From 86319e2add206e5747aa56f29b4607ca593acdbc Mon Sep 17 00:00:00 2001 From: Devin Smith Date: Wed, 25 Aug 2021 11:52:09 -0700 Subject: [PATCH] Update code style (#1121) * Update style guide * Reapply style guide This reverts commit 8a5099f1071dd90d429f0036fb932155460270aa. * Fix test after code style update --- .../java/io/deephaven/base/ArrayUtil.java | 36 +- .../main/java/io/deephaven/base/Base64.java | 41 +- .../java/io/deephaven/base/ClassUtil.java | 6 +- .../java/io/deephaven/base/CompareUtils.java | 38 +- .../java/io/deephaven/base/Encodeable.java | 4 +- .../java/io/deephaven/base/FIFOMutex.java | 11 +- .../java/io/deephaven/base/FIFOSemaphore.java | 7 +- .../java/io/deephaven/base/FileUtils.java | 63 +- .../main/java/io/deephaven/base/Function.java | 4 +- .../io/deephaven/base/LockFreeArrayQueue.java | 54 +- .../io/deephaven/base/LongRingBuffer.java | 8 +- .../base/LowGarbageArrayIntegerMap.java | 11 +- .../deephaven/base/LowGarbageArrayList.java | 12 +- .../io/deephaven/base/LowGarbageArraySet.java | 20 +- .../java/io/deephaven/base/Procedure.java | 4 +- .../java/io/deephaven/base/RAPriQueue.java | 13 +- .../java/io/deephaven/base/RingBuffer.java | 4 +- .../java/io/deephaven/base/SafeCloneable.java | 8 +- .../java/io/deephaven/base/StringUtils.java | 3 +- .../java/io/deephaven/base/UnfairMutex.java | 19 +- .../io/deephaven/base/UnfairSemaphore.java | 7 +- .../deephaven/base/WeakReferenceManager.java | 12 +- .../io/deephaven/base/array/FastArray.java | 67 +- .../io/deephaven/base/array/FastArrayExt.java | 16 +- .../base/array/FastBooleanArray.java | 10 +- .../deephaven/base/array/FastByteArray.java | 10 +- .../deephaven/base/array/FastCharArray.java | 10 +- .../deephaven/base/array/FastDoubleArray.java | 10 +- .../deephaven/base/array/FastFloatArray.java | 10 +- .../io/deephaven/base/array/FastIntArray.java | 10 +- .../deephaven/base/array/FastLongArray.java | 10 +- .../deephaven/base/array/FastShortArray.java | 10 +- .../base/array/PrimitiveReaderImpl.java 
| 13 +- .../base/cache/KeyedObjectCache.java | 50 +- .../OpenAddressedCanonicalizationCache.java | 58 +- .../deephaven/base/cache/RetentionCache.java | 15 +- .../classloaders/MapBackedClassLoader.java | 6 +- .../java/io/deephaven/base/clock/Clock.java | 9 +- .../deephaven/base/clock/TimeConstants.java | 7 +- .../base/formatters/EnumFormatter.java | 39 +- .../base/formatters/FormatBitSet.java | 3 +- .../java/io/deephaven/base/log/LogOutput.java | 123 +- .../base/log/LogOutputAppendable.java | 4 +- .../io/deephaven/base/map/FastArrayMap.java | 19 +- .../base/map/FastArrayMapLongToObject.java | 27 +- .../io/deephaven/base/map/KeyValuePair.java | 2 +- .../base/map/KeyValuePairLongToObject.java | 2 +- .../java/io/deephaven/base/pool/Pool.java | 27 +- .../base/pool/ThreadSafeFixedSizePool.java | 32 +- .../pool/ThreadSafeLenientFixedSizePool.java | 40 +- .../deephaven/base/queue/ConcurrentQueue.java | 9 +- .../base/queue/ProducerConsumer.java | 11 +- .../base/reference/CleanupReference.java | 3 +- .../base/reference/DowngradableReference.java | 7 +- .../base/reference/WeakCleanupReference.java | 3 +- .../base/reference/WeakReferenceWrapper.java | 15 +- .../io/deephaven/base/stats/Composite.java | 14 +- .../java/io/deephaven/base/stats/Counter.java | 28 +- .../java/io/deephaven/base/stats/Group.java | 11 +- .../deephaven/base/stats/HistogramPower2.java | 31 +- .../deephaven/base/stats/HistogramState.java | 37 +- .../java/io/deephaven/base/stats/History.java | 15 +- .../base/stats/ItemUpdateListener.java | 8 +- .../java/io/deephaven/base/stats/State.java | 17 +- .../java/io/deephaven/base/stats/Stats.java | 79 +- .../java/io/deephaven/base/stats/Value.java | 8 +- .../deephaven/base/string/EncodingInfo.java | 35 +- .../cache/AbstractCompressedString.java | 36 +- .../string/cache/AlwaysCreateStringCache.java | 13 +- .../ByteArrayCharSequenceAdapterImpl.java | 3 +- .../ByteBufferCharSequenceAdapterImpl.java | 3 +- .../string/cache/CharSequenceAdapter.java | 30 +- 
.../cache/CharSequenceAdapterBuilder.java | 12 +- .../base/string/cache/CharSequenceUtils.java | 44 +- .../base/string/cache/CompressedString.java | 3 +- .../cache/ConcurrentBoundedStringCache.java | 22 +- .../cache/ConcurrentUnboundedStringCache.java | 53 +- .../string/cache/MappedCompressedString.java | 58 +- ...OpenAddressedWeakUnboundedStringCache.java | 24 +- .../base/string/cache/StringAlike.java | 18 +- .../base/string/cache/StringCache.java | 13 +- .../string/cache/StringCacheTypeAdapter.java | 3 +- ...gCacheTypeAdapterCompressedStringImpl.java | 8 +- ...TypeAdapterMappedCompressedStringImpl.java | 7 +- .../StringCacheTypeAdapterStringImpl.java | 3 +- .../base/string/cache/StringCompatible.java | 36 +- .../io/deephaven/base/system/AsyncSystem.java | 51 +- .../base/system/PrintStreamGlobals.java | 12 +- .../base/system/StandardStreamReceiver.java | 4 +- .../base/system/StandardStreamState.java | 22 +- .../java/io/deephaven/base/text/Convert.java | 40 +- .../deephaven/base/text/TimestampBuffer.java | 30 +- .../base/text/TimestampBufferMicros.java | 47 +- .../java/io/deephaven/base/verify/Assert.java | 119 +- .../base/verify/ExceptionMessageUtil.java | 383 +- .../io/deephaven/base/verify/Require.java | 894 ++-- .../base/verify/RequirementFailure.java | 19 +- .../io/log/impl/LogOutputStringImpl.java | 8 +- .../java/io/deephaven/base/ArrayUtilTest.java | 40 +- .../io/deephaven/base/ExerciseFIFOMutex.java | 7 +- .../base/ExerciseLockFreeArrayQueue.java | 126 +- .../base/MemoryModelVolatileTest.java | 3 +- .../io/deephaven/base/RingBufferTest.java | 24 +- .../io/deephaven/base/TestStringUtils.java | 3 +- .../deephaven/base/array/FastArrayTest.java | 7 +- .../base/array/FastBooleanArrayTest.java | 6 +- .../base/array/FastByteArrayTest.java | 6 +- .../base/array/FastCharArrayTest.java | 6 +- .../base/array/FastDoubleArrayTest.java | 6 +- .../base/array/FastFloatArrayTest.java | 6 +- .../base/array/FastIntArrayTest.java | 6 +- .../base/array/FastLongArrayTest.java | 
6 +- .../base/array/FastShortArrayTest.java | 6 +- .../io/deephaven/base/array/TrialClassA.java | 15 +- .../base/cache/TestKeyedObjectCache.java | 43 +- ...estOpenAddressedCanonicalizationCache.java | 40 +- .../map/FastArrayMapLongToObjectTest.java | 42 +- .../deephaven/base/map/FastArrayMapTest.java | 17 +- .../map/KeyValuePairLongToObjectTest.java | 31 +- .../deephaven/base/map/KeyValuePairTest.java | 34 +- .../pool/TestThreadSafeFixedSizePool.java | 26 +- .../TestThreadSafeLenientFixedSizePool.java | 38 +- .../base/stats/HistogramPower2Test.java | 37 +- .../base/stats/HistogramStateTest.java | 35 +- .../io/deephaven/base/stats/TestValue.java | 3 +- .../cache/TestCharSequenceAdapterBuilder.java | 16 +- .../string/cache/TestCompressedString.java | 9 +- .../TestConcurrentBoundedStringCache.java | 32 +- .../TestConcurrentUnboundedStringCache.java | 82 +- .../cache/TestMappedCompressedString.java | 26 +- .../base/testing/BaseArrayTestCase.java | 21 +- .../base/testing/BaseCachedJMockTestCase.java | 48 +- .../base/testing/CheckedMessageRegexp.java | 7 +- .../base/testing/LoggingRecorder.java | 37 +- .../deephaven/base/testing/MTTestSupport.java | 12 +- .../io/deephaven/base/testing/Matchers.java | 12 +- .../base/testing/SimpleTestSupport.java | 50 +- .../base/testing/TimeLimitedTest.java | 9 +- .../io/deephaven/base/text/TestConvert.java | 32 +- .../benchmarking/BenchmarkTools.java | 74 +- .../ConcurrentResourceProfiler.java | 21 +- .../benchmarking/CsvResultWriter.java | 10 +- .../benchmarking/ResourceProfiler.java | 24 +- .../generator/AbstractNumColumnGenerator.java | 3 +- .../generator/ColumnGenerator.java | 13 +- .../generator/EnumStringColumnGenerator.java | 14 +- .../generator/FuzzyNumColumnGenerator.java | 7 +- .../SequentialNumColumnGenerator.java | 3 +- .../generator/StringGenerator.java | 8 +- .../generator/random/ExtendedRandom.java | 8 +- .../impl/AbstractBenchmarkTable.java | 7 +- .../impl/AbstractBenchmarkTableBuilder.java | 12 +- 
.../impl/AbstractGeneratedTable.java | 16 +- .../impl/InMemoryBenchmarkTable.java | 14 +- .../impl/InMemoryBenchmarkTableBuilder.java | 4 +- .../impl/PersistentBenchmarkTableBuilder.java | 10 +- .../impl/TableBackedBenchmarkTable.java | 10 +- .../TableBackedBenchmarkTableBuilder.java | 4 +- .../benchmarking/runner/BenchmarkRunner.java | 63 +- .../benchmarking/runner/StatsGatherer.java | 12 +- .../runner/TableBenchmarkState.java | 11 +- .../impl/TestTableGeneration.java | 45 +- .../plotdownsampling/BucketState.java | 148 +- .../DownsampleChunkContext.java | 48 +- .../ReplicateDownsamplingValueTrackers.java | 6 +- .../plotdownsampling/RunChartDownsample.java | 316 +- .../deephaven/table/sort/SortDirective.java | 11 +- .../treetable/AbstractTreeSnapshotImpl.java | 288 +- .../treetable/BitSetColumnSource.java | 6 +- .../deephaven/treetable/RollupDefinition.java | 130 +- .../treetable/RollupSnapshotImpl.java | 84 +- .../io/deephaven/treetable/SnapshotState.java | 275 +- .../io/deephaven/treetable/TableDetails.java | 9 +- .../treetable/TreeSnapshotQuery.java | 44 +- .../treetable/TreeSnapshotResult.java | 38 +- .../TreeTableClientTableManager.java | 36 +- .../treetable/TreeTableConstants.java | 3 +- .../treetable/TreeTableSnapshotImpl.java | 48 +- .../treetable/SnapshotStateTest.java | 16 +- .../treetable/TreeSnapshotQueryTest.java | 321 +- .../TreeTableClientTableManagerTest.java | 19 +- .../compilertools/CompilerTools.java | 623 +-- .../compilertools/ReplicatePrimitiveCode.java | 376 +- .../compilertools/ReplicateUtilities.java | 77 +- .../compilertools/TestCompilerTools.java | 84 +- .../configuration/Configuration.java | 218 +- .../configuration/ConfigurationContext.java | 44 +- .../configuration/ConfigurationScope.java | 10 +- .../configuration/ParsedProperties.java | 334 +- .../deephaven/configuration/PropertyFile.java | 67 +- .../configuration/PropertyHistory.java | 17 +- .../PropertyInputStreamLoader.java | 4 +- .../PropertyInputStreamLoaderFactory.java | 34 +- 
.../PropertyInputStreamLoaderTraditional.java | 4 +- .../PropertyInputStreamLoaderFactoryTest.java | 9 +- ...pertyInputStreamLoaderTraditionalTest.java | 8 +- .../configuration/TestConfiguration.java | 253 +- .../rsp/container/AbortableRangeConsumer.java | 5 +- .../utils/rsp/container/ArrayContainer.java | 225 +- .../ArrayContainerRangeIterator.java | 11 +- .../v2/utils/rsp/container/BatchIterator.java | 3 +- .../utils/rsp/container/BitmapContainer.java | 73 +- .../BitmapContainerRangeIterator.java | 23 +- .../db/v2/utils/rsp/container/Container.java | 398 +- .../ContainerShortBatchIterator.java | 7 +- .../v2/utils/rsp/container/ContainerUtil.java | 144 +- .../utils/rsp/container/EmptyContainer.java | 3 +- .../rsp/container/ImmutableContainer.java | 2 +- .../v2/utils/rsp/container/RangeConsumer.java | 5 +- .../v2/utils/rsp/container/RangeIterator.java | 20 +- .../v2/utils/rsp/container/RunContainer.java | 260 +- .../container/RunContainerRangeIterator.java | 3 +- .../rsp/container/SearchRangeIterator.java | 60 +- .../rsp/container/ShortAdvanceIterator.java | 16 +- .../v2/utils/rsp/container/ShortConsumer.java | 7 +- .../v2/utils/rsp/container/ShortIterator.java | 4 +- .../rsp/container/ShortRangeConsumer.java | 11 +- .../rsp/container/SingleRangeContainer.java | 21 +- .../rsp/container/SingletonContainer.java | 15 +- .../rsp/container/TwoValuesContainer.java | 19 +- .../rsp/container/ContainerTestCommon.java | 220 +- .../rsp/container/TestArrayContainer.java | 35 +- .../rsp/container/TestBitmapContainer.java | 15 +- .../v2/utils/rsp/container/TestContainer.java | 33 +- .../rsp/container/TestContainerBase.java | 5 +- .../rsp/container/TestContainerUtil.java | 43 +- .../TestLargeContainersRandomOps.java | 54 +- .../rsp/container/TestRangeCardinality.java | 3 +- .../utils/rsp/container/TestRunContainer.java | 51 +- .../rsp/container/TestSingletonContainer.java | 15 +- .../TestSmallContainersRandomOps.java | 73 +- .../rsp/container/TestTwoValuesContainer.java | 9 +- 
.../tables/select/PythonMatchFilterTest.java | 11 +- .../db/util/TestWorkerPythonEnvironment.java | 19 +- .../db/v2/select/TestConditionFilter.java | 172 +- .../iris/db/v2/utils/SmallIndexCreation.java | 6 +- .../deephaven/benchmark/db/AjBenchmark.java | 132 +- .../deephaven/benchmark/db/ByBenchmark.java | 48 +- .../ConditionFilterMultipleColumnsBench.java | 9 +- ...onFilterMultipleColumnsFillChunkBench.java | 6 +- .../benchmark/db/IncrementalBenchmark.java | 47 +- .../benchmark/db/LastByBenchmark.java | 81 +- .../benchmark/db/MatchFilterBenchmark.java | 13 +- .../db/MultiTableKeyedOperations.java | 13 +- .../benchmark/db/NaturalJoinBenchmark.java | 51 +- .../db/NaturalJoinMultipleColumnsBench.java | 21 +- ...uralJoinMultipleColumnsFillChunkBench.java | 13 +- .../benchmark/db/PercentileByBenchmark.java | 56 +- .../benchmark/db/RangeFilterBenchmark.java | 23 +- .../db/RedirectedColumnSourceBench.java | 30 +- .../benchmark/db/RedirectionBenchBase.java | 16 +- .../benchmark/db/RedirectionIndexBench.java | 25 +- .../db/RegionedColumnSourceBenchmark.java | 48 +- .../benchmark/db/ResultSizeProfiler.java | 11 +- .../SingleTableKeyedDateTimeOperations.java | 12 +- .../db/SingleTableKeyedOperations.java | 9 +- .../db/SingleTableKeyedStringOperations.java | 12 +- .../benchmark/db/SingleTableOperations.java | 6 +- .../deephaven/benchmark/db/SortBenchmark.java | 53 +- .../benchmark/db/SortMultiBenchmark.java | 31 +- .../benchmark/db/SparseSelectBenchmark.java | 39 +- .../benchmark/db/SumByBenchmark.java | 109 +- .../db/TestSortIncrementalPerformance.java | 14 +- .../db/UngroupedColumnSourceBench.java | 25 +- .../benchmark/db/UpdateBenchmark.java | 26 +- .../benchmark/db/WhereBenchmark.java | 43 +- .../partition/PartitionKernelBenchmark.java | 36 +- .../v2/sort/timsort/SortKernelBenchmark.java | 19 +- .../v2/sources/ColumnSourceFillBenchmark.java | 16 +- .../ReplicateColumnSourceFillBenchmark.java | 3 +- .../db/v2/utils/BuildSortedRangesBench.java | 3 +- 
.../v2/utils/ComplementRangeIteratorTest.java | 39 +- .../deephaven/db/v2/utils/IndexCreation.java | 7 +- .../deephaven/db/v2/utils/IndexIteration.java | 62 +- .../db/v2/utils/IndexIterationRaw.java | 28 +- .../db/v2/utils/OrderedKeysBench.java | 2 +- .../db/v2/utils/ShiftAwareBench.java | 17 +- .../io/deephaven/db/v2/utils/TestValues.java | 24 +- .../io/deephaven/db/NotSortableException.java | 10 +- .../db/exceptions/ArgumentException.java | 3 +- .../db/exceptions/ExpressionException.java | 3 +- .../db/exceptions/OperationException.java | 3 +- .../db/exceptions/SizeException.java | 25 +- .../db/exceptions/StateException.java | 3 +- .../db/exceptions/TableAccessException.java | 12 +- .../db/exceptions/TableIOException.java | 9 +- .../exceptions/UncheckedTableException.java | 3 +- .../db/tablelogger/ProcessInfoLogLogger.java | 24 +- .../tablelogger/ProcessMetricsLogLogger.java | 68 +- .../QueryOperationPerformanceLogLogger.java | 68 +- .../QueryPerformanceLogLogger.java | 67 +- .../UpdatePerformanceLogLogger.java | 83 +- .../io/deephaven/db/tables/CodecLookup.java | 49 +- .../deephaven/db/tables/ColumnDefinition.java | 112 +- .../io/deephaven/db/tables/DataColumn.java | 33 +- .../java/io/deephaven/db/tables/SortPair.java | 11 +- .../db/tables/StringSetArrayWrapper.java | 2 +- .../deephaven/db/tables/StringSetWrapper.java | 3 +- .../java/io/deephaven/db/tables/Table.java | 1222 +++-- .../deephaven/db/tables/TableCreatorImpl.java | 24 +- .../deephaven/db/tables/TableDefinition.java | 189 +- .../deephaven/db/tables/dbarrays/DbArray.java | 6 +- .../db/tables/dbarrays/DbArrayBase.java | 26 +- .../db/tables/dbarrays/DbArrayDirect.java | 3 +- .../db/tables/dbarrays/DbArraySlice.java | 42 +- .../db/tables/dbarrays/DbSubArray.java | 11 +- .../db/tables/dbarrays/ReplicateDbArrays.java | 66 +- .../lang/DBLanguageFunctionGenerator.java | 712 ++- .../tables/lang/DBLanguageFunctionUtil.java | 2740 ++++------ .../db/tables/lang/DBLanguageParser.java | 685 ++- 
.../db/tables/libs/QueryLibrary.java | 25 +- .../db/tables/libs/QueryLibraryImports.java | 9 +- .../QueryLibraryImportsConfiguration.java | 8 +- .../libs/QueryLibraryImportsDefaults.java | 148 +- .../tables/libs/QueryLibraryImportsImpl.java | 2 +- .../deephaven/db/tables/libs/StringSet.java | 3 +- .../db/tables/libs/StringSetFromEnum.java | 11 +- .../db/tables/live/LiveTableMonitor.java | 577 +-- .../db/tables/live/LiveTableMonitorLock.java | 81 +- .../tables/live/LiveTableRefreshCombiner.java | 16 +- .../db/tables/live/NotificationQueue.java | 36 +- .../db/tables/live/NotificationWrapper.java | 7 +- .../live/NullIndexUpdateNotification.java | 4 +- .../db/tables/live/WaitNotification.java | 21 +- .../db/tables/remote/AsyncMethod.java | 5 +- .../tables/remote/preview/ArrayPreview.java | 7 +- .../remote/preview/ColumnPreviewManager.java | 44 +- .../tables/remote/preview/DisplayWrapper.java | 6 +- .../db/tables/remote/preview/PreviewType.java | 3 +- .../db/tables/select/AjMatchPairFactory.java | 113 +- .../deephaven/db/tables/select/MatchPair.java | 57 +- .../db/tables/select/MatchPairFactory.java | 17 +- .../io/deephaven/db/tables/select/Param.java | 19 +- .../db/tables/select/QueryScope.java | 57 +- .../db/tables/select/SelectColumnFactory.java | 76 +- .../tables/select/SelectFactoryConstants.java | 13 +- .../db/tables/select/SelectFilterFactory.java | 295 +- .../io/deephaven/db/tables/select/Utils.java | 23 +- .../db/tables/select/WouldMatchPair.java | 2 +- .../tables/select/WouldMatchPairFactory.java | 7 +- .../deephaven/db/tables/utils/ArrayUtils.java | 19 +- .../db/tables/utils/CacheLogUtils.java | 7 +- .../CachedStringSetWrapperEnumFormatter.java | 7 +- .../db/tables/utils/ColumnsSpecHelper.java | 4 +- .../deephaven/db/tables/utils/CsvHelpers.java | 382 +- .../db/tables/utils/CsvParserFormat.java | 20 +- .../deephaven/db/tables/utils/DBDateTime.java | 29 +- .../db/tables/utils/DBDateTimeFormatter.java | 8 +- .../db/tables/utils/DBDateTimeFormatters.java | 46 +- 
.../db/tables/utils/DBTimeUtils.java | 1032 ++-- .../deephaven/db/tables/utils/DBTimeZone.java | 7 +- .../db/tables/utils/DbTimeConverter.java | 7 +- .../db/tables/utils/ExpressionParser.java | 24 +- .../db/tables/utils/FigureWidgetMarker.java | 4 +- .../utils/InMemoryBlockTableWriter.java | 3 +- .../db/tables/utils/LayoutHintBuilder.java | 63 +- .../utils/LiveWidgetVisibilityProvider.java | 14 +- .../db/tables/utils/PandasWidgetMarker.java | 4 +- .../db/tables/utils/ParquetTools.java | 402 +- .../utils/QueryPerformanceLogThreshold.java | 23 +- .../tables/utils/QueryPerformanceNugget.java | 67 +- .../utils/QueryPerformanceRecorder.java | 132 +- .../deephaven/db/tables/utils/TableDiff.java | 225 +- .../db/tables/utils/TableShowTools.java | 24 +- .../deephaven/db/tables/utils/TableTools.java | 526 +- .../tables/utils/TableToolsMergeHelper.java | 69 +- .../db/tables/utils/WhereClause.java | 6 +- .../db/tables/utils/WindowCheck.java | 148 +- .../verify/AppendOnlyAssertionFailure.java | 3 +- ...yAssertionInstrumentedListenerAdapter.java | 12 +- .../tables/verify/SortedAssertionFailure.java | 11 +- ...dAssertionInstrumentedListenerAdapter.java | 52 +- .../db/tables/verify/TableAssertions.java | 108 +- .../db/util/AbstractScriptSession.java | 52 +- .../java/io/deephaven/db/util/ClassList.java | 11 +- .../db/util/ColumnFormattingValues.java | 5 +- .../io/deephaven/db/util/DBColorUtil.java | 12 +- .../io/deephaven/db/util/DBColorUtilImpl.java | 170 +- .../db/util/DynamicCompileUtils.java | 30 +- .../db/util/GroovyDeephavenSession.java | 303 +- .../db/util/GroovyExceptionWrapper.java | 33 +- .../java/io/deephaven/db/util/IsWidget.java | 12 +- .../io/deephaven/db/util/IterableUtils.java | 8 +- .../db/util/NoLanguageDeephavenSession.java | 12 +- .../io/deephaven/db/util/PrintListener.java | 16 +- .../db/util/PythonDeephavenSession.java | 42 +- .../deephaven/db/util/PythonEvaluatorJpy.java | 18 +- .../deephaven/db/util/PythonLogAdapter.java | 19 +- 
.../io/deephaven/db/util/PythonScope.java | 35 +- .../deephaven/db/util/PythonScopeJpyImpl.java | 23 +- .../db/util/ScalaDeephavenSession.java | 34 +- .../io/deephaven/db/util/ScriptFinder.java | 20 +- .../io/deephaven/db/util/ScriptSession.java | 41 +- .../java/io/deephaven/db/util/SortedBy.java | 85 +- .../io/deephaven/db/util/SourceClosure.java | 8 +- .../io/deephaven/db/util/TickSuppressor.java | 254 +- .../io/deephaven/db/util/ToMapListener.java | 60 +- .../db/util/WorkerPythonEnvironment.java | 45 +- .../deephaven/db/util/caching/C14nUtil.java | 69 +- .../db/util/config/InputTableRowSetter.java | 48 +- .../db/util/config/MutableInputTable.java | 16 +- .../io/deephaven/db/util/file/FileHandle.java | 71 +- .../db/util/file/FileHandleAccessor.java | 12 +- .../db/util/file/FileHandleFactory.java | 40 +- .../util/file/TrackedFileHandleFactory.java | 53 +- .../io/deephaven/db/util/jpy/JpyInit.java | 7 +- .../deephaven/db/util/liveness/Liveness.java | 43 +- .../db/util/liveness/LivenessArtifact.java | 19 +- .../util/liveness/LivenessDebugException.java | 3 +- .../db/util/liveness/LivenessManager.java | 7 +- .../db/util/liveness/LivenessNode.java | 3 +- .../db/util/liveness/LivenessReferent.java | 11 +- .../db/util/liveness/LivenessScope.java | 45 +- .../db/util/liveness/LivenessScopeStack.java | 46 +- .../liveness/PermanentLivenessManager.java | 7 +- .../ReferenceCountedLivenessNode.java | 43 +- .../liveness/ReleasableLivenessManager.java | 10 +- .../liveness/RetainedReferenceTracker.java | 150 +- .../liveness/SingletonLivenessManager.java | 18 +- .../CleanupReferenceProcessorInstance.java | 23 +- .../util/scripts/ConsoleScriptPathLoader.java | 7 +- .../util/scripts/MultiScriptPathLoader.java | 33 +- .../db/util/scripts/ScriptPathLoader.java | 19 +- .../db/util/scripts/ScriptRepository.java | 300 +- .../StateOverrideScriptPathLoader.java | 3 +- .../serialization/SerializationUtils.java | 19 +- .../StreamingExternalizable.java | 47 +- 
.../deephaven/db/util/string/StringUtils.java | 113 +- .../deephaven/db/util/tuples/ArrayTuple.java | 22 +- .../db/util/tuples/CanonicalizableTuple.java | 4 +- .../deephaven/db/util/tuples/EmptyTuple.java | 3 +- .../db/util/tuples/TupleCodeGenerator.java | 663 ++- .../io/deephaven/db/v2/AsOfJoinHelper.java | 1553 +++--- .../io/deephaven/db/v2/AsOfStampContext.java | 26 +- .../java/io/deephaven/db/v2/BaseTable.java | 624 ++- .../io/deephaven/db/v2/BucketingContext.java | 104 +- .../db/v2/ColumnComparatorFactory.java | 82 +- .../db/v2/ColumnRenderersBuilder.java | 18 +- .../deephaven/db/v2/ColumnSourceManager.java | 18 +- .../io/deephaven/db/v2/CrossJoinHelper.java | 974 ++-- .../db/v2/CrossJoinModifiedSlotTracker.java | 59 +- .../io/deephaven/db/v2/DeferredViewTable.java | 104 +- .../java/io/deephaven/db/v2/DynamicNode.java | 31 +- .../java/io/deephaven/db/v2/DynamicTable.java | 91 +- .../db/v2/EvenlyDividedTableMap.java | 46 +- .../io/deephaven/db/v2/FlattenOperation.java | 44 +- .../io/deephaven/db/v2/HierarchicalTable.java | 98 +- .../io/deephaven/db/v2/InMemoryTable.java | 30 +- .../io/deephaven/db/v2/IndexedDataColumn.java | 82 +- .../deephaven/db/v2/InstrumentedListener.java | 13 +- .../db/v2/InstrumentedListenerAdapter.java | 35 +- .../db/v2/InstrumentedListenerBase.java | 88 +- .../db/v2/InstrumentedShiftAwareListener.java | 6 +- ...InstrumentedShiftAwareListenerAdapter.java | 42 +- .../java/io/deephaven/db/v2/JoinControl.java | 37 +- .../deephaven/db/v2/JoinRedirectionIndex.java | 12 +- .../deephaven/db/v2/KeyedTableListener.java | 9 +- .../io/deephaven/db/v2/LazySnapshotTable.java | 4 +- .../db/v2/LazySnapshotTableProvider.java | 7 +- .../java/io/deephaven/db/v2/Listener.java | 3 +- .../java/io/deephaven/db/v2/ListenerBase.java | 2 +- .../io/deephaven/db/v2/ListenerRecorder.java | 20 +- .../io/deephaven/db/v2/LocalTableMap.java | 158 +- .../deephaven/db/v2/MemoizedOperationKey.java | 84 +- .../io/deephaven/db/v2/MergedListener.java | 77 +- 
.../io/deephaven/db/v2/ModifiedColumnSet.java | 115 +- .../io/deephaven/db/v2/NaturalJoinHelper.java | 616 +-- .../db/v2/NaturalJoinModifiedSlotTracker.java | 43 +- .../db/v2/NoSuchColumnException.java | 6 +- .../db/v2/NotificationStepReceiver.java | 3 +- .../db/v2/PartitionAwareSourceTable.java | 271 +- .../java/io/deephaven/db/v2/QueryTable.java | 2416 ++++----- .../io/deephaven/db/v2/RedefinableTable.java | 50 +- .../deephaven/db/v2/ReplicateHashTable.java | 265 +- .../io/deephaven/db/v2/ReverseLookup.java | 21 +- .../db/v2/ReverseLookupListener.java | 104 +- .../io/deephaven/db/v2/ReverseOperation.java | 75 +- .../db/v2/RollupAttributeCopier.java | 17 +- .../java/io/deephaven/db/v2/RollupInfo.java | 13 +- .../deephaven/db/v2/ShiftAwareListener.java | 47 +- .../db/v2/ShiftAwareSwapListener.java | 6 +- .../io/deephaven/db/v2/SimpleSourceTable.java | 40 +- .../deephaven/db/v2/SliceLikeOperation.java | 49 +- .../java/io/deephaven/db/v2/SortHelpers.java | 284 +- .../java/io/deephaven/db/v2/SortListener.java | 376 +- .../io/deephaven/db/v2/SortOperation.java | 167 +- .../db/v2/SortedColumnsAttribute.java | 11 +- .../java/io/deephaven/db/v2/SourceTable.java | 168 +- .../db/v2/SourceTableComponentFactory.java | 6 +- .../io/deephaven/db/v2/SourceTableMap.java | 97 +- .../java/io/deephaven/db/v2/SparseSelect.java | 372 +- .../io/deephaven/db/v2/StreamTableTools.java | 149 +- .../java/io/deephaven/db/v2/SwapListener.java | 11 +- .../io/deephaven/db/v2/SwapListenerBase.java | 125 +- .../db/v2/TableKeyStateRegistry.java | 10 +- .../java/io/deephaven/db/v2/TableMap.java | 55 +- .../java/io/deephaven/db/v2/TableMapImpl.java | 6 +- .../deephaven/db/v2/TableMapProxyHandler.java | 324 +- .../io/deephaven/db/v2/TableMapSupplier.java | 37 +- .../db/v2/TableMapTransformThreadPool.java | 6 +- .../io/deephaven/db/v2/TableSupplier.java | 144 +- .../deephaven/db/v2/TableUpdateValidator.java | 128 +- .../java/io/deephaven/db/v2/TimeTable.java | 22 +- 
.../deephaven/db/v2/TotalsTableBuilder.java | 137 +- .../db/v2/TransformTablesFunction.java | 3 +- .../db/v2/TransformableTableMap.java | 56 +- .../io/deephaven/db/v2/TreeTableFilter.java | 187 +- .../db/v2/TreeTableOrphanPromoter.java | 524 +- .../io/deephaven/db/v2/UncoalescedTable.java | 40 +- .../io/deephaven/db/v2/UpdatableTable.java | 26 +- .../deephaven/db/v2/WouldMatchOperation.java | 168 +- .../db/v2/by/AbsSumStateFactory.java | 2 +- .../by/AddOnlyFirstOrLastChunkedOperator.java | 16 +- .../by/AddOnlyMinMaxByStateFactoryImpl.java | 5 +- .../java/io/deephaven/db/v2/by/AggType.java | 4 +- .../db/v2/by/AggregationContext.java | 142 +- .../db/v2/by/AggregationContextFactory.java | 16 +- .../v2/by/AggregationContextTransformer.java | 13 +- .../db/v2/by/AggregationControl.java | 21 +- .../v2/by/AggregationFormulaStateFactory.java | 2 +- .../deephaven/db/v2/by/AggregationHelper.java | 677 ++- .../v2/by/AggregationIndexStateFactory.java | 3 +- .../db/v2/by/AggregationStateFactory.java | 10 +- .../db/v2/by/ApproximatePercentile.java | 174 +- .../java/io/deephaven/db/v2/by/AvgState.java | 6 +- .../deephaven/db/v2/by/AvgStateFactory.java | 2 +- .../deephaven/db/v2/by/AvgStateWithNan.java | 8 +- ...BaseAddOnlyFirstOrLastChunkedOperator.java | 55 +- .../BaseStreamFirstOrLastChunkedOperator.java | 100 +- .../v2/by/BigDecimalChunkedAvgOperator.java | 58 +- .../v2/by/BigDecimalChunkedReAvgOperator.java | 51 +- .../v2/by/BigDecimalChunkedReVarOperator.java | 58 +- .../v2/by/BigDecimalChunkedSumOperator.java | 90 +- .../v2/by/BigDecimalChunkedVarOperator.java | 70 +- .../db/v2/by/ByAggregationFactory.java | 37 +- .../deephaven/db/v2/by/ByChunkedOperator.java | 188 +- .../v2/by/ByExternalAggregationFactory.java | 85 +- .../db/v2/by/ByExternalChunkedOperator.java | 650 +-- .../by/ChunkedOperatorAggregationHelper.java | 1405 +++--- .../v2/by/ChunkedWeightedAverageOperator.java | 109 +- .../db/v2/by/ComboAggregateFactory.java | 865 ++-- 
...mutedStreamFirstOrLastChunkedOperator.java | 57 +- .../db/v2/by/CountAggregationFactory.java | 9 +- .../db/v2/by/CountAggregationOperator.java | 30 +- .../db/v2/by/CountDistinctStateFactory.java | 2 +- .../db/v2/by/DistinctStateFactory.java | 6 +- .../db/v2/by/FirstByStateFactoryImpl.java | 14 +- .../by/FirstOrLastByAggregationFactory.java | 24 +- .../db/v2/by/FirstOrLastChunkedOperator.java | 297 +- .../db/v2/by/FormulaAggregationFactory.java | 69 +- .../db/v2/by/FormulaChunkedOperator.java | 235 +- .../db/v2/by/FpChunkedNonNormalCounter.java | 53 +- .../db/v2/by/FreezeByAggregationFactory.java | 24 +- .../db/v2/by/HashTableColumnSource.java | 239 +- ...IncrementalByAggregationUpdateTracker.java | 211 +- .../IterativeChunkedAggregationOperator.java | 198 +- .../by/IterativeChunkedOperatorFactory.java | 2 +- .../db/v2/by/IterativeIndexStateFactory.java | 15 +- .../v2/by/IterativeOperatorStateFactory.java | 38 +- .../db/v2/by/KeyOnlyAggregationFactory.java | 10 +- .../by/KeyOnlyFirstOrLastByStateFactory.java | 8 +- .../db/v2/by/LastByStateFactoryImpl.java | 14 +- .../db/v2/by/MinMaxByStateFactoryImpl.java | 12 +- .../v2/by/MinMaxIterativeOperatorFactory.java | 2 +- .../v2/by/NonKeyColumnAggregationFactory.java | 36 +- .../by/NullColumnAggregationTransformer.java | 4 +- .../v2/by/PercentileByStateFactoryImpl.java | 2 +- .../PercentileIterativeOperatorFactory.java | 5 +- .../db/v2/by/ReaggregatableStatefactory.java | 10 +- .../db/v2/by/ReplicateOperators.java | 86 +- ...pSmartKeyColumnDuplicationTransformer.java | 3 +- ...SortedFirstOrLastByAggregationFactory.java | 36 +- .../v2/by/SortedFirstOrLastByFactoryImpl.java | 9 +- .../by/SortedFirstOrLastChunkedOperator.java | 211 +- .../by/StaticFirstOrLastChunkedOperator.java | 16 +- .../java/io/deephaven/db/v2/by/StdState.java | 8 +- .../deephaven/db/v2/by/StdStateFactory.java | 2 +- .../deephaven/db/v2/by/StdStateWithNan.java | 3 +- .../db/v2/by/StreamFirstChunkedOperator.java | 91 +- 
.../db/v2/by/StreamLastChunkedOperator.java | 31 +- .../db/v2/by/SumBigDecimalChunk.java | 15 +- .../deephaven/db/v2/by/SumStateFactory.java | 2 +- .../db/v2/by/TDigestPercentileOperator.java | 53 +- .../by/TrackingFirstByStateFactoryImpl.java | 14 +- .../v2/by/TrackingLastByStateFactoryImpl.java | 14 +- .../db/v2/by/UniqueStateFactory.java | 15 +- .../java/io/deephaven/db/v2/by/VarState.java | 8 +- .../deephaven/db/v2/by/VarStateFactory.java | 2 +- .../deephaven/db/v2/by/VarStateWithNan.java | 3 +- .../db/v2/by/WeightedAverageOperator.java | 51 +- .../WeightedAverageSumAggregationFactory.java | 57 +- .../db/v2/by/WeightedSumStateFactoryImpl.java | 2 +- .../BucketSsmDistinctContext.java | 2 +- .../BucketSsmDistinctRollupContext.java | 2 +- .../DbDateTimeSsmSourceWrapper.java | 4 +- .../DistinctOperatorFactory.java | 197 +- .../SsmBackedColumnSource.java | 24 +- .../ssmcountdistinct/SsmDistinctContext.java | 2 +- .../SsmDistinctRollupContext.java | 5 +- .../ssmminmax/SsmChunkedMinMaxOperator.java | 125 +- .../DateTimePercentileTypeHelper.java | 35 +- .../SsmChunkedPercentileOperator.java | 174 +- .../db/v2/dbarrays/DbArrayColumnWrapper.java | 25 +- .../v2/dbarrays/DbPrevArrayColumnWrapper.java | 26 +- .../ReplicateDbArrayColumnWrappers.java | 6 +- .../deephaven/db/v2/hashing/ChunkEquals.java | 78 +- .../deephaven/db/v2/hashing/ChunkHasher.java | 6 +- .../deephaven/db/v2/hashing/HashMapBase.java | 130 +- .../deephaven/db/v2/hashing/HashMapK1V1.java | 33 +- .../deephaven/db/v2/hashing/HashMapK2V2.java | 33 +- .../deephaven/db/v2/hashing/HashMapK4V4.java | 33 +- .../db/v2/hashing/ReplicateHashing.java | 137 +- .../db/v2/iterators/ColumnIterator.java | 3 +- .../join/BucketedChunkedAjMergedListener.java | 408 +- .../db/v2/join/ChunkedAjUtilities.java | 44 +- .../io/deephaven/db/v2/join/JoinKeyState.java | 13 +- .../db/v2/join/JoinListenerRecorder.java | 16 +- .../join/ZeroKeyChunkedAjMergedListener.java | 275 +- .../v2/join/dupcompact/DupCompactKernel.java | 12 +- 
.../dupcompact/ReplicateDupCompactKernel.java | 69 +- .../db/v2/join/dupexpand/DupExpandKernel.java | 10 +- .../dupexpand/ReplicateDupExpandKernel.java | 11 +- .../v2/join/stamp/ReplicateStampKernel.java | 41 +- .../db/v2/join/stamp/StampKernel.java | 11 +- .../v2/locations/BasicTableDataListener.java | 7 +- .../db/v2/locations/ColumnLocation.java | 27 +- .../db/v2/locations/GroupingProvider.java | 20 +- .../locations/KeyRangeGroupingProvider.java | 7 +- .../db/v2/locations/TableDataException.java | 16 +- .../db/v2/locations/TableLocation.java | 37 +- .../db/v2/locations/TableLocationKey.java | 28 +- .../v2/locations/TableLocationProvider.java | 39 +- .../db/v2/locations/TableLocationState.java | 30 +- .../locations/TableLocationStateHolder.java | 16 +- .../UnknownPartitionKeyException.java | 5 +- .../impl/AbstractColumnLocation.java | 3 +- .../impl/AbstractTableDataService.java | 8 +- .../locations/impl/AbstractTableLocation.java | 22 +- .../impl/AbstractTableLocationProvider.java | 79 +- .../impl/CompositeTableDataService.java | 104 +- ...iteTableDataServiceConsistencyMonitor.java | 12 +- .../impl/FilteredTableDataService.java | 38 +- .../impl/FunctionConsistencyMonitor.java | 34 +- .../impl/KnownLocationKeyFinder.java | 15 +- .../impl/NonexistentTableLocation.java | 8 +- .../ParallelDeferredGroupingProvider.java | 161 +- .../impl/PartitionedTableLocationKey.java | 70 +- .../impl/PollingTableLocationProvider.java | 17 +- .../locations/impl/RandomGroupingBuilder.java | 12 +- .../impl/RecordingLocationKeyFinder.java | 6 +- .../impl/SingleTableLocationProvider.java | 9 +- .../v2/locations/impl/StandaloneTableKey.java | 3 +- .../impl/StandaloneTableLocationKey.java | 2 +- .../impl/SubscriptionAggregator.java | 63 +- .../locations/impl/TableLocationFactory.java | 10 +- .../impl/TableLocationSubscriptionBuffer.java | 25 +- ...TableLocationUpdateSubscriptionBuffer.java | 23 +- .../local/DeephavenNestedPartitionLayout.java | 64 +- 
.../locations/local/FileTableLocationKey.java | 44 +- .../v2/locations/local/FlatParquetLayout.java | 13 +- .../local/KeyValuePartitionLayout.java | 168 +- .../local/ParquetMetadataFileLayout.java | 130 +- .../local/SingleParquetFileLayout.java | 5 +- .../parquet/ColumnChunkPageStore.java | 50 +- .../FixedPageSizeColumnChunkPageStore.java | 15 +- .../VariablePageSizeColumnChunkPageStore.java | 21 +- .../parquet/local/ParquetColumnLocation.java | 364 +- .../parquet/local/ParquetTableLocation.java | 56 +- .../local/ParquetTableLocationFactory.java | 7 +- .../local/ParquetTableLocationKey.java | 55 +- .../TrackedSeekableChannelsProvider.java | 30 +- .../locations/parquet/topage/Dictionary.java | 3 +- .../parquet/topage/ReplicateToPage.java | 7 +- .../locations/parquet/topage/ToArrayPage.java | 20 +- .../parquet/topage/ToDBDateTimePage.java | 20 +- .../parquet/topage/ToDbArrayPage.java | 22 +- .../v2/locations/parquet/topage/ToPage.java | 16 +- .../parquet/topage/ToPageWithDictionary.java | 15 +- .../parquet/topage/ToStringPage.java | 22 +- .../parquet/topage/ToStringSetPage.java | 19 +- .../util/ExecutorTableDataRefreshService.java | 52 +- .../util/TableDataRefreshService.java | 40 +- .../db/v2/parquet/DictionaryAdapter.java | 30 +- .../deephaven/db/v2/parquet/MappedSchema.java | 12 +- .../db/v2/parquet/ParquetInstructions.java | 135 +- .../db/v2/parquet/ParquetSchemaReader.java | 238 +- .../db/v2/parquet/ParquetTableWriter.java | 313 +- .../io/deephaven/db/v2/parquet/TypeInfos.java | 114 +- .../db/v2/parquet/metadata/CodecInfo.java | 3 +- .../v2/parquet/metadata/ColumnTypeInfo.java | 4 +- .../parquet/metadata/GroupingColumnInfo.java | 4 +- .../db/v2/parquet/metadata/TableInfo.java | 18 +- .../db/v2/remote/ConstructSnapshot.java | 820 ++- .../db/v2/remote/InitialSnapshot.java | 18 +- .../db/v2/remote/InitialSnapshotTable.java | 63 +- .../db/v2/remote/WrappedDelegatingTable.java | 48 +- .../db/v2/replay/QueryReplayGroupedTable.java | 15 +- 
.../db/v2/replay/ReplayGroupedFullTable.java | 15 +- .../v2/replay/ReplayLastByGroupedTable.java | 14 +- .../deephaven/db/v2/replay/ReplayTable.java | 3 +- .../io/deephaven/db/v2/replay/Replayer.java | 35 +- .../db/v2/select/AbstractConditionFilter.java | 90 +- .../db/v2/select/AbstractFormulaColumn.java | 76 +- .../db/v2/select/AbstractRangeFilter.java | 22 +- .../AutoTuningIncrementalReleaseFilter.java | 148 +- .../select/BaseIncrementalReleaseFilter.java | 12 +- .../deephaven/db/v2/select/ChunkFilter.java | 63 +- .../deephaven/db/v2/select/ClockFilter.java | 36 +- .../db/v2/select/ComparableRangeFilter.java | 69 +- .../db/v2/select/ComposedFilter.java | 16 +- .../db/v2/select/ConditionFilter.java | 191 +- .../db/v2/select/ConjunctiveFilter.java | 10 +- .../db/v2/select/DateTimeRangeFilter.java | 28 +- .../db/v2/select/DbArrayChunkAdapter.java | 24 +- .../db/v2/select/DhFormulaColumn.java | 624 ++- .../db/v2/select/DisjunctiveFilter.java | 13 +- .../db/v2/select/DownsampledWhereFilter.java | 24 +- .../db/v2/select/DynamicWhereFilter.java | 82 +- .../io/deephaven/db/v2/select/Formula.java | 16 +- .../deephaven/db/v2/select/FormulaColumn.java | 8 +- .../db/v2/select/FormulaGenerator.java | 8 +- .../db/v2/select/FormulaKernelTypedBase.java | 117 +- .../v2/select/FormulaParserConfiguration.java | 17 +- .../db/v2/select/FunctionalColumn.java | 64 +- .../v2/select/IncrementalReleaseFilter.java | 6 +- .../deephaven/db/v2/select/MatchFilter.java | 79 +- .../select/MultiSourceFunctionalColumn.java | 39 +- .../db/v2/select/NullSelectColumn.java | 6 +- .../deephaven/db/v2/select/PatternFilter.java | 62 +- .../db/v2/select/RangeConditionFilter.java | 61 +- .../deephaven/db/v2/select/RegexFilter.java | 6 +- .../db/v2/select/ReindexingFilter.java | 13 +- .../db/v2/select/ReinterpretedColumn.java | 37 +- .../db/v2/select/RollingReleaseFilter.java | 8 +- .../deephaven/db/v2/select/SelectColumn.java | 25 +- .../deephaven/db/v2/select/SelectFilter.java | 70 +- 
.../SelectFilterLivenessArtifactImpl.java | 3 +- .../db/v2/select/SimulationClock.java | 24 +- .../SingleSidedComparableRangeFilter.java | 51 +- .../db/v2/select/SortedClockFilter.java | 35 +- .../deephaven/db/v2/select/SourceColumn.java | 6 +- .../db/v2/select/StringContainsFilter.java | 49 +- .../deephaven/db/v2/select/SwitchColumn.java | 12 +- .../db/v2/select/TimeSeriesFilter.java | 5 +- .../db/v2/select/UnsortedClockFilter.java | 40 +- .../db/v2/select/analyzers/BaseLayer.java | 12 +- .../select/analyzers/DependencyLayerBase.java | 21 +- .../select/analyzers/PreserveColumnLayer.java | 15 +- .../v2/select/analyzers/RedirectionLayer.java | 40 +- .../analyzers/SelectAndViewAnalyzer.java | 87 +- .../select/analyzers/SelectColumnLayer.java | 68 +- .../analyzers/SelectOrViewColumnLayer.java | 4 +- .../v2/select/analyzers/ViewColumnLayer.java | 11 +- .../chunkfilters/ChunkMatchFilterFactory.java | 4 +- .../chunkfilters/ReplicateChunkFilters.java | 21 +- .../StringChunkMatchFilterFactory.java | 73 +- .../db/v2/select/codegen/FormulaAnalyzer.java | 66 +- .../v2/select/codegen/JavaKernelBuilder.java | 221 +- .../db/v2/select/codegen/RichType.java | 3 +- .../db/v2/select/formula/FormulaFactory.java | 5 +- .../db/v2/select/formula/FormulaKernel.java | 5 +- .../select/formula/FormulaKernelAdapter.java | 85 +- .../formula/FormulaSourceDescriptor.java | 3 +- .../db/v2/select/python/ArgumentsChunked.java | 3 +- .../v2/select/python/ArgumentsSingular.java | 15 +- .../python/DeephavenCompatibleFunction.java | 41 +- .../FilterKernelPythonChunkedFunction.java | 15 +- .../FilterKernelPythonSingularFunction.java | 10 +- .../v2/select/python/FormulaColumnPython.java | 13 +- .../FormulaKernelPythonChunkedFunction.java | 93 +- .../FormulaKernelPythonSingularFunction.java | 89 +- .../ReplicateSetInclusionKernel.java | 3 +- .../setinclusion/SetInclusionKernel.java | 3 +- .../snapshot/SnapshotIncrementalListener.java | 27 +- .../db/v2/snapshot/SnapshotUtils.java | 42 +- 
.../db/v2/sort/ReplicateSortKernel.java | 249 +- .../db/v2/sort/findruns/FindRunsKernel.java | 23 +- .../v2/sort/partition/PartitionUtilities.java | 7 +- .../db/v2/sort/permute/PermuteKernel.java | 44 +- .../db/v2/sort/timsort/TimsortUtilities.java | 11 +- .../db/v2/sortcheck/ReplicateSortCheck.java | 18 +- .../db/v2/sources/AbstractColumnSource.java | 193 +- .../AbstractDeferredGroupingColumnSource.java | 2 +- .../v2/sources/ArrayBackedColumnSource.java | 218 +- .../db/v2/sources/ArraySourceHelper.java | 82 +- .../db/v2/sources/BitMaskingColumnSource.java | 125 +- .../v2/sources/BitShiftingColumnSource.java | 139 +- .../db/v2/sources/BoxedColumnSource.java | 43 +- .../deephaven/db/v2/sources/ColumnSource.java | 66 +- .../v2/sources/ColumnSourceGetDefaults.java | 4 +- .../sources/CrossJoinRightColumnSource.java | 166 +- .../db/v2/sources/DateTimeArraySource.java | 78 +- .../v2/sources/DateTimeSparseArraySource.java | 45 +- .../db/v2/sources/FillFromUnordered.java | 13 +- .../db/v2/sources/FillUnordered.java | 26 +- .../db/v2/sources/ImmutableColumnSource.java | 5 +- .../ImmutableColumnSourceGetDefaults.java | 25 +- .../deephaven/db/v2/sources/LogicalClock.java | 32 +- .../db/v2/sources/MutableColumnSource.java | 4 +- .../MutableColumnSourceGetDefaults.java | 20 +- .../db/v2/sources/NullValueColumnSource.java | 48 +- .../db/v2/sources/PrevColumnSource.java | 20 +- .../ReadOnlyRedirectedColumnSource.java | 170 +- .../db/v2/sources/RedirectedColumnSource.java | 38 +- .../db/v2/sources/ReinterpretUtilities.java | 19 +- .../deephaven/db/v2/sources/Releasable.java | 4 +- .../v2/sources/ReplicateSourcesAndChunks.java | 1202 +++-- .../v2/sources/ReverseLookupColumnSource.java | 18 +- .../db/v2/sources/ReversedColumnSource.java | 17 +- .../deephaven/db/v2/sources/RowIdSource.java | 3 +- .../v2/sources/SingleValueColumnSource.java | 3 +- .../deephaven/db/v2/sources/SinkFiller.java | 6 +- .../db/v2/sources/SizedColumnSource.java | 3 +- 
.../v2/sources/SparseArrayColumnSource.java | 126 +- .../db/v2/sources/StringSetImpl.java | 56 +- .../db/v2/sources/SwitchColumnSource.java | 47 +- .../sources/UnboxedDateTimeColumnSource.java | 11 +- .../UnboxedDateTimeWritableSource.java | 3 +- .../sources/UngroupedArrayColumnSource.java | 2 +- .../db/v2/sources/UngroupedColumnSource.java | 44 +- .../sources/UngroupedDbArrayColumnSource.java | 7 +- .../db/v2/sources/UnionColumnSource.java | 93 +- .../db/v2/sources/UnionRedirection.java | 31 +- .../db/v2/sources/UnionSourceManager.java | 140 +- .../db/v2/sources/ViewColumnSource.java | 34 +- .../db/v2/sources/WritableChunkSink.java | 13 +- .../db/v2/sources/WritableSource.java | 23 +- .../aggregate/AggregateColumnSource.java | 17 +- .../aggregate/BaseAggregateColumnSource.java | 53 +- .../UngroupedAggregateColumnSource.java | 91 +- .../db/v2/sources/chunk/Attributes.java | 4 +- .../deephaven/db/v2/sources/chunk/Chunk.java | 45 +- .../db/v2/sources/chunk/ChunkBase.java | 23 +- .../db/v2/sources/chunk/ChunkChunkBase.java | 8 +- .../db/v2/sources/chunk/ChunkSource.java | 92 +- .../db/v2/sources/chunk/ChunkStream.java | 6 +- .../db/v2/sources/chunk/ChunkType.java | 28 +- .../db/v2/sources/chunk/Context.java | 7 +- .../db/v2/sources/chunk/ContextWithChunk.java | 71 +- .../v2/sources/chunk/DefaultChunkSource.java | 48 +- .../v2/sources/chunk/DefaultGetContext.java | 13 +- .../db/v2/sources/chunk/FillContextMaker.java | 14 +- .../db/v2/sources/chunk/GetContextMaker.java | 14 +- .../v2/sources/chunk/OrderedChunkUtils.java | 7 +- .../db/v2/sources/chunk/ResettableChunk.java | 10 +- .../sources/chunk/ResettableChunkChunk.java | 7 +- .../v2/sources/chunk/ResettableContext.java | 4 +- .../chunk/ResettableReadOnlyChunk.java | 16 +- .../chunk/ResettableWritableChunk.java | 12 +- .../chunk/ResettableWritableChunkChunk.java | 7 +- .../db/v2/sources/chunk/SharedContext.java | 47 +- .../db/v2/sources/chunk/WritableChunk.java | 44 +- .../db/v2/sources/chunk/page/ChunkPage.java | 15 
+- .../db/v2/sources/chunk/page/Page.java | 95 +- .../db/v2/sources/chunk/page/PageStore.java | 37 +- .../sources/chunk/page/PagingChunkSource.java | 71 +- .../db/v2/sources/chunk/sized/SizedChunk.java | 4 +- .../v2/sources/chunk/util/SimpleTypeMap.java | 5 +- .../chunk/util/chunkfillers/ChunkFiller.java | 14 +- .../chunk/util/factories/ChunkFactory.java | 13 +- .../sources/chunk/util/pools/ChunkPool.java | 14 +- .../chunk/util/pools/ChunkPoolConstants.java | 6 +- .../util/pools/ChunkPoolReleaseTracking.java | 9 +- .../chunk/util/pools/MultiChunkPool.java | 7 +- .../chunk/util/pools/PoolableChunk.java | 4 +- .../chunkcolumnsource/ChunkColumnSource.java | 32 +- .../v2/sources/deltaaware/ChunkAdapter.java | 11 +- .../db/v2/sources/deltaaware/ChunkMerger.java | 15 +- .../deltaaware/DeltaAwareColumnSource.java | 192 +- .../db/v2/sources/deltaaware/SoleKey.java | 3 +- .../ImmutableDateTimeArraySource.java | 2 +- .../db/v2/sources/regioned/ColumnRegion.java | 6 +- .../regioned/ColumnRegionChunkDictionary.java | 50 +- .../regioned/ColumnRegionReferencing.java | 12 +- .../regioned/ColumnRegionReferencingImpl.java | 12 +- .../regioned/DeferredColumnRegion.java | 13 +- .../regioned/DeferredColumnRegionBase.java | 28 +- .../DeferredColumnRegionReferencing.java | 10 +- .../db/v2/sources/regioned/MakeRegion.java | 11 +- .../regioned/ParquetColumnRegionBase.java | 36 +- .../regioned/PartitioningSourceFactory.java | 2 +- .../sources/regioned/RegionContextHolder.java | 3 +- .../regioned/RegionedColumnSource.java | 40 +- .../regioned/RegionedColumnSourceArray.java | 60 +- .../regioned/RegionedColumnSourceBase.java | 32 +- .../RegionedColumnSourceDBDateTime.java | 16 +- .../regioned/RegionedColumnSourceInner.java | 19 +- .../regioned/RegionedColumnSourceManager.java | 199 +- .../RegionedColumnSourceReferencing.java | 60 +- .../RegionedColumnSourceWithDictionary.java | 167 +- .../sources/regioned/RegionedPageStore.java | 37 +- .../RegionedTableComponentFactory.java | 4 +- 
.../RegionedTableComponentFactoryImpl.java | 30 +- .../ReplicateRegionsAndRegionedSources.java | 4 +- .../sources/regioned/SymbolTableSource.java | 43 +- .../decoder/EncodedStringDecoder.java | 6 +- .../regioned/decoder/SimpleStringDecoder.java | 14 +- .../io/deephaven/db/v2/ssa/ChunkSsaStamp.java | 26 +- .../v2/ssa/ReplicateSegmentedSortedArray.java | 101 +- .../db/v2/ssa/SegmentedSortedArray.java | 46 +- .../io/deephaven/db/v2/ssa/SsaChecker.java | 16 +- .../io/deephaven/db/v2/ssa/SsaSsaStamp.java | 22 +- .../ReplicateSegmentedSortedMultiset.java | 504 +- .../db/v2/ssms/SegmentedSortedMultiSet.java | 27 +- .../db/v2/tuples/AbstractTupleSource.java | 16 +- .../db/v2/tuples/EmptyTupleSource.java | 22 +- .../db/v2/tuples/MultiColumnTupleSource.java | 63 +- .../db/v2/tuples/SingleColumnTupleSource.java | 18 +- .../db/v2/tuples/SmartKeySource.java | 65 +- .../tuples/ThreeColumnTupleSourceFactory.java | 4 +- .../deephaven/db/v2/tuples/TupleExporter.java | 19 +- .../deephaven/db/v2/tuples/TupleSource.java | 10 +- .../v2/tuples/TupleSourceCodeGenerator.java | 607 +-- .../db/v2/tuples/TupleSourceFactory.java | 47 +- .../tuples/TwoColumnTupleSourceFactory.java | 2 +- .../AbstractIndexUpdateNotification.java | 2 +- .../AbstractTreeIndexImplRandomBuilder.java | 3 +- .../AppendOnlyArrayBackedMutableTable.java | 42 +- .../db/v2/utils/AsyncClientErrorNotifier.java | 4 +- .../db/v2/utils/AsyncErrorLogger.java | 29 +- .../deephaven/db/v2/utils/BarrageMessage.java | 6 +- .../v2/utils/BaseArrayBackedMutableTable.java | 115 +- .../io/deephaven/db/v2/utils/ChunkUtils.java | 174 +- .../deephaven/db/v2/utils/ColumnHolder.java | 64 +- .../db/v2/utils/ColumnsToRowsTransform.java | 172 +- .../utils/ContiguousRedirectionIndexImpl.java | 28 +- .../deephaven/db/v2/utils/CumulativeUtil.java | 93 +- .../db/v2/utils/CurrentOnlyIndex.java | 20 +- .../db/v2/utils/DynamicTableWriter.java | 75 +- .../db/v2/utils/ExternalizableIndexUtils.java | 34 +- .../db/v2/utils/FilteredTableMap.java | 10 +- 
.../io/deephaven/db/v2/utils/FreezeBy.java | 35 +- .../utils/FunctionGeneratedTableFactory.java | 69 +- .../db/v2/utils/GroupedRedirectionIndex.java | 51 +- .../v2/utils/HashSetBackedTableFactory.java | 31 +- .../v2/utils/ImplementedByTreeIndexImpl.java | 4 +- .../java/io/deephaven/db/v2/utils/Index.java | 133 +- .../io/deephaven/db/v2/utils/IndexCounts.java | 98 +- .../db/v2/utils/IndexPeformanceTest.java | 14 +- .../deephaven/db/v2/utils/IndexShiftData.java | 342 +- .../db/v2/utils/IndexShiftDataExpander.java | 102 +- .../deephaven/db/v2/utils/IndexUtilities.java | 42 +- .../utils/KeyedArrayBackedMutableTable.java | 106 +- .../db/v2/utils/MemoryTableLogger.java | 27 +- .../db/v2/utils/MemoryTableLoggers.java | 27 +- .../db/v2/utils/MergeSortedHelper.java | 36 +- .../deephaven/db/v2/utils/MixedBuilder.java | 6 +- .../io/deephaven/db/v2/utils/OrderedKeys.java | 131 +- .../db/v2/utils/OrderedKeysAsChunkImpl.java | 2 +- .../utils/OrderedKeysKeyIndicesChunkImpl.java | 28 +- .../utils/OrderedKeysKeyRangesChunkImpl.java | 124 +- .../db/v2/utils/OutOfKeySpaceException.java | 6 +- .../db/v2/utils/PerformanceQueries.java | 194 +- .../v2/utils/RangePriorityQueueBuilder.java | 39 +- .../deephaven/db/v2/utils/ReadOnlyIndex.java | 151 +- .../db/v2/utils/RedirectionIndex.java | 57 +- .../utils/RedirectionIndexLockFreeImpl.java | 218 +- .../v2/utils/RedirectionIndexUtilities.java | 5 +- .../deephaven/db/v2/utils/RefCountedCow.java | 56 +- .../v2/utils/RspBitmapSequentialBuilder.java | 25 +- .../deephaven/db/v2/utils/RuntimeMemory.java | 26 +- .../io/deephaven/db/v2/utils/ShiftData.java | 18 +- .../db/v2/utils/ShiftedOrderedKeys.java | 15 +- .../io/deephaven/db/v2/utils/SortedIndex.java | 295 +- ...StaticSingleValueRedirectionIndexImpl.java | 12 +- .../db/v2/utils/SyncTableFilter.java | 209 +- .../deephaven/db/v2/utils/TableBuilder.java | 17 +- .../deephaven/db/v2/utils/TableLoggers.java | 7 +- .../db/v2/utils/TailInitializationFilter.java | 90 +- 
.../db/v2/utils/TerminalNotification.java | 3 +- ...ickingSingleValueRedirectionIndexImpl.java | 16 +- .../io/deephaven/db/v2/utils/TreeIndex.java | 50 +- .../deephaven/db/v2/utils/TreeIndexImpl.java | 45 +- .../utils/TreeIndexImplSequentialBuilder.java | 3 +- .../db/v2/utils/UpdatePerformanceTracker.java | 167 +- .../db/v2/utils/UpdateSizeCalculator.java | 17 +- .../WrappedIndexRedirectionIndexImpl.java | 89 +- .../db/v2/utils/codegen/CodeGenerator.java | 35 +- .../db/v2/utils/codegen/TypeAnalyzer.java | 44 +- .../db/v2/utils/compact/CompactKernel.java | 49 +- .../db/v2/utils/copy/CopyKernel.java | 16 +- .../utils/freezeby/FreezeByCountOperator.java | 29 +- .../v2/utils/freezeby/FreezeByOperator.java | 56 +- .../v2/utils/freezeby/ReplicateFreezeBy.java | 30 +- .../db/v2/utils/metrics/MetricsManager.java | 213 +- .../db/v2/utils/rsp/DisposableRspBitmap.java | 3 +- .../v2/utils/rsp/IndexRangeIteratorView.java | 3 +- .../deephaven/db/v2/utils/rsp/RspArray.java | 773 ++- .../deephaven/db/v2/utils/rsp/RspBitmap.java | 201 +- .../db/v2/utils/rsp/RspIterator.java | 23 +- .../db/v2/utils/rsp/RspOrderedKeys.java | 138 +- .../v2/utils/rsp/RspRangeBatchIterator.java | 42 +- .../db/v2/utils/rsp/RspRangeIterator.java | 76 +- .../db/v2/utils/rsp/RspReverseIterator.java | 15 +- .../db/v2/utils/singlerange/SingleRange.java | 65 +- .../utils/singlerange/SingleRangeMixin.java | 14 +- .../v2/utils/sortedranges/SortedRanges.java | 491 +- .../sortedranges/SortedRangesOrderedKeys.java | 54 +- .../sortedranges/SortedRangesPacked.java | 24 +- .../utils/sortedranges/SortedRangesTyped.java | 17 +- .../db/v2/utils/unboxer/ChunkUnboxer.java | 4 +- .../utils/unboxer/ReplicateUnboxerKernel.java | 3 +- .../java/io/deephaven/gui/color/Color.java | 109 +- .../gui/color/ColorPaletteAlgorithms.java | 29 +- .../java/io/deephaven/gui/color/Colors.java | 934 ++-- .../deephaven/gui/table/QuickFilterMode.java | 4 +- .../gui/table/filters/Condition.java | 24 +- .../libs/primitives/Comparators.java | 21 
+- .../libs/primitives/ComparePrimitives.java | 81 +- .../deephaven/libs/primitives/Replicate.java | 17 +- .../deephaven/process/ProcessInfoConfig.java | 34 +- .../process/ProcessInfoStoreDBImpl.java | 3 +- .../python/PyModuleFromResource.java | 12 +- .../github/javaparser/ExpressionParser.java | 19 +- .../io/deephaven/stream/StreamConsumer.java | 15 +- .../stream/StreamFailureConsumer.java | 3 +- .../io/deephaven/stream/StreamPublisher.java | 14 +- .../stream/StreamToTableAdapter.java | 71 +- .../calendar/AbstractBusinessCalendar.java | 34 +- .../util/calendar/AbstractCalendar.java | 11 +- .../util/calendar/BusinessCalendar.java | 169 +- .../util/calendar/BusinessPeriod.java | 9 +- .../util/calendar/BusinessSchedule.java | 11 +- .../io/deephaven/util/calendar/Calendar.java | 29 +- .../io/deephaven/util/calendar/Calendars.java | 30 +- .../util/calendar/DateStringUtils.java | 25 +- .../calendar/DefaultBusinessCalendar.java | 133 +- .../DefaultNoHolidayBusinessCalendar.java | 4 +- .../util/calendar/StaticCalendarMethods.java | 9 +- .../java/io/deephaven/utils/ArrayParser.java | 34 +- .../db/tables/QueryTableHugeSortTest.java | 25 +- .../db/tables/TestAppendableColumn.java | 8 +- .../db/tables/TestColumnDefinition.java | 9 +- .../deephaven/db/tables/TestMoveColumns.java | 9 +- .../db/tables/dbarrays/DbArrayTest.java | 33 +- .../db/tables/dbarrays/ReplicateTst.java | 3 +- .../lang/DBLanguageParserDummyClass.java | 5 +- .../lang/TestDBLanguageFunctionUtil.java | 4442 +++++++---------- .../db/tables/lang/TestDBLanguageParser.java | 879 ++-- .../db/tables/libs/QueryLibraryTest.java | 14 +- .../db/tables/live/TestConstructSnapshot.java | 19 +- .../tables/live/TestLiveTableMonitorLock.java | 14 +- .../select/SelectFilterFactoryTest.java | 192 +- .../db/tables/select/SelectFilterTest.java | 21 +- .../db/tables/utils/TestDBDateTime.java | 48 +- .../tables/utils/TestDBDateTimeFormatter.java | 21 +- .../db/tables/utils/TestDBTimeUtils.java | 428 +- 
.../db/tables/utils/TestParquetTools.java | 126 +- .../db/tables/utils/TestTableTools.java | 499 +- .../db/tables/utils/TestTypeUtils.java | 126 +- .../db/tables/utils/TestWindowCheck.java | 41 +- .../db/tables/verify/TestTableAssertions.java | 55 +- .../java/io/deephaven/db/util/JpyPlaypen.java | 30 +- .../db/util/ScriptEnginePlaypen.java | 5 +- .../io/deephaven/db/util/TestComparisons.java | 48 +- .../db/util/TestCompileSimpleFunction.java | 6 +- .../io/deephaven/db/util/TestDBColorUtil.java | 126 +- .../deephaven/db/util/TestToMapListener.java | 8 +- .../db/util/file/TestFileHandle.java | 18 +- .../file/TestTrackedFileHandleFactory.java | 8 +- .../db/util/jpy/JpyConfigFlagTest.java | 6 +- .../db/util/jpy/JpyConfigLoaderTest.java | 120 +- .../db/util/liveness/TestLiveness.java | 6 +- .../db/util/scripts/TestScriptRepository.java | 44 +- .../serialization/TestSerializationUtils.java | 19 +- .../io/deephaven/db/v2/CountingTable.java | 26 +- .../io/deephaven/db/v2/ErrorListener.java | 3 +- .../java/io/deephaven/db/v2/EvalNugget.java | 22 +- .../io/deephaven/db/v2/FailureListener.java | 2 +- .../deephaven/db/v2/FuzzerPrintListener.java | 2 +- .../java/io/deephaven/db/v2/FuzzerTest.java | 61 +- .../deephaven/db/v2/GenerateTableUpdates.java | 89 +- .../io/deephaven/db/v2/GroupingValidator.java | 77 +- .../io/deephaven/db/v2/IndexGroupingTest.java | 239 +- .../db/v2/JUnit4QueryTableTestBase.java | 10 +- .../io/deephaven/db/v2/LiveTableTestCase.java | 39 +- .../deephaven/db/v2/MultiColumnSortTest.java | 85 +- .../java/io/deephaven/db/v2/QueryFactory.java | 412 +- .../db/v2/QueryTableAggregationTest.java | 1647 +++--- ...leAggregationTestFormulaStaticMethods.java | 118 +- .../io/deephaven/db/v2/QueryTableAjTest.java | 943 ++-- ...QueryTableCrossJoinSmallRightBitsTest.java | 34 +- .../db/v2/QueryTableCrossJoinTestBase.java | 174 +- .../db/v2/QueryTableFlattenTest.java | 89 +- .../deephaven/db/v2/QueryTableJoinTest.java | 464 +- .../db/v2/QueryTableNaturalJoinTest.java | 
530 +- .../db/v2/QueryTableSelectUpdateTest.java | 411 +- .../deephaven/db/v2/QueryTableSliceTest.java | 88 +- .../deephaven/db/v2/QueryTableSortTest.java | 446 +- ...QueryTableStaticNaturalJoinRandomTest.java | 123 +- .../io/deephaven/db/v2/QueryTableTest.java | 1762 +++---- .../deephaven/db/v2/QueryTableTestBase.java | 82 +- .../deephaven/db/v2/QueryTableTreeTest.java | 1002 ++-- .../deephaven/db/v2/QueryTableWhereTest.java | 470 +- .../db/v2/QueryTableWouldMatchTest.java | 147 +- .../db/v2/SelectOverheadLimiter.java | 46 +- .../db/v2/SimpleShiftAwareListener.java | 21 +- .../db/v2/StreamTableAggregationTest.java | 131 +- .../db/v2/StreamTableOperationsTest.java | 54 +- .../java/io/deephaven/db/v2/TableMapTest.java | 264 +- .../deephaven/db/v2/TestAggregatedSelect.java | 35 +- .../io/deephaven/db/v2/TestByExternal.java | 140 +- .../io/deephaven/db/v2/TestCodecColumns.java | 54 +- .../v2/TestColumnDescriptionInheritance.java | 140 +- .../db/v2/TestColumnRenderersBuilder.java | 53 +- .../java/io/deephaven/db/v2/TestComboBy.java | 640 ++- .../db/v2/TestConcurrentInstantiation.java | 516 +- .../db/v2/TestDownsampledWhereFilter.java | 20 +- .../db/v2/TestEvenlyDividedTableMap.java | 8 +- .../db/v2/TestKeyedTableListener.java | 111 +- .../deephaven/db/v2/TestListenerFailure.java | 26 +- .../deephaven/db/v2/TestMapCodecColumns.java | 74 +- .../db/v2/TestPartitionAwareSourceTable.java | 173 +- .../db/v2/TestPartitioningColumns.java | 71 +- .../db/v2/TestReverseLookupListener.java | 35 +- .../db/v2/TestSelectOverheadLimiter.java | 87 +- .../db/v2/TestSelectPreserveGrouping.java | 13 +- .../db/v2/TestSimpleSourceTable.java | 102 +- .../java/io/deephaven/db/v2/TestSort.java | 228 +- .../deephaven/db/v2/TestStreamTableTools.java | 23 +- .../db/v2/TestSymbolTableCombiner.java | 67 +- .../deephaven/db/v2/TestTableValidator.java | 8 +- .../io/deephaven/db/v2/TestTotalsTable.java | 122 +- .../io/deephaven/db/v2/TestUngroupRebase.java | 19 +- 
.../deephaven/db/v2/TickSuppressorTest.java | 120 +- .../java/io/deephaven/db/v2/TstUtils.java | 338 +- .../db/v2/UpdateValidatorNugget.java | 26 +- .../v2/by/TestSortedFirstOrLastByFactory.java | 101 +- .../io/deephaven/db/v2/hashing/TestKnVn.java | 33 +- .../impl/SimpleTableLocationKey.java | 7 +- .../impl/TestFunctionConsistencyMonitor.java | 75 +- .../locations/impl/TestGroupingProviders.java | 146 +- .../impl/TestKeyValuePartitionLayout.java | 122 +- .../v2/parquet/ParquetTableReadWriteTest.java | 95 +- .../db/v2/select/FormulaTestUtil.java | 49 +- .../db/v2/select/SelectColumnTest.java | 3 +- .../db/v2/select/TestClockFilters.java | 42 +- .../select/TestConditionFilterGeneration.java | 12 +- .../db/v2/select/TestFormulaColumn.java | 186 +- .../select/TestFormulaColumnGeneration.java | 12 +- .../db/v2/select/TestSelectFilterFactory.java | 24 +- .../v2/sort/megamerge/MegaMergeTestUtils.java | 85 +- .../db/v2/sort/timsort/MergeSort.java | 6 +- .../timsort/ReplicateSortKernelTests.java | 75 +- .../timsort/TestFpTimsortComparisons.java | 6 +- .../db/v2/sort/timsort/TestTimSortKernel.java | 41 +- .../sources/ArrayBackedColumnSourceTest.java | 17 +- .../db/v2/sources/ArrayGenerator.java | 71 +- .../db/v2/sources/DateTimeTreeMapSource.java | 12 +- .../db/v2/sources/ImmutableTreeMapSource.java | 5 +- .../sources/ReplicateSourcesAndChunkTest.java | 133 +- .../db/v2/sources/TreeMapSource.java | 15 +- .../sources/UnboxedDateTimeTreeMapSource.java | 8 +- .../db/v2/sources/chunk/RandomResetter.java | 3 +- .../v2/sources/chunk/TestSharedContext.java | 9 +- .../db/v2/sources/chunk/TestSourceSink.java | 24 +- .../chunk/util/pools/TestChunkPooling.java | 3 +- .../TestChunkColumnSource.java | 135 +- ...eplicateRegionsAndRegionedSourcesTest.java | 10 +- .../TestChunkedRegionedOperations.java | 288 +- .../sources/regioned/TestDbArrayUngroup.java | 12 +- .../TestRegionedColumnSourceDBDateTime.java | 116 +- .../TestRegionedColumnSourceManager.java | 145 +- 
.../regioned/TstColumnRegionPrimative.java | 5 +- .../TstRegionedColumnSourcePrimitive.java | 31 +- .../TstRegionedColumnSourceReferencing.java | 3 +- .../ReplicateSegmentedSortedArrayTests.java | 7 +- .../deephaven/db/v2/ssa/SsaTestHelpers.java | 11 +- ...ReplicateSegmentedSortedMultisetTests.java | 18 +- .../deephaven/db/v2/utils/ChunkUtilsTest.java | 70 +- .../db/v2/utils/CumulativeUtilTest.java | 10 +- .../db/v2/utils/ImmutableColumnHolder.java | 2 +- .../db/v2/utils/IndexCoalescerTest.java | 76 +- .../v2/utils/IndexCreationRandomPerfTest.java | 26 +- .../db/v2/utils/IndexCreationSeqPerfTest.java | 17 +- .../utils/IndexSequentialBuilderPerfTest.java | 10 +- .../v2/utils/IndexShiftDataExpanderTest.java | 36 +- .../db/v2/utils/IndexShiftDataTest.java | 24 +- .../db/v2/utils/IterPerformanceTest.java | 48 +- .../db/v2/utils/ModelFileGenerator.java | 10 +- .../OrderedKeysKeyRangesChunkImplTest.java | 5 +- .../db/v2/utils/OrderedKeysTestBase.java | 219 +- .../io/deephaven/db/v2/utils/PerfMeasure.java | 3 +- .../io/deephaven/db/v2/utils/PerfStats.java | 9 +- .../utils/RedirectionIndexLockFreeTest.java | 26 +- .../db/v2/utils/RedirectionIndexTest.java | 9 +- .../deephaven/db/v2/utils/ShiftDataTest.java | 12 +- .../db/v2/utils/ShiftedOrderedKeysTest.java | 6 +- .../db/v2/utils/SingleRangeTest.java | 45 +- .../db/v2/utils/SortedIndexTestBase.java | 107 +- .../v2/utils/TestColumnsToRowsTransform.java | 222 +- .../db/v2/utils/TestDynamicTableWriter.java | 129 +- .../deephaven/db/v2/utils/TestFreezeBy.java | 57 +- .../utils/TestFunctionBackedTableFactory.java | 20 +- .../utils/TestHashSetBackedTableFactory.java | 5 +- .../utils/TestIncrementalReleaseFilter.java | 26 +- .../TestKeyedArrayBackedMutableTable.java | 96 +- .../TestReadOnlyRedirectedColumnSource.java | 59 +- .../db/v2/utils/TestSyncTableFilter.java | 310 +- .../utils/TestTailInitializationFilter.java | 17 +- .../db/v2/utils/TestTimeSeriesFilter.java | 22 +- .../deephaven/db/v2/utils/TreeIndexTest.java | 269 +- 
.../db/v2/utils/UpdatePerformanceTest.java | 51 +- .../deephaven/db/v2/utils/ValidationSet.java | 5 +- .../utils/copy/ReplicateCopyKernelTest.java | 6 +- .../db/v2/utils/rsp/RspBitmapTest.java | 317 +- .../db/v2/utils/rsp/RspOrderedKeysTest.java | 28 +- .../SortedRangesOrderedKeysTest.java | 16 +- .../utils/sortedranges/SortedRangesTest.java | 257 +- .../libs/primitives/ReplicateTst.java | 10 +- .../libs/primitives/TestAmbiguity.java | 178 +- .../libs/primitives/TestCasting.java | 51 +- .../primitives/TestPrimitiveParseUtil.java | 19 +- .../qst/type/DbPrimitiveArrayTest.java | 19 +- .../stream/TestStreamToTableAdapter.java | 51 +- .../test/junit4/JUnit4BaseArrayTestCase.java | 10 +- .../test/junit4/JUnit4LiveTableTestCase.java | 10 +- .../calendar/StaticCalendarMethodsTest.java | 598 +-- .../util/calendar/TestBusinessSchedule.java | 83 +- .../calendar/TestDefaultBusinessCalendar.java | 320 +- .../TestDefaultNoHolidayBusinessCalendar.java | 135 +- .../io/deephaven/process/BaseboardOshi.java | 24 +- .../deephaven/process/ComputerSystemOshi.java | 49 +- .../io/deephaven/process/FirmwareOshi.java | 27 +- .../deephaven/process/MemoryMxBeanInfo.java | 6 +- .../io/deephaven/process/NetworkOshi.java | 17 +- .../process/OperatingSystemOshi.java | 12 +- .../process/OperatingSystemVersionOshi.java | 8 +- .../io/deephaven/process/ProcessStyle.java | 6 +- .../deephaven/process/RuntimeMxBeanInfo.java | 12 +- .../io/deephaven/process/SystemCpuOshi.java | 55 +- .../io/deephaven/process/SystemInfoOshi.java | 10 +- .../deephaven/process/SystemMemoryOshi.java | 12 +- .../deephaven/process/ThreadMxBeanInfo.java | 21 +- .../java/io/deephaven/process/Wrapped.java | 14 +- .../io/deephaven/process/_HostPathInfo.java | 7 +- .../io/deephaven/properties/PropertySet.java | 15 +- .../deephaven/properties/PropertyVisitor.java | 41 +- .../properties/PropertyVisitorError.java | 21 +- .../properties/PropertyVisitorPrefixed.java | 2 +- .../properties/PropertyVisitorStringBase.java | 6 +- 
.../PropertyVisitorStripPrefix.java | 13 +- .../io/deephaven/properties/SplayedPath.java | 56 +- .../datastructures/util/CollectionUtil.java | 36 +- .../datastructures/util/SmartKey.java | 3 +- .../java/io/deephaven/dbtypes/DbImage.java | 34 +- .../deephaven/dbtypes/FactoryInstances.java | 6 +- .../io/deephaven/dbtypes/DbImageTest.java | 2 +- .../io/deephaven/dbtypes/DbImageImpl.java | 19 +- .../io/deephaven/dbtypes/DbFileImplTest.java | 5 +- .../io/deephaven/dbtypes/DbImageImplTest.java | 5 +- .../main/java/io/deephaven/util/DateUtil.java | 263 +- .../java/io/deephaven/util/HungReportJob.java | 10 +- .../main/java/io/deephaven/util/Mailer.java | 8 +- .../main/java/io/deephaven/util/OSUtil.java | 10 +- .../java/io/deephaven/util/PidFileUtil.java | 15 +- .../io/deephaven/util/PropertyRetriever.java | 85 +- .../java/io/deephaven/util/SMTPMailer.java | 66 +- .../deephaven/util/ThreadSafeDateFormat.java | 5 +- .../main/java/io/deephaven/util/Validate.java | 11 +- .../io/deephaven/util/clock/MicroTimer.java | 33 +- .../deephaven/util/loggers/AsyncAppender.java | 18 +- .../loggers/DailyRollingFileAppender.java | 7 +- .../loggers/Log4JTimedBufferedWriter.java | 7 +- .../io/deephaven/util/loggers/LoggerUtil.java | 357 +- .../util/loggers/ProcessNameFileAppender.java | 32 +- .../util/loggers/SimpleMailAppender.java | 9 +- .../util/process/BaseProcessEnvironment.java | 11 +- .../process/DefaultFatalErrorReporter.java | 21 +- .../process/DefaultProcessEnvironment.java | 10 +- .../util/process/FatalErrorReporterBase.java | 17 +- .../util/process/LoggerShutdownTask.java | 30 +- .../util/process/OnetimeShutdownTask.java | 3 +- .../util/process/ProcessEnvironment.java | 99 +- .../util/process/ShutdownManager.java | 29 +- .../util/process/ShutdownManagerImpl.java | 52 +- .../deephaven/util/signals/SignalSender.java | 20 +- .../deephaven/util/signals/SignalUtils.java | 25 +- .../io/deephaven/util/threads/ThreadDump.java | 5 +- .../db/plot/util/GenerateAxesPlotMethods.java | 618 ++- 
.../db/plot/util/GenerateFigureImmutable.java | 923 ++-- .../db/plot/util/GenerateMultiSeries.java | 577 +-- .../util/GeneratePlottingConvenience.java | 112 +- .../libs/GroovyStaticImportGenerator.java | 94 +- .../python/PythonGeneratorParser.java | 119 +- .../python/PythonPlottingGenerator.java | 72 +- .../python/PythonStaticGenerator.java | 190 +- .../io/deephaven/io/CompressedFileUtil.java | 88 +- IO/src/main/java/io/deephaven/io/NioUtil.java | 36 +- .../java/io/deephaven/io/log/LogEntry.java | 6 +- .../java/io/deephaven/io/log/LogSink.java | 13 +- .../io/log/impl/DelayedLogEntryImpl.java | 22 +- .../io/log/impl/DelayedLogEntryImpl2.java | 20 +- .../log/impl/DelayedLogEntryUnsafeImpl.java | 22 +- .../impl/DynamicDelayedLogEntryPoolImpl.java | 16 +- .../DynamicDelayedLogEntryUnsafePoolImpl.java | 12 +- .../io/log/impl/DynamicLogBufferPoolImpl.java | 13 +- .../io/log/impl/DynamicLogEntryPoolImpl.java | 13 +- .../io/log/impl/LogBufferPoolImpl.java | 3 +- .../deephaven/io/log/impl/LogEntryImpl.java | 3 +- .../io/log/impl/LogOutputCsvImpl.java | 5 +- .../io/deephaven/io/log/impl/LogSinkImpl.java | 96 +- .../deephaven/io/logger/CustomLog4jLevel.java | 5 +- .../io/logger/InternalLoggerUtil.java | 9 +- .../io/deephaven/io/logger/Log4jAdapter.java | 27 +- .../deephaven/io/logger/Log4jLoggerImpl.java | 41 +- .../io/logger/LogBufferInterceptor.java | 6 +- .../io/logger/LogBufferOutputStream.java | 3 +- .../deephaven/io/logger/LogBufferRecord.java | 7 +- .../io/deephaven/io/logger/LogCrashDump.java | 22 +- .../io/deephaven/io/logger/LoggerImpl.java | 8 +- .../io/logger/LoggerOutputStream.java | 18 +- .../deephaven/io/logger/NullLoggerImpl.java | 28 +- .../io/deephaven/io/logger/PatternLayout.java | 4 +- .../io/logger/ProcessStreamLoggerImpl.java | 39 +- .../io/logger/RollingFileAppender.java | 3 +- .../deephaven/io/logger/StreamLoggerImpl.java | 20 +- .../io/logger/StreamToLogBuffer.java | 10 +- .../io/logger/StringsLoggerImpl.java | 10 +- 
.../main/java/io/deephaven/io/sched/Job.java | 9 +- .../io/sched/JobStateTimeoutQueue.java | 16 +- .../java/io/deephaven/io/sched/Scheduler.java | 36 +- .../java/io/deephaven/io/sched/TimedJob.java | 4 +- .../deephaven/io/sched/YASchedulerImpl.java | 286 +- .../io/streams/BufferedChannelReader.java | 8 +- .../io/streams/ByteBufferInputStream.java | 22 +- .../io/streams/ByteBufferOutputStream.java | 19 +- .../deephaven/io/streams/ByteBufferSink.java | 13 +- .../io/streams/ByteBufferStreams.java | 39 +- .../io/streams/CurrentByteBufferSink.java | 35 +- .../io/streams/MultiFileInputStream.java | 3 +- .../io/streams/SevenZipInputStream.java | 368 +- .../io/streams/SimpleByteBufferSink.java | 3 +- .../deephaven/io/streams/ZipInputStream.java | 8 +- .../deephaven/io/streams/ZipOutputStream.java | 5 +- .../io/log/impl/ConsolidatingLogEntry.java | 7 +- .../deephaven/io/log/impl/TestLogOutput.java | 9 +- .../io/logger/TestLog4jLoggerImpl.java | 3 +- .../io/sched/TestJobStateTimeoutQueue.java | 3 +- .../io/streams/OutputInputStreamTest.java | 19 +- .../io/streams/TestByteBufferStreams.java | 9 +- .../PrimitiveArrayConversionUtility.java | 22 +- .../integrations/numpy/Java2NumpyCopy.java | 447 +- .../integrations/python/PythonFunction.java | 24 +- .../python/PythonListenerAdapter.java | 28 +- .../python/PythonReplayListenerAdapter.java | 38 +- .../PythonShiftAwareListenerAdapter.java | 29 +- ...PythonShiftAwareReplayListenerAdapter.java | 41 +- .../python/PythonThrowingRunnable.java | 10 +- .../integrations/python/PythonTools.java | 13 +- .../integrations/python/PythonUtilities.java | 14 +- .../python/TableSnapshotReplayer.java | 4 +- .../numpy/Java2NumpyCopyTest.java | 92 +- .../internals/TestJdkInternalsLoader.java | 3 +- .../java/io/deephaven/kafka/KafkaTools.java | 436 +- .../deephaven/kafka/StreamPublisherImpl.java | 13 +- ...onsumerRecordToStreamPublisherAdapter.java | 3 +- .../ingest/GenericRecordChunkAdapter.java | 45 +- ...icRecordLongFieldCopierWithMultiplier.java | 
13 +- .../kafka/ingest/JsonNodeChunkAdapter.java | 22 +- .../deephaven/kafka/ingest/JsonNodeUtil.java | 168 +- .../deephaven/kafka/ingest/KafkaIngester.java | 139 +- .../kafka/ingest/KafkaIngesterException.java | 3 +- .../kafka/ingest/KafkaStreamConsumer.java | 7 +- .../kafka/ingest/KafkaStreamPublisher.java | 125 +- .../kafka/ingest/KeyOrValueProcessor.java | 12 +- .../kafka/ingest/MultiFieldChunkAdapter.java | 29 +- .../kafka/ingest/ReplicateKafka.java | 6 +- .../io/deephaven/kafka/KafkaToolsTest.java | 156 +- .../kafka/ingest/TestAvroAdapter.java | 51 +- .../modelfarm/ConditionalModels.java | 34 +- .../deephaven/modelfarm/ExecPrioritizer.java | 6 +- .../io/deephaven/modelfarm/ModelFarm.java | 25 +- .../io/deephaven/modelfarm/ModelFarmBase.java | 120 +- .../modelfarm/ModelFarmOnDemand.java | 79 +- .../modelfarm/ModelFarmRealTime.java | 76 +- .../io/deephaven/modelfarm/ModelFarmTick.java | 46 +- .../modelfarm/ModelInputDeserializer.java | 3 +- .../io/deephaven/modelfarm/RDMModelFarm.java | 54 +- .../deephaven/modelfarm/RowDataManager.java | 10 +- .../util/KeyedPriorityBlockingQueue.java | 15 +- .../modelfarm/util/ModelFarmUtils.java | 9 +- .../modelfarm/ConditonalModelsTest.java | 6 +- .../io/deephaven/modelfarm/TestModelFarm.java | 225 +- .../TestModelInputSerializerDeserializer.java | 7 +- .../modelfarm/util/TestModelFarmUtils.java | 10 +- .../main/java/io/deephaven/net/CommBase.java | 25 +- .../deephaven/net/impl/nio/FastNIODriver.java | 95 +- .../io/deephaven/net/impl/nio/NIODriver.java | 78 +- .../numerics/derivatives/BlackScholes.java | 187 +- .../numerics/interpolation/Interpolator.java | 65 +- .../numerics/movingaverages/ByEma.java | 26 +- .../numerics/movingaverages/ByEmaSimple.java | 8 +- .../numerics/movingaverages/Ema.java | 3 +- .../ExponentiallyDecayedSum.java | 6 +- .../derivatives/TestBlackScholes.java | 380 +- .../interpolation/InterpolatorTest.java | 20 +- .../numerics/movingaverages/ByEmaTest.java | 40 +- 
.../numerics/movingaverages/EmaArrayTest.java | 10 +- .../numerics/movingaverages/EmaTest.java | 4 +- .../ExponentiallyDecayedSumTest.java | 4 +- .../parquet/AbstractBulkValuesWriter.java | 22 +- .../java/io/deephaven/parquet/BulkWriter.java | 13 +- .../deephaven/parquet/ColumnChunkReader.java | 7 +- .../parquet/ColumnChunkReaderImpl.java | 73 +- .../deephaven/parquet/ColumnPageReader.java | 4 +- .../parquet/ColumnPageReaderImpl.java | 287 +- .../io/deephaven/parquet/ColumnWriter.java | 3 +- .../deephaven/parquet/ColumnWriterImpl.java | 225 +- .../deephaven/parquet/LevelsController.java | 109 +- .../deephaven/parquet/ParquetFileReader.java | 117 +- .../deephaven/parquet/ParquetFileWriter.java | 46 +- .../java/io/deephaven/parquet/Reader.java | 10 +- .../java/io/deephaven/parquet/Replicate.java | 12 +- .../deephaven/parquet/RowGroupReaderImpl.java | 13 +- .../deephaven/parquet/RowGroupWriterImpl.java | 39 +- .../parquet/utils/CachedChannelProvider.java | 91 +- .../io/deephaven/parquet/utils/Helpers.java | 14 +- .../parquet/utils/LocalFSChannelProvider.java | 9 +- ...RunLenghBitPackingHybridBufferDecoder.java | 13 +- .../utils/SeekableChannelsProvider.java | 3 +- .../utils/CachedChannelProviderTest.java | 30 +- .../io/deephaven/db/plot/AxesLocation.java | 3 +- .../main/java/io/deephaven/db/plot/Axis.java | 22 +- .../java/io/deephaven/db/plot/AxisImpl.java | 10 +- .../io/deephaven/db/plot/AxisLocation.java | 3 +- .../java/io/deephaven/db/plot/BaseFigure.java | 37 +- .../io/deephaven/db/plot/BaseFigureImpl.java | 48 +- .../main/java/io/deephaven/db/plot/Chart.java | 40 +- .../java/io/deephaven/db/plot/ChartArray.java | 32 +- .../java/io/deephaven/db/plot/ChartImpl.java | 66 +- .../java/io/deephaven/db/plot/ChartTitle.java | 3 +- .../deephaven/db/plot/DynamicChartTitle.java | 35 +- .../io/deephaven/db/plot/FigureWidget.java | 3 +- .../main/java/io/deephaven/db/plot/Font.java | 9 +- .../java/io/deephaven/db/plot/LineStyle.java | 130 +- 
.../deephaven/db/plot/SeriesCollection.java | 13 +- .../io/deephaven/db/plot/SeriesLocation.java | 3 +- .../db/plot/axisformatters/AxisFormat.java | 7 +- .../axisformatters/DecimalAxisFormat.java | 4 +- .../plot/axisformatters/NanosAxisFormat.java | 13 +- .../ScientificNumberFormatter.java | 24 +- .../axistransformations/AxisTransform.java | 10 +- .../AxisTransformBusinessCalendar.java | 44 +- .../AxisTransformLambda.java | 22 +- .../axistransformations/AxisTransforms.java | 18 +- .../chartmodifiers/OneClickChartModifier.java | 10 +- .../deephaven/db/plot/colors/ColorMaps.java | 183 +- .../db/plot/composite/ScatterPlotMatrix.java | 186 +- .../db/plot/datasets/AbstractDataSeries.java | 7 +- .../db/plot/datasets/DataSeries.java | 32 +- .../db/plot/datasets/DynamicSeriesNamer.java | 7 +- .../category/AbstractCategoryDataSeries.java | 242 +- .../AbstractMapBasedCategoryDataSeries.java | 15 +- ...SwappableTableBasedCategoryDataSeries.java | 118 +- .../AbstractTableBasedCategoryDataSeries.java | 54 +- .../datasets/category/CategoryDataSeries.java | 148 +- .../category/CategoryDataSeriesInternal.java | 4 +- .../category/CategoryDataSeriesKernel.java | 14 +- .../category/CategoryDataSeriesMap.java | 41 +- .../CategoryDataSeriesSwappableTableMap.java | 33 +- .../category/CategoryDataSeriesTableMap.java | 31 +- .../CategoryErrorBarDataSeriesInternal.java | 3 +- .../CategoryErrorBarDataSeriesKernel.java | 18 +- .../CategoryErrorBarDataSeriesMap.java | 62 +- ...ryErrorBarDataSeriesSwappableTableMap.java | 56 +- .../CategoryErrorBarDataSeriesTableMap.java | 59 +- .../data/AssociativeDataSwappableTable.java | 45 +- .../datasets/data/AssociativeDataTable.java | 51 +- .../data/AssociativeDataWithDefault.java | 10 +- .../data/IndexableDataArrayNullCategory.java | 3 +- .../plot/datasets/data/IndexableDataByte.java | 3 +- .../datasets/data/IndexableDataCharacter.java | 3 +- .../data/IndexableDataDBDateTime.java | 3 +- .../datasets/data/IndexableDataDouble.java | 6 +- 
.../datasets/data/IndexableDataInfinite.java | 8 +- .../datasets/data/IndexableDataInteger.java | 3 +- .../data/IndexableDataListNullCategory.java | 3 +- .../data/IndexableDataSwappableTable.java | 16 +- .../IndexableDataSwappableTableDouble.java | 16 +- .../datasets/data/IndexableDataTable.java | 6 +- .../data/IndexableDataWithDefault.java | 6 +- .../IndexableNumericDataArrayDBDateTime.java | 4 +- .../data/IndexableNumericDataArrayDate.java | 4 +- .../data/IndexableNumericDataArrayFloat.java | 4 +- .../data/IndexableNumericDataArrayInt.java | 4 +- .../data/IndexableNumericDataArrayLong.java | 4 +- .../data/IndexableNumericDataArrayNumber.java | 4 +- .../data/IndexableNumericDataArrayShort.java | 4 +- .../data/IndexableNumericDataListNumber.java | 4 +- .../IndexableNumericDataSwappableTable.java | 21 +- .../data/IndexableNumericDataTable.java | 22 +- .../datasets/data/LiveAssociativeData.java | 3 +- .../histogram/DiscretizedRangeEqual.java | 7 +- .../histogram/HistogramCalculator.java | 130 +- .../interval/IntervalXYDataSeriesArray.java | 58 +- .../datasets/ohlc/OHLCDataSeriesArray.java | 17 +- .../OHLCDataSeriesSwappableTableArray.java | 28 +- .../ohlc/OHLCDataSeriesTableArray.java | 12 +- .../datasets/xy/AbstractXYDataSeries.java | 113 +- .../db/plot/datasets/xy/XYDataSeries.java | 105 +- .../plot/datasets/xy/XYDataSeriesArray.java | 12 +- .../datasets/xy/XYDataSeriesFunction.java | 11 +- .../datasets/xy/XYDataSeriesFunctionImpl.java | 66 +- .../xy/XYDataSeriesFunctionInternal.java | 8 +- .../datasets/xy/XYDataSeriesInternal.java | 4 +- .../xy/XYDataSeriesSwappableTableArray.java | 32 +- .../datasets/xy/XYDataSeriesTableArray.java | 17 +- .../xyerrorbar/XYErrorBarDataSeriesArray.java | 20 +- ...ErrorBarDataSeriesSwappableTableArray.java | 41 +- .../XYErrorBarDataSeriesTableArray.java | 30 +- .../errors/PlotIllegalArgumentException.java | 7 +- .../errors/PlotIllegalStateException.java | 7 +- .../io/deephaven/db/plot/errors/PlotInfo.java | 16 +- 
.../plot/errors/PlotRenderingException.java | 3 +- .../db/plot/errors/PlotRuntimeException.java | 13 +- .../PlotUnsupportedOperationException.java | 16 +- .../db/plot/filters/SelectableDataSet.java | 26 +- .../filters/SelectableDataSetOneClick.java | 89 +- .../SelectableDataSetSwappableTable.java | 6 +- .../db/plot/filters/Selectables.java | 33 +- .../db/plot/util/ArgumentValidations.java | 320 +- .../io/deephaven/db/plot/util/PlotUtils.java | 265 +- .../io/deephaven/db/plot/util/ShapeUtils.java | 14 +- .../util/functions/ClosureBiFunction.java | 3 +- .../ClosureDoubleBinaryOperator.java | 2 +- .../functions/ClosureDoubleUnaryOperator.java | 2 +- .../plot/util/functions/ClosureFunction.java | 3 +- .../util/functions/FigureImplFunction.java | 16 +- .../util/functions/SerializableClosure.java | 22 +- .../util/tables/ColumnHandlerFactory.java | 88 +- .../SwappableTableOneClickAbstract.java | 11 +- .../tables/SwappableTableOneClickMap.java | 14 +- .../tables/TableBackedTableMapHandle.java | 10 +- .../db/plot/util/tables/TableHandle.java | 15 +- .../tables/TableMapBackedTableMapHandle.java | 15 +- .../db/plot/util/tables/TableMapHandle.java | 10 +- .../java/io/deephaven/gui/shape/JShapes.java | 53 +- .../io/deephaven/gui/shape/NamedShape.java | 3 +- .../db/plot/Filters/TestSelectables.java | 15 +- .../java/io/deephaven/db/plot/TestColor.java | 15 +- .../java/io/deephaven/db/plot/TestFont.java | 19 +- .../db/plot/TestSeriesCollection.java | 16 +- .../deephaven/db/plot/TestSeriesLocation.java | 15 +- .../ScientificNumberFormatterTest.java | 44 +- .../TestAxisTransformBusinessCalendar.java | 17 +- .../TestAxisTransformLambda.java | 3 +- .../TestAxisTransforms.java | 2 +- .../db/plot/colors/TestColorMap.java | 3 +- .../plot/composite/TestScatterPlotMatrix.java | 26 +- .../TestAbstractCategoryDataSeries.java | 14 +- .../category/TestCategoryDataSeriesMap.java | 10 +- ...stCategoryDataSeriesSwappableTableMap.java | 6 +- .../TestCategoryDataSeriesTableMap.java | 10 +- 
.../datasets/data/TestAssociativeData.java | 23 +- .../plot/datasets/data/TestIndexableData.java | 53 +- ...ategoryErrorBarDataSeriesTableMapTest.java | 12 +- .../XYErrorBarDataSeriesTableArrayTest.java | 34 +- .../datasets/ohlc/TestOHLCDataSeries.java | 45 +- .../datasets/xy/TestAbstractXYDataSeries.java | 40 +- .../datasets/xy/TestXYDataSeriesArray.java | 45 +- .../xy/TestXYDataSeriesFunctionImpl.java | 24 +- .../db/plot/example_plots/BusinessTime.java | 21 +- .../db/plot/example_plots/CatErrorPlotBy.java | 12 +- .../db/plot/example_plots/CatPlotBy.java | 8 +- .../db/plot/example_plots/CatStackedBar.java | 16 +- .../plot/example_plots/ExamplePlotUtils.java | 3 +- .../db/plot/example_plots/OHLCChart.java | 8 +- .../db/plot/example_plots/OHLCPlotBy.java | 18 +- .../db/plot/example_plots/PieChartArray.java | 8 +- .../db/plot/example_plots/PrettyChart1.java | 21 +- .../plot/example_plots/ScatterPlotTable.java | 6 +- .../db/plot/example_plots/SimpleCatError.java | 6 +- .../SimpleCatErrorAxisTransform.java | 6 +- .../SimpleCategoryAxisTransform.java | 10 +- .../example_plots/SimpleCategoryColor.java | 23 +- .../example_plots/SimpleCategoryPlot.java | 10 +- .../example_plots/SimpleCategoryPlot2.java | 22 +- .../plot/example_plots/SimpleColorMaps.java | 14 +- .../plot/example_plots/SimpleHistoTable.java | 8 +- .../db/plot/example_plots/SimplePlotBy.java | 13 +- .../SimplePlotDynamicChartTitle.java | 9 +- .../example_plots/SimpleTsDBDatePlot.java | 13 +- .../db/plot/example_plots/SimpleTsPlot.java | 6 +- .../db/plot/example_plots/SimpleTsPlot2.java | 14 +- .../db/plot/example_plots/SimpleXYArea.java | 10 +- .../example_plots/SimpleXYAxisTransform.java | 10 +- .../db/plot/example_plots/SimpleXYBar.java | 6 +- .../db/plot/example_plots/SimpleXYColor.java | 12 +- .../plot/example_plots/SimpleXYDateBar.java | 8 +- .../plot/example_plots/SimpleXYDateTime.java | 11 +- .../db/plot/example_plots/SimpleXYError.java | 9 +- .../SimpleXYErrorAxisTransform.java | 8 +- 
.../db/plot/example_plots/SimpleXYHisto.java | 8 +- .../db/plot/example_plots/SimpleXYPlot.java | 4 +- .../db/plot/example_plots/SimpleXYPlot2.java | 18 +- .../db/plot/example_plots/SimpleXYPlot2b.java | 26 +- .../db/plot/example_plots/SimpleXYPlot3.java | 32 +- .../db/plot/example_plots/SimpleXYPlot4.java | 14 +- .../db/plot/example_plots/SimpleXYPlot5.java | 34 +- .../db/plot/example_plots/SimpleXYPlot5b.java | 36 +- .../db/plot/example_plots/SimpleXYSize.java | 12 +- .../example_plots/SimpleXYStackedArea.java | 14 +- .../plot/example_plots/SimpleXYStepPlot.java | 2 +- .../db/plot/example_plots/SimpleXYTable.java | 27 +- .../db/plot/example_plots/SimpleXYTicks.java | 24 +- .../db/plot/example_plots/XYStackedBar.java | 4 +- .../db/plot/util/TestArgumentValidations.java | 38 +- .../db/plot/util/TestShapeUtils.java | 3 +- .../functions/TestSerializableClosure.java | 11 +- .../util/tables/TestColumnHandlerFactory.java | 132 +- .../db/util/PlotReflectionPlaypen.java | 12 +- .../java/SevenZip/Compression/LZ/BinTree.java | 16 +- .../java/SevenZip/Compression/LZMA/Base.java | 2 +- .../SevenZip/Compression/LZMA/Decoder.java | 36 +- .../SevenZip/Compression/LZMA/Encoder.java | 198 +- .../RangeCoder/BitTreeDecoder.java | 2 +- .../RangeCoder/BitTreeEncoder.java | 4 +- .../Compression/RangeCoder/Encoder.java | 5 +- .../src/main/java/SevenZip/LzmaAlone.java | 31 +- .../src/main/java/SevenZip/LzmaBench.java | 22 +- .../stats/ObjectAllocationCollector.java | 64 +- .../io/deephaven/stats/StatsCPUCollector.java | 186 +- .../java/io/deephaven/stats/StatsDriver.java | 154 +- .../deephaven/stats/StatsIntradayLogger.java | 18 +- .../deephaven/stats/StatsMemoryCollector.java | 121 +- .../stats/TestStatsCPUCollector.java | 3 +- .../java/io/deephaven/tablelogger/Row.java | 28 +- .../io/deephaven/tablelogger/RowSetter.java | 3 +- .../io/deephaven/tablelogger/TableLogger.java | 10 +- .../tablelogger/TableLoggerImpl2.java | 18 +- .../io/deephaven/tablelogger/TableWriter.java | 18 +- 
.../tablelogger/WritableRowContainer.java | 5 +- .../UncheckedDeephavenException.java | 2 +- .../java/io/deephaven/stats/GcEventStats.java | 23 +- .../deephaven/test/types/OutOfBandTest.java | 4 +- .../io/deephaven/test/types/ParallelTest.java | 21 +- .../io/deephaven/test/types/SerialTest.java | 45 +- .../java/io/deephaven/util/BooleanUtils.java | 2 +- .../io/deephaven/util/BridgingLogHandler.java | 71 +- .../java/io/deephaven/util/BufferUtil.java | 47 +- .../java/io/deephaven/util/EncodingUtil.java | 35 +- .../io/deephaven/util/ExceptionDetails.java | 5 +- .../ExpandingThreadPoolExecutorFactory.java | 65 +- .../io/deephaven/util/FindExceptionCause.java | 33 +- .../deephaven/util/FunctionalInterfaces.java | 6 +- .../java/io/deephaven/util/GrpcLogging.java | 5 +- .../main/java/io/deephaven/util/HeapDump.java | 24 +- .../io/deephaven/util/MultiException.java | 13 +- .../io/deephaven/util/SafeCloseableArray.java | 7 +- .../io/deephaven/util/SafeCloseablePair.java | 12 +- .../java/io/deephaven/util/SoftRecycler.java | 40 +- .../main/java/io/deephaven/util/Utils.java | 91 +- .../deephaven/util/annotations/ArrayType.java | 3 +- .../util/annotations/ArrayTypeGetter.java | 6 +- .../util/annotations/DynamicUse.java | 7 +- .../util/annotations/InternalUseOnly.java | 4 +- .../annotations/ReferentialIntegrity.java | 10 +- .../util/annotations/TestUseOnly.java | 4 +- .../util/audit/AuditEventLoggerBasic.java | 6 +- .../deephaven/util/codec/BigDecimalCodec.java | 42 +- .../deephaven/util/codec/BigIntegerCodec.java | 15 +- .../io/deephaven/util/codec/CodecCache.java | 25 +- .../deephaven/util/codec/LocalDateCodec.java | 45 +- .../deephaven/util/codec/LocalTimeCodec.java | 31 +- .../io/deephaven/util/codec/MapCodec.java | 18 +- .../io/deephaven/util/codec/ObjectCodec.java | 27 +- .../deephaven/util/codec/ObjectDecoder.java | 18 +- .../util/codec/SimpleByteArrayCodec.java | 9 +- .../util/codec/StringBooleanMapCodec.java | 3 +- .../util/codec/StringDoubleMapCodec.java | 3 +- 
.../util/codec/StringFloatMapCodec.java | 3 +- .../util/codec/StringIntMapCodec.java | 3 +- .../util/codec/StringKeyedMapCodec.java | 4 +- .../util/codec/StringLongMapCodec.java | 3 +- .../util/codec/StringStringMapCodec.java | 3 +- .../codec/UTF8StringAsByteArrayCodec.java | 8 +- .../datastructures/RandomAccessDeque.java | 3 +- .../util/datastructures/ReleaseTracker.java | 69 +- .../datastructures/SegmentedSoftPool.java | 50 +- .../SimpleReferenceManager.java | 48 +- .../SortedIndexableMapWrapper.java | 19 +- .../util/datastructures/SubscriptionSet.java | 41 +- .../datastructures/WeakIdentityHashSet.java | 38 +- .../cache/ArrayBackedOffsetLookupCache.java | 32 +- .../cache/BaseOffsetLookupCache.java | 10 +- .../cache/BoundedIntrusiveMappingCache.java | 110 +- .../datastructures/cache/OffsetLookup.java | 3 +- .../cache/OffsetLookupCache.java | 4 +- .../cache/ReverseOffsetLookupCache.java | 30 +- .../SoftArrayBackedOffsetLookupCache.java | 50 +- .../hash/IntrusiveChainedEntryPool.java | 8 +- .../intrusive/IntrusiveArraySet.java | 10 +- .../linked/IntrusiveDoublyLinkedNode.java | 14 +- .../linked/IntrusiveDoublyLinkedQueue.java | 35 +- .../IntrusiveDoublyLinkedStructureBase.java | 13 +- .../linked/IntrusiveSinglyLinkedQueue.java | 4 +- .../deephaven/util/files/DirWatchService.java | 178 +- .../io/deephaven/util/files/FileHelper.java | 38 +- .../util/files/ResourceResolution.java | 125 +- .../util/locks/AwareFunctionalLock.java | 3 +- .../io/deephaven/util/locks/AwareLock.java | 3 +- .../deephaven/util/locks/FunctionalLock.java | 42 +- .../util/pool/ThreadSafeFixedSizePool.java | 29 +- .../pool/ThreadSafeLenientFixedSizePool.java | 31 +- .../BaselineThreadMXBeanThreadProfiler.java | 7 +- .../SunThreadMXBeanThreadProfiler.java | 28 +- .../profiling/ThreadMXBeanThreadProfiler.java | 21 +- .../util/profiling/ThreadProfiler.java | 31 +- .../util/progress/ProcessStatusSubrange.java | 7 +- .../util/progress/ProgressLogger.java | 3 +- .../util/progress/StatusCallback.java | 
3 +- .../reference/CleanupReferenceProcessor.java | 34 +- .../ProceduralReferenceCounted.java | 9 +- .../referencecounting/ReferenceCounted.java | 49 +- .../java/io/deephaven/util/text/Indenter.java | 4 +- .../deephaven/util/text/ScriptSanitizer.java | 4 +- .../util/text/SplitIgnoreQuotes.java | 4 +- .../util/thread/NamingThreadFactory.java | 7 +- .../io/deephaven/util/thread/ThreadDump.java | 115 +- .../io/deephaven/util/type/ClassUtils.java | 9 +- .../io/deephaven/util/type/EnumValue.java | 11 +- .../util/type/NamedImplementation.java | 4 +- .../io/deephaven/util/type/TypeUtils.java | 82 +- .../io/deephaven/utils/BigDecimalUtils.java | 7 +- .../java/io/deephaven/utils/ClockFactory.java | 4 +- .../java/io/deephaven/util/UtilsTest.java | 15 +- .../util/codec/BigDecimalCodecTest.java | 45 +- .../util/codec/BigIntegerCodecTest.java | 10 +- .../util/codec/LocalDateCodecTest.java | 7 +- .../util/codec/LocalTimeCodecTest.java | 45 +- .../datastructures/TestRandomAccessDeque.java | 5 +- .../datastructures/TestSegmentedSoftPool.java | 48 +- .../TestSimpleReferenceManager.java | 26 +- .../TestWeakIdentityHashSet.java | 9 +- .../TestBoundedIntrusiveMappingCache.java | 24 +- .../intrusive/TestIntrusiveArraySet.java | 12 +- .../TestIntrusiveDoublyLinkedQueue.java | 58 +- .../TestIntrusiveSinglyLinkedQueue.java | 13 +- .../util/files/TestDirWatchService.java | 85 +- .../util/files/TestResourceResolution.java | 45 +- .../pool/TestThreadSafeFixedSizePool.java | 37 +- .../TestThreadSafeLenientFixedSizePool.java | 49 +- .../util/profiling/TestThreadProfiler.java | 32 +- .../deephaven/utils/TestBigDecimalSqrt.java | 14 +- .../deephaven/utils/test/PropertySaver.java | 7 +- .../utils/test/PropertySaverTest.java | 24 +- .../extensions/ClassGraphExtension.java | 27 +- .../numerics/suanshu/SuanShuIntegration.java | 446 +- .../suanshu/TestSuanShuIntegration.java | 236 +- ...ropertyInputStreamLoaderResourcesOnly.java | 6 +- .../grpc_api/example/ConsoleClient.java | 207 +- 
.../example/SimpleDeephavenClient.java | 179 +- .../barrage/chunk/ReplicateBarrageUtils.java | 5 +- .../grpc_api_client/table/BarrageTable.java | 189 +- .../util/BarrageProtoUtil.java | 7 +- .../util/GrpcServiceOverrideBuilder.java | 150 +- .../chunk/BarrageColumnRoundTripTest.java | 220 +- .../grpc_api/runner/DockerEmpty.java | 4 +- .../flight/FlightMessageRoundTripTest.java | 114 +- .../figures/FigureWidgetTranslator.java | 688 ++- .../grpc_api/arrow/ArrowFlightUtil.java | 205 +- .../deephaven/grpc_api/arrow/ArrowModule.java | 5 +- .../BrowserFlightServiceGrpcBinding.java | 47 +- .../arrow/BrowserFlightServiceGrpcImpl.java | 69 +- .../arrow/FlightServiceGrpcBinding.java | 64 +- .../grpc_api/arrow/FlightServiceGrpcImpl.java | 159 +- .../grpc_api/auth/AuthContextProvider.java | 8 +- .../barrage/BarrageClientSubscription.java | 170 +- .../barrage/BarrageMessageConsumer.java | 8 +- .../barrage/BarrageMessageProducer.java | 650 ++- .../barrage/BarrageStreamGenerator.java | 204 +- .../grpc_api/barrage/BarrageStreamReader.java | 88 +- .../barrage/util/BarrageSchemaUtil.java | 161 +- .../console/ConsoleServiceGrpcImpl.java | 453 +- .../grpc_api/console/ScopeTicketResolver.java | 96 +- .../io/deephaven/grpc_api/log/LogInit.java | 11 +- .../io/deephaven/grpc_api/log/LogModule.java | 10 +- .../grpc_api/runner/DeephavenApiServer.java | 50 +- .../runner/DeephavenApiServerModule.java | 29 +- .../io/deephaven/grpc_api/runner/Main.java | 13 +- .../session/ExportTicketResolver.java | 42 +- .../grpc_api/session/SessionModule.java | 2 +- .../grpc_api/session/SessionService.java | 49 +- .../session/SessionServiceGrpcImpl.java | 95 +- .../grpc_api/session/SessionState.java | 354 +- .../grpc_api/session/TicketResolver.java | 21 +- .../grpc_api/session/TicketRouter.java | 108 +- .../table/ExportedTableUpdateListener.java | 60 +- .../grpc_api/table/TableServiceGrpcImpl.java | 230 +- .../table/ops/ComboAggregateGrpcImpl.java | 55 +- .../table/ops/DropColumnsGrpcImpl.java | 5 +- 
.../table/ops/EmptyTableGrpcImpl.java | 6 +- .../table/ops/FilterTableGrpcImpl.java | 36 +- .../table/ops/FlattenTableGrpcImpl.java | 6 +- .../table/ops/GrpcTableOperation.java | 19 +- .../table/ops/HeadOrTailByGrpcImpl.java | 21 +- .../table/ops/HeadOrTailGrpcImpl.java | 10 +- .../table/ops/JoinTablesGrpcImpl.java | 114 +- .../table/ops/MergeTablesGrpcImpl.java | 15 +- .../table/ops/RunChartDownsampleGrpcImpl.java | 15 +- .../table/ops/SelectDistinctGrpcImpl.java | 6 +- .../table/ops/SnapshotTableGrpcImpl.java | 29 +- .../grpc_api/table/ops/SortTableGrpcImpl.java | 14 +- .../grpc_api/table/ops/TimeTableGrpcImpl.java | 9 +- .../grpc_api/table/ops/UngroupGrpcImpl.java | 10 +- .../ops/UnstructuredFilterTableGrpcImpl.java | 16 +- .../table/ops/UpdateOrSelectGrpcImpl.java | 9 +- .../ops/filter/AbstractNormalizeFilters.java | 12 +- .../filter/ConvertInvalidInExpressions.java | 27 +- .../table/ops/filter/FilterFactory.java | 113 +- .../table/ops/filter/FilterPrinter.java | 20 +- .../table/ops/filter/FilterVisitor.java | 37 +- .../FlipNonReferenceMatchExpression.java | 37 +- .../ops/filter/MakeExpressionsNullSafe.java | 38 +- .../filter/MergeNestedBinaryOperations.java | 17 +- .../table/ops/filter/NormalizeFilterUtil.java | 96 +- .../table/ops/filter/NormalizeNots.java | 49 +- .../validation/ColumnExpressionValidator.java | 105 +- .../grpc_api/util/BrowserStream.java | 44 +- .../io/deephaven/grpc_api/util/GrpcUtil.java | 47 +- .../PassthroughInputStreamMarshaller.java | 3 +- .../io/deephaven/grpc_api/util/Scheduler.java | 6 +- .../util/UnaryInputStreamMarshaller.java | 2 +- .../grpc_api/session/SessionServiceTest.java | 38 +- .../grpc_api/session/SessionStateTest.java | 377 +- .../table/BarrageMessageRoundTripTest.java | 574 +-- .../table/ExportTableUpdateListenerTest.java | 48 +- .../filter/AbstractNormalizingFilterTest.java | 3 +- .../ConvertInvalidInExpressionsTest.java | 16 +- .../table/ops/filter/FilterPrinterTest.java | 12 +- 
.../table/ops/filter/FilterTestUtils.java | 51 +- .../FlipNonReferenceMatchExpressionTest.java | 41 +- .../filter/MakeExpressionsNullSafeTest.java | 28 +- .../MergeNestedBinaryOperationsTest.java | 84 +- .../table/ops/filter/NormalizeNotsTest.java | 135 +- .../util/TestControlledScheduler.java | 13 +- .../io/deephaven/grpc_api/util/TestUtil.java | 3 +- .../internal/log/LoggerFactoryFile.java | 18 +- .../deephaven/internal/log/LoggerSlf4j.java | 32 +- .../internal/log/LoggerFactoryStream.java | 4 +- .../deephaven/logback/LogBufferAppender.java | 4 +- .../io/deephaven/internal/log/Bootstrap.java | 2 +- .../log/LoggerFactoryServiceLoaderImpl.java | 4 +- .../log/LoggerFactorySingleCache.java | 4 +- .../parse/api/CompletionParseService.java | 6 +- .../parse/api/CompletionParseServiceNoOp.java | 7 +- .../lang/parse/api/ParsedResult.java | 4 +- .../deephaven/lang/api/ChunkerInvokable.java | 7 +- .../java/io/deephaven/lang/api/IsScope.java | 6 +- .../io/deephaven/lang/api/ParseCancelled.java | 4 +- .../io/deephaven/lang/api/ParseState.java | 8 +- .../io/deephaven/lang/meta/Messenger.java | 4 +- .../lang/parse/CompletionParser.java | 81 +- .../io/deephaven/lang/parse/LspTools.java | 33 +- .../deephaven/lang/parse/ParsedDocument.java | 70 +- .../io/deephaven/lang/parse/PendingParse.java | 190 +- .../lang/shared/lsp/CompletionCancelled.java | 4 +- .../lang/completion/ChunkerCompleter.java | 539 +- .../lang/completion/CompletionFragment.java | 14 +- .../lang/completion/CompletionLookups.java | 6 +- .../lang/completion/CompletionOptions.java | 4 +- .../lang/completion/CompletionRequest.java | 94 +- .../DelegatingCompletionHandler.java | 32 +- .../results/CompleteAssignment.java | 18 +- .../results/CompleteColumnExpression.java | 55 +- .../results/CompleteColumnName.java | 29 +- .../results/CompleteInvocation.java | 34 +- .../completion/results/CompleteTableName.java | 26 +- .../results/CompleteTableNamespace.java | 39 +- .../completion/results/CompleteVarName.java | 6 +- 
.../completion/results/CompletionBuilder.java | 32 +- .../lang/completion/results/FuzzyList.java | 8 +- .../lang/completion/AllTokenIterableImpl.java | 15 +- .../web/shared/cmd/ServerReplyHandle.java | 8 +- .../web/shared/fu/LinkedIterable.java | 13 +- .../web/shared/ide/ConsoleConfig.java | 29 +- .../web/shared/ide/ConsoleSessionType.java | 2 +- .../web/shared/ide/ExecutionHandle.java | 3 +- .../web/shared/ide/ScriptHandle.java | 29 +- .../web/shared/ide/lsp/CompletionItem.java | 28 +- .../web/shared/ide/lsp/Diagnostic.java | 24 +- .../ide/lsp/DidChangeTextDocumentParams.java | 6 +- .../ide/lsp/DidCloseTextDocumentParams.java | 3 +- .../web/shared/ide/lsp/DocumentRange.java | 12 +- .../web/shared/ide/lsp/Position.java | 10 +- .../lsp/TextDocumentContentChangeEvent.java | 8 +- .../ide/lsp/TextDocumentPositionParams.java | 3 +- .../web/shared/ide/lsp/TextEdit.java | 6 +- .../lsp/VersionedTextDocumentIdentifier.java | 3 +- .../deephaven/grpc_api/util/Exceptions.java | 4 +- .../grpc_api/util/ExportTicketHelper.java | 58 +- .../grpc_api/util/TicketRouterHelper.java | 2 +- .../main/java/io/deephaven/jpy/JpyConfig.java | 63 +- .../io/deephaven/jpy/JpyConfigSource.java | 60 +- .../java/io/deephaven/jpy/BuiltinsModule.java | 5 +- .../java/io/deephaven/jpy/JpyConfigExt.java | 33 +- .../main/java/io/deephaven/jpy/JpyModule.java | 36 +- .../io/deephaven/util/PrimitiveArrayType.java | 16 +- .../io/deephaven/jpy/CreateModuleTest.java | 26 +- .../java/io/deephaven/jpy/PyProxyTest.java | 34 +- .../java/io/deephaven/jpy/PythonTest.java | 4 +- .../integration/DestructorModuleParent.java | 15 +- .../jpy/integration/IntegerOutTest.java | 2 +- .../deephaven/jpy/integration/NoopModule.java | 6 +- .../jpy/integration/PrimitiveArrayTest.java | 21 +- .../io/deephaven/jpy/integration/PyDebug.java | 12 +- .../deephaven/jpy/integration/PyDictTest.java | 17 +- .../jpy/integration/PyLibNullArgTest.java | 6 +- .../deephaven/jpy/integration/PyLibTest.java | 8 +- 
.../deephaven/jpy/integration/PySysPath.java | 12 +- .../jpy/integration/ReferenceCounting.java | 13 +- .../integration/ReferenceCountingTest.java | 11 +- .../jpy/integration/SimpleObject.java | 6 +- .../deephaven/jpy/integration/TypeTest.java | 8 +- .../jpy/integration/PassPyObjectToJava.java | 10 +- .../jpy/integration/PingPongStack.java | 14 +- .../io/deephaven/annotations/AllowNulls.java | 3 +- .../io/deephaven/annotations/LeafStyle.java | 2 +- .../io/deephaven/annotations/NodeStyle.java | 7 +- .../io/deephaven/qst/TableAdapterImpl.java | 20 +- .../java/io/deephaven/qst/TableCreator.java | 14 +- .../io/deephaven/qst/array/BooleanArray.java | 2 +- .../io/deephaven/qst/array/ByteArray.java | 2 +- .../io/deephaven/qst/array/CharArray.java | 2 +- .../io/deephaven/qst/array/DoubleArray.java | 2 +- .../io/deephaven/qst/array/FloatArray.java | 2 +- .../io/deephaven/qst/array/GenericArray.java | 4 +- .../java/io/deephaven/qst/array/IntArray.java | 2 +- .../io/deephaven/qst/array/LongArray.java | 2 +- .../deephaven/qst/array/PrimitiveArray.java | 2 +- .../io/deephaven/qst/array/ShortArray.java | 2 +- .../qst/array/TypeToArrayBuilder.java | 2 +- .../qst/column/header/ColumnHeader.java | 38 +- .../qst/column/header/ColumnHeaders6.java | 4 +- .../qst/column/header/ColumnHeaders7.java | 6 +- .../qst/column/header/ColumnHeaders8.java | 8 +- .../qst/column/header/ColumnHeaders9.java | 4 +- .../qst/column/header/ColumnHeadersN.java | 12 +- .../io/deephaven/qst/table/HeadTable.java | 2 +- .../java/io/deephaven/qst/table/JoinBase.java | 4 +- .../io/deephaven/qst/table/JoinTable.java | 3 +- .../io/deephaven/qst/table/LabeledTable.java | 2 +- .../io/deephaven/qst/table/MergeTable.java | 14 +- .../java/io/deephaven/qst/table/NewTable.java | 7 +- .../deephaven/qst/table/ParentsVisitor.java | 10 +- .../io/deephaven/qst/table/TableBase.java | 92 +- .../deephaven/qst/table/TableCreatorImpl.java | 7 +- .../io/deephaven/qst/table/TableHeader.java | 7 +- 
.../io/deephaven/qst/table/TableSpec.java | 15 +- .../io/deephaven/qst/table/TailTable.java | 2 +- .../io/deephaven/qst/table/TimeTable.java | 8 +- .../io/deephaven/qst/type/ArrayTypeBase.java | 2 +- .../io/deephaven/qst/type/CustomType.java | 8 +- .../qst/type/DbGenericArrayType.java | 2 +- .../qst/type/DbPrimitiveArrayType.java | 16 +- .../deephaven/qst/type/NativeArrayType.java | 4 +- .../main/java/io/deephaven/qst/type/Type.java | 9 +- .../io/deephaven/qst/type/TypeHelper.java | 12 +- .../deephaven/qst/array/BooleanArrayTest.java | 4 +- .../io/deephaven/qst/array/ByteArrayTest.java | 4 +- .../io/deephaven/qst/array/CharArrayTest.java | 2 +- .../deephaven/qst/array/DoubleArrayTest.java | 2 +- .../deephaven/qst/array/FloatArrayTest.java | 2 +- .../io/deephaven/qst/array/IntArrayTest.java | 2 +- .../io/deephaven/qst/array/LongArrayTest.java | 2 +- .../deephaven/qst/array/ShortArrayTest.java | 4 +- .../qst/column/header/ColumnHeadersTest.java | 6 +- .../qst/examples/EmployeesExample.java | 20 +- .../qst/table/ParentsVisitorTest.java | 27 +- .../qst/table/TableCreatorImplTest.java | 34 +- .../java/io/deephaven/qst/type/TypeTest.java | 20 +- style/eclipse-java-google-style.xml | 10 +- .../deephaven/annotations/BuildableStyle.java | 8 +- .../io/deephaven/annotations/SimpleStyle.java | 8 +- .../java/io/deephaven/api/ColumnName.java | 2 +- .../java/io/deephaven/api/JoinAddition.java | 6 +- .../io/deephaven/api/JoinAdditionImpl.java | 2 +- .../main/java/io/deephaven/api/JoinMatch.java | 15 +- .../java/io/deephaven/api/JoinMatchImpl.java | 2 +- .../main/java/io/deephaven/api/RawString.java | 4 +- .../java/io/deephaven/api/Selectable.java | 6 +- .../java/io/deephaven/api/SelectableImpl.java | 2 +- .../main/java/io/deephaven/api/Strings.java | 11 +- .../io/deephaven/api/TableOperations.java | 217 +- .../io/deephaven/api/agg/Aggregation.java | 6 +- .../api/agg/AggregationFinisher.java | 13 +- .../deephaven/api/agg/AggregationOutputs.java | 3 +- 
.../main/java/io/deephaven/api/agg/Multi.java | 6 +- .../main/java/io/deephaven/api/agg/Pair.java | 10 +- .../java/io/deephaven/api/agg/PairImpl.java | 2 +- .../main/java/io/deephaven/api/agg/Pct.java | 2 +- .../io/deephaven/api/agg/SortedFirst.java | 9 +- .../java/io/deephaven/api/agg/SortedLast.java | 9 +- .../io/deephaven/api/filter/FilterAnd.java | 5 +- .../deephaven/api/filter/FilterCondition.java | 12 +- .../io/deephaven/api/filter/FilterOr.java | 5 +- .../io/deephaven/api/value/ValueLong.java | 2 +- .../db/tables/utils/NameValidator.java | 129 +- .../io/deephaven/api/JoinAdditionTest.java | 2 +- .../java/io/deephaven/api/JoinMatchTest.java | 2 +- .../java/io/deephaven/api/SelectableTest.java | 4 +- .../io/deephaven/api/filter/FilterTest.java | 12 +- .../db/tables/utils/TestNameValidator.java | 4 +- .../io/deephaven/ide/shared/IdeSession.java | 72 +- .../io/deephaven/ide/shared/LspTranslate.java | 14 +- .../deephaven/web/DeephavenJsApiLinker.java | 9 +- .../web/client/api/BigDecimalWrapper.java | 3 +- .../web/client/api/BigIntegerWrapper.java | 3 +- .../deephaven/web/client/api/Callbacks.java | 21 +- .../web/client/api/ClientConfiguration.java | 8 +- .../io/deephaven/web/client/api/Column.java | 38 +- .../web/client/api/HasEventHandling.java | 22 +- .../web/client/api/JsColumnStatistics.java | 29 +- .../deephaven/web/client/api/JsRangeSet.java | 18 +- .../io/deephaven/web/client/api/JsTable.java | 541 +- .../web/client/api/JsTableFetch.java | 4 +- .../web/client/api/JsTotalsTable.java | 16 +- .../web/client/api/JsTotalsTableConfig.java | 82 +- .../deephaven/web/client/api/LazyString.java | 3 +- .../web/client/api/LocalDateWrapper.java | 7 +- .../web/client/api/LocalTimeWrapper.java | 9 +- .../web/client/api/QueryConnectable.java | 32 +- .../web/client/api/ReconnectState.java | 22 +- .../web/client/api/ResponseStreamWrapper.java | 5 +- .../io/deephaven/web/client/api/Sort.java | 17 +- .../io/deephaven/web/client/api/TableMap.java | 41 +- 
.../deephaven/web/client/api/TableTicket.java | 17 +- .../web/client/api/WorkerConnection.java | 654 ++- .../web/client/api/barrage/BarrageUtils.java | 197 +- .../api/barrage/CompressedRangeSetReader.java | 25 +- .../api/barrage/ShiftedRangeReader.java | 2 +- .../web/client/api/batch/BatchBuilder.java | 62 +- .../web/client/api/batch/RequestBatcher.java | 91 +- .../web/client/api/batch/TableConfig.java | 83 +- .../client/api/console/JsVariableChanges.java | 15 +- .../web/client/api/csv/CsvTypeParser.java | 59 +- .../client/api/filter/FilterCondition.java | 16 +- .../web/client/api/filter/FilterValue.java | 15 +- .../web/client/api/i18n/JsDateTimeFormat.java | 67 +- .../web/client/api/i18n/JsNumberFormat.java | 3 +- .../web/client/api/i18n/JsTimeZone.java | 3 +- .../api/input/ColumnValueDehydrater.java | 36 +- .../web/client/api/input/JsInputTable.java | 17 +- .../client/api/lifecycle/HasLifecycle.java | 8 +- .../web/client/api/state/StateCache.java | 9 +- .../subscription/SubscriptionTableData.java | 77 +- .../api/subscription/TableSubscription.java | 16 +- .../TableViewportSubscription.java | 130 +- .../client/api/subscription/ViewportData.java | 59 +- .../client/api/subscription/ViewportRow.java | 13 +- .../web/client/api/tree/JsRollupConfig.java | 4 +- .../web/client/api/tree/JsTreeTable.java | 387 +- .../client/api/tree/JsTreeTableConfig.java | 3 +- .../tree/enums/JsAggregationOperation.java | 30 +- .../widget/calendar/JsBusinessCalendar.java | 9 +- .../client/api/widget/calendar/JsHoliday.java | 5 +- .../web/client/api/widget/plot/ChartData.java | 87 +- .../api/widget/plot/DataUpdateEvent.java | 12 +- .../api/widget/plot/DownsampleOptions.java | 18 +- .../api/widget/plot/FigureSubscription.java | 165 +- .../web/client/api/widget/plot/JsAxis.java | 23 +- .../web/client/api/widget/plot/JsChart.java | 12 +- .../web/client/api/widget/plot/JsFigure.java | 227 +- .../api/widget/plot/JsFigureFactory.java | 129 +- .../client/api/widget/plot/JsMultiSeries.java | 59 +- 
.../web/client/api/widget/plot/JsSeries.java | 15 +- .../api/widget/plot/JsSeriesDescriptor.java | 3 +- .../web/client/api/widget/plot/OneClick.java | 31 +- .../api/widget/plot/SeriesDataSource.java | 4 +- .../web/client/fu/CancellablePromise.java | 6 +- .../io/deephaven/web/client/fu/JsData.java | 44 +- .../io/deephaven/web/client/fu/JsLog.java | 3 +- .../deephaven/web/client/fu/LazyPromise.java | 52 +- .../web/client/state/ActiveTableBinding.java | 54 +- .../web/client/state/ClientTableState.java | 302 +- .../web/client/state/HasTableBinding.java | 5 +- .../web/client/state/PausedTableBinding.java | 7 +- .../web/client/state/TableReviver.java | 36 +- .../barrage/CompressedRangeSetReaderTest.java | 3 +- .../api/filter/FilterConditionTestGwt.java | 61 +- .../api/i18n/JsDateTimeFormatTestGwt.java | 32 +- .../api/i18n/JsNumberFormatTestGwt.java | 28 +- .../org/apache/arrow/Flatbuf.java | 36 +- .../apache/arrow/flatbuf/BodyCompression.java | 13 +- .../arrow/flatbuf/BodyCompressionMethod.java | 6 +- .../apache/arrow/flatbuf/CompressionType.java | 8 +- .../apache/arrow/flatbuf/DictionaryBatch.java | 11 +- .../org/apache/arrow/flatbuf/FieldNode.java | 6 +- .../org/apache/arrow/flatbuf/Message.java | 18 +- .../apache/arrow/flatbuf/MessageHeader.java | 12 +- .../org/apache/arrow/flatbuf/RecordBatch.java | 9 +- .../org/apache/arrow/Flatbuf.java | 81 +- .../org/apache/arrow/flatbuf/Binary.java | 6 +- .../org/apache/arrow/flatbuf/Bool.java | 6 +- .../org/apache/arrow/flatbuf/Buffer.java | 6 +- .../org/apache/arrow/flatbuf/Date.java | 6 +- .../org/apache/arrow/flatbuf/DateUnit.java | 8 +- .../org/apache/arrow/flatbuf/Decimal.java | 8 +- .../arrow/flatbuf/DictionaryEncoding.java | 10 +- .../apache/arrow/flatbuf/DictionaryKind.java | 6 +- .../org/apache/arrow/flatbuf/Duration.java | 6 +- .../org/apache/arrow/flatbuf/Endianness.java | 8 +- .../org/apache/arrow/flatbuf/Feature.java | 10 +- .../org/apache/arrow/flatbuf/Field.java | 6 +- .../apache/arrow/flatbuf/FixedSizeBinary.java 
| 11 +- .../apache/arrow/flatbuf/FixedSizeList.java | 8 +- .../apache/arrow/flatbuf/FloatingPoint.java | 8 +- .../org/apache/arrow/flatbuf/Int.java | 6 +- .../org/apache/arrow/flatbuf/Interval.java | 6 +- .../apache/arrow/flatbuf/IntervalUnit.java | 8 +- .../org/apache/arrow/flatbuf/KeyValue.java | 9 +- .../org/apache/arrow/flatbuf/LargeBinary.java | 9 +- .../org/apache/arrow/flatbuf/LargeList.java | 6 +- .../org/apache/arrow/flatbuf/LargeUtf8.java | 6 +- .../org/apache/arrow/flatbuf/List.java | 6 +- .../org/apache/arrow/flatbuf/Map.java | 6 +- .../apache/arrow/flatbuf/MetadataVersion.java | 14 +- .../org/apache/arrow/flatbuf/Null.java | 6 +- .../org/apache/arrow/flatbuf/Precision.java | 10 +- .../org/apache/arrow/flatbuf/Schema.java | 16 +- .../org/apache/arrow/flatbuf/Struct_.java | 6 +- .../org/apache/arrow/flatbuf/Time.java | 6 +- .../org/apache/arrow/flatbuf/TimeUnit.java | 12 +- .../org/apache/arrow/flatbuf/Timestamp.java | 8 +- .../org/apache/arrow/flatbuf/Type.java | 48 +- .../org/apache/arrow/flatbuf/Union.java | 14 +- .../org/apache/arrow/flatbuf/UnionMode.java | 8 +- .../org/apache/arrow/flatbuf/Utf8.java | 6 +- .../browserflight_pb/BrowserNextResponse.java | 31 +- .../BidirectionalStream.java | 6 +- .../BrowserFlightService.java | 6 +- .../BrowserFlightServiceClient.java | 199 +- .../RequestStream.java | 6 +- .../ResponseStream.java | 6 +- .../UnaryResponse.java | 6 +- .../flight/protocol/flight_pb/Action.java | 6 +- .../flight/protocol/flight_pb/ActionType.java | 8 +- .../flight/protocol/flight_pb/BasicAuth.java | 8 +- .../flight/protocol/flight_pb/Criteria.java | 17 +- .../flight/protocol/flight_pb/Empty.java | 6 +- .../flight/protocol/flight_pb/FlightData.java | 70 +- .../protocol/flight_pb/FlightDescriptor.java | 10 +- .../protocol/flight_pb/FlightEndpoint.java | 54 +- .../flight/protocol/flight_pb/FlightInfo.java | 99 +- .../protocol/flight_pb/HandshakeRequest.java | 20 +- .../protocol/flight_pb/HandshakeResponse.java | 22 +- 
.../flight/protocol/flight_pb/Location.java | 9 +- .../flight/protocol/flight_pb/PutResult.java | 20 +- .../flight/protocol/flight_pb/Result.java | 6 +- .../protocol/flight_pb/SchemaResult.java | 16 +- .../flight/protocol/flight_pb/Ticket.java | 6 +- .../flightdescriptor/DescriptorTypeMap.java | 6 +- .../BidirectionalStream.java | 6 +- .../flight_pb_service/FlightService.java | 6 +- .../FlightServiceClient.java | 130 +- .../flight_pb_service/RequestStream.java | 6 +- .../flight_pb_service/ResponseStream.java | 6 +- .../flight_pb_service/UnaryResponse.java | 6 +- .../browserheaders/BrowserHeaders.java | 22 +- .../browserheaders/IterateHeaders.java | 10 +- .../windowheaders/WindowHeaders.java | 6 +- .../dhinternal/flatbuffers/Encoding.java | 2 +- .../proto/dhinternal/grpcweb/Client.java | 2 +- .../proto/dhinternal/grpcweb/Grpc.java | 2 +- .../proto/dhinternal/grpcweb/Invoke.java | 2 +- .../proto/dhinternal/grpcweb/Unary.java | 2 +- .../grpcweb/chunkparser/ChunkParser.java | 6 +- .../grpcweb/chunkparser/ChunkType.java | 8 +- .../dhinternal/grpcweb/client/Client.java | 2 +- .../grpcweb/client/ClientRpcOptions.java | 6 +- .../dhinternal/grpcweb/client/RpcOptions.java | 6 +- .../proto/dhinternal/grpcweb/grpc/Client.java | 2 +- .../grpcweb/grpc/ClientRpcOptions.java | 8 +- .../proto/dhinternal/grpcweb/grpc/Code.java | 32 +- .../grpc/CrossBrowserHttpTransportInit.java | 9 +- .../grpcweb/grpc/FetchReadableStreamInit.java | 6 +- .../grpcweb/grpc/InvokeRpcOptions.java | 8 +- .../grpcweb/grpc/MethodDefinition.java | 8 +- .../grpcweb/grpc/ProtobufMessage.java | 8 +- .../grpcweb/grpc/ProtobufMessageClass.java | 8 +- .../dhinternal/grpcweb/grpc/RpcOptions.java | 2 +- .../grpcweb/grpc/ServiceDefinition.java | 8 +- .../dhinternal/grpcweb/grpc/Transport.java | 2 +- .../grpcweb/grpc/TransportFactory.java | 8 +- .../grpcweb/grpc/TransportOptions.java | 8 +- .../grpcweb/grpc/UnaryMethodDefinition.java | 8 +- .../dhinternal/grpcweb/grpc/UnaryOutput.java | 2 +- 
.../grpcweb/grpc/UnaryRpcOptions.java | 8 +- .../grpcweb/grpc/XhrTransportInit.java | 8 +- .../grpcweb/invoke/InvokeRpcOptions.java | 8 +- .../grpcweb/message/ProtobufMessage.java | 6 +- .../grpcweb/message/ProtobufMessageClass.java | 6 +- .../grpcweb/service/MethodDefinition.java | 6 +- .../grpcweb/service/ServiceDefinition.java | 6 +- .../service/UnaryMethodDefinition.java | 6 +- .../grpcweb/transports/Transport.java | 8 +- .../grpcweb/transports/http/Fetch.java | 6 +- .../grpcweb/transports/http/Http.java | 8 +- .../grpcweb/transports/http/Xhr.java | 6 +- .../grpcweb/transports/http/XhrUtil.java | 6 +- .../http/CrossBrowserHttpTransportInit.java | 6 +- .../http/xhr/MozChunkedArrayBufferXHR.java | 6 +- .../grpcweb/transports/http/xhr/XHR.java | 6 +- .../transports/http/xhr/XhrTransportInit.java | 6 +- .../transports/transport/Transport.java | 6 +- .../transport/TransportFactory.java | 6 +- .../transport/TransportOptions.java | 6 +- .../dhinternal/grpcweb/unary/UnaryOutput.java | 6 +- .../grpcweb/unary/UnaryRpcOptions.java | 8 +- .../barrage/flatbuf/BarrageMessageType.java | 18 +- .../flatbuf/BarrageMessageWrapper.java | 50 +- .../flatbuf/BarrageModColumnMetadata.java | 28 +- .../flatbuf/BarrageSerializationOptions.java | 18 +- .../flatbuf/BarrageSubscriptionRequest.java | 58 +- .../flatbuf/BarrageUpdateMetadata.java | 137 +- .../barrage/flatbuf/ColumnConversionMode.java | 10 +- .../barrage/flatbuf/NewSessionRequest.java | 20 +- .../flatbuf/RefreshSessionRequest.java | 23 +- .../barrage/flatbuf/SessionInfoResponse.java | 49 +- .../io/deephaven/proto/Table_pb.java | 6 +- .../BindTableToVariableRequest.java | 42 +- .../BindTableToVariableResponse.java | 10 +- .../console_pb/CancelCommandRequest.java | 34 +- .../console_pb/CancelCommandResponse.java | 8 +- .../console_pb/ChangeDocumentRequest.java | 66 +- .../console_pb/ChangeDocumentResponse.java | 11 +- .../console_pb/CloseDocumentRequest.java | 38 +- .../console_pb/CloseDocumentResponse.java | 8 +- 
.../proto/console_pb/CompletionContext.java | 10 +- .../proto/console_pb/CompletionItem.java | 17 +- .../proto/console_pb/DocumentRange.java | 10 +- .../console_pb/ExecuteCommandRequest.java | 34 +- .../console_pb/ExecuteCommandResponse.java | 29 +- .../proto/console_pb/FetchFigureRequest.java | 34 +- .../proto/console_pb/FetchFigureResponse.java | 262 +- .../console_pb/FetchPandasTableRequest.java | 37 +- .../console_pb/FetchTableMapRequest.java | 34 +- .../console_pb/FetchTableMapResponse.java | 8 +- .../proto/console_pb/FetchTableRequest.java | 34 +- .../proto/console_pb/FigureDescriptor.java | 250 +- .../console_pb/GetCompletionItemsRequest.java | 47 +- .../GetCompletionItemsResponse.java | 46 +- .../console_pb/GetConsoleTypesRequest.java | 11 +- .../console_pb/GetConsoleTypesResponse.java | 13 +- .../proto/console_pb/LogSubscriptionData.java | 10 +- .../console_pb/LogSubscriptionRequest.java | 13 +- .../proto/console_pb/OpenDocumentRequest.java | 39 +- .../console_pb/OpenDocumentResponse.java | 8 +- .../deephaven/proto/console_pb/Position.java | 9 +- .../proto/console_pb/StartConsoleRequest.java | 34 +- .../console_pb/StartConsoleResponse.java | 34 +- .../proto/console_pb/TextDocumentItem.java | 10 +- .../deephaven/proto/console_pb/TextEdit.java | 9 +- .../proto/console_pb/VariableDefinition.java | 10 +- .../VersionedTextDocumentIdentifier.java | 14 +- .../TextDocumentContentChangeEvent.java | 18 +- .../figuredescriptor/AxisDescriptor.java | 50 +- .../figuredescriptor/BoolMapWithDefault.java | 10 +- .../BusinessCalendarDescriptor.java | 48 +- .../figuredescriptor/ChartDescriptor.java | 152 +- .../DoubleMapWithDefault.java | 10 +- .../MultiSeriesDescriptor.java | 32 +- .../MultiSeriesSourceDescriptor.java | 12 +- .../figuredescriptor/OneClickDescriptor.java | 10 +- .../figuredescriptor/SeriesDescriptor.java | 30 +- .../figuredescriptor/SeriesPlotStyleMap.java | 6 +- .../figuredescriptor/SourceDescriptor.java | 10 +- .../figuredescriptor/SourceTypeMap.java | 6 
+- .../StringMapWithDefault.java | 10 +- .../axisdescriptor/AxisFormatTypeMap.java | 6 +- .../axisdescriptor/AxisPositionMap.java | 6 +- .../axisdescriptor/AxisTypeMap.java | 6 +- .../BusinessPeriod.java | 10 +- .../DayOfWeekMap.java | 6 +- .../businesscalendardescriptor/Holiday.java | 22 +- .../businesscalendardescriptor/LocalDate.java | 8 +- .../chartdescriptor/ChartTypeMap.java | 6 +- .../BidirectionalStream.java | 6 +- .../console_pb_service/ConsoleService.java | 6 +- .../ConsoleServiceClient.java | 821 ++- .../console_pb_service/RequestStream.java | 6 +- .../console_pb_service/ResponseStream.java | 6 +- .../console_pb_service/UnaryResponse.java | 6 +- .../deephaven/proto/session_pb/BasicAuth.java | 8 +- .../proto/session_pb/ExportNotification.java | 34 +- .../session_pb/ExportNotificationRequest.java | 10 +- .../proto/session_pb/HandshakeRequest.java | 20 +- .../proto/session_pb/HandshakeResponse.java | 56 +- .../proto/session_pb/ReleaseResponse.java | 10 +- .../exportnotification/StateMap.java | 6 +- .../BidirectionalStream.java | 6 +- .../session_pb_service/RequestStream.java | 6 +- .../session_pb_service/ResponseStream.java | 6 +- .../session_pb_service/SessionService.java | 6 +- .../SessionServiceClient.java | 245 +- .../session_pb_service/UnaryResponse.java | 6 +- .../proto/table_pb/AndCondition.java | 82 +- .../proto/table_pb/AsOfJoinTablesRequest.java | 34 +- .../proto/table_pb/BatchTableRequest.java | 256 +- .../proto/table_pb/CaseSensitivityMap.java | 6 +- .../proto/table_pb/ComboAggregateRequest.java | 50 +- .../proto/table_pb/CompareCondition.java | 20 +- .../deephaven/proto/table_pb/Condition.java | 22 +- .../proto/table_pb/ContainsCondition.java | 10 +- .../table_pb/CrossJoinTablesRequest.java | 37 +- .../proto/table_pb/DropColumnsRequest.java | 34 +- .../proto/table_pb/EmptyTableRequest.java | 34 +- .../table_pb/ExactJoinTablesRequest.java | 37 +- .../ExportedTableCreationResponse.java | 79 +- .../table_pb/ExportedTableUpdateMessage.java | 37 +- 
.../table_pb/ExportedTableUpdatesRequest.java | 10 +- .../proto/table_pb/FilterTableRequest.java | 108 +- .../proto/table_pb/FlattenRequest.java | 36 +- .../proto/table_pb/HeadOrTailByRequest.java | 34 +- .../proto/table_pb/HeadOrTailRequest.java | 34 +- .../deephaven/proto/table_pb/InCondition.java | 21 +- .../proto/table_pb/InvokeCondition.java | 20 +- .../proto/table_pb/IsNullCondition.java | 10 +- .../proto/table_pb/LeftJoinTablesRequest.java | 34 +- .../io/deephaven/proto/table_pb/Literal.java | 6 +- .../proto/table_pb/MatchTypeMap.java | 6 +- .../proto/table_pb/MatchesCondition.java | 10 +- .../proto/table_pb/MergeTablesRequest.java | 50 +- .../table_pb/NaturalJoinTablesRequest.java | 36 +- .../proto/table_pb/NotCondition.java | 46 +- .../deephaven/proto/table_pb/OrCondition.java | 83 +- .../deephaven/proto/table_pb/Reference.java | 8 +- .../table_pb/RunChartDownsampleRequest.java | 43 +- .../proto/table_pb/SearchCondition.java | 26 +- .../proto/table_pb/SelectDistinctRequest.java | 34 +- .../proto/table_pb/SelectOrUpdateRequest.java | 34 +- .../proto/table_pb/SnapshotTableRequest.java | 34 +- .../proto/table_pb/SortDescriptor.java | 10 +- .../proto/table_pb/SortTableRequest.java | 54 +- .../proto/table_pb/TableReference.java | 38 +- .../proto/table_pb/TimeTableRequest.java | 34 +- .../proto/table_pb/UngroupRequest.java | 36 +- .../UnstructuredFilterTableRequest.java | 50 +- .../io/deephaven/proto/table_pb/Value.java | 6 +- .../asofjointablesrequest/MatchRuleMap.java | 6 +- .../table_pb/batchtablerequest/Operation.java | 171 +- .../batchtablerequest/operation/OpCase.java | 60 +- .../comboaggregaterequest/AggTypeMap.java | 6 +- .../comboaggregaterequest/Aggregate.java | 8 +- .../comparecondition/CompareOperationMap.java | 6 +- .../proto/table_pb/condition/DataCase.java | 26 +- .../proto/table_pb/literal/ValueCase.java | 16 +- .../runchartdownsamplerequest/ZoomRange.java | 8 +- .../sortdescriptor/SortDirectionMap.java | 6 +- 
.../table_pb/tablereference/RefCase.java | 10 +- .../proto/table_pb/value/DataCase.java | 10 +- .../table_pb_service/BidirectionalStream.java | 6 +- .../proto/table_pb_service/RequestStream.java | 6 +- .../table_pb_service/ResponseStream.java | 6 +- .../proto/table_pb_service/TableService.java | 6 +- .../table_pb_service/TableServiceClient.java | 1706 ++++--- .../proto/table_pb_service/UnaryResponse.java | 6 +- .../io/deephaven/proto/ticket_pb/Ticket.java | 6 +- .../proto/dhinternal/jspb/BinaryDecoder.java | 6 +- .../proto/dhinternal/jspb/BinaryIterator.java | 33 +- .../proto/dhinternal/jspb/BinaryReader.java | 29 +- .../proto/dhinternal/jspb/BinaryWriter.java | 36 +- .../jspb/ExtensionFieldBinaryInfo.java | 26 +- .../dhinternal/jspb/ExtensionFieldInfo.java | 10 +- .../javascript/proto/dhinternal/jspb/Map.java | 12 +- .../proto/dhinternal/jspb/Message.java | 421 +- .../jspb/binaryconstants/FieldType.java | 46 +- .../jspb/binaryconstants/WireType.java | 18 +- .../io/deephaven/ide/client/IdeClient.java | 8 +- .../deephaven/ide/client/IdeConnection.java | 12 +- .../web/shared/ast/ConvertEqToIn.java | 25 +- .../ast/ConvertInvalidInExpressions.java | 25 +- .../web/shared/ast/FilterPrinter.java | 11 +- .../web/shared/ast/FilterValidator.java | 63 +- .../ast/FlipNonReferenceMatchExpression.java | 40 +- .../web/shared/ast/GetTopLevelFilters.java | 10 +- .../shared/ast/MakeExpressionsNullSafe.java | 23 +- .../ast/MergeNestedBinaryOperations.java | 17 +- .../ast/MergeRelatedSiblingExpressions.java | 47 +- .../web/shared/ast/NormalizeNots.java | 17 +- .../web/shared/ast/ReplacingVisitor.java | 3 +- .../web/shared/batch/BatchTableRequest.java | 43 +- .../web/shared/batch/BatchTableResponse.java | 13 +- .../web/shared/data/ColumnHolder.java | 8 +- .../web/shared/data/ColumnStatistics.java | 33 +- .../web/shared/data/ColumnValue.java | 22 +- .../shared/data/CustomColumnDescriptor.java | 25 +- .../web/shared/data/DeltaUpdates.java | 8 +- .../web/shared/data/FilterDescriptor.java 
| 28 +- .../web/shared/data/HandleMapping.java | 6 +- .../web/shared/data/HeadOrTailDescriptor.java | 6 +- .../shared/data/InitialTableDefinition.java | 4 +- .../deephaven/web/shared/data/LocalDate.java | 5 +- .../deephaven/web/shared/data/LocalTime.java | 6 +- .../io/deephaven/web/shared/data/LogItem.java | 3 +- .../io/deephaven/web/shared/data/Range.java | 21 +- .../deephaven/web/shared/data/RangeSet.java | 79 +- .../deephaven/web/shared/data/RowValues.java | 2 +- .../web/shared/data/SortDescriptor.java | 8 +- .../data/TableAttributesDefinition.java | 15 +- .../web/shared/data/TableHandle.java | 51 +- .../web/shared/data/TableMapDeclaration.java | 5 +- .../shared/data/TableSubscriptionRequest.java | 30 +- .../deephaven/web/shared/data/Viewport.java | 6 +- .../columns/LocalDateArrayColumnData.java | 4 +- .../columns/LocalTimeArrayColumnData.java | 4 +- .../web/shared/data/treetable/Key.java | 13 +- .../shared/data/treetable/TableDetails.java | 6 +- .../data/treetable/TreeTableRequest.java | 18 +- .../data/treetable/TreeTableResult.java | 24 +- .../web/shared/fu/IdentityHashSet.java | 7 +- .../deephaven/web/shared/fu/PromiseLike.java | 5 +- .../io/deephaven/web/shared/fu/RemoverFn.java | 3 +- .../shared/requests/RollupTableRequest.java | 14 +- .../deephaven/web/shared/util/ParseUtils.java | 4 +- .../ast/AbstractReplacingVisitorTest.java | 7 +- .../web/shared/ast/ConvertEqToInTest.java | 12 +- .../ast/ConvertInvalidInExpressionsTest.java | 12 +- .../web/shared/ast/FilterTestUtils.java | 37 +- .../FlipNonReferenceMatchExpressionTest.java | 27 +- .../ast/MakeExpressionsNullSafeTest.java | 21 +- .../ast/MergeNestedBinaryOperationsTest.java | 84 +- .../MergeRelatedSiblingExpressionsTest.java | 50 +- .../web/shared/ast/NormalizeNotsTest.java | 122 +- .../web/shared/data/RangeSetTest.java | 137 +- 2426 files changed, 63787 insertions(+), 82644 deletions(-) diff --git a/Base/src/main/java/io/deephaven/base/ArrayUtil.java 
b/Base/src/main/java/io/deephaven/base/ArrayUtil.java index 48000b07537..91df3fe8759 100644 --- a/Base/src/main/java/io/deephaven/base/ArrayUtil.java +++ b/Base/src/main/java/io/deephaven/base/ArrayUtil.java @@ -51,8 +51,7 @@ public static T[] deleteArrayPos(int i, T[] a) { assert a != null && i < a.length; T[] new_a = null; if (a.length > 1) { - new_a = (T[]) java.lang.reflect.Array.newInstance(a.getClass().getComponentType(), - a.length - 1); + new_a = (T[]) java.lang.reflect.Array.newInstance(a.getClass().getComponentType(), a.length - 1); System.arraycopy(a, 0, new_a, 0, i); System.arraycopy(a, i + 1, new_a, i, a.length - (i + 1)); } @@ -182,8 +181,7 @@ public static boolean replaceInArray(T e, T[] a) { return false; } - public static T[] addUnless(T[] a, Class c, Predicate.Unary pred, - Function.Nullary factory) { + public static T[] addUnless(T[] a, Class c, Predicate.Unary pred, Function.Nullary factory) { if (a != null) { for (int i = 0; i < a.length; ++i) { if (pred.call(a[i])) { @@ -194,8 +192,8 @@ public static T[] addUnless(T[] a, Class c, Predicate.Unary pred, return pushArray(factory.call(), a, c); } - public static T[] addUnless(T[] a, Class c, Predicate.Binary pred, - Function.Unary factory, A arg) { + public static T[] addUnless(T[] a, Class c, Predicate.Binary pred, Function.Unary factory, + A arg) { if (a != null) { for (int i = 0; i < a.length; ++i) { if (pred.call(a[i], arg)) { @@ -206,8 +204,8 @@ public static T[] addUnless(T[] a, Class c, Predicate.Binary pre return pushArray(factory.call(arg), a, c); } - public static T[] replaceOrAdd(T[] a, Class c, Predicate.Binary pred, - Function.Unary factory, A arg) { + public static T[] replaceOrAdd(T[] a, Class c, Predicate.Binary pred, Function.Unary factory, + A arg) { if (a != null) { for (int i = 0; i < a.length; ++i) { if (pred.call(a[i], arg)) { @@ -663,18 +661,15 @@ public static T[] insert(T[] a, int i, T v, Class c) { return newArray; } - public static T[] insert(T[] a, int insertionPoint, 
int numElements, T v, - Class c) { + public static T[] insert(T[] a, int insertionPoint, int numElements, T v, Class c) { if (a.length < numElements + 1) { T[] a2 = extendNoCopy(a, numElements, c); System.arraycopy(a, 0, a2, 0, insertionPoint); a2[insertionPoint] = v; - System.arraycopy(a, insertionPoint, a2, insertionPoint + 1, - numElements - insertionPoint); + System.arraycopy(a, insertionPoint, a2, insertionPoint + 1, numElements - insertionPoint); return a2; } else { - System.arraycopy(a, insertionPoint, a, insertionPoint + 1, - numElements - insertionPoint); + System.arraycopy(a, insertionPoint, a, insertionPoint + 1, numElements - insertionPoint); a[insertionPoint] = v; return a; } @@ -1281,8 +1276,7 @@ public static int hashCodeAnyOrderAnySign(double a[], int min, int length) { return result; } - public static Map mapFromArray(Class keyType, Class valueType, - Object... data) { + public static Map mapFromArray(Class keyType, Class valueType, Object... data) { Map map = new HashMap(data.length); for (int nIndex = 0; nIndex < data.length; nIndex += 2) { Object key = data[nIndex]; @@ -1332,8 +1326,7 @@ public static String toString(long[] longs, int nOffset, int nLength) { } } - public static StringBuilder appendIntArray(final StringBuilder sb, final int[] vs, - final boolean compact) { + public static StringBuilder appendIntArray(final StringBuilder sb, final int[] vs, final boolean compact) { for (int i = 0; i < vs.length; ++i) { if (i != 0) { if (compact) { @@ -1446,13 +1439,12 @@ public static boolean isSorted(double[] doubles) { public static > boolean isSorted(T[] objects) { if (objects != null && objects.length >= 2) { for (int i = 1; i < objects.length; i++) { - // if the previous one is null it's either smaller or equal to the next 'null' and - // therefore sorted + // if the previous one is null it's either smaller or equal to the next 'null' and therefore sorted if ((objects[i - 1] == null)) { continue; } - // if the later element is null (previous 
one cannot be) then its out of order, - // otherwise compare values (o1 < o2 == negative integer) + // if the later element is null (previous one cannot be) then its out of order, otherwise compare values + // (o1 < o2 == negative integer) if (objects[i] == null || objects[i].compareTo(objects[i - 1]) < 0) { return false; } diff --git a/Base/src/main/java/io/deephaven/base/Base64.java b/Base/src/main/java/io/deephaven/base/Base64.java index 782358dd707..5605583ae4e 100644 --- a/Base/src/main/java/io/deephaven/base/Base64.java +++ b/Base/src/main/java/io/deephaven/base/Base64.java @@ -14,9 +14,9 @@ public static String byteArrayToBase64(byte[] a) { } /** - * Translates the specified byte array into an "alternate representation" Base64 string. This - * non-standard variant uses an alphabet that does not contain the uppercase alphabetic - * characters, which makes it suitable for use in situations where case-folding occurs. + * Translates the specified byte array into an "alternate representation" Base64 string. This non-standard variant + * uses an alphabet that does not contain the uppercase alphabetic characters, which makes it suitable for use in + * situations where case-folding occurs. */ static String byteArrayToAltBase64(byte[] a) { return byteArrayToBase64(a, true); @@ -63,8 +63,8 @@ private static String byteArrayToBase64(byte[] a, boolean alternate) { } /** - * This array is a lookup table that translates 6-bit positive integer index values into their - * "Base64 Alphabet" equivalents as specified in Table 1 of RFC 2045. + * This array is a lookup table that translates 6-bit positive integer index values into their "Base64 Alphabet" + * equivalents as specified in Table 1 of RFC 2045. 
*/ private static final char intToBase64[] = { 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', @@ -75,10 +75,9 @@ private static String byteArrayToBase64(byte[] a, boolean alternate) { }; /** - * This array is a lookup table that translates 6-bit positive integer index values into their - * "Alternate Base64 Alphabet" equivalents. This is NOT the real Base64 Alphabet as per in Table - * 1 of RFC 2045. This alternate alphabet does not use the capital letters. It is designed for - * use in environments where "case folding" occurs. + * This array is a lookup table that translates 6-bit positive integer index values into their "Alternate Base64 + * Alphabet" equivalents. This is NOT the real Base64 Alphabet as per in Table 1 of RFC 2045. This alternate + * alphabet does not use the capital letters. It is designed for use in environments where "case folding" occurs. */ private static final char intToAltBase64[] = { '!', '"', '#', '$', '%', '&', '\'', '(', ')', ',', '-', '.', ':', @@ -100,8 +99,8 @@ public static byte[] base64ToByteArray(String s) { /** * Translates the specified "alternate representation" Base64 string into a byte array. * - * @throw IllegalArgumentException or ArrayOutOfBoundsException if s is not a valid - * alternate representation Base64 string. + * @throw IllegalArgumentException or ArrayOutOfBoundsException if s is not a valid alternate + * representation Base64 string. 
*/ public static byte[] altBase64ToByteArray(String s) { return base64ToByteArray(s, true); @@ -113,7 +112,7 @@ private static byte[] base64ToByteArray(String s, boolean alternate) { int numGroups = sLen / 4; if (4 * numGroups != sLen) throw new IllegalArgumentException( - "String length must be a multiple of four."); + "String length must be a multiple of four."); int missingBytesInLastGroup = 0; int numFullGroups = numGroups; if (sLen != 0) { @@ -155,11 +154,10 @@ private static byte[] base64ToByteArray(String s, boolean alternate) { } /** - * Translates the specified character, which is assumed to be in the "Base 64 Alphabet" into its - * equivalent 6-bit positive integer. + * Translates the specified character, which is assumed to be in the "Base 64 Alphabet" into its equivalent 6-bit + * positive integer. * - * @throw IllegalArgumentException or ArrayOutOfBoundsException if c is not in the Base64 - * Alphabet. + * @throw IllegalArgumentException or ArrayOutOfBoundsException if c is not in the Base64 Alphabet. */ private static int base64toInt(char c, byte[] alphaToInt) { int result = alphaToInt[c]; @@ -169,10 +167,9 @@ private static int base64toInt(char c, byte[] alphaToInt) { } /** - * This array is a lookup table that translates unicode characters drawn from the "Base64 - * Alphabet" (as specified in Table 1 of RFC 2045) into their 6-bit positive integer - * equivalents. Characters that are not in the Base64 alphabet but fall within the bounds of the - * array are translated to -1. + * This array is a lookup table that translates unicode characters drawn from the "Base64 Alphabet" (as specified in + * Table 1 of RFC 2045) into their 6-bit positive integer equivalents. Characters that are not in the Base64 + * alphabet but fall within the bounds of the array are translated to -1. 
*/ private static final byte base64ToInt[] = { -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, @@ -185,8 +182,8 @@ private static int base64toInt(char c, byte[] alphaToInt) { }; /** - * This array is the analogue of base64ToInt, but for the nonstandard variant that avoids the - * use of uppercase alphabetic characters. + * This array is the analogue of base64ToInt, but for the nonstandard variant that avoids the use of uppercase + * alphabetic characters. */ private static final byte altBase64ToInt[] = { -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, diff --git a/Base/src/main/java/io/deephaven/base/ClassUtil.java b/Base/src/main/java/io/deephaven/base/ClassUtil.java index 9e8d251bf35..78bde6be23b 100644 --- a/Base/src/main/java/io/deephaven/base/ClassUtil.java +++ b/Base/src/main/java/io/deephaven/base/ClassUtil.java @@ -30,9 +30,9 @@ public static void dumpFinals(final Logger log, final String prefix, final Objec final String name = f.getName(); final Object value = f.get(p); log.info(prefix - + tName - + " " + name - + " = " + value.toString()); + + tName + + " " + name + + " = " + value.toString()); } catch (Exception ignored) { } } diff --git a/Base/src/main/java/io/deephaven/base/CompareUtils.java b/Base/src/main/java/io/deephaven/base/CompareUtils.java index 0c20ebc71aa..bbc8a7e0215 100644 --- a/Base/src/main/java/io/deephaven/base/CompareUtils.java +++ b/Base/src/main/java/io/deephaven/base/CompareUtils.java @@ -60,8 +60,7 @@ public static boolean equals(Object data1, Object data2) { } /** - * Returns true if the given objects are both null, or equal by the first object's - * {@link #equals} method. + * Returns true if the given objects are both null, or equal by the first object's {@link #equals} method. 
*/ public static boolean nullSafeEquals(Object left, Object right) { if (null == left) { @@ -72,13 +71,12 @@ public static boolean nullSafeEquals(Object left, Object right) { } /** - * You can't do double1 == double2 because floating point numbers are not exact values. Just - * make sure that x-y is less than some allowable error factor. + * You can't do double1 == double2 because floating point numbers are not exact values. Just make sure that x-y is + * less than some allowable error factor. * * @param x * @param y - * @return True if the two doubles are equal to each other (or so close that we don't care that - * they are different). + * @return True if the two doubles are equal to each other (or so close that we don't care that they are different). */ public static boolean EQ(double x, double y) { return doubleEquals(x, y); @@ -93,13 +91,13 @@ public static boolean doubleEqualsZero(double x) { } /** - * You can't do double1 == double2 because floating point numbers are not exact values. Just - * make sure that x-y is less than some allowable error factor. + * You can't do double1 == double2 because floating point numbers are not exact values. Just make sure that x-y is + * less than some allowable error factor. * * @param x * @param y - * @return True if the two doubles are equal to each other (or so close that we don't care that - * they are different). Also true if both are NaN. + * @return True if the two doubles are equal to each other (or so close that we don't care that they are different). + * Also true if both are NaN. 
*/ public static boolean doubleEquals(double x, double y, double tolerance) { return (Double.isNaN(x) && Double.isNaN(y)) || (Math.abs(x - y) < tolerance); @@ -119,17 +117,17 @@ public static boolean doubleEquals2(double x, double y) { public static boolean doubleEquals2(double x, double y, double tolerance) { return Double.compare(x, y) == 0 || - Math.abs(x - y) < tolerance; + Math.abs(x - y) < tolerance; } /** - * Since logical comparison of double values considerig error is effectively a three-value - * logic, you can't really do !equals when you mean notEquals. + * Since logical comparison of double values considerig error is effectively a three-value logic, you can't really + * do !equals when you mean notEquals. * * @param x * @param y - * @return True if two doubles are apart from each other enough that we consider them different. - * False if both of them are NaN + * @return True if two doubles are apart from each other enough that we consider them different. False if both of + * them are NaN */ public static boolean NE(double x, double y) { return doubleNotEquals(x, y); @@ -158,12 +156,12 @@ public static boolean doubleNotEquals2(double x, double y) { public static boolean doubleNotEquals2(double x, double y, double tolerance) { return Double.compare(x, y) != 0 && - !(Math.abs(x - y) < tolerance); + !(Math.abs(x - y) < tolerance); } /** - * You can't do double1 > double2 because floating point numbers are not exact values. Just make - * sure that x-y is greater than some allowable error factor for equality + * You can't do double1 > double2 because floating point numbers are not exact values. Just make sure that x-y is + * greater than some allowable error factor for equality * * @param x * @param y @@ -194,8 +192,8 @@ public static boolean doubleGreaterEqual(double x, double y, double tolerance) { } /** - * You can't do double1 < double2 because floating point numbers are not exact values. 
Just make - * sure that y - x is greater than some allowable error factor for equality + * You can't do double1 < double2 because floating point numbers are not exact values. Just make sure that y - x is + * greater than some allowable error factor for equality * * @param x * @param y diff --git a/Base/src/main/java/io/deephaven/base/Encodeable.java b/Base/src/main/java/io/deephaven/base/Encodeable.java index 610bac47c84..dfaab857da1 100644 --- a/Base/src/main/java/io/deephaven/base/Encodeable.java +++ b/Base/src/main/java/io/deephaven/base/Encodeable.java @@ -9,8 +9,8 @@ import java.io.IOException; /** - * This interface is a slightly weaker version of java.io.Externalizable, in that it only allows the - * use of the DataInput and -Output interfaces for reading and writing, not ObjectInput and -Output. + * This interface is a slightly weaker version of java.io.Externalizable, in that it only allows the use of the + * DataInput and -Output interfaces for reading and writing, not ObjectInput and -Output. */ public interface Encodeable { public void encode(DataOutput out) throws IOException; diff --git a/Base/src/main/java/io/deephaven/base/FIFOMutex.java b/Base/src/main/java/io/deephaven/base/FIFOMutex.java index b105cb3b3ce..74b55e90325 100644 --- a/Base/src/main/java/io/deephaven/base/FIFOMutex.java +++ b/Base/src/main/java/io/deephaven/base/FIFOMutex.java @@ -39,10 +39,9 @@ public void lock() { } int spins = 0; boolean peekNotMe = true; - while ((peekNotMe && (peekNotMe = (threads.peek() != me))) || // once we've peeked ourselves - // once, we don't need to do - // it again! - !leader.compareAndSet(null, me)) { + while ((peekNotMe && (peekNotMe = (threads.peek() != me))) || // once we've peeked ourselves once, we don't need + // to do it again! 
+ !leader.compareAndSet(null, me)) { if ((++spins % 1000) == 0) { LockSupport.park(this); @@ -62,7 +61,7 @@ public void lock() { if (debugName != null) { lastLeadChange = System.nanoTime(); log.info("FIFOMutex: " + debugName + ": thread " + me.getName() + " leading after " - + ((lastLeadChange - t0 + 500) / 1000) + " micros"); + + ((lastLeadChange - t0 + 500) / 1000) + " micros"); } } @@ -70,7 +69,7 @@ public void unlock() { Thread me = Thread.currentThread(); if (debugName != null) { log.info("FIFOMutex: " + debugName + ": thread " + me.getName() + " handing off after " - + ((System.nanoTime() - lastLeadChange + 500) / 1000) + " micros"); + + ((System.nanoTime() - lastLeadChange + 500) / 1000) + " micros"); } if (!leader.compareAndSet(me, null)) { throw new IllegalStateException("wrong thread called handoff"); diff --git a/Base/src/main/java/io/deephaven/base/FIFOSemaphore.java b/Base/src/main/java/io/deephaven/base/FIFOSemaphore.java index 9d16f2b07aa..0a82c2efcab 100644 --- a/Base/src/main/java/io/deephaven/base/FIFOSemaphore.java +++ b/Base/src/main/java/io/deephaven/base/FIFOSemaphore.java @@ -74,10 +74,9 @@ private void acquire(int toAcquire, boolean doUnpark) { int spins = 0; int resourcesAvailable; boolean peekNotMe = true; - while ((peekNotMe && (peekNotMe = (threads.peek() != me))) || // once we've peeked ourselves - // once, we don't need to do - // it again! - (resourcesAvailable = getAndDecreaseIfCan(toAcquire)) < toAcquire) { + while ((peekNotMe && (peekNotMe = (threads.peek() != me))) || // once we've peeked ourselves once, we don't need + // to do it again! 
+ (resourcesAvailable = getAndDecreaseIfCan(toAcquire)) < toAcquire) { if ((++spins % spinsUntilPark) == 0) { LockSupport.park(this); diff --git a/Base/src/main/java/io/deephaven/base/FileUtils.java b/Base/src/main/java/io/deephaven/base/FileUtils.java index 1a94dd10ca2..c59aec3eb24 100644 --- a/Base/src/main/java/io/deephaven/base/FileUtils.java +++ b/Base/src/main/java/io/deephaven/base/FileUtils.java @@ -28,8 +28,8 @@ public boolean accept(File dir, String name) { private final static String[] EMPTY_STRING_ARRAY = new String[0]; /** - * Cleans the specified path. All files and subdirectories in the path will be deleted. (ie - * you'll be left with an empty directory). + * Cleans the specified path. All files and subdirectories in the path will be deleted. (ie you'll be left with an + * empty directory). * * @param path The path to clean */ @@ -64,33 +64,30 @@ public static void deleteRecursively(File file) { } /** - * Move files accepted by a filter from their relative path under source to the same relative - * path under destination. Creates missing destination subdirectories as needed. + * Move files accepted by a filter from their relative path under source to the same relative path under + * destination. Creates missing destination subdirectories as needed. * * @param source Must be a directory. * @param destination Must be a directory if it exists. * @param filter Applied to normal files, only. We recurse on directories automatically. - * @param allowReplace If the destination regular file exists, do we replace it, or silently - * ignore it? + * @param allowReplace If the destination regular file exists, do we replace it, or silently ignore it? 
*/ public static void moveRecursively(File source, File destination, @Nullable FileFilter filter, - boolean allowReplace) { + boolean allowReplace) { Require.neqNull(source, "source"); Require.requirement(source.isDirectory(), "source.isDirectory()"); Require.neqNull(destination, "destination"); Require.requirement(!destination.exists() || destination.isDirectory(), - "!destination.exists() || destination.isDirectory()"); - moveRecursivelyInternal(source, destination, new RecursingNormalFileFilter(filter), - allowReplace); + "!destination.exists() || destination.isDirectory()"); + moveRecursivelyInternal(source, destination, new RecursingNormalFileFilter(filter), allowReplace); } private static void moveRecursivelyInternal(File source, File destination, FileFilter filter, - boolean allowReplace) { + boolean allowReplace) { final boolean sourceIsDirectory = source.isDirectory(); if (sourceIsDirectory) { for (final File file : source.listFiles(filter)) { - moveRecursivelyInternal(file, new File(destination, file.getName()), filter, - allowReplace); + moveRecursivelyInternal(file, new File(destination, file.getName()), filter, allowReplace); } return; } @@ -100,38 +97,34 @@ private static void moveRecursivelyInternal(File source, File destination, FileF final File destinationParent = destination.getParentFile(); if (!destinationParent.isDirectory()) { if (destinationParent.exists()) { - throw new IllegalArgumentException("Destination parent " - + destinationParent.getAbsolutePath() - + " exists but is not a directory, when moving " + source.getAbsolutePath()); + throw new IllegalArgumentException("Destination parent " + destinationParent.getAbsolutePath() + + " exists but is not a directory, when moving " + source.getAbsolutePath()); } if (!destinationParent.mkdirs()) { throw new RuntimeException("Failed to create missing destination parent directory " - + destinationParent.getAbsolutePath() + " when moving " - + source.getAbsolutePath()); + + 
destinationParent.getAbsolutePath() + " when moving " + source.getAbsolutePath()); } } if (!source.renameTo(destination)) { - throw new RuntimeException("Failed to move file " + source.getAbsolutePath() + " to " - + destination.getAbsolutePath()); + throw new RuntimeException( + "Failed to move file " + source.getAbsolutePath() + " to " + destination.getAbsolutePath()); } } /** - * Recursive delete method that copes with .nfs files. Uses the file's parent as the trash - * directory. + * Recursive delete method that copes with .nfs files. Uses the file's parent as the trash directory. * * @param file */ public static void deleteRecursivelyOnNFS(File file) { - deleteRecursivelyOnNFS(new File(file.getParentFile(), '.' + file.getName() + ".trash"), - file); + deleteRecursivelyOnNFS(new File(file.getParentFile(), '.' + file.getName() + ".trash"), file); } /** * Recursive delete method that copes with .nfs files. * - * @param trashFile Filename to move regular files to before deletion. .nfs files may be created - * in its parent directory. + * @param trashFile Filename to move regular files to before deletion. .nfs files may be created in its parent + * directory. * @param fileToBeDeleted File or directory at which to begin recursive deletion. 
*/ public static void deleteRecursivelyOnNFS(final File trashFile, final File fileToBeDeleted) { @@ -143,18 +136,16 @@ public static void deleteRecursivelyOnNFS(final File trashFile, final File fileT } } if (!fileToBeDeleted.delete()) { - throw new RuntimeException("Failed to delete expected empty directory " - + fileToBeDeleted.getAbsolutePath()); + throw new RuntimeException( + "Failed to delete expected empty directory " + fileToBeDeleted.getAbsolutePath()); } } else if (fileToBeDeleted.exists()) { if (!fileToBeDeleted.renameTo(trashFile)) { - throw new RuntimeException( - "Failed to move file " + fileToBeDeleted.getAbsolutePath() + throw new RuntimeException("Failed to move file " + fileToBeDeleted.getAbsolutePath() + " to temporary location " + trashFile.getAbsolutePath()); } if (!trashFile.delete()) { - throw new RuntimeException( - "Failed to delete temporary location " + trashFile.getAbsolutePath() + throw new RuntimeException("Failed to delete temporary location " + trashFile.getAbsolutePath() + " for file " + fileToBeDeleted.getAbsolutePath()); } } @@ -242,8 +233,8 @@ public static String readTextFile(InputStream txtFile) throws IOException { } /** - * I have no idea what to call this class. It accepts all directories, and normal files accepted - * by its delegate filter. + * I have no idea what to call this class. It accepts all directories, and normal files accepted by its delegate + * filter. 
*/ private static class RecursingNormalFileFilter implements FileFilter { @@ -255,8 +246,8 @@ private RecursingNormalFileFilter(FileFilter normalFileFilter) { @Override public boolean accept(File pathname) { - return pathname.isDirectory() || (pathname.isFile() - && (normalFileFilter == null || normalFileFilter.accept(pathname))); + return pathname.isDirectory() + || (pathname.isFile() && (normalFileFilter == null || normalFileFilter.accept(pathname))); } } } diff --git a/Base/src/main/java/io/deephaven/base/Function.java b/Base/src/main/java/io/deephaven/base/Function.java index 5ee60a9549d..38b2481fb28 100644 --- a/Base/src/main/java/io/deephaven/base/Function.java +++ b/Base/src/main/java/io/deephaven/base/Function.java @@ -4,8 +4,8 @@ package io.deephaven.base; -@SuppressWarnings("unused") // There's no reason to force anyone to figure out that 5-ary is quinary - // (etc) ever again, so don't complain about unused interfaces. +@SuppressWarnings("unused") // There's no reason to force anyone to figure out that 5-ary is quinary (etc) ever again, + // so don't complain about unused interfaces. 
public class Function { @FunctionalInterface diff --git a/Base/src/main/java/io/deephaven/base/LockFreeArrayQueue.java b/Base/src/main/java/io/deephaven/base/LockFreeArrayQueue.java index 1ac40c6644c..58527be48f6 100644 --- a/Base/src/main/java/io/deephaven/base/LockFreeArrayQueue.java +++ b/Base/src/main/java/io/deephaven/base/LockFreeArrayQueue.java @@ -13,23 +13,20 @@ /** * A Java implementation of the algorithm described in: * - * Philippas Tsigas, Yi Zhang, "A simple, fast and scalable non-blocking concurrent FIFO queue for - * shared memory multiprocessor systems", Proceedings of the thirteenth annual ACM symposium on - * Parallel algorithms and architectures, p.134-143, July 2001, Crete Island, Greece + * Philippas Tsigas, Yi Zhang, "A simple, fast and scalable non-blocking concurrent FIFO queue for shared memory + * multiprocessor systems", Proceedings of the thirteenth annual ACM symposium on Parallel algorithms and architectures, + * p.134-143, July 2001, Crete Island, Greece * - * This version modifies the way we choose which NULL to use when dequeuing: 1) We let the head and - * tail pointers range over the entire set of 32-bit unsigned values. We can convert a 32-bit - * unsigned integer into a node index with the mod operator (or a bit mask, if we limit the queue - * sizes to powers of two). 2) On each successive "pass" over the array, we want to alternate - * between NULL(0) and NULL(1), that is, the first time the head pointer goes from zero to cap, we - * replace dequeued values with NULL(0), then when head wraps back to zero we switch to using - * NULL(1). Since we allow head to range over all 32-bit values, we can compute which null to use a - * NULL((head / cap) % 2). If we are using powers of two, then the low-order bits [0,N] specify the - * index into the nodes array, and bit N+1 specifies whether to use NULL(0) or NULL(1) when - * dequeuing. 
+ * This version modifies the way we choose which NULL to use when dequeuing: 1) We let the head and tail pointers range + * over the entire set of 32-bit unsigned values. We can convert a 32-bit unsigned integer into a node index with the + * mod operator (or a bit mask, if we limit the queue sizes to powers of two). 2) On each successive "pass" over the + * array, we want to alternate between NULL(0) and NULL(1), that is, the first time the head pointer goes from zero to + * cap, we replace dequeued values with NULL(0), then when head wraps back to zero we switch to using NULL(1). Since we + * allow head to range over all 32-bit values, we can compute which null to use a NULL((head / cap) % 2). If we are + * using powers of two, then the low-order bits [0,N] specify the index into the nodes array, and bit N+1 specifies + * whether to use NULL(0) or NULL(1) when dequeuing. */ -public class LockFreeArrayQueue - implements ConcurrentQueue, ProducerConsumer.MultiProducerConsumer { +public class LockFreeArrayQueue implements ConcurrentQueue, ProducerConsumer.MultiProducerConsumer { /* private */ final int cap; // capacity of the queue - a power of two /* private */ final int mask; @@ -80,8 +77,8 @@ public static int getMaxAllowedCapacity() { public LockFreeArrayQueue(int log2cap) { if (log2cap < LOG2CAP_MIN || log2cap > LOG2CAP_MAX) { - throw new IllegalArgumentException("log2cap must be in [" + LOG2CAP_MIN + "," - + LOG2CAP_MAX + "], got " + log2cap + "."); + throw new IllegalArgumentException( + "log2cap must be in [" + LOG2CAP_MIN + "," + LOG2CAP_MAX + "], got " + log2cap + "."); } this.cap = 1 << log2cap; this.mask = cap - 1; @@ -190,8 +187,7 @@ public boolean enqueue(T new_value) { // if ( debug_search_count > cap/2 ) { // debug_stop(); // synchronized ( this ) { - // debug_dump(head.get(), tail.get(), head0, tail0, actual_tail, new_value, - // debug_search_count); + // debug_dump(head.get(), tail.get(), head0, tail0, actual_tail, new_value, debug_search_count); 
// } // debug_go(); // } @@ -285,8 +281,7 @@ private T do_dequeue(boolean peek, T expected, Predicate.Unary predicate) { while (true) { // initial value of the head index - must not change while we are working final int head0 = head.get(); - // initial value of the tail index - most not change while we are searching for the - // actual head + // initial value of the tail index - most not change while we are searching for the actual head final int tail0 = tail.get(); // the slot from which we are going to dequeue int actual_head = head0 + 1; @@ -399,16 +394,15 @@ private void debug_check(String what) { // } } - private void debug_dump(int h, int t, int h0, int t0, int at, Object new_value, - int scan_count) { + private void debug_dump(int h, int t, int h0, int t0, int at, Object new_value, int scan_count) { if (scan_count > 0) { - System.out.println("LFAQ.enqueuing " + new_value + ": scanned " + scan_count - + " slots looking for actual tail" - + ", h0=" + h0 + "=" + (h0 % cap) + "/" + (h0 >> shift) + "/" + get_null(h0) - + ", t0=" + t0 + "=" + (t0 % cap) + "/" + (t0 >> shift) + "/" + get_null(t0) - + ", h=" + h + "=" + (h % cap) + "/" + (h >> shift) + "/" + get_null(h) - + ", t=" + t + "=" + (t % cap) + "/" + (t >> shift) + "/" + get_null(t) - + ", at=" + at + "=" + (at % cap) + "/" + (at >> shift) + "/" + get_null(at)); + System.out.println( + "LFAQ.enqueuing " + new_value + ": scanned " + scan_count + " slots looking for actual tail" + + ", h0=" + h0 + "=" + (h0 % cap) + "/" + (h0 >> shift) + "/" + get_null(h0) + + ", t0=" + t0 + "=" + (t0 % cap) + "/" + (t0 >> shift) + "/" + get_null(t0) + + ", h=" + h + "=" + (h % cap) + "/" + (h >> shift) + "/" + get_null(h) + + ", t=" + t + "=" + (t % cap) + "/" + (t >> shift) + "/" + get_null(t) + + ", at=" + at + "=" + (at % cap) + "/" + (at >> shift) + "/" + get_null(at)); } for (int i = 0; i < cap; ++i) { diff --git a/Base/src/main/java/io/deephaven/base/LongRingBuffer.java 
b/Base/src/main/java/io/deephaven/base/LongRingBuffer.java index d9da13eeaf6..787b0b85155 100644 --- a/Base/src/main/java/io/deephaven/base/LongRingBuffer.java +++ b/Base/src/main/java/io/deephaven/base/LongRingBuffer.java @@ -8,10 +8,10 @@ import java.util.NoSuchElementException; /** - * A trivial circular buffer for primitive longs, like java.util.concurrent.ArrayBlockingQueue but - * without all the synchronization and collection cruft. Storage is between head (incl.) and tail - * (excl.) wrapping around the end of the array. If the buffer is *not* growable, it will make room - * for a new element by dropping off the oldest element in the buffer instead. + * A trivial circular buffer for primitive longs, like java.util.concurrent.ArrayBlockingQueue but without all the + * synchronization and collection cruft. Storage is between head (incl.) and tail (excl.) wrapping around the end of the + * array. If the buffer is *not* growable, it will make room for a new element by dropping off the oldest element in the + * buffer instead. */ public class LongRingBuffer implements Serializable { diff --git a/Base/src/main/java/io/deephaven/base/LowGarbageArrayIntegerMap.java b/Base/src/main/java/io/deephaven/base/LowGarbageArrayIntegerMap.java index 8bad2bc4d84..03c49a894a1 100644 --- a/Base/src/main/java/io/deephaven/base/LowGarbageArrayIntegerMap.java +++ b/Base/src/main/java/io/deephaven/base/LowGarbageArrayIntegerMap.java @@ -16,13 +16,12 @@ // -------------------------------------------------------------------- /** - * A very simple {@link Map} for small maps with Integer keys (uses direct array access) that - * creates no garbage (except when expanding). This set only has one {@link Iterator}, which is - * reused. This set is not thread safe. + * A very simple {@link Map} for small maps with Integer keys (uses direct array access) that creates no garbage (except + * when expanding). This set only has one {@link Iterator}, which is reused. This set is not thread safe. *

- * Note: This class extends {@link HashMap} rather than {@link Map} (or {@link AbstractMap}) only - * because one of the fields where we want to use it ({@link sun.nio.ch.EPollSelectorImpl#fdToKey}) - * is (improperly) declared as a HashMap rather than a Map. + * Note: This class extends {@link HashMap} rather than {@link Map} (or {@link AbstractMap}) only because one of the + * fields where we want to use it ({@link sun.nio.ch.EPollSelectorImpl#fdToKey}) is (improperly) declared as a HashMap + * rather than a Map. */ public class LowGarbageArrayIntegerMap extends HashMap { diff --git a/Base/src/main/java/io/deephaven/base/LowGarbageArrayList.java b/Base/src/main/java/io/deephaven/base/LowGarbageArrayList.java index 2c922cb5660..aaf774149ac 100644 --- a/Base/src/main/java/io/deephaven/base/LowGarbageArrayList.java +++ b/Base/src/main/java/io/deephaven/base/LowGarbageArrayList.java @@ -14,8 +14,8 @@ // -------------------------------------------------------------------- /** - * This is a special version of {@link ArrayList} that can be substituted for a regular Array list - * but produces no garbage. It only has one iterator, which is reused. It is not thread safe. + * This is a special version of {@link ArrayList} that can be substituted for a regular Array list but produces no + * garbage. It only has one iterator, which is reused. It is not thread safe. */ public class LowGarbageArrayList extends ArrayList { @@ -49,14 +49,14 @@ private class Itr implements Iterator { int cursor; /** - * Index of element returned by most recent call to next or previous. Reset to -1 if this - * element is deleted by a call to remove. + * Index of element returned by most recent call to next or previous. Reset to -1 if this element is deleted by + * a call to remove. */ int lastRet; /** - * The modCount value that the iterator believes that the backing List should have. If this - * expectation is violated, the iterator has detected concurrent modification. 
+ * The modCount value that the iterator believes that the backing List should have. If this expectation is + * violated, the iterator has detected concurrent modification. */ int expectedModCount; diff --git a/Base/src/main/java/io/deephaven/base/LowGarbageArraySet.java b/Base/src/main/java/io/deephaven/base/LowGarbageArraySet.java index ee7152bf05a..488e543e623 100644 --- a/Base/src/main/java/io/deephaven/base/LowGarbageArraySet.java +++ b/Base/src/main/java/io/deephaven/base/LowGarbageArraySet.java @@ -13,13 +13,12 @@ // -------------------------------------------------------------------- /** - * A very simple {@link Set} for small sets (uses linear time algorithms) that creates no garbage - * (except when expanding). This set only has one {@link Iterator}, which is reused. This set is not - * thread safe. + * A very simple {@link Set} for small sets (uses linear time algorithms) that creates no garbage (except when + * expanding). This set only has one {@link Iterator}, which is reused. This set is not thread safe. *

- * Note: This class extends {@link HashSet} rather than {@link Set} (or {@link AbstractSet}) only - * because one of the fields where we want to use it ({@link sun.nio.ch.SelectorImpl#keys}) is - * (improperly) declared as a HashSet rather than a Set. + * Note: This class extends {@link HashSet} rather than {@link Set} (or {@link AbstractSet}) only because one of the + * fields where we want to use it ({@link sun.nio.ch.SelectorImpl#keys}) is (improperly) declared as a HashSet rather + * than a Set. */ public class LowGarbageArraySet extends HashSet { @@ -67,8 +66,7 @@ public boolean isEmpty() { @Override public boolean contains(Object o) { for (int nIndex = 0; nIndex < m_nElements; nIndex++) { - if ((null == o && null == m_elements[nIndex]) - || (null != o && o.equals(m_elements[nIndex]))) { + if ((null == o && null == m_elements[nIndex]) || (null != o && o.equals(m_elements[nIndex]))) { return true; } } @@ -79,8 +77,7 @@ public boolean contains(Object o) { @Override public boolean add(T t) { for (int nIndex = 0; nIndex < m_nElements; nIndex++) { - if ((null == t && null == m_elements[nIndex]) - || (null != t && t.equals(m_elements[nIndex]))) { + if ((null == t && null == m_elements[nIndex]) || (null != t && t.equals(m_elements[nIndex]))) { return false; } } @@ -98,8 +95,7 @@ public boolean add(T t) { @Override public boolean remove(Object o) { for (int nIndex = 0; nIndex < m_nElements; nIndex++) { - if ((null == o && null == m_elements[nIndex]) - || (null != o && o.equals(m_elements[nIndex]))) { + if ((null == o && null == m_elements[nIndex]) || (null != o && o.equals(m_elements[nIndex]))) { m_elements[nIndex] = m_elements[--m_nElements]; m_elements[m_nElements] = null; return true; diff --git a/Base/src/main/java/io/deephaven/base/Procedure.java b/Base/src/main/java/io/deephaven/base/Procedure.java index 05602c8134b..dea6526d659 100644 --- a/Base/src/main/java/io/deephaven/base/Procedure.java +++ b/Base/src/main/java/io/deephaven/base/Procedure.java @@ -4,8 
+4,8 @@ package io.deephaven.base; -@SuppressWarnings("unused") // There's no reason to force anyone to figure out that 5-ary is quinary - // (etc) ever again, so don't complain about unused interfaces. +@SuppressWarnings("unused") // There's no reason to force anyone to figure out that 5-ary is quinary (etc) ever again, + // so don't complain about unused interfaces. public class Procedure { @FunctionalInterface diff --git a/Base/src/main/java/io/deephaven/base/RAPriQueue.java b/Base/src/main/java/io/deephaven/base/RAPriQueue.java index 5e1ce60c66c..9a068290b60 100644 --- a/Base/src/main/java/io/deephaven/base/RAPriQueue.java +++ b/Base/src/main/java/io/deephaven/base/RAPriQueue.java @@ -63,8 +63,7 @@ public void enter(T el) { int k = adapter.getPos(el); if (k <= 0) { if (++size == queue.length) { - T[] newQueue = - (T[]) java.lang.reflect.Array.newInstance(elementClass, 2 * queue.length); + T[] newQueue = (T[]) java.lang.reflect.Array.newInstance(elementClass, 2 * queue.length); System.arraycopy(queue, 0, newQueue, 0, size); queue = newQueue; } @@ -201,8 +200,7 @@ public int dump(T2[] result, int startIndex, Function.Unary f) { boolean testInvariantAux(int i, String what) { if (i <= size) { if (adapter.getPos(queue[i]) != i) { - System.err.println( - what + ": queue[" + i + "].tqPos=" + (adapter.getPos(queue[i])) + " != " + i); + System.err.println(what + ": queue[" + i + "].tqPos=" + (adapter.getPos(queue[i])) + " != " + i); } if (!testInvariantAux(i * 2, what)) { return false; @@ -212,8 +210,8 @@ boolean testInvariantAux(int i, String what) { } if (i > 1) { if (adapter.less(queue[i], queue[i / 2])) { - System.err.println(what + ": child[" + i + "]=" + queue[i] + " < parent[" - + (i / 2) + "]=" + queue[i / 2]); + System.err.println( + what + ": child[" + i + "]=" + queue[i] + " < parent[" + (i / 2) + "]=" + queue[i / 2]); return false; } } @@ -226,8 +224,7 @@ boolean testInvariant(String what) { if (result) { for (int i = size + 1; i < queue.length; ++i) { if 
(queue[i] != null) { - System.err.println( - what + ": size = " + size + ", child[" + i + "]=" + queue[i] + " != null"); + System.err.println(what + ": size = " + size + ", child[" + i + "]=" + queue[i] + " != null"); result = false; } } diff --git a/Base/src/main/java/io/deephaven/base/RingBuffer.java b/Base/src/main/java/io/deephaven/base/RingBuffer.java index 5da346b0ac7..6d6f94e9c7c 100644 --- a/Base/src/main/java/io/deephaven/base/RingBuffer.java +++ b/Base/src/main/java/io/deephaven/base/RingBuffer.java @@ -8,8 +8,8 @@ import java.util.NoSuchElementException; /** - * A trivial circular buffer, like java.util.concurrent.ArrayBlockingQueue but without all the - * synchronization and collection cruft. + * A trivial circular buffer, like java.util.concurrent.ArrayBlockingQueue but without all the synchronization and + * collection cruft. */ public class RingBuffer { private Object[] storage; diff --git a/Base/src/main/java/io/deephaven/base/SafeCloneable.java b/Base/src/main/java/io/deephaven/base/SafeCloneable.java index 0904445868c..2f141dbc73a 100644 --- a/Base/src/main/java/io/deephaven/base/SafeCloneable.java +++ b/Base/src/main/java/io/deephaven/base/SafeCloneable.java @@ -5,11 +5,11 @@ package io.deephaven.base; /** - * This interface specifies a safe clone operation that never throws a CloneNotSupported exception, - * and also allows a bound to be placed on the result object's type. + * This interface specifies a safe clone operation that never throws a CloneNotSupported exception, and also allows a + * bound to be placed on the result object's type. * - * Note that any class that extends a base that implements SafeCloneable must *always* re-implement - * the safeClone method. + * Note that any class that extends a base that implements SafeCloneable must *always* re-implement the safeClone + * method. 
*/ public interface SafeCloneable extends Cloneable { T safeClone(); diff --git a/Base/src/main/java/io/deephaven/base/StringUtils.java b/Base/src/main/java/io/deephaven/base/StringUtils.java index 5553643ced4..8c029e8e883 100644 --- a/Base/src/main/java/io/deephaven/base/StringUtils.java +++ b/Base/src/main/java/io/deephaven/base/StringUtils.java @@ -75,8 +75,7 @@ public static int caseInsensitiveIndexOf(String source, String target) { if (i <= max) { int j = i + 1; int end = j + target.length() - 1; - for (int k = 1; j < end - && toUpperCase(source.charAt(j)) == toUpperCase(target.charAt(k)); j++, k++); + for (int k = 1; j < end && toUpperCase(source.charAt(j)) == toUpperCase(target.charAt(k)); j++, k++); if (j == end) { /* Found whole string. */ diff --git a/Base/src/main/java/io/deephaven/base/UnfairMutex.java b/Base/src/main/java/io/deephaven/base/UnfairMutex.java index 5f57d57550b..9b75b6fd9f6 100644 --- a/Base/src/main/java/io/deephaven/base/UnfairMutex.java +++ b/Base/src/main/java/io/deephaven/base/UnfairMutex.java @@ -52,8 +52,8 @@ public void lock() { if (leader.compareAndSet(null, me)) { // if ( debugName != null ) { // lastLeadChange = System.nanoTime(); - // log.info("UnfairMutex: "+debugName+": UNFAIR thread "+me.getName()+" leading - // after "+((lastLeadChange - t0 + 500) / 1000)+" micros"); + // log.info("UnfairMutex: "+debugName+": UNFAIR thread "+me.getName()+" leading after "+((lastLeadChange + // - t0 + 500) / 1000)+" micros"); // } return; } @@ -64,10 +64,9 @@ public void lock() { } int spins = 0; boolean peekNotMe = true; - while ((peekNotMe && (peekNotMe = (threads.peek() != me))) || // once we've peeked ourselves - // once, we don't need to do - // it again! - !leader.compareAndSet(null, me)) { + while ((peekNotMe && (peekNotMe = (threads.peek() != me))) || // once we've peeked ourselves once, we don't need + // to do it again! 
+ !leader.compareAndSet(null, me)) { if ((++spins % spinsUntilPark) == 0) { LockSupport.park(this); @@ -86,16 +85,16 @@ public void lock() { } // if ( debugName != null ) { // lastLeadChange = System.nanoTime(); - // log.info("UnfairMutex: "+debugName+": thread "+me.getName()+" leading after - // "+((lastLeadChange - t0 + 500) / 1000)+" micros"); + // log.info("UnfairMutex: "+debugName+": thread "+me.getName()+" leading after "+((lastLeadChange - t0 + 500) / + // 1000)+" micros"); // } } public void unlock() { Thread me = Thread.currentThread(); // if ( debugName != null ) { - // log.info("UnfairMutex: "+debugName+": thread "+me.getName()+" handing off after - // "+((System.nanoTime() - lastLeadChange + 500) / 1000)+" micros"); + // log.info("UnfairMutex: "+debugName+": thread "+me.getName()+" handing off after "+((System.nanoTime() - + // lastLeadChange + 500) / 1000)+" micros"); // } if (!leader.compareAndSet(me, null)) { throw new IllegalStateException("wrong thread called handoff"); diff --git a/Base/src/main/java/io/deephaven/base/UnfairSemaphore.java b/Base/src/main/java/io/deephaven/base/UnfairSemaphore.java index 1541132a1c5..749ffcd97f2 100644 --- a/Base/src/main/java/io/deephaven/base/UnfairSemaphore.java +++ b/Base/src/main/java/io/deephaven/base/UnfairSemaphore.java @@ -88,10 +88,9 @@ private boolean acquire(int toAcquire, boolean doUnpark) { int resourcesAvailable; int spins = 0; boolean peekNotMe = true; - while ((peekNotMe && (peekNotMe = (threads.peek() != me))) || // once we've peeked ourselves - // once, we don't need to do - // it again! - (resourcesAvailable = getAndDecreaseIfCan(toAcquire)) < toAcquire) { + while ((peekNotMe && (peekNotMe = (threads.peek() != me))) || // once we've peeked ourselves once, we don't need + // to do it again! 
+ (resourcesAvailable = getAndDecreaseIfCan(toAcquire)) < toAcquire) { if ((++spins % spinsUntilPark) == 0) { LockSupport.park(this); diff --git a/Base/src/main/java/io/deephaven/base/WeakReferenceManager.java b/Base/src/main/java/io/deephaven/base/WeakReferenceManager.java index 0b797a57242..29e6d0f823a 100644 --- a/Base/src/main/java/io/deephaven/base/WeakReferenceManager.java +++ b/Base/src/main/java/io/deephaven/base/WeakReferenceManager.java @@ -9,8 +9,8 @@ import java.util.function.Predicate; /** - * A helper for manging a list of WeakReferences. It hides the internal management of expired - * references and provides for iteration over the valid ones + * A helper for manging a list of WeakReferences. It hides the internal management of expired references and provides + * for iteration over the valid ones */ public class WeakReferenceManager { private final List> refs; @@ -23,8 +23,8 @@ public WeakReferenceManager() { } /** - * Create a WeakReferenceManager, with either {@link ArrayList} or {@link CopyOnWriteArrayList} - * as backing structure. + * Create a WeakReferenceManager, with either {@link ArrayList} or {@link CopyOnWriteArrayList} as backing + * structure. * * @param useCowList Use CopyOnWriteArrayList if true, else ArrayList. */ @@ -60,8 +60,8 @@ public void removeAll(Collection items) { } /** - * Execute the provided procedure on each listener that has not been GC'd. If a listener was - * GC'd the reference will be removed from the internal list of refs. + * Execute the provided procedure on each listener that has not been GC'd. If a listener was GC'd the reference will + * be removed from the internal list of refs. 
* * @param proc The procedure to call with each valid listener */ diff --git a/Base/src/main/java/io/deephaven/base/array/FastArray.java b/Base/src/main/java/io/deephaven/base/array/FastArray.java index d3f49eed2e2..be57ec6af58 100644 --- a/Base/src/main/java/io/deephaven/base/array/FastArray.java +++ b/Base/src/main/java/io/deephaven/base/array/FastArray.java @@ -52,16 +52,15 @@ public T call() { }, initialSize, true); } - // newInstance shouldn't use a pool or any special mechanism! (Sharing happens w/ newInstance w/ - // other cloned/copied instances) + // newInstance shouldn't use a pool or any special mechanism! (Sharing happens w/ newInstance w/ other cloned/copied + // instances) // need to use clazz b/c we want array to actually be an array of type T - public FastArray(final Class clazz, - final Function.Nullary newInstance) { + public FastArray(final Class clazz, final Function.Nullary newInstance) { this(clazz, newInstance, 0, true); } - public FastArray(final Class clazz, - final Function.Nullary newInstance, int initialSize, boolean preallocate) { + public FastArray(final Class clazz, final Function.Nullary newInstance, int initialSize, + boolean preallocate) { this.clazz = clazz; this.newInstance = newInstance; this.length = 0; @@ -106,8 +105,7 @@ public final boolean addUnique(T t) { return true; } - // useful when using FastArray w/ immutable objects. enums, string, etc. or when you must - // remember a previous value + // useful when using FastArray w/ immutable objects. enums, string, etc. or when you must remember a previous value public final void fastAdd(T t) { array = ArrayUtil.put(array, length, t, clazz); ++length; @@ -118,8 +116,8 @@ public final void fastAdd(T[] t, int startIndex, int len) { length += len; } - // useful when using FastArray as a cache / when FastArray is managing memory. must remember to - // reset or copy over returned value + // useful when using FastArray as a cache / when FastArray is managing memory. 
must remember to reset or copy over + // returned value public final T next() { T t; if ((length >= array.length) || ((t = array[length]) == null)) { @@ -191,11 +189,11 @@ public final T[] getUnsafeArray() { public T removeThisIndex(int index) { if (index >= length) { - throw new IllegalArgumentException("you tried to remove this index: " + index - + " when the array is only this long: " + length); + throw new IllegalArgumentException( + "you tried to remove this index: " + index + " when the array is only this long: " + length); } else if (index < 0) { - throw new IllegalArgumentException("you tried to remove this index: " + index - + " when we can only remove positive indices"); + throw new IllegalArgumentException( + "you tried to remove this index: " + index + " when we can only remove positive indices"); } else { final T t = array[index]; @@ -214,11 +212,11 @@ public T removeThisIndex(int index) { public T removeThisIndexDontCareAboutOrder(int index) { if (index >= length) { - throw new IllegalArgumentException("you tried to remove this index: " + index - + " when the array is only this long: " + length); + throw new IllegalArgumentException( + "you tried to remove this index: " + index + " when the array is only this long: " + length); } else if (index < 0) { - throw new IllegalArgumentException("you tried to remove this index: " + index - + " when we can only remove positive indices"); + throw new IllegalArgumentException( + "you tried to remove this index: " + index + " when we can only remove positive indices"); } else { final T t = array[index]; @@ -266,8 +264,7 @@ public String toStringXml(String pre) { msg.append(pre).append(extra).append("").append(length).append("\n"); msg.append(pre).append(extra).append("\n"); for (int i = 0; i < array.length; i++) { - msg.append(pre).append(extra).append(extra).append("").append(i) - .append("\n"); + msg.append(pre).append(extra).append(extra).append("").append(i).append("\n"); 
msg.append(pre).append(extra).append(extra).append("\n"); if (array[i] == null) { msg.append(pre).append(extra).append(extra).append(extra).append("null"); @@ -301,8 +298,7 @@ public static FastArray cloneShallow(final FastArray THIS) { * @param right content holder. Not-modified * @param */ - public static > void copyValuesDeep(final FastArray THIS, - final FastArray right) { + public static > void copyValuesDeep(final FastArray THIS, final FastArray right) { Assert.eqTrue(maybeCopyValuesDeep(THIS, right), "maybeCopyValuesDeep(THIS, right)"); } @@ -310,7 +306,7 @@ public static > void copyValuesDeep(final FastArray THI // Right still might have changed... use other methods to detect // However, if it returns false, we know right has changed! public static > boolean maybeCopyValuesDeep(final FastArray THIS, - final FastArray right) { + final FastArray right) { if (THIS == right) return true; @@ -334,13 +330,13 @@ public static > FastArray cloneDeep(final FastArray return clone; } - public static void writeExternal(final FastArray THIS, - ObjectOutput out) throws IOException { + public static void writeExternal(final FastArray THIS, ObjectOutput out) + throws IOException { writeExternal(THIS, out, THIS.getLength()); } - public static void writeExternal(final FastArray THIS, - ObjectOutput out, int maxToWrite) throws IOException { + public static void writeExternal(final FastArray THIS, ObjectOutput out, + int maxToWrite) throws IOException { maxToWrite = Math.min(maxToWrite, THIS.getLength()); out.writeInt(maxToWrite); for (int i = 0; i < maxToWrite; ++i) { @@ -348,8 +344,8 @@ public static void writeExternal(final FastArray T } } - public static void readExternal(final FastArray THIS, - ObjectInput in) throws IOException, ClassNotFoundException { + public static void readExternal(final FastArray THIS, ObjectInput in) + throws IOException, ClassNotFoundException { THIS.quickReset(); final int len = in.readInt(); for (int i = 0; i < len; ++i) { @@ -362,10 +358,9 @@ 
public static interface WriteExternalFunction { } public static void writeExternal(final FastArray THIS, ObjectOutput out, - WriteExternalFunction writeExternalFunction) throws IOException { + WriteExternalFunction writeExternalFunction) throws IOException { if (THIS == null) { - throw new IllegalArgumentException( - "FastArray.writeExternal(): THIS was null and is not supported"); + throw new IllegalArgumentException("FastArray.writeExternal(): THIS was null and is not supported"); } out.writeInt(THIS.length); for (int i = 0; i < THIS.length; ++i) { @@ -374,15 +369,13 @@ public static void writeExternal(final FastArray THIS, ObjectOutput out, } public static interface ReadExternalFunction { - public void readExternal(final ObjectInput in, final C item) - throws IOException, ClassNotFoundException; + public void readExternal(final ObjectInput in, final C item) throws IOException, ClassNotFoundException; } public static void readExternal(final FastArray THIS, ObjectInput in, - ReadExternalFunction readExternalFunction) throws IOException, ClassNotFoundException { + ReadExternalFunction readExternalFunction) throws IOException, ClassNotFoundException { if (THIS == null) { - throw new IllegalArgumentException( - "FastArray.readExternal(): THIS was null and is not supported"); + throw new IllegalArgumentException("FastArray.readExternal(): THIS was null and is not supported"); } THIS.quickReset(); final int len = in.readInt(); diff --git a/Base/src/main/java/io/deephaven/base/array/FastArrayExt.java b/Base/src/main/java/io/deephaven/base/array/FastArrayExt.java index 3f860fec762..4b81304268f 100644 --- a/Base/src/main/java/io/deephaven/base/array/FastArrayExt.java +++ b/Base/src/main/java/io/deephaven/base/array/FastArrayExt.java @@ -16,10 +16,10 @@ * */ public final class FastArrayExt> extends FastArray - implements Externalizable, Copyable> { + implements Externalizable, Copyable> { public static > Function.Nullary> createFactory( - final Class clazz, final 
Function.Nullary itemFactory) { + final Class clazz, final Function.Nullary itemFactory) { return new Function.Nullary>() { @Override public FastArrayExt call() { @@ -29,8 +29,8 @@ public FastArrayExt call() { } /** - * No empty args constructor. We should never be reading this directly off the wire, always goes - * through another readExternalizable + * No empty args constructor. We should never be reading this directly off the wire, always goes through another + * readExternalizable */ public FastArrayExt(final Class clazz) { @@ -41,14 +41,12 @@ public FastArrayExt(final Class clazz, final int initialSize) { super(clazz, initialSize); } - public FastArrayExt(final Class clazz, - final Function.Nullary newInstance) { + public FastArrayExt(final Class clazz, final Function.Nullary newInstance) { super(clazz, newInstance); } - public FastArrayExt(final Class clazz, - final Function.Nullary newInstance, final int initialSize, - final boolean preallocate) { + public FastArrayExt(final Class clazz, final Function.Nullary newInstance, + final int initialSize, final boolean preallocate) { super(clazz, newInstance, initialSize, preallocate); } diff --git a/Base/src/main/java/io/deephaven/base/array/FastBooleanArray.java b/Base/src/main/java/io/deephaven/base/array/FastBooleanArray.java index fec131b832f..a872b5e2093 100644 --- a/Base/src/main/java/io/deephaven/base/array/FastBooleanArray.java +++ b/Base/src/main/java/io/deephaven/base/array/FastBooleanArray.java @@ -75,11 +75,11 @@ public boolean[] getUnsafeArray() { public void removeThisIndex(int index) { if (index >= length) { - throw new IllegalArgumentException("you tried to remove this index: " + index - + " when the array is only this long: " + length); + throw new IllegalArgumentException( + "you tried to remove this index: " + index + " when the array is only this long: " + length); } else if (index < 0) { - throw new IllegalArgumentException("you tried to remove this index: " + index - + " when we can only 
remove positive indices"); + throw new IllegalArgumentException( + "you tried to remove this index: " + index + " when we can only remove positive indices"); } else { // move all the items ahead one index and reduce the length for (int i = index; i < length; i++) { @@ -138,7 +138,7 @@ public String toStringXml(String pre) { msg.append(pre).append("\n"); for (int i = 0; i < array.length; i++) { msg.append(pre).append(extra).append("").append(i).append("") - .append(length).append("").append(array[i]).append("\n"); + .append(length).append("").append(array[i]).append("\n"); } msg.append(pre).append("\n"); return msg.toString(); diff --git a/Base/src/main/java/io/deephaven/base/array/FastByteArray.java b/Base/src/main/java/io/deephaven/base/array/FastByteArray.java index 9067f89f46b..13a6126f012 100644 --- a/Base/src/main/java/io/deephaven/base/array/FastByteArray.java +++ b/Base/src/main/java/io/deephaven/base/array/FastByteArray.java @@ -75,11 +75,11 @@ public byte[] getUnsafeArray() { public void removeThisIndex(int index) { if (index >= length) { - throw new IllegalArgumentException("you tried to remove this index: " + index - + " when the array is only this long: " + length); + throw new IllegalArgumentException( + "you tried to remove this index: " + index + " when the array is only this long: " + length); } else if (index < 0) { - throw new IllegalArgumentException("you tried to remove this index: " + index - + " when we can only remove positive indices"); + throw new IllegalArgumentException( + "you tried to remove this index: " + index + " when we can only remove positive indices"); } else { // move all the items ahead one index and reduce the length for (int i = index; i < length; i++) { @@ -136,7 +136,7 @@ public String toStringXml(String pre) { msg.append(pre).append("\n"); for (int i = 0; i < array.length; i++) { msg.append(pre).append(extra).append("").append(i).append("") - .append(length).append("").append(array[i]).append("\n"); + 
.append(length).append("").append(array[i]).append("\n"); } msg.append(pre).append("\n"); return msg.toString(); diff --git a/Base/src/main/java/io/deephaven/base/array/FastCharArray.java b/Base/src/main/java/io/deephaven/base/array/FastCharArray.java index d1756840f0d..f6049c61dcc 100644 --- a/Base/src/main/java/io/deephaven/base/array/FastCharArray.java +++ b/Base/src/main/java/io/deephaven/base/array/FastCharArray.java @@ -75,11 +75,11 @@ public char[] getUnsafeArray() { public void removeThisIndex(int index) { if (index >= length) { - throw new IllegalArgumentException("you tried to remove this index: " + index - + " when the array is only this long: " + length); + throw new IllegalArgumentException( + "you tried to remove this index: " + index + " when the array is only this long: " + length); } else if (index < 0) { - throw new IllegalArgumentException("you tried to remove this index: " + index - + " when we can only remove positive indices"); + throw new IllegalArgumentException( + "you tried to remove this index: " + index + " when we can only remove positive indices"); } else { // move all the items ahead one index and reduce the length for (int i = index; i < length; i++) { @@ -138,7 +138,7 @@ public String toStringXml(String pre) { msg.append(pre).append("\n"); for (int i = 0; i < array.length; i++) { msg.append(pre).append(extra).append("").append(i).append("") - .append(length).append("").append(array[i]).append("\n"); + .append(length).append("").append(array[i]).append("\n"); } msg.append(pre).append("\n"); return msg.toString(); diff --git a/Base/src/main/java/io/deephaven/base/array/FastDoubleArray.java b/Base/src/main/java/io/deephaven/base/array/FastDoubleArray.java index 8f545984125..f936fe38ab7 100644 --- a/Base/src/main/java/io/deephaven/base/array/FastDoubleArray.java +++ b/Base/src/main/java/io/deephaven/base/array/FastDoubleArray.java @@ -75,11 +75,11 @@ public double[] getUnsafeArray() { public void removeThisIndex(int index) { if (index 
>= length) { - throw new IllegalArgumentException("you tried to remove this index: " + index - + " when the array is only this long: " + length); + throw new IllegalArgumentException( + "you tried to remove this index: " + index + " when the array is only this long: " + length); } else if (index < 0) { - throw new IllegalArgumentException("you tried to remove this index: " + index - + " when we can only remove positive indices"); + throw new IllegalArgumentException( + "you tried to remove this index: " + index + " when we can only remove positive indices"); } else { // move all the items ahead one index and reduce the length for (int i = index; i < length; i++) { @@ -138,7 +138,7 @@ public String toStringXml(String pre) { msg.append(pre).append("\n"); for (int i = 0; i < array.length; i++) { msg.append(pre).append(extra).append("").append(i).append("") - .append(length).append("").append(array[i]).append("\n"); + .append(length).append("").append(array[i]).append("\n"); } msg.append(pre).append("\n"); return msg.toString(); diff --git a/Base/src/main/java/io/deephaven/base/array/FastFloatArray.java b/Base/src/main/java/io/deephaven/base/array/FastFloatArray.java index 69da29d2384..c9d6b84e8eb 100644 --- a/Base/src/main/java/io/deephaven/base/array/FastFloatArray.java +++ b/Base/src/main/java/io/deephaven/base/array/FastFloatArray.java @@ -75,11 +75,11 @@ public float[] getUnsafeArray() { public void removeThisIndex(int index) { if (index >= length) { - throw new IllegalArgumentException("you tried to remove this index: " + index - + " when the array is only this long: " + length); + throw new IllegalArgumentException( + "you tried to remove this index: " + index + " when the array is only this long: " + length); } else if (index < 0) { - throw new IllegalArgumentException("you tried to remove this index: " + index - + " when we can only remove positive indices"); + throw new IllegalArgumentException( + "you tried to remove this index: " + index + " when we can 
only remove positive indices"); } else { // move all the items ahead one index and reduce the length for (int i = index; i < length; i++) { @@ -138,7 +138,7 @@ public String toStringXml(String pre) { msg.append(pre).append("\n"); for (int i = 0; i < array.length; i++) { msg.append(pre).append(extra).append("").append(i).append("") - .append(length).append("").append(array[i]).append("\n"); + .append(length).append("").append(array[i]).append("\n"); } msg.append(pre).append("\n"); return msg.toString(); diff --git a/Base/src/main/java/io/deephaven/base/array/FastIntArray.java b/Base/src/main/java/io/deephaven/base/array/FastIntArray.java index f571f84d27d..5c47093c55f 100644 --- a/Base/src/main/java/io/deephaven/base/array/FastIntArray.java +++ b/Base/src/main/java/io/deephaven/base/array/FastIntArray.java @@ -75,11 +75,11 @@ public int[] getUnsafeArray() { public void removeThisIndex(int index) { if (index >= length) { - throw new IllegalArgumentException("you tried to remove this index: " + index - + " when the array is only this long: " + length); + throw new IllegalArgumentException( + "you tried to remove this index: " + index + " when the array is only this long: " + length); } else if (index < 0) { - throw new IllegalArgumentException("you tried to remove this index: " + index - + " when we can only remove positive indices"); + throw new IllegalArgumentException( + "you tried to remove this index: " + index + " when we can only remove positive indices"); } else { // move all the items ahead one index and reduce the length for (int i = index; i < length; i++) { @@ -138,7 +138,7 @@ public String toStringXml(String pre) { msg.append(pre).append("\n"); for (int i = 0; i < array.length; i++) { msg.append(pre).append(extra).append("").append(i).append("") - .append(length).append("").append(array[i]).append("\n"); + .append(length).append("").append(array[i]).append("\n"); } msg.append(pre).append("\n"); return msg.toString(); diff --git 
a/Base/src/main/java/io/deephaven/base/array/FastLongArray.java b/Base/src/main/java/io/deephaven/base/array/FastLongArray.java index 58ff80b948f..4d80fac53a5 100644 --- a/Base/src/main/java/io/deephaven/base/array/FastLongArray.java +++ b/Base/src/main/java/io/deephaven/base/array/FastLongArray.java @@ -75,11 +75,11 @@ public long[] getUnsafeArray() { public void removeThisIndex(int index) { if (index >= length) { - throw new IllegalArgumentException("you tried to remove this index: " + index - + " when the array is only this long: " + length); + throw new IllegalArgumentException( + "you tried to remove this index: " + index + " when the array is only this long: " + length); } else if (index < 0) { - throw new IllegalArgumentException("you tried to remove this index: " + index - + " when we can only remove positive indices"); + throw new IllegalArgumentException( + "you tried to remove this index: " + index + " when we can only remove positive indices"); } else { // move all the items ahead one index and reduce the length for (int i = index; i < length; i++) { @@ -138,7 +138,7 @@ public String toStringXml(String pre) { msg.append(pre).append("\n"); for (int i = 0; i < array.length; i++) { msg.append(pre).append(extra).append("").append(i).append("") - .append(length).append("").append(array[i]).append("\n"); + .append(length).append("").append(array[i]).append("\n"); } msg.append(pre).append("\n"); return msg.toString(); diff --git a/Base/src/main/java/io/deephaven/base/array/FastShortArray.java b/Base/src/main/java/io/deephaven/base/array/FastShortArray.java index e2fba49aa1a..adb42471912 100644 --- a/Base/src/main/java/io/deephaven/base/array/FastShortArray.java +++ b/Base/src/main/java/io/deephaven/base/array/FastShortArray.java @@ -75,11 +75,11 @@ public short[] getUnsafeArray() { public void removeThisIndex(int index) { if (index >= length) { - throw new IllegalArgumentException("you tried to remove this index: " + index - + " when the array is only this 
long: " + length); + throw new IllegalArgumentException( + "you tried to remove this index: " + index + " when the array is only this long: " + length); } else if (index < 0) { - throw new IllegalArgumentException("you tried to remove this index: " + index - + " when we can only remove positive indices"); + throw new IllegalArgumentException( + "you tried to remove this index: " + index + " when we can only remove positive indices"); } else { // move all the items ahead one index and reduce the length for (int i = index; i < length; i++) { @@ -138,7 +138,7 @@ public String toStringXml(String pre) { msg.append(pre).append("\n"); for (int i = 0; i < array.length; i++) { msg.append(pre).append(extra).append("").append(i).append("") - .append(length).append("").append(array[i]).append("\n"); + .append(length).append("").append(array[i]).append("\n"); } msg.append(pre).append("\n"); return msg.toString(); diff --git a/Base/src/main/java/io/deephaven/base/array/PrimitiveReaderImpl.java b/Base/src/main/java/io/deephaven/base/array/PrimitiveReaderImpl.java index f045b192c2d..7844823d53e 100644 --- a/Base/src/main/java/io/deephaven/base/array/PrimitiveReaderImpl.java +++ b/Base/src/main/java/io/deephaven/base/array/PrimitiveReaderImpl.java @@ -17,8 +17,8 @@ * Wrapper for Unsafe operations, using reflection to operate on the appropriate class. 
*/ public class PrimitiveReaderImpl implements PrimitiveReader { - private static final Unsafe unsafe = AccessController.doPrivileged( - (PrivilegedAction) () -> JdkInternalsLoader.getInstance().getUnsafe()); + private static final Unsafe unsafe = AccessController + .doPrivileged((PrivilegedAction) () -> JdkInternalsLoader.getInstance().getUnsafe()); private long address; private long offset; @@ -26,7 +26,7 @@ public class PrimitiveReaderImpl implements PrimitiveReader { private long allocated; private static final Value reallocatedSize = - Stats.makeItem("PrimitiveReaderImpl", "reallocatedSize", State.FACTORY).getValue(); + Stats.makeItem("PrimitiveReaderImpl", "reallocatedSize", State.FACTORY).getValue(); public PrimitiveReaderImpl(int bytes) { this.allocated = bytes; @@ -35,8 +35,7 @@ public PrimitiveReaderImpl(int bytes) { address = unsafe.allocateMemory(allocated); if (address == 0) { - throw new OutOfMemoryError( - "Can't allocate unsafe memory... tried to allocate=" + allocated); + throw new OutOfMemoryError("Can't allocate unsafe memory... tried to allocate=" + allocated); } } @@ -54,8 +53,8 @@ private void ensureRemaining(int bytesRemaining) { final long newSize = Math.max(offset + bytesRemaining, allocated * 2); final long newAddress = unsafe.reallocateMemory(address, newSize); if (newAddress == 0) { - throw new OutOfMemoryError("Not enough memore for reallocate memory! currentSize=" - + allocated + ", newSize=" + newSize); + throw new OutOfMemoryError( + "Not enough memore for reallocate memory! 
currentSize=" + allocated + ", newSize=" + newSize); } address = newAddress; allocated = newSize; diff --git a/Base/src/main/java/io/deephaven/base/cache/KeyedObjectCache.java b/Base/src/main/java/io/deephaven/base/cache/KeyedObjectCache.java index 1bec75f80d0..7af3627c573 100644 --- a/Base/src/main/java/io/deephaven/base/cache/KeyedObjectCache.java +++ b/Base/src/main/java/io/deephaven/base/cache/KeyedObjectCache.java @@ -14,24 +14,21 @@ import java.util.Random; /** - * The central idea is that we can use an open-addressed map as a bounded cache with concurrent get - * and synchronized put access. + * The central idea is that we can use an open-addressed map as a bounded cache with concurrent get and synchronized put + * access. * - * Rather than rely on expensive and/or concurrency-destroying bookkeeping schemes to allow "smart" - * cache replacement, we rely on the assumption that our hash function and probe sequence - * computation does a fairly good job of distributing keyed objects that have a high likelihood of - * being useful to cache during overlapping timeframes. + * Rather than rely on expensive and/or concurrency-destroying bookkeeping schemes to allow "smart" cache replacement, + * we rely on the assumption that our hash function and probe sequence computation does a fairly good job of + * distributing keyed objects that have a high likelihood of being useful to cache during overlapping timeframes. * - * We never remove anything from the cache without replacing it with a new item. A callback is - * accepted to allow for item resource cleanup upon eviction from the cache. + * We never remove anything from the cache without replacing it with a new item. A callback is accepted to allow for + * item resource cleanup upon eviction from the cache. 
* - * The impact of collisions (for the bucket an item hashes to, or any other bucket in its associated - * probe sequence) is mitigated by randomized eviction of a victim item in a probe sequence of - * bounded length. + * The impact of collisions (for the bucket an item hashes to, or any other bucket in its associated probe sequence) is + * mitigated by randomized eviction of a victim item in a probe sequence of bounded length. * - * Note that, unlike common open-addressed hashing schemes, we're unconcerned with load factor - we - * have an explicitly bounded capacity, and explicitly bounded probe sequence length, which must be - * tuned for the workload in question. + * Note that, unlike common open-addressed hashing schemes, we're unconcerned with load factor - we have an explicitly + * bounded capacity, and explicitly bounded probe sequence length, which must be tuned for the workload in question. */ public class KeyedObjectCache { @@ -46,8 +43,8 @@ public class KeyedObjectCache { private final Procedure.Unary postEvictionProcedure; /** - * A source of pseudo-random numbers for choosing which slot in a bounded probe sequence to - * evict if no empty slots are found. + * A source of pseudo-random numbers for choosing which slot in a bounded probe sequence to evict if no empty slots + * are found. */ private final Random random; @@ -63,17 +60,17 @@ public class KeyedObjectCache { /** * @param capacity Lower bound on maximum capacity. Rounded up to to a prime number. - * @param probeSequenceLength Lower bound on number of slots to probe (inclusive of the one a - * key hashes directly to). Rounded up to a power of 2. + * @param probeSequenceLength Lower bound on number of slots to probe (inclusive of the one a key hashes directly + * to). Rounded up to a power of 2. * @param keyDefinition The key definition * @param postEvictionProcedure Optional. Invoked without any extra synchronization. 
* @param random Pseudo-random number generator */ public KeyedObjectCache(final int capacity, - final int probeSequenceLength, - final KeyedObjectKey keyDefinition, - @Nullable final Procedure.Unary postEvictionProcedure, - final Random random) { + final int probeSequenceLength, + final KeyedObjectKey keyDefinition, + @Nullable final Procedure.Unary postEvictionProcedure, + final Random random) { Require.gtZero(capacity, "capacity"); Require.inRange(probeSequenceLength, "probeSequenceLength", capacity / 2, "capacity / 2"); @@ -140,12 +137,9 @@ public final VALUE_TYPE putIfAbsent(VALUE_TYPE value) { int slot = hashCode % storage.length; final VALUE_TYPE evictedValue; - // NB: We might be better off getting our random to-evict probed-slot-index pessimistically - // before grabbing the - // lock. We could then eliminate the probedSlots array and its maintenance, and do the - // pseudo-random number - // generation without a lock. On the other hand, we'd create contention for atomic updates - // to the PRNG's + // NB: We might be better off getting our random to-evict probed-slot-index pessimistically before grabbing the + // lock. We could then eliminate the probedSlots array and its maintenance, and do the pseudo-random number + // generation without a lock. On the other hand, we'd create contention for atomic updates to the PRNG's // internal state, and potentially waste computation time if the pessimism was unwarranted. 
synchronized (storage) { VALUE_TYPE candidate = storage[slot]; diff --git a/Base/src/main/java/io/deephaven/base/cache/OpenAddressedCanonicalizationCache.java b/Base/src/main/java/io/deephaven/base/cache/OpenAddressedCanonicalizationCache.java index 38f45f05b8b..21a80af853d 100644 --- a/Base/src/main/java/io/deephaven/base/cache/OpenAddressedCanonicalizationCache.java +++ b/Base/src/main/java/io/deephaven/base/cache/OpenAddressedCanonicalizationCache.java @@ -13,30 +13,28 @@ import java.lang.ref.WeakReference; /** - * An object canonicalization cache, suitable for use with objects that define equals(...) in such a - * way as to identify objects that can be mutually substituted in a manner appropriate for the - * application using the cache. Objects with an improper hashCode() implementation will cause - * undefined behavior. + * An object canonicalization cache, suitable for use with objects that define equals(...) in such a way as to identify + * objects that can be mutually substituted in a manner appropriate for the application using the cache. Objects with an + * improper hashCode() implementation will cause undefined behavior. * - * See KeyedObjectHashMap and its parent classes for many of the ideas I'm working from. The - * implementation is (loosely) based on an open-addressed hash map. + * See KeyedObjectHashMap and its parent classes for many of the ideas I'm working from. The implementation is (loosely) + * based on an open-addressed hash map. * - * The intended problem domain is effectively single-threaded, so I've optimized on single-threaded - * efficiency and used coarse synchronization instead of optimizing for concurrency. + * The intended problem domain is effectively single-threaded, so I've optimized on single-threaded efficiency and used + * coarse synchronization instead of optimizing for concurrency. 
*/ public class OpenAddressedCanonicalizationCache { /** - * Allows cache users to supercede the equals() and hashCode() methods of their input items, and - * supply an alternative object to cache. + * Allows cache users to supercede the equals() and hashCode() methods of their input items, and supply an + * alternative object to cache. * * @param * @param */ public interface Adapter { /** - * Note: equals(inputItem, cachedItem) implies hashCode(inputItem) == cachedItem.hashCode() - * must be true. + * Note: equals(inputItem, cachedItem) implies hashCode(inputItem) == cachedItem.hashCode() must be true. * * @param inputItem The input item * @param cachedItem The cached item @@ -51,8 +49,8 @@ public interface Adapter { int hashCode(@NotNull final INPUT_TYPE inputItem); /** - * Note: The following must be true: hashCode(inputItem) == outputItem.hashCode() && - * equals(inputItem, outputItem) + * Note: The following must be true: hashCode(inputItem) == outputItem.hashCode() && equals(inputItem, + * outputItem) * * @param inputItem The input item * @return A cacheable version of inputItem. @@ -107,8 +105,7 @@ void markReclaimed() { private final ReferenceQueue cleanupQueue = new ReferenceQueue<>(); @SuppressWarnings("WeakerAccess") - public OpenAddressedCanonicalizationCache(final int minimumInitialCapacity, - final float loadFactor) { + public OpenAddressedCanonicalizationCache(final int minimumInitialCapacity, final float loadFactor) { this.loadFactor = Require.inRange(loadFactor, 0.0f, 1.0f, "loadFactor"); initialize(computeInitialCapacity(minimumInitialCapacity, loadFactor)); } @@ -141,21 +138,19 @@ int getOccupancyThreshold() { /** * Note: Intended for unit test use only. 
* - * @return The number of items in the cache (may be briefly larger, if the cleanupQueue needs to - * be drained) + * @return The number of items in the cache (may be briefly larger, if the cleanupQueue needs to be drained) */ int getOccupiedSlots() { return occupiedSlots; } - public synchronized INPUT_OUTPUT_TYPE getCachedItem( - @NotNull final INPUT_OUTPUT_TYPE item) { + public synchronized INPUT_OUTPUT_TYPE getCachedItem(@NotNull final INPUT_OUTPUT_TYPE item) { // noinspection unchecked return getCachedItem(item, (Adapter) DEFAULT_ADAPTER); } - public synchronized OUTPUT_TYPE getCachedItem( - @NotNull final INPUT_TYPE item, @NotNull final Adapter adapter) { + public synchronized OUTPUT_TYPE getCachedItem(@NotNull final INPUT_TYPE item, + @NotNull final Adapter adapter) { cleanup(); return getOrInsertCachedItem(item, adapter); } @@ -175,8 +170,8 @@ private void maybeReclaim(@NotNull final ItemReference itemReference) { } } - private OUTPUT_TYPE getOrInsertCachedItem( - @NotNull final INPUT_TYPE item, @NotNull final Adapter adapter) { + private OUTPUT_TYPE getOrInsertCachedItem(@NotNull final INPUT_TYPE item, + @NotNull final Adapter adapter) { final int length = storage.length; final int hashCode = adapter.hashCode(item) & 0x7FFFFFFF; final int probeInterval = computeProbeInterval(hashCode, length); @@ -187,10 +182,8 @@ private OUTPUT_TYPE getOrInsertCachedItem( final ItemReference candidateReference = storage[slot]; if (candidateReference == null) { final OUTPUT_TYPE cacheableItem = adapter.makeCacheableItem(item); - // Assert.eq(hashCode, "hashCode", cacheableItem.hashCode(), - // "cacheableItem.hashCode()"); - // Assert.assertion(adapter.equals(item, cacheableItem), "adapter.equals(item, - // cacheableItem)"); + // Assert.eq(hashCode, "hashCode", cacheableItem.hashCode(), "cacheableItem.hashCode()"); + // Assert.assertion(adapter.equals(item, cacheableItem), "adapter.equals(item, cacheableItem)"); if (firstDeletedSlot == -1) { --emptySlots; storage[slot] = 
new ItemReference<>(cacheableItem, cleanupQueue); @@ -247,8 +240,7 @@ private void rehash(final int newCapacity) { if (item != null) { insertReferenceForRehash(itemReference, item); } else if (!itemReference.reclaimed()) { - // NB: We don't need to decrement occupiedSlots here - we're instead not - // incrementing it. + // NB: We don't need to decrement occupiedSlots here - we're instead not incrementing it. itemReference.markReclaimed(); } } @@ -280,8 +272,7 @@ private void insertReferenceForRehash(final ItemReference itemReference, fina } maybeReclaim(candidateReference); } - // NB: No need to test if item.equals(candidate) here - should be impossible during a - // rehash. + // NB: No need to test if item.equals(candidate) here - should be impossible during a rehash. if ((slot -= probeInterval) < 0) { slot += length; @@ -290,8 +281,7 @@ private void insertReferenceForRehash(final ItemReference itemReference, fina } - private static int computeInitialCapacity(final int minimumInitialCapacity, - final float loadFactor) { + private static int computeInitialCapacity(final int minimumInitialCapacity, final float loadFactor) { return PrimeFinder.nextPrime((int) Math.ceil(minimumInitialCapacity / loadFactor) + 1); } diff --git a/Base/src/main/java/io/deephaven/base/cache/RetentionCache.java b/Base/src/main/java/io/deephaven/base/cache/RetentionCache.java index 4031b62e8e3..a71ff288355 100644 --- a/Base/src/main/java/io/deephaven/base/cache/RetentionCache.java +++ b/Base/src/main/java/io/deephaven/base/cache/RetentionCache.java @@ -12,25 +12,24 @@ import org.jetbrains.annotations.NotNull; /** - * Utility for holding strong references to otherwise unreachable classes (e.g. listeners that will - * be weakly held by the object they subscribe to). + * Utility for holding strong references to otherwise unreachable classes (e.g. listeners that will be weakly held by + * the object they subscribe to). 
*/ public class RetentionCache { private final TObjectIntMap retainedObjectToReferenceCount = - new TObjectIntCustomHashMap<>(IdentityHashingStrategy.INSTANCE, Constants.DEFAULT_CAPACITY, - Constants.DEFAULT_LOAD_FACTOR, 0); + new TObjectIntCustomHashMap<>(IdentityHashingStrategy.INSTANCE, Constants.DEFAULT_CAPACITY, + Constants.DEFAULT_LOAD_FACTOR, 0); /** - * Ask this RetentionCache to hold on to a reference in order to ensure that {@code referent} - * remains strongly-reachable for the garbage collector. + * Ask this RetentionCache to hold on to a reference in order to ensure that {@code referent} remains + * strongly-reachable for the garbage collector. * * @param referent The object to hold a reference to * @return {@code referent}, for convenience when retaining anonymous class instances */ public synchronized TYPE retain(@NotNull final TYPE referent) { - retainedObjectToReferenceCount.put(referent, - retainedObjectToReferenceCount.get(referent) + 1); + retainedObjectToReferenceCount.put(referent, retainedObjectToReferenceCount.get(referent) + 1); return referent; } diff --git a/Base/src/main/java/io/deephaven/base/classloaders/MapBackedClassLoader.java b/Base/src/main/java/io/deephaven/base/classloaders/MapBackedClassLoader.java index 83c8c8550e0..3abf881be14 100644 --- a/Base/src/main/java/io/deephaven/base/classloaders/MapBackedClassLoader.java +++ b/Base/src/main/java/io/deephaven/base/classloaders/MapBackedClassLoader.java @@ -10,8 +10,7 @@ import java.util.Map; /** - * @note This ClassLoader doesn't follow the standard delegation model - it tries to find the class - * itself first. + * @note This ClassLoader doesn't follow the standard delegation model - it tries to find the class itself first. 
*/ public class MapBackedClassLoader extends ClassLoader { @@ -22,8 +21,7 @@ public synchronized void addClassData(@NotNull final String name, @NotNull final } @Override - protected synchronized Class loadClass(final String name, final boolean resolve) - throws ClassNotFoundException { + protected synchronized Class loadClass(final String name, final boolean resolve) throws ClassNotFoundException { Class clazz = findLoadedClass(name); if (clazz == null) { try { diff --git a/Base/src/main/java/io/deephaven/base/clock/Clock.java b/Base/src/main/java/io/deephaven/base/clock/Clock.java index 61dc26f28ac..716deb0991a 100644 --- a/Base/src/main/java/io/deephaven/base/clock/Clock.java +++ b/Base/src/main/java/io/deephaven/base/clock/Clock.java @@ -29,9 +29,8 @@ public long currentTimeMicros() { Null NULL = new Null(); /** - * This implementation just returns the last value passed to set(). It allows for precise - * control over when clock calls are made to the underlying system (e.g. - * AppClock.currentTimeMicros()). + * This implementation just returns the last value passed to set(). It allows for precise control over when clock + * calls are made to the underlying system (e.g. AppClock.currentTimeMicros()). */ class Cached implements Clock { private long cachedNowMicros; @@ -52,8 +51,8 @@ public final long currentTimeMicros() { } /** - * This implementation is similar to cached, except that is calls set() itself on a the Clock - * instance given to the constructor exactly once between reset() calls. + * This implementation is similar to cached, except that is calls set() itself on a the Clock instance given to the + * constructor exactly once between reset() calls. 
*/ class CachedOnDemand implements Clock { private final Clock realClock; diff --git a/Base/src/main/java/io/deephaven/base/clock/TimeConstants.java b/Base/src/main/java/io/deephaven/base/clock/TimeConstants.java index d7e90663e9b..af7b159fb57 100644 --- a/Base/src/main/java/io/deephaven/base/clock/TimeConstants.java +++ b/Base/src/main/java/io/deephaven/base/clock/TimeConstants.java @@ -5,10 +5,9 @@ package io.deephaven.base.clock; public class TimeConstants { - // this constant is a stop gap that lets us detect if a timestamp is in millis or micros by - // looking at the magnitude, - // anything greater than this is assumed to be micros. This is the year 2265 - // (http://en.memory-alpha.org/wiki/2265) + // this constant is a stop gap that lets us detect if a timestamp is in millis or micros by looking at the + // magnitude, + // anything greater than this is assumed to be micros. This is the year 2265 (http://en.memory-alpha.org/wiki/2265) public static final long MICROTIME_THRESHOLD = 9309341000000L; public final static long SECOND = 1000; public final static long MINUTE = 60 * SECOND; diff --git a/Base/src/main/java/io/deephaven/base/formatters/EnumFormatter.java b/Base/src/main/java/io/deephaven/base/formatters/EnumFormatter.java index db13ae9f7ca..b127933a978 100644 --- a/Base/src/main/java/io/deephaven/base/formatters/EnumFormatter.java +++ b/Base/src/main/java/io/deephaven/base/formatters/EnumFormatter.java @@ -12,10 +12,9 @@ import java.util.Arrays; /** - * This class provides a {@code Format} object that converts from and to a comma-separated list of - * {@code String} values and their binary masks. (The first string corresponds to the value 1, the - * second to 2, the third to 4, etc.) Because of the use of values of 2, this conversion can handle - * bitsets. It is however limited to 31 possible enum values. 
+ * This class provides a {@code Format} object that converts from and to a comma-separated list of {@code String} values + * and their binary masks. (The first string corresponds to the value 1, the second to 2, the third to 4, etc.) Because + * of the use of values of 2, this conversion can handle bitsets. It is however limited to 31 possible enum values. */ public class EnumFormatter extends Format { @@ -31,8 +30,8 @@ protected StringBuilder initialValue() { protected final String possibleValuesString; /** - * Create a formatter for the sequence of {@code enums}, where the i-th enum in the sequence is - * associated with the value {@code Math.pow(2,i)} (starting with index 0 and value 1). + * Create a formatter for the sequence of {@code enums}, where the i-th enum in the sequence is associated with the + * value {@code Math.pow(2,i)} (starting with index 0 and value 1). */ public EnumFormatter(String enums[]) { strings = Arrays.copyOf(enums, enums.length); @@ -84,8 +83,7 @@ public String format(int index) { } @Deprecated - public StringBuffer format(Object obj, @NotNull StringBuffer toAppendTo, - @NotNull FieldPosition pos) { + public StringBuffer format(Object obj, @NotNull StringBuffer toAppendTo, @NotNull FieldPosition pos) { int num = ((Number) obj).intValue(); int nullValue = obj instanceof Byte ? Byte.MIN_VALUE : Integer.MIN_VALUE; @@ -101,12 +99,11 @@ public Object parseObject(String source, @NotNull ParsePosition pos) { } /** - * Return a binary bitset representation of the comma-separated string {@code s}, with the i-th - * bit set for the corresponding occurrence of the i-th enum (and i ranging from 0 to the length - * of the enums passed at construction). + * Return a binary bitset representation of the comma-separated string {@code s}, with the i-th bit set for the + * corresponding occurrence of the i-th enum (and i ranging from 0 to the length of the enums passed at + * construction). *

- * If an enum in s is not recognized, it is silently ignored and contributes no bit to the - * result. + * If an enum in s is not recognized, it is silently ignored and contributes no bit to the result. */ public int parse(String s) { if (s.length() < 12 && s.indexOf(',') == -1) { @@ -123,9 +120,9 @@ public int parse(String s) { } /** - * Return a binary bitset representation of the coma-separate string {@code s}, with the i-th - * bit set for the corresponding occurrence of the i-th enum (and i ranging from 0 to the length - * of the enums passed at construction). + * Return a binary bitset representation of the coma-separate string {@code s}, with the i-th bit set for the + * corresponding occurrence of the i-th enum (and i ranging from 0 to the length of the enums passed at + * construction). *

* If an enum in s is not recognized, an exception is thrown. */ @@ -136,8 +133,9 @@ public int parseErrorChecking(String s) throws ParseException { if (s.length() < 12 && s.indexOf(',') == -1) { int val = stringToEnums.get(s); if (val == 0) { - throw new ParseException("Unparseable enum: string=" + s + ", token=" + s - + ", possibleValues=" + possibleValuesString, 0); + throw new ParseException( + "Unparseable enum: string=" + s + ", token=" + s + ", possibleValues=" + possibleValuesString, + 0); } return val; } @@ -148,8 +146,9 @@ public int parseErrorChecking(String s) throws ParseException { int val = stringToEnums.get(ss); if (val == 0) { - throw new ParseException("Unparseable enum: string=" + s + ", token=" + ss - + ", possibleValues=" + possibleValuesString, 0); + throw new ParseException( + "Unparseable enum: string=" + s + ", token=" + ss + ", possibleValues=" + possibleValuesString, + 0); } result |= val; diff --git a/Base/src/main/java/io/deephaven/base/formatters/FormatBitSet.java b/Base/src/main/java/io/deephaven/base/formatters/FormatBitSet.java index f7990a5dabc..cd0b72be44c 100644 --- a/Base/src/main/java/io/deephaven/base/formatters/FormatBitSet.java +++ b/Base/src/main/java/io/deephaven/base/formatters/FormatBitSet.java @@ -26,8 +26,7 @@ public static LogOutputAppendable formatBitSet(final BitSet bitSet) { logOutput.append(runStart); int runEnd; // noinspection StatementWithEmptyBody - for (runEnd = runStart + 1; runEnd < bitSet.size() - && bitSet.get(runEnd); runEnd++); + for (runEnd = runStart + 1; runEnd < bitSet.size() && bitSet.get(runEnd); runEnd++); if (runEnd > runStart + 1) { logOutput.append("-").append(runEnd - 1); } diff --git a/Base/src/main/java/io/deephaven/base/log/LogOutput.java b/Base/src/main/java/io/deephaven/base/log/LogOutput.java index 1d9d2a34b74..56f8123cec4 100644 --- a/Base/src/main/java/io/deephaven/base/log/LogOutput.java +++ b/Base/src/main/java/io/deephaven/base/log/LogOutput.java @@ -16,13 +16,12 @@ // 
-------------------------------------------------------------------- /** - * Collects output for efficient textual logging. The append methods are intended to behave like - * StringBuilder to the extent possible, but the fast loggers make no pretense about handling - * Unicode or producing anything but good old ASCII. + * Collects output for efficient textual logging. The append methods are intended to behave like StringBuilder to the + * extent possible, but the fast loggers make no pretense about handling Unicode or producing anything but good old + * ASCII. *

- * Note that although the output will probably be single byte ASCII, we behave like StringBuilder - * and by the standard overload and promotion rules appending a byte actually appends an integer - * ("65") not a character ("A"). + * Note that although the output will probably be single byte ASCII, we behave like StringBuilder and by the standard + * overload and promotion rules appending a byte actually appends an integer ("65") not a character ("A"). */ public interface LogOutput { @@ -57,8 +56,8 @@ public interface LogOutput { LogOutput append(CharSequence seq, int start, int length); /** - * Appends the character equivalent of each byte remaining in the given ByteBuffer ([65 66 67] - * == "ABC" not "656667"), but does not modify the mark, position, or limit of the ByteBuffer. + * Appends the character equivalent of each byte remaining in the given ByteBuffer ([65 66 67] == "ABC" not + * "656667"), but does not modify the mark, position, or limit of the ByteBuffer. */ LogOutput append(ByteBuffer bb); @@ -164,27 +163,26 @@ public void format(LogOutput logOutput, Object o) { /** * Formats an InetSocketAddress */ - ObjFormatter SOCKADDR_FORMATTER = - new LogOutput.ObjFormatter() { - @Override - public void format(LogOutput logOutput, SocketAddress sockaddr) { - if (sockaddr instanceof InetSocketAddress) { - InetSocketAddress addr = (InetSocketAddress) sockaddr; - if (addr.getAddress() == null) { - logOutput.append("null"); - } else { - byte[] b = addr.getAddress().getAddress(); - logOutput.append((int) b[0] & 0xff); - for (int i = 1; i < b.length; ++i) { - logOutput.append('.').append((int) b[i] & 0xff); - } - logOutput.append(':').append(addr.getPort()); - } + ObjFormatter SOCKADDR_FORMATTER = new LogOutput.ObjFormatter() { + @Override + public void format(LogOutput logOutput, SocketAddress sockaddr) { + if (sockaddr instanceof InetSocketAddress) { + InetSocketAddress addr = (InetSocketAddress) sockaddr; + if (addr.getAddress() == null) { + 
logOutput.append("null"); } else { - BASIC_FORMATTER.format(logOutput, sockaddr); + byte[] b = addr.getAddress().getAddress(); + logOutput.append((int) b[0] & 0xff); + for (int i = 1; i < b.length; ++i) { + logOutput.append('.').append((int) b[i] & 0xff); + } + logOutput.append(':').append(addr.getPort()); } + } else { + BASIC_FORMATTER.format(logOutput, sockaddr); } - }; + } + }; /** * Formats an int array @@ -231,46 +229,45 @@ public void format(LogOutput logOutput, String[] array) { /** * Formats a String Collection */ - ObjFormatter> STRING_COLLECTION_FORMATTER = - new LogOutput.ObjFormatter>() { - @Override - public void format(LogOutput logOutput, Collection collection) { - if (collection == null) { - logOutput.append("null"); - } else if (collection.isEmpty()) { - logOutput.append("{}"); - } else { - char delim = '{'; - for (final String elem : collection) { - logOutput.append(delim).append(elem); - delim = ','; - } - logOutput.append('}'); + ObjFormatter> STRING_COLLECTION_FORMATTER = new LogOutput.ObjFormatter>() { + @Override + public void format(LogOutput logOutput, Collection collection) { + if (collection == null) { + logOutput.append("null"); + } else if (collection.isEmpty()) { + logOutput.append("{}"); + } else { + char delim = '{'; + for (final String elem : collection) { + logOutput.append(delim).append(elem); + delim = ','; } + logOutput.append('}'); } - }; + } + }; /** * Formats a String Collection */ ObjFormatter> APPENDABLE_COLLECTION_FORMATTER = - new LogOutput.ObjFormatter>() { - @Override - public void format(LogOutput logOutput, Collection collection) { - if (collection == null) { - logOutput.append("null"); - } else if (collection.isEmpty()) { - logOutput.append("{}"); - } else { - char delim = '{'; - for (final LogOutputAppendable elem : collection) { - logOutput.append(delim).append(elem); - delim = ','; + new LogOutput.ObjFormatter>() { + @Override + public void format(LogOutput logOutput, Collection collection) { + if (collection == 
null) { + logOutput.append("null"); + } else if (collection.isEmpty()) { + logOutput.append("{}"); + } else { + char delim = '{'; + for (final LogOutputAppendable elem : collection) { + logOutput.append(delim).append(elem); + delim = ','; + } + logOutput.append('}'); } - logOutput.append('}'); } - } - }; + }; /** * Formats a boolean array @@ -317,8 +314,8 @@ public void format(LogOutput logOutput, byte[] array) { logOutput.append("null"); } else { // This involves some overhead, but a timezone is really useful in logs - logOutput.append(millisFormatter.getTimestamp( - localDateTime.atZone(TimeZone.getDefault().toZoneId()).toInstant().toEpochMilli())); + logOutput.append(millisFormatter + .getTimestamp(localDateTime.atZone(TimeZone.getDefault().toZoneId()).toInstant().toEpochMilli())); } }; @@ -326,7 +323,7 @@ public void format(LogOutput logOutput, byte[] array) { * Formats long millis from epoch based on the default timezone */ LongFormatter MILLIS_FROM_EPOCH_FORMATTER = - (logOutput, millis) -> logOutput.append(millisFormatter.getTimestamp(millis)); + (logOutput, millis) -> logOutput.append(millisFormatter.getTimestamp(millis)); // --------------------------------------------------------------------------------------------- // null implementation @@ -370,8 +367,7 @@ public LogOutput appendDouble(double f) { return this; } - private LogOutput appendDouble(double f, int digits, boolean forceScientific, - boolean trailingZeroes) { + private LogOutput appendDouble(double f, int digits, boolean forceScientific, boolean trailingZeroes) { return this; } @@ -386,8 +382,7 @@ public LogOutput append(ObjFormatter objFormatter, T t) { } @Override - public LogOutput append(ObjIntIntFormatter objFormatter, T t, int nOffset, - int nLength) { + public LogOutput append(ObjIntIntFormatter objFormatter, T t, int nOffset, int nLength) { return this; } diff --git a/Base/src/main/java/io/deephaven/base/log/LogOutputAppendable.java 
b/Base/src/main/java/io/deephaven/base/log/LogOutputAppendable.java index a946a58ae92..f95a0bd5ced 100644 --- a/Base/src/main/java/io/deephaven/base/log/LogOutputAppendable.java +++ b/Base/src/main/java/io/deephaven/base/log/LogOutputAppendable.java @@ -5,8 +5,8 @@ package io.deephaven.base.log; /** - * Allows objects to be smart about appending themselves to LogOutput instances... (instead of - * always calling LogOutput.append(Object.toString())) + * Allows objects to be smart about appending themselves to LogOutput instances... (instead of always calling + * LogOutput.append(Object.toString())) */ public interface LogOutputAppendable { LogOutput append(LogOutput logOutput); diff --git a/Base/src/main/java/io/deephaven/base/map/FastArrayMap.java b/Base/src/main/java/io/deephaven/base/map/FastArrayMap.java index 0d19fedebe6..238e9e017b0 100644 --- a/Base/src/main/java/io/deephaven/base/map/FastArrayMap.java +++ b/Base/src/main/java/io/deephaven/base/map/FastArrayMap.java @@ -16,7 +16,7 @@ import java.util.Arrays; public class FastArrayMap & Externalizable & Copyable, V extends Externalizable & Copyable> - implements Copyable> { + implements Copyable> { private FastArray> array; @@ -118,12 +118,10 @@ public FastArrayMap safeClone() { } public static & Copyable, V extends Externalizable & Copyable> void writeExternal( - final FastArrayMap THIS, ObjectOutput out, - FastArray.WriteExternalFunction> writeExternalFunction) - throws IOException { + final FastArrayMap THIS, ObjectOutput out, + FastArray.WriteExternalFunction> writeExternalFunction) throws IOException { if (THIS == null) { - throw new IllegalArgumentException( - "FastArray.writeExternal(): THIS was null and is not supported"); + throw new IllegalArgumentException("FastArray.writeExternal(): THIS was null and is not supported"); } out.writeInt(THIS.array.getLength()); for (int i = 0; i < THIS.array.getLength(); ++i) { @@ -132,12 +130,11 @@ public static & Copyable, V extends } public static & Copyable, V 
extends Externalizable & Copyable> void readExternal( - final FastArrayMap THIS, ObjectInput in, - FastArray.ReadExternalFunction> readExternalFunction) - throws IOException, ClassNotFoundException { + final FastArrayMap THIS, ObjectInput in, + FastArray.ReadExternalFunction> readExternalFunction) + throws IOException, ClassNotFoundException { if (THIS == null) { - throw new IllegalArgumentException( - "FastArray.readExternal(): THIS was null and is not supported"); + throw new IllegalArgumentException("FastArray.readExternal(): THIS was null and is not supported"); } THIS.array.quickReset(); final int len = in.readInt(); diff --git a/Base/src/main/java/io/deephaven/base/map/FastArrayMapLongToObject.java b/Base/src/main/java/io/deephaven/base/map/FastArrayMapLongToObject.java index e9223ffda05..e25b230db4a 100644 --- a/Base/src/main/java/io/deephaven/base/map/FastArrayMapLongToObject.java +++ b/Base/src/main/java/io/deephaven/base/map/FastArrayMapLongToObject.java @@ -16,14 +16,13 @@ import java.util.Arrays; public class FastArrayMapLongToObject> - implements Copyable> { + implements Copyable> { private FastArray> array; public FastArrayMapLongToObject() {} - public FastArrayMapLongToObject( - final Function.Nullary> newInstance) { + public FastArrayMapLongToObject(final Function.Nullary> newInstance) { array = new FastArray>(newInstance); } @@ -107,13 +106,12 @@ public FastArrayMapLongToObject cloneDeep() { } - public static > void writeExternal( - final FastArrayMapLongToObject THIS, ObjectOutput out, - FastArray.WriteExternalFunction> writeExternalFunction) - throws IOException { + public static > void writeExternal(final FastArrayMapLongToObject THIS, + ObjectOutput out, + FastArray.WriteExternalFunction> writeExternalFunction) throws IOException { if (THIS == null) { throw new IllegalArgumentException( - "FastArrayMapLongToObject.writeExternal(): THIS was null and is not supported"); + "FastArrayMapLongToObject.writeExternal(): THIS was null and is not 
supported"); } out.writeInt(THIS.array.getLength()); for (int i = 0; i < THIS.array.getLength(); ++i) { @@ -121,13 +119,13 @@ public static > void writeExternal( } } - public static > void readExternal( - final FastArrayMapLongToObject THIS, ObjectInput in, - FastArray.ReadExternalFunction> readExternalFunction) - throws IOException, ClassNotFoundException { + public static > void readExternal(final FastArrayMapLongToObject THIS, + ObjectInput in, + FastArray.ReadExternalFunction> readExternalFunction) + throws IOException, ClassNotFoundException { if (THIS == null) { throw new IllegalArgumentException( - "FastArrayMapLongToObject.readExternal(): THIS was null and is not supported"); + "FastArrayMapLongToObject.readExternal(): THIS was null and is not supported"); } THIS.array.quickReset(); final int len = in.readInt(); @@ -180,8 +178,7 @@ public void copyValues(FastArrayMapLongToObject other) { @Override public FastArrayMapLongToObject safeClone() { - FastArrayMapLongToObject result = - new FastArrayMapLongToObject(array.getNewInstance()); + FastArrayMapLongToObject result = new FastArrayMapLongToObject(array.getNewInstance()); for (int i = 0; i < array.getLength(); i++) { KeyValuePairLongToObject pair = array.getUnsafeArray()[i]; result.put(pair.getKey(), pair.getValue()); diff --git a/Base/src/main/java/io/deephaven/base/map/KeyValuePair.java b/Base/src/main/java/io/deephaven/base/map/KeyValuePair.java index 6782836adf6..76b2affe4ae 100644 --- a/Base/src/main/java/io/deephaven/base/map/KeyValuePair.java +++ b/Base/src/main/java/io/deephaven/base/map/KeyValuePair.java @@ -9,7 +9,7 @@ import java.io.Externalizable; public class KeyValuePair & Copyable, V extends Externalizable & Copyable> - implements Comparable>, Copyable> { + implements Comparable>, Copyable> { private K key; private V value; diff --git a/Base/src/main/java/io/deephaven/base/map/KeyValuePairLongToObject.java b/Base/src/main/java/io/deephaven/base/map/KeyValuePairLongToObject.java index 
d42d2b865b9..c4f74fa7029 100644 --- a/Base/src/main/java/io/deephaven/base/map/KeyValuePairLongToObject.java +++ b/Base/src/main/java/io/deephaven/base/map/KeyValuePairLongToObject.java @@ -9,7 +9,7 @@ import java.io.Externalizable; public class KeyValuePairLongToObject> - implements Comparable>, Copyable> { + implements Comparable>, Copyable> { private long key; private V value; diff --git a/Base/src/main/java/io/deephaven/base/pool/Pool.java b/Base/src/main/java/io/deephaven/base/pool/Pool.java index ca8a124df97..679ed4538f3 100644 --- a/Base/src/main/java/io/deephaven/base/pool/Pool.java +++ b/Base/src/main/java/io/deephaven/base/pool/Pool.java @@ -14,15 +14,15 @@ public interface Pool { /** - * Takes an item from the pool. Depending on pool policy, if there are no items available, this - * may block, create a new item, or throw a {@link PoolEmptyException}. + * Takes an item from the pool. Depending on pool policy, if there are no items available, this may block, create a + * new item, or throw a {@link PoolEmptyException}. */ T take(); /** - * Gives an unused item back to the pool. Passing null is safe and has no effect. - * If the pool has a clearing procedure, the item will be cleared. Depending on pool policy, if - * the pool is full, this may block, discard the item, or throw a {@link PoolFullException}. + * Gives an unused item back to the pool. Passing null is safe and has no effect. If the pool has a + * clearing procedure, the item will be cleared. Depending on pool policy, if the pool is full, this may block, + * discard the item, or throw a {@link PoolFullException}. */ void give(T item); @@ -32,16 +32,15 @@ interface Factory { /** * Creates a new pool. * - * @param nSize A hint of the maximum number of items expected to be taken from the pool at - * once. The behavior when more items are taken depends on the pool. The pool may - * preallocate this many items. When the maximum can't be given, use 0. - * @param itemFactory Creates new items. 
May be null if items will be - * {@link Pool#give give}n rather than created. - * @param clearingProcedure Called on each item given to the pool to clear the fields of the - * item. May be null. + * @param nSize A hint of the maximum number of items expected to be taken from the pool at once. The behavior + * when more items are taken depends on the pool. The pool may preallocate this many items. When the + * maximum can't be given, use 0. + * @param itemFactory Creates new items. May be null if items will be {@link Pool#give give}n + * rather than created. + * @param clearingProcedure Called on each item given to the pool to clear the fields of the item. May be + * null. */ - Pool create(int nSize, Function.Nullary itemFactory, - Procedure.Unary clearingProcedure); + Pool create(int nSize, Function.Nullary itemFactory, Procedure.Unary clearingProcedure); } /** diff --git a/Base/src/main/java/io/deephaven/base/pool/ThreadSafeFixedSizePool.java b/Base/src/main/java/io/deephaven/base/pool/ThreadSafeFixedSizePool.java index a0946989ced..1babb6abbc0 100644 --- a/Base/src/main/java/io/deephaven/base/pool/ThreadSafeFixedSizePool.java +++ b/Base/src/main/java/io/deephaven/base/pool/ThreadSafeFixedSizePool.java @@ -26,10 +26,9 @@ public class ThreadSafeFixedSizePool implements Pool { public static Factory FACTORY = new Factory() { @Override - public Pool create(int nSize, Function.Nullary itemFactory, - Procedure.Unary clearingProcedure) { + public Pool create(int nSize, Function.Nullary itemFactory, Procedure.Unary clearingProcedure) { return new ThreadSafeFixedSizePool(Require.geq(nSize, "nSize", MIN_SIZE, "MIN_SIZE"), - Require.neqNull(itemFactory, "itemFactory"), clearingProcedure); + Require.neqNull(itemFactory, "itemFactory"), clearingProcedure); } }; @@ -42,21 +41,20 @@ public Pool create(int nSize, Function.Nullary itemFactory, private final String logPfx; private final Logger log; - public ThreadSafeFixedSizePool(int size, Function.Nullary factory, - 
Procedure.Unary clearingProcedure, @Nullable Logger log, @Nullable String logPfx) { + public ThreadSafeFixedSizePool(int size, Function.Nullary factory, Procedure.Unary clearingProcedure, + @Nullable Logger log, @Nullable String logPfx) { this(size, Require.neqNull(factory, "factory"), clearingProcedure, log, logPfx, false); } - protected ThreadSafeFixedSizePool(int size, Procedure.Unary clearingProcedure, Logger log, - String logPfx) { + protected ThreadSafeFixedSizePool(int size, Procedure.Unary clearingProcedure, Logger log, String logPfx) { this(size, null, clearingProcedure, log, logPfx, false); } private ThreadSafeFixedSizePool(int size, @Nullable Function.Nullary factory, - Procedure.Unary clearingProcedure, Logger log, String logPfx, boolean dummy) { + Procedure.Unary clearingProcedure, Logger log, String logPfx, boolean dummy) { Require.geq(size, "size", MIN_SIZE, "MIN_SIZE"); Require.requirement((log == null) == (logPfx == null), - "log and logPfx must either both be null, or both non-null"); + "log and logPfx must either both be null, or both non-null"); this.clearingProcedure = clearingProcedure; this.log = log; this.logPfx = logPfx; @@ -73,8 +71,7 @@ private ThreadSafeFixedSizePool(int size, @Nullable Function.Nullary factory, } } - public ThreadSafeFixedSizePool(int size, Function.Nullary factory, - Procedure.Unary clearingProcedure) { + public ThreadSafeFixedSizePool(int size, Function.Nullary factory, Procedure.Unary clearingProcedure) { this(size, factory, clearingProcedure, null, null); } @@ -102,8 +99,7 @@ public void give(T item) { if (now > nextGiveLog) { nextGiveLog = (now + 100000) - (now % 100000); long dt = (now - t0); - log.warn(logPfx + ": give() can't enqueue returned item, yield count = " - + yields); + log.warn(logPfx + ": give() can't enqueue returned item, yield count = " + yields); } } Thread.yield(); @@ -116,8 +112,8 @@ public void give(T item) { if (now > nextGiveLog) { nextGiveLog = (now + 100000) - (now % 100000); long dt = (now 
- t0); - log.warn(logPfx + ": give() took " + dt + " micros, with " + yields - + " yields and " + spins + " additional spins"); + log.warn(logPfx + ": give() took " + dt + " micros, with " + yields + " yields and " + spins + + " additional spins"); } } } @@ -143,7 +139,7 @@ public T take() { nextTakeLog = (now + 100000) - (now % 100000); long dt = (now - t0); log.warn(logPfx + ": take() can't dequeue from pool, waiting for " + dt - + " micros, yield count = " + yields); + + " micros, yield count = " + yields); } } Thread.yield(); @@ -157,8 +153,8 @@ public T take() { if (now > nextTakeLog) { nextTakeLog = (now + 100000) - (now % 100000); long dt = (now - t0); - log.warn(logPfx + ": take() took " + dt + " micros, with " + yields - + " yields and " + spins + " additional spins"); + log.warn(logPfx + ": take() took " + dt + " micros, with " + yields + " yields and " + spins + + " additional spins"); } } } diff --git a/Base/src/main/java/io/deephaven/base/pool/ThreadSafeLenientFixedSizePool.java b/Base/src/main/java/io/deephaven/base/pool/ThreadSafeLenientFixedSizePool.java index 9aebcfa895a..e43c125d41b 100644 --- a/Base/src/main/java/io/deephaven/base/pool/ThreadSafeLenientFixedSizePool.java +++ b/Base/src/main/java/io/deephaven/base/pool/ThreadSafeLenientFixedSizePool.java @@ -27,16 +27,14 @@ public class ThreadSafeLenientFixedSizePool implements Pool.MultiPool { public static Factory FACTORY = new Factory() { @Override - public Pool create(int nSize, Function.Nullary itemFactory, - Procedure.Unary clearingProcedure) { - return new ThreadSafeLenientFixedSizePool( - Require.geq(nSize, "nSize", MIN_SIZE, "MIN_SIZE"), - Require.neqNull(itemFactory, "itemFactory"), clearingProcedure); + public Pool create(int nSize, Function.Nullary itemFactory, Procedure.Unary clearingProcedure) { + return new ThreadSafeLenientFixedSizePool(Require.geq(nSize, "nSize", MIN_SIZE, "MIN_SIZE"), + Require.neqNull(itemFactory, "itemFactory"), clearingProcedure); } }; private static 
Function.Unary> makeNullaryFactoryAdapter( - final Function.Nullary factory) { + final Function.Nullary factory) { return new Function.Unary>() { @Override public T call(ThreadSafeLenientFixedSizePool arg) { @@ -53,31 +51,30 @@ public T call(ThreadSafeLenientFixedSizePool arg) { private final Counter extraFactoryCalls; public ThreadSafeLenientFixedSizePool(int size, Function.Nullary factory, - Procedure.Unary clearingProcedure) { + Procedure.Unary clearingProcedure) { this( - Require.geq(size, "size", MIN_SIZE, "MIN_SIZE"), - makeNullaryFactoryAdapter(Require.neqNull(factory, "factory")), - clearingProcedure); + Require.geq(size, "size", MIN_SIZE, "MIN_SIZE"), + makeNullaryFactoryAdapter(Require.neqNull(factory, "factory")), + clearingProcedure); } public ThreadSafeLenientFixedSizePool(String name, int size, Function.Nullary factory, - Procedure.Unary clearingProcedure) { + Procedure.Unary clearingProcedure) { this( - name, - Require.geq(size, "size", MIN_SIZE, "MIN_SIZE"), - makeNullaryFactoryAdapter(Require.neqNull(factory, "factory")), - clearingProcedure); + name, + Require.geq(size, "size", MIN_SIZE, "MIN_SIZE"), + makeNullaryFactoryAdapter(Require.neqNull(factory, "factory")), + clearingProcedure); } - public ThreadSafeLenientFixedSizePool(int size, - Function.Unary> factory, - Procedure.Unary clearingProcedure) { + public ThreadSafeLenientFixedSizePool(int size, Function.Unary> factory, + Procedure.Unary clearingProcedure) { this(null, size, factory, clearingProcedure); } public ThreadSafeLenientFixedSizePool(String name, int size, - Function.Unary> factory, - Procedure.Unary clearingProcedure) { + Function.Unary> factory, + Procedure.Unary clearingProcedure) { Require.geq(size, "size", MIN_SIZE, "MIN_SIZE"); Require.neqNull(factory, "factory"); this.factory = factory; @@ -86,8 +83,7 @@ public ThreadSafeLenientFixedSizePool(String name, int size, for (int i = 0; i < size; ++i) { pool.enqueue(factory.call(this)); } - extraFactoryCalls = name == null ? 
null - : Stats.makeItem(name, "extraFactoryCalls", Counter.FACTORY).getValue(); + extraFactoryCalls = name == null ? null : Stats.makeItem(name, "extraFactoryCalls", Counter.FACTORY).getValue(); } public T take() { diff --git a/Base/src/main/java/io/deephaven/base/queue/ConcurrentQueue.java b/Base/src/main/java/io/deephaven/base/queue/ConcurrentQueue.java index 72005ec6817..696bb044f7a 100644 --- a/Base/src/main/java/io/deephaven/base/queue/ConcurrentQueue.java +++ b/Base/src/main/java/io/deephaven/base/queue/ConcurrentQueue.java @@ -11,20 +11,17 @@ */ public interface ConcurrentQueue { /** - * Returns false when the queue is full This method should never block (but it may spin for a - * finite amount of time) + * Returns false when the queue is full This method should never block (but it may spin for a finite amount of time) */ boolean enqueue(T new_value); /** - * Spins forever until the item can be enqueued. Calls yield() after the number of specified - * spins. + * Spins forever until the item can be enqueued. Calls yield() after the number of specified spins. 
*/ boolean enqueue(T new_value, long spins_between_yields); /** - * Returns null when the queue is empty This method should never block (but it may spin for a - * finite amount of time) + * Returns null when the queue is empty This method should never block (but it may spin for a finite amount of time) */ T dequeue(); diff --git a/Base/src/main/java/io/deephaven/base/queue/ProducerConsumer.java b/Base/src/main/java/io/deephaven/base/queue/ProducerConsumer.java index 4691fbfd75d..95657cadaeb 100644 --- a/Base/src/main/java/io/deephaven/base/queue/ProducerConsumer.java +++ b/Base/src/main/java/io/deephaven/base/queue/ProducerConsumer.java @@ -9,14 +9,14 @@ */ public interface ProducerConsumer { /** - * This method should never block (but it may spin for a finite amount of time) Returns true - * when t was successfully produced, else false + * This method should never block (but it may spin for a finite amount of time) Returns true when t was successfully + * produced, else false */ boolean produce(T t); /** - * This method should never block (but it may spin for a finite amount of time) Returns null - * when there is nothing to consume [may create new objects on the fly if necessary] + * This method should never block (but it may spin for a finite amount of time) Returns null when there is nothing + * to consume [may create new objects on the fly if necessary] */ T consume(); @@ -39,8 +39,7 @@ public static interface MultiConsumer extends SingleProducerConsumer { } /** - * Must implement at least: Multiple threads may call produce(), multiple threads may call - * consume() + * Must implement at least: Multiple threads may call produce(), multiple threads may call consume() */ public static interface MultiProducerConsumer extends MultiProducer, MultiConsumer { } diff --git a/Base/src/main/java/io/deephaven/base/reference/CleanupReference.java b/Base/src/main/java/io/deephaven/base/reference/CleanupReference.java index 7c4ec2e5bd9..bdea1a43509 100644 --- 
a/Base/src/main/java/io/deephaven/base/reference/CleanupReference.java +++ b/Base/src/main/java/io/deephaven/base/reference/CleanupReference.java @@ -10,8 +10,7 @@ public interface CleanupReference extends SimpleReference { /** - * Allow for a reference queue consumer to invoke a custom cleanup method, for post-GC resource - * reclamation. + * Allow for a reference queue consumer to invoke a custom cleanup method, for post-GC resource reclamation. */ void cleanup(); } diff --git a/Base/src/main/java/io/deephaven/base/reference/DowngradableReference.java b/Base/src/main/java/io/deephaven/base/reference/DowngradableReference.java index a63d7d6518c..ccf2963bddd 100644 --- a/Base/src/main/java/io/deephaven/base/reference/DowngradableReference.java +++ b/Base/src/main/java/io/deephaven/base/reference/DowngradableReference.java @@ -7,8 +7,7 @@ import java.lang.ref.WeakReference; /** - * SimpleReference implementation that allows a downgrade from strongly reachable to weakly - * reachable. + * SimpleReference implementation that allows a downgrade from strongly reachable to weakly reachable. * * @note This only applies (obviously) to this reference's relationship to the referent. * @param @@ -32,8 +31,8 @@ public void clear() { } /** - * Eliminate this object's hard reference to the referent. Converts the reachability enforced by - * this object from hard to weak. + * Eliminate this object's hard reference to the referent. Converts the reachability enforced by this object from + * hard to weak. */ public void downgrade() { hardReference = null; diff --git a/Base/src/main/java/io/deephaven/base/reference/WeakCleanupReference.java b/Base/src/main/java/io/deephaven/base/reference/WeakCleanupReference.java index e6ac4aae814..fc8ea50c97e 100644 --- a/Base/src/main/java/io/deephaven/base/reference/WeakCleanupReference.java +++ b/Base/src/main/java/io/deephaven/base/reference/WeakCleanupReference.java @@ -9,8 +9,7 @@ /** * A weakly-held CleanupReference. 
*/ -public abstract class WeakCleanupReference extends WeakSimpleReference - implements CleanupReference { +public abstract class WeakCleanupReference extends WeakSimpleReference implements CleanupReference { public WeakCleanupReference(final T referent, final ReferenceQueue referenceQueue) { super(referent, referenceQueue); diff --git a/Base/src/main/java/io/deephaven/base/reference/WeakReferenceWrapper.java b/Base/src/main/java/io/deephaven/base/reference/WeakReferenceWrapper.java index c40cfce7b8d..7abfe31e3fe 100644 --- a/Base/src/main/java/io/deephaven/base/reference/WeakReferenceWrapper.java +++ b/Base/src/main/java/io/deephaven/base/reference/WeakReferenceWrapper.java @@ -6,16 +6,15 @@ /** *

- * SimpleReference implementation created to interpose a strong/hard reference in place of a weak - * reference, with reachability subject to the continued reachability of the wrapped referent via - * the wrapped reference. + * SimpleReference implementation created to interpose a strong/hard reference in place of a weak reference, with + * reachability subject to the continued reachability of the wrapped referent via the wrapped reference. * *

- * In general, this only makes sense for concrete subclasses that are simultaneously T's and - * SimpleReferences to T's. The intended use case is for callback/listener registration chains that - * maintain reachability for all but the final link in the chain. Classes that wish to enable this - * functionality must construct their listener references with maybeCreateWeakReference in order to - * avoid rendering a WeakReferenceWrapper weakly reachable and thereby breaking the chain. + * In general, this only makes sense for concrete subclasses that are simultaneously T's and SimpleReferences to T's. + * The intended use case is for callback/listener registration chains that maintain reachability for all but the final + * link in the chain. Classes that wish to enable this functionality must construct their listener references with + * maybeCreateWeakReference in order to avoid rendering a WeakReferenceWrapper weakly reachable and thereby breaking the + * chain. */ public abstract class WeakReferenceWrapper implements SimpleReference { diff --git a/Base/src/main/java/io/deephaven/base/stats/Composite.java b/Base/src/main/java/io/deephaven/base/stats/Composite.java index 2d91aca11d3..141d0884244 100644 --- a/Base/src/main/java/io/deephaven/base/stats/Composite.java +++ b/Base/src/main/java/io/deephaven/base/stats/Composite.java @@ -9,10 +9,10 @@ // -------------------------------------------------------------------- /** - * A statistic that represents the composite or rolled-up value of a set of child statistics, as - * best as possible. This statistic cannot be {@link #sample}d or {@link #increment}ed. Currently, - * calls to {@link #update} are silently ignored, which also means that Composites are not logged. - * Thus a Composite is currently only useful for summaries in web/JMX displays. + * A statistic that represents the composite or rolled-up value of a set of child statistics, as best as possible. 
This + * statistic cannot be {@link #sample}d or {@link #increment}ed. Currently, calls to {@link #update} are silently + * ignored, which also means that Composites are not logged. Thus a Composite is currently only useful for summaries in + * web/JMX displays. */ public class Composite extends Value { @@ -32,8 +32,7 @@ private static Value[] checkValues(Value[] values) { char typeTag = values[0].getTypeTag(); for (int nIndex = 1; nIndex < values.length; nIndex++) { Require.neqNull(values[nIndex], "values[nIndex]", 1); - Require.eq(values[nIndex].getTypeTag(), "values[nIndex].getTypeTag()", typeTag, - "typeTag", 1); + Require.eq(values[nIndex].getTypeTag(), "values[nIndex].getTypeTag()", typeTag, "typeTag", 1); } return values; } @@ -114,8 +113,7 @@ public History getHistory() { // ---------------------------------------------------------------- @Override // from Value - public void update(Item item, ItemUpdateListener listener, long logInterval, long now, - long appNow) { + public void update(Item item, ItemUpdateListener listener, long logInterval, long now, long appNow) { // composites are not updated (or logged) } diff --git a/Base/src/main/java/io/deephaven/base/stats/Counter.java b/Base/src/main/java/io/deephaven/base/stats/Counter.java index 962bc419e4a..e1032486f32 100644 --- a/Base/src/main/java/io/deephaven/base/stats/Counter.java +++ b/Base/src/main/java/io/deephaven/base/stats/Counter.java @@ -8,16 +8,15 @@ // -------------------------------------------------------------------- /** - * A statistic where each value represents a additive quantity, and thus the sum of the values - * does have meaning. Examples include event counts and processing duration. If the sum of - * the values does not have a useful interpretation, use {@link State} instead. + * A statistic where each value represents a additive quantity, and thus the sum of the values does have meaning. + * Examples include event counts and processing duration. 
If the sum of the values does not have a useful + * interpretation, use {@link State} instead. *

    - *
  • {@link #increment} updates the counter, recording a single value. This is the most common - * usage. ({@link #sample} does exactly the same thing but is a poor verb to use with a Counter.) - *
  • {@link #incrementFromSample} updates the counter, recording a value that is the difference - * between this sample and the last sample. (The first call just sets the "last" sample and does not - * record a value.) For example, this can be used to CPU usage rate when only a running total is - * available by periodically sampling the running total. + *
  • {@link #increment} updates the counter, recording a single value. This is the most common usage. ({@link #sample} + * does exactly the same thing but is a poor verb to use with a Counter.) + *
  • {@link #incrementFromSample} updates the counter, recording a value that is the difference between this sample + * and the last sample. (The first call just sets the "last" sample and does not record a value.) For example, this can + * be used to CPU usage rate when only a running total is available by periodically sampling the running total. *
*/ public class Counter extends Value { @@ -41,10 +40,9 @@ public char getTypeTag() { return TYPE_TAG; } - public static final Function.Unary FACTORY = - new Function.Unary() { - public Counter call(Long now) { - return new Counter(now); - } - }; + public static final Function.Unary FACTORY = new Function.Unary() { + public Counter call(Long now) { + return new Counter(now); + } + }; } diff --git a/Base/src/main/java/io/deephaven/base/stats/Group.java b/Base/src/main/java/io/deephaven/base/stats/Group.java index 409aa174939..b5cbc1f8e43 100644 --- a/Base/src/main/java/io/deephaven/base/stats/Group.java +++ b/Base/src/main/java/io/deephaven/base/stats/Group.java @@ -30,8 +30,8 @@ void setDescription(String description) { } /** get or create a named, top-level item */ - synchronized Item makeItem(String name, - Function.Unary valueFactory, String description, long now) { + synchronized Item makeItem(String name, Function.Unary valueFactory, + String description, long now) { for (Item i : items) { if (i.getName().equals(name)) { return i; @@ -43,8 +43,8 @@ synchronized Item makeItem(String name, } /** get or create a named, top-level item */ - synchronized Item makeItem(String name, - Function.Binary valueFactory, String description, long now, Arg arg) { + synchronized Item makeItem(String name, Function.Binary valueFactory, + String description, long now, Arg arg) { for (Item i : items) { if (i.getName().equals(name)) { return i; @@ -96,8 +96,7 @@ public Item[] getItems() { } /** - * Update the histories of all items in this group, logging all updated intervals >= - * logInterval. + * Update the histories of all items in this group, logging all updated intervals >= logInterval. 
*/ public void update(ItemUpdateListener listener, long logInterval, long now, long appNow) { Item[] arr = itemsArray; diff --git a/Base/src/main/java/io/deephaven/base/stats/HistogramPower2.java b/Base/src/main/java/io/deephaven/base/stats/HistogramPower2.java index f9996e311e6..95093dd866a 100644 --- a/Base/src/main/java/io/deephaven/base/stats/HistogramPower2.java +++ b/Base/src/main/java/io/deephaven/base/stats/HistogramPower2.java @@ -7,10 +7,9 @@ import io.deephaven.base.Function; /** - * This class accumulates samples in a 64 bin histogram with the property that for a sample value of - * n, the bin index will be log2(n)+1 (offset by 1) since there are no unsinged numbers, and placing - * negative and 0 values in bin 0 preserves order, with maximum appearing in bin index 63 (max pos - * numbers) + * This class accumulates samples in a 64 bin histogram with the property that for a sample value of n, the bin index + * will be log2(n)+1 (offset by 1) since there are no unsinged numbers, and placing negative and 0 values in bin 0 + * preserves order, with maximum appearing in bin index 63 (max pos numbers) */ public class HistogramPower2 extends Value { @@ -34,13 +33,12 @@ public void sample(long n) { ++m_samples; } - private final ThreadLocal threadLocalStringBuilder = - new ThreadLocal() { - @Override - protected StringBuilder initialValue() { - return new StringBuilder(100); - } - }; + private final ThreadLocal threadLocalStringBuilder = new ThreadLocal() { + @Override + protected StringBuilder initialValue() { + return new StringBuilder(100); + } + }; public String getHistogramString() { if (m_samples == 0) { @@ -85,10 +83,9 @@ public char getTypeTag() { return TYPE_TAG; } - public static final Function.Unary FACTORY = - new Function.Unary() { - public HistogramPower2 call(Long now) { - return new HistogramPower2(now); - } - }; + public static final Function.Unary FACTORY = new Function.Unary() { + public HistogramPower2 call(Long now) { + return new 
HistogramPower2(now); + } + }; } diff --git a/Base/src/main/java/io/deephaven/base/stats/HistogramState.java b/Base/src/main/java/io/deephaven/base/stats/HistogramState.java index 005017a19c8..8649a6c2584 100644 --- a/Base/src/main/java/io/deephaven/base/stats/HistogramState.java +++ b/Base/src/main/java/io/deephaven/base/stats/HistogramState.java @@ -22,16 +22,14 @@ private int getBucket(long sample) { return 1 + (int) ((sample - rangeMin) / rangeBucket); } - public static class Spec { // For packaging as a single argument to makeItem through the FACTORY - // (see below) + public static class Spec { // For packaging as a single argument to makeItem through the FACTORY (see below) String groupName; String itemName; long rangeMin; long rangeMax; int numBuckets; - public Spec(String groupName, String itemName, long rangeMin, long rangeMax, - int numBuckets) { + public Spec(String groupName, String itemName, long rangeMin, long rangeMax, int numBuckets) { this.groupName = groupName; this.itemName = itemName; this.rangeMin = rangeMin; @@ -47,19 +45,18 @@ public HistogramState(long now, Spec spec) { this.rangeBucket = (double) (rangeMax - rangeMin) / spec.numBuckets; this.buckets = new State[spec.numBuckets + 2]; this.buckets[0] = Stats.makeItem(spec.groupName, spec.itemName + "[0]", State.FACTORY, - "Values of " + spec.itemName + " less than " + rangeMin, now).getValue(); + "Values of " + spec.itemName + " less than " + rangeMin, now).getValue(); for (int i = 1; i <= spec.numBuckets; ++i) { - this.buckets[i] = - Stats.makeItem(spec.groupName, spec.itemName + '[' + i + ']', State.FACTORY, - "Values of " + spec.itemName + " between " - + (long) (rangeMin + (i - 1) * rangeBucket) + " (incl.) and " - + (long) (rangeMin + i * rangeBucket) + " (excl.)", - now).getValue(); + this.buckets[i] = Stats + .makeItem(spec.groupName, spec.itemName + '[' + i + ']', State.FACTORY, + "Values of " + spec.itemName + " between " + (long) (rangeMin + (i - 1) * rangeBucket) + + " (incl.) 
and " + (long) (rangeMin + i * rangeBucket) + " (excl.)", + now) + .getValue(); } - this.buckets[spec.numBuckets + 1] = Stats - .makeItem(spec.groupName, spec.itemName + '[' + (spec.numBuckets + 1) + ']', - State.FACTORY, "Values of " + spec.itemName + " at least " + rangeMax, now) - .getValue(); + this.buckets[spec.numBuckets + 1] = + Stats.makeItem(spec.groupName, spec.itemName + '[' + (spec.numBuckets + 1) + ']', State.FACTORY, + "Values of " + spec.itemName + " at least " + rangeMax, now).getValue(); } public char getTypeTag() { @@ -87,9 +84,9 @@ public void reset() { } public static final Function.Binary FACTORY = - new Function.Binary() { - public HistogramState call(Long now, Spec spec) { - return new HistogramState(now, spec); - } - }; + new Function.Binary() { + public HistogramState call(Long now, Spec spec) { + return new HistogramState(now, spec); + } + }; } diff --git a/Base/src/main/java/io/deephaven/base/stats/History.java b/Base/src/main/java/io/deephaven/base/stats/History.java index bdcae2c243a..144174a8639 100644 --- a/Base/src/main/java/io/deephaven/base/stats/History.java +++ b/Base/src/main/java/io/deephaven/base/stats/History.java @@ -16,8 +16,8 @@ public class History { public static final int INTERVAL_15M_INDEX = 4; public static final int INTERVAL_TOTAL_INDEX = 5; - public static final int DEPTH = 2; // min is 2 (current partial interval + last full interval), - // can be larger to retain more history + public static final int DEPTH = 2; // min is 2 (current partial interval + last full interval), can be larger to + // retain more history public static int intervalIndex(String s) { for (int i = 0; i < INTERVAL_NAMES.length; ++i) { @@ -57,8 +57,8 @@ public History(long now) { } /** - * Update the depth-zero history for all intervals, and begin new intervals for those whose - * depth-zero histories are now in the past. 
+ * Update the depth-zero history for all intervals, and begin new intervals for those whose depth-zero histories are + * now in the past. * * @return the highest-numbered interval which has been newly started. */ @@ -165,11 +165,10 @@ public long getStdev(int i, int d) { if (nSamples <= 1) { return 0; } - double var = - sum2 / (nSamples - 1) - ave * ave * ((double) nSamples / (double) (nSamples - 1)); + double var = sum2 / (nSamples - 1) - ave * ave * ((double) nSamples / (double) (nSamples - 1)); if (var < 0.0) { - return -1; // if sum2 goes overflow on us variance could go nuts ... be safe and log a - // junk variance value instead of exploding + return -1; // if sum2 goes overflow on us variance could go nuts ... be safe and log a junk variance value + // instead of exploding } else { return (long) Math.ceil(Math.sqrt(var)); } diff --git a/Base/src/main/java/io/deephaven/base/stats/ItemUpdateListener.java b/Base/src/main/java/io/deephaven/base/stats/ItemUpdateListener.java index 5d98f0b81f7..1637a1171f5 100644 --- a/Base/src/main/java/io/deephaven/base/stats/ItemUpdateListener.java +++ b/Base/src/main/java/io/deephaven/base/stats/ItemUpdateListener.java @@ -5,12 +5,12 @@ package io.deephaven.base.stats; public interface ItemUpdateListener { - public void handleItemUpdated(Item item, long now, long appNow, int intervalIndex, - long intervalMillis, String intervalName); + public void handleItemUpdated(Item item, long now, long appNow, int intervalIndex, long intervalMillis, + String intervalName); public static final ItemUpdateListener NULL = new ItemUpdateListener() { - public void handleItemUpdated(Item item, long now, long appNow, int intervalIndex, - long intervalMillis, String intervalName) { + public void handleItemUpdated(Item item, long now, long appNow, int intervalIndex, long intervalMillis, + String intervalName) { // empty } }; diff --git a/Base/src/main/java/io/deephaven/base/stats/State.java b/Base/src/main/java/io/deephaven/base/stats/State.java 
index d3515d5bd85..031ecbc8927 100644 --- a/Base/src/main/java/io/deephaven/base/stats/State.java +++ b/Base/src/main/java/io/deephaven/base/stats/State.java @@ -8,16 +8,15 @@ // -------------------------------------------------------------------- /** - * A statistic where each value represents a snapshot of the state of the system, and thus the sum - * of the values does not have any meaning. Examples include queue size and latency. If the - * sum of the values does have a useful interpretation, use {@link Counter} instead. + * A statistic where each value represents a snapshot of the state of the system, and thus the sum of the values does + * not have any meaning. Examples include queue size and latency. If the sum of the values does have a useful + * interpretation, use {@link Counter} instead. *
    - *
  • {@link #sample} takes a sample, recording a single value. This is the most common usage. - * ({@link #increment} does exactly the same thing but is a poor verb to use with a State.) - *
  • {@link #sampleFromIncrement} takes a sample, recording a value that is the last sample plus - * the given increment. (If no samples have yet been taken, the "last" sample is assumed to be 0). - * For example, this can be used to track a queue's size by calling it every time an item is added - * or removed. + *
  • {@link #sample} takes a sample, recording a single value. This is the most common usage. ({@link #increment} does + * exactly the same thing but is a poor verb to use with a State.) + *
  • {@link #sampleFromIncrement} takes a sample, recording a value that is the last sample plus the given increment. + * (If no samples have yet been taken, the "last" sample is assumed to be 0). For example, this can be used to track a + * queue's size by calling it every time an item is added or removed. *
*/ public class State extends Value { diff --git a/Base/src/main/java/io/deephaven/base/stats/Stats.java b/Base/src/main/java/io/deephaven/base/stats/Stats.java index 110abb84d2d..beff834e2c8 100644 --- a/Base/src/main/java/io/deephaven/base/stats/Stats.java +++ b/Base/src/main/java/io/deephaven/base/stats/Stats.java @@ -12,13 +12,12 @@ public class Stats { /** A non-static interface to the Stats component. */ public interface Maker { - Item makeItem(String groupName, String itemName, - Function.Unary valueFactory); + Item makeItem(String groupName, String itemName, Function.Unary valueFactory); Maker DEFAULT = new Maker() { @Override public Item makeItem(String groupName, String itemName, - Function.Unary valueFactory) { + Function.Unary valueFactory) { return Stats.makeItem(groupName, itemName, valueFactory); } }; @@ -65,88 +64,80 @@ public static synchronized Group makeGroup(String name, String description) { /** get or create a new item */ public static synchronized Item makeItem(String groupName, String itemName, - Function.Unary valueFactory) { - return makeItem(groupName, itemName, valueFactory, UNKNOWN_DESCRIPTION, - timeSource.currentTimeMillis()); + Function.Unary valueFactory) { + return makeItem(groupName, itemName, valueFactory, UNKNOWN_DESCRIPTION, timeSource.currentTimeMillis()); } /** get or create a new item */ public static synchronized Item makeItem(String groupName, String itemName, - Function.Unary valueFactory, long now) { + Function.Unary valueFactory, long now) { return makeItem(groupName, itemName, valueFactory, UNKNOWN_DESCRIPTION, now); } /** get or create a new item */ public static synchronized Item makeItem(String groupName, String itemName, - Function.Unary valueFactory, String description) { - return makeItem(groupName, itemName, valueFactory, description, - timeSource.currentTimeMillis()); + Function.Unary valueFactory, String description) { + return makeItem(groupName, itemName, valueFactory, description, 
timeSource.currentTimeMillis()); } /** get or create a new item */ public static synchronized Item makeItem(String groupName, String itemName, - Function.Unary valueFactory, String description, long now) { + Function.Unary valueFactory, String description, long now) { Group g = makeGroup(groupName, null); return g.makeItem(itemName, valueFactory, description, now); } /** get or create a new item with a one-argument factory */ - public static synchronized Item makeItem(String groupName, - String itemName, Function.Binary valueFactory, Arg arg) { - return makeItem(groupName, itemName, valueFactory, UNKNOWN_DESCRIPTION, - timeSource.currentTimeMillis(), arg); + public static synchronized Item makeItem(String groupName, String itemName, + Function.Binary valueFactory, Arg arg) { + return makeItem(groupName, itemName, valueFactory, UNKNOWN_DESCRIPTION, timeSource.currentTimeMillis(), arg); } /** get or create a new item with a one-argument factory */ - public static synchronized Item makeItem(String groupName, - String itemName, Function.Binary valueFactory, long now, Arg arg) { + public static synchronized Item makeItem(String groupName, String itemName, + Function.Binary valueFactory, long now, Arg arg) { return makeItem(groupName, itemName, valueFactory, UNKNOWN_DESCRIPTION, now, arg); } /** get or create a new item with a one-argument factory */ - public static synchronized Item makeItem(String groupName, - String itemName, Function.Binary valueFactory, String description, Arg arg) { - return makeItem(groupName, itemName, valueFactory, description, - timeSource.currentTimeMillis(), arg); + public static synchronized Item makeItem(String groupName, String itemName, + Function.Binary valueFactory, String description, Arg arg) { + return makeItem(groupName, itemName, valueFactory, description, timeSource.currentTimeMillis(), arg); } /** get or create a new item with a one-argument factory */ - public static synchronized Item makeItem(String groupName, - String 
itemName, Function.Binary valueFactory, String description, long now, - Arg arg) { + public static synchronized Item makeItem(String groupName, String itemName, + Function.Binary valueFactory, String description, long now, Arg arg) { Group g = makeGroup(groupName, null); return g.makeItem(itemName, valueFactory, description, now, arg); } /** get or create a new histogrammed item */ - public static synchronized Item makeHistogram( - String groupName, String itemName, long rangeMin, long rangeMax, int numBuckets) { - return makeHistogram(groupName, itemName, UNKNOWN_DESCRIPTION, - timeSource.currentTimeMillis(), rangeMin, rangeMax, numBuckets); + public static synchronized Item makeHistogram(String groupName, String itemName, + long rangeMin, long rangeMax, int numBuckets) { + return makeHistogram(groupName, itemName, UNKNOWN_DESCRIPTION, timeSource.currentTimeMillis(), rangeMin, + rangeMax, numBuckets); } /** get or create a new histogrammed item */ - public static synchronized Item makeHistogram( - String groupName, String itemName, long now, long rangeMin, long rangeMax, int numBuckets) { - return makeHistogram(groupName, itemName, UNKNOWN_DESCRIPTION, now, rangeMin, rangeMax, - numBuckets); + public static synchronized Item makeHistogram(String groupName, String itemName, + long now, long rangeMin, long rangeMax, int numBuckets) { + return makeHistogram(groupName, itemName, UNKNOWN_DESCRIPTION, now, rangeMin, rangeMax, numBuckets); } /** get or create a new histogrammed item */ - public static synchronized Item makeHistogram( - String groupName, String itemName, String description, long rangeMin, long rangeMax, - int numBuckets) { - return makeHistogram(groupName, itemName, description, timeSource.currentTimeMillis(), - rangeMin, rangeMax, numBuckets); + public static synchronized Item makeHistogram(String groupName, String itemName, + String description, long rangeMin, long rangeMax, int numBuckets) { + return makeHistogram(groupName, itemName, description, 
timeSource.currentTimeMillis(), rangeMin, rangeMax, + numBuckets); } /** get or create a new histogrammed item */ - public static synchronized Item makeHistogram( - String groupName, String itemName, String description, long now, long rangeMin, - long rangeMax, int numBuckets) { + public static synchronized Item makeHistogram(String groupName, String itemName, + String description, long now, long rangeMin, long rangeMax, int numBuckets) { return Stats.makeItem(groupName, itemName, HistogramState.FACTORY, description, now, - new HistogramState.Spec(groupName, itemName, rangeMin, rangeMax, numBuckets)); + new HistogramState.Spec(groupName, itemName, rangeMin, rangeMax, numBuckets)); } /** return an array of all groups */ @@ -171,11 +162,9 @@ public synchronized static Group getGroup(String name) { } /** - * Update the histories of all items in all groups, logging all updated intervals >= - * logInterval. + * Update the histories of all items in all groups, logging all updated intervals >= logInterval. 
*/ - public synchronized static void update(ItemUpdateListener listener, long now, long appNow, - long logInterval) { + public synchronized static void update(ItemUpdateListener listener, long now, long appNow, long logInterval) { for (Group g : groups) { g.update(listener, logInterval, now, appNow); } diff --git a/Base/src/main/java/io/deephaven/base/stats/Value.java b/Base/src/main/java/io/deephaven/base/stats/Value.java index 66651d07729..d7bdfede90c 100644 --- a/Base/src/main/java/io/deephaven/base/stats/Value.java +++ b/Base/src/main/java/io/deephaven/base/stats/Value.java @@ -86,17 +86,15 @@ public void reset() { min = Long.MAX_VALUE; } - public void update(Item item, ItemUpdateListener listener, long logInterval, long now, - long appNow) { + public void update(Item item, ItemUpdateListener listener, long logInterval, long now, long appNow) { int topInterval = history.update(this, now); reset(); if (History.INTERVALS[topInterval] >= logInterval) { for (int i = 0; i <= topInterval; ++i) { - if (History.INTERVALS[i] >= logInterval && history.getN(i, 1) > 0 - || alwaysUpdated) { + if (History.INTERVALS[i] >= logInterval && history.getN(i, 1) > 0 || alwaysUpdated) { if (listener != null) { listener.handleItemUpdated(item, now, appNow, i, History.INTERVALS[i], - History.INTERVAL_NAMES[i]); + History.INTERVAL_NAMES[i]); } } } diff --git a/Base/src/main/java/io/deephaven/base/string/EncodingInfo.java b/Base/src/main/java/io/deephaven/base/string/EncodingInfo.java index 4abb5289ae6..2e5d40cf82a 100644 --- a/Base/src/main/java/io/deephaven/base/string/EncodingInfo.java +++ b/Base/src/main/java/io/deephaven/base/string/EncodingInfo.java @@ -13,26 +13,26 @@ import java.nio.charset.*; /** - * Encoding info enum, to help code determine what assumptions it can make about a CharSet, as well - * as simplification for encode/decode operations. 
+ * Encoding info enum, to help code determine what assumptions it can make about a CharSet, as well as simplification + * for encode/decode operations. */ public enum EncodingInfo implements LogOutputAppendable { US_ASCII(StandardCharsets.US_ASCII, true), ISO_8859_1(StandardCharsets.ISO_8859_1, true), UTF_8( - StandardCharsets.UTF_8, false), UTF_16BE(StandardCharsets.UTF_16BE, false), UTF_16LE( - StandardCharsets.UTF_16LE, false), UTF_16(StandardCharsets.UTF_16, false); + StandardCharsets.UTF_8, false), UTF_16BE(StandardCharsets.UTF_16BE, + false), UTF_16LE(StandardCharsets.UTF_16LE, false), UTF_16(StandardCharsets.UTF_16, false); private final Charset charset; private final String encodingName; - private final boolean isSimple; // Can we simply cast single bytes to single chars without - // breaking anything? Not multi-byte, subset of UCS-2. + private final boolean isSimple; // Can we simply cast single bytes to single chars without breaking anything? Not + // multi-byte, subset of UCS-2. private final ThreadLocal> encoder; private final ThreadLocal> decoder; EncodingInfo(@NotNull final Charset charset, - @NotNull final String encodingName, - final boolean isSimple) { + @NotNull final String encodingName, + final boolean isSimple) { this.charset = Require.neqNull(charset, "charSet"); this.encodingName = Require.neqNull(encodingName, "encodingName"); this.isSimple = isSimple; @@ -42,13 +42,13 @@ public enum EncodingInfo implements LogOutputAppendable { } EncodingInfo(@NotNull final Charset charset, - final boolean isSimple) { + final boolean isSimple) { this(charset, charset.name(), isSimple); } @SuppressWarnings("unused") EncodingInfo(@NotNull final String encodingName, - final boolean isSimple) { + final boolean isSimple) { this(Charset.forName(encodingName), encodingName, isSimple); } @@ -57,8 +57,7 @@ public String getEncodingName() { } /** - * Can this encoding info's charset be encoded or decoded by simple linear assignment of - * char->byte or byte->char. 
+ * Can this encoding info's charset be encoded or decoded by simple linear assignment of char->byte or byte->char. * * @return Whether this encoding info's charset is simple */ @@ -72,7 +71,7 @@ public Charset getCharset() { private CharsetEncoder makeEncoder() { return charset.newEncoder().onMalformedInput(CodingErrorAction.REPLACE) - .onUnmappableCharacter(CodingErrorAction.REPLACE); + .onUnmappableCharacter(CodingErrorAction.REPLACE); } /** @@ -80,8 +79,8 @@ private CharsetEncoder makeEncoder() { * Get a thread local encoder for this encoding info. * *

- * The encoder will be setup to replace malformed input or unmappable characters, and these - * settings should be restored if changed. + * The encoder will be setup to replace malformed input or unmappable characters, and these settings should be + * restored if changed. * * @return A thread local encoder for this encoding info */ @@ -95,7 +94,7 @@ public CharsetEncoder getEncoder() { private CharsetDecoder makeDecoder() { return charset.newDecoder().onMalformedInput(CodingErrorAction.REPLACE) - .onUnmappableCharacter(CodingErrorAction.REPLACE); + .onUnmappableCharacter(CodingErrorAction.REPLACE); } /** @@ -103,8 +102,8 @@ private CharsetDecoder makeDecoder() { * Get a thread local decoder for this encoding info. * *

- * The decoder will be setup to replace malformed input or unmappable characters, and these - * settings should be restored if changed. + * The decoder will be setup to replace malformed input or unmappable characters, and these settings should be + * restored if changed. * * @return A thread local decoder for this encoding info */ diff --git a/Base/src/main/java/io/deephaven/base/string/cache/AbstractCompressedString.java b/Base/src/main/java/io/deephaven/base/string/cache/AbstractCompressedString.java index 842c87f60b4..b2fb1aa1c14 100644 --- a/Base/src/main/java/io/deephaven/base/string/cache/AbstractCompressedString.java +++ b/Base/src/main/java/io/deephaven/base/string/cache/AbstractCompressedString.java @@ -17,8 +17,7 @@ /** * Base class for immutable byte[]-backed String replacements. */ -public abstract class AbstractCompressedString - implements StringAlike { +public abstract class AbstractCompressedString implements StringAlike { private static final long serialVersionUID = -2596527344240947333L; private static final Charset ENCODING = StandardCharsets.ISO_8859_1; @@ -83,8 +82,7 @@ public abstract class AbstractCompressedString - implements StringCache { +public class AlwaysCreateStringCache implements StringCache { public static final StringCache STRING_INSTANCE = - new AlwaysCreateStringCache<>(StringCacheTypeAdapterStringImpl.INSTANCE); + new AlwaysCreateStringCache<>(StringCacheTypeAdapterStringImpl.INSTANCE); public static final StringCache COMPRESSED_STRING_INSTANCE = - new AlwaysCreateStringCache<>(StringCacheTypeAdapterCompressedStringImpl.INSTANCE); + new AlwaysCreateStringCache<>(StringCacheTypeAdapterCompressedStringImpl.INSTANCE); public static final StringCache MAPPED_COMPRESSED_STRING_INSTANCE = - new AlwaysCreateStringCache<>(StringCacheTypeAdapterMappedCompressedStringImpl.INSTANCE); + new AlwaysCreateStringCache<>(StringCacheTypeAdapterMappedCompressedStringImpl.INSTANCE); /** * Adapter to make and compare cache members. 
diff --git a/Base/src/main/java/io/deephaven/base/string/cache/ByteArrayCharSequenceAdapterImpl.java b/Base/src/main/java/io/deephaven/base/string/cache/ByteArrayCharSequenceAdapterImpl.java index 928cd632a83..76793b85e22 100644 --- a/Base/src/main/java/io/deephaven/base/string/cache/ByteArrayCharSequenceAdapterImpl.java +++ b/Base/src/main/java/io/deephaven/base/string/cache/ByteArrayCharSequenceAdapterImpl.java @@ -32,8 +32,7 @@ public MappedCompressedString toMappedCompressedString() { /** * Set the byte array backing this CharSequenceAdapter. * - * @param byteArray A byte[] instance that contains a proto-String this adapter knows how to - * convert. + * @param byteArray A byte[] instance that contains a proto-String this adapter knows how to convert. * @param offset The index of the first char in byteArray that belongs to the proto-String. * @param length The length of the proto-String in chars. * @return This CharSequenceAdapter. diff --git a/Base/src/main/java/io/deephaven/base/string/cache/ByteBufferCharSequenceAdapterImpl.java b/Base/src/main/java/io/deephaven/base/string/cache/ByteBufferCharSequenceAdapterImpl.java index c8ff412947a..6ad2a1d4801 100644 --- a/Base/src/main/java/io/deephaven/base/string/cache/ByteBufferCharSequenceAdapterImpl.java +++ b/Base/src/main/java/io/deephaven/base/string/cache/ByteBufferCharSequenceAdapterImpl.java @@ -34,8 +34,7 @@ public MappedCompressedString toMappedCompressedString() { /** * Set the ByteBuffer backing this CharSequenceAdapter. * - * @param buffer A ByteBuffer instance that contains a proto-String this adapter knows how to - * convert. + * @param buffer A ByteBuffer instance that contains a proto-String this adapter knows how to convert. * @param offset The index of the first char in buffer that belongs to the proto-String. * @param length The length of the proto-String in chars. * @return This CharSequenceAdapter. 
diff --git a/Base/src/main/java/io/deephaven/base/string/cache/CharSequenceAdapter.java b/Base/src/main/java/io/deephaven/base/string/cache/CharSequenceAdapter.java index 84993195d9b..237afe752c6 100644 --- a/Base/src/main/java/io/deephaven/base/string/cache/CharSequenceAdapter.java +++ b/Base/src/main/java/io/deephaven/base/string/cache/CharSequenceAdapter.java @@ -7,29 +7,25 @@ import org.jetbrains.annotations.NotNull; /** - * This class provides base functionality for several implementations of CharSequenceAdapter. - * Currently, there's one for ByteBuffers, another for byte arrays, one for sequences of chars, and - * one for chains of Strings. If you have array-backed, writable ByteBuffers, it's probably best to - * use the byte array implementation with the backing array, e.g. for ByteBuffer b, - * ByteBufferCharSequenceAdapterImpl a, proto-String length l, use: a.set(b.array(), b.position() + - * b.arrayOffset(), l); + * This class provides base functionality for several implementations of CharSequenceAdapter. Currently, there's one for + * ByteBuffers, another for byte arrays, one for sequences of chars, and one for chains of Strings. If you have + * array-backed, writable ByteBuffers, it's probably best to use the byte array implementation with the backing array, + * e.g. for ByteBuffer b, ByteBufferCharSequenceAdapterImpl a, proto-String length l, use: a.set(b.array(), b.position() + * + b.arrayOffset(), l); * - * Note: trim() support hasn't been needed/implemented so far. Note: Only Latin-1 (ISO-8859-1) - * characters are expected at this time. Bytes are converted to chars one-for-one with the result - * masked by 0xFF. + * Note: trim() support hasn't been needed/implemented so far. Note: Only Latin-1 (ISO-8859-1) characters are expected + * at this time. Bytes are converted to chars one-for-one with the result masked by 0xFF. * - * Implementations are not thread-safe. 
Pool them, create ThreadLocal instances, or (better) - * instantiate them along natural concurrency boundaries. Implementations allow any proto-String - * (that is, anything that can be expressed as a sequence of chars) to be used as a cache key (in - * ConcurrentUnboundedStringCache instances) and a String creator. + * Implementations are not thread-safe. Pool them, create ThreadLocal instances, or (better) instantiate them along + * natural concurrency boundaries. Implementations allow any proto-String (that is, anything that can be expressed as a + * sequence of chars) to be used as a cache key (in ConcurrentUnboundedStringCache instances) and a String creator. * * Note Subclasses *must* support length(), and charAt(int index). * - * Note The makeString() implementation *must* be consistent with length() and charAt(int index) - - * that is, we require that makeString().contentEquals(this). + * Note The makeString() implementation *must* be consistent with length() and charAt(int index) - that is, we require + * that makeString().contentEquals(this). * - * Note subSequence(int start, int end) is unsupported by default - no StringCache implementations - * need it at this time. + * Note subSequence(int start, int end) is unsupported by default - no StringCache implementations need it at this time. */ public abstract class CharSequenceAdapter implements StringCompatible { diff --git a/Base/src/main/java/io/deephaven/base/string/cache/CharSequenceAdapterBuilder.java b/Base/src/main/java/io/deephaven/base/string/cache/CharSequenceAdapterBuilder.java index 4a2e824f5c0..afa28ad1380 100644 --- a/Base/src/main/java/io/deephaven/base/string/cache/CharSequenceAdapterBuilder.java +++ b/Base/src/main/java/io/deephaven/base/string/cache/CharSequenceAdapterBuilder.java @@ -67,8 +67,7 @@ private char[] ensureSpace(final int needed) { * @param length The length of the proto-String in chars. * @return This adapter. 
*/ - public final CharSequenceAdapterBuilder append(final String value, final int offset, - final int length) { + public final CharSequenceAdapterBuilder append(final String value, final int offset, final int length) { if (length > 0) { value.getChars(offset, offset + length, ensureSpace(length), used); used += length; @@ -95,8 +94,7 @@ public final CharSequenceAdapterBuilder append(final String value) { * @param length The length of the proto-String in chars. * @return This adapter. */ - public final CharSequenceAdapterBuilder append(final CharSequence value, final int offset, - final int length) { + public final CharSequenceAdapterBuilder append(final CharSequence value, final int offset, final int length) { if (length > 0) { ensureSpace(length); cachedHashCode = 0; @@ -117,8 +115,7 @@ public final CharSequenceAdapterBuilder append(final CharSequence value) { return append(value, 0, value.length()); } - public final CharSequenceAdapterBuilder append(final char[] value, final int offset, - final int length) { + public final CharSequenceAdapterBuilder append(final char[] value, final int offset, final int length) { if (length > 0) { System.arraycopy(value, offset, ensureSpace(length), used, length); used += length; @@ -143,8 +140,7 @@ public final CharSequenceAdapterBuilder append(final char value) { return this; } - public final CharSequenceAdapterBuilder append(final byte[] value, final int offset, - final int length) { + public final CharSequenceAdapterBuilder append(final byte[] value, final int offset, final int length) { if (length > 0) { ensureSpace(length); for (int bi = offset; bi < offset + length; ++bi) { diff --git a/Base/src/main/java/io/deephaven/base/string/cache/CharSequenceUtils.java b/Base/src/main/java/io/deephaven/base/string/cache/CharSequenceUtils.java index bfb7f4bf01d..009b7e7de54 100644 --- a/Base/src/main/java/io/deephaven/base/string/cache/CharSequenceUtils.java +++ 
b/Base/src/main/java/io/deephaven/base/string/cache/CharSequenceUtils.java @@ -52,8 +52,7 @@ public static int caseInsensitiveHashCode(@NotNull final CharSequence charSequen * @param cs2 The second CharSequence * @return Whether the supplied CharSequences represent an equal sequence of chars. */ - public static boolean contentEquals(@NotNull final CharSequence cs1, - @NotNull final CharSequence cs2) { + public static boolean contentEquals(@NotNull final CharSequence cs1, @NotNull final CharSequence cs2) { if (cs1 == cs2) { return true; } @@ -77,7 +76,7 @@ public static boolean contentEquals(@NotNull final CharSequence cs1, * @return Whether the supplied CharSequences represent an equal sequence of chars. */ public static boolean nullSafeContentEquals(@Nullable final CharSequence cs1, - @Nullable final CharSequence cs2) { + @Nullable final CharSequence cs2) { if (cs1 == null || cs2 == null) { return cs1 == cs2; } @@ -89,11 +88,9 @@ public static boolean nullSafeContentEquals(@Nullable final CharSequence cs1, * * @param cs1 The first CharSequence * @param cs2 The second CharSequence - * @return Whether the supplied CharSequences represent an equal sequence of chars, disregarding - * case. + * @return Whether the supplied CharSequences represent an equal sequence of chars, disregarding case. */ - public static boolean contentEqualsIgnoreCase(@NotNull final CharSequence cs1, - @NotNull final CharSequence cs2) { + public static boolean contentEqualsIgnoreCase(@NotNull final CharSequence cs1, @NotNull final CharSequence cs2) { if (cs1 == cs2) { return true; } @@ -112,8 +109,8 @@ public static boolean contentEqualsIgnoreCase(@NotNull final CharSequence cs1, if (cs1Char == cs2Char) { continue; } - // Uncomment this if we start caring about any alphabets (e.g. Georgian) that don't have - // consistent conversion to uppercase. + // Uncomment this if we start caring about any alphabets (e.g. Georgian) that don't have consistent + // conversion to uppercase. 
// cs1Char = Character.toLowerCase(cs1Char); // cs2Char = Character.toLowerCase(cs2Char); // if (cs1Char == cs2Char) { @@ -125,15 +122,14 @@ public static boolean contentEqualsIgnoreCase(@NotNull final CharSequence cs1, } /** - * Compare two CharSequences for case-insensitive equality, disregarding class and allowing for - * nullity. + * Compare two CharSequences for case-insensitive equality, disregarding class and allowing for nullity. * * @param cs1 The first CharSequence * @param cs2 The second CharSequence * @return Whether the supplied CharSequences represent an equal sequence of chars. */ public static boolean nullSafeContentEqualsIgnoreCase(@Nullable final CharSequence cs1, - @Nullable final CharSequence cs2) { + @Nullable final CharSequence cs2) { if (cs1 == null || cs2 == null) { return cs1 == cs2; } @@ -152,11 +148,11 @@ public static boolean nullSafeContentEqualsIgnoreCase(@Nullable final CharSequen * @return Whether the regions match */ public static boolean regionMatches(final boolean ignoreCase, - final CharSequence cs1, - final int cs1Offset, - final CharSequence cs2, - final int cs2Offset, - final int length) { + final CharSequence cs1, + final int cs1Offset, + final CharSequence cs2, + final int cs2Offset, + final int length) { for (int ci = 0; ci < length; ++ci) { char cs1Char = cs1.charAt(cs1Offset + ci); char cs2Char = cs2.charAt(cs2Offset + ci); @@ -169,8 +165,8 @@ public static boolean regionMatches(final boolean ignoreCase, if (cs1Char == cs2Char) { continue; } - // Uncomment this if we start caring about any alphabets (e.g. Georgian) that don't - // have consistent conversion to uppercase. + // Uncomment this if we start caring about any alphabets (e.g. Georgian) that don't have consistent + // conversion to uppercase. 
// cs1Char = Character.toLowerCase(cs1Char); // cs2Char = Character.toLowerCase(cs2Char); // if (cs1Char == cs2Char) { @@ -185,8 +181,7 @@ public static boolean regionMatches(final boolean ignoreCase, /** * A re-usable case-sensitive Comparator for CharSequences. */ - public static final Comparator CASE_SENSITIVE_COMPARATOR = - new CaseSensitiveComparator(); + public static final Comparator CASE_SENSITIVE_COMPARATOR = new CaseSensitiveComparator(); private static class CaseSensitiveComparator implements Comparator { @@ -210,8 +205,7 @@ public int compare(@NotNull final CharSequence cs1, @NotNull final CharSequence /** * A re-usable case-insensitive Comparator for CharSequences. */ - public static final Comparator CASE_INSENSITIVE_COMPARATOR = - new CaseInsensitiveComparator(); + public static final Comparator CASE_INSENSITIVE_COMPARATOR = new CaseInsensitiveComparator(); private static class CaseInsensitiveComparator implements Comparator { @@ -232,8 +226,8 @@ public int compare(@NotNull final CharSequence cs1, @NotNull final CharSequence if (cs1Char == cs2Char) { continue; } - // Uncomment this if we start caring about any alphabets (e.g. Georgian) that don't - // have consistent conversion to uppercase. + // Uncomment this if we start caring about any alphabets (e.g. Georgian) that don't have consistent + // conversion to uppercase. 
// cs1Char = Character.toLowerCase(cs1Char); // cs2Char = Character.toLowerCase(cs2Char); // if (cs1Char == cs2Char) { diff --git a/Base/src/main/java/io/deephaven/base/string/cache/CompressedString.java b/Base/src/main/java/io/deephaven/base/string/cache/CompressedString.java index bba127d6963..37484e336e0 100644 --- a/Base/src/main/java/io/deephaven/base/string/cache/CompressedString.java +++ b/Base/src/main/java/io/deephaven/base/string/cache/CompressedString.java @@ -61,8 +61,7 @@ protected final CompressedString convertValue(final String string) { } @Override - protected final CompressedString convertValue(final byte[] data, final int offset, - final int length) { + protected final CompressedString convertValue(final byte[] data, final int offset, final int length) { return new CompressedString(data, offset, length); } diff --git a/Base/src/main/java/io/deephaven/base/string/cache/ConcurrentBoundedStringCache.java b/Base/src/main/java/io/deephaven/base/string/cache/ConcurrentBoundedStringCache.java index 855fce60241..e971547881f 100644 --- a/Base/src/main/java/io/deephaven/base/string/cache/ConcurrentBoundedStringCache.java +++ b/Base/src/main/java/io/deephaven/base/string/cache/ConcurrentBoundedStringCache.java @@ -12,14 +12,13 @@ import java.util.Random; /** - * This cache follows the same design as ConcurrentUnboundedStringCache, but uses a KeyedObjectCache - * (bounded, concurrent-get, "pseudo-random pseudo-LRU" replacement) for its internal storage. + * This cache follows the same design as ConcurrentUnboundedStringCache, but uses a KeyedObjectCache (bounded, + * concurrent-get, "pseudo-random pseudo-LRU" replacement) for its internal storage. * - * This implementation is thread-safe, and lock-free except for the insertion of new cached Strings - * on a cache miss. + * This implementation is thread-safe, and lock-free except for the insertion of new cached Strings on a cache miss. 
*/ public class ConcurrentBoundedStringCache - implements StringCache { + implements StringCache { /** * Adapter to make and compare cache members. @@ -36,11 +35,10 @@ public class ConcurrentBoundedStringCache * @param capacity Minimum capacity of the storage backing this cache. * @param collisionFactor Number of possible storage slots a given element might be stored in. */ - public ConcurrentBoundedStringCache(final StringCacheTypeAdapter typeAdapter, - final int capacity, final int collisionFactor) { + public ConcurrentBoundedStringCache(final StringCacheTypeAdapter typeAdapter, final int capacity, + final int collisionFactor) { this.typeAdapter = Require.neqNull(typeAdapter, "typeAdapter"); - cache = - new KeyedObjectCache<>(capacity, collisionFactor, new KeyImpl(), null, new Random()); + cache = new KeyedObjectCache<>(capacity, collisionFactor, new KeyImpl(), null, new Random()); } @Override @@ -64,16 +62,14 @@ public final STRING_LIKE_TYPE getEmptyString() { @NotNull public final STRING_LIKE_TYPE getCachedString(@NotNull final StringCompatible protoString) { final STRING_LIKE_TYPE existingValue = cache.get(protoString); - return existingValue != null ? existingValue - : cache.putIfAbsent(typeAdapter.create(protoString)); + return existingValue != null ? existingValue : cache.putIfAbsent(typeAdapter.create(protoString)); } @Override @NotNull public final STRING_LIKE_TYPE getCachedString(@NotNull final String string) { final STRING_LIKE_TYPE existingValue = cache.get(string); - return existingValue != null ? existingValue - : cache.putIfAbsent(typeAdapter.create(string)); + return existingValue != null ? 
existingValue : cache.putIfAbsent(typeAdapter.create(string)); } // ----------------------------------------------------------------------------------------------------------------- diff --git a/Base/src/main/java/io/deephaven/base/string/cache/ConcurrentUnboundedStringCache.java b/Base/src/main/java/io/deephaven/base/string/cache/ConcurrentUnboundedStringCache.java index 252d3577500..16b73912435 100644 --- a/Base/src/main/java/io/deephaven/base/string/cache/ConcurrentUnboundedStringCache.java +++ b/Base/src/main/java/io/deephaven/base/string/cache/ConcurrentUnboundedStringCache.java @@ -12,22 +12,21 @@ import org.jetbrains.annotations.NotNull; /** - * A very limited interface is specified, in order to decouple typeAdapter pooling and related - * concerns from the cache itself. + * A very limited interface is specified, in order to decouple typeAdapter pooling and related concerns from the cache + * itself. * - * StringCompatibles or Strings used as keys (or values) when probing/populating the cache are - * allowed to use their own hashCode() implementation. This is dangerous, because we rely on our key - * implementation to hash CharSequences identically to a String of the same characters. An assertion - * in the value factory should catch any cases where the built-in assumption breaks down, but we've - * deemed that unnecessary at this time. Specify "debug" in the constructor if you need this check. - * String.hashCode()'s implementation has been stable since JDK 1.2, and is specified in the + * StringCompatibles or Strings used as keys (or values) when probing/populating the cache are allowed to use their own + * hashCode() implementation. This is dangerous, because we rely on our key implementation to hash CharSequences + * identically to a String of the same characters. An assertion in the value factory should catch any cases where the + * built-in assumption breaks down, but we've deemed that unnecessary at this time. 
Specify "debug" in the constructor + * if you need this check. String.hashCode()'s implementation has been stable since JDK 1.2, and is specified in the * JavaDocs. * - * This implementation is thread-safe, and lock-free except for the insertion of new cached Strings - * on a cache miss. StringCompatible implementation thread-safety is a separate concern. + * This implementation is thread-safe, and lock-free except for the insertion of new cached Strings on a cache miss. + * StringCompatible implementation thread-safety is a separate concern. */ public class ConcurrentUnboundedStringCache - implements StringCache { + implements StringCache { /** * Adapter to make and compare cache members. @@ -53,16 +52,14 @@ public class ConcurrentUnboundedStringCache typeAdapter, - final int initialCapacity, final boolean debug) { + public ConcurrentUnboundedStringCache(@NotNull final StringCacheTypeAdapter typeAdapter, + final int initialCapacity, final boolean debug) { this.typeAdapter = Require.neqNull(typeAdapter, "typeAdapter"); cache = new KeyedObjectHashMap<>(initialCapacity, new KeyImpl()); - stringCompatibleKeyValueFactory = debug ? new CheckedStringCompatibleKeyValueFactory() - : new UncheckedStringCompatibleKeyValueFactory(); + stringCompatibleKeyValueFactory = + debug ? new CheckedStringCompatibleKeyValueFactory() : new UncheckedStringCompatibleKeyValueFactory(); stringKeyValueFactory = new StringKeyValueFactory(); } @@ -86,19 +83,16 @@ public final STRING_LIKE_TYPE getEmptyString() { @Override @NotNull public final STRING_LIKE_TYPE getCachedString(@NotNull final StringCompatible protoString) { - // There's an inherent trade-off between the length of time we hold the cache's lock and the - // possibility of + // There's an inherent trade-off between the length of time we hold the cache's lock and the possibility of // wasting constructed Strings due to optimistic construction on cache miss. - // For now, this implementation is optimistic. 
Switch to the following implementation (one - // line) if production + // For now, this implementation is optimistic. Switch to the following implementation (one line) if production // performance shows this to be a poor choice: // return cache.putIfAbsent(protoString, stringCompatibleKeyValueFactory); STRING_LIKE_TYPE existingValue = cache.get(protoString); if (existingValue != null) { return existingValue; } - final STRING_LIKE_TYPE candidateValue = - stringCompatibleKeyValueFactory.newValue(protoString); + final STRING_LIKE_TYPE candidateValue = stringCompatibleKeyValueFactory.newValue(protoString); existingValue = cache.putIfAbsent(candidateValue, candidateValue); if (existingValue != null) { return existingValue; @@ -139,7 +133,7 @@ public boolean equalKey(CharSequence key, STRING_LIKE_TYPE value) { // ----------------------------------------------------------------------------------------------------------------- private class UncheckedStringCompatibleKeyValueFactory - implements KeyedObjectHash.ValueFactory { + implements KeyedObjectHash.ValueFactory { @Override public STRING_LIKE_TYPE newValue(CharSequence key) { @@ -148,20 +142,19 @@ public STRING_LIKE_TYPE newValue(CharSequence key) { } private class CheckedStringCompatibleKeyValueFactory - implements KeyedObjectHash.ValueFactory { + implements KeyedObjectHash.ValueFactory { @Override public STRING_LIKE_TYPE newValue(final CharSequence key) { final STRING_LIKE_TYPE value = typeAdapter.create((StringCompatible) key); - Assert.assertion(CharSequenceUtils.contentEquals(key, value), - "CharSequenceUtils.contentEquals(key, value)", key, "key", value, "value"); + Assert.assertion(CharSequenceUtils.contentEquals(key, value), "CharSequenceUtils.contentEquals(key, value)", + key, "key", value, "value"); Assert.eq(key.hashCode(), "key.hashCode", value.hashCode(), "value.hashCode()"); return value; } } - private class StringKeyValueFactory - implements KeyedObjectHash.ValueFactory { + private class 
StringKeyValueFactory implements KeyedObjectHash.ValueFactory { @Override public STRING_LIKE_TYPE newValue(final CharSequence key) { diff --git a/Base/src/main/java/io/deephaven/base/string/cache/MappedCompressedString.java b/Base/src/main/java/io/deephaven/base/string/cache/MappedCompressedString.java index 1080989e406..6a51d632484 100644 --- a/Base/src/main/java/io/deephaven/base/string/cache/MappedCompressedString.java +++ b/Base/src/main/java/io/deephaven/base/string/cache/MappedCompressedString.java @@ -11,27 +11,24 @@ import java.nio.ByteBuffer; /** - * This is a ridiculously simple, light-as-I-can-make-it, but decidedly single-purpose data - * structure. Specifically, it's a CompressedString with an embedded (to avoid reference or Object - * instance overhead) open-addressed SimpleReference-identity -> int hash map with load - * factor 1 (100%) and no public operations other than "putIfAbsent". + * This is a ridiculously simple, light-as-I-can-make-it, but decidedly single-purpose data structure. Specifically, + * it's a CompressedString with an embedded (to avoid reference or Object instance overhead) open-addressed + * SimpleReference-identity -> int hash map with load factor 1 (100%) and no public operations other than + * "putIfAbsent". * - * The reason for requiring that key objects be SimpleReferences is to allow for O(1) automatic slot - * reclamation across many MappedCompressedString instances at the same time. A given source of - * mappings creates a single SimpleReference to use as a key, and may invalidate mappings (creating - * a deleted slot) simply by clearing the SimpleReference. In practice, a WeakSimpleReference to the - * source itself is used, in order to also allow garbage collection of the mapping source object to - * invalidate all of its mappings. + * The reason for requiring that key objects be SimpleReferences is to allow for O(1) automatic slot reclamation across + * many MappedCompressedString instances at the same time. 
A given source of mappings creates a single SimpleReference + * to use as a key, and may invalidate mappings (creating a deleted slot) simply by clearing the SimpleReference. In + * practice, a WeakSimpleReference to the source itself is used, in order to also allow garbage collection of the + * mapping source object to invalidate all of its mappings. * * Unfortunately, I haven't figured out a way to make this allow concurrent gets. * - * The intended use is in Deephaven import code, for storing SymbolManager -> SymbolId mappings on - * the CompressedString that represents the Symbol itself, typically inside of a (bounded) - * StringCache of MappedCompressedString instances. + * The intended use is in Deephaven import code, for storing SymbolManager -> SymbolId mappings on the CompressedString + * that represents the Symbol itself, typically inside of a (bounded) StringCache of MappedCompressedString instances. * - * Note that this uses io.deephaven.base.reference.SimpleReference instead of - * java.lang.ref.Reference so that unit tests can avoid being required to use the concrete - * sub-classes of Reference, which all come with GC-related side-effects. + * Note that this uses io.deephaven.base.reference.SimpleReference instead of java.lang.ref.Reference so that unit tests + * can avoid being required to use the concrete sub-classes of Reference, which all come with GC-related side-effects. 
*/ public final class MappedCompressedString extends AbstractCompressedString { @@ -87,8 +84,7 @@ protected final MappedCompressedString convertValue(final String string) { } @Override - protected final MappedCompressedString convertValue(final byte[] data, final int offset, - final int length) { + protected final MappedCompressedString convertValue(final byte[] data, final int offset, final int length) { return new MappedCompressedString(data, offset, length); } @@ -99,22 +95,16 @@ public final synchronized int capacity() { /** * Add the specified pair if no mapping already exists for key. * - * @param key A non-null Reference to an arbitrary object whose reachability determines mapping - * validity. - * @param potentialValue The value to insert if none already exists. Must not equal - * NULL_MAPPING_VALUE. - * @return The existing mapped value, if present, or NULL_MAPPING_VALUE if potentialValue was - * used. + * @param key A non-null Reference to an arbitrary object whose reachability determines mapping validity. + * @param potentialValue The value to insert if none already exists. Must not equal NULL_MAPPING_VALUE. + * @return The existing mapped value, if present, or NULL_MAPPING_VALUE if potentialValue was used. 
*/ - public final synchronized int putIfAbsent(final SimpleReference key, - final int potentialValue) { + public final synchronized int putIfAbsent(final SimpleReference key, final int potentialValue) { return putIfAbsentInternal(Require.neqNull(key, "key"), - Require.neq(potentialValue, "potentialValue", NULL_MAPPING_VALUE, "NULL_MAPPING_VALUE"), - true); + Require.neq(potentialValue, "potentialValue", NULL_MAPPING_VALUE, "NULL_MAPPING_VALUE"), true); } - private int putIfAbsentInternal(final SimpleReference key, final int potentialValue, - final boolean allowRehash) { + private int putIfAbsentInternal(final SimpleReference key, final int potentialValue, final boolean allowRehash) { final int firstIndex = firstIndexFor(key); int firstDeletedIndex = NULL_INDEX; @@ -161,8 +151,8 @@ private int putIfAbsentInternal(final SimpleReference key, final int potentia rehash(); return putIfAbsentInternal(key, potentialValue, false); } - throw new IllegalStateException("BUG: No free space found for <" + key + ',' - + potentialValue + ">, but allowRehash is false!"); + throw new IllegalStateException( + "BUG: No free space found for <" + key + ',' + potentialValue + ">, but allowRehash is false!"); } private int firstIndexFor(final SimpleReference key) { @@ -186,8 +176,8 @@ private void rehash() { continue; } if (putIfAbsentInternal(key, oldValues[oki], false) != NULL_MAPPING_VALUE) { - throw new IllegalStateException("BUG: Mapping for <" + oldKeys[oki] + ',' - + oldValues[oki] + "> already present during rehash!"); + throw new IllegalStateException("BUG: Mapping for <" + oldKeys[oki] + ',' + oldValues[oki] + + "> already present during rehash!"); } } } diff --git a/Base/src/main/java/io/deephaven/base/string/cache/OpenAddressedWeakUnboundedStringCache.java b/Base/src/main/java/io/deephaven/base/string/cache/OpenAddressedWeakUnboundedStringCache.java index 4d8860cd9b0..08625191317 100644 --- 
a/Base/src/main/java/io/deephaven/base/string/cache/OpenAddressedWeakUnboundedStringCache.java +++ b/Base/src/main/java/io/deephaven/base/string/cache/OpenAddressedWeakUnboundedStringCache.java @@ -8,11 +8,11 @@ import org.jetbrains.annotations.NotNull; /** - * Unbounded StringCache built around a OpenAddressedCanonicalizationCache, which only enforces weak - * reachability on its STRING_LIKE_TYPE members. + * Unbounded StringCache built around a OpenAddressedCanonicalizationCache, which only enforces weak reachability on its + * STRING_LIKE_TYPE members. */ public class OpenAddressedWeakUnboundedStringCache - implements StringCache { + implements StringCache { /** * Adapter to make and compare cache members. @@ -28,13 +28,13 @@ public class OpenAddressedWeakUnboundedStringCache { + implements OpenAddressedCanonicalizationCache.Adapter { @Override public boolean equals(@NotNull String inputItem, @NotNull Object cachedItem) { // noinspection unchecked return typeAdapter.getType() == cachedItem.getClass() - && typeAdapter.areEqual(inputItem, (STRING_LIKE_TYPE) cachedItem); + && typeAdapter.areEqual(inputItem, (STRING_LIKE_TYPE) cachedItem); } @Override @@ -57,13 +57,13 @@ public STRING_LIKE_TYPE makeCacheableItem(@NotNull String inputItem) { * Canonicalization cache adapter implementation for StringCompatible inputs. 
*/ private class StringCompatibleCanonicalizationCacheAdapter - implements OpenAddressedCanonicalizationCache.Adapter { + implements OpenAddressedCanonicalizationCache.Adapter { @Override public boolean equals(@NotNull StringCompatible inputItem, @NotNull Object cachedItem) { // noinspection unchecked return typeAdapter.getType() == cachedItem.getClass() - && typeAdapter.areEqual(inputItem, (STRING_LIKE_TYPE) cachedItem); + && typeAdapter.areEqual(inputItem, (STRING_LIKE_TYPE) cachedItem); } @Override @@ -87,9 +87,8 @@ public STRING_LIKE_TYPE makeCacheableItem(@NotNull StringCompatible inputItem) { * @param initialCapacity Initial capacity of the map backing this cache */ @SuppressWarnings("unused") - public OpenAddressedWeakUnboundedStringCache( - @NotNull final StringCacheTypeAdapter typeAdapter, - final int initialCapacity) { + public OpenAddressedWeakUnboundedStringCache(@NotNull final StringCacheTypeAdapter typeAdapter, + final int initialCapacity) { this(typeAdapter, new OpenAddressedCanonicalizationCache(initialCapacity)); } @@ -97,9 +96,8 @@ public OpenAddressedWeakUnboundedStringCache( * @param typeAdapter The type adapter for this String cache * @param cache The internal canonicalization cache */ - public OpenAddressedWeakUnboundedStringCache( - @NotNull final StringCacheTypeAdapter typeAdapter, - @NotNull final OpenAddressedCanonicalizationCache cache) { + public OpenAddressedWeakUnboundedStringCache(@NotNull final StringCacheTypeAdapter typeAdapter, + @NotNull final OpenAddressedCanonicalizationCache cache) { this.typeAdapter = typeAdapter; this.cache = cache; stringC14nAdapter = new StringCanonicalizationCacheAdapter(); diff --git a/Base/src/main/java/io/deephaven/base/string/cache/StringAlike.java b/Base/src/main/java/io/deephaven/base/string/cache/StringAlike.java index 21186a5aed5..7e4c3d7937c 100644 --- a/Base/src/main/java/io/deephaven/base/string/cache/StringAlike.java +++ b/Base/src/main/java/io/deephaven/base/string/cache/StringAlike.java @@ 
-10,12 +10,12 @@ import java.util.Locale; /** - * This creates an interface (with more permissive argument types, and generified return types) for - * most public instance methods of class String. + * This creates an interface (with more permissive argument types, and generified return types) for most public instance + * methods of class String. * - * For static methods, often the String implementation should be used and its results converted to - * the desired type via construction. That is, for String method M, and StringAlike class SAC, - * follow the following pattern: new SAC(String.M(args...)) + * For static methods, often the String implementation should be used and its results converted to the desired type via + * construction. That is, for String method M, and StringAlike class SAC, follow the following pattern: new + * SAC(String.M(args...)) * * For JavaDocs, see {@link String}. */ @@ -40,10 +40,10 @@ public interface StringAlike extends StringCompatible, Serializable { int compareToIgnoreCase(@NotNull CharSequence that); boolean regionMatches(boolean ignoreCase, - int offset, - CharSequence that, - int thatOffset, - int length); + int offset, + CharSequence that, + int thatOffset, + int length); boolean startsWith(@NotNull CharSequence prefix, int offset); diff --git a/Base/src/main/java/io/deephaven/base/string/cache/StringCache.java b/Base/src/main/java/io/deephaven/base/string/cache/StringCache.java index 75bb4de7471..cbc5da23e7d 100644 --- a/Base/src/main/java/io/deephaven/base/string/cache/StringCache.java +++ b/Base/src/main/java/io/deephaven/base/string/cache/StringCache.java @@ -16,8 +16,8 @@ public interface StringCache { /** * Get a hint about this cache's capacity and behavior. * - * @return -1 : This is an unbounded cache. 0 : This "cache" doesn't actually perform any - * caching. >0 : Actual capacity bound. + * @return -1 : This is an unbounded cache. 0 : This "cache" doesn't actually perform any caching. >0 : Actual + * capacity bound. 
*/ int capacity(); @@ -35,8 +35,8 @@ public interface StringCache { /** * @param protoString The string-like CharSequence to look up - * @return A cached STRING_LIKE_TYPE that corresponds to the current value of the CharSequence - * expressed by protoString + * @return A cached STRING_LIKE_TYPE that corresponds to the current value of the CharSequence expressed by + * protoString */ @NotNull STRING_LIKE_TYPE getCachedString(@NotNull StringCompatible protoString); @@ -55,8 +55,7 @@ public interface StringCache { * @return A cached STRING_LIKE_TYPE that corresponds to charSequence */ default @NotNull STRING_LIKE_TYPE getCachedString(@NotNull CharSequence charSequence) { - return charSequence instanceof StringCompatible - ? getCachedString((StringCompatible) charSequence) - : getCachedString(charSequence.toString()); + return charSequence instanceof StringCompatible ? getCachedString((StringCompatible) charSequence) + : getCachedString(charSequence.toString()); } } diff --git a/Base/src/main/java/io/deephaven/base/string/cache/StringCacheTypeAdapter.java b/Base/src/main/java/io/deephaven/base/string/cache/StringCacheTypeAdapter.java index 88354eae4e2..88b1279bde2 100644 --- a/Base/src/main/java/io/deephaven/base/string/cache/StringCacheTypeAdapter.java +++ b/Base/src/main/java/io/deephaven/base/string/cache/StringCacheTypeAdapter.java @@ -42,8 +42,7 @@ public interface StringCacheTypeAdapter { STRING_LIKE_TYPE create(@NotNull StringCompatible protoString); /** - * Compare key (Assumed to be a String *or* a StringCompatible) with value (created by this - * factory). + * Compare key (Assumed to be a String *or* a StringCompatible) with value (created by this factory). 
* * @param key The key * @param value The value diff --git a/Base/src/main/java/io/deephaven/base/string/cache/StringCacheTypeAdapterCompressedStringImpl.java b/Base/src/main/java/io/deephaven/base/string/cache/StringCacheTypeAdapterCompressedStringImpl.java index 4c8aed96e8a..cee10ed2991 100644 --- a/Base/src/main/java/io/deephaven/base/string/cache/StringCacheTypeAdapterCompressedStringImpl.java +++ b/Base/src/main/java/io/deephaven/base/string/cache/StringCacheTypeAdapterCompressedStringImpl.java @@ -9,11 +9,10 @@ /** * Type adapter for CompressedStrings. */ -public class StringCacheTypeAdapterCompressedStringImpl - implements StringCacheTypeAdapter { +public class StringCacheTypeAdapterCompressedStringImpl implements StringCacheTypeAdapter { public static final StringCacheTypeAdapter INSTANCE = - new StringCacheTypeAdapterCompressedStringImpl(); + new StringCacheTypeAdapterCompressedStringImpl(); private static final CompressedString EMPTY_VALUE = new CompressedString(""); @@ -44,8 +43,7 @@ public final CompressedString create(@NotNull final StringCompatible protoString } @Override - public final boolean areEqual(@NotNull final CharSequence key, - @NotNull final CompressedString value) { + public final boolean areEqual(@NotNull final CharSequence key, @NotNull final CompressedString value) { return CharSequenceUtils.contentEquals(key, value); } } diff --git a/Base/src/main/java/io/deephaven/base/string/cache/StringCacheTypeAdapterMappedCompressedStringImpl.java b/Base/src/main/java/io/deephaven/base/string/cache/StringCacheTypeAdapterMappedCompressedStringImpl.java index 9c365dc4941..eb6f969943e 100644 --- a/Base/src/main/java/io/deephaven/base/string/cache/StringCacheTypeAdapterMappedCompressedStringImpl.java +++ b/Base/src/main/java/io/deephaven/base/string/cache/StringCacheTypeAdapterMappedCompressedStringImpl.java @@ -10,10 +10,10 @@ * Type adapter for MappedCompressedStrings. 
*/ public class StringCacheTypeAdapterMappedCompressedStringImpl - implements StringCacheTypeAdapter { + implements StringCacheTypeAdapter { public static final StringCacheTypeAdapter INSTANCE = - new StringCacheTypeAdapterMappedCompressedStringImpl(); + new StringCacheTypeAdapterMappedCompressedStringImpl(); private static final MappedCompressedString EMPTY_VALUE = new MappedCompressedString(""); @@ -44,8 +44,7 @@ public final MappedCompressedString create(@NotNull final StringCompatible proto } @Override - public final boolean areEqual(@NotNull final CharSequence key, - @NotNull final MappedCompressedString value) { + public final boolean areEqual(@NotNull final CharSequence key, @NotNull final MappedCompressedString value) { return CharSequenceUtils.contentEquals(key, value); } } diff --git a/Base/src/main/java/io/deephaven/base/string/cache/StringCacheTypeAdapterStringImpl.java b/Base/src/main/java/io/deephaven/base/string/cache/StringCacheTypeAdapterStringImpl.java index 9dad0c0938a..860bdbd9ab7 100644 --- a/Base/src/main/java/io/deephaven/base/string/cache/StringCacheTypeAdapterStringImpl.java +++ b/Base/src/main/java/io/deephaven/base/string/cache/StringCacheTypeAdapterStringImpl.java @@ -11,8 +11,7 @@ */ public class StringCacheTypeAdapterStringImpl implements StringCacheTypeAdapter { - public static final StringCacheTypeAdapter INSTANCE = - new StringCacheTypeAdapterStringImpl(); + public static final StringCacheTypeAdapter INSTANCE = new StringCacheTypeAdapterStringImpl(); private static final String EMPTY_VALUE = ""; diff --git a/Base/src/main/java/io/deephaven/base/string/cache/StringCompatible.java b/Base/src/main/java/io/deephaven/base/string/cache/StringCompatible.java index a04bf84911f..592d19f1403 100644 --- a/Base/src/main/java/io/deephaven/base/string/cache/StringCompatible.java +++ b/Base/src/main/java/io/deephaven/base/string/cache/StringCompatible.java @@ -13,35 +13,32 @@ public interface StringCompatible extends CharSequence, Comparable { /** 
- * Convert this StringCompatible into a String. Implementations should not cache result Strings, - * in order to avoid inadvertently allowing promotion of short-lived objects under generational - * garbage collection. + * Convert this StringCompatible into a String. Implementations should not cache result Strings, in order to avoid + * inadvertently allowing promotion of short-lived objects under generational garbage collection. * - * @return A newly constructed String representing the same sequence of characters as this - * StringCompatible. + * @return A newly constructed String representing the same sequence of characters as this StringCompatible. */ @Override @NotNull String toString(); /** - * Convert this StringCompatible into a CompressedString. Implementations should not cache - * result CompressedStrings, in order to avoid inadvertently allowing promotion of short-lived - * objects under generational garbage collection. + * Convert this StringCompatible into a CompressedString. Implementations should not cache result CompressedStrings, + * in order to avoid inadvertently allowing promotion of short-lived objects under generational garbage collection. * - * @return A newly constructed CompressedString representing the same sequence of characters as - * this StringCompatible (or this object, if appropriate). + * @return A newly constructed CompressedString representing the same sequence of characters as this + * StringCompatible (or this object, if appropriate). */ @NotNull CompressedString toCompressedString(); /** - * Convert this StringCompatible into a MappedCompressedString. Implementations should not cache - * result CompressedStrings, in order to avoid inadvertently allowing promotion of short-lived - * objects under generational garbage collection. + * Convert this StringCompatible into a MappedCompressedString. 
Implementations should not cache result + * CompressedStrings, in order to avoid inadvertently allowing promotion of short-lived objects under generational + * garbage collection. * - * @return A newly constructed MappedCompressedString representing the same sequence of - * characters as this StringCompatible (or this object, if appropriate). + * @return A newly constructed MappedCompressedString representing the same sequence of characters as this + * StringCompatible (or this object, if appropriate). */ @NotNull MappedCompressedString toMappedCompressedString(); @@ -49,8 +46,7 @@ public interface StringCompatible extends CharSequence, Comparable /** * Implementations MUST match the current implementation of String.hashCode(). * - * @return A hashcode value for this StringCompatible that matches the value a String of the - * same chars. + * @return A hashcode value for this StringCompatible that matches the value a String of the same chars. */ @Override int hashCode(); @@ -79,10 +75,8 @@ static int hash(@Nullable final CharSequence cs) { if (cs == null) { return 0; } - // NB: For these classes/markers, we know we can trust their hashCode implementation to - // match - // CharSequenceUtils.hashCode(CharSequence), so use hashCode() directly and allow for - // caching. + // NB: For these classes/markers, we know we can trust their hashCode implementation to match + // CharSequenceUtils.hashCode(CharSequence), so use hashCode() directly and allow for caching. 
if (cs instanceof String || cs instanceof StringCompatible) { return cs.hashCode(); } diff --git a/Base/src/main/java/io/deephaven/base/system/AsyncSystem.java b/Base/src/main/java/io/deephaven/base/system/AsyncSystem.java index 9717d1a8d61..5babc2ca613 100644 --- a/Base/src/main/java/io/deephaven/base/system/AsyncSystem.java +++ b/Base/src/main/java/io/deephaven/base/system/AsyncSystem.java @@ -12,7 +12,7 @@ public class AsyncSystem { private static class AsyncSystemExitUncaughtExceptionHandler implements - UncaughtExceptionHandler { + UncaughtExceptionHandler { private final PrintStream out; private final int status; @@ -41,20 +41,19 @@ public static UncaughtExceptionHandler uncaughtExceptionHandler(int status, Prin } /** - * Starts an asynchronous call to {@link System#exit(int)}. A new daemon thread will be started - * and it will invoke only {@code System.exit(status)}. In the event that - * {@link System#exit(int)} throws an exception, the name of the thread and the stacktrace will - * be printed out. + * Starts an asynchronous call to {@link System#exit(int)}. A new daemon thread will be started and it will invoke + * only {@code System.exit(status)}. In the event that {@link System#exit(int)} throws an exception, the name of the + * thread and the stacktrace will be printed out. * *

- * Note: this call will return, unlike a direct call to {@link System#exit(int)}. Callers should - * manage this as appropriate. + * Note: this call will return, unlike a direct call to {@link System#exit(int)}. Callers should manage this as + * appropriate. * * @param name the name to attach to the thread * @param status exit status * @param out the output print stream (on exception) - * @throws SecurityException if a security manager exists and its {@code checkExit} method - * doesn't allow exit with the specified status. + * @throws SecurityException if a security manager exists and its {@code checkExit} method doesn't allow exit with + * the specified status. */ public static void exit(String name, int status, PrintStream out) { // preemptively checks security manager in the same way that System.exit does @@ -63,25 +62,23 @@ public static void exit(String name, int status, PrintStream out) { security.checkExit(status); } createThread(name, status, out) - .start(); + .start(); } /** - * Prints out a message and stacktrace, and then calls {@link #exit(String, int, PrintStream)}. - * This should only be called from {@link UncaughtExceptionHandler uncaught exception - * handlers}. + * Prints out a message and stacktrace, and then calls {@link #exit(String, int, PrintStream)}. This should + * only be called from {@link UncaughtExceptionHandler uncaught exception handlers}. * * @param thread the thread * @param throwable the throwable * @param status the status * @param out the print stream */ - public static void exitUncaught(Thread thread, Throwable throwable, int status, - PrintStream out) { + public static void exitUncaught(Thread thread, Throwable throwable, int status, PrintStream out) { try { out.println(String.format( - "Uncaught exception in thread %s. Shutting down with asynchronous system exit.", - thread)); + "Uncaught exception in thread %s. 
Shutting down with asynchronous system exit.", + thread)); throwable.printStackTrace(out); } finally { exit(thread.getName(), status, out); @@ -96,8 +93,8 @@ public static void exitCaught(Thread thread, Throwable throwable, int status, Pr } /** - * Prints out a message and stacktrace, and then calls {@link #exit(String, int, PrintStream)}. - * This can be called from a thread which catches its own exceptions and wants to exit. + * Prints out a message and stacktrace, and then calls {@link #exit(String, int, PrintStream)}. This can be called + * from a thread which catches its own exceptions and wants to exit. * * @param thread the thread * @param throwable the throwable @@ -106,16 +103,16 @@ public static void exitCaught(Thread thread, Throwable throwable, int status, Pr * @param message the optional additional message */ public static void exitCaught(Thread thread, Throwable throwable, int status, PrintStream out, - @Nullable String message) { + @Nullable String message) { try { if (message == null) { out.println(String.format( - "Caught exception in thread %s. Shutting down with asynchronous system exit.", - thread)); + "Caught exception in thread %s. Shutting down with asynchronous system exit.", + thread)); } else { out.println(String.format( - "Caught exception in thread %s: %s. Shutting down with asynchronous system exit.", - thread, message)); + "Caught exception in thread %s: %s. 
Shutting down with asynchronous system exit.", + thread, message)); } throwable.printStackTrace(out); } finally { @@ -125,9 +122,9 @@ public static void exitCaught(Thread thread, Throwable throwable, int status, Pr private static Thread createThread(String name, int status, PrintStream out) { return new AsyncSystemExitThread( - String.format("AsyncSystemExit[%d,%s]", status, name), - status, - out); + String.format("AsyncSystemExit[%d,%s]", status, name), + status, + out); } private static class AsyncSystemExitThread extends Thread { diff --git a/Base/src/main/java/io/deephaven/base/system/PrintStreamGlobals.java b/Base/src/main/java/io/deephaven/base/system/PrintStreamGlobals.java index 3c67c7d38ae..64b736df52e 100644 --- a/Base/src/main/java/io/deephaven/base/system/PrintStreamGlobals.java +++ b/Base/src/main/java/io/deephaven/base/system/PrintStreamGlobals.java @@ -4,8 +4,8 @@ import java.util.Objects; /** - * Captures {@link System#out} and {@link System#err}. It is primarily useful for logging - * infrastructure where the output streams may have been redirected. + * Captures {@link System#out} and {@link System#err}. It is primarily useful for logging infrastructure where the + * output streams may have been redirected. */ public class PrintStreamGlobals { @@ -18,10 +18,10 @@ public class PrintStreamGlobals { } /** - * Use this method to force this class and its statics to be initialized. Should be used before - * an application is re-directing stdout / stderr if it wants to have global access to the - * original streams. While the other methods in this class could be used for initialization, - * this method provides the appropriate context, and should be used instead. + * Use this method to force this class and its statics to be initialized. Should be used before an application is + * re-directing stdout / stderr if it wants to have global access to the original streams. 
While the other methods + * in this class could be used for initialization, this method provides the appropriate context, and should be used + * instead. */ public static void init() { // empty on purpose diff --git a/Base/src/main/java/io/deephaven/base/system/StandardStreamReceiver.java b/Base/src/main/java/io/deephaven/base/system/StandardStreamReceiver.java index 4adffeae15e..717bd432148 100644 --- a/Base/src/main/java/io/deephaven/base/system/StandardStreamReceiver.java +++ b/Base/src/main/java/io/deephaven/base/system/StandardStreamReceiver.java @@ -4,8 +4,8 @@ import java.util.Optional; /** - * Allows classes to register interest in receiving application level calls to {@link System#out} - * and {@link System#err}. + * Allows classes to register interest in receiving application level calls to {@link System#out} and + * {@link System#err}. */ public interface StandardStreamReceiver { diff --git a/Base/src/main/java/io/deephaven/base/system/StandardStreamState.java b/Base/src/main/java/io/deephaven/base/system/StandardStreamState.java index 471b2f75a32..c5299eec568 100644 --- a/Base/src/main/java/io/deephaven/base/system/StandardStreamState.java +++ b/Base/src/main/java/io/deephaven/base/system/StandardStreamState.java @@ -22,23 +22,22 @@ public StandardStreamState(Set receivers) { public void setupRedirection() throws UnsupportedEncodingException { if (!initialized.compareAndSet(false, true)) { - throw new IllegalStateException( - "May only call StandardStreamState#setupRedirection once"); + throw new IllegalStateException("May only call StandardStreamState#setupRedirection once"); } // get all of the out sinks List outReceivers = receivers.stream() - .map(StandardStreamReceiver::receiveOut) - .filter(Optional::isPresent) - .map(Optional::get) - .collect(Collectors.toList()); + .map(StandardStreamReceiver::receiveOut) + .filter(Optional::isPresent) + .map(Optional::get) + .collect(Collectors.toList()); // get all of the err sinks List errReceivers = 
receivers.stream() - .map(StandardStreamReceiver::receiveErr) - .filter(Optional::isPresent) - .map(Optional::get) - .collect(Collectors.toList()); + .map(StandardStreamReceiver::receiveErr) + .filter(Optional::isPresent) + .map(Optional::get) + .collect(Collectors.toList()); if (!outReceivers.isEmpty()) { PrintStream out = adapt(outReceivers); @@ -57,8 +56,7 @@ public void setupRedirection() throws UnsupportedEncodingException { } } - private static PrintStream adapt(List outputStreams) - throws UnsupportedEncodingException { + private static PrintStream adapt(List outputStreams) throws UnsupportedEncodingException { // TODO (core#88): Figure out appropriate stdout / LogBuffer encoding if (outputStreams.size() == 1) { OutputStream out = outputStreams.get(0); diff --git a/Base/src/main/java/io/deephaven/base/text/Convert.java b/Base/src/main/java/io/deephaven/base/text/Convert.java index 48d1ba67940..554cb727670 100644 --- a/Base/src/main/java/io/deephaven/base/text/Convert.java +++ b/Base/src/main/java/io/deephaven/base/text/Convert.java @@ -24,8 +24,8 @@ public class Convert { public static int MAX_LONG_BYTES = 20; /** - * the maximum number of bytes in the ASCII decimal representation of a double: 17 digits, - * decimal point, sign, 'E', exponent + * the maximum number of bytes in the ASCII decimal representation of a double: 17 digits, decimal point, sign, 'E', + * exponent */ public static int MAX_DOUBLE_BYTES = 24; @@ -134,7 +134,7 @@ public static ByteBuffer appendLong(long n, ByteBuffer b) { // ------------------------------------------------------------------------------------------ private static final ThreadLocal STRING_BUILDER_THREAD_LOCAL = - ThreadLocal.withInitial(() -> new StringBuilder(MAX_DOUBLE_BYTES)); + ThreadLocal.withInitial(() -> new StringBuilder(MAX_DOUBLE_BYTES)); /** * Append a decimal representation of a {@code double} to a {@link ByteBuffer}. 
Works as if @@ -160,8 +160,7 @@ public static ByteBuffer appendDouble(final double input, @NotNull final ByteBuf // ------------------------------------------------------------------------------------------ /** - * The number of days preceding the first day of the given month, ignoring leap days, with Jan - * == 1. + * The number of days preceding the first day of the given month, ignoring leap days, with Jan == 1. */ private static final int[] DAYS_SO_FAR = { 0, 0, 31, 59, 90, 120, // xxx Jan Feb Mar Apr May @@ -195,9 +194,8 @@ private static int countLeapYear(int year) { } /** - * Append an ISO 8601 representation of millis-since-the-epoch timestamp to a byte buffer. The - * output length is always 23 bytes plus the length of the GMT offset suffix: - * YYYY-MM-DDTHH:MM:SS.MMM<suffix>. + * Append an ISO 8601 representation of millis-since-the-epoch timestamp to a byte buffer. The output length is + * always 23 bytes plus the length of the GMT offset suffix: YYYY-MM-DDTHH:MM:SS.MMM<suffix>. * * @param t the timestamp to be converted, millis since 1970-01-01T00:00:00 GMT * @param gmtOffsetSuffix the time zone suffix, or null for no suffix @@ -249,9 +247,8 @@ public static ByteBuffer appendISO8601Millis(long t, byte[] gmtOffsetSuffix, Byt } /** - * Append an ISO 8601 representation of a broken-down time to a byte buffer. The output length - * is always 23 bytes plus the length of the GMT offset suffix: - * YYYY-MM-DDTHH:MM:SS.MMM<suffix>. + * Append an ISO 8601 representation of a broken-down time to a byte buffer. The output length is always 23 bytes + * plus the length of the GMT offset suffix: YYYY-MM-DDTHH:MM:SS.MMM<suffix>. 
* * @param year the year * @param month the month @@ -266,8 +263,8 @@ public static ByteBuffer appendISO8601Millis(long t, byte[] gmtOffsetSuffix, Byt * @throws java.nio.BufferOverflowException if there is not enough space in the buffer */ public static ByteBuffer appendISO8601(int year, int month, int day, - int hour, int minute, int second, int millis, - byte[] gmtOffsetSuffix, ByteBuffer b) { + int hour, int minute, int second, int millis, + byte[] gmtOffsetSuffix, ByteBuffer b) { b.put((byte) ('0' + year / 1000)); b.put((byte) ('0' + (year % 1000) / 100)); b.put((byte) ('0' + (year % 100) / 10)); @@ -298,9 +295,8 @@ public static ByteBuffer appendISO8601(int year, int month, int day, } /** - * Append an ISO 8601 representation of micros-since-the-epoch timestamp to a byte buffer. The - * output length is always 26 bytes plus the length of the GMT offset suffix: - * YYYY-MM-DDTHH:MM:SS.MMMMMM<suffix>. + * Append an ISO 8601 representation of micros-since-the-epoch timestamp to a byte buffer. The output length is + * always 26 bytes plus the length of the GMT offset suffix: YYYY-MM-DDTHH:MM:SS.MMMMMM<suffix>. * * @param t the timestamp to be converted, micros since 1970-01-01T00:00:00 GMT * @param gmtOffsetSuffix the time zone suffix, or null for no suffix @@ -350,14 +346,12 @@ public static ByteBuffer appendISO8601Micros(long t, byte[] gmtOffsetSuffix, Byt minute %= 60; /* put it into the byte buffer */ - return appendISO8601Micros(year, month, day, hour, minute, second, millis, (int) micros, - gmtOffsetSuffix, b); + return appendISO8601Micros(year, month, day, hour, minute, second, millis, (int) micros, gmtOffsetSuffix, b); } /** - * Append an ISO 8601 representation of a broken-down time to a byte buffer. The output length - * is always 23 bytes plus the length of the GMT offset suffix: - * YYYY-MM-DDTHH:MM:SS.MMM<suffix>. + * Append an ISO 8601 representation of a broken-down time to a byte buffer. 
The output length is always 23 bytes + * plus the length of the GMT offset suffix: YYYY-MM-DDTHH:MM:SS.MMM<suffix>. * * @param year the year * @param month the month @@ -373,8 +367,8 @@ public static ByteBuffer appendISO8601Micros(long t, byte[] gmtOffsetSuffix, Byt * @throws java.nio.BufferOverflowException if there is not enough space in the buffer */ public static ByteBuffer appendISO8601Micros(int year, int month, int day, - int hour, int minute, int second, int millis, int micros, - byte[] gmtOffsetSuffix, ByteBuffer b) { + int hour, int minute, int second, int millis, int micros, + byte[] gmtOffsetSuffix, ByteBuffer b) { b.put((byte) ('0' + year / 1000)); b.put((byte) ('0' + (year % 1000) / 100)); b.put((byte) ('0' + (year % 100) / 10)); diff --git a/Base/src/main/java/io/deephaven/base/text/TimestampBuffer.java b/Base/src/main/java/io/deephaven/base/text/TimestampBuffer.java index f02fe675931..26ae405c8be 100644 --- a/Base/src/main/java/io/deephaven/base/text/TimestampBuffer.java +++ b/Base/src/main/java/io/deephaven/base/text/TimestampBuffer.java @@ -16,8 +16,7 @@ public class TimestampBuffer { private final ZoneRules zoneRules; private class ThreadLocalState { - private long currentTimeMillis = Long.MIN_VALUE; // Ensure we enter the calculation logic - // the first time through + private long currentTimeMillis = Long.MIN_VALUE; // Ensure we enter the calculation logic the first time through // Magic values for the previous/next transition times: // MAX_VALUE/MIN_VALUE mean they haven't been initialized // MIN_VALUE/MAX_VALUE mean they don't have previous/next transitions (e.g. 
GMT) @@ -28,12 +27,12 @@ private class ThreadLocalState { private final ByteBuffer buffer = ByteBuffer.allocate(Convert.MAX_ISO8601_BYTES); public void update(long nowMillis) { - // See if the change is more than the seconds/millis component - if so then we should - // check for DST transition and + // See if the change is more than the seconds/millis component - if so then we should check for DST + // transition and // regenerate the whole buffer if (nowMillis / 60_000 != currentTimeMillis / 60_000) { if ((nowMillis < previousDSTTransitionMillis) || - (nowMillis >= nextDSTTransitionMillis)) { + (nowMillis >= nextDSTTransitionMillis)) { calculateDSTTransitions(nowMillis); } @@ -56,18 +55,14 @@ public void update(long nowMillis) { private void calculateDSTTransitions(final long nowMillis) { final Instant nowInstant = Instant.ofEpochMilli(nowMillis); - final ZoneOffsetTransition previousTransitionOffset = - zoneRules.previousTransition(nowInstant); + final ZoneOffsetTransition previousTransitionOffset = zoneRules.previousTransition(nowInstant); final ZoneOffsetTransition nextTransitionOffset = zoneRules.nextTransition(nowInstant); - // It's possible there's no previous or next transition, in that case set the value so - // we'll never cross it + // It's possible there's no previous or next transition, in that case set the value so we'll never cross it previousDSTTransitionMillis = - previousTransitionOffset != null ? previousTransitionOffset.toEpochSecond() * 1000 - : Long.MIN_VALUE; + previousTransitionOffset != null ? previousTransitionOffset.toEpochSecond() * 1000 : Long.MIN_VALUE; nextDSTTransitionMillis = - nextTransitionOffset != null ? nextTransitionOffset.toEpochSecond() * 1000 - : Long.MAX_VALUE; + nextTransitionOffset != null ? 
nextTransitionOffset.toEpochSecond() * 1000 : Long.MAX_VALUE; gmtOffsetMillis = zoneRules.getOffset(nowInstant).getTotalSeconds() * 1000L; @@ -87,7 +82,7 @@ private void calculateDSTTransitions(final long nowMillis) { } private ThreadLocal threadLocals = - ThreadLocal.withInitial(TimestampBuffer.ThreadLocalState::new); + ThreadLocal.withInitial(TimestampBuffer.ThreadLocalState::new); public TimestampBuffer(TimeZone tz) { zoneRules = tz.toZoneId().getRules(); @@ -99,10 +94,9 @@ public TimestampBuffer(long localNow, TimeZone tz) { } /** - * Return a thread-local byte buffer containing the give time, formatted in ISO 8601. The - * buffer's position is set to zero in preparation for writing its contents to another buffer or - * a channel. Since the buffer is thread-local, the caller can safely assume that it won't - * change until the same thread accesses this TimestampBuffer again. + * Return a thread-local byte buffer containing the give time, formatted in ISO 8601. The buffer's position is set + * to zero in preparation for writing its contents to another buffer or a channel. Since the buffer is thread-local, + * the caller can safely assume that it won't change until the same thread accesses this TimestampBuffer again. 
*/ public ByteBuffer getTimestamp(long nowMillis) { ThreadLocalState state = threadLocals.get(); diff --git a/Base/src/main/java/io/deephaven/base/text/TimestampBufferMicros.java b/Base/src/main/java/io/deephaven/base/text/TimestampBufferMicros.java index 5dbdd5514b4..025ff581407 100644 --- a/Base/src/main/java/io/deephaven/base/text/TimestampBufferMicros.java +++ b/Base/src/main/java/io/deephaven/base/text/TimestampBufferMicros.java @@ -15,8 +15,7 @@ public class TimestampBufferMicros { private final ZoneRules zoneRules; private class ThreadLocalState { - private long currentTimeMicros = Long.MIN_VALUE; // Ensure we enter the calculation logic - // the first time through + private long currentTimeMicros = Long.MIN_VALUE; // Ensure we enter the calculation logic the first time through // Magic values for the previous/next transition times: // MAX_VALUE/MIN_VALUE mean they haven't been initialized // MIN_VALUE/MAX_VALUE mean they don't have previous/next transitions (e.g. GMT) @@ -27,12 +26,12 @@ private class ThreadLocalState { private final ByteBuffer buffer = ByteBuffer.allocate(Convert.MAX_ISO8601_MICROS_BYTES); public void update(long nowMicros) { - // See if the change is more than the seconds/millis/micros component - if so then we - // should check for DST transition and + // See if the change is more than the seconds/millis/micros component - if so then we should check for DST + // transition and // regenerate the whole buffer if (nowMicros / 60_000_000 != currentTimeMicros / 60_000_000) { if ((nowMicros < previousDSTTransitionMicros) || - (nowMicros >= nextDSTTransitionMicros)) { + (nowMicros >= nextDSTTransitionMicros)) { calculateDSTTransitions(nowMicros); } @@ -45,11 +44,9 @@ public void update(long nowMicros) { v += 60_000_000; } // for dates before the epoch buffer.put(Convert.ISO8601_SECOND_OFFSET, (byte) ('0' + (v / 10_000_000))); - buffer.put(Convert.ISO8601_SECOND_OFFSET + 1, - (byte) ('0' + (v % 10_000_000) / 1_000_000)); + 
buffer.put(Convert.ISO8601_SECOND_OFFSET + 1, (byte) ('0' + (v % 10_000_000) / 1_000_000)); buffer.put(Convert.ISO8601_MILLIS_OFFSET, (byte) ('0' + (v % 1_000_000) / 100_000)); - buffer.put(Convert.ISO8601_MILLIS_OFFSET + 1, - (byte) ('0' + (v % 100_000) / 10_000)); + buffer.put(Convert.ISO8601_MILLIS_OFFSET + 1, (byte) ('0' + (v % 100_000) / 10_000)); buffer.put(Convert.ISO8601_MILLIS_OFFSET + 2, (byte) ('0' + (v % 10_000) / 1000)); buffer.put(Convert.ISO8601_MICROS_OFFSET, (byte) ('0' + (v % 1000) / 100)); buffer.put(Convert.ISO8601_MICROS_OFFSET + 1, (byte) ('0' + (v % 100) / 10)); @@ -60,18 +57,16 @@ public void update(long nowMicros) { private void calculateDSTTransitions(final long nowMicros) { final Instant nowInstant = Instant.ofEpochMilli(nowMicros / 1000); - final ZoneOffsetTransition previousTransitionOffset = - zoneRules.previousTransition(nowInstant); + final ZoneOffsetTransition previousTransitionOffset = zoneRules.previousTransition(nowInstant); final ZoneOffsetTransition nextTransitionOffset = zoneRules.nextTransition(nowInstant); - // It's possible there's no previous or next transition, in that case set the value so - // we'll never cross it - previousDSTTransitionMicros = previousTransitionOffset != null - ? previousTransitionOffset.toEpochSecond() * 1000L * 1000L - : Long.MIN_VALUE; + // It's possible there's no previous or next transition, in that case set the value so we'll never cross it + previousDSTTransitionMicros = + previousTransitionOffset != null ? previousTransitionOffset.toEpochSecond() * 1000L * 1000L + : Long.MIN_VALUE; nextDSTTransitionMicros = - nextTransitionOffset != null ? nextTransitionOffset.toEpochSecond() * 1000L * 1000L - : Long.MAX_VALUE; + nextTransitionOffset != null ? 
nextTransitionOffset.toEpochSecond() * 1000L * 1000L + : Long.MAX_VALUE; gmtOffsetMicros = zoneRules.getOffset(nowInstant).getTotalSeconds() * 1000L * 1000L; @@ -81,8 +76,7 @@ private void calculateDSTTransitions(final long nowMicros) { gmtOffsetSuffix = new byte[5]; gmtOffsetSuffix[0] = (byte) (gmtOffsetMicros < 0 ? '-' : '+'); int hours = (int) Math.abs(gmtOffsetMicros / 3_600_000 / 1000); - int minutes = - (int) ((Math.abs(gmtOffsetMicros / 1000) - hours * 3_600_000) / 60_000); + int minutes = (int) ((Math.abs(gmtOffsetMicros / 1000) - hours * 3_600_000) / 60_000); gmtOffsetSuffix[1] = (byte) ('0' + hours / 10); gmtOffsetSuffix[2] = (byte) ('0' + hours % 10); gmtOffsetSuffix[3] = (byte) ('0' + minutes / 10); @@ -91,8 +85,7 @@ private void calculateDSTTransitions(final long nowMicros) { } } - private ThreadLocal threadLocals = - ThreadLocal.withInitial(ThreadLocalState::new); + private ThreadLocal threadLocals = ThreadLocal.withInitial(ThreadLocalState::new); public TimestampBufferMicros(TimeZone tz) { zoneRules = tz.toZoneId().getRules(); @@ -104,10 +97,9 @@ public TimestampBufferMicros(long localNowMicros, TimeZone tz) { } /** - * Return a thread-local byte buffer containing the give time, formatted in ISO 8601. The - * buffer's position is set to zero in preparation for writing its contents to another buffer or - * a channel. Since the buffer is thread-local, the caller can safely assume that it won't - * change until the same thread accesses this TimestampBuffer again. + * Return a thread-local byte buffer containing the give time, formatted in ISO 8601. The buffer's position is set + * to zero in preparation for writing its contents to another buffer or a channel. Since the buffer is thread-local, + * the caller can safely assume that it won't change until the same thread accesses this TimestampBuffer again. 
*/ public ByteBuffer getTimestamp(long nowMicros) { ThreadLocalState state = threadLocals.get(); @@ -119,8 +111,7 @@ public ByteBuffer getTimestamp(long nowMicros) { /** * Format the current time into a ByteBuffer in according to ISO 8601. * - * @throws java.nio.BufferOverflowException if there is not enough room in the destination - * buffer. + * @throws java.nio.BufferOverflowException if there is not enough room in the destination buffer. */ public void getTimestamp(long nowMicros, ByteBuffer dest) { dest.put(getTimestamp(nowMicros)); diff --git a/Base/src/main/java/io/deephaven/base/verify/Assert.java b/Base/src/main/java/io/deephaven/base/verify/Assert.java index fe0f77c3220..e7461deadee 100644 --- a/Base/src/main/java/io/deephaven/base/verify/Assert.java +++ b/Base/src/main/java/io/deephaven/base/verify/Assert.java @@ -9,14 +9,12 @@ // -------------------------------------------------------------------- /** - * Assertion methods for simple runtime program verification. Failed assertions throw - * {@link AssertionFailure}. + * Assertion methods for simple runtime program verification. Failed assertions throw {@link AssertionFailure}. *

* Methods: *

    *
  • void assertion(boolean condition, String conditionText[, String detailMessage]) - *
  • void assertion(boolean condition, String conditionText, value0, String name0, value1, String - * name0, ... ) + *
  • void assertion(boolean condition, String conditionText, value0, String name0, value1, String name0, ... ) *
*
    *
  • void statementNeverExecuted() @@ -35,8 +33,8 @@ *
      *
    • void eq/neq(boolean/char/byte/short/int/long/float/double, String name0, * boolean/char/byte/short/int/long/float/double[, String name1]) - *
    • void lt/leq/gt/geq(char/byte/short/int/long/float/double, String name0, - * char/byte/short/int/long/float/double[, String name1]) + *
    • void lt/leq/gt/geq(char/byte/short/int/long/float/double, String name0, char/byte/short/int/long/float/double[, + * String name1]) *
    *
      *
    • void eqFalse/neqFalse/eqTrue/neqTrue(boolean, String name) @@ -62,8 +60,7 @@ *
    *
  • equals corresponds to Object.equals (preceded by necessary null checks), e.g., *
      - *
    • For Object a and b, Assert.equals(a, "a", b, "b") corresponds to assert (a!= null && b != - * null && a.equals(b)) + *
    • For Object a and b, Assert.equals(a, "a", b, "b") corresponds to assert (a!= null && b != null && a.equals(b)) *
    • for String s, Assert.nonempty(s, "s") corresponds to assert (s != null && s.length() != 0) *
    *
@@ -86,8 +83,8 @@ private Assert() {} // ---------------------------------------------------------------- private static void fail(String conditionText) { - final AssertionFailure assertionFailure = new AssertionFailure( - ExceptionMessageUtil.failureMessage("Assertion", "asserted", conditionText, null)); + final AssertionFailure assertionFailure = + new AssertionFailure(ExceptionMessageUtil.failureMessage("Assertion", "asserted", conditionText, null)); if (onAssertionCallback != null) { try { onAssertionCallback.accept(assertionFailure); @@ -99,8 +96,8 @@ private static void fail(String conditionText) { // ---------------------------------------------------------------- private static void fail(String conditionText, String detailMessage) { - final AssertionFailure assertionFailure = new AssertionFailure(ExceptionMessageUtil - .failureMessage("Assertion", "asserted", conditionText, detailMessage)); + final AssertionFailure assertionFailure = new AssertionFailure( + ExceptionMessageUtil.failureMessage("Assertion", "asserted", conditionText, detailMessage)); if (onAssertionCallback != null) { try { onAssertionCallback.accept(assertionFailure); @@ -137,65 +134,61 @@ public static void assertion(boolean condition, String conditionText, Object o0, } } - public static void assertion(boolean condition, String conditionText, Object o0, String name0, - Object o1, String name1) { + public static void assertion(boolean condition, String conditionText, Object o0, String name0, Object o1, + String name1) { if (!(condition)) { fail(conditionText, ExceptionMessageUtil.valueAndName(o0, name0, o1, name1)); } } - public static void assertion(boolean condition, String conditionText, Object o0, String name0, - Object o1, String name1, Object o2, String name2) { + public static void assertion(boolean condition, String conditionText, Object o0, String name0, Object o1, + String name1, Object o2, String name2) { if (!(condition)) { fail(conditionText, 
ExceptionMessageUtil.valueAndName(o0, name0, o1, name1, o2, name2)); } } - public static void assertion(boolean condition, String conditionText, Object o0, String name0, - Object o1, String name1, int i2, String name2) { + public static void assertion(boolean condition, String conditionText, Object o0, String name0, Object o1, + String name1, int i2, String name2) { if (!(condition)) { - fail(conditionText, - ExceptionMessageUtil.concat(ExceptionMessageUtil.valueAndName(o0, name0, o1, name1), + fail(conditionText, ExceptionMessageUtil.concat(ExceptionMessageUtil.valueAndName(o0, name0, o1, name1), ExceptionMessageUtil.valueAndName(i2, name2))); } } - public static void assertion(boolean condition, String conditionText, Object o0, String name0, - Object o1, String name1, Object o2, String name2, Object o3, String name3) { + public static void assertion(boolean condition, String conditionText, Object o0, String name0, Object o1, + String name1, Object o2, String name2, Object o3, String name3) { if (!(condition)) { - fail(conditionText, - ExceptionMessageUtil.valueAndName(o0, name0, o1, name1, o2, name2, o3, name3)); + fail(conditionText, ExceptionMessageUtil.valueAndName(o0, name0, o1, name1, o2, name2, o3, name3)); } } // ---------------------------------------------------------------- /** assert (condition, conditionText, boolean b0, String name0, ... 
) */ - public static void assertion(boolean condition, String conditionText, boolean b0, - String name0) { + public static void assertion(boolean condition, String conditionText, boolean b0, String name0) { if (!(condition)) { fail(conditionText, ExceptionMessageUtil.valueAndName(b0, name0)); } } - public static void assertion(boolean condition, String conditionText, boolean b0, String name0, - boolean b1, String name1) { + public static void assertion(boolean condition, String conditionText, boolean b0, String name0, boolean b1, + String name1) { if (!(condition)) { fail(conditionText, ExceptionMessageUtil.valueAndName(b0, name0, b1, name1)); } } - public static void assertion(boolean condition, String conditionText, boolean b0, String name0, - boolean b1, String name1, boolean b2, String name2) { + public static void assertion(boolean condition, String conditionText, boolean b0, String name0, boolean b1, + String name1, boolean b2, String name2) { if (!(condition)) { fail(conditionText, ExceptionMessageUtil.valueAndName(b0, name0, b1, name1, b2, name2)); } } - public static void assertion(boolean condition, String conditionText, boolean b0, String name0, - boolean b1, String name1, boolean b2, String name2, boolean b3, String name3) { + public static void assertion(boolean condition, String conditionText, boolean b0, String name0, boolean b1, + String name1, boolean b2, String name2, boolean b3, String name3) { if (!(condition)) { - fail(conditionText, - ExceptionMessageUtil.valueAndName(b0, name0, b1, name1, b2, name2, b3, name3)); + fail(conditionText, ExceptionMessageUtil.valueAndName(b0, name0, b1, name1, b2, name2, b3, name3)); } } @@ -207,8 +200,7 @@ public static void assertion(boolean condition, String conditionText, int i0, St } } - public static void assertion(boolean condition, String conditionText, int i0, String name0, - int i1, String name1) { + public static void assertion(boolean condition, String conditionText, int i0, String name0, int i1, 
String name1) { if (!(condition)) { fail(conditionText, ExceptionMessageUtil.valueAndName(i0, name0, i1, name1)); } @@ -239,7 +231,7 @@ public static AssertionFailure statementNeverExecuted(String statementDescriptio public static AssertionFailure exceptionNeverCaught(Exception e) { try { fail(e.getClass().getName() + " is never caught", - e.getClass().getName() + "(" + e.getMessage() + ") caught"); + e.getClass().getName() + "(" + e.getMessage() + ") caught"); } catch (AssertionFailure assertionFailure) { assertionFailure.initCause(e); throw assertionFailure; @@ -249,11 +241,9 @@ public static AssertionFailure exceptionNeverCaught(Exception e) { // ---------------------------------------------------------------- /** assert (tryStatementDescription succeeds, Exception e) */ - public static AssertionFailure exceptionNeverCaught(String tryStatementDescription, - Exception e) { + public static AssertionFailure exceptionNeverCaught(String tryStatementDescription, Exception e) { try { - fail(tryStatementDescription + " succeeds", - e.getClass().getName() + "(" + e.getMessage() + ") caught"); + fail(tryStatementDescription + " succeeds", e.getClass().getName() + "(" + e.getMessage() + ") caught"); } catch (AssertionFailure assertionFailure) { assertionFailure.initCause(e); throw assertionFailure; @@ -355,10 +345,8 @@ public static void notHoldsLock(Object o, String name) { /** assert (o instanceof type) */ public static void instanceOf(Object o, String name, Class type) { if (!type.isInstance(o)) { - fail(name + " instanceof " + type, - null == o ? ExceptionMessageUtil.valueAndName(o, name) - : name + " instanceof " + o.getClass() + " (" - + ExceptionMessageUtil.valueAndName(o, name) + ")"); + fail(name + " instanceof " + type, null == o ? 
ExceptionMessageUtil.valueAndName(o, name) + : name + " instanceof " + o.getClass() + " (" + ExceptionMessageUtil.valueAndName(o, name) + ")"); } } @@ -366,8 +354,8 @@ public static void instanceOf(Object o, String name, Class type) { /** assert !(o instanceof type) */ public static void notInstanceOf(Object o, String name, Class type) { if (type.isInstance(o)) { - fail("!(" + name + " instanceof " + type + ")", name + " instanceof " + o.getClass() - + " (" + ExceptionMessageUtil.valueAndName(o, name) + ")"); + fail("!(" + name + " instanceof " + type + ")", + name + " instanceof " + o.getClass() + " (" + ExceptionMessageUtil.valueAndName(o, name) + ")"); } } @@ -417,8 +405,7 @@ public static void eq(char c0, String name0, char c1, String name1) { public static void eq(char c0, String name0, char c1) { if (!(c0 == c1)) { - fail(name0 + " == " + ExceptionMessageUtil.valueString(c1), - ExceptionMessageUtil.valueAndName(c0, name0)); + fail(name0 + " == " + ExceptionMessageUtil.valueString(c1), ExceptionMessageUtil.valueAndName(c0, name0)); } } @@ -533,8 +520,7 @@ public static void neq(char c0, String name0, char c1, String name1) { public static void neq(char c0, String name0, char c1) { if (!(c0 != c1)) { - fail(name0 + " != " + ExceptionMessageUtil.valueString(c1), - ExceptionMessageUtil.valueAndName(c0, name0)); + fail(name0 + " != " + ExceptionMessageUtil.valueString(c1), ExceptionMessageUtil.valueAndName(c0, name0)); } } @@ -635,8 +621,7 @@ public static void lt(char c0, String name0, char c1, String name1) { public static void lt(char c0, String name0, char c1) { if (!(c0 < c1)) { - fail(name0 + " < " + ExceptionMessageUtil.valueString(c1), - ExceptionMessageUtil.valueAndName(c0, name0)); + fail(name0 + " < " + ExceptionMessageUtil.valueString(c1), ExceptionMessageUtil.valueAndName(c0, name0)); } } @@ -737,8 +722,7 @@ public static void leq(char c0, String name0, char c1, String name1) { public static void leq(char c0, String name0, char c1) { if (!(c0 <= c1)) 
{ - fail(name0 + " <= " + ExceptionMessageUtil.valueString(c1), - ExceptionMessageUtil.valueAndName(c0, name0)); + fail(name0 + " <= " + ExceptionMessageUtil.valueString(c1), ExceptionMessageUtil.valueAndName(c0, name0)); } } @@ -839,8 +823,7 @@ public static void gt(char c0, String name0, char c1, String name1) { public static void gt(char c0, String name0, char c1) { if (!(c0 > c1)) { - fail(name0 + " > " + ExceptionMessageUtil.valueString(c1), - ExceptionMessageUtil.valueAndName(c0, name0)); + fail(name0 + " > " + ExceptionMessageUtil.valueString(c1), ExceptionMessageUtil.valueAndName(c0, name0)); } } @@ -941,8 +924,7 @@ public static void geq(char c0, String name0, char c1, String name1) { public static void geq(char c0, String name0, char c1) { if (!(c0 >= c1)) { - fail(name0 + " >= " + ExceptionMessageUtil.valueString(c1), - ExceptionMessageUtil.valueAndName(c0, name0)); + fail(name0 + " >= " + ExceptionMessageUtil.valueString(c1), ExceptionMessageUtil.valueAndName(c0, name0)); } } @@ -1402,8 +1384,7 @@ public static void eq(Object o0, String name0, Object o1, String name1) { public static void eq(Object o0, String name0, Object o1) { // noinspection ObjectEquality if (!(o0 == o1)) { - fail(name0 + " == " + ExceptionMessageUtil.valueString(o1), - ExceptionMessageUtil.valueAndName(o0, name0)); + fail(name0 + " == " + ExceptionMessageUtil.valueString(o1), ExceptionMessageUtil.valueAndName(o0, name0)); } } @@ -1419,8 +1400,7 @@ public static void neq(Object o0, String name0, Object o1, String name1) { public static void neq(Object o0, String name0, Object o1) { // noinspection ObjectEquality if (!(o0 != o1)) { - fail(name0 + " != " + ExceptionMessageUtil.valueString(o1), - ExceptionMessageUtil.valueAndName(o0, name0)); + fail(name0 + " != " + ExceptionMessageUtil.valueString(o1), ExceptionMessageUtil.valueAndName(o0, name0)); } } @@ -1471,8 +1451,7 @@ public static void equals(Object o0, String name0, Object o1, String name1) { neqNull(o0, name0); neqNull(o1, 
name1); if (!(o0.equals(o1))) { - fail(name0 + ".equals(" + name1 + ")", - ExceptionMessageUtil.valueAndName(o0, name0, o1, name1)); + fail(name0 + ".equals(" + name1 + ")", ExceptionMessageUtil.valueAndName(o0, name0, o1, name1)); } } @@ -1483,7 +1462,7 @@ public static void equals(Object o0, String name0, Object o1) { neqNull(o1, "o1"); if (!(o0.equals(o1))) { fail(name0 + ".equals(" + ExceptionMessageUtil.valueString(o1) + ")", - ExceptionMessageUtil.valueAndName(o0, name0)); + ExceptionMessageUtil.valueAndName(o0, name0)); } } @@ -1491,8 +1470,7 @@ public static void equals(Object o0, String name0, Object o1) { /** assert ((o0 == null && o1 == null) || (o0 != null && o0.equals(o1))) */ public static void nullSafeEquals(Object o0, String name0, Object o1, String name1) { if ((null == o0 && null != o1) || (null != o1 && !o0.equals(o1))) { - fail(name0 + ".equals(" + name1 + ")", - ExceptionMessageUtil.valueAndName(o0, name0, o1, name1)); + fail(name0 + ".equals(" + name1 + ")", ExceptionMessageUtil.valueAndName(o0, name0, o1, name1)); } } @@ -1503,8 +1481,7 @@ public static void notEquals(Object o0, String name0, Object o1, String name1) { neqNull(o0, name0); neqNull(o1, name1); if (o0.equals(o1)) { - fail("!" + name0 + ".equals(" + name1 + ")", - ExceptionMessageUtil.valueAndName(o0, name0, o1, name1)); + fail("!" + name0 + ".equals(" + name1 + ")", ExceptionMessageUtil.valueAndName(o0, name0, o1, name1)); } } @@ -1515,7 +1492,7 @@ public static void notEquals(Object o0, String name0, Object o1) { neqNull(o1, "o1"); if (o0.equals(o1)) { fail("!" 
+ name0 + ".equals(" + ExceptionMessageUtil.valueString(o1) + ")", - ExceptionMessageUtil.valueAndName(o0, name0)); + ExceptionMessageUtil.valueAndName(o0, name0)); } } diff --git a/Base/src/main/java/io/deephaven/base/verify/ExceptionMessageUtil.java b/Base/src/main/java/io/deephaven/base/verify/ExceptionMessageUtil.java index be96cff3549..f1aab8f702b 100644 --- a/Base/src/main/java/io/deephaven/base/verify/ExceptionMessageUtil.java +++ b/Base/src/main/java/io/deephaven/base/verify/ExceptionMessageUtil.java @@ -7,8 +7,7 @@ /** * String utility methods related to assertions. *
    - *
  • (package) String failureMessage(String assertionType, String preamble, String message, String - * detailMessage) + *
  • (package) String failureMessage(String assertionType, String preamble, String message, String detailMessage) *
  • (package) String valueString(Object) *
  • (public) String valueAndName(value0, String name0, value1, String name1, ... ) *
  • (public) String concat(String valueAndName0, String valueAndName1, ... ) @@ -22,10 +21,10 @@ public final class ExceptionMessageUtil { // ---------------------------------------------------------------- /** Return message string for assertion failure. */ public static String failureMessage( - String assertionType, // e.g., "Assertion" or "Requirement" - String assertedText, // e.g., "asserted" or "required" - String conditionText, - String detailMessage) { + String assertionType, // e.g., "Assertion" or "Requirement" + String assertedText, // e.g., "asserted" or "required" + String conditionText, + String detailMessage) { String resultMessage = assertionType + " failed"; if (conditionText != null && conditionText.length() > 0) { resultMessage += ": " + assertedText + " " + conditionText; @@ -84,334 +83,334 @@ public static String fillChar(char c, int count) { // ---------------------------------------------------------------- /** Return the values and names of one or more Objects. */ public static String valueAndName( - Object o0, String name0) { + Object o0, String name0) { return name0 + " == " + valueString(o0); } public static String valueAndName( - Object o0, String name0, - Object o1, String name1) { + Object o0, String name0, + Object o1, String name1) { return concat( - valueAndName(o0, name0), - valueAndName(o1, name1)); + valueAndName(o0, name0), + valueAndName(o1, name1)); } public static String valueAndName( - Object o0, String name0, - Object o1, String name1, - Object o2, String name2) { + Object o0, String name0, + Object o1, String name1, + Object o2, String name2) { return concat( - valueAndName(o0, name0), - valueAndName(o1, name1), - valueAndName(o2, name2)); + valueAndName(o0, name0), + valueAndName(o1, name1), + valueAndName(o2, name2)); } public static String valueAndName( - Object o0, String name0, - Object o1, String name1, - Object o2, String name2, - Object o3, String name3) { + Object o0, String name0, + Object o1, String name1, + 
Object o2, String name2, + Object o3, String name3) { return concat( - valueAndName(o0, name0), - valueAndName(o1, name1), - valueAndName(o2, name2), - valueAndName(o3, name3)); + valueAndName(o0, name0), + valueAndName(o1, name1), + valueAndName(o2, name2), + valueAndName(o3, name3)); } // ---------------------------------------------------------------- /** Return the values and names of one or more booleans. */ public static String valueAndName( - boolean b0, String name0) { + boolean b0, String name0) { return name0 + " == " + b0; } public static String valueAndName( - boolean b0, String name0, - boolean b1, String name1) { + boolean b0, String name0, + boolean b1, String name1) { return concat( - valueAndName(b0, name0), - valueAndName(b1, name1)); + valueAndName(b0, name0), + valueAndName(b1, name1)); } public static String valueAndName( - boolean b0, String name0, - boolean b1, String name1, - boolean b2, String name2) { + boolean b0, String name0, + boolean b1, String name1, + boolean b2, String name2) { return concat( - valueAndName(b0, name0), - valueAndName(b1, name1), - valueAndName(b2, name2)); + valueAndName(b0, name0), + valueAndName(b1, name1), + valueAndName(b2, name2)); } public static String valueAndName( - boolean b0, String name0, - boolean b1, String name1, - boolean b2, String name2, - boolean b3, String name3) { + boolean b0, String name0, + boolean b1, String name1, + boolean b2, String name2, + boolean b3, String name3) { return concat( - valueAndName(b0, name0), - valueAndName(b1, name1), - valueAndName(b2, name2), - valueAndName(b3, name3)); + valueAndName(b0, name0), + valueAndName(b1, name1), + valueAndName(b2, name2), + valueAndName(b3, name3)); } // ---------------------------------------------------------------- /** Return the values and names of one or more chars */ public static String valueAndName( - char c0, String name0) { + char c0, String name0) { return name0 + " == " + valueString(c0); } public static String valueAndName( - 
char c0, String name0, - char c1, String name1) { + char c0, String name0, + char c1, String name1) { return concat( - valueAndName(c0, name0), - valueAndName(c1, name1)); + valueAndName(c0, name0), + valueAndName(c1, name1)); } public static String valueAndName( - char c0, String name0, - char c1, String name1, - char c2, String name2) { + char c0, String name0, + char c1, String name1, + char c2, String name2) { return concat( - valueAndName(c0, name0), - valueAndName(c1, name1), - valueAndName(c2, name2)); + valueAndName(c0, name0), + valueAndName(c1, name1), + valueAndName(c2, name2)); } public static String valueAndName( - char c0, String name0, - char c1, String name1, - char c2, String name2, - char c3, String name3) { + char c0, String name0, + char c1, String name1, + char c2, String name2, + char c3, String name3) { return concat( - valueAndName(c0, name0), - valueAndName(c1, name1), - valueAndName(c2, name2), - valueAndName(c3, name3)); + valueAndName(c0, name0), + valueAndName(c1, name1), + valueAndName(c2, name2), + valueAndName(c3, name3)); } // ---------------------------------------------------------------- /** Return the values and names of one or more bytes. 
*/ public static String valueAndName( - byte b0, String name0) { + byte b0, String name0) { return name0 + " == " + b0; } public static String valueAndName( - byte b0, String name0, - byte b1, String name1) { + byte b0, String name0, + byte b1, String name1) { return concat( - valueAndName(b0, name0), - valueAndName(b1, name1)); + valueAndName(b0, name0), + valueAndName(b1, name1)); } public static String valueAndName( - byte b0, String name0, - byte b1, String name1, - byte b2, String name2) { + byte b0, String name0, + byte b1, String name1, + byte b2, String name2) { return concat( - valueAndName(b0, name0), - valueAndName(b1, name1), - valueAndName(b2, name2)); + valueAndName(b0, name0), + valueAndName(b1, name1), + valueAndName(b2, name2)); } public static String valueAndName( - byte b0, String name0, - byte b1, String name1, - byte b2, String name2, - byte b3, String name3) { + byte b0, String name0, + byte b1, String name1, + byte b2, String name2, + byte b3, String name3) { return concat( - valueAndName(b0, name0), - valueAndName(b1, name1), - valueAndName(b2, name2), - valueAndName(b3, name3)); + valueAndName(b0, name0), + valueAndName(b1, name1), + valueAndName(b2, name2), + valueAndName(b3, name3)); } // ---------------------------------------------------------------- /** Return the values and names of one or more shorts. 
*/ public static String valueAndName( - short s0, String name0) { + short s0, String name0) { return name0 + " == " + s0; } public static String valueAndName( - short s0, String name0, - short s1, String name1) { + short s0, String name0, + short s1, String name1) { return concat( - valueAndName(s0, name0), - valueAndName(s1, name1)); + valueAndName(s0, name0), + valueAndName(s1, name1)); } public static String valueAndName( - short s0, String name0, - short s1, String name1, - short s2, String name2) { + short s0, String name0, + short s1, String name1, + short s2, String name2) { return concat( - valueAndName(s0, name0), - valueAndName(s1, name1), - valueAndName(s2, name2)); + valueAndName(s0, name0), + valueAndName(s1, name1), + valueAndName(s2, name2)); } public static String valueAndName( - short s0, String name0, - short s1, String name1, - short s2, String name2, - short s3, String name3) { + short s0, String name0, + short s1, String name1, + short s2, String name2, + short s3, String name3) { return concat( - valueAndName(s0, name0), - valueAndName(s1, name1), - valueAndName(s2, name2), - valueAndName(s3, name3)); + valueAndName(s0, name0), + valueAndName(s1, name1), + valueAndName(s2, name2), + valueAndName(s3, name3)); } // ---------------------------------------------------------------- /** Return the values and names of one or more ints. 
*/ public static String valueAndName( - int i0, String name0) { + int i0, String name0) { return name0 + " == " + i0; } public static String valueAndName( - int i0, String name0, - int i1, String name1) { + int i0, String name0, + int i1, String name1) { return concat( - valueAndName(i0, name0), - valueAndName(i1, name1)); + valueAndName(i0, name0), + valueAndName(i1, name1)); } public static String valueAndName( - int i0, String name0, - int i1, String name1, - int i2, String name2) { + int i0, String name0, + int i1, String name1, + int i2, String name2) { return concat( - valueAndName(i0, name0), - valueAndName(i1, name1), - valueAndName(i2, name2)); + valueAndName(i0, name0), + valueAndName(i1, name1), + valueAndName(i2, name2)); } public static String valueAndName( - int i0, String name0, - int i1, String name1, - int i2, String name2, - int i3, String name3) { + int i0, String name0, + int i1, String name1, + int i2, String name2, + int i3, String name3) { return concat( - valueAndName(i0, name0), - valueAndName(i1, name1), - valueAndName(i2, name2), - valueAndName(i3, name3)); + valueAndName(i0, name0), + valueAndName(i1, name1), + valueAndName(i2, name2), + valueAndName(i3, name3)); } // ---------------------------------------------------------------- /** Return the values and names of one or more longs. 
*/ public static String valueAndName( - long l0, String name0) { + long l0, String name0) { return name0 + " == " + l0; } public static String valueAndName( - long l0, String name0, - long l1, String name1) { + long l0, String name0, + long l1, String name1) { return concat( - valueAndName(l0, name0), - valueAndName(l1, name1)); + valueAndName(l0, name0), + valueAndName(l1, name1)); } public static String valueAndName( - long l0, String name0, - long l1, String name1, - long l2, String name2) { + long l0, String name0, + long l1, String name1, + long l2, String name2) { return concat( - valueAndName(l0, name0), - valueAndName(l1, name1), - valueAndName(l2, name2)); + valueAndName(l0, name0), + valueAndName(l1, name1), + valueAndName(l2, name2)); } public static String valueAndName( - long l0, String name0, - long l1, String name1, - long l2, String name2, - long l3, String name3) { + long l0, String name0, + long l1, String name1, + long l2, String name2, + long l3, String name3) { return concat( - valueAndName(l0, name0), - valueAndName(l1, name1), - valueAndName(l2, name2), - valueAndName(l3, name3)); + valueAndName(l0, name0), + valueAndName(l1, name1), + valueAndName(l2, name2), + valueAndName(l3, name3)); } // ---------------------------------------------------------------- /** Return the values and names of one or more floats. 
*/ public static String valueAndName( - float f0, String name0) { + float f0, String name0) { return name0 + " == " + f0; } public static String valueAndName( - float f0, String name0, - float f1, String name1) { + float f0, String name0, + float f1, String name1) { return concat( - valueAndName(f0, name0), - valueAndName(f1, name1)); + valueAndName(f0, name0), + valueAndName(f1, name1)); } public static String valueAndName( - float f0, String name0, - float f1, String name1, - float f2, String name2) { + float f0, String name0, + float f1, String name1, + float f2, String name2) { return concat( - valueAndName(f0, name0), - valueAndName(f1, name1), - valueAndName(f2, name2)); + valueAndName(f0, name0), + valueAndName(f1, name1), + valueAndName(f2, name2)); } public static String valueAndName( - float f0, String name0, - float f1, String name1, - float f2, String name2, - float f3, String name3) { + float f0, String name0, + float f1, String name1, + float f2, String name2, + float f3, String name3) { return concat( - valueAndName(f0, name0), - valueAndName(f1, name1), - valueAndName(f2, name2), - valueAndName(f3, name3)); + valueAndName(f0, name0), + valueAndName(f1, name1), + valueAndName(f2, name2), + valueAndName(f3, name3)); } // ---------------------------------------------------------------- /** Return the values and names of one or more doubles. 
*/ public static String valueAndName( - double d0, String name0) { + double d0, String name0) { return name0 + " == " + d0; } public static String valueAndName( - double d0, String name0, - double d1, String name1) { + double d0, String name0, + double d1, String name1) { return concat( - valueAndName(d0, name0), - valueAndName(d1, name1)); + valueAndName(d0, name0), + valueAndName(d1, name1)); } public static String valueAndName( - double d0, String name0, - double d1, String name1, - double d2, String name2) { + double d0, String name0, + double d1, String name1, + double d2, String name2) { return concat( - valueAndName(d0, name0), - valueAndName(d1, name1), - valueAndName(d2, name2)); + valueAndName(d0, name0), + valueAndName(d1, name1), + valueAndName(d2, name2)); } public static String valueAndName( - double d0, String name0, - double d1, String name1, - double d2, String name2, - double d3, String name3) { + double d0, String name0, + double d1, String name1, + double d2, String name2, + double d3, String name3) { return concat( - valueAndName(d0, name0), - valueAndName(d1, name1), - valueAndName(d2, name2), - valueAndName(d3, name3)); + valueAndName(d0, name0), + valueAndName(d1, name1), + valueAndName(d2, name2), + valueAndName(d3, name3)); } // ################################################################ @@ -420,30 +419,30 @@ public static String valueAndName( // ---------------------------------------------------------------- /** Return the concatenation of a list of valueAndName strings. 
*/ public static String concat( - String valueAndName0, - String valueAndName1) { + String valueAndName0, + String valueAndName1) { return valueAndName0 + ", " + - valueAndName1; + valueAndName1; } public static String concat( - String valueAndName0, - String valueAndName1, - String valueAndName2) { + String valueAndName0, + String valueAndName1, + String valueAndName2) { return valueAndName0 + ", " + - valueAndName1 + ", " + - valueAndName2; + valueAndName1 + ", " + + valueAndName2; } public static String concat( - String valueAndName0, - String valueAndName1, - String valueAndName2, - String valueAndName3) { + String valueAndName0, + String valueAndName1, + String valueAndName2, + String valueAndName3) { return valueAndName0 + ", " + - valueAndName1 + ", " + - valueAndName2 + ", " + - valueAndName3; + valueAndName1 + ", " + + valueAndName2 + ", " + + valueAndName3; } } diff --git a/Base/src/main/java/io/deephaven/base/verify/Require.java b/Base/src/main/java/io/deephaven/base/verify/Require.java index 4b6820225bd..b270cea6c5b 100644 --- a/Base/src/main/java/io/deephaven/base/verify/Require.java +++ b/Base/src/main/java/io/deephaven/base/verify/Require.java @@ -15,22 +15,19 @@ // -------------------------------------------------------------------- /** - * Requirement methods for simple runtime program verification. Failed requirements throw - * {@link RequirementFailure}. + * Requirement methods for simple runtime program verification. Failed requirements throw {@link RequirementFailure}. *

    * Methods: *

      - *
    • void requirement(boolean condition, String conditionText[, String detailMessage][, int + *
    • void requirement(boolean condition, String conditionText[, String detailMessage][, int numCallsBelowRequirer]) + *
    • void requirement(boolean condition, String conditionText, value0, String name0, value1, String name1, ... [, int * numCallsBelowRequirer]) - *
    • void requirement(boolean condition, String conditionText, value0, String name0, value1, - * String name1, ... [, int numCallsBelowRequirer]) *
    *
      *
    • void statementNeverExecuted([int numCallsBelowRequirer]) *
    • void statementNeverExecuted(String statementDescription[, int numCallsBelowRequirer]) *
    • void exceptionNeverCaught(Exception caughtException[, int numCallsBelowRequirer]) - *
    • void exceptionNeverCaught(String tryStatementDescription, Exception caughtException[, int - * numCallsBelowRequirer]) + *
    • void exceptionNeverCaught(String tryStatementDescription, Exception caughtException[, int numCallsBelowRequirer]) *
    • void valueNeverOccurs(value, String name[, int numCallsBelowRequirer]) *
    • void valuesNeverOccur(value0, name0, value1, name1, ... [, int numCallsBelowRequirer]) *
    @@ -43,15 +40,13 @@ *
      *
    • void eq/neq(boolean/char/byte/short/int/long/float/double, String name0, * boolean/char/byte/short/int/long/float/double[, String name1][, int numCallsBelowRequirer]) - *
    • void lt/leq/gt/geq(char/byte/short/int/long/float/double, String name0, - * char/byte/short/int/long/float/double[, String name1][, int numCallsBelowRequirer]) + *
    • void lt/leq/gt/geq(char/byte/short/int/long/float/double, String name0, char/byte/short/int/long/float/double[, + * String name1][, int numCallsBelowRequirer]) *
    *
      *
    • void eqFalse/neqFalse/eqTrue/neqTrue(boolean, String name[, int numCallsBelowRequirer]) - *
    • void eqZero/neqZero(char/byte/short/int/long/float/double, String name[, int - * numCallsBelowRequirer]) - *
    • void ltZero/leqZero/gtZero/geqZero(byte/short/int/long/float/double, String name[, int - * numCallsBelowRequirer]) + *
    • void eqZero/neqZero(char/byte/short/int/long/float/double, String name[, int numCallsBelowRequirer]) + *
    • void ltZero/leqZero/gtZero/geqZero(byte/short/int/long/float/double, String name[, int numCallsBelowRequirer]) *
    *
      *
    • void eq/neq(Object, name0, Object[, name1][, int numCallsBelowRequirer]) @@ -72,8 +67,7 @@ *
    *
  • equals corresponds to Object.equals (preceded by necessary null checks), e.g., *
      - *
    • For Object a and b, Require.equals(a, "a", b, "b") corresponds to require (a!= null && b != - * null && a.equals(b)) + *
    • For Object a and b, Require.equals(a, "a", b, "b") corresponds to require (a!= null && b != null && a.equals(b)) *
    • for String s, Require.nonempty(s, "s") corresponds to require (s != null && s.length() != 0) *
    *
@@ -97,8 +91,8 @@ private Require() {} // ---------------------------------------------------------------- private static void fail(String conditionText, int numCallsBelowRequirer) { final RequirementFailure requirementFailure = new RequirementFailure( - ExceptionMessageUtil.failureMessage("Requirement", "required", conditionText, null), - numCallsBelowRequirer + 1); + ExceptionMessageUtil.failureMessage("Requirement", "required", conditionText, null), + numCallsBelowRequirer + 1); if (onFailureCallback != null) { try { onFailureCallback.accept(requirementFailure); @@ -109,11 +103,10 @@ private static void fail(String conditionText, int numCallsBelowRequirer) { } // ---------------------------------------------------------------- - private static void fail(String conditionText, String detailMessage, - int numCallsBelowRequirer) { - final RequirementFailure requirementFailure = - new RequirementFailure(ExceptionMessageUtil.failureMessage("Requirement", "required", - conditionText, detailMessage), numCallsBelowRequirer + 1); + private static void fail(String conditionText, String detailMessage, int numCallsBelowRequirer) { + final RequirementFailure requirementFailure = new RequirementFailure( + ExceptionMessageUtil.failureMessage("Requirement", "required", conditionText, detailMessage), + numCallsBelowRequirer + 1); if (onFailureCallback != null) { try { onFailureCallback.accept(requirementFailure); @@ -130,8 +123,7 @@ private static void fail(String conditionText, String detailMessage, /** * require (condition, conditionText) */ - public static void requirement(boolean condition, String conditionText, - int numCallsBelowRequirer) { + public static void requirement(boolean condition, String conditionText, int numCallsBelowRequirer) { if (!(condition)) { fail(conditionText, numCallsBelowRequirer + 1); } @@ -146,7 +138,7 @@ public static void requirement(boolean condition, String conditionText) { * require (condition, conditionText, detailMessage) */ public static 
void requirement(boolean condition, String conditionText, String detailMessage, - int numCallsBelowRequirer) { + int numCallsBelowRequirer) { if (!(condition)) { fail(conditionText, detailMessage, numCallsBelowRequirer + 1); } @@ -161,69 +153,64 @@ public static void requirement(boolean condition, String conditionText, String d * require (condition, Object o0, String name0, ... ) */ public static void requirement(boolean condition, String conditionText, Object o0, String name0, - int numCallsBelowRequirer) { + int numCallsBelowRequirer) { if (!(condition)) { - fail(conditionText, ExceptionMessageUtil.valueAndName(o0, name0), - numCallsBelowRequirer + 1); + fail(conditionText, ExceptionMessageUtil.valueAndName(o0, name0), numCallsBelowRequirer + 1); } } - public static void requirement(boolean condition, String conditionText, Object o0, - String name0) { + public static void requirement(boolean condition, String conditionText, Object o0, String name0) { requirement(condition, conditionText, o0, name0, 1); } - public static void requirement(boolean condition, String conditionText, Object o0, String name0, - Object o1, String name1, int numCallsBelowRequirer) { + public static void requirement(boolean condition, String conditionText, Object o0, String name0, Object o1, + String name1, int numCallsBelowRequirer) { if (!(condition)) { - fail(conditionText, ExceptionMessageUtil.valueAndName(o0, name0, o1, name1), - numCallsBelowRequirer + 1); + fail(conditionText, ExceptionMessageUtil.valueAndName(o0, name0, o1, name1), numCallsBelowRequirer + 1); } } - public static void requirement(boolean condition, String conditionText, Object o0, String name0, - Object o1, String name1) { + public static void requirement(boolean condition, String conditionText, Object o0, String name0, Object o1, + String name1) { requirement(condition, conditionText, o0, name0, o1, name1, 1); } - public static void requirement(boolean condition, String conditionText, Object o0, String name0, - 
Object o1, String name1, Object o2, String name2, int numCallsBelowRequirer) { + public static void requirement(boolean condition, String conditionText, Object o0, String name0, Object o1, + String name1, Object o2, String name2, int numCallsBelowRequirer) { if (!(condition)) { fail(conditionText, ExceptionMessageUtil.valueAndName(o0, name0, o1, name1, o2, name2), - numCallsBelowRequirer + 1); + numCallsBelowRequirer + 1); } } - public static void requirement(boolean condition, String conditionText, long o0, String name0, - long o1, String name1, long o2, String name2, int numCallsBelowRequirer) { + public static void requirement(boolean condition, String conditionText, long o0, String name0, long o1, + String name1, long o2, String name2, int numCallsBelowRequirer) { if (!(condition)) { fail(conditionText, ExceptionMessageUtil.valueAndName(o0, name0, o1, name1, o2, name2), - numCallsBelowRequirer + 1); + numCallsBelowRequirer + 1); } } - public static void requirement(boolean condition, String conditionText, Object o0, String name0, - Object o1, String name1, Object o2, String name2) { + public static void requirement(boolean condition, String conditionText, Object o0, String name0, Object o1, + String name1, Object o2, String name2) { requirement(condition, conditionText, o0, name0, o1, name1, o2, name2, 1); } - public static void requirement(boolean condition, String conditionText, long o0, String name0, - long o1, String name1, long o2, String name2) { + public static void requirement(boolean condition, String conditionText, long o0, String name0, long o1, + String name1, long o2, String name2) { requirement(condition, conditionText, o0, name0, o1, name1, o2, name2, 1); } - public static void requirement(boolean condition, String conditionText, Object o0, String name0, - Object o1, String name1, Object o2, String name2, Object o3, String name3, - int numCallsBelowRequirer) { + public static void requirement(boolean condition, String conditionText, Object o0, 
String name0, Object o1, + String name1, Object o2, String name2, Object o3, String name3, int numCallsBelowRequirer) { if (!(condition)) { - fail(conditionText, - ExceptionMessageUtil.valueAndName(o0, name0, o1, name1, o2, name2, o3, name3), - numCallsBelowRequirer + 1); + fail(conditionText, ExceptionMessageUtil.valueAndName(o0, name0, o1, name1, o2, name2, o3, name3), + numCallsBelowRequirer + 1); } } - public static void requirement(boolean condition, String conditionText, Object o0, String name0, - Object o1, String name1, Object o2, String name2, Object o3, String name3) { + public static void requirement(boolean condition, String conditionText, Object o0, String name0, Object o1, + String name1, Object o2, String name2, Object o3, String name3) { requirement(condition, conditionText, o0, name0, o1, name1, o2, name2, o3, name3, 1); } @@ -231,74 +218,65 @@ public static void requirement(boolean condition, String conditionText, Object o /** * require (condition, boolean b0, String name0, ... 
) */ - public static void requirement(boolean condition, String conditionText, boolean b0, - String name0, int numCallsBelowRequirer) { + public static void requirement(boolean condition, String conditionText, boolean b0, String name0, + int numCallsBelowRequirer) { if (!(condition)) { - fail(conditionText, ExceptionMessageUtil.valueAndName(b0, name0), - numCallsBelowRequirer + 1); + fail(conditionText, ExceptionMessageUtil.valueAndName(b0, name0), numCallsBelowRequirer + 1); } } - public static void requirement(boolean condition, String conditionText, boolean b0, - String name0) { + public static void requirement(boolean condition, String conditionText, boolean b0, String name0) { requirement(condition, conditionText, b0, name0, 1); } - public static void requirement(boolean condition, String conditionText, boolean b0, - String name0, boolean b1, String name1, int numCallsBelowRequirer) { + public static void requirement(boolean condition, String conditionText, boolean b0, String name0, boolean b1, + String name1, int numCallsBelowRequirer) { if (!(condition)) { - fail(conditionText, ExceptionMessageUtil.valueAndName(b0, name0, b1, name1), - numCallsBelowRequirer + 1); + fail(conditionText, ExceptionMessageUtil.valueAndName(b0, name0, b1, name1), numCallsBelowRequirer + 1); } } - public static void requirement(boolean condition, String conditionText, boolean b0, - String name0, boolean b1, String name1) { + public static void requirement(boolean condition, String conditionText, boolean b0, String name0, boolean b1, + String name1) { requirement(condition, conditionText, b0, name0, b1, name1, 1); } - public static void requirement(boolean condition, String conditionText, boolean b0, - String name0, double d1, String name1, int numCallsBelowRequirer) { + public static void requirement(boolean condition, String conditionText, boolean b0, String name0, double d1, + String name1, int numCallsBelowRequirer) { if (!(condition)) { - fail(conditionText, - 
ExceptionMessageUtil.concat(ExceptionMessageUtil.valueAndName(b0, name0), - ExceptionMessageUtil.valueAndName(d1, name1)), - numCallsBelowRequirer + 1); + fail(conditionText, ExceptionMessageUtil.concat(ExceptionMessageUtil.valueAndName(b0, name0), + ExceptionMessageUtil.valueAndName(d1, name1)), numCallsBelowRequirer + 1); } } - public static void requirement(boolean condition, String conditionText, boolean b0, - String name0, double d1, String name1) { + public static void requirement(boolean condition, String conditionText, boolean b0, String name0, double d1, + String name1) { requirement(condition, conditionText, b0, name0, d1, name1, 1); } - public static void requirement(boolean condition, String conditionText, boolean b0, - String name0, boolean b1, String name1, boolean b2, String name2, - int numCallsBelowRequirer) { + public static void requirement(boolean condition, String conditionText, boolean b0, String name0, boolean b1, + String name1, boolean b2, String name2, int numCallsBelowRequirer) { if (!(condition)) { fail(conditionText, ExceptionMessageUtil.valueAndName(b0, name0, b1, name1, b2, name2), - numCallsBelowRequirer + 1); + numCallsBelowRequirer + 1); } } - public static void requirement(boolean condition, String conditionText, boolean b0, - String name0, boolean b1, String name1, boolean b2, String name2) { + public static void requirement(boolean condition, String conditionText, boolean b0, String name0, boolean b1, + String name1, boolean b2, String name2) { requirement(condition, conditionText, b0, name0, b1, name1, b2, name2, 1); } - public static void requirement(boolean condition, String conditionText, boolean b0, - String name0, boolean b1, String name1, boolean b2, String name2, boolean b3, String name3, - int numCallsBelowRequirer) { + public static void requirement(boolean condition, String conditionText, boolean b0, String name0, boolean b1, + String name1, boolean b2, String name2, boolean b3, String name3, int 
numCallsBelowRequirer) { if (!(condition)) { - fail(conditionText, - ExceptionMessageUtil.valueAndName(b0, name0, b1, name1, b2, name2, b3, name3), - numCallsBelowRequirer + 1); + fail(conditionText, ExceptionMessageUtil.valueAndName(b0, name0, b1, name1, b2, name2, b3, name3), + numCallsBelowRequirer + 1); } } - public static void requirement(boolean condition, String conditionText, boolean b0, - String name0, boolean b1, String name1, boolean b2, String name2, boolean b3, - String name3) { + public static void requirement(boolean condition, String conditionText, boolean b0, String name0, boolean b1, + String name1, boolean b2, String name2, boolean b3, String name3) { requirement(condition, conditionText, b0, name0, b1, name1, b2, name2, b3, name3, 1); } @@ -307,10 +285,9 @@ public static void requirement(boolean condition, String conditionText, boolean * require (condition, int i0, String name0, ... ) */ public static void requirement(boolean condition, String conditionText, int i0, String name0, - int numCallsBelowRequirer) { + int numCallsBelowRequirer) { if (!(condition)) { - fail(conditionText, ExceptionMessageUtil.valueAndName(i0, name0), - numCallsBelowRequirer + 1); + fail(conditionText, ExceptionMessageUtil.valueAndName(i0, name0), numCallsBelowRequirer + 1); } } @@ -318,16 +295,15 @@ public static void requirement(boolean condition, String conditionText, int i0, requirement(condition, conditionText, i0, name0, 1); } - public static void requirement(boolean condition, String conditionText, int i0, String name0, - int i1, String name1, int numCallsBelowRequirer) { + public static void requirement(boolean condition, String conditionText, int i0, String name0, int i1, String name1, + int numCallsBelowRequirer) { if (!(condition)) { - fail(conditionText, ExceptionMessageUtil.valueAndName(i0, name0, i1, name1), - numCallsBelowRequirer + 1); + fail(conditionText, ExceptionMessageUtil.valueAndName(i0, name0, i1, name1), numCallsBelowRequirer + 1); } } - public 
static void requirement(boolean condition, String conditionText, int i0, String name0, - int i1, String name1) { + public static void requirement(boolean condition, String conditionText, int i0, String name0, int i1, + String name1) { requirement(condition, conditionText, i0, name0, i1, name1, 1); } @@ -336,10 +312,9 @@ public static void requirement(boolean condition, String conditionText, int i0, * require (condition, long l0, String name0, ... ) */ public static void requirement(boolean condition, String conditionText, long l0, String name0, - int numCallsBelowRequirer) { + int numCallsBelowRequirer) { if (!(condition)) { - fail(conditionText, ExceptionMessageUtil.valueAndName(l0, name0), - numCallsBelowRequirer + 1); + fail(conditionText, ExceptionMessageUtil.valueAndName(l0, name0), numCallsBelowRequirer + 1); } } @@ -367,8 +342,7 @@ public static RequirementFailure statementNeverExecuted() { /** * require (statementDescription is never executed) */ - public static RequirementFailure statementNeverExecuted(String statementDescription, - int numCallsBelowRequirer) { + public static RequirementFailure statementNeverExecuted(String statementDescription, int numCallsBelowRequirer) { fail(statementDescription + " is never executed", numCallsBelowRequirer + 1); return null; } @@ -387,8 +361,7 @@ public static RequirementFailure statementNeverExecuted(String statementDescript public static RequirementFailure exceptionNeverCaught(Exception e, int numCallsBelowRequirer) { try { fail(e.getClass().getName() + " is never caught", - e.getClass().getName() + "(" + e.getMessage() + ") caught", - numCallsBelowRequirer + 1); + e.getClass().getName() + "(" + e.getMessage() + ") caught", numCallsBelowRequirer + 1); } catch (RequirementFailure requirementFailure) { requirementFailure.initCause(e); throw requirementFailure; @@ -404,12 +377,11 @@ public static RequirementFailure exceptionNeverCaught(Exception e) { /** * require (tryStatementDescription succeeds, Exception e) */ - 
public static RequirementFailure exceptionNeverCaught(String tryStatementDescription, - Exception e, int numCallsBelowRequirer) { + public static RequirementFailure exceptionNeverCaught(String tryStatementDescription, Exception e, + int numCallsBelowRequirer) { try { - fail(tryStatementDescription + " succeeds", - e.getClass().getName() + "(" + e.getMessage() + ") caught", - numCallsBelowRequirer + 1); + fail(tryStatementDescription + " succeeds", e.getClass().getName() + "(" + e.getMessage() + ") caught", + numCallsBelowRequirer + 1); } catch (RequirementFailure requirementFailure) { requirementFailure.initCause(e); throw requirementFailure; @@ -417,8 +389,7 @@ public static RequirementFailure exceptionNeverCaught(String tryStatementDescrip return null; } - public static RequirementFailure exceptionNeverCaught(String tryStatementDescription, - Exception e) { + public static RequirementFailure exceptionNeverCaught(String tryStatementDescription, Exception e) { return exceptionNeverCaught(tryStatementDescription, e, 1); } @@ -429,10 +400,8 @@ public static RequirementFailure exceptionNeverCaught(String tryStatementDescrip /** * require (this value never occurs, Object o, name) */ - public static RequirementFailure valueNeverOccurs(Object o, String name, - int numCallsBelowRequirer) { - fail(ExceptionMessageUtil.valueAndName(o, name) + " never occurs", - numCallsBelowRequirer + 1); + public static RequirementFailure valueNeverOccurs(Object o, String name, int numCallsBelowRequirer) { + fail(ExceptionMessageUtil.valueAndName(o, name) + " never occurs", numCallsBelowRequirer + 1); return null; } @@ -444,10 +413,8 @@ public static RequirementFailure valueNeverOccurs(Object o, String name) { /** * require (this value never occurs, boolean b, name) */ - public static RequirementFailure valueNeverOccurs(boolean b, String name, - int numCallsBelowRequirer) { - fail(ExceptionMessageUtil.valueAndName(b, name) + " never occurs", - numCallsBelowRequirer + 1); + public static 
RequirementFailure valueNeverOccurs(boolean b, String name, int numCallsBelowRequirer) { + fail(ExceptionMessageUtil.valueAndName(b, name) + " never occurs", numCallsBelowRequirer + 1); return null; } @@ -459,10 +426,8 @@ public static RequirementFailure valueNeverOccurs(boolean b, String name) { /** * require (this value never occurs, char c, name) */ - public static RequirementFailure valueNeverOccurs(char c, String name, - int numCallsBelowRequirer) { - fail(ExceptionMessageUtil.valueAndName(c, name) + " never occurs", - numCallsBelowRequirer + 1); + public static RequirementFailure valueNeverOccurs(char c, String name, int numCallsBelowRequirer) { + fail(ExceptionMessageUtil.valueAndName(c, name) + " never occurs", numCallsBelowRequirer + 1); return null; } @@ -474,10 +439,8 @@ public static RequirementFailure valueNeverOccurs(char c, String name) { /** * require (this value never occurs, byte b, name) */ - public static RequirementFailure valueNeverOccurs(byte b, String name, - int numCallsBelowRequirer) { - fail(ExceptionMessageUtil.valueAndName(b, name) + " never occurs", - numCallsBelowRequirer + 1); + public static RequirementFailure valueNeverOccurs(byte b, String name, int numCallsBelowRequirer) { + fail(ExceptionMessageUtil.valueAndName(b, name) + " never occurs", numCallsBelowRequirer + 1); return null; } @@ -489,10 +452,8 @@ public static RequirementFailure valueNeverOccurs(byte b, String name) { /** * require (this value never occurs, short s, name) */ - public static RequirementFailure valueNeverOccurs(short s, String name, - int numCallsBelowRequirer) { - fail(ExceptionMessageUtil.valueAndName(s, name) + " never occurs", - numCallsBelowRequirer + 1); + public static RequirementFailure valueNeverOccurs(short s, String name, int numCallsBelowRequirer) { + fail(ExceptionMessageUtil.valueAndName(s, name) + " never occurs", numCallsBelowRequirer + 1); return null; } @@ -504,10 +465,8 @@ public static RequirementFailure valueNeverOccurs(short s, String 
name) { /** * require (this value never occurs, int i, name) */ - public static RequirementFailure valueNeverOccurs(int i, String name, - int numCallsBelowRequirer) { - fail(ExceptionMessageUtil.valueAndName(i, name) + " never occurs", - numCallsBelowRequirer + 1); + public static RequirementFailure valueNeverOccurs(int i, String name, int numCallsBelowRequirer) { + fail(ExceptionMessageUtil.valueAndName(i, name) + " never occurs", numCallsBelowRequirer + 1); return null; } @@ -519,10 +478,8 @@ public static RequirementFailure valueNeverOccurs(int i, String name) { /** * require (this value never occurs, long l, name) */ - public static RequirementFailure valueNeverOccurs(long l, String name, - int numCallsBelowRequirer) { - fail(ExceptionMessageUtil.valueAndName(l, name) + " never occurs", - numCallsBelowRequirer + 1); + public static RequirementFailure valueNeverOccurs(long l, String name, int numCallsBelowRequirer) { + fail(ExceptionMessageUtil.valueAndName(l, name) + " never occurs", numCallsBelowRequirer + 1); return null; } @@ -534,10 +491,8 @@ public static RequirementFailure valueNeverOccurs(long l, String name) { /** * require (this value never occurs, float f, name) */ - public static RequirementFailure valueNeverOccurs(float f, String name, - int numCallsBelowRequirer) { - fail(ExceptionMessageUtil.valueAndName(f, name) + " never occurs", - numCallsBelowRequirer + 1); + public static RequirementFailure valueNeverOccurs(float f, String name, int numCallsBelowRequirer) { + fail(ExceptionMessageUtil.valueAndName(f, name) + " never occurs", numCallsBelowRequirer + 1); return null; } @@ -549,10 +504,8 @@ public static RequirementFailure valueNeverOccurs(float f, String name) { /** * require (this value never occurs, double d, name) */ - public static RequirementFailure valueNeverOccurs(double d, String name, - int numCallsBelowRequirer) { - fail(ExceptionMessageUtil.valueAndName(d, name) + " never occurs", - numCallsBelowRequirer + 1); + public static 
RequirementFailure valueNeverOccurs(double d, String name, int numCallsBelowRequirer) { + fail(ExceptionMessageUtil.valueAndName(d, name) + " never occurs", numCallsBelowRequirer + 1); return null; } @@ -570,8 +523,7 @@ public static RequirementFailure valueNeverOccurs(double d, String name) { public static void holdsLock(Object o, String name, int numCallsBelowRequirer) { neqNull(o, "o"); if (!Thread.holdsLock(o)) { - fail("\"" + Thread.currentThread().getName() + "\".holdsLock(" + name + ")", - numCallsBelowRequirer + 1); + fail("\"" + Thread.currentThread().getName() + "\".holdsLock(" + name + ")", numCallsBelowRequirer + 1); } } @@ -586,8 +538,7 @@ public static void holdsLock(Object o, String name) { public static void notHoldsLock(Object o, String name, int numCallsBelowRequirer) { neqNull(o, "o"); if (Thread.holdsLock(o)) { - fail("!\"" + Thread.currentThread().getName() + "\".holdsLock(" + name + ")", - numCallsBelowRequirer + 1); + fail("!\"" + Thread.currentThread().getName() + "\".holdsLock(" + name + ")", numCallsBelowRequirer + 1); } } @@ -603,14 +554,11 @@ public static void notHoldsLock(Object o, String name) { /** * require (o instanceof type) */ - public static void instanceOf(Object o, String name, Class type, - int numCallsBelowRequirer) { + public static void instanceOf(Object o, String name, Class type, int numCallsBelowRequirer) { if (!type.isInstance(o)) { - fail(name + " instanceof " + type, - null == o ? ExceptionMessageUtil.valueAndName(o, name) - : name + " instanceof " + o.getClass() + " (" - + ExceptionMessageUtil.valueAndName(o, name) + ")", - numCallsBelowRequirer + 1); + fail(name + " instanceof " + type, null == o ? 
ExceptionMessageUtil.valueAndName(o, name) + : name + " instanceof " + o.getClass() + " (" + ExceptionMessageUtil.valueAndName(o, name) + ")", + numCallsBelowRequirer + 1); } } @@ -622,13 +570,11 @@ public static void instanceOf(Object o, String name, Class type) { /** * require !(o instanceof type) */ - public static void notInstanceOf(Object o, String name, Class type, - int numCallsBelowRequirer) { + public static void notInstanceOf(Object o, String name, Class type, int numCallsBelowRequirer) { if (type.isInstance(o)) { - fail( - "!(" + name + " instanceof " + type + ")", name + " instanceof " + o.getClass() - + " (" + ExceptionMessageUtil.valueAndName(o, name) + ")", - numCallsBelowRequirer + 1); + fail("!(" + name + " instanceof " + type + ")", + name + " instanceof " + o.getClass() + " (" + ExceptionMessageUtil.valueAndName(o, name) + ")", + numCallsBelowRequirer + 1); } } @@ -649,8 +595,7 @@ public static void isAWTThread() { public static void isAWTThread(int numCallsBelowRequirer) { if (!EventQueue.isDispatchThread()) { - fail("\"" + Thread.currentThread().getName() + "\".isAWTThread()", - numCallsBelowRequirer + 1); + fail("\"" + Thread.currentThread().getName() + "\".isAWTThread()", numCallsBelowRequirer + 1); } } @@ -664,8 +609,7 @@ public static void isNotAWTThread() { public static void isNotAWTThread(int numCallsBelowRequirer) { if (EventQueue.isDispatchThread()) { - fail("!\"" + Thread.currentThread().getName() + "\".isAWTThread()", - numCallsBelowRequirer + 1); + fail("!\"" + Thread.currentThread().getName() + "\".isAWTThread()", numCallsBelowRequirer + 1); } } @@ -676,11 +620,10 @@ public static void isNotAWTThread(int numCallsBelowRequirer) { /** * require (b0 == b1) */ - public static void eq(boolean b0, String name0, boolean b1, String name1, - int numCallsBelowRequirer) { + public static void eq(boolean b0, String name0, boolean b1, String name1, int numCallsBelowRequirer) { if (!(b0 == b1)) { fail(name0 + " == " + name1, 
ExceptionMessageUtil.valueAndName(b0, name0, b1, name1), - numCallsBelowRequirer + 1); + numCallsBelowRequirer + 1); } } @@ -690,8 +633,7 @@ public static void eq(boolean b0, String name0, boolean b1, String name1) { public static void eq(boolean b0, String name0, boolean b1, int numCallsBelowRequirer) { if (!(b0 == b1)) { - fail(name0 + " == " + b1, ExceptionMessageUtil.valueAndName(b0, name0), - numCallsBelowRequirer + 1); + fail(name0 + " == " + b1, ExceptionMessageUtil.valueAndName(b0, name0), numCallsBelowRequirer + 1); } } @@ -706,7 +648,7 @@ public static void eq(boolean b0, String name0, boolean b1) { public static void eq(char c0, String name0, char c1, String name1, int numCallsBelowRequirer) { if (!(c0 == c1)) { fail(name0 + " == " + name1, ExceptionMessageUtil.valueAndName(c0, name0, c1, name1), - numCallsBelowRequirer + 1); + numCallsBelowRequirer + 1); } } @@ -716,8 +658,8 @@ public static void eq(char c0, String name0, char c1, String name1) { public static void eq(char c0, String name0, char c1, int numCallsBelowRequirer) { if (!(c0 == c1)) { - fail(name0 + " == " + ExceptionMessageUtil.valueString(c1), - ExceptionMessageUtil.valueAndName(c0, name0), numCallsBelowRequirer + 1); + fail(name0 + " == " + ExceptionMessageUtil.valueString(c1), ExceptionMessageUtil.valueAndName(c0, name0), + numCallsBelowRequirer + 1); } } @@ -732,7 +674,7 @@ public static void eq(char c0, String name0, char c1) { public static void eq(byte b0, String name0, byte b1, String name1, int numCallsBelowRequirer) { if (!(b0 == b1)) { fail(name0 + " == " + name1, ExceptionMessageUtil.valueAndName(b0, name0, b1, name1), - numCallsBelowRequirer + 1); + numCallsBelowRequirer + 1); } } @@ -742,8 +684,7 @@ public static void eq(byte b0, String name0, byte b1, String name1) { public static void eq(byte b0, String name0, byte b1, int numCallsBelowRequirer) { if (!(b0 == b1)) { - fail(name0 + " == " + b1, ExceptionMessageUtil.valueAndName(b0, name0), - numCallsBelowRequirer + 1); + 
fail(name0 + " == " + b1, ExceptionMessageUtil.valueAndName(b0, name0), numCallsBelowRequirer + 1); } } @@ -755,11 +696,10 @@ public static void eq(byte b0, String name0, byte b1) { /** * require (s0 == s1) */ - public static void eq(short s0, String name0, short s1, String name1, - int numCallsBelowRequirer) { + public static void eq(short s0, String name0, short s1, String name1, int numCallsBelowRequirer) { if (!(s0 == s1)) { fail(name0 + " == " + name1, ExceptionMessageUtil.valueAndName(s0, name0, s1, name1), - numCallsBelowRequirer + 1); + numCallsBelowRequirer + 1); } } @@ -769,8 +709,7 @@ public static void eq(short s0, String name0, short s1, String name1) { public static void eq(short s0, String name0, short s1, int numCallsBelowRequirer) { if (!(s0 == s1)) { - fail(name0 + " == " + s1, ExceptionMessageUtil.valueAndName(s0, name0), - numCallsBelowRequirer + 1); + fail(name0 + " == " + s1, ExceptionMessageUtil.valueAndName(s0, name0), numCallsBelowRequirer + 1); } } @@ -785,7 +724,7 @@ public static void eq(short s0, String name0, short s1) { public static void eq(int i0, String name0, int i1, String name1, int numCallsBelowRequirer) { if (!(i0 == i1)) { fail(name0 + " == " + name1, ExceptionMessageUtil.valueAndName(i0, name0, i1, name1), - numCallsBelowRequirer + 1); + numCallsBelowRequirer + 1); } } @@ -795,8 +734,7 @@ public static void eq(int i0, String name0, int i1, String name1) { public static void eq(int i0, String name0, int i1, int numCallsBelowRequirer) { if (!(i0 == i1)) { - fail(name0 + " == " + i1, ExceptionMessageUtil.valueAndName(i0, name0), - numCallsBelowRequirer + 1); + fail(name0 + " == " + i1, ExceptionMessageUtil.valueAndName(i0, name0), numCallsBelowRequirer + 1); } } @@ -811,7 +749,7 @@ public static void eq(int i0, String name0, int i1) { public static void eq(long l0, String name0, long l1, String name1, int numCallsBelowRequirer) { if (!(l0 == l1)) { fail(name0 + " == " + name1, ExceptionMessageUtil.valueAndName(l0, name0, l1, 
name1), - numCallsBelowRequirer + 1); + numCallsBelowRequirer + 1); } } @@ -821,8 +759,7 @@ public static void eq(long l0, String name0, long l1, String name1) { public static void eq(long l0, String name0, long l1, int numCallsBelowRequirer) { if (!(l0 == l1)) { - fail(name0 + " == " + l1, ExceptionMessageUtil.valueAndName(l0, name0), - numCallsBelowRequirer + 1); + fail(name0 + " == " + l1, ExceptionMessageUtil.valueAndName(l0, name0), numCallsBelowRequirer + 1); } } @@ -834,11 +771,10 @@ public static void eq(long l0, String name0, long l1) { /** * require (f0 == f1) */ - public static void eq(float f0, String name0, float f1, String name1, - int numCallsBelowRequirer) { + public static void eq(float f0, String name0, float f1, String name1, int numCallsBelowRequirer) { if (!(f0 == f1)) { fail(name0 + " == " + name1, ExceptionMessageUtil.valueAndName(f0, name0, f1, name1), - numCallsBelowRequirer + 1); + numCallsBelowRequirer + 1); } } @@ -848,8 +784,7 @@ public static void eq(float f0, String name0, float f1, String name1) { public static void eq(float f0, String name0, float f1, int numCallsBelowRequirer) { if (!(f0 == f1)) { - fail(name0 + " == " + f1, ExceptionMessageUtil.valueAndName(f0, name0), - numCallsBelowRequirer + 1); + fail(name0 + " == " + f1, ExceptionMessageUtil.valueAndName(f0, name0), numCallsBelowRequirer + 1); } } @@ -861,11 +796,10 @@ public static void eq(float f0, String name0, float f1) { /** * require (d0 == d1) */ - public static void eq(double d0, String name0, double d1, String name1, - int numCallsBelowRequirer) { + public static void eq(double d0, String name0, double d1, String name1, int numCallsBelowRequirer) { if (!(d0 == d1)) { fail(name0 + " == " + name1, ExceptionMessageUtil.valueAndName(d0, name0, d1, name1), - numCallsBelowRequirer + 1); + numCallsBelowRequirer + 1); } } @@ -875,8 +809,7 @@ public static void eq(double d0, String name0, double d1, String name1) { public static void eq(double d0, String name0, double d1, int 
numCallsBelowRequirer) { if (!(d0 == d1)) { - fail(name0 + " == " + d1, ExceptionMessageUtil.valueAndName(d0, name0), - numCallsBelowRequirer + 1); + fail(name0 + " == " + d1, ExceptionMessageUtil.valueAndName(d0, name0), numCallsBelowRequirer + 1); } } @@ -891,11 +824,10 @@ public static void eq(double d0, String name0, double d1) { /** * require (b0 != b1) */ - public static void neq(boolean b0, String name0, boolean b1, String name1, - int numCallsBelowRequirer) { + public static void neq(boolean b0, String name0, boolean b1, String name1, int numCallsBelowRequirer) { if (!(b0 != b1)) { fail(name0 + " != " + name1, ExceptionMessageUtil.valueAndName(b0, name0, b1, name1), - numCallsBelowRequirer + 1); + numCallsBelowRequirer + 1); } } @@ -905,8 +837,7 @@ public static void neq(boolean b0, String name0, boolean b1, String name1) { public static void neq(boolean b0, String name0, boolean b1, int numCallsBelowRequirer) { if (!(b0 != b1)) { - fail(name0 + " != " + b1, ExceptionMessageUtil.valueAndName(b0, name0), - numCallsBelowRequirer + 1); + fail(name0 + " != " + b1, ExceptionMessageUtil.valueAndName(b0, name0), numCallsBelowRequirer + 1); } } @@ -918,11 +849,10 @@ public static void neq(boolean b0, String name0, boolean b1) { /** * require (c0 != c1) */ - public static void neq(char c0, String name0, char c1, String name1, - int numCallsBelowRequirer) { + public static void neq(char c0, String name0, char c1, String name1, int numCallsBelowRequirer) { if (!(c0 != c1)) { fail(name0 + " != " + name1, ExceptionMessageUtil.valueAndName(c0, name0, c1, name1), - numCallsBelowRequirer + 1); + numCallsBelowRequirer + 1); } } @@ -932,8 +862,8 @@ public static void neq(char c0, String name0, char c1, String name1) { public static void neq(char c0, String name0, char c1, int numCallsBelowRequirer) { if (!(c0 != c1)) { - fail(name0 + " != " + ExceptionMessageUtil.valueString(c1), - ExceptionMessageUtil.valueAndName(c0, name0), numCallsBelowRequirer + 1); + fail(name0 + " != 
" + ExceptionMessageUtil.valueString(c1), ExceptionMessageUtil.valueAndName(c0, name0), + numCallsBelowRequirer + 1); } } @@ -945,11 +875,10 @@ public static void neq(char c0, String name0, char c1) { /** * require (b0 != b1) */ - public static void neq(byte b0, String name0, byte b1, String name1, - int numCallsBelowRequirer) { + public static void neq(byte b0, String name0, byte b1, String name1, int numCallsBelowRequirer) { if (!(b0 != b1)) { fail(name0 + " != " + name1, ExceptionMessageUtil.valueAndName(b0, name0, b1, name1), - numCallsBelowRequirer + 1); + numCallsBelowRequirer + 1); } } @@ -959,8 +888,7 @@ public static void neq(byte b0, String name0, byte b1, String name1) { public static void neq(byte b0, String name0, byte b1, int numCallsBelowRequirer) { if (!(b0 != b1)) { - fail(name0 + " != " + b1, ExceptionMessageUtil.valueAndName(b0, name0), - numCallsBelowRequirer + 1); + fail(name0 + " != " + b1, ExceptionMessageUtil.valueAndName(b0, name0), numCallsBelowRequirer + 1); } } @@ -972,11 +900,10 @@ public static void neq(byte b0, String name0, byte b1) { /** * require (s0 != s1) */ - public static void neq(short s0, String name0, short s1, String name1, - int numCallsBelowRequirer) { + public static void neq(short s0, String name0, short s1, String name1, int numCallsBelowRequirer) { if (!(s0 != s1)) { fail(name0 + " != " + name1, ExceptionMessageUtil.valueAndName(s0, name0, s1, name1), - numCallsBelowRequirer + 1); + numCallsBelowRequirer + 1); } } @@ -986,8 +913,7 @@ public static void neq(short s0, String name0, short s1, String name1) { public static void neq(short s0, String name0, short s1, int numCallsBelowRequirer) { if (!(s0 != s1)) { - fail(name0 + " != " + s1, ExceptionMessageUtil.valueAndName(s0, name0), - numCallsBelowRequirer + 1); + fail(name0 + " != " + s1, ExceptionMessageUtil.valueAndName(s0, name0), numCallsBelowRequirer + 1); } } @@ -1002,7 +928,7 @@ public static void neq(short s0, String name0, short s1) { public static int neq(int 
i0, String name0, int i1, String name1, int numCallsBelowRequirer) { if (!(i0 != i1)) { fail(name0 + " != " + name1, ExceptionMessageUtil.valueAndName(i0, name0, i1, name1), - numCallsBelowRequirer + 1); + numCallsBelowRequirer + 1); } return i0; } @@ -1013,8 +939,7 @@ public static int neq(int i0, String name0, int i1, String name1) { public static void neq(int i0, String name0, int i1, int numCallsBelowRequirer) { if (!(i0 != i1)) { - fail(name0 + " != " + i1, ExceptionMessageUtil.valueAndName(i0, name0), - numCallsBelowRequirer + 1); + fail(name0 + " != " + i1, ExceptionMessageUtil.valueAndName(i0, name0), numCallsBelowRequirer + 1); } } @@ -1026,11 +951,10 @@ public static void neq(int i0, String name0, int i1) { /** * require (l0 != l1) */ - public static void neq(long l0, String name0, long l1, String name1, - int numCallsBelowRequirer) { + public static void neq(long l0, String name0, long l1, String name1, int numCallsBelowRequirer) { if (!(l0 != l1)) { fail(name0 + " != " + name1, ExceptionMessageUtil.valueAndName(l0, name0, l1, name1), - numCallsBelowRequirer + 1); + numCallsBelowRequirer + 1); } } @@ -1040,8 +964,7 @@ public static void neq(long l0, String name0, long l1, String name1) { public static void neq(long l0, String name0, long l1, int numCallsBelowRequirer) { if (!(l0 != l1)) { - fail(name0 + " != " + l1, ExceptionMessageUtil.valueAndName(l0, name0), - numCallsBelowRequirer + 1); + fail(name0 + " != " + l1, ExceptionMessageUtil.valueAndName(l0, name0), numCallsBelowRequirer + 1); } } @@ -1053,11 +976,10 @@ public static void neq(long l0, String name0, long l1) { /** * require (f0 != f1) */ - public static void neq(float f0, String name0, float f1, String name1, - int numCallsBelowRequirer) { + public static void neq(float f0, String name0, float f1, String name1, int numCallsBelowRequirer) { if (!(f0 != f1)) { fail(name0 + " != " + name1, ExceptionMessageUtil.valueAndName(f0, name0, f1, name1), - numCallsBelowRequirer + 1); + 
numCallsBelowRequirer + 1); } } @@ -1067,8 +989,7 @@ public static void neq(float f0, String name0, float f1, String name1) { public static void neq(float f0, String name0, float f1, int numCallsBelowRequirer) { if (!(f0 != f1)) { - fail(name0 + " != " + f1, ExceptionMessageUtil.valueAndName(f0, name0), - numCallsBelowRequirer + 1); + fail(name0 + " != " + f1, ExceptionMessageUtil.valueAndName(f0, name0), numCallsBelowRequirer + 1); } } @@ -1080,11 +1001,10 @@ public static void neq(float f0, String name0, float f1) { /** * require (d0 != d1) */ - public static void neq(double d0, String name0, double d1, String name1, - int numCallsBelowRequirer) { + public static void neq(double d0, String name0, double d1, String name1, int numCallsBelowRequirer) { if (!(d0 != d1)) { fail(name0 + " != " + name1, ExceptionMessageUtil.valueAndName(d0, name0, d1, name1), - numCallsBelowRequirer + 1); + numCallsBelowRequirer + 1); } } @@ -1094,8 +1014,7 @@ public static void neq(double d0, String name0, double d1, String name1) { public static void neq(double d0, String name0, double d1, int numCallsBelowRequirer) { if (!(d0 != d1)) { - fail(name0 + " != " + d1, ExceptionMessageUtil.valueAndName(d0, name0), - numCallsBelowRequirer + 1); + fail(name0 + " != " + d1, ExceptionMessageUtil.valueAndName(d0, name0), numCallsBelowRequirer + 1); } } @@ -1113,7 +1032,7 @@ public static void neq(double d0, String name0, double d1) { public static char lt(char c0, String name0, char c1, String name1, int numCallsBelowRequirer) { if (!(c0 < c1)) { fail(name0 + " < " + name1, ExceptionMessageUtil.valueAndName(c0, name0, c1, name1), - numCallsBelowRequirer + 1); + numCallsBelowRequirer + 1); } return c0; } @@ -1124,8 +1043,8 @@ public static char lt(char c0, String name0, char c1, String name1) { public static char lt(char c0, String name0, char c1, int numCallsBelowRequirer) { if (!(c0 < c1)) { - fail(name0 + " < " + ExceptionMessageUtil.valueString(c1), - ExceptionMessageUtil.valueAndName(c0, 
name0), numCallsBelowRequirer + 1); + fail(name0 + " < " + ExceptionMessageUtil.valueString(c1), ExceptionMessageUtil.valueAndName(c0, name0), + numCallsBelowRequirer + 1); } return c0; } @@ -1141,7 +1060,7 @@ public static char lt(char c0, String name0, char c1) { public static byte lt(byte b0, String name0, byte b1, String name1, int numCallsBelowRequirer) { if (!(b0 < b1)) { fail(name0 + " < " + name1, ExceptionMessageUtil.valueAndName(b0, name0, b1, name1), - numCallsBelowRequirer + 1); + numCallsBelowRequirer + 1); } return b0; } @@ -1152,8 +1071,7 @@ public static byte lt(byte b0, String name0, byte b1, String name1) { public static byte lt(byte b0, String name0, byte b1, int numCallsBelowRequirer) { if (!(b0 < b1)) { - fail(name0 + " < " + b1, ExceptionMessageUtil.valueAndName(b0, name0), - numCallsBelowRequirer + 1); + fail(name0 + " < " + b1, ExceptionMessageUtil.valueAndName(b0, name0), numCallsBelowRequirer + 1); } return b0; } @@ -1166,11 +1084,10 @@ public static byte lt(byte b0, String name0, byte b1) { /** * require (s0 < s1) */ - public static short lt(short s0, String name0, short s1, String name1, - int numCallsBelowRequirer) { + public static short lt(short s0, String name0, short s1, String name1, int numCallsBelowRequirer) { if (!(s0 < s1)) { fail(name0 + " < " + name1, ExceptionMessageUtil.valueAndName(s0, name0, s1, name1), - numCallsBelowRequirer + 1); + numCallsBelowRequirer + 1); } return s0; } @@ -1181,8 +1098,7 @@ public static short lt(short s0, String name0, short s1, String name1) { public static short lt(short s0, String name0, short s1, int numCallsBelowRequirer) { if (!(s0 < s1)) { - fail(name0 + " < " + s1, ExceptionMessageUtil.valueAndName(s0, name0), - numCallsBelowRequirer + 1); + fail(name0 + " < " + s1, ExceptionMessageUtil.valueAndName(s0, name0), numCallsBelowRequirer + 1); } return s0; } @@ -1198,7 +1114,7 @@ public static short lt(short s0, String name0, short s1) { public static int lt(int i0, String name0, int i1, 
String name1, int numCallsBelowRequirer) { if (!(i0 < i1)) { fail(name0 + " < " + name1, ExceptionMessageUtil.valueAndName(i0, name0, i1, name1), - numCallsBelowRequirer + 1); + numCallsBelowRequirer + 1); } return i0; } @@ -1209,8 +1125,7 @@ public static int lt(int i0, String name0, int i1, String name1) { public static int lt(int i0, String name0, int i1, int numCallsBelowRequirer) { if (!(i0 < i1)) { - fail(name0 + " < " + i1, ExceptionMessageUtil.valueAndName(i0, name0), - numCallsBelowRequirer + 1); + fail(name0 + " < " + i1, ExceptionMessageUtil.valueAndName(i0, name0), numCallsBelowRequirer + 1); } return i0; } @@ -1226,7 +1141,7 @@ public static int lt(int i0, String name0, int i1) { public static long lt(long l0, String name0, long l1, String name1, int numCallsBelowRequirer) { if (!(l0 < l1)) { fail(name0 + " < " + name1, ExceptionMessageUtil.valueAndName(l0, name0, l1, name1), - numCallsBelowRequirer + 1); + numCallsBelowRequirer + 1); } return l0; } @@ -1237,8 +1152,7 @@ public static long lt(long l0, String name0, long l1, String name1) { public static long lt(long l0, String name0, long l1, int numCallsBelowRequirer) { if (!(l0 < l1)) { - fail(name0 + " < " + l1, ExceptionMessageUtil.valueAndName(l0, name0), - numCallsBelowRequirer + 1); + fail(name0 + " < " + l1, ExceptionMessageUtil.valueAndName(l0, name0), numCallsBelowRequirer + 1); } return l0; } @@ -1251,11 +1165,10 @@ public static long lt(long l0, String name0, long l1) { /** * require (f0 < f1) */ - public static float lt(float f0, String name0, float f1, String name1, - int numCallsBelowRequirer) { + public static float lt(float f0, String name0, float f1, String name1, int numCallsBelowRequirer) { if (!(f0 < f1)) { fail(name0 + " < " + name1, ExceptionMessageUtil.valueAndName(f0, name0, f1, name1), - numCallsBelowRequirer + 1); + numCallsBelowRequirer + 1); } return f0; } @@ -1266,8 +1179,7 @@ public static float lt(float f0, String name0, float f1, String name1) { public static float 
lt(float f0, String name0, float f1, int numCallsBelowRequirer) { if (!(f0 < f1)) { - fail(name0 + " < " + f1, ExceptionMessageUtil.valueAndName(f0, name0), - numCallsBelowRequirer + 1); + fail(name0 + " < " + f1, ExceptionMessageUtil.valueAndName(f0, name0), numCallsBelowRequirer + 1); } return f0; } @@ -1280,11 +1192,10 @@ public static float lt(float f0, String name0, float f1) { /** * require (d0 < d1) */ - public static double lt(double d0, String name0, double d1, String name1, - int numCallsBelowRequirer) { + public static double lt(double d0, String name0, double d1, String name1, int numCallsBelowRequirer) { if (!(d0 < d1)) { fail(name0 + " < " + name1, ExceptionMessageUtil.valueAndName(d0, name0, d1, name1), - numCallsBelowRequirer + 1); + numCallsBelowRequirer + 1); } return d0; } @@ -1295,8 +1206,7 @@ public static double lt(double d0, String name0, double d1, String name1) { public static double lt(double d0, String name0, double d1, int numCallsBelowRequirer) { if (!(d0 < d1)) { - fail(name0 + " < " + d1, ExceptionMessageUtil.valueAndName(d0, name0), - numCallsBelowRequirer + 1); + fail(name0 + " < " + d1, ExceptionMessageUtil.valueAndName(d0, name0), numCallsBelowRequirer + 1); } return d0; } @@ -1312,11 +1222,10 @@ public static double lt(double d0, String name0, double d1) { /** * require (c0 <= c1) */ - public static char leq(char c0, String name0, char c1, String name1, - int numCallsBelowRequirer) { + public static char leq(char c0, String name0, char c1, String name1, int numCallsBelowRequirer) { if (!(c0 <= c1)) { fail(name0 + " <= " + name1, ExceptionMessageUtil.valueAndName(c0, name0, c1, name1), - numCallsBelowRequirer + 1); + numCallsBelowRequirer + 1); } return c0; } @@ -1327,8 +1236,8 @@ public static char leq(char c0, String name0, char c1, String name1) { public static char leq(char c0, String name0, char c1, int numCallsBelowRequirer) { if (!(c0 <= c1)) { - fail(name0 + " <= " + ExceptionMessageUtil.valueString(c1), - 
ExceptionMessageUtil.valueAndName(c0, name0), numCallsBelowRequirer + 1); + fail(name0 + " <= " + ExceptionMessageUtil.valueString(c1), ExceptionMessageUtil.valueAndName(c0, name0), + numCallsBelowRequirer + 1); } return c0; } @@ -1341,11 +1250,10 @@ public static char leq(char c0, String name0, char c1) { /** * require (b0 <= b1) */ - public static byte leq(byte b0, String name0, byte b1, String name1, - int numCallsBelowRequirer) { + public static byte leq(byte b0, String name0, byte b1, String name1, int numCallsBelowRequirer) { if (!(b0 <= b1)) { fail(name0 + " <= " + name1, ExceptionMessageUtil.valueAndName(b0, name0, b1, name1), - numCallsBelowRequirer + 1); + numCallsBelowRequirer + 1); } return b0; } @@ -1356,8 +1264,7 @@ public static byte leq(byte b0, String name0, byte b1, String name1) { public static byte leq(byte b0, String name0, byte b1, int numCallsBelowRequirer) { if (!(b0 <= b1)) { - fail(name0 + " <= " + b1, ExceptionMessageUtil.valueAndName(b0, name0), - numCallsBelowRequirer + 1); + fail(name0 + " <= " + b1, ExceptionMessageUtil.valueAndName(b0, name0), numCallsBelowRequirer + 1); } return b0; } @@ -1370,11 +1277,10 @@ public static byte leq(byte b0, String name0, byte b1) { /** * require (s0 <= s1) */ - public static short leq(short s0, String name0, short s1, String name1, - int numCallsBelowRequirer) { + public static short leq(short s0, String name0, short s1, String name1, int numCallsBelowRequirer) { if (!(s0 <= s1)) { fail(name0 + " <= " + name1, ExceptionMessageUtil.valueAndName(s0, name0, s1, name1), - numCallsBelowRequirer + 1); + numCallsBelowRequirer + 1); } return s0; } @@ -1385,8 +1291,7 @@ public static short leq(short s0, String name0, short s1, String name1) { public static short leq(short s0, String name0, short s1, int numCallsBelowRequirer) { if (!(s0 <= s1)) { - fail(name0 + " <= " + s1, ExceptionMessageUtil.valueAndName(s0, name0), - numCallsBelowRequirer + 1); + fail(name0 + " <= " + s1, 
ExceptionMessageUtil.valueAndName(s0, name0), numCallsBelowRequirer + 1); } return s0; } @@ -1402,7 +1307,7 @@ public static short leq(short s0, String name0, short s1) { public static int leq(int i0, String name0, int i1, String name1, int numCallsBelowRequirer) { if (!(i0 <= i1)) { fail(name0 + " <= " + name1, ExceptionMessageUtil.valueAndName(i0, name0, i1, name1), - numCallsBelowRequirer + 1); + numCallsBelowRequirer + 1); } return i0; } @@ -1413,8 +1318,7 @@ public static int leq(int i0, String name0, int i1, String name1) { public static int leq(int i0, String name0, int i1, int numCallsBelowRequirer) { if (!(i0 <= i1)) { - fail(name0 + " <= " + i1, ExceptionMessageUtil.valueAndName(i0, name0), - numCallsBelowRequirer + 1); + fail(name0 + " <= " + i1, ExceptionMessageUtil.valueAndName(i0, name0), numCallsBelowRequirer + 1); } return i0; } @@ -1427,11 +1331,10 @@ public static int leq(int i0, String name0, int i1) { /** * require (l0 <= l1) */ - public static long leq(long l0, String name0, long l1, String name1, - int numCallsBelowRequirer) { + public static long leq(long l0, String name0, long l1, String name1, int numCallsBelowRequirer) { if (!(l0 <= l1)) { fail(name0 + " <= " + name1, ExceptionMessageUtil.valueAndName(l0, name0, l1, name1), - numCallsBelowRequirer + 1); + numCallsBelowRequirer + 1); } return l0; } @@ -1442,8 +1345,7 @@ public static long leq(long l0, String name0, long l1, String name1) { public static long leq(long l0, String name0, long l1, int numCallsBelowRequirer) { if (!(l0 <= l1)) { - fail(name0 + " <= " + l1, ExceptionMessageUtil.valueAndName(l0, name0), - numCallsBelowRequirer + 1); + fail(name0 + " <= " + l1, ExceptionMessageUtil.valueAndName(l0, name0), numCallsBelowRequirer + 1); } return l0; } @@ -1456,11 +1358,10 @@ public static long leq(long l0, String name0, long l1) { /** * require (f0 <= f1) */ - public static float leq(float f0, String name0, float f1, String name1, - int numCallsBelowRequirer) { + public static float 
leq(float f0, String name0, float f1, String name1, int numCallsBelowRequirer) { if (!(f0 <= f1)) { fail(name0 + " <= " + name1, ExceptionMessageUtil.valueAndName(f0, name0, f1, name1), - numCallsBelowRequirer + 1); + numCallsBelowRequirer + 1); } return f0; } @@ -1471,8 +1372,7 @@ public static float leq(float f0, String name0, float f1, String name1) { public static float leq(float f0, String name0, float f1, int numCallsBelowRequirer) { if (!(f0 <= f1)) { - fail(name0 + " <= " + f1, ExceptionMessageUtil.valueAndName(f0, name0), - numCallsBelowRequirer + 1); + fail(name0 + " <= " + f1, ExceptionMessageUtil.valueAndName(f0, name0), numCallsBelowRequirer + 1); } return f0; } @@ -1485,11 +1385,10 @@ public static float leq(float f0, String name0, float f1) { /** * require (d0 <= d1) */ - public static double leq(double d0, String name0, double d1, String name1, - int numCallsBelowRequirer) { + public static double leq(double d0, String name0, double d1, String name1, int numCallsBelowRequirer) { if (!(d0 <= d1)) { fail(name0 + " <= " + name1, ExceptionMessageUtil.valueAndName(d0, name0, d1, name1), - numCallsBelowRequirer + 1); + numCallsBelowRequirer + 1); } return d0; } @@ -1500,8 +1399,7 @@ public static double leq(double d0, String name0, double d1, String name1) { public static double leq(double d0, String name0, double d1, int numCallsBelowRequirer) { if (!(d0 <= d1)) { - fail(name0 + " <= " + d1, ExceptionMessageUtil.valueAndName(d0, name0), - numCallsBelowRequirer + 1); + fail(name0 + " <= " + d1, ExceptionMessageUtil.valueAndName(d0, name0), numCallsBelowRequirer + 1); } return d0; } @@ -1520,7 +1418,7 @@ public static double leq(double d0, String name0, double d1) { public static char gt(char c0, String name0, char c1, String name1, int numCallsBelowRequirer) { if (!(c0 > c1)) { fail(name0 + " > " + name1, ExceptionMessageUtil.valueAndName(c0, name0, c1, name1), - numCallsBelowRequirer + 1); + numCallsBelowRequirer + 1); } return c0; } @@ -1531,8 +1429,8 
@@ public static char gt(char c0, String name0, char c1, String name1) { public static char gt(char c0, String name0, char c1, int numCallsBelowRequirer) { if (!(c0 > c1)) { - fail(name0 + " > " + ExceptionMessageUtil.valueString(c1), - ExceptionMessageUtil.valueAndName(c0, name0), numCallsBelowRequirer + 1); + fail(name0 + " > " + ExceptionMessageUtil.valueString(c1), ExceptionMessageUtil.valueAndName(c0, name0), + numCallsBelowRequirer + 1); } return c0; } @@ -1548,7 +1446,7 @@ public static char gt(char c0, String name0, char c1) { public static byte gt(byte b0, String name0, byte b1, String name1, int numCallsBelowRequirer) { if (!(b0 > b1)) { fail(name0 + " > " + name1, ExceptionMessageUtil.valueAndName(b0, name0, b1, name1), - numCallsBelowRequirer + 1); + numCallsBelowRequirer + 1); } return b0; } @@ -1559,8 +1457,7 @@ public static byte gt(byte b0, String name0, byte b1, String name1) { public static byte gt(byte b0, String name0, byte b1, int numCallsBelowRequirer) { if (!(b0 > b1)) { - fail(name0 + " > " + b1, ExceptionMessageUtil.valueAndName(b0, name0), - numCallsBelowRequirer + 1); + fail(name0 + " > " + b1, ExceptionMessageUtil.valueAndName(b0, name0), numCallsBelowRequirer + 1); } return b0; } @@ -1573,11 +1470,10 @@ public static byte gt(byte b0, String name0, byte b1) { /** * require (s0 > s1) */ - public static short gt(short s0, String name0, short s1, String name1, - int numCallsBelowRequirer) { + public static short gt(short s0, String name0, short s1, String name1, int numCallsBelowRequirer) { if (!(s0 > s1)) { fail(name0 + " > " + name1, ExceptionMessageUtil.valueAndName(s0, name0, s1, name1), - numCallsBelowRequirer + 1); + numCallsBelowRequirer + 1); } return s0; } @@ -1588,8 +1484,7 @@ public static short gt(short s0, String name0, short s1, String name1) { public static short gt(short s0, String name0, short s1, int numCallsBelowRequirer) { if (!(s0 > s1)) { - fail(name0 + " > " + s1, ExceptionMessageUtil.valueAndName(s0, name0), - 
numCallsBelowRequirer + 1); + fail(name0 + " > " + s1, ExceptionMessageUtil.valueAndName(s0, name0), numCallsBelowRequirer + 1); } return s0; } @@ -1605,7 +1500,7 @@ public static short gt(short s0, String name0, short s1) { public static int gt(int i0, String name0, int i1, String name1, int numCallsBelowRequirer) { if (!(i0 > i1)) { fail(name0 + " > " + name1, ExceptionMessageUtil.valueAndName(i0, name0, i1, name1), - numCallsBelowRequirer + 1); + numCallsBelowRequirer + 1); } return i0; } @@ -1616,8 +1511,7 @@ public static int gt(int i0, String name0, int i1, String name1) { public static int gt(int i0, String name0, int i1, int numCallsBelowRequirer) { if (!(i0 > i1)) { - fail(name0 + " > " + i1, ExceptionMessageUtil.valueAndName(i0, name0), - numCallsBelowRequirer + 1); + fail(name0 + " > " + i1, ExceptionMessageUtil.valueAndName(i0, name0), numCallsBelowRequirer + 1); } return i0; } @@ -1633,7 +1527,7 @@ public static int gt(int i0, String name0, int i1) { public static long gt(long l0, String name0, long l1, String name1, int numCallsBelowRequirer) { if (!(l0 > l1)) { fail(name0 + " > " + name1, ExceptionMessageUtil.valueAndName(l0, name0, l1, name1), - numCallsBelowRequirer + 1); + numCallsBelowRequirer + 1); } return l0; } @@ -1644,8 +1538,7 @@ public static long gt(long l0, String name0, long l1, String name1) { public static long gt(long l0, String name0, long l1, int numCallsBelowRequirer) { if (!(l0 > l1)) { - fail(name0 + " > " + l1, ExceptionMessageUtil.valueAndName(l0, name0), - numCallsBelowRequirer + 1); + fail(name0 + " > " + l1, ExceptionMessageUtil.valueAndName(l0, name0), numCallsBelowRequirer + 1); } return l0; } @@ -1658,11 +1551,10 @@ public static long gt(long l0, String name0, long l1) { /** * require (f0 > f1) */ - public static float gt(float f0, String name0, float f1, String name1, - int numCallsBelowRequirer) { + public static float gt(float f0, String name0, float f1, String name1, int numCallsBelowRequirer) { if (!(f0 > f1)) { 
fail(name0 + " > " + name1, ExceptionMessageUtil.valueAndName(f0, name0, f1, name1), - numCallsBelowRequirer + 1); + numCallsBelowRequirer + 1); } return f0; } @@ -1673,8 +1565,7 @@ public static float gt(float f0, String name0, float f1, String name1) { public static float gt(float f0, String name0, float f1, int numCallsBelowRequirer) { if (!(f0 > f1)) { - fail(name0 + " > " + f1, ExceptionMessageUtil.valueAndName(f0, name0), - numCallsBelowRequirer + 1); + fail(name0 + " > " + f1, ExceptionMessageUtil.valueAndName(f0, name0), numCallsBelowRequirer + 1); } return f0; } @@ -1687,11 +1578,10 @@ public static float gt(float f0, String name0, float f1) { /** * require (d0 > d1) */ - public static double gt(double d0, String name0, double d1, String name1, - int numCallsBelowRequirer) { + public static double gt(double d0, String name0, double d1, String name1, int numCallsBelowRequirer) { if (!(d0 > d1)) { fail(name0 + " > " + name1, ExceptionMessageUtil.valueAndName(d0, name0, d1, name1), - numCallsBelowRequirer + 1); + numCallsBelowRequirer + 1); } return d0; } @@ -1702,8 +1592,7 @@ public static double gt(double d0, String name0, double d1, String name1) { public static double gt(double d0, String name0, double d1, int numCallsBelowRequirer) { if (!(d0 > d1)) { - fail(name0 + " > " + d1, ExceptionMessageUtil.valueAndName(d0, name0), - numCallsBelowRequirer + 1); + fail(name0 + " > " + d1, ExceptionMessageUtil.valueAndName(d0, name0), numCallsBelowRequirer + 1); } return d0; } @@ -1719,11 +1608,10 @@ public static double gt(double d0, String name0, double d1) { /** * require (c0 >= c1) */ - public static char geq(char c0, String name0, char c1, String name1, - int numCallsBelowRequirer) { + public static char geq(char c0, String name0, char c1, String name1, int numCallsBelowRequirer) { if (!(c0 >= c1)) { fail(name0 + " >= " + name1, ExceptionMessageUtil.valueAndName(c0, name0, c1, name1), - numCallsBelowRequirer + 1); + numCallsBelowRequirer + 1); } return c0; } 
@@ -1734,8 +1622,8 @@ public static char geq(char c0, String name0, char c1, String name1) { public static char geq(char c0, String name0, char c1, int numCallsBelowRequirer) { if (!(c0 >= c1)) { - fail(name0 + " >= " + ExceptionMessageUtil.valueString(c1), - ExceptionMessageUtil.valueAndName(c0, name0), numCallsBelowRequirer + 1); + fail(name0 + " >= " + ExceptionMessageUtil.valueString(c1), ExceptionMessageUtil.valueAndName(c0, name0), + numCallsBelowRequirer + 1); } return c0; } @@ -1748,11 +1636,10 @@ public static char geq(char c0, String name0, char c1) { /** * require (b0 >= b1) */ - public static byte geq(byte b0, String name0, byte b1, String name1, - int numCallsBelowRequirer) { + public static byte geq(byte b0, String name0, byte b1, String name1, int numCallsBelowRequirer) { if (!(b0 >= b1)) { fail(name0 + " >= " + name1, ExceptionMessageUtil.valueAndName(b0, name0, b1, name1), - numCallsBelowRequirer + 1); + numCallsBelowRequirer + 1); } return b0; } @@ -1763,8 +1650,7 @@ public static byte geq(byte b0, String name0, byte b1, String name1) { public static byte geq(byte b0, String name0, byte b1, int numCallsBelowRequirer) { if (!(b0 >= b1)) { - fail(name0 + " >= " + b1, ExceptionMessageUtil.valueAndName(b0, name0), - numCallsBelowRequirer + 1); + fail(name0 + " >= " + b1, ExceptionMessageUtil.valueAndName(b0, name0), numCallsBelowRequirer + 1); } return b0; } @@ -1777,11 +1663,10 @@ public static byte geq(byte b0, String name0, byte b1) { /** * require (s0 >= s1) */ - public static short geq(short s0, String name0, short s1, String name1, - int numCallsBelowRequirer) { + public static short geq(short s0, String name0, short s1, String name1, int numCallsBelowRequirer) { if (!(s0 >= s1)) { fail(name0 + " >= " + name1, ExceptionMessageUtil.valueAndName(s0, name0, s1, name1), - numCallsBelowRequirer + 1); + numCallsBelowRequirer + 1); } return s0; } @@ -1792,8 +1677,7 @@ public static short geq(short s0, String name0, short s1, String name1) { public 
static short geq(short s0, String name0, short s1, int numCallsBelowRequirer) { if (!(s0 >= s1)) { - fail(name0 + " >= " + s1, ExceptionMessageUtil.valueAndName(s0, name0), - numCallsBelowRequirer + 1); + fail(name0 + " >= " + s1, ExceptionMessageUtil.valueAndName(s0, name0), numCallsBelowRequirer + 1); } return s0; } @@ -1809,7 +1693,7 @@ public static short geq(short s0, String name0, short s1) { public static int geq(int i0, String name0, int i1, String name1, int numCallsBelowRequirer) { if (!(i0 >= i1)) { fail(name0 + " >= " + name1, ExceptionMessageUtil.valueAndName(i0, name0, i1, name1), - numCallsBelowRequirer + 1); + numCallsBelowRequirer + 1); } return i0; } @@ -1820,8 +1704,7 @@ public static int geq(int i0, String name0, int i1, String name1) { public static int geq(int i0, String name0, int i1, int numCallsBelowRequirer) { if (!(i0 >= i1)) { - fail(name0 + " >= " + i1, ExceptionMessageUtil.valueAndName(i0, name0), - numCallsBelowRequirer + 1); + fail(name0 + " >= " + i1, ExceptionMessageUtil.valueAndName(i0, name0), numCallsBelowRequirer + 1); } return i0; } @@ -1834,11 +1717,10 @@ public static int geq(int i0, String name0, int i1) { /** * require (l0 >= l1) */ - public static long geq(long l0, String name0, long l1, String name1, - int numCallsBelowRequirer) { + public static long geq(long l0, String name0, long l1, String name1, int numCallsBelowRequirer) { if (!(l0 >= l1)) { fail(name0 + " >= " + name1, ExceptionMessageUtil.valueAndName(l0, name0, l1, name1), - numCallsBelowRequirer + 1); + numCallsBelowRequirer + 1); } return l0; } @@ -1849,8 +1731,7 @@ public static long geq(long l0, String name0, long l1, String name1) { public static long geq(long l0, String name0, long l1, int numCallsBelowRequirer) { if (!(l0 >= l1)) { - fail(name0 + " >= " + l1, ExceptionMessageUtil.valueAndName(l0, name0), - numCallsBelowRequirer + 1); + fail(name0 + " >= " + l1, ExceptionMessageUtil.valueAndName(l0, name0), numCallsBelowRequirer + 1); } return l0; } @@ 
-1863,11 +1744,10 @@ public static long geq(long l0, String name0, long l1) { /** * require (f0 >= f1) */ - public static float geq(float f0, String name0, float f1, String name1, - int numCallsBelowRequirer) { + public static float geq(float f0, String name0, float f1, String name1, int numCallsBelowRequirer) { if (!(f0 >= f1)) { fail(name0 + " >= " + name1, ExceptionMessageUtil.valueAndName(f0, name0, f1, name1), - numCallsBelowRequirer + 1); + numCallsBelowRequirer + 1); } return f0; } @@ -1878,8 +1758,7 @@ public static float geq(float f0, String name0, float f1, String name1) { public static float geq(float f0, String name0, float f1, int numCallsBelowRequirer) { if (!(f0 >= f1)) { - fail(name0 + " >= " + f1, ExceptionMessageUtil.valueAndName(f0, name0), - numCallsBelowRequirer + 1); + fail(name0 + " >= " + f1, ExceptionMessageUtil.valueAndName(f0, name0), numCallsBelowRequirer + 1); } return f0; } @@ -1892,11 +1771,10 @@ public static float geq(float f0, String name0, float f1) { /** * require (d0 >= d1) */ - public static double geq(double d0, String name0, double d1, String name1, - int numCallsBelowRequirer) { + public static double geq(double d0, String name0, double d1, String name1, int numCallsBelowRequirer) { if (!(d0 >= d1)) { fail(name0 + " >= " + name1, ExceptionMessageUtil.valueAndName(d0, name0, d1, name1), - numCallsBelowRequirer + 1); + numCallsBelowRequirer + 1); } return d0; } @@ -1907,8 +1785,7 @@ public static double geq(double d0, String name0, double d1, String name1) { public static double geq(double d0, String name0, double d1, int numCallsBelowRequirer) { if (!(d0 >= d1)) { - fail(name0 + " >= " + d1, ExceptionMessageUtil.valueAndName(d0, name0), - numCallsBelowRequirer + 1); + fail(name0 + " >= " + d1, ExceptionMessageUtil.valueAndName(d0, name0), numCallsBelowRequirer + 1); } return d0; } @@ -1927,8 +1804,7 @@ public static double geq(double d0, String name0, double d1) { */ public static void eqFalse(boolean b, String name, int 
numCallsBelowRequirer) { if (!(false == b)) { - fail(name + " == false", ExceptionMessageUtil.valueAndName(b, name), - numCallsBelowRequirer + 1); + fail(name + " == false", ExceptionMessageUtil.valueAndName(b, name), numCallsBelowRequirer + 1); } } @@ -1942,8 +1818,7 @@ public static void eqFalse(boolean b, String name) { */ public static void neqFalse(boolean b, String name, int numCallsBelowRequirer) { if (!(false != b)) { - fail(name + " != false", ExceptionMessageUtil.valueAndName(b, name), - numCallsBelowRequirer + 1); + fail(name + " != false", ExceptionMessageUtil.valueAndName(b, name), numCallsBelowRequirer + 1); } } @@ -1957,8 +1832,7 @@ public static void neqFalse(boolean b, String name) { */ public static void eqTrue(boolean b, String name, int numCallsBelowRequirer) { if (!(true == b)) { - fail(name + " == true", ExceptionMessageUtil.valueAndName(b, name), - numCallsBelowRequirer + 1); + fail(name + " == true", ExceptionMessageUtil.valueAndName(b, name), numCallsBelowRequirer + 1); } } @@ -1972,8 +1846,7 @@ public static void eqTrue(boolean b, String name) { */ public static void neqTrue(boolean b, String name, int numCallsBelowRequirer) { if (!(true != b)) { - fail(name + " != true", ExceptionMessageUtil.valueAndName(b, name), - numCallsBelowRequirer + 1); + fail(name + " != true", ExceptionMessageUtil.valueAndName(b, name), numCallsBelowRequirer + 1); } } @@ -1990,8 +1863,7 @@ public static void neqTrue(boolean b, String name) { */ public static void eqZero(char c, String name, int numCallsBelowRequirer) { if (!(0 == c)) { - fail(name + " == 0", ExceptionMessageUtil.valueAndName(c, name), - numCallsBelowRequirer + 1); + fail(name + " == 0", ExceptionMessageUtil.valueAndName(c, name), numCallsBelowRequirer + 1); } } @@ -2005,8 +1877,7 @@ public static void eqZero(char c, String name) { */ public static void eqZero(byte b, String name, int numCallsBelowRequirer) { if (!(0 == b)) { - fail(name + " == 0", ExceptionMessageUtil.valueAndName(b, name), - 
numCallsBelowRequirer + 1); + fail(name + " == 0", ExceptionMessageUtil.valueAndName(b, name), numCallsBelowRequirer + 1); } } @@ -2020,8 +1891,7 @@ public static void eqZero(byte b, String name) { */ public static void eqZero(short s, String name, int numCallsBelowRequirer) { if (!(0 == s)) { - fail(name + " == 0", ExceptionMessageUtil.valueAndName(s, name), - numCallsBelowRequirer + 1); + fail(name + " == 0", ExceptionMessageUtil.valueAndName(s, name), numCallsBelowRequirer + 1); } } @@ -2035,8 +1905,7 @@ public static void eqZero(short s, String name) { */ public static void eqZero(int i, String name, int numCallsBelowRequirer) { if (!(0 == i)) { - fail(name + " == 0", ExceptionMessageUtil.valueAndName(i, name), - numCallsBelowRequirer + 1); + fail(name + " == 0", ExceptionMessageUtil.valueAndName(i, name), numCallsBelowRequirer + 1); } } @@ -2050,8 +1919,7 @@ public static void eqZero(int i, String name) { */ public static void eqZero(long l, String name, int numCallsBelowRequirer) { if (!(0 == l)) { - fail(name + " == 0", ExceptionMessageUtil.valueAndName(l, name), - numCallsBelowRequirer + 1); + fail(name + " == 0", ExceptionMessageUtil.valueAndName(l, name), numCallsBelowRequirer + 1); } } @@ -2065,8 +1933,7 @@ public static void eqZero(long l, String name) { */ public static void eqZero(float f, String name, int numCallsBelowRequirer) { if (!(0 == f)) { - fail(name + " == 0", ExceptionMessageUtil.valueAndName(f, name), - numCallsBelowRequirer + 1); + fail(name + " == 0", ExceptionMessageUtil.valueAndName(f, name), numCallsBelowRequirer + 1); } } @@ -2080,8 +1947,7 @@ public static void eqZero(float f, String name) { */ public static void eqZero(double d, String name, int numCallsBelowRequirer) { if (!(0 == d)) { - fail(name + " == 0", ExceptionMessageUtil.valueAndName(d, name), - numCallsBelowRequirer + 1); + fail(name + " == 0", ExceptionMessageUtil.valueAndName(d, name), numCallsBelowRequirer + 1); } } @@ -2098,8 +1964,7 @@ public static void 
eqZero(double d, String name) { */ public static char neqZero(char c, String name, int numCallsBelowRequirer) { if (!(0 != c)) { - fail(name + " != 0", ExceptionMessageUtil.valueAndName(c, name), - numCallsBelowRequirer + 1); + fail(name + " != 0", ExceptionMessageUtil.valueAndName(c, name), numCallsBelowRequirer + 1); } return c; } @@ -2114,8 +1979,7 @@ public static char neqZero(char c, String name) { */ public static byte neqZero(byte b, String name, int numCallsBelowRequirer) { if (!(0 != b)) { - fail(name + " != 0", ExceptionMessageUtil.valueAndName(b, name), - numCallsBelowRequirer + 1); + fail(name + " != 0", ExceptionMessageUtil.valueAndName(b, name), numCallsBelowRequirer + 1); } return b; } @@ -2130,8 +1994,7 @@ public static byte neqZero(byte b, String name) { */ public static short neqZero(short s, String name, int numCallsBelowRequirer) { if (!(0 != s)) { - fail(name + " != 0", ExceptionMessageUtil.valueAndName(s, name), - numCallsBelowRequirer + 1); + fail(name + " != 0", ExceptionMessageUtil.valueAndName(s, name), numCallsBelowRequirer + 1); } return s; } @@ -2146,8 +2009,7 @@ public static short neqZero(short s, String name) { */ public static int neqZero(int i, String name, int numCallsBelowRequirer) { if (!(0 != i)) { - fail(name + " != 0", ExceptionMessageUtil.valueAndName(i, name), - numCallsBelowRequirer + 1); + fail(name + " != 0", ExceptionMessageUtil.valueAndName(i, name), numCallsBelowRequirer + 1); } return i; } @@ -2162,8 +2024,7 @@ public static int neqZero(int i, String name) { */ public static long neqZero(long l, String name, int numCallsBelowRequirer) { if (!(0 != l)) { - fail(name + " != 0", ExceptionMessageUtil.valueAndName(l, name), - numCallsBelowRequirer + 1); + fail(name + " != 0", ExceptionMessageUtil.valueAndName(l, name), numCallsBelowRequirer + 1); } return l; } @@ -2178,8 +2039,7 @@ public static long neqZero(long l, String name) { */ public static float neqZero(float f, String name, int numCallsBelowRequirer) { if (!(0 != 
f)) { - fail(name + " != 0", ExceptionMessageUtil.valueAndName(f, name), - numCallsBelowRequirer + 1); + fail(name + " != 0", ExceptionMessageUtil.valueAndName(f, name), numCallsBelowRequirer + 1); } return f; } @@ -2194,8 +2054,7 @@ public static float neqZero(float f, String name) { */ public static double neqZero(double d, String name, int numCallsBelowRequirer) { if (!(0 != d)) { - fail(name + " != 0", ExceptionMessageUtil.valueAndName(d, name), - numCallsBelowRequirer + 1); + fail(name + " != 0", ExceptionMessageUtil.valueAndName(d, name), numCallsBelowRequirer + 1); } return d; } @@ -2213,8 +2072,7 @@ public static double neqZero(double d, String name) { */ public static byte ltZero(byte b, String name, int numCallsBelowRequirer) { if (!(b < 0)) { - fail(name + " < 0", ExceptionMessageUtil.valueAndName(b, name), - numCallsBelowRequirer + 1); + fail(name + " < 0", ExceptionMessageUtil.valueAndName(b, name), numCallsBelowRequirer + 1); } return b; } @@ -2229,8 +2087,7 @@ public static byte ltZero(byte b, String name) { */ public static short ltZero(short s, String name, int numCallsBelowRequirer) { if (!(s < 0)) { - fail(name + " < 0", ExceptionMessageUtil.valueAndName(s, name), - numCallsBelowRequirer + 1); + fail(name + " < 0", ExceptionMessageUtil.valueAndName(s, name), numCallsBelowRequirer + 1); } return s; } @@ -2245,8 +2102,7 @@ public static short ltZero(short s, String name) { */ public static int ltZero(int i, String name, int numCallsBelowRequirer) { if (!(i < 0)) { - fail(name + " < 0", ExceptionMessageUtil.valueAndName(i, name), - numCallsBelowRequirer + 1); + fail(name + " < 0", ExceptionMessageUtil.valueAndName(i, name), numCallsBelowRequirer + 1); } return i; } @@ -2261,8 +2117,7 @@ public static int ltZero(int i, String name) { */ public static long ltZero(long l, String name, int numCallsBelowRequirer) { if (!(l < 0)) { - fail(name + " < 0", ExceptionMessageUtil.valueAndName(l, name), - numCallsBelowRequirer + 1); + fail(name + " < 0", 
ExceptionMessageUtil.valueAndName(l, name), numCallsBelowRequirer + 1); } return l; } @@ -2277,8 +2132,7 @@ public static long ltZero(long l, String name) { */ public static float ltZero(float f, String name, int numCallsBelowRequirer) { if (!(f < 0)) { - fail(name + " < 0", ExceptionMessageUtil.valueAndName(f, name), - numCallsBelowRequirer + 1); + fail(name + " < 0", ExceptionMessageUtil.valueAndName(f, name), numCallsBelowRequirer + 1); } return f; } @@ -2293,8 +2147,7 @@ public static float ltZero(float f, String name) { */ public static double ltZero(double d, String name, int numCallsBelowRequirer) { if (!(d < 0)) { - fail(name + " < 0", ExceptionMessageUtil.valueAndName(d, name), - numCallsBelowRequirer + 1); + fail(name + " < 0", ExceptionMessageUtil.valueAndName(d, name), numCallsBelowRequirer + 1); } return d; } @@ -2312,8 +2165,7 @@ public static double ltZero(double d, String name) { */ public static byte leqZero(byte b, String name, int numCallsBelowRequirer) { if (!(b <= 0)) { - fail(name + " <= 0", ExceptionMessageUtil.valueAndName(b, name), - numCallsBelowRequirer + 1); + fail(name + " <= 0", ExceptionMessageUtil.valueAndName(b, name), numCallsBelowRequirer + 1); } return b; } @@ -2328,8 +2180,7 @@ public static byte leqZero(byte b, String name) { */ public static short leqZero(short s, String name, int numCallsBelowRequirer) { if (!(s <= 0)) { - fail(name + " <= 0", ExceptionMessageUtil.valueAndName(s, name), - numCallsBelowRequirer + 1); + fail(name + " <= 0", ExceptionMessageUtil.valueAndName(s, name), numCallsBelowRequirer + 1); } return s; } @@ -2344,8 +2195,7 @@ public static short leqZero(short s, String name) { */ public static int leqZero(int i, String name, int numCallsBelowRequirer) { if (!(i <= 0)) { - fail(name + " <= 0", ExceptionMessageUtil.valueAndName(i, name), - numCallsBelowRequirer + 1); + fail(name + " <= 0", ExceptionMessageUtil.valueAndName(i, name), numCallsBelowRequirer + 1); } return i; } @@ -2360,8 +2210,7 @@ public static 
int leqZero(int i, String name) { */ public static long leqZero(long l, String name, int numCallsBelowRequirer) { if (!(l <= 0)) { - fail(name + " <= 0", ExceptionMessageUtil.valueAndName(l, name), - numCallsBelowRequirer + 1); + fail(name + " <= 0", ExceptionMessageUtil.valueAndName(l, name), numCallsBelowRequirer + 1); } return l; } @@ -2376,8 +2225,7 @@ public static long leqZero(long l, String name) { */ public static float leqZero(float f, String name, int numCallsBelowRequirer) { if (!(f <= 0)) { - fail(name + " <= 0", ExceptionMessageUtil.valueAndName(f, name), - numCallsBelowRequirer + 1); + fail(name + " <= 0", ExceptionMessageUtil.valueAndName(f, name), numCallsBelowRequirer + 1); } return f; } @@ -2392,8 +2240,7 @@ public static float leqZero(float f, String name) { */ public static double leqZero(double d, String name, int numCallsBelowRequirer) { if (!(d <= 0)) { - fail(name + " <= 0", ExceptionMessageUtil.valueAndName(d, name), - numCallsBelowRequirer + 1); + fail(name + " <= 0", ExceptionMessageUtil.valueAndName(d, name), numCallsBelowRequirer + 1); } return d; } @@ -2411,8 +2258,7 @@ public static double leqZero(double d, String name) { */ public static byte gtZero(byte b, String name, int numCallsBelowRequirer) { if (!(b > 0)) { - fail(name + " > 0", ExceptionMessageUtil.valueAndName(b, name), - numCallsBelowRequirer + 1); + fail(name + " > 0", ExceptionMessageUtil.valueAndName(b, name), numCallsBelowRequirer + 1); } return b; } @@ -2427,8 +2273,7 @@ public static byte gtZero(byte b, String name) { */ public static short gtZero(short s, String name, int numCallsBelowRequirer) { if (!(s > 0)) { - fail(name + " > 0", ExceptionMessageUtil.valueAndName(s, name), - numCallsBelowRequirer + 1); + fail(name + " > 0", ExceptionMessageUtil.valueAndName(s, name), numCallsBelowRequirer + 1); } return s; } @@ -2443,8 +2288,7 @@ public static short gtZero(short s, String name) { */ public static int gtZero(int i, String name, int numCallsBelowRequirer) { if (!(i 
> 0)) { - fail(name + " > 0", ExceptionMessageUtil.valueAndName(i, name), - numCallsBelowRequirer + 1); + fail(name + " > 0", ExceptionMessageUtil.valueAndName(i, name), numCallsBelowRequirer + 1); } return i; } @@ -2459,8 +2303,7 @@ public static int gtZero(int i, String name) { */ public static long gtZero(long l, String name, int numCallsBelowRequirer) { if (!(l > 0)) { - fail(name + " > 0", ExceptionMessageUtil.valueAndName(l, name), - numCallsBelowRequirer + 1); + fail(name + " > 0", ExceptionMessageUtil.valueAndName(l, name), numCallsBelowRequirer + 1); } return l; } @@ -2475,8 +2318,7 @@ public static long gtZero(long l, String name) { */ public static float gtZero(float f, String name, int numCallsBelowRequirer) { if (!(f > 0)) { - fail(name + " > 0", ExceptionMessageUtil.valueAndName(f, name), - numCallsBelowRequirer + 1); + fail(name + " > 0", ExceptionMessageUtil.valueAndName(f, name), numCallsBelowRequirer + 1); } return f; } @@ -2491,8 +2333,7 @@ public static float gtZero(float f, String name) { */ public static double gtZero(double d, String name, int numCallsBelowRequirer) { if (!(d > 0)) { - fail(name + " > 0", ExceptionMessageUtil.valueAndName(d, name), - numCallsBelowRequirer + 1); + fail(name + " > 0", ExceptionMessageUtil.valueAndName(d, name), numCallsBelowRequirer + 1); } return d; } @@ -2510,8 +2351,7 @@ public static double gtZero(double d, String name) { */ public static byte geqZero(byte b, String name, int numCallsBelowRequirer) { if (!(b >= 0)) { - fail(name + " >= 0", ExceptionMessageUtil.valueAndName(b, name), - numCallsBelowRequirer + 1); + fail(name + " >= 0", ExceptionMessageUtil.valueAndName(b, name), numCallsBelowRequirer + 1); } return b; } @@ -2526,8 +2366,7 @@ public static byte geqZero(byte b, String name) { */ public static short geqZero(short s, String name, int numCallsBelowRequirer) { if (!(s >= 0)) { - fail(name + " >= 0", ExceptionMessageUtil.valueAndName(s, name), - numCallsBelowRequirer + 1); + fail(name + " >= 0", 
ExceptionMessageUtil.valueAndName(s, name), numCallsBelowRequirer + 1); } return s; } @@ -2542,8 +2381,7 @@ public static short geqZero(short s, String name) { */ public static int geqZero(int i, String name, int numCallsBelowRequirer) { if (!(i >= 0)) { - fail(name + " >= 0", ExceptionMessageUtil.valueAndName(i, name), - numCallsBelowRequirer + 1); + fail(name + " >= 0", ExceptionMessageUtil.valueAndName(i, name), numCallsBelowRequirer + 1); } return i; } @@ -2558,8 +2396,7 @@ public static int geqZero(int i, String name) { */ public static long geqZero(long l, String name, int numCallsBelowRequirer) { if (!(l >= 0)) { - fail(name + " >= 0", ExceptionMessageUtil.valueAndName(l, name), - numCallsBelowRequirer + 1); + fail(name + " >= 0", ExceptionMessageUtil.valueAndName(l, name), numCallsBelowRequirer + 1); } return l; } @@ -2574,8 +2411,7 @@ public static long geqZero(long l, String name) { */ public static float geqZero(float f, String name, int numCallsBelowRequirer) { if (!(f >= 0)) { - fail(name + " >= 0", ExceptionMessageUtil.valueAndName(f, name), - numCallsBelowRequirer + 1); + fail(name + " >= 0", ExceptionMessageUtil.valueAndName(f, name), numCallsBelowRequirer + 1); } return f; } @@ -2590,8 +2426,7 @@ public static float geqZero(float f, String name) { */ public static double geqZero(double d, String name, int numCallsBelowRequirer) { if (!(d >= 0)) { - fail(name + " >= 0", ExceptionMessageUtil.valueAndName(d, name), - numCallsBelowRequirer + 1); + fail(name + " >= 0", ExceptionMessageUtil.valueAndName(d, name), numCallsBelowRequirer + 1); } return d; } @@ -2608,11 +2443,10 @@ public static double geqZero(double d, String name) { /** * require (o0 == o1) */ - public static void eq(Object o0, String name0, Object o1, String name1, - int numCallsBelowRequirer) { + public static void eq(Object o0, String name0, Object o1, String name1, int numCallsBelowRequirer) { if (!(o0 == o1)) { fail(name0 + " == " + name1, ExceptionMessageUtil.valueAndName(o0, name0, 
o1, name1), - numCallsBelowRequirer + 1); + numCallsBelowRequirer + 1); } } @@ -2622,8 +2456,8 @@ public static void eq(Object o0, String name0, Object o1, String name1) { public static void eq(Object o0, String name0, Object o1, int numCallsBelowRequirer) { if (!(o0 == o1)) { - fail(name0 + " == " + ExceptionMessageUtil.valueString(o1), - ExceptionMessageUtil.valueAndName(o0, name0), numCallsBelowRequirer + 1); + fail(name0 + " == " + ExceptionMessageUtil.valueString(o1), ExceptionMessageUtil.valueAndName(o0, name0), + numCallsBelowRequirer + 1); } } @@ -2635,11 +2469,10 @@ public static void eq(Object o0, String name0, Object o1) { /** * require (o0 != o1) */ - public static void neq(Object o0, String name0, Object o1, String name1, - int numCallsBelowRequirer) { + public static void neq(Object o0, String name0, Object o1, String name1, int numCallsBelowRequirer) { if (!(o0 != o1)) { fail(name0 + " != " + name1, ExceptionMessageUtil.valueAndName(o0, name0, o1, name1), - numCallsBelowRequirer + 1); + numCallsBelowRequirer + 1); } } @@ -2649,8 +2482,8 @@ public static void neq(Object o0, String name0, Object o1, String name1) { public static void neq(Object o0, String name0, Object o1, int numCallsBelowRequirer) { if (!(o0 != o1)) { - fail(name0 + " != " + ExceptionMessageUtil.valueString(o1), - ExceptionMessageUtil.valueAndName(o0, name0), numCallsBelowRequirer + 1); + fail(name0 + " != " + ExceptionMessageUtil.valueString(o1), ExceptionMessageUtil.valueAndName(o0, name0), + numCallsBelowRequirer + 1); } } @@ -2668,8 +2501,7 @@ public static void neq(Object o0, String name0, Object o1) { */ public static void eqNull(Object o, String name, int numCallsBelowRequirer) { if (!(null == o)) { - fail(name + " == null", ExceptionMessageUtil.valueAndName(o, name), - numCallsBelowRequirer + 1); + fail(name + " == null", ExceptionMessageUtil.valueAndName(o, name), numCallsBelowRequirer + 1); } } @@ -2684,8 +2516,7 @@ public static void eqNull(Object o, String name) { 
@NotNull public static T neqNull(T o, String name, int numCallsBelowRequirer) { if (!(null != o)) { - fail(name + " != null", ExceptionMessageUtil.valueAndName(o, name), - numCallsBelowRequirer + 1); + fail(name + " != null", ExceptionMessageUtil.valueAndName(o, name), numCallsBelowRequirer + 1); } return o; } @@ -2701,8 +2532,7 @@ public static T neqNull(T o, String name) { */ public static double neqNaN(double o, String name, int numCallsBelowRequirer) { if (Double.isNaN(o)) { - fail(name + " != NaN", ExceptionMessageUtil.valueAndName(o, name), - numCallsBelowRequirer + 1); + fail(name + " != NaN", ExceptionMessageUtil.valueAndName(o, name), numCallsBelowRequirer + 1); } return o; } @@ -2717,8 +2547,7 @@ public static double neqNaN(double o, String name) { */ public static double neqInf(double o, String name, int numCallsBelowRequirer) { if (Double.isInfinite(o)) { - fail(name + " != +/-Inf", ExceptionMessageUtil.valueAndName(o, name), - numCallsBelowRequirer + 1); + fail(name + " != +/-Inf", ExceptionMessageUtil.valueAndName(o, name), numCallsBelowRequirer + 1); } return o; } @@ -2734,13 +2563,12 @@ public static double neqInf(double o, String name) { /** * require (o0 != null && o1 != null && o0.equals(o1)) */ - public static void equals(Object o0, String name0, Object o1, String name1, - int numCallsBelowRequirer) { + public static void equals(Object o0, String name0, Object o1, String name1, int numCallsBelowRequirer) { neqNull(o0, name0, numCallsBelowRequirer + 1); neqNull(o1, name1, numCallsBelowRequirer + 1); if (!(o0.equals(o1))) { - fail(name0 + ".equals(" + name1 + ")", - ExceptionMessageUtil.valueAndName(o0, name0, o1, name1), numCallsBelowRequirer + 1); + fail(name0 + ".equals(" + name1 + ")", ExceptionMessageUtil.valueAndName(o0, name0, o1, name1), + numCallsBelowRequirer + 1); } } @@ -2759,7 +2587,7 @@ public static void equals(Object o0, String name0, Object o1, int numCallsBelowR neqNull(o1, "o1", numCallsBelowRequirer + 1); if (!(o0.equals(o1))) 
{ fail(name0 + ".equals(" + ExceptionMessageUtil.valueString(o1) + ")", - ExceptionMessageUtil.valueAndName(o0, name0), numCallsBelowRequirer + 1); + ExceptionMessageUtil.valueAndName(o0, name0), numCallsBelowRequirer + 1); } } @@ -2774,13 +2602,12 @@ public static void equals(Object o0, String name0, Object o1) { /** * require (o0 != null && o1 != null && !o0.equals(o1)) */ - public static void notEquals(Object o0, String name0, Object o1, String name1, - int numCallsBelowRequirer) { + public static void notEquals(Object o0, String name0, Object o1, String name1, int numCallsBelowRequirer) { neqNull(o0, name0, numCallsBelowRequirer + 1); neqNull(o1, name1, numCallsBelowRequirer + 1); if (o0.equals(o1)) { - fail("!" + name0 + ".equals(" + name1 + ")", - ExceptionMessageUtil.valueAndName(o0, name0, o1, name1), numCallsBelowRequirer + 1); + fail("!" + name0 + ".equals(" + name1 + ")", ExceptionMessageUtil.valueAndName(o0, name0, o1, name1), + numCallsBelowRequirer + 1); } } @@ -2799,7 +2626,7 @@ public static void notEquals(Object o0, String name0, Object o1, int numCallsBel neqNull(o1, "o1", numCallsBelowRequirer + 1); if (o0.equals(o1)) { fail("!" 
+ name0 + ".equals(" + ExceptionMessageUtil.valueString(o1) + ")", - ExceptionMessageUtil.valueAndName(o0, name0), numCallsBelowRequirer + 1); + ExceptionMessageUtil.valueAndName(o0, name0), numCallsBelowRequirer + 1); } } @@ -2820,8 +2647,7 @@ public static void notEquals(Object o0, String name0, Object o1) { public static String nonempty(String s, String name, int numCallsBelowRequirer) { neqNull(s, name, numCallsBelowRequirer + 1); if (!(s.length() > 0)) { - fail(name + ".length() > 0", ExceptionMessageUtil.valueAndName(s, name), - numCallsBelowRequirer + 1); + fail(name + ".length() > 0", ExceptionMessageUtil.valueAndName(s, name), numCallsBelowRequirer + 1); } return s; } @@ -2834,42 +2660,42 @@ public static String nonempty(String s, String name) { // ---------------------------------------------------------------- /** require (collection != null && collection.contains(element)) */ - public static , T> C contains(C collection, String collectionName, - T element, String elementName, int numCallsBelowRequirer) { + public static , T> C contains(C collection, String collectionName, T element, + String elementName, int numCallsBelowRequirer) { neqNull(collection, collectionName, numCallsBelowRequirer + 1); if (!(collection.contains(element))) { fail(collectionName + ".contains(" + elementName + ")", - ExceptionMessageUtil.valueAndName(element, elementName), numCallsBelowRequirer + 1); + ExceptionMessageUtil.valueAndName(element, elementName), numCallsBelowRequirer + 1); } return collection; } - public static , T> C contains(C collection, String collectionName, - T element, String elementName) { + public static , T> C contains(C collection, String collectionName, T element, + String elementName) { return contains(collection, collectionName, element, elementName, 1); } // ---------------------------------------------------------------- /** require (collection != null && !collection.contains(element)) */ - public static , T> C notContains(C collection, String 
collectionName, - T element, String elementName, int numCallsBelowRequirer) { + public static , T> C notContains(C collection, String collectionName, T element, + String elementName, int numCallsBelowRequirer) { neqNull(collection, collectionName, numCallsBelowRequirer + 1); if (collection.contains(element)) { fail("!" + collectionName + ".contains(" + elementName + ")", - ExceptionMessageUtil.valueAndName(element, elementName), numCallsBelowRequirer + 1); + ExceptionMessageUtil.valueAndName(element, elementName), numCallsBelowRequirer + 1); } return collection; } - public static , T> C notContains(C collection, String collectionName, - T element, String elementName) { + public static , T> C notContains(C collection, String collectionName, T element, + String elementName) { return notContains(collection, collectionName, element, elementName, 1); } // ---------------------------------------------------------------- /** require (collection != null && !collection.stream().anyMatch(Objects::isNull) */ - public static , T> C notContainsNull(C collection, - String collectionName, int numCallsBelowRequirer) { + public static , T> C notContainsNull(C collection, String collectionName, + int numCallsBelowRequirer) { neqNull(collection, collectionName, numCallsBelowRequirer + 1); if (collection.stream().anyMatch(Objects::isNull)) { fail(collectionName + " does not contain null", numCallsBelowRequirer + 1); @@ -2877,56 +2703,52 @@ public static , T> C notContainsNull(C collection, return collection; } - public static , T> C notContainsNull(C collection, - String collectionName) { + public static , T> C notContainsNull(C collection, String collectionName) { return notContainsNull(collection, collectionName, 1); } // ---------------------------------------------------------------- /** require (map != null && map.containsKey(key)) */ - public static , K, V> M containsKey(M map, String mapName, K key, - String keyName, int numCallsBelowRequirer) { + public static , K, V> M 
containsKey(M map, String mapName, K key, String keyName, + int numCallsBelowRequirer) { neqNull(map, mapName, numCallsBelowRequirer + 1); if (!(map.containsKey(key))) { - fail(mapName + ".containsKey(" + keyName + ")", - ExceptionMessageUtil.valueAndName(key, keyName), numCallsBelowRequirer + 1); + fail(mapName + ".containsKey(" + keyName + ")", ExceptionMessageUtil.valueAndName(key, keyName), + numCallsBelowRequirer + 1); } return map; } - public static , K, V> M containsKey(M map, String mapName, K key, - String keyName) { + public static , K, V> M containsKey(M map, String mapName, K key, String keyName) { return containsKey(map, mapName, key, keyName, 1); } // ---------------------------------------------------------------- /** require (map != null && !map.containsKey(element)) */ - public static , K, V> M notContainsKey(M map, String mapName, K key, - String keyName, int numCallsBelowRequirer) { + public static , K, V> M notContainsKey(M map, String mapName, K key, String keyName, + int numCallsBelowRequirer) { neqNull(map, mapName, numCallsBelowRequirer + 1); if (map.containsKey(key)) { - fail("!" + mapName + ".containsKey(" + keyName + ")", - ExceptionMessageUtil.valueAndName(key, keyName), numCallsBelowRequirer + 1); + fail("!" 
+ mapName + ".containsKey(" + keyName + ")", ExceptionMessageUtil.valueAndName(key, keyName), + numCallsBelowRequirer + 1); } return map; } - public static , K, V> M notContainsKey(M map, String mapName, K key, - String keyName) { + public static , K, V> M notContainsKey(M map, String mapName, K key, String keyName) { return notContainsKey(map, mapName, key, keyName, 1); } // ---------------------------------------------------------------- /** require (offset >= 0 && offset < length) */ - public static int inRange(int offset, String offsetName, int length, String lengthName, - int numCallsBelowRequirer) { + public static int inRange(int offset, String offsetName, int length, String lengthName, int numCallsBelowRequirer) { if (!(offset >= 0)) { fail(offsetName + " >= 0", ExceptionMessageUtil.valueAndName(offset, offsetName), - numCallsBelowRequirer + 1); + numCallsBelowRequirer + 1); } else if (!(offset < length)) { fail(offsetName + " < " + lengthName, - ExceptionMessageUtil.valueAndName(offset, offsetName, length, lengthName), - numCallsBelowRequirer + 1); + ExceptionMessageUtil.valueAndName(offset, offsetName, length, lengthName), + numCallsBelowRequirer + 1); } return offset; } @@ -2938,37 +2760,34 @@ public static int inRange(int offset, String offsetName, int length, String leng // ---------------------------------------------------------------- /** require (offset >= start && offset < end) */ - public static int inRange(int offset, String offsetName, int start, String startName, int end, - String endName, int numCallsBelowRequirer) { + public static int inRange(int offset, String offsetName, int start, String startName, int end, String endName, + int numCallsBelowRequirer) { if (!(offset >= start)) { fail(offsetName + " >= " + startName, - ExceptionMessageUtil.valueAndName(offset, offsetName, start, startName), - numCallsBelowRequirer + 1); + ExceptionMessageUtil.valueAndName(offset, offsetName, start, startName), numCallsBelowRequirer + 1); } else if 
(!(offset < end)) { - fail(offsetName + " < " + endName, - ExceptionMessageUtil.valueAndName(offset, offsetName, end, endName), - numCallsBelowRequirer + 1); + fail(offsetName + " < " + endName, ExceptionMessageUtil.valueAndName(offset, offsetName, end, endName), + numCallsBelowRequirer + 1); } return offset; } /** require (offset >= start && offset < end) */ - public static int inRange(int offset, String offsetName, int start, String startName, int end, - String endName) { + public static int inRange(int offset, String offsetName, int start, String startName, int end, String endName) { return inRange(offset, offsetName, start, startName, end, endName, 1); } // ---------------------------------------------------------------- /** require (offset >= 0 && offset < length) */ public static long inRange(long offset, String offsetName, long length, String lengthName, - int numCallsBelowRequirer) { + int numCallsBelowRequirer) { if (!(offset >= 0L)) { fail(offsetName + " >= 0L", ExceptionMessageUtil.valueAndName(offset, offsetName), - numCallsBelowRequirer + 1); + numCallsBelowRequirer + 1); } else if (!(offset < length)) { fail(offsetName + " < " + lengthName, - ExceptionMessageUtil.valueAndName(offset, offsetName, length, lengthName), - numCallsBelowRequirer + 1); + ExceptionMessageUtil.valueAndName(offset, offsetName, length, lengthName), + numCallsBelowRequirer + 1); } return offset; } @@ -2980,23 +2799,20 @@ public static long inRange(long offset, String offsetName, long length, String l // ---------------------------------------------------------------- /** require (offset >= start && offset < end) */ - public static long inRange(long offset, String offsetName, long start, String startName, - long end, String endName, int numCallsBelowRequirer) { + public static long inRange(long offset, String offsetName, long start, String startName, long end, String endName, + int numCallsBelowRequirer) { if (!(offset >= start)) { fail(offsetName + " >= " + startName, - 
ExceptionMessageUtil.valueAndName(offset, offsetName, start, startName), - numCallsBelowRequirer + 1); + ExceptionMessageUtil.valueAndName(offset, offsetName, start, startName), numCallsBelowRequirer + 1); } else if (!(offset < end)) { - fail(offsetName + " < " + endName, - ExceptionMessageUtil.valueAndName(offset, offsetName, end, endName), - numCallsBelowRequirer + 1); + fail(offsetName + " < " + endName, ExceptionMessageUtil.valueAndName(offset, offsetName, end, endName), + numCallsBelowRequirer + 1); } return offset; } /** require (offset >= start && offset < end) */ - public static long inRange(long offset, String offsetName, long start, String startName, - long end, String endName) { + public static long inRange(long offset, String offsetName, long start, String startName, long end, String endName) { return inRange(offset, offsetName, start, startName, end, endName, 1); } @@ -3005,8 +2821,8 @@ public static long inRange(long offset, String offsetName, long start, String st /** require d != {Infinity, -Infinity, NaN}. 
*/ public static double normalized(double d, String name, int numCallsBelowRequirer) { if (!(!Double.isNaN(d) && !Double.isInfinite(d))) { - fail(name + " is normalized (not infinity or NaN)", - ExceptionMessageUtil.valueAndName(d, name), numCallsBelowRequirer + 1); + fail(name + " is normalized (not infinity or NaN)", ExceptionMessageUtil.valueAndName(d, name), + numCallsBelowRequirer + 1); } return d; } @@ -3016,12 +2832,10 @@ public static double normalized(double d, String name) { return normalized(d, name, 1); } - public static T[] nonEmpty(final T[] a, final String name, - final int numCallsBelowRequirer) { + public static T[] nonEmpty(final T[] a, final String name, final int numCallsBelowRequirer) { neqNull(a, name, numCallsBelowRequirer + 1); if (!(a.length > 0)) { - fail(name + ".length > 0", ExceptionMessageUtil.valueAndName(a, name), - numCallsBelowRequirer + 1); + fail(name + ".length > 0", ExceptionMessageUtil.valueAndName(a, name), numCallsBelowRequirer + 1); } return a; } @@ -3031,10 +2845,9 @@ public static T[] nonEmpty(final T[] a, final String name) { } public static int[] lengthEqual(final int[] a, final String name, final int length, - final int numCallsBelowRequirer) { + final int numCallsBelowRequirer) { if (!(a.length == length)) { - fail(name + ".length == " + length, ExceptionMessageUtil.valueAndName(a, name), - numCallsBelowRequirer + 1); + fail(name + ".length == " + length, ExceptionMessageUtil.valueAndName(a, name), numCallsBelowRequirer + 1); } return a; } @@ -3102,8 +2915,7 @@ public static void isSquare(double[][] m, String name) { } // ---------------------------------------------------------------- - public static double inRange(double trialValue, double endPointA, double endPointB, - String name) { + public static double inRange(double trialValue, double endPointA, double endPointB, String name) { double minRange = endPointA; double maxRange = endPointB; if (endPointA > endPointB) { @@ -3111,8 +2923,8 @@ public static double 
inRange(double trialValue, double endPointA, double endPoin maxRange = endPointA; } if (trialValue < minRange || maxRange < trialValue) { - fail(name + " = " + trialValue + " is expected to be in the range of [" + minRange + "," - + maxRange + "] but was not", 1); + fail(name + " = " + trialValue + " is expected to be in the range of [" + minRange + "," + maxRange + + "] but was not", 1); } return trialValue; } @@ -3126,8 +2938,8 @@ public static float inRange(float trialValue, float endPointA, float endPointB, maxRange = endPointA; } if (trialValue < minRange || maxRange < trialValue) { - fail(name + " = " + trialValue + " is expected to be in the range of [" + minRange + "," - + maxRange + "] but was not", 1); + fail(name + " = " + trialValue + " is expected to be in the range of [" + minRange + "," + maxRange + + "] but was not", 1); } return trialValue; } diff --git a/Base/src/main/java/io/deephaven/base/verify/RequirementFailure.java b/Base/src/main/java/io/deephaven/base/verify/RequirementFailure.java index 671e6551c53..3d2999b618d 100644 --- a/Base/src/main/java/io/deephaven/base/verify/RequirementFailure.java +++ b/Base/src/main/java/io/deephaven/base/verify/RequirementFailure.java @@ -15,8 +15,8 @@ public class RequirementFailure extends RuntimeException { /** - * The number of stack frames that should be removed from the stack to find the method whose - * requirements did not hold. + * The number of stack frames that should be removed from the stack to find the method whose requirements did not + * hold. */ private int m_nCallsBelowRequirer; @@ -34,8 +34,8 @@ public RequirementFailure(String message, Exception caughtException, int nCallsB // ---------------------------------------------------------------- /** - * Gets the number of stack frames that should be removed from the stack to find the caller - * which failed to meet requirements. 
+ * Gets the number of stack frames that should be removed from the stack to find the caller which failed to meet + * requirements. */ public int getNumCallsBelowRequirer() { return m_nCallsBelowRequirer; @@ -91,8 +91,8 @@ public StringBuffer getOriginalStackTrace() { // ---------------------------------------------------------------- /** - * If this stack frame caused the exception, adjust the culprit to be the caller. Used when a - * delegating method can't verify all requirements itself but shouldn't receive the blame. + * If this stack frame caused the exception, adjust the culprit to be the caller. Used when a delegating method + * can't verify all requirements itself but shouldn't receive the blame. */ public RequirementFailure adjustForDelegatingMethod() { if (isThisStackFrameCulprit(1)) { @@ -103,8 +103,8 @@ public RequirementFailure adjustForDelegatingMethod() { // ---------------------------------------------------------------- /** - * If this stack frame caused the exception, adjust the culprit to be the caller. Used when a - * delegating method can't verify all requirements itself but shouldn't receive the blame. + * If this stack frame caused the exception, adjust the culprit to be the caller. Used when a delegating method + * can't verify all requirements itself but shouldn't receive the blame. 
*/ public RequirementFailure adjustForDelegatingMethodAndSyntheticAccessor() { if (isThisStackFrameCulprit(0)) { @@ -120,7 +120,6 @@ public RequirementFailure adjustForDelegatingMethodAndSyntheticAccessor() { public boolean isThisStackFrameCulprit(int nFramesBelowTargetFrame) { StackTraceElement[] stackTrace = new Throwable().getStackTrace(); StackTraceElement[] failureStackTrace = getStackTrace(); - return failureStackTrace.length - m_nCallsBelowRequirer == stackTrace.length - - nFramesBelowTargetFrame; + return failureStackTrace.length - m_nCallsBelowRequirer == stackTrace.length - nFramesBelowTargetFrame; } } diff --git a/Base/src/main/java/io/deephaven/io/log/impl/LogOutputStringImpl.java b/Base/src/main/java/io/deephaven/io/log/impl/LogOutputStringImpl.java index c3c9442b2cd..013ebaa53d9 100644 --- a/Base/src/main/java/io/deephaven/io/log/impl/LogOutputStringImpl.java +++ b/Base/src/main/java/io/deephaven/io/log/impl/LogOutputStringImpl.java @@ -13,8 +13,7 @@ import java.nio.ByteBuffer; /** - * A LogOutput implementation, designed solely as an adapter for LogOutputAppendable's to produce - * Strings. + * A LogOutput implementation, designed solely as an adapter for LogOutputAppendable's to produce Strings. 
*/ public class LogOutputStringImpl implements LogOutput, CharSequence { @@ -200,9 +199,8 @@ public LogOutput append(Throwable t) { append(t.getClass().getName()).append(": ").append(t.getMessage()); for (StackTraceElement e : t.getStackTrace()) { append(delim) - .append(e.getClassName()).append(".").append(e.getMethodName()) - .append("(").append(e.getFileName()).append(":").append(e.getLineNumber()) - .append(")"); + .append(e.getClassName()).append(".").append(e.getMethodName()) + .append("(").append(e.getFileName()).append(":").append(e.getLineNumber()).append(")"); delim = ";"; } } while ((t = t.getCause()) != null); diff --git a/Base/src/test/java/io/deephaven/base/ArrayUtilTest.java b/Base/src/test/java/io/deephaven/base/ArrayUtilTest.java index 0411eb82a6a..24eefe3c657 100644 --- a/Base/src/test/java/io/deephaven/base/ArrayUtilTest.java +++ b/Base/src/test/java/io/deephaven/base/ArrayUtilTest.java @@ -385,21 +385,21 @@ public void testReplaceInArray() { } public void testAddUnlessUnary() { - StringWrapper[] sa = ArrayUtil.addUnless(null, StringWrapper.class, - new StringWrapper.UnaryEquals("foo"), new StringWrapper.NullaryFactory("foo")); + StringWrapper[] sa = ArrayUtil.addUnless(null, StringWrapper.class, new StringWrapper.UnaryEquals("foo"), + new StringWrapper.NullaryFactory("foo")); assertFalse(sa == null); assertEquals(StringWrapper.class, sa.getClass().getComponentType()); assertEquals(1, sa.length); assertEquals("foo", sa[0].s); StringWrapper sw1 = sa[0]; - StringWrapper[] sa2 = ArrayUtil.addUnless(sa, StringWrapper.class, - new StringWrapper.UnaryEquals("foo"), new StringWrapper.NullaryFactory("foo")); + StringWrapper[] sa2 = ArrayUtil.addUnless(sa, StringWrapper.class, new StringWrapper.UnaryEquals("foo"), + new StringWrapper.NullaryFactory("foo")); assertTrue(sa2 == sa); assertTrue(sa2[0] == sw1); - StringWrapper[] sa3 = ArrayUtil.addUnless(sa, StringWrapper.class, - new StringWrapper.UnaryEquals("bar"), new StringWrapper.NullaryFactory("bar")); 
+ StringWrapper[] sa3 = ArrayUtil.addUnless(sa, StringWrapper.class, new StringWrapper.UnaryEquals("bar"), + new StringWrapper.NullaryFactory("bar")); assertFalse(sa3 == sa); assertEquals(StringWrapper.class, sa.getClass().getComponentType()); assertEquals(2, sa3.length); @@ -408,21 +408,21 @@ public void testAddUnlessUnary() { } public void testAddUnlessBinary() { - StringWrapper[] sa = ArrayUtil.addUnless(null, StringWrapper.class, - new StringWrapper.BinaryEquals(), new StringWrapper.UnaryFactory(), "foo"); + StringWrapper[] sa = ArrayUtil.addUnless(null, StringWrapper.class, new StringWrapper.BinaryEquals(), + new StringWrapper.UnaryFactory(), "foo"); assertFalse(sa == null); assertEquals(StringWrapper.class, sa.getClass().getComponentType()); assertEquals(1, sa.length); assertEquals("foo", sa[0].s); StringWrapper sw1 = sa[0]; - StringWrapper[] sa2 = ArrayUtil.addUnless(sa, StringWrapper.class, - new StringWrapper.BinaryEquals(), new StringWrapper.UnaryFactory(), "foo"); + StringWrapper[] sa2 = ArrayUtil.addUnless(sa, StringWrapper.class, new StringWrapper.BinaryEquals(), + new StringWrapper.UnaryFactory(), "foo"); assertTrue(sa2 == sa); assertTrue(sa2[0] == sw1); - StringWrapper[] sa3 = ArrayUtil.addUnless(sa, StringWrapper.class, - new StringWrapper.BinaryEquals(), new StringWrapper.UnaryFactory(), "bar"); + StringWrapper[] sa3 = ArrayUtil.addUnless(sa, StringWrapper.class, new StringWrapper.BinaryEquals(), + new StringWrapper.UnaryFactory(), "bar"); assertFalse(sa3 == sa); assertEquals(StringWrapper.class, sa.getClass().getComponentType()); assertEquals(2, sa3.length); @@ -431,22 +431,22 @@ public void testAddUnlessBinary() { } public void testReplaceOrAdd() { - StringWrapper[] sa = ArrayUtil.replaceOrAdd(null, StringWrapper.class, - new StringWrapper.BinaryEquals(), new StringWrapper.UnaryFactory(), "foo"); + StringWrapper[] sa = ArrayUtil.replaceOrAdd(null, StringWrapper.class, new StringWrapper.BinaryEquals(), + new StringWrapper.UnaryFactory(), "foo"); 
assertFalse(sa == null); assertEquals(StringWrapper.class, sa.getClass().getComponentType()); assertEquals(1, sa.length); assertEquals("foo", sa[0].s); StringWrapper sw1 = sa[0]; - StringWrapper[] sa2 = ArrayUtil.replaceOrAdd(sa, StringWrapper.class, - new StringWrapper.BinaryEquals(), new StringWrapper.UnaryFactory(), "foo"); + StringWrapper[] sa2 = ArrayUtil.replaceOrAdd(sa, StringWrapper.class, new StringWrapper.BinaryEquals(), + new StringWrapper.UnaryFactory(), "foo"); assertTrue(sa2 == sa); assertEquals("foo", sa2[0].s); assertTrue(sa2[0] != sw1); - StringWrapper[] sa3 = ArrayUtil.addUnless(sa, StringWrapper.class, - new StringWrapper.BinaryEquals(), new StringWrapper.UnaryFactory(), "bar"); + StringWrapper[] sa3 = ArrayUtil.addUnless(sa, StringWrapper.class, new StringWrapper.BinaryEquals(), + new StringWrapper.UnaryFactory(), "bar"); assertFalse(sa3 == sa); assertEquals(StringWrapper.class, sa.getClass().getComponentType()); assertEquals(2, sa3.length); @@ -527,8 +527,8 @@ private static void nullOrEqual(final String sa, final String sb) { } private static void checkRange(final String[] a, final int aFromIndex, - final String[] b, final int bFromIndex, - final int count) { + final String[] b, final int bFromIndex, + final int count) { assertTrue(aFromIndex + count <= a.length); assertTrue(bFromIndex + count <= b.length); for (int i = 0; i < count; ++i) { diff --git a/Base/src/test/java/io/deephaven/base/ExerciseFIFOMutex.java b/Base/src/test/java/io/deephaven/base/ExerciseFIFOMutex.java index 6614e492c0e..738e0075439 100644 --- a/Base/src/test/java/io/deephaven/base/ExerciseFIFOMutex.java +++ b/Base/src/test/java/io/deephaven/base/ExerciseFIFOMutex.java @@ -71,8 +71,7 @@ private void start(final int NTHREADS) { if (t.isAlive()) { System.out.println("Couldn't stop thread " + i); } else { - // System.out.println("Locker "+i+"/"+NTHREADS+" executed - // "+lockers[i].loopCount+" loops"); + // System.out.println("Locker "+i+"/"+NTHREADS+" executed 
"+lockers[i].loopCount+" loops"); totalLoops += lockers[i].loopCount; minLoops = Math.min(minLoops, lockers[i].loopCount); maxLoops = Math.max(maxLoops, lockers[i].loopCount); @@ -81,8 +80,8 @@ private void start(final int NTHREADS) { // ignore } } - System.out.println(NTHREADS + " lockers executed " + totalLoops + " loops in total; min=" - + minLoops + ", max=" + maxLoops); + System.out.println(NTHREADS + " lockers executed " + totalLoops + " loops in total; min=" + minLoops + ", max=" + + maxLoops); } public static void main(String[] args) { diff --git a/Base/src/test/java/io/deephaven/base/ExerciseLockFreeArrayQueue.java b/Base/src/test/java/io/deephaven/base/ExerciseLockFreeArrayQueue.java index feeded582e9..53a3a534a2b 100644 --- a/Base/src/test/java/io/deephaven/base/ExerciseLockFreeArrayQueue.java +++ b/Base/src/test/java/io/deephaven/base/ExerciseLockFreeArrayQueue.java @@ -221,14 +221,11 @@ public void put(T el, BenchRunner runner) { } else if (spin > 1000000) { if (spin % 1000000 == 0) { long dt = System.nanoTime() - t0; - error("TZ2: stuck producer " + runner.id + ": " + spin - + " spins on element " + el - + ", initial: " + head0 + "/" + tail0 + ", now: " + q.head.get() + "/" - + q.tail.get() - + ", dt=" + (dt / 1000000) + " " + runnerState()); + error("TZ2: stuck producer " + runner.id + ": " + spin + " spins on element " + el + + ", initial: " + head0 + "/" + tail0 + ", now: " + q.head.get() + "/" + q.tail.get() + + ", dt=" + (dt / 1000000) + " " + runnerState()); if (dt - t0 > ABORT_NANOS) { - error("TZ2: aborting producer " + runner.id + " after " + (dt / 1000000) - + " millis"); + error("TZ2: aborting producer " + runner.id + " after " + (dt / 1000000) + " millis"); break; } } @@ -255,12 +252,10 @@ public T get(BenchRunner runner) { } else if (spin % 1000000 == 0) { long dt = System.nanoTime() - t0; error("TZ2: stuck consumer " + runner.id + ": " + spin + " spins" - + ", initial: " + head0 + "/" + tail0 + ", now: " + q.head.get() + "/" - + 
q.tail.get() - + ", dt=" + (dt / 1000000) + " " + runnerState()); + + ", initial: " + head0 + "/" + tail0 + ", now: " + q.head.get() + "/" + q.tail.get() + + ", dt=" + (dt / 1000000) + " " + runnerState()); if (dt > ABORT_NANOS) { - error("TZ2: aborting consumer " + runner.id + " after " + (dt / 1000000) - + " millis"); + error("TZ2: aborting consumer " + runner.id + " after " + (dt / 1000000) + " millis"); break; } } @@ -350,8 +345,8 @@ public void put(T el, BenchRunner runner) { if (spin == 1000000) { t0 = System.nanoTime(); } else if (spin % 1000000 == 0 && System.nanoTime() - t0 > ABORT_NANOS) { - error("PBQ: aborting stuck producer after " + (ABORT_NANOS / 1000000) - + " millis and " + spin + " spins on element " + el); + error("PBQ: aborting stuck producer after " + (ABORT_NANOS / 1000000) + " millis and " + spin + + " spins on element " + el); break; } } @@ -368,8 +363,8 @@ public T get(BenchRunner runner) { if (spin == 1000000) { t0 = System.nanoTime(); } else if (spin % 1000000 == 0 && System.nanoTime() - t0 > ABORT_NANOS) { - error("PBQ: aborting stuck consumer after " + (ABORT_NANOS / 1000000) - + " millis and " + spin + " spins"); + error("PBQ: aborting stuck consumer after " + (ABORT_NANOS / 1000000) + " millis and " + spin + + " spins"); break; } } @@ -476,8 +471,8 @@ public void run() { public static AtomicBoolean testTwoThreadsFailed = new AtomicBoolean(false); public static void testOrdered(final int trial, final BenchQueue q, final int N, - final int PRODUCER_MIN_WORK, final int PRODUCER_MAX_WORK, - final int CONSUMER_MIN_WORK, final int CONSUMER_MAX_WORK) { + final int PRODUCER_MIN_WORK, final int PRODUCER_MAX_WORK, + final int CONSUMER_MIN_WORK, final int CONSUMER_MAX_WORK) { q.init(); resetRunners(); @@ -519,25 +514,25 @@ public void run() { long t1 = System.nanoTime(); System.out.println("testOrdered" + "," - + q.getClass().getSimpleName() + "," - + trial + "," - + q.cap() + "," - + N + "," - + 1 + "," - + 1 + "," - + PRODUCER_MIN_WORK + "," - 
+ PRODUCER_MAX_WORK + "," - + CONSUMER_MIN_WORK + "," - + CONSUMER_MAX_WORK + "," - + (runners[0].put_work / N) + "," - + (runners[1].get_work / N) + "," - + ((double) runners[0].put_spins / N) + "," - + ((double) runners[1].get_spins / N) + "," - + ((t1 - t0) / N) + "," - + q.head() + "," - + q.tail() + "," - + (q.head() % q.cap()) + "," - + (q.tail() % q.cap()) + ","); + + q.getClass().getSimpleName() + "," + + trial + "," + + q.cap() + "," + + N + "," + + 1 + "," + + 1 + "," + + PRODUCER_MIN_WORK + "," + + PRODUCER_MAX_WORK + "," + + CONSUMER_MIN_WORK + "," + + CONSUMER_MAX_WORK + "," + + (runners[0].put_work / N) + "," + + (runners[1].get_work / N) + "," + + ((double) runners[0].put_spins / N) + "," + + ((double) runners[1].get_spins / N) + "," + + ((t1 - t0) / N) + "," + + q.head() + "," + + q.tail() + "," + + (q.head() % q.cap()) + "," + + (q.tail() % q.cap()) + ","); if (testTwoThreadsFailed.get()) { System.err.println("Trial " + trial + ": testOrdered: failed "); @@ -549,9 +544,9 @@ public void run() { // ------------------------------------------------------------------------------------------------ public static void testManyThreads(final int trial, final BenchQueue q, - final int num_producers, final int num_consumers, final int N, - final int PRODUCER_MIN_WORK, final int PRODUCER_MAX_WORK, - final int CONSUMER_MIN_WORK, final int CONSUMER_MAX_WORK) { + final int num_producers, final int num_consumers, final int N, + final int PRODUCER_MIN_WORK, final int PRODUCER_MAX_WORK, + final int CONSUMER_MIN_WORK, final int CONSUMER_MAX_WORK) { q.init(); resetDequeueCounts(); resetRunners(); @@ -620,30 +615,30 @@ public void run() { } System.out.println("testManyThreads" + "," - + q.getClass().getSimpleName() + "," - + trial + "," - + q.cap() + "," - + N + "," - + num_producers + "," - + num_consumers + "," - + PRODUCER_MIN_WORK + "," - + PRODUCER_MAX_WORK + "," - + CONSUMER_MIN_WORK + "," - + CONSUMER_MAX_WORK + "," - + (total_producer_work / N) + "," - + 
(total_consumer_work / N) + "," - + ((double) total_producer_spins / N) + "," - + ((double) total_consumer_spins / N) + "," - + ((t1 - t0) / N) + "," - + q.head() + "," - + q.tail() + "," - + (q.head() % q.cap()) + "," - + (q.tail() % q.cap()) + ","); + + q.getClass().getSimpleName() + "," + + trial + "," + + q.cap() + "," + + N + "," + + num_producers + "," + + num_consumers + "," + + PRODUCER_MIN_WORK + "," + + PRODUCER_MAX_WORK + "," + + CONSUMER_MIN_WORK + "," + + CONSUMER_MAX_WORK + "," + + (total_producer_work / N) + "," + + (total_consumer_work / N) + "," + + ((double) total_producer_spins / N) + "," + + ((double) total_consumer_spins / N) + "," + + ((t1 - t0) / N) + "," + + q.head() + "," + + q.tail() + "," + + (q.head() % q.cap()) + "," + + (q.tail() % q.cap()) + ","); for (int i = 0; i < N; ++i) { if (dequeueCounts.get(i) != 1) { - error("Trial " + trial + " testManyThreads: object " + i + " was dequeued " - + dequeueCounts.get(i) + " times"); + error("Trial " + trial + " testManyThreads: object " + i + " was dequeued " + dequeueCounts.get(i) + + " times"); } } } @@ -695,11 +690,10 @@ public static void main(String[] args) { // 0, cw); // min, max consumer work iterations between ops testManyThreads(i, q, - p, c, // number of producers/consumers - 10000000, // number of operations - 0, pw * 20, // min, max producer work iterations between ops - 0, cw * 20); // min, max consumer work iterations between - // ops + p, c, // number of producers/consumers + 10000000, // number of operations + 0, pw * 20, // min, max producer work iterations between ops + 0, cw * 20); // min, max consumer work iterations between ops } } } diff --git a/Base/src/test/java/io/deephaven/base/MemoryModelVolatileTest.java b/Base/src/test/java/io/deephaven/base/MemoryModelVolatileTest.java index 10432b9e219..4ce84445dfb 100644 --- a/Base/src/test/java/io/deephaven/base/MemoryModelVolatileTest.java +++ b/Base/src/test/java/io/deephaven/base/MemoryModelVolatileTest.java @@ -24,8 +24,7 
@@ public void fail() { private final static long ratherFrequently = 1 << 10; // power of 2. private final static long rfmask = ratherFrequently - 1; - private final static long ratherUnfrequently = 1 << 15; // power of 2, bigger than - // ratherFrequently. + private final static long ratherUnfrequently = 1 << 15; // power of 2, bigger than ratherFrequently. private final static long rumask = ratherUnfrequently - 1; void writeConsistently() { diff --git a/Base/src/test/java/io/deephaven/base/RingBufferTest.java b/Base/src/test/java/io/deephaven/base/RingBufferTest.java index 05f270a5289..89bf3bae1e8 100644 --- a/Base/src/test/java/io/deephaven/base/RingBufferTest.java +++ b/Base/src/test/java/io/deephaven/base/RingBufferTest.java @@ -56,16 +56,15 @@ private void assertNotEmpty(RingBuffer rb, int expectedSize, Object expe } } - private void assertAdd(RingBuffer rb, Object newElement, int expectedSize, - Object expectedHead) { + private void assertAdd(RingBuffer rb, Object newElement, int expectedSize, Object expectedHead) { assertTrue(rb.add(newElement)); assertEquals(newElement, rb.back()); assertEquals(newElement, rb.peekLast()); assertNotEmpty(rb, expectedSize, expectedHead); } - private void assertAddOverwrite(RingBuffer rb, Object newElement, int expectedSize, - Object expectedHead, Object expectedOverwrite) { + private void assertAddOverwrite(RingBuffer rb, Object newElement, int expectedSize, Object expectedHead, + Object expectedOverwrite) { assertEquals(expectedOverwrite, rb.addOverwrite(newElement)); assertEquals(newElement, rb.back()); assertEquals(newElement, rb.peekLast()); @@ -79,8 +78,7 @@ private void assertAddFirst(RingBuffer rb, Object newElement, int expect assertNotEmpty(rb, expectedSize, newElement); } - private void assertOffer(RingBuffer rb, Object newElement, int expectedSize, - Object expectedHead) { + private void assertOffer(RingBuffer rb, Object newElement, int expectedSize, Object expectedHead) { assertTrue(rb.offer(newElement)); 
assertEquals(newElement, rb.back()); assertEquals(newElement, rb.peekLast()); @@ -108,8 +106,8 @@ private void assertRemove(RingBuffer rb, int expectedSize, Object expect } } - private void assertRemoveAtSwapLast(RingBuffer rb, int expectedSize, - Object expectedHead, Object expectedResult, int offset) { + private void assertRemoveAtSwapLast(RingBuffer rb, int expectedSize, Object expectedHead, + Object expectedResult, int offset) { assertNotEmpty(rb, expectedSize, expectedHead); try { assertTrue(expectedResult == rb.removeAtSwapLast(offset)); @@ -258,8 +256,8 @@ public void testOfferPoll() { } public void testGrowSimple() { - // In order to keep internal storage size as a power of 2, the following now applies: - // capacity = 2^ceil(log2(requestedCapacity+1)) - 1 + // In order to keep internal storage size as a power of 2, the following now applies: capacity = + // 2^ceil(log2(requestedCapacity+1)) - 1 RingBuffer rb = new RingBuffer<>(5); assertAdd(rb, A, 1, A); @@ -354,8 +352,7 @@ public void testCircularArrayList() { } public void testOverwrite() { - RingBuffer rb = new RingBuffer<>(3); // means array will be size 4, 1 always needs - // to be null + RingBuffer rb = new RingBuffer<>(3); // means array will be size 4, 1 always needs to be null assertAddOverwrite(rb, A, 1, A, null); assertAddOverwrite(rb, B, 2, A, null); assertAddOverwrite(rb, C, 3, A, null); @@ -367,8 +364,7 @@ public void testOverwrite() { } public void testPeekLast() { - RingBuffer rb = new RingBuffer<>(7); // means array will be size 8, 1 always needs - // to be null + RingBuffer rb = new RingBuffer<>(7); // means array will be size 8, 1 always needs to be null assertOffer(rb, A, 1, A); assertOffer(rb, B, 2, A); assertOffer(rb, C, 3, A); diff --git a/Base/src/test/java/io/deephaven/base/TestStringUtils.java b/Base/src/test/java/io/deephaven/base/TestStringUtils.java index f699dcdf6a3..d6761289153 100644 --- a/Base/src/test/java/io/deephaven/base/TestStringUtils.java +++ 
b/Base/src/test/java/io/deephaven/base/TestStringUtils.java @@ -29,8 +29,7 @@ public void testJoinStrings() { objects.add(2); assertEquals("1\n2\n3\n7", StringUtils.joinStrings(objects, "\n")); - assertEquals("1\\3\\7", - StringUtils.joinStrings(objects.stream().filter(i -> i % 2 != 0), "\\")); + assertEquals("1\\3\\7", StringUtils.joinStrings(objects.stream().filter(i -> i % 2 != 0), "\\")); assertEquals("1,2,3,7", StringUtils.joinStrings(objects.iterator(), ",")); } diff --git a/Base/src/test/java/io/deephaven/base/array/FastArrayTest.java b/Base/src/test/java/io/deephaven/base/array/FastArrayTest.java index a61f776b7ad..be24126f029 100644 --- a/Base/src/test/java/io/deephaven/base/array/FastArrayTest.java +++ b/Base/src/test/java/io/deephaven/base/array/FastArrayTest.java @@ -342,8 +342,8 @@ public void testDeepCopyAndEquals() { assertTrue(arrayCopy.equals(arrayOrig)); } - public void checkExternalization(FastArray arrayInput, - FastArray arrayReceiver) throws Exception { + public void checkExternalization(FastArray arrayInput, FastArray arrayReceiver) + throws Exception { ByteArrayOutputStream bos = new ByteArrayOutputStream(); ObjectOutputStream oos = new ObjectOutputStream(bos); try { @@ -556,8 +556,7 @@ private void checkCopyValuesDeep(int nItems) { assertFalse(itemReceive.equals(itemInput)); } - // copy the value from the input back into the receive array (now that we already have - // values in there + // copy the value from the input back into the receive array (now that we already have values in there FastArray.copyValuesDeep(arrayReceiver, arrayInput); // check the receive array against the input array diff --git a/Base/src/test/java/io/deephaven/base/array/FastBooleanArrayTest.java b/Base/src/test/java/io/deephaven/base/array/FastBooleanArrayTest.java index 9c39eb84bf5..6dd4d6bf70c 100644 --- a/Base/src/test/java/io/deephaven/base/array/FastBooleanArrayTest.java +++ b/Base/src/test/java/io/deephaven/base/array/FastBooleanArrayTest.java @@ -287,8 +287,7 
@@ public void testDeepCopyAndEquals() { assertTrue(arrayCopy.equals(arrayOrig)); } - public void checkExternalization(FastBooleanArray arrayInput, FastBooleanArray arrayReceiver) - throws Exception { + public void checkExternalization(FastBooleanArray arrayInput, FastBooleanArray arrayReceiver) throws Exception { ByteArrayOutputStream bos = new ByteArrayOutputStream(); ObjectOutputStream oos = new ObjectOutputStream(bos); try { @@ -461,8 +460,7 @@ private void checkCopyValuesDeep(int nItems) { assertFalse(itemInput == itemReceive); } - // copy the value from the input back into the receive array (now that we already have - // values in there + // copy the value from the input back into the receive array (now that we already have values in there arrayReceiver = arrayInput.safeClone(); // check the receive array against the input array diff --git a/Base/src/test/java/io/deephaven/base/array/FastByteArrayTest.java b/Base/src/test/java/io/deephaven/base/array/FastByteArrayTest.java index 10699aad6b1..30467dabd39 100644 --- a/Base/src/test/java/io/deephaven/base/array/FastByteArrayTest.java +++ b/Base/src/test/java/io/deephaven/base/array/FastByteArrayTest.java @@ -286,8 +286,7 @@ public void testDeepCopyAndEquals() { assertTrue(arrayCopy.equals(arrayOrig)); } - public void checkExternalization(FastByteArray arrayInput, FastByteArray arrayReceiver) - throws Exception { + public void checkExternalization(FastByteArray arrayInput, FastByteArray arrayReceiver) throws Exception { ByteArrayOutputStream bos = new ByteArrayOutputStream(); ObjectOutputStream oos = new ObjectOutputStream(bos); try { @@ -448,8 +447,7 @@ private void checkCopyValuesDeep(int nItems) { assertFalse(itemInput == itemReceive); } - // copy the value from the input back into the receive array (now that we already have - // values in there + // copy the value from the input back into the receive array (now that we already have values in there arrayReceiver = arrayInput.safeClone(); // check the receive 
array against the input array diff --git a/Base/src/test/java/io/deephaven/base/array/FastCharArrayTest.java b/Base/src/test/java/io/deephaven/base/array/FastCharArrayTest.java index 5babf16203f..3811940aab5 100644 --- a/Base/src/test/java/io/deephaven/base/array/FastCharArrayTest.java +++ b/Base/src/test/java/io/deephaven/base/array/FastCharArrayTest.java @@ -288,8 +288,7 @@ public void testDeepCopyAndEquals() { assertTrue(arrayCopy.equals(arrayOrig)); } - public void checkExternalization(FastCharArray arrayInput, FastCharArray arrayReceiver) - throws Exception { + public void checkExternalization(FastCharArray arrayInput, FastCharArray arrayReceiver) throws Exception { ByteArrayOutputStream bos = new ByteArrayOutputStream(); ObjectOutputStream oos = new ObjectOutputStream(bos); try { @@ -450,8 +449,7 @@ private void checkCopyValuesDeep(int nItems) { assertFalse(itemInput == itemReceive); } - // copy the value from the input back into the receive array (now that we already have - // values in there + // copy the value from the input back into the receive array (now that we already have values in there arrayReceiver = arrayInput.safeClone(); // check the receive array against the input array diff --git a/Base/src/test/java/io/deephaven/base/array/FastDoubleArrayTest.java b/Base/src/test/java/io/deephaven/base/array/FastDoubleArrayTest.java index 0b2edd662c9..35abea890f7 100644 --- a/Base/src/test/java/io/deephaven/base/array/FastDoubleArrayTest.java +++ b/Base/src/test/java/io/deephaven/base/array/FastDoubleArrayTest.java @@ -286,8 +286,7 @@ public void testDeepCopyAndEquals() { assertTrue(arrayCopy.equals(arrayOrig)); } - public void checkExternalization(FastDoubleArray arrayInput, FastDoubleArray arrayReceiver) - throws Exception { + public void checkExternalization(FastDoubleArray arrayInput, FastDoubleArray arrayReceiver) throws Exception { ByteArrayOutputStream bos = new ByteArrayOutputStream(); ObjectOutputStream oos = new ObjectOutputStream(bos); try { @@ 
-448,8 +447,7 @@ private void checkCopyValuesDeep(int nItems) { assertFalse(itemInput == itemReceive); } - // copy the value from the input back into the receive array (now that we already have - // values in there + // copy the value from the input back into the receive array (now that we already have values in there arrayReceiver = arrayInput.safeClone(); // check the receive array against the input array diff --git a/Base/src/test/java/io/deephaven/base/array/FastFloatArrayTest.java b/Base/src/test/java/io/deephaven/base/array/FastFloatArrayTest.java index 0f2f56c376d..577db12ca9c 100644 --- a/Base/src/test/java/io/deephaven/base/array/FastFloatArrayTest.java +++ b/Base/src/test/java/io/deephaven/base/array/FastFloatArrayTest.java @@ -286,8 +286,7 @@ public void testDeepCopyAndEquals() { assertTrue(arrayCopy.equals(arrayOrig)); } - public void checkExternalization(FastFloatArray arrayInput, FastFloatArray arrayReceiver) - throws Exception { + public void checkExternalization(FastFloatArray arrayInput, FastFloatArray arrayReceiver) throws Exception { ByteArrayOutputStream bos = new ByteArrayOutputStream(); ObjectOutputStream oos = new ObjectOutputStream(bos); try { @@ -448,8 +447,7 @@ private void checkCopyValuesDeep(int nItems) { assertFalse(itemInput == itemReceive); } - // copy the value from the input back into the receive array (now that we already have - // values in there + // copy the value from the input back into the receive array (now that we already have values in there arrayReceiver = arrayInput.safeClone(); // check the receive array against the input array diff --git a/Base/src/test/java/io/deephaven/base/array/FastIntArrayTest.java b/Base/src/test/java/io/deephaven/base/array/FastIntArrayTest.java index dade745ce5c..7c2327d8e1c 100644 --- a/Base/src/test/java/io/deephaven/base/array/FastIntArrayTest.java +++ b/Base/src/test/java/io/deephaven/base/array/FastIntArrayTest.java @@ -286,8 +286,7 @@ public void testDeepCopyAndEquals() { 
assertTrue(arrayCopy.equals(arrayOrig)); } - public void checkExternalization(FastIntArray arrayInput, FastIntArray arrayReceiver) - throws Exception { + public void checkExternalization(FastIntArray arrayInput, FastIntArray arrayReceiver) throws Exception { ByteArrayOutputStream bos = new ByteArrayOutputStream(); ObjectOutputStream oos = new ObjectOutputStream(bos); try { @@ -448,8 +447,7 @@ private void checkCopyValuesDeep(int nItems) { assertFalse(itemInput == itemReceive); } - // copy the value from the input back into the receive array (now that we already have - // values in there + // copy the value from the input back into the receive array (now that we already have values in there arrayReceiver = arrayInput.safeClone(); // check the receive array against the input array diff --git a/Base/src/test/java/io/deephaven/base/array/FastLongArrayTest.java b/Base/src/test/java/io/deephaven/base/array/FastLongArrayTest.java index 296564e3365..df2b37ec6af 100644 --- a/Base/src/test/java/io/deephaven/base/array/FastLongArrayTest.java +++ b/Base/src/test/java/io/deephaven/base/array/FastLongArrayTest.java @@ -288,8 +288,7 @@ public void testDeepCopyAndEquals() { assertTrue(arrayCopy.equals(arrayOrig)); } - public void checkExternalization(FastLongArray arrayInput, FastLongArray arrayReceiver) - throws Exception { + public void checkExternalization(FastLongArray arrayInput, FastLongArray arrayReceiver) throws Exception { ByteArrayOutputStream bos = new ByteArrayOutputStream(); ObjectOutputStream oos = new ObjectOutputStream(bos); try { @@ -450,8 +449,7 @@ private void checkCopyValuesDeep(int nItems) { assertFalse(itemInput == itemReceive); } - // copy the value from the input back into the receive array (now that we already have - // values in there + // copy the value from the input back into the receive array (now that we already have values in there arrayReceiver = arrayInput.safeClone(); // check the receive array against the input array diff --git 
a/Base/src/test/java/io/deephaven/base/array/FastShortArrayTest.java b/Base/src/test/java/io/deephaven/base/array/FastShortArrayTest.java index 2f8e2fe782f..0b429c701c4 100644 --- a/Base/src/test/java/io/deephaven/base/array/FastShortArrayTest.java +++ b/Base/src/test/java/io/deephaven/base/array/FastShortArrayTest.java @@ -288,8 +288,7 @@ public void testDeepCopyAndEquals() { assertTrue(arrayCopy.equals(arrayOrig)); } - public void checkExternalization(FastShortArray arrayInput, FastShortArray arrayReceiver) - throws Exception { + public void checkExternalization(FastShortArray arrayInput, FastShortArray arrayReceiver) throws Exception { ByteArrayOutputStream bos = new ByteArrayOutputStream(); ObjectOutputStream oos = new ObjectOutputStream(bos); try { @@ -450,8 +449,7 @@ private void checkCopyValuesDeep(int nItems) { assertFalse(itemInput == itemReceive); } - // copy the value from the input back into the receive array (now that we already have - // values in there + // copy the value from the input back into the receive array (now that we already have values in there arrayReceiver = arrayInput.safeClone(); // check the receive array against the input array diff --git a/Base/src/test/java/io/deephaven/base/array/TrialClassA.java b/Base/src/test/java/io/deephaven/base/array/TrialClassA.java index a8d2ee70b22..fdeee0e13ba 100644 --- a/Base/src/test/java/io/deephaven/base/array/TrialClassA.java +++ b/Base/src/test/java/io/deephaven/base/array/TrialClassA.java @@ -43,8 +43,7 @@ public synchronized void writeExternal(ObjectOutput out) throws IOException { } @Override - public synchronized void readExternal(ObjectInput in) - throws IOException, ClassNotFoundException { + public synchronized void readExternal(ObjectInput in) throws IOException, ClassNotFoundException { int localVersionId = in.readInt(); double1 = in.readDouble(); int1 = in.readInt(); @@ -156,8 +155,7 @@ public void writeExternal(ObjectOutput out, TrialClassA item) throws IOException private static class 
TestClassReader implements FastArray.ReadExternalFunction { @Override - public void readExternal(ObjectInput in, TrialClassA item) - throws IOException, ClassNotFoundException { + public void readExternal(ObjectInput in, TrialClassA item) throws IOException, ClassNotFoundException { TrialClassA readObject = (TrialClassA) in.readObject(); item.copyValues(readObject); } @@ -173,8 +171,7 @@ public void readExternal(ObjectInput in, TrialClassA item) // return new TestClassReaderF(); // } // - // private static class TestClassWriterF implements - // FastArrayF.WriteExternalFunction { + // private static class TestClassWriterF implements FastArrayF.WriteExternalFunction { // // @Override // public void writeExternal(ObjectOutput out, TrialClassA item) throws IOException { @@ -182,11 +179,9 @@ public void readExternal(ObjectInput in, TrialClassA item) // } // } // - // private static class TestClassReaderF implements FastArrayF.ReadExternalFunction - // { + // private static class TestClassReaderF implements FastArrayF.ReadExternalFunction { // @Override - // public void readExternal(ObjectInput in, TrialClassA item) throws IOException, - // ClassNotFoundException { + // public void readExternal(ObjectInput in, TrialClassA item) throws IOException, ClassNotFoundException { // TrialClassA readObject = (TrialClassA) in.readObject(); // item.copyValues(readObject); // } diff --git a/Base/src/test/java/io/deephaven/base/cache/TestKeyedObjectCache.java b/Base/src/test/java/io/deephaven/base/cache/TestKeyedObjectCache.java index bcd5d76071d..159eb1807ad 100644 --- a/Base/src/test/java/io/deephaven/base/cache/TestKeyedObjectCache.java +++ b/Base/src/test/java/io/deephaven/base/cache/TestKeyedObjectCache.java @@ -14,13 +14,12 @@ @SuppressWarnings({"RedundantStringConstructorCall", "JUnit4AnnotatedMethodInJUnit3TestCase"}) public class TestKeyedObjectCache extends TestCase { - private static final KeyedObjectKey KEY = - new KeyedObjectKey.Basic() { - @Override - public Object 
getKey(Object o) { - return o; - } - }; + private static final KeyedObjectKey KEY = new KeyedObjectKey.Basic() { + @Override + public Object getKey(Object o) { + return o; + } + }; private static final Procedure.Unary POST = new Procedure.Unary() { @Override public void call(Object arg) { @@ -39,29 +38,19 @@ public int nextInt(int n) { @Test public void testKeyedObjectCache() { - final KeyedObjectCache cache = - new KeyedObjectCache<>(6, 2, KEY, POST, RANDOM); + final KeyedObjectCache cache = new KeyedObjectCache<>(6, 2, KEY, POST, RANDOM); TestCase.assertEquals(7, cache.getCapacity()); TestCase.assertEquals(2, cache.getProbeSequenceLength()); - // A=65, hashes to 65, first bucket is 65 % 7 == 2, second bucket is 2 - (1 + (65 % (7 - - // 2))) == 1 - // B=66, hashes to 66, first bucket is 66 % 7 == 3, second bucket is 3 - (1 + (66 % (7 - - // 2))) == 1 - // C=67, hashes to 67, first bucket is 67 % 7 == 4, second bucket is 4 - (1 + (67 % (7 - - // 2))) == 1 - // D=68, hashes to 68, first bucket is 68 % 7 == 5, second bucket is 5 - (1 + (68 % (7 - - // 2))) == 1 - // E=69, hashes to 69, first bucket is 69 % 7 == 6, second bucket is 6 - (1 + (69 % (7 - - // 2))) == 1 - // F=70, hashes to 70, first bucket is 70 % 7 == 0, second bucket is 0 - (1 + (70 % (7 - - // 2))) + 7 == 6 - // G=71, hashes to 71, first bucket is 71 % 7 == 1, second bucket is 1 - (1 + (71 % (7 - - // 2))) + 7 == 6 - // H=72, hashes to 72, first bucket is 72 % 7 == 2, second bucket is 2 - (1 + (72 % (7 - - // 2))) + 7 == 6 - // I=73, hashes to 73, first bucket is 73 % 7 == 3, second bucket is 3 - (1 + (73 % (7 - - // 2))) + 7 == 6 + // A=65, hashes to 65, first bucket is 65 % 7 == 2, second bucket is 2 - (1 + (65 % (7 - 2))) == 1 + // B=66, hashes to 66, first bucket is 66 % 7 == 3, second bucket is 3 - (1 + (66 % (7 - 2))) == 1 + // C=67, hashes to 67, first bucket is 67 % 7 == 4, second bucket is 4 - (1 + (67 % (7 - 2))) == 1 + // D=68, hashes to 68, first bucket is 68 % 7 == 5, second bucket is 5 - 
(1 + (68 % (7 - 2))) == 1 + // E=69, hashes to 69, first bucket is 69 % 7 == 6, second bucket is 6 - (1 + (69 % (7 - 2))) == 1 + // F=70, hashes to 70, first bucket is 70 % 7 == 0, second bucket is 0 - (1 + (70 % (7 - 2))) + 7 == 6 + // G=71, hashes to 71, first bucket is 71 % 7 == 1, second bucket is 1 - (1 + (71 % (7 - 2))) + 7 == 6 + // H=72, hashes to 72, first bucket is 72 % 7 == 2, second bucket is 2 - (1 + (72 % (7 - 2))) + 7 == 6 + // I=73, hashes to 73, first bucket is 73 % 7 == 3, second bucket is 3 - (1 + (73 % (7 - 2))) + 7 == 6 // Fill bucket 2 final String A = "A"; diff --git a/Base/src/test/java/io/deephaven/base/cache/TestOpenAddressedCanonicalizationCache.java b/Base/src/test/java/io/deephaven/base/cache/TestOpenAddressedCanonicalizationCache.java index 94d50699406..4a08b6e3568 100644 --- a/Base/src/test/java/io/deephaven/base/cache/TestOpenAddressedCanonicalizationCache.java +++ b/Base/src/test/java/io/deephaven/base/cache/TestOpenAddressedCanonicalizationCache.java @@ -13,8 +13,7 @@ public class TestOpenAddressedCanonicalizationCache extends TestCase { @Test public void testDefaultAdapter() { - final OpenAddressedCanonicalizationCache SUT = - new OpenAddressedCanonicalizationCache(1, 0.9f); + final OpenAddressedCanonicalizationCache SUT = new OpenAddressedCanonicalizationCache(1, 0.9f); final Integer[] cachedIntegers = new Integer[SUT.getOccupancyThreshold() * 100]; final Double[] cachedDoubles = new Double[cachedIntegers.length]; @@ -123,31 +122,30 @@ public void testDefaultAdapter() { * This is a really crappy example... but it's fine for a unit test. 
*/ private static OpenAddressedCanonicalizationCache.Adapter OSA = - new OpenAddressedCanonicalizationCache.Adapter() { - - @Override - public boolean equals(@NotNull Object inputItem, @NotNull Object cachedItem) { - if (cachedItem instanceof String) { - return inputItem.toString().equals(cachedItem); + new OpenAddressedCanonicalizationCache.Adapter() { + + @Override + public boolean equals(@NotNull Object inputItem, @NotNull Object cachedItem) { + if (cachedItem instanceof String) { + return inputItem.toString().equals(cachedItem); + } + return false; } - return false; - } - @Override - public int hashCode(@NotNull Object inputItem) { - return inputItem.toString().hashCode(); - } + @Override + public int hashCode(@NotNull Object inputItem) { + return inputItem.toString().hashCode(); + } - @Override - public String makeCacheableItem(@NotNull Object inputItem) { - return inputItem.toString(); - } - }; + @Override + public String makeCacheableItem(@NotNull Object inputItem) { + return inputItem.toString(); + } + }; @Test public void testSpecialAdapters() { - final OpenAddressedCanonicalizationCache SUT = - new OpenAddressedCanonicalizationCache(1, 0.9f); + final OpenAddressedCanonicalizationCache SUT = new OpenAddressedCanonicalizationCache(1, 0.9f); final String[] cachedStrings = new String[SUT.getOccupancyThreshold() * 100 * 2]; diff --git a/Base/src/test/java/io/deephaven/base/map/FastArrayMapLongToObjectTest.java b/Base/src/test/java/io/deephaven/base/map/FastArrayMapLongToObjectTest.java index 138c0085daf..22d0c126ecc 100644 --- a/Base/src/test/java/io/deephaven/base/map/FastArrayMapLongToObjectTest.java +++ b/Base/src/test/java/io/deephaven/base/map/FastArrayMapLongToObjectTest.java @@ -15,10 +15,8 @@ public class FastArrayMapLongToObjectTest extends TestCase { private FastArrayMapLongToObject constructTestMap() { - Function.Nullary> factoryLongToObject = - new FactoryLongToObject(); - FastArrayMapLongToObject map = - new 
FastArrayMapLongToObject(factoryLongToObject); + Function.Nullary> factoryLongToObject = new FactoryLongToObject(); + FastArrayMapLongToObject map = new FastArrayMapLongToObject(factoryLongToObject); return map; } @@ -399,12 +397,11 @@ public void testRemove() { } public void checkExternalization(FastArrayMapLongToObject mapInput, - FastArrayMapLongToObject mapReceiver) throws Exception { + FastArrayMapLongToObject mapReceiver) throws Exception { ByteArrayOutputStream bos = new ByteArrayOutputStream(); ObjectOutputStream oos = new ObjectOutputStream(bos); try { - FastArrayMapLongToObject.writeExternal(mapInput, oos, - KeyValuePairLongToObjectTest.writer); + FastArrayMapLongToObject.writeExternal(mapInput, oos, KeyValuePairLongToObjectTest.writer); } catch (IllegalArgumentException e) { if (mapInput == null) { // this is an expected failure @@ -418,8 +415,7 @@ public void checkExternalization(FastArrayMapLongToObject mapInput, ByteArrayInputStream bis = new ByteArrayInputStream(bos.toByteArray()); ObjectInputStream ois = new ObjectInputStream(bis); try { - FastArrayMapLongToObject.readExternal(mapReceiver, ois, - KeyValuePairLongToObjectTest.reader); + FastArrayMapLongToObject.readExternal(mapReceiver, ois, KeyValuePairLongToObjectTest.reader); } catch (IllegalArgumentException e) { if (mapReceiver == null) { // this is an expected failure @@ -523,8 +519,7 @@ private void checkCopyValuesDeep(int nItems) { for (int i = 0; i < nItems; i++) { long key = myRandom.nextLong(); TrialClassA val = FastArrayTest.makeRandomTestObject(myRandom); - arrayReceiver.getArray().getUnsafeArray()[i] = - new KeyValuePairLongToObject(key, val); + arrayReceiver.getArray().getUnsafeArray()[i] = new KeyValuePairLongToObject(key, val); } // verify they are not equal @@ -539,8 +534,7 @@ private void checkCopyValuesDeep(int nItems) { assertFalse(itemReceive.equals(itemInput)); } - // copy the value from the input back into the receive array (now that we already have - // values in there + // 
copy the value from the input back into the receive array (now that we already have values in there arrayReceiver = arrayInput.cloneDeep(); // check the receive array against the input array @@ -620,15 +614,13 @@ public void testDeepCloneManyItemsGeneralScan() { } public void testCompareToBaseFastArrayMap() { - Function.Nullary> factoryLongToObject = - new FactoryLongToObject(); + Function.Nullary> factoryLongToObject = new FactoryLongToObject(); FastArrayMapLongToObject mapLongToObject = - new FastArrayMapLongToObject(factoryLongToObject); + new FastArrayMapLongToObject(factoryLongToObject); - Function.Nullary> factoryObjectToObject = - new FactoryObjectToObject(); + Function.Nullary> factoryObjectToObject = new FactoryObjectToObject(); FastArrayMap mapObjectToObject = - new FastArrayMap(factoryObjectToObject); + new FastArrayMap(factoryObjectToObject); Random myRandom = new Random(98723498L); double clearFraction = 0.03; @@ -642,8 +634,7 @@ public void testCompareToBaseFastArrayMap() { if (myRandom.nextFloat() < addFraction) { // System.out.println(i+" ADD"); long key = myRandom.nextInt(maxKey); // get a key between 0 and maxKey - TrialClassA value = - new TrialClassA(myRandom.nextDouble(), myRandom.nextInt(), myRandom.nextLong()); + TrialClassA value = new TrialClassA(myRandom.nextDouble(), myRandom.nextInt(), myRandom.nextLong()); mapObjectToObject.put(new LongWrapper(key), value); mapLongToObject.put(key, value); } @@ -679,10 +670,8 @@ public void testCompareToBaseFastArrayMap() { assertEquals(mapObjectToObject.size(), mapLongToObject.size()); assertEquals(mapObjectToObject.isEmpty(), mapLongToObject.isEmpty()); for (int j = 0; j < mapObjectToObject.size(); j++) { - KeyValuePair refPair = - mapObjectToObject.getArray().getUnsafeArray()[j]; - KeyValuePairLongToObject trialPair = - mapLongToObject.getArray().getUnsafeArray()[j]; + KeyValuePair refPair = mapObjectToObject.getArray().getUnsafeArray()[j]; + KeyValuePairLongToObject trialPair = 
mapLongToObject.getArray().getUnsafeArray()[j]; assertEquals(refPair.getKey().getVal(), trialPair.getKey()); assertTrue(refPair.getValue().equals(trialPair.getValue())); } @@ -699,8 +688,7 @@ class FactoryLongToObject implements Function.Nullary call() { long key = Long.MIN_VALUE; TrialClassA value = TrialClassA.makeNull(); - KeyValuePairLongToObject result = - new KeyValuePairLongToObject(key, value); + KeyValuePairLongToObject result = new KeyValuePairLongToObject(key, value); return result; } } diff --git a/Base/src/test/java/io/deephaven/base/map/FastArrayMapTest.java b/Base/src/test/java/io/deephaven/base/map/FastArrayMapTest.java index ae5d0859aa0..f75cc0e8bad 100644 --- a/Base/src/test/java/io/deephaven/base/map/FastArrayMapTest.java +++ b/Base/src/test/java/io/deephaven/base/map/FastArrayMapTest.java @@ -15,10 +15,8 @@ public class FastArrayMapTest extends TestCase { private FastArrayMap constructTestMap() { - Function.Nullary> factoryObjectToObject = - new FactoryObjectToObject(); - FastArrayMap map = - new FastArrayMap(factoryObjectToObject); + Function.Nullary> factoryObjectToObject = new FactoryObjectToObject(); + FastArrayMap map = new FastArrayMap(factoryObjectToObject); return map; } @@ -399,7 +397,7 @@ public void testRemove() { } public void checkExternalization(FastArrayMap mapInput, - FastArrayMap mapReceiver) throws Exception { + FastArrayMap mapReceiver) throws Exception { ByteArrayOutputStream bos = new ByteArrayOutputStream(); ObjectOutputStream oos = new ObjectOutputStream(bos); try { @@ -521,8 +519,7 @@ private void checkCopyValuesDeep(int nItems) { for (int i = 0; i < nItems; i++) { LongWrapper key = new LongWrapper(myRandom.nextLong()); TrialClassA val = FastArrayTest.makeRandomTestObject(myRandom); - arrayReceiver.getArray().getUnsafeArray()[i] = - new KeyValuePair(key, val); + arrayReceiver.getArray().getUnsafeArray()[i] = new KeyValuePair(key, val); } // verify they are not equal @@ -537,8 +534,7 @@ private void checkCopyValuesDeep(int 
nItems) { assertFalse(itemReceive.equals(itemInput)); } - // copy the value from the input back into the receive array (now that we already have - // values in there + // copy the value from the input back into the receive array (now that we already have values in there arrayReceiver = arrayInput.cloneDeep(); // check the receive array against the input array @@ -625,8 +621,7 @@ class FactoryObjectToObject implements Function.Nullary call() { LongWrapper key = new LongWrapper(Long.MIN_VALUE); TrialClassA value = TrialClassA.makeNull(); - KeyValuePair result = - new KeyValuePair(key, value); + KeyValuePair result = new KeyValuePair(key, value); return result; } } diff --git a/Base/src/test/java/io/deephaven/base/map/KeyValuePairLongToObjectTest.java b/Base/src/test/java/io/deephaven/base/map/KeyValuePairLongToObjectTest.java index 742d7705afc..54d36376428 100644 --- a/Base/src/test/java/io/deephaven/base/map/KeyValuePairLongToObjectTest.java +++ b/Base/src/test/java/io/deephaven/base/map/KeyValuePairLongToObjectTest.java @@ -21,12 +21,11 @@ private static KeyValuePairLongToObject makeRandomKvp(Random myRand } - private static class Reader - implements FastArray.ReadExternalFunction> { + private static class Reader implements FastArray.ReadExternalFunction> { @Override public void readExternal(ObjectInput in, KeyValuePairLongToObject item) - throws IOException, ClassNotFoundException { + throws IOException, ClassNotFoundException { byte nullByteA = in.readByte(); // System.out.println("read nullByteA: " + nullByteA); if (nullByteA == 1) { @@ -55,8 +54,7 @@ public void readExternal(ObjectInput in, KeyValuePairLongToObject i oldValue.readExternal(in); // System.out.println("read in value\n" + oldValue); } else { - throw new IllegalStateException( - "did not recognize your nullByteC: " + nullByteC); + throw new IllegalStateException("did not recognize your nullByteC: " + nullByteC); } @@ -66,12 +64,10 @@ public void readExternal(ObjectInput in, KeyValuePairLongToObject 
i } } - private static class Writer - implements FastArray.WriteExternalFunction> { + private static class Writer implements FastArray.WriteExternalFunction> { @Override - public void writeExternal(ObjectOutput out, KeyValuePairLongToObject item) - throws IOException { + public void writeExternal(ObjectOutput out, KeyValuePairLongToObject item) throws IOException { if (item == null) { // System.out.println("write nullByteA = 1"); out.writeByte(1); // nullByteA @@ -110,8 +106,7 @@ public void testSimple() { long long1 = 978234897L; TrialClassA value = new TrialClassA(double1, int1, long1); - KeyValuePairLongToObject kvp = - new KeyValuePairLongToObject(key, value); + KeyValuePairLongToObject kvp = new KeyValuePairLongToObject(key, value); // values in key assertEquals(key, kvp.getKey()); @@ -168,7 +163,7 @@ public void testCopyValuesDeep() { } public void checkExternalization(KeyValuePairLongToObject kvpInput, - KeyValuePairLongToObject kvpReceiver) throws Exception { + KeyValuePairLongToObject kvpReceiver) throws Exception { if (kvpInput == null) { fail("writing from a null kvpInput"); } @@ -196,25 +191,21 @@ public void checkExternalization(KeyValuePairLongToObject kvpInput, } public void testExternalizationNullInputsNullReceiver() throws Exception { - KeyValuePairLongToObject kvpInput = - new KeyValuePairLongToObject(); - KeyValuePairLongToObject kvpReceiver = - new KeyValuePairLongToObject(); + KeyValuePairLongToObject kvpInput = new KeyValuePairLongToObject(); + KeyValuePairLongToObject kvpReceiver = new KeyValuePairLongToObject(); checkExternalization(kvpInput, kvpReceiver); } public void testExternalizationNullInputsValidReceiver() throws Exception { Random myRandom = new Random(89324L); - KeyValuePairLongToObject kvpInput = - new KeyValuePairLongToObject(); + KeyValuePairLongToObject kvpInput = new KeyValuePairLongToObject(); KeyValuePairLongToObject kvpReceiver = makeRandomKvp(myRandom); checkExternalization(kvpInput, kvpReceiver); } public void 
testExternalizationValidInputsNullReceiver() throws Exception { Random myRandom = new Random(89324L); - KeyValuePairLongToObject kvpReceiver = - new KeyValuePairLongToObject(); + KeyValuePairLongToObject kvpReceiver = new KeyValuePairLongToObject(); KeyValuePairLongToObject kvpInput = makeRandomKvp(myRandom); checkExternalization(kvpInput, kvpReceiver); } diff --git a/Base/src/test/java/io/deephaven/base/map/KeyValuePairTest.java b/Base/src/test/java/io/deephaven/base/map/KeyValuePairTest.java index 6c6a871cf9c..63a611213ea 100644 --- a/Base/src/test/java/io/deephaven/base/map/KeyValuePairTest.java +++ b/Base/src/test/java/io/deephaven/base/map/KeyValuePairTest.java @@ -20,12 +20,11 @@ private static KeyValuePair makeRandomKvp(Random myRan return new KeyValuePair(key, value); } - private static class Reader - implements FastArray.ReadExternalFunction> { + private static class Reader implements FastArray.ReadExternalFunction> { @Override public void readExternal(ObjectInput in, KeyValuePair item) - throws IOException, ClassNotFoundException { + throws IOException, ClassNotFoundException { byte nullByteA = in.readByte(); // System.out.println("read nullByteA: " + nullByteA); if (nullByteA == 1) { @@ -49,8 +48,7 @@ public void readExternal(ObjectInput in, KeyValuePair oldKey.readExternal(in); // System.out.println("read in key\n" + oldKey); } else { - throw new IllegalStateException( - "did not recognize your nullByteB: " + nullByteB); + throw new IllegalStateException("did not recognize your nullByteB: " + nullByteB); } // value @@ -69,8 +67,7 @@ public void readExternal(ObjectInput in, KeyValuePair oldValue.readExternal(in); // System.out.println("read in value\n" + oldValue); } else { - throw new IllegalStateException( - "did not recognize your nullByteC: " + nullByteC); + throw new IllegalStateException("did not recognize your nullByteC: " + nullByteC); } @@ -80,12 +77,10 @@ public void readExternal(ObjectInput in, KeyValuePair } } - private static class Writer - 
implements FastArray.WriteExternalFunction> { + private static class Writer implements FastArray.WriteExternalFunction> { @Override - public void writeExternal(ObjectOutput out, KeyValuePair item) - throws IOException { + public void writeExternal(ObjectOutput out, KeyValuePair item) throws IOException { if (item == null) { // System.out.println("write nullByteA = 1"); out.writeByte(1); // nullByteA @@ -133,8 +128,7 @@ public void testSimple() { long long1 = 978234897L; TrialClassA value = new TrialClassA(double1, int1, long1); - KeyValuePair kvp = - new KeyValuePair(key, value); + KeyValuePair kvp = new KeyValuePair(key, value); // values in key assertEquals(longKey, kvp.getKey().getVal()); @@ -191,7 +185,7 @@ public void testCopyValuesDeep() { } public void checkExternalization(KeyValuePair kvpInput, - KeyValuePair kvpReceiver) throws Exception { + KeyValuePair kvpReceiver) throws Exception { if (kvpInput == null) { fail("writing from a null kvpInput"); } @@ -219,25 +213,21 @@ public void checkExternalization(KeyValuePair kvpInput } public void testExternalizationNullInputsNullReceiver() throws Exception { - KeyValuePair kvpInput = - new KeyValuePair(); - KeyValuePair kvpReceiver = - new KeyValuePair(); + KeyValuePair kvpInput = new KeyValuePair(); + KeyValuePair kvpReceiver = new KeyValuePair(); checkExternalization(kvpInput, kvpReceiver); } public void testExternalizationNullInputsValidReceiver() throws Exception { Random myRandom = new Random(89324L); - KeyValuePair kvpInput = - new KeyValuePair(); + KeyValuePair kvpInput = new KeyValuePair(); KeyValuePair kvpReceiver = makeRandomKvp(myRandom); checkExternalization(kvpInput, kvpReceiver); } public void testExternalizationValidInputsNullReceiver() throws Exception { Random myRandom = new Random(89324L); - KeyValuePair kvpReceiver = - new KeyValuePair(); + KeyValuePair kvpReceiver = new KeyValuePair(); KeyValuePair kvpInput = makeRandomKvp(myRandom); checkExternalization(kvpInput, kvpReceiver); } diff --git 
a/Base/src/test/java/io/deephaven/base/pool/TestThreadSafeFixedSizePool.java b/Base/src/test/java/io/deephaven/base/pool/TestThreadSafeFixedSizePool.java index 4f3e1b268f0..be714d848fb 100644 --- a/Base/src/test/java/io/deephaven/base/pool/TestThreadSafeFixedSizePool.java +++ b/Base/src/test/java/io/deephaven/base/pool/TestThreadSafeFixedSizePool.java @@ -35,11 +35,10 @@ public void testThreadSafeFixedSizePool() { for (Object object : OBJECTS) { m_mockObjectFactory.add(object); } - Pool pool = ThreadSafeFixedSizePool.FACTORY.create(OBJECTS.length, - m_mockObjectFactory, m_mockClearingProcedure); - assertEquals( - "call()call()call()call()call()call()call()call()call()call()call()call()call()call()", - m_mockObjectFactory.getActivityRecordAndReset()); + Pool pool = + ThreadSafeFixedSizePool.FACTORY.create(OBJECTS.length, m_mockObjectFactory, m_mockClearingProcedure); + assertEquals("call()call()call()call()call()call()call()call()call()call()call()call()call()call()", + m_mockObjectFactory.getActivityRecordAndReset()); // take Object alphaObject = OBJECTS[0]; @@ -72,8 +71,7 @@ public void testThreadSafeFixedSizePool() { // give for (Object object : OBJECTS) { pool.give(object); - assertEquals("call(" + object + ")", - m_mockClearingProcedure.getActivityRecordAndReset()); + assertEquals("call(" + object + ")", m_mockClearingProcedure.getActivityRecordAndReset()); checkNoOtherActivity(); } @@ -91,11 +89,9 @@ public void testThreadSafeFixedSizePoolNoClearingProcedure() { for (Object object : OBJECTS) { m_mockObjectFactory.add(object); } - Pool pool = - ThreadSafeFixedSizePool.FACTORY.create(OBJECTS.length, m_mockObjectFactory, null); - assertEquals( - "call()call()call()call()call()call()call()call()call()call()call()call()call()call()", - m_mockObjectFactory.getActivityRecordAndReset()); + Pool pool = ThreadSafeFixedSizePool.FACTORY.create(OBJECTS.length, m_mockObjectFactory, null); + 
assertEquals("call()call()call()call()call()call()call()call()call()call()call()call()call()call()", + m_mockObjectFactory.getActivityRecordAndReset()); // take Object alphaObject = OBJECTS[0]; @@ -139,8 +135,7 @@ public void testThreadSafeFixedSizePoolNoFactory() { m_mockObjectFactory.add(object); } new ThreadSafeFixedSizePool(7, m_mockObjectFactory, null); - assertEquals("call()call()call()call()call()call()call()", - m_mockObjectFactory.getActivityRecordAndReset()); + assertEquals("call()call()call()call()call()call()call()", m_mockObjectFactory.getActivityRecordAndReset()); // no factory try { @@ -163,8 +158,7 @@ public void testThreadSafeFixedSizePoolNoFactory() { m_mockObjectFactory.add(object); } ThreadSafeFixedSizePool.FACTORY.create(7, m_mockObjectFactory, null); - assertEquals("call()call()call()call()call()call()call()", - m_mockObjectFactory.getActivityRecordAndReset()); + assertEquals("call()call()call()call()call()call()call()", m_mockObjectFactory.getActivityRecordAndReset()); } // ---------------------------------------------------------------- diff --git a/Base/src/test/java/io/deephaven/base/pool/TestThreadSafeLenientFixedSizePool.java b/Base/src/test/java/io/deephaven/base/pool/TestThreadSafeLenientFixedSizePool.java index 03930484875..23b8ddb154d 100644 --- a/Base/src/test/java/io/deephaven/base/pool/TestThreadSafeLenientFixedSizePool.java +++ b/Base/src/test/java/io/deephaven/base/pool/TestThreadSafeLenientFixedSizePool.java @@ -36,11 +36,10 @@ public void testThreadSafeLenientFixedSizePool() { for (Object object : OBJECTS) { m_mockObjectFactory.add(object); } - Pool pool = ThreadSafeLenientFixedSizePool.FACTORY.create(OBJECTS.length, - m_mockObjectFactory, m_mockClearingProcedure); - assertEquals( - "call()call()call()call()call()call()call()call()call()call()call()call()call()call()", - m_mockObjectFactory.getActivityRecordAndReset()); + Pool pool = ThreadSafeLenientFixedSizePool.FACTORY.create(OBJECTS.length, m_mockObjectFactory, + 
m_mockClearingProcedure); + assertEquals("call()call()call()call()call()call()call()call()call()call()call()call()call()call()", + m_mockObjectFactory.getActivityRecordAndReset()); // take Object alphaObject = OBJECTS[0]; @@ -82,8 +81,7 @@ public void testThreadSafeLenientFixedSizePool() { // give for (Object object : OBJECTS) { pool.give(object); - assertEquals("call(" + object + ")", - m_mockClearingProcedure.getActivityRecordAndReset()); + assertEquals("call(" + object + ")", m_mockClearingProcedure.getActivityRecordAndReset()); checkNoOtherActivity(); } @@ -101,11 +99,9 @@ public void testThreadSafeLenientFixedSizePoolNoClearingProcedure() { for (Object object : OBJECTS) { m_mockObjectFactory.add(object); } - Pool pool = ThreadSafeLenientFixedSizePool.FACTORY.create(OBJECTS.length, - m_mockObjectFactory, null); - assertEquals( - "call()call()call()call()call()call()call()call()call()call()call()call()call()call()", - m_mockObjectFactory.getActivityRecordAndReset()); + Pool pool = ThreadSafeLenientFixedSizePool.FACTORY.create(OBJECTS.length, m_mockObjectFactory, null); + assertEquals("call()call()call()call()call()call()call()call()call()call()call()call()call()call()", + m_mockObjectFactory.getActivityRecordAndReset()); // take Object alphaObject = OBJECTS[0]; @@ -135,8 +131,7 @@ public void testThreadSafeLenientFixedSizePoolNoFactory() { RequirementFailure failure = null; try { new ThreadSafeLenientFixedSizePool(OBJECTS.length, - (Function.Unary>) null, - m_mockClearingProcedure); + (Function.Unary>) null, m_mockClearingProcedure); } catch (RequirementFailure requirementFailure) { failure = requirementFailure; // assertTrue(requirementFailure.isThisStackFrameCulprit(0)); @@ -146,8 +141,7 @@ public void testThreadSafeLenientFixedSizePoolNoFactory() { // too small try { - new ThreadSafeLenientFixedSizePool(6, m_mockObjectFactory, - m_mockClearingProcedure); + new ThreadSafeLenientFixedSizePool(6, m_mockObjectFactory, m_mockClearingProcedure); } catch 
(RequirementFailure requirementFailure) { assertTrue(requirementFailure.isThisStackFrameCulprit(0)); } @@ -157,21 +151,18 @@ public void testThreadSafeLenientFixedSizePoolNoFactory() { m_mockObjectFactory.add(object); } new ThreadSafeLenientFixedSizePool(7, m_mockObjectFactory, null); - assertEquals("call()call()call()call()call()call()call()", - m_mockObjectFactory.getActivityRecordAndReset()); + assertEquals("call()call()call()call()call()call()call()", m_mockObjectFactory.getActivityRecordAndReset()); // no factory try { - ThreadSafeLenientFixedSizePool.FACTORY.create(OBJECTS.length, null, - m_mockClearingProcedure); + ThreadSafeLenientFixedSizePool.FACTORY.create(OBJECTS.length, null, m_mockClearingProcedure); } catch (RequirementFailure requirementFailure) { assertTrue(requirementFailure.isThisStackFrameCulprit(0)); } // too small try { - ThreadSafeLenientFixedSizePool.FACTORY.create(6, m_mockObjectFactory, - m_mockClearingProcedure); + ThreadSafeLenientFixedSizePool.FACTORY.create(6, m_mockObjectFactory, m_mockClearingProcedure); } catch (RequirementFailure requirementFailure) { assertTrue(requirementFailure.isThisStackFrameCulprit(0)); } @@ -181,8 +172,7 @@ public void testThreadSafeLenientFixedSizePoolNoFactory() { m_mockObjectFactory.add(object); } ThreadSafeLenientFixedSizePool.FACTORY.create(7, m_mockObjectFactory, null); - assertEquals("call()call()call()call()call()call()call()", - m_mockObjectFactory.getActivityRecordAndReset()); + assertEquals("call()call()call()call()call()call()call()", m_mockObjectFactory.getActivityRecordAndReset()); } // ---------------------------------------------------------------- diff --git a/Base/src/test/java/io/deephaven/base/stats/HistogramPower2Test.java b/Base/src/test/java/io/deephaven/base/stats/HistogramPower2Test.java index b3dc60075e5..c0d162dad6d 100644 --- a/Base/src/test/java/io/deephaven/base/stats/HistogramPower2Test.java +++ b/Base/src/test/java/io/deephaven/base/stats/HistogramPower2Test.java @@ -15,8 
+15,7 @@ public void setUp() { } public void testSample() throws Exception { - Item testItem = - Stats.makeItem("HistogramPower2Test", "testData", HistogramPower2.FACTORY, NOW); + Item testItem = Stats.makeItem("HistogramPower2Test", "testData", HistogramPower2.FACTORY, NOW); Value testNewHistoState = testItem.getValue(); assertEquals(testNewHistoState.getTypeTag(), 'N'); @@ -35,29 +34,29 @@ public void testSample() throws Exception { // should have a count of 1 in bin[1]..bin[63]; bin[0]=2 Stats.update(new ItemUpdateListener() { - public void handleItemUpdated(Item item, long now, long appNow, int intervalIndex, - long intervalMillis, String intervalName) { + public void handleItemUpdated(Item item, long now, long appNow, int intervalIndex, long intervalMillis, + String intervalName) { // Value v = item.getValue(); HistogramPower2 nh; nh = (HistogramPower2) item.getValue(); History history = nh.getHistory(); StringBuilder sb = new StringBuilder(); sb.append("STAT") - .append(',').append(intervalName) - .append(',').append(now / 1000.) - .append(',').append(appNow / 1000.) - .append(',').append(nh.getTypeTag()) - .append(',').append(item.getGroupName()) - .append('.').append(item.getName()) - .append(',').append(history.getN(intervalIndex, 1)) - .append(',').append(history.getSum(intervalIndex, 1)) - .append(',').append(history.getLast(intervalIndex, 1)) - .append(',').append(history.getMin(intervalIndex, 1)) - .append(',').append(history.getMax(intervalIndex, 1)) - .append(',').append(history.getAvg(intervalIndex, 1)) - .append(',').append(history.getSum2(intervalIndex, 1)) - .append(',').append(history.getStdev(intervalIndex, 1)) - .append(',').append(nh.getHistogramString()); + .append(',').append(intervalName) + .append(',').append(now / 1000.) + .append(',').append(appNow / 1000.) 
+ .append(',').append(nh.getTypeTag()) + .append(',').append(item.getGroupName()) + .append('.').append(item.getName()) + .append(',').append(history.getN(intervalIndex, 1)) + .append(',').append(history.getSum(intervalIndex, 1)) + .append(',').append(history.getLast(intervalIndex, 1)) + .append(',').append(history.getMin(intervalIndex, 1)) + .append(',').append(history.getMax(intervalIndex, 1)) + .append(',').append(history.getAvg(intervalIndex, 1)) + .append(',').append(history.getSum2(intervalIndex, 1)) + .append(',').append(history.getStdev(intervalIndex, 1)) + .append(',').append(nh.getHistogramString()); System.out.println(sb); } diff --git a/Base/src/test/java/io/deephaven/base/stats/HistogramStateTest.java b/Base/src/test/java/io/deephaven/base/stats/HistogramStateTest.java index ec16e906e64..566ea30b4f5 100644 --- a/Base/src/test/java/io/deephaven/base/stats/HistogramStateTest.java +++ b/Base/src/test/java/io/deephaven/base/stats/HistogramStateTest.java @@ -11,8 +11,7 @@ public class HistogramStateTest extends TestCase { static long NOW = 123456789L; public void testSample() throws Exception { - Item testItem = - Stats.makeItem("HistogramStateTest", "testData", HistogramState.FACTORY, NOW, + Item testItem = Stats.makeItem("HistogramStateTest", "testData", HistogramState.FACTORY, NOW, new HistogramState.Spec("HistogramStateTest", "testDate", 0, 100, 10)); Value testHistogram = testItem.getValue(); @@ -25,26 +24,26 @@ public void testSample() throws Exception { // This should print 10 invocations every time Stats.update(new ItemUpdateListener() { - public void handleItemUpdated(Item item, long now, long appNow, int intervalIndex, - long intervalMillis, String intervalName) { + public void handleItemUpdated(Item item, long now, long appNow, int intervalIndex, long intervalMillis, + String intervalName) { Value v = item.getValue(); History history = v.getHistory(); StringBuilder sb = new StringBuilder(); sb.append("STAT") - .append(',').append(intervalName) - 
.append(',').append(now / 1000.) - .append(',').append(appNow / 1000.) - .append(',').append(v.getTypeTag()) - .append(',').append(item.getGroupName()) - .append('.').append(item.getName()) - .append(',').append(history.getN(intervalIndex, 1)) - .append(',').append(history.getSum(intervalIndex, 1)) - .append(',').append(history.getLast(intervalIndex, 1)) - .append(',').append(history.getMin(intervalIndex, 1)) - .append(',').append(history.getMax(intervalIndex, 1)) - .append(',').append(history.getAvg(intervalIndex, 1)) - .append(',').append(history.getSum2(intervalIndex, 1)) - .append(',').append(history.getStdev(intervalIndex, 1)); + .append(',').append(intervalName) + .append(',').append(now / 1000.) + .append(',').append(appNow / 1000.) + .append(',').append(v.getTypeTag()) + .append(',').append(item.getGroupName()) + .append('.').append(item.getName()) + .append(',').append(history.getN(intervalIndex, 1)) + .append(',').append(history.getSum(intervalIndex, 1)) + .append(',').append(history.getLast(intervalIndex, 1)) + .append(',').append(history.getMin(intervalIndex, 1)) + .append(',').append(history.getMax(intervalIndex, 1)) + .append(',').append(history.getAvg(intervalIndex, 1)) + .append(',').append(history.getSum2(intervalIndex, 1)) + .append(',').append(history.getStdev(intervalIndex, 1)); System.out.println(sb); } }, NOW + 1000, NOW + 1000, 0); diff --git a/Base/src/test/java/io/deephaven/base/stats/TestValue.java b/Base/src/test/java/io/deephaven/base/stats/TestValue.java index 57e35bf779e..a9a6c4b495d 100644 --- a/Base/src/test/java/io/deephaven/base/stats/TestValue.java +++ b/Base/src/test/java/io/deephaven/base/stats/TestValue.java @@ -64,8 +64,7 @@ private void checkValue(Function.Unary factory) { assertEquals(5, value.getMax()); History history = value.getHistory(); - // issue: actually, interval 0 did not turn over, so this should probably return -1 (and fix - // Value.update too) + // issue: actually, interval 0 did not turn over, so this should 
probably return -1 (and fix Value.update too) assertEquals(0, history.update(value, 1000L)); assertEquals(5, history.getN(History.INTERVAL_1S_INDEX, 0)); diff --git a/Base/src/test/java/io/deephaven/base/string/cache/TestCharSequenceAdapterBuilder.java b/Base/src/test/java/io/deephaven/base/string/cache/TestCharSequenceAdapterBuilder.java index cfcf3c6cfee..75d29873bca 100644 --- a/Base/src/test/java/io/deephaven/base/string/cache/TestCharSequenceAdapterBuilder.java +++ b/Base/src/test/java/io/deephaven/base/string/cache/TestCharSequenceAdapterBuilder.java @@ -20,8 +20,7 @@ public void testBuilder() { assertEquals(builder.toString(), "hello"); assertEquals(builder.hashCode(), "hello".hashCode()); - assertTrue(CharSequenceUtils.contentEquals(builder.append("hello world again", 5, 6), - "hello world")); + assertTrue(CharSequenceUtils.contentEquals(builder.append("hello world again", 5, 6), "hello world")); assertEquals(builder.toString(), "hello world"); assertEquals(builder.hashCode(), "hello world".hashCode()); @@ -29,13 +28,11 @@ public void testBuilder() { assertEquals(builder.toString(), "hello world "); assertEquals(builder.hashCode(), "hello world ".hashCode()); - assertTrue(CharSequenceUtils.contentEquals(builder.append("again".toCharArray()), - "hello world again")); + assertTrue(CharSequenceUtils.contentEquals(builder.append("again".toCharArray()), "hello world again")); assertEquals(builder.toString(), "hello world again"); assertEquals(builder.hashCode(), "hello world again".hashCode()); - assertTrue(CharSequenceUtils.contentEquals(builder.append("!?!?".toCharArray(), 2, 1), - "hello world again!")); + assertTrue(CharSequenceUtils.contentEquals(builder.append("!?!?".toCharArray(), 2, 1), "hello world again!")); assertEquals(builder.toString(), "hello world again!"); assertEquals(builder.hashCode(), "hello world again!".hashCode()); @@ -44,8 +41,7 @@ public void testBuilder() { assertEquals(builder.toString(), ""); assertEquals(builder.hashCode(), 
"".hashCode()); - assertTrue( - CharSequenceUtils.contentEquals(builder.append("dancing".getBytes()), "dancing")); + assertTrue(CharSequenceUtils.contentEquals(builder.append("dancing".getBytes()), "dancing")); assertEquals(builder.toString(), "dancing"); assertEquals(builder.hashCode(), "dancing".hashCode()); @@ -53,8 +49,8 @@ public void testBuilder() { assertEquals(builder.toString(), "dancing "); assertEquals(builder.hashCode(), "dancing ".hashCode()); - assertTrue(CharSequenceUtils.contentEquals( - builder.append("dancing with the stars!".getBytes(), 17, 5), "dancing stars")); + assertTrue(CharSequenceUtils.contentEquals(builder.append("dancing with the stars!".getBytes(), 17, 5), + "dancing stars")); assertEquals(builder.toString(), "dancing stars"); assertEquals(builder.hashCode(), "dancing stars".hashCode()); } diff --git a/Base/src/test/java/io/deephaven/base/string/cache/TestCompressedString.java b/Base/src/test/java/io/deephaven/base/string/cache/TestCompressedString.java index 191daca32ce..ab5671efb47 100644 --- a/Base/src/test/java/io/deephaven/base/string/cache/TestCompressedString.java +++ b/Base/src/test/java/io/deephaven/base/string/cache/TestCompressedString.java @@ -72,20 +72,17 @@ public void testContentEquality() { assertEquals(cs.hashCode(), "dancing".hashCode()); assertTrue(Arrays.equals(cs.getData(), "dancing".getBytes())); - assertTrue(CharSequenceUtils - .contentEquals(cs = cs("dancing with the stars!".getBytes(), 17, 5), "stars")); + assertTrue(CharSequenceUtils.contentEquals(cs = cs("dancing with the stars!".getBytes(), 17, 5), "stars")); assertEquals(cs.toString(), "stars"); assertEquals(cs.hashCode(), "stars".hashCode()); assertTrue(Arrays.equals(cs.getData(), "stars".getBytes())); - assertTrue( - CharSequenceUtils.contentEquals(cs = cs(ByteBuffer.wrap("happy".getBytes())), "happy")); + assertTrue(CharSequenceUtils.contentEquals(cs = cs(ByteBuffer.wrap("happy".getBytes())), "happy")); assertEquals(cs.toString(), "happy"); 
assertEquals(cs.hashCode(), "happy".hashCode()); assertTrue(Arrays.equals(cs.getData(), "happy".getBytes())); - assertTrue(CharSequenceUtils - .contentEquals(cs = cs(ByteBuffer.wrap("hedgehog!".getBytes()), 5, 3), "hog")); + assertTrue(CharSequenceUtils.contentEquals(cs = cs(ByteBuffer.wrap("hedgehog!".getBytes()), 5, 3), "hog")); assertEquals(cs.toString(), "hog"); assertEquals(cs.hashCode(), "hog".hashCode()); assertTrue(Arrays.equals(cs.getData(), "hog".getBytes())); diff --git a/Base/src/test/java/io/deephaven/base/string/cache/TestConcurrentBoundedStringCache.java b/Base/src/test/java/io/deephaven/base/string/cache/TestConcurrentBoundedStringCache.java index 7812a98c531..31d4a1926c9 100644 --- a/Base/src/test/java/io/deephaven/base/string/cache/TestConcurrentBoundedStringCache.java +++ b/Base/src/test/java/io/deephaven/base/string/cache/TestConcurrentBoundedStringCache.java @@ -13,14 +13,14 @@ public class TestConcurrentBoundedStringCache extends TestCase { @SuppressWarnings("unchecked") private static StringCacheTypeAdapter[] COMPRESSED_TYPE_ADAPTERS = - (StringCacheTypeAdapter[]) new StringCacheTypeAdapter[] { - StringCacheTypeAdapterCompressedStringImpl.INSTANCE, - StringCacheTypeAdapterMappedCompressedStringImpl.INSTANCE - }; + (StringCacheTypeAdapter[]) new StringCacheTypeAdapter[] { + StringCacheTypeAdapterCompressedStringImpl.INSTANCE, + StringCacheTypeAdapterMappedCompressedStringImpl.INSTANCE + }; public void testStringPopulation() { final StringCache cache = - new ConcurrentBoundedStringCache<>(StringCacheTypeAdapterStringImpl.INSTANCE, 10, 2); + new ConcurrentBoundedStringCache<>(StringCacheTypeAdapterStringImpl.INSTANCE, 10, 2); final String s1 = new String(new char[] {'a', 'b', 'c', 'd'}); final String s2 = "abcd"; assertSame(s1, cache.getCachedString(s1)); @@ -30,7 +30,7 @@ public void testStringPopulation() { public void testByteBufferPopulation() { final StringCache cache = - new 
ConcurrentBoundedStringCache<>(StringCacheTypeAdapterStringImpl.INSTANCE, 10, 2); + new ConcurrentBoundedStringCache<>(StringCacheTypeAdapterStringImpl.INSTANCE, 10, 2); final ByteBufferCharSequenceAdapterImpl adapter = new ByteBufferCharSequenceAdapterImpl(); final String s1 = new String(new char[] {'a', 'b', 'c', 'd'}); @@ -50,7 +50,7 @@ public void testByteBufferPopulation() { public void testByteArrayPopulation() { final StringCache cache = - new ConcurrentBoundedStringCache<>(StringCacheTypeAdapterStringImpl.INSTANCE, 10, 2); + new ConcurrentBoundedStringCache<>(StringCacheTypeAdapterStringImpl.INSTANCE, 10, 2); final ByteArrayCharSequenceAdapterImpl adapter = new ByteArrayCharSequenceAdapterImpl(); final String s1 = new String(new char[] {'a', 'b', 'c', 'd'}); @@ -71,9 +71,8 @@ public void testByteArrayPopulation() { public void testStringPopulationCompressed() { for (StringCacheTypeAdapter typeAdapter : COMPRESSED_TYPE_ADAPTERS) { final StringCache cache = - new ConcurrentBoundedStringCache<>(typeAdapter, 10, 2); - final AbstractCompressedString s1 = - typeAdapter.create(new String(new char[] {'a', 'b', 'c', 'd'})); + new ConcurrentBoundedStringCache<>(typeAdapter, 10, 2); + final AbstractCompressedString s1 = typeAdapter.create(new String(new char[] {'a', 'b', 'c', 'd'})); final AbstractCompressedString s2 = typeAdapter.create("abcd"); assertSame(s1, cache.getCachedString(s1)); assertSame(s1, cache.getCachedString(s2)); @@ -84,12 +83,10 @@ public void testStringPopulationCompressed() { public void testByteBufferPopulationCompressed() { for (StringCacheTypeAdapter typeAdapter : COMPRESSED_TYPE_ADAPTERS) { final StringCache cache = - new ConcurrentBoundedStringCache<>(typeAdapter, 10, 2); - final ByteBufferCharSequenceAdapterImpl adapter = - new ByteBufferCharSequenceAdapterImpl(); + new ConcurrentBoundedStringCache<>(typeAdapter, 10, 2); + final ByteBufferCharSequenceAdapterImpl adapter = new ByteBufferCharSequenceAdapterImpl(); - final 
AbstractCompressedString s1 = - typeAdapter.create(new String(new char[] {'a', 'b', 'c', 'd'})); + final AbstractCompressedString s1 = typeAdapter.create(new String(new char[] {'a', 'b', 'c', 'd'})); assertSame(s1, cache.getCachedString(s1)); adapter.set(ByteBuffer.wrap(new byte[] {'a', 'b', 'c', 'd'}), 0, 4); assertTrue(CharSequenceUtils.contentEquals(adapter, s1)); @@ -108,11 +105,10 @@ public void testByteBufferPopulationCompressed() { public void testByteArrayPopulationCompressed() { for (StringCacheTypeAdapter typeAdapter : COMPRESSED_TYPE_ADAPTERS) { final StringCache cache = - new ConcurrentBoundedStringCache<>(typeAdapter, 10, 2); + new ConcurrentBoundedStringCache<>(typeAdapter, 10, 2); final ByteArrayCharSequenceAdapterImpl adapter = new ByteArrayCharSequenceAdapterImpl(); - final AbstractCompressedString s1 = - typeAdapter.create(new String(new char[] {'a', 'b', 'c', 'd'})); + final AbstractCompressedString s1 = typeAdapter.create(new String(new char[] {'a', 'b', 'c', 'd'})); assertSame(s1, cache.getCachedString(s1)); adapter.set(new byte[] {'a', 'b', 'c', 'd'}, 0, 4); assertTrue(CharSequenceUtils.contentEquals(adapter, s1)); diff --git a/Base/src/test/java/io/deephaven/base/string/cache/TestConcurrentUnboundedStringCache.java b/Base/src/test/java/io/deephaven/base/string/cache/TestConcurrentUnboundedStringCache.java index c0a8dbe9a34..10e620a5626 100644 --- a/Base/src/test/java/io/deephaven/base/string/cache/TestConcurrentUnboundedStringCache.java +++ b/Base/src/test/java/io/deephaven/base/string/cache/TestConcurrentUnboundedStringCache.java @@ -13,16 +13,16 @@ public class TestConcurrentUnboundedStringCache extends TestCase { @SuppressWarnings("unchecked") private static StringCacheTypeAdapter[] COMPRESSED_TYPE_ADAPTERS = - (StringCacheTypeAdapter[]) new StringCacheTypeAdapter[] { - StringCacheTypeAdapterCompressedStringImpl.INSTANCE, - StringCacheTypeAdapterMappedCompressedStringImpl.INSTANCE - }; + (StringCacheTypeAdapter[]) new 
StringCacheTypeAdapter[] { + StringCacheTypeAdapterCompressedStringImpl.INSTANCE, + StringCacheTypeAdapterMappedCompressedStringImpl.INSTANCE + }; public void testStringPopulation() { - final StringCache debugCache = new ConcurrentUnboundedStringCache<>( - StringCacheTypeAdapterStringImpl.INSTANCE, 10, true); - final StringCache regularCache = new ConcurrentUnboundedStringCache<>( - StringCacheTypeAdapterStringImpl.INSTANCE, 10, false); + final StringCache debugCache = + new ConcurrentUnboundedStringCache<>(StringCacheTypeAdapterStringImpl.INSTANCE, 10, true); + final StringCache regularCache = + new ConcurrentUnboundedStringCache<>(StringCacheTypeAdapterStringImpl.INSTANCE, 10, false); for (StringCache cache : new StringCache[] {debugCache, regularCache}) { final String s1 = new String(new char[] {'a', 'b', 'c', 'd'}); final String s2 = "abcd"; @@ -33,10 +33,10 @@ public void testStringPopulation() { } public void testCharSequencePopulation() { - final StringCache debugCache = new ConcurrentUnboundedStringCache<>( - StringCacheTypeAdapterStringImpl.INSTANCE, 10, true); - final StringCache regularCache = new ConcurrentUnboundedStringCache<>( - StringCacheTypeAdapterStringImpl.INSTANCE, 10, false); + final StringCache debugCache = + new ConcurrentUnboundedStringCache<>(StringCacheTypeAdapterStringImpl.INSTANCE, 10, true); + final StringCache regularCache = + new ConcurrentUnboundedStringCache<>(StringCacheTypeAdapterStringImpl.INSTANCE, 10, false); // noinspection unchecked for (StringCache cache : new StringCache[] {debugCache, regularCache}) { final StringBuilder builder = new StringBuilder(); @@ -60,14 +60,13 @@ public void testCharSequencePopulation() { } public void testByteBufferPopulation() { - final StringCache debugCache = new ConcurrentUnboundedStringCache<>( - StringCacheTypeAdapterStringImpl.INSTANCE, 10, true); - final StringCache regularCache = new ConcurrentUnboundedStringCache<>( - StringCacheTypeAdapterStringImpl.INSTANCE, 10, false); + final 
StringCache debugCache = + new ConcurrentUnboundedStringCache<>(StringCacheTypeAdapterStringImpl.INSTANCE, 10, true); + final StringCache regularCache = + new ConcurrentUnboundedStringCache<>(StringCacheTypeAdapterStringImpl.INSTANCE, 10, false); // noinspection unchecked for (StringCache cache : new StringCache[] {debugCache, regularCache}) { - final ByteBufferCharSequenceAdapterImpl adapter = - new ByteBufferCharSequenceAdapterImpl(); + final ByteBufferCharSequenceAdapterImpl adapter = new ByteBufferCharSequenceAdapterImpl(); final String s1 = new String(new char[] {'a', 'b', 'c', 'd'}); assertSame(s1, cache.getCachedString(s1)); @@ -88,10 +87,10 @@ public void testByteBufferPopulation() { } public void testByteArrayPopulation() { - final StringCache debugCache = new ConcurrentUnboundedStringCache<>( - StringCacheTypeAdapterStringImpl.INSTANCE, 10, true); - final StringCache regularCache = new ConcurrentUnboundedStringCache<>( - StringCacheTypeAdapterStringImpl.INSTANCE, 10, false); + final StringCache debugCache = + new ConcurrentUnboundedStringCache<>(StringCacheTypeAdapterStringImpl.INSTANCE, 10, true); + final StringCache regularCache = + new ConcurrentUnboundedStringCache<>(StringCacheTypeAdapterStringImpl.INSTANCE, 10, false); // noinspection unchecked for (StringCache cache : new StringCache[] {debugCache, regularCache}) { final ByteArrayCharSequenceAdapterImpl adapter = new ByteArrayCharSequenceAdapterImpl(); @@ -115,14 +114,12 @@ public void testByteArrayPopulation() { public void testStringPopulationCompressed() { for (StringCacheTypeAdapter typeAdapter : COMPRESSED_TYPE_ADAPTERS) { final StringCache debugCache = - new ConcurrentUnboundedStringCache<>(typeAdapter, 10, true); + new ConcurrentUnboundedStringCache<>(typeAdapter, 10, true); final StringCache regularCache = - new ConcurrentUnboundedStringCache<>(typeAdapter, 10, false); + new ConcurrentUnboundedStringCache<>(typeAdapter, 10, false); // noinspection unchecked - for (StringCache cache : new 
StringCache[] {debugCache, - regularCache}) { - final AbstractCompressedString s1 = - typeAdapter.create(new String(new char[] {'a', 'b', 'c', 'd'})); + for (StringCache cache : new StringCache[] {debugCache, regularCache}) { + final AbstractCompressedString s1 = typeAdapter.create(new String(new char[] {'a', 'b', 'c', 'd'})); final AbstractCompressedString s2 = typeAdapter.create("abcd"); assertSame(s1, cache.getCachedString(s1)); assertSame(s1, cache.getCachedString(s2)); @@ -134,17 +131,14 @@ public void testStringPopulationCompressed() { public void testByteBufferPopulationCompressed() { for (StringCacheTypeAdapter typeAdapter : COMPRESSED_TYPE_ADAPTERS) { final StringCache debugCache = - new ConcurrentUnboundedStringCache<>(typeAdapter, 10, true); + new ConcurrentUnboundedStringCache<>(typeAdapter, 10, true); final StringCache regularCache = - new ConcurrentUnboundedStringCache<>(typeAdapter, 10, false); + new ConcurrentUnboundedStringCache<>(typeAdapter, 10, false); // noinspection unchecked - for (StringCache cache : new StringCache[] {debugCache, - regularCache}) { - final ByteBufferCharSequenceAdapterImpl adapter = - new ByteBufferCharSequenceAdapterImpl(); + for (StringCache cache : new StringCache[] {debugCache, regularCache}) { + final ByteBufferCharSequenceAdapterImpl adapter = new ByteBufferCharSequenceAdapterImpl(); - final AbstractCompressedString s1 = - typeAdapter.create(new String(new char[] {'a', 'b', 'c', 'd'})); + final AbstractCompressedString s1 = typeAdapter.create(new String(new char[] {'a', 'b', 'c', 'd'})); assertSame(s1, cache.getCachedString(s1)); adapter.set(ByteBuffer.wrap(new byte[] {'a', 'b', 'c', 'd'}), 0, 4); assertTrue(CharSequenceUtils.contentEquals(adapter, s1)); @@ -153,8 +147,7 @@ public void testByteBufferPopulationCompressed() { adapter.set(ByteBuffer.wrap(new byte[] {' ', 'h', 'e', 'l', 'l', 'o', 'w'}), 1, 5); assertTrue(CharSequenceUtils.contentEquals(adapter, "hello")); - final AbstractCompressedString s2 = - 
typeAdapter.create(cache.getCachedString(adapter)); + final AbstractCompressedString s2 = typeAdapter.create(cache.getCachedString(adapter)); assertTrue(CharSequenceUtils.contentEquals(adapter, s2)); assertSame(s2, cache.getCachedString("hello")); adapter.clear(); @@ -165,17 +158,14 @@ public void testByteBufferPopulationCompressed() { public void testByteArrayPopulationCompressed() { for (StringCacheTypeAdapter typeAdapter : COMPRESSED_TYPE_ADAPTERS) { final StringCache debugCache = - new ConcurrentUnboundedStringCache<>(typeAdapter, 10, true); + new ConcurrentUnboundedStringCache<>(typeAdapter, 10, true); final StringCache regularCache = - new ConcurrentUnboundedStringCache<>(typeAdapter, 10, false); + new ConcurrentUnboundedStringCache<>(typeAdapter, 10, false); // noinspection unchecked - for (StringCache cache : new StringCache[] {debugCache, - regularCache}) { - final ByteArrayCharSequenceAdapterImpl adapter = - new ByteArrayCharSequenceAdapterImpl(); + for (StringCache cache : new StringCache[] {debugCache, regularCache}) { + final ByteArrayCharSequenceAdapterImpl adapter = new ByteArrayCharSequenceAdapterImpl(); - final AbstractCompressedString s1 = - typeAdapter.create(new String(new char[] {'a', 'b', 'c', 'd'})); + final AbstractCompressedString s1 = typeAdapter.create(new String(new char[] {'a', 'b', 'c', 'd'})); assertSame(s1, cache.getCachedString(s1)); adapter.set(new byte[] {'a', 'b', 'c', 'd'}, 0, 4); assertTrue(CharSequenceUtils.contentEquals(adapter, s1)); diff --git a/Base/src/test/java/io/deephaven/base/string/cache/TestMappedCompressedString.java b/Base/src/test/java/io/deephaven/base/string/cache/TestMappedCompressedString.java index 62f64a913ee..f6f8fe51fc9 100644 --- a/Base/src/test/java/io/deephaven/base/string/cache/TestMappedCompressedString.java +++ b/Base/src/test/java/io/deephaven/base/string/cache/TestMappedCompressedString.java @@ -23,46 +23,40 @@ public void testEquality() { assertEquals(mcs.hashCode(), "".hashCode()); 
assertTrue(Arrays.equals(mcs.getData(), "".getBytes())); - assertTrue( - CharSequenceUtils.contentEquals(mcs = new MappedCompressedString("hello"), "hello")); + assertTrue(CharSequenceUtils.contentEquals(mcs = new MappedCompressedString("hello"), "hello")); assertEquals(mcs.toString(), "hello"); assertEquals(mcs.hashCode(), "hello".hashCode()); assertTrue(Arrays.equals(mcs.getData(), "hello".getBytes())); - assertTrue(CharSequenceUtils - .contentEquals(mcs = new MappedCompressedString("again".toCharArray()), "again")); + assertTrue(CharSequenceUtils.contentEquals(mcs = new MappedCompressedString("again".toCharArray()), "again")); assertEquals(mcs.toString(), "again"); assertEquals(mcs.hashCode(), "again".hashCode()); assertTrue(Arrays.equals(mcs.getData(), "again".getBytes())); - assertTrue(CharSequenceUtils - .contentEquals(mcs = new MappedCompressedString("!?!?".toCharArray(), 2, 1), "!")); + assertTrue(CharSequenceUtils.contentEquals(mcs = new MappedCompressedString("!?!?".toCharArray(), 2, 1), "!")); assertEquals(mcs.toString(), "!"); assertEquals(mcs.hashCode(), "!".hashCode()); assertTrue(Arrays.equals(mcs.getData(), "!".getBytes())); - assertTrue(CharSequenceUtils - .contentEquals(mcs = new MappedCompressedString("dancing".getBytes()), "dancing")); + assertTrue(CharSequenceUtils.contentEquals(mcs = new MappedCompressedString("dancing".getBytes()), "dancing")); assertEquals(mcs.toString(), "dancing"); assertEquals(mcs.hashCode(), "dancing".hashCode()); assertTrue(Arrays.equals(mcs.getData(), "dancing".getBytes())); - assertTrue(CharSequenceUtils.contentEquals( - mcs = new MappedCompressedString("dancing with the stars!".getBytes(), 17, 5), - "stars")); + assertTrue(CharSequenceUtils + .contentEquals(mcs = new MappedCompressedString("dancing with the stars!".getBytes(), 17, 5), "stars")); assertEquals(mcs.toString(), "stars"); assertEquals(mcs.hashCode(), "stars".hashCode()); assertTrue(Arrays.equals(mcs.getData(), "stars".getBytes())); - 
assertTrue(CharSequenceUtils.contentEquals( - mcs = new MappedCompressedString(ByteBuffer.wrap("happy".getBytes())), "happy")); + assertTrue(CharSequenceUtils + .contentEquals(mcs = new MappedCompressedString(ByteBuffer.wrap("happy".getBytes())), "happy")); assertEquals(mcs.toString(), "happy"); assertEquals(mcs.hashCode(), "happy".hashCode()); assertTrue(Arrays.equals(mcs.getData(), "happy".getBytes())); - assertTrue(CharSequenceUtils.contentEquals( - mcs = new MappedCompressedString(ByteBuffer.wrap("hedgehog!".getBytes()), 5, 3), - "hog")); + assertTrue(CharSequenceUtils + .contentEquals(mcs = new MappedCompressedString(ByteBuffer.wrap("hedgehog!".getBytes()), 5, 3), "hog")); assertEquals(mcs.toString(), "hog"); assertEquals(mcs.hashCode(), "hog".hashCode()); assertTrue(Arrays.equals(mcs.getData(), "hog".getBytes())); diff --git a/Base/src/test/java/io/deephaven/base/testing/BaseArrayTestCase.java b/Base/src/test/java/io/deephaven/base/testing/BaseArrayTestCase.java index 32dde476d63..c80183634cf 100644 --- a/Base/src/test/java/io/deephaven/base/testing/BaseArrayTestCase.java +++ b/Base/src/test/java/io/deephaven/base/testing/BaseArrayTestCase.java @@ -10,8 +10,7 @@ public static void assertEquals(String name, final double[][] a, final double[][ assertEquals(name, a, b, 1e-10); } - public static void assertEquals(String name, final double[][] a, final double[][] b, - final double tolerance) { + public static void assertEquals(String name, final double[][] a, final double[][] b, final double tolerance) { if (a == null || b == null) { assertTrue(a == b); } else { @@ -27,8 +26,7 @@ public static void assertEquals(final double[][] a, final double[][] b) { assertEquals("array", a, b); } - public static void assertEquals(final double[][] a, final double[][] b, - final double tolerance) { + public static void assertEquals(final double[][] a, final double[][] b, final double tolerance) { assertEquals("array", a, b, tolerance); } @@ -312,8 +310,7 @@ public static String 
compareArray(final String name, final boolean[] a, final bo // Array comparison functions - public static String compareArray(final String name, final float[] a, final float[] b, - float tolerance) { + public static String compareArray(final String name, final float[] a, final float[] b, float tolerance) { if (a == null) { if (b == null) { return null; @@ -336,8 +333,7 @@ public static String compareArray(final String name, final float[] a, final floa return err; } - public static String checkMagnitudeArray(final String name, final float[] a, final float[] b, - float percentage) { + public static String checkMagnitudeArray(final String name, final float[] a, final float[] b, float percentage) { if (a == null) { if (b == null) { return null; @@ -363,8 +359,7 @@ public static String checkMagnitudeArray(final String name, final float[] a, fin } - public static String compareArray(final String name, final double[] a, final double[] b, - double tolerance) { + public static String compareArray(final String name, final double[] a, final double[] b, double tolerance) { if (a == null) { if (b == null) { return null; @@ -387,8 +382,7 @@ public static String compareArray(final String name, final double[] a, final dou return err; } - public static String checkMagnitudeArray(final String name, final double[] a, final double[] b, - double percentage) { + public static String checkMagnitudeArray(final String name, final double[] a, final double[] b, double percentage) { if (a == null) { if (b == null) { return null; @@ -473,8 +467,7 @@ private static String compare(final String name, final boolean a, final boolean } - private static String compare(final String name, final double a, final double b, - double tolerance) { + private static String compare(final String name, final double a, final double b, double tolerance) { if (Math.abs(a - b) > tolerance || (Double.isNaN(a) ^ Double.isNaN(b))) { return name + " expected: <" + a + "> but was: <" + b + ">"; } else { diff --git 
a/Base/src/test/java/io/deephaven/base/testing/BaseCachedJMockTestCase.java b/Base/src/test/java/io/deephaven/base/testing/BaseCachedJMockTestCase.java index 2fe7165833f..b9242c933ac 100644 --- a/Base/src/test/java/io/deephaven/base/testing/BaseCachedJMockTestCase.java +++ b/Base/src/test/java/io/deephaven/base/testing/BaseCachedJMockTestCase.java @@ -41,11 +41,11 @@ protected void autoInstantiate(Object parentObject, Field field) { try { Constructor constructor = type.getConstructor(); Object childObject = constructor.newInstance(); - setAutoField(field, parentObject, childObject, "auto-instantiate " - + type.getSimpleName() + " field " + field.getName()); + setAutoField(field, parentObject, childObject, + "auto-instantiate " + type.getSimpleName() + " field " + field.getName()); return; } catch (NoSuchMethodException | InstantiationException | IllegalAccessException - | InvocationTargetException e) { + | InvocationTargetException e) { // fall through and call super implementation } } @@ -128,13 +128,11 @@ public Object invoke(Invocation invocation) throws Throwable { // ---------------------------------------------------------------- public static class CachingImposteriser implements Imposteriser { - public static final BaseCachedJMockTestCase.CachingImposteriser INSTANCE = - new CachingImposteriser(); + public static final BaseCachedJMockTestCase.CachingImposteriser INSTANCE = new CachingImposteriser(); private final static Class[] CONSTRUCTOR_PARAMS = {InvocationHandler.class}; - private static Map> proxyInfoToConstructorMap = - new HashMap<>(); + private static Map> proxyInfoToConstructorMap = new HashMap<>(); // ---------------------------------------------------------------- @Override // from Imposteriser @@ -144,8 +142,7 @@ public boolean canImposterise(Class type) { // ---------------------------------------------------------------- @Override // from Imposteriser - public T imposterise(final Invokable mockObject, Class mockedType, - Class... 
ancillaryTypes) { + public T imposterise(final Invokable mockObject, Class mockedType, Class... ancillaryTypes) { ProxyInfo proxyInfo = new ProxyInfo(mockedType, ancillaryTypes); Function.Unary constructor = proxyInfoToConstructorMap.get(proxyInfo); if (null == constructor) { @@ -182,13 +179,11 @@ private Function.Unary createInterfaceConstructor(ProxyInfo proxyI public Object call(final Invokable invokable) { try { return constructor.newInstance(new InvocationHandler() { - public Object invoke(Object proxy, Method method, Object[] args) - throws Throwable { + public Object invoke(Object proxy, Method method, Object[] args) throws Throwable { return invokable.invoke(new Invocation(proxy, method, args)); } }); - } catch (InstantiationException | IllegalAccessException - | InvocationTargetException e) { + } catch (InstantiationException | IllegalAccessException | InvocationTargetException e) { throw Assert.exceptionNeverCaught(e); } } @@ -200,8 +195,8 @@ public Object invoke(Object proxy, Method method, Object[] args) private Function.Unary createClassConstructor(final ProxyInfo proxyInfo) { final Class proxyClass; try { - proxyClass = (Class) CREATE_PROXY_CLASS.invoke(ClassImposteriser.INSTANCE, - proxyInfo.mockedType, proxyInfo.ancillaryTypes); + proxyClass = (Class) CREATE_PROXY_CLASS.invoke(ClassImposteriser.INSTANCE, proxyInfo.mockedType, + proxyInfo.ancillaryTypes); } catch (IllegalAccessException | InvocationTargetException e) { throw Assert.exceptionNeverCaught(e); } @@ -211,13 +206,10 @@ private Function.Unary createClassConstructor(final ProxyInfo prox public Object call(final Invokable invokable) { try { try { - SET_CONSTRUCTORS_ACCESSIBLE.invoke(ClassImposteriser.INSTANCE, - proxyInfo.mockedType, true); - return CREATE_PROXY.invoke(ClassImposteriser.INSTANCE, proxyClass, - invokable); + SET_CONSTRUCTORS_ACCESSIBLE.invoke(ClassImposteriser.INSTANCE, proxyInfo.mockedType, true); + return CREATE_PROXY.invoke(ClassImposteriser.INSTANCE, proxyClass, 
invokable); } finally { - SET_CONSTRUCTORS_ACCESSIBLE.invoke(ClassImposteriser.INSTANCE, - proxyInfo.mockedType, false); + SET_CONSTRUCTORS_ACCESSIBLE.invoke(ClassImposteriser.INSTANCE, proxyInfo.mockedType, false); } } catch (IllegalAccessException | InvocationTargetException e) { throw Assert.exceptionNeverCaught(e); @@ -231,14 +223,13 @@ public Object call(final Invokable invokable) { private static final Method SET_CONSTRUCTORS_ACCESSIBLE; static { try { - CREATE_PROXY = ClassImposteriser.class.getDeclaredMethod("proxy", Class.class, - Invokable.class); + CREATE_PROXY = ClassImposteriser.class.getDeclaredMethod("proxy", Class.class, Invokable.class); CREATE_PROXY.setAccessible(true); - CREATE_PROXY_CLASS = ClassImposteriser.class.getDeclaredMethod("proxyClass", - Class.class, Class[].class); + CREATE_PROXY_CLASS = + ClassImposteriser.class.getDeclaredMethod("proxyClass", Class.class, Class[].class); CREATE_PROXY_CLASS.setAccessible(true); - SET_CONSTRUCTORS_ACCESSIBLE = ClassImposteriser.class - .getDeclaredMethod("setConstructorsAccessible", Class.class, boolean.class); + SET_CONSTRUCTORS_ACCESSIBLE = ClassImposteriser.class.getDeclaredMethod("setConstructorsAccessible", + Class.class, boolean.class); SET_CONSTRUCTORS_ACCESSIBLE.setAccessible(true); } catch (NoSuchMethodException e) { throw Assert.exceptionNeverCaught(e); @@ -290,8 +281,7 @@ public int hashCode() { public String toString() { StringBuilder stringBuilder = new StringBuilder(); for (Class proxiedClass : proxiedClasses) { - stringBuilder.append(0 == stringBuilder.length() ? "[" : ", ") - .append(proxiedClass.getSimpleName()); + stringBuilder.append(0 == stringBuilder.length() ? 
"[" : ", ").append(proxiedClass.getSimpleName()); } return stringBuilder.append("]").toString(); } diff --git a/Base/src/test/java/io/deephaven/base/testing/CheckedMessageRegexp.java b/Base/src/test/java/io/deephaven/base/testing/CheckedMessageRegexp.java index b17964cbe5f..8bd995eb72f 100644 --- a/Base/src/test/java/io/deephaven/base/testing/CheckedMessageRegexp.java +++ b/Base/src/test/java/io/deephaven/base/testing/CheckedMessageRegexp.java @@ -11,8 +11,8 @@ // -------------------------------------------------------------------- /** - * The details of a message that should have been logged, when the message must be matched against a - * regular expression. For use with {@link LoggingRecorder}. + * The details of a message that should have been logged, when the message must be matched against a regular expression. + * For use with {@link LoggingRecorder}. */ public class CheckedMessageRegexp extends CheckedMessage { @@ -26,8 +26,7 @@ public CheckedMessageRegexp(Pattern pattern, Level level) { @Override public void checkMessage(String sRenderedMessage) { if (!m_pattern.matcher(sRenderedMessage).matches()) { - throw new ComparisonFailure("Could not match pattern.", m_pattern.toString(), - sRenderedMessage); + throw new ComparisonFailure("Could not match pattern.", m_pattern.toString(), sRenderedMessage); } } } diff --git a/Base/src/test/java/io/deephaven/base/testing/LoggingRecorder.java b/Base/src/test/java/io/deephaven/base/testing/LoggingRecorder.java index b3599e852f1..286c7ae1454 100644 --- a/Base/src/test/java/io/deephaven/base/testing/LoggingRecorder.java +++ b/Base/src/test/java/io/deephaven/base/testing/LoggingRecorder.java @@ -22,8 +22,8 @@ // -------------------------------------------------------------------- /** - * Temporarily intercepts logging for a given {@link Logger} and saves all logged messages to memory - * so that a unit test can verify the correct messages were logged. 
+ * Temporarily intercepts logging for a given {@link Logger} and saves all logged messages to memory so that a unit test + * can verify the correct messages were logged. *

* Usage: *

  • To start capturing (in {@link TestCase#setUp}): @@ -126,8 +126,7 @@ public void detach() { // ------------------------------------------------------------ public List getReportAndReset() { - LoggingEvent[] loggingEvents = - m_loggingEvents.toArray(new LoggingEvent[m_loggingEvents.size()]); + LoggingEvent[] loggingEvents = m_loggingEvents.toArray(new LoggingEvent[m_loggingEvents.size()]); m_loggingEvents.clear(); return Arrays.asList(loggingEvents); } @@ -162,9 +161,8 @@ public void assertMessagesLogged(CheckedMessage... checkedMessages) { checkedMessage.checkMessage(message.getRenderedMessage()); if (null != checkedMessage.getDetailFragment()) { - SimpleTestSupport.assertStringContains( - message.getThrowableInformation().getThrowable().toString(), - checkedMessage.getDetailFragment()); + SimpleTestSupport.assertStringContains(message.getThrowableInformation().getThrowable().toString(), + checkedMessage.getDetailFragment()); } junit.framework.Assert.assertEquals(checkedMessage.getLevel(), message.getLevel()); } @@ -187,11 +185,10 @@ public void assertMessagesLoggedInAnyOrder(CheckedMessage... checkedMessages) { if (null != checkedMessage.getDetailFragment()) { SimpleTestSupport.assertStringContains( - message.getThrowableInformation().getThrowable().toString(), - checkedMessage.getDetailFragment()); + message.getThrowableInformation().getThrowable().toString(), + checkedMessage.getDetailFragment()); } - junit.framework.Assert.assertEquals(checkedMessage.getLevel(), - message.getLevel()); + junit.framework.Assert.assertEquals(checkedMessage.getLevel(), message.getLevel()); found = true; break; } catch (ComparisonFailure e) { @@ -199,8 +196,7 @@ public void assertMessagesLoggedInAnyOrder(CheckedMessage... 
checkedMessages) { } } if (!found) { - junit.framework.Assert - .fail("Could not find \"" + checkedMessage.getMessageFragment() + "\""); + junit.framework.Assert.fail("Could not find \"" + checkedMessage.getMessageFragment() + "\""); } } } @@ -210,9 +206,8 @@ public void discardLoggedMessagePotentiallyManyOfButOnly(CheckedMessage checkedM for (LoggingEvent message : messages) { checkedMessage.checkMessage(message.getRenderedMessage()); if (null != checkedMessage.getDetailFragment()) { - SimpleTestSupport.assertStringContains( - message.getThrowableInformation().getThrowable().toString(), - checkedMessage.getDetailFragment()); + SimpleTestSupport.assertStringContains(message.getThrowableInformation().getThrowable().toString(), + checkedMessage.getDetailFragment()); } junit.framework.Assert.assertEquals(checkedMessage.getLevel(), message.getLevel()); } @@ -221,11 +216,11 @@ public void discardLoggedMessagePotentiallyManyOfButOnly(CheckedMessage checkedM // ------------------------------------------------------------ private static void dumpLogMessages(List messages) { for (LoggingEvent message : messages) { - System.err.println("Possibly unexpected log message: [" + message.getLevel() + "] " - + message.getRenderedMessage() - + (null != message.getThrowableInformation() - ? " (" + message.getThrowableInformation().getThrowable().toString() + ")" - : "")); + System.err.println( + "Possibly unexpected log message: [" + message.getLevel() + "] " + message.getRenderedMessage() + + (null != message.getThrowableInformation() + ? 
" (" + message.getThrowableInformation().getThrowable().toString() + ")" + : "")); } } } diff --git a/Base/src/test/java/io/deephaven/base/testing/MTTestSupport.java b/Base/src/test/java/io/deephaven/base/testing/MTTestSupport.java index 4886b771d8e..be3b58ceda9 100644 --- a/Base/src/test/java/io/deephaven/base/testing/MTTestSupport.java +++ b/Base/src/test/java/io/deephaven/base/testing/MTTestSupport.java @@ -22,9 +22,8 @@ public static void assertBecomesEquals(int timeout, T expectedVal, Function. if (now - start >= 1000) { start = now; System.out.printf( - "assertBecomesEquals(%d millis, %s expected, %s actual) still waiting after %d millis%n", - timeout, expectedVal.toString(), testVal.toString(), - now - (deadline - timeout)); + "assertBecomesEquals(%d millis, %s expected, %s actual) still waiting after %d millis%n", + timeout, expectedVal.toString(), testVal.toString(), now - (deadline - timeout)); } Thread.yield(); continue; @@ -32,7 +31,7 @@ public static void assertBecomesEquals(int timeout, T expectedVal, Function. 
break; } Assert.fail("value did not become equal to " + expectedVal + " within " + timeout - + " millis, most recent value was " + testVal); + + " millis, most recent value was " + testVal); } public static void assertRemainsEquals(int timeout, T val, Function.Nullary f) { @@ -41,7 +40,7 @@ public static void assertRemainsEquals(int timeout, T val, Function.Nullary< T sample = f.call(); if (!val.equals(sample)) { Assert.fail("value did not remain equal to " + val + " for " + timeout - + " millis, most recent value was " + sample); + + " millis, most recent value was " + sample); } Thread.yield(); } while (System.currentTimeMillis() < deadline); @@ -68,8 +67,7 @@ public static void assertRemainsTrue(int timeout, Predicate.Nullary pred) { } while (System.currentTimeMillis() < deadline); } - public static void assertBecomesStable(int initialTimeout, int stableTimeout, - Predicate.Nullary pred) { + public static void assertBecomesStable(int initialTimeout, int stableTimeout, Predicate.Nullary pred) { assertBecomesTrue(initialTimeout, pred); assertRemainsTrue(stableTimeout, pred); } diff --git a/Base/src/test/java/io/deephaven/base/testing/Matchers.java b/Base/src/test/java/io/deephaven/base/testing/Matchers.java index f9e35ce0c0c..062d4dbaada 100644 --- a/Base/src/test/java/io/deephaven/base/testing/Matchers.java +++ b/Base/src/test/java/io/deephaven/base/testing/Matchers.java @@ -7,8 +7,7 @@ public class Matchers { // ---------------------------------------------------------------- - public static Matcher thumbprint(Class type, final Thumbprinter thumbprinter, - String thumbprint) { + public static Matcher thumbprint(Class type, final Thumbprinter thumbprinter, String thumbprint) { return new ThumbprintMatcher(type, thumbprint) { @Override public String getThumbprint(T t) { @@ -18,8 +17,7 @@ public String getThumbprint(T t) { } // ---------------------------------------------------------------- - public static abstract class ThumbprintMatcher extends BaseMatcher - 
implements Thumbprinter { + public static abstract class ThumbprintMatcher extends BaseMatcher implements Thumbprinter { private final Class m_type; private final String m_thumbprint; @@ -34,13 +32,13 @@ protected ThumbprintMatcher(Class type, String thumbprint) { public boolean matches(Object item) { // noinspection unchecked return m_type.isInstance(item) - && m_thumbprint.equals(getThumbprint((T) item)); + && m_thumbprint.equals(getThumbprint((T) item)); } @Override public void describeTo(Description description) { - description.appendText(m_type.getName()).appendText(" has thumbprint \"") - .appendText(m_thumbprint).appendText("\""); + description.appendText(m_type.getName()).appendText(" has thumbprint \"").appendText(m_thumbprint) + .appendText("\""); } } } diff --git a/Base/src/test/java/io/deephaven/base/testing/SimpleTestSupport.java b/Base/src/test/java/io/deephaven/base/testing/SimpleTestSupport.java index e84924896eb..c86bf6ecd1a 100644 --- a/Base/src/test/java/io/deephaven/base/testing/SimpleTestSupport.java +++ b/Base/src/test/java/io/deephaven/base/testing/SimpleTestSupport.java @@ -32,16 +32,14 @@ public static void assertOrderedListEquals(List l, T... elements) { } } - public static void assertOrderedListEquals(List l, Predicate.Binary equals, - T... elements) { + public static void assertOrderedListEquals(List l, Predicate.Binary equals, T... elements) { Assert.assertEquals(elements.length, l.size()); for (int i = 0; i < elements.length; ++i) { Assert.assertTrue(equals.call(elements[i], l.get(i))); } } - public static > void assertUnorderedListEquals(List l, - T... elements) { + public static > void assertUnorderedListEquals(List l, T... elements) { ArrayList sorted_l = new ArrayList<>(l); Collections.sort(sorted_l); Arrays.sort(elements); @@ -52,7 +50,7 @@ public static > void assertUnorderedListEquals(L } public static > void assertUnorderedListEquals(List l, - Predicate.Binary equals, T... elements) { + Predicate.Binary equals, T... 
elements) { ArrayList sorted_l = new ArrayList<>(l); Collections.sort(sorted_l); Arrays.sort(elements); @@ -68,8 +66,7 @@ public static > void assertUnorderedListEquals(L /** * Asserts that the given collection contains exactly the given elements (in any order). */ - public static void assertCollectionContainsExactly(Collection collectionToSearch, - E... itemsToFind) { + public static void assertCollectionContainsExactly(Collection collectionToSearch, E... itemsToFind) { assertCollectionContainsExactly(null, collectionToSearch, itemsToFind); } @@ -77,23 +74,19 @@ public static void assertCollectionContainsExactly(Collection collectionT /** * Asserts that the given collection contains exactly the given elements (in any order). */ - public static void assertCollectionContainsExactly(String sMessage, - Collection collectionToSearch, E... itemsToFind) { + public static void assertCollectionContainsExactly(String sMessage, Collection collectionToSearch, + E... itemsToFind) { try { String sPrefix = null == sMessage ? 
"" : sMessage + " "; if (null == itemsToFind) { - Assert.assertNull(sPrefix + "Expected collectionToSearch to be null.", - collectionToSearch); + Assert.assertNull(sPrefix + "Expected collectionToSearch to be null.", collectionToSearch); } else { - Assert.assertNotNull(sPrefix + "Expected collectionToSearch to be non-null.", - collectionToSearch); - Assert.assertEquals( - sPrefix + "Expected collectionToSearch and itemsToFind to be the same size.", - itemsToFind.length, collectionToSearch.size()); + Assert.assertNotNull(sPrefix + "Expected collectionToSearch to be non-null.", collectionToSearch); + Assert.assertEquals(sPrefix + "Expected collectionToSearch and itemsToFind to be the same size.", + itemsToFind.length, collectionToSearch.size()); for (E item : itemsToFind) { - Assert.assertTrue( - sPrefix + "Expected collectionToSearch to contain \"" + item + "\".", - collectionToSearch.contains(item)); + Assert.assertTrue(sPrefix + "Expected collectionToSearch to contain \"" + item + "\".", + collectionToSearch.contains(item)); } } } catch (AssertionFailedError e) { @@ -113,19 +106,16 @@ public static void assertCollectionContainsExactly(String sMessage, /** Asserts that the given string contains the given substring. */ public static void assertStringContains(String sWhole, String sFragment) { if (!sWhole.contains(sFragment)) { - throw new ComparisonFailure("Could not find fragment(expected) in whole(actual).", - sFragment, sWhole); + throw new ComparisonFailure("Could not find fragment(expected) in whole(actual).", sFragment, sWhole); } } // ---------------------------------------------------------------- /** Asserts that the given string contains the given substring. 
*/ - public static void assertStringContains(String sTestDescription, String sWhole, - String sFragment) { + public static void assertStringContains(String sTestDescription, String sWhole, String sFragment) { if (!sWhole.contains(sFragment)) { - throw new ComparisonFailure( - sTestDescription + " Could not find fragment(expected) in whole(actual).", - sFragment, sWhole); + throw new ComparisonFailure(sTestDescription + " Could not find fragment(expected) in whole(actual).", + sFragment, sWhole); } } @@ -137,8 +127,7 @@ public static T serializeDeserialize(T t) throws IOException, ClassNotFoundE ObjectOutputStream objectOutputStream = new ObjectOutputStream(byteArrayOutputStream); objectOutputStream.writeObject(t); objectOutputStream.writeObject(t); - ByteArrayInputStream byteArrayInputStream = - new ByteArrayInputStream(byteArrayOutputStream.toByteArray()); + ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(byteArrayOutputStream.toByteArray()); ObjectInputStream objectInputStream = new ObjectInputStream(byteArrayInputStream); // noinspection unchecked objectInputStream.readObject(); @@ -152,14 +141,13 @@ public static T serializeDeserialize(T t) throws IOException, ClassNotFoundE /** Serializes then deserializes an object. 
*/ @SuppressWarnings("unchecked") public static T readWriteExternal(T t, T u, T v) - throws IOException, ClassNotFoundException { + throws IOException, ClassNotFoundException { ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(); ObjectOutputStream objectOutputStream = new ObjectOutputStream(byteArrayOutputStream); t.writeExternal(objectOutputStream); t.writeExternal(objectOutputStream); objectOutputStream.close(); - ByteArrayInputStream byteArrayInputStream = - new ByteArrayInputStream(byteArrayOutputStream.toByteArray()); + ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(byteArrayOutputStream.toByteArray()); ObjectInputStream objectInputStream = new ObjectInputStream(byteArrayInputStream); // noinspection unchecked u.readExternal(objectInputStream); diff --git a/Base/src/test/java/io/deephaven/base/testing/TimeLimitedTest.java b/Base/src/test/java/io/deephaven/base/testing/TimeLimitedTest.java index c24c4e7542e..53910fec528 100644 --- a/Base/src/test/java/io/deephaven/base/testing/TimeLimitedTest.java +++ b/Base/src/test/java/io/deephaven/base/testing/TimeLimitedTest.java @@ -1,13 +1,12 @@ package io.deephaven.base.testing; /** - * A "fishlib JUnit" compatible test class with a timeout that can fail tests after a given timeout - * expires. + * A "fishlib JUnit" compatible test class with a timeout that can fail tests after a given timeout expires. * * Override {@link #maxMillis()} to configure. * - * If you are writing any new tests using junit 4+, instead use @Test(timeout=15_000) to set - * timeouts directly on each method. + * If you are writing any new tests using junit 4+, instead use @Test(timeout=15_000) to set timeouts directly on each + * method. 
*/ public abstract class TimeLimitedTest extends BaseCachedJMockTestCase { @@ -39,7 +38,7 @@ protected void setUp() throws Exception { // but it's still better than a deadlocked VM staying alive for hours System.err.println("Force killing thread after exceeding " + ttl + " ms"); new IllegalStateException() - .printStackTrace(); + .printStackTrace(); running.stop(); } } diff --git a/Base/src/test/java/io/deephaven/base/text/TestConvert.java b/Base/src/test/java/io/deephaven/base/text/TestConvert.java index 4ef3794a946..416577fd3eb 100644 --- a/Base/src/test/java/io/deephaven/base/text/TestConvert.java +++ b/Base/src/test/java/io/deephaven/base/text/TestConvert.java @@ -155,11 +155,10 @@ public void testAppendISO8601Millis() throws ParseException { dateFormat.setTimeZone(TimeZone.getTimeZone("GMT")); long[] testCases = { - 0L, 1L, 10L, 100L, 1000L, 10000L, 100000L, 1000000L, 10000000L, 100000000L, - 1000000000L, 10000000000L, 100000000000L, 1000000000000L, 10000000000000L, - 100000000000000L, 253402300799999L, - -1L, -10L, -100L, -1000L, -10000L, -100000L, -1000000L, -10000000L, -100000000L, - -1000000000L, -10000000000L, -100000000000L, -1000000000000L, -10000000000000L, + 0L, 1L, 10L, 100L, 1000L, 10000L, 100000L, 1000000L, 10000000L, 100000000L, 1000000000L, 10000000000L, + 100000000000L, 1000000000000L, 10000000000000L, 100000000000000L, 253402300799999L, + -1L, -10L, -100L, -1000L, -10000L, -100000L, -1000000L, -10000000L, -100000000L, -1000000000L, + -10000000000L, -100000000000L, -1000000000000L, -10000000000000L, }; for (long testCase : testCases) { checkAppendIso8601Millis(testCase, dateFormat.format(testCase)); @@ -172,17 +171,14 @@ public void testAppendISO8601Millis() throws ParseException { checkAppendIso8601Millis(253402300799999L, "9999-12-31T23:59:59.999"); checkAppendIso8601Millis(253402300800000L, "9999-99-99T99:99:99.999"); - checkAppendIso8601Millis(dateFormat.parse("2004-02-20T11:12:13.014").getTime(), - "2004-02-20T11:12:13.014"); - 
checkAppendIso8601Millis(dateFormat.parse("2000-02-20T11:12:13.014").getTime(), - "2000-02-20T11:12:13.014"); - checkAppendIso8601Millis(dateFormat.parse("1900-02-20T11:12:13.014").getTime(), - "1900-02-20T11:12:13.014"); + checkAppendIso8601Millis(dateFormat.parse("2004-02-20T11:12:13.014").getTime(), "2004-02-20T11:12:13.014"); + checkAppendIso8601Millis(dateFormat.parse("2000-02-20T11:12:13.014").getTime(), "2000-02-20T11:12:13.014"); + checkAppendIso8601Millis(dateFormat.parse("1900-02-20T11:12:13.014").getTime(), "1900-02-20T11:12:13.014"); { ByteBuffer byteBuffer = ByteBuffer.allocate(100); - assertSame(byteBuffer, Convert.appendISO8601Millis(0, - new byte[] {'_', 's', 'u', 'f', 'f', 'i', 'x'}, byteBuffer)); + assertSame(byteBuffer, + Convert.appendISO8601Millis(0, new byte[] {'_', 's', 'u', 'f', 'f', 'i', 'x'}, byteBuffer)); assertBufferEqual(byteBuffer, 0, "1970-01-01T00:00:00.000_suffix"); } } @@ -241,16 +237,16 @@ public void testAppendISO8601Micros() throws ParseException { checkAppendIso8601Micros(253402300800000000L, "9999-99-99T99:99:99.999999"); checkAppendIso8601Micros(dateFormat.parse("2004-02-20T11:12:13.014").getTime() * 1000, - "2004-02-20T11:12:13.014000"); + "2004-02-20T11:12:13.014000"); checkAppendIso8601Micros(dateFormat.parse("2000-02-20T11:12:13.014").getTime() * 1000, - "2000-02-20T11:12:13.014000"); + "2000-02-20T11:12:13.014000"); checkAppendIso8601Micros(dateFormat.parse("1900-02-20T11:12:13.014").getTime() * 1000, - "1900-02-20T11:12:13.014000"); + "1900-02-20T11:12:13.014000"); { ByteBuffer byteBuffer = ByteBuffer.allocate(100); - assertSame(byteBuffer, Convert.appendISO8601Micros(0, - new byte[] {'_', 's', 'u', 'f', 'f', 'i', 'x'}, byteBuffer)); + assertSame(byteBuffer, + Convert.appendISO8601Micros(0, new byte[] {'_', 's', 'u', 'f', 'f', 'i', 'x'}, byteBuffer)); assertBufferEqual(byteBuffer, 0, "1970-01-01T00:00:00.000000_suffix"); } } diff --git a/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/BenchmarkTools.java 
b/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/BenchmarkTools.java index 2d0de73f80b..56cdd6918a2 100644 --- a/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/BenchmarkTools.java +++ b/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/BenchmarkTools.java @@ -26,10 +26,10 @@ public class BenchmarkTools { private static final List COMMON_RESULT_COLUMNS = Arrays.asList( - ColumnDefinition.ofString("Benchmark"), - ColumnDefinition.ofString("Mode"), - ColumnDefinition.ofInt("Iteration"), - ColumnDefinition.ofString("Params")); + ColumnDefinition.ofString("Benchmark"), + ColumnDefinition.ofString("Mode"), + ColumnDefinition.ofInt("Iteration"), + ColumnDefinition.ofString("Params")); /** @@ -72,8 +72,7 @@ BenchmarkTableBuilder tableBuilder(String name, Table fromTable) { } /** - * Create an enumerated {@link ColumnGenerator}, selecting values from the enum - * randomly. + * Create an enumerated {@link ColumnGenerator}, selecting values from the enum randomly. * * @param name The name of the column * @param nVals The number of values in the enumeration @@ -84,21 +83,18 @@ BenchmarkTableBuilder tableBuilder(String name, Table fromTable) { * {@link BenchmarkTableBuilder#addColumn(ColumnGenerator)} */ @ScriptApi - public static ColumnGenerator stringCol(String name, int nVals, int minLen, int maxLen, - long seed) { + public static ColumnGenerator stringCol(String name, int nVals, int minLen, int maxLen, long seed) { return stringCol(name, nVals, minLen, maxLen, seed, EnumStringColumnGenerator.Mode.Random); } /** *

    - * Create an enumerated {@link ColumnGenerator} selecting values from the enum using the - * selected mode. + * Create an enumerated {@link ColumnGenerator} selecting values from the enum using the selected mode. *

    * *
      *
    • {@link EnumStringColumnGenerator.Mode#Random} - Select enum values randomly
    • - *
    • {@link EnumStringColumnGenerator.Mode#Rotate} - Select enum values in order, and - * wrap around
    • + *
    • {@link EnumStringColumnGenerator.Mode#Rotate} - Select enum values in order, and wrap around
    • *
    * * @param name The name of the column @@ -111,8 +107,8 @@ public static ColumnGenerator stringCol(String name, int nVals, int minL * {@link BenchmarkTableBuilder#addColumn(ColumnGenerator)} */ @ScriptApi - public static ColumnGenerator stringCol(String name, int nVals, int minLen, int maxLen, - long seed, EnumStringColumnGenerator.Mode mode) { + public static ColumnGenerator stringCol(String name, int nVals, int minLen, int maxLen, long seed, + EnumStringColumnGenerator.Mode mode) { return new EnumStringColumnGenerator(name, nVals, minLen, maxLen, seed, mode); } @@ -172,8 +168,7 @@ public static ColumnGenerator dateCol(String name, DBDateTime min, D * @param name The name of the column * @param type The type of number * @param The type of number - * @return a {@link ColumnGenerator} for use with - * {@link BenchmarkTableBuilder#addColumn(ColumnGenerator)} + * @return a {@link ColumnGenerator} for use with {@link BenchmarkTableBuilder#addColumn(ColumnGenerator)} */ @ScriptApi public static ColumnGenerator numberCol(String name, Class type) { @@ -181,54 +176,48 @@ public static ColumnGenerator numberCol(String name, Class } /** - * Create a {@link ColumnGenerator} that generates a random number of the desired type within - * a range. + * Create a {@link ColumnGenerator} that generates a random number of the desired type within a range. 
* * @param name The name of the column * @param type The type of number * @param min The minimum value * @param max The maximum value * @param The type of number - * @return a {@link ColumnGenerator} for use with - * {@link BenchmarkTableBuilder#addColumn(ColumnGenerator)} + * @return a {@link ColumnGenerator} for use with {@link BenchmarkTableBuilder#addColumn(ColumnGenerator)} */ @ScriptApi - public static ColumnGenerator numberCol(String name, Class type, - double min, double max) { + public static ColumnGenerator numberCol(String name, Class type, double min, double max) { return new RandomNumColumnGenerator<>(type, name, min, max); } /** - * Create a {@link ColumnGenerator} that generates a monotonically increasing number of the - * desired type. + * Create a {@link ColumnGenerator} that generates a monotonically increasing number of the desired type. * * @param name The name of the column * @param type The type of number * @param start The starting value * @param step The value to step by * @param The type of number - * @return a {@link ColumnGenerator} for use with - * {@link BenchmarkTableBuilder#addColumn(ColumnGenerator)} + * @return a {@link ColumnGenerator} for use with {@link BenchmarkTableBuilder#addColumn(ColumnGenerator)} */ @ScriptApi - public static ColumnGenerator seqNumberCol(String name, Class type, - double start, double step) { + public static ColumnGenerator seqNumberCol(String name, Class type, double start, + double step) { return new SequentialNumColumnGenerator<>(type, name, start, step); } /** *

    - * Create a {@link ColumnGenerator} that generates a number of the desired type which steps - * based on the input {@link SequentialNumColumnGenerator.Mode}. + * Create a {@link ColumnGenerator} that generates a number of the desired type which steps based on the input + * {@link SequentialNumColumnGenerator.Mode}. *

    * *
      - *
    • {@link SequentialNumColumnGenerator.Mode#NoLimit} - Monotonically increasing with - * no limit
    • - *
    • {@link SequentialNumColumnGenerator.Mode#RollAtLimit} - Roll over to the start - * value when the limit is reached
    • - *
    • {@link SequentialNumColumnGenerator.Mode#ReverseAtLimit} - Change increment - * direction when the limit is reached
    • + *
    • {@link SequentialNumColumnGenerator.Mode#NoLimit} - Monotonically increasing with no limit
    • + *
    • {@link SequentialNumColumnGenerator.Mode#RollAtLimit} - Roll over to the start value when the limit is + * reached
    • + *
    • {@link SequentialNumColumnGenerator.Mode#ReverseAtLimit} - Change increment direction when the limit + * is reached
    • *
    * * @param name The name of the column @@ -238,18 +227,16 @@ public static ColumnGenerator seqNumberCol(String name, Cl * @param max the limit value * @param mode What to do when the max is reached. * @param The type of number - * @return a {@link ColumnGenerator} for use with - * {@link BenchmarkTableBuilder#addColumn(ColumnGenerator)} + * @return a {@link ColumnGenerator} for use with {@link BenchmarkTableBuilder#addColumn(ColumnGenerator)} */ @ScriptApi - public static ColumnGenerator seqNumberCol(String name, Class type, - double start, double step, double max, SequentialNumColumnGenerator.Mode mode) { + public static ColumnGenerator seqNumberCol(String name, Class type, double start, + double step, double max, SequentialNumColumnGenerator.Mode mode) { return new SequentialNumColumnGenerator<>(type, name, start, step, max, mode); } /** - * Strip a benchmark name of the format x.y.z.a.b.c.d.BenchmarkClass.method to - * BenchmarkClass.method + * Strip a benchmark name of the format x.y.z.a.b.c.d.BenchmarkClass.method to BenchmarkClass.method * * @param benchmark The full benchmark name * @return The stripped version @@ -299,8 +286,7 @@ public static int sizeWithSparsity(int size, int sparsity) { throw new IllegalStateException("Sparsity must be in the range of 1 through 100"); } - return sparsity == 100 ? size - : (int) Math.ceil(size * SPARSITY_FUDGE_FACTOR / (sparsity / 100.0)); + return sparsity == 100 ? 
size : (int) Math.ceil(size * SPARSITY_FUDGE_FACTOR / (sparsity / 100.0)); } public static Table applySparsity(Table table, int size, int sparsity, long seed) { diff --git a/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/ConcurrentResourceProfiler.java b/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/ConcurrentResourceProfiler.java index 55d6fe5881d..0927cfb434d 100644 --- a/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/ConcurrentResourceProfiler.java +++ b/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/ConcurrentResourceProfiler.java @@ -14,10 +14,9 @@ import java.util.Collection; /* - * Collects resource utilization stats using a separate thread and StatsGatherer class. Since CPU - * Load is averaged over the last minute (by Java), this profiler doesn't provide much different CPU - * information than sampling in afterIteration; but memory and thread stats may be more accurate - * with this approach. + * Collects resource utilization stats using a separate thread and StatsGatherer class. Since CPU Load is averaged over + * the last minute (by Java), this profiler doesn't provide much different CPU information than sampling in + * afterIteration; but memory and thread stats may be more accurate with this approach. 
*/ public class ConcurrentResourceProfiler implements InternalProfiler { @@ -36,20 +35,16 @@ public void beforeIteration(BenchmarkParams benchmarkParams, IterationParams ite } @Override - public Collection afterIteration(BenchmarkParams benchmarkParams, - IterationParams iterationParams, - IterationResult result) { + public Collection afterIteration(BenchmarkParams benchmarkParams, IterationParams iterationParams, + IterationResult result) { gatherer.interrupt(); final Collection results = new ArrayList<>(); results.add(new ScalarResult("Max heap", stats.totalHeap, "bytes", AggregationPolicy.MAX)); - results - .add(new ScalarResult("Max free heap", stats.freeHeap, "bytes", AggregationPolicy.MAX)); - results - .add(new ScalarResult("Max used heap", stats.usedHeap, "bytes", AggregationPolicy.MAX)); - results.add( - new ScalarResult("Max threads", stats.activeThreads, "threads", AggregationPolicy.MAX)); + results.add(new ScalarResult("Max free heap", stats.freeHeap, "bytes", AggregationPolicy.MAX)); + results.add(new ScalarResult("Max used heap", stats.usedHeap, "bytes", AggregationPolicy.MAX)); + results.add(new ScalarResult("Max threads", stats.activeThreads, "threads", AggregationPolicy.MAX)); results.add(new ScalarResult("Max CPU", stats.cpuLoad, "percent", AggregationPolicy.MAX)); return results; diff --git a/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/CsvResultWriter.java b/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/CsvResultWriter.java index 941f736e807..53b3a909931 100644 --- a/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/CsvResultWriter.java +++ b/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/CsvResultWriter.java @@ -15,14 +15,13 @@ public class CsvResultWriter { public static final String TEST_OUTPUT_DIR_PATH = - System.getProperty("test.output.dir", "tmp" + File.separator + "logs"); + System.getProperty("test.output.dir", "tmp" + File.separator + "logs"); public static void recordResults(final Collection 
results, final Class c) { final String className = c.getSimpleName(); final String timeString = - new SimpleDateFormat("yyyy-MM-dd-HHmmss").format(new Date(System.currentTimeMillis())); - recordResults(results, new File( - TEST_OUTPUT_DIR_PATH + File.separator + className + "-" + timeString + ".csv")); + new SimpleDateFormat("yyyy-MM-dd-HHmmss").format(new Date(System.currentTimeMillis())); + recordResults(results, new File(TEST_OUTPUT_DIR_PATH + File.separator + className + "-" + timeString + ".csv")); } public static void recordResults(final Collection results, final File file) { @@ -62,8 +61,7 @@ public static void recordResults(final Collection results, final File for (String key : runParams.getParamsKeys()) { values.put(key, runParams.getParam(key)); } - values.put("Score", - decimalFormat.format(itResult.getPrimaryResult().getScore())); + values.put("Score", decimalFormat.format(itResult.getPrimaryResult().getScore())); values.put("Run", Integer.toString(runNo)); values.put("Iteration", Integer.toString(itNo)); diff --git a/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/ResourceProfiler.java b/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/ResourceProfiler.java index 41d9ec7a5d3..84f86dac210 100644 --- a/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/ResourceProfiler.java +++ b/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/ResourceProfiler.java @@ -9,9 +9,9 @@ import java.util.Collection; /* - * Collects resource utilization stats once, at the end of the iteration. Since CPU Load is averaged - * over the last minute (by Java), this profiler doesn't provide much different CPU information than - * sampling concurrently on another thread as is done in the ConcurrentResourceProfiler. + * Collects resource utilization stats once, at the end of the iteration. 
Since CPU Load is averaged over the last + * minute (by Java), this profiler doesn't provide much different CPU information than sampling concurrently on another + * thread as is done in the ConcurrentResourceProfiler. */ public class ResourceProfiler implements InternalProfiler { @@ -26,28 +26,22 @@ public void beforeIteration(BenchmarkParams benchmarkParams, IterationParams ite } @Override - public Collection afterIteration(BenchmarkParams benchmarkParams, - IterationParams iterationParams, - IterationResult result) { + public Collection afterIteration(BenchmarkParams benchmarkParams, IterationParams iterationParams, + IterationResult result) { final long totalHeap = Runtime.getRuntime().totalMemory(); final long freeHeap = Runtime.getRuntime().freeMemory(); - final long usedHeap = - java.lang.management.ManagementFactory.getMemoryMXBean().getHeapMemoryUsage().getUsed(); + final long usedHeap = java.lang.management.ManagementFactory.getMemoryMXBean().getHeapMemoryUsage().getUsed(); final long activeThreads = java.lang.Thread.activeCount(); - final double cpuLoad = - java.lang.management.ManagementFactory.getOperatingSystemMXBean().getSystemLoadAverage() + final double cpuLoad = java.lang.management.ManagementFactory.getOperatingSystemMXBean().getSystemLoadAverage() / - java.lang.management.ManagementFactory.getOperatingSystemMXBean() - .getAvailableProcessors() - * 100.0; + java.lang.management.ManagementFactory.getOperatingSystemMXBean().getAvailableProcessors() * 100.0; final Collection results = new ArrayList<>(); results.add(new ScalarResult("Max heap", totalHeap, "bytes", AggregationPolicy.MAX)); results.add(new ScalarResult("Max free heap", freeHeap, "bytes", AggregationPolicy.MAX)); results.add(new ScalarResult("Max used heap", usedHeap, "bytes", AggregationPolicy.MAX)); - results - .add(new ScalarResult("Max threads", activeThreads, "threads", AggregationPolicy.MAX)); + results.add(new ScalarResult("Max threads", activeThreads, "threads", 
AggregationPolicy.MAX)); results.add(new ScalarResult("Max CPU", cpuLoad, "percent", AggregationPolicy.MAX)); return results; diff --git a/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/generator/AbstractNumColumnGenerator.java b/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/generator/AbstractNumColumnGenerator.java index d3317ce8fd3..3ecb62a5104 100644 --- a/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/generator/AbstractNumColumnGenerator.java +++ b/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/generator/AbstractNumColumnGenerator.java @@ -21,8 +21,7 @@ public ColumnDefinition getDefinition() { @Override public String getUpdateString(String varName) { - return def.getName() + "=(" + def.getDataType().getSimpleName() + ")" + varName - + chooseGetter(); + return def.getName() + "=(" + def.getDataType().getSimpleName() + ")" + varName + chooseGetter(); } public abstract byte getByte(); diff --git a/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/generator/ColumnGenerator.java b/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/generator/ColumnGenerator.java index 71552e27ff2..99cb26c5e8f 100644 --- a/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/generator/ColumnGenerator.java +++ b/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/generator/ColumnGenerator.java @@ -4,9 +4,9 @@ import io.deephaven.benchmarking.generator.random.ExtendedRandom; /** - * An interface that defines a class which will create Columns for a - * {@link io.deephaven.benchmarking.BenchmarkTable} including {@link ColumnDefinition} creation and - * a method to create {@link io.deephaven.db.tables.Table#update(String...)} strings. + * An interface that defines a class which will create Columns for a {@link io.deephaven.benchmarking.BenchmarkTable} + * including {@link ColumnDefinition} creation and a method to create + * {@link io.deephaven.db.tables.Table#update(String...)} strings. 
* * @param The column type */ @@ -24,11 +24,10 @@ public interface ColumnGenerator { void init(ExtendedRandom random); /** - * Create a string suitable for use with {@link io.deephaven.db.tables.Table#update(String...)} - * calls to generate data. + * Create a string suitable for use with {@link io.deephaven.db.tables.Table#update(String...)} calls to generate + * data. * - * @param varName The name of this instance's variable within the - * {@link io.deephaven.db.tables.select.QueryScope} + * @param varName The name of this instance's variable within the {@link io.deephaven.db.tables.select.QueryScope} * @return A string for use with update() */ String getUpdateString(String varName); diff --git a/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/generator/EnumStringColumnGenerator.java b/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/generator/EnumStringColumnGenerator.java index 19aa1a39a59..6ff6ddcdfbd 100644 --- a/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/generator/EnumStringColumnGenerator.java +++ b/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/generator/EnumStringColumnGenerator.java @@ -9,8 +9,7 @@ import java.util.Set; /** - * A {@link ColumnGenerator} that sources values from a fixed set of values, either - * randomly, or in rotation. + * A {@link ColumnGenerator} that sources values from a fixed set of values, either randomly, or in rotation. 
*/ public class EnumStringColumnGenerator extends AbstractStringColumnGenerator { public enum Mode { @@ -26,8 +25,7 @@ public enum Mode { private final Mode mode; private final long enumSeed; - public EnumStringColumnGenerator(String name, int nVals, int minLength, int maxLength, - long enumSeed, Mode mode) { + public EnumStringColumnGenerator(String name, int nVals, int minLength, int maxLength, long enumSeed, Mode mode) { super(name, minLength, maxLength); this.enumSeed = enumSeed; @@ -42,10 +40,10 @@ public void init(ExtendedRandom random) { final Set enums = new HashSet<>(nVals); - // We need to use a different random to generate the enum otherwise it's difficult to - // generate consistent enums between different tables. - final StringGenerator sg = new StringGenerator(getMinLength(), getMaxLength(), - new NormalExtendedRandom(new Random(enumSeed))); + // We need to use a different random to generate the enum otherwise it's difficult to generate consistent enums + // between different tables. 
+ final StringGenerator sg = + new StringGenerator(getMinLength(), getMaxLength(), new NormalExtendedRandom(new Random(enumSeed))); while (enums.size() < nVals) { enums.add(sg.get()); } diff --git a/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/generator/FuzzyNumColumnGenerator.java b/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/generator/FuzzyNumColumnGenerator.java index e68f20010fa..14d98822587 100644 --- a/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/generator/FuzzyNumColumnGenerator.java +++ b/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/generator/FuzzyNumColumnGenerator.java @@ -6,14 +6,13 @@ public class FuzzyNumColumnGenerator extends SequentialNumColu private final double fuzz; private ExtendedRandom random; - public FuzzyNumColumnGenerator(Class type, String name, double start, double step, - double fuzz) { + public FuzzyNumColumnGenerator(Class type, String name, double start, double step, double fuzz) { super(type, name, start, step); this.fuzz = fuzz; } - public FuzzyNumColumnGenerator(Class type, String name, double start, double step, - double max, double fuzz, Mode mode) { + public FuzzyNumColumnGenerator(Class type, String name, double start, double step, double max, double fuzz, + Mode mode) { super(type, name, start, step, max, mode); this.fuzz = fuzz; } diff --git a/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/generator/SequentialNumColumnGenerator.java b/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/generator/SequentialNumColumnGenerator.java index feac433039d..af5d6d7be50 100644 --- a/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/generator/SequentialNumColumnGenerator.java +++ b/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/generator/SequentialNumColumnGenerator.java @@ -30,8 +30,7 @@ public SequentialNumColumnGenerator(Class type, String name, double start, do this(type, name, start, step, 0, Mode.NoLimit); } - public SequentialNumColumnGenerator(Class 
type, String name, double start, double step, - double max, Mode mode) { + public SequentialNumColumnGenerator(Class type, String name, double start, double step, double max, Mode mode) { super(type, name); this.start = start; this.current = start; diff --git a/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/generator/StringGenerator.java b/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/generator/StringGenerator.java index 5b67a78958c..b8c8bd778ed 100644 --- a/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/generator/StringGenerator.java +++ b/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/generator/StringGenerator.java @@ -33,12 +33,12 @@ public StringGenerator(int minLen, int maxLen, long seed) { public StringGenerator(int minLen, int maxLen, ExtendedRandom random) { if (minLen <= 0 || maxLen <= 0) { - throw new IllegalArgumentException("minLen and maxLen must be positive! (minLen=" - + minLen + ", maxLen-" + maxLen + ')'); + throw new IllegalArgumentException( + "minLen and maxLen must be positive! 
(minLen=" + minLen + ", maxLen-" + maxLen + ')'); } if (minLen > maxLen) { - throw new IllegalArgumentException("minLen cannot be greater than maxLen (minLen=" - + minLen + ", maxLen-" + maxLen + ')'); + throw new IllegalArgumentException( + "minLen cannot be greater than maxLen (minLen=" + minLen + ", maxLen-" + maxLen + ')'); } if (maxLen == Integer.MAX_VALUE) { throw new IllegalArgumentException("maxLen must be less than Integer.MAX_VALUE"); diff --git a/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/generator/random/ExtendedRandom.java b/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/generator/random/ExtendedRandom.java index 4fbd5182076..4436313f73f 100644 --- a/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/generator/random/ExtendedRandom.java +++ b/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/generator/random/ExtendedRandom.java @@ -3,8 +3,8 @@ import java.util.Random; /** - * ExtendedRandom is a helper to facilitate the use of various concrete {@link Random} - * implementations, while providing a consistent interface. + * ExtendedRandom is a helper to facilitate the use of various concrete {@link Random} implementations, while providing + * a consistent interface. 
*/ public abstract class ExtendedRandom { public final double nextDouble(double origin, double bound) { @@ -25,8 +25,8 @@ public final long nextLong(long origin, long bound) { r = (r & m) + origin; else if (n > 0L) { // reject over-represented candidates for (long u = r >>> 1; // ensure nonnegative - u + m - (r = u % n) < 0L; // rejection check - u = nextLong() >>> 1) // retry + u + m - (r = u % n) < 0L; // rejection check + u = nextLong() >>> 1) // retry ; r += origin; } else { // range not representable as long diff --git a/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/impl/AbstractBenchmarkTable.java b/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/impl/AbstractBenchmarkTable.java index f0c0c3bfb9e..68eab15e0ab 100644 --- a/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/impl/AbstractBenchmarkTable.java +++ b/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/impl/AbstractBenchmarkTable.java @@ -11,8 +11,8 @@ import java.util.*; /** - * The base implementation of {@link BenchmarkTable}. This includes all of the common things that - * the other specializations require. + * The base implementation of {@link BenchmarkTable}. This includes all of the common things that the other + * specializations require. 
*/ public abstract class AbstractBenchmarkTable implements BenchmarkTable { private final String name; @@ -20,8 +20,7 @@ public abstract class AbstractBenchmarkTable implements BenchmarkTable { private Map generatorMap = Collections.emptyMap(); private ExtendedRandom rand; - AbstractBenchmarkTable(@NotNull String name, long rngSeed, - @NotNull List generators) { + AbstractBenchmarkTable(@NotNull String name, long rngSeed, @NotNull List generators) { this.name = name; this.rngSeed = rngSeed; this.rand = new NormalExtendedRandom(new Random(rngSeed)); diff --git a/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/impl/AbstractBenchmarkTableBuilder.java b/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/impl/AbstractBenchmarkTableBuilder.java index dcb129a11f1..cb5a32a5bbc 100644 --- a/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/impl/AbstractBenchmarkTableBuilder.java +++ b/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/impl/AbstractBenchmarkTableBuilder.java @@ -10,14 +10,13 @@ import java.util.List; /** - * The basic implementation of {@link BenchmarkTableBuilder}. It allows users to specify table type - * and add columns, while specifying their RNG properties. + * The basic implementation of {@link BenchmarkTableBuilder}. It allows users to specify table type and add columns, + * while specifying their RNG properties. 
*/ public abstract class AbstractBenchmarkTableBuilder - implements BenchmarkTableBuilder { + implements BenchmarkTableBuilder { protected final String name; - protected final KeyedObjectHash columns = - new KeyedObjectHash<>(new ColumnGeneratorKey()); + protected final KeyedObjectHash columns = new KeyedObjectHash<>(new ColumnGeneratorKey()); protected long rngSeed = 0; final long size; @@ -50,8 +49,7 @@ public SELF addColumn(ColumnGenerator generator) { } - private static final class ColumnGeneratorKey - extends KeyedObjectKey.Basic { + private static final class ColumnGeneratorKey extends KeyedObjectKey.Basic { @Override public String getKey(ColumnGenerator columnGenerator) { return columnGenerator.getName(); diff --git a/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/impl/AbstractGeneratedTable.java b/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/impl/AbstractGeneratedTable.java index 109ea283ee8..5944d26a378 100644 --- a/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/impl/AbstractGeneratedTable.java +++ b/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/impl/AbstractGeneratedTable.java @@ -16,22 +16,22 @@ public abstract class AbstractGeneratedTable extends AbstractBenchmarkTable { private final TableDefinition definition; public AbstractGeneratedTable(@NotNull String name, long nRows, long rngSeed, - @NotNull List generators) { + @NotNull List generators) { super(name, rngSeed, generators); this.nRows = nRows; List definitions = getGeneratorMap() - .values() - .stream() - .map(ColumnGenerator::getDefinition) - .map(ColumnDefinition::withNormal) - .collect(Collectors.toList()); + .values() + .stream() + .map(ColumnGenerator::getDefinition) + .map(ColumnDefinition::withNormal) + .collect(Collectors.toList()); definition = new TableDefinition(definitions); } protected Table generateTable() { return SparseSelect.sparseSelect(TableTools.emptyTable(nRows).updateView( - getGeneratorMap().entrySet().stream() - .map(ent -> 
ent.getValue().getUpdateString(ent.getKey())).toArray(String[]::new))); + getGeneratorMap().entrySet().stream().map(ent -> ent.getValue().getUpdateString(ent.getKey())) + .toArray(String[]::new))); } @Override diff --git a/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/impl/InMemoryBenchmarkTable.java b/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/impl/InMemoryBenchmarkTable.java index 276765754ab..66a6485aa10 100644 --- a/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/impl/InMemoryBenchmarkTable.java +++ b/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/impl/InMemoryBenchmarkTable.java @@ -11,10 +11,9 @@ import java.util.Random; /** - * An In memory only implementation of {@link BenchmarkTable}. This class uses a single internal - * {@link ExtendedRandom} wrapping a unique {@link Random} as the RNG source for all underlying data - * generation. This simplifies overall data generation and makes it easier to guarantee that it - * produces identical tables given identical seeds. + * An In memory only implementation of {@link BenchmarkTable}. This class uses a single internal {@link ExtendedRandom} + * wrapping a unique {@link Random} as the RNG source for all underlying data generation. This simplifies overall data + * generation and makes it easier to guarantee that it produces identical tables given identical seeds. */ public class InMemoryBenchmarkTable extends AbstractGeneratedTable { /** @@ -24,11 +23,10 @@ public class InMemoryBenchmarkTable extends AbstractGeneratedTable { * @param name The name of the table (to be used in tagging DbInternal data) * @param nRows The number of rows to generate * @param seed The RNG seed to use. - * @param columnGenerators The set of {@link ColumnGenerator}s used to create the internal - * {@link TableDefinition} and data. + * @param columnGenerators The set of {@link ColumnGenerator}s used to create the internal {@link TableDefinition} + * and data. 
*/ - InMemoryBenchmarkTable(String name, long nRows, long seed, - List columnGenerators) { + InMemoryBenchmarkTable(String name, long nRows, long seed, List columnGenerators) { super(name, nRows, seed, columnGenerators); reset(); diff --git a/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/impl/InMemoryBenchmarkTableBuilder.java b/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/impl/InMemoryBenchmarkTableBuilder.java index 0b81b66b329..dde85aed506 100644 --- a/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/impl/InMemoryBenchmarkTableBuilder.java +++ b/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/impl/InMemoryBenchmarkTableBuilder.java @@ -4,8 +4,8 @@ import io.deephaven.benchmarking.BenchmarkTableBuilder; /** - * The basic implementation of {@link BenchmarkTableBuilder}. It allows users to specify table type - * and add columns, while specifying their RNG properties. + * The basic implementation of {@link BenchmarkTableBuilder}. It allows users to specify table type and add columns, + * while specifying their RNG properties. 
*/ public class InMemoryBenchmarkTableBuilder extends AbstractBenchmarkTableBuilder { diff --git a/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/impl/PersistentBenchmarkTableBuilder.java b/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/impl/PersistentBenchmarkTableBuilder.java index 57167cf7cf4..704e6666d2b 100644 --- a/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/impl/PersistentBenchmarkTableBuilder.java +++ b/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/impl/PersistentBenchmarkTableBuilder.java @@ -5,8 +5,7 @@ import java.util.*; -public class PersistentBenchmarkTableBuilder - extends AbstractBenchmarkTableBuilder { +public class PersistentBenchmarkTableBuilder extends AbstractBenchmarkTableBuilder { private final LinkedHashSet groupingColumns = new LinkedHashSet<>(); private String partitioningFormula; @@ -50,13 +49,12 @@ public BenchmarkTable build() { columns.keySet().forEach(missingGroupingColumns::remove); if (!missingGroupingColumns.isEmpty()) { - throw new IllegalStateException( - "Grouping requested on the following nonexistant columns " + throw new IllegalStateException("Grouping requested on the following nonexistant columns " + StringUtils.joinStrings(missingGroupingColumns, ", ")); } - // TODO (deephaven/deephaven-core/issues/147): Replace this with a Parquet-backed table, or - // delete this entirely and use in-memory always + // TODO (deephaven/deephaven-core/issues/147): Replace this with a Parquet-backed table, or delete this entirely + // and use in-memory always return new InMemoryBenchmarkTable(name, size, rngSeed, getColumnGenerators()); } } diff --git a/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/impl/TableBackedBenchmarkTable.java b/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/impl/TableBackedBenchmarkTable.java index 1e843f08a25..ed2449d746c 100644 --- a/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/impl/TableBackedBenchmarkTable.java +++ 
b/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/impl/TableBackedBenchmarkTable.java @@ -7,15 +7,13 @@ import java.util.List; /** - * A {@link io.deephaven.benchmarking.BenchmarkTable} implementation that is based of an existing - * table, adding the {@link ColumnGenerator}s provided as new columns via - * {@link Table#update(String...)} + * A {@link io.deephaven.benchmarking.BenchmarkTable} implementation that is based of an existing table, adding the + * {@link ColumnGenerator}s provided as new columns via {@link Table#update(String...)} */ public class TableBackedBenchmarkTable extends AbstractBenchmarkTable { private final Table sourceTable; - TableBackedBenchmarkTable(String name, Table sourceTable, long rngSeed, - List columnsToAdd) { + TableBackedBenchmarkTable(String name, Table sourceTable, long rngSeed, List columnsToAdd) { super(name, rngSeed, columnsToAdd); this.sourceTable = sourceTable; } @@ -23,7 +21,7 @@ public class TableBackedBenchmarkTable extends AbstractBenchmarkTable { @Override protected Table populate() { return sourceTable.update(getGeneratorMap().entrySet().stream() - .map(ent -> ent.getValue().getUpdateString(ent.getKey())).toArray(String[]::new)); + .map(ent -> ent.getValue().getUpdateString(ent.getKey())).toArray(String[]::new)); } @Override diff --git a/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/impl/TableBackedBenchmarkTableBuilder.java b/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/impl/TableBackedBenchmarkTableBuilder.java index 62338e037cf..777171d5e11 100644 --- a/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/impl/TableBackedBenchmarkTableBuilder.java +++ b/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/impl/TableBackedBenchmarkTableBuilder.java @@ -6,8 +6,8 @@ import org.jetbrains.annotations.NotNull; /** - * The basic implementation of {@link BenchmarkTableBuilder}. It allows users to specify table type - * and add columns, while specifying their RNG properties. 
+ * The basic implementation of {@link BenchmarkTableBuilder}. It allows users to specify table type and add columns, + * while specifying their RNG properties. */ public class TableBackedBenchmarkTableBuilder extends AbstractBenchmarkTableBuilder { diff --git a/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/runner/BenchmarkRunner.java b/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/runner/BenchmarkRunner.java index 7fa6fc2fc6b..b436ca7c892 100644 --- a/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/runner/BenchmarkRunner.java +++ b/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/runner/BenchmarkRunner.java @@ -29,29 +29,27 @@ import org.openjdk.jmh.runner.options.OptionsBuilder; public class BenchmarkRunner { - private static final TableDefinition RESULT_TABLE_DEF = - BenchmarkTools.getLogDefinitionWithExtra( + private static final TableDefinition RESULT_TABLE_DEF = BenchmarkTools.getLogDefinitionWithExtra( Arrays.asList( - ColumnDefinition.ofInt("Run"), - ColumnDefinition.ofDouble("Score"), - ColumnDefinition.ofLong("TotalHeap"), - ColumnDefinition.ofLong("FreeHeap"), - ColumnDefinition.ofLong("UsedHeap"), - ColumnDefinition.ofLong("Threads"), - ColumnDefinition.ofDouble("CPULoad"))); + ColumnDefinition.ofInt("Run"), + ColumnDefinition.ofDouble("Score"), + ColumnDefinition.ofLong("TotalHeap"), + ColumnDefinition.ofLong("FreeHeap"), + ColumnDefinition.ofLong("UsedHeap"), + ColumnDefinition.ofLong("Threads"), + ColumnDefinition.ofDouble("CPULoad"))); private static final int RETRY_LIMIT = 5; public static void main(String[] args) throws IOException { try { final Runner runner = new Runner(new OptionsBuilder() - .parent(new CommandLineOptions(args)) - .addProfiler(ConcurrentResourceProfiler.class) - .build()); + .parent(new CommandLineOptions(args)) + .addProfiler(ConcurrentResourceProfiler.class) + .build()); final Collection run = runner.run(); recordResults(run); - CsvResultWriter.recordResults(run, - new 
File(BenchmarkTools.getLogPath() + File.separator + "Benchmark")); + CsvResultWriter.recordResults(run, new File(BenchmarkTools.getLogPath() + File.separator + "Benchmark")); } catch (Throwable t) { t.printStackTrace(); System.exit(1); @@ -76,16 +74,16 @@ private static void recordResults(Collection results) { int itNo = 0; for (final IterationResult itResult : benchResult.getIterationResults()) { builder.addRow(benchmarkName, - modeString, - itNo++, - paramString, - runNo, - filterDouble(itResult.getPrimaryResult().getScore()), - (long) (itResult.getSecondaryResults().get("Max heap").getScore()), - (long) (itResult.getSecondaryResults().get("Max free heap").getScore()), - (long) (itResult.getSecondaryResults().get("Max used heap").getScore()), - (long) (itResult.getSecondaryResults().get("Max threads").getScore()), - filterDouble(itResult.getSecondaryResults().get("Max CPU").getScore())); + modeString, + itNo++, + paramString, + runNo, + filterDouble(itResult.getPrimaryResult().getScore()), + (long) (itResult.getSecondaryResults().get("Max heap").getScore()), + (long) (itResult.getSecondaryResults().get("Max free heap").getScore()), + (long) (itResult.getSecondaryResults().get("Max used heap").getScore()), + (long) (itResult.getSecondaryResults().get("Max threads").getScore()), + filterDouble(itResult.getSecondaryResults().get("Max CPU").getScore())); } runNo++; } @@ -93,19 +91,18 @@ private static void recordResults(Collection results) { final Table topLevel = builder.build(); final Table mergedDetails = getMergedDetails(); - final Table result = - topLevel.naturalJoin(mergedDetails, "Benchmark,Mode,Run,Iteration,Params"); + final Table result = topLevel.naturalJoin(mergedDetails, "Benchmark,Mode,Run,Iteration,Params"); - final Path outputPath = Paths.get(BenchmarkTools.getLogPath()) - .resolve("Benchmark" + ParquetTableWriter.PARQUET_FILE_EXTENSION); + final Path outputPath = + Paths.get(BenchmarkTools.getLogPath()).resolve("Benchmark" + 
ParquetTableWriter.PARQUET_FILE_EXTENSION); ParquetTools.writeTable(result, outputPath.toFile(), result.getDefinition()); } private static Table getMergedDetails() { final File[] files = FileUtils.missingSafeListFiles( - new File(BenchmarkTools.getLogPath()), - file -> file.getName().startsWith(BenchmarkTools.DETAIL_LOG_PREFIX)); + new File(BenchmarkTools.getLogPath()), + file -> file.getName().startsWith(BenchmarkTools.DETAIL_LOG_PREFIX)); Arrays.sort(files, Utils.getModifiedTimeComparator(false)); boolean OK; @@ -117,7 +114,7 @@ private static Table getMergedDetails() { while (!OK && retries < RETRY_LIMIT) { try { detailTables[i] = TableBenchmarkState.readBin(files[i]) - .update("Run=" + i); + .update("Run=" + i); OK = true; } catch (AssertionFailure af) { retries++; @@ -125,8 +122,8 @@ private static Table getMergedDetails() { } if (!OK && (retries == RETRY_LIMIT)) { - throw new RuntimeException("Failed to readBin " + files[i].getAbsolutePath() - + " after " + RETRY_LIMIT + " attempts."); + throw new RuntimeException( + "Failed to readBin " + files[i].getAbsolutePath() + " after " + RETRY_LIMIT + " attempts."); } files[i].deleteOnExit(); } diff --git a/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/runner/StatsGatherer.java b/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/runner/StatsGatherer.java index 7bcbcc17161..c58cf9327eb 100644 --- a/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/runner/StatsGatherer.java +++ b/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/runner/StatsGatherer.java @@ -29,15 +29,13 @@ private long max(long current_value, long new_value) { private void getStats() { stats.totalHeap = max(stats.totalHeap, Runtime.getRuntime().totalMemory()); stats.freeHeap = max(stats.freeHeap, Runtime.getRuntime().freeMemory()); - stats.usedHeap = max(stats.usedHeap, java.lang.management.ManagementFactory - .getMemoryMXBean().getHeapMemoryUsage().getUsed()); + stats.usedHeap = max(stats.usedHeap, + 
java.lang.management.ManagementFactory.getMemoryMXBean().getHeapMemoryUsage().getUsed()); stats.activeThreads = max(stats.activeThreads, Thread.activeCount() - 1); stats.cpuLoad = max(stats.cpuLoad, - java.lang.management.ManagementFactory.getOperatingSystemMXBean().getSystemLoadAverage() - / - java.lang.management.ManagementFactory.getOperatingSystemMXBean() - .getAvailableProcessors() - * 100.0); + java.lang.management.ManagementFactory.getOperatingSystemMXBean().getSystemLoadAverage() / + java.lang.management.ManagementFactory.getOperatingSystemMXBean().getAvailableProcessors() + * 100.0); } public void run() { diff --git a/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/runner/TableBenchmarkState.java b/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/runner/TableBenchmarkState.java index 9ae2ad4aa51..f462e409a46 100644 --- a/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/runner/TableBenchmarkState.java +++ b/BenchmarkSupport/src/main/java/io/deephaven/benchmarking/runner/TableBenchmarkState.java @@ -17,8 +17,8 @@ import java.util.Collections; public class TableBenchmarkState { - private static final TableDefinition RESULT_DEF = BenchmarkTools.getLogDefinitionWithExtra( - Collections.singletonList(ColumnDefinition.ofString("Fingerprint"))); + private static final TableDefinition RESULT_DEF = BenchmarkTools + .getLogDefinitionWithExtra(Collections.singletonList(ColumnDefinition.ofString("Fingerprint"))); private final String benchmarkName; private final TableBuilder outputBuilder; @@ -41,8 +41,7 @@ public void init() { public void logOutput() throws IOException { final Path outputPath = Paths.get(BenchmarkTools.getLogPath()) - .resolve(BenchmarkTools.getDetailOutputPath(benchmarkName) - + ParquetTableWriter.PARQUET_FILE_EXTENSION); + .resolve(BenchmarkTools.getDetailOutputPath(benchmarkName) + ParquetTableWriter.PARQUET_FILE_EXTENSION); final Table output = outputBuilder.build(); ParquetTools.writeTable(output, outputPath.toFile(), 
RESULT_DEF); @@ -59,8 +58,8 @@ public void processResult(BenchmarkParams params) throws IOException { } outputBuilder.addRow(benchmarkName, params.getMode().toString(), iteration++, - BenchmarkTools.buildParameterString(params), - TableTools.base64Fingerprint(resultTable)); + BenchmarkTools.buildParameterString(params), + TableTools.base64Fingerprint(resultTable)); } public Table setResult(Table result) { diff --git a/BenchmarkSupport/src/test/java/io/deephaven/benchmarking/impl/TestTableGeneration.java b/BenchmarkSupport/src/test/java/io/deephaven/benchmarking/impl/TestTableGeneration.java index 037858bd472..c175d086c2a 100644 --- a/BenchmarkSupport/src/test/java/io/deephaven/benchmarking/impl/TestTableGeneration.java +++ b/BenchmarkSupport/src/test/java/io/deephaven/benchmarking/impl/TestTableGeneration.java @@ -10,18 +10,17 @@ public class TestTableGeneration extends QueryTableTestBase { public void testCreateHistorical() { - final PersistentBenchmarkTableBuilder builder = - BenchmarkTools.persistentTableBuilder("Carlos", 2000); + final PersistentBenchmarkTableBuilder builder = BenchmarkTools.persistentTableBuilder("Carlos", 2000); final BenchmarkTable bt = builder.setSeed(0xDEADBEEF) - .addColumn(BenchmarkTools.stringCol("Stringy", 1, 10)) - .addColumn(BenchmarkTools.numberCol("C2", int.class)) - .addColumn(BenchmarkTools.numberCol("C3", double.class)) - .addColumn(BenchmarkTools.stringCol("C4", 10, 5, 7, 0xFEEDBEEF)) - .addColumn(BenchmarkTools.stringCol("Thingy", 30, 6, 6, 0xB00FB00F)) - .addGroupingColumns("Thingy") - .setPartitioningFormula("${autobalance_single}") - .setPartitionCount(10) - .build(); + .addColumn(BenchmarkTools.stringCol("Stringy", 1, 10)) + .addColumn(BenchmarkTools.numberCol("C2", int.class)) + .addColumn(BenchmarkTools.numberCol("C3", double.class)) + .addColumn(BenchmarkTools.stringCol("C4", 10, 5, 7, 0xFEEDBEEF)) + .addColumn(BenchmarkTools.stringCol("Thingy", 30, 6, 6, 0xB00FB00F)) + .addGroupingColumns("Thingy") + 
.setPartitioningFormula("${autobalance_single}") + .setPartitionCount(10) + .build(); final Table historicalTable = bt.getTable(); Table selected = historicalTable.select(); @@ -42,12 +41,12 @@ public void testCreateHistorical() { public void testCreateIntraday() { final BenchmarkTableBuilder builder = BenchmarkTools.persistentTableBuilder("Carlos", 2000); final BenchmarkTable bt = builder.setSeed(0xDEADBEEF) - .addColumn(BenchmarkTools.stringCol("Stringy", 1, 10)) - .addColumn(BenchmarkTools.numberCol("C2", int.class)) - .addColumn(BenchmarkTools.numberCol("C3", double.class)) - .addColumn(BenchmarkTools.stringCol("C4", 10, 5, 7, 0xFEEDBEEF)) - .addColumn(BenchmarkTools.stringCol("Thingy", 30, 6, 6, 0xB00FB00F)) - .build(); + .addColumn(BenchmarkTools.stringCol("Stringy", 1, 10)) + .addColumn(BenchmarkTools.numberCol("C2", int.class)) + .addColumn(BenchmarkTools.numberCol("C3", double.class)) + .addColumn(BenchmarkTools.stringCol("C4", 10, 5, 7, 0xFEEDBEEF)) + .addColumn(BenchmarkTools.stringCol("Thingy", 30, 6, 6, 0xB00FB00F)) + .build(); final Table intradayTable = bt.getTable(); @@ -66,12 +65,12 @@ public void testCreateIntraday() { public void testCreateSparseInMemory() { final BenchmarkTableBuilder builder = BenchmarkTools.inMemoryTableBuilder("Carlos", 200000); final BenchmarkTable bt = builder.setSeed(0xDEADBEEF) - .addColumn(BenchmarkTools.stringCol("Stringy", 1, 10)) - .addColumn(BenchmarkTools.numberCol("C2", int.class)) - .addColumn(BenchmarkTools.numberCol("C3", double.class)) - .addColumn(BenchmarkTools.stringCol("C4", 10, 5, 7, 0xFEEDBEEF)) - .addColumn(BenchmarkTools.stringCol("Thingy", 30, 6, 6, 0xB00FB00F)) - .build(); + .addColumn(BenchmarkTools.stringCol("Stringy", 1, 10)) + .addColumn(BenchmarkTools.numberCol("C2", int.class)) + .addColumn(BenchmarkTools.numberCol("C3", double.class)) + .addColumn(BenchmarkTools.stringCol("C4", 10, 5, 7, 0xFEEDBEEF)) + .addColumn(BenchmarkTools.stringCol("Thingy", 30, 6, 6, 0xB00FB00F)) + .build(); final Table 
resultTable = BenchmarkTools.applySparsity(bt.getTable(), 2000, 1, 0); diff --git a/ClientSupport/src/main/java/io/deephaven/clientsupport/plotdownsampling/BucketState.java b/ClientSupport/src/main/java/io/deephaven/clientsupport/plotdownsampling/BucketState.java index 18d5eacb54e..3bbee11bc5b 100644 --- a/ClientSupport/src/main/java/io/deephaven/clientsupport/plotdownsampling/BucketState.java +++ b/ClientSupport/src/main/java/io/deephaven/clientsupport/plotdownsampling/BucketState.java @@ -16,11 +16,11 @@ import java.util.stream.IntStream; /** - * Represents a given "pixel" in the downsampled output - the first and last value within that - * pixel, and the max/min value of each column we're interested in within that pixel. + * Represents a given "pixel" in the downsampled output - the first and last value within that pixel, and the max/min + * value of each column we're interested in within that pixel. * - * The arrays of values for a given downsampled table are shared between all states, so each - * BucketState instance tracks its own offset in those arrays. + * The arrays of values for a given downsampled table are shared between all states, so each BucketState instance tracks + * its own offset in those arrays. 
*/ public class BucketState { private final Index index = Index.FACTORY.getEmptyIndex(); @@ -38,17 +38,15 @@ public class BucketState { private final boolean trackNulls; private final Index[] nulls; - public BucketState(final long key, final int offset, final ValueTracker[] valueTrackers, - boolean trackNulls) { - Assert.eqTrue(trackNulls || offset == 0 || offset == 1, - "trackNulls || offset == 0 || offset == 1"); + public BucketState(final long key, final int offset, final ValueTracker[] valueTrackers, boolean trackNulls) { + Assert.eqTrue(trackNulls || offset == 0 || offset == 1, "trackNulls || offset == 0 || offset == 1"); this.key = key; this.offset = offset; this.values = valueTrackers; this.trackNulls = trackNulls; if (trackNulls) { - this.nulls = IntStream.range(0, valueTrackers.length) - .mapToObj(ignore -> Index.FACTORY.getEmptyIndex()).toArray(Index[]::new); + this.nulls = IntStream.range(0, valueTrackers.length).mapToObj(ignore -> Index.FACTORY.getEmptyIndex()) + .toArray(Index[]::new); } else { this.nulls = null; } @@ -67,11 +65,10 @@ public long getOffset() { } public void append(final long rowIndex, final Chunk[] valueChunks, - final int chunkIndex) { + final int chunkIndex) { index.insert(rowIndex); for (int i = 0; i < values.length; i++) { - values[i].append(offset, rowIndex, valueChunks[i], chunkIndex, - trackNulls ? nulls[i] : null); + values[i].append(offset, rowIndex, valueChunks[i], chunkIndex, trackNulls ? nulls[i] : null); } if (cachedIndex != null) { cachedIndex.close(); @@ -94,14 +91,13 @@ public void remove(final long rowIndex) { } public void update(final long rowIndex, final Chunk[] valueChunks, - final int chunkIndex) { + final int chunkIndex) { for (int i = 0; i < values.length; i++) { final Chunk valueChunk = valueChunks[i]; if (valueChunk == null) { continue;// skip, already decided to be unnecessary } - values[i].update(offset, rowIndex, valueChunk, chunkIndex, - trackNulls ? 
nulls[i] : null); + values[i].update(offset, rowIndex, valueChunk, chunkIndex, trackNulls ? nulls[i] : null); } if (cachedIndex != null) { cachedIndex.close(); @@ -130,37 +126,32 @@ public void shift(final IndexShiftData shiftData) { public void rescanIfNeeded(final DownsampleChunkContext context) { final long indexSize = index.size(); - // this was already checked before this method was called, but let's make sure so that the - // null logic works + // this was already checked before this method was called, but let's make sure so that the null logic works Assert.gt(indexSize, "indexSize", 0); final int[] cols = IntStream.range(0, values.length) - .filter(i -> { - if (trackNulls) { - // if all items are null, don't look for max/min - we can't be sure of this - // without null tracking - if (nulls[i].size() == indexSize) { - // all items are null, so we can mark this as valid - values[i].maxValueValid(offset, true); - values[i].minValueValid(offset, true); - // for sanity's sake, also mark the max and min index to be null - values[i].setMaxIndex(offset, QueryConstants.NULL_LONG); - values[i].setMinIndex(offset, QueryConstants.NULL_LONG); - return false; + .filter(i -> { + if (trackNulls) { + // if all items are null, don't look for max/min - we can't be sure of this without null + // tracking + if (nulls[i].size() == indexSize) { + // all items are null, so we can mark this as valid + values[i].maxValueValid(offset, true); + values[i].minValueValid(offset, true); + // for sanity's sake, also mark the max and min index to be null + values[i].setMaxIndex(offset, QueryConstants.NULL_LONG); + values[i].setMinIndex(offset, QueryConstants.NULL_LONG); + return false; + } } - } - return !values[i].maxValueValid(offset) || !values[i].minValueValid(offset); - }) - .toArray(); - - // This next line appears to be necessary, but is deliberately commented out, since it will - // have no effect. 
- // Normally, any use of a ChunkContext to get Y values should first have a call to - // useYValues to ensure - // that those contexts are ready. In this case, we already know that the contexts exists, so - // there is no - // need to populate them - if they didn't exist, we wouldn't need to rescan that column, - // when that column + return !values[i].maxValueValid(offset) || !values[i].minValueValid(offset); + }) + .toArray(); + + // This next line appears to be necessary, but is deliberately commented out, since it will have no effect. + // Normally, any use of a ChunkContext to get Y values should first have a call to useYValues to ensure + // that those contexts are ready. In this case, we already know that the contexts exists, so there is no + // need to populate them - if they didn't exist, we wouldn't need to rescan that column, when that column // was first marked as needing a rescan, we already created the context. /* context.addYColumnsOfInterest(cols); */ @@ -168,10 +159,8 @@ public void rescanIfNeeded(final DownsampleChunkContext context) { return; } - // As this is a complete rescan, we first pretend that this bucket has never seen any - // values, and mark all - // positions as "null". We must have at least one value in the given context, so we know it - // will be marked + // As this is a complete rescan, we first pretend that this bucket has never seen any values, and mark all + // positions as "null". We must have at least one value in the given context, so we know it will be marked // as valid again when we're done. 
for (final int columnIndex : cols) { if (trackNulls) { @@ -186,16 +175,14 @@ public void rescanIfNeeded(final DownsampleChunkContext context) { final OrderedKeys next = it.getNextOrderedKeysWithLength(RunChartDownsample.CHUNK_SIZE); // LongChunk dateChunk = context.getXValues(next, false); final LongChunk keyChunk = next.asKeyIndicesChunk(); - final Chunk[] valueChunks = - context.getYValues(cols, next, false); + final Chunk[] valueChunks = context.getYValues(cols, next, false); // find the max in this chunk, compare with existing, loop. // this loop uses the prepared "which columns actually need testing" array for (int indexInChunk = 0; indexInChunk < keyChunk.size(); indexInChunk++) { for (final int columnIndex : cols) { - values[columnIndex].append(offset, keyChunk.get(indexInChunk), - valueChunks[columnIndex], indexInChunk, - trackNulls ? nulls[columnIndex] : null); + values[columnIndex].append(offset, keyChunk.get(indexInChunk), valueChunks[columnIndex], + indexInChunk, trackNulls ? nulls[columnIndex] : null); } } } @@ -214,13 +201,11 @@ public Index makeIndex() { for (int i = 0; i < values.length; i++) { if (nulls[i].size() != indexSize) { ValueTracker tracker = values[i]; - // No need to null check these, since we already know at least one real value is - // in here, as we + // No need to null check these, since we already know at least one real value is in here, as we // were tracking nulls build.addKey(tracker.maxIndex(offset)); build.addKey(tracker.minIndex(offset)); - } // Else nothing to do, entire bucket is null, and we already included first+last, - // more than needed + } // Else nothing to do, entire bucket is null, and we already included first+last, more than needed } for (Index nullsForCol : nulls) { @@ -255,12 +240,9 @@ public Index makeIndex() { } } else { for (final ValueTracker tracker : values) { - // Nulls are not being tracked, so instead we will ask each column if it has only - // null values. 
If - // so, skip max/min in the constructed index for this column, the first/last (and - // other column - // values) are sufficient for this column. If either max or min index is null, the - // other must be as + // Nulls are not being tracked, so instead we will ask each column if it has only null values. If + // so, skip max/min in the constructed index for this column, the first/last (and other column + // values) are sufficient for this column. If either max or min index is null, the other must be as // well. final long max = tracker.maxIndex(offset); @@ -285,22 +267,19 @@ public Index makeIndex() { @Override public String toString() { return "BucketState{" + - "key=" + key + - ", offset=" + offset + - ", values=" - + Arrays.stream(values).map(vt -> vt.toString(offset)).collect(Collectors.joining(", ")) - + - '}'; + "key=" + key + + ", offset=" + offset + + ", values=" + Arrays.stream(values).map(vt -> vt.toString(offset)).collect(Collectors.joining(", ")) + + '}'; } - public void validate(final boolean usePrev, final DownsampleChunkContext context, - int[] allYColumnIndexes) { + public void validate(final boolean usePrev, final DownsampleChunkContext context, int[] allYColumnIndexes) { final OrderedKeys.Iterator it = index.getOrderedKeysIterator(); while (it.hasMore()) { final OrderedKeys next = it.getNextOrderedKeysWithLength(RunChartDownsample.CHUNK_SIZE); final LongChunk keyChunk = next.asKeyIndicesChunk(); final Chunk[] valueChunks = - context.getYValues(allYColumnIndexes, next, usePrev); + context.getYValues(allYColumnIndexes, next, usePrev); for (int indexInChunk = 0; indexInChunk < keyChunk.size(); indexInChunk++) { @@ -310,29 +289,28 @@ public void validate(final boolean usePrev, final DownsampleChunkContext context if (nulls[columnIndex].size() == index.size()) { // all entries are null Assert.eq(values[columnIndex].maxIndex(offset), - "values[" + columnIndex + "].maxIndex(" + offset + ")", - QueryConstants.NULL_LONG); + "values[" + columnIndex + 
"].maxIndex(" + offset + ")", + QueryConstants.NULL_LONG); Assert.eq(values[columnIndex].minIndex(offset), - "values[" + columnIndex + "].minIndex(" + offset + ")", - QueryConstants.NULL_LONG); + "values[" + columnIndex + "].minIndex(" + offset + ")", + QueryConstants.NULL_LONG); } else { // must have non-null max and min Assert.neq(values[columnIndex].maxIndex(offset), - "values[" + columnIndex + "].maxIndex(" + offset + ")", - QueryConstants.NULL_LONG); + "values[" + columnIndex + "].maxIndex(" + offset + ")", + QueryConstants.NULL_LONG); Assert.neq(values[columnIndex].minIndex(offset), - "values[" + columnIndex + "].minIndex(" + offset + ")", - QueryConstants.NULL_LONG); + "values[" + columnIndex + "].minIndex(" + offset + ")", + QueryConstants.NULL_LONG); } } // else we really can't assert anything specific - values[columnIndex].validate(offset, keyChunk.get(indexInChunk), - valueChunks[columnIndex], indexInChunk, - trackNulls ? nulls[columnIndex] : null); + values[columnIndex].validate(offset, keyChunk.get(indexInChunk), valueChunks[columnIndex], + indexInChunk, trackNulls ? nulls[columnIndex] : null); } catch (final RuntimeException e) { System.out.println(index); - final String msg = "Bad data! indexInChunk=" + indexInChunk + ", col=" - + columnIndex + ", usePrev=" + usePrev + ", offset=" + offset - + ", index=" + keyChunk.get(indexInChunk); + final String msg = + "Bad data! 
indexInChunk=" + indexInChunk + ", col=" + columnIndex + ", usePrev=" + + usePrev + ", offset=" + offset + ", index=" + keyChunk.get(indexInChunk); throw new IllegalStateException(msg, e); } } diff --git a/ClientSupport/src/main/java/io/deephaven/clientsupport/plotdownsampling/DownsampleChunkContext.java b/ClientSupport/src/main/java/io/deephaven/clientsupport/plotdownsampling/DownsampleChunkContext.java index a3911bb9a28..b6a9b3e8c98 100644 --- a/ClientSupport/src/main/java/io/deephaven/clientsupport/plotdownsampling/DownsampleChunkContext.java +++ b/ClientSupport/src/main/java/io/deephaven/clientsupport/plotdownsampling/DownsampleChunkContext.java @@ -12,9 +12,8 @@ import java.util.List; /** - * Provides chunks for given sources so that downsampling can walk several columns at once, allowing - * GetContext instances to be lazily created as needed, and all tracked together so they can all be - * closed with a single call. + * Provides chunks for given sources so that downsampling can walk several columns at once, allowing GetContext + * instances to be lazily created as needed, and all tracked together so they can all be closed with a single call. */ public class DownsampleChunkContext implements AutoCloseable { private final ColumnSource xColumnSource; @@ -27,17 +26,15 @@ public class DownsampleChunkContext implements AutoCloseable { private final Chunk[] valuesArray; /** - * Creates an object to track the contexts to be used to read data from an upstream table for a - * given operation + * Creates an object to track the contexts to be used to read data from an upstream table for a given operation * - * @param xColumnSource the X column source, always a long column source, currently - * reinterpreted from DBDateTime - * @param yColumnSources any Y value column source which may be used. 
Indexes into this list are - * used when specifying columns which are used later + * @param xColumnSource the X column source, always a long column source, currently reinterpreted from DBDateTime + * @param yColumnSources any Y value column source which may be used. Indexes into this list are used when + * specifying columns which are used later * @param chunkSize the size of chunks to specify when actually creating any GetContext */ - DownsampleChunkContext(final ColumnSource xColumnSource, - final List> yColumnSources, final int chunkSize) { + DownsampleChunkContext(final ColumnSource xColumnSource, final List> yColumnSources, + final int chunkSize) { this.xColumnSource = xColumnSource; this.xContext = xColumnSource.makeGetContext(chunkSize); this.yColumnSources = yColumnSources; @@ -48,8 +45,7 @@ public class DownsampleChunkContext implements AutoCloseable { } /** - * Indicates that any of these Y columns will actually be used, and should be pre-populated if - * not yet present + * Indicates that any of these Y columns will actually be used, and should be pre-populated if not yet present * * @param yCols an array of indexes into the original yColumnSources constructor parameter */ @@ -73,26 +69,24 @@ public void addYColumnsOfInterest(final int[] yCols) { public LongChunk getXValues(final OrderedKeys keys, final boolean usePrev) { // noinspection unchecked return (LongChunk) (usePrev ? xColumnSource.getPrevChunk(xContext, keys) - : xColumnSource.getChunk(xContext, keys)); + : xColumnSource.getChunk(xContext, keys)); } /** - * Requests an array of chunks from the given Y column sources, using the internally tracked - * GetContexts. + * Requests an array of chunks from the given Y column sources, using the internally tracked GetContexts. * - * This assumes that addYColumnsOfInterest has been called on at least the columns indicated in - * yCols. + * This assumes that addYColumnsOfInterest has been called on at least the columns indicated in yCols. 
* * Do not retain or reuse the array, this DownsampleChunkContext will reuse it. * * @param yCols the indexes of the columns from the original yColumnSources to get data from * @param keys the keys in the columns that values are needed for * @param usePrev whether or not previous values should be fetched - * @return an array containing the data in the specified rows. The array will be the same size - * as the original yColumnSources, with only the indexes in yCols populated. + * @return an array containing the data in the specified rows. The array will be the same size as the original + * yColumnSources, with only the indexes in yCols populated. */ - public Chunk[] getYValues(final int[] yCols, - final OrderedKeys keys, final boolean usePrev) { + public Chunk[] getYValues(final int[] yCols, final OrderedKeys keys, + final boolean usePrev) { Arrays.fill(valuesArray, null); for (final int yCol : yCols) { valuesArray[yCol] = getYValues(yCol, keys, usePrev); @@ -101,22 +95,20 @@ public Chunk[] getYValues(final int[] yCols, } /** - * Requests a chunk of data from the specified Y column source, using the internally tracked - * GetContexts. + * Requests a chunk of data from the specified Y column source, using the internally tracked GetContexts. * * @param yColIndex the index of the column from the original yColumnSources to get data from * @param keys the keys in the column that values are needed for * @param usePrev whether or not previous values should be fetched * @return a chunk containing the values specified */ - public Chunk getYValues(final int yColIndex, - final OrderedKeys keys, final boolean usePrev) { + public Chunk getYValues(final int yColIndex, final OrderedKeys keys, + final boolean usePrev) { final ColumnSource columnSource = yColumnSources.get(yColIndex); final ChunkSource.GetContext getContext = yContexts[yColIndex]; Assert.neqNull(getContext, "yContexts.get(yColIndex)"); - return usePrev ? 
columnSource.getPrevChunk(getContext, keys) - : columnSource.getChunk(getContext, keys); + return usePrev ? columnSource.getPrevChunk(getContext, keys) : columnSource.getChunk(getContext, keys); } public void close() { diff --git a/ClientSupport/src/main/java/io/deephaven/clientsupport/plotdownsampling/ReplicateDownsamplingValueTrackers.java b/ClientSupport/src/main/java/io/deephaven/clientsupport/plotdownsampling/ReplicateDownsamplingValueTrackers.java index 121b3625e0e..f88f7344f78 100644 --- a/ClientSupport/src/main/java/io/deephaven/clientsupport/plotdownsampling/ReplicateDownsamplingValueTrackers.java +++ b/ClientSupport/src/main/java/io/deephaven/clientsupport/plotdownsampling/ReplicateDownsamplingValueTrackers.java @@ -5,9 +5,9 @@ import java.io.IOException; /** - * Generates primitive value trackers from the char variant, so that only char and Object need to be - * manually maintained. When these are changed, please run `./gradlew - * replicateDownsampleValueTrackers` to regenerate the other types. + * Generates primitive value trackers from the char variant, so that only char and Object need to be manually + * maintained. When these are changed, please run `./gradlew replicateDownsampleValueTrackers` to regenerate the other + * types. 
*/ public class ReplicateDownsamplingValueTrackers extends ReplicatePrimitiveCode { public static void main(String[] args) throws IOException { diff --git a/ClientSupport/src/main/java/io/deephaven/clientsupport/plotdownsampling/RunChartDownsample.java b/ClientSupport/src/main/java/io/deephaven/clientsupport/plotdownsampling/RunChartDownsample.java index 4d421f01cce..99bcf4199f2 100644 --- a/ClientSupport/src/main/java/io/deephaven/clientsupport/plotdownsampling/RunChartDownsample.java +++ b/ClientSupport/src/main/java/io/deephaven/clientsupport/plotdownsampling/RunChartDownsample.java @@ -34,38 +34,32 @@ import java.util.stream.Stream; /** - * Downsamples a table assuming its contents will be rendered in a run chart, with the each - * subsequent row holding a later X value (i.e. is sorted on that column). Multiple Y columns can be - * specified, as can a range of values for the X value (providing a "zoom" feature, with high - * resolution data in that range, and low resolution data outside of it). + * Downsamples a table assuming its contents will be rendered in a run chart, with the each subsequent row holding a + * later X value (i.e. is sorted on that column). Multiple Y columns can be specified, as can a range of values for the + * X value (providing a "zoom" feature, with high resolution data in that range, and low resolution data outside of it). */ /* - * TODO Remaining work to make this class more efficient. 
This work can be done incrementally as we - * find we need specific cases to be faster, but at the time of writing, this is "fast enough" for - * updating, appending tables with 10m+ rows in them to look good in the web UI: o switching - * downsample<->passthrough is very untested, likely buggy (PRESENTLY DISABLED) o support automatic - * re-ranging, due to too many items being added/removed o read MCS on updates to decide whether or - * not to even check for changes o handle non-QueryTable instances o make shifting more efficient o + * TODO Remaining work to make this class more efficient. This work can be done incrementally as we find we need + * specific cases to be faster, but at the time of writing, this is "fast enough" for updating, appending tables with + * 10m+ rows in them to look good in the web UI: o switching downsample<->passthrough is very untested, likely buggy + * (PRESENTLY DISABLED) o support automatic re-ranging, due to too many items being added/removed o read MCS on updates + * to decide whether or not to even check for changes o handle non-QueryTable instances o make shifting more efficient o * make nulls result in fewer items in the result table */ public class RunChartDownsample implements Function.Unary { private static final Logger log = ProcessEnvironment.getDefaultLog(RunChartDownsample.class); - public static final int CHUNK_SIZE = - Configuration.getInstance().getIntegerWithDefault("chunkSize", 1 << 14); + public static final int CHUNK_SIZE = Configuration.getInstance().getIntegerWithDefault("chunkSize", 1 << 14); /** Enable this to add additional checks at runtime. 
*/ - private static final boolean VALIDATE = Configuration.getInstance() - .getBooleanForClassWithDefault(RunChartDownsample.class, "validate", false); - private static final String BUCKET_SIZES_KEY = - RunChartDownsample.class.getSimpleName() + ".bucketsizes"; + private static final boolean VALIDATE = + Configuration.getInstance().getBooleanForClassWithDefault(RunChartDownsample.class, "validate", false); + private static final String BUCKET_SIZES_KEY = RunChartDownsample.class.getSimpleName() + ".bucketsizes"; /** - * Specifies the bucket sizes to round up to when a client specifies some number of pixels. If - * empty, each user will get exactly the size output table that they asked for, but this likely - * will not be memoized + * Specifies the bucket sizes to round up to when a client specifies some number of pixels. If empty, each user will + * get exactly the size output table that they asked for, but this likely will not be memoized */ - private static final int[] BUCKET_SIZES = - Configuration.getInstance().hasProperty(BUCKET_SIZES_KEY) + private static final int[] BUCKET_SIZES = Configuration.getInstance().hasProperty(BUCKET_SIZES_KEY) ? 
Configuration.getInstance().getIntegerArray(BUCKET_SIZES_KEY) : new int[] {500, 1000, 2000, 4000}; @@ -74,8 +68,8 @@ public class RunChartDownsample implements Function.Unary { private final String xColumnName; private final String[] yColumnNames; - public RunChartDownsample(final int pxCount, @Nullable final long[] zoomRange, - final String xColumnName, final String[] yColumnNames) { + public RunChartDownsample(final int pxCount, @Nullable final long[] zoomRange, final String xColumnName, + final String[] yColumnNames) { Assert.gt(pxCount, "pxCount", 0); Assert.neqNull(xColumnName, "xColumnName"); Assert.neqNull(yColumnNames, "yColumnNames"); @@ -97,15 +91,12 @@ public Table call(final Table wholeTable) { if (wholeTable instanceof QueryTable) { final QueryTable wholeQueryTable = (QueryTable) wholeTable; - return QueryPerformanceRecorder - .withNugget( - "downsample(" + minBins + ", " + xColumnName + " {" - + Arrays.toString(yColumnNames) + "})", + return QueryPerformanceRecorder.withNugget( + "downsample(" + minBins + ", " + xColumnName + " {" + Arrays.toString(yColumnNames) + "})", wholeQueryTable.sizeForInstrumentation(), () -> { - final DownsampleKey memoKey = - new DownsampleKey(minBins, xColumnName, yColumnNames, zoomRange); + final DownsampleKey memoKey = new DownsampleKey(minBins, xColumnName, yColumnNames, zoomRange); return wholeQueryTable.memoizeResult(memoKey, - () -> makeDownsampledQueryTable(wholeQueryTable, memoKey)); + () -> makeDownsampledQueryTable(wholeQueryTable, memoKey)); }); } @@ -117,20 +108,17 @@ public Table call(final Table wholeTable) { // // final Mutable result = new MutableObject<>(); // - // baseTable.initializeWithSnapshot("downsample", swapListener, (prevRequested, beforeClock) - // -> { + // baseTable.initializeWithSnapshot("downsample", swapListener, (prevRequested, beforeClock) -> { // final boolean usePrev = prevRequested && baseTable.isRefreshing(); - // final Index indexToUse = usePrev ? 
baseTable.getIndex().getPrevIndex() : - // baseTable.getIndex(); + // final Index indexToUse = usePrev ? baseTable.getIndex().getPrevIndex() : baseTable.getIndex(); // // // process existing rows - // handleAdded(indexToUse, columnSourceToBin, getNanosPerPx(minBins, usePrev, indexToUse, - // columnSourceToBin), valueColumnSource, states, usePrev); + // handleAdded(indexToUse, columnSourceToBin, getNanosPerPx(minBins, usePrev, indexToUse, columnSourceToBin), + // valueColumnSource, states, usePrev); // // // construct the initial index, table // //TODO copy def, columns that we actually need here - // QueryTable resultTable = new QueryTable(buildIndexFromGroups(states), - // baseTable.getColumnSourceMap()); + // QueryTable resultTable = new QueryTable(buildIndexFromGroups(states), baseTable.getColumnSourceMap()); // result.setValue(resultTable); // // return true; @@ -138,35 +126,30 @@ public Table call(final Table wholeTable) { // return result.getValue(); // } - throw new IllegalArgumentException( - "Can't downsample table of type " + wholeTable.getClass()); + throw new IllegalArgumentException("Can't downsample table of type " + wholeTable.getClass()); } - private Table makeDownsampledQueryTable(final QueryTable wholeQueryTable, - final DownsampleKey memoKey) { + private Table makeDownsampledQueryTable(final QueryTable wholeQueryTable, final DownsampleKey memoKey) { final ShiftAwareSwapListener swapListener = - wholeQueryTable.createSwapListenerIfRefreshing(ShiftAwareSwapListener::new); + wholeQueryTable.createSwapListenerIfRefreshing(ShiftAwareSwapListener::new); final Mutable result = new MutableObject<>(); - wholeQueryTable.initializeWithSnapshot("downsample", swapListener, - (prevRequested, beforeClock) -> { - final boolean usePrev = prevRequested && wholeQueryTable.isRefreshing(); + wholeQueryTable.initializeWithSnapshot("downsample", swapListener, (prevRequested, beforeClock) -> { + final boolean usePrev = prevRequested && wholeQueryTable.isRefreshing(); - 
final DownsamplerListener downsampleListener = - DownsamplerListener.of(wholeQueryTable, memoKey); - downsampleListener.init(usePrev); - result.setValue(downsampleListener.resultTable); + final DownsamplerListener downsampleListener = DownsamplerListener.of(wholeQueryTable, memoKey); + downsampleListener.init(usePrev); + result.setValue(downsampleListener.resultTable); - if (swapListener != null) { - swapListener.setListenerAndResult(downsampleListener, - downsampleListener.resultTable); - downsampleListener.resultTable.addParentReference(swapListener); - downsampleListener.resultTable.addParentReference(downsampleListener); - } + if (swapListener != null) { + swapListener.setListenerAndResult(downsampleListener, downsampleListener.resultTable); + downsampleListener.resultTable.addParentReference(swapListener); + downsampleListener.resultTable.addParentReference(downsampleListener); + } - return true; - }); + return true; + }); return result.getValue(); } @@ -178,7 +161,7 @@ private final static class DownsampleKey extends MemoizedOperationKey { private final long[] zoomRange; private DownsampleKey(final int bins, final String xColumnName, final String[] yColumnNames, - final long[] zoomRange) { + final long[] zoomRange) { this.bins = bins; this.xColumnName = xColumnName; this.yColumnNames = yColumnNames; @@ -200,8 +183,7 @@ public boolean equals(final Object o) { return false; if (!Arrays.equals(yColumnNames, that.yColumnNames)) return false; - return zoomRange != null ? Arrays.equals(zoomRange, that.zoomRange) - : that.zoomRange == null; + return zoomRange != null ? 
Arrays.equals(zoomRange, that.zoomRange) : that.zoomRange == null; } @Override @@ -216,19 +198,19 @@ public int hashCode() { @Override public String toString() { return "DownsampleKey{" + - "bins=" + bins + - ", xColumnName='" + xColumnName + '\'' + - ", yColumnNames=" + Arrays.toString(yColumnNames) + - ", zoomRange=" + Arrays.toString(zoomRange) + - '}'; + "bins=" + bins + + ", xColumnName='" + xColumnName + '\'' + + ", yColumnNames=" + Arrays.toString(yColumnNames) + + ", zoomRange=" + Arrays.toString(zoomRange) + + '}'; } } private static int findMatchingBinSize(final int minBins) { return IntStream.of(BUCKET_SIZES) - .filter(bin -> bin >= minBins) - .findFirst() - .orElse(minBins); + .filter(bin -> bin >= minBins) + .findFirst() + .orElse(minBins); } private static class DownsamplerListener extends BaseTable.ShiftAwareListenerImpl { @@ -240,8 +222,7 @@ private enum RangeMode { ZOOM, AUTO } - public static DownsamplerListener of(final QueryTable sourceTable, - final DownsampleKey key) { + public static DownsamplerListener of(final QueryTable sourceTable, final DownsampleKey key) { final Index index = Index.FACTORY.getEmptyIndex(); final QueryTable resultTable = sourceTable.getSubTable(index); return new DownsamplerListener(sourceTable, resultTable, key); @@ -269,28 +250,28 @@ public static DownsamplerListener of(final QueryTable sourceTable, private final int[] allYColumnIndexes; private final KeyedLongObjectHashMap states = - new KeyedLongObjectHashMap<>(new KeyedLongObjectKey.BasicLax() { - @Override - public long getLongKey(final BucketState bucketState) { - return bucketState.getKey(); - } - }); + new KeyedLongObjectHashMap<>(new KeyedLongObjectKey.BasicLax() { + @Override + public long getLongKey(final BucketState bucketState) { + return bucketState.getKey(); + } + }); private final KeyedLongObjectHash.ValueFactory bucketStateFactory = - new KeyedLongObjectHash.ValueFactory() { - @Override - public BucketState newValue(final long key) { - return new 
BucketState(key, nextPosition(), values, true); - } + new KeyedLongObjectHash.ValueFactory() { + @Override + public BucketState newValue(final long key) { + return new BucketState(key, nextPosition(), values, true); + } - @Override - public BucketState newValue(final Long key) { - return newValue((long) key); - } - }; + @Override + public BucketState newValue(final Long key) { + return newValue((long) key); + } + }; private DownsamplerListener(final QueryTable sourceTable, final QueryTable resultTable, - final DownsampleKey key) { + final DownsampleKey key) { super("downsample listener", sourceTable, resultTable); this.sourceTable = sourceTable; this.index = resultTable.getIndex(); @@ -304,16 +285,15 @@ private DownsamplerListener(final QueryTable sourceTable, final QueryTable resul // noinspection unchecked this.xColumnSource = xSource.reinterpret(long.class); } else { - throw new IllegalArgumentException("Cannot use non-DBDateTime, non-long x column " - + key.xColumnName + " in downsample"); + throw new IllegalArgumentException( + "Cannot use non-DBDateTime, non-long x column " + key.xColumnName + " in downsample"); } this.valueColumnSources = Arrays.stream(this.key.yColumnNames) - .map(colName -> (ColumnSource) this.sourceTable.getColumnSource(colName)) - .collect(Collectors.toList()); + .map(colName -> (ColumnSource) this.sourceTable.getColumnSource(colName)) + .collect(Collectors.toList()); - // pre-size the array sources, indicate that these indexes are available as bucketstates - // are created + // pre-size the array sources, indicate that these indexes are available as bucketstates are created // always leave 0, 1 for head/tail, we start counting at 2 nextSlot = key.bins + 2; this.values = ValueTracker.of(valueColumnSources, nextSlot); @@ -339,7 +319,7 @@ protected void destroy() { @Override public void onUpdate(final Update upstream) { try (final DownsampleChunkContext context = - new DownsampleChunkContext(xColumnSource, valueColumnSources, CHUNK_SIZE)) 
{ + new DownsampleChunkContext(xColumnSource, valueColumnSources, CHUNK_SIZE)) { handleRemoved(context, upstream.removed); handleShifts(upstream.shifted); @@ -352,10 +332,8 @@ public void onUpdate(final Update upstream) { notifyResultTable(upstream); - // TODO Complete this so we can switch modes. In the meantime, this is wrapped in - // if(false) - // so that if other changes happen in the APIs this uses, local code won't break. - // When + // TODO Complete this so we can switch modes. In the meantime, this is wrapped in if(false) + // so that if other changes happen in the APIs this uses, local code won't break. When // implemented, remove the operations above, and inline the method. // maybeSwitchModes(upstream, context); @@ -364,11 +342,9 @@ public void onUpdate(final Update upstream) { head.validate(false, context, allYColumnIndexes); tail.validate(false, context, allYColumnIndexes); } - states.values() - .forEach(state -> state.validate(false, context, allYColumnIndexes)); + states.values().forEach(state -> state.validate(false, context, allYColumnIndexes)); if (!index.subsetOf(sourceTable.getIndex())) { - throw new IllegalStateException( - "index.subsetOf(sourceTable.getIndex()) is false, extra items= " + throw new IllegalStateException("index.subsetOf(sourceTable.getIndex()) is false, extra items= " + index.minus(sourceTable.getIndex())); } } @@ -381,11 +357,10 @@ public void onUpdate(final Update upstream) { protected void maybeSwitchModes(Update upstream, DownsampleChunkContext context) { // Consider switching modes - this is deliberately hard to swap back and forth between if (indexMode == IndexMode.PASSTHROUGH - && sourceTable.size() > key.bins * 2 * (2 + key.yColumnNames.length)) { - log.info().append("Switching from PASSTHROUGH to DOWNSAMPLE ") - .append(sourceTable.size()).append(key.toString()).endl(); - // If there are more than 4x items in the source table as there are PX to draw, - // convert to downsampled + && sourceTable.size() > key.bins * 2 
* (2 + key.yColumnNames.length)) { + log.info().append("Switching from PASSTHROUGH to DOWNSAMPLE ").append(sourceTable.size()) + .append(key.toString()).endl(); + // If there are more than 4x items in the source table as there are PX to draw, convert to downsampled indexMode = IndexMode.DOWNSAMPLE; // act as if all items were just added fresh @@ -397,10 +372,10 @@ protected void maybeSwitchModes(Update upstream, DownsampleChunkContext context) // notify downstream tables that the index was swapped notifyResultTable(upstream, sourceTable.getIndex()); } else if (indexMode == IndexMode.DOWNSAMPLE && sourceTable.size() < key.bins) { - log.info().append("Switching from DOWNSAMPLE to PASSTHROUGH ") - .append(sourceTable.size()).append(key.toString()).endl(); - // if the table has shrunk until there are less items in the table than there are Px - // to draw, just show the items + log.info().append("Switching from DOWNSAMPLE to PASSTHROUGH ").append(sourceTable.size()) + .append(key.toString()).endl(); + // if the table has shrunk until there are less items in the table than there are Px to draw, just show + // the items indexMode = IndexMode.PASSTHROUGH; states.clear(); @@ -413,11 +388,11 @@ protected void maybeSwitchModes(Update upstream, DownsampleChunkContext context) availableSlots.clear();// TODO optionally, clear out value trackers? 
nextSlot = 1; - // notify downstream tables that the index changed, add all missing rows from the - // source table, since we're un-downsampling + // notify downstream tables that the index changed, add all missing rows from the source table, since + // we're un-downsampling // TODO - final Update switchToPassThrough = new Update(addToResultTable, removed, modified, - upstream.shifted, upstream.modifiedColumnSet); + final Update switchToPassThrough = + new Update(addToResultTable, removed, modified, upstream.shifted, upstream.modifiedColumnSet); resultTable.notifyListeners(switchToPassThrough); } else if (indexMode == IndexMode.PASSTHROUGH) { log.info().append("PASSTHROUGH update ").append(upstream).endl(); @@ -445,18 +420,15 @@ protected void maybeSwitchModes(Update upstream, DownsampleChunkContext context) notifyResultTable(upstream); } else { - // // Decide if it is time to re-bucket. As above, we want this to infrequently - // done, but we also don't - // // want to bump into the next bucket size up or down. Right now, the bucket - // count should start at - // // 110% of the requested PX size, and can shrink to 100%, or grow to up to - // 150%. - // // We rebucket _before_ getting any work done, since otherwise we would - // bucket twice in this one pass. - // // This does seem a bit silly, but we're assuming that if the last change - // pushed us over the edge so - // // that we need to rebucket, it still makes sense to do so. With that said, - // we don't use the last + // // Decide if it is time to re-bucket. As above, we want this to infrequently done, but we also + // don't + // // want to bump into the next bucket size up or down. Right now, the bucket count should start at + // // 110% of the requested PX size, and can shrink to 100%, or grow to up to 150%. + // // We rebucket _before_ getting any work done, since otherwise we would bucket twice in this one + // pass. 
+ // // This does seem a bit silly, but we're assuming that if the last change pushed us over the edge + // so + // // that we need to rebucket, it still makes sense to do so. With that said, we don't use the last // // update's ranges when rebucketing, we'll start fresh. // if (we needed to rebucket last time && still need to rebucket) { // rerange(); @@ -488,7 +460,7 @@ protected void maybeSwitchModes(Update upstream, DownsampleChunkContext context) public void init(final boolean usePrev) { rerange(usePrev); try (final DownsampleChunkContext context = - new DownsampleChunkContext(xColumnSource, valueColumnSources, CHUNK_SIZE)) { + new DownsampleChunkContext(xColumnSource, valueColumnSources, CHUNK_SIZE)) { handleAdded(context, usePrev, sourceTable.getIndex()); if (VALIDATE) { Consumer validate = state -> { @@ -508,8 +480,7 @@ public void init(final boolean usePrev) { Assert.assertion(index.empty(), "this.index.empty()"); final Index initialIndex = indexFromStates(); - // log.info().append("initial downsample - // index.size()=").append(initialIndex.size()).append(", + // log.info().append("initial downsample index.size()=").append(initialIndex.size()).append(", // index=").append(initialIndex).endl(); index.insert(initialIndex); @@ -537,16 +508,14 @@ private void rerange() { } private void rerange(final boolean usePrev) { - // read the first and last value in the source table, and work out our new nanosPerPx - // value + // read the first and last value in the source table, and work out our new nanosPerPx value final long first; final long last; if (rangeMode == RangeMode.ZOOM) { first = key.zoomRange[0]; last = key.zoomRange[1]; } else { - final Index index = - usePrev ? sourceTable.getIndex().getPrevIndex() : sourceTable.getIndex(); + final Index index = usePrev ? 
sourceTable.getIndex().getPrevIndex() : sourceTable.getIndex(); first = xColumnSource.getLong(index.firstKey()); last = xColumnSource.getLong(index.lastKey()); } @@ -556,8 +525,7 @@ private void rerange(final boolean usePrev) { nanosPerPx = (long) (1.1 * (last - first) / key.bins); } - private void handleAdded(final DownsampleChunkContext context, final boolean usePrev, - final Index addedIndex) { + private void handleAdded(final DownsampleChunkContext context, final boolean usePrev, final Index addedIndex) { final Index index = usePrev ? addedIndex.getPrevIndex() : addedIndex; if (index.empty()) { return; @@ -572,8 +540,7 @@ private void handleAdded(final DownsampleChunkContext context, final boolean use final OrderedKeys next = it.getNextOrderedKeysWithLength(CHUNK_SIZE); final LongChunk xValueChunk = context.getXValues(next, usePrev); final LongChunk keyChunk = next.asKeyIndicesChunk(); - final Chunk[] valueChunks = - context.getYValues(all, next, usePrev); + final Chunk[] valueChunks = context.getYValues(all, next, usePrev); long lastBin = 0; BucketState bucket = null; @@ -621,7 +588,7 @@ private void handleRemoved(final DownsampleChunkContext context, final Index rem } private void handleModified(final DownsampleChunkContext context, final Index modified, - final ModifiedColumnSet modifiedColumnSet) { + final ModifiedColumnSet modifiedColumnSet) { // TODO use MCS here if (modified.empty()/* || !modifiedColumnSet.containsAny(interestedColumns) */) { return; @@ -641,16 +608,13 @@ private void handleModified(final DownsampleChunkContext context, final Index mo final LongChunk oldDateChunk = context.getXValues(next, true); final LongChunk newDateChunk = context.getXValues(next, false); final LongChunk keyChunk = next.asKeyIndicesChunk(); - final Chunk[] valueChunks = - context.getYValues(yColIndexes, next, false); + final Chunk[] valueChunks = context.getYValues(yColIndexes, next, false); final long lastBin = 0; BucketState bucket = null; for (int indexInChunk = 
0; indexInChunk < oldDateChunk.size(); indexInChunk++) { - final long bin = - LongNumericPrimitives.lowerBin(oldDateChunk.get(indexInChunk), nanosPerPx); - final long newBin = - LongNumericPrimitives.lowerBin(newDateChunk.get(indexInChunk), nanosPerPx); + final long bin = LongNumericPrimitives.lowerBin(oldDateChunk.get(indexInChunk), nanosPerPx); + final long newBin = LongNumericPrimitives.lowerBin(newDateChunk.get(indexInChunk), nanosPerPx); if (lastBin != bin || bucket == null) { bucket = getBucket(bin); @@ -705,8 +669,7 @@ private void handleShifts(final IndexShiftData shiftData) { private void performRescans(final DownsampleChunkContext context) { // check each group to see if any needs a rescan - for (final Iterator iterator = states.values().iterator(); iterator - .hasNext();) { + for (final Iterator iterator = states.values().iterator(); iterator.hasNext();) { final BucketState bucket = iterator.next(); if (bucket.getIndex().empty()) { // if it has no keys at all, remove it so we quit checking it @@ -720,13 +683,13 @@ private void performRescans(final DownsampleChunkContext context) { } /** - * Indicates that a change has probably happened and we should notify the result table. The - * contents of the change will be our state map (i.e. there is + * Indicates that a change has probably happened and we should notify the result table. The contents of the + * change will be our state map (i.e. there is * * @param upstream the change that happened upstream - * @param lastIndex the base index to use when considering what items to tell the result - * table changed. if this.index, then update it normally, otherwise this.index must - * be empty and this.index should be populated. + * @param lastIndex the base index to use when considering what items to tell the result table changed. if + * this.index, then update it normally, otherwise this.index must be empty and this.index should be + * populated. 
*/ private void notifyResultTable(final Update upstream, final Index lastIndex) { final Index resultIndex = indexFromStates(); @@ -742,11 +705,10 @@ private void notifyResultTable(final Update upstream, final Index lastIndex) { Assert.assertion(this.index.empty(), "this.index.empty()"); this.index.insert(resultIndex); } - // log.info().append("After downsample update, - // index.size=").append(index.size()).append(", index=").endl();//.append(index).endl(); + // log.info().append("After downsample update, index.size=").append(index.size()).append(", + // index=").endl();//.append(index).endl(); - final Update update = - new Update(added, removed, modified, upstream.shifted, upstream.modifiedColumnSet); + final Update update = new Update(added, removed, modified, upstream.shifted, upstream.modifiedColumnSet); // log.info().append("resultTable.notifyListeners").append(update).endl(); resultTable.notifyListeners(update); } @@ -755,29 +717,25 @@ private Index indexFromStates() { // TODO this couldnt be uglier if i tried if (rangeMode == RangeMode.ZOOM) { return Stream.concat( - Stream.of(head, tail).filter(s -> !s.getIndex().empty()), // note: we only - // filter these two, - // since states - // shouldn't contain - // empty indexes - // anyway - states.values().stream()) - .reduce(Index.FACTORY.getRandomBuilder(), (builder, state) -> { - builder.addIndex(state.makeIndex()); - return builder; - }, (b1, b2) -> { - b1.addIndex(b2.getIndex()); - return b1; - }).getIndex(); + Stream.of(head, tail).filter(s -> !s.getIndex().empty()), // note: we only filter these two, + // since states shouldn't contain + // empty indexes anyway + states.values().stream()) + .reduce(Index.FACTORY.getRandomBuilder(), (builder, state) -> { + builder.addIndex(state.makeIndex()); + return builder; + }, (b1, b2) -> { + b1.addIndex(b2.getIndex()); + return b1; + }).getIndex(); } - return states.values().stream() - .reduce(Index.FACTORY.getRandomBuilder(), (builder, state) -> { - 
builder.addIndex(state.makeIndex()); - return builder; - }, (b1, b2) -> { - b1.addIndex(b2.getIndex()); - return b1; - }).getIndex(); + return states.values().stream().reduce(Index.FACTORY.getRandomBuilder(), (builder, state) -> { + builder.addIndex(state.makeIndex()); + return builder; + }, (b1, b2) -> { + b1.addIndex(b2.getIndex()); + return b1; + }).getIndex(); } private void notifyResultTable(final Update upstream) { diff --git a/ClientSupport/src/main/java/io/deephaven/table/sort/SortDirective.java b/ClientSupport/src/main/java/io/deephaven/table/sort/SortDirective.java index 754b0e0000a..d378f3552cd 100644 --- a/ClientSupport/src/main/java/io/deephaven/table/sort/SortDirective.java +++ b/ClientSupport/src/main/java/io/deephaven/table/sort/SortDirective.java @@ -9,8 +9,7 @@ public class SortDirective implements Serializable { private static final long serialVersionUID = 2L; - public static final SortDirective EMPTY_DIRECTIVE = - new SortDirective(null, SortDirective.NOT_SORTED, false); + public static final SortDirective EMPTY_DIRECTIVE = new SortDirective(null, SortDirective.NOT_SORTED, false); public static final int DESCENDING = -1; public static final int NOT_SORTED = 0; @@ -41,9 +40,9 @@ public String getColumnName() { @Override public String toString() { return "SortDirective{" + - "direction=" + direction + - ", isAbsolute=" + isAbsolute + - ", columnName='" + columnName + '\'' + - '}'; + "direction=" + direction + + ", isAbsolute=" + isAbsolute + + ", columnName='" + columnName + '\'' + + '}'; } } diff --git a/ClientSupport/src/main/java/io/deephaven/treetable/AbstractTreeSnapshotImpl.java b/ClientSupport/src/main/java/io/deephaven/treetable/AbstractTreeSnapshotImpl.java index 06e9a213f08..138d17f72b2 100644 --- a/ClientSupport/src/main/java/io/deephaven/treetable/AbstractTreeSnapshotImpl.java +++ b/ClientSupport/src/main/java/io/deephaven/treetable/AbstractTreeSnapshotImpl.java @@ -27,13 +27,13 @@ import static 
io.deephaven.treetable.TreeTableConstants.ROOT_TABLE_KEY; /** - * The basic implementation used to produce a viewport-ready snapshot of a tree table, taking into - * account the set of expanded rows at each level. + * The basic implementation used to produce a viewport-ready snapshot of a tree table, taking into account the set of + * expanded rows at each level. */ public abstract class AbstractTreeSnapshotImpl> { private static final Index EMPTY_INDEX = Index.FACTORY.getEmptyIndex(); private static final boolean DEBUG = - Configuration.getInstance().getBooleanWithDefault("AbstractTreeSnapshotImpl.debug", false); + Configuration.getInstance().getBooleanWithDefault("AbstractTreeSnapshotImpl.debug", false); private final long firstViewportRow; private final long lastViewportRow; @@ -56,14 +56,11 @@ public abstract class AbstractTreeSnapshotImpl tablesByKey, - long firstRow, long lastRow, - BitSet columns, - @NotNull SelectFilter[] filters, - @NotNull List sorts, - CLIENT_TYPE client, - Set includedOps) { + HierarchicalTable baseTable, + Map tablesByKey, + long firstRow, long lastRow, + BitSet columns, + @NotNull SelectFilter[] filters, + @NotNull List sorts, + CLIENT_TYPE client, + Set includedOps) { Assert.leq(firstRow, "firstRow", lastRow, "lastRow"); - Assert.leq(lastRow - firstRow, "lastRow - firstRow", Integer.MAX_VALUE, - "Integer.MAX_VALUE"); + Assert.leq(lastRow - firstRow, "lastRow - firstRow", Integer.MAX_VALUE, "Integer.MAX_VALUE"); this.tablesByKey = tablesByKey; this.client = client; this.baseTable = baseTable; @@ -108,23 +104,22 @@ private SnapshotState makeSnapshotState() { } /** - * Apply any required transformations to the root table that are required before the snapshot - * can proceed. Such transformations should use concurrent instantiation patterns internally. + * Apply any required transformations to the root table that are required before the snapshot can proceed. 
Such + * transformations should use concurrent instantiation patterns internally. *

    - * This will be Sort and filter for both {@link RollupSnapshotImpl#prepareRootTable() rollups} - * and {@link TreeTableSnapshotImpl#prepareRootTable() tree tables} + * This will be Sort and filter for both {@link RollupSnapshotImpl#prepareRootTable() rollups} and + * {@link TreeTableSnapshotImpl#prepareRootTable() tree tables} *

    * - * @implNote This method will only be invoked if the client has requested an update using a - * table id of -1/ + * @implNote This method will only be invoked if the client has requested an update using a table id of -1/ * * @return The result of all transformations applied to the root table. */ abstract Table prepareRootTable(); /** - * Get if the root table has changed during this TSQ. This can happen if sorts/filters were - * applied. This will be used to decide if the root needs to be exported to the client. + * Get if the root table has changed during this TSQ. This can happen if sorts/filters were applied. This will be + * used to decide if the root needs to be exported to the client. * * @return true of the root table has changed. */ @@ -138,17 +133,14 @@ private SnapshotState makeSnapshotState() { abstract Table getSourceTable(); /** - * Apply any required transformations to child tables that are required before the snapshot can - * proceed. + * Apply any required transformations to child tables that are required before the snapshot can proceed. *

    - * This will be sorting in the {@link RollupSnapshotImpl#prepareTableInternal(Table) rollup} - * case, and nothing in the {@link TreeTableSnapshotImpl#prepareTableInternal(Table) tree table} - * case + * This will be sorting in the {@link RollupSnapshotImpl#prepareTableInternal(Table) rollup} case, and nothing in + * the {@link TreeTableSnapshotImpl#prepareTableInternal(Table) tree table} case *

    * - * @implNote This method will only be invoked if the client has requested an update using a - * table id of -1. This method will not be invoked on the root table (see - * {@link #prepareRootTable()}). + * @implNote This method will only be invoked if the client has requested an update using a table id of -1. This + * method will not be invoked on the root table (see {@link #prepareRootTable()}). * * @param childMap The map in which to lookup children * @return The result of all transformations on t @@ -180,9 +172,8 @@ private Table prepareTable(Object key, TableMap childMap) { * Attach a reverse lookup listener to the specified table. */ Table attachReverseLookup(Table table) { - final ReverseLookupListener listener = - ReverseLookupListener.makeReverseLookupListenerWithSnapshot((BaseTable) table, - getInfo().getHierarchicalColumnName()); + final ReverseLookupListener listener = ReverseLookupListener + .makeReverseLookupListenerWithSnapshot((BaseTable) table, getInfo().getHierarchicalColumnName()); table = ((BaseTable) table).copy(); table.setAttribute(Table.REVERSE_LOOKUP_ATTRIBUTE, listener); @@ -192,8 +183,8 @@ Table attachReverseLookup(Table table) { /** * Check if a particular index key is valid with respect to t. * - * @implNote This is used to allow for {@link TreeTableSnapshotImpl} to share - * {@link ReverseLookup RLLs} across all child tables. + * @implNote This is used to allow for {@link TreeTableSnapshotImpl} to share {@link ReverseLookup RLLs} across all + * child tables. * * @param usePrev if previous values should be used while validating. * @param t The table to validate K with @@ -222,8 +213,7 @@ INFO_TYPE getInfo() { * @param usePrev if any table data access should use previous values * @return true if the child is valid, false if it should be eliminated. 
*/ - boolean verifyChild(TableDetails parentDetail, TableDetails childDetail, long childKeyPos, - boolean usePrev) { + boolean verifyChild(TableDetails parentDetail, TableDetails childDetail, long childKeyPos, boolean usePrev) { return true; } @@ -237,8 +227,8 @@ Table applyColumnFormats(Table t) { if (info.getColumnFormats() != null && t != null) { final Object rll = t.getAttribute(Table.REVERSE_LOOKUP_ATTRIBUTE); final Object preparedRll = t.getAttribute(Table.PREPARED_RLL_ATTRIBUTE); - t = t.updateView(processFormatColumns(t, - SelectColumnFactory.getFormatExpressions(getInfo().getColumnFormats()))); + t = t.updateView( + processFormatColumns(t, SelectColumnFactory.getFormatExpressions(getInfo().getColumnFormats()))); if (rll != null) { t.setAttribute(Table.REVERSE_LOOKUP_ATTRIBUTE, rll); } @@ -251,11 +241,10 @@ Table applyColumnFormats(Table t) { } /** - * Process the initial set of format columns and return a set that is applicable to the table. + * Process the initial set of format columns and return a set that is applicable to the table.
    *
    - *
    - * This is here in case the column type does not match up between levels of a tree, for example, - * when the table is a rollup and constituent rows are included. + * This is here in case the column type does not match up between levels of a tree, for example, when the table is a + * rollup and constituent rows are included. * * @param t the table to update the columns for * @param initial the initial set of filter columns @@ -290,19 +279,16 @@ private void checkInputs() { } if (allDetails.size() != 1) { - Assert.statementNeverExecuted( - "There is a detail not corresponding to a requested expansion " + allDetails + Assert.statementNeverExecuted("There is a detail not corresponding to a requested expansion " + allDetails + " orig = " + tablesByKey); } - Assert.eqTrue(allDetails.contains(tablesByKey.get(ROOT_TABLE_KEY)), - "allDetails contains ROOT_TABLE_KEY"); + Assert.eqTrue(allDetails.contains(tablesByKey.get(ROOT_TABLE_KEY)), "allDetails contains ROOT_TABLE_KEY"); } /** - * Get a {@link NotificationQueue.Dependency dependency} that will allow us to check whether all - * "structural" components that may impact a snapshot have been satisfied on the current cycle, - * if such is needed. + * Get a {@link NotificationQueue.Dependency dependency} that will allow us to check whether all "structural" + * components that may impact a snapshot have been satisfied on the current cycle, if such is needed. * * @return The appropriate dependency, or null if no such check is needed */ @@ -329,41 +315,40 @@ public TreeSnapshotResult getSnapshot() { final Set ops = getOperations(); // If we're adjusting sorts or filters, we have to wipe out the retention cache. 
if (ops.contains(TreeSnapshotQuery.Operation.SortChanged) - || ops.contains(TreeSnapshotQuery.Operation.FilterChanged)) { + || ops.contains(TreeSnapshotQuery.Operation.FilterChanged)) { releaseAllTables(); } - // NB: Our snapshot control must be notification-aware, because if source ticks we cannot - // guarantee that we + // NB: Our snapshot control must be notification-aware, because if source ticks we cannot guarantee that we // won't observe some newly created components on their instantiation step. - final ConstructSnapshot.SnapshotControl control = ConstructSnapshot - .makeSnapshotControl(true, ((NotificationStepSource) baseTable.getSourceTable())); + final ConstructSnapshot.SnapshotControl control = + ConstructSnapshot.makeSnapshotControl(true, ((NotificationStepSource) baseTable.getSourceTable())); final MutableObject finalState = new MutableObject<>(); ConstructSnapshot.callDataSnapshotFunction(getClass().getSimpleName(), control, - (final boolean usePrev, final long beforeClockValue) -> { - try { - processTables(usePrev); + (final boolean usePrev, final long beforeClockValue) -> { + try { + processTables(usePrev); - final TableDetails rootDetails = tablesByKey.get(ROOT_TABLE_KEY); - final SnapshotState state = treeState.getUserState(); + final TableDetails rootDetails = tablesByKey.get(ROOT_TABLE_KEY); + final SnapshotState state = treeState.getUserState(); - state.beginSnapshot(tablesByKey, columns, firstViewportRow, lastViewportRow); - compute(usePrev, rootDetails, state); + state.beginSnapshot(tablesByKey, columns, firstViewportRow, lastViewportRow); + compute(usePrev, rootDetails, state); - finalState.setValue(state); - } catch (ConstructSnapshot.SnapshotInconsistentException cie) { - return false; - } - return true; - }); + finalState.setValue(state); + } catch (ConstructSnapshot.SnapshotInconsistentException cie) { + return false; + } + return true; + }); postSnapshot(); return makeResult(finalState.getValue()); } /** - * Do a DFS traversal of the 
tree to ensure that we have an actual table for each visible table, - * and the list of children has been updated. + * Do a DFS traversal of the tree to ensure that we have an actual table for each visible table, and the list of + * children has been updated. */ private void processTables(final boolean usePrev) { // Make sure we reset the removed state so if this is a retry we don't have polluted state. @@ -391,8 +376,8 @@ private void processTables(final boolean usePrev) { if (lookup == null) { ConstructSnapshot.failIfConcurrentAttemptInconsistent(); - Assert.assertion(map.size() == 0, "There should be no child tables", map.size(), - "map.size()", children, "children"); + Assert.assertion(map.size() == 0, "There should be no child tables", map.size(), "map.size()", + children, "children"); continue; } @@ -400,8 +385,7 @@ private void processTables(final boolean usePrev) { final TableDetails childDetail = tablesByKey.get(key); final long keyPos = usePrev ? lookup.getPrev(key) : lookup.get(key); - // If the row is no longer present for this child key, queue it up to be - // removed. + // If the row is no longer present for this child key, queue it up to be removed. if (keyPos < 0) { eliminateChildren(childDetail); return; @@ -428,8 +412,7 @@ private void processTables(final boolean usePrev) { } /** - * Remove all of the children in the tree below parent from the map of tables that we need to - * process and inflate. + * Remove all of the children in the tree below parent from the map of tables that we need to process and inflate. * * @param details The root child. 
*/ @@ -440,9 +423,9 @@ private void eliminateChildren(TableDetails details) { final TableDetails current = tablesToEliminate.pop(); current.setRemoved(true); current.getChildren().stream() - .map(tablesByKey::get) - .filter(Objects::nonNull) - .forEach(tablesToEliminate::add); + .map(tablesByKey::get) + .filter(Objects::nonNull) + .forEach(tablesToEliminate::add); } } @@ -455,48 +438,44 @@ private void eliminateChildren(TableDetails details) { * Create a {@link TreeSnapshotResult} from the result of the viewport calculation. * * @param state The currenmt SnapshotState object. - * @return A {@link TreeSnapshotResult} containing the viewport data, and an updated set of - * {@link TableDetails}. + * @return A {@link TreeSnapshotResult} containing the viewport data, and an updated set of {@link TableDetails}. */ private TreeSnapshotResult makeResult(SnapshotState state) { final TableDetails[] tables = tablesByKey.values().stream() - .filter(td -> !td.isRemoved()) - .peek(td -> td.getChildren().removeIf(k -> { - final TableDetails child = tablesByKey.get(k); - return child == null || child.isRemoved(); - })) - .toArray(TableDetails[]::new); + .filter(td -> !td.isRemoved()) + .peek(td -> td.getChildren().removeIf(k -> { + final TableDetails child = tablesByKey.get(k); + return child == null || child.isRemoved(); + })) + .toArray(TableDetails[]::new); final long actualEnd = firstViewportRow + state.actualViewportSize - 1; final Table maybeNewSource = rootTableChanged() ? getSourceTable() : null; return new TreeSnapshotResult(maybeNewSource, - state.totalRowCount, - state.getDataMatrix(), - tables, - state.tableKeyColumn, - state.childPresenceColumn, - firstViewportRow, - actualEnd, - state.getRequiredConstituents()); + state.totalRowCount, + state.getDataMatrix(), + tables, + state.tableKeyColumn, + state.childPresenceColumn, + firstViewportRow, + actualEnd, + state.getRequiredConstituents()); } /** - * Recursively compute and copy the requested viewport. 
This works by first finding the first - * visible row, starting with the root table and skipping rows by expansion. Then the tables are - * walked depth first, by expansion and the data is copied to the resultant flat snapshot, until - * the proper number of rows have been consumed. + * Recursively compute and copy the requested viewport. This works by first finding the first visible row, starting + * with the root table and skipping rows by expansion. Then the tables are walked depth first, by expansion and the + * data is copied to the resultant flat snapshot, until the proper number of rows have been consumed. * * @param usePrev Whether we're using previous values or current * @param current The current table we're evaluating * @param state The current state of the recursion. */ - private void compute(final boolean usePrev, final TableDetails current, - @NotNull final SnapshotState state) { + private void compute(final boolean usePrev, final TableDetails current, @NotNull final SnapshotState state) { if (current == null) { ConstructSnapshot.failIfConcurrentAttemptInconsistent(); - // If this happens that means that the child table has gone away between when we - // computed the child index, and now. - // which means the LogicalClock has ticked, and the snapshot is going to fail, so we'll - // abort mission now. + // If this happens that means that the child table has gone away between when we computed the child index, + // and now. + // which means the LogicalClock has ticked, and the snapshot is going to fail, so we'll abort mission now. Assert.neqNull(current, "Child table ticked away during computation"); } @@ -511,8 +490,7 @@ private void compute(final boolean usePrev, final TableDetails current, // rows skipped. long vkUpper; - final Index currentIndex = - usePrev ? curTable.getIndex().getPrevIndex() : curTable.getIndex(); + final Index currentIndex = usePrev ? 
curTable.getIndex().getPrevIndex() : curTable.getIndex(); // If the first row of the viewport is beyond the current table, we'll use an upper that's // guaranteed to be beyond the table. One of two things will happen: @@ -530,33 +508,28 @@ private void compute(final boolean usePrev, final TableDetails current, long currentPosition = 0; long nextExpansion = -1; - // When searching for the beginning of the viewport, we need to evaluate all of the expanded - // rows recursively and - // shift vkUpper left until there are no more expanded children between the current position - // and the viewport start. - final ColumnSource columnSource = - curTable.getColumnSource(info.getHierarchicalColumnName()); + // When searching for the beginning of the viewport, we need to evaluate all of the expanded rows recursively + // and + // shift vkUpper left until there are no more expanded children between the current position and the viewport + // start. + final ColumnSource columnSource = curTable.getColumnSource(info.getHierarchicalColumnName()); while (exIter.hasNext() && (state.skippedRows < firstViewportRow)) { final long expandedRow = exIter.nextLong(); - // In this case the beginning of the viewport is before the next expanded row, so we're - // done. We can + // In this case the beginning of the viewport is before the next expanded row, so we're done. We can // start adding these table rows to the viewport for this table. if (vkUpper <= expandedRow) { nextExpansion = expandedRow; break; } else { - // Otherwise, there is an expanded range between the current position and the first - // viewport row. - // In this case, we'll recursively evaluate the expanded rows, shifting the initial - // viewport row to the left + // Otherwise, there is an expanded range between the current position and the first viewport row. 
+ // In this case, we'll recursively evaluate the expanded rows, shifting the initial viewport row to the + // left // by the total number of rows "below" the expanded one. - final Object tableKey = - usePrev ? columnSource.getPrev(expandedRow) : columnSource.get(expandedRow); + final Object tableKey = usePrev ? columnSource.getPrev(expandedRow) : columnSource.get(expandedRow); final TableDetails child = tablesByKey.get(tableKey); - // If the expanded row doesn't exist in the current table index, something at a - // higher level is broken. + // If the expanded row doesn't exist in the current table index, something at a higher level is broken. final long expandedPosition = currentIndex.find(expandedRow); if (expandedPosition < 0) { @@ -564,18 +537,17 @@ private void compute(final boolean usePrev, final TableDetails current, Assert.geqZero(expandedPosition, "current.getIndex().find(expandedRow)"); } - // Since we know that this expanded row is before the viewport start, we need to - // accumulate the rows between the current position - // and this expanded row as "skipped" before moving the current table position to - // the right. + // Since we know that this expanded row is before the viewport start, we need to accumulate the rows + // between the current position + // and this expanded row as "skipped" before moving the current table position to the right. state.skippedRows += (expandedPosition - currentPosition) + 1; currentPosition = expandedPosition + 1; // Next evaluate the expanded child rows of this table. compute(usePrev, child, state); - // If we have skipped the same number of rows as the first viewport row after the - // recursive evaluation, then + // If we have skipped the same number of rows as the first viewport row after the recursive evaluation, + // then // we found the first viewport row inside our child and have no more work to do. 
if (state.skippedRows == firstViewportRow) { break; @@ -583,12 +555,11 @@ private void compute(final boolean usePrev, final TableDetails current, if (state.skippedRows >= firstViewportRow) { ConstructSnapshot.failIfConcurrentAttemptInconsistent(); - Assert.lt(state.skippedRows, "state.skippedRows", firstViewportRow, - "firstViewportRow"); + Assert.lt(state.skippedRows, "state.skippedRows", firstViewportRow, "firstViewportRow"); } - // Finally, we need to shift the index of the viewport start wrt to this table, by - // the number of row's we've skipped, less the current position + // Finally, we need to shift the index of the viewport start wrt to this table, by the number of row's + // we've skipped, less the current position // because the current position has already been accounted for in state.skippedRows final long newTarget = firstViewportRow - state.skippedRows + currentPosition; if (newTarget >= currentIndex.size()) { @@ -599,12 +570,12 @@ private void compute(final boolean usePrev, final TableDetails current, } } - // When we get to here, we've found the table and row in that table where the viewport - // begins, so should start accumulating + // When we get to here, we've found the table and row in that table where the viewport begins, so should start + // accumulating // by table index. - // There were no more expanded children, so we need to skip the remaining rows in our table, - // or up to the viewport row + // There were no more expanded children, so we need to skip the remaining rows in our table, or up to the + // viewport row // whichever comes first. if (state.skippedRows < firstViewportRow) { final long remaining = firstViewportRow - state.skippedRows; @@ -639,19 +610,16 @@ private void compute(final boolean usePrev, final TableDetails current, if (nextExpansion == currentIndexKey) { // Copy everything so far, and start a new index. 
- state.addToSnapshot(usePrev, curTable, current.getKey(), curTableMap, - sequentialBuilder.getIndex()); + state.addToSnapshot(usePrev, curTable, current.getKey(), curTableMap, sequentialBuilder.getIndex()); sequentialBuilder = Index.FACTORY.getSequentialBuilder(); - final Object tableKey = - usePrev ? columnSource.getPrev(nextExpansion) : columnSource.get(nextExpansion); + final Object tableKey = usePrev ? columnSource.getPrev(nextExpansion) : columnSource.get(nextExpansion); final TableDetails child = tablesByKey.get(tableKey); if (child == null) { ConstructSnapshot.failIfConcurrentAttemptInconsistent(); - log.error().append("No details for key ").append(Objects.toString(tableKey)) - .append(", usePrev=").append(usePrev).append(", nextExpansion=") - .append(nextExpansion).endl(); + log.error().append("No details for key ").append(Objects.toString(tableKey)).append(", usePrev=") + .append(usePrev).append(", nextExpansion=").append(nextExpansion).endl(); Assert.statementNeverExecuted(); } @@ -675,14 +643,14 @@ private void compute(final boolean usePrev, final TableDetails current, } /** - * Use the {@link ReverseLookup} provided by the specific implementation to locate where - * client-expanded rows have moved within the table, and return an index of these rows. + * Use the {@link ReverseLookup} provided by the specific implementation to locate where client-expanded rows have + * moved within the table, and return an index of these rows. * * @param usePrev If we should use previous values * @param t The table to look in * @param childKeys The keys of the child tables to find - * @return An index containing the rows that represent the indices of the tables indicated in - * childKeys, if they still exist. + * @return An index containing the rows that represent the indices of the tables indicated in childKeys, if they + * still exist. 
*/ private Index getExpandedIndex(boolean usePrev, Table t, Set childKeys) { final ReverseLookup lookup = getReverseLookup(t); @@ -722,9 +690,8 @@ static class PreparedSort { } /** - * Take a preemptive pass through the sorting parameters and compute any additional columns - * required to perform the sort once, so we can apply them at each level without creating - * lots of extra garbage. + * Take a preemptive pass through the sorting parameters and compute any additional columns required to perform + * the sort once, so we can apply them at each level without creating lots of extra garbage. */ void computeSortingData() { if (directives.isEmpty()) { @@ -753,8 +720,8 @@ void computeSortingData() { } /** - * Apply requested sorts and filters to the specified table, if needed. This method makes no - * assumptions about, or changes to {@link ReverseLookup ReverseLookups} + * Apply requested sorts and filters to the specified table, if needed. This method makes no assumptions about, + * or changes to {@link ReverseLookup ReverseLookups} * * @param table The table to sort and filter. * @return The result of applying all of the requested sorts. 
@@ -832,8 +799,7 @@ private static void maybeWaitForSatisfaction(@Nullable final Table table) { maybeWaitForSatisfaction((NotificationQueue.Dependency) table); } - private static void maybeWaitForSatisfaction( - @Nullable final NotificationQueue.Dependency dependency) { + private static void maybeWaitForSatisfaction(@Nullable final NotificationQueue.Dependency dependency) { ConstructSnapshot.maybeWaitForSatisfaction(dependency); } } diff --git a/ClientSupport/src/main/java/io/deephaven/treetable/BitSetColumnSource.java b/ClientSupport/src/main/java/io/deephaven/treetable/BitSetColumnSource.java index d0a40b87c15..db0fca0a268 100644 --- a/ClientSupport/src/main/java/io/deephaven/treetable/BitSetColumnSource.java +++ b/ClientSupport/src/main/java/io/deephaven/treetable/BitSetColumnSource.java @@ -6,13 +6,13 @@ import java.util.BitSet; /** - * A {@link io.deephaven.db.v2.sources.ColumnSource} wrapping a bitset. It does not support previous - * values and is immutable. + * A {@link io.deephaven.db.v2.sources.ColumnSource} wrapping a bitset. It does not support previous values and is + * immutable. * * @implNote This is intended only for use with {@link TreeSnapshotQuery}. */ public class BitSetColumnSource extends AbstractColumnSource - implements ImmutableColumnSourceGetDefaults.ForBoolean { + implements ImmutableColumnSourceGetDefaults.ForBoolean { private final BitSet theSet; public BitSetColumnSource(BitSet theSet) { diff --git a/ClientSupport/src/main/java/io/deephaven/treetable/RollupDefinition.java b/ClientSupport/src/main/java/io/deephaven/treetable/RollupDefinition.java index 11b167e6eb4..62880dd193a 100644 --- a/ClientSupport/src/main/java/io/deephaven/treetable/RollupDefinition.java +++ b/ClientSupport/src/main/java/io/deephaven/treetable/RollupDefinition.java @@ -13,8 +13,8 @@ import java.util.*; /** - * This class defines a rollup. It defines both the grouping columns, their order, and all - * aggregations to be performed on other columns. 
+ * This class defines a rollup. It defines both the grouping columns, their order, and all aggregations to be performed + * on other columns. */ public class RollupDefinition implements Serializable { private static final long serialVersionUID = 2L; @@ -42,13 +42,11 @@ public class RollupDefinition implements Serializable { * @param aggregations the aggregations to perform and which columns to perform them on * @param includeConstituents if constituent rows should be included * @param includeOriginalColumns if original columns should be included - * @param includeDescriptions if the rollup should automatically add column descriptions for the - * chosen aggs + * @param includeDescriptions if the rollup should automatically add column descriptions for the chosen aggs */ public RollupDefinition(List groupingColumns, Map> aggregations, - boolean includeConstituents, boolean includeOriginalColumns, boolean includeDescriptions) { - this(groupingColumns, aggregations, includeConstituents, includeOriginalColumns, - includeDescriptions, ""); + boolean includeConstituents, boolean includeOriginalColumns, boolean includeDescriptions) { + this(groupingColumns, aggregations, includeConstituents, includeOriginalColumns, includeDescriptions, ""); } /** @@ -58,17 +56,15 @@ public RollupDefinition(List groupingColumns, Map> * @param aggregations the aggregations to perform and which columns to perform them on * @param includeConstituents if constituent rows should be included * @param includeOriginalColumns if original columns should be included - * @param includeDescriptions if the rollup should automatically add column descriptions for the - * chosen aggs + * @param includeDescriptions if the rollup should automatically add column descriptions for the chosen aggs * @param name an optional name. 
*/ public RollupDefinition(List groupingColumns, Map> aggregations, - boolean includeConstituents, boolean includeOriginalColumns, boolean includeDescriptions, - String name) { + boolean includeConstituents, boolean includeOriginalColumns, boolean includeDescriptions, String name) { this.groupingColumns = new ArrayList<>(groupingColumns); this.aggregations = new LinkedHashMap<>(); - aggregations.forEach((agg, cols) -> this.aggregations - .computeIfAbsent(agg, a -> new LinkedHashSet<>()).addAll(cols)); + aggregations.forEach( + (agg, cols) -> this.aggregations.computeIfAbsent(agg, a -> new LinkedHashSet<>()).addAll(cols)); this.includeConstituents = includeConstituents; this.includeOriginalColumns = includeOriginalColumns; this.includeDescriptions = includeDescriptions; @@ -83,8 +79,8 @@ public RollupDefinition(List groupingColumns, Map> public RollupDefinition(RollupDefinition other) { this.groupingColumns = new ArrayList<>(other.groupingColumns); this.aggregations = new LinkedHashMap<>(); - other.aggregations.forEach((agg, cols) -> this.aggregations - .computeIfAbsent(agg, a -> new LinkedHashSet<>()).addAll(cols)); + other.aggregations.forEach( + (agg, cols) -> this.aggregations.computeIfAbsent(agg, a -> new LinkedHashSet<>()).addAll(cols)); this.includeConstituents = other.includeConstituents; this.includeOriginalColumns = other.includeOriginalColumns; this.includeDescriptions = other.includeDescriptions; @@ -128,8 +124,7 @@ public boolean includeOriginalColumns() { } /** - * Check if this definition produces a rollup that includes column descriptions for each - * aggregation. + * Check if this definition produces a rollup that includes column descriptions for each aggregation. * * @return true if column descriptions are included. 
*/ @@ -144,9 +139,9 @@ public boolean includeDescriptions() { */ public Element toXml() { final Element info = new Element(NODE_NAME) - .setAttribute(ATTR_CONSTITUENTS, Boolean.toString(includeConstituents)) - .setAttribute(ATTR_INCLUDE_OTHER, Boolean.toString(includeOriginalColumns)) - .setAttribute(ATTR_INCLUDE_DESCRIPTIONS, Boolean.toString(includeDescriptions)); + .setAttribute(ATTR_CONSTITUENTS, Boolean.toString(includeConstituents)) + .setAttribute(ATTR_INCLUDE_OTHER, Boolean.toString(includeOriginalColumns)) + .setAttribute(ATTR_INCLUDE_DESCRIPTIONS, Boolean.toString(includeDescriptions)); if (!StringUtils.isNullOrEmpty(name)) { info.setAttribute(ATTR_NAME, name); @@ -163,8 +158,8 @@ public Element toXml() { opElem.setAttribute(ATTR_NAME, item.getKey().toString()); item.getValue().stream() - .map(col -> new Element(COLUMN_NODE).setAttribute(ATTR_NAME, col)) - .forEach(opElem::addContent); + .map(col -> new Element(COLUMN_NODE).setAttribute(ATTR_NAME, col)) + .forEach(opElem::addContent); info.addContent(opElem); } @@ -203,8 +198,8 @@ public String getName() { */ public Table applyTo(Table table) { final Map maybeDescriptions = includeDescriptions ? 
new HashMap<>() : null; - Table result = table.rollup(createComboAggregateFactory(maybeDescriptions), - includeConstituents, groupingColumns); + Table result = + table.rollup(createComboAggregateFactory(maybeDescriptions), includeConstituents, groupingColumns); if (maybeDescriptions != null) { result = result.withColumnDescription(maybeDescriptions); } @@ -218,14 +213,11 @@ public Table applyTo(Table table) { * @param descriptions if non-null this method will generate column descriptions * @return the ComboAggFactory */ - private ComboAggregateFactory createComboAggregateFactory( - final Map descriptions) { + private ComboAggregateFactory createComboAggregateFactory(final Map descriptions) { final TObjectIntHashMap aggsByColumn = new TObjectIntHashMap<>(); - final List combos = - new ArrayList<>(getAggregations().size()); + final List combos = new ArrayList<>(getAggregations().size()); - // Take two passes through the list. The first pass is to decide if we need to append - // suffixes. + // Take two passes through the list. The first pass is to decide if we need to append suffixes. // The second pass actually creates the aggs. 
for (final Map.Entry> item : getAggregations().entrySet()) { if (item.getKey() != AggType.Count) { @@ -235,20 +227,18 @@ private ComboAggregateFactory createComboAggregateFactory( for (final Map.Entry> item : getAggregations().entrySet()) { if (item.getKey() == AggType.Count) { - combos.add(ComboAggregateFactory - .AggCount(item.getValue().stream().findFirst().orElse("Rollup_Count"))); + combos.add(ComboAggregateFactory.AggCount(item.getValue().stream().findFirst().orElse("Rollup_Count"))); } else { final String[] matchPairs = item.getValue() - .stream() - .map(col -> { - final String aggColName = - createAggColName(col, aggsByColumn, item.getKey()); - if (descriptions != null) { - descriptions.put(aggColName, col + " aggregated with " + item.getKey()); - } - - return aggColName + "=" + col; - }).toArray(String[]::new); + .stream() + .map(col -> { + final String aggColName = createAggColName(col, aggsByColumn, item.getKey()); + if (descriptions != null) { + descriptions.put(aggColName, col + " aggregated with " + item.getKey()); + } + + return aggColName + "=" + col; + }).toArray(String[]::new); combos.add(ComboAggregateFactory.Agg(item.getKey(), matchPairs)); } } @@ -256,11 +246,9 @@ private ComboAggregateFactory createComboAggregateFactory( return ComboAggregateFactory.AggCombo(combos.toArray(new ComboAggregateFactory.ComboBy[0])); } - private String createAggColName(String col, TObjectIntHashMap aggsByColumn, - AggType agg) { - return (aggsByColumn.get(col) > 1 && agg != AggType.Sum) || groupingColumns.contains(col) - ? col + "_" + agg - : col; + private String createAggColName(String col, TObjectIntHashMap aggsByColumn, AggType agg) { + return (aggsByColumn.get(col) > 1 && agg != AggType.Sum) || groupingColumns.contains(col) ? 
col + "_" + agg + : col; } /** @@ -269,8 +257,7 @@ private String createAggColName(String col, TObjectIntHashMap aggsByColu * @param rollupElement the element * @return a RollupDefinition */ - public static RollupDefinition fromXml(Element rollupElement) - throws UncheckedDeephavenException { + public static RollupDefinition fromXml(Element rollupElement) throws UncheckedDeephavenException { final List groupingColumns = new ArrayList<>(); for (final Element groupByElem : rollupElement.getChildren(GROUP_BY_NODE)) { groupingColumns.add(groupByElem.getAttributeValue(ATTR_NAME)); @@ -294,8 +281,7 @@ public static RollupDefinition fromXml(Element rollupElement) for (final Element colEl : opsElem.getChildren(COLUMN_NODE)) { final String colName = colEl.getAttributeValue(ATTR_NAME); if (StringUtils.isNullOrEmpty(colName)) { - throw new UncheckedDeephavenException( - "Rollup aggregation column element missing name"); + throw new UncheckedDeephavenException("Rollup aggregation column element missing name"); } colsForAgg.add(colName); @@ -303,10 +289,10 @@ public static RollupDefinition fromXml(Element rollupElement) } return new RollupDefinition(groupingColumns, aggs, - Boolean.parseBoolean(rollupElement.getAttributeValue(ATTR_CONSTITUENTS)), - Boolean.parseBoolean(rollupElement.getAttributeValue(ATTR_INCLUDE_OTHER)), - Boolean.parseBoolean(rollupElement.getAttributeValue(ATTR_INCLUDE_DESCRIPTIONS)), - rollupElement.getAttributeValue(ATTR_NAME)); + Boolean.parseBoolean(rollupElement.getAttributeValue(ATTR_CONSTITUENTS)), + Boolean.parseBoolean(rollupElement.getAttributeValue(ATTR_INCLUDE_OTHER)), + Boolean.parseBoolean(rollupElement.getAttributeValue(ATTR_INCLUDE_DESCRIPTIONS)), + rollupElement.getAttributeValue(ATTR_NAME)); } /** @@ -323,16 +309,15 @@ public boolean equals(Object o) { return false; RollupDefinition that = (RollupDefinition) o; return includeConstituents == that.includeConstituents && - includeOriginalColumns == that.includeOriginalColumns && - 
includeDescriptions == that.includeDescriptions && - Objects.equals(groupingColumns, that.groupingColumns) && - Objects.equals(aggregations, that.aggregations); + includeOriginalColumns == that.includeOriginalColumns && + includeDescriptions == that.includeDescriptions && + Objects.equals(groupingColumns, that.groupingColumns) && + Objects.equals(aggregations, that.aggregations); } @Override public int hashCode() { - return Objects.hash(groupingColumns, aggregations, includeConstituents, - includeOriginalColumns); + return Objects.hash(groupingColumns, aggregations, includeConstituents, includeOriginalColumns); } public static Builder builder() { @@ -342,8 +327,8 @@ public static Builder builder() { // region Builder for PreDefine /** - * A Builder class to define rollups and attach them to tables as predefined rollups. Instances - * may be retained to hold references to pre-created rollups. + * A Builder class to define rollups and attach them to tables as predefined rollups. Instances may be retained to + * hold references to pre-created rollups. */ public static class Builder { @SuppressWarnings("MismatchedQueryAndUpdateOfCollection") @@ -419,8 +404,7 @@ public Builder agg(AggType type, Collection columns) { if (type == AggType.Count) { if (columns.size() > 1) { - throw new IllegalArgumentException( - "The Count aggregation must have one, and only one column"); + throw new IllegalArgumentException("The Count aggregation must have one, and only one column"); } } @@ -440,8 +424,7 @@ public Builder includeConstituents(boolean include) { } /** - * Set if the result table should include original columns. NOTE: This is currently - * unsupported. + * Set if the result table should include original columns. NOTE: This is currently unsupported. 
* * @param include if original columns should be included * @return this builder @@ -481,8 +464,8 @@ public RollupDefinition build() throws UncheckedDeephavenException { throw new UncheckedDeephavenException("No aggregations defined"); } - return new RollupDefinition(groupingColumns, aggregations, includeConstituents, - includeOriginalColumns, includeDescriptions, name); + return new RollupDefinition(groupingColumns, aggregations, includeConstituents, includeOriginalColumns, + includeDescriptions, name); } /** @@ -497,15 +480,14 @@ public Builder buildAndAttach(Table attachTo) throws UncheckedDeephavenException } /** - * Create the rollup definition and attach it to the specified table as a predefined rollup. - * Additionally, create and hold a reference to the rollup table if requested. + * Create the rollup definition and attach it to the specified table as a predefined rollup. Additionally, + * create and hold a reference to the rollup table if requested. * * @param attachTo the table to attach to * @return this builder * @throws UncheckedDeephavenException */ - public Builder buildAndAttach(Table attachTo, boolean preCreate) - throws UncheckedDeephavenException { + public Builder buildAndAttach(Table attachTo, boolean preCreate) throws UncheckedDeephavenException { final RollupDefinition def = build(); if (preCreate) { @@ -518,7 +500,7 @@ public Builder buildAndAttach(Table attachTo, boolean preCreate) // noinspection unchecked List definitionMap = - (List) attachTo.getAttribute(Table.PREDEFINED_ROLLUP_ATTRIBUTE); + (List) attachTo.getAttribute(Table.PREDEFINED_ROLLUP_ATTRIBUTE); if (definitionMap == null) { definitionMap = new ArrayList<>(); attachTo.setAttribute(Table.PREDEFINED_ROLLUP_ATTRIBUTE, definitionMap); diff --git a/ClientSupport/src/main/java/io/deephaven/treetable/RollupSnapshotImpl.java b/ClientSupport/src/main/java/io/deephaven/treetable/RollupSnapshotImpl.java index 03526dd40f8..2e9348ab5ab 100644 --- 
a/ClientSupport/src/main/java/io/deephaven/treetable/RollupSnapshotImpl.java +++ b/ClientSupport/src/main/java/io/deephaven/treetable/RollupSnapshotImpl.java @@ -21,20 +21,18 @@ import static io.deephaven.treetable.TreeTableConstants.RE_TREE_KEY; class RollupSnapshotImpl> - extends AbstractTreeSnapshotImpl { + extends AbstractTreeSnapshotImpl { private boolean rootTableChanged = false; private Table sourceTable; private final PreparedSort constituentSort; /** - * Construct a new query that will create a flat snapshot of the tree table using a flat - * viewport beginning at the specified rows and columns, applying the specified sorts and - * filters if required to fetch tables + * Construct a new query that will create a flat snapshot of the tree table using a flat viewport beginning at the + * specified rows and columns, applying the specified sorts and filters if required to fetch tables * * @param baseTableId The Id of the base table. Used to maintain client state. * @param baseTable The base table to use if sorts/filters must be applied. - * @param tablesByKey The tables within the tree for which viewports are being tracked, - * separated by table key. + * @param tablesByKey The tables within the tree for which viewports are being tracked, separated by table key. * @param firstRow The first row of the flat viewport. * @param lastRow The last row of the flat viewport. 
* @param columns The columns to include in the viewport @@ -44,17 +42,16 @@ class RollupSnapshotImpl tablesByKey, - long firstRow, - long lastRow, - BitSet columns, - @NotNull SelectFilter[] filters, - @NotNull List sorts, - CLIENT_TYPE client, - Set includedOps) { - super(baseTableId, baseTable, tablesByKey, firstRow, lastRow, columns, filters, sorts, - client, includedOps); + HierarchicalTable baseTable, + Map tablesByKey, + long firstRow, + long lastRow, + BitSet columns, + @NotNull SelectFilter[] filters, + @NotNull List sorts, + CLIENT_TYPE client, + Set includedOps) { + super(baseTableId, baseTable, tablesByKey, firstRow, lastRow, columns, filters, sorts, client, includedOps); if (getInfo().includesConstituents()) { final List updated = maybeComputeConstituentSorts(sorts); @@ -116,10 +113,9 @@ Table prepareTableInternal(Table t) { } /** - * For Rollups, if constituents are included, it's possible for the column type to be different - * at the leaf level. This will cause a host of potential problems for formatting, so we will - * eliminate format columns for any column that either can't be found in the leaf table, or has - * a different column type than the root. + * For Rollups, if constituents are included, it's possible for the column type to be different at the leaf level. + * This will cause a host of potential problems for formatting, so we will eliminate format columns for any column + * that either can't be found in the leaf table, or has a different column type than the root. 
* * @param t the table to update the columns for * @param initial the initial set of filter columns @@ -131,22 +127,22 @@ SelectColumn[] processFormatColumns(Table t, SelectColumn[] initial) { final Map currentColumns = t.getColumnSourceMap(); final HierarchicalTable baseTable = getBaseTable(); return Arrays.stream(initial) - .filter(col -> { - col.initDef(t.getDefinition().getColumnNameMap()); - final List requiredColumns = col.getColumns(); - for (final String colName : requiredColumns) { - final ColumnSource currentColumn = currentColumns.get(colName); - if (currentColumn == null) { - return false; + .filter(col -> { + col.initDef(t.getDefinition().getColumnNameMap()); + final List requiredColumns = col.getColumns(); + for (final String colName : requiredColumns) { + final ColumnSource currentColumn = currentColumns.get(colName); + if (currentColumn == null) { + return false; + } + + if (currentColumn.getType() != baseTable.getColumn(colName).getType()) { + return false; + } } - if (currentColumn.getType() != baseTable.getColumn(colName).getType()) { - return false; - } - } - - return true; - }).toArray(SelectColumn[]::new); + return true; + }).toArray(SelectColumn[]::new); } return initial; @@ -166,7 +162,7 @@ private HierarchicalTable applyFilters(@NotNull HierarchicalTable table) { final Table source = Require.neqNull(table.getSourceTable(), "Hierarchical source table"); final RollupInfo info = getInfo(); return (HierarchicalTable) source.where(filters).rollup(info.factory, - info.getLeafType() == RollupInfo.LeafType.Constituent, info.getSelectColumns()); + info.getLeafType() == RollupInfo.LeafType.Constituent, info.getSelectColumns()); } @Override @@ -190,9 +186,8 @@ Table attachReverseLookup(Table table) { @Override TableMap getTableMap(Table t) { - return Require.neqNull( - (TableMap) t.getAttribute(Table.HIERARCHICAL_CHILDREN_TABLE_MAP_ATTRIBUTE), - "Child Table Map"); + return Require.neqNull((TableMap) 
t.getAttribute(Table.HIERARCHICAL_CHILDREN_TABLE_MAP_ATTRIBUTE), + "Child Table Map"); } private List maybeComputeConstituentSorts(List sorts) { @@ -201,10 +196,9 @@ private List maybeComputeConstituentSorts(List sor } final Map nameMap = getInfo().getMatchPairs().stream() - .collect(Collectors.toMap(MatchPair::left, MatchPair::right)); + .collect(Collectors.toMap(MatchPair::left, MatchPair::right)); - // Note that we can't use getSourceTable() here because it won't have been initialized until - // after + // Note that we can't use getSourceTable() here because it won't have been initialized until after // getSnapshot() is invoked. final Table sourceTable = getBaseTable().getSourceTable(); final List updated = new ArrayList<>(); @@ -217,10 +211,8 @@ private List maybeComputeConstituentSorts(List sor // Try mapping the column back to an original. // In most cases this will succeed -- a notable exception will be Count final String maybeSourceColumn = nameMap.get(dir.getColumnName()); - if (!StringUtils.isNullOrEmpty(maybeSourceColumn) - && sourceTable.hasColumns(maybeSourceColumn)) { - updated.add( - new SortDirective(maybeSourceColumn, dir.getDirection(), dir.isAbsolute())); + if (!StringUtils.isNullOrEmpty(maybeSourceColumn) && sourceTable.hasColumns(maybeSourceColumn)) { + updated.add(new SortDirective(maybeSourceColumn, dir.getDirection(), dir.isAbsolute())); } } diff --git a/ClientSupport/src/main/java/io/deephaven/treetable/SnapshotState.java b/ClientSupport/src/main/java/io/deephaven/treetable/SnapshotState.java index 169e20b88d7..6ecbd9868fe 100644 --- a/ClientSupport/src/main/java/io/deephaven/treetable/SnapshotState.java +++ b/ClientSupport/src/main/java/io/deephaven/treetable/SnapshotState.java @@ -33,8 +33,8 @@ class SnapshotState { private final int childColumnIndex; /** - * A List of column-name, data array mappings. The arrays in this mapping are references to - * items in dataMatrix below + * A List of column-name, data array mappings. 
The arrays in this mapping are references to items in dataMatrix + * below */ private final List> data = new ArrayList<>(); @@ -48,10 +48,7 @@ class SnapshotState { /** How many rows have actually been copied into the snapshot */ private int copied = 0; - /** - * The number of total rows that have been skipped by the snapshot algorithm before data is - * collected - */ + /** The number of total rows that have been skipped by the snapshot algorithm before data is collected */ int skippedRows = 0; /** The number of rows that have been consumed and will be added to the viewport */ @@ -70,10 +67,7 @@ class SnapshotState { /** Mapping of aggregated column name to source column name */ private final Map aggToSourceMap; - /** - * Copiers that copy data from the original table to the intermediate columns. These are for - * rollups only - */ + /** Copiers that copy data from the original table to the intermediate columns. These are for rollups only */ private Map constituentCopiers; /** Analagous to {@link #data} except containing entries for constituent rows */ @@ -88,22 +82,22 @@ class SnapshotState { private final HierarchicalTableInfo info; /** - * Single snapshot state -- Set to true if the snapshot in progress included data from an - * include-constituent Rollup leaf table + * Single snapshot state -- Set to true if the snapshot in progress included data from an include-constituent Rollup + * leaf table */ private boolean includesLeafData = false; // endregion interface Copier { - void copy(boolean usePrev, ColumnSource columnSource, Index.Iterator it, Object target, - int offset, Table table, TableMap tableMap, BitSet childPresenceColumn); + void copy(boolean usePrev, ColumnSource columnSource, Index.Iterator it, Object target, int offset, Table table, + TableMap tableMap, BitSet childPresenceColumn); } SnapshotState(HierarchicalTable baseTable, String hierarchicalColumnName) { this.baseTable = baseTable; this.info = baseTable.getInfo(); - includeConstituents = (info 
instanceof RollupInfo - && ((RollupInfo) info).getLeafType() == RollupInfo.LeafType.Constituent); + includeConstituents = + (info instanceof RollupInfo && ((RollupInfo) info).getLeafType() == RollupInfo.LeafType.Constituent); final TableDefinition definition = baseTable.getDefinition(); childColumnIndex = definition.getColumnIndex(definition.getColumn(hierarchicalColumnName)); @@ -114,12 +108,11 @@ void copy(boolean usePrev, ColumnSource columnSource, Index.Iterator it, Object constituentCopiers = new HashMap<>(); constituentData = new HashMap<>(); aggToSourceMap = ((RollupInfo) info).getMatchPairs() - .stream() - .filter(p -> source.hasColumns(p.rightColumn)) // Filter out any columns that don't - // exist in the parent - // this is a concern for the Count - // aggregation. - .collect(Collectors.toMap(MatchPair::left, MatchPair::right)); + .stream() + .filter(p -> source.hasColumns(p.rightColumn)) // Filter out any columns that don't exist in the + // parent + // this is a concern for the Count aggregation. 
+ .collect(Collectors.toMap(MatchPair::left, MatchPair::right)); includedConstituentColumns = new HashSet<>(); } else { constituentCopiers = null; @@ -132,23 +125,23 @@ void copy(boolean usePrev, ColumnSource columnSource, Index.Iterator it, Object dataMatrix = new Object[definition.getColumns().length]; } - void beginSnapshot(Map tablesByKey, BitSet requestedColumns, - long firstViewportRow, long lastViewportRow) { + void beginSnapshot(Map tablesByKey, BitSet requestedColumns, long firstViewportRow, + long lastViewportRow) { // Compute snapshot boundaries includesLeafData = false; skippedRows = consumed = copied = 0; childPresenceColumn.clear(); totalRowCount = tablesByKey.values().stream() - .filter(v -> !v.isRemoved()) - .mapToLong(v -> v.getTable().size()).sum(); + .filter(v -> !v.isRemoved()) + .mapToLong(v -> v.getTable().size()).sum(); - final int newViewportSize = (int) (Math.min(lastViewportRow + 1, totalRowCount) - - Math.min(firstViewportRow, totalRowCount)); + final int newViewportSize = + (int) (Math.min(lastViewportRow + 1, totalRowCount) - Math.min(firstViewportRow, totalRowCount)); // Do any allocation required if we have to. // Update the final data matrix with the proper set of requeted columns. if (ensureSpace(newViewportSize, tablesByKey.get(TreeTableConstants.ROOT_TABLE_KEY)) - || !requestedColumns.equals(columns)) { + || !requestedColumns.equals(columns)) { this.columns = requestedColumns; // Properly populate the dataMatrix @@ -157,24 +150,18 @@ void beginSnapshot(Map tablesByKey, BitSet requestedColumn final Pair dataPair = data.get(ii); dataMatrix[ii] = dataPair.second; - // If including constituents, populate the set of original columns that we - // actually need, to save us work + // If including constituents, populate the set of original columns that we actually need, to save us + // work // later on. Ignore the by columns, they do not get modified. if (includeConstituents) { // Do we include constituents? 
final String sourceName = aggToSourceMap.get(dataPair.first); if (!io.deephaven.db.util.string.StringUtils.isNullOrEmpty(sourceName) && - !((RollupInfo) info).getSelectColumnNames().contains(dataPair.first) && // Is - // it - // one - // of - // the - // grouping - // columns? - !RollupInfo.ROLLUP_COLUMN.equals(dataPair.first) && // Is it the magic - // rollup column? - !ColumnFormattingValues.isFormattingColumn(dataPair.first)) { // Is it a - // formatting - // column? + !((RollupInfo) info).getSelectColumnNames().contains(dataPair.first) && // Is it one of + // the grouping + // columns? + !RollupInfo.ROLLUP_COLUMN.equals(dataPair.first) && // Is it the magic rollup column? + !ColumnFormattingValues.isFormattingColumn(dataPair.first)) { // Is it a formatting + // column? includedConstituentColumns.add(sourceName); } } @@ -186,20 +173,17 @@ void beginSnapshot(Map tablesByKey, BitSet requestedColumn } /** - * Copy data from the specified table into the resultant data matrix at the offsets specified in - * the viewport state. + * Copy data from the specified table into the resultant data matrix at the offsets specified in the viewport state. * - * @implNote This method populates an additional data column that maps each row back to the - * table they came from by it's table key, so that clients can map rows back into the - * tree structure. + * @implNote This method populates an additional data column that maps each row back to the table they came from by + * it's table key, so that clients can map rows back into the tree structure. * * @param usePrev if the snapshot should use previous values. * @param snapshotIndex An index containing the rows to copy from the source table. 
*/ - void addToSnapshot(boolean usePrev, Table table, Object tableKey, TableMap tableMap, - Index snapshotIndex) { - Assert.leq(copied + snapshotIndex.size(), "dataOffset + snapshotIndex.size()", - actualViewportSize, "viewport size"); + void addToSnapshot(boolean usePrev, Table table, Object tableKey, TableMap tableMap, Index snapshotIndex) { + Assert.leq(copied + snapshotIndex.size(), "dataOffset + snapshotIndex.size()", actualViewportSize, + "viewport size"); if (table.hasAttribute(Table.ROLLUP_LEAF_ATTRIBUTE) && includeConstituents) { addToSnapshotConstituent(usePrev, table, tableMap, snapshotIndex); @@ -216,31 +200,28 @@ void addToSnapshot(boolean usePrev, Table table, Object tableKey, TableMap table /** * Copy data directly from the table in question. */ - private void addToSnapshotNormal(boolean usePrev, Table table, TableMap tableMap, - Index snapshotIndex) { + private void addToSnapshotNormal(boolean usePrev, Table table, TableMap tableMap, Index snapshotIndex) { for (int ii = 0; ii < data.size(); ii++) { if (!columns.get(ii)) { continue; } final ColumnSource cs = table.getColumnSource(data.get(ii).first); - columnCopiers[ii].copy(usePrev, cs, snapshotIndex.iterator(), data.get(ii).second, - copied, table, tableMap, childPresenceColumn); + columnCopiers[ii].copy(usePrev, cs, snapshotIndex.iterator(), data.get(ii).second, copied, table, tableMap, + childPresenceColumn); } } /** - * Copy the data from the table in question, assuming that the table is a constituent table. - * This means that we need to copy the data to the alternate data set because column types may - * be reused or changed. + * Copy the data from the table in question, assuming that the table is a constituent table. This means that we need + * to copy the data to the alternate data set because column types may be reused or changed. 
*/ - private void addToSnapshotConstituent(boolean usePrev, Table table, TableMap tableMap, - Index snapshotIndex) { + private void addToSnapshotConstituent(boolean usePrev, Table table, TableMap tableMap, Index snapshotIndex) { includesLeafData = true; includedConstituentColumns.forEach(cn -> { final ColumnSource cs = table.getColumnSource(cn); - constituentCopiers.get(cn).copy(usePrev, cs, snapshotIndex.iterator(), - constituentData.get(cn).getSecond(), copied, table, tableMap, childPresenceColumn); + constituentCopiers.get(cn).copy(usePrev, cs, snapshotIndex.iterator(), constituentData.get(cn).getSecond(), + copied, table, tableMap, childPresenceColumn); }); final Map columnSourceMap = table.getColumnSourceMap(); @@ -248,28 +229,28 @@ private void addToSnapshotConstituent(boolean usePrev, Table table, TableMap tab for (int ii = 0; ii < data.size(); ii++) { final String columnName = data.get(ii).first; if (columns.get(ii) && - (((RollupInfo) info).getSelectColumnNames().contains(columnName) || - ColumnFormattingValues.isFormattingColumn(columnName))) { + (((RollupInfo) info).getSelectColumnNames().contains(columnName) || + ColumnFormattingValues.isFormattingColumn(columnName))) { - // In the case of constituent rows, because column types can change, we may have - // omitted formatting columns + // In the case of constituent rows, because column types can change, we may have omitted formatting + // columns // so we'll allow those to not exist and be null-filled final ColumnSource cs = columnSourceMap.get(columnName); if (cs != null) { - columnCopiers[ii].copy(usePrev, cs, snapshotIndex.iterator(), - data.get(ii).second, copied, table, tableMap, childPresenceColumn); + columnCopiers[ii].copy(usePrev, cs, snapshotIndex.iterator(), data.get(ii).second, copied, table, + tableMap, childPresenceColumn); } else if (!ColumnFormattingValues.isFormattingColumn(columnName)) { throw new UncheckedTableException( - "Column " + columnName + " does not exist. 
Available column names are [" + - StringUtils.joinStrings(columnSourceMap.keySet(), ",") + "]"); + "Column " + columnName + " does not exist. Available column names are [" + + StringUtils.joinStrings(columnSourceMap.keySet(), ",") + "]"); } } } } /** - * Allocate a type-correct matrix to store the resultant snapshot into. It will create Columns+1 - * columns to allow for the extra table-mapping column. + * Allocate a type-correct matrix to store the resultant snapshot into. It will create Columns+1 columns to allow + * for the extra table-mapping column. * * @param requestedViewportSize The total size of the snapshot to allocate. * @param rootData Any table in the tree, to read column types from @@ -278,8 +259,8 @@ private void addToSnapshotConstituent(boolean usePrev, Table table, TableMap tab */ private boolean ensureSpace(int requestedViewportSize, TableDetails rootData) { if (requestedViewportSize == actualViewportSize) { - // Since viewports will always be exactly sized, we don't need to iterate the arrays and - // null values out. Copiers will + // Since viewports will always be exactly sized, we don't need to iterate the arrays and null values out. + // Copiers will // always overwrite. return false; } @@ -288,29 +269,25 @@ private boolean ensureSpace(int requestedViewportSize, TableDetails rootData) { data.clear(); rootData.getTable().getDefinition().getColumnList() - .forEach(col -> data.add(makeData(col, requestedViewportSize))); + .forEach(col -> data.add(makeData(col, requestedViewportSize))); tableKeyColumn = new Object[requestedViewportSize]; childPresenceColumn = new BitSet(requestedViewportSize); if (includeConstituents) { - // TODO: With a little bit of creativity we can re-use the existing column array if the - // original and rolldup - // TODO: column types are the same. I will do this in a second pass, after this one - // works. 
+ // TODO: With a little bit of creativity we can re-use the existing column array if the original and rolldup + // TODO: column types are the same. I will do this in a second pass, after this one works. final Table originalTable = baseTable.getSourceTable(); constituentData.clear(); originalTable.getDefinition().getColumnList() - .forEach(col -> constituentData.put(col.getName(), - makeData(col, requestedViewportSize))); + .forEach(col -> constituentData.put(col.getName(), makeData(col, requestedViewportSize))); } return true; } /** - * Create a Pair containing the name and appropriately typed and sized array store for a - * particular column. + * Create a Pair containing the name and appropriately typed and sized array store for a particular column. * * @param col the column to allocate space for. * @param requestedViewportSize the requested viewport size. @@ -350,7 +327,7 @@ private Pair makeData(ColumnDefinition col, int requestedViewpor */ private void makeCopiers() { final ColumnSource[] columnSources = - baseTable.getColumnSources().toArray(ColumnSource.ZERO_LENGTH_COLUMN_SOURCE_ARRAY); + baseTable.getColumnSources().toArray(ColumnSource.ZERO_LENGTH_COLUMN_SOURCE_ARRAY); columnCopiers = new Copier[columnSources.length]; for (int ii = 0; ii < columnSources.length; ii++) { @@ -363,9 +340,9 @@ private void makeCopiers() { final List matchPairs = ((RollupInfo) info).getMatchPairs(); matchPairs.stream() - .filter(p -> originalTable.hasColumns(p.rightColumn)) - .forEach(col -> constituentCopiers.computeIfAbsent(col.rightColumn, - (name) -> makeCopier(originalTable.getColumn(name).getType()))); + .filter(p -> originalTable.hasColumns(p.rightColumn)) + .forEach(col -> constituentCopiers.computeIfAbsent(col.rightColumn, + (name) -> makeCopier(originalTable.getColumn(name).getType()))); } } @@ -377,13 +354,12 @@ private void makeCopiers() { */ private Copier makeChildCopier(Class type) { if (type == DBDateTime.class) { - return (usePrev, columnSource, it, target, 
offset, table, tableMap, - childPresenceColumn) -> { + return (usePrev, columnSource, it, target, offset, table, tableMap, childPresenceColumn) -> { Assert.neqNull(tableMap, "Child table map"); while (it.hasNext()) { final long next = it.nextLong(); - final DBDateTime keyVal = (DBDateTime) (usePrev ? columnSource.getPrev(next) - : columnSource.get(next)); + final DBDateTime keyVal = + (DBDateTime) (usePrev ? columnSource.getPrev(next) : columnSource.get(next)); final Table child = tableMap.get(keyVal); ((long[]) target)[offset] = keyVal.getNanos(); childPresenceColumn.set(offset++, child != null && child.size() > 0); @@ -392,91 +368,77 @@ private Copier makeChildCopier(Class type) { } else if (type == boolean.class || type == Boolean.class) { throw new UnsupportedOperationException("Booleans can't be used as child IDs"); } else if (type == long.class || type == Long.class) { - return (usePrev, columnSource, it, target, offset, table, tableMap, - childPresenceColumn) -> { + return (usePrev, columnSource, it, target, offset, table, tableMap, childPresenceColumn) -> { Assert.neqNull(tableMap, "Child table map"); while (it.hasNext()) { final long next = it.nextLong(); - final long keyVal = - usePrev ? columnSource.getPrevLong(next) : columnSource.getLong(next); + final long keyVal = usePrev ? columnSource.getPrevLong(next) : columnSource.getLong(next); final Table child = tableMap.get(keyVal); ((long[]) target)[offset] = keyVal; childPresenceColumn.set(offset++, child != null && child.size() > 0); } }; } else if (type == int.class || type == Integer.class) { - return (usePrev, columnSource, it, target, offset, table, tableMap, - childPresenceColumn) -> { + return (usePrev, columnSource, it, target, offset, table, tableMap, childPresenceColumn) -> { Assert.neqNull(tableMap, "Child table map"); while (it.hasNext()) { final long next = it.nextLong(); - final int keyVal = - usePrev ? columnSource.getPrevInt(next) : columnSource.getInt(next); + final int keyVal = usePrev ? 
columnSource.getPrevInt(next) : columnSource.getInt(next); final Table child = tableMap.get(keyVal); ((int[]) target)[offset] = keyVal; childPresenceColumn.set(offset++, child != null && child.size() > 0); } }; } else if (type == short.class || type == Short.class) { - return (usePrev, columnSource, it, target, offset, table, tableMap, - childPresenceColumn) -> { + return (usePrev, columnSource, it, target, offset, table, tableMap, childPresenceColumn) -> { Assert.neqNull(tableMap, "Child table map"); while (it.hasNext()) { final long next = it.nextLong(); - final short keyVal = - usePrev ? columnSource.getPrevShort(next) : columnSource.getShort(next); + final short keyVal = usePrev ? columnSource.getPrevShort(next) : columnSource.getShort(next); final Table child = tableMap.get(keyVal); ((short[]) target)[offset] = keyVal; childPresenceColumn.set(offset++, child != null && child.size() > 0); } }; } else if (type == byte.class || type == Byte.class) { - return (usePrev, columnSource, it, target, offset, table, tableMap, - childPresenceColumn) -> { + return (usePrev, columnSource, it, target, offset, table, tableMap, childPresenceColumn) -> { Assert.neqNull(tableMap, "Child table map"); while (it.hasNext()) { final long next = it.nextLong(); - final byte keyVal = - usePrev ? columnSource.getPrevByte(next) : columnSource.getByte(next); + final byte keyVal = usePrev ? columnSource.getPrevByte(next) : columnSource.getByte(next); final Table child = tableMap.get(keyVal); ((byte[]) target)[offset] = keyVal; childPresenceColumn.set(offset++, child != null && child.size() > 0); } }; } else if (type == char.class || type == Character.class) { - return (usePrev, columnSource, it, target, offset, table, tableMap, - childPresenceColumn) -> { + return (usePrev, columnSource, it, target, offset, table, tableMap, childPresenceColumn) -> { Assert.neqNull(tableMap, "Child table map"); while (it.hasNext()) { final long next = it.nextLong(); - final char keyVal = - usePrev ? 
columnSource.getPrevChar(next) : columnSource.getChar(next); + final char keyVal = usePrev ? columnSource.getPrevChar(next) : columnSource.getChar(next); final Table child = tableMap.get(keyVal); ((char[]) target)[offset] = keyVal; childPresenceColumn.set(offset++, child != null && child.size() > 0); } }; } else if (type == double.class || type == Double.class) { - return (usePrev, columnSource, it, target, offset, table, tableMap, - childPresenceColumn) -> { + return (usePrev, columnSource, it, target, offset, table, tableMap, childPresenceColumn) -> { Assert.neqNull(tableMap, "Child table map"); while (it.hasNext()) { final long next = it.nextLong(); - final double keyVal = - usePrev ? columnSource.getPrevDouble(next) : columnSource.getDouble(next); + final double keyVal = usePrev ? columnSource.getPrevDouble(next) : columnSource.getDouble(next); final Table child = tableMap.get(keyVal); ((double[]) target)[offset] = keyVal; childPresenceColumn.set(offset++, child != null && child.size() > 0); } }; } else if (type == float.class || type == Float.class) { - return (usePrev, columnSource, it, target, offset, table, tableMap, - childPresenceColumn) -> { + return (usePrev, columnSource, it, target, offset, table, tableMap, childPresenceColumn) -> { Assert.neqNull(tableMap, "Child table map"); while (it.hasNext()) { final long next = it.nextLong(); - final float keyVal = - usePrev ? columnSource.getPrevFloat(next) : columnSource.getFloat(next); + final float keyVal = usePrev ? 
columnSource.getPrevFloat(next) : columnSource.getFloat(next); final Table child = tableMap.get(keyVal); ((float[]) target)[offset] = keyVal; childPresenceColumn.set(offset++, child != null && child.size() > 0); @@ -484,8 +446,7 @@ private Copier makeChildCopier(Class type) { }; } - return (usePrev, columnSource, it, target, offset, table, tableMap, - childPresenceColumn) -> { + return (usePrev, columnSource, it, target, offset, table, tableMap, childPresenceColumn) -> { Assert.neqNull(tableMap, "Child table map"); while (it.hasNext()) { final long next = it.nextLong(); @@ -502,96 +463,80 @@ private Copier makeChildCopier(Class type) { */ private Copier makeCopier(Class type) { if (type == DBDateTime.class) { - return (usePrev, columnSource, it, target, offset, table, tableMap, - childPresenceColumn) -> { + return (usePrev, columnSource, it, target, offset, table, tableMap, childPresenceColumn) -> { while (it.hasNext()) { final long next = it.nextLong(); - final DBDateTime dbDateTime = (DBDateTime) (usePrev ? columnSource.getPrev(next) - : columnSource.get(next)); - ((long[]) target)[offset++] = - dbDateTime == null ? QueryConstants.NULL_LONG : dbDateTime.getNanos(); + final DBDateTime dbDateTime = + (DBDateTime) (usePrev ? columnSource.getPrev(next) : columnSource.get(next)); + ((long[]) target)[offset++] = dbDateTime == null ? QueryConstants.NULL_LONG : dbDateTime.getNanos(); } }; } else if (type == boolean.class || type == Boolean.class) { - return (usePrev, columnSource, it, target, offset, table, tableMap, - childPresenceColumn) -> { + return (usePrev, columnSource, it, target, offset, table, tableMap, childPresenceColumn) -> { while (it.hasNext()) { final long next = it.nextLong(); - ((byte[]) target)[offset++] = BooleanUtils.booleanAsByte( - (Boolean) (usePrev ? columnSource.getPrev(next) : columnSource.get(next))); + ((byte[]) target)[offset++] = BooleanUtils + .booleanAsByte((Boolean) (usePrev ? 
columnSource.getPrev(next) : columnSource.get(next))); } }; } else if (type == long.class || type == Long.class) { - return (usePrev, columnSource, it, target, offset, table, tableMap, - childPresenceColumn) -> { + return (usePrev, columnSource, it, target, offset, table, tableMap, childPresenceColumn) -> { while (it.hasNext()) { final long next = it.nextLong(); - ((long[]) target)[offset++] = - usePrev ? columnSource.getPrevLong(next) : columnSource.getLong(next); + ((long[]) target)[offset++] = usePrev ? columnSource.getPrevLong(next) : columnSource.getLong(next); } }; } else if (type == int.class || type == Integer.class) { - return (usePrev, columnSource, it, target, offset, table, tableMap, - childPresenceColumn) -> { + return (usePrev, columnSource, it, target, offset, table, tableMap, childPresenceColumn) -> { while (it.hasNext()) { final long next = it.nextLong(); - ((int[]) target)[offset++] = - usePrev ? columnSource.getPrevInt(next) : columnSource.getInt(next); + ((int[]) target)[offset++] = usePrev ? columnSource.getPrevInt(next) : columnSource.getInt(next); } }; } else if (type == short.class || type == Short.class) { - return (usePrev, columnSource, it, target, offset, table, tableMap, - childPresenceColumn) -> { + return (usePrev, columnSource, it, target, offset, table, tableMap, childPresenceColumn) -> { while (it.hasNext()) { final long next = it.nextLong(); ((short[]) target)[offset++] = - usePrev ? columnSource.getPrevShort(next) : columnSource.getShort(next); + usePrev ? columnSource.getPrevShort(next) : columnSource.getShort(next); } }; } else if (type == byte.class || type == Byte.class) { - return (usePrev, columnSource, it, target, offset, table, tableMap, - childPresenceColumn) -> { + return (usePrev, columnSource, it, target, offset, table, tableMap, childPresenceColumn) -> { while (it.hasNext()) { final long next = it.nextLong(); - ((byte[]) target)[offset++] = - usePrev ? 
columnSource.getPrevByte(next) : columnSource.getByte(next); + ((byte[]) target)[offset++] = usePrev ? columnSource.getPrevByte(next) : columnSource.getByte(next); } }; } else if (type == char.class || type == Character.class) { - return (usePrev, columnSource, it, target, offset, table, tableMap, - childPresenceColumn) -> { + return (usePrev, columnSource, it, target, offset, table, tableMap, childPresenceColumn) -> { while (it.hasNext()) { final long next = it.nextLong(); - ((char[]) target)[offset++] = - usePrev ? columnSource.getPrevChar(next) : columnSource.getChar(next); + ((char[]) target)[offset++] = usePrev ? columnSource.getPrevChar(next) : columnSource.getChar(next); } }; } else if (type == double.class || type == Double.class) { - return (usePrev, columnSource, it, target, offset, table, tableMap, - childPresenceColumn) -> { + return (usePrev, columnSource, it, target, offset, table, tableMap, childPresenceColumn) -> { while (it.hasNext()) { final long next = it.nextLong(); ((double[]) target)[offset++] = - usePrev ? columnSource.getPrevDouble(next) : columnSource.getDouble(next); + usePrev ? columnSource.getPrevDouble(next) : columnSource.getDouble(next); } }; } else if (type == float.class || type == Float.class) { - return (usePrev, columnSource, it, target, offset, table, tableMap, - childPresenceColumn) -> { + return (usePrev, columnSource, it, target, offset, table, tableMap, childPresenceColumn) -> { while (it.hasNext()) { final long next = it.nextLong(); ((float[]) target)[offset++] = - usePrev ? columnSource.getPrevFloat(next) : columnSource.getFloat(next); + usePrev ? columnSource.getPrevFloat(next) : columnSource.getFloat(next); } }; } - return (usePrev, columnSource, it, target, offset, table, tableMap, - childPresenceColumn) -> { + return (usePrev, columnSource, it, target, offset, table, tableMap, childPresenceColumn) -> { while (it.hasNext()) { final long next = it.nextLong(); - ((Object[]) target)[offset++] = - usePrev ? 
columnSource.getPrev(next) : columnSource.get(next); + ((Object[]) target)[offset++] = usePrev ? columnSource.getPrev(next) : columnSource.get(next); } }; } @@ -601,8 +546,8 @@ Object[] getDataMatrix() { } /** - * Get any required alternate data columns. This is happens when a rollup is showing constituent - * rows and those rows are part of the viewport. + * Get any required alternate data columns. This is happens when a rollup is showing constituent rows and those rows + * are part of the viewport. * */ Pair[] getRequiredConstituents() { @@ -612,7 +557,7 @@ Pair[] getRequiredConstituents() { // noinspection unchecked return includedConstituentColumns.stream() - .map(constituentData::get) - .toArray(Pair[]::new); + .map(constituentData::get) + .toArray(Pair[]::new); } } diff --git a/ClientSupport/src/main/java/io/deephaven/treetable/TableDetails.java b/ClientSupport/src/main/java/io/deephaven/treetable/TableDetails.java index 026b0afa492..af9eae3bf93 100644 --- a/ClientSupport/src/main/java/io/deephaven/treetable/TableDetails.java +++ b/ClientSupport/src/main/java/io/deephaven/treetable/TableDetails.java @@ -12,9 +12,9 @@ *

    * *

    - * When this structure is created, if the table id is included then the query will fetch the table - * from it's parent by the key, applying any filters and sorts required. Before data is returned to - * the client, the set of children is updated to reflect any changes in the table. + * When this structure is created, if the table id is included then the query will fetch the table from it's parent by + * the key, applying any filters and sorts required. Before data is returned to the client, the set of children is + * updated to reflect any changes in the table. *

    */ public class TableDetails implements Serializable { @@ -50,8 +50,7 @@ void setTable(Table table) { @Override public String toString() { - return "{key=" + key + ", children=" + children + ", table=" + table - + (removed ? ", REMOVED" : "") + "}"; + return "{key=" + key + ", children=" + children + ", table=" + table + (removed ? ", REMOVED" : "") + "}"; } public TableDetails copy() { diff --git a/ClientSupport/src/main/java/io/deephaven/treetable/TreeSnapshotQuery.java b/ClientSupport/src/main/java/io/deephaven/treetable/TreeSnapshotQuery.java index db4ae1d2ac3..c9d0abd34ea 100644 --- a/ClientSupport/src/main/java/io/deephaven/treetable/TreeSnapshotQuery.java +++ b/ClientSupport/src/main/java/io/deephaven/treetable/TreeSnapshotQuery.java @@ -16,11 +16,11 @@ import java.util.*; /** - * A query that fetches a flat viewport-ready snapshot of a tree table, taking into account the set - * of expanded rows at each level. + * A query that fetches a flat viewport-ready snapshot of a tree table, taking into account the set of expanded rows at + * each level. */ public class TreeSnapshotQuery> - implements Function.Unary { + implements Function.Unary { private final CLIENT_TYPE client; @@ -40,13 +40,11 @@ public enum Operation { } /** - * Construct a new query that will create a flat snapshot of the tree table using a flat - * viewport beginning at the specified rows and columns, applying the specified sorts and - * filters if required to fetch tables + * Construct a new query that will create a flat snapshot of the tree table using a flat viewport beginning at the + * specified rows and columns, applying the specified sorts and filters if required to fetch tables * * @param baseId The id of the base table to be used as a key to manage this client's state. - * @param tablesByKey The tables within the tree for which viewports are being tracked, - * separated by table key. 
+ * @param tablesByKey The tables within the tree for which viewports are being tracked, separated by table key. * @param firstRow The first row of the flat viewport * @param lastRow The last row of the flat viewport * @param columns The columns to include in the viewport @@ -56,13 +54,12 @@ public enum Operation { * @param includedOps The set of operations the client has performed since the last TSQ. */ public TreeSnapshotQuery(int baseId, Map tablesByKey, - long firstRow, long lastRow, BitSet columns, - @NotNull SelectFilter[] filters, @NotNull List sorts, - CLIENT_TYPE client, EnumSet includedOps) { + long firstRow, long lastRow, BitSet columns, + @NotNull SelectFilter[] filters, @NotNull List sorts, + CLIENT_TYPE client, EnumSet includedOps) { this.client = client; Assert.leq(firstRow, "firstRow", lastRow, "lastRow"); - Assert.leq(lastRow - firstRow, "lastRow - firstRow", Integer.MAX_VALUE, - "Integer.MAX_VALUE"); + Assert.leq(lastRow - firstRow, "lastRow - firstRow", Integer.MAX_VALUE, "Integer.MAX_VALUE"); this.tablesByKey = tablesByKey; firstViewportRow = firstRow; @@ -84,12 +81,10 @@ public TreeSnapshotResult call(Table arg) { final HierarchicalTableInfo sourceInfoAttr = ((HierarchicalTable) arg).getInfo(); if (sourceInfoAttr instanceof TreeTableInfo) { return new TreeTableSnapshotImpl<>(baseTableId, (HierarchicalTable) arg, tablesByKey, - firstViewportRow, lastViewportRow, columns, filters, directives, client, - includedOps).getSnapshot(); + firstViewportRow, lastViewportRow, columns, filters, directives, client, includedOps).getSnapshot(); } else if (sourceInfoAttr instanceof RollupInfo) { return new RollupSnapshotImpl<>(baseTableId, (HierarchicalTable) arg, tablesByKey, - firstViewportRow, lastViewportRow, columns, filters, directives, client, - includedOps).getSnapshot(); + firstViewportRow, lastViewportRow, columns, filters, directives, client, includedOps).getSnapshot(); } throw new IllegalStateException("Could not determine tree table type"); @@ 
-98,13 +93,12 @@ public TreeSnapshotResult call(Table arg) { @Override public String toString() { return "TreeSnapshotQuery{" + - "firstViewportRow=" + firstViewportRow + - ", lastViewportRow=" + lastViewportRow + - ", columns=" + (columns == null ? "(null)" : FormatBitSet.formatBitSetAsString(columns)) - + - ", filters=" + Arrays.toString(filters) + - ", directives=" + directives + - ", tablesByKey.size()=" + tablesByKey.size() + - '}'; + "firstViewportRow=" + firstViewportRow + + ", lastViewportRow=" + lastViewportRow + + ", columns=" + (columns == null ? "(null)" : FormatBitSet.formatBitSetAsString(columns)) + + ", filters=" + Arrays.toString(filters) + + ", directives=" + directives + + ", tablesByKey.size()=" + tablesByKey.size() + + '}'; } } diff --git a/ClientSupport/src/main/java/io/deephaven/treetable/TreeSnapshotResult.java b/ClientSupport/src/main/java/io/deephaven/treetable/TreeSnapshotResult.java index 9bc303c1a59..1f19e13df97 100644 --- a/ClientSupport/src/main/java/io/deephaven/treetable/TreeSnapshotResult.java +++ b/ClientSupport/src/main/java/io/deephaven/treetable/TreeSnapshotResult.java @@ -60,22 +60,21 @@ public class TreeSnapshotResult { * @param updatedSource An updated source table. Should be set to null if it hasn't changed. * @param treeSize The total size of tree taking into account currently expanded rows. * @param data An array of arrays containing each data column for the snapshot. - * @param tableData The list of {@link TableDetails} describing the state of the tree as of this - * TSQ. + * @param tableData The list of {@link TableDetails} describing the state of the tree as of this TSQ. * @param tableKeyColumn A column containing the table key of the parent table of each row. * @param childPresenceColumn A bitset where each bit represents if that row has children. * @param start The actual start of the snapshot in viewport coordinates. * @param end The actual end of the snapshot in viewport coordinates. 
*/ TreeSnapshotResult(@Nullable Table updatedSource, - long treeSize, - Object[] data, - TableDetails[] tableData, - Object[] tableKeyColumn, - BitSet childPresenceColumn, - long start, - long end, - Pair[] constituentData) { + long treeSize, + Object[] data, + TableDetails[] tableData, + Object[] tableKeyColumn, + BitSet childPresenceColumn, + long start, + long end, + Pair[] constituentData) { this.tableData = tableData; this.data = data; this.treeSize = treeSize; @@ -136,22 +135,18 @@ Table asTable(Table originalTree) { for (int i = 0; i < data.length; i++) { if (data[i] != null) { - final ColumnDefinition colDef = - originalTree.getDefinition().getColumn(columnNames.get(i)); + final ColumnDefinition colDef = originalTree.getDefinition().getColumn(columnNames.get(i)); // noinspection unchecked - sources.put(columnNames.get(i), - ArrayBackedColumnSource.getImmutableMemoryColumnSource(data[i], + sources.put(columnNames.get(i), ArrayBackedColumnSource.getImmutableMemoryColumnSource(data[i], colDef.getDataType(), colDef.getComponentType())); } } sources.put(TreeTableConstants.TABLE_KEY_COLUMN, - ArrayBackedColumnSource.getImmutableMemoryColumnSource(tableKeyColumn)); - sources.put(TreeTableConstants.CHILD_PRESENCE_COLUMN, - new BitSetColumnSource(childPresenceColumn)); + ArrayBackedColumnSource.getImmutableMemoryColumnSource(tableKeyColumn)); + sources.put(TreeTableConstants.CHILD_PRESENCE_COLUMN, new BitSetColumnSource(childPresenceColumn)); - return new QueryTable(Index.FACTORY.getFlatIndex((snapshotEnd - snapshotStart) + 1), - sources); + return new QueryTable(Index.FACTORY.getFlatIndex((snapshotEnd - snapshotStart) + 1), sources); } public Table getUpdatedSource() { @@ -172,9 +167,8 @@ public static class Descriptor { private final Pair[] constituentData; Descriptor(long treeSize, Object[] data, TableDetails[] tableData, long start, long end, - Object[] tableKeyColumn, - BitSet childPresenceColumn, Table updatedSource, - Pair[] constituentData) { + Object[] 
tableKeyColumn, + BitSet childPresenceColumn, Table updatedSource, Pair[] constituentData) { this.treeSize = treeSize; this.data = data; this.tableData = tableData; diff --git a/ClientSupport/src/main/java/io/deephaven/treetable/TreeTableClientTableManager.java b/ClientSupport/src/main/java/io/deephaven/treetable/TreeTableClientTableManager.java index 153a0263821..67f8e7d9020 100644 --- a/ClientSupport/src/main/java/io/deephaven/treetable/TreeTableClientTableManager.java +++ b/ClientSupport/src/main/java/io/deephaven/treetable/TreeTableClientTableManager.java @@ -18,16 +18,14 @@ /** * This class manages instances of client -> set *
    - * because hierarchical tables will apply sorting/filtering to each individual table. Instead of - * serializing and holding these references on the client, we will retain and manage their lifecycle - * here. + * because hierarchical tables will apply sorting/filtering to each individual table. Instead of serializing and holding + * these references on the client, we will retain and manage their lifecycle here. */ @SuppressWarnings("rawtypes") public enum TreeTableClientTableManager { DEFAULT; - // TODO (deephaven/deephaven-core/issues/37): Refine this type into something useful, or - // refactor entirely. + // TODO (deephaven/deephaven-core/issues/37): Refine this type into something useful, or refactor entirely. public interface Client> { void addDisconnectHandler(@NotNull Consumer handler); @@ -50,8 +48,7 @@ public int getIntKey(TreeState viewportState) { } private final Consumer DISCONNECT_HANDLER = this::release; - private final KeyedObjectHash statesByClient = - new KeyedObjectHash<>(new ClientStateKey()); + private final KeyedObjectHash statesByClient = new KeyedObjectHash<>(new ClientStateKey()); /** * The complete state of all trees for a particular client. 
@@ -60,8 +57,7 @@ public static class ClientState { final Client client; /** Map the original root table id, to the list of all child tables to retain */ - final KeyedIntObjectHash retentionMap = - new KeyedIntObjectHash<>(new ViewportStateKey()); + final KeyedIntObjectHash retentionMap = new KeyedIntObjectHash<>(new ViewportStateKey()); ClientState(Client clientId) { this.client = clientId; @@ -79,20 +75,20 @@ synchronized void releaseAll() { TreeState getTreeState(int baseTableId, Supplier userStateFactory) { final KeyedIntObjectHash.ValueFactory integerTreeStateValueFactory = - new KeyedIntObjectHash.ValueFactory.Strict() { - @Override - public TreeState newValue(int key) { - return new TreeState(key, userStateFactory.get()); - } - }; + new KeyedIntObjectHash.ValueFactory.Strict() { + @Override + public TreeState newValue(int key) { + return new TreeState(key, userStateFactory.get()); + } + }; return retentionMap.putIfAbsent(baseTableId, integerTreeStateValueFactory); } } /** - * The state of a single hierarchical table. This includes a unique {@link TableState} for each - * expanded row of the table. + * The state of a single hierarchical table. This includes a unique {@link TableState} for each expanded row of the + * table. */ public static class TreeState { final Map expandedTables = new HashMap<>(); @@ -115,7 +111,7 @@ synchronized void retain(Object key, Table table) { synchronized void releaseIf(Predicate test) { for (final Iterator> entryIterator = - expandedTables.entrySet().iterator(); entryIterator.hasNext();) { + expandedTables.entrySet().iterator(); entryIterator.hasNext();) { final Map.Entry entry = entryIterator.next(); if (test.test(entry.getKey())) { entry.getValue().release(); @@ -159,8 +155,8 @@ Table getTable() { * @return the {@link ClientState client state} for the specified client */ public ClientState get(Client client) { - // Note that putIfAbsent(K, Factory) is distinctly different in behavior from - // putIfAbsent(K,V). 
It will return + // Note that putIfAbsent(K, Factory) is distinctly different in behavior from putIfAbsent(K,V). It will + // return // the new value, or the existing value, it will not return null like putIfAbsent(K,V). return statesByClient.putIfAbsent(client, clt -> { // noinspection unchecked diff --git a/ClientSupport/src/main/java/io/deephaven/treetable/TreeTableConstants.java b/ClientSupport/src/main/java/io/deephaven/treetable/TreeTableConstants.java index c14bbb50d55..a2b5f2e83e1 100644 --- a/ClientSupport/src/main/java/io/deephaven/treetable/TreeTableConstants.java +++ b/ClientSupport/src/main/java/io/deephaven/treetable/TreeTableConstants.java @@ -8,8 +8,7 @@ public class TreeTableConstants { public static final String RE_TREE_KEY = "__RE_TREED__"; - // This is the key into the sources map of the synthetic column that identifies which tables own - // what rows. + // This is the key into the sources map of the synthetic column that identifies which tables own what rows. public static final String TABLE_KEY_COLUMN = "__TABLE_KEY__"; public static final String CHILD_PRESENCE_COLUMN = "__CHILD_PRESENCE__"; diff --git a/ClientSupport/src/main/java/io/deephaven/treetable/TreeTableSnapshotImpl.java b/ClientSupport/src/main/java/io/deephaven/treetable/TreeTableSnapshotImpl.java index fdb45c66adf..b30314316f6 100644 --- a/ClientSupport/src/main/java/io/deephaven/treetable/TreeTableSnapshotImpl.java +++ b/ClientSupport/src/main/java/io/deephaven/treetable/TreeTableSnapshotImpl.java @@ -18,9 +18,9 @@ import static io.deephaven.treetable.TreeTableConstants.ROOT_TABLE_KEY; class TreeTableSnapshotImpl> - extends AbstractTreeSnapshotImpl { - private static final boolean NODE_SORT_MODE = Configuration.getInstance() - .getBooleanWithDefault("TreeTableSnapshotImpl.sortAtNodes", true); + extends AbstractTreeSnapshotImpl { + private static final boolean NODE_SORT_MODE = + Configuration.getInstance().getBooleanWithDefault("TreeTableSnapshotImpl.sortAtNodes", true); private 
ReverseLookup masterRll; private TableMap masterTableMap; @@ -28,14 +28,12 @@ class TreeTableSnapshotImpl tablesByKey, - long firstRow, - long lastRow, - BitSet columns, - @NotNull SelectFilter[] filters, - @NotNull List sorts, - CLIENT_TYPE client, - Set includedOps) { - super(baseTableId, baseTable, tablesByKey, firstRow, lastRow, columns, filters, sorts, - client, includedOps); + HierarchicalTable baseTable, + Map tablesByKey, + long firstRow, + long lastRow, + BitSet columns, + @NotNull SelectFilter[] filters, + @NotNull List sorts, + CLIENT_TYPE client, + Set includedOps) { + super(baseTableId, baseTable, tablesByKey, firstRow, lastRow, columns, filters, sorts, client, includedOps); } @Override @@ -85,7 +82,7 @@ Table prepareRootTable() { if (reTreeRequired) { final HierarchicalTable reTreed = - (HierarchicalTable) TreeTableFilter.toTreeTable(prepared, baseTable); + (HierarchicalTable) TreeTableFilter.toTreeTable(prepared, baseTable); // We need to retain this reference or we will leak it. retainTable(RE_TREE_KEY, reTreed); @@ -107,8 +104,7 @@ Table prepareRootTable() { } masterRll = (ReverseLookup) treeForDisplay.getAttribute(Table.REVERSE_LOOKUP_ATTRIBUTE); - masterTableMap = - (TableMap) treeForDisplay.getAttribute(Table.HIERARCHICAL_CHILDREN_TABLE_MAP_ATTRIBUTE); + masterTableMap = (TableMap) treeForDisplay.getAttribute(Table.HIERARCHICAL_CHILDREN_TABLE_MAP_ATTRIBUTE); sourceTable = treeForDisplay.getSourceTable(); return prepared; @@ -141,8 +137,7 @@ Table prepareTableInternal(Table t) { @Override ReverseLookup getReverseLookup(Table t) { - final ReverseLookup tableRll = - (ReverseLookup) t.getAttribute(Table.REVERSE_LOOKUP_ATTRIBUTE); + final ReverseLookup tableRll = (ReverseLookup) t.getAttribute(Table.REVERSE_LOOKUP_ATTRIBUTE); return tableRll == null ? 
masterRll : tableRll; } @@ -157,10 +152,9 @@ boolean isKeyValid(boolean usePrev, Table t, long key) { } @Override - boolean verifyChild(TableDetails parentDetail, TableDetails childDetail, long childKeyPos, - boolean usePrev) { + boolean verifyChild(TableDetails parentDetail, TableDetails childDetail, long childKeyPos, boolean usePrev) { final Index parentIndex = parentDetail.getTable().getIndex(); return usePrev ? parentIndex.getPrevIndex().find(childKeyPos) >= 0 - : parentIndex.find(childKeyPos) >= 0; + : parentIndex.find(childKeyPos) >= 0; } } diff --git a/ClientSupport/src/test/java/io/deephaven/treetable/SnapshotStateTest.java b/ClientSupport/src/test/java/io/deephaven/treetable/SnapshotStateTest.java index 8a64d0234e1..1ccb8a5fd78 100644 --- a/ClientSupport/src/test/java/io/deephaven/treetable/SnapshotStateTest.java +++ b/ClientSupport/src/test/java/io/deephaven/treetable/SnapshotStateTest.java @@ -22,15 +22,12 @@ public class SnapshotStateTest extends QueryTableTestBase { private static Table getRawNyMunis() throws IOException { QueryLibrary.importStatic(TreeSnapshotQueryTest.StaticHolder.class); - final BaseTable base = (BaseTable) TableTools - .readCsv(TreeSnapshotQueryTest.class.getResourceAsStream("nymunis.csv")); + final BaseTable base = + (BaseTable) TableTools.readCsv(TreeSnapshotQueryTest.class.getResourceAsStream("nymunis.csv")); base.setRefreshing(true); - return base - .update( - "Path=(List)removeEmpty(County_Name, City_Name, Town_Name, Village_Name)") - .update( - "Direct = Path.size() == 1 ? null : new ArrayList(Path.subList(0, Path.size() - 1))") - .lastBy("Path"); + return base.update("Path=(List)removeEmpty(County_Name, City_Name, Town_Name, Village_Name)") + .update("Direct = Path.size() == 1 ? 
null : new ArrayList(Path.subList(0, Path.size() - 1))") + .lastBy("Path"); } private static Table makeNyMunisTreeTableFrom(Table t) { @@ -137,8 +134,7 @@ public void testBounds() throws IOException { assertEquals(0, state.tableKeyColumn.length); } - private void addTable(Map details, String key, String parentKey, - int size) { + private void addTable(Map details, String key, String parentKey, int size) { final TableDetails d = new TableDetails(key, new HashSet<>()); d.setTable(TableTools.emptyTable(size)); details.put(key, d); diff --git a/ClientSupport/src/test/java/io/deephaven/treetable/TreeSnapshotQueryTest.java b/ClientSupport/src/test/java/io/deephaven/treetable/TreeSnapshotQueryTest.java index ddccb80e588..546dd186596 100644 --- a/ClientSupport/src/test/java/io/deephaven/treetable/TreeSnapshotQueryTest.java +++ b/ClientSupport/src/test/java/io/deephaven/treetable/TreeSnapshotQueryTest.java @@ -37,8 +37,8 @@ public class TreeSnapshotQueryTest extends QueryTableTestBase { private TreeTableClientTableManager.Client mockClient; /** - * Since we're not using a remote client we need to provide TSQ with a way to map Table Ids to - * tables so we'll just assign them and pass this thing in as the ExportedObjectClient + * Since we're not using a remote client we need to provide TSQ with a way to map Table Ids to tables so we'll just + * assign them and pass this thing in as the ExportedObjectClient */ private final TIntObjectMap
    tableIdMap = new TIntObjectHashMap<>(); private final TObjectIntHashMap
    reverseTableIdMap = new TObjectIntHashMap<>(); @@ -77,9 +77,9 @@ protected void setUp() throws Exception { private Map makeDetailsMap(Collection details) { return details.stream() - .map(TableDetails::copy) - .collect(Collectors.toMap(TableDetails::getKey, java.util.function.Function.identity(), - (u, v) -> u)); + .map(TableDetails::copy) + .collect(Collectors.toMap(TableDetails::getKey, java.util.function.Function.identity(), + (u, v) -> u)); } private class TTState { @@ -98,31 +98,27 @@ private class TTState { /** We need to do simple expansion tracking. */ Map expansionMap = new HashMap<>(); - EnumSet ops = - EnumSet.noneOf(TreeSnapshotQuery.Operation.class); + EnumSet ops = EnumSet.noneOf(TreeSnapshotQuery.Operation.class); Map constituentSources = new HashMap<>(); TTState(Table theTree) { this.theTree = (HierarchicalTable) theTree; - expansionMap.put(ROOT_TABLE_KEY, - new TableDetails(ROOT_TABLE_KEY, Collections.emptySet())); + expansionMap.put(ROOT_TABLE_KEY, new TableDetails(ROOT_TABLE_KEY, Collections.emptySet())); - final TableMap sourceMap = - (TableMap) theTree.getAttribute(Table.HIERARCHICAL_CHILDREN_TABLE_MAP_ATTRIBUTE); + final TableMap sourceMap = (TableMap) theTree.getAttribute(Table.HIERARCHICAL_CHILDREN_TABLE_MAP_ATTRIBUTE); if (sourceMap != null) { for (final Table t : sourceMap.values()) { addIdForTable(t); } } - final HierarchicalTableInfo info = (HierarchicalTableInfo) theTree - .getAttribute(Table.HIERARCHICAL_SOURCE_INFO_ATTRIBUTE); + final HierarchicalTableInfo info = + (HierarchicalTableInfo) theTree.getAttribute(Table.HIERARCHICAL_SOURCE_INFO_ATTRIBUTE); rollup = info instanceof RollupInfo; this.hierarchicalColumn = info.getHierarchicalColumnName(); } - void applyTsq(BitSet columns, long start, long end, SelectFilter[] filters, - List sorts) { + void applyTsq(BitSet columns, long start, long end, SelectFilter[] filters, List sorts) { if (filters.length > 0) { ops.add(TreeSnapshotQuery.Operation.FilterChanged); } @@ -131,9 +127,8 @@ 
void applyTsq(BitSet columns, long start, long end, SelectFilter[] filters, ops.add(TreeSnapshotQuery.Operation.SortChanged); } - final TreeSnapshotQuery tsq = - new TreeSnapshotQuery(getIdForTable(theTree), makeDetailsMap(expansionMap.values()), - start, end, columns, filters, sorts, mockClient, ops); + final TreeSnapshotQuery tsq = new TreeSnapshotQuery(getIdForTable(theTree), + makeDetailsMap(expansionMap.values()), start, end, columns, filters, sorts, mockClient, ops); result = theTree.apply(tsq); snapshot = result.asTable(theTree); @@ -141,21 +136,20 @@ void applyTsq(BitSet columns, long start, long end, SelectFilter[] filters, constituentSources.clear(); if (result.getConstituentData() != null && result.getConstituentData().length > 0) { Arrays.stream(result.getConstituentData()) - .forEach(p -> constituentSources.put(p.getFirst(), - makeConstituentColumnSource(p.getFirst(), p.getSecond()))); + .forEach(p -> constituentSources.put(p.getFirst(), + makeConstituentColumnSource(p.getFirst(), p.getSecond()))); } - expansionMap = result.getTableData().stream() - .collect(Collectors.toMap(TableDetails::getKey, Function.identity())); + expansionMap = + result.getTableData().stream().collect(Collectors.toMap(TableDetails::getKey, Function.identity())); ops.clear(); } private ColumnSource makeConstituentColumnSource(String name, Object array) { - final ColumnDefinition colDef = - theTree.getSourceTable().getDefinition().getColumn(name); + final ColumnDefinition colDef = theTree.getSourceTable().getDefinition().getColumn(name); // noinspection unchecked - return ArrayBackedColumnSource.getImmutableMemoryColumnSource(array, - colDef.getDataType(), colDef.getComponentType()); + return ArrayBackedColumnSource.getImmutableMemoryColumnSource(array, colDef.getDataType(), + colDef.getComponentType()); } void setCompareToTable(Table compareTo) { @@ -164,51 +158,45 @@ void setCompareToTable(Table compareTo) { void addExpanded(Object parentKey, Object childKey) { 
expansionMap.get(parentKey).getChildren().add(childKey); - expansionMap.computeIfAbsent(childKey, - k -> new TableDetails(k, Collections.emptySet())); + expansionMap.computeIfAbsent(childKey, k -> new TableDetails(k, Collections.emptySet())); ops.add(TreeSnapshotQuery.Operation.Expand); } void updateTableIds() { - expansionMap = - expansionMap.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, + expansionMap = expansionMap.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, td -> new TableDetails(td.getValue().getKey(), td.getValue().getChildren()))); } TableMap getTableMap(Table table) { return (TableMap) (rollup ? table : forComparisons != null ? forComparisons : theTree) - .getAttribute(Table.HIERARCHICAL_CHILDREN_TABLE_MAP_ATTRIBUTE); + .getAttribute(Table.HIERARCHICAL_CHILDREN_TABLE_MAP_ATTRIBUTE); } } public static class StaticHolder { @ReflexiveUse(referrers = "QueryLibrary") public static List removeEmpty(String... components) { - return Arrays.stream(components).filter(s -> s != null && !s.isEmpty()) - .collect(Collectors.toList()); + return Arrays.stream(components).filter(s -> s != null && !s.isEmpty()).collect(Collectors.toList()); } } private static Table getRawNyMunis() throws IOException { QueryLibrary.importStatic(StaticHolder.class); - final BaseTable base = (BaseTable) TableTools - .readCsv(TreeSnapshotQueryTest.class.getResourceAsStream("nymunis.csv")); + final BaseTable base = + (BaseTable) TableTools.readCsv(TreeSnapshotQueryTest.class.getResourceAsStream("nymunis.csv")); base.setRefreshing(true); - return base - .update( - "Path=(List)removeEmpty(County_Name, City_Name, Town_Name, Village_Name)") - .update( - "Direct = Path.size() == 1 ? null : new ArrayList(Path.subList(0, Path.size() - 1))") - .update("boolCol=(boolean)(i%2==0)", - "byteCol = (byte)(i & 0xFF)", - "charCol=(char)(isNull(Town_Name) ? 
null : Town_Name.charAt(0))", - "doubleCol=(double)i/33.2", - "floatCol=(float)(i/22.1)", - "longCol=(long)i", - "shortCol=(short)i", - "timestamp='2018-12-10 NY'") - .lastBy("Path"); + return base.update("Path=(List)removeEmpty(County_Name, City_Name, Town_Name, Village_Name)") + .update("Direct = Path.size() == 1 ? null : new ArrayList(Path.subList(0, Path.size() - 1))") + .update("boolCol=(boolean)(i%2==0)", + "byteCol = (byte)(i & 0xFF)", + "charCol=(char)(isNull(Town_Name) ? null : Town_Name.charAt(0))", + "doubleCol=(double)i/33.2", + "floatCol=(float)(i/22.1)", + "longCol=(long)i", + "shortCol=(short)i", + "timestamp='2018-12-10 NY'") + .lastBy("Path"); } private static Table makeNyMunisTreeTable() throws IOException { @@ -379,8 +367,7 @@ public void testTsq() throws IOException { testViewport(state, 0, 32, allColumns, false); - // At this point we're done expanding stuff, lets set the table IDs to test those paths in - // TSQ + // At this point we're done expanding stuff, lets set the table IDs to test those paths in TSQ state.updateTableIds(); // Root, to first of subchild @@ -453,10 +440,8 @@ public void testTsq() throws IOException { assertFalse(state.expansionMap.containsKey(foodvilleKey)); assertFalse(state.expansionMap.containsKey(bacontownKey)); - // We'll delete a child key so that a child table becomes empty. TSQ should eliminate it - // from the set. - final DynamicTable source = - (DynamicTable) t.getAttribute(Table.HIERARCHICAL_SOURCE_TABLE_ATTRIBUTE); + // We'll delete a child key so that a child table becomes empty. TSQ should eliminate it from the set. 
+ final DynamicTable source = (DynamicTable) t.getAttribute(Table.HIERARCHICAL_SOURCE_TABLE_ATTRIBUTE); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { TstUtils.removeRows(source, i(467)); source.notifyListeners(i(), i(467), i()); @@ -480,46 +465,46 @@ public void testSortandFilter() throws IOException { state.addExpanded(munisKey("Fulton"), munisKey("Fulton", "Mayfield")); final Table sortThenTree = getRawNyMunis() - .sort("Town_Name") - .sortDescending("Type") - .treeTable("Path", "Direct"); + .sort("Town_Name") + .sortDescending("Type") + .treeTable("Path", "Direct"); state.setCompareToTable(sortThenTree); List directives = Arrays.asList( - new SortDirective("Type", SortDirective.DESCENDING, false), - new SortDirective("Town_Name", SortDirective.ASCENDING, false)); + new SortDirective("Type", SortDirective.DESCENDING, false), + new SortDirective("Town_Name", SortDirective.ASCENDING, false)); testViewportAgainst(sortThenTree, state, 7, 51, allColumns, directives, - SelectFilter.ZERO_LENGTH_SELECT_FILTER_ARRAY, true); + SelectFilter.ZERO_LENGTH_SELECT_FILTER_ARRAY, true); final Table filterThenTree = TreeTableFilter.filterTree(makeNyMunisTreeTable(), - "!isNull(Website) && Website.contains(`ny`)"); + "!isNull(Website) && Website.contains(`ny`)"); state.setCompareToTable(filterThenTree); testViewportAgainst(filterThenTree, state, 0, 30, allColumns, Collections.emptyList(), - SelectFilterFactory.getExpressions("!isNull(Website) && Website.contains(`ny`)"), true); + SelectFilterFactory.getExpressions("!isNull(Website) && Website.contains(`ny`)"), true); - // Deliberately sorting first than filtering, it will produce the same result and is a - // different order than TSQ does, + // Deliberately sorting first than filtering, it will produce the same result and is a different order than TSQ + // does, // so validates things nicely. 
final Table filterAndSortThenTree = TreeTableFilter.filterTree(getRawNyMunis() - .sortDescending("County_Name", "Town_Name") - .treeTable("Path", "Direct"), - "!isNull(Website) && Website.contains(`ny`)"); + .sortDescending("County_Name", "Town_Name") + .treeTable("Path", "Direct"), + "!isNull(Website) && Website.contains(`ny`)"); state.setCompareToTable(filterAndSortThenTree); directives = Arrays.asList( - new SortDirective("County_Name", SortDirective.DESCENDING, false), - new SortDirective("Town_Name", SortDirective.DESCENDING, false)); + new SortDirective("County_Name", SortDirective.DESCENDING, false), + new SortDirective("Town_Name", SortDirective.DESCENDING, false)); testViewportAgainst(filterAndSortThenTree, state, 37, 63, allColumns, directives, - SelectFilterFactory.getExpressions("!isNull(Website) && Website.contains(`ny`)"), true); + SelectFilterFactory.getExpressions("!isNull(Website) && Website.contains(`ny`)"), true); state.setCompareToTable(null); } public void testRollupTsq() { final Table t = TableTools.emptyTable(100) - .update("I=i", "Test=i%12", "Dtest=44.6*i/2", "Bagel= i%2==0"); + .update("I=i", "Test=i%12", "Dtest=44.6*i/2", "Bagel= i%2==0"); final Table rollup = t.rollup(AggCombo(AggLast("Dtest"), AggSum("I")), "Bagel", "Test"); @@ -533,44 +518,37 @@ public void testRollupTsq() { state.addExpanded(nullSmartKey, true); testViewport(state, 0, 14, allColumns, true); - final Table filtered = - t.where("Bagel").rollup(AggCombo(AggLast("Dtest"), AggSum("I")), "Bagel", "Test"); + final Table filtered = t.where("Bagel").rollup(AggCombo(AggLast("Dtest"), AggSum("I")), "Bagel", "Test"); testViewportAgainst(filtered, state, 0, 7, allColumns, Collections.emptyList(), - SelectFilterFactory.getExpressions("Bagel"), true); + SelectFilterFactory.getExpressions("Bagel"), true); final List directives = Arrays.asList( - new SortDirective("Test", SortDirective.DESCENDING, false), - new SortDirective("Bagel", SortDirective.ASCENDING, false)); + new 
SortDirective("Test", SortDirective.DESCENDING, false), + new SortDirective("Bagel", SortDirective.ASCENDING, false)); state.addExpanded(nullSmartKey, false); - testViewportAgainst(rollup, state, 0, 14, allColumns, directives, - SelectFilter.ZERO_LENGTH_SELECT_FILTER_ARRAY, - ct -> { - final Table sortTarget = - ct instanceof HierarchicalTable ? ((HierarchicalTable) ct).getRawRootTable() - : ct; - return sortTarget.sort("Bagel").sortDescending("Test"); - }, true); - - final Table filtered2 = - t.where("Bagel").rollup(AggCombo(AggLast("Dtest"), AggSum("I")), "Bagel", "Test"); - - testViewportAgainst(filtered2, state, 0, 7, allColumns, directives, - SelectFilterFactory.getExpressions("Bagel"), - ct -> { - final Table sortTarget = - ct instanceof HierarchicalTable ? ((HierarchicalTable) ct).getRawRootTable() - : ct; - return sortTarget.sort("Bagel").sortDescending("Test"); - }, true); + testViewportAgainst(rollup, state, 0, 14, allColumns, directives, SelectFilter.ZERO_LENGTH_SELECT_FILTER_ARRAY, + ct -> { + final Table sortTarget = + ct instanceof HierarchicalTable ? ((HierarchicalTable) ct).getRawRootTable() : ct; + return sortTarget.sort("Bagel").sortDescending("Test"); + }, true); + + final Table filtered2 = t.where("Bagel").rollup(AggCombo(AggLast("Dtest"), AggSum("I")), "Bagel", "Test"); + + testViewportAgainst(filtered2, state, 0, 7, allColumns, directives, SelectFilterFactory.getExpressions("Bagel"), + ct -> { + final Table sortTarget = + ct instanceof HierarchicalTable ? 
((HierarchicalTable) ct).getRawRootTable() : ct; + return sortTarget.sort("Bagel").sortDescending("Test"); + }, true); } public void testRollupConstituentsTsq() { final Table t = TableTools.emptyTable(100) - .update("I=i", "Test=i%12", "Dtest=44.6*i/2", "Bagel= i%2==0"); + .update("I=i", "Test=i%12", "Dtest=44.6*i/2", "Bagel= i%2==0"); - final Table rollup = - t.rollup(AggCombo(AggLast("Dtest"), AggSum("I")), true, "Bagel", "Test"); + final Table rollup = t.rollup(AggCombo(AggLast("Dtest"), AggSum("I")), true, "Bagel", "Test"); final TTState state = new TTState(rollup); final BitSet allColumns = new BitSet(rollup.getColumns().length); @@ -582,53 +560,44 @@ public void testRollupConstituentsTsq() { state.addExpanded(nullSmartKey, true); testViewport(state, 0, 14, allColumns, true); - final Table filtered = - t.where("Bagel").rollup(AggCombo(AggLast("Dtest"), AggSum("I")), true, "Bagel", "Test"); + final Table filtered = t.where("Bagel").rollup(AggCombo(AggLast("Dtest"), AggSum("I")), true, "Bagel", "Test"); testViewportAgainst(filtered, state, 0, 7, allColumns, Collections.emptyList(), - SelectFilterFactory.getExpressions("Bagel"), true); + SelectFilterFactory.getExpressions("Bagel"), true); final List directives = Arrays.asList( - new SortDirective("Test", SortDirective.DESCENDING, false), - new SortDirective("Bagel", SortDirective.ASCENDING, false)); + new SortDirective("Test", SortDirective.DESCENDING, false), + new SortDirective("Bagel", SortDirective.ASCENDING, false)); state.addExpanded(nullSmartKey, false); - testViewportAgainst(rollup, state, 0, 14, allColumns, directives, - SelectFilter.ZERO_LENGTH_SELECT_FILTER_ARRAY, - ct -> { - final Table sortTarget = - ct instanceof HierarchicalTable ? 
((HierarchicalTable) ct).getRawRootTable() - : ct; - return sortTarget.sort("Bagel").sortDescending("Test"); - }, true); - - testViewportAgainst(filtered, state, 0, 7, allColumns, directives, - SelectFilterFactory.getExpressions("Bagel"), - ct -> { - final Table sortTarget = - ct instanceof HierarchicalTable ? ((HierarchicalTable) ct).getRawRootTable() - : ct; - return sortTarget.sort("Bagel").sortDescending("Test"); - }, true); + testViewportAgainst(rollup, state, 0, 14, allColumns, directives, SelectFilter.ZERO_LENGTH_SELECT_FILTER_ARRAY, + ct -> { + final Table sortTarget = + ct instanceof HierarchicalTable ? ((HierarchicalTable) ct).getRawRootTable() : ct; + return sortTarget.sort("Bagel").sortDescending("Test"); + }, true); + + testViewportAgainst(filtered, state, 0, 7, allColumns, directives, SelectFilterFactory.getExpressions("Bagel"), + ct -> { + final Table sortTarget = + ct instanceof HierarchicalTable ? ((HierarchicalTable) ct).getRawRootTable() : ct; + return sortTarget.sort("Bagel").sortDescending("Test"); + }, true); state.addExpanded(true, new SmartKey(true, 0)); - testViewportAgainst(rollup, state, 0, 17, allColumns, directives, - SelectFilter.ZERO_LENGTH_SELECT_FILTER_ARRAY, - ct -> { - final Table sortTarget = - ct instanceof HierarchicalTable ? ((HierarchicalTable) ct).getRawRootTable() - : ct; - return sortTarget.sort("Bagel").sortDescending("Test"); - }, true); + testViewportAgainst(rollup, state, 0, 17, allColumns, directives, SelectFilter.ZERO_LENGTH_SELECT_FILTER_ARRAY, + ct -> { + final Table sortTarget = + ct instanceof HierarchicalTable ? ((HierarchicalTable) ct).getRawRootTable() : ct; + return sortTarget.sort("Bagel").sortDescending("Test"); + }, true); state.addExpanded(true, new SmartKey(true, 6)); - testViewportAgainst(filtered, state, 0, 24, allColumns, directives, - SelectFilterFactory.getExpressions("Bagel"), - ct -> { - final Table sortTarget = - ct instanceof HierarchicalTable ? 
((HierarchicalTable) ct).getRawRootTable() - : ct; - return sortTarget.sort("Bagel").sortDescending("Test"); - }, true); + testViewportAgainst(filtered, state, 0, 24, allColumns, directives, SelectFilterFactory.getExpressions("Bagel"), + ct -> { + final Table sortTarget = + ct instanceof HierarchicalTable ? ((HierarchicalTable) ct).getRawRootTable() : ct; + return sortTarget.sort("Bagel").sortDescending("Test"); + }, true); } public void testGetPrev() throws Exception { @@ -654,13 +623,12 @@ public void testGetPrev() throws Exception { ((DynamicTable) raw).notifyListeners(i(), i(475), i()); // Fetch current data while it is concurrently updating - final Future currentFetch = - pool.submit(() -> testViewport(state, 0, halfTableSize, allColumns, true)); + final Future currentFetch = pool.submit(() -> testViewport(state, 0, halfTableSize, allColumns, true)); // Flush the changes final AtomicBoolean done = new AtomicBoolean(false); - final Runnable awaitFlushJob = LiveTableMonitor.DEFAULT - .flushAllNormalNotificationsForUnitTests(done::get, 60_000L); + final Runnable awaitFlushJob = + LiveTableMonitor.DEFAULT.flushAllNormalNotificationsForUnitTests(done::get, 60_000L); // Fetch current data currentFetch.get(); @@ -681,9 +649,9 @@ public void testGetPrev() throws Exception { public void testArbitraryReparent_IDS6404() { // Build a simple tree based upon a directory hierarchy final QueryTable dataTable = TstUtils.testRefreshingTable( - c("ID", "Root", "0", "1", "2", "0-0", "1-0", "2-0", "0-0-0", "1-0-0", "2-0-0"), - c("Parent", null, "Root", "Root", "Root", "0", "1", "2", "0-0", "1-0", "2-0"), - c("Name", "Root", "0", "1", "2", "0-0", "1-0", "2-0", "0-0-0", "1-0-0", "2-0-0")); + c("ID", "Root", "0", "1", "2", "0-0", "1-0", "2-0", "0-0-0", "1-0-0", "2-0-0"), + c("Parent", null, "Root", "Root", "Root", "0", "1", "2", "0-0", "1-0", "2-0"), + c("Name", "Root", "0", "1", "2", "0-0", "1-0", "2-0", "0-0-0", "1-0-0", "2-0-0")); final Table lastBy = dataTable.lastBy("ID"); final 
Table tree = lastBy.treeTable("ID", "Parent"); @@ -732,21 +700,18 @@ public void testArbitraryReparent_IDS6404() { // region Hierarchical verification - private static void testViewport(TTState state, long start, long end, BitSet columns, - boolean showAfter) { + private static void testViewport(TTState state, long start, long end, BitSet columns, boolean showAfter) { testViewportAgainst(state.theTree, state, start, end, columns, Collections.emptyList(), - SelectFilter.ZERO_LENGTH_SELECT_FILTER_ARRAY, showAfter); + SelectFilter.ZERO_LENGTH_SELECT_FILTER_ARRAY, showAfter); } - private static void testViewportAgainst(Table against, TTState state, long start, long end, - BitSet columns, List sorts, SelectFilter[] filters, boolean showAfter) { - testViewportAgainst(against, state, start, end, columns, sorts, filters, - Function.identity(), showAfter); + private static void testViewportAgainst(Table against, TTState state, long start, long end, BitSet columns, + List sorts, SelectFilter[] filters, boolean showAfter) { + testViewportAgainst(against, state, start, end, columns, sorts, filters, Function.identity(), showAfter); } - private static void testViewportAgainst(Table against, TTState state, long start, long end, - BitSet columns, List sorts, SelectFilter[] filters, - Function childMutator, boolean showAfter) { + private static void testViewportAgainst(Table against, TTState state, long start, long end, BitSet columns, + List sorts, SelectFilter[] filters, Function childMutator, boolean showAfter) { state.applyTsq(columns, start, end, filters, sorts); if (showAfter) { @@ -763,52 +728,44 @@ private static class TraversalState { } private static void checkSnapshotAgainst(TTState state, Table against, long start, long end, - Function childMutator) { + Function childMutator) { assertEquals(state.result.getSnapshotStart(), start); assertEquals(state.result.getSnapshotEnd(), end); final TraversalState ts = new TraversalState(); - checkSnapshotAgainst(state, 
childMutator.apply(against), ROOT_TABLE_KEY, start, end, ts, - childMutator); + checkSnapshotAgainst(state, childMutator.apply(against), ROOT_TABLE_KEY, start, end, ts, childMutator); assertEquals(ts.traversed, state.result.getTreeSize()); assertEquals(ts.consumed, end - start + 1); } - private static void checkSnapshotAgainst(TTState state, Table currentTable, Object tableKey, - long start, long end, TraversalState ts, Function childMutator) { + private static void checkSnapshotAgainst(TTState state, Table currentTable, Object tableKey, long start, long end, + TraversalState ts, Function childMutator) { final boolean usePrev = LogicalClock.DEFAULT.currentState() == LogicalClock.State.Updating - && ((NotificationStepSource) state.theTree.getSourceTable()) - .getLastNotificationStep() != LogicalClock.DEFAULT.currentStep(); + && ((NotificationStepSource) state.theTree.getSourceTable()) + .getLastNotificationStep() != LogicalClock.DEFAULT.currentStep(); for (int rowNo = 0; rowNo < currentTable.size(); rowNo++) { - final long tableRow = usePrev ? currentTable.getIndex().getPrev(rowNo) - : currentTable.getIndex().get(rowNo); + final long tableRow = usePrev ? currentTable.getIndex().getPrev(rowNo) : currentTable.getIndex().get(rowNo); final ColumnSource childSource = currentTable.getColumnSource(state.hierarchicalColumn); - final Object childKey = - usePrev ? childSource.getPrev(tableRow) : childSource.get(tableRow); + final Object childKey = usePrev ? 
childSource.getPrev(tableRow) : childSource.get(tableRow); final TableMap childMap = state.getTableMap(currentTable); if (ts.traversed >= start && ts.traversed <= end) { - final Object[] record = state.snapshot.getRecord(ts.consumed, - currentTable.getDefinition().getColumnNamesArray()); + final Object[] record = + state.snapshot.getRecord(ts.consumed, currentTable.getDefinition().getColumnNamesArray()); final Pair[] constituentData = state.result.getConstituentData(); - if (constituentData != null - && currentTable.hasAttribute(Table.ROLLUP_LEAF_ATTRIBUTE)) { + if (constituentData != null && currentTable.hasAttribute(Table.ROLLUP_LEAF_ATTRIBUTE)) { for (int i = 0; i < constituentData.length; i++) { - final int colIndex = currentTable.getDefinition().getColumnNames() - .indexOf(constituentData[i].first); - record[colIndex] = - state.constituentSources.get(constituentData[i].first).get(ts.consumed); + final int colIndex = + currentTable.getDefinition().getColumnNames().indexOf(constituentData[i].first); + record[colIndex] = state.constituentSources.get(constituentData[i].first).get(ts.consumed); } } assertArrayEquals(getRecord(currentTable, tableRow, usePrev), record); - assertEquals( - childKey != null && childMap.get(childKey) != null - && !childMap.get(childKey).isEmpty(), - state.snapshot.getColumnSource(CHILD_PRESENCE_COLUMN).get(ts.consumed)); - assertEquals(tableKey, - state.snapshot.getColumnSource(TABLE_KEY_COLUMN).get(ts.consumed)); + assertEquals(childKey != null && childMap.get(childKey) != null && !childMap.get(childKey).isEmpty(), + state.snapshot.getColumnSource(CHILD_PRESENCE_COLUMN).get(ts.consumed)); + assertEquals(tableKey, state.snapshot.getColumnSource(TABLE_KEY_COLUMN).get(ts.consumed)); ts.consumed++; } @@ -817,8 +774,8 @@ private static void checkSnapshotAgainst(TTState state, Table currentTable, Obje if (childKey != null) { final TableDetails childDetails = state.expansionMap.get(childKey); if (childDetails != null) { - 
checkSnapshotAgainst(state, childMutator.apply(childMap.get(childKey)), - childKey, start, end, ts, childMutator); + checkSnapshotAgainst(state, childMutator.apply(childMap.get(childKey)), childKey, start, end, ts, + childMutator); } } } diff --git a/ClientSupport/src/test/java/io/deephaven/treetable/TreeTableClientTableManagerTest.java b/ClientSupport/src/test/java/io/deephaven/treetable/TreeTableClientTableManagerTest.java index 3dd5600fdd3..1d5e9b22774 100644 --- a/ClientSupport/src/test/java/io/deephaven/treetable/TreeTableClientTableManagerTest.java +++ b/ClientSupport/src/test/java/io/deephaven/treetable/TreeTableClientTableManagerTest.java @@ -37,8 +37,7 @@ static class DelayingReleaseProxy implements InvocationHandler { } @Override - public Object invoke(Object proxy, Method method, Object[] args) - throws InterruptedException { + public Object invoke(Object proxy, Method method, Object[] args) throws InterruptedException { if (method.equals(RELEASE_METHOD)) { // Sleep for a bit so we can generate CMEs Thread.sleep(250); @@ -54,8 +53,8 @@ public Object invoke(Object proxy, Method method, Object[] args) } private Table makeProxy() { - return (Table) Proxy.newProxyInstance(getClass().getClassLoader(), - new Class[] {Table.class}, new DelayingReleaseProxy()); + return (Table) Proxy.newProxyInstance(getClass().getClassLoader(), new Class[] {Table.class}, + new DelayingReleaseProxy()); } @Override @@ -78,19 +77,15 @@ protected void setUp() throws Exception { } /** - * This method tests for regression of the ConcurrentModificationException documented by - * IDS-5134 + * This method tests for regression of the ConcurrentModificationException documented by IDS-5134 */ public void testIds5134CME() throws ExecutionException, InterruptedException { - final TreeTableClientTableManager.ClientState stateObj = - TreeTableClientTableManager.DEFAULT.get(clients[0]); - final TreeTableClientTableManager.TreeState treeState00 = - stateObj.getTreeState(0, () -> 
mockSnapshotState); + final TreeTableClientTableManager.ClientState stateObj = TreeTableClientTableManager.DEFAULT.get(clients[0]); + final TreeTableClientTableManager.TreeState treeState00 = stateObj.getTreeState(0, () -> mockSnapshotState); assertSame(mockSnapshotState, treeState00.getUserState()); // Retain a few tables - final Table[] proxies = - IntStream.range(0, 10).mapToObj((i) -> makeProxy()).toArray(Table[]::new); + final Table[] proxies = IntStream.range(0, 10).mapToObj((i) -> makeProxy()).toArray(Table[]::new); for (int i = 0; i < 5; i++) { treeState00.retain(i, proxies[i]); } diff --git a/CompilerTools/src/main/java/io/deephaven/compilertools/CompilerTools.java b/CompilerTools/src/main/java/io/deephaven/compilertools/CompilerTools.java index ab51f4c243f..3799596efbc 100644 --- a/CompilerTools/src/main/java/io/deephaven/compilertools/CompilerTools.java +++ b/CompilerTools/src/main/java/io/deephaven/compilertools/CompilerTools.java @@ -48,21 +48,19 @@ public class CompilerTools { */ private static final int DEFAULT_MAX_STRING_LITERAL_LENGTH = 65500; - private static final String JAVA_CLASS_VERSION = - System.getProperty("java.class.version").replace('.', '_'); + private static final String JAVA_CLASS_VERSION = System.getProperty("java.class.version").replace('.', '_'); private static final int MAX_CLASS_COLLISIONS = 128; private static final String IDENTIFYING_FIELD_NAME = "_CLASS_BODY_"; private static final String CODEGEN_TIMEOUT_PROP = "CompilerTools.codegen.timeoutMs"; - private static final long CODEGEN_TIMEOUT_MS_DEFAULT = TimeUnit.SECONDS.toMillis(10); // 10 - // seconds + private static final long CODEGEN_TIMEOUT_MS_DEFAULT = TimeUnit.SECONDS.toMillis(10); // 10 seconds private static final String CODEGEN_LOOP_DELAY_PROP = "CompilerTools.codegen.retry.delay"; private static final long CODEGEN_LOOP_DELAY_MS_DEFAULT = 100; - private static final long codegenTimeoutMs = Configuration.getInstance() - .getLongWithDefault(CODEGEN_TIMEOUT_PROP, 
CODEGEN_TIMEOUT_MS_DEFAULT); - private static final long codegenLoopDelayMs = Configuration.getInstance() - .getLongWithDefault(CODEGEN_LOOP_DELAY_PROP, CODEGEN_LOOP_DELAY_MS_DEFAULT); + private static final long codegenTimeoutMs = + Configuration.getInstance().getLongWithDefault(CODEGEN_TIMEOUT_PROP, CODEGEN_TIMEOUT_MS_DEFAULT); + private static final long codegenLoopDelayMs = + Configuration.getInstance().getLongWithDefault(CODEGEN_LOOP_DELAY_PROP, CODEGEN_LOOP_DELAY_MS_DEFAULT); /** * Enables or disables compilation logging. @@ -78,21 +76,21 @@ public static boolean setLogEnabled(boolean logEnabled) { /* * NB: This is (obviously) not thread safe if code tries to write the same className to the same - * destinationDirectory from multiple threads. Seeing as we don't currently have this use case, - * leaving synchronization as an external concern. + * destinationDirectory from multiple threads. Seeing as we don't currently have this use case, leaving + * synchronization as an external concern. */ - public static void writeClass(final File destinationDirectory, final String className, - final byte[] data) throws IOException { + public static void writeClass(final File destinationDirectory, final String className, final byte[] data) + throws IOException { writeClass(destinationDirectory, className, data, null); } private static void ensureDirectories(final File file, final Supplier runtimeErrMsg) { // File.mkdirs() checks for existrance on entry, in which case it returns false. // It may also return false on a failure to create. - // Also note, two separate threads or JVMs may be running this code in parallel. It's - // possible that we could lose the race - // (and therefore mkdirs() would return false), but still get the directory we need (and - // therefore exists() would return true) + // Also note, two separate threads or JVMs may be running this code in parallel. 
It's possible that we could + // lose the race + // (and therefore mkdirs() would return false), but still get the directory we need (and therefore exists() + // would return true) if (!file.mkdirs() && !file.isDirectory()) { throw new RuntimeException(runtimeErrMsg.get()); } @@ -100,46 +98,44 @@ private static void ensureDirectories(final File file, final Supplier ru /* * NB: This is (obviously) not thread safe if code tries to write the same className to the same - * destinationDirectory from multiple threads. Seeing as we don't currently have this use case, - * leaving synchronization as an external concern. + * destinationDirectory from multiple threads. Seeing as we don't currently have this use case, leaving + * synchronization as an external concern. */ - public static void writeClass(final File destinationDirectory, final String className, - final byte[] data, final String message) throws IOException { + public static void writeClass(final File destinationDirectory, final String className, final byte[] data, + final String message) throws IOException { final File destinationFile = new File(destinationDirectory, - className.replace('.', File.separatorChar) + JavaFileObject.Kind.CLASS.extension); + className.replace('.', File.separatorChar) + JavaFileObject.Kind.CLASS.extension); if (destinationFile.exists()) { final byte[] existingBytes = Files.readAllBytes(destinationFile.toPath()); if (Arrays.equals(existingBytes, data)) { if (message == null) { log.info().append("Ignoring pushed class ").append(className) - .append(" because it already exists in this context!").endl(); + .append(" because it already exists in this context!").endl(); } else { log.info().append("Ignoring pushed class ").append(className).append(message) - .append(" because it already exists in this context!").endl(); + .append(" because it already exists in this context!").endl(); } return; } else { if (message == null) { log.info().append("Pushed class ").append(className) - .append(" 
already exists in this context, but has changed!").endl(); + .append(" already exists in this context, but has changed!").endl(); } else { log.info().append("Pushed class ").append(className).append(message) - .append(" already exists in this context, but has changed!").endl(); + .append(" already exists in this context, but has changed!").endl(); } if (!destinationFile.delete()) { - throw new IOException( - "Could not delete existing class file: " + destinationFile); + throw new IOException("Could not delete existing class file: " + destinationFile); } } } final File parentDir = destinationFile.getParentFile(); ensureDirectories(parentDir, - () -> "Unable to create missing destination directory " + parentDir.getAbsolutePath()); + () -> "Unable to create missing destination directory " + parentDir.getAbsolutePath()); if (!destinationFile.createNewFile()) { - throw new RuntimeException( - "Unable to create destination file " + destinationFile.getAbsolutePath()); + throw new RuntimeException("Unable to create destination file " + destinationFile.getAbsolutePath()); } final ByteArrayOutputStream byteOutStream = new ByteArrayOutputStream(data.length); byteOutStream.write(data, 0, data.length); @@ -157,100 +153,94 @@ public static class Context { String[] dynamicPatterns = new String[] {DYNAMIC_GROOVY_CLASS_PREFIX, FORMULA_PREFIX}; private ClassLoader getClassLoaderForFormula(final Map> parameterClasses) { - // We should always be able to get our own class loader, even if this is invoked from - // external code + // We should always be able to get our own class loader, even if this is invoked from external code // that doesn't have security permissions to make ITS own class loader. 
- return doPrivileged( - (PrivilegedAction) () -> new URLClassLoader(ucl.getURLs(), ucl) { - // Once we find a class that is missing, we should not attempt to load it again, - // otherwise we can end up with a StackOverflow Exception - final HashSet missingClasses = new HashSet<>(); - - @Override - protected Class findClass(String name) throws ClassNotFoundException { - // If we have a parameter that uses this class, return it - final Class paramClass = parameterClasses.get(name); - if (paramClass != null) { - return paramClass; - } - - // Unless we are looking for a formula or Groovy class, we should use the - // default behavior - if (!isFormulaClass(name)) { - return super.findClass(name); - } - - // if it is a groovy class, always try to use the instance in the shell - if (name.startsWith(DYNAMIC_GROOVY_CLASS_PREFIX)) { - try { - return ucl.getParent().loadClass(name); - } catch (final ClassNotFoundException ignored) { - // we'll try to load it otherwise - } - } + return doPrivileged((PrivilegedAction) () -> new URLClassLoader(ucl.getURLs(), ucl) { + // Once we find a class that is missing, we should not attempt to load it again, + // otherwise we can end up with a StackOverflow Exception + final HashSet missingClasses = new HashSet<>(); + + @Override + protected Class findClass(String name) throws ClassNotFoundException { + // If we have a parameter that uses this class, return it + final Class paramClass = parameterClasses.get(name); + if (paramClass != null) { + return paramClass; + } - // We've already not found this class, so we should not try to search again - if (missingClasses.contains(name)) { - return super.findClass(name); - } + // Unless we are looking for a formula or Groovy class, we should use the default behavior + if (!isFormulaClass(name)) { + return super.findClass(name); + } - final byte[] bytes; + // if it is a groovy class, always try to use the instance in the shell + if (name.startsWith(DYNAMIC_GROOVY_CLASS_PREFIX)) { try { - bytes = 
loadClassData(name); - } catch (IOException ioe) { - missingClasses.add(name); - return super.loadClass(name); + return ucl.getParent().loadClass(name); + } catch (final ClassNotFoundException ignored) { + // we'll try to load it otherwise } - return defineClass(name, bytes, 0, bytes.length); } - @SuppressWarnings("BooleanMethodIsAlwaysInverted") - private boolean isFormulaClass(String name) { - return Arrays.stream(dynamicPatterns).anyMatch(name::startsWith); + // We've already not found this class, so we should not try to search again + if (missingClasses.contains(name)) { + return super.findClass(name); } - @Override - public Class loadClass(String name) throws ClassNotFoundException { - if (!isFormulaClass(name)) { - return super.loadClass(name); - } - return findClass(name); + final byte[] bytes; + try { + bytes = loadClassData(name); + } catch (IOException ioe) { + missingClasses.add(name); + return super.loadClass(name); } + return defineClass(name, bytes, 0, bytes.length); + } - private byte[] loadClassData(String name) throws IOException { - try { - // The compiler should always have access to the class-loader - // directories, - // even if code that invokes this does not. 
- return doPrivileged((PrivilegedExceptionAction) () -> { - final File destFile = - new File(classDestination, name.replace('.', File.separatorChar) - + JavaFileObject.Kind.CLASS.extension); - if (destFile.exists()) { - return Files.readAllBytes(destFile.toPath()); - } + @SuppressWarnings("BooleanMethodIsAlwaysInverted") + private boolean isFormulaClass(String name) { + return Arrays.stream(dynamicPatterns).anyMatch(name::startsWith); + } - for (File location : additionalClassLocations) { - final File checkFile = - new File(location, name.replace('.', File.separatorChar) - + JavaFileObject.Kind.CLASS.extension); - if (checkFile.exists()) { - return Files.readAllBytes(checkFile.toPath()); - } - } + @Override + public Class loadClass(String name) throws ClassNotFoundException { + if (!isFormulaClass(name)) { + return super.loadClass(name); + } + return findClass(name); + } - throw new FileNotFoundException(name); - }); - } catch (final PrivilegedActionException pae) { - final Exception inner = pae.getException(); - if (inner instanceof IOException) { - throw (IOException) inner; - } else { - throw new RuntimeException(inner); + private byte[] loadClassData(String name) throws IOException { + try { + // The compiler should always have access to the class-loader directories, + // even if code that invokes this does not. 
+ return doPrivileged((PrivilegedExceptionAction) () -> { + final File destFile = new File(classDestination, + name.replace('.', File.separatorChar) + JavaFileObject.Kind.CLASS.extension); + if (destFile.exists()) { + return Files.readAllBytes(destFile.toPath()); + } + + for (File location : additionalClassLocations) { + final File checkFile = new File(location, + name.replace('.', File.separatorChar) + JavaFileObject.Kind.CLASS.extension); + if (checkFile.exists()) { + return Files.readAllBytes(checkFile.toPath()); + } } + + throw new FileNotFoundException(name); + }); + } catch (final PrivilegedActionException pae) { + final Exception inner = pae.getException(); + if (inner instanceof IOException) { + throw (IOException) inner; + } else { + throw new RuntimeException(inner); } } - }); + } + }); } private static class WritableURLClassLoader extends URLClassLoader { @@ -259,8 +249,7 @@ private WritableURLClassLoader(URL[] urls, ClassLoader parent) { } @Override - protected synchronized Class loadClass(String name, boolean resolve) - throws ClassNotFoundException { + protected synchronized Class loadClass(String name, boolean resolve) throws ClassNotFoundException { Class clazz = findLoadedClass(name); if (clazz != null) { return clazz; @@ -294,8 +283,7 @@ public Context(File classDestination) { public Context(File classDestination, ClassLoader parentClassLoader) { this.classDestination = classDestination; - ensureDirectories(this.classDestination, - () -> "Failed to create missing class destination directory " + + ensureDirectories(this.classDestination, () -> "Failed to create missing class destination directory " + classDestination.getAbsolutePath()); additionalClassLocations = new LinkedHashSet<>(); @@ -305,11 +293,9 @@ public Context(File classDestination, ClassLoader parentClassLoader) { } catch (MalformedURLException e) { throw new RuntimeException("", e); } - // We should be able to create this class loader, even if this is invoked from external - // code + 
// We should be able to create this class loader, even if this is invoked from external code // that does not have sufficient security permissions. - this.ucl = doPrivileged( - (PrivilegedAction) () -> new WritableURLClassLoader(urls, + this.ucl = doPrivileged((PrivilegedAction) () -> new WritableURLClassLoader(urls, parentClassLoader)); } @@ -328,14 +314,11 @@ protected void addClassSource(File classSourceDirectory) { } public File getFakeClassDestination() { - // Groovy classes need to be written out to a location where they can be found by the - // compiler + // Groovy classes need to be written out to a location where they can be found by the compiler // (so that filters and formulae can use them). // - // We don't want the regular runtime class loader to find them, because then they get - // "stuck" in there - // even if the class itself changes, and we can't forget it. So instead we use a - // single-use class loader + // We don't want the regular runtime class loader to find them, because then they get "stuck" in there + // even if the class itself changes, and we can't forget it. So instead we use a single-use class loader // for each formula, that will always read the class from disk. return null; } @@ -360,12 +343,11 @@ public WritableURLClassLoader getClassLoader() { } public void setParentClassLoader(final ClassLoader parentClassLoader) { - // The system should always be able to create this class loader, even if invoked from - // something that + // The system should always be able to create this class loader, even if invoked from something that // doesn't have the right security permissions for it. 
ucl = doPrivileged( - (PrivilegedAction) () -> new WritableURLClassLoader( - ucl.getURLs(), parentClassLoader)); + (PrivilegedAction) () -> new WritableURLClassLoader(ucl.getURLs(), + parentClassLoader)); } public void cleanup() { @@ -379,8 +361,7 @@ private static Context getDefaultContext() { if (defaultContext == null) { synchronized (CompilerTools.class) { if (defaultContext == null) { - defaultContext = - new Context(new File(Configuration.getInstance().getWorkspacePath() + + defaultContext = new Context(new File(Configuration.getInstance().getWorkspacePath() + File.separator + "cache" + File.separator + "classes")); } } @@ -398,17 +379,16 @@ private static Context getDefaultContext() { public static synchronized void setDefaultContext(final Context context) { if (defaultContext != null) { throw new IllegalStateException( - "It's too late to set default context; it's already set to: " + defaultContext); + "It's too late to set default context; it's already set to: " + defaultContext); } defaultContext = Objects.requireNonNull(context); } - private static final ThreadLocal currContext = - ThreadLocal.withInitial(CompilerTools::getDefaultContext); + private static final ThreadLocal currContext = ThreadLocal.withInitial(CompilerTools::getDefaultContext); public static void resetContext() { - setContext(new Context(new File(Configuration.getInstance().getWorkspacePath() - + File.separator + "cache" + File.separator + "classes"))); + setContext(new Context(new File(Configuration.getInstance().getWorkspacePath() + File.separator + "cache" + + File.separator + "classes"))); } public static void setContext(@Nullable Context context) { @@ -424,7 +404,7 @@ public static Context getContext() { } public static RETURN_TYPE doWithContext(@NotNull final Context context, - @NotNull final Supplier action) { + @NotNull final Supplier action) { final Context originalContext = getContext(); try { setContext(context); @@ -434,20 +414,18 @@ public static RETURN_TYPE 
doWithContext(@NotNull final Context con } } - private static boolean logEnabled = - Configuration.getInstance().getBoolean("CompilerTools.logEnabledDefault"); + private static boolean logEnabled = Configuration.getInstance().getBoolean("CompilerTools.logEnabledDefault"); public static Class compile(String className, String classBody, String packageNameRoot) { return compile(className, classBody, packageNameRoot, null, Collections.emptyMap()); } public static Class compile(String className, String classBody, String packageNameRoot, - Map> parameterClasses) { + Map> parameterClasses) { return compile(className, classBody, packageNameRoot, null, parameterClasses); } - public static Class compile(String className, String classBody, String packageNameRoot, - StringBuilder codeLog) { + public static Class compile(String className, String classBody, String packageNameRoot, StringBuilder codeLog) { return compile(className, classBody, packageNameRoot, codeLog, Collections.emptyMap()); } @@ -455,18 +433,17 @@ public static Class compile(String className, String classBody, String packag * Compile a class. 
* * @param className Class name - * @param classBody Class body, before update with "$CLASS_NAME$" replacement and package name - * prefixing + * @param classBody Class body, before update with "$CLASS_NAME$" replacement and package name prefixing * @param packageNameRoot Package name prefix * @param codeLog Optional "log" for final class code * @param parameterClasses Generic parameters, empty if none required * @return The compiled class */ public static Class compile(@NotNull final String className, - @NotNull final String classBody, - @NotNull final String packageNameRoot, - @Nullable final StringBuilder codeLog, - @NotNull final Map> parameterClasses) { + @NotNull final String classBody, + @NotNull final String packageNameRoot, + @Nullable final StringBuilder codeLog, + @NotNull final Map> parameterClasses) { SimplePromise> promise; final boolean promiseAlreadyMade; @@ -490,8 +467,7 @@ public static Class compile(@NotNull final String className, // It's my job to fulfill the promise try { - return compileHelper(className, classBody, packageNameRoot, codeLog, parameterClasses, - context); + return compileHelper(className, classBody, packageNameRoot, codeLog, parameterClasses, context); } catch (RuntimeException e) { promise.setException(e); throw e; @@ -499,25 +475,24 @@ public static Class compile(@NotNull final String className, } private static Class compileHelper(@NotNull final String className, - @NotNull final String classBody, - @NotNull final String packageNameRoot, - @Nullable final StringBuilder codeLog, - @NotNull final Map> parameterClasses, - @NotNull final Context context) { - // NB: We include class name hash in order to (hopefully) account for case insensitive file - // systems. 
+ @NotNull final String classBody, + @NotNull final String packageNameRoot, + @Nullable final StringBuilder codeLog, + @NotNull final Map> parameterClasses, + @NotNull final Context context) { + // NB: We include class name hash in order to (hopefully) account for case insensitive file systems. final int classNameHash = className.hashCode(); final int classBodyHash = classBody.hashCode(); for (int pi = 0; pi < MAX_CLASS_COLLISIONS; ++pi) { final String packageNameSuffix = "c" - + (classBodyHash < 0 ? "m" : "") + (classBodyHash & Integer.MAX_VALUE) - + (classNameHash < 0 ? "n" : "") + (classNameHash & Integer.MAX_VALUE) - + (pi == 0 ? "" : ("p" + pi)) - + "v" + JAVA_CLASS_VERSION; + + (classBodyHash < 0 ? "m" : "") + (classBodyHash & Integer.MAX_VALUE) + + (classNameHash < 0 ? "n" : "") + (classNameHash & Integer.MAX_VALUE) + + (pi == 0 ? "" : ("p" + pi)) + + "v" + JAVA_CLASS_VERSION; final String packageName = (packageNameRoot.isEmpty() - ? packageNameSuffix - : packageNameRoot + (packageNameRoot.endsWith(".") ? "" : ".") + packageNameSuffix); + ? packageNameSuffix + : packageNameRoot + (packageNameRoot.endsWith(".") ? "" : ".") + packageNameSuffix); final String fqClassName = packageName + "." + className; // Ask the classloader to load an existing class with this name. This might: @@ -528,13 +503,10 @@ private static Class compileHelper(@NotNull final String className, if (result == null) { // Couldn't find one, so try to create it. This might: // A. succeed - // B. Lose a race to another process on the same file system which is compiling the - // identical formula - // C. Lose a race to another process on the same file system compiling a different - // formula that + // B. Lose a race to another process on the same file system which is compiling the identical formula + // C. Lose a race to another process on the same file system compiling a different formula that // happens to have the same hash (same packageName). 
- // However, regardless of A-C, there will be *some* class being found (i.e. - // tryLoadClassByFqName won't + // However, regardless of A-C, there will be *some* class being found (i.e. tryLoadClassByFqName won't // return null). maybeCreateClass(className, classBody, packageName, fqClassName); @@ -542,8 +514,7 @@ private static Class compileHelper(@NotNull final String className, // If we wrote a file and can't load it ... then give the filesystem some time. result = tryLoadClassByFqName(fqClassName, parameterClasses); try { - final long deadline = - System.currentTimeMillis() + codegenTimeoutMs - codegenLoopDelayMs; + final long deadline = System.currentTimeMillis() + codegenTimeoutMs - codegenLoopDelayMs; while (result == null && System.currentTimeMillis() < deadline) { Thread.sleep(codegenLoopDelayMs); result = tryLoadClassByFqName(fqClassName, parameterClasses); @@ -553,45 +524,32 @@ private static Class compileHelper(@NotNull final String className, } if (result == null) { - throw new IllegalStateException( - "Should have been able to load *some* class here"); + throw new IllegalStateException("Should have been able to load *some* class here"); } } final String identifyingFieldValue = loadIdentifyingField(result); - // We have a class. It either contains the formula we are looking for (cases 2, A, and - // B) or a different - // formula with the same name (cases 3 and C). In either case, we should store the - // result in our cache, + // We have a class. It either contains the formula we are looking for (cases 2, A, and B) or a different + // formula with the same name (cases 3 and C). In either case, we should store the result in our cache, // either fulfilling an existing promise or making a new, fulfilled promise. synchronized (context) { - // Note we are doing something kind of subtle here. 
We are removing an entry whose - // key was matched by - // value equality and replacing it with a value-equal but reference-different string - // that is a static - // member of the class we just loaded. This should be easier on the garbage - // collector because we are - // replacing a calculated value with a classloaded value and so in effect we are - // "canonicalizing" the - // string. This is important because these long strings stay in knownClasses - // forever. + // Note we are doing something kind of subtle here. We are removing an entry whose key was matched by + // value equality and replacing it with a value-equal but reference-different string that is a static + // member of the class we just loaded. This should be easier on the garbage collector because we are + // replacing a calculated value with a classloaded value and so in effect we are "canonicalizing" the + // string. This is important because these long strings stay in knownClasses forever. SimplePromise> p = context.knownClasses.remove(identifyingFieldValue); if (p == null) { - // If we encountered a different class than the one we're looking for, make a - // fresh promise and - // immediately fulfill it. This is for the purpose of populating the cache in - // case someone comes - // looking for that class later. Rationale: we already did all the classloading - // work; no point in + // If we encountered a different class than the one we're looking for, make a fresh promise and + // immediately fulfill it. This is for the purpose of populating the cache in case someone comes + // looking for that class later. Rationale: we already did all the classloading work; no point in // throwing it away now, even though this is not the class we're looking for. p = new SimplePromise<>(); } context.knownClasses.put(identifyingFieldValue, p); - // It's also possible that some other code has already fulfilled this promise with - // exactly the same - // class. 
That's ok though: the promise code does not reject multiple sets to the - // identical value. + // It's also possible that some other code has already fulfilled this promise with exactly the same + // class. That's ok though: the promise code does not reject multiple sets to the identical value. p.setResultFriendly(result); } @@ -599,22 +557,19 @@ private static Class compileHelper(@NotNull final String className, if (classBody.equals(identifyingFieldValue)) { // Cases 2, A, and B. if (codeLog != null) { - // If the caller wants a textual copy of the code we either made, or just found - // in the cache. + // If the caller wants a textual copy of the code we either made, or just found in the cache. codeLog.append(makeFinalCode(className, classBody, packageName)); } return result; } // Try the next hash name } - throw new IllegalStateException( - "Found too many collisions for package name root " + packageNameRoot + throw new IllegalStateException("Found too many collisions for package name root " + packageNameRoot + ", class name=" + className + ", class body hash=" + classBodyHash + " - contact Deephaven support!"); } - private static Class tryLoadClassByFqName(String fqClassName, - Map> parameterClasses) { + private static Class tryLoadClassByFqName(String fqClassName, Map> parameterClasses) { try { return getContext().getClassLoaderForFormula(parameterClasses).loadClass(fqClassName); } catch (ClassNotFoundException cnfe) { @@ -635,23 +590,20 @@ private static String makeFinalCode(String className, String classBody, String p final String joinedEscapedBody = createEscapedJoinedString(classBody); classBody = classBody.replaceAll("\\$CLASSNAME\\$", className); classBody = classBody.substring(0, classBody.lastIndexOf("}")); - classBody += " public static String " + IDENTIFYING_FIELD_NAME + " = " - + joinedEscapedBody + ";\n}"; + classBody += " public static String " + IDENTIFYING_FIELD_NAME + " = " + joinedEscapedBody + ";\n}"; return "package " + packageName + ";\n" 
+ classBody; } /** - * Transform a string into the corresponding Java source code that compiles into that string. - * This involves escaping special characters, surrounding it with quotes, and (if the string is - * larger than the max string length for Java literals), splitting it into substrings and - * constructing a call to String.join() that combines those substrings. + * Transform a string into the corresponding Java source code that compiles into that string. This involves escaping + * special characters, surrounding it with quotes, and (if the string is larger than the max string length for Java + * literals), splitting it into substrings and constructing a call to String.join() that combines those substrings. */ public static String createEscapedJoinedString(final String originalString) { return createEscapedJoinedString(originalString, DEFAULT_MAX_STRING_LITERAL_LENGTH); } - public static String createEscapedJoinedString(final String originalString, - int maxStringLength) { + public static String createEscapedJoinedString(final String originalString, int maxStringLength) { final String[] splits = splitByModifiedUtf8Encoding(originalString, maxStringLength); // Turn each split into a Java source string by escaping it and surrounding it with " @@ -672,8 +624,7 @@ private static String[] splitByModifiedUtf8Encoding(final String originalString, final List splits = new ArrayList<>(); // exclusive end position of the previous substring. int previousEnd = 0; - // Number of bytes in the "modified UTF-8" representation of the substring we are currently - // scanning. + // Number of bytes in the "modified UTF-8" representation of the substring we are currently scanning. int currentByteCount = 0; for (int ii = 0; ii < originalString.length(); ++ii) { final int bytesConsumed = calcBytesConsumed(originalString.charAt(ii)); @@ -687,8 +638,7 @@ private static String[] splitByModifiedUtf8Encoding(final String originalString, } // At the end of the loop, either // 1. 
there are one or more characters that still need to be added to splits - // 2. originalString was empty and so splits is empty and we need to add a single empty - // string to splits + // 2. originalString was empty and so splits is empty and we need to add a single empty string to splits splits.add(originalString.substring(previousEnd)); return splits.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY); } @@ -710,8 +660,7 @@ static class JavaSourceFromString extends SimpleJavaFileObject { final String code; JavaSourceFromString(String name, String code) { - super(URI.create("string:///" + name.replace('.', '/') + Kind.SOURCE.extension), - Kind.SOURCE); + super(URI.create("string:///" + name.replace('.', '/') + Kind.SOURCE.extension), Kind.SOURCE); this.code = code; } @@ -725,8 +674,8 @@ static class JavaSourceFromFile extends SimpleJavaFileObject { final String code; JavaSourceFromFile(File basePath, File file) { - super(URI.create("string:///" + createName(basePath, file).replace('.', '/') - + Kind.SOURCE.extension), Kind.SOURCE); + super(URI.create("string:///" + createName(basePath, file).replace('.', '/') + Kind.SOURCE.extension), + Kind.SOURCE); try { this.code = FileUtils.readTextFile(file); } catch (IOException e) { @@ -753,12 +702,10 @@ public CharSequence getCharContent(boolean ignoreEncodingErrors) { } } - private static void maybeCreateClass(String className, String code, String packageName, - String fqClassName) { + private static void maybeCreateClass(String className, String code, String packageName, String fqClassName) { final String finalCode = makeFinalCode(className, code, packageName); - // The 'compile' action does a bunch of things that need security permissions; this always - // needs to run + // The 'compile' action does a bunch of things that need security permissions; this always needs to run // with elevated permissions. 
if (logEnabled) { log.info().append("Generating code ").append(finalCode).endl(); @@ -769,42 +716,33 @@ private static void maybeCreateClass(String className, String code, String packa final String[] splitPackageName = packageName.split("\\."); if (splitPackageName.length == 0) { - throw new RuntimeException( - String.format("packageName %s expected to have at least one .", packageName)); + throw new RuntimeException(String.format("packageName %s expected to have at least one .", packageName)); } - final String[] truncatedSplitPackageName = - Arrays.copyOf(splitPackageName, splitPackageName.length - 1); - - // Get the destination root directory (e.g. /tmp/workspace/cache/classes) and populate it - // with the package - // directories (e.g. io/deephaven/test) if they are not already there. This will be useful - // later. - // Also create a temp directory e.g. - // /tmp/workspace/cache/classes/temporaryCompilationDirectory12345 + final String[] truncatedSplitPackageName = Arrays.copyOf(splitPackageName, splitPackageName.length - 1); + + // Get the destination root directory (e.g. /tmp/workspace/cache/classes) and populate it with the package + // directories (e.g. io/deephaven/test) if they are not already there. This will be useful later. + // Also create a temp directory e.g. /tmp/workspace/cache/classes/temporaryCompilationDirectory12345 // This temp directory will be where the compiler drops files into, e.g. 
// /tmp/workspace/cache/classes/temporaryCompilationDirectory12345/io/deephaven/test/cm12862183232603186v52_0/Formula.class - // Foreshadowing: we will eventually atomically move cm12862183232603186v52_0 from the above - // to + // Foreshadowing: we will eventually atomically move cm12862183232603186v52_0 from the above to // /tmp/workspace/cache/classes/io/deephaven/test - // Note: for this atomic move to work, this temp directory must be on the same file system - // as the destination + // Note: for this atomic move to work, this temp directory must be on the same file system as the destination // directory. final String rootPathAsString; final String tempDirAsString; try { - final Pair resultPair = AccessController - .doPrivileged((PrivilegedExceptionAction>) () -> { - final String rootPathString = ctxClassDestination.getAbsolutePath(); - final Path rootPathWithPackage = - Paths.get(rootPathString, truncatedSplitPackageName); - final File rpf = rootPathWithPackage.toFile(); - ensureDirectories(rpf, - () -> "Couldn't create package directories: " + rootPathWithPackage); - final Path tempPath = Files.createTempDirectory(Paths.get(rootPathString), - "temporaryCompilationDirectory"); - final String tempPathString = tempPath.toFile().getAbsolutePath(); - return new Pair<>(rootPathString, tempPathString); - }); + final Pair resultPair = + AccessController.doPrivileged((PrivilegedExceptionAction>) () -> { + final String rootPathString = ctxClassDestination.getAbsolutePath(); + final Path rootPathWithPackage = Paths.get(rootPathString, truncatedSplitPackageName); + final File rpf = rootPathWithPackage.toFile(); + ensureDirectories(rpf, () -> "Couldn't create package directories: " + rootPathWithPackage); + final Path tempPath = + Files.createTempDirectory(Paths.get(rootPathString), "temporaryCompilationDirectory"); + final String tempPathString = tempPath.toFile().getAbsolutePath(); + return new Pair<>(rootPathString, tempPathString); + }); rootPathAsString = 
resultPair.first; tempDirAsString = resultPair.second; } catch (PrivilegedActionException pae) { @@ -812,8 +750,7 @@ private static void maybeCreateClass(String className, String code, String packa } try { - maybeCreateClassHelper(fqClassName, finalCode, splitPackageName, ctx, rootPathAsString, - tempDirAsString); + maybeCreateClassHelper(fqClassName, finalCode, splitPackageName, ctx, rootPathAsString, tempDirAsString); } finally { AccessController.doPrivileged((PrivilegedAction) () -> { try { @@ -826,44 +763,37 @@ private static void maybeCreateClass(String className, String code, String packa } } - private static void maybeCreateClassHelper(String fqClassName, String finalCode, - String[] splitPackageName, - Context ctx, String rootPathAsString, String tempDirAsString) { + private static void maybeCreateClassHelper(String fqClassName, String finalCode, String[] splitPackageName, + Context ctx, String rootPathAsString, String tempDirAsString) { final StringWriter compilerOutput = new StringWriter(); - final JavaCompiler compiler = AccessController - .doPrivileged((PrivilegedAction) ToolProvider::getSystemJavaCompiler); + final JavaCompiler compiler = + AccessController.doPrivileged((PrivilegedAction) ToolProvider::getSystemJavaCompiler); if (compiler == null) { - throw new RuntimeException( - "No Java compiler provided - are you using a JRE instead of a JDK?"); + throw new RuntimeException("No Java compiler provided - are you using a JRE instead of a JDK?"); } - final String classPathAsString = - ctx.getClassPath() + File.pathSeparator + getJavaClassPath(); - final List compilerOptions = - AccessController.doPrivileged((PrivilegedAction>) () -> Arrays.asList("-d", - tempDirAsString, "-cp", classPathAsString)); + final String classPathAsString = ctx.getClassPath() + File.pathSeparator + getJavaClassPath(); + final List compilerOptions = AccessController.doPrivileged( + (PrivilegedAction>) () -> Arrays.asList("-d", tempDirAsString, "-cp", classPathAsString)); - 
final StandardJavaFileManager fileManager = - compiler.getStandardFileManager(null, null, null); + final StandardJavaFileManager fileManager = compiler.getStandardFileManager(null, null, null); final boolean result = compiler.getTask(compilerOutput, - fileManager, - null, - compilerOptions, - null, - Collections.singletonList(new JavaSourceFromString(fqClassName, finalCode))) - .call(); + fileManager, + null, + compilerOptions, + null, + Collections.singletonList(new JavaSourceFromString(fqClassName, finalCode))) + .call(); if (!result) { - throw new RuntimeException( - "Error compiling class " + fqClassName + ":\n" + compilerOutput.toString()); + throw new RuntimeException("Error compiling class " + fqClassName + ":\n" + compilerOutput.toString()); } // The above has compiled into into e.g. // /tmp/workspace/cache/classes/temporaryCompilationDirectory12345/io/deephaven/test/cm12862183232603186v52_0/{various // class files} // We want to atomically move it to e.g. - // /tmp/workspace/cache/classes/io/deephaven/test/cm12862183232603186v52_0/{various class - // files} + // /tmp/workspace/cache/classes/io/deephaven/test/cm12862183232603186v52_0/{various class files} // Our strategy try { AccessController.doPrivileged((PrivilegedExceptionAction) () -> { @@ -872,15 +802,12 @@ private static void maybeCreateClassHelper(String fqClassName, String finalCode, try { Files.move(srcDir, destDir, StandardCopyOption.ATOMIC_MOVE); } catch (IOException ioe) { - // The move might have failed for a variety of bad reasons. However, if the - // reason was because - // we lost the race to some other process, that's a harmless/desirable outcome, - // and we can ignore + // The move might have failed for a variety of bad reasons. However, if the reason was because + // we lost the race to some other process, that's a harmless/desirable outcome, and we can ignore // it. 
if (!Files.exists(destDir)) { - throw new IOException( - "Move failed for some reason other than destination already existing", - ioe); + throw new IOException("Move failed for some reason other than destination already existing", + ioe); } } return null; @@ -897,42 +824,37 @@ private static void maybeCreateClassHelper(String fqClassName, String finalCode, * @param javaFiles the java source files * @return a Pair of success, and the compiler output */ - public static Pair tryCompile(File basePath, Collection javaFiles) - throws IOException { + public static Pair tryCompile(File basePath, Collection javaFiles) throws IOException { try { // We need multiple filesystem accesses et al, so make this whole section privileged. - return AccessController - .doPrivileged((PrivilegedExceptionAction>) () -> { + return AccessController.doPrivileged((PrivilegedExceptionAction>) () -> { - final JavaCompiler compiler = ToolProvider.getSystemJavaCompiler(); - if (compiler == null) { - throw new RuntimeException( - "No Java compiler provided - are you using a JRE instead of a JDK?"); - } + final JavaCompiler compiler = ToolProvider.getSystemJavaCompiler(); + if (compiler == null) { + throw new RuntimeException("No Java compiler provided - are you using a JRE instead of a JDK?"); + } - final File outputDirectory = - Files.createTempDirectory("temporaryCompilationDirectory").toFile(); + final File outputDirectory = Files.createTempDirectory("temporaryCompilationDirectory").toFile(); - try { - final StringWriter compilerOutput = new StringWriter(); - final Context ctx = getContext(); - final String javaClasspath = getJavaClassPath(); + try { + final StringWriter compilerOutput = new StringWriter(); + final Context ctx = getContext(); + final String javaClasspath = getJavaClassPath(); - final Collection javaFileObjects = - javaFiles.stream().map(f -> new JavaSourceFromFile(basePath, f)) - .collect(Collectors.toList()); + final Collection javaFileObjects = javaFiles.stream() + .map(f -> 
new JavaSourceFromFile(basePath, f)).collect(Collectors.toList()); - final boolean result = compiler.getTask(compilerOutput, null, null, + final boolean result = compiler.getTask(compilerOutput, null, null, Arrays.asList("-d", outputDirectory.getAbsolutePath(), "-cp", - ctx.getClassPath() + File.pathSeparator + javaClasspath), + ctx.getClassPath() + File.pathSeparator + javaClasspath), null, javaFileObjects).call(); - return new Pair<>(result, compilerOutput.toString()); + return new Pair<>(result, compilerOutput.toString()); - } finally { - FileUtils.deleteRecursively(outputDirectory); - } - }); + } finally { + FileUtils.deleteRecursively(outputDirectory); + } + }); } catch (final PrivilegedActionException pae) { if (pae.getException() instanceof IOException) { throw (IOException) pae.getException(); @@ -943,16 +865,14 @@ public static Pair tryCompile(File basePath, Collection j } /** - * Retrieve the java class path from our existing Java class path, and IntelliJ/TeamCity - * environment variables. + * Retrieve the java class path from our existing Java class path, and IntelliJ/TeamCity environment variables. 
* * @return */ public static String getJavaClassPath() { String javaClasspath; { - final StringBuilder javaClasspathBuilder = - new StringBuilder(System.getProperty("java.class.path")); + final StringBuilder javaClasspathBuilder = new StringBuilder(System.getProperty("java.class.path")); final String teamCityWorkDir = System.getProperty("teamcity.build.workingDir"); if (teamCityWorkDir != null) { @@ -962,8 +882,7 @@ public static String getJavaClassPath() { for (File f : classDirs) { javaClasspathBuilder.append(File.pathSeparator).append(f.getAbsolutePath()); } - final File testDirs[] = - new File(teamCityWorkDir + "/_out_/test-classes").listFiles(); + final File testDirs[] = new File(teamCityWorkDir + "/_out_/test-classes").listFiles(); for (File f : testDirs) { javaClasspathBuilder.append(File.pathSeparator).append(f.getAbsolutePath()); @@ -979,15 +898,14 @@ public static String getJavaClassPath() { javaClasspath = javaClasspathBuilder.toString(); } - // IntelliJ will bundle a very large class path into an empty jar with a Manifest that will - // define the full class path - // Look for this being used during compile time, so the full class path can be sent into the - // compile call + // IntelliJ will bundle a very large class path into an empty jar with a Manifest that will define the full + // class path + // Look for this being used during compile time, so the full class path can be sent into the compile call final String intellijClassPathJarRegex = ".*classpath[0-9]*\\.jar.*"; if (javaClasspath.matches(intellijClassPathJarRegex)) { try { final Enumeration resources = - CompilerTools.class.getClassLoader().getResources("META-INF/MANIFEST.MF"); + CompilerTools.class.getClassLoader().getResources("META-INF/MANIFEST.MF"); final Attributes.Name createdByAttribute = new Attributes.Name("Created-By"); final Attributes.Name classPathAttribute = new Attributes.Name("Class-Path"); while (resources.hasMoreElements()) { @@ -996,28 +914,25 @@ public static String 
getJavaClassPath() { final Attributes attributes = manifest.getMainAttributes(); final Object createdBy = attributes.get(createdByAttribute); if ("IntelliJ IDEA".equals(createdBy)) { - final String extendedClassPath = - (String) attributes.get(classPathAttribute); + final String extendedClassPath = (String) attributes.get(classPathAttribute); if (extendedClassPath != null) { - // Parses the files in the manifest description an changes their format - // to drop the "file:/" and + // Parses the files in the manifest description an changes their format to drop the "file:/" + // and // use the default path separator final String filePaths = Stream.of(extendedClassPath.split("file:/")) - .map(String::trim) - .filter(fileName -> fileName.length() > 0) - .collect(Collectors.joining(File.pathSeparator)); + .map(String::trim) + .filter(fileName -> fileName.length() > 0) + .collect(Collectors.joining(File.pathSeparator)); - // Remove the classpath jar in question, and expand it with the files - // from the manifest + // Remove the classpath jar in question, and expand it with the files from the manifest javaClasspath = Stream.of(javaClasspath.split(File.pathSeparator)) - .map(cp -> cp.matches(intellijClassPathJarRegex) ? filePaths : cp) - .collect(Collectors.joining(File.pathSeparator)); + .map(cp -> cp.matches(intellijClassPathJarRegex) ? 
filePaths : cp) + .collect(Collectors.joining(File.pathSeparator)); } } } } catch (IOException e) { - throw new RuntimeException( - "Error extract manifest file from " + javaClasspath + ".\n", e); + throw new RuntimeException("Error extract manifest file from " + javaClasspath + ".\n", e); } } return javaClasspath; @@ -1074,30 +989,30 @@ private void checkState() { public static void main(String[] args) { final String sillyFuncFormat = String.join("\n", - " public int sillyFunc%d() {", - " final String temp0 = \"ᵈᵉᵉᵖʰᵃᵛᵉⁿ__½¼⅒___\uD83D\uDC96___0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789\";", - " final String temp1 = \"ᵈᵉᵉᵖʰᵃᵛᵉⁿ__½¼⅒___\uD83D\uDC96___0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789\";", - " final String temp2 = \"ᵈᵉᵉᵖʰᵃᵛᵉⁿ__½¼⅒___\uD83D\uDC96___0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789\";", - " final String temp3 = \"ᵈᵉᵉᵖʰᵃᵛᵉⁿ__½¼⅒___\uD83D\uDC96___0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789\";", - " final String temp4 = \"ᵈᵉᵉᵖʰᵃᵛᵉⁿ__½¼⅒___\uD83D\uDC96___0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789\";", - " final String temp5 = \"ᵈᵉᵉᵖʰᵃᵛᵉⁿ__½¼⅒___\uD83D\uDC96___0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789\";", - " final String temp6 = \"ᵈᵉᵉᵖʰᵃᵛᵉⁿ__½¼⅒___\uD83D\uDC96___0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789\";", - " final String temp7 = \"ᵈᵉᵉᵖʰᵃᵛᵉⁿ__½¼⅒___\uD83D\uDC96___0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789\";", - " final String temp8 = \"ᵈᵉᵉᵖʰᵃᵛᵉⁿ__½¼⅒___\uD83D\uDC96___0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789\";", - " final String temp9 
= \"ᵈᵉᵉᵖʰᵃᵛᵉⁿ__½¼⅒___\uD83D\uDC96___0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789\";", - " return temp0.length() + temp1.length() + temp2.length() + temp3.length() + temp4.length() + temp5.length() + temp6.length() + temp7.length() + temp8.length() + temp9.length();", - " }"); + " public int sillyFunc%d() {", + " final String temp0 = \"ᵈᵉᵉᵖʰᵃᵛᵉⁿ__½¼⅒___\uD83D\uDC96___0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789\";", + " final String temp1 = \"ᵈᵉᵉᵖʰᵃᵛᵉⁿ__½¼⅒___\uD83D\uDC96___0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789\";", + " final String temp2 = \"ᵈᵉᵉᵖʰᵃᵛᵉⁿ__½¼⅒___\uD83D\uDC96___0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789\";", + " final String temp3 = \"ᵈᵉᵉᵖʰᵃᵛᵉⁿ__½¼⅒___\uD83D\uDC96___0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789\";", + " final String temp4 = \"ᵈᵉᵉᵖʰᵃᵛᵉⁿ__½¼⅒___\uD83D\uDC96___0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789\";", + " final String temp5 = \"ᵈᵉᵉᵖʰᵃᵛᵉⁿ__½¼⅒___\uD83D\uDC96___0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789\";", + " final String temp6 = \"ᵈᵉᵉᵖʰᵃᵛᵉⁿ__½¼⅒___\uD83D\uDC96___0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789\";", + " final String temp7 = \"ᵈᵉᵉᵖʰᵃᵛᵉⁿ__½¼⅒___\uD83D\uDC96___0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789\";", + " final String temp8 = \"ᵈᵉᵉᵖʰᵃᵛᵉⁿ__½¼⅒___\uD83D\uDC96___0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789\";", + " final String temp9 = 
\"ᵈᵉᵉᵖʰᵃᵛᵉⁿ__½¼⅒___\uD83D\uDC96___0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789\";", + " return temp0.length() + temp1.length() + temp2.length() + temp3.length() + temp4.length() + temp5.length() + temp6.length() + temp7.length() + temp8.length() + temp9.length();", + " }"); final StringBuilder sb = new StringBuilder(); sb.append(String.join("\n", - "public class $CLASSNAME$ implements java.util.function.Function {", - " private final int n;", - " public $CLASSNAME$(int n) {", - " this.n = n;", - " }", - " public Integer apply(Integer v) {", - " return n + v;", - " }")); + "public class $CLASSNAME$ implements java.util.function.Function {", + " private final int n;", + " public $CLASSNAME$(int n) {", + " this.n = n;", + " }", + " public Integer apply(Integer v) {", + " return n + v;", + " }")); for (int ii = 0; ii < 100; ++ii) { sb.append("\n"); sb.append(String.format(sillyFuncFormat, ii)); @@ -1108,11 +1023,9 @@ public static void main(String[] args) { StringBuilder codeLog = new StringBuilder(); try { - final Class clazz = - compile("Test", programText, "com.deephaven.test", codeLog, Collections.emptyMap()); + final Class clazz = compile("Test", programText, "com.deephaven.test", codeLog, Collections.emptyMap()); final Constructor constructor = clazz.getConstructor(int.class); - Function obj = - (Function) constructor.newInstance(17); + Function obj = (Function) constructor.newInstance(17); final int result = obj.apply(5); if (result != 22) { throw new Exception(String.format("Expected 22, got %d", result)); diff --git a/CompilerTools/src/main/java/io/deephaven/compilertools/ReplicatePrimitiveCode.java b/CompilerTools/src/main/java/io/deephaven/compilertools/ReplicatePrimitiveCode.java index 106fc462643..55583fd7e24 100644 --- a/CompilerTools/src/main/java/io/deephaven/compilertools/ReplicatePrimitiveCode.java +++ b/CompilerTools/src/main/java/io/deephaven/compilertools/ReplicatePrimitiveCode.java @@ -17,11 
+17,9 @@ public class ReplicatePrimitiveCode { public static final String TEST_SRC = "src/test/java"; public static final String BENCHMARK_SRC = "benchmark"; - private static String replicateCodeBasedOnChar(Class sourceClass, - Map serialVersionUIDs, String exemptions[], String upperCharacterReplace, - String upperCharReplace, - String characterReplace, String charReplace, String root, String allCapsCharReplace) - throws IOException { + private static String replicateCodeBasedOnChar(Class sourceClass, Map serialVersionUIDs, + String exemptions[], String upperCharacterReplace, String upperCharReplace, + String characterReplace, String charReplace, String root, String allCapsCharReplace) throws IOException { String pairs[][] = new String[][] { {"Character", upperCharacterReplace}, {"Char", upperCharReplace}, @@ -32,11 +30,9 @@ private static String replicateCodeBasedOnChar(Class sourceClass, return replaceAll(sourceClass, serialVersionUIDs, exemptions, root, pairs); } - private static String replicateCodeBasedOnInt(Class sourceClass, - Map serialVersionUIDs, String exemptions[], String upperCharacterReplace, - String upperCharReplace, - String characterReplace, String charReplace, String root, String allCapsCharReplace) - throws IOException { + private static String replicateCodeBasedOnInt(Class sourceClass, Map serialVersionUIDs, + String exemptions[], String upperCharacterReplace, String upperCharReplace, + String characterReplace, String charReplace, String root, String allCapsCharReplace) throws IOException { String pairs[][] = new String[][] { {"Integer", upperCharacterReplace}, {"Int", upperCharReplace}, @@ -47,10 +43,9 @@ private static String replicateCodeBasedOnInt(Class sourceClass, return replaceAll(sourceClass, serialVersionUIDs, exemptions, root, pairs); } - private static void replicateCodeBasedOnShort(Class sourceClass, - Map serialVersionUIDs, String exemptions[], String upperCharacterReplace, - String charReplace, String root, String 
allCapsCharReplace, String[]... extraPairs) - throws IOException { + private static void replicateCodeBasedOnShort(Class sourceClass, Map serialVersionUIDs, + String exemptions[], String upperCharacterReplace, + String charReplace, String root, String allCapsCharReplace, String[]... extraPairs) throws IOException { final String pairs[][]; final int extraPairsLength; if (extraPairs != null) { @@ -67,9 +62,9 @@ private static void replicateCodeBasedOnShort(Class sourceClass, replaceAll(sourceClass, serialVersionUIDs, exemptions, root, pairs); } - private static void replicateCodeBasedOnFloat(Class sourceClass, - Map serialVersionUIDs, String exemptions[], String upperCharacterReplace, - String charReplace, String root, String allCapsCharReplace) throws IOException { + private static void replicateCodeBasedOnFloat(Class sourceClass, Map serialVersionUIDs, + String exemptions[], String upperCharacterReplace, + String charReplace, String root, String allCapsCharReplace) throws IOException { String pairs[][] = new String[][] { {"Float", upperCharacterReplace}, {"float", charReplace}, @@ -78,68 +73,66 @@ private static void replicateCodeBasedOnFloat(Class sourceClass, replaceAll(sourceClass, serialVersionUIDs, exemptions, root, pairs); } - public static String charToBoolean(Class sourceClass, String root, - Map serialVersionUIDs, String... exemptions) throws IOException { - return replicateCodeBasedOnChar(sourceClass, serialVersionUIDs, exemptions, "Boolean", - "Boolean", "boolean", "boolean", root, "BOOLEAN"); + public static String charToBoolean(Class sourceClass, String root, Map serialVersionUIDs, + String... exemptions) throws IOException { + return replicateCodeBasedOnChar(sourceClass, serialVersionUIDs, exemptions, "Boolean", "Boolean", "boolean", + "boolean", root, "BOOLEAN"); } - public static String charToBooleanAsByte(Class sourceClass, String root, - Map serialVersionUIDs, String... 
exemptions) throws IOException { - return replicateCodeBasedOnChar(sourceClass, serialVersionUIDs, exemptions, "Boolean", - "Boolean", "boolean", "byte", root, "BOOLEAN"); + public static String charToBooleanAsByte(Class sourceClass, String root, Map serialVersionUIDs, + String... exemptions) throws IOException { + return replicateCodeBasedOnChar(sourceClass, serialVersionUIDs, exemptions, "Boolean", "Boolean", "boolean", + "byte", root, "BOOLEAN"); } - private static String charToObject(Class sourceClass, String root, - Map serialVersionUIDs, String... exemptions) throws IOException { - return replicateCodeBasedOnChar(sourceClass, serialVersionUIDs, exemptions, "Object", - "Object", "Object", "Object", root, "OBJECT"); + private static String charToObject(Class sourceClass, String root, Map serialVersionUIDs, + String... exemptions) throws IOException { + return replicateCodeBasedOnChar(sourceClass, serialVersionUIDs, exemptions, "Object", "Object", "Object", + "Object", root, "OBJECT"); } - private static String charToByte(Class sourceClass, String root, - Map serialVersionUIDs, String... exemptions) throws IOException { - return replicateCodeBasedOnChar(sourceClass, serialVersionUIDs, exemptions, "Byte", "Byte", - "byte", "byte", root, "BYTE"); + private static String charToByte(Class sourceClass, String root, Map serialVersionUIDs, + String... exemptions) throws IOException { + return replicateCodeBasedOnChar(sourceClass, serialVersionUIDs, exemptions, "Byte", "Byte", "byte", "byte", + root, "BYTE"); } - public static String charToDouble(Class sourceClass, String root, - Map serialVersionUIDs, String... exemptions) throws IOException { - return replicateCodeBasedOnChar(sourceClass, serialVersionUIDs, exemptions, "Double", - "Double", "double", "double", root, "DOUBLE"); + public static String charToDouble(Class sourceClass, String root, Map serialVersionUIDs, + String... 
exemptions) throws IOException { + return replicateCodeBasedOnChar(sourceClass, serialVersionUIDs, exemptions, "Double", "Double", "double", + "double", root, "DOUBLE"); } - public static String charToFloat(Class sourceClass, String root, - Map serialVersionUIDs, String... exemptions) throws IOException { - return replicateCodeBasedOnChar(sourceClass, serialVersionUIDs, exemptions, "Float", - "Float", "float", "float", root, "FLOAT"); + public static String charToFloat(Class sourceClass, String root, Map serialVersionUIDs, + String... exemptions) throws IOException { + return replicateCodeBasedOnChar(sourceClass, serialVersionUIDs, exemptions, "Float", "Float", "float", "float", + root, "FLOAT"); } - public static String charToInteger(Class sourceClass, String root, - Map serialVersionUIDs, String... exemptions) throws IOException { - return replicateCodeBasedOnChar(sourceClass, serialVersionUIDs, exemptions, "Integer", - "Int", "integer", "int", root, "INT"); + public static String charToInteger(Class sourceClass, String root, Map serialVersionUIDs, + String... exemptions) throws IOException { + return replicateCodeBasedOnChar(sourceClass, serialVersionUIDs, exemptions, "Integer", "Int", "integer", "int", + root, "INT"); } - public static String charToLong(Class sourceClass, String root, - Map serialVersionUIDs, String... exemptions) throws IOException { - return replicateCodeBasedOnChar(sourceClass, serialVersionUIDs, exemptions, "Long", "Long", - "long", "long", root, "LONG"); + public static String charToLong(Class sourceClass, String root, Map serialVersionUIDs, + String... exemptions) throws IOException { + return replicateCodeBasedOnChar(sourceClass, serialVersionUIDs, exemptions, "Long", "Long", "long", "long", + root, "LONG"); } - private static String charToShort(Class sourceClass, String root, - Map serialVersionUIDs, String... 
exemptions) throws IOException { - return replicateCodeBasedOnChar(sourceClass, serialVersionUIDs, exemptions, "Short", - "Short", "short", "short", root, "SHORT"); + private static String charToShort(Class sourceClass, String root, Map serialVersionUIDs, + String... exemptions) throws IOException { + return replicateCodeBasedOnChar(sourceClass, serialVersionUIDs, exemptions, "Short", "Short", "short", "short", + root, "SHORT"); } - public static String charLongToLongInt(Class sourceClass, String root, String... exemptions) - throws IOException { + public static String charLongToLongInt(Class sourceClass, String root, String... exemptions) throws IOException { final String pairs[][] = new String[][] { - // these happen in order, so we want to turn our longs to ints first, then do char - // to long, we can't actually discriminate between "Long" as text and "Long" as a - // type, - // so we are going to fail for integers here, but it is hopefully enough to use just - // for the Timsort kernel that needs it. + // these happen in order, so we want to turn our longs to ints first, then do char to long, we can't + // actually discriminate between "Long" as text and "Long" as a type, + // so we are going to fail for integers here, but it is hopefully enough to use just for the Timsort + // kernel that needs it. {"Long", "Int"}, {"long", "int"}, {"LONG", "INT"}, @@ -152,14 +145,12 @@ public static String charLongToLongInt(Class sourceClass, String root, String... return replaceAll(sourceClass, null, exemptions, root, pairs); } - public static String longToInt(Class sourceClass, String root, String... exemptions) - throws IOException { + public static String longToInt(Class sourceClass, String root, String... 
exemptions) throws IOException { final String pairs[][] = new String[][] { - // these happen in order, so we want to turn our longs to ints first, then do char - // to long, we can't actually discriminate between "Long" as text and "Long" as a - // type, - // so we are going to fail for integers here, but it is hopefully enough to use just - // for the Timsort kernel that needs it. + // these happen in order, so we want to turn our longs to ints first, then do char to long, we can't + // actually discriminate between "Long" as text and "Long" as a type, + // so we are going to fail for integers here, but it is hopefully enough to use just for the Timsort + // kernel that needs it. {"Long", "Int"}, {"long", "int"}, {"LONG", "INT"}, @@ -167,14 +158,12 @@ public static String longToInt(Class sourceClass, String root, String... exempti return replaceAll(sourceClass, null, exemptions, root, pairs); } - public static String charLongToIntInt(Class sourceClass, String root, String... exemptions) - throws IOException { + public static String charLongToIntInt(Class sourceClass, String root, String... exemptions) throws IOException { final String pairs[][] = new String[][] { - // these happen in order, so we want to turn our longs to ints first, then do char - // to long, we can't actually discriminate between "Long" as text and "Long" as a - // type, - // so we are going to fail for integers here, but it is hopefully enough to use just - // for the Timsort kernel that needs it. + // these happen in order, so we want to turn our longs to ints first, then do char to long, we can't + // actually discriminate between "Long" as text and "Long" as a type, + // so we are going to fail for integers here, but it is hopefully enough to use just for the Timsort + // kernel that needs it. {"Long", "Int"}, {"long", "int"}, {"LONG", "INT"}, @@ -187,97 +176,91 @@ public static String charLongToIntInt(Class sourceClass, String root, String... 
return replaceAll(sourceClass, null, exemptions, root, pairs); } - public static String intToObject(Class sourceClass, String root, String... exemptions) - throws IOException { - return replicateCodeBasedOnInt(sourceClass, null, exemptions, "Object", "Object", "Object", - "Object", root, "OBJECT"); + public static String intToObject(Class sourceClass, String root, String... exemptions) throws IOException { + return replicateCodeBasedOnInt(sourceClass, null, exemptions, "Object", "Object", "Object", "Object", root, + "OBJECT"); } - private static String intToObject(Class sourceClass, String root, - Map serialVersionUIDs, String... exemptions) throws IOException { - return replicateCodeBasedOnInt(sourceClass, serialVersionUIDs, exemptions, "Object", - "Object", "Object", "Object", root, "OBJECT"); + private static String intToObject(Class sourceClass, String root, Map serialVersionUIDs, + String... exemptions) throws IOException { + return replicateCodeBasedOnInt(sourceClass, serialVersionUIDs, exemptions, "Object", "Object", "Object", + "Object", root, "OBJECT"); } - private static String intToChar(Class sourceClass, String root, - Map serialVersionUIDs, String... exemptions) throws IOException { - return replicateCodeBasedOnInt(sourceClass, serialVersionUIDs, exemptions, "Character", - "Char", "char", "char", root, "CHAR"); + private static String intToChar(Class sourceClass, String root, Map serialVersionUIDs, + String... exemptions) throws IOException { + return replicateCodeBasedOnInt(sourceClass, serialVersionUIDs, exemptions, "Character", "Char", "char", "char", + root, "CHAR"); } - private static String intToByte(Class sourceClass, String root, - Map serialVersionUIDs, String... exemptions) throws IOException { - return replicateCodeBasedOnInt(sourceClass, serialVersionUIDs, exemptions, "Byte", "Byte", - "byte", "byte", root, "BYTE"); + private static String intToByte(Class sourceClass, String root, Map serialVersionUIDs, + String... 
exemptions) throws IOException { + return replicateCodeBasedOnInt(sourceClass, serialVersionUIDs, exemptions, "Byte", "Byte", "byte", "byte", root, + "BYTE"); } - private static String intToDouble(Class sourceClass, String root, - Map serialVersionUIDs, String... exemptions) throws IOException { - return replicateCodeBasedOnInt(sourceClass, serialVersionUIDs, exemptions, "Double", - "Double", "double", "double", root, "DOUBLE"); + private static String intToDouble(Class sourceClass, String root, Map serialVersionUIDs, + String... exemptions) throws IOException { + return replicateCodeBasedOnInt(sourceClass, serialVersionUIDs, exemptions, "Double", "Double", "double", + "double", root, "DOUBLE"); } - private static String intToFloat(Class sourceClass, String root, - Map serialVersionUIDs, String... exemptions) throws IOException { - return replicateCodeBasedOnInt(sourceClass, serialVersionUIDs, exemptions, "Float", "Float", - "float", "float", root, "FLOAT"); + private static String intToFloat(Class sourceClass, String root, Map serialVersionUIDs, + String... exemptions) throws IOException { + return replicateCodeBasedOnInt(sourceClass, serialVersionUIDs, exemptions, "Float", "Float", "float", "float", + root, "FLOAT"); } - private static String intToLong(Class sourceClass, String root, - Map serialVersionUIDs, String... exemptions) throws IOException { - return replicateCodeBasedOnInt(sourceClass, serialVersionUIDs, exemptions, "Long", "Long", - "long", "long", root, "LONG"); + private static String intToLong(Class sourceClass, String root, Map serialVersionUIDs, + String... exemptions) throws IOException { + return replicateCodeBasedOnInt(sourceClass, serialVersionUIDs, exemptions, "Long", "Long", "long", "long", root, + "LONG"); } - private static String intToShort(Class sourceClass, String root, - Map serialVersionUIDs, String... 
exemptions) throws IOException { - return replicateCodeBasedOnInt(sourceClass, serialVersionUIDs, exemptions, "Short", "Short", - "short", "short", root, "SHORT"); + private static String intToShort(Class sourceClass, String root, Map serialVersionUIDs, + String... exemptions) throws IOException { + return replicateCodeBasedOnInt(sourceClass, serialVersionUIDs, exemptions, "Short", "Short", "short", "short", + root, "SHORT"); } - private static void shortToByte(Class sourceClass, String root, - Map serialVersionUIDs, String... exemptions) throws IOException { - replicateCodeBasedOnShort(sourceClass, serialVersionUIDs, exemptions, "Byte", "byte", root, - "BYTE"); + private static void shortToByte(Class sourceClass, String root, Map serialVersionUIDs, + String... exemptions) throws IOException { + replicateCodeBasedOnShort(sourceClass, serialVersionUIDs, exemptions, "Byte", "byte", root, "BYTE"); } - private static void shortToDouble(Class sourceClass, String root, - Map serialVersionUIDs, String... exemptions) throws IOException { - replicateCodeBasedOnShort(sourceClass, serialVersionUIDs, exemptions, "Double", "double", - root, "DOUBLE"); + private static void shortToDouble(Class sourceClass, String root, Map serialVersionUIDs, + String... exemptions) throws IOException { + replicateCodeBasedOnShort(sourceClass, serialVersionUIDs, exemptions, "Double", "double", root, "DOUBLE"); } - private static void shortToFloat(Class sourceClass, String root, - Map serialVersionUIDs, String... exemptions) throws IOException { - replicateCodeBasedOnShort(sourceClass, serialVersionUIDs, exemptions, "Float", "float", - root, "FLOAT"); + private static void shortToFloat(Class sourceClass, String root, Map serialVersionUIDs, + String... exemptions) throws IOException { + replicateCodeBasedOnShort(sourceClass, serialVersionUIDs, exemptions, "Float", "float", root, "FLOAT"); } - private static void shortToInteger(Class sourceClass, String root, - Map serialVersionUIDs, String... 
exemptions) throws IOException { - replicateCodeBasedOnShort(sourceClass, serialVersionUIDs, exemptions, "Integer", "int", - root, "INT", new String[][] {{"DbShortArray", "DbIntArray"}}); + private static void shortToInteger(Class sourceClass, String root, Map serialVersionUIDs, + String... exemptions) throws IOException { + replicateCodeBasedOnShort(sourceClass, serialVersionUIDs, exemptions, "Integer", "int", root, "INT", + new String[][] {{"DbShortArray", "DbIntArray"}}); } - private static void shortToLong(Class sourceClass, String root, - Map serialVersionUIDs, String... exemptions) throws IOException { - replicateCodeBasedOnShort(sourceClass, serialVersionUIDs, exemptions, "Long", "long", root, - "LONG", new String[][] {{"Integer.signum", "Long.signum"}}); + private static void shortToLong(Class sourceClass, String root, Map serialVersionUIDs, + String... exemptions) throws IOException { + replicateCodeBasedOnShort(sourceClass, serialVersionUIDs, exemptions, "Long", "long", root, "LONG", + new String[][] {{"Integer.signum", "Long.signum"}}); } - private static void floatToDouble(Class sourceClass, String root, - Map serialVersionUIDs, String... exemptions) throws IOException { - replicateCodeBasedOnFloat(sourceClass, serialVersionUIDs, exemptions, "Double", "double", - root, "DOUBLE"); + private static void floatToDouble(Class sourceClass, String root, Map serialVersionUIDs, + String... exemptions) throws IOException { + replicateCodeBasedOnFloat(sourceClass, serialVersionUIDs, exemptions, "Double", "double", root, "DOUBLE"); } - public static List charToAll(Class sourceClass, String root, String... exemptions) - throws IOException { + public static List charToAll(Class sourceClass, String root, String... exemptions) throws IOException { return charToAll(sourceClass, root, null, exemptions); } - private static List charToAll(Class sourceClass, String root, - Map serialVersionUIDs, String... 
exemptions) throws IOException { + private static List charToAll(Class sourceClass, String root, Map serialVersionUIDs, + String... exemptions) throws IOException { final List results = new ArrayList<>(); results.add(charToBoolean(sourceClass, root, serialVersionUIDs, exemptions)); results.add(charToByte(sourceClass, root, serialVersionUIDs, exemptions)); @@ -289,43 +272,38 @@ private static List charToAll(Class sourceClass, String root, return results; } - public static List charToIntegers(Class sourceClass, String root, String... exemptions) - throws IOException { + public static List charToIntegers(Class sourceClass, String root, String... exemptions) throws IOException { return charToIntegers(sourceClass, root, null, exemptions); } - public static List charToAllButBoolean(Class sourceClass, String root, - String... exemptions) throws IOException { + public static List charToAllButBoolean(Class sourceClass, String root, String... exemptions) + throws IOException { return charToAllButBoolean(sourceClass, root, null, exemptions); } - public static String charToByte(Class sourceClass, String root, String... exemptions) - throws IOException { + public static String charToByte(Class sourceClass, String root, String... exemptions) throws IOException { return charToByte(sourceClass, root, null, exemptions); } - public static String charToObject(Class sourceClass, String root, String... exemptions) - throws IOException { + public static String charToObject(Class sourceClass, String root, String... exemptions) throws IOException { return charToObject(sourceClass, root, null, exemptions); } - public static String charToBoolean(Class sourceClass, String root, String... exemptions) - throws IOException { + public static String charToBoolean(Class sourceClass, String root, String... exemptions) throws IOException { return charToBoolean(sourceClass, root, null, exemptions); } - public static String charToLong(Class sourceClass, String root, String... 
exemptions) - throws IOException { + public static String charToLong(Class sourceClass, String root, String... exemptions) throws IOException { return charToLong(sourceClass, root, null, exemptions); } - public static void charToAllButBooleanAndLong(Class sourceClass, String root, - String... exemptions) throws IOException { + public static void charToAllButBooleanAndLong(Class sourceClass, String root, String... exemptions) + throws IOException { charToAllButBooleanAndLong(sourceClass, root, null, exemptions); } - public static List charToAllButBoolean(Class sourceClass, String root, - Map serialVersionUIDs, String... exemptions) throws IOException { + public static List charToAllButBoolean(Class sourceClass, String root, Map serialVersionUIDs, + String... exemptions) throws IOException { final List resultFiles = new ArrayList<>(); resultFiles.add(charToByte(sourceClass, root, serialVersionUIDs, exemptions)); resultFiles.add(charToDouble(sourceClass, root, serialVersionUIDs, exemptions)); @@ -337,8 +315,8 @@ public static List charToAllButBoolean(Class sourceClass, String root, return resultFiles; } - public static List charToIntegers(Class sourceClass, String root, - Map serialVersionUIDs, String... exemptions) throws IOException { + public static List charToIntegers(Class sourceClass, String root, Map serialVersionUIDs, + String... exemptions) throws IOException { final List resultFiles = new ArrayList<>(); resultFiles.add(charToByte(sourceClass, root, serialVersionUIDs, exemptions)); resultFiles.add(charToShort(sourceClass, root, serialVersionUIDs, exemptions)); @@ -347,8 +325,8 @@ public static List charToIntegers(Class sourceClass, String root, return resultFiles; } - private static void charToAllButBooleanAndLong(Class sourceClass, String root, - Map serialVersionUIDs, String... exemptions) throws IOException { + private static void charToAllButBooleanAndLong(Class sourceClass, String root, Map serialVersionUIDs, + String... 
exemptions) throws IOException { charToByte(sourceClass, root, serialVersionUIDs, exemptions); charToDouble(sourceClass, root, serialVersionUIDs, exemptions); charToFloat(sourceClass, root, serialVersionUIDs, exemptions); @@ -356,13 +334,13 @@ private static void charToAllButBooleanAndLong(Class sourceClass, String root, charToShort(sourceClass, root, serialVersionUIDs, exemptions); } - public static void charToAllButBooleanAndByte(Class sourceClass, String root, - String... exemptions) throws IOException { + public static void charToAllButBooleanAndByte(Class sourceClass, String root, String... exemptions) + throws IOException { charToAllButBooleanAndByte(sourceClass, root, null, exemptions); } - private static void charToAllButBooleanAndByte(Class sourceClass, String root, - Map serialVersionUIDs, String... exemptions) throws IOException { + private static void charToAllButBooleanAndByte(Class sourceClass, String root, Map serialVersionUIDs, + String... exemptions) throws IOException { charToDouble(sourceClass, root, serialVersionUIDs, exemptions); charToFloat(sourceClass, root, serialVersionUIDs, exemptions); charToInteger(sourceClass, root, serialVersionUIDs, exemptions); @@ -370,19 +348,18 @@ private static void charToAllButBooleanAndByte(Class sourceClass, String root, charToShort(sourceClass, root, serialVersionUIDs, exemptions); } - public static void charToAllButBooleanAndFloats(Class sourceClass, String root, - String... exemptions) throws IOException { + public static void charToAllButBooleanAndFloats(Class sourceClass, String root, String... exemptions) + throws IOException { charToAllButBooleanAndFloats(sourceClass, root, null, exemptions); } - public static void charToShortAndByte(Class sourceClass, String root, String... exemptions) - throws IOException { + public static void charToShortAndByte(Class sourceClass, String root, String... 
exemptions) throws IOException { charToByte(sourceClass, root, null, exemptions); charToShort(sourceClass, root, null, exemptions); } - public static void charToAllButBooleanAndFloats(Class sourceClass, String root, - Map serialVersionUIDs, String... exemptions) throws IOException { + public static void charToAllButBooleanAndFloats(Class sourceClass, String root, Map serialVersionUIDs, + String... exemptions) throws IOException { charToInteger(sourceClass, root, serialVersionUIDs, exemptions); charToByte(sourceClass, root, serialVersionUIDs, exemptions); charToLong(sourceClass, root, serialVersionUIDs, exemptions); @@ -390,8 +367,8 @@ public static void charToAllButBooleanAndFloats(Class sourceClass, String root, } - public static void shortToAllNumericals(Class sourceClass, String root, - Map serialVersionUIDs, String... exemptions) throws IOException { + public static void shortToAllNumericals(Class sourceClass, String root, Map serialVersionUIDs, + String... exemptions) throws IOException { shortToByte(sourceClass, root, serialVersionUIDs, exemptions); shortToDouble(sourceClass, root, serialVersionUIDs, exemptions); shortToFloat(sourceClass, root, serialVersionUIDs, exemptions); @@ -399,8 +376,8 @@ public static void shortToAllNumericals(Class sourceClass, String root, shortToLong(sourceClass, root, serialVersionUIDs, exemptions); } - public static void intToAllNumericals(Class sourceClass, String root, - Map serialVersionUIDs, String... exemptions) throws IOException { + public static void intToAllNumericals(Class sourceClass, String root, Map serialVersionUIDs, + String... 
exemptions) throws IOException { intToByte(sourceClass, root, serialVersionUIDs, exemptions); intToDouble(sourceClass, root, serialVersionUIDs, exemptions); intToFloat(sourceClass, root, serialVersionUIDs, exemptions); @@ -408,13 +385,13 @@ public static void intToAllNumericals(Class sourceClass, String root, intToShort(sourceClass, root, serialVersionUIDs, exemptions); } - public static List intToAllButBoolean(Class sourceClass, String root, - String... exemptions) throws IOException { + public static List intToAllButBoolean(Class sourceClass, String root, String... exemptions) + throws IOException { return intToAllButBoolean(sourceClass, root, null, exemptions); } - public static List intToAllButBoolean(Class sourceClass, String root, - Map serialVersionUIDs, String... exemptions) throws IOException { + public static List intToAllButBoolean(Class sourceClass, String root, Map serialVersionUIDs, + String... exemptions) throws IOException { final List results = new ArrayList<>(); results.add(intToChar(sourceClass, root, serialVersionUIDs, exemptions)); results.add(intToByte(sourceClass, root, serialVersionUIDs, exemptions)); @@ -426,32 +403,31 @@ public static List intToAllButBoolean(Class sourceClass, String root, } public static void shortToAllIntegralTypes(Class sourceClass, String root, String... exemptions) - throws IOException { + throws IOException { shortToAllIntegralTypes(sourceClass, root, null, exemptions); } - private static void shortToAllIntegralTypes(Class sourceClass, String root, - Map serialVersionUIDs, String... exemptions) throws IOException { + private static void shortToAllIntegralTypes(Class sourceClass, String root, Map serialVersionUIDs, + String... 
exemptions) throws IOException { shortToByte(sourceClass, root, serialVersionUIDs, exemptions); shortToInteger(sourceClass, root, serialVersionUIDs, exemptions); shortToLong(sourceClass, root, serialVersionUIDs, exemptions); } - public static void floatToAllFloatingPoints(Class sourceClass, String root, - String... exemptions) throws IOException { + public static void floatToAllFloatingPoints(Class sourceClass, String root, String... exemptions) + throws IOException { floatToAllFloatingPoints(sourceClass, root, null, exemptions); } - private static void floatToAllFloatingPoints(Class sourceClass, String root, - Map serialVersionUIDs, String... exemptions) throws IOException { + private static void floatToAllFloatingPoints(Class sourceClass, String root, Map serialVersionUIDs, + String... exemptions) throws IOException { floatToDouble(sourceClass, root, serialVersionUIDs, exemptions); } - public static String replaceAll(Class sourceClass, Map serialVersionUIDs, - String exemptions[], String root, String[]... pairs) throws IOException { + public static String replaceAll(Class sourceClass, Map serialVersionUIDs, String exemptions[], + String root, String[]... pairs) throws IOException { final String basePath = basePathForClass(sourceClass, root); - InputStream inputStream = - new FileInputStream(basePath + "/" + sourceClass.getSimpleName() + ".java"); + InputStream inputStream = new FileInputStream(basePath + "/" + sourceClass.getSimpleName() + ".java"); int nextChar; final StringBuilder inputText = new StringBuilder(); while ((nextChar = inputStream.read()) != -1) { @@ -461,18 +437,17 @@ public static String replaceAll(Class sourceClass, Map serialVersi final String className = replaceAll(sourceClass.getSimpleName(), null, exemptions, pairs); final String fullClassName = sourceClass.getPackage().getName() + "." + className; - Long serialVersionUID = - serialVersionUIDs == null ? 
null : serialVersionUIDs.get(fullClassName); + Long serialVersionUID = serialVersionUIDs == null ? null : serialVersionUIDs.get(fullClassName); String fullPath = basePath + "/" + className + ".java"; System.out.println("Generating java file " + fullPath); PrintWriter out = new PrintWriter(fullPath); out.println( - "/* ---------------------------------------------------------------------------------------------------------------------"); + "/* ---------------------------------------------------------------------------------------------------------------------"); out.println(" * AUTO-GENERATED CLASS - DO NOT EDIT MANUALLY - for any changes edit " - + sourceClass.getSimpleName() + " and regenerate"); + + sourceClass.getSimpleName() + " and regenerate"); out.println( - " * ------------------------------------------------------------------------------------------------------------------ */"); + " * ------------------------------------------------------------------------------------------------------------------ */"); out.print(replaceAll(inputText.toString(), serialVersionUID, exemptions, pairs)); out.flush(); out.close(); @@ -482,8 +457,7 @@ public static String replaceAll(Class sourceClass, Map serialVersi @NotNull public static String basePathForClass(Class sourceClass, String root) { - return getModuleName(sourceClass) + "/" + root + "/" - + sourceClass.getPackage().getName().replace('.', '/'); + return getModuleName(sourceClass) + "/" + root + "/" + sourceClass.getPackage().getName().replace('.', '/'); } @NotNull @@ -491,8 +465,7 @@ public static String pathForClass(Class sourceClass, String root) { return basePathForClass(sourceClass, root) + "/" + sourceClass.getSimpleName() + ".java"; } - public static String replaceAll(String inputText, Long serialVersionUID, String exemptions[], - String[]... pairs) { + public static String replaceAll(String inputText, Long serialVersionUID, String exemptions[], String[]... 
pairs) { String result = inputText; for (int i = 0; i < exemptions.length; i++) { String exemption = exemptions[i]; @@ -513,8 +486,8 @@ public static String replaceAll(String inputText, Long serialVersionUID, String if (serialVersionUID != null) { result = result.replaceAll( - "(\\s+(private\\s+)?(static\\s+)?(final\\s+)?long\\s+serialVersionUID\\s+=\\s+)\\-?[0-9]+L\\s*;", - "$1" + serialVersionUID + "L;"); + "(\\s+(private\\s+)?(static\\s+)?(final\\s+)?long\\s+serialVersionUID\\s+=\\s+)\\-?[0-9]+L\\s*;", + "$1" + serialVersionUID + "L;"); } return result; @@ -522,8 +495,7 @@ public static String replaceAll(String inputText, Long serialVersionUID, String private static String getModuleName(Class sourceClass) { for (File file : new File(".").listFiles()) { - // there is a folder 'lib' that exists during the build process that matches the the - // third startsWith + // there is a folder 'lib' that exists during the build process that matches the the third startsWith // If we are in this package, don't bother looking at the other two. if (sourceClass.getName().startsWith("io.deephaven.libs.primitives")) { if (file.isDirectory() && file.getName().equals("DB")) { @@ -534,8 +506,8 @@ private static String getModuleName(Class sourceClass) { return file.getPath(); } } else { - if (file.isDirectory() && sourceClass.getName().startsWith( - "io.deephaven." + file.getName().toLowerCase().replace('-', '_') + ".")) { + if (file.isDirectory() && sourceClass.getName() + .startsWith("io.deephaven." + file.getName().toLowerCase().replace('-', '_') + ".")) { return file.getPath(); } } @@ -543,15 +515,15 @@ private static String getModuleName(Class sourceClass) { throw new RuntimeException("Unable to find " + sourceClass); } - public static void intToLongAndFloatingPoints(Class sourceClass, String root, - Map serialVersionUIDs, String... exemptions) throws IOException { + public static void intToLongAndFloatingPoints(Class sourceClass, String root, Map serialVersionUIDs, + String... 
exemptions) throws IOException { intToDouble(sourceClass, root, serialVersionUIDs, exemptions); intToFloat(sourceClass, root, serialVersionUIDs, exemptions); intToLong(sourceClass, root, serialVersionUIDs, exemptions); } - public static void intToLongAndFloatingPoints(Class sourceClass, String root, - String... exemptions) throws IOException { + public static void intToLongAndFloatingPoints(Class sourceClass, String root, String... exemptions) + throws IOException { intToLongAndFloatingPoints(sourceClass, root, null, exemptions); } } diff --git a/CompilerTools/src/main/java/io/deephaven/compilertools/ReplicateUtilities.java b/CompilerTools/src/main/java/io/deephaven/compilertools/ReplicateUtilities.java index 31997eb9064..b64a24e1d3b 100644 --- a/CompilerTools/src/main/java/io/deephaven/compilertools/ReplicateUtilities.java +++ b/CompilerTools/src/main/java/io/deephaven/compilertools/ReplicateUtilities.java @@ -15,20 +15,18 @@ public class ReplicateUtilities { /** - * Take a list of lines; and apply a given fixup expressed as a code region, regular expression, - * then function from the matcher to the replacement line. + * Take a list of lines; and apply a given fixup expressed as a code region, regular expression, then function from + * the matcher to the replacement line. * * @param lines the input lines - * @param region the name of the region started by "// region <name>" and ended by "// - * endregion <name>" + * @param region the name of the region started by "// region <name>" and ended by "// endregion <name>" * @param searchPattern the pattern to search for - * @param replacer a function from the search pattern's successful matcher to the new lines to - * apply as a List. + * @param replacer a function from the search pattern's successful matcher to the new lines to apply as a List. 
* @return a new list of lines with the fixup applied */ @NotNull - public static List applyFixup(List lines, final String region, - final String searchPattern, final Function> replacer) { + public static List applyFixup(List lines, final String region, final String searchPattern, + final Function> replacer) { final List newLines = new ArrayList<>(); final Pattern startPattern = Pattern.compile("// region " + region); @@ -74,15 +72,13 @@ public static List applyFixup(List lines, final String region, * Take a list of lines; and apply a given fixup expressed as a code region and replacements * * @param lines the input lines - * @param region the name of the region started by "// region <name>" and ended by "// - * endregion <name>" - * @param replacements an array with an even number of elements, even elements are a thing to - * replace, the next element is the thing to replace it with + * @param region the name of the region started by "// region <name>" and ended by "// endregion <name>" + * @param replacements an array with an even number of elements, even elements are a thing to replace, the next + * element is the thing to replace it with * @return a new list of lines with the fixup applied */ @NotNull - public static List simpleFixup(List lines, final String region, - final String... replacements) { + public static List simpleFixup(List lines, final String region, final String... replacements) { final List newLines = new ArrayList<>(); final Pattern startPattern = Pattern.compile("// region " + region); @@ -117,16 +113,15 @@ public static List simpleFixup(List lines, final String region, } /** - * Do whatever miscellaneous cleanups might be appropriate for all replicated classes. For now, - * this removes identical import lines. + * Do whatever miscellaneous cleanups might be appropriate for all replicated classes. For now, this removes + * identical import lines. 
*/ public static List standardCleanups(List lines) { final List newLines = new ArrayList<>(); final Set imports = new HashSet<>(); for (final String line : lines) { - // Gets copied over if it is not an import statement, or if it is an import statement - // that we have seen + // Gets copied over if it is not an import statement, or if it is an import statement that we have seen // for the first time. Otherwise gets dropped. if (!line.startsWith("import") || imports.add(line)) { newLines.add(line); @@ -136,8 +131,8 @@ public static List standardCleanups(List lines) { } /** - * Locates the region demarked by "// region <name>" and ended by "// endregion - * <name>" and adds extra lines at the top. + * Locates the region demarked by "// region <name>" and ended by "// endregion <name>" and adds extra + * lines at the top. * * @param lines the lines to process * @param region the name of the region @@ -145,8 +140,7 @@ public static List standardCleanups(List lines) { * @return a new list of lines */ @NotNull - public static List insertRegion(List lines, final String region, - List extraLines) { + public static List insertRegion(List lines, final String region, List extraLines) { final List newLines = new ArrayList<>(); final Pattern startPattern = Pattern.compile("// region " + region); @@ -180,8 +174,7 @@ public static List insertRegion(List lines, final String region, } /** - * Locates the region demarked by "// region <name>" and ended by "// endregion - * <name>" and removes it. + * Locates the region demarked by "// region <name>" and ended by "// endregion <name>" and removes it. * * @param lines the lines to process * @param region the name of the region @@ -193,8 +186,8 @@ public static List removeRegion(List lines, final String region) } /** - * Locates the region demarked by "// region <name>" and ended by "// endregion - * <name>" and replaces the text with the contents of replacement. 
+ * Locates the region demarked by "// region <name>" and ended by "// endregion <name>" and replaces the + * text with the contents of replacement. * * @param lines the lines to process * @param region the name of the region @@ -202,8 +195,7 @@ public static List removeRegion(List lines, final String region) * @return a new list of lines */ @NotNull - public static List replaceRegion(List lines, final String region, - List replacement) { + public static List replaceRegion(List lines, final String region, List replacement) { final List newLines = new ArrayList<>(); final Pattern startPattern = Pattern.compile("//\\s*region " + region); @@ -237,16 +229,14 @@ public static List replaceRegion(List lines, final String region return newLines; } - public static List globalReplacements(int skip, List lines, - String... replacements) { + public static List globalReplacements(int skip, List lines, String... replacements) { if (replacements.length == 0 || replacements.length % 2 != 0) { throw new IllegalArgumentException("Bad replacement length: " + replacements.length); } final Stream startStream = lines.subList(0, skip).stream(); final Stream replacementStream = lines.subList(skip, lines.size()).stream(); - return Stream - .concat(startStream, replacementStream.map(x -> doLineReplacements(x, replacements))) - .collect(Collectors.toList()); + return Stream.concat(startStream, replacementStream.map(x -> doLineReplacements(x, replacements))) + .collect(Collectors.toList()); } public static List globalReplacements(List lines, String... replacements) { @@ -254,13 +244,13 @@ public static List globalReplacements(List lines, String... repl } public static List addImport(List lines, Class... 
importClasses) { - return addImport(lines, Arrays.stream(importClasses) - .map(c -> "import " + c.getCanonicalName() + ";").toArray(String[]::new)); + return addImport(lines, + Arrays.stream(importClasses).map(c -> "import " + c.getCanonicalName() + ";").toArray(String[]::new)); } public static List removeImport(List lines, Class... importClasses) { return removeImport(lines, Arrays.stream(importClasses) - .map(c -> "\\s*import\\s+" + c.getCanonicalName() + "\\s*;").toArray(String[]::new)); + .map(c -> "\\s*import\\s+" + c.getCanonicalName() + "\\s*;").toArray(String[]::new)); } public static List addImport(List lines, String... importString) { @@ -279,8 +269,7 @@ public static List addImport(List lines, String... importString) * Remove all of the specified imports -- Error if any are not found. */ public static List removeImport(List lines, String... importRegex) { - final List patterns = - Arrays.stream(importRegex).map(Pattern::compile).collect(Collectors.toList()); + final List patterns = Arrays.stream(importRegex).map(Pattern::compile).collect(Collectors.toList()); final List newLines = removeAnyImports(lines, patterns); if (!patterns.isEmpty()) { throw new IllegalArgumentException("Could not find imports to remove: " + patterns); @@ -292,8 +281,7 @@ public static List removeImport(List lines, String... importRege * Remove imports if they match any of the patterns. */ public static List removeAnyImports(List lines, String... 
importRegex) { - final List patterns = - Arrays.stream(importRegex).map(Pattern::compile).collect(Collectors.toList()); + final List patterns = Arrays.stream(importRegex).map(Pattern::compile).collect(Collectors.toList()); return removeAnyImports(lines, patterns); } @@ -332,15 +320,14 @@ public static List fixupChunkAttributes(List lines) { @NotNull public static List fixupChunkAttributes(List lines, final String genericType) { - lines = lines.stream() - .map(x -> x.replaceAll("ObjectChunk<([^>]*)>", "ObjectChunk<" + genericType + ", $1>")) - .collect(Collectors.toList()); + lines = lines.stream().map(x -> x.replaceAll("ObjectChunk<([^>]*)>", "ObjectChunk<" + genericType + ", $1>")) + .collect(Collectors.toList()); return lines; } public static void fixupChunkAttributes(String objectPath) throws IOException { - FileUtils.writeLines(new File(objectPath), fixupChunkAttributes( - FileUtils.readLines(new File(objectPath), Charset.defaultCharset()))); + FileUtils.writeLines(new File(objectPath), + fixupChunkAttributes(FileUtils.readLines(new File(objectPath), Charset.defaultCharset()))); } public static List indent(final List lines, int spaces) { diff --git a/CompilerTools/src/test/java/io/deephaven/compilertools/TestCompilerTools.java b/CompilerTools/src/test/java/io/deephaven/compilertools/TestCompilerTools.java index 8b82ae60898..ca0a1e6cbab 100644 --- a/CompilerTools/src/test/java/io/deephaven/compilertools/TestCompilerTools.java +++ b/CompilerTools/src/test/java/io/deephaven/compilertools/TestCompilerTools.java @@ -21,11 +21,9 @@ public class TestCompilerTools { private final static List raisedThrowables = new ArrayList<>(); - // Two nearly-identical classes, so we can get an idea of how long it takes to compile one of - // them + // Two nearly-identical classes, so we can get an idea of how long it takes to compile one of them static { - final StringBuilder testClassCode1 = - new StringBuilder(" public class $CLASSNAME$ {"); + final StringBuilder testClassCode1 = 
new StringBuilder(" public class $CLASSNAME$ {"); testClassCode1.append(" final static String testString = \"Hello World\\n\";"); // Simple static inner classes to generate two class files @@ -38,8 +36,7 @@ public class TestCompilerTools { testClassCode1.append(" }"); for (int i = 0; i < NUM_METHODS; i++) { - testClassCode1.append(" public static void testMethod").append(i) - .append(" (String [] args) {"); + testClassCode1.append(" public static void testMethod").append(i).append(" (String [] args) {"); testClassCode1.append(" System.out.println(testString);"); testClassCode1.append(" }"); } @@ -54,12 +51,11 @@ public void testParallelCompile() throws Throwable { Configuration.getInstance(); final Thread[] threads = new Thread[NUM_THREADS]; - // Use a unique value added to the class name to guarantee unique classes, in case - // workspaces aren't cleared out correctly + // Use a unique value added to the class name to guarantee unique classes, in case workspaces aren't cleared out + // correctly final long startTimeOffset = System.currentTimeMillis(); - // Get a baseline estimate of compilation time, ignoring the first run as it's typically - // much longer + // Get a baseline estimate of compilation time, ignoring the first run as it's typically much longer long totalCompileTimeMillis = 0; for (long i = 0; i < NUM_COMPILE_TESTS; i++) { final long startTimeTest = System.currentTimeMillis(); @@ -67,8 +63,8 @@ public void testParallelCompile() throws Throwable { compile(false, testClassName); final long endTimeTest = System.currentTimeMillis(); final long compileTestMillis = endTimeTest - startTimeTest; - System.out.println(printMillis(endTimeTest) + ": compile test of " + testClassName - + " took " + compileTestMillis + " millis"); + System.out.println(printMillis(endTimeTest) + ": compile test of " + testClassName + " took " + + compileTestMillis + " millis"); if (i > 0) { totalCompileTimeMillis += compileTestMillis; } @@ -76,25 +72,23 @@ public void 
testParallelCompile() throws Throwable { final long averageCompileTime = (totalCompileTimeMillis / (NUM_COMPILE_TESTS - 1)); final long tempWaitStartMillis = averageCompileTime - 500; final long waitStartMillis = Math.max(tempWaitStartMillis, MINIMUM_DELAY_MILLIS); - System.out.println("Average compile time millis: " + averageCompileTime + ", delay will be " - + waitStartMillis + " millis"); + System.out.println("Average compile time millis: " + averageCompileTime + ", delay will be " + waitStartMillis + + " millis"); final String className = "TestClass" + startTimeOffset + NUM_COMPILE_TESTS; - System.out.println( - printMillis(System.currentTimeMillis()) + ": starting test with class " + className); + System.out.println(printMillis(System.currentTimeMillis()) + ": starting test with class " + className); // We don't want to create the threads until the compile is mostly complete for (int i = 0; i < NUM_THREADS; i++) { final int fi = i; // For the lambda threads[i] = new Thread(() -> { try { - final long delay = - fi == 0 ? 0 : fi * WAIT_BETWEEN_THREAD_START_MILLIS + waitStartMillis; + final long delay = fi == 0 ? 0 : fi * WAIT_BETWEEN_THREAD_START_MILLIS + waitStartMillis; final long startTime = System.currentTimeMillis(); compile(fi == 0, className); final long endTime = System.currentTimeMillis(); - System.out.println(printMillis(endTime) + ": thread " + fi - + " completed with specified delay=" + delay - + " (actual run time " + (endTime - startTime) + " millis)"); + System.out.println( + printMillis(endTime) + ": thread " + fi + " completed with specified delay=" + delay + + " (actual run time " + (endTime - startTime) + " millis)"); } catch (Throwable e) { synchronized (raisedThrowables) { System.out.println("Exception occurred: " + e.getMessage()); @@ -121,8 +115,7 @@ public void testParallelCompile() throws Throwable { synchronized (raisedThrowables) { System.out.println(raisedThrowables.size() + - (raisedThrowables.size() == 1 ? 
" exception was raised" - : " exceptions were raised")); + (raisedThrowables.size() == 1 ? " exception was raised" : " exceptions were raised")); if (!raisedThrowables.isEmpty()) { throw raisedThrowables.get(0); } @@ -147,14 +140,13 @@ private void compile(boolean printDetails, final String className) throws Except CompilerTools.compile(className, CLASS_CODE, "io.deephaven.temp"); if (printDetails) { final long endMillis = System.currentTimeMillis(); - System.out.println(printMillis(endMillis) + ": Thread 0 ending compile: (" - + (endMillis - startMillis) + ") millis elapsed"); + System.out.println(printMillis(endMillis) + ": Thread 0 ending compile: (" + (endMillis - startMillis) + + ") millis elapsed"); } } private String printMillis(final long millis) { - LocalDateTime localDateTime = - LocalDateTime.ofInstant(Instant.ofEpochMilli(millis), ZoneId.systemDefault()); + LocalDateTime localDateTime = LocalDateTime.ofInstant(Instant.ofEpochMilli(millis), ZoneId.systemDefault()); return localDateTime.toString(); } @@ -162,18 +154,18 @@ private String printMillis(final long millis) { @Test public void testSimpleCompile() throws Exception { final String program1Text = String.join( - "\n", - "public class $CLASSNAME$ {", - " public static void main (String [] args) {", - " System.out.println (\"Hello, World?\");", - " System.out.println (args.length);", - " }", - " public static class Other {}", - "}"); + "\n", + "public class $CLASSNAME$ {", + " public static void main (String [] args) {", + " System.out.println (\"Hello, World?\");", + " System.out.println (args.length);", + " }", + " public static class Other {}", + "}"); StringBuilder codeLog = new StringBuilder(); - final Class clazz1 = CompilerTools.compile("Test", program1Text, "com.deephaven.test", - codeLog, Collections.emptyMap()); + final Class clazz1 = + CompilerTools.compile("Test", program1Text, "com.deephaven.test", codeLog, Collections.emptyMap()); final Method m1 = clazz1.getMethod("main", 
String[].class); Object[] args1 = new Object[] {new String[] {"hello", "there"}}; m1.invoke(null, args1); @@ -182,21 +174,21 @@ public void testSimpleCompile() throws Exception { @Test public void testCollidingCompile() throws Exception { final String program1Text = String.join( - "\n", - "public class Test {", - " public static void main (String [] args) {", - " System.out.println (\"Hello, World\");", - " System.out.println (args.length);", - " }", - "}"); + "\n", + "public class Test {", + " public static void main (String [] args) {", + " System.out.println (\"Hello, World\");", + " System.out.println (args.length);", + " }", + "}"); List threads = new ArrayList<>(); for (int i = 0; i < 100; ++i) { Thread t = new Thread(() -> { StringBuilder codeLog = new StringBuilder(); try { - final Class clazz1 = CompilerTools.compile("Test", program1Text, - "com.deephaven.test", codeLog, Collections.emptyMap()); + final Class clazz1 = CompilerTools.compile("Test", program1Text, "com.deephaven.test", codeLog, + Collections.emptyMap()); final Method m1 = clazz1.getMethod("main", String[].class); Object[] args1 = new Object[] {new String[] {"hello", "there"}}; m1.invoke(null, args1); diff --git a/Configuration/src/main/java/io/deephaven/configuration/Configuration.java b/Configuration/src/main/java/io/deephaven/configuration/Configuration.java index 479b5bfbc6a..e705f61553f 100644 --- a/Configuration/src/main/java/io/deephaven/configuration/Configuration.java +++ b/Configuration/src/main/java/io/deephaven/configuration/Configuration.java @@ -15,8 +15,8 @@ import java.util.*; /** - * Utility class to provide an enhanced view and common access point for java properties files, as - * well as common configuration pieces such as log directories and workspace-related properties. + * Utility class to provide an enhanced view and common access point for java properties files, as well as common + * configuration pieces such as log directories and workspace-related properties. 
*/ @SuppressWarnings({"WeakerAccess", "unused"}) public class Configuration extends PropertyFile { @@ -31,8 +31,7 @@ public class Configuration extends PropertyFile { /** Property that specifies the directory for process logs. */ @SuppressWarnings("WeakerAccess") - public static final String LOGDIR_PROPERTY = "logDir"; // Defaults to - // getProperty(WORKSPACE_PROPERTY)/../logs + public static final String LOGDIR_PROPERTY = "logDir"; // Defaults to getProperty(WORKSPACE_PROPERTY)/../logs /** Property that specifies the default process log directory. */ private static final String LOGDIR_DEFAULT_PROPERTY = "defaultLogDir"; @@ -89,8 +88,8 @@ public static Configuration getInstance() { } /** - * Get the process name based on the standard process name property - * {@link #PROCESS_NAME_PROPERTY}. Throw an exception if the property name does not exist. + * Get the process name based on the standard process name property {@link #PROCESS_NAME_PROPERTY}. Throw an + * exception if the property name does not exist. * * @return the process name */ @@ -99,45 +98,39 @@ public String getProcessName() { } /** - * Get the process name based on the standard process name property - * {@link #PROCESS_NAME_PROPERTY}. If the property does not exist and requireProcessName is - * true, throw an exception. If the property does not exist and requireProcessName is false, - * return null. + * Get the process name based on the standard process name property {@link #PROCESS_NAME_PROPERTY}. If the property + * does not exist and requireProcessName is true, throw an exception. If the property does not exist and + * requireProcessName is false, return null. 
* - * @param requireProcessName if true, throw an exception if the process name can't be found or - * is empty - * @return the process name, or null if the process name can't be determined and - * requireProcessName is false + * @param requireProcessName if true, throw an exception if the process name can't be found or is empty + * @return the process name, or null if the process name can't be determined and requireProcessName is false */ @SuppressWarnings("WeakerAccess") public @Nullable String getProcessName(final boolean requireProcessName) { final String processName = getStringWithDefault(PROCESS_NAME_PROPERTY, null); if (requireProcessName && (processName == null || processName.isEmpty())) { - throw new ConfigurationException( - "Property " + PROCESS_NAME_PROPERTY + " must be defined and non-empty"); + throw new ConfigurationException("Property " + PROCESS_NAME_PROPERTY + " must be defined and non-empty"); } return processName; } /** - * Determine the directory where process logs should be written. This is based off a series of - * cascading properties. + * Determine the directory where process logs should be written. This is based off a series of cascading properties. *
      - *
    • If the system property {@link #LOGDIR_PROPERTY} is set, the assumption is that a JVM - * parameter has been defined and it takes precedence
    • - *
    • Otherwise, if the process name is defined, the properties - * {@link #LOGDIR_PROPERTY}.processName is used
    • - *
    • If no value has been determined, the default log directory property - * {@link #LOGDIR_DEFAULT_PROPERTY} is used
    • + *
    • If the system property {@link #LOGDIR_PROPERTY} is set, the assumption is that a JVM parameter has been + * defined and it takes precedence
    • + *
    • Otherwise, if the process name is defined, the properties {@link #LOGDIR_PROPERTY}.processName is used
    • + *
    • If no value has been determined, the default log directory property {@link #LOGDIR_DEFAULT_PROPERTY} is + * used
    • *
    • If no value has been determined, "{@link #WORKSPACE_PROPERTY}/../logs" is used
    • *
    - * In all cases except the first (where {@link #LOGDIR_PROPERTY} is defined with the system - * property), the directory is normalized with {@link #normalizeLogDirectoryPath(String)}. If - * validateOrCreateDirectory is true, then if the filesystem contains the determined directory, - * it is validated to be a directory, or if it does not exit it is created. + * In all cases except the first (where {@link #LOGDIR_PROPERTY} is defined with the system property), the directory + * is normalized with {@link #normalizeLogDirectoryPath(String)}. If validateOrCreateDirectory is true, then if the + * filesystem contains the determined directory, it is validated to be a directory, or if it does not exit it is + * created. * - * @param validateOrCreateDirectory if true, then if the directory exists validate that it is a - * directory, and create it if it doesn't exist + * @param validateOrCreateDirectory if true, then if the directory exists validate that it is a directory, and + * create it if it doesn't exist * @return the directory where process logs should be written */ public String getLogDir(final boolean validateOrCreateDirectory) { @@ -151,8 +144,8 @@ public String getLogDir(final boolean validateOrCreateDirectory) { String logDir; if (processName != null && !processName.isEmpty()) { logDir = getPossibleStringWithDefault(null, - LOGDIR_PROPERTY + "." + processName, - LOGDIR_DEFAULT_PROPERTY); + LOGDIR_PROPERTY + "." + processName, + LOGDIR_DEFAULT_PROPERTY); } else { logDir = getStringWithDefault(LOGDIR_DEFAULT_PROPERTY, null); } @@ -175,9 +168,8 @@ public String getLogDir(final boolean validateOrCreateDirectory) { } /** - * Determine the directory where process logs should be written using - * {@link #getLogDir(boolean)}, validating that the directory exists or creating the directory - * if it doesn't exist. 
+ * Determine the directory where process logs should be written using {@link #getLogDir(boolean)}, validating that + * the directory exists or creating the directory if it doesn't exist. * * @return the directory where process logs should be written */ @@ -192,8 +184,7 @@ public String getLogDir() { *
  • {@code } - replaced with the process workspace
  • *
  • {@code } - replaced with the installation root directory
  • *
  • {@code } - replaced with the process name
  • - *
  • {@code } - replaced with the value found by the property - * {@link #LOGDIR_ROOT_PROPERTY}
  • + *
  • {@code } - replaced with the value found by the property {@link #LOGDIR_ROOT_PROPERTY}
  • *
  • After all substitutions, {@link #expandLinuxPath(String)} is called
  • *
  • Finally, {@link Path#normalize()} is called
  • * @@ -207,8 +198,8 @@ public String normalizeLogDirectoryPath(@NotNull final String directoryName) { if (directoryName.contains(LOGROOT_TOKEN)) { final String logDirRoot = getStringWithDefault(LOGDIR_ROOT_PROPERTY, null); if (logDirRoot == null) { - throw new IllegalArgumentException("Directory " + directoryName + " contains " - + LOGROOT_TOKEN + " but " + LOGDIR_ROOT_PROPERTY + " property is not defined"); + throw new IllegalArgumentException("Directory " + directoryName + " contains " + LOGROOT_TOKEN + " but " + + LOGDIR_ROOT_PROPERTY + " property is not defined"); } substitutedPath = directoryName.replace("", logDirRoot); } else { @@ -218,9 +209,9 @@ public String normalizeLogDirectoryPath(@NotNull final String directoryName) { if (substitutedPath.contains(PROCESS_NAME_TOKEN)) { final String processName = getStringWithDefault(PROCESS_NAME_PROPERTY, null); if (processName == null) { - throw new IllegalArgumentException("Directory " + substitutedPath - + " (original path " + directoryName + ") contains " + PROCESS_NAME_TOKEN - + " but " + PROCESS_NAME_PROPERTY + " property is not defined"); + throw new IllegalArgumentException( + "Directory " + substitutedPath + " (original path " + directoryName + ") contains " + + PROCESS_NAME_TOKEN + " but " + PROCESS_NAME_PROPERTY + " property is not defined"); } else { substitutedPath = substitutedPath.replace(PROCESS_NAME_TOKEN, processName); } @@ -228,9 +219,8 @@ public String normalizeLogDirectoryPath(@NotNull final String directoryName) { if (substitutedPath.contains(WORKSPACE_TOKEN)) { if (workspace == null) { - throw new IllegalArgumentException("Directory " + substitutedPath - + " (original path " + directoryName + ") contains " + WORKSPACE_TOKEN + " but " - + WORKSPACE_PROPERTY + " property is not defined"); + throw new IllegalArgumentException("Directory " + substitutedPath + " (original path " + directoryName + + ") contains " + WORKSPACE_TOKEN + " but " + WORKSPACE_PROPERTY + " property is not defined"); } else 
{ substitutedPath = substitutedPath.replace(WORKSPACE_TOKEN, workspace); } @@ -238,9 +228,8 @@ public String normalizeLogDirectoryPath(@NotNull final String directoryName) { if (substitutedPath.contains(DEVROOT_TOKEN)) { if (devroot == null) { - throw new IllegalArgumentException("Directory " + substitutedPath - + " (original path " + directoryName + ") contains " + DEVROOT_TOKEN + " but " - + DEVROOT_PROPERTY + " property is not defined"); + throw new IllegalArgumentException("Directory " + substitutedPath + " (original path " + directoryName + + ") contains " + DEVROOT_TOKEN + " but " + DEVROOT_PROPERTY + " property is not defined"); } else { substitutedPath = substitutedPath.replace(DEVROOT_TOKEN, devroot); } @@ -249,12 +238,10 @@ public String normalizeLogDirectoryPath(@NotNull final String directoryName) { if (substitutedPath.contains(DB_ROOT_TOKEN)) { final String dbRoot = getStringWithDefault(DB_ROOT_PROPERTY, null); if (dbRoot == null) { - throw new IllegalArgumentException("Directory " + substitutedPath - + " (original path " + directoryName + ") contains " + DB_ROOT_TOKEN + " but " - + DB_ROOT_PROPERTY + " property is not defined"); + throw new IllegalArgumentException("Directory " + substitutedPath + " (original path " + directoryName + + ") contains " + DB_ROOT_TOKEN + " but " + DB_ROOT_PROPERTY + " property is not defined"); } - substitutedPath = substitutedPath.replace(DB_ROOT_TOKEN, - getStringWithDefault(DB_ROOT_PROPERTY, null)); + substitutedPath = substitutedPath.replace(DB_ROOT_TOKEN, getStringWithDefault(DB_ROOT_PROPERTY, null)); } // Now perform the expansion of any linux-like (i.e. 
~) path pieces @@ -264,13 +251,12 @@ public String normalizeLogDirectoryPath(@NotNull final String directoryName) { return path.normalize().toString(); } - public void checkDirectory(final String dir, final boolean createDirectory, - final String message) throws IOException { + public void checkDirectory(final String dir, final boolean createDirectory, final String message) + throws IOException { final File logDirFile = new File(dir); if (!logDirFile.exists()) { if (!createDirectory) { - throw new IOException( - message + " " + dir + " does not exist and createDirectory=false"); + throw new IOException(message + " " + dir + " does not exist and createDirectory=false"); } final boolean dirCreated; @@ -283,8 +269,7 @@ public void checkDirectory(final String dir, final boolean createDirectory, throw new IOException(message + " " + dir + " could not be created"); } } else if (!logDirFile.isDirectory()) { - throw new IllegalArgumentException( - message + " " + dir + " exists but is not a directory"); + throw new IllegalArgumentException(message + " " + dir + " exists but is not a directory"); } } @@ -312,8 +297,8 @@ public static Configuration TEST_NEW_Configuration() { } /** - * Find the name of the property specifying the root configuration file. The first property set - * in the ordered list of candidates is returned, or null if none is set. + * Find the name of the property specifying the root configuration file. The first property set in the ordered list + * of candidates is returned, or null if none is set. 
* * @see #FILE_NAME_PROPERTIES * @@ -354,7 +339,7 @@ protected Configuration() { reloadProperties(); } catch (IOException x) { throw new ConfigurationException( - "Could not process configuration from file " + confFileName + " in CLASSPATH.", x); + "Could not process configuration from file " + confFileName + " in CLASSPATH.", x); } String workspacePropValue; @@ -373,8 +358,8 @@ protected Configuration() { } devroot = devrootPropValue; - // The quiet property is available because things like shell scripts may be parsing our - // System.out and they don't + // The quiet property is available because things like shell scripts may be parsing our System.out and they + // don't // want to have to deal with these log messages if (System.getProperty(QUIET_PROPERTY) == null) { if (workspace != null) { @@ -403,11 +388,10 @@ String getConfFileProperty() { * @throws IOException if the property stream cannot be processed * @throws ConfigurationException if the property stream cannot be opened */ - private void load(String fileName, boolean ignoreScope) - throws IOException, ConfigurationException { + private void load(String fileName, boolean ignoreScope) throws IOException, ConfigurationException { final ParsedProperties temp = new ParsedProperties(ignoreScope); - // we explicitly want to set 'properties' here so that if we get an error while loading, - // anything before that error shows up. + // we explicitly want to set 'properties' here so that if we get an error while loading, anything before that + // error shows up. // That is very helpful in debugging. properties = temp; temp.load(fileName); @@ -415,9 +399,9 @@ private void load(String fileName, boolean ignoreScope) } /** - * Return the configuration contexts for this process. This is the list of properties that may - * have been used to parse the configuration file. If the configuration has not been parsed, - * this collection may be empty. This collection will be immutable. 
+ * Return the configuration contexts for this process. This is the list of properties that may have been used to + * parse the configuration file. If the configuration has not been parsed, this collection may be empty. This + * collection will be immutable. * * @return the configuration contexts. */ @@ -427,15 +411,13 @@ public Collection getContextKeyValues() { } /** - * Treat the system property propertyName as a path, and perform substitution with - * {@link #expandLinuxPath(String)}. + * Treat the system property propertyName as a path, and perform substitution with {@link #expandLinuxPath(String)}. * * @param propertyName system property containing a path * @return The value of property propertyName after the manipulations. */ public String lookupPath(String propertyName) { - String result = System.getProperty(propertyName); // In case it's been set with - // System.setProperty after the + String result = System.getProperty(propertyName); // In case it's been set with System.setProperty after the // Configuration instance was created if (result == null) { result = getStringWithDefault(propertyName, null); @@ -449,11 +431,10 @@ public String lookupPath(String propertyName) { /** * Expand the Linux-style path. *
      - *
    • Change linux-style absolute paths to platform independent absolute. If the path starts - * with "/", replace "/" with the current directory's root (e.g. "C:\" on Windows.
    • + *
    • Change linux-style absolute paths to platform independent absolute. If the path starts with "/", replace "/" + * with the current directory's root (e.g. "C:\" on Windows.
    • *
    • If the path begins with "~/", then replace the ~ with the user.home system property.
    • - *
    • If the path does not begin with "~/", then replace all occurrences of ~ with system - * property user.name.
    • + *
    • If the path does not begin with "~/", then replace all occurrences of ~ with system property user.name.
    • *
    • Make sure the path ends in File.separator.
    • *
    * @@ -482,8 +463,7 @@ public static String expandLinuxPath(String path) { } /** - * Get the prefix for absolute files on this system. For example, "/" on linux and "C:\" on - * Windows. + * Get the prefix for absolute files on this system. For example, "/" on linux and "C:\" on Windows. * * @return The absolute path prefix. */ @@ -553,8 +533,7 @@ public TimeZone getServerTimezone() { } /** - * Reload properties, then update with all system properties (properties set in System take - * precedence). + * Reload properties, then update with all system properties (properties set in System take precedence). * * @throws IOException if the property stream cannot be processed * @throws ConfigurationException if the property stream cannot be opened @@ -566,23 +545,23 @@ public void reloadProperties() throws IOException, ConfigurationException { /** * Reload properties, optionally ignoring scope sections - used for testing * - * @param ignoreScope True if scope declarations in the property file should be ignored, false - * otherwise. Used only for testing. + * @param ignoreScope True if scope declarations in the property file should be ignored, false otherwise. Used only + * for testing. * @throws IOException if the property stream cannot be processed * @throws ConfigurationException if the property stream cannot be opened */ void reloadProperties(boolean ignoreScope) throws IOException, ConfigurationException { load(System.getProperty(confFileProperty), ignoreScope); - // If any system properties exist with the same name as a property that's been declared - // final, that will generate + // If any system properties exist with the same name as a property that's been declared final, that will + // generate // an exception the same way it would inside the properties file. properties.putAll(System.getProperties()); } /** - * ONLY the service factory is allowed to get null properties and ONLY for the purposes of using - * default profiles when one doesn't exist. 
This has been relocated here after many people are - * using defaults/nulls in the code when it's not allowed. + * ONLY the service factory is allowed to get null properties and ONLY for the purposes of using default profiles + * when one doesn't exist. This has been relocated here after many people are using defaults/nulls in the code when + * it's not allowed. */ @SuppressWarnings("WeakerAccess") public static class NullableConfiguration extends Configuration { @@ -605,8 +584,8 @@ private Properties load(String path, String propFileName) throws IOException { /** - * The following main method compares two directories of prop files and outputs a CSV report of - * the differences. Usually run before the release of a new version into prod + * The following main method compares two directories of prop files and outputs a CSV report of the differences. + * Usually run before the release of a new version into prod * * @param args dir1 dir2 outFile.csv */ @@ -623,17 +602,17 @@ public static void main(String[] args) { PrintWriter out = new PrintWriter(new File(args[2])); HashSet diffSet = new HashSet<>(); - out.print(propFileDiffReport(diffSet, oldEtcPath, "ise-prod.prop", newEtcPath, - "ise-prod.prop", "", "", false)); + out.print(propFileDiffReport(diffSet, oldEtcPath, "ise-prod.prop", newEtcPath, "ise-prod.prop", "", "", + false)); out.print('\n'); - out.print(propFileDiffReport(diffSet, oldEtcPath, "ise-stage.prop", newEtcPath, - "ise-stage.prop", "", "", false)); + out.print(propFileDiffReport(diffSet, oldEtcPath, "ise-stage.prop", newEtcPath, "ise-stage.prop", "", "", + false)); out.print('\n'); - out.print(propFileDiffReport(diffSet, oldEtcPath, "ise-simulation.prop", newEtcPath, - "ise-simulation.prop", "", "", false)); + out.print(propFileDiffReport(diffSet, oldEtcPath, "ise-simulation.prop", newEtcPath, "ise-simulation.prop", + "", "", false)); out.print('\n'); - out.print(propFileDiffReport(diffSet, newEtcPath, "ise-prod.prop", newEtcPath, - "ise-stage.prop", 
newEtcPath, "ise-simulation.prop", true)); + out.print(propFileDiffReport(diffSet, newEtcPath, "ise-prod.prop", newEtcPath, "ise-stage.prop", newEtcPath, + "ise-simulation.prop", true)); out.print('\n'); out.close(); } catch (IOException e) { @@ -641,9 +620,8 @@ public static void main(String[] args) { } } - private static String propFileDiffReport(Set includedProperties, String dir1, - String file1, String dir2, String file2, String dir3, String file3, boolean useDiffKeys) - throws IOException { + private static String propFileDiffReport(Set includedProperties, String dir1, String file1, String dir2, + String file2, String dir3, String file3, boolean useDiffKeys) throws IOException { StringBuilder out = new StringBuilder(); Configuration configuration = new Configuration(); Properties leftProperties = configuration.load(dir1, file1); @@ -658,33 +636,27 @@ private static String propFileDiffReport(Set includedProperties, String Set keynames = new TreeSet<>(); - for (Enumeration enumeration = leftProperties.propertyNames(); enumeration - .hasMoreElements();) { + for (Enumeration enumeration = leftProperties.propertyNames(); enumeration.hasMoreElements();) { keynames.add((String) enumeration.nextElement()); } - for (Enumeration enumeration = rightProperties.propertyNames(); enumeration - .hasMoreElements();) { + for (Enumeration enumeration = rightProperties.propertyNames(); enumeration.hasMoreElements();) { keynames.add((String) enumeration.nextElement()); } - for (Enumeration enumeration = right2Properties.propertyNames(); enumeration - .hasMoreElements();) { + for (Enumeration enumeration = right2Properties.propertyNames(); enumeration.hasMoreElements();) { keynames.add((String) enumeration.nextElement()); } - out.append("key,").append(dir1).append(File.separator).append(file1).append(",") - .append(dir2).append(File.separator).append(file2).append(",").append(dir3) - .append(File.separator).append(file3).append("\n"); + 
out.append("key,").append(dir1).append(File.separator).append(file1).append(",").append(dir2) + .append(File.separator).append(file2).append(",").append(dir3).append(File.separator).append(file3) + .append("\n"); for (String sKey : keynames) { - String sLeftValue = - leftProperties.containsKey(sKey) ? leftProperties.getProperty(sKey) : ""; - String sRightValue = - rightProperties.containsKey(sKey) ? rightProperties.getProperty(sKey) : ""; - String sRightValue2 = - right2Properties.containsKey(sKey) ? right2Properties.getProperty(sKey) : ""; + String sLeftValue = leftProperties.containsKey(sKey) ? leftProperties.getProperty(sKey) : ""; + String sRightValue = rightProperties.containsKey(sKey) ? rightProperties.getProperty(sKey) : ""; + String sRightValue2 = right2Properties.containsKey(sKey) ? right2Properties.getProperty(sKey) : ""; boolean bSame; if (dir3.length() > 0) { bSame = sLeftValue.equals(sRightValue) && sLeftValue.equals(sRightValue2) - && sRightValue.equals(sRightValue2); + && sRightValue.equals(sRightValue2); } else { bSame = sLeftValue.equals(sRightValue); } @@ -702,9 +674,9 @@ private static String propFileDiffReport(Set includedProperties, String return out.toString(); } - private static void writeLine(StringBuilder out, String sKey, String sLeftValue, - String sRightValue, String sRightValue2) { - out.append(sKey).append(", \"").append(sLeftValue).append("\", \"").append(sRightValue) - .append("\", \"").append(sRightValue2).append("\"\n"); + private static void writeLine(StringBuilder out, String sKey, String sLeftValue, String sRightValue, + String sRightValue2) { + out.append(sKey).append(", \"").append(sLeftValue).append("\", \"").append(sRightValue).append("\", \"") + .append(sRightValue2).append("\"\n"); } } diff --git a/Configuration/src/main/java/io/deephaven/configuration/ConfigurationContext.java b/Configuration/src/main/java/io/deephaven/configuration/ConfigurationContext.java index fc8509950fe..8b2e798a94c 100644 --- 
a/Configuration/src/main/java/io/deephaven/configuration/ConfigurationContext.java +++ b/Configuration/src/main/java/io/deephaven/configuration/ConfigurationContext.java @@ -11,8 +11,7 @@ class ConfigurationContext { // Note that we explicitly use 'host' as a value in the configuration files. // If a system property named 'host' exists, it will be ignored. private static final String HOSTNAME = "host"; - // Note that we explicitly allow the use of 'process' as a shorthand for 'process.name' in the - // configuration files. + // Note that we explicitly allow the use of 'process' as a shorthand for 'process.name' in the configuration files. // If a system property named 'process' exists, it will be ignored in favor of 'process.name'. static final String PROCESS_NAME_PROPERTY = "process"; @@ -29,20 +28,15 @@ public ConfigurationContext() { } /** - * Check whether the current system context matches one of the requested values in the specified - * scope + * Check whether the current system context matches one of the requested values in the specified scope * * @param token The name of the property to check - * @param targetValues A list of possible values for the specified property, such as - * 'process.name=foo' - * @return True if the specified property currently has a value equal to one of the target - * values, false otherwise. + * @param targetValues A list of possible values for the specified property, such as 'process.name=foo' + * @return True if the specified property currently has a value equal to one of the target values, false otherwise. */ public boolean matches(final String token, final List targetValues) { - // Mostly this is just checking system properties, but we also have one special case; the - // hostname is allowed to - // be several different potential values (short name, FQDN, etc), so we have to check that - // separately. 
+ // Mostly this is just checking system properties, but we also have one special case; the hostname is allowed to + // be several different potential values (short name, FQDN, etc), so we have to check that separately. if (token.toLowerCase().equals(HOSTNAME)) { if (hostOptions.isEmpty()) { populateHostnames(); @@ -62,8 +56,8 @@ public boolean matches(final String token, final List targetValues) { } /** - * Retrieve a specified context item. These are usually but not necessarily system properties or - * a small number of other environmental factors. + * Retrieve a specified context item. These are usually but not necessarily system properties or a small number of + * other environmental factors. * * @param token The name of the context item to look up. * @return The current value of the specified context item, or null if no value exists. @@ -72,14 +66,12 @@ private String getContextItem(final String token) { if (contextItems.containsKey(token)) { return contextItems.get(token); } - // If we don't have an existing context item by this name, see if we have a matching system - // property + // If we don't have an existing context item by this name, see if we have a matching system property return getSystemProperty(token); } /** - * Get the hostname of the current system where this is running, along with the IP address and - * fully-qualified name. + * Get the hostname of the current system where this is running, along with the IP address and fully-qualified name. */ private void populateHostnames() { try { @@ -107,18 +99,15 @@ private void populateHostnames() { } } catch (UnknownHostException e) { - // If somehow we can't get the current host name, then don't apply any host-specific - // parameters. - System.err.println( - "Unable to get current host name. Host-specific configuration items will be ignored."); + // If somehow we can't get the current host name, then don't apply any host-specific parameters. + System.err.println("Unable to get current host name. 
Host-specific configuration items will be ignored."); } } /** * Retrieve and store a specified system property's value. * - * @param propertyName The system property to look up. If a value exists, it will be cached for - * later retrieval. + * @param propertyName The system property to look up. If a value exists, it will be cached for later retrieval. * @return The value of the requested system property, or null if the property has no set value. */ private String getSystemProperty(final String propertyName) { @@ -130,14 +119,13 @@ private String getSystemProperty(final String propertyName) { } /** - * Return the configuration contexts. This is the list of system properties that may have been - * used to parse the configuration file. This collection will be immutable. + * Return the configuration contexts. This is the list of system properties that may have been used to parse the + * configuration file. This collection will be immutable. * * @return the configuration contexts. */ Collection getContextKeyValues() { - // Create a new HashSet, so that changes to the underlying contextItems don't find their way - // back to the caller + // Create a new HashSet, so that changes to the underlying contextItems don't find their way back to the caller return Collections.unmodifiableCollection(new HashSet<>(contextItems.keySet())); } } diff --git a/Configuration/src/main/java/io/deephaven/configuration/ConfigurationScope.java b/Configuration/src/main/java/io/deephaven/configuration/ConfigurationScope.java index b502f34f07e..e7be140295b 100644 --- a/Configuration/src/main/java/io/deephaven/configuration/ConfigurationScope.java +++ b/Configuration/src/main/java/io/deephaven/configuration/ConfigurationScope.java @@ -35,8 +35,7 @@ private List getTargetValues() { } /** - * Create a scope with a set of subscopes. Collapse the subscopes to this scope if there is only - * one subscope. + * Create a scope with a set of subscopes. 
Collapse the subscopes to this scope if there is only one subscope. * * @param scopes The list of scopes to evaluate collectively. */ @@ -76,8 +75,8 @@ boolean scopeMatches(ConfigurationContext context) { } /** - * Represent a ConfigurationScope as either the token and values it indicates, or a collection - * of subscopes. Useful when debugging. + * Represent a ConfigurationScope as either the token and values it indicates, or a collection of subscopes. Useful + * when debugging. * * @return The String representation of this scope. */ @@ -86,8 +85,7 @@ public String toString() { if (subScopes.isEmpty()) { return token + "=" + String.join("|", targetValues); } else { - return "[" + subScopes.stream().map(Objects::toString).collect(Collectors.joining(",")) - + "]"; + return "[" + subScopes.stream().map(Objects::toString).collect(Collectors.joining(",")) + "]"; } } } diff --git a/Configuration/src/main/java/io/deephaven/configuration/ParsedProperties.java b/Configuration/src/main/java/io/deephaven/configuration/ParsedProperties.java index 5c7347f231d..70ff8aff4aa 100644 --- a/Configuration/src/main/java/io/deephaven/configuration/ParsedProperties.java +++ b/Configuration/src/main/java/io/deephaven/configuration/ParsedProperties.java @@ -18,9 +18,8 @@ import static io.deephaven.configuration.Configuration.QUIET_PROPERTY; /** - * Class for reading in a customized properties file, applying only the locally-relevant properties - * and keeping track of which properties may not be further modified. Maintains the ordering of the - * properties from the input file. + * Class for reading in a customized properties file, applying only the locally-relevant properties and keeping track of + * which properties may not be further modified. Maintains the ordering of the properties from the input file. 
*/ @SuppressWarnings("SpellCheckingInspection") public class ParsedProperties extends Properties { @@ -51,20 +50,18 @@ public class ParsedProperties extends Properties { private final LinkedList scope = new LinkedList<>(); private final Map props; - // We want to report the line number in the case of errors within a line, and may want to report - // on it later other ways. + // We want to report the line number in the case of errors within a line, and may want to report on it later other + // ways. private int lineNum = 0; - // We also need to keep track of which properties have been finalized and may no longer be - // modified, + // We also need to keep track of which properties have been finalized and may no longer be modified, // regardless of scope. final private Set finalProperties; // We also want to keep track of which properties were set by which line in the current file final private Map> lineNumbers; - // The Context tells us which items in the input file can be ignored for whatever is calling - // this configuration. + // The Context tells us which items in the input file can be ignored for whatever is calling this configuration. final private ConfigurationContext context; final private PropertyInputStreamLoader propertyInputStreamLoader; @@ -79,9 +76,9 @@ public class ParsedProperties extends Properties { // region Properties /** - * A mapping from each property name to the file and location where the property came from. This - * is public so that we can use this in a property inspector to get the full story about what - * properties exist and where those properties were defined. + * A mapping from each property name to the file and location where the property came from. This is public so that + * we can use this in a property inspector to get the full story about what properties exist and where those + * properties were defined. 
* * @return The current map */ @@ -107,9 +104,8 @@ public ParsedProperties() { /** * A constructor that starts with no existing scoped or final properties. * - * @param ignoreScopes True if this parser should ignore scope restrictions, false otherwise. - * Used by the PropertyInspector when checking whether required or disallowed properties - * are present. + * @param ignoreScopes True if this parser should ignore scope restrictions, false otherwise. Used by the + * PropertyInspector when checking whether required or disallowed properties are present. */ public ParsedProperties(final boolean ignoreScopes) { context = new ConfigurationContext(); @@ -126,27 +122,23 @@ public synchronized void putAll(Map t) { } /** - * A constructor that passes through the current state of any scoped and final properties, used - * when processing includefiles. + * A constructor that passes through the current state of any scoped and final properties, used when processing + * includefiles. * - * @param callingProperties An existing ParsedProperties object with existing data that should - * be further filled out. + * @param callingProperties An existing ParsedProperties object with existing data that should be further filled + * out. */ @SuppressWarnings("CopyConstructorMissesField") private ParsedProperties(final ParsedProperties callingProperties) { this.context = callingProperties.getContext(); this.finalProperties = callingProperties.getFinalProperties(); - this.lineNumbers = callingProperties.lineNumbers; // We actually want the original item, not - // a copy. + this.lineNumbers = callingProperties.lineNumbers; // We actually want the original item, not a copy. this.props = callingProperties.props; this.ignoreScopes = callingProperties.ignoreScopes; this.propertyInputStreamLoader = callingProperties.propertyInputStreamLoader; - // explicitly do NOT copy over scope, since we should be back to root when the import - // happens. 
- // explicitly do not copy over filenames; those will get handled during the 'load' and - // 'merge' steps as needed. - // explicitly do not copy over 'expectingScopeOpen' - that should not propagate across - // includefiles. + // explicitly do NOT copy over scope, since we should be back to root when the import happens. + // explicitly do not copy over filenames; those will get handled during the 'load' and 'merge' steps as needed. + // explicitly do not copy over 'expectingScopeOpen' - that should not propagate across includefiles. } // endregion Constructors @@ -161,19 +153,19 @@ private void parseLine(String nextLine) throws IOException { final boolean inScope = isContextValid(); - if (nextLine == null || nextLine.startsWith(TOKEN_COMMENT_HASH) - || nextLine.startsWith(TOKEN_COMMENT_BANG) || nextLine.length() == 0) { + if (nextLine == null || nextLine.startsWith(TOKEN_COMMENT_HASH) || nextLine.startsWith(TOKEN_COMMENT_BANG) + || nextLine.length() == 0) { // If the line starts with # or with !, it is a comment and should be ignored. // Blank lines can also be ignored. 
// noinspection UnnecessaryReturnStatement return; } else { if (expectingScopeOpen && !nextLine.startsWith(TOKEN_SCOPE_OPEN)) { - throw new ConfigurationException(TOKEN_SCOPE_OPEN - + " must immediately follow a scope declaration, found : " + nextLine); + throw new ConfigurationException( + TOKEN_SCOPE_OPEN + " must immediately follow a scope declaration, found : " + nextLine); } else if (!expectingScopeOpen && nextLine.startsWith(TOKEN_SCOPE_OPEN)) { - throw new ConfigurationException(TOKEN_SCOPE_OPEN - + " may not be used as the first character of a property name: " + nextLine); + throw new ConfigurationException( + TOKEN_SCOPE_OPEN + " may not be used as the first character of a property name: " + nextLine); } else if (startsWithIgnoreCase(nextLine, TOKEN_FINALIZE) && inScope) { finalizeProperty(nextLine); } else if (startsWithIgnoreCase(nextLine, TOKEN_FINAL) && inScope) { @@ -204,17 +196,15 @@ private static boolean startsWithIgnoreCase(String baseString, String findString } /** - * Record the value of a property declaration in the form X=Y or X:Y (ignoring whitespace around - * the = or :). Since the property value may include terminal whitespace or any other character, - * it is NOT permitted to place further instructions immediately on the same line. + * Record the value of a property declaration in the form X=Y or X:Y (ignoring whitespace around the = or :). Since + * the property value may include terminal whitespace or any other character, it is NOT permitted to place further + * instructions immediately on the same line. * * @param nextLine The line to be parsed - * @param markFinal If this line was preceded by a 'final' directive, also mark this property as - * final. + * @param markFinal If this line was preceded by a 'final' directive, also mark this property as final. 
*/ private void storePropertyDeclaration(String nextLine, boolean markFinal) throws IOException { - // Always starts with a token, then may have either : or =, followed by a value for the - // token. + // Always starts with a token, then may have either : or =, followed by a value for the token. String[] parts = propPattern.split(nextLine, 2); final String token = parts[0].trim(); String value; @@ -224,37 +214,30 @@ private void storePropertyDeclaration(String nextLine, boolean markFinal) throws value = ""; } - // the 'includefiles' line get special handling; in that case, we want to explicitly just - // load up the specified files. - // Since 'includefiles' must be the first non-comment line (if present), just immediately - // load those files. + // the 'includefiles' line get special handling; in that case, we want to explicitly just load up the specified + // files. + // Since 'includefiles' must be the first non-comment line (if present), just immediately load those files. if (token.equals(TOKEN_INCLUDE)) { if (haveParsedFirstLine) { - throw new ConfigurationException( - TOKEN_INCLUDE + " found in location other than first non-comment line in file " - + thisFile + "."); + throw new ConfigurationException(TOKEN_INCLUDE + + " found in location other than first non-comment line in file " + thisFile + "."); } ParsedProperties includeProps; for (String file : commaPattern.split(value)) { - // Since we're passing around the same collection objects, they'll automatically - // populate everything in one batch. + // Since we're passing around the same collection objects, they'll automatically populate everything in + // one batch. includeProps = new ParsedProperties(this); includeProps.load(file.trim()); } } else if (token.equalsIgnoreCase(TOKEN_FINAL) || token.equalsIgnoreCase(TOKEN_FINALIZE)) { - // If someone tries to use 'final final = value' or 'final finalize = value', then smack - // them on the wrist. 
- throw new ConfigurationException( - token + " is a reserved keyword and may not be used as a property name."); + // If someone tries to use 'final final = value' or 'final finalize = value', then smack them on the wrist. + throw new ConfigurationException(token + " is a reserved keyword and may not be used as a property name."); } else { if (!isFinal(token)) { - List tokenHistory = - lineNumbers.computeIfAbsent(token, prop -> new ArrayList<>()); + List tokenHistory = lineNumbers.computeIfAbsent(token, prop -> new ArrayList<>()); props.put(token, value); - // Store the line number that this declaration was made on AFTER storing the actual - // declaration, - // so if there's a conflict with the 'final' descriptor, the previous line number - // will be persisted. + // Store the line number that this declaration was made on AFTER storing the actual declaration, + // so if there's a conflict with the 'final' descriptor, the previous line number will be persisted. tokenHistory.add(0, new PropertyHistory(thisFile, lineNum, value, stringScope())); if (markFinal) { finalizeProperty(token); @@ -266,27 +249,26 @@ private void storePropertyDeclaration(String nextLine, boolean markFinal) throws } /** - * Exit out of the current scope, returning to the next scope level up. It is permitted to place - * further instructions immediately after this on the same line. + * Exit out of the current scope, returning to the next scope level up. It is permitted to place further + * instructions immediately after this on the same line. * * @param nextLine The string to be parsed */ private void closeScopeBlock(String nextLine) throws IOException { - // If the line starts with }, and the scope is not 0, then end the current scope and move up - // one level. + // If the line starts with }, and the scope is not 0, then end the current scope and move up one level. if (scope.size() > 0) { scope.removeLast(); // Whatever the last-added scope was, that is now removed. 
} else { - throw new ConfigurationException(TOKEN_SCOPE_CLOSE + " found at line " + lineNum - + " with no matching " + TOKEN_SCOPE_OPEN); + throw new ConfigurationException( + TOKEN_SCOPE_CLOSE + " found at line " + lineNum + " with no matching " + TOKEN_SCOPE_OPEN); } String restOfLine = ltrim(nextLine.replaceFirst(TOKEN_SCOPE_CLOSE, "")); parseLine(restOfLine); } /** - * Start operating inside a scope block that has already been defined. It is permitted to place - * further instructions immediately after this on the same line. + * Start operating inside a scope block that has already been defined. It is permitted to place further instructions + * immediately after this on the same line. * * @param nextLine The line to be parsed. */ @@ -296,7 +278,7 @@ private void openScopeBlock(String nextLine) throws IOException { expectingScopeOpen = false; } else { throw new ConfigurationException( - "Found " + TOKEN_SCOPE_OPEN + " at line " + lineNum + " when none was expected."); + "Found " + TOKEN_SCOPE_OPEN + " at line " + lineNum + " when none was expected."); } if (nextLine.length() > 1) { parseLine(ltrim(nextLine.substring(TOKEN_SCOPE_OPEN.length()))); @@ -304,45 +286,39 @@ private void openScopeBlock(String nextLine) throws IOException { } /** - * Define a scope block. It is permitted to place the open indicator immediately after this on - * the same line. + * Define a scope block. It is permitted to place the open indicator immediately after this on the same line. * * @param nextLine The line to be parsed. */ private void defineScopeBlock(String nextLine) throws IOException { - // If the line starts with [, then it is a scope declaration, and may have a terminal - // open-brace, which may have more commands in it. + // If the line starts with [, then it is a scope declaration, and may have a terminal open-brace, which may have + // more commands in it. // It is possible to define multiple scope items on the same level, or 'or' blocks. 
final int endBlock = nextLine.indexOf(TOKEN_SCOPE_END); if (endBlock < 0) { throw new ConfigurationException( - "Invalid scope declaration: unterminated scope block at line " + lineNum + ": " - + nextLine); + "Invalid scope declaration: unterminated scope block at line " + lineNum + ": " + nextLine); } // Skip the first character, since we know it's the opening [. final String scopeSection = nextLine.substring(1, endBlock); final String[] scopeItems = commaPattern.split(scopeSection); if (scopeItems.length == 0) { throw new ConfigurationException( - "Invalid scope declaration: scope with no scope items at line " + lineNum + ": " - + nextLine); + "Invalid scope declaration: scope with no scope items at line " + lineNum + ": " + nextLine); } final ArrayList newScopes = new ArrayList<>(); for (String aScope : scopeItems) { String[] parts = equalPattern.split(aScope, 2); if (parts.length < 2) { throw new ConfigurationException( - "Invalid scope declaration: no '=' found at line " + lineNum + ":" + nextLine); + "Invalid scope declaration: no '=' found at line " + lineNum + ":" + nextLine); } final String contextToken = parts[0].trim(); final String contextValue = ltrim(parts[1]); - newScopes.add(new ConfigurationScope(contextToken, contextValue)); // Whatever scope - // we've just - // entered, add it to - // the end of the - // current list of + newScopes.add(new ConfigurationScope(contextToken, contextValue)); // Whatever scope we've just entered, add + // it to the end of the current list of // scope conditions. } @@ -356,41 +332,37 @@ private void defineScopeBlock(String nextLine) throws IOException { } /** - * Finalize a given property. If the property does not exist, it will be created with a value of - * empty-string. It is not permitted to place additional instructions on this line; anything - * after 'finalize' will be treated as the name of the token. + * Finalize a given property. 
If the property does not exist, it will be created with a value of empty-string. It is + * not permitted to place additional instructions on this line; anything after 'finalize' will be treated as the + * name of the token. * * @param nextLine The string to be processed, in the form "finalize [token]". */ private void finalizeProperty(String nextLine) { - // If we're ignoring scopes, we have to ignore finalization as well; properties could be - // finalized in different scopes. + // If we're ignoring scopes, we have to ignore finalization as well; properties could be finalized in different + // scopes. if (ignoreScopes) return; - // if the line starts with 'finalize ', then the remainder of the line is a token to be - // marked as final. - // Note that if the token has not been declared yet, it will be marked as final, and may not - // then be created. - // It is non-harmful to finalize something that is already final, since at that point it - // can't be modified anyway. + // if the line starts with 'finalize ', then the remainder of the line is a token to be marked as final. + // Note that if the token has not been declared yet, it will be marked as final, and may not then be created. + // It is non-harmful to finalize something that is already final, since at that point it can't be modified + // anyway. 
final String[] tokens = commaPattern.split(nextLine.replaceFirst(TOKEN_FINALIZE, "")); for (String aToken : tokens) { final String token = aToken.trim(); if (!this.containsKey(token)) { - List tokenHistory = - lineNumbers.computeIfAbsent(token, prop -> new ArrayList<>()); - tokenHistory.add(0, new PropertyHistory(thisFile, lineNum, - "(Property finalized with no value defined)", stringScope())); + List tokenHistory = lineNumbers.computeIfAbsent(token, prop -> new ArrayList<>()); + tokenHistory.add(0, new PropertyHistory(thisFile, lineNum, "(Property finalized with no value defined)", + stringScope())); } makePropertyFinal(token); } } /** - * Indicate whether a specified property has already been marked as final. Note that wildcard - * values may be included in 'finalize' uses, so a property will be considered 'final' if it - * matches a finalized value with a wildcard. Example: If 'a.*.b' has been finalized, then - * 'a.foo.b' will be considered final after that. + * Indicate whether a specified property has already been marked as final. Note that wildcard values may be included + * in 'finalize' uses, so a property will be considered 'final' if it matches a finalized value with a wildcard. + * Example: If 'a.*.b' has been finalized, then 'a.foo.b' will be considered final after that. * * @param token The name of the property to check. * @return True if the property has been marked as final, false otherwise. @@ -405,16 +377,15 @@ private boolean isFinal(String token) { } /** - * Make a property final, so that it can no longer have a new value entered. It is non-harmful - * to mark a property final multiple times, since the check for 'final' status is binary and - * irreversible. There could exist cases where a property might be conditionally marked final in - * one block, then universally finalized later, so this is explicitly permitted. + * Make a property final, so that it can no longer have a new value entered. 
It is non-harmful to mark a property + * final multiple times, since the check for 'final' status is binary and irreversible. There could exist cases + * where a property might be conditionally marked final in one block, then universally finalized later, so this is + * explicitly permitted. * * @param token The name of the property to mark as final. */ private void makePropertyFinal(String token) { - // If we're ignoring scopes, we have to ignore finalization as well; a property could be - // included in two + // If we're ignoring scopes, we have to ignore finalization as well; a property could be included in two // different scopes and be 'final' in both. if (ignoreScopes) return; @@ -422,8 +393,8 @@ private void makePropertyFinal(String token) { } /** - * Indiciate whether, given the current scope, the context is valid for the current line. Null - * values are automatically considered invalid. + * Indiciate whether, given the current scope, the context is valid for the current line. Null values are + * automatically considered invalid. * * @return True if the current scope matches the context, false otherwise. */ @@ -443,8 +414,8 @@ private boolean isContextValid() { // region Properties class overrides - // A variety of Hashtable properties need to be overridden to redirect them to the LinkedHashMap - // we're actually using. + // A variety of Hashtable properties need to be overridden to redirect them to the LinkedHashMap we're actually + // using. @Override public void clear() { @@ -548,24 +519,21 @@ public Collection values() { } /** - * Load the properties from the specified InputStream, ignoring any directives that do not match - * the current context. Automatically closes the stream when the last line has been processed. + * Load the properties from the specified InputStream, ignoring any directives that do not match the current + * context. Automatically closes the stream when the last line has been processed. 
* * @param stream The open stream providing a view into the data to be parsed. * @throws IOException If the stream cannot be read at some point. */ @Override public synchronized void load(InputStream stream) throws IOException { - // Since logical lines are allowed to be represented as multiple actual lines due to - // trailing \, + // Since logical lines are allowed to be represented as multiple actual lines due to trailing \, // we need to assemble the logical lines for parsing. Reader reader = new BufferedReader(new InputStreamReader(stream)); final ParsedPropertiesLineReader lr = new ParsedPropertiesLineReader(reader); - // When we include a file, that file needs to be processed before anything else in this one - // - - // the fact that the other file is listed means it is assumed to pre-exist this one, and - // should + // When we include a file, that file needs to be processed before anything else in this one - + // the fact that the other file is listed means it is assumed to pre-exist this one, and should // be handled first. String nextLine; @@ -575,9 +543,8 @@ public synchronized void load(InputStream stream) throws IOException { break; } nextLine = convertUnicodeEncoding(rawLine); - lineNum += lr.getNumLinesLastRead(); // Since a logical line can break across multiple - // actual lines, need to track the actual line - // numbers. + lineNum += lr.getNumLinesLastRead(); // Since a logical line can break across multiple actual lines, need to + // track the actual line numbers. 
if (nextLine == null) { break; } @@ -586,16 +553,15 @@ public synchronized void load(InputStream stream) throws IOException { if (scope.size() != 0) { throw new ConfigurationException("Failed to close scope in file " + thisFile); } - // Once the load is finished, we also want to make sure that if someone tries to set a value - // from code or otherwise, + // Once the load is finished, we also want to make sure that if someone tries to set a value from code or + // otherwise, // they don't get told that it happened inside this file. this.thisFile = "(Modified outside of configuration file)"; this.lineNum = -1; } /** - * Determine whether a property is final or not, and only allow the update if it is not already - * final. + * Determine whether a property is final or not, and only allow the update if it is not already final. * * @param key The name of the property to set a value for. * @param value The value of the property being set. @@ -607,8 +573,8 @@ public synchronized Object put(Object key, Object value) { } /** - * Remove a non-final property from the collection. Attempting to remove a final property will - * cause a ConfigurationException to be thrown. + * Remove a non-final property from the collection. Attempting to remove a final property will cause a + * ConfigurationException to be thrown. * * @param key The name of the property to be removed. * @return The value of the property, if it existed. @@ -622,8 +588,8 @@ public synchronized Object remove(Object key) { } /** - * Determine whether a property is final or not, and only allow the update if it is not already - * final. This should not be called from within the load operation for this class. + * Determine whether a property is final or not, and only allow the update if it is not already final. This should + * not be called from within the load operation for this class. * * @param key The name of the property to set a value for. * @param value The value of the property being set. 
@@ -632,20 +598,14 @@ public synchronized Object remove(Object key) { @Override public synchronized Object setProperty(String key, String value) { if (!isFinal(key)) { - // If something calls 'put' on this property other than during a load, then we don't - // have a location for it. - List tokenHistory = - lineNumbers.computeIfAbsent(key, prop -> new ArrayList<>()); - // Once we're fully converted to Java 9 or higher, we could use the StackWalker API to - // get this more - // efficiently. Since we're currently moving away from Java 8, which would want to use - // SharedSecrets instead, - // and since our code internally does not appear to have any cases where properties get - // set in a loop - // where performance would be notably impacted, we can go ahead and use the full stack - // for now. - // We also want to skip frame 0, since that's THIS method, which we know is in use - // anyway. + // If something calls 'put' on this property other than during a load, then we don't have a location for it. + List tokenHistory = lineNumbers.computeIfAbsent(key, prop -> new ArrayList<>()); + // Once we're fully converted to Java 9 or higher, we could use the StackWalker API to get this more + // efficiently. Since we're currently moving away from Java 8, which would want to use SharedSecrets + // instead, + // and since our code internally does not appear to have any cases where properties get set in a loop + // where performance would be notably impacted, we can go ahead and use the full stack for now. + // We also want to skip frame 0, since that's THIS method, which we know is in use anyway. 
StackTraceElement[] stack = new Throwable().getStackTrace(); StringBuilder stackOutput = new StringBuilder(": "); if (stack != null && stack.length > 1) { @@ -654,8 +614,7 @@ public synchronized Object setProperty(String key, String value) { stackOutput.append(stack[fc].toString()).append(System.lineSeparator()); } } - tokenHistory.add(0, - new PropertyHistory(stackOutput.toString(), 0, value, stringScope())); + tokenHistory.add(0, new PropertyHistory(stackOutput.toString(), 0, value, stringScope())); return props.put(key, value); } else { handleFinalConflict(key); @@ -673,15 +632,12 @@ public synchronized Object setProperty(String key, String value) { * @return A String representing the current scope. */ private String stringScope() { - return "[" - + scope.stream().map(ConfigurationScope::toString).collect(Collectors.joining("]-[")) - + "]"; + return "[" + scope.stream().map(ConfigurationScope::toString).collect(Collectors.joining("]-[")) + "]"; } /** - * Load the properties from the specified file, ignoring any directives that do not match the - * current context. Directories may be specified, including relative paths, but the '~' operator - * is not supported. + * Load the properties from the specified file, ignoring any directives that do not match the current context. + * Directories may be specified, including relative paths, but the '~' operator is not supported. * *

    * Loads the fileName via {@link PropertyInputStreamLoader}. @@ -698,23 +654,21 @@ public synchronized void load(String fileName) throws IOException, Configuration thisFile = fileName; try (InputStream resourceAsStream = propertyInputStreamLoader - .openConfiguration(fileName)) { + .openConfiguration(fileName)) { this.load(resourceAsStream); } } /** - * In a property name, "*" should be treated as a wildcard, so if we find one, it should be - * treated as a regex for ".*" - but everything else has to be an exact match. Enclose the - * entire thing in the regex literal markers "\Q" and "\E" to mark the entire name as being a - * string literal, then replace any instances of "*" with ".*" and make everything around the - * "*" into the end of the prior literal and the start of a new literal. So "A*B" -> - * "\QA\E.*\QB\E" == "Match the pattern of the exact string 'A', then any text, then the exact - * string 'B'." + * In a property name, "*" should be treated as a wildcard, so if we find one, it should be treated as a regex for + * ".*" - but everything else has to be an exact match. Enclose the entire thing in the regex literal markers "\Q" + * and "\E" to mark the entire name as being a string literal, then replace any instances of "*" with ".*" and make + * everything around the "*" into the end of the prior literal and the start of a new literal. So "A*B" -> + * "\QA\E.*\QB\E" == "Match the pattern of the exact string 'A', then any text, then the exact string 'B'." * * @param token The string to be wildcard-expanded - * @return The possibly-wildcarded string turned into a regex, with all non-asterisk characters - * treated as string literals, not a pattern. + * @return The possibly-wildcarded string turned into a regex, with all non-asterisk characters treated as string + * literals, not a pattern. 
*/ private String createWildcardExpansionPattern(String token) { return ("\\Q" + token + "\\E").replaceAll("\\*", "\\\\E.*\\\\Q"); @@ -722,23 +676,21 @@ private String createWildcardExpansionPattern(String token) { /** - * Handle situations where something tries to modify a property that has been marked as 'final.' - * If an included file marks something as final, this file may not edit that value, regardless - * of whether this file calls it final or not. If this file includes a property twice, and the - * first one is marked final, then that's also an issue. + * Handle situations where something tries to modify a property that has been marked as 'final.' If an included file + * marks something as final, this file may not edit that value, regardless of whether this file calls it final or + * not. If this file includes a property twice, and the first one is marked final, then that's also an issue. * * @param token The name of the property that was being set. */ private void handleFinalConflict(final String token) throws ConfigurationException { // Since includefiles has to be the first effective line, if present at all, - // we don't need to handle backreferences - everything from the included files will be - // present + // we don't need to handle backreferences - everything from the included files will be present // by the time the current file is processed. List tokenHistory = lineNumbers.get(token); String finalPattern = null; if (tokenHistory == null) { - // This token was finalized via a pattern-match, and this exact value doesn't exist. - // Find the matching pattern. + // This token was finalized via a pattern-match, and this exact value doesn't exist. Find the matching + // pattern. 
finalPattern = finalProperties.stream().filter(str -> { final String pattern = createWildcardExpansionPattern(str); Pattern pat = Pattern.compile(pattern); @@ -746,23 +698,22 @@ private void handleFinalConflict(final String token) throws ConfigurationExcepti return pat.matcher(token).matches(); }).findFirst().orElse(""); tokenHistory = lineNumbers.get(finalPattern); - // It shouldn't be possible to have something final but not be able to find it, but if - // it did happen, just return a less-informative error. + // It shouldn't be possible to have something final but not be able to find it, but if it did happen, just + // return a less-informative error. if (tokenHistory == null) { - throw new ConfigurationException(("Property '" + token - + "' previously marked as final was then modified in file '" + thisFile - + "' at line " + lineNum)); + throw new ConfigurationException( + ("Property '" + token + "' previously marked as final was then modified in file '" + thisFile + + "' at line " + lineNum)); } } StringBuilder msgBuilder = new StringBuilder("Property '" + token + - "' marked as final in file '" + tokenHistory.get(0).fileName + - "' with value at line " + tokenHistory.get(0).lineNumber); + "' marked as final in file '" + tokenHistory.get(0).fileName + + "' with value at line " + tokenHistory.get(0).lineNumber); if (finalPattern != null) { msgBuilder.append(" with pattern '").append(finalPattern).append("' and"); } if (lineNum >= 0) { - msgBuilder.append(" was then modified in file '").append(thisFile).append("' at line ") - .append(lineNum); + msgBuilder.append(" was then modified in file '").append(thisFile).append("' at line ").append(lineNum); } else { msgBuilder.append(" was then modified outside of a configuration file"); } @@ -770,8 +721,7 @@ private void handleFinalConflict(final String token) throws ConfigurationExcepti } /** - * A list of all the properties that have been marked as final and thus may not be further - * updated. 
+ * A list of all the properties that have been marked as final and thus may not be further updated. * * @return The set of all the properties that have thus far been marked as final. */ @@ -780,8 +730,8 @@ private Set getFinalProperties() { } /** - * Get the context for the current process, along with all the values that have been retrieved - * thus far so they don't need to be looked up again. + * Get the context for the current process, along with all the values that have been retrieved thus far so they + * don't need to be looked up again. * * @return The existing context */ @@ -819,8 +769,8 @@ private static String ltrim(String trimMe) { // endregion Utilities /** - * Take a properties file and return it line by line - taking into account the 'this continues - * on next line' indicators. + * Take a properties file and return it line by line - taking into account the 'this continues on next line' + * indicators. */ private class ParsedPropertiesLineReader { private final BufferedReader breader; @@ -838,8 +788,8 @@ private class ParsedPropertiesLineReader { } /** - * Return the next logical line from the file, where a line ending in \ means to continue on - * the next line. Automatically closes the stream when the last line has been read. + * Return the next logical line from the file, where a line ending in \ means to continue on the next line. + * Automatically closes the stream when the last line has been read. * * @return The next logical line. * @throws IOException If the file cannot be read. @@ -856,15 +806,13 @@ String readLine() throws IOException { if (retLine.toString().endsWith("\\") && !retLine.toString().endsWith("\\\\")) { retLine.deleteCharAt(retLine.length() - 1); } - // StringBuilder will explicitly write 'null' if a null is appended, so give that - // special handling. + // StringBuilder will explicitly write 'null' if a null is appended, so give that special handling. 
if (nextLine != null) { retLine.append(nextLine); } nextLine = ltrim(breader.readLine()); numLinesLastRead++; - } while (retLine.toString().endsWith("\\") && !retLine.toString().endsWith("\\\\") - && nextLine != null); + } while (retLine.toString().endsWith("\\") && !retLine.toString().endsWith("\\\\") && nextLine != null); // If we've read the last line, close the reader. if (nextLine == null) { open = false; @@ -889,8 +837,8 @@ boolean isOpen() { } /** - * Return the configuration contexts. This is the list of system properties that may have been - * used to parse the configuration file. This collection will be immutable. + * Return the configuration contexts. This is the list of system properties that may have been used to parse the + * configuration file. This collection will be immutable. * * @return the configuration contexts. */ diff --git a/Configuration/src/main/java/io/deephaven/configuration/PropertyFile.java b/Configuration/src/main/java/io/deephaven/configuration/PropertyFile.java index bae1beae217..ad953d04702 100644 --- a/Configuration/src/main/java/io/deephaven/configuration/PropertyFile.java +++ b/Configuration/src/main/java/io/deephaven/configuration/PropertyFile.java @@ -104,10 +104,10 @@ public String setProperty(String key, String value) { } /** - * Gets the value of the given property, aborting if the value is not specified in the system - * config files. Note that it is by design that there is no overloaded method taking a default - * value. Rather than scattering default values through all the source files, all properties - * should be in one config file (as much as possible). Put default values in common.prop. + * Gets the value of the given property, aborting if the value is not specified in the system config files. Note + * that it is by design that there is no overloaded method taking a default value. Rather than scattering default + * values through all the source files, all properties should be in one config file (as much as possible). 
Put + * default values in common.prop. */ public @NotNull String getProperty(String propertyName) { String result = properties.getProperty(propertyName); @@ -174,8 +174,7 @@ public int getIntegerWithDefault(String propertyName, final int defaultValue) { return getInteger(propertyName); } - public int getPossibleIntegerWithDefault(final int defaultValue, - final String... possiblePropertyNames) { + public int getPossibleIntegerWithDefault(final int defaultValue, final String... possiblePropertyNames) { for (final String propertyName : possiblePropertyNames) { if (hasProperty(propertyName)) { return getInteger(propertyName); @@ -191,8 +190,7 @@ public String getStringWithDefault(final String propertyName, final String defau return getProperty(propertyName); } - public String getPossibleStringWithDefault(final String defaultValue, - final String... possiblePropertyNames) { + public String getPossibleStringWithDefault(final String defaultValue, final String... possiblePropertyNames) { for (final String propertyName : possiblePropertyNames) { if (hasProperty(propertyName)) { return getProperty(propertyName); @@ -270,7 +268,7 @@ public String typeName() { } private java.lang.Object get(T parser, String propertyName, - Logger logger, String logPrefix) { + Logger logger, String logPrefix) { String propStringValue = properties.getProperty(propertyName); if (propStringValue == null) { String msg = "property " + propertyName + " is missing"; @@ -283,7 +281,7 @@ private java.lang.Object get(T parser, String propertyName, return propValue; } catch (NumberFormatException e) { String msg = "property " + propertyName + " string value " + propStringValue - + " couldn't be parsed as " + parser.typeName(); + + " couldn't be parsed as " + parser.typeName(); logger.error(logPrefix + ": " + msg); throw new PropertyException(msg); } @@ -331,8 +329,7 @@ public boolean getBooleanForClass(Class c, String propertyLast) { return getBoolean(c.getSimpleName() + "." 
+ propertyLast, log, c.getName()); } - public boolean getBooleanForClassWithDefault(Class c, String propertyLast, - final boolean defaultValue) { + public boolean getBooleanForClassWithDefault(Class c, String propertyLast, final boolean defaultValue) { final String prop = c.getSimpleName() + "." + propertyLast; if (!hasProperty(prop)) { return defaultValue; @@ -344,8 +341,7 @@ public int getIntegerForClass(Class c, String propertyLast) { return getInteger(c.getSimpleName() + "." + propertyLast, log, c.getName()); } - public int getIntegerForClassWithDefault(final Class c, final String propertyLast, - final int defaultValue) { + public int getIntegerForClassWithDefault(final Class c, final String propertyLast, final int defaultValue) { final String prop = c.getSimpleName() + "." + propertyLast; if (!hasProperty(prop)) { return defaultValue; @@ -361,8 +357,7 @@ public long getLongForClass(Class c, String propertyLast) { return getLong(c.getSimpleName() + "." + propertyLast, log, c.getName()); } - public long getLongForClassWithDefault(final Class c, final String propertyLast, - final long defaultValue) { + public long getLongForClassWithDefault(final Class c, final String propertyLast, final long defaultValue) { final String prop = c.getSimpleName() + "." + propertyLast; if (!hasProperty(prop)) { return defaultValue; @@ -374,8 +369,7 @@ public double getDoubleForClass(Class c, String propertyLast) { return getDouble(c.getSimpleName() + "." + propertyLast, log, c.getName()); } - public double getDoubleForClassWithDefault(Class c, String propertyLast, - final double defaultValue) { + public double getDoubleForClassWithDefault(Class c, String propertyLast, final double defaultValue) { final String prop = c.getSimpleName() + "." 
+ propertyLast; if (!hasProperty(prop)) { return defaultValue; @@ -408,7 +402,7 @@ public int[] getIntArrayForClass(Class c, String propertyLast) { } public void getClassParams(final Logger log, final Class c, final String instanceStr, - final Object obj, final int desiredMods) { + final Object obj, final int desiredMods) { Class paramClass = obj.getClass(); Field[] fields = paramClass.getDeclaredFields(); final String propBase = c.getSimpleName(); @@ -416,11 +410,10 @@ public void getClassParams(final Logger log, final Class c, final String instanc for (Field f : fields) { if ((f.getModifiers() & desiredMods) == 0) { throw new PropertyException(errMsg, - new PropertyException("Field with wrong modifiers " + f.toString())); + new PropertyException("Field with wrong modifiers " + f.toString())); } final String fieldName = f.getName(); - final String s = - (instanceStr == null || instanceStr.length() == 0) ? "" : (instanceStr + "."); + final String s = (instanceStr == null || instanceStr.length() == 0) ? "" : (instanceStr + "."); final String propName = propBase + "." + s + fieldName; try { final String value = getProperties().getProperty(propName); @@ -429,8 +422,7 @@ public void getClassParams(final Logger log, final Class c, final String instanc throw new PropertyException("null value for property " + propName); } else { source = "file"; - // Setting field accessibility should be allowed by our code, but not from - // outside classes, so + // Setting field accessibility should be allowed by our code, but not from outside classes, so // this should be privileged. 
AccessController.doPrivileged((PrivilegedAction) () -> { f.setAccessible(true); @@ -460,8 +452,7 @@ public void getClassParams(final Logger log, final Class c, final String instanc } } if (log != null) - log.info( - ">>>" + propName + " = " + f.get(obj).toString() + " (" + source + ")"); + log.info(">>>" + propName + " = " + f.get(obj).toString() + " (" + source + ")"); } catch (Exception e) { throw new PropertyException("Property " + propName + " is missing.", e); } @@ -476,8 +467,7 @@ public void getClassParams(final Logger log, final Class c, final Object obj) { getClassParams(log, c, null, obj, Modifier.PUBLIC); } - public void getClassParams(final Logger log, final Class c, final String name, - final Object obj) { + public void getClassParams(final Logger log, final Class c, final String name, final Object obj) { getClassParams(log, c, name, obj, Modifier.PUBLIC); } @@ -497,8 +487,7 @@ public Set getStringSetFromProperty(final String propertyName) { return set; } - public Set getStringSetFromPropertyWithDefault(final String propertyName, - final Set defaultValue) { + public Set getStringSetFromPropertyWithDefault(final String propertyName, final Set defaultValue) { return hasProperty(propertyName) ? getStringSetFromProperty(propertyName) : defaultValue; } @@ -506,8 +495,7 @@ public String[] getStringArrayFromProperty(final String propertyName) { return getProperty(propertyName).split("[, ]"); } - public String[] getStringArrayFromPropertyWithDefault(final String propertyName, - final String[] defaultValue) { + public String[] getStringArrayFromPropertyWithDefault(final String propertyName, final String[] defaultValue) { return hasProperty(propertyName) ? getStringArrayFromProperty(propertyName) : defaultValue; } @@ -535,8 +523,8 @@ public Map getNameStringMapFromProperty(final String propertyNam return Collections.emptyMap(); } final Matcher propertyMapMatcher = - Pattern.compile("\\A\\(\\s*((?:[a-zA-Z0-9\\-\\. ]+=>[a-zA-Z0-9\\-\\. 
]+,?)*)\\s*\\)\\Z") - .matcher(propertyValue); + Pattern.compile("\\A\\(\\s*((?:[a-zA-Z0-9\\-\\. ]+=>[a-zA-Z0-9\\-\\. ]+,?)*)\\s*\\)\\Z") + .matcher(propertyValue); Require.requirement(propertyMapMatcher.matches(), "propertyMapMatcher.matches())"); Map result = new HashMap<>(); for (String pair : propertyMapMatcher.group(1).split(",")) { @@ -546,7 +534,7 @@ public Map getNameStringMapFromProperty(final String propertyNam String[] values = pair.split("=>"); Require.eq(values.length, "values.length", 2); Require.eqNull(result.put(values[0].trim(), values[1].trim()), - "result.put(values[0].trim(), values[1].trim())"); + "result.put(values[0].trim(), values[1].trim())"); } return result; } @@ -565,8 +553,8 @@ public int[] getIntegerArray(final String propertyName) { } /** - * Parse a set of non-negative ints from a property. Format is comma-separated individual values - * and ranges of the form start-end. + * Parse a set of non-negative ints from a property. Format is comma-separated individual values and ranges of the + * form start-end. 
* * @example 0,22,100-200,99,1000-2000 * @param propertyName @@ -588,7 +576,7 @@ public TIntSet getNonNegativeIntSetWithRangeSupport(final String propertyName) { final int rangeBegin = Integer.parseInt(rangeMatcher.group(1)); final int rangeEnd = Integer.parseInt(rangeMatcher.group(2)); Assert.assertion(0 <= rangeBegin && rangeBegin <= rangeEnd, - "0 <= rangeBegin && rangeBegin <= rangeEnd"); + "0 <= rangeBegin && rangeBegin <= rangeEnd"); for (int value = rangeBegin; value <= rangeEnd; ++value) { result.add(value); } @@ -614,8 +602,7 @@ public BitSet getBitSet(final String propertyName, final int length) { return result; } - public TObjectIntHashMap getStringIntHashMap(final String propertyName, - final String separator) { + public TObjectIntHashMap getStringIntHashMap(final String propertyName, final String separator) { final String s = getProperty(propertyName); if (s.equals("")) { return new TObjectIntHashMap<>(); diff --git a/Configuration/src/main/java/io/deephaven/configuration/PropertyHistory.java b/Configuration/src/main/java/io/deephaven/configuration/PropertyHistory.java index 120c7f438db..eb4bbd3adbd 100644 --- a/Configuration/src/main/java/io/deephaven/configuration/PropertyHistory.java +++ b/Configuration/src/main/java/io/deephaven/configuration/PropertyHistory.java @@ -1,21 +1,20 @@ package io.deephaven.configuration; /** - * A simple datastore to record one step in the history of a property. A property may have its value - * overwritten by subsequent declarations or in subsequent files, but it is convenient to be able to - * identify what the active value of a property is, where that value was created, and what other - * values the property may have been assigned by other files before getting to this final value. + * A simple datastore to record one step in the history of a property. 
A property may have its value overwritten by + * subsequent declarations or in subsequent files, but it is convenient to be able to identify what the active value of + * a property is, where that value was created, and what other values the property may have been assigned by other files + * before getting to this final value. */ public class PropertyHistory { /** - * The name of the file where the property value was set, or a note that the value was not set - * by a configuration file. + * The name of the file where the property value was set, or a note that the value was not set by a configuration + * file. */ final public String fileName; /** - * The number of the line within the file where the property value was set. If the property - * value was not set via a configuration file (such as a system property or a programmatic - * change of value), then this should be 0. + * The number of the line within the file where the property value was set. If the property value was not set via a + * configuration file (such as a system property or a programmatic change of value), then this should be 0. */ final public int lineNumber; /** diff --git a/Configuration/src/main/java/io/deephaven/configuration/PropertyInputStreamLoader.java b/Configuration/src/main/java/io/deephaven/configuration/PropertyInputStreamLoader.java index af590114585..9998b346acf 100644 --- a/Configuration/src/main/java/io/deephaven/configuration/PropertyInputStreamLoader.java +++ b/Configuration/src/main/java/io/deephaven/configuration/PropertyInputStreamLoader.java @@ -10,8 +10,8 @@ * The default implementation is {@link PropertyInputStreamLoaderTraditional}. * *

    - * To override the default, additional {@link PropertyInputStreamLoader} implementations can be - * added to the classpath and referenced via the {@link ServiceLoader} mechanism. + * To override the default, additional {@link PropertyInputStreamLoader} implementations can be added to the classpath + * and referenced via the {@link ServiceLoader} mechanism. */ public interface PropertyInputStreamLoader { diff --git a/Configuration/src/main/java/io/deephaven/configuration/PropertyInputStreamLoaderFactory.java b/Configuration/src/main/java/io/deephaven/configuration/PropertyInputStreamLoaderFactory.java index e5149a9c330..d40b8e4d2db 100644 --- a/Configuration/src/main/java/io/deephaven/configuration/PropertyInputStreamLoaderFactory.java +++ b/Configuration/src/main/java/io/deephaven/configuration/PropertyInputStreamLoaderFactory.java @@ -7,38 +7,38 @@ import java.util.stream.StreamSupport; /** - * Uses {@link ServiceLoader#load(Class)} to find the best {@link PropertyInputStreamLoader} (the - * loader with the smallest priority). + * Uses {@link ServiceLoader#load(Class)} to find the best {@link PropertyInputStreamLoader} (the loader with the + * smallest priority). */ public class PropertyInputStreamLoaderFactory { public static PropertyInputStreamLoader newInstance() { final List loaders = StreamSupport - .stream(ServiceLoader.load(PropertyInputStreamLoader.class).spliterator(), false) - .collect(Collectors.toList()); + .stream(ServiceLoader.load(PropertyInputStreamLoader.class).spliterator(), false) + .collect(Collectors.toList()); if (loaders.isEmpty()) { final String message = String.format( - "Unable to find any provided implementations for %s. This should not happen - we expect at least %s to be on the classpath.", - PropertyInputStreamLoader.class.getName(), - PropertyInputStreamLoaderTraditional.class.getName()); + "Unable to find any provided implementations for %s. 
This should not happen - we expect at least %s to be on the classpath.", + PropertyInputStreamLoader.class.getName(), + PropertyInputStreamLoaderTraditional.class.getName()); throw new IllegalStateException(message); } final long distinctCount = loaders.stream() - .mapToLong(PropertyInputStreamLoader::getPriority) - .distinct() - .count(); + .mapToLong(PropertyInputStreamLoader::getPriority) + .distinct() + .count(); if (distinctCount != loaders.size()) { final String propertyInputStreamLoadersDebugInfo = loaders.stream() - .map(c -> c.getClass().getName() + ":" + c.getPriority()) - .collect(Collectors.joining(",", "[", "]")); + .map(c -> c.getClass().getName() + ":" + c.getPriority()) + .collect(Collectors.joining(",", "[", "]")); final String message = String.format( - "Unable to return the appropriate %s - at least two of the implementations have equal priorities, and that is not allowed. %s", - PropertyInputStreamLoader.class.getName(), - propertyInputStreamLoadersDebugInfo); + "Unable to return the appropriate %s - at least two of the implementations have equal priorities, and that is not allowed. 
%s", + PropertyInputStreamLoader.class.getName(), + propertyInputStreamLoadersDebugInfo); throw new IllegalStateException(message); } return loaders.stream() - .min(Comparator.comparing(PropertyInputStreamLoader::getPriority)) - .get(); // we know this will be present since loaders is not empty + .min(Comparator.comparing(PropertyInputStreamLoader::getPriority)) + .get(); // we know this will be present since loaders is not empty } } diff --git a/Configuration/src/main/java/io/deephaven/configuration/PropertyInputStreamLoaderTraditional.java b/Configuration/src/main/java/io/deephaven/configuration/PropertyInputStreamLoaderTraditional.java index c50f0d64b49..ec0d74ae3f9 100644 --- a/Configuration/src/main/java/io/deephaven/configuration/PropertyInputStreamLoaderTraditional.java +++ b/Configuration/src/main/java/io/deephaven/configuration/PropertyInputStreamLoaderTraditional.java @@ -5,8 +5,8 @@ import java.io.InputStream; /** - * A {@link PropertyInputStreamLoader} implementation that first searches for the property file as a - * classpath resource, and then via the filesystem. The priority is 100. + * A {@link PropertyInputStreamLoader} implementation that first searches for the property file as a classpath resource, + * and then via the filesystem. The priority is 100. */ public class PropertyInputStreamLoaderTraditional implements PropertyInputStreamLoader { diff --git a/Configuration/src/test/java/io/deephaven/configuration/PropertyInputStreamLoaderFactoryTest.java b/Configuration/src/test/java/io/deephaven/configuration/PropertyInputStreamLoaderFactoryTest.java index 2a34bcf3cca..1ae9c2e1172 100644 --- a/Configuration/src/test/java/io/deephaven/configuration/PropertyInputStreamLoaderFactoryTest.java +++ b/Configuration/src/test/java/io/deephaven/configuration/PropertyInputStreamLoaderFactoryTest.java @@ -9,12 +9,11 @@ public class PropertyInputStreamLoaderFactoryTest extends TestCase { * {@link PropertyInputStreamLoaderTraditional}. * *

    - * The fishlib tests always assume the "traditional" method of loading prop files. If some day - * this is not the case, then this test will need to be updated to reflect new fishlib testing - * assumptions wrt {@link PropertyInputStreamLoaderFactory}. + * The fishlib tests always assume the "traditional" method of loading prop files. If some day this is not the case, + * then this test will need to be updated to reflect new fishlib testing assumptions wrt + * {@link PropertyInputStreamLoaderFactory}. */ public void testInstanceIsTraditional() { - assertTrue(PropertyInputStreamLoaderFactory - .newInstance() instanceof PropertyInputStreamLoaderTraditional); + assertTrue(PropertyInputStreamLoaderFactory.newInstance() instanceof PropertyInputStreamLoaderTraditional); } } diff --git a/Configuration/src/test/java/io/deephaven/configuration/PropertyInputStreamLoaderTraditionalTest.java b/Configuration/src/test/java/io/deephaven/configuration/PropertyInputStreamLoaderTraditionalTest.java index 6d410e291ac..3ef0784f3c7 100644 --- a/Configuration/src/test/java/io/deephaven/configuration/PropertyInputStreamLoaderTraditionalTest.java +++ b/Configuration/src/test/java/io/deephaven/configuration/PropertyInputStreamLoaderTraditionalTest.java @@ -12,8 +12,7 @@ public class PropertyInputStreamLoaderTraditionalTest extends TestCase { - private static final PropertyInputStreamLoaderTraditional loader = - new PropertyInputStreamLoaderTraditional(); + private static final PropertyInputStreamLoaderTraditional loader = new PropertyInputStreamLoaderTraditional(); private static InputStream open(String name) { return loader.openConfiguration(name); @@ -34,9 +33,8 @@ public void testContentFromResource() throws IOException { public void testContentFromFile() throws IOException, URISyntaxException { // ensure that the resource hello-world.prop is fully scoped out as a filesystem path String path = Paths - 
.get(PropertyInputStreamLoaderTraditionalTest.class.getResource("/hello-world.prop") - .toURI()) - .toString(); + .get(PropertyInputStreamLoaderTraditionalTest.class.getResource("/hello-world.prop").toURI()) + .toString(); final byte[] bytes; try (final InputStream in = open(path)) { bytes = IOUtils.toByteArray(in); diff --git a/Configuration/src/test/java/io/deephaven/configuration/TestConfiguration.java b/Configuration/src/test/java/io/deephaven/configuration/TestConfiguration.java index 7f97dc97a43..8c76628ef9f 100644 --- a/Configuration/src/test/java/io/deephaven/configuration/TestConfiguration.java +++ b/Configuration/src/test/java/io/deephaven/configuration/TestConfiguration.java @@ -12,9 +12,9 @@ /** * Test suite for Configuration. * - * Must provide a Configuration.rootFile property in the VM arguments when running from IntelliJ, - * even though we set that in most of the tests. -ea - * -DConfiguration.rootFile=resources/lib-tests.prop -Ddevroot=/ -Dworkspace=/ -DcacheDir=/cache + * Must provide a Configuration.rootFile property in the VM arguments when running from IntelliJ, even though we set + * that in most of the tests. 
-ea -DConfiguration.rootFile=resources/lib-tests.prop -Ddevroot=/ -Dworkspace=/ + * -DcacheDir=/cache */ public class TestConfiguration extends TestCase { private final String FILENAME_PROPERTY = Configuration.getInstance().getConfFileProperty(); @@ -53,8 +53,7 @@ public void testFileExistenceCheck() { } catch (ConfigurationException e) { // Expected } catch (Exception e) { - fail("Didn't get expected ConfigurationException, got something else instead: " - + e.getMessage()); + fail("Didn't get expected ConfigurationException, got something else instead: " + e.getMessage()); } // Make sure it fails correctly if we pass in a nonexistent file @@ -65,8 +64,7 @@ public void testFileExistenceCheck() { } catch (ConfigurationException e) { // Expected } catch (Exception e) { - fail("Didn't get expected ConfigurationException, got something else instead: " - + e.getMessage()); + fail("Didn't get expected ConfigurationException, got something else instead: " + e.getMessage()); } } @@ -74,8 +72,7 @@ public void testFileExistenceCheck() { * Verify expected normal operation */ public void testSimple() { - final String propertyValue = - Configuration.getInstance().getProperty("measurement.per_thread_cpu"); + final String propertyValue = Configuration.getInstance().getProperty("measurement.per_thread_cpu"); assertEquals("false", propertyValue); } @@ -171,20 +168,17 @@ public void testBadScope() { } /** - * Test that the file will fail to load if a '{' is included with no scope declaration before - * it. + * Test that the file will fail to load if a '{' is included with no scope declaration before it. 
*/ public void testBadScopeOpen() { System.setProperty(FILENAME_PROPERTY, "test-bad-scope-open.prop"); try { Configuration.getInstance().reloadProperties(); - fail( - "Expected and did not get parsing failure due to { with no preceding scope declaration."); + fail("Expected and did not get parsing failure due to { with no preceding scope declaration."); } catch (ConfigurationException e) { // Expected } catch (Exception e) { - fail("Received unexpected error while checking for invalid scope opener: " - + e.getMessage()); + fail("Received unexpected error while checking for invalid scope opener: " + e.getMessage()); } } @@ -195,25 +189,22 @@ public void testReservedKeywordAsDeclaration() { System.setProperty(FILENAME_PROPERTY, "test-reserved-keyword.prop"); try { Configuration.getInstance().reloadProperties(); - fail( - "Expected and did not get parsing failure due to reserved keyword used as declaration."); + fail("Expected and did not get parsing failure due to reserved keyword used as declaration."); } catch (ConfigurationException e) { // Expected } catch (Exception e) { - fail( - "Received unexpected error while checking for reserved keyword used as declaration: " + fail("Received unexpected error while checking for reserved keyword used as declaration: " + e.getMessage()); } } /** - * Verify that loading context-sensitive configuration files works correctly, both with a known - * system property and an on-the-fly property + * Verify that loading context-sensitive configuration files works correctly, both with a known system property and + * an on-the-fly property */ public void testContext() throws Exception { - final String oldProcessName = - System.getProperty(ConfigurationContext.PROCESS_NAME_PROPERTY); + final String oldProcessName = System.getProperty(ConfigurationContext.PROCESS_NAME_PROPERTY); final String testProp = "testproperty"; try { System.setProperty(FILENAME_PROPERTY, "resources/test-context.prop"); @@ -223,72 +214,47 @@ public void 
testContext() throws Exception { System.setProperty("process.name", procVal); System.setProperty(testProp, propVal); Configuration.getInstance().reloadProperties(); - assertEquals(procVal, - Configuration.getInstance().getStringWithDefault("procval", "FAIL")); - assertEquals(propVal, - Configuration.getInstance().getStringWithDefault("propval", "FAIL")); - assertEquals(propVal + procVal, - Configuration.getInstance().getStringWithDefault("propproc", "FAIL")); - assertEquals("changed", - Configuration.getInstance().getStringWithDefault("aval", "FAIL")); - assertEquals("nothing", - Configuration.getInstance().getStringWithDefault("multiprop", "FAIL")); - assertEquals("something", - Configuration.getInstance().getStringWithDefault("multiprop2", "FAIL")); + assertEquals(procVal, Configuration.getInstance().getStringWithDefault("procval", "FAIL")); + assertEquals(propVal, Configuration.getInstance().getStringWithDefault("propval", "FAIL")); + assertEquals(propVal + procVal, Configuration.getInstance().getStringWithDefault("propproc", "FAIL")); + assertEquals("changed", Configuration.getInstance().getStringWithDefault("aval", "FAIL")); + assertEquals("nothing", Configuration.getInstance().getStringWithDefault("multiprop", "FAIL")); + assertEquals("something", Configuration.getInstance().getStringWithDefault("multiprop2", "FAIL")); propVal = "B"; System.setProperty(testProp, propVal); Configuration.getInstance().reloadProperties(); - assertEquals(procVal, - Configuration.getInstance().getStringWithDefault("procval", "FAIL")); - assertEquals(propVal, - Configuration.getInstance().getStringWithDefault("propval", "FAIL")); - assertEquals(propVal + procVal, - Configuration.getInstance().getStringWithDefault("propproc", "FAIL")); - assertEquals("changed", - Configuration.getInstance().getStringWithDefault("aval", "FAIL")); - assertEquals("nothing", - Configuration.getInstance().getStringWithDefault("multiprop", "FAIL")); - assertEquals("something", - 
Configuration.getInstance().getStringWithDefault("multiprop2", "FAIL")); + assertEquals(procVal, Configuration.getInstance().getStringWithDefault("procval", "FAIL")); + assertEquals(propVal, Configuration.getInstance().getStringWithDefault("propval", "FAIL")); + assertEquals(propVal + procVal, Configuration.getInstance().getStringWithDefault("propproc", "FAIL")); + assertEquals("changed", Configuration.getInstance().getStringWithDefault("aval", "FAIL")); + assertEquals("nothing", Configuration.getInstance().getStringWithDefault("multiprop", "FAIL")); + assertEquals("something", Configuration.getInstance().getStringWithDefault("multiprop2", "FAIL")); procVal = "defg"; propVal = "A"; System.setProperty("process.name", procVal); System.setProperty(testProp, propVal); Configuration.getInstance().reloadProperties(); - assertEquals(procVal, - Configuration.getInstance().getStringWithDefault("procval", "FAIL")); - assertEquals(propVal, - Configuration.getInstance().getStringWithDefault("propval", "FAIL")); - assertEquals(propVal + procVal, - Configuration.getInstance().getStringWithDefault("propproc", "FAIL")); - assertEquals("changed", - Configuration.getInstance().getStringWithDefault("aval", "FAIL")); - assertEquals("nothing", - Configuration.getInstance().getStringWithDefault("multiprop", "FAIL")); - assertEquals("nada", - Configuration.getInstance().getStringWithDefault("multiprop2", "FAIL")); + assertEquals(procVal, Configuration.getInstance().getStringWithDefault("procval", "FAIL")); + assertEquals(propVal, Configuration.getInstance().getStringWithDefault("propval", "FAIL")); + assertEquals(propVal + procVal, Configuration.getInstance().getStringWithDefault("propproc", "FAIL")); + assertEquals("changed", Configuration.getInstance().getStringWithDefault("aval", "FAIL")); + assertEquals("nothing", Configuration.getInstance().getStringWithDefault("multiprop", "FAIL")); + assertEquals("nada", Configuration.getInstance().getStringWithDefault("multiprop2", "FAIL")); 
propVal = "B"; System.setProperty(testProp, propVal); Configuration.getInstance().reloadProperties(); - assertEquals(procVal, - Configuration.getInstance().getStringWithDefault("procval", "FAIL")); - assertEquals(propVal, - Configuration.getInstance().getStringWithDefault("propval", "FAIL")); - assertEquals(propVal + procVal, - Configuration.getInstance().getStringWithDefault("propproc", "FAIL")); - assertEquals("changed", - Configuration.getInstance().getStringWithDefault("aval", "FAIL")); - assertEquals("nothing", - Configuration.getInstance().getStringWithDefault("multiprop", "FAIL")); - assertEquals("nada", - Configuration.getInstance().getStringWithDefault("multiprop2", "FAIL")); + assertEquals(procVal, Configuration.getInstance().getStringWithDefault("procval", "FAIL")); + assertEquals(propVal, Configuration.getInstance().getStringWithDefault("propval", "FAIL")); + assertEquals(propVal + procVal, Configuration.getInstance().getStringWithDefault("propproc", "FAIL")); + assertEquals("changed", Configuration.getInstance().getStringWithDefault("aval", "FAIL")); + assertEquals("nothing", Configuration.getInstance().getStringWithDefault("multiprop", "FAIL")); + assertEquals("nada", Configuration.getInstance().getStringWithDefault("multiprop2", "FAIL")); // Make sure character escapes work - assertEquals("\\abcd", - Configuration.getInstance().getStringWithDefault("bval", "FAIL")); + assertEquals("\\abcd", Configuration.getInstance().getStringWithDefault("bval", "FAIL")); // Make sure the Unicode conversion works properly assertEquals("P", Configuration.getInstance().getStringWithDefault("cval", "FAIL")); @@ -301,8 +267,7 @@ public void testContext() throws Exception { } public void testContextIgnoreScope() throws Exception { - final String oldProcessName = - System.getProperty(ConfigurationContext.PROCESS_NAME_PROPERTY); + final String oldProcessName = System.getProperty(ConfigurationContext.PROCESS_NAME_PROPERTY); final String testProp = "testproperty"; try { 
System.setProperty(FILENAME_PROPERTY, "resources/test-context.prop"); @@ -311,23 +276,16 @@ public void testContextIgnoreScope() throws Exception { System.setProperty("process.name", procVal); System.setProperty(testProp, propVal); Configuration.getInstance().reloadProperties(true); - assertEquals(procVal, - Configuration.getInstance().getStringWithDefault("process.name", "FAIL")); - assertEquals(propVal, - Configuration.getInstance().getStringWithDefault(testProp, "FAIL")); - // This is the last value it was set to, so it should be this, ignoring all the 'final' - // declarations. - assertEquals("Bdefg", - Configuration.getInstance().getStringWithDefault("propproc", "FAIL")); - - assertEquals("changed", - Configuration.getInstance().getStringWithDefault("aval", "FAIL")); - - assertEquals("multi", - Configuration.getInstance().getStringWithDefault("multiprop", "FAIL")); - - assertEquals("something", - Configuration.getInstance().getStringWithDefault("multiprop2", "FAIL")); + assertEquals(procVal, Configuration.getInstance().getStringWithDefault("process.name", "FAIL")); + assertEquals(propVal, Configuration.getInstance().getStringWithDefault(testProp, "FAIL")); + // This is the last value it was set to, so it should be this, ignoring all the 'final' declarations. + assertEquals("Bdefg", Configuration.getInstance().getStringWithDefault("propproc", "FAIL")); + + assertEquals("changed", Configuration.getInstance().getStringWithDefault("aval", "FAIL")); + + assertEquals("multi", Configuration.getInstance().getStringWithDefault("multiprop", "FAIL")); + + assertEquals("something", Configuration.getInstance().getStringWithDefault("multiprop2", "FAIL")); } finally { if (oldProcessName != null) System.setProperty("process.name", oldProcessName); @@ -357,33 +315,28 @@ public void testFinalAndFinalizedDeclaration() { } /** - * Test that changing a value that was declared 'final' in an already-included file causes an - * error. 
+ * Test that changing a value that was declared 'final' in an already-included file causes an error. */ public void testIncludeFinal() { - // This test should set some values, then fail immediately due to changing the value of - // 'includetest' + // This test should set some values, then fail immediately due to changing the value of 'includetest' runTestsOnFinalKeyword("resources/include-test.prop", "finalinclude", "", "bar", "bar"); } /** - * Verify that we can either create an error or a warning when a declaration previously named - * 'final' is modified later. + * Verify that we can either create an error or a warning when a declaration previously named 'final' is modified + * later. * - * @param filename The name of the file to load, generally from the resources directory within - * this project + * @param filename The name of the file to load, generally from the resources directory within this project * @param contextName The process name to use while loading the configuration file - * @param beforeValue The value that the 'beforetest' property should have at the end of this - * test. If 'beforetest' is explicitly set, then that value should appear (since it was - * parsed before the error happened). If 'beforetest' is not explicitly set but is - * created by an empty declaration or the 'final' keyword, then it should have a value of - * empty-string. If 'beforetest' was created by a 'finalize' declaration, then it has no - * value at all, so the default value of 'FAIL' should be returned. + * @param beforeValue The value that the 'beforetest' property should have at the end of this test. If 'beforetest' + * is explicitly set, then that value should appear (since it was parsed before the error happened). If + * 'beforetest' is not explicitly set but is created by an empty declaration or the 'final' keyword, then it + * should have a value of empty-string. 
If 'beforetest' was created by a 'finalize' declaration, then it has + * no value at all, so the default value of 'FAIL' should be returned. */ - private void runTestsOnFinalKeyword(final String filename, final String contextName, - final String beforeValue, final String finalTestValue, final String includeValue) { - final String oldProcessName = - System.getProperty(ConfigurationContext.PROCESS_NAME_PROPERTY); + private void runTestsOnFinalKeyword(final String filename, final String contextName, final String beforeValue, + final String finalTestValue, final String includeValue) { + final String oldProcessName = System.getProperty(ConfigurationContext.PROCESS_NAME_PROPERTY); final String testPropContextIdentifier = "testbatch"; final String beforeTestProperty = "beforetest"; final String finalTestProperty = "finaltest"; @@ -394,34 +347,26 @@ private void runTestsOnFinalKeyword(final String filename, final String contextN // Set the context to something that tries to re-set some final declarations System.setProperty(testPropContextIdentifier, contextName); - // A ConfigurationException should be generated when trying to load a file that re-sets - // a final property + // A ConfigurationException should be generated when trying to load a file that re-sets a final property try { Configuration.getInstance().reloadProperties(); fail("Did not catch re-set final declaration"); } catch (ConfigurationException e) { - // expected; this configuration tries to re-set a final declaration or includes a - // file in the wrong place. + // expected; this configuration tries to re-set a final declaration or includes a file in the wrong + // place. 
String a = e.getMessage(); } catch (Exception e) { - fail("Unexpected exception while checking for re-set final declaration: " - + e.getMessage()); + fail("Unexpected exception while checking for re-set final declaration: " + e.getMessage()); } - // The 'before' line should load with an empty value, unless it was declared with a - // 'finalize' statement and so is null. - assertEquals(beforeValue, - Configuration.getInstance().getStringWithDefault(beforeTestProperty, "FAIL")); + // The 'before' line should load with an empty value, unless it was declared with a 'finalize' statement and + // so is null. + assertEquals(beforeValue, Configuration.getInstance().getStringWithDefault(beforeTestProperty, "FAIL")); // the 'final' line should have the original value, unless it was not loaded - assertEquals(finalTestValue, - Configuration.getInstance().getStringWithDefault(finalTestProperty, "FAIL")); - // The 'after' line should not be loaded, since this should always come after some - // exception - assertEquals("NOTHING", - Configuration.getInstance().getStringWithDefault(afterTestProperty, "NOTHING")); - // The 'includetest' line should be loaded with its initial value, unless it was changed - // or failed to load - assertEquals(includeValue, - Configuration.getInstance().getStringWithDefault("includetest", "FAIL")); + assertEquals(finalTestValue, Configuration.getInstance().getStringWithDefault(finalTestProperty, "FAIL")); + // The 'after' line should not be loaded, since this should always come after some exception + assertEquals("NOTHING", Configuration.getInstance().getStringWithDefault(afterTestProperty, "NOTHING")); + // The 'includetest' line should be loaded with its initial value, unless it was changed or failed to load + assertEquals(includeValue, Configuration.getInstance().getStringWithDefault("includetest", "FAIL")); } finally { if (oldProcessName != null) @@ -434,8 +379,7 @@ private void runTestsOnFinalKeyword(final String filename, final String contextN * 
Test that an includefiles line must be the first line in the file */ public void testIncludeInWrongPlace() { - runTestsOnFinalKeyword("resources/test-include-wrong-place.prop", "finalinclude", "FAIL", - "FAIL", "FAIL"); + runTestsOnFinalKeyword("resources/test-include-wrong-place.prop", "finalinclude", "FAIL", "FAIL", "FAIL"); } public void testFinalizedPropertyProgrammatic() { @@ -447,8 +391,7 @@ public void testFinalizedPropertyProgrammatic() { } catch (ConfigurationException e) { // Expected } catch (Exception e) { - fail("Received unexpected error while attempting to change final property: " - + e.getMessage()); + fail("Received unexpected error while attempting to change final property: " + e.getMessage()); } } @@ -460,18 +403,15 @@ public void testFinalizePatternDeclaration() { System.setProperty(FILENAME_PROPERTY, "resources/test-finalized-property-pattern.prop"); try { Configuration.getInstance().reloadProperties(); - // 'fo' should be able to be set, since that doesn't match the pattern 'foo*' that was - // finalized + // 'fo' should be able to be set, since that doesn't match the pattern 'foo*' that was finalized Configuration.getInstance().setProperty("fo", "plugh"); - // Any other property starting with 'foo' should not be able to be set, even a - // newly-created on. + // Any other property starting with 'foo' should not be able to be set, even a newly-created on. 
Configuration.getInstance().setProperty("foozywhatsis", "xyzzy"); fail("Expected and did not get error due to attempting to change finalized property."); } catch (ConfigurationException e) { // Expected } catch (Exception e) { - fail("Received unexpected error while attempting to change finalized property: " - + e.getMessage()); + fail("Received unexpected error while attempting to change finalized property: " + e.getMessage()); } } @@ -483,15 +423,13 @@ public void testMultiline() { System.setProperty(FILENAME_PROPERTY, "resources/test-multiline.prop"); try { Configuration.getInstance().reloadProperties(); - assertEquals("abcdefghi", - Configuration.getInstance().getStringWithDefault("foo", "FAIL")); + assertEquals("abcdefghi", Configuration.getInstance().getStringWithDefault("foo", "FAIL")); assertEquals("C:\\a\\b\\c\\", - Configuration.getInstance().getStringWithDefault("research.dbCacheDir", "FAIL")); - assertEquals("C:\\d\\e\\", Configuration.getInstance() - .getStringWithDefault("research.stockPriceEstimator", "FAIL")); + Configuration.getInstance().getStringWithDefault("research.dbCacheDir", "FAIL")); + assertEquals("C:\\d\\e\\", + Configuration.getInstance().getStringWithDefault("research.stockPriceEstimator", "FAIL")); } catch (Exception e) { - fail( - "Received unexpected error while checking multiline properties: " + e.getMessage()); + fail("Received unexpected error while checking multiline properties: " + e.getMessage()); } } @@ -508,19 +446,18 @@ public void testShowHistory() { assertEquals("[]", history.get(0).context); if ("1.8".equals(System.getProperty("java.specification.version"))) { assertEquals( - ": io.deephaven.configuration.TestConfiguration.testShowHistory(TestConfiguration.java:502)\n" - + - "sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)\n" + - "sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)\n", - history.get(0).fileName); + ": 
io.deephaven.configuration.TestConfiguration.testShowHistory(TestConfiguration.java:440)\n" + + + "sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)\n" + + "sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)\n", + history.get(0).fileName); } else { assertEquals( - ": io.deephaven.configuration.TestConfiguration.testShowHistory(TestConfiguration.java:502)\n" - + - "java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)\n" - + - "java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)\n", - history.get(0).fileName); + ": io.deephaven.configuration.TestConfiguration.testShowHistory(TestConfiguration.java:440)\n" + + + "java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)\n" + + "java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)\n", + history.get(0).fileName); } System.out.println("-------------- End show history -----------------"); } diff --git a/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/AbortableRangeConsumer.java b/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/AbortableRangeConsumer.java index 962b1a8972a..518dd97c781 100644 --- a/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/AbortableRangeConsumer.java +++ b/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/AbortableRangeConsumer.java @@ -5,9 +5,8 @@ */ public interface AbortableRangeConsumer { /** - * Deliver a single range. Methods receiving a RangeConsumer should call accept on it for - * non-empty, disjoint ranges. Calls should be made in increasing order of values contained in - * the ranges. + * Deliver a single range. Methods receiving a RangeConsumer should call accept on it for non-empty, disjoint + * ranges. Calls should be made in increasing order of values contained in the ranges. * * @param begin first value of the range to add. 
* @param end one past the last value in the range to add (ie, end is exclusive). diff --git a/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/ArrayContainer.java b/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/ArrayContainer.java index f19b4856ca2..7b46f850aeb 100644 --- a/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/ArrayContainer.java +++ b/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/ArrayContainer.java @@ -1,8 +1,8 @@ /* * (c) the authors Licensed under the Apache License, Version 2.0. * - * The code in this file is a heavily modified version of the original in the RoaringBitmap library; - * please see https://roaringbitmap.org/ + * The code in this file is a heavily modified version of the original in the RoaringBitmap library; please see + * https://roaringbitmap.org/ * */ @@ -31,8 +31,7 @@ public class ArrayContainer extends Container { // containers with DEFAULT_MAX_SZE or less integers should be ArrayContainers static final int DEFAULT_MAX_SIZE = 4096 - 6; // 12 bytes of object overhead is 6 shorts. - public static final int SWITCH_CONTAINER_CARDINALITY_THRESHOLD = - DEFAULT_MAX_SIZE - DEFAULT_MAX_SIZE / 16; + public static final int SWITCH_CONTAINER_CARDINALITY_THRESHOLD = DEFAULT_MAX_SIZE - DEFAULT_MAX_SIZE / 16; static int sizeInBytes(final int cardinality) { return cardinality * Short.BYTES; @@ -71,8 +70,8 @@ public ArrayContainer(final int capacity) { } /** - * Create an array container with a run of ones from firstOfRun to lastOfRun, inclusive. Caller - * is responsible for making sure the range is small enough that ArrayContainer is appropriate. + * Create an array container with a run of ones from firstOfRun to lastOfRun, inclusive. Caller is responsible for + * making sure the range is small enough that ArrayContainer is appropriate. 
* * @param firstOfRun first index * @param lastOfRun last index (range is exclusive) @@ -100,8 +99,7 @@ private ArrayContainer(final ArrayContainer src, final int startRank, final int * @param offset index position for the first value to copy. * @param sz number of values to copy. */ - private ArrayContainer(final int newCapacity, final short[] arr, final int offset, - final int sz) { + private ArrayContainer(final int newCapacity, final short[] arr, final int offset, final int sz) { cardinality = sz; final short[] cs = new short[shortArraySizeRounding(newCapacity)]; System.arraycopy(arr, offset, cs, 0, sz); @@ -148,17 +146,15 @@ public static ArrayContainer makeByCopying(final short[] arr, final int offset, return makeByCopying(sz, arr, offset, sz); } - public static ArrayContainer makeByCopying(final int newCapacity, final short[] arr, - final int offset, final int sz) { + public static ArrayContainer makeByCopying(final int newCapacity, final short[] arr, final int offset, + final int sz) { return new ArrayContainer(newCapacity, arr, offset, sz); } /** - * Construct a new ArrayContainer using the provided array. The container takes ownership of the - * array. + * Construct a new ArrayContainer using the provided array. The container takes ownership of the array. * - * @param arr array with values in increasing unsigned short order. The container takes - * ownership of this array. + * @param arr array with values in increasing unsigned short order. The container takes ownership of this array. * @param sz number of elements in arr. 
*/ @SuppressWarnings("unused") @@ -184,8 +180,7 @@ public Container add(final int begin, final int end) { if (indexstart < 0) { indexstart = -indexstart - 1; } - int indexend = - ContainerUtil.unsignedBinarySearch(content, 0, cardinality, (short) (end - 1)); + int indexend = ContainerUtil.unsignedBinarySearch(content, 0, cardinality, (short) (end - 1)); if (indexend < 0) { indexend = -indexend - 1; } else { @@ -199,7 +194,7 @@ public Container add(final int begin, final int end) { } final ArrayContainer answer = makeByCopying(newcardinality, content, 0, indexstart); System.arraycopy(content, indexend, answer.content, indexstart + rangelength, - cardinality - indexend); + cardinality - indexend); for (int k = 0; k < rangelength; ++k) { answer.content[k + indexstart] = (short) (begin + k); } @@ -243,9 +238,9 @@ public Container set(final short x) { } private Container isetImpl(final short x, - final PositionHint positionHint, - final Supplier self, - final Supplier copy) { + final PositionHint positionHint, + final Supplier self, + final Supplier copy) { final int begin = getIfNotNullAndNonNegative(positionHint, 0); int loc = ContainerUtil.unsignedBinarySearch(content, begin, cardinality, x); if (loc >= 0) { @@ -266,8 +261,7 @@ private Container isetImpl(final short x, return ans.isetImplSecondHalf(x, loc, positionHint); } - private Container isetImplSecondHalf(final short x, final int loc, - final PositionHint positionHintOut) { + private Container isetImplSecondHalf(final short x, final int loc, final PositionHint positionHintOut) { if (cardinality >= content.length) { increaseCapacity(); } @@ -313,8 +307,7 @@ public Container and(final ArrayContainer value2) { ArrayContainer value1 = this; final int desiredCapacity = Math.min(value1.getCardinality(), value2.getCardinality()); ArrayContainer answer = new ArrayContainer(desiredCapacity); - answer.cardinality = - ContainerUtil.unsignedIntersect2by2(value1.content, value1.getCardinality(), + answer.cardinality = 
ContainerUtil.unsignedIntersect2by2(value1.content, value1.getCardinality(), value2.content, value2.getCardinality(), answer.content); return answer.maybeSwitchContainer(); } @@ -338,8 +331,7 @@ public Container andNot(final ArrayContainer value2) { ArrayContainer value1 = this; final int desiredCapacity = value1.getCardinality(); ArrayContainer answer = new ArrayContainer(desiredCapacity); - answer.cardinality = - ContainerUtil.unsignedDifference(value1.content, value1.getCardinality(), + answer.cardinality = ContainerUtil.unsignedDifference(value1.content, value1.getCardinality(), value2.content, value2.getCardinality(), answer.content); return answer.maybeSwitchContainer(); } @@ -422,12 +414,11 @@ public boolean contains(final int rangeStart, final int rangeEnd) { if (start >= cardinality) { return false; } - final int end = - ContainerUtil.advanceUntil(content, start - 1, cardinality, (short) maximum); + final int end = ContainerUtil.advanceUntil(content, start - 1, cardinality, (short) maximum); return end < cardinality - && end - start == maximum - rangeStart - && content[start] == (short) rangeStart - && content[end] == (short) maximum; + && end - start == maximum - rangeStart + && content[start] == (short) rangeStart + && content[end] == (short) maximum; } @Override @@ -446,9 +437,9 @@ protected boolean contains(final RunContainer runContainer) { } int end = ContainerUtil.advanceUntil(content, start - 1, cardinality, (short) last); if (end >= cardinality || - end - start != last - first || - content[start] != (short) first || - content[end] != (short) last) { + end - start != last - first || + content[start] != (short) first || + content[end] != (short) last) { return false; } prev = end - 1; @@ -573,8 +564,7 @@ public boolean advance(final int v) { if (ContainerUtil.toIntUnsigned(parent.content[pos]) <= v) { return true; } - int i = ContainerUtil.unsignedBinarySearch(parent.content, 0, pos + 1, - ContainerUtil.lowbits(v)); + int i = 
ContainerUtil.unsignedBinarySearch(parent.content, 0, pos + 1, ContainerUtil.lowbits(v)); if (i < 0) { i = -i - 1; if (i == 0) { @@ -675,8 +665,7 @@ public Container iadd(final int begin, final int end) { if (indexstart < 0) { indexstart = -indexstart - 1; } - int indexend = - ContainerUtil.unsignedBinarySearch(content, 0, cardinality, (short) (end - 1)); + int indexend = ContainerUtil.unsignedBinarySearch(content, 0, cardinality, (short) (end - 1)); if (indexend < 0) { indexend = -indexend - 1; } else { @@ -689,26 +678,25 @@ public Container iadd(final int begin, final int end) { return a.iadd(begin, end); } /* - * b - index of begin(indexstart), e - index of end(indexend), |--| is current sequential - * indexes in content. Total 6 cases are possible, listed as below: + * b - index of begin(indexstart), e - index of end(indexend), |--| is current sequential indexes in content. + * Total 6 cases are possible, listed as below: * - * case-1) |--------|b-e case-2) |----b---|e case-3) |---b---e---| case-4) b|----e---| - * case-5) b-e|------| case-6) b|-----|e + * case-1) |--------|b-e case-2) |----b---|e case-3) |---b---e---| case-4) b|----e---| case-5) b-e|------| + * case-6) b|-----|e * - * In case of old approach, we did (1a) Array.copyOf in increaseCapacity ( # of elements - * copied -> cardinality), (1b) then we moved elements using System.arrayCopy ( # of - * elements copied -> cardinality -indexend), (1c) then we set all elements from begin to - * end ( # of elements set -> end - begin) + * In case of old approach, we did (1a) Array.copyOf in increaseCapacity ( # of elements copied -> cardinality), + * (1b) then we moved elements using System.arrayCopy ( # of elements copied -> cardinality -indexend), (1c) + * then we set all elements from begin to end ( # of elements set -> end - begin) * - * With new approach, (2a) we set all elements from begin to end ( # of elements set -> end- - * begin), (2b) we only copy elements in current set which are not in range 
begin-end ( # of - * elements copied -> cardinality - (end-begin) ) + * With new approach, (2a) we set all elements from begin to end ( # of elements set -> end- begin), (2b) we + * only copy elements in current set which are not in range begin-end ( # of elements copied -> cardinality - + * (end-begin) ) * - * why is it faster? Logically we are doing less # of copies. Mathematically proof as below: - * -> 2a is same as 1c, so we can avoid. Assume, 2b < (1a+1b), lets prove this assumption. - * Substitute the values. (cardinality - (end-begin)) < ( 2*cardinality - indexend) , lowest - * possible value of indexend is 0 and equation holds true , hightest possible value of - * indexend is cardinality and equation holds true , hence "<" equation holds true always + * why is it faster? Logically we are doing less # of copies. Mathematically proof as below: -> 2a is same as + * 1c, so we can avoid. Assume, 2b < (1a+1b), lets prove this assumption. Substitute the values. (cardinality - + * (end-begin)) < ( 2*cardinality - indexend) , lowest possible value of indexend is 0 and equation holds true , + * hightest possible value of indexend is cardinality and equation holds true , hence "<" equation holds true + * always */ final ArrayContainer ans; if (newcardinality > content.length) { @@ -720,12 +708,12 @@ public Container iadd(final int begin, final int end) { destination[k + indexstart] = (short) (begin + k); } /* - * so far cases - 1,2 and 6 are done Now, if e < cardinality, we copy from e to - * cardinality.Otherwise do noting this covers remaining 3,4 and 5 cases + * so far cases - 1,2 and 6 are done Now, if e < cardinality, we copy from e to cardinality.Otherwise do + * noting this covers remaining 3,4 and 5 cases */ System.arraycopy(content, indexend, - destination, indexstart + rangelength, - cardinality - indexend); + destination, indexstart + rangelength, + cardinality - indexend); if (shared) { ans = new ArrayContainer(destination, newcardinality); } else { @@ 
-741,8 +729,8 @@ public Container iadd(final int begin, final int end) { ans = this; } System.arraycopy(content, indexend, - ans.content, indexstart + rangelength, - cardinality - indexend); + ans.content, indexstart + rangelength, + cardinality - indexend); for (int k = 0; k < rangelength; ++k) { ans.content[k + indexstart] = (short) (begin + k); } @@ -790,7 +778,7 @@ public Container iappend(final int begin, final int end) { public Container iand(final ArrayContainer value2) { final ArrayContainer ans = deepcopyIfShared(); ans.cardinality = ContainerUtil.unsignedIntersect2by2(ans.content, ans.getCardinality(), - value2.content, value2.getCardinality(), ans.content); + value2.content, value2.getCardinality(), ans.content); return ans.maybeSwitchContainer(); } @@ -817,8 +805,7 @@ public Container iand(final RunContainer x) { @Override public Container iandNot(final ArrayContainer value2) { final ArrayContainer ans = deepcopyIfShared(); - ans.cardinality = - ContainerUtil.unsignedDifference(ans.content, ans.getCardinality(), value2.content, + ans.cardinality = ContainerUtil.unsignedDifference(ans.content, ans.getCardinality(), value2.content, value2.getCardinality(), ans.content); return ans.maybeSwitchContainer(); } @@ -849,9 +836,9 @@ private void increaseCapacity() { private static int nextCapacity(final int oldCapacity) { return (oldCapacity == 0) ? DEFAULT_INIT_SIZE - : oldCapacity < 64 ? shortArraySizeRounding(oldCapacity * 2) - : oldCapacity < 1067 ? shortArraySizeRounding(oldCapacity * 3 / 2) - : shortArraySizeRounding(oldCapacity * 5 / 4); + : oldCapacity < 64 ? shortArraySizeRounding(oldCapacity * 2) + : oldCapacity < 1067 ? 
shortArraySizeRounding(oldCapacity * 3 / 2) + : shortArraySizeRounding(oldCapacity * 5 / 4); } // temporarily allow an illegally large size, as long as the operation creating @@ -883,13 +870,11 @@ private int calculateCapacity(final int min) { public Container inot(final int firstOfRange, final int exclusiveEndOfRange) { // TODO: may need to convert to a RunContainer // determine the span of array indices to be affected - int startIndex = - ContainerUtil.unsignedBinarySearch(content, 0, cardinality, (short) firstOfRange); + int startIndex = ContainerUtil.unsignedBinarySearch(content, 0, cardinality, (short) firstOfRange); if (startIndex < 0) { startIndex = -startIndex - 1; } - int lastIndex = ContainerUtil.unsignedBinarySearch(content, 0, cardinality, - (short) (exclusiveEndOfRange - 1)); + int lastIndex = ContainerUtil.unsignedBinarySearch(content, 0, cardinality, (short) (exclusiveEndOfRange - 1)); if (lastIndex < 0) { lastIndex = -lastIndex - 1 - 1; } @@ -904,8 +889,7 @@ public Container inot(final int firstOfRange, final int exclusiveEndOfRange) { final short[] src = content; // so big we need a bitmap? 
if (newCardinality > DEFAULT_MAX_SIZE) { - return toBiggerCardinalityContainer(newCardinality).inot(firstOfRange, - exclusiveEndOfRange); + return toBiggerCardinalityContainer(newCardinality).inot(firstOfRange, exclusiveEndOfRange); } if (shared) { ans = new ArrayContainer(calculateCapacity(newCardinality)); @@ -918,10 +902,9 @@ public Container inot(final int firstOfRange, final int exclusiveEndOfRange) { } } // slide right the contents after the range - System.arraycopy(src, startIndex + currentValuesInRange, ans.content, - startIndex + newValuesInRange, cardinality - 1 - lastIndex); - ans.negateRange(newValuesInRange, startIndex, lastIndex, firstOfRange, - exclusiveEndOfRange); + System.arraycopy(src, startIndex + currentValuesInRange, ans.content, startIndex + newValuesInRange, + cardinality - 1 - lastIndex); + ans.negateRange(newValuesInRange, startIndex, lastIndex, firstOfRange, exclusiveEndOfRange); } else { // no alloc expansion needed if (shared) { if (cardinalityChange == 0) { @@ -933,13 +916,12 @@ public Container inot(final int firstOfRange, final int exclusiveEndOfRange) { } else { ans = this; } - ans.negateRange(newValuesInRange, startIndex, lastIndex, firstOfRange, - exclusiveEndOfRange); + ans.negateRange(newValuesInRange, startIndex, lastIndex, firstOfRange, exclusiveEndOfRange); if (cardinalityChange < 0) { // contraction, left sliding. 
// Leave array oversize - System.arraycopy(content, startIndex + currentValuesInRange, ans.content, - startIndex + newValuesInRange, cardinality - 1 - lastIndex); + System.arraycopy(content, startIndex + currentValuesInRange, ans.content, startIndex + newValuesInRange, + cardinality - 1 - lastIndex); } } ans.cardinality = newCardinality; @@ -985,16 +967,14 @@ public Container ior(final ArrayContainer value2) { int newCapacity = calculateCapacity(sumOfCardinalities); final ArrayContainer ans = new ArrayContainer(newCapacity); ans.cardinality = - ContainerUtil.unsignedUnion2by2(content, 0, cardinality, value2.content, 0, - value2.cardinality, - ans.content); + ContainerUtil.unsignedUnion2by2(content, 0, cardinality, value2.content, 0, value2.cardinality, + ans.content); return ans; } System.arraycopy(content, 0, content, value2.cardinality, cardinality); cardinality = - ContainerUtil.unsignedUnion2by2(content, value2.cardinality, cardinality, - value2.content, 0, - value2.cardinality, content); + ContainerUtil.unsignedUnion2by2(content, value2.cardinality, cardinality, value2.content, 0, + value2.cardinality, content); return this; } @@ -1036,8 +1016,7 @@ private Container removeImpl(final int begin, final int end, final boolean inPla if (indexstart < 0) { indexstart = -indexstart - 1; } - int indexend = - ContainerUtil.unsignedBinarySearch(content, 0, cardinality, (short) (end - 1)); + int indexend = ContainerUtil.unsignedBinarySearch(content, 0, cardinality, (short) (end - 1)); if (indexend < 0) { indexend = -indexend - 1; } else { @@ -1055,10 +1034,8 @@ private Container removeImpl(final int begin, final int end, final boolean inPla return makeSingletonContainer(content[indexstart > 0 ? 0 : indexend]); } if (newCardinality == 2) { - // Since elements in the range need to be contiguous, you can think about the range - // inside our contents - // array as one block. 
The two values remaining can only be either both at the - // beginning, before the block, + // Since elements in the range need to be contiguous, you can think about the range inside our contents + // array as one block. The two values remaining can only be either both at the beginning, before the block, // or one before and one after, or both after the block at the end. final int i0; final int i1; @@ -1083,7 +1060,7 @@ private Container removeImpl(final int begin, final int end, final boolean inPla System.arraycopy(content, 0, ans.content, 0, indexstart); } System.arraycopy(content, indexstart + rangelength, ans.content, indexstart, - cardinality - indexstart - rangelength); + cardinality - indexstart - rangelength); ans.cardinality = newCardinality; return ans; } @@ -1110,15 +1087,14 @@ protected void loadData(final BitmapContainer bitmapContainer) { } protected void loadDataWithSkipValue( - final BitmapContainer bitmapContainer, final short valueToSkip, - final PositionHint positionHintOut) { + final BitmapContainer bitmapContainer, final short valueToSkip, final PositionHint positionHintOut) { cardinality = bitmapContainer.fillArrayWithSkipValue(content, valueToSkip, positionHintOut); } // for use in inot range known to be nonempty private void negateRange(final int valuesInRange, - final int startIndex, final int lastIndex, - final int startRange, final int lastRange) { + final int startIndex, final int lastIndex, + final int startRange, final int lastRange) { // compute the negation into buffer final short[] buf = threadLocalBuf.get(); final short[] buffer = buf.length >= valuesInRange ? 
buf : new short[valuesInRange]; @@ -1159,13 +1135,11 @@ public Container not(final int firstOfRange, final int lastOfRange) { } // determine the span of array indices to be affected - int startIndex = - ContainerUtil.unsignedBinarySearch(content, 0, cardinality, (short) firstOfRange); + int startIndex = ContainerUtil.unsignedBinarySearch(content, 0, cardinality, (short) firstOfRange); if (startIndex < 0) { startIndex = -startIndex - 1; } - int lastIndex = - ContainerUtil.unsignedBinarySearch(content, 0, cardinality, (short) (lastOfRange - 1)); + int lastIndex = ContainerUtil.unsignedBinarySearch(content, 0, cardinality, (short) (lastOfRange - 1)); if (lastIndex < 0) { lastIndex = -lastIndex - 2; } @@ -1260,10 +1234,10 @@ public Container or(final ArrayContainer value2) { } ArrayContainer answer = new ArrayContainer(totalCardinality); answer.cardinality = - ContainerUtil.unsignedUnion2by2( - value1.content, 0, value1.getCardinality(), - value2.content, 0, value2.getCardinality(), - answer.content); + ContainerUtil.unsignedUnion2by2( + value1.content, 0, value1.getCardinality(), + value2.content, 0, value2.getCardinality(), + answer.content); return answer; } @@ -1295,15 +1269,13 @@ private Container or(final ShortIterator it, final boolean exclusive) { final ArrayContainer ac = new ArrayContainer(); int myItPos = 0; // do a merge. int -1 denotes end of input. - int myHead = - (myItPos == cardinality) ? -1 : ContainerUtil.toIntUnsigned(content[myItPos++]); + int myHead = (myItPos == cardinality) ? -1 : ContainerUtil.toIntUnsigned(content[myItPos++]); int hisHead = advance(it); while (myHead != -1 && hisHead != -1) { if (myHead < hisHead) { ac.forceAppend((short) myHead); - myHead = - (myItPos == cardinality) ? -1 : ContainerUtil.toIntUnsigned(content[myItPos++]); + myHead = (myItPos == cardinality) ? 
-1 : ContainerUtil.toIntUnsigned(content[myItPos++]); } else if (myHead > hisHead) { ac.forceAppend((short) hisHead); hisHead = advance(it); @@ -1312,15 +1284,13 @@ private Container or(final ShortIterator it, final boolean exclusive) { ac.forceAppend((short) hisHead); } hisHead = advance(it); - myHead = - (myItPos == cardinality) ? -1 : ContainerUtil.toIntUnsigned(content[myItPos++]); + myHead = (myItPos == cardinality) ? -1 : ContainerUtil.toIntUnsigned(content[myItPos++]); } } while (myHead != -1) { ac.forceAppend((short) myHead); - myHead = - (myItPos == cardinality) ? -1 : ContainerUtil.toIntUnsigned(content[myItPos++]); + myHead = (myItPos == cardinality) ? -1 : ContainerUtil.toIntUnsigned(content[myItPos++]); } while (hisHead != -1) { @@ -1370,8 +1340,7 @@ Container unset(final short x, final PositionHint positionHint) { return unsetImpl(x, false, positionHint); } - private Container unsetImpl(final short x, final boolean inPlace, - final PositionHint positionHint) { + private Container unsetImpl(final short x, final boolean inPlace, final PositionHint positionHint) { final int searchStartPos = getIfNotNullAndNonNegative(positionHint, 0); final int loc = ContainerUtil.unsignedBinarySearch(content, searchStartPos, cardinality, x); if (loc < 0) { @@ -1435,12 +1404,11 @@ public Container runOptimize() { } private void compact() { - if (shared || content.length == cardinality - || (cardinality == 0 && content.length == DEFAULT_INIT_SIZE)) { + if (shared || content.length == cardinality || (cardinality == 0 && content.length == DEFAULT_INIT_SIZE)) { return; } final short[] newContent = - new short[cardinality == 0 ? DEFAULT_INIT_SIZE : shortArraySizeRounding(cardinality)]; + new short[cardinality == 0 ? 
DEFAULT_INIT_SIZE : shortArraySizeRounding(cardinality)]; System.arraycopy(content, 0, newContent, 0, cardinality); content = newContent; } @@ -1505,8 +1473,7 @@ private Container andRangeImpl(final boolean inPlace, final int start, final int if (end <= start || isEmpty()) { return Container.empty(); } - int firstPos = ContainerUtil.unsignedBinarySearch(content, 0, cardinality, - ContainerUtil.lowbits(start)); + int firstPos = ContainerUtil.unsignedBinarySearch(content, 0, cardinality, ContainerUtil.lowbits(start)); if (firstPos < 0) { firstPos = ~firstPos; if (firstPos >= cardinality || ContainerUtil.toIntUnsigned(content[firstPos]) >= end) { @@ -1514,8 +1481,8 @@ private Container andRangeImpl(final boolean inPlace, final int start, final int } } // inclusive. - int lastPos = ContainerUtil.unsignedBinarySearch(content, firstPos, cardinality, - ContainerUtil.lowbits(end - 1)); + int lastPos = + ContainerUtil.unsignedBinarySearch(content, firstPos, cardinality, ContainerUtil.lowbits(end - 1)); if (lastPos < 0) { lastPos = ~lastPos - 1; } @@ -1563,7 +1530,7 @@ public boolean findRanges(RangeConsumer outPositions, RangeIterator inValues, in return true; } int pos = ContainerUtil.unsignedBinarySearch(content, startSearch, cardinality, - ContainerUtil.lowbits(key)); + ContainerUtil.lowbits(key)); if (pos < 0) { throw new IllegalArgumentException("findRanges for invalid key=" + key); } @@ -1607,8 +1574,7 @@ public BitmapContainer toBitmapContainer() { public int nextValue(short fromValue) { int index = ContainerUtil.advanceUntil(content, -1, cardinality, fromValue); int effectiveIndex = index >= 0 ? index : -index - 1; - return effectiveIndex >= cardinality ? -1 - : ContainerUtil.toIntUnsigned(content[effectiveIndex]); + return effectiveIndex >= cardinality ? 
-1 : ContainerUtil.toIntUnsigned(content[effectiveIndex]); } private void assertNonEmpty() { @@ -1660,8 +1626,7 @@ public Container xor(final ArrayContainer value2) { return bc; } ArrayContainer answer = new ArrayContainer(totalCardinality); - answer.cardinality = - ContainerUtil.unsignedExclusiveUnion2by2(value1.content, value1.getCardinality(), + answer.cardinality = ContainerUtil.unsignedExclusiveUnion2by2(value1.content, value1.getCardinality(), value2.content, value2.getCardinality(), answer.content); return answer.maybeSwitchContainer(); } @@ -1729,8 +1694,8 @@ public boolean subsetOf(ArrayContainer c) { return false; } if (cardinality > c.cardinality || - first() < c.first() || - last() > c.last()) { + first() < c.first() || + last() > c.last()) { return false; } int ci = 0; @@ -1854,7 +1819,7 @@ public boolean overlaps(final BitmapContainer c) { @Override public boolean overlaps(final RunContainer c) { return (getCardinality() < c.getCardinality()) ? ContainerUtil.overlaps(this, c) - : ContainerUtil.overlaps(c, this); + : ContainerUtil.overlaps(c, this); } @Override diff --git a/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/ArrayContainerRangeIterator.java b/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/ArrayContainerRangeIterator.java index 17a6bfd7d58..00aacf76132 100644 --- a/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/ArrayContainerRangeIterator.java +++ b/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/ArrayContainerRangeIterator.java @@ -55,11 +55,10 @@ public void next() { } /** - * Set the current iterator range based on the index idx and value v provided. Look for a range - * starting at the value at index idx. The provided value v is to the left or inside that range, - * never to the right. Set the end of the current iterator range to the end of the range at idx. - * Set the current iterator range start to v if v is inside the range, or the start of the range - * at idx otherwise. 
+ * Set the current iterator range based on the index idx and value v provided. Look for a range starting at the + * value at index idx. The provided value v is to the left or inside that range, never to the right. Set the end of + * the current iterator range to the end of the range at idx. Set the current iterator range start to v if v is + * inside the range, or the start of the range at idx otherwise. * * @param v A value to the left or inside the range at idx * @param idx a valid index inside our contents array. @@ -88,7 +87,7 @@ public boolean advance(final int v) { return true; } int i = ContainerUtil.unsignedBinarySearch(parent.content, nextPos, parent.cardinality, - ContainerUtil.lowbits(v)); + ContainerUtil.lowbits(v)); if (i < 0) { i = -i - 1; } diff --git a/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/BatchIterator.java b/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/BatchIterator.java index 2b31acb14b6..18ecb4bf381 100644 --- a/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/BatchIterator.java +++ b/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/BatchIterator.java @@ -3,8 +3,7 @@ public interface BatchIterator { /** - * Writes the next batch of integers onto the buffer, and returns how many were written. Aims to - * fill the buffer. + * Writes the next batch of integers onto the buffer, and returns how many were written. Aims to fill the buffer. * * @param buffer - the target to write onto * @return how many values were written during the call. 
diff --git a/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/BitmapContainer.java b/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/BitmapContainer.java index 2a1e6ae4d23..a6595e83608 100644 --- a/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/BitmapContainer.java +++ b/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/BitmapContainer.java @@ -1,8 +1,8 @@ /* * (c) the authors Licensed under the Apache License, Version 2.0. * - * The code in this file is a heavily modified version of the original in the RoaringBitmap library; - * please see https://roaringbitmap.org/ + * The code in this file is a heavily modified version of the original in the RoaringBitmap library; please see + * https://roaringbitmap.org/ * */ @@ -25,8 +25,7 @@ public final class BitmapContainer extends Container implements Cloneable { protected static final int BITMAP_SIZE_IN_BYTES = BITMAP_CAPACITY * Long.BYTES; /** - * optimization flag: whether the cardinality of the bitmaps is maintained through branchless - * operations + * optimization flag: whether the cardinality of the bitmaps is maintained through branchless operations */ public static final boolean USE_BRANCHLESS = true; @@ -69,8 +68,8 @@ private BitmapContainer(final int start, final int end) { } /** - * Create a bitmap container with a run of ones from start to end. Caller must ensure that the - * range isn't so small that an ArrayContainer should have been created instead + * Create a bitmap container with a run of ones from start to end. Caller must ensure that the range isn't so small + * that an ArrayContainer should have been created instead * * @param start first index * @param end end index (exclusive) @@ -95,10 +94,10 @@ private BitmapContainer(final BitmapContainer other) { // For tests. 
BitmapContainer(final long[] newBitmap, final int newCardinality) { if (newBitmap.length != BITMAP_CAPACITY || - newCardinality < 0 || - newCardinality > MAX_RANGE) { + newCardinality < 0 || + newCardinality > MAX_RANGE) { throw new IllegalArgumentException( - "newBitmap.length=" + newBitmap.length + ", newCardinality=" + newCardinality); + "newBitmap.length=" + newBitmap.length + ", newCardinality=" + newCardinality); } cardinality = newCardinality; bitmap = newBitmap; @@ -144,15 +143,12 @@ private Container maybeSwitchContainerToArrayOrRun() { while (bitmap[i--] == 0) { ++backZeroes; } - final int bitsAvailableInNonZeroWords = - (BITMAP_CAPACITY - frontZeroes - backZeroes) * Long.BYTES * 8; + final int bitsAvailableInNonZeroWords = (BITMAP_CAPACITY - frontZeroes - backZeroes) * Long.BYTES * 8; // One every other bit set maximizes number of runs. final int runsUpperBound1 = bitsAvailableInNonZeroWords / 2; final int zeroBitsInNonZeroWords = bitsAvailableInNonZeroWords - cardinality; - final int runsUpperBound2 = 1 + zeroBitsInNonZeroWords; // Spread the fingers in one hand: 4 - // holes => 5 fingers. - if (Math.min(runsUpperBound1, - runsUpperBound2) < ArrayContainer.SWITCH_CONTAINER_CARDINALITY_THRESHOLD / 2) { + final int runsUpperBound2 = 1 + zeroBitsInNonZeroWords; // Spread the fingers in one hand: 4 holes => 5 fingers. 
+ if (Math.min(runsUpperBound1, runsUpperBound2) < ArrayContainer.SWITCH_CONTAINER_CARDINALITY_THRESHOLD / 2) { return toRunContainer(); } return this; @@ -228,7 +224,7 @@ Container set(final short x, final PositionHint positionHint) { } private Container setImpl( - final short x, final Supplier self, final Supplier copy) { + final short x, final Supplier self, final Supplier copy) { if (contains(x)) { return self.get(); } @@ -281,8 +277,7 @@ public Container iandRange(final int rangeStart, final int rangeEnd) { return andRangeImpl(!shared, rangeStart, rangeEnd); } - private Container andRangeImpl(final boolean inPlace, final int rangeStart, - final int rangeEnd) { + private Container andRangeImpl(final boolean inPlace, final int rangeStart, final int rangeEnd) { if (rangeEnd <= rangeStart || isEmpty()) { return Container.empty(); } @@ -461,8 +456,7 @@ public boolean contains(final short i) { public boolean contains(final int rangeStart, final int rangeEnd) { final ValuesInRangeContext ctx = new ValuesInRangeContext(rangeStart, rangeEnd); if (ctx.iFirst == ctx.iLast) { - return ((bitmap[ctx.iLast] & ctx.maskFirst & ctx.maskLast) == (ctx.maskFirst - & ctx.maskLast)); + return ((bitmap[ctx.iLast] & ctx.maskFirst & ctx.maskLast) == (ctx.maskFirst & ctx.maskLast)); } if ((bitmap[ctx.iFirst] & ctx.maskFirst) != ctx.maskFirst) { return false; @@ -543,8 +537,7 @@ protected void fillArray(final short[] array) { } } - int fillArrayWithSkipValue(final short[] array, final short valueToSkip, - final PositionHint positionHintOut) { + int fillArrayWithSkipValue(final short[] array, final short valueToSkip, final PositionHint positionHintOut) { int pos = 0; int base = 0; for (long bits : bitmap) { @@ -569,8 +562,7 @@ public Container iflip(final short x) { long bef = bitmap[index]; long mask = 1L << xAsInt; final boolean isOnAndWillBeTurnedOff = (bef & mask) != 0; - if (isOnAndWillBeTurnedOff - && cardinality <= ArrayContainer.SWITCH_CONTAINER_CARDINALITY_THRESHOLD) { + if 
(isOnAndWillBeTurnedOff && cardinality <= ArrayContainer.SWITCH_CONTAINER_CARDINALITY_THRESHOLD) { if (cardinality > 3) { final ArrayContainer ac = new ArrayContainer(cardinality - 1); ac.loadDataWithSkipValue(this, x, null); @@ -695,8 +687,7 @@ public boolean advance(final int v) { eatZeroes(); if (nextPos < 0) { while (bitmap[savedPos] == 0) { - ++savedPos; // there is some nonzero element otherwise we would have returned - // earlier. + ++savedPos; // there is some nonzero element otherwise we would have returned earlier. } curr = (savedPos + 1) * 64 - Long.numberOfLeadingZeros(bitmap[savedPos]) - 1; return false; @@ -899,8 +890,7 @@ public Container iappend(final int begin, final int end) { @Override public Container iand(final ArrayContainer b2) { - // We always produce an ArrayContainer result for and(ArrayContainer), so this will never be - // in-place. + // We always produce an ArrayContainer result for and(ArrayContainer), so this will never be in-place. return and(b2); } @@ -1500,9 +1490,9 @@ Container unset(final short x, final PositionHint positionHint) { } private Container unsetImpl( - final short v, - final boolean inPlace, - final PositionHint positionHintOut) { + final short v, + final boolean inPlace, + final PositionHint positionHintOut) { final int x = toIntUnsigned(v); final int index = x / 64; final long bef = bitmap[index]; @@ -1695,7 +1685,7 @@ private BitmapContainer selectToBitmapContainer(final int startRank, final int c public Container select(final int startRank, final int endRank) { if (endRank <= startRank || endRank > cardinality) { throw new IllegalArgumentException( - "startRank=" + startRank + ", endRank=" + endRank + ", cardinality=" + cardinality); + "startRank=" + startRank + ", endRank=" + endRank + ", cardinality=" + cardinality); } final int card = endRank - startRank; if (card < ArrayContainer.DEFAULT_MAX_SIZE) { @@ -1739,8 +1729,7 @@ public void selectRanges(final RangeConsumer outValues, final RangeIterator inPo int oend 
= -1; // inclusive int wordIndex = 0; if (isEmpty()) { - throw new IllegalArgumentException( - "select Ranges for invalid pos=" + inPositions.start()); + throw new IllegalArgumentException("select Ranges for invalid pos=" + inPositions.start()); } int wordAccumBitCount = Long.bitCount(bitmap[0]); int prevWordAccumBitCount = 0; @@ -1755,8 +1744,7 @@ public void selectRanges(final RangeConsumer outValues, final RangeIterator inPo prevWordAccumBitCount = wordAccumBitCount; wordAccumBitCount += Long.bitCount(bitmap[wordIndex]); } - int key = wordIndex * 64 - + ContainerUtil.select(bitmap[wordIndex], istart - prevWordAccumBitCount); + int key = wordIndex * 64 + ContainerUtil.select(bitmap[wordIndex], istart - prevWordAccumBitCount); if (ostart == -1) { ostart = oend = key; } else { @@ -1777,8 +1765,7 @@ public void selectRanges(final RangeConsumer outValues, final RangeIterator inPo prevWordAccumBitCount = wordAccumBitCount; wordAccumBitCount += Long.bitCount(bitmap[wordIndex]); } - key = wordIndex * 64 - + ContainerUtil.select(bitmap[wordIndex], j - prevWordAccumBitCount); + key = wordIndex * 64 + ContainerUtil.select(bitmap[wordIndex], j - prevWordAccumBitCount); if (oend + 1 == key) { oend = key; } else { @@ -1791,8 +1778,7 @@ public void selectRanges(final RangeConsumer outValues, final RangeIterator inPo } @Override - public boolean findRanges(final RangeConsumer outPositions, final RangeIterator inValues, - final int maxPos) { + public boolean findRanges(final RangeConsumer outPositions, final RangeIterator inValues, final int maxPos) { if (!inValues.hasNext()) { return false; } @@ -1851,8 +1837,8 @@ public boolean findRanges(final RangeConsumer outPositions, final RangeIterator pos = findSecondHalf(k, rj, kAccumBitCount); if (pos < 0) { throw new IllegalArgumentException("findRanges for invalid key=" + j); - // Note we do not validate potential values between istart and iend, just the - // endpoints of the range. 
+ // Note we do not validate potential values between istart and iend, just the endpoints of the + // range. } if (pos > maxPos) { outPositions.accept(ostart, oend + 1); @@ -2323,8 +2309,7 @@ public Container toLargeContainer() { public void validate() { final int computedCard = computeCardinality(this); if (computedCard != cardinality) { - throw new IllegalStateException( - "computedCard=" + computedCard + ", cardinality=" + cardinality); + throw new IllegalStateException("computedCard=" + computedCard + ", cardinality=" + cardinality); } } diff --git a/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/BitmapContainerRangeIterator.java b/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/BitmapContainerRangeIterator.java index 6c3829100fc..1c0049c81dd 100644 --- a/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/BitmapContainerRangeIterator.java +++ b/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/BitmapContainerRangeIterator.java @@ -217,22 +217,20 @@ public boolean search(final ContainerUtil.TargetComparator comp) { final long mask = maskForAllBitsSetFromOffsetToHigher(moffset); long masked = bitmap[m] & mask; if (masked == 0) { - // We will try towards the higher words first. Don't clobber m as we may realize we - // need to go towards + // We will try towards the higher words first. Don't clobber m as we may realize we need to go towards // lower words from m instead. int m2 = m; do { ++m2; - // by construction, bitmap[j] != 0 on entry, and m <= j, so this loop will end - // before + // by construction, bitmap[j] != 0 on entry, and m <= j, so this loop will end before // mm > bitmap.length - 1. } while (bitmap[m2] == 0); - // since we are moving towards higher words, the lowest bit is the one closer to the - // original (m,moffset) target. + // since we are moving towards higher words, the lowest bit is the one closer to the original + // (m,moffset) target. 
int m2offset = lowestBit(bitmap[m2]); if (m2 == j && m2offset == joffset) { - // Going towards higher words we ended up in the same place we started, and - // there are no more lower bits. + // Going towards higher words we ended up in the same place we started, and there are no more lower + // bits. // Try indexes from m towards the lower words instead. tryLowerBits = true; } else { @@ -253,19 +251,16 @@ public boolean search(final ContainerUtil.TargetComparator comp) { if (masked != 0) { moffset = highestBit(masked); mv = 64 * m + moffset; - // We already found (j,joffset) to the highest words. If this happens to be - // (i,ioffset) + // We already found (j,joffset) to the highest words. If this happens to be (i,ioffset) // we are done. That check will happen a bit later. } else { do { --m; - // by construction, bitmap[i] != 0 on entry, and i <= m, so this loop will - // end before m < 0. + // by construction, bitmap[i] != 0 on entry, and i <= m, so this loop will end before m < 0. } while (bitmap[m] == 0); moffset = highestBit(bitmap[m]); mv = 64 * m + moffset; - // We already found (j,joffset) to the highest words. If this happens to be - // (i,ioffset) + // We already found (j,joffset) to the highest words. If this happens to be (i,ioffset) // we are done. That check will happen a bit later. } } diff --git a/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/Container.java b/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/Container.java index 8474880d0a0..437058d22e1 100644 --- a/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/Container.java +++ b/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/Container.java @@ -1,8 +1,8 @@ /* * (c) the authors Licensed under the Apache License, Version 2.0. 
* - * The code in this file is a heavily modified version of the original in the RoaringBitmap library; - * please see https://roaringbitmap.org/ + * The code in this file is a heavily modified version of the original in the RoaringBitmap library; please see + * https://roaringbitmap.org/ * */ @@ -15,12 +15,11 @@ */ public abstract class Container { - public static final boolean DEBUG = - Boolean.getBoolean("io.deephaven.db.v2.utils.rsp.container.Container.DEBUG"); + public static final boolean DEBUG = Boolean.getBoolean("io.deephaven.db.v2.utils.rsp.container.Container.DEBUG"); /** - * The maximum possible cardinality of a container, as an int. Also the maximum possible - * exclusive end for a range that can be stored in a container. + * The maximum possible cardinality of a container, as an int. Also the maximum possible exclusive end for a range + * that can be stored in a container. */ public static final int MAX_RANGE = (1 << 16); @@ -53,8 +52,7 @@ static int runsSizeRounding(final int nruns) { return nruns + padding / 4; } - protected static final ThreadLocal threadLocalBuf = - ThreadLocal.withInitial(() -> new short[256 - 12]); + protected static final ThreadLocal threadLocalBuf = ThreadLocal.withInitial(() -> new short[256 - 12]); public final void ifDebugValidate() { if (DEBUG) { @@ -142,8 +140,8 @@ public static Container twoValues(final short v1, final short v2) { } /** - * Returns a new Container containing the two provided ranges. The ranges provided should be - * nonempty, disjoint and provided in appearance order, ie, start2 > start1. + * Returns a new Container containing the two provided ranges. The ranges provided should be nonempty, disjoint and + * provided in appearance order, ie, start2 > start1. * * @param start1 start of first range, inclusive. * @param end1 end of first range, exclusive. @@ -151,24 +149,21 @@ public static Container twoValues(final short v1, final short v2) { * @param end2 end of second range, exclusive. 
* @return A new Container containing the provided ranges. */ - public static Container twoRanges(final int start1, final int end1, final int start2, - final int end2) { + public static Container twoRanges(final int start1, final int end1, final int start2, final int end2) { if (DEBUG) { if (end1 <= start1 || end2 <= start2 || - start1 < 0 || start2 < 0 || - end1 > MAX_RANGE || end2 > MAX_RANGE || - start2 <= end1) { + start1 < 0 || start2 < 0 || + end1 > MAX_RANGE || end2 > MAX_RANGE || + start2 <= end1) { throw new IllegalArgumentException( - "start1=" + start1 + ", end1=" + end1 + ", start2=" + start2 + ", end2=" - + end2); + "start1=" + start1 + ", end1=" + end1 + ", start2=" + start2 + ", end2=" + end2); } } if (smallContainersDisabled()) { return new RunContainer(start1, end1, start2, end2); } if (end1 - start1 == 1 && end2 - start2 == 1) { - return new TwoValuesContainer(ContainerUtil.lowbits(start1), - ContainerUtil.lowbits(start2)); + return new TwoValuesContainer(ContainerUtil.lowbits(start1), ContainerUtil.lowbits(start2)); } return new RunContainer(start1, end1, start2, end2); } @@ -234,8 +229,8 @@ private String myType() { } /** - * Computes the bitwise AND of this container with another (intersection). This container as - * well as the provided container are left unaffected. + * Computes the bitwise AND of this container with another (intersection). This container as well as the provided + * container are left unaffected. * * @param x Another container * @return aggregated container @@ -243,8 +238,8 @@ private String myType() { public abstract Container and(ArrayContainer x); /** - * Computes the bitwise AND of this container with another (intersection). This container as - * well as the provided container are left unaffected. + * Computes the bitwise AND of this container with another (intersection). This container as well as the provided + * container are left unaffected. 
* * @param x Another container * @return aggregated container @@ -252,8 +247,8 @@ private String myType() { public abstract Container and(BitmapContainer x); /** - * Computes the bitwise AND of this container with another (intersection). This container as - * well as the provided container are left unaffected. + * Computes the bitwise AND of this container with another (intersection). This container as well as the provided + * container are left unaffected. * * @param x Another container * @return aggregated container @@ -284,8 +279,8 @@ private Container and(final TwoValuesContainer tv) { } /** - * Computes the bitwise AND of this container with another (intersection). This container as - * well as the provided container are left unaffected. + * Computes the bitwise AND of this container with another (intersection). This container as well as the provided + * container are left unaffected. * * @param x Another container * @return aggregated container @@ -317,8 +312,8 @@ public Container and(final Container x) { /** - * Calculate the intersection of this container and a range, in a new container. The existing - * container is not modified. + * Calculate the intersection of this container and a range, in a new container. The existing container is not + * modified. * * @param start start of range * @param end end of range, exclusive. @@ -327,8 +322,8 @@ public Container and(final Container x) { public abstract Container andRange(int start, int end); /** - * Calculate the intersection of this container and a range; may overwrite the existing - * container or return a new one. + * Calculate the intersection of this container and a range; may overwrite the existing container or return a new + * one. * * @param start start of range * @param end end of range, exclusive. @@ -337,8 +332,8 @@ public Container and(final Container x) { public abstract Container iandRange(int start, int end); /** - * Computes the bitwise ANDNOT of this container with another (difference). 
This container as - * well as the provided container are left unaffected. + * Computes the bitwise ANDNOT of this container with another (difference). This container as well as the provided + * container are left unaffected. * * @param x Another container * @return aggregated container @@ -346,8 +341,8 @@ public Container and(final Container x) { public abstract Container andNot(ArrayContainer x); /** - * Computes the bitwise ANDNOT of this container with another (difference). This container as - * well as the provided container are left unaffected. + * Computes the bitwise ANDNOT of this container with another (difference). This container as well as the provided + * container are left unaffected. * * @param x Another container * @return aggregated container @@ -355,8 +350,8 @@ public Container and(final Container x) { public abstract Container andNot(BitmapContainer x); /** - * Computes the bitwise ANDNOT of this container with another (difference). This container as - * well as the provided container are left unaffected. + * Computes the bitwise ANDNOT of this container with another (difference). This container as well as the provided + * container are left unaffected. * * @param x Another container * @return aggregated container @@ -377,8 +372,8 @@ private Container andNot(final TwoValuesContainer tv) { } /** - * Computes the bitwise ANDNOT of this container with another (difference). This container as - * well as the provided container are left unaffected. + * Computes the bitwise ANDNOT of this container with another (difference). This container as well as the provided + * container are left unaffected. * * @param x Another container * @return aggregated container @@ -419,11 +414,11 @@ public Container andNot(final Container x) { public abstract Container deepCopy(); /** - * Get a shared, copy-on-write copy of an existing container. Mutations on the returned - * container will always return a copy and leave the original container unchanged. 
+ * Get a shared, copy-on-write copy of an existing container. Mutations on the returned container will always return + * a copy and leave the original container unchanged. *

    - * This operation allows for cheap read-only references to the same values, at the cost of an - * additional copy for any first mutation. + * This operation allows for cheap read-only references to the same values, at the cost of an additional copy for + * any first mutation. * * @return A copy-on-write reference to the container. */ @@ -437,16 +432,16 @@ public Container andNot(final Container x) { public abstract boolean isEmpty(); /** - * Checks whether the container spans the full 2^16 range (ie, contains every short value) This - * is an O(1) operation in all container types (some do not cache cardinality). + * Checks whether the container spans the full 2^16 range (ie, contains every short value) This is an O(1) operation + * in all container types (some do not cache cardinality). * * @return true if the container does not miss any single short value. */ public abstract boolean isAllOnes(); /** - * Checks whether the container has exactly one element (meaningful since cardinality may not be - * cached in some Container types, eg, Run). + * Checks whether the container has exactly one element (meaningful since cardinality may not be cached in some + * Container types, eg, Run). * * @return true if the container contains exactly one element, false otherwise. */ @@ -455,11 +450,11 @@ public boolean isSingleElement() { } /** - * Checks whether the container spans the full 2^16 range (ie, contains every short value) This - * is an O(1) operation in all container types (some do not cache cardinality). + * Checks whether the container spans the full 2^16 range (ie, contains every short value) This is an O(1) operation + * in all container types (some do not cache cardinality). * - * @return true if the container does not miss any single short value. This method is - * deprecated, prefer isAllOnes instead. + * @return true if the container does not miss any single short value. This method is deprecated, prefer isAllOnes + * instead. 
*/ @Deprecated public final boolean isFull() { @@ -527,8 +522,7 @@ public boolean contains(final Container subset) { } /** - * Add a short to the container if it is not present, otherwise remove it. May generate a new - * container. + * Add a short to the container if it is not present, otherwise remove it. May generate a new container. * * @param x short to be added * @return the new container @@ -536,8 +530,7 @@ public boolean contains(final Container subset) { public abstract Container iflip(short x); /** - * Computes the distinct number of short values in the container. Can be expected to run in - * constant time. + * Computes the distinct number of short values in the container. Can be expected to run in constant time. * * @return the cardinality */ @@ -580,8 +573,7 @@ public String getContainerName() { "empty", "singleton", "singlerange", "twovalues", "array", "bitmap", "run"}; /** - * Iterate through the values of this container in order and pass them along to the - * ShortConsumer. + * Iterate through the values of this container in order and pass them along to the ShortConsumer. * * @param sc a shortConsumer * @return false if the consumer returned false at some point, true otherwise. @@ -622,8 +614,7 @@ public String getContainerName() { public abstract ContainerShortBatchIterator getShortBatchIterator(int skipFromStartCount); /** - * Iterator to visit the short values in container in [start, end) ranges, in increasing order - * of start values. + * Iterator to visit the short values in container in [start, end) ranges, in increasing order of start values. * * @return iterator */ @@ -639,9 +630,8 @@ public String getContainerName() { public abstract Container iadd(int begin, int end); /** - * Add all shorts in [begin,end) using an unsigned interpretation. May generate a new container. - * The beginning of the range should be strictly greater than the last value already present in - * the container, if there is one. 
+ * Add all shorts in [begin,end) using an unsigned interpretation. May generate a new container. The beginning of + * the range should be strictly greater than the last value already present in the container, if there is one. * * @param begin start of range (inclusive) * @param end end of range (exclusive) @@ -650,9 +640,8 @@ public String getContainerName() { public abstract Container iappend(int begin, int end); /** - * Computes the in-place bitwise AND of this container with another (intersection). The current - * container is generally modified, whereas the provided container (x) is unaffected. May - * generate a new container. + * Computes the in-place bitwise AND of this container with another (intersection). The current container is + * generally modified, whereas the provided container (x) is unaffected. May generate a new container. * * @param x Another container * @return aggregated container @@ -661,9 +650,8 @@ public String getContainerName() { /** - * Computes the in-place bitwise AND of this container with another (intersection). The current - * container is generally modified, whereas the provided container (x) is unaffected. May - * generate a new container. + * Computes the in-place bitwise AND of this container with another (intersection). The current container is + * generally modified, whereas the provided container (x) is unaffected. May generate a new container. * * @param x Another container * @return aggregated container @@ -671,9 +659,8 @@ public String getContainerName() { public abstract Container iand(BitmapContainer x); /** - * Computes the in-place bitwise AND of this container with another (intersection). The current - * container is generally modified, whereas the provided container (x) is unaffected. May - * generate a new container. + * Computes the in-place bitwise AND of this container with another (intersection). The current container is + * generally modified, whereas the provided container (x) is unaffected. 
May generate a new container. * * @param x Another container * @return aggregated container @@ -685,9 +672,8 @@ private Container iand(final SingleRangeContainer sr) { } /** - * Computes the in-place bitwise AND of this container with another (intersection). The current - * container is generally modified, whereas the provided container (x) is unaffected. May - * generate a new container. + * Computes the in-place bitwise AND of this container with another (intersection). The current container is + * generally modified, whereas the provided container (x) is unaffected. May generate a new container. * * @param x Another container * @return aggregated container @@ -718,9 +704,8 @@ public Container iand(final Container x) { } /** - * Computes the in-place bitwise ANDNOT of this container with another (difference). The current - * container is generally modified, whereas the provided container (x) is unaffected. May - * generate a new container. + * Computes the in-place bitwise ANDNOT of this container with another (difference). The current container is + * generally modified, whereas the provided container (x) is unaffected. May generate a new container. * * @param x Another container * @return aggregated container @@ -729,9 +714,8 @@ public Container iand(final Container x) { /** - * Computes the in-place bitwise ANDNOT of this container with another (difference). The current - * container is generally modified, whereas the provided container (x) is unaffected. May - * generate a new container. + * Computes the in-place bitwise ANDNOT of this container with another (difference). The current container is + * generally modified, whereas the provided container (x) is unaffected. May generate a new container. 
* * @param x Another container * @return aggregated container @@ -739,9 +723,8 @@ public Container iand(final Container x) { public abstract Container iandNot(BitmapContainer x); /** - * Computes the in-place bitwise ANDNOT of this container with another (difference). The current - * container is generally modified, whereas the provided container (x) is unaffected. May - * generate a new container. + * Computes the in-place bitwise ANDNOT of this container with another (difference). The current container is + * generally modified, whereas the provided container (x) is unaffected. May generate a new container. * * @param x Another container * @return aggregated container @@ -762,9 +745,8 @@ private Container iandNot(final TwoValuesContainer tv) { } /** - * Computes the in-place bitwise ANDNOT of this container with another (difference). The current - * container is generally modified, whereas the provided container (x) is unaffected. May - * generate a new container. + * Computes the in-place bitwise ANDNOT of this container with another (difference). The current container is + * generally modified, whereas the provided container (x) is unaffected. May generate a new container. * * @param x Another container * @return aggregated container @@ -798,9 +780,8 @@ public Container iandNot(final Container x) { } /** - * Computes the in-place bitwise NOT of this container (complement). Only those bits within the - * range are affected. The current container is generally modified. May generate a new - * container. + * Computes the in-place bitwise NOT of this container (complement). Only those bits within the range are affected. + * The current container is generally modified. May generate a new container. * * @param rangeStart beginning of range (inclusive); 0 is beginning of this container. 
* @param rangeEnd ending of range (exclusive) @@ -809,9 +790,8 @@ public Container iandNot(final Container x) { public abstract Container inot(int rangeStart, int rangeEnd); /** - * Computes the in-place bitwise OR of this container with another (union). The current - * container is generally modified, whereas the provided container (x) is unaffected. May - * generate a new container. + * Computes the in-place bitwise OR of this container with another (union). The current container is generally + * modified, whereas the provided container (x) is unaffected. May generate a new container. * * @param x Another container * @return aggregated container @@ -819,9 +799,8 @@ public Container iandNot(final Container x) { public abstract Container ior(ArrayContainer x); /** - * Computes the in-place bitwise OR of this container with another (union). The current - * container is generally modified, whereas the provided container (x) is unaffected. May - * generate a new container. + * Computes the in-place bitwise OR of this container with another (union). The current container is generally + * modified, whereas the provided container (x) is unaffected. May generate a new container. * * @param x Another container * @return aggregated container @@ -829,9 +808,8 @@ public Container iandNot(final Container x) { public abstract Container ior(BitmapContainer x); /** - * Computes the in-place bitwise OR of this container with another (union). The current - * container is generally modified, whereas the provided container (x) is unaffected. May - * generate a new container. + * Computes the in-place bitwise OR of this container with another (union). The current container is generally + * modified, whereas the provided container (x) is unaffected. May generate a new container. 
* * @param x Another container * @return aggregated container @@ -853,9 +831,8 @@ private Container ior(final TwoValuesContainer tv) { } /** - * Computes the in-place bitwise OR of this container with another (union). The current - * container is generally modified, whereas the provided container (x) is unaffected. May - * generate a new container. + * Computes the in-place bitwise OR of this container with another (union). The current container is generally + * modified, whereas the provided container (x) is unaffected. May generate a new container. * * @param x Another container * @return aggregated container @@ -901,9 +878,8 @@ public Container ior(final Container x) { public abstract Container iremove(int begin, int end); /** - * Computes the in-place bitwise XOR of this container with another (symmetric difference). The - * current container is generally modified, whereas the provided container (x) is unaffected. - * May generate a new container. + * Computes the in-place bitwise XOR of this container with another (symmetric difference). The current container is + * generally modified, whereas the provided container (x) is unaffected. May generate a new container. * * @param x Another container * @return aggregated container @@ -911,9 +887,8 @@ public Container ior(final Container x) { public abstract Container ixor(ArrayContainer x); /** - * Computes the in-place bitwise XOR of this container with another (symmetric difference). The - * current container is generally modified, whereas the provided container (x) is unaffected. - * May generate a new container. + * Computes the in-place bitwise XOR of this container with another (symmetric difference). The current container is + * generally modified, whereas the provided container (x) is unaffected. May generate a new container. 
* * @param x Another container * @return aggregated container @@ -921,9 +896,8 @@ public Container ior(final Container x) { public abstract Container ixor(BitmapContainer x); /** - * Computes the in-place bitwise XOR of this container with another (symmetric difference). The - * current container is generally modified, whereas the provided container (x) is unaffected. - * May generate a new container. + * Computes the in-place bitwise XOR of this container with another (symmetric difference). The current container is + * generally modified, whereas the provided container (x) is unaffected. May generate a new container. * * @param x Another container * @return aggregated container @@ -943,9 +917,8 @@ private Container ixor(final TwoValuesContainer tv) { } /** - * Computes the in-place bitwise XOR of this container with another. The current container is - * generally modified, whereas the provided container (x) is unaffected. May generate a new - * container. + * Computes the in-place bitwise XOR of this container with another. The current container is generally modified, + * whereas the provided container (x) is unaffected. May generate a new container. * * @param x Another container * @return xor result as a new container reference @@ -979,9 +952,8 @@ public Container ixor(final Container x) { } /** - * Computes the bitwise NOT of this container (complement). Only those bits within the range are - * affected. This is equivalent to an xor with a range of ones for the given range. The current - * container is left unaffected. + * Computes the bitwise NOT of this container (complement). Only those bits within the range are affected. This is + * equivalent to an xor with a range of ones for the given range. The current container is left unaffected. * * @param rangeStart beginning of range (inclusive); 0 is beginning of this container. 
* @param rangeEnd ending of range (exclusive) @@ -1002,8 +974,8 @@ public int numberOfRanges() { } /** - * Computes the bitwise OR of this container with another (union). This container as well as the - * provided container are left unaffected. + * Computes the bitwise OR of this container with another (union). This container as well as the provided container + * are left unaffected. * * @param x Another container * @return aggregated container @@ -1011,8 +983,8 @@ public int numberOfRanges() { public abstract Container or(ArrayContainer x); /** - * Computes the bitwise OR of this container with another (union). This container as well as the - * provided container are left unaffected. + * Computes the bitwise OR of this container with another (union). This container as well as the provided container + * are left unaffected. * * @param x Another container * @return aggregated container @@ -1020,8 +992,8 @@ public int numberOfRanges() { public abstract Container or(BitmapContainer x); /** - * Computes the bitwise OR of this container with another (union). This container as well as the - * provided container are left unaffected. + * Computes the bitwise OR of this container with another (union). This container as well as the provided container + * are left unaffected. * * @param x Another container * @return aggregated container @@ -1043,8 +1015,8 @@ private Container or(final TwoValuesContainer tv) { } /** - * Computes the bitwise OR of this container with another (union). This container as well as the - * provided container are left unaffected. + * Computes the bitwise OR of this container with another (union). This container as well as the provided container + * are left unaffected. * * @param x Another container * @return aggregated container @@ -1081,8 +1053,7 @@ public Container or(final Container x) { } /** - * Rank returns the number of integers that are smaller or equal to x (Rank(infinity) would be - * GetCardinality()). 
+ * Rank returns the number of integers that are smaller or equal to x (Rank(infinity) would be GetCardinality()). * * @param lowbits upper limit * @return the rank @@ -1090,8 +1061,7 @@ public Container or(final Container x) { public abstract int rank(short lowbits); /** - * Return a new container with all shorts in [begin,end) remove using an unsigned - * interpretation. + * Return a new container with all shorts in [begin,end) remove using an unsigned interpretation. * * @param begin start of range (inclusive) * @param end end of range (exclusive) @@ -1100,8 +1070,8 @@ public Container or(final Container x) { public abstract Container remove(int begin, int end); /** - * Remove the short from this container. May create a new container. Note this legacy method - * does not respect the naming convention of an i prefix for inplace operations; prefer iunset. + * Remove the short from this container. May create a new container. Note this legacy method does not respect the + * naming convention of an i prefix for inplace operations; prefer iunset. * * @param x to be removed * @return resulting container. @@ -1128,9 +1098,8 @@ public final Container remove(short x) { public abstract Container iunset(short x); /** - * Convert to RunContainers, when the result is smaller. Overridden by RunContainer to - * possibility switch from RunContainer to a smaller alternative. Overridden by BitmapContainer - * with a more efficient approach. + * Convert to RunContainers, when the result is smaller. Overridden by RunContainer to possibility switch from + * RunContainer to a smaller alternative. Overridden by BitmapContainer with a more efficient approach. * * @return the new container */ @@ -1157,14 +1126,11 @@ public final Container remove(short x) { * Searches for the specified short value * * @param x value to search for - * @return Relative position of the value in the sorted set of elements in this container, in - * the range [0 .. cardinality - 1]. 
If not present, (-(insertion point) - 1) similar to - * Array.binarySearch. + * @return Relative position of the value in the sorted set of elements in this container, in the range [0 .. + * cardinality - 1]. If not present, (-(insertion point) - 1) similar to Array.binarySearch. *

    - * For values of x that - * {@link io.deephaven.db.v2.utils.rsp.container.Container#contains} returns true, this - * method returns the same value as - * {@link io.deephaven.db.v2.utils.rsp.container.Container#rank}. + * For values of x that {@link io.deephaven.db.v2.utils.rsp.container.Container#contains} returns true, this + * method returns the same value as {@link io.deephaven.db.v2.utils.rsp.container.Container#rank}. */ public abstract int find(short x); @@ -1180,14 +1146,11 @@ public final Container remove(short x) { * As find but for all the values in a range. * * @param outPositions accept is called in this consumer for each resulting position range. - * @param inValues input iterator that provides the key ranges; these must each exist in the - * container. - * @param maxPos maximum position to add to outPositions; values of position > maxPos are not - * added. + * @param inValues input iterator that provides the key ranges; these must each exist in the container. + * @param maxPos maximum position to add to outPositions; values of position > maxPos are not added. * @return true if maxPos was reached, false otherwise. */ - public abstract boolean findRanges(RangeConsumer outPositions, RangeIterator inValues, - int maxPos); + public abstract boolean findRanges(RangeConsumer outPositions, RangeIterator inValues, int maxPos); /** * If possible, recover wasted memory. @@ -1195,8 +1158,8 @@ public abstract boolean findRanges(RangeConsumer outPositions, RangeIterator inV public abstract void trim(); /** - * Computes the bitwise XOR of this container with another (symmetric difference). This - * container as well as the provided container are left unaffected. + * Computes the bitwise XOR of this container with another (symmetric difference). This container as well as the + * provided container are left unaffected. 
* * @param x Another container * @return aggregated container @@ -1204,8 +1167,8 @@ public abstract boolean findRanges(RangeConsumer outPositions, RangeIterator inV public abstract Container xor(ArrayContainer x); /** - * Computes the bitwise XOR of this container with another (symmetric difference). This - * container as well as the provided container are left unaffected. + * Computes the bitwise XOR of this container with another (symmetric difference). This container as well as the + * provided container are left unaffected. * * @param x Another container * @return aggregated container @@ -1213,8 +1176,8 @@ public abstract boolean findRanges(RangeConsumer outPositions, RangeIterator inV public abstract Container xor(BitmapContainer x); /** - * Computes the bitwise XOR of this container with another (symmetric difference). This - * container as well as the provided container are left unaffected. + * Computes the bitwise XOR of this container with another (symmetric difference). This container as well as the + * provided container are left unaffected. * * @param x Another container * @return aggregated container @@ -1234,8 +1197,8 @@ private Container xor(final TwoValuesContainer tv) { } /** - * Computes the bitwise OR of this container with another (symmetric difference). This container - * as well as the provided container are left unaffected. + * Computes the bitwise OR of this container with another (symmetric difference). This container as well as the + * provided container are left unaffected. * * @param x other parameter * @return aggregated container @@ -1272,12 +1235,12 @@ public Container xor(final Container x) { } /** - * Convert the current container to a BitmapContainer, if a conversion is needed. If the - * container is already a bitmap, the container is returned unchanged. + * Convert the current container to a BitmapContainer, if a conversion is needed. If the container is already a + * bitmap, the container is returned unchanged. *

    - * When multiple container "merge" operations are done it might be more efficient to convert to - * bitmap first, and then at the end convert to the efficient container type, to avoid multiple - * container type conversions, since bitmap can always stay a bitmap. + * When multiple container "merge" operations are done it might be more efficient to convert to bitmap first, and + * then at the end convert to the efficient container type, to avoid multiple container type conversions, since + * bitmap can always stay a bitmap. * * @return a bitmap container */ @@ -1464,104 +1427,95 @@ public boolean overlaps(final Container x) { } /* - * Instruct this container to never modify itself with mutation operations, and instead always - * return a new container. + * Instruct this container to never modify itself with mutation operations, and instead always return a new + * container. */ public abstract void setCopyOnWrite(); /** - * @return The allocated size in bytes of the underlying array backing store used by this - * container. + * @return The allocated size in bytes of the underlying array backing store used by this container. */ public abstract int bytesAllocated(); /** - * @return The size in bytes of the used portion out of the total allocated bytes for the - * underlying array backing store used by this container. + * @return The size in bytes of the used portion out of the total allocated bytes for the underlying array backing + * store used by this container. */ @SuppressWarnings("unused") public abstract int bytesUsed(); /** - * Insert a value in the current container. May modify the existing container or return a new - * one. If positionHint is greater or equal than zero, it is taken to be a container-specific - * position hint to help speed up the insertion; this can be obtained from previous calls to any - * method taking a hint to help speedup a sequence of operations done in increasing value order. 
- * Before returning, the method stores in positionHint a valid value for a subsequent calls to - * methods taking a hint, which can be used on the returned container for a value greater than + * Insert a value in the current container. May modify the existing container or return a new one. If positionHint + * is greater or equal than zero, it is taken to be a container-specific position hint to help speed up the + * insertion; this can be obtained from previous calls to any method taking a hint to help speedup a sequence of + * operations done in increasing value order. Before returning, the method stores in positionHint a valid value for + * a subsequent calls to methods taking a hint, which can be used on the returned container for a value greater than * the one provided in this call. * * @param x the value to insert - * @param positionHint a position hint to speed up insertion specific to a container, returned - * from a previous call to a hint taking method, or -1 if none available. Updated to a - * valid hint for a subsequent call to a hint taking method on the returned container; if - * that subsequent call uses the hint it should be for an argument bigger than x provided - * in this call. - * @return A container with the value to be inserted added; this container may or may not be a - * modification on the object on which the call was performed; the value in positionHint - * after return would be valid on the returned container. + * @param positionHint a position hint to speed up insertion specific to a container, returned from a previous call + * to a hint taking method, or -1 if none available. Updated to a valid hint for a subsequent call to a hint + * taking method on the returned container; if that subsequent call uses the hint it should be for an + * argument bigger than x provided in this call. 
+ * @return A container with the value to be inserted added; this container may or may not be a modification on the + * object on which the call was performed; the value in positionHint after return would be valid on the + * returned container. */ abstract Container iset(short x, PositionHint positionHint); /** - * Return a new container container everything in the existing container plus the provided - * value; does not modify the existing container. If positionHint is greater or equal than zero, - * it is taken to be a container-specific position hint to help speed up the insertion; this can - * be obtained from previous calls to any method taking a hint to help speedup a sequence of - * operations done in increasing value order. Before returning, the method stores in - * positionHint a valid value for a subsequent calls to methods taking a hint, which can be used - * on the returned container for a value greater than the one provided in this call. + * Return a new container container everything in the existing container plus the provided value; does not modify + * the existing container. If positionHint is greater or equal than zero, it is taken to be a container-specific + * position hint to help speed up the insertion; this can be obtained from previous calls to any method taking a + * hint to help speedup a sequence of operations done in increasing value order. Before returning, the method stores + * in positionHint a valid value for a subsequent calls to methods taking a hint, which can be used on the returned + * container for a value greater than the one provided in this call. * * @param x the value to insert - * @param positionHint a position hint to speed up insertion specific to a container, returned - * from a previous call to a hint taking method, or -1 if none available. 
Updated to a - * valid hint for a subsequent call to a hint taking method on the returned container; if - * that subsequent call uses the hint it should be for an argument bigger than x provided - * in this call. - * @return A new container with the value to be inserted added; the value in positionHint after - * return would be valid on the returned container. + * @param positionHint a position hint to speed up insertion specific to a container, returned from a previous call + * to a hint taking method, or -1 if none available. Updated to a valid hint for a subsequent call to a hint + * taking method on the returned container; if that subsequent call uses the hint it should be for an + * argument bigger than x provided in this call. + * @return A new container with the value to be inserted added; the value in positionHint after return would be + * valid on the returned container. */ abstract Container set(short x, PositionHint positionHint); /** - * Remove a value in the current container. May modify the existing container or return a new - * one. If positionHint is greater or equal than zero, it is taken to be a container-specific - * position hint to help speed up the removal; this can be obtained from previous calls to any - * method taking a hint to help speedup a sequence of operations done in increasing value order. - * Before returning, the method stores in positionHint a valid value for a subsequent calls to - * methods taking a hint, which can be used on the returned container for a value greater than + * Remove a value in the current container. May modify the existing container or return a new one. If positionHint + * is greater or equal than zero, it is taken to be a container-specific position hint to help speed up the removal; + * this can be obtained from previous calls to any method taking a hint to help speedup a sequence of operations + * done in increasing value order. 
Before returning, the method stores in positionHint a valid value for a + * subsequent calls to methods taking a hint, which can be used on the returned container for a value greater than * the one provided in this call. * * @param x the value to remove - * @param positionHint a position hint to speed up removal specific to a container, returned - * from a previous call to a hint taking method, or -1 if none available. Updated to a - * valid hint for a subsequent call to a hint taking method on the returned container; if - * that subsequent call uses the hint it should be for an argument bigger than x provided - * in this call. - * @return A container with the value removed; this container may or may not be a modification - * on the object on which the call was performed; the value in positionHint after return - * would be valid on the returned container. + * @param positionHint a position hint to speed up removal specific to a container, returned from a previous call to + * a hint taking method, or -1 if none available. Updated to a valid hint for a subsequent call to a hint + * taking method on the returned container; if that subsequent call uses the hint it should be for an + * argument bigger than x provided in this call. + * @return A container with the value removed; this container may or may not be a modification on the object on + * which the call was performed; the value in positionHint after return would be valid on the returned + * container. */ abstract Container iunset(short x, PositionHint positionHint); /** - * Return a new container with every value in the existing container except for the provided - * argument; the existing container is not modified. If positionHint is greater or equal than - * zero, it is taken to be a container-specific position hint to help speed up the removal; this - * can be obtained from previous calls to any method taking a hint to help speedup a sequence of - * operations done in increasing value order. 
Before returning, the method stores in - * positionHint a valid value for a subsequent calls to methods taking a hint, which can be used - * on the returned container for a value greater than the one provided in this call. + * Return a new container with every value in the existing container except for the provided argument; the existing + * container is not modified. If positionHint is greater or equal than zero, it is taken to be a container-specific + * position hint to help speed up the removal; this can be obtained from previous calls to any method taking a hint + * to help speedup a sequence of operations done in increasing value order. Before returning, the method stores in + * positionHint a valid value for a subsequent calls to methods taking a hint, which can be used on the returned + * container for a value greater than the one provided in this call. * * @param x the value to remove - * @param positionHint a position hint to speed up removal specific to a container, returned - * from a previous call to a hint taking method, or -1 if none available. Updated to a - * valid hint for a subsequent call to a hint taking method on the returned container; if - * that subsequent call uses the hint it should be for an argument bigger than x provided - * in this call. - * @return A new container with the value removed; this container may or may not be a - * modification on the object on which the call was performed; the value in positionHint - * after return would be valid on the returned container. + * @param positionHint a position hint to speed up removal specific to a container, returned from a previous call to + * a hint taking method, or -1 if none available. Updated to a valid hint for a subsequent call to a hint + * taking method on the returned container; if that subsequent call uses the hint it should be for an + * argument bigger than x provided in this call. 
+ * @return A new container with the value removed; this container may or may not be a modification on the object on + * which the call was performed; the value in positionHint after return would be valid on the returned + * container. */ abstract Container unset(short x, PositionHint positionHint); diff --git a/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/ContainerShortBatchIterator.java b/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/ContainerShortBatchIterator.java index 93c00a3cdad..0ec98b7d713 100644 --- a/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/ContainerShortBatchIterator.java +++ b/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/ContainerShortBatchIterator.java @@ -19,10 +19,9 @@ public interface ContainerShortBatchIterator { boolean hasNext(); /** - * Starting from the next iterator position (if any), feed values to the consumer until it - * returns false. After each value is consumed, the current iterator position is moving forward; - * eg, a call to forEach that consumes 4 elements effectively works as if next was called 4 - * times. + * Starting from the next iterator position (if any), feed values to the consumer until it returns false. After each + * value is consumed, the current iterator position is moving forward; eg, a call to forEach that consumes 4 + * elements effectively works as if next was called 4 times. * * @param sc a ShortConsumer to feed values to. * @return false if the processing was stopped by the consumer returning false, true otherwise. 
diff --git a/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/ContainerUtil.java b/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/ContainerUtil.java index 06155327471..9b4c5bc2696 100644 --- a/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/ContainerUtil.java +++ b/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/ContainerUtil.java @@ -1,8 +1,8 @@ /* * (c) the authors Licensed under the Apache License, Version 2.0. * - * The code in this file is a heavily modified version of the original in the RoaringBitmap library; - * please see https://roaringbitmap.org/ + * The code in this file is a heavily modified version of the original in the RoaringBitmap library; please see + * https://roaringbitmap.org/ * */ @@ -18,24 +18,23 @@ public final class ContainerUtil { /** - * optimization flag: whether to use hybrid binary search: hybrid formats combine a binary - * search with a sequential search + * optimization flag: whether to use hybrid binary search: hybrid formats combine a binary search with a sequential + * search */ public static final boolean USE_HYBRID_BINSEARCH = true; /** - * Find the smallest integer larger than pos such that array[pos]>= min. If none can be - * found, return length. Based on code by O. Kaser. + * Find the smallest integer larger than pos such that array[pos]>= min. If none can be found, return length. + * Based on code by O. Kaser. * * @param array array to search within * @param pos starting position of the search * @param length length of the array to search * @param min minimum value - * @return x greater than pos such that array[pos] is at least as large as min, pos is is equal - * to length if it is not possible. + * @return x greater than pos such that array[pos] is at least as large as min, pos is is equal to length if it is + * not possible. 
*/ - public static int advanceUntil(final short[] array, final int pos, final int length, - final short min) { + public static int advanceUntil(final short[] array, final int pos, final int length, final short min) { int lower = pos + 1; // special handling for a possibly common sequential case @@ -47,7 +46,7 @@ public static int advanceUntil(final short[] array, final int pos, final int len // bootstrap an upper limit while (lower + spansize < length - && toIntUnsigned(array[lower + spansize]) < toIntUnsigned(min)) { + && toIntUnsigned(array[lower + spansize]) < toIntUnsigned(min)) { spansize *= 2; // hoping for compiler will reduce to } // shift @@ -90,15 +89,14 @@ && toIntUnsigned(array[lower + spansize]) < toIntUnsigned(min)) { } /** - * Find the smallest integer larger than pos such that array[pos]>= min. If none can be - * found, return length. + * Find the smallest integer larger than pos such that array[pos]>= min. If none can be found, return length. * * @param array array to search within * @param pos starting position of the search * @param length length of the array to search * @param min minimum value - * @return x greater than pos such that array[pos] is at least as large as min, pos is is equal - * to length if it is not possible. + * @return x greater than pos such that array[pos] is at least as large as min, pos is is equal to length if it is + * not possible. 
*/ public static int iterateUntil(final short[] array, int pos, final int length, final int min) { while (pos < length && toIntUnsigned(array[pos]) < min) { @@ -108,7 +106,7 @@ public static int iterateUntil(final short[] array, int pos, final int length, f } protected static int branchyUnsignedBinarySearch(final short[] array, final int begin, - final int end, final short k) { + final int end, final short k) { int ikey = toIntUnsigned(k); // next line accelerates the possibly common case where the value would // be inserted at the end @@ -135,32 +133,29 @@ protected static int branchyUnsignedBinarySearch(final short[] array, final int @FunctionalInterface public interface TargetComparator { /** - * Compare the underlying target to the provided value. Return -1, 0, or 1 if target is less - * than, equal, or greater than the provided value, respectively. + * Compare the underlying target to the provided value. Return -1, 0, or 1 if target is less than, equal, or + * greater than the provided value, respectively. * * @param value - * @return -1 if target is to the left of value (target < value ); 0 if value == target; +1 - * if target is to the right of value (value < target). + * @return -1 if target is to the left of value (target < value ); 0 if value == target; +1 if target is to the + * right of value (value < target). */ int directionFrom(final int value); } /** - * Search for the largest value in array such that comp.directionFrom(value) > 0, or any value - * such that comp.directionFrom(value) == 0, and return its index. If there is no such a value - * return -1. + * Search for the largest value in array such that comp.directionFrom(value) > 0, or any value such that + * comp.directionFrom(value) == 0, and return its index. If there is no such a value return -1. * * @param array Array with values sorted in increasing order. * @param begin Start position in the array for the search. * @param end One past the last position in the array for the search. 
* @param comp A comparator. - * @return -1 if comp.directionFrom(array[begin]) < 0, otherwise the biggest position pos in - * [begin, end - 1] such that comp.directionFrom(array[pos]) > 0 or, if there is a one - * or more positions pos for which comp.directionFrom(array[pos]) == 0, return any of - * them. + * @return -1 if comp.directionFrom(array[begin]) < 0, otherwise the biggest position pos in [begin, end - 1] such + * that comp.directionFrom(array[pos]) > 0 or, if there is a one or more positions pos for which + * comp.directionFrom(array[pos]) == 0, return any of them. */ - public static int search(final short[] array, final int begin, final int end, - final TargetComparator comp) { + public static int search(final short[] array, final int begin, final int end, final TargetComparator comp) { if (end < 1) { return -1; } @@ -208,14 +203,13 @@ public static int search(final short[] array, final int begin, final int end, } /** - * Look for the biggest value of i that satisfies begin <= i < end and comp.directionFrom(i) >= - * 0. + * Look for the biggest value of i that satisfies begin <= i < end and comp.directionFrom(i) >= 0. * * @param begin The beginning of the range (inclusive) * @param end The end of the range (exclusive) * @param comp a TargetComparator. - * @return the last position i inside the provided range that satisfies comp.directionFrom(i) >= - * 0, or -1 if none does. + * @return the last position i inside the provided range that satisfies comp.directionFrom(i) >= 0, or -1 if none + * does. */ public static int rangeSearch(final int begin, final int end, final TargetComparator comp) { if (begin >= end) { @@ -251,13 +245,13 @@ public static int rangeSearch(final int begin, final int end, final TargetCompar } /** - * Compares the two specified {@code short} values, treating them as unsigned values between - * {@code 0} and {@code 2^16 - 1} inclusive. 
+ * Compares the two specified {@code short} values, treating them as unsigned values between {@code 0} and + * {@code 2^16 - 1} inclusive. * * @param a the first unsigned {@code short} to compare * @param b the second unsigned {@code short} to compare - * @return a negative value if {@code a} is less than {@code b}; a positive value if {@code a} - * is greater than {@code b}; or zero if they are equal + * @return a negative value if {@code a} is less than {@code b}; a positive value if {@code a} is greater than + * {@code b}; or zero if they are equal */ public static int compareUnsigned(final short a, final short b) { return toIntUnsigned(a) - toIntUnsigned(b); @@ -271,7 +265,7 @@ public static int compareUnsigned(final short a, final short b) { * @param bitmap2 second bitmap */ public static void fillArrayAND(final short[] container, final long[] bitmap1, - final long[] bitmap2) { + final long[] bitmap2) { int pos = 0; if (bitmap1.length != bitmap2.length) { throw new IllegalArgumentException("not supported"); @@ -293,7 +287,7 @@ public static void fillArrayAND(final short[] container, final long[] bitmap1, * @param bitmap2 second bitmap */ public static void fillArrayANDNOT(final short[] container, final long[] bitmap1, - final long[] bitmap2) { + final long[] bitmap2) { int pos = 0; if (bitmap1.length != bitmap2.length) { throw new IllegalArgumentException("not supported"); @@ -315,7 +309,7 @@ public static void fillArrayANDNOT(final short[] container, final long[] bitmap1 * @param bitmap2 second bitmap */ public static void fillArrayXOR(final short[] container, final long[] bitmap1, - final long[] bitmap2) { + final long[] bitmap2) { int pos = 0; if (bitmap1.length != bitmap2.length) { throw new IllegalArgumentException("not supported"); @@ -351,9 +345,8 @@ public static void flipBitmapRange(final long[] bitmap, final int start, final i /** - * Hamming weight of the 64-bit words involved in the range start, start+1,..., end-1, that is, - * it will compute the 
cardinality of the bitset from index (floor(start/64) to - * floor((end-1)/64)) inclusively. + * Hamming weight of the 64-bit words involved in the range start, start+1,..., end-1, that is, it will compute the + * cardinality of the bitset from index (floor(start/64) to floor((end-1)/64)) inclusively. * * @param bitmap array of words representing a bitset * @param start first index (inclusive) @@ -361,8 +354,7 @@ public static void flipBitmapRange(final long[] bitmap, final int start, final i * @return the hamming weight of the corresponding words */ @Deprecated - public static int cardinalityInBitmapWordRange(final long[] bitmap, final int start, - final int end) { + public static int cardinalityInBitmapWordRange(final long[] bitmap, final int start, final int end) { if (start >= end) { return 0; } @@ -384,8 +376,7 @@ public static int cardinalityInBitmapWordRange(final long[] bitmap, final int st * @param end last index (exclusive) * @return the hamming weight of the corresponding range */ - public static int cardinalityInBitmapRange(final long[] bitmap, final int start, - final int end) { + public static int cardinalityInBitmapRange(final long[] bitmap, final int start, final int end) { if (start >= end) { return 0; } @@ -404,7 +395,7 @@ public static int cardinalityInBitmapRange(final long[] bitmap, final int start, // starts with binary search and finishes with a sequential search protected static int hybridUnsignedBinarySearch(final short[] array, final int begin, - final int end, final short k) { + final int end, final short k) { int ikey = toIntUnsigned(k); // next line accelerates the possibly common case where the value would // be inserted at the end @@ -559,8 +550,7 @@ public static void setBitmapRange(final long[] bitmap, final int start, final in * @return cardinality change */ @Deprecated - public static int setBitmapRangeAndCardinalityChange(final long[] bitmap, final int start, - final int end) { + public static int 
setBitmapRangeAndCardinalityChange(final long[] bitmap, final int start, final int end) { int cardbefore = cardinalityInBitmapWordRange(bitmap, start, end); setBitmapRange(bitmap, start, end); int cardafter = cardinalityInBitmapWordRange(bitmap, start, end); @@ -577,8 +567,7 @@ public static int setBitmapRangeAndCardinalityChange(final long[] bitmap, final * @return cardinality change */ @Deprecated - public static int flipBitmapRangeAndCardinalityChange(final long[] bitmap, final int start, - final int end) { + public static int flipBitmapRangeAndCardinalityChange(final long[] bitmap, final int start, final int end) { int cardbefore = cardinalityInBitmapWordRange(bitmap, start, end); flipBitmapRange(bitmap, start, end); int cardafter = cardinalityInBitmapWordRange(bitmap, start, end); @@ -595,8 +584,7 @@ public static int flipBitmapRangeAndCardinalityChange(final long[] bitmap, final * @return cardinality change */ @Deprecated - public static int resetBitmapRangeAndCardinalityChange(final long[] bitmap, final int start, - final int end) { + public static int resetBitmapRangeAndCardinalityChange(final long[] bitmap, final int start, final int end) { int cardbefore = cardinalityInBitmapWordRange(bitmap, start, end); resetBitmapRange(bitmap, start, end); int cardafter = cardinalityInBitmapWordRange(bitmap, start, end); @@ -608,9 +596,9 @@ protected static int toIntUnsigned(final short x) { } /** - * Look for value k in array in the range [begin,end). If the value is found, return its index. - * If not, return -(i+1) where i is the index where the value would be inserted. The array is - * assumed to contain sorted values where shorts are interpreted as unsigned integers. + * Look for value k in array in the range [begin,end). If the value is found, return its index. If not, return + * -(i+1) where i is the index where the value would be inserted. The array is assumed to contain sorted values + * where shorts are interpreted as unsigned integers. 
* * @param array array where we search * @param begin first index (inclusive) @@ -619,7 +607,7 @@ protected static int toIntUnsigned(final short x) { * @return count */ public static int unsignedBinarySearch(final short[] array, final int begin, final int end, - final short k) { + final short k) { if (USE_HYBRID_BINSEARCH) { return hybridUnsignedBinarySearch(array, begin, end, k); } else { @@ -628,8 +616,7 @@ public static int unsignedBinarySearch(final short[] array, final int begin, fin } /** - * Compute the difference between two sorted lists and write the result to the provided output - * array + * Compute the difference between two sorted lists and write the result to the provided output array * * @param set1 first array * @param length1 length of first array @@ -639,7 +626,7 @@ public static int unsignedBinarySearch(final short[] array, final int begin, fin * @return cardinality of the difference */ public static int unsignedDifference(final short[] set1, final int length1, final short[] set2, - final int length2, final short[] buffer) { + final int length2, final short[] buffer) { int pos = 0; int k1 = 0, k2 = 0; if (0 == length2) { @@ -684,8 +671,7 @@ public static int unsignedDifference(final short[] set1, final int length1, fina } /** - * Compute the difference between two sorted lists and write the result to the provided output - * array + * Compute the difference between two sorted lists and write the result to the provided output array * * @param set1 first array * @param set2 second array @@ -693,7 +679,7 @@ public static int unsignedDifference(final short[] set1, final int length1, fina * @return cardinality of the difference */ public static int unsignedDifference(final ShortIterator set1, final ShortIterator set2, - final short[] buffer) { + final short[] buffer) { int pos = 0; if (!set2.hasNext()) { while (set1.hasNext()) { @@ -740,8 +726,7 @@ public static int unsignedDifference(final ShortIterator set1, final ShortIterat } /** - * Compute the 
exclusive union of two sorted lists and write the result to the provided output - * array + * Compute the exclusive union of two sorted lists and write the result to the provided output array * * @param set1 first array * @param length1 length of first array @@ -751,7 +736,7 @@ public static int unsignedDifference(final ShortIterator set1, final ShortIterat * @return cardinality of the exclusive union */ public static int unsignedExclusiveUnion2by2(final short[] set1, final int length1, - final short[] set2, final int length2, final short[] buffer) { + final short[] set2, final int length2, final short[] buffer) { int pos = 0; int k1 = 0, k2 = 0; if (0 == length2) { @@ -810,9 +795,8 @@ public static int unsignedExclusiveUnion2by2(final short[] set1, final int lengt * @param buffer output array * @return cardinality of the intersection */ - public static int unsignedIntersect2by2(final short[] set1, final int length1, - final short[] set2, - final int length2, final short[] buffer) { + public static int unsignedIntersect2by2(final short[] set1, final int length1, final short[] set2, + final int length2, final short[] buffer) { final int THRESHOLD = 25; if (set1.length * THRESHOLD < set2.length) { return unsignedOneSidedGallopingIntersect2by2(set1, length1, set2, length2, buffer); @@ -833,8 +817,8 @@ public static int unsignedIntersect2by2(final short[] set1, final int length1, * @param length2 length of second array * @return true if they intersect */ - public static boolean unsignedIntersects(final short[] set1, final int length1, - final short[] set2, final int length2) { + public static boolean unsignedIntersects(final short[] set1, final int length1, final short[] set2, + final int length2) { // galloping might be faster, but we do not expect this function to be slow if ((0 == length1) || (0 == length2)) { return false; @@ -870,7 +854,7 @@ public static boolean unsignedIntersects(final short[] set1, final int length1, protected static int 
unsignedLocalIntersect2by2(final short[] set1, final int length1, - final short[] set2, final int length2, final short[] buffer) { + final short[] set2, final int length2, final short[] buffer) { if ((0 == length1) || (0 == length2)) { return 0; } @@ -931,7 +915,7 @@ protected static int unsignedLocalIntersect2by2(final short[] set1, final int le * @return cardinality of the intersection */ public static int unsignedLocalIntersect2by2Cardinality(final short[] set1, final int length1, - final short[] set2, final int length2) { + final short[] set2, final int length2) { if ((0 == length1) || (0 == length2)) { return 0; } @@ -983,8 +967,7 @@ public static int unsignedLocalIntersect2by2Cardinality(final short[] set1, fina protected static int unsignedOneSidedGallopingIntersect2by2(final short[] smallSet, - final int smallLength, final short[] largeSet, final int largeLength, - final short[] buffer) { + final int smallLength, final short[] largeSet, final int largeLength, final short[] buffer) { if (0 == smallLength) { return 0; } @@ -1040,9 +1023,9 @@ protected static int unsignedOneSidedGallopingIntersect2by2(final short[] smallS * @return cardinality of the union */ public static int unsignedUnion2by2( - final short[] set1, final int offset1, final int length1, - final short[] set2, final int offset2, final int length2, - final short[] buffer) { + final short[] set1, final int offset1, final int length1, + final short[] set2, final int offset2, final int length2, + final short[] buffer) { if (0 == length2) { System.arraycopy(set1, offset1, buffer, 0, length1); return length1; @@ -1146,8 +1129,7 @@ static boolean overlaps(final RunContainer c1, final ArrayContainer c2) { int value = ContainerUtil.toIntUnsigned(c1.getValue(c1i)); int len = ContainerUtil.toIntUnsigned(c1.getLength(c1i)); for (int j = value; j <= value + len; ++j) { - int s = ContainerUtil.unsignedBinarySearch(c2.content, c2i, c2.cardinality, - ContainerUtil.lowbits(j)); + int s = 
ContainerUtil.unsignedBinarySearch(c2.content, c2i, c2.cardinality, ContainerUtil.lowbits(j)); if (s >= 0) { return true; } diff --git a/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/EmptyContainer.java b/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/EmptyContainer.java index 6baa7ff9294..ed6c19539bb 100644 --- a/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/EmptyContainer.java +++ b/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/EmptyContainer.java @@ -298,8 +298,7 @@ public void selectRanges(final RangeConsumer outValues, final RangeIterator inPo } @Override - public boolean findRanges(final RangeConsumer outPositions, final RangeIterator inValues, - final int maxPos) { + public boolean findRanges(final RangeConsumer outPositions, final RangeIterator inValues, final int maxPos) { if (inValues.hasNext()) { throw new IllegalStateException(); } diff --git a/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/ImmutableContainer.java b/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/ImmutableContainer.java index cce0e24aaaa..ac775655b7e 100644 --- a/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/ImmutableContainer.java +++ b/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/ImmutableContainer.java @@ -3,7 +3,7 @@ public abstract class ImmutableContainer extends Container { public static final boolean ENABLED = - !Boolean.getBoolean("io.deephaven.db.v2.utils.rsp.container.ImmutableContainer.DISABLED"); + !Boolean.getBoolean("io.deephaven.db.v2.utils.rsp.container.ImmutableContainer.DISABLED"); @Override public final Container deepCopy() { diff --git a/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/RangeConsumer.java b/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/RangeConsumer.java index 0219bd2772a..07312e6aabb 100644 --- a/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/RangeConsumer.java +++ 
b/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/RangeConsumer.java @@ -5,9 +5,8 @@ */ public interface RangeConsumer { /** - * Deliver a single range. Methods receiving a RangeConsumer should call accept on it for for - * non-empty, disjoint ranges. Calls should be made in increasing order of values contained in - * the ranges. + * Deliver a single range. Methods receiving a RangeConsumer should call accept on it for for non-empty, disjoint + * ranges. Calls should be made in increasing order of values contained in the ranges. * * @param begin first value of the range to add. * @param end one past the last value in the range to add (ie, end is exclusive). diff --git a/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/RangeIterator.java b/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/RangeIterator.java index 88175631d21..760dfdb1a2d 100644 --- a/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/RangeIterator.java +++ b/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/RangeIterator.java @@ -4,8 +4,8 @@ public interface RangeIterator { /** - * Checks if the iterator has more ranges. If hasNext() returns false, calling next thereafter - * results in undefined behavior. + * Checks if the iterator has more ranges. If hasNext() returns false, calling next thereafter results in undefined + * behavior. * * @return whether there is another range. */ @@ -35,8 +35,7 @@ public interface RangeIterator { void next(); /** - * Call accept on the provided AbortableRangeConsumer until it returns false or we run out of - * values. + * Call accept on the provided AbortableRangeConsumer until it returns false or we run out of values. * * @param rc An AbortableRangeConsumer to feed ranges to. * @return false if AbortableRangeConsumer returned false at any point, true otherwise. 
@@ -57,10 +56,9 @@ default boolean forEachRange(AbortableRangeConsumer rc) { * * @param buffer a short array where consecutive pairs of (start, end-1) values will be stored. * @param offset where in buffer to start storing range boundary values. - * @param maxRanges maximum number of ranges that can be written to buffer; {@code buffer} - * should have at least space for {@code 2*maxRanges} shorts starting at {@code offset}. - * @return how many ranges were written in {@code buffer}; this is two times the individual - * elements written. + * @param maxRanges maximum number of ranges that can be written to buffer; {@code buffer} should have at least + * space for {@code 2*maxRanges} shorts starting at {@code offset}. + * @return how many ranges were written in {@code buffer}; this is two times the individual elements written. */ default int next(final short[] buffer, final int offset, final int maxRanges) { int count = 0; @@ -80,8 +78,7 @@ class Single implements RangeIterator { public Single(int start, int end) { if (end < start || start < 0) { - throw new IllegalArgumentException( - "Invalid range start=" + start + ", endI=" + end); + throw new IllegalArgumentException("Invalid range start=" + start + ", endI=" + end); } this.start = start; this.end = end; @@ -130,8 +127,7 @@ class ArrayBacked implements RangeIterator { public ArrayBacked(int[] ranges) { if ((ranges.length & 1) != 0) { - throw new IllegalArgumentException( - "Invalid array for range, odd size=" + ranges.length); + throw new IllegalArgumentException("Invalid array for range, odd size=" + ranges.length); } this.ranges = Arrays.copyOf(ranges, ranges.length); pos = -2; diff --git a/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/RunContainer.java b/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/RunContainer.java index d0b172204e8..35484f07dfb 100644 --- a/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/RunContainer.java +++ 
b/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/RunContainer.java @@ -1,8 +1,8 @@ /* * (c) the authors Licensed under the Apache License, Version 2.0. * - * The code in this file is a heavily modified version of the original in the RoaringBitmap library; - * please see https://roaringbitmap.org/ + * The code in this file is a heavily modified version of the original in the RoaringBitmap library; please see + * https://roaringbitmap.org/ * */ package io.deephaven.db.v2.utils.rsp.container; @@ -19,8 +19,8 @@ /** * This container takes the form of runs of consecutive values (effectively, run-length encoding). *

    - * Adding and removing content from this container might make it wasteful so regular calls to - * "runOptimize" might be warranted. + * Adding and removing content from this container might make it wasteful so regular calls to "runOptimize" might be + * warranted. */ public final class RunContainer extends Container { // Sizing of a short array object in a 64 bit JVM (Hotspot) uses @@ -33,7 +33,7 @@ public final class RunContainer extends Container { private static final boolean ENABLE_GALLOPING_AND = false; private static int branchyUnsignedInterleavedBinarySearch(final short[] array, final int begin, - final int end, final short k) { + final int end, final short k) { int ikey = toIntUnsigned(k); int low = begin; int high = end - 1; @@ -53,7 +53,7 @@ private static int branchyUnsignedInterleavedBinarySearch(final short[] array, f // starts with binary search and finishes with a sequential search private static int hybridUnsignedInterleavedBinarySearch(final short[] array, final int begin, - final int end, final short k) { + final int end, final short k) { int ikey = toIntUnsigned(k); // next line accelerates the possibly common case where the value would // be inserted at the end @@ -93,7 +93,7 @@ protected static int sizeInBytes(final int numberOfRuns) { } private static int unsignedInterleavedBinarySearch(final short[] array, final int begin, - final int end, final short k) { + final int end, final short k) { if (ContainerUtil.USE_HYBRID_BINSEARCH) { return hybridUnsignedInterleavedBinarySearch(array, begin, end, k); } else { @@ -103,12 +103,10 @@ private static int unsignedInterleavedBinarySearch(final short[] array, final in } private short[] valueslength;// we interleave values and lengths, so - // that if you have the values 11,12,13,14,15, you store that as 11,4 where 4 means that beyond - // 11 + // that if you have the values 11,12,13,14,15, you store that as 11,4 where 4 means that beyond 11 // itself, there are // 4 contiguous values that follows. 
- // Other example: e.g., 1, 10, 20,0, 31,2 would be a concise representation of 1, 2, ..., 11, - // 20, + // Other example: e.g., 1, 10, 20,0, 31,2 would be a concise representation of 1, 2, ..., 11, 20, // 31, 32, 33 int nbrruns = 0;// how many runs, this number should fit in 16 bits. @@ -190,8 +188,7 @@ public RunContainer(final int start1, final int end1, final int start2, final in cardinality = d1 + d2; } - public static RunContainer select(final RunContainer src, final int startRank, - final int endRank) { + public static RunContainer select(final RunContainer src, final int startRank, final int endRank) { final RunContainer ans = new RunContainer(src.valueslength.length); int k = 0; int kStart = toIntUnsigned(src.getValue(0)); @@ -299,8 +296,8 @@ public RunContainer(final int runsCapacity) { } /** - * Construct a new RunContainer backed by the provided array. Note that if you modify the - * RunContainer a new array may be produced. + * Construct a new RunContainer backed by the provided array. Note that if you modify the RunContainer a new array + * may be produced. * * @param array array where the data is stored * @param numRuns number of runs (each using 2 shorts in the buffer) @@ -318,16 +315,14 @@ public RunContainer(final int runsCapacity) { } /** - * Construct a new RunContainer using the provided array. The container takes ownership of the - * array. + * Construct a new RunContainer using the provided array. The container takes ownership of the array. * - * @param valueslength array with valid runs, in increasing unsigned short order. The container - * takes ownership of this array. + * @param valueslength array with valid runs, in increasing unsigned short order. The container takes ownership of + * this array. * @param nbrruns number of runs (the array should contain 2*n elements). * @param cardinality total cardinality in the runs. 
*/ - public static RunContainer makeByWrapping(final short[] valueslength, final int nbrruns, - final int cardinality) { + public static RunContainer makeByWrapping(final short[] valueslength, final int nbrruns, final int cardinality) { return new RunContainer(valueslength, nbrruns, cardinality); } @@ -342,8 +337,7 @@ public Container add(final int begin, final int end) { } private Container iaddImpl( - final int begin, final int end, final Supplier self, - final Supplier copy) { + final int begin, final int end, final Supplier self, final Supplier copy) { if (end == begin) { return self.get(); } @@ -413,10 +407,10 @@ Container set(final short k, final PositionHint positionHint) { } private Container isetImpl(final short k, - int index, - final PositionHint positionHint, - final Supplier self, - final Supplier copy) { + int index, + final PositionHint positionHint, + final Supplier self, + final Supplier copy) { index = -index - 2;// points to preceding value, possibly -1 final int kAsInt = toIntUnsigned(k); if (index >= 0) {// possible match @@ -514,8 +508,7 @@ public Container and(final ArrayContainer x) { rlelength = getLengthAsInt(rlepos); } if (rleval > arrayval) { - arraypos = ContainerUtil.advanceUntil(x.content, arraypos, x.getCardinality(), - (short) rleval); + arraypos = ContainerUtil.advanceUntil(x.content, arraypos, x.getCardinality(), (short) rleval); } else { ac.content[ac.cardinality] = (short) arrayval; ++ac.cardinality; @@ -599,8 +592,7 @@ public Container and(final RunContainer x) { while ((rlepos < nbrruns) && (xrlepos < x.nbrruns)) { if (end <= xstart) { if (ENABLE_GALLOPING_AND) { - rlepos = skipAhead(this, rlepos, xstart); // skip over runs until we have end > - // xstart (or + rlepos = skipAhead(this, rlepos, xstart); // skip over runs until we have end > xstart (or // rlepos is advanced beyond end) } else { ++rlepos; @@ -708,8 +700,7 @@ private Container andRangeImpl(final boolean inPlace, final int start, final int final RunContainer ans; 
if (inPlace) { ans = this; - ans.cardinality = 0; // we are going to building ourselves over on top of the same - // array. + ans.cardinality = 0; // we are going to building ourselves over on top of the same array. } else { ans = new RunContainer(erun - srun + 1); } @@ -761,8 +752,7 @@ public Container andNot(final ArrayContainer x) { // if the cardinality is small, we construct the solution in place final ArrayContainer ac = new ArrayContainer(card); ac.cardinality = - ContainerUtil.unsignedDifference(getShortIterator(), x.getShortIterator(), - ac.content); + ContainerUtil.unsignedDifference(getShortIterator(), x.getShortIterator(), ac.content); return ac; } // otherwise, we generate a bitmap @@ -784,8 +774,7 @@ public Container andNot(final BitmapContainer x) { int runStart = getValueAsInt(rlepos); int runEnd = runStart + getLengthAsInt(rlepos); for (int runValue = runStart; runValue <= runEnd; ++runValue) { - if (!x.contains((short) runValue)) {// it looks like contains() should be cheap - // enough if + if (!x.contains((short) runValue)) {// it looks like contains() should be cheap enough if // accessed sequentially answer.content[answer.cardinality++] = (short) runValue; } @@ -872,7 +861,7 @@ public Container andNot(final RunContainer x) { ++rlepos; if (rlepos < nbrruns) { System.arraycopy(valueslength, 2 * rlepos, ans.valueslength, 2 * ans.nbrruns, - 2 * (nbrruns - rlepos)); + 2 * (nbrruns - rlepos)); for (int run = rlepos; run < nbrruns; ++run) { ans.cardinality += getLengthAsInt(run) + 1; } @@ -953,12 +942,11 @@ public boolean contains(final short x) { /** * @param x a value to check for membership to this container * @param i a position in runs space where to begin a binary search - * @return the index of the run that contains x, if x is contained in some existing run; if x is - * not part of an existing run, -(index+1) where index is the position where x would be - * "inserted". 
Note "inserting" x may mean either (a) adding x as the new ending for the - * run in the position, (b) inserting a new run with only x pushing - * the run in the position returned right, or (c) inserting x as the new start of the - * existing run in that position. + * @return the index of the run that contains x, if x is contained in some existing run; if x is not part of an + * existing run, -(index+1) where index is the position where x would be "inserted". Note "inserting" x may + * mean either (a) adding x as the new ending for the run in the position, (b) inserting a + * new run with only x pushing the run in the position returned right, or (c) inserting x as the new start + * of the existing run in that position. */ int searchFrom(final short x, final int i) { int index = unsignedInterleavedBinarySearch(valueslength, i, nbrruns, x); @@ -1046,8 +1034,7 @@ protected boolean contains(final ArrayContainer arrayContainer) { @Override protected boolean contains(final BitmapContainer bitmapContainer) { final int cardinality = getCardinality(); - if (bitmapContainer.getCardinality() != -1 - && bitmapContainer.getCardinality() > cardinality) { + if (bitmapContainer.getCardinality() != -1 && bitmapContainer.getCardinality() > cardinality) { return false; } final int runCount = numberOfRuns(); @@ -1100,8 +1087,7 @@ private void copyToOffset(final int offset) { } private void copyValuesLength( - final short[] src, final int srcIndex, final short[] dst, final int dstIndex, - final int length) { + final short[] src, final int srcIndex, final short[] dst, final int dstIndex, final int length) { System.arraycopy(src, 2 * srcIndex, dst, 2 * dstIndex, 2 * length); } @@ -1117,9 +1103,9 @@ private void decrementValue(final int index) { private static int nextRunsCapacity(final int oldRuns) { return (oldRuns == 0) ? DEFAULT_INIT_SIZE_IN_RUNS - : oldRuns < 32 ? runsSizeRounding(oldRuns * 2) - : oldRuns < 512 ? 
runsSizeRounding(oldRuns * 3 / 2) - : runsSizeRounding(oldRuns * 5 / 4); + : oldRuns < 32 ? runsSizeRounding(oldRuns * 2) + : oldRuns < 512 ? runsSizeRounding(oldRuns * 3 / 2) + : runsSizeRounding(oldRuns * 5 / 4); } @@ -1163,8 +1149,7 @@ public int getCardinality() { * * @param index the index of the run. * @return the length of the run at the index. - * @throws ArrayIndexOutOfBoundsException if index is negative or larger than the index of the - * last run. + * @throws ArrayIndexOutOfBoundsException if index is negative or larger than the index of the last run. */ public short getLength(final int index) { return valueslength[2 * index + 1]; @@ -1271,8 +1256,7 @@ public boolean advance(final int v) { // We know v is to the left of the run at position right. // Binary search over start elements. - // At this point neither left nor right contain v, and there is at least one other range - // between them + // At this point neither left nor right contain v, and there is at least one other range between them // thus we're guaranteed to advance. // Note as this loop iterates, it is always true neither left nor right contain v. while (true) { @@ -1383,8 +1367,7 @@ public SearchRangeIterator getShortRangeIterator(final int initialSeek) { * * @param index the index of the run. * @return the value of the first element of the run at the index. - * @throws ArrayIndexOutOfBoundsException if index is negative or larger than the index of the - * last run. + * @throws ArrayIndexOutOfBoundsException if index is negative or larger than the index of the last run. 
*/ public short getValue(final int index) { return valueslength[2 * index]; @@ -1395,19 +1378,16 @@ public int getValueAsInt(final int index) { } public RunContainer iaddUnsafe(final int begin, final int end, final int searchBeginRunIndex) { - int bIndex = unsignedInterleavedBinarySearch(valueslength, searchBeginRunIndex, nbrruns, - (short) begin); + int bIndex = unsignedInterleavedBinarySearch(valueslength, searchBeginRunIndex, nbrruns, (short) begin); int eIndex; if (bIndex >= 0) { - eIndex = - unsignedInterleavedBinarySearch(valueslength, bIndex, nbrruns, (short) (end - 1)); + eIndex = unsignedInterleavedBinarySearch(valueslength, bIndex, nbrruns, (short) (end - 1)); } else { final int effectiveBeginIndex = ~bIndex; if (effectiveBeginIndex >= nbrruns) { eIndex = bIndex; } else { - eIndex = unsignedInterleavedBinarySearch(valueslength, effectiveBeginIndex, nbrruns, - (short) (end - 1)); + eIndex = unsignedInterleavedBinarySearch(valueslength, effectiveBeginIndex, nbrruns, (short) (end - 1)); } } @@ -1535,7 +1515,7 @@ public Container iappend(final int begin, final int end) { final RunContainer ans; if (shared) { final short[] newValuesLength = - getValuesLengthInBiggerArray(runsShortArraySizeRounding(capacityForAns / 2)); + getValuesLengthInBiggerArray(runsShortArraySizeRounding(capacityForAns / 2)); ans = makeByWrapping(newValuesLength, nbrruns, cardinality); } else { ans = this; @@ -1593,9 +1573,9 @@ public Container iandNot(final RunContainer x) { private int nextCapacity() { int newCapacity = (valueslength.length == 0) ? DEFAULT_INIT_SIZE_IN_RUNS - : valueslength.length < 64 ? valueslength.length * 2 - : valueslength.length < 1024 ? valueslength.length * 3 / 2 - : valueslength.length * 5 / 4; + : valueslength.length < 64 ? valueslength.length * 2 + : valueslength.length < 1024 ? 
valueslength.length * 3 / 2 + : valueslength.length * 5 / 4; return newCapacity; } @@ -1680,8 +1660,7 @@ public Container inot(final int rangeStart, final int rangeEnd) { // remaining code is just a minor variation on not() int myNbrRuns = nbrruns; - // we can't use deepCopy/deepCopyIfShared because we already tested valueslentgh.lentgth - // above, + // we can't use deepCopy/deepCopyIfShared because we already tested valueslentgh.lentgth above, // and deepCopy may reduce valueslentgh.lentgth final RunContainer ans; if (shared) { @@ -1693,8 +1672,7 @@ public Container inot(final int rangeStart, final int rangeEnd) { } int k = 0; - // could try using unsignedInterleavedBinarySearch(valueslength, 0, nbrruns, rangeStart) - // instead + // could try using unsignedInterleavedBinarySearch(valueslength, 0, nbrruns, rangeStart) instead // of sequential scan // to find the starting location @@ -1709,8 +1687,7 @@ public Container inot(final int rangeStart, final int rangeEnd) { // left of the write pointer. However, we need to give the read pointer a head start. // use local variables so we are always reading 1 location ahead. - short bufferedValue = 0, bufferedLength = 0; // MAX_VALUE start and MAX_VALUE length would - // be illegal, + short bufferedValue = 0, bufferedLength = 0; // MAX_VALUE start and MAX_VALUE length would be illegal, // could use as sentinel short nextValue = 0, nextLength = 0; if (k < myNbrRuns) { // prime the readahead variables @@ -1723,8 +1700,7 @@ public Container inot(final int rangeStart, final int rangeEnd) { for (; k < myNbrRuns; ++k) { if (ans.nbrruns > k + 1) { throw new RuntimeException( - "internal error in inot, writer has overtaken reader!! " + k + " " - + ans.nbrruns); + "internal error in inot, writer has overtaken reader!! 
" + k + " " + ans.nbrruns); } if (k + 1 < myNbrRuns) { nextValue = getValue(k + 1); // readahead for next iteration @@ -1796,22 +1772,20 @@ public Container ior(final RunContainer x) { /** * Calculate the result of an or between two RunContainers storing the result in a RunContainer. * - * @param dst A RunContainer where to store the result. Its nbrruns field member should be zero - * on entry. - * @param src A RunContainer with source data for the RunContainer part of the or; if the same - * object as dst, the operation is done in place and the data in src is expected to - * shifted by srcOffset. - * @param srcRuns The number of runs in src; as a separate argument since dst and src may be the - * same object, in which case nbrruns for src is also cero. + * @param dst A RunContainer where to store the result. Its nbrruns field member should be zero on entry. + * @param src A RunContainer with source data for the RunContainer part of the or; if the same object as dst, the + * operation is done in place and the data in src is expected to shifted by srcOffset. + * @param srcRuns The number of runs in src; as a separate argument since dst and src may be the same object, in + * which case nbrruns for src is also cero. * @param srcOffset An offset ot use while reading a particular position in src. * @param other A second RunContainer, to or against src. 
*/ private static void orImpl( - final RunContainer dst, - final RunContainer src, - final int srcRuns, - final int srcOffset, - final RunContainer other) { + final RunContainer dst, + final RunContainer src, + final int srcRuns, + final int srcOffset, + final RunContainer other) { final int otherRuns = other.nbrruns; int srcPosWithOffset = srcOffset; final int srcRunsWithOffset = srcRuns + srcOffset; @@ -1878,7 +1852,7 @@ public Container iremove(final int begin, final int end) { private Container iremoveImpl(final int begin, final int end) { int bIndex = unsignedInterleavedBinarySearch(valueslength, 0, nbrruns, (short) begin); int eIndex = - unsignedInterleavedBinarySearch(valueslength, 0, nbrruns, (short) (end - 1)); + unsignedInterleavedBinarySearch(valueslength, 0, nbrruns, (short) (end - 1)); if (bIndex >= 0) { // beginning marks beginning of a run if (eIndex < 0) { @@ -1887,8 +1861,7 @@ private Container iremoveImpl(final int begin, final int end) { // eIndex could be a run that begins exactly at "end" // or it might be an earlier run - // if the end is before the first run, we'd have eIndex==-1. But bIndex makes this - // impossible. + // if the end is before the first run, we'd have eIndex==-1. But bIndex makes this impossible. if (valueLengthContains(end, eIndex)) { chopValueLength(end, eIndex); // there is something left in the run @@ -2063,7 +2036,7 @@ private RunContainer andNotAsRun(final ArrayContainer x) { if (rlepos < nbrruns) { final int nruns = nbrruns - rlepos; System.arraycopy(valueslength, 2 * rlepos, ans.valueslength, 2 * ans.nbrruns, - 2 * nruns); + 2 * nruns); for (int i = 0; i < nruns; ++i) { ans.cardinality += ans.getLengthAsInt(ans.nbrruns + i) + 1; } @@ -2074,24 +2047,20 @@ private RunContainer andNotAsRun(final ArrayContainer x) { } /** - * Calculate the result of an or between a RunContainer and an ArrayContainer storing the result - * in a RunContainer. 
+ * Calculate the result of an or between a RunContainer and an ArrayContainer storing the result in a RunContainer. * - * @param dst A RunContainer where to store the result. Its nbrruns field member should be zero - * on entry. - * @param src A RunContainer with source data for the RunContainer part of the or, potentially - * shifted by srcOffset. + * @param dst A RunContainer where to store the result. Its nbrruns field member should be zero on entry. + * @param src A RunContainer with source data for the RunContainer part of the or, potentially shifted by srcOffset. * @param srcRuns The number of runs in the srcValueslength array. - * @param srcOffset An offset ot use while reading a particular position in the srcValueslength - * array. + * @param srcOffset An offset ot use while reading a particular position in the srcValueslength array. * @param other An ArrayContainer to or against. */ private static void orImpl( - final RunContainer dst, - final RunContainer src, - final int srcRuns, - final int srcOffset, - final ArrayContainer other) { + final RunContainer dst, + final RunContainer src, + final int srcRuns, + final int srcOffset, + final ArrayContainer other) { final int otherCardinality = other.getCardinality(); int srcPosWithOffset = srcOffset; final int srcRunsWithOffset = srcRuns + srcOffset; @@ -2286,7 +2255,7 @@ private void recoverRoomsInRange(final int begin, final int end) { } if (end + 1 < nbrruns) { copyValuesLength(valueslength, end + 1, valueslength, begin + 1, - nbrruns - 1 - end); + nbrruns - 1 - end); } nbrruns -= end - begin; } @@ -2315,20 +2284,17 @@ private Container iunset(final short x, final int index) { @Override Container iunset(final short x, final PositionHint positionHint) { - final int index = unsignedInterleavedBinarySearch(valueslength, - Math.max(positionHint.value, 0), nbrruns, x); + final int index = unsignedInterleavedBinarySearch(valueslength, Math.max(positionHint.value, 0), nbrruns, x); return unsetImpl(x, index, true, 
positionHint); } @Override Container unset(final short x, final PositionHint positionHint) { - final int index = unsignedInterleavedBinarySearch(valueslength, - Math.max(positionHint.value, 0), nbrruns, x); + final int index = unsignedInterleavedBinarySearch(valueslength, Math.max(positionHint.value, 0), nbrruns, x); return unsetImpl(x, index, false, positionHint); } - private Container unsetImpl(final short x, int index, final boolean inPlace, - final PositionHint positionHintOut) { + private Container unsetImpl(final short x, int index, final boolean inPlace, final PositionHint positionHintOut) { if (index >= 0) { final RunContainer ans = inPlace ? deepcopyIfShared() : deepCopy(); if (ans.getLength(index) == 0) { @@ -2375,8 +2341,8 @@ private Container unsetImpl(final short x, int index, final boolean inPlace, } /** - * Convert to Array or Bitmap container if the serialized form would be shorter. Exactly the - * same functionality as toEfficientContainer. + * Convert to Array or Bitmap container if the serialized form would be shorter. Exactly the same functionality as + * toEfficientContainer. 
*/ @Override @@ -2395,7 +2361,7 @@ public short select(final int j) { offset = nextOffset; } throw new IllegalArgumentException( - "Cannot select " + j + " since cardinality is " + getCardinality()); + "Cannot select " + j + " since cardinality is " + getCardinality()); } @Override @@ -2464,8 +2430,7 @@ public void selectRanges(final RangeConsumer outValues, final RangeIterator inPo } @Override - public boolean findRanges(final RangeConsumer outPositions, final RangeIterator inValues, - final int maxPos) { + public boolean findRanges(final RangeConsumer outPositions, final RangeIterator inValues, final int maxPos) { if (!inValues.hasNext()) { return false; } @@ -2547,18 +2512,17 @@ private int skipAhead(final RunContainer skippingOn, final int pos, final int ta do { probePos = left + span; if (probePos >= skippingOn.nbrruns - 1) { - // expect it might be quite common to find the container cannot be advanced as far - // as + // expect it might be quite common to find the container cannot be advanced as far as // requested. Optimize for it. 
probePos = skippingOn.nbrruns - 1; end = toIntUnsigned(skippingOn.getValue(probePos)) - + toIntUnsigned(skippingOn.getLength(probePos)) + 1; + + toIntUnsigned(skippingOn.getLength(probePos)) + 1; if (end <= targetToExceed) { return skippingOn.nbrruns; } } end = toIntUnsigned(skippingOn.getValue(probePos)) - + toIntUnsigned(skippingOn.getLength(probePos)) + 1; + + toIntUnsigned(skippingOn.getLength(probePos)) + 1; span *= 2; } while (end <= targetToExceed); int right = probePos; @@ -2569,7 +2533,7 @@ private int skipAhead(final RunContainer skippingOn, final int pos, final int ta while (right - left > 1) { int mid = (right + left) / 2; int midVal = toIntUnsigned(skippingOn.getValue(mid)) - + toIntUnsigned(skippingOn.getLength(mid)) + 1; + + toIntUnsigned(skippingOn.getLength(mid)) + 1; if (midVal > targetToExceed) { right = mid; } else { @@ -2587,8 +2551,7 @@ private void simpleAppend(final short start, final short length) { } /** - * Append a value to a RunContainer. Callers must guarantee there is no need to grow the array - * for appending. + * Append a value to a RunContainer. Callers must guarantee there is no need to grow the array for appending. * * @param val a value to append. */ @@ -2597,8 +2560,7 @@ private void smartAppend(final short val) { } /** - * Append a run to a RunContainer. Callers must guarantee there is no need to grow the array for - * appending. + * Append a run to a RunContainer. Callers must guarantee there is no need to grow the array for appending. * * @param start the value for the start of the run. * @param length the length of the run. @@ -2623,8 +2585,8 @@ void smartAppend(final short start, final short length) { } /** - * Append a value to a RunContainer, or remove it if already there (as in an XOR operation). - * Callers must guarantee there is no need to grow the array for appending. + * Append a value to a RunContainer, or remove it if already there (as in an XOR operation). 
Callers must guarantee + * there is no need to grow the array for appending. * * @param val a value to append. */ @@ -2633,8 +2595,8 @@ private void smartAppendForXor(final short val) { } /** - * Append a run to a RunContainer, or remove from it if some values already there (as in an XOR - * operation). Callers must guarantee there is no need to grow the array for appending. + * Append a run to a RunContainer, or remove from it if some values already there (as in an XOR operation). Callers + * must guarantee there is no need to grow the array for appending. * * @param start the value for the start of the run. * @param length the length of the run. @@ -2797,11 +2759,11 @@ private boolean valueLengthContains(final int value, final int index) { // other.getCardinality() > 0. private static void xOrImpl( - final RunContainer dst, - final RunContainer src, - final int srcRuns, - final int srcOffset, - final ArrayContainer other) { + final RunContainer dst, + final RunContainer src, + final int srcRuns, + final int srcOffset, + final ArrayContainer other) { int srcPosWithOffset = srcOffset; final int srcRunsWithOffset = srcRuns + srcOffset; int otherIdx = 0; @@ -2837,8 +2799,7 @@ private static void xOrImpl( } } do { - dst.smartAppendForXor(src.getValue(srcPosWithOffset), - src.getLength(srcPosWithOffset)); + dst.smartAppendForXor(src.getValue(srcPosWithOffset), src.getLength(srcPosWithOffset)); } while (++srcPosWithOffset < srcRunsWithOffset); } return; @@ -2855,8 +2816,7 @@ public Container xor(final ArrayContainer x) { if (isEmpty()) { return x.cowRef(); } - // if the cardinality of the array is small, guess that the output will still be a run - // container + // if the cardinality of the array is small, guess that the output will still be a run container final int arbitrary_threshold = 32; // 32 is arbitrary here final int xCard = x.getCardinality(); if (xCard < arbitrary_threshold) { @@ -2898,11 +2858,11 @@ public Container xor(final BitmapContainer x) { } private 
static void xOrImpl( - final RunContainer dst, - final RunContainer src, - final int srcRuns, - final int srcOffset, - final RunContainer other) { + final RunContainer dst, + final RunContainer src, + final int srcRuns, + final int srcOffset, + final RunContainer other) { int srcPosWithOffset = srcOffset; final int srcRunsWithOffset = srcRuns + srcOffset; int otherPos = 0; @@ -2940,8 +2900,7 @@ private static void xOrImpl( } } do { - dst.smartAppendForXor(src.getValue(srcPosWithOffset), - src.getLength(srcPosWithOffset)); + dst.smartAppendForXor(src.getValue(srcPosWithOffset), src.getLength(srcPosWithOffset)); } while (++srcPosWithOffset < srcRunsWithOffset); } return; @@ -3122,8 +3081,7 @@ public boolean subsetOf(final ArrayContainer c) { return false; } - int s = - ContainerUtil.unsignedBinarySearch(c.content, ci, c.cardinality, lowbits(j)); + int s = ContainerUtil.unsignedBinarySearch(c.content, ci, c.cardinality, lowbits(j)); if (s < 0) { return false; } @@ -3213,7 +3171,7 @@ public boolean overlapsRange(final int rangeStart, final int rangeEnd) { @Override public boolean overlaps(final ArrayContainer c) { return (getCardinality() < c.getCardinality()) ? 
ContainerUtil.overlaps(this, c) - : ContainerUtil.overlaps(c, this); + : ContainerUtil.overlaps(c, this); } @Override @@ -3292,15 +3250,13 @@ public void validate() { final int len = getLengthAsInt(i); computedCard += len + 1; if (val - 1 <= prev || val + len > MAX_VALUE) { - throw new IllegalStateException( - "i=" + i + ", prev=" + prev + ", val=" + val + ", len=" + len); + throw new IllegalStateException("i=" + i + ", prev=" + prev + ", val=" + val + ", len=" + len); } prev = val + len; } final int readCard = getCardinality(); if (computedCard != readCard) { - throw new IllegalStateException( - "computedCard=" + computedCard + ", readCard=" + readCard); + throw new IllegalStateException("computedCard=" + computedCard + ", readCard=" + readCard); } } diff --git a/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/RunContainerRangeIterator.java b/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/RunContainerRangeIterator.java index cfdaaca3d0f..c1196effdf5 100644 --- a/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/RunContainerRangeIterator.java +++ b/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/RunContainerRangeIterator.java @@ -7,8 +7,7 @@ final class RunContainerRangeIterator implements SearchRangeIterator { private int pos; private int start; - private int end; // end == -1 marks a just initialized iterator for which hasNext()/next() have - // not been called yet. + private int end; // end == -1 marks a just initialized iterator for which hasNext()/next() have not been called yet. 
RunContainerRangeIterator(final RunContainer p) { this(p, 0); diff --git a/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/SearchRangeIterator.java b/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/SearchRangeIterator.java index b8471e7ede8..21c887ddd60 100644 --- a/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/SearchRangeIterator.java +++ b/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/SearchRangeIterator.java @@ -3,63 +3,55 @@ public interface SearchRangeIterator extends RangeIterator { /** *

    - * Advance the iterator position forward until the current range end's (exclusive) position is - * bigger (not equal) than the argument, or the iterator is exhausted. Note this should find a - * range that either contains the argument or, if there is no such a range, is the first range - * in the iterator after the argument. If a range containing the argument is found, and its - * start position is less than the argument, start is updated to the argument value. + * Advance the iterator position forward until the current range end's (exclusive) position is bigger (not equal) + * than the argument, or the iterator is exhausted. Note this should find a range that either contains the argument + * or, if there is no such a range, is the first range in the iterator after the argument. If a range containing the + * argument is found, and its start position is less than the argument, start is updated to the argument value. *

    * *

    - * If no satisfying range is found, false is returned, and any subsequent call to hasNext - * returns false. Otherwise true is returned and the current range is updated + * If no satisfying range is found, false is returned, and any subsequent call to hasNext returns false. Otherwise + * true is returned and the current range is updated *

    * *

    - * Note the iterator is invalidated (exhausted) when this method returns false: there is no - * guarantee as to where the start and end positions are left in this case). Calling hasNext() - * on an invalidated iterator is guaranteed to return false; any other method call results in - * undefined behavior. + * Note the iterator is invalidated (exhausted) when this method returns false: there is no guarantee as to where + * the start and end positions are left in this case). Calling hasNext() on an invalidated iterator is guaranteed to + * return false; any other method call results in undefined behavior. *

    * - * @param v a value to search for starting from the current iterator position, which must be a - * valid one on entry. - * @return true if a range satisfying the constraints is found, false if the iterator was - * exhausted. + * @param v a value to search for starting from the current iterator position, which must be a valid one on entry. + * @return true if a range satisfying the constraints is found, false if the iterator was exhausted. */ boolean advance(int v); /** *

    - * Advance the current iterator (start) position while the current value maintains - * comp.directionFrom(v) > 0. If next to the last such value there is a value for which - * comp.directionFrom(v) < 0, or no further values exist, then that last value satisfying - * comp,.directionFrom(v) > 0 is left as the current position and true is returned. If there are - * any elements for which comp.directionFrom(v) == 0, one of such elements, no guarantee which - * one, is left as the current position and true is returned. If at the call entry, the next - * range starts at a point where comp.directionFrom(v) < 0, false is returned and the current - * position is not moved. + * Advance the current iterator (start) position while the current value maintains comp.directionFrom(v) > 0. If + * next to the last such value there is a value for which comp.directionFrom(v) < 0, or no further values exist, + * then that last value satisfying comp,.directionFrom(v) > 0 is left as the current position and true is returned. + * If there are any elements for which comp.directionFrom(v) == 0, one of such elements, no guarantee which one, is + * left as the current position and true is returned. If at the call entry, the next range starts at a point where + * comp.directionFrom(v) < 0, false is returned and the current position is not moved. *

    * *

    - * Note the iterator may not move if at the time of the call, the iterator's current range start - * position satisfies comp.directionFrom(v) >= 0 and there is no other value in the iterator - * that does. + * Note the iterator may not move if at the time of the call, the iterator's current range start position satisfies + * comp.directionFrom(v) >= 0 and there is no other value in the iterator that does. *

    * * *

    - * Part of the contract of this method is that comp.directionFrom will only be called with - * values that are in the underlying container. + * Part of the contract of this method is that comp.directionFrom will only be called with values that are in the + * underlying container. *

    * * @param comp a comparator used to search forward from the current iterator position - * @return false if the target was to the left of the initial position at the time of the call - * (iterator not changed); true otherwise. In the true case the current position is - * guaranteed to satisfy comp.directionFrom(v) >= 0 and if also comp.directionFrom(v) > - * 0, then v is the biggest such value for which comp.directionFrom(v) > 0. If there are - * multiple values for which comp.directionFrom(v) == 0, there is no guarantee as of - * which one will be left as current iterator position. + * @return false if the target was to the left of the initial position at the time of the call (iterator not + * changed); true otherwise. In the true case the current position is guaranteed to satisfy + * comp.directionFrom(v) >= 0 and if also comp.directionFrom(v) > 0, then v is the biggest such value for + * which comp.directionFrom(v) > 0. If there are multiple values for which comp.directionFrom(v) == 0, there + * is no guarantee as of which one will be left as current iterator position. */ boolean search(ContainerUtil.TargetComparator comp); } diff --git a/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/ShortAdvanceIterator.java b/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/ShortAdvanceIterator.java index 33a76b1532f..e8a6f432c43 100644 --- a/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/ShortAdvanceIterator.java +++ b/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/ShortAdvanceIterator.java @@ -7,20 +7,18 @@ public interface ShortAdvanceIterator extends ShortIterator { /** *

    - * Advance the iterator position forward until the current value is smaller or equal to the - * argument, or the iterator is exhausted. + * Advance the iterator position forward until the current value is smaller or equal to the argument, or the + * iterator is exhausted. *

    * *

    - * If no satisfying position is found, false is returned, and any subsequent call to hasNext - * returns false, as the iterator has been exhausted, and the current position is undefined. - * Otherwise true is returned and the current position is updated. + * If no satisfying position is found, false is returned, and any subsequent call to hasNext returns false, as the + * iterator has been exhausted, and the current position is undefined. Otherwise true is returned and the current + * position is updated. *

    * - * @param v a value to search for starting from the current iterator position, which must be a - * valid one on entry. - * @return true if a value satisfying the constraints is found, false if the iterator was - * exhausted. + * @param v a value to search for starting from the current iterator position, which must be a valid one on entry. + * @return true if a value satisfying the constraints is found, false if the iterator was exhausted. */ boolean advance(int v); diff --git a/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/ShortConsumer.java b/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/ShortConsumer.java index 328f427b10c..49e82ead93f 100644 --- a/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/ShortConsumer.java +++ b/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/ShortConsumer.java @@ -1,8 +1,7 @@ package io.deephaven.db.v2.utils.rsp.container; /** - * A ShortConsumer receives the short values contained in a data structure. Each value is visited - * once. + * A ShortConsumer receives the short values contained in a data structure. Each value is visited once. *

    * Usage: * @@ -19,8 +18,8 @@ public interface ShortConsumer { /** - * Provides a value to this consumer. A false return value indicates that the application - * providing values to this consumer should not invoke it again. + * Provides a value to this consumer. A false return value indicates that the application providing values to this + * consumer should not invoke it again. * * @param value the short value * @return false if don't want any more values after this one, true otherwise. diff --git a/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/ShortIterator.java b/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/ShortIterator.java index d67bfd6cc0d..5e1ed5d1dc8 100644 --- a/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/ShortIterator.java +++ b/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/ShortIterator.java @@ -1,8 +1,8 @@ /* * (c) the authors Licensed under the Apache License, Version 2.0. * - * The code in this file is a heavily modified version of the original in the RoaringBitmap library; - * please see https://roaringbitmap.org/ + * The code in this file is a heavily modified version of the original in the RoaringBitmap library; please see + * https://roaringbitmap.org/ * */ diff --git a/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/ShortRangeConsumer.java b/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/ShortRangeConsumer.java index 653d60fe877..4ba3ba29f51 100644 --- a/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/ShortRangeConsumer.java +++ b/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/ShortRangeConsumer.java @@ -1,10 +1,9 @@ package io.deephaven.db.v2.utils.rsp.container; /** - * A ShortRangeConsumer receives the ranges contained in a data structure. Each range is visited - * once, in increasing unsigned order, with non-overlapped boundaries. 
In particular, the start - * position of a range needs to be strictly greater than the end position of the previous range, - * both as unsigned values. + * A ShortRangeConsumer receives the ranges contained in a data structure. Each range is visited once, in increasing + * unsigned order, with non-overlapped boundaries. In particular, the start position of a range needs to be strictly + * greater than the end position of the previous range, both as unsigned values. *

    * Usage: * @@ -21,8 +20,8 @@ public interface ShortRangeConsumer { /** - * Provides a value to this consumer. A false return value indicates that the application - * providing values to this consumer should not invoke it again. + * Provides a value to this consumer. A false return value indicates that the application providing values to this + * consumer should not invoke it again. * * @param unsignedStart the unsigned short value for the start of this range. * @param unsignedEndInclusive the unsigned short value for the end of this range, inclusive. diff --git a/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/SingleRangeContainer.java b/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/SingleRangeContainer.java index 631f11d9f21..10d0fd39ff3 100644 --- a/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/SingleRangeContainer.java +++ b/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/SingleRangeContainer.java @@ -45,8 +45,7 @@ public Container add(final int rangeBegin, final int rangeEnd) { return Container.twoRanges(begin, end, rangeBegin, rangeEnd); } // rangeBegin <= end - return new SingleRangeContainer(minBeginIsUs ? begin : rangeBegin, - maxEndIsUs ? end : rangeEnd); + return new SingleRangeContainer(minBeginIsUs ? begin : rangeBegin, maxEndIsUs ? end : rangeEnd); } @Override @@ -117,8 +116,7 @@ public Container andRange(final int rangeBegin, final int rangeEnd) { if (minBeginIsThem && maxEndIsThem) { // avoid creating a new object if we can. return this; } - return new SingleRangeContainer(minBeginIsThem ? begin : rangeBegin, - maxEndIsThem ? end : rangeEnd); + return new SingleRangeContainer(minBeginIsThem ? begin : rangeBegin, maxEndIsThem ? 
end : rangeEnd); } private Container andNotImpl(final Container x) { @@ -165,7 +163,7 @@ public boolean contains(int rangeStart, int rangeEnd) { private boolean containsImpl(final Container c) { return c.isEmpty() || - (begin() <= c.first() && c.last() < end()); + (begin() <= c.first() && c.last() < end()); } @Override @@ -249,8 +247,7 @@ public boolean forEach(final int rankOffset, final ShortConsumer sc) { public boolean forEachRange(final int rankOffset, final ShortRangeConsumer sc) { final int rangeStart = begin() + rankOffset; if (rangeStart >= end()) { - throw new IllegalStateException( - "rankOffset=" + rankOffset + ", cardinality=" + getCardinality()); + throw new IllegalStateException("rankOffset=" + rankOffset + ", cardinality=" + getCardinality()); } return sc.accept(ContainerUtil.lowbits(rangeStart), rangeLastValue); } @@ -391,7 +388,7 @@ public boolean forEach(final ShortConsumer sc) { public ContainerShortBatchIterator getShortBatchIterator(final int skipFromStartCount) { if (DEBUG && skipFromStartCount >= getCardinality()) { throw new IllegalArgumentException( - "skipFromStartCount=" + skipFromStartCount + ", cardinality=" + getCardinality()); + "skipFromStartCount=" + skipFromStartCount + ", cardinality=" + getCardinality()); } return new ContainerShortBatchIter(this, skipFromStartCount); } @@ -469,7 +466,7 @@ public void next() { public SearchRangeIterator getShortRangeIterator(final int skipFromStartCount) { if (DEBUG && skipFromStartCount >= getCardinality()) { throw new IllegalArgumentException( - "skipFromStartCount=" + skipFromStartCount + ", cardinality=" + getCardinality()); + "skipFromStartCount=" + skipFromStartCount + ", cardinality=" + getCardinality()); } return new SearchRangeIter(this, skipFromStartCount); } @@ -596,8 +593,7 @@ public Container remove(final int rangeFirst, final int rangeEnd) { } // rangeLast < last. 
if (first + 1 == rangeFirst && rangeLast + 1 == last) { - return new TwoValuesContainer(ContainerUtil.lowbits(first), - ContainerUtil.lowbits(last)); + return new TwoValuesContainer(ContainerUtil.lowbits(first), ContainerUtil.lowbits(last)); } return new RunContainer(first, rangeFirst, rangeLast + 1, last + 1); } @@ -682,8 +678,7 @@ public void selectRanges(final RangeConsumer outValues, final RangeIterator inPo } @Override - public boolean findRanges(final RangeConsumer outPositions, final RangeIterator inValues, - final int maxPos) { + public boolean findRanges(final RangeConsumer outPositions, final RangeIterator inValues, final int maxPos) { final int begin = begin(); final int end = end(); while (inValues.hasNext()) { diff --git a/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/SingletonContainer.java b/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/SingletonContainer.java index d16c74574ef..0190e6d5bb9 100644 --- a/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/SingletonContainer.java +++ b/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/SingletonContainer.java @@ -138,27 +138,27 @@ public boolean contains(final short x) { @Override public boolean contains(final int rangeStart, final int rangeEnd) { return rangeEnd <= rangeStart || - (rangeStart == intValue() && rangeEnd - rangeStart == 1); + (rangeStart == intValue() && rangeEnd - rangeStart == 1); } @Override protected boolean contains(final RunContainer runContainer) { return runContainer.nbrruns == 0 || - (runContainer.nbrruns == 1 && - runContainer.getValue(0) == value && - runContainer.getLength(0) == 0); + (runContainer.nbrruns == 1 && + runContainer.getValue(0) == value && + runContainer.getLength(0) == 0); } @Override protected boolean contains(final ArrayContainer arrayContainer) { return arrayContainer.cardinality == 0 || - (arrayContainer.cardinality == 1 && arrayContainer.content[0] == value); + (arrayContainer.cardinality == 1 && 
arrayContainer.content[0] == value); } @Override protected boolean contains(final BitmapContainer bitmapContainer) { return bitmapContainer.cardinality == 0 || - (bitmapContainer.cardinality == 1 && bitmapContainer.contains(value)); + (bitmapContainer.cardinality == 1 && bitmapContainer.contains(value)); } @Override @@ -512,8 +512,7 @@ public void selectRanges(final RangeConsumer outValues, final RangeIterator inPo } @Override - public boolean findRanges(final RangeConsumer outPositions, final RangeIterator inValues, - final int maxPos) { + public boolean findRanges(final RangeConsumer outPositions, final RangeIterator inValues, final int maxPos) { if (maxPos < 0 || !inValues.hasNext()) { return false; } diff --git a/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/TwoValuesContainer.java b/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/TwoValuesContainer.java index 2adc3cfe8d1..d04a6526bab 100644 --- a/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/TwoValuesContainer.java +++ b/Container/src/main/java/io/deephaven/db/v2/utils/rsp/container/TwoValuesContainer.java @@ -472,8 +472,7 @@ public ShortIterator getShortIterator() { return new ForwardIter(this); } - public static final class ContainerShortBatchIter extends ForwardIterBase - implements ContainerShortBatchIterator { + public static final class ContainerShortBatchIter extends ForwardIterBase implements ContainerShortBatchIterator { public ContainerShortBatchIter(final TwoValuesContainer c, final int skip) { super(c, -1 + skip); } @@ -508,8 +507,7 @@ public ContainerShortBatchIterator getShortBatchIterator(final int skipFromStart return new ContainerShortBatchIter(this, skipFromStartCount); } - private static final class SearchRangeIter extends ForwardIterBase - implements SearchRangeIterator { + private static final class SearchRangeIter extends ForwardIterBase implements SearchRangeIterator { public SearchRangeIter(final TwoValuesContainer c, final int skip) { 
super(c, -1 + skip); } @@ -587,8 +585,7 @@ public Container not(final int rangeStart, final int rangeEnd) { // iv1 < rangeEnd. final boolean v1Contained = rangeStart <= iv1; final boolean v2Contained = iv2 < rangeEnd; - final int[] buf = intBuf.get(); // buf will contain consecutive pairs of [begin,end] - // segments. + final int[] buf = intBuf.get(); // buf will contain consecutive pairs of [begin,end] segments. int n = 0; if (v1Contained) { if (iv1 == rangeStart) { @@ -802,12 +799,11 @@ public void selectRanges(final RangeConsumer outValues, final RangeIterator inPo } } throw new IllegalArgumentException("pStart=" + pStart + ", pEnd=" + pEnd + - ", iv1=" + iv1 + ", iv2=" + iv2); + ", iv1=" + iv1 + ", iv2=" + iv2); } @Override - public boolean findRanges(final RangeConsumer outPositions, final RangeIterator inValues, - final int maxPos) { + public boolean findRanges(final RangeConsumer outPositions, final RangeIterator inValues, final int maxPos) { if (maxPos < 0) { throw new IllegalArgumentException("maxPos=" + maxPos); } @@ -854,8 +850,7 @@ public boolean findRanges(final RangeConsumer outPositions, final RangeIterator break; } if (doThrow) { - throw new IllegalArgumentException( - "start=" + start + ", end=" + end + ", iv1=" + iv1 + ", iv2=" + iv2); + throw new IllegalArgumentException("start=" + start + ", end=" + end + ", iv1=" + iv1 + ", iv2=" + iv2); } if (accept1) { if (accept2) { @@ -970,7 +965,7 @@ public boolean overlapsRange(final int start, final int end) { final int iv1 = v1AsInt(); final int iv2 = v2AsInt(); return (start <= iv1 && iv1 < end) || - (start <= iv2 && iv2 < end); + (start <= iv2 && iv2 < end); } @Override diff --git a/Container/src/test/java/io/deephaven/db/v2/utils/rsp/container/ContainerTestCommon.java b/Container/src/test/java/io/deephaven/db/v2/utils/rsp/container/ContainerTestCommon.java index c7cc6050938..57744409e4b 100644 --- a/Container/src/test/java/io/deephaven/db/v2/utils/rsp/container/ContainerTestCommon.java +++ 
b/Container/src/test/java/io/deephaven/db/v2/utils/rsp/container/ContainerTestCommon.java @@ -15,8 +15,7 @@ import static org.junit.Assert.assertNotEquals; class ContainerTestCommon { - static Container populate(String msg, int[] vs, Container container, String name, - ArrayList ranges) { + static Container populate(String msg, int[] vs, Container container, String name, ArrayList ranges) { ranges = getRanges(vs, ranges); final Iterator it = ranges.iterator(); while (it.hasNext()) { @@ -31,20 +30,18 @@ static Container populate(String msg, int[] vs, Container container, String name return container; } - static Container populate(int[] vs, Container container, String name, - ArrayList ranges) { + static Container populate(int[] vs, Container container, String name, ArrayList ranges) { return populate("", vs, container, name, ranges); } static int[][] vss = new int[][] { - // A negative number marks the end of a range starting int he previous element, for its - // absolute value (inclusive). + // A negative number marks the end of a range starting int he previous element, for its absolute value + // (inclusive). 
new int[] {10, 20, 30, 40, 50, 32767, -32768, 65535}, new int[] {1, -3, 27, -28, 111, 345, 347, 349, 360, 16000, 32767, 65535}, new int[] {0, 65, 129, -132, 255, -257, 32768, 65533, -65534}, new int[] {0, -1}, - new int[] {0, -1, 63, -65, 128, 255, 513, 1024, -1025, 2047, 4191, 8192, - -(8192 + 64 * 4 - 1)}, + new int[] {0, -1, 63, -65, 128, 255, 513, 1024, -1025, 2047, 4191, 8192, -(8192 + 64 * 4 - 1)}, new int[] {0}, new int[] {1}, new int[] {65534}, @@ -79,7 +76,7 @@ static void doTestFind(int[] vs, Container container, String name) { for (i = start; i < end; ++i) { while (preNon < i) { assertEquals("prenNon=" + preNon + ", i=" + i, -offset - 1, - container.find(ContainerUtil.lowbits(preNon))); + container.find(ContainerUtil.lowbits(preNon))); ++preNon; } assertEquals("i=" + i, offset, container.find(ContainerUtil.lowbits(i))); @@ -106,7 +103,7 @@ static ArrayList getRanges(int[] vs, ArrayList ranges) { // validate the input, in particular ensure non-adjacent ranges. assertNotEquals(-1, lastStart); assertTrue("i=" + i + ", vs[i]=" + vsi + ", lastEnd=" + lastEnd, - lastStart < -vsi); + lastStart < -vsi); ranges.add(lastStart); ranges.add(-vsi + 1); lastStart = -1; @@ -115,7 +112,7 @@ static ArrayList getRanges(int[] vs, ArrayList ranges) { if (lastEnd != 0) { // more input validation as above. 
assertTrue("i=" + i + ", vs[i]=" + vsi + ", lastEnd=" + lastEnd, - lastEnd < vsi - 1); + lastEnd < vsi - 1); } if (lastStart != -1) { ranges.add(lastStart); @@ -139,7 +136,7 @@ static void doTestRangeIterator(Supplier containerFactory, String con } private static ArrayList rangesSeek( - final ArrayList ranges, final int skip) { + final ArrayList ranges, final int skip) { int remaining = skip; final ArrayList rs = new ArrayList<>(remaining); final Iterator it = ranges.iterator(); @@ -167,15 +164,13 @@ static void doTestRangeIterator(int vi, Container container, String name) { final ArrayList ranges = new ArrayList<>(2 * vs.length); container = populate(vs, container, name, ranges); for (int skip = 0; skip < container.getCardinality(); ++skip) { - final ArrayList rs = - rangesSeek(ranges, Math.min(container.getCardinality(), skip)); + final ArrayList rs = rangesSeek(ranges, Math.min(container.getCardinality(), skip)); doTestRangeIterator(vi, container, skip, rs); } } - static void doTestRangeIterator(int vi, Container container, final int seek, - final ArrayList ranges) { + static void doTestRangeIterator(int vi, Container container, final int seek, final ArrayList ranges) { final RangeIterator cit = container.getShortRangeIterator(seek); final Iterator ait = ranges.iterator(); int r = 0; @@ -196,15 +191,13 @@ static void doTestRangeIterator(int vi, Container container, final int seek, assertFalse(ait.hasNext()); } - static void doTestRangeIteratorNextBuffer(final Supplier containerFactory, - final String containerName) { + static void doTestRangeIteratorNextBuffer(final Supplier containerFactory, final String containerName) { for (int vi = 0; vi < vss.length; ++vi) { doTestRangeIteratorNextBuffer(vi, containerFactory.get(), containerName); } } - private static void doTestRangeIteratorNextBuffer(final int vi, final Container container, - final String name) { + private static void doTestRangeIteratorNextBuffer(final int vi, final Container container, final String 
name) { final int[] vs = vss[vi]; final String m = "vi=" + vi; final ArrayList ranges = new ArrayList<>(2 * vs.length); @@ -236,8 +229,7 @@ private static void doTestRangeIteratorNextBuffer(final int vi, final Container assertFalse(m, ait.hasNext()); } - static void doTestContainerShortBatchIterator(Supplier containerFactory, - String containerName) { + static void doTestContainerShortBatchIterator(Supplier containerFactory, String containerName) { for (int i = 2; i < vss.length; ++i) { final Container c = containerFactory.get(); doTestContainerShortBatchIterator(i, c, containerName); @@ -257,33 +249,33 @@ static void doTestContainerShortBatchIterator(int vi, Container container, Strin final int offset = 2; final int[] voffset = new int[1]; final Predicate hasNextForContainerIter = - (it) -> (count[0] > 0) || it.hasNext(); + (it) -> (count[0] > 0) || it.hasNext(); final Function nextForContainerIter = - (it) -> { - if (count[0] == 0) { - voffset[0] = offset; - count[0] = it.next(buf, offset, buf.length - offset); - assertTrue(count[0] > 0); - } - final int v = toUnsignedInt(buf[voffset[0]]); - ++voffset[0]; - --count[0]; - return v; - }; + (it) -> { + if (count[0] == 0) { + voffset[0] = offset; + count[0] = it.next(buf, offset, buf.length - offset); + assertTrue(count[0] > 0); + } + final int v = toUnsignedInt(buf[voffset[0]]); + ++voffset[0]; + --count[0]; + return v; + }; final int[] start = new int[1]; final int[] end = new int[1]; final int[] curr = new int[1]; final Predicate> hasNextForRanges = - (it) -> ((curr[0] < end[0]) || it.hasNext()); + (it) -> ((curr[0] < end[0]) || it.hasNext()); final Function, Integer> nextForRanges = - (it) -> { - if (curr[0] >= end[0]) { - start[0] = curr[0] = it.next(); - end[0] = it.next(); - } - return curr[0]++; - }; + (it) -> { + if (curr[0] >= end[0]) { + start[0] = curr[0] = it.next(); + end[0] = it.next(); + } + return curr[0]++; + }; int r = 0; while (hasNextForContainerIter.test(cit)) { final String m2 = m + ", r=" + r; 
@@ -301,16 +293,14 @@ static void doTestContainerShortBatchIteratorForEach(int vi, Container container final ArrayList ranges = new ArrayList<>(2 * vs.length); container = populate(vs, container, name, ranges); for (int skip = 0; skip < container.getCardinality(); ++skip) { - final ArrayList rs = - rangesSeek(ranges, Math.min(container.getCardinality(), skip)); + final ArrayList rs = rangesSeek(ranges, Math.min(container.getCardinality(), skip)); final String m = "vi=" + vi + ", skip=" + skip; doTestContainerShortBatchIteratorForEach(m, container, skip, rs); } } private static void doTestContainerShortBatchIteratorForEach( - final String m, final Container container, final int skip, - final ArrayList ranges) { + final String m, final Container container, final int skip, final ArrayList ranges) { final ContainerShortBatchIterator cit = container.getShortBatchIterator(skip); final Iterator ait = ranges.iterator(); final short[] buf = new short[7]; @@ -318,36 +308,36 @@ private static void doTestContainerShortBatchIteratorForEach( final int offset = 2; final int[] voffset = new int[1]; final Predicate hasNextForContainerIter = - (it) -> (count[0] > 0) || it.hasNext(); + (it) -> (count[0] > 0) || it.hasNext(); final Function nextForContainerIter = - (it) -> { - if (count[0] == 0) { - voffset[0] = offset; - it.forEach((short v) -> { - buf[voffset[0] + count[0]] = v; - ++count[0]; - return count[0] < buf.length - offset; - }); - assertTrue(count[0] > 0); - } - final int v = toUnsignedInt(buf[voffset[0]]); - ++voffset[0]; - --count[0]; - return v; - }; + (it) -> { + if (count[0] == 0) { + voffset[0] = offset; + it.forEach((short v) -> { + buf[voffset[0] + count[0]] = v; + ++count[0]; + return count[0] < buf.length - offset; + }); + assertTrue(count[0] > 0); + } + final int v = toUnsignedInt(buf[voffset[0]]); + ++voffset[0]; + --count[0]; + return v; + }; final int[] end = new int[1]; final int[] curr = new int[1]; final Predicate> hasNextForRanges = - (it) -> ((curr[0] < 
end[0]) || it.hasNext()); + (it) -> ((curr[0] < end[0]) || it.hasNext()); final Function, Integer> nextForRanges = - (it) -> { - if (curr[0] >= end[0]) { - curr[0] = it.next(); - end[0] = it.next(); - } - return curr[0]++; - }; + (it) -> { + if (curr[0] >= end[0]) { + curr[0] = it.next(); + end[0] = it.next(); + } + return curr[0]++; + }; int r = 0; while (hasNextForContainerIter.test(cit)) { final String m2 = m + ", r=" + r; @@ -360,8 +350,7 @@ private static void doTestContainerShortBatchIteratorForEach( assertFalse(hasNextForRanges.test(ait)); } - static void doTestRangeIteratorAdvance(Supplier containerFactory, - String containerName) { + static void doTestRangeIteratorAdvance(Supplier containerFactory, String containerName) { for (int i = 0; i < vss.length; ++i) { doTestRangeIteratorAdvance(i, containerFactory.get(), containerName); } @@ -393,8 +382,7 @@ static void doTestRangeIteratorAdvance(int iv, Container container, String name) if (v < 0) { v = 0; } - final String m3 = - m2 + ", range=" + range + ", astart=" + astart + ", aend=" + aend + ", v=" + v; + final String m3 = m2 + ", range=" + range + ", astart=" + astart + ", aend=" + aend + ", v=" + v; final boolean result = cit.advance(v); assertTrue(m3, result); assertEquals(m3, Math.max(v, astart), cit.start()); @@ -407,8 +395,7 @@ static void doTestRangeIteratorAdvance(int iv, Container container, String name) } } - static void doTestRangeIteratorSearch(Supplier containerFactory, - String containerName) { + static void doTestRangeIteratorSearch(Supplier containerFactory, String containerName) { for (int i = 0; i < vss.length; ++i) { doTestRangeIteratorSearch(i, containerFactory.get(), containerName); doTestRangeIteratorSearch2(i, containerFactory.get(), containerName); @@ -417,8 +404,8 @@ static void doTestRangeIteratorSearch(Supplier containerFactory, // expectedEnd is inclusive. 
private static void doSingleSearch(final String m, final Container compContainer, - final SearchRangeIterator cit, final int v, - final boolean expectedResult, final int expectedStart, final int expectedEnd) { + final SearchRangeIterator cit, final int v, + final boolean expectedResult, final int expectedStart, final int expectedEnd) { ContainerUtil.TargetComparator comp = (k) -> { final boolean check = compContainer.contains((short) k); assertTrue(check); @@ -505,8 +492,8 @@ static void doTestRangeIteratorSearch2(int iv, Container container, String name) if (container.contains((short) v) && cit.start() <= v) { doSingleSearch(m3, container, cit, v, true, v, alast); } else { - doSingleSearch(m3, container, cit, v, - cit.start() <= v && !(prevLast == -1 && j == 0), prevLast, prevLast); + doSingleSearch(m3, container, cit, v, cit.start() <= v && !(prevLast == -1 && j == 0), prevLast, + prevLast); } prevLast = cit.end() - 1; } @@ -538,8 +525,7 @@ static void doTestSelect(int[] vs, Container container, String name) { int start = it.next(); int end = it.next(); for (int i = start; i < end; ++i) { - assertEquals("i=" + i + ", offset=" + offset, ContainerUtil.lowbits(i), - container.select(offset)); + assertEquals("i=" + i + ", offset=" + offset, ContainerUtil.lowbits(i), container.select(offset)); ++offset; } } @@ -571,8 +557,7 @@ static void doTestSelectContainer(Supplier containerFactory, String c } } - static void doTestSelectContainer(final int vi, final Container inContainer, - final String name) { + static void doTestSelectContainer(final int vi, final Container inContainer, final String name) { final int[] vs = vss[vi]; final String m = "vi==" + vi; final ArrayList ranges = new ArrayList<>(2 * vs.length); @@ -603,15 +588,12 @@ static void doTestSelectContainer(final int vi, final Container inContainer, assertEquals(m2, er - sr + 1, sc.getCardinality()); assertEquals(m2, toUnsignedInt(container.select(sr)), sc.first()); assertEquals(m2, 
toUnsignedInt(container.select(er)), sc.last()); - final Container pos = - (er + 1 > lastRank) ? empty : container.select(er + 1, lastRank + 1); - assertEquals(m2, (er + 1 > lastRank) ? 0 : lastRank - er, - pos.getCardinality()); + final Container pos = (er + 1 > lastRank) ? empty : container.select(er + 1, lastRank + 1); + assertEquals(m2, (er + 1 > lastRank) ? 0 : lastRank - er, pos.getCardinality()); assertFalse(m2, pre.intersects(sc)); assertFalse(m2, sc.intersects(pos)); final Container u = pre.or(sc).or(pos); - assertEquals(m2, container.getCardinality(), - container.and(u).getCardinality()); + assertEquals(m2, container.getCardinality(), container.and(u).getCardinality()); } } } @@ -670,8 +652,7 @@ static void doTestSelectRanges(int vi, Container container, String name) { return; } // Now select all the ranges. - final RangeIterator in = - makeRangeIterator(new int[] {0, -(container.getCardinality() - 1)}); + final RangeIterator in = makeRangeIterator(new int[] {0, -(container.getCardinality() - 1)}); final ToArrayRangeConsumer out = new ToArrayRangeConsumer(); container.selectRanges(out, in); final ArrayList oranges = out.getRanges(); @@ -700,8 +681,7 @@ static void doTestFindRanges(int vi, Container container, String name) { final ToArrayRangeConsumer out = new ToArrayRangeConsumer(); container.findRanges(out, in, 0xFFFF); final ArrayList oranges = out.getRanges(); - final String msg = - "vi=" + vi + ", name=" + name + ", i=" + i + ", offset=" + offset; + final String msg = "vi=" + vi + ", name=" + name + ", i=" + i + ", offset=" + offset; assertEquals(msg, 2, oranges.size()); assertEquals(msg, 1, oranges.get(1) - oranges.get(0)); final int apples = offset; @@ -784,8 +764,8 @@ interface BoolContainerOp { } static void doTestBoolOp( - int vi, int vj, - BoolContainerOp testOp, BoolContainerOp validateOp) { + int vi, int vj, + BoolContainerOp testOp, BoolContainerOp validateOp) { int[] vs1 = vss2[vi]; int[] vs2 = vss2[vj]; String pfx = "vi=" + vi + ", vj=" + 
vj; @@ -811,7 +791,7 @@ static void doTestBoolOp( } static void doTestBoolOp( - BoolContainerOp testOp, BoolContainerOp validateOp) { + BoolContainerOp testOp, BoolContainerOp validateOp) { for (int i = 0; i < vss2.length; ++i) { for (int j = i + 1; j < vss2.length; ++j) { doTestBoolOp(i, j, testOp, validateOp); @@ -819,8 +799,7 @@ static void doTestBoolOp( } } - static void doTestRemoveRange(final Supplier containerFactory, - final String containerName) { + static void doTestRemoveRange(final Supplier containerFactory, final String containerName) { for (int vi = 0; vi < vss.length; ++vi) { doTestRemoveRange(vi, containerFactory.get(), containerName); } @@ -862,8 +841,7 @@ static void doTestRemoveRange(final int vi, Container container, final String na } } - static void doTestCopyOnWrite(final Supplier containerFactory, - final String containerName) { + static void doTestCopyOnWrite(final Supplier containerFactory, final String containerName) { Container c = containerFactory.get(); c = c.add(10, 100); Container c2 = c.deepCopy(); @@ -896,15 +874,13 @@ static void doTestCopyOnWrite(final Supplier containerFactory, c2.setCopyOnWrite(); } - static void doTestForEachWithRankOffset(final Supplier containerFactory, - final String containerName) { + static void doTestForEachWithRankOffset(final Supplier containerFactory, final String containerName) { for (int vi = 0; vi < vss.length; ++vi) { doTestForEachWithRankOffset(vi, containerFactory.get(), containerName); } } - private static void doTestForEachWithRankOffset(final int vi, Container container, - final String name) { + private static void doTestForEachWithRankOffset(final int vi, Container container, final String name) { final int[] vs = vss[vi]; final ArrayList ranges = new ArrayList<>(2 * vs.length); container = populate(vs, container, name, ranges); @@ -923,8 +899,7 @@ private static void doTestForEachWithRankOffset(final int vi, Container containe } } - static void doTestForEachRange(final Supplier 
containerFactory, - final String containerName) { + static void doTestForEachRange(final Supplier containerFactory, final String containerName) { for (int vi = 0; vi < vss.length; ++vi) { doTestForEachRange(vi, containerFactory.get(), containerName); } @@ -956,8 +931,7 @@ private static void doTestForEachRange(final int vi, Container container, final } } - static void doTestOverlapsRange(final Supplier containerFactory, - final String containerName) { + static void doTestOverlapsRange(final Supplier containerFactory, final String containerName) { for (int vi = 0; vi < vss.length; ++vi) { doTestOverlapsRange(vi, containerFactory.get(), containerName); } @@ -989,8 +963,7 @@ private static void doTestOverlapsRange(final int vi, Container container, final } } - static void doTestContainsRange(final Supplier containerFactory, - final String containerName) { + static void doTestContainsRange(final Supplier containerFactory, final String containerName) { for (int vi = 0; vi < vss.length; ++vi) { doTestContainsRange(vi, containerFactory.get(), containerName); } @@ -1020,23 +993,20 @@ private static void doTestContainsRange(final int vi, Container container, final boolean r = container.contains(start, end); final Container c2 = Container.rangeOfOnes(start, end); final Container c3 = container.and(c2); - final boolean expected = - c3.subsetOf(c2) && c3.getCardinality() == c2.getCardinality(); + final boolean expected = c3.subsetOf(c2) && c3.getCardinality() == c2.getCardinality(); assertEquals(m2, expected, r); } } } } - static void doTestAppend(final Supplier containerFactory, - final String containerName) { + static void doTestAppend(final Supplier containerFactory, final String containerName) { for (int vi = 0; vi < vss.length; ++vi) { doTestAppend(vi, containerFactory.get(), containerName); } } - private static void doTestAppend(final int vi, Container container, - final String containerName) { + private static void doTestAppend(final int vi, Container container, final 
String containerName) { final int[] vs = vss[vi]; ArrayList ranges = new ArrayList<>(2 * vs.length); final Container expected = populate(vs, container, containerName, ranges); @@ -1052,8 +1022,7 @@ private static void doTestAppend(final int vi, Container container, TestContainerBase.assertSameContents(expected, container); } - static void doTestReverseIteratorAdvance(final Supplier containerFactory, - final String containerName) { + static void doTestReverseIteratorAdvance(final Supplier containerFactory, final String containerName) { for (int vi = 0; vi < vss.length; ++vi) { doTestReverseIteratorAdvance(vi, containerFactory.get(), containerName); } @@ -1061,7 +1030,7 @@ static void doTestReverseIteratorAdvance(final Supplier containerFact } private static void doTestReverseIteratorAdvance(final int vi, final Container container, - final String containerName) { + final String containerName) { final int[] vs = vss[vi]; final String m = "vi==" + vi; ArrayList ranges = new ArrayList<>(2 * vs.length); @@ -1098,14 +1067,12 @@ private static void doTestReverseIteratorAdvance(final int vi, final Container c } } - private static void doTestReverseIteratorAdvanceEmpty(final Container container, - final String containerName) { + private static void doTestReverseIteratorAdvanceEmpty(final Container container, final String containerName) { final ShortAdvanceIterator reverseIter = container.getReverseShortIterator(); assertFalse(reverseIter.advance(1)); } - static void doTestAddRange(final Supplier containerFactory, - final String containerName) { + static void doTestAddRange(final Supplier containerFactory, final String containerName) { Container c = containerFactory.get(); c = c.iadd(5, 7); c = c.iadd(8, 10); @@ -1123,8 +1090,7 @@ static void doTestAddRange(final Supplier containerFactory, } } - static void doTestAndRange(final Supplier containerFactory, - final String containerName) { + static void doTestAndRange(final Supplier containerFactory, final String containerName) { 
for (int i = 0; i < vss.length; ++i) { doTestAndRange(i, containerFactory.get(), containerName); } @@ -1133,7 +1099,7 @@ static void doTestAndRange(final Supplier containerFactory, private static final int nruns = 1000; private static void doTestAndRange( - final int vi, Container container, final String containerName) { + final int vi, Container container, final String containerName) { final int[] vs = vss[vi]; final String m = "vi==" + vi; final Container c = populate(vs, container, containerName, null); diff --git a/Container/src/test/java/io/deephaven/db/v2/utils/rsp/container/TestArrayContainer.java b/Container/src/test/java/io/deephaven/db/v2/utils/rsp/container/TestArrayContainer.java index 36d304de287..3275d48b543 100644 --- a/Container/src/test/java/io/deephaven/db/v2/utils/rsp/container/TestArrayContainer.java +++ b/Container/src/test/java/io/deephaven/db/v2/utils/rsp/container/TestArrayContainer.java @@ -356,9 +356,9 @@ public void testIntersectsWithRange2() { @Test public void testIntersectsWithRange3() { Container container = new ArrayContainer() - .iset((short) 1) - .iset((short) 300) - .iset((short) 1024); + .iset((short) 1) + .iset((short) 300) + .iset((short) 1024); assertTrue(container.intersects(0, 300)); assertTrue(container.intersects(1, 300)); assertFalse(container.intersects(2, 300)); @@ -384,8 +384,8 @@ public void testContainsRange() { @Test public void testContainsRange2() { Container ac = new ArrayContainer() - .iset((short) 1).iset((short) 10) - .add(20, 100); + .iset((short) 1).iset((short) 10) + .add(20, 100); assertFalse(ac.contains(1, 21)); assertFalse(ac.contains(1, 20)); assertTrue(ac.contains(1, 2)); @@ -508,8 +508,8 @@ public void testInotWhenSharedExpansion() { assertEquals(0, c.andNot(pre).getCardinality()); assertEquals(21 - 3 + 1 - 2, ans.getCardinality()); assertEquals(0, - ans.andNot(Container.rangeOfOnes(3, 10 + 1)).andNot(Container.rangeOfOnes(13, 21 + 1)) - .getCardinality()); + ans.andNot(Container.rangeOfOnes(3, 10 + 
1)).andNot(Container.rangeOfOnes(13, 21 + 1)) + .getCardinality()); } @Test @@ -552,14 +552,12 @@ public void testSetWithGuess() { c = c.iadd(20, 26); assertEquals("array", c.getContainerName()); final PositionHint hint = new PositionHint(); - c = c.iset((short) 8, hint).iset((short) 9, hint).iset((short) 10, hint) - .iset((short) 11, hint).iset((short) 12, hint).iset((short) 13, hint) - .iset((short) 14, hint).iset((short) 15, hint).iset((short) 16, hint) - .iset((short) 17, hint).iset((short) 18, hint).iset((short) 19, hint) - .iset((short) 20, hint).iset((short) 21, hint).iset((short) 22, hint) - .iset((short) 23, hint).iset((short) 24, hint).iset((short) 25, hint) - .iset((short) 26, hint).iset((short) 27, hint).iset((short) 28, hint) - .iset((short) 29, hint).iset((short) 30, hint); + c = c.iset((short) 8, hint).iset((short) 9, hint).iset((short) 10, hint).iset((short) 11, hint) + .iset((short) 12, hint).iset((short) 13, hint).iset((short) 14, hint).iset((short) 15, hint) + .iset((short) 16, hint).iset((short) 17, hint).iset((short) 18, hint).iset((short) 19, hint) + .iset((short) 20, hint).iset((short) 21, hint).iset((short) 22, hint).iset((short) 23, hint) + .iset((short) 24, hint).iset((short) 25, hint).iset((short) 26, hint).iset((short) 27, hint) + .iset((short) 28, hint).iset((short) 29, hint).iset((short) 30, hint); assertEquals(23, c.getCardinality()); assertTrue(c.contains(8, 23)); } @@ -571,10 +569,9 @@ public void testUnsetWithHint() { c = c.iadd(20, 25); c = c.iadd(30, 35); final PositionHint hint = new PositionHint(); - c = c.iunset((short) 10, hint).iunset((short) 12, hint).iunset((short) 14, hint) - .iunset((short) 20, hint).iunset((short) 21, hint).iunset((short) 22, hint) - .iunset((short) 23, hint).iunset((short) 24, hint).iunset((short) 34, hint) - .iset((short) 35, hint); + c = c.iunset((short) 10, hint).iunset((short) 12, hint).iunset((short) 14, hint).iunset((short) 20, hint) + .iunset((short) 21, hint).iunset((short) 22, 
hint).iunset((short) 23, hint).iunset((short) 24, hint) + .iunset((short) 34, hint).iset((short) 35, hint); assertEquals(7, c.getCardinality()); assertTrue(c.contains(11, 12)); assertTrue(c.contains(13, 14)); diff --git a/Container/src/test/java/io/deephaven/db/v2/utils/rsp/container/TestBitmapContainer.java b/Container/src/test/java/io/deephaven/db/v2/utils/rsp/container/TestBitmapContainer.java index 42cc19837f5..8b3320a65ec 100644 --- a/Container/src/test/java/io/deephaven/db/v2/utils/rsp/container/TestBitmapContainer.java +++ b/Container/src/test/java/io/deephaven/db/v2/utils/rsp/container/TestBitmapContainer.java @@ -581,7 +581,7 @@ public void testIntersectsWithRange() { @Test public void testIntersectsWithRangeHitScan() { Container container = new BitmapContainer().add(0, 10) - .add(500, 512).add(lower16Bits(-50), lower16Bits(-10)); + .add(500, 512).add(lower16Bits(-50), lower16Bits(-10)); assertTrue(container.intersects(0, 1)); assertTrue(container.intersects(0, 101)); assertTrue(container.intersects(0, 1 << 16)); @@ -828,7 +828,7 @@ public void testValuesInRangeIter() { Container bc = new BitmapContainer(); bc = bc.iadd(67, 70); BitmapContainer.ValuesInRangeIter it = - new BitmapContainer.ValuesInRangeIter(((BitmapContainer) bc).bitmap, 68, 2001); + new BitmapContainer.ValuesInRangeIter(((BitmapContainer) bc).bitmap, 68, 2001); assertTrue(it.hasNext()); assertEquals(68, it.next()); assertTrue(it.hasNext()); @@ -860,10 +860,10 @@ public void testValuesInRangeIterRegression0() { bc = bc.add(32767, 32769); bc = bc.add(65535, 65536); final BitmapContainer.ValuesInRangeContext ctx = - new BitmapContainer.ValuesInRangeContext(29566, 44970); + new BitmapContainer.ValuesInRangeContext(29566, 44970); final long[] bitmap = ((BitmapContainer) bc).bitmap; BitmapContainer.ValuesInRangeIter it = - new BitmapContainer.ValuesInRangeIter(bitmap, ctx); + new BitmapContainer.ValuesInRangeIter(bitmap, ctx); assertEquals(2, ctx.cardinalityInRange(bitmap)); 
assertTrue(it.hasNext()); assertEquals((short) 32767, it.next()); @@ -876,10 +876,10 @@ public void testValuesInRangeIterRegression0() { public void testValuesInRangeIterRegression1() { Container bc = new BitmapContainer(); final BitmapContainer.ValuesInRangeContext ctx = - new BitmapContainer.ValuesInRangeContext(0, 65535); + new BitmapContainer.ValuesInRangeContext(0, 65535); final long[] bitmap = ((BitmapContainer) bc).bitmap; final BitmapContainer.ValuesInRangeIter it = - new BitmapContainer.ValuesInRangeIter(bitmap, ctx); + new BitmapContainer.ValuesInRangeIter(bitmap, ctx); assertFalse(it.hasNext()); } @@ -913,8 +913,7 @@ public void testvaluesInRangeIterRandom() { final Container onesInRange = Container.singleRange(start, end); final Container bcRestricted = bc.and(onesInRange); final ShortIterator sit = bcRestricted.getShortIterator(); - final BitmapContainer.ValuesInRangeIter vit = - new BitmapContainer.ValuesInRangeIter(bitmap, start, end); + final BitmapContainer.ValuesInRangeIter vit = new BitmapContainer.ValuesInRangeIter(bitmap, start, end); while (sit.hasNext()) { assertTrue(vit.hasNext()); assertEquals(sit.next(), vit.next()); diff --git a/Container/src/test/java/io/deephaven/db/v2/utils/rsp/container/TestContainer.java b/Container/src/test/java/io/deephaven/db/v2/utils/rsp/container/TestContainer.java index da15273c4a9..ee8146bc624 100644 --- a/Container/src/test/java/io/deephaven/db/v2/utils/rsp/container/TestContainer.java +++ b/Container/src/test/java/io/deephaven/db/v2/utils/rsp/container/TestContainer.java @@ -19,7 +19,7 @@ public class TestContainer { private final static Class[] CONTAINER_TYPES = - new Class[] {ArrayContainer.class, BitmapContainer.class, RunContainer.class}; + new Class[] {ArrayContainer.class, BitmapContainer.class, RunContainer.class}; @Test public void testNames() { @@ -27,8 +27,7 @@ public void testNames() { assertTrue(new ArrayContainer().getContainerName().equals("array")); assertTrue(new 
RunContainer().getContainerName().equals("run")); assertTrue(new SingletonContainer((short) 1).getContainerName().equals("singleton")); - assertTrue( - new TwoValuesContainer((short) 1, (short) 3).getContainerName().equals("twovalues")); + assertTrue(new TwoValuesContainer((short) 1, (short) 3).getContainerName().equals("twovalues")); assertTrue(new SingleRangeContainer(1, 3).getContainerName().equals("singlerange")); if (ImmutableContainer.ENABLED) { assertTrue(Container.empty().getContainerName().equals("empty")); @@ -239,9 +238,9 @@ public void inotTest10() { assertTrue(c1 instanceof ArrayContainer); assertEquals(14, c1.getCardinality()); assertTrue(checkContent(c1, - new short[] {0, 2, 4, (short) 65190, (short) 65191, (short) 65192, (short) 65193, - (short) 65194, (short) 65195, (short) 65196, (short) 65197, (short) 65198, - (short) 65199, (short) 65200})); + new short[] {0, 2, 4, (short) 65190, (short) 65191, (short) 65192, (short) 65193, + (short) 65194, (short) 65195, (short) 65196, (short) 65197, (short) 65198, + (short) 65199, (short) 65200})); } @Test @@ -692,7 +691,7 @@ public void or2() { Container result = ac.or(ac1.getShortIterator()); assertTrue(checkContent(result, - new short[] {1, 4, 5, (short) 50000, (short) 50002, (short) 50003, (short) 50004})); + new short[] {1, 4, 5, (short) 50000, (short) 50002, (short) 50003, (short) 50004})); } @Test @@ -932,7 +931,7 @@ public void xor2() { Container result = ac.xor(ac1.getShortIterator()); assertTrue(checkContent(result, - new short[] {1, 4, 5, (short) 50000, (short) 50002, (short) 50003, (short) 50004})); + new short[] {1, 4, 5, (short) 50000, (short) 50002, (short) 50003, (short) 50004})); } @@ -959,7 +958,7 @@ public void xor3() { Container result = ac.xor(ac1.getShortIterator()); assertTrue(checkContent(result, - new short[] {3, 4, (short) 50001, (short) 50002, (short) 50003, (short) 50004})); + new short[] {3, 4, (short) 50001, (short) 50002, (short) 50003, (short) 50004})); } @Test @@ -977,9 +976,9 
@@ public void testConsistentToString() { assertEquals(expected, ac.toString()); assertEquals(expected, bc.toString()); String normalizedRCstr = rc.toString() - .replaceAll("\\d+\\]\\[", "") - .replace('[', '{') - .replaceFirst(",\\d+\\]", "}"); + .replaceAll("\\d+\\]\\[", "") + .replace('[', '{') + .replaceFirst(",\\d+\\]", "}"); assertEquals(expected, normalizedRCstr); } @@ -1007,9 +1006,8 @@ public void xor4() { Container result = ac.xor(ac1.getShortIterator()); - assertTrue( - checkContent(result, new short[] {3, 4, (short) 50001, (short) 50002, (short) 50003, - (short) 50004, (short) 50011})); + assertTrue(checkContent(result, new short[] {3, 4, (short) 50001, (short) 50002, (short) 50003, + (short) 50004, (short) 50011})); } @@ -1038,10 +1036,9 @@ public void xor5() { @Test public void testSubsetOf() { - ContainerTestCommon.BoolContainerOp testOp = - (Container c1, Container c2) -> c1.subsetOf(c2); + ContainerTestCommon.BoolContainerOp testOp = (Container c1, Container c2) -> c1.subsetOf(c2); ContainerTestCommon.BoolContainerOp validateOp = - (Container c1, Container c2) -> c1.and(c2).xor(c1).getCardinality() == 0; + (Container c1, Container c2) -> c1.and(c2).xor(c1).getCardinality() == 0; ContainerTestCommon.doTestBoolOp(testOp, validateOp); } } diff --git a/Container/src/test/java/io/deephaven/db/v2/utils/rsp/container/TestContainerBase.java b/Container/src/test/java/io/deephaven/db/v2/utils/rsp/container/TestContainerBase.java index cda94f28589..ff18eab0af7 100644 --- a/Container/src/test/java/io/deephaven/db/v2/utils/rsp/container/TestContainerBase.java +++ b/Container/src/test/java/io/deephaven/db/v2/utils/rsp/container/TestContainerBase.java @@ -109,10 +109,9 @@ public void testAppend() { @Test public void testOverlaps() { - ContainerTestCommon.BoolContainerOp testOp = - (Container c1, Container c2) -> c1.overlaps(c2); + ContainerTestCommon.BoolContainerOp testOp = (Container c1, Container c2) -> c1.overlaps(c2); ContainerTestCommon.BoolContainerOp 
validateOp = - (Container c1, Container c2) -> c1.and(c2).getCardinality() > 0; + (Container c1, Container c2) -> c1.and(c2).getCardinality() > 0; ContainerTestCommon.doTestBoolOp(testOp, validateOp); } diff --git a/Container/src/test/java/io/deephaven/db/v2/utils/rsp/container/TestContainerUtil.java b/Container/src/test/java/io/deephaven/db/v2/utils/rsp/container/TestContainerUtil.java index a141d6b6f8e..9c1dd601d0a 100644 --- a/Container/src/test/java/io/deephaven/db/v2/utils/rsp/container/TestContainerUtil.java +++ b/Container/src/test/java/io/deephaven/db/v2/utils/rsp/container/TestContainerUtil.java @@ -11,30 +11,24 @@ public class TestContainerUtil { public void testUtilUnsignedIntersection() { short[] data1 = {-19, -17, -15, -13, -11, -9, -7, -5, -3, -1}; short[] data2 = {-18, -16, -14, -12, -10, -8, -1}; - Assert - .assertTrue(ContainerUtil.unsignedIntersects(data1, data1.length, data2, data2.length)); + Assert.assertTrue(ContainerUtil.unsignedIntersects(data1, data1.length, data2, data2.length)); short[] data3 = {-19, -17, -15, -13, -11, -9, -7}; short[] data4 = {-18, -16, -14, -12, -10, -8, -6, -4, -2, 0}; - Assert.assertFalse( - ContainerUtil.unsignedIntersects(data3, data3.length, data4, data4.length)); + Assert.assertFalse(ContainerUtil.unsignedIntersects(data3, data3.length, data4, data4.length)); short[] data5 = {}; short[] data6 = {}; - Assert.assertFalse( - ContainerUtil.unsignedIntersects(data5, data5.length, data6, data6.length)); + Assert.assertFalse(ContainerUtil.unsignedIntersects(data5, data5.length, data6, data6.length)); } @Test public void testBranchyUnsignedBinarySearch() { short[] data1 = {-19, -17, -15, -13, -11, -9, -7, -5, -3}; - Assert.assertEquals(8, - ContainerUtil.branchyUnsignedBinarySearch(data1, 0, data1.length, data1[8])); - Assert.assertEquals(0, - ContainerUtil.branchyUnsignedBinarySearch(data1, 0, data1.length, data1[0])); - Assert.assertEquals(data1.length - 1, ContainerUtil.branchyUnsignedBinarySearch(data1, - 
data1.length - 1, data1.length, data1[data1.length - 1])); + Assert.assertEquals(8, ContainerUtil.branchyUnsignedBinarySearch(data1, 0, data1.length, data1[8])); + Assert.assertEquals(0, ContainerUtil.branchyUnsignedBinarySearch(data1, 0, data1.length, data1[0])); + Assert.assertEquals(data1.length - 1, ContainerUtil.branchyUnsignedBinarySearch(data1, data1.length - 1, + data1.length, data1[data1.length - 1])); Assert.assertEquals(-1, ContainerUtil.branchyUnsignedBinarySearch(data1, 0, 0, (short) 0)); - Assert.assertEquals(-10, - ContainerUtil.branchyUnsignedBinarySearch(data1, 0, data1.length, (short) -1)); + Assert.assertEquals(-10, ContainerUtil.branchyUnsignedBinarySearch(data1, 0, data1.length, (short) -1)); } @Test @@ -81,9 +75,8 @@ public void testPartialRadixSortSortsKeysCorrectlyWithDuplicates() { int[] data = new int[] {key2 | 25, key1 | 1, 0, key2 | 10, 25, key1 | 10, key1, 10, key2 | 25, key1 | 1, 0, key2 | 10, 25, key1 | 10, key1, 10}; // sort by keys, leave values stable - int[] expected = - new int[] {0, 25, 10, 0, 25, 10, key1 | 1, key1 | 10, key1, key1 | 1, key1 | 10, key1, - key2 | 25, key2 | 10, key2 | 25, key2 | 10}; + int[] expected = new int[] {0, 25, 10, 0, 25, 10, key1 | 1, key1 | 10, key1, key1 | 1, key1 | 10, key1, + key2 | 25, key2 | 10, key2 | 25, key2 | 10}; int[] test = Arrays.copyOf(data, data.length); ContainerUtil.partialRadixSort(test); Assert.assertArrayEquals(expected, test); @@ -101,14 +94,14 @@ public void testAdvanceUntil() { @Test public void testIterateUntil() { short[] data = {0, 3, 16, 18, 21, 29, 30, -342}; - Assert.assertEquals(1, ContainerUtil.iterateUntil(data, 0, data.length, - ContainerUtil.toIntUnsigned((short) 3))); - Assert.assertEquals(5, ContainerUtil.iterateUntil(data, 0, data.length, - ContainerUtil.toIntUnsigned((short) 28))); - Assert.assertEquals(5, ContainerUtil.iterateUntil(data, 0, data.length, - ContainerUtil.toIntUnsigned((short) 29))); - Assert.assertEquals(7, ContainerUtil.iterateUntil(data, 0, 
data.length, - ContainerUtil.toIntUnsigned((short) -342))); + Assert.assertEquals(1, + ContainerUtil.iterateUntil(data, 0, data.length, ContainerUtil.toIntUnsigned((short) 3))); + Assert.assertEquals(5, + ContainerUtil.iterateUntil(data, 0, data.length, ContainerUtil.toIntUnsigned((short) 28))); + Assert.assertEquals(5, + ContainerUtil.iterateUntil(data, 0, data.length, ContainerUtil.toIntUnsigned((short) 29))); + Assert.assertEquals(7, + ContainerUtil.iterateUntil(data, 0, data.length, ContainerUtil.toIntUnsigned((short) -342))); } @Test diff --git a/Container/src/test/java/io/deephaven/db/v2/utils/rsp/container/TestLargeContainersRandomOps.java b/Container/src/test/java/io/deephaven/db/v2/utils/rsp/container/TestLargeContainersRandomOps.java index 089a290fb40..ee9b9a391ba 100644 --- a/Container/src/test/java/io/deephaven/db/v2/utils/rsp/container/TestLargeContainersRandomOps.java +++ b/Container/src/test/java/io/deephaven/db/v2/utils/rsp/container/TestLargeContainersRandomOps.java @@ -23,13 +23,13 @@ private static int randValueInRange(final Random rand, final int first, final in } private static Container randomLargeContainer( - final Random rand, final boolean asBitmapContainer, final int min, final int max) { + final Random rand, final boolean asBitmapContainer, final int min, final int max) { return randomLargeContainer(rand, true, asBitmapContainer, min, max); } private static Container randomLargeContainer( - final Random rand, final boolean allowEmpty, final boolean asBitmapContainer, final int min, - final int max) { + final Random rand, final boolean allowEmpty, final boolean asBitmapContainer, final int min, + final int max) { if (max - min < 2) { throw new IllegalArgumentException("min=" + min + ", max=" + max); } @@ -58,8 +58,7 @@ private static Container randomLargeContainer( } private static void testOpLoop( - final Random rand, final BiFunction op, - final boolean inPlace) { + final Random rand, final BiFunction op, final boolean inPlace) { for 
(int i = 0; i < numRuns; ++i) { testOp(i, rand, op, inPlace); } @@ -68,15 +67,15 @@ private static void testOpLoop( private static final int tdelta = 40; private static void testOp(final int i, final Random rand, - final BiFunction op, final boolean inPlace) { + final BiFunction op, final boolean inPlace) { testOp(i, rand, op, inPlace, 0, tdelta); testOp(i, rand, op, inPlace, 65535 - tdelta, 65535); } private static void testOp( - final int i, final Random rand, - final BiFunction op, final boolean inPlace, - final int min, final int max) { + final int i, final Random rand, + final BiFunction op, final boolean inPlace, + final int min, final int max) { final String m = "i==" + i + " && min==" + min + " && max==" + max; final Container c1 = randomLargeContainer(rand, false, min, max); final boolean asBitmapContainer = rand.nextBoolean(); @@ -89,14 +88,11 @@ private static void testOp( if (!inPlace) { assertTrue(m, result != c1 || result.isShared()); // none of the operations are inplace. } - Container expected = - (inPlace ? op.apply(c1Bitmap.deepCopy(), c2Bitmap) : op.apply(c1Bitmap, c2Bitmap)) - .check(); + Container expected = (inPlace ? op.apply(c1Bitmap.deepCopy(), c2Bitmap) : op.apply(c1Bitmap, c2Bitmap)).check(); assertTrue(m, expected.sameContents(result)); assertTrue(m, expected != c2Bitmap || expected.isShared()); if (!inPlace) { - assertTrue(m, expected != c1Bitmap || expected.isShared()); // none of the operations - // are inplace. + assertTrue(m, expected != c1Bitmap || expected.isShared()); // none of the operations are inplace. } result = (inPlace ? op.apply(c2.deepCopy(), c1) : op.apply(c2, c1)).check(); @@ -104,13 +100,10 @@ private static void testOp( if (!inPlace) { assertTrue(m, result != c2 || result.isShared()); // none of the operations are inplace. } - expected = - (inPlace ? op.apply(c2Bitmap.deepCopy(), c1Bitmap) : op.apply(c2Bitmap, c1Bitmap)) - .check(); + expected = (inPlace ? 
op.apply(c2Bitmap.deepCopy(), c1Bitmap) : op.apply(c2Bitmap, c1Bitmap)).check(); assertTrue(m, expected != c1Bitmap || expected.isShared()); if (!inPlace) { - assertTrue(m, expected != c2Bitmap || expected.isShared()); // none of the operations - // are inplace. + assertTrue(m, expected != c2Bitmap || expected.isShared()); // none of the operations are inplace. } assertTrue(m, expected.sameContents(result)); @@ -192,7 +185,7 @@ private static void testBooleanOp(final int i, final Random rand, final BooleanO } private static void testBooleanOp( - final int i, final Random rand, final BooleanOps ops, final int min, final int max) { + final int i, final Random rand, final BooleanOps ops, final int min, final int max) { final String m = "i==" + i + " && min==" + min + " && max==" + max; final Container c1 = randomLargeContainer(rand, false, min, max); final boolean asLargeContainer = rand.nextBoolean(); @@ -321,8 +314,7 @@ public Container expectedOp(final Container c, final int start, final int end) { private static void testRangeOp(final RangeOps ops) { final Random rand = new Random(seed0); - final int[][] minmaxes = - new int[][] {new int[] {0, tdelta}, new int[] {65535 - tdelta, 65535}}; + final int[][] minmaxes = new int[][] {new int[] {0, tdelta}, new int[] {65535 - tdelta, 65535}}; for (int run = 0; run < numRuns; ++run) { final String m = "run==" + run; for (int[] minmax : minmaxes) { @@ -381,8 +373,7 @@ public boolean expectedOp(final Container c, final int start, final int end) { private static void testRangeBooleanOp(final RangeBooleanOps ops) { final Random rand = new Random(seed0); - final int[][] minmaxes = - new int[][] {new int[] {0, tdelta}, new int[] {65535 - tdelta, 65535}}; + final int[][] minmaxes = new int[][] {new int[] {0, tdelta}, new int[] {65535 - tdelta, 65535}}; for (int run = 0; run < numRuns; ++run) { final String m = "run==" + run; for (int[] minmax : minmaxes) { @@ -455,8 +446,7 @@ public Container expectedOp(final Container c, final 
int v) { private static void testValueOp(final ValueOps ops) { final Random rand = new Random(seed0); - final int[][] minmaxes = - new int[][] {new int[] {0, tdelta}, new int[] {65535 - tdelta, 65535}}; + final int[][] minmaxes = new int[][] {new int[] {0, tdelta}, new int[] {65535 - tdelta, 65535}}; for (int run = 0; run < numRuns; ++run) { final String m = "run==" + run; for (int[] minmax : minmaxes) { @@ -499,8 +489,7 @@ public boolean expectedOp(final Container c, final int v) { private static void testValueBooleanOp(final ValueBooleanOps ops) { final Random rand = new Random(seed0); - final int[][] minmaxes = - new int[][] {new int[] {0, tdelta}, new int[] {65535 - tdelta, 65535}}; + final int[][] minmaxes = new int[][] {new int[] {0, tdelta}, new int[] {65535 - tdelta, 65535}}; for (int run = 0; run < numRuns; ++run) { final String m = "run==" + run; for (int[] minmax : minmaxes) { @@ -522,8 +511,7 @@ private static void testValueBooleanOp(final ValueBooleanOps ops) { @Test public void testAppend() { final Random rand = new Random(seed0); - final int[][] minmaxes = - new int[][] {new int[] {0, tdelta}, new int[] {65535 - tdelta, 65534}}; + final int[][] minmaxes = new int[][] {new int[] {0, tdelta}, new int[] {65535 - tdelta, 65534}}; for (int run = 0; run < numRuns; ++run) { final String m = "run==" + run; for (int[] minmax : minmaxes) { @@ -537,8 +525,7 @@ public void testAppend() { final int first = randValueInRange(rand, rmin, rmax); final int last = randValueInRange(rand, first, rmax); final Container result = c.cowRef().iappend(first, last + 1).check(); - final Container expected = - c.cowRef().toBitmapContainer().iappend(first, last + 1).check(); + final Container expected = c.cowRef().toBitmapContainer().iappend(first, last + 1).check(); assertEquals(m3, expected.getCardinality(), result.getCardinality()); assertTrue(m3, expected.subsetOf(result)); } @@ -618,8 +605,7 @@ public int expectedOp(final Container c, final int v) { private static void 
testValueIntOp(final ValueIntOps ops) { final Random rand = new Random(seed0); - final int[][] minmaxes = - new int[][] {new int[] {0, tdelta}, new int[] {65535 - tdelta, 65535}}; + final int[][] minmaxes = new int[][] {new int[] {0, tdelta}, new int[] {65535 - tdelta, 65535}}; for (int run = 0; run < numRuns; ++run) { final String m = "run==" + run; for (int[] minmax : minmaxes) { diff --git a/Container/src/test/java/io/deephaven/db/v2/utils/rsp/container/TestRangeCardinality.java b/Container/src/test/java/io/deephaven/db/v2/utils/rsp/container/TestRangeCardinality.java index d4a3b61e8c7..23ad53e4b06 100644 --- a/Container/src/test/java/io/deephaven/db/v2/utils/rsp/container/TestRangeCardinality.java +++ b/Container/src/test/java/io/deephaven/db/v2/utils/rsp/container/TestRangeCardinality.java @@ -42,7 +42,6 @@ public void testCardinalityInBitmapWordRange() { for (int e : elements) { bc.iset((short) e); } - Assert.assertEquals(expected, - ContainerUtil.cardinalityInBitmapRange(bc.bitmap, begin, end)); + Assert.assertEquals(expected, ContainerUtil.cardinalityInBitmapRange(bc.bitmap, begin, end)); } } diff --git a/Container/src/test/java/io/deephaven/db/v2/utils/rsp/container/TestRunContainer.java b/Container/src/test/java/io/deephaven/db/v2/utils/rsp/container/TestRunContainer.java index dca0238e071..3c717ea7f65 100644 --- a/Container/src/test/java/io/deephaven/db/v2/utils/rsp/container/TestRunContainer.java +++ b/Container/src/test/java/io/deephaven/db/v2/utils/rsp/container/TestRunContainer.java @@ -51,7 +51,7 @@ static int[] generateUniformHash(Random rand, int N, int Max) { } private static void getSetOfContainers(ArrayList set, - ArrayList setb) { + ArrayList setb) { Container r1 = new RunContainer(); r1 = r1.iadd(0, (1 << 16)); Container b1 = new ArrayContainer(); @@ -764,8 +764,7 @@ public void inot11() { container.iset((short) 510); - // second run entirely inside range, third run entirely inside range, 4th run entirely - // outside + // second run entirely 
inside range, third run entirely inside range, 4th run entirely outside Container result = container.inot(498, 507); assertEquals(7, result.getCardinality()); @@ -1525,8 +1524,7 @@ public void not11() { container.iset((short) 510); - // second run entirely inside range, third run entirely inside range, 4th run entirely - // outside + // second run entirely inside range, third run entirely inside range, 4th run entirely outside Container result = container.not(498, 507); assertEquals(7, result.getCardinality()); @@ -1935,12 +1933,9 @@ public void RunContainerArg_ArrayANDNOT() { @Test public void RunContainerArg_ArrayANDNOT2() { - ArrayContainer ac = - new ArrayContainer(new short[] {0, 2, 4, 8, 10, 15, 16, 48, 50, 61, 80, -2}); - RunContainer rc = - new RunContainer(new short[] {7, 3, 17, 2, 20, 3, 30, 3, 36, 6, 60, 5, -3, 2}, 7); - assertSameContents(new ArrayContainer(new short[] {0, 2, 4, 15, 16, 48, 50, 80}), - ac.andNot(rc)); + ArrayContainer ac = new ArrayContainer(new short[] {0, 2, 4, 8, 10, 15, 16, 48, 50, 61, 80, -2}); + RunContainer rc = new RunContainer(new short[] {7, 3, 17, 2, 20, 3, 30, 3, 36, 6, 60, 5, -3, 2}, 7); + assertSameContents(new ArrayContainer(new short[] {0, 2, 4, 15, 16, 48, 50, 80}), ac.andNot(rc)); } @Test @@ -3060,10 +3055,10 @@ public void testContainsRange() { @Test public void testContainsRange3() { Container rc = new RunContainer().add(1, 100) - .add(300, 300) - .add(400, 500) - .add(502, 600) - .add(700, 10000); + .add(300, 300) + .add(400, 500) + .add(502, 600) + .add(700, 10000); assertFalse(rc.contains(0, 100)); assertFalse(rc.contains(500, 600)); assertFalse(rc.contains(501, 600)); @@ -3111,7 +3106,7 @@ public void testShortRangeIteratorSearch0() { fail("wrong iterator type"); } RunContainerRangeIterator it2 = - new RunContainerRangeIterator((RunContainerRangeIterator) it); + new RunContainerRangeIterator((RunContainerRangeIterator) it); final int ii = i; final ContainerUtil.TargetComparator t = (final int value) -> (ii - 
value); final boolean b = it2.search(t); @@ -3216,14 +3211,12 @@ public void testReverseIteratorAdvanceRegression0() { public void testSetWithHint() { Container c = new RunContainer(10, 15, 20, 25); final PositionHint hint = new PositionHint(); - c = c.iset((short) 8, hint).iset((short) 9, hint).iset((short) 10, hint) - .iset((short) 11, hint).iset((short) 12, hint).iset((short) 13, hint) - .iset((short) 14, hint).iset((short) 15, hint).iset((short) 16, hint) - .iset((short) 17, hint).iset((short) 18, hint).iset((short) 19, hint) - .iset((short) 20, hint).iset((short) 21, hint).iset((short) 22, hint) - .iset((short) 23, hint).iset((short) 24, hint).iset((short) 25, hint) - .iset((short) 26, hint).iset((short) 27, hint).iset((short) 28, hint) - .iset((short) 29, hint).iset((short) 30, hint); + c = c.iset((short) 8, hint).iset((short) 9, hint).iset((short) 10, hint).iset((short) 11, hint) + .iset((short) 12, hint).iset((short) 13, hint).iset((short) 14, hint).iset((short) 15, hint) + .iset((short) 16, hint).iset((short) 17, hint).iset((short) 18, hint).iset((short) 19, hint) + .iset((short) 20, hint).iset((short) 21, hint).iset((short) 22, hint).iset((short) 23, hint) + .iset((short) 24, hint).iset((short) 25, hint).iset((short) 26, hint).iset((short) 27, hint) + .iset((short) 28, hint).iset((short) 29, hint).iset((short) 30, hint); assertEquals(23, c.getCardinality()); assertTrue(c.contains(8, 23)); } @@ -3233,10 +3226,9 @@ public void testUnsetWithHint() { Container c = new RunContainer(10, 15, 20, 25); c = c.iadd(30, 35); final PositionHint hint = new PositionHint(); - c = c.iunset((short) 10, hint).iunset((short) 12, hint).iunset((short) 14, hint) - .iunset((short) 20, hint).iunset((short) 21, hint).iunset((short) 22, hint) - .iunset((short) 23, hint).iunset((short) 24, hint).iunset((short) 34, hint) - .iset((short) 35, hint); + c = c.iunset((short) 10, hint).iunset((short) 12, hint).iunset((short) 14, hint).iunset((short) 20, hint) + .iunset((short) 21, 
hint).iunset((short) 22, hint).iunset((short) 23, hint).iunset((short) 24, hint) + .iunset((short) 34, hint).iset((short) 35, hint); assertEquals(7, c.getCardinality()); assertTrue(c.contains(11, 12)); assertTrue(c.contains(13, 14)); @@ -3255,8 +3247,7 @@ public void testSetWithHint2() { c = c.iset((short) v2, hint); c = c.iset((short) v3, hint); assertEquals(4, c.getCardinality()); - final Container expected = - new ArrayContainer(new short[] {(short) v0, (short) v2, (short) v3, (short) v1}); + final Container expected = new ArrayContainer(new short[] {(short) v0, (short) v2, (short) v3, (short) v1}); assertTrue(expected.sameContents(c)); } diff --git a/Container/src/test/java/io/deephaven/db/v2/utils/rsp/container/TestSingletonContainer.java b/Container/src/test/java/io/deephaven/db/v2/utils/rsp/container/TestSingletonContainer.java index ae70ebc946d..bcfb91eda9f 100644 --- a/Container/src/test/java/io/deephaven/db/v2/utils/rsp/container/TestSingletonContainer.java +++ b/Container/src/test/java/io/deephaven/db/v2/utils/rsp/container/TestSingletonContainer.java @@ -46,12 +46,12 @@ public void testShortBatchIterator() { public void testFindRanges() { final SingletonContainer c = new SingletonContainer((short) 10); assertTrue(c.findRanges( - (final int start, final int end) -> { - assertEquals(0, start); - assertEquals(1, end); - }, - c.getShortRangeIterator(0), - 0)); + (final int start, final int end) -> { + assertEquals(0, start); + assertEquals(1, end); + }, + c.getShortRangeIterator(0), + 0)); } @Test @@ -129,8 +129,7 @@ public void testAddRange() { final String m3 = m2 + " && i==" + i; final boolean expectedContains = i == 10 || (first <= i && i <= last); assertEquals(m3, expectedContains, r.contains((short) i)); - final int expectedCardinality = - last - first + 1 + ((last < 10 || 10 < first) ? 1 : 0); + final int expectedCardinality = last - first + 1 + ((last < 10 || 10 < first) ? 
1 : 0); assertEquals(m3, expectedCardinality, r.getCardinality()); } if ((first <= 11 && 9 <= last)) { diff --git a/Container/src/test/java/io/deephaven/db/v2/utils/rsp/container/TestSmallContainersRandomOps.java b/Container/src/test/java/io/deephaven/db/v2/utils/rsp/container/TestSmallContainersRandomOps.java index 4442ec7ae9b..d58fb0d640a 100644 --- a/Container/src/test/java/io/deephaven/db/v2/utils/rsp/container/TestSmallContainersRandomOps.java +++ b/Container/src/test/java/io/deephaven/db/v2/utils/rsp/container/TestSmallContainersRandomOps.java @@ -24,7 +24,7 @@ private static int randValueInRange(final Random rand, final int first, final in } private static Container randomSmallContainer( - final Random rand, final boolean asLargeContainer, final int min, final int max) { + final Random rand, final boolean asLargeContainer, final int min, final int max) { return randomSmallContainer(rand, true, asLargeContainer, min, max); } @@ -40,8 +40,7 @@ private static Container randomEmptyLargeContainer(final Random rand) { } private static Container randomSmallContainer( - final Random rand, final boolean allowEmpty, final boolean asLargeContainer, final int min, - final int max) { + final Random rand, final boolean allowEmpty, final boolean asLargeContainer, final int min, final int max) { if (max - min < 2) { throw new IllegalArgumentException("min=" + min + ", max=" + max); } @@ -61,7 +60,7 @@ private static Container randomSmallContainer( if (x < cummulativeProbSingleton) { final int v = min + rand.nextInt(max - min + 1); return asLargeContainer ? randomEmptyLargeContainer(rand).iset((short) v) - : new SingletonContainer((short) v); + : new SingletonContainer((short) v); } if (x < cummulativeProbTwoValues) { final int d = max - min; // d >= 2. @@ -69,20 +68,18 @@ private static Container randomSmallContainer( final short v1 = (short) iv1; final short v2 = (short) randValueInRange(rand, iv1 + 2, max); return asLargeContainer ? 
randomEmptyLargeContainer(rand).iset(v1).iset(v2) - : new TwoValuesContainer(v1, v2); + : new TwoValuesContainer(v1, v2); } final int first = randValueInRange(rand, min, max - 1); final int last = randValueInRange(rand, first + 1, max); if (asLargeContainer) { return randomEmptyLargeContainer(rand).iadd(first, last + 1); } - return (last == first) ? new SingletonContainer((short) last) - : new SingleRangeContainer(first, last + 1); + return (last == first) ? new SingletonContainer((short) last) : new SingleRangeContainer(first, last + 1); } private static void testOpLoop( - final Random rand, final BiFunction op, - final boolean inPlace) { + final Random rand, final BiFunction op, final boolean inPlace) { for (int i = 0; i < numRuns; ++i) { testOp(i, rand, op, inPlace); } @@ -91,51 +88,43 @@ private static void testOpLoop( private static final int tdelta = 13; private static void testOp(final int i, final Random rand, - final BiFunction op, final boolean inPlace) { + final BiFunction op, final boolean inPlace) { testOp(i, rand, op, inPlace, 0, tdelta); testOp(i, rand, op, inPlace, 65535 - tdelta, 65535); } private static void testOp( - final int i, final Random rand, - final BiFunction op, final boolean inPlace, - final int min, final int max) { + final int i, final Random rand, + final BiFunction op, final boolean inPlace, + final int min, final int max) { final String m = "i==" + i + " && min==" + min + " && max==" + max; final Container c1 = randomSmallContainer(rand, false, min, max); final boolean asLargeContainer = rand.nextBoolean(); final Container c2 = randomSmallContainer(rand, asLargeContainer, min, max); - final Container largec1 = - inPlace ? c1.deepCopy().toLargeContainer() : c1.toLargeContainer(); - final Container largec2 = - inPlace ? c2.deepCopy().toLargeContainer() : c2.toLargeContainer(); + final Container largec1 = inPlace ? c1.deepCopy().toLargeContainer() : c1.toLargeContainer(); + final Container largec2 = inPlace ? 
c2.deepCopy().toLargeContainer() : c2.toLargeContainer(); Container result = (inPlace ? op.apply(c1.deepCopy(), c2) : op.apply(c1, c2)).check(); assertTrue(m, result != c2 || result.isShared() || result instanceof ImmutableContainer); if (!inPlace) { - assertTrue(m, - result != c1 || result.isShared() || result instanceof ImmutableContainer); + assertTrue(m, result != c1 || result.isShared() || result instanceof ImmutableContainer); } - Container expected = - (inPlace ? op.apply(largec1.deepCopy(), largec2) : op.apply(largec1, largec2)).check(); + Container expected = (inPlace ? op.apply(largec1.deepCopy(), largec2) : op.apply(largec1, largec2)).check(); assertTrue(m, expected != largec2 || expected.isShared()); if (!inPlace) { - assertTrue(m, expected != largec1 || expected.isShared()); // none of the operations are - // inplace. + assertTrue(m, expected != largec1 || expected.isShared()); // none of the operations are inplace. } assertTrue(m, expected.sameContents(result)); result = (inPlace ? op.apply(c2.deepCopy(), c1) : op.apply(c2, c1)).check(); assertTrue(m, result != c1 || result.isShared() || result instanceof ImmutableContainer); if (!inPlace) { - assertTrue(m, - result != c2 || result.isShared() || result instanceof ImmutableContainer); + assertTrue(m, result != c2 || result.isShared() || result instanceof ImmutableContainer); } - expected = - (inPlace ? op.apply(largec2.deepCopy(), largec1) : op.apply(largec2, largec1)).check(); + expected = (inPlace ? op.apply(largec2.deepCopy(), largec1) : op.apply(largec2, largec1)).check(); assertTrue(m, expected != largec1 || expected.isShared()); if (!inPlace) { - assertTrue(m, expected != largec2 || expected.isShared()); // none of the operations are - // inplace. + assertTrue(m, expected != largec2 || expected.isShared()); // none of the operations are inplace. 
} assertTrue(m, expected.sameContents(result)); } @@ -206,7 +195,7 @@ private static void testBooleanOp(final int i, final Random rand, final BooleanO } private static void testBooleanOp( - final int i, final Random rand, final BooleanOps ops, final int min, final int max) { + final int i, final Random rand, final BooleanOps ops, final int min, final int max) { final String m = "i==" + i + " && min==" + min + " && max==" + max; final Container c1 = randomSmallContainer(rand, false, min, max); final boolean asLargeContainer = rand.nextBoolean(); @@ -335,8 +324,7 @@ public Container expectedOp(final Container c, final int start, final int end) { private static void testRangeOp(final RangeOps ops) { final Random rand = new Random(seed0); - final int[][] minmaxes = - new int[][] {new int[] {0, tdelta}, new int[] {65535 - tdelta, 65535}}; + final int[][] minmaxes = new int[][] {new int[] {0, tdelta}, new int[] {65535 - tdelta, 65535}}; for (int run = 0; run < numRuns; ++run) { final String m = "run==" + run; for (int[] minmax : minmaxes) { @@ -374,8 +362,7 @@ public Container expectedOp(final Container c, final int start, final int end) { private static void testRankRangeOp(final RangeOps ops) { final Random rand = new Random(seed0); - final int[][] minmaxes = - new int[][] {new int[] {0, tdelta}, new int[] {65535 - tdelta, 65535}}; + final int[][] minmaxes = new int[][] {new int[] {0, tdelta}, new int[] {65535 - tdelta, 65535}}; for (int run = 0; run < numRuns; ++run) { final String m = "run==" + run; for (int[] minmax : minmaxes) { @@ -433,8 +420,7 @@ public boolean expectedOp(final Container c, final int start, final int end) { private static void testRangeBooleanOp(final RangeBooleanOps ops) { final Random rand = new Random(seed0); - final int[][] minmaxes = - new int[][] {new int[] {0, tdelta}, new int[] {65535 - tdelta, 65535}}; + final int[][] minmaxes = new int[][] {new int[] {0, tdelta}, new int[] {65535 - tdelta, 65535}}; for (int run = 0; run < numRuns; 
++run) { final String m = "run==" + run; for (int[] minmax : minmaxes) { @@ -507,8 +493,7 @@ public Container expectedOp(final Container c, final int v) { private static void testValueOp(final ValueOps ops) { final Random rand = new Random(seed0); - final int[][] minmaxes = - new int[][] {new int[] {0, tdelta}, new int[] {65535 - tdelta, 65535}}; + final int[][] minmaxes = new int[][] {new int[] {0, tdelta}, new int[] {65535 - tdelta, 65535}}; for (int run = 0; run < numRuns; ++run) { final String m = "run==" + run; for (int[] minmax : minmaxes) { @@ -551,8 +536,7 @@ public boolean expectedOp(final Container c, final int v) { private static void testValueBooleanOp(final ValueBooleanOps ops) { final Random rand = new Random(seed0); - final int[][] minmaxes = - new int[][] {new int[] {0, tdelta}, new int[] {65535 - tdelta, 65535}}; + final int[][] minmaxes = new int[][] {new int[] {0, tdelta}, new int[] {65535 - tdelta, 65535}}; for (int run = 0; run < numRuns; ++run) { final String m = "run==" + run; for (int[] minmax : minmaxes) { @@ -574,8 +558,7 @@ private static void testValueBooleanOp(final ValueBooleanOps ops) { @Test public void testAppend() { final Random rand = new Random(seed0); - final int[][] minmaxes = - new int[][] {new int[] {0, tdelta}, new int[] {65535 - tdelta, 65534}}; + final int[][] minmaxes = new int[][] {new int[] {0, tdelta}, new int[] {65535 - tdelta, 65534}}; for (int run = 0; run < numRuns; ++run) { final String m = "run==" + run; for (int[] minmax : minmaxes) { @@ -589,8 +572,7 @@ public void testAppend() { final int first = randValueInRange(rand, rmin, rmax); final int last = randValueInRange(rand, first, rmax); final Container result = c.iappend(first, last + 1).check(); - final Container expected = - c.toLargeContainer().iappend(first, last + 1).check(); + final Container expected = c.toLargeContainer().iappend(first, last + 1).check(); assertEquals(m3, expected.getCardinality(), result.getCardinality()); assertTrue(m3, 
expected.subsetOf(result)); } @@ -670,8 +652,7 @@ public int expectedOp(final Container c, final int v) { private static void testValueIntOp(final ValueIntOps ops) { final Random rand = new Random(seed0); - final int[][] minmaxes = - new int[][] {new int[] {0, tdelta}, new int[] {65535 - tdelta, 65535}}; + final int[][] minmaxes = new int[][] {new int[] {0, tdelta}, new int[] {65535 - tdelta, 65535}}; for (int run = 0; run < numRuns; ++run) { final String m = "run==" + run; for (int[] minmax : minmaxes) { diff --git a/Container/src/test/java/io/deephaven/db/v2/utils/rsp/container/TestTwoValuesContainer.java b/Container/src/test/java/io/deephaven/db/v2/utils/rsp/container/TestTwoValuesContainer.java index c8612f819a5..14c9af1e17d 100644 --- a/Container/src/test/java/io/deephaven/db/v2/utils/rsp/container/TestTwoValuesContainer.java +++ b/Container/src/test/java/io/deephaven/db/v2/utils/rsp/container/TestTwoValuesContainer.java @@ -214,9 +214,9 @@ public void accept(final int begin, final int end) { } }; assertFalse(tv.findRanges( - consumer, - tv.getShortRangeIterator(0), - 2)); + consumer, + tv.getShortRangeIterator(0), + 2)); assertEquals(0, ranges[0]); assertEquals(2, ranges[1]); } @@ -319,8 +319,7 @@ public void testOrCaseRegression0() { final TwoValuesContainer tv1 = new TwoValuesContainer((short) v2, (short) v3); final Container c = tv0.or(tv1); assertEquals(4, c.getCardinality()); - final Container expected = - new ArrayContainer(new short[] {(short) v0, (short) v2, (short) v3, (short) v1}); + final Container expected = new ArrayContainer(new short[] {(short) v0, (short) v2, (short) v3, (short) v1}); assertTrue(expected.sameContents(c)); } diff --git a/DB-test/src/test/java/io/deephaven/db/tables/select/PythonMatchFilterTest.java b/DB-test/src/test/java/io/deephaven/db/tables/select/PythonMatchFilterTest.java index 99db8a123eb..e8c85c9fc44 100644 --- a/DB-test/src/test/java/io/deephaven/db/tables/select/PythonMatchFilterTest.java +++ 
b/DB-test/src/test/java/io/deephaven/db/tables/select/PythonMatchFilterTest.java @@ -31,8 +31,7 @@ public class PythonMatchFilterTest extends PythonTest { public void setUp() { if (ProcessEnvironment.tryGet() == null) { ProcessEnvironment.basicInteractiveProcessInitialization(Configuration.getInstance(), - PythonMatchFilterTest.class.getCanonicalName(), - new StreamLoggerImpl(System.out, LogLevel.INFO)); + PythonMatchFilterTest.class.getCanonicalName(), new StreamLoggerImpl(System.out, LogLevel.INFO)); } } @@ -45,8 +44,8 @@ public void testIntMatch() { SelectFilter filter = SelectFilterFactory.getExpression("ival in iii"); assertEquals(MatchFilter.class, filter.getClass()); - TableDefinition tableDef = new TableDefinition(Collections.singletonList(int.class), - Collections.singletonList("ival")); + TableDefinition tableDef = + new TableDefinition(Collections.singletonList(int.class), Collections.singletonList("ival")); filter.init(tableDef); Object[] values = ((MatchFilter) filter).getValues(); // System.out.println(Arrays.toString(values)); @@ -64,8 +63,8 @@ public void testStrMatch() { SelectFilter filter = SelectFilterFactory.getExpression("sval in ss"); assertEquals(MatchFilter.class, filter.getClass()); - TableDefinition tableDef = new TableDefinition(Collections.singletonList(String.class), - Collections.singletonList("sval")); + TableDefinition tableDef = + new TableDefinition(Collections.singletonList(String.class), Collections.singletonList("sval")); filter.init(tableDef); Object[] values = ((MatchFilter) filter).getValues(); // System.out.println(Arrays.toString(values)); diff --git a/DB-test/src/test/java/io/deephaven/db/util/TestWorkerPythonEnvironment.java b/DB-test/src/test/java/io/deephaven/db/util/TestWorkerPythonEnvironment.java index bb77b97e27e..888ff663f79 100644 --- a/DB-test/src/test/java/io/deephaven/db/util/TestWorkerPythonEnvironment.java +++ b/DB-test/src/test/java/io/deephaven/db/util/TestWorkerPythonEnvironment.java @@ -27,8 +27,8 @@ 
public void setUp() throws Exception { super.setUp(); if (ProcessEnvironment.tryGet() == null) { ProcessEnvironment.basicInteractiveProcessInitialization(Configuration.getInstance(), - TestWorkerPythonEnvironment.class.getCanonicalName(), - new StreamLoggerImpl(System.out, LogLevel.INFO)); + TestWorkerPythonEnvironment.class.getCanonicalName(), + new StreamLoggerImpl(System.out, LogLevel.INFO)); } LiveTableMonitor.DEFAULT.enableUnitTestMode(); LiveTableMonitor.DEFAULT.resetForUnitTests(false); @@ -53,8 +53,7 @@ public void testTimeTable() throws IOException { } public void testEmptyTable() throws IOException { - WorkerPythonEnvironment.DEFAULT - .eval("TableTools = jpy.get_type(\"io.deephaven.db.tables.utils.TableTools\")"); + WorkerPythonEnvironment.DEFAULT.eval("TableTools = jpy.get_type(\"io.deephaven.db.tables.utils.TableTools\")"); WorkerPythonEnvironment.DEFAULT.eval("et = TableTools.emptyTable(2).update(\"A=k\")"); Object result = WorkerPythonEnvironment.DEFAULT.getValue("et"); assertTrue(result instanceof Table); @@ -63,10 +62,8 @@ public void testEmptyTable() throws IOException { } public void testUpdateList() throws IOException { - WorkerPythonEnvironment.DEFAULT - .eval("TableTools = jpy.get_type(\"io.deephaven.db.tables.utils.TableTools\")"); - WorkerPythonEnvironment.DEFAULT - .eval("et = TableTools.emptyTable(2).update([\"A=k\", \"B=i*2\"])"); + WorkerPythonEnvironment.DEFAULT.eval("TableTools = jpy.get_type(\"io.deephaven.db.tables.utils.TableTools\")"); + WorkerPythonEnvironment.DEFAULT.eval("et = TableTools.emptyTable(2).update([\"A=k\", \"B=i*2\"])"); Object result = WorkerPythonEnvironment.DEFAULT.getValue("et"); assertTrue(result instanceof Table); Table et = (Table) result; @@ -74,10 +71,8 @@ public void testUpdateList() throws IOException { } public void testUpdateVarArgs() throws IOException { - WorkerPythonEnvironment.DEFAULT - .eval("TableTools = jpy.get_type(\"io.deephaven.db.tables.utils.TableTools\")"); - WorkerPythonEnvironment.DEFAULT 
- .eval("et = TableTools.emptyTable(2).update(\"A=k\", \"B=i*2\")"); + WorkerPythonEnvironment.DEFAULT.eval("TableTools = jpy.get_type(\"io.deephaven.db.tables.utils.TableTools\")"); + WorkerPythonEnvironment.DEFAULT.eval("et = TableTools.emptyTable(2).update(\"A=k\", \"B=i*2\")"); Object result = WorkerPythonEnvironment.DEFAULT.getValue("et"); assertTrue(result instanceof Table); Table et = (Table) result; diff --git a/DB-test/src/test/java/io/deephaven/db/v2/select/TestConditionFilter.java b/DB-test/src/test/java/io/deephaven/db/v2/select/TestConditionFilter.java index d9561e24aa0..b7b6fa6f3f6 100644 --- a/DB-test/src/test/java/io/deephaven/db/v2/select/TestConditionFilter.java +++ b/DB-test/src/test/java/io/deephaven/db/v2/select/TestConditionFilter.java @@ -39,14 +39,12 @@ public class TestConditionFilter extends PythonTest { static { if (ProcessEnvironment.tryGet() == null) { ProcessEnvironment.basicInteractiveProcessInitialization(Configuration.getInstance(), - TestConditionFilter.class.getCanonicalName(), - new StreamLoggerImpl(System.out, LogLevel.INFO)); + TestConditionFilter.class.getCanonicalName(), new StreamLoggerImpl(System.out, LogLevel.INFO)); } } - private static final boolean ENABLE_COMPILER_TOOLS_LOGGING = - Configuration.getInstance().getBooleanForClassWithDefault(TestConditionFilter.class, - "CompilerTools.logEnabled", false); + private static final boolean ENABLE_COMPILER_TOOLS_LOGGING = Configuration.getInstance() + .getBooleanForClassWithDefault(TestConditionFilter.class, "CompilerTools.logEnabled", false); private final Table testDataTable; private boolean compilerToolsLogEnabledInitial = false; @@ -62,8 +60,7 @@ public TestConditionFilter() { public void setUp() throws Exception { if (ProcessEnvironment.tryGet() == null) { ProcessEnvironment.basicInteractiveProcessInitialization(Configuration.getInstance(), - PythonMatchFilterTest.class.getCanonicalName(), - new StreamLoggerImpl(System.out, LogLevel.INFO)); + 
PythonMatchFilterTest.class.getCanonicalName(), new StreamLoggerImpl(System.out, LogLevel.INFO)); } compilerToolsLogEnabledInitial = CompilerTools.setLogEnabled(ENABLE_COMPILER_TOOLS_LOGGING); } @@ -105,7 +102,7 @@ public void testObjectConstruction() { public void testRuntimeException() { checkExpectingEvaluationException("((Boolean) null)", "NullPointerException"); checkExpectingEvaluationException("Integer.parseInt(\"this is not an integer\") != null", - "NumberFormatException"); + "NumberFormatException"); } @Test @@ -117,9 +114,8 @@ public void testBadExpressionType() { @Test public void testMiscCompilationExceptions() { checkExpectingCompilationException("nonExistentVariableOrClass", - "Cannot find variable or class nonExistentVariableOrClass"); - checkExpectingCompilationException("Integer.noSuchMethod()", - "Cannot find method noSuchMethod()"); + "Cannot find variable or class nonExistentVariableOrClass"); + checkExpectingCompilationException("Integer.noSuchMethod()", "Cannot find method noSuchMethod()"); } @Test @@ -141,128 +137,128 @@ public void testComparison() { { // LESS THAN expression = "myShortObj < ShortCol"; test = (colValues) -> DBLanguageFunctionUtil.less( - DBLanguageFunctionUtil.shortCast(QUERYSCOPE_OBJ_BASE_VALUE), - DBLanguageFunctionUtil.shortCast(colValues.get("ShortCol"))); + DBLanguageFunctionUtil.shortCast(QUERYSCOPE_OBJ_BASE_VALUE), + DBLanguageFunctionUtil.shortCast(colValues.get("ShortCol"))); check(expression, test, true, false); expression = "myIntObj < IntCol"; test = (colValues) -> DBLanguageFunctionUtil.less( - QUERYSCOPE_OBJ_BASE_VALUE, - DBLanguageFunctionUtil.intCast(colValues.get("IntCol"))); + QUERYSCOPE_OBJ_BASE_VALUE, + DBLanguageFunctionUtil.intCast(colValues.get("IntCol"))); check(expression, test, true); expression = "myLongObj < LongCol"; test = (colValues) -> DBLanguageFunctionUtil.less( - DBLanguageFunctionUtil.longCast(QUERYSCOPE_OBJ_BASE_VALUE), - DBLanguageFunctionUtil.longCast(colValues.get("LongCol"))); + 
DBLanguageFunctionUtil.longCast(QUERYSCOPE_OBJ_BASE_VALUE), + DBLanguageFunctionUtil.longCast(colValues.get("LongCol"))); check(expression, test, true); expression = "myFloatObj < FloatCol"; test = (colValues) -> DBLanguageFunctionUtil.less( - DBLanguageFunctionUtil.floatCast(QUERYSCOPE_OBJ_BASE_VALUE), - DBLanguageFunctionUtil.floatCast(colValues.get("FloatCol"))); + DBLanguageFunctionUtil.floatCast(QUERYSCOPE_OBJ_BASE_VALUE), + DBLanguageFunctionUtil.floatCast(colValues.get("FloatCol"))); check(expression, test, true); expression = "myDoubleObj < DoubleCol"; test = (colValues) -> DBLanguageFunctionUtil.less( - DBLanguageFunctionUtil.doubleCast(QUERYSCOPE_OBJ_BASE_VALUE), - DBLanguageFunctionUtil.doubleCast(colValues.get("DoubleCol"))); + DBLanguageFunctionUtil.doubleCast(QUERYSCOPE_OBJ_BASE_VALUE), + DBLanguageFunctionUtil.doubleCast(colValues.get("DoubleCol"))); check(expression, test, true); } { // GREATER THAN expression = "myShortObj > ShortCol"; test = (colValues) -> DBLanguageFunctionUtil.greater( - DBLanguageFunctionUtil.shortCast(QUERYSCOPE_OBJ_BASE_VALUE), - DBLanguageFunctionUtil.shortCast(colValues.get("ShortCol"))); + DBLanguageFunctionUtil.shortCast(QUERYSCOPE_OBJ_BASE_VALUE), + DBLanguageFunctionUtil.shortCast(colValues.get("ShortCol"))); check(expression, test, true); expression = "myIntObj > IntCol"; test = (colValues) -> DBLanguageFunctionUtil.greater( - QUERYSCOPE_OBJ_BASE_VALUE, - DBLanguageFunctionUtil.intCast(colValues.get("IntCol"))); + QUERYSCOPE_OBJ_BASE_VALUE, + DBLanguageFunctionUtil.intCast(colValues.get("IntCol"))); check(expression, test, true); expression = "myLongObj > LongCol"; test = (colValues) -> DBLanguageFunctionUtil.greater( - DBLanguageFunctionUtil.longCast(QUERYSCOPE_OBJ_BASE_VALUE), - DBLanguageFunctionUtil.longCast(colValues.get("LongCol"))); + DBLanguageFunctionUtil.longCast(QUERYSCOPE_OBJ_BASE_VALUE), + DBLanguageFunctionUtil.longCast(colValues.get("LongCol"))); check(expression, test, true); expression = "myFloatObj > 
FloatCol"; test = (colValues) -> DBLanguageFunctionUtil.greater( - DBLanguageFunctionUtil.floatCast(QUERYSCOPE_OBJ_BASE_VALUE), - DBLanguageFunctionUtil.floatCast(colValues.get("FloatCol"))); + DBLanguageFunctionUtil.floatCast(QUERYSCOPE_OBJ_BASE_VALUE), + DBLanguageFunctionUtil.floatCast(colValues.get("FloatCol"))); check(expression, test, true); expression = "myDoubleObj > DoubleCol"; test = (colValues) -> DBLanguageFunctionUtil.greater( - DBLanguageFunctionUtil.doubleCast(QUERYSCOPE_OBJ_BASE_VALUE), - DBLanguageFunctionUtil.doubleCast(colValues.get("DoubleCol"))); + DBLanguageFunctionUtil.doubleCast(QUERYSCOPE_OBJ_BASE_VALUE), + DBLanguageFunctionUtil.doubleCast(colValues.get("DoubleCol"))); check(expression, test, true); } { // EQUAL expression = "myShortObj == ShortCol"; test = (colValues) -> DBLanguageFunctionUtil.eq( - DBLanguageFunctionUtil.shortCast(QUERYSCOPE_OBJ_BASE_VALUE), - DBLanguageFunctionUtil.shortCast(colValues.get("ShortCol"))); + DBLanguageFunctionUtil.shortCast(QUERYSCOPE_OBJ_BASE_VALUE), + DBLanguageFunctionUtil.shortCast(colValues.get("ShortCol"))); check(expression, test, true); expression = "myIntObj == IntCol"; test = (colValues) -> DBLanguageFunctionUtil.eq( - QUERYSCOPE_OBJ_BASE_VALUE, - DBLanguageFunctionUtil.intCast(colValues.get("IntCol"))); + QUERYSCOPE_OBJ_BASE_VALUE, + DBLanguageFunctionUtil.intCast(colValues.get("IntCol"))); check(expression, test, true); expression = "myLongObj == LongCol"; test = (colValues) -> DBLanguageFunctionUtil.eq( - DBLanguageFunctionUtil.longCast(QUERYSCOPE_OBJ_BASE_VALUE), - DBLanguageFunctionUtil.longCast(colValues.get("LongCol"))); + DBLanguageFunctionUtil.longCast(QUERYSCOPE_OBJ_BASE_VALUE), + DBLanguageFunctionUtil.longCast(colValues.get("LongCol"))); check(expression, test, true); expression = "myFloatObj == FloatCol"; test = (colValues) -> DBLanguageFunctionUtil.eq( - DBLanguageFunctionUtil.floatCast(QUERYSCOPE_OBJ_BASE_VALUE), - DBLanguageFunctionUtil.floatCast(colValues.get("FloatCol"))); + 
DBLanguageFunctionUtil.floatCast(QUERYSCOPE_OBJ_BASE_VALUE), + DBLanguageFunctionUtil.floatCast(colValues.get("FloatCol"))); check(expression, test, true); expression = "myDoubleObj == DoubleCol"; test = (colValues) -> DBLanguageFunctionUtil.eq( - DBLanguageFunctionUtil.doubleCast(QUERYSCOPE_OBJ_BASE_VALUE), - DBLanguageFunctionUtil.doubleCast(colValues.get("DoubleCol"))); + DBLanguageFunctionUtil.doubleCast(QUERYSCOPE_OBJ_BASE_VALUE), + DBLanguageFunctionUtil.doubleCast(colValues.get("DoubleCol"))); check(expression, test, true); } { // NOT EQUAL expression = "myShortObj != ShortCol"; test = (colValues) -> !DBLanguageFunctionUtil.eq( - DBLanguageFunctionUtil.shortCast(QUERYSCOPE_OBJ_BASE_VALUE), - DBLanguageFunctionUtil.shortCast(colValues.get("ShortCol"))); + DBLanguageFunctionUtil.shortCast(QUERYSCOPE_OBJ_BASE_VALUE), + DBLanguageFunctionUtil.shortCast(colValues.get("ShortCol"))); check(expression, test, true); expression = "myIntObj != IntCol"; test = (colValues) -> !DBLanguageFunctionUtil.eq( - QUERYSCOPE_OBJ_BASE_VALUE, - DBLanguageFunctionUtil.intCast(colValues.get("IntCol"))); + QUERYSCOPE_OBJ_BASE_VALUE, + DBLanguageFunctionUtil.intCast(colValues.get("IntCol"))); check(expression, test, true); expression = "myLongObj != LongCol"; test = (colValues) -> !DBLanguageFunctionUtil.eq( - DBLanguageFunctionUtil.longCast(QUERYSCOPE_OBJ_BASE_VALUE), - DBLanguageFunctionUtil.longCast(colValues.get("LongCol"))); + DBLanguageFunctionUtil.longCast(QUERYSCOPE_OBJ_BASE_VALUE), + DBLanguageFunctionUtil.longCast(colValues.get("LongCol"))); check(expression, test, true); expression = "myFloatObj != FloatCol"; test = (colValues) -> !DBLanguageFunctionUtil.eq( - DBLanguageFunctionUtil.floatCast(QUERYSCOPE_OBJ_BASE_VALUE), - DBLanguageFunctionUtil.floatCast(colValues.get("FloatCol"))); + DBLanguageFunctionUtil.floatCast(QUERYSCOPE_OBJ_BASE_VALUE), + DBLanguageFunctionUtil.floatCast(colValues.get("FloatCol"))); check(expression, test, true); expression = "myDoubleObj != 
DoubleCol"; test = (colValues) -> !DBLanguageFunctionUtil.eq( - DBLanguageFunctionUtil.doubleCast(QUERYSCOPE_OBJ_BASE_VALUE), - DBLanguageFunctionUtil.doubleCast(colValues.get("DoubleCol"))); + DBLanguageFunctionUtil.doubleCast(QUERYSCOPE_OBJ_BASE_VALUE), + DBLanguageFunctionUtil.doubleCast(colValues.get("DoubleCol"))); check(expression, test, true); } } @@ -280,10 +276,10 @@ public void testLoadNumpyTwice() { @Test public void testPython() { PyObject.executeCode("from numba.npyufunc import vectorize\n" + - "@vectorize\n" + - "def testf(a, b, c):\n" + - " return a + b < c\n" + - "\n", PyInputMode.SCRIPT); + "@vectorize\n" + + "def testf(a, b, c):\n" + + " return a + b < c\n" + + "\n", PyInputMode.SCRIPT); check("testf(IntCol,IntCol*2,IntCol+2)", m -> { Integer ic = (Integer) m.get("IntCol"); @@ -325,8 +321,8 @@ public void testIIIK() { /** - * Ensure that a {@link ConditionFilter} with the given {@code expression} - * {@link #testDataTable} filtered by a ConditionF + * Ensure that a {@link ConditionFilter} with the given {@code expression} {@link #testDataTable} filtered by a + * ConditionF * * @param expression the conditional expression to check * @param testPredicate the predicate over a map of column values to compare with the expression @@ -335,26 +331,25 @@ private void check(String expression, Predicate> testPredica check(expression, testPredicate, false, true); } - private void check(String expression, Predicate> testPredicate, - boolean testPython) { + private void check(String expression, Predicate> testPredicate, boolean testPython) { check(expression, testPredicate, testPython, true); } - private void check(String expression, Predicate> testPredicate, - boolean testPython, boolean testNative) { + private void check(String expression, Predicate> testPredicate, boolean testPython, + boolean testNative) { final Index.SequentialBuilder keepBuilder = Index.FACTORY.getSequentialBuilder(); final Index.SequentialBuilder dropBuilder = 
Index.FACTORY.getSequentialBuilder(); - final Map sourcesMap = testDataTable - .updateView("actualI = i", "actualII = ii", "actualK = k").getColumnSourceMap(); + final Map sourcesMap = + testDataTable.updateView("actualI = i", "actualII = ii", "actualK = k").getColumnSourceMap(); for (final Index.Iterator it = testDataTable.getIndex().iterator(); it.hasNext();) { final long idx = it.nextLong(); final Map rowMap = new HashMap<>(sourcesMap.size()); for (Map.Entry entry : sourcesMap.entrySet()) { rowMap.put( - entry.getKey(), - entry.getValue().get(idx)); + entry.getKey(), + entry.getValue().get(idx)); } if (testPredicate.test(rowMap)) { keepBuilder.appendKey(idx); @@ -374,7 +369,7 @@ private void check(String expression, Predicate> testPredica try { if (pythonScope == null) { pythonScope = new PythonDeephavenSession(new PythonScopeJpyImpl( - getMainGlobals().asDict())).newQueryScope(); + getMainGlobals().asDict())).newQueryScope(); QueryScope.setScope(pythonScope); } for (Param param : currentScope.getParams(currentScope.getParamNames())) { @@ -389,17 +384,13 @@ private void check(String expression, Predicate> testPredica } - private void validate(String expression, Index keepIndex, Index dropIndex, - FormulaParserConfiguration parser) { + private void validate(String expression, Index keepIndex, Index dropIndex, FormulaParserConfiguration parser) { final Index filteredIndex = initCheck(expression, parser); - Require.eq(keepIndex.size(), "keepIndex.size()", filteredIndex.size(), - "filteredIndex.size()"); - Require.eq(keepIndex.intersect(filteredIndex).size(), - "keepIndex.intersect(filteredIndex).size()", filteredIndex.size(), - "filteredIndex.size()"); - Require.eqZero(dropIndex.intersect(filteredIndex).size(), - "dropIndex.intersect(filteredIndex).size()"); + Require.eq(keepIndex.size(), "keepIndex.size()", filteredIndex.size(), "filteredIndex.size()"); + Require.eq(keepIndex.intersect(filteredIndex).size(), "keepIndex.intersect(filteredIndex).size()", + 
filteredIndex.size(), "filteredIndex.size()"); + Require.eqZero(dropIndex.intersect(filteredIndex).size(), "dropIndex.intersect(filteredIndex).size()"); } @@ -409,33 +400,26 @@ private void checkExpectingEvaluationException(String expression, String expecte fail("Should have thrown an exception"); } catch (FormulaEvaluationException ex) { if (!ex.getMessage().contains(expectedCauseMessage) - && !ex.getCause().getMessage().contains(expectedCauseMessage)) // check the cause, - // since all - // exceptions during - // filter evaluation - // are caught + && !ex.getCause().getMessage().contains(expectedCauseMessage)) // check the cause, since all + // exceptions during filter + // evaluation are caught { - fail("Useless exception message!\nOriginal exception:\n" - + ExceptionUtils.getStackTrace(ex)); + fail("Useless exception message!\nOriginal exception:\n" + ExceptionUtils.getStackTrace(ex)); } } } - private void checkExpectingCompilationException(String expression, - String expectedCauseMessage) { + private void checkExpectingCompilationException(String expression, String expectedCauseMessage) { try { initCheck(expression, FormulaParserConfiguration.Deephaven); fail("Should have thrown an exception"); } catch (FormulaCompilationException ex) { if (!ex.getMessage().contains(expectedCauseMessage) - && !ex.getCause().getMessage().contains(expectedCauseMessage)) // check the cause, - // since all - // exceptions during - // filter init are - // caught + && !ex.getCause().getMessage().contains(expectedCauseMessage)) // check the cause, since all + // exceptions during filter init are + // caught { - fail("Useless exception message!\nOriginal exception:\n" - + ExceptionUtils.getStackTrace(ex)); + fail("Useless exception message!\nOriginal exception:\n" + ExceptionUtils.getStackTrace(ex)); } } @@ -443,11 +427,9 @@ private void checkExpectingCompilationException(String expression, private Index initCheck(String expression, FormulaParserConfiguration parser) { - final 
SelectFilter conditionFilter = - ConditionFilter.createConditionFilter(expression, parser); + final SelectFilter conditionFilter = ConditionFilter.createConditionFilter(expression, parser); conditionFilter.init(testDataTable.getDefinition()); - return conditionFilter.filter(testDataTable.getIndex().clone(), testDataTable.getIndex(), - testDataTable, false); + return conditionFilter.filter(testDataTable.getIndex().clone(), testDataTable.getIndex(), testDataTable, false); } } diff --git a/DB/benchmark/com/illumon/iris/db/v2/utils/SmallIndexCreation.java b/DB/benchmark/com/illumon/iris/db/v2/utils/SmallIndexCreation.java index 94bccc49613..da6666408b1 100644 --- a/DB/benchmark/com/illumon/iris/db/v2/utils/SmallIndexCreation.java +++ b/DB/benchmark/com/illumon/iris/db/v2/utils/SmallIndexCreation.java @@ -40,7 +40,7 @@ public class SmallIndexCreation { private static final TIntHashSet workSet = new TIntHashSet(); private static void populateRandomBlockValues( - final Random random, final int[] blockValues, final int valuesPerBlock) { + final Random random, final int[] blockValues, final int valuesPerBlock) { workSet.clear(); while (workSet.size() < valuesPerBlock) { workSet.add(random.nextInt(RspArray.BLOCK_SIZE)); @@ -142,8 +142,8 @@ public void fillAnArrayOfSameSizeOneElementAtATime(Blackhole bh) { public static void main(String[] args) throws RunnerException { Options opt = new OptionsBuilder() - .include(SmallIndexCreation.class.getSimpleName()) - .build(); + .include(SmallIndexCreation.class.getSimpleName()) + .build(); new Runner(opt).run(); } diff --git a/DB/benchmark/io/deephaven/benchmark/db/AjBenchmark.java b/DB/benchmark/io/deephaven/benchmark/db/AjBenchmark.java index b018cfaf0ad..691584b9495 100644 --- a/DB/benchmark/io/deephaven/benchmark/db/AjBenchmark.java +++ b/DB/benchmark/io/deephaven/benchmark/db/AjBenchmark.java @@ -66,18 +66,17 @@ public void setupEnv(BenchmarkParams params) { case "Historical": case "HistoricalOrdered": rightBuilder = 
BenchmarkTools.persistentTableBuilder("Carlos", rightSize) - .setPartitioningFormula("${autobalance_single}") - .setPartitionCount(10); + .setPartitioningFormula("${autobalance_single}") + .setPartitionCount(10); leftBuilder = BenchmarkTools.persistentTableBuilder("Karl", leftSize) - .setPartitioningFormula("${autobalance_single}") - .setPartitionCount(10); + .setPartitioningFormula("${autobalance_single}") + .setPartitionCount(10); break; case "Intraday": rightBuilder = BenchmarkTools.persistentTableBuilder("Carlos", rightSize); leftBuilder = BenchmarkTools.persistentTableBuilder("Karl", leftSize); if (grouped) { - throw new UnsupportedOperationException( - "Can not run this benchmark combination."); + throw new UnsupportedOperationException("Can not run this benchmark combination."); } break; @@ -85,14 +84,11 @@ public void setupEnv(BenchmarkParams params) { throw new IllegalStateException("Table type must be Historical or Intraday"); } - rightBuilder.setSeed(0xDEADBEEF) - .addColumn(BenchmarkTools.stringCol("PartCol", 1, 5, 7, 0xFEEDBEEF)); - leftBuilder.setSeed(0xDEADBEEF) - .addColumn(BenchmarkTools.stringCol("PartCol", 1, 5, 7, 0xFEEDBEEF)); + rightBuilder.setSeed(0xDEADBEEF).addColumn(BenchmarkTools.stringCol("PartCol", 1, 5, 7, 0xFEEDBEEF)); + leftBuilder.setSeed(0xDEADBEEF).addColumn(BenchmarkTools.stringCol("PartCol", 1, 5, 7, 0xFEEDBEEF)); if (buckets > 0) { - final ColumnGenerator intJoinKey = - BenchmarkTools.numberCol("JInt", int.class, 0, buckets); + final ColumnGenerator intJoinKey = BenchmarkTools.numberCol("JInt", int.class, 0, buckets); rightBuilder.addColumn(intJoinKey); leftBuilder.addColumn(intJoinKey); @@ -101,13 +97,12 @@ public void setupEnv(BenchmarkParams params) { joinKeyName = "InvalidKeyWithoutBuckets"; } - final ColumnGenerator rightStampColumn = - BenchmarkTools.seqNumberCol("RightStamp", int.class, 0, 10); + final ColumnGenerator rightStampColumn = BenchmarkTools.seqNumberCol("RightStamp", int.class, 0, 10); final ColumnGenerator 
leftStampColumn; if (tableType.equals("HistoricalOrdered")) { - leftStampColumn = new FuzzyNumColumnGenerator<>(int.class, "LeftStamp", 0, - (rightSize * 10) / (double) leftSize, rightSize * 10, 0.01, - SequentialNumColumnGenerator.Mode.NoLimit); + leftStampColumn = + new FuzzyNumColumnGenerator<>(int.class, "LeftStamp", 0, (rightSize * 10) / (double) leftSize, + rightSize * 10, 0.01, SequentialNumColumnGenerator.Mode.NoLimit); } else { leftStampColumn = BenchmarkTools.numberCol("LeftStamp", int.class, 0, rightSize * 10); } @@ -118,17 +113,16 @@ public void setupEnv(BenchmarkParams params) { } bmRight = rightBuilder - .addColumn(rightStampColumn) - .addColumn(BenchmarkTools.numberCol("RightSentinel", long.class)) - .build(); + .addColumn(rightStampColumn) + .addColumn(BenchmarkTools.numberCol("RightSentinel", long.class)) + .build(); bmLeft = leftBuilder - .addColumn(leftStampColumn) - .addColumn(BenchmarkTools.numberCol("LeftSentinel", long.class)) - .build(); + .addColumn(leftStampColumn) + .addColumn(BenchmarkTools.numberCol("LeftSentinel", long.class)) + .build(); - state = new TableBenchmarkState(BenchmarkTools.stripName(params.getBenchmark()), - params.getWarmup().getCount()); + state = new TableBenchmarkState(BenchmarkTools.stripName(params.getBenchmark()), params.getWarmup().getCount()); rightTable = bmRight.getTable().coalesce().dropColumns("PartCol"); leftTable = bmLeft.getTable().coalesce().dropColumns("PartCol"); @@ -172,8 +166,8 @@ public Table ajStatic(Blackhole bh) { if (buckets == 0) { throw new UnsupportedOperationException("Buckets must be positive!"); } - final Table result = LiveTableMonitor.DEFAULT.sharedLock().computeLocked( - () -> leftTable.aj(rightTable, joinKeyName + ",LeftStamp=RightStamp", "RightSentinel")); + final Table result = LiveTableMonitor.DEFAULT.sharedLock() + .computeLocked(() -> leftTable.aj(rightTable, joinKeyName + ",LeftStamp=RightStamp", "RightSentinel")); return doFingerPrint(result, bh); } @@ -183,9 +177,9 @@ public 
Table ajLeftIncremental(Blackhole bh) { throw new UnsupportedOperationException("Buckets must be positive!"); } final Table result = IncrementalBenchmark.incrementalBenchmark( - (lt) -> LiveTableMonitor.DEFAULT.sharedLock().computeLocked( - () -> lt.aj(rightTable, joinKeyName + ",LeftStamp=RightStamp", "RightSentinel")), - leftTable); + (lt) -> LiveTableMonitor.DEFAULT.sharedLock() + .computeLocked(() -> lt.aj(rightTable, joinKeyName + ",LeftStamp=RightStamp", "RightSentinel")), + leftTable); return doFingerPrint(result, bh); } @@ -194,13 +188,10 @@ public Table ajLeftIncrementalSmallSteps(Blackhole bh) { if (buckets == 0) { throw new UnsupportedOperationException("Buckets must be positive!"); } - final Table result = - IncrementalBenchmark - .incrementalBenchmark( - (lt) -> LiveTableMonitor.DEFAULT.sharedLock() - .computeLocked(() -> lt.aj(rightTable, - joinKeyName + ",LeftStamp=RightStamp", "RightSentinel")), - leftTable, 100); + final Table result = IncrementalBenchmark.incrementalBenchmark( + (lt) -> LiveTableMonitor.DEFAULT.sharedLock() + .computeLocked(() -> lt.aj(rightTable, joinKeyName + ",LeftStamp=RightStamp", "RightSentinel")), + leftTable, 100); return doFingerPrint(result, bh); } @@ -209,13 +200,10 @@ public Table ajLeftIncrementalTinySteps(Blackhole bh) { if (buckets == 0) { throw new UnsupportedOperationException("Buckets must be positive!"); } - final Table result = - IncrementalBenchmark - .incrementalBenchmark( - (lt) -> LiveTableMonitor.DEFAULT.sharedLock() - .computeLocked(() -> lt.aj(rightTable, - joinKeyName + ",LeftStamp=RightStamp", "RightSentinel")), - leftTable, 1000); + final Table result = IncrementalBenchmark.incrementalBenchmark( + (lt) -> LiveTableMonitor.DEFAULT.sharedLock() + .computeLocked(() -> lt.aj(rightTable, joinKeyName + ",LeftStamp=RightStamp", "RightSentinel")), + leftTable, 1000); return doFingerPrint(result, bh); } @@ -225,9 +213,9 @@ public Table ajRightIncremental(Blackhole bh) { throw new 
UnsupportedOperationException("Buckets must be positive!"); } final Table result = IncrementalBenchmark.incrementalBenchmark( - (rt) -> LiveTableMonitor.DEFAULT.sharedLock().computeLocked( - () -> leftTable.aj(rt, joinKeyName + ",LeftStamp=RightStamp", "RightSentinel")), - rightTable); + (rt) -> LiveTableMonitor.DEFAULT.sharedLock() + .computeLocked(() -> leftTable.aj(rt, joinKeyName + ",LeftStamp=RightStamp", "RightSentinel")), + rightTable); return doFingerPrint(result, bh); } @@ -237,7 +225,7 @@ public Table ajZkStatic(Blackhole bh) { throw new UnsupportedOperationException("Zero key should have zero buckets!"); } final Table result = LiveTableMonitor.DEFAULT.sharedLock() - .computeLocked(() -> leftTable.aj(rightTable, "LeftStamp=RightStamp", "RightSentinel")); + .computeLocked(() -> leftTable.aj(rightTable, "LeftStamp=RightStamp", "RightSentinel")); return doFingerPrint(result, bh); } @@ -246,10 +234,11 @@ public Table ajZkLeftIncremental(Blackhole bh) { if (buckets != 0) { throw new UnsupportedOperationException("Zero key should have zero buckets!"); } - final Table result = IncrementalBenchmark.incrementalBenchmark( - (lt) -> LiveTableMonitor.DEFAULT.sharedLock() - .computeLocked(() -> lt.aj(rightTable, "LeftStamp=RightStamp", "RightSentinel")), - leftTable); + final Table result = + IncrementalBenchmark.incrementalBenchmark( + (lt) -> LiveTableMonitor.DEFAULT.sharedLock() + .computeLocked(() -> lt.aj(rightTable, "LeftStamp=RightStamp", "RightSentinel")), + leftTable); return doFingerPrint(result, bh); } @@ -258,10 +247,11 @@ public Table ajZkRightIncremental(Blackhole bh) { if (buckets != 0) { throw new UnsupportedOperationException("Zero key should have zero buckets!"); } - final Table result = IncrementalBenchmark.incrementalBenchmark( - (rt) -> LiveTableMonitor.DEFAULT.sharedLock() - .computeLocked(() -> leftTable.aj(rt, "LeftStamp=RightStamp", "RightSentinel")), - rightTable); + final Table result = + IncrementalBenchmark.incrementalBenchmark( + (rt) -> 
LiveTableMonitor.DEFAULT.sharedLock() + .computeLocked(() -> leftTable.aj(rt, "LeftStamp=RightStamp", "RightSentinel")), + rightTable); return doFingerPrint(result, bh); } @@ -271,9 +261,9 @@ public Table ajZkIncremental(Blackhole bh) { throw new UnsupportedOperationException("Zero key should have zero buckets!"); } final Table result = IncrementalBenchmark.incrementalBenchmark( - (lt, rt) -> LiveTableMonitor.DEFAULT.sharedLock() - .computeLocked(() -> lt.aj(rt, "LeftStamp=RightStamp", "RightSentinel")), - leftTable, rightTable); + (lt, rt) -> LiveTableMonitor.DEFAULT.sharedLock() + .computeLocked(() -> lt.aj(rt, "LeftStamp=RightStamp", "RightSentinel")), + leftTable, rightTable); return doFingerPrint(result, bh); } @@ -283,9 +273,9 @@ public Table ajZkIncrementalStartup(Blackhole bh) { throw new UnsupportedOperationException("Zero key should have zero buckets!"); } final Table result = IncrementalBenchmark.incrementalBenchmark( - (lt, rt) -> LiveTableMonitor.DEFAULT.sharedLock() - .computeLocked(() -> lt.aj(rt, "LeftStamp=RightStamp", "RightSentinel")), - leftTable, rightTable, 0.95, 1); + (lt, rt) -> LiveTableMonitor.DEFAULT.sharedLock() + .computeLocked(() -> lt.aj(rt, "LeftStamp=RightStamp", "RightSentinel")), + leftTable, rightTable, 0.95, 1); return doFingerPrint(result, bh); } @@ -295,9 +285,9 @@ public Table ajZkIncrementalSmallSteps(Blackhole bh) { throw new UnsupportedOperationException("Zero key should have zero buckets!"); } final Table result = IncrementalBenchmark.incrementalBenchmark( - (lt, rt) -> LiveTableMonitor.DEFAULT.sharedLock() - .computeLocked(() -> lt.aj(rt, "LeftStamp=RightStamp", "RightSentinel")), - leftTable, rightTable, 0.1, 100); + (lt, rt) -> LiveTableMonitor.DEFAULT.sharedLock() + .computeLocked(() -> lt.aj(rt, "LeftStamp=RightStamp", "RightSentinel")), + leftTable, rightTable, 0.1, 100); return doFingerPrint(result, bh); } @@ -308,9 +298,9 @@ public Table ajIncrementalSmallSteps(Blackhole bh) { throw new 
UnsupportedOperationException("Buckets must be positive!"); } final Table result = IncrementalBenchmark.incrementalBenchmark( - (lt, rt) -> LiveTableMonitor.DEFAULT.sharedLock().computeLocked( - () -> lt.aj(rt, joinKeyName + ",LeftStamp=RightStamp", "RightSentinel")), - leftTable, rightTable, 0.1, 100); + (lt, rt) -> LiveTableMonitor.DEFAULT.sharedLock() + .computeLocked(() -> lt.aj(rt, joinKeyName + ",LeftStamp=RightStamp", "RightSentinel")), + leftTable, rightTable, 0.1, 100); return doFingerPrint(result, bh); } @@ -320,9 +310,9 @@ public Table ajIncremental(Blackhole bh) { throw new UnsupportedOperationException("Buckets must be positive!"); } final Table result = IncrementalBenchmark.incrementalBenchmark( - (lt, rt) -> LiveTableMonitor.DEFAULT.sharedLock().computeLocked( - () -> lt.aj(rt, joinKeyName + ",LeftStamp=RightStamp", "RightSentinel")), - leftTable, rightTable); + (lt, rt) -> LiveTableMonitor.DEFAULT.sharedLock() + .computeLocked(() -> lt.aj(rt, joinKeyName + ",LeftStamp=RightStamp", "RightSentinel")), + leftTable, rightTable); return doFingerPrint(result, bh); } diff --git a/DB/benchmark/io/deephaven/benchmark/db/ByBenchmark.java b/DB/benchmark/io/deephaven/benchmark/db/ByBenchmark.java index 9bd8ea18473..00a7fd2b134 100644 --- a/DB/benchmark/io/deephaven/benchmark/db/ByBenchmark.java +++ b/DB/benchmark/io/deephaven/benchmark/db/ByBenchmark.java @@ -62,14 +62,13 @@ public void setupEnv(BenchmarkParams params) { switch (tableType) { case "Historical": builder = BenchmarkTools.persistentTableBuilder("Karl", size) - .setPartitioningFormula("${autobalance_single}") - .setPartitionCount(10); + .setPartitioningFormula("${autobalance_single}") + .setPartitionCount(10); break; case "Intraday": builder = BenchmarkTools.persistentTableBuilder("Karl", size); if (grouped) { - throw new UnsupportedOperationException( - "Can not run this benchmark combination."); + throw new UnsupportedOperationException("Can not run this benchmark combination."); } break; @@ 
-77,14 +76,12 @@ public void setupEnv(BenchmarkParams params) { throw new IllegalStateException("Table type must be Historical or Intraday"); } - builder.setSeed(0xDEADBEEF) - .addColumn(BenchmarkTools.stringCol("PartCol", 1, 5, 7, 0xFEEDBEEF)); + builder.setSeed(0xDEADBEEF).addColumn(BenchmarkTools.stringCol("PartCol", 1, 5, 7, 0xFEEDBEEF)); - final EnumStringColumnGenerator stringKey = - (EnumStringColumnGenerator) BenchmarkTools.stringCol("KeyString", keyCount, 6, 6, - 0xB00FB00F, EnumStringColumnGenerator.Mode.Rotate); - final ColumnGenerator intKey = BenchmarkTools.seqNumberCol("KeyInt", int.class, 0, 1, - keyCount, SequentialNumColumnGenerator.Mode.RollAtLimit); + final EnumStringColumnGenerator stringKey = (EnumStringColumnGenerator) BenchmarkTools.stringCol("KeyString", + keyCount, 6, 6, 0xB00FB00F, EnumStringColumnGenerator.Mode.Rotate); + final ColumnGenerator intKey = BenchmarkTools.seqNumberCol("KeyInt", int.class, 0, 1, keyCount, + SequentialNumColumnGenerator.Mode.RollAtLimit); System.out.println("Key type: " + keyType); switch (keyType) { @@ -101,8 +98,7 @@ public void setupEnv(BenchmarkParams params) { builder.addColumn(intKey); keyName = stringKey.getName() + "," + intKey.getName(); if (grouped) { - throw new UnsupportedOperationException( - "Can not run this benchmark combination."); + throw new UnsupportedOperationException("Can not run this benchmark combination."); } break; default: @@ -114,11 +110,10 @@ public void setupEnv(BenchmarkParams params) { } final BenchmarkTable bmt = builder - .addColumn(BenchmarkTools.numberCol("Sentinel", long.class)) - .build(); + .addColumn(BenchmarkTools.numberCol("Sentinel", long.class)) + .build(); - state = new TableBenchmarkState(BenchmarkTools.stripName(params.getBenchmark()), - params.getWarmup().getCount()); + state = new TableBenchmarkState(BenchmarkTools.stripName(params.getBenchmark()), params.getWarmup().getCount()); table = bmt.getTable().coalesce().dropColumns("PartCol"); @@ -155,34 +150,33 @@ 
public void finishIteration(BenchmarkParams params) throws IOException { @Benchmark public Table byStatic(@NotNull final Blackhole bh) { - final Table result = LiveTableMonitor.DEFAULT.sharedLock() - .computeLocked(() -> table.by(keyName.split("[, ]+"))); + final Table result = + LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> table.by(keyName.split("[, ]+"))); bh.consume(result); return state.setResult(TableTools.emptyTable(0)); } @Benchmark public Table byIncremental(@NotNull final Blackhole bh) { - final Table result = - IncrementalBenchmark.incrementalBenchmark((t) -> LiveTableMonitor.DEFAULT.sharedLock() - .computeLocked(() -> t.by(keyName.split("[, ]+"))), table); + final Table result = IncrementalBenchmark.incrementalBenchmark( + (t) -> LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> t.by(keyName.split("[, ]+"))), table); bh.consume(result); return state.setResult(TableTools.emptyTable(0)); } @Benchmark public Table byExternalStatic(@NotNull final Blackhole bh) { - final TableMap result = LiveTableMonitor.DEFAULT.sharedLock() - .computeLocked(() -> table.byExternal(keyName.split("[, ]+"))); + final TableMap result = + LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> table.byExternal(keyName.split("[, ]+"))); bh.consume(result); return state.setResult(TableTools.emptyTable(0)); } @Benchmark public Table byExternalIncremental(@NotNull final Blackhole bh) { - final TableMap result = - IncrementalBenchmark.incrementalBenchmark((t) -> LiveTableMonitor.DEFAULT.sharedLock() - .computeLocked(() -> t.byExternal(keyName.split("[, ]+"))), table); + final TableMap result = IncrementalBenchmark.incrementalBenchmark( + (t) -> LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> t.byExternal(keyName.split("[, ]+"))), + table); bh.consume(result); return state.setResult(TableTools.emptyTable(0)); } diff --git a/DB/benchmark/io/deephaven/benchmark/db/ConditionFilterMultipleColumnsBench.java 
b/DB/benchmark/io/deephaven/benchmark/db/ConditionFilterMultipleColumnsBench.java index 55ea34e5b63..64e7267ad81 100644 --- a/DB/benchmark/io/deephaven/benchmark/db/ConditionFilterMultipleColumnsBench.java +++ b/DB/benchmark/io/deephaven/benchmark/db/ConditionFilterMultipleColumnsBench.java @@ -50,13 +50,12 @@ public void setupEnv(final BenchmarkParams params) { } LiveTableMonitor.DEFAULT.enableUnitTestMode(); - state = new TableBenchmarkState(BenchmarkTools.stripName(params.getBenchmark()), - params.getWarmup().getCount()); + state = new TableBenchmarkState(BenchmarkTools.stripName(params.getBenchmark()), params.getWarmup().getCount()); final BenchmarkTableBuilder builder; final String tPartCol = "TPartCol"; builder = BenchmarkTools.persistentTableBuilder("T", tableSize); builder.setSeed(0xDEADB00F) - .addColumn(BenchmarkTools.stringCol(tPartCol, 4, 5, 7, 0xFEEDBEEF)); + .addColumn(BenchmarkTools.stringCol(tPartCol, 4, 5, 7, 0xFEEDBEEF)); tCols = new String[2 + nFilterCols + nAdditionalCols]; int nT1Cols = 0; tCols[nT1Cols++] = tPartCol; @@ -84,7 +83,7 @@ public void setupEnv(final BenchmarkParams params) { final Table t = bmTable.getTable(); if (doSelect) { inputTable = LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked( - () -> t.select(tCols).sort(sortCol).coalesce()); + () -> t.select(tCols).sort(sortCol).coalesce()); } else { inputTable = t.sort(sortCol).coalesce(); @@ -106,7 +105,7 @@ public void finishTrial() { public void setupInvocation() { final long sizePerStep = Math.max(inputTable.size() / steps, 1); final IncrementalReleaseFilter incrementalReleaseFilter = - new IncrementalReleaseFilter(sizePerStep, sizePerStep); + new IncrementalReleaseFilter(sizePerStep, sizePerStep); final Table inputReleased = inputTable.where(incrementalReleaseFilter); final SelectFilter filter = ConditionFilter.createConditionFilter(filterExpression); diff --git a/DB/benchmark/io/deephaven/benchmark/db/ConditionFilterMultipleColumnsFillChunkBench.java 
b/DB/benchmark/io/deephaven/benchmark/db/ConditionFilterMultipleColumnsFillChunkBench.java index 82a5e1c5867..1d076fbc221 100644 --- a/DB/benchmark/io/deephaven/benchmark/db/ConditionFilterMultipleColumnsFillChunkBench.java +++ b/DB/benchmark/io/deephaven/benchmark/db/ConditionFilterMultipleColumnsFillChunkBench.java @@ -46,7 +46,7 @@ protected QueryData getQuery() { final String tPartCol = "TPartCol"; builder = BenchmarkTools.persistentTableBuilder("T", tableSize); builder.setSeed(0xDEADB00F) - .addColumn(BenchmarkTools.stringCol(tPartCol, 4, 5, 7, 0xFEEDBEEF)); + .addColumn(BenchmarkTools.stringCol(tPartCol, 4, 5, 7, 0xFEEDBEEF)); final String[] tCols = new String[2 + numberOfFilterColumns + numberOfAdditionalColumns]; int nT1Cols = 0; tCols[nT1Cols++] = tPartCol; @@ -73,13 +73,13 @@ protected QueryData getQuery() { final Table inputTable = bmTable.getTable().coalesce(); final long sizePerStep = Math.max(inputTable.size() / steps, 1); final IncrementalReleaseFilter incrementalReleaseFilter = - new IncrementalReleaseFilter(sizePerStep, sizePerStep); + new IncrementalReleaseFilter(sizePerStep, sizePerStep); final Table inputReleased = inputTable.where(incrementalReleaseFilter); final SelectFilter filter = ConditionFilter.createConditionFilter(filterExpression); final Table live = inputReleased.sort(sortCol).where(filter); return new QueryData(live, incrementalReleaseFilter, steps, new String[] {sortCol}, - WritableLongChunk.makeWritableChunk(chunkCapacity)); + WritableLongChunk.makeWritableChunk(chunkCapacity)); } @TearDown(Level.Trial) diff --git a/DB/benchmark/io/deephaven/benchmark/db/IncrementalBenchmark.java b/DB/benchmark/io/deephaven/benchmark/db/IncrementalBenchmark.java index 27ce92f478b..76affc21ffc 100644 --- a/DB/benchmark/io/deephaven/benchmark/db/IncrementalBenchmark.java +++ b/DB/benchmark/io/deephaven/benchmark/db/IncrementalBenchmark.java @@ -12,11 +12,10 @@ import java.util.function.Function; class IncrementalBenchmark { - static R 
incrementalBenchmark(final Function function, final Table inputTable, - final int steps) { + static R incrementalBenchmark(final Function function, final Table inputTable, final int steps) { final long sizePerStep = Math.max(inputTable.size() / steps, 1); final IncrementalReleaseFilter incrementalReleaseFilter = - new IncrementalReleaseFilter(sizePerStep, sizePerStep); + new IncrementalReleaseFilter(sizePerStep, sizePerStep); final Table filtered = inputTable.where(incrementalReleaseFilter); final R result = function.apply(filtered); @@ -30,11 +29,9 @@ static R incrementalBenchmark(final Function function, final Table return result; } - static R rollingBenchmark(final Function function, final Table inputTable, - final int steps) { + static R rollingBenchmark(final Function function, final Table inputTable, final int steps) { final long sizePerStep = Math.max(inputTable.size() / steps, 1); - final RollingReleaseFilter incrementalReleaseFilter = - new RollingReleaseFilter(sizePerStep * 2, sizePerStep); + final RollingReleaseFilter incrementalReleaseFilter = new RollingReleaseFilter(sizePerStep * 2, sizePerStep); final Table filtered = inputTable.where(incrementalReleaseFilter); final R result = function.apply(filtered); @@ -56,13 +53,13 @@ static R rollingBenchmark(final Function function, final Table inp return rollingBenchmark(function, inputTable, 10); } - static R incrementalBenchmark(final BiFunction function, - final Table inputTable1, final Table inputTable2) { + static R incrementalBenchmark(final BiFunction function, final Table inputTable1, + final Table inputTable2) { return incrementalBenchmark(function, inputTable1, inputTable2, 0.1, 9); } - static R incrementalBenchmark(final BiFunction function, - final Table inputTable1, final Table inputTable2, double initialFraction, int steps) { + static R incrementalBenchmark(final BiFunction function, final Table inputTable1, + final Table inputTable2, double initialFraction, int steps) { final long initialSize1 
= (long) (inputTable1.size() * initialFraction); final long initialSize2 = (long) (inputTable2.size() * initialFraction); @@ -70,9 +67,9 @@ static R incrementalBenchmark(final BiFunction function, final long sizePerStep2 = Math.max((inputTable2.size() - initialSize2) / steps, 1); final IncrementalReleaseFilter incrementalReleaseFilter1 = - new IncrementalReleaseFilter(initialSize1, sizePerStep1); + new IncrementalReleaseFilter(initialSize1, sizePerStep1); final IncrementalReleaseFilter incrementalReleaseFilter2 = - new IncrementalReleaseFilter(initialSize2, sizePerStep2); + new IncrementalReleaseFilter(initialSize2, sizePerStep2); final Table filtered1 = inputTable1.where(incrementalReleaseFilter1); final Table filtered2 = inputTable2.where(incrementalReleaseFilter2); @@ -80,18 +77,18 @@ static R incrementalBenchmark(final BiFunction function, final InstrumentedShiftAwareListenerAdapter failureListener; if (result instanceof DynamicTable) { - failureListener = new InstrumentedShiftAwareListenerAdapter("Failure Listener", - (DynamicTable) result, false) { - @Override - public void onUpdate(Update upstream) {} - - @Override - public void onFailureInternal(Throwable originalException, - UpdatePerformanceTracker.Entry sourceEntry) { - originalException.printStackTrace(); - System.exit(1); - } - }; + failureListener = + new InstrumentedShiftAwareListenerAdapter("Failure Listener", (DynamicTable) result, false) { + @Override + public void onUpdate(Update upstream) {} + + @Override + public void onFailureInternal(Throwable originalException, + UpdatePerformanceTracker.Entry sourceEntry) { + originalException.printStackTrace(); + System.exit(1); + } + }; ((DynamicTable) result).listenForUpdates(failureListener); } else { failureListener = null; diff --git a/DB/benchmark/io/deephaven/benchmark/db/LastByBenchmark.java b/DB/benchmark/io/deephaven/benchmark/db/LastByBenchmark.java index a27dd68f005..a0973a2d508 100644 --- 
a/DB/benchmark/io/deephaven/benchmark/db/LastByBenchmark.java +++ b/DB/benchmark/io/deephaven/benchmark/db/LastByBenchmark.java @@ -69,27 +69,24 @@ public void setupEnv(BenchmarkParams params) { if (keyCount == 0) { if (!"None".equals(keyType)) { - throw new UnsupportedOperationException( - "Zero Key can only be run with keyType == None"); + throw new UnsupportedOperationException("Zero Key can only be run with keyType == None"); } } else { if ("None".equals(keyType)) { - throw new UnsupportedOperationException( - "keyType == None can only be run with keyCount==0"); + throw new UnsupportedOperationException("keyType == None can only be run with keyCount==0"); } } switch (tableType) { case "Historical": builder = BenchmarkTools.persistentTableBuilder("Karl", size) - .setPartitioningFormula("${autobalance_single}") - .setPartitionCount(10); + .setPartitioningFormula("${autobalance_single}") + .setPartitionCount(10); break; case "Intraday": builder = BenchmarkTools.persistentTableBuilder("Karl", size); if (grouped) { - throw new UnsupportedOperationException( - "Can not run this benchmark combination."); + throw new UnsupportedOperationException("Can not run this benchmark combination."); } break; @@ -97,14 +94,12 @@ public void setupEnv(BenchmarkParams params) { throw new IllegalStateException("Table type must be Historical or Intraday"); } - builder.setSeed(0xDEADBEEF) - .addColumn(BenchmarkTools.stringCol("PartCol", 1, 5, 7, 0xFEEDBEEF)); + builder.setSeed(0xDEADBEEF).addColumn(BenchmarkTools.stringCol("PartCol", 1, 5, 7, 0xFEEDBEEF)); - final EnumStringColumnGenerator stringKey = - (EnumStringColumnGenerator) BenchmarkTools.stringCol("KeyString", keyCount, 6, 6, - 0xB00FB00F, EnumStringColumnGenerator.Mode.Rotate); - final ColumnGenerator intKey = BenchmarkTools.seqNumberCol("KeyInt", int.class, 0, 1, - keyCount, SequentialNumColumnGenerator.Mode.RollAtLimit); + final EnumStringColumnGenerator stringKey = (EnumStringColumnGenerator) 
BenchmarkTools.stringCol("KeyString", + keyCount, 6, 6, 0xB00FB00F, EnumStringColumnGenerator.Mode.Rotate); + final ColumnGenerator intKey = BenchmarkTools.seqNumberCol("KeyInt", int.class, 0, 1, keyCount, + SequentialNumColumnGenerator.Mode.RollAtLimit); System.out.println("Key type: " + keyType); switch (keyType) { @@ -121,22 +116,19 @@ public void setupEnv(BenchmarkParams params) { builder.addColumn(intKey); keyName = stringKey.getName() + "," + intKey.getName(); if (grouped) { - throw new UnsupportedOperationException( - "Can not run this benchmark combination."); + throw new UnsupportedOperationException("Can not run this benchmark combination."); } break; case "None": keyName = ""; if (grouped) { - throw new UnsupportedOperationException( - "Can not run this benchmark combination."); + throw new UnsupportedOperationException("Can not run this benchmark combination."); } break; default: throw new IllegalStateException("Unknown KeyType: " + keyType); } - keyColumnNames = - keyCount > 0 ? keyName.split(",") : CollectionUtil.ZERO_LENGTH_STRING_ARRAY; + keyColumnNames = keyCount > 0 ? 
keyName.split(",") : CollectionUtil.ZERO_LENGTH_STRING_ARRAY; switch (valueCount) { case 8: @@ -157,8 +149,7 @@ public void setupEnv(BenchmarkParams params) { builder.addColumn(BenchmarkTools.numberCol("ValueToSum1", int.class)); break; default: - throw new IllegalArgumentException( - "Can not initialize with " + valueCount + " values."); + throw new IllegalArgumentException("Can not initialize with " + valueCount + " values."); } if (grouped) { @@ -166,10 +157,9 @@ public void setupEnv(BenchmarkParams params) { } final BenchmarkTable bmt = builder - .build(); + .build(); - state = new TableBenchmarkState(BenchmarkTools.stripName(params.getBenchmark()), - params.getWarmup().getCount()); + state = new TableBenchmarkState(BenchmarkTools.stripName(params.getBenchmark()), params.getWarmup().getCount()); table = bmt.getTable().coalesce().dropColumns("PartCol"); @@ -197,25 +187,23 @@ public void finishIteration(BenchmarkParams params) throws IOException { @Benchmark public Table lastByStatic(@NotNull final Blackhole bh) { - final Table result = - LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> table.lastBy(keyColumnNames)); + final Table result = LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> table.lastBy(keyColumnNames)); bh.consume(result); return state.setResult(TableTools.emptyTable(0)); } @Benchmark public Table lastByIncremental(@NotNull final Blackhole bh) { - final Table result = - IncrementalBenchmark.incrementalBenchmark((t) -> LiveTableMonitor.DEFAULT.sharedLock() - .computeLocked(() -> t.lastBy(keyColumnNames)), table); + final Table result = IncrementalBenchmark.incrementalBenchmark( + (t) -> LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> t.lastBy(keyColumnNames)), table); bh.consume(result); return state.setResult(TableTools.emptyTable(0)); } @Benchmark public Table lastByRolling(@NotNull final Blackhole bh) { - final Table result = IncrementalBenchmark.rollingBenchmark((t) -> LiveTableMonitor.DEFAULT - 
.sharedLock().computeLocked(() -> t.lastBy(keyColumnNames)), table); + final Table result = IncrementalBenchmark.rollingBenchmark( + (t) -> LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> t.lastBy(keyColumnNames)), table); bh.consume(result); return state.setResult(TableTools.emptyTable(0)); } @@ -223,12 +211,13 @@ public Table lastByRolling(@NotNull final Blackhole bh) { @Benchmark public Table lastFirstByStatic(@NotNull final Blackhole bh) { final ComboAggregateFactory.ComboBy lastCols = AggLast(IntStream.range(1, valueCount + 1) - .mapToObj(ii -> "Last" + ii + "=ValueToSum" + ii).toArray(String[]::new)); + .mapToObj(ii -> "Last" + ii + "=ValueToSum" + ii).toArray(String[]::new)); final ComboAggregateFactory.ComboBy firstCols = AggFirst(IntStream.range(1, valueCount + 1) - .mapToObj(ii -> "First" + ii + "=ValueToSum" + ii).toArray(String[]::new)); + .mapToObj(ii -> "First" + ii + "=ValueToSum" + ii).toArray(String[]::new)); - final Table result = IncrementalBenchmark.rollingBenchmark((t) -> LiveTableMonitor.DEFAULT - .sharedLock().computeLocked(() -> t.by(AggCombo(lastCols, firstCols))), table); + final Table result = IncrementalBenchmark.rollingBenchmark( + (t) -> LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> t.by(AggCombo(lastCols, firstCols))), + table); bh.consume(result); return state.setResult(TableTools.emptyTable(0)); } @@ -236,12 +225,13 @@ public Table lastFirstByStatic(@NotNull final Blackhole bh) { @Benchmark public Table lastFirstByIncremental(@NotNull final Blackhole bh) { final ComboAggregateFactory.ComboBy lastCols = AggLast(IntStream.range(1, valueCount + 1) - .mapToObj(ii -> "Last" + ii + "=ValueToSum" + ii).toArray(String[]::new)); + .mapToObj(ii -> "Last" + ii + "=ValueToSum" + ii).toArray(String[]::new)); final ComboAggregateFactory.ComboBy firstCols = AggFirst(IntStream.range(1, valueCount + 1) - .mapToObj(ii -> "First" + ii + "=ValueToSum" + ii).toArray(String[]::new)); + .mapToObj(ii -> "First" + ii + "=ValueToSum" + 
ii).toArray(String[]::new)); - final Table result = IncrementalBenchmark.rollingBenchmark((t) -> LiveTableMonitor.DEFAULT - .sharedLock().computeLocked(() -> t.by(AggCombo(lastCols, firstCols))), table); + final Table result = IncrementalBenchmark.rollingBenchmark( + (t) -> LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> t.by(AggCombo(lastCols, firstCols))), + table); bh.consume(result); return state.setResult(TableTools.emptyTable(0)); } @@ -249,12 +239,13 @@ public Table lastFirstByIncremental(@NotNull final Blackhole bh) { @Benchmark public Table lastFirstByRolling(@NotNull final Blackhole bh) { final ComboAggregateFactory.ComboBy lastCols = AggLast(IntStream.range(1, valueCount + 1) - .mapToObj(ii -> "Last" + ii + "=ValueToSum" + ii).toArray(String[]::new)); + .mapToObj(ii -> "Last" + ii + "=ValueToSum" + ii).toArray(String[]::new)); final ComboAggregateFactory.ComboBy firstCols = AggFirst(IntStream.range(1, valueCount + 1) - .mapToObj(ii -> "First" + ii + "=ValueToSum" + ii).toArray(String[]::new)); + .mapToObj(ii -> "First" + ii + "=ValueToSum" + ii).toArray(String[]::new)); - final Table result = IncrementalBenchmark.rollingBenchmark((t) -> LiveTableMonitor.DEFAULT - .sharedLock().computeLocked(() -> t.by(AggCombo(lastCols, firstCols))), table); + final Table result = IncrementalBenchmark.rollingBenchmark( + (t) -> LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> t.by(AggCombo(lastCols, firstCols))), + table); bh.consume(result); return state.setResult(TableTools.emptyTable(0)); } diff --git a/DB/benchmark/io/deephaven/benchmark/db/MatchFilterBenchmark.java b/DB/benchmark/io/deephaven/benchmark/db/MatchFilterBenchmark.java index 4fa5b1b2d41..49bdfcbc633 100644 --- a/DB/benchmark/io/deephaven/benchmark/db/MatchFilterBenchmark.java +++ b/DB/benchmark/io/deephaven/benchmark/db/MatchFilterBenchmark.java @@ -59,8 +59,8 @@ public void setupEnv(BenchmarkParams params) { switch (tableType) { case "Historical": builder = 
BenchmarkTools.persistentTableBuilder("Carlos", actualSize) - .setPartitioningFormula("${autobalance_single}") - .setPartitionCount(10); + .setPartitioningFormula("${autobalance_single}") + .setPartitionCount(10); break; case "Intraday": builder = BenchmarkTools.persistentTableBuilder("Carlos", actualSize); @@ -71,7 +71,7 @@ public void setupEnv(BenchmarkParams params) { } builder.setSeed(0xDEADBEEF) - .addColumn(BenchmarkTools.stringCol("PartCol", 4, 5, 7, 0xFEEDBEEF)); + .addColumn(BenchmarkTools.stringCol("PartCol", 4, 5, 7, 0xFEEDBEEF)); final DBDateTime startTime = DBTimeUtils.convertDateTime("2019-01-01T12:00:00 NY"); final DBDateTime endTime = DBTimeUtils.convertDateTime("2019-01-01T12:00:00.000001 NY"); @@ -92,8 +92,7 @@ public void setupEnv(BenchmarkParams params) { } final BenchmarkTable bmTable = builder.build(); - state = new TableBenchmarkState(BenchmarkTools.stripName(params.getBenchmark()), - params.getWarmup().getCount()); + state = new TableBenchmarkState(BenchmarkTools.stripName(params.getBenchmark()), params.getWarmup().getCount()); inputTable = applySparsity(bmTable.getTable(), tableSize, sparsity, 0).coalesce(); @@ -104,7 +103,7 @@ public void setupEnv(BenchmarkParams params) { } } else if (filterCol.equals("Symbol")) { inputTable.selectDistinct("Symbol").head(matchValues).columnIterator("Symbol") - .forEachRemaining(values::add); + .forEachRemaining(values::add); } else { for (int ii = 0; ii < matchValues; ++ii) { values.add(ii); @@ -136,7 +135,7 @@ public void finishIteration(BenchmarkParams params) throws IOException { private R incrementalBenchmark(Function function) { final long sizePerStep = Math.max(inputTable.size() / 10, 1); final IncrementalReleaseFilter incrementalReleaseFilter = - new IncrementalReleaseFilter(sizePerStep, sizePerStep); + new IncrementalReleaseFilter(sizePerStep, sizePerStep); final Table filtered = inputTable.where(incrementalReleaseFilter); final R result = function.apply(filtered); diff --git 
a/DB/benchmark/io/deephaven/benchmark/db/MultiTableKeyedOperations.java b/DB/benchmark/io/deephaven/benchmark/db/MultiTableKeyedOperations.java index fca4ff9d336..a860f8cf406 100644 --- a/DB/benchmark/io/deephaven/benchmark/db/MultiTableKeyedOperations.java +++ b/DB/benchmark/io/deephaven/benchmark/db/MultiTableKeyedOperations.java @@ -57,18 +57,17 @@ public void setupEnv(BenchmarkParams params) { type = Utils.primitiveTypeForName.get(typeName); Configuration.getInstance().setProperty("QueryTable.memoizeResults", "false"); - final BenchmarkTableBuilder builder = BenchmarkTools.inMemoryTableBuilder( - "SingleTableOperations", BenchmarkTools.sizeWithSparsity(tableSize, sparsity)); + final BenchmarkTableBuilder builder = BenchmarkTools.inMemoryTableBuilder("SingleTableOperations", + BenchmarkTools.sizeWithSparsity(tableSize, sparsity)); builder.setSeed(0xDEADBEEF).addColumn(BenchmarkTools.numberCol("Mock", int.class)); for (int i = 0; i < columnCount; i++) { builder.addColumn(BenchmarkTools.numberCol("InputColumn" + i, type, 0, - 1 << Math.max(0, logSpaceSize - logColumnCount))); + 1 << Math.max(0, logSpaceSize - logColumnCount))); } bmTable = builder.build(); - state = new TableBenchmarkState(BenchmarkTools.stripName(params.getBenchmark()), - params.getWarmup().getCount()); + state = new TableBenchmarkState(BenchmarkTools.stripName(params.getBenchmark()), params.getWarmup().getCount()); keyColumns = new String[columnCount]; @@ -77,8 +76,8 @@ public void setupEnv(BenchmarkParams params) { } columnsToMatch = String.join(",", keyColumns); fullTable = applySparsity(bmTable.getTable().select(), tableSize, sparsity, 0); - distinctTable = applySparsity(bmTable.getTable().select(), tableSize, sparsity, 0) - .renameColumns("DMock = Mock").lastBy(keyColumns).select(); + distinctTable = applySparsity(bmTable.getTable().select(), tableSize, sparsity, 0).renameColumns("DMock = Mock") + .lastBy(keyColumns).select(); } diff --git 
a/DB/benchmark/io/deephaven/benchmark/db/NaturalJoinBenchmark.java b/DB/benchmark/io/deephaven/benchmark/db/NaturalJoinBenchmark.java index 1ee910d0787..775b62f26a2 100644 --- a/DB/benchmark/io/deephaven/benchmark/db/NaturalJoinBenchmark.java +++ b/DB/benchmark/io/deephaven/benchmark/db/NaturalJoinBenchmark.java @@ -55,18 +55,17 @@ public void setupEnv(BenchmarkParams params) { switch (tableType) { case "Historical": rightBuilder = BenchmarkTools.persistentTableBuilder("Carlos", rightSize) - .setPartitioningFormula("${autobalance_single}") - .setPartitionCount(10); + .setPartitioningFormula("${autobalance_single}") + .setPartitionCount(10); leftBuilder = BenchmarkTools.persistentTableBuilder("Karl", leftSize) - .setPartitioningFormula("${autobalance_single}") - .setPartitionCount(10); + .setPartitioningFormula("${autobalance_single}") + .setPartitionCount(10); break; case "Intraday": rightBuilder = BenchmarkTools.persistentTableBuilder("Carlos", rightSize); leftBuilder = BenchmarkTools.persistentTableBuilder("Karl", leftSize); if (leftGrouped) { - throw new UnsupportedOperationException( - "Can not run this benchmark combination."); + throw new UnsupportedOperationException("Can not run this benchmark combination."); } break; @@ -74,16 +73,13 @@ public void setupEnv(BenchmarkParams params) { throw new IllegalStateException("Table type must be Historical or Intraday"); } - rightBuilder.setSeed(0xDEADBEEF) - .addColumn(BenchmarkTools.stringCol("PartCol", 1, 5, 7, 0xFEEDBEEF)); - leftBuilder.setSeed(0xDEADBEEF) - .addColumn(BenchmarkTools.stringCol("PartCol", 1, 5, 7, 0xFEEDBEEF)); + rightBuilder.setSeed(0xDEADBEEF).addColumn(BenchmarkTools.stringCol("PartCol", 1, 5, 7, 0xFEEDBEEF)); + leftBuilder.setSeed(0xDEADBEEF).addColumn(BenchmarkTools.stringCol("PartCol", 1, 5, 7, 0xFEEDBEEF)); - final EnumStringColumnGenerator stringJoinKey = - (EnumStringColumnGenerator) BenchmarkTools.stringCol("JString", rightSize, 6, 6, - 0xB00FB00F, EnumStringColumnGenerator.Mode.Rotate); 
- final ColumnGenerator intJoinKey = BenchmarkTools.seqNumberCol("JInt", int.class, 0, 1, - rightSize, SequentialNumColumnGenerator.Mode.RollAtLimit); + final EnumStringColumnGenerator stringJoinKey = (EnumStringColumnGenerator) BenchmarkTools.stringCol("JString", + rightSize, 6, 6, 0xB00FB00F, EnumStringColumnGenerator.Mode.Rotate); + final ColumnGenerator intJoinKey = BenchmarkTools.seqNumberCol("JInt", int.class, 0, 1, rightSize, + SequentialNumColumnGenerator.Mode.RollAtLimit); System.out.println("Join key type: " + joinKeyType); switch (joinKeyType) { @@ -104,8 +100,7 @@ public void setupEnv(BenchmarkParams params) { leftBuilder.addColumn(intJoinKey); joinKeyName = stringJoinKey.getName() + "," + intJoinKey.getName(); if (leftGrouped) { - throw new UnsupportedOperationException( - "Can not run this benchmark combination."); + throw new UnsupportedOperationException("Can not run this benchmark combination."); } break; default: @@ -117,15 +112,14 @@ public void setupEnv(BenchmarkParams params) { } final BenchmarkTable bmRight = rightBuilder - .addColumn(BenchmarkTools.numberCol("RightSentinel", long.class)) - .build(); + .addColumn(BenchmarkTools.numberCol("RightSentinel", long.class)) + .build(); final BenchmarkTable bmLeft = leftBuilder - .addColumn(BenchmarkTools.numberCol("LeftSentinel", long.class)) - .build(); + .addColumn(BenchmarkTools.numberCol("LeftSentinel", long.class)) + .build(); - state = new TableBenchmarkState(BenchmarkTools.stripName(params.getBenchmark()), - params.getWarmup().getCount()); + state = new TableBenchmarkState(BenchmarkTools.stripName(params.getBenchmark()), params.getWarmup().getCount()); rightTable = bmRight.getTable().coalesce().dropColumns("PartCol"); leftTable = bmLeft.getTable().coalesce().dropColumns("PartCol"); @@ -163,18 +157,15 @@ public void tearDownInvocation() { @Benchmark public Table naturalJoinStatic() { final Table result = LiveTableMonitor.DEFAULT.sharedLock() - .computeLocked(() -> 
leftTable.naturalJoin(rightTable, joinKeyName)); + .computeLocked(() -> leftTable.naturalJoin(rightTable, joinKeyName)); return state.setResult(result); } @Benchmark public Table naturalJoinIncremental() { - final Table result = - IncrementalBenchmark - .incrementalBenchmark( - (lt, rt) -> LiveTableMonitor.DEFAULT.sharedLock() - .computeLocked(() -> lt.naturalJoin(rt, joinKeyName)), - leftTable, rightTable); + final Table result = IncrementalBenchmark.incrementalBenchmark( + (lt, rt) -> LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> lt.naturalJoin(rt, joinKeyName)), + leftTable, rightTable); return state.setResult(result); } diff --git a/DB/benchmark/io/deephaven/benchmark/db/NaturalJoinMultipleColumnsBench.java b/DB/benchmark/io/deephaven/benchmark/db/NaturalJoinMultipleColumnsBench.java index 243bb9d6b90..17aa7d2b3c4 100644 --- a/DB/benchmark/io/deephaven/benchmark/db/NaturalJoinMultipleColumnsBench.java +++ b/DB/benchmark/io/deephaven/benchmark/db/NaturalJoinMultipleColumnsBench.java @@ -45,18 +45,16 @@ public class NaturalJoinMultipleColumnsBench { public void setupEnv(final BenchmarkParams params) { if (numberOfJoinColumns < 1 || t1NumberOfAdditionalColumns < 1) { throw new InternalError( - "Both numberOfJoinColumns(=" + numberOfJoinColumns - + ") and t1NumberOfAdditionalColumns(=" + t1NumberOfAdditionalColumns - + ") have to be >= 1."); + "Both numberOfJoinColumns(=" + numberOfJoinColumns + ") and t1NumberOfAdditionalColumns(=" + + t1NumberOfAdditionalColumns + ") have to be >= 1."); } - state = new TableBenchmarkState(BenchmarkTools.stripName(params.getBenchmark()), - params.getWarmup().getCount()); + state = new TableBenchmarkState(BenchmarkTools.stripName(params.getBenchmark()), params.getWarmup().getCount()); LiveTableMonitor.DEFAULT.enableUnitTestMode(); final BenchmarkTableBuilder builder1; final String t1PartCol = "T1PartCol"; builder1 = BenchmarkTools.persistentTableBuilder("T1", tableSize); builder1.setSeed(0xDEADB00F) - 
.addColumn(BenchmarkTools.stringCol(t1PartCol, 4, 5, 7, 0xFEEDBEEF)); + .addColumn(BenchmarkTools.stringCol(t1PartCol, 4, 5, 7, 0xFEEDBEEF)); t1Cols = new String[numberOfJoinColumns + t1NumberOfAdditionalColumns + 1]; int nT1Cols = 0; t1Cols[nT1Cols++] = t1PartCol; @@ -82,7 +80,7 @@ public void setupEnv(final BenchmarkParams params) { final String t2PartCol = "T2PartCol"; builder2 = BenchmarkTools.persistentTableBuilder("T2", tableSize); builder2.setSeed(0xDEADBEEF) - .addColumn(BenchmarkTools.stringCol(t2PartCol, 4, 5, 7, 0xFEEDB00F)); + .addColumn(BenchmarkTools.stringCol(t2PartCol, 4, 5, 7, 0xFEEDB00F)); final String[] t2Cols = new String[numberOfJoinColumns + t2NumberOfAdditionalColumns + 1]; int nT2Cols = 0; t2Cols[nT2Cols++] = t2PartCol; @@ -133,12 +131,11 @@ public Table naturalJoinBench(final Blackhole bh) { final Table result; if (doSelect) { result = LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLocked(() -> IncrementalBenchmark - .incrementalBenchmark((Table t) -> t.select(t1Cols).sort(sortCol).naturalJoin( - t2, joinColsStr, joinColumnsToAddStr), inputTable, steps)); + .computeLocked(() -> IncrementalBenchmark + .incrementalBenchmark((Table t) -> t.select(t1Cols).sort(sortCol).naturalJoin( + t2, joinColsStr, joinColumnsToAddStr), inputTable, steps)); } else { - result = - IncrementalBenchmark.incrementalBenchmark((Table t) -> t.sort(sortCol).naturalJoin( + result = IncrementalBenchmark.incrementalBenchmark((Table t) -> t.sort(sortCol).naturalJoin( t2, joinColsStr, joinColumnsToAddStr), inputTable, steps); } return state.setResult(result); diff --git a/DB/benchmark/io/deephaven/benchmark/db/NaturalJoinMultipleColumnsFillChunkBench.java b/DB/benchmark/io/deephaven/benchmark/db/NaturalJoinMultipleColumnsFillChunkBench.java index 6968dcdb8b9..996cdf96aed 100644 --- a/DB/benchmark/io/deephaven/benchmark/db/NaturalJoinMultipleColumnsFillChunkBench.java +++ b/DB/benchmark/io/deephaven/benchmark/db/NaturalJoinMultipleColumnsFillChunkBench.java @@ -48,7 
+48,7 @@ protected QueryData getQuery() { final String t1PartCol = "T1PartCol"; builder1 = BenchmarkTools.persistentTableBuilder("T1", tableSize); builder1.setSeed(0xDEADB00F) - .addColumn(BenchmarkTools.stringCol(t1PartCol, 4, 5, 7, 0xFEEDBEEF)); + .addColumn(BenchmarkTools.stringCol(t1PartCol, 4, 5, 7, 0xFEEDBEEF)); t1Cols = new String[numberOfJoinColumns + t1NumberOfAdditionalColumns + 1]; int nT1Cols = 0; t1Cols[nT1Cols++] = t1PartCol; @@ -74,7 +74,7 @@ protected QueryData getQuery() { final String t2PartCol = "T2PartCol"; builder2 = BenchmarkTools.persistentTableBuilder("T2", tableSize); builder2.setSeed(0xDEADBEEF) - .addColumn(BenchmarkTools.stringCol(t2PartCol, 4, 5, 7, 0xFEEDB00F)); + .addColumn(BenchmarkTools.stringCol(t2PartCol, 4, 5, 7, 0xFEEDB00F)); final String[] t2Cols = new String[numberOfJoinColumns + t2NumberOfAdditionalColumns + 1]; int nT2Cols = 0; t2Cols[nT2Cols++] = t2PartCol; @@ -95,20 +95,19 @@ protected QueryData getQuery() { final Table t2 = bmTable2.getTable().coalesce(); final long sizePerStep = Math.max(t1.size() / steps, 1); final IncrementalReleaseFilter incrementalReleaseFilter = - new IncrementalReleaseFilter(sizePerStep, sizePerStep); + new IncrementalReleaseFilter(sizePerStep, sizePerStep); final Table t1Released = t1.where(incrementalReleaseFilter); final Table live; final String joinColsStr = String.join(",", joinCols); final String joinColumnsToAddStr = String.join(",", joinColumnsToAdd); if (doSelect) { - live = LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked(() -> t1Released - .select(t1Cols).sort(sortCol).naturalJoin(t2, joinColsStr, joinColumnsToAddStr)); + live = LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked( + () -> t1Released.select(t1Cols).sort(sortCol).naturalJoin(t2, joinColsStr, joinColumnsToAddStr)); } else { live = t1Released.sort(sortCol).naturalJoin(t2, joinColsStr, joinColumnsToAddStr); } return new QueryData( - live, incrementalReleaseFilter, steps, joinCols, - 
WritableLongChunk.makeWritableChunk(chunkCapacity)); + live, incrementalReleaseFilter, steps, joinCols, WritableLongChunk.makeWritableChunk(chunkCapacity)); } public static void main(String[] args) { diff --git a/DB/benchmark/io/deephaven/benchmark/db/PercentileByBenchmark.java b/DB/benchmark/io/deephaven/benchmark/db/PercentileByBenchmark.java index d48bec98a92..cecc0d45022 100644 --- a/DB/benchmark/io/deephaven/benchmark/db/PercentileByBenchmark.java +++ b/DB/benchmark/io/deephaven/benchmark/db/PercentileByBenchmark.java @@ -69,27 +69,24 @@ public void setupEnv(BenchmarkParams params) { if (keyCount == 0) { if (!"None".equals(keyType)) { - throw new UnsupportedOperationException( - "Zero Key can only be run with keyType == None"); + throw new UnsupportedOperationException("Zero Key can only be run with keyType == None"); } } else { if ("None".equals(keyType)) { - throw new UnsupportedOperationException( - "keyType == None can only be run with keyCount==0"); + throw new UnsupportedOperationException("keyType == None can only be run with keyCount==0"); } } switch (tableType) { case "Historical": builder = BenchmarkTools.persistentTableBuilder("Karl", size) - .setPartitioningFormula("${autobalance_single}") - .setPartitionCount(10); + .setPartitioningFormula("${autobalance_single}") + .setPartitionCount(10); break; case "Intraday": builder = BenchmarkTools.persistentTableBuilder("Karl", size); if (grouped) { - throw new UnsupportedOperationException( - "Can not run this benchmark combination."); + throw new UnsupportedOperationException("Can not run this benchmark combination."); } break; @@ -97,14 +94,12 @@ public void setupEnv(BenchmarkParams params) { throw new IllegalStateException("Table type must be Historical or Intraday"); } - builder.setSeed(0xDEADBEEF) - .addColumn(BenchmarkTools.stringCol("PartCol", 1, 5, 7, 0xFEEDBEEF)); + builder.setSeed(0xDEADBEEF).addColumn(BenchmarkTools.stringCol("PartCol", 1, 5, 7, 0xFEEDBEEF)); - final EnumStringColumnGenerator 
stringKey = - (EnumStringColumnGenerator) BenchmarkTools.stringCol("KeyString", keyCount, 6, 6, - 0xB00FB00F, EnumStringColumnGenerator.Mode.Rotate); - final ColumnGenerator intKey = BenchmarkTools.seqNumberCol("KeyInt", int.class, 0, 1, - keyCount, SequentialNumColumnGenerator.Mode.RollAtLimit); + final EnumStringColumnGenerator stringKey = (EnumStringColumnGenerator) BenchmarkTools.stringCol("KeyString", + keyCount, 6, 6, 0xB00FB00F, EnumStringColumnGenerator.Mode.Rotate); + final ColumnGenerator intKey = BenchmarkTools.seqNumberCol("KeyInt", int.class, 0, 1, keyCount, + SequentialNumColumnGenerator.Mode.RollAtLimit); System.out.println("Key type: " + keyType); switch (keyType) { @@ -121,22 +116,19 @@ public void setupEnv(BenchmarkParams params) { builder.addColumn(intKey); keyName = stringKey.getName() + "," + intKey.getName(); if (grouped) { - throw new UnsupportedOperationException( - "Can not run this benchmark combination."); + throw new UnsupportedOperationException("Can not run this benchmark combination."); } break; case "None": keyName = ""; if (grouped) { - throw new UnsupportedOperationException( - "Can not run this benchmark combination."); + throw new UnsupportedOperationException("Can not run this benchmark combination."); } break; default: throw new IllegalStateException("Unknown KeyType: " + keyType); } - keyColumnNames = - keyCount > 0 ? keyName.split(",") : CollectionUtil.ZERO_LENGTH_STRING_ARRAY; + keyColumnNames = keyCount > 0 ? 
keyName.split(",") : CollectionUtil.ZERO_LENGTH_STRING_ARRAY; switch (valueCount) { case 8: @@ -157,8 +149,7 @@ public void setupEnv(BenchmarkParams params) { builder.addColumn(BenchmarkTools.numberCol("ValueToSum1", int.class)); break; default: - throw new IllegalArgumentException( - "Can not initialize with " + valueCount + " values."); + throw new IllegalArgumentException("Can not initialize with " + valueCount + " values."); } if (grouped) { @@ -166,10 +157,9 @@ public void setupEnv(BenchmarkParams params) { } final BenchmarkTable bmt = builder - .build(); + .build(); - state = new TableBenchmarkState(BenchmarkTools.stripName(params.getBenchmark()), - params.getWarmup().getCount()); + state = new TableBenchmarkState(BenchmarkTools.stripName(params.getBenchmark()), params.getWarmup().getCount()); table = bmt.getTable().coalesce().dropColumns("PartCol"); } @@ -196,8 +186,7 @@ public void finishIteration(BenchmarkParams params) throws IOException { @Benchmark public Table percentileByStatic(@NotNull final Blackhole bh) { final Function fut = getFunction(); - final Table result = - LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> fut.apply(table)); + final Table result = LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> fut.apply(table)); bh.consume(result); return state.setResult(TableTools.emptyTable(0)); } @@ -210,11 +199,10 @@ private Function getFunction() { } else if (percentileMode.equals("tdigest")) { fut = (t) -> { final NonKeyColumnAggregationFactory aggregationContextFactory = - new NonKeyColumnAggregationFactory( - (type, resultName, exposeInternalColumns) -> new TDigestPercentileOperator( - type, 100.0, 0.99, resultName)); - return ChunkedOperatorAggregationHelper.aggregation(aggregationContextFactory, - (QueryTable) t, SelectColumnFactory.getExpressions(keyColumnNames)); + new NonKeyColumnAggregationFactory((type, resultName, + exposeInternalColumns) -> new TDigestPercentileOperator(type, 100.0, 0.99, resultName)); + return 
ChunkedOperatorAggregationHelper.aggregation(aggregationContextFactory, (QueryTable) t, + SelectColumnFactory.getExpressions(keyColumnNames)); }; } else { throw new IllegalArgumentException("Bad mode: " + percentileMode); @@ -226,7 +214,7 @@ private Function getFunction() { public Table percentileByIncremental(@NotNull final Blackhole bh) { final Function fut = getFunction(); final Table result = IncrementalBenchmark.incrementalBenchmark( - (t) -> LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> fut.apply(t)), table); + (t) -> LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> fut.apply(t)), table); bh.consume(result); return state.setResult(TableTools.emptyTable(0)); } diff --git a/DB/benchmark/io/deephaven/benchmark/db/RangeFilterBenchmark.java b/DB/benchmark/io/deephaven/benchmark/db/RangeFilterBenchmark.java index bb112eb146e..1958ed22728 100644 --- a/DB/benchmark/io/deephaven/benchmark/db/RangeFilterBenchmark.java +++ b/DB/benchmark/io/deephaven/benchmark/db/RangeFilterBenchmark.java @@ -54,8 +54,8 @@ public void setupEnv(BenchmarkParams params) { switch (tableType) { case "Historical": builder = BenchmarkTools.persistentTableBuilder("Carlos", actualSize) - .setPartitioningFormula("${autobalance_single}") - .setPartitionCount(10); + .setPartitioningFormula("${autobalance_single}") + .setPartitionCount(10); break; case "Intraday": builder = BenchmarkTools.persistentTableBuilder("Carlos", actualSize); @@ -66,7 +66,7 @@ public void setupEnv(BenchmarkParams params) { } builder.setSeed(0xDEADBEEF) - .addColumn(BenchmarkTools.stringCol("PartCol", 4, 5, 7, 0xFEEDBEEF)); + .addColumn(BenchmarkTools.stringCol("PartCol", 4, 5, 7, 0xFEEDBEEF)); final DBDateTime startTime = DBTimeUtils.convertDateTime("2019-01-01T12:00:00 NY"); final DBDateTime endTime = DBTimeUtils.convertDateTime("2019-12-31T12:00:00 NY"); @@ -79,12 +79,10 @@ public void setupEnv(BenchmarkParams params) { builder.addColumn(BenchmarkTools.numberCol("F1", float.class, -10e6f, 10e6f)); 
break; case "L1": - builder - .addColumn(BenchmarkTools.numberCol("L1", long.class, -10_000_000, 10_000_000)); + builder.addColumn(BenchmarkTools.numberCol("L1", long.class, -10_000_000, 10_000_000)); break; case "I1": - builder - .addColumn(BenchmarkTools.numberCol("I1", int.class, -10_000_000, 10_000_000)); + builder.addColumn(BenchmarkTools.numberCol("I1", int.class, -10_000_000, 10_000_000)); break; case "Timestamp": builder.addColumn(BenchmarkTools.dateCol("Timestamp", startTime, endTime)); @@ -102,10 +100,8 @@ public void setupEnv(BenchmarkParams params) { } else { final long midpoint = (startTime.getNanos() + endTime.getNanos()) / 2; final long range = (endTime.getNanos() - startTime.getNanos()); - lowerBound = - DBTimeUtils.nanosToTime(midpoint - (long) (range * (selectivity / 100.0))); - upperBound = - DBTimeUtils.nanosToTime(midpoint + (long) (range * (selectivity / 100.0))); + lowerBound = DBTimeUtils.nanosToTime(midpoint - (long) (range * (selectivity / 100.0))); + upperBound = DBTimeUtils.nanosToTime(midpoint + (long) (range * (selectivity / 100.0))); } assert lowerBound != null; @@ -129,8 +125,7 @@ public void setupEnv(BenchmarkParams params) { } final BenchmarkTable bmTable = builder.build(); - state = new TableBenchmarkState(BenchmarkTools.stripName(params.getBenchmark()), - params.getWarmup().getCount()); + state = new TableBenchmarkState(BenchmarkTools.stripName(params.getBenchmark()), params.getWarmup().getCount()); inputTable = applySparsity(bmTable.getTable(), tableSize, sparsity, 0).coalesce(); } @@ -156,7 +151,7 @@ public void finishIteration(BenchmarkParams params) throws IOException { private R incrementalBenchmark(Function function) { final long sizePerStep = Math.max(inputTable.size() / 10, 1); final IncrementalReleaseFilter incrementalReleaseFilter = - new IncrementalReleaseFilter(sizePerStep, sizePerStep); + new IncrementalReleaseFilter(sizePerStep, sizePerStep); final Table filtered = inputTable.where(incrementalReleaseFilter); final R 
result = function.apply(filtered); diff --git a/DB/benchmark/io/deephaven/benchmark/db/RedirectedColumnSourceBench.java b/DB/benchmark/io/deephaven/benchmark/db/RedirectedColumnSourceBench.java index 895ef304011..d64b013e71c 100644 --- a/DB/benchmark/io/deephaven/benchmark/db/RedirectedColumnSourceBench.java +++ b/DB/benchmark/io/deephaven/benchmark/db/RedirectedColumnSourceBench.java @@ -51,38 +51,38 @@ protected QueryData getQuery() { final BenchmarkTableBuilder builder; builder = BenchmarkTools.persistentTableBuilder("Juancho", tableSize); builder.setSeed(0xDEADBEEF) - .addColumn(BenchmarkTools.stringCol("PartCol", 4, 5, 7, 0xFEEDBEEF)); + .addColumn(BenchmarkTools.stringCol("PartCol", 4, 5, 7, 0xFEEDBEEF)); final WritableChunk chunk; final Consumer builderAddColumn; final int nSelectCols; switch (fillColsType) { case "D": builderAddColumn = (final String col) -> builder - .addColumn(BenchmarkTools.numberCol(col, double.class, -10e6, 10e6)); + .addColumn(BenchmarkTools.numberCol(col, double.class, -10e6, 10e6)); chunk = WritableDoubleChunk.makeWritableChunk(chunkCapacity); nSelectCols = nFillCols + 1; break; case "F": builderAddColumn = (final String col) -> builder - .addColumn(BenchmarkTools.numberCol(col, float.class, -10e6f, 10e6f)); + .addColumn(BenchmarkTools.numberCol(col, float.class, -10e6f, 10e6f)); chunk = WritableFloatChunk.makeWritableChunk(chunkCapacity); nSelectCols = nFillCols + 1; break; case "L": builderAddColumn = (final String col) -> builder - .addColumn(BenchmarkTools.numberCol(col, long.class, -10_000_000, 10_000_000)); + .addColumn(BenchmarkTools.numberCol(col, long.class, -10_000_000, 10_000_000)); chunk = WritableLongChunk.makeWritableChunk(chunkCapacity); nSelectCols = nFillCols + 1; break; case "I": builderAddColumn = (final String col) -> builder - .addColumn(BenchmarkTools.numberCol(col, int.class, -10_000_000, 10_000_000)); + .addColumn(BenchmarkTools.numberCol(col, int.class, -10_000_000, 10_000_000)); chunk = 
WritableIntChunk.makeWritableChunk(chunkCapacity); nSelectCols = nFillCols; break; case "S": - builderAddColumn = (final String col) -> builder - .addColumn(BenchmarkTools.stringCol(col, 4096, 4, 8, 0xDEADBEEF)); + builderAddColumn = + (final String col) -> builder.addColumn(BenchmarkTools.stringCol(col, 4096, 4, 8, 0xDEADBEEF)); chunk = WritableObjectChunk.makeWritableChunk(chunkCapacity); nSelectCols = nFillCols + 1; break; @@ -107,20 +107,20 @@ protected QueryData getQuery() { final Table t = bmTable.getTable().coalesce(); final long sizePerStep = Math.max(t.size() / steps, 1); final IncrementalReleaseFilter incrementalReleaseFilter = - new IncrementalReleaseFilter(sizePerStep, sizePerStep); + new IncrementalReleaseFilter(sizePerStep, sizePerStep); final Table live; if (doSelect) { - live = LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked( - () -> t.where(incrementalReleaseFilter).select(selectCols).sort(sortCol)); + live = LiveTableMonitor.DEFAULT.exclusiveLock() + .computeLocked(() -> t.where(incrementalReleaseFilter).select(selectCols).sort(sortCol)); } else { live = t.where(incrementalReleaseFilter).sort(sortCol); } return new QueryData( - live, - incrementalReleaseFilter, - steps, - fillCols, - chunk); + live, + incrementalReleaseFilter, + steps, + fillCols, + chunk); } public static void main(String[] args) { diff --git a/DB/benchmark/io/deephaven/benchmark/db/RedirectionBenchBase.java b/DB/benchmark/io/deephaven/benchmark/db/RedirectionBenchBase.java index d91c7787231..42a58c76f6f 100644 --- a/DB/benchmark/io/deephaven/benchmark/db/RedirectionBenchBase.java +++ b/DB/benchmark/io/deephaven/benchmark/db/RedirectionBenchBase.java @@ -43,11 +43,11 @@ public class QueryData { public final WritableChunk chunk; public QueryData( - final Table live, - final IncrementalReleaseFilter incrementalReleaseFilter, - final int steps, - final String[] fillCol, - final WritableChunk chunk) { + final Table live, + final IncrementalReleaseFilter 
incrementalReleaseFilter, + final int steps, + final String[] fillCol, + final WritableChunk chunk) { this.live = live; this.incrementalReleaseFilter = incrementalReleaseFilter; this.steps = steps; @@ -66,13 +66,11 @@ public void setupEnv(final BenchmarkParams params) { LiveTableMonitor.DEFAULT.enableUnitTestMode(); - state = new TableBenchmarkState(BenchmarkTools.stripName(params.getBenchmark()), - params.getWarmup().getCount()); + state = new TableBenchmarkState(BenchmarkTools.stripName(params.getBenchmark()), params.getWarmup().getCount()); final QueryData queryData = getQuery(); for (int step = 0; step < queryData.steps; ++step) { - LiveTableMonitor.DEFAULT - .runWithinUnitTestCycle(queryData.incrementalReleaseFilter::refresh); + LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(queryData.incrementalReleaseFilter::refresh); } inputTable = queryData.live; nFillCols = queryData.fillCols.length; diff --git a/DB/benchmark/io/deephaven/benchmark/db/RedirectionIndexBench.java b/DB/benchmark/io/deephaven/benchmark/db/RedirectionIndexBench.java index 1e423f9f23c..22e6ad7dc5c 100644 --- a/DB/benchmark/io/deephaven/benchmark/db/RedirectionIndexBench.java +++ b/DB/benchmark/io/deephaven/benchmark/db/RedirectionIndexBench.java @@ -40,7 +40,7 @@ protected QueryData getQuery() { final BenchmarkTableBuilder builder1; builder1 = BenchmarkTools.persistentTableBuilder("T1", tableSize / 10); builder1.setSeed(0xDEADB00F) - .addColumn(BenchmarkTools.stringCol("PartCol1", 4, 5, 7, 0xFEEDBEEF)); + .addColumn(BenchmarkTools.stringCol("PartCol1", 4, 5, 7, 0xFEEDBEEF)); final String joinCol = "L"; builder1.addColumn(BenchmarkTools.seqNumberCol(joinCol, long.class, 0, 1)); builder1.addColumn(BenchmarkTools.numberCol("I1", int.class, -10_000_000, 10_000_000)); @@ -49,31 +49,30 @@ protected QueryData getQuery() { final BenchmarkTableBuilder builder2; builder2 = BenchmarkTools.persistentTableBuilder("T2", tableSize); builder2.setSeed(0xDEADBEEF) - 
.addColumn(BenchmarkTools.stringCol("PartCol2", 4, 5, 7, 0xFEEDB00F)); + .addColumn(BenchmarkTools.stringCol("PartCol2", 4, 5, 7, 0xFEEDB00F)); builder2.addColumn(BenchmarkTools.seqNumberCol(joinCol, long.class, tableSize, -1)); final BenchmarkTable bmTable2 = builder2.build(); final Table t1 = bmTable1.getTable().coalesce(); final Table t2 = bmTable2.getTable().coalesce(); final long sizePerStep = Math.max(t1.size() / steps, 1); final IncrementalReleaseFilter incrementalReleaseFilter = - new IncrementalReleaseFilter(sizePerStep, sizePerStep); + new IncrementalReleaseFilter(sizePerStep, sizePerStep); final Table live; if (doSelect) { live = LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked( - () -> t1.where(incrementalReleaseFilter).select(joinCol, "PartCol1", "I1") - .sort("I1").naturalJoin( - t2, joinCol, "PartCol2")); + () -> t1.where(incrementalReleaseFilter).select(joinCol, "PartCol1", "I1").sort("I1").naturalJoin( + t2, joinCol, "PartCol2")); } else { live = LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked( - () -> t1.where(incrementalReleaseFilter).sort("I1").naturalJoin( - t2, joinCol, "PartCol2")); + () -> t1.where(incrementalReleaseFilter).sort("I1").naturalJoin( + t2, joinCol, "PartCol2")); } return new QueryData( - live, - incrementalReleaseFilter, - steps, - new String[] {joinCol}, - WritableLongChunk.makeWritableChunk(chunkCapacity)); + live, + incrementalReleaseFilter, + steps, + new String[] {joinCol}, + WritableLongChunk.makeWritableChunk(chunkCapacity)); } public static void main(String[] args) { diff --git a/DB/benchmark/io/deephaven/benchmark/db/RegionedColumnSourceBenchmark.java b/DB/benchmark/io/deephaven/benchmark/db/RegionedColumnSourceBenchmark.java index 88eecd779de..b1b83f99f2e 100644 --- a/DB/benchmark/io/deephaven/benchmark/db/RegionedColumnSourceBenchmark.java +++ b/DB/benchmark/io/deephaven/benchmark/db/RegionedColumnSourceBenchmark.java @@ -66,28 +66,27 @@ public class RegionedColumnSourceBenchmark { private enum Copier { 
Int() { @Override - final void copy(@NotNull final ColumnSource columnSource, - @NotNull final WritableChunk destination, final long key) { + final void copy(@NotNull final ColumnSource columnSource, @NotNull final WritableChunk destination, + final long key) { destination.asWritableIntChunk().add(columnSource.getInt(key)); } }, Long() { @Override - final void copy(@NotNull final ColumnSource columnSource, - @NotNull final WritableChunk destination, final long key) { + final void copy(@NotNull final ColumnSource columnSource, @NotNull final WritableChunk destination, + final long key) { destination.asWritableLongChunk().add(columnSource.getLong(key)); } }, Object() { @Override - final void copy(@NotNull final ColumnSource columnSource, - @NotNull final WritableChunk destination, final long key) { + final void copy(@NotNull final ColumnSource columnSource, @NotNull final WritableChunk destination, + final long key) { destination.asWritableObjectChunk().add(columnSource.get(key)); } }; - abstract void copy(@NotNull ColumnSource columnSource, - @NotNull WritableChunk destination, long key); + abstract void copy(@NotNull ColumnSource columnSource, @NotNull WritableChunk destination, long key); } @Setup(Level.Trial) @@ -102,8 +101,8 @@ public void setupEnv(BenchmarkParams params) { switch (tableType) { case "Historical": builder = BenchmarkTools.persistentTableBuilder("RegionedTable", actualSize) - .setPartitioningFormula("${autobalance_single}") - .setPartitionCount(10); + .setPartitioningFormula("${autobalance_single}") + .setPartitionCount(10); break; case "Intraday": builder = BenchmarkTools.persistentTableBuilder("RegionedTable", actualSize); @@ -114,7 +113,7 @@ public void setupEnv(BenchmarkParams params) { } builder.setSeed(0xDEADBEEF) - .addColumn(BenchmarkTools.stringCol("PartitioningColumn", 4, 5, 7, 0xFEEDBEEF)); + .addColumn(BenchmarkTools.stringCol("PartitioningColumn", 4, 5, 7, 0xFEEDBEEF)); switch (fillColumn) { case "I1": @@ -140,8 +139,7 @@ public void 
setupEnv(BenchmarkParams params) { } final BenchmarkTable bmTable = builder.build(); - state = new TableBenchmarkState(BenchmarkTools.stripName(params.getBenchmark()), - params.getWarmup().getCount()); + state = new TableBenchmarkState(BenchmarkTools.stripName(params.getBenchmark()), params.getWarmup().getCount()); inputTable = applySparsity(bmTable.getTable(), tableSize, sparsity, 555).coalesce(); } @@ -166,15 +164,11 @@ public void finishIteration(BenchmarkParams params) throws IOException { @Benchmark public void readEntireTable(@NotNull final Blackhole bh) { - final AbstractColumnSource inputSource = - (AbstractColumnSource) inputTable.getColumnSource(fillColumn); + final AbstractColumnSource inputSource = (AbstractColumnSource) inputTable.getColumnSource(fillColumn); switch (mode) { case "Fill": - try ( - final ColumnSource.FillContext fillContext = - inputSource.makeFillContext(chunkCapacity); - final OrderedKeys.Iterator oki = - inputTable.getIndex().getOrderedKeysIterator()) { + try (final ColumnSource.FillContext fillContext = inputSource.makeFillContext(chunkCapacity); + final OrderedKeys.Iterator oki = inputTable.getIndex().getOrderedKeysIterator()) { while (oki.hasMore()) { final OrderedKeys ok = oki.getNextOrderedKeysWithLength(chunkCapacity); inputSource.fillChunk(fillContext, destination, ok); @@ -183,11 +177,8 @@ public void readEntireTable(@NotNull final Blackhole bh) { } break; case "Get": - try ( - final ColumnSource.GetContext getContext = - inputSource.makeGetContext(chunkCapacity); - final OrderedKeys.Iterator oki = - inputTable.getIndex().getOrderedKeysIterator()) { + try (final ColumnSource.GetContext getContext = inputSource.makeGetContext(chunkCapacity); + final OrderedKeys.Iterator oki = inputTable.getIndex().getOrderedKeysIterator()) { while (oki.hasMore()) { final OrderedKeys ok = oki.getNextOrderedKeysWithLength(chunkCapacity); bh.consume(inputSource.getChunk(getContext, ok)); @@ -195,11 +186,8 @@ public void readEntireTable(@NotNull 
final Blackhole bh) { } break; case "Default": - try ( - final ColumnSource.FillContext fillContext = - inputSource.makeFillContext(chunkCapacity); - final OrderedKeys.Iterator oki = - inputTable.getIndex().getOrderedKeysIterator()) { + try (final ColumnSource.FillContext fillContext = inputSource.makeFillContext(chunkCapacity); + final OrderedKeys.Iterator oki = inputTable.getIndex().getOrderedKeysIterator()) { while (oki.hasMore()) { final OrderedKeys ok = oki.getNextOrderedKeysWithLength(chunkCapacity); inputSource.defaultFillChunk(fillContext, destination, ok); diff --git a/DB/benchmark/io/deephaven/benchmark/db/ResultSizeProfiler.java b/DB/benchmark/io/deephaven/benchmark/db/ResultSizeProfiler.java index 883c96ca8bd..b77947c5f3e 100644 --- a/DB/benchmark/io/deephaven/benchmark/db/ResultSizeProfiler.java +++ b/DB/benchmark/io/deephaven/benchmark/db/ResultSizeProfiler.java @@ -12,8 +12,7 @@ import java.util.Collections; /** - * Simple profiler that just records how many rows were in the result (presuming you set the result - * size). + * Simple profiler that just records how many rows were in the result (presuming you set the result size). 
*/ public class ResultSizeProfiler implements InternalProfiler { private static long resultSize; @@ -29,11 +28,9 @@ public void beforeIteration(BenchmarkParams benchmarkParams, IterationParams ite } @Override - public Collection afterIteration(BenchmarkParams benchmarkParams, - IterationParams iterationParams, - IterationResult result) { - return Collections - .singleton(new ScalarResult("Result size", resultSize, "rows", AggregationPolicy.AVG)); + public Collection afterIteration(BenchmarkParams benchmarkParams, IterationParams iterationParams, + IterationResult result) { + return Collections.singleton(new ScalarResult("Result size", resultSize, "rows", AggregationPolicy.AVG)); } public static void setResultSize(long resultSize) { diff --git a/DB/benchmark/io/deephaven/benchmark/db/SingleTableKeyedDateTimeOperations.java b/DB/benchmark/io/deephaven/benchmark/db/SingleTableKeyedDateTimeOperations.java index fb286015643..0381bb54a3d 100644 --- a/DB/benchmark/io/deephaven/benchmark/db/SingleTableKeyedDateTimeOperations.java +++ b/DB/benchmark/io/deephaven/benchmark/db/SingleTableKeyedDateTimeOperations.java @@ -51,18 +51,17 @@ public void setupEnv(BenchmarkParams params) { int columnCount = 1 << logColumnCount; Configuration.getInstance().setProperty("QueryTable.memoizeResults", "false"); - final BenchmarkTableBuilder builder = BenchmarkTools.inMemoryTableBuilder( - "SingleTableOperations", BenchmarkTools.sizeWithSparsity(tableSize, sparsity)); + final BenchmarkTableBuilder builder = BenchmarkTools.inMemoryTableBuilder("SingleTableOperations", + BenchmarkTools.sizeWithSparsity(tableSize, sparsity)); builder.setSeed(0xDEADBEEF).addColumn(BenchmarkTools.numberCol("Mock", int.class)); for (int i = 0; i < columnCount; i++) { builder.addColumn(BenchmarkTools.numberCol("InputColumn" + i, long.class, 0, - 1 << Math.max(0, logSpaceSize - logColumnCount))); + 1 << Math.max(0, logSpaceSize - logColumnCount))); } bmTable = builder.build(); - state = new 
TableBenchmarkState(BenchmarkTools.stripName(params.getBenchmark()), - params.getWarmup().getCount()); + state = new TableBenchmarkState(BenchmarkTools.stripName(params.getBenchmark()), params.getWarmup().getCount()); keyColumns = new String[columnCount]; @@ -78,8 +77,7 @@ public void setupEnv(BenchmarkParams params) { @Setup(Level.Iteration) public void setupIteration() { state.init(); - inputTable = - applySparsity(bmTable.getTable().select(convertToDateTime), tableSize, sparsity, 0); + inputTable = applySparsity(bmTable.getTable().select(convertToDateTime), tableSize, sparsity, 0); } diff --git a/DB/benchmark/io/deephaven/benchmark/db/SingleTableKeyedOperations.java b/DB/benchmark/io/deephaven/benchmark/db/SingleTableKeyedOperations.java index 1c19f3c7918..7caeb5f7626 100644 --- a/DB/benchmark/io/deephaven/benchmark/db/SingleTableKeyedOperations.java +++ b/DB/benchmark/io/deephaven/benchmark/db/SingleTableKeyedOperations.java @@ -56,18 +56,17 @@ public void setupEnv(BenchmarkParams params) { type = Utils.primitiveTypeForName.get(typeName); Configuration.getInstance().setProperty("QueryTable.memoizeResults", "false"); - final BenchmarkTableBuilder builder = BenchmarkTools.inMemoryTableBuilder( - "SingleTableOperations", BenchmarkTools.sizeWithSparsity(tableSize, sparsity)); + final BenchmarkTableBuilder builder = BenchmarkTools.inMemoryTableBuilder("SingleTableOperations", + BenchmarkTools.sizeWithSparsity(tableSize, sparsity)); builder.setSeed(0xDEADBEEF).addColumn(BenchmarkTools.numberCol("Mock", int.class)); for (int i = 0; i < columnCount; i++) { builder.addColumn(BenchmarkTools.numberCol("InputColumn" + i, type, 0, - 1 << Math.max(0, logSpaceSize - logColumnCount))); + 1 << Math.max(0, logSpaceSize - logColumnCount))); } bmTable = builder.build(); - state = new TableBenchmarkState(BenchmarkTools.stripName(params.getBenchmark()), - params.getWarmup().getCount()); + state = new TableBenchmarkState(BenchmarkTools.stripName(params.getBenchmark()), 
params.getWarmup().getCount()); keyColumns = new String[columnCount]; diff --git a/DB/benchmark/io/deephaven/benchmark/db/SingleTableKeyedStringOperations.java b/DB/benchmark/io/deephaven/benchmark/db/SingleTableKeyedStringOperations.java index a3d634247e3..9c720f05f2d 100644 --- a/DB/benchmark/io/deephaven/benchmark/db/SingleTableKeyedStringOperations.java +++ b/DB/benchmark/io/deephaven/benchmark/db/SingleTableKeyedStringOperations.java @@ -51,18 +51,17 @@ public void setupEnv(BenchmarkParams params) { int columnCount = 1 << logColumnCount; Configuration.getInstance().setProperty("QueryTable.memoizeResults", "false"); - final BenchmarkTableBuilder builder = BenchmarkTools.inMemoryTableBuilder( - "SingleTableOperations", BenchmarkTools.sizeWithSparsity(tableSize, sparsity)); + final BenchmarkTableBuilder builder = BenchmarkTools.inMemoryTableBuilder("SingleTableOperations", + BenchmarkTools.sizeWithSparsity(tableSize, sparsity)); builder.setSeed(0xDEADBEEF).addColumn(BenchmarkTools.numberCol("Mock", int.class)); for (int i = 0; i < columnCount; i++) { builder.addColumn(BenchmarkTools.numberCol("InputColumn" + i, long.class, 0, - 1 << Math.max(0, logSpaceSize - logColumnCount))); + 1 << Math.max(0, logSpaceSize - logColumnCount))); } bmTable = builder.build(); - state = new TableBenchmarkState(BenchmarkTools.stripName(params.getBenchmark()), - params.getWarmup().getCount()); + state = new TableBenchmarkState(BenchmarkTools.stripName(params.getBenchmark()), params.getWarmup().getCount()); keyColumns = new String[columnCount]; @@ -78,8 +77,7 @@ public void setupEnv(BenchmarkParams params) { @Setup(Level.Iteration) public void setupIteration() { state.init(); - inputTable = - applySparsity(bmTable.getTable().select(convertToDateTime), tableSize, sparsity, 0); + inputTable = applySparsity(bmTable.getTable().select(convertToDateTime), tableSize, sparsity, 0); } diff --git a/DB/benchmark/io/deephaven/benchmark/db/SingleTableOperations.java 
b/DB/benchmark/io/deephaven/benchmark/db/SingleTableOperations.java index 07edce5f9c9..66476475de2 100644 --- a/DB/benchmark/io/deephaven/benchmark/db/SingleTableOperations.java +++ b/DB/benchmark/io/deephaven/benchmark/db/SingleTableOperations.java @@ -53,8 +53,7 @@ public void setupEnv(BenchmarkParams params) { type = Utils.primitiveTypeForName.get(typeName); Configuration.getInstance().setProperty("QueryTable.memoizeResults", "false"); - final BenchmarkTableBuilder builder = - BenchmarkTools.inMemoryTableBuilder("SingleTableOperations", + final BenchmarkTableBuilder builder = BenchmarkTools.inMemoryTableBuilder("SingleTableOperations", BenchmarkTools.sizeWithSparsity(tableSize, sparsity)); builder.setSeed(0xDEADBEEF); @@ -63,8 +62,7 @@ public void setupEnv(BenchmarkParams params) { } bmTable = builder.build(); - state = new TableBenchmarkState(BenchmarkTools.stripName(params.getBenchmark()), - params.getWarmup().getCount()); + state = new TableBenchmarkState(BenchmarkTools.stripName(params.getBenchmark()), params.getWarmup().getCount()); inputTable = applySparsity(bmTable.getTable(), tableSize, sparsity, 0); diff --git a/DB/benchmark/io/deephaven/benchmark/db/SortBenchmark.java b/DB/benchmark/io/deephaven/benchmark/db/SortBenchmark.java index 83a81d69347..07f9cfe4249 100644 --- a/DB/benchmark/io/deephaven/benchmark/db/SortBenchmark.java +++ b/DB/benchmark/io/deephaven/benchmark/db/SortBenchmark.java @@ -70,10 +70,8 @@ public class SortBenchmark { @Setup(Level.Trial) public void setupEnv(BenchmarkParams params) { - Assert.eqTrue(tableSize % sizePerStep == 0, - "Cannot evenly divide input table size by step size."); - Assert.eqTrue(workingSize % sizePerStep == 0, - "Cannot evenly divide working size by step size."); + Assert.eqTrue(tableSize % sizePerStep == 0, "Cannot evenly divide input table size by step size."); + Assert.eqTrue(workingSize % sizePerStep == 0, "Cannot evenly divide working size by step size."); workingSizeInSteps = workingSize / sizePerStep; 
LiveTableMonitor.DEFAULT.enableUnitTestMode(); @@ -81,7 +79,7 @@ public void setupEnv(BenchmarkParams params) { final int nVals = (int) (enumSize < 1 ? enumSize * tableSize : enumSize); System.out.println("String Values: " + nVals); final EnumStringColumnGenerator enumStringyCol = - (EnumStringColumnGenerator) BenchmarkTools.stringCol("Thingy", nVals, 6, 6, 0xB00FB00F); + (EnumStringColumnGenerator) BenchmarkTools.stringCol("Thingy", nVals, 6, 6, 0xB00FB00F); final BenchmarkTableBuilder builder; final int actualSize = BenchmarkTools.sizeWithSparsity(tableSize, sparsity); @@ -91,9 +89,9 @@ public void setupEnv(BenchmarkParams params) { switch (tableType) { case "Historical": builder = BenchmarkTools.persistentTableBuilder("Carlos", actualSize) - .addGroupingColumns("Thingy") - .setPartitioningFormula("${autobalance_single}") - .setPartitionCount(10); + .addGroupingColumns("Thingy") + .setPartitioningFormula("${autobalance_single}") + .setPartitionCount(10); break; case "Intraday": builder = BenchmarkTools.persistentTableBuilder("Carlos", actualSize); @@ -115,21 +113,20 @@ public void setupEnv(BenchmarkParams params) { } bmTable = builder - .setSeed(0xDEADBEEF) - .addColumn(BenchmarkTools.stringCol("PartCol", 4, 5, 7, 0xFEEDBEEF)) - // .addColumn(BenchmarkTools.stringCol("Stringy", 1, 10)) - // .addColumn(BenchmarkTools.numberCol("I1", int.class)) - .addColumn(BenchmarkTools.numberCol("D1", double.class, -10e6, 10e6)) - .addColumn(BenchmarkTools.numberCol("L1", long.class)) - // .addColumn(BenchmarkTools.numberCol("B1", byte.class)) - // .addColumn(BenchmarkTools.numberCol("S1", short.class)) - // .addColumn(BenchmarkTools.numberCol("F1", float.class)) - // .addColumn(BenchmarkTools.charCol("C1", 'A', 'Z')) - .addColumn(enumStringyCol) - .build(); - - inputTable = - (QueryTable) applySparsity(bmTable.getTable(), tableSize, sparsity, 0).coalesce(); + .setSeed(0xDEADBEEF) + .addColumn(BenchmarkTools.stringCol("PartCol", 4, 5, 7, 0xFEEDBEEF)) + // 
.addColumn(BenchmarkTools.stringCol("Stringy", 1, 10)) + // .addColumn(BenchmarkTools.numberCol("I1", int.class)) + .addColumn(BenchmarkTools.numberCol("D1", double.class, -10e6, 10e6)) + .addColumn(BenchmarkTools.numberCol("L1", long.class)) + // .addColumn(BenchmarkTools.numberCol("B1", byte.class)) + // .addColumn(BenchmarkTools.numberCol("S1", short.class)) + // .addColumn(BenchmarkTools.numberCol("F1", float.class)) + // .addColumn(BenchmarkTools.charCol("C1", 'A', 'Z')) + .addColumn(enumStringyCol) + .build(); + + inputTable = (QueryTable) applySparsity(bmTable.getTable(), tableSize, sparsity, 0).coalesce(); mcsWithSortColumn = inputTable.newModifiedColumnSet(sortCol); MutableInt ci = new MutableInt(); @@ -192,8 +189,7 @@ public Table rollingWithModNoSort() { ShiftAwareListener.Update update = new ShiftAwareListener.Update(); update.added = inputTable.getIndex().subindexByPos(addMarker, addMarker + sizePerStep - 1); - update.modified = - inputTable.getIndex().subindexByPos(modMarker, modMarker + sizePerStep - 1); + update.modified = inputTable.getIndex().subindexByPos(modMarker, modMarker + sizePerStep - 1); update.removed = inputTable.getIndex().subindexByPos(rmMarker, rmMarker + sizePerStep - 1); update.modified.retain(rollingInputIndex); update.removed.retain(rollingInputIndex); @@ -206,7 +202,7 @@ public Table rollingWithModNoSort() { }); Assert.eq(rollingOutputTable.getIndex().size(), "rollingOutputTable.getIndex().size()", - rollingInputIndex.size(), "rollingInputIndex.size()"); + rollingInputIndex.size(), "rollingInputIndex.size()"); return rollingOutputTable; } @@ -219,8 +215,7 @@ public Table rollingWithModSort() { ShiftAwareListener.Update update = new ShiftAwareListener.Update(); update.added = inputTable.getIndex().subindexByPos(addMarker, addMarker + sizePerStep - 1); - update.modified = - inputTable.getIndex().subindexByPos(modMarker, modMarker + sizePerStep - 1); + update.modified = inputTable.getIndex().subindexByPos(modMarker, modMarker + 
sizePerStep - 1); update.removed = inputTable.getIndex().subindexByPos(rmMarker, rmMarker + sizePerStep - 1); update.modified.retain(rollingInputIndex); update.removed.retain(rollingInputIndex); @@ -233,7 +228,7 @@ public Table rollingWithModSort() { }); Assert.eq(rollingOutputTable.getIndex().size(), "rollingOutputTable.getIndex().size()", - rollingInputIndex.size(), "rollingInputIndex.size()"); + rollingInputIndex.size(), "rollingInputIndex.size()"); return rollingOutputTable; } diff --git a/DB/benchmark/io/deephaven/benchmark/db/SortMultiBenchmark.java b/DB/benchmark/io/deephaven/benchmark/db/SortMultiBenchmark.java index efe837ef146..91bf140c480 100644 --- a/DB/benchmark/io/deephaven/benchmark/db/SortMultiBenchmark.java +++ b/DB/benchmark/io/deephaven/benchmark/db/SortMultiBenchmark.java @@ -43,9 +43,9 @@ public class SortMultiBenchmark { @Setup(Level.Trial) public void setupEnv(BenchmarkParams params) { final EnumStringColumnGenerator enumStringCol1 = - (EnumStringColumnGenerator) BenchmarkTools.stringCol("Enum1", 10000, 6, 6, 0xB00FB00F); + (EnumStringColumnGenerator) BenchmarkTools.stringCol("Enum1", 10000, 6, 6, 0xB00FB00F); final EnumStringColumnGenerator enumStringCol2 = - (EnumStringColumnGenerator) BenchmarkTools.stringCol("Enum2", 1000, 6, 6, 0xF00DF00D); + (EnumStringColumnGenerator) BenchmarkTools.stringCol("Enum2", 1000, 6, 6, 0xF00DF00D); final BenchmarkTableBuilder builder; final int actualSize = BenchmarkTools.sizeWithSparsity(tableSize, sparsity); @@ -55,9 +55,9 @@ public void setupEnv(BenchmarkParams params) { switch (tableType) { case "Historical": builder = BenchmarkTools.persistentTableBuilder("Carlos", actualSize) - .addGroupingColumns("Enum1") - .setPartitioningFormula("${autobalance_single}") - .setPartitionCount(10); + .addGroupingColumns("Enum1") + .setPartitioningFormula("${autobalance_single}") + .setPartitionCount(10); break; case "Intraday": builder = BenchmarkTools.persistentTableBuilder("Carlos", actualSize); @@ -68,17 +68,16 @@ 
public void setupEnv(BenchmarkParams params) { } bmTable = builder - .setSeed(0xDEADBEEF) - .addColumn(BenchmarkTools.stringCol("PartCol", 4, 5, 7, 0xFEEDBEEF)) - .addColumn(BenchmarkTools.numberCol("I1", int.class)) - .addColumn(BenchmarkTools.numberCol("D1", double.class, -10e6, 10e6)) - .addColumn(BenchmarkTools.numberCol("L1", long.class)) - .addColumn(enumStringCol1) - .addColumn(enumStringCol2) - .build(); - - state = new TableBenchmarkState(BenchmarkTools.stripName(params.getBenchmark()), - params.getWarmup().getCount()); + .setSeed(0xDEADBEEF) + .addColumn(BenchmarkTools.stringCol("PartCol", 4, 5, 7, 0xFEEDBEEF)) + .addColumn(BenchmarkTools.numberCol("I1", int.class)) + .addColumn(BenchmarkTools.numberCol("D1", double.class, -10e6, 10e6)) + .addColumn(BenchmarkTools.numberCol("L1", long.class)) + .addColumn(enumStringCol1) + .addColumn(enumStringCol2) + .build(); + + state = new TableBenchmarkState(BenchmarkTools.stripName(params.getBenchmark()), params.getWarmup().getCount()); } @TearDown(Level.Trial) diff --git a/DB/benchmark/io/deephaven/benchmark/db/SparseSelectBenchmark.java b/DB/benchmark/io/deephaven/benchmark/db/SparseSelectBenchmark.java index f921f9bf3e2..56f09e31f37 100644 --- a/DB/benchmark/io/deephaven/benchmark/db/SparseSelectBenchmark.java +++ b/DB/benchmark/io/deephaven/benchmark/db/SparseSelectBenchmark.java @@ -46,23 +46,21 @@ public void setupEnv(BenchmarkParams params) { System.out.println("Actual Size: " + actualSize); - final BenchmarkTableBuilder builder = - BenchmarkTools.persistentTableBuilder("Carlos", actualSize); + final BenchmarkTableBuilder builder = BenchmarkTools.persistentTableBuilder("Carlos", actualSize); bmTable = builder - .setSeed(0xDEADBEEF) - .addColumn(BenchmarkTools.stringCol("PartCol", 4, 5, 7, 0xFEEDBEEF)) - .addColumn(BenchmarkTools.stringCol("Stringy", 1, 10)) - .addColumn(BenchmarkTools.numberCol("I1", int.class)) - .addColumn(BenchmarkTools.numberCol("D1", double.class, -10e6, 10e6)) - 
.addColumn(BenchmarkTools.numberCol("L1", long.class)) - .addColumn(BenchmarkTools.numberCol("B1", byte.class)) - .addColumn(BenchmarkTools.numberCol("S1", short.class)) - .addColumn(BenchmarkTools.numberCol("F1", float.class)) - .addColumn(BenchmarkTools.charCol("C1", 'A', 'Z')) - .build(); - - state = new TableBenchmarkState(BenchmarkTools.stripName(params.getBenchmark()), - params.getWarmup().getCount()); + .setSeed(0xDEADBEEF) + .addColumn(BenchmarkTools.stringCol("PartCol", 4, 5, 7, 0xFEEDBEEF)) + .addColumn(BenchmarkTools.stringCol("Stringy", 1, 10)) + .addColumn(BenchmarkTools.numberCol("I1", int.class)) + .addColumn(BenchmarkTools.numberCol("D1", double.class, -10e6, 10e6)) + .addColumn(BenchmarkTools.numberCol("L1", long.class)) + .addColumn(BenchmarkTools.numberCol("B1", byte.class)) + .addColumn(BenchmarkTools.numberCol("S1", short.class)) + .addColumn(BenchmarkTools.numberCol("F1", float.class)) + .addColumn(BenchmarkTools.charCol("C1", 'A', 'Z')) + .build(); + + state = new TableBenchmarkState(BenchmarkTools.stripName(params.getBenchmark()), params.getWarmup().getCount()); inputTable = applySparsity(bmTable.getTable(), tableSize, sparsity, 0).coalesce(); } @@ -89,17 +87,16 @@ public void finishIteration(BenchmarkParams params) throws IOException { @Benchmark public Table incrementalSparseSelect() { - final Table result = - LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked(() -> IncrementalBenchmark - .incrementalBenchmark(SparseSelect::sparseSelect, inputTable, 10)); + final Table result = LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked( + () -> IncrementalBenchmark.incrementalBenchmark(SparseSelect::sparseSelect, inputTable, 10)); Assert.eq(result.size(), "result.size()", inputTable.size(), "inputTable.size()"); return state.setResult(result); } @Benchmark public Table sparseSelect() { - return state.setResult(LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLocked(() -> SparseSelect.sparseSelect(inputTable))); + return state.setResult( 
+ LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked(() -> SparseSelect.sparseSelect(inputTable))); } public static void main(final String[] args) { diff --git a/DB/benchmark/io/deephaven/benchmark/db/SumByBenchmark.java b/DB/benchmark/io/deephaven/benchmark/db/SumByBenchmark.java index d849aa59144..4adf0a24489 100644 --- a/DB/benchmark/io/deephaven/benchmark/db/SumByBenchmark.java +++ b/DB/benchmark/io/deephaven/benchmark/db/SumByBenchmark.java @@ -68,27 +68,24 @@ public void setupEnv(BenchmarkParams params) { if (keyCount == 0) { if (!"None".equals(keyType)) { - throw new UnsupportedOperationException( - "Zero Key can only be run with keyType == None"); + throw new UnsupportedOperationException("Zero Key can only be run with keyType == None"); } } else { if ("None".equals(keyType)) { - throw new UnsupportedOperationException( - "keyType == None can only be run with keyCount==0"); + throw new UnsupportedOperationException("keyType == None can only be run with keyCount==0"); } } switch (tableType) { case "Historical": builder = BenchmarkTools.persistentTableBuilder("Karl", size) - .setPartitioningFormula("${autobalance_single}") - .setPartitionCount(10); + .setPartitioningFormula("${autobalance_single}") + .setPartitionCount(10); break; case "Intraday": builder = BenchmarkTools.persistentTableBuilder("Karl", size); if (grouped) { - throw new UnsupportedOperationException( - "Can not run this benchmark combination."); + throw new UnsupportedOperationException("Can not run this benchmark combination."); } break; @@ -96,14 +93,12 @@ public void setupEnv(BenchmarkParams params) { throw new IllegalStateException("Table type must be Historical or Intraday"); } - builder.setSeed(0xDEADBEEF) - .addColumn(BenchmarkTools.stringCol("PartCol", 1, 5, 7, 0xFEEDBEEF)); + builder.setSeed(0xDEADBEEF).addColumn(BenchmarkTools.stringCol("PartCol", 1, 5, 7, 0xFEEDBEEF)); - final EnumStringColumnGenerator stringKey = - (EnumStringColumnGenerator) 
BenchmarkTools.stringCol("KeyString", keyCount, 6, 6, - 0xB00FB00F, EnumStringColumnGenerator.Mode.Rotate); - final ColumnGenerator intKey = BenchmarkTools.seqNumberCol("KeyInt", int.class, 0, 1, - keyCount, SequentialNumColumnGenerator.Mode.RollAtLimit); + final EnumStringColumnGenerator stringKey = (EnumStringColumnGenerator) BenchmarkTools.stringCol("KeyString", + keyCount, 6, 6, 0xB00FB00F, EnumStringColumnGenerator.Mode.Rotate); + final ColumnGenerator intKey = BenchmarkTools.seqNumberCol("KeyInt", int.class, 0, 1, keyCount, + SequentialNumColumnGenerator.Mode.RollAtLimit); System.out.println("Key type: " + keyType); switch (keyType) { @@ -120,22 +115,19 @@ public void setupEnv(BenchmarkParams params) { builder.addColumn(intKey); keyName = stringKey.getName() + "," + intKey.getName(); if (grouped) { - throw new UnsupportedOperationException( - "Can not run this benchmark combination."); + throw new UnsupportedOperationException("Can not run this benchmark combination."); } break; case "None": keyName = ""; if (grouped) { - throw new UnsupportedOperationException( - "Can not run this benchmark combination."); + throw new UnsupportedOperationException("Can not run this benchmark combination."); } break; default: throw new IllegalStateException("Unknown KeyType: " + keyType); } - keyColumnNames = - keyCount > 0 ? keyName.split(",") : CollectionUtil.ZERO_LENGTH_STRING_ARRAY; + keyColumnNames = keyCount > 0 ? 
keyName.split(",") : CollectionUtil.ZERO_LENGTH_STRING_ARRAY; switch (valueCount) { case 8: @@ -156,8 +148,7 @@ public void setupEnv(BenchmarkParams params) { builder.addColumn(BenchmarkTools.numberCol("ValueToSum1", int.class)); break; default: - throw new IllegalArgumentException( - "Can not initialize with " + valueCount + " values."); + throw new IllegalArgumentException("Can not initialize with " + valueCount + " values."); } if (grouped) { @@ -165,10 +156,9 @@ public void setupEnv(BenchmarkParams params) { } final BenchmarkTable bmt = builder - .build(); + .build(); - state = new TableBenchmarkState(BenchmarkTools.stripName(params.getBenchmark()), - params.getWarmup().getCount()); + state = new TableBenchmarkState(BenchmarkTools.stripName(params.getBenchmark()), params.getWarmup().getCount()); table = bmt.getTable().coalesce().dropColumns("PartCol"); } @@ -194,50 +184,46 @@ public void finishIteration(BenchmarkParams params) throws IOException { @Benchmark public Table sumByStatic(@NotNull final Blackhole bh) { - final Table result = - LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> table.sumBy(keyColumnNames)); + final Table result = LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> table.sumBy(keyColumnNames)); bh.consume(result); return state.setResult(TableTools.emptyTable(0)); } @Benchmark public Table sumByIncremental(@NotNull final Blackhole bh) { - final Table result = - IncrementalBenchmark.incrementalBenchmark((t) -> LiveTableMonitor.DEFAULT.sharedLock() - .computeLocked(() -> t.sumBy(keyColumnNames)), table); + final Table result = IncrementalBenchmark.incrementalBenchmark( + (t) -> LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> t.sumBy(keyColumnNames)), table); bh.consume(result); return state.setResult(TableTools.emptyTable(0)); } @Benchmark public Table sumByRolling(@NotNull final Blackhole bh) { - final Table result = IncrementalBenchmark.rollingBenchmark((t) -> LiveTableMonitor.DEFAULT - 
.sharedLock().computeLocked(() -> t.sumBy(keyColumnNames)), table); + final Table result = IncrementalBenchmark.rollingBenchmark( + (t) -> LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> t.sumBy(keyColumnNames)), table); bh.consume(result); return state.setResult(TableTools.emptyTable(0)); } @Benchmark public Table minByStatic(@NotNull final Blackhole bh) { - final Table result = - LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> table.minBy(keyColumnNames)); + final Table result = LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> table.minBy(keyColumnNames)); bh.consume(result); return state.setResult(TableTools.emptyTable(0)); } @Benchmark public Table minByIncremental(@NotNull final Blackhole bh) { - final Table result = - IncrementalBenchmark.incrementalBenchmark((t) -> LiveTableMonitor.DEFAULT.sharedLock() - .computeLocked(() -> t.minBy(keyColumnNames)), table); + final Table result = IncrementalBenchmark.incrementalBenchmark( + (t) -> LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> t.minBy(keyColumnNames)), table); bh.consume(result); return state.setResult(TableTools.emptyTable(0)); } @Benchmark public Table minByRolling(@NotNull final Blackhole bh) { - final Table result = IncrementalBenchmark.rollingBenchmark((t) -> LiveTableMonitor.DEFAULT - .sharedLock().computeLocked(() -> t.minBy(keyColumnNames)), table); + final Table result = IncrementalBenchmark.rollingBenchmark( + (t) -> LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> t.minBy(keyColumnNames)), table); bh.consume(result); return state.setResult(TableTools.emptyTable(0)); } @@ -245,12 +231,13 @@ public Table minByRolling(@NotNull final Blackhole bh) { @Benchmark public Table minMaxByStatic(@NotNull final Blackhole bh) { final ComboAggregateFactory.ComboBy minCols = AggMin(IntStream.range(1, valueCount + 1) - .mapToObj(ii -> "Min" + ii + "=ValueToSum" + ii).toArray(String[]::new)); + .mapToObj(ii -> "Min" + ii + "=ValueToSum" + 
ii).toArray(String[]::new)); final ComboAggregateFactory.ComboBy maxCols = AggMax(IntStream.range(1, valueCount + 1) - .mapToObj(ii -> "Max" + ii + "=ValueToSum" + ii).toArray(String[]::new)); + .mapToObj(ii -> "Max" + ii + "=ValueToSum" + ii).toArray(String[]::new)); - final Table result = IncrementalBenchmark.rollingBenchmark((t) -> LiveTableMonitor.DEFAULT - .sharedLock().computeLocked(() -> t.by(AggCombo(minCols, maxCols))), table); + final Table result = IncrementalBenchmark.rollingBenchmark( + (t) -> LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> t.by(AggCombo(minCols, maxCols))), + table); bh.consume(result); return state.setResult(TableTools.emptyTable(0)); } @@ -258,12 +245,13 @@ public Table minMaxByStatic(@NotNull final Blackhole bh) { @Benchmark public Table minMaxByIncremental(@NotNull final Blackhole bh) { final ComboAggregateFactory.ComboBy minCols = AggMin(IntStream.range(1, valueCount + 1) - .mapToObj(ii -> "Min" + ii + "=ValueToSum" + ii).toArray(String[]::new)); + .mapToObj(ii -> "Min" + ii + "=ValueToSum" + ii).toArray(String[]::new)); final ComboAggregateFactory.ComboBy maxCols = AggMax(IntStream.range(1, valueCount + 1) - .mapToObj(ii -> "Max" + ii + "=ValueToSum" + ii).toArray(String[]::new)); + .mapToObj(ii -> "Max" + ii + "=ValueToSum" + ii).toArray(String[]::new)); - final Table result = IncrementalBenchmark.rollingBenchmark((t) -> LiveTableMonitor.DEFAULT - .sharedLock().computeLocked(() -> t.by(AggCombo(minCols, maxCols))), table); + final Table result = IncrementalBenchmark.rollingBenchmark( + (t) -> LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> t.by(AggCombo(minCols, maxCols))), + table); bh.consume(result); return state.setResult(TableTools.emptyTable(0)); } @@ -271,46 +259,43 @@ public Table minMaxByIncremental(@NotNull final Blackhole bh) { @Benchmark public Table minMaxByRolling(@NotNull final Blackhole bh) { final ComboAggregateFactory.ComboBy minCols = AggMin(IntStream.range(1, valueCount + 1) - 
.mapToObj(ii -> "Min" + ii + "=ValueToSum" + ii).toArray(String[]::new)); + .mapToObj(ii -> "Min" + ii + "=ValueToSum" + ii).toArray(String[]::new)); final ComboAggregateFactory.ComboBy maxCols = AggMax(IntStream.range(1, valueCount + 1) - .mapToObj(ii -> "Max" + ii + "=ValueToSum" + ii).toArray(String[]::new)); + .mapToObj(ii -> "Max" + ii + "=ValueToSum" + ii).toArray(String[]::new)); - final Table result = IncrementalBenchmark.rollingBenchmark((t) -> LiveTableMonitor.DEFAULT - .sharedLock().computeLocked(() -> t.by(AggCombo(minCols, maxCols))), table); + final Table result = IncrementalBenchmark.rollingBenchmark( + (t) -> LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> t.by(AggCombo(minCols, maxCols))), + table); bh.consume(result); return state.setResult(TableTools.emptyTable(0)); } @Benchmark public Table varByStatic(@NotNull final Blackhole bh) { - final Table result = - LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> table.varBy(keyColumnNames)); + final Table result = LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> table.varBy(keyColumnNames)); bh.consume(result); return state.setResult(TableTools.emptyTable(0)); } @Benchmark public Table varByIncremental(@NotNull final Blackhole bh) { - final Table result = - IncrementalBenchmark.incrementalBenchmark((t) -> LiveTableMonitor.DEFAULT.sharedLock() - .computeLocked(() -> t.varBy(keyColumnNames)), table); + final Table result = IncrementalBenchmark.incrementalBenchmark( + (t) -> LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> t.varBy(keyColumnNames)), table); bh.consume(result); return state.setResult(TableTools.emptyTable(0)); } @Benchmark public Table avgByStatic(@NotNull final Blackhole bh) { - final Table result = - LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> table.avgBy(keyColumnNames)); + final Table result = LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> table.avgBy(keyColumnNames)); bh.consume(result); return 
state.setResult(TableTools.emptyTable(0)); } @Benchmark public Table avgByIncremental(@NotNull final Blackhole bh) { - final Table result = - IncrementalBenchmark.incrementalBenchmark((t) -> LiveTableMonitor.DEFAULT.sharedLock() - .computeLocked(() -> t.avgBy(keyColumnNames)), table); + final Table result = IncrementalBenchmark.incrementalBenchmark( + (t) -> LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> t.avgBy(keyColumnNames)), table); bh.consume(result); return state.setResult(TableTools.emptyTable(0)); } diff --git a/DB/benchmark/io/deephaven/benchmark/db/TestSortIncrementalPerformance.java b/DB/benchmark/io/deephaven/benchmark/db/TestSortIncrementalPerformance.java index 85a8044a11e..65fae8e9c95 100644 --- a/DB/benchmark/io/deephaven/benchmark/db/TestSortIncrementalPerformance.java +++ b/DB/benchmark/io/deephaven/benchmark/db/TestSortIncrementalPerformance.java @@ -48,16 +48,15 @@ private void incrementalSort(int size, int steps) { final double lgsize = Math.log(size) / Math.log(2); System.out.println("Size = " + size + ", Steps = " + steps + ", Average (s): " - + ((double) sum / count) / 1000_000_000.0 + ", ns/Element: " - + (double) (sum / (size * count)) + ", ns/n lg n: " - + (double) (sum / (size * lgsize * count))); + + ((double) sum / count) / 1000_000_000.0 + ", ns/Element: " + (double) (sum / (size * count)) + + ", ns/n lg n: " + (double) (sum / (size * lgsize * count))); } private long incrementalSort(int seed, long size, int steps) { final Random random = new Random(seed); QueryScope.addParam("random", random); - final Table tableToSort = TableTools.emptyTable(size).update("Sentinel=ii", - "D=random.nextDouble()", "L=random.nextLong()"); + final Table tableToSort = + TableTools.emptyTable(size).update("Sentinel=ii", "D=random.nextDouble()", "L=random.nextLong()"); final long start = System.nanoTime(); final Table result = incrementalBenchmark(tableToSort, (Table t) -> t.sort("D"), steps); @@ -70,7 +69,7 @@ private long incrementalSort(int 
seed, long size, int steps) { private R incrementalBenchmark(Table inputTable, Function function, int steps) { final long sizePerStep = Math.max(inputTable.size() / steps, 1); final IncrementalReleaseFilter incrementalReleaseFilter = - new IncrementalReleaseFilter(sizePerStep, sizePerStep); + new IncrementalReleaseFilter(sizePerStep, sizePerStep); final Table filtered = inputTable.where(incrementalReleaseFilter); final R result = function.apply(filtered); @@ -86,8 +85,7 @@ private R incrementalBenchmark(Table inputTable, Function function static public void setup() { LiveTableMonitor.DEFAULT.enableUnitTestMode(); AsyncClientErrorNotifier.setReporter(t -> { - System.err.println( - "Received error notification: " + new ExceptionDetails(t).getFullStackTrace()); + System.err.println("Received error notification: " + new ExceptionDetails(t).getFullStackTrace()); TestCase.fail(t.getMessage()); }); } diff --git a/DB/benchmark/io/deephaven/benchmark/db/UngroupedColumnSourceBench.java b/DB/benchmark/io/deephaven/benchmark/db/UngroupedColumnSourceBench.java index ae4d6cdd14e..2a4824c367b 100644 --- a/DB/benchmark/io/deephaven/benchmark/db/UngroupedColumnSourceBench.java +++ b/DB/benchmark/io/deephaven/benchmark/db/UngroupedColumnSourceBench.java @@ -40,7 +40,7 @@ protected QueryData getQuery() { final BenchmarkTableBuilder builder1; builder1 = BenchmarkTools.persistentTableBuilder("T1", tableSize / 10); builder1.setSeed(0xDEADB00F) - .addColumn(BenchmarkTools.stringCol("PartCol1", 4, 5, 7, 0xFEEDBEEF)); + .addColumn(BenchmarkTools.stringCol("PartCol1", 4, 5, 7, 0xFEEDBEEF)); final String joinCol = "L"; builder1.addColumn(BenchmarkTools.seqNumberCol(joinCol, long.class, 0, 1)); builder1.addColumn(BenchmarkTools.numberCol("I1", int.class, -10_000_000, 10_000_000)); @@ -49,31 +49,30 @@ protected QueryData getQuery() { final BenchmarkTableBuilder builder2; builder2 = BenchmarkTools.persistentTableBuilder("T2", tableSize); builder2.setSeed(0xDEADBEEF) - 
.addColumn(BenchmarkTools.stringCol("PartCol2", 4, 5, 7, 0xFEEDB00F)); + .addColumn(BenchmarkTools.stringCol("PartCol2", 4, 5, 7, 0xFEEDB00F)); builder2.addColumn(BenchmarkTools.numberCol(joinCol, long.class, 0, tableSize)); final BenchmarkTable bmTable2 = builder2.build(); final Table t1 = bmTable1.getTable().coalesce(); final Table t2 = bmTable2.getTable().coalesce(); final long sizePerStep = Math.max(t1.size() / steps, 1); final IncrementalReleaseFilter incrementalReleaseFilter = - new IncrementalReleaseFilter(sizePerStep, sizePerStep); + new IncrementalReleaseFilter(sizePerStep, sizePerStep); final Table live; if (doSelect) { live = LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked( - () -> t1.where(incrementalReleaseFilter).select(joinCol, "PartCol1", "I1") - .sort("I1").join( - t2, joinCol, "PartCol2")); + () -> t1.where(incrementalReleaseFilter).select(joinCol, "PartCol1", "I1").sort("I1").join( + t2, joinCol, "PartCol2")); } else { live = LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked( - () -> t1.where(incrementalReleaseFilter).sort("I1").join( - t2, joinCol, "PartCol2")); + () -> t1.where(incrementalReleaseFilter).sort("I1").join( + t2, joinCol, "PartCol2")); } return new QueryData( - live, - incrementalReleaseFilter, - steps, - new String[] {joinCol}, - WritableLongChunk.makeWritableChunk(chunkCapacity)); + live, + incrementalReleaseFilter, + steps, + new String[] {joinCol}, + WritableLongChunk.makeWritableChunk(chunkCapacity)); } public static void main(final String[] args) { diff --git a/DB/benchmark/io/deephaven/benchmark/db/UpdateBenchmark.java b/DB/benchmark/io/deephaven/benchmark/db/UpdateBenchmark.java index 625fd5e011d..b1296c66d82 100644 --- a/DB/benchmark/io/deephaven/benchmark/db/UpdateBenchmark.java +++ b/DB/benchmark/io/deephaven/benchmark/db/UpdateBenchmark.java @@ -193,20 +193,18 @@ private String chainedUpdate() { @Setup(Level.Trial) public void setupEnv(BenchmarkParams params) { final EnumStringColumnGenerator enumStringyCol 
= - (EnumStringColumnGenerator) BenchmarkTools.stringCol("Thingy", 30, 6, 6, 0xB00FB00F); + (EnumStringColumnGenerator) BenchmarkTools.stringCol("Thingy", 30, 6, 6, 0xB00FB00F); final BenchmarkTableBuilder builder; switch (tableType) { case "Historical": - builder = BenchmarkTools - .persistentTableBuilder("Carlos", sizeWithSparsity(tableSize, sparsity)) - .addGroupingColumns("Thingy") - .setPartitioningFormula("${autobalance_single}") - .setPartitionCount(10); + builder = BenchmarkTools.persistentTableBuilder("Carlos", sizeWithSparsity(tableSize, sparsity)) + .addGroupingColumns("Thingy") + .setPartitioningFormula("${autobalance_single}") + .setPartitionCount(10); break; case "Intraday": - builder = BenchmarkTools.persistentTableBuilder("Carlos", - sizeWithSparsity(tableSize, sparsity)); + builder = BenchmarkTools.persistentTableBuilder("Carlos", sizeWithSparsity(tableSize, sparsity)); break; default: @@ -267,18 +265,16 @@ public void setupEnv(BenchmarkParams params) { } bmTable = builder - .addColumn(BenchmarkTools.stringCol("C4", 4, 5, 7, 0xFEEDBEEF)) - .addColumn(enumStringyCol) - .build(); + .addColumn(BenchmarkTools.stringCol("C4", 4, 5, 7, 0xFEEDBEEF)) + .addColumn(enumStringyCol) + .build(); - state = new TableBenchmarkState(BenchmarkTools.stripName(params.getBenchmark()), - params.getWarmup().getCount()); + state = new TableBenchmarkState(BenchmarkTools.stripName(params.getBenchmark()), params.getWarmup().getCount()); final List uniqueThingyVals = Arrays.asList(enumStringyCol.getEnumVals()); final String updateString; - // filterString (where clause) is unused in the current version of this benchmark, but is - // included + // filterString (where clause) is unused in the current version of this benchmark, but is included // as a model in case it's needed in other benchmarks copied from this class. 
filterString = ""; diff --git a/DB/benchmark/io/deephaven/benchmark/db/WhereBenchmark.java b/DB/benchmark/io/deephaven/benchmark/db/WhereBenchmark.java index 2cb81302aa1..317f0cc6715 100644 --- a/DB/benchmark/io/deephaven/benchmark/db/WhereBenchmark.java +++ b/DB/benchmark/io/deephaven/benchmark/db/WhereBenchmark.java @@ -51,22 +51,21 @@ public class WhereBenchmark { @Setup(Level.Trial) public void setupEnv(BenchmarkParams params) { final EnumStringColumnGenerator enumStringyCol = - (EnumStringColumnGenerator) BenchmarkTools.stringCol("Thingy", 30, 6, 6, 0xB00FB00F); + (EnumStringColumnGenerator) BenchmarkTools.stringCol("Thingy", 30, 6, 6, 0xB00FB00F); final BenchmarkTableBuilder builder; switch (tableType) { case "Historical": builder = BenchmarkTools - .persistentTableBuilder("Carlos", - BenchmarkTools.sizeWithSparsity(tableSize, sparsity)) - .addGroupingColumns("Thingy") - .setPartitioningFormula("${autobalance_single}") - .setPartitionCount(10); + .persistentTableBuilder("Carlos", BenchmarkTools.sizeWithSparsity(tableSize, sparsity)) + .addGroupingColumns("Thingy") + .setPartitioningFormula("${autobalance_single}") + .setPartitionCount(10); break; case "Intraday": builder = BenchmarkTools.persistentTableBuilder("Carlos", - BenchmarkTools.sizeWithSparsity(tableSize, sparsity)); + BenchmarkTools.sizeWithSparsity(tableSize, sparsity)); break; default: @@ -74,17 +73,16 @@ public void setupEnv(BenchmarkParams params) { } bmTable = builder - .setSeed(0xDEADBEEF) - .addColumn(BenchmarkTools.stringCol("Stringy", 1, 10)) - .addColumn(BenchmarkTools.numberCol("C2", int.class)) - .addColumn(BenchmarkTools.numberCol("C3", double.class, -10e6, 10e6)) - .addColumn(BenchmarkTools.stringCol("C4", 4, 5, 7, 0xFEEDBEEF)) - .addColumn(BenchmarkTools.numberCol("C5", double.class, -10e6, 10e6)) - .addColumn(enumStringyCol) - .build(); - - state = new TableBenchmarkState(BenchmarkTools.stripName(params.getBenchmark()), - params.getWarmup().getCount()); + .setSeed(0xDEADBEEF) + 
.addColumn(BenchmarkTools.stringCol("Stringy", 1, 10)) + .addColumn(BenchmarkTools.numberCol("C2", int.class)) + .addColumn(BenchmarkTools.numberCol("C3", double.class, -10e6, 10e6)) + .addColumn(BenchmarkTools.stringCol("C4", 4, 5, 7, 0xFEEDBEEF)) + .addColumn(BenchmarkTools.numberCol("C5", double.class, -10e6, 10e6)) + .addColumn(enumStringyCol) + .build(); + + state = new TableBenchmarkState(BenchmarkTools.stripName(params.getBenchmark()), params.getWarmup().getCount()); final List uniqueThingyVals = Arrays.asList(enumStringyCol.getEnumVals()); final String filterString; @@ -92,13 +90,12 @@ public void setupEnv(BenchmarkParams params) { switch (testType) { case "StringGroupedMatch": filterString = "Thingy in " + - "`" + uniqueThingyVals.get(0) + "`, " + - "`" + uniqueThingyVals.get(uniqueThingyVals.size() - 1) + "`, " + - "`NotInTheSet`"; + "`" + uniqueThingyVals.get(0) + "`, " + + "`" + uniqueThingyVals.get(uniqueThingyVals.size() - 1) + "`, " + + "`NotInTheSet`"; break; case "StringGroupedCondition": - filterString = - "Thingy.startsWith(`" + uniqueThingyVals.get(0).substring(0, 2) + "`)"; + filterString = "Thingy.startsWith(`" + uniqueThingyVals.get(0).substring(0, 2) + "`)"; break; case "DoubleUngroupedCondition": filterString = "C3 > 0"; diff --git a/DB/benchmark/io/deephaven/db/v2/sort/partition/PartitionKernelBenchmark.java b/DB/benchmark/io/deephaven/db/v2/sort/partition/PartitionKernelBenchmark.java index 06374850d0d..c8babff5646 100644 --- a/DB/benchmark/io/deephaven/db/v2/sort/partition/PartitionKernelBenchmark.java +++ b/DB/benchmark/io/deephaven/db/v2/sort/partition/PartitionKernelBenchmark.java @@ -15,25 +15,23 @@ public class PartitionKernelBenchmark { public static void main(String[] args) throws RunnerException { final Options opt = new OptionsBuilder() - // .include("[A-Z].*PartitionKernelBenchmark") - .include("(Long|Int|Double|Object)PartitionKernelBenchmark") - .warmupIterations(1) - .measurementIterations(10) - 
.measurementTime(TimeValue.seconds(1)) - // .param("dataSize", IntStream.range(8, 21).mapToObj(exp -> Integer.toString(1 << - // exp)).toArray(String[]::new)) - // .param("chunkSize", IntStream.range(10, 11).mapToObj(exp -> Integer.toString(1 << - // exp)).toArray(String[]::new)) - .param("dataSize", - IntStream.range(20, 24).mapToObj(exp -> Integer.toString(1 << exp)) - .toArray(String[]::new)) - .param("chunkSize", - IntStream.range(10, 17).mapToObj(exp -> Integer.toString(1 << exp)) - .toArray(String[]::new)) - .param("runType", "random") - .param("preserveEquality", "true", "false") - .jvmArgsPrepend("-Xmx8g") - .build(); + // .include("[A-Z].*PartitionKernelBenchmark") + .include("(Long|Int|Double|Object)PartitionKernelBenchmark") + .warmupIterations(1) + .measurementIterations(10) + .measurementTime(TimeValue.seconds(1)) + // .param("dataSize", IntStream.range(8, 21).mapToObj(exp -> Integer.toString(1 << + // exp)).toArray(String[]::new)) + // .param("chunkSize", IntStream.range(10, 11).mapToObj(exp -> Integer.toString(1 << + // exp)).toArray(String[]::new)) + .param("dataSize", + IntStream.range(20, 24).mapToObj(exp -> Integer.toString(1 << exp)).toArray(String[]::new)) + .param("chunkSize", + IntStream.range(10, 17).mapToObj(exp -> Integer.toString(1 << exp)).toArray(String[]::new)) + .param("runType", "random") + .param("preserveEquality", "true", "false") + .jvmArgsPrepend("-Xmx8g") + .build(); final Collection runResults = new Runner(opt).run(); diff --git a/DB/benchmark/io/deephaven/db/v2/sort/timsort/SortKernelBenchmark.java b/DB/benchmark/io/deephaven/db/v2/sort/timsort/SortKernelBenchmark.java index 58b5b11f010..dc2be5a0dc2 100644 --- a/DB/benchmark/io/deephaven/db/v2/sort/timsort/SortKernelBenchmark.java +++ b/DB/benchmark/io/deephaven/db/v2/sort/timsort/SortKernelBenchmark.java @@ -14,16 +14,15 @@ public class SortKernelBenchmark { public static void main(String[] args) throws RunnerException { final Options opt = new OptionsBuilder() - 
.include("[A-Z].*SortKernelBenchmark") - .warmupIterations(1) - .measurementIterations(10) - .measurementTime(TimeValue.seconds(1)) - .param("chunkSize", - IntStream.range(8, 21).mapToObj(exp -> Integer.toString(1 << exp)) - .toArray(String[]::new)) - .param("algorithm", "timsort", "javaarray") - .param("runType", "random", "runs") - .build(); + .include("[A-Z].*SortKernelBenchmark") + .warmupIterations(1) + .measurementIterations(10) + .measurementTime(TimeValue.seconds(1)) + .param("chunkSize", + IntStream.range(8, 21).mapToObj(exp -> Integer.toString(1 << exp)).toArray(String[]::new)) + .param("algorithm", "timsort", "javaarray") + .param("runType", "random", "runs") + .build(); final Collection runResults = new Runner(opt).run(); diff --git a/DB/benchmark/io/deephaven/db/v2/sources/ColumnSourceFillBenchmark.java b/DB/benchmark/io/deephaven/db/v2/sources/ColumnSourceFillBenchmark.java index 6448c82d30a..43659900c34 100644 --- a/DB/benchmark/io/deephaven/db/v2/sources/ColumnSourceFillBenchmark.java +++ b/DB/benchmark/io/deephaven/db/v2/sources/ColumnSourceFillBenchmark.java @@ -114,14 +114,14 @@ public void fillChunkSparseArray(Blackhole bh) { public static void main(String[] args) throws RunnerException { final Options opt = new OptionsBuilder() - .include(ColumnSourceFillBenchmark.class.getSimpleName()) - .param("typeName", "char", "byte", "short", "int", "long", "float", "double") - .param("fetchSize", - IntStream.range(6, 25).filter(exp -> exp % 2 == 0).map(exp -> 1 << exp) - .mapToObj(Integer::toString).toArray(String[]::new)) - .jvmArgs("-Xmx8g", "-Xms8g") - .forks(1) - .build(); + .include(ColumnSourceFillBenchmark.class.getSimpleName()) + .param("typeName", "char", "byte", "short", "int", "long", "float", "double") + .param("fetchSize", + IntStream.range(6, 25).filter(exp -> exp % 2 == 0).map(exp -> 1 << exp) + .mapToObj(Integer::toString).toArray(String[]::new)) + .jvmArgs("-Xmx8g", "-Xms8g") + .forks(1) + .build(); final Collection results = new 
Runner(opt).run(); CsvResultWriter.recordResults(results, ColumnSourceFillBenchmark.class); diff --git a/DB/benchmark/io/deephaven/db/v2/sources/ReplicateColumnSourceFillBenchmark.java b/DB/benchmark/io/deephaven/db/v2/sources/ReplicateColumnSourceFillBenchmark.java index a29585995ad..a1c90bf1933 100644 --- a/DB/benchmark/io/deephaven/db/v2/sources/ReplicateColumnSourceFillBenchmark.java +++ b/DB/benchmark/io/deephaven/db/v2/sources/ReplicateColumnSourceFillBenchmark.java @@ -6,7 +6,6 @@ public class ReplicateColumnSourceFillBenchmark { public static void main(String[] args) throws IOException { - ReplicatePrimitiveCode.charToAllButBoolean(CharHelper.class, - ReplicatePrimitiveCode.BENCHMARK_SRC); + ReplicatePrimitiveCode.charToAllButBoolean(CharHelper.class, ReplicatePrimitiveCode.BENCHMARK_SRC); } } diff --git a/DB/benchmark/io/deephaven/db/v2/utils/BuildSortedRangesBench.java b/DB/benchmark/io/deephaven/db/v2/utils/BuildSortedRangesBench.java index a14f29bb26a..2e1fb1a4159 100644 --- a/DB/benchmark/io/deephaven/db/v2/utils/BuildSortedRangesBench.java +++ b/DB/benchmark/io/deephaven/db/v2/utils/BuildSortedRangesBench.java @@ -19,8 +19,7 @@ public class BuildSortedRangesBench { - private static final int count = 415 * 1000; // -Xmx768M makes for a tight heap, which is how we - // want to test. + private static final int count = 415 * 1000; // -Xmx768M makes for a tight heap, which is how we want to test. 
private static final int sortedRangesIntMaxCapacity = SortedRanges.MAX_CAPACITY / 2; private static final int sz = sortedRangesIntMaxCapacity - 2; private static final long[] values = new long[sz + 1]; diff --git a/DB/benchmark/io/deephaven/db/v2/utils/ComplementRangeIteratorTest.java b/DB/benchmark/io/deephaven/db/v2/utils/ComplementRangeIteratorTest.java index cc180963dbf..0f36ed93027 100644 --- a/DB/benchmark/io/deephaven/db/v2/utils/ComplementRangeIteratorTest.java +++ b/DB/benchmark/io/deephaven/db/v2/utils/ComplementRangeIteratorTest.java @@ -69,8 +69,7 @@ public void testSingleRangeInput() { final long start = points[i]; final long end = points[i + 1]; final Index ix = Index.FACTORY.getIndexByRange(start, end); - try (final ComplementRangeIterator cit = - new ComplementRangeIterator(ix.rangeIterator())) { + try (final ComplementRangeIterator cit = new ComplementRangeIterator(ix.rangeIterator())) { assertTrue(cit.hasNext()); final String m = "i=" + i; if (i == 0) { @@ -145,8 +144,7 @@ public void testAdvanceSimple() { for (long v : new long[] {0, 1, 5, 8, 9}) { final String m = "v=" + v; - try (final ComplementRangeIterator cit = - new ComplementRangeIterator(ix.rangeIterator())) { + try (final ComplementRangeIterator cit = new ComplementRangeIterator(ix.rangeIterator())) { assertTrue(m, cit.hasNext()); assertTrue(m, cit.advance(v)); assertEquals(m, v, cit.currentRangeStart()); @@ -160,8 +158,7 @@ public void testAdvanceSimple() { for (long v : new long[] {10, 11, 15, 20, 21, 26, 28, 29}) { final String m = "v=" + v; - try (final ComplementRangeIterator cit = - new ComplementRangeIterator(ix.rangeIterator())) { + try (final ComplementRangeIterator cit = new ComplementRangeIterator(ix.rangeIterator())) { assertTrue(m, cit.hasNext()); assertTrue(m, cit.advance(v)); assertEquals(m, Math.max(v, 21), cit.currentRangeStart()); @@ -173,11 +170,9 @@ public void testAdvanceSimple() { } } - for (long v : new long[] {30, 31, 35, 39, 40, 41, 600, Long.MAX_VALUE - 1, - 
Long.MAX_VALUE}) { + for (long v : new long[] {30, 31, 35, 39, 40, 41, 600, Long.MAX_VALUE - 1, Long.MAX_VALUE}) { final String m = "v=" + v; - try (final ComplementRangeIterator cit = - new ComplementRangeIterator(ix.rangeIterator())) { + try (final ComplementRangeIterator cit = new ComplementRangeIterator(ix.rangeIterator())) { assertTrue(m, cit.hasNext()); assertTrue(m, cit.advance(v)); assertEquals(m, Math.max(v, 41), cit.currentRangeStart()); @@ -195,8 +190,7 @@ public void testAdvanceInputContainsZero() { for (long v : new long[] {0, 1, 5, 9, 10, 11, 12, 15, 18, 19}) { final String m = "v=" + v; - try (final ComplementRangeIterator cit = - new ComplementRangeIterator(ix.rangeIterator())) { + try (final ComplementRangeIterator cit = new ComplementRangeIterator(ix.rangeIterator())) { assertTrue(m, cit.hasNext()); assertTrue(m, cit.advance(v)); assertEquals(m, Math.max(11, v), cit.currentRangeStart()); @@ -217,8 +211,7 @@ public void testAdvanceInputContainsMax() { for (long v : new long[] {20, 21, 25, 1000000, Long.MAX_VALUE - 1, Long.MAX_VALUE}) { final String m = "v=" + v; - try (final ComplementRangeIterator cit = - new ComplementRangeIterator(ix.rangeIterator())) { + try (final ComplementRangeIterator cit = new ComplementRangeIterator(ix.rangeIterator())) { assertTrue(m, cit.hasNext()); assertEquals(0, cit.next()); assertFalse(m, cit.advance(v)); @@ -232,8 +225,7 @@ public void testAdvanceEmpty() { final Index ix = Index.FACTORY.getIndexByRange(0, Long.MAX_VALUE); for (long v : new long[] {0, 1, 20, Long.MAX_VALUE - 1, Long.MAX_VALUE}) { final String m = "v=" + v; - try (final ComplementRangeIterator cit = - new ComplementRangeIterator(ix.rangeIterator())) { + try (final ComplementRangeIterator cit = new ComplementRangeIterator(ix.rangeIterator())) { assertFalse(m, cit.advance(v)); assertFalse(m, cit.hasNext()); } @@ -247,8 +239,7 @@ public void testAdvanceCoverage1() { ix.insertRange(20, Long.MAX_VALUE); for (long v : new long[] {20, 21, 30, 
Long.MAX_VALUE}) { final String m = "v=" + v; - try (final ComplementRangeIterator cit = - new ComplementRangeIterator(ix.rangeIterator())) { + try (final ComplementRangeIterator cit = new ComplementRangeIterator(ix.rangeIterator())) { assertTrue(m, cit.hasNext()); assertEquals(11, cit.next()); assertFalse(cit.advance(v)); @@ -265,8 +256,7 @@ public void testAdvanceCoverage2() { ix.insertRange(50, 60); for (long v : new long[] {30, 31, 35, 39, 40, 41, 42, 45, 48, 49}) { final String m = "v=" + v; - try (final ComplementRangeIterator cit = - new ComplementRangeIterator(ix.rangeIterator())) { + try (final ComplementRangeIterator cit = new ComplementRangeIterator(ix.rangeIterator())) { assertTrue(m, cit.hasNext()); assertEquals(m, 0, cit.next()); assertTrue(m, cit.advance(v)); @@ -282,8 +272,7 @@ public void testAdvanceCoverage2() { for (long v : new long[] {40, 41, 42, 45, 48, 49}) { final String m = "v=" + v; - try (final ComplementRangeIterator cit = - new ComplementRangeIterator(ix.rangeIterator())) { + try (final ComplementRangeIterator cit = new ComplementRangeIterator(ix.rangeIterator())) { assertTrue(m, cit.hasNext()); assertEquals(m, 0, cit.next()); assertTrue(m, cit.advance(v)); @@ -299,8 +288,7 @@ public void testAdvanceCoverage2() { ix.insertRange(70, 80); for (long v : new long[] {30, 31, 35, 39, 40, 41, 42, 45, 48, 49}) { final String m = "v=" + v; - try (final ComplementRangeIterator cit = - new ComplementRangeIterator(ix.rangeIterator())) { + try (final ComplementRangeIterator cit = new ComplementRangeIterator(ix.rangeIterator())) { assertTrue(m, cit.hasNext()); assertEquals(m, 0, cit.next()); assertTrue(m, cit.advance(v)); @@ -317,8 +305,7 @@ public void testAdvanceCoverage2() { ix.insertRange(70, Long.MAX_VALUE); for (long v : new long[] {50, 51, 55, 59, 60, 61, 62, 65, 68, 69}) { final String m = "v=" + v; - try (final ComplementRangeIterator cit = - new ComplementRangeIterator(ix.rangeIterator())) { + try (final ComplementRangeIterator cit = new 
ComplementRangeIterator(ix.rangeIterator())) { assertTrue(m, cit.hasNext()); assertEquals(m, 0, cit.next()); assertTrue(m, cit.advance(v)); diff --git a/DB/benchmark/io/deephaven/db/v2/utils/IndexCreation.java b/DB/benchmark/io/deephaven/db/v2/utils/IndexCreation.java index bda6dbebb75..79ad4b0b4c9 100644 --- a/DB/benchmark/io/deephaven/db/v2/utils/IndexCreation.java +++ b/DB/benchmark/io/deephaven/db/v2/utils/IndexCreation.java @@ -50,10 +50,9 @@ public void setupEnv() { int remainingCount = indexCount; int j = 0; for (int i = 0; i < rangeCount - 1; i++) { - indexRanges[2 * i] = - lastPos + 1 + random.nextInt(2 * avgElementsPerRange - 1) * sparsityFactor; - int step = 1 + Math.max(0, - Math.min(random.nextInt(2 * avgElementsPerRange - 1), remainingCount - rangeCount)); + indexRanges[2 * i] = lastPos + 1 + random.nextInt(2 * avgElementsPerRange - 1) * sparsityFactor; + int step = + 1 + Math.max(0, Math.min(random.nextInt(2 * avgElementsPerRange - 1), remainingCount - rangeCount)); lastPos = indexRanges[2 * i + 1] = indexRanges[2 * i] + step; remainingCount -= step; indexPoints[j++] = indexRanges[2 * i]; diff --git a/DB/benchmark/io/deephaven/db/v2/utils/IndexIteration.java b/DB/benchmark/io/deephaven/db/v2/utils/IndexIteration.java index a3f2a0f72fc..45c79258d81 100644 --- a/DB/benchmark/io/deephaven/db/v2/utils/IndexIteration.java +++ b/DB/benchmark/io/deephaven/db/v2/utils/IndexIteration.java @@ -58,8 +58,8 @@ public void setupEnv() { int j = 0; for (int i = 0; i < rangeCount - 1; i++) { indexRanges[2 * i] = lastPos + 1 + random.nextInt(2 * avgElementsPerRange - 1); - int step = 1 + Math.max(0, - Math.min(random.nextInt(2 * avgElementsPerRange - 1), remainingCount - rangeCount)); + int step = + 1 + Math.max(0, Math.min(random.nextInt(2 * avgElementsPerRange - 1), remainingCount - rangeCount)); lastPos = indexRanges[2 * i + 1] = indexRanges[2 * i] + step; remainingCount -= step; indexPoints[j++] = indexRanges[2 * i]; @@ -102,8 +102,7 @@ public void setupEnv() { 
System.out.println("Expected expectedSum = " + expectedSum); } - private int fillChunkOfIndicesFromRange(WritableLongChunk indices, int posInRange, - long startValue, int count) { + private int fillChunkOfIndicesFromRange(WritableLongChunk indices, int posInRange, long startValue, int count) { indices.setSize(0); long pos = startValue; do { @@ -126,8 +125,7 @@ private int fillChunkOfIndicesFromRange(WritableLongChunk indices, int posInRang return posInRange; } - private void fillChunkOfRangesFromIndices(WritableLongChunk ranges, final int posInIndex, - final int count) { + private void fillChunkOfRangesFromIndices(WritableLongChunk ranges, final int posInIndex, final int count) { ranges.setSize(0); ranges.add(indexPoints[posInIndex]); long prevValue = indexPoints[posInIndex]; @@ -150,18 +148,16 @@ private void fillChunkOfRangesFromIndices(WritableLongChunk ranges, final int po ranges.add(prevValue + 1); } - private void fillChunkByOrderedKeysRange(OrderedKeys orderedKeys, - WritableDoubleChunk doubleChunk, int sourceId) { + private void fillChunkByOrderedKeysRange(OrderedKeys orderedKeys, WritableDoubleChunk doubleChunk, int sourceId) { fillChunkDirectByRange(orderedKeys.asKeyRangesChunk(), doubleChunk, sourceId); } - private void fillChunkByOrderedKeyItems(OrderedKeys orderedKeys, - WritableDoubleChunk doubleChunk, int sourceId) { + private void fillChunkByOrderedKeyItems(OrderedKeys orderedKeys, WritableDoubleChunk doubleChunk, int sourceId) { fillChunkDirectByItems(orderedKeys.asKeyIndicesChunk(), doubleChunk, sourceId); } - private void fillChunkDirectByRange(LongChunk ranges, - WritableDoubleChunk doubleChunk, int sourceId) { + private void fillChunkDirectByRange(LongChunk ranges, WritableDoubleChunk doubleChunk, + int sourceId) { int pos = 0; final int size = ranges.size(); for (int i = 0; i < size; i += 2) { @@ -173,8 +169,8 @@ private void fillChunkDirectByRange(LongChunk ranges, doubleChunk.setSize(pos); } - private void fillChunkDirectByItems(LongChunk 
indices, - WritableDoubleChunk doubleChunk, int sourceId) { + private void fillChunkDirectByItems(LongChunk indices, WritableDoubleChunk doubleChunk, + int sourceId) { final int size = indices.size(); doubleChunk.setSize(0); for (int i = 0; i < size; i++) { @@ -182,8 +178,7 @@ private void fillChunkDirectByItems(LongChunk indices, } } - private void fillChunkByIndexIterator(Index.Iterator it, int size, - WritableDoubleChunk doubleChunk, int sourceId) { + private void fillChunkByIndexIterator(Index.Iterator it, int size, WritableDoubleChunk doubleChunk, int sourceId) { doubleChunk.setSize(0); for (int i = 0; i < size; i++) { doubleChunk.add(sets[sourceId][(int) it.nextLong()]); @@ -191,7 +186,7 @@ private void fillChunkByIndexIterator(Index.Iterator it, int size, } private int fillChunkByIndexRangeIterator(Index.RangeIterator it, int rangeStart, int size, - WritableDoubleChunk doubleChunk, int sourceId) { + WritableDoubleChunk doubleChunk, int sourceId) { int pos = 0; int rangeEnd = (int) it.currentRangeEnd() + 1; int length = rangeEnd - rangeStart; @@ -210,7 +205,7 @@ private int fillChunkByIndexRangeIterator(Index.RangeIterator it, int rangeStart } private int[] fillChunkDirectByRangeIndexIteration(int posInRange, int rangeStart, int size, - WritableDoubleChunk doubleChunk, int sourceId) { + WritableDoubleChunk doubleChunk, int sourceId) { int pos = 0; int rangeEnd = (int) indexRanges[posInRange + 1]; int length = rangeEnd - rangeStart; @@ -271,8 +266,8 @@ public void orderedKeysByRange(Blackhole bh) { private void print(double sum) { /* - * Assert.assertEquals(sum, expectedSum, 0.0001 * expectedSum); if (!printed) { - * System.out.println("Sum = " + sum); printed = true; } + * Assert.assertEquals(sum, expectedSum, 0.0001 * expectedSum); if (!printed) { System.out.println("Sum = " + + * sum); printed = true; } */ } @@ -309,8 +304,8 @@ public void directByRangeIteration(Blackhole bh) { for (int step = 0; step < stepCount; step++) { int[] posInRangeAndRangeStart = 
null; for (int i = 0; i < chunks.length; i++) { - posInRangeAndRangeStart = fillChunkDirectByRangeIndexIteration(lastPosInRange, - rangeStart, chunkSize, chunks[i], i); + posInRangeAndRangeStart = + fillChunkDirectByRangeIndexIteration(lastPosInRange, rangeStart, chunkSize, chunks[i], i); } lastPosInRange = posInRangeAndRangeStart[0]; rangeStart = posInRangeAndRangeStart[1]; @@ -320,8 +315,7 @@ public void directByRangeIteration(Blackhole bh) { } for (int i = 0; i < chunks.length; i++) { - fillChunkDirectByRangeIndexIteration(lastPosInRange, rangeStart, indexCount % chunkSize, - chunks[i], i); + fillChunkDirectByRangeIndexIteration(lastPosInRange, rangeStart, indexCount % chunkSize, chunks[i], i); } evaluate(result, chunks); sum = sum(sum); @@ -334,8 +328,7 @@ public void directByIndexIteration(Blackhole bh) { int stepCount = indexCount / chunkSize; double sum = 0; for (int step = 0; step < stepCount; step++) { - indexChunk = - WritableLongChunk.writableChunkWrap(indexPoints, step * chunkSize, chunkSize); + indexChunk = WritableLongChunk.writableChunkWrap(indexPoints, step * chunkSize, chunkSize); for (int i = 0; i < chunks.length; i++) { fillChunkDirectByItems(indexChunk, chunks[i], i); } @@ -343,8 +336,8 @@ public void directByIndexIteration(Blackhole bh) { sum = sum(sum); bh.consume(result); } - indexChunk = WritableLongChunk.writableChunkWrap(indexPoints, - (indexCount / chunkSize) * chunkSize, indexCount % chunkSize); + indexChunk = WritableLongChunk.writableChunkWrap(indexPoints, (indexCount / chunkSize) * chunkSize, + indexCount % chunkSize); for (int i = 0; i < chunks.length; i++) { fillChunkDirectByItems(indexChunk, chunks[i], i); } @@ -394,8 +387,7 @@ public void indexByIndexRangeIterator(Blackhole bh) { for (int step = 0; step < stepCount; step++) { int nextRangeStart = rangeStart; for (int i = 0; i < chunks.length; i++) { - nextRangeStart = - fillChunkByIndexRangeIterator(its[i], rangeStart, chunkSize, chunks[i], i); + nextRangeStart = 
fillChunkByIndexRangeIterator(its[i], rangeStart, chunkSize, chunks[i], i); } rangeStart = nextRangeStart; evaluate(result, chunks); @@ -404,18 +396,16 @@ public void indexByIndexRangeIterator(Blackhole bh) { } int nextRangeStart = rangeStart; for (int i = 0; i < chunks.length; i++) { - nextRangeStart = fillChunkByIndexRangeIterator(its[i], rangeStart, - indexCount % chunkSize, chunks[i], i); + nextRangeStart = fillChunkByIndexRangeIterator(its[i], rangeStart, indexCount % chunkSize, chunks[i], i); } evaluate(result, chunks); sum = sum(sum); bh.consume(result); print(sum); } - // private void fillChunkByIndexIterator(Index.Iterator it, int size, DoubleChunk doubleChunk, - // int sourceId) { - // private int fillChunkByIndexRangeIterator(Index.RangeIterator it, int rangeStart, int size, - // DoubleChunk doubleChunk, int sourceId) { + // private void fillChunkByIndexIterator(Index.Iterator it, int size, DoubleChunk doubleChunk, int sourceId) { + // private int fillChunkByIndexRangeIterator(Index.RangeIterator it, int rangeStart, int size, DoubleChunk + // doubleChunk, int sourceId) { public static void main(String[] args) throws RunnerException { BenchUtil.run(IndexIteration.class); diff --git a/DB/benchmark/io/deephaven/db/v2/utils/IndexIterationRaw.java b/DB/benchmark/io/deephaven/db/v2/utils/IndexIterationRaw.java index 9f9c359acf9..46b470d1fa3 100644 --- a/DB/benchmark/io/deephaven/db/v2/utils/IndexIterationRaw.java +++ b/DB/benchmark/io/deephaven/db/v2/utils/IndexIterationRaw.java @@ -65,8 +65,8 @@ public void setupEnv() { int j = 0; for (int i = 0; i < rangeCount - 1; i++) { indexRanges[2 * i] = lastPos + 1 + random.nextInt(2 * avgElementsPerRange - 1); - int step = 1 + Math.max(0, - Math.min(random.nextInt(2 * avgElementsPerRange - 1), remainingCount - rangeCount)); + int step = + 1 + Math.max(0, Math.min(random.nextInt(2 * avgElementsPerRange - 1), remainingCount - rangeCount)); lastPos = indexRanges[2 * i + 1] = indexRanges[2 * i] + step; remainingCount -= 
step; indexPoints[j++] = indexRanges[2 * i]; @@ -236,8 +236,7 @@ public void directByRangeIteration(Blackhole bh) { int rangeStart = (int) indexRanges[0]; for (int step = 0; step < stepCount; step++) { int[] posInRangeAndRangeStart = null; - long[] res = - fillChunkDirectByRangeIndexIteration(lastPosInRange, rangeStart, chunkSize); + long[] res = fillChunkDirectByRangeIndexIteration(lastPosInRange, rangeStart, chunkSize); lastPosInRange = (int) res[0]; rangeStart = (int) res[1]; sum += res[2]; @@ -245,8 +244,7 @@ public void directByRangeIteration(Blackhole bh) { } - sum += fillChunkDirectByRangeIndexIteration(lastPosInRange, rangeStart, - indexCount % chunkSize)[2]; + sum += fillChunkDirectByRangeIndexIteration(lastPosInRange, rangeStart, indexCount % chunkSize)[2]; bh.consume(sum); print(sum); } @@ -256,21 +254,20 @@ public void directByIndexIteration(Blackhole bh) { int stepCount = indexCount / chunkSize; long sum = 0; for (int step = 0; step < stepCount; step++) { - indexChunk = - WritableLongChunk.writableChunkWrap(indexPoints, step * chunkSize, chunkSize); + indexChunk = WritableLongChunk.writableChunkWrap(indexPoints, step * chunkSize, chunkSize); sum += fillChunkDirectByItems(indexChunk); bh.consume(sum); } - indexChunk = WritableLongChunk.writableChunkWrap(indexPoints, - (indexCount / chunkSize) * chunkSize, indexCount % chunkSize); + indexChunk = WritableLongChunk.writableChunkWrap(indexPoints, (indexCount / chunkSize) * chunkSize, + indexCount % chunkSize); sum += fillChunkDirectByItems(indexChunk); bh.consume(sum); print(sum); } - private int fillChunkOfIndicesFromRange(WritableLongChunk indices, - int posInRange, long startValue, int count) { + private int fillChunkOfIndicesFromRange(WritableLongChunk indices, int posInRange, + long startValue, int count) { indices.setSize(0); long pos = startValue; do { @@ -311,8 +308,8 @@ public void fromRangesByIndexIteration(Blackhole bh) { print(sum); } - private void fillChunkOfRangesFromIndices(WritableLongChunk 
ranges, - final int posInIndex, final int count) { + private void fillChunkOfRangesFromIndices(WritableLongChunk ranges, final int posInIndex, + final int count) { ranges.setSize(0); ranges.add(indexPoints[posInIndex]); long prevValue = indexPoints[posInIndex]; @@ -344,8 +341,7 @@ public void fromIndicesByRangeIteration(Blackhole bh) { bh.consume(sum); } - fillChunkOfRangesFromIndices(rangeChunk, (indexCount / chunkSize) * chunkSize, - indexCount % chunkSize); + fillChunkOfRangesFromIndices(rangeChunk, (indexCount / chunkSize) * chunkSize, indexCount % chunkSize); sum += fillChunkDirectByRange(rangeChunk); bh.consume(sum); diff --git a/DB/benchmark/io/deephaven/db/v2/utils/OrderedKeysBench.java b/DB/benchmark/io/deephaven/db/v2/utils/OrderedKeysBench.java index d8a462a17d9..aa2f9aab8c8 100644 --- a/DB/benchmark/io/deephaven/db/v2/utils/OrderedKeysBench.java +++ b/DB/benchmark/io/deephaven/db/v2/utils/OrderedKeysBench.java @@ -45,7 +45,7 @@ public void done() { }; TestValues.setup(tb, 16 * 1024 * 1024, TestValues.asymmetric); final WritableLongChunk fixedCostChunk = - WritableLongChunk.makeWritableChunk(fixedCostChunkSz); + WritableLongChunk.makeWritableChunk(fixedCostChunkSz); final Random r = new Random(1); long last = 0; for (int i = 0; i < fixedCostChunkSz; ++i) { diff --git a/DB/benchmark/io/deephaven/db/v2/utils/ShiftAwareBench.java b/DB/benchmark/io/deephaven/db/v2/utils/ShiftAwareBench.java index 4ac4af4bd1f..f0397aea672 100644 --- a/DB/benchmark/io/deephaven/db/v2/utils/ShiftAwareBench.java +++ b/DB/benchmark/io/deephaven/db/v2/utils/ShiftAwareBench.java @@ -56,14 +56,13 @@ public void setupEnv(BenchmarkParams params) { LiveTableMonitor.DEFAULT.enableUnitTestMode(); final BenchmarkTableBuilder builder = BenchmarkTools.inMemoryTableBuilder("ShiftAwareBench", - BenchmarkTools.sizeWithSparsity(tableSize, sparsity)); + BenchmarkTools.sizeWithSparsity(tableSize, sparsity)); builder.setSeed(0xDEADBEEF); builder.addColumn(BenchmarkTools.numberCol("intCol", 
Integer.class, 0, 1 << 20)); bmTable = builder.build(); - state = new TableBenchmarkState(BenchmarkTools.stripName(params.getBenchmark()), - params.getWarmup().getCount()); + state = new TableBenchmarkState(BenchmarkTools.stripName(params.getBenchmark()), params.getWarmup().getCount()); inputTable = applySparsity(bmTable.getTable(), tableSize, sparsity, 0); } @@ -85,7 +84,7 @@ public void setupIteration() { private R incrementalBenchmark(Function function) { final long sizePerStep = Math.max(inputTable.size() / 10, 1); final IncrementalReleaseFilter incrementalReleaseFilter = - new IncrementalReleaseFilter(sizePerStep, sizePerStep); + new IncrementalReleaseFilter(sizePerStep, sizePerStep); final Table filtered = inputTable.where(incrementalReleaseFilter); final R result = function.apply(filtered); @@ -101,22 +100,20 @@ private R incrementalBenchmark(Function function) { @Benchmark public Table b00_flattenWhere() { - final Table result = - incrementalBenchmark((Table t) -> t.flatten().where("intCol % 3 == 0")); + final Table result = incrementalBenchmark((Table t) -> t.flatten().where("intCol % 3 == 0")); return state.setResult(result); } @Benchmark public Table b01_mergeWhere() { - final Table result = - incrementalBenchmark((Table t) -> TableTools.merge(t, t).where("intCol % 3 == 0")); + final Table result = incrementalBenchmark((Table t) -> TableTools.merge(t, t).where("intCol % 3 == 0")); return state.setResult(result); } @Benchmark public Table b02_mergeFlattenWhere() { - final Table result = incrementalBenchmark( - (Table t) -> TableTools.merge(t, t).flatten().where("intCol % 3 == 0")); + final Table result = + incrementalBenchmark((Table t) -> TableTools.merge(t, t).flatten().where("intCol % 3 == 0")); return state.setResult(result); } diff --git a/DB/benchmark/io/deephaven/db/v2/utils/TestValues.java b/DB/benchmark/io/deephaven/db/v2/utils/TestValues.java index f11875922e9..05f6602f55f 100644 --- a/DB/benchmark/io/deephaven/db/v2/utils/TestValues.java +++ 
b/DB/benchmark/io/deephaven/db/v2/utils/TestValues.java @@ -18,8 +18,7 @@ public Context(final int clusterMid, final Builder b) { this.clusterMid = clusterMid; } - public long populateFirstArgStep(final int jumpPropOneIn, final int d, int halfClusterWidth, - final Random r) { + public long populateFirstArgStep(final int jumpPropOneIn, final int d, int halfClusterWidth, final Random r) { final long k; if (r.nextInt(jumpPropOneIn) == 0) { k = clusterMid = halfClusterWidth + r.nextInt(d); @@ -30,9 +29,8 @@ public long populateFirstArgStep(final int jumpPropOneIn, final int d, int halfC return k; } - public void populateSecondArgStep(final int sizePropOneIn, final int sharePropOneIn, - final long k, - int cluster1Mid, final int halfClusterWidth, final Random r) { + public void populateSecondArgStep(final int sizePropOneIn, final int sharePropOneIn, final long k, + int cluster1Mid, final int halfClusterWidth, final Random r) { if (sizePropOneIn != 1 && r.nextInt(sizePropOneIn) != 0) { return; } @@ -53,9 +51,8 @@ public int getClusterMid() { } public static class Config { - Config(final String name, final int min, final int max, final int clusterWidth, - final int sizePropOneIn, - final int sharePropOneIn, final int jumpPropOneIn) { + Config(final String name, final int min, final int max, final int clusterWidth, final int sizePropOneIn, + final int sharePropOneIn, final int jumpPropOneIn) { this.name = name; this.clusterWidth = clusterWidth; this.sizePropOneIn = sizePropOneIn; @@ -75,11 +72,11 @@ public static class Config { }; public static final Config sparse = - new Config("sparse", 10, 300000000, 50, 1, 1000, 25); + new Config("sparse", 10, 300000000, 50, 1, 1000, 25); public static final Config dense = - new Config("dense", 20, 30000000, 20, 1, 3, 20); + new Config("dense", 20, 30000000, 20, 1, 3, 20); public static final Config asymmetric = - new Config("asymmetric", 10, 300000000, 30000000, 160000, 1000, 25); + new Config("asymmetric", 10, 300000000, 30000000, 
160000, 1000, 25); public static void setup(final Builder b, final int sz, final TestValues.Config c) { final int halfClusterWidth = c.clusterWidth / 2; @@ -93,7 +90,7 @@ public static void setup(final Builder b, final int sz, final TestValues.Config } public static void setup3(final Builder b1, final Builder b2, final Builder b3, final int sz, - final TestValues.Config cf) { + final TestValues.Config cf) { final int halfClusterWidth = cf.clusterWidth / 2; final TestValues.Context cx1 = new TestValues.Context(cf.min + halfClusterWidth, b1); final TestValues.Context cx2 = new TestValues.Context(cf.max + halfClusterWidth, b2); @@ -103,8 +100,7 @@ public static void setup3(final Builder b1, final Builder b2, final Builder b3, for (int i = 0; i < sz; ++i) { final long k = cx1.populateFirstArgStep(cf.jumpPropOneIn, d, halfClusterWidth, r); final TestValues.Context cx = (r.nextBoolean()) ? cx2 : cx3; - cx.populateSecondArgStep(cf.sizePropOneIn, cf.sharePropOneIn, k, cx1.getClusterMid(), - halfClusterWidth, r); + cx.populateSecondArgStep(cf.sizePropOneIn, cf.sharePropOneIn, k, cx1.getClusterMid(), halfClusterWidth, r); } b1.done(); b2.done(); diff --git a/DB/src/main/java/io/deephaven/db/NotSortableException.java b/DB/src/main/java/io/deephaven/db/NotSortableException.java index 536877f66b8..d1f9a3f5039 100644 --- a/DB/src/main/java/io/deephaven/db/NotSortableException.java +++ b/DB/src/main/java/io/deephaven/db/NotSortableException.java @@ -5,16 +5,14 @@ import java.util.Collection; public class NotSortableException extends RuntimeException { - public NotSortableException(Collection unsortableColumns, - Collection sortableColumns) { + public NotSortableException(Collection unsortableColumns, Collection sortableColumns) { super(buildErrorMessage(unsortableColumns, sortableColumns)); } - private static String buildErrorMessage(Collection unsortableColumns, - Collection sortableColumns) { + private static String buildErrorMessage(Collection unsortableColumns, Collection 
sortableColumns) { String message = sortableColumns.isEmpty() ? "Sorting is not allowed on this table" - : "Sorting is not allowed on this table, but was attempted on: " - + StringUtils.joinStrings(unsortableColumns, ", "); + : "Sorting is not allowed on this table, but was attempted on: " + + StringUtils.joinStrings(unsortableColumns, ", "); message += " but was attempted on: " + StringUtils.joinStrings(unsortableColumns, ", "); diff --git a/DB/src/main/java/io/deephaven/db/exceptions/ArgumentException.java b/DB/src/main/java/io/deephaven/db/exceptions/ArgumentException.java index 8bed852aea7..77da73d6d7a 100644 --- a/DB/src/main/java/io/deephaven/db/exceptions/ArgumentException.java +++ b/DB/src/main/java/io/deephaven/db/exceptions/ArgumentException.java @@ -15,8 +15,7 @@ public ArgumentException(Throwable cause) { super(cause); } - public ArgumentException(String message, Throwable cause, boolean enableSuppression, - boolean writableStackTrace) { + public ArgumentException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) { super(message, cause, enableSuppression, writableStackTrace); } diff --git a/DB/src/main/java/io/deephaven/db/exceptions/ExpressionException.java b/DB/src/main/java/io/deephaven/db/exceptions/ExpressionException.java index 3cc0d96958d..6e9a9608e6b 100644 --- a/DB/src/main/java/io/deephaven/db/exceptions/ExpressionException.java +++ b/DB/src/main/java/io/deephaven/db/exceptions/ExpressionException.java @@ -2,8 +2,7 @@ /** * An unchecked exception indicating a problem with a parsed expression, for example in - * {@link io.deephaven.db.tables.Table#where(String...)} or - * {@link io.deephaven.db.tables.Table#update(String...)}. + * {@link io.deephaven.db.tables.Table#where(String...)} or {@link io.deephaven.db.tables.Table#update(String...)}. 
*/ public class ExpressionException extends UncheckedTableException { final String problemExpression; diff --git a/DB/src/main/java/io/deephaven/db/exceptions/OperationException.java b/DB/src/main/java/io/deephaven/db/exceptions/OperationException.java index 3c24da5435e..1b127ef5e5f 100644 --- a/DB/src/main/java/io/deephaven/db/exceptions/OperationException.java +++ b/DB/src/main/java/io/deephaven/db/exceptions/OperationException.java @@ -15,8 +15,7 @@ public OperationException(Throwable cause) { super(cause); } - public OperationException(String message, Throwable cause, boolean enableSuppression, - boolean writableStackTrace) { + public OperationException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) { super(message, cause, enableSuppression, writableStackTrace); } diff --git a/DB/src/main/java/io/deephaven/db/exceptions/SizeException.java b/DB/src/main/java/io/deephaven/db/exceptions/SizeException.java index c48ce699b18..15bcbee2432 100644 --- a/DB/src/main/java/io/deephaven/db/exceptions/SizeException.java +++ b/DB/src/main/java/io/deephaven/db/exceptions/SizeException.java @@ -7,13 +7,11 @@ /** *

    * Unchecked exception thrown when {@link Index}, {@link io.deephaven.db.tables.Table} or - * {@link io.deephaven.db.tables.DataColumn} operations are invoked (directly, or indirectly as data - * updates) that cannot be completed correctly due to size constraints on the underlying data - * structures. + * {@link io.deephaven.db.tables.DataColumn} operations are invoked (directly, or indirectly as data updates) that + * cannot be completed correctly due to size constraints on the underlying data structures. *

    - * For example, the current implementations of {@link io.deephaven.db.v2.utils.RedirectionIndex}, - * required for {@link io.deephaven.db.tables.Table#sort}, can only support an 32-bit integer number - * of keys. + * For example, the current implementations of {@link io.deephaven.db.v2.utils.RedirectionIndex}, required for + * {@link io.deephaven.db.tables.Table#sort}, can only support an 32-bit integer number of keys. */ public class SizeException extends UncheckedDeephavenException { @@ -24,15 +22,14 @@ public class SizeException extends UncheckedDeephavenException { * @param inputSize The input size for the message * @param maximumSize The maximum size for the message */ - public SizeException(@Nullable final String messagePrefix, final long inputSize, - final long maximumSize) { - super((messagePrefix == null ? "" : messagePrefix + ": ") + "Input size " + inputSize - + " larger than maximum " + maximumSize); + public SizeException(@Nullable final String messagePrefix, final long inputSize, final long maximumSize) { + super((messagePrefix == null ? "" : messagePrefix + ": ") + "Input size " + inputSize + " larger than maximum " + + maximumSize); } /** - * Construct an exception, with a message appropriate for the given arguments. Maximum size is - * assumed to be {@link Integer#MAX_VALUE}. + * Construct an exception, with a message appropriate for the given arguments. Maximum size is assumed to be + * {@link Integer#MAX_VALUE}. * * @param messagePrefix An optional prefix for the message * @param inputSize The input size for the message @@ -42,8 +39,8 @@ public SizeException(@Nullable final String messagePrefix, final long inputSize) } /** - * Construct an exception, with a message appropriate for the given arguments. Maximum size is - * assumed to be {@link Integer#MAX_VALUE}, and no prefix is included. + * Construct an exception, with a message appropriate for the given arguments. 
Maximum size is assumed to be + * {@link Integer#MAX_VALUE}, and no prefix is included. * * @param inputSize The input size for the message */ diff --git a/DB/src/main/java/io/deephaven/db/exceptions/StateException.java b/DB/src/main/java/io/deephaven/db/exceptions/StateException.java index 21a500b329c..9c2213f3fbf 100644 --- a/DB/src/main/java/io/deephaven/db/exceptions/StateException.java +++ b/DB/src/main/java/io/deephaven/db/exceptions/StateException.java @@ -15,8 +15,7 @@ public StateException(Throwable cause) { super(cause); } - public StateException(String message, Throwable cause, boolean enableSuppression, - boolean writableStackTrace) { + public StateException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) { super(message, cause, enableSuppression, writableStackTrace); } diff --git a/DB/src/main/java/io/deephaven/db/exceptions/TableAccessException.java b/DB/src/main/java/io/deephaven/db/exceptions/TableAccessException.java index 7c9d0da51ca..9e78d3e59c0 100644 --- a/DB/src/main/java/io/deephaven/db/exceptions/TableAccessException.java +++ b/DB/src/main/java/io/deephaven/db/exceptions/TableAccessException.java @@ -6,8 +6,8 @@ import org.jetbrains.annotations.Nullable; /** - * An {@link UncheckedPermissionException} derivative which indicates a table may not be accessed - * for one reason or another. + * An {@link UncheckedPermissionException} derivative which indicates a table may not be accessed for one reason or + * another. 
*/ public class TableAccessException extends UncheckedPermissionException { public TableAccessException(String reason) { @@ -15,17 +15,17 @@ public TableAccessException(String reason) { } public TableAccessException(@Nullable String namespace, @NotNull String tableName, - @NotNull AuthContext authContext) { + @NotNull AuthContext authContext) { this(namespace, tableName, authContext, ""); } - public TableAccessException(@Nullable String namespace, @NotNull String tableName, - @NotNull AuthContext authContext, @Nullable String reason) { + public TableAccessException(@Nullable String namespace, @NotNull String tableName, @NotNull AuthContext authContext, + @Nullable String reason) { super(makeDescription(namespace, tableName, authContext, reason)); } private static String makeDescription(@Nullable String namespace, @NotNull String tableName, - @NotNull AuthContext authContext, @Nullable String reason) { + @NotNull AuthContext authContext, @Nullable String reason) { final StringBuilder sb = new StringBuilder(); sb.append(authContext.getLogRepresentation()).append(" may not access: "); diff --git a/DB/src/main/java/io/deephaven/db/exceptions/TableIOException.java b/DB/src/main/java/io/deephaven/db/exceptions/TableIOException.java index dd17f67c9d8..3fa0dd9460c 100644 --- a/DB/src/main/java/io/deephaven/db/exceptions/TableIOException.java +++ b/DB/src/main/java/io/deephaven/db/exceptions/TableIOException.java @@ -9,18 +9,17 @@ public TableIOException(String reason, Throwable cause) { super(reason, cause); } - public TableIOException(@NotNull String namespace, @NotNull String tableName, - @Nullable String reason) { + public TableIOException(@NotNull String namespace, @NotNull String tableName, @Nullable String reason) { super(makeDescription(namespace, tableName, reason)); } - public TableIOException(@NotNull String namespace, @NotNull String tableName, - @Nullable String reason, Throwable cause) { + public TableIOException(@NotNull String namespace, @NotNull String 
tableName, @Nullable String reason, + Throwable cause) { super(makeDescription(namespace, tableName, reason), cause); } private static String makeDescription(@NotNull String namespace, @NotNull String tableName, - @Nullable String reason) { + @Nullable String reason) { final StringBuilder sb = new StringBuilder(); sb.append("Error while accessing ").append(namespace).append('.').append(tableName); diff --git a/DB/src/main/java/io/deephaven/db/exceptions/UncheckedTableException.java b/DB/src/main/java/io/deephaven/db/exceptions/UncheckedTableException.java index 1177bef6777..c9a4aae0b25 100644 --- a/DB/src/main/java/io/deephaven/db/exceptions/UncheckedTableException.java +++ b/DB/src/main/java/io/deephaven/db/exceptions/UncheckedTableException.java @@ -3,8 +3,7 @@ import io.deephaven.UncheckedDeephavenException; /** - * A generic unchecked exception for problems related to {@link io.deephaven.db.tables.Table} - * operations. + * A generic unchecked exception for problems related to {@link io.deephaven.db.tables.Table} operations. 
*/ public class UncheckedTableException extends UncheckedDeephavenException { public UncheckedTableException(String reason) { diff --git a/DB/src/main/java/io/deephaven/db/tablelogger/ProcessInfoLogLogger.java b/DB/src/main/java/io/deephaven/db/tablelogger/ProcessInfoLogLogger.java index 469db4bb128..11b8461a10a 100644 --- a/DB/src/main/java/io/deephaven/db/tablelogger/ProcessInfoLogLogger.java +++ b/DB/src/main/java/io/deephaven/db/tablelogger/ProcessInfoLogLogger.java @@ -23,8 +23,7 @@ public static String getDefaultTableName() { } interface ISetter extends WritableRowContainer { - void log(Row.Flags flags, String id, String type, String key, String value) - throws java.io.IOException; + void log(Row.Flags flags, String id, String type, String key, String value) throws java.io.IOException; } class DirectSetter extends BaseSetter implements ISetter { @@ -41,8 +40,7 @@ class DirectSetter extends BaseSetter implements ISetter { } @Override - public void log(Row.Flags flags, String id, String type, String key, String value) - throws java.io.IOException { + public void log(Row.Flags flags, String id, String type, String key, String value) throws java.io.IOException { setRowFlags(flags); this.Id.set(id); this.Type.set(type); @@ -61,10 +59,10 @@ protected String threadName() { static { final ColumnsSpecHelper cols = new ColumnsSpecHelper() - .add("Id", String.class) - .add("Type", String.class) - .add("Key", String.class) - .add("Value", String.class); + .add("Id", String.class) + .add("Type", String.class) + .add("Key", String.class) + .add("Value", String.class); columnNames = cols.getColumnNames(); columnDbTypes = cols.getDbTypes(); } @@ -75,14 +73,13 @@ protected ISetter createSetter() { return new DirectSetter(); } - public void log(final String id, final String type, final String key, final String value) - throws IOException { + public void log(final String id, final String type, final String key, final String value) throws IOException { 
log(DEFAULT_INTRADAY_LOGGER_FLAGS, id, type, key, value); } public void log( - final Row.Flags flags, final String id, final String type, final String key, - final String value) throws IOException { + final Row.Flags flags, final String id, final String type, final String key, final String value) + throws IOException { verifyCondition(isInitialized(), "init() must be called before calling log()"); verifyCondition(!isClosed, "cannot call log() after the logger is closed"); verifyCondition(!isShuttingDown, "cannot call log() while the logger is shutting down"); @@ -96,8 +93,7 @@ public void log( flush(setter); } - private static final TableDefinition TABLE_DEFINITION = - TableDefinition.tableDefinition(columnDbTypes, columnNames); + private static final TableDefinition TABLE_DEFINITION = TableDefinition.tableDefinition(columnDbTypes, columnNames); public static TableDefinition getTableDefinition() { return TABLE_DEFINITION; diff --git a/DB/src/main/java/io/deephaven/db/tablelogger/ProcessMetricsLogLogger.java b/DB/src/main/java/io/deephaven/db/tablelogger/ProcessMetricsLogLogger.java index d41200ec079..afaedb34144 100644 --- a/DB/src/main/java/io/deephaven/db/tablelogger/ProcessMetricsLogLogger.java +++ b/DB/src/main/java/io/deephaven/db/tablelogger/ProcessMetricsLogLogger.java @@ -12,7 +12,7 @@ import java.io.IOException; public class ProcessMetricsLogLogger - extends TableLoggerImpl2 { + extends TableLoggerImpl2 { private static final String TABLE_NAME = "ProcessMetricsLog"; @@ -25,10 +25,8 @@ public static String getDefaultTableName() { } interface ISetter extends WritableRowContainer { - void log(Row.Flags flags, long timestamp, String processUniqueId, String name, - String interval, String type, - long n, long sum, long last, long min, long max, long avg, long sum2, long stdev) - throws IOException; + void log(Row.Flags flags, long timestamp, String processUniqueId, String name, String interval, String type, + long n, long sum, long last, long min, long max, 
long avg, long sum2, long stdev) throws IOException; } class DirectSetter extends BaseSetter implements ISetter { @@ -63,10 +61,10 @@ class DirectSetter extends BaseSetter implements ISetter { } @Override - public void log(final Row.Flags flags, final long timestamp, final String processUniqueId, - final String name, final String interval, final String type, - final long n, final long sum, final long last, final long min, final long max, - final long avg, final long sum2, final long stdev) throws IOException { + public void log(final Row.Flags flags, final long timestamp, final String processUniqueId, final String name, + final String interval, final String type, + final long n, final long sum, final long last, final long min, final long max, final long avg, + final long sum2, final long stdev) throws IOException { setRowFlags(flags); this.ProcessUniqueId.set(processUniqueId); this.Timestamp.set(timestamp); @@ -94,19 +92,19 @@ protected String threadName() { static { final ColumnsSpecHelper cols = new ColumnsSpecHelper() - .add("ProcessUniqueId", String.class) - .add("Timestamp", long.class) - .add("Name", String.class) - .add("Interval", String.class) - .add("Type", String.class) - .add("N", long.class) - .add("Sum", long.class) - .add("Last", long.class) - .add("Min", long.class) - .add("Max", long.class) - .add("Avg", long.class) - .add("Sum2", long.class) - .add("Stdev", long.class); + .add("ProcessUniqueId", String.class) + .add("Timestamp", long.class) + .add("Name", String.class) + .add("Interval", String.class) + .add("Type", String.class) + .add("N", long.class) + .add("Sum", long.class) + .add("Last", long.class) + .add("Min", long.class) + .add("Max", long.class) + .add("Avg", long.class) + .add("Sum2", long.class) + .add("Stdev", long.class); columnNames = cols.getColumnNames(); columnDbTypes = cols.getDbTypes(); } @@ -117,25 +115,24 @@ protected ISetter createSetter() { return new DirectSetter(); } - public void log(final long timestamp, final String 
processId, final String name, - final String interval, final String type, - final long n, final long sum, final long last, final long min, final long max, - final long avg, final long sum2, final long stdev) throws IOException { - log(DEFAULT_INTRADAY_LOGGER_FLAGS, timestamp, processId, name, interval, type, n, sum, last, - min, max, avg, sum2, stdev); + public void log(final long timestamp, final String processId, final String name, final String interval, + final String type, + final long n, final long sum, final long last, final long min, final long max, final long avg, + final long sum2, final long stdev) throws IOException { + log(DEFAULT_INTRADAY_LOGGER_FLAGS, timestamp, processId, name, interval, type, n, sum, last, min, max, avg, + sum2, stdev); } - public void log(final Row.Flags flags, final long timestamp, final String processId, - final String name, final String interval, final String type, - final long n, final long sum, final long last, final long min, final long max, - final long avg, final long sum2, final long stdev) throws IOException { + public void log(final Row.Flags flags, final long timestamp, final String processId, final String name, + final String interval, final String type, + final long n, final long sum, final long last, final long min, final long max, final long avg, + final long sum2, final long stdev) throws IOException { verifyCondition(isInitialized(), "init() must be called before calling log()"); verifyCondition(!isClosed, "cannot call log() after the logger is closed"); verifyCondition(!isShuttingDown, "cannot call log() while the logger is shutting down"); final ISetter setter = setterPool.take(); try { - setter.log(flags, timestamp, processId, name, interval, type, n, sum, last, min, max, - avg, sum2, stdev); + setter.log(flags, timestamp, processId, name, interval, type, n, sum, last, min, max, avg, sum2, stdev); } catch (Exception e) { setterPool.give(setter); throw e; @@ -143,8 +140,7 @@ public void log(final Row.Flags 
flags, final long timestamp, final String proces flush(setter); } - private static final TableDefinition TABLE_DEFINITION = - TableDefinition.tableDefinition(columnDbTypes, columnNames); + private static final TableDefinition TABLE_DEFINITION = TableDefinition.tableDefinition(columnDbTypes, columnNames); public static TableDefinition getTableDefinition() { return TABLE_DEFINITION; diff --git a/DB/src/main/java/io/deephaven/db/tablelogger/QueryOperationPerformanceLogLogger.java b/DB/src/main/java/io/deephaven/db/tablelogger/QueryOperationPerformanceLogLogger.java index cb0f013132d..40bd4f7148f 100644 --- a/DB/src/main/java/io/deephaven/db/tablelogger/QueryOperationPerformanceLogLogger.java +++ b/DB/src/main/java/io/deephaven/db/tablelogger/QueryOperationPerformanceLogLogger.java @@ -12,7 +12,7 @@ import java.io.IOException; public class QueryOperationPerformanceLogLogger - extends TableLoggerImpl2 { + extends TableLoggerImpl2 { private static final String TABLE_NAME = "QueryOperationPerformanceLog"; @@ -28,8 +28,7 @@ public static String getDefaultTableName() { } interface ISetter extends WritableRowContainer { - void log(Row.Flags flags, int operationNumber, QueryPerformanceNugget nugget) - throws IOException; + void log(Row.Flags flags, int operationNumber, QueryPerformanceNugget nugget) throws IOException; } class DirectSetter extends BaseSetter implements ISetter { @@ -76,8 +75,8 @@ class DirectSetter extends BaseSetter implements ISetter { } @Override - public void log(final Row.Flags flags, final int operationNumber, - final QueryPerformanceNugget nugget) throws IOException { + public void log(final Row.Flags flags, final int operationNumber, final QueryPerformanceNugget nugget) + throws IOException { setRowFlags(flags); this.ProcessUniqueId.set(processUniqueId); this.EvaluationNumber.setInt(nugget.getEvaluationNumber()); @@ -89,12 +88,11 @@ public void log(final Row.Flags flags, final int operationNumber, 
this.IsCompilation.setBoolean(nugget.getName().startsWith("Compile:")); this.StartTime.set(DBTimeUtils.millisToTime(nugget.getStartClockTime())); this.EndTime.set(nugget.getTotalTimeNanos() == null - ? null - : DBTimeUtils.millisToTime( - nugget.getStartClockTime() - + DBTimeUtils.nanosToMillis(nugget.getTotalTimeNanos()))); - this.DurationNanos.setLong(nugget.getTotalTimeNanos() == null ? QueryConstants.NULL_LONG - : nugget.getTotalTimeNanos()); + ? null + : DBTimeUtils.millisToTime( + nugget.getStartClockTime() + DBTimeUtils.nanosToMillis(nugget.getTotalTimeNanos()))); + this.DurationNanos.setLong( + nugget.getTotalTimeNanos() == null ? QueryConstants.NULL_LONG : nugget.getTotalTimeNanos()); this.CpuNanos.setLong(nugget.getCpuNanos()); this.UserCpuNanos.setLong(nugget.getUserCpuNanos()); this.FreeMemoryChange.setLong(nugget.getDiffFreeMemory()); @@ -116,25 +114,25 @@ protected String threadName() { static { final ColumnsSpecHelper cols = new ColumnsSpecHelper() - .add("ProcessUniqueId", String.class) - .add("EvaluationNumber", int.class) - .add("OperationNumber", int.class) - .add("Depth", int.class) - .add("Description", String.class) - .add("CallerLine", String.class) - .add("IsTopLevel", Boolean.class) - .add("IsCompilation", Boolean.class) - .add("StartTime", DBDateTime.class) - .add("EndTime", DBDateTime.class) - .add("DurationNanos", long.class) - .add("CpuNanos", long.class) - .add("UserCpuNanos", long.class) - .add("FreeMemoryChange", long.class) - .add("TotalMemoryChange", long.class) - .add("AllocatedBytes", long.class) - .add("PoolAllocatedBytes", long.class) - .add("InputSizeLong", long.class) - .add("WasInterrupted", Boolean.class); + .add("ProcessUniqueId", String.class) + .add("EvaluationNumber", int.class) + .add("OperationNumber", int.class) + .add("Depth", int.class) + .add("Description", String.class) + .add("CallerLine", String.class) + .add("IsTopLevel", Boolean.class) + .add("IsCompilation", Boolean.class) + .add("StartTime", 
DBDateTime.class) + .add("EndTime", DBDateTime.class) + .add("DurationNanos", long.class) + .add("CpuNanos", long.class) + .add("UserCpuNanos", long.class) + .add("FreeMemoryChange", long.class) + .add("TotalMemoryChange", long.class) + .add("AllocatedBytes", long.class) + .add("PoolAllocatedBytes", long.class) + .add("InputSizeLong", long.class) + .add("WasInterrupted", Boolean.class); columnNames = cols.getColumnNames(); columnDbTypes = cols.getDbTypes(); } @@ -145,13 +143,12 @@ protected ISetter createSetter() { return new DirectSetter(); } - public void log(final int operationNumber, final QueryPerformanceNugget nugget) - throws IOException { + public void log(final int operationNumber, final QueryPerformanceNugget nugget) throws IOException { log(DEFAULT_INTRADAY_LOGGER_FLAGS, operationNumber, nugget); } - public void log(final Row.Flags flags, final int operationNumber, - final QueryPerformanceNugget nugget) throws IOException { + public void log(final Row.Flags flags, final int operationNumber, final QueryPerformanceNugget nugget) + throws IOException { verifyCondition(isInitialized(), "init() must be called before calling log()"); verifyCondition(!isClosed, "cannot call log() after the logger is closed"); verifyCondition(!isShuttingDown, "cannot call log() while the logger is shutting down"); @@ -165,8 +162,7 @@ public void log(final Row.Flags flags, final int operationNumber, flush(setter); } - private static final TableDefinition TABLE_DEFINITION = - TableDefinition.tableDefinition(columnDbTypes, columnNames); + private static final TableDefinition TABLE_DEFINITION = TableDefinition.tableDefinition(columnDbTypes, columnNames); public static TableDefinition getTableDefinition() { return TABLE_DEFINITION; diff --git a/DB/src/main/java/io/deephaven/db/tablelogger/QueryPerformanceLogLogger.java b/DB/src/main/java/io/deephaven/db/tablelogger/QueryPerformanceLogLogger.java index 7d949a31ffc..ebf87a331e6 100644 --- 
a/DB/src/main/java/io/deephaven/db/tablelogger/QueryPerformanceLogLogger.java +++ b/DB/src/main/java/io/deephaven/db/tablelogger/QueryPerformanceLogLogger.java @@ -15,7 +15,7 @@ import java.io.IOException; public class QueryPerformanceLogLogger - extends TableLoggerImpl2 { + extends TableLoggerImpl2 { private static final String TABLE_NAME = "QueryPerformanceLog"; @@ -32,8 +32,7 @@ public static String getDefaultTableName() { interface ISetter extends WritableRowContainer { void log(Row.Flags flags, final long evaluationNumber, - QueryProcessingResults queryProcessingResults, QueryPerformanceNugget nugget) - throws IOException; + QueryProcessingResults queryProcessingResults, QueryPerformanceNugget nugget) throws IOException; } class DirectSetter extends BaseSetter implements ISetter { @@ -75,20 +74,19 @@ class DirectSetter extends BaseSetter implements ISetter { @Override public void log( - final Row.Flags flags, final long evaluationNumber, - final QueryProcessingResults queryProcessingResults, - final QueryPerformanceNugget nugget) throws IOException { + final Row.Flags flags, final long evaluationNumber, + final QueryProcessingResults queryProcessingResults, final QueryPerformanceNugget nugget) + throws IOException { setRowFlags(flags); this.ProcessUniqueId.set(processUniqueId); this.EvaluationNumber.setLong(evaluationNumber); this.StartTime.set(DBTimeUtils.millisToTime(nugget.getStartClockTime())); this.EndTime.set(nugget.getTotalTimeNanos() == null - ? null - : DBTimeUtils.millisToTime( - nugget.getStartClockTime() - + DBTimeUtils.nanosToMillis(nugget.getTotalTimeNanos()))); - this.DurationNanos.setLong(nugget.getTotalTimeNanos() == null ? QueryConstants.NULL_LONG - : nugget.getTotalTimeNanos()); + ? null + : DBTimeUtils.millisToTime( + nugget.getStartClockTime() + DBTimeUtils.nanosToMillis(nugget.getTotalTimeNanos()))); + this.DurationNanos.setLong( + nugget.getTotalTimeNanos() == null ? 
QueryConstants.NULL_LONG : nugget.getTotalTimeNanos()); this.CpuNanos.setLong(nugget.getCpuNanos()); this.UserCpuNanos.setLong(nugget.getUserCpuNanos()); this.TotalMemoryFree.setLong(nugget.getEndFreeMemory()); @@ -113,22 +111,22 @@ protected String threadName() { static { final ColumnsSpecHelper cols = new ColumnsSpecHelper() - .add("ProcessUniqueId", String.class) - .add("EvaluationNumber", long.class) - .add("StartTime", DBDateTime.class) - .add("EndTime", DBDateTime.class) - .add("DurationNanos", long.class) - .add("CpuNanos", long.class) - .add("UserCpuNanos", long.class) - .add("TotalMemoryFree", long.class) - .add("TotalMemoryUsed", long.class) - .add("FreeMemoryChange", long.class) - .add("TotalMemoryChange", long.class) - .add("AllocatedBytes", long.class) - .add("PoolAllocatedBytes", long.class) - .add("WasInterrupted", Boolean.class) - .add("IsReplayer", Boolean.class) - .add("Exception", String.class); + .add("ProcessUniqueId", String.class) + .add("EvaluationNumber", long.class) + .add("StartTime", DBDateTime.class) + .add("EndTime", DBDateTime.class) + .add("DurationNanos", long.class) + .add("CpuNanos", long.class) + .add("UserCpuNanos", long.class) + .add("TotalMemoryFree", long.class) + .add("TotalMemoryUsed", long.class) + .add("FreeMemoryChange", long.class) + .add("TotalMemoryChange", long.class) + .add("AllocatedBytes", long.class) + .add("PoolAllocatedBytes", long.class) + .add("WasInterrupted", Boolean.class) + .add("IsReplayer", Boolean.class) + .add("Exception", String.class); columnNames = cols.getColumnNames(); columnDbTypes = cols.getDbTypes(); } @@ -140,15 +138,15 @@ protected ISetter createSetter() { } public void log(final long evaluationNumber, - final QueryProcessingResults queryProcessingResults, - final QueryPerformanceNugget nugget) throws IOException { + final QueryProcessingResults queryProcessingResults, + final QueryPerformanceNugget nugget) throws IOException { log(DEFAULT_INTRADAY_LOGGER_FLAGS, evaluationNumber, 
queryProcessingResults, nugget); } public void log( - final Row.Flags flags, final long evaluationNumber, - final QueryProcessingResults queryProcessingResults, final QueryPerformanceNugget nugget) - throws IOException { + final Row.Flags flags, final long evaluationNumber, + final QueryProcessingResults queryProcessingResults, final QueryPerformanceNugget nugget) + throws IOException { verifyCondition(isInitialized(), "init() must be called before calling log()"); verifyCondition(!isClosed, "cannot call log() after the logger is closed"); verifyCondition(!isShuttingDown, "cannot call log() while the logger is shutting down"); @@ -162,8 +160,7 @@ public void log( flush(setter); } - private static final TableDefinition TABLE_DEFINITION = - TableDefinition.tableDefinition(columnDbTypes, columnNames); + private static final TableDefinition TABLE_DEFINITION = TableDefinition.tableDefinition(columnDbTypes, columnNames); public static TableDefinition getTableDefinition() { return TABLE_DEFINITION; diff --git a/DB/src/main/java/io/deephaven/db/tablelogger/UpdatePerformanceLogLogger.java b/DB/src/main/java/io/deephaven/db/tablelogger/UpdatePerformanceLogLogger.java index 2720d8daff1..b0860716086 100644 --- a/DB/src/main/java/io/deephaven/db/tablelogger/UpdatePerformanceLogLogger.java +++ b/DB/src/main/java/io/deephaven/db/tablelogger/UpdatePerformanceLogLogger.java @@ -15,7 +15,7 @@ import static io.deephaven.db.v2.utils.UpdatePerformanceTracker.Entry; public class UpdatePerformanceLogLogger - extends TableLoggerImpl2 { + extends TableLoggerImpl2 { private static final String TABLE_NAME = "UpdatePerformanceLog"; @@ -27,8 +27,7 @@ public UpdatePerformanceLogLogger(final String processUniqueId) { } interface ISetter extends WritableRowContainer { - void log(Row.Flags flags, IntervalLevelDetails intervalLevelDetails, Entry performanceEntry) - throws IOException; + void log(Row.Flags flags, IntervalLevelDetails intervalLevelDetails, Entry performanceEntry) throws IOException; 
} public static String getDefaultTableName() { @@ -75,18 +74,16 @@ class DirectSetter extends BaseSetter implements ISetter { EntryIntervalRemoved = row.getSetter("EntryIntervalRemoved", long.class); EntryIntervalModified = row.getSetter("EntryIntervalModified", long.class); EntryIntervalShifted = row.getSetter("EntryIntervalShifted", long.class); - EntryIntervalInvocationCount = - row.getSetter("EntryIntervalInvocationCount", long.class); + EntryIntervalInvocationCount = row.getSetter("EntryIntervalInvocationCount", long.class); TotalMemoryFree = row.getSetter("TotalMemoryFree", long.class); TotalMemoryUsed = row.getSetter("TotalMemoryUsed", long.class); EntryIntervalAllocatedBytes = row.getSetter("EntryIntervalAllocatedBytes", long.class); - EntryIntervalPoolAllocatedBytes = - row.getSetter("EntryIntervalPoolAllocatedBytes", long.class); + EntryIntervalPoolAllocatedBytes = row.getSetter("EntryIntervalPoolAllocatedBytes", long.class); } @Override public void log(final Row.Flags flags, final IntervalLevelDetails intervalLevelDetails, - final Entry performanceEntry) throws IOException { + final Entry performanceEntry) throws IOException { setRowFlags(flags); this.ProcessUniqueId.set(processUniqueId); this.EntryId.setInt(performanceEntry.getId()); @@ -94,10 +91,8 @@ public void log(final Row.Flags flags, final IntervalLevelDetails intervalLevelD this.OperationNumber.setInt(performanceEntry.getOperationNumber()); this.EntryDescription.set(performanceEntry.getDescription()); this.EntryCallerLine.set(performanceEntry.getCallerLine()); - this.IntervalStartTime - .set(DBTimeUtils.millisToTime(intervalLevelDetails.getIntervalStartTimeMillis())); - this.IntervalEndTime - .set(DBTimeUtils.millisToTime(intervalLevelDetails.getIntervalEndTimeMillis())); + this.IntervalStartTime.set(DBTimeUtils.millisToTime(intervalLevelDetails.getIntervalStartTimeMillis())); + this.IntervalEndTime.set(DBTimeUtils.millisToTime(intervalLevelDetails.getIntervalEndTimeMillis())); 
this.IntervalDurationNanos.setLong(intervalLevelDetails.getIntervalDurationNanos()); this.EntryIntervalUsage.setLong(performanceEntry.getIntervalUsageNanos()); this.EntryIntervalCpuNanos.setLong(performanceEntry.getIntervalCpuNanos()); @@ -106,13 +101,11 @@ public void log(final Row.Flags flags, final IntervalLevelDetails intervalLevelD this.EntryIntervalRemoved.setLong(performanceEntry.getIntervalRemoved()); this.EntryIntervalModified.setLong(performanceEntry.getIntervalModified()); this.EntryIntervalShifted.setLong(performanceEntry.getIntervalShifted()); - this.EntryIntervalInvocationCount - .setLong(performanceEntry.getIntervalInvocationCount()); + this.EntryIntervalInvocationCount.setLong(performanceEntry.getIntervalInvocationCount()); this.TotalMemoryFree.setLong(performanceEntry.getTotalFreeMemory()); this.TotalMemoryUsed.setLong(performanceEntry.getTotalUsedMemory()); this.EntryIntervalAllocatedBytes.setLong(performanceEntry.getIntervalAllocatedBytes()); - this.EntryIntervalPoolAllocatedBytes - .setLong(performanceEntry.getIntervalPoolAllocatedBytes()); + this.EntryIntervalPoolAllocatedBytes.setLong(performanceEntry.getIntervalPoolAllocatedBytes()); } } @@ -126,30 +119,30 @@ protected String threadName() { static { final ColumnsSpecHelper cols = new ColumnsSpecHelper() - .add("ProcessUniqueId", String.class) - .add("EntryId", int.class) - .add("EvaluationNumber", int.class) - .add("OperationNumber", int.class) - .add("EntryDescription", String.class) - .add("EntryCallerLine", String.class) - - .add("IntervalStartTime", DBDateTime.class) - .add("IntervalEndTime", DBDateTime.class) - - .add("IntervalDurationNanos", long.class) - .add("EntryIntervalUsage", long.class) - .add("EntryIntervalCpuNanos", long.class) - .add("EntryIntervalUserCpuNanos", long.class) - .add("EntryIntervalAdded", long.class) - .add("EntryIntervalRemoved", long.class) - .add("EntryIntervalModified", long.class) - .add("EntryIntervalShifted", long.class) - - 
.add("EntryIntervalInvocationCount", long.class) - .add("TotalMemoryFree", long.class) - .add("TotalMemoryUsed", long.class) - .add("EntryIntervalAllocatedBytes", long.class) - .add("EntryIntervalPoolAllocatedBytes", long.class) + .add("ProcessUniqueId", String.class) + .add("EntryId", int.class) + .add("EvaluationNumber", int.class) + .add("OperationNumber", int.class) + .add("EntryDescription", String.class) + .add("EntryCallerLine", String.class) + + .add("IntervalStartTime", DBDateTime.class) + .add("IntervalEndTime", DBDateTime.class) + + .add("IntervalDurationNanos", long.class) + .add("EntryIntervalUsage", long.class) + .add("EntryIntervalCpuNanos", long.class) + .add("EntryIntervalUserCpuNanos", long.class) + .add("EntryIntervalAdded", long.class) + .add("EntryIntervalRemoved", long.class) + .add("EntryIntervalModified", long.class) + .add("EntryIntervalShifted", long.class) + + .add("EntryIntervalInvocationCount", long.class) + .add("TotalMemoryFree", long.class) + .add("TotalMemoryUsed", long.class) + .add("EntryIntervalAllocatedBytes", long.class) + .add("EntryIntervalPoolAllocatedBytes", long.class) ; @@ -164,14 +157,13 @@ protected ISetter createSetter() { } public void log( - final IntervalLevelDetails intervalLevelDetails, final Entry performanceEntry) - throws IOException { + final IntervalLevelDetails intervalLevelDetails, final Entry performanceEntry) throws IOException { log(DEFAULT_INTRADAY_LOGGER_FLAGS, intervalLevelDetails, performanceEntry); } public void log( - final Row.Flags flags, final IntervalLevelDetails intervalLevelDetails, - final Entry performanceEntry) throws IOException { + final Row.Flags flags, final IntervalLevelDetails intervalLevelDetails, final Entry performanceEntry) + throws IOException { verifyCondition(isInitialized(), "init() must be called before calling log()"); verifyCondition(!isClosed, "cannot call log() after the logger is closed"); verifyCondition(!isShuttingDown, "cannot call log() while the logger is shutting 
down"); @@ -185,8 +177,7 @@ public void log( flush(setter); } - private static final TableDefinition TABLE_DEFINITION = - TableDefinition.tableDefinition(columnDbTypes, columnNames); + private static final TableDefinition TABLE_DEFINITION = TableDefinition.tableDefinition(columnDbTypes, columnNames); public static TableDefinition getTableDefinition() { return TABLE_DEFINITION; diff --git a/DB/src/main/java/io/deephaven/db/tables/CodecLookup.java b/DB/src/main/java/io/deephaven/db/tables/CodecLookup.java index 82f573adf38..efabf87b1a2 100644 --- a/DB/src/main/java/io/deephaven/db/tables/CodecLookup.java +++ b/DB/src/main/java/io/deephaven/db/tables/CodecLookup.java @@ -37,32 +37,29 @@ public static boolean codecRequired(@NotNull final ColumnDefinition columnDef * {@link io.deephaven.db.tables.dbarrays.DbArrayBase} types * @return Whether a codec is required */ - public static boolean codecRequired(@NotNull final Class dataType, - @Nullable final Class componentType) { + public static boolean codecRequired(@NotNull final Class dataType, @Nullable final Class componentType) { if (dataType.isPrimitive() || dataType == Boolean.class || dataType == DBDateTime.class - || dataType == String.class || StringSet.class.isAssignableFrom(dataType)) { + || dataType == String.class || StringSet.class.isAssignableFrom(dataType)) { // Primitive, basic, and special types do not require codecs return false; } if (dataType.isArray()) { - if (componentType == null - || !dataType.getComponentType().isAssignableFrom(componentType)) { + if (componentType == null || !dataType.getComponentType().isAssignableFrom(componentType)) { throw new IllegalArgumentException( - "Array type " + dataType + " does not match component type " + componentType); + "Array type " + dataType + " does not match component type " + componentType); } // Arrays of primitives or basic types do not require codecs - return !(componentType.isPrimitive() || componentType == Boolean.class - || componentType == 
DBDateTime.class || componentType == String.class); + return !(componentType.isPrimitive() || componentType == Boolean.class || componentType == DBDateTime.class + || componentType == String.class); } if (DbArrayBase.class.isAssignableFrom(dataType)) { if (componentType == null) { - throw new IllegalArgumentException( - "Vector type " + dataType + " requires a component type"); + throw new IllegalArgumentException("Vector type " + dataType + " requires a component type"); } if (DbArray.class.isAssignableFrom(dataType)) { // DbArrays of basic types do not require codecs return !(componentType == Boolean.class || componentType == DBDateTime.class - || componentType == String.class); + || componentType == String.class); } // DbArrayBases of primitive types do not require codecs return false; @@ -82,40 +79,37 @@ public static boolean explicitCodecPresent(final String codecClassName) { } /** - * Lookup an {@link ObjectCodec} for the supplied {@link ColumnDefinition}. Assumes that the - * data type is appropriate for use with a codec, i.e. that {@link #codecRequired(Class, Class)} - * will return false. + * Lookup an {@link ObjectCodec} for the supplied {@link ColumnDefinition}. Assumes that the data type is + * appropriate for use with a codec, i.e. that {@link #codecRequired(Class, Class)} will return false. 
* * @param columnDefinition The {@link ColumnDefinition} * @return The {@link ObjectCodec} */ public static ObjectCodec lookup( - @NotNull final ColumnDefinition columnDefinition, - @NotNull final ColumnToCodecMappings codecMappings) { + @NotNull final ColumnDefinition columnDefinition, + @NotNull final ColumnToCodecMappings codecMappings) { final String colName = columnDefinition.getName(); final ObjectCodec codec = lookup( - columnDefinition.getDataType(), - codecMappings.getCodecName(colName), - codecMappings.getCodecArgs(colName)); + columnDefinition.getDataType(), + codecMappings.getCodecName(colName), + codecMappings.getCodecArgs(colName)); if (codec == null) { - throw new UnsupportedOperationException( - "Failed to find a matching codec for " + columnDefinition); + throw new UnsupportedOperationException("Failed to find a matching codec for " + columnDefinition); } return codec; } /** - * Lookup an {@link ObjectCodec} for the supplied data type, codec class name, and arguments. - * Assumes that the data type is appropriate for use with a codec, i.e. that - * {@link #codecRequired(Class, Class)} will return false. + * Lookup an {@link ObjectCodec} for the supplied data type, codec class name, and arguments. Assumes that the data + * type is appropriate for use with a codec, i.e. that {@link #codecRequired(Class, Class)} will return false. 
* * @param dataType The data type * @param codecClassName The codec class name * @param codecArguments The codec arguments in string form * @return The {@link ObjectCodec} */ - public static ObjectCodec lookup(@NotNull final Class dataType, - final String codecClassName, final String codecArguments) { + public static ObjectCodec lookup(@NotNull final Class dataType, final String codecClassName, + final String codecArguments) { if (explicitCodecPresent(codecClassName)) { return CodecCache.DEFAULT.getCodec(codecClassName, codecArguments); } @@ -130,8 +124,7 @@ public static ObjectCodec lookup(@NotNull final Class dataTyp */ public static ObjectCodec getDefaultCodec(@NotNull final Class dataType) { if (Externalizable.class.isAssignableFrom(dataType)) { - return CodecCache.DEFAULT.getCodec(ExternalizableCodec.class.getName(), - dataType.getName()); + return CodecCache.DEFAULT.getCodec(ExternalizableCodec.class.getName(), dataType.getName()); } return SerializableCodec.create(); } diff --git a/DB/src/main/java/io/deephaven/db/tables/ColumnDefinition.java b/DB/src/main/java/io/deephaven/db/tables/ColumnDefinition.java index 0e71c55ee98..96b5d92f97a 100644 --- a/DB/src/main/java/io/deephaven/db/tables/ColumnDefinition.java +++ b/DB/src/main/java/io/deephaven/db/tables/ColumnDefinition.java @@ -52,13 +52,12 @@ /** * Column definition for all Deephaven columns. 
*/ -public class ColumnDefinition - implements Externalizable, LogOutputAppendable, Copyable> { +public class ColumnDefinition implements Externalizable, LogOutputAppendable, Copyable> { private static final long serialVersionUID = 3656456077670712362L; public static final EnumFormatter COLUMN_TYPE_FORMATTER = - new EnumFormatter(new String[] {"Normal", "Grouping", "Partitioning", "Virtual"}); + new EnumFormatter(new String[] {"Normal", "Grouping", "Partitioning", "Virtual"}); public static final int COLUMNTYPE_NORMAL = 1; public static final int COLUMNTYPE_GROUPING = 2; @@ -122,19 +121,19 @@ public static ColumnDefinition of(String name, GenericType type) { } public static ColumnDefinition ofDbArray(@NotNull final String name, - @NotNull final Class dbArrayType) { + @NotNull final Class dbArrayType) { ColumnDefinition columnDefinition = new ColumnDefinition<>(name, dbArrayType); columnDefinition.setComponentType(baseComponentTypeForDbArray(dbArrayType)); return columnDefinition; } public static ColumnDefinition fromGenericType(@NotNull final String name, - @NotNull final Class dataType) { + @NotNull final Class dataType) { return fromGenericType(name, dataType, null); } - public static ColumnDefinition fromGenericType(@NotNull final String name, - @NotNull final Class dataType, @Nullable final Class componentType) { + public static ColumnDefinition fromGenericType(@NotNull final String name, @NotNull final Class dataType, + @Nullable final Class componentType) { Objects.requireNonNull(name); Objects.requireNonNull(dataType); final ColumnDefinition cd = new ColumnDefinition<>(name, dataType); @@ -143,11 +142,10 @@ public static ColumnDefinition fromGenericType(@NotNull final String name } /** - * Base component type class for each {@link DbArrayBase} type. Note that {@link DbBooleanArray} - * is deprecated, superseded by {@link DbArray}. + * Base component type class for each {@link DbArrayBase} type. 
Note that {@link DbBooleanArray} is deprecated, + * superseded by {@link DbArray}. */ - private static Class baseComponentTypeForDbArray( - @NotNull final Class dbArrayType) { + private static Class baseComponentTypeForDbArray(@NotNull final Class dbArrayType) { if (DbBooleanArray.class.isAssignableFrom(dbArrayType)) { return Boolean.class; } @@ -179,79 +177,72 @@ private static Class baseComponentTypeForDbArray( } private static void assertComponentTypeValid(@NotNull final Class dataType, - @Nullable final Class componentType) { + @Nullable final Class componentType) { if (!DbArrayBase.class.isAssignableFrom(dataType) && !dataType.isArray()) { return; } if (componentType == null) { - throw new IllegalArgumentException( - "Required component type not specified for data type " + dataType); + throw new IllegalArgumentException("Required component type not specified for data type " + dataType); } if (dataType.isArray()) { final Class arrayComponentType = dataType.getComponentType(); if (!arrayComponentType.isAssignableFrom(componentType)) { throw new IllegalArgumentException( - "Invalid component type " + componentType + " for array data type " + dataType); + "Invalid component type " + componentType + " for array data type " + dataType); } return; } // noinspection unchecked - final Class baseComponentType = - baseComponentTypeForDbArray((Class) dataType); + final Class baseComponentType = baseComponentTypeForDbArray((Class) dataType); if (!baseComponentType.isAssignableFrom(componentType)) { throw new IllegalArgumentException( - "Invalid component type " + componentType + " for DbArray data type " + dataType); + "Invalid component type " + componentType + " for DbArray data type " + dataType); } } private static Class checkAndMaybeInferComponentType(@NotNull final Class dataType, - @Nullable final Class inputComponentType) { + @Nullable final Class inputComponentType) { if (dataType.isArray()) { final Class arrayComponentType = dataType.getComponentType(); if 
(inputComponentType == null) { return arrayComponentType; } if (!arrayComponentType.isAssignableFrom(inputComponentType)) { - throw new IllegalArgumentException("Invalid component type " + inputComponentType - + " for array data type " + dataType); + throw new IllegalArgumentException( + "Invalid component type " + inputComponentType + " for array data type " + dataType); } return inputComponentType; } if (DbArrayBase.class.isAssignableFrom(dataType)) { // noinspection unchecked - final Class dbArrayComponentType = - baseComponentTypeForDbArray((Class) dataType); + final Class dbArrayComponentType = baseComponentTypeForDbArray((Class) dataType); if (inputComponentType == null) { /* - * TODO (https://github.com/deephaven/deephaven-core/issues/817): Allow formula - * results returning DbArray to know component type if - * (DbArray.class.isAssignableFrom(dataType)) { throw new - * IllegalArgumentException("Missing required component type for DbArray data type " - * + dataType); } + * TODO (https://github.com/deephaven/deephaven-core/issues/817): Allow formula results returning + * DbArray to know component type if (DbArray.class.isAssignableFrom(dataType)) { throw new + * IllegalArgumentException("Missing required component type for DbArray data type " + dataType); } */ return dbArrayComponentType; } if (!dbArrayComponentType.isAssignableFrom(inputComponentType)) { - throw new IllegalArgumentException("Invalid component type " + inputComponentType - + " for DbArray data type " + dataType); + throw new IllegalArgumentException( + "Invalid component type " + inputComponentType + " for DbArray data type " + dataType); } return inputComponentType; } return inputComponentType; } - private static void maybeSetComponentType( - @NotNull final ColumnDefinition columnDefinition, @NotNull final Class dataType, - @Nullable Class inputComponentType) { - final Class updatedComponentType = - checkAndMaybeInferComponentType(dataType, inputComponentType); + private static void 
maybeSetComponentType(@NotNull final ColumnDefinition columnDefinition, + @NotNull final Class dataType, @Nullable Class inputComponentType) { + final Class updatedComponentType = checkAndMaybeInferComponentType(dataType, inputComponentType); if (updatedComponentType != null) { columnDefinition.setComponentType(updatedComponentType); } } - public static ColumnDefinition fromGenericType(String name, Class dataType, - int columnType, Class componentType) { + public static ColumnDefinition fromGenericType(String name, Class dataType, int columnType, + Class componentType) { Objects.requireNonNull(dataType); ColumnDefinition cd = new ColumnDefinition<>(name, dataType, columnType); if (componentType == null) { @@ -265,8 +256,7 @@ public static ColumnDefinition from(ColumnHeader header) { return header.componentType().walk(new Adapter(header.name())).out(); } - private static class Adapter - implements Type.Visitor, PrimitiveType.Visitor, GenericType.Visitor { + private static class Adapter implements Type.Visitor, PrimitiveType.Visitor, GenericType.Visitor { private final String name; @@ -432,19 +422,17 @@ public boolean isDirect() { } /** - * Compares two ColumnDefinitions somewhat more permissively than equals, disregarding matters - * of storage and derivation. Checks for equality of {@code name}, {@code dataType}, and - * {@code componentType}. As such, this method has an equivalence relation, ie - * {@code A.isCompatible(B) == B.isCompatible(A)}. + * Compares two ColumnDefinitions somewhat more permissively than equals, disregarding matters of storage and + * derivation. Checks for equality of {@code name}, {@code dataType}, and {@code componentType}. As such, this + * method has an equivalence relation, ie {@code A.isCompatible(B) == B.isCompatible(A)}. * * @param other - The ColumnDefinition to compare to. - * @return True if the ColumnDefinition defines a column whose data is compatible with this - * ColumnDefinition. 
+ * @return True if the ColumnDefinition defines a column whose data is compatible with this ColumnDefinition. */ public boolean isCompatible(ColumnDefinition other) { return this.name.equals(other.name) - && this.dataType.equals(other.dataType) - && this.componentType == other.componentType; + && this.dataType.equals(other.dataType) + && this.componentType == other.componentType; } /** @@ -461,8 +449,8 @@ public String describeForCompatibility() { } /** - * Enumerate the differences between this ColumnDefinition, and another one. Lines will be of - * the form "lhs attribute 'value' does not match rhs attribute 'value'. + * Enumerate the differences between this ColumnDefinition, and another one. Lines will be of the form "lhs + * attribute 'value' does not match rhs attribute 'value'. * * @param differences an array to which differences can be added * @param other the ColumnDefinition under comparison @@ -470,24 +458,22 @@ public String describeForCompatibility() { * @param rhs what to call the other definition * @param prefix begin each difference with this string */ - public void describeDifferences(@NotNull List differences, - @NotNull final ColumnDefinition other, - @NotNull final String lhs, @NotNull final String rhs, @NotNull final String prefix) { + public void describeDifferences(@NotNull List differences, @NotNull final ColumnDefinition other, + @NotNull final String lhs, @NotNull final String rhs, @NotNull final String prefix) { if (!name.equals(other.name)) { - differences.add(prefix + lhs + " name '" + name + "' does not match " + rhs + " name '" - + other.name + "'"); + differences.add(prefix + lhs + " name '" + name + "' does not match " + rhs + " name '" + other.name + "'"); } if (!dataType.equals(other.dataType)) { - differences.add(prefix + lhs + " dataType '" + dataType + "' does not match " + rhs - + " dataType '" + other.dataType + "'"); + differences.add(prefix + lhs + " dataType '" + dataType + "' does not match " + rhs + " dataType '" + + 
other.dataType + "'"); } else { if (!Objects.equals(componentType, other.componentType)) { - differences.add(prefix + lhs + " componentType '" + componentType - + "' does not match " + rhs + " componentType '" + other.componentType + "'"); + differences.add(prefix + lhs + " componentType '" + componentType + "' does not match " + rhs + + " componentType '" + other.componentType + "'"); } if (columnType != other.columnType) { - differences.add(prefix + lhs + " columnType " + columnType + " does not match " - + rhs + " columnType " + other.columnType); + differences.add(prefix + lhs + " columnType " + columnType + " does not match " + rhs + " columnType " + + other.columnType); } } } @@ -498,9 +484,9 @@ public boolean equals(final Object other) { } final ColumnDefinition otherCD = (ColumnDefinition) other; return name.equals(otherCD.name) - && dataType.equals(otherCD.dataType) - && Objects.equals(componentType, otherCD.componentType) - && columnType == otherCD.columnType; + && dataType.equals(otherCD.dataType) + && Objects.equals(componentType, otherCD.componentType) + && columnType == otherCD.columnType; } public ColumnDefinition rename(String newName) { diff --git a/DB/src/main/java/io/deephaven/db/tables/DataColumn.java b/DB/src/main/java/io/deephaven/db/tables/DataColumn.java index 0dd1730e596..93b9255688c 100644 --- a/DB/src/main/java/io/deephaven/db/tables/DataColumn.java +++ b/DB/src/main/java/io/deephaven/db/tables/DataColumn.java @@ -20,19 +20,18 @@ public interface DataColumn extends LongSizedDataStructure { Class getType(); /** - * Get the array component type, or the type itself. For basic types, this is just the type. For - * example, if you have a column of java.lang.String, this also returns java.lang.String. For - * array types (java Arrays), or DbArray (which would be returned by getType), you get the type - * that is contained within the array. For example, if a single row in this column contains a - * DbDoubleArray, getComponentType returns double. 
+ * Get the array component type, or the type itself. For basic types, this is just the type. For example, if you + * have a column of java.lang.String, this also returns java.lang.String. For array types (java Arrays), or DbArray + * (which would be returned by getType), you get the type that is contained within the array. For example, if a + * single row in this column contains a DbDoubleArray, getComponentType returns double. * * @return if type is an array, the type of object within the array. Otherwise type itself. */ Class getComponentType(); /** - * Get the contents of this data column in array form. Note that this will return an array of - * the appropriate primitive type for columns of non-Boolean primitive types. + * Get the contents of this data column in array form. Note that this will return an array of the appropriate + * primitive type for columns of non-Boolean primitive types. * * @return An appropriately-typed array containing the full contents of this data column */ @@ -41,14 +40,12 @@ default Object getDirect() { } /** - * Get the contents of a range of this data column in array form. See {@link #getDirect()} for - * an explanation of return types. Note that it's required that - * {@code endIndexExclusive - startIndexInclusive < Integer.MAX_VALUE}. + * Get the contents of a range of this data column in array form. See {@link #getDirect()} for an explanation of + * return types. Note that it's required that {@code endIndexExclusive - startIndexInclusive < Integer.MAX_VALUE}. 
* * @param startIndexInclusive The first position in the data column to include, inclusive * @param endIndexExclusive One more than the last position in the data column to include - * @return An appropriately-typed array containing the contents of the specified range of this - * data column + * @return An appropriately-typed array containing the contents of the specified range of this data column */ default Object getDirect(final long startIndexInclusive, final long endIndexExclusive) { // noinspection unchecked @@ -140,8 +137,8 @@ default Object getDirect(final int... indexes) { TYPE get(long index); /** - * Return the column's values for the specified row range. Note that this will be a boxed array, - * for data columns of primitive types. + * Return the column's values for the specified row range. Note that this will be a boxed array, for data columns of + * primitive types. * * @param startIndexInclusive The first position in the data column to include, inclusive * @param endIndexExclusive One more than the last position in the data column to include @@ -150,8 +147,8 @@ default Object getDirect(final int... indexes) { TYPE[] get(long startIndexInclusive, long endIndexExclusive); /** - * Return the column's values for the specified rows. Note that this will be a boxed array, for - * data columns of primitive types. + * Return the column's values for the specified rows. Note that this will be a boxed array, for data columns of + * primitive types. * * @param indexes The row indexes to fetch * @return Return the column's values for the specified rows @@ -159,8 +156,8 @@ default Object getDirect(final int... indexes) { TYPE[] get(long... indexes); /** - * Return the column's values for the specified rows. Note that this will be a boxed array, for - * data columns of primitive types. + * Return the column's values for the specified rows. Note that this will be a boxed array, for data columns of + * primitive types. 
* * @param indexes The row indexes to fetch * @return Return the column's values for the specified rows diff --git a/DB/src/main/java/io/deephaven/db/tables/SortPair.java b/DB/src/main/java/io/deephaven/db/tables/SortPair.java index 4a2f4d3c7ee..a0e53285746 100644 --- a/DB/src/main/java/io/deephaven/db/tables/SortPair.java +++ b/DB/src/main/java/io/deephaven/db/tables/SortPair.java @@ -18,9 +18,8 @@ public static SortPair[] from(Collection sortColumns) { public static SortPair of(SortColumn sortColumn) { return new SortPair( - sortColumn.column().name(), - sortColumn.order() == Order.ASCENDING ? SortingOrder.Ascending - : SortingOrder.Descending); + sortColumn.column().name(), + sortColumn.order() == Order.ASCENDING ? SortingOrder.Ascending : SortingOrder.Descending); } private final String column; @@ -43,8 +42,7 @@ public static SortPair ascending(String name) { * @return an ascending SortPair array for names */ public static SortPair[] ascendingPairs(String... names) { - return Arrays.stream(names).map(name -> new SortPair(name, SortingOrder.Ascending)) - .toArray(SortPair[]::new); + return Arrays.stream(names).map(name -> new SortPair(name, SortingOrder.Ascending)).toArray(SortPair[]::new); } @@ -65,8 +63,7 @@ public static SortPair descending(String name) { * @return an descending SortPair array for names */ public static SortPair[] descendingPairs(String... 
names) { - return Arrays.stream(names).map(name -> new SortPair(name, SortingOrder.Descending)) - .toArray(SortPair[]::new); + return Arrays.stream(names).map(name -> new SortPair(name, SortingOrder.Descending)).toArray(SortPair[]::new); } private SortPair(String column, SortingOrder order) { diff --git a/DB/src/main/java/io/deephaven/db/tables/StringSetArrayWrapper.java b/DB/src/main/java/io/deephaven/db/tables/StringSetArrayWrapper.java index d41fe02cdd3..377086ea8fa 100644 --- a/DB/src/main/java/io/deephaven/db/tables/StringSetArrayWrapper.java +++ b/DB/src/main/java/io/deephaven/db/tables/StringSetArrayWrapper.java @@ -88,7 +88,7 @@ public synchronized long getEncoding(@NotNull final ToIntFunction toOffs final int key = toOffset.applyAsInt(s); if (key > 63) { throw new RuntimeException( - "Symbol " + s + " exceeds the limit of 63 symbols for StringSetArrayWrapper"); + "Symbol " + s + " exceeds the limit of 63 symbols for StringSetArrayWrapper"); } encoding |= 1L << key; } diff --git a/DB/src/main/java/io/deephaven/db/tables/StringSetWrapper.java b/DB/src/main/java/io/deephaven/db/tables/StringSetWrapper.java index e63425971c4..25121db23b2 100644 --- a/DB/src/main/java/io/deephaven/db/tables/StringSetWrapper.java +++ b/DB/src/main/java/io/deephaven/db/tables/StringSetWrapper.java @@ -108,8 +108,7 @@ public long getEncoding(@NotNull final ToIntFunction toOffset) { for (String s : this) { final int key = toOffset.applyAsInt(s); if (key > 63) { - throw new RuntimeException( - "Symbol " + s + " exceeds the limit of 63 symbols for StringSetWrapper"); + throw new RuntimeException("Symbol " + s + " exceeds the limit of 63 symbols for StringSetWrapper"); } encoding |= 1L << key; } diff --git a/DB/src/main/java/io/deephaven/db/tables/Table.java b/DB/src/main/java/io/deephaven/db/tables/Table.java index 0d2151a3843..98a707954b6 100644 --- a/DB/src/main/java/io/deephaven/db/tables/Table.java +++ b/DB/src/main/java/io/deephaven/db/tables/Table.java @@ -73,14 +73,11 @@ 
default Table getMeta() { String getDescription(); /** - * Determines whether this Table contains a column for each string in the specified array of - * {@code columnNames}. + * Determines whether this Table contains a column for each string in the specified array of {@code columnNames}. * - * @param columnNames The array of column names to be checked for inclusion in this table. Must - * not be {@code null}. - * @return {@code true} if this Table contains a column for each and every string in the - * {@code columnNames} array; {@code false} if any element of {@code columnNames} is - * not the name of a column in this table + * @param columnNames The array of column names to be checked for inclusion in this table. Must not be {@code null}. + * @return {@code true} if this Table contains a column for each and every string in the {@code columnNames} array; + * {@code false} if any element of {@code columnNames} is not the name of a column in this table */ @AsyncMethod default boolean hasColumns(final String... columnNames) { @@ -91,14 +88,14 @@ default boolean hasColumns(final String... columnNames) { } /** - * Determines whether this Table contains a column for each string in the specified collection - * of {@code columnNames}. + * Determines whether this Table contains a column for each string in the specified collection of + * {@code columnNames}. * - * @param columnNames The collection of column names to be checked for inclusion in this table. - * Must not be {@code null}. - * @return {@code true} if this Table contains a column for each and every string in the - * {@code columnNames} collection; {@code false} if any element of {@code columnNames} - * is not the name of a column in this table + * @param columnNames The collection of column names to be checked for inclusion in this table. Must not be + * {@code null}. 
+ * @return {@code true} if this Table contains a column for each and every string in the {@code columnNames} + * collection; {@code false} if any element of {@code columnNames} is not the name of a column in + * this table */ @AsyncMethod default boolean hasColumns(Collection columnNames) { @@ -124,26 +121,23 @@ default boolean hasColumns(Collection columnNames) { *

    * If this attribute is present with value {@code true}, this Table is a "stream table". *

    - * A stream table is a sequence of additions that represent rows newly received from a stream; - * on the cycle after the stream table is refreshed the rows are removed. Note that this means - * any particular row of data (not to be confused with an index key) never exists for more than - * one cycle. + * A stream table is a sequence of additions that represent rows newly received from a stream; on the cycle after + * the stream table is refreshed the rows are removed. Note that this means any particular row of data (not to be + * confused with an index key) never exists for more than one cycle. *

    - * Most operations are supported as normal on stream tables, but aggregation operations are - * treated specially, producing aggregate results that are valid over the entire observed stream - * from the time the operation is initiated. These semantics necessitate a few exclusions, i.e. - * unsupported operations: + * Most operations are supported as normal on stream tables, but aggregation operations are treated specially, + * producing aggregate results that are valid over the entire observed stream from the time the operation is + * initiated. These semantics necessitate a few exclusions, i.e. unsupported operations: *

      - *
    1. {@link #by(SelectColumn...) by()} as an index-aggregation is unsupported. This means any - * of the overloads for {@link #by(AggregationStateFactory, SelectColumn...)} or - * {@link #by(Collection, Collection)} using {@link AggregationIndexStateFactory}, - * {@link AggregationFormulaStateFactory}, or {@link Array}. + *
    2. {@link #by(SelectColumn...) by()} as an index-aggregation is unsupported. This means any of the overloads for + * {@link #by(AggregationStateFactory, SelectColumn...)} or {@link #by(Collection, Collection)} using + * {@link AggregationIndexStateFactory}, {@link AggregationFormulaStateFactory}, or {@link Array}. * {@link io.deephaven.db.v2.by.ComboAggregateFactory#AggArray(java.lang.String...)}, and - * {@link ComboAggregateFactory#AggFormula(java.lang.String, java.lang.String, java.lang.String...)} - * are also unsupported. + * {@link ComboAggregateFactory#AggFormula(java.lang.String, java.lang.String, java.lang.String...)} are also + * unsupported. *
    3. {@link #byExternal(boolean, String...) byExternal()} is unsupported
    4. - *
    5. {@link #rollup(ComboAggregateFactory, boolean, SelectColumn...) rollup()} is unsupported - * if {@code includeConstituents == true}
    6. + *
    7. {@link #rollup(ComboAggregateFactory, boolean, SelectColumn...) rollup()} is unsupported if + * {@code includeConstituents == true}
    8. *
    9. {@link #treeTable(String, String) treeTable()} is unsupported
    10. *
    *

    @@ -151,8 +145,7 @@ default boolean hasColumns(Collection columnNames) { */ String STREAM_TABLE_ATTRIBUTE = "StreamTable"; /** - * The query engine may set or read this attribute to determine if a table is sorted by a - * particular column. + * The query engine may set or read this attribute to determine if a table is sorted by a particular column. */ String SORTED_COLUMNS_ATTRIBUTE = "SortedColumns"; String SYSTEMIC_TABLE_ATTRIBUTE = "SystemicTable"; @@ -171,9 +164,8 @@ default boolean hasColumns(Collection columnNames) { String SNAPSHOT_VIEWPORT_TYPE = "Snapshot"; /** - * This attribute is used internally by TableTools.merge to detect successive merges. Its - * presence indicates that it is safe to decompose the table into its multiple constituent - * parts. + * This attribute is used internally by TableTools.merge to detect successive merges. Its presence indicates that it + * is safe to decompose the table into its multiple constituent parts. */ String MERGED_TABLE_ATTRIBUTE = "MergedTable"; @@ -181,10 +173,8 @@ default boolean hasColumns(Collection columnNames) { *

    * This attribute is applied to source tables, and takes on Boolean values. *

      - *
    • True for post-{@link #coalesce()} source tables and their children if the source table is - * empty.
    • - *
    • False for post-{@link #coalesce()} source tables and their children if the source table - * is non-empty.
    • + *
    • True for post-{@link #coalesce()} source tables and their children if the source table is empty.
    • + *
    • False for post-{@link #coalesce()} source tables and their children if the source table is non-empty.
    • *
    • Missing for all other tables.
    • *
    */ @@ -267,8 +257,8 @@ default boolean isLive() { } /** - * Explicitly ensure that any work needed to make a table indexable, iterable, or queryable has - * been done, and return the coalesced child table if appropriate. + * Explicitly ensure that any work needed to make a table indexable, iterable, or queryable has been done, and + * return the coalesced child table if appropriate. * * @return This table, or a fully-coalesced child */ @@ -301,8 +291,7 @@ default boolean isEmpty() { ColumnSource getColumnSource(String sourceName); /** - * Retrieves a {@code ColumnSource} and {@link ColumnSource#cast casts} is to to the target - * class {@code clazz}. + * Retrieves a {@code ColumnSource} and {@link ColumnSource#cast casts} is to to the target class {@code clazz}. * * @param sourceName The name of the column. * @param clazz The target type. @@ -325,8 +314,7 @@ default ColumnSource getColumnSource(String sourceName, Class getColumn(c.getName())) - .toArray(DataColumn[]::new); + return getDefinition().getColumnStream().map(c -> getColumn(c.getName())).toArray(DataColumn[]::new); } default DataColumn getColumn(int columnIndex) { @@ -429,10 +417,9 @@ default Table wouldMatch(String... expressions) { } /** - * A table operation that applies the supplied predicate to each row in the table and produces - * columns containing the pass/fail result of the predicate application. This is similar to - * {@link #where(String...)} except that instead of selecting only rows that meet the criteria, - * new columns are added with the result of the comparison. + * A table operation that applies the supplied predicate to each row in the table and produces columns containing + * the pass/fail result of the predicate application. This is similar to {@link #where(String...)} except that + * instead of selecting only rows that meet the criteria, new columns are added with the result of the comparison. * * @return a table with new columns containing the filter result for each row. 
*/ @@ -440,31 +427,27 @@ default Table wouldMatch(String... expressions) { Table wouldMatch(WouldMatchPair... matchers); /** - * Filters this table based on the set of values in the rightTable. Note that when the right - * table ticks, all of the rows in the left table are going to be re-evaluated, thus the - * intention is that the right table is fairly slow moving compared with the left table. + * Filters this table based on the set of values in the rightTable. Note that when the right table ticks, all of the + * rows in the left table are going to be re-evaluated, thus the intention is that the right table is fairly slow + * moving compared with the left table. * * @param rightTable the filtering table. - * @param inclusion whether things included in rightTable should be passed through (they are - * exluded if false) + * @param inclusion whether things included in rightTable should be passed through (they are exluded if false) * @param columnsToMatch the columns to match between the two tables * @return a new table filtered on right table */ - Table whereIn(GroupStrategy groupStrategy, Table rightTable, boolean inclusion, - MatchPair... columnsToMatch); + Table whereIn(GroupStrategy groupStrategy, Table rightTable, boolean inclusion, MatchPair... columnsToMatch); default Table whereIn(Table rightTable, boolean inclusion, MatchPair... columnsToMatch) { return whereIn(GroupStrategy.DEFAULT, rightTable, inclusion, columnsToMatch); } default Table whereIn(Table rightTable, boolean inclusion, String... columnsToMatch) { - return whereIn(GroupStrategy.DEFAULT, rightTable, inclusion, - MatchPairFactory.getExpressions(columnsToMatch)); + return whereIn(GroupStrategy.DEFAULT, rightTable, inclusion, MatchPairFactory.getExpressions(columnsToMatch)); } default Table whereIn(Table rightTable, String... 
columnsToMatch) { - return whereIn(GroupStrategy.DEFAULT, rightTable, true, - MatchPairFactory.getExpressions(columnsToMatch)); + return whereIn(GroupStrategy.DEFAULT, rightTable, true, MatchPairFactory.getExpressions(columnsToMatch)); } default Table whereIn(Table rightTable, MatchPair... columnsToMatch) { @@ -472,8 +455,7 @@ default Table whereIn(Table rightTable, MatchPair... columnsToMatch) { } default Table whereNotIn(Table rightTable, String... columnsToMatch) { - return whereIn(GroupStrategy.DEFAULT, rightTable, false, - MatchPairFactory.getExpressions(columnsToMatch)); + return whereIn(GroupStrategy.DEFAULT, rightTable, false, MatchPairFactory.getExpressions(columnsToMatch)); } default Table whereNotIn(Table rightTable, MatchPair... columnsToMatch) { @@ -484,25 +466,20 @@ default Table whereIn(GroupStrategy groupStrategy, Table rightTable, String... c return whereIn(groupStrategy, rightTable, true, columnsToMatch); } - default Table whereIn(GroupStrategy groupStrategy, Table rightTable, - MatchPair... columnsToMatch) { + default Table whereIn(GroupStrategy groupStrategy, Table rightTable, MatchPair... columnsToMatch) { return whereIn(groupStrategy, rightTable, true, columnsToMatch); } - default Table whereNotIn(GroupStrategy groupStrategy, Table rightTable, - String... columnsToMatch) { + default Table whereNotIn(GroupStrategy groupStrategy, Table rightTable, String... columnsToMatch) { return whereIn(groupStrategy, rightTable, false, columnsToMatch); } - default Table whereNotIn(GroupStrategy groupStrategy, Table rightTable, - MatchPair... columnsToMatch) { + default Table whereNotIn(GroupStrategy groupStrategy, Table rightTable, MatchPair... columnsToMatch) { return whereIn(groupStrategy, rightTable, false, columnsToMatch); } - default Table whereIn(GroupStrategy groupStrategy, Table rightTable, boolean inclusion, - String... 
columnsToMatch) { - return whereIn(groupStrategy, rightTable, inclusion, - MatchPairFactory.getExpressions(columnsToMatch)); + default Table whereIn(GroupStrategy groupStrategy, Table rightTable, boolean inclusion, String... columnsToMatch) { + return whereIn(groupStrategy, rightTable, inclusion, MatchPairFactory.getExpressions(columnsToMatch)); } @Override @@ -548,9 +525,8 @@ default Table whereDynamicNotIn(Table rightTable, String... columnsToMatch) { *

    * The input is an array of clauses, which in turn are a collection of filters. * - * @param filtersToApply each inner collection is a set of filters, all of must which match for - * the clause to be true. If any one of the collections in the array evaluates to true, - * the row is part of the output table. + * @param filtersToApply each inner collection is a set of filters, all of must which match for the clause to be + * true. If any one of the collections in the array evaluates to true, the row is part of the output table. * @return a new table, with the filters applied. */ @SuppressWarnings("unchecked") @@ -569,9 +545,8 @@ default Table whereOneOf(Collection... filtersToApply) { default Table whereOneOf(String... filtersToApplyStrings) { // noinspection unchecked, generic array creation is not possible final Collection[] filtersToApplyArrayOfCollections = - (Collection[]) Arrays - .stream(SelectFilterFactory.getExpressions(filtersToApplyStrings)) - .map(Collections::singleton).toArray(Collection[]::new); + (Collection[]) Arrays.stream(SelectFilterFactory.getExpressions(filtersToApplyStrings)) + .map(Collections::singleton).toArray(Collection[]::new); return whereOneOf(filtersToApplyArrayOfCollections); } @@ -646,16 +621,15 @@ default SelectValidationResult validateSelect(String... columns) { * Compute column formulas on demand. * *

    - * Lazy update defers computation until required for a set of values, and caches the results for - * a set of input values. This uses less RAM than an update statement when you have a smaller - * set of unique values. Less computation than an updateView is needed, because the results are - * saved in a cache. + * Lazy update defers computation until required for a set of values, and caches the results for a set of input + * values. This uses less RAM than an update statement when you have a smaller set of unique values. Less + * computation than an updateView is needed, because the results are saved in a cache. *

    * *

    - * If you have many unique values, you should instead use an update statement, which will have - * more memory efficient structures. Values are never removed from the lazyUpdate cache, so it - * should be used judiciously on a ticking table. + * If you have many unique values, you should instead use an update statement, which will have more memory efficient + * structures. Values are never removed from the lazyUpdate cache, so it should be used judiciously on a ticking + * table. *

    * * @param newColumns the columns to add @@ -706,9 +680,9 @@ default Table updateView(Collection columns) { @AsyncMethod default Table dropColumnFormats() { String[] columnAry = getDefinition().getColumnStream() - .map(ColumnDefinition::getName) - .filter(ColumnFormattingValues::isFormattingColumn) - .toArray(String[]::new); + .map(ColumnDefinition::getName) + .filter(ColumnFormattingValues::isFormattingColumn) + .toArray(String[]::new); return dropColumns(columnAry); } @@ -734,28 +708,26 @@ interface RenameFunction { default Table renameAllColumns(RenameFunction renameFunction) { return renameColumns(getDefinition().getColumnStream().map(ColumnDefinition::getName) - .map(n -> new MatchPair(renameFunction.rename(n), n)).toArray(MatchPair[]::new)); + .map(n -> new MatchPair(renameFunction.rename(n), n)).toArray(MatchPair[]::new)); } @AsyncMethod default Table formatColumns(String... columnFormats) { - final SelectColumn[] selectColumns = - SelectColumnFactory.getFormatExpressions(columnFormats); + final SelectColumn[] selectColumns = SelectColumnFactory.getFormatExpressions(columnFormats); final Set existingColumns = getDefinition().getColumnNames() - .stream() - .filter(column -> !ColumnFormattingValues.isFormattingColumn(column)) - .collect(Collectors.toSet()); + .stream() + .filter(column -> !ColumnFormattingValues.isFormattingColumn(column)) + .collect(Collectors.toSet()); final String[] unknownColumns = Arrays.stream(selectColumns) - .map(SelectColumnFactory::getFormatBaseColumn) - .filter(column -> (column != null && !column.equals("*") - && !existingColumns.contains(column))) - .toArray(String[]::new); + .map(SelectColumnFactory::getFormatBaseColumn) + .filter(column -> (column != null && !column.equals("*") && !existingColumns.contains(column))) + .toArray(String[]::new); if (unknownColumns.length > 0) { - throw new RuntimeException("Unknown columns: " + Arrays.toString(unknownColumns) - + ", available columns = " + existingColumns); + throw new 
RuntimeException( + "Unknown columns: " + Arrays.toString(unknownColumns) + ", available columns = " + existingColumns); } return updateView(selectColumns); @@ -768,18 +740,16 @@ default Table formatRowWhere(String condition, String formula) { @AsyncMethod default Table formatColumnWhere(String columnName, String condition, String formula) { - return formatColumns( - columnName + " = (" + condition + ") ? io.deephaven.db.util.DBColorUtil.toLong(" - + formula + ") : io.deephaven.db.util.DBColorUtil.toLong(NO_FORMATTING)"); + return formatColumns(columnName + " = (" + condition + ") ? io.deephaven.db.util.DBColorUtil.toLong(" + formula + + ") : io.deephaven.db.util.DBColorUtil.toLong(NO_FORMATTING)"); } /** - * Produce a new table with the specified columns moved to the leftmost position. Columns can be - * renamed with the usual syntax, i.e. {@code "NewColumnName=OldColumnName")}. + * Produce a new table with the specified columns moved to the leftmost position. Columns can be renamed with the + * usual syntax, i.e. {@code "NewColumnName=OldColumnName")}. * * @param columnsToMove The columns to move to the left (and, optionally, to rename) - * @return The new table, with the columns rearranged as explained above - * {@link #moveColumns(int, String...)} + * @return The new table, with the columns rearranged as explained above {@link #moveColumns(int, String...)} */ @AsyncMethod default Table moveUpColumns(String... columnsToMove) { @@ -787,23 +757,20 @@ default Table moveUpColumns(String... columnsToMove) { } /** - * Produce a new table with the specified columns moved to the rightmost position. Columns can - * be renamed with the usual syntax, i.e. {@code "NewColumnName=OldColumnName")}. + * Produce a new table with the specified columns moved to the rightmost position. Columns can be renamed with the + * usual syntax, i.e. {@code "NewColumnName=OldColumnName")}. 
* * @param columnsToMove The columns to move to the right (and, optionally, to rename) - * @return The new table, with the columns rearranged as explained above - * {@link #moveColumns(int, String...)} + * @return The new table, with the columns rearranged as explained above {@link #moveColumns(int, String...)} */ @AsyncMethod default Table moveDownColumns(String... columnsToMove) { - return moveColumns(getDefinition().getColumns().length - columnsToMove.length, true, - columnsToMove); + return moveColumns(getDefinition().getColumns().length - columnsToMove.length, true, columnsToMove); } /** - * Produce a new table with the specified columns moved to the specified {@code index}. Column - * indices begin at 0. Columns can be renamed with the usual syntax, i.e. - * {@code "NewColumnName=OldColumnName")}. + * Produce a new table with the specified columns moved to the specified {@code index}. Column indices begin at 0. + * Columns can be renamed with the usual syntax, i.e. {@code "NewColumnName=OldColumnName")}. * * @param index The index to which the specified columns should be moved * @param columnsToMove The columns to move to the specified index (and, optionally, to rename) @@ -828,14 +795,12 @@ default Table moveColumns(int index, boolean moveToEnd, String... 
columnsToMove) final String left = MatchPairFactory.getExpression(columnToMove).leftColumn; final String right = MatchPairFactory.getExpression(columnToMove).rightColumn; - if (!leftColsToMove.add(left) || !currentColumns.contains(left) - || (rightColsToMove.contains(left) && !left.equals(right) - && leftColsToMove.stream().anyMatch(col -> col.equals(right)))) { + if (!leftColsToMove.add(left) || !currentColumns.contains(left) || (rightColsToMove.contains(left) + && !left.equals(right) && leftColsToMove.stream().anyMatch(col -> col.equals(right)))) { extraCols++; } - if (currentColumns.stream().anyMatch(currentColumn -> currentColumn.equals(right)) - && !left.equals(right) && rightColsToMove.add(right) - && !rightColsToMove.contains(left)) { + if (currentColumns.stream().anyMatch(currentColumn -> currentColumn.equals(right)) && !left.equals(right) + && rightColsToMove.add(right) && !rightColsToMove.contains(left)) { extraCols--; } } @@ -847,16 +812,14 @@ default Table moveColumns(int index, boolean moveToEnd, String... columnsToMove) if (vci >= index && ctmi < columnsToMove.length) { viewColumns[vci++] = SelectColumnFactory.getExpression(columnsToMove[ctmi++]); } else { - // Don't add the column if it's one of the columns we're moving or if it has been - // renamed. + // Don't add the column if it's one of the columns we're moving or if it has been renamed. 
final String currentColumn = currentColumns.get(cci++); if (!leftColsToMove.contains(currentColumn) - && Arrays.stream(viewColumns) - .noneMatch(viewCol -> viewCol != null - && viewCol.getMatchPair().leftColumn.equals(currentColumn)) - && Arrays.stream(columnsToMove).noneMatch( - colToMove -> MatchPairFactory.getExpression(colToMove).rightColumn - .equals(currentColumn))) { + && Arrays.stream(viewColumns).noneMatch( + viewCol -> viewCol != null && viewCol.getMatchPair().leftColumn.equals(currentColumn)) + && Arrays.stream(columnsToMove) + .noneMatch(colToMove -> MatchPairFactory.getExpression(colToMove).rightColumn + .equals(currentColumn))) { viewColumns[vci++] = SelectColumnFactory.getExpression(currentColumn); } @@ -866,13 +829,12 @@ default Table moveColumns(int index, boolean moveToEnd, String... columnsToMove) } /** - * Produce a new table with the same columns as this table, but with a new column presenting the - * specified DBDateTime column as a Long column (with each DBDateTime represented instead as the - * corresponding number of nanos since the epoch). + * Produce a new table with the same columns as this table, but with a new column presenting the specified + * DBDateTime column as a Long column (with each DBDateTime represented instead as the corresponding number of nanos + * since the epoch). *

    - * NOTE: This is a really just an updateView(), and behaves accordingly for column ordering and - * (re)placement. This doesn't work on data that has been brought fully into memory (e.g. via - * select()). Use a view instead. + * NOTE: This is a really just an updateView(), and behaves accordingly for column ordering and (re)placement. This + * doesn't work on data that has been brought fully into memory (e.g. via select()). Use a view instead. * * @param dateTimeColumnName * @param nanosColumnName @@ -881,8 +843,7 @@ default Table moveColumns(int index, boolean moveToEnd, String... columnsToMove) @AsyncMethod default Table dateTimeColumnAsNanos(String dateTimeColumnName, String nanosColumnName) { // noinspection unchecked - return updateView(new ReinterpretedColumn(dateTimeColumnName, DBDateTime.class, - nanosColumnName, long.class)); + return updateView(new ReinterpretedColumn(dateTimeColumnName, DBDateTime.class, nanosColumnName, long.class)); } /** @@ -907,23 +868,19 @@ default Table dateTimeColumnAsNanos(String columnName) { /** * Extracts a subset of a table by row position. * - * If both firstPosition and lastPosition are positive, then the rows are counted from the - * beginning of the table. The firstPosition is inclusive, and the lastPosition is exclusive. - * The {@link #head}(N) call is equivalent to slice(0, N). The firstPosition must be less than - * or equal to the lastPosition. + * If both firstPosition and lastPosition are positive, then the rows are counted from the beginning of the table. + * The firstPosition is inclusive, and the lastPosition is exclusive. The {@link #head}(N) call is equivalent to + * slice(0, N). The firstPosition must be less than or equal to the lastPosition. * - * If firstPosition is positive and lastPosition is negative, then the firstRow is counted from - * the beginning of the table, inclusively. The lastPosition is counted from the end of the - * table. 
For example, slice(1, -1) includes all rows but the first and last. If the - * lastPosition would be before the firstRow, the result is an emptyTable. + * If firstPosition is positive and lastPosition is negative, then the firstRow is counted from the beginning of the + * table, inclusively. The lastPosition is counted from the end of the table. For example, slice(1, -1) includes all + * rows but the first and last. If the lastPosition would be before the firstRow, the result is an emptyTable. * - * If firstPosition is negative, and lastPosition is zero, then the firstRow is counted from the - * end of the table, and the end of the slice is the size of the table. slice(-N, 0) is - * equivalent to {@link #tail}(N). + * If firstPosition is negative, and lastPosition is zero, then the firstRow is counted from the end of the table, + * and the end of the slice is the size of the table. slice(-N, 0) is equivalent to {@link #tail}(N). * - * If the firstPosition is nega tive and the lastPosition is negative, they are both counted - * from the end of the table. For example, slice(-2, -1) returns the second to last row of the - * table. + * If the firstPosition is nega tive and the lastPosition is negative, they are both counted from the end of the + * table. For example, slice(-2, -1) returns the second to last row of the table. * * @param firstPositionInclusive the first position to include in the result * @param lastPositionExclusive the last position to include in the result @@ -936,8 +893,8 @@ default Table dateTimeColumnAsNanos(String columnName) { /** * Provides a head that selects a dynamic number of rows based on a percent. * - * @param percent the fraction of the table to return (0..1), the number of rows will be rounded - * up. For example if there are 3 rows, headPct(50) returns the first two rows. + * @param percent the fraction of the table to return (0..1), the number of rows will be rounded up. 
For example if + * there are 3 rows, headPct(50) returns the first two rows. */ @AsyncMethod Table headPct(double percent); @@ -950,8 +907,8 @@ default Table dateTimeColumnAsNanos(String columnName) { // ----------------------------------------------------------------------------------------------------------------- /** - * GroupStrategy is used for joins and other operations that can choose one of several ways to - * make use of grouping information. + * GroupStrategy is used for joins and other operations that can choose one of several ways to make use of grouping + * information. */ enum GroupStrategy { DEFAULT, LINEAR, USE_EXISTING_GROUPS, CREATE_GROUPS, @@ -962,53 +919,49 @@ enum GroupStrategy { // ----------------------------------------------------------------------------------------------------------------- /** - * Returns a table that has one column for each original table's columns, and one column - * corresponding to each of the input table (right table) columns listed in the columns to add - * (or all the columns whose names don't overlap with the name of a column from the source table - * if the columnsToAdd is length zero). The new columns (those corresponding to the input table) - * contain an aggregation of all values from the left side that match the join criteria. - * Consequently the types of all right side columns not involved in a join criteria, is an array - * of the original column type. If the two tables have columns with matching names then the - * method will fail with an exception unless the columns with corresponding names are found in - * one of the matching criteria. + * Returns a table that has one column for each original table's columns, and one column corresponding to each of + * the input table (right table) columns listed in the columns to add (or all the columns whose names don't overlap + * with the name of a column from the source table if the columnsToAdd is length zero). 
The new columns (those + * corresponding to the input table) contain an aggregation of all values from the left side that match the join + * criteria. Consequently the types of all right side columns not involved in a join criteria, is an array of the + * original column type. If the two tables have columns with matching names then the method will fail with an + * exception unless the columns with corresponding names are found in one of the matching criteria. *

    *

    - * NOTE: leftJoin operation does not involve an actual data copy, or an in-memory table - * creation. In order to produce an actual in memory table you need to apply a select call on - * the join result. + * NOTE: leftJoin operation does not involve an actual data copy, or an in-memory table creation. In order to + * produce an actual in memory table you need to apply a select call on the join result. * * @param rightTable input table * @param columnsToMatch match criteria * @param columnsToAdd columns to add - * @return a table that has one column for each original table's columns, and one column - * corresponding to each column listed in columnsToAdd. If columnsToAdd.length==0 one - * column corresponding to each column of the input table (right table) columns whose - * names don't overlap with the name of a column from the source table is added. The new - * columns (those corresponding to the input table) contain an aggregation of all values - * from the left side that match the join criteria. + * @return a table that has one column for each original table's columns, and one column corresponding to each + * column listed in columnsToAdd. If columnsToAdd.length==0 one column corresponding to each column of the + * input table (right table) columns whose names don't overlap with the name of a column from the source + * table is added. The new columns (those corresponding to the input table) contain an aggregation of all + * values from the left side that match the join criteria. 
*/ Table leftJoin(Table rightTable, MatchPair columnsToMatch[], MatchPair[] columnsToAdd); default Table leftJoin(Table rightTable, Collection columnsToMatch, - Collection columnsToAdd) { + Collection columnsToAdd) { return leftJoin( - rightTable, - MatchPair.fromMatches(columnsToMatch), - MatchPair.fromAddition(columnsToAdd)); + rightTable, + MatchPair.fromMatches(columnsToMatch), + MatchPair.fromAddition(columnsToAdd)); } default Table leftJoin(Table rightTable, Collection columnsToMatch) { return leftJoin( - rightTable, - MatchPairFactory.getExpressions(columnsToMatch), - MatchPair.ZERO_LENGTH_MATCH_PAIR_ARRAY); + rightTable, + MatchPairFactory.getExpressions(columnsToMatch), + MatchPair.ZERO_LENGTH_MATCH_PAIR_ARRAY); } default Table leftJoin(Table rightTable, String columnsToMatch, String columnsToAdd) { return leftJoin( - rightTable, - MatchPairFactory.getExpressions(StringUtils.splitToCollection(columnsToMatch)), - MatchPairFactory.getExpressions(StringUtils.splitToCollection(columnsToAdd))); + rightTable, + MatchPairFactory.getExpressions(StringUtils.splitToCollection(columnsToMatch)), + MatchPairFactory.getExpressions(StringUtils.splitToCollection(columnsToAdd))); } default Table leftJoin(Table rightTable, String columnsToMatch) { @@ -1023,25 +976,25 @@ default Table leftJoin(Table rightTable) { @Override default Table exactJoin(Table rightTable, Collection columnsToMatch, - Collection columnsToAdd) { + Collection columnsToAdd) { return exactJoin( - rightTable, - MatchPair.fromMatches(columnsToMatch), - MatchPair.fromAddition(columnsToAdd)); + rightTable, + MatchPair.fromMatches(columnsToMatch), + MatchPair.fromAddition(columnsToAdd)); } default Table exactJoin(Table rightTable, String columnsToMatch, String columnsToAdd) { return exactJoin( - rightTable, - MatchPairFactory.getExpressions(StringUtils.splitToCollection(columnsToMatch)), - MatchPairFactory.getExpressions(StringUtils.splitToCollection(columnsToAdd))); + rightTable, + 
MatchPairFactory.getExpressions(StringUtils.splitToCollection(columnsToMatch)), + MatchPairFactory.getExpressions(StringUtils.splitToCollection(columnsToAdd))); } default Table exactJoin(Table rightTable, String columnsToMatch) { return exactJoin( - rightTable, - MatchPairFactory.getExpressions(StringUtils.splitToCollection(columnsToMatch)), - MatchPair.ZERO_LENGTH_MATCH_PAIR_ARRAY); + rightTable, + MatchPairFactory.getExpressions(StringUtils.splitToCollection(columnsToMatch)), + MatchPair.ZERO_LENGTH_MATCH_PAIR_ARRAY); } enum AsOfMatchRule { @@ -1070,33 +1023,32 @@ static AsOfMatchRule of(ReverseAsOfJoinRule rule) { /** - * Looks up the columns in the rightTable that meet the match conditions in the columnsToMatch - * list. Matching is done exactly for the first n-1 columns and via a binary search for the last - * match pair. The columns of the original table are returned intact, together with the columns - * from rightTable defined in a comma separated list "columnsToAdd" + * Looks up the columns in the rightTable that meet the match conditions in the columnsToMatch list. Matching is + * done exactly for the first n-1 columns and via a binary search for the last match pair. The columns of the + * original table are returned intact, together with the columns from rightTable defined in a comma separated list + * "columnsToAdd" * * @param rightTable The right side table on the join. * @param columnsToMatch A comma separated list of match conditions ("leftColumn=rightColumn" or * "columnFoundInBoth") - * @param columnsToAdd A comma separated list with the columns from the left side that need to - * be added to the right side as a result of the match. + * @param columnsToAdd A comma separated list with the columns from the left side that need to be added to the right + * side as a result of the match. 
* @return a new table joined according to the specification in columnsToMatch and columnsToAdd */ - Table aj(Table rightTable, MatchPair[] columnsToMatch, MatchPair[] columnsToAdd, - AsOfMatchRule asOfMatchRule); + Table aj(Table rightTable, MatchPair[] columnsToMatch, MatchPair[] columnsToAdd, AsOfMatchRule asOfMatchRule); /** - * Looks up the columns in the rightTable that meet the match conditions in the columnsToMatch - * list. Matching is done exactly for the first n-1 columns and via a binary search for the last - * match pair. The columns of the original table are returned intact, together with the columns - * from rightTable defined in a comma separated list "columnsToAdd" + * Looks up the columns in the rightTable that meet the match conditions in the columnsToMatch list. Matching is + * done exactly for the first n-1 columns and via a binary search for the last match pair. The columns of the + * original table are returned intact, together with the columns from rightTable defined in a comma separated list + * "columnsToAdd" * * @param rightTable The right side table on the join. * @param columnsToMatch A comma separated list of match conditions ("leftColumn=rightColumn" or * "columnFoundInBoth") - * @param columnsToAdd A comma separated list with the columns from the left side that need to - * be added to the right side as a result of the match. + * @param columnsToAdd A comma separated list with the columns from the left side that need to be added to the right + * side as a result of the match. 
* @return a new table joined according to the specification in columnsToMatch and columnsToAdd */ default Table aj(Table rightTable, MatchPair[] columnsToMatch, MatchPair[] columnsToAdd) { @@ -1104,27 +1056,26 @@ default Table aj(Table rightTable, MatchPair[] columnsToMatch, MatchPair[] colum } default Table aj(Table rightTable, Collection columnsToMatch, - Collection columnsToAdd) { + Collection columnsToAdd) { return aj( - rightTable, - MatchPair.fromMatches(columnsToMatch), - MatchPair.fromAddition(columnsToAdd)); + rightTable, + MatchPair.fromMatches(columnsToMatch), + MatchPair.fromAddition(columnsToAdd)); } default Table aj(Table rightTable, Collection columnsToMatch, - Collection columnsToAdd, AsOfJoinRule asOfJoinRule) { + Collection columnsToAdd, AsOfJoinRule asOfJoinRule) { return aj( - rightTable, - MatchPair.fromMatches(columnsToMatch), - MatchPair.fromAddition(columnsToAdd), - AsOfMatchRule.of(asOfJoinRule)); + rightTable, + MatchPair.fromMatches(columnsToMatch), + MatchPair.fromAddition(columnsToAdd), + AsOfMatchRule.of(asOfJoinRule)); } /** - * Looks up the columns in the rightTable that meet the match conditions in the columnsToMatch - * list. Matching is done exactly for the first n-1 columns and via a binary search for the last - * match pair. The columns of the original table are returned intact, together with all the - * columns from rightTable. + * Looks up the columns in the rightTable that meet the match conditions in the columnsToMatch list. Matching is + * done exactly for the first n-1 columns and via a binary search for the last match pair. The columns of the + * original table are returned intact, together with all the columns from rightTable. * * @param rightTable The right side table on the join. 
* @param columnsToMatch A comma separated list of match conditions ("leftColumn=rightColumn" or @@ -1132,23 +1083,22 @@ default Table aj(Table rightTable, Collection columnsToMatc * @return a new table joined according to the specification in columnsToMatch and columnsToAdd */ default Table aj(Table rightTable, Collection columnsToMatch) { - Pair expressions = - AjMatchPairFactory.getExpressions(false, columnsToMatch); + Pair expressions = AjMatchPairFactory.getExpressions(false, columnsToMatch); return aj( - rightTable, - expressions.getFirst(), - MatchPair.ZERO_LENGTH_MATCH_PAIR_ARRAY, - expressions.getSecond()); + rightTable, + expressions.getFirst(), + MatchPair.ZERO_LENGTH_MATCH_PAIR_ARRAY, + expressions.getSecond()); } default Table aj(Table rightTable, String columnsToMatch, String columnsToAdd) { Pair expressions = - AjMatchPairFactory.getExpressions(false, StringUtils.splitToCollection(columnsToMatch)); + AjMatchPairFactory.getExpressions(false, StringUtils.splitToCollection(columnsToMatch)); return aj( - rightTable, - expressions.getFirst(), - MatchPairFactory.getExpressions(StringUtils.splitToCollection(columnsToAdd)), - expressions.getSecond()); + rightTable, + expressions.getFirst(), + MatchPairFactory.getExpressions(StringUtils.splitToCollection(columnsToAdd)), + expressions.getSecond()); } default Table aj(Table rightTable, String columnsToMatch) { @@ -1156,39 +1106,38 @@ default Table aj(Table rightTable, String columnsToMatch) { } /** - * Just like .aj(), but the matching on the last column is in reverse order, so that you find - * the row after the given timestamp instead of the row before. + * Just like .aj(), but the matching on the last column is in reverse order, so that you find the row after the + * given timestamp instead of the row before. *

    - * Looks up the columns in the rightTable that meet the match conditions in the columnsToMatch - * list. Matching is done exactly for the first n-1 columns and via a binary search for the last - * match pair. The columns of the original table are returned intact, together with the columns - * from rightTable defined in a comma separated list "columnsToAdd" + * Looks up the columns in the rightTable that meet the match conditions in the columnsToMatch list. Matching is + * done exactly for the first n-1 columns and via a binary search for the last match pair. The columns of the + * original table are returned intact, together with the columns from rightTable defined in a comma separated list + * "columnsToAdd" * * @param rightTable The right side table on the join. * @param columnsToMatch A comma separated list of match conditions ("leftColumn=rightColumn" or * "columnFoundInBoth") - * @param columnsToAdd A comma separated list with the columns from the left side that need to - * be added to the right side as a result of the match. + * @param columnsToAdd A comma separated list with the columns from the left side that need to be added to the right + * side as a result of the match. * @return a new table joined according to the specification in columnsToMatch and columnsToAdd */ - Table raj(Table rightTable, MatchPair columnsToMatch[], MatchPair[] columnsToAdd, - AsOfMatchRule asOfMatchRule); + Table raj(Table rightTable, MatchPair columnsToMatch[], MatchPair[] columnsToAdd, AsOfMatchRule asOfMatchRule); /** - * Just like .aj(), but the matching on the last column is in reverse order, so that you find - * the row after the given timestamp instead of the row before. + * Just like .aj(), but the matching on the last column is in reverse order, so that you find the row after the + * given timestamp instead of the row before. *

    - * Looks up the columns in the rightTable that meet the match conditions in the columnsToMatch - * list. Matching is done exactly for the first n-1 columns and via a binary search for the last - * match pair. The columns of the original table are returned intact, together with the columns - * from rightTable defined in a comma separated list "columnsToAdd" + * Looks up the columns in the rightTable that meet the match conditions in the columnsToMatch list. Matching is + * done exactly for the first n-1 columns and via a binary search for the last match pair. The columns of the + * original table are returned intact, together with the columns from rightTable defined in a comma separated list + * "columnsToAdd" * * @param rightTable The right side table on the join. * @param columnsToMatch A comma separated list of match conditions ("leftColumn=rightColumn" or * "columnFoundInBoth") - * @param columnsToAdd A comma separated list with the columns from the left side that need to - * be added to the right side as a result of the match. + * @param columnsToAdd A comma separated list with the columns from the left side that need to be added to the right + * side as a result of the match. 
* @return a new table joined according to the specification in columnsToMatch and columnsToAdd */ default Table raj(Table rightTable, MatchPair columnsToMatch[], MatchPair[] columnsToAdd) { @@ -1196,30 +1145,29 @@ default Table raj(Table rightTable, MatchPair columnsToMatch[], MatchPair[] colu } default Table raj(Table rightTable, Collection columnsToMatch, - Collection columnsToAdd) { + Collection columnsToAdd) { return raj( - rightTable, - MatchPair.fromMatches(columnsToMatch), - MatchPair.fromAddition(columnsToAdd)); + rightTable, + MatchPair.fromMatches(columnsToMatch), + MatchPair.fromAddition(columnsToAdd)); } default Table raj(Table rightTable, Collection columnsToMatch, - Collection columnsToAdd, ReverseAsOfJoinRule reverseAsOfJoinRule) { + Collection columnsToAdd, ReverseAsOfJoinRule reverseAsOfJoinRule) { return raj( - rightTable, - MatchPair.fromMatches(columnsToMatch), - MatchPair.fromAddition(columnsToAdd), - AsOfMatchRule.of(reverseAsOfJoinRule)); + rightTable, + MatchPair.fromMatches(columnsToMatch), + MatchPair.fromAddition(columnsToAdd), + AsOfMatchRule.of(reverseAsOfJoinRule)); } /** - * Just like .aj(), but the matching on the last column is in reverse order, so that you find - * the row after the given timestamp instead of the row before. + * Just like .aj(), but the matching on the last column is in reverse order, so that you find the row after the + * given timestamp instead of the row before. *

    - * Looks up the columns in the rightTable that meet the match conditions in the columnsToMatch - * list. Matching is done exactly for the first n-1 columns and via a binary search for the last - * match pair. The columns of the original table are returned intact, together with the all - * columns from rightTable. + * Looks up the columns in the rightTable that meet the match conditions in the columnsToMatch list. Matching is + * done exactly for the first n-1 columns and via a binary search for the last match pair. The columns of the + * original table are returned intact, together with the all columns from rightTable. * * @param rightTable The right side table on the join. * @param columnsToMatch A comma separated list of match conditions ("leftColumn=rightColumn" or @@ -1227,39 +1175,38 @@ default Table raj(Table rightTable, Collection columnsToMat * @return a new table joined according to the specification in columnsToMatch and columnsToAdd */ default Table raj(Table rightTable, Collection columnsToMatch) { - Pair expressions = - AjMatchPairFactory.getExpressions(true, columnsToMatch); + Pair expressions = AjMatchPairFactory.getExpressions(true, columnsToMatch); return raj( - rightTable, - expressions.getFirst(), - MatchPair.ZERO_LENGTH_MATCH_PAIR_ARRAY, - expressions.getSecond()); + rightTable, + expressions.getFirst(), + MatchPair.ZERO_LENGTH_MATCH_PAIR_ARRAY, + expressions.getSecond()); } /** - * Just like .aj(), but the matching on the last column is in reverse order, so that you find - * the row after the given timestamp instead of the row before. + * Just like .aj(), but the matching on the last column is in reverse order, so that you find the row after the + * given timestamp instead of the row before. *

    - * Looks up the columns in the rightTable that meet the match conditions in the columnsToMatch - * list. Matching is done exactly for the first n-1 columns and via a binary search for the last - * match pair. The columns of the original table are returned intact, together with the columns - * from rightTable defined in a comma separated list "columnsToAdd" + * Looks up the columns in the rightTable that meet the match conditions in the columnsToMatch list. Matching is + * done exactly for the first n-1 columns and via a binary search for the last match pair. The columns of the + * original table are returned intact, together with the columns from rightTable defined in a comma separated list + * "columnsToAdd" * * @param rightTable The right side table on the join. * @param columnsToMatch A comma separated list of match conditions ("leftColumn=rightColumn" or * "columnFoundInBoth") - * @param columnsToAdd A comma separated list with the columns from the left side that need to - * be added to the right side as a result of the match. + * @param columnsToAdd A comma separated list with the columns from the left side that need to be added to the right + * side as a result of the match. 
* @return a new table joined according to the specification in columnsToMatch and columnsToAdd */ default Table raj(Table rightTable, String columnsToMatch, String columnsToAdd) { Pair expressions = - AjMatchPairFactory.getExpressions(true, StringUtils.splitToCollection(columnsToMatch)); + AjMatchPairFactory.getExpressions(true, StringUtils.splitToCollection(columnsToMatch)); return raj( - rightTable, - expressions.getFirst(), - MatchPairFactory.getExpressions(StringUtils.splitToCollection(columnsToAdd)), - expressions.getSecond()); + rightTable, + expressions.getFirst(), + MatchPairFactory.getExpressions(StringUtils.splitToCollection(columnsToAdd)), + expressions.getSecond()); } default Table raj(Table rightTable, String columnsToMatch) { @@ -1270,280 +1217,262 @@ default Table raj(Table rightTable, String columnsToMatch) { @Override default Table naturalJoin(Table rightTable, Collection columnsToMatch, - Collection columnsToAdd) { + Collection columnsToAdd) { return naturalJoin( - rightTable, - MatchPair.fromMatches(columnsToMatch), - MatchPair.fromAddition(columnsToAdd)); + rightTable, + MatchPair.fromMatches(columnsToMatch), + MatchPair.fromAddition(columnsToAdd)); } default Table naturalJoin(Table rightTable, String columnsToMatch, String columnsToAdd) { return naturalJoin( - rightTable, - MatchPairFactory.getExpressions(StringUtils.splitToCollection(columnsToMatch)), - MatchPairFactory.getExpressions(StringUtils.splitToCollection(columnsToAdd))); + rightTable, + MatchPairFactory.getExpressions(StringUtils.splitToCollection(columnsToMatch)), + MatchPairFactory.getExpressions(StringUtils.splitToCollection(columnsToAdd))); } default Table naturalJoin(Table rightTable, String columnsToMatch) { return naturalJoin( - rightTable, - MatchPairFactory.getExpressions(StringUtils.splitToCollection(columnsToMatch)), - MatchPair.ZERO_LENGTH_MATCH_PAIR_ARRAY); + rightTable, + MatchPairFactory.getExpressions(StringUtils.splitToCollection(columnsToMatch)), + 
MatchPair.ZERO_LENGTH_MATCH_PAIR_ARRAY); } /** * Perform a cross join with the right table. *

    - * Returns a table that is the cartesian product of left rows X right rows, with one column for - * each of the left table's columns, and one column corresponding to each of the right table's - * columns. The rows are ordered first by the left table then by the right table. + * Returns a table that is the cartesian product of left rows X right rows, with one column for each of the left + * table's columns, and one column corresponding to each of the right table's columns. The rows are ordered first by + * the left table then by the right table. *

    - * To efficiently produce updates, the bits that represent a key for a given row are split into - * two. Unless specified, join reserves 16 bits to represent a right row. When there are too few - * bits to represent all of the right rows for a given aggregation group the table will shift a - * bit from the left side to the right side. The default of 16 bits was carefully chosen because - * it results in an efficient implementation to process live updates. + * To efficiently produce updates, the bits that represent a key for a given row are split into two. Unless + * specified, join reserves 16 bits to represent a right row. When there are too few bits to represent all of the + * right rows for a given aggregation group the table will shift a bit from the left side to the right side. The + * default of 16 bits was carefully chosen because it results in an efficient implementation to process live + * updates. *

    - * An {@link io.deephaven.db.v2.utils.OutOfKeySpaceException} is thrown when the total number of - * bits needed to express the result table exceeds that needed to represent Long.MAX_VALUE. - * There are a few work arounds: - If the left table is sparse, consider flattening the left - * table. - If there are no key-columns and the right table is sparse, consider flattening the - * right table. - If the maximum size of a right table's group is small, you can reserve fewer - * bits by setting numRightBitsToReserve on initialization. + * An {@link io.deephaven.db.v2.utils.OutOfKeySpaceException} is thrown when the total number of bits needed to + * express the result table exceeds that needed to represent Long.MAX_VALUE. There are a few work arounds: - If the + * left table is sparse, consider flattening the left table. - If there are no key-columns and the right table is + * sparse, consider flattening the right table. - If the maximum size of a right table's group is small, you can + * reserve fewer bits by setting numRightBitsToReserve on initialization. *

    - * Note: If you can prove that a given group has at most one right-row then you should prefer - * using {@link #naturalJoin}. + * Note: If you can prove that a given group has at most one right-row then you should prefer using + * {@link #naturalJoin}. * * @param rightTable The right side table on the join. - * @return a new table joined according to the specification with zero key-columns and includes - * all right columns + * @return a new table joined according to the specification with zero key-columns and includes all right columns */ default Table join(Table rightTable) { return join( - rightTable, - MatchPair.ZERO_LENGTH_MATCH_PAIR_ARRAY, - MatchPair.ZERO_LENGTH_MATCH_PAIR_ARRAY); + rightTable, + MatchPair.ZERO_LENGTH_MATCH_PAIR_ARRAY, + MatchPair.ZERO_LENGTH_MATCH_PAIR_ARRAY); } /** * Perform a cross join with the right table. *

    - * Returns a table that is the cartesian product of left rows X right rows, with one column for - * each of the left table's columns, and one column corresponding to each of the right table's - * columns. The rows are ordered first by the left table then by the right table. + * Returns a table that is the cartesian product of left rows X right rows, with one column for each of the left + * table's columns, and one column corresponding to each of the right table's columns. The rows are ordered first by + * the left table then by the right table. *

    - * To efficiently produce updates, the bits that represent a key for a given row are split into - * two. Unless specified, join reserves 16 bits to represent a right row. When there are too few - * bits to represent all of the right rows for a given aggregation group the table will shift a - * bit from the left side to the right side. The default of 16 bits was carefully chosen because - * it results in an efficient implementation to process live updates. + * To efficiently produce updates, the bits that represent a key for a given row are split into two. Unless + * specified, join reserves 16 bits to represent a right row. When there are too few bits to represent all of the + * right rows for a given aggregation group the table will shift a bit from the left side to the right side. The + * default of 16 bits was carefully chosen because it results in an efficient implementation to process live + * updates. *

    - * An {@link io.deephaven.db.v2.utils.OutOfKeySpaceException} is thrown when the total number of - * bits needed to express the result table exceeds that needed to represent Long.MAX_VALUE. - * There are a few work arounds: - If the left table is sparse, consider flattening the left - * table. - If there are no key-columns and the right table is sparse, consider flattening the - * right table. - If the maximum size of a right table's group is small, you can reserve fewer - * bits by setting numRightBitsToReserve on initialization. + * An {@link io.deephaven.db.v2.utils.OutOfKeySpaceException} is thrown when the total number of bits needed to + * express the result table exceeds that needed to represent Long.MAX_VALUE. There are a few work arounds: - If the + * left table is sparse, consider flattening the left table. - If there are no key-columns and the right table is + * sparse, consider flattening the right table. - If the maximum size of a right table's group is small, you can + * reserve fewer bits by setting numRightBitsToReserve on initialization. *

    - * Note: If you can prove that a given group has at most one right-row then you should prefer - * using {@link #naturalJoin}. + * Note: If you can prove that a given group has at most one right-row then you should prefer using + * {@link #naturalJoin}. * * @param rightTable The right side table on the join. * @param numRightBitsToReserve The number of bits to reserve for rightTable groups. - * @return a new table joined according to the specification with zero key-columns and includes - * all right columns + * @return a new table joined according to the specification with zero key-columns and includes all right columns */ default Table join(Table rightTable, int numRightBitsToReserve) { - return join(rightTable, Collections.emptyList(), Collections.emptyList(), - numRightBitsToReserve); + return join(rightTable, Collections.emptyList(), Collections.emptyList(), numRightBitsToReserve); } default Table join(Table rightTable, String columnsToMatch) { return join( - rightTable, - MatchPairFactory.getExpressions(StringUtils.splitToCollection(columnsToMatch)), - MatchPair.ZERO_LENGTH_MATCH_PAIR_ARRAY); + rightTable, + MatchPairFactory.getExpressions(StringUtils.splitToCollection(columnsToMatch)), + MatchPair.ZERO_LENGTH_MATCH_PAIR_ARRAY); } /** * Perform a cross join with the right table. *

    - * Returns a table that is the cartesian product of left rows X right rows, with one column for - * each of the left table's columns, and one column corresponding to each of the right table's - * columns that are not key-columns. The rows are ordered first by the left table then by the - * right table. If columnsToMatch is non-empty then the product is filtered by the supplied - * match conditions. + * Returns a table that is the cartesian product of left rows X right rows, with one column for each of the left + * table's columns, and one column corresponding to each of the right table's columns that are not key-columns. The + * rows are ordered first by the left table then by the right table. If columnsToMatch is non-empty then the product + * is filtered by the supplied match conditions. *

    - * To efficiently produce updates, the bits that represent a key for a given row are split into - * two. Unless specified, join reserves 16 bits to represent a right row. When there are too few - * bits to represent all of the right rows for a given aggregation group the table will shift a - * bit from the left side to the right side. The default of 16 bits was carefully chosen because - * it results in an efficient implementation to process live updates. + * To efficiently produce updates, the bits that represent a key for a given row are split into two. Unless + * specified, join reserves 16 bits to represent a right row. When there are too few bits to represent all of the + * right rows for a given aggregation group the table will shift a bit from the left side to the right side. The + * default of 16 bits was carefully chosen because it results in an efficient implementation to process live + * updates. *

    - * An {@link io.deephaven.db.v2.utils.OutOfKeySpaceException} is thrown when the total number of - * bits needed to express the result table exceeds that needed to represent Long.MAX_VALUE. - * There are a few work arounds: - If the left table is sparse, consider flattening the left - * table. - If there are no key-columns and the right table is sparse, consider flattening the - * right table. - If the maximum size of a right table's group is small, you can reserve fewer - * bits by setting numRightBitsToReserve on initialization. + * An {@link io.deephaven.db.v2.utils.OutOfKeySpaceException} is thrown when the total number of bits needed to + * express the result table exceeds that needed to represent Long.MAX_VALUE. There are a few work arounds: - If the + * left table is sparse, consider flattening the left table. - If there are no key-columns and the right table is + * sparse, consider flattening the right table. - If the maximum size of a right table's group is small, you can + * reserve fewer bits by setting numRightBitsToReserve on initialization. *

    - * Note: If you can prove that a given group has at most one right-row then you should prefer - * using {@link #naturalJoin}. + * Note: If you can prove that a given group has at most one right-row then you should prefer using + * {@link #naturalJoin}. * * @param rightTable The right side table on the join. * @param columnsToMatch A comma separated list of match conditions ("leftColumn=rightColumn" or * "columnFoundInBoth") * @param numRightBitsToReserve The number of bits to reserve for rightTable groups. - * @return a new table joined according to the specification in columnsToMatch and includes all - * non-key-columns from the right table + * @return a new table joined according to the specification in columnsToMatch and includes all non-key-columns from + * the right table */ default Table join(Table rightTable, String columnsToMatch, int numRightBitsToReserve) { return join( - rightTable, - MatchPairFactory.getExpressions(StringUtils.splitToCollection(columnsToMatch)), - MatchPair.ZERO_LENGTH_MATCH_PAIR_ARRAY, - numRightBitsToReserve); + rightTable, + MatchPairFactory.getExpressions(StringUtils.splitToCollection(columnsToMatch)), + MatchPair.ZERO_LENGTH_MATCH_PAIR_ARRAY, + numRightBitsToReserve); } default Table join(Table rightTable, String columnsToMatch, String columnsToAdd) { return join( - rightTable, - MatchPairFactory.getExpressions(StringUtils.splitToCollection(columnsToMatch)), - MatchPairFactory.getExpressions(StringUtils.splitToCollection(columnsToAdd))); + rightTable, + MatchPairFactory.getExpressions(StringUtils.splitToCollection(columnsToMatch)), + MatchPairFactory.getExpressions(StringUtils.splitToCollection(columnsToAdd))); } /** * Perform a cross join with the right table. *

    - * Returns a table that is the cartesian product of left rows X right rows, with one column for - * each of the left table's columns, and one column corresponding to each of the right table's - * columns that are included in the columnsToAdd argument. The rows are ordered first by the - * left table then by the right table. If columnsToMatch is non-empty then the product is - * filtered by the supplied match conditions. + * Returns a table that is the cartesian product of left rows X right rows, with one column for each of the left + * table's columns, and one column corresponding to each of the right table's columns that are included in the + * columnsToAdd argument. The rows are ordered first by the left table then by the right table. If columnsToMatch is + * non-empty then the product is filtered by the supplied match conditions. *

    - * To efficiently produce updates, the bits that represent a key for a given row are split into - * two. Unless specified, join reserves 16 bits to represent a right row. When there are too few - * bits to represent all of the right rows for a given aggregation group the table will shift a - * bit from the left side to the right side. The default of 16 bits was carefully chosen because - * it results in an efficient implementation to process live updates. + * To efficiently produce updates, the bits that represent a key for a given row are split into two. Unless + * specified, join reserves 16 bits to represent a right row. When there are too few bits to represent all of the + * right rows for a given aggregation group the table will shift a bit from the left side to the right side. The + * default of 16 bits was carefully chosen because it results in an efficient implementation to process live + * updates. *

    - * An {@link io.deephaven.db.v2.utils.OutOfKeySpaceException} is thrown when the total number of - * bits needed to express the result table exceeds that needed to represent Long.MAX_VALUE. - * There are a few work arounds: - If the left table is sparse, consider flattening the left - * table. - If there are no key-columns and the right table is sparse, consider flattening the - * right table. - If the maximum size of a right table's group is small, you can reserve fewer - * bits by setting numRightBitsToReserve on initialization. + * An {@link io.deephaven.db.v2.utils.OutOfKeySpaceException} is thrown when the total number of bits needed to + * express the result table exceeds that needed to represent Long.MAX_VALUE. There are a few work arounds: - If the + * left table is sparse, consider flattening the left table. - If there are no key-columns and the right table is + * sparse, consider flattening the right table. - If the maximum size of a right table's group is small, you can + * reserve fewer bits by setting numRightBitsToReserve on initialization. *

    - * Note: If you can prove that a given group has at most one right-row then you should prefer - * using {@link #naturalJoin}. + * Note: If you can prove that a given group has at most one right-row then you should prefer using + * {@link #naturalJoin}. * * @param rightTable The right side table on the join. * @param columnsToMatch A comma separated list of match conditions ("leftColumn=rightColumn" or * "columnFoundInBoth") - * @param columnsToAdd A comma separated list with the columns from the right side that need to - * be added to the left side as a result of the match. + * @param columnsToAdd A comma separated list with the columns from the right side that need to be added to the left + * side as a result of the match. * @param numRightBitsToReserve The number of bits to reserve for rightTable groups. * @return a new table joined according to the specification in columnsToMatch and columnsToAdd */ - default Table join(Table rightTable, String columnsToMatch, String columnsToAdd, - int numRightBitsToReserve) { + default Table join(Table rightTable, String columnsToMatch, String columnsToAdd, int numRightBitsToReserve) { return join( - rightTable, - MatchPairFactory.getExpressions(StringUtils.splitToCollection(columnsToMatch)), - MatchPairFactory.getExpressions(StringUtils.splitToCollection(columnsToAdd)), - numRightBitsToReserve); + rightTable, + MatchPairFactory.getExpressions(StringUtils.splitToCollection(columnsToMatch)), + MatchPairFactory.getExpressions(StringUtils.splitToCollection(columnsToAdd)), + numRightBitsToReserve); } /** * Perform a cross join with the right table. *

    - * Returns a table that is the cartesian product of left rows X right rows, with one column for - * each of the left table's columns, and one column corresponding to each of the right table's - * columns that are included in the columnsToAdd argument. The rows are ordered first by the - * left table then by the right table. If columnsToMatch is non-empty then the product is - * filtered by the supplied match conditions. + * Returns a table that is the cartesian product of left rows X right rows, with one column for each of the left + * table's columns, and one column corresponding to each of the right table's columns that are included in the + * columnsToAdd argument. The rows are ordered first by the left table then by the right table. If columnsToMatch is + * non-empty then the product is filtered by the supplied match conditions. *

    - * To efficiently produce updates, the bits that represent a key for a given row are split into - * two. Unless specified, join reserves 16 bits to represent a right row. When there are too few - * bits to represent all of the right rows for a given aggregation group the table will shift a - * bit from the left side to the right side. The default of 16 bits was carefully chosen because - * it results in an efficient implementation to process live updates. + * To efficiently produce updates, the bits that represent a key for a given row are split into two. Unless + * specified, join reserves 16 bits to represent a right row. When there are too few bits to represent all of the + * right rows for a given aggregation group the table will shift a bit from the left side to the right side. The + * default of 16 bits was carefully chosen because it results in an efficient implementation to process live + * updates. *

    - * An {@link io.deephaven.db.v2.utils.OutOfKeySpaceException} is thrown when the total number of - * bits needed to express the result table exceeds that needed to represent Long.MAX_VALUE. - * There are a few work arounds: - If the left table is sparse, consider flattening the left - * table. - If there are no key-columns and the right table is sparse, consider flattening the - * right table. - If the maximum size of a right table's group is small, you can reserve fewer - * bits by setting numRightBitsToReserve on initialization. + * An {@link io.deephaven.db.v2.utils.OutOfKeySpaceException} is thrown when the total number of bits needed to + * express the result table exceeds that needed to represent Long.MAX_VALUE. There are a few work arounds: - If the + * left table is sparse, consider flattening the left table. - If there are no key-columns and the right table is + * sparse, consider flattening the right table. - If the maximum size of a right table's group is small, you can + * reserve fewer bits by setting numRightBitsToReserve on initialization. *

    - * Note: If you can prove that a given group has at most one right-row then you should prefer - * using {@link #naturalJoin}. + * Note: If you can prove that a given group has at most one right-row then you should prefer using + * {@link #naturalJoin}. * * @param rightTable The right side table on the join. - * @param columnsToMatch An array of match pair conditions ("leftColumn=rightColumn" or - * "columnFoundInBoth") - * @param columnsToAdd An array of the columns from the right side that need to be added to the - * left side as a result of the match. + * @param columnsToMatch An array of match pair conditions ("leftColumn=rightColumn" or "columnFoundInBoth") + * @param columnsToAdd An array of the columns from the right side that need to be added to the left side as a + * result of the match. * @return a new table joined according to the specification in columnsToMatch and columnsToAdd */ default Table join(Table rightTable, MatchPair[] columnsToMatch, MatchPair[] columnsToAdd) { - return join(rightTable, columnsToMatch, columnsToAdd, - CrossJoinHelper.DEFAULT_NUM_RIGHT_BITS_TO_RESERVE); + return join(rightTable, columnsToMatch, columnsToAdd, CrossJoinHelper.DEFAULT_NUM_RIGHT_BITS_TO_RESERVE); } /** * Perform a cross join with the right table. *

    - * Returns a table that is the cartesian product of left rows X right rows, with one column for - * each of the left table's columns, and one column corresponding to each of the right table's - * columns that are included in the columnsToAdd argument. The rows are ordered first by the - * left table then by the right table. If columnsToMatch is non-empty then the product is - * filtered by the supplied match conditions. + * Returns a table that is the cartesian product of left rows X right rows, with one column for each of the left + * table's columns, and one column corresponding to each of the right table's columns that are included in the + * columnsToAdd argument. The rows are ordered first by the left table then by the right table. If columnsToMatch is + * non-empty then the product is filtered by the supplied match conditions. *

    - * To efficiently produce updates, the bits that represent a key for a given row are split into - * two. Unless specified, join reserves 16 bits to represent a right row. When there are too few - * bits to represent all of the right rows for a given aggregation group the table will shift a - * bit from the left side to the right side. The default of 16 bits was carefully chosen because - * it results in an efficient implementation to process live updates. + * To efficiently produce updates, the bits that represent a key for a given row are split into two. Unless + * specified, join reserves 16 bits to represent a right row. When there are too few bits to represent all of the + * right rows for a given aggregation group the table will shift a bit from the left side to the right side. The + * default of 16 bits was carefully chosen because it results in an efficient implementation to process live + * updates. *

    - * An {@link io.deephaven.db.v2.utils.OutOfKeySpaceException} is thrown when the total number of - * bits needed to express the result table exceeds that needed to represent Long.MAX_VALUE. - * There are a few work arounds: - If the left table is sparse, consider flattening the left - * table. - If there are no key-columns and the right table is sparse, consider flattening the - * right table. - If the maximum size of a right table's group is small, you can reserve fewer - * bits by setting numRightBitsToReserve on initialization. + * An {@link io.deephaven.db.v2.utils.OutOfKeySpaceException} is thrown when the total number of bits needed to + * express the result table exceeds that needed to represent Long.MAX_VALUE. There are a few work arounds: - If the + * left table is sparse, consider flattening the left table. - If there are no key-columns and the right table is + * sparse, consider flattening the right table. - If the maximum size of a right table's group is small, you can + * reserve fewer bits by setting numRightBitsToReserve on initialization. *

    - * Note: If you can prove that a given group has at most one right-row then you should prefer - * using {@link #naturalJoin}. + * Note: If you can prove that a given group has at most one right-row then you should prefer using + * {@link #naturalJoin}. * * @param rightTable The right side table on the join. - * @param columnsToMatch An array of match pair conditions ("leftColumn=rightColumn" or - * "columnFoundInBoth") - * @param columnsToAdd An array of the columns from the right side that need to be added to the - * left side as a result of the match. + * @param columnsToMatch An array of match pair conditions ("leftColumn=rightColumn" or "columnFoundInBoth") + * @param columnsToAdd An array of the columns from the right side that need to be added to the left side as a + * result of the match. * @param numRightBitsToReserve The number of bits to reserve for rightTable groups. * @return a new table joined according to the specification in columnsToMatch and columnsToAdd */ - Table join(Table rightTable, MatchPair[] columnsToMatch, MatchPair[] columnsToAdd, - int numRightBitsToReserve); + Table join(Table rightTable, MatchPair[] columnsToMatch, MatchPair[] columnsToAdd, int numRightBitsToReserve); @Override default Table join(Table rightTable, Collection columnsToMatch, - Collection columnsToAdd) { + Collection columnsToAdd) { return join( - rightTable, - MatchPair.fromMatches(columnsToMatch), - MatchPair.fromAddition(columnsToAdd)); + rightTable, + MatchPair.fromMatches(columnsToMatch), + MatchPair.fromAddition(columnsToAdd)); } @Override default Table join(Table rightTable, Collection columnsToMatch, - Collection columnsToAdd, int numRightBitsToReserve) { + Collection columnsToAdd, int numRightBitsToReserve) { return join( - rightTable, - MatchPair.fromMatches(columnsToMatch), - MatchPair.fromAddition(columnsToAdd), - numRightBitsToReserve); + rightTable, + MatchPair.fromMatches(columnsToMatch), + MatchPair.fromAddition(columnsToAdd), + 
numRightBitsToReserve); } // ----------------------------------------------------------------------------------------------------------------- @@ -1586,21 +1515,19 @@ default Table by(Collection groupByColumns) { @Override @AsyncMethod - default Table by(Collection groupByColumns, - Collection aggregations) { + default Table by(Collection groupByColumns, Collection aggregations) { List optimized = ComboBy.optimize(aggregations); List optimizedOrder = optimized.stream() - .map(ComboBy::getResultPairs) - .flatMap(Stream::of) - .map(MatchPair::left) - .map(ColumnName::of) - .collect(Collectors.toList()); - List userOrder = - AggregationOutputs.of(aggregations).collect(Collectors.toList()); + .map(ComboBy::getResultPairs) + .flatMap(Stream::of) + .map(MatchPair::left) + .map(ColumnName::of) + .collect(Collectors.toList()); + List userOrder = AggregationOutputs.of(aggregations).collect(Collectors.toList()); Table aggregationTable = by( - new ComboAggregateFactory(optimized), - SelectColumn.from(groupByColumns)); + new ComboAggregateFactory(optimized), + SelectColumn.from(groupByColumns)); if (userOrder.equals(optimizedOrder)) { return aggregationTable; @@ -1608,8 +1535,8 @@ default Table by(Collection groupByColumns, // We need to re-order the columns to match the user-provided order List newOrder = - Stream.concat(groupByColumns.stream().map(Selectable::newColumn), userOrder.stream()) - .collect(Collectors.toList()); + Stream.concat(groupByColumns.stream().map(Selectable::newColumn), userOrder.stream()) + .collect(Collectors.toList()); return aggregationTable.view(newOrder); } @@ -1635,24 +1562,23 @@ default Table tailBy(long nRows, Collection groupByColumns) { } /** - * Groups data according to groupByColumns and applies formulaColumn to each of columns not - * altered by the grouping operation. columnParamName is used as place-holder for - * the name of each column inside formulaColumn. 
+ * Groups data according to groupByColumns and applies formulaColumn to each of columns not altered by the grouping + * operation. columnParamName is used as place-holder for the name of each column inside + * formulaColumn. * * @param formulaColumn Formula applied to each column * @param columnParamName The parameter name used as a placeholder for each column * @param groupByColumns The grouping columns {@link Table#by(SelectColumn[])} */ @AsyncMethod - Table applyToAllBy(String formulaColumn, String columnParamName, - SelectColumn... groupByColumns); + Table applyToAllBy(String formulaColumn, String columnParamName, SelectColumn... groupByColumns); /** - * Groups data according to groupByColumns and applies formulaColumn to each of columns not - * altered by the grouping operation. + * Groups data according to groupByColumns and applies formulaColumn to each of columns not altered by the grouping + * operation. * - * @param formulaColumn Formula applied to each column, uses parameter each to refer to - * each colum it being applied to + * @param formulaColumn Formula applied to each column, uses parameter each to refer to each column it is being + * applied to * @param groupByColumns The grouping columns {@link Table#by(SelectColumn...)} */ @AsyncMethod @@ -1661,11 +1587,11 @@ default Table applyToAllBy(String formulaColumn, SelectColumn... groupByColumns) { } /** - * Groups data according to groupByColumns and applies formulaColumn to each of columns not - * altered by the grouping operation. + * Groups data according to groupByColumns and applies formulaColumn to each of columns not altered by the grouping + * operation. 
* - * @param formulaColumn Formula applied to each column, uses parameter each to refer to - * each colum it being applied to + * @param formulaColumn Formula applied to each column, uses parameter each to refer to each column it is being + * applied to * @param groupByColumns The grouping columns {@link Table#by(String...)} */ @AsyncMethod @@ -1679,8 +1605,7 @@ default Table applyToAllBy(String formulaColumn, String groupByColumn) { } /** - * Groups the data column according to groupByColumns and computes the sum for the - * rest of the fields + * Groups the data column according to groupByColumns and computes the sum for the rest of the fields * * @param groupByColumns The grouping columns {@link io.deephaven.db.tables.Table#by(String...)} */ @@ -1688,8 +1613,7 @@ default Table applyToAllBy(String formulaColumn, String groupByColumn) { Table sumBy(SelectColumn... groupByColumns); /** - * Groups the data column according to groupByColumns and computes the sum for the - * rest of the fields + * Groups the data column according to groupByColumns and computes the sum for the rest of the fields * * @param groupByColumns The grouping columns {@link io.deephaven.db.tables.Table#by(String...)} */ @@ -1699,8 +1623,7 @@ default Table sumBy(String... 
groupByColumns) { } /** - * Groups the data column according to groupByColumns and computes the sum for the - * rest of the fields + * Groups the data column according to groupByColumns and computes the sum for the rest of the fields * * @param groupByColumns The grouping columns {@link io.deephaven.db.tables.Table#by(String...)} */ @@ -1720,8 +1643,8 @@ default Table sumBy() { } /** - * Groups the data column according to groupByColumns and computes the sum of the - * absolute values for the rest of the fields + * Groups the data column according to groupByColumns and computes the sum of the absolute values for + * the rest of the fields * * @param groupByColumns The grouping columns {@link io.deephaven.db.tables.Table#by(String...)} */ @@ -1729,8 +1652,8 @@ default Table sumBy() { Table absSumBy(SelectColumn... groupByColumns); /** - * Groups the data column according to groupByColumns and computes the sum of the - * absolute values for the rest of the fields + * Groups the data column according to groupByColumns and computes the sum of the absolute values for + * the rest of the fields * * @param groupByColumns The grouping columns {@link io.deephaven.db.tables.Table#by(String...)} */ @@ -1740,8 +1663,8 @@ default Table absSumBy(String... 
groupByColumns) { } /** - * Groups the data column according to groupByColumns and computes the sum of the - * absolute values for the rest of the fields + * Groups the data column according to groupByColumns and computes the sum of the absolute values for + * the rest of the fields * * @param groupByColumns The grouping columns {@link io.deephaven.db.tables.Table#by(String...)} */ @@ -1761,8 +1684,8 @@ default Table absSumBy() { } /** - * Groups the data column according to groupByColumns and computes the average for - * the rest of the fields + * Groups the data column according to groupByColumns and computes the average for the rest of the + * fields * * @param groupByColumns The grouping columns {@link io.deephaven.db.tables.Table#by(String...)} */ @@ -1770,8 +1693,8 @@ default Table absSumBy() { Table avgBy(SelectColumn... groupByColumns); /** - * Groups the data column according to groupByColumns and computes the average for - * the rest of the fields + * Groups the data column according to groupByColumns and computes the average for the rest of the + * fields * * @param groupByColumns The grouping columns {@link io.deephaven.db.tables.Table#by(String...)} */ @@ -1781,8 +1704,8 @@ default Table avgBy(String... 
groupByColumns) { } /** - * Groups the data column according to groupByColumns and computes the average for - * the rest of the fields + * Groups the data column according to groupByColumns and computes the average for the rest of the + * fields * * @param groupByColumns The grouping columns {@link io.deephaven.db.tables.Table#by(String...)} */ @@ -1802,8 +1725,8 @@ default Table avgBy() { } /** - * Groups the data column according to groupByColumns and computes the weighted - * average using weightColumn for the rest of the fields + * Groups the data column according to groupByColumns and computes the weighted average using + * weightColumn for the rest of the fields * * @param weightColumn the column to use for the weight * @param groupByColumns The grouping columns {@link io.deephaven.db.tables.Table#by(String...)} @@ -1812,8 +1735,8 @@ default Table avgBy() { Table wavgBy(String weightColumn, SelectColumn... groupByColumns); /** - * Groups the data column according to groupByColumns and computes the weighted - * average using weightColumn for the rest of the fields + * Groups the data column according to groupByColumns and computes the weighted average using + * weightColumn for the rest of the fields * * @param weightColumn the column to use for the weight * @param groupByColumns The grouping columns {@link io.deephaven.db.tables.Table#by(String...)} @@ -1824,8 +1747,8 @@ default Table wavgBy(String weightColumn, String... 
groupByColumns) { } /** - * Groups the data column according to groupByColumns and computes the weighted - * average using weightColumn for the rest of the fields + * Groups the data column according to groupByColumns and computes the weighted average using + * weightColumn for the rest of the fields * * @param weightColumn the column to use for the weight * @param groupByColumns The grouping columns {@link io.deephaven.db.tables.Table#by(String...)} @@ -1836,8 +1759,7 @@ default Table wavgBy(String weightColumn, Collection groupByColumns) { } /** - * Produces a single row table with the weighted average using weightColumn for the rest of the - * fields + * Produces a single row table with the weighted average using weightColumn for the rest of the fields * * When the input table is empty, zero output rows are produced. * @@ -1849,12 +1771,12 @@ default Table wavgBy(String weightColumn) { } /** - * Groups the data column according to groupByColumns and computes the weighted sum - * using weightColumn for the rest of the fields + * Groups the data column according to groupByColumns and computes the weighted sum using weightColumn + * for the rest of the fields * - * If the weight column is a floating point type, all result columns will be doubles. If the - * weight column is an integral type, all integral input columns will have long results and all - * floating point input columns will have double results. + * If the weight column is a floating point type, all result columns will be doubles. If the weight column is an + * integral type, all integral input columns will have long results and all floating point input columns will have + * double results. * * @param weightColumn the column to use for the weight * @param groupByColumns The grouping columns {@link io.deephaven.db.tables.Table#by(String...)} @@ -1863,12 +1785,11 @@ default Table wavgBy(String weightColumn) { Table wsumBy(String weightColumn, SelectColumn... 
groupByColumns); /** - * Computes the weighted sum for all rows in the table using weightColumn for the rest of the - * fields + * Computes the weighted sum for all rows in the table using weightColumn for the rest of the fields * - * If the weight column is a floating point type, all result columns will be doubles. If the - * weight column is an integral type, all integral input columns will have long results and all - * floating point input columns will have double results. + * If the weight column is a floating point type, all result columns will be doubles. If the weight column is an + * integral type, all integral input columns will have long results and all floating point input columns will have + * double results. * * @param weightColumn the column to use for the weight */ @@ -1878,12 +1799,12 @@ default Table wsumBy(String weightColumn) { } /** - * Groups the data column according to groupByColumns and computes the weighted sum - * using weightColumn for the rest of the fields + * Groups the data column according to groupByColumns and computes the weighted sum using weightColumn + * for the rest of the fields * - * If the weight column is a floating point type, all result columns will be doubles. If the - * weight column is an integral type, all integral input columns will have long results and all - * floating point input columns will have double results. + * If the weight column is a floating point type, all result columns will be doubles. If the weight column is an + * integral type, all integral input columns will have long results and all floating point input columns will have + * double results. * * @param weightColumn the column to use for the weight * @param groupByColumns The grouping columns {@link io.deephaven.db.tables.Table#by(String...)} @@ -1894,12 +1815,12 @@ default Table wsumBy(String weightColumn, String... 
groupByColumns) { } /** - * Groups the data column according to groupByColumns and computes the weighted sum - * using weightColumn for the rest of the fields + * Groups the data column according to groupByColumns and computes the weighted sum using weightColumn + * for the rest of the fields * - * If the weight column is a floating point type, all result columns will be doubles. If the - * weight column is an integral type, all integral input columns will have long results and all - * floating point input columns will have double results. + * If the weight column is a floating point type, all result columns will be doubles. If the weight column is an + * integral type, all integral input columns will have long results and all floating point input columns will have + * double results. * * @param weightColumn the column to use for the weight * @param groupByColumns The grouping columns {@link io.deephaven.db.tables.Table#by(String...)} @@ -1913,8 +1834,8 @@ default Table wsumBy(String weightColumn, Collection groupByColumns) { Table stdBy(SelectColumn... groupByColumns); /** - * Groups the data column according to groupByColumns and computes the standard - * deviation for the rest of the fields + * Groups the data column according to groupByColumns and computes the standard deviation for the rest + * of the fields * * @param groupByColumns The grouping columns {@link io.deephaven.db.tables.Table#by(String...)} */ @@ -1924,8 +1845,8 @@ default Table stdBy(String... 
groupByColumns) { } /** - * Groups the data column according to groupByColumns and computes the standard - * deviation for the rest of the fields + * Groups the data column according to groupByColumns and computes the standard deviation for the rest + * of the fields * * @param groupByColumns The grouping columns {@link io.deephaven.db.tables.Table#by(String...)} */ @@ -1944,8 +1865,8 @@ default Table stdBy() { } /** - * Groups the data column according to groupByColumns and computes the variance for - * the rest of the fields + * Groups the data column according to groupByColumns and computes the variance for the rest of the + * fields * * @param groupByColumns The grouping columns {@link io.deephaven.db.tables.Table#by(String...)} */ @@ -1953,8 +1874,8 @@ default Table stdBy() { Table varBy(SelectColumn... groupByColumns); /** - * Groups the data column according to groupByColumns and computes the variance for - * the rest of the fields + * Groups the data column according to groupByColumns and computes the variance for the rest of the + * fields * * @param groupByColumns The grouping columns {@link io.deephaven.db.tables.Table#by(String...)} */ @@ -1964,8 +1885,8 @@ default Table varBy(String... 
groupByColumns) { } /** - * Groups the data column according to groupByColumns and computes the variance for - * the rest of the fields + * Groups the data column according to groupByColumns and computes the variance for the rest of the + * fields * * @param groupByColumns The grouping columns {@link io.deephaven.db.tables.Table#by(String...)} */ @@ -1984,8 +1905,7 @@ default Table varBy() { } /** - * Groups the data column according to groupByColumns and retrieves the last for - * the rest of the fields + * Groups the data column according to groupByColumns and retrieves the last for the rest of the fields * * @param groupByColumns The grouping columns {@link io.deephaven.db.tables.Table#by(String...)} */ @@ -1993,8 +1913,7 @@ default Table varBy() { Table lastBy(SelectColumn... groupByColumns); /** - * Groups the data column according to groupByColumns and retrieves the last for - * the rest of the fields + * Groups the data column according to groupByColumns and retrieves the last for the rest of the fields * * @param groupByColumns The grouping columns {@link io.deephaven.db.tables.Table#by(String...)} */ @@ -2004,8 +1923,7 @@ default Table lastBy(String... groupByColumns) { } /** - * Groups the data column according to groupByColumns and retrieves the last for - * the rest of the fields + * Groups the data column according to groupByColumns and retrieves the last for the rest of the fields * * @param groupByColumns The grouping columns {@link io.deephaven.db.tables.Table#by(String...)} */ @@ -2023,8 +1941,8 @@ default Table lastBy() { } /** - * Groups the data column according to groupByColumns and retrieves the first for - * the rest of the fields + * Groups the data column according to groupByColumns and retrieves the first for the rest of the + * fields * * @param groupByColumns The grouping columns {@link io.deephaven.db.tables.Table#by(String...)} */ @@ -2032,8 +1950,8 @@ default Table lastBy() { Table firstBy(SelectColumn... 
groupByColumns); /** - * Groups the data column according to groupByColumns and retrieves the first for - * the rest of the fields + * Groups the data column according to groupByColumns and retrieves the first for the rest of the + * fields * * @param groupByColumns The grouping columns {@link io.deephaven.db.tables.Table#by(String...)} */ @@ -2043,8 +1961,8 @@ default Table firstBy(String... groupByColumns) { } /** - * Groups the data column according to groupByColumns and retrieves the first for - * the rest of the fields + * Groups the data column according to groupByColumns and retrieves the first for the rest of the + * fields * * @param groupByColumns The grouping columns {@link io.deephaven.db.tables.Table#by(String...)} */ @@ -2062,8 +1980,7 @@ default Table firstBy() { } /** - * Groups the data column according to groupByColumns and computes the min for the - * rest of the fields + * Groups the data column according to groupByColumns and computes the min for the rest of the fields * * @param groupByColumns The grouping columns {@link io.deephaven.db.tables.Table#by(String...)} */ @@ -2071,8 +1988,7 @@ default Table firstBy() { Table minBy(SelectColumn... groupByColumns); /** - * Groups the data column according to groupByColumns and computes the min for the - * rest of the fields + * Groups the data column according to groupByColumns and computes the min for the rest of the fields * * @param groupByColumns The grouping columns {@link io.deephaven.db.tables.Table#by(String...)} */ @@ -2082,8 +1998,7 @@ default Table minBy(String... 
groupByColumns) { } /** - * Groups the data column according to groupByColumns and computes the min for the - * rest of the fields + * Groups the data column according to groupByColumns and computes the min for the rest of the fields * * @param groupByColumns The grouping columns {@link io.deephaven.db.tables.Table#by(String...)} */ @@ -2103,21 +2018,17 @@ default Table minBy() { } /** - * Groups the data column according to groupByColumns and computes the max for the - * rest of the fields + * Groups the data column according to groupByColumns and computes the max for the rest of the fields * - * @param groupByColumns The grouping columns {@link io.deephaven.db.tables.Table#by(String...)} - * } + * @param groupByColumns The grouping columns {@link io.deephaven.db.tables.Table#by(String...)} } */ @AsyncMethod Table maxBy(SelectColumn... groupByColumns); /** - * Groups the data column according to groupByColumns and computes the max for the - * rest of the fields + * Groups the data column according to groupByColumns and computes the max for the rest of the fields * - * @param groupByColumns The grouping columns {@link io.deephaven.db.tables.Table#by(String...)} - * } + * @param groupByColumns The grouping columns {@link io.deephaven.db.tables.Table#by(String...)} } */ @AsyncMethod default Table maxBy(String... groupByColumns) { @@ -2125,11 +2036,9 @@ default Table maxBy(String... 
groupByColumns) { } /** - * Groups the data column according to groupByColumns and computes the max for the - * rest of the fields + * Groups the data column according to groupByColumns and computes the max for the rest of the fields * - * @param groupByColumns The grouping columns {@link io.deephaven.db.tables.Table#by(String...)} - * } + * @param groupByColumns The grouping columns {@link io.deephaven.db.tables.Table#by(String...)} } */ @AsyncMethod default Table maxBy(Collection groupByColumns) { @@ -2147,21 +2056,19 @@ default Table maxBy() { } /** - * Groups the data column according to groupByColumns and computes the median for - * the rest of the fields + * Groups the data column according to groupByColumns and computes the median for the rest of the + * fields * - * @param groupByColumns The grouping columns {@link io.deephaven.db.tables.Table#by(String...)} - * } + * @param groupByColumns The grouping columns {@link io.deephaven.db.tables.Table#by(String...)} } */ @AsyncMethod Table medianBy(SelectColumn... groupByColumns); /** - * Groups the data column according to groupByColumns and computes the median for - * the rest of the fields + * Groups the data column according to groupByColumns and computes the median for the rest of the + * fields * - * @param groupByColumns The grouping columns {@link io.deephaven.db.tables.Table#by(String...)} - * } + * @param groupByColumns The grouping columns {@link io.deephaven.db.tables.Table#by(String...)} } */ @AsyncMethod default Table medianBy(String... groupByColumns) { @@ -2169,11 +2076,10 @@ default Table medianBy(String... 
groupByColumns) { } /** - * Groups the data column according to groupByColumns and computes the median for - * the rest of the fields + * Groups the data column according to groupByColumns and computes the median for the rest of the + * fields * - * @param groupByColumns The grouping columns {@link io.deephaven.db.tables.Table#by(String...)} - * } + * @param groupByColumns The grouping columns {@link io.deephaven.db.tables.Table#by(String...)} } */ @AsyncMethod default Table medianBy(Collection groupByColumns) { @@ -2209,9 +2115,8 @@ default Table countBy(String countColumnName) { } /** - * If this table is a stream table, i.e. it has {@link #STREAM_TABLE_ATTRIBUTE} set to - * {@code true}, return a child without the attribute, restoring standard semantics for - * aggregation operations. + * If this table is a stream table, i.e. it has {@link #STREAM_TABLE_ATTRIBUTE} set to {@code true}, return a child + * without the attribute, restoring standard semantics for aggregation operations. * * @return A non-stream child table, or this table if it is not a stream table */ @@ -2225,9 +2130,8 @@ default Table countBy(String countColumnName) { /** * Ungroups a table by converting arrays into columns. * - * @param nullFill indicates if the ungrouped table should allow disparate sized arrays filling - * shorter columns with null values. If set to false, then all arrays should be the same - * length. + * @param nullFill indicates if the ungrouped table should allow disparate sized arrays filling shorter columns with + * null values. If set to false, then all arrays should be the same length. * @param columnsToUngroup the columns to ungroup * @return the ungrouped table */ @@ -2238,24 +2142,24 @@ default Table ungroup(String... columnsToUngroup) { } default Table ungroupAllBut(String... 
columnsNotToUngroup) { - final Set columnsNotToUnwrapSet = - Arrays.stream(columnsNotToUngroup).collect(Collectors.toSet()); + final Set columnsNotToUnwrapSet = Arrays.stream(columnsNotToUngroup).collect(Collectors.toSet()); return ungroup(getDefinition().getColumnStream() - .filter(c -> !columnsNotToUnwrapSet.contains(c.getName()) - && (c.getDataType().isArray() || DBLanguageParser.isDbArray(c.getDataType()))) - .map(ColumnDefinition::getName).toArray(String[]::new)); + .filter(c -> !columnsNotToUnwrapSet.contains(c.getName()) + && (c.getDataType().isArray() || DBLanguageParser.isDbArray(c.getDataType()))) + .map(ColumnDefinition::getName).toArray(String[]::new)); } default Table ungroup() { return ungroup(getDefinition().getColumnStream() - .filter(c -> c.getDataType().isArray() || DBLanguageParser.isDbArray(c.getDataType())) - .map(ColumnDefinition::getName).toArray(String[]::new)); + .filter(c -> c.getDataType().isArray() || DBLanguageParser.isDbArray(c.getDataType())) + .map(ColumnDefinition::getName).toArray(String[]::new)); } default Table ungroup(boolean nullFill) { - return ungroup(nullFill, getDefinition().getColumnStream() - .filter(c -> c.getDataType().isArray() || DBLanguageParser.isDbArray(c.getDataType())) - .map(ColumnDefinition::getName).toArray(String[]::new)); + return ungroup(nullFill, + getDefinition().getColumnStream() + .filter(c -> c.getDataType().isArray() || DBLanguageParser.isDbArray(c.getDataType())) + .map(ColumnDefinition::getName).toArray(String[]::new)); } // ----------------------------------------------------------------------------------------------------------------- @@ -2266,21 +2170,19 @@ default Table ungroup(boolean nullFill) { * Create a {@link TableMap} from this table, keyed by the specified columns. * *

    - * The returned TableMap contains each row in this table in exactly one of the tables within the - * map. If you have exactly one key column the TableMap is keyed by the value in that column. If - * you have zero key columns, then the TableMap is keyed by - * {@code io.deephaven.datastructures.util.SmartKey.EMPTY} (and will contain this table as the + * The returned TableMap contains each row in this table in exactly one of the tables within the map. If you have + * exactly one key column the TableMap is keyed by the value in that column. If you have zero key columns, then the + * TableMap is keyed by {@code io.deephaven.datastructures.util.SmartKey.EMPTY} (and will contain this table as the * value). If you have multiple key columns, then the TableMap is keyed by a - * {@code io.deephaven.datastructures.util.SmartKey}. The SmartKey will have one value for each - * of your column values, in the order specified by keyColumnNames. + * {@code io.deephaven.datastructures.util.SmartKey}. The SmartKey will have one value for each of your column + * values, in the order specified by keyColumnNames. *

    * *

    - * For example if you have a Table keyed by a String column named USym, and a DBDateTime column - * named Expiry; a value could be retrieved from the TableMap with - * {@code tableMap.get(new SmartKey("SPY";, DBTimeUtils.convertDateTime("2020-06-19T16:15:00 NY")))}. - * For a table with an Integer column named Bucket, you simply use the desired value as in - * {@code tableMap.get(1)}. + * For example if you have a Table keyed by a String column named USym, and a DBDateTime column named Expiry; a + * value could be retrieved from the TableMap with + * {@code tableMap.get(new SmartKey("SPY", DBTimeUtils.convertDateTime("2020-06-19T16:15:00 NY")))}. For a table + * with an Integer column named Bucket, you simply use the desired value as in {@code tableMap.get(1)}. *

    * * @param dropKeys if true, drop key columns in the output Tables @@ -2294,21 +2196,19 @@ default Table ungroup(boolean nullFill) { * Create a {@link TableMap} from this table, keyed by the specified columns. * *

    - * The returned TableMap contains each row in this table in exactly one of the tables within the - * map. If you have exactly one key column the TableMap is keyed by the value in that column. If - * you have zero key columns, then the TableMap is keyed by - * {@code io.deephaven.datastructures.util.SmartKey.EMPTY} (and will contain this table as the + * The returned TableMap contains each row in this table in exactly one of the tables within the map. If you have + * exactly one key column the TableMap is keyed by the value in that column. If you have zero key columns, then the + * TableMap is keyed by {@code io.deephaven.datastructures.util.SmartKey.EMPTY} (and will contain this table as the * value). If you have multiple key columns, then the TableMap is keyed by a - * {@code io.deephaven.datastructures.util.SmartKey}. The SmartKey will have one value for each - * of your column values, in the order specified by keyColumnNames. + * {@code io.deephaven.datastructures.util.SmartKey}. The SmartKey will have one value for each of your column + * values, in the order specified by keyColumnNames. *

    * *

    - * For example if you have a Table keyed by a String column named USym, and a DBDateTime column - * named Expiry; a value could be retrieved from the TableMap with - * {@code tableMap.get(new SmartKey("SPY";, DBTimeUtils.convertDateTime("2020-06-19T16:15:00 NY")))}. - * For a table with an Integer column named Bucket, you simply use the desired value as in - * {@code tableMap.get(1)}. + * For example if you have a Table keyed by a String column named USym, and a DBDateTime column named Expiry; a + * value could be retrieved from the TableMap with + * {@code tableMap.get(new SmartKey("SPY", DBTimeUtils.convertDateTime("2020-06-19T16:15:00 NY")))}. For a table + * with an Integer column named Bucket, you simply use the desired value as in {@code tableMap.get(1)}. *

    * * @param keyColumnNames the name of the key columns to use. @@ -2326,9 +2226,9 @@ default TableMap byExternal(String... keyColumnNames) { /** * Create a rollup table. * - * A rollup table aggregates by the specified columns, and then creates a hierarchical table - * which re-aggregates using one less aggregation column on each level. The column that is no - * longer part of the aggregation key is replaced with null on each level. + * A rollup table aggregates by the specified columns, and then creates a hierarchical table which re-aggregates + * using one less aggregation column on each level. The column that is no longer part of the aggregation key is + * replaced with null on each level. * * @param comboAggregateFactory the ComboAggregateFactory describing the aggregation * @param columns the columns to group by @@ -2342,9 +2242,9 @@ default Table rollup(ComboAggregateFactory comboAggregateFactory, Collection columns) { - return rollup(comboAggregateFactory, includeConstituents, - SelectColumnFactory.getExpressions(columns)); + Collection columns) { + return rollup(comboAggregateFactory, includeConstituents, SelectColumnFactory.getExpressions(columns)); } /** * Create a rollup table. * - * A rollup table aggregates by the specified columns, and then creates a hierarchical table - * which re-aggregates using one less aggregation column on each level. The column that is no - * longer part of the aggregation key is replaced with null on each level. + * A rollup table aggregates by the specified columns, and then creates a hierarchical table which re-aggregates + * using one less aggregation column on each level. The column that is no longer part of the aggregation key is + * replaced with null on each level. * * @param comboAggregateFactory the ComboAggregateFactory describing the aggregation * @param columns the columns to group by @@ -2378,9 +2277,9 @@ default Table rollup(ComboAggregateFactory comboAggregateFactory, String... 
colu /** * Create a rollup table. * - * A rollup table aggregates by the specified columns, and then creates a hierarchical table - * which re-aggregates using one less aggregation column on each level. The column that is no - * longer part of the aggregation key is replaced with null on each level. + * A rollup table aggregates by the specified columns, and then creates a hierarchical table which re-aggregates + * using one less aggregation column on each level. The column that is no longer part of the aggregation key is + * replaced with null on each level. * * @param comboAggregateFactory the ComboAggregateFactory describing the aggregation * @param columns the columns to group by @@ -2389,18 +2288,16 @@ default Table rollup(ComboAggregateFactory comboAggregateFactory, String... colu * @return a hierarchical table with the rollup applied */ @AsyncMethod - default Table rollup(ComboAggregateFactory comboAggregateFactory, boolean includeConstituents, - String... columns) { - return rollup(comboAggregateFactory, includeConstituents, - SelectColumnFactory.getExpressions(columns)); + default Table rollup(ComboAggregateFactory comboAggregateFactory, boolean includeConstituents, String... columns) { + return rollup(comboAggregateFactory, includeConstituents, SelectColumnFactory.getExpressions(columns)); } /** * Create a rollup table. * - * A rollup table aggregates by the specified columns, and then creates a hierarchical table - * which re-aggregates using one less aggregation column on each level. The column that is no - * longer part of the aggregation key is replaced with null on each level. + * A rollup table aggregates by the specified columns, and then creates a hierarchical table which re-aggregates + * using one less aggregation column on each level. The column that is no longer part of the aggregation key is + * replaced with null on each level. 
* * @param comboAggregateFactory the ComboAggregateFactory describing the aggregation * @param columns the columns to group by @@ -2435,27 +2332,23 @@ default Table rollup(ComboAggregateFactory comboAggregateFactory) { */ @AsyncMethod default Table rollup(ComboAggregateFactory comboAggregateFactory, boolean includeConstituents) { - return rollup(comboAggregateFactory, includeConstituents, - SelectColumn.ZERO_LENGTH_SELECT_COLUMN_ARRAY); + return rollup(comboAggregateFactory, includeConstituents, SelectColumn.ZERO_LENGTH_SELECT_COLUMN_ARRAY); } @AsyncMethod - Table rollup(ComboAggregateFactory comboAggregateFactory, boolean includeConstituents, - SelectColumn... columns); + Table rollup(ComboAggregateFactory comboAggregateFactory, boolean includeConstituents, SelectColumn... columns); /** * Create a hierarchical tree table. * - * The structure of the table is encoded by an "id" and a "parent" column. The id column should - * represent a unique identifier for a given row, and the parent column indicates which row is - * the parent for a given row. Rows that have a null parent, are shown in the main table. It is - * possible for rows to be "orphaned", if their parent reference is non-null and does not exist - * in the table. + * The structure of the table is encoded by an "id" and a "parent" column. The id column should represent a unique + * identifier for a given row, and the parent column indicates which row is the parent for a given row. Rows that + * have a null parent, are shown in the main table. It is possible for rows to be "orphaned", if their parent + * reference is non-null and does not exist in the table. 
* - * @param idColumn the name of a column containing a unique identifier for a particular row in - * the table - * @param parentColumn the name of a column containing the parent's identifier, null for - * elements that are part of the root table + * @param idColumn the name of a column containing a unique identifier for a particular row in the table + * @param parentColumn the name of a column containing the parent's identifier, null for elements that are part of + * the root table * @return a hierarchical table grouped according to the parentColumn */ @AsyncMethod @@ -2505,8 +2398,8 @@ default Table sort(Collection columnsToSortBy) { *

    * *

    - * Note that this table operates on the table it was invoked on and does not create a new table. - * So in the following code T1 = baseTable.where(...) + * Note that this table operates on the table it was invoked on and does not create a new table. So in the following + * code T1 = baseTable.where(...) * T2 = T1.restrictSortTo("C1") * T3 = T2.clearSortingRestrictions() * @@ -2538,10 +2431,9 @@ default Table snapshotIncremental(Table rightTable, String... stampColumns) { Table snapshotHistory(final Table rightTable); @Override - default Table snapshot(Table baseTable, boolean doInitialSnapshot, - Collection stampColumns) { + default Table snapshot(Table baseTable, boolean doInitialSnapshot, Collection stampColumns) { return snapshot(baseTable, doInitialSnapshot, - stampColumns.stream().map(ColumnName::name).toArray(String[]::new)); + stampColumns.stream().map(ColumnName::name).toArray(String[]::new)); } // ----------------------------------------------------------------------------------------------------------------- @@ -2551,11 +2443,11 @@ default Table snapshot(Table baseTable, boolean doInitialSnapshot, /** * Applies a function to this table. *

    - * This is useful if you have a reference to a table or a proxy and want to run a series of - * operations against the table without each individual operation resulting in an RMI. + * This is useful if you have a reference to a table or a proxy and want to run a series of operations against the + * table without each individual operation resulting in an RMI. * - * @implNote If the LTM is not required the {@link Function.Unary#call(Object)} method should be - * annotated with {@link AsyncMethod}. + * @implNote If the LTM is not required the {@link Function.Unary#call(Object)} method should be annotated with + * {@link AsyncMethod}. * * @param function the function to run, its single argument will be this table * @param the return type of function @@ -2563,7 +2455,7 @@ default Table snapshot(Table baseTable, boolean doInitialSnapshot, */ default R apply(Function.Unary function) { final QueryPerformanceNugget nugget = - QueryPerformanceRecorder.getInstance().getNugget("apply(" + function + ")"); + QueryPerformanceRecorder.getInstance().getNugget("apply(" + function + ")"); try { return function.call(this); @@ -2573,8 +2465,7 @@ default R apply(Function.Unary function) { } /** - * Return true if this table is guaranteed to be flat. The index of a flat table will be from - * 0...numRows-1. + * Return true if this table is guaranteed to be flat. The index of a flat table will be from 0...numRows-1. */ @AsyncMethod boolean isFlat(); @@ -2596,8 +2487,7 @@ default R apply(Function.Unary function) { /** * Set the table's key columns and indicate that each key set will be unique. * - * @return The same table this method was invoked on, with the keyColumns and unique attributes - * set + * @return The same table this method was invoked on, with the keyColumns and unique attributes set */ @AsyncMethod Table withUniqueKeys(String... 
columns); @@ -2614,8 +2504,8 @@ default Table layoutHints(LayoutHintBuilder builder) { Table withTableDescription(String description); /** - * Add a description for a specific column. You may use {@link #withColumnDescription(Map)} to - * set several descriptions at once. + * Add a description for a specific column. You may use {@link #withColumnDescription(Map)} to set several + * descriptions at once. * * @param column the name of the column * @param description the column description @@ -2659,8 +2549,8 @@ default Table withColumnDescription(String column, String description) { // ----------------------------------------------------------------------------------------------------------------- /** - * Release resources held by this table, possibly destructively. This may render the table - * unsuitable or unsafe for further use. + * Release resources held by this table, possibly destructively. This may render the table unsuitable or unsafe for + * further use. * * @apiNote In practice, implementations usually just invoke {@link #releaseCachedResources()}. */ @@ -2669,9 +2559,9 @@ default void close() { } /** - * Attempt to release cached resources held by this table. Unlike {@link #close()}, this must - * not render the table unusable for subsequent read operations. Implementations should be sure - * to call {@code super.releaseCachedResources()}. + * Attempt to release cached resources held by this table. Unlike {@link #close()}, this must not render the table + * unusable for subsequent read operations. Implementations should be sure to call + * {@code super.releaseCachedResources()}. 
*/ default void releaseCachedResources() {} diff --git a/DB/src/main/java/io/deephaven/db/tables/TableCreatorImpl.java b/DB/src/main/java/io/deephaven/db/tables/TableCreatorImpl.java index 0f42060df9b..675b370c656 100644 --- a/DB/src/main/java/io/deephaven/db/tables/TableCreatorImpl.java +++ b/DB/src/main/java/io/deephaven/db/tables/TableCreatorImpl.java @@ -22,8 +22,7 @@ enum TableCreatorImpl implements TableCreator

    { INSTANCE; public static Table create(TableSpec table) { - return TableCreator.create(INSTANCE, TableToOperationsImpl.INSTANCE, - OperationsToTableImpl.INSTANCE, table); + return TableCreator.create(INSTANCE, TableToOperationsImpl.INSTANCE, OperationsToTableImpl.INSTANCE, table); } @Override @@ -39,15 +38,14 @@ public final Table of(EmptyTable emptyTable) { @Override public final Table of(TimeTable timeTable) { final io.deephaven.db.v2.utils.TimeProvider provider = TimeProviderAdapter - .of(timeTable.timeProvider()); + .of(timeTable.timeProvider()); final DBDateTime firstTime = timeTable.startTime().map(DBDateTime::of).orElse(null); return TableTools.timeTable(provider, firstTime, timeTable.interval().toNanos()); } @Override public final Table merge(Iterable
    tables) { - return TableTools - .merge(StreamSupport.stream(tables.spliterator(), false).toArray(Table[]::new)); + return TableTools.merge(StreamSupport.stream(tables.spliterator(), false).toArray(Table[]::new)); } @Override @@ -81,22 +79,19 @@ public final Table merge(Table t1, Table t2, Table t3, Table t4, Table t5, Table } @Override - public final Table merge(Table t1, Table t2, Table t3, Table t4, Table t5, Table t6, Table t7, - Table t8) { + public final Table merge(Table t1, Table t2, Table t3, Table t4, Table t5, Table t6, Table t7, Table t8) { return TableTools.merge(t1, t2, t3, t4, t5, t6, t7, t8); } @Override - public final Table merge(Table t1, Table t2, Table t3, Table t4, Table t5, Table t6, Table t7, - Table t8, Table t9) { + public final Table merge(Table t1, Table t2, Table t3, Table t4, Table t5, Table t6, Table t7, Table t8, Table t9) { return TableTools.merge(t1, t2, t3, t4, t5, t6, t7, t8, t9); } @Override - public final Table merge(Table t1, Table t2, Table t3, Table t4, Table t5, Table t6, Table t7, - Table t8, Table t9, Table... remaining) { - return TableTools.merge( - Stream.concat(Stream.of(t1, t2, t3, t4, t5, t6, t7, t8, t9), Stream.of(remaining)) + public final Table merge(Table t1, Table t2, Table t3, Table t4, Table t5, Table t6, Table t7, Table t8, Table t9, + Table... 
remaining) { + return TableTools.merge(Stream.concat(Stream.of(t1, t2, t3, t4, t5, t6, t7, t8, t9), Stream.of(remaining)) .toArray(Table[]::new)); } @@ -111,8 +106,7 @@ public static io.deephaven.db.v2.utils.TimeProvider of(TimeProvider provider) { return provider.walk(new TimeProviderAdapter()).getOut(); } - private static final io.deephaven.db.v2.utils.TimeProvider SYSTEM_PROVIDER = - DBTimeUtils::currentTime; + private static final io.deephaven.db.v2.utils.TimeProvider SYSTEM_PROVIDER = DBTimeUtils::currentTime; private io.deephaven.db.v2.utils.TimeProvider out; diff --git a/DB/src/main/java/io/deephaven/db/tables/TableDefinition.java b/DB/src/main/java/io/deephaven/db/tables/TableDefinition.java index 9c6b9c7654b..b1e836b6d64 100644 --- a/DB/src/main/java/io/deephaven/db/tables/TableDefinition.java +++ b/DB/src/main/java/io/deephaven/db/tables/TableDefinition.java @@ -25,8 +25,7 @@ /** * Table definition for all Deephaven tables. */ -public class TableDefinition - implements Externalizable, LogOutputAppendable, Copyable { +public class TableDefinition implements Externalizable, LogOutputAppendable, Copyable { private static final long serialVersionUID = -120432133075760976L; private static final String NEW_LINE = System.getProperty("line.separator"); @@ -41,7 +40,7 @@ public static TableDefinition inferFrom(Map sour final String name = e.getKey(); final ColumnSource source = e.getValue(); final ColumnDefinition inferred = - ColumnDefinition.fromGenericType(name, source.getType(), source.getComponentType()); + ColumnDefinition.fromGenericType(name, source.getType(), source.getComponentType()); definitions.add(inferred); } return new TableDefinition(definitions); @@ -60,8 +59,7 @@ public static TableDefinition from(Iterable> headers) { public TableDefinition() {} - public TableDefinition(@NotNull final List types, - @NotNull final List columnNames) { + public TableDefinition(@NotNull final List types, @NotNull final List columnNames) { 
this(getColumnDefinitions(types, columnNames)); } @@ -79,7 +77,7 @@ public TableDefinition(@NotNull final TableDefinition other) { } public static TableDefinition tableDefinition(@NotNull final Class[] types, - @NotNull final String[] columnNames) { + @NotNull final String[] columnNames) { return new TableDefinition(getColumnDefinitions(types, columnNames)); } @@ -104,8 +102,7 @@ public void setColumns(final ColumnDefinition[] columns) { final Set columnNames = new HashSet<>(); for (final ColumnDefinition column : columns) { if (!columnNames.add(column.getName())) { - throw new IllegalArgumentException( - "Duplicate definition for column \"" + column.getName() + "\""); + throw new IllegalArgumentException("Duplicate definition for column \"" + column.getName() + "\""); } } columnNameMap = null; @@ -133,64 +130,58 @@ public Map getColumnNameMap() { if (columnNameMap != null) { return columnNameMap; } - return columnNameMap = Collections - .unmodifiableMap(getColumnStream().collect(Collectors.toMap(ColumnDefinition::getName, - Function.identity(), Assert::neverInvoked, LinkedHashMap::new))); + return columnNameMap = Collections.unmodifiableMap(getColumnStream().collect(Collectors + .toMap(ColumnDefinition::getName, Function.identity(), Assert::neverInvoked, LinkedHashMap::new))); } /** - * @return A freshly-allocated list of column definitions for all partitioning columns, in the - * same relative order as in the column definitions array. + * @return A freshly-allocated list of column definitions for all partitioning columns, in the same relative order + * as in the column definitions array. 
*/ public List getPartitioningColumns() { - return getColumnStream().filter(ColumnDefinition::isPartitioning) - .collect(Collectors.toList()); + return getColumnStream().filter(ColumnDefinition::isPartitioning).collect(Collectors.toList()); } /** - * @return A freshly-allocated list of column definitions for all grouping columns, in the same - * relative order as in the column definitions array. + * @return A freshly-allocated list of column definitions for all grouping columns, in the same relative order as in + * the column definitions array. */ public List getGroupingColumns() { return getColumnStream().filter(ColumnDefinition::isGrouping).collect(Collectors.toList()); } /** - * @return A freshly-allocated array of column names for all grouping columns, in the same - * relative order as in the column definitions array. + * @return A freshly-allocated array of column names for all grouping columns, in the same relative order as in the + * column definitions array. */ public String[] getGroupingColumnNamesArray() { return getColumnStream().filter(ColumnDefinition::isGrouping).map(ColumnDefinition::getName) - .toArray(String[]::new); + .toArray(String[]::new); } /** - * @return A freshly-allocated list of column names in the same order as the column definitions - * array. + * @return A freshly-allocated list of column names in the same order as the column definitions array. */ public List getColumnNames() { return getColumnStream().map(ColumnDefinition::getName).collect(Collectors.toList()); } /** - * @return A freshly-allocated array of column names in the same order as the column definitions - * array. + * @return A freshly-allocated array of column names in the same order as the column definitions array. */ public String[] getColumnNamesArray() { return getColumnStream().map(ColumnDefinition::getName).toArray(String[]::new); } /** - * @return A freshly-allocated list of column types in the same order as the column definitions - * array. 
+ * @return A freshly-allocated list of column types in the same order as the column definitions array. */ public List getColumnTypes() { return getColumnStream().map(ColumnDefinition::getDataType).collect(Collectors.toList()); } /** - * @return A freshly-allocated array of column types in the same order as the column definitions - * array. + * @return A freshly-allocated array of column types in the same order as the column definitions array. */ public Class[] getColumnTypesArray() { return getColumnStream().map(ColumnDefinition::getDataType).toArray(Class[]::new); @@ -198,8 +189,7 @@ public Class[] getColumnTypesArray() { /** * @param columnName the column name to search for - * @return The column definition for the supplied name, or null if no such column exists in this - * table definition. + * @return The column definition for the supplied name, or null if no such column exists in this table definition. */ public ColumnDefinition getColumn(@NotNull final String columnName) { return getColumnNameMap().get(columnName); @@ -207,8 +197,8 @@ public ColumnDefinition getColumn(@NotNull final String columnName) { /** * @param column the ColumnDefinition to search for - * @return The index of the column for the supplied name, or -1 if no such column exists in this - * table definition. Note: This is an O(columns.length) lookup. + * @return The index of the column for the supplied name, or -1 if no such column exists in this table definition. + * Note: This is an O(columns.length) lookup. */ public int getColumnIndex(@NotNull final ColumnDefinition column) { for (int ci = 0; ci < columns.length; ++ci) { @@ -235,10 +225,9 @@ public String getColumnNamesAsString() { } /** - * Tests mutual-compatibility of {@code this} and {@code other}. To be mutually compatible, they - * must have the same number of columns, each matched up with - * {@link ColumnDefinition#isCompatible}. 
As such, this method has an equivalence relation, ie - * {@code A.checkMutualCompatibility(B) == B.checkMutualCompatibility(A)}. + * Tests mutual-compatibility of {@code this} and {@code other}. To be mutually compatible, they must have the same + * number of columns, each matched up with {@link ColumnDefinition#isCompatible}. As such, this method has an + * equivalence relation, ie {@code A.checkMutualCompatibility(B) == B.checkMutualCompatibility(A)}. * * @param other the other definition * @return {@code this} table definition, but in the the column order of {@code other} @@ -250,14 +239,13 @@ public TableDefinition checkMutualCompatibility(@NotNull final TableDefinition o } /** - * Test compatibility of this definition with another. This definition must have all columns of - * the other, and the column definitions in common must be compatible, as defined by + * Test compatibility of this definition with another. This definition must have all columns of the other, and the + * column definitions in common must be compatible, as defined by * {@link ColumnDefinition#isCompatible(ColumnDefinition)}. * *

    - * Note: unlike {@link ColumnDefinition#isCompatible(ColumnDefinition)}, this method does not - * have an equivalence relation. For a stricter check, use - * {@link #checkMutualCompatibility(TableDefinition)}. + * Note: unlike {@link ColumnDefinition#isCompatible(ColumnDefinition)}, this method does not have an equivalence + * relation. For a stricter check, use {@link #checkMutualCompatibility(TableDefinition)}. * * @param other comparison table definition * @return the minimized compatible table definition, in the same order as {@code other} @@ -268,26 +256,25 @@ public TableDefinition checkCompatibility(@NotNull final TableDefinition other) } /** - * Test compatibility of this definition with another. This definition must have all columns of - * the other, and the column definitions in common must be compatible, as defined by + * Test compatibility of this definition with another. This definition must have all columns of the other, and the + * column definitions in common must be compatible, as defined by * {@link ColumnDefinition#isCompatible(ColumnDefinition)}. * *

    * Partitioning columns in other will be ignored if ignorePartitioningColumns is true. * *

    - * Note: unlike {@link ColumnDefinition#isCompatible(ColumnDefinition)}, this method does not - * have an equivalence relation. For a stricter check, use - * {@link #checkMutualCompatibility(TableDefinition)}. + * Note: unlike {@link ColumnDefinition#isCompatible(ColumnDefinition)}, this method does not have an equivalence + * relation. For a stricter check, use {@link #checkMutualCompatibility(TableDefinition)}. * * @param other comparison table definition - * @param ignorePartitioningColumns if true, other definition may contain partitioning columns - * not in this definition + * @param ignorePartitioningColumns if true, other definition may contain partitioning columns not in this + * definition * @return the minimized compatible table definition, in the same order as {@code other} * @throws IncompatibleTableDefinitionException if the definitions are not compatible */ public TableDefinition checkCompatibility(@NotNull final TableDefinition other, - final boolean ignorePartitioningColumns) { + final boolean ignorePartitioningColumns) { List inOrder = new ArrayList<>(); // TODO: need to compare in order and be less permissive with partitioning - @@ -298,48 +285,45 @@ public TableDefinition checkCompatibility(@NotNull final TableDefinition other, continue; final ColumnDefinition myColumn = myNamesToColumns.get(otherColumn.getName()); if (myColumn == null) { - sb.append(NEW_LINE).append("\tMissing column definition for ") - .append(otherColumn.getName()); + sb.append(NEW_LINE).append("\tMissing column definition for ").append(otherColumn.getName()); } else if (!myColumn.isCompatible(otherColumn)) { sb.append(NEW_LINE) - .append("\tColumn definitions aren't compatible - ") - .append("found column ") - .append(myColumn.describeForCompatibility()) - .append(", expected compatibility with ") - .append(otherColumn.describeForCompatibility()); + .append("\tColumn definitions aren't compatible - ") + .append("found column ") + 
.append(myColumn.describeForCompatibility()) + .append(", expected compatibility with ") + .append(otherColumn.describeForCompatibility()); } inOrder.add(myColumn); } if (sb.length() > 0) { - throw new IncompatibleTableDefinitionException( - "Table definition incompatibilities: " + sb.toString()); + throw new IncompatibleTableDefinitionException("Table definition incompatibilities: " + sb.toString()); } return new TableDefinition(inOrder); } /** - * Build a description of the difference between this definition and the other. Should - * correspond to equalsIgnoreOrder logic. + * Build a description of the difference between this definition and the other. Should correspond to + * equalsIgnoreOrder logic. * * @param other another TableDefinition to compare * @param lhs what to call "this" definition * @param rhs what to call the other definition * @return a list of strings representing the difference between two table definitions */ - public List describeDifferences(@NotNull final TableDefinition other, - @NotNull final String lhs, @NotNull final String rhs) { + public List describeDifferences(@NotNull final TableDefinition other, @NotNull final String lhs, + @NotNull final String rhs) { final List differences = new ArrayList<>(); final Map otherColumns = other.getColumnNameMap(); for (final ColumnDefinition thisColumn : columns) { final ColumnDefinition otherColumn = otherColumns.get(thisColumn.getName()); if (otherColumn == null) { - differences - .add(lhs + " column '" + thisColumn.getName() + "' is missing in " + rhs); + differences.add(lhs + " column '" + thisColumn.getName() + "' is missing in " + rhs); } else if (!thisColumn.equals(otherColumn)) { differences.add("column '" + thisColumn.getName() + "' is different ..."); thisColumn.describeDifferences(differences, otherColumn, lhs, rhs, - " " + thisColumn.getName() + ": "); + " " + thisColumn.getName() + ": "); } // else same } @@ -355,8 +339,8 @@ public List describeDifferences(@NotNull final TableDefinition 
other, } /** - * Build a description of the difference between this definition and the other. Should - * correspond to equalsIgnoreOrder logic. + * Build a description of the difference between this definition and the other. Should correspond to + * equalsIgnoreOrder logic. * * @param other another TableDefinition to compare * @param lhs what to call "this" definition @@ -364,8 +348,8 @@ public List describeDifferences(@NotNull final TableDefinition other, * @param separator separate strings in the list of differences with this separator * @return A string in which the differences are enumerated, separated by the given separator */ - public String getDifferenceDescription(@NotNull final TableDefinition other, - @NotNull final String lhs, @NotNull final String rhs, @NotNull final String separator) { + public String getDifferenceDescription(@NotNull final TableDefinition other, @NotNull final String lhs, + @NotNull final String rhs, @NotNull final String separator) { List differences = describeDifferences(other, lhs, rhs); return String.join(separator, differences); } @@ -382,10 +366,9 @@ public boolean equalsIgnoreOrder(@NotNull final TableDefinition other) { return false; } final Iterator> thisColumns = - getColumnStream().sorted(Comparator.comparing(ColumnDefinition::getName)).iterator(); + getColumnStream().sorted(Comparator.comparing(ColumnDefinition::getName)).iterator(); final Iterator> otherColumns = - other.getColumnStream().sorted(Comparator.comparing(ColumnDefinition::getName)) - .iterator(); + other.getColumnStream().sorted(Comparator.comparing(ColumnDefinition::getName)).iterator(); while (thisColumns.hasNext()) { if (!thisColumns.next().equals(otherColumns.next())) { return false; @@ -398,8 +381,8 @@ public boolean equalsIgnoreOrder(@NotNull final TableDefinition other) { * Strict comparison (column-wise only). * * @param other - The object to compare with. 
- * @return True if other is a TableDefinition and contains equal ColumnDefinitions in the same - * order. False otherwise. + * @return True if other is a TableDefinition and contains equal ColumnDefinitions in the same order. False + * otherwise. */ @Override public boolean equals(Object other) { @@ -434,8 +417,8 @@ public int hashCode() { * @param columnNames List of column names, parallel to columnTypes * @return A new array of column definitions from the supplied lists of types and names. */ - private static ColumnDefinition[] getColumnDefinitions( - @NotNull final List columnTypes, @NotNull final List columnNames) { + private static ColumnDefinition[] getColumnDefinitions(@NotNull final List columnTypes, + @NotNull final List columnNames) { Require.eq(columnTypes.size(), "types.size()", columnNames.size(), "columnNames.size()"); final ColumnDefinition[] result = new ColumnDefinition[columnTypes.size()]; @@ -455,12 +438,11 @@ private static ColumnDefinition[] getColumnDefinitions( * @return A new array of column definitions from the supplied lists of types and names. */ private static ColumnDefinition[] getColumnDefinitions( - @NotNull final Class[] columnTypes, @NotNull final String[] columnNames, - ColumnDefinition... additionalColumnDefs) { + @NotNull final Class[] columnTypes, @NotNull final String[] columnNames, + ColumnDefinition... 
additionalColumnDefs) { Require.eq(columnTypes.length, "types.length", columnNames.length, "columnNames.length"); - final ColumnDefinition[] result = - new ColumnDefinition[columnTypes.length + additionalColumnDefs.length]; + final ColumnDefinition[] result = new ColumnDefinition[columnTypes.length + additionalColumnDefs.length]; int ri = 0; for (ColumnDefinition additionalColumnDef : additionalColumnDefs) { result[ri++] = additionalColumnDef; @@ -474,19 +456,18 @@ private static ColumnDefinition[] getColumnDefinitions( } /** - * @return This definition if it's writable, or a freshly-allocated definition that is identical - * but for the columns array, which will exclude all non-writable columns. + * @return This definition if it's writable, or a freshly-allocated definition that is identical but for the columns + * array, which will exclude all non-writable columns. */ public TableDefinition getWritable() { return getWritable(false); } /** - * @return This definition if it's writable, or a freshly-allocated definition that is identical - * but for the columns array, which will exclude all non-writable columns, optionally - * converting partitioning columns to normal columns. - * @param partitioningToNormal Whether partitioning columns should be preserved as normal - * columns, or excluded + * @return This definition if it's writable, or a freshly-allocated definition that is identical but for the columns + * array, which will exclude all non-writable columns, optionally converting partitioning columns to normal + * columns. 
+ * @param partitioningToNormal Whether partitioning columns should be preserved as normal columns, or excluded */ public TableDefinition getWritable(final boolean partitioningToNormal) { final ColumnDefinition[] writableColumns = getWritableColumns(partitioningToNormal); @@ -497,11 +478,10 @@ public TableDefinition getWritable(final boolean partitioningToNormal) { } /** - * @return This definition's array of column definitions if they're all writable, or a - * freshly-allocated array of column definitions which will exclude all non-writable - * columns, optionally converting partitioning columns to normal columns. - * @param partitioningToNormal Whether partitioning columns should be preserved as normal - * columns, or excluded + * @return This definition's array of column definitions if they're all writable, or a freshly-allocated array of + * column definitions which will exclude all non-writable columns, optionally converting partitioning + * columns to normal columns. + * @param partitioningToNormal Whether partitioning columns should be preserved as normal columns, or excluded */ public ColumnDefinition[] getWritableColumns(final boolean partitioningToNormal) { if (getColumnStream().anyMatch(c -> !c.isDirect())) { @@ -513,8 +493,7 @@ public ColumnDefinition[] getWritableColumns(final boolean partitioningToNorm return c; }).toArray(ColumnDefinition[]::new); } - return getColumnStream().filter(ColumnDefinition::isDirect) - .toArray(ColumnDefinition[]::new); + return getColumnStream().filter(ColumnDefinition::isDirect).toArray(ColumnDefinition[]::new); } return columns; } @@ -535,8 +514,7 @@ public Table getColumnDefinitionsTable() { columnGrouping.add(cDef.isGrouping()); } - final String[] resultColumnNames = - {"Name", "DataType", "ColumnType", "IsPartitioning", "IsGrouping"}; + final String[] resultColumnNames = {"Name", "DataType", "ColumnType", "IsPartitioning", "IsGrouping"}; final Object[] resultValues = { columnNames.toArray(new 
String[columnNames.size()]), columnDataTypes.toArray(new String[columnDataTypes.size()]), @@ -549,16 +527,15 @@ public Table getColumnDefinitionsTable() { } /** - * Helper method to assist with definition creation for user-namespace partitioned tables. This - * version automatically converts grouping columns to normal columns. + * Helper method to assist with definition creation for user-namespace partitioned tables. This version + * automatically converts grouping columns to normal columns. * * @param partitioningColumnName The name of the column to use for partitioning * @param baseDefinition The definition to work from * @return A new definition suitable for writing partitioned tables with */ - public static TableDefinition createUserPartitionedTableDefinition( - @NotNull final String partitioningColumnName, - @NotNull final TableDefinition baseDefinition) { + public static TableDefinition createUserPartitionedTableDefinition(@NotNull final String partitioningColumnName, + @NotNull final TableDefinition baseDefinition) { return createUserPartitionedTableDefinition(partitioningColumnName, baseDefinition, true); } @@ -571,10 +548,9 @@ public static TableDefinition createUserPartitionedTableDefinition( * @return A new definition suitable for writing partitioned tables with */ @SuppressWarnings("WeakerAccess") - public static TableDefinition createUserPartitionedTableDefinition( - @NotNull final String partitioningColumnName, - @NotNull final TableDefinition baseDefinition, - final boolean groupingColumnsAsNormal) { + public static TableDefinition createUserPartitionedTableDefinition(@NotNull final String partitioningColumnName, + @NotNull final TableDefinition baseDefinition, + final boolean groupingColumnsAsNormal) { final List columnDefs = new ArrayList<>(); columnDefs.add(ColumnDefinition.ofShort(partitioningColumnName).withPartitioning()); final List baseDefs = new ArrayList<>(baseDefinition.getColumnList()); @@ -585,8 +561,7 @@ public static TableDefinition 
createUserPartitionedTableDefinition( continue; } if (current.getColumnType() != ColumnDefinition.COLUMNTYPE_NORMAL && - (current.getColumnType() != ColumnDefinition.COLUMNTYPE_GROUPING - || groupingColumnsAsNormal)) { + (current.getColumnType() != ColumnDefinition.COLUMNTYPE_GROUPING || groupingColumnsAsNormal)) { iter.set(current.withNormal()); } } diff --git a/DB/src/main/java/io/deephaven/db/tables/dbarrays/DbArray.java b/DB/src/main/java/io/deephaven/db/tables/dbarrays/DbArray.java index b38beb1779d..59e1c1a6956 100644 --- a/DB/src/main/java/io/deephaven/db/tables/dbarrays/DbArray.java +++ b/DB/src/main/java/io/deephaven/db/tables/dbarrays/DbArray.java @@ -53,8 +53,7 @@ default Chunk toChunk() { @Override default void fillChunk(WritableChunk destChunk) { - destChunk.asWritableObjectChunk().copyFromTypedArray(toArray(), 0, destChunk.size(), - (int) size()); + destChunk.asWritableObjectChunk().copyFromTypedArray(toArray(), 0, destChunk.size(), (int) size()); } static String defaultValToString(final Object val) { @@ -73,8 +72,7 @@ static String toString(@NotNull final DbArray array, final int prefixLength) return "[]"; } - final Function valToString = - DbArrayBase.classToHelper(array.getComponentType()); + final Function valToString = DbArrayBase.classToHelper(array.getComponentType()); final StringBuilder builder = new StringBuilder("["); final int displaySize = (int) Math.min(array.size(), prefixLength); diff --git a/DB/src/main/java/io/deephaven/db/tables/dbarrays/DbArrayBase.java b/DB/src/main/java/io/deephaven/db/tables/dbarrays/DbArrayBase.java index 017a8ee89c8..22043d5a04e 100644 --- a/DB/src/main/java/io/deephaven/db/tables/dbarrays/DbArrayBase.java +++ b/DB/src/main/java/io/deephaven/db/tables/dbarrays/DbArrayBase.java @@ -16,8 +16,7 @@ import java.util.function.Function; import java.util.stream.LongStream; -public interface DbArrayBase - extends Serializable, LongSizedDataStructure { +public interface DbArrayBase extends Serializable, 
LongSizedDataStructure { long serialVersionUID = -2429677814745466454L; String NULL_ELEMENT_STRING = " "; @@ -45,27 +44,24 @@ default boolean isEmpty() { /** Return a version of this DbArrayBase that is flattened out to only reference memory. */ DBARRAY getDirect(); - static long clampIndex(final long validFromInclusive, final long validToExclusive, - final long index) { + static long clampIndex(final long validFromInclusive, final long validToExclusive, final long index) { return index < validFromInclusive || index >= validToExclusive ? -1 : index; } static long[] mapSelectedPositionRange(@NotNull final long[] currentPositions, - final long selectedRangeStartInclusive, final long selectedRangeEndExclusive) { - Assert.leq(selectedRangeStartInclusive, "selectedRangeStartInclusive", - selectedRangeEndExclusive, "selectedRangeEndExclusive"); + final long selectedRangeStartInclusive, final long selectedRangeEndExclusive) { + Assert.leq(selectedRangeStartInclusive, "selectedRangeStartInclusive", selectedRangeEndExclusive, + "selectedRangeEndExclusive"); return LongStream.range(selectedRangeStartInclusive, selectedRangeEndExclusive) - .map(s -> s < 0 || s >= currentPositions.length ? -1 - : currentPositions[LongSizedDataStructure.intSize("mapSelectedPositionRange", s)]) - .toArray(); + .map(s -> s < 0 || s >= currentPositions.length ? -1 + : currentPositions[LongSizedDataStructure.intSize("mapSelectedPositionRange", s)]) + .toArray(); } static long[] mapSelectedPositions(@NotNull final long[] currentPositions, - @NotNull final long[] selectedPositions) { - return Arrays.stream(selectedPositions) - .map(s -> s < 0 || s >= currentPositions.length ? -1 - : currentPositions[LongSizedDataStructure.intSize("mapSelectedPositions", s)]) - .toArray(); + @NotNull final long[] selectedPositions) { + return Arrays.stream(selectedPositions).map(s -> s < 0 || s >= currentPositions.length ? 
-1 + : currentPositions[LongSizedDataStructure.intSize("mapSelectedPositions", s)]).toArray(); } static Function classToHelper(final Class clazz) { diff --git a/DB/src/main/java/io/deephaven/db/tables/dbarrays/DbArrayDirect.java b/DB/src/main/java/io/deephaven/db/tables/dbarrays/DbArrayDirect.java index 0de36a2f1b5..c819e150de4 100644 --- a/DB/src/main/java/io/deephaven/db/tables/dbarrays/DbArrayDirect.java +++ b/DB/src/main/java/io/deephaven/db/tables/dbarrays/DbArrayDirect.java @@ -15,8 +15,7 @@ public class DbArrayDirect implements DbArray { public DbArrayDirect(T... data) { this.data = data; - componentType = - (Class) (data == null ? Object.class : data.getClass().getComponentType()); + componentType = (Class) (data == null ? Object.class : data.getClass().getComponentType()); } @Override diff --git a/DB/src/main/java/io/deephaven/db/tables/dbarrays/DbArraySlice.java b/DB/src/main/java/io/deephaven/db/tables/dbarrays/DbArraySlice.java index 6beaa53a376..9e52facc234 100644 --- a/DB/src/main/java/io/deephaven/db/tables/dbarrays/DbArraySlice.java +++ b/DB/src/main/java/io/deephaven/db/tables/dbarrays/DbArraySlice.java @@ -25,12 +25,11 @@ public class DbArraySlice extends DbArray.Indirect { private final long innerArrayValidFromInclusive; private final long innerArrayValidToExclusive; - private DbArraySlice(@NotNull final DbArray innerArray, final long offsetIndex, - final long length, final long innerArrayValidFromInclusive, - final long innerArrayValidToExclusive) { + private DbArraySlice(@NotNull final DbArray innerArray, final long offsetIndex, final long length, + final long innerArrayValidFromInclusive, final long innerArrayValidToExclusive) { Assert.geqZero(length, "length"); - Assert.leq(innerArrayValidFromInclusive, "innerArrayValidFromInclusive", - innerArrayValidToExclusive, "innerArrayValidToExclusive"); + Assert.leq(innerArrayValidFromInclusive, "innerArrayValidFromInclusive", innerArrayValidToExclusive, + "innerArrayValidToExclusive"); 
this.innerArray = innerArray; this.offsetIndex = offsetIndex; this.length = length; @@ -38,17 +37,16 @@ private DbArraySlice(@NotNull final DbArray innerArray, final long offsetInde this.innerArrayValidToExclusive = innerArrayValidToExclusive; } - public DbArraySlice(@NotNull final DbArray innerArray, final long offsetIndex, - final long length) { + public DbArraySlice(@NotNull final DbArray innerArray, final long offsetIndex, final long length) { this(innerArray, offsetIndex, length, - clampLong(0, innerArray.size(), offsetIndex), - clampLong(0, innerArray.size(), offsetIndex + length)); + clampLong(0, innerArray.size(), offsetIndex), + clampLong(0, innerArray.size(), offsetIndex + length)); } @Override public T get(final long index) { - return innerArray.get(clampIndex(innerArrayValidFromInclusive, innerArrayValidToExclusive, - index + offsetIndex)); + return innerArray + .get(clampIndex(innerArrayValidFromInclusive, innerArrayValidToExclusive, index + offsetIndex)); } @Override @@ -57,29 +55,27 @@ public DbArray subArray(final long fromIndexInclusive, final long toIndexExcl final long newLength = toIndexExclusive - fromIndexInclusive; final long newOffsetIndex = offsetIndex + fromIndexInclusive; return new DbArraySlice<>(innerArray, newOffsetIndex, newLength, - clampLong(innerArrayValidFromInclusive, innerArrayValidToExclusive, newOffsetIndex), - clampLong(innerArrayValidFromInclusive, innerArrayValidToExclusive, - newOffsetIndex + newLength)); + clampLong(innerArrayValidFromInclusive, innerArrayValidToExclusive, newOffsetIndex), + clampLong(innerArrayValidFromInclusive, innerArrayValidToExclusive, newOffsetIndex + newLength)); } @Override public DbArray subArrayByPositions(final long[] positions) { - return innerArray.subArrayByPositions( - Arrays.stream(positions).map(p -> clampIndex(innerArrayValidFromInclusive, - innerArrayValidToExclusive, p + offsetIndex)).toArray()); + return innerArray.subArrayByPositions(Arrays.stream(positions) + .map(p -> 
clampIndex(innerArrayValidFromInclusive, innerArrayValidToExclusive, p + offsetIndex)) + .toArray()); } @Override public T[] toArray() { if (innerArray instanceof DbArrayDirect && offsetIndex >= innerArrayValidFromInclusive - && offsetIndex + length <= innerArrayValidToExclusive) { - return Arrays.copyOfRange(innerArray.toArray(), - LongSizedDataStructure.intSize("toArray", offsetIndex), - LongSizedDataStructure.intSize("toArray", offsetIndex + length)); + && offsetIndex + length <= innerArrayValidToExclusive) { + return Arrays.copyOfRange(innerArray.toArray(), LongSizedDataStructure.intSize("toArray", offsetIndex), + LongSizedDataStructure.intSize("toArray", offsetIndex + length)); } // noinspection unchecked - final T[] result = (T[]) Array.newInstance(getComponentType(), - LongSizedDataStructure.intSize("toArray", length)); + final T[] result = + (T[]) Array.newInstance(getComponentType(), LongSizedDataStructure.intSize("toArray", length)); for (int ii = 0; ii < length; ++ii) { result[ii] = get(ii); } diff --git a/DB/src/main/java/io/deephaven/db/tables/dbarrays/DbSubArray.java b/DB/src/main/java/io/deephaven/db/tables/dbarrays/DbSubArray.java index e6433c3f98e..f451f1374d7 100644 --- a/DB/src/main/java/io/deephaven/db/tables/dbarrays/DbSubArray.java +++ b/DB/src/main/java/io/deephaven/db/tables/dbarrays/DbSubArray.java @@ -26,20 +26,18 @@ public T get(final long index) { if (index < 0 || index >= positions.length) { return null; } - return innerArray - .get(positions[LongSizedDataStructure.intSize("subarray array access", index)]); + return innerArray.get(positions[LongSizedDataStructure.intSize("subarray array access", index)]); } @Override public DbArray subArray(final long fromIndexInclusive, final long toIndexExclusive) { return innerArray.subArrayByPositions( - DbArrayBase.mapSelectedPositionRange(positions, fromIndexInclusive, toIndexExclusive)); + DbArrayBase.mapSelectedPositionRange(positions, fromIndexInclusive, toIndexExclusive)); } @Override public 
DbArray subArrayByPositions(final long[] positions) { - return innerArray - .subArrayByPositions(DbArrayBase.mapSelectedPositions(this.positions, positions)); + return innerArray.subArrayByPositions(DbArrayBase.mapSelectedPositions(this.positions, positions)); } @Override @@ -67,8 +65,7 @@ public T getPrev(final long index) { if (index < 0 || index >= positions.length) { return null; } - return innerArray - .getPrev(positions[LongSizedDataStructure.intSize("DbSubArray getPrev", index)]); + return innerArray.getPrev(positions[LongSizedDataStructure.intSize("DbSubArray getPrev", index)]); } @Override diff --git a/DB/src/main/java/io/deephaven/db/tables/dbarrays/ReplicateDbArrays.java b/DB/src/main/java/io/deephaven/db/tables/dbarrays/ReplicateDbArrays.java index 1eb2fdc7bd1..5243548a759 100644 --- a/DB/src/main/java/io/deephaven/db/tables/dbarrays/ReplicateDbArrays.java +++ b/DB/src/main/java/io/deephaven/db/tables/dbarrays/ReplicateDbArrays.java @@ -21,62 +21,50 @@ public class ReplicateDbArrays { public static void main(String[] args) throws IOException { Map serialVersionUIDs = new HashMap<>(); serialVersionUIDs.put("io.deephaven.db.tables.dbarrays.DbArrayBase", -2429677814745466454L); - serialVersionUIDs.put("io.deephaven.db.tables.dbarrays.DbArrayDirect", - 9111886364211462917L); - serialVersionUIDs.put("io.deephaven.db.tables.dbarrays.DbBooleanArrayDirect", - -9116229390345474761L); + serialVersionUIDs.put("io.deephaven.db.tables.dbarrays.DbArrayDirect", 9111886364211462917L); + serialVersionUIDs.put("io.deephaven.db.tables.dbarrays.DbBooleanArrayDirect", -9116229390345474761L); serialVersionUIDs.put("io.deephaven.db.tables.dbarrays.DbCharArray", -1373264425081841175L); serialVersionUIDs.put("io.deephaven.db.tables.dbarrays.DbLongArray", -4934601086974582202L); serialVersionUIDs.put("io.deephaven.db.tables.dbarrays.DbIntArray", -4282375411744560278L); - serialVersionUIDs.put("io.deephaven.db.tables.dbarrays.DbShortArray", - -6562228894877343013L); + 
serialVersionUIDs.put("io.deephaven.db.tables.dbarrays.DbShortArray", -6562228894877343013L); serialVersionUIDs.put("io.deephaven.db.tables.dbarrays.DbByteArray", 8519130615638683196L); - serialVersionUIDs.put("io.deephaven.db.tables.dbarrays.DbDoubleArray", - 7218901311693729986L); - serialVersionUIDs.put("io.deephaven.db.tables.dbarrays.DbFloatArray", - -1889118072737983807L); - serialVersionUIDs.put("io.deephaven.db.tables.dbarrays.DbCharArrayDirect", - 3636374971797603565L); - serialVersionUIDs.put("io.deephaven.db.tables.dbarrays.DbLongArrayDirect", - 1233975234000551534L); - serialVersionUIDs.put("io.deephaven.db.tables.dbarrays.DbIntArrayDirect", - -7790095389322728763L); - serialVersionUIDs.put("io.deephaven.db.tables.dbarrays.DbShortArrayDirect", - -4415134364550246624L); - serialVersionUIDs.put("io.deephaven.db.tables.dbarrays.DbByteArrayDirect", - 5978679490703697461L); - serialVersionUIDs.put("io.deephaven.db.tables.dbarrays.DbDoubleArrayDirect", - 3262776153086160765L); - serialVersionUIDs.put("io.deephaven.db.tables.dbarrays.DbFloatArrayDirect", - -8263599481663466384L); + serialVersionUIDs.put("io.deephaven.db.tables.dbarrays.DbDoubleArray", 7218901311693729986L); + serialVersionUIDs.put("io.deephaven.db.tables.dbarrays.DbFloatArray", -1889118072737983807L); + serialVersionUIDs.put("io.deephaven.db.tables.dbarrays.DbCharArrayDirect", 3636374971797603565L); + serialVersionUIDs.put("io.deephaven.db.tables.dbarrays.DbLongArrayDirect", 1233975234000551534L); + serialVersionUIDs.put("io.deephaven.db.tables.dbarrays.DbIntArrayDirect", -7790095389322728763L); + serialVersionUIDs.put("io.deephaven.db.tables.dbarrays.DbShortArrayDirect", -4415134364550246624L); + serialVersionUIDs.put("io.deephaven.db.tables.dbarrays.DbByteArrayDirect", 5978679490703697461L); + serialVersionUIDs.put("io.deephaven.db.tables.dbarrays.DbDoubleArrayDirect", 3262776153086160765L); + serialVersionUIDs.put("io.deephaven.db.tables.dbarrays.DbFloatArrayDirect", -8263599481663466384L); 
- ReplicatePrimitiveCode.charToAllButBooleanAndFloats(DbCharArray.class, - ReplicatePrimitiveCode.MAIN_SRC, serialVersionUIDs); + ReplicatePrimitiveCode.charToAllButBooleanAndFloats(DbCharArray.class, ReplicatePrimitiveCode.MAIN_SRC, + serialVersionUIDs); - final String floatPath = ReplicatePrimitiveCode.charToFloat(DbCharArray.class, - ReplicatePrimitiveCode.MAIN_SRC, serialVersionUIDs); + final String floatPath = ReplicatePrimitiveCode.charToFloat(DbCharArray.class, ReplicatePrimitiveCode.MAIN_SRC, + serialVersionUIDs); final File floatFile = new File(floatPath); List floatLines = FileUtils.readLines(floatFile, Charset.defaultCharset()); floatLines = ReplicateUtilities.simpleFixup(floatLines, "elementEquals", - "aArray\\.get\\(ei\\) != bArray\\.get\\(ei\\)", - "Float.floatToIntBits(aArray.get(ei)) != Float.floatToIntBits(bArray.get(ei))"); + "aArray\\.get\\(ei\\) != bArray\\.get\\(ei\\)", + "Float.floatToIntBits(aArray.get(ei)) != Float.floatToIntBits(bArray.get(ei))"); FileUtils.writeLines(floatFile, floatLines); final String doublePath = ReplicatePrimitiveCode.charToDouble(DbCharArray.class, - ReplicatePrimitiveCode.MAIN_SRC, serialVersionUIDs); + ReplicatePrimitiveCode.MAIN_SRC, serialVersionUIDs); final File doubleFile = new File(doublePath); List doubleLines = FileUtils.readLines(doubleFile, Charset.defaultCharset()); doubleLines = ReplicateUtilities.simpleFixup(doubleLines, "elementEquals", - "aArray\\.get\\(ei\\) != bArray\\.get\\(ei\\)", - "Double.doubleToLongBits(aArray.get(ei)) != Double.doubleToLongBits(bArray.get(ei))"); + "aArray\\.get\\(ei\\) != bArray\\.get\\(ei\\)", + "Double.doubleToLongBits(aArray.get(ei)) != Double.doubleToLongBits(bArray.get(ei))"); FileUtils.writeLines(doubleFile, doubleLines); - ReplicatePrimitiveCode.charToAllButBoolean(DbCharArrayDirect.class, - ReplicatePrimitiveCode.MAIN_SRC, serialVersionUIDs); - ReplicatePrimitiveCode.charToAllButBoolean(DbCharArraySlice.class, - ReplicatePrimitiveCode.MAIN_SRC, serialVersionUIDs); - 
ReplicatePrimitiveCode.charToAllButBoolean(DbSubCharArray.class, - ReplicatePrimitiveCode.MAIN_SRC, serialVersionUIDs); + ReplicatePrimitiveCode.charToAllButBoolean(DbCharArrayDirect.class, ReplicatePrimitiveCode.MAIN_SRC, + serialVersionUIDs); + ReplicatePrimitiveCode.charToAllButBoolean(DbCharArraySlice.class, ReplicatePrimitiveCode.MAIN_SRC, + serialVersionUIDs); + ReplicatePrimitiveCode.charToAllButBoolean(DbSubCharArray.class, ReplicatePrimitiveCode.MAIN_SRC, + serialVersionUIDs); // Uncomment if running from the IDE: // io.deephaven.db.v2.dbarrays.ReplicateDbArrayColumnWrappers.main(new String[0]); diff --git a/DB/src/main/java/io/deephaven/db/tables/lang/DBLanguageFunctionGenerator.java b/DB/src/main/java/io/deephaven/db/tables/lang/DBLanguageFunctionGenerator.java index 775313180a2..2a21d59d62a 100644 --- a/DB/src/main/java/io/deephaven/db/tables/lang/DBLanguageFunctionGenerator.java +++ b/DB/src/main/java/io/deephaven/db/tables/lang/DBLanguageFunctionGenerator.java @@ -39,346 +39,326 @@ public static void main(String args[]) { // 12 - optional nonzero literal value of param type 2 (for testing) MessageFormat varVarFormatter = new MessageFormat("" + - " public static {3} {0}({1} a, {2} b)'{'\n" + - " return a==QueryConstants.NULL_{4} || b==QueryConstants.NULL_{5} ? QueryConstants.NULL_{6} : a{7}{8}b;\n" - + - " '}'"); + " public static {3} {0}({1} a, {2} b)'{'\n" + + " return a==QueryConstants.NULL_{4} || b==QueryConstants.NULL_{5} ? 
QueryConstants.NULL_{6} : a{7}{8}b;\n" + + + " '}'"); MessageFormat varVarTestFormatter = new MessageFormat("" + - " public static void test_{0}_{1}_{2}() '{'\n" + - " final {1} value1 = {11};\n" + - " final {2} value2 = {12};\n" + - " final {1} zero1 = 0;\n" + - " final {2} zero2 = 0;\n" + - "\n" + - " {3} dbResult = -1, expectedResult = -1;\n" + - " int compareResult;\n" + - " String description;\n" + - "\n" + - " try '{'\n" + - " dbResult = DBLanguageFunctionUtil.{0}(value1, value2);\n" + - " expectedResult = value1{7}{8}value2;\n" + - " compareResult = {13}.compare(dbResult, expectedResult);\n" + - " description = \"{13}.compare(DBLanguageFunctionUtil.{0}(value1, value2), value1{7}{8}value2)\";\n" - + - " TestCase.assertEquals(description, 0, compareResult);\n" + - /* - * ---------- This one runs into ArithmeticExceptions doing stuff like 0 % 0 ---------- - * "\n" + " dbResult = DBLanguageFunctionUtil.{0}(value1, zero2);\n" + - * " expectedResult = value1{7}{8}zero2;\n" + - * " compareResult = {13}.compare(dbResult, expectedResult);\n" + - * " description = \"{13}.compare(DBLanguageFunctionUtil.{0}(value1, zero2), value1{7}{8}zero2)\";\n" - * + " TestCase.assertEquals(description, 0, compareResult);\n" + - */ - "\n" + - " dbResult = DBLanguageFunctionUtil.{0}(value1, QueryConstants.NULL_{5});\n" - + - " expectedResult = QueryConstants.NULL_{6};\n" + - " compareResult = {13}.compare(dbResult, expectedResult);\n" + - " description = \"{13}.compare(DBLanguageFunctionUtil.{0}(value1, QueryConstants.NULL_{5}), QueryConstants.NULL_{6})\";\n" - + - " TestCase.assertEquals(description, 0, compareResult);\n" + - "\n" + - " dbResult = DBLanguageFunctionUtil.{0}(zero1, value2);\n" + - " expectedResult = zero1{7}{8}value2;\n" + - " compareResult = {13}.compare(dbResult, expectedResult);\n" + - " description = \"{13}.compare(DBLanguageFunctionUtil.{0}(zero1, value2), zero1{7}{8}value2)\";\n" - + - " TestCase.assertEquals(description, 0, compareResult);\n" + - "\n" + - " 
dbResult = DBLanguageFunctionUtil.{0}(QueryConstants.NULL_{4}, value2);\n" - + - " expectedResult = QueryConstants.NULL_{6};\n" + - " compareResult = {13}.compare(dbResult, expectedResult);\n" + - " description = \"{13}.compare(DBLanguageFunctionUtil.{0}(QueryConstants.NULL_{4}, value2), QueryConstants.NULL_{6})\";\n" - + - " TestCase.assertEquals(description, 0, compareResult);\n" + - "\n" + - " dbResult = DBLanguageFunctionUtil.{0}(QueryConstants.NULL_{4}, QueryConstants.NULL_{5});\n" - + - " expectedResult = QueryConstants.NULL_{6};\n" + - " compareResult = {13}.compare(dbResult, expectedResult);\n" + - " description = \"{13}.compare(DBLanguageFunctionUtil.{0}(QueryConstants.NULL_{4}, QueryConstants.NULL_{5}), QueryConstants.NULL_{6})\";\n" - + - " TestCase.assertEquals(description, 0, compareResult);\n" + - /*---------- Same issue as above ---------- - "\n" + - " dbResult = DBLanguageFunctionUtil.{0}(zero1, zero2);\n" + - " expectedResult = zero1{7}{8}zero2;\n" + - " compareResult = {13}.compare(dbResult, expectedResult);\n" + - " description = \"{13}.compare(DBLanguageFunctionUtil.{0}(zero1, zero2), zero1{7}{8}zero2)\";\n" + - " TestCase.assertEquals(description, 0, compareResult);\n" +*/ - " '}' catch (Exception ex) '{'\n" + - " throw new RuntimeException(\"Comparison failure: dbResult=\" + dbResult + \", expectedResult=\" + expectedResult, ex);\n" - + - " '}'\n" + - "\n" + - " '}'"); + " public static void test_{0}_{1}_{2}() '{'\n" + + " final {1} value1 = {11};\n" + + " final {2} value2 = {12};\n" + + " final {1} zero1 = 0;\n" + + " final {2} zero2 = 0;\n" + + "\n" + + " {3} dbResult = -1, expectedResult = -1;\n" + + " int compareResult;\n" + + " String description;\n" + + "\n" + + " try '{'\n" + + " dbResult = DBLanguageFunctionUtil.{0}(value1, value2);\n" + + " expectedResult = value1{7}{8}value2;\n" + + " compareResult = {13}.compare(dbResult, expectedResult);\n" + + " description = \"{13}.compare(DBLanguageFunctionUtil.{0}(value1, value2), 
value1{7}{8}value2)\";\n" + + + " TestCase.assertEquals(description, 0, compareResult);\n" + + /* + * ---------- This one runs into ArithmeticExceptions doing stuff like 0 % 0 ---------- "\n" + + * " dbResult = DBLanguageFunctionUtil.{0}(value1, zero2);\n" + + * " expectedResult = value1{7}{8}zero2;\n" + + * " compareResult = {13}.compare(dbResult, expectedResult);\n" + + * " description = \"{13}.compare(DBLanguageFunctionUtil.{0}(value1, zero2), value1{7}{8}zero2)\";\n" + * + " TestCase.assertEquals(description, 0, compareResult);\n" + + */ + "\n" + + " dbResult = DBLanguageFunctionUtil.{0}(value1, QueryConstants.NULL_{5});\n" + + " expectedResult = QueryConstants.NULL_{6};\n" + + " compareResult = {13}.compare(dbResult, expectedResult);\n" + + " description = \"{13}.compare(DBLanguageFunctionUtil.{0}(value1, QueryConstants.NULL_{5}), QueryConstants.NULL_{6})\";\n" + + + " TestCase.assertEquals(description, 0, compareResult);\n" + + "\n" + + " dbResult = DBLanguageFunctionUtil.{0}(zero1, value2);\n" + + " expectedResult = zero1{7}{8}value2;\n" + + " compareResult = {13}.compare(dbResult, expectedResult);\n" + + " description = \"{13}.compare(DBLanguageFunctionUtil.{0}(zero1, value2), zero1{7}{8}value2)\";\n" + + + " TestCase.assertEquals(description, 0, compareResult);\n" + + "\n" + + " dbResult = DBLanguageFunctionUtil.{0}(QueryConstants.NULL_{4}, value2);\n" + + " expectedResult = QueryConstants.NULL_{6};\n" + + " compareResult = {13}.compare(dbResult, expectedResult);\n" + + " description = \"{13}.compare(DBLanguageFunctionUtil.{0}(QueryConstants.NULL_{4}, value2), QueryConstants.NULL_{6})\";\n" + + + " TestCase.assertEquals(description, 0, compareResult);\n" + + "\n" + + " dbResult = DBLanguageFunctionUtil.{0}(QueryConstants.NULL_{4}, QueryConstants.NULL_{5});\n" + + + " expectedResult = QueryConstants.NULL_{6};\n" + + " compareResult = {13}.compare(dbResult, expectedResult);\n" + + " description = 
\"{13}.compare(DBLanguageFunctionUtil.{0}(QueryConstants.NULL_{4}, QueryConstants.NULL_{5}), QueryConstants.NULL_{6})\";\n" + + + " TestCase.assertEquals(description, 0, compareResult);\n" + + /*---------- Same issue as above ---------- + "\n" + + " dbResult = DBLanguageFunctionUtil.{0}(zero1, zero2);\n" + + " expectedResult = zero1{7}{8}zero2;\n" + + " compareResult = {13}.compare(dbResult, expectedResult);\n" + + " description = \"{13}.compare(DBLanguageFunctionUtil.{0}(zero1, zero2), zero1{7}{8}zero2)\";\n" + + " TestCase.assertEquals(description, 0, compareResult);\n" +*/ + " '}' catch (Exception ex) '{'\n" + + " throw new RuntimeException(\"Comparison failure: dbResult=\" + dbResult + \", expectedResult=\" + expectedResult, ex);\n" + + + " '}'\n" + + "\n" + + " '}'"); // requires that value2 > value1 MessageFormat varVarCompareTestFormatter = new MessageFormat("" + - " public static void test_compare_{1}_{2}_compare() '{'\n" + - " final {1} value1 = {11};\n" + - " final {2} value2 = {12};\n" + - " final {1} zero1 = 0;\n" + - " final {2} zero2 = 0;\n\n" + - " TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(value1, Float.NaN));\n" - + - " TestCase.assertEquals(1, DBLanguageFunctionUtil.compareTo(Float.NaN, value1));\n" - + - " TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(value1, Double.NaN));\n" - + - " TestCase.assertEquals(1, DBLanguageFunctionUtil.compareTo(Double.NaN, value1));\n" - + - " TestCase.assertEquals( 0, DBLanguageFunctionUtil.compareTo(zero1, zero2));\n" + - " TestCase.assertEquals( 0, DBLanguageFunctionUtil.compareTo(zero2, zero1));\n" + - " TestCase.assertEquals( 0, DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_{4}, QueryConstants.NULL_{5}));\n" - + - " TestCase.assertEquals( 0, DBLanguageFunctionUtil.compareTo(value1, value1));\n" - + - " TestCase.assertEquals( 0, DBLanguageFunctionUtil.compareTo(value2, value2));\n" - + - " TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(value1, value2));\n" - + - " 
TestCase.assertEquals( 1, DBLanguageFunctionUtil.compareTo(value2, value1));\n" - + - " TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(-value1, value2));\n" - + - " TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(-value2, value1));\n" - + - " TestCase.assertEquals( 1, DBLanguageFunctionUtil.compareTo(-value1, -value2));\n" - + - " '}'"); + " public static void test_compare_{1}_{2}_compare() '{'\n" + + " final {1} value1 = {11};\n" + + " final {2} value2 = {12};\n" + + " final {1} zero1 = 0;\n" + + " final {2} zero2 = 0;\n\n" + + " TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(value1, Float.NaN));\n" + + " TestCase.assertEquals(1, DBLanguageFunctionUtil.compareTo(Float.NaN, value1));\n" + + " TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(value1, Double.NaN));\n" + + " TestCase.assertEquals(1, DBLanguageFunctionUtil.compareTo(Double.NaN, value1));\n" + + " TestCase.assertEquals( 0, DBLanguageFunctionUtil.compareTo(zero1, zero2));\n" + + " TestCase.assertEquals( 0, DBLanguageFunctionUtil.compareTo(zero2, zero1));\n" + + " TestCase.assertEquals( 0, DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_{4}, QueryConstants.NULL_{5}));\n" + + + " TestCase.assertEquals( 0, DBLanguageFunctionUtil.compareTo(value1, value1));\n" + + " TestCase.assertEquals( 0, DBLanguageFunctionUtil.compareTo(value2, value2));\n" + + " TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(value1, value2));\n" + + " TestCase.assertEquals( 1, DBLanguageFunctionUtil.compareTo(value2, value1));\n" + + " TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(-value1, value2));\n" + + " TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(-value2, value1));\n" + + " TestCase.assertEquals( 1, DBLanguageFunctionUtil.compareTo(-value1, -value2));\n" + + " '}'"); /* - * Special varVar formatter for boolean operations. 
If one expression in a ternary if is a - * boxed type and the other is a primitive, Java's inclination is to unbox the one that's - * boxed. + * Special varVar formatter for boolean operations. If one expression in a ternary if is a boxed type and the + * other is a primitive, Java's inclination is to unbox the one that's boxed. * - * Since the DB uses {@code Boolean} to store booleans while supporting {@code null}, we - * must manually box the result of boolean operations if we wish to support nulls. + * Since the DB uses {@code Boolean} to store booleans while supporting {@code null}, we must manually box the + * result of boolean operations if we wish to support nulls. * - * See JLS Chapter 15 section 25 -- - * https://docs.oracle.com/javase/specs/jls/se8/html/jls-15.html#jls-15.25" + * See JLS Chapter 15 section 25 -- https://docs.oracle.com/javase/specs/jls/se8/html/jls-15.html#jls-15.25" * - * Note that we do not provide null-safe handling for the "conditional-and" and - * "conditional-or" operators ("&&" and "||"). To do so while maintaining their - * short-circuit behavior would require different parser changes. + * Note that we do not provide null-safe handling for the "conditional-and" and "conditional-or" operators ("&&" + * and "||"). To do so while maintaining their short-circuit behavior would require different parser changes. */ MessageFormat varVarBooleanFormatter = new MessageFormat("" + - " public static {3} {0}({1} a, {2} b)'{'\n" + - " return a==QueryConstants.NULL_{4} || b==QueryConstants.NULL_{5} ? QueryConstants.NULL_{6} : Boolean.valueOf(a{7}{8}b);\n" - + - " '}'"); + " public static {3} {0}({1} a, {2} b)'{'\n" + + " return a==QueryConstants.NULL_{4} || b==QueryConstants.NULL_{5} ? 
QueryConstants.NULL_{6} : Boolean.valueOf(a{7}{8}b);\n" + + + " '}'"); MessageFormat varVarCompareToFormatter = new MessageFormat("" + - " public static int compareTo({1} a, {2} b) '{'\n" + - " if (a==QueryConstants.NULL_{4})'{'\n" + - " return (b==QueryConstants.NULL_{5}) ? 0 : -1;\n" + - " '}'\n" + - " \n" + - " if (b==QueryConstants.NULL_{5})'{'\n" + - " return 1;\n" + - " '}'\n" + - "\n" + - " return a Long.MAX_VALUE) '{'\n" + - " return -1;\n" + - " '}' else if(b < Long.MIN_VALUE) '{'\n" + - " return 1;\n" + - " '}' else '{'\n" + - " final long longValue = (long) b;\n" + - " if (longValue > a) '{'\n" + - " return -1;\n" + - " '}' else if (longValue == a) '{'\n" + - " if (b - longValue == 0d) '{'\n" + - " return 0;\n" + - " '}' else if (b - longValue > 0d) '{'\n" + - " return -1;\n" + - " '}'\n" + - " '}'\n" + - " return 1;\n" + - " '}'\n" + - " '}'"); + " public static int compareTo({1} a, {2} b) '{'\n" + + " if (a==QueryConstants.NULL_{4})'{'\n" + + " return (b==QueryConstants.NULL_{5}) ? 
0 : -1;\n" + + " '}'\n" + + " \n" + + " if (b==QueryConstants.NULL_{5})'{'\n" + + " return 1;\n" + + " '}'\n\n" + + " if(Double.isNaN(b)) '{'\n" + + " return -1;\n" + + " }\n" + + " if(b > Long.MAX_VALUE) '{'\n" + + " return -1;\n" + + " '}' else if(b < Long.MIN_VALUE) '{'\n" + + " return 1;\n" + + " '}' else '{'\n" + + " final long longValue = (long) b;\n" + + " if (longValue > a) '{'\n" + + " return -1;\n" + + " '}' else if (longValue == a) '{'\n" + + " if (b - longValue == 0d) '{'\n" + + " return 0;\n" + + " '}' else if (b - longValue > 0d) '{'\n" + + " return -1;\n" + + " '}'\n" + + " '}'\n" + + " return 1;\n" + + " '}'\n" + + " '}'"); MessageFormat inverseCompareToFormatter = new MessageFormat("" + - " public static int compareTo({1} a, {2} b) '{'\n" + - " return -compareTo(b, a);\n" + - " '}'"); + " public static int compareTo({1} a, {2} b) '{'\n" + + " return -compareTo(b, a);\n" + + " '}'"); MessageFormat varVarCompareToUserFormatter = new MessageFormat("" + - " public static boolean {0}({1} a, {2} b)'{'\n" + - " return compareTo(a,b){9};\n" + - " '}'"); + " public static boolean {0}({1} a, {2} b)'{'\n" + + " return compareTo(a,b){9};\n" + + " '}'"); MessageFormat varVarEqualsFormatter = new MessageFormat("" + - " public static boolean eq({1} a, {2} b) '{'\n" + - " if (a==QueryConstants.NULL_{4})'{'\n" + - " return (b==QueryConstants.NULL_{5});\n" + - " '}'\n" + - " \n" + - " if (b==QueryConstants.NULL_{5})'{'\n" + - " return false;\n" + - " '}'\n" + - "\n" + - " return a==b;\n" + - " '}'"); + " public static boolean eq({1} a, {2} b) '{'\n" + + " if (a==QueryConstants.NULL_{4})'{'\n" + + " return (b==QueryConstants.NULL_{5});\n" + + " '}'\n" + + " \n" + + " if (b==QueryConstants.NULL_{5})'{'\n" + + " return false;\n" + + " '}'\n" + + "\n" + + " return a==b;\n" + + " '}'"); MessageFormat arrayArrayFormatter = new MessageFormat("" + - " public static {3}[] {0}Array({1} a[], {2} b[])'{'\n" + - " if (a.length != b.length) throw new 
IllegalArgumentException(\"Attempt to {10} two arrays ({1}, {2}) of different length\" +\n" - + - " \" (a.length=\" + a.length + \", b.length=\" + b.length + '')'');\n" + - " \n" + - " {3}[] ret = new {3}[a.length];\n" + - " for (int i = 0; i < a.length; i++) '{'\n" + - " ret[i] = {0}(a[i],b[i]);\n" + - " '}'\n" + - " \n" + - " return ret;\n" + - " '}'"); + " public static {3}[] {0}Array({1} a[], {2} b[])'{'\n" + + " if (a.length != b.length) throw new IllegalArgumentException(\"Attempt to {10} two arrays ({1}, {2}) of different length\" +\n" + + + " \" (a.length=\" + a.length + \", b.length=\" + b.length + '')'');\n" + + " \n" + + " {3}[] ret = new {3}[a.length];\n" + + " for (int i = 0; i < a.length; i++) '{'\n" + + " ret[i] = {0}(a[i],b[i]);\n" + + " '}'\n" + + " \n" + + " return ret;\n" + + " '}'"); MessageFormat arrayVarFormatter = new MessageFormat("" + - " public static {3}[] {0}Array({1} a[], {2} b)'{'\n" + - " {3}[] ret = new {3}[a.length];\n" + - " for (int i = 0; i < a.length; i++) '{'\n" + - " ret[i] = {0}(a[i],b);\n" + - " '}'\n" + - "\n" + - " return ret;\n" + - " '}'"); + " public static {3}[] {0}Array({1} a[], {2} b)'{'\n" + + " {3}[] ret = new {3}[a.length];\n" + + " for (int i = 0; i < a.length; i++) '{'\n" + + " ret[i] = {0}(a[i],b);\n" + + " '}'\n" + + "\n" + + " return ret;\n" + + " '}'"); MessageFormat varArrayFormatter = new MessageFormat("" + - " public static {3}[] {0}Array({1} a, {2} b[])'{'\n" + - " {3}[] ret = new {3}[b.length];\n" + - " for (int i = 0; i < b.length; i++) '{'\n" + - " ret[i] = {0}(a,b[i]);\n" + - " '}'\n" + - "\n" + - " return ret;\n" + - " '}'"); + " public static {3}[] {0}Array({1} a, {2} b[])'{'\n" + + " {3}[] ret = new {3}[b.length];\n" + + " for (int i = 0; i < b.length; i++) '{'\n" + + " ret[i] = {0}(a,b[i]);\n" + + " '}'\n" + + "\n" + + " return ret;\n" + + " '}'"); MessageFormat castFormatter = new MessageFormat("" + - " public static {2} {2}Cast({1} a)'{'\n" + - " return a==QueryConstants.NULL_{4} ? 
QueryConstants.NULL_{5} : ({2})a;\n" + - " '}'"); + " public static {2} {2}Cast({1} a)'{'\n" + + " return a==QueryConstants.NULL_{4} ? QueryConstants.NULL_{5} : ({2})a;\n" + + " '}'"); /* - * Note that this will only work when unboxing -- e.g. doubleCast(a) when 'a' is a Double. - * Casting from an Integer to a double requires: doubleCast(intCast(theInteger)) + * Note that this will only work when unboxing -- e.g. doubleCast(a) when 'a' is a Double. Casting from an + * Integer to a double requires: doubleCast(intCast(theInteger)) * * See the language specification, or comments in the parser, for more details. */ MessageFormat castFromObjFormatter = new MessageFormat("" + - " public static {1} {1}Cast(Object a)'{'\n" + - " return a==null ? QueryConstants.NULL_{4} : ({1})a;\n" + - " '}'"); + " public static {1} {1}Cast(Object a)'{'\n" + + " return a==null ? QueryConstants.NULL_{4} : ({1})a;\n" + + " '}'"); MessageFormat negateFormatter = new MessageFormat("" + - " public static {3} negate({1} a)'{'\n" + - " return a==QueryConstants.NULL_{4} ? QueryConstants.NULL_{6} : -a;\n" + - " '}'"); + " public static {3} negate({1} a)'{'\n" + + " return a==QueryConstants.NULL_{4} ? 
QueryConstants.NULL_{6} : -a;\n" + + " '}'"); final int sbCapacity = (int) Math.pow(2, 20); StringBuilder buf = new StringBuilder(sbCapacity); StringBuilder testBuf = new StringBuilder(sbCapacity); buf.append("/*\n" + - " * Copyright (c) 2016-").append(LocalDate.now().getYear()) - .append(" Deephaven Data Labs and Patent Pending\n" + - " * GENERATED CODE - DO NOT MODIFY DIRECTLY\n" + - " * This class generated by " + DBLanguageFunctionGenerator.class.getCanonicalName() - + "\n" + - " */\n" + - "\n"); + " * Copyright (c) 2016-").append(LocalDate.now().getYear()) + .append(" Deephaven Data Labs and Patent Pending\n" + + " * GENERATED CODE - DO NOT MODIFY DIRECTLY\n" + + " * This class generated by " + DBLanguageFunctionGenerator.class.getCanonicalName() + "\n" + + " */\n" + + "\n"); buf.append("package io.deephaven.db.tables.lang;\n\n"); buf.append("import io.deephaven.util.QueryConstants;\n\n"); - buf.append( - "@SuppressWarnings({\"unused\", \"WeakerAccess\", \"SimplifiableIfStatement\"})\n"); + buf.append("@SuppressWarnings({\"unused\", \"WeakerAccess\", \"SimplifiableIfStatement\"})\n"); buf.append("public final class DBLanguageFunctionUtil {\n\n"); // ------------------------------------------------------------------------------------------------------------------------------------------------------------------ buf.append("" + - " public static boolean eq(Object obj1, Object obj2){\n" + - " //noinspection SimplifiableBooleanExpression\n" + - " return obj1==obj2 || (!(obj1==null ^ obj2==null) && obj1.equals(obj2));\n" + - " }\n" + - " \n" + - " @SuppressWarnings({\"unchecked\"})\n" + - " public static int compareTo(Comparable obj1, Comparable obj2) {\n" + - " if (obj1==null){\n" + - " return (obj2==null) ? 0 : -1;\n" + - " }\n" + - " \n" + - " if (obj2==null){\n" + - " return 1;\n" + - " }\n" + - "\n" + - " return obj1.compareTo(obj2);\n" + - " }\n" + - "\n" + - " public static Boolean not(Boolean a){\n" + - " return a==QueryConstants.NULL_BOOLEAN ? 
QueryConstants.NULL_BOOLEAN : Boolean.valueOf(!a);\n" - + - " }\n\n"); + " public static boolean eq(Object obj1, Object obj2){\n" + + " //noinspection SimplifiableBooleanExpression\n" + + " return obj1==obj2 || (!(obj1==null ^ obj2==null) && obj1.equals(obj2));\n" + + " }\n" + + " \n" + + " @SuppressWarnings({\"unchecked\"})\n" + + " public static int compareTo(Comparable obj1, Comparable obj2) {\n" + + " if (obj1==null){\n" + + " return (obj2==null) ? 0 : -1;\n" + + " }\n" + + " \n" + + " if (obj2==null){\n" + + " return 1;\n" + + " }\n" + + "\n" + + " return obj1.compareTo(obj2);\n" + + " }\n" + + "\n" + + " public static Boolean not(Boolean a){\n" + + " return a==QueryConstants.NULL_BOOLEAN ? QueryConstants.NULL_BOOLEAN : Boolean.valueOf(!a);\n" + + " }\n\n"); // ------------------------------------------------------------------------------------------------------------------------------------------------------------------ /* Now start the test class: */ testBuf.append("/*\n" + - " * Copyright (c) 2016-").append(LocalDate.now().getYear()) - .append(" Deephaven Data Labs and Patent Pending\n" + - " * GENERATED CODE - DO NOT MODIFY DIRECTLY\n" + - " * This class generated by " + DBLanguageFunctionGenerator.class.getCanonicalName() - + "\n" + - " */\n" + - "\n"); + " * Copyright (c) 2016-").append(LocalDate.now().getYear()) + .append(" Deephaven Data Labs and Patent Pending\n" + + " * GENERATED CODE - DO NOT MODIFY DIRECTLY\n" + + " * This class generated by " + DBLanguageFunctionGenerator.class.getCanonicalName() + "\n" + + " */\n" + + "\n"); testBuf.append("package io.deephaven.db.tables.lang;\n\n"); @@ -390,8 +370,8 @@ public static void main(String args[]) { // ------------------------------------------------------------------------------------------------------------------------------------------------------------------ - Class classes[] = new Class[] {int.class, double.class, long.class, float.class, char.class, - byte.class, short.class}; + Class classes[] = 
+ new Class[] {int.class, double.class, long.class, float.class, char.class, byte.class, short.class}; BinaryExpr.Operator operators[] = new BinaryExpr.Operator[] { BinaryExpr.Operator.plus, @@ -401,10 +381,10 @@ public static void main(String args[]) { BinaryExpr.Operator.remainder, }; - // Verbs corresponding to each operator, used in exception messages: "Attempt to _____ two - // arrays of different length" + // Verbs corresponding to each operator, used in exception messages: "Attempt to _____ two arrays of different + // length" String[] operatorDescriptions = - new String[] {"add", "subtract", "multiply", "divide", "calculate remainder of"}; + new String[] {"add", "subtract", "multiply", "divide", "calculate remainder of"}; for (int i = 0; i < operators.length; i++) { BinaryExpr.Operator operator = operators[i]; @@ -423,8 +403,8 @@ public static void main(String args[]) { // compare tests for (Class classA : classes) { for (Class classB : classes) { - appendTest(testBuf, varVarCompareTestFormatter, classA, classB, - getSmallLiteral(classA), getBiggerLiteral(classB)); + appendTest(testBuf, varVarCompareTestFormatter, classA, classB, getSmallLiteral(classA), + getBiggerLiteral(classB)); } } @@ -449,62 +429,54 @@ public static void main(String args[]) { // ------------------------------------------------------------------------------------------------------------------------------------------------------------------ - classes = new Class[] {int.class, double.class, long.class, float.class, char.class, - byte.class, short.class}; + classes = new Class[] {int.class, double.class, long.class, float.class, char.class, byte.class, short.class}; for (Class classA : classes) { for (Class classB : classes) { - // handle special cases with float/double arguments (need to handle NaN/precision - // differently) + // handle special cases with float/double arguments (need to handle NaN/precision differently) if (classA.equals(long.class) && classB.equals(double.class)) { - 
append(buf, longDoubleCompareToFormatter, BinaryExpr.Operator.plus, classA, - classB); // the plus is just to avoid a npe + append(buf, longDoubleCompareToFormatter, BinaryExpr.Operator.plus, classA, classB); // the plus is + // just to + // avoid a npe } else if (classA.equals(double.class) && classB.equals(long.class)) { - append(buf, inverseCompareToFormatter, BinaryExpr.Operator.plus, classA, - classB); // the plus is just to avoid a npe + append(buf, inverseCompareToFormatter, BinaryExpr.Operator.plus, classA, classB); // the plus is + // just to avoid a + // npe } else if (classA.equals(long.class) && classB.equals(float.class)) { - append(buf, longDoubleCompareToFormatter, BinaryExpr.Operator.plus, classA, - classB); // the plus is just to avoid a npe + append(buf, longDoubleCompareToFormatter, BinaryExpr.Operator.plus, classA, classB); // the plus is + // just to + // avoid a npe } else if (classA.equals(float.class) && classB.equals(long.class)) { - append(buf, inverseCompareToFormatter, BinaryExpr.Operator.plus, classA, - classB); // the plus is just to avoid a npe + append(buf, inverseCompareToFormatter, BinaryExpr.Operator.plus, classA, classB); // the plus is + // just to avoid a + // npe } else if (classA.equals(double.class) || classB.equals(double.class)) { // if either arg is a double, we promote to double - append(buf, varDoubleCompareToFormatter, BinaryExpr.Operator.plus, classA, - classB); // the plus is just to avoid a npe + append(buf, varDoubleCompareToFormatter, BinaryExpr.Operator.plus, classA, classB); // the plus is + // just to avoid + // a npe } else if (classA.equals(float.class) || classB.equals(float.class)) { - // if both args can contain a float, use float comparator, otherwise promote to - // double - if (classA.isAssignableFrom(float.class) - && classB.isAssignableFrom(float.class)) { - append(buf, varFloatCompareToFormatter, BinaryExpr.Operator.plus, classA, - classB); // the plus is just to avoid a npe + // if both args can 
contain a float, use float comparator, otherwise promote to double + if (classA.isAssignableFrom(float.class) && classB.isAssignableFrom(float.class)) { + append(buf, varFloatCompareToFormatter, BinaryExpr.Operator.plus, classA, classB); // the plus + // is just to + // avoid a + // npe } else { - append(buf, varDoubleCompareToFormatter, BinaryExpr.Operator.plus, classA, - classB); // the plus is just to avoid a npe + append(buf, varDoubleCompareToFormatter, BinaryExpr.Operator.plus, classA, classB); // the plus + // is just + // to avoid + // a npe } } else { - append(buf, varVarCompareToFormatter, BinaryExpr.Operator.plus, classA, classB); // the - // plus - // is - // just - // to - // avoid - // a - // npe + append(buf, varVarCompareToFormatter, BinaryExpr.Operator.plus, classA, classB); // the plus is just + // to avoid a npe } - append(buf, varVarEqualsFormatter, BinaryExpr.Operator.plus, classA, classB); // the - // plus - // is - // just - // to - // avoid - // a - // npe - append(buf, arrayArrayFormatter, BinaryExpr.Operator.equals, "check equality of", - classA, classB); + append(buf, varVarEqualsFormatter, BinaryExpr.Operator.plus, classA, classB); // the plus is just to + // avoid a npe + append(buf, arrayArrayFormatter, BinaryExpr.Operator.equals, "check equality of", classA, classB); append(buf, arrayVarFormatter, BinaryExpr.Operator.equals, classA, classB); append(buf, varArrayFormatter, BinaryExpr.Operator.equals, classA, classB); } @@ -546,14 +518,10 @@ public static void main(String args[]) { // ------------------------------------------------------------------------------------------------------------------------------------------------------------------ - append(buf, arrayArrayFormatter, BinaryExpr.Operator.equals, "check equality of", - Boolean.class, boolean.class); - append(buf, arrayArrayFormatter, BinaryExpr.Operator.equals, "check equality of", - boolean.class, Boolean.class); - append(buf, arrayArrayFormatter, BinaryExpr.Operator.equals, 
"check equality of", - boolean.class, boolean.class); - append(buf, arrayArrayFormatter, BinaryExpr.Operator.equals, "check equality of", - Object.class, Object.class); + append(buf, arrayArrayFormatter, BinaryExpr.Operator.equals, "check equality of", Boolean.class, boolean.class); + append(buf, arrayArrayFormatter, BinaryExpr.Operator.equals, "check equality of", boolean.class, Boolean.class); + append(buf, arrayArrayFormatter, BinaryExpr.Operator.equals, "check equality of", boolean.class, boolean.class); + append(buf, arrayArrayFormatter, BinaryExpr.Operator.equals, "check equality of", Object.class, Object.class); append(buf, arrayVarFormatter, BinaryExpr.Operator.equals, boolean.class, Boolean.class); append(buf, arrayVarFormatter, BinaryExpr.Operator.equals, Object.class, Object.class); @@ -571,54 +539,36 @@ public static void main(String args[]) { }; for (BinaryExpr.Operator operator : operators) { - append(buf, arrayArrayFormatter, operator, "compare", Comparable.class, - Comparable.class); + append(buf, arrayArrayFormatter, operator, "compare", Comparable.class, Comparable.class); append(buf, arrayVarFormatter, operator, Comparable.class, Comparable.class); append(buf, varArrayFormatter, operator, Comparable.class, Comparable.class); } // ------------------------------------------------------------------------------------------------------------------------------------------------------------------ - classes = new Class[] {int.class, double.class, long.class, float.class, char.class, - byte.class, short.class}; + classes = new Class[] {int.class, double.class, long.class, float.class, char.class, byte.class, short.class}; for (Class classA : classes) { // Functions for null-safe casts between primitive types for (Class classB : classes) { - if (classA != classB) { // don't create functions for redundant casts (e.g. 
- // intCast(int)) - append(buf, castFormatter, BinaryExpr.Operator.plus, classA, classB); // the - // plus is - // just so - // we - // don't - // get a - // npe + if (classA != classB) { // don't create functions for redundant casts (e.g. intCast(int)) + append(buf, castFormatter, BinaryExpr.Operator.plus, classA, classB); // the plus is just so we + // don't get a npe } } } for (Class c : classes) { // Functions for null-safe casts from Object to primitive types - append(buf, castFromObjFormatter, BinaryExpr.Operator.plus, c, Object.class); // the - // plus - // and - // Object - // are - // just so - // we - // don't - // get a - // npe + append(buf, castFromObjFormatter, BinaryExpr.Operator.plus, c, Object.class); // the plus and Object are + // just so we don't get a npe } // ------------------------------------------------------------------------------------------------------------------------------------------------------------------ - classes = new Class[] {int.class, double.class, long.class, float.class, char.class, - byte.class, short.class}; + classes = new Class[] {int.class, double.class, long.class, float.class, char.class, byte.class, short.class}; for (Class clazz : classes) { - append(buf, negateFormatter, BinaryExpr.Operator.plus, clazz, clazz); // the plus is - // just so we - // don't get a npe + append(buf, negateFormatter, BinaryExpr.Operator.plus, clazz, clazz); // the plus is just so we don't get a + // npe } // ------------------------------------------------------------------------------------------------------------------------------------------------------------------ @@ -626,10 +576,8 @@ public static void main(String args[]) { buf.append("}\n"); testBuf.append("}\n"); - String fileName = - "./DB/src/main/java/io/deephaven/db/tables/lang/DBLanguageFunctionUtil.java"; - String testFileName = - "./DB/src/test/java/io/deephaven/db/tables/lang/TestDBLanguageFunctionUtil.java"; + String fileName = 
"./DB/src/main/java/io/deephaven/db/tables/lang/DBLanguageFunctionUtil.java"; + String testFileName = "./DB/src/test/java/io/deephaven/db/tables/lang/TestDBLanguageFunctionUtil.java"; try { try (BufferedWriter out = new BufferedWriter(new FileWriter(fileName))) { out.write(buf.toString()); @@ -643,42 +591,40 @@ public static void main(String args[]) { } System.out.println("Finished generating DBLanguageFunctionUtil in " - + new DecimalFormat().format(System.currentTimeMillis() - start) + " millis"); + + new DecimalFormat().format(System.currentTimeMillis() - start) + " millis"); System.out.println("Wrote DBLanguageFunctionUtil to: " + fileName); System.out.println("Wrote TestDBLanguageFunctionUtil to: " + testFileName); } - private static void append(StringBuilder buf, MessageFormat messageFormat, - BinaryExpr.Operator op, Class type1, Class type2) { + private static void append(StringBuilder buf, MessageFormat messageFormat, BinaryExpr.Operator op, Class type1, + Class type2) { append(buf, messageFormat, op, null, type1, type2); } - private static void append(StringBuilder buf, MessageFormat messageFormat, - BinaryExpr.Operator op, String opDescription, Class type1, Class type2) { + private static void append(StringBuilder buf, MessageFormat messageFormat, BinaryExpr.Operator op, + String opDescription, Class type1, Class type2) { append(buf, messageFormat, op, opDescription, type1, type2, null, null); } - private static void appendTest(StringBuilder buf, MessageFormat messageFormat, - BinaryExpr.Operator op, Class type1, Class type2) { + private static void appendTest(StringBuilder buf, MessageFormat messageFormat, BinaryExpr.Operator op, Class type1, + Class type2) { append(buf, messageFormat, op, null, type1, type2, getLiteral(type1), getLiteral(type2)); } - private static void appendTest(StringBuilder buf, MessageFormat messageFormat, Class type1, - Class type2, String literal1, String literal2) { + private static void appendTest(StringBuilder buf, 
MessageFormat messageFormat, Class type1, Class type2, + String literal1, String literal2) { append(buf, messageFormat, null, null, type1, type2, literal1, literal2); } - private static void append(StringBuilder buf, MessageFormat messageFormat, - BinaryExpr.Operator op, String opDescription, Class type1, Class type2, String literal1, - String literal2) { + private static void append(StringBuilder buf, MessageFormat messageFormat, BinaryExpr.Operator op, + String opDescription, Class type1, Class type2, String literal1, String literal2) { Class promotedType; - if (op == BinaryExpr.Operator.equals || op == BinaryExpr.Operator.less - || op == BinaryExpr.Operator.greater || op == BinaryExpr.Operator.lessEquals - || op == BinaryExpr.Operator.greaterEquals) { + if (op == BinaryExpr.Operator.equals || op == BinaryExpr.Operator.less || op == BinaryExpr.Operator.greater + || op == BinaryExpr.Operator.lessEquals || op == BinaryExpr.Operator.greaterEquals) { promotedType = boolean.class; } else if (io.deephaven.util.type.TypeUtils.getBoxedType(type1) == Boolean.class - || io.deephaven.util.type.TypeUtils.getBoxedType(type2) == Boolean.class) { + || io.deephaven.util.type.TypeUtils.getBoxedType(type2) == Boolean.class) { promotedType = Boolean.class; } else { promotedType = DBLanguageParser.binaryNumericPromotionType(type1, type2); @@ -716,8 +662,7 @@ private static void append(StringBuilder buf, MessageFormat messageFormat, promotedType.getSimpleName(), type1Unboxed == null ? "" : type1Unboxed.getSimpleName().toUpperCase(), type2Unboxed == null ? "" : type2Unboxed.getSimpleName().toUpperCase(), - promotedTypeUnboxed == null ? "" - : promotedTypeUnboxed.getSimpleName().toUpperCase(), + promotedTypeUnboxed == null ? "" : promotedTypeUnboxed.getSimpleName().toUpperCase(), operatorSymbol, cast, compareTo, @@ -755,8 +700,7 @@ private static String getLiteral(Class type) { } /** - * Returns a String of an small example literal value of {@code type}. 
Used for generating - * comparison tests. + * Returns a String of an small example literal value of {@code type}. Used for generating comparison tests. */ private static String getSmallLiteral(Class type) { if (type.equals(boolean.class)) { @@ -781,8 +725,8 @@ private static String getSmallLiteral(Class type) { } /** - * Returns a String of an bigger example literal value of {@code type}. Must be larger than the - * value produced by getSmallLiteral (across all types). Used for generating comparison tests. + * Returns a String of an bigger example literal value of {@code type}. Must be larger than the value produced by + * getSmallLiteral (across all types). Used for generating comparison tests. */ private static String getBiggerLiteral(Class type) { if (type.equals(boolean.class)) { diff --git a/DB/src/main/java/io/deephaven/db/tables/lang/DBLanguageFunctionUtil.java b/DB/src/main/java/io/deephaven/db/tables/lang/DBLanguageFunctionUtil.java index 25581694ab1..9ca684629ab 100644 --- a/DB/src/main/java/io/deephaven/db/tables/lang/DBLanguageFunctionUtil.java +++ b/DB/src/main/java/io/deephaven/db/tables/lang/DBLanguageFunctionUtil.java @@ -1,6 +1,6 @@ /* - * Copyright (c) 2016-2021 Deephaven Data Labs and Patent Pending GENERATED CODE - DO NOT MODIFY - * DIRECTLY This class generated by io.deephaven.db.tables.lang.DBLanguageFunctionGenerator + * Copyright (c) 2016-2021 Deephaven Data Labs and Patent Pending GENERATED CODE - DO NOT MODIFY DIRECTLY This class + * generated by io.deephaven.db.tables.lang.DBLanguageFunctionGenerator */ package io.deephaven.db.tables.lang; @@ -33,15 +33,12 @@ public static Boolean not(Boolean a) { } public static int plus(int a, int b) { - return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_INT - ? QueryConstants.NULL_INT - : a + b; + return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_INT ? 
QueryConstants.NULL_INT : a + b; } public static int[] plusArray(int a[], int b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to add two arrays (int, int) of different length" + + throw new IllegalArgumentException("Attempt to add two arrays (int, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -71,15 +68,12 @@ public static int[] plusArray(int a, int b[]) { } public static double plus(int a, double b) { - return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_DOUBLE - ? QueryConstants.NULL_DOUBLE - : a + b; + return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_DOUBLE ? QueryConstants.NULL_DOUBLE : a + b; } public static double[] plusArray(int a[], double b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to add two arrays (int, double) of different length" + + throw new IllegalArgumentException("Attempt to add two arrays (int, double) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -109,15 +103,12 @@ public static double[] plusArray(int a, double b[]) { } public static long plus(int a, long b) { - return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_LONG - ? QueryConstants.NULL_LONG - : a + b; + return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_LONG ? 
QueryConstants.NULL_LONG : a + b; } public static long[] plusArray(int a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to add two arrays (int, long) of different length" + + throw new IllegalArgumentException("Attempt to add two arrays (int, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); long[] ret = new long[a.length]; @@ -147,15 +138,12 @@ public static long[] plusArray(int a, long b[]) { } public static float plus(int a, float b) { - return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_FLOAT - ? QueryConstants.NULL_FLOAT - : a + b; + return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_FLOAT ? QueryConstants.NULL_FLOAT : a + b; } public static float[] plusArray(int a[], float b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to add two arrays (int, float) of different length" + + throw new IllegalArgumentException("Attempt to add two arrays (int, float) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); float[] ret = new float[a.length]; @@ -185,15 +173,12 @@ public static float[] plusArray(int a, float b[]) { } public static int plus(int a, char b) { - return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_CHAR - ? QueryConstants.NULL_INT - : a + b; + return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_CHAR ? 
QueryConstants.NULL_INT : a + b; } public static int[] plusArray(int a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to add two arrays (int, char) of different length" + + throw new IllegalArgumentException("Attempt to add two arrays (int, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -223,15 +208,12 @@ public static int[] plusArray(int a, char b[]) { } public static int plus(int a, byte b) { - return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_BYTE - ? QueryConstants.NULL_INT - : a + b; + return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_BYTE ? QueryConstants.NULL_INT : a + b; } public static int[] plusArray(int a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to add two arrays (int, byte) of different length" + + throw new IllegalArgumentException("Attempt to add two arrays (int, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -261,15 +243,12 @@ public static int[] plusArray(int a, byte b[]) { } public static int plus(int a, short b) { - return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_SHORT - ? QueryConstants.NULL_INT - : a + b; + return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_SHORT ? 
QueryConstants.NULL_INT : a + b; } public static int[] plusArray(int a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to add two arrays (int, short) of different length" + + throw new IllegalArgumentException("Attempt to add two arrays (int, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -299,15 +278,12 @@ public static int[] plusArray(int a, short b[]) { } public static double plus(double a, int b) { - return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_INT - ? QueryConstants.NULL_DOUBLE - : a + b; + return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_INT ? QueryConstants.NULL_DOUBLE : a + b; } public static double[] plusArray(double a[], int b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to add two arrays (double, int) of different length" + + throw new IllegalArgumentException("Attempt to add two arrays (double, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -337,15 +313,12 @@ public static double[] plusArray(double a, int b[]) { } public static double plus(double a, double b) { - return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_DOUBLE - ? QueryConstants.NULL_DOUBLE - : a + b; + return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_DOUBLE ? 
QueryConstants.NULL_DOUBLE : a + b; } public static double[] plusArray(double a[], double b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to add two arrays (double, double) of different length" + + throw new IllegalArgumentException("Attempt to add two arrays (double, double) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -375,15 +348,12 @@ public static double[] plusArray(double a, double b[]) { } public static double plus(double a, long b) { - return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_LONG - ? QueryConstants.NULL_DOUBLE - : a + b; + return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_LONG ? QueryConstants.NULL_DOUBLE : a + b; } public static double[] plusArray(double a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to add two arrays (double, long) of different length" + + throw new IllegalArgumentException("Attempt to add two arrays (double, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -413,15 +383,12 @@ public static double[] plusArray(double a, long b[]) { } public static double plus(double a, float b) { - return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_FLOAT - ? QueryConstants.NULL_DOUBLE - : a + b; + return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_FLOAT ? 
QueryConstants.NULL_DOUBLE : a + b; } public static double[] plusArray(double a[], float b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to add two arrays (double, float) of different length" + + throw new IllegalArgumentException("Attempt to add two arrays (double, float) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -451,15 +418,12 @@ public static double[] plusArray(double a, float b[]) { } public static double plus(double a, char b) { - return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_CHAR - ? QueryConstants.NULL_DOUBLE - : a + b; + return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_CHAR ? QueryConstants.NULL_DOUBLE : a + b; } public static double[] plusArray(double a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to add two arrays (double, char) of different length" + + throw new IllegalArgumentException("Attempt to add two arrays (double, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -489,15 +453,12 @@ public static double[] plusArray(double a, char b[]) { } public static double plus(double a, byte b) { - return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_BYTE - ? QueryConstants.NULL_DOUBLE - : a + b; + return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_BYTE ? 
QueryConstants.NULL_DOUBLE : a + b; } public static double[] plusArray(double a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to add two arrays (double, byte) of different length" + + throw new IllegalArgumentException("Attempt to add two arrays (double, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -527,15 +488,12 @@ public static double[] plusArray(double a, byte b[]) { } public static double plus(double a, short b) { - return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_SHORT - ? QueryConstants.NULL_DOUBLE - : a + b; + return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_SHORT ? QueryConstants.NULL_DOUBLE : a + b; } public static double[] plusArray(double a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to add two arrays (double, short) of different length" + + throw new IllegalArgumentException("Attempt to add two arrays (double, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -565,15 +523,12 @@ public static double[] plusArray(double a, short b[]) { } public static long plus(long a, int b) { - return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_INT - ? QueryConstants.NULL_LONG - : a + b; + return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_INT ? 
QueryConstants.NULL_LONG : a + b; } public static long[] plusArray(long a[], int b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to add two arrays (long, int) of different length" + + throw new IllegalArgumentException("Attempt to add two arrays (long, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); long[] ret = new long[a.length]; @@ -603,15 +558,12 @@ public static long[] plusArray(long a, int b[]) { } public static double plus(long a, double b) { - return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_DOUBLE - ? QueryConstants.NULL_DOUBLE - : a + b; + return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_DOUBLE ? QueryConstants.NULL_DOUBLE : a + b; } public static double[] plusArray(long a[], double b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to add two arrays (long, double) of different length" + + throw new IllegalArgumentException("Attempt to add two arrays (long, double) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -641,15 +593,12 @@ public static double[] plusArray(long a, double b[]) { } public static long plus(long a, long b) { - return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_LONG - ? QueryConstants.NULL_LONG - : a + b; + return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_LONG ? 
QueryConstants.NULL_LONG : a + b; } public static long[] plusArray(long a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to add two arrays (long, long) of different length" + + throw new IllegalArgumentException("Attempt to add two arrays (long, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); long[] ret = new long[a.length]; @@ -679,15 +628,12 @@ public static long[] plusArray(long a, long b[]) { } public static float plus(long a, float b) { - return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_FLOAT - ? QueryConstants.NULL_FLOAT - : a + b; + return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_FLOAT ? QueryConstants.NULL_FLOAT : a + b; } public static float[] plusArray(long a[], float b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to add two arrays (long, float) of different length" + + throw new IllegalArgumentException("Attempt to add two arrays (long, float) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); float[] ret = new float[a.length]; @@ -717,15 +663,12 @@ public static float[] plusArray(long a, float b[]) { } public static long plus(long a, char b) { - return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_CHAR - ? QueryConstants.NULL_LONG - : a + b; + return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_CHAR ? 
QueryConstants.NULL_LONG : a + b; } public static long[] plusArray(long a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to add two arrays (long, char) of different length" + + throw new IllegalArgumentException("Attempt to add two arrays (long, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); long[] ret = new long[a.length]; @@ -755,15 +698,12 @@ public static long[] plusArray(long a, char b[]) { } public static long plus(long a, byte b) { - return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_BYTE - ? QueryConstants.NULL_LONG - : a + b; + return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_BYTE ? QueryConstants.NULL_LONG : a + b; } public static long[] plusArray(long a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to add two arrays (long, byte) of different length" + + throw new IllegalArgumentException("Attempt to add two arrays (long, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); long[] ret = new long[a.length]; @@ -793,15 +733,12 @@ public static long[] plusArray(long a, byte b[]) { } public static long plus(long a, short b) { - return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_SHORT - ? QueryConstants.NULL_LONG - : a + b; + return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_SHORT ? 
QueryConstants.NULL_LONG : a + b; } public static long[] plusArray(long a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to add two arrays (long, short) of different length" + + throw new IllegalArgumentException("Attempt to add two arrays (long, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); long[] ret = new long[a.length]; @@ -831,15 +768,12 @@ public static long[] plusArray(long a, short b[]) { } public static float plus(float a, int b) { - return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_INT - ? QueryConstants.NULL_FLOAT - : a + b; + return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_INT ? QueryConstants.NULL_FLOAT : a + b; } public static float[] plusArray(float a[], int b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to add two arrays (float, int) of different length" + + throw new IllegalArgumentException("Attempt to add two arrays (float, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); float[] ret = new float[a.length]; @@ -869,15 +803,12 @@ public static float[] plusArray(float a, int b[]) { } public static double plus(float a, double b) { - return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_DOUBLE - ? QueryConstants.NULL_DOUBLE - : a + b; + return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_DOUBLE ? 
QueryConstants.NULL_DOUBLE : a + b; } public static double[] plusArray(float a[], double b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to add two arrays (float, double) of different length" + + throw new IllegalArgumentException("Attempt to add two arrays (float, double) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -907,15 +838,12 @@ public static double[] plusArray(float a, double b[]) { } public static float plus(float a, long b) { - return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_LONG - ? QueryConstants.NULL_FLOAT - : a + b; + return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_LONG ? QueryConstants.NULL_FLOAT : a + b; } public static float[] plusArray(float a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to add two arrays (float, long) of different length" + + throw new IllegalArgumentException("Attempt to add two arrays (float, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); float[] ret = new float[a.length]; @@ -945,15 +873,12 @@ public static float[] plusArray(float a, long b[]) { } public static float plus(float a, float b) { - return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_FLOAT - ? QueryConstants.NULL_FLOAT - : a + b; + return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_FLOAT ? 
QueryConstants.NULL_FLOAT : a + b; } public static float[] plusArray(float a[], float b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to add two arrays (float, float) of different length" + + throw new IllegalArgumentException("Attempt to add two arrays (float, float) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); float[] ret = new float[a.length]; @@ -983,15 +908,12 @@ public static float[] plusArray(float a, float b[]) { } public static float plus(float a, char b) { - return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_CHAR - ? QueryConstants.NULL_FLOAT - : a + b; + return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_CHAR ? QueryConstants.NULL_FLOAT : a + b; } public static float[] plusArray(float a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to add two arrays (float, char) of different length" + + throw new IllegalArgumentException("Attempt to add two arrays (float, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); float[] ret = new float[a.length]; @@ -1021,15 +943,12 @@ public static float[] plusArray(float a, char b[]) { } public static float plus(float a, byte b) { - return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_BYTE - ? QueryConstants.NULL_FLOAT - : a + b; + return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_BYTE ? 
QueryConstants.NULL_FLOAT : a + b; } public static float[] plusArray(float a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to add two arrays (float, byte) of different length" + + throw new IllegalArgumentException("Attempt to add two arrays (float, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); float[] ret = new float[a.length]; @@ -1059,15 +978,12 @@ public static float[] plusArray(float a, byte b[]) { } public static float plus(float a, short b) { - return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_SHORT - ? QueryConstants.NULL_FLOAT - : a + b; + return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_SHORT ? QueryConstants.NULL_FLOAT : a + b; } public static float[] plusArray(float a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to add two arrays (float, short) of different length" + + throw new IllegalArgumentException("Attempt to add two arrays (float, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); float[] ret = new float[a.length]; @@ -1097,15 +1013,12 @@ public static float[] plusArray(float a, short b[]) { } public static int plus(char a, int b) { - return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_INT - ? QueryConstants.NULL_INT - : a + b; + return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_INT ? 
QueryConstants.NULL_INT : a + b; } public static int[] plusArray(char a[], int b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to add two arrays (char, int) of different length" + + throw new IllegalArgumentException("Attempt to add two arrays (char, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -1135,15 +1048,12 @@ public static int[] plusArray(char a, int b[]) { } public static double plus(char a, double b) { - return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_DOUBLE - ? QueryConstants.NULL_DOUBLE - : a + b; + return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_DOUBLE ? QueryConstants.NULL_DOUBLE : a + b; } public static double[] plusArray(char a[], double b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to add two arrays (char, double) of different length" + + throw new IllegalArgumentException("Attempt to add two arrays (char, double) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -1173,15 +1083,12 @@ public static double[] plusArray(char a, double b[]) { } public static long plus(char a, long b) { - return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_LONG - ? QueryConstants.NULL_LONG - : a + b; + return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_LONG ? 
QueryConstants.NULL_LONG : a + b; } public static long[] plusArray(char a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to add two arrays (char, long) of different length" + + throw new IllegalArgumentException("Attempt to add two arrays (char, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); long[] ret = new long[a.length]; @@ -1211,15 +1118,12 @@ public static long[] plusArray(char a, long b[]) { } public static float plus(char a, float b) { - return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_FLOAT - ? QueryConstants.NULL_FLOAT - : a + b; + return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_FLOAT ? QueryConstants.NULL_FLOAT : a + b; } public static float[] plusArray(char a[], float b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to add two arrays (char, float) of different length" + + throw new IllegalArgumentException("Attempt to add two arrays (char, float) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); float[] ret = new float[a.length]; @@ -1249,15 +1153,12 @@ public static float[] plusArray(char a, float b[]) { } public static int plus(char a, char b) { - return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_CHAR - ? QueryConstants.NULL_INT - : a + b; + return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_CHAR ? 
QueryConstants.NULL_INT : a + b; } public static int[] plusArray(char a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to add two arrays (char, char) of different length" + + throw new IllegalArgumentException("Attempt to add two arrays (char, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -1287,15 +1188,12 @@ public static int[] plusArray(char a, char b[]) { } public static int plus(char a, byte b) { - return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_BYTE - ? QueryConstants.NULL_INT - : a + b; + return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_BYTE ? QueryConstants.NULL_INT : a + b; } public static int[] plusArray(char a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to add two arrays (char, byte) of different length" + + throw new IllegalArgumentException("Attempt to add two arrays (char, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -1325,15 +1223,12 @@ public static int[] plusArray(char a, byte b[]) { } public static int plus(char a, short b) { - return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_SHORT - ? QueryConstants.NULL_INT - : a + b; + return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_SHORT ? 
QueryConstants.NULL_INT : a + b; } public static int[] plusArray(char a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to add two arrays (char, short) of different length" + + throw new IllegalArgumentException("Attempt to add two arrays (char, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -1363,15 +1258,12 @@ public static int[] plusArray(char a, short b[]) { } public static int plus(byte a, int b) { - return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_INT - ? QueryConstants.NULL_INT - : a + b; + return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_INT ? QueryConstants.NULL_INT : a + b; } public static int[] plusArray(byte a[], int b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to add two arrays (byte, int) of different length" + + throw new IllegalArgumentException("Attempt to add two arrays (byte, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -1401,15 +1293,12 @@ public static int[] plusArray(byte a, int b[]) { } public static double plus(byte a, double b) { - return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_DOUBLE - ? QueryConstants.NULL_DOUBLE - : a + b; + return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_DOUBLE ? 
QueryConstants.NULL_DOUBLE : a + b; } public static double[] plusArray(byte a[], double b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to add two arrays (byte, double) of different length" + + throw new IllegalArgumentException("Attempt to add two arrays (byte, double) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -1439,15 +1328,12 @@ public static double[] plusArray(byte a, double b[]) { } public static long plus(byte a, long b) { - return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_LONG - ? QueryConstants.NULL_LONG - : a + b; + return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_LONG ? QueryConstants.NULL_LONG : a + b; } public static long[] plusArray(byte a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to add two arrays (byte, long) of different length" + + throw new IllegalArgumentException("Attempt to add two arrays (byte, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); long[] ret = new long[a.length]; @@ -1477,15 +1363,12 @@ public static long[] plusArray(byte a, long b[]) { } public static float plus(byte a, float b) { - return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_FLOAT - ? QueryConstants.NULL_FLOAT - : a + b; + return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_FLOAT ? 
QueryConstants.NULL_FLOAT : a + b; } public static float[] plusArray(byte a[], float b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to add two arrays (byte, float) of different length" + + throw new IllegalArgumentException("Attempt to add two arrays (byte, float) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); float[] ret = new float[a.length]; @@ -1515,15 +1398,12 @@ public static float[] plusArray(byte a, float b[]) { } public static int plus(byte a, char b) { - return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_CHAR - ? QueryConstants.NULL_INT - : a + b; + return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_CHAR ? QueryConstants.NULL_INT : a + b; } public static int[] plusArray(byte a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to add two arrays (byte, char) of different length" + + throw new IllegalArgumentException("Attempt to add two arrays (byte, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -1553,15 +1433,12 @@ public static int[] plusArray(byte a, char b[]) { } public static int plus(byte a, byte b) { - return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_BYTE - ? QueryConstants.NULL_INT - : a + b; + return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_BYTE ? 
QueryConstants.NULL_INT : a + b; } public static int[] plusArray(byte a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to add two arrays (byte, byte) of different length" + + throw new IllegalArgumentException("Attempt to add two arrays (byte, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -1591,15 +1468,12 @@ public static int[] plusArray(byte a, byte b[]) { } public static int plus(byte a, short b) { - return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_SHORT - ? QueryConstants.NULL_INT - : a + b; + return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_SHORT ? QueryConstants.NULL_INT : a + b; } public static int[] plusArray(byte a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to add two arrays (byte, short) of different length" + + throw new IllegalArgumentException("Attempt to add two arrays (byte, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -1629,15 +1503,12 @@ public static int[] plusArray(byte a, short b[]) { } public static int plus(short a, int b) { - return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_INT - ? QueryConstants.NULL_INT - : a + b; + return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_INT ? 
QueryConstants.NULL_INT : a + b; } public static int[] plusArray(short a[], int b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to add two arrays (short, int) of different length" + + throw new IllegalArgumentException("Attempt to add two arrays (short, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -1667,15 +1538,12 @@ public static int[] plusArray(short a, int b[]) { } public static double plus(short a, double b) { - return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_DOUBLE - ? QueryConstants.NULL_DOUBLE - : a + b; + return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_DOUBLE ? QueryConstants.NULL_DOUBLE : a + b; } public static double[] plusArray(short a[], double b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to add two arrays (short, double) of different length" + + throw new IllegalArgumentException("Attempt to add two arrays (short, double) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -1705,15 +1573,12 @@ public static double[] plusArray(short a, double b[]) { } public static long plus(short a, long b) { - return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_LONG - ? QueryConstants.NULL_LONG - : a + b; + return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_LONG ? 
QueryConstants.NULL_LONG : a + b; } public static long[] plusArray(short a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to add two arrays (short, long) of different length" + + throw new IllegalArgumentException("Attempt to add two arrays (short, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); long[] ret = new long[a.length]; @@ -1743,15 +1608,12 @@ public static long[] plusArray(short a, long b[]) { } public static float plus(short a, float b) { - return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_FLOAT - ? QueryConstants.NULL_FLOAT - : a + b; + return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_FLOAT ? QueryConstants.NULL_FLOAT : a + b; } public static float[] plusArray(short a[], float b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to add two arrays (short, float) of different length" + + throw new IllegalArgumentException("Attempt to add two arrays (short, float) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); float[] ret = new float[a.length]; @@ -1781,15 +1643,12 @@ public static float[] plusArray(short a, float b[]) { } public static int plus(short a, char b) { - return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_CHAR - ? QueryConstants.NULL_INT - : a + b; + return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_CHAR ? 
QueryConstants.NULL_INT : a + b; } public static int[] plusArray(short a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to add two arrays (short, char) of different length" + + throw new IllegalArgumentException("Attempt to add two arrays (short, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -1819,15 +1678,12 @@ public static int[] plusArray(short a, char b[]) { } public static int plus(short a, byte b) { - return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_BYTE - ? QueryConstants.NULL_INT - : a + b; + return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_BYTE ? QueryConstants.NULL_INT : a + b; } public static int[] plusArray(short a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to add two arrays (short, byte) of different length" + + throw new IllegalArgumentException("Attempt to add two arrays (short, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -1857,15 +1713,12 @@ public static int[] plusArray(short a, byte b[]) { } public static int plus(short a, short b) { - return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_SHORT - ? QueryConstants.NULL_INT - : a + b; + return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_SHORT ? 
QueryConstants.NULL_INT : a + b; } public static int[] plusArray(short a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to add two arrays (short, short) of different length" + + throw new IllegalArgumentException("Attempt to add two arrays (short, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -1895,15 +1748,12 @@ public static int[] plusArray(short a, short b[]) { } public static int minus(int a, int b) { - return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_INT - ? QueryConstants.NULL_INT - : a - b; + return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_INT ? QueryConstants.NULL_INT : a - b; } public static int[] minusArray(int a[], int b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to subtract two arrays (int, int) of different length" + + throw new IllegalArgumentException("Attempt to subtract two arrays (int, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -1933,15 +1783,12 @@ public static int[] minusArray(int a, int b[]) { } public static double minus(int a, double b) { - return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_DOUBLE - ? QueryConstants.NULL_DOUBLE - : a - b; + return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_DOUBLE ? 
QueryConstants.NULL_DOUBLE : a - b; } public static double[] minusArray(int a[], double b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to subtract two arrays (int, double) of different length" + + throw new IllegalArgumentException("Attempt to subtract two arrays (int, double) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -1971,15 +1818,12 @@ public static double[] minusArray(int a, double b[]) { } public static long minus(int a, long b) { - return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_LONG - ? QueryConstants.NULL_LONG - : a - b; + return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_LONG ? QueryConstants.NULL_LONG : a - b; } public static long[] minusArray(int a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to subtract two arrays (int, long) of different length" + + throw new IllegalArgumentException("Attempt to subtract two arrays (int, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); long[] ret = new long[a.length]; @@ -2009,15 +1853,12 @@ public static long[] minusArray(int a, long b[]) { } public static float minus(int a, float b) { - return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_FLOAT - ? QueryConstants.NULL_FLOAT - : a - b; + return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_FLOAT ? 
QueryConstants.NULL_FLOAT : a - b; } public static float[] minusArray(int a[], float b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to subtract two arrays (int, float) of different length" + + throw new IllegalArgumentException("Attempt to subtract two arrays (int, float) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); float[] ret = new float[a.length]; @@ -2047,15 +1888,12 @@ public static float[] minusArray(int a, float b[]) { } public static int minus(int a, char b) { - return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_CHAR - ? QueryConstants.NULL_INT - : a - b; + return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_CHAR ? QueryConstants.NULL_INT : a - b; } public static int[] minusArray(int a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to subtract two arrays (int, char) of different length" + + throw new IllegalArgumentException("Attempt to subtract two arrays (int, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -2085,15 +1923,12 @@ public static int[] minusArray(int a, char b[]) { } public static int minus(int a, byte b) { - return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_BYTE - ? QueryConstants.NULL_INT - : a - b; + return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_BYTE ? 
QueryConstants.NULL_INT : a - b; } public static int[] minusArray(int a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to subtract two arrays (int, byte) of different length" + + throw new IllegalArgumentException("Attempt to subtract two arrays (int, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -2123,15 +1958,12 @@ public static int[] minusArray(int a, byte b[]) { } public static int minus(int a, short b) { - return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_SHORT - ? QueryConstants.NULL_INT - : a - b; + return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_SHORT ? QueryConstants.NULL_INT : a - b; } public static int[] minusArray(int a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to subtract two arrays (int, short) of different length" + + throw new IllegalArgumentException("Attempt to subtract two arrays (int, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -2161,15 +1993,12 @@ public static int[] minusArray(int a, short b[]) { } public static double minus(double a, int b) { - return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_INT - ? QueryConstants.NULL_DOUBLE - : a - b; + return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_INT ? 
QueryConstants.NULL_DOUBLE : a - b; } public static double[] minusArray(double a[], int b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to subtract two arrays (double, int) of different length" + + throw new IllegalArgumentException("Attempt to subtract two arrays (double, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -2199,15 +2028,12 @@ public static double[] minusArray(double a, int b[]) { } public static double minus(double a, double b) { - return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_DOUBLE - ? QueryConstants.NULL_DOUBLE - : a - b; + return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_DOUBLE ? QueryConstants.NULL_DOUBLE : a - b; } public static double[] minusArray(double a[], double b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to subtract two arrays (double, double) of different length" + + throw new IllegalArgumentException("Attempt to subtract two arrays (double, double) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -2237,15 +2063,12 @@ public static double[] minusArray(double a, double b[]) { } public static double minus(double a, long b) { - return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_LONG - ? QueryConstants.NULL_DOUBLE - : a - b; + return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_LONG ? 
QueryConstants.NULL_DOUBLE : a - b; } public static double[] minusArray(double a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to subtract two arrays (double, long) of different length" + + throw new IllegalArgumentException("Attempt to subtract two arrays (double, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -2275,15 +2098,12 @@ public static double[] minusArray(double a, long b[]) { } public static double minus(double a, float b) { - return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_FLOAT - ? QueryConstants.NULL_DOUBLE - : a - b; + return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_FLOAT ? QueryConstants.NULL_DOUBLE : a - b; } public static double[] minusArray(double a[], float b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to subtract two arrays (double, float) of different length" + + throw new IllegalArgumentException("Attempt to subtract two arrays (double, float) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -2313,15 +2133,12 @@ public static double[] minusArray(double a, float b[]) { } public static double minus(double a, char b) { - return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_CHAR - ? QueryConstants.NULL_DOUBLE - : a - b; + return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_CHAR ? 
QueryConstants.NULL_DOUBLE : a - b; } public static double[] minusArray(double a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to subtract two arrays (double, char) of different length" + + throw new IllegalArgumentException("Attempt to subtract two arrays (double, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -2351,15 +2168,12 @@ public static double[] minusArray(double a, char b[]) { } public static double minus(double a, byte b) { - return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_BYTE - ? QueryConstants.NULL_DOUBLE - : a - b; + return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_BYTE ? QueryConstants.NULL_DOUBLE : a - b; } public static double[] minusArray(double a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to subtract two arrays (double, byte) of different length" + + throw new IllegalArgumentException("Attempt to subtract two arrays (double, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -2389,15 +2203,12 @@ public static double[] minusArray(double a, byte b[]) { } public static double minus(double a, short b) { - return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_SHORT - ? QueryConstants.NULL_DOUBLE - : a - b; + return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_SHORT ? 
QueryConstants.NULL_DOUBLE : a - b; } public static double[] minusArray(double a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to subtract two arrays (double, short) of different length" + + throw new IllegalArgumentException("Attempt to subtract two arrays (double, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -2427,15 +2238,12 @@ public static double[] minusArray(double a, short b[]) { } public static long minus(long a, int b) { - return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_INT - ? QueryConstants.NULL_LONG - : a - b; + return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_INT ? QueryConstants.NULL_LONG : a - b; } public static long[] minusArray(long a[], int b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to subtract two arrays (long, int) of different length" + + throw new IllegalArgumentException("Attempt to subtract two arrays (long, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); long[] ret = new long[a.length]; @@ -2465,15 +2273,12 @@ public static long[] minusArray(long a, int b[]) { } public static double minus(long a, double b) { - return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_DOUBLE - ? QueryConstants.NULL_DOUBLE - : a - b; + return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_DOUBLE ? 
QueryConstants.NULL_DOUBLE : a - b; } public static double[] minusArray(long a[], double b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to subtract two arrays (long, double) of different length" + + throw new IllegalArgumentException("Attempt to subtract two arrays (long, double) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -2503,15 +2308,12 @@ public static double[] minusArray(long a, double b[]) { } public static long minus(long a, long b) { - return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_LONG - ? QueryConstants.NULL_LONG - : a - b; + return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_LONG ? QueryConstants.NULL_LONG : a - b; } public static long[] minusArray(long a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to subtract two arrays (long, long) of different length" + + throw new IllegalArgumentException("Attempt to subtract two arrays (long, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); long[] ret = new long[a.length]; @@ -2541,15 +2343,12 @@ public static long[] minusArray(long a, long b[]) { } public static float minus(long a, float b) { - return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_FLOAT - ? QueryConstants.NULL_FLOAT - : a - b; + return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_FLOAT ? 
QueryConstants.NULL_FLOAT : a - b; } public static float[] minusArray(long a[], float b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to subtract two arrays (long, float) of different length" + + throw new IllegalArgumentException("Attempt to subtract two arrays (long, float) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); float[] ret = new float[a.length]; @@ -2579,15 +2378,12 @@ public static float[] minusArray(long a, float b[]) { } public static long minus(long a, char b) { - return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_CHAR - ? QueryConstants.NULL_LONG - : a - b; + return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_CHAR ? QueryConstants.NULL_LONG : a - b; } public static long[] minusArray(long a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to subtract two arrays (long, char) of different length" + + throw new IllegalArgumentException("Attempt to subtract two arrays (long, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); long[] ret = new long[a.length]; @@ -2617,15 +2413,12 @@ public static long[] minusArray(long a, char b[]) { } public static long minus(long a, byte b) { - return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_BYTE - ? QueryConstants.NULL_LONG - : a - b; + return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_BYTE ? 
QueryConstants.NULL_LONG : a - b; } public static long[] minusArray(long a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to subtract two arrays (long, byte) of different length" + + throw new IllegalArgumentException("Attempt to subtract two arrays (long, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); long[] ret = new long[a.length]; @@ -2655,15 +2448,12 @@ public static long[] minusArray(long a, byte b[]) { } public static long minus(long a, short b) { - return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_SHORT - ? QueryConstants.NULL_LONG - : a - b; + return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_SHORT ? QueryConstants.NULL_LONG : a - b; } public static long[] minusArray(long a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to subtract two arrays (long, short) of different length" + + throw new IllegalArgumentException("Attempt to subtract two arrays (long, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); long[] ret = new long[a.length]; @@ -2693,15 +2483,12 @@ public static long[] minusArray(long a, short b[]) { } public static float minus(float a, int b) { - return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_INT - ? QueryConstants.NULL_FLOAT - : a - b; + return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_INT ? 
QueryConstants.NULL_FLOAT : a - b; } public static float[] minusArray(float a[], int b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to subtract two arrays (float, int) of different length" + + throw new IllegalArgumentException("Attempt to subtract two arrays (float, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); float[] ret = new float[a.length]; @@ -2731,15 +2518,12 @@ public static float[] minusArray(float a, int b[]) { } public static double minus(float a, double b) { - return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_DOUBLE - ? QueryConstants.NULL_DOUBLE - : a - b; + return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_DOUBLE ? QueryConstants.NULL_DOUBLE : a - b; } public static double[] minusArray(float a[], double b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to subtract two arrays (float, double) of different length" + + throw new IllegalArgumentException("Attempt to subtract two arrays (float, double) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -2769,15 +2553,12 @@ public static double[] minusArray(float a, double b[]) { } public static float minus(float a, long b) { - return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_LONG - ? QueryConstants.NULL_FLOAT - : a - b; + return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_LONG ? 
QueryConstants.NULL_FLOAT : a - b; } public static float[] minusArray(float a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to subtract two arrays (float, long) of different length" + + throw new IllegalArgumentException("Attempt to subtract two arrays (float, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); float[] ret = new float[a.length]; @@ -2807,15 +2588,12 @@ public static float[] minusArray(float a, long b[]) { } public static float minus(float a, float b) { - return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_FLOAT - ? QueryConstants.NULL_FLOAT - : a - b; + return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_FLOAT ? QueryConstants.NULL_FLOAT : a - b; } public static float[] minusArray(float a[], float b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to subtract two arrays (float, float) of different length" + + throw new IllegalArgumentException("Attempt to subtract two arrays (float, float) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); float[] ret = new float[a.length]; @@ -2845,15 +2623,12 @@ public static float[] minusArray(float a, float b[]) { } public static float minus(float a, char b) { - return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_CHAR - ? QueryConstants.NULL_FLOAT - : a - b; + return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_CHAR ? 
QueryConstants.NULL_FLOAT : a - b; } public static float[] minusArray(float a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to subtract two arrays (float, char) of different length" + + throw new IllegalArgumentException("Attempt to subtract two arrays (float, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); float[] ret = new float[a.length]; @@ -2883,15 +2658,12 @@ public static float[] minusArray(float a, char b[]) { } public static float minus(float a, byte b) { - return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_BYTE - ? QueryConstants.NULL_FLOAT - : a - b; + return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_BYTE ? QueryConstants.NULL_FLOAT : a - b; } public static float[] minusArray(float a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to subtract two arrays (float, byte) of different length" + + throw new IllegalArgumentException("Attempt to subtract two arrays (float, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); float[] ret = new float[a.length]; @@ -2921,15 +2693,12 @@ public static float[] minusArray(float a, byte b[]) { } public static float minus(float a, short b) { - return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_SHORT - ? QueryConstants.NULL_FLOAT - : a - b; + return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_SHORT ? 
QueryConstants.NULL_FLOAT : a - b; } public static float[] minusArray(float a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to subtract two arrays (float, short) of different length" + + throw new IllegalArgumentException("Attempt to subtract two arrays (float, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); float[] ret = new float[a.length]; @@ -2959,15 +2728,12 @@ public static float[] minusArray(float a, short b[]) { } public static int minus(char a, int b) { - return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_INT - ? QueryConstants.NULL_INT - : a - b; + return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_INT ? QueryConstants.NULL_INT : a - b; } public static int[] minusArray(char a[], int b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to subtract two arrays (char, int) of different length" + + throw new IllegalArgumentException("Attempt to subtract two arrays (char, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -2997,15 +2763,12 @@ public static int[] minusArray(char a, int b[]) { } public static double minus(char a, double b) { - return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_DOUBLE - ? QueryConstants.NULL_DOUBLE - : a - b; + return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_DOUBLE ? 
QueryConstants.NULL_DOUBLE : a - b; } public static double[] minusArray(char a[], double b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to subtract two arrays (char, double) of different length" + + throw new IllegalArgumentException("Attempt to subtract two arrays (char, double) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -3035,15 +2798,12 @@ public static double[] minusArray(char a, double b[]) { } public static long minus(char a, long b) { - return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_LONG - ? QueryConstants.NULL_LONG - : a - b; + return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_LONG ? QueryConstants.NULL_LONG : a - b; } public static long[] minusArray(char a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to subtract two arrays (char, long) of different length" + + throw new IllegalArgumentException("Attempt to subtract two arrays (char, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); long[] ret = new long[a.length]; @@ -3073,15 +2833,12 @@ public static long[] minusArray(char a, long b[]) { } public static float minus(char a, float b) { - return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_FLOAT - ? QueryConstants.NULL_FLOAT - : a - b; + return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_FLOAT ? 
QueryConstants.NULL_FLOAT : a - b; } public static float[] minusArray(char a[], float b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to subtract two arrays (char, float) of different length" + + throw new IllegalArgumentException("Attempt to subtract two arrays (char, float) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); float[] ret = new float[a.length]; @@ -3111,15 +2868,12 @@ public static float[] minusArray(char a, float b[]) { } public static int minus(char a, char b) { - return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_CHAR - ? QueryConstants.NULL_INT - : a - b; + return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_CHAR ? QueryConstants.NULL_INT : a - b; } public static int[] minusArray(char a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to subtract two arrays (char, char) of different length" + + throw new IllegalArgumentException("Attempt to subtract two arrays (char, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -3149,15 +2903,12 @@ public static int[] minusArray(char a, char b[]) { } public static int minus(char a, byte b) { - return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_BYTE - ? QueryConstants.NULL_INT - : a - b; + return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_BYTE ? 
QueryConstants.NULL_INT : a - b; } public static int[] minusArray(char a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to subtract two arrays (char, byte) of different length" + + throw new IllegalArgumentException("Attempt to subtract two arrays (char, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -3187,15 +2938,12 @@ public static int[] minusArray(char a, byte b[]) { } public static int minus(char a, short b) { - return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_SHORT - ? QueryConstants.NULL_INT - : a - b; + return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_SHORT ? QueryConstants.NULL_INT : a - b; } public static int[] minusArray(char a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to subtract two arrays (char, short) of different length" + + throw new IllegalArgumentException("Attempt to subtract two arrays (char, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -3225,15 +2973,12 @@ public static int[] minusArray(char a, short b[]) { } public static int minus(byte a, int b) { - return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_INT - ? QueryConstants.NULL_INT - : a - b; + return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_INT ? 
QueryConstants.NULL_INT : a - b; } public static int[] minusArray(byte a[], int b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to subtract two arrays (byte, int) of different length" + + throw new IllegalArgumentException("Attempt to subtract two arrays (byte, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -3263,15 +3008,12 @@ public static int[] minusArray(byte a, int b[]) { } public static double minus(byte a, double b) { - return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_DOUBLE - ? QueryConstants.NULL_DOUBLE - : a - b; + return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_DOUBLE ? QueryConstants.NULL_DOUBLE : a - b; } public static double[] minusArray(byte a[], double b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to subtract two arrays (byte, double) of different length" + + throw new IllegalArgumentException("Attempt to subtract two arrays (byte, double) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -3301,15 +3043,12 @@ public static double[] minusArray(byte a, double b[]) { } public static long minus(byte a, long b) { - return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_LONG - ? QueryConstants.NULL_LONG - : a - b; + return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_LONG ? 
QueryConstants.NULL_LONG : a - b; } public static long[] minusArray(byte a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to subtract two arrays (byte, long) of different length" + + throw new IllegalArgumentException("Attempt to subtract two arrays (byte, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); long[] ret = new long[a.length]; @@ -3339,15 +3078,12 @@ public static long[] minusArray(byte a, long b[]) { } public static float minus(byte a, float b) { - return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_FLOAT - ? QueryConstants.NULL_FLOAT - : a - b; + return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_FLOAT ? QueryConstants.NULL_FLOAT : a - b; } public static float[] minusArray(byte a[], float b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to subtract two arrays (byte, float) of different length" + + throw new IllegalArgumentException("Attempt to subtract two arrays (byte, float) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); float[] ret = new float[a.length]; @@ -3377,15 +3113,12 @@ public static float[] minusArray(byte a, float b[]) { } public static int minus(byte a, char b) { - return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_CHAR - ? QueryConstants.NULL_INT - : a - b; + return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_CHAR ? 
QueryConstants.NULL_INT : a - b; } public static int[] minusArray(byte a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to subtract two arrays (byte, char) of different length" + + throw new IllegalArgumentException("Attempt to subtract two arrays (byte, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -3415,15 +3148,12 @@ public static int[] minusArray(byte a, char b[]) { } public static int minus(byte a, byte b) { - return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_BYTE - ? QueryConstants.NULL_INT - : a - b; + return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_BYTE ? QueryConstants.NULL_INT : a - b; } public static int[] minusArray(byte a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to subtract two arrays (byte, byte) of different length" + + throw new IllegalArgumentException("Attempt to subtract two arrays (byte, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -3453,15 +3183,12 @@ public static int[] minusArray(byte a, byte b[]) { } public static int minus(byte a, short b) { - return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_SHORT - ? QueryConstants.NULL_INT - : a - b; + return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_SHORT ? 
QueryConstants.NULL_INT : a - b; } public static int[] minusArray(byte a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to subtract two arrays (byte, short) of different length" + + throw new IllegalArgumentException("Attempt to subtract two arrays (byte, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -3491,15 +3218,12 @@ public static int[] minusArray(byte a, short b[]) { } public static int minus(short a, int b) { - return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_INT - ? QueryConstants.NULL_INT - : a - b; + return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_INT ? QueryConstants.NULL_INT : a - b; } public static int[] minusArray(short a[], int b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to subtract two arrays (short, int) of different length" + + throw new IllegalArgumentException("Attempt to subtract two arrays (short, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -3529,15 +3253,12 @@ public static int[] minusArray(short a, int b[]) { } public static double minus(short a, double b) { - return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_DOUBLE - ? QueryConstants.NULL_DOUBLE - : a - b; + return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_DOUBLE ? 
QueryConstants.NULL_DOUBLE : a - b; } public static double[] minusArray(short a[], double b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to subtract two arrays (short, double) of different length" + + throw new IllegalArgumentException("Attempt to subtract two arrays (short, double) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -3567,15 +3288,12 @@ public static double[] minusArray(short a, double b[]) { } public static long minus(short a, long b) { - return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_LONG - ? QueryConstants.NULL_LONG - : a - b; + return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_LONG ? QueryConstants.NULL_LONG : a - b; } public static long[] minusArray(short a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to subtract two arrays (short, long) of different length" + + throw new IllegalArgumentException("Attempt to subtract two arrays (short, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); long[] ret = new long[a.length]; @@ -3605,15 +3323,12 @@ public static long[] minusArray(short a, long b[]) { } public static float minus(short a, float b) { - return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_FLOAT - ? QueryConstants.NULL_FLOAT - : a - b; + return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_FLOAT ? 
QueryConstants.NULL_FLOAT : a - b; } public static float[] minusArray(short a[], float b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to subtract two arrays (short, float) of different length" + + throw new IllegalArgumentException("Attempt to subtract two arrays (short, float) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); float[] ret = new float[a.length]; @@ -3643,15 +3358,12 @@ public static float[] minusArray(short a, float b[]) { } public static int minus(short a, char b) { - return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_CHAR - ? QueryConstants.NULL_INT - : a - b; + return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_CHAR ? QueryConstants.NULL_INT : a - b; } public static int[] minusArray(short a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to subtract two arrays (short, char) of different length" + + throw new IllegalArgumentException("Attempt to subtract two arrays (short, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -3681,15 +3393,12 @@ public static int[] minusArray(short a, char b[]) { } public static int minus(short a, byte b) { - return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_BYTE - ? QueryConstants.NULL_INT - : a - b; + return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_BYTE ? 
QueryConstants.NULL_INT : a - b; } public static int[] minusArray(short a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to subtract two arrays (short, byte) of different length" + + throw new IllegalArgumentException("Attempt to subtract two arrays (short, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -3719,15 +3428,12 @@ public static int[] minusArray(short a, byte b[]) { } public static int minus(short a, short b) { - return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_SHORT - ? QueryConstants.NULL_INT - : a - b; + return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_SHORT ? QueryConstants.NULL_INT : a - b; } public static int[] minusArray(short a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to subtract two arrays (short, short) of different length" + + throw new IllegalArgumentException("Attempt to subtract two arrays (short, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -3757,15 +3463,12 @@ public static int[] minusArray(short a, short b[]) { } public static int times(int a, int b) { - return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_INT - ? QueryConstants.NULL_INT - : a * b; + return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_INT ? 
QueryConstants.NULL_INT : a * b; } public static int[] timesArray(int a[], int b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to multiply two arrays (int, int) of different length" + + throw new IllegalArgumentException("Attempt to multiply two arrays (int, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -3795,15 +3498,12 @@ public static int[] timesArray(int a, int b[]) { } public static double times(int a, double b) { - return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_DOUBLE - ? QueryConstants.NULL_DOUBLE - : a * b; + return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_DOUBLE ? QueryConstants.NULL_DOUBLE : a * b; } public static double[] timesArray(int a[], double b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to multiply two arrays (int, double) of different length" + + throw new IllegalArgumentException("Attempt to multiply two arrays (int, double) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -3833,15 +3533,12 @@ public static double[] timesArray(int a, double b[]) { } public static long times(int a, long b) { - return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_LONG - ? QueryConstants.NULL_LONG - : a * b; + return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_LONG ? 
QueryConstants.NULL_LONG : a * b; } public static long[] timesArray(int a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to multiply two arrays (int, long) of different length" + + throw new IllegalArgumentException("Attempt to multiply two arrays (int, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); long[] ret = new long[a.length]; @@ -3871,15 +3568,12 @@ public static long[] timesArray(int a, long b[]) { } public static float times(int a, float b) { - return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_FLOAT - ? QueryConstants.NULL_FLOAT - : a * b; + return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_FLOAT ? QueryConstants.NULL_FLOAT : a * b; } public static float[] timesArray(int a[], float b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to multiply two arrays (int, float) of different length" + + throw new IllegalArgumentException("Attempt to multiply two arrays (int, float) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); float[] ret = new float[a.length]; @@ -3909,15 +3603,12 @@ public static float[] timesArray(int a, float b[]) { } public static int times(int a, char b) { - return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_CHAR - ? QueryConstants.NULL_INT - : a * b; + return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_CHAR ? 
QueryConstants.NULL_INT : a * b; } public static int[] timesArray(int a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to multiply two arrays (int, char) of different length" + + throw new IllegalArgumentException("Attempt to multiply two arrays (int, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -3947,15 +3638,12 @@ public static int[] timesArray(int a, char b[]) { } public static int times(int a, byte b) { - return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_BYTE - ? QueryConstants.NULL_INT - : a * b; + return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_BYTE ? QueryConstants.NULL_INT : a * b; } public static int[] timesArray(int a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to multiply two arrays (int, byte) of different length" + + throw new IllegalArgumentException("Attempt to multiply two arrays (int, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -3985,15 +3673,12 @@ public static int[] timesArray(int a, byte b[]) { } public static int times(int a, short b) { - return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_SHORT - ? QueryConstants.NULL_INT - : a * b; + return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_SHORT ? 
QueryConstants.NULL_INT : a * b; } public static int[] timesArray(int a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to multiply two arrays (int, short) of different length" + + throw new IllegalArgumentException("Attempt to multiply two arrays (int, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -4023,15 +3708,12 @@ public static int[] timesArray(int a, short b[]) { } public static double times(double a, int b) { - return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_INT - ? QueryConstants.NULL_DOUBLE - : a * b; + return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_INT ? QueryConstants.NULL_DOUBLE : a * b; } public static double[] timesArray(double a[], int b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to multiply two arrays (double, int) of different length" + + throw new IllegalArgumentException("Attempt to multiply two arrays (double, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -4061,15 +3743,12 @@ public static double[] timesArray(double a, int b[]) { } public static double times(double a, double b) { - return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_DOUBLE - ? QueryConstants.NULL_DOUBLE - : a * b; + return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_DOUBLE ? 
QueryConstants.NULL_DOUBLE : a * b; } public static double[] timesArray(double a[], double b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to multiply two arrays (double, double) of different length" + + throw new IllegalArgumentException("Attempt to multiply two arrays (double, double) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -4099,15 +3778,12 @@ public static double[] timesArray(double a, double b[]) { } public static double times(double a, long b) { - return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_LONG - ? QueryConstants.NULL_DOUBLE - : a * b; + return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_LONG ? QueryConstants.NULL_DOUBLE : a * b; } public static double[] timesArray(double a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to multiply two arrays (double, long) of different length" + + throw new IllegalArgumentException("Attempt to multiply two arrays (double, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -4137,15 +3813,12 @@ public static double[] timesArray(double a, long b[]) { } public static double times(double a, float b) { - return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_FLOAT - ? QueryConstants.NULL_DOUBLE - : a * b; + return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_FLOAT ? 
QueryConstants.NULL_DOUBLE : a * b; } public static double[] timesArray(double a[], float b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to multiply two arrays (double, float) of different length" + + throw new IllegalArgumentException("Attempt to multiply two arrays (double, float) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -4175,15 +3848,12 @@ public static double[] timesArray(double a, float b[]) { } public static double times(double a, char b) { - return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_CHAR - ? QueryConstants.NULL_DOUBLE - : a * b; + return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_CHAR ? QueryConstants.NULL_DOUBLE : a * b; } public static double[] timesArray(double a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to multiply two arrays (double, char) of different length" + + throw new IllegalArgumentException("Attempt to multiply two arrays (double, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -4213,15 +3883,12 @@ public static double[] timesArray(double a, char b[]) { } public static double times(double a, byte b) { - return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_BYTE - ? QueryConstants.NULL_DOUBLE - : a * b; + return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_BYTE ? 
QueryConstants.NULL_DOUBLE : a * b; } public static double[] timesArray(double a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to multiply two arrays (double, byte) of different length" + + throw new IllegalArgumentException("Attempt to multiply two arrays (double, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -4251,15 +3918,12 @@ public static double[] timesArray(double a, byte b[]) { } public static double times(double a, short b) { - return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_SHORT - ? QueryConstants.NULL_DOUBLE - : a * b; + return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_SHORT ? QueryConstants.NULL_DOUBLE : a * b; } public static double[] timesArray(double a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to multiply two arrays (double, short) of different length" + + throw new IllegalArgumentException("Attempt to multiply two arrays (double, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -4289,15 +3953,12 @@ public static double[] timesArray(double a, short b[]) { } public static long times(long a, int b) { - return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_INT - ? QueryConstants.NULL_LONG - : a * b; + return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_INT ? 
QueryConstants.NULL_LONG : a * b; } public static long[] timesArray(long a[], int b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to multiply two arrays (long, int) of different length" + + throw new IllegalArgumentException("Attempt to multiply two arrays (long, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); long[] ret = new long[a.length]; @@ -4327,15 +3988,12 @@ public static long[] timesArray(long a, int b[]) { } public static double times(long a, double b) { - return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_DOUBLE - ? QueryConstants.NULL_DOUBLE - : a * b; + return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_DOUBLE ? QueryConstants.NULL_DOUBLE : a * b; } public static double[] timesArray(long a[], double b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to multiply two arrays (long, double) of different length" + + throw new IllegalArgumentException("Attempt to multiply two arrays (long, double) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -4365,15 +4023,12 @@ public static double[] timesArray(long a, double b[]) { } public static long times(long a, long b) { - return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_LONG - ? QueryConstants.NULL_LONG - : a * b; + return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_LONG ? 
QueryConstants.NULL_LONG : a * b; } public static long[] timesArray(long a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to multiply two arrays (long, long) of different length" + + throw new IllegalArgumentException("Attempt to multiply two arrays (long, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); long[] ret = new long[a.length]; @@ -4403,15 +4058,12 @@ public static long[] timesArray(long a, long b[]) { } public static float times(long a, float b) { - return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_FLOAT - ? QueryConstants.NULL_FLOAT - : a * b; + return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_FLOAT ? QueryConstants.NULL_FLOAT : a * b; } public static float[] timesArray(long a[], float b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to multiply two arrays (long, float) of different length" + + throw new IllegalArgumentException("Attempt to multiply two arrays (long, float) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); float[] ret = new float[a.length]; @@ -4441,15 +4093,12 @@ public static float[] timesArray(long a, float b[]) { } public static long times(long a, char b) { - return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_CHAR - ? QueryConstants.NULL_LONG - : a * b; + return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_CHAR ? 
QueryConstants.NULL_LONG : a * b; } public static long[] timesArray(long a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to multiply two arrays (long, char) of different length" + + throw new IllegalArgumentException("Attempt to multiply two arrays (long, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); long[] ret = new long[a.length]; @@ -4479,15 +4128,12 @@ public static long[] timesArray(long a, char b[]) { } public static long times(long a, byte b) { - return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_BYTE - ? QueryConstants.NULL_LONG - : a * b; + return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_BYTE ? QueryConstants.NULL_LONG : a * b; } public static long[] timesArray(long a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to multiply two arrays (long, byte) of different length" + + throw new IllegalArgumentException("Attempt to multiply two arrays (long, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); long[] ret = new long[a.length]; @@ -4517,15 +4163,12 @@ public static long[] timesArray(long a, byte b[]) { } public static long times(long a, short b) { - return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_SHORT - ? QueryConstants.NULL_LONG - : a * b; + return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_SHORT ? 
QueryConstants.NULL_LONG : a * b; } public static long[] timesArray(long a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to multiply two arrays (long, short) of different length" + + throw new IllegalArgumentException("Attempt to multiply two arrays (long, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); long[] ret = new long[a.length]; @@ -4555,15 +4198,12 @@ public static long[] timesArray(long a, short b[]) { } public static float times(float a, int b) { - return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_INT - ? QueryConstants.NULL_FLOAT - : a * b; + return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_INT ? QueryConstants.NULL_FLOAT : a * b; } public static float[] timesArray(float a[], int b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to multiply two arrays (float, int) of different length" + + throw new IllegalArgumentException("Attempt to multiply two arrays (float, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); float[] ret = new float[a.length]; @@ -4593,15 +4233,12 @@ public static float[] timesArray(float a, int b[]) { } public static double times(float a, double b) { - return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_DOUBLE - ? QueryConstants.NULL_DOUBLE - : a * b; + return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_DOUBLE ? 
QueryConstants.NULL_DOUBLE : a * b; } public static double[] timesArray(float a[], double b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to multiply two arrays (float, double) of different length" + + throw new IllegalArgumentException("Attempt to multiply two arrays (float, double) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -4631,15 +4268,12 @@ public static double[] timesArray(float a, double b[]) { } public static float times(float a, long b) { - return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_LONG - ? QueryConstants.NULL_FLOAT - : a * b; + return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_LONG ? QueryConstants.NULL_FLOAT : a * b; } public static float[] timesArray(float a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to multiply two arrays (float, long) of different length" + + throw new IllegalArgumentException("Attempt to multiply two arrays (float, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); float[] ret = new float[a.length]; @@ -4669,15 +4303,12 @@ public static float[] timesArray(float a, long b[]) { } public static float times(float a, float b) { - return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_FLOAT - ? QueryConstants.NULL_FLOAT - : a * b; + return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_FLOAT ? 
QueryConstants.NULL_FLOAT : a * b; } public static float[] timesArray(float a[], float b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to multiply two arrays (float, float) of different length" + + throw new IllegalArgumentException("Attempt to multiply two arrays (float, float) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); float[] ret = new float[a.length]; @@ -4707,15 +4338,12 @@ public static float[] timesArray(float a, float b[]) { } public static float times(float a, char b) { - return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_CHAR - ? QueryConstants.NULL_FLOAT - : a * b; + return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_CHAR ? QueryConstants.NULL_FLOAT : a * b; } public static float[] timesArray(float a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to multiply two arrays (float, char) of different length" + + throw new IllegalArgumentException("Attempt to multiply two arrays (float, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); float[] ret = new float[a.length]; @@ -4745,15 +4373,12 @@ public static float[] timesArray(float a, char b[]) { } public static float times(float a, byte b) { - return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_BYTE - ? QueryConstants.NULL_FLOAT - : a * b; + return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_BYTE ? 
QueryConstants.NULL_FLOAT : a * b; } public static float[] timesArray(float a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to multiply two arrays (float, byte) of different length" + + throw new IllegalArgumentException("Attempt to multiply two arrays (float, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); float[] ret = new float[a.length]; @@ -4783,15 +4408,12 @@ public static float[] timesArray(float a, byte b[]) { } public static float times(float a, short b) { - return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_SHORT - ? QueryConstants.NULL_FLOAT - : a * b; + return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_SHORT ? QueryConstants.NULL_FLOAT : a * b; } public static float[] timesArray(float a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to multiply two arrays (float, short) of different length" + + throw new IllegalArgumentException("Attempt to multiply two arrays (float, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); float[] ret = new float[a.length]; @@ -4821,15 +4443,12 @@ public static float[] timesArray(float a, short b[]) { } public static int times(char a, int b) { - return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_INT - ? QueryConstants.NULL_INT - : a * b; + return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_INT ? 
QueryConstants.NULL_INT : a * b; } public static int[] timesArray(char a[], int b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to multiply two arrays (char, int) of different length" + + throw new IllegalArgumentException("Attempt to multiply two arrays (char, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -4859,15 +4478,12 @@ public static int[] timesArray(char a, int b[]) { } public static double times(char a, double b) { - return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_DOUBLE - ? QueryConstants.NULL_DOUBLE - : a * b; + return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_DOUBLE ? QueryConstants.NULL_DOUBLE : a * b; } public static double[] timesArray(char a[], double b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to multiply two arrays (char, double) of different length" + + throw new IllegalArgumentException("Attempt to multiply two arrays (char, double) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -4897,15 +4513,12 @@ public static double[] timesArray(char a, double b[]) { } public static long times(char a, long b) { - return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_LONG - ? QueryConstants.NULL_LONG - : a * b; + return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_LONG ? 
QueryConstants.NULL_LONG : a * b; } public static long[] timesArray(char a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to multiply two arrays (char, long) of different length" + + throw new IllegalArgumentException("Attempt to multiply two arrays (char, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); long[] ret = new long[a.length]; @@ -4935,15 +4548,12 @@ public static long[] timesArray(char a, long b[]) { } public static float times(char a, float b) { - return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_FLOAT - ? QueryConstants.NULL_FLOAT - : a * b; + return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_FLOAT ? QueryConstants.NULL_FLOAT : a * b; } public static float[] timesArray(char a[], float b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to multiply two arrays (char, float) of different length" + + throw new IllegalArgumentException("Attempt to multiply two arrays (char, float) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); float[] ret = new float[a.length]; @@ -4973,15 +4583,12 @@ public static float[] timesArray(char a, float b[]) { } public static int times(char a, char b) { - return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_CHAR - ? QueryConstants.NULL_INT - : a * b; + return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_CHAR ? 
QueryConstants.NULL_INT : a * b; } public static int[] timesArray(char a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to multiply two arrays (char, char) of different length" + + throw new IllegalArgumentException("Attempt to multiply two arrays (char, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -5011,15 +4618,12 @@ public static int[] timesArray(char a, char b[]) { } public static int times(char a, byte b) { - return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_BYTE - ? QueryConstants.NULL_INT - : a * b; + return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_BYTE ? QueryConstants.NULL_INT : a * b; } public static int[] timesArray(char a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to multiply two arrays (char, byte) of different length" + + throw new IllegalArgumentException("Attempt to multiply two arrays (char, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -5049,15 +4653,12 @@ public static int[] timesArray(char a, byte b[]) { } public static int times(char a, short b) { - return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_SHORT - ? QueryConstants.NULL_INT - : a * b; + return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_SHORT ? 
QueryConstants.NULL_INT : a * b; } public static int[] timesArray(char a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to multiply two arrays (char, short) of different length" + + throw new IllegalArgumentException("Attempt to multiply two arrays (char, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -5087,15 +4688,12 @@ public static int[] timesArray(char a, short b[]) { } public static int times(byte a, int b) { - return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_INT - ? QueryConstants.NULL_INT - : a * b; + return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_INT ? QueryConstants.NULL_INT : a * b; } public static int[] timesArray(byte a[], int b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to multiply two arrays (byte, int) of different length" + + throw new IllegalArgumentException("Attempt to multiply two arrays (byte, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -5125,15 +4723,12 @@ public static int[] timesArray(byte a, int b[]) { } public static double times(byte a, double b) { - return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_DOUBLE - ? QueryConstants.NULL_DOUBLE - : a * b; + return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_DOUBLE ? 
QueryConstants.NULL_DOUBLE : a * b; } public static double[] timesArray(byte a[], double b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to multiply two arrays (byte, double) of different length" + + throw new IllegalArgumentException("Attempt to multiply two arrays (byte, double) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -5163,15 +4758,12 @@ public static double[] timesArray(byte a, double b[]) { } public static long times(byte a, long b) { - return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_LONG - ? QueryConstants.NULL_LONG - : a * b; + return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_LONG ? QueryConstants.NULL_LONG : a * b; } public static long[] timesArray(byte a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to multiply two arrays (byte, long) of different length" + + throw new IllegalArgumentException("Attempt to multiply two arrays (byte, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); long[] ret = new long[a.length]; @@ -5201,15 +4793,12 @@ public static long[] timesArray(byte a, long b[]) { } public static float times(byte a, float b) { - return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_FLOAT - ? QueryConstants.NULL_FLOAT - : a * b; + return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_FLOAT ? 
QueryConstants.NULL_FLOAT : a * b; } public static float[] timesArray(byte a[], float b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to multiply two arrays (byte, float) of different length" + + throw new IllegalArgumentException("Attempt to multiply two arrays (byte, float) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); float[] ret = new float[a.length]; @@ -5239,15 +4828,12 @@ public static float[] timesArray(byte a, float b[]) { } public static int times(byte a, char b) { - return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_CHAR - ? QueryConstants.NULL_INT - : a * b; + return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_CHAR ? QueryConstants.NULL_INT : a * b; } public static int[] timesArray(byte a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to multiply two arrays (byte, char) of different length" + + throw new IllegalArgumentException("Attempt to multiply two arrays (byte, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -5277,15 +4863,12 @@ public static int[] timesArray(byte a, char b[]) { } public static int times(byte a, byte b) { - return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_BYTE - ? QueryConstants.NULL_INT - : a * b; + return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_BYTE ? 
QueryConstants.NULL_INT : a * b; } public static int[] timesArray(byte a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to multiply two arrays (byte, byte) of different length" + + throw new IllegalArgumentException("Attempt to multiply two arrays (byte, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -5315,15 +4898,12 @@ public static int[] timesArray(byte a, byte b[]) { } public static int times(byte a, short b) { - return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_SHORT - ? QueryConstants.NULL_INT - : a * b; + return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_SHORT ? QueryConstants.NULL_INT : a * b; } public static int[] timesArray(byte a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to multiply two arrays (byte, short) of different length" + + throw new IllegalArgumentException("Attempt to multiply two arrays (byte, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -5353,15 +4933,12 @@ public static int[] timesArray(byte a, short b[]) { } public static int times(short a, int b) { - return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_INT - ? QueryConstants.NULL_INT - : a * b; + return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_INT ? 
QueryConstants.NULL_INT : a * b; } public static int[] timesArray(short a[], int b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to multiply two arrays (short, int) of different length" + + throw new IllegalArgumentException("Attempt to multiply two arrays (short, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -5391,15 +4968,12 @@ public static int[] timesArray(short a, int b[]) { } public static double times(short a, double b) { - return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_DOUBLE - ? QueryConstants.NULL_DOUBLE - : a * b; + return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_DOUBLE ? QueryConstants.NULL_DOUBLE : a * b; } public static double[] timesArray(short a[], double b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to multiply two arrays (short, double) of different length" + + throw new IllegalArgumentException("Attempt to multiply two arrays (short, double) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -5429,15 +5003,12 @@ public static double[] timesArray(short a, double b[]) { } public static long times(short a, long b) { - return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_LONG - ? QueryConstants.NULL_LONG - : a * b; + return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_LONG ? 
QueryConstants.NULL_LONG : a * b; } public static long[] timesArray(short a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to multiply two arrays (short, long) of different length" + + throw new IllegalArgumentException("Attempt to multiply two arrays (short, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); long[] ret = new long[a.length]; @@ -5467,15 +5038,12 @@ public static long[] timesArray(short a, long b[]) { } public static float times(short a, float b) { - return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_FLOAT - ? QueryConstants.NULL_FLOAT - : a * b; + return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_FLOAT ? QueryConstants.NULL_FLOAT : a * b; } public static float[] timesArray(short a[], float b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to multiply two arrays (short, float) of different length" + + throw new IllegalArgumentException("Attempt to multiply two arrays (short, float) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); float[] ret = new float[a.length]; @@ -5505,15 +5073,12 @@ public static float[] timesArray(short a, float b[]) { } public static int times(short a, char b) { - return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_CHAR - ? QueryConstants.NULL_INT - : a * b; + return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_CHAR ? 
QueryConstants.NULL_INT : a * b; } public static int[] timesArray(short a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to multiply two arrays (short, char) of different length" + + throw new IllegalArgumentException("Attempt to multiply two arrays (short, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -5543,15 +5108,12 @@ public static int[] timesArray(short a, char b[]) { } public static int times(short a, byte b) { - return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_BYTE - ? QueryConstants.NULL_INT - : a * b; + return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_BYTE ? QueryConstants.NULL_INT : a * b; } public static int[] timesArray(short a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to multiply two arrays (short, byte) of different length" + + throw new IllegalArgumentException("Attempt to multiply two arrays (short, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -5581,15 +5143,12 @@ public static int[] timesArray(short a, byte b[]) { } public static int times(short a, short b) { - return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_SHORT - ? QueryConstants.NULL_INT - : a * b; + return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_SHORT ? 
QueryConstants.NULL_INT : a * b; } public static int[] timesArray(short a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to multiply two arrays (short, short) of different length" + + throw new IllegalArgumentException("Attempt to multiply two arrays (short, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -5619,15 +5178,13 @@ public static int[] timesArray(short a, short b[]) { } public static double divide(int a, int b) { - return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_INT - ? QueryConstants.NULL_DOUBLE - : a / (double) b; + return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_INT ? QueryConstants.NULL_DOUBLE + : a / (double) b; } public static double[] divideArray(int a[], int b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to divide two arrays (int, int) of different length" + + throw new IllegalArgumentException("Attempt to divide two arrays (int, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -5657,15 +5214,12 @@ public static double[] divideArray(int a, int b[]) { } public static double divide(int a, double b) { - return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_DOUBLE - ? QueryConstants.NULL_DOUBLE - : a / b; + return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_DOUBLE ? 
QueryConstants.NULL_DOUBLE : a / b; } public static double[] divideArray(int a[], double b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to divide two arrays (int, double) of different length" + + throw new IllegalArgumentException("Attempt to divide two arrays (int, double) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -5695,15 +5249,13 @@ public static double[] divideArray(int a, double b[]) { } public static double divide(int a, long b) { - return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_LONG - ? QueryConstants.NULL_DOUBLE - : a / (double) b; + return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_LONG ? QueryConstants.NULL_DOUBLE + : a / (double) b; } public static double[] divideArray(int a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to divide two arrays (int, long) of different length" + + throw new IllegalArgumentException("Attempt to divide two arrays (int, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -5733,15 +5285,12 @@ public static double[] divideArray(int a, long b[]) { } public static float divide(int a, float b) { - return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_FLOAT - ? QueryConstants.NULL_FLOAT - : a / b; + return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_FLOAT ? 
QueryConstants.NULL_FLOAT : a / b; } public static float[] divideArray(int a[], float b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to divide two arrays (int, float) of different length" + + throw new IllegalArgumentException("Attempt to divide two arrays (int, float) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); float[] ret = new float[a.length]; @@ -5771,15 +5320,13 @@ public static float[] divideArray(int a, float b[]) { } public static double divide(int a, char b) { - return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_CHAR - ? QueryConstants.NULL_DOUBLE - : a / (double) b; + return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_CHAR ? QueryConstants.NULL_DOUBLE + : a / (double) b; } public static double[] divideArray(int a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to divide two arrays (int, char) of different length" + + throw new IllegalArgumentException("Attempt to divide two arrays (int, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -5809,15 +5356,13 @@ public static double[] divideArray(int a, char b[]) { } public static double divide(int a, byte b) { - return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_BYTE - ? QueryConstants.NULL_DOUBLE - : a / (double) b; + return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_BYTE ? 
QueryConstants.NULL_DOUBLE + : a / (double) b; } public static double[] divideArray(int a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to divide two arrays (int, byte) of different length" + + throw new IllegalArgumentException("Attempt to divide two arrays (int, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -5847,15 +5392,13 @@ public static double[] divideArray(int a, byte b[]) { } public static double divide(int a, short b) { - return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_SHORT - ? QueryConstants.NULL_DOUBLE - : a / (double) b; + return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_SHORT ? QueryConstants.NULL_DOUBLE + : a / (double) b; } public static double[] divideArray(int a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to divide two arrays (int, short) of different length" + + throw new IllegalArgumentException("Attempt to divide two arrays (int, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -5885,15 +5428,13 @@ public static double[] divideArray(int a, short b[]) { } public static double divide(double a, int b) { - return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_INT - ? QueryConstants.NULL_DOUBLE - : a / (double) b; + return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_INT ? 
QueryConstants.NULL_DOUBLE + : a / (double) b; } public static double[] divideArray(double a[], int b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to divide two arrays (double, int) of different length" + + throw new IllegalArgumentException("Attempt to divide two arrays (double, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -5923,15 +5464,12 @@ public static double[] divideArray(double a, int b[]) { } public static double divide(double a, double b) { - return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_DOUBLE - ? QueryConstants.NULL_DOUBLE - : a / b; + return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_DOUBLE ? QueryConstants.NULL_DOUBLE : a / b; } public static double[] divideArray(double a[], double b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to divide two arrays (double, double) of different length" + + throw new IllegalArgumentException("Attempt to divide two arrays (double, double) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -5961,15 +5499,13 @@ public static double[] divideArray(double a, double b[]) { } public static double divide(double a, long b) { - return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_LONG - ? QueryConstants.NULL_DOUBLE - : a / (double) b; + return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_LONG ? 
QueryConstants.NULL_DOUBLE + : a / (double) b; } public static double[] divideArray(double a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to divide two arrays (double, long) of different length" + + throw new IllegalArgumentException("Attempt to divide two arrays (double, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -5999,15 +5535,12 @@ public static double[] divideArray(double a, long b[]) { } public static double divide(double a, float b) { - return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_FLOAT - ? QueryConstants.NULL_DOUBLE - : a / b; + return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_FLOAT ? QueryConstants.NULL_DOUBLE : a / b; } public static double[] divideArray(double a[], float b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to divide two arrays (double, float) of different length" + + throw new IllegalArgumentException("Attempt to divide two arrays (double, float) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -6037,15 +5570,13 @@ public static double[] divideArray(double a, float b[]) { } public static double divide(double a, char b) { - return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_CHAR - ? QueryConstants.NULL_DOUBLE - : a / (double) b; + return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_CHAR ? 
QueryConstants.NULL_DOUBLE + : a / (double) b; } public static double[] divideArray(double a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to divide two arrays (double, char) of different length" + + throw new IllegalArgumentException("Attempt to divide two arrays (double, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -6075,15 +5606,13 @@ public static double[] divideArray(double a, char b[]) { } public static double divide(double a, byte b) { - return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_BYTE - ? QueryConstants.NULL_DOUBLE - : a / (double) b; + return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_BYTE ? QueryConstants.NULL_DOUBLE + : a / (double) b; } public static double[] divideArray(double a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to divide two arrays (double, byte) of different length" + + throw new IllegalArgumentException("Attempt to divide two arrays (double, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -6113,15 +5642,13 @@ public static double[] divideArray(double a, byte b[]) { } public static double divide(double a, short b) { - return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_SHORT - ? QueryConstants.NULL_DOUBLE - : a / (double) b; + return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_SHORT ? 
QueryConstants.NULL_DOUBLE + : a / (double) b; } public static double[] divideArray(double a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to divide two arrays (double, short) of different length" + + throw new IllegalArgumentException("Attempt to divide two arrays (double, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -6151,15 +5678,13 @@ public static double[] divideArray(double a, short b[]) { } public static double divide(long a, int b) { - return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_INT - ? QueryConstants.NULL_DOUBLE - : a / (double) b; + return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_INT ? QueryConstants.NULL_DOUBLE + : a / (double) b; } public static double[] divideArray(long a[], int b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to divide two arrays (long, int) of different length" + + throw new IllegalArgumentException("Attempt to divide two arrays (long, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -6189,15 +5714,12 @@ public static double[] divideArray(long a, int b[]) { } public static double divide(long a, double b) { - return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_DOUBLE - ? QueryConstants.NULL_DOUBLE - : a / b; + return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_DOUBLE ? 
QueryConstants.NULL_DOUBLE : a / b; } public static double[] divideArray(long a[], double b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to divide two arrays (long, double) of different length" + + throw new IllegalArgumentException("Attempt to divide two arrays (long, double) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -6227,15 +5749,13 @@ public static double[] divideArray(long a, double b[]) { } public static double divide(long a, long b) { - return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_LONG - ? QueryConstants.NULL_DOUBLE - : a / (double) b; + return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_LONG ? QueryConstants.NULL_DOUBLE + : a / (double) b; } public static double[] divideArray(long a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to divide two arrays (long, long) of different length" + + throw new IllegalArgumentException("Attempt to divide two arrays (long, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -6265,15 +5785,12 @@ public static double[] divideArray(long a, long b[]) { } public static float divide(long a, float b) { - return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_FLOAT - ? QueryConstants.NULL_FLOAT - : a / b; + return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_FLOAT ? 
QueryConstants.NULL_FLOAT : a / b; } public static float[] divideArray(long a[], float b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to divide two arrays (long, float) of different length" + + throw new IllegalArgumentException("Attempt to divide two arrays (long, float) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); float[] ret = new float[a.length]; @@ -6303,15 +5820,13 @@ public static float[] divideArray(long a, float b[]) { } public static double divide(long a, char b) { - return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_CHAR - ? QueryConstants.NULL_DOUBLE - : a / (double) b; + return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_CHAR ? QueryConstants.NULL_DOUBLE + : a / (double) b; } public static double[] divideArray(long a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to divide two arrays (long, char) of different length" + + throw new IllegalArgumentException("Attempt to divide two arrays (long, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -6341,15 +5856,13 @@ public static double[] divideArray(long a, char b[]) { } public static double divide(long a, byte b) { - return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_BYTE - ? QueryConstants.NULL_DOUBLE - : a / (double) b; + return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_BYTE ? 
QueryConstants.NULL_DOUBLE + : a / (double) b; } public static double[] divideArray(long a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to divide two arrays (long, byte) of different length" + + throw new IllegalArgumentException("Attempt to divide two arrays (long, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -6379,15 +5892,13 @@ public static double[] divideArray(long a, byte b[]) { } public static double divide(long a, short b) { - return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_SHORT - ? QueryConstants.NULL_DOUBLE - : a / (double) b; + return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_SHORT ? QueryConstants.NULL_DOUBLE + : a / (double) b; } public static double[] divideArray(long a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to divide two arrays (long, short) of different length" + + throw new IllegalArgumentException("Attempt to divide two arrays (long, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -6417,15 +5928,13 @@ public static double[] divideArray(long a, short b[]) { } public static double divide(float a, int b) { - return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_INT - ? QueryConstants.NULL_DOUBLE - : a / (double) b; + return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_INT ? 
QueryConstants.NULL_DOUBLE + : a / (double) b; } public static double[] divideArray(float a[], int b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to divide two arrays (float, int) of different length" + + throw new IllegalArgumentException("Attempt to divide two arrays (float, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -6455,15 +5964,12 @@ public static double[] divideArray(float a, int b[]) { } public static double divide(float a, double b) { - return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_DOUBLE - ? QueryConstants.NULL_DOUBLE - : a / b; + return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_DOUBLE ? QueryConstants.NULL_DOUBLE : a / b; } public static double[] divideArray(float a[], double b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to divide two arrays (float, double) of different length" + + throw new IllegalArgumentException("Attempt to divide two arrays (float, double) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -6493,15 +5999,13 @@ public static double[] divideArray(float a, double b[]) { } public static double divide(float a, long b) { - return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_LONG - ? QueryConstants.NULL_DOUBLE - : a / (double) b; + return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_LONG ? 
QueryConstants.NULL_DOUBLE + : a / (double) b; } public static double[] divideArray(float a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to divide two arrays (float, long) of different length" + + throw new IllegalArgumentException("Attempt to divide two arrays (float, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -6531,15 +6035,12 @@ public static double[] divideArray(float a, long b[]) { } public static float divide(float a, float b) { - return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_FLOAT - ? QueryConstants.NULL_FLOAT - : a / b; + return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_FLOAT ? QueryConstants.NULL_FLOAT : a / b; } public static float[] divideArray(float a[], float b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to divide two arrays (float, float) of different length" + + throw new IllegalArgumentException("Attempt to divide two arrays (float, float) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); float[] ret = new float[a.length]; @@ -6569,15 +6070,13 @@ public static float[] divideArray(float a, float b[]) { } public static double divide(float a, char b) { - return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_CHAR - ? QueryConstants.NULL_DOUBLE - : a / (double) b; + return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_CHAR ? 
QueryConstants.NULL_DOUBLE + : a / (double) b; } public static double[] divideArray(float a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to divide two arrays (float, char) of different length" + + throw new IllegalArgumentException("Attempt to divide two arrays (float, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -6607,15 +6106,13 @@ public static double[] divideArray(float a, char b[]) { } public static double divide(float a, byte b) { - return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_BYTE - ? QueryConstants.NULL_DOUBLE - : a / (double) b; + return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_BYTE ? QueryConstants.NULL_DOUBLE + : a / (double) b; } public static double[] divideArray(float a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to divide two arrays (float, byte) of different length" + + throw new IllegalArgumentException("Attempt to divide two arrays (float, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -6645,15 +6142,13 @@ public static double[] divideArray(float a, byte b[]) { } public static double divide(float a, short b) { - return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_SHORT - ? QueryConstants.NULL_DOUBLE - : a / (double) b; + return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_SHORT ? 
QueryConstants.NULL_DOUBLE + : a / (double) b; } public static double[] divideArray(float a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to divide two arrays (float, short) of different length" + + throw new IllegalArgumentException("Attempt to divide two arrays (float, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -6683,15 +6178,13 @@ public static double[] divideArray(float a, short b[]) { } public static double divide(char a, int b) { - return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_INT - ? QueryConstants.NULL_DOUBLE - : a / (double) b; + return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_INT ? QueryConstants.NULL_DOUBLE + : a / (double) b; } public static double[] divideArray(char a[], int b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to divide two arrays (char, int) of different length" + + throw new IllegalArgumentException("Attempt to divide two arrays (char, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -6721,15 +6214,12 @@ public static double[] divideArray(char a, int b[]) { } public static double divide(char a, double b) { - return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_DOUBLE - ? QueryConstants.NULL_DOUBLE - : a / b; + return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_DOUBLE ? 
QueryConstants.NULL_DOUBLE : a / b; } public static double[] divideArray(char a[], double b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to divide two arrays (char, double) of different length" + + throw new IllegalArgumentException("Attempt to divide two arrays (char, double) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -6759,15 +6249,13 @@ public static double[] divideArray(char a, double b[]) { } public static double divide(char a, long b) { - return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_LONG - ? QueryConstants.NULL_DOUBLE - : a / (double) b; + return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_LONG ? QueryConstants.NULL_DOUBLE + : a / (double) b; } public static double[] divideArray(char a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to divide two arrays (char, long) of different length" + + throw new IllegalArgumentException("Attempt to divide two arrays (char, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -6797,15 +6285,12 @@ public static double[] divideArray(char a, long b[]) { } public static float divide(char a, float b) { - return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_FLOAT - ? QueryConstants.NULL_FLOAT - : a / b; + return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_FLOAT ? 
QueryConstants.NULL_FLOAT : a / b; } public static float[] divideArray(char a[], float b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to divide two arrays (char, float) of different length" + + throw new IllegalArgumentException("Attempt to divide two arrays (char, float) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); float[] ret = new float[a.length]; @@ -6835,15 +6320,13 @@ public static float[] divideArray(char a, float b[]) { } public static double divide(char a, char b) { - return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_CHAR - ? QueryConstants.NULL_DOUBLE - : a / (double) b; + return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_CHAR ? QueryConstants.NULL_DOUBLE + : a / (double) b; } public static double[] divideArray(char a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to divide two arrays (char, char) of different length" + + throw new IllegalArgumentException("Attempt to divide two arrays (char, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -6873,15 +6356,13 @@ public static double[] divideArray(char a, char b[]) { } public static double divide(char a, byte b) { - return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_BYTE - ? QueryConstants.NULL_DOUBLE - : a / (double) b; + return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_BYTE ? 
QueryConstants.NULL_DOUBLE + : a / (double) b; } public static double[] divideArray(char a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to divide two arrays (char, byte) of different length" + + throw new IllegalArgumentException("Attempt to divide two arrays (char, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -6911,15 +6392,13 @@ public static double[] divideArray(char a, byte b[]) { } public static double divide(char a, short b) { - return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_SHORT - ? QueryConstants.NULL_DOUBLE - : a / (double) b; + return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_SHORT ? QueryConstants.NULL_DOUBLE + : a / (double) b; } public static double[] divideArray(char a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to divide two arrays (char, short) of different length" + + throw new IllegalArgumentException("Attempt to divide two arrays (char, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -6949,15 +6428,13 @@ public static double[] divideArray(char a, short b[]) { } public static double divide(byte a, int b) { - return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_INT - ? QueryConstants.NULL_DOUBLE - : a / (double) b; + return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_INT ? 
QueryConstants.NULL_DOUBLE + : a / (double) b; } public static double[] divideArray(byte a[], int b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to divide two arrays (byte, int) of different length" + + throw new IllegalArgumentException("Attempt to divide two arrays (byte, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -6987,15 +6464,12 @@ public static double[] divideArray(byte a, int b[]) { } public static double divide(byte a, double b) { - return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_DOUBLE - ? QueryConstants.NULL_DOUBLE - : a / b; + return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_DOUBLE ? QueryConstants.NULL_DOUBLE : a / b; } public static double[] divideArray(byte a[], double b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to divide two arrays (byte, double) of different length" + + throw new IllegalArgumentException("Attempt to divide two arrays (byte, double) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -7025,15 +6499,13 @@ public static double[] divideArray(byte a, double b[]) { } public static double divide(byte a, long b) { - return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_LONG - ? QueryConstants.NULL_DOUBLE - : a / (double) b; + return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_LONG ? 
QueryConstants.NULL_DOUBLE + : a / (double) b; } public static double[] divideArray(byte a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to divide two arrays (byte, long) of different length" + + throw new IllegalArgumentException("Attempt to divide two arrays (byte, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -7063,15 +6535,12 @@ public static double[] divideArray(byte a, long b[]) { } public static float divide(byte a, float b) { - return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_FLOAT - ? QueryConstants.NULL_FLOAT - : a / b; + return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_FLOAT ? QueryConstants.NULL_FLOAT : a / b; } public static float[] divideArray(byte a[], float b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to divide two arrays (byte, float) of different length" + + throw new IllegalArgumentException("Attempt to divide two arrays (byte, float) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); float[] ret = new float[a.length]; @@ -7101,15 +6570,13 @@ public static float[] divideArray(byte a, float b[]) { } public static double divide(byte a, char b) { - return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_CHAR - ? QueryConstants.NULL_DOUBLE - : a / (double) b; + return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_CHAR ? 
QueryConstants.NULL_DOUBLE + : a / (double) b; } public static double[] divideArray(byte a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to divide two arrays (byte, char) of different length" + + throw new IllegalArgumentException("Attempt to divide two arrays (byte, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -7139,15 +6606,13 @@ public static double[] divideArray(byte a, char b[]) { } public static double divide(byte a, byte b) { - return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_BYTE - ? QueryConstants.NULL_DOUBLE - : a / (double) b; + return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_BYTE ? QueryConstants.NULL_DOUBLE + : a / (double) b; } public static double[] divideArray(byte a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to divide two arrays (byte, byte) of different length" + + throw new IllegalArgumentException("Attempt to divide two arrays (byte, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -7177,15 +6642,13 @@ public static double[] divideArray(byte a, byte b[]) { } public static double divide(byte a, short b) { - return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_SHORT - ? QueryConstants.NULL_DOUBLE - : a / (double) b; + return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_SHORT ? 
QueryConstants.NULL_DOUBLE + : a / (double) b; } public static double[] divideArray(byte a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to divide two arrays (byte, short) of different length" + + throw new IllegalArgumentException("Attempt to divide two arrays (byte, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -7215,15 +6678,13 @@ public static double[] divideArray(byte a, short b[]) { } public static double divide(short a, int b) { - return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_INT - ? QueryConstants.NULL_DOUBLE - : a / (double) b; + return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_INT ? QueryConstants.NULL_DOUBLE + : a / (double) b; } public static double[] divideArray(short a[], int b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to divide two arrays (short, int) of different length" + + throw new IllegalArgumentException("Attempt to divide two arrays (short, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -7253,15 +6714,12 @@ public static double[] divideArray(short a, int b[]) { } public static double divide(short a, double b) { - return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_DOUBLE - ? QueryConstants.NULL_DOUBLE - : a / b; + return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_DOUBLE ? 
QueryConstants.NULL_DOUBLE : a / b; } public static double[] divideArray(short a[], double b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to divide two arrays (short, double) of different length" + + throw new IllegalArgumentException("Attempt to divide two arrays (short, double) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -7291,15 +6749,13 @@ public static double[] divideArray(short a, double b[]) { } public static double divide(short a, long b) { - return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_LONG - ? QueryConstants.NULL_DOUBLE - : a / (double) b; + return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_LONG ? QueryConstants.NULL_DOUBLE + : a / (double) b; } public static double[] divideArray(short a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to divide two arrays (short, long) of different length" + + throw new IllegalArgumentException("Attempt to divide two arrays (short, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -7329,15 +6785,12 @@ public static double[] divideArray(short a, long b[]) { } public static float divide(short a, float b) { - return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_FLOAT - ? QueryConstants.NULL_FLOAT - : a / b; + return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_FLOAT ? 
QueryConstants.NULL_FLOAT : a / b; } public static float[] divideArray(short a[], float b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to divide two arrays (short, float) of different length" + + throw new IllegalArgumentException("Attempt to divide two arrays (short, float) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); float[] ret = new float[a.length]; @@ -7367,15 +6820,13 @@ public static float[] divideArray(short a, float b[]) { } public static double divide(short a, char b) { - return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_CHAR - ? QueryConstants.NULL_DOUBLE - : a / (double) b; + return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_CHAR ? QueryConstants.NULL_DOUBLE + : a / (double) b; } public static double[] divideArray(short a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to divide two arrays (short, char) of different length" + + throw new IllegalArgumentException("Attempt to divide two arrays (short, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -7405,15 +6856,13 @@ public static double[] divideArray(short a, char b[]) { } public static double divide(short a, byte b) { - return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_BYTE - ? QueryConstants.NULL_DOUBLE - : a / (double) b; + return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_BYTE ? 
QueryConstants.NULL_DOUBLE + : a / (double) b; } public static double[] divideArray(short a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to divide two arrays (short, byte) of different length" + + throw new IllegalArgumentException("Attempt to divide two arrays (short, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -7443,15 +6892,13 @@ public static double[] divideArray(short a, byte b[]) { } public static double divide(short a, short b) { - return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_SHORT - ? QueryConstants.NULL_DOUBLE - : a / (double) b; + return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_SHORT ? QueryConstants.NULL_DOUBLE + : a / (double) b; } public static double[] divideArray(short a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to divide two arrays (short, short) of different length" + + throw new IllegalArgumentException("Attempt to divide two arrays (short, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; @@ -7481,16 +6928,14 @@ public static double[] divideArray(short a, short b[]) { } public static int remainder(int a, int b) { - return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_INT - ? QueryConstants.NULL_INT - : a % b; + return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_INT ? 
QueryConstants.NULL_INT : a % b; } public static int[] remainderArray(int a[], int b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to calculate remainder of two arrays (int, int) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to calculate remainder of two arrays (int, int) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; for (int i = 0; i < a.length; i++) { @@ -7519,16 +6964,14 @@ public static int[] remainderArray(int a, int b[]) { } public static double remainder(int a, double b) { - return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_DOUBLE - ? QueryConstants.NULL_DOUBLE - : a % b; + return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_DOUBLE ? QueryConstants.NULL_DOUBLE : a % b; } public static double[] remainderArray(int a[], double b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to calculate remainder of two arrays (int, double) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to calculate remainder of two arrays (int, double) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; for (int i = 0; i < a.length; i++) { @@ -7557,16 +7000,14 @@ public static double[] remainderArray(int a, double b[]) { } public static long remainder(int a, long b) { - return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_LONG - ? QueryConstants.NULL_LONG - : a % b; + return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_LONG ? 
QueryConstants.NULL_LONG : a % b; } public static long[] remainderArray(int a[], long b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to calculate remainder of two arrays (int, long) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to calculate remainder of two arrays (int, long) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); long[] ret = new long[a.length]; for (int i = 0; i < a.length; i++) { @@ -7595,16 +7036,14 @@ public static long[] remainderArray(int a, long b[]) { } public static float remainder(int a, float b) { - return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_FLOAT - ? QueryConstants.NULL_FLOAT - : a % b; + return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_FLOAT ? QueryConstants.NULL_FLOAT : a % b; } public static float[] remainderArray(int a[], float b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to calculate remainder of two arrays (int, float) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to calculate remainder of two arrays (int, float) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); float[] ret = new float[a.length]; for (int i = 0; i < a.length; i++) { @@ -7633,16 +7072,14 @@ public static float[] remainderArray(int a, float b[]) { } public static int remainder(int a, char b) { - return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_CHAR - ? QueryConstants.NULL_INT - : a % b; + return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_CHAR ? 
QueryConstants.NULL_INT : a % b; } public static int[] remainderArray(int a[], char b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to calculate remainder of two arrays (int, char) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to calculate remainder of two arrays (int, char) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; for (int i = 0; i < a.length; i++) { @@ -7671,16 +7108,14 @@ public static int[] remainderArray(int a, char b[]) { } public static int remainder(int a, byte b) { - return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_BYTE - ? QueryConstants.NULL_INT - : a % b; + return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_BYTE ? QueryConstants.NULL_INT : a % b; } public static int[] remainderArray(int a[], byte b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to calculate remainder of two arrays (int, byte) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to calculate remainder of two arrays (int, byte) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; for (int i = 0; i < a.length; i++) { @@ -7709,16 +7144,14 @@ public static int[] remainderArray(int a, byte b[]) { } public static int remainder(int a, short b) { - return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_SHORT - ? QueryConstants.NULL_INT - : a % b; + return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_SHORT ? 
QueryConstants.NULL_INT : a % b; } public static int[] remainderArray(int a[], short b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to calculate remainder of two arrays (int, short) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to calculate remainder of two arrays (int, short) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; for (int i = 0; i < a.length; i++) { @@ -7747,16 +7180,14 @@ public static int[] remainderArray(int a, short b[]) { } public static double remainder(double a, int b) { - return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_INT - ? QueryConstants.NULL_DOUBLE - : a % b; + return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_INT ? QueryConstants.NULL_DOUBLE : a % b; } public static double[] remainderArray(double a[], int b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to calculate remainder of two arrays (double, int) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to calculate remainder of two arrays (double, int) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; for (int i = 0; i < a.length; i++) { @@ -7785,17 +7216,14 @@ public static double[] remainderArray(double a, int b[]) { } public static double remainder(double a, double b) { - return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_DOUBLE - ? QueryConstants.NULL_DOUBLE - : a % b; + return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_DOUBLE ? 
QueryConstants.NULL_DOUBLE : a % b; } public static double[] remainderArray(double a[], double b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to calculate remainder of two arrays (double, double) of different length" - + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to calculate remainder of two arrays (double, double) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; for (int i = 0; i < a.length; i++) { @@ -7824,16 +7252,14 @@ public static double[] remainderArray(double a, double b[]) { } public static double remainder(double a, long b) { - return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_LONG - ? QueryConstants.NULL_DOUBLE - : a % b; + return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_LONG ? QueryConstants.NULL_DOUBLE : a % b; } public static double[] remainderArray(double a[], long b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to calculate remainder of two arrays (double, long) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to calculate remainder of two arrays (double, long) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; for (int i = 0; i < a.length; i++) { @@ -7862,16 +7288,14 @@ public static double[] remainderArray(double a, long b[]) { } public static double remainder(double a, float b) { - return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_FLOAT - ? QueryConstants.NULL_DOUBLE - : a % b; + return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_FLOAT ? 
QueryConstants.NULL_DOUBLE : a % b; } public static double[] remainderArray(double a[], float b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to calculate remainder of two arrays (double, float) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to calculate remainder of two arrays (double, float) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; for (int i = 0; i < a.length; i++) { @@ -7900,16 +7324,14 @@ public static double[] remainderArray(double a, float b[]) { } public static double remainder(double a, char b) { - return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_CHAR - ? QueryConstants.NULL_DOUBLE - : a % b; + return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_CHAR ? QueryConstants.NULL_DOUBLE : a % b; } public static double[] remainderArray(double a[], char b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to calculate remainder of two arrays (double, char) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to calculate remainder of two arrays (double, char) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; for (int i = 0; i < a.length; i++) { @@ -7938,16 +7360,14 @@ public static double[] remainderArray(double a, char b[]) { } public static double remainder(double a, byte b) { - return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_BYTE - ? QueryConstants.NULL_DOUBLE - : a % b; + return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_BYTE ? 
QueryConstants.NULL_DOUBLE : a % b; } public static double[] remainderArray(double a[], byte b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to calculate remainder of two arrays (double, byte) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to calculate remainder of two arrays (double, byte) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; for (int i = 0; i < a.length; i++) { @@ -7976,16 +7396,14 @@ public static double[] remainderArray(double a, byte b[]) { } public static double remainder(double a, short b) { - return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_SHORT - ? QueryConstants.NULL_DOUBLE - : a % b; + return a == QueryConstants.NULL_DOUBLE || b == QueryConstants.NULL_SHORT ? QueryConstants.NULL_DOUBLE : a % b; } public static double[] remainderArray(double a[], short b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to calculate remainder of two arrays (double, short) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to calculate remainder of two arrays (double, short) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; for (int i = 0; i < a.length; i++) { @@ -8014,16 +7432,14 @@ public static double[] remainderArray(double a, short b[]) { } public static long remainder(long a, int b) { - return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_INT - ? QueryConstants.NULL_LONG - : a % b; + return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_INT ? 
QueryConstants.NULL_LONG : a % b; } public static long[] remainderArray(long a[], int b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to calculate remainder of two arrays (long, int) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to calculate remainder of two arrays (long, int) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); long[] ret = new long[a.length]; for (int i = 0; i < a.length; i++) { @@ -8052,16 +7468,14 @@ public static long[] remainderArray(long a, int b[]) { } public static double remainder(long a, double b) { - return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_DOUBLE - ? QueryConstants.NULL_DOUBLE - : a % b; + return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_DOUBLE ? QueryConstants.NULL_DOUBLE : a % b; } public static double[] remainderArray(long a[], double b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to calculate remainder of two arrays (long, double) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to calculate remainder of two arrays (long, double) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; for (int i = 0; i < a.length; i++) { @@ -8090,16 +7504,14 @@ public static double[] remainderArray(long a, double b[]) { } public static long remainder(long a, long b) { - return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_LONG - ? QueryConstants.NULL_LONG - : a % b; + return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_LONG ? 
QueryConstants.NULL_LONG : a % b; } public static long[] remainderArray(long a[], long b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to calculate remainder of two arrays (long, long) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to calculate remainder of two arrays (long, long) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); long[] ret = new long[a.length]; for (int i = 0; i < a.length; i++) { @@ -8128,16 +7540,14 @@ public static long[] remainderArray(long a, long b[]) { } public static float remainder(long a, float b) { - return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_FLOAT - ? QueryConstants.NULL_FLOAT - : a % b; + return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_FLOAT ? QueryConstants.NULL_FLOAT : a % b; } public static float[] remainderArray(long a[], float b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to calculate remainder of two arrays (long, float) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to calculate remainder of two arrays (long, float) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); float[] ret = new float[a.length]; for (int i = 0; i < a.length; i++) { @@ -8166,16 +7576,14 @@ public static float[] remainderArray(long a, float b[]) { } public static long remainder(long a, char b) { - return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_CHAR - ? QueryConstants.NULL_LONG - : a % b; + return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_CHAR ? 
QueryConstants.NULL_LONG : a % b; } public static long[] remainderArray(long a[], char b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to calculate remainder of two arrays (long, char) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to calculate remainder of two arrays (long, char) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); long[] ret = new long[a.length]; for (int i = 0; i < a.length; i++) { @@ -8204,16 +7612,14 @@ public static long[] remainderArray(long a, char b[]) { } public static long remainder(long a, byte b) { - return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_BYTE - ? QueryConstants.NULL_LONG - : a % b; + return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_BYTE ? QueryConstants.NULL_LONG : a % b; } public static long[] remainderArray(long a[], byte b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to calculate remainder of two arrays (long, byte) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to calculate remainder of two arrays (long, byte) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); long[] ret = new long[a.length]; for (int i = 0; i < a.length; i++) { @@ -8242,16 +7648,14 @@ public static long[] remainderArray(long a, byte b[]) { } public static long remainder(long a, short b) { - return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_SHORT - ? QueryConstants.NULL_LONG - : a % b; + return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_SHORT ? 
QueryConstants.NULL_LONG : a % b; } public static long[] remainderArray(long a[], short b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to calculate remainder of two arrays (long, short) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to calculate remainder of two arrays (long, short) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); long[] ret = new long[a.length]; for (int i = 0; i < a.length; i++) { @@ -8280,16 +7684,14 @@ public static long[] remainderArray(long a, short b[]) { } public static float remainder(float a, int b) { - return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_INT - ? QueryConstants.NULL_FLOAT - : a % b; + return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_INT ? QueryConstants.NULL_FLOAT : a % b; } public static float[] remainderArray(float a[], int b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to calculate remainder of two arrays (float, int) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to calculate remainder of two arrays (float, int) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); float[] ret = new float[a.length]; for (int i = 0; i < a.length; i++) { @@ -8318,16 +7720,14 @@ public static float[] remainderArray(float a, int b[]) { } public static double remainder(float a, double b) { - return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_DOUBLE - ? QueryConstants.NULL_DOUBLE - : a % b; + return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_DOUBLE ? 
QueryConstants.NULL_DOUBLE : a % b; } public static double[] remainderArray(float a[], double b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to calculate remainder of two arrays (float, double) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to calculate remainder of two arrays (float, double) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; for (int i = 0; i < a.length; i++) { @@ -8356,16 +7756,14 @@ public static double[] remainderArray(float a, double b[]) { } public static float remainder(float a, long b) { - return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_LONG - ? QueryConstants.NULL_FLOAT - : a % b; + return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_LONG ? QueryConstants.NULL_FLOAT : a % b; } public static float[] remainderArray(float a[], long b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to calculate remainder of two arrays (float, long) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to calculate remainder of two arrays (float, long) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); float[] ret = new float[a.length]; for (int i = 0; i < a.length; i++) { @@ -8394,16 +7792,14 @@ public static float[] remainderArray(float a, long b[]) { } public static float remainder(float a, float b) { - return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_FLOAT - ? QueryConstants.NULL_FLOAT - : a % b; + return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_FLOAT ? 
QueryConstants.NULL_FLOAT : a % b; } public static float[] remainderArray(float a[], float b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to calculate remainder of two arrays (float, float) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to calculate remainder of two arrays (float, float) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); float[] ret = new float[a.length]; for (int i = 0; i < a.length; i++) { @@ -8432,16 +7828,14 @@ public static float[] remainderArray(float a, float b[]) { } public static float remainder(float a, char b) { - return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_CHAR - ? QueryConstants.NULL_FLOAT - : a % b; + return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_CHAR ? QueryConstants.NULL_FLOAT : a % b; } public static float[] remainderArray(float a[], char b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to calculate remainder of two arrays (float, char) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to calculate remainder of two arrays (float, char) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); float[] ret = new float[a.length]; for (int i = 0; i < a.length; i++) { @@ -8470,16 +7864,14 @@ public static float[] remainderArray(float a, char b[]) { } public static float remainder(float a, byte b) { - return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_BYTE - ? QueryConstants.NULL_FLOAT - : a % b; + return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_BYTE ? 
QueryConstants.NULL_FLOAT : a % b; } public static float[] remainderArray(float a[], byte b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to calculate remainder of two arrays (float, byte) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to calculate remainder of two arrays (float, byte) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); float[] ret = new float[a.length]; for (int i = 0; i < a.length; i++) { @@ -8508,16 +7900,14 @@ public static float[] remainderArray(float a, byte b[]) { } public static float remainder(float a, short b) { - return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_SHORT - ? QueryConstants.NULL_FLOAT - : a % b; + return a == QueryConstants.NULL_FLOAT || b == QueryConstants.NULL_SHORT ? QueryConstants.NULL_FLOAT : a % b; } public static float[] remainderArray(float a[], short b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to calculate remainder of two arrays (float, short) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to calculate remainder of two arrays (float, short) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); float[] ret = new float[a.length]; for (int i = 0; i < a.length; i++) { @@ -8546,16 +7936,14 @@ public static float[] remainderArray(float a, short b[]) { } public static int remainder(char a, int b) { - return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_INT - ? QueryConstants.NULL_INT - : a % b; + return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_INT ? 
QueryConstants.NULL_INT : a % b; } public static int[] remainderArray(char a[], int b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to calculate remainder of two arrays (char, int) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to calculate remainder of two arrays (char, int) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; for (int i = 0; i < a.length; i++) { @@ -8584,16 +7972,14 @@ public static int[] remainderArray(char a, int b[]) { } public static double remainder(char a, double b) { - return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_DOUBLE - ? QueryConstants.NULL_DOUBLE - : a % b; + return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_DOUBLE ? QueryConstants.NULL_DOUBLE : a % b; } public static double[] remainderArray(char a[], double b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to calculate remainder of two arrays (char, double) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to calculate remainder of two arrays (char, double) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; for (int i = 0; i < a.length; i++) { @@ -8622,16 +8008,14 @@ public static double[] remainderArray(char a, double b[]) { } public static long remainder(char a, long b) { - return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_LONG - ? QueryConstants.NULL_LONG - : a % b; + return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_LONG ? 
QueryConstants.NULL_LONG : a % b; } public static long[] remainderArray(char a[], long b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to calculate remainder of two arrays (char, long) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to calculate remainder of two arrays (char, long) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); long[] ret = new long[a.length]; for (int i = 0; i < a.length; i++) { @@ -8660,16 +8044,14 @@ public static long[] remainderArray(char a, long b[]) { } public static float remainder(char a, float b) { - return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_FLOAT - ? QueryConstants.NULL_FLOAT - : a % b; + return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_FLOAT ? QueryConstants.NULL_FLOAT : a % b; } public static float[] remainderArray(char a[], float b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to calculate remainder of two arrays (char, float) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to calculate remainder of two arrays (char, float) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); float[] ret = new float[a.length]; for (int i = 0; i < a.length; i++) { @@ -8698,16 +8080,14 @@ public static float[] remainderArray(char a, float b[]) { } public static int remainder(char a, char b) { - return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_CHAR - ? QueryConstants.NULL_INT - : a % b; + return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_CHAR ? 
QueryConstants.NULL_INT : a % b; } public static int[] remainderArray(char a[], char b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to calculate remainder of two arrays (char, char) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to calculate remainder of two arrays (char, char) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; for (int i = 0; i < a.length; i++) { @@ -8736,16 +8116,14 @@ public static int[] remainderArray(char a, char b[]) { } public static int remainder(char a, byte b) { - return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_BYTE - ? QueryConstants.NULL_INT - : a % b; + return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_BYTE ? QueryConstants.NULL_INT : a % b; } public static int[] remainderArray(char a[], byte b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to calculate remainder of two arrays (char, byte) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to calculate remainder of two arrays (char, byte) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; for (int i = 0; i < a.length; i++) { @@ -8774,16 +8152,14 @@ public static int[] remainderArray(char a, byte b[]) { } public static int remainder(char a, short b) { - return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_SHORT - ? QueryConstants.NULL_INT - : a % b; + return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_SHORT ? 
QueryConstants.NULL_INT : a % b; } public static int[] remainderArray(char a[], short b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to calculate remainder of two arrays (char, short) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to calculate remainder of two arrays (char, short) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; for (int i = 0; i < a.length; i++) { @@ -8812,16 +8188,14 @@ public static int[] remainderArray(char a, short b[]) { } public static int remainder(byte a, int b) { - return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_INT - ? QueryConstants.NULL_INT - : a % b; + return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_INT ? QueryConstants.NULL_INT : a % b; } public static int[] remainderArray(byte a[], int b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to calculate remainder of two arrays (byte, int) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to calculate remainder of two arrays (byte, int) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; for (int i = 0; i < a.length; i++) { @@ -8850,16 +8224,14 @@ public static int[] remainderArray(byte a, int b[]) { } public static double remainder(byte a, double b) { - return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_DOUBLE - ? QueryConstants.NULL_DOUBLE - : a % b; + return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_DOUBLE ? 
QueryConstants.NULL_DOUBLE : a % b; } public static double[] remainderArray(byte a[], double b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to calculate remainder of two arrays (byte, double) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to calculate remainder of two arrays (byte, double) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; for (int i = 0; i < a.length; i++) { @@ -8888,16 +8260,14 @@ public static double[] remainderArray(byte a, double b[]) { } public static long remainder(byte a, long b) { - return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_LONG - ? QueryConstants.NULL_LONG - : a % b; + return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_LONG ? QueryConstants.NULL_LONG : a % b; } public static long[] remainderArray(byte a[], long b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to calculate remainder of two arrays (byte, long) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to calculate remainder of two arrays (byte, long) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); long[] ret = new long[a.length]; for (int i = 0; i < a.length; i++) { @@ -8926,16 +8296,14 @@ public static long[] remainderArray(byte a, long b[]) { } public static float remainder(byte a, float b) { - return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_FLOAT - ? QueryConstants.NULL_FLOAT - : a % b; + return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_FLOAT ? 
QueryConstants.NULL_FLOAT : a % b; } public static float[] remainderArray(byte a[], float b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to calculate remainder of two arrays (byte, float) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to calculate remainder of two arrays (byte, float) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); float[] ret = new float[a.length]; for (int i = 0; i < a.length; i++) { @@ -8964,16 +8332,14 @@ public static float[] remainderArray(byte a, float b[]) { } public static int remainder(byte a, char b) { - return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_CHAR - ? QueryConstants.NULL_INT - : a % b; + return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_CHAR ? QueryConstants.NULL_INT : a % b; } public static int[] remainderArray(byte a[], char b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to calculate remainder of two arrays (byte, char) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to calculate remainder of two arrays (byte, char) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; for (int i = 0; i < a.length; i++) { @@ -9002,16 +8368,14 @@ public static int[] remainderArray(byte a, char b[]) { } public static int remainder(byte a, byte b) { - return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_BYTE - ? QueryConstants.NULL_INT - : a % b; + return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_BYTE ? 
QueryConstants.NULL_INT : a % b; } public static int[] remainderArray(byte a[], byte b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to calculate remainder of two arrays (byte, byte) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to calculate remainder of two arrays (byte, byte) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; for (int i = 0; i < a.length; i++) { @@ -9040,16 +8404,14 @@ public static int[] remainderArray(byte a, byte b[]) { } public static int remainder(byte a, short b) { - return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_SHORT - ? QueryConstants.NULL_INT - : a % b; + return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_SHORT ? QueryConstants.NULL_INT : a % b; } public static int[] remainderArray(byte a[], short b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to calculate remainder of two arrays (byte, short) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to calculate remainder of two arrays (byte, short) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; for (int i = 0; i < a.length; i++) { @@ -9078,16 +8440,14 @@ public static int[] remainderArray(byte a, short b[]) { } public static int remainder(short a, int b) { - return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_INT - ? QueryConstants.NULL_INT - : a % b; + return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_INT ? 
QueryConstants.NULL_INT : a % b; } public static int[] remainderArray(short a[], int b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to calculate remainder of two arrays (short, int) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to calculate remainder of two arrays (short, int) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; for (int i = 0; i < a.length; i++) { @@ -9116,16 +8476,14 @@ public static int[] remainderArray(short a, int b[]) { } public static double remainder(short a, double b) { - return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_DOUBLE - ? QueryConstants.NULL_DOUBLE - : a % b; + return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_DOUBLE ? QueryConstants.NULL_DOUBLE : a % b; } public static double[] remainderArray(short a[], double b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to calculate remainder of two arrays (short, double) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to calculate remainder of two arrays (short, double) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); double[] ret = new double[a.length]; for (int i = 0; i < a.length; i++) { @@ -9154,16 +8512,14 @@ public static double[] remainderArray(short a, double b[]) { } public static long remainder(short a, long b) { - return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_LONG - ? QueryConstants.NULL_LONG - : a % b; + return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_LONG ? 
QueryConstants.NULL_LONG : a % b; } public static long[] remainderArray(short a[], long b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to calculate remainder of two arrays (short, long) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to calculate remainder of two arrays (short, long) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); long[] ret = new long[a.length]; for (int i = 0; i < a.length; i++) { @@ -9192,16 +8548,14 @@ public static long[] remainderArray(short a, long b[]) { } public static float remainder(short a, float b) { - return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_FLOAT - ? QueryConstants.NULL_FLOAT - : a % b; + return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_FLOAT ? QueryConstants.NULL_FLOAT : a % b; } public static float[] remainderArray(short a[], float b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to calculate remainder of two arrays (short, float) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to calculate remainder of two arrays (short, float) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); float[] ret = new float[a.length]; for (int i = 0; i < a.length; i++) { @@ -9230,16 +8584,14 @@ public static float[] remainderArray(short a, float b[]) { } public static int remainder(short a, char b) { - return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_CHAR - ? QueryConstants.NULL_INT - : a % b; + return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_CHAR ? 
QueryConstants.NULL_INT : a % b; } public static int[] remainderArray(short a[], char b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to calculate remainder of two arrays (short, char) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to calculate remainder of two arrays (short, char) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; for (int i = 0; i < a.length; i++) { @@ -9268,16 +8620,14 @@ public static int[] remainderArray(short a, char b[]) { } public static int remainder(short a, byte b) { - return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_BYTE - ? QueryConstants.NULL_INT - : a % b; + return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_BYTE ? QueryConstants.NULL_INT : a % b; } public static int[] remainderArray(short a[], byte b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to calculate remainder of two arrays (short, byte) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to calculate remainder of two arrays (short, byte) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; for (int i = 0; i < a.length; i++) { @@ -9306,16 +8656,14 @@ public static int[] remainderArray(short a, byte b[]) { } public static int remainder(short a, short b) { - return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_SHORT - ? QueryConstants.NULL_INT - : a % b; + return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_SHORT ? 
QueryConstants.NULL_INT : a % b; } public static int[] remainderArray(short a[], short b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to calculate remainder of two arrays (short, short) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to calculate remainder of two arrays (short, short) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; for (int i = 0; i < a.length; i++) { @@ -9344,15 +8692,12 @@ public static int[] remainderArray(short a, short b[]) { } public static int binOr(int a, int b) { - return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_INT - ? QueryConstants.NULL_INT - : a | b; + return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_INT ? QueryConstants.NULL_INT : a | b; } public static int[] binOrArray(int a[], int b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to binOr two arrays (int, int) of different length" + + throw new IllegalArgumentException("Attempt to binOr two arrays (int, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -9382,15 +8727,12 @@ public static int[] binOrArray(int a, int b[]) { } public static long binOr(long a, long b) { - return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_LONG - ? QueryConstants.NULL_LONG - : a | b; + return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_LONG ? 
QueryConstants.NULL_LONG : a | b; } public static long[] binOrArray(long a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to binOr two arrays (long, long) of different length" + + throw new IllegalArgumentException("Attempt to binOr two arrays (long, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); long[] ret = new long[a.length]; @@ -9420,15 +8762,12 @@ public static long[] binOrArray(long a, long b[]) { } public static int binOr(char a, char b) { - return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_CHAR - ? QueryConstants.NULL_INT - : a | b; + return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_CHAR ? QueryConstants.NULL_INT : a | b; } public static int[] binOrArray(char a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to binOr two arrays (char, char) of different length" + + throw new IllegalArgumentException("Attempt to binOr two arrays (char, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -9458,15 +8797,12 @@ public static int[] binOrArray(char a, char b[]) { } public static int binOr(byte a, byte b) { - return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_BYTE - ? QueryConstants.NULL_INT - : a | b; + return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_BYTE ? 
QueryConstants.NULL_INT : a | b; } public static int[] binOrArray(byte a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to binOr two arrays (byte, byte) of different length" + + throw new IllegalArgumentException("Attempt to binOr two arrays (byte, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -9496,15 +8832,12 @@ public static int[] binOrArray(byte a, byte b[]) { } public static int binOr(short a, short b) { - return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_SHORT - ? QueryConstants.NULL_INT - : a | b; + return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_SHORT ? QueryConstants.NULL_INT : a | b; } public static int[] binOrArray(short a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to binOr two arrays (short, short) of different length" + + throw new IllegalArgumentException("Attempt to binOr two arrays (short, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -9534,15 +8867,12 @@ public static int[] binOrArray(short a, short b[]) { } public static int xor(int a, int b) { - return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_INT - ? QueryConstants.NULL_INT - : a ^ b; + return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_INT ? 
QueryConstants.NULL_INT : a ^ b; } public static int[] xorArray(int a[], int b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to xor two arrays (int, int) of different length" + + throw new IllegalArgumentException("Attempt to xor two arrays (int, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -9572,15 +8902,12 @@ public static int[] xorArray(int a, int b[]) { } public static long xor(long a, long b) { - return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_LONG - ? QueryConstants.NULL_LONG - : a ^ b; + return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_LONG ? QueryConstants.NULL_LONG : a ^ b; } public static long[] xorArray(long a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to xor two arrays (long, long) of different length" + + throw new IllegalArgumentException("Attempt to xor two arrays (long, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); long[] ret = new long[a.length]; @@ -9610,15 +8937,12 @@ public static long[] xorArray(long a, long b[]) { } public static int xor(char a, char b) { - return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_CHAR - ? QueryConstants.NULL_INT - : a ^ b; + return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_CHAR ? 
QueryConstants.NULL_INT : a ^ b; } public static int[] xorArray(char a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to xor two arrays (char, char) of different length" + + throw new IllegalArgumentException("Attempt to xor two arrays (char, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -9648,15 +8972,12 @@ public static int[] xorArray(char a, char b[]) { } public static int xor(byte a, byte b) { - return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_BYTE - ? QueryConstants.NULL_INT - : a ^ b; + return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_BYTE ? QueryConstants.NULL_INT : a ^ b; } public static int[] xorArray(byte a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to xor two arrays (byte, byte) of different length" + + throw new IllegalArgumentException("Attempt to xor two arrays (byte, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -9686,15 +9007,12 @@ public static int[] xorArray(byte a, byte b[]) { } public static int xor(short a, short b) { - return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_SHORT - ? QueryConstants.NULL_INT - : a ^ b; + return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_SHORT ? 
QueryConstants.NULL_INT : a ^ b; } public static int[] xorArray(short a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to xor two arrays (short, short) of different length" + + throw new IllegalArgumentException("Attempt to xor two arrays (short, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -9724,15 +9042,12 @@ public static int[] xorArray(short a, short b[]) { } public static int binAnd(int a, int b) { - return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_INT - ? QueryConstants.NULL_INT - : a & b; + return a == QueryConstants.NULL_INT || b == QueryConstants.NULL_INT ? QueryConstants.NULL_INT : a & b; } public static int[] binAndArray(int a[], int b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to binAnd two arrays (int, int) of different length" + + throw new IllegalArgumentException("Attempt to binAnd two arrays (int, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -9762,15 +9077,12 @@ public static int[] binAndArray(int a, int b[]) { } public static long binAnd(long a, long b) { - return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_LONG - ? QueryConstants.NULL_LONG - : a & b; + return a == QueryConstants.NULL_LONG || b == QueryConstants.NULL_LONG ? 
QueryConstants.NULL_LONG : a & b; } public static long[] binAndArray(long a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to binAnd two arrays (long, long) of different length" + + throw new IllegalArgumentException("Attempt to binAnd two arrays (long, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); long[] ret = new long[a.length]; @@ -9800,15 +9112,12 @@ public static long[] binAndArray(long a, long b[]) { } public static int binAnd(char a, char b) { - return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_CHAR - ? QueryConstants.NULL_INT - : a & b; + return a == QueryConstants.NULL_CHAR || b == QueryConstants.NULL_CHAR ? QueryConstants.NULL_INT : a & b; } public static int[] binAndArray(char a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to binAnd two arrays (char, char) of different length" + + throw new IllegalArgumentException("Attempt to binAnd two arrays (char, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -9838,15 +9147,12 @@ public static int[] binAndArray(char a, char b[]) { } public static int binAnd(byte a, byte b) { - return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_BYTE - ? QueryConstants.NULL_INT - : a & b; + return a == QueryConstants.NULL_BYTE || b == QueryConstants.NULL_BYTE ? 
QueryConstants.NULL_INT : a & b; } public static int[] binAndArray(byte a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to binAnd two arrays (byte, byte) of different length" + + throw new IllegalArgumentException("Attempt to binAnd two arrays (byte, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -9876,15 +9182,12 @@ public static int[] binAndArray(byte a, byte b[]) { } public static int binAnd(short a, short b) { - return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_SHORT - ? QueryConstants.NULL_INT - : a & b; + return a == QueryConstants.NULL_SHORT || b == QueryConstants.NULL_SHORT ? QueryConstants.NULL_INT : a & b; } public static int[] binAndArray(short a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to binAnd two arrays (short, short) of different length" + + throw new IllegalArgumentException("Attempt to binAnd two arrays (short, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); int[] ret = new int[a.length]; @@ -9940,8 +9243,8 @@ public static boolean eq(int a, int b) { public static boolean[] eqArray(int a[], int b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to check equality of two arrays (int, int) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to check equality of two arrays (int, int) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; for (int i = 0; i < a.length; i++) { @@ -9996,8 +9299,8 @@ public static boolean eq(int a, double b) { public static boolean[] eqArray(int a[], double b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to check equality of two arrays (int, double) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + 
')'); + "Attempt to check equality of two arrays (int, double) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; for (int i = 0; i < a.length; i++) { @@ -10052,8 +9355,8 @@ public static boolean eq(int a, long b) { public static boolean[] eqArray(int a[], long b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to check equality of two arrays (int, long) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to check equality of two arrays (int, long) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; for (int i = 0; i < a.length; i++) { @@ -10108,8 +9411,8 @@ public static boolean eq(int a, float b) { public static boolean[] eqArray(int a[], float b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to check equality of two arrays (int, float) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to check equality of two arrays (int, float) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; for (int i = 0; i < a.length; i++) { @@ -10164,8 +9467,8 @@ public static boolean eq(int a, char b) { public static boolean[] eqArray(int a[], char b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to check equality of two arrays (int, char) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to check equality of two arrays (int, char) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; for (int i = 0; i < a.length; i++) { @@ -10220,8 +9523,8 @@ public static boolean eq(int a, byte b) { public static boolean[] eqArray(int a[], byte b[]) { if (a.length != b.length) throw new 
IllegalArgumentException( - "Attempt to check equality of two arrays (int, byte) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to check equality of two arrays (int, byte) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; for (int i = 0; i < a.length; i++) { @@ -10276,8 +9579,8 @@ public static boolean eq(int a, short b) { public static boolean[] eqArray(int a[], short b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to check equality of two arrays (int, short) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to check equality of two arrays (int, short) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; for (int i = 0; i < a.length; i++) { @@ -10332,8 +9635,8 @@ public static boolean eq(double a, int b) { public static boolean[] eqArray(double a[], int b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to check equality of two arrays (double, int) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to check equality of two arrays (double, int) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; for (int i = 0; i < a.length; i++) { @@ -10388,8 +9691,8 @@ public static boolean eq(double a, double b) { public static boolean[] eqArray(double a[], double b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to check equality of two arrays (double, double) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to check equality of two arrays (double, double) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; for (int i = 
0; i < a.length; i++) { @@ -10436,8 +9739,8 @@ public static boolean eq(double a, long b) { public static boolean[] eqArray(double a[], long b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to check equality of two arrays (double, long) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to check equality of two arrays (double, long) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; for (int i = 0; i < a.length; i++) { @@ -10492,8 +9795,8 @@ public static boolean eq(double a, float b) { public static boolean[] eqArray(double a[], float b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to check equality of two arrays (double, float) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to check equality of two arrays (double, float) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; for (int i = 0; i < a.length; i++) { @@ -10548,8 +9851,8 @@ public static boolean eq(double a, char b) { public static boolean[] eqArray(double a[], char b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to check equality of two arrays (double, char) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to check equality of two arrays (double, char) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; for (int i = 0; i < a.length; i++) { @@ -10604,8 +9907,8 @@ public static boolean eq(double a, byte b) { public static boolean[] eqArray(double a[], byte b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to check equality of two arrays (double, byte) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + 
')'); + "Attempt to check equality of two arrays (double, byte) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; for (int i = 0; i < a.length; i++) { @@ -10660,8 +9963,8 @@ public static boolean eq(double a, short b) { public static boolean[] eqArray(double a[], short b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to check equality of two arrays (double, short) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to check equality of two arrays (double, short) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; for (int i = 0; i < a.length; i++) { @@ -10716,8 +10019,8 @@ public static boolean eq(long a, int b) { public static boolean[] eqArray(long a[], int b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to check equality of two arrays (long, int) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to check equality of two arrays (long, int) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; for (int i = 0; i < a.length; i++) { @@ -10791,8 +10094,8 @@ public static boolean eq(long a, double b) { public static boolean[] eqArray(long a[], double b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to check equality of two arrays (long, double) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to check equality of two arrays (long, double) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; for (int i = 0; i < a.length; i++) { @@ -10847,8 +10150,8 @@ public static boolean eq(long a, long b) { public static boolean[] eqArray(long a[], long b[]) { if (a.length != 
b.length) throw new IllegalArgumentException( - "Attempt to check equality of two arrays (long, long) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to check equality of two arrays (long, long) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; for (int i = 0; i < a.length; i++) { @@ -10922,8 +10225,8 @@ public static boolean eq(long a, float b) { public static boolean[] eqArray(long a[], float b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to check equality of two arrays (long, float) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to check equality of two arrays (long, float) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; for (int i = 0; i < a.length; i++) { @@ -10978,8 +10281,8 @@ public static boolean eq(long a, char b) { public static boolean[] eqArray(long a[], char b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to check equality of two arrays (long, char) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to check equality of two arrays (long, char) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; for (int i = 0; i < a.length; i++) { @@ -11034,8 +10337,8 @@ public static boolean eq(long a, byte b) { public static boolean[] eqArray(long a[], byte b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to check equality of two arrays (long, byte) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to check equality of two arrays (long, byte) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; for 
(int i = 0; i < a.length; i++) { @@ -11090,8 +10393,8 @@ public static boolean eq(long a, short b) { public static boolean[] eqArray(long a[], short b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to check equality of two arrays (long, short) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to check equality of two arrays (long, short) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; for (int i = 0; i < a.length; i++) { @@ -11146,8 +10449,8 @@ public static boolean eq(float a, int b) { public static boolean[] eqArray(float a[], int b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to check equality of two arrays (float, int) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to check equality of two arrays (float, int) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; for (int i = 0; i < a.length; i++) { @@ -11202,8 +10505,8 @@ public static boolean eq(float a, double b) { public static boolean[] eqArray(float a[], double b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to check equality of two arrays (float, double) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to check equality of two arrays (float, double) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; for (int i = 0; i < a.length; i++) { @@ -11250,8 +10553,8 @@ public static boolean eq(float a, long b) { public static boolean[] eqArray(float a[], long b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to check equality of two arrays (float, long) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); 
+ "Attempt to check equality of two arrays (float, long) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; for (int i = 0; i < a.length; i++) { @@ -11306,8 +10609,8 @@ public static boolean eq(float a, float b) { public static boolean[] eqArray(float a[], float b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to check equality of two arrays (float, float) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to check equality of two arrays (float, float) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; for (int i = 0; i < a.length; i++) { @@ -11362,8 +10665,8 @@ public static boolean eq(float a, char b) { public static boolean[] eqArray(float a[], char b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to check equality of two arrays (float, char) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to check equality of two arrays (float, char) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; for (int i = 0; i < a.length; i++) { @@ -11418,8 +10721,8 @@ public static boolean eq(float a, byte b) { public static boolean[] eqArray(float a[], byte b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to check equality of two arrays (float, byte) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to check equality of two arrays (float, byte) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; for (int i = 0; i < a.length; i++) { @@ -11474,8 +10777,8 @@ public static boolean eq(float a, short b) { public static boolean[] eqArray(float a[], short b[]) { if (a.length != 
b.length) throw new IllegalArgumentException( - "Attempt to check equality of two arrays (float, short) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to check equality of two arrays (float, short) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; for (int i = 0; i < a.length; i++) { @@ -11530,8 +10833,8 @@ public static boolean eq(char a, int b) { public static boolean[] eqArray(char a[], int b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to check equality of two arrays (char, int) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to check equality of two arrays (char, int) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; for (int i = 0; i < a.length; i++) { @@ -11586,8 +10889,8 @@ public static boolean eq(char a, double b) { public static boolean[] eqArray(char a[], double b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to check equality of two arrays (char, double) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to check equality of two arrays (char, double) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; for (int i = 0; i < a.length; i++) { @@ -11642,8 +10945,8 @@ public static boolean eq(char a, long b) { public static boolean[] eqArray(char a[], long b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to check equality of two arrays (char, long) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to check equality of two arrays (char, long) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; 
for (int i = 0; i < a.length; i++) { @@ -11698,8 +11001,8 @@ public static boolean eq(char a, float b) { public static boolean[] eqArray(char a[], float b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to check equality of two arrays (char, float) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to check equality of two arrays (char, float) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; for (int i = 0; i < a.length; i++) { @@ -11754,8 +11057,8 @@ public static boolean eq(char a, char b) { public static boolean[] eqArray(char a[], char b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to check equality of two arrays (char, char) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to check equality of two arrays (char, char) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; for (int i = 0; i < a.length; i++) { @@ -11810,8 +11113,8 @@ public static boolean eq(char a, byte b) { public static boolean[] eqArray(char a[], byte b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to check equality of two arrays (char, byte) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to check equality of two arrays (char, byte) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; for (int i = 0; i < a.length; i++) { @@ -11866,8 +11169,8 @@ public static boolean eq(char a, short b) { public static boolean[] eqArray(char a[], short b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to check equality of two arrays (char, short) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + 
"Attempt to check equality of two arrays (char, short) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; for (int i = 0; i < a.length; i++) { @@ -11922,8 +11225,8 @@ public static boolean eq(byte a, int b) { public static boolean[] eqArray(byte a[], int b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to check equality of two arrays (byte, int) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to check equality of two arrays (byte, int) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; for (int i = 0; i < a.length; i++) { @@ -11978,8 +11281,8 @@ public static boolean eq(byte a, double b) { public static boolean[] eqArray(byte a[], double b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to check equality of two arrays (byte, double) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to check equality of two arrays (byte, double) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; for (int i = 0; i < a.length; i++) { @@ -12034,8 +11337,8 @@ public static boolean eq(byte a, long b) { public static boolean[] eqArray(byte a[], long b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to check equality of two arrays (byte, long) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to check equality of two arrays (byte, long) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; for (int i = 0; i < a.length; i++) { @@ -12090,8 +11393,8 @@ public static boolean eq(byte a, float b) { public static boolean[] eqArray(byte a[], float b[]) { if (a.length != b.length) throw 
new IllegalArgumentException( - "Attempt to check equality of two arrays (byte, float) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to check equality of two arrays (byte, float) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; for (int i = 0; i < a.length; i++) { @@ -12146,8 +11449,8 @@ public static boolean eq(byte a, char b) { public static boolean[] eqArray(byte a[], char b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to check equality of two arrays (byte, char) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to check equality of two arrays (byte, char) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; for (int i = 0; i < a.length; i++) { @@ -12202,8 +11505,8 @@ public static boolean eq(byte a, byte b) { public static boolean[] eqArray(byte a[], byte b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to check equality of two arrays (byte, byte) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to check equality of two arrays (byte, byte) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; for (int i = 0; i < a.length; i++) { @@ -12258,8 +11561,8 @@ public static boolean eq(byte a, short b) { public static boolean[] eqArray(byte a[], short b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to check equality of two arrays (byte, short) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to check equality of two arrays (byte, short) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; for (int i = 0; i < 
a.length; i++) { @@ -12314,8 +11617,8 @@ public static boolean eq(short a, int b) { public static boolean[] eqArray(short a[], int b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to check equality of two arrays (short, int) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to check equality of two arrays (short, int) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; for (int i = 0; i < a.length; i++) { @@ -12370,8 +11673,8 @@ public static boolean eq(short a, double b) { public static boolean[] eqArray(short a[], double b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to check equality of two arrays (short, double) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to check equality of two arrays (short, double) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; for (int i = 0; i < a.length; i++) { @@ -12426,8 +11729,8 @@ public static boolean eq(short a, long b) { public static boolean[] eqArray(short a[], long b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to check equality of two arrays (short, long) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to check equality of two arrays (short, long) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; for (int i = 0; i < a.length; i++) { @@ -12482,8 +11785,8 @@ public static boolean eq(short a, float b) { public static boolean[] eqArray(short a[], float b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to check equality of two arrays (short, float) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to 
check equality of two arrays (short, float) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; for (int i = 0; i < a.length; i++) { @@ -12538,8 +11841,8 @@ public static boolean eq(short a, char b) { public static boolean[] eqArray(short a[], char b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to check equality of two arrays (short, char) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to check equality of two arrays (short, char) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; for (int i = 0; i < a.length; i++) { @@ -12594,8 +11897,8 @@ public static boolean eq(short a, byte b) { public static boolean[] eqArray(short a[], byte b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to check equality of two arrays (short, byte) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to check equality of two arrays (short, byte) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; for (int i = 0; i < a.length; i++) { @@ -12650,8 +11953,8 @@ public static boolean eq(short a, short b) { public static boolean[] eqArray(short a[], short b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to check equality of two arrays (short, short) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to check equality of two arrays (short, short) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; for (int i = 0; i < a.length; i++) { @@ -12685,8 +11988,7 @@ public static boolean less(int a, int b) { public static boolean[] lessArray(int a[], int b[]) { if (a.length != b.length) - throw 
new IllegalArgumentException( - "Attempt to compare two arrays (int, int) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (int, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -12721,8 +12023,7 @@ public static boolean less(int a, double b) { public static boolean[] lessArray(int a[], double b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (int, double) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (int, double) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -12757,8 +12058,7 @@ public static boolean less(int a, long b) { public static boolean[] lessArray(int a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (int, long) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (int, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -12793,8 +12093,7 @@ public static boolean less(int a, float b) { public static boolean[] lessArray(int a[], float b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (int, float) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (int, float) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -12829,8 +12128,7 @@ public static boolean less(int a, char b) { public static boolean[] lessArray(int a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (int, char) of different length" + + throw new IllegalArgumentException("Attempt to compare two 
arrays (int, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -12865,8 +12163,7 @@ public static boolean less(int a, byte b) { public static boolean[] lessArray(int a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (int, byte) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (int, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -12901,8 +12198,7 @@ public static boolean less(int a, short b) { public static boolean[] lessArray(int a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (int, short) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (int, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -12937,8 +12233,7 @@ public static boolean less(double a, int b) { public static boolean[] lessArray(double a[], int b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (double, int) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (double, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -12973,8 +12268,7 @@ public static boolean less(double a, double b) { public static boolean[] lessArray(double a[], double b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (double, double) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (double, double) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new 
boolean[a.length]; @@ -13009,8 +12303,7 @@ public static boolean less(double a, long b) { public static boolean[] lessArray(double a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (double, long) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (double, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -13045,8 +12338,7 @@ public static boolean less(double a, float b) { public static boolean[] lessArray(double a[], float b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (double, float) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (double, float) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -13081,8 +12373,7 @@ public static boolean less(double a, char b) { public static boolean[] lessArray(double a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (double, char) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (double, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -13117,8 +12408,7 @@ public static boolean less(double a, byte b) { public static boolean[] lessArray(double a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (double, byte) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (double, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -13153,8 +12443,7 @@ public static boolean less(double a, short b) { public static boolean[] 
lessArray(double a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (double, short) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (double, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -13189,8 +12478,7 @@ public static boolean less(long a, int b) { public static boolean[] lessArray(long a[], int b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (long, int) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (long, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -13225,8 +12513,7 @@ public static boolean less(long a, double b) { public static boolean[] lessArray(long a[], double b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (long, double) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (long, double) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -13261,8 +12548,7 @@ public static boolean less(long a, long b) { public static boolean[] lessArray(long a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (long, long) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (long, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -13297,8 +12583,7 @@ public static boolean less(long a, float b) { public static boolean[] lessArray(long a[], float b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (long, 
float) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (long, float) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -13333,8 +12618,7 @@ public static boolean less(long a, char b) { public static boolean[] lessArray(long a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (long, char) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (long, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -13369,8 +12653,7 @@ public static boolean less(long a, byte b) { public static boolean[] lessArray(long a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (long, byte) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (long, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -13405,8 +12688,7 @@ public static boolean less(long a, short b) { public static boolean[] lessArray(long a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (long, short) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (long, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -13441,8 +12723,7 @@ public static boolean less(float a, int b) { public static boolean[] lessArray(float a[], int b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (float, int) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (float, int) of different length" + " (a.length=" + 
a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -13477,8 +12758,7 @@ public static boolean less(float a, double b) { public static boolean[] lessArray(float a[], double b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (float, double) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (float, double) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -13513,8 +12793,7 @@ public static boolean less(float a, long b) { public static boolean[] lessArray(float a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (float, long) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (float, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -13549,8 +12828,7 @@ public static boolean less(float a, float b) { public static boolean[] lessArray(float a[], float b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (float, float) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (float, float) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -13585,8 +12863,7 @@ public static boolean less(float a, char b) { public static boolean[] lessArray(float a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (float, char) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (float, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -13621,8 +12898,7 @@ public static 
boolean less(float a, byte b) { public static boolean[] lessArray(float a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (float, byte) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (float, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -13657,8 +12933,7 @@ public static boolean less(float a, short b) { public static boolean[] lessArray(float a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (float, short) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (float, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -13693,8 +12968,7 @@ public static boolean less(char a, int b) { public static boolean[] lessArray(char a[], int b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (char, int) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (char, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -13729,8 +13003,7 @@ public static boolean less(char a, double b) { public static boolean[] lessArray(char a[], double b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (char, double) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (char, double) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -13765,8 +13038,7 @@ public static boolean less(char a, long b) { public static boolean[] lessArray(char a[], long b[]) { if (a.length != b.length) - throw new 
IllegalArgumentException( - "Attempt to compare two arrays (char, long) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (char, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -13801,8 +13073,7 @@ public static boolean less(char a, float b) { public static boolean[] lessArray(char a[], float b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (char, float) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (char, float) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -13837,8 +13108,7 @@ public static boolean less(char a, char b) { public static boolean[] lessArray(char a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (char, char) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (char, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -13873,8 +13143,7 @@ public static boolean less(char a, byte b) { public static boolean[] lessArray(char a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (char, byte) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (char, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -13909,8 +13178,7 @@ public static boolean less(char a, short b) { public static boolean[] lessArray(char a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (char, short) of different length" + + throw new IllegalArgumentException("Attempt to 
compare two arrays (char, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -13945,8 +13213,7 @@ public static boolean less(byte a, int b) { public static boolean[] lessArray(byte a[], int b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (byte, int) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (byte, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -13981,8 +13248,7 @@ public static boolean less(byte a, double b) { public static boolean[] lessArray(byte a[], double b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (byte, double) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (byte, double) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -14017,8 +13283,7 @@ public static boolean less(byte a, long b) { public static boolean[] lessArray(byte a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (byte, long) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (byte, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -14053,8 +13318,7 @@ public static boolean less(byte a, float b) { public static boolean[] lessArray(byte a[], float b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (byte, float) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (byte, float) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new 
boolean[a.length]; @@ -14089,8 +13353,7 @@ public static boolean less(byte a, char b) { public static boolean[] lessArray(byte a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (byte, char) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (byte, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -14125,8 +13388,7 @@ public static boolean less(byte a, byte b) { public static boolean[] lessArray(byte a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (byte, byte) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (byte, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -14161,8 +13423,7 @@ public static boolean less(byte a, short b) { public static boolean[] lessArray(byte a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (byte, short) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (byte, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -14197,8 +13458,7 @@ public static boolean less(short a, int b) { public static boolean[] lessArray(short a[], int b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (short, int) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (short, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -14233,8 +13493,7 @@ public static boolean less(short a, double b) { public static boolean[] lessArray(short a[], double b[]) { 
if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (short, double) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (short, double) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -14269,8 +13528,7 @@ public static boolean less(short a, long b) { public static boolean[] lessArray(short a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (short, long) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (short, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -14305,8 +13563,7 @@ public static boolean less(short a, float b) { public static boolean[] lessArray(short a[], float b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (short, float) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (short, float) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -14341,8 +13598,7 @@ public static boolean less(short a, char b) { public static boolean[] lessArray(short a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (short, char) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (short, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -14377,8 +13633,7 @@ public static boolean less(short a, byte b) { public static boolean[] lessArray(short a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (short, byte) of different length" 
+ + throw new IllegalArgumentException("Attempt to compare two arrays (short, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -14413,8 +13668,7 @@ public static boolean less(short a, short b) { public static boolean[] lessArray(short a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (short, short) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (short, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -14449,8 +13703,7 @@ public static boolean greater(int a, int b) { public static boolean[] greaterArray(int a[], int b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (int, int) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (int, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -14485,8 +13738,7 @@ public static boolean greater(int a, double b) { public static boolean[] greaterArray(int a[], double b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (int, double) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (int, double) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -14521,8 +13773,7 @@ public static boolean greater(int a, long b) { public static boolean[] greaterArray(int a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (int, long) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (int, long) of different length" + " (a.length=" + a.length + ", 
b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -14557,8 +13808,7 @@ public static boolean greater(int a, float b) { public static boolean[] greaterArray(int a[], float b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (int, float) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (int, float) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -14593,8 +13843,7 @@ public static boolean greater(int a, char b) { public static boolean[] greaterArray(int a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (int, char) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (int, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -14629,8 +13878,7 @@ public static boolean greater(int a, byte b) { public static boolean[] greaterArray(int a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (int, byte) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (int, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -14665,8 +13913,7 @@ public static boolean greater(int a, short b) { public static boolean[] greaterArray(int a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (int, short) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (int, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -14701,8 +13948,7 @@ public static boolean greater(double a, int b) 
{ public static boolean[] greaterArray(double a[], int b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (double, int) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (double, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -14737,8 +13983,7 @@ public static boolean greater(double a, double b) { public static boolean[] greaterArray(double a[], double b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (double, double) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (double, double) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -14773,8 +14018,7 @@ public static boolean greater(double a, long b) { public static boolean[] greaterArray(double a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (double, long) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (double, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -14809,8 +14053,7 @@ public static boolean greater(double a, float b) { public static boolean[] greaterArray(double a[], float b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (double, float) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (double, float) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -14845,8 +14088,7 @@ public static boolean greater(double a, char b) { public static boolean[] greaterArray(double a[], char b[]) { if (a.length != b.length) - 
throw new IllegalArgumentException( - "Attempt to compare two arrays (double, char) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (double, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -14881,8 +14123,7 @@ public static boolean greater(double a, byte b) { public static boolean[] greaterArray(double a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (double, byte) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (double, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -14917,8 +14158,7 @@ public static boolean greater(double a, short b) { public static boolean[] greaterArray(double a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (double, short) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (double, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -14953,8 +14193,7 @@ public static boolean greater(long a, int b) { public static boolean[] greaterArray(long a[], int b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (long, int) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (long, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -14989,8 +14228,7 @@ public static boolean greater(long a, double b) { public static boolean[] greaterArray(long a[], double b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (long, double) of different length" + 
+ throw new IllegalArgumentException("Attempt to compare two arrays (long, double) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -15025,8 +14263,7 @@ public static boolean greater(long a, long b) { public static boolean[] greaterArray(long a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (long, long) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (long, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -15061,8 +14298,7 @@ public static boolean greater(long a, float b) { public static boolean[] greaterArray(long a[], float b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (long, float) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (long, float) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -15097,8 +14333,7 @@ public static boolean greater(long a, char b) { public static boolean[] greaterArray(long a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (long, char) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (long, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -15133,8 +14368,7 @@ public static boolean greater(long a, byte b) { public static boolean[] greaterArray(long a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (long, byte) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (long, byte) of different length" + " (a.length=" + 
a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -15169,8 +14403,7 @@ public static boolean greater(long a, short b) { public static boolean[] greaterArray(long a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (long, short) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (long, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -15205,8 +14438,7 @@ public static boolean greater(float a, int b) { public static boolean[] greaterArray(float a[], int b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (float, int) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (float, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -15241,8 +14473,7 @@ public static boolean greater(float a, double b) { public static boolean[] greaterArray(float a[], double b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (float, double) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (float, double) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -15277,8 +14508,7 @@ public static boolean greater(float a, long b) { public static boolean[] greaterArray(float a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (float, long) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (float, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -15313,8 +14543,7 @@ 
public static boolean greater(float a, float b) { public static boolean[] greaterArray(float a[], float b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (float, float) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (float, float) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -15349,8 +14578,7 @@ public static boolean greater(float a, char b) { public static boolean[] greaterArray(float a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (float, char) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (float, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -15385,8 +14613,7 @@ public static boolean greater(float a, byte b) { public static boolean[] greaterArray(float a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (float, byte) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (float, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -15421,8 +14648,7 @@ public static boolean greater(float a, short b) { public static boolean[] greaterArray(float a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (float, short) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (float, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -15457,8 +14683,7 @@ public static boolean greater(char a, int b) { public static boolean[] greaterArray(char a[], int b[]) { if 
(a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (char, int) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (char, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -15493,8 +14718,7 @@ public static boolean greater(char a, double b) { public static boolean[] greaterArray(char a[], double b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (char, double) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (char, double) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -15529,8 +14753,7 @@ public static boolean greater(char a, long b) { public static boolean[] greaterArray(char a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (char, long) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (char, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -15565,8 +14788,7 @@ public static boolean greater(char a, float b) { public static boolean[] greaterArray(char a[], float b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (char, float) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (char, float) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -15601,8 +14823,7 @@ public static boolean greater(char a, char b) { public static boolean[] greaterArray(char a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (char, char) of different 
length" + + throw new IllegalArgumentException("Attempt to compare two arrays (char, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -15637,8 +14858,7 @@ public static boolean greater(char a, byte b) { public static boolean[] greaterArray(char a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (char, byte) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (char, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -15673,8 +14893,7 @@ public static boolean greater(char a, short b) { public static boolean[] greaterArray(char a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (char, short) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (char, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -15709,8 +14928,7 @@ public static boolean greater(byte a, int b) { public static boolean[] greaterArray(byte a[], int b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (byte, int) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (byte, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -15745,8 +14963,7 @@ public static boolean greater(byte a, double b) { public static boolean[] greaterArray(byte a[], double b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (byte, double) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (byte, double) of different length" + " 
(a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -15781,8 +14998,7 @@ public static boolean greater(byte a, long b) { public static boolean[] greaterArray(byte a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (byte, long) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (byte, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -15817,8 +15033,7 @@ public static boolean greater(byte a, float b) { public static boolean[] greaterArray(byte a[], float b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (byte, float) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (byte, float) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -15853,8 +15068,7 @@ public static boolean greater(byte a, char b) { public static boolean[] greaterArray(byte a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (byte, char) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (byte, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -15889,8 +15103,7 @@ public static boolean greater(byte a, byte b) { public static boolean[] greaterArray(byte a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (byte, byte) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (byte, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -15925,8 +15138,7 @@ public 
static boolean greater(byte a, short b) { public static boolean[] greaterArray(byte a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (byte, short) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (byte, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -15961,8 +15173,7 @@ public static boolean greater(short a, int b) { public static boolean[] greaterArray(short a[], int b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (short, int) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (short, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -15997,8 +15208,7 @@ public static boolean greater(short a, double b) { public static boolean[] greaterArray(short a[], double b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (short, double) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (short, double) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -16033,8 +15243,7 @@ public static boolean greater(short a, long b) { public static boolean[] greaterArray(short a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (short, long) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (short, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -16069,8 +15278,7 @@ public static boolean greater(short a, float b) { public static boolean[] greaterArray(short a[], float b[]) { if 
(a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (short, float) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (short, float) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -16105,8 +15313,7 @@ public static boolean greater(short a, char b) { public static boolean[] greaterArray(short a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (short, char) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (short, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -16141,8 +15348,7 @@ public static boolean greater(short a, byte b) { public static boolean[] greaterArray(short a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (short, byte) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (short, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -16177,8 +15383,7 @@ public static boolean greater(short a, short b) { public static boolean[] greaterArray(short a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (short, short) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (short, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -16213,8 +15418,7 @@ public static boolean lessEquals(int a, int b) { public static boolean[] lessEqualsArray(int a[], int b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (int, int) of 
different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (int, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -16249,8 +15453,7 @@ public static boolean lessEquals(int a, double b) { public static boolean[] lessEqualsArray(int a[], double b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (int, double) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (int, double) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -16285,8 +15488,7 @@ public static boolean lessEquals(int a, long b) { public static boolean[] lessEqualsArray(int a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (int, long) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (int, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -16321,8 +15523,7 @@ public static boolean lessEquals(int a, float b) { public static boolean[] lessEqualsArray(int a[], float b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (int, float) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (int, float) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -16357,8 +15558,7 @@ public static boolean lessEquals(int a, char b) { public static boolean[] lessEqualsArray(int a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (int, char) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (int, char) of different 
length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -16393,8 +15593,7 @@ public static boolean lessEquals(int a, byte b) { public static boolean[] lessEqualsArray(int a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (int, byte) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (int, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -16429,8 +15628,7 @@ public static boolean lessEquals(int a, short b) { public static boolean[] lessEqualsArray(int a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (int, short) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (int, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -16465,8 +15663,7 @@ public static boolean lessEquals(double a, int b) { public static boolean[] lessEqualsArray(double a[], int b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (double, int) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (double, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -16501,8 +15698,7 @@ public static boolean lessEquals(double a, double b) { public static boolean[] lessEqualsArray(double a[], double b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (double, double) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (double, double) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new 
boolean[a.length]; @@ -16537,8 +15733,7 @@ public static boolean lessEquals(double a, long b) { public static boolean[] lessEqualsArray(double a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (double, long) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (double, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -16573,8 +15768,7 @@ public static boolean lessEquals(double a, float b) { public static boolean[] lessEqualsArray(double a[], float b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (double, float) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (double, float) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -16609,8 +15803,7 @@ public static boolean lessEquals(double a, char b) { public static boolean[] lessEqualsArray(double a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (double, char) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (double, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -16645,8 +15838,7 @@ public static boolean lessEquals(double a, byte b) { public static boolean[] lessEqualsArray(double a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (double, byte) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (double, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -16681,8 +15873,7 @@ public static boolean 
lessEquals(double a, short b) { public static boolean[] lessEqualsArray(double a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (double, short) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (double, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -16717,8 +15908,7 @@ public static boolean lessEquals(long a, int b) { public static boolean[] lessEqualsArray(long a[], int b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (long, int) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (long, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -16753,8 +15943,7 @@ public static boolean lessEquals(long a, double b) { public static boolean[] lessEqualsArray(long a[], double b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (long, double) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (long, double) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -16789,8 +15978,7 @@ public static boolean lessEquals(long a, long b) { public static boolean[] lessEqualsArray(long a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (long, long) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (long, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -16825,8 +16013,7 @@ public static boolean lessEquals(long a, float b) { public static boolean[] lessEqualsArray(long a[], float b[]) 
{ if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (long, float) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (long, float) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -16861,8 +16048,7 @@ public static boolean lessEquals(long a, char b) { public static boolean[] lessEqualsArray(long a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (long, char) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (long, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -16897,8 +16083,7 @@ public static boolean lessEquals(long a, byte b) { public static boolean[] lessEqualsArray(long a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (long, byte) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (long, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -16933,8 +16118,7 @@ public static boolean lessEquals(long a, short b) { public static boolean[] lessEqualsArray(long a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (long, short) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (long, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -16969,8 +16153,7 @@ public static boolean lessEquals(float a, int b) { public static boolean[] lessEqualsArray(float a[], int b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays 
(float, int) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (float, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -17005,8 +16188,7 @@ public static boolean lessEquals(float a, double b) { public static boolean[] lessEqualsArray(float a[], double b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (float, double) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (float, double) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -17041,8 +16223,7 @@ public static boolean lessEquals(float a, long b) { public static boolean[] lessEqualsArray(float a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (float, long) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (float, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -17077,8 +16258,7 @@ public static boolean lessEquals(float a, float b) { public static boolean[] lessEqualsArray(float a[], float b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (float, float) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (float, float) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -17113,8 +16293,7 @@ public static boolean lessEquals(float a, char b) { public static boolean[] lessEqualsArray(float a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (float, char) of different length" + + throw new 
IllegalArgumentException("Attempt to compare two arrays (float, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -17149,8 +16328,7 @@ public static boolean lessEquals(float a, byte b) { public static boolean[] lessEqualsArray(float a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (float, byte) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (float, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -17185,8 +16363,7 @@ public static boolean lessEquals(float a, short b) { public static boolean[] lessEqualsArray(float a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (float, short) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (float, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -17221,8 +16398,7 @@ public static boolean lessEquals(char a, int b) { public static boolean[] lessEqualsArray(char a[], int b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (char, int) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (char, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -17257,8 +16433,7 @@ public static boolean lessEquals(char a, double b) { public static boolean[] lessEqualsArray(char a[], double b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (char, double) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (char, double) of different length" 
+ " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -17293,8 +16468,7 @@ public static boolean lessEquals(char a, long b) { public static boolean[] lessEqualsArray(char a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (char, long) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (char, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -17329,8 +16503,7 @@ public static boolean lessEquals(char a, float b) { public static boolean[] lessEqualsArray(char a[], float b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (char, float) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (char, float) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -17365,8 +16538,7 @@ public static boolean lessEquals(char a, char b) { public static boolean[] lessEqualsArray(char a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (char, char) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (char, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -17401,8 +16573,7 @@ public static boolean lessEquals(char a, byte b) { public static boolean[] lessEqualsArray(char a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (char, byte) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (char, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ 
-17437,8 +16608,7 @@ public static boolean lessEquals(char a, short b) { public static boolean[] lessEqualsArray(char a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (char, short) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (char, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -17473,8 +16643,7 @@ public static boolean lessEquals(byte a, int b) { public static boolean[] lessEqualsArray(byte a[], int b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (byte, int) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (byte, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -17509,8 +16678,7 @@ public static boolean lessEquals(byte a, double b) { public static boolean[] lessEqualsArray(byte a[], double b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (byte, double) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (byte, double) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -17545,8 +16713,7 @@ public static boolean lessEquals(byte a, long b) { public static boolean[] lessEqualsArray(byte a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (byte, long) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (byte, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -17581,8 +16748,7 @@ public static boolean lessEquals(byte a, float b) { public static boolean[] 
lessEqualsArray(byte a[], float b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (byte, float) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (byte, float) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -17617,8 +16783,7 @@ public static boolean lessEquals(byte a, char b) { public static boolean[] lessEqualsArray(byte a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (byte, char) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (byte, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -17653,8 +16818,7 @@ public static boolean lessEquals(byte a, byte b) { public static boolean[] lessEqualsArray(byte a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (byte, byte) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (byte, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -17689,8 +16853,7 @@ public static boolean lessEquals(byte a, short b) { public static boolean[] lessEqualsArray(byte a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (byte, short) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (byte, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -17725,8 +16888,7 @@ public static boolean lessEquals(short a, int b) { public static boolean[] lessEqualsArray(short a[], int b[]) { if (a.length != b.length) - throw new 
IllegalArgumentException( - "Attempt to compare two arrays (short, int) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (short, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -17761,8 +16923,7 @@ public static boolean lessEquals(short a, double b) { public static boolean[] lessEqualsArray(short a[], double b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (short, double) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (short, double) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -17797,8 +16958,7 @@ public static boolean lessEquals(short a, long b) { public static boolean[] lessEqualsArray(short a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (short, long) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (short, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -17833,8 +16993,7 @@ public static boolean lessEquals(short a, float b) { public static boolean[] lessEqualsArray(short a[], float b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (short, float) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (short, float) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -17869,8 +17028,7 @@ public static boolean lessEquals(short a, char b) { public static boolean[] lessEqualsArray(short a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (short, char) of 
different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (short, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -17905,8 +17063,7 @@ public static boolean lessEquals(short a, byte b) { public static boolean[] lessEqualsArray(short a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (short, byte) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (short, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -17941,8 +17098,7 @@ public static boolean lessEquals(short a, short b) { public static boolean[] lessEqualsArray(short a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (short, short) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (short, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -17977,8 +17133,7 @@ public static boolean greaterEquals(int a, int b) { public static boolean[] greaterEqualsArray(int a[], int b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (int, int) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (int, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -18013,8 +17168,7 @@ public static boolean greaterEquals(int a, double b) { public static boolean[] greaterEqualsArray(int a[], double b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (int, double) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays 
(int, double) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -18049,8 +17203,7 @@ public static boolean greaterEquals(int a, long b) { public static boolean[] greaterEqualsArray(int a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (int, long) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (int, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -18085,8 +17238,7 @@ public static boolean greaterEquals(int a, float b) { public static boolean[] greaterEqualsArray(int a[], float b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (int, float) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (int, float) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -18121,8 +17273,7 @@ public static boolean greaterEquals(int a, char b) { public static boolean[] greaterEqualsArray(int a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (int, char) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (int, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -18157,8 +17308,7 @@ public static boolean greaterEquals(int a, byte b) { public static boolean[] greaterEqualsArray(int a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (int, byte) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (int, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + 
')'); boolean[] ret = new boolean[a.length]; @@ -18193,8 +17343,7 @@ public static boolean greaterEquals(int a, short b) { public static boolean[] greaterEqualsArray(int a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (int, short) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (int, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -18229,8 +17378,7 @@ public static boolean greaterEquals(double a, int b) { public static boolean[] greaterEqualsArray(double a[], int b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (double, int) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (double, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -18265,8 +17413,7 @@ public static boolean greaterEquals(double a, double b) { public static boolean[] greaterEqualsArray(double a[], double b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (double, double) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (double, double) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -18301,8 +17448,7 @@ public static boolean greaterEquals(double a, long b) { public static boolean[] greaterEqualsArray(double a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (double, long) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (double, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ 
-18337,8 +17483,7 @@ public static boolean greaterEquals(double a, float b) { public static boolean[] greaterEqualsArray(double a[], float b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (double, float) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (double, float) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -18373,8 +17518,7 @@ public static boolean greaterEquals(double a, char b) { public static boolean[] greaterEqualsArray(double a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (double, char) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (double, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -18409,8 +17553,7 @@ public static boolean greaterEquals(double a, byte b) { public static boolean[] greaterEqualsArray(double a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (double, byte) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (double, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -18445,8 +17588,7 @@ public static boolean greaterEquals(double a, short b) { public static boolean[] greaterEqualsArray(double a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (double, short) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (double, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -18481,8 +17623,7 @@ public static boolean 
greaterEquals(long a, int b) { public static boolean[] greaterEqualsArray(long a[], int b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (long, int) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (long, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -18517,8 +17658,7 @@ public static boolean greaterEquals(long a, double b) { public static boolean[] greaterEqualsArray(long a[], double b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (long, double) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (long, double) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -18553,8 +17693,7 @@ public static boolean greaterEquals(long a, long b) { public static boolean[] greaterEqualsArray(long a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (long, long) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (long, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -18589,8 +17728,7 @@ public static boolean greaterEquals(long a, float b) { public static boolean[] greaterEqualsArray(long a[], float b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (long, float) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (long, float) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -18625,8 +17763,7 @@ public static boolean greaterEquals(long a, char b) { public static boolean[] 
greaterEqualsArray(long a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (long, char) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (long, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -18661,8 +17798,7 @@ public static boolean greaterEquals(long a, byte b) { public static boolean[] greaterEqualsArray(long a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (long, byte) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (long, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -18697,8 +17833,7 @@ public static boolean greaterEquals(long a, short b) { public static boolean[] greaterEqualsArray(long a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (long, short) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (long, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -18733,8 +17868,7 @@ public static boolean greaterEquals(float a, int b) { public static boolean[] greaterEqualsArray(float a[], int b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (float, int) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (float, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -18769,8 +17903,7 @@ public static boolean greaterEquals(float a, double b) { public static boolean[] greaterEqualsArray(float a[], double b[]) { if (a.length != b.length) - 
throw new IllegalArgumentException( - "Attempt to compare two arrays (float, double) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (float, double) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -18805,8 +17938,7 @@ public static boolean greaterEquals(float a, long b) { public static boolean[] greaterEqualsArray(float a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (float, long) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (float, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -18841,8 +17973,7 @@ public static boolean greaterEquals(float a, float b) { public static boolean[] greaterEqualsArray(float a[], float b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (float, float) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (float, float) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -18877,8 +18008,7 @@ public static boolean greaterEquals(float a, char b) { public static boolean[] greaterEqualsArray(float a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (float, char) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (float, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -18913,8 +18043,7 @@ public static boolean greaterEquals(float a, byte b) { public static boolean[] greaterEqualsArray(float a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare 
two arrays (float, byte) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (float, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -18949,8 +18078,7 @@ public static boolean greaterEquals(float a, short b) { public static boolean[] greaterEqualsArray(float a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (float, short) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (float, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -18985,8 +18113,7 @@ public static boolean greaterEquals(char a, int b) { public static boolean[] greaterEqualsArray(char a[], int b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (char, int) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (char, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -19021,8 +18148,7 @@ public static boolean greaterEquals(char a, double b) { public static boolean[] greaterEqualsArray(char a[], double b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (char, double) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (char, double) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -19057,8 +18183,7 @@ public static boolean greaterEquals(char a, long b) { public static boolean[] greaterEqualsArray(char a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (char, long) of different length" + + throw new 
IllegalArgumentException("Attempt to compare two arrays (char, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -19093,8 +18218,7 @@ public static boolean greaterEquals(char a, float b) { public static boolean[] greaterEqualsArray(char a[], float b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (char, float) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (char, float) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -19129,8 +18253,7 @@ public static boolean greaterEquals(char a, char b) { public static boolean[] greaterEqualsArray(char a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (char, char) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (char, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -19165,8 +18288,7 @@ public static boolean greaterEquals(char a, byte b) { public static boolean[] greaterEqualsArray(char a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (char, byte) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (char, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -19201,8 +18323,7 @@ public static boolean greaterEquals(char a, short b) { public static boolean[] greaterEqualsArray(char a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (char, short) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (char, short) of 
different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -19237,8 +18358,7 @@ public static boolean greaterEquals(byte a, int b) { public static boolean[] greaterEqualsArray(byte a[], int b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (byte, int) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (byte, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -19273,8 +18393,7 @@ public static boolean greaterEquals(byte a, double b) { public static boolean[] greaterEqualsArray(byte a[], double b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (byte, double) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (byte, double) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -19309,8 +18428,7 @@ public static boolean greaterEquals(byte a, long b) { public static boolean[] greaterEqualsArray(byte a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (byte, long) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (byte, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -19345,8 +18463,7 @@ public static boolean greaterEquals(byte a, float b) { public static boolean[] greaterEqualsArray(byte a[], float b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (byte, float) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (byte, float) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + 
')'); boolean[] ret = new boolean[a.length]; @@ -19381,8 +18498,7 @@ public static boolean greaterEquals(byte a, char b) { public static boolean[] greaterEqualsArray(byte a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (byte, char) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (byte, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -19417,8 +18533,7 @@ public static boolean greaterEquals(byte a, byte b) { public static boolean[] greaterEqualsArray(byte a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (byte, byte) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (byte, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -19453,8 +18568,7 @@ public static boolean greaterEquals(byte a, short b) { public static boolean[] greaterEqualsArray(byte a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (byte, short) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (byte, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -19489,8 +18603,7 @@ public static boolean greaterEquals(short a, int b) { public static boolean[] greaterEqualsArray(short a[], int b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (short, int) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (short, int) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -19525,8 +18638,7 @@ public 
static boolean greaterEquals(short a, double b) { public static boolean[] greaterEqualsArray(short a[], double b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (short, double) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (short, double) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -19561,8 +18673,7 @@ public static boolean greaterEquals(short a, long b) { public static boolean[] greaterEqualsArray(short a[], long b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (short, long) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (short, long) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -19597,8 +18708,7 @@ public static boolean greaterEquals(short a, float b) { public static boolean[] greaterEqualsArray(short a[], float b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (short, float) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (short, float) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -19633,8 +18743,7 @@ public static boolean greaterEquals(short a, char b) { public static boolean[] greaterEqualsArray(short a[], char b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (short, char) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (short, char) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -19669,8 +18778,7 @@ public static boolean greaterEquals(short a, byte b) { public 
static boolean[] greaterEqualsArray(short a[], byte b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (short, byte) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (short, byte) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -19705,8 +18813,7 @@ public static boolean greaterEquals(short a, short b) { public static boolean[] greaterEqualsArray(short a[], short b[]) { if (a.length != b.length) - throw new IllegalArgumentException( - "Attempt to compare two arrays (short, short) of different length" + + throw new IllegalArgumentException("Attempt to compare two arrays (short, short) of different length" + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; @@ -19752,28 +18859,25 @@ public static boolean greaterEquals(Comparable a, Comparable b) { } public static Boolean binOr(Boolean a, Boolean b) { - return a == QueryConstants.NULL_BOOLEAN || b == QueryConstants.NULL_BOOLEAN - ? QueryConstants.NULL_BOOLEAN - : Boolean.valueOf(a | b); + return a == QueryConstants.NULL_BOOLEAN || b == QueryConstants.NULL_BOOLEAN ? QueryConstants.NULL_BOOLEAN + : Boolean.valueOf(a | b); } public static Boolean xor(Boolean a, Boolean b) { - return a == QueryConstants.NULL_BOOLEAN || b == QueryConstants.NULL_BOOLEAN - ? QueryConstants.NULL_BOOLEAN - : Boolean.valueOf(a ^ b); + return a == QueryConstants.NULL_BOOLEAN || b == QueryConstants.NULL_BOOLEAN ? QueryConstants.NULL_BOOLEAN + : Boolean.valueOf(a ^ b); } public static Boolean binAnd(Boolean a, Boolean b) { - return a == QueryConstants.NULL_BOOLEAN || b == QueryConstants.NULL_BOOLEAN - ? QueryConstants.NULL_BOOLEAN - : Boolean.valueOf(a & b); + return a == QueryConstants.NULL_BOOLEAN || b == QueryConstants.NULL_BOOLEAN ? 
QueryConstants.NULL_BOOLEAN + : Boolean.valueOf(a & b); } public static boolean[] eqArray(Boolean a[], boolean b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to check equality of two arrays (Boolean, boolean) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to check equality of two arrays (Boolean, boolean) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; for (int i = 0; i < a.length; i++) { @@ -19786,8 +18890,8 @@ public static boolean[] eqArray(Boolean a[], boolean b[]) { public static boolean[] eqArray(boolean a[], Boolean b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to check equality of two arrays (boolean, Boolean) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to check equality of two arrays (boolean, Boolean) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; for (int i = 0; i < a.length; i++) { @@ -19800,8 +18904,8 @@ public static boolean[] eqArray(boolean a[], Boolean b[]) { public static boolean[] eqArray(boolean a[], boolean b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to check equality of two arrays (boolean, boolean) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to check equality of two arrays (boolean, boolean) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; for (int i = 0; i < a.length; i++) { @@ -19814,8 +18918,8 @@ public static boolean[] eqArray(boolean a[], boolean b[]) { public static boolean[] eqArray(Object a[], Object b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to check equality of two arrays (Object, Object) of different length" + - " 
(a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to check equality of two arrays (Object, Object) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; for (int i = 0; i < a.length; i++) { @@ -19864,8 +18968,8 @@ public static boolean[] eqArray(Object a, Object b[]) { public static boolean[] lessArray(Comparable a[], Comparable b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to compare two arrays (Comparable, Comparable) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to compare two arrays (Comparable, Comparable) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; for (int i = 0; i < a.length; i++) { @@ -19896,8 +19000,8 @@ public static boolean[] lessArray(Comparable a, Comparable b[]) { public static boolean[] greaterArray(Comparable a[], Comparable b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to compare two arrays (Comparable, Comparable) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to compare two arrays (Comparable, Comparable) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; for (int i = 0; i < a.length; i++) { @@ -19928,8 +19032,8 @@ public static boolean[] greaterArray(Comparable a, Comparable b[]) { public static boolean[] lessEqualsArray(Comparable a[], Comparable b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to compare two arrays (Comparable, Comparable) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to compare two arrays (Comparable, Comparable) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; for (int 
i = 0; i < a.length; i++) { @@ -19960,8 +19064,8 @@ public static boolean[] lessEqualsArray(Comparable a, Comparable b[]) { public static boolean[] greaterEqualsArray(Comparable a[], Comparable b[]) { if (a.length != b.length) throw new IllegalArgumentException( - "Attempt to compare two arrays (Comparable, Comparable) of different length" + - " (a.length=" + a.length + ", b.length=" + b.length + ')'); + "Attempt to compare two arrays (Comparable, Comparable) of different length" + + " (a.length=" + a.length + ", b.length=" + b.length + ')'); boolean[] ret = new boolean[a.length]; for (int i = 0; i < a.length; i++) { diff --git a/DB/src/main/java/io/deephaven/db/tables/lang/DBLanguageParser.java b/DB/src/main/java/io/deephaven/db/tables/lang/DBLanguageParser.java index c5640257dbc..deb07184e13 100644 --- a/DB/src/main/java/io/deephaven/db/tables/lang/DBLanguageParser.java +++ b/DB/src/main/java/io/deephaven/db/tables/lang/DBLanguageParser.java @@ -38,8 +38,7 @@ import static io.deephaven.db.util.PythonScopeJpyImpl.*; -public final class DBLanguageParser - extends GenericVisitorAdapter { +public final class DBLanguageParser extends GenericVisitorAdapter { private final Collection packageImports; private final Collection classImports; @@ -49,13 +48,11 @@ public final class DBLanguageParser private final HashSet variablesUsed = new HashSet<>(); - private static final Class NULL_CLASS = DBLanguageParser.class; // I needed some class to - // represent null. So I chose - // this one since it won't be - // used... + private static final Class NULL_CLASS = DBLanguageParser.class; // I needed some class to represent null. So I chose + // this one since it won't be used... 
- private static final Set simpleNameWhiteList = Collections.unmodifiableSet( - new HashSet<>(Arrays.asList("java.lang", DbArrayBase.class.getPackage().getName()))); + private static final Set simpleNameWhiteList = Collections + .unmodifiableSet(new HashSet<>(Arrays.asList("java.lang", DbArrayBase.class.getPackage().getName()))); /** * The result of the DBLanguageParser for the expression passed given to the constructor. @@ -67,67 +64,62 @@ public final class DBLanguageParser /** * Create a DBLanguageParser and parse the given {@code expression}. After construction, the - * {@link DBLanguageParser.Result result} of parsing the {@code expression} is available with - * the {@link #getResult()}} method. + * {@link DBLanguageParser.Result result} of parsing the {@code expression} is available with the + * {@link #getResult()}} method. * * @param expression The query language expression to parse * @param packageImports Wildcard package imports * @param classImports Individual class imports - * @param staticImports Wildcard static imports. All static variables and methods for the given - * classes are imported. + * @param staticImports Wildcard static imports. All static variables and methods for the given classes are + * imported. 
* @param variables A map of the names of scope variables to their types - * @param variableParameterizedTypes A map of the names of scope variables to their paramterized - * types + * @param variableParameterizedTypes A map of the names of scope variables to their paramterized types * @throws QueryLanguageParseException If any exception or error is encountered */ public DBLanguageParser(String expression, - Collection packageImports, - Collection classImports, - Collection staticImports, - Map variables, - Map variableParameterizedTypes) throws QueryLanguageParseException { - this(expression, packageImports, classImports, staticImports, variables, - variableParameterizedTypes, true); + Collection packageImports, + Collection classImports, + Collection staticImports, + Map variables, + Map variableParameterizedTypes) throws QueryLanguageParseException { + this(expression, packageImports, classImports, staticImports, variables, variableParameterizedTypes, true); } /** * Create a DBLanguageParser and parse the given {@code expression}. After construction, the - * {@link DBLanguageParser.Result result} of parsing the {@code expression} is available with - * the {@link #getResult()}} method. + * {@link DBLanguageParser.Result result} of parsing the {@code expression} is available with the + * {@link #getResult()}} method. * * @param expression The query language expression to parse * @param packageImports Wildcard package imports * @param classImports Individual class imports - * @param staticImports Wildcard static imports. All static variables and methods for the given - * classes are imported. + * @param staticImports Wildcard static imports. All static variables and methods for the given classes are + * imported. 
* @param variables A map of the names of scope variables to their types - * @param variableParameterizedTypes A map of the names of scope variables to their paramterized - * types + * @param variableParameterizedTypes A map of the names of scope variables to their paramterized types * @param unboxArguments If true it will unbox the query scope arguments * @throws QueryLanguageParseException If any exception or error is encountered */ public DBLanguageParser(String expression, - Collection packageImports, - Collection classImports, - Collection staticImports, - Map variables, - Map variableParameterizedTypes, boolean unboxArguments) - throws QueryLanguageParseException { + Collection packageImports, + Collection classImports, + Collection staticImports, + Map variables, + Map variableParameterizedTypes, boolean unboxArguments) + throws QueryLanguageParseException { this.packageImports = packageImports == null ? Collections.emptySet() - : Require.notContainsNull(packageImports, "packageImports"); - this.classImports = classImports == null ? Collections.emptySet() - : Require.notContainsNull(classImports, "classImports"); + : Require.notContainsNull(packageImports, "packageImports"); + this.classImports = + classImports == null ? Collections.emptySet() : Require.notContainsNull(classImports, "classImports"); this.staticImports = staticImports == null ? Collections.emptySet() - : Require.notContainsNull(staticImports, "staticImports"); + : Require.notContainsNull(staticImports, "staticImports"); this.variables = variables == null ? Collections.emptyMap() : variables; this.variableParameterizedTypes = - variableParameterizedTypes == null ? Collections.emptyMap() - : variableParameterizedTypes; + variableParameterizedTypes == null ? Collections.emptyMap() : variableParameterizedTypes; this.unboxArguments = unboxArguments; // Convert backticks *before* converting single equals! 
- // Backticks must be converted first in order to properly identify single-equals signs - // within + // Backticks must be converted first in order to properly identify single-equals signs within // String and char literals, which should *not* be converted. expression = convertBackticks(expression); expression = convertSingleEquals(expression); @@ -143,35 +135,34 @@ public DBLanguageParser(String expression, } result = new Result(type, printer.builder.toString(), variablesUsed); - } catch (Throwable e) { // need to catch it and make a new one because it contains - // unserializable variables... + } catch (Throwable e) { // need to catch it and make a new one because it contains unserializable variables... final StringBuilder exceptionMessageBuilder = new StringBuilder(1024) - .append("\n\nHaving trouble with the following expression:\n") - .append("Full expression : ") - .append(expression) - .append('\n') - .append("Expression having trouble : ") - .append(printer) - .append('\n'); + .append("\n\nHaving trouble with the following expression:\n") + .append("Full expression : ") + .append(expression) + .append('\n') + .append("Expression having trouble : ") + .append(printer) + .append('\n'); final boolean VERBOSE_EXCEPTION_MESSAGES = Configuration - .getInstance() - .getBooleanWithDefault("DBLanguageParser.verboseExceptionMessages", false); + .getInstance() + .getBooleanWithDefault("DBLanguageParser.verboseExceptionMessages", false); if (VERBOSE_EXCEPTION_MESSAGES) { // include stack trace exceptionMessageBuilder - .append("Exception full stack trace: ") - .append(ExceptionUtils.getStackTrace(e)) - .append('\n'); + .append("Exception full stack trace: ") + .append(ExceptionUtils.getStackTrace(e)) + .append('\n'); } else { exceptionMessageBuilder - .append("Exception message : ") - .append(e.getMessage()) - .append('\n'); + .append("Exception message : ") + .append(e.getMessage()) + .append('\n'); } QueryLanguageParseException newException = - new 
QueryLanguageParseException(exceptionMessageBuilder.toString()); + new QueryLanguageParseException(exceptionMessageBuilder.toString()); newException.setStackTrace(e.getStackTrace()); throw newException; @@ -185,24 +176,22 @@ private QueryLanguageParseException(String message) { } /** - * Retrieves the result of the parser, which includes the translated expression, its return - * type, and the variables it uses. + * Retrieves the result of the parser, which includes the translated expression, its return type, and the variables + * it uses. */ public Result getResult() { return result; } /** - * Convert single equals signs (the assignment operator) to double-equals signs (equality - * operator). The parser will then replace the equality operator with an appropriate - * equality-checking methods. Assignments are not supported. + * Convert single equals signs (the assignment operator) to double-equals signs (equality operator). The parser will + * then replace the equality operator with an appropriate equality-checking methods. Assignments are not supported. * - * This method does not have any special handling for backticks; accordingly this method should - * be run after {@link #convertBackticks(String)}. + * This method does not have any special handling for backticks; accordingly this method should be run after + * {@link #convertBackticks(String)}. * * @param expression The expression to convert - * @return The expression, with unescaped single-equals signs converted to the equality operator - * (double-equals) + * @return The expression, with unescaped single-equals signs converted to the equality operator (double-equals) */ public static String convertSingleEquals(String expression) { final int len = expression.length(); @@ -231,9 +220,8 @@ public static String convertSingleEquals(String expression) { ret.append(c); - if (c == '=' && cBefore != '=' && cBefore != '<' && cBefore != '>' && cBefore != '!' 
- && cAfter != '=' - && !isInChar && !isInStr) { + if (c == '=' && cBefore != '=' && cBefore != '<' && cBefore != '>' && cBefore != '!' && cAfter != '=' + && !isInChar && !isInStr) { ret.append('='); } } @@ -241,12 +229,11 @@ public static String convertSingleEquals(String expression) { } /** - * Convert backticks into double-quote characters, unless the backticks are already enclosed in - * double-quotes. + * Convert backticks into double-quote characters, unless the backticks are already enclosed in double-quotes. * - * Also, within backticks, double-quotes are automatically re-escaped. For example, in the - * following string "`This expression uses \"double quotes\"!`" The string will be converted to: - * "\"This expression uses \\\"double quotes\\\"!\"" + * Also, within backticks, double-quotes are automatically re-escaped. For example, in the following string "`This + * expression uses \"double quotes\"!`" The string will be converted to: "\"This expression uses \\\"double + * quotes\\\"!\"" * * @param expression The expression to convert * @return The expression, with backticks and double-quotes appropriately converted and escaped @@ -333,8 +320,8 @@ static Class binaryNumericPromotionType(Class type1, Class type2) { } /** - * Search for a class with the given {@code name}. This can be a fully-qualified name, or the - * simple name of an imported class. + * Search for a class with the given {@code name}. This can be a fully-qualified name, or the simple name of an + * imported class. * * @param name The name of the class to search for * @return The class, if it exists; otherwise, {@code null}. 
@@ -370,32 +357,30 @@ private Class findClass(String name) { */ private Class findNestedClass(Class enclosingClass, String nestedClassName) { Map> m = Stream - .>of(enclosingClass.getDeclaredClasses()) - .filter((cls) -> nestedClassName.equals(cls.getSimpleName())) - .collect(Collectors.toMap(Class::getSimpleName, Function.identity())); + .>of(enclosingClass.getDeclaredClasses()) + .filter((cls) -> nestedClassName.equals(cls.getSimpleName())) + .collect(Collectors.toMap(Class::getSimpleName, Function.identity())); return m.get(nestedClassName); } @SuppressWarnings({"ConstantConditions"}) private Method getMethod(final Class scope, final String methodName, final Class paramTypes[], - final Class parameterizedTypes[][]) { + final Class parameterizedTypes[][]) { final ArrayList acceptableMethods = new ArrayList<>(); if (scope == null) { for (final Class classImport : staticImports) { for (Method method : classImport.getDeclaredMethods()) { - possiblyAddExecutable(acceptableMethods, method, methodName, paramTypes, - parameterizedTypes); + possiblyAddExecutable(acceptableMethods, method, methodName, paramTypes, parameterizedTypes); } } - // for Python function/Groovy closure call syntax without the explicit 'call' keyword, - // check if it is defined in Query scope + // for Python function/Groovy closure call syntax without the explicit 'call' keyword, check if it is + // defined in Query scope if (acceptableMethods.size() == 0) { final Class methodClass = variables.get(methodName); if (methodClass != null && isPotentialImplicitCall(methodClass)) { for (Method method : methodClass.getMethods()) { - possiblyAddExecutable(acceptableMethods, method, "call", paramTypes, - parameterizedTypes); + possiblyAddExecutable(acceptableMethods, method, "call", paramTypes, parameterizedTypes); } } if (acceptableMethods.size() > 0) { @@ -404,30 +389,26 @@ private Method getMethod(final Class scope, final String methodName, final Class } } else { if (scope == org.jpy.PyObject.class) { - 
// This is a Python method call, assume it exists and wrap in - // PythonScopeJpyImpl.CallableWrapper + // This is a Python method call, assume it exists and wrap in PythonScopeJpyImpl.CallableWrapper for (Method method : CallableWrapper.class.getDeclaredMethods()) { - possiblyAddExecutable(acceptableMethods, method, "call", paramTypes, - parameterizedTypes); + possiblyAddExecutable(acceptableMethods, method, "call", paramTypes, parameterizedTypes); } } else { for (final Method method : scope.getMethods()) { - possiblyAddExecutable(acceptableMethods, method, methodName, paramTypes, - parameterizedTypes); + possiblyAddExecutable(acceptableMethods, method, methodName, paramTypes, parameterizedTypes); } // If 'scope' is an interface, we must explicitly consider the methods in Object if (scope.isInterface()) { for (final Method method : Object.class.getMethods()) { - possiblyAddExecutable(acceptableMethods, method, methodName, paramTypes, - parameterizedTypes); + possiblyAddExecutable(acceptableMethods, method, methodName, paramTypes, parameterizedTypes); } } } } if (acceptableMethods.size() == 0) { - throw new RuntimeException("Cannot find method " + methodName + '(' - + paramsTypesToString(paramTypes) + ')' + (scope != null ? " in " + scope : "")); + throw new RuntimeException("Cannot find method " + methodName + '(' + paramsTypesToString(paramTypes) + ')' + + (scope != null ? 
" in " + scope : "")); } Method bestMethod = null; @@ -441,17 +422,16 @@ private Method getMethod(final Class scope, final String methodName, final Class } private static boolean isPotentialImplicitCall(Class methodClass) { - return CallableWrapper.class.isAssignableFrom(methodClass) - || methodClass == groovy.lang.Closure.class; + return CallableWrapper.class.isAssignableFrom(methodClass) || methodClass == groovy.lang.Closure.class; } private Class getMethodReturnType(Class scope, String methodName, Class paramTypes[], - Class parameterizedTypes[][]) { + Class parameterizedTypes[][]) { return getMethod(scope, methodName, paramTypes, parameterizedTypes).getReturnType(); } private Class calculateMethodReturnTypeUsingGenerics(Method method, Class paramTypes[], - Class parameterizedTypes[][]) { + Class parameterizedTypes[][]) { Type genericReturnType = method.getGenericReturnType(); int arrayDimensions = 0; @@ -489,10 +469,8 @@ private Class calculateMethodReturnTypeUsingGenerics(Method method, Class paramT return paramType; } - if ((genericParamType instanceof ParameterizedType) - && (parameterizedTypes[i] != null)) { - Type methodParameterizedTypes[] = - ((ParameterizedType) genericParamType).getActualTypeArguments(); + if ((genericParamType instanceof ParameterizedType) && (parameterizedTypes[i] != null)) { + Type methodParameterizedTypes[] = ((ParameterizedType) genericParamType).getActualTypeArguments(); for (int j = 0; j < methodParameterizedTypes.length; j++) { if (genericReturnType.equals(methodParameterizedTypes[j])) { @@ -507,24 +485,22 @@ private Class calculateMethodReturnTypeUsingGenerics(Method method, Class paramT @SuppressWarnings({"ConstantConditions"}) private Constructor getConstructor(final Class scope, final Class paramTypes[], - final Class parameterizedTypes[][]) { + final Class parameterizedTypes[][]) { final ArrayList acceptableConstructors = new ArrayList<>(); for (final Constructor constructor : scope.getConstructors()) { - 
possiblyAddExecutable(acceptableConstructors, constructor, scope.getName(), paramTypes, - parameterizedTypes); + possiblyAddExecutable(acceptableConstructors, constructor, scope.getName(), paramTypes, parameterizedTypes); } if (acceptableConstructors.size() == 0) { throw new RuntimeException("Cannot find constructor for " + scope.getName() + '(' - + paramsTypesToString(paramTypes) + ')' + (scope != null ? " in " + scope : "")); + + paramsTypesToString(paramTypes) + ')' + (scope != null ? " in " + scope : "")); } Constructor bestConstructor = null; for (final Constructor constructor : acceptableConstructors) { - if (bestConstructor == null - || isMoreSpecificConstructor(bestConstructor, constructor)) { + if (bestConstructor == null || isMoreSpecificConstructor(bestConstructor, constructor)) { bestConstructor = constructor; } } @@ -546,27 +522,26 @@ private String paramsTypesToString(Class paramTypes[]) { return buf.toString(); } - private static void possiblyAddExecutable( - final List accepted, - final EXECUTABLE_TYPE candidate, - final String name, final Class paramTypes[], final Class parameterizedTypes[][]) { + private static void possiblyAddExecutable(final List accepted, + final EXECUTABLE_TYPE candidate, + final String name, final Class paramTypes[], final Class parameterizedTypes[][]) { if (candidate.getName().equals(name)) { final Class candidateParamTypes[] = candidate.getParameterTypes(); if (candidate.isVarArgs() ? candidateParamTypes.length > paramTypes.length + 1 - : candidateParamTypes.length != paramTypes.length) { + : candidateParamTypes.length != paramTypes.length) { return; } boolean acceptable = true; - for (int i = 0; i < (candidate.isVarArgs() ? candidateParamTypes.length - 1 - : candidateParamTypes.length); i++) { + for (int i = + 0; i < (candidate.isVarArgs() ? 
candidateParamTypes.length - 1 : candidateParamTypes.length); i++) { Class paramType = paramTypes[i]; if (isDbArray(paramType) && candidateParamTypes[i].isArray()) { - paramType = convertDBArray(paramType, - parameterizedTypes[i] == null ? null : parameterizedTypes[i][0]); + paramType = + convertDBArray(paramType, parameterizedTypes[i] == null ? null : parameterizedTypes[i][0]); } if (!isAssignableFrom(candidateParamTypes[i], paramType)) { @@ -575,33 +550,29 @@ private static void possiblyAddExecutable( } } - // If the paramTypes includes 1+ varArgs check the classes match -- no need to check if - // there are 0 varArgs + // If the paramTypes includes 1+ varArgs check the classes match -- no need to check if there are 0 varArgs if (candidate.isVarArgs() && paramTypes.length >= candidateParamTypes.length) { Class paramType = paramTypes[candidateParamTypes.length - 1]; - if (isDbArray(paramType) - && candidateParamTypes[candidateParamTypes.length - 1].isArray()) { - paramType = convertDBArray(paramType, - parameterizedTypes[candidateParamTypes.length - 1] == null ? null - : parameterizedTypes[candidateParamTypes.length - 1][0]); + if (isDbArray(paramType) && candidateParamTypes[candidateParamTypes.length - 1].isArray()) { + paramType = + convertDBArray(paramType, parameterizedTypes[candidateParamTypes.length - 1] == null ? 
null + : parameterizedTypes[candidateParamTypes.length - 1][0]); } if (candidateParamTypes.length == paramTypes.length && paramType.isArray()) { - if (!isAssignableFrom(candidateParamTypes[candidateParamTypes.length - 1], - paramType)) { + if (!isAssignableFrom(candidateParamTypes[candidateParamTypes.length - 1], paramType)) { acceptable = false; } } else { - final Class lastClass = - candidateParamTypes[candidateParamTypes.length - 1].getComponentType(); + final Class lastClass = candidateParamTypes[candidateParamTypes.length - 1].getComponentType(); for (int i = candidateParamTypes.length - 1; i < paramTypes.length; i++) { paramType = paramTypes[i]; if (isDbArray(paramType) && lastClass.isArray()) { paramType = convertDBArray(paramType, - parameterizedTypes[i] == null ? null : parameterizedTypes[i][0]); + parameterizedTypes[i] == null ? null : parameterizedTypes[i][0]); } if (!isAssignableFrom(lastClass, paramType)) { @@ -621,8 +592,7 @@ private static void possiblyAddExecutable( private static boolean isMoreSpecificConstructor(final Constructor c1, final Constructor c2) { final Boolean executableResult = isMoreSpecificExecutable(c1, c2); if (executableResult == null) { - throw new IllegalStateException( - "Ambiguous comparison between constructors " + c1 + " and " + c2); + throw new IllegalStateException("Ambiguous comparison between constructors " + c1 + " and " + c2); } return executableResult; } @@ -630,15 +600,13 @@ private static boolean isMoreSpecificConstructor(final Constructor c1, final Con private static boolean isMoreSpecificMethod(final Method m1, final Method m2) { final Boolean executableResult = isMoreSpecificExecutable(m1, m2); // NB: executableResult can be null in cases where an override narrows its return type - return executableResult == null ? isAssignableFrom(m1.getReturnType(), m2.getReturnType()) - : executableResult; + return executableResult == null ? 
isAssignableFrom(m1.getReturnType(), m2.getReturnType()) : executableResult; } - private static Boolean isMoreSpecificExecutable( - final EXECUTABLE_TYPE e1, final EXECUTABLE_TYPE e2) { + private static Boolean isMoreSpecificExecutable(final EXECUTABLE_TYPE e1, + final EXECUTABLE_TYPE e2) { - // var args (variable arity) methods always go after fixed arity methods when determining - // the proper overload + // var args (variable arity) methods always go after fixed arity methods when determining the proper overload // https://docs.oracle.com/javase/specs/jls/se7/html/jls-15.html#jls-15.12.2 if (e1.isVarArgs() && !e2.isVarArgs()) { return true; @@ -651,21 +619,17 @@ private static Boolean isMoreSpecificExecut final Class[] e2ParamTypes = e2.getParameterTypes(); if (e1.isVarArgs() && e2.isVarArgs()) { - e1ParamTypes[e1ParamTypes.length - 1] = - e1ParamTypes[e1ParamTypes.length - 1].getComponentType(); - e2ParamTypes[e2ParamTypes.length - 1] = - e2ParamTypes[e2ParamTypes.length - 1].getComponentType(); + e1ParamTypes[e1ParamTypes.length - 1] = e1ParamTypes[e1ParamTypes.length - 1].getComponentType(); + e2ParamTypes[e2ParamTypes.length - 1] = e2ParamTypes[e2ParamTypes.length - 1].getComponentType(); } for (int i = 0; i < e1ParamTypes.length; i++) { - if (!isAssignableFrom(e1ParamTypes[i], e2ParamTypes[i]) - && !isDbArray(e2ParamTypes[i])) { + if (!isAssignableFrom(e1ParamTypes[i], e2ParamTypes[i]) && !isDbArray(e2ParamTypes[i])) { return false; } } - if (!Arrays.equals(e1ParamTypes, e2ParamTypes)) { // this means that e2 params are more - // specific + if (!Arrays.equals(e1ParamTypes, e2ParamTypes)) { // this means that e2 params are more specific return true; } @@ -677,8 +641,7 @@ private static boolean isAssignableFrom(Class classA, Class classB) { return true; } - if ((classA.isPrimitive() && classA != boolean.class) && classB.isPrimitive() - && classB != boolean.class) { + if ((classA.isPrimitive() && classA != boolean.class) && classB.isPrimitive() && classB != 
boolean.class) { return classA == binaryNumericPromotionType(classA, classB); } else if (!classA.isPrimitive() && classB == NULL_CLASS) { return true; @@ -696,8 +659,7 @@ private Class[][] getParameterizedTypes(Expression... expressions) { for (int i = 0; i < expressions.length; i++) { if ((expressions[i] instanceof NameExpr)) { - parameterizedTypes[i] = - variableParameterizedTypes.get(((NameExpr) expressions[i]).getName()); + parameterizedTypes[i] = variableParameterizedTypes.get(((NameExpr) expressions[i]).getName()); } } @@ -706,9 +668,7 @@ private Class[][] getParameterizedTypes(Expression... expressions) { private static Class convertDBArray(Class type, Class parameterizedType) { if (DbArray.class.isAssignableFrom(type)) { - return Array - .newInstance(parameterizedType == null ? Object.class : parameterizedType, 0) - .getClass(); + return Array.newInstance(parameterizedType == null ? Object.class : parameterizedType, 0).getClass(); } if (DbIntArray.class.isAssignableFrom(type)) { return int[].class; @@ -797,25 +757,25 @@ static boolean isNonFPNumber(Class type) { } return type == int.class || type == long.class || type == byte.class || type == short.class - || type == char.class; + || type == char.class; } public static boolean isDbArray(Class type) { return DbArray.class.isAssignableFrom(type) || - DbIntArray.class.isAssignableFrom(type) || - DbBooleanArray.class.isAssignableFrom(type) || - DbDoubleArray.class.isAssignableFrom(type) || - DbCharArray.class.isAssignableFrom(type) || - DbByteArray.class.isAssignableFrom(type) || - DbShortArray.class.isAssignableFrom(type) || - DbLongArray.class.isAssignableFrom(type) || - DbFloatArray.class.isAssignableFrom(type); + DbIntArray.class.isAssignableFrom(type) || + DbBooleanArray.class.isAssignableFrom(type) || + DbDoubleArray.class.isAssignableFrom(type) || + DbCharArray.class.isAssignableFrom(type) || + DbByteArray.class.isAssignableFrom(type) || + DbShortArray.class.isAssignableFrom(type) || + 
DbLongArray.class.isAssignableFrom(type) || + DbFloatArray.class.isAssignableFrom(type); } /** - * Converts the provided argument {@code expressions} for the given {@code executable} so that - * the expressions whose types (expressionTypes) do not match the corresponding declared - * argument types ({@code argumentTypes}) may still be used as arguments. + * Converts the provided argument {@code expressions} for the given {@code executable} so that the expressions whose + * types (expressionTypes) do not match the corresponding declared argument types ({@code argumentTypes}) may still + * be used as arguments. * * Conversions include casts & unwrapping of DB arrays to Java arrays. * @@ -824,29 +784,27 @@ public static boolean isDbArray(Class type) { * @param expressionTypes The types of the {@code expressions} to be passed as arguments * @param parameterizedTypes The actual type arguments corresponding to the expressions * @param expressions The actual expressions - * @return An array of new expressions that maintain the 'meaning' of the input - * {@code expressions} but are appropriate to pass to {@code executable} + * @return An array of new expressions that maintain the 'meaning' of the input {@code expressions} but are + * appropriate to pass to {@code executable} */ private Expression[] convertParameters(final Executable executable, final Class argumentTypes[], - final Class expressionTypes[], final Class parameterizedTypes[][], - Expression expressions[]) { + final Class expressionTypes[], final Class parameterizedTypes[][], Expression expressions[]) { final int nArgs = argumentTypes.length; // Number of declared arguments for (int ai = 0; ai < (executable.isVarArgs() ? 
nArgs - 1 : nArgs); ai++) { if (argumentTypes[ai] != expressionTypes[ai] && argumentTypes[ai].isPrimitive() - && expressionTypes[ai].isPrimitive()) { + && expressionTypes[ai].isPrimitive()) { expressions[ai] = new CastExpr( - new PrimitiveType(PrimitiveType.Primitive.valueOf( - StringUtils.makeFirstLetterCapital(argumentTypes[ai].getSimpleName()))), - expressions[ai]); - } else if (unboxArguments && argumentTypes[ai].isPrimitive() - && !expressionTypes[ai].isPrimitive()) { - expressions[ai] = new MethodCallExpr(expressions[ai], - argumentTypes[ai].getSimpleName() + "Value", null); + new PrimitiveType(PrimitiveType.Primitive + .valueOf(StringUtils.makeFirstLetterCapital(argumentTypes[ai].getSimpleName()))), + expressions[ai]); + } else if (unboxArguments && argumentTypes[ai].isPrimitive() && !expressionTypes[ai].isPrimitive()) { + expressions[ai] = + new MethodCallExpr(expressions[ai], argumentTypes[ai].getSimpleName() + "Value", null); } else if (argumentTypes[ai].isArray() && isDbArray(expressionTypes[ai])) { - expressions[ai] = new MethodCallExpr(new NameExpr("ArrayUtils"), - "nullSafeDbArrayToArray", Collections.singletonList(expressions[ai])); + expressions[ai] = new MethodCallExpr(new NameExpr("ArrayUtils"), "nullSafeDbArrayToArray", + Collections.singletonList(expressions[ai])); expressionTypes[ai] = convertDBArray(expressionTypes[ai], - parameterizedTypes[ai] == null ? null : parameterizedTypes[ai][0]); + parameterizedTypes[ai] == null ? 
null : parameterizedTypes[ai][0]); } } @@ -860,47 +818,39 @@ private Expression[] convertParameters(final Executable executable, final Class // If there's only one arg expression provided, and it's a DbArray, and the varArgType // *isn't* DbArray, then convert the DbArray to a Java array if (nArgExpressions == nArgs - && varArgType != expressionTypes[lastArgIndex] - && isDbArray(expressionTypes[lastArgIndex])) { - expressions[lastArgIndex] = new MethodCallExpr(new NameExpr("ArrayUtils"), - "nullSafeDbArrayToArray", Collections.singletonList(expressions[lastArgIndex])); + && varArgType != expressionTypes[lastArgIndex] + && isDbArray(expressionTypes[lastArgIndex])) { + expressions[lastArgIndex] = new MethodCallExpr(new NameExpr("ArrayUtils"), "nullSafeDbArrayToArray", + Collections.singletonList(expressions[lastArgIndex])); expressionTypes[lastArgIndex] = convertDBArray(expressionTypes[lastArgIndex], - parameterizedTypes[lastArgIndex] == null ? null - : parameterizedTypes[lastArgIndex][0]); + parameterizedTypes[lastArgIndex] == null ? 
null : parameterizedTypes[lastArgIndex][0]); anyExpressionTypesArePrimitive = false; } else { - for (int ei = nArgs - 1; ei < nArgExpressions; ei++) { // iterate over the vararg - // argument expresions + for (int ei = nArgs - 1; ei < nArgExpressions; ei++) { // iterate over the vararg argument expresions if (varArgType != expressionTypes[ei] && varArgType.isPrimitive() - && expressionTypes[ei].isPrimitive()) { // cast primitives to the - // appropriate type + && expressionTypes[ei].isPrimitive()) { // cast primitives to the appropriate type expressions[ei] = new CastExpr( - new PrimitiveType(PrimitiveType.Primitive.valueOf( - StringUtils.makeFirstLetterCapital(varArgType.getSimpleName()))), - expressions[ei]); + new PrimitiveType(PrimitiveType.Primitive + .valueOf(StringUtils.makeFirstLetterCapital(varArgType.getSimpleName()))), + expressions[ei]); } anyExpressionTypesArePrimitive &= expressionTypes[ei].isPrimitive(); } } - if (varArgType.isPrimitive() && anyExpressionTypesArePrimitive) { // we have some - // problems with - // ambiguous oddities - // and varargs, so if - // its primitive lets - // just box it - // ourselves + if (varArgType.isPrimitive() && anyExpressionTypesArePrimitive) { // we have some problems with ambiguous + // oddities and varargs, so if its + // primitive lets just box it ourselves Expression temp[] = new Expression[nArgs]; Expression varArgExpressions[] = new Expression[nArgExpressions - nArgs + 1]; System.arraycopy(expressions, 0, temp, 0, temp.length - 1); - System.arraycopy(expressions, nArgs - 1, varArgExpressions, 0, - varArgExpressions.length); + System.arraycopy(expressions, nArgs - 1, varArgExpressions, 0, varArgExpressions.length); temp[temp.length - 1] = new ArrayCreationExpr( - new PrimitiveType(PrimitiveType.Primitive - .valueOf(StringUtils.makeFirstLetterCapital(varArgType.getSimpleName()))), - 1, new ArrayInitializerExpr(Arrays.asList(varArgExpressions))); + new PrimitiveType(PrimitiveType.Primitive + 
.valueOf(StringUtils.makeFirstLetterCapital(varArgType.getSimpleName()))), + 1, new ArrayInitializerExpr(Arrays.asList(varArgExpressions))); expressions = temp; } @@ -913,19 +863,16 @@ && isDbArray(expressionTypes[lastArgIndex])) { public Class visit(NameExpr n, VisitArgs printer) { /* - * JLS on how to resolve names: - * https://docs.oracle.com/javase/specs/jls/se8/html/jls-6.html#jls-6.5 + * JLS on how to resolve names: https://docs.oracle.com/javase/specs/jls/se8/html/jls-6.html#jls-6.5 * - * Our parser doesn't work exactly this way (some cases are not relevant, and the work is - * split between this class and the parser library), but the behavior should be consistent - * with the spec. + * Our parser doesn't work exactly this way (some cases are not relevant, and the work is split between this + * class and the parser library), but the behavior should be consistent with the spec. * - * What matters here: 1) If it's a simple name (i.e. not a qualified name; doesn't contain a - * '.'), then 1. Check whether it's in the scope 2. If it's not in the scope, see if it's a - * static import 3. If it's not a static import, then it's not a situation the - * DBLanguageParser has to worry about. 2) Qualified names -- we just throw them to - * 'findClass()'. Many details are not relevant here. For example, field access is handled - * by a different method: visit(FieldAccessExpr, StringBuilder). + * What matters here: 1) If it's a simple name (i.e. not a qualified name; doesn't contain a '.'), then 1. Check + * whether it's in the scope 2. If it's not in the scope, see if it's a static import 3. If it's not a static + * import, then it's not a situation the DBLanguageParser has to worry about. 2) Qualified names -- we just + * throw them to 'findClass()'. Many details are not relevant here. For example, field access is handled by a + * different method: visit(FieldAccessExpr, StringBuilder). 
*/ printer.append(n.getName()); @@ -989,13 +936,13 @@ public Class visit(PrimitiveType n, VisitArgs printer) { public Class visit(ArrayAccessExpr n, VisitArgs printer) { /* - * ArrayAccessExprs are permitted even when the 'array' is not really an array. The main use - * of this is for DbArrays, such as: + * ArrayAccessExprs are permitted even when the 'array' is not really an array. The main use of this is for + * DbArrays, such as: * * t.view("Date", "Price").updateView("Return=Price/Price_[i-1]"). * - * The "Price_[i-1]" is translated to "Price.get(i-1)". But we do this generically, not just - * for DbArrays. As an example, this works (column Blah will be set to "hello"): + * The "Price_[i-1]" is translated to "Price.get(i-1)". But we do this generically, not just for DbArrays. As an + * example, this works (column Blah will be set to "hello"): * * map = new HashMap(); map.put("a", "hello") t = emptyTable(1).update("Blah=map[`a`]") * @@ -1023,8 +970,7 @@ public Class visit(ArrayAccessExpr n, VisitArgs printer) { } } - return getMethodReturnType(type, "get", new Class[] {paramType}, - getParameterizedTypes(n.getIndex())); + return getMethodReturnType(type, "get", new Class[] {paramType}, getParameterizedTypes(n.getIndex())); } } @@ -1069,14 +1015,12 @@ public Class visit(BinaryExpr n, VisitArgs printer) { op = BinaryExpr.Operator.equals; } - boolean isArray = - lhType.isArray() || rhType.isArray() || isDbArray(lhType) || isDbArray(rhType); + boolean isArray = lhType.isArray() || rhType.isArray() || isDbArray(lhType) || isDbArray(rhType); String methodName = getOperatorName(op) + (isArray ? 
"Array" : ""); if (printer.hasStringBuilder()) { - new MethodCallExpr(null, methodName, Arrays.asList(n.getLeft(), n.getRight())) - .accept(this, printer); + new MethodCallExpr(null, methodName, Arrays.asList(n.getLeft(), n.getRight())).accept(this, printer); } // printer.append(methodName + '('); @@ -1086,7 +1030,7 @@ public Class visit(BinaryExpr n, VisitArgs printer) { // printer.append(')'); return getMethodReturnType(null, methodName, new Class[] {lhType, rhType}, - getParameterizedTypes(n.getLeft(), n.getRight())); + getParameterizedTypes(n.getLeft(), n.getRight())); } public Class visit(UnaryExpr n, VisitArgs printer) { @@ -1097,21 +1041,18 @@ public Class visit(UnaryExpr n, VisitArgs printer) { } else if (n.getOperator() == UnaryExpr.Operator.negative) { opName = "negate"; } else { - throw new RuntimeException( - "Unary operation (" + n.getOperator().name() + ") not supported"); + throw new RuntimeException("Unary operation (" + n.getOperator().name() + ") not supported"); } printer.append(opName).append('('); Class type = n.getExpr().accept(this, printer); printer.append(')'); - return getMethodReturnType(null, opName, new Class[] {type}, - getParameterizedTypes(n.getExpr())); + return getMethodReturnType(null, opName, new Class[] {type}, getParameterizedTypes(n.getExpr())); } public Class visit(CastExpr n, VisitArgs printer) { - final Class ret = n.getType().accept(this, VisitArgs.WITHOUT_STRING_BUILDER); // the target - // type + final Class ret = n.getType().accept(this, VisitArgs.WITHOUT_STRING_BUILDER); // the target type final Expression expr = n.getExpr(); final VisitArgs innerArgs = VisitArgs.create().cloneWithCastingContext(ret); @@ -1120,85 +1061,77 @@ public Class visit(CastExpr n, VisitArgs printer) { final boolean fromPrimitive = exprType.isPrimitive(); final boolean fromBoxedType = io.deephaven.util.type.TypeUtils.isBoxedType(exprType); - final Class unboxedExprType = - !fromBoxedType ? 
null : io.deephaven.util.type.TypeUtils.getUnboxedType(exprType); + final Class unboxedExprType = !fromBoxedType ? null : io.deephaven.util.type.TypeUtils.getUnboxedType(exprType); final boolean toPrimitive = ret.isPrimitive(); final boolean isWidening; /* - * Here are the rules for casting: - * https://docs.oracle.com/javase/specs/jls/se8/html/jls-5.html#jls-5.5 + * Here are the rules for casting: https://docs.oracle.com/javase/specs/jls/se8/html/jls-5.html#jls-5.5 * * First, we should ensure the cast does not violate the Java Language Specification. */ if (toPrimitive) { // Casting to a primitive /* - * booleans can only be cast to booleans, and only booleans can be cast to booleans. See - * table 5.5-A at the link above. + * booleans can only be cast to booleans, and only booleans can be cast to booleans. See table 5.5-A at the + * link above. * - * The JLS also places restrictions on conversions from boxed types to primitives - * (again, see table 5.5-A). + * The JLS also places restrictions on conversions from boxed types to primitives (again, see table 5.5-A). 
*/ if (fromPrimitive && (ret.equals(boolean.class) ^ exprType.equals(boolean.class))) { throw new RuntimeException("Incompatible types; " + exprType.getName() + - " cannot be converted to " + ret.getName()); + " cannot be converted to " + ret.getName()); } // Now check whether we're converting from a boxed type else if (fromBoxedType) { isWidening = isWideningPrimitiveConversion(unboxedExprType, ret); - if (!ret.equals(unboxedExprType) && // Unboxing and Identity conversions are always - // OK + if (!ret.equals(unboxedExprType) && // Unboxing and Identity conversions are always OK /* - * Boolean is the only boxed type that can be cast to boolean, and boolean is the - * only primitive type to which Boolean can be cast: + * Boolean is the only boxed type that can be cast to boolean, and boolean is the only primitive type to + * which Boolean can be cast: */ - (boolean.class.equals(ret) ^ Boolean.class.equals(exprType) - // Only Character can be cast to char: - || char.class.equals(ret) && !Character.class.equals(exprType) - // Other than that, only widening conversions are allowed: - || !isWidening)) { + (boolean.class.equals(ret) ^ Boolean.class.equals(exprType) + // Only Character can be cast to char: + || char.class.equals(ret) && !Character.class.equals(exprType) + // Other than that, only widening conversions are allowed: + || !isWidening)) { throw new RuntimeException("Incompatible types; " + exprType.getName() + - " cannot be converted to " + ret.getName()); + " cannot be converted to " + ret.getName()); } } else { isWidening = false; } } /* - * When casting primitives to boxed types, only boxing conversions are allowed When casting - * boxed types to boxed types, only the identity conversion is allowed + * When casting primitives to boxed types, only boxing conversions are allowed When casting boxed types to boxed + * types, only the identity conversion is allowed */ else { - if (io.deephaven.util.type.TypeUtils.isBoxedType(ret) - && (fromPrimitive || 
fromBoxedType) - && !(ret.equals(io.deephaven.util.type.TypeUtils.getBoxedType(exprType)))) { + if (io.deephaven.util.type.TypeUtils.isBoxedType(ret) && (fromPrimitive || fromBoxedType) + && !(ret.equals(io.deephaven.util.type.TypeUtils.getBoxedType(exprType)))) { throw new RuntimeException("Incompatible types; " + exprType.getName() + - " cannot be converted to " + ret.getName()); + " cannot be converted to " + ret.getName()); } isWidening = false; } /* - * Now actually print the cast. For casts to primitives (except boolean), we use special - * null-safe functions (e.g. intCast()) to perform the cast. + * Now actually print the cast. For casts to primitives (except boolean), we use special null-safe functions + * (e.g. intCast()) to perform the cast. * * There is no "booleanCast()" function. * * There are also no special functions for the identity conversion -- e.g. "intCast(int)" */ - if (toPrimitive && !ret.equals(boolean.class) && !ret.equals(exprType)) { // Casting to a - // primitive, - // except booleans - // and the - // identity + if (toPrimitive && !ret.equals(boolean.class) && !ret.equals(exprType)) { // Casting to a primitive, except + // booleans and the identity // conversion printer.append(ret.getSimpleName()); printer.append("Cast("); /* - * When unboxing to a wider type, do an unboxing conversion followed by a widening - * conversion. See table 5.5-A in the JLS, at the link above. + * When unboxing to a wider type, do an unboxing conversion followed by a widening conversion. See table + * 5.5-A in the JLS, at the link above. 
*/ if (isWidening) { Assert.neqNull(unboxedExprType, "unboxedExprType"); @@ -1217,8 +1150,7 @@ else if (fromBoxedType) { /* Print the cast normally - "(targetType) (expression)" */ printer.append('('); - if (ret.getPackage() != null - && simpleNameWhiteList.contains(ret.getPackage().getName())) { + if (ret.getPackage() != null && simpleNameWhiteList.contains(ret.getPackage().getName())) { printer.append(ret.getSimpleName()); } else { printer.append(ret.getCanonicalName()); @@ -1226,8 +1158,8 @@ else if (fromBoxedType) { printer.append(')'); /* - * If the expression is anything more complex than a simple name or literal, then - * enclose it in parentheses to ensure the order of operations is not altered. + * If the expression is anything more complex than a simple name or literal, then enclose it in parentheses + * to ensure the order of operations is not altered. */ boolean isNameOrLiteral = (expr instanceof NameExpr) || (expr instanceof LiteralExpr); @@ -1246,29 +1178,26 @@ else if (fromBoxedType) { } /** - * Checks whether the conversion from {@code original} to {@code target} is a widening primitive - * conversion. The arguments must be primitive types (not boxed types). + * Checks whether the conversion from {@code original} to {@code target} is a widening primitive conversion. The + * arguments must be primitive types (not boxed types). * - * This method return false if {@code original} and {@code target} represent the same type, as - * such a conversion is the identity conversion, not a widening conversion. + * This method return false if {@code original} and {@code target} represent the same type, as such a conversion is + * the identity conversion, not a widening conversion. * - * See the - * JLS for more info. + * See the JLS for more info. * * @param original The type to convert from. * @param target The type to convert to. 
- * @return {@code true} if a conversion from {@code original} to {@code target} is a widening - * conversion; otherwise, {@code false}. + * @return {@code true} if a conversion from {@code original} to {@code target} is a widening conversion; otherwise, + * {@code false}. */ static boolean isWideningPrimitiveConversion(Class original, Class target) { if (original == null || !original.isPrimitive() || target == null || !target.isPrimitive() - || original.equals(void.class) || target.equals(void.class)) { - throw new IllegalArgumentException( - "Arguments must be a primitive type (excluding void)!"); + || original.equals(void.class) || target.equals(void.class)) { + throw new IllegalArgumentException("Arguments must be a primitive type (excluding void)!"); } - DBLanguageParserPrimitiveType originalEnum = - DBLanguageParserPrimitiveType.getPrimitiveType(original); + DBLanguageParserPrimitiveType originalEnum = DBLanguageParserPrimitiveType.getPrimitiveType(original); switch (originalEnum) { case BytePrimitive: @@ -1292,15 +1221,13 @@ static boolean isWideningPrimitiveConversion(Class original, Class target) } private enum DBLanguageParserPrimitiveType { - // Including "Enum" (or really, any differentiating string) in these names is important. - // They're used - // in a switch() statement, which apparently does not support qualified names. And we can't - // use + // Including "Enum" (or really, any differentiating string) in these names is important. They're used + // in a switch() statement, which apparently does not support qualified names. And we can't use // names that conflict with java.lang's boxed types. 
- BytePrimitive(byte.class), ShortPrimitive(short.class), CharPrimitive( - char.class), IntPrimitive(int.class), LongPrimitive(long.class), FloatPrimitive( - float.class), DoublePrimitive(double.class), BooleanPrimitive(boolean.class); + BytePrimitive(byte.class), ShortPrimitive(short.class), CharPrimitive(char.class), IntPrimitive( + int.class), LongPrimitive(long.class), FloatPrimitive( + float.class), DoublePrimitive(double.class), BooleanPrimitive(boolean.class); private final Class primitiveClass; @@ -1312,15 +1239,13 @@ private Class getPrimitiveClass() { return primitiveClass; } - private static final Map primitiveClassToEnumMap = - Stream.of(DBLanguageParserPrimitiveType.values()) - .collect(Collectors.toMap(DBLanguageParserPrimitiveType::getPrimitiveClass, - Function.identity())); + private static final Map primitiveClassToEnumMap = Stream + .of(DBLanguageParserPrimitiveType.values()) + .collect(Collectors.toMap(DBLanguageParserPrimitiveType::getPrimitiveClass, Function.identity())); private static DBLanguageParserPrimitiveType getPrimitiveType(Class original) { if (!original.isPrimitive()) { - throw new IllegalArgumentException( - "Class " + original.getName() + " is not a primitive type"); + throw new IllegalArgumentException("Class " + original.getName() + " is not a primitive type"); } else if (original.equals(void.class)) { throw new IllegalArgumentException("Void is not supported!"); } @@ -1380,30 +1305,23 @@ public Class visit(ConditionalExpr n, VisitArgs printer) { Class classA = getTypeWithCaching(n.getThenExpr()); Class classB = getTypeWithCaching(n.getElseExpr()); - if (classA == NULL_CLASS - && io.deephaven.util.type.TypeUtils.getUnboxedType(classB) != null) { - n.setThenExpr(new NameExpr("NULL_" + io.deephaven.util.type.TypeUtils - .getUnboxedType(classB).getSimpleName().toUpperCase())); + if (classA == NULL_CLASS && io.deephaven.util.type.TypeUtils.getUnboxedType(classB) != null) { + n.setThenExpr(new NameExpr( + "NULL_" + 
io.deephaven.util.type.TypeUtils.getUnboxedType(classB).getSimpleName().toUpperCase())); classA = n.getThenExpr().accept(this, VisitArgs.WITHOUT_STRING_BUILDER); - } else if (classB == NULL_CLASS - && io.deephaven.util.type.TypeUtils.getUnboxedType(classA) != null) { - n.setElseExpr(new NameExpr( - "NULL_" + TypeUtils.getUnboxedType(classA).getSimpleName().toUpperCase())); + } else if (classB == NULL_CLASS && io.deephaven.util.type.TypeUtils.getUnboxedType(classA) != null) { + n.setElseExpr(new NameExpr("NULL_" + TypeUtils.getUnboxedType(classA).getSimpleName().toUpperCase())); classB = n.getElseExpr().accept(this, VisitArgs.WITHOUT_STRING_BUILDER); } - if (classA == boolean.class && classB == Boolean.class) { // a little hacky, but this - // handles the null case where it - // unboxes. very weird stuff - n.setThenExpr( - new CastExpr(new ClassOrInterfaceType("java.lang.Boolean"), n.getThenExpr())); + if (classA == boolean.class && classB == Boolean.class) { // a little hacky, but this handles the null case + // where it unboxes. very weird stuff + n.setThenExpr(new CastExpr(new ClassOrInterfaceType("java.lang.Boolean"), n.getThenExpr())); } - if (classA == Boolean.class && classB == boolean.class) { // a little hacky, but this - // handles the null case where it - // unboxes. very weird stuff - n.setElseExpr( - new CastExpr(new ClassOrInterfaceType("java.lang.Boolean"), n.getElseExpr())); + if (classA == Boolean.class && classB == boolean.class) { // a little hacky, but this handles the null case + // where it unboxes. 
very weird stuff + n.setElseExpr(new CastExpr(new ClassOrInterfaceType("java.lang.Boolean"), n.getElseExpr())); } if (printer.hasStringBuilder()) { @@ -1427,7 +1345,7 @@ public Class visit(ConditionalExpr n, VisitArgs printer) { } throw new RuntimeException( - "Incompatible types in condition operation not supported : " + classA + ' ' + classB); + "Incompatible types in condition operation not supported : " + classA + ' ' + classB); } public Class visit(EnclosedExpr n, VisitArgs printer) { @@ -1457,36 +1375,31 @@ public Class visit(FieldAccessExpr n, VisitArgs printer) { // com.a.b.TheClass.field // then the scope -- "com.a.b.TheClass" -- is itself a FieldAccessExpr. // - // Thus we can use scopeExpr.accept() to find the scope type if the scope is anything other - // than a class, - // but we would recurse and eventually fail if the scope name actually is a class. Instead, - // we must + // Thus we can use scopeExpr.accept() to find the scope type if the scope is anything other than a class, + // but we would recurse and eventually fail if the scope name actually is a class. Instead, we must // manually check whether the scope is a class. Class scopeType; if (scopeExpr instanceof FieldAccessExpr - && (scopeType = findClass(scopeName)) != null) { // 'scope' was a class, and we found it - // - print 'scopeType' ourselves + && (scopeType = findClass(scopeName)) != null) { // 'scope' was a class, and we found it - print + // 'scopeType' ourselves printer.append(scopeName); } else { // 'scope' was *not* a class; call accept() on it to print it and find its type. try { - // The incoming VisitArgs might have a "casting context", meaning that it wants us - // to cast to - // the proper type at the end. But we have a scope, and that scope needs to be - // evaluated in + // The incoming VisitArgs might have a "casting context", meaning that it wants us to cast to + // the proper type at the end. 
But we have a scope, and that scope needs to be evaluated in // a non-casting context. So we provide that here. scopeType = scopeExpr.accept(this, printer.cloneWithCastingContext(null)); } catch (RuntimeException e) { throw new RuntimeException("Cannot resolve scope." + - "\n Expression : " + exprString + - "\n Scope : " + scopeExpr.toString() + - "\n Field Name : " + fieldName, e); + "\n Expression : " + exprString + + "\n Scope : " + scopeExpr.toString() + + "\n Field Name : " + fieldName, e); } Assert.neqNull(scopeType, "scopeType"); } if (scopeType.isArray() && fieldName.equals("length")) { - // We need special handling for arrays -- see the note in the javadocs for - // Class.getField(): + // We need special handling for arrays -- see the note in the javadocs for Class.getField(): // "If this Class object represents an array type, then this method // does not find the length field of the array type." ret = Integer.TYPE; @@ -1498,8 +1411,8 @@ public Class visit(FieldAccessExpr n, VisitArgs printer) { // If it wasn't a nested class, then it should be an actual field. if (ret == null) { try { - // For Python object, the type of the field is PyObject by default, the actual - // data type if primitive + // For Python object, the type of the field is PyObject by default, the actual data type if + // primitive // will only be known at runtime if (scopeType == PyObject.class) { ret = PyObject.class; @@ -1509,10 +1422,10 @@ public Class visit(FieldAccessExpr n, VisitArgs printer) { } catch (NoSuchFieldException e) { // And if we still can't find the field, we have a problem. throw new RuntimeException("Cannot resolve field name." 
+ - "\n Expression : " + exprString + - "\n Scope : " + scopeExpr.toString() + - "\n Scope Type : " + scopeType.getCanonicalName() + - "\n Field Name : " + fieldName, e); + "\n Expression : " + exprString + + "\n Scope : " + scopeExpr.toString() + + "\n Scope Type : " + scopeType.getCanonicalName() + + "\n Field Name : " + fieldName, e); } } } @@ -1559,21 +1472,21 @@ public Class visit(IntegerLiteralExpr n, VisitArgs printer) { printer.append(value); /* - * In java, you can't compile if your code contains an integer literal that's too big to fit - * in an int. You'd need to add an "L" to the end, to indicate that it's a long. + * In java, you can't compile if your code contains an integer literal that's too big to fit in an int. You'd + * need to add an "L" to the end, to indicate that it's a long. * - * But in the DB, we assume you don't mind extra precision and just want your query to work, - * so when an 'integer' literal is too big to fit in an int, we automatically add on the "L" - * to promote the literal from an int to a long. + * But in the DB, we assume you don't mind extra precision and just want your query to work, so when an + * 'integer' literal is too big to fit in an int, we automatically add on the "L" to promote the literal from an + * int to a long. * * Also, note that the 'x' and 'b' for hexadecimal/binary literals are _not_ case sensitive. */ // First, we need to remove underscores from the value before we can parse it. value = value.chars() - .filter((c) -> c != '_') - .collect(StringBuilder::new, (sb, c) -> sb.append((char) c), StringBuilder::append) - .toString(); + .filter((c) -> c != '_') + .collect(StringBuilder::new, (sb, c) -> sb.append((char) c), StringBuilder::append) + .toString(); long longValue; String prefix = value.length() > 2 ? 
value.substring(0, 2) : null; @@ -1581,8 +1494,7 @@ public Class visit(IntegerLiteralExpr n, VisitArgs printer) { longValue = Long.parseLong(value.substring(2), 16); } else if ("0b".equalsIgnoreCase(prefix)) { // binary literal // If a literal has 32 bits, the 32nd (i.e. MSB) is *not* taken as the sign bit! - // This follows from the fact that Integer.parseInt(str, 2) will only parse an 'str' up - // to 31 chars long. + // This follows from the fact that Integer.parseInt(str, 2) will only parse an 'str' up to 31 chars long. longValue = Long.parseLong(value.substring(2), 2); } else { // regular numeric literal longValue = Long.parseLong(value); @@ -1645,8 +1557,7 @@ public Class visit(MethodCallExpr n, VisitArgs printer) { innerPrinter.append('.'); } - Expression expressions[] = - n.getArgs() == null ? new Expression[0] : n.getArgs().toArray(new Expression[0]); + Expression expressions[] = n.getArgs() == null ? new Expression[0] : n.getArgs().toArray(new Expression[0]); Class expressionTypes[] = printArguments(expressions, VisitArgs.WITHOUT_STRING_BUILDER); @@ -1663,27 +1574,23 @@ public Class visit(MethodCallExpr n, VisitArgs printer) { checkPyNumbaVectorizedFunc(n, expressions, expressionTypes); } - expressions = convertParameters(method, argumentTypes, expressionTypes, parameterizedTypes, - expressions); + expressions = convertParameters(method, argumentTypes, expressionTypes, parameterizedTypes, expressions); if (isPotentialImplicitCall(method.getDeclaringClass())) { if (scope == null) { // python func call or Groovy closure call /* - * python func call 1. the func is defined at the main module level and already - * wrapped in CallableWrapper 2. the func will be called via CallableWrapper.call() - * method + * python func call 1. the func is defined at the main module level and already wrapped in + * CallableWrapper 2. 
the func will be called via CallableWrapper.call() method */ printer.append(innerPrinter); printer.append(n.getName()); printer.append(".call"); } else { /* - * python method call 1. need to reference the method with PyObject.getAttribute(); - * 2. wrap the method reference in CallableWrapper() 3. the method will be called - * via CallableWrapper.call() + * python method call 1. need to reference the method with PyObject.getAttribute(); 2. wrap the method + * reference in CallableWrapper() 3. the method will be called via CallableWrapper.call() */ - if (!n.getName().equals("call")) { // to be backwards compatible with the syntax - // func.call(...) + if (!n.getName().equals("call")) { // to be backwards compatible with the syntax func.call(...) innerPrinter.append("getAttribute(\"" + n.getName() + "\")"); printer.append("(new io.deephaven.db.util.PythonScopeJpyImpl.CallableWrapper("); printer.append(innerPrinter); @@ -1705,15 +1612,12 @@ public Class visit(MethodCallExpr n, VisitArgs printer) { return calculateMethodReturnTypeUsingGenerics(method, expressionTypes, parameterizedTypes); } - private void checkPyNumbaVectorizedFunc(MethodCallExpr n, Expression[] expressions, - Class[] expressionTypes) { - // numba vectorized functions return arrays of primitive types. This will break the - // generated expression - // evaluation code that expects singular values. This check makes sure that numba vectorized - // functions must be + private void checkPyNumbaVectorizedFunc(MethodCallExpr n, Expression[] expressions, Class[] expressionTypes) { + // numba vectorized functions return arrays of primitive types. This will break the generated expression + // evaluation code that expects singular values. This check makes sure that numba vectorized functions must be // used alone (or with cast only) as the entire expression. 
- if (n.getParentNode() != null && (n.getParentNode().getClass() != CastExpr.class - || n.getParentNode().getParentNode() != null)) { + if (n.getParentNode() != null + && (n.getParentNode().getClass() != CastExpr.class || n.getParentNode().getParentNode() != null)) { throw new RuntimeException("Numba vectorized function can't be used in an expression."); } @@ -1723,19 +1627,16 @@ private void checkPyNumbaVectorizedFunc(MethodCallExpr n, Expression[] expressio NumbaCallableWrapper numbaCallableWrapper = (NumbaCallableWrapper) param.getValue(); List params = numbaCallableWrapper.getParamTypes(); if (params.size() != expressions.length) { - throw new RuntimeException("Numba vectorized function argument count mismatch: " - + params.size() + " vs." + expressions.length); + throw new RuntimeException("Numba vectorized function argument count mismatch: " + params.size() + + " vs." + expressions.length); } for (int i = 0; i < expressions.length; i++) { if (!(expressions[i] instanceof NameExpr)) { - throw new RuntimeException( - "Numba vectorized function arguments can only be columns."); + throw new RuntimeException("Numba vectorized function arguments can only be columns."); } if (!isSafelyCoerceable(expressionTypes[i], params.get(i))) { - throw new RuntimeException( - "Numba vectorized function argument type mismatch: " - + expressionTypes[i].getSimpleName() + " -> " - + params.get(i).getSimpleName()); + throw new RuntimeException("Numba vectorized function argument type mismatch: " + + expressionTypes[i].getSimpleName() + " -> " + params.get(i).getSimpleName()); } } } @@ -1743,8 +1644,7 @@ private void checkPyNumbaVectorizedFunc(MethodCallExpr n, Expression[] expressio } private static boolean isSafelyCoerceable(Class expressionType, Class aClass) { - // TODO, numba does appear to check for type coercing at runtime, though no explicit rules - // exist. + // TODO, numba does appear to check for type coercing at runtime, though no explicit rules exist. 
// GH-709 is filed to address this at some point in the future. return true; } @@ -1760,8 +1660,7 @@ public Class visit(ObjectCreationExpr n, VisitArgs printer) { Class ret = n.getType().accept(this, printer); - Expression expressions[] = - n.getArgs() == null ? new Expression[0] : n.getArgs().toArray(new Expression[0]); + Expression expressions[] = n.getArgs() == null ? new Expression[0] : n.getArgs().toArray(new Expression[0]); Class expressionTypes[] = printArguments(expressions, VisitArgs.WITHOUT_STRING_BUILDER); @@ -1773,8 +1672,7 @@ public Class visit(ObjectCreationExpr n, VisitArgs printer) { // now do some parameter conversions... - expressions = convertParameters(constructor, argumentTypes, expressionTypes, - parameterizedTypes, expressions); + expressions = convertParameters(constructor, argumentTypes, expressionTypes, parameterizedTypes, expressions); if (printer.hasStringBuilder()) { printArguments(expressions, printer); @@ -1857,10 +1755,8 @@ public Class visit(MethodReferenceExpr n, VisitArgs printer) { // String methodName = n.getIdentifier(); // // /* - // NOTE: I believe the big problem here is knowing how many arguments to expect the - // referenced method to take. - // Seems like we'll have to search parent nodes to find the context in which this method - // reference is used + // NOTE: I believe the big problem here is knowing how many arguments to expect the referenced method to take. + // Seems like we'll have to search parent nodes to find the context in which this method reference is used // */ // // Method[] possibleReferredMethods = Stream @@ -1878,12 +1774,10 @@ public Class visit(MethodReferenceExpr n, VisitArgs printer) { // Method[] candidateCalledMethods = // // Get all possible methods // (parentScope == null - // ? - // staticImports.stream().map(Class::getDeclaredMethods).map(Stream::of).flatMap(Function.identity()) + // ? 
staticImports.stream().map(Class::getDeclaredMethods).map(Stream::of).flatMap(Function.identity()) // : Stream.of(parentScope.getMethods()) // ) - // .filter((m) -> m.getParameterCount() == parent.getArgs().size()) // filter based on - // argument count + // .filter((m) -> m.getParameterCount() == parent.getArgs().size()) // filter based on argument count // .filter((m) -> m.getName().equals(methodName)) // filter based on name // .toArray(Method[]::new); // @@ -1921,8 +1815,7 @@ public Class visit(MethodReferenceExpr n, VisitArgs printer) { // .get(methodName); // // if(m == null) { - // throw new RuntimeException("Could not find method \"" + methodName + "\": " + - // n.toString()); + // throw new RuntimeException("Could not find method \"" + methodName + "\": " + n.toString()); // } else { // printer.append("::"); // printer.append(n.getIdentifier()); @@ -2190,8 +2083,8 @@ public VisitArgs cloneWithCastingContext(Class pythonCastContext) { } /** - * Underlying StringBuilder or 'null' if we don't need a buffer (i.e. if we are just running - * the visitor pattern to calculate a type and don't care about side effects. + * Underlying StringBuilder or 'null' if we don't need a buffer (i.e. if we are just running the visitor pattern + * to calculate a type and don't care about side effects. 
*/ private final StringBuilder builder; private final Class pythonCastContext; diff --git a/DB/src/main/java/io/deephaven/db/tables/libs/QueryLibrary.java b/DB/src/main/java/io/deephaven/db/tables/libs/QueryLibrary.java index 5c2dc74f59c..5b130bfca27 100644 --- a/DB/src/main/java/io/deephaven/db/tables/libs/QueryLibrary.java +++ b/DB/src/main/java/io/deephaven/db/tables/libs/QueryLibrary.java @@ -14,8 +14,7 @@ public class QueryLibrary { - private static final QueryLibraryImports IMPORTS_INSTANCE = - QueryLibraryImports.copyFromServiceLoader(); + private static final QueryLibraryImports IMPORTS_INSTANCE = QueryLibraryImports.copyFromServiceLoader(); private final Map packageImports; private final Map> classImports; @@ -40,7 +39,7 @@ private QueryLibrary(QueryLibraryImports imports) { private static volatile QueryLibrary defaultLibrary = null; private final static ThreadLocal currLibrary = - ThreadLocal.withInitial(QueryLibrary::getDefaultLibrary); + ThreadLocal.withInitial(QueryLibrary::getDefaultLibrary); private static QueryLibrary getDefaultLibrary() { if (defaultLibrary == null) { @@ -63,7 +62,7 @@ private static QueryLibrary getDefaultLibrary() { public static synchronized void setDefaultLibrary(final QueryLibrary library) { if (defaultLibrary != null) { throw new IllegalStateException( - "It's too late to set default library; it's already set to: " + defaultLibrary); + "It's too late to set default library; it's already set to: " + defaultLibrary); } defaultLibrary = Objects.requireNonNull(library); } @@ -96,10 +95,8 @@ public static QueryLibrary getLibrary() { } public static void importPackage(Package aPackage) { - // Any dynamically-added package, class, or static import may alter the meaning of the Java - // code - // we are compiling. So when this happens, we dynamically generate a new globally-unique - // version string. + // Any dynamically-added package, class, or static import may alter the meaning of the Java code + // we are compiling. 
So when this happens, we dynamically generate a new globally-unique version string. final QueryLibrary lql = currLibrary.get(); final Package previous = lql.packageImports.put(aPackage.getName(), aPackage); if (aPackage != previous) { @@ -108,10 +105,8 @@ public static void importPackage(Package aPackage) { } public static void importClass(Class aClass) { - // Any dynamically-added package, class, or static import may alter the meaning of the Java - // code - // we are compiling. So when this happens, we dynamically generate a new globally-unique - // version string. + // Any dynamically-added package, class, or static import may alter the meaning of the Java code + // we are compiling. So when this happens, we dynamically generate a new globally-unique version string. final QueryLibrary lql = currLibrary.get(); final Class previous = lql.classImports.put(aClass.getCanonicalName(), aClass); if (aClass.getClassLoader() instanceof GroovyClassLoader) { @@ -122,10 +117,8 @@ public static void importClass(Class aClass) { } public static void importStatic(Class aClass) { - // Any dynamically-added package, class, or static import may alter the meaning of the Java - // code - // we are compiling. So when this happens, we dynamically generate a new globally-unique - // version string. + // Any dynamically-added package, class, or static import may alter the meaning of the Java code + // we are compiling. So when this happens, we dynamically generate a new globally-unique version string. 
final QueryLibrary lql = currLibrary.get(); final Class previous = lql.staticImports.put(aClass.getCanonicalName(), aClass); if (aClass.getClassLoader() instanceof GroovyClassLoader) { diff --git a/DB/src/main/java/io/deephaven/db/tables/libs/QueryLibraryImports.java b/DB/src/main/java/io/deephaven/db/tables/libs/QueryLibraryImports.java index 91345e52842..72091f8d117 100644 --- a/DB/src/main/java/io/deephaven/db/tables/libs/QueryLibraryImports.java +++ b/DB/src/main/java/io/deephaven/db/tables/libs/QueryLibraryImports.java @@ -10,12 +10,9 @@ static QueryLibraryImports copyFromServiceLoader() { } static QueryLibraryImports copyFrom(QueryLibraryImports other) { - final Set packages = - Collections.unmodifiableSet(new LinkedHashSet<>(other.packages())); - final Set> classes = - Collections.unmodifiableSet(new LinkedHashSet<>(other.classes())); - final Set> statics = - Collections.unmodifiableSet(new LinkedHashSet<>(other.statics())); + final Set packages = Collections.unmodifiableSet(new LinkedHashSet<>(other.packages())); + final Set> classes = Collections.unmodifiableSet(new LinkedHashSet<>(other.classes())); + final Set> statics = Collections.unmodifiableSet(new LinkedHashSet<>(other.statics())); return new QueryLibraryImportsImpl(packages, classes, statics); } diff --git a/DB/src/main/java/io/deephaven/db/tables/libs/QueryLibraryImportsConfiguration.java b/DB/src/main/java/io/deephaven/db/tables/libs/QueryLibraryImportsConfiguration.java index b8da8cc1ffd..75fc03f23c1 100644 --- a/DB/src/main/java/io/deephaven/db/tables/libs/QueryLibraryImportsConfiguration.java +++ b/DB/src/main/java/io/deephaven/db/tables/libs/QueryLibraryImportsConfiguration.java @@ -11,8 +11,8 @@ public class QueryLibraryImportsConfiguration implements QueryLibraryImports { @Override public Set packages() { try { - return new HashSet<>(ClassList.readPackageList( - Configuration.getInstance().getProperty("QueryLibrary.defaultPackageImportList"))); + return new HashSet<>(ClassList + 
.readPackageList(Configuration.getInstance().getProperty("QueryLibrary.defaultPackageImportList"))); } catch (IOException e) { throw new RuntimeException("Can not load default class imports", e); } @@ -22,7 +22,7 @@ public Set packages() { public Set> classes() { try { return new HashSet<>(ClassList.readClassListAsCollection( - Configuration.getInstance().getProperty("QueryLibrary.defaultClassImportList"))); + Configuration.getInstance().getProperty("QueryLibrary.defaultClassImportList"))); } catch (IOException | ClassNotFoundException e) { throw new RuntimeException("Can not load default class imports", e); } @@ -32,7 +32,7 @@ public Set> classes() { public Set> statics() { try { return new HashSet<>(ClassList.readClassListAsCollection( - Configuration.getInstance().getProperty("QueryLibrary.defaultStaticImportList"))); + Configuration.getInstance().getProperty("QueryLibrary.defaultStaticImportList"))); } catch (IOException | ClassNotFoundException e) { throw new RuntimeException("Can not load default static imports", e); } diff --git a/DB/src/main/java/io/deephaven/db/tables/libs/QueryLibraryImportsDefaults.java b/DB/src/main/java/io/deephaven/db/tables/libs/QueryLibraryImportsDefaults.java index 327a098b167..ef41c3e0f16 100644 --- a/DB/src/main/java/io/deephaven/db/tables/libs/QueryLibraryImportsDefaults.java +++ b/DB/src/main/java/io/deephaven/db/tables/libs/QueryLibraryImportsDefaults.java @@ -10,89 +10,89 @@ public class QueryLibraryImportsDefaults implements QueryLibraryImports { @Override public Set packages() { return new LinkedHashSet<>(Arrays.asList( - Package.getPackage("java.lang"), - Package.getPackage("java.util"))); + Package.getPackage("java.lang"), + Package.getPackage("java.util"))); } @Override public Set> classes() { return new LinkedHashSet<>(Arrays.asList( - java.lang.reflect.Array.class, - io.deephaven.util.type.TypeUtils.class, - io.deephaven.db.tables.Table.class, - io.deephaven.db.tables.DataColumn.class, - 
io.deephaven.db.tables.utils.ArrayUtils.class, - io.deephaven.db.tables.utils.DBDateTime.class, - io.deephaven.db.tables.utils.DBTimeUtils.class, - io.deephaven.base.string.cache.CompressedString.class, - java.util.Arrays.class, - org.joda.time.LocalTime.class, - io.deephaven.db.tables.utils.DBPeriod.class, - io.deephaven.db.tables.select.Param.class, - io.deephaven.db.v2.sources.ColumnSource.class, - io.deephaven.db.v2.utils.Index.class, - io.deephaven.db.v2.utils.IndexBuilder.class, - io.deephaven.db.v2.utils.Index.SequentialBuilder.class, - io.deephaven.db.util.LongSizedDataStructure.class, - java.util.concurrent.ConcurrentHashMap.class, - io.deephaven.db.v2.sources.chunk.Attributes.class, - io.deephaven.db.v2.sources.chunk.Chunk.class, - io.deephaven.db.v2.sources.chunk.ByteChunk.class, - io.deephaven.db.v2.sources.chunk.CharChunk.class, - io.deephaven.db.v2.sources.chunk.ShortChunk.class, - io.deephaven.db.v2.sources.chunk.IntChunk.class, - io.deephaven.db.v2.sources.chunk.LongChunk.class, - io.deephaven.db.v2.sources.chunk.FloatChunk.class, - io.deephaven.db.v2.sources.chunk.DoubleChunk.class, - io.deephaven.db.v2.sources.chunk.ObjectChunk.class, - io.deephaven.db.v2.sources.chunk.WritableChunk.class, - io.deephaven.db.v2.sources.chunk.WritableByteChunk.class, - io.deephaven.db.v2.sources.chunk.WritableCharChunk.class, - io.deephaven.db.v2.sources.chunk.WritableShortChunk.class, - io.deephaven.db.v2.sources.chunk.WritableIntChunk.class, - io.deephaven.db.v2.sources.chunk.WritableLongChunk.class, - io.deephaven.db.v2.sources.chunk.WritableFloatChunk.class, - io.deephaven.db.v2.sources.chunk.WritableDoubleChunk.class, - io.deephaven.db.v2.sources.chunk.WritableObjectChunk.class, - io.deephaven.db.v2.sources.chunk.Context.class, - io.deephaven.db.v2.select.ConditionFilter.FilterKernel.class, - io.deephaven.db.v2.utils.OrderedKeys.class)); + java.lang.reflect.Array.class, + io.deephaven.util.type.TypeUtils.class, + io.deephaven.db.tables.Table.class, + 
io.deephaven.db.tables.DataColumn.class, + io.deephaven.db.tables.utils.ArrayUtils.class, + io.deephaven.db.tables.utils.DBDateTime.class, + io.deephaven.db.tables.utils.DBTimeUtils.class, + io.deephaven.base.string.cache.CompressedString.class, + java.util.Arrays.class, + org.joda.time.LocalTime.class, + io.deephaven.db.tables.utils.DBPeriod.class, + io.deephaven.db.tables.select.Param.class, + io.deephaven.db.v2.sources.ColumnSource.class, + io.deephaven.db.v2.utils.Index.class, + io.deephaven.db.v2.utils.IndexBuilder.class, + io.deephaven.db.v2.utils.Index.SequentialBuilder.class, + io.deephaven.db.util.LongSizedDataStructure.class, + java.util.concurrent.ConcurrentHashMap.class, + io.deephaven.db.v2.sources.chunk.Attributes.class, + io.deephaven.db.v2.sources.chunk.Chunk.class, + io.deephaven.db.v2.sources.chunk.ByteChunk.class, + io.deephaven.db.v2.sources.chunk.CharChunk.class, + io.deephaven.db.v2.sources.chunk.ShortChunk.class, + io.deephaven.db.v2.sources.chunk.IntChunk.class, + io.deephaven.db.v2.sources.chunk.LongChunk.class, + io.deephaven.db.v2.sources.chunk.FloatChunk.class, + io.deephaven.db.v2.sources.chunk.DoubleChunk.class, + io.deephaven.db.v2.sources.chunk.ObjectChunk.class, + io.deephaven.db.v2.sources.chunk.WritableChunk.class, + io.deephaven.db.v2.sources.chunk.WritableByteChunk.class, + io.deephaven.db.v2.sources.chunk.WritableCharChunk.class, + io.deephaven.db.v2.sources.chunk.WritableShortChunk.class, + io.deephaven.db.v2.sources.chunk.WritableIntChunk.class, + io.deephaven.db.v2.sources.chunk.WritableLongChunk.class, + io.deephaven.db.v2.sources.chunk.WritableFloatChunk.class, + io.deephaven.db.v2.sources.chunk.WritableDoubleChunk.class, + io.deephaven.db.v2.sources.chunk.WritableObjectChunk.class, + io.deephaven.db.v2.sources.chunk.Context.class, + io.deephaven.db.v2.select.ConditionFilter.FilterKernel.class, + io.deephaven.db.v2.utils.OrderedKeys.class)); } @Override public Set> statics() { return new LinkedHashSet<>(Arrays.asList( - 
io.deephaven.util.QueryConstants.class, - io.deephaven.libs.primitives.BytePrimitives.class, - io.deephaven.libs.primitives.ByteNumericPrimitives.class, - io.deephaven.libs.primitives.CharacterPrimitives.class, - io.deephaven.libs.primitives.DoublePrimitives.class, - io.deephaven.libs.primitives.DoubleNumericPrimitives.class, - io.deephaven.libs.primitives.DoubleFpPrimitives.class, - io.deephaven.libs.primitives.FloatPrimitives.class, - io.deephaven.libs.primitives.FloatFpPrimitives.class, - io.deephaven.libs.primitives.FloatNumericPrimitives.class, - io.deephaven.libs.primitives.IntegerPrimitives.class, - io.deephaven.libs.primitives.IntegerNumericPrimitives.class, - io.deephaven.libs.primitives.ShortPrimitives.class, - io.deephaven.libs.primitives.ShortNumericPrimitives.class, - io.deephaven.libs.primitives.LongPrimitives.class, - io.deephaven.libs.primitives.LongNumericPrimitives.class, - io.deephaven.libs.primitives.ObjectPrimitives.class, - io.deephaven.libs.primitives.BooleanPrimitives.class, - io.deephaven.libs.primitives.ComparePrimitives.class, - io.deephaven.libs.primitives.BinSearch.class, - io.deephaven.libs.primitives.Casting.class, - io.deephaven.libs.primitives.PrimitiveParseUtil.class, - io.deephaven.db.tables.lang.DBLanguageFunctionUtil.class, - io.deephaven.db.tables.utils.DBTimeUtils.class, - io.deephaven.db.tables.utils.DBTimeZone.class, - io.deephaven.base.string.cache.CompressedString.class, - io.deephaven.db.tables.utils.WhereClause.class, - io.deephaven.gui.color.Color.class, - io.deephaven.db.util.DBColorUtilImpl.class, - io.deephaven.db.tables.verify.TableAssertions.class, - io.deephaven.util.calendar.StaticCalendarMethods.class, - io.deephaven.db.v2.sources.chunk.Attributes.class)); + io.deephaven.util.QueryConstants.class, + io.deephaven.libs.primitives.BytePrimitives.class, + io.deephaven.libs.primitives.ByteNumericPrimitives.class, + io.deephaven.libs.primitives.CharacterPrimitives.class, + 
io.deephaven.libs.primitives.DoublePrimitives.class, + io.deephaven.libs.primitives.DoubleNumericPrimitives.class, + io.deephaven.libs.primitives.DoubleFpPrimitives.class, + io.deephaven.libs.primitives.FloatPrimitives.class, + io.deephaven.libs.primitives.FloatFpPrimitives.class, + io.deephaven.libs.primitives.FloatNumericPrimitives.class, + io.deephaven.libs.primitives.IntegerPrimitives.class, + io.deephaven.libs.primitives.IntegerNumericPrimitives.class, + io.deephaven.libs.primitives.ShortPrimitives.class, + io.deephaven.libs.primitives.ShortNumericPrimitives.class, + io.deephaven.libs.primitives.LongPrimitives.class, + io.deephaven.libs.primitives.LongNumericPrimitives.class, + io.deephaven.libs.primitives.ObjectPrimitives.class, + io.deephaven.libs.primitives.BooleanPrimitives.class, + io.deephaven.libs.primitives.ComparePrimitives.class, + io.deephaven.libs.primitives.BinSearch.class, + io.deephaven.libs.primitives.Casting.class, + io.deephaven.libs.primitives.PrimitiveParseUtil.class, + io.deephaven.db.tables.lang.DBLanguageFunctionUtil.class, + io.deephaven.db.tables.utils.DBTimeUtils.class, + io.deephaven.db.tables.utils.DBTimeZone.class, + io.deephaven.base.string.cache.CompressedString.class, + io.deephaven.db.tables.utils.WhereClause.class, + io.deephaven.gui.color.Color.class, + io.deephaven.db.util.DBColorUtilImpl.class, + io.deephaven.db.tables.verify.TableAssertions.class, + io.deephaven.util.calendar.StaticCalendarMethods.class, + io.deephaven.db.v2.sources.chunk.Attributes.class)); } } diff --git a/DB/src/main/java/io/deephaven/db/tables/libs/QueryLibraryImportsImpl.java b/DB/src/main/java/io/deephaven/db/tables/libs/QueryLibraryImportsImpl.java index d559c1bb4f6..8618e28e244 100644 --- a/DB/src/main/java/io/deephaven/db/tables/libs/QueryLibraryImportsImpl.java +++ b/DB/src/main/java/io/deephaven/db/tables/libs/QueryLibraryImportsImpl.java @@ -10,7 +10,7 @@ class QueryLibraryImportsImpl implements QueryLibraryImports { private final Set> statics; 
QueryLibraryImportsImpl( - Set packages, Set> classes, Set> statics) { + Set packages, Set> classes, Set> statics) { this.packages = Objects.requireNonNull(packages); this.classes = Objects.requireNonNull(classes); this.statics = Objects.requireNonNull(statics); diff --git a/DB/src/main/java/io/deephaven/db/tables/libs/StringSet.java b/DB/src/main/java/io/deephaven/db/tables/libs/StringSet.java index dff52a95014..00c6ace3262 100644 --- a/DB/src/main/java/io/deephaven/db/tables/libs/StringSet.java +++ b/DB/src/main/java/io/deephaven/db/tables/libs/StringSet.java @@ -25,8 +25,7 @@ public interface StringSet extends Iterable { String[] values(); /** - * Get a sorted array of the values in this StringSet. May or may not match the value returned - * by {@link #values()}. + * Get a sorted array of the values in this StringSet. May or may not match the value returned by {@link #values()}. * * @return A sorted array of this StringSet's values */ diff --git a/DB/src/main/java/io/deephaven/db/tables/libs/StringSetFromEnum.java b/DB/src/main/java/io/deephaven/db/tables/libs/StringSetFromEnum.java index 90800f56574..f5f19db314f 100644 --- a/DB/src/main/java/io/deephaven/db/tables/libs/StringSetFromEnum.java +++ b/DB/src/main/java/io/deephaven/db/tables/libs/StringSetFromEnum.java @@ -12,10 +12,9 @@ import java.util.concurrent.locks.ReentrantReadWriteLock; /** - * This class provides a object that converts from list of {@code String} values and their binary - * masks to a StringSetWrapper. (The first string corresponds to the value 1, the second to 2, the - * third to 4, etc.) Because of the use of values of 2, this conversion can handle bit sets. It is - * however limited to 31 possible enum values. + * This class provides a object that converts from list of {@code String} values and their binary masks to a + * StringSetWrapper. (The first string corresponds to the value 1, the second to 2, the third to 4, etc.) 
Because of the + * use of values of 2, this conversion can handle bit sets. It is however limited to 31 possible enum values. */ public class StringSetFromEnum { @@ -24,8 +23,8 @@ public class StringSetFromEnum { protected final ReentrantReadWriteLock setLock = new ReentrantReadWriteLock(); /** - * Create a converter for the sequence of {@code enums}, where the i-th enum in the sequence is - * associated with the value {@code Math.pow(2,i)} (starting with index 0 and value 1). + * Create a converter for the sequence of {@code enums}, where the i-th enum in the sequence is associated with the + * value {@code Math.pow(2,i)} (starting with index 0 and value 1). */ public StringSetFromEnum(String enums[]) { strings = Arrays.copyOf(enums, enums.length); diff --git a/DB/src/main/java/io/deephaven/db/tables/live/LiveTableMonitor.java b/DB/src/main/java/io/deephaven/db/tables/live/LiveTableMonitor.java index fcfb2f814c0..2d4a6d29307 100644 --- a/DB/src/main/java/io/deephaven/db/tables/live/LiveTableMonitor.java +++ b/DB/src/main/java/io/deephaven/db/tables/live/LiveTableMonitor.java @@ -55,22 +55,20 @@ /** *

    - * This class contains a thread which periodically updates a set of monitored {@link LiveTable - * LiveTables} at a specified target cycle time. The target cycle time can be - * {@link #setTargetCycleTime(long) configured} to reduce or increase the refresh rate of the - * monitored tables. + * This class contains a thread which periodically updates a set of monitored {@link LiveTable LiveTables} at a + * specified target cycle time. The target cycle time can be {@link #setTargetCycleTime(long) configured} to reduce or + * increase the refresh rate of the monitored tables. *

    * *

    * This class can be configured via the following {@link Configuration} property *

    *
      - *
    • LiveTableMonitor.targetcycletime (optional) - The default target cycle time in - * ms (1000 if not defined)
    • + *
    • LiveTableMonitor.targetcycletime (optional) - The default target cycle time in ms (1000 if not + * defined)
    • *
    */ -public enum LiveTableMonitor - implements LiveTableRegistrar, NotificationQueue, NotificationQueue.Dependency { +public enum LiveTableMonitor implements LiveTableRegistrar, NotificationQueue, NotificationQueue.Dependency { DEFAULT; private final Logger log = LoggerFactory.getLogger(LiveTableMonitor.class); @@ -79,7 +77,7 @@ public enum LiveTableMonitor * {@link LiveTable}s that are part of this LiveTableMonitor. */ private final SimpleReferenceManager tables = - new SimpleReferenceManager<>(LiveTableRefreshNotification::new); + new SimpleReferenceManager<>(LiveTableRefreshNotification::new); /** * Recorder for live table satisfaction as a phase of notification processing. @@ -90,41 +88,37 @@ public enum LiveTableMonitor * The queue of non-terminal notifications to process. */ private final IntrusiveDoublyLinkedQueue pendingNormalNotifications = - new IntrusiveDoublyLinkedQueue<>( - IntrusiveDoublyLinkedNode.Adapter.getInstance()); + new IntrusiveDoublyLinkedQueue<>(IntrusiveDoublyLinkedNode.Adapter.getInstance()); /** * The queue of terminal notifications to process. */ private final IntrusiveDoublyLinkedQueue terminalNotifications = - new IntrusiveDoublyLinkedQueue<>( - IntrusiveDoublyLinkedNode.Adapter.getInstance()); + new IntrusiveDoublyLinkedQueue<>(IntrusiveDoublyLinkedNode.Adapter.getInstance()); - // Specifically not using a ConcurrentListDeque here because we don't want to create wasteful - // garbage + // Specifically not using a ConcurrentListDeque here because we don't want to create wasteful garbage // when we know this collection is going to constantly grow and shrink. 
private final IntrusiveArraySet singleUpdateQueue = - new IntrusiveArraySet<>(SingleUpdateSlotAdapter.INSTANCE, - LiveTableRefreshNotification.class); + new IntrusiveArraySet<>(SingleUpdateSlotAdapter.INSTANCE, LiveTableRefreshNotification.class); private final Thread refreshThread; /** - * If this is set to a positive value, then we will call the {@link #watchDogTimeoutProcedure} - * if any single refresh loop takes longer than this value. The intention is to use this for - * strategies, or other queries, where a LiveTableMonitor loop that is "stuck" is the equivalent - * of an error. Set the value with {@link #setWatchDogMillis(int)}. + * If this is set to a positive value, then we will call the {@link #watchDogTimeoutProcedure} if any single refresh + * loop takes longer than this value. The intention is to use this for strategies, or other queries, where a + * LiveTableMonitor loop that is "stuck" is the equivalent of an error. Set the value with + * {@link #setWatchDogMillis(int)}. */ private int watchDogMillis = 0; /** - * If a timeout time has been {@link #setWatchDogMillis(int) set}, this procedure will be called - * if any single refresh loop takes longer than the value specified. Set the value with + * If a timeout time has been {@link #setWatchDogMillis(int) set}, this procedure will be called if any single + * refresh loop takes longer than the value specified. Set the value with * {@link #setWatchDogTimeoutProcedure(LongConsumer)}. 
*/ private LongConsumer watchDogTimeoutProcedure = null; - private final boolean allowUnitTestMode = Configuration.getInstance() - .getBooleanWithDefault("LiveTableMonitor.allowUnitTestMode", false); + private final boolean allowUnitTestMode = + Configuration.getInstance().getBooleanWithDefault("LiveTableMonitor.allowUnitTestMode", false); private int notificationAdditionDelay = 0; private Random notificationRandomizer = new Random(0); private boolean unitTestMode = false; @@ -132,10 +126,10 @@ public enum LiveTableMonitor private ExecutorService unitTestRefreshThreadPool; private final long defaultTargetCycleTime = - Configuration.getInstance().getIntegerWithDefault("LiveTableMonitor.targetcycletime", 1000); + Configuration.getInstance().getIntegerWithDefault("LiveTableMonitor.targetcycletime", 1000); private volatile long targetCycleTime = defaultTargetCycleTime; - private final long minimumCycleLogNanos = TimeUnit.MILLISECONDS.toNanos(Configuration - .getInstance().getIntegerWithDefault("LiveTableMonitor.minimumCycleLogTime", 25)); + private final long minimumCycleLogNanos = TimeUnit.MILLISECONDS + .toNanos(Configuration.getInstance().getIntegerWithDefault("LiveTableMonitor.minimumCycleLogTime", 25)); /** * How many cycles we have not logged, but were non-zero. @@ -163,50 +157,45 @@ public enum LiveTableMonitor private NotificationProcessor notificationProcessor; /** - * The {@link LivenessScope} that should be on top of the {@link LivenessScopeStack} for all - * refresh and notification processing. Only non-null while some thread is in - * {@link #doRefresh(Runnable)}. + * The {@link LivenessScope} that should be on top of the {@link LivenessScopeStack} for all refresh and + * notification processing. Only non-null while some thread is in {@link #doRefresh(Runnable)}. */ private volatile LivenessScope refreshScope; /** - * The number of threads in our executor service for dispatching notifications. 
If 1, then we - * don't actually use the executor service; but instead dispatch all the notifications on the - * LiveTableMonitor refresh thread. + * The number of threads in our executor service for dispatching notifications. If 1, then we don't actually use the + * executor service; but instead dispatch all the notifications on the LiveTableMonitor refresh thread. */ private final int updateThreads = Require.geq( - Configuration.getInstance().getIntegerWithDefault("LiveTableMonitor.updateThreads", 1), - "updateThreads", 1); + Configuration.getInstance().getIntegerWithDefault("LiveTableMonitor.updateThreads", 1), "updateThreads", 1); /** - * Is this one of the threads engaged in notification processing? (Either the solitary refresh - * thread, or one of the pooled threads it uses in some configurations) + * Is this one of the threads engaged in notification processing? (Either the solitary refresh thread, or one of the + * pooled threads it uses in some configurations) */ private final ThreadLocal isRefreshThread = ThreadLocal.withInitial(() -> false); - private final boolean CHECK_TABLE_OPERATIONS = Configuration.getInstance() - .getBooleanWithDefault("LiveTableMonitor.checkTableOperations", false); - private final ThreadLocal checkTableOperations = - ThreadLocal.withInitial(() -> CHECK_TABLE_OPERATIONS); + private final boolean CHECK_TABLE_OPERATIONS = + Configuration.getInstance().getBooleanWithDefault("LiveTableMonitor.checkTableOperations", false); + private final ThreadLocal checkTableOperations = ThreadLocal.withInitial(() -> CHECK_TABLE_OPERATIONS); - private final long minimumInterCycleSleep = Configuration.getInstance() - .getIntegerWithDefault("LiveTableMonitor.minimumInterCycleSleep", 0); - private final boolean interCycleYield = Configuration.getInstance() - .getBooleanWithDefault("LiveTableMonitor.interCycleYield", false); + private final long minimumInterCycleSleep = + 
Configuration.getInstance().getIntegerWithDefault("LiveTableMonitor.minimumInterCycleSleep", 0); + private final boolean interCycleYield = + Configuration.getInstance().getBooleanWithDefault("LiveTableMonitor.interCycleYield", false); /** * Encapsulates locking support. */ - private final LiveTableMonitorLock lock = - new LiveTableMonitorLock(LogicalClock.DEFAULT, allowUnitTestMode); + private final LiveTableMonitorLock lock = new LiveTableMonitorLock(LogicalClock.DEFAULT, allowUnitTestMode); /** - * When LiveTableMonitor.printDependencyInformation is set to true, the LiveTableMonitor will - * print debug information for each notification that has dependency information; as well as - * which notifications have been completed and are outstanding. + * When LiveTableMonitor.printDependencyInformation is set to true, the LiveTableMonitor will print debug + * information for each notification that has dependency information; as well as which notifications have been + * completed and are outstanding. 
*/ - private final boolean printDependencyInformation = Configuration.getInstance() - .getBooleanWithDefault("LiveTableMonitor.printDependencyInformation", false); + private final boolean printDependencyInformation = + Configuration.getInstance().getBooleanWithDefault("LiveTableMonitor.printDependencyInformation", false); LiveTableMonitor() { notificationProcessor = makeNotificationProcessor(); @@ -239,7 +228,7 @@ public String toString() { private NotificationProcessor makeNotificationProcessor() { if (updateThreads > 1) { final ThreadFactory threadFactory = new LiveTableMonitorThreadFactory( - new ThreadGroup("LiveTableMonitor-updateExecutors"), "updateExecutor"); + new ThreadGroup("LiveTableMonitor-updateExecutors"), "updateExecutor"); return new ConcurrentNotificationProcessor(threadFactory, updateThreads); } else { return new QueueNotificationProcessor(); @@ -247,11 +236,10 @@ private NotificationProcessor makeNotificationProcessor() { } @TestUseOnly - private NotificationProcessor makeRandomizedNotificationProcessor(final Random random, - final int nThreads, final int notificationStartDelay) { + private NotificationProcessor makeRandomizedNotificationProcessor(final Random random, final int nThreads, + final int notificationStartDelay) { final LiveTableMonitorThreadFactory threadFactory = new LiveTableMonitorThreadFactory( - new ThreadGroup("LiveTableMonitor-randomizedUpdatedExecutors"), - "randomizedUpdateExecutor"); + new ThreadGroup("LiveTableMonitor-randomizedUpdatedExecutors"), "randomizedUpdateExecutor"); return new ConcurrentNotificationProcessor(threadFactory, nThreads) { private Notification addRandomDelay(@NotNull final Notification notification) { @@ -262,8 +250,8 @@ private Notification addRandomDelay(@NotNull final Notification notification) { @Override public void run() { final int millis = random.nextInt(notificationStartDelay); - logDependencies().append(Thread.currentThread().getName()) - .append(": Sleeping for 
").append(millis).append("ms").endl(); + logDependencies().append(Thread.currentThread().getName()).append(": Sleeping for ") + .append(millis).append("ms").endl(); SleepUtil.sleep(millis); super.run(); } @@ -275,13 +263,11 @@ public void submit(@NotNull Notification notification) { if (notification instanceof LiveTableRefreshNotification) { super.submit(notification); } else if (notification instanceof InstrumentedListener.ErrorNotification) { - // NB: The previous implementation of this concept was more rigorous about - // ensuring that errors + // NB: The previous implementation of this concept was more rigorous about ensuring that errors // would be next, but this is likely good enough. submitAt(notification, 0); } else { - submitAt(addRandomDelay(notification), - random.nextInt(outstandingNotificationsCount() + 1)); + submitAt(addRandomDelay(notification), random.nextInt(outstandingNotificationsCount() + 1)); } } @@ -296,9 +282,8 @@ public void submitAll(@NotNull IntrusiveDoublyLinkedQueue notifica * Retrieve the number of update threads. * *

    - * The LiveTableMonitor has a configurable number of update processing threads. The number of - * threads is exposed in your method to enable you to partition a query based on the number of - * threads. + * The LiveTableMonitor has a configurable number of update processing threads. The number of threads is exposed in + * your method to enable you to partition a query based on the number of threads. *

    * * @return the number of update threads configured. @@ -314,13 +299,13 @@ public int getUpdateThreads() { *

    * Get the shared lock for this {@link LiveTableMonitor}. *

    - * Using this lock will prevent refresh processing from proceeding concurrently, but will allow - * other read-only processing to proceed. + * Using this lock will prevent refresh processing from proceeding concurrently, but will allow other read-only + * processing to proceed. *

    * The shared lock implementation is expected to support reentrance. *

    - * This lock does not support {@link java.util.concurrent.locks.Lock#newCondition()}. - * Use the exclusive lock if you need to wait on events that are driven by refresh processing. + * This lock does not support {@link java.util.concurrent.locks.Lock#newCondition()}. Use the exclusive + * lock if you need to wait on events that are driven by refresh processing. * * @return The shared lock for this {@link LiveTableMonitor} */ @@ -336,8 +321,8 @@ public AwareFunctionalLock sharedLock() { *

    * The exclusive lock implementation is expected to support reentrance. *

    - * Note that using the exclusive lock while the shared lock is held by the current thread will - * result in exceptions, as lock upgrade is not supported. + * Note that using the exclusive lock while the shared lock is held by the current thread will result in exceptions, + * as lock upgrade is not supported. *

    * This lock does support {@link java.util.concurrent.locks.Lock#newCondition()}. * @@ -360,40 +345,37 @@ public boolean isRefreshThread() { /** *

    - * If we are establishing a new table operation, on a refreshing table without the - * LiveTableMonitor lock; then we are likely committing a grievous error, but one that will only - * occasionally result in us getting the wrong answer or if we are lucky an assertion. This - * method is called from various query operations that should not be established without the LTM - * lock. + * If we are establishing a new table operation, on a refreshing table without the LiveTableMonitor lock; then we + * are likely committing a grievous error, but one that will only occasionally result in us getting the wrong answer + * or if we are lucky an assertion. This method is called from various query operations that should not be + * established without the LTM lock. *

    * *

    - * The refresh thread pool threads are allowed to instantiate operations, even though that - * thread does not have the lock; because they are protected by the main refresh thread and - * dependency tracking. + * The refresh thread pool threads are allowed to instantiate operations, even though that thread does not have the + * lock; because they are protected by the main refresh thread and dependency tracking. *

    * *

    * If you are sure that you know what you are doing better than the query engine, you may call - * {@link #setCheckTableOperations(boolean)} to set a thread local variable bypassing this - * check. + * {@link #setCheckTableOperations(boolean)} to set a thread local variable bypassing this check. *

    */ public void checkInitiateTableOperation() { if (!getCheckTableOperations() || exclusiveLock().isHeldByCurrentThread() - || sharedLock().isHeldByCurrentThread() || isRefreshThread()) { + || sharedLock().isHeldByCurrentThread() || isRefreshThread()) { return; } - throw new IllegalStateException("May not initiate table operations: LTM exclusiveLockHeld=" - + exclusiveLock().isHeldByCurrentThread() - + ", sharedLockHeld=" + sharedLock().isHeldByCurrentThread() - + ", refreshThread=" + isRefreshThread()); + throw new IllegalStateException( + "May not initiate table operations: LTM exclusiveLockHeld=" + exclusiveLock().isHeldByCurrentThread() + + ", sharedLockHeld=" + sharedLock().isHeldByCurrentThread() + + ", refreshThread=" + isRefreshThread()); } /** - * If you know that the table operations you are performing are indeed safe, then call this - * method with false to disable table operation checking. Conversely, if you want to enforce - * checking even if the configuration disagrees; call it with true. + * If you know that the table operations you are performing are indeed safe, then call this method with false to + * disable table operation checking. Conversely, if you want to enforce checking even if the configuration + * disagrees; call it with true. * * @param value the new value of check table operations * @return the old value of check table operations @@ -476,11 +458,9 @@ public long getTargetCycleTime() { } /** - * Resets the refresh cycle time to the default target configured via the - * LiveTableMonitor.targetcycletime property. + * Resets the refresh cycle time to the default target configured via the LiveTableMonitor.targetcycletime property. * - * @implNote If the LiveTableMonitor.targetcycletime property is not set, this value defaults to - * 1000ms. + * @implNote If the LiveTableMonitor.targetcycletime property is not set, this value defaults to 1000ms. 
*/ @SuppressWarnings("unused") public void resetCycleTime() { @@ -494,8 +474,7 @@ public void resetCycleTime() { * *

    * In this mode calls to {@link #addTable(LiveTable)} will only mark tables as - * {@link DynamicNode#setRefreshing(boolean) refreshing}. Additionally {@link #start()} may not - * be called. + * {@link DynamicNode#setRefreshing(boolean) refreshing}. Additionally {@link #start()} may not be called. *

    */ public void enableUnitTestMode() { @@ -516,14 +495,13 @@ public void enableUnitTestMode() { private void assertLockAvailable(@NotNull final String action) { if (!LiveTableMonitor.DEFAULT.exclusiveLock().tryLock()) { - log.error().append("Lock is held when ").append(action) - .append(", with previous holder: ").append(unitTestModeHolder).endl(); + log.error().append("Lock is held when ").append(action).append(", with previous holder: ") + .append(unitTestModeHolder).endl(); ThreadDump.threadDump(System.err); LiveTableMonitorLock.DebugAwareFunctionalLock lock = - (LiveTableMonitorLock.DebugAwareFunctionalLock) LiveTableMonitor.DEFAULT - .exclusiveLock(); - throw new IllegalStateException("Lock is held when " + action - + ", with previous holder: " + lock.getDebugMessage()); + (LiveTableMonitorLock.DebugAwareFunctionalLock) LiveTableMonitor.DEFAULT.exclusiveLock(); + throw new IllegalStateException( + "Lock is held when " + action + ", with previous holder: " + lock.getDebugMessage()); } LiveTableMonitor.DEFAULT.exclusiveLock().unlock(); } @@ -565,9 +543,8 @@ public void requestSignal(Condition liveTableMonitorCondition) { final Notification terminalNotification = new TerminalNotification() { @Override public void run() { - Assert.assertion( - LiveTableMonitor.DEFAULT.exclusiveLock().isHeldByCurrentThread(), - "LiveTableMonitor.DEFAULT.isHeldByCurrentThread()"); + Assert.assertion(LiveTableMonitor.DEFAULT.exclusiveLock().isHeldByCurrentThread(), + "LiveTableMonitor.DEFAULT.isHeldByCurrentThread()"); liveTableMonitorCondition.signalAll(); } @@ -579,7 +556,7 @@ public boolean mustExecuteWithLtmLock() { @Override public LogOutput append(LogOutput output) { return output.append("SignalNotification(") - .append(System.identityHashCode(liveTableMonitorCondition)).append(")"); + .append(System.identityHashCode(liveTableMonitorCondition)).append(")"); } }; synchronized (terminalNotifications) { @@ -608,18 +585,18 @@ public void start() { synchronized (refreshThread) { 
if (!refreshThread.isAlive()) { log.info().append("LiveTableMonitor starting with ").append(updateThreads) - .append(" notification processing threads").endl(); + .append(" notification processing threads").endl(); refreshThread.start(); } } } /** - * Add a table to the list of tables to refresh and mark it as - * {@link DynamicNode#setRefreshing(boolean) refreshing} if it was a {@link DynamicNode}. + * Add a table to the list of tables to refresh and mark it as {@link DynamicNode#setRefreshing(boolean) refreshing} + * if it was a {@link DynamicNode}. * - * @implNote This will do nothing in {@link #enableUnitTestMode() unit test} mode other than - * mark the table as refreshing. + * @implNote This will do nothing in {@link #enableUnitTestMode() unit test} mode other than mark the table as + * refreshing. * @param table The table to be added to the refresh list */ @Override @@ -643,8 +620,7 @@ public void removeTable(@NotNull final LiveTable liveTable) { /** * Remove a collection of tables from the list of refreshing tables. * - * @implNote This will not set the tables as {@link DynamicNode#setRefreshing(boolean) - * non-refreshing}. + * @implNote This will not set the tables as {@link DynamicNode#setRefreshing(boolean) non-refreshing}. * @param tablesToRemove The tables to remove from the list of refreshing tables */ public void removeTables(final Collection tablesToRemove) { @@ -652,9 +628,9 @@ public void removeTables(final Collection tablesToRemove) { } /** - * Enqueue a notification to be flushed according to its priority. Non-terminal notifications - * should only be enqueued during the updating phase of a cycle. That is, they should be - * enqueued from a {@link LiveTable#refresh()} or subsequent notification delivery. + * Enqueue a notification to be flushed according to its priority. Non-terminal notifications should only be + * enqueued during the updating phase of a cycle. 
That is, they should be enqueued from a + * {@link LiveTable#refresh()} or subsequent notification delivery. * * @param notification The notification to enqueue * @see NotificationQueue.Notification#isTerminal() @@ -670,12 +646,11 @@ public void addNotification(@NotNull final Notification notification) { terminalNotifications.offer(notification); } } else { - logDependencies().append(Thread.currentThread().getName()) - .append(": Adding notification ").append(notification).endl(); + logDependencies().append(Thread.currentThread().getName()).append(": Adding notification ") + .append(notification).endl(); synchronized (pendingNormalNotifications) { - Assert.eq(LogicalClock.DEFAULT.currentState(), - "LogicalClock.DEFAULT.currentState()", LogicalClock.State.Updating, - "LogicalClock.State.Updating"); + Assert.eq(LogicalClock.DEFAULT.currentState(), "LogicalClock.DEFAULT.currentState()", + LogicalClock.State.Updating, "LogicalClock.State.Updating"); pendingNormalNotifications.offer(notification); } notificationProcessor.onNotificationAdded(); @@ -683,24 +658,22 @@ public void addNotification(@NotNull final Notification notification) { } @Override - public boolean maybeAddNotification(@NotNull final Notification notification, - final long deliveryStep) { + public boolean maybeAddNotification(@NotNull final Notification notification, final long deliveryStep) { if (notificationAdditionDelay > 0) { SleepUtil.sleep(notificationRandomizer.nextInt(notificationAdditionDelay)); } if (notification.isTerminal()) { throw new IllegalArgumentException("Notification must not be terminal"); } - logDependencies().append(Thread.currentThread().getName()).append(": Adding notification ") - .append(notification).append(" if step is ").append(deliveryStep).endl(); + logDependencies().append(Thread.currentThread().getName()).append(": Adding notification ").append(notification) + .append(" if step is ").append(deliveryStep).endl(); final boolean added; synchronized 
(pendingNormalNotifications) { - // Note that the clock is advanced to idle under the pendingNormalNotifications lock, - // after which point no + // Note that the clock is advanced to idle under the pendingNormalNotifications lock, after which point no // further normal notifications will be processed on this cycle. final long logicalClockValue = LogicalClock.DEFAULT.currentValue(); if (LogicalClock.getState(logicalClockValue) == LogicalClock.State.Updating - && LogicalClock.getStep(logicalClockValue) == deliveryStep) { + && LogicalClock.getStep(logicalClockValue) == deliveryStep) { pendingNormalNotifications.offer(notification); added = true; } else { @@ -740,7 +713,7 @@ public void addNotifications(@NotNull final Collection notificatio */ private void maybeRefreshTable(@NotNull final LiveTable liveTable) { final LiveTableRefreshNotification liveTableRefreshNotification = - tables.getFirstReference((final LiveTable found) -> found == liveTable); + tables.getFirstReference((final LiveTable found) -> found == liveTable); if (liveTableRefreshNotification == null) { return; } @@ -748,15 +721,14 @@ private void maybeRefreshTable(@NotNull final LiveTable liveTable) { } /** - * Acquire the exclusive lock if necessary and do a refresh of {@code liveTable} on this thread - * if it is registered with this LTM. + * Acquire the exclusive lock if necessary and do a refresh of {@code liveTable} on this thread if it is registered + * with this LTM. 
* * @param liveTable The {@link LiveTable} that we would like to refresh * @param onlyIfHaveLock If true, check that the lock is held first and do nothing if it is not */ @Override - public void maybeRefreshTable(@NotNull final LiveTable liveTable, - final boolean onlyIfHaveLock) { + public void maybeRefreshTable(@NotNull final LiveTable liveTable, final boolean onlyIfHaveLock) { if (!onlyIfHaveLock || exclusiveLock().isHeldByCurrentThread()) { maybeRefreshTable(liveTable); } @@ -764,12 +736,11 @@ public void maybeRefreshTable(@NotNull final LiveTable liveTable, /** *

    - * Request a refresh for a single {@link LiveTable live table}, which must already be registered - * with this LiveTableMonitor. + * Request a refresh for a single {@link LiveTable live table}, which must already be registered with this + * LiveTableMonitor. *

    *

    - * The update will occur on the LTM thread, but will not necessarily wait for the next scheduled - * cycle. + * The update will occur on the LTM thread, but will not necessarily wait for the next scheduled cycle. *

    * * @param liveTable The {@link LiveTable live table} to refresh @@ -777,7 +748,7 @@ public void maybeRefreshTable(@NotNull final LiveTable liveTable, @Override public void requestRefresh(@NotNull final LiveTable liveTable) { final LiveTableRefreshNotification liveTableRefreshNotification = - tables.getFirstReference((final LiveTable found) -> found == liveTable); + tables.getFirstReference((final LiveTable found) -> found == liveTable); if (liveTableRefreshNotification == null) { return; } @@ -788,11 +759,10 @@ public void requestRefresh(@NotNull final LiveTable liveTable) { } /** - * Clear all monitored tables and enqueued notifications to support {@link #enableUnitTestMode() - * unit-tests}. + * Clear all monitored tables and enqueued notifications to support {@link #enableUnitTestMode() unit-tests}. * - * @param after Whether this is *after* a unit test completed. If true, held locks should result - * in an exception and the LivenessScopeStack will be cleared. + * @param after Whether this is *after* a unit test completed. If true, held locks should result in an exception and + * the LivenessScopeStack will be cleared. */ @TestUseOnly public void resetForUnitTests(final boolean after) { @@ -800,22 +770,19 @@ public void resetForUnitTests(final boolean after) { } /** - * Clear all monitored tables and enqueued notifications to support {@link #enableUnitTestMode() - * unit-tests}. + * Clear all monitored tables and enqueued notifications to support {@link #enableUnitTestMode() unit-tests}. * - * @param after Whether this is *after* a unit test completed. If true, held locks should result - * in an exception and the LivenessScopeStack will be cleared. - * @param randomizedNotifications Whether the notification processor should randomize the order - * of delivery + * @param after Whether this is *after* a unit test completed. If true, held locks should result in an exception and + * the LivenessScopeStack will be cleared. 
+ * @param randomizedNotifications Whether the notification processor should randomize the order of delivery * @param seed Seed for randomized notification delivery order and delays - * @param maxRandomizedThreadCount Maximum number of threads handling randomized notification - * delivery + * @param maxRandomizedThreadCount Maximum number of threads handling randomized notification delivery * @param notificationStartDelay Maximum randomized notification start delay * @param notificationAdditionDelay Maximum randomized notification addition delay */ public void resetForUnitTests(boolean after, - final boolean randomizedNotifications, final int seed, final int maxRandomizedThreadCount, - final int notificationStartDelay, final int notificationAdditionDelay) { + final boolean randomizedNotifications, final int seed, final int maxRandomizedThreadCount, + final int notificationStartDelay, final int notificationAdditionDelay) { final List errors = new ArrayList<>(); this.notificationRandomizer = new Random(seed); this.notificationAdditionDelay = notificationAdditionDelay; @@ -828,7 +795,7 @@ public void resetForUnitTests(boolean after, isRefreshThread.remove(); if (randomizedNotifications) { notificationProcessor = makeRandomizedNotificationProcessor(notificationRandomizer, - maxRandomizedThreadCount, notificationStartDelay); + maxRandomizedThreadCount, notificationStartDelay); } else { notificationProcessor = makeNotificationProcessor(); } @@ -854,11 +821,9 @@ public void resetForUnitTests(boolean after, } try { - unitTestRefreshThreadPool - .submit(() -> ensureUnlocked("unit test refresh pool thread", errors)).get(); + unitTestRefreshThreadPool.submit(() -> ensureUnlocked("unit test refresh pool thread", errors)).get(); } catch (InterruptedException | ExecutionException e) { - errors.add("Failed to ensure LTM unlocked from unit test refresh thread pool: " - + e.toString()); + errors.add("Failed to ensure LTM unlocked from unit test refresh thread pool: " + 
e.toString()); } unitTestRefreshThreadPool.shutdownNow(); try { @@ -871,8 +836,7 @@ public void resetForUnitTests(boolean after, unitTestRefreshThreadPool = makeUnitTestRefreshExecutor(); if (!errors.isEmpty()) { - final String message = - "LTM reset for unit tests reported errors:\n\t" + String.join("\n\t", errors); + final String message = "LTM reset for unit tests reported errors:\n\t" + String.join("\n\t", errors); System.err.println(message); if (after) { throw new IllegalStateException(message); @@ -883,9 +847,8 @@ public void resetForUnitTests(boolean after, } /** - * Begin the next {@link LogicalClock#startUpdateCycle() update cycle} while in - * {@link #enableUnitTestMode() unit-test} mode. Note that this happens on a simulated LTM - * refresh thread, rather than this thread. + * Begin the next {@link LogicalClock#startUpdateCycle() update cycle} while in {@link #enableUnitTestMode() + * unit-test} mode. Note that this happens on a simulated LTM refresh thread, rather than this thread. */ @TestUseOnly public void startCycleForUnitTests() { @@ -913,8 +876,8 @@ private void startCycleForUnitTestsInternal() { /** * Do the second half of the update cycle, including flushing notifications, and completing the - * {@link LogicalClock#completeUpdateCycle() LogicalClock} update cycle. Note that this happens - * on a simulated LTM refresh thread, rather than this thread. + * {@link LogicalClock#completeUpdateCycle() LogicalClock} update cycle. Note that this happens on a simulated LTM + * refresh thread, rather than this thread. */ @TestUseOnly public void completeCycleForUnitTests() { @@ -949,8 +912,8 @@ private void completeCycleForUnitTestsInternal() { * @param runnable the runnable to execute. 
*/ @TestUseOnly - public void runWithinUnitTestCycle( - FunctionalInterfaces.ThrowingRunnable runnable) throws T { + public void runWithinUnitTestCycle(FunctionalInterfaces.ThrowingRunnable runnable) + throws T { startCycleForUnitTests(); try { runnable.run(); @@ -975,8 +938,8 @@ public void refreshLiveTableForUnitTests(@NotNull final LiveTable liveTable) { } /** - * Flush a single notification from the LTM queue. Note that this happens on a simulated LTM - * refresh thread, rather than this thread. + * Flush a single notification from the LTM queue. Note that this happens on a simulated LTM refresh thread, rather + * than this thread. * * @return whether a notification was found in the queue */ @@ -988,8 +951,7 @@ public boolean flushOneNotificationForUnitTests() { try { this.notificationProcessor = new ControlledNotificationProcessor(); // noinspection AutoUnboxing,AutoBoxing - return unitTestRefreshThreadPool.submit(this::flushOneNotificationForUnitTestsInternal) - .get(); + return unitTestRefreshThreadPool.submit(this::flushOneNotificationForUnitTestsInternal).get(); } catch (InterruptedException | ExecutionException e) { throw new UncheckedDeephavenException(e); } finally { @@ -1000,8 +962,7 @@ public boolean flushOneNotificationForUnitTests() { @TestUseOnly public boolean flushOneNotificationForUnitTestsInternal() { final IntrusiveDoublyLinkedQueue pendingToEvaluate = - new IntrusiveDoublyLinkedQueue<>( - IntrusiveDoublyLinkedNode.Adapter.getInstance()); + new IntrusiveDoublyLinkedQueue<>(IntrusiveDoublyLinkedNode.Adapter.getInstance()); notificationProcessor.beforeNotificationsDrained(); synchronized (pendingNormalNotifications) { pendingToEvaluate.transferAfterTailFrom(pendingNormalNotifications); @@ -1012,8 +973,7 @@ public boolean flushOneNotificationForUnitTestsInternal() { final Notification notification = it.next(); Assert.eqFalse(notification.isTerminal(), "notification.isTerminal()"); - Assert.eqFalse(notification.mustExecuteWithLtmLock(), - 
"notification.mustExecuteWithLtmLock()"); + Assert.eqFalse(notification.mustExecuteWithLtmLock(), "notification.mustExecuteWithLtmLock()"); if (notification.canExecute(LogicalClock.DEFAULT.currentStep())) { satisfied = notification; @@ -1028,14 +988,14 @@ public boolean flushOneNotificationForUnitTestsInternal() { notificationProcessor.submit(satisfied); } else if (somethingWasPending) { Assert.statementNeverExecuted( - "Did not flush any notifications in unit test mode, yet there were outstanding notifications"); + "Did not flush any notifications in unit test mode, yet there were outstanding notifications"); } return satisfied != null; } /** - * Flush all the normal notifications from the LTM queue. Note that the flushing happens on a - * simulated LTM refresh thread, rather than this thread. + * Flush all the normal notifications from the LTM queue. Note that the flushing happens on a simulated LTM refresh + * thread, rather than this thread. */ @TestUseOnly public void flushAllNormalNotificationsForUnitTests() { @@ -1043,34 +1003,30 @@ public void flushAllNormalNotificationsForUnitTests() { } /** - * Flush all the normal notifications from the LTM queue, continuing until {@code done} returns - * {@code true}. Note that the flushing happens on a simulated LTM refresh thread, rather than - * this thread. + * Flush all the normal notifications from the LTM queue, continuing until {@code done} returns {@code true}. Note + * that the flushing happens on a simulated LTM refresh thread, rather than this thread. 
* * @param done Function to determine when we can stop waiting for new notifications * @return A Runnable that may be used to wait for the concurrent flush job to complete */ @TestUseOnly public Runnable flushAllNormalNotificationsForUnitTests(@NotNull final BooleanSupplier done, - final long timeoutMillis) { + final long timeoutMillis) { Assert.assertion(unitTestMode, "unitTestMode"); Assert.geqZero(timeoutMillis, "timeoutMillis"); final NotificationProcessor existingNotificationProcessor = notificationProcessor; - final ControlledNotificationProcessor controlledNotificationProcessor = - new ControlledNotificationProcessor(); + final ControlledNotificationProcessor controlledNotificationProcessor = new ControlledNotificationProcessor(); notificationProcessor = controlledNotificationProcessor; final Future flushJobFuture = unitTestRefreshThreadPool.submit(() -> { - final long deadlineNanoTime = - System.nanoTime() + TimeUnit.MILLISECONDS.toNanos(timeoutMillis); + final long deadlineNanoTime = System.nanoTime() + TimeUnit.MILLISECONDS.toNanos(timeoutMillis); boolean flushed; while ((flushed = flushOneNotificationForUnitTestsInternal()) || !done.getAsBoolean()) { if (!flushed) { final long remainingNanos = deadlineNanoTime - System.nanoTime(); - if (!controlledNotificationProcessor - .blockUntilNotificationAdded(remainingNanos)) { + if (!controlledNotificationProcessor.blockUntilNotificationAdded(remainingNanos)) { Assert.statementNeverExecuted( - "Unit test failure due to timeout after " + timeoutMillis + " ms"); + "Unit test failure due to timeout after " + timeoutMillis + " ms"); } } } @@ -1097,23 +1053,17 @@ public void wakeRefreshThreadForUnitTests() { } /** - * Flush all non-terminal notifications, complete the logical clock update cycle, then flush all - * terminal notifications. + * Flush all non-terminal notifications, complete the logical clock update cycle, then flush all terminal + * notifications. 
*/ private void flushNotificationsAndCompleteCycle() { - // We cannot proceed with normal notifications, nor are we satisfied, until all LiveTable - // refresh notifications - // have been processed. Note that non-LiveTable notifications that require dependency - // satisfaction are delivered - // first to the pendingNormalNotifications queue, and hence will not be processed until we - // advance to the flush* + // We cannot proceed with normal notifications, nor are we satisfied, until all LiveTable refresh notifications + // have been processed. Note that non-LiveTable notifications that require dependency satisfaction are delivered + // first to the pendingNormalNotifications queue, and hence will not be processed until we advance to the flush* // methods. - // TODO: If and when we properly integrate LiveTables into the dependency tracking system, - // we can - // discontinue this distinct phase, along with the requirement to treat the LTM itself as a - // Dependency. - // Until then, we must delay the beginning of "normal" notification processing until all - // LiveTables are + // TODO: If and when we properly integrate LiveTables into the dependency tracking system, we can + // discontinue this distinct phase, along with the requirement to treat the LTM itself as a Dependency. + // Until then, we must delay the beginning of "normal" notification processing until all LiveTables are // done. See IDS-8039. 
notificationProcessor.doAllWork(); tablesLastSatisfiedStep = LogicalClock.DEFAULT.currentStep(); @@ -1121,8 +1071,7 @@ private void flushNotificationsAndCompleteCycle() { flushNormalNotificationsAndCompleteCycle(); flushTerminalNotifications(); synchronized (pendingNormalNotifications) { - Assert.assertion(pendingNormalNotifications.isEmpty(), - "pendingNormalNotifications.isEmpty()"); + Assert.assertion(pendingNormalNotifications.isEmpty(), "pendingNormalNotifications.isEmpty()"); } } @@ -1131,53 +1080,47 @@ private void flushNotificationsAndCompleteCycle() { */ private void flushNormalNotificationsAndCompleteCycle() { final IntrusiveDoublyLinkedQueue pendingToEvaluate = - new IntrusiveDoublyLinkedQueue<>( - IntrusiveDoublyLinkedNode.Adapter.getInstance()); + new IntrusiveDoublyLinkedQueue<>(IntrusiveDoublyLinkedNode.Adapter.getInstance()); while (true) { - final int outstandingCountAtStart = - notificationProcessor.outstandingNotificationsCount(); + final int outstandingCountAtStart = notificationProcessor.outstandingNotificationsCount(); notificationProcessor.beforeNotificationsDrained(); synchronized (pendingNormalNotifications) { pendingToEvaluate.transferAfterTailFrom(pendingNormalNotifications); if (outstandingCountAtStart == 0 && pendingToEvaluate.isEmpty()) { - // We complete the cycle here before releasing the lock on pendingNotifications, - // so that - // maybeAddNotification can detect scenarios where the notification cannot be - // delivered on the + // We complete the cycle here before releasing the lock on pendingNotifications, so that + // maybeAddNotification can detect scenarios where the notification cannot be delivered on the // desired step. 
LogicalClock.DEFAULT.completeUpdateCycle(); break; } } logDependencies().append(Thread.currentThread().getName()) - .append(": Notification queue size=").append(pendingToEvaluate.size()) - .append(", outstanding=").append(outstandingCountAtStart) - .endl(); + .append(": Notification queue size=").append(pendingToEvaluate.size()) + .append(", outstanding=").append(outstandingCountAtStart) + .endl(); boolean nothingBecameSatisfied = true; for (final Iterator it = pendingToEvaluate.iterator(); it.hasNext();) { final Notification notification = it.next(); Assert.eqFalse(notification.isTerminal(), "notification.isTerminal()"); - Assert.eqFalse(notification.mustExecuteWithLtmLock(), - "notification.mustExecuteWithLtmLock()"); + Assert.eqFalse(notification.mustExecuteWithLtmLock(), "notification.mustExecuteWithLtmLock()"); final boolean satisfied = notification.canExecute(tablesLastSatisfiedStep); if (satisfied) { nothingBecameSatisfied = false; it.remove(); logDependencies().append(Thread.currentThread().getName()) - .append(": Submitting to notification processor ").append(notification) - .endl(); + .append(": Submitting to notification processor ").append(notification).endl(); notificationProcessor.submit(notification); } else { - logDependencies().append(Thread.currentThread().getName()) - .append(": Unmet dependencies for ").append(notification).endl(); + logDependencies().append(Thread.currentThread().getName()).append(": Unmet dependencies for ") + .append(notification).endl(); } } if (outstandingCountAtStart == 0 && nothingBecameSatisfied) { throw new IllegalStateException( - "No outstanding notifications, yet the notification queue is not empty!"); + "No outstanding notifications, yet the notification queue is not empty!"); } if (notificationProcessor.outstandingNotificationsCount() > 0) { notificationProcessor.doWork(); @@ -1185,21 +1128,19 @@ private void flushNormalNotificationsAndCompleteCycle() { } synchronized (pendingNormalNotifications) { 
Assert.eqZero(pendingNormalNotifications.size() + pendingToEvaluate.size(), - "pendingNormalNotifications.size() + pendingToEvaluate.size()"); + "pendingNormalNotifications.size() + pendingToEvaluate.size()"); } } /** - * Flush all {@link Notification#isTerminal() terminal} {@link Notification notifications} from - * the queue. + * Flush all {@link Notification#isTerminal() terminal} {@link Notification notifications} from the queue. * - * @implNote Any notification that may have been queued while the clock's state is Updating must - * be invoked during this cycle's Idle phase. + * @implNote Any notification that may have been queued while the clock's state is Updating must be invoked during + * this cycle's Idle phase. */ private void flushTerminalNotifications() { synchronized (terminalNotifications) { - for (final Iterator it = terminalNotifications.iterator(); it - .hasNext();) { + for (final Iterator it = terminalNotifications.iterator(); it.hasNext();) { final Notification notification = it.next(); Assert.assertion(notification.isTerminal(), "notification.isTerminal()"); @@ -1244,11 +1185,10 @@ private interface NotificationProcessor { * Submit a queue of satisfied notification for processing. * * @param notifications The queue of notifications to - * {@link IntrusiveDoublyLinkedQueue#transferAfterTailFrom(IntrusiveDoublyLinkedQueue) - * transfer} from. Will become empty as a result of successful completion + * {@link IntrusiveDoublyLinkedQueue#transferAfterTailFrom(IntrusiveDoublyLinkedQueue) transfer} from. + * Will become empty as a result of successful completion */ - void submitAll( - @NotNull IntrusiveDoublyLinkedQueue notifications); + void submitAll(@NotNull IntrusiveDoublyLinkedQueue notifications); /** * Query the number of outstanding notifications submitted to this processor. 
@@ -1287,14 +1227,13 @@ void submitAll( } private void runNotification(@NotNull final Notification notification) { - logDependencies().append(Thread.currentThread().getName()).append(": Executing ") - .append(notification).endl(); + logDependencies().append(Thread.currentThread().getName()).append(": Executing ").append(notification).endl(); final LivenessScope scope; final boolean releaseScopeOnClose; if (notification.isTerminal()) { - // Terminal notifications can't create new notifications, so they have no need to - // participate in a shared refresh scope. + // Terminal notifications can't create new notifications, so they have no need to participate in a shared + // refresh scope. scope = new LivenessScope(); releaseScopeOnClose = true; } else { @@ -1304,25 +1243,23 @@ private void runNotification(@NotNull final Notification notification) { releaseScopeOnClose = false; } - try (final SafeCloseable ignored = - scope == null ? null : LivenessScopeStack.open(scope, releaseScopeOnClose)) { + try (final SafeCloseable ignored = scope == null ? 
null : LivenessScopeStack.open(scope, releaseScopeOnClose)) { notification.run(); - logDependencies().append(Thread.currentThread().getName()).append(": Completed ") - .append(notification).endl(); + logDependencies().append(Thread.currentThread().getName()).append(": Completed ").append(notification) + .endl(); } catch (final Exception e) { log.error().append(Thread.currentThread().getName()) - .append(": Exception while executing LiveTableMonitor notification: ") - .append(notification).append(": ").append(e).endl(); + .append(": Exception while executing LiveTableMonitor notification: ").append(notification) + .append(": ").append(e).endl(); ProcessEnvironment.getGlobalFatalErrorReporter() - .report("Exception while processing LiveTableMonitor notification", e); + .report("Exception while processing LiveTableMonitor notification", e); } } private class ConcurrentNotificationProcessor implements NotificationProcessor { private final IntrusiveDoublyLinkedQueue satisfiedNotifications = - new IntrusiveDoublyLinkedQueue<>( - IntrusiveDoublyLinkedNode.Adapter.getInstance()); + new IntrusiveDoublyLinkedQueue<>(IntrusiveDoublyLinkedNode.Adapter.getInstance()); private final Thread[] updateThreads; private final AtomicInteger outstandingNotifications = new AtomicInteger(0); @@ -1331,7 +1268,7 @@ private class ConcurrentNotificationProcessor implements NotificationProcessor { private volatile boolean running = true; public ConcurrentNotificationProcessor(@NotNull final ThreadFactory threadFactory, - final int updateThreadCount) { + final int updateThreadCount) { updateThreads = new Thread[updateThreadCount]; for (int ti = 0; ti < updateThreadCount; ++ti) { updateThreads[ti] = threadFactory.newThread(this::processSatisfiedNotifications); @@ -1341,12 +1278,11 @@ public ConcurrentNotificationProcessor(@NotNull final ThreadFactory threadFactor private void processSatisfiedNotifications() { log.info().append(Thread.currentThread().getName()) - .append(": starting to poll for 
satisfied notifications"); + .append(": starting to poll for satisfied notifications"); while (running) { Notification satisfiedNotification = null; synchronized (satisfiedNotifications) { - while (running - && (satisfiedNotification = satisfiedNotifications.poll()) == null) { + while (running && (satisfiedNotification = satisfiedNotifications.poll()) == null) { try { satisfiedNotifications.wait(); } catch (InterruptedException ignored) { @@ -1388,11 +1324,9 @@ public void submitAll(@NotNull IntrusiveDoublyLinkedQueue notifica protected void submitAt(@NotNull final Notification notification, final int offset) { outstandingNotifications.incrementAndGet(); synchronized (satisfiedNotifications) { - // We clamp the size here because there's a race between the random offset selection - // and other threads + // We clamp the size here because there's a race between the random offset selection and other threads // draining the queue of satisfied notifications. - satisfiedNotifications.insert(notification, - Math.min(offset, satisfiedNotifications.size())); + satisfiedNotifications.insert(notification, Math.min(offset, satisfiedNotifications.size())); satisfiedNotifications.notify(); } } @@ -1446,8 +1380,7 @@ public void beforeNotificationsDrained() { private class QueueNotificationProcessor implements NotificationProcessor { final IntrusiveDoublyLinkedQueue satisfiedNotifications = - new IntrusiveDoublyLinkedQueue<>( - IntrusiveDoublyLinkedNode.Adapter.getInstance()); + new IntrusiveDoublyLinkedQueue<>(IntrusiveDoublyLinkedNode.Adapter.getInstance()); @Override public void submit(@NotNull final Notification notification) { @@ -1500,8 +1433,7 @@ public void submit(@NotNull final Notification notification) { } @Override - public void submitAll( - @NotNull final IntrusiveDoublyLinkedQueue notifications) { + public void submitAll(@NotNull final IntrusiveDoublyLinkedQueue notifications) { Notification notification; while ((notification = notifications.poll()) != null) { 
runNotification(notification); @@ -1540,8 +1472,7 @@ public void beforeNotificationsDrained() { private boolean blockUntilNotificationAdded(final long nanosToWait) { try { - return pendingNormalNotificationsCheckNeeded.tryAcquire(nanosToWait, - TimeUnit.NANOSECONDS); + return pendingNormalNotificationsCheckNeeded.tryAcquire(nanosToWait, TimeUnit.NANOSECONDS); } catch (InterruptedException e) { Assert.statementNeverExecuted(); return false; @@ -1550,8 +1481,8 @@ private boolean blockUntilNotificationAdded(final long nanosToWait) { } /** - * Iterate over all monitored tables and refresh them. This method also ensures that the loop - * runs no faster than {@link #getTargetCycleTime() minimum cycle time}. + * Iterate over all monitored tables and refresh them. This method also ensures that the loop runs no faster than + * {@link #getTargetCycleTime() minimum cycle time}. */ private void refreshTablesAndFlushNotifications() { final Scheduler sched = CommBase.getScheduler(); @@ -1561,8 +1492,7 @@ private void refreshTablesAndFlushNotifications() { if (tables.isEmpty()) { exclusiveLock().doLocked(this::flushTerminalNotifications); } else { - currentCycleLockWaitTotalNanos = - currentCycleYieldTotalNanos = currentCycleSleepTotalNanos = 0L; + currentCycleLockWaitTotalNanos = currentCycleYieldTotalNanos = currentCycleSleepTotalNanos = 0L; WatchdogJob watchdogJob = null; @@ -1581,15 +1511,11 @@ private void refreshTablesAndFlushNotifications() { if (suppressedCycles > 0) { logSuppressedCycles(); } - log.info().append("Live Table Monitor cycleTime=") - .appendDouble(cycleTime / 1_000_000.0) - .append("ms, lockWaitTime=") - .appendDouble(currentCycleLockWaitTotalNanos / 1_000_000.0) - .append("ms, yieldTime=") - .appendDouble(currentCycleYieldTotalNanos / 1_000_000.0) - .append("ms, sleepTime=") - .appendDouble(currentCycleSleepTotalNanos / 1_000_000.0) - .append("ms").endl(); + log.info().append("Live Table Monitor cycleTime=").appendDouble(cycleTime / 1_000_000.0) + 
.append("ms, lockWaitTime=").appendDouble(currentCycleLockWaitTotalNanos / 1_000_000.0) + .append("ms, yieldTime=").appendDouble(currentCycleYieldTotalNanos / 1_000_000.0) + .append("ms, sleepTime=").appendDouble(currentCycleSleepTotalNanos / 1_000_000.0) + .append("ms").endl(); } else if (cycleTime > 0) { suppressedCycles++; suppressedCyclesTotalNanos += cycleTime; @@ -1608,11 +1534,10 @@ private void refreshTablesAndFlushNotifications() { private void logSuppressedCycles() { log.info().append("Minimal Live Table Monitor cycle times: ") - .appendDouble((double) (suppressedCyclesTotalNanos) / 1_000_000.0).append("ms / ") - .append(suppressedCycles).append(" cycles = ") - .appendDouble( - (double) suppressedCyclesTotalNanos / (double) suppressedCycles / 1_000_000.0) - .append("ms/cycle average").endl(); + .appendDouble((double) (suppressedCyclesTotalNanos) / 1_000_000.0).append("ms / ") + .append(suppressedCycles).append(" cycles = ") + .appendDouble((double) suppressedCyclesTotalNanos / (double) suppressedCycles / 1_000_000.0) + .append("ms/cycle average").endl(); suppressedCycles = suppressedCyclesTotalNanos = 0; } @@ -1622,14 +1547,13 @@ private void logSuppressedCycles() { *

    * *

    - * If the delay is interrupted by a {@link #requestRefresh(LiveTable) request} to refresh a - * single table this task will drain the queue of single refresh requests, then continue to wait - * for a complete period if necessary. + * If the delay is interrupted by a {@link #requestRefresh(LiveTable) request} to refresh a single table this task + * will drain the queue of single refresh requests, then continue to wait for a complete period if necessary. *

    * *

    - * If the delay is interrupted for any other {@link InterruptedException reason}, it will be - * logged and continue to wait the remaining period. + * If the delay is interrupted for any other {@link InterruptedException reason}, it will be logged and continue to + * wait the remaining period. *

    * * @param startTime The start time of the last refresh cycle @@ -1638,8 +1562,7 @@ private void logSuppressedCycles() { private void waitForNextCycle(final long startTime, final Scheduler timeSource) { long expectedEndTime = startTime + targetCycleTime; if (minimumInterCycleSleep > 0) { - expectedEndTime = - Math.max(expectedEndTime, timeSource.currentTimeMillis() + minimumInterCycleSleep); + expectedEndTime = Math.max(expectedEndTime, timeSource.currentTimeMillis() + minimumInterCycleSleep); } waitForEndTime(expectedEndTime, timeSource); } @@ -1650,14 +1573,13 @@ private void waitForNextCycle(final long startTime, final Scheduler timeSource) *

    * *

    - * If the delay is interrupted by a {@link #requestRefresh(LiveTable) request} to refresh a - * single table this task will drain the queue of single refresh requests, then continue to wait - * for a complete period if necessary. + * If the delay is interrupted by a {@link #requestRefresh(LiveTable) request} to refresh a single table this task + * will drain the queue of single refresh requests, then continue to wait for a complete period if necessary. *

    * *

    - * If the delay is interrupted for any other {@link InterruptedException reason}, it will be - * logged and continue to wait the remaining period. + * If the delay is interrupted for any other {@link InterruptedException reason}, it will be logged and continue to + * wait the remaining period. *

    * * @param expectedEndTime The time which we should sleep until @@ -1679,8 +1601,8 @@ private void waitForEndTime(final long expectedEndTime, final Scheduler timeSour } } } catch (InterruptedException logAndIgnore) { - log.warn().append("Interrupted while waiting on singleUpdateQueue. Ignoring: ") - .append(logAndIgnore).endl(); + log.warn().append("Interrupted while waiting on singleUpdateQueue. Ignoring: ").append(logAndIgnore) + .endl(); } } } @@ -1693,18 +1615,16 @@ private void waitForEndTime(final long expectedEndTime, final Scheduler timeSour * @implNote If the table is not monitored by the LTM it may not be refreshed */ private void drainSingleUpdateQueue() { - // NB: This is called while we're waiting for the next LTM cycle to start, thus blocking the - // next cycle even - // though it doesn't hold the LTM lock. The only race is with single updates, which are not - // submitted to + // NB: This is called while we're waiting for the next LTM cycle to start, thus blocking the next cycle even + // though it doesn't hold the LTM lock. The only race is with single updates, which are not submitted to // the notification processor (and hence have no conflict over next/prev pointers). final IntrusiveDoublyLinkedQueue liveTableNotifications; synchronized (singleUpdateQueue) { if (singleUpdateQueue.isEmpty()) { return; } - liveTableNotifications = new IntrusiveDoublyLinkedQueue<>( - IntrusiveDoublyLinkedNode.Adapter.getInstance()); + liveTableNotifications = + new IntrusiveDoublyLinkedQueue<>(IntrusiveDoublyLinkedNode.Adapter.getInstance()); singleUpdateQueue.forEach(liveTableNotifications::offer); singleUpdateQueue.clear(); } @@ -1712,52 +1632,45 @@ private void drainSingleUpdateQueue() { } /** - * Refresh a single {@link LiveTable live table} within an {@link LogicalClock update cycle} - * after the LTM has been locked. At the end of the update all {@link Notification - * notifications} will be flushed. 
+ * Refresh a single {@link LiveTable live table} within an {@link LogicalClock update cycle} after the LTM has been + * locked. At the end of the update all {@link Notification notifications} will be flushed. * * @param liveTableNotification The enclosing notification for the {@link LiveTable} to refresh */ - private void refreshOneTable( - @NotNull final LiveTableRefreshNotification liveTableNotification) { + private void refreshOneTable(@NotNull final LiveTableRefreshNotification liveTableNotification) { // We're refreshing this table already, we should not prioritize its refresh again. synchronized (singleUpdateQueue) { - singleUpdateQueue.removeIf( - (final LiveTableRefreshNotification found) -> found == liveTableNotification); + singleUpdateQueue.removeIf((final LiveTableRefreshNotification found) -> found == liveTableNotification); } doRefresh(() -> runNotification(liveTableNotification)); } /** - * Refresh all the {@link LiveTable live tables} within an {@link LogicalClock update cycle} - * after the LTM has been locked. At the end of the updates all {@link Notification - * notifications} will be flushed. + * Refresh all the {@link LiveTable live tables} within an {@link LogicalClock update cycle} after the LTM has been + * locked. At the end of the updates all {@link Notification notifications} will be flushed. */ private void refreshAllTables() { - // We're refreshing all tables already, we should not prioritize refresh for any of them - // again. + // We're refreshing all tables already, we should not prioritize refresh for any of them again. 
synchronized (singleUpdateQueue) { singleUpdateQueue.clear(); } doRefresh(() -> tables.forEach((final LiveTableRefreshNotification liveTableNotification, - final LiveTable unused) -> notificationProcessor.submit(liveTableNotification))); + final LiveTable unused) -> notificationProcessor.submit(liveTableNotification))); } /** - * Perform a refresh cycle, using {@code refreshFunction} to ensure the desired {@link LiveTable - * live tables} are refreshed at the start. + * Perform a refresh cycle, using {@code refreshFunction} to ensure the desired {@link LiveTable live tables} are + * refreshed at the start. * - * @param refreshFunction Function to submit one or more {@link LiveTableRefreshNotification - * live table refresh notifications} to the {@link NotificationProcessor notification - * processor} or run them directly. + * @param refreshFunction Function to submit one or more {@link LiveTableRefreshNotification live table refresh + * notifications} to the {@link NotificationProcessor notification processor} or run them directly. */ private void doRefresh(@NotNull final Runnable refreshFunction) { final long lockStartTimeNanos = System.nanoTime(); exclusiveLock().doLocked(() -> { currentCycleLockWaitTotalNanos += System.nanoTime() - lockStartTimeNanos; synchronized (pendingNormalNotifications) { - Assert.eqZero(pendingNormalNotifications.size(), - "pendingNormalNotifications.size()"); + Assert.eqZero(pendingNormalNotifications.size(), "pendingNormalNotifications.size()"); } Assert.eqNull(refreshScope, "refreshScope"); refreshScope = new LivenessScope(); @@ -1776,7 +1689,7 @@ private void doRefresh(@NotNull final Runnable refreshFunction) { * Re-usable class for adapting {@link LiveTable}s to {@link Notification}s. 
*/ private static final class LiveTableRefreshNotification extends AbstractNotification - implements SimpleReference { + implements SimpleReference { private final WeakReference liveTableRef; @@ -1789,9 +1702,8 @@ private LiveTableRefreshNotification(@NotNull final LiveTable liveTable) { @Override public LogOutput append(@NotNull final LogOutput logOutput) { - return logOutput.append("LiveTableRefreshNotification{") - .append(System.identityHashCode(this)) - .append(", for LiveTable{").append(System.identityHashCode(get())).append("}}"); + return logOutput.append("LiveTableRefreshNotification{").append(System.identityHashCode(this)) + .append(", for LiveTable{").append(System.identityHashCode(get())).append("}}"); } @Override @@ -1821,10 +1733,10 @@ public void clear() { } private static final class SingleUpdateSlotAdapter - implements IntrusiveArraySet.Adapter { + implements IntrusiveArraySet.Adapter { private static final IntrusiveArraySet.Adapter INSTANCE = - new SingleUpdateSlotAdapter(); + new SingleUpdateSlotAdapter(); private SingleUpdateSlotAdapter() {} @@ -1848,8 +1760,7 @@ public LogEntry logDependencies() { } private class LiveTableMonitorThreadFactory extends NamingThreadFactory { - private LiveTableMonitorThreadFactory(@NotNull final ThreadGroup threadGroup, - @NotNull final String name) { + private LiveTableMonitorThreadFactory(@NotNull final ThreadGroup threadGroup, @NotNull final String name) { super(threadGroup, LiveTableMonitor.class, name, true); } @@ -1863,8 +1774,7 @@ public Thread newThread(@NotNull final Runnable r) { } @TestUseOnly - private void ensureUnlocked(@NotNull final String callerDescription, - @Nullable final List errors) { + private void ensureUnlocked(@NotNull final String callerDescription, @Nullable final List errors) { if (exclusiveLock().isHeldByCurrentThread()) { if (errors != null) { errors.add(callerDescription + ": LTM exclusive lock is still held"); @@ -1898,11 +1808,10 @@ private UnitTestRefreshThreadFactory() { public 
Thread newThread(@NotNull final Runnable runnable) { final Thread thread = super.newThread(runnable); final Thread.UncaughtExceptionHandler existing = thread.getUncaughtExceptionHandler(); - thread.setUncaughtExceptionHandler( - (final Thread errorThread, final Throwable throwable) -> { - ensureUnlocked("unit test refresh pool thread exception handler", null); - existing.uncaughtException(errorThread, throwable); - }); + thread.setUncaughtExceptionHandler((final Thread errorThread, final Throwable throwable) -> { + ensureUnlocked("unit test refresh pool thread exception handler", null); + existing.uncaughtException(errorThread, throwable); + }); return thread; } } diff --git a/DB/src/main/java/io/deephaven/db/tables/live/LiveTableMonitorLock.java b/DB/src/main/java/io/deephaven/db/tables/live/LiveTableMonitorLock.java index bbc8fdd4cb4..5f9758988eb 100644 --- a/DB/src/main/java/io/deephaven/db/tables/live/LiveTableMonitorLock.java +++ b/DB/src/main/java/io/deephaven/db/tables/live/LiveTableMonitorLock.java @@ -27,7 +27,7 @@ class LiveTableMonitorLock { private static final Logger log = LoggerFactory.getLogger(LiveTableMonitorLock.class); private static final boolean STACK_DUMP_LOCKS = - Configuration.getInstance().getBooleanWithDefault("LiveTableMonitor.stackDumpLocks", false); + Configuration.getInstance().getBooleanWithDefault("LiveTableMonitor.stackDumpLocks", false); /** * The {@link LogicalClock} used for instrumentation and assertions. 
@@ -76,11 +76,9 @@ class LiveTableMonitorLock { * @param logicalClock The {@link LogicalClock} instance to use * @param allowUnitTestMode for unit tests only */ - LiveTableMonitorLock(@NotNull final LogicalClock logicalClock, - final boolean allowUnitTestMode) { + LiveTableMonitorLock(@NotNull final LogicalClock logicalClock, final boolean allowUnitTestMode) { this.logicalClock = logicalClock; - // TODO: Consider whether using a fair lock causes unacceptable performance degradation - // under significant + // TODO: Consider whether using a fair lock causes unacceptable performance degradation under significant // contention, and determine an alternative policy (maybe relying on Thread.yield() if so. rwLock = new ReentrantReadWriteLock(true); readLock = rwLock.readLock(); @@ -96,9 +94,8 @@ class LiveTableMonitorLock { } /** - * Get the shared lock (similar to {@link java.util.concurrent.locks.ReadWriteLock#readLock()}, - * but with LTM-specific instrumentation). See {@link LiveTableMonitor#sharedLock()} for - * user-facing documentation. + * Get the shared lock (similar to {@link java.util.concurrent.locks.ReadWriteLock#readLock()}, but with + * LTM-specific instrumentation). See {@link LiveTableMonitor#sharedLock()} for user-facing documentation. * * @return The shared lock */ @@ -107,9 +104,8 @@ final AwareFunctionalLock sharedLock() { } /** - * Get the exclusive lock (similar to - * {@link java.util.concurrent.locks.ReadWriteLock#writeLock()} ()}, but with LTM-specific - * instrumentation). See {@link LiveTableMonitor#exclusiveLock()} for user-facing documentation. + * Get the exclusive lock (similar to {@link java.util.concurrent.locks.ReadWriteLock#writeLock()} ()}, but with + * LTM-specific instrumentation). See {@link LiveTableMonitor#exclusiveLock()} for user-facing documentation. 
* * @return The exclusive lock */ @@ -136,8 +132,7 @@ public final void lock() { }); maybeLogStackTrace("locked (shared)"); } catch (Throwable t) { - // If the recorder instrumentation causes us to throw an exception after the - // readLock was successfully + // If the recorder instrumentation causes us to throw an exception after the readLock was successfully // acquired, we'd better unlock it on the way out. if (lockSucceeded.isTrue()) { readLock.unlock(); @@ -150,15 +145,13 @@ public final void lock() { public final void lockInterruptibly() throws InterruptedException { final MutableBoolean lockSucceeded = new MutableBoolean(false); try { - QueryPerformanceRecorder - .withNuggetThrowing("Acquire LiveTableMonitor readLock interruptibly", () -> { - readLock.lockInterruptibly(); - lockSucceeded.setValue(true); - }); + QueryPerformanceRecorder.withNuggetThrowing("Acquire LiveTableMonitor readLock interruptibly", () -> { + readLock.lockInterruptibly(); + lockSucceeded.setValue(true); + }); maybeLogStackTrace("locked (shared)"); } catch (Throwable t) { - // If the recorder instrumentation causes us to throw an exception after the - // readLock was successfully + // If the recorder instrumentation causes us to throw an exception after the readLock was successfully // acquired, we'd better unlock it on the way out. 
if (lockSucceeded.isTrue()) { readLock.unlock(); @@ -177,8 +170,7 @@ public final boolean tryLock() { } @Override - public final boolean tryLock(final long time, @NotNull final TimeUnit unit) - throws InterruptedException { + public final boolean tryLock(final long time, @NotNull final TimeUnit unit) throws InterruptedException { if (readLock.tryLock(time, unit)) { maybeLogStackTrace("locked (shared)"); return true; @@ -219,12 +211,10 @@ public final void lock() { writeLock.lock(); lockSucceeded.setValue(true); }); - Assert.eq(logicalClock.currentState(), "logicalClock.currentState()", - LogicalClock.State.Idle); + Assert.eq(logicalClock.currentState(), "logicalClock.currentState()", LogicalClock.State.Idle); maybeLogStackTrace("locked (exclusive)"); } catch (Throwable t) { - // If the recorder instrumentation causes us to throw an exception after the - // writeLock was + // If the recorder instrumentation causes us to throw an exception after the writeLock was // successfully acquired, we'd better unlock it on the way out. 
if (lockSucceeded.isTrue()) { writeLock.unlock(); @@ -238,17 +228,14 @@ public final void lockInterruptibly() throws InterruptedException { checkForUpgradeAttempt(); final MutableBoolean lockSucceeded = new MutableBoolean(false); try { - QueryPerformanceRecorder - .withNuggetThrowing("Acquire LiveTableMonitor writeLock interruptibly", () -> { - writeLock.lockInterruptibly(); - lockSucceeded.setValue(true); - }); - Assert.eq(logicalClock.currentState(), "logicalClock.currentState()", - LogicalClock.State.Idle); + QueryPerformanceRecorder.withNuggetThrowing("Acquire LiveTableMonitor writeLock interruptibly", () -> { + writeLock.lockInterruptibly(); + lockSucceeded.setValue(true); + }); + Assert.eq(logicalClock.currentState(), "logicalClock.currentState()", LogicalClock.State.Idle); maybeLogStackTrace("locked (exclusive)"); } catch (Throwable t) { - // If the recorder instrumentation causes us to throw an exception after the - // writeLock was + // If the recorder instrumentation causes us to throw an exception after the writeLock was // successfully acquired, we'd better unlock it on the way out. 
if (lockSucceeded.isTrue()) { writeLock.unlock(); @@ -268,8 +255,7 @@ public final boolean tryLock() { } @Override - public final boolean tryLock(final long time, @NotNull final TimeUnit unit) - throws InterruptedException { + public final boolean tryLock(final long time, @NotNull final TimeUnit unit) throws InterruptedException { checkForUpgradeAttempt(); if (writeLock.tryLock(time, unit)) { maybeLogStackTrace("locked (exclusive)"); @@ -280,8 +266,7 @@ public final boolean tryLock(final long time, @NotNull final TimeUnit unit) @Override public final void unlock() { - Assert.eq(logicalClock.currentState(), "logicalClock.currentState()", - LogicalClock.State.Idle); + Assert.eq(logicalClock.currentState(), "logicalClock.currentState()", LogicalClock.State.Idle); writeLock.unlock(); maybeLogStackTrace("unlocked (exclusive)"); } @@ -353,29 +338,28 @@ public Condition newCondition() { @Override public void doLocked( - @NotNull FunctionalInterfaces.ThrowingRunnable runnable) - throws EXCEPTION_TYPE { + @NotNull FunctionalInterfaces.ThrowingRunnable runnable) throws EXCEPTION_TYPE { delegate.doLocked(runnable); } @Override public void doLockedInterruptibly( - @NotNull FunctionalInterfaces.ThrowingRunnable runnable) - throws InterruptedException, EXCEPTION_TYPE { + @NotNull FunctionalInterfaces.ThrowingRunnable runnable) + throws InterruptedException, EXCEPTION_TYPE { delegate.doLockedInterruptibly(runnable); } @Override public RESULT_TYPE computeLocked( - @NotNull FunctionalInterfaces.ThrowingSupplier supplier) - throws EXCEPTION_TYPE { + @NotNull FunctionalInterfaces.ThrowingSupplier supplier) + throws EXCEPTION_TYPE { return delegate.computeLocked(supplier); } @Override public RESULT_TYPE computeLockedInterruptibly( - @NotNull FunctionalInterfaces.ThrowingSupplier supplier) - throws InterruptedException, EXCEPTION_TYPE { + @NotNull FunctionalInterfaces.ThrowingSupplier supplier) + throws InterruptedException, EXCEPTION_TYPE { return 
delegate.computeLockedInterruptibly(supplier); } @@ -390,8 +374,7 @@ String getDebugMessage() { private void checkForUpgradeAttempt() { if (sharedLock.isHeldByCurrentThread()) { - throw new UnsupportedOperationException( - "Cannot upgrade a shared lock to an exclusive lock"); + throw new UnsupportedOperationException("Cannot upgrade a shared lock to an exclusive lock"); } } diff --git a/DB/src/main/java/io/deephaven/db/tables/live/LiveTableRefreshCombiner.java b/DB/src/main/java/io/deephaven/db/tables/live/LiveTableRefreshCombiner.java index 79ce584c652..66c813df250 100644 --- a/DB/src/main/java/io/deephaven/db/tables/live/LiveTableRefreshCombiner.java +++ b/DB/src/main/java/io/deephaven/db/tables/live/LiveTableRefreshCombiner.java @@ -8,11 +8,10 @@ import java.util.Collections; /** - * Combines multiple {@link LiveTable}s into a single one, in order to allow for update - * parallelization within the {@link LiveTableMonitor}. + * Combines multiple {@link LiveTable}s into a single one, in order to allow for update parallelization within the + * {@link LiveTableMonitor}. */ -public class LiveTableRefreshCombiner extends LivenessArtifact - implements LiveTable, LiveTableRegistrar { +public class LiveTableRefreshCombiner extends LivenessArtifact implements LiveTable, LiveTableRegistrar { private final WeakReferenceManager combinedTables = new WeakReferenceManager<>(true); @@ -25,13 +24,10 @@ public void refresh() { public void addTable(@NotNull final LiveTable liveTable) { if (liveTable instanceof DynamicNode) { final DynamicNode dynamicLiveTable = (DynamicNode) liveTable; - // Like a LiveTableMonitor, we need to ensure that DynamicNodes added to this combiner - // are set to refresh. - // NB: addParentReference usually sets refreshing as a side effect, but it's clearer to - // do it explicitly. + // Like a LiveTableMonitor, we need to ensure that DynamicNodes added to this combiner are set to refresh. 
+ // NB: addParentReference usually sets refreshing as a side effect, but it's clearer to do it explicitly. dynamicLiveTable.setRefreshing(true); - // Unlike a LiveTableMonitor, we must also ensure that DynamicNodes added to this - // combiner have the + // Unlike a LiveTableMonitor, we must also ensure that DynamicNodes added to this combiner have the // combiner as a parent, in order to ensure the integrity of the resulting DAG. dynamicLiveTable.addParentReference(this); } diff --git a/DB/src/main/java/io/deephaven/db/tables/live/NotificationQueue.java b/DB/src/main/java/io/deephaven/db/tables/live/NotificationQueue.java index f6244abc93a..2c08cb883a8 100644 --- a/DB/src/main/java/io/deephaven/db/tables/live/NotificationQueue.java +++ b/DB/src/main/java/io/deephaven/db/tables/live/NotificationQueue.java @@ -16,26 +16,23 @@ public interface NotificationQueue { /** * A notification that may be enqueued. */ - interface Notification - extends Runnable, LogOutputAppendable, IntrusiveDoublyLinkedNode { + interface Notification extends Runnable, LogOutputAppendable, IntrusiveDoublyLinkedNode { /** - * Terminal notifications guarantee that they will not queue additional notifications or - * mutate data structures that should result in additional notifications. They are in turn - * guaranteed to be called after all non-terminal notifications for a given cycle through - * the notification queue. + * Terminal notifications guarantee that they will not queue additional notifications or mutate data structures + * that should result in additional notifications. They are in turn guaranteed to be called after all + * non-terminal notifications for a given cycle through the notification queue. * * @return True iff this notification is terminal. */ boolean isTerminal(); /** - * If a terminal notification must be executed on the main LTM thread, it must override this - * method, so that the notification is not executed on the refresh pool. 
+ * If a terminal notification must be executed on the main LTM thread, it must override this method, so that the + * notification is not executed on the refresh pool. * * It is an error to return true if this notification is not terminal * - * @return true if this notification must be executed directly under the protection of the - * LTM lock + * @return true if this notification must be executed directly under the protection of the LTM lock */ boolean mustExecuteWithLtmLock(); @@ -53,11 +50,9 @@ interface Dependency extends LogOutputAppendable { * Is this ancestor satisfied? Note that this method must be safe to call on any thread. * * @param step The step for which we are testing satisfaction - * @return Whether the dependency is satisfied on {@code step} (and will not fire subsequent - * notifications) + * @return Whether the dependency is satisfied on {@code step} (and will not fire subsequent notifications) * @implNote For all practical purposes, all implementations should consider whether the - * {@link LiveTableMonitor} itself is satisfied if they have no other - * dependencies. + * {@link LiveTableMonitor} itself is satisfied if they have no other dependencies. */ boolean satisfied(long step); } @@ -66,19 +61,18 @@ interface IndexUpdateNotification extends Notification { } /** - * Add a notification for this NotificationQueue to deliver (by invoking its run() method). Note - * that implementations may have restrictions as to how and when this method may be used for - * non-terminal notifications, e.g. by only supporting notification queuing from threads that - * can guarantee they are part of an update cycle. + * Add a notification for this NotificationQueue to deliver (by invoking its run() method). Note that + * implementations may have restrictions as to how and when this method may be used for non-terminal notifications, + * e.g. by only supporting notification queuing from threads that can guarantee they are part of an update cycle. 
* * @param notification The notification to add */ void addNotification(@NotNull Notification notification); /** - * Add a notification for this NotificationQueue to deliver (by invoking its run() method), iff - * the delivery step is the current step and the update cycle for that step is still in process. - * This is only supported for non-terminal notifications. + * Add a notification for this NotificationQueue to deliver (by invoking its run() method), iff the delivery step is + * the current step and the update cycle for that step is still in process. This is only supported for non-terminal + * notifications. * * @param notification The notification to add * @param deliveryStep The step to deliver this notification on diff --git a/DB/src/main/java/io/deephaven/db/tables/live/NotificationWrapper.java b/DB/src/main/java/io/deephaven/db/tables/live/NotificationWrapper.java index 90dea18cdfb..80ca09f6ad6 100644 --- a/DB/src/main/java/io/deephaven/db/tables/live/NotificationWrapper.java +++ b/DB/src/main/java/io/deephaven/db/tables/live/NotificationWrapper.java @@ -5,8 +5,7 @@ import org.jetbrains.annotations.NotNull; /** - * Implementation of {@link NotificationQueue.Notification} that wraps another, in order to allow - * overrides. + * Implementation of {@link NotificationQueue.Notification} that wraps another, in order to allow overrides. 
*/ public class NotificationWrapper extends AbstractNotification { @@ -19,8 +18,8 @@ public class NotificationWrapper extends AbstractNotification { @Override public LogOutput append(@NotNull final LogOutput logOutput) { - return logOutput.append("NotificationWrapper{").append(System.identityHashCode(this)) - .append("} of ").append(wrapped); + return logOutput.append("NotificationWrapper{").append(System.identityHashCode(this)).append("} of ") + .append(wrapped); } @Override diff --git a/DB/src/main/java/io/deephaven/db/tables/live/NullIndexUpdateNotification.java b/DB/src/main/java/io/deephaven/db/tables/live/NullIndexUpdateNotification.java index 15887159de7..936a84be875 100644 --- a/DB/src/main/java/io/deephaven/db/tables/live/NullIndexUpdateNotification.java +++ b/DB/src/main/java/io/deephaven/db/tables/live/NullIndexUpdateNotification.java @@ -6,8 +6,8 @@ /** * This is a notification that does not actually notify anything. * - * It is useful for the {@link io.deephaven.db.v2.SwapListener} to have the ability to create a - * notification for its parent before there is anything to notify. + * It is useful for the {@link io.deephaven.db.v2.SwapListener} to have the ability to create a notification for its + * parent before there is anything to notify. */ public class NullIndexUpdateNotification extends AbstractIndexUpdateNotification { public NullIndexUpdateNotification() { diff --git a/DB/src/main/java/io/deephaven/db/tables/live/WaitNotification.java b/DB/src/main/java/io/deephaven/db/tables/live/WaitNotification.java index 1ee62ee1266..f4f26a2ecb4 100644 --- a/DB/src/main/java/io/deephaven/db/tables/live/WaitNotification.java +++ b/DB/src/main/java/io/deephaven/db/tables/live/WaitNotification.java @@ -11,9 +11,8 @@ /** * One-shot {@link NotificationQueue.Notification} that can be delivered when a set of - * {@link NotificationQueue.Dependency dependencies} are satisfied. 
This allows for an external - * observer to wait for multiple dependencies to be satisfied using - * {@link #waitForSatisfaction(long, NotificationQueue.Dependency...)}. + * {@link NotificationQueue.Dependency dependencies} are satisfied. This allows for an external observer to wait for + * multiple dependencies to be satisfied using {@link #waitForSatisfaction(long, NotificationQueue.Dependency...)}. */ public final class WaitNotification extends AbstractNotification { @@ -40,7 +39,7 @@ public boolean canExecute(final long step) { @Override public LogOutput append(LogOutput logOutput) { return logOutput.append(getClass().getSimpleName()).append(": for dependencies") - .append(LogOutput.APPENDABLE_COLLECTION_FORMATTER, Arrays.asList(dependencies)); + .append(LogOutput.APPENDABLE_COLLECTION_FORMATTER, Arrays.asList(dependencies)); } @Override @@ -71,22 +70,18 @@ private void await() throws InterruptedException { * * @param step The step to wait for satisfaction on * @param dependencies The dependencies to wait for - * @return True if the dependencies became satisfied on the specified step, false if the cycle - * had already completed + * @return True if the dependencies became satisfied on the specified step, false if the cycle had already completed */ public static boolean waitForSatisfaction(final long step, - @NotNull final NotificationQueue.Dependency... dependencies) { + @NotNull final NotificationQueue.Dependency... 
dependencies) { final WaitNotification waitNotification = new WaitNotification(dependencies); if (LiveTableMonitor.DEFAULT.maybeAddNotification(waitNotification, step)) { try { waitNotification.await(); } catch (InterruptedException e) { - throw new QueryCancellationException( - "Interrupted while awaiting dependency satisfaction for " - + Arrays.stream(dependencies).map(Objects::toString) - .collect(Collectors.joining(",")) - + " on step " + step, - e); + throw new QueryCancellationException("Interrupted while awaiting dependency satisfaction for " + + Arrays.stream(dependencies).map(Objects::toString).collect(Collectors.joining(",")) + + " on step " + step, e); } return true; } diff --git a/DB/src/main/java/io/deephaven/db/tables/remote/AsyncMethod.java b/DB/src/main/java/io/deephaven/db/tables/remote/AsyncMethod.java index 90ef034ce98..b4a818e42da 100644 --- a/DB/src/main/java/io/deephaven/db/tables/remote/AsyncMethod.java +++ b/DB/src/main/java/io/deephaven/db/tables/remote/AsyncMethod.java @@ -10,9 +10,8 @@ import java.lang.annotation.Target; /** - * Indicates that the annotated method should be executed asynchronously with respect to the LTM. - * Asynchronous execution will not acquire the LTM lock before invocation, and will be run - * concurrently with other asynchronous methods. + * Indicates that the annotated method should be executed asynchronously with respect to the LTM. Asynchronous execution + * will not acquire the LTM lock before invocation, and will be run concurrently with other asynchronous methods. 
*/ @Target(ElementType.METHOD) @Retention(RetentionPolicy.RUNTIME) diff --git a/DB/src/main/java/io/deephaven/db/tables/remote/preview/ArrayPreview.java b/DB/src/main/java/io/deephaven/db/tables/remote/preview/ArrayPreview.java index b583780a7ff..08d22712dad 100644 --- a/DB/src/main/java/io/deephaven/db/tables/remote/preview/ArrayPreview.java +++ b/DB/src/main/java/io/deephaven/db/tables/remote/preview/ArrayPreview.java @@ -22,11 +22,10 @@ public static ArrayPreview fromArray(Object array) { return null; } if (!array.getClass().isArray()) { - throw new IllegalArgumentException( - "Input must be an array, instead input class is " + array.getClass()); + throw new IllegalArgumentException("Input must be an array, instead input class is " + array.getClass()); } - return new ArrayPreview(ChunkType.fromElementType(array.getClass().getComponentType()) - .dbArrayWrap(array).toString(ARRAY_SIZE_CUTOFF)); + return new ArrayPreview(ChunkType.fromElementType(array.getClass().getComponentType()).dbArrayWrap(array) + .toString(ARRAY_SIZE_CUTOFF)); } private ArrayPreview(String displayString) { diff --git a/DB/src/main/java/io/deephaven/db/tables/remote/preview/ColumnPreviewManager.java b/DB/src/main/java/io/deephaven/db/tables/remote/preview/ColumnPreviewManager.java index 96d2fa38916..93019ee23ed 100644 --- a/DB/src/main/java/io/deephaven/db/tables/remote/preview/ColumnPreviewManager.java +++ b/DB/src/main/java/io/deephaven/db/tables/remote/preview/ColumnPreviewManager.java @@ -16,8 +16,7 @@ import java.util.stream.Collectors; /** - * Converts large data types to Preview types. Also wraps non-serializable data types to be - * serializable. + * Converts large data types to Preview types. Also wraps non-serializable data types to be serializable. 
*/ public class ColumnPreviewManager { // Maps types pt preview factories from the addPreview method @@ -25,14 +24,13 @@ public class ColumnPreviewManager { // Factories for arrays and DbArrays private static final PreviewColumnFactory arrayPreviewFactory = - new PreviewColumnFactory<>(Object.class, ArrayPreview.class, ArrayPreview::fromArray); + new PreviewColumnFactory<>(Object.class, ArrayPreview.class, ArrayPreview::fromArray); private static final PreviewColumnFactory dbArrayPreviewFactory = - new PreviewColumnFactory<>(DbArrayBase.class, ArrayPreview.class, - ArrayPreview::fromDbArray); + new PreviewColumnFactory<>(DbArrayBase.class, ArrayPreview.class, ArrayPreview::fromDbArray); // Factory for non-serializable types private static final PreviewColumnFactory nonDisplayableFactory = - new PreviewColumnFactory<>(Object.class, DisplayWrapper.class, DisplayWrapper::make); + new PreviewColumnFactory<>(Object.class, DisplayWrapper.class, DisplayWrapper::make); private static boolean shouldPreview(Class type) { return previewMap.containsKey(type); @@ -41,10 +39,10 @@ private static boolean shouldPreview(Class type) { private static final Set whiteList; static { - final String whiteListString = Configuration.getInstance() - .getStringWithDefault("ColumnPreviewManager.whiteListClasses", ""); - whiteList = Arrays.stream(whiteListString.split(",")).map(String::trim) - .filter(StringUtils::isNotEmpty).collect(Collectors.toSet()); + final String whiteListString = + Configuration.getInstance().getStringWithDefault("ColumnPreviewManager.whiteListClasses", ""); + whiteList = Arrays.stream(whiteListString.split(",")).map(String::trim).filter(StringUtils::isNotEmpty) + .collect(Collectors.toSet()); } /** @@ -57,7 +55,7 @@ private static boolean shouldPreview(Class type) { * @param the destination type */ public static void addPreview(Class sourceType, Class destType, - Function function) { + Function function) { previewMap.put(sourceType, new 
PreviewColumnFactory<>(sourceType, destType, function)); } @@ -69,8 +67,7 @@ public static void addPreview(Class sourceType, Cl */ public static Table applyPreview(final Table table) { if (table instanceof HierarchicalTable) { - // HierarchicalTable tables do not permit `updateView(...)`, and therefore there is - // nothing of value that + // HierarchicalTable tables do not permit `updateView(...)`, and therefore there is nothing of value that // can be applied in this method. short-circuit away return table; } @@ -95,7 +92,7 @@ public static Table applyPreview(final Table table) { selectColumns.add(arrayPreviewFactory.makeColumn(name)); originalTypes.put(name, type.getName()); } else if (!isColumnTypeDisplayable(type) - || !io.deephaven.util.type.TypeUtils.isPrimitiveOrSerializable(type)) { + || !io.deephaven.util.type.TypeUtils.isPrimitiveOrSerializable(type)) { // Always wrap non-displayable and non-serializable types selectColumns.add(nonDisplayableFactory.makeColumn(name)); originalTypes.put(name, type.getName()); @@ -103,8 +100,7 @@ public static Table applyPreview(final Table table) { } if (!selectColumns.isEmpty()) { - result = table - .updateView(selectColumns.toArray(SelectColumn.ZERO_LENGTH_SELECT_COLUMN_ARRAY)); + result = table.updateView(selectColumns.toArray(SelectColumn.ZERO_LENGTH_SELECT_COLUMN_ARRAY)); ((BaseTable) table).copyAttributes(result, BaseTable.CopyAttributeOperation.Preview); result.setAttribute(Table.PREVIEW_PARENT_TABLE, table); @@ -114,8 +110,7 @@ public static Table applyPreview(final Table table) { // noinspection unchecked final Map columnDescriptions = - attribute != null ? new HashMap<>((Map) attribute) - : new HashMap<>(); + attribute != null ? 
new HashMap<>((Map) attribute) : new HashMap<>(); for (String name : originalTypes.keySet()) { String message = "Preview of type: " + originalTypes.get(name); @@ -133,15 +128,14 @@ public static Table applyPreview(final Table table) { } /** - * Indicates if a column type is displayable by the client. This is used to screen out unknown - * classes, unserializable, and anything else that should not be displayed. + * Indicates if a column type is displayable by the client. This is used to screen out unknown classes, + * unserializable, and anything else that should not be displayed. * * @param type the column type * @return true if the type can be displayed by the client, false otherwise. */ public static boolean isColumnTypeDisplayable(Class type) { - // Generally arrays and DbArrays will be wrapped in an ArrayPreview class. This check is - // here for correctness. + // Generally arrays and DbArrays will be wrapped in an ArrayPreview class. This check is here for correctness. if (type.isArray() || DbArrayBase.class.isAssignableFrom(type)) { // For arrays, we need to check that the component type is displayable return isColumnTypeDisplayable(type.getComponentType()); @@ -154,9 +148,9 @@ public static boolean isColumnTypeDisplayable(Class type) { // BigInt, BigDecimal // DbDateTime return type.isPrimitive() || io.deephaven.util.type.TypeUtils.isBoxedType(type) - || io.deephaven.util.type.TypeUtils.isString(type) - || io.deephaven.util.type.TypeUtils.isBigNumeric(type) || TypeUtils.isDateTime(type) - || isOnWhiteList(type); + || io.deephaven.util.type.TypeUtils.isString(type) + || io.deephaven.util.type.TypeUtils.isBigNumeric(type) || TypeUtils.isDateTime(type) + || isOnWhiteList(type); } /** diff --git a/DB/src/main/java/io/deephaven/db/tables/remote/preview/DisplayWrapper.java b/DB/src/main/java/io/deephaven/db/tables/remote/preview/DisplayWrapper.java index 6fb893bce32..08ea951605b 100644 --- 
a/DB/src/main/java/io/deephaven/db/tables/remote/preview/DisplayWrapper.java +++ b/DB/src/main/java/io/deephaven/db/tables/remote/preview/DisplayWrapper.java @@ -6,12 +6,12 @@ import java.io.Serializable; /** - * Wraps Objects that cannot be displayed (e.g. not serializable or an unknown class) and allows - * them to be displayed as a String. + * Wraps Objects that cannot be displayed (e.g. not serializable or an unknown class) and allows them to be displayed as + * a String. */ public class DisplayWrapper implements Serializable { private static final int MAX_CHARACTERS = - Configuration.getInstance().getIntegerWithDefault("DisplayWrapper.maxCharacters", 10000); + Configuration.getInstance().getIntegerWithDefault("DisplayWrapper.maxCharacters", 10000); private final String displayString; /** diff --git a/DB/src/main/java/io/deephaven/db/tables/remote/preview/PreviewType.java b/DB/src/main/java/io/deephaven/db/tables/remote/preview/PreviewType.java index e06e4a66d35..62d50573a47 100644 --- a/DB/src/main/java/io/deephaven/db/tables/remote/preview/PreviewType.java +++ b/DB/src/main/java/io/deephaven/db/tables/remote/preview/PreviewType.java @@ -3,8 +3,7 @@ import java.io.Serializable; /** - * A Preview Type is used for columns that should be previewed rather than sending all of the data - * for each value. + * A Preview Type is used for columns that should be previewed rather than sending all of the data for each value. 
*/ public interface PreviewType extends Serializable { } diff --git a/DB/src/main/java/io/deephaven/db/tables/select/AjMatchPairFactory.java b/DB/src/main/java/io/deephaven/db/tables/select/AjMatchPairFactory.java index 90b2c914bdf..8527d3f176a 100644 --- a/DB/src/main/java/io/deephaven/db/tables/select/AjMatchPairFactory.java +++ b/DB/src/main/java/io/deephaven/db/tables/select/AjMatchPairFactory.java @@ -15,8 +15,8 @@ import static io.deephaven.db.tables.select.SelectFactoryConstants.*; /** - * MatchPair Factory that accepts final value of either =, <=, or <, > >= and returns a - * Pair<MatchPair, Table.AsOfMatchRule>. + * MatchPair Factory that accepts final value of either =, <=, or <, > >= and returns a Pair<MatchPair, + * Table.AsOfMatchRule>. */ public class AjMatchPairFactory { private enum AsOfMatchRule { @@ -25,8 +25,7 @@ private enum AsOfMatchRule { Table.AsOfMatchRule toTableMatchRule(boolean reverse) { switch (this) { case Equal: - return reverse ? Table.AsOfMatchRule.GREATER_THAN_EQUAL - : Table.AsOfMatchRule.LESS_THAN_EQUAL; + return reverse ? Table.AsOfMatchRule.GREATER_THAN_EQUAL : Table.AsOfMatchRule.LESS_THAN_EQUAL; case Less_Than: return Table.AsOfMatchRule.LESS_THAN; case Less_Than_Equal: @@ -40,94 +39,72 @@ Table.AsOfMatchRule toTableMatchRule(boolean reverse) { } } - private static final ExpressionParser> finalColumnParser = - new ExpressionParser<>(); + private static final ExpressionParser> finalColumnParser = new ExpressionParser<>(); static { - finalColumnParser - .registerFactory(new AbstractExpressionFactory>( + finalColumnParser.registerFactory(new AbstractExpressionFactory>( START_PTRN + "(" + ID_PTRN + ")" + END_PTRN) { - @Override - public Pair getExpression(String expression, - Matcher matcher, Object... 
args) { - String columnName = matcher.group(1); - return new Pair<>(new MatchPair(columnName, columnName), AsOfMatchRule.Equal); - } - }); - finalColumnParser - .registerFactory(new AbstractExpressionFactory>( + @Override + public Pair getExpression(String expression, Matcher matcher, Object... args) { + String columnName = matcher.group(1); + return new Pair<>(new MatchPair(columnName, columnName), AsOfMatchRule.Equal); + } + }); + finalColumnParser.registerFactory(new AbstractExpressionFactory>( START_PTRN + "(" + ID_PTRN + ")\\s*==?\\s*(" + ID_PTRN + ")" + END_PTRN) { - @Override - public Pair getExpression(String expression, - Matcher matcher, Object... args) { - return new Pair<>(new MatchPair(matcher.group(1), matcher.group(2)), - AsOfMatchRule.Equal); - } - }); - finalColumnParser - .registerFactory(new AbstractExpressionFactory>( + @Override + public Pair getExpression(String expression, Matcher matcher, Object... args) { + return new Pair<>(new MatchPair(matcher.group(1), matcher.group(2)), AsOfMatchRule.Equal); + } + }); + finalColumnParser.registerFactory(new AbstractExpressionFactory>( START_PTRN + "(" + ID_PTRN + ")\\s*<=\\s*(" + ID_PTRN + ")" + END_PTRN) { - @Override - public Pair getExpression(String expression, - Matcher matcher, Object... args) { - return new Pair<>(new MatchPair(matcher.group(1), matcher.group(2)), - AsOfMatchRule.Less_Than_Equal); - } - }); - finalColumnParser - .registerFactory(new AbstractExpressionFactory>( + @Override + public Pair getExpression(String expression, Matcher matcher, Object... args) { + return new Pair<>(new MatchPair(matcher.group(1), matcher.group(2)), AsOfMatchRule.Less_Than_Equal); + } + }); + finalColumnParser.registerFactory(new AbstractExpressionFactory>( START_PTRN + "(" + ID_PTRN + ")\\s*<\\s*(" + ID_PTRN + ")" + END_PTRN) { - @Override - public Pair getExpression(String expression, - Matcher matcher, Object... 
args) { - return new Pair<>(new MatchPair(matcher.group(1), matcher.group(2)), - AsOfMatchRule.Less_Than); - } - }); - finalColumnParser - .registerFactory(new AbstractExpressionFactory>( + @Override + public Pair getExpression(String expression, Matcher matcher, Object... args) { + return new Pair<>(new MatchPair(matcher.group(1), matcher.group(2)), AsOfMatchRule.Less_Than); + } + }); + finalColumnParser.registerFactory(new AbstractExpressionFactory>( START_PTRN + "(" + ID_PTRN + ")\\s*>=\\s*(" + ID_PTRN + ")" + END_PTRN) { - @Override - public Pair getExpression(String expression, - Matcher matcher, Object... args) { - return new Pair<>(new MatchPair(matcher.group(1), matcher.group(2)), - AsOfMatchRule.Greater_Than_Equal); - } - }); - finalColumnParser - .registerFactory(new AbstractExpressionFactory>( + @Override + public Pair getExpression(String expression, Matcher matcher, Object... args) { + return new Pair<>(new MatchPair(matcher.group(1), matcher.group(2)), AsOfMatchRule.Greater_Than_Equal); + } + }); + finalColumnParser.registerFactory(new AbstractExpressionFactory>( START_PTRN + "(" + ID_PTRN + ")\\s*>\\s*(" + ID_PTRN + ")" + END_PTRN) { - @Override - public Pair getExpression(String expression, - Matcher matcher, Object... args) { - return new Pair<>(new MatchPair(matcher.group(1), matcher.group(2)), - AsOfMatchRule.Greater_Than); - } - }); + @Override + public Pair getExpression(String expression, Matcher matcher, Object... args) { + return new Pair<>(new MatchPair(matcher.group(1), matcher.group(2)), AsOfMatchRule.Greater_Than); + } + }); } - public static Pair getExpression(boolean reverse, - String match) { + public static Pair getExpression(boolean reverse, String match) { Pair parse = finalColumnParser.parse(match); return new Pair<>(parse.first, parse.second.toTableMatchRule(reverse)); } @SuppressWarnings("WeakerAccess") - public static Pair getExpressions(boolean reverse, - String... 
matches) { + public static Pair getExpressions(boolean reverse, String... matches) { MatchPair[] result = new MatchPair[matches.length]; for (int ii = 0; ii < matches.length - 1; ++ii) { result[ii] = MatchPairFactory.getExpression(matches[ii]); } - Pair finalColumn = - getExpression(reverse, matches[matches.length - 1]); + Pair finalColumn = getExpression(reverse, matches[matches.length - 1]); result[matches.length - 1] = finalColumn.first; return new Pair<>(result, finalColumn.second); } - public static Pair getExpressions(boolean reverse, - Collection matches) { + public static Pair getExpressions(boolean reverse, Collection matches) { return getExpressions(reverse, matches.toArray(new String[matches.size()])); } } diff --git a/DB/src/main/java/io/deephaven/db/tables/select/MatchPair.java b/DB/src/main/java/io/deephaven/db/tables/select/MatchPair.java index 907e22fc3c1..b103b177c05 100644 --- a/DB/src/main/java/io/deephaven/db/tables/select/MatchPair.java +++ b/DB/src/main/java/io/deephaven/db/tables/select/MatchPair.java @@ -89,44 +89,41 @@ public static String[] getRightColumns(MatchPair... 
matchPairs) { return Arrays.stream(matchPairs).map(MatchPair::right).toArray(String[]::new); } - public static final LogOutput.ObjFormatter MATCH_PAIR_ARRAY_FORMATTER = - (logOutput, matchPairs) -> { - if (matchPairs == null) { - logOutput.append("null"); - } else { - boolean first = true; - logOutput.append('['); - for (MatchPair mp : matchPairs) { - if (!first) { - logOutput.append(", "); - } - if (mp.left().equals(mp.right())) { - logOutput.append(mp.left()); - } else { - logOutput.append(mp.left()).append('=').append(mp.right()); - } - first = false; + public static final LogOutput.ObjFormatter MATCH_PAIR_ARRAY_FORMATTER = (logOutput, matchPairs) -> { + if (matchPairs == null) { + logOutput.append("null"); + } else { + boolean first = true; + logOutput.append('['); + for (MatchPair mp : matchPairs) { + if (!first) { + logOutput.append(", "); } - logOutput.append(']'); - } - }; - - public static final LogOutput.ObjFormatter MATCH_PAIR_FORMATTER = - (logOutput, mp) -> { - if (mp == null) { - logOutput.append("null"); - } else { if (mp.left().equals(mp.right())) { logOutput.append(mp.left()); } else { logOutput.append(mp.left()).append('=').append(mp.right()); } + first = false; + } + logOutput.append(']'); + } + }; + + public static final LogOutput.ObjFormatter MATCH_PAIR_FORMATTER = (logOutput, mp) -> { + if (mp == null) { + logOutput.append("null"); + } else { + if (mp.left().equals(mp.right())) { + logOutput.append(mp.left()); + } else { + logOutput.append(mp.left()).append('=').append(mp.right()); } - }; + } + }; public static String matchString(final MatchPair[] matchPairArray) { - return new LogOutputStringImpl().append(MATCH_PAIR_ARRAY_FORMATTER, matchPairArray) - .toString(); + return new LogOutputStringImpl().append(MATCH_PAIR_ARRAY_FORMATTER, matchPairArray).toString(); } public static String matchString(final MatchPair matchPair) { @@ -141,7 +138,7 @@ public boolean equals(Object o) { return false; final MatchPair matchPair = (MatchPair) o; return 
Objects.equals(leftColumn, matchPair.leftColumn) && - Objects.equals(rightColumn, matchPair.rightColumn); + Objects.equals(rightColumn, matchPair.rightColumn); } @Override diff --git a/DB/src/main/java/io/deephaven/db/tables/select/MatchPairFactory.java b/DB/src/main/java/io/deephaven/db/tables/select/MatchPairFactory.java index c588de7e2ed..10b82b14509 100644 --- a/DB/src/main/java/io/deephaven/db/tables/select/MatchPairFactory.java +++ b/DB/src/main/java/io/deephaven/db/tables/select/MatchPairFactory.java @@ -18,16 +18,15 @@ public class MatchPairFactory { private static final ExpressionParser parser = new ExpressionParser<>(); static { - parser.registerFactory( - new AbstractExpressionFactory(START_PTRN + "(" + ID_PTRN + ")" + END_PTRN) { - @Override - public MatchPair getExpression(String expression, Matcher matcher, Object... args) { - String columnName = matcher.group(1); - return new MatchPair(columnName, columnName); - } - }); + parser.registerFactory(new AbstractExpressionFactory(START_PTRN + "(" + ID_PTRN + ")" + END_PTRN) { + @Override + public MatchPair getExpression(String expression, Matcher matcher, Object... args) { + String columnName = matcher.group(1); + return new MatchPair(columnName, columnName); + } + }); parser.registerFactory(new AbstractExpressionFactory( - START_PTRN + "(" + ID_PTRN + ")\\s*==?\\s*(" + ID_PTRN + ")" + END_PTRN) { + START_PTRN + "(" + ID_PTRN + ")\\s*==?\\s*(" + ID_PTRN + ")" + END_PTRN) { @Override public MatchPair getExpression(String expression, Matcher matcher, Object... 
args) { return new MatchPair(matcher.group(1), matcher.group(2)); diff --git a/DB/src/main/java/io/deephaven/db/tables/select/Param.java b/DB/src/main/java/io/deephaven/db/tables/select/Param.java index ab12545c699..d3ed1544825 100644 --- a/DB/src/main/java/io/deephaven/db/tables/select/Param.java +++ b/DB/src/main/java/io/deephaven/db/tables/select/Param.java @@ -40,12 +40,12 @@ public Param(String name, T value) { public Class getDeclaredType() { final Class type = value == null ? Object.class - : value instanceof Enum ? ((Enum) value).getDeclaringClass() - // in newer versions of groovy, our closures will be subtypes that evade the logic - // in getDeclaredType - // (they will return a null Class#getCanonicalName b/c they are dynamic classes). - : value instanceof Closure ? Closure.class - : value.getClass(); + : value instanceof Enum ? ((Enum) value).getDeclaringClass() + // in newer versions of groovy, our closures will be subtypes that evade the logic in + // getDeclaredType + // (they will return a null Class#getCanonicalName b/c they are dynamic classes). + : value instanceof Closure ? Closure.class + : value.getClass(); return getDeclaredType(type); } @@ -87,8 +87,7 @@ protected static String getDeclaredTypeName(Class type) { } /** - * Get a map from binary name to declared type for the dynamic classes referenced by an array of - * param classes. + * Get a map from binary name to declared type for the dynamic classes referenced by an array of param classes. * * @param params The parameters to operate on * @return The result map @@ -113,8 +112,8 @@ private static void visitParameterClass(final Map> found, Class if (seen != null) { if (seen != cls) { throw new UnsupportedOperationException( - "Parameter list may not include multiple versions of the same class: " - + name + ". Was the class redefined in your shell?"); + "Parameter list may not include multiple versions of the same class: " + + name + ". 
Was the class redefined in your shell?"); } // we don't need to revisit this class return; diff --git a/DB/src/main/java/io/deephaven/db/tables/select/QueryScope.java b/DB/src/main/java/io/deephaven/db/tables/select/QueryScope.java index 93bd7b4e0fa..3addb4cf35e 100644 --- a/DB/src/main/java/io/deephaven/db/tables/select/QueryScope.java +++ b/DB/src/main/java/io/deephaven/db/tables/select/QueryScope.java @@ -27,8 +27,7 @@ public abstract class QueryScope implements LogOutputAppendable { // ----------------------------------------------------------------------------------------------------------------- private static volatile QueryScope defaultScope = null; - private static final ThreadLocal currentScope = - ThreadLocal.withInitial(QueryScope::getDefaultScope); + private static final ThreadLocal currentScope = ThreadLocal.withInitial(QueryScope::getDefaultScope); private static QueryScope getDefaultScope() { if (defaultScope == null) { @@ -50,20 +49,17 @@ private static QueryScope getDefaultScope() { */ public static synchronized void setDefaultScope(final QueryScope scope) { if (defaultScope != null) { - throw new IllegalStateException( - "It's too late to set default scope; it's already set to: " + defaultScope); + throw new IllegalStateException("It's too late to set default scope; it's already set to: " + defaultScope); } defaultScope = Objects.requireNonNull(scope); } /** * Sets the default {@link QueryScope} to be used in the current context. By default there is a - * {@link StandaloneImpl} created by the static initializer and set as the defaultInstance. The - * method allows the use of a new or separate instance as the default instance for static - * methods. + * {@link StandaloneImpl} created by the static initializer and set as the defaultInstance. The method allows the + * use of a new or separate instance as the default instance for static methods. * - * @param queryScope {@link QueryScope} to set as the new default instance; null clears the - * scope. 
+ * @param queryScope {@link QueryScope} to set as the new default instance; null clears the scope. */ public static synchronized void setScope(final QueryScope queryScope) { if (queryScope == null) { @@ -83,8 +79,7 @@ public static QueryScope getScope() { } /** - * Adds a parameter to the default instance {@link QueryScope}, or updates the value of an - * existing parameter. + * Adds a parameter to the default instance {@link QueryScope}, or updates the value of an existing parameter. * * @param name String name of the parameter to add. * @param value value to assign to the parameter. @@ -127,8 +122,8 @@ public static T getParamValue(final String name) throws MissingVariableExcep private volatile String queryNameValue; /** - * A type of RuntimeException thrown when a variable referenced within the {@link QueryScope} is - * not defined or, more likely, has not been added to the scope. + * A type of RuntimeException thrown when a variable referenced within the {@link QueryScope} is not defined or, + * more likely, has not been added to the scope. */ public static class MissingVariableException extends RuntimeException { @@ -156,7 +151,7 @@ private static Object applyValueConversions(final Object value) { final String stringValue = (String) value; if (stringValue.length() > 0 && stringValue.charAt(0) == '\'' - && stringValue.charAt(stringValue.length() - 1) == '\'') { + && stringValue.charAt(stringValue.length() - 1) == '\'') { final String datetimeString = stringValue.substring(1, stringValue.length() - 1); final DBDateTime dateTime = DBTimeUtils.convertDateTimeQuiet(datetimeString); @@ -190,8 +185,8 @@ private static Object applyValueConversions(final Object value) { * * @param names parameter names * @return A newly-constructed array of newly-constructed Params. - * @throws io.deephaven.db.tables.select.QueryScope.MissingVariableException If any of the named - * scope variables does not exist. 
+ * @throws io.deephaven.db.tables.select.QueryScope.MissingVariableException If any of the named scope variables + * does not exist. */ public final Param[] getParams(final Collection names) throws MissingVariableException { final Param[] result = new Param[names.size()]; @@ -226,8 +221,8 @@ public final Param[] getParams(final Collection names) throws MissingVar * * @param name parameter name * @return newly-constructed Param (name + value-snapshot pair). - * @throws io.deephaven.db.tables.select.QueryScope.MissingVariableException If any of the named - * scope variables does not exist. + * @throws io.deephaven.db.tables.select.QueryScope.MissingVariableException If any of the named scope variables + * does not exist. */ protected abstract Param createParam(final String name) throws MissingVariableException; @@ -236,8 +231,7 @@ public final Param[] getParams(final Collection names) throws MissingVar * * @param name parameter name. * @return parameter value. - * @throws io.deephaven.db.tables.select.QueryScope.MissingVariableException If no such scope - * parameter exists. + * @throws io.deephaven.db.tables.select.QueryScope.MissingVariableException If no such scope parameter exists. */ public abstract T readParamValue(final String name) throws MissingVariableException; @@ -259,8 +253,8 @@ public final Param[] getParams(final Collection names) throws MissingVar public abstract void putParam(final String name, final T value); /** - * Add an object's public members (referenced reflectively, not a shallow copy!) to this scope - * if supported. Note: This is an optional method. + * Add an object's public members (referenced reflectively, not a shallow copy!) to this scope if supported. + * Note: This is an optional method. * * @param object object to add public members from. 
*/ @@ -304,7 +298,7 @@ public LogOutput append(@NotNull final LogOutput logOutput) { logOutput.nl().append(paramName).append("="); if (paramValue == this) { logOutput.append("this QueryScope (" + paramValue.getClass().getName() + ':' - + System.identityHashCode(paramValue) + ')'); + + System.identityHashCode(paramValue) + ')'); } else if (paramValue instanceof LogOutputAppendable) { logOutput.append((LogOutputAppendable) paramValue); } else { @@ -321,7 +315,7 @@ public LogOutput append(@NotNull final LogOutput logOutput) { public static class StandaloneImpl extends QueryScope { private final KeyedObjectHashMap valueRetrievers = - new KeyedObjectHashMap<>(new ValueRetrieverNameKey()); + new KeyedObjectHashMap<>(new ValueRetrieverNameKey()); public StandaloneImpl() {} @@ -368,10 +362,8 @@ public T readParamValue(final String name, final T defaultValue) { @Override public void putParam(final String name, final T value) { NameValidator.validateQueryParameterName(name); - // TODO: Can I get rid of this applyValueConversions? It's too inconsistent to feel - // safe. - valueRetrievers.put(name, - new SimpleValueRetriever<>(name, applyValueConversions(value))); + // TODO: Can I get rid of this applyValueConversions? It's too inconsistent to feel safe. 
+ valueRetrievers.put(name, new SimpleValueRetriever<>(name, applyValueConversions(value))); } public void putObjectFields(final Object object) { @@ -399,8 +391,7 @@ public String getName() { public abstract Param createParam(); } - private static class ValueRetrieverNameKey - extends KeyedObjectKey.Basic { + private static class ValueRetrieverNameKey extends KeyedObjectKey.Basic { @Override public String getKey(ValueRetriever valueRetriever) { @@ -489,15 +480,13 @@ public boolean hasParamName(String name) { } @Override - protected synchronized Param createParam(final String name) - throws MissingVariableException { + protected synchronized Param createParam(final String name) throws MissingVariableException { // noinspection unchecked return new Param<>(name, (T) scriptSession.getVariable(name)); } @Override - public synchronized T readParamValue(final String name) - throws MissingVariableException { + public synchronized T readParamValue(final String name) throws MissingVariableException { // noinspection unchecked return (T) scriptSession.getVariable(name); } diff --git a/DB/src/main/java/io/deephaven/db/tables/select/SelectColumnFactory.java b/DB/src/main/java/io/deephaven/db/tables/select/SelectColumnFactory.java index 4ac053b1fad..2b793fcd6ef 100644 --- a/DB/src/main/java/io/deephaven/db/tables/select/SelectColumnFactory.java +++ b/DB/src/main/java/io/deephaven/db/tables/select/SelectColumnFactory.java @@ -30,28 +30,26 @@ public class SelectColumnFactory { // io.deephaven.web.shared.data.CustomColumnDescriptor#extractColumnName /* - * SwitchColumn will explicitly check if is a column in the source table first, - * and use FormulaColumn#createFormulaColumn(String, String, FormulaParserConfiguration) - * where appropriate. + * SwitchColumn will explicitly check if is a column in the source table first, and use + * FormulaColumn#createFormulaColumn(String, String, FormulaParserConfiguration) where appropriate. 
*/ // = parser.registerFactory(new AbstractExpressionFactory( - START_PTRN + "(" + ID_PTRN + ")\\s*=\\s*(" + ANYTHING + ")" + END_PTRN) { + START_PTRN + "(" + ID_PTRN + ")\\s*=\\s*(" + ANYTHING + ")" + END_PTRN) { @Override public SelectColumn getExpression(String expression, Matcher matcher, Object... args) { - return new SwitchColumn(matcher.group(1), matcher.group(2), - (FormulaParserConfiguration) args[0]); + return new SwitchColumn(matcher.group(1), matcher.group(2), (FormulaParserConfiguration) args[0]); } }); // - parser.registerFactory(new AbstractExpressionFactory( - START_PTRN + "(" + ID_PTRN + ")" + END_PTRN) { - @Override - public SelectColumn getExpression(String expression, Matcher matcher, Object... args) { - return new SourceColumn(matcher.group(1)); - } - }); + parser.registerFactory( + new AbstractExpressionFactory(START_PTRN + "(" + ID_PTRN + ")" + END_PTRN) { + @Override + public SelectColumn getExpression(String expression, Matcher matcher, Object... args) { + return new SourceColumn(matcher.group(1)); + } + }); // If you add more logic here, please kindly update // io.deephaven.web.shared.data.CustomColumnDescriptor#extractColumnName @@ -59,28 +57,23 @@ public SelectColumn getExpression(String expression, Matcher matcher, Object... public static SelectColumn getExpression(String expression) { Pair parserAndExpression = - FormulaParserConfiguration.extractParserAndExpression(expression); + FormulaParserConfiguration.extractParserAndExpression(expression); return parser.parse(parserAndExpression.second, parserAndExpression.first); } public static SelectColumn[] getExpressions(String... 
expressions) { - return Arrays.stream(expressions).map(SelectColumnFactory::getExpression) - .toArray(SelectColumn[]::new); + return Arrays.stream(expressions).map(SelectColumnFactory::getExpression).toArray(SelectColumn[]::new); } public static SelectColumn[] getExpressions(Collection expressions) { - return expressions.stream().map(SelectColumnFactory::getExpression) - .toArray(SelectColumn[]::new); + return expressions.stream().map(SelectColumnFactory::getExpression).toArray(SelectColumn[]::new); } private static final Pattern formatPattern = - Pattern.compile(START_PTRN + "(" + ID_PTRN + "|\\*)\\s*=\\s*(.*\\S+)" + END_PTRN); - private static final Pattern coloringPattern = - Pattern.compile(START_PTRN + "Color\\((.*\\S+)\\)" + END_PTRN); - private static final Pattern numberFormatPattern = - Pattern.compile(START_PTRN + "Decimal\\((.*\\S+)\\)" + END_PTRN); - private static final Pattern dateFormatPattern = - Pattern.compile(START_PTRN + "Date\\((.*\\S+)\\)" + END_PTRN); + Pattern.compile(START_PTRN + "(" + ID_PTRN + "|\\*)\\s*=\\s*(.*\\S+)" + END_PTRN); + private static final Pattern coloringPattern = Pattern.compile(START_PTRN + "Color\\((.*\\S+)\\)" + END_PTRN); + private static final Pattern numberFormatPattern = Pattern.compile(START_PTRN + "Decimal\\((.*\\S+)\\)" + END_PTRN); + private static final Pattern dateFormatPattern = Pattern.compile(START_PTRN + "Date\\((.*\\S+)\\)" + END_PTRN); @SuppressWarnings("WeakerAccess") @@ -101,34 +94,28 @@ public static SelectColumn getFormatExpression(String expression) { } if (numberMatcher.matches()) { - return FormulaColumn.createFormulaColumn( - columnName + ColumnFormattingValues.TABLE_NUMERIC_FORMAT_NAME, - numberMatcher.group(1), - FormulaParserConfiguration.Deephaven); + return FormulaColumn.createFormulaColumn(columnName + ColumnFormattingValues.TABLE_NUMERIC_FORMAT_NAME, + numberMatcher.group(1), + FormulaParserConfiguration.Deephaven); } else if (dateMatcher.matches()) { - return 
FormulaColumn.createFormulaColumn( - columnName + ColumnFormattingValues.TABLE_DATE_FORMAT_NAME, - dateMatcher.group(1), FormulaParserConfiguration.Deephaven); + return FormulaColumn.createFormulaColumn(columnName + ColumnFormattingValues.TABLE_DATE_FORMAT_NAME, + dateMatcher.group(1), FormulaParserConfiguration.Deephaven); } else { - return FormulaColumn.createFormulaColumn( - columnName + ColumnFormattingValues.TABLE_FORMAT_NAME, - "io.deephaven.db.util.DBColorUtil.toLong(" - + (colorMatcher.matches() ? colorMatcher.group(1) : topMatcher.group(2)) + ")", - FormulaParserConfiguration.Deephaven); + return FormulaColumn.createFormulaColumn(columnName + ColumnFormattingValues.TABLE_FORMAT_NAME, + "io.deephaven.db.util.DBColorUtil.toLong(" + + (colorMatcher.matches() ? colorMatcher.group(1) : topMatcher.group(2)) + ")", + FormulaParserConfiguration.Deephaven); } } public static DhFormulaColumn[] getFormatExpressions(String... expressions) { - return Arrays.stream(expressions).map(SelectColumnFactory::getFormatExpression) - .toArray(DhFormulaColumn[]::new); + return Arrays.stream(expressions).map(SelectColumnFactory::getFormatExpression).toArray(DhFormulaColumn[]::new); } /** - * Returns the base column-name used to create a formatting column via - * {@link #getFormatExpression(String)} method + * Returns the base column-name used to create a formatting column via {@link #getFormatExpression(String)} method * - * @param selectColumn a {@link SelectColumn} returned from the - * {@link #getFormatExpression(String)} method + * @param selectColumn a {@link SelectColumn} returned from the {@link #getFormatExpression(String)} method * @return the baseColumn used to define the provided selectColumn */ public static String getFormatBaseColumn(final SelectColumn selectColumn) { @@ -140,8 +127,7 @@ public static String getFormatBaseColumn(final SelectColumn selectColumn) { int index; - // though ugly, this should be no worse than {@link - // 
ColumnFormattingValues#isFormattingColumn(String)} + // though ugly, this should be no worse than {@link ColumnFormattingValues#isFormattingColumn(String)} index = formattingColumn.lastIndexOf(ColumnFormattingValues.TABLE_FORMAT_NAME); if (index == -1) { index = formattingColumn.lastIndexOf(ColumnFormattingValues.TABLE_NUMERIC_FORMAT_NAME); diff --git a/DB/src/main/java/io/deephaven/db/tables/select/SelectFactoryConstants.java b/DB/src/main/java/io/deephaven/db/tables/select/SelectFactoryConstants.java index 575a9ec8971..8a2c5e28725 100644 --- a/DB/src/main/java/io/deephaven/db/tables/select/SelectFactoryConstants.java +++ b/DB/src/main/java/io/deephaven/db/tables/select/SelectFactoryConstants.java @@ -11,10 +11,7 @@ public final class SelectFactoryConstants { /** The end of a complete expression. Matches any trailing spaces and the end of the input */ public static final String END_PTRN = "\\s*\\Z"; - /** - * Matches a variable starting with a letter, _ or $ followed by any number of letters, numbers, - * _ or $ - */ + /** Matches a variable starting with a letter, _ or $ followed by any number of letters, numbers, _ or $ */ public static final String ID_PTRN = "[a-zA-Z_$][a-zA-Z0-9_$]*"; /** An integer, including the initial minus sign */ @@ -33,11 +30,11 @@ public final class SelectFactoryConstants { public static final String BOOL_PTRN = "[tT][rR][uU][eE]" + "|" + "[fF][aA][lL][sS][eE]"; /** - * Any {@link #INT_PTRN int}, {@link #FLT_PTRN float}, {@link #BOOL_PTRN boolean}, or - * {@link #STR_PTRN string} as defined above + * Any {@link #INT_PTRN int}, {@link #FLT_PTRN float}, {@link #BOOL_PTRN boolean}, or {@link #STR_PTRN string} as + * defined above */ - public static final String LITERAL_PTRN = "(?:" + INT_PTRN + ")|(?:" + FLT_PTRN + ")|(?:" - + BOOL_PTRN + ")|(?:" + STR_PTRN + ")|(?:" + DATETIME_PTRN + ")"; + public static final String LITERAL_PTRN = "(?:" + INT_PTRN + ")|(?:" + FLT_PTRN + ")|(?:" + BOOL_PTRN + ")|(?:" + + STR_PTRN + ")|(?:" + 
DATETIME_PTRN + ")"; /** Case insensitive 'icase' expression */ public static final String ICASE = "[iI][cC][aA][sS][eE]"; diff --git a/DB/src/main/java/io/deephaven/db/tables/select/SelectFilterFactory.java b/DB/src/main/java/io/deephaven/db/tables/select/SelectFilterFactory.java index db4c68b5832..256ff755b39 100644 --- a/DB/src/main/java/io/deephaven/db/tables/select/SelectFilterFactory.java +++ b/DB/src/main/java/io/deephaven/db/tables/select/SelectFilterFactory.java @@ -44,37 +44,32 @@ public class SelectFilterFactory { static { // == parser.registerFactory(new AbstractExpressionFactory( - START_PTRN + "(" + ID_PTRN + ")\\s*={1,2}\\s*(" + LITERAL_PTRN + ")" + END_PTRN) { + START_PTRN + "(" + ID_PTRN + ")\\s*={1,2}\\s*(" + LITERAL_PTRN + ")" + END_PTRN) { @Override public SelectFilter getExpression(String expression, Matcher matcher, Object... args) { final String columnName = matcher.group(1); - final FormulaParserConfiguration parserConfiguration = - (FormulaParserConfiguration) args[0]; + final FormulaParserConfiguration parserConfiguration = (FormulaParserConfiguration) args[0]; if (isRowVariable(columnName)) { - log.debug() - .append("SelectFilterFactory creating ConditionFilter for expression: ") - .append(expression).endl(); + log.debug().append("SelectFilterFactory creating ConditionFilter for expression: ") + .append(expression).endl(); return ConditionFilter.createConditionFilter(expression, parserConfiguration); } - log.debug().append("SelectFilterFactory creating MatchFilter for expression: ") - .append(expression).endl(); - return new MatchFilter(MatchFilter.CaseSensitivity.MatchCase, columnName, - matcher.group(2)); + log.debug().append("SelectFilterFactory creating MatchFilter for expression: ").append(expression) + .endl(); + return new MatchFilter(MatchFilter.CaseSensitivity.MatchCase, columnName, matcher.group(2)); } }); // == parser.registerFactory(new AbstractExpressionFactory( - START_PTRN + "(" + ID_PTRN + ")\\s*={1,2}\\s*(" + ID_PTRN + 
")" + END_PTRN) { + START_PTRN + "(" + ID_PTRN + ")\\s*={1,2}\\s*(" + ID_PTRN + ")" + END_PTRN) { @Override public SelectFilter getExpression(String expression, Matcher matcher, Object... args) { final String columnName = matcher.group(1); - final FormulaParserConfiguration parserConfiguration = - (FormulaParserConfiguration) args[0]; + final FormulaParserConfiguration parserConfiguration = (FormulaParserConfiguration) args[0]; if (isRowVariable(columnName)) { - log.debug() - .append("SelectFilterFactory creating ConditionFilter for expression: ") - .append(expression).endl(); + log.debug().append("SelectFilterFactory creating ConditionFilter for expression: ") + .append(expression).endl(); return ConditionFilter.createConditionFilter(expression, parserConfiguration); } try { @@ -82,10 +77,9 @@ public SelectFilter getExpression(String expression, Matcher matcher, Object... } catch (QueryScope.MissingVariableException e) { return ConditionFilter.createConditionFilter(expression, parserConfiguration); } - log.debug().append("SelectFilterFactory creating MatchFilter for expression: ") - .append(expression).endl(); - return new MatchFilter(MatchFilter.CaseSensitivity.MatchCase, columnName, - matcher.group(2)); + log.debug().append("SelectFilterFactory creating MatchFilter for expression: ").append(expression) + .endl(); + return new MatchFilter(MatchFilter.CaseSensitivity.MatchCase, columnName, matcher.group(2)); } }); @@ -94,94 +88,82 @@ public SelectFilter getExpression(String expression, Matcher matcher, Object... // > // >= parser.registerFactory(new AbstractExpressionFactory( - START_PTRN + "(" + ID_PTRN + ")\\s*([<>]=?)\\s*(" + LITERAL_PTRN + ")" + END_PTRN) { + START_PTRN + "(" + ID_PTRN + ")\\s*([<>]=?)\\s*(" + LITERAL_PTRN + ")" + END_PTRN) { @Override public SelectFilter getExpression(String expression, Matcher matcher, Object... 
args) { - final FormulaParserConfiguration parserConfiguration = - (FormulaParserConfiguration) args[0]; + final FormulaParserConfiguration parserConfiguration = (FormulaParserConfiguration) args[0]; final String columnName = matcher.group(1); final String conditionString = matcher.group(2); final String value = matcher.group(3); if (isRowVariable(columnName)) { - log.debug() - .append("SelectFilterFactory creating ConditionFilter for expression: ") - .append(expression).endl(); + log.debug().append("SelectFilterFactory creating ConditionFilter for expression: ") + .append(expression).endl(); return ConditionFilter.createConditionFilter(expression, parserConfiguration); } try { - log.debug() - .append( - "SelectFilterFactory creating RangeConditionFilter for expression: ") - .append(expression).endl(); + log.debug().append("SelectFilterFactory creating RangeConditionFilter for expression: ") + .append(expression).endl(); return new RangeConditionFilter(columnName, conditionString, value, expression, - parserConfiguration); + parserConfiguration); } catch (Exception e) { - log.warn() - .append("SelectFilterFactory could not make RangeFilter for expression: ") - .append(expression).append(" due to ").append(e) - .append(" Creating ConditionFilter instead.").endl(); + log.warn().append("SelectFilterFactory could not make RangeFilter for expression: ") + .append(expression).append(" due to ").append(e) + .append(" Creating ConditionFilter instead.").endl(); return ConditionFilter.createConditionFilter(expression, parserConfiguration); } } }); // [icase] [not] in , , ... , - parser.registerFactory( - new AbstractExpressionFactory("(?s)" + START_PTRN + "(" + ID_PTRN + parser.registerFactory(new AbstractExpressionFactory("(?s)" + START_PTRN + "(" + ID_PTRN + ")\\s+(" + ICASE + "\\s+)?(" + NOT + "\\s+)?" + IN + "\\s+(.+?)" + END_PTRN) { - @Override - public SelectFilter getExpression(String expression, Matcher matcher, - Object... 
args) { - final SplitIgnoreQuotes splitter = new SplitIgnoreQuotes(); - log.debug().append("SelectFilterFactory creating MatchFilter for expression: ") - .append(expression).endl(); - return new MatchFilter( + @Override + public SelectFilter getExpression(String expression, Matcher matcher, Object... args) { + final SplitIgnoreQuotes splitter = new SplitIgnoreQuotes(); + log.debug().append("SelectFilterFactory creating MatchFilter for expression: ").append(expression) + .endl(); + return new MatchFilter( matcher.group(2) == null ? MatchFilter.CaseSensitivity.MatchCase - : MatchFilter.CaseSensitivity.IgnoreCase, - matcher.group(3) == null ? MatchFilter.MatchType.Regular - : MatchFilter.MatchType.Inverted, + : MatchFilter.CaseSensitivity.IgnoreCase, + matcher.group(3) == null ? MatchFilter.MatchType.Regular : MatchFilter.MatchType.Inverted, matcher.group(1), splitter.split(matcher.group(4), ',')); - } - }); + } + }); // [icase] [not] includes [any|all]<"String"> - parser.registerFactory(new AbstractExpressionFactory( - START_PTRN + "(" + ID_PTRN + ")\\s+(" + ICASE + "\\s+)?(" + NOT + "\\s+)?" + INCLUDES + - "(?:\\s+(" + ANY + "|" + ALL + ")\\s+)?" + "\\s*((?:(?:" + STR_PTRN - + ")(?:,\\s*)?)+)" + END_PTRN) { + parser.registerFactory(new AbstractExpressionFactory(START_PTRN + "(" + ID_PTRN + ")\\s+(" + ICASE + + "\\s+)?(" + NOT + "\\s+)?" + INCLUDES + + "(?:\\s+(" + ANY + "|" + ALL + ")\\s+)?" + "\\s*((?:(?:" + STR_PTRN + ")(?:,\\s*)?)+)" + END_PTRN) { @Override public SelectFilter getExpression(String expression, Matcher matcher, Object... 
args) { final SplitIgnoreQuotes splitter = new SplitIgnoreQuotes(); - log.debug() - .append("SelectFilterFactory creating StringContainsFilter for expression: ") - .append(expression).endl(); + log.debug().append("SelectFilterFactory creating StringContainsFilter for expression: ") + .append(expression).endl(); final String[] values = splitter.split(matcher.group(5), ','); final String anyAllPart = matcher.group(4); return new StringContainsFilter( - matcher.group(2) == null ? MatchFilter.CaseSensitivity.MatchCase - : MatchFilter.CaseSensitivity.IgnoreCase, - matcher.group(3) == null ? MatchFilter.MatchType.Regular - : MatchFilter.MatchType.Inverted, - matcher.group(1), - values.length == 1 || - StringUtils.isNullOrEmpty(anyAllPart) || "any".equalsIgnoreCase(anyAllPart), - true, values); + matcher.group(2) == null ? MatchFilter.CaseSensitivity.MatchCase + : MatchFilter.CaseSensitivity.IgnoreCase, + matcher.group(3) == null ? MatchFilter.MatchType.Regular : MatchFilter.MatchType.Inverted, + matcher.group(1), + values.length == 1 || + StringUtils.isNullOrEmpty(anyAllPart) || "any".equalsIgnoreCase(anyAllPart), + true, values); } }); // Anything else is assumed to be a condition formula. - parser.registerFactory(new AbstractExpressionFactory( - START_PTRN + "(" + ANYTHING + ")" + END_PTRN) { - @Override - public SelectFilter getExpression(String expression, Matcher matcher, Object... args) { - final FormulaParserConfiguration parserConfiguration = - (FormulaParserConfiguration) args[0]; - - log.debug().append("SelectFilterFactory creating ConditionFilter for expression: ") - .append(expression).endl(); - return ConditionFilter.createConditionFilter(matcher.group(1), parserConfiguration); - } - }); + parser.registerFactory( + new AbstractExpressionFactory(START_PTRN + "(" + ANYTHING + ")" + END_PTRN) { + @Override + public SelectFilter getExpression(String expression, Matcher matcher, Object... 
args) { + final FormulaParserConfiguration parserConfiguration = (FormulaParserConfiguration) args[0]; + + log.debug().append("SelectFilterFactory creating ConditionFilter for expression: ") + .append(expression).endl(); + return ConditionFilter.createConditionFilter(matcher.group(1), parserConfiguration); + } + }); } private static boolean isRowVariable(String columnName) { @@ -190,32 +172,28 @@ private static boolean isRowVariable(String columnName) { public static SelectFilter getExpression(String match) { Pair parserAndExpression = - FormulaParserConfiguration.extractParserAndExpression(match); + FormulaParserConfiguration.extractParserAndExpression(match); return parser.parse(parserAndExpression.second, parserAndExpression.first); } public static SelectFilter[] getExpressions(String... expressions) { - return Arrays.stream(expressions).map(SelectFilterFactory::getExpression) - .toArray(SelectFilter[]::new); + return Arrays.stream(expressions).map(SelectFilterFactory::getExpression).toArray(SelectFilter[]::new); } public static SelectFilter[] getExpressions(Collection expressions) { - return expressions.stream().map(SelectFilterFactory::getExpression) - .toArray(SelectFilter[]::new); + return expressions.stream().map(SelectFilterFactory::getExpression).toArray(SelectFilter[]::new); } - public static SelectFilter[] expandQuickFilter(Table t, String quickFilter, - Set columnNames) { + public static SelectFilter[] expandQuickFilter(Table t, String quickFilter, Set columnNames) { return expandQuickFilter(t, quickFilter, QuickFilterMode.NORMAL, columnNames); } - public static SelectFilter[] expandQuickFilter(Table t, String quickFilter, - QuickFilterMode filterMode) { + public static SelectFilter[] expandQuickFilter(Table t, String quickFilter, QuickFilterMode filterMode) { return expandQuickFilter(t, quickFilter, filterMode, Collections.emptySet()); } - public static SelectFilter[] expandQuickFilter(Table t, String quickFilter, - QuickFilterMode filterMode, 
@NotNull Set columnNames) { + public static SelectFilter[] expandQuickFilter(Table t, String quickFilter, QuickFilterMode filterMode, + @NotNull Set columnNames) { // Do some type inference if (quickFilter != null && !quickFilter.isEmpty()) { if (filterMode == QuickFilterMode.MULTI) { @@ -223,43 +201,43 @@ public static SelectFilter[] expandQuickFilter(Table t, String quickFilter, } return t.getColumnSourceMap().entrySet().stream() - .filter(entry -> !ColumnFormattingValues.isFormattingColumn(entry.getKey()) && - !RollupInfo.ROLLUP_COLUMN.equals(entry.getKey()) && - (columnNames.isEmpty() || columnNames.contains(entry.getKey()))) - .map(entry -> { - final Class colClass = entry.getValue().getType(); - final String colName = entry.getKey(); - if (filterMode == QuickFilterMode.REGEX) { - if (colClass.isAssignableFrom(String.class)) { - return new RegexFilter(MatchFilter.CaseSensitivity.IgnoreCase, - MatchFilter.MatchType.Regular, colName, quickFilter); - } - return null; - } else if (filterMode == QuickFilterMode.AND) { - final String[] parts = quickFilter.split("\\s+"); - final List filters = Arrays.stream(parts) - .map(part -> getSelectFilterForAnd(colName, part, colClass)) - .filter(Objects::nonNull).collect(Collectors.toList()); - if (filters.isEmpty()) { + .filter(entry -> !ColumnFormattingValues.isFormattingColumn(entry.getKey()) && + !RollupInfo.ROLLUP_COLUMN.equals(entry.getKey()) && + (columnNames.isEmpty() || columnNames.contains(entry.getKey()))) + .map(entry -> { + final Class colClass = entry.getValue().getType(); + final String colName = entry.getKey(); + if (filterMode == QuickFilterMode.REGEX) { + if (colClass.isAssignableFrom(String.class)) { + return new RegexFilter(MatchFilter.CaseSensitivity.IgnoreCase, + MatchFilter.MatchType.Regular, colName, quickFilter); + } return null; + } else if (filterMode == QuickFilterMode.AND) { + final String[] parts = quickFilter.split("\\s+"); + final List filters = + Arrays.stream(parts).map(part -> 
getSelectFilterForAnd(colName, part, colClass)) + .filter(Objects::nonNull).collect(Collectors.toList()); + if (filters.isEmpty()) { + return null; + } + return ConjunctiveFilter.makeConjunctiveFilter( + filters.toArray(SelectFilter.ZERO_LENGTH_SELECT_FILTER_ARRAY)); + } else if (filterMode == QuickFilterMode.OR) { + final String[] parts = quickFilter.split("\\s+"); + final List filters = Arrays.stream(parts) + .map(part -> getSelectFilter(colName, part, filterMode, colClass)) + .filter(Objects::nonNull).collect(Collectors.toList()); + if (filters.isEmpty()) { + return null; + } + return DisjunctiveFilter.makeDisjunctiveFilter( + filters.toArray(SelectFilter.ZERO_LENGTH_SELECT_FILTER_ARRAY)); + } else { + return getSelectFilter(colName, quickFilter, filterMode, colClass); } - return ConjunctiveFilter.makeConjunctiveFilter( - filters.toArray(SelectFilter.ZERO_LENGTH_SELECT_FILTER_ARRAY)); - } else if (filterMode == QuickFilterMode.OR) { - final String[] parts = quickFilter.split("\\s+"); - final List filters = Arrays.stream(parts) - .map(part -> getSelectFilter(colName, part, filterMode, colClass)) - .filter(Objects::nonNull).collect(Collectors.toList()); - if (filters.isEmpty()) { - return null; - } - return DisjunctiveFilter.makeDisjunctiveFilter( - filters.toArray(SelectFilter.ZERO_LENGTH_SELECT_FILTER_ARRAY)); - } else { - return getSelectFilter(colName, quickFilter, filterMode, colClass); - } - }).filter(Objects::nonNull).toArray(SelectFilter[]::new); + }).filter(Objects::nonNull).toArray(SelectFilter[]::new); } return SelectFilter.ZERO_LENGTH_SELECT_FILTER_ARRAY; @@ -271,13 +249,13 @@ private static SelectFilter[] expandMultiColumnQuickFilter(Table t, String quick for (String part : parts) { final SelectFilter[] filterArray = t.getColumnSourceMap().entrySet().stream() - .filter(entry -> !ColumnFormattingValues.isFormattingColumn(entry.getKey()) - && !RollupInfo.ROLLUP_COLUMN.equals(entry.getKey())) - .map(entry -> { - final Class colClass = 
entry.getValue().getType(); - final String colName = entry.getKey(); - return getSelectFilter(colName, part, QuickFilterMode.MULTI, colClass); - }).filter(Objects::nonNull).toArray(SelectFilter[]::new); + .filter(entry -> !ColumnFormattingValues.isFormattingColumn(entry.getKey()) + && !RollupInfo.ROLLUP_COLUMN.equals(entry.getKey())) + .map(entry -> { + final Class colClass = entry.getValue().getType(); + final String colName = entry.getKey(); + return getSelectFilter(colName, part, QuickFilterMode.MULTI, colClass); + }).filter(Objects::nonNull).toArray(SelectFilter[]::new); if (filterArray.length > 0) { filters.add(DisjunctiveFilter.makeDisjunctiveFilter(filterArray)); } @@ -286,18 +264,16 @@ private static SelectFilter[] expandMultiColumnQuickFilter(Table t, String quick return filters.toArray(SelectFilter.ZERO_LENGTH_SELECT_FILTER_ARRAY); } - private static SelectFilter getSelectFilter(String colName, String quickFilter, - QuickFilterMode filterMode, Class colClass) { + private static SelectFilter getSelectFilter(String colName, String quickFilter, QuickFilterMode filterMode, + Class colClass) { final InferenceResult typeData = new InferenceResult(quickFilter); - if ((colClass == Double.class || colClass == double.class) - && (!Double.isNaN(typeData.doubleVal))) { + if ((colClass == Double.class || colClass == double.class) && (!Double.isNaN(typeData.doubleVal))) { try { return DoubleRangeFilter.makeRange(colName, quickFilter); } catch (NumberFormatException ignored) { return new MatchFilter(colName, typeData.doubleVal); } - } else if (colClass == Float.class - || colClass == float.class && (!Float.isNaN(typeData.floatVal))) { + } else if (colClass == Float.class || colClass == float.class && (!Float.isNaN(typeData.floatVal))) { try { return FloatRangeFilter.makeRange(colName, quickFilter); } catch (NumberFormatException ignored) { @@ -317,15 +293,12 @@ private static SelectFilter getSelectFilter(String colName, String quickFilter, return 
ComparableRangeFilter.makeBigDecimalRange(colName, quickFilter); } else if (filterMode != QuickFilterMode.NUMERIC) { if (colClass == String.class) { - return new StringContainsFilter(MatchFilter.CaseSensitivity.IgnoreCase, - MatchFilter.MatchType.Regular, colName, quickFilter); - } else if ((colClass == boolean.class || colClass == Boolean.class) - && typeData.isBool) { + return new StringContainsFilter(MatchFilter.CaseSensitivity.IgnoreCase, MatchFilter.MatchType.Regular, + colName, quickFilter); + } else if ((colClass == boolean.class || colClass == Boolean.class) && typeData.isBool) { return new MatchFilter(colName, Boolean.parseBoolean(quickFilter)); - } else if (colClass == DBDateTime.class && typeData.dateLower != null - && typeData.dateUpper != null) { - return new DateTimeRangeFilter(colName, typeData.dateLower, typeData.dateUpper, - true, false); + } else if (colClass == DBDateTime.class && typeData.dateLower != null && typeData.dateUpper != null) { + return new DateTimeRangeFilter(colName, typeData.dateLower, typeData.dateUpper, true, false); } else if ((colClass == char.class || colClass == Character.class) && typeData.isChar) { return new MatchFilter(colName, typeData.charVal); } @@ -333,27 +306,26 @@ private static SelectFilter getSelectFilter(String colName, String quickFilter, return null; } - private static SelectFilter getSelectFilterForAnd(String colName, String quickFilter, - Class colClass) { + private static SelectFilter getSelectFilterForAnd(String colName, String quickFilter, Class colClass) { // AND mode only supports String types if (colClass.isAssignableFrom(String.class)) { - return new StringContainsFilter(MatchFilter.CaseSensitivity.IgnoreCase, - MatchFilter.MatchType.Regular, colName, quickFilter); + return new StringContainsFilter(MatchFilter.CaseSensitivity.IgnoreCase, MatchFilter.MatchType.Regular, + colName, quickFilter); } return null; } - public static SelectFilter[] getExpressionsWithQuickFilter(String[] expressions, Table t, - 
String quickFilter, QuickFilterMode filterMode) { + public static SelectFilter[] getExpressionsWithQuickFilter(String[] expressions, Table t, String quickFilter, + QuickFilterMode filterMode) { if (quickFilter != null && !quickFilter.isEmpty()) { return Stream.concat( - Arrays.stream(getExpressions(expressions)), - Stream.of(filterMode == QuickFilterMode.MULTI - ? ConjunctiveFilter.makeConjunctiveFilter( - SelectFilterFactory.expandQuickFilter(t, quickFilter, filterMode)) - : DisjunctiveFilter.makeDisjunctiveFilter( - SelectFilterFactory.expandQuickFilter(t, quickFilter, filterMode)))) - .toArray(SelectFilter[]::new); + Arrays.stream(getExpressions(expressions)), + Stream.of(filterMode == QuickFilterMode.MULTI + ? ConjunctiveFilter.makeConjunctiveFilter( + SelectFilterFactory.expandQuickFilter(t, quickFilter, filterMode)) + : DisjunctiveFilter.makeDisjunctiveFilter( + SelectFilterFactory.expandQuickFilter(t, quickFilter, filterMode)))) + .toArray(SelectFilter[]::new); } return getExpressions(expressions); } @@ -445,8 +417,8 @@ static class InferenceResult { try { // Maybe it was just a TOD? long time = DBTimeUtils.convertTime(valString); - dateLower = DBTimeUtils.getZonedDateTime(DBDateTime.now()) - .truncatedTo(ChronoUnit.DAYS).plus(time, ChronoUnit.NANOS); + dateLower = DBTimeUtils.getZonedDateTime(DBDateTime.now()).truncatedTo(ChronoUnit.DAYS).plus(time, + ChronoUnit.NANOS); } catch (RuntimeException stillIgnored) { } @@ -454,14 +426,11 @@ static class InferenceResult { if (dateLower != null) { final ChronoField finestUnit = DBTimeUtils.getFinestDefinedUnit(valString); - dateUpper = - finestUnit == null ? dateLower : dateLower.plus(1, finestUnit.getBaseUnit()); + dateUpper = finestUnit == null ? dateLower : dateLower.plus(1, finestUnit.getBaseUnit()); } - this.dateUpper = dateUpper == null ? null - : DBTimeUtils.millisToTime(dateUpper.toInstant().toEpochMilli()); - this.dateLower = dateLower == null ? 
null - : DBTimeUtils.millisToTime(dateLower.toInstant().toEpochMilli()); + this.dateUpper = dateUpper == null ? null : DBTimeUtils.millisToTime(dateUpper.toInstant().toEpochMilli()); + this.dateLower = dateLower == null ? null : DBTimeUtils.millisToTime(dateLower.toInstant().toEpochMilli()); } } } diff --git a/DB/src/main/java/io/deephaven/db/tables/select/Utils.java b/DB/src/main/java/io/deephaven/db/tables/select/Utils.java index fc57a2c713c..0b8fa318cab 100644 --- a/DB/src/main/java/io/deephaven/db/tables/select/Utils.java +++ b/DB/src/main/java/io/deephaven/db/tables/select/Utils.java @@ -16,14 +16,13 @@ public static List getFormulaTokens(String formula, Collection n int lastIndex = 0; while ((lastIndex = formula.indexOf(name, lastIndex)) != -1) { if (lastIndex > 0 && (Character.isLetter(formula.charAt(lastIndex - 1)) - || formula.charAt(lastIndex - 1) == '_')) { + || formula.charAt(lastIndex - 1) == '_')) { lastIndex++; continue; } int nextChar = lastIndex + name.length(); if (nextChar < formula.length() && (Character.isLetter(formula.charAt(nextChar)) - || formula.charAt(nextChar) == '_' - || Character.isDigit(formula.charAt(nextChar)))) { + || formula.charAt(nextChar) == '_' || Character.isDigit(formula.charAt(nextChar)))) { lastIndex++; continue; } @@ -34,25 +33,21 @@ public static List getFormulaTokens(String formula, Collection n return result; } - public static String replaceFormulaTokens(String formula, String sourceToken, - String destToken) { + public static String replaceFormulaTokens(String formula, String sourceToken, String destToken) { int lastIndex = 0; - while (lastIndex < formula.length() - && (lastIndex = formula.indexOf(sourceToken, lastIndex)) != -1) { - if (lastIndex > 0 && (Character.isLetter(formula.charAt(lastIndex - 1)) - || formula.charAt(lastIndex - 1) == '_')) { + while (lastIndex < formula.length() && (lastIndex = formula.indexOf(sourceToken, lastIndex)) != -1) { + if (lastIndex > 0 + && 
(Character.isLetter(formula.charAt(lastIndex - 1)) || formula.charAt(lastIndex - 1) == '_')) { lastIndex++; continue; } int nextChar = lastIndex + sourceToken.length(); - if (nextChar < formula.length() - && (Character.isLetter(formula.charAt(nextChar)) || formula.charAt(nextChar) == '_' - || Character.isDigit(formula.charAt(nextChar)))) { + if (nextChar < formula.length() && (Character.isLetter(formula.charAt(nextChar)) + || formula.charAt(nextChar) == '_' || Character.isDigit(formula.charAt(nextChar)))) { lastIndex++; continue; } - formula = formula.substring(0, lastIndex) + destToken - + formula.substring(lastIndex + sourceToken.length()); + formula = formula.substring(0, lastIndex) + destToken + formula.substring(lastIndex + sourceToken.length()); lastIndex += destToken.length(); } return formula; diff --git a/DB/src/main/java/io/deephaven/db/tables/select/WouldMatchPair.java b/DB/src/main/java/io/deephaven/db/tables/select/WouldMatchPair.java index e2d72362703..f6e055d4ae8 100644 --- a/DB/src/main/java/io/deephaven/db/tables/select/WouldMatchPair.java +++ b/DB/src/main/java/io/deephaven/db/tables/select/WouldMatchPair.java @@ -54,7 +54,7 @@ public boolean equals(Object o) { return false; final WouldMatchPair other = (WouldMatchPair) o; return Objects.equals(columnName, other.columnName) && - Objects.equals(filter, other.filter); + Objects.equals(filter, other.filter); } @Override diff --git a/DB/src/main/java/io/deephaven/db/tables/select/WouldMatchPairFactory.java b/DB/src/main/java/io/deephaven/db/tables/select/WouldMatchPairFactory.java index fb3b5fd4524..74b5f15174f 100644 --- a/DB/src/main/java/io/deephaven/db/tables/select/WouldMatchPairFactory.java +++ b/DB/src/main/java/io/deephaven/db/tables/select/WouldMatchPairFactory.java @@ -21,10 +21,9 @@ public class WouldMatchPairFactory { private static final ExpressionParser parser = new ExpressionParser<>(); static { parser.registerFactory(new AbstractExpressionFactory( - START_PTRN + "(" + ID_PTRN + 
")\\s*=\\s*(" + ANYTHING + ")" + END_PTRN) { + START_PTRN + "(" + ID_PTRN + ")\\s*=\\s*(" + ANYTHING + ")" + END_PTRN) { @Override - public WouldMatchPair getExpression(String expression, Matcher matcher, - Object... args) { + public WouldMatchPair getExpression(String expression, Matcher matcher, Object... args) { return new WouldMatchPair(matcher.group(1), matcher.group(2)); } }); @@ -45,6 +44,6 @@ public static WouldMatchPair[] getExpressions(Collection matches) { private static WouldMatchPair[] getExpressions(Stream matchesStream) { return matchesStream.map(WouldMatchPairFactory::getExpression) - .toArray(WouldMatchPair[]::new); + .toArray(WouldMatchPair[]::new); } } diff --git a/DB/src/main/java/io/deephaven/db/tables/utils/ArrayUtils.java b/DB/src/main/java/io/deephaven/db/tables/utils/ArrayUtils.java index bfa81288c58..e20e6ba9535 100644 --- a/DB/src/main/java/io/deephaven/db/tables/utils/ArrayUtils.java +++ b/DB/src/main/java/io/deephaven/db/tables/utils/ArrayUtils.java @@ -189,8 +189,7 @@ public static ArrayAccessor getArrayAccessorFromArray(Object arrayPrototype, int } else if (c.equals(short[].class)) { return new ShortArrayAccessor(shortNullArray(size)); } else { - return new ObjectArrayAccessor( - (Object[]) Array.newInstance(c.getComponentType(), size)); + return new ObjectArrayAccessor((Object[]) Array.newInstance(c.getComponentType(), size)); } } @@ -377,8 +376,7 @@ public static short[] getUnboxedArray(Short[] boxedArray) { public static byte[] getUnboxedByteArray(Object[] boxedArray) { final byte[] result = new byte[boxedArray.length]; for (int i = 0; i < result.length; i++) { - result[i] = - (boxedArray[i] != null ? (((Number) boxedArray[i]).byteValue()) : NULL_BYTE); + result[i] = (boxedArray[i] != null ? 
(((Number) boxedArray[i]).byteValue()) : NULL_BYTE); } return result; } @@ -394,8 +392,7 @@ public static char[] getUnboxedCharArray(Object[] boxedArray) { public static short[] getUnboxedShortArray(Object[] boxedArray) { final short[] result = new short[boxedArray.length]; for (int i = 0; i < result.length; i++) { - result[i] = - (boxedArray[i] != null ? (((Number) boxedArray[i]).shortValue()) : NULL_SHORT); + result[i] = (boxedArray[i] != null ? (((Number) boxedArray[i]).shortValue()) : NULL_SHORT); } return result; } @@ -419,8 +416,7 @@ public static long[] getUnboxedLongArray(Object[] boxedArray) { public static float[] getUnboxedFloatArray(Object[] boxedArray) { final float[] result = new float[boxedArray.length]; for (int i = 0; i < result.length; i++) { - result[i] = - (boxedArray[i] != null ? ((Number) boxedArray[i]).floatValue() : NULL_FLOAT); + result[i] = (boxedArray[i] != null ? ((Number) boxedArray[i]).floatValue() : NULL_FLOAT); } return result; } @@ -428,8 +424,7 @@ public static float[] getUnboxedFloatArray(Object[] boxedArray) { public static double[] getUnboxedDoubleArray(Object[] boxedArray) { final double[] result = new double[boxedArray.length]; for (int i = 0; i < result.length; i++) { - result[i] = - (boxedArray[i] != null ? ((Number) boxedArray[i]).doubleValue() : NULL_DOUBLE); + result[i] = (boxedArray[i] != null ? 
((Number) boxedArray[i]).doubleValue() : NULL_DOUBLE); } return result; } @@ -1022,8 +1017,8 @@ public void copyArray(Object sourceArray, int pos, int length) { if (sourceArray == null) { throw new NullPointerException(); } - Require.requirement(sourceArray instanceof int[], "sourceArray instanceof int[]", - sourceArray.getClass(), "sourceArray.getClass()"); + Require.requirement(sourceArray instanceof int[], "sourceArray instanceof int[]", sourceArray.getClass(), + "sourceArray.getClass()"); System.arraycopy(sourceArray, 0, array, pos, length); } diff --git a/DB/src/main/java/io/deephaven/db/tables/utils/CacheLogUtils.java b/DB/src/main/java/io/deephaven/db/tables/utils/CacheLogUtils.java index ed27214ca57..5b117a327cd 100644 --- a/DB/src/main/java/io/deephaven/db/tables/utils/CacheLogUtils.java +++ b/DB/src/main/java/io/deephaven/db/tables/utils/CacheLogUtils.java @@ -6,8 +6,7 @@ public class CacheLogUtils { - public static Boolean enumToBoolean(final int value, final int YES_CONSTANT, - final int NO_CONSTANT) { + public static Boolean enumToBoolean(final int value, final int YES_CONSTANT, final int NO_CONSTANT) { if (value == YES_CONSTANT) { return Boolean.TRUE; } @@ -17,7 +16,7 @@ public static Boolean enumToBoolean(final int value, final int YES_CONSTANT, if (value == Integer.MIN_VALUE) { return null; } - throw new IllegalArgumentException("Unexpected value=" + value + ", YES_CONSTANT=" - + YES_CONSTANT + ", NO_CONSTANT=" + NO_CONSTANT); + throw new IllegalArgumentException( + "Unexpected value=" + value + ", YES_CONSTANT=" + YES_CONSTANT + ", NO_CONSTANT=" + NO_CONSTANT); } } diff --git a/DB/src/main/java/io/deephaven/db/tables/utils/CachedStringSetWrapperEnumFormatter.java b/DB/src/main/java/io/deephaven/db/tables/utils/CachedStringSetWrapperEnumFormatter.java index 2c0e4a94623..dedbbad5e9d 100644 --- a/DB/src/main/java/io/deephaven/db/tables/utils/CachedStringSetWrapperEnumFormatter.java +++ 
b/DB/src/main/java/io/deephaven/db/tables/utils/CachedStringSetWrapperEnumFormatter.java @@ -9,13 +9,12 @@ import gnu.trove.map.hash.TIntObjectHashMap; /** - * This object extends the EnumFormatter class and affords the caching of string set results Be - * warned this could create a large hash if the possible enum combinations get very large. + * This object extends the EnumFormatter class and affords the caching of string set results Be warned this could create + * a large hash if the possible enum combinations get very large. */ public class CachedStringSetWrapperEnumFormatter extends EnumFormatter { - protected TIntObjectHashMap indexToStringSetWrapper = - new TIntObjectHashMap<>(); + protected TIntObjectHashMap indexToStringSetWrapper = new TIntObjectHashMap<>(); public CachedStringSetWrapperEnumFormatter(String[] enums) { super(enums); diff --git a/DB/src/main/java/io/deephaven/db/tables/utils/ColumnsSpecHelper.java b/DB/src/main/java/io/deephaven/db/tables/utils/ColumnsSpecHelper.java index 8e553ad4d4a..cae30e16c58 100644 --- a/DB/src/main/java/io/deephaven/db/tables/utils/ColumnsSpecHelper.java +++ b/DB/src/main/java/io/deephaven/db/tables/utils/ColumnsSpecHelper.java @@ -3,8 +3,8 @@ import java.util.ArrayList; /** - * Helper class to support easier to read in-line column definitions. You can obtain the array - * arguments to feed the buildWriter methods in TableWriterFactory. + * Helper class to support easier to read in-line column definitions. You can obtain the array arguments to feed the + * buildWriter methods in TableWriterFactory. 
*/ public class ColumnsSpecHelper { private final ArrayList names = new ArrayList<>(); diff --git a/DB/src/main/java/io/deephaven/db/tables/utils/CsvHelpers.java b/DB/src/main/java/io/deephaven/db/tables/utils/CsvHelpers.java index da8bf79a708..82be7638c82 100644 --- a/DB/src/main/java/io/deephaven/db/tables/utils/CsvHelpers.java +++ b/DB/src/main/java/io/deephaven/db/tables/utils/CsvHelpers.java @@ -45,15 +45,14 @@ public class CsvHelpers { * @param destPath path to the CSV file to be written * @param compressed whether to zip the file being written * @param timeZone a DBTimeZone constant relative to which DBDateTime data should be adjusted - * @param progress a procedure that implements Procedure.Binary, and takes a progress Integer - * and a total size Integer to update progress + * @param progress a procedure that implements Procedure.Binary, and takes a progress Integer and a total size + * Integer to update progress * @param columns a list of columns to include in the export * @throws IOException if the target file cannot be written */ @ScriptApi - public static void writeCsv(Table source, String destPath, boolean compressed, - DBTimeZone timeZone, @Nullable Procedure.Binary progress, String... columns) - throws IOException { + public static void writeCsv(Table source, String destPath, boolean compressed, DBTimeZone timeZone, + @Nullable Procedure.Binary progress, String... 
columns) throws IOException { writeCsv(source, destPath, compressed, timeZone, progress, false, columns); } @@ -64,16 +63,16 @@ public static void writeCsv(Table source, String destPath, boolean compressed, * @param destPath path to the CSV file to be written * @param compressed whether to zip the file being written * @param timeZone a DBTimeZone constant relative to which DBDateTime data should be adjusted - * @param progress a procedure that implements Procedure.Binary, and takes a progress Integer - * and a total size Integer to update progress + * @param progress a procedure that implements Procedure.Binary, and takes a progress Integer and a total size + * Integer to update progress * @param nullsAsEmpty if nulls should be written as blank instead of '(null)' * @param columns a list of columns to include in the export * @throws IOException if the target file cannot be written */ @ScriptApi - public static void writeCsv(Table source, String destPath, boolean compressed, - DBTimeZone timeZone, @Nullable Procedure.Binary progress, boolean nullsAsEmpty, - String... columns) throws IOException { + public static void writeCsv(Table source, String destPath, boolean compressed, DBTimeZone timeZone, + @Nullable Procedure.Binary progress, boolean nullsAsEmpty, String... 
columns) + throws IOException { writeCsv(source, destPath, compressed, timeZone, progress, nullsAsEmpty, ',', columns); } @@ -84,21 +83,20 @@ public static void writeCsv(Table source, String destPath, boolean compressed, * @param destPath path to the CSV file to be written * @param compressed whether to zip the file being written * @param timeZone a DBTimeZone constant relative to which DBDateTime data should be adjusted - * @param progress a procedure that implements Procedure.Binary, and takes a progress Integer - * and a total size Integer to update progress + * @param progress a procedure that implements Procedure.Binary, and takes a progress Integer and a total size + * Integer to update progress * @param nullsAsEmpty if nulls should be written as blank instead of '(null)' * @param separator the delimiter for the CSV * @param columns a list of columns to include in the export * @throws IOException if the target file cannot be written */ @ScriptApi - public static void writeCsv(Table source, String destPath, boolean compressed, - DBTimeZone timeZone, @Nullable Procedure.Binary progress, boolean nullsAsEmpty, - char separator, String... columns) throws IOException { - final BufferedWriter out = (compressed - ? new BufferedWriter( - new OutputStreamWriter(new BzipFileOutputStream(destPath + ".bz2"))) - : new BufferedWriter(new FileWriter(destPath))); + public static void writeCsv(Table source, String destPath, boolean compressed, DBTimeZone timeZone, + @Nullable Procedure.Binary progress, boolean nullsAsEmpty, char separator, String... columns) + throws IOException { + final BufferedWriter out = + (compressed ? 
new BufferedWriter(new OutputStreamWriter(new BzipFileOutputStream(destPath + ".bz2"))) + : new BufferedWriter(new FileWriter(destPath))); writeCsv(source, out, timeZone, progress, nullsAsEmpty, separator, columns); } @@ -108,16 +106,16 @@ public static void writeCsv(Table source, String destPath, boolean compressed, * @param source a Deephaven table object to be exported * @param out BufferedWriter used to write the CSV * @param timeZone a DBTimeZone constant relative to which DBDateTime data should be adjusted - * @param progress a procedure that implements Procedure.Binary, and takes a progress Integer - * and a total size Integer to update progress + * @param progress a procedure that implements Procedure.Binary, and takes a progress Integer and a total size + * Integer to update progress * @param nullsAsEmpty if nulls should be written as blank instead of '(null)' * @param columns a list of columns to include in the export * @throws IOException if the target file cannot be written */ @ScriptApi public static void writeCsv(Table source, BufferedWriter out, DBTimeZone timeZone, - @Nullable Procedure.Binary progress, boolean nullsAsEmpty, String... columns) - throws IOException { + @Nullable Procedure.Binary progress, boolean nullsAsEmpty, String... 
columns) + throws IOException { writeCsv(source, out, timeZone, progress, nullsAsEmpty, ',', columns); } @@ -127,8 +125,8 @@ public static void writeCsv(Table source, BufferedWriter out, DBTimeZone timeZon * @param source a Deephaven table object to be exported * @param out BufferedWriter used to write the CSV * @param timeZone a DBTimeZone constant relative to which DBDateTime data should be adjusted - * @param progress a procedure that implements Procedure.Binary, and takes a progress Integer - * and a total size Integer to update progress + * @param progress a procedure that implements Procedure.Binary, and takes a progress Integer and a total size + * Integer to update progress * @param nullsAsEmpty if nulls should be written as blank instead of '(null)' * @param separator the delimiter for the CSV * @param columns a list of columns to include in the export @@ -136,8 +134,8 @@ public static void writeCsv(Table source, BufferedWriter out, DBTimeZone timeZon */ @ScriptApi public static void writeCsv(Table source, BufferedWriter out, DBTimeZone timeZone, - @Nullable Procedure.Binary progress, boolean nullsAsEmpty, char separator, - String... columns) throws IOException { + @Nullable Procedure.Binary progress, boolean nullsAsEmpty, char separator, String... columns) + throws IOException { if (columns == null || columns.length == 0) { List columnNames = source.getDefinition().getColumnNames(); @@ -145,8 +143,7 @@ public static void writeCsv(Table source, BufferedWriter out, DBTimeZone timeZon } CsvHelpers.writeCsvHeader(out, separator, columns); - CsvHelpers.writeCsvContents(source, out, timeZone, progress, nullsAsEmpty, separator, - columns); + CsvHelpers.writeCsvContents(source, out, timeZone, progress, nullsAsEmpty, separator, columns); out.close(); } @@ -172,8 +169,7 @@ public static void writeCsvHeader(BufferedWriter out, String... 
columns) throws * @throws IOException if the BufferedWriter cannot be written to */ @ScriptApi - public static void writeCsvHeader(BufferedWriter out, char separator, String... columns) - throws IOException { + public static void writeCsvHeader(BufferedWriter out, char separator, String... columns) throws IOException { for (int i = 0; i < columns.length; i++) { String column = columns[i]; if (i > 0) { @@ -184,40 +180,36 @@ public static void writeCsvHeader(BufferedWriter out, char separator, String... } /** - * Writes a Deephaven table to one or more files, splitting it based on the MAX_CSV_LINE_COUNT - * setting. + * Writes a Deephaven table to one or more files, splitting it based on the MAX_CSV_LINE_COUNT setting. * * @param source a Deephaven table to be exported * @param destPath the path in which the CSV file(s) should be written - * @param filename the base file name to use for the files. A dash and starting line number will - * be concatenated to each file. + * @param filename the base file name to use for the files. A dash and starting line number will be concatenated to + * each file. * @throws IOException if the destination files cannot be written */ @ScriptApi - public static void writeCsvPaginate(Table source, String destPath, String filename) - throws IOException { + public static void writeCsvPaginate(Table source, String destPath, String filename) throws IOException { writeCsvPaginate(source, destPath, filename, false); } /** - * Writes a Deephaven table to one or more files, splitting it based on the MAX_CSV_LINE_COUNT - * setting. + * Writes a Deephaven table to one or more files, splitting it based on the MAX_CSV_LINE_COUNT setting. * * @param source a Deephaven table to be exported * @param destPath the path in which the CSV file(s) should be written - * @param filename the base file name to use for the files. A dash and starting line number will - * be concatenated to each file. + * @param filename the base file name to use for the files. 
A dash and starting line number will be concatenated to + * each file. * @param nullsAsEmpty if nulls should be written as blank instead of '(null)' * @throws IOException if the destination files cannot be written */ @ScriptApi - public static void writeCsvPaginate(Table source, String destPath, String filename, - boolean nullsAsEmpty) throws IOException { + public static void writeCsvPaginate(Table source, String destPath, String filename, boolean nullsAsEmpty) + throws IOException { long fileCount = source.size() / MAX_CSV_LINE_COUNT; if (fileCount > 0) { for (long i = 0; i <= fileCount; i++) { - writeToMultipleFiles(source, destPath, filename, i * MAX_CSV_LINE_COUNT, - nullsAsEmpty); + writeToMultipleFiles(source, destPath, filename, i * MAX_CSV_LINE_COUNT, nullsAsEmpty); } } else { TableTools.writeCsv(source, destPath + filename + ".csv", nullsAsEmpty); @@ -229,15 +221,14 @@ public static void writeCsvPaginate(Table source, String destPath, String filena * * @param table a Deephaven table from which rows should be exported * @param path the destination path in which the output CSV file should be created - * @param filename the base file name to which a dash and starting line number will be - * concatenated for the file - * @param startLine the starting line number from the table to export; the ending line number - * will be startLine + MAX_CSV_LINE_COUNT-1, or the end of the table + * @param filename the base file name to which a dash and starting line number will be concatenated for the file + * @param startLine the starting line number from the table to export; the ending line number will be startLine + + * MAX_CSV_LINE_COUNT-1, or the end of the table * @throws IOException if the destination file cannot be written */ @ScriptApi - public static void writeToMultipleFiles(Table table, String path, String filename, - long startLine) throws IOException { + public static void writeToMultipleFiles(Table table, String path, String filename, long startLine) + throws 
IOException { writeToMultipleFiles(table, path, filename, startLine, false); } @@ -246,18 +237,16 @@ public static void writeToMultipleFiles(Table table, String path, String filenam * * @param table a Deephaven table from which rows should be exported * @param path the destination path in which the output CSV file should be created - * @param filename the base file name to which a dash and starting line number will be - * concatenated for the file - * @param startLine the starting line number from the table to export; the ending line number - * will be startLine + MAX_CSV_LINE_COUNT-1, or the end of the table + * @param filename the base file name to which a dash and starting line number will be concatenated for the file + * @param startLine the starting line number from the table to export; the ending line number will be startLine + + * MAX_CSV_LINE_COUNT-1, or the end of the table * @param nullsAsEmpty if nulls should be written as blank instead of '(null)' * @throws IOException if the destination file cannot be written */ @ScriptApi - public static void writeToMultipleFiles(Table table, String path, String filename, - long startLine, boolean nullsAsEmpty) throws IOException { - Table part = table - .getSubTable(table.getIndex().subindexByPos(startLine, startLine + MAX_CSV_LINE_COUNT)); + public static void writeToMultipleFiles(Table table, String path, String filename, long startLine, + boolean nullsAsEmpty) throws IOException { + Table part = table.getSubTable(table.getIndex().subindexByPos(startLine, startLine + MAX_CSV_LINE_COUNT)); String partFilename = path + filename + "-" + startLine + ".csv"; TableTools.writeCsv(part, partFilename, nullsAsEmpty); } @@ -272,8 +261,8 @@ public static void writeToMultipleFiles(Table table, String path, String filenam * @throws IOException if the target file cannot be written */ @ScriptApi - public static void writeCsvContents(Table source, BufferedWriter out, DBTimeZone timeZone, - String... 
colNames) throws IOException { + public static void writeCsvContents(Table source, BufferedWriter out, DBTimeZone timeZone, String... colNames) + throws IOException { writeCsvContents(source, out, timeZone, null, colNames); } @@ -288,8 +277,8 @@ public static void writeCsvContents(Table source, BufferedWriter out, DBTimeZone * @throws IOException if the target file cannot be written */ @ScriptApi - public static void writeCsvContents(Table source, BufferedWriter out, DBTimeZone timeZone, - boolean nullsAsEmpty, String... colNames) throws IOException { + public static void writeCsvContents(Table source, BufferedWriter out, DBTimeZone timeZone, boolean nullsAsEmpty, + String... colNames) throws IOException { writeCsvContents(source, out, timeZone, null, nullsAsEmpty, colNames); } @@ -299,14 +288,14 @@ public static void writeCsvContents(Table source, BufferedWriter out, DBTimeZone * @param source a Deephaven table object to be exported * @param out a BufferedWriter to which the header should be written * @param timeZone a DBTimeZone constant relative to which DBDateTime data should be adjusted - * @param progress a procedure that implements Procedure.Binary, and takes a progress Integer - * and a total size Integer to update progress + * @param progress a procedure that implements Procedure.Binary, and takes a progress Integer and a total size + * Integer to update progress * @param colNames a list of columns to include in the export * @throws IOException if the target file cannot be written */ @ScriptApi public static void writeCsvContents(Table source, BufferedWriter out, DBTimeZone timeZone, - @Nullable Procedure.Binary progress, String... colNames) throws IOException { + @Nullable Procedure.Binary progress, String... 
colNames) throws IOException { writeCsvContents(source, out, timeZone, progress, false, colNames); } @@ -316,16 +305,16 @@ public static void writeCsvContents(Table source, BufferedWriter out, DBTimeZone * @param source a Deephaven table object to be exported * @param out a BufferedWriter to which the header should be written * @param timeZone a DBTimeZone constant relative to which DBDateTime data should be adjusted - * @param progress a procedure that implements Procedure.Binary, and takes a progress Integer - * and a total size Integer to update progress + * @param progress a procedure that implements Procedure.Binary, and takes a progress Integer and a total size + * Integer to update progress * @param nullsAsEmpty if nulls should be written as blank instead of '(null)' * @param colNames a list of columns to include in the export * @throws IOException if the target file cannot be written */ @ScriptApi public static void writeCsvContents(Table source, BufferedWriter out, DBTimeZone timeZone, - @Nullable Procedure.Binary progress, boolean nullsAsEmpty, String... colNames) - throws IOException { + @Nullable Procedure.Binary progress, boolean nullsAsEmpty, String... 
colNames) + throws IOException { writeCsvContents(source, out, timeZone, progress, nullsAsEmpty, ',', colNames); } @@ -335,8 +324,8 @@ public static void writeCsvContents(Table source, BufferedWriter out, DBTimeZone * @param source a Deephaven table object to be exported * @param out a BufferedWriter to which the header should be written * @param timeZone a DBTimeZone constant relative to which DBDateTime data should be adjusted - * @param progress a procedure that implements Procedure.Binary, and takes a progress Integer - * and a total size Integer to update progress + * @param progress a procedure that implements Procedure.Binary, and takes a progress Integer and a total size + * Integer to update progress * @param nullsAsEmpty if nulls should be written as blank instead of '(null)' * @param separator the delimiter for the CSV * @param colNames a list of columns to include in the export @@ -344,8 +333,8 @@ public static void writeCsvContents(Table source, BufferedWriter out, DBTimeZone */ @ScriptApi public static void writeCsvContents(Table source, BufferedWriter out, DBTimeZone timeZone, - @Nullable Procedure.Binary progress, boolean nullsAsEmpty, char separator, - String... colNames) throws IOException { + @Nullable Procedure.Binary progress, boolean nullsAsEmpty, char separator, String... colNames) + throws IOException { if (colNames.length == 0) { return; } @@ -358,13 +347,12 @@ public static void writeCsvContents(Table source, BufferedWriter out, DBTimeZone } /** - * Returns a String value for a CSV column's value. This String will be enclosed in double - * quotes if the value includes a double quote, a newline, or the separator. + * Returns a String value for a CSV column's value. This String will be enclosed in double quotes if the value + * includes a double quote, a newline, or the separator. 
* * @param str the String to be escaped * @param separator the delimiter for the CSV - * @return the input String, enclosed in double quotes if the value contains a comma, newline or - * double quote + * @return the input String, enclosed in double quotes if the value contains a comma, newline or double quote */ protected static String separatorCsvEscape(String str, String separator) { if (str.contains("\"") || str.contains("\n") || str.contains(separator)) { @@ -383,20 +371,20 @@ protected static String separatorCsvEscape(String str, String separator) { * @param size the size of the DataColumns * @param nullsAsEmpty if nulls should be written as blank instead of '(null)' * @param separator the delimiter for the CSV - * @param progress a procedure that implements Procedure.Binary, and takes a progress Integer - * and a total size Integer to update progress + * @param progress a procedure that implements Procedure.Binary, and takes a progress Integer and a total size + * Integer to update progress * @throws IOException if the target file cannot be written */ private static void writeCsvContentsSeq( - final BufferedWriter out, - final DBTimeZone timeZone, - final DataColumn[] cols, - final long size, - final boolean nullsAsEmpty, - final char separator, - @Nullable Procedure.Binary progress) throws IOException { + final BufferedWriter out, + final DBTimeZone timeZone, + final DataColumn[] cols, + final long size, + final boolean nullsAsEmpty, + final char separator, + @Nullable Procedure.Binary progress) throws IOException { QueryPerformanceNugget nugget = - QueryPerformanceRecorder.getInstance().getNugget("TableTools.writeCsvContentsSeq()"); + QueryPerformanceRecorder.getInstance().getNugget("TableTools.writeCsvContentsSeq()"); try { String separatorStr = String.valueOf(separator); for (long i = 0; i < size; i++) { @@ -410,12 +398,10 @@ private static void writeCsvContentsSeq( if (o instanceof String) { out.write("" + separatorCsvEscape((String) o, separatorStr)); } 
else if (o instanceof DBDateTime) { - out.write( - separatorCsvEscape(((DBDateTime) o).toString(timeZone), separatorStr)); + out.write(separatorCsvEscape(((DBDateTime) o).toString(timeZone), separatorStr)); } else { - out.write(nullsAsEmpty - ? separatorCsvEscape(TableTools.nullToEmptyString(o), separatorStr) - : separatorCsvEscape(TableTools.nullToNullString(o), separatorStr)); + out.write(nullsAsEmpty ? separatorCsvEscape(TableTools.nullToEmptyString(o), separatorStr) + : separatorCsvEscape(TableTools.nullToNullString(o), separatorStr)); } } if (progress != null) { @@ -428,19 +414,16 @@ private static void writeCsvContentsSeq( } /** - * Return the provided {@link StatusCallback} if provided, otherwise create a new one and return - * it. + * Return the provided {@link StatusCallback} if provided, otherwise create a new one and return it. * * @param progress use this if it is not null - * @param withLog whether to create a StatusCallback that will annotate progress updates to the - * current log + * @param withLog whether to create a StatusCallback that will annotate progress updates to the current log * @return a valid StatusCallback. */ private static StatusCallback checkStatusCallback(StatusCallback progress, boolean withLog) { if (progress == null) { if (withLog) { - return new ProgressLogger(new MinProcessStatus(), - ProcessEnvironment.get().getLog()); + return new ProgressLogger(new MinProcessStatus(), ProcessEnvironment.get().getLog()); } else { return new MinProcessStatus(); } @@ -449,12 +432,12 @@ private static StatusCallback checkStatusCallback(StatusCallback progress, boole } /** - * Returns a memory table created by importing CSV data. The first row must be column names. - * Column data types are inferred from the data. + * Returns a memory table created by importing CSV data. The first row must be column names. Column data types are + * inferred from the data. * * @param is an InputStream providing access to the CSV data. 
- * @param format an Apache Commons CSV format name to be used to parse the CSV, or a single - * non-newline character to use as a delimiter. + * @param format an Apache Commons CSV format name to be used to parse the CSV, or a single non-newline character to + * use as a delimiter. * @return a Deephaven Table object * @throws IOException if the InputStream cannot be read */ @@ -464,48 +447,47 @@ public static Table readCsv(InputStream is, final String format) throws IOExcept } /** - * Returns a memory table created by importing CSV data. The first row must be column names. - * Column data types are inferred from the data. + * Returns a memory table created by importing CSV data. The first row must be column names. Column data types are + * inferred from the data. * * @param is an InputStream providing access to the CSV data. - * @param format an Apache Commons CSV format name to be used to parse the CSV, or a single - * non-newline character to use as a delimiter. - * @param progress a StatusCallback object that can be used to log progress details or update a - * progress bar. If passed explicitly as null, a StatusCallback instance will be created - * to log progress to the current logger. + * @param format an Apache Commons CSV format name to be used to parse the CSV, or a single non-newline character to + * use as a delimiter. + * @param progress a StatusCallback object that can be used to log progress details or update a progress bar. If + * passed explicitly as null, a StatusCallback instance will be created to log progress to the current + * logger. 
* @return a Deephaven Table object * @throws IOException if the InputStream cannot be read */ @ScriptApi static Table readHeaderlessCsv(InputStream is, final String format, StatusCallback progress, - Collection header) throws IOException { + Collection header) throws IOException { final StatusCallback lProgress = checkStatusCallback(progress, true); return readCsvInternal(is, format, false, lProgress, true, header); } /** - * Returns a memory table created by importing CSV data. The first row must be column names. - * Column data types are inferred from the data. + * Returns a memory table created by importing CSV data. The first row must be column names. Column data types are + * inferred from the data. * * @param is an InputStream providing access to the CSV data. - * @param format an Apache Commons CSV format name to be used to parse the CSV, or a single - * non-newline character to use as a delimiter. - * @param progress a StatusCallback object that can be used to log progress details or update a - * progress bar. If passed explicitly as null, a StatusCallback instance will be created - * to log progress to the current logger. + * @param format an Apache Commons CSV format name to be used to parse the CSV, or a single non-newline character to + * use as a delimiter. + * @param progress a StatusCallback object that can be used to log progress details or update a progress bar. If + * passed explicitly as null, a StatusCallback instance will be created to log progress to the current + * logger. 
* @return a Deephaven Table object * @throws IOException if the InputStream cannot be read */ @ScriptApi - public static Table readCsv(InputStream is, final String format, StatusCallback progress) - throws IOException { + public static Table readCsv(InputStream is, final String format, StatusCallback progress) throws IOException { final StatusCallback lProgress = checkStatusCallback(progress, true); return readCsvInternal(is, format, false, lProgress); } /** - * Returns a memory table created by importing CSV data. The first row must be column names. - * Column data types are inferred from the data. + * Returns a memory table created by importing CSV data. The first row must be column names. Column data types are + * inferred from the data. * * @param is an InputStream providing access to the CSV data. * @param separator a char to use as the delimiter value when parsing the file. @@ -514,32 +496,30 @@ public static Table readCsv(InputStream is, final String format, StatusCallback */ @ScriptApi public static Table readCsv(InputStream is, final char separator) throws IOException { - return readCsvInternal(is, String.valueOf(separator), false, - checkStatusCallback(null, false)); + return readCsvInternal(is, String.valueOf(separator), false, checkStatusCallback(null, false)); } /** - * Returns a memory table created by importing CSV data. The first row must be column names. - * Column data types are inferred from the data. + * Returns a memory table created by importing CSV data. The first row must be column names. Column data types are + * inferred from the data. * * @param is an InputStream providing access to the CSV data. * @param separator a char to use as the delimiter value when parsing the file. - * @param progress a StatusCallback object that can be used to log progress details or update a - * progress bar. If passed explicitly as null, a StatusCallback instance will be created - * to log progress to the current logger. 
+ * @param progress a StatusCallback object that can be used to log progress details or update a progress bar. If + * passed explicitly as null, a StatusCallback instance will be created to log progress to the current + * logger. * @return a Deephaven Table object * @throws IOException if the InputStream cannot be read */ @ScriptApi - public static Table readCsv(InputStream is, final char separator, StatusCallback progress) - throws IOException { + public static Table readCsv(InputStream is, final char separator, StatusCallback progress) throws IOException { final StatusCallback lProgress = checkStatusCallback(progress, true); return readCsvInternal(is, String.valueOf(separator), false, lProgress); } /** - * Returns a memory table created by importing CSV data. The first row must be column names. - * Column data types are inferred from the data. + * Returns a memory table created by importing CSV data. The first row must be column names. Column data types are + * inferred from the data. * * @param is an InputStream providing access to the CSV data. * @param separator a char to use as the delimiter value when parsing the file. @@ -548,13 +528,12 @@ public static Table readCsv(InputStream is, final char separator, StatusCallback */ @ScriptApi public static QueryTable readCsv2(InputStream is, final char separator) throws IOException { - return (QueryTable) readCsvInternal(is, String.valueOf(separator), true, - checkStatusCallback(null, false)); + return (QueryTable) readCsvInternal(is, String.valueOf(separator), true, checkStatusCallback(null, false)); } /** - * Returns a memory table created by importing CSV data. The first row must be column names. - * Column data types are inferred from the data. + * Returns a memory table created by importing CSV data. The first row must be column names. Column data types are + * inferred from the data. * * @param is an InputStream providing access to the CSV data. 
* @return a Deephaven Table object @@ -566,13 +545,13 @@ public static Table readCsv(InputStream is) throws IOException { } /** - * Returns a memory table created by importing CSV data. The first row must be column names. - * Column data types are inferred from the data. + * Returns a memory table created by importing CSV data. The first row must be column names. Column data types are + * inferred from the data. * * @param is an InputStream providing access to the CSV data. - * @param progress a StatusCallback object that can be used to log progress details or update a - * progress bar. If passed explicitly as null, a StatusCallback instance will be created - * to log progress to the current logger. + * @param progress a StatusCallback object that can be used to log progress details or update a progress bar. If + * passed explicitly as null, a StatusCallback instance will be created to log progress to the current + * logger. * @return a Deephaven Table object * @throws IOException if the InputStream cannot be read */ @@ -583,8 +562,8 @@ public static Table readCsv(InputStream is, StatusCallback progress) throws IOEx } /** - * Returns a memory table created by importing CSV data. The first row must be column names. - * Column data types are inferred from the data. + * Returns a memory table created by importing CSV data. The first row must be column names. Column data types are + * inferred from the data. * * @param is an InputStream providing access to the CSV data. * @return a Deephaven QueryTable object @@ -597,46 +576,45 @@ public static QueryTable readCsv2(InputStream is) throws IOException { } /** - * Does the work of creating a memory table by importing CSV data. The first row must be column - * names. Column data types are inferred from the data. + * Does the work of creating a memory table by importing CSV data. The first row must be column names. Column data + * types are inferred from the data. 
* * @param is an InputStream providing access to the CSV data. - * @param format an Apache Commons CSV format name to be used to parse the CSV, or a single - * non-newline character to use as a delimiter. - * @param v2 whether the process the import using the older QueryTable processing (v2 = true) or - * the newer InMemoryTable processing (v2 = false). - * @param progress a StatusCallback object that can be used to log progress details or update a - * progress bar. If passed explicitly as null, a StatusCallback instance will be created - * to log progress to the current logger. + * @param format an Apache Commons CSV format name to be used to parse the CSV, or a single non-newline character to + * use as a delimiter. + * @param v2 whether the process the import using the older QueryTable processing (v2 = true) or the newer + * InMemoryTable processing (v2 = false). + * @param progress a StatusCallback object that can be used to log progress details or update a progress bar. If + * passed explicitly as null, a StatusCallback instance will be created to log progress to the current + * logger. * @return a Deephaven Table object * @throws IOException if the InputStream cannot be read */ - private static Table readCsvInternal(InputStream is, String format, boolean v2, - StatusCallback progress) throws IOException { + private static Table readCsvInternal(InputStream is, String format, boolean v2, StatusCallback progress) + throws IOException { return readCsvInternal(is, format, v2, progress, false, null); } /** - * Does the work of creating a memory table by importing CSV data. The first row must be column - * names. Column data types are inferred from the data. + * Does the work of creating a memory table by importing CSV data. The first row must be column names. Column data + * types are inferred from the data. * * @param is an InputStream providing access to the CSV data. 
- * @param format an Apache Commons CSV format name to be used to parse the CSV, or a single - * non-newline character to use as a delimiter. - * @param v2 whether the process the import using the older QueryTable processing (v2 = true) or - * the newer InMemoryTable processing (v2 = false). - * @param progress a StatusCallback object that can be used to log progress details or update a - * progress bar. If passed explicitly as null, a StatusCallback instance will be created - * to log progress to the current logger. + * @param format an Apache Commons CSV format name to be used to parse the CSV, or a single non-newline character to + * use as a delimiter. + * @param v2 whether the process the import using the older QueryTable processing (v2 = true) or the newer + * InMemoryTable processing (v2 = false). + * @param progress a StatusCallback object that can be used to log progress details or update a progress bar. If + * passed explicitly as null, a StatusCallback instance will be created to log progress to the current + * logger. * @param noHeader True when the CSV does not have a header row. * @param header Column names to use as, or instead of, the header row for the CSV. * @return a Deephaven Table object * @throws IOException if the InputStream cannot be read */ - private static Table readCsvInternal(InputStream is, String format, boolean v2, - StatusCallback progress, boolean noHeader, @Nullable Collection header) - throws IOException { + private static Table readCsvInternal(InputStream is, String format, boolean v2, StatusCallback progress, + boolean noHeader, @Nullable Collection header) throws IOException { final char separator; final InputStreamReader fileReader = new InputStreamReader(is); @@ -655,8 +633,8 @@ private static Table readCsvInternal(InputStream is, String format, boolean v2, } } - final CSVFormat parseFormat = CsvParserFormat.getCsvFormat(format, separator, - (format.equals("TRIM")), noHeader, header == null ? 
null : new ArrayList<>(header)); + final CSVFormat parseFormat = CsvParserFormat.getCsvFormat(format, separator, (format.equals("TRIM")), noHeader, + header == null ? null : new ArrayList<>(header)); final CSVParser parser = new CSVParser(fileReader, parseFormat); lProgress.update(10, "Reading column names from CSV."); @@ -674,28 +652,24 @@ private static Table readCsvInternal(InputStream is, String format, boolean v2, final int colCount; if (csvData.size() == 0) { if (noHeader && header == null) { - throw new RuntimeException( - "There was no header provided and there were no records found in the CSV."); + throw new RuntimeException("There was no header provided and there were no records found in the CSV."); } colCount = 0; } else { try { colCount = csvData.get(0).size(); } catch (Exception e) { - throw new RuntimeException( - "Failed to get number of columns from first record of CSV.", e); + throw new RuntimeException("Failed to get number of columns from first record of CSV.", e); } } /* - * Validate provided header: The parser will fail to read if there are more column headers - * than data columns, but having less headers than data columns is perfectly valid, and - * there will be cases where columns are variable, but there is a guaranteed left-subset, or - * a user is only interested in a left-subset of the data. + * Validate provided header: The parser will fail to read if there are more column headers than data columns, + * but having less headers than data columns is perfectly valid, and there will be cases where columns are + * variable, but there is a guaranteed left-subset, or a user is only interested in a left-subset of the data. 
*/ if (header != null && columnNames.length > colCount && csvData.size() > 0) { - throw new RuntimeException( - "More column names provided in the header (" + columnNames.length + throw new RuntimeException("More column names provided in the header (" + columnNames.length + ") than exist in the first record of the CSV (" + colCount + ")."); } @@ -711,21 +685,19 @@ private static Table readCsvInternal(InputStream is, String format, boolean v2, for (int col = 0; col < columnNames.length; col++) { lProgress.update(20 + (col + 1) * 70 / columnNames.length, - "Parsing CSV column " + (col + 1) + " of " + columnNames.length + "."); + "Parsing CSV column " + (col + 1) + " of " + columnNames.length + "."); columnData[col] = parseColumn(csvData, numRows, col, initialLineNumber); } HashSet taken = new HashSet<>(); for (int i = 0; i < columnNames.length; i++) { - // The Apache parser does not allow duplicate column names, including blank/null, but it - // will allow one blank/null. - // Replace a single blank/null column name with the first unique value based on - // "Column1." + // The Apache parser does not allow duplicate column names, including blank/null, but it will allow one + // blank/null. + // Replace a single blank/null column name with the first unique value based on "Column1." 
if (columnNames[i] == null || columnNames[i].isEmpty()) { columnNames[i] = "Column" + (i + 1); } - columnNames[i] = - legalizeColumnName(columnNames[i], (s) -> s.replaceAll("[- ]", "_"), taken); + columnNames[i] = legalizeColumnName(columnNames[i], (s) -> s.replaceAll("[- ]", "_"), taken); taken.add(columnNames[i]); } @@ -733,9 +705,8 @@ private static Table readCsvInternal(InputStream is, String format, boolean v2, Map columnSources = new LinkedHashMap<>(); for (int ii = 0; ii < columnNames.length; ii++) { lProgress.update(90 + (ii + 1) * 10 / columnNames.length, - "Mapping CSV column " + (ii + 1) + " of " + columnNames.length + " to table."); - ColumnSource arrayBackedSource = - ArrayBackedColumnSource.getMemoryColumnSourceUntyped(columnData[ii]); + "Mapping CSV column " + (ii + 1) + " of " + columnNames.length + " to table."); + ColumnSource arrayBackedSource = ArrayBackedColumnSource.getMemoryColumnSourceUntyped(columnData[ii]); columnSources.put(columnNames[ii], arrayBackedSource); } lProgress.finish(""); @@ -747,19 +718,18 @@ private static Table readCsvInternal(InputStream is, String format, boolean v2, } /** - * Returns a column of data, and inspects the data read from a CSV to determine what data type - * would best fit the column. + * Returns a column of data, and inspects the data read from a CSV to determine what data type would best fit the + * column. * - * @param csvData a List of CSVRecords from the Apache Commons CSV parser used to read the CSV - * file + * @param csvData a List of CSVRecords from the Apache Commons CSV parser used to read the CSV file * @param numRows how many rows to read from the List * @param col which column from each record should be read - * @param initialLineNumber initial line number in the source file from which the data was read - * (i.e. 1 if there was a header, 0 if not) + * @param initialLineNumber initial line number in the source file from which the data was read (i.e. 
1 if there was + * a header, 0 if not) * @return an object representing an array of values from the column that was read */ - private static Object parseColumn(List csvData, int numRows, int col, - long initialLineNumber) throws IOException { + private static Object parseColumn(List csvData, int numRows, int col, long initialLineNumber) + throws IOException { Boolean isInteger = null; Boolean isLong = null; Boolean isDouble = null; @@ -770,8 +740,7 @@ private static Object parseColumn(List csvData, int numRows, int col, long lineNumber = initialLineNumber; for (CSVRecord line : csvData) { if (col >= line.size()) { - throw new IOException( - "Error parsing column " + (col + 1) + " on line " + (lineNumber + 1) + + throw new IOException("Error parsing column " + (col + 1) + " on line " + (lineNumber + 1) + " - line only has " + line.size() + " columns."); } @@ -812,8 +781,7 @@ private static Object parseColumn(List csvData, int numRows, int col, } if (isBoolean == null || isBoolean) { - isBoolean = "true".equalsIgnoreCase(line.get(col)) - || "false".equalsIgnoreCase(line.get(col)); + isBoolean = "true".equalsIgnoreCase(line.get(col)) || "false".equalsIgnoreCase(line.get(col)); } if (isDateTime == null || isDateTime) { @@ -821,8 +789,7 @@ private static Object parseColumn(List csvData, int numRows, int col, } if (isLocalTime == null || isLocalTime) { - isLocalTime = DBTimeUtils - .convertTimeQuiet(value) != io.deephaven.util.QueryConstants.NULL_LONG; + isLocalTime = DBTimeUtils.convertTimeQuiet(value) != io.deephaven.util.QueryConstants.NULL_LONG; } lineNumber++; @@ -834,8 +801,7 @@ private static Object parseColumn(List csvData, int numRows, int col, for (int row = 0; row < numRows; row++) { String value = csvData.get(row).get(col); - data[row] = isNull(value) ? io.deephaven.util.QueryConstants.NULL_INT - : Integer.parseInt(value); + data[row] = isNull(value) ? 
io.deephaven.util.QueryConstants.NULL_INT : Integer.parseInt(value); } return data; @@ -845,8 +811,7 @@ private static Object parseColumn(List csvData, int numRows, int col, for (int row = 0; row < numRows; row++) { String value = csvData.get(row).get(col); - data[row] = isNull(value) ? io.deephaven.util.QueryConstants.NULL_LONG - : Long.parseLong(value); + data[row] = isNull(value) ? io.deephaven.util.QueryConstants.NULL_LONG : Long.parseLong(value); } return data; @@ -856,8 +821,7 @@ private static Object parseColumn(List csvData, int numRows, int col, for (int row = 0; row < numRows; row++) { String value = csvData.get(row).get(col); - data[row] = isNull(value) ? io.deephaven.util.QueryConstants.NULL_DOUBLE - : Double.parseDouble(value); + data[row] = isNull(value) ? io.deephaven.util.QueryConstants.NULL_DOUBLE : Double.parseDouble(value); } return data; diff --git a/DB/src/main/java/io/deephaven/db/tables/utils/CsvParserFormat.java b/DB/src/main/java/io/deephaven/db/tables/utils/CsvParserFormat.java index b49d16c1a65..ea9886a34e3 100644 --- a/DB/src/main/java/io/deephaven/db/tables/utils/CsvParserFormat.java +++ b/DB/src/main/java/io/deephaven/db/tables/utils/CsvParserFormat.java @@ -19,8 +19,8 @@ public class CsvParserFormat { public static final String FORMAT_BPIPE = "BPIPE"; public static String[] getFormatNames() { - return new String[] {FORMAT_DEFAULT, FORMAT_TRIM, FORMAT_EXCEL, FORMAT_TDF, FORMAT_MYSQL, - FORMAT_RFC4180, FORMAT_BPIPE}; + return new String[] {FORMAT_DEFAULT, FORMAT_TRIM, FORMAT_EXCEL, FORMAT_TDF, FORMAT_MYSQL, FORMAT_RFC4180, + FORMAT_BPIPE}; } /** @@ -32,7 +32,7 @@ public static String[] getFormatNames() { * @throws RuntimeException if fileFormat is unrecognized */ private static CSVFormat getCsvFormat(@NotNull final String fileFormat, final boolean noHeader, - final List columnNames) { + final List columnNames) { CSVFormat result; switch (fileFormat) { case FORMAT_TRIM: @@ -68,25 +68,23 @@ private static CSVFormat getCsvFormat(@NotNull 
final String fileFormat, final bo * Returns an Apache CSVFormat based on the fileFormat String. * * @param fileFormat The string for which a format should be created - * @param delimiter A single character to use as a delimiter - comma when format will be - * controlled by fileFormat + * @param delimiter A single character to use as a delimiter - comma when format will be controlled by fileFormat * @param trim Whether to trim white space within delimiters * @param noHeader Indicates when the CSV does not include a row of column names * @param columnNames A List of column names to use as a header * @return A CSVFormat object matching the passed String and trim option * @throws RuntimeException if fileFormat is unrecognized */ - public static CSVFormat getCsvFormat(final String fileFormat, final char delimiter, - final boolean trim, final boolean noHeader, @Nullable final List columnNames) { + public static CSVFormat getCsvFormat(final String fileFormat, final char delimiter, final boolean trim, + final boolean noHeader, @Nullable final List columnNames) { // First figure out the CSVFormat final CSVFormat csvFormat; if (fileFormat == null || delimiter != ',') { if (!noHeader) { - csvFormat = CSVFormat.newFormat(delimiter).withTrim(trim).withFirstRecordAsHeader() - .withQuote('"'); + csvFormat = CSVFormat.newFormat(delimiter).withTrim(trim).withFirstRecordAsHeader().withQuote('"'); } else if (columnNames != null && columnNames.size() > 0) { - csvFormat = CSVFormat.newFormat(delimiter).withTrim(trim) - .withHeader(columnNames.toArray(new String[0])).withQuote('"'); + csvFormat = CSVFormat.newFormat(delimiter).withTrim(trim).withHeader(columnNames.toArray(new String[0])) + .withQuote('"'); } else { csvFormat = CSVFormat.newFormat(delimiter).withTrim(trim).withQuote('"'); } diff --git a/DB/src/main/java/io/deephaven/db/tables/utils/DBDateTime.java b/DB/src/main/java/io/deephaven/db/tables/utils/DBDateTime.java index 4dc86790bd2..1154c3c3e67 100644 --- 
a/DB/src/main/java/io/deephaven/db/tables/utils/DBDateTime.java +++ b/DB/src/main/java/io/deephaven/db/tables/utils/DBDateTime.java @@ -29,10 +29,9 @@ * An object representing a timepoint in Deephaven. * *

    - * The DBDateTime object is a timepoint, that is a precise instance in time without respect to - * timezones. The timepoint is stored as a signed 64-bit long nanoseconds since the epoch (January - * 1, 1970, 00:00:00 GMT). This provides a range from 1677-09-21T00:12:43.146-775807 UTC to - * 2262-04-11T23:47:16.854775807 UTC. The minimum long value is reserved for + * The DBDateTime object is a timepoint, that is a precise instance in time without respect to timezones. The timepoint + * is stored as a signed 64-bit long nanoseconds since the epoch (January 1, 1970, 00:00:00 GMT). This provides a range + * from 1677-09-21T00:12:43.146-775807 UTC to 2262-04-11T23:47:16.854775807 UTC. The minimum long value is reserved for * {@link QueryConstants#NULL_LONG} and therefore is not permitted as a valid DBDateTime. *

    */ @@ -44,8 +43,7 @@ public final class DBDateTime implements Comparable, Externalizable private long nanos; - private static final DateTimeFormatter dateTimeFormat = - DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss.SSS"); + private static final DateTimeFormatter dateTimeFormat = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss.SSS"); private static final DateTimeFormatter dateFormat = DateTimeFormat.forPattern("yyyy-MM-dd"); public static DBDateTime of(Instant instant) { @@ -63,8 +61,7 @@ public DBDateTime() { * Create a new DBDatetime initialized to the current time. * *

    - * The precision of DBDateTime is nanoseconds, but the resolution of the now method is currently - * microseconds. + * The precision of DBDateTime is nanoseconds, but the resolution of the now method is currently microseconds. *

    * * @return a new DBDateTime initialized to the current time. @@ -204,15 +201,13 @@ public String toString() { */ public String toString(DBTimeZone timeZone) { return dateTimeFormat.withZone(timeZone.getTimeZone()).print(getMillis()) - + StringUtils.pad(String.valueOf(getNanosPartial()), 6, '0') + " " - + timeZone.toString().substring(3); + + StringUtils.pad(String.valueOf(getNanosPartial()), 6, '0') + " " + timeZone.toString().substring(3); } /** * Get the date represented by this DBDateTime in the default {@link DBTimeZone}. * - * @return The date (yyyy-MM-dd) represented by this {@code DBDateTime} in the default - * {@link DBTimeZone}. + * @return The date (yyyy-MM-dd) represented by this {@code DBDateTime} in the default {@link DBTimeZone}. */ public String toDateString() { return toDateString(DBTimeZone.TZ_DEFAULT); @@ -232,8 +227,7 @@ public String toDateString(DBTimeZone timeZone) { * Get the date represented by this DBDateTime in the given joda {@code DateTimeZone}. * * @param timeZone A joda DateTimeZone - * @return The date (yyyy-MM-dd) represented by this {@code DBDateTime} in the given - * {@code timeZone} + * @return The date (yyyy-MM-dd) represented by this {@code DBDateTime} in the given {@code timeZone} */ public String toDateString(DateTimeZone timeZone) { if (timeZone == null) { @@ -246,8 +240,8 @@ public String toDateString(DateTimeZone timeZone) { * Get the date represented by this DBDateTime in the time zone specified by {@code zoneId} * * @param zoneId A java time zone ID string - * @return The date (yyyy-MM-dd) represented by this {@code DBDateTime} in time zone represented - * by the given {@code zoneId} + * @return The date (yyyy-MM-dd) represented by this {@code DBDateTime} in time zone represented by the given + * {@code zoneId} */ public String toDateString(String zoneId) { return toDateString(ZoneId.of(zoneId)); @@ -257,8 +251,7 @@ public String toDateString(String zoneId) { * Get the date represented by this DBDateTime in the given 
java {@code ZoneId}. * * @param timeZone A java {@link ZoneId time zone ID}. - * @return The date (yyyy-MM-dd) represented by this {@code DBDateTime} in the given - * {@code timeZone} + * @return The date (yyyy-MM-dd) represented by this {@code DBDateTime} in the given {@code timeZone} */ public String toDateString(ZoneId timeZone) { if (timeZone == null) { diff --git a/DB/src/main/java/io/deephaven/db/tables/utils/DBDateTimeFormatter.java b/DB/src/main/java/io/deephaven/db/tables/utils/DBDateTimeFormatter.java index c4b368bd815..3740c893fe9 100644 --- a/DB/src/main/java/io/deephaven/db/tables/utils/DBDateTimeFormatter.java +++ b/DB/src/main/java/io/deephaven/db/tables/utils/DBDateTimeFormatter.java @@ -26,15 +26,15 @@ public DBDateTimeFormatter(String pattern) { } public DBDateTimeFormatter(final boolean isISO, final boolean hasDate, final boolean hasTime, - final int subsecondDigits, final boolean hasTZ) { + final int subsecondDigits, final boolean hasTZ) { this((hasDate ? "yyyy-MM-dd" : "") + (!hasDate || !hasTime ? "" : isISO ? "'T'" : " ") + - (hasTime ? "HH:mm:ss" : "") + (hasTime && subsecondDigits > 0 ? "." : "") + - (hasTime ? StringUtils.repeat("S", subsecondDigits) : "") + (hasTZ ? " %t" : "")); + (hasTime ? "HH:mm:ss" : "") + (hasTime && subsecondDigits > 0 ? "." : "") + + (hasTime ? StringUtils.repeat("S", subsecondDigits) : "") + (hasTZ ? 
" %t" : "")); } private DateTimeFormatter getFormatter(DBTimeZone tz) { return formatCache.computeIfAbsent(tz, newTz -> DateTimeFormatter - .ofPattern(pattern.replaceAll("%t", '\'' + tz.toString().substring(3) + '\''))); + .ofPattern(pattern.replaceAll("%t", '\'' + tz.toString().substring(3) + '\''))); } public String format(DBDateTime dateTime, DBTimeZone tz) { diff --git a/DB/src/main/java/io/deephaven/db/tables/utils/DBDateTimeFormatters.java b/DB/src/main/java/io/deephaven/db/tables/utils/DBDateTimeFormatters.java index c5a03e70372..0fded3d2625 100644 --- a/DB/src/main/java/io/deephaven/db/tables/utils/DBDateTimeFormatters.java +++ b/DB/src/main/java/io/deephaven/db/tables/utils/DBDateTimeFormatters.java @@ -9,35 +9,35 @@ */ public enum DBDateTimeFormatters { ISO9TZ(new DBDateTimeFormatter(true, true, true, 9, true)), ISO6TZ( - new DBDateTimeFormatter(true, true, true, 6, true)), ISO3TZ( - new DBDateTimeFormatter(true, true, true, 3, true)), ISO0TZ( - new DBDateTimeFormatter(true, true, true, 0, true)), ISO9( - new DBDateTimeFormatter(true, true, true, 9, false)), ISO6( - new DBDateTimeFormatter(true, true, true, 6, false)), ISO3( - new DBDateTimeFormatter(true, true, true, 3, false)), ISO0( - new DBDateTimeFormatter(true, true, true, 0, false)), + new DBDateTimeFormatter(true, true, true, 6, true)), ISO3TZ( + new DBDateTimeFormatter(true, true, true, 3, true)), ISO0TZ( + new DBDateTimeFormatter(true, true, true, 0, true)), ISO9( + new DBDateTimeFormatter(true, true, true, 9, false)), ISO6( + new DBDateTimeFormatter(true, true, true, 6, false)), ISO3( + new DBDateTimeFormatter(true, true, true, 3, false)), ISO0( + new DBDateTimeFormatter(true, true, true, 0, false)), NONISO9TZ(new DBDateTimeFormatter(false, true, true, 9, true)), NONISO6TZ( - new DBDateTimeFormatter(false, true, true, 6, true)), NONISO3TZ( - new DBDateTimeFormatter(false, true, true, 3, true)), NONISO0TZ( - new DBDateTimeFormatter(false, true, true, 0, true)), NONISO9( - new 
DBDateTimeFormatter(false, true, true, 9, false)), NONISO6( - new DBDateTimeFormatter(false, true, true, 6, false)), NONISO3( - new DBDateTimeFormatter(false, true, true, 3, false)), NONISO0( - new DBDateTimeFormatter(false, true, true, 0, false)), + new DBDateTimeFormatter(false, true, true, 6, true)), NONISO3TZ( + new DBDateTimeFormatter(false, true, true, 3, true)), NONISO0TZ( + new DBDateTimeFormatter(false, true, true, 0, true)), NONISO9( + new DBDateTimeFormatter(false, true, true, 9, false)), NONISO6( + new DBDateTimeFormatter(false, true, true, 6, false)), NONISO3( + new DBDateTimeFormatter(false, true, true, 3, false)), NONISO0( + new DBDateTimeFormatter(false, true, true, 0, false)), NODATE9TZ(new DBDateTimeFormatter(true, false, true, 9, true)), NODATE6TZ( - new DBDateTimeFormatter(true, false, true, 6, true)), NODATE3TZ( - new DBDateTimeFormatter(true, false, true, 3, true)), NODATE0TZ( - new DBDateTimeFormatter(true, false, true, 0, true)), NODATE9( - new DBDateTimeFormatter(true, false, true, 9, false)), NODATE6( - new DBDateTimeFormatter(true, false, true, 6, false)), NODATE3( - new DBDateTimeFormatter(true, false, true, 3, false)), NODATE0( - new DBDateTimeFormatter(true, false, true, 0, false)), + new DBDateTimeFormatter(true, false, true, 6, true)), NODATE3TZ( + new DBDateTimeFormatter(true, false, true, 3, true)), NODATE0TZ( + new DBDateTimeFormatter(true, false, true, 0, true)), NODATE9( + new DBDateTimeFormatter(true, false, true, 9, false)), NODATE6( + new DBDateTimeFormatter(true, false, true, 6, false)), NODATE3( + new DBDateTimeFormatter(true, false, true, 3, false)), NODATE0( + new DBDateTimeFormatter(true, false, true, 0, false)), DATEONLYTZ(new DBDateTimeFormatter(true, true, false, 0, true)), DATEONLY( - new DBDateTimeFormatter(true, true, false, 0, false)), - ; + new DBDateTimeFormatter(true, true, false, 0, false)), + ; private final DBDateTimeFormatter formatter; diff --git 
a/DB/src/main/java/io/deephaven/db/tables/utils/DBTimeUtils.java b/DB/src/main/java/io/deephaven/db/tables/utils/DBTimeUtils.java index 5183a49f2d3..cb465b32655 100644 --- a/DB/src/main/java/io/deephaven/db/tables/utils/DBTimeUtils.java +++ b/DB/src/main/java/io/deephaven/db/tables/utils/DBTimeUtils.java @@ -39,23 +39,22 @@ @SuppressWarnings("UnusedDeclaration") public class DBTimeUtils { - // The following 3 patterns support LocalDate literals. Note all LocalDate patterns must not - // have characters after + // The following 3 patterns support LocalDate literals. Note all LocalDate patterns must not have characters after // the date, to avoid confusion with DBDateTime literals. /** Matches yyyy-MM-dd. */ private static final Pattern STD_DATE_PATTERN = - Pattern.compile("^(?[0-9][0-9][0-9][0-9])-(?[0-9][0-9])-(?[0-9][0-9])$"); + Pattern.compile("^(?[0-9][0-9][0-9][0-9])-(?[0-9][0-9])-(?[0-9][0-9])$"); /** Matches yyyyMMdd (consistent with ISO dates). */ private static final Pattern STD_DATE_PATTERN2 = - Pattern.compile("^(?[0-9][0-9][0-9][0-9])(?[0-9][0-9])(?[0-9][0-9])$"); + Pattern.compile("^(?[0-9][0-9][0-9][0-9])(?[0-9][0-9])(?[0-9][0-9])$"); /** - * Matches variations of month/day/year or day/month/year or year/month/day - how this is - * interpreted depends on the DBTimeUtils.dateStyle system property. + * Matches variations of month/day/year or day/month/year or year/month/day - how this is interpreted depends on the + * DBTimeUtils.dateStyle system property. */ private static final Pattern SLASH_DATE_PATTERN = - Pattern.compile( - "^(?[0-9]?[0-9](?[0-9][0-9])?)\\/(?[0-9]?[0-9])\\/(?[0-9]?[0-9](?[0-9][0-9])?)$"); + Pattern.compile( + "^(?[0-9]?[0-9](?[0-9][0-9])?)\\/(?[0-9]?[0-9])\\/(?[0-9]?[0-9](?[0-9][0-9])?)$"); /** for use when interpreting two digit years (we use Java's rules). 
*/ private static final DateTimeFormatter TWO_DIGIT_YR_FORMAT = DateTimeFormatter.ofPattern("yy"); @@ -65,34 +64,33 @@ public class DBTimeUtils { * TIME_AND_DURATION_PATTERN */ private static final Pattern LOCAL_TIME_PATTERN = - Pattern.compile("^L([0-9][0-9]):?([0-9][0-9])?:?([0-9][0-9])?(\\.([0-9]{1,9}))?"); + Pattern.compile("^L([0-9][0-9]):?([0-9][0-9])?:?([0-9][0-9])?(\\.([0-9]{1,9}))?"); // DBDateTime literals private static final Pattern DATETIME_PATTERN = Pattern.compile( - "[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9](T[0-9][0-9]?:[0-9][0-9](:[0-9][0-9])?(\\.[0-9][0-9]?[0-9]?[0-9]?[0-9]?[0-9]?[0-9]?[0-9]?[0-9]?)?)? [a-zA-Z]+"); + "[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9](T[0-9][0-9]?:[0-9][0-9](:[0-9][0-9])?(\\.[0-9][0-9]?[0-9]?[0-9]?[0-9]?[0-9]?[0-9]?[0-9]?[0-9]?)?)? [a-zA-Z]+"); private static final Pattern JIM_DATETIME_PATTERN = Pattern.compile( - "[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]T[0-9][0-9]:[0-9][0-9]:[0-9][0-9].[0-9][0-9][0-9][\\+-][0-9][0-9][0-9][0-9]"); + "[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]T[0-9][0-9]:[0-9][0-9]:[0-9][0-9].[0-9][0-9][0-9][\\+-][0-9][0-9][0-9][0-9]"); private static final Pattern JIM_MICROS_DATETIME_PATTERN = Pattern.compile( - "[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]T[0-9][0-9]:[0-9][0-9]:[0-9][0-9].[0-9][0-9][0-9][0-9][0-9][0-9][\\+-][0-9][0-9][0-9][0-9]"); + "[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]T[0-9][0-9]:[0-9][0-9]:[0-9][0-9].[0-9][0-9][0-9][0-9][0-9][0-9][\\+-][0-9][0-9][0-9][0-9]"); private static final Pattern TIME_AND_DURATION_PATTERN = Pattern.compile( - "\\-?([0-9]+T)?([0-9]+):([0-9]+)(:[0-9]+)?(\\.[0-9][0-9]?[0-9]?[0-9]?[0-9]?[0-9]?[0-9]?[0-9]?[0-9]?)?"); + "\\-?([0-9]+T)?([0-9]+):([0-9]+)(:[0-9]+)?(\\.[0-9][0-9]?[0-9]?[0-9]?[0-9]?[0-9]?[0-9]?[0-9]?[0-9]?)?"); private static final Pattern PERIOD_PATTERN = Pattern.compile( - "\\-?([0-9]+[Yy])?([0-9]+[Mm])?([0-9]+[Ww])?([0-9]+[Dd])?(T([0-9]+[Hh])?([0-9]+[Mm])?([0-9]+[Ss])?)?"); + 
"\\-?([0-9]+[Yy])?([0-9]+[Mm])?([0-9]+[Ww])?([0-9]+[Dd])?(T([0-9]+[Hh])?([0-9]+[Mm])?([0-9]+[Ss])?)?"); private static final String DATE_COLUMN_PARTITION_FORMAT_STRING = "yyyy-MM-dd"; private static final boolean ENABLE_MICROTIME_HACK = - Configuration.getInstance().getBooleanWithDefault("DBTimeUtils.enableMicrotimeHack", false); + Configuration.getInstance().getBooleanWithDefault("DBTimeUtils.enableMicrotimeHack", false); /** - * Date formatting styles for use in conversion functions such as - * {@link #convertDateQuiet(String, DateStyle)}. + * Date formatting styles for use in conversion functions such as {@link #convertDateQuiet(String, DateStyle)}. */ public enum DateStyle { MDY, DMY, YMD } - private static final DateStyle DATE_STYLE = DateStyle.valueOf(Configuration.getInstance() - .getStringWithDefault("DBTimeUtils.dateStyle", DateStyle.MDY.name())); + private static final DateStyle DATE_STYLE = DateStyle + .valueOf(Configuration.getInstance().getStringWithDefault("DBTimeUtils.dateStyle", DateStyle.MDY.name())); /** * Constant value of one second in nanoseconds. 
@@ -125,15 +123,14 @@ public enum DateStyle { public static final long YEAR = 365 * DAY; private static final Pattern CAPTURING_DATETIME_PATTERN = Pattern.compile( - "(([0-9][0-9][0-9][0-9])-([0-9][0-9])-([0-9][0-9])T?)?(([0-9][0-9]?)(?::([0-9][0-9])(?::([0-9][0-9]))?(?:\\.([0-9][0-9]?[0-9]?[0-9]?[0-9]?[0-9]?[0-9]?[0-9]?[0-9]?))?)?)?( [a-zA-Z]+)?"); + "(([0-9][0-9][0-9][0-9])-([0-9][0-9])-([0-9][0-9])T?)?(([0-9][0-9]?)(?::([0-9][0-9])(?::([0-9][0-9]))?(?:\\.([0-9][0-9]?[0-9]?[0-9]?[0-9]?[0-9]?[0-9]?[0-9]?[0-9]?))?)?)?( [a-zA-Z]+)?"); private enum DateGroupId { // Date(1), - Year(2, ChronoField.YEAR), Month(3, ChronoField.MONTH_OF_YEAR), Day(4, - ChronoField.DAY_OF_MONTH), + Year(2, ChronoField.YEAR), Month(3, ChronoField.MONTH_OF_YEAR), Day(4, ChronoField.DAY_OF_MONTH), // Tod(5), Hours(6, ChronoField.HOUR_OF_DAY), Minutes(7, ChronoField.MINUTE_OF_HOUR), Seconds(8, - ChronoField.SECOND_OF_MINUTE), Fraction(9, ChronoField.MILLI_OF_SECOND); + ChronoField.SECOND_OF_MINUTE), Fraction(9, ChronoField.MILLI_OF_SECOND); public final int id; public final ChronoField field; @@ -162,22 +159,21 @@ private enum DateGroupId { private static final double YEARS_PER_NANO = 1. / (double) YEAR; /** - * Allows setting an alternate date instead of "today" to be returned from - * {@link #currentDateNy}. This is mainly used when setting up for a replay simulation. + * Allows setting an alternate date instead of "today" to be returned from {@link #currentDateNy}. This is mainly + * used when setting up for a replay simulation. */ public static String currentDateNyOverride; /** - * Allows setting an alternate date instead of the business day before "today" to be returned - * from {@link #lastBusinessDateNy}. This is mainly used when setting up for a replay - * simulation. + * Allows setting an alternate date instead of the business day before "today" to be returned from + * {@link #lastBusinessDateNy}. This is mainly used when setting up for a replay simulation. 
*/ @SuppressWarnings("WeakerAccess") public static String lastBusinessDayNyOverride; /** - * Allows setting a custom time provider instead of actual current time. This is mainly used - * when setting up for a replay simulation. + * Allows setting a custom time provider instead of actual current time. This is mainly used when setting up for a + * replay simulation. */ public static TimeProvider timeProvider; @@ -185,8 +181,8 @@ private enum DateGroupId { * Returns milliseconds since Epoch for a {@link DBDateTime} value. * * @param dateTime The {@link DBDateTime} for which the milliseconds offset should be returned. - * @return A long value of milliseconds since Epoch, or a {@link QueryConstants#NULL_LONG} value - * if the {@link DBDateTime} is null. + * @return A long value of milliseconds since Epoch, or a {@link QueryConstants#NULL_LONG} value if the + * {@link DBDateTime} is null. */ public static long millis(DBDateTime dateTime) { if (dateTime == null) { @@ -200,8 +196,7 @@ public static long millis(DBDateTime dateTime) { * Returns nanoseconds since Epoch for a {@link DBDateTime} value. * * @param dateTime The {@link DBDateTime} for which the nanoseconds offset should be returned. - * @return A long value of nanoseconds since Epoch, or a NULL_LONG value if the - * {@link DBDateTime} is null. + * @return A long value of nanoseconds since Epoch, or a NULL_LONG value if the {@link DBDateTime} is null. */ public static long nanos(DBDateTime dateTime) { if (dateTime == null) { @@ -219,13 +214,12 @@ public static long nanos(Instant instant) { } /** - * Evaluates whether one {@link DBDateTime} value is earlier than a second {@link DBDateTime} - * value. + * Evaluates whether one {@link DBDateTime} value is earlier than a second {@link DBDateTime} value. * * @param d1 The first {@link DBDateTime} value to compare. * @param d2 The second {@link DBDateTime} value to compare. 
- * @return Boolean true if d1 is earlier than d2, false if either value is null, or if d2 is - * equal to or earlier than d1. + * @return Boolean true if d1 is earlier than d2, false if either value is null, or if d2 is equal to or earlier + * than d1. */ public static boolean isBefore(DBDateTime d1, DBDateTime d2) { if (d1 == null || d2 == null) { @@ -236,13 +230,12 @@ public static boolean isBefore(DBDateTime d1, DBDateTime d2) { } /** - * Evaluates whether one {@link DBDateTime} value is later than a second {@link DBDateTime} - * value. + * Evaluates whether one {@link DBDateTime} value is later than a second {@link DBDateTime} value. * * @param d1 The first {@link DBDateTime} value to compare. * @param d2 The second {@link DBDateTime} value to compare. - * @return Boolean true if d1 is later than d2, false if either value is null, or if d2 is equal - * to or later than d1. + * @return Boolean true if d1 is later than d2, false if either value is null, or if d2 is equal to or later than + * d1. */ public static boolean isAfter(DBDateTime d1, DBDateTime d2) { if (d1 == null || d2 == null) { @@ -257,11 +250,10 @@ public static boolean isAfter(DBDateTime d1, DBDateTime d2) { * * @param dateTime The starting {@link DBDateTime} value. * @param nanos The long number of nanoseconds to add to dateTime. - * @return a null {@link DBDateTime} if either input is null; the starting {@link DBDateTime} - * plus the specified number of nanoseconds, if the result is not too large for a - * {@link DBDateTime}; or throws a {@link DBDateTimeOverflowException - * DBDateTimeOverflowException} if the resultant value is more than max long nanoseconds - * from Epoch. 
+ * @return a null {@link DBDateTime} if either input is null; the starting {@link DBDateTime} plus the specified + * number of nanoseconds, if the result is not too large for a {@link DBDateTime}; or throws a + * {@link DBDateTimeOverflowException DBDateTimeOverflowException} if the resultant value is more than max + * long nanoseconds from Epoch. */ public static DBDateTime plus(DBDateTime dateTime, long nanos) { if (dateTime == null || nanos == io.deephaven.util.QueryConstants.NULL_LONG) { @@ -276,11 +268,10 @@ public static DBDateTime plus(DBDateTime dateTime, long nanos) { * * @param dateTime The starting {@link DBDateTime} value. * @param nanos The long number of nanoseconds to subtract from dateTime. - * @return a null {@link DBDateTime} if either input is null; the starting {@link DBDateTime} - * minus the specified number of nanoseconds, if the result is not too negative for a - * {@link DBDateTime}; or throws a {@link DBDateTimeOverflowException - * DBDateTimeOverflowException} if the resultant value is more than min long nanoseconds - * from Epoch. + * @return a null {@link DBDateTime} if either input is null; the starting {@link DBDateTime} minus the specified + * number of nanoseconds, if the result is not too negative for a {@link DBDateTime}; or throws a + * {@link DBDateTimeOverflowException DBDateTimeOverflowException} if the resultant value is more than min + * long nanoseconds from Epoch. */ public static DBDateTime minus(DBDateTime dateTime, long nanos) { if (dateTime == null || -nanos == io.deephaven.util.QueryConstants.NULL_LONG) { @@ -295,10 +286,9 @@ public static DBDateTime minus(DBDateTime dateTime, long nanos) { * * @param dateTime The starting {@link DBDateTime} value. * @param period The {@link DBPeriod} to add to dateTime. 
- * @return a null {@link DBDateTime} if either input is null; the starting {@link DBDateTime} - * plus the specified period, if the result is not too large for a DBDateTime; or throws - * a {@link DBDateTimeOverflowException DBDateTimeOverflowException} if the resultant - * value is more than max long nanoseconds from Epoch. + * @return a null {@link DBDateTime} if either input is null; the starting {@link DBDateTime} plus the specified + * period, if the result is not too large for a DBDateTime; or throws a {@link DBDateTimeOverflowException + * DBDateTimeOverflowException} if the resultant value is more than max long nanoseconds from Epoch. */ public static DBDateTime plus(DBDateTime dateTime, DBPeriod period) { if (dateTime == null || period == null) { @@ -306,12 +296,10 @@ public static DBDateTime plus(DBDateTime dateTime, DBPeriod period) { } if (period.isPositive()) { - return new DBDateTime( - millisToNanos(dateTime.getJodaDateTime().plus(period.getJodaPeriod()).getMillis()) + return new DBDateTime(millisToNanos(dateTime.getJodaDateTime().plus(period.getJodaPeriod()).getMillis()) + dateTime.getNanosPartial()); } else { - return new DBDateTime( - millisToNanos(dateTime.getJodaDateTime().minus(period.getJodaPeriod()).getMillis()) + return new DBDateTime(millisToNanos(dateTime.getJodaDateTime().minus(period.getJodaPeriod()).getMillis()) + dateTime.getNanosPartial()); } } @@ -321,11 +309,10 @@ public static DBDateTime plus(DBDateTime dateTime, DBPeriod period) { * * @param dateTime The starting {@link DBDateTime} value. * @param period The {@link DBPeriod} to subtract from dateTime. - * @return a null {@link DBDateTime} if either input is null; the starting {@link DBDateTime} - * minus the specified period, if the result is not too negative for a - * {@link DBDateTime}; or throws a {@link DBDateTimeOverflowException - * DBDateTimeOverflowException} if the resultant value is more than min long nanoseconds - * from Epoch. 
+ * @return a null {@link DBDateTime} if either input is null; the starting {@link DBDateTime} minus the specified + * period, if the result is not too negative for a {@link DBDateTime}; or throws a + * {@link DBDateTimeOverflowException DBDateTimeOverflowException} if the resultant value is more than min + * long nanoseconds from Epoch. */ public static DBDateTime minus(DBDateTime dateTime, DBPeriod period) { if (dateTime == null || period == null) { @@ -333,12 +320,10 @@ public static DBDateTime minus(DBDateTime dateTime, DBPeriod period) { } if (period.isPositive()) { - return new DBDateTime( - millisToNanos(dateTime.getJodaDateTime().minus(period.getJodaPeriod()).getMillis()) + return new DBDateTime(millisToNanos(dateTime.getJodaDateTime().minus(period.getJodaPeriod()).getMillis()) + dateTime.getNanosPartial()); } else { - return new DBDateTime( - millisToNanos(dateTime.getJodaDateTime().plus(period.getJodaPeriod()).getMillis()) + return new DBDateTime(millisToNanos(dateTime.getJodaDateTime().plus(period.getJodaPeriod()).getMillis()) + dateTime.getNanosPartial()); } } @@ -348,15 +333,13 @@ public static DBDateTime minus(DBDateTime dateTime, DBPeriod period) { * * @param d1 The first {@link DBDateTime}. * @param d2 The {@link DBDateTime} to subtract from d1. - * @return {@link QueryConstants#NULL_LONG} if either input is null; the long nanoseconds from - * Epoch value of the first {@link DBDateTime} minus d2, if the result is not out of - * range for a long value; or throws a {@link DBDateTimeOverflowException - * DBDateTimeOverflowException} if the resultant value would be more than min long or - * max long nanoseconds from Epoch. 
+ * @return {@link QueryConstants#NULL_LONG} if either input is null; the long nanoseconds from Epoch value of the + * first {@link DBDateTime} minus d2, if the result is not out of range for a long value; or throws a + * {@link DBDateTimeOverflowException DBDateTimeOverflowException} if the resultant value would be more than + * min long or max long nanoseconds from Epoch. *

    - * Note that the subtraction is done based the nanosecond offsets of the two dates from - * Epoch, so, if either date is before Epoch (negative offset), the result may be - * unexpected. + * Note that the subtraction is done based the nanosecond offsets of the two dates from Epoch, so, if either + * date is before Epoch (negative offset), the result may be unexpected. *

    */ public static long minus(DBDateTime d1, DBDateTime d2) { @@ -387,15 +370,13 @@ public static double dayDiff(DBDateTime start, DBDateTime end) { * * @param d1 The first {@link DBDateTime}. * @param d2 The second {@link DBDateTime}. - * @return {@link QueryConstants#NULL_LONG} if either input is null; the long nanoseconds from - * Epoch value of the first {@link DBDateTime} minus d2, if the result is not out of - * range for a long value; or throws a {@link DBDateTimeOverflowException - * DBDateTimeOverflowException} if the resultant value would be more than min long or - * max long nanoseconds from Epoch. + * @return {@link QueryConstants#NULL_LONG} if either input is null; the long nanoseconds from Epoch value of the + * first {@link DBDateTime} minus d2, if the result is not out of range for a long value; or throws a + * {@link DBDateTimeOverflowException DBDateTimeOverflowException} if the resultant value would be more than + * min long or max long nanoseconds from Epoch. *

    - * Note that the subtraction is done based the nanosecond offsets of the two dates from - * Epoch, so, if either date is before Epoch (negative offset), the result may be - * unexpected. + * Note that the subtraction is done based the nanosecond offsets of the two dates from Epoch, so, if either + * date is before Epoch (negative offset), the result may be unexpected. *

    * If the second value is greater than the first value, the result will be negative. */ @@ -405,20 +386,18 @@ public static long diffNanos(DBDateTime d1, DBDateTime d2) { } /** - * Returns a double value of the number of 365 day units difference between two - * {@link DBDateTime} values. + * Returns a double value of the number of 365 day units difference between two {@link DBDateTime} values. * * @param start The first {@link DBDateTime}. * @param end The second {@link DBDateTime}. - * @return {@link QueryConstants#NULL_LONG} if either input is null; a double value of the - * number of 365 day periods obtained from the first {@link DBDateTime} value minus d2, - * if the intermediate value of nanoseconds difference between the two dates is not out - * of range for a long value; or throws a {@link DBDateTimeOverflowException} if the - * intermediate value would be more than min long or max long nanoseconds from Epoch. + * @return {@link QueryConstants#NULL_LONG} if either input is null; a double value of the number of 365 day periods + * obtained from the first {@link DBDateTime} value minus d2, if the intermediate value of nanoseconds + * difference between the two dates is not out of range for a long value; or throws a + * {@link DBDateTimeOverflowException} if the intermediate value would be more than min long or max long + * nanoseconds from Epoch. *

    - * Note that the subtraction is done based the nanosecond offsets of the two dates from - * Epoch, so, if either date is before Epoch (negative offset), the result may be - * unexpected. + * Note that the subtraction is done based the nanosecond offsets of the two dates from Epoch, so, if either + * date is before Epoch (negative offset), the result may be unexpected. *

    * If the second value is greater than the first value, the result will be negative. */ @@ -431,21 +410,18 @@ public static double diffYear(DBDateTime start, DBDateTime end) { } /** - * Returns a double value of the number of days difference between two {@link DBDateTime} - * values. + * Returns a double value of the number of days difference between two {@link DBDateTime} values. * * @param start The first {@link DBDateTime}. * @param end The second {@link DBDateTime}. - * @return {@link QueryConstants#NULL_LONG} if either input is null; a double value of the - * number of days obtained from the first {@link DBDateTime} value minus d2, if the - * intermediate value of nanoseconds difference between the two dates is not out of - * range for a long value; or throws a {@link DBDateTimeOverflowException - * DBDateTimeOverflowException} if the intermediate value would be more than min long or - * max long nanoseconds from Epoch. + * @return {@link QueryConstants#NULL_LONG} if either input is null; a double value of the number of days obtained + * from the first {@link DBDateTime} value minus d2, if the intermediate value of nanoseconds difference + * between the two dates is not out of range for a long value; or throws a + * {@link DBDateTimeOverflowException DBDateTimeOverflowException} if the intermediate value would be more + * than min long or max long nanoseconds from Epoch. *

    - * Note that the subtraction is done based the nanosecond offsets of the two dates from - * Epoch, so, if either date is before Epoch (negative offset), the result may be - * unexpected. + * Note that the subtraction is done based the nanosecond offsets of the two dates from Epoch, so, if either + * date is before Epoch (negative offset), the result may be unexpected. *

    * If the second value is greater than the first value, the result will be negative. */ @@ -459,32 +435,30 @@ public static double diffDay(DBDateTime start, DBDateTime end) { } /** - * Returns a {@link DBDateTime} for the requested {@link DBDateTime} at midnight in the - * specified time zone. + * Returns a {@link DBDateTime} for the requested {@link DBDateTime} at midnight in the specified time zone. * * @param dateTime {@link DBDateTime} for which the new value at midnight should be calculated. * @param timeZone {@link DBTimeZone} for which the new value at midnight should be calculated. - * @return A null {@link DBDateTime} if either input is null, otherwise a {@link DBDateTime} - * representing midnight for the date and time zone of the inputs. + * @return A null {@link DBDateTime} if either input is null, otherwise a {@link DBDateTime} representing midnight + * for the date and time zone of the inputs. */ public static DBDateTime dateAtMidnight(DBDateTime dateTime, DBTimeZone timeZone) { if (dateTime == null || timeZone == null) { return null; } - return new DBDateTime(millisToNanos( - new DateMidnight(dateTime.getMillis(), timeZone.getTimeZone()).getMillis()) - + dateTime.getNanosPartial()); + return new DBDateTime(millisToNanos(new DateMidnight(dateTime.getMillis(), timeZone.getTimeZone()).getMillis()) + + dateTime.getNanosPartial()); } /** - * Returns a {@link DBDateTime} representing midnight in New York time zone on the date - * specified by the a number of milliseconds from Epoch. + * Returns a {@link DBDateTime} representing midnight in New York time zone on the date specified by the a number of + * milliseconds from Epoch. * - * @param millis A long value of the number of milliseconds from Epoch for which the - * {@link DBDateTime} is to be calculated. - * @return A {@link DBDateTime} rounded down to midnight in the New York time zone for the - * specified number of milliseconds from Epoch. 
+ * @param millis A long value of the number of milliseconds from Epoch for which the {@link DBDateTime} is to be + * calculated. + * @return A {@link DBDateTime} rounded down to midnight in the New York time zone for the specified number of + * milliseconds from Epoch. */ @SuppressWarnings("WeakerAccess") public static DBDateTime millisToDateAtMidnightNy(final long millis) { @@ -492,14 +466,14 @@ public static DBDateTime millisToDateAtMidnightNy(final long millis) { } /** - * Returns a {@link DBDateTime} representing midnight in a selected time zone on the date - * specified by the a number of milliseconds from Epoch. + * Returns a {@link DBDateTime} representing midnight in a selected time zone on the date specified by the a number + * of milliseconds from Epoch. * - * @param millis A long value of the number of milliseconds from Epoch for which the - * {@link DBDateTime} is to be calculated. + * @param millis A long value of the number of milliseconds from Epoch for which the {@link DBDateTime} is to be + * calculated. * @param timeZone {@link DBTimeZone} for which the new value at midnight should be calculated. - * @return A {@link DBDateTime} rounded down to midnight in the selected time zone for the - * specified number of milliseconds from Epoch. + * @return A {@link DBDateTime} rounded down to midnight in the selected time zone for the specified number of + * milliseconds from Epoch. 
*/ @SuppressWarnings("WeakerAccess") public static DBDateTime millisToDateAtMidnight(final long millis, final DBTimeZone timeZone) { @@ -507,8 +481,7 @@ public static DBDateTime millisToDateAtMidnight(final long millis, final DBTimeZ return null; } - return new DBDateTime( - millisToNanos(new DateMidnight(millis, timeZone.getTimeZone()).getMillis())); + return new DBDateTime(millisToNanos(new DateMidnight(millis, timeZone.getTimeZone()).getMillis())); } /** @@ -516,8 +489,7 @@ public static DBDateTime millisToDateAtMidnight(final long millis, final DBTimeZ * * @param dateTime The {@link DBDateTime} to format as a String. * @param timeZone The {@link DBTimeZone} to use when formatting the String. - * @return A null String if either input is null, otherwise a String formatted as - * yyyy-MM-ddThh:mm:ss.nnnnnnnnn TZ. + * @return A null String if either input is null, otherwise a String formatted as yyyy-MM-ddThh:mm:ss.nnnnnnnnn TZ. */ public static String format(DBDateTime dateTime, DBTimeZone timeZone) { if (dateTime == null || timeZone == null) { @@ -528,20 +500,17 @@ public static String format(DBDateTime dateTime, DBTimeZone timeZone) { } /** - * Returns a String date/time representation of a {@link DBDateTime} interpreted for the New - * York time zone. + * Returns a String date/time representation of a {@link DBDateTime} interpreted for the New York time zone. * * @param dateTime The {@link DBDateTime} to format as a String. - * @return A null String if the input is null, otherwise a String formatted as - * yyyy-MM-ddThh:mm:ss.nnnnnnnnn NY. + * @return A null String if the input is null, otherwise a String formatted as yyyy-MM-ddThh:mm:ss.nnnnnnnnn NY. */ public static String formatNy(DBDateTime dateTime) { return format(dateTime, DBTimeZone.TZ_NY); } /** - * Returns a String date representation of a {@link DBDateTime} interpreted for a specified time - * zone. 
+ * Returns a String date representation of a {@link DBDateTime} interpreted for a specified time zone. * * @param dateTime The {@link DBDateTime} to format as a String. * @param timeZone The {@link DBTimeZone} to use when formatting the String. @@ -557,8 +526,7 @@ public static String formatDate(DBDateTime dateTime, DBTimeZone timeZone) { } /** - * Returns a String date representation of a {@link DBDateTime} interpreted for the New York - * time zone. + * Returns a String date representation of a {@link DBDateTime} interpreted for the New York time zone. * * @param dateTime The {@link DBDateTime} to format as a String. * @return A null String if the input is null, otherwise a String formatted as yyyy-MM-dd. @@ -610,8 +578,8 @@ public static String format(long nanos) { buf.append(days).append('T'); } - buf.append(hours).append(':').append(StringUtils.pad(String.valueOf(minutes), 2, '0')) - .append(':').append(StringUtils.pad(String.valueOf(seconds), 2, '0')); + buf.append(hours).append(':').append(StringUtils.pad(String.valueOf(minutes), 2, '0')).append(':') + .append(StringUtils.pad(String.valueOf(seconds), 2, '0')); if (nanos != 0) { buf.append('.').append(StringUtils.pad(String.valueOf(nanos), 9, '0')); @@ -621,14 +589,12 @@ public static String format(long nanos) { } /** - * Returns an int value of the day of the month for a {@link DBDateTime} and specified time - * zone. + * Returns an int value of the day of the month for a {@link DBDateTime} and specified time zone. * * @param dateTime The {@link DBDateTime} for which to find the day of the month. * @param timeZone The {@link DBTimeZone} to use when interpreting the date/time. - * @return A {@link QueryConstants#NULL_INT} if either input is null, otherwise, an int value of - * the day of the month represented by the {@link DBDateTime} when interpreted in the - * specified time zone. 
+ * @return A {@link QueryConstants#NULL_INT} if either input is null, otherwise, an int value of the day of the + * month represented by the {@link DBDateTime} when interpreted in the specified time zone. */ @SuppressWarnings("WeakerAccess") public static int dayOfMonth(DBDateTime dateTime, DBTimeZone timeZone) { @@ -640,27 +606,24 @@ public static int dayOfMonth(DBDateTime dateTime, DBTimeZone timeZone) { } /** - * Returns an int value of the day of the month for a {@link DBDateTime} in the New York time - * zone. + * Returns an int value of the day of the month for a {@link DBDateTime} in the New York time zone. * * @param dateTime The {@link DBDateTime} for which to find the day of the month. - * @return A {@link QueryConstants#NULL_INT} if the input is null, otherwise, an int value of - * the day of the month represented by the {@link DBDateTime} when interpreted in the - * New York time zone. + * @return A {@link QueryConstants#NULL_INT} if the input is null, otherwise, an int value of the day of the month + * represented by the {@link DBDateTime} when interpreted in the New York time zone. */ public static int dayOfMonthNy(DBDateTime dateTime) { return dayOfMonth(dateTime, DBTimeZone.TZ_NY); } /** - * Returns an int value of the day of the week for a {@link DBDateTime} in the specified time - * zone, with 1 being Monday and 7 being Sunday. + * Returns an int value of the day of the week for a {@link DBDateTime} in the specified time zone, with 1 being + * Monday and 7 being Sunday. * * @param dateTime The {@link DBDateTime} for which to find the day of the week. * @param timeZone The {@link DBTimeZone} to use when interpreting the date/time. - * @return A {@link QueryConstants#NULL_INT} if either input is null, otherwise, an int value of - * the day of the week represented by the {@link DBDateTime} when interpreted in the - * specified time zone. 
+ * @return A {@link QueryConstants#NULL_INT} if either input is null, otherwise, an int value of the day of the week + * represented by the {@link DBDateTime} when interpreted in the specified time zone. */ public static int dayOfWeek(DBDateTime dateTime, DBTimeZone timeZone) { if (dateTime == null || timeZone == null) { @@ -671,27 +634,24 @@ public static int dayOfWeek(DBDateTime dateTime, DBTimeZone timeZone) { } /** - * Returns an int value of the day of the week for a {@link DBDateTime} in the New York time - * zone, with 1 being Monday and 7 being Sunday. + * Returns an int value of the day of the week for a {@link DBDateTime} in the New York time zone, with 1 being + * Monday and 7 being Sunday. * * @param dateTime The {@link DBDateTime} for which to find the day of the week. - * @return A {@link QueryConstants#NULL_INT} if the input is null, otherwise, an int value of - * the day of the week represented by the {@link DBDateTime} when interpreted in the New - * York time zone. + * @return A {@link QueryConstants#NULL_INT} if the input is null, otherwise, an int value of the day of the week + * represented by the {@link DBDateTime} when interpreted in the New York time zone. */ public static int dayOfWeekNy(DBDateTime dateTime) { return dayOfWeek(dateTime, DBTimeZone.TZ_NY); } /** - * Returns an int value of the day of the year (Julian date) for a {@link DBDateTime} in the - * specified time zone. + * Returns an int value of the day of the year (Julian date) for a {@link DBDateTime} in the specified time zone. * * @param dateTime The {@link DBDateTime} for which to find the day of the year. * @param timeZone The {@link DBTimeZone} to use when interpreting the date/time. - * @return A {@link QueryConstants#NULL_INT} if either input is null, otherwise, an int value of - * the day of the year represented by the {@link DBDateTime} when interpreted in the - * specified time zone. 
+ * @return A {@link QueryConstants#NULL_INT} if either input is null, otherwise, an int value of the day of the year + * represented by the {@link DBDateTime} when interpreted in the specified time zone. */ public static int dayOfYear(DBDateTime dateTime, DBTimeZone timeZone) { if (dateTime == null || timeZone == null) { @@ -702,27 +662,24 @@ public static int dayOfYear(DBDateTime dateTime, DBTimeZone timeZone) { } /** - * Returns an int value of the day of the year (Julian date) for a {@link DBDateTime} in the New - * York time zone. + * Returns an int value of the day of the year (Julian date) for a {@link DBDateTime} in the New York time zone. * * @param dateTime The {@link DBDateTime} for which to find the day of the year. - * @return A {@link QueryConstants#NULL_INT} if the input is null, otherwise, an int value of - * the day of the year represented by the {@link DBDateTime} when interpreted in the New - * York time zone. + * @return A {@link QueryConstants#NULL_INT} if the input is null, otherwise, an int value of the day of the year + * represented by the {@link DBDateTime} when interpreted in the New York time zone. */ public static int dayOfYearNy(DBDateTime dateTime) { return dayOfYear(dateTime, DBTimeZone.TZ_NY); } /** - * Returns an int value of the hour of the day for a {@link DBDateTime} in the specified time - * zone. The hour is on a 24 hour clock (0 - 23). + * Returns an int value of the hour of the day for a {@link DBDateTime} in the specified time zone. The hour is on a + * 24 hour clock (0 - 23). * * @param dateTime The {@link DBDateTime} for which to find the hour of the day. * @param timeZone The {@link DBTimeZone} to use when interpreting the date/time. - * @return A {@link QueryConstants#NULL_INT} if either input is null, otherwise, an int value of - * the hour of the day represented by the {@link DBDateTime} when interpreted in the - * specified time zone. 
+ * @return A {@link QueryConstants#NULL_INT} if either input is null, otherwise, an int value of the hour of the day + * represented by the {@link DBDateTime} when interpreted in the specified time zone. */ @SuppressWarnings("WeakerAccess") public static int hourOfDay(DBDateTime dateTime, DBTimeZone timeZone) { @@ -734,27 +691,25 @@ public static int hourOfDay(DBDateTime dateTime, DBTimeZone timeZone) { } /** - * Returns an int value of the hour of the day for a {@link DBDateTime} in the New York time - * zone. The hour is on a 24 hour clock (0 - 23). + * Returns an int value of the hour of the day for a {@link DBDateTime} in the New York time zone. The hour is on a + * 24 hour clock (0 - 23). * * @param dateTime The {@link DBDateTime} for which to find the hour of the day. - * @return A {@link QueryConstants#NULL_INT} if the input is null, otherwise, an int value of - * the hour of the day represented by the {@link DBDateTime} when interpreted in the New - * York time zone. + * @return A {@link QueryConstants#NULL_INT} if the input is null, otherwise, an int value of the hour of the day + * represented by the {@link DBDateTime} when interpreted in the New York time zone. */ public static int hourOfDayNy(DBDateTime dateTime) { return hourOfDay(dateTime, DBTimeZone.TZ_NY); } /** - * Returns an int value of milliseconds since midnight for a {@link DBDateTime} in the specified - * time zone. + * Returns an int value of milliseconds since midnight for a {@link DBDateTime} in the specified time zone. * * @param dateTime The {@link DBDateTime} for which to find the milliseconds since midnight. * @param timeZone The {@link DBTimeZone} to use when interpreting the date/time. - * @return A {@link QueryConstants#NULL_INT} if either input is null, otherwise, an int value of - * milliseconds since midnight for the date/time represented by the {@link DBDateTime} - * when interpreted in the specified time zone. 
+ * @return A {@link QueryConstants#NULL_INT} if either input is null, otherwise, an int value of milliseconds since + * midnight for the date/time represented by the {@link DBDateTime} when interpreted in the specified time + * zone. */ @SuppressWarnings("WeakerAccess") public static int millisOfDay(DBDateTime dateTime, DBTimeZone timeZone) { @@ -766,27 +721,26 @@ public static int millisOfDay(DBDateTime dateTime, DBTimeZone timeZone) { } /** - * Returns an int value of milliseconds since midnight for a {@link DBDateTime} in the New York - * time zone. + * Returns an int value of milliseconds since midnight for a {@link DBDateTime} in the New York time zone. * * @param dateTime The {@link DBDateTime} for which to find the milliseconds since midnight. - * @return A {@link QueryConstants#NULL_INT} if the input is null, otherwise, an int value of - * milliseconds since midnight for the date/time represented by the {@link DBDateTime} - * when interpreted in the New York time zone. + * @return A {@link QueryConstants#NULL_INT} if the input is null, otherwise, an int value of milliseconds since + * midnight for the date/time represented by the {@link DBDateTime} when interpreted in the New York time + * zone. */ public static int millisOfDayNy(DBDateTime dateTime) { return millisOfDay(dateTime, DBTimeZone.TZ_NY); } /** - * Returns an int value of milliseconds since the top of the second for a {@link DBDateTime} in - * the specified time zone. + * Returns an int value of milliseconds since the top of the second for a {@link DBDateTime} in the specified time + * zone. * * @param dateTime The {@link DBDateTime} for which to find the milliseconds. * @param timeZone The {@link DBTimeZone} to use when interpreting the date/time. - * @return A {@link QueryConstants#NULL_INT} if either input is null, otherwise, an int value of - * milliseconds since the top of the second for the date/time represented by the - * {@link DBDateTime} when interpreted in the specified time zone. 
+ * @return A {@link QueryConstants#NULL_INT} if either input is null, otherwise, an int value of milliseconds since + * the top of the second for the date/time represented by the {@link DBDateTime} when interpreted in the + * specified time zone. */ @SuppressWarnings("WeakerAccess") public static int millisOfSecond(DBDateTime dateTime, DBTimeZone timeZone) { @@ -798,27 +752,26 @@ public static int millisOfSecond(DBDateTime dateTime, DBTimeZone timeZone) { } /** - * Returns an int value of milliseconds since the top of the second for a {@link DBDateTime} in - * the New York time zone. + * Returns an int value of milliseconds since the top of the second for a {@link DBDateTime} in the New York time + * zone. * * @param dateTime The {@link DBDateTime} for which to find the milliseconds. - * @return A {@link QueryConstants#NULL_INT} if the input is null, otherwise, an int value of - * milliseconds since the top of the second for the date/time represented by the - * {@link DBDateTime} when interpreted in the New York time zone. + * @return A {@link QueryConstants#NULL_INT} if the input is null, otherwise, an int value of milliseconds since the + * top of the second for the date/time represented by the {@link DBDateTime} when interpreted in the New + * York time zone. */ public static int millisOfSecondNy(DBDateTime dateTime) { return millisOfSecond(dateTime, DBTimeZone.TZ_NY); } /** - * Returns a long value of nanoseconds since midnight for a {@link DBDateTime} in the specified - * time zone. + * Returns a long value of nanoseconds since midnight for a {@link DBDateTime} in the specified time zone. * * @param dateTime The {@link DBDateTime} for which to find the nanoseconds since midnight. * @param timeZone The {@link DBTimeZone} to use when interpreting the date/time. 
- * @return A {@link QueryConstants#NULL_LONG} if either input is null, otherwise, a long value - * of nanoseconds since midnight for the date/time represented by the {@link DBDateTime} - * when interpreted in the specified time zone. + * @return A {@link QueryConstants#NULL_LONG} if either input is null, otherwise, a long value of nanoseconds since + * midnight for the date/time represented by the {@link DBDateTime} when interpreted in the specified time + * zone. */ @SuppressWarnings("WeakerAccess") public static long nanosOfDay(DBDateTime dateTime, DBTimeZone timeZone) { @@ -826,32 +779,30 @@ public static long nanosOfDay(DBDateTime dateTime, DBTimeZone timeZone) { return io.deephaven.util.QueryConstants.NULL_LONG; } - return millisToNanos(dateTime.getJodaDateTime(timeZone).getMillisOfDay()) - + dateTime.getNanosPartial(); + return millisToNanos(dateTime.getJodaDateTime(timeZone).getMillisOfDay()) + dateTime.getNanosPartial(); } /** - * Returns a long value of nanoseconds since midnight for a {@link DBDateTime} in the New York - * time zone. + * Returns a long value of nanoseconds since midnight for a {@link DBDateTime} in the New York time zone. * * @param dateTime The {@link DBDateTime} for which to find the nanoseconds since midnight. - * @return A {@link QueryConstants#NULL_LONG} if the input is null, otherwise, a long value of - * nanoseconds since midnight for the date/time represented by the {@link DBDateTime} - * when interpreted in the New York time zone. + * @return A {@link QueryConstants#NULL_LONG} if the input is null, otherwise, a long value of nanoseconds since + * midnight for the date/time represented by the {@link DBDateTime} when interpreted in the New York time + * zone. */ public static long nanosOfDayNy(DBDateTime dateTime) { return nanosOfDay(dateTime, DBTimeZone.TZ_NY); } /** - * Returns a long value of nanoseconds since the top of the second for a {@link DBDateTime} in - * the specified time zone. 
+ * Returns a long value of nanoseconds since the top of the second for a {@link DBDateTime} in the specified time + * zone. * * @param dateTime The {@link DBDateTime} for which to find the nanoseconds. * @param timeZone The {@link DBTimeZone} to use when interpreting the date/time. - * @return A {@link QueryConstants#NULL_LONG} if either input is null, otherwise, a long value - * of nanoseconds since the top of the second for the date/time represented by the - * {@link DBDateTime} when interpreted in the specified time zone. + * @return A {@link QueryConstants#NULL_LONG} if either input is null, otherwise, a long value of nanoseconds since + * the top of the second for the date/time represented by the {@link DBDateTime} when interpreted in the + * specified time zone. */ @SuppressWarnings("WeakerAccess") public static long nanosOfSecond(DBDateTime dateTime, DBTimeZone timeZone) { @@ -859,33 +810,32 @@ public static long nanosOfSecond(DBDateTime dateTime, DBTimeZone timeZone) { return io.deephaven.util.QueryConstants.NULL_LONG; } - return millisToNanos(dateTime.getJodaDateTime(timeZone).getMillisOfSecond()) - + dateTime.getNanosPartial(); + return millisToNanos(dateTime.getJodaDateTime(timeZone).getMillisOfSecond()) + dateTime.getNanosPartial(); } /** - * Returns a long value of nanoseconds since the top of the second for a {@link DBDateTime} in - * the New York time zone. + * Returns a long value of nanoseconds since the top of the second for a {@link DBDateTime} in the New York time + * zone. * * @param dateTime The {@link DBDateTime} for which to find the nanoseconds. - * @return A {@link QueryConstants#NULL_LONG} if the input is null, otherwise, a long value of - * nanoseconds since the top of the second for the date/time represented by the - * {@link DBDateTime} when interpreted in the New York time zone. 
+ * @return A {@link QueryConstants#NULL_LONG} if the input is null, otherwise, a long value of nanoseconds since the + * top of the second for the date/time represented by the {@link DBDateTime} when interpreted in the New + * York time zone. */ public static long nanosOfSecondNy(DBDateTime dateTime) { return nanosOfSecond(dateTime, DBTimeZone.TZ_NY); } /** - * Returns the number of microseconds that have elapsed since the start of the millisecond - * represented by the provided {@code dateTime} in the specified time zone. Nanoseconds are - * rounded, not dropped -- '20:41:39.123456700' has 457 micros, not 456. + * Returns the number of microseconds that have elapsed since the start of the millisecond represented by the + * provided {@code dateTime} in the specified time zone. Nanoseconds are rounded, not dropped -- + * '20:41:39.123456700' has 457 micros, not 456. * * @param dateTime The {@link DBDateTime} for which to find the microseconds. * @param timeZone The {@link DBTimeZone} to use when interpreting the date/time. - * @return A {@link QueryConstants#NULL_INT} if either input is null, otherwise, an int value of - * microseconds since the top of the millisecond for the date/time represented by the - * {@link DBDateTime} when interpreted in the specified time zone. + * @return A {@link QueryConstants#NULL_INT} if either input is null, otherwise, an int value of microseconds since + * the top of the millisecond for the date/time represented by the {@link DBDateTime} when interpreted in + * the specified time zone. */ @SuppressWarnings("WeakerAccess") public static int microsOfMilli(DBDateTime dateTime, DBTimeZone timeZone) { @@ -897,14 +847,14 @@ public static int microsOfMilli(DBDateTime dateTime, DBTimeZone timeZone) { } /** - * Returns the number of microseconds that have elapsed since the start of the millisecond - * represented by the provided {@code dateTime} in the New York time zone. 
Nanoseconds are - * rounded, not dropped -- '20:41:39.123456700' has 457 micros, not 456. + * Returns the number of microseconds that have elapsed since the start of the millisecond represented by the + * provided {@code dateTime} in the New York time zone. Nanoseconds are rounded, not dropped -- '20:41:39.123456700' + * has 457 micros, not 456. * * @param dateTime The {@link DBDateTime} for which to find the microseconds. - * @return A {@link QueryConstants#NULL_INT} if the input is null, otherwise, an int value of - * microseconds since the top of the millisecond for the date/time represented by the - * {@link DBDateTime} when interpreted in the New York time zone. + * @return A {@link QueryConstants#NULL_INT} if the input is null, otherwise, an int value of microseconds since the + * top of the millisecond for the date/time represented by the {@link DBDateTime} when interpreted in the + * New York time zone. */ @SuppressWarnings("WeakerAccess") public static int microsOfMilliNy(DBDateTime dateTime) { @@ -912,14 +862,13 @@ public static int microsOfMilliNy(DBDateTime dateTime) { } /** - * Returns an int value of minutes since midnight for a {@link DBDateTime} in the specified time - * zone. + * Returns an int value of minutes since midnight for a {@link DBDateTime} in the specified time zone. * * @param dateTime The {@link DBDateTime} for which to find the minutes. * @param timeZone The {@link DBTimeZone} to use when interpreting the date/time. - * @return A {@link QueryConstants#NULL_INT} if either input is null, otherwise, an int value of - * minutes since midnight for the date/time represented by the {@link DBDateTime} when - * interpreted in the specified time zone. + * @return A {@link QueryConstants#NULL_INT} if either input is null, otherwise, an int value of minutes since + * midnight for the date/time represented by the {@link DBDateTime} when interpreted in the specified time + * zone. 
*/ @SuppressWarnings("WeakerAccess") public static int minuteOfDay(DBDateTime dateTime, DBTimeZone timeZone) { @@ -931,27 +880,24 @@ public static int minuteOfDay(DBDateTime dateTime, DBTimeZone timeZone) { } /** - * Returns an int value of minutes since midnight for a {@link DBDateTime} in the New York time - * zone. + * Returns an int value of minutes since midnight for a {@link DBDateTime} in the New York time zone. * * @param dateTime The {@link DBDateTime} for which to find the milliseconds since midnight. - * @return A {@link QueryConstants#NULL_INT} if the input is null, otherwise, an int value of - * minutes since midnight for the date/time represented by the {@link DBDateTime} when - * interpreted in the New York time zone. + * @return A {@link QueryConstants#NULL_INT} if the input is null, otherwise, an int value of minutes since midnight + * for the date/time represented by the {@link DBDateTime} when interpreted in the New York time zone. */ public static int minuteOfDayNy(DBDateTime dateTime) { return minuteOfDay(dateTime, DBTimeZone.TZ_NY); } /** - * Returns an int value of minutes since the top of the hour for a {@link DBDateTime} in the - * specified time zone. + * Returns an int value of minutes since the top of the hour for a {@link DBDateTime} in the specified time zone. * * @param dateTime The {@link DBDateTime} for which to find the minutes. * @param timeZone The {@link DBTimeZone} to use when interpreting the date/time. - * @return A {@link QueryConstants#NULL_INT} if either input is null, otherwise, an int value of - * minutes since the top of the hour for the date/time represented by the - * {@link DBDateTime} when interpreted in the specified time zone. + * @return A {@link QueryConstants#NULL_INT} if either input is null, otherwise, an int value of minutes since the + * top of the hour for the date/time represented by the {@link DBDateTime} when interpreted in the specified + * time zone. 
*/ @SuppressWarnings("WeakerAccess") public static int minuteOfHour(DBDateTime dateTime, DBTimeZone timeZone) { @@ -963,13 +909,12 @@ public static int minuteOfHour(DBDateTime dateTime, DBTimeZone timeZone) { } /** - * Returns an int value of minutes since the top of the hour for a {@link DBDateTime} in the New - * York time zone. + * Returns an int value of minutes since the top of the hour for a {@link DBDateTime} in the New York time zone. * * @param dateTime The {@link DBDateTime} for which to find the minutes. - * @return A {@link QueryConstants#NULL_INT} if the input is null, otherwise, an int value of - * minutes since the top of the hour for the date/time represented by the - * {@link DBDateTime} when interpreted in the New York time zone. + * @return A {@link QueryConstants#NULL_INT} if the input is null, otherwise, an int value of minutes since the top + * of the hour for the date/time represented by the {@link DBDateTime} when interpreted in the New York time + * zone. */ public static int minuteOfHourNy(DBDateTime dateTime) { return minuteOfHour(dateTime, DBTimeZone.TZ_NY); @@ -980,9 +925,9 @@ public static int minuteOfHourNy(DBDateTime dateTime) { * * @param dateTime The {@link DBDateTime} for which to find the month. * @param timeZone The {@link DBTimeZone} to use when interpreting the date/time. - * @return A {@link QueryConstants#NULL_INT} if either input is null, otherwise, an int value of - * the month for the date/time represented by the {@link DBDateTime} when interpreted in - * the specified time zone. January is 1, February is 2, etc. + * @return A {@link QueryConstants#NULL_INT} if either input is null, otherwise, an int value of the month for the + * date/time represented by the {@link DBDateTime} when interpreted in the specified time zone. January is + * 1, February is 2, etc. 
*/ @SuppressWarnings("WeakerAccess") public static int monthOfYear(DBDateTime dateTime, DBTimeZone timeZone) { @@ -997,23 +942,21 @@ public static int monthOfYear(DBDateTime dateTime, DBTimeZone timeZone) { * Returns an int value for the month of a {@link DBDateTime} in the New York time zone. * * @param dateTime The {@link DBDateTime} for which to find the month. - * @return A {@link QueryConstants#NULL_INT} if the input is null, otherwise, an int value of - * the month for the date/time represented by the {@link DBDateTime} when interpreted in - * the New York time zone. + * @return A {@link QueryConstants#NULL_INT} if the input is null, otherwise, an int value of the month for the + * date/time represented by the {@link DBDateTime} when interpreted in the New York time zone. */ public static int monthOfYearNy(DBDateTime dateTime) { return monthOfYear(dateTime, DBTimeZone.TZ_NY); } /** - * Returns an int value of seconds since midnight for a {@link DBDateTime} in the specified time - * zone. + * Returns an int value of seconds since midnight for a {@link DBDateTime} in the specified time zone. * * @param dateTime The {@link DBDateTime} for which to find the seconds. * @param timeZone The {@link DBTimeZone} to use when interpreting the date/time. - * @return A {@link QueryConstants#NULL_INT} if either input is null, otherwise, an int value of - * seconds since midnight for the date/time represented by the {@link DBDateTime} when - * interpreted in the specified time zone. + * @return A {@link QueryConstants#NULL_INT} if either input is null, otherwise, an int value of seconds since + * midnight for the date/time represented by the {@link DBDateTime} when interpreted in the specified time + * zone. 
*/ @SuppressWarnings("WeakerAccess") public static int secondOfDay(DBDateTime dateTime, DBTimeZone timeZone) { @@ -1025,27 +968,25 @@ public static int secondOfDay(DBDateTime dateTime, DBTimeZone timeZone) { } /** - * Returns an int value of seconds since midnight for a {@link DBDateTime} in the New York time - * zone. + * Returns an int value of seconds since midnight for a {@link DBDateTime} in the New York time zone. * * @param dateTime The {@link DBDateTime} for which to find the seconds. - * @return A {@link QueryConstants#NULL_INT} if either input is null, otherwise, an int value of - * seconds since midnight for the date/time represented by the {@link DBDateTime} when - * interpreted in the New York time zone. + * @return A {@link QueryConstants#NULL_INT} if either input is null, otherwise, an int value of seconds since + * midnight for the date/time represented by the {@link DBDateTime} when interpreted in the New York time + * zone. */ public static int secondOfDayNy(DBDateTime dateTime) { return secondOfDay(dateTime, DBTimeZone.TZ_NY); } /** - * Returns an int value of seconds since the top of the minute for a {@link DBDateTime} in the - * specified time zone. + * Returns an int value of seconds since the top of the minute for a {@link DBDateTime} in the specified time zone. * * @param dateTime The {@link DBDateTime} for which to find the seconds. * @param timeZone The {@link DBTimeZone} to use when interpreting the date/time. - * @return A {@link QueryConstants#NULL_INT} if either input is null, otherwise, an int value of - * seconds since the top of the minute for the date/time represented by the - * {@link DBDateTime} when interpreted in the specified time zone. + * @return A {@link QueryConstants#NULL_INT} if either input is null, otherwise, an int value of seconds since the + * top of the minute for the date/time represented by the {@link DBDateTime} when interpreted in the + * specified time zone. 
*/ @SuppressWarnings("WeakerAccess") public static int secondOfMinute(DBDateTime dateTime, DBTimeZone timeZone) { @@ -1057,13 +998,12 @@ public static int secondOfMinute(DBDateTime dateTime, DBTimeZone timeZone) { } /** - * Returns an int value of seconds since the top of the minute for a {@link DBDateTime} in the - * New York time zone. + * Returns an int value of seconds since the top of the minute for a {@link DBDateTime} in the New York time zone. * * @param dateTime The {@link DBDateTime} for which to find the seconds. - * @return A {@link QueryConstants#NULL_INT} if the input is null, otherwise, an int value of - * seconds since the top of the minute for the date/time represented by the - * {@link DBDateTime} when interpreted in the New York time zone. + * @return A {@link QueryConstants#NULL_INT} if the input is null, otherwise, an int value of seconds since the top + * of the minute for the date/time represented by the {@link DBDateTime} when interpreted in the New York + * time zone. */ public static int secondOfMinuteNy(DBDateTime dateTime) { return secondOfMinute(dateTime, DBTimeZone.TZ_NY); @@ -1074,9 +1014,8 @@ public static int secondOfMinuteNy(DBDateTime dateTime) { * * @param dateTime The {@link DBDateTime} for which to find the year. * @param timeZone The {@link DBTimeZone} to use when interpreting the date/time. - * @return A {@link QueryConstants#NULL_INT} if either input is null, otherwise, an int value of - * the year for the date/time represented by the {@link DBDateTime} when interpreted in - * the specified time zone. + * @return A {@link QueryConstants#NULL_INT} if either input is null, otherwise, an int value of the year for the + * date/time represented by the {@link DBDateTime} when interpreted in the specified time zone. 
*/ public static int year(DBDateTime dateTime, DBTimeZone timeZone) { if (dateTime == null || timeZone == null) { @@ -1090,9 +1029,8 @@ public static int year(DBDateTime dateTime, DBTimeZone timeZone) { * Returns an int value of the year for a {@link DBDateTime} in the New York time zone. * * @param dateTime The {@link DBDateTime} for which to find the year. - * @return A {@link QueryConstants#NULL_INT} if either input is null, otherwise, an int value of - * the year for the date/time represented by the {@link DBDateTime} when interpreted in - * the New York time zone. + * @return A {@link QueryConstants#NULL_INT} if either input is null, otherwise, an int value of the year for the + * date/time represented by the {@link DBDateTime} when interpreted in the New York time zone. */ @SuppressWarnings("WeakerAccess") public static int yearNy(DBDateTime dateTime) { @@ -1100,14 +1038,12 @@ public static int yearNy(DBDateTime dateTime) { } /** - * Returns an int value of the two-digit year for a {@link DBDateTime} in the specified time - * zone. + * Returns an int value of the two-digit year for a {@link DBDateTime} in the specified time zone. * * @param dateTime The {@link DBDateTime} for which to find the year. * @param timeZone The {@link DBTimeZone} to use when interpreting the date/time. - * @return A {@link QueryConstants#NULL_INT} if either input is null, otherwise, an int value of - * the two-digit year for the date/time represented by the {@link DBDateTime} when - * interpreted in the specified time zone. + * @return A {@link QueryConstants#NULL_INT} if either input is null, otherwise, an int value of the two-digit year + * for the date/time represented by the {@link DBDateTime} when interpreted in the specified time zone. 
*/ @SuppressWarnings("WeakerAccess") public static int yearOfCentury(DBDateTime dateTime, DBTimeZone timeZone) { @@ -1119,13 +1055,11 @@ public static int yearOfCentury(DBDateTime dateTime, DBTimeZone timeZone) { } /** - * Returns an int value of the two-digit year for a {@link DBDateTime} in the New York time - * zone. + * Returns an int value of the two-digit year for a {@link DBDateTime} in the New York time zone. * * @param dateTime The {@link DBDateTime} for which to find the year. - * @return A {@link QueryConstants#NULL_INT} if either input is null, otherwise, an int value of - * the two-digit year for the date/time represented by the {@link DBDateTime} when - * interpreted in the New York time zone. + * @return A {@link QueryConstants#NULL_INT} if either input is null, otherwise, an int value of the two-digit year + * for the date/time represented by the {@link DBDateTime} when interpreted in the New York time zone. */ public static int yearOfCenturyNy(DBDateTime dateTime) { return yearOfCentury(dateTime, DBTimeZone.TZ_NY); @@ -1136,8 +1070,8 @@ public static int yearOfCenturyNy(DBDateTime dateTime) { * * @param dateTime The {@link DBDateTime} to convert. * @param timeZone The {@link DBTimeZone} to use when interpreting the date/time. - * @return 0.0 if either input is null, otherwise, a double value containing the Excel double - * format representation of a {@link DBDateTime} in the specified time zone. + * @return 0.0 if either input is null, otherwise, a double value containing the Excel double format representation + * of a {@link DBDateTime} in the specified time zone. */ @SuppressWarnings("WeakerAccess") public static double getExcelDateTime(DBDateTime dateTime, DBTimeZone timeZone) { @@ -1149,8 +1083,8 @@ public static double getExcelDateTime(DBDateTime dateTime, DBTimeZone timeZone) * * @param dateTime The {@link DBDateTime} to convert. * @param timeZone The {@link TimeZone} to use when interpreting the date/time. 
- * @return 0.0 if either input is null, otherwise, a double value containing the Excel double - * format representation of a {@link DBDateTime} in the specified time zone. + * @return 0.0 if either input is null, otherwise, a double value containing the Excel double format representation + * of a {@link DBDateTime} in the specified time zone. */ @SuppressWarnings("WeakerAccess") public static double getExcelDateTime(DBDateTime dateTime, TimeZone timeZone) { @@ -1166,8 +1100,8 @@ public static double getExcelDateTime(DBDateTime dateTime, TimeZone timeZone) { * Returns the Excel double time format representation of a {@link DBDateTime}. * * @param dateTime The {@link DBDateTime} to convert. - * @return 0.0 if the input is null, otherwise, a double value containing the Excel double - * format representation of a {@link DBDateTime} in the New York time zone. + * @return 0.0 if the input is null, otherwise, a double value containing the Excel double format representation of + * a {@link DBDateTime} in the New York time zone. */ @SuppressWarnings("WeakerAccess") public static double getExcelDateTime(DBDateTime dateTime) { @@ -1178,18 +1112,16 @@ public static double getExcelDateTime(DBDateTime dateTime) { * Converts microseconds to nanoseconds. * * @param micros The long value of microseconds to convert. - * @return A {@link QueryConstants#NULL_LONG} if the input is null. Throws a - * {@link DBDateTimeOverflowException} if the resultant value would exceed the range - * that can be stored in a long. Otherwise, returns a long containing the equivalent - * number of nanoseconds for the input in microseconds. + * @return A {@link QueryConstants#NULL_LONG} if the input is null. Throws a {@link DBDateTimeOverflowException} if + * the resultant value would exceed the range that can be stored in a long. Otherwise, returns a long + * containing the equivalent number of nanoseconds for the input in microseconds. 
*/ public static long microsToNanos(long micros) { if (micros == io.deephaven.util.QueryConstants.NULL_LONG) { return io.deephaven.util.QueryConstants.NULL_LONG; } if (Math.abs(micros) > MAX_CONVERTIBLE_MICROS) { - throw new DBDateTimeOverflowException( - "Converting " + micros + " micros to nanos would overflow"); + throw new DBDateTimeOverflowException("Converting " + micros + " micros to nanos would overflow"); } return micros * 1000; } @@ -1198,8 +1130,8 @@ public static long microsToNanos(long micros) { * Converts nanoseconds to microseconds. * * @param nanos The long value of nanoseconds to convert. - * @return A {@link QueryConstants#NULL_LONG} if the input is null. Otherwise, returns a long - * containing the equivalent number of microseconds for the input in nanoseconds. + * @return A {@link QueryConstants#NULL_LONG} if the input is null. Otherwise, returns a long containing the + * equivalent number of microseconds for the input in nanoseconds. */ @SuppressWarnings("WeakerAccess") public static long nanosToMicros(long nanos) { @@ -1213,8 +1145,8 @@ public static long nanosToMicros(long nanos) { * Converts a value of microseconds from Epoch in the UTC time zone to a {@link DBDateTime}. * * @param micros The long microseconds value to convert. - * @return {@link QueryConstants#NULL_LONG} if the input is null, otherwise, a - * {@link DBDateTime} representation of the input. + * @return {@link QueryConstants#NULL_LONG} if the input is null, otherwise, a {@link DBDateTime} representation of + * the input. */ public static DBDateTime microsToTime(long micros) { return nanosToTime(microsToNanos(micros)); @@ -1224,10 +1156,9 @@ public static DBDateTime microsToTime(long micros) { * Converts milliseconds to nanoseconds. * * @param millis The long milliseconds value to convert. - * @return {@link QueryConstants#NULL_LONG} if the input is equal to - * {@link QueryConstants#NULL_LONG}. 
Throws {@link DBDateTimeOverflowException} if the - * input is too large for conversion. Otherwise returns a long of the equivalent number - * of nanoseconds to the input. + * @return {@link QueryConstants#NULL_LONG} if the input is equal to {@link QueryConstants#NULL_LONG}. Throws + * {@link DBDateTimeOverflowException} if the input is too large for conversion. Otherwise returns a long of + * the equivalent number of nanoseconds to the input. */ public static long millisToNanos(long millis) { if (millis == io.deephaven.util.QueryConstants.NULL_LONG) { @@ -1239,8 +1170,7 @@ public static long millisToNanos(long millis) { return millis * 1000; } } else if (Math.abs(millis) > MAX_CONVERTIBLE_MILLIS) { - throw new DBDateTimeOverflowException( - "Converting " + millis + " millis to nanos would overflow"); + throw new DBDateTimeOverflowException("Converting " + millis + " millis to nanos would overflow"); } return millis * 1000000; } @@ -1249,18 +1179,16 @@ public static long millisToNanos(long millis) { * Converts seconds to nanoseconds. * * @param seconds The long value of seconds to convert. - * @return A {@link QueryConstants#NULL_LONG} if the input is null. Throws a - * {@link DBDateTimeOverflowException} if the resultant value would exceed the range - * that can be stored in a long. Otherwise, returns a long containing the equivalent - * number of nanoseconds for the input in seconds. + * @return A {@link QueryConstants#NULL_LONG} if the input is null. Throws a {@link DBDateTimeOverflowException} if + * the resultant value would exceed the range that can be stored in a long. Otherwise, returns a long + * containing the equivalent number of nanoseconds for the input in seconds. 
*/ public static long secondsToNanos(long seconds) { if (seconds == io.deephaven.util.QueryConstants.NULL_LONG) { return io.deephaven.util.QueryConstants.NULL_LONG; } if (Math.abs(seconds) > MAX_CONVERTIBLE_SECONDS) { - throw new DBDateTimeOverflowException( - "Converting " + seconds + " seconds to nanos would overflow"); + throw new DBDateTimeOverflowException("Converting " + seconds + " seconds to nanos would overflow"); } return seconds * 1000000000L; @@ -1270,8 +1198,8 @@ public static long secondsToNanos(long seconds) { * Converts nanoseconds to milliseconds. * * @param nanos The long value of nanoseconds to convert. - * @return A {@link QueryConstants#NULL_LONG} if the input is null. Otherwise, returns a long - * containing the equivalent number of milliseconds for the input in nanoseconds. + * @return A {@link QueryConstants#NULL_LONG} if the input is null. Otherwise, returns a long containing the + * equivalent number of milliseconds for the input in nanoseconds. */ public static long nanosToMillis(long nanos) { if (nanos == io.deephaven.util.QueryConstants.NULL_LONG) { @@ -1285,8 +1213,8 @@ public static long nanosToMillis(long nanos) { * Converts a value of milliseconds from Epoch in the UTC time zone to a {@link DBDateTime}. * * @param millis The long milliseconds value to convert. - * @return {@link QueryConstants#NULL_LONG} if the input is null, otherwise, a - * {@link DBDateTime} representation of the input. + * @return {@link QueryConstants#NULL_LONG} if the input is null, otherwise, a {@link DBDateTime} representation of + * the input. */ public static DBDateTime millisToTime(long millis) { return nanosToTime(millisToNanos(millis)); @@ -1296,19 +1224,19 @@ public static DBDateTime millisToTime(long millis) { * Converts a value of seconds from Epoch in the UTC time zone to a {@link DBDateTime}. * * @param seconds The long seconds value to convert. 
- * @return {@link QueryConstants#NULL_LONG} if the input is null, otherwise, a - * {@link DBDateTime} representation of the input. + * @return {@link QueryConstants#NULL_LONG} if the input is null, otherwise, a {@link DBDateTime} representation of + * the input. */ public static DBDateTime secondsToTime(long seconds) { return nanosToTime(secondsToNanos(seconds)); } /** - * Provides the current date/time, or, if a custom {@link #timeProvider} has been configured, - * provides the current time according to the custom provider. + * Provides the current date/time, or, if a custom {@link #timeProvider} has been configured, provides the current + * time according to the custom provider. * - * @return A {@link DBDateTime} of the current date and time from the system or from the - * configured alternate time provider. + * @return A {@link DBDateTime} of the current date and time from the system or from the configured alternate time + * provider. */ public static DBDateTime currentTime() { if (timeProvider != null) { @@ -1317,8 +1245,8 @@ public static DBDateTime currentTime() { return nanosToTime(millisToNanos(System.currentTimeMillis())); } - // TODO: Revoke public access to these fields and retire them! Use getCurrentDate(), maybe hold - // on to the CachedCurrentDate to skip a map lookup. + // TODO: Revoke public access to these fields and retire them! Use getCurrentDate(), maybe hold on to the + // CachedCurrentDate to skip a map lookup. public static String currentDateNy = null; public static long endOfCurrentDateNy = 0; @@ -1347,14 +1275,13 @@ public static String currentDateNy() { /** * Sets the {@link #lastBusinessDayNyOverride} to the previous business day from a currently set - * {@link #currentDateNyOverride} value. If {@link #currentDateNyOverride} has not been set, - * this method has no effect. + * {@link #currentDateNyOverride} value. If {@link #currentDateNyOverride} has not been set, this method has no + * effect. 
*/ public static void overrideLastBusinessDateNyFromCurrentDateNy() { if (currentDateNyOverride != null) { final BusinessCalendar bc = Calendars.calendar("USNYSE"); - lastBusinessDayNyOverride = - bc.previousBusinessDay(currentDateNyOverride.substring(0, 10)); + lastBusinessDayNyOverride = bc.previousBusinessDay(currentDateNyOverride.substring(0, 10)); } } @@ -1369,9 +1296,8 @@ public static void overrideLastBusinessDateNyFromCurrentDateNy() { public static long endOfCurrentDateNyLastBusinessDay = 0; /** - * Provides a String representing the previous business date in the New York time zone using the - * NYSE calendar, or, if a custom {@link #lastBusinessDayNyOverride} has been set, the date - * provided by that override. + * Provides a String representing the previous business date in the New York time zone using the NYSE calendar, or, + * if a custom {@link #lastBusinessDayNyOverride} has been set, the date provided by that override. * * @return A String in yyyy-MM-dd format. */ @@ -1380,13 +1306,12 @@ public static String lastBusinessDateNy() { } /** - * Provides a String representing the previous business date in the New York time zone using the - * NYSE calendar, or, if a custom {@link #lastBusinessDayNyOverride} has been set, the date - * provided by that override. + * Provides a String representing the previous business date in the New York time zone using the NYSE calendar, or, + * if a custom {@link #lastBusinessDayNyOverride} has been set, the date provided by that override. * - * @param currentTimeMillis The current date/time in milliseconds from Epoch to be used when - * determining the previous business date. Typically this is System.currentTimeMillis() - * and is passed in by calling the niladic variant of this method. + * @param currentTimeMillis The current date/time in milliseconds from Epoch to be used when determining the + * previous business date. 
Typically this is System.currentTimeMillis() and is passed in by calling the + * niladic variant of this method. * @return A String in yyyy-MM-dd format. */ public static String lastBusinessDateNy(final long currentTimeMillis) { @@ -1397,8 +1322,7 @@ public static String lastBusinessDateNy(final long currentTimeMillis) { if (currentTimeMillis > endOfCurrentDateNyLastBusinessDay) { final BusinessCalendar bc = Calendars.calendar("USNYSE"); - lastBusinessDateNy = - bc.previousBusinessDay(DBTimeUtils.millisToTime(currentTimeMillis)); + lastBusinessDateNy = bc.previousBusinessDay(DBTimeUtils.millisToTime(currentTimeMillis)); // Calculate when this cached value expires endOfCurrentDateNyLastBusinessDay = getMillisAtMidnightNy(currentTimeMillis); @@ -1407,14 +1331,13 @@ public static String lastBusinessDateNy(final long currentTimeMillis) { } /** - * Returns the number of milliseconds from Epoch for midnight at the beginning of the next day - * in the New York time zone relative to the date represented by a passed milliseconds from - * Epoch date/time. + * Returns the number of milliseconds from Epoch for midnight at the beginning of the next day in the New York time + * zone relative to the date represented by a passed milliseconds from Epoch date/time. * - * @param currentTimeMillis A long value of milliseconds from Epoch which is the date/time from - * which the next New York time zone midnight value should be calculated. - * @return A long of milliseconds from Epoch for midnight at the beginning of the next day in - * the New York time zone. + * @param currentTimeMillis A long value of milliseconds from Epoch which is the date/time from which the next New + * York time zone midnight value should be calculated. + * @return A long of milliseconds from Epoch for midnight at the beginning of the next day in the New York time + * zone. 
*/ private static long getMillisAtMidnightNy(final long currentTimeMillis) { final Calendar calendar = Calendar.getInstance(TimeZones.TZ_NEWYORK); @@ -1466,12 +1389,12 @@ private CachedCurrentDate(@NotNull final DBTimeZone timeZone) { void update(final long currentTimeMillis) { value = formatDate(millisToTime(currentTimeMillis), timeZone); valueExpirationTimeMillis = new DateTime(currentTimeMillis, timeZone.getTimeZone()) - .withFieldAdded(DurationFieldType.days(), 1).withTimeAtStartOfDay().getMillis(); + .withFieldAdded(DurationFieldType.days(), 1).withTimeAtStartOfDay().getMillis(); } } private static class CachedDateKey - extends KeyedObjectKey.Basic { + extends KeyedObjectKey.Basic { @Override public DBTimeZone getKey(final CACHED_DATE_TYPE cachedDate) { @@ -1480,13 +1403,12 @@ public DBTimeZone getKey(final CACHED_DATE_TYPE cachedDate) { } private static final KeyedObjectHashMap cachedCurrentDates = - new KeyedObjectHashMap<>(new CachedDateKey()); + new KeyedObjectHashMap<>(new CachedDateKey()); /** * Returns a String of the current date in the specified {@link DBTimeZone}. * - * @param timeZone The {@link DBTimeZone} to reference when evaluating the current date for - * "now". + * @param timeZone The {@link DBTimeZone} to reference when evaluating the current date for "now". * @return A String in format yyyy-MM-dd. */ public static String currentDate(DBTimeZone timeZone) { @@ -1505,16 +1427,15 @@ public static DBDateTime nanosToTime(long nanos) { } /** - * Converts a long offset from Epoch value to a {@link DBDateTime}. This method uses expected - * date ranges to infer whether the passed value is in milliseconds, microseconds, or - * nanoseconds. Thresholds used are {@link TimeConstants#MICROTIME_THRESHOLD} divided by 1000 - * for milliseconds, as-is for microseconds, and multiplied by 1000 for nanoseconds. The value - * is tested to see if its ABS exceeds the threshold. E.g. 
a value whose ABS is greater than - * 1000 * {@link TimeConstants#MICROTIME_THRESHOLD} will be treated as nanoseconds. + * Converts a long offset from Epoch value to a {@link DBDateTime}. This method uses expected date ranges to infer + * whether the passed value is in milliseconds, microseconds, or nanoseconds. Thresholds used are + * {@link TimeConstants#MICROTIME_THRESHOLD} divided by 1000 for milliseconds, as-is for microseconds, and + * multiplied by 1000 for nanoseconds. The value is tested to see if its ABS exceeds the threshold. E.g. a value + * whose ABS is greater than 1000 * {@link TimeConstants#MICROTIME_THRESHOLD} will be treated as nanoseconds. * * @param epoch The long Epoch offset value to convert. - * @return null, if the input is equal to {@link QueryConstants#NULL_LONG}, otherwise a - * {@link DBDateTime} based on the inferred conversion. + * @return null, if the input is equal to {@link QueryConstants#NULL_LONG}, otherwise a {@link DBDateTime} based on + * the inferred conversion. */ @SuppressWarnings("WeakerAccess") public static DBDateTime autoEpochToTime(long epoch) { @@ -1538,37 +1459,30 @@ public static DBDateTime autoEpochToTime(long epoch) { } /** - * Returns a {@link DBDateTime} value based on a starting value and a {@link DBPeriod} to add to - * it, but with a cap max value which is returned in case the starting value plus period exceeds - * the cap. + * Returns a {@link DBDateTime} value based on a starting value and a {@link DBPeriod} to add to it, but with a cap + * max value which is returned in case the starting value plus period exceeds the cap. * * @param original The starting {@link DBDateTime} value. * @param period The {@link DBPeriod} to add to dateTime. * @param cap A {@link DBDateTime} value to use as the maximum return value. 
- * @return a null {@link DBDateTime} if either original or period are null; the starting - * {@link DBDateTime} plus the specified period, if the result is not too large for a - * DBDateTime and does not exceed the cap value; the cap value if this is less than - * offset plus period. Throws a {@link DBDateTimeOverflowException - * DBDateTimeOverflowException} if the resultant value is more than max long nanoseconds - * from Epoch. - */ - public static DBDateTime cappedTimeOffset(DBDateTime original, DBPeriod period, - DBDateTime cap) { + * @return a null {@link DBDateTime} if either original or period are null; the starting {@link DBDateTime} plus the + * specified period, if the result is not too large for a DBDateTime and does not exceed the cap value; the + * cap value if this is less than offset plus period. Throws a {@link DBDateTimeOverflowException + * DBDateTimeOverflowException} if the resultant value is more than max long nanoseconds from Epoch. + */ + public static DBDateTime cappedTimeOffset(DBDateTime original, DBPeriod period, DBDateTime cap) { DBDateTime offset = DBTimeUtils.plus(original, period); return (offset.compareTo(cap) > 0) ? cap : offset; } /** - * Returns a {@link DBDateTime} value, which is at the starting (lower) end of a time range - * defined by the interval nanoseconds. For example, a 5*MINUTE intervalNanos value would return - * the date/time value for the start of the five minute window that contains the input date - * time. + * Returns a {@link DBDateTime} value, which is at the starting (lower) end of a time range defined by the interval + * nanoseconds. For example, a 5*MINUTE intervalNanos value would return the date/time value for the start of the + * five minute window that contains the input date time. * - * @param dateTime The {@link DBDateTime} for which to evaluate the start of the containing - * window. + * @param dateTime The {@link DBDateTime} for which to evaluate the start of the containing window. 
* @param intervalNanos The size of the window in nanoseconds. - * @return Null if either input is null, otherwise a {@link DBDateTime} representing the start - * of the window. + * @return Null if either input is null, otherwise a {@link DBDateTime} representing the start of the window. */ public static DBDateTime lowerBin(DBDateTime dateTime, long intervalNanos) { if (dateTime == null || intervalNanos == io.deephaven.util.QueryConstants.NULL_LONG) { @@ -1579,39 +1493,32 @@ public static DBDateTime lowerBin(DBDateTime dateTime, long intervalNanos) { } /** - * Returns a {@link DBDateTime} value, which is at the starting (lower) end of a time range - * defined by the interval nanoseconds. For example, a 5*MINUTE intervalNanos value would return - * the date/time value for the start of the five minute window that contains the input date - * time. + * Returns a {@link DBDateTime} value, which is at the starting (lower) end of a time range defined by the interval + * nanoseconds. For example, a 5*MINUTE intervalNanos value would return the date/time value for the start of the + * five minute window that contains the input date time. * - * @param dateTime The {@link DBDateTime} for which to evaluate the start of the containing - * window. + * @param dateTime The {@link DBDateTime} for which to evaluate the start of the containing window. * @param intervalNanos The size of the window in nanoseconds. - * @param offset The window start offset in nanoseconds. For example, a value of MINUTE would - * offset all windows by one minute. - * @return Null if either input is null, otherwise a {@link DBDateTime} representing the start - * of the window. + * @param offset The window start offset in nanoseconds. For example, a value of MINUTE would offset all windows by + * one minute. + * @return Null if either input is null, otherwise a {@link DBDateTime} representing the start of the window. 
*/ public static DBDateTime lowerBin(DBDateTime dateTime, long intervalNanos, long offset) { - if (dateTime == null || intervalNanos == QueryConstants.NULL_LONG - || offset == QueryConstants.NULL_LONG) { + if (dateTime == null || intervalNanos == QueryConstants.NULL_LONG || offset == QueryConstants.NULL_LONG) { return null; } - return nanosToTime( - LongNumericPrimitives.lowerBin(dateTime.getNanos() - offset, intervalNanos) + offset); + return nanosToTime(LongNumericPrimitives.lowerBin(dateTime.getNanos() - offset, intervalNanos) + offset); } /** - * Returns a {@link DBDateTime} value, which is at the ending (upper) end of a time range - * defined by the interval nanoseconds. For example, a 5*MINUTE intervalNanos value would return - * the date/time value for the end of the five minute window that contains the input date time. + * Returns a {@link DBDateTime} value, which is at the ending (upper) end of a time range defined by the interval + * nanoseconds. For example, a 5*MINUTE intervalNanos value would return the date/time value for the end of the five + * minute window that contains the input date time. * - * @param dateTime The {@link DBDateTime} for which to evaluate the end of the containing - * window. + * @param dateTime The {@link DBDateTime} for which to evaluate the end of the containing window. * @param intervalNanos The size of the window in nanoseconds. - * @return Null if either input is null, otherwise a {@link DBDateTime} representing the end of - * the window. + * @return Null if either input is null, otherwise a {@link DBDateTime} representing the end of the window. 
*/ public static DBDateTime upperBin(DBDateTime dateTime, long intervalNanos) { if (dateTime == null || intervalNanos == io.deephaven.util.QueryConstants.NULL_LONG) { @@ -1622,37 +1529,33 @@ public static DBDateTime upperBin(DBDateTime dateTime, long intervalNanos) { } /** - * Returns a {@link DBDateTime} value, which is at the ending (upper) end of a time range - * defined by the interval nanoseconds. For example, a 5*MINUTE intervalNanos value would return - * the date/time value for the end of the five minute window that contains the input date time. + * Returns a {@link DBDateTime} value, which is at the ending (upper) end of a time range defined by the interval + * nanoseconds. For example, a 5*MINUTE intervalNanos value would return the date/time value for the end of the five + * minute window that contains the input date time. * - * @param dateTime The {@link DBDateTime} for which to evaluate the end of the containing - * window. + * @param dateTime The {@link DBDateTime} for which to evaluate the end of the containing window. * @param intervalNanos The size of the window in nanoseconds. - * @param offset The window start offset in nanoseconds. For example, a value of MINUTE would - * offset all windows by one minute. - * @return Null if either input is null, otherwise a {@link DBDateTime} representing the end of - * the window. + * @param offset The window start offset in nanoseconds. For example, a value of MINUTE would offset all windows by + * one minute. + * @return Null if either input is null, otherwise a {@link DBDateTime} representing the end of the window. 
*/ public static DBDateTime upperBin(DBDateTime dateTime, long intervalNanos, long offset) { if (dateTime == null || intervalNanos == io.deephaven.util.QueryConstants.NULL_LONG - || offset == io.deephaven.util.QueryConstants.NULL_LONG) { + || offset == io.deephaven.util.QueryConstants.NULL_LONG) { return null; } - return nanosToTime( - LongNumericPrimitives.upperBin(dateTime.getNanos() - offset, intervalNanos) + offset); + return nanosToTime(LongNumericPrimitives.upperBin(dateTime.getNanos() - offset, intervalNanos) + offset); } // ------------------------------------------------------------------------------------------------------------------------------------------------------------------ // + can only result in flow if both positive or both negative - private static long checkOverflowPlus(final long l1, final long l2, - final boolean minusOperation) { + private static long checkOverflowPlus(final long l1, final long l2, final boolean minusOperation) { if (l1 > 0 && l2 > 0 && Long.MAX_VALUE - l1 < l2) { final String message = minusOperation - ? "Subtracting " + -l2 + " nanos from " + l1 + " would overflow" - : "Adding " + l2 + " nanos to " + l1 + " would overflow"; + ? "Subtracting " + -l2 + " nanos from " + l1 + " would overflow" + : "Adding " + l2 + " nanos to " + l1 + " would overflow"; throw new DBDateTimeOverflowException(message); } @@ -1664,12 +1567,11 @@ private static long checkOverflowPlus(final long l1, final long l2, } // - can only result in flow if one is positive and one is negative - private static long checkUnderflowMinus(final long l1, final long l2, - final boolean minusOperation) { + private static long checkUnderflowMinus(final long l1, final long l2, final boolean minusOperation) { if (l1 < 0 && l2 > 0 && Long.MIN_VALUE + l2 > -l1) { final String message = minusOperation - ? "Subtracting " + l2 + " nanos from " + l1 + " would underflow" - : "Adding " + -l2 + " nanos to " + l1 + " would underflow"; + ? 
"Subtracting " + l2 + " nanos from " + l1 + " would underflow" + : "Adding " + -l2 + " nanos to " + l1 + " would underflow"; throw new DBDateTimeOverflowException(message); } @@ -1681,20 +1583,17 @@ private static long checkUnderflowMinus(final long l1, final long l2, } /** - * Converts an expression, replacing DBDateTime and DBPeriod literals with references to - * constant DBDateTime/DBPeriod instances. + * Converts an expression, replacing DBDateTime and DBPeriod literals with references to constant + * DBDateTime/DBPeriod instances. * * @param formula The formula to convert. - * @return A {@link Result} object, which includes the converted formula string, a string of - * instance variable declarations, and a map describing the names and types of these - * instance variables. + * @return A {@link Result} object, which includes the converted formula string, a string of instance variable + * declarations, and a map describing the names and types of these instance variables. * * @throws Exception If any error occurs or a literal value cannot be parsed. */ - // TODO: This should probably be handled in DBLanguageParser.accept(CharLiteralExpr, - // StringBuilder). - public static Result convertExpression(String formula) throws Exception { // TODO: Why throw - // Exception? + // TODO: This should probably be handled in DBLanguageParser.accept(CharLiteralExpr, StringBuilder). + public static Result convertExpression(String formula) throws Exception { // TODO: Why throw Exception? 
final StringBuilder instanceVariablesString = new StringBuilder(); final HashMap newVariables = new HashMap<>(); @@ -1717,40 +1616,40 @@ public static Result convertExpression(String formula) throws Exception { // TOD if (convertDateTimeQuiet(s) != null) { matcher.appendReplacement(convertedFormula, "_date" + dateTimeIndex); - instanceVariablesString.append(" private DBDateTime _date") - .append(dateTimeIndex).append("=DBTimeUtils.convertDateTime(\"") - .append(formula, matcher.start() + 1, matcher.end() - 1).append("\");\n"); + instanceVariablesString.append(" private DBDateTime _date").append(dateTimeIndex) + .append("=DBTimeUtils.convertDateTime(\"") + .append(formula, matcher.start() + 1, matcher.end() - 1).append("\");\n"); newVariables.put("_date" + dateTimeIndex, DBDateTime.class); dateTimeIndex++; } else if (convertDateQuiet(s) != null) { matcher.appendReplacement(convertedFormula, "_localDate" + localDateIndex); - instanceVariablesString.append(" private java.time.LocalDate _localDate") - .append(localDateIndex).append("=DBTimeUtils.convertDate(\"") - .append(formula, matcher.start() + 1, matcher.end() - 1).append("\");\n"); + instanceVariablesString.append(" private java.time.LocalDate _localDate").append(localDateIndex) + .append("=DBTimeUtils.convertDate(\"").append(formula, matcher.start() + 1, matcher.end() - 1) + .append("\");\n"); newVariables.put("_localDate" + localDateIndex, LocalDate.class); localDateIndex++; } else if (convertTimeQuiet(s) != io.deephaven.util.QueryConstants.NULL_LONG) { matcher.appendReplacement(convertedFormula, "_time" + timeIndex); instanceVariablesString.append(" private long _time").append(timeIndex) - .append("=DBTimeUtils.convertTime(\"") - .append(formula, matcher.start() + 1, matcher.end() - 1).append("\");\n"); + .append("=DBTimeUtils.convertTime(\"").append(formula, matcher.start() + 1, matcher.end() - 1) + .append("\");\n"); newVariables.put("_time" + timeIndex, long.class); timeIndex++; } else if 
(convertPeriodQuiet(s) != null) { matcher.appendReplacement(convertedFormula, "_period" + periodIndex); - instanceVariablesString.append(" private DBPeriod _period") - .append(periodIndex).append("=DBTimeUtils.convertPeriod(\"") - .append(formula, matcher.start() + 1, matcher.end() - 1).append("\");\n"); + instanceVariablesString.append(" private DBPeriod _period").append(periodIndex) + .append("=DBTimeUtils.convertPeriod(\"").append(formula, matcher.start() + 1, matcher.end() - 1) + .append("\");\n"); newVariables.put("_period" + periodIndex, DBPeriod.class); periodIndex++; } else if (convertLocalTimeQuiet(s) != null) { matcher.appendReplacement(convertedFormula, "_localTime" + timeIndex); - instanceVariablesString.append(" private java.time.LocalTime _localTime") - .append(timeIndex).append("=DBTimeUtils.convertLocalTime(\"") - .append(formula, matcher.start() + 1, matcher.end() - 1).append("\");\n"); + instanceVariablesString.append(" private java.time.LocalTime _localTime").append(timeIndex) + .append("=DBTimeUtils.convertLocalTime(\"") + .append(formula, matcher.start() + 1, matcher.end() - 1).append("\");\n"); newVariables.put("_localTime" + timeIndex, LocalTime.class); timeIndex++; } else { @@ -1760,13 +1659,11 @@ public static Result convertExpression(String formula) throws Exception { // TOD matcher.appendTail(convertedFormula); - return new Result(convertedFormula.toString(), instanceVariablesString.toString(), - newVariables); + return new Result(convertedFormula.toString(), instanceVariablesString.toString(), newVariables); } /** - * Converts a String date/time to nanoseconds from Epoch or a nanoseconds period. Three patterns - * are supported: + * Converts a String date/time to nanoseconds from Epoch or a nanoseconds period. Three patterns are supported: *

    * yyyy-MM-ddThh:mm:ss[.nnnnnnnnn] TZ for date/time values *

    @@ -1774,16 +1671,15 @@ public static Result convertExpression(String formula) throws Exception { // TOD * hh:mm:ss[.nnnnnnnnn] for time values *

    *

    - * Period Strings in the form of numbertype, e.g. 1W for one week, and Tnumbertype for times, - * e.g. T1M for one minute + * Period Strings in the form of numbertype, e.g. 1W for one week, and Tnumbertype for times, e.g. T1M for one + * minute *

    * - * @param formula The String to be evaluated and converted. Optionally, but preferred, enclosed - * in straight single ticks. - * @return A long value representing an Epoch offset in nanoseconds for a time or date/time, or - * a duration in nanoseconds for a period. Throws {@link DBDateTimeOverflowException} if - * the resultant value would be longer than max long, or - * {@link IllegalArgumentException} if expression cannot be evaluated. + * @param formula The String to be evaluated and converted. Optionally, but preferred, enclosed in straight single + * ticks. + * @return A long value representing an Epoch offset in nanoseconds for a time or date/time, or a duration in + * nanoseconds for a period. Throws {@link DBDateTimeOverflowException} if the resultant value would be + * longer than max long, or {@link IllegalArgumentException} if expression cannot be evaluated. */ public static long expressionToNanos(String formula) { if (!formula.startsWith("'")) { @@ -1805,25 +1701,21 @@ public static long expressionToNanos(String formula) { final DBPeriod period = convertPeriodQuiet(s); if (period != null) { try { - return StrictMath.multiplyExact( - period.getJodaPeriod().toStandardDuration().getMillis(), 1_000_000L); + return StrictMath.multiplyExact(period.getJodaPeriod().toStandardDuration().getMillis(), 1_000_000L); } catch (ArithmeticException ex) { - throw new DBDateTimeOverflowException( - "Period length in nanoseconds exceeds Long.MAX_VALUE : " + s, ex); + throw new DBDateTimeOverflowException("Period length in nanoseconds exceeds Long.MAX_VALUE : " + s, ex); } } throw new IllegalArgumentException("Cannot parse datetime/time/period : " + s); } /** - * Attempt to convert the given string to a LocalDate. This should not accept dates with - * times, as we want those to be interpreted as DBDateTime values. 
The ideal date format is - * YYYY-MM-DD since it's the least ambiguous, but this method also parses slash-delimited dates - * according to the system "date style". + * Attempt to convert the given string to a LocalDate. This should not accept dates with times, as we want + * those to be interpreted as DBDateTime values. The ideal date format is YYYY-MM-DD since it's the least ambiguous, + * but this method also parses slash-delimited dates according to the system "date style". * * @param s the date string to convert - * @throws RuntimeException if the date cannot be converted, otherwise returns a - * {@link LocalDate} + * @throws RuntimeException if the date cannot be converted, otherwise returns a {@link LocalDate} */ @SuppressWarnings("WeakerAccess") public static LocalDate convertDate(String s) { @@ -1839,11 +1731,10 @@ public static LocalDate convertDate(String s) { /** * Converts a DateTime String from a few specific zoned formats to a DBDateTime * - * @param s String to be converted, usually in the form yyyy-MM-ddThh:mm:ss and with optional - * sub-seconds after an optional decimal point, followed by a mandatory time zone - * character code - * @throws RuntimeException if the String cannot be converted, otherwise a {@link DBDateTime} - * from the parsed String. + * @param s String to be converted, usually in the form yyyy-MM-ddThh:mm:ss and with optional sub-seconds after an + * optional decimal point, followed by a mandatory time zone character code + * @throws RuntimeException if the String cannot be converted, otherwise a {@link DBDateTime} from the parsed + * String. */ public static DBDateTime convertDateTime(String s) { DBDateTime ret = convertDateTimeQuiet(s); @@ -1861,8 +1752,8 @@ public static DBDateTime convertDateTime(String s) { * hh:mm:ss[.nnnnnnnnn]. * * @param s The String to be evaluated and converted. - * @return A long value representing an Epoch offset in nanoseconds. Throws - * {@link RuntimeException} if the String cannot be parsed. 
+ * @return A long value representing an Epoch offset in nanoseconds. Throws {@link RuntimeException} if the String + * cannot be parsed. */ public static long convertTime(String s) { long ret = convertTimeQuiet(s); @@ -1877,8 +1768,8 @@ public static long convertTime(String s) { /** * Converts a String into a {@link DBPeriod} object. * - * @param s The String to convert in the form of numbertype, e.g. 1W for one week, and - * Tnumbertype for times, e.g. T1M for one minute. + * @param s The String to convert in the form of numbertype, e.g. 1W for one week, and Tnumbertype for times, e.g. + * T1M for one minute. * @throws RuntimeException if the String cannot be parsed, otherwise a {@link DBPeriod} object. */ @SuppressWarnings("WeakerAccess") @@ -1898,12 +1789,12 @@ private static int extractTwoDigitNum(String s, int startIndex) { private static int extractThreeDigitNum(String s, int startIndex) { return (s.charAt(startIndex) - '0') * 100 + (s.charAt(startIndex + 1) - '0') * 10 - + (s.charAt(startIndex + 2) - '0'); + + (s.charAt(startIndex + 2) - '0'); } private static int extractFourDigitNum(String s, int startIndex) { return (s.charAt(startIndex) - '0') * 1000 + (s.charAt(startIndex + 1) - '0') * 100 - + (s.charAt(startIndex + 2) - '0') * 10 + (s.charAt(startIndex + 3) - '0'); + + (s.charAt(startIndex + 2) - '0') * 10 + (s.charAt(startIndex + 3) - '0'); } private static int extractSixDigitNum(String s, int startIndex) { @@ -1926,17 +1817,13 @@ public static LocalTime convertLocalTimeQuiet(String s) { // Pattern.compile("([0-9][0-9]):?([0-9][0-9])?:?([0-9][0-9])?(\\.([0-9]{1,9}))?"); final Matcher matcher = LOCAL_TIME_PATTERN.matcher(s); if (matcher.matches()) { - final int hour = Integer.parseInt(matcher.group(1)); // hour is the only required - // field - final int minute = - matcher.group(2) != null ? Integer.parseInt(matcher.group(2)) : 0; - final int second = - matcher.group(3) != null ? 
Integer.parseInt(matcher.group(3)) : 0; + final int hour = Integer.parseInt(matcher.group(1)); // hour is the only required field + final int minute = matcher.group(2) != null ? Integer.parseInt(matcher.group(2)) : 0; + final int second = matcher.group(3) != null ? Integer.parseInt(matcher.group(3)) : 0; final int nanos; if (matcher.group(4) != null) { final String fractionStr = matcher.group(5); // group 5 excludes the decimal pt - nanos = Integer.parseInt(fractionStr) - * (int) Math.pow(10, 9 - fractionStr.length()); + nanos = Integer.parseInt(fractionStr) * (int) Math.pow(10, 9 - fractionStr.length()); } else { nanos = 0; } @@ -1949,9 +1836,8 @@ public static LocalTime convertLocalTimeQuiet(String s) { } /** - * Attempt to convert the given string to a LocalDate. This should not accept dates with - * times, as we want those to be interpreted as DBDateTime values. The ideal date format is - * YYYY-MM-DD since it's the least ambiguous. + * Attempt to convert the given string to a LocalDate. This should not accept dates with times, as we want + * those to be interpreted as DBDateTime values. The ideal date format is YYYY-MM-DD since it's the least ambiguous. * * @param s the date string to convert * @return the LocalDate formatted using the default date style. @@ -1972,9 +1858,8 @@ private static LocalDate matchStdDate(Pattern pattern, String s) { } /** - * Attempt to convert the given string to a LocalDate. This should not accept dates with - * times, as we want those to be interpreted as DBDateTime values. The ideal date format is - * YYYY-MM-DD since it's the least ambiguous. + * Attempt to convert the given string to a LocalDate. This should not accept dates with times, as we want + * those to be interpreted as DBDateTime values. The ideal date format is YYYY-MM-DD since it's the least ambiguous. 
* * @param s the date string * @param dateStyle indicates how to interpret slash-delimited dates @@ -1991,8 +1876,7 @@ public static LocalDate convertDateQuiet(String s, DateStyle dateStyle) { return localDate; } - // see if we can match one of the slash-delimited styles, the interpretation of which - // requires knowing the + // see if we can match one of the slash-delimited styles, the interpretation of which requires knowing the // system date style setting (for example Europeans often write dates as d/m/y). final Matcher slashMatcher = SLASH_DATE_PATTERN.matcher(s); if (slashMatcher.matches()) { @@ -2024,8 +1908,7 @@ public static LocalDate convertDateQuiet(String s, DateStyle dateStyle) { final int year; // for 2 digit years, lean on java's standard interpretation if (slashMatcher.group(yearFinal2DigitsGroup) == null) { - year = - Year.parse(slashMatcher.group(yearGroup), TWO_DIGIT_YR_FORMAT).getValue(); + year = Year.parse(slashMatcher.group(yearGroup), TWO_DIGIT_YR_FORMAT).getValue(); } else { year = Integer.parseInt(slashMatcher.group(yearGroup)); } @@ -2040,9 +1923,8 @@ public static LocalDate convertDateQuiet(String s, DateStyle dateStyle) { } /* - * This version assumes you know what date it is and that the format is correct and just want - * the time, so we can save time (e.g. 2010-09-02T08:17:17.502-0400) - * 0123456789012345678901234567 1 2 + * This version assumes you know what date it is and that the format is correct and just want the time, so we can + * save time (e.g. 
2010-09-02T08:17:17.502-0400) 0123456789012345678901234567 1 2 */ @SuppressWarnings("WeakerAccess") @@ -2078,11 +1960,9 @@ public static DBDateTime convertJimMicrosDateTimeQuiet(String s) { /** * Converts a DateTime String from a few specific zoned formats to a DBDateTime * - * @param s String to be converted, usually in the form yyyy-MM-ddThh:mm:ss and with optional - * sub-seconds after an optional decimal point, followed by a mandatory time zone - * character code - * @return A DBDateTime from the parsed String, or null if the format is not recognized or an - * exception occurs + * @param s String to be converted, usually in the form yyyy-MM-ddThh:mm:ss and with optional sub-seconds after an + * optional decimal point, followed by a mandatory time zone character code + * @return A DBDateTime from the parsed String, or null if the format is not recognized or an exception occurs */ public static DBDateTime convertDateTimeQuiet(final String s) { try { @@ -2093,8 +1973,7 @@ public static DBDateTime convertDateTimeQuiet(final String s) { if (spaceIndex == -1) { // no timezone return null; } - timeZone = - DBTimeZone.valueOf("TZ_" + s.substring(spaceIndex + 1).trim().toUpperCase()); + timeZone = DBTimeZone.valueOf("TZ_" + s.substring(spaceIndex + 1).trim().toUpperCase()); dateTimeString = s.substring(0, spaceIndex); } else if (JIM_DATETIME_PATTERN.matcher(s).matches()) { return convertJimDateTimeQuiet(s); @@ -2107,28 +1986,24 @@ public static DBDateTime convertDateTimeQuiet(final String s) { } int decimalIndex = dateTimeString.indexOf('.'); if (decimalIndex == -1) { - return new DBDateTime(millisToNanos( - new DateTime(dateTimeString, timeZone.getTimeZone()).getMillis())); + return new DBDateTime(millisToNanos(new DateTime(dateTimeString, timeZone.getTimeZone()).getMillis())); } else { final long subsecondNanos = parseNanos(dateTimeString.substring(decimalIndex + 1)); - return new DBDateTime( - millisToNanos(new DateTime(dateTimeString.substring(0, decimalIndex), + 
return new DBDateTime(millisToNanos(new DateTime(dateTimeString.substring(0, decimalIndex), timeZone.getTimeZone()).getMillis()) + subsecondNanos); } } catch (Exception e) { - // shouldn't get here too often, but somehow something snuck through. we'll just return - // null below... + // shouldn't get here too often, but somehow something snuck through. we'll just return null below... } return null; } /** - * Converts a String of digits of any length to a nanoseconds long value. Will ignore anything - * longer than 9 digits, and will throw a NumberFormatException if any non-numeric character is - * found. Strings shorter than 9 digits will be interpreted as sub-second values to the right of - * the decimal point. + * Converts a String of digits of any length to a nanoseconds long value. Will ignore anything longer than 9 digits, + * and will throw a NumberFormatException if any non-numeric character is found. Strings shorter than 9 digits will + * be interpreted as sub-second values to the right of the decimal point. * * @param input The String to convert * @return long value in nanoseconds @@ -2143,8 +2018,7 @@ private static long parseNanos(@NotNull final String input) { } else { digit = Character.digit(input.charAt(i), 10); if (digit < 0) { - throw new NumberFormatException( - "Invalid character for nanoseconds conversion: " + input.charAt(i)); + throw new NumberFormatException("Invalid character for nanoseconds conversion: " + input.charAt(i)); } } result += digit; @@ -2152,8 +2026,8 @@ private static long parseNanos(@NotNull final String input) { return result; } - // This function and the next are FAR faster than convertJimMicrosDateTimeQuiet provided you can - // reuse the time zone across calls. Helpful for log file parsing. + // This function and the next are FAR faster than convertJimMicrosDateTimeQuiet provided you can reuse the time zone + // across calls. Helpful for log file parsing. 
public static DBDateTime convertJimMicrosDateTimeQuietFast(String s, DateTimeZone timeZone) { int year = extractFourDigitNum(s, 0); int month = extractTwoDigitNum(s, 5); @@ -2166,20 +2040,19 @@ public static DBDateTime convertJimMicrosDateTimeQuietFast(String s, DateTimeZon return new DBDateTime(millisToNanos(d.getMillis()) + (micros % 1000) * 1000); } - // This function is very slow. If you can call it once and reuse the result across many calls to - // the above, this is FAR faster than convertJimMicrosDateTimeQuiet + // This function is very slow. If you can call it once and reuse the result across many calls to the above, this is + // FAR faster than convertJimMicrosDateTimeQuiet public static DateTimeZone convertJimMicrosDateTimeQuietFastTz(String s) { int tzHours = (s.charAt(26) == '-' ? -1 : 1) * extractTwoDigitNum(s, 27); return DateTimeZone.forOffsetHours(tzHours); } /** - * Converts a time String in the form hh:mm:ss[.nnnnnnnnn] to a long nanoseconds offset from - * Epoch. + * Converts a time String in the form hh:mm:ss[.nnnnnnnnn] to a long nanoseconds offset from Epoch. * * @param s The String to convert. - * @return {@link QueryConstants#NULL_LONG} if the String cannot be parsed, otherwise long - * nanoseconds offset from Epoch. + * @return {@link QueryConstants#NULL_LONG} if the String cannot be parsed, otherwise long nanoseconds offset from + * Epoch. 
*/ public static long convertTimeQuiet(String s) { try { @@ -2213,18 +2086,17 @@ public static long convertTimeQuiet(String s) { String[] tokens = s.split(":"); if (tokens.length == 2) { // hh:mm - return multiplier * (1000000000L - * (3600 * Integer.parseInt(tokens[0]) + 60 * Integer.parseInt(tokens[1])) - + dayNanos + subsecondNanos); + return multiplier + * (1000000000L * (3600 * Integer.parseInt(tokens[0]) + 60 * Integer.parseInt(tokens[1])) + + dayNanos + subsecondNanos); } else if (tokens.length == 3) { // hh:mm:ss - return multiplier * (1000000000L * (3600 * Integer.parseInt(tokens[0]) - + 60 * Integer.parseInt(tokens[1]) + Integer.parseInt(tokens[2])) + dayNanos - + subsecondNanos); + return multiplier + * (1000000000L * (3600 * Integer.parseInt(tokens[0]) + 60 * Integer.parseInt(tokens[1]) + + Integer.parseInt(tokens[2])) + dayNanos + subsecondNanos); } } } catch (Exception e) { - // shouldn't get here too often, but somehow something snuck through. we'll just return - // null below... + // shouldn't get here too often, but somehow something snuck through. we'll just return null below... } return io.deephaven.util.QueryConstants.NULL_LONG; @@ -2233,8 +2105,8 @@ public static long convertTimeQuiet(String s) { /** * Converts a String into a {@link DBPeriod} object. * - * @param s The String to convert in the form of numbertype, e.g. 1W for one week, and - * Tnumbertype for times, e.g. T1M for one minute. + * @param s The String to convert in the form of numbertype, e.g. 1W for one week, and Tnumbertype for times, e.g. + * T1M for one minute. * @return null if the String cannot be parsed, otherwise a {@link DBPeriod} object. */ public static DBPeriod convertPeriodQuiet(String s) { @@ -2247,8 +2119,7 @@ public static DBPeriod convertPeriodQuiet(String s) { return new DBPeriod(s); } } catch (Exception e) { - // shouldn't get here too often, but somehow something snuck through. we'll just return - // null below... 
+ // shouldn't get here too often, but somehow something snuck through. we'll just return null below... } return null; @@ -2282,8 +2153,8 @@ public static ZonedDateTime getZonedDateTime(DBDateTime dateTime, DBTimeZone tim * Converts a {@link ZonedDateTime} to a {@link DBDateTime}. * * @param zonedDateTime The a {@link ZonedDateTime} to convert. - * @throws DBDateTimeOverflowException if the input is out of the range for a - * {@link DBDateTime}, otherwise, a {@link DBDateTime} version of the input. + * @throws DBDateTimeOverflowException if the input is out of the range for a {@link DBDateTime}, otherwise, a + * {@link DBDateTime} version of the input. */ public static DBDateTime toDateTime(ZonedDateTime zonedDateTime) { int nanos = zonedDateTime.getNano(); @@ -2291,8 +2162,7 @@ public static DBDateTime toDateTime(ZonedDateTime zonedDateTime) { long limit = (Long.MAX_VALUE - nanos) / DBTimeUtils.SECOND; if (seconds >= limit) { - throw new DBDateTimeOverflowException( - "Overflow: cannot convert " + zonedDateTime + " to new DBDateTime"); + throw new DBDateTimeOverflowException("Overflow: cannot convert " + zonedDateTime + " to new DBDateTime"); } return new DBDateTime(nanos + (seconds * DBTimeUtils.SECOND)); @@ -2302,8 +2172,8 @@ public static DBDateTime toDateTime(ZonedDateTime zonedDateTime) { * Returns a {@link ChronoField} indicating the level of precision in a String time value. * * @param timeDef The time String to evaluate. - * @return null if the time String cannot be parsed, otherwise a {@link ChronoField} for the - * finest units in the String (e.g. "10:00:00" would yield SecondOfMinute). + * @return null if the time String cannot be parsed, otherwise a {@link ChronoField} for the finest units in the + * String (e.g. "10:00:00" would yield SecondOfMinute). 
*/ public static ChronoField getFinestDefinedUnit(String timeDef) { Matcher dtMatcher = CAPTURING_DATETIME_PATTERN.matcher(timeDef); @@ -2321,17 +2191,16 @@ public static ChronoField getFinestDefinedUnit(String timeDef) { } /** - * A container object for the result of {@link #convertExpression(String)}, which includes the - * converted formula String, a String of instance variable declarations, and a map describing - * the names and types of these instance variables. + * A container object for the result of {@link #convertExpression(String)}, which includes the converted formula + * String, a String of instance variable declarations, and a map describing the names and types of these instance + * variables. */ public static class Result { private final String convertedFormula; private final String instanceVariablesString; private final HashMap newVariables; - public Result(String convertedFormula, String instanceVariablesString, - HashMap newVariables) { + public Result(String convertedFormula, String instanceVariablesString, HashMap newVariables) { this.convertedFormula = convertedFormula; this.instanceVariablesString = instanceVariablesString; this.newVariables = newVariables; @@ -2351,8 +2220,8 @@ public HashMap getNewVariables() { } /** - * A type of RuntimeException thrown when operations resulting in {@link DBDateTime} values - * would exceed the range available by max or min long nanoseconds. + * A type of RuntimeException thrown when operations resulting in {@link DBDateTime} values would exceed the range + * available by max or min long nanoseconds. */ public static class DBDateTimeOverflowException extends RuntimeException { private DBDateTimeOverflowException() { @@ -2369,8 +2238,7 @@ private DBDateTimeOverflowException(String message, Throwable cause) { } /** - * Create a DateTimeFormatter formatter with the specified time zone name using the standard - * yyyy-MM-dd format. 
+ * Create a DateTimeFormatter formatter with the specified time zone name using the standard yyyy-MM-dd format. * * @param timeZoneName the time zone name * @return a formatter set for the specified time zone @@ -2381,16 +2249,15 @@ public static DateTimeFormatter createFormatter(final String timeZoneName) { } /** - * Given a DateTimeFormatter and a timestamp in millis, return the date as a String in standard - * column-partition format of yyyy-MM-dd. A timestamp of NULL_LONG means use the system current - * time. + * Given a DateTimeFormatter and a timestamp in millis, return the date as a String in standard column-partition + * format of yyyy-MM-dd. A timestamp of NULL_LONG means use the system current time. * * @param dateTimeFormatter the date formatter * @param timestampMillis the timestamp in millis * @return the formatted date */ - public static String getPartitionFromTimestampMillis( - @NotNull final DateTimeFormatter dateTimeFormatter, final long timestampMillis) { + public static String getPartitionFromTimestampMillis(@NotNull final DateTimeFormatter dateTimeFormatter, + final long timestampMillis) { if (timestampMillis == io.deephaven.util.QueryConstants.NULL_LONG) { return dateTimeFormatter.format(Instant.ofEpochMilli(System.currentTimeMillis())); } @@ -2398,16 +2265,15 @@ public static String getPartitionFromTimestampMillis( } /** - * Given a DateTimeFormatter and a timestamp in micros from epoch, return the date as a String - * in standard column-partition format of yyyy-MM-dd. A timestamp of NULL_LONG means use the - * system current time. + * Given a DateTimeFormatter and a timestamp in micros from epoch, return the date as a String in standard + * column-partition format of yyyy-MM-dd. A timestamp of NULL_LONG means use the system current time. 
* * @param dateTimeFormatter the date formatter * @param timestampMicros the timestamp in micros * @return the formatted date */ - public static String getPartitionFromTimestampMicros( - @NotNull final DateTimeFormatter dateTimeFormatter, final long timestampMicros) { + public static String getPartitionFromTimestampMicros(@NotNull final DateTimeFormatter dateTimeFormatter, + final long timestampMicros) { if (timestampMicros == io.deephaven.util.QueryConstants.NULL_LONG) { return dateTimeFormatter.format(Instant.ofEpochMilli(System.currentTimeMillis())); } @@ -2415,16 +2281,15 @@ public static String getPartitionFromTimestampMicros( } /** - * Given a DateTimeFormatter and a timestamp in nanos from epoch, return the date as a String in - * standard column-partition format of yyyy-MM-dd. A timestamp of NULL_LONG means use the system - * current time. + * Given a DateTimeFormatter and a timestamp in nanos from epoch, return the date as a String in standard + * column-partition format of yyyy-MM-dd. A timestamp of NULL_LONG means use the system current time. * * @param dateTimeFormatter the date formatter * @param timestampNanos the timestamp in nanos * @return the formatted date */ - public static String getPartitionFromTimestampNanos( - @NotNull final DateTimeFormatter dateTimeFormatter, final long timestampNanos) { + public static String getPartitionFromTimestampNanos(@NotNull final DateTimeFormatter dateTimeFormatter, + final long timestampNanos) { if (timestampNanos == io.deephaven.util.QueryConstants.NULL_LONG) { return dateTimeFormatter.format(Instant.ofEpochMilli(System.currentTimeMillis())); } @@ -2432,16 +2297,15 @@ public static String getPartitionFromTimestampNanos( } /** - * Given a DateTimeFormatter and a timestamp in seconds from epoch, return the date as a String - * in standard column-partition format of yyyy-MM-dd. A timestamp of NULL_LONG means use the - * system current time. 
+ * Given a DateTimeFormatter and a timestamp in seconds from epoch, return the date as a String in standard + * column-partition format of yyyy-MM-dd. A timestamp of NULL_LONG means use the system current time. * * @param dateTimeFormatter the date formatter * @param timestampSeconds the timestamp in seconds * @return the formatted date */ - public static String getPartitionFromTimestampSeconds( - @NotNull final DateTimeFormatter dateTimeFormatter, final long timestampSeconds) { + public static String getPartitionFromTimestampSeconds(@NotNull final DateTimeFormatter dateTimeFormatter, + final long timestampSeconds) { if (timestampSeconds == io.deephaven.util.QueryConstants.NULL_LONG) { return dateTimeFormatter.format(Instant.ofEpochMilli(System.currentTimeMillis())); } diff --git a/DB/src/main/java/io/deephaven/db/tables/utils/DBTimeZone.java b/DB/src/main/java/io/deephaven/db/tables/utils/DBTimeZone.java index 1a92585bc89..2ee50bc889b 100644 --- a/DB/src/main/java/io/deephaven/db/tables/utils/DBTimeZone.java +++ b/DB/src/main/java/io/deephaven/db/tables/utils/DBTimeZone.java @@ -153,7 +153,7 @@ public static DBTimeZone lookup(DateTimeZone dateTimeZone) { private static DBTimeZone lookupByOffset(DateTimeZone dateTimeZone) { for (DBTimeZone zone : values()) { if (zone.getTimeZone().getOffset(System.currentTimeMillis()) == dateTimeZone - .getOffset(System.currentTimeMillis())) { + .getOffset(System.currentTimeMillis())) { return zone; } } @@ -161,9 +161,8 @@ private static DBTimeZone lookupByOffset(DateTimeZone dateTimeZone) { } /** - * This method returns the same contents as {@link DBTimeZone#values()}, but ordered by - * geographic location / UTC offset. If two elements exist within the same timezone, they are - * second-order-sorted by name + * This method returns the same contents as {@link DBTimeZone#values()}, but ordered by geographic location / UTC + * offset. 
If two elements exist within the same timezone, they are second-order-sorted by name * * @return An array of DBTimeZones ordered by UTC-offset */ diff --git a/DB/src/main/java/io/deephaven/db/tables/utils/DbTimeConverter.java b/DB/src/main/java/io/deephaven/db/tables/utils/DbTimeConverter.java index 630fe8c948c..b00c190179e 100644 --- a/DB/src/main/java/io/deephaven/db/tables/utils/DbTimeConverter.java +++ b/DB/src/main/java/io/deephaven/db/tables/utils/DbTimeConverter.java @@ -23,8 +23,8 @@ public class DbTimeConverter extends JFrame implements ActionListener, FocusList private JTextField outputNanos; private JTextField outputMillis; - private Color defaultColors[] = {Color.decode("0xFF8A8A"), Color.decode("0xFFFFAA"), - Color.decode("0xC0FF97"), Color.decode("0xCACAFF")}; + private Color defaultColors[] = + {Color.decode("0xFF8A8A"), Color.decode("0xFFFFAA"), Color.decode("0xC0FF97"), Color.decode("0xCACAFF")}; public DbTimeConverter() { super("DbTime Converter 2011 Clippy Edition"); @@ -169,8 +169,7 @@ public void actionPerformed(ActionEvent e) { if (time == null) { output[i].setText("?????"); } else { - output[i].setText( - time.toString(DBTimeZone.values()[timezones[i].getSelectedIndex()])); + output[i].setText(time.toString(DBTimeZone.values()[timezones[i].getSelectedIndex()])); } } catch (Exception ex) { // who cares diff --git a/DB/src/main/java/io/deephaven/db/tables/utils/ExpressionParser.java b/DB/src/main/java/io/deephaven/db/tables/utils/ExpressionParser.java index ca48d942f31..798913f1b49 100644 --- a/DB/src/main/java/io/deephaven/db/tables/utils/ExpressionParser.java +++ b/DB/src/main/java/io/deephaven/db/tables/utils/ExpressionParser.java @@ -13,8 +13,8 @@ import java.util.LinkedHashMap; /** - * A parser that will try a set of {@link ExpressionFactory}s and attempt to parse the expression - * until one of them succeeds. + * A parser that will try a set of {@link ExpressionFactory}s and attempt to parse the expression until one of them + * succeeds. 
* * @param The expected type of the parsed expression */ @@ -22,25 +22,22 @@ public class ExpressionParser { private Map> expressions = new LinkedHashMap<>(); /** - * Attempt to process the expression using the {@link #registerFactory(ExpressionFactory) - * configured} {@link ExpressionFactory factories} + * Attempt to process the expression using the {@link #registerFactory(ExpressionFactory) configured} + * {@link ExpressionFactory factories} * * @param expression the expression to parse * @return The result of the parsing * - * @throws ExpressionException if there is a problem parsing the expression, or no parsers - * accepted the expression. + * @throws ExpressionException if there is a problem parsing the expression, or no parsers accepted the expression. */ @NotNull public TYPE parse(String expression, Object... args) { Throwable creationException = null; - for (Map.Entry> patternExpressionFactoryEntry : expressions - .entrySet()) { + for (Map.Entry> patternExpressionFactoryEntry : expressions.entrySet()) { Matcher matcher = patternExpressionFactoryEntry.getKey().matcher(expression); if (matcher.matches()) { try { - return patternExpressionFactoryEntry.getValue().getExpression(expression, - matcher, args); + return patternExpressionFactoryEntry.getValue().getExpression(expression, matcher, args); } catch (Throwable t) { if (creationException == null) { creationException = t; @@ -49,11 +46,10 @@ public TYPE parse(String expression, Object... 
args) { } } if (creationException == null) { - throw new ExpressionException("Unable to parse expression: \"" + expression + "\"", - expression); + throw new ExpressionException("Unable to parse expression: \"" + expression + "\"", expression); } else { - throw new ExpressionException("Failed to get expression for all matched patterns", - creationException, expression); + throw new ExpressionException("Failed to get expression for all matched patterns", creationException, + expression); } } diff --git a/DB/src/main/java/io/deephaven/db/tables/utils/FigureWidgetMarker.java b/DB/src/main/java/io/deephaven/db/tables/utils/FigureWidgetMarker.java index 549991434bb..b38beaa9229 100644 --- a/DB/src/main/java/io/deephaven/db/tables/utils/FigureWidgetMarker.java +++ b/DB/src/main/java/io/deephaven/db/tables/utils/FigureWidgetMarker.java @@ -1,8 +1,8 @@ package io.deephaven.db.tables.utils; /** - * Marker interface for Controller to understand that it is looking at a plot/figure, and how it - * should be handled in the open api. + * Marker interface for Controller to understand that it is looking at a plot/figure, and how it should be handled in + * the open api. 
*/ public interface FigureWidgetMarker { } diff --git a/DB/src/main/java/io/deephaven/db/tables/utils/InMemoryBlockTableWriter.java b/DB/src/main/java/io/deephaven/db/tables/utils/InMemoryBlockTableWriter.java index 643e80e1fe0..08a6a357851 100644 --- a/DB/src/main/java/io/deephaven/db/tables/utils/InMemoryBlockTableWriter.java +++ b/DB/src/main/java/io/deephaven/db/tables/utils/InMemoryBlockTableWriter.java @@ -29,8 +29,7 @@ public InMemoryBlockTableWriter(TableDefinition definition) throws IOException { this(definition, 1000); } - public InMemoryBlockTableWriter(TableDefinition definition, int baseBlockSize) - throws IOException { + public InMemoryBlockTableWriter(TableDefinition definition, int baseBlockSize) throws IOException { this.definition = definition; this.baseBlockSize = baseBlockSize; initialize(); diff --git a/DB/src/main/java/io/deephaven/db/tables/utils/LayoutHintBuilder.java b/DB/src/main/java/io/deephaven/db/tables/utils/LayoutHintBuilder.java index ab0a4ea1970..814fb8f3eca 100644 --- a/DB/src/main/java/io/deephaven/db/tables/utils/LayoutHintBuilder.java +++ b/DB/src/main/java/io/deephaven/db/tables/utils/LayoutHintBuilder.java @@ -43,8 +43,7 @@ private static class AutoFilterData { } /** - * Serialize this object to a string suitable for inclusion in the builder's parameter - * string. + * Serialize this object to a string suitable for inclusion in the builder's parameter string. 
* * @return a string of the format column(:param&value)+ */ @@ -58,8 +57,7 @@ String forBuilder() { } /** - * Convert a string of the format defined by {@link #forBuilder()} into a proper - * AutoFilterData object + * Convert a string of the format defined by {@link #forBuilder()} into a proper AutoFilterData object * * @param string the string to parse * @return an AutoFilterData instance @@ -68,8 +66,7 @@ String forBuilder() { static AutoFilterData fromString(String string) { final String[] parts = string.split(":"); if (parts.length == 0) { - throw new IllegalArgumentException( - "Improperly formatted AutoFilterData string: " + string); + throw new IllegalArgumentException("Improperly formatted AutoFilterData string: " + string); } final String column = parts[0]; @@ -85,8 +82,8 @@ static AutoFilterData fromString(String string) { String[] paramParts = parts[i].split("&"); if (paramParts.length != 2) { throw new IllegalArgumentException( - "Only one value permitted in AutoFilterData parameter string; instead there are: " - + parts.length + " in " + parts[i]); + "Only one value permitted in AutoFilterData parameter string; instead there are: " + + parts.length + " in " + parts[i]); } // noinspection SwitchStatementWithTooFewBranches @@ -96,8 +93,7 @@ static AutoFilterData fromString(String string) { localFetchSize = Integer.parseInt(paramParts[1]); } catch (NumberFormatException ex) { throw new IllegalArgumentException( - "Invalid value for AutoFilterData fetch size parameter: " - + paramParts[1]); + "Invalid value for AutoFilterData fetch size parameter: " + paramParts[1]); } break; } @@ -121,9 +117,8 @@ private LayoutHintBuilder() {} @NotNull public static LayoutHintBuilder fromString(String attrs) { final Map options = Arrays.stream(attrs.split(";")) - .map(attr -> attr.split("=")) - .collect( - Collectors.toMap(parts -> parts[0], parts -> parts.length == 2 ? 
parts[1] : "")); + .map(attr -> attr.split("=")) + .collect(Collectors.toMap(parts -> parts[0], parts -> parts.length == 2 ? parts[1] : "")); final LayoutHintBuilder lhb = new LayoutHintBuilder(); if (options.containsKey("noSavedLayouts")) { @@ -149,8 +144,8 @@ public static LayoutHintBuilder fromString(String attrs) { if (!io.deephaven.db.util.string.StringUtils.isNullOrEmpty(autoStr)) { final String[] filters = autoStr.split(","); Arrays.stream(filters) - .map(AutoFilterData::fromString) - .forEach(lhb::addAutofilterData); + .map(AutoFilterData::fromString) + .forEach(lhb::addAutofilterData); } final String freezeStr = options.get("freeze"); @@ -193,8 +188,7 @@ public LayoutHintBuilder atFront(String... cols) { } /** - * Indicate the specified columns should appear as the first N columns of the table when - * displayed. + * Indicate the specified columns should appear as the first N columns of the table when displayed. * * @param cols the columns to show at front * @return this LayoutHintBuilder @@ -228,8 +222,7 @@ public LayoutHintBuilder atEnd(String... cols) { } /** - * Indicate the specified columns should appear as the last N columns of the table when - * displayed. + * Indicate the specified columns should appear as the last N columns of the table when displayed. * * @param cols the columns to show at the back * @return this LayoutHintBuilder @@ -310,8 +303,8 @@ public LayoutHintBuilder autoFilter(Collection cols) { } cols.stream() - .map(AutoFilterData::new) - .forEach(c -> autoFilterCols.put(c.column, c)); + .map(AutoFilterData::new) + .forEach(c -> autoFilterCols.put(c.column, c)); return this; } @@ -361,8 +354,7 @@ public LayoutHintBuilder freeze(String... cols) { } /** - * Indicate the specified columns should be frozen (displayed as the first N, unmovable columns) - * upon display. + * Indicate the specified columns should be frozen (displayed as the first N, unmovable columns) upon display. 
* * @param cols the columns to freeze * @return this LayoutHintBuilder @@ -384,8 +376,8 @@ public LayoutHintBuilder freeze(Collection cols) { } /** - * Indicate that the UI should maintain a subscription to the specified columns within - * viewports, even if they are out of view. + * Indicate that the UI should maintain a subscription to the specified columns within viewports, even if they are + * out of view. * * @param columns the columns to keep subscribed * @return this LayoutHintBuilder @@ -468,10 +460,9 @@ public String build() { } if (autoFilterCols != null && !autoFilterCols.isEmpty()) { - sb.append("autofilter=") - .append(StringUtils.joinStrings( - autoFilterCols.values().stream().map(AutoFilterData::forBuilder), ",")) - .append(';'); + sb.append("autofilter=").append( + StringUtils.joinStrings(autoFilterCols.values().stream().map(AutoFilterData::forBuilder), ",")) + .append(';'); } if (freezeCols != null && !freezeCols.isEmpty()) { @@ -523,8 +514,7 @@ public boolean areSavedLayoutsAllowed() { * @return the set of columns that should be hidden */ public @NotNull Set getHiddenCols() { - return hiddenCols == null ? Collections.emptySet() - : Collections.unmodifiableSet(hiddenCols); + return hiddenCols == null ? Collections.emptySet() : Collections.unmodifiableSet(hiddenCols); } /** @@ -533,8 +523,7 @@ public boolean areSavedLayoutsAllowed() { * @return the set of columns enabled for AutoFilter */ public @NotNull Set getAutoFilterCols() { - return autoFilterCols == null ? Collections.emptySet() - : Collections.unmodifiableSet(autoFilterCols.keySet()); + return autoFilterCols == null ? Collections.emptySet() : Collections.unmodifiableSet(autoFilterCols.keySet()); } /** @@ -558,8 +547,7 @@ public int getAutoFilterFetchSize(String column) { * @return the ordered set of columns that should be frozen */ public @NotNull Set getFreezeCols() { - return freezeCols == null ? 
Collections.emptySet() - : Collections.unmodifiableSet(freezeCols); + return freezeCols == null ? Collections.emptySet() : Collections.unmodifiableSet(freezeCols); } /** @@ -569,7 +557,7 @@ public int getAutoFilterFetchSize(String column) { */ public @NotNull Set getAlwaysSubscribedCols() { return alwaysSubscribedCols == null ? Collections.emptySet() - : Collections.unmodifiableSet(alwaysSubscribedCols); + : Collections.unmodifiableSet(alwaysSubscribedCols); } /** @@ -578,8 +566,7 @@ public int getAutoFilterFetchSize(String column) { * @return the set of columns */ public @NotNull Set getGroupableColumns() { - return groupableColumns == null ? Collections.emptySet() - : Collections.unmodifiableSet(groupableColumns); + return groupableColumns == null ? Collections.emptySet() : Collections.unmodifiableSet(groupableColumns); } // endregion } diff --git a/DB/src/main/java/io/deephaven/db/tables/utils/LiveWidgetVisibilityProvider.java b/DB/src/main/java/io/deephaven/db/tables/utils/LiveWidgetVisibilityProvider.java index 149bcb32ed5..8bcf0577bcf 100644 --- a/DB/src/main/java/io/deephaven/db/tables/utils/LiveWidgetVisibilityProvider.java +++ b/DB/src/main/java/io/deephaven/db/tables/utils/LiveWidgetVisibilityProvider.java @@ -10,10 +10,9 @@ *

    * *

    - * This interface only enables the widget developer to limit the users who may open the widget; it - * does not provide any control over what users can see after the widget is opened. The widget - * itself is responsible for determining which data should be presented to the user and applying any - * appropriate viewer permissions. + * This interface only enables the widget developer to limit the users who may open the widget; it does not provide any + * control over what users can see after the widget is opened. The widget itself is responsible for determining which + * data should be presented to the user and applying any appropriate viewer permissions. *

    * *

    @@ -21,14 +20,13 @@ *

    * *

    - * Unlike tables, limiting the visibility of one widget does not affect the visibility of other - * widgets. + * Unlike tables, limiting the visibility of one widget does not affect the visibility of other widgets. *

    */ public interface LiveWidgetVisibilityProvider { /** - * Provide a list of groups which may view this widget. null indicates that there are no viewing - * restrictions on this widget. + * Provide a list of groups which may view this widget. null indicates that there are no viewing restrictions on + * this widget. * * @return the list of groups which may view this widget, null for no restrictions */ diff --git a/DB/src/main/java/io/deephaven/db/tables/utils/PandasWidgetMarker.java b/DB/src/main/java/io/deephaven/db/tables/utils/PandasWidgetMarker.java index 00cc7e19dea..6d96709d035 100644 --- a/DB/src/main/java/io/deephaven/db/tables/utils/PandasWidgetMarker.java +++ b/DB/src/main/java/io/deephaven/db/tables/utils/PandasWidgetMarker.java @@ -1,8 +1,8 @@ package io.deephaven.db.tables.utils; /** - * Marker interface for Controller to understand that it is looking at a Pandas Widget and how it - * should be handled in the open api. + * Marker interface for Controller to understand that it is looking at a Pandas Widget and how it should be handled in + * the open api. 
*/ public interface PandasWidgetMarker { } diff --git a/DB/src/main/java/io/deephaven/db/tables/utils/ParquetTools.java b/DB/src/main/java/io/deephaven/db/tables/utils/ParquetTools.java index a2c2acad052..1ce4fb3c7ab 100644 --- a/DB/src/main/java/io/deephaven/db/tables/utils/ParquetTools.java +++ b/DB/src/main/java/io/deephaven/db/tables/utils/ParquetTools.java @@ -91,8 +91,8 @@ public static Table readTable(@NotNull final String sourceFilePath) { * @see FlatParquetLayout */ public static Table readTable( - @NotNull final String sourceFilePath, - @NotNull final ParquetInstructions readInstructions) { + @NotNull final String sourceFilePath, + @NotNull final ParquetInstructions readInstructions) { return readTableInternal(new File(sourceFilePath), readInstructions); } @@ -122,8 +122,8 @@ public static Table readTable(@NotNull final File sourceFile) { * @see FlatParquetLayout */ public static Table readTable( - @NotNull final File sourceFile, - @NotNull final ParquetInstructions readInstructions) { + @NotNull final File sourceFile, + @NotNull final ParquetInstructions readInstructions) { return readTableInternal(sourceFile, readInstructions); } @@ -131,28 +131,26 @@ public static Table readTable( * Write a table to a file. 
* * @param sourceTable source table - * @param destPath destination file path; the file name should end in ".parquet" extension If - * the path includes non-existing directories they are created If there is an error any - * intermediate directories previously created are removed; note this makes this method - * unsafe for concurrent use + * @param destPath destination file path; the file name should end in ".parquet" extension If the path includes + * non-existing directories they are created If there is an error any intermediate directories previously + * created are removed; note this makes this method unsafe for concurrent use */ public static void writeTable( - @NotNull final Table sourceTable, - @NotNull final String destPath) { - writeTable(sourceTable, new File(destPath), sourceTable.getDefinition(), - ParquetInstructions.EMPTY); + @NotNull final Table sourceTable, + @NotNull final String destPath) { + writeTable(sourceTable, new File(destPath), sourceTable.getDefinition(), ParquetInstructions.EMPTY); } /** * Write a table to a file. * * @param sourceTable source table - * @param destFile destination file; the file name should end in ".parquet" extension If the - * path includes non-existing directories they are created + * @param destFile destination file; the file name should end in ".parquet" extension If the path includes + * non-existing directories they are created */ public static void writeTable( - @NotNull final Table sourceTable, - @NotNull final File destFile) { + @NotNull final Table sourceTable, + @NotNull final File destFile) { writeTable(sourceTable, destFile, sourceTable.getDefinition(), ParquetInstructions.EMPTY); } @@ -160,15 +158,15 @@ public static void writeTable( * Write a table to a file. * * @param sourceTable source table - * @param destFile destination file; its path must end in ".parquet". 
Any non existing - * directories in the path are created If there is an error any intermediate directories - * previously created are removed; note this makes this method unsafe for concurrent use + * @param destFile destination file; its path must end in ".parquet". Any non existing directories in the path are + * created If there is an error any intermediate directories previously created are removed; note this makes + * this method unsafe for concurrent use * @param definition table definition to use (instead of the one implied by the table itself) */ public static void writeTable( - @NotNull final Table sourceTable, - @NotNull final File destFile, - @NotNull final TableDefinition definition) { + @NotNull final Table sourceTable, + @NotNull final File destFile, + @NotNull final TableDefinition definition) { writeTable(sourceTable, destFile, definition, ParquetInstructions.EMPTY); } @@ -176,15 +174,15 @@ public static void writeTable( * Write a table to a file. * * @param sourceTable source table - * @param destFile destination file; its path must end in ".parquet". Any non existing - * directories in the path are created If there is an error any intermediate directories - * previously created are removed; note this makes this method unsafe for concurrent use + * @param destFile destination file; its path must end in ".parquet". 
Any non existing directories in the path are + * created If there is an error any intermediate directories previously created are removed; note this makes + * this method unsafe for concurrent use * @param writeInstructions instructions for customizations while writing */ public static void writeTable( - @NotNull final Table sourceTable, - @NotNull final File destFile, - @NotNull final ParquetInstructions writeInstructions) { + @NotNull final Table sourceTable, + @NotNull final File destFile, + @NotNull final ParquetInstructions writeInstructions) { writeTable(sourceTable, destFile, sourceTable.getDefinition(), writeInstructions); } @@ -192,16 +190,16 @@ public static void writeTable( * Write a table to a file. * * @param sourceTable source table - * @param destPath destination path; it must end in ".parquet". Any non existing directories in - * the path are created If there is an error any intermediate directories previously - * created are removed; note this makes this method unsafe for concurrent use + * @param destPath destination path; it must end in ".parquet". 
Any non existing directories in the path are created + * If there is an error any intermediate directories previously created are removed; note this makes this + * method unsafe for concurrent use * @param definition table definition to use (instead of the one implied by the table itself) * @param writeInstructions instructions for customizations while writing */ public static void writeTable(@NotNull final Table sourceTable, - @NotNull final String destPath, - @NotNull final TableDefinition definition, - @NotNull final ParquetInstructions writeInstructions) { + @NotNull final String destPath, + @NotNull final TableDefinition definition, + @NotNull final ParquetInstructions writeInstructions) { writeTable(sourceTable, new File(destPath), definition, writeInstructions); } @@ -211,22 +209,21 @@ public static void writeTable(@NotNull final Table sourceTable, * @param sourceTable source table * @param definition table definition to use (instead of the one implied by the table itself) * @param writeInstructions instructions for customizations while writing - * @param destFile destination file; its path must end in ".parquet". Any non existing - * directories in the path are created If there is an error any intermediate directories - * previously created are removed; note this makes this method unsafe for concurrent use + * @param destFile destination file; its path must end in ".parquet". 
Any non existing directories in the path are + * created If there is an error any intermediate directories previously created are removed; note this makes + * this method unsafe for concurrent use */ public static void writeTable(@NotNull final Table sourceTable, - @NotNull final File destFile, - @NotNull final TableDefinition definition, - @NotNull final ParquetInstructions writeInstructions) { + @NotNull final File destFile, + @NotNull final TableDefinition definition, + @NotNull final ParquetInstructions writeInstructions) { if (definition.getColumns().length == 0) { throw new TableDataException("Cannot write a parquet table with zero columns"); } final File firstCreated = prepareDestinationFileLocation(destFile); try { writeParquetTableImpl( - sourceTable, definition, writeInstructions, destFile, - definition.getGroupingColumnNamesArray()); + sourceTable, definition, writeInstructions, destFile, definition.getGroupingColumnNamesArray()); } catch (Exception e) { if (firstCreated != null) { FileUtils.deleteRecursivelyOnNFS(firstCreated); @@ -246,17 +243,15 @@ public static void writeTable(@NotNull final Table sourceTable, private static File prepareDestinationFileLocation(@NotNull File destination) { destination = destination.getAbsoluteFile(); if (!destination.getPath().endsWith(PARQUET_FILE_EXTENSION)) { - throw new UncheckedDeephavenException("Destination " + destination + " does not end in " - + PARQUET_FILE_EXTENSION + " extension"); + throw new UncheckedDeephavenException( + "Destination " + destination + " does not end in " + PARQUET_FILE_EXTENSION + " extension"); } if (destination.exists()) { if (destination.isDirectory()) { - throw new UncheckedDeephavenException( - "Destination " + destination + " exists and is a directory"); + throw new UncheckedDeephavenException("Destination " + destination + " exists and is a directory"); } if (!destination.canWrite()) { - throw new UncheckedDeephavenException( - "Destination " + destination + " exists but is not 
writable"); + throw new UncheckedDeephavenException("Destination " + destination + " exists but is not writable"); } return null; } @@ -265,67 +260,63 @@ private static File prepareDestinationFileLocation(@NotNull File destination) { if (firstParent.canWrite()) { return null; } - throw new UncheckedDeephavenException( - "Destination " + destination + " has non writable parent directory"); + throw new UncheckedDeephavenException("Destination " + destination + " has non writable parent directory"); } File firstCreated = firstParent; File parent; for (parent = destination.getParentFile(); parent != null && !parent.exists(); parent = - parent.getParentFile()) { + parent.getParentFile()) { firstCreated = parent; } if (parent == null) { throw new IllegalArgumentException( - "Can't find any existing parent directory for destination path: " + destination); + "Can't find any existing parent directory for destination path: " + destination); } if (!parent.isDirectory()) { throw new IllegalArgumentException( - "Existing parent file " + parent + " of " + destination + " is not a directory"); + "Existing parent file " + parent + " of " + destination + " is not a directory"); } if (!firstParent.mkdirs()) { - throw new UncheckedDeephavenException( - "Couldn't (re)create destination directory " + firstParent); + throw new UncheckedDeephavenException("Couldn't (re)create destination directory " + firstParent); } return firstCreated; } /** - * Writes tables to disk in parquet format to a supplied set of destinations. If you specify - * grouping columns, there must already be grouping information for those columns in the - * sources. This can be accomplished with {@code .by().ungroup()} or - * {@code .sort()}. + * Writes tables to disk in parquet format to a supplied set of destinations. If you specify grouping columns, there + * must already be grouping information for those columns in the sources. This can be accomplished with + * {@code .by().ungroup()} or {@code .sort()}. 
* * @param sources The tables to write * @param tableDefinition The common schema for all the tables to write * @param writeInstructions Write instructions for customizations while writing - * @param destinations The destinations paths. Any non existing directories in the paths - * provided are created. If there is an error any intermediate directories previously - * created are removed; note this makes this method unsafe for concurrent use - * @param groupingColumns List of columns the tables are grouped by (the write operation will - * store the grouping info) + * @param destinations The destinations paths. Any non existing directories in the paths provided are created. If + * there is an error any intermediate directories previously created are removed; note this makes this method + * unsafe for concurrent use + * @param groupingColumns List of columns the tables are grouped by (the write operation will store the grouping + * info) */ public static void writeParquetTables(@NotNull final Table[] sources, - @NotNull final TableDefinition tableDefinition, - @NotNull final ParquetInstructions writeInstructions, - @NotNull final File[] destinations, - @NotNull final String[] groupingColumns) { + @NotNull final TableDefinition tableDefinition, + @NotNull final ParquetInstructions writeInstructions, + @NotNull final File[] destinations, + @NotNull final String[] groupingColumns) { Require.eq(sources.length, "sources.length", destinations.length, "destinations.length"); if (tableDefinition.getColumns().length == 0) { throw new TableDataException("Cannot write a parquet table with zero columns"); } final File[] absoluteDestinations = - Arrays.stream(destinations) - .map(File::getAbsoluteFile) - .toArray(File[]::new); + Arrays.stream(destinations) + .map(File::getAbsoluteFile) + .toArray(File[]::new); final File[] firstCreatedDirs = - Arrays.stream(absoluteDestinations) - .map(ParquetTools::prepareDestinationFileLocation) - .toArray(File[]::new); + 
Arrays.stream(absoluteDestinations) + .map(ParquetTools::prepareDestinationFileLocation) + .toArray(File[]::new); for (int i = 0; i < sources.length; i++) { final Table source = sources[i]; try { - writeParquetTableImpl(source, tableDefinition, writeInstructions, destinations[i], - groupingColumns); + writeParquetTableImpl(source, tableDefinition, writeInstructions, destinations[i], groupingColumns); } catch (RuntimeException e) { for (final File destination : destinations) { destination.delete(); @@ -335,9 +326,9 @@ public static void writeParquetTables(@NotNull final Table[] sources, continue; } log.error().append( - "Error in table writing, cleaning up potentially incomplete table destination path starting from ") - .append(firstCreatedDir.getAbsolutePath()) - .append(e); + "Error in table writing, cleaning up potentially incomplete table destination path starting from ") + .append(firstCreatedDir.getAbsolutePath()) + .append(e); FileUtils.deleteRecursivelyOnNFS(firstCreatedDir); } throw e; @@ -353,10 +344,10 @@ public static void writeParquetTables(@NotNull final Table[] sources, * @param destinations destinations */ public static void writeTables(@NotNull final Table[] sources, - @NotNull final TableDefinition tableDefinition, - @NotNull final File[] destinations) { + @NotNull final TableDefinition tableDefinition, + @NotNull final File[] destinations) { writeParquetTables(sources, tableDefinition, ParquetInstructions.EMPTY, destinations, - tableDefinition.getGroupingColumnNamesArray()); + tableDefinition.getGroupingColumnNamesArray()); } /** @@ -370,17 +361,17 @@ public static void deleteTable(File path) { } /** - * This method attempts to "do the right thing." It examines the source to determine if it's a - * single parquet file, a metadata file, or a directory. If it's a directory, it additionally - * tries to guess the layout to use. 
Unless a metadata file is supplied or discovered in the - * directory, the first found parquet file will be used to infer schema. + * This method attempts to "do the right thing." It examines the source to determine if it's a single parquet file, + * a metadata file, or a directory. If it's a directory, it additionally tries to guess the layout to use. Unless a + * metadata file is supplied or discovered in the directory, the first found parquet file will be used to infer + * schema. * * @param source The source file or directory * @param instructions Instructions for reading * @return A {@link Table} */ private static Table readTableInternal(@NotNull final File source, - @NotNull final ParquetInstructions instructions) { + @NotNull final ParquetInstructions instructions) { final Path sourcePath = source.toPath(); if (!Files.exists(sourcePath)) { throw new TableDataException("Source file " + source + " does not exist"); @@ -389,14 +380,13 @@ private static Table readTableInternal(@NotNull final File source, final BasicFileAttributes sourceAttr = readAttributes(sourcePath); if (sourceAttr.isRegularFile()) { if (sourceFileName.endsWith(PARQUET_FILE_EXTENSION)) { - final ParquetTableLocationKey tableLocationKey = - new ParquetTableLocationKey(source, 0, null); + final ParquetTableLocationKey tableLocationKey = new ParquetTableLocationKey(source, 0, null); final Pair, ParquetInstructions> schemaInfo = convertSchema( - tableLocationKey.getFileReader().getSchema(), - tableLocationKey.getMetadata().getFileMetaData().getKeyValueMetaData(), - instructions); + tableLocationKey.getFileReader().getSchema(), + tableLocationKey.getMetadata().getFileMetaData().getKeyValueMetaData(), + instructions); return readSingleFileTable(tableLocationKey, schemaInfo.getSecond(), - new TableDefinition(schemaInfo.getFirst())); + new TableDefinition(schemaInfo.getFirst())); } if (sourceFileName.equals(ParquetMetadataFileLayout.METADATA_FILE_NAME)) { return 
readPartitionedTableWithMetadata(source.getParentFile(), instructions); @@ -405,11 +395,10 @@ private static Table readTableInternal(@NotNull final File source, return readPartitionedTableWithMetadata(source.getParentFile(), instructions); } throw new TableDataException( - "Source file " + source + " does not appear to be a parquet file or metadata file"); + "Source file " + source + " does not appear to be a parquet file or metadata file"); } if (sourceAttr.isDirectory()) { - final Path metadataPath = - sourcePath.resolve(ParquetMetadataFileLayout.METADATA_FILE_NAME); + final Path metadataPath = sourcePath.resolve(ParquetMetadataFileLayout.METADATA_FILE_NAME); if (Files.exists(metadataPath)) { return readPartitionedTableWithMetadata(source, instructions); } @@ -426,17 +415,14 @@ private static Table readTableInternal(@NotNull final File source, final String firstEntryFileName = firstEntryPath.getFileName().toString(); final BasicFileAttributes firstEntryAttr = readAttributes(firstEntryPath); if (firstEntryAttr.isDirectory() && firstEntryFileName.contains("=")) { - return readPartitionedTableInferSchema( - KeyValuePartitionLayout.forParquet(source, 32), instructions); + return readPartitionedTableInferSchema(KeyValuePartitionLayout.forParquet(source, 32), instructions); } - if (firstEntryAttr.isRegularFile() - && firstEntryFileName.endsWith(PARQUET_FILE_EXTENSION)) { + if (firstEntryAttr.isRegularFile() && firstEntryFileName.endsWith(PARQUET_FILE_EXTENSION)) { return readPartitionedTableInferSchema(new FlatParquetLayout(source), instructions); } throw new TableDataException("No recognized Parquet table layout found in " + source); } - throw new TableDataException( - "Source " + source + " is neither a directory nor a regular file"); + throw new TableDataException("Source " + source + " is neither a directory nor a regular file"); } private static BasicFileAttributes readAttributes(@NotNull final Path path) { @@ -456,87 +442,80 @@ private static BasicFileAttributes 
readAttributes(@NotNull final Path path) { * @return The table */ public static Table readSingleFileTable( - @NotNull final ParquetTableLocationKey tableLocationKey, - @NotNull final ParquetInstructions readInstructions, - @NotNull final TableDefinition tableDefinition) { + @NotNull final ParquetTableLocationKey tableLocationKey, + @NotNull final ParquetInstructions readInstructions, + @NotNull final TableDefinition tableDefinition) { final TableLocationProvider locationProvider = new PollingTableLocationProvider<>( - StandaloneTableKey.getInstance(), - new KnownLocationKeyFinder<>(tableLocationKey), - new ParquetTableLocationFactory(readInstructions), - null); + StandaloneTableKey.getInstance(), + new KnownLocationKeyFinder<>(tableLocationKey), + new ParquetTableLocationFactory(readInstructions), + null); return new SimpleSourceTable(tableDefinition.getWritable(), - "Read single parquet file from " + tableLocationKey.getFile(), - RegionedTableComponentFactoryImpl.INSTANCE, locationProvider, null); + "Read single parquet file from " + tableLocationKey.getFile(), + RegionedTableComponentFactoryImpl.INSTANCE, locationProvider, null); } /** - * Reads in a table from files discovered with {@code locationKeyFinder} using the provided - * table definition. + * Reads in a table from files discovered with {@code locationKeyFinder} using the provided table definition. 
* - * @param locationKeyFinder The source of {@link ParquetTableLocationKey location keys} to - * include + * @param locationKeyFinder The source of {@link ParquetTableLocationKey location keys} to include * @param readInstructions Instructions for customizations while reading * @param tableDefinition The table's {@link TableDefinition definition} * @return The table */ public static Table readPartitionedTable( - @NotNull final TableLocationKeyFinder locationKeyFinder, - @NotNull final ParquetInstructions readInstructions, - @NotNull final TableDefinition tableDefinition) { + @NotNull final TableLocationKeyFinder locationKeyFinder, + @NotNull final ParquetInstructions readInstructions, + @NotNull final TableDefinition tableDefinition) { final TableLocationProvider locationProvider = new PollingTableLocationProvider<>( - StandaloneTableKey.getInstance(), - locationKeyFinder, - new ParquetTableLocationFactory(readInstructions), - null); - return new PartitionAwareSourceTable(tableDefinition, - "Read multiple parquet files with " + locationKeyFinder, - RegionedTableComponentFactoryImpl.INSTANCE, locationProvider, null); + StandaloneTableKey.getInstance(), + locationKeyFinder, + new ParquetTableLocationFactory(readInstructions), + null); + return new PartitionAwareSourceTable(tableDefinition, "Read multiple parquet files with " + locationKeyFinder, + RegionedTableComponentFactoryImpl.INSTANCE, locationProvider, null); } /** - * Reads in a table from files discovered with {@code locationKeyFinder} using a definition - * built from the first location found, which must have non-null partition values for all - * partition keys. + * Reads in a table from files discovered with {@code locationKeyFinder} using a definition built from the first + * location found, which must have non-null partition values for all partition keys. 
* - * @param locationKeyFinder The source of {@link ParquetTableLocationKey location keys} to - * include + * @param locationKeyFinder The source of {@link ParquetTableLocationKey location keys} to include * @param readInstructions Instructions for customizations while reading * @return The table */ public static Table readPartitionedTableInferSchema( - @NotNull final TableLocationKeyFinder locationKeyFinder, - @NotNull final ParquetInstructions readInstructions) { + @NotNull final TableLocationKeyFinder locationKeyFinder, + @NotNull final ParquetInstructions readInstructions) { final RecordingLocationKeyFinder recordingLocationKeyFinder = - new RecordingLocationKeyFinder<>(); + new RecordingLocationKeyFinder<>(); locationKeyFinder.findKeys(recordingLocationKeyFinder); - final List foundKeys = - recordingLocationKeyFinder.getRecordedKeys(); + final List foundKeys = recordingLocationKeyFinder.getRecordedKeys(); if (foundKeys.isEmpty()) { return TableTools.emptyTable(0); } - // TODO (https://github.com/deephaven/deephaven-core/issues/877): Support schema merge when - // discovering multiple parquet files + // TODO (https://github.com/deephaven/deephaven-core/issues/877): Support schema merge when discovering multiple + // parquet files final ParquetTableLocationKey firstKey = foundKeys.get(0); final Pair, ParquetInstructions> schemaInfo = convertSchema( - firstKey.getFileReader().getSchema(), - firstKey.getMetadata().getFileMetaData().getKeyValueMetaData(), - readInstructions); + firstKey.getFileReader().getSchema(), + firstKey.getMetadata().getFileMetaData().getKeyValueMetaData(), + readInstructions); final List allColumns = - new ArrayList<>(firstKey.getPartitionKeys().size() + schemaInfo.getFirst().size()); + new ArrayList<>(firstKey.getPartitionKeys().size() + schemaInfo.getFirst().size()); for (final String partitionKey : firstKey.getPartitionKeys()) { final Comparable partitionValue = firstKey.getPartitionValue(partitionKey); if (partitionValue == null) { throw 
new IllegalArgumentException("First location key " + firstKey - + " has null partition value at partition key " + partitionKey); + + " has null partition value at partition key " + partitionKey); } // noinspection unchecked allColumns.add(ColumnDefinition.fromGenericType(partitionKey, - getUnboxedTypeIfBoxed(partitionValue.getClass()), - ColumnDefinition.COLUMNTYPE_PARTITIONING, null)); + getUnboxedTypeIfBoxed(partitionValue.getClass()), ColumnDefinition.COLUMNTYPE_PARTITIONING, null)); } allColumns.addAll(schemaInfo.getFirst()); return readPartitionedTable(recordingLocationKeyFinder, schemaInfo.getSecond(), - new TableDefinition(allColumns)); + new TableDefinition(allColumns)); } /** @@ -547,31 +526,27 @@ public static Table readPartitionedTableInferSchema( * @return The table */ public static Table readPartitionedTableWithMetadata( - @NotNull final File directory, - @NotNull final ParquetInstructions readInstructions) { - final ParquetMetadataFileLayout layout = - new ParquetMetadataFileLayout(directory, readInstructions); + @NotNull final File directory, + @NotNull final ParquetInstructions readInstructions) { + final ParquetMetadataFileLayout layout = new ParquetMetadataFileLayout(directory, readInstructions); return readPartitionedTable(layout, layout.getInstructions(), layout.getTableDefinition()); } private static final SimpleTypeMap> DB_ARRAY_TYPE_MAP = SimpleTypeMap.create( - null, DbCharArray.class, DbByteArray.class, DbShortArray.class, DbIntArray.class, - DbLongArray.class, - DbFloatArray.class, DbDoubleArray.class, DbArray.class); + null, DbCharArray.class, DbByteArray.class, DbShortArray.class, DbIntArray.class, DbLongArray.class, + DbFloatArray.class, DbDoubleArray.class, DbArray.class); - private static Class loadClass(final String colName, final String desc, - final String className) { + private static Class loadClass(final String colName, final String desc, final String className) { try { return ClassUtil.lookupClass(className); } catch 
(ClassNotFoundException e) { throw new UncheckedDeephavenException( - "Column " + colName + " with " + desc + "=" + className - + " that can't be found in classloader"); + "Column " + colName + " with " + desc + "=" + className + " that can't be found in classloader"); } } private static ParquetSchemaReader.ColumnDefinitionConsumer makeSchemaReaderConsumer( - final ArrayList colsOut) { + final ArrayList colsOut) { return (final ParquetSchemaReader.ParquetMessageDefinition parquetColDef) -> { Class baseType; if (parquetColDef.baseType == boolean.class) { @@ -582,45 +557,34 @@ private static ParquetSchemaReader.ColumnDefinitionConsumer makeSchemaReaderCons ColumnDefinition colDef; if (parquetColDef.codecType != null && !parquetColDef.codecType.isEmpty()) { final Class componentType = - (parquetColDef.codecComponentType != null - && !parquetColDef.codecComponentType.isEmpty()) - ? loadClass(parquetColDef.name, "codecComponentType", - parquetColDef.codecComponentType) - : null; - final Class dataType = - loadClass(parquetColDef.name, "codecType", parquetColDef.codecType); - colDef = - ColumnDefinition.fromGenericType(parquetColDef.name, dataType, componentType); + (parquetColDef.codecComponentType != null && !parquetColDef.codecComponentType.isEmpty()) + ? 
loadClass(parquetColDef.name, "codecComponentType", parquetColDef.codecComponentType) + : null; + final Class dataType = loadClass(parquetColDef.name, "codecType", parquetColDef.codecType); + colDef = ColumnDefinition.fromGenericType(parquetColDef.name, dataType, componentType); } else if (parquetColDef.dhSpecialType != null) { if (parquetColDef.dhSpecialType == ColumnTypeInfo.SpecialType.StringSet) { - colDef = - ColumnDefinition.fromGenericType(parquetColDef.name, StringSet.class, null); + colDef = ColumnDefinition.fromGenericType(parquetColDef.name, StringSet.class, null); } else if (parquetColDef.dhSpecialType == ColumnTypeInfo.SpecialType.Vector) { final Class dbArrayType = DB_ARRAY_TYPE_MAP.get(baseType); if (dbArrayType != null) { - colDef = ColumnDefinition.fromGenericType(parquetColDef.name, dbArrayType, - baseType); + colDef = ColumnDefinition.fromGenericType(parquetColDef.name, dbArrayType, baseType); } else { - colDef = ColumnDefinition.fromGenericType(parquetColDef.name, DbArray.class, - baseType); + colDef = ColumnDefinition.fromGenericType(parquetColDef.name, DbArray.class, baseType); } } else { - throw new UncheckedDeephavenException( - "Unhandled dbSpecialType=" + parquetColDef.dhSpecialType); + throw new UncheckedDeephavenException("Unhandled dbSpecialType=" + parquetColDef.dhSpecialType); } } else { if (parquetColDef.isArray) { if (baseType == byte.class && parquetColDef.noLogicalType) { - colDef = ColumnDefinition.fromGenericType(parquetColDef.name, byte[].class, - byte.class); + colDef = ColumnDefinition.fromGenericType(parquetColDef.name, byte[].class, byte.class); } else { // TODO: ParquetInstruction.loadAsDbArray final Class componentType = baseType; // On Java 12, replace by: dataType = componentType.arrayType(); - final Class dataType = - java.lang.reflect.Array.newInstance(componentType, 0).getClass(); - colDef = ColumnDefinition.fromGenericType(parquetColDef.name, dataType, - componentType); + final Class dataType = 
java.lang.reflect.Array.newInstance(componentType, 0).getClass(); + colDef = ColumnDefinition.fromGenericType(parquetColDef.name, dataType, componentType); } } else { colDef = ColumnDefinition.fromGenericType(parquetColDef.name, baseType, null); @@ -642,11 +606,10 @@ private static ParquetSchemaReader.ColumnDefinitionConsumer makeSchemaReaderCons public static ParquetFileReader getParquetFileReader(@NotNull final File parquetFile) { try { return new ParquetFileReader( - parquetFile.getAbsolutePath(), - new CachedChannelProvider( - new TrackedSeekableChannelsProvider(TrackedFileHandleFactory.getInstance()), - 1 << 7), - 0); + parquetFile.getAbsolutePath(), + new CachedChannelProvider( + new TrackedSeekableChannelsProvider(TrackedFileHandleFactory.getInstance()), 1 << 7), + 0); } catch (IOException e) { throw new TableDataException("Failed to create Parquet file reader: " + parquetFile, e); } @@ -654,14 +617,13 @@ public static ParquetFileReader getParquetFileReader(@NotNull final File parquet @VisibleForTesting public static Table readParquetSchemaAndTable( - @NotNull final File source, @NotNull final ParquetInstructions readInstructionsIn, - MutableObject instructionsOut) { - final ParquetTableLocationKey tableLocationKey = - new ParquetTableLocationKey(source, 0, null); + @NotNull final File source, @NotNull final ParquetInstructions readInstructionsIn, + MutableObject instructionsOut) { + final ParquetTableLocationKey tableLocationKey = new ParquetTableLocationKey(source, 0, null); final Pair, ParquetInstructions> schemaInfo = convertSchema( - tableLocationKey.getFileReader().getSchema(), - tableLocationKey.getMetadata().getFileMetaData().getKeyValueMetaData(), - readInstructionsIn); + tableLocationKey.getFileReader().getSchema(), + tableLocationKey.getMetadata().getFileMetaData().getKeyValueMetaData(), + readInstructionsIn); final TableDefinition def = new TableDefinition(schemaInfo.getFirst()); if (instructionsOut != null) { 
instructionsOut.setValue(schemaInfo.getSecond()); @@ -670,64 +632,58 @@ public static Table readParquetSchemaAndTable( } /** - * Convert schema information from a {@link ParquetMetadata} into {@link ColumnDefinition - * ColumnDefinitions}. + * Convert schema information from a {@link ParquetMetadata} into {@link ColumnDefinition ColumnDefinitions}. * * @param schema Parquet schema. DO NOT RELY ON {@link ParquetMetadataConverter} FOR THIS! USE * {@link ParquetFileReader}! * @param keyValueMetadata Parquet key-value metadata map * @param readInstructionsIn Input conversion {@link ParquetInstructions} - * @return A {@link Pair} with {@link ColumnDefinition ColumnDefinitions} and adjusted - * {@link ParquetInstructions} + * @return A {@link Pair} with {@link ColumnDefinition ColumnDefinitions} and adjusted {@link ParquetInstructions} */ public static Pair, ParquetInstructions> convertSchema( - @NotNull final MessageType schema, - @NotNull final Map keyValueMetadata, - @NotNull final ParquetInstructions readInstructionsIn) { + @NotNull final MessageType schema, + @NotNull final Map keyValueMetadata, + @NotNull final ParquetInstructions readInstructionsIn) { // noinspection rawtypes final ArrayList cols = new ArrayList<>(); - final ParquetSchemaReader.ColumnDefinitionConsumer colConsumer = - makeSchemaReaderConsumer(cols); + final ParquetSchemaReader.ColumnDefinitionConsumer colConsumer = makeSchemaReaderConsumer(cols); return new Pair<>(cols, ParquetSchemaReader.readParquetSchema( - schema, - keyValueMetadata, - readInstructionsIn, - colConsumer, - (final String colName, final Set takenNames) -> NameValidator - .legalizeColumnName(colName, s -> s.replace(" ", "_"), takenNames))); + schema, + keyValueMetadata, + readInstructionsIn, + colConsumer, + (final String colName, final Set takenNames) -> NameValidator.legalizeColumnName(colName, + s -> s.replace(" ", "_"), takenNames))); } private static void writeParquetTableImpl( - final Table sourceTable, - final 
TableDefinition definition, - final ParquetInstructions writeInstructions, - final File destFile, - final String[] groupingColumns) { + final Table sourceTable, + final TableDefinition definition, + final ParquetInstructions writeInstructions, + final File destFile, + final String[] groupingColumns) { final String path = destFile.getPath(); try { if (groupingColumns.length > 0) { ParquetTableWriter.write( - sourceTable, definition, writeInstructions, path, Collections.emptyMap(), - ParquetTableWriter.defaultGroupingFileName(path), groupingColumns); + sourceTable, definition, writeInstructions, path, Collections.emptyMap(), + ParquetTableWriter.defaultGroupingFileName(path), groupingColumns); } else { ParquetTableWriter.write( - sourceTable, definition, writeInstructions, path, Collections.emptyMap()); + sourceTable, definition, writeInstructions, path, Collections.emptyMap()); } } catch (Exception e) { throw new UncheckedDeephavenException("Error writing table to " + destFile, e); } } - public static final ParquetInstructions LZ4 = - ParquetInstructions.builder().setCompressionCodecName("LZ4").build(); - public static final ParquetInstructions LZO = - ParquetInstructions.builder().setCompressionCodecName("LZO").build(); + public static final ParquetInstructions LZ4 = ParquetInstructions.builder().setCompressionCodecName("LZ4").build(); + public static final ParquetInstructions LZO = ParquetInstructions.builder().setCompressionCodecName("LZO").build(); public static final ParquetInstructions GZIP = - ParquetInstructions.builder().setCompressionCodecName("GZIP").build(); + ParquetInstructions.builder().setCompressionCodecName("GZIP").build(); public static final ParquetInstructions ZSTD = - ParquetInstructions.builder().setCompressionCodecName("ZSTD").build(); - public static final ParquetInstructions LEGACY = - ParquetInstructions.builder().setIsLegacyParquet(true).build(); + ParquetInstructions.builder().setCompressionCodecName("ZSTD").build(); + public static 
final ParquetInstructions LEGACY = ParquetInstructions.builder().setIsLegacyParquet(true).build(); public static void setDefaultCompressionCodecName(final String compressionCodecName) { ParquetInstructions.setDefaultCompressionCodecName(compressionCodecName); diff --git a/DB/src/main/java/io/deephaven/db/tables/utils/QueryPerformanceLogThreshold.java b/DB/src/main/java/io/deephaven/db/tables/utils/QueryPerformanceLogThreshold.java index a5f32fa841e..0fc1a35a65c 100644 --- a/DB/src/main/java/io/deephaven/db/tables/utils/QueryPerformanceLogThreshold.java +++ b/DB/src/main/java/io/deephaven/db/tables/utils/QueryPerformanceLogThreshold.java @@ -3,8 +3,8 @@ import io.deephaven.configuration.Configuration; /** - * This class encapsulates the parameters that control whether a given item (nugget or entry - * interval) is logged to one of our three performance logs. + * This class encapsulates the parameters that control whether a given item (nugget or entry interval) is logged to one + * of our three performance logs. */ public class QueryPerformanceLogThreshold { private final long minimumDurationNanos; @@ -13,8 +13,7 @@ public class QueryPerformanceLogThreshold { * Create a log threshold object for a particular kind of log update *
      *
    • "" is for instrumented QueryPerformanceLog/QueryOperationPerformanceLog nuggets
    • - *
    • "Uninstrumented" is for uninstrumented QueryPerformanceLog/QueryOperationPerformanceLog - * nuggets, and + *
    • "Uninstrumented" is for uninstrumented QueryPerformanceLog/QueryOperationPerformanceLog nuggets, and *
    • "Update" is for UpdatePerformanceLog entry intervals.
    • *
    * @@ -23,18 +22,17 @@ public class QueryPerformanceLogThreshold { * @param defaultRepeatedReads default value for repeated read threshold * @param defaultInitialReads default value for initial read threshold */ - private QueryPerformanceLogThreshold(String kind, long defaultDuration, - long defaultRepeatedReads, long defaultInitialReads) { - minimumDurationNanos = Configuration.getInstance().getLongWithDefault( - "QueryPerformance.minimum" + kind + "LogDurationNanos", defaultDuration); + private QueryPerformanceLogThreshold(String kind, long defaultDuration, long defaultRepeatedReads, + long defaultInitialReads) { + minimumDurationNanos = Configuration.getInstance() + .getLongWithDefault("QueryPerformance.minimum" + kind + "LogDurationNanos", defaultDuration); } /** * Create a log threshold object for a particular kind of log update *
      *
    • "" is for instrumented QueryPerformanceLog/QueryOperationPerformanceLog nuggets
    • - *
    • "Uninstrumented" is for uninstrumented QueryPerformanceLog/QueryOperationPerformanceLog - * nuggets, and + *
    • "Uninstrumented" is for uninstrumented QueryPerformanceLog/QueryOperationPerformanceLog nuggets, and *
    • "Update" is for UpdatePerformanceLog entry intervals.
    • *
    * @@ -48,9 +46,8 @@ public QueryPerformanceLogThreshold(String kind, long defaultDuration) { } /** - * The minimum duration for an QueryPerformanceNugget to be logged based on its duration (or - * entry interval usage for the UpdatePerformanceLog). The value 0 logs everything. The value -1 - * will not log anything based on duration. + * The minimum duration for an QueryPerformanceNugget to be logged based on its duration (or entry interval usage + * for the UpdatePerformanceLog). The value 0 logs everything. The value -1 will not log anything based on duration. */ private long getMinimumDurationNanos() { return minimumDurationNanos; diff --git a/DB/src/main/java/io/deephaven/db/tables/utils/QueryPerformanceNugget.java b/DB/src/main/java/io/deephaven/db/tables/utils/QueryPerformanceNugget.java index 7677d286a8d..479d1da224c 100644 --- a/DB/src/main/java/io/deephaven/db/tables/utils/QueryPerformanceNugget.java +++ b/DB/src/main/java/io/deephaven/db/tables/utils/QueryPerformanceNugget.java @@ -14,15 +14,14 @@ import static io.deephaven.util.QueryConstants.*; /** - * Per-operation instrumentation node for hierarchical performance recording. Note that this class - * has an unusually intimate relationship with another class, {@link QueryPerformanceRecorder}. - * Changes to either should take this lack of encapsulation into account. + * Per-operation instrumentation node for hierarchical performance recording. Note that this class has an unusually + * intimate relationship with another class, {@link QueryPerformanceRecorder}. Changes to either should take this lack + * of encapsulation into account. 
*/ public class QueryPerformanceNugget implements Serializable, AutoCloseable { - private static final QueryPerformanceLogThreshold LOG_THRESHOLD = - new QueryPerformanceLogThreshold("", 1_000_000); + private static final QueryPerformanceLogThreshold LOG_THRESHOLD = new QueryPerformanceLogThreshold("", 1_000_000); private static final QueryPerformanceLogThreshold UNINSTRUMENTED_LOG_THRESHOLD = - new QueryPerformanceLogThreshold("Uninstrumented", 1_000_000_000); + new QueryPerformanceLogThreshold("Uninstrumented", 1_000_000_000); private static final int MAX_DESCRIPTION_LENGTH = 16 << 10; private static final long serialVersionUID = 2L; @@ -65,8 +64,7 @@ public class QueryPerformanceNugget implements Serializable, AutoCloseable { /** * Constructor for query-level nuggets. * - * @param evaluationNumber A unique identifier for the query evaluation that triggered this - * nugget creation + * @param evaluationNumber A unique identifier for the query evaluation that triggered this nugget creation * @param description The operation description */ QueryPerformanceNugget(final int evaluationNumber, final String description) { @@ -76,20 +74,19 @@ public class QueryPerformanceNugget implements Serializable, AutoCloseable { /** * Full constructor for nuggets. 
* - * @param evaluationNumber A unique identifier for the query evaluation that triggered this - * nugget creation + * @param evaluationNumber A unique identifier for the query evaluation that triggered this nugget creation * @param depth Depth in the evaluation chain for the respective operation * @param description The operation description * @param isUser Whether this is a "user" nugget or one created by the system * @param inputSize The size of the input data */ QueryPerformanceNugget(final int evaluationNumber, final int depth, - final String description, final boolean isUser, final long inputSize) { + final String description, final boolean isUser, final long inputSize) { this.evaluationNumber = evaluationNumber; this.depth = depth; if (description.length() > MAX_DESCRIPTION_LENGTH) { this.description = description.substring(0, MAX_DESCRIPTION_LENGTH) + " ... [truncated " - + (description.length() - MAX_DESCRIPTION_LENGTH) + " bytes]"; + + (description.length() - MAX_DESCRIPTION_LENGTH) + " bytes]"; } else { this.description = description; } @@ -158,9 +155,9 @@ public boolean done(QueryPerformanceRecorder recorder) { } /** - * AutoCloseable implementation - wraps the no-argument version of done() used by query code - * outside of the QueryPerformance(Recorder/Nugget), reporting successful completion to the - * thread-local QueryPerformanceRecorder instance. + * AutoCloseable implementation - wraps the no-argument version of done() used by query code outside of the + * QueryPerformance(Recorder/Nugget), reporting successful completion to the thread-local QueryPerformanceRecorder + * instance. */ @Override public void close() { @@ -175,11 +172,10 @@ public boolean abort(QueryPerformanceRecorder recorder) { /** * Finish the nugget and record the current state of the world. * - * @param closingState The current query state. 
If it is anything other than - * {@link QueryState#RUNNING} nothing will happen and it will return false; + * @param closingState The current query state. If it is anything other than {@link QueryState#RUNNING} nothing will + * happen and it will return false; * - * @param recorderToNotify The {@link QueryPerformanceRecorder} to notify this nugget is - * closing. + * @param recorderToNotify The {@link QueryPerformanceRecorder} to notify this nugget is closing. * @return If the nugget passes criteria for logging. */ private boolean close(QueryState closingState, QueryPerformanceRecorder recorderToNotify) { @@ -206,10 +202,8 @@ private boolean close(QueryState closingState, QueryPerformanceRecorder recorder diffTotalMemory = totalUsedMemory - startTotalMemory; diffPoolAllocatedBytes = - minus(QueryPerformanceRecorder.getPoolAllocatedBytesForCurrentThread(), - startPoolAllocatedBytes); - diffAllocatedBytes = - minus(ThreadProfiler.DEFAULT.getCurrentThreadAllocatedBytes(), startAllocatedBytes); + minus(QueryPerformanceRecorder.getPoolAllocatedBytesForCurrentThread(), startPoolAllocatedBytes); + diffAllocatedBytes = minus(ThreadProfiler.DEFAULT.getCurrentThreadAllocatedBytes(), startAllocatedBytes); state = closingState; return recorderToNotify.releaseNugget(this); @@ -219,8 +213,8 @@ private boolean close(QueryState closingState, QueryPerformanceRecorder recorder @Override public String toString() { return Integer.toString(evaluationNumber) - + ":" + description - + ":" + callerLine; + + ":" + description + + ":" + callerLine; } public int getEvaluationNumber() { @@ -268,8 +262,8 @@ public long getStartClockTime() { /** * Get nanoseconds of CPU time attributed to the instrumented operation. * - * @return The nanoseconds of CPU time attributed to the instrumented operation, or - * {@link QueryConstants#NULL_LONG} if not enabled/supported. 
+ * @return The nanoseconds of CPU time attributed to the instrumented operation, or {@link QueryConstants#NULL_LONG} + * if not enabled/supported. */ public long getCpuNanos() { return diffCpuNanos; @@ -307,8 +301,7 @@ public long getDiffFreeMemory() { } /** - * @return total (allocated high water mark) memory difference between time of completion and - * creation + * @return total (allocated high water mark) memory difference between time of completion and creation */ public long getDiffTotalMemory() { return diffTotalMemory; @@ -327,8 +320,8 @@ public long getAllocatedBytes() { /** * Get bytes of allocated pooled/reusable memory attributed to the instrumented operation. * - * @return The bytes of allocated pooled/reusable memory attributed to the instrumented - * operation, or {@link QueryConstants#NULL_LONG} if not enabled/supported. + * @return The bytes of allocated pooled/reusable memory attributed to the instrumented operation, or + * {@link QueryConstants#NULL_LONG} if not enabled/supported. */ public long getPoolAllocatedBytes() { return diffPoolAllocatedBytes; @@ -349,8 +342,8 @@ public void setShouldLogMeAndStackParents() { } /** - * @return true if this nugget triggers the logging of itself and every other nugget in its - * stack of nesting operations. + * @return true if this nugget triggers the logging of itself and every other nugget in its stack of nesting + * operations. */ public boolean shouldLogMenAndStackParents() { return shouldLogMeAndStackParents; @@ -359,8 +352,8 @@ public boolean shouldLogMenAndStackParents() { /** * Suppress de minimus performance nuggets using the properties defined above. * - * @param isUninstrumented this nugget for uninstrumented code? If so the thresholds for - * inclusion in the logs are configured distinctly. + * @param isUninstrumented this nugget for uninstrumented code? If so the thresholds for inclusion in the logs are + * configured distinctly. * * @return if this nugget is significant enough to be logged. 
*/ @@ -368,8 +361,8 @@ boolean shouldLogNugget(final boolean isUninstrumented) { if (shouldLogMeAndStackParents) { return true; } - // Nuggets will have a null value for total time if they weren't closed for a RUNNING query; - // this is an abnormal condition and the nugget should be logged + // Nuggets will have a null value for total time if they weren't closed for a RUNNING query; this is an abnormal + // condition and the nugget should be logged if (getTotalTimeNanos() == null) { return true; } diff --git a/DB/src/main/java/io/deephaven/db/tables/utils/QueryPerformanceRecorder.java b/DB/src/main/java/io/deephaven/db/tables/utils/QueryPerformanceRecorder.java index 899bca30776..afb6e20efa2 100644 --- a/DB/src/main/java/io/deephaven/db/tables/utils/QueryPerformanceRecorder.java +++ b/DB/src/main/java/io/deephaven/db/tables/utils/QueryPerformanceRecorder.java @@ -26,11 +26,10 @@ import static io.deephaven.db.tables.lang.DBLanguageFunctionUtil.plus; /** - * Query performance instrumentation tools. Manages a hierarchy of {@link QueryPerformanceNugget} - * instances. + * Query performance instrumentation tools. Manages a hierarchy of {@link QueryPerformanceNugget} instances. *

    - * Thread-safety note: This used to be thread-safe only by virtue of using a thread-local instance. - * Now it's aggressively synchronized so we can abort it from outside the "owner" thread. + * Thread-safety note: This used to be thread-safe only by virtue of using a thread-local instance. Now it's + * aggressively synchronized so we can abort it from outside the "owner" thread. */ public class QueryPerformanceRecorder implements Serializable { @@ -49,26 +48,23 @@ public class QueryPerformanceRecorder implements Serializable { private static final AtomicInteger queriesProcessed = new AtomicInteger(0); private static final ThreadLocal theLocal = - ThreadLocal.withInitial(QueryPerformanceRecorder::new); + ThreadLocal.withInitial(QueryPerformanceRecorder::new); private static final ThreadLocal poolAllocatedBytes = ThreadLocal.withInitial( - () -> new MutableLong(ThreadProfiler.DEFAULT.memoryProfilingAvailable() ? 0L - : io.deephaven.util.QueryConstants.NULL_LONG)); + () -> new MutableLong(ThreadProfiler.DEFAULT.memoryProfilingAvailable() ? 
0L + : io.deephaven.util.QueryConstants.NULL_LONG)); private static final ThreadLocal cachedCallsite = new ThreadLocal<>(); static { final Configuration config = Configuration.getInstance(); final Set filters = new HashSet<>(); - final String propVal = - config.getProperty("QueryPerformanceRecorder.packageFilter.internal"); + final String propVal = config.getProperty("QueryPerformanceRecorder.packageFilter.internal"); final URL path = QueryPerformanceRecorder.class.getResource("/" + propVal); if (path == null) { - throw new RuntimeException( - "Can not locate package filter file " + propVal + " in classpath"); + throw new RuntimeException("Can not locate package filter file " + propVal + " in classpath"); } - try (final BufferedReader reader = - new BufferedReader(new InputStreamReader(path.openStream()))) { + try (final BufferedReader reader = new BufferedReader(new InputStreamReader(path.openStream()))) { String line; while ((line = reader.readLine()) != null) { if (!line.isEmpty()) { @@ -87,8 +83,7 @@ public static QueryPerformanceRecorder getInstance() { } public static void resetInstance() { - // clear interrupted - because this is a good place to do it - no cancellation exception - // here though + // clear interrupted - because this is a good place to do it - no cancellation exception here though // noinspection ResultOfMethodCallIgnored Thread.interrupted(); theLocal.remove(); @@ -154,7 +149,7 @@ public synchronized boolean endQuery() { private void startCatchAll(final int evaluationNumber) { catchAllNugget = new QueryPerformanceNugget( - evaluationNumber, 0, UNINSTRUMENTED_CODE_DESCRIPTION, false, QueryConstants.NULL_LONG); + evaluationNumber, 0, UNINSTRUMENTED_CODE_DESCRIPTION, false, QueryConstants.NULL_LONG); } private void stopCatchAll(final boolean abort) { @@ -172,8 +167,7 @@ private void stopCatchAll(final boolean abort) { /** * @param name the nugget name - * @return A new QueryPerformanceNugget to encapsulate user query operations. 
done() must be - * called on the nugget. + * @return A new QueryPerformanceNugget to encapsulate user query operations. done() must be called on the nugget. */ public QueryPerformanceNugget getNugget(String name) { return getNugget(name, QueryConstants.NULL_LONG); @@ -182,8 +176,7 @@ public QueryPerformanceNugget getNugget(String name) { /** * @param name the nugget name * @param inputSize the nugget's input size - * @return A new QueryPerformanceNugget to encapsulate user query operations. done() must be - * called on the nugget. + * @return A new QueryPerformanceNugget to encapsulate user query operations. done() must be called on the nugget. */ public synchronized QueryPerformanceNugget getNugget(final String name, final long inputSize) { if (state != QueryState.RUNNING) { @@ -196,18 +189,17 @@ public synchronized QueryPerformanceNugget getNugget(final String name, final lo stopCatchAll(false); } final QueryPerformanceNugget nugget = new QueryPerformanceNugget( - queryNugget.getEvaluationNumber(), userNuggetStack.size(), - name, true, inputSize); + queryNugget.getEvaluationNumber(), userNuggetStack.size(), + name, true, inputSize); operationNuggets.add(nugget); userNuggetStack.addLast(nugget); return nugget; } /** - * Note: Do not call this directly - it's for nugget use only. Call nugget.done(), - * instead. TODO: Reverse the disclaimer above - I think it's much better for the recorder to - * support done/abort(nugget), rather than continuing to have the nugget support - * done/abort(recorder). + * Note: Do not call this directly - it's for nugget use only. Call nugget.done(), instead. TODO: Reverse the + * disclaimer above - I think it's much better for the recorder to support done/abort(nugget), rather than + * continuing to have the nugget support done/abort(recorder). * * @param nugget the nugget to be released * @return If the nugget passes criteria for logging. 
@@ -220,11 +212,11 @@ synchronized boolean releaseNugget(QueryPerformanceNugget nugget) { final QueryPerformanceNugget removed = userNuggetStack.removeLast(); if (nugget != removed) { - throw new IllegalStateException("Released query performance nugget " + nugget + " (" - + System.identityHashCode(nugget) + - ") didn't match the top of the user nugget stack " + removed + " (" - + System.identityHashCode(removed) + - ") - did you follow the correct try/finally pattern?"); + throw new IllegalStateException( + "Released query performance nugget " + nugget + " (" + System.identityHashCode(nugget) + + ") didn't match the top of the user nugget stack " + removed + " (" + + System.identityHashCode(removed) + + ") - did you follow the correct try/finally pattern?"); } if (removed.shouldLogMenAndStackParents()) { @@ -234,18 +226,15 @@ synchronized boolean releaseNugget(QueryPerformanceNugget nugget) { } } if (!shouldLog) { - // If we have filtered this nugget, by our filter design we will also have filtered any - // nuggets it encloses. - // This means it *must* be the last entry in operationNuggets, so we can safely remove - // it in O(1). - final QueryPerformanceNugget lastNugget = - operationNuggets.remove(operationNuggets.size() - 1); + // If we have filtered this nugget, by our filter design we will also have filtered any nuggets it encloses. + // This means it *must* be the last entry in operationNuggets, so we can safely remove it in O(1). 
+ final QueryPerformanceNugget lastNugget = operationNuggets.remove(operationNuggets.size() - 1); if (nugget != lastNugget) { - throw new IllegalStateException("Filtered query performance nugget " + nugget + " (" - + System.identityHashCode(nugget) + - ") didn't match the last operation nugget " + lastNugget + " (" - + System.identityHashCode(lastNugget) + - ")"); + throw new IllegalStateException( + "Filtered query performance nugget " + nugget + " (" + System.identityHashCode(nugget) + + ") didn't match the last operation nugget " + lastNugget + " (" + + System.identityHashCode(lastNugget) + + ")"); } } @@ -319,32 +308,27 @@ public synchronized Table getTimingResultsAsTable() { isCompileTime[i] = operationNuggets.get(i).getName().startsWith("Compile:"); } return TableTools.newTable( - TableTools.col("names", names), - TableTools.col("line", callerLine), - TableTools.col("timeNanos", timeNanos), - TableTools.col("isTopLevel", isTopLevel), - TableTools.col("isCompileTime", isCompileTime)); + TableTools.col("names", names), + TableTools.col("line", callerLine), + TableTools.col("timeNanos", timeNanos), + TableTools.col("isTopLevel", isTopLevel), + TableTools.col("isCompileTime", isCompileTime)); } /** - * Record a single-threaded operation's allocations as "pool" allocated memory attributable to - * the current thread. + * Record a single-threaded operation's allocations as "pool" allocated memory attributable to the current thread. * * @param operation The operation to record allocation for * @return The result of the operation. 
*/ - public static RESULT_TYPE recordPoolAllocation( - @NotNull final Supplier operation) { - final long startThreadAllocatedBytes = - ThreadProfiler.DEFAULT.getCurrentThreadAllocatedBytes(); + public static RESULT_TYPE recordPoolAllocation(@NotNull final Supplier operation) { + final long startThreadAllocatedBytes = ThreadProfiler.DEFAULT.getCurrentThreadAllocatedBytes(); try { return operation.get(); } finally { - final long endThreadAllocatedBytes = - ThreadProfiler.DEFAULT.getCurrentThreadAllocatedBytes(); + final long endThreadAllocatedBytes = ThreadProfiler.DEFAULT.getCurrentThreadAllocatedBytes(); final MutableLong poolAllocatedBytesForCurrentThread = poolAllocatedBytes.get(); - poolAllocatedBytesForCurrentThread - .setValue(plus(poolAllocatedBytesForCurrentThread.longValue(), + poolAllocatedBytesForCurrentThread.setValue(plus(poolAllocatedBytesForCurrentThread.longValue(), minus(endThreadAllocatedBytes, startThreadAllocatedBytes))); } } @@ -435,8 +419,8 @@ public static T withNugget(final String name, final Supplier r) { * @param r the stuff to run * @throws T exception of type T */ - public static void withNuggetThrowing(final String name, - final Procedure.ThrowingNullary r) throws T { + public static void withNuggetThrowing(final String name, final Procedure.ThrowingNullary r) + throws T { final boolean needClear = setCallsite(); QueryPerformanceNugget nugget = null; try { @@ -456,7 +440,7 @@ public static void withNuggetThrowing(final String name, * @throws ExceptionType exception of type ExceptionType */ public static R withNuggetThrowing(final String name, - final Function.ThrowingNullary r) throws ExceptionType { + final Function.ThrowingNullary r) throws ExceptionType { final boolean needClear = setCallsite(); QueryPerformanceNugget nugget = null; try { @@ -473,8 +457,7 @@ public static R withNuggetThrowing(final St * @param name the nugget name * @param r the stuff to run */ - public static void withNugget(final String name, final long inputSize, - 
final Procedure.Nullary r) { + public static void withNugget(final String name, final long inputSize, final Procedure.Nullary r) { final boolean needClear = setCallsite(); QueryPerformanceNugget nugget = null; try { @@ -510,8 +493,8 @@ public static T withNugget(final String name, final long inputSize, final Su * @throws T exception of type T */ @SuppressWarnings("unused") - public static void withNuggetThrowing(final String name, - final long inputSize, final Procedure.ThrowingNullary r) throws T { + public static void withNuggetThrowing(final String name, final long inputSize, + final Procedure.ThrowingNullary r) throws T { final boolean needClear = setCallsite(); QueryPerformanceNugget nugget = null; try { @@ -531,9 +514,8 @@ public static void withNuggetThrowing(final String name, * @throws ExceptionType exception of type ExceptionType */ @SuppressWarnings("unused") - public static R withNuggetThrowing(final String name, - final long inputSize, final Function.ThrowingNullary r) - throws ExceptionType { + public static R withNuggetThrowing(final String name, final long inputSize, + final Function.ThrowingNullary r) throws ExceptionType { final boolean needClear = setCallsite(); QueryPerformanceNugget nugget = null; try { @@ -546,14 +528,13 @@ public static R withNuggetThrowing(final St /** *

    - * Attempt to set the thread local callsite so that invocations of {@link #getCallerLine()} will - * not spend time trying to recompute. + * Attempt to set the thread local callsite so that invocations of {@link #getCallerLine()} will not spend time + * trying to recompute. *

    * *

    - * This method returns a boolean if the value was successfully set. In the event this returns - * true, it's the responsibility of the caller to invoke {@link #clearCallsite()} when the - * operation is complete. + * This method returns a boolean if the value was successfully set. In the event this returns true, it's the + * responsibility of the caller to invoke {@link #clearCallsite()} when the operation is complete. *

    * *

    @@ -586,8 +567,8 @@ public static boolean setCallsite(String callsite) { /** *

    - * Attempt to compute and set the thread local callsite so that invocations of - * {@link #getCallerLine()} will not spend time trying to recompute. + * Attempt to compute and set the thread local callsite so that invocations of {@link #getCallerLine()} will not + * spend time trying to recompute. *

    * *

    @@ -597,8 +578,7 @@ public static boolean setCallsite(String callsite) { * @return true if the callsite was computed and set. */ public static boolean setCallsite() { - // This is very similar to the other getCallsite, but we don't want to invoke - // getCallerLine() unless we + // This is very similar to the other getCallsite, but we don't want to invoke getCallerLine() unless we // really need to. if (cachedCallsite.get() == null) { cachedCallsite.set(getCallerLine()); diff --git a/DB/src/main/java/io/deephaven/db/tables/utils/TableDiff.java b/DB/src/main/java/io/deephaven/db/tables/utils/TableDiff.java index 899673c4595..02d5090711e 100644 --- a/DB/src/main/java/io/deephaven/db/tables/utils/TableDiff.java +++ b/DB/src/main/java/io/deephaven/db/tables/utils/TableDiff.java @@ -22,7 +22,7 @@ public class TableDiff { private static final int chunkSize = 1 << 16; private static final EnumSet DOUBLES_EXACT_AND_FRACTION = - EnumSet.of(DiffItems.DoublesExact, DiffItems.DoubleFraction); + EnumSet.of(DiffItems.DoublesExact, DiffItems.DoubleFraction); private static final double DOUBLE_EXACT_THRESHOLD = 0.0001; private static final double FLOAT_EXACT_THRESHOLD = 0.005; @@ -36,14 +36,13 @@ public class TableDiff { * @return a pair containing an error description String and the first different line */ @NotNull - static Pair diffInternal(Table actualResult, Table expectedResult, - long maxDiffLines, EnumSet itemsToSkip) { + static Pair diffInternal(Table actualResult, Table expectedResult, long maxDiffLines, + EnumSet itemsToSkip) { final List issues = new ArrayList<>(); long firstDifferentPosition = Long.MAX_VALUE; if (expectedResult == null) { - throw new IllegalArgumentException( - "Can not pass null expected result to TableTools.diff!"); + throw new IllegalArgumentException("Can not pass null expected result to TableTools.diff!"); } if (actualResult == null) { @@ -52,21 +51,18 @@ static Pair diffInternal(Table actualResult, Table expectedResult, } if 
(actualResult.size() != expectedResult.size()) { - issues.add("Result table has size " + actualResult.size() + " vs. expected " - + expectedResult.size()); + issues.add("Result table has size " + actualResult.size() + " vs. expected " + expectedResult.size()); if (issues.size() >= maxDiffLines) { return makeResult(issues, maxDiffLines, firstDifferentPosition); } } - final Map actualNameToColumnSource = - actualResult.getColumnSourceMap(); - final Map expectedNameToColumnSource = - expectedResult.getColumnSourceMap(); - final String[] actualColumnNames = actualResult.getDefinition().getColumnNames() - .toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY); - final String[] expectedColumnNames = expectedResult.getDefinition().getColumnNames() - .toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + final Map actualNameToColumnSource = actualResult.getColumnSourceMap(); + final Map expectedNameToColumnSource = expectedResult.getColumnSourceMap(); + final String[] actualColumnNames = + actualResult.getDefinition().getColumnNames().toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + final String[] expectedColumnNames = + expectedResult.getDefinition().getColumnNames().toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY); for (final String actualColumnName : actualColumnNames) { if (!expectedNameToColumnSource.containsKey(actualColumnName)) { @@ -80,23 +76,21 @@ static Pair diffInternal(Table actualResult, Table expectedResult, final Set columnNamesForDiff = new LinkedHashSet<>(); for (int ci = 0; ci < expectedColumnNames.length; ci++) { final String expectedColumnName = expectedColumnNames[ci]; - final ColumnSource expectedColumnSource = - expectedNameToColumnSource.get(expectedColumnName); - final ColumnSource actualColumnSource = - actualNameToColumnSource.get(expectedColumnName); + final ColumnSource expectedColumnSource = expectedNameToColumnSource.get(expectedColumnName); + final ColumnSource actualColumnSource = actualNameToColumnSource.get(expectedColumnName); if 
(actualColumnSource == null) { issues.add("Expected column " + expectedColumnName + " not found"); } else { if (actualColumnNames.length - 1 < ci) { if (!itemsToSkip.contains(DiffItems.ColumnsOrder)) { - issues.add("Expected column " + expectedColumnName - + " is found but not on expected position (" + ci + ")"); + issues.add("Expected column " + expectedColumnName + " is found but not on expected position (" + + ci + ")"); } } else { if (!expectedColumnName.equals(actualColumnNames[ci])) { if (!itemsToSkip.contains(DiffItems.ColumnsOrder)) { issues.add("Expected column " + expectedColumnName - + " is found but not on expected position (" + ci + ")"); + + " is found but not on expected position (" + ci + ")"); } } } @@ -104,8 +98,8 @@ static Pair diffInternal(Table actualResult, Table expectedResult, final Class expectedType = expectedColumnSource.getType(); final Class actualType = actualColumnSource.getType(); if (actualType != expectedType) { - issues.add("Expected type of " + expectedColumnName + " is " + expectedType - + " actual type is " + actualType); + issues.add("Expected type of " + expectedColumnName + " is " + expectedType + " actual type is " + + actualType); } else { columnNamesForDiff.add(expectedColumnName); } @@ -116,47 +110,37 @@ static Pair diffInternal(Table actualResult, Table expectedResult, } try (final SafeCloseableList safeCloseables = new SafeCloseableList(); - final SharedContext expectedSharedContext = SharedContext.makeSharedContext(); - final SharedContext actualSharedContext = SharedContext.makeSharedContext(); - final WritableBooleanChunk equalValues = - WritableBooleanChunk.makeWritableChunk(chunkSize)) { + final SharedContext expectedSharedContext = SharedContext.makeSharedContext(); + final SharedContext actualSharedContext = SharedContext.makeSharedContext(); + final WritableBooleanChunk equalValues = WritableBooleanChunk.makeWritableChunk(chunkSize)) { final ColumnDiffContext[] columnContexts = columnNamesForDiff.stream() - 
.map(name -> safeCloseables.add(new ColumnDiffContext(name, - expectedNameToColumnSource.get(name), expectedSharedContext, - actualNameToColumnSource.get(name), actualSharedContext))) - .toArray(ColumnDiffContext[]::new); + .map(name -> safeCloseables.add(new ColumnDiffContext(name, expectedNameToColumnSource.get(name), + expectedSharedContext, actualNameToColumnSource.get(name), actualSharedContext))) + .toArray(ColumnDiffContext[]::new); - try ( - final OrderedKeys.Iterator expectedIterator = - expectedResult.getIndex().getOrderedKeysIterator(); - final OrderedKeys.Iterator actualIterator = - actualResult.getIndex().getOrderedKeysIterator()) { + try (final OrderedKeys.Iterator expectedIterator = expectedResult.getIndex().getOrderedKeysIterator(); + final OrderedKeys.Iterator actualIterator = actualResult.getIndex().getOrderedKeysIterator()) { int columnsRemaining = columnContexts.length; long position = 0; - while (expectedIterator.hasMore() && actualIterator.hasMore() - && columnsRemaining > 0) { - final OrderedKeys expectedChunkOk = - expectedIterator.getNextOrderedKeysWithLength(chunkSize); - final OrderedKeys actualChunkOk = - actualIterator.getNextOrderedKeysWithLength(chunkSize); + while (expectedIterator.hasMore() && actualIterator.hasMore() && columnsRemaining > 0) { + final OrderedKeys expectedChunkOk = expectedIterator.getNextOrderedKeysWithLength(chunkSize); + final OrderedKeys actualChunkOk = actualIterator.getNextOrderedKeysWithLength(chunkSize); for (int ci = 0; ci < columnContexts.length; ++ci) { final ColumnDiffContext columnContext = columnContexts[ci]; if (columnContext == null) { continue; } - final long columnFirstDifferentPosition = - columnContext.diffChunk(expectedChunkOk, actualChunkOk, equalValues, - itemsToSkip, issues, position); + final long columnFirstDifferentPosition = columnContext.diffChunk(expectedChunkOk, + actualChunkOk, equalValues, itemsToSkip, issues, position); if (columnFirstDifferentPosition == -1L) { continue; } 
--columnsRemaining; columnContexts[ci] = null; - firstDifferentPosition = - Math.min(columnFirstDifferentPosition, firstDifferentPosition); + firstDifferentPosition = Math.min(columnFirstDifferentPosition, firstDifferentPosition); if (issues.size() >= maxDiffLines) { return makeResult(issues, maxDiffLines, firstDifferentPosition); } @@ -171,8 +155,8 @@ static Pair diffInternal(Table actualResult, Table expectedResult, return makeResult(issues, maxDiffLines, firstDifferentPosition); } - private static Pair makeResult(@NotNull final List issues, - final long maxDiffLines, final long firstDifferentPosition) { + private static Pair makeResult(@NotNull final List issues, final long maxDiffLines, + final long firstDifferentPosition) { final StringBuilder result = new StringBuilder(); int count = 0; for (final String issue : issues) { @@ -184,8 +168,7 @@ private static Pair makeResult(@NotNull final List issues, count++; } // noinspection AutoBoxing - return new Pair<>(result.toString(), - firstDifferentPosition == Long.MAX_VALUE ? 0 : firstDifferentPosition); + return new Pair<>(result.toString(), firstDifferentPosition == Long.MAX_VALUE ? 0 : firstDifferentPosition); } /** @@ -193,8 +176,8 @@ private static Pair makeResult(@NotNull final List issues, */ public enum DiffItems { /** - * Doubles and Floats are not treated as differences if they are within - * {@link #DOUBLE_EXACT_THRESHOLD} or {@link #FLOAT_EXACT_THRESHOLD}. + * Doubles and Floats are not treated as differences if they are within {@link #DOUBLE_EXACT_THRESHOLD} or + * {@link #FLOAT_EXACT_THRESHOLD}. */ DoublesExact, /** @@ -203,8 +186,7 @@ public enum DiffItems { ColumnsOrder, /** * Doubles and Floats are not treated as differences if they are within a factor of - * {@link #DOUBLE_EXACT_THRESHOLD} or {@link #FLOAT_EXACT_THRESHOLD}. DoublesExact must also - * be set. + * {@link #DOUBLE_EXACT_THRESHOLD} or {@link #FLOAT_EXACT_THRESHOLD}. DoublesExact must also be set. 
*/ DoubleFraction } @@ -220,10 +202,10 @@ private static final class ColumnDiffContext implements Context { private final ChunkEquals chunkEquals; private ColumnDiffContext(@NotNull final String name, - @NotNull final ColumnSource expectedColumnSource, - @NotNull final SharedContext expectedSharedContext, - @NotNull final ColumnSource actualColumnSource, - @NotNull final SharedContext actualSharedContext) { + @NotNull final ColumnSource expectedColumnSource, + @NotNull final SharedContext expectedSharedContext, + @NotNull final ColumnSource actualColumnSource, + @NotNull final SharedContext actualSharedContext) { this.name = name; this.expectedColumnSource = expectedColumnSource; expectedContext = expectedColumnSource.makeGetContext(chunkSize, expectedSharedContext); @@ -242,19 +224,18 @@ private ColumnDiffContext(@NotNull final String name, * @param itemsToSkip {@link DiffItems} to skip * @param issues A place to record issues * @param position The row number to start from, 0-indexed - * @return -1 if the expected and actual chunks were equal, else the position in row space - * of the first difference + * @return -1 if the expected and actual chunks were equal, else the position in row space of the first + * difference */ private long diffChunk(@NotNull final OrderedKeys expectedChunkOk, - @NotNull final OrderedKeys actualChunkOk, - @NotNull final WritableBooleanChunk equalValues, - @NotNull final Set itemsToSkip, - @NotNull final List issues, - long position) { + @NotNull final OrderedKeys actualChunkOk, + @NotNull final WritableBooleanChunk equalValues, + @NotNull final Set itemsToSkip, + @NotNull final List issues, + long position) { final Chunk expectedValues = - expectedColumnSource.getChunk(expectedContext, expectedChunkOk); - final Chunk actualValues = - actualColumnSource.getChunk(actualContext, actualChunkOk); + expectedColumnSource.getChunk(expectedContext, expectedChunkOk); + final Chunk actualValues = actualColumnSource.getChunk(actualContext, 
actualChunkOk); if (expectedValues.size() < actualValues.size()) { chunkEquals.equal(expectedValues, actualValues, equalValues); @@ -270,109 +251,87 @@ private long diffChunk(@NotNull final OrderedKeys expectedChunkOk, final Object expectedValue = expectedValues.asObjectChunk().get(ii); final Object actualValue = actualValues.asObjectChunk().get(ii); - if (actualValue == null || expectedValue == null - || !actualValue.getClass().isArray()) { - issues.add("Column " + name - + " different from the expected set, first difference at row " + - position + " encountered " + actualValue + " expected " - + expectedValue); + if (actualValue == null || expectedValue == null || !actualValue.getClass().isArray()) { + issues.add("Column " + name + " different from the expected set, first difference at row " + + position + " encountered " + actualValue + " expected " + expectedValue); return position; } if (!ArrayUtils.equals(actualValue, expectedValue)) { - issues.add("Column " + name - + " different from the expected set, first difference at row " + - position + " encountered " + ArrayUtils.toString(actualValue) - + " expected " + ArrayUtils.toString(expectedValue)); + issues.add("Column " + name + " different from the expected set, first difference at row " + + position + " encountered " + ArrayUtils.toString(actualValue) + " expected " + + ArrayUtils.toString(expectedValue)); return position; } } else if (chunkType == ChunkType.Float) { final float expectedValue = expectedValues.asFloatChunk().get(ii); final float actualValue = actualValues.asFloatChunk().get(ii); if (expectedValue == io.deephaven.util.QueryConstants.NULL_FLOAT - || actualValue == io.deephaven.util.QueryConstants.NULL_FLOAT) { - final String actualString = - actualValue == io.deephaven.util.QueryConstants.NULL_FLOAT ? "null" + || actualValue == io.deephaven.util.QueryConstants.NULL_FLOAT) { + final String actualString = actualValue == io.deephaven.util.QueryConstants.NULL_FLOAT ? 
"null" : Float.toString(actualValue); final String expectString = - expectedValue == io.deephaven.util.QueryConstants.NULL_FLOAT ? "null" - : Float.toString(expectedValue); - issues.add("Column " + name - + " different from the expected set, first difference at row " + - position + " encountered " + actualString + " expected " - + expectString); + expectedValue == io.deephaven.util.QueryConstants.NULL_FLOAT ? "null" + : Float.toString(expectedValue); + issues.add("Column " + name + " different from the expected set, first difference at row " + + position + " encountered " + actualString + " expected " + expectString); return position; } final float difference = Math.abs(expectedValue - actualValue); if (itemsToSkip.containsAll(DOUBLES_EXACT_AND_FRACTION)) { - final float fracDiff = - difference / Math.min(Math.abs(expectedValue), Math.abs(actualValue)); - // if we are different by more than 0.5%, then we have an error; otherwise - // it is within bounds + final float fracDiff = difference / Math.min(Math.abs(expectedValue), Math.abs(actualValue)); + // if we are different by more than 0.5%, then we have an error; otherwise it is within bounds if (fracDiff > FLOAT_EXACT_THRESHOLD) { - issues.add("Column " + name - + " different from the expected set, first difference at row " + - position + " encountered " + actualValue + " expected " - + expectedValue + " (difference = " + difference + ")"); + issues.add("Column " + name + " different from the expected set, first difference at row " + + position + " encountered " + actualValue + " expected " + expectedValue + + " (difference = " + difference + ")"); return position; } - } else if (difference > FLOAT_EXACT_THRESHOLD - || !itemsToSkip.contains(DiffItems.DoublesExact)) { - issues.add("Column " + name - + " different from the expected set, first difference at row " + - position + " encountered " + actualValue + " expected " + expectedValue - + " (difference = " + difference + ")"); + } else if (difference > 
FLOAT_EXACT_THRESHOLD || !itemsToSkip.contains(DiffItems.DoublesExact)) { + issues.add("Column " + name + " different from the expected set, first difference at row " + + position + " encountered " + actualValue + " expected " + expectedValue + + " (difference = " + difference + ")"); return position; } } else if (chunkType == ChunkType.Double) { final double expectedValue = expectedValues.asDoubleChunk().get(ii); final double actualValue = actualValues.asDoubleChunk().get(ii); if (expectedValue == io.deephaven.util.QueryConstants.NULL_DOUBLE - || actualValue == io.deephaven.util.QueryConstants.NULL_DOUBLE) { - final String actualString = - actualValue == io.deephaven.util.QueryConstants.NULL_DOUBLE ? "null" + || actualValue == io.deephaven.util.QueryConstants.NULL_DOUBLE) { + final String actualString = actualValue == io.deephaven.util.QueryConstants.NULL_DOUBLE ? "null" : Double.toString(actualValue); final String expectString = - expectedValue == QueryConstants.NULL_DOUBLE ? "null" - : Double.toString(expectedValue); - issues.add("Column " + name - + " different from the expected set, first difference at row " + - position + " encountered " + actualString + " expected " - + expectString); + expectedValue == QueryConstants.NULL_DOUBLE ? 
"null" : Double.toString(expectedValue); + issues.add("Column " + name + " different from the expected set, first difference at row " + + position + " encountered " + actualString + " expected " + expectString); return position; } final double difference = Math.abs(expectedValue - actualValue); if (itemsToSkip.containsAll(DOUBLES_EXACT_AND_FRACTION)) { - final double fracDiff = - difference / Math.min(Math.abs(expectedValue), Math.abs(actualValue)); - // if we are different by more than 0.01%, then we have an error; otherwise - // it is within bounds + final double fracDiff = difference / Math.min(Math.abs(expectedValue), Math.abs(actualValue)); + // if we are different by more than 0.01%, then we have an error; otherwise it is within bounds if (fracDiff > DOUBLE_EXACT_THRESHOLD) { - issues.add("Column " + name - + " different from the expected set, first difference at row " + - position + " encountered " + actualValue + " expected " - + expectedValue + " (difference = " + difference + ")"); + issues.add("Column " + name + " different from the expected set, first difference at row " + + position + " encountered " + actualValue + " expected " + expectedValue + + " (difference = " + difference + ")"); return position; } - } else if (difference > DOUBLE_EXACT_THRESHOLD - || !itemsToSkip.contains(DiffItems.DoublesExact)) { - issues.add("Column " + name - + " different from the expected set, first difference at row " + - position + " encountered " + actualValue + " expected " + expectedValue - + " (difference = " + difference + ")"); + } else if (difference > DOUBLE_EXACT_THRESHOLD || !itemsToSkip.contains(DiffItems.DoublesExact)) { + issues.add("Column " + name + " different from the expected set, first difference at row " + + position + " encountered " + actualValue + " expected " + expectedValue + + " (difference = " + difference + ")"); return position; } } else { // noinspection unchecked - final String expectedString = ChunkUtils.extractKeyStringFromChunks( - new 
ChunkType[] {chunkType}, new Chunk[] {expectedValues}, ii); + final String expectedString = ChunkUtils.extractKeyStringFromChunks(new ChunkType[] {chunkType}, + new Chunk[] {expectedValues}, ii); // noinspection unchecked - final String actualString = ChunkUtils.extractKeyStringFromChunks( - new ChunkType[] {chunkType}, new Chunk[] {actualValues}, ii); + final String actualString = ChunkUtils.extractKeyStringFromChunks(new ChunkType[] {chunkType}, + new Chunk[] {actualValues}, ii); - issues.add("Column " + name - + " different from the expected set, first difference at row " + - position + " encountered " + actualString + " expected " + expectedString); + issues.add("Column " + name + " different from the expected set, first difference at row " + + position + " encountered " + actualString + " expected " + expectedString); return position; } } diff --git a/DB/src/main/java/io/deephaven/db/tables/utils/TableShowTools.java b/DB/src/main/java/io/deephaven/db/tables/utils/TableShowTools.java index 1a56fd1aa2e..a59aeea3cf4 100644 --- a/DB/src/main/java/io/deephaven/db/tables/utils/TableShowTools.java +++ b/DB/src/main/java/io/deephaven/db/tables/utils/TableShowTools.java @@ -22,18 +22,16 @@ */ class TableShowTools { - static void showInternal(Table source, long firstRow, long lastRowExclusive, - DBTimeZone timeZone, String delimiter, PrintStream out, boolean showIndex, - String[] columns) { - final QueryPerformanceNugget nugget = - QueryPerformanceRecorder.getInstance().getNugget("TableTools.show()"); + static void showInternal(Table source, long firstRow, long lastRowExclusive, DBTimeZone timeZone, String delimiter, + PrintStream out, boolean showIndex, String[] columns) { + final QueryPerformanceNugget nugget = QueryPerformanceRecorder.getInstance().getNugget("TableTools.show()"); try { if (columns.length == 0) { final List columnNames = source.getDefinition().getColumnNames(); columns = columnNames.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY); } final 
ColumnSource[] columnSources = - Arrays.stream(columns).map(source::getColumnSource).toArray(ColumnSource[]::new); + Arrays.stream(columns).map(source::getColumnSource).toArray(ColumnSource[]::new); final Index index = source.getIndex(); int lineLen = 0; @@ -55,8 +53,7 @@ static void showInternal(Table source, long firstRow, long lastRowExclusive, columnLimits.add(lineLen); lineLen++; } - final int columnLen = - columnLengths[i] = getColumnLen(column, columnSources[i], index); + final int columnLen = columnLengths[i] = getColumnLen(column, columnSources[i], index); while (columnLen > column.length()) { column = " " + column; } @@ -84,7 +81,7 @@ static void showInternal(Table source, long firstRow, long lastRowExclusive, final ColumnPrinter indexPrinter = new DefaultPrinter(10); long ri = 0; for (final Index.Iterator indexIterator = index.iterator(); ri < lastRowExclusive - && indexIterator.hasNext(); ++ri) { + && indexIterator.hasNext(); ++ri) { final long key = indexIterator.nextLong(); if (ri < firstRow) { continue; @@ -117,8 +114,7 @@ private static int getColumnLen(String name, ColumnSource columnSource, Index in len = Math.max(len, 40); } else if (columnSource.getType() == long.class || columnSource.getType() == Long.class) { len = Math.max(len, 20); - } else if (columnSource.getType() == double.class - || columnSource.getType() == Double.class) { + } else if (columnSource.getType() == double.class || columnSource.getType() == Double.class) { len = Math.max(len, 20); } else if (columnSource.getType() == DBDateTime.class) { len = Math.max(len, 33); @@ -127,8 +123,7 @@ private static int getColumnLen(String name, ColumnSource columnSource, Index in } else if (columnSource.getType() == SmartKey.class) { len = Math.max(len, 40); } else { - final Annotation annotation = - columnSource.getType().getAnnotation(TableToolsShowControl.class); + final Annotation annotation = columnSource.getType().getAnnotation(TableToolsShowControl.class); if (annotation != null) { len 
= Math.max(len, ((TableToolsShowControl) annotation).getWidth()); } else { @@ -147,8 +142,7 @@ private static int getColumnLen(String name, ColumnSource columnSource, Index in return len; } - private static ColumnPrinter getColumnPrinter(ColumnSource column, int len, - DBTimeZone timeZone) { + private static ColumnPrinter getColumnPrinter(ColumnSource column, int len, DBTimeZone timeZone) { if (column.getType() == DBDateTime.class) { return new DateTimePrinter(len, timeZone); } else if (!column.getType().isArray()) { diff --git a/DB/src/main/java/io/deephaven/db/tables/utils/TableTools.java b/DB/src/main/java/io/deephaven/db/tables/utils/TableTools.java index 37809aebe9d..fd0db6a3258 100644 --- a/DB/src/main/java/io/deephaven/db/tables/utils/TableTools.java +++ b/DB/src/main/java/io/deephaven/db/tables/utils/TableTools.java @@ -45,8 +45,8 @@ import java.util.*; /** - * Tools for working with tables. This includes methods to examine tables, combine them, convert - * them to and from CSV files, and create and manipulate columns. + * Tools for working with tables. This includes methods to examine tables, combine them, convert them to and from CSV + * files, and create and manipulate columns. */ @SuppressWarnings("unused") public class TableTools { @@ -64,13 +64,13 @@ private static BinaryOperator throwingMerger() { } private static Collector> toLinkedMap( - Function keyMapper, - Function valueMapper) { + Function keyMapper, + Function valueMapper) { return Collectors.toMap(keyMapper, valueMapper, throwingMerger(), LinkedHashMap::new); } private static final Collector> COLUMN_HOLDER_LINKEDMAP_COLLECTOR = - toLinkedMap(ColumnHolder::getName, ColumnHolder::getColumnSource); + toLinkedMap(ColumnHolder::getName, ColumnHolder::getColumnSource); /////////// Utilities To Display Tables ///////////////// // region Show Utilities @@ -86,8 +86,8 @@ public static void show(Table source, String... 
columns) { } /** - * Prints the first few rows of a table to standard output, and also prints the details of the - * index and record positions that provided the values. + * Prints the first few rows of a table to standard output, and also prints the details of the index and record + * positions that provided the values. * * @param source a Deephaven table object * @param columns varargs of column names to display @@ -129,8 +129,8 @@ public static void show(Table source, long maxRowCount, String... columns) { } /** - * Prints the first few rows of a table to standard output, and also prints the details of the - * index and record positions that provided the values. + * Prints the first few rows of a table to standard output, and also prints the details of the index and record + * positions that provided the values. * * @param source a Deephaven table object * @param maxRowCount the number of rows to return @@ -159,8 +159,7 @@ public static void showCommaDelimited(Table source, long maxRowCount, String... * @param timeZone a DBTimeZone constant relative to which DBDateTime data should be adjusted * @param columns varargs of column names to display */ - public static void show(Table source, long maxRowCount, DBTimeZone timeZone, - String... columns) { + public static void show(Table source, long maxRowCount, DBTimeZone timeZone, String... columns) { show(source, maxRowCount, timeZone, System.out, columns); } @@ -173,14 +172,13 @@ public static void show(Table source, long maxRowCount, DBTimeZone timeZone, * @param out a PrintStream destination to which to print the data * @param columns varargs of column names to display */ - public static void show(Table source, long maxRowCount, DBTimeZone timeZone, PrintStream out, - String... columns) { + public static void show(Table source, long maxRowCount, DBTimeZone timeZone, PrintStream out, String... 
columns) { show(source, maxRowCount, timeZone, "|", out, false, columns); } /** - * Prints the first few rows of a table to standard output, and also prints the details of the - * index and record positions that provided the values. + * Prints the first few rows of a table to standard output, and also prints the details of the index and record + * positions that provided the values. * * @param source a Deephaven table object * @param maxRowCount the number of rows to return @@ -188,14 +186,14 @@ public static void show(Table source, long maxRowCount, DBTimeZone timeZone, Pri * @param out a PrintStream destination to which to print the data * @param columns varargs of column names to display */ - public static void showWithIndex(Table source, long maxRowCount, DBTimeZone timeZone, - PrintStream out, String... columns) { + public static void showWithIndex(Table source, long maxRowCount, DBTimeZone timeZone, PrintStream out, + String... columns) { show(source, maxRowCount, timeZone, "|", out, true, columns); } /** - * Prints the first few rows of a table to standard output, and also prints the details of the - * index and record positions that provided the values. + * Prints the first few rows of a table to standard output, and also prints the details of the index and record + * positions that provided the values. * * @param source a Deephaven table object * @param firstRow the firstRow to display @@ -203,10 +201,8 @@ public static void showWithIndex(Table source, long maxRowCount, DBTimeZone time * @param out a PrintStream destination to which to print the data * @param columns varargs of column names to display */ - public static void showWithIndex(Table source, long firstRow, long lastRow, PrintStream out, - String... columns) { - TableShowTools.showInternal(source, firstRow, lastRow, DBTimeZone.TZ_DEFAULT, "|", out, - true, columns); + public static void showWithIndex(Table source, long firstRow, long lastRow, PrintStream out, String... 
columns) { + TableShowTools.showInternal(source, firstRow, lastRow, DBTimeZone.TZ_DEFAULT, "|", out, true, columns); } /** @@ -221,24 +217,21 @@ public static void showWithIndex(Table source, long firstRow, long lastRow, Prin * @param columns varargs of column names to display */ public static void show(final Table source, final long maxRowCount, final DBTimeZone timeZone, - final String delimiter, final PrintStream out, final boolean showIndex, String... columns) { - TableShowTools.showInternal(source, 0, maxRowCount, timeZone, delimiter, out, showIndex, - columns); + final String delimiter, final PrintStream out, final boolean showIndex, String... columns) { + TableShowTools.showInternal(source, 0, maxRowCount, timeZone, delimiter, out, showIndex, columns); } /** - * Prints the first few rows of a table to standard output, and also prints the details of the - * index and record positions that provided the values. + * Prints the first few rows of a table to standard output, and also prints the details of the index and record + * positions that provided the values. * * @param source a Deephaven table object * @param firstRow the firstRow to display * @param lastRow the lastRow (exclusive) to display * @param columns varargs of column names to display */ - public static void showWithIndex(final Table source, final long firstRow, final long lastRow, - String... columns) { - TableShowTools.showInternal(source, firstRow, lastRow, DBTimeZone.TZ_DEFAULT, "|", - System.out, true, columns); + public static void showWithIndex(final Table source, final long firstRow, final long lastRow, String... columns) { + TableShowTools.showInternal(source, firstRow, lastRow, DBTimeZone.TZ_DEFAULT, "|", System.out, true, columns); } /** @@ -296,8 +289,7 @@ public static String string(Table t, int size, DBTimeZone timeZone, String... co } /** - * Returns a printout of a table formatted as HTML. Limit use to small tables to avoid running - * out of memory. 
+ * Returns a printout of a table formatted as HTML. Limit use to small tables to avoid running out of memory. * * @param source a Deephaven table object * @return a String of the table printout formatted as HTML @@ -318,8 +310,7 @@ public static String html(Table source) { * @return String report of the detected differences */ public static String diff(Table actualResult, Table expectedResult, long maxDiffLines) { - return diff(actualResult, expectedResult, maxDiffLines, - EnumSet.noneOf(TableDiff.DiffItems.class)); + return diff(actualResult, expectedResult, maxDiffLines, EnumSet.noneOf(TableDiff.DiffItems.class)); } /** @@ -328,14 +319,13 @@ public static String diff(Table actualResult, Table expectedResult, long maxDiff * @param actualResult first Deephaven table object to compare * @param expectedResult second Deephaven table object to compare * @param maxDiffLines stop comparing after this many differences are found - * @param itemsToSkip EnumSet of checks not to perform, such as checking column order, or exact - * match of double values + * @param itemsToSkip EnumSet of checks not to perform, such as checking column order, or exact match of double + * values * @return String report of the detected differences */ public static String diff(Table actualResult, Table expectedResult, long maxDiffLines, - EnumSet itemsToSkip) { - return TableDiff.diffInternal(actualResult, expectedResult, maxDiffLines, itemsToSkip) - .getFirst(); + EnumSet itemsToSkip) { + return TableDiff.diffInternal(actualResult, expectedResult, maxDiffLines, itemsToSkip).getFirst(); } /** @@ -344,13 +334,13 @@ public static String diff(Table actualResult, Table expectedResult, long maxDiff * @param actualResult first Deephaven table object to compare * @param expectedResult second Deephaven table object to compare * @param maxDiffLines stop comparing after this many differences are found - * @param itemsToSkip EnumSet of checks not to perform, such as checking column order, or exact - * match 
of double values - * @return a pair of String report of the detected differences, and the first different row (0 - * if there are no different data values) + * @param itemsToSkip EnumSet of checks not to perform, such as checking column order, or exact match of double + * values + * @return a pair of String report of the detected differences, and the first different row (0 if there are no + * different data values) */ - public static Pair diffPair(Table actualResult, Table expectedResult, - long maxDiffLines, EnumSet itemsToSkip) { + public static Pair diffPair(Table actualResult, Table expectedResult, long maxDiffLines, + EnumSet itemsToSkip) { return TableDiff.diffInternal(actualResult, expectedResult, maxDiffLines, itemsToSkip); } // endregion @@ -367,8 +357,8 @@ static String nullToEmptyString(Object obj) { // region CSV Utilities /** - * Returns a memory table created from importing CSV data. The first row must be column names. - * Column data types are inferred from the data. + * Returns a memory table created from importing CSV data. The first row must be column names. Column data types are + * inferred from the data. * * @param is an InputStream providing access to the CSV data. * @return a Deephaven DynamicTable object @@ -380,8 +370,8 @@ public static DynamicTable readCsv(InputStream is) throws IOException { } /** - * Returns a memory table created from importing CSV data. The first row must be column names. - * Column data types are inferred from the data. + * Returns a memory table created from importing CSV data. The first row must be column names. Column data types are + * inferred from the data. * * @param is an InputStream providing access to the CSV data. * @param separator a char to use as the delimiter value when parsing the file. @@ -394,8 +384,8 @@ public static DynamicTable readCsv(InputStream is, final char separator) throws } /** - * Returns a memory table created from importing CSV data. The first row must be column names. 
- * Column data types are inferred from the data. + * Returns a memory table created from importing CSV data. The first row must be column names. Column data types are + * inferred from the data. * * @param filePath the fully-qualified path to a CSV file to be read. * @return a Deephaven Table object @@ -407,12 +397,12 @@ public static Table readCsv(String filePath) throws IOException { } /** - * Returns a memory table created from importing CSV data. The first row must be column names. - * Column data types are inferred from the data. + * Returns a memory table created from importing CSV data. The first row must be column names. Column data types are + * inferred from the data. * * @param filePath the fully-qualified path to a CSV file to be read. - * @param format an Apache Commons CSV format name to be used to parse the CSV, or a single - * non-newline character to use as a delimiter. + * @param format an Apache Commons CSV format name to be used to parse the CSV, or a single non-newline character to + * use as a delimiter. * @return a Deephaven Table object * @throws IOException if the file cannot be read */ @@ -422,27 +412,26 @@ public static Table readCsv(String filePath, String format) throws IOException { } /** - * Returns a memory table created from importing CSV data. The first row must be column names. - * Column data types are inferred from the data. + * Returns a memory table created from importing CSV data. The first row must be column names. Column data types are + * inferred from the data. * * @param filePath the fully-qualified path to a CSV file to be read. - * @param format an Apache Commons CSV format name to be used to parse the CSV, or a single - * non-newline character to use as a delimiter. - * @param progress a StatusCallback object that can be used to log progress details or update a - * progress bar. If passed explicitly as null, a StatusCallback instance will be created - * to log progress to the current logger. 
+ * @param format an Apache Commons CSV format name to be used to parse the CSV, or a single non-newline character to + * use as a delimiter. + * @param progress a StatusCallback object that can be used to log progress details or update a progress bar. If + * passed explicitly as null, a StatusCallback instance will be created to log progress to the current + * logger. * @return a Deephaven Table object * @throws IOException if the file cannot be read */ @ScriptApi - public static Table readCsv(String filePath, String format, StatusCallback progress) - throws IOException { + public static Table readCsv(String filePath, String format, StatusCallback progress) throws IOException { return readCsv(new File(filePath), format, progress); } /** - * Returns a memory table created from importing CSV data. The first row must be column names. - * Column data types are inferred from the data. + * Returns a memory table created from importing CSV data. The first row must be column names. Column data types are + * inferred from the data. * * @param file a file object providing access to the CSV file to be read. * @return a Deephaven Table object @@ -454,13 +443,13 @@ public static Table readCsv(File file) throws IOException { } /** - * Returns a memory table created from importing CSV data. The first row must be column names. - * Column data types are inferred from the data. + * Returns a memory table created from importing CSV data. The first row must be column names. Column data types are + * inferred from the data. * * @param file a file object providing access to the CSV file to be read. - * @param progress a StatusCallback object that can be used to log progress details or update a - * progress bar. If passed explicitly as null, a StatusCallback instance will be created - * to log progress to the current logger. + * @param progress a StatusCallback object that can be used to log progress details or update a progress bar. 
If + * passed explicitly as null, a StatusCallback instance will be created to log progress to the current + * logger. * @return a Deephaven Table object * @throws IOException if the file cannot be read */ @@ -470,32 +459,29 @@ public static Table readCsv(File file, StatusCallback progress) throws IOExcepti } /** - * Returns a memory table created from importing CSV data. The first row must be column names. - * Column data types are inferred from the data. + * Returns a memory table created from importing CSV data. The first row must be column names. Column data types are + * inferred from the data. * * @param file a file object providing access to the CSV file to be read. - * @param format an Apache Commons CSV format name to be used to parse the CSV, or a single - * non-newline character to use as a delimiter. - * @param progress a StatusCallback object that can be used to log progress details or update a - * progress bar. If passed explicitly as null, a StatusCallback instance will be created - * to log progress to the current logger. + * @param format an Apache Commons CSV format name to be used to parse the CSV, or a single non-newline character to + * use as a delimiter. + * @param progress a StatusCallback object that can be used to log progress details or update a progress bar. If + * passed explicitly as null, a StatusCallback instance will be created to log progress to the current + * logger. 
* @return a Deephaven Table object * @throws IOException if the file cannot be read */ @ScriptApi - public static Table readCsv(File file, String format, StatusCallback progress) - throws IOException { + public static Table readCsv(File file, String format, StatusCallback progress) throws IOException { Table table; - try (final InputStream is = - CompressedFileUtil.openPossiblyCompressedFile(file.getAbsolutePath())) { + try (final InputStream is = CompressedFileUtil.openPossiblyCompressedFile(file.getAbsolutePath())) { table = io.deephaven.db.tables.utils.CsvHelpers.readCsv(is, format, progress); } return table; } /** - * Returns a memory table created from importing CSV data. Column data types are inferred from - * the data. + * Returns a memory table created from importing CSV data. Column data types are inferred from the data. * * @param filePath the fully-qualified path to a CSV file to be read. * @return a Deephaven Table object @@ -507,8 +493,7 @@ public static Table readHeaderlessCsv(String filePath) throws IOException { } /** - * Returns a memory table created from importing CSV data. Column data types are inferred from - * the data. + * Returns a memory table created from importing CSV data. Column data types are inferred from the data. * * @param filePath the fully-qualified path to a CSV file to be read. * @param header Column names to use for the resultant table. @@ -516,14 +501,12 @@ public static Table readHeaderlessCsv(String filePath) throws IOException { * @throws IOException if the file cannot be read */ @ScriptApi - public static Table readHeaderlessCsv(String filePath, Collection header) - throws IOException { + public static Table readHeaderlessCsv(String filePath, Collection header) throws IOException { return readHeaderlessCsv(new File(filePath), null, null, header); } /** - * Returns a memory table created from importing CSV data. Column data types are inferred from - * the data. + * Returns a memory table created from importing CSV data. 
Column data types are inferred from the data. * * @param filePath the fully-qualified path to a CSV file to be read. * @param header Column names to use for the resultant table. @@ -536,48 +519,44 @@ public static Table readHeaderlessCsv(String filePath, String... header) throws } /** - * Returns a memory table created from importing CSV data. Column data types are inferred from - * the data. + * Returns a memory table created from importing CSV data. Column data types are inferred from the data. * * @param filePath the fully-qualified path to a CSV file to be read. - * @param format an Apache Commons CSV format name to be used to parse the CSV, or a single - * non-newline character to use as a delimiter. - * @param progress a StatusCallback object that can be used to log progress details or update a - * progress bar. If passed explicitly as null, a StatusCallback instance will be created - * to log progress to the current logger. + * @param format an Apache Commons CSV format name to be used to parse the CSV, or a single non-newline character to + * use as a delimiter. + * @param progress a StatusCallback object that can be used to log progress details or update a progress bar. If + * passed explicitly as null, a StatusCallback instance will be created to log progress to the current + * logger. * @param header Column names to use for the resultant table. * @return a Deephaven Table object * @throws IOException if the file cannot be read */ @ScriptApi public static Table readHeaderlessCsv(String filePath, String format, StatusCallback progress, - Collection header) throws IOException { + Collection header) throws IOException { return readHeaderlessCsv(new File(filePath), format, progress, header); } /** - * Returns a memory table created from importing CSV data. Column data types are inferred from - * the data. + * Returns a memory table created from importing CSV data. Column data types are inferred from the data. 
* * @param file a file object providing access to the CSV file to be read. - * @param format an Apache Commons CSV format name to be used to parse the CSV, or a single - * non-newline character to use as a delimiter. - * @param progress a StatusCallback object that can be used to log progress details or update a - * progress bar. If passed explicitly as null, a StatusCallback instance will be created - * to log progress to the current logger. - * @param header Column names to use for the resultant table, or null if column names should be - * automatically generated. + * @param format an Apache Commons CSV format name to be used to parse the CSV, or a single non-newline character to + * use as a delimiter. + * @param progress a StatusCallback object that can be used to log progress details or update a progress bar. If + * passed explicitly as null, a StatusCallback instance will be created to log progress to the current + * logger. + * @param header Column names to use for the resultant table, or null if column names should be automatically + * generated. 
* @return a Deephaven Table object * @throws IOException if the file cannot be read */ @ScriptApi public static Table readHeaderlessCsv(File file, String format, StatusCallback progress, - @Nullable Collection header) throws IOException { + @Nullable Collection header) throws IOException { Table table; - try (final InputStream is = - CompressedFileUtil.openPossiblyCompressedFile(file.getAbsolutePath())) { - table = io.deephaven.db.tables.utils.CsvHelpers.readHeaderlessCsv(is, format, progress, - header); + try (final InputStream is = CompressedFileUtil.openPossiblyCompressedFile(file.getAbsolutePath())) { + table = io.deephaven.db.tables.utils.CsvHelpers.readHeaderlessCsv(is, format, progress, header); } return table; } @@ -592,8 +571,8 @@ public static Table readHeaderlessCsv(File file, String format, StatusCallback p * @throws IOException if the target file cannot be written */ @ScriptApi - public static void writeCsv(Table source, boolean compressed, String destPath, - String... columns) throws IOException { + public static void writeCsv(Table source, boolean compressed, String destPath, String... columns) + throws IOException { writeCsv(source, compressed, destPath, false, columns); } @@ -608,8 +587,8 @@ public static void writeCsv(Table source, boolean compressed, String destPath, * @throws IOException if the target file cannot be written */ @ScriptApi - public static void writeCsv(Table source, boolean compressed, String destPath, - boolean nullsAsEmpty, String... columns) throws IOException { + public static void writeCsv(Table source, boolean compressed, String destPath, boolean nullsAsEmpty, + String... 
columns) throws IOException { writeCsv(source, destPath, compressed, DBTimeZone.TZ_DEFAULT, nullsAsEmpty, columns); } @@ -622,8 +601,7 @@ public static void writeCsv(Table source, boolean compressed, String destPath, * @throws IOException if the target file cannot be written */ @ScriptApi - public static void writeCsv(Table source, String destPath, String... columns) - throws IOException { + public static void writeCsv(Table source, String destPath, String... columns) throws IOException { writeCsv(source, destPath, false, columns); } @@ -637,8 +615,8 @@ public static void writeCsv(Table source, String destPath, String... columns) * @throws IOException if the target file cannot be written */ @ScriptApi - public static void writeCsv(Table source, String destPath, boolean nullsAsEmpty, - String... columns) throws IOException { + public static void writeCsv(Table source, String destPath, boolean nullsAsEmpty, String... columns) + throws IOException { writeCsv(source, destPath, false, DBTimeZone.TZ_DEFAULT, nullsAsEmpty, columns); } @@ -651,8 +629,7 @@ public static void writeCsv(Table source, String destPath, boolean nullsAsEmpty, * @throws IOException if there is a problem writing to the stream */ @ScriptApi - public static void writeCsv(Table source, PrintStream out, String... columns) - throws IOException { + public static void writeCsv(Table source, PrintStream out, String... columns) throws IOException { writeCsv(source, out, false, columns); } @@ -666,12 +643,11 @@ public static void writeCsv(Table source, PrintStream out, String... columns) * @throws IOException if there is a problem writing to the stream */ @ScriptApi - public static void writeCsv(Table source, PrintStream out, boolean nullsAsEmpty, - String... columns) throws IOException { + public static void writeCsv(Table source, PrintStream out, boolean nullsAsEmpty, String... 
columns) + throws IOException { final PrintWriter printWriter = new PrintWriter(out); final BufferedWriter bufferedWriter = new BufferedWriter(printWriter); - CsvHelpers.writeCsv(source, bufferedWriter, DBTimeZone.TZ_DEFAULT, null, nullsAsEmpty, ',', - columns); + CsvHelpers.writeCsv(source, bufferedWriter, DBTimeZone.TZ_DEFAULT, null, nullsAsEmpty, ',', columns); } /** @@ -685,8 +661,8 @@ public static void writeCsv(Table source, PrintStream out, boolean nullsAsEmpty, * @throws IOException if the target file cannot be written */ @ScriptApi - public static void writeCsv(Table source, String destPath, boolean compressed, - DBTimeZone timeZone, String... columns) throws IOException { + public static void writeCsv(Table source, String destPath, boolean compressed, DBTimeZone timeZone, + String... columns) throws IOException { CsvHelpers.writeCsv(source, destPath, compressed, timeZone, null, false, ',', columns); } @@ -702,10 +678,9 @@ public static void writeCsv(Table source, String destPath, boolean compressed, * @throws IOException if the target file cannot be written */ @ScriptApi - public static void writeCsv(Table source, String destPath, boolean compressed, - DBTimeZone timeZone, boolean nullsAsEmpty, String... columns) throws IOException { - CsvHelpers.writeCsv(source, destPath, compressed, timeZone, null, nullsAsEmpty, ',', - columns); + public static void writeCsv(Table source, String destPath, boolean compressed, DBTimeZone timeZone, + boolean nullsAsEmpty, String... columns) throws IOException { + CsvHelpers.writeCsv(source, destPath, compressed, timeZone, null, nullsAsEmpty, ',', columns); } /** @@ -721,11 +696,9 @@ public static void writeCsv(Table source, String destPath, boolean compressed, * @throws IOException if the target file cannot be written */ @ScriptApi - public static void writeCsv(Table source, String destPath, boolean compressed, - DBTimeZone timeZone, boolean nullsAsEmpty, char separator, String... 
columns) - throws IOException { - CsvHelpers.writeCsv(source, destPath, compressed, timeZone, null, nullsAsEmpty, separator, - columns); + public static void writeCsv(Table source, String destPath, boolean compressed, DBTimeZone timeZone, + boolean nullsAsEmpty, char separator, String... columns) throws IOException { + CsvHelpers.writeCsv(source, destPath, compressed, timeZone, null, nullsAsEmpty, separator, columns); } /** @@ -735,14 +708,13 @@ public static void writeCsv(Table source, String destPath, boolean compressed, * @param destPath path to the CSV file to be written * @param compressed whether to compress (bz2) the file being written * @param timeZone a DBTimeZone constant relative to which DBDateTime data should be adjusted - * @param tableSeparator a String (normally a single character) to be used as the table - * delimiter + * @param tableSeparator a String (normally a single character) to be used as the table delimiter * @param columns a list of columns to include in the export * @throws IOException if the target file cannot be written */ @ScriptApi - public static void writeCsv(Table[] sources, String destPath, boolean compressed, - DBTimeZone timeZone, String tableSeparator, String... columns) throws IOException { + public static void writeCsv(Table[] sources, String destPath, boolean compressed, DBTimeZone timeZone, + String tableSeparator, String... 
columns) throws IOException { writeCsv(sources, destPath, compressed, timeZone, tableSeparator, false, columns); } @@ -753,17 +725,14 @@ public static void writeCsv(Table[] sources, String destPath, boolean compressed * @param destPath path to the CSV file to be written * @param compressed whether to compress (bz2) the file being written * @param timeZone a DBTimeZone constant relative to which DBDateTime data should be adjusted - * @param tableSeparator a String (normally a single character) to be used as the table - * delimiter + * @param tableSeparator a String (normally a single character) to be used as the table delimiter * @param columns a list of columns to include in the export * @throws IOException if the target file cannot be written */ @ScriptApi - public static void writeCsv(Table[] sources, String destPath, boolean compressed, - DBTimeZone timeZone, String tableSeparator, boolean nullsAsEmpty, String... columns) - throws IOException { - writeCsv(sources, destPath, compressed, timeZone, tableSeparator, ',', nullsAsEmpty, - columns); + public static void writeCsv(Table[] sources, String destPath, boolean compressed, DBTimeZone timeZone, + String tableSeparator, boolean nullsAsEmpty, String... 
columns) throws IOException { + writeCsv(sources, destPath, compressed, timeZone, tableSeparator, ',', nullsAsEmpty, columns); } /** @@ -773,21 +742,18 @@ public static void writeCsv(Table[] sources, String destPath, boolean compressed * @param destPath path to the CSV file to be written * @param compressed whether to compress (bz2) the file being written * @param timeZone a DBTimeZone constant relative to which DBDateTime data should be adjusted - * @param tableSeparator a String (normally a single character) to be used as the table - * delimiter + * @param tableSeparator a String (normally a single character) to be used as the table delimiter * @param fieldSeparator the delimiter for the CSV files * @param nullsAsEmpty if nulls should be written as blank instead of '(null)' * @param columns a list of columns to include in the export * @throws IOException if the target file cannot be written */ @ScriptApi - public static void writeCsv(Table[] sources, String destPath, boolean compressed, - DBTimeZone timeZone, String tableSeparator, char fieldSeparator, boolean nullsAsEmpty, - String... columns) throws IOException { - BufferedWriter out = (compressed - ? new BufferedWriter( - new OutputStreamWriter(new BzipFileOutputStream(destPath + ".bz2"))) - : new BufferedWriter(new FileWriter(destPath))); + public static void writeCsv(Table[] sources, String destPath, boolean compressed, DBTimeZone timeZone, + String tableSeparator, char fieldSeparator, boolean nullsAsEmpty, String... columns) throws IOException { + BufferedWriter out = + (compressed ? 
new BufferedWriter(new OutputStreamWriter(new BzipFileOutputStream(destPath + ".bz2"))) + : new BufferedWriter(new FileWriter(destPath))); if (columns.length == 0) { List columnNames = sources[0].getDefinition().getColumnNames(); @@ -797,8 +763,7 @@ public static void writeCsv(Table[] sources, String destPath, boolean compressed CsvHelpers.writeCsvHeader(out, fieldSeparator, columns); for (Table source : sources) { - CsvHelpers.writeCsvContents(source, out, timeZone, null, nullsAsEmpty, fieldSeparator, - columns); + CsvHelpers.writeCsvContents(source, out, timeZone, null, nullsAsEmpty, fieldSeparator, columns); out.write(tableSeparator); } @@ -817,8 +782,7 @@ public static void writeCsv(Table[] sources, String destPath, boolean compressed * @return a Deephaven ColumnSource object */ public static ColumnSource colSource(Class clazz, Collection values) { - ArrayBackedColumnSource result = - ArrayBackedColumnSource.getMemoryColumnSource(values.size(), clazz); + ArrayBackedColumnSource result = ArrayBackedColumnSource.getMemoryColumnSource(values.size(), clazz); int resultIndex = 0; for (T value : values) { result.set(resultIndex++, value); @@ -835,8 +799,8 @@ public static ColumnSource colSource(Class clazz, Collection values */ @SuppressWarnings("unchecked") public static ColumnSource objColSource(T... values) { - ArrayBackedColumnSource result = ArrayBackedColumnSource - .getMemoryColumnSource(values.length, values.getClass().getComponentType()); + ArrayBackedColumnSource result = + ArrayBackedColumnSource.getMemoryColumnSource(values.length, values.getClass().getComponentType()); for (int i = 0; i < values.length; i++) { // noinspection unchecked result.set(i, values[i]); @@ -851,8 +815,8 @@ public static ColumnSource objColSource(T... values) { * @return a Deephaven ColumnSource object */ public static ColumnSource colSource(long... 
values) { - ArrayBackedColumnSource result = ArrayBackedColumnSource - .getMemoryColumnSource(values.length, values.getClass().getComponentType()); + ArrayBackedColumnSource result = + ArrayBackedColumnSource.getMemoryColumnSource(values.length, values.getClass().getComponentType()); for (int i = 0; i < values.length; i++) { result.set(i, values[i]); } @@ -867,8 +831,8 @@ public static ColumnSource colSource(long... values) { * @return a Deephaven ColumnSource object */ public static ColumnSource colSource(int... values) { - ArrayBackedColumnSource result = ArrayBackedColumnSource - .getMemoryColumnSource(values.length, values.getClass().getComponentType()); + ArrayBackedColumnSource result = + ArrayBackedColumnSource.getMemoryColumnSource(values.length, values.getClass().getComponentType()); for (int i = 0; i < values.length; i++) { result.set(i, values[i]); } @@ -883,8 +847,8 @@ public static ColumnSource colSource(int... values) { * @return a Deephaven ColumnSource object */ public static ColumnSource colSource(short... values) { - ArrayBackedColumnSource result = ArrayBackedColumnSource - .getMemoryColumnSource(values.length, values.getClass().getComponentType()); + ArrayBackedColumnSource result = + ArrayBackedColumnSource.getMemoryColumnSource(values.length, values.getClass().getComponentType()); for (int i = 0; i < values.length; i++) { result.set(i, values[i]); } @@ -899,8 +863,8 @@ public static ColumnSource colSource(short... values) { * @return a Deephaven ColumnSource object */ public static ColumnSource colSource(byte... values) { - ArrayBackedColumnSource result = ArrayBackedColumnSource - .getMemoryColumnSource(values.length, values.getClass().getComponentType()); + ArrayBackedColumnSource result = + ArrayBackedColumnSource.getMemoryColumnSource(values.length, values.getClass().getComponentType()); for (int i = 0; i < values.length; i++) { result.set(i, values[i]); } @@ -915,8 +879,8 @@ public static ColumnSource colSource(byte... 
values) { * @return a Deephaven ColumnSource object */ public static ColumnSource colSource(char... values) { - ArrayBackedColumnSource result = ArrayBackedColumnSource - .getMemoryColumnSource(values.length, values.getClass().getComponentType()); + ArrayBackedColumnSource result = + ArrayBackedColumnSource.getMemoryColumnSource(values.length, values.getClass().getComponentType()); for (int i = 0; i < values.length; i++) { result.set(i, values[i]); } @@ -931,8 +895,8 @@ public static ColumnSource colSource(char... values) { * @return a Deephaven ColumnSource object */ public static ColumnSource colSource(double... values) { - ArrayBackedColumnSource result = ArrayBackedColumnSource - .getMemoryColumnSource(values.length, values.getClass().getComponentType()); + ArrayBackedColumnSource result = + ArrayBackedColumnSource.getMemoryColumnSource(values.length, values.getClass().getComponentType()); for (int i = 0; i < values.length; i++) { result.set(i, values[i]); } @@ -947,8 +911,8 @@ public static ColumnSource colSource(double... values) { * @return a Deephaven ColumnSource object */ public static ColumnSource colSource(float... values) { - ArrayBackedColumnSource result = ArrayBackedColumnSource - .getMemoryColumnSource(values.length, values.getClass().getComponentType()); + ArrayBackedColumnSource result = + ArrayBackedColumnSource.getMemoryColumnSource(values.length, values.getClass().getComponentType()); for (int i = 0; i < values.length; i++) { result.set(i, values[i]); } @@ -1028,7 +992,7 @@ public static ColumnHolder col(String name, T... 
data) { } // noinspection unchecked return new ColumnHolder(name, data.getClass().getComponentType(), - data.getClass().getComponentType().getComponentType(), false, data); + data.getClass().getComponentType().getComponentType(), false, data); } /** @@ -1142,8 +1106,8 @@ public static Table emptyTable(long size) { return new QueryTable(Index.FACTORY.getFlatIndex(size), Collections.emptyMap()); } - private static , KT, VT> MT newMapFromLists(Class mapClass, - List keys, List values) { + private static , KT, VT> MT newMapFromLists(Class mapClass, List keys, + List values) { Require.eq(keys.size(), "keys.size()", values.size(), "values.size()"); MT result; try { @@ -1165,11 +1129,10 @@ private static , KT, VT> MT newMapFromLists(Class map * @param columnSources a List of the ColumnSource(s) * @return a Deephaven DynamicTable */ - public static DynamicTable newTable(long size, List names, - List columnSources) { + public static DynamicTable newTable(long size, List names, List columnSources) { // noinspection unchecked return new QueryTable(Index.FACTORY.getFlatIndex(size), - newMapFromLists(LinkedHashMap.class, names, columnSources)); + newMapFromLists(LinkedHashMap.class, names, columnSources)); } /** @@ -1194,7 +1157,7 @@ public static DynamicTable newTable(TableDefinition definition) { for (ColumnDefinition columnDefinition : definition.getColumnList()) { // noinspection unchecked columns.put(columnDefinition.getName(), ArrayBackedColumnSource.getMemoryColumnSource(0, - columnDefinition.getDataType(), columnDefinition.getComponentType())); + columnDefinition.getDataType(), columnDefinition.getComponentType())); } return new QueryTable(definition, Index.FACTORY.getEmptyIndex(), columns); } @@ -1208,33 +1171,30 @@ public static DynamicTable newTable(TableDefinition definition) { public static DynamicTable newTable(ColumnHolder... 
columnHolders) { checkSizes(columnHolders); Index index = getIndex(columnHolders); - Map columns = - Stream.of(columnHolders).collect(COLUMN_HOLDER_LINKEDMAP_COLLECTOR); + Map columns = Stream.of(columnHolders).collect(COLUMN_HOLDER_LINKEDMAP_COLLECTOR); return new QueryTable(index, columns); } public static DynamicTable newTable(TableDefinition definition, ColumnHolder... columnHolders) { checkSizes(columnHolders); Index index = getIndex(columnHolders); - Map columns = - Stream.of(columnHolders).collect(COLUMN_HOLDER_LINKEDMAP_COLLECTOR); + Map columns = Stream.of(columnHolders).collect(COLUMN_HOLDER_LINKEDMAP_COLLECTOR); return new QueryTable(definition, index, columns); } private static void checkSizes(ColumnHolder[] columnHolders) { int[] sizes = Arrays.stream(columnHolders) - .mapToInt(x -> x.data == null ? 0 : Array.getLength(x.data)) - .toArray(); + .mapToInt(x -> x.data == null ? 0 : Array.getLength(x.data)) + .toArray(); if (Arrays.stream(sizes).anyMatch(size -> size != sizes[0])) { throw new IllegalArgumentException( - "All columns must have the same number of rows, but sizes are: " - + Arrays.toString(sizes)); + "All columns must have the same number of rows, but sizes are: " + Arrays.toString(sizes)); } } private static Index getIndex(ColumnHolder[] columnHolders) { return columnHolders.length == 0 ? 
Index.FACTORY.getEmptyIndex() - : Index.FACTORY.getFlatIndex(Array.getLength(columnHolders[0].data)); + : Index.FACTORY.getFlatIndex(Array.getLength(columnHolders[0].data)); } // region Time tables @@ -1327,8 +1287,7 @@ public static Table timeTable(long periodNanos) { * @return time table */ public static Table timeTable(long periodNanos, ReplayerInterface replayer) { - final TimeTable timeTable = - new TimeTable(Replayer.getTimeProvider(replayer), null, periodNanos); + final TimeTable timeTable = new TimeTable(Replayer.getTimeProvider(replayer), null, periodNanos); LiveTableMonitor.DEFAULT.addTable(timeTable); return timeTable; } @@ -1341,8 +1300,7 @@ public static Table timeTable(long periodNanos, ReplayerInterface replayer) { * @return time table */ public static Table timeTable(DBDateTime startTime, long periodNanos) { - final TimeTable timeTable = - new TimeTable(Replayer.getTimeProvider(null), startTime, periodNanos); + final TimeTable timeTable = new TimeTable(Replayer.getTimeProvider(null), startTime, periodNanos); LiveTableMonitor.DEFAULT.addTable(timeTable); return timeTable; } @@ -1355,10 +1313,8 @@ public static Table timeTable(DBDateTime startTime, long periodNanos) { * @param replayer data replayer * @return time table */ - public static Table timeTable(DBDateTime startTime, long periodNanos, - ReplayerInterface replayer) { - final TimeTable timeTable = - new TimeTable(Replayer.getTimeProvider(replayer), startTime, periodNanos); + public static Table timeTable(DBDateTime startTime, long periodNanos, ReplayerInterface replayer) { + final TimeTable timeTable = new TimeTable(Replayer.getTimeProvider(replayer), startTime, periodNanos); LiveTableMonitor.DEFAULT.addTable(timeTable); return timeTable; } @@ -1394,8 +1350,7 @@ public static Table timeTable(String startTime, long periodNanos, ReplayerInterf * @param periodNanos time interval between new row additions in nanoseconds. 
* @return time table */ - public static Table timeTable(TimeProvider timeProvider, DBDateTime startTime, - long periodNanos) { + public static Table timeTable(TimeProvider timeProvider, DBDateTime startTime, long periodNanos) { final TimeTable timeTable = new TimeTable(timeProvider, startTime, periodNanos); LiveTableMonitor.DEFAULT.addTable(timeTable); return timeTable; @@ -1409,17 +1364,15 @@ public static Table timeTable(TimeProvider timeProvider, DBDateTime startTime, * Concatenates multiple Deephaven Tables into a single Table. * *

    - * The resultant table will have rows from the same table together, in the order they are - * specified as inputs. + * The resultant table will have rows from the same table together, in the order they are specified as inputs. *

    * *

    - * When ticking tables grow, they may run out of the 'pre-allocated' space for newly added rows. - * When more key- space is needed, tables in higher key-space are shifted to yet higher - * key-space to make room for new rows. Shifts are handled efficiently, but some downstream - * operations generate a linear O(n) amount of work per shifted row. When possible, one should - * favor ordering the constituent tables first by static/non-ticking sources followed by tables - * that are expected to grow at slower rates, and finally by tables that grow without bound. + * When ticking tables grow, they may run out of the 'pre-allocated' space for newly added rows. When more key- + * space is needed, tables in higher key-space are shifted to yet higher key-space to make room for new rows. Shifts + * are handled efficiently, but some downstream operations generate a linear O(n) amount of work per shifted row. + * When possible, one should favor ordering the constituent tables first by static/non-ticking sources followed by + * tables that are expected to grow at slower rates, and finally by tables that grow without bound. *

    * * @param theList a List of Tables to be concatenated @@ -1433,17 +1386,15 @@ public static Table merge(List
    theList) { * Concatenates multiple Deephaven Tables into a single Table. * *

    - * The resultant table will have rows from the same table together, in the order they are - * specified as inputs. + * The resultant table will have rows from the same table together, in the order they are specified as inputs. *

    * *

    - * When ticking tables grow, they may run out of the 'pre-allocated' space for newly added rows. - * When more key- space is needed, tables in higher key-space are shifted to yet higher - * key-space to make room for new rows. Shifts are handled efficiently, but some downstream - * operations generate a linear O(n) amount of work per shifted row. When possible, one should - * favor ordering the constituent tables first by static/non-ticking sources followed by tables - * that are expected to grow at slower rates, and finally by tables that grow without bound. + * When ticking tables grow, they may run out of the 'pre-allocated' space for newly added rows. When more key- + * space is needed, tables in higher key-space are shifted to yet higher key-space to make room for new rows. Shifts + * are handled efficiently, but some downstream operations generate a linear O(n) amount of work per shifted row. + * When possible, one should favor ordering the constituent tables first by static/non-ticking sources followed by + * tables that are expected to grow at slower rates, and finally by tables that grow without bound. *

    * * @param tables a Collection of Tables to be concatenated @@ -1457,17 +1408,15 @@ public static Table merge(Collection
    tables) { * Concatenates multiple Deephaven Tables into a single Table. * *

    - * The resultant table will have rows from the same table together, in the order they are - * specified as inputs. + * The resultant table will have rows from the same table together, in the order they are specified as inputs. *

    * *

    - * When ticking tables grow, they may run out of the 'pre-allocated' space for newly added rows. - * When more key- space is needed, tables in higher key-space are shifted to yet higher - * key-space to make room for new rows. Shifts are handled efficiently, but some downstream - * operations generate a linear O(n) amount of work per shifted row. When possible, one should - * favor ordering the constituent tables first by static/non-ticking sources followed by tables - * that are expected to grow at slower rates, and finally by tables that grow without bound. + * When ticking tables grow, they may run out of the 'pre-allocated' space for newly added rows. When more key- + * space is needed, tables in higher key-space are shifted to yet higher key-space to make room for new rows. Shifts + * are handled efficiently, but some downstream operations generate a linear O(n) amount of work per shifted row. + * When possible, one should favor ordering the constituent tables first by static/non-ticking sources followed by + * tables that are expected to grow at slower rates, and finally by tables that grow without bound. *

    * * @param tables a list of Tables to be concatenated @@ -1475,48 +1424,41 @@ public static Table merge(Collection
    tables) { */ public static Table merge(Table... tables) { return QueryPerformanceRecorder.withNugget("merge", () -> { - // TODO (deephaven/deephaven-core/issues/257): When we have a new Table proxy - // implementation, we should reintroduce remote merge for proxies. - // If all of the tables are proxies, then we should ship this request over rather than - // trying to do it locally. + // TODO (deephaven/deephaven-core/issues/257): When we have a new Table proxy implementation, we should + // reintroduce remote merge for proxies. + // If all of the tables are proxies, then we should ship this request over rather than trying to do it + // locally. // Table proxyMerge = io.deephaven.db.tables.utils.TableTools.mergeByProxy(tables); // if (proxyMerge != null) { // return proxyMerge; // } - final List
    tableList = - TableToolsMergeHelper.getTablesToMerge(Arrays.stream(tables), tables.length); + final List
    tableList = TableToolsMergeHelper.getTablesToMerge(Arrays.stream(tables), tables.length); if (tableList == null || tableList.isEmpty()) { throw new IllegalArgumentException("no tables provided to merge"); } - return TableToolsMergeHelper.mergeInternal(tableList.get(0).getDefinition(), tableList, - null); + return TableToolsMergeHelper.mergeInternal(tableList.get(0).getDefinition(), tableList, null); }); } /** - * Concatenates multiple sorted Deephaven Tables into a single Table sorted by the specified key - * column. + * Concatenates multiple sorted Deephaven Tables into a single Table sorted by the specified key column. *

    - * The input tables must each individually be sorted by keyColumn, otherwise results are - * undefined. + * The input tables must each individually be sorted by keyColumn, otherwise results are undefined. * * @param tables sorted Tables to be concatenated * @param keyColumn the column to use when sorting the concatenated results * @return a Deephaven table object */ - public static Table mergeSorted(@SuppressWarnings("SameParameterValue") String keyColumn, - Table... tables) { + public static Table mergeSorted(@SuppressWarnings("SameParameterValue") String keyColumn, Table... tables) { return mergeSorted(keyColumn, Arrays.asList(tables)); } /** - * Concatenates multiple sorted Deephaven Tables into a single Table sorted by the specified key - * column. + * Concatenates multiple sorted Deephaven Tables into a single Table sorted by the specified key column. *

    - * The input tables must each individually be sorted by keyColumn, otherwise results are - * undefined. + * The input tables must each individually be sorted by keyColumn, otherwise results are undefined. * * @param tables a Collection of sorted Tables to be concatenated * @param keyColumn the column to use when sorting the concatenated results @@ -1529,11 +1471,10 @@ public static Table mergeSorted(String keyColumn, Collection

    tables) { /////////// Other Utilities ///////////////// /** - * Produce a new table with all the columns of this table, in the same order, but with - * {@code double} and {@code float} columns rounded to {@code long}s. + * Produce a new table with all the columns of this table, in the same order, but with {@code double} and + * {@code float} columns rounded to {@code long}s. * - * @return The new {@code Table}, with all {@code double} and {@code float} columns rounded to - * {@code long}s. + * @return The new {@code Table}, with all {@code double} and {@code float} columns rounded to {@code long}s. */ @ScriptApi public static Table roundDecimalColumns(Table table) { @@ -1544,17 +1485,15 @@ public static Table roundDecimalColumns(Table table) { columnsToRound.add(columnDefinition.getName()); } } - return roundDecimalColumns(table, - columnsToRound.toArray(new String[columnsToRound.size()])); + return roundDecimalColumns(table, columnsToRound.toArray(new String[columnsToRound.size()])); } /** - * Produce a new table with all the columns of this table, in the same order, but with all - * {@code double} and {@code float} columns rounded to {@code long}s, except for the specified - * {@code columnsNotToRound}. + * Produce a new table with all the columns of this table, in the same order, but with all {@code double} and + * {@code float} columns rounded to {@code long}s, except for the specified {@code columnsNotToRound}. * - * @param columnsNotToRound The names of the {@code double} and {@code float} columns not - * to round to {@code long}s + * @param columnsNotToRound The names of the {@code double} and {@code float} columns not to round to + * {@code long}s * @return The new {@code Table}, with columns modified as explained above */ @ScriptApi @@ -1566,24 +1505,21 @@ public static Table roundDecimalColumnsExcept(Table table, String... 
columnsNotT for (ColumnDefinition columnDefinition : table.getDefinition().getColumns()) { Class type = columnDefinition.getDataType(); String colName = columnDefinition.getName(); - if ((type.equals(double.class) || type.equals(float.class)) - && !columnsNotToRoundSet.contains(colName)) { + if ((type.equals(double.class) || type.equals(float.class)) && !columnsNotToRoundSet.contains(colName)) { columnsToRound.add(colName); } } - return roundDecimalColumns(table, - columnsToRound.toArray(new String[columnsToRound.size()])); + return roundDecimalColumns(table, columnsToRound.toArray(new String[columnsToRound.size()])); } /** - * Produce a new table with all the columns of this table, in the same order, but with - * {@code double} and {@code float} columns rounded to {@code long}s. + * Produce a new table with all the columns of this table, in the same order, but with {@code double} and + * {@code float} columns rounded to {@code long}s. * * @param columns The names of the {@code double} and {@code float} columns to round. * @return The new {@code Table}, with the specified columns rounded to {@code long}s. - * @throws java.lang.IllegalArgumentException If {@code columns} is null, or if one of the - * specified {@code columns} is neither a {@code double} column nor a {@code float} - * column. + * @throws java.lang.IllegalArgumentException If {@code columns} is null, or if one of the specified {@code columns} + * is neither a {@code double} column nor a {@code float} column. */ @ScriptApi public static Table roundDecimalColumns(Table table, String... columns) { @@ -1594,8 +1530,7 @@ public static Table roundDecimalColumns(Table table, String... 
columns) { for (String colName : columns) { Class colType = table.getColumn(colName).getType(); if (!(colType.equals(double.class) || colType.equals(float.class))) - throw new IllegalArgumentException( - "Column \"" + colName + "\" is not a decimal column!"); + throw new IllegalArgumentException("Column \"" + colName + "\" is not a decimal column!"); updateDescriptions.add(colName + "=round(" + colName + ')'); } return table.updateView(updateDescriptions.toArray(new String[updateDescriptions.size()])); @@ -1606,8 +1541,8 @@ public static Table roundDecimalColumns(Table table, String... columns) { * Compute the SHA256 hash of the input table. *

    *

    - * The hash is computed using every value in each row, using toString for unrecognized objects. - * The hash also includes the input table definition column names and types. + * The hash is computed using every value in each row, using toString for unrecognized objects. The hash also + * includes the input table definition column names and types. *

    * * @param source The table to fingerprint @@ -1620,12 +1555,10 @@ public static byte[] computeFingerprint(Table source) throws IOException { md = MessageDigest.getInstance("SHA-256"); } catch (NoSuchAlgorithmException e) { throw new IllegalStateException( - "Runtime does not suport SHA-256 hashing required for resultsTable fingerprints.", - e); + "Runtime does not suport SHA-256 hashing required for resultsTable fingerprints.", e); } - final DataOutputStream osw = - new DataOutputStream(new DigestOutputStream(new NullOutputStream(), md)); + final DataOutputStream osw = new DataOutputStream(new DigestOutputStream(new NullOutputStream(), md)); for (final ColumnSource col : source.getColumnSourceMap().values()) { processColumnForFingerprint(source.getIndex(), col, osw); @@ -1654,8 +1587,8 @@ public static String base64Fingerprint(Table source) throws IOException { return Base64.getEncoder().encodeToString(computeFingerprint(source)); } - private static void processColumnForFingerprint(OrderedKeys ok, ColumnSource col, - DataOutputStream outputStream) throws IOException { + private static void processColumnForFingerprint(OrderedKeys ok, ColumnSource col, DataOutputStream outputStream) + throws IOException { if (col.getType() == DBDateTime.class) { col = ReinterpretUtilities.dateTimeToLongSource(col); } @@ -1666,11 +1599,10 @@ private static void processColumnForFingerprint(OrderedKeys ok, ColumnSource switch (chunkType) { case Char: try (final ColumnSource.GetContext getContext = col.makeGetContext(chunkSize); - final OrderedKeys.Iterator okit = ok.getOrderedKeysIterator()) { + final OrderedKeys.Iterator okit = ok.getOrderedKeysIterator()) { while (okit.hasMore()) { final OrderedKeys chunkOk = okit.getNextOrderedKeysWithLength(chunkSize); - final CharChunk valuesChunk = - col.getChunk(getContext, chunkOk).asCharChunk(); + final CharChunk valuesChunk = col.getChunk(getContext, chunkOk).asCharChunk(); for (int ii = 0; ii < valuesChunk.size(); ++ii) { 
outputStream.writeChar(valuesChunk.get(ii)); } @@ -1679,11 +1611,10 @@ private static void processColumnForFingerprint(OrderedKeys ok, ColumnSource break; case Byte: try (final ColumnSource.GetContext getContext = col.makeGetContext(chunkSize); - final OrderedKeys.Iterator okit = ok.getOrderedKeysIterator()) { + final OrderedKeys.Iterator okit = ok.getOrderedKeysIterator()) { while (okit.hasMore()) { final OrderedKeys chunkOk = okit.getNextOrderedKeysWithLength(chunkSize); - final ByteChunk valuesChunk = - col.getChunk(getContext, chunkOk).asByteChunk(); + final ByteChunk valuesChunk = col.getChunk(getContext, chunkOk).asByteChunk(); for (int ii = 0; ii < valuesChunk.size(); ++ii) { outputStream.writeByte(valuesChunk.get(ii)); } @@ -1692,11 +1623,11 @@ private static void processColumnForFingerprint(OrderedKeys ok, ColumnSource break; case Short: try (final ColumnSource.GetContext getContext = col.makeGetContext(chunkSize); - final OrderedKeys.Iterator okit = ok.getOrderedKeysIterator()) { + final OrderedKeys.Iterator okit = ok.getOrderedKeysIterator()) { while (okit.hasMore()) { final OrderedKeys chunkOk = okit.getNextOrderedKeysWithLength(chunkSize); final ShortChunk valuesChunk = - col.getChunk(getContext, chunkOk).asShortChunk(); + col.getChunk(getContext, chunkOk).asShortChunk(); for (int ii = 0; ii < valuesChunk.size(); ++ii) { outputStream.writeShort(valuesChunk.get(ii)); } @@ -1705,11 +1636,10 @@ private static void processColumnForFingerprint(OrderedKeys ok, ColumnSource break; case Int: try (final ColumnSource.GetContext getContext = col.makeGetContext(chunkSize); - final OrderedKeys.Iterator okit = ok.getOrderedKeysIterator()) { + final OrderedKeys.Iterator okit = ok.getOrderedKeysIterator()) { while (okit.hasMore()) { final OrderedKeys chunkOk = okit.getNextOrderedKeysWithLength(chunkSize); - final IntChunk valuesChunk = - col.getChunk(getContext, chunkOk).asIntChunk(); + final IntChunk valuesChunk = col.getChunk(getContext, chunkOk).asIntChunk(); for 
(int ii = 0; ii < valuesChunk.size(); ++ii) { outputStream.writeInt(valuesChunk.get(ii)); } @@ -1718,11 +1648,10 @@ private static void processColumnForFingerprint(OrderedKeys ok, ColumnSource break; case Long: try (final ColumnSource.GetContext getContext = col.makeGetContext(chunkSize); - final OrderedKeys.Iterator okit = ok.getOrderedKeysIterator()) { + final OrderedKeys.Iterator okit = ok.getOrderedKeysIterator()) { while (okit.hasMore()) { final OrderedKeys chunkOk = okit.getNextOrderedKeysWithLength(chunkSize); - final LongChunk valuesChunk = - col.getChunk(getContext, chunkOk).asLongChunk(); + final LongChunk valuesChunk = col.getChunk(getContext, chunkOk).asLongChunk(); for (int ii = 0; ii < valuesChunk.size(); ++ii) { outputStream.writeLong(valuesChunk.get(ii)); } @@ -1731,11 +1660,11 @@ private static void processColumnForFingerprint(OrderedKeys ok, ColumnSource break; case Float: try (final ColumnSource.GetContext getContext = col.makeGetContext(chunkSize); - final OrderedKeys.Iterator okit = ok.getOrderedKeysIterator()) { + final OrderedKeys.Iterator okit = ok.getOrderedKeysIterator()) { while (okit.hasMore()) { final OrderedKeys chunkOk = okit.getNextOrderedKeysWithLength(chunkSize); final FloatChunk valuesChunk = - col.getChunk(getContext, chunkOk).asFloatChunk(); + col.getChunk(getContext, chunkOk).asFloatChunk(); for (int ii = 0; ii < valuesChunk.size(); ++ii) { outputStream.writeFloat(valuesChunk.get(ii)); } @@ -1744,11 +1673,11 @@ private static void processColumnForFingerprint(OrderedKeys ok, ColumnSource break; case Double: try (final ColumnSource.GetContext getContext = col.makeGetContext(chunkSize); - final OrderedKeys.Iterator okit = ok.getOrderedKeysIterator()) { + final OrderedKeys.Iterator okit = ok.getOrderedKeysIterator()) { while (okit.hasMore()) { final OrderedKeys chunkOk = okit.getNextOrderedKeysWithLength(chunkSize); final DoubleChunk valuesChunk = - col.getChunk(getContext, chunkOk).asDoubleChunk(); + col.getChunk(getContext, 
chunkOk).asDoubleChunk(); for (int ii = 0; ii < valuesChunk.size(); ++ii) { outputStream.writeDouble(valuesChunk.get(ii)); } @@ -1757,14 +1686,13 @@ private static void processColumnForFingerprint(OrderedKeys ok, ColumnSource break; case Object: try (final ColumnSource.GetContext getContext = col.makeGetContext(chunkSize); - final OrderedKeys.Iterator okit = ok.getOrderedKeysIterator()) { + final OrderedKeys.Iterator okit = ok.getOrderedKeysIterator()) { while (okit.hasMore()) { final OrderedKeys chunkOk = okit.getNextOrderedKeysWithLength(chunkSize); final ObjectChunk valuesChunk = - col.getChunk(getContext, chunkOk).asObjectChunk(); + col.getChunk(getContext, chunkOk).asObjectChunk(); for (int ii = 0; ii < valuesChunk.size(); ++ii) { - outputStream - .writeChars(Objects.toString(valuesChunk.get(ii).toString())); + outputStream.writeChars(Objects.toString(valuesChunk.get(ii).toString())); } } } diff --git a/DB/src/main/java/io/deephaven/db/tables/utils/TableToolsMergeHelper.java b/DB/src/main/java/io/deephaven/db/tables/utils/TableToolsMergeHelper.java index 14b0e2334eb..fc91199ec36 100644 --- a/DB/src/main/java/io/deephaven/db/tables/utils/TableToolsMergeHelper.java +++ b/DB/src/main/java/io/deephaven/db/tables/utils/TableToolsMergeHelper.java @@ -23,58 +23,51 @@ public class TableToolsMergeHelper { public static Table mergeTableMap(LocalTableMap tableMap) { - final List
    tablesToMergeOrNull = - getTablesToMerge(tableMap.values().stream(), tableMap.size()); - final List
    tablesToMerge = - tablesToMergeOrNull == null ? Collections.emptyList() : tablesToMergeOrNull; + final List
    tablesToMergeOrNull = getTablesToMerge(tableMap.values().stream(), tableMap.size()); + final List
    tablesToMerge = tablesToMergeOrNull == null ? Collections.emptyList() : tablesToMergeOrNull; return mergeInternal(tableMap.getConstituentDefinitionOrErr(), tablesToMerge, tableMap); } /** * @param tableDef = The definition to apply to the result table. - * @param tables = The list of tables to merge -- all elements must be non-null and - * un-partitioned. + * @param tables = The list of tables to merge -- all elements must be non-null and un-partitioned. * @return A new table, containing all the rows from tables, respecting the input ordering. */ public static Table mergeInternal(TableDefinition tableDef, List
    tables, - NotificationQueue.Dependency parentDependency) { + NotificationQueue.Dependency parentDependency) { final Set targetColumnNames = - tableDef.getColumnStream().map(ColumnDefinition::getName).collect(Collectors.toSet()); + tableDef.getColumnStream().map(ColumnDefinition::getName).collect(Collectors.toSet()); boolean isStatic = true; for (int ti = 0; ti < tables.size(); ++ti) { // verify the column names are exactly the same as our target final TableDefinition definition = tables.get(ti).getDefinition(); - final Set columnNames = definition.getColumnStream() - .map(ColumnDefinition::getName).collect(Collectors.toSet()); + final Set columnNames = + definition.getColumnStream().map(ColumnDefinition::getName).collect(Collectors.toSet()); isStatic &= !tables.get(ti).isLive(); - if (!targetColumnNames.containsAll(columnNames) - || !columnNames.containsAll(targetColumnNames)) { + if (!targetColumnNames.containsAll(columnNames) || !columnNames.containsAll(targetColumnNames)) { final Set missingTargets = new HashSet<>(targetColumnNames); missingTargets.removeAll(columnNames); columnNames.removeAll(targetColumnNames); if (missingTargets.isEmpty()) { throw new UnsupportedOperationException( - "Column mismatch for table " + ti + ", additional columns: " + columnNames); + "Column mismatch for table " + ti + ", additional columns: " + columnNames); } else if (columnNames.isEmpty()) { throw new UnsupportedOperationException( - "Column mismatch for table " + ti + ", missing columns: " + missingTargets); + "Column mismatch for table " + ti + ", missing columns: " + missingTargets); } else { - throw new UnsupportedOperationException( - "Column mismatch for table " + ti + ", missing columns: " + missingTargets - + ", additional columns: " + columnNames); + throw new UnsupportedOperationException("Column mismatch for table " + ti + ", missing columns: " + + missingTargets + ", additional columns: " + columnNames); } } - // TODO: Make this check better? 
It's slightly too permissive, if we want identical - // column sets, and not permissive enough if we want the "merge non-conflicting defs - // with nulls" behavior. + // TODO: Make this check better? It's slightly too permissive, if we want identical column sets, and not + // permissive enough if we want the "merge non-conflicting defs with nulls" behavior. try { definition.checkCompatibility(tableDef); } catch (RuntimeException e) { - throw new UnsupportedOperationException("Table definition mismatch for table " + ti, - e); + throw new UnsupportedOperationException("Table definition mismatch for table " + ti, e); } } @@ -82,8 +75,7 @@ public static Table mergeInternal(TableDefinition tableDef, List
    tables, LiveTableMonitor.DEFAULT.checkInitiateTableOperation(); } - final UnionSourceManager unionSourceManager = - new UnionSourceManager(tableDef, parentDependency); + final UnionSourceManager unionSourceManager = new UnionSourceManager(tableDef, parentDependency); final QueryTable queryTable = unionSourceManager.getResult(); for (Table table : tables) { @@ -99,10 +91,9 @@ public static Table mergeInternal(TableDefinition tableDef, List
    tables, } /** - * Given a table that consists of only UnionColumnSources, produce a list of new tables that - * represent each one of the unioned sources. This basically will undo the merge operation, so - * that the consituents can be reused in an new merge operation. The UnionSourceManager must be - * shared across all the columns. + * Given a table that consists of only UnionColumnSources, produce a list of new tables that represent each one of + * the unioned sources. This basically will undo the merge operation, so that the consituents can be reused in an + * new merge operation. The UnionSourceManager must be shared across all the columns. * * @param table that has only UnionSourceColumns (with the same manager) * @return the list of component tables from the manager. @@ -122,7 +113,7 @@ private static Collection getComponentTables(Table table) { unionSourceManager = thisUnionSourceManager; } else if (unionSourceManager != thisUnionSourceManager) { throw new RuntimeException( - "A table exists with columns from multiple UnionSourceManagers, this doesn't make any sense."); + "A table exists with columns from multiple UnionSourceManagers, this doesn't make any sense."); } } @@ -130,14 +121,11 @@ private static Collection getComponentTables(Table table) { throw new IllegalStateException("UnionSourceManager is null!"); if (unionSourceManager.getColumnSources().equals(columnSourceMap)) { - // we've got the original merged table, we can just go ahead and use the components as - // is + // we've got the original merged table, we can just go ahead and use the components as is return unionSourceManager.getComponentTables(); } else { - // the merged table has had things renamed, viewed, dropped or otherwise messed with; so - // we need to create - // brand new component tables, using the sources from the original that parallel the - // merged table + // the merged table has had things renamed, viewed, dropped or otherwise messed with; so we need to create + // 
brand new component tables, using the sources from the original that parallel the merged table Collection
    componentTables = unionSourceManager.getComponentTables(); ArrayList
    result = new ArrayList<>(); int componentIndex = 0; @@ -154,7 +142,7 @@ private static Collection getComponentTables(Table table) { QueryTable viewedTable = new QueryTable(component.getIndex(), componentSources); if (component.isRefreshing()) { component.listenForUpdates( - new BaseTable.ShiftAwareListenerImpl("union view", component, viewedTable)); + new BaseTable.ShiftAwareListenerImpl("union view", component, viewedTable)); } result.add(viewedTable); } @@ -170,8 +158,7 @@ static List
    getTablesToMerge(Stream
    tables, int sizeEstimate) { if (table == null) { return; } - if (table instanceof UncoalescedTable - || table instanceof TableMapProxyHandler.TableMapProxy) { + if (table instanceof UncoalescedTable || table instanceof TableMapProxyHandler.TableMapProxy) { table = table.coalesce(); } @@ -190,8 +177,7 @@ static List
    getTablesToMerge(Stream
    tables, int sizeEstimate) { } /** - * Determine if table is the output of a previous merge operation and can be broken into - * constituents. + * Determine if table is the output of a previous merge operation and can be broken into constituents. * * @param table the table to check * @return true if the table is the result of a previous merge. @@ -212,8 +198,7 @@ private static boolean canBreakOutUnionedTable(Table table) { return false; } - final UnionColumnSource columnSource = - (UnionColumnSource) columnSourceMap.values().iterator().next(); + final UnionColumnSource columnSource = (UnionColumnSource) columnSourceMap.values().iterator().next(); return columnSource.getUnionSourceManager().isUsingComponentsSafe(); } } diff --git a/DB/src/main/java/io/deephaven/db/tables/utils/WhereClause.java b/DB/src/main/java/io/deephaven/db/tables/utils/WhereClause.java index dc9cb2566ea..f9029e3e5e4 100644 --- a/DB/src/main/java/io/deephaven/db/tables/utils/WhereClause.java +++ b/DB/src/main/java/io/deephaven/db/tables/utils/WhereClause.java @@ -31,11 +31,9 @@ static public SelectFilter createDisjunctiveFilter(Collection[] fi ArrayList clauses = new ArrayList<>(); for (Collection clause : filtersToApply) { - clauses.add(ConjunctiveFilter - .makeConjunctiveFilter(clause.toArray(new SelectFilter[clause.size()]))); + clauses.add(ConjunctiveFilter.makeConjunctiveFilter(clause.toArray(new SelectFilter[clause.size()]))); } - return DisjunctiveFilter - .makeDisjunctiveFilter(clauses.toArray(new SelectFilter[clauses.size()])); + return DisjunctiveFilter.makeDisjunctiveFilter(clauses.toArray(new SelectFilter[clauses.size()])); } } diff --git a/DB/src/main/java/io/deephaven/db/tables/utils/WindowCheck.java b/DB/src/main/java/io/deephaven/db/tables/utils/WindowCheck.java index a8c6619e93b..b8afab12bd0 100644 --- a/DB/src/main/java/io/deephaven/db/tables/utils/WindowCheck.java +++ b/DB/src/main/java/io/deephaven/db/tables/utils/WindowCheck.java @@ -32,27 +32,23 @@ private WindowCheck() 
{} *

    * *

    - * If the timestamp is greater than or equal to the curent time - windowNanos, then the result - * column is true. If the timestamp is null; the InWindow value is null. + * If the timestamp is greater than or equal to the current time - windowNanos, then the result column is true. If + * the timestamp is null; the InWindow value is null. *

    * *

    - * The resultant table ticks whenever the input table ticks, or modifies a row when it passes - * out of the window. + * The resultant table ticks whenever the input table ticks, or modifies a row when it passes out of the window. *

    * * @param table the input table * @param timestampColumn the timestamp column to monitor in table - * @param windowNanos how many nanoseconds in the past a timestamp can be before it is out of - * the window + * @param windowNanos how many nanoseconds in the past a timestamp can be before it is out of the window * @param inWindowColumn the name of the new Boolean column. * @return a new table that contains an in-window Boolean column */ @SuppressWarnings("unused") - public static Table addTimeWindow(Table table, String timestampColumn, long windowNanos, - String inWindowColumn) { - return addTimeWindowInternal(null, table, timestampColumn, windowNanos, inWindowColumn, - true).first; + public static Table addTimeWindow(Table table, String timestampColumn, long windowNanos, String inWindowColumn) { + return addTimeWindowInternal(null, table, timestampColumn, windowNanos, inWindowColumn, true).first; } private static class WindowListenerRecorder extends ListenerRecorder { @@ -62,24 +58,22 @@ private WindowListenerRecorder(DynamicTable parent, DynamicTable dependent) { } /** - * See {@link WindowCheck#addTimeWindow(Table, String, long, String)} for a description, the - * internal version gives you access to the TimeWindowListener for unit testing purposes. + * See {@link WindowCheck#addTimeWindow(Table, String, long, String)} for a description, the internal version gives + * you access to the TimeWindowListener for unit testing purposes. 
* * @param addToMonitor should we add this to the LiveTableMonitor * @return a pair of the result table and the TimeWindowListener that drives it */ - static Pair addTimeWindowInternal(TimeProvider timeProvider, - Table table, String timestampColumn, long windowNanos, String inWindowColumn, - boolean addToMonitor) { - final Map resultColumns = - new LinkedHashMap<>(table.getColumnSourceMap()); + static Pair addTimeWindowInternal(TimeProvider timeProvider, Table table, + String timestampColumn, long windowNanos, String inWindowColumn, boolean addToMonitor) { + final Map resultColumns = new LinkedHashMap<>(table.getColumnSourceMap()); final InWindowColumnSource inWindowColumnSource; if (timeProvider == null) { inWindowColumnSource = new InWindowColumnSource(table, timestampColumn, windowNanos); } else { - inWindowColumnSource = new InWindowColumnSourceWithTimeProvider(timeProvider, table, - timestampColumn, windowNanos); + inWindowColumnSource = + new InWindowColumnSourceWithTimeProvider(timeProvider, table, timestampColumn, windowNanos); } inWindowColumnSource.init(); resultColumns.put(inWindowColumn, inWindowColumnSource); @@ -88,10 +82,9 @@ static Pair addTimeWindowInternal(TimeProvider timePr if (table instanceof DynamicTable) { final DynamicTable dynamicSource = (DynamicTable) table; - final WindowListenerRecorder recorder = - new WindowListenerRecorder(dynamicSource, result); - final TimeWindowListener timeWindowListener = new TimeWindowListener(inWindowColumn, - inWindowColumnSource, recorder, dynamicSource, result); + final WindowListenerRecorder recorder = new WindowListenerRecorder(dynamicSource, result); + final TimeWindowListener timeWindowListener = + new TimeWindowListener(inWindowColumn, inWindowColumnSource, recorder, dynamicSource, result); recorder.setMergedListener(timeWindowListener); dynamicSource.listenForUpdates(recorder); table.getIndex().forAllLongs(timeWindowListener::addIndex); @@ -107,19 +100,15 @@ static Pair 
addTimeWindowInternal(TimeProvider timePr } /** - * The TimeWindowListener maintains a priority queue of rows that are within a configured - * window, when they pass out of the window, the InWindow column is set to false and a - * modification tick happens. + * The TimeWindowListener maintains a priority queue of rows that are within a configured window, when they pass out + * of the window, the InWindow column is set to false and a modification tick happens. * * It implements LiveTable, so that we can be inserted into the LiveTableMonitor. */ static class TimeWindowListener extends MergedListener implements LiveTable { private final InWindowColumnSource inWindowColumnSource; private final QueryTable result; - /** - * a priority queue of InWindow entries, with the least recent timestamps getting pulled out - * first. - */ + /** a priority queue of InWindow entries, with the least recent timestamps getting pulled out first. */ private final RAPriQueue priorityQueue; /** a map from table indices to our entries. */ private final TLongObjectHashMap indexToEntry; @@ -149,51 +138,47 @@ private static class Entry { @Override public String toString() { return "Entry{" + - "nanos=" + nanos + - ", index=" + index + - '}'; + "nanos=" + nanos + + ", index=" + index + + '}'; } } /** * Creates a TimeWindowListener. 
* - * @param inWindowColumnSource the resulting InWindowColumnSource, which contains the - * timestamp source + * @param inWindowColumnSource the resulting InWindowColumnSource, which contains the timestamp source * @param source the source table * @param result our initialized result table */ - private TimeWindowListener(final String inWindowColumnName, - final InWindowColumnSource inWindowColumnSource, - final ListenerRecorder recorder, final DynamicTable source, final QueryTable result) { - super(Collections.singleton(recorder), Collections.singleton(source), "WindowCheck", - result); + private TimeWindowListener(final String inWindowColumnName, final InWindowColumnSource inWindowColumnSource, + final ListenerRecorder recorder, final DynamicTable source, final QueryTable result) { + super(Collections.singleton(recorder), Collections.singleton(source), "WindowCheck", result); this.source = source; this.recorder = recorder; this.inWindowColumnSource = inWindowColumnSource; this.result = result; - this.priorityQueue = new RAPriQueue<>(1 + source.intSize("WindowCheck"), - new RAPriQueue.Adapter() { - @Override - public boolean less(Entry a, Entry b) { - return a.nanos < b.nanos; - } + this.priorityQueue = new RAPriQueue<>(1 + source.intSize("WindowCheck"), new RAPriQueue.Adapter() { + @Override + public boolean less(Entry a, Entry b) { + return a.nanos < b.nanos; + } - @Override - public void setPos(Entry el, int pos) { - el.pos = pos; - } + @Override + public void setPos(Entry el, int pos) { + el.pos = pos; + } - @Override - public int getPos(Entry el) { - return el.pos; - } - }, Entry.class); + @Override + public int getPos(Entry el) { + return el.pos; + } + }, Entry.class); this.indexToEntry = new TLongObjectHashMap<>(); - this.mcsTransformer = source.newModifiedColumnSetTransformer(result, source - .getColumnSourceMap().keySet().toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)); + this.mcsTransformer = source.newModifiedColumnSetTransformer(result, + 
source.getColumnSourceMap().keySet().toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)); this.mcsNewColumns = result.newModifiedColumnSet(inWindowColumnName); this.reusableModifiedColumnSet = new ModifiedColumnSet(this.mcsNewColumns); } @@ -211,8 +196,7 @@ protected void process() { upstream.shifted.apply((start, end, delta) -> { final Index subIndex = preShiftIndex.subindexByKey(start, end); - final Index.SearchIterator it = - delta < 0 ? subIndex.searchIterator() : subIndex.reverseIterator(); + final Index.SearchIterator it = delta < 0 ? subIndex.searchIterator() : subIndex.reverseIterator(); while (it.hasNext()) { final long idx = it.nextLong(); final Entry entry = indexToEntry.remove(idx); @@ -224,15 +208,12 @@ protected void process() { }); // TODO: improve performance with getChunk - // TODO: reinterpret inWindowColumnSource so that it compares longs instead of - // objects + // TODO: reinterpret inWindowColumnSource so that it compares longs instead of objects // figure out for all the modified indices if the timestamp or index changed upstream.forAllModified((oldIndex, newIndex) -> { - final DBDateTime currentTimestamp = - inWindowColumnSource.timeStampSource.get(newIndex); - final DBDateTime prevTimestamp = - inWindowColumnSource.timeStampSource.getPrev(oldIndex); + final DBDateTime currentTimestamp = inWindowColumnSource.timeStampSource.get(newIndex); + final DBDateTime prevTimestamp = inWindowColumnSource.timeStampSource.getPrev(oldIndex); if (!Objects.equals(currentTimestamp, prevTimestamp)) { updateIndex(newIndex, currentTimestamp); } @@ -252,8 +233,7 @@ protected void process() { // everything that was added, removed, or modified stays added removed or modified downstream.modifiedColumnSet = reusableModifiedColumnSet; if (downstream.modified.nonempty()) { - mcsTransformer.clearAndTransform(upstream.modifiedColumnSet, - downstream.modifiedColumnSet); + mcsTransformer.clearAndTransform(upstream.modifiedColumnSet, downstream.modifiedColumnSet); 
downstream.modifiedColumnSet.setAll(mcsNewColumns); } else { downstream.modifiedColumnSet.clear(); @@ -278,8 +258,8 @@ protected void process() { } /** - * Handles modified indices. If they are outside of the window, they need to be removed from - * the queue. If they are inside the window, they need to be (re)inserted into the queue. + * Handles modified indices. If they are outside of the window, they need to be removed from the queue. If they + * are inside the window, they need to be (re)inserted into the queue. */ private void updateIndex(final long index, DBDateTime currentTimestamp) { Entry entry = indexToEntry.remove(index); @@ -289,8 +269,7 @@ private void updateIndex(final long index, DBDateTime currentTimestamp) { } return; } - if (inWindowColumnSource.computeInWindow(currentTimestamp, - inWindowColumnSource.currentTime)) { + if (inWindowColumnSource.computeInWindow(currentTimestamp, inWindowColumnSource.currentTime)) { if (entry == null) { entry = new Entry(index, 0); } @@ -312,8 +291,7 @@ private void addIndex(long index) { if (currentTimestamp == null) { return; } - if (inWindowColumnSource.computeInWindow(currentTimestamp, - inWindowColumnSource.currentTime)) { + if (inWindowColumnSource.computeInWindow(currentTimestamp, inWindowColumnSource.currentTime)) { final Entry el = new Entry(index, currentTimestamp.getNanos()); priorityQueue.enter(el); indexToEntry.put(el.index, el); @@ -354,8 +332,7 @@ private Index recomputeModified() { break; } - if (inWindowColumnSource.computeInWindow(entry.nanos, - inWindowColumnSource.currentTime)) { + if (inWindowColumnSource.computeInWindow(entry.nanos, inWindowColumnSource.currentTime)) { break; } else { // take it out of the queue, and mark it as modified @@ -378,14 +355,12 @@ void validateQueue() { Arrays.stream(entries).mapToLong(entry -> entry.index).forEach(builder::addKey); final Index inQueue = builder.getIndex(); - Assert.eq(inQueue.size(), "inQueue.size()", priorityQueue.size(), - "priorityQueue.size()"); 
+ Assert.eq(inQueue.size(), "inQueue.size()", priorityQueue.size(), "priorityQueue.size()"); final boolean condition = inQueue.subsetOf(resultIndex); if (!condition) { // noinspection ConstantConditions - Assert.assertion(condition, "inQueue.subsetOf(resultIndex)", inQueue, "inQueue", - resultIndex, "resultIndex", inQueue.minus(resultIndex), - "inQueue.minus(resultIndex)"); + Assert.assertion(condition, "inQueue.subsetOf(resultIndex)", inQueue, "inQueue", resultIndex, + "resultIndex", inQueue.minus(resultIndex), "inQueue.minus(resultIndex)"); } } @@ -399,8 +374,8 @@ public void destroy() { private static class InWindowColumnSourceWithTimeProvider extends InWindowColumnSource { final private TimeProvider timeProvider; - InWindowColumnSourceWithTimeProvider(TimeProvider timeProvider, Table table, - String timestampColumn, long windowNanos) { + InWindowColumnSourceWithTimeProvider(TimeProvider timeProvider, Table table, String timestampColumn, + long windowNanos) { super(table, timestampColumn, windowNanos); this.timeProvider = Require.neqNull(timeProvider, "timeProvider"); } @@ -412,7 +387,7 @@ long getTimeNanos() { } private static class InWindowColumnSource extends AbstractColumnSource - implements MutableColumnSourceGetDefaults.ForBoolean { + implements MutableColumnSourceGetDefaults.ForBoolean { private final long windowNanos; private final ColumnSource timeStampSource; @@ -433,8 +408,8 @@ private static class InWindowColumnSource extends AbstractColumnSource } /** - * Initialize the first currentTime. Called outside the constructor, because subclasses may - * overload getTimeNanos(). + * Initialize the first currentTime. Called outside the constructor, because subclasses may overload + * getTimeNanos(). 
*/ private void init() { currentTime = getTimeNanos(); @@ -454,8 +429,7 @@ public Boolean get(long index) { public Boolean getPrev(long index) { final long currentStep = LogicalClock.DEFAULT.currentStep(); - final long time = - (clockStep < currentStep || clockStep == initialStep) ? currentTime : prevTime; + final long time = (clockStep < currentStep || clockStep == initialStep) ? currentTime : prevTime; // get the previous value from the underlying column source final DBDateTime tableTimeStamp = timeStampSource.getPrev(index); diff --git a/DB/src/main/java/io/deephaven/db/tables/verify/AppendOnlyAssertionFailure.java b/DB/src/main/java/io/deephaven/db/tables/verify/AppendOnlyAssertionFailure.java index 0985182a26e..e962cd42bbb 100644 --- a/DB/src/main/java/io/deephaven/db/tables/verify/AppendOnlyAssertionFailure.java +++ b/DB/src/main/java/io/deephaven/db/tables/verify/AppendOnlyAssertionFailure.java @@ -6,7 +6,6 @@ public class AppendOnlyAssertionFailure extends TableAssertionFailure { } AppendOnlyAssertionFailure(String description) { - super("Update to table violates append-only assertion! (Table description: " + description - + ")"); + super("Update to table violates append-only assertion! 
(Table description: " + description + ")"); } } diff --git a/DB/src/main/java/io/deephaven/db/tables/verify/AppendOnlyAssertionInstrumentedListenerAdapter.java b/DB/src/main/java/io/deephaven/db/tables/verify/AppendOnlyAssertionInstrumentedListenerAdapter.java index b51bc3021db..7c266d3e83f 100644 --- a/DB/src/main/java/io/deephaven/db/tables/verify/AppendOnlyAssertionInstrumentedListenerAdapter.java +++ b/DB/src/main/java/io/deephaven/db/tables/verify/AppendOnlyAssertionInstrumentedListenerAdapter.java @@ -3,23 +3,21 @@ import io.deephaven.db.v2.BaseTable; import io.deephaven.db.v2.DynamicTable; -public class AppendOnlyAssertionInstrumentedListenerAdapter - extends BaseTable.ShiftAwareListenerImpl { +public class AppendOnlyAssertionInstrumentedListenerAdapter extends BaseTable.ShiftAwareListenerImpl { private final String description; public AppendOnlyAssertionInstrumentedListenerAdapter(String description, DynamicTable parent, - DynamicTable dependent) { + DynamicTable dependent) { super( - "assertAppendOnly(" + (description == null ? "" : description) + ')', - parent, dependent); + "assertAppendOnly(" + (description == null ? 
"" : description) + ')', + parent, dependent); this.description = description; } @Override public void onUpdate(final Update upstream) { - if (upstream.removed.nonempty() || upstream.modified.nonempty() - || upstream.shifted.nonempty()) { + if (upstream.removed.nonempty() || upstream.modified.nonempty() || upstream.shifted.nonempty()) { if (description == null) { throw new AppendOnlyAssertionFailure(); } else { diff --git a/DB/src/main/java/io/deephaven/db/tables/verify/SortedAssertionFailure.java b/DB/src/main/java/io/deephaven/db/tables/verify/SortedAssertionFailure.java index 0987de420d0..29064608654 100644 --- a/DB/src/main/java/io/deephaven/db/tables/verify/SortedAssertionFailure.java +++ b/DB/src/main/java/io/deephaven/db/tables/verify/SortedAssertionFailure.java @@ -4,11 +4,10 @@ import org.jetbrains.annotations.Nullable; public class SortedAssertionFailure extends TableAssertionFailure { - SortedAssertionFailure(@Nullable String description, String column, SortingOrder order, - final String value, final String value2) { - super("Table violates sorted assertion" - + (description == null ? "" : ", table description=" + description) + ", column=" - + column + ", " + order + ", " + value + " is out of order with respect to " + value2 - + "!"); + SortedAssertionFailure(@Nullable String description, String column, SortingOrder order, final String value, + final String value2) { + super("Table violates sorted assertion" + (description == null ? 
"" : ", table description=" + description) + + ", column=" + column + ", " + order + ", " + value + " is out of order with respect to " + value2 + + "!"); } } diff --git a/DB/src/main/java/io/deephaven/db/tables/verify/SortedAssertionInstrumentedListenerAdapter.java b/DB/src/main/java/io/deephaven/db/tables/verify/SortedAssertionInstrumentedListenerAdapter.java index 6aed2422716..8ea6181a24a 100644 --- a/DB/src/main/java/io/deephaven/db/tables/verify/SortedAssertionInstrumentedListenerAdapter.java +++ b/DB/src/main/java/io/deephaven/db/tables/verify/SortedAssertionInstrumentedListenerAdapter.java @@ -26,35 +26,32 @@ public class SortedAssertionInstrumentedListenerAdapter extends BaseTable.ShiftA private final SortCheck sortCheck; public SortedAssertionInstrumentedListenerAdapter(String description, - DynamicTable parent, - DynamicTable dependent, - String columnName, - SortingOrder order) { + DynamicTable parent, + DynamicTable dependent, + String columnName, + SortingOrder order) { super( - "assertSorted(" + (description == null ? "" : description) + ", " + columnName + ", " - + order + ')', - parent, dependent); + "assertSorted(" + (description == null ? 
"" : description) + ", " + columnName + ", " + order + ')', + parent, dependent); this.description = description; this.column = columnName; this.order = order; parentIndex = parent.getIndex(); parentColumnSource = parent.getColumnSource(columnName); parentColumnSet = parent.newModifiedColumnSet(columnName); - sortCheck = - SortCheck.make(parentColumnSource.getChunkType(), order == SortingOrder.Descending); + sortCheck = SortCheck.make(parentColumnSource.getChunkType(), order == SortingOrder.Descending); } @Override public void onUpdate(final Update upstream) { final boolean modifiedRows = - upstream.modified.nonempty() && upstream.modifiedColumnSet.containsAny(parentColumnSet); + upstream.modified.nonempty() && upstream.modifiedColumnSet.containsAny(parentColumnSet); if (upstream.added.nonempty() || modifiedRows) { - final Index rowsOfInterest = - modifiedRows ? upstream.added.union(upstream.modified) : upstream.added; + final Index rowsOfInterest = modifiedRows ? upstream.added.union(upstream.modified) : upstream.added; try (final Index ignored = modifiedRows ? 
rowsOfInterest : null; - final Index toProcess = makeAdjacentIndex(rowsOfInterest)) { + final Index toProcess = makeAdjacentIndex(rowsOfInterest)) { Assert.assertion(toProcess.subsetOf(parentIndex), "toProcess.subsetOf(parentIndex)", - makeAdjacentIndex(rowsOfInterest), "toProcess", parentIndex, "parentIndex"); + makeAdjacentIndex(rowsOfInterest), "toProcess", parentIndex, "parentIndex"); doCheck(toProcess); } } @@ -65,24 +62,21 @@ private void doCheck(Index toProcess) { doCheckStatic(toProcess, parentColumnSource, sortCheck, description, column, order); } - public static void doCheckStatic(Index toProcess, ColumnSource parentColumnSource, - SortCheck sortCheck, String description, String column, SortingOrder order) { + public static void doCheckStatic(Index toProcess, ColumnSource parentColumnSource, SortCheck sortCheck, + String description, String column, SortingOrder order) { final int contextSize = (int) Math.min(CHUNK_SIZE, toProcess.size()); - try ( - final ChunkSource.GetContext getContext = - parentColumnSource.makeGetContext(contextSize); - final OrderedKeys.Iterator okIt = toProcess.getOrderedKeysIterator()) { + try (final ChunkSource.GetContext getContext = parentColumnSource.makeGetContext(contextSize); + final OrderedKeys.Iterator okIt = toProcess.getOrderedKeysIterator()) { while (okIt.hasMore()) { final OrderedKeys chunkOk = okIt.getNextOrderedKeysWithLength(contextSize); - final Chunk valuesChunk = - parentColumnSource.getChunk(getContext, chunkOk); + final Chunk valuesChunk = parentColumnSource.getChunk(getContext, chunkOk); final int firstUnsorted = sortCheck.sortCheck(valuesChunk); if (firstUnsorted >= 0) { - final String value1 = ChunkUtils.extractKeyStringFromChunk( - valuesChunk.getChunkType(), valuesChunk, firstUnsorted); - final String value2 = ChunkUtils.extractKeyStringFromChunk( - valuesChunk.getChunkType(), valuesChunk, firstUnsorted + 1); + final String value1 = ChunkUtils.extractKeyStringFromChunk(valuesChunk.getChunkType(), 
valuesChunk, + firstUnsorted); + final String value2 = ChunkUtils.extractKeyStringFromChunk(valuesChunk.getChunkType(), valuesChunk, + firstUnsorted + 1); throw new SortedAssertionFailure(description, column, order, value1, value2); } } @@ -91,12 +85,10 @@ public static void doCheckStatic(Index toProcess, ColumnSource parentColumnSo private Index makeAdjacentIndex(Index rowsOfInterest) { try (final Index inverted = parentIndex.invert(rowsOfInterest)) { - final Index.SequentialBuilder processBuilder = - Index.CURRENT_FACTORY.getSequentialBuilder(); + final Index.SequentialBuilder processBuilder = Index.CURRENT_FACTORY.getSequentialBuilder(); long lastPosition = parentIndex.size() - 1; long lastUsedPosition = 0; - for (ReadOnlyIndex.RangeIterator rangeIterator = inverted.rangeIterator(); rangeIterator - .hasNext();) { + for (ReadOnlyIndex.RangeIterator rangeIterator = inverted.rangeIterator(); rangeIterator.hasNext();) { rangeIterator.next(); long start = rangeIterator.currentRangeStart(); long end = rangeIterator.currentRangeEnd(); diff --git a/DB/src/main/java/io/deephaven/db/tables/verify/TableAssertions.java b/DB/src/main/java/io/deephaven/db/tables/verify/TableAssertions.java index 860acf2f34e..8d3d5faecfb 100644 --- a/DB/src/main/java/io/deephaven/db/tables/verify/TableAssertions.java +++ b/DB/src/main/java/io/deephaven/db/tables/verify/TableAssertions.java @@ -11,20 +11,18 @@ /** * Assert properties of a table. * - * The table assertions verify that a table meets certain properties. Presently, that the table is - * add only and that it is sorted by a particular column. The desired property is verified on the - * initial table, and if the table is refreshing then a listener is attached to ensure that the - * property remains true. + * The table assertions verify that a table meets certain properties. Presently, that the table is add only and that it + * is sorted by a particular column. 
The desired property is verified on the initial table, and if the table is + * refreshing then a listener is attached to ensure that the property remains true. * - * The returned table may have attributes set which allow the query engine to more efficiently - * perform downstream operations. + * The returned table may have attributes set which allow the query engine to more efficiently perform downstream + * operations. */ public class TableAssertions { private TableAssertions() {} /** - * Asserts that the {@code table} is append-only. If its rows are ever modified or removed, the - * query will crash. + * Asserts that the {@code table} is append-only. If its rows are ever modified or removed, the query will crash. *

    * This can be used to ensure the safety and stability of stateful operations. * @@ -36,13 +34,12 @@ public static Table assertAppendOnly(@NotNull Table table) { } /** - * Asserts that the {@code table} is append-only. If its rows are ever modified or removed, the - * query will crash. + * Asserts that the {@code table} is append-only. If its rows are ever modified or removed, the query will crash. *

    * This can be used to ensure the safety and stability of stateful operations. * - * @param description An optional description which will be included in the exception message if - * the assertion is violated. + * @param description An optional description which will be included in the exception message if the assertion is + * violated. * @param table The table to apply the assertion to * @return The provided {@code table}. */ @@ -60,51 +57,47 @@ public static Table assertAppendOnly(String description, @NotNull Table table) { return table; return QueryPerformanceRecorder.withNuggetThrowing( - "assertAppendOnly(" + (description == null ? "" : description) + ')', - () -> { - - final DynamicTable result = new QueryTable(dynamicTable.getDefinition(), - dynamicTable.getIndex(), dynamicTable.getColumnSourceMap()); - final ShiftAwareListener listener = - new AppendOnlyAssertionInstrumentedListenerAdapter(description, dynamicTable, - result); - dynamicTable.listenForUpdates(listener); - - return result; - }); + "assertAppendOnly(" + (description == null ? "" : description) + ')', + () -> { + + final DynamicTable result = new QueryTable(dynamicTable.getDefinition(), dynamicTable.getIndex(), + dynamicTable.getColumnSourceMap()); + final ShiftAwareListener listener = + new AppendOnlyAssertionInstrumentedListenerAdapter(description, dynamicTable, result); + dynamicTable.listenForUpdates(listener); + + return result; + }); } /** * Asserts that the {@code table} is sorted by the given column. * - * This allows range filters to utilize binary search instead of a linear scan of the table for - * the given column. + * This allows range filters to utilize binary search instead of a linear scan of the table for the given column. * * @param table The table to apply the assertion to * @param column The column that the table is sorted by. * @param order Whether the column is ascending or descending. * @return The provided {@code table}. 
*/ - public static Table assertSorted(@NotNull Table table, @NotNull final String column, - SortingOrder order) { + public static Table assertSorted(@NotNull Table table, @NotNull final String column, SortingOrder order) { return assertSorted(null, table, column, order); } /** * Asserts that the {@code table} is sorted by the given column. * - * This allows range filters to utilize binary search instead of a linear scan of the table for - * the given column. + * This allows range filters to utilize binary search instead of a linear scan of the table for the given column. * - * @param description An optional description which will be included in the exception message if - * the assertion is violated. + * @param description An optional description which will be included in the exception message if the assertion is + * violated. * @param table The table to apply the assertion to * @param column The column that the table is sorted by. * @param order Whether the column is ascending or descending. * @return The provided {@code table}. 
*/ - public static Table assertSorted(String description, @NotNull Table table, - @NotNull final String column, SortingOrder order) { + public static Table assertSorted(String description, @NotNull Table table, @NotNull final String column, + SortingOrder order) { // noinspection ConstantConditions if (table == null) { throw new IllegalArgumentException("The table cannot be null!"); @@ -113,8 +106,7 @@ public static Table assertSorted(String description, @NotNull Table table, // do the initial check final ColumnSource columnSource = table.getColumnSource(column); SortedAssertionInstrumentedListenerAdapter.doCheckStatic(table.getIndex(), columnSource, - SortCheck.make(columnSource.getChunkType(), order.isDescending()), description, column, - order); + SortCheck.make(columnSource.getChunkType(), order.isDescending()), description, column, order); if (!(table instanceof DynamicTable)) { @@ -128,37 +120,35 @@ public static Table assertSorted(String description, @NotNull Table table, } return QueryPerformanceRecorder.withNuggetThrowing( - "assertSorted(" + (description == null ? "" : description) + ", " + column + ", " - + order + ')', - () -> { - final DynamicTable result = - new QueryTable(dynamicTable.getIndex(), dynamicTable.getColumnSourceMap()); - final ShiftAwareListener listener = new SortedAssertionInstrumentedListenerAdapter( - description, dynamicTable, result, column, order); - dynamicTable.listenForUpdates(listener); - ((BaseTable) dynamicTable).copyAttributes(result, s -> true); - SortedColumnsAttribute.setOrderForColumn(result, column, order); - return result; - }); + "assertSorted(" + (description == null ? 
"" : description) + ", " + column + ", " + order + ')', + () -> { + final DynamicTable result = + new QueryTable(dynamicTable.getIndex(), dynamicTable.getColumnSourceMap()); + final ShiftAwareListener listener = new SortedAssertionInstrumentedListenerAdapter(description, + dynamicTable, result, column, order); + dynamicTable.listenForUpdates(listener); + ((BaseTable) dynamicTable).copyAttributes(result, s -> true); + SortedColumnsAttribute.setOrderForColumn(result, column, order); + return result; + }); } /* * * Some other things that might be nice: * - * assertIsNormalDecimal(Table, String... cols) assertPositive(Table, String... cols) - * assertNegative(Table, String... cols) assertNull(Table, String... cols) assertNotNull(Table, - * String... cols) assertFormula(Table, String... formulas) + * assertIsNormalDecimal(Table, String... cols) assertPositive(Table, String... cols) assertNegative(Table, + * String... cols) assertNull(Table, String... cols) assertNotNull(Table, String... cols) assertFormula(Table, + * String... 
formulas) * - * assertColumnEqual(Table, String col1, String col2) assertColumnGreater(Table, String col1, - * String col2) assertColumnLess(Table, String col1, String col2) assertColumnGeq(Table, String - * col1, String col2) assertColumnLeq(Table, String col1, String col2) + * assertColumnEqual(Table, String col1, String col2) assertColumnGreater(Table, String col1, String col2) + * assertColumnLess(Table, String col1, String col2) assertColumnGeq(Table, String col1, String col2) + * assertColumnLeq(Table, String col1, String col2) * - * assertColumnEqual(Table, double tolerance, String col1, String col2) - * assertColumnGreater(Table, double tolerance, String col1, String col2) - * assertColumnLess(Table, double tolerance, String col1, String col2) assertColumnGeq(Table, - * double tolerance, String col1, String col2) assertColumnLeq(Table, double tolerance, String - * col1, String col2) + * assertColumnEqual(Table, double tolerance, String col1, String col2) assertColumnGreater(Table, double tolerance, + * String col1, String col2) assertColumnLess(Table, double tolerance, String col1, String col2) + * assertColumnGeq(Table, double tolerance, String col1, String col2) assertColumnLeq(Table, double tolerance, + * String col1, String col2) * */ diff --git a/DB/src/main/java/io/deephaven/db/util/AbstractScriptSession.java b/DB/src/main/java/io/deephaven/db/util/AbstractScriptSession.java index 3e78c2ebde6..495183cf8b3 100644 --- a/DB/src/main/java/io/deephaven/db/util/AbstractScriptSession.java +++ b/DB/src/main/java/io/deephaven/db/util/AbstractScriptSession.java @@ -20,13 +20,12 @@ import java.util.*; /** - * This class exists to make all script sessions to be liveness artifacts, and provide a default - * implementation for evaluateScript which handles liveness and diffs in a consistent way. 
+ * This class exists to make all script sessions to be liveness artifacts, and provide a default implementation for + * evaluateScript which handles liveness and diffs in a consistent way. */ -public abstract class AbstractScriptSession extends LivenessScope - implements ScriptSession, VariableProvider { +public abstract class AbstractScriptSession extends LivenessScope implements ScriptSession, VariableProvider { public static final String CLASS_CACHE_LOCATION = Configuration.getInstance() - .getStringWithDefault("ScriptSession.classCacheDirectory", "/tmp/dh_class_cache"); + .getStringWithDefault("ScriptSession.classCacheDirectory", "/tmp/dh_class_cache"); public static void createScriptCache() { final File classCacheDirectory = new File(CLASS_CACHE_LOCATION); @@ -39,7 +38,7 @@ private static void createOrClearDirectory(final File directory) { } if (!directory.mkdirs()) { throw new UncheckedDeephavenException( - "Failed to create class cache directory " + directory.getAbsolutePath()); + "Failed to create class cache directory " + directory.getAbsolutePath()); } } @@ -58,23 +57,21 @@ protected AbstractScriptSession(boolean isDefaultScriptSession) { queryScope = newQueryScope(); queryLibrary = QueryLibrary.makeNewLibrary(); - compilerContext = - new CompilerTools.Context(classCacheDirectory, getClass().getClassLoader()) { - { - addClassSource(getFakeClassDestination()); - } + compilerContext = new CompilerTools.Context(classCacheDirectory, getClass().getClassLoader()) { + { + addClassSource(getFakeClassDestination()); + } - @Override - public File getFakeClassDestination() { - return classCacheDirectory; - } + @Override + public File getFakeClassDestination() { + return classCacheDirectory; + } - @Override - public String getClassPath() { - return classCacheDirectory.getAbsolutePath() + File.pathSeparatorChar - + super.getClassPath(); - } - }; + @Override + public String getClassPath() { + return classCacheDirectory.getAbsolutePath() + File.pathSeparatorChar + 
super.getClassPath(); + } + }; if (isDefaultScriptSession) { CompilerTools.setDefaultContext(compilerContext); @@ -82,8 +79,7 @@ public String getClassPath() { QueryLibrary.setDefaultLibrary(queryLibrary); } @SuppressWarnings("rawtypes") - ServiceLoader loader = - ServiceLoader.load(CompletionParseService.class); + ServiceLoader loader = ServiceLoader.load(CompletionParseService.class); @SuppressWarnings("rawtypes") final Iterator itr = loader.iterator(); if (itr.hasNext()) { @@ -103,8 +99,7 @@ public final Changes evaluateScript(final String script, final @Nullable String final CompilerTools.Context prevCompilerContext = CompilerTools.getContext(); final QueryScope prevQueryScope = QueryScope.getScope(); - // retain any objects which are created in the executed code, we'll release them when the - // script session closes + // retain any objects which are created in the executed code, we'll release them when the script session closes try (final SafeCloseable ignored = LivenessScopeStack.open(this, false)) { // point query scope static state to our session's state QueryScope.setScope(queryScope); @@ -150,8 +145,7 @@ public final Changes evaluateScript(final String script, final @Nullable String final ExportedObjectType type = ExportedObjectType.fromObject(value); if (type.isDisplayableInSwing()) { if (type != types.get(name)) { - // either the name no longer exists, or it has a new type, and we mark it as - // removed (see above) + // either the name no longer exists, or it has a new type, and we mark it as removed (see above) diff.removed.put(entry.getKey(), type); } } @@ -172,8 +166,8 @@ protected void destroy() { * Evaluates command in the context of the current ScriptSession. 
* * @param command the command to evaluate - * @param scriptName an optional script name, which may be ignored by the implementation, or - * used improve error messages or for other internal purposes + * @param scriptName an optional script name, which may be ignored by the implementation, or used improve error + * messages or for other internal purposes */ protected abstract void evaluate(String command, @Nullable String scriptName); diff --git a/DB/src/main/java/io/deephaven/db/util/ClassList.java b/DB/src/main/java/io/deephaven/db/util/ClassList.java index f22c2a83aa1..e9c71aad8cf 100644 --- a/DB/src/main/java/io/deephaven/db/util/ClassList.java +++ b/DB/src/main/java/io/deephaven/db/util/ClassList.java @@ -18,8 +18,7 @@ */ public class ClassList { @NotNull - public static Class[] readClassList(String resourceName) - throws IOException, ClassNotFoundException { + public static Class[] readClassList(String resourceName) throws IOException, ClassNotFoundException { final ArrayList classString = getClassStrings(resourceName); final Class[] classList = new Class[classString.size()]; @@ -31,7 +30,7 @@ public static Class[] readClassList(String resourceName) @NotNull public static Collection> readClassListAsCollection(String resourceName) - throws IOException, ClassNotFoundException { + throws IOException, ClassNotFoundException { final ArrayList classString = getClassStrings(resourceName); ArrayList> result = new ArrayList<>(classString.size()); @@ -51,8 +50,7 @@ private static ArrayList getClassStrings(String resourceName) throws IOE for (String resourceNameLocal : resourceNameAry) { - final InputStream pushListStream = - ClassList.class.getResourceAsStream("/" + resourceNameLocal); + final InputStream pushListStream = ClassList.class.getResourceAsStream("/" + resourceNameLocal); if (pushListStream == null) { throw new IOException("Could not open class list: " + resourceNameLocal); } @@ -63,8 +61,7 @@ private static ArrayList getClassStrings(String resourceName) 
throws IOE while ((c = file.readLine()) != null) { c = c.trim(); if (c.length() > 0 && c.charAt(0) != '#') { - // No idea why this was here, pretty sure it's unnecessary: c = c.replace(" ", - // ""); + // No idea why this was here, pretty sure it's unnecessary: c = c.replace(" ", ""); classString.add(c); } } diff --git a/DB/src/main/java/io/deephaven/db/util/ColumnFormattingValues.java b/DB/src/main/java/io/deephaven/db/util/ColumnFormattingValues.java index f32c48c80ce..430703b1375 100644 --- a/DB/src/main/java/io/deephaven/db/util/ColumnFormattingValues.java +++ b/DB/src/main/java/io/deephaven/db/util/ColumnFormattingValues.java @@ -16,9 +16,8 @@ public interface ColumnFormattingValues { * @return true if the columnName is a formatting column; false otherwise */ static boolean isFormattingColumn(String columnName) { - return columnName.endsWith(TABLE_FORMAT_NAME) - || columnName.endsWith(TABLE_NUMERIC_FORMAT_NAME) - || columnName.endsWith(TABLE_DATE_FORMAT_NAME); + return columnName.endsWith(TABLE_FORMAT_NAME) || columnName.endsWith(TABLE_NUMERIC_FORMAT_NAME) + || columnName.endsWith(TABLE_DATE_FORMAT_NAME); } /** diff --git a/DB/src/main/java/io/deephaven/db/util/DBColorUtil.java b/DB/src/main/java/io/deephaven/db/util/DBColorUtil.java index edc117c0b86..03ee72825ad 100644 --- a/DB/src/main/java/io/deephaven/db/util/DBColorUtil.java +++ b/DB/src/main/java/io/deephaven/db/util/DBColorUtil.java @@ -10,15 +10,13 @@ /** * Colors moved to {@link Color}. They are left in here for backwards compatibility. *

    - * Colors are encoded into longs, the 32 least significant bits representing the foreground color - * and the 32 most significant bits the background color. The 24 least significant bits of each - * chunk hold the color's RGB values. + * Colors are encoded into longs, the 32 least significant bits representing the foreground color and the 32 most + * significant bits the background color. The 24 least significant bits of each chunk hold the color's RGB values. *

    * The 25th bit is flipped. This distinguishes between no formatting (0L) and black.
    - * For foreground colors, one of the remaining 7 bits denotes no foreground color overriding when - * the cell is highlighted in the table. This means the foreground color will stay the same when the - * cell is highlighted. TODO (deephaven/deephaven-core/issues/175): Move this to a new module and - * package + * For foreground colors, one of the remaining 7 bits denotes no foreground color overriding when the cell is + * highlighted in the table. This means the foreground color will stay the same when the cell is highlighted. TODO + * (deephaven/deephaven-core/issues/175): Move this to a new module and package */ @SuppressWarnings({"UnusedDeclaration", "WeakerAccess"}) public class DBColorUtil extends DBColorUtilImpl { diff --git a/DB/src/main/java/io/deephaven/db/util/DBColorUtilImpl.java b/DB/src/main/java/io/deephaven/db/util/DBColorUtilImpl.java index 4d0b22d0270..2e79cdc092a 100644 --- a/DB/src/main/java/io/deephaven/db/util/DBColorUtilImpl.java +++ b/DB/src/main/java/io/deephaven/db/util/DBColorUtilImpl.java @@ -11,11 +11,10 @@ import static io.deephaven.util.QueryConstants.NULL_LONG; /** - * Formatting methods from DBColorUtil. Exists so that we can statically import the DBColorUtil - * methods without importing the color fields. TODO: remove once {@link DBColorUtil} field and - * {@link Color} field conflicts are resolved. TODO: This class won't be necessary once we can - * import DBColorUtil as static again. TODO (deephaven/deephaven-core/issues/175): Move this to a - * new module and package + * Formatting methods from DBColorUtil. Exists so that we can statically import the DBColorUtil methods without + * importing the color fields. TODO: remove once {@link DBColorUtil} field and {@link Color} field conflicts are + * resolved. TODO: This class won't be necessary once we can import DBColorUtil as static again. 
TODO + * (deephaven/deephaven-core/issues/175): Move this to a new module and package */ public class DBColorUtilImpl { public static final long COLOR_SET_BIT = 0x01; @@ -26,8 +25,7 @@ public class DBColorUtilImpl { /** - * Creates a table format encoding with background color equal to the input RGB and unformatted - * foreground. + * Creates a table format encoding with background color equal to the input RGB and unformatted foreground. * * @param r red component * @param g green component @@ -36,14 +34,13 @@ public class DBColorUtilImpl { */ private static long background(long r, long g, long b) { return (COLOR_SET_BIT << 56) | - (r << 48) | - (g << 40) | - (b << 32); + (r << 48) | + (g << 40) | + (b << 32); } /** - * Creates a table format encoding with specified background color and an unformatted - * foreground. + * Creates a table format encoding with specified background color and an unformatted foreground. * * @param color color encoding * @return table format encoding with specified background color and unformatted foreground @@ -53,8 +50,7 @@ private static long background(long color) { } /** - * Creates a table format encoding with specified background color and an unformatted - * foreground. + * Creates a table format encoding with specified background color and an unformatted foreground. * * @param color color represented by a {@link Color} * @return table format encoding with specified background color and unformatted foreground @@ -64,8 +60,7 @@ public static long background(Color color) { } /** - * Creates a table format encoding with specified background color and an unformatted - * foreground. + * Creates a table format encoding with specified background color and an unformatted foreground. 
* * @param color the hex representation or the case-insensitive color name * @return table format encoding with specified background color and unformatted foreground @@ -107,8 +102,8 @@ public static long bg(Color color) { /** * Convenience method for {@link #background(Color)}. *

    - * This variant takes the input color as a {@link String}. This may be the hex representation or - * the case-insensitive color name. + * This variant takes the input color as a {@link String}. This may be the hex representation or the + * case-insensitive color name. *

    */ public static long bg(String color) { @@ -169,8 +164,8 @@ public static long bgo(Color color) { /** * Convenience method for {@link #backgroundOverride(Color)}. *

    - * This variant takes the input color as a {@link String}. This may be the hex representation or - * the case-insensitive color name. + * This variant takes the input color as a {@link String}. This may be the hex representation or the + * case-insensitive color name. *

    */ public static long bgo(String color) { @@ -182,8 +177,7 @@ public static long bgo(String color) { /** - * Creates a table format encoding with foreground color equal to the input RGB and unformatted - * background. + * Creates a table format encoding with foreground color equal to the input RGB and unformatted background. * * @param r red component * @param g green component @@ -192,10 +186,10 @@ public static long bgo(String color) { */ private static long foreground(long r, long g, long b) { return (COLOR_SET_BIT << 24) | - (r << 16) | - (g << 8) | - (b) | - FOREGROUND_SET_BIT << 24; + (r << 16) | + (g << 8) | + (b) | + FOREGROUND_SET_BIT << 24; } /** @@ -241,9 +235,9 @@ public static long foreground(String color) { /** * Convenience method for {@link #foreground(Color)}. *

    - * This variant takes the input color encoded as a long. The resultant encoding is an - * unformatted background and a foreground the same color as the background color of the input, - * e.g. fg(bgfg(YELLOW, RED)) will result in a yellow foreground with no background formatting. + * This variant takes the input color encoded as a long. The resultant encoding is an unformatted background and a + * foreground the same color as the background color of the input, e.g. fg(bgfg(YELLOW, RED)) will result in a + * yellow foreground with no background formatting. *

    */ public static long fg(long color) { @@ -263,8 +257,8 @@ public static long fg(Color color) { /** * Convenience method for {@link #foreground(Color)}. *

    - * This variant takes the input color as a {@link String}. This may be the hex representation or - * the case-insensitive color name. + * This variant takes the input color as a {@link String}. This may be the hex representation or the + * case-insensitive color name. *

    */ public static long fg(String color) { @@ -335,16 +329,15 @@ public static long fgo(Color color) { /** * Convenience method for {@link #foregroundOverride(Color)}. *

    - * This variant takes the input color as a {@link String}. This may be the hex representation or - * the case-insensitive color name. + * This variant takes the input color as a {@link String}. This may be the hex representation or the + * case-insensitive color name. *

    */ public static long fgo(String color) { return foregroundOverride(color); } - ////////////////////////////////////// BackgroundForeground methods - ////////////////////////////////////// ////////////////////////////////////// + ////////////////////////////////////// BackgroundForeground methods ////////////////////////////////////// /** @@ -358,8 +351,7 @@ public static long fgo(String color) { * @param fgb blue component of the foreground color * @return table format encoding with specified foreground and background colors */ - private static long backgroundForeground(long bgr, long bgg, long bgb, long fgr, long fgg, - long fgb) { + private static long backgroundForeground(long bgr, long bgg, long bgb, long fgr, long fgg, long fgb) { return background(bgr, bgg, bgb) | foreground(fgr, fgg, fgb); } @@ -388,10 +380,10 @@ public static long backgroundForeground(Color bg, Color fg) { /** * Creates a table format encoding with specified foreground and background colors. * - * @param bg background color represented by a {@link String}. This may be the hex - * representation or the case-insensitive color name - * @param fg foreground color represented by a {@link String}. This may be the hex - * representation or the case-insensitive color name + * @param bg background color represented by a {@link String}. This may be the hex representation or the + * case-insensitive color name + * @param fg foreground color represented by a {@link String}. This may be the hex representation or the + * case-insensitive color name * @return table format encoding with specified foreground and background colors */ public static long backgroundForeground(String bg, String fg) { @@ -431,8 +423,8 @@ public static long bgfg(Color bg, Color fg) { /** * Convenience method for {@link #backgroundForeground(Color, Color)}. *

    - * This variant takes the input colors as {@link String}s. This may be the hex representation or - * the case-insensitive color name. + * This variant takes the input colors as {@link String}s. This may be the hex representation or the + * case-insensitive color name. *

    */ public static long bgfg(String bg, String fg) { @@ -440,13 +432,12 @@ public static long bgfg(String bg, String fg) { } - ////////////////////////////////////// BackgroundForegroundAuto methods - ////////////////////////////////////// ////////////////////////////////////// + ////////////////////////////////////// BackgroundForegroundAuto methods ////////////////////////////////////// /** - * Creates a table format encoding with specified background color and automatically chosen - * contrasting foreground color. + * Creates a table format encoding with specified background color and automatically chosen contrasting foreground + * color. * * @param bgr red component of the background color * @param bgg green component of the background color @@ -460,8 +451,8 @@ private static long backgroundForegroundAuto(long bgr, long bgg, long bgb) { } /** - * Creates a table format encoding with specified background color and automatically chosen - * contrasting foreground color. + * Creates a table format encoding with specified background color and automatically chosen contrasting foreground + * color. * * @param color background color * @return table format encoding with background color and auto-generated foreground color @@ -473,8 +464,8 @@ private static long backgroundForegroundAuto(long color) { } /** - * Creates a table format encoding with specified background color and automatically chosen - * contrasting foreground color. + * Creates a table format encoding with specified background color and automatically chosen contrasting foreground + * color. * * @param color background color represented by a {@link Color} * @return table format encoding with background color and auto-generated foreground color @@ -484,11 +475,11 @@ public static long backgroundForegroundAuto(Color color) { } /** - * Creates a table format encoding with specified background color and automatically chosen - * contrasting foreground color. 
+ * Creates a table format encoding with specified background color and automatically chosen contrasting foreground + * color. * - * @param color background color represented by a {@link String}. This may be the hex - * representation or the case-insensitive color name + * @param color background color represented by a {@link String}. This may be the hex representation or the + * case-insensitive color name * @return table format encoding with background color and auto-generated foreground color */ public static long backgroundForegroundAuto(String color) { @@ -530,8 +521,8 @@ public static long bgfga(Color color) { * Convenience method for {@link #backgroundForegroundAuto(Color)} * *

    - * This variant takes the input color as a {@link String}. This may be the hex representation or - * the case-insensitive color name. + * This variant takes the input color as a {@link String}. This may be the hex representation or the + * case-insensitive color name. *

    */ public static long bgfga(String color) { @@ -543,16 +534,15 @@ public static long bgfga(String color) { /** - * Creates a table format encoding for the heat map at {@code value}. A contrasting foreground - * color is automatically chosen. + * Creates a table format encoding for the heat map at {@code value}. A contrasting foreground color is + * automatically chosen. * * @param value determines the color used by its location in the heat map's range * @param min minimum value of the heat map range * @param max maximum value of the heat map range * @param bg1 background color at or below the minimum value of the heat map. Encoded as a long * @param bg2 background color at or above the maximum value of the heat map. Encoded as a long - * @return table format encoding with background color and auto-generated foreground color - * determined by a heat map + * @return table format encoding with background color and auto-generated foreground color determined by a heat map */ public static long heatmap(double value, double min, double max, long bg1, long bg2) { if (value <= min) { @@ -571,50 +561,47 @@ public static long heatmap(double value, double min, double max, long bg1, long long b2 = (bg2 >> 32) & 0xFF; return bgfga((long) (r1 + pert * (r2 - r1)), (long) (g1 + pert * (g2 - g1)), - (long) (b1 + pert * (b2 - b1))); + (long) (b1 + pert * (b2 - b1))); } } /** - * Creates a table format encoding for the heat map at {@code value}. A contrasting foreground - * color is automatically chosen. + * Creates a table format encoding for the heat map at {@code value}. A contrasting foreground color is + * automatically chosen. * * @param value determines the color used by its location in the heat map's range * @param min minimum value of the heat map range * @param max maximum value of the heat map range * @param bg1 background color at or below the minimum value of the heat map. * @param bg2 background color at or above the maximum value of the heat map. 
- * @return table format encoding with background color and auto-generated foreground color - * determined by a heat map + * @return table format encoding with background color and auto-generated foreground color determined by a heat map */ public static long heatmap(double value, double min, double max, Color bg1, Color bg2) { return heatmap(value, min, max, toLong(bg1), toLong(bg2)); } /** - * Creates a table format encoding for the heat map at {@code value}. A contrasting foreground - * color is automatically chosen. + * Creates a table format encoding for the heat map at {@code value}. A contrasting foreground color is + * automatically chosen. * * @param value determines the color used by its location in the heat map's range * @param min minimum value of the heat map range * @param max maximum value of the heat map range * @param bg1 background color at or below the minimum value of the heat map. * @param bg2 background color at or above the maximum value of the heat map. - * @return table format encoding with background color and auto-generated foreground color - * determined by a heat map + * @return table format encoding with background color and auto-generated foreground color determined by a heat map */ public static long heatmap(double value, double min, double max, String bg1, String bg2) { return heatmap(value, min, max, toLong(bg1), toLong(bg2)); } - ////////////////////////////////////// heatmapForeground methods - ////////////////////////////////////// ////////////////////////////////////// + ////////////////////////////////////// heatmapForeground methods ////////////////////////////////////// /** - * Creates a table format encoding for the heat map at {@code value} with specified foreground - * color and unformatted background. + * Creates a table format encoding for the heat map at {@code value} with specified foreground color and unformatted + * background. *

    * Note that fg1 and fg2 must be encoded as foreground colors for this to work as expected. *

    @@ -626,8 +613,7 @@ public static long heatmap(double value, double min, double max, String bg1, Str * @param fg2 foreground color at or above the maximum value of the heat map. Encoded as a long * @return table format encoding with foreground color determined by a heat map */ - private static long heatmapForeground(double value, double min, double max, long fg1, - long fg2) { + private static long heatmapForeground(double value, double min, double max, long fg1, long fg2) { if (value <= min) { return fg1; } else if (value >= max) { @@ -643,14 +629,13 @@ private static long heatmapForeground(double value, double min, double max, long long g2 = (fg2 >> 8) & 0xFF; long b2 = (fg2) & 0xFF; - return fg((long) (r1 + pert * (r2 - r1)), (long) (g1 + pert * (g2 - g1)), - (long) (b1 + pert * (b2 - b1))); + return fg((long) (r1 + pert * (r2 - r1)), (long) (g1 + pert * (g2 - g1)), (long) (b1 + pert * (b2 - b1))); } } /** - * Creates a table format encoding for the heat map at {@code value} with specified foreground - * color and unformatted background. + * Creates a table format encoding for the heat map at {@code value} with specified foreground color and unformatted + * background. * * @param value determines the color used by its location in the heat map's range * @param min minimum value of the heat map range. @@ -659,14 +644,13 @@ private static long heatmapForeground(double value, double min, double max, long * @param fg2 foreground color at or above the maximum value of the heat map * @return table format encoding with foreground color determined by a heat map */ - public static long heatmapForeground(double value, double min, double max, Color fg1, - Color fg2) { + public static long heatmapForeground(double value, double min, double max, Color fg1, Color fg2) { return heatmapForeground(value, min, max, fg(fg1), fg(fg2)); } /** - * Creates a table format encoding for the heat map at {@code value} with specified foreground - * color and unformatted background. 
+ * Creates a table format encoding for the heat map at {@code value} with specified foreground color and unformatted + * background. * * @param value determines the color used by its location in the heat map's range * @param min minimum value of the heat map range. @@ -675,16 +659,15 @@ public static long heatmapForeground(double value, double min, double max, Color * @param fg2 foreground color at or above the maximum value of the heat map * @return table format encoding with foreground color determined by a heat map */ - public static long heatmapForeground(double value, double min, double max, String fg1, - String fg2) { + public static long heatmapForeground(double value, double min, double max, String fg1, String fg2) { return heatmapForeground(value, min, max, fg(fg1), fg(fg2)); } /** * Convenience method for {@link #heatmapForeground(double, double, double, Color, Color)} *

    - * This variant takes the input colors encoded as a longs. These colors must be formatted as a - * foreground (i.e. through fg() call) for this to work as expected. + * This variant takes the input colors encoded as a longs. These colors must be formatted as a foreground (i.e. + * through fg() call) for this to work as expected. *

    */ public static long heatmapFg(double value, double min, double max, long fg1, long fg2) { @@ -704,8 +687,8 @@ public static long heatmapFg(double value, double min, double max, Color fg1, Co /** * See {@link #heatmapFg(double, double, double, long, long)} *

    - * This variant takes the input colors as {@link String}s. This may be the hex representation or - * the case-insensitive color name. + * This variant takes the input colors as {@link String}s. This may be the hex representation or the + * case-insensitive color name. *

    */ public static long heatmapFg(double value, double min, double max, String fg1, String fg2) { @@ -734,8 +717,8 @@ public static long toLong(final long color) { */ public static long toLong(final Color color) { return color == null || color == Color.NO_FORMATTING ? 0 - : backgroundForegroundAuto(color.javaColor().getRed(), color.javaColor().getGreen(), - color.javaColor().getBlue()); + : backgroundForegroundAuto(color.javaColor().getRed(), color.javaColor().getGreen(), + color.javaColor().getBlue()); } /** @@ -746,8 +729,7 @@ public static long toLong(final Color color) { * @throws IllegalArgumentException If {@code color} is invalid */ public static long toLong(final String color) { - return color == null - || color.toUpperCase().equals(Colors.NO_FORMATTING.name().toUpperCase()) ? 0 + return color == null || color.toUpperCase().equals(Colors.NO_FORMATTING.name().toUpperCase()) ? 0 : toLong(Color.color(color)); } diff --git a/DB/src/main/java/io/deephaven/db/util/DynamicCompileUtils.java b/DB/src/main/java/io/deephaven/db/util/DynamicCompileUtils.java index d4e9b47e1df..cba719d5a5f 100644 --- a/DB/src/main/java/io/deephaven/db/util/DynamicCompileUtils.java +++ b/DB/src/main/java/io/deephaven/db/util/DynamicCompileUtils.java @@ -14,14 +14,12 @@ */ public class DynamicCompileUtils { - public static Supplier compileSimpleFunction(final Class resultType, - final String code) { - return compileSimpleFunction(resultType, code, Collections.emptyList(), - Collections.emptyList()); + public static Supplier compileSimpleFunction(final Class resultType, final String code) { + return compileSimpleFunction(resultType, code, Collections.emptyList(), Collections.emptyList()); } - public static Supplier compileSimpleStatement(final Class resultType, - final String code, final String... imports) { + public static Supplier compileSimpleStatement(final Class resultType, final String code, + final String... 
imports) { final List importClasses = new ArrayList<>(); for (final String importString : imports) { try { @@ -31,12 +29,11 @@ public static Supplier compileSimpleStatement(final Class re } } - return compileSimpleFunction(resultType, "return " + code, importClasses, - Collections.emptyList()); + return compileSimpleFunction(resultType, "return " + code, importClasses, Collections.emptyList()); } - public static Supplier compileSimpleFunction(final Class resultType, - final String code, final Collection imports, final Collection staticImports) { + public static Supplier compileSimpleFunction(final Class resultType, final String code, + final Collection imports, final Collection staticImports) { final StringBuilder classBody = new StringBuilder(); classBody.append("import ").append(resultType.getName()).append(";\n"); @@ -47,9 +44,8 @@ public static Supplier compileSimpleFunction(final Class res classBody.append("import static ").append(sim.getName()).append(".*;\n"); } - classBody.append("public class $CLASSNAME$ implements ") - .append(Supplier.class.getCanonicalName()).append("<") - .append(resultType.getCanonicalName()).append(">").append(" ").append("{\n"); + classBody.append("public class $CLASSNAME$ implements ").append(Supplier.class.getCanonicalName()).append("<") + .append(resultType.getCanonicalName()).append(">").append(" ").append("{\n"); classBody.append(" @Override\n"); classBody.append(" public ").append(resultType.getCanonicalName()).append(" get() {\n"); classBody.append(code).append(";\n"); @@ -57,7 +53,7 @@ public static Supplier compileSimpleFunction(final Class res classBody.append("}\n"); final Class partitionClass = - CompilerTools.compile("Function", classBody.toString(), CompilerTools.FORMULA_PREFIX); + CompilerTools.compile("Function", classBody.toString(), CompilerTools.FORMULA_PREFIX); try { // noinspection unchecked @@ -69,14 +65,14 @@ public static Supplier compileSimpleFunction(final Class res public static Class 
getClassThroughCompilation(final String object) { final StringBuilder classBody = new StringBuilder(); - classBody.append("public class $CLASSNAME$ implements ") - .append(Supplier.class.getCanonicalName()).append("{ \n"); + classBody.append("public class $CLASSNAME$ implements ").append(Supplier.class.getCanonicalName()) + .append("{ \n"); classBody.append(" @Override\n"); classBody.append(" public Class get() { return ").append(object).append(".class; }\n"); classBody.append("}\n"); final Class partitionClass = - CompilerTools.compile("Function", classBody.toString(), CompilerTools.FORMULA_PREFIX); + CompilerTools.compile("Function", classBody.toString(), CompilerTools.FORMULA_PREFIX); try { // noinspection unchecked diff --git a/DB/src/main/java/io/deephaven/db/util/GroovyDeephavenSession.java b/DB/src/main/java/io/deephaven/db/util/GroovyDeephavenSession.java index 62c3888ec00..ceafcf984ee 100644 --- a/DB/src/main/java/io/deephaven/db/util/GroovyDeephavenSession.java +++ b/DB/src/main/java/io/deephaven/db/util/GroovyDeephavenSession.java @@ -58,43 +58,41 @@ public class GroovyDeephavenSession extends AbstractScriptSession implements Scr private static final String SCRIPT_PREFIX = "io.deephaven.db.util.Script"; private static final String DEFAULT_SCRIPT_PATH = Configuration.getInstance() - .getProperty("GroovyDeephavenSession.defaultScriptPath") - .replace("", Configuration.getInstance().getDevRootPath()) - .replace("", Configuration.getInstance().getWorkspacePath()); + .getProperty("GroovyDeephavenSession.defaultScriptPath") + .replace("", Configuration.getInstance().getDevRootPath()) + .replace("", Configuration.getInstance().getWorkspacePath()); - private static final boolean ALLOW_UNKNOWN_GROOVY_PACKAGE_IMPORTS = - Configuration.getInstance().getBooleanForClassWithDefault(GroovyDeephavenSession.class, - "allowUnknownGroovyPackageImports", false); + private static final boolean ALLOW_UNKNOWN_GROOVY_PACKAGE_IMPORTS = Configuration.getInstance() + 
.getBooleanForClassWithDefault(GroovyDeephavenSession.class, "allowUnknownGroovyPackageImports", false); private static final ClassLoader STATIC_LOADER = - new URLClassLoader(new URL[0], GroovyDeephavenSession.class.getClassLoader()) { - final ConcurrentHashMap mapping = new ConcurrentHashMap<>(); - - @Override - protected Class loadClass(String name, boolean resolve) - throws ClassNotFoundException { - if (!mapping.containsKey(name)) { - try { - if (name.replaceAll("\\$", "\\.").contains(PACKAGE)) { - throw new ClassNotFoundException(); + new URLClassLoader(new URL[0], GroovyDeephavenSession.class.getClassLoader()) { + final ConcurrentHashMap mapping = new ConcurrentHashMap<>(); + + @Override + protected Class loadClass(String name, boolean resolve) throws ClassNotFoundException { + if (!mapping.containsKey(name)) { + try { + if (name.replaceAll("\\$", "\\.").contains(PACKAGE)) { + throw new ClassNotFoundException(); + } + Class aClass = super.loadClass(name, resolve); + mapping.put(name, aClass); + return aClass; + } catch (ClassNotFoundException e) { + mapping.put(name, e); + throw e; } - Class aClass = super.loadClass(name, resolve); - mapping.put(name, aClass); - return aClass; - } catch (ClassNotFoundException e) { - mapping.put(name, e); - throw e; - } - } else { - Object obj = mapping.get(name); - if (obj instanceof Class) { - return (Class) obj; } else { - throw new ClassNotFoundException(); + Object obj = mapping.get(name); + if (obj instanceof Class) { + return (Class) obj; + } else { + throw new ClassNotFoundException(); + } } } - } - }; + }; private final ScriptFinder scriptFinder; @@ -126,8 +124,7 @@ public GroovyDeephavenSession(final RunScripts runScripts) throws IOException { this(runScripts, false); } - public GroovyDeephavenSession(final RunScripts runScripts, boolean isDefaultScriptSession) - throws IOException { + public GroovyDeephavenSession(final RunScripts runScripts, boolean isDefaultScriptSession) throws IOException { 
super(isDefaultScriptSession); this.scriptFinder = new ScriptFinder(DEFAULT_SCRIPT_PATH); @@ -162,8 +159,7 @@ public void runScript(String script) throws IOException { private final Set executedScripts = new HashSet<>(); - // Used by closures that implement source() more directly to figure out if we've loaded a script - // already + // Used by closures that implement source() more directly to figure out if we've loaded a script already public boolean hasExecutedScript(final String scriptName) { return !executedScripts.add(scriptName); } @@ -198,19 +194,17 @@ public T getVariable(String name, T defaultValue) { private void evaluateCommand(String command) { final SecurityManager sm = System.getSecurityManager(); if (sm != null) { - // We explicitly want all Groovy commands to run under the 'file:/groovy/shell' source, - // so explicitly create that. + // We explicitly want all Groovy commands to run under the 'file:/groovy/shell' source, so explicitly create + // that. AccessControlContext context; try { final URL urlSource = new URL("file:/groovy/shell"); - final CodeSource codeSource = - new CodeSource(urlSource, (java.security.cert.Certificate[]) null); + final CodeSource codeSource = new CodeSource(urlSource, (java.security.cert.Certificate[]) null); final PermissionCollection perms = Policy.getPolicy().getPermissions(codeSource); - context = AccessController.doPrivileged( - (PrivilegedAction) () -> new AccessControlContext( - new ProtectionDomain[] {new ProtectionDomain( - new CodeSource(urlSource, (java.security.cert.Certificate[]) null), - perms)})); + context = AccessController + .doPrivileged((PrivilegedAction) () -> new AccessControlContext( + new ProtectionDomain[] {new ProtectionDomain( + new CodeSource(urlSource, (java.security.cert.Certificate[]) null), perms)})); } catch (MalformedURLException e) { throw new RuntimeException("Groovy shell URL somehow invalid.", e); } @@ -243,26 +237,21 @@ protected void evaluate(String command, String scriptName) { 
updateClassloader(lastCommand); try { - LiveTableMonitor.DEFAULT.exclusiveLock() - .doLockedInterruptibly(() -> evaluateCommand(lastCommand)); + LiveTableMonitor.DEFAULT.exclusiveLock().doLockedInterruptibly(() -> evaluateCommand(lastCommand)); } catch (InterruptedException e) { - throw new QueryCancellationException( - e.getMessage() != null ? e.getMessage() : "Query interrupted", - maybeRewriteStackTrace(scriptName, currentScriptName, e, lastCommand, - commandPrefix)); + throw new QueryCancellationException(e.getMessage() != null ? e.getMessage() : "Query interrupted", + maybeRewriteStackTrace(scriptName, currentScriptName, e, lastCommand, commandPrefix)); } catch (Exception e) { - throw wrapAndRewriteStackTrace(scriptName, currentScriptName, e, lastCommand, - commandPrefix); + throw wrapAndRewriteStackTrace(scriptName, currentScriptName, e, lastCommand, commandPrefix); } } finally { script = oldScriptName; } } - private RuntimeException wrapAndRewriteStackTrace(String scriptName, String currentScriptName, - Exception e, String lastCommand, String commandPrefix) { - final Exception en = - maybeRewriteStackTrace(scriptName, currentScriptName, e, lastCommand, commandPrefix); + private RuntimeException wrapAndRewriteStackTrace(String scriptName, String currentScriptName, Exception e, + String lastCommand, String commandPrefix) { + final Exception en = maybeRewriteStackTrace(scriptName, currentScriptName, e, lastCommand, commandPrefix); if (en instanceof RuntimeException) { return (RuntimeException) en; } else { @@ -270,29 +259,25 @@ private RuntimeException wrapAndRewriteStackTrace(String scriptName, String curr } } - private Exception maybeRewriteStackTrace(String scriptName, String currentScriptName, - Exception e, String lastCommand, String commandPrefix) { + private Exception maybeRewriteStackTrace(String scriptName, String currentScriptName, Exception e, + String lastCommand, String commandPrefix) { if (scriptName != null) { final StackTraceElement[] stackTrace 
= e.getStackTrace(); for (int i = stackTrace.length - 1; i >= 0; i--) { final StackTraceElement stackTraceElement = stackTrace[i]; - if (stackTraceElement.getClassName().startsWith(PACKAGE + "." + currentScriptName) - && - stackTraceElement.getMethodName().equals("run") - && stackTraceElement.getFileName().endsWith(".groovy")) { + if (stackTraceElement.getClassName().startsWith(PACKAGE + "." + currentScriptName) && + stackTraceElement.getMethodName().equals("run") + && stackTraceElement.getFileName().endsWith(".groovy")) { final String[] allLines = lastCommand.split("\n"); - final int prefixLineCount = - org.apache.commons.lang3.StringUtils.countMatches(commandPrefix, "\n"); + final int prefixLineCount = org.apache.commons.lang3.StringUtils.countMatches(commandPrefix, "\n"); final int userLineNumber = stackTraceElement.getLineNumber() - prefixLineCount; if (stackTraceElement.getLineNumber() <= allLines.length) { - return new RuntimeException( - "Error encountered at line " + userLineNumber + ": " - + allLines[stackTraceElement.getLineNumber() - 1], - sanitizeThrowable(e)); + return new RuntimeException("Error encountered at line " + userLineNumber + ": " + + allLines[stackTraceElement.getLineNumber() - 1], sanitizeThrowable(e)); } else { return new RuntimeException( - "Error encountered in Groovy script; unable to identify original line number.", - sanitizeThrowable(e)); + "Error encountered in Groovy script; unable to identify original line number.", + sanitizeThrowable(e)); } } } @@ -361,16 +346,15 @@ private static boolean fieldExists(String className, String fieldName) { } /** - * Remove comments from an import statement. /* comments take precedence over eol (//) comments. - * This ignores escaping and quoting, as they are not valid in an import statement. + * Remove comments from an import statement. /* comments take precedence over eol (//) comments. This ignores + * escaping and quoting, as they are not valid in an import statement. 
* * @param s import statement string from which to remove comments * @return the input string with comments removed, and whitespace trimmed */ @VisibleForTesting public static String removeComments(String s) { - // first remove /*...*/. This might include // comments, e.g. /* use // to comment to the - // end of the line */ + // first remove /*...*/. This might include // comments, e.g. /* use // to comment to the end of the line */ s = s.replaceAll("/(?s)\\*.*?\\*/", ""); // reluctant match inside /* */ s = s.replaceFirst("//.*", ""); @@ -378,8 +362,7 @@ public static String removeComments(String s) { } /** - * Ensure that the given importString is valid. Return a canonical version of the import string - * if it is valid. + * Ensure that the given importString is valid. Return a canonical version of the import string if it is valid. * * @param importString the string to check. importString is "[import] [static] * package.class[.innerclass...][.field|.method][.*][;]". @@ -388,13 +371,13 @@ public static String removeComments(String s) { */ @VisibleForTesting public static String isValidImportString(Logger log, String importString) { - // look for (ignoring whitespace): optional "import" optional "static" everything_else - // optional ".*" optional semicolon - // "everything_else" should be a valid java identifier of the form - // package.class[.class|.method|.field]. This will be checked later - Matcher matcher = Pattern.compile( - "^\\s*(import\\s+)\\s*(?static\\s+)?\\s*(?.*?)(?\\.\\*)?[\\s;]*$") - .matcher(importString); + // look for (ignoring whitespace): optional "import" optional "static" everything_else optional ".*" optional + // semicolon + // "everything_else" should be a valid java identifier of the form package.class[.class|.method|.field]. 
This + // will be checked later + Matcher matcher = Pattern + .compile("^\\s*(import\\s+)\\s*(?static\\s+)?\\s*(?.*?)(?\\.\\*)?[\\s;]*$") + .matcher(importString); if (!matcher.matches()) { return null; } @@ -418,45 +401,36 @@ public static String isValidImportString(Logger log, String importString) { if (lastSeparator > 0) { final String prefix = body.substring(0, lastSeparator); final String suffix = body.substring(lastSeparator + 1); - okToImport = functionExists(prefix, suffix) || fieldExists(prefix, suffix) - || classExists(body); + okToImport = functionExists(prefix, suffix) || fieldExists(prefix, suffix) || classExists(body); } else { okToImport = classExists(body); } } } else { if (isWildcard) { - okToImport = classExists(body) || (Package.getPackage(body) != null); // Note: this - // might not - // find a - // valid - // package - // that has - // never been - // loaded + okToImport = classExists(body) || (Package.getPackage(body) != null); // Note: this might not find a + // valid package that has never + // been loaded if (!okToImport) { if (ALLOW_UNKNOWN_GROOVY_PACKAGE_IMPORTS) { - // Check for proper form of a package. Pass a package star import that is - // plausible. Groovy is OK with packages that cannot be found, unlike java. + // Check for proper form of a package. Pass a package star import that is plausible. Groovy is + // OK with packages that cannot be found, unlike java. final String javaIdentifierPattern = - "(\\p{javaJavaIdentifierStart}\\p{javaJavaIdentifierPart}*\\.)+\\p{javaJavaIdentifierStart}\\p{javaJavaIdentifierPart}*"; + "(\\p{javaJavaIdentifierStart}\\p{javaJavaIdentifierPart}*\\.)+\\p{javaJavaIdentifierStart}\\p{javaJavaIdentifierPart}*"; if (body.matches(javaIdentifierPattern)) { log.info().append("Package or class \"").append(body) - .append( - "\" could not be verified. If this is a package, it could mean that no class from that package has been seen by the classloader.") - .endl(); + .append("\" could not be verified. 
If this is a package, it could mean that no class from that package has been seen by the classloader.") + .endl(); okToImport = true; } else { log.warn().append("Package or class \"").append(body) - .append( - "\" could not be verified and does not appear to be a valid java identifier.") - .endl(); + .append("\" could not be verified and does not appear to be a valid java identifier.") + .endl(); } } else { log.warn().append("Package or class \"").append(body) - .append( - "\" could not be verified. If this is a package, it could mean that no class from that package has been seen by the classloader.") - .endl(); + .append("\" could not be verified. If this is a package, it could mean that no class from that package has been seen by the classloader.") + .endl(); } } } else { @@ -465,13 +439,10 @@ public static String isValidImportString(Logger log, String importString) { } if (okToImport) { - String fixedImport = - "import " + (isStatic ? "static " : "") + body + (isWildcard ? ".*" : "") + ";"; + String fixedImport = "import " + (isStatic ? "static " : "") + body + (isWildcard ? ".*" : "") + ";"; log.info().append("Adding persistent import ") - .append(isStatic ? "(static/" : "(normal/") - .append(isWildcard ? "wildcard): \"" : "normal): \"") - .append(fixedImport).append("\" from original string: \"").append(importString) - .append("\"").endl(); + .append(isStatic ? "(static/" : "(normal/").append(isWildcard ? 
"wildcard): \"" : "normal): \"") + .append(fixedImport).append("\" from original string: \"").append(importString).append("\"").endl(); return fixedImport; } else { log.error().append("Invalid import: \"").append(importString).append("\"").endl(); @@ -484,8 +455,7 @@ private void updateScriptImports(String importString) { if (fixedImportString != null) { scriptImports.add(importString); } else { - throw new RuntimeException( - "Attempting to import a path that does not exist: " + importString); + throw new RuntimeException("Attempting to import a path that does not exist: " + importString); } } @@ -521,45 +491,44 @@ public void addScriptImportStatic(Class c) { /** * Creates the full groovy command that we need to evaluate. * - * Imports and the package line are added to the beginning; a postfix is added to the end. We - * return the prefix to enable stack trace rewriting. + * Imports and the package line are added to the beginning; a postfix is added to the end. We return the prefix to + * enable stack trace rewriting. * * @param command the user's input command * @return a pair of our command prefix (first) and the full command (second) */ private Pair fullCommand(String command) { // TODO (core#230): Remove large list of manual text-based imports - // NOTE: Don't add to this list without a compelling reason!!! Use the user script import if - // possible. + // NOTE: Don't add to this list without a compelling reason!!! Use the user script import if possible. 
final String commandPrefix = "package " + PACKAGE + ";\n" + - "import static io.deephaven.db.tables.utils.TableTools.*;\n" + - "import static io.deephaven.db.v2.utils.TableLoggers.*;\n" + - "import static io.deephaven.db.v2.utils.PerformanceQueries.*;\n" + - "import static io.deephaven.db.tables.utils.WhereClause.whereClause;\n" + - "import io.deephaven.db.tables.DataColumn;\n" + - "import io.deephaven.db.tables.Table;\n" + - "import java.lang.reflect.Array;\n" + - "import io.deephaven.util.type.TypeUtils;\n" + - "import io.deephaven.db.tables.utils.ArrayUtils;\n" + - "import io.deephaven.db.tables.utils.DBDateTime;\n" + - "import io.deephaven.db.tables.utils.DBTimeUtils;\n" + - "import io.deephaven.base.string.cache.CompressedString;\n" + - "import static io.deephaven.base.string.cache.CompressedString.compress;\n" + - "import org.joda.time.LocalTime;\n" + - "import io.deephaven.db.tables.utils.DBPeriod;\n" + - "import io.deephaven.db.tables.select.Param;\n" + - "import io.deephaven.db.tables.select.QueryScope;\n" + - "import java.util.*;\n" + - "import java.lang.*;\n" + - "import static io.deephaven.util.QueryConstants.*;\n" + - "import static io.deephaven.libs.GroovyStaticImports.*;\n" + - "import static io.deephaven.db.tables.utils.DBTimeUtils.*;\n" + - "import static io.deephaven.db.tables.utils.DBTimeZone.*;\n" + - "import static io.deephaven.db.tables.lang.DBLanguageFunctionUtil.*;\n" + - "import static io.deephaven.db.v2.by.ComboAggregateFactory.*;\n" + - StringUtils.joinStrings(scriptImports, "\n") + "\n"; + "import static io.deephaven.db.tables.utils.TableTools.*;\n" + + "import static io.deephaven.db.v2.utils.TableLoggers.*;\n" + + "import static io.deephaven.db.v2.utils.PerformanceQueries.*;\n" + + "import static io.deephaven.db.tables.utils.WhereClause.whereClause;\n" + + "import io.deephaven.db.tables.DataColumn;\n" + + "import io.deephaven.db.tables.Table;\n" + + "import java.lang.reflect.Array;\n" + + "import io.deephaven.util.type.TypeUtils;\n" + + 
"import io.deephaven.db.tables.utils.ArrayUtils;\n" + + "import io.deephaven.db.tables.utils.DBDateTime;\n" + + "import io.deephaven.db.tables.utils.DBTimeUtils;\n" + + "import io.deephaven.base.string.cache.CompressedString;\n" + + "import static io.deephaven.base.string.cache.CompressedString.compress;\n" + + "import org.joda.time.LocalTime;\n" + + "import io.deephaven.db.tables.utils.DBPeriod;\n" + + "import io.deephaven.db.tables.select.Param;\n" + + "import io.deephaven.db.tables.select.QueryScope;\n" + + "import java.util.*;\n" + + "import java.lang.*;\n" + + "import static io.deephaven.util.QueryConstants.*;\n" + + "import static io.deephaven.libs.GroovyStaticImports.*;\n" + + "import static io.deephaven.db.tables.utils.DBTimeUtils.*;\n" + + "import static io.deephaven.db.tables.utils.DBTimeZone.*;\n" + + "import static io.deephaven.db.tables.lang.DBLanguageFunctionUtil.*;\n" + + "import static io.deephaven.db.v2.by.ComboAggregateFactory.*;\n" + + StringUtils.joinStrings(scriptImports, "\n") + "\n"; return new Pair<>(commandPrefix, commandPrefix + command - + "\n\n// this final true prevents Groovy from interpreting a trailing class definition as something to execute\n;\ntrue;\n"); + + "\n\n// this final true prevents Groovy from interpreting a trailing class definition as something to execute\n;\ntrue;\n"); } public static byte[] getDynamicClass(String name) { @@ -567,14 +536,12 @@ public static byte[] getDynamicClass(String name) { } private static byte[] readClass(final File rootDirectory, final String className) { - final String resourceName = - className.replace('.', '/') + JavaFileObject.Kind.CLASS.extension; + final String resourceName = className.replace('.', '/') + JavaFileObject.Kind.CLASS.extension; final Path path = new File(rootDirectory, resourceName).toPath(); try { return Files.readAllBytes(path); } catch (IOException e) { - throw new RuntimeException("Error reading path " + path + " for className " + className, - e); + throw new 
RuntimeException("Error reading path " + path + " for className " + className, e); } } @@ -598,27 +565,25 @@ private void updateClassloader(String currentCommand) { for (final GroovyClass aClass : classes) { // Exclude anonymous (numbered) dynamic classes if (aClass.getName().startsWith(SCRIPT_PREFIX) - && isAnInteger(aClass.getName().substring(SCRIPT_PREFIX.length()))) { + && isAnInteger(aClass.getName().substring(SCRIPT_PREFIX.length()))) { continue; } - // always put classes into the writable class loader, because it is possible that their - // content may have changed + // always put classes into the writable class loader, because it is possible that their content may have + // changed newDynamicClasses.put(aClass.getName(), aClass.getBytes()); } if (!newDynamicClasses.isEmpty()) { boolean notifiedQueryLibrary = false; for (final Map.Entry entry : newDynamicClasses.entrySet()) { - // only increment QueryLibrary version if some dynamic class overrides an existing - // class + // only increment QueryLibrary version if some dynamic class overrides an existing class if (!dynamicClasses.add(entry.getKey()) && !notifiedQueryLibrary) { notifiedQueryLibrary = true; queryLibrary.updateVersionString(); } try { - CompilerTools.writeClass(dynamicClassDestination, entry.getKey(), - entry.getValue()); + CompilerTools.writeClass(dynamicClassDestination, entry.getKey(), entry.getValue()); } catch (IOException e) { throw new RuntimeException(e); } @@ -627,8 +592,8 @@ && isAnInteger(aClass.getName().substring(SCRIPT_PREFIX.length()))) { } /** - * I factored out this horrible snippet of code from the updateClassLoader, to isolate the - * badness. I can't think of a replacement that doesn't involve regex matching. + * I factored out this horrible snippet of code from the updateClassLoader, to isolate the badness. I can't think of + * a replacement that doesn't involve regex matching. * * @param s The string to evaluate * @return Whether s can be parsed as an int. 
@@ -684,7 +649,7 @@ public Throwable sanitizeThrowable(Throwable e) { @Override public void onApplicationInitializationBegin(Supplier pathLoaderSupplier, - ScriptPathLoaderState scriptLoaderState) { + ScriptPathLoaderState scriptLoaderState) { CompilerTools.getContext().setParentClassLoader(getShell().getClassLoader()); setScriptPathLoader(pathLoaderSupplier, true); } @@ -700,12 +665,10 @@ public void onApplicationInitializationEnd() { } @Override - public void setScriptPathLoader(Supplier pathLoaderSupplier, - boolean caching) { + public void setScriptPathLoader(Supplier pathLoaderSupplier, boolean caching) { final ScriptPathLoader pathLoader = pathLoaderSupplier.get(); setVariable("source", sourceClosure = new SourceClosure(this, pathLoader, false, caching)); - setVariable("sourceOnce", - sourceOnceClosure = new SourceClosure(this, pathLoader, true, false)); + setVariable("sourceOnce", sourceOnceClosure = new SourceClosure(this, pathLoader, true, false)); } @Override @@ -722,8 +685,7 @@ public boolean setUseOriginalScriptLoaderState(boolean useOriginal) { sospl.clearOverride(); final ScriptPathLoaderState scriptLoaderState = sospl.getUseState(); log.info().append("Using startup script loader state: ") - .append(scriptLoaderState == null ? "Latest" : scriptLoaderState.toString()) - .endl(); + .append(scriptLoaderState == null ? "Latest" : scriptLoaderState.toString()).endl(); } else { log.info().append("Using latest script states").endl(); sospl.setOverrideState(ScriptPathLoaderState.NONE); @@ -734,12 +696,11 @@ public boolean setUseOriginalScriptLoaderState(boolean useOriginal) { return true; } else { log.warn().append("Incorrect loader type for query: ") - .append(loader == null ? "(null)" : loader.getClass().toString()).endl(); + .append(loader == null ? "(null)" : loader.getClass().toString()).endl(); } } else { log.warn().append("Incorrect closure type for query: ") - .append(sourceClosure == null ? 
"(null)" : sourceClosure.getClass().toString()) - .endl(); + .append(sourceClosure == null ? "(null)" : sourceClosure.getClass().toString()).endl(); } return false; @@ -770,9 +731,9 @@ public String call(Object... args) { public static class RunScripts { public static RunScripts of(Iterable initScripts) { List paths = StreamSupport.stream(initScripts.spliterator(), false) - .sorted(Comparator.comparingInt(InitScript::priority)) - .map(InitScript::getScriptPath) - .collect(Collectors.toList()); + .sorted(Comparator.comparingInt(InitScript::priority)) + .map(InitScript::getScriptPath) + .collect(Collectors.toList()); return new RunScripts(paths); } @@ -785,8 +746,8 @@ public static RunScripts serviceLoader() { } public static RunScripts oldConfiguration() { - return new RunScripts(Arrays.asList(Configuration.getInstance() - .getProperty("GroovyDeephavenSession.initScripts").split(","))); + return new RunScripts(Arrays + .asList(Configuration.getInstance().getProperty("GroovyDeephavenSession.initScripts").split(","))); } private final List paths; diff --git a/DB/src/main/java/io/deephaven/db/util/GroovyExceptionWrapper.java b/DB/src/main/java/io/deephaven/db/util/GroovyExceptionWrapper.java index 8f3f1e08623..70ee8014dda 100644 --- a/DB/src/main/java/io/deephaven/db/util/GroovyExceptionWrapper.java +++ b/DB/src/main/java/io/deephaven/db/util/GroovyExceptionWrapper.java @@ -37,15 +37,12 @@ public static Throwable maybeTranslateGroovyException(final Throwable original) } /** - * Returns a replacement for the original exception, except now wrapping the new cause, since - * the existing exception can't be given a new cause. + * Returns a replacement for the original exception, except now wrapping the new cause, since the existing exception + * can't be given a new cause. 
*/ - private static Throwable replaceWithNewCause(final Throwable original, - final Throwable replacementCause) { - assert !(original instanceof GroovyException) - && !(original instanceof GroovyRuntimeException); - assert !(replacementCause instanceof GroovyException) - && !(replacementCause instanceof GroovyRuntimeException); + private static Throwable replaceWithNewCause(final Throwable original, final Throwable replacementCause) { + assert !(original instanceof GroovyException) && !(original instanceof GroovyRuntimeException); + assert !(replacementCause instanceof GroovyException) && !(replacementCause instanceof GroovyRuntimeException); final Throwable replacement = makeReplacement(original, replacementCause); replacement.setStackTrace(original.getStackTrace()); @@ -53,14 +50,13 @@ private static Throwable replaceWithNewCause(final Throwable original, } @NotNull - private static Throwable makeReplacement(@NotNull final Throwable original, - final Throwable replacementCause) { + private static Throwable makeReplacement(@NotNull final Throwable original, final Throwable replacementCause) { final Class originalClass = original.getClass(); if (original.getMessage() == null) { try { return originalClass.getConstructor(Throwable.class).newInstance(replacementCause); } catch (InstantiationException | IllegalAccessException | InvocationTargetException - | NoSuchMethodException e1) { + | NoSuchMethodException e1) { try { final Throwable result = originalClass.newInstance(); result.initCause(replacementCause); @@ -71,19 +67,18 @@ private static Throwable makeReplacement(@NotNull final Throwable original, } } try { - return originalClass.getConstructor(String.class, Throwable.class) - .newInstance(original.getMessage(), replacementCause); + return originalClass.getConstructor(String.class, Throwable.class).newInstance(original.getMessage(), + replacementCause); } catch (NoSuchMethodException | IllegalAccessException | InstantiationException - | InvocationTargetException 
e1) { + | InvocationTargetException e1) { try { - final Throwable result = - originalClass.getConstructor(String.class).newInstance(original.getMessage()); + final Throwable result = originalClass.getConstructor(String.class).newInstance(original.getMessage()); result.initCause(replacementCause); return result; } catch (NoSuchMethodException | IllegalAccessException | InstantiationException - | InvocationTargetException e2) { - return new TranslatedException( - original.getClass().getName() + ": " + original.getMessage(), replacementCause); + | InvocationTargetException e2) { + return new TranslatedException(original.getClass().getName() + ": " + original.getMessage(), + replacementCause); } } } diff --git a/DB/src/main/java/io/deephaven/db/util/IsWidget.java b/DB/src/main/java/io/deephaven/db/util/IsWidget.java index 2bf13c32553..26843a90719 100644 --- a/DB/src/main/java/io/deephaven/db/util/IsWidget.java +++ b/DB/src/main/java/io/deephaven/db/util/IsWidget.java @@ -19,8 +19,7 @@ public class IsWidget { public static boolean isWidget(Object value) { if (value instanceof LiveWidget) { return true; - } else if ((value instanceof PyObject - && ((PyObject) value).hasAttribute(GET_WIDGET_ATTRIBUTE))) { + } else if ((value instanceof PyObject && ((PyObject) value).hasAttribute(GET_WIDGET_ATTRIBUTE))) { try (final PyObject widget = ((PyObject) value).callMethod(GET_WIDGET_ATTRIBUTE)) { return !widget.isNone(); } @@ -55,8 +54,7 @@ public static LiveWidget getWidget(PyObject pyObject) { public static boolean isLiveWidgetVisibilityProvider(Object value) { if (value instanceof LiveWidgetVisibilityProvider) { return true; - } else if ((value instanceof PyObject - && ((PyObject) value).hasAttribute(GET_WIDGET_VISIBILITY_ATTRIBUTE))) { + } else if ((value instanceof PyObject && ((PyObject) value).hasAttribute(GET_WIDGET_VISIBILITY_ATTRIBUTE))) { return true; } @@ -78,8 +76,7 @@ public static String[] getLiveWidgetVisibility(final Object object) { } } } else { - throw new 
StateException( - "PyObject " + object + " isLiveWidgetVisibilityProvider, but has no attribute " + throw new StateException("PyObject " + object + " isLiveWidgetVisibilityProvider, but has no attribute " + GET_WIDGET_VISIBILITY_ATTRIBUTE); } } @@ -90,8 +87,7 @@ public static String[] getLiveWidgetVisibility(final Object object) { public static boolean isTable(Object value) { if (value instanceof Table) { return true; - } else if ((value instanceof PyObject - && ((PyObject) value).hasAttribute(GET_TABLE_ATTRIBUTE))) { + } else if ((value instanceof PyObject && ((PyObject) value).hasAttribute(GET_TABLE_ATTRIBUTE))) { try (final PyObject widget = ((PyObject) value).callMethod(GET_TABLE_ATTRIBUTE)) { return !widget.isNone(); } diff --git a/DB/src/main/java/io/deephaven/db/util/IterableUtils.java b/DB/src/main/java/io/deephaven/db/util/IterableUtils.java index 5a929f82e36..2f3fab5c59f 100644 --- a/DB/src/main/java/io/deephaven/db/util/IterableUtils.java +++ b/DB/src/main/java/io/deephaven/db/util/IterableUtils.java @@ -7,8 +7,7 @@ public static String makeCommaSeparatedList(Iterable s) { return appendCommaSeparatedList(new StringBuilder(), s).toString(); } - public static String makeSeparatedList(Iterable s, String separator, - Function renderer) { + public static String makeSeparatedList(Iterable s, String separator, Function renderer) { return appendSeparatedList(new StringBuilder(), s, separator, renderer).toString(); } @@ -16,9 +15,8 @@ public static StringBuilder appendCommaSeparatedList(StringBuilder sb, Itera return appendSeparatedList(sb, s, ", ", Object::toString); } - public static StringBuilder appendSeparatedList(StringBuilder sb, Iterable s, - String separator, - Function renderer) { + public static StringBuilder appendSeparatedList(StringBuilder sb, Iterable s, String separator, + Function renderer) { String currentSep = ""; for (T element : s) { sb.append(currentSep); diff --git a/DB/src/main/java/io/deephaven/db/util/NoLanguageDeephavenSession.java 
b/DB/src/main/java/io/deephaven/db/util/NoLanguageDeephavenSession.java index b0fc09e70e9..47d3884e103 100644 --- a/DB/src/main/java/io/deephaven/db/util/NoLanguageDeephavenSession.java +++ b/DB/src/main/java/io/deephaven/db/util/NoLanguageDeephavenSession.java @@ -12,8 +12,8 @@ import java.util.function.Supplier; /** - * ScriptSession implementation that simply allows variables to be exported. This is not intended - * for use in user scripts. + * ScriptSession implementation that simply allows variables to be exported. This is not intended for use in user + * scripts. */ public class NoLanguageDeephavenSession extends AbstractScriptSession implements ScriptSession { private static final String SCRIPT_TYPE = "NoLanguage"; @@ -91,7 +91,7 @@ public String scriptType() { @Override public void onApplicationInitializationBegin(Supplier pathLoader, - ScriptPathLoaderState scriptLoaderState) {} + ScriptPathLoaderState scriptLoaderState) {} @Override public void onApplicationInitializationEnd() {} @@ -99,18 +99,18 @@ public void onApplicationInitializationEnd() {} @Override public void setScriptPathLoader(Supplier scriptPathLoader, boolean caching) { throw new UnsupportedOperationException( - SCRIPT_TYPE + " session does not support setUseOriginalScriptLoaderState"); + SCRIPT_TYPE + " session does not support setUseOriginalScriptLoaderState"); } @Override public void clearScriptPathLoader() { throw new UnsupportedOperationException( - SCRIPT_TYPE + " session does not support setUseOriginalScriptLoaderState"); + SCRIPT_TYPE + " session does not support setUseOriginalScriptLoaderState"); } @Override public boolean setUseOriginalScriptLoaderState(boolean useOriginal) { throw new UnsupportedOperationException( - SCRIPT_TYPE + " session does not support setUseOriginalScriptLoaderState"); + SCRIPT_TYPE + " session does not support setUseOriginalScriptLoaderState"); } } diff --git a/DB/src/main/java/io/deephaven/db/util/PrintListener.java 
b/DB/src/main/java/io/deephaven/db/util/PrintListener.java index 477261c1c1a..412812231fd 100644 --- a/DB/src/main/java/io/deephaven/db/util/PrintListener.java +++ b/DB/src/main/java/io/deephaven/db/util/PrintListener.java @@ -9,8 +9,8 @@ * A simple listener that prints out each update received from a table. * *

    - * This can be used to debug the performance of a query by attaching to various tables in an effort - * to understand the update pattern. Optionally, you can also print out the head of the table. + * This can be used to debug the performance of a query by attaching to various tables in an effort to understand the + * update pattern. Optionally, you can also print out the head of the table. *

    * *

    @@ -18,8 +18,7 @@ *

    * *

    - * After you are finished, call the {@link #stop()} method to remove this listener from the source - * table. + * After you are finished, call the {@link #stop()} method to remove this listener from the source table. *

    */ @ScriptApi @@ -57,10 +56,9 @@ public PrintListener(final String description, final DynamicTable table, final i @Override public void onUpdate(final Update upstream) { - System.out.println("Update: " + description + ": " + table.size() + "\nAdded rows: " - + upstream.added.size() + ", Removed rows: " + upstream.removed.size() - + ", Modified Rows: " + upstream.modified.size() + ", Shifted Rows: " - + upstream.shifted.getEffectiveSize() + "\nUpdate:" + upstream); + System.out.println("Update: " + description + ": " + table.size() + "\nAdded rows: " + upstream.added.size() + + ", Removed rows: " + upstream.removed.size() + ", Modified Rows: " + upstream.modified.size() + + ", Shifted Rows: " + upstream.shifted.getEffectiveSize() + "\nUpdate:" + upstream); if (rowCount > 0) { TableTools.showWithIndex(table, rowCount); } @@ -68,7 +66,7 @@ public void onUpdate(final Update upstream) { @Override public void onFailureInternal(Throwable originalException, - io.deephaven.db.v2.utils.UpdatePerformanceTracker.Entry sourceEntry) { + io.deephaven.db.v2.utils.UpdatePerformanceTracker.Entry sourceEntry) { System.out.println("Error for: " + description); originalException.printStackTrace(); } diff --git a/DB/src/main/java/io/deephaven/db/util/PythonDeephavenSession.java b/DB/src/main/java/io/deephaven/db/util/PythonDeephavenSession.java index d86799be6b7..8be7116ab1a 100644 --- a/DB/src/main/java/io/deephaven/db/util/PythonDeephavenSession.java +++ b/DB/src/main/java/io/deephaven/db/util/PythonDeephavenSession.java @@ -35,16 +35,16 @@ /** * A ScriptSession that uses a JPy cpython interpreter internally. * - * This is used for persistent queries or the DB console; Python code running remotely uses - * WorkerPythonEnvironment for it's supporting structures. + * This is used for persistent queries or the DB console; Python code running remotely uses WorkerPythonEnvironment for + * it's supporting structures. 
*/ public class PythonDeephavenSession extends AbstractScriptSession implements ScriptSession { private static final Logger log = LoggerFactory.getLogger(PythonDeephavenSession.class); private static final String DEFAULT_SCRIPT_PATH = Configuration.getInstance() - .getProperty("PythonDeephavenSession.defaultScriptPath") - .replace("", Configuration.getInstance().getDevRootPath()) - .replace("", Configuration.getInstance().getWorkspacePath()); + .getProperty("PythonDeephavenSession.defaultScriptPath") + .replace("", Configuration.getInstance().getDevRootPath()) + .replace("", Configuration.getInstance().getWorkspacePath()); public static String SCRIPT_TYPE = "Python"; @@ -69,8 +69,7 @@ public PythonDeephavenSession(boolean runInitScripts) throws IOException { * @param isDefaultScriptSession true if this is in the default context of a worker jvm * @throws IOException if an IO error occurs running initialization scripts */ - public PythonDeephavenSession(boolean runInitScripts, boolean isDefaultScriptSession) - throws IOException { + public PythonDeephavenSession(boolean runInitScripts, boolean isDefaultScriptSession) throws IOException { super(isDefaultScriptSession); JpyInit.init(log); @@ -88,8 +87,7 @@ public PythonDeephavenSession(boolean runInitScripts, boolean isDefaultScriptSes * And now the user-defined initialization scripts, if any. */ if (runInitScripts) { - String[] scripts = Configuration.getInstance() - .getProperty("PythonDeephavenSession.initScripts").split(","); + String[] scripts = Configuration.getInstance().getProperty("PythonDeephavenSession.initScripts").split(","); for (String script : scripts) { runScript(script); @@ -106,8 +104,8 @@ public PythonDeephavenSession(boolean runInitScripts, boolean isDefaultScriptSes } /** - * Creates a Python "{@link ScriptSession}", for use where we should only be reading from the - * scope, such as an IPython kernel session. 
+ * Creates a Python "{@link ScriptSession}", for use where we should only be reading from the scope, such as an + * IPython kernel session. */ public PythonDeephavenSession(PythonScope scope) { super(false); @@ -125,8 +123,8 @@ public QueryScope newQueryScope() { } /** - * Finds the specified script; and runs it as a file, or if it is a stream writes it to a - * temporary file in order to run it. + * Finds the specified script; and runs it as a file, or if it is a stream writes it to a temporary file in order to + * run it. * * @param script the script's name * @throws IOException if an error occurs reading or writing the script @@ -153,16 +151,15 @@ private void runScript(String script) throws IOException { @Override public Object getVariable(String name) throws QueryScope.MissingVariableException { return scope - .getValue(name) - .orElseThrow( - () -> new QueryScope.MissingVariableException("No global variable for: " + name)); + .getValue(name) + .orElseThrow(() -> new QueryScope.MissingVariableException("No global variable for: " + name)); } @Override public T getVariable(String name, T defaultValue) { return scope - .getValueUnchecked(name) - .orElse(defaultValue); + .getValueUnchecked(name) + .orElse(defaultValue); } @Override @@ -173,8 +170,7 @@ protected void evaluate(String command, String scriptName) { evaluator.evalScript(command); }); } catch (InterruptedException e) { - throw new QueryCancellationException( - e.getMessage() != null ? e.getMessage() : "Query interrupted", e); + throw new QueryCancellationException(e.getMessage() != null ? 
e.getMessage() : "Query interrupted", e); } } @@ -214,7 +210,7 @@ public String scriptType() { @Override public void onApplicationInitializationBegin(Supplier pathLoader, - ScriptPathLoaderState scriptLoaderState) {} + ScriptPathLoaderState scriptLoaderState) {} @Override public void onApplicationInitializationEnd() {} @@ -230,8 +226,8 @@ public boolean setUseOriginalScriptLoaderState(boolean useOriginal) { return true; } - // TODO core#41 move this logic into the python console instance or scope like this - can go - // further and move isWidget too + // TODO core#41 move this logic into the python console instance or scope like this - can go further and move + // isWidget too @Override public Object unwrapObject(Object object) { if (object instanceof PyObject) { diff --git a/DB/src/main/java/io/deephaven/db/util/PythonEvaluatorJpy.java b/DB/src/main/java/io/deephaven/db/util/PythonEvaluatorJpy.java index e3711c7f6c5..9a6c4fc287f 100644 --- a/DB/src/main/java/io/deephaven/db/util/PythonEvaluatorJpy.java +++ b/DB/src/main/java/io/deephaven/db/util/PythonEvaluatorJpy.java @@ -9,18 +9,18 @@ import org.jpy.PyObject; /** - * The sole implementation of the {@link PythonEvaluator}, using Jpy to create a cpython interpreter - * instance inside of our JVM. + * The sole implementation of the {@link PythonEvaluator}, using Jpy to create a cpython interpreter instance inside of + * our JVM. * * Each evaluator has their own copy of the globals. */ public class PythonEvaluatorJpy implements PythonEvaluator { public static PythonEvaluatorJpy withGlobalCopy() { - // TODO: We still have to reach into the __main__ dictionary to push classes and import the - // Deephaven quasi-module - // because after we dill the item, the undilled item has a reference to the __main__ - // globals() and not our globals. 
+ // TODO: We still have to reach into the __main__ dictionary to push classes and import the Deephaven + // quasi-module + // because after we dill the item, the undilled item has a reference to the __main__ globals() and not our + // globals. // we want to create a copy of globals, which is then used to execute code for this session return new PythonEvaluatorJpy(PyLib.getMainGlobals().asDict().copy()); @@ -42,8 +42,7 @@ public void evalStatement(String s) { return; } // noinspection EmptyTryBlock - try (final PyObject pyObject = - PyModule.executeCode(s, PyInputMode.STATEMENT, globals, null)) { + try (final PyObject pyObject = PyModule.executeCode(s, PyInputMode.STATEMENT, globals, null)) { } } @@ -62,8 +61,7 @@ public void evalScript(String s) { @Override public void runScript(String scriptFile) throws FileNotFoundException { // noinspection EmptyTryBlock - try (final PyObject pyObject = - PyModule.executeScript(scriptFile, PyInputMode.SCRIPT, globals, null)) { + try (final PyObject pyObject = PyModule.executeScript(scriptFile, PyInputMode.SCRIPT, globals, null)) { } } diff --git a/DB/src/main/java/io/deephaven/db/util/PythonLogAdapter.java b/DB/src/main/java/io/deephaven/db/util/PythonLogAdapter.java index 39a93fe8298..bddbe3f4f59 100644 --- a/DB/src/main/java/io/deephaven/db/util/PythonLogAdapter.java +++ b/DB/src/main/java/io/deephaven/db/util/PythonLogAdapter.java @@ -3,9 +3,9 @@ import java.io.PrintStream; /** - * This class is stored in the sys.stdout and sys.stderr variables inside of a Python session, so - * that we can intercept the Python session's output, rather than having it all go to the system - * stdout/stderr streams, which are not accessible to the console. + * This class is stored in the sys.stdout and sys.stderr variables inside of a Python session, so that we can intercept + * the Python session's output, rather than having it all go to the system stdout/stderr streams, which are not + * accessible to the console. 
*/ class PythonLogAdapter { private final PrintStream out; @@ -17,10 +17,9 @@ private PythonLogAdapter(PrintStream out) { /** * This method is used from Python so that we appear as a stream. * - * We don't want to write the trailing newline, as the Logger implementation will do that for - * us. If there is no newline; we need to remember that we added one, so that we can suppress - * the next empty newline. If there was a newline, we shouldn't suppress it (e.g., when printing - * just a blank line to the output we need to preserve it). + * We don't want to write the trailing newline, as the Logger implementation will do that for us. If there is no + * newline; we need to remember that we added one, so that we can suppress the next empty newline. If there was a + * newline, we shouldn't suppress it (e.g., when printing just a blank line to the output we need to preserve it). * * @param s the string to write * @return the number of characters written @@ -46,11 +45,9 @@ public void flush() { // https://docs.python.org/2/library/io.html#io.IOBase.close /** - * If we just allow python to print to it's regular STDOUT and STDERR, then it bypasses the Java - * System.out/err. + * If we just allow python to print to it's regular STDOUT and STDERR, then it bypasses the Java System.out/err. * - * We replace the stdout/stderr with a small log adapter so that console users still get their - * output. + * We replace the stdout/stderr with a small log adapter so that console users still get their output. * * @param pythonHolder the PythonHolder object which we will insert our adapters into */ diff --git a/DB/src/main/java/io/deephaven/db/util/PythonScope.java b/DB/src/main/java/io/deephaven/db/util/PythonScope.java index 944d09f8249..cb0a493b267 100644 --- a/DB/src/main/java/io/deephaven/db/util/PythonScope.java +++ b/DB/src/main/java/io/deephaven/db/util/PythonScope.java @@ -13,8 +13,7 @@ /** * A collection of methods around retrieving objects from the given Python scope. *

    - * The scope is likely coming from some sort of Python dictionary. The scope might be local, global, - * or other. + * The scope is likely coming from some sort of Python dictionary. The scope might be local, global, or other. * * @param the implementation's raw Python object type */ @@ -52,8 +51,8 @@ public interface PythonScope { /** * The helper method to turn a raw key into a string key. *

    - * Note: this assumes that all the keys are strings, which is not always true. Keys can also be - * tuples. TODO: revise interface as appropriate if this becomes an issue. + * Note: this assumes that all the keys are strings, which is not always true. Keys can also be tuples. TODO: revise + * interface as appropriate if this becomes an issue. * * @param key the raw key * @return the string key @@ -64,8 +63,8 @@ public interface PythonScope { /** * The helper method to turn a raw value into an implementation specific object. *

    - * This method should NOT convert PyObj of None type to null - we need to preserve the None - * object so it works with other Optional return values. + * This method should NOT convert PyObj of None type to null - we need to preserve the None object so it works with + * other Optional return values. * * @param value the raw value * @return the converted object value @@ -90,7 +89,7 @@ default boolean containsKey(String name) { */ default Optional getValue(String name) { return getValueRaw(name) - .map(this::convertValue); + .map(this::convertValue); } /** @@ -103,7 +102,7 @@ default Optional getValue(String name) { */ default Optional getValue(String name, Class clazz) { return getValue(name) - .map(clazz::cast); + .map(clazz::cast); } /** @@ -116,7 +115,7 @@ default Optional getValue(String name, Class clazz) { default Optional getValueUnchecked(String name) { // noinspection unchecked return getValue(name) - .map(x -> (T) x); + .map(x -> (T) x); } /** @@ -126,19 +125,18 @@ default Optional getValueUnchecked(String name) { */ default Stream getKeys() { return getKeysRaw() - .map(this::convertStringKey); + .map(this::convertStringKey); } /** - * Equivalent to {@link #getEntriesRaw()}, where the keys have been converted via - * {@link #convertStringKey(PyObj)} and the values via {@link #convertValue(PyObj)} + * Equivalent to {@link #getEntriesRaw()}, where the keys have been converted via {@link #convertStringKey(PyObj)} + * and the values via {@link #convertValue(PyObj)} * * @return the string keys and converted values */ default Stream> getEntries() { return getEntriesRaw() - .map(e -> new SimpleImmutableEntry<>(convertStringKey(e.getKey()), - convertValue(e.getValue()))); + .map(e -> new SimpleImmutableEntry<>(convertStringKey(e.getKey()), convertValue(e.getValue()))); } /** @@ -148,7 +146,7 @@ default Stream> getEntries() { */ default Collection getKeysCollection() { return getKeys() - .collect(Collectors.toList()); + .collect(Collectors.toList()); } /** @@ 
-158,14 +156,13 @@ default Collection getKeysCollection() { */ default Map getEntriesMap() { return getEntries() - .collect(Collectors.toMap(Entry::getKey, Entry::getValue)); + .collect(Collectors.toMap(Entry::getKey, Entry::getValue)); // we're currently making sure that we don't convert None to null... /* * // workaround since the collector doesn't work w/ null values // - * https://bugs.openjdk.java.net/browse/JDK-8148463 return getEntries() .collect( - * HashMap::new, (map, entry) -> map.put(entry.getKey(), entry.getValue()), - * HashMap::putAll); + * https://bugs.openjdk.java.net/browse/JDK-8148463 return getEntries() .collect( HashMap::new, (map, entry) -> + * map.put(entry.getKey(), entry.getValue()), HashMap::putAll); */ } diff --git a/DB/src/main/java/io/deephaven/db/util/PythonScopeJpyImpl.java b/DB/src/main/java/io/deephaven/db/util/PythonScopeJpyImpl.java index 767393ceb84..5f55a7b484c 100644 --- a/DB/src/main/java/io/deephaven/db/util/PythonScopeJpyImpl.java +++ b/DB/src/main/java/io/deephaven/db/util/PythonScopeJpyImpl.java @@ -13,8 +13,7 @@ public class PythonScopeJpyImpl implements PythonScope { private final PyDictWrapper dict; private static final PyObject NUMBA_VECTORIZED_FUNC_TYPE = getNumbaVectorizedFuncType(); - // this assumes that the Python interpreter won't be re-initialized during a session, if this - // turns out to be a + // this assumes that the Python interpreter won't be re-initialized during a session, if this turns out to be a // false assumption, then we'll need to make this initialization code 'python restart' proof. private static PyObject getNumbaVectorizedFuncType() { try { @@ -59,7 +58,7 @@ public boolean containsKey(String name) { public String convertStringKey(PyObject key) { if (!key.isString()) { throw new IllegalArgumentException( - "Found non-string key! Expecting only string keys. " + key.toString()); + "Found non-string key! Expecting only string keys. 
" + key.toString()); } return key.toString(); } @@ -73,9 +72,8 @@ public Object convertValue(PyObject value) { } /** - * When given a pyObject that is a callable, we stick it inside the callable wrapper, which - * implements a call() varargs method, so that we can call it using __call__ without all of the - * JPy nastiness. + * When given a pyObject that is a callable, we stick it inside the callable wrapper, which implements a call() + * varargs method, so that we can call it using __call__ without all of the JPy nastiness. */ public static class CallableWrapper { private PyObject pyObject; @@ -117,8 +115,7 @@ private static CallableWrapper wrapCallable(PyObject pyObject) { if (pyObject.getType().equals(NUMBA_VECTORIZED_FUNC_TYPE)) { List params = pyObject.getAttribute("types").asList(); if (params.isEmpty()) { - throw new IllegalArgumentException( - "numba vectorized function must have an explicit signature."); + throw new IllegalArgumentException("numba vectorized function must have an explicit signature."); } // numba allows a vectorized function to have multiple signatures, only the first one // will be accepted by DH @@ -129,8 +126,7 @@ private static CallableWrapper wrapCallable(PyObject pyObject) { } } - private static final Map numpyType2JavaClass = - new HashMap(); + private static final Map numpyType2JavaClass = new HashMap(); { numpyType2JavaClass.put('i', int.class); numpyType2JavaClass.put('l', long.class); @@ -150,7 +146,7 @@ private static CallableWrapper parseNumbaVectorized(PyObject pyObject, String nu Class returnType = numpyType2JavaClass.get(numpyTypeCode); if (returnType == null) { throw new IllegalArgumentException( - "numba vectorized functions must have an integral, floating point, or boolean return type."); + "numba vectorized functions must have an integral, floating point, or boolean return type."); } List paramTypes = new ArrayList<>(); @@ -159,7 +155,7 @@ private static CallableWrapper parseNumbaVectorized(PyObject pyObject, String nu 
Class paramType = numpyType2JavaClass.get(numpyTypeChar); if (paramType == null) { throw new IllegalArgumentException( - "parameters of numba vectorized functions must be of integral, floating point, or boolean type."); + "parameters of numba vectorized functions must be of integral, floating point, or boolean type."); } paramTypes.add(numpyType2JavaClass.get(numpyTypeChar)); } else { @@ -168,8 +164,7 @@ private static CallableWrapper parseNumbaVectorized(PyObject pyObject, String nu } if (paramTypes.size() == 0) { - throw new IllegalArgumentException( - "numba vectorized functions must have at least one argument."); + throw new IllegalArgumentException("numba vectorized functions must have at least one argument."); } return new NumbaCallableWrapper(pyObject, returnType, paramTypes); } diff --git a/DB/src/main/java/io/deephaven/db/util/ScalaDeephavenSession.java b/DB/src/main/java/io/deephaven/db/util/ScalaDeephavenSession.java index e34303d72a3..8c813547577 100644 --- a/DB/src/main/java/io/deephaven/db/util/ScalaDeephavenSession.java +++ b/DB/src/main/java/io/deephaven/db/util/ScalaDeephavenSession.java @@ -79,8 +79,7 @@ public void error(Position pos, String msg) { } } - public ScalaDeephavenSession(@SuppressWarnings("unused") boolean runInitScripts, - boolean isDefaultScriptSession) { + public ScalaDeephavenSession(@SuppressWarnings("unused") boolean runInitScripts, boolean isDefaultScriptSession) { super(isDefaultScriptSession); errorHandler = new ErrorHandler(); @@ -105,8 +104,8 @@ public Either compile(String line, boolean synthetic) { setVariable("log", log); - // Our first valueOfTerm will try to evaluate the Java classes, but there is a scala problem - // with Generics and inners. + // Our first valueOfTerm will try to evaluate the Java classes, but there is a scala problem with Generics and + // inners. 
interpreter.beSilentDuring(() -> { interpreter.valueOfTerm("log"); return null; @@ -141,10 +140,9 @@ protected void evaluate(String command, @Nullable String scriptName) { Results.Result result; try { result = LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLockedInterruptibly(() -> interpreter.interpret(command)); + .computeLockedInterruptibly(() -> interpreter.interpret(command)); } catch (InterruptedException e) { - throw new QueryCancellationException( - e.getMessage() != null ? e.getMessage() : "Query interrupted", e); + throw new QueryCancellationException(e.getMessage() != null ? e.getMessage() : "Query interrupted", e); } if (!(result instanceof Results.Success$)) { @@ -152,24 +150,22 @@ protected void evaluate(String command, @Nullable String scriptName) { if (reporter.lastError != null) { throw new RuntimeException("Could not evaluate command: " + reporter.lastError); } else if (reporter.lastUntruncated != null) { - throw new RuntimeException( - "Could not evaluate command: " + reporter.lastUntruncated); + throw new RuntimeException("Could not evaluate command: " + reporter.lastUntruncated); } else { throw new RuntimeException("Could not evaluate command, unknown error!"); } } else if (result instanceof Results.Incomplete$) { throw new IllegalStateException("Incomplete line"); } else { - throw new IllegalStateException("Bad result type: " - + result.getClass().getCanonicalName() + " (" + result + ")"); + throw new IllegalStateException( + "Bad result type: " + result.getClass().getCanonicalName() + " (" + result + ")"); } } } @Override public Map getVariables() { - Collection termNames = - JavaConverters.asJavaCollection(interpreter.definedTerms()); + Collection termNames = JavaConverters.asJavaCollection(interpreter.definedTerms()); Map variableMap = new HashMap<>(); for (Names.TermName termName : termNames) { final String name = termName.toString(); @@ -181,8 +177,7 @@ public Map getVariables() { @Override public Set getVariableNames() { - return 
Collections - .unmodifiableSet(JavaConverters.asJavaCollection(interpreter.definedTerms()) + return Collections.unmodifiableSet(JavaConverters.asJavaCollection(interpreter.definedTerms()) .stream() .map(Names.TermName::toString) .collect(Collectors.toSet())); @@ -196,12 +191,11 @@ public boolean hasVariableName(String name) { @Override public void setVariable(String name, Object value) { if (value == null) { - interpreter.beQuietDuring(() -> interpreter - .bind(new NamedParamClass(name, Object.class.getCanonicalName(), null))); + interpreter.beQuietDuring( + () -> interpreter.bind(new NamedParamClass(name, Object.class.getCanonicalName(), null))); } else { final String type = value.getClass().getCanonicalName(); - interpreter - .beQuietDuring(() -> interpreter.bind(new NamedParamClass(name, type, value))); + interpreter.beQuietDuring(() -> interpreter.bind(new NamedParamClass(name, type, value))); } } @@ -212,7 +206,7 @@ public String scriptType() { @Override public void onApplicationInitializationBegin(Supplier pathLoader, - ScriptPathLoaderState scriptLoaderState) {} + ScriptPathLoaderState scriptLoaderState) {} @Override public void onApplicationInitializationEnd() {} diff --git a/DB/src/main/java/io/deephaven/db/util/ScriptFinder.java b/DB/src/main/java/io/deephaven/db/util/ScriptFinder.java index 4d1aa8457bd..9f539e7d4c4 100644 --- a/DB/src/main/java/io/deephaven/db/util/ScriptFinder.java +++ b/DB/src/main/java/io/deephaven/db/util/ScriptFinder.java @@ -69,20 +69,18 @@ InputStream findScript(final String script, final String dbScriptPath) throws IO } else { final Optional fileOptional = fileOrStream.getFile(); Assert.assertion(fileOptional.isPresent(), "fileOptional.isPresent()"); - // noinspection ConstantConditions,OptionalGetWithoutIsPresent -- if we don't have a - // stream we must have a file + // noinspection ConstantConditions,OptionalGetWithoutIsPresent -- if we don't have a stream we must have a + // file return new 
FileInputStream(fileOptional.get()); } } - private FileOrStream findScriptEx(final String script, final String dbScriptPath) - throws IOException { + private FileOrStream findScriptEx(final String script, final String dbScriptPath) throws IOException { /* - * NB: This code is overdue for some cleanup. In practice, there are two modes: (1) local - - * a user runs a local groovy session from IntelliJ or otherwise, and needs to find scripts - * under their devroot. (2) deployed - a groovy session is created from deployed code, in - * which case scripts are only found via the classpath. I had hopes for being able to do - * everything via the classpath, but that doesn't allow for runtime changes without + * NB: This code is overdue for some cleanup. In practice, there are two modes: (1) local - a user runs a local + * groovy session from IntelliJ or otherwise, and needs to find scripts under their devroot. (2) deployed - a + * groovy session is created from deployed code, in which case scripts are only found via the classpath. I had + * hopes for being able to do everything via the classpath, but that doesn't allow for runtime changes without * additional work. */ final String[] paths = (dbScriptPath == null ? 
defaultScriptPath : dbScriptPath).split(";"); @@ -102,7 +100,7 @@ private FileOrStream findScriptEx(final String script, final String dbScriptPath return new FileOrStream(result); } throw new IOException("Can not find script: script=" + script - + ", dbScriptPath=" + (Arrays.toString(paths)) - + ", classpath=" + System.getProperty("java.class.path")); + + ", dbScriptPath=" + (Arrays.toString(paths)) + + ", classpath=" + System.getProperty("java.class.path")); } } diff --git a/DB/src/main/java/io/deephaven/db/util/ScriptSession.java b/DB/src/main/java/io/deephaven/db/util/ScriptSession.java index 893af01d92b..fa53d69c3e0 100644 --- a/DB/src/main/java/io/deephaven/db/util/ScriptSession.java +++ b/DB/src/main/java/io/deephaven/db/util/ScriptSession.java @@ -31,8 +31,7 @@ public interface ScriptSession extends ReleasableLivenessManager, LivenessNode { Object getVariable(String name) throws QueryScope.MissingVariableException; /** - * Retrieve a variable from the script session's bindings. If the variable is not present, - * return defaultValue. + * Retrieve a variable from the script session's bindings. If the variable is not present, return defaultValue. * * If the variable is present, but is not of type (T), a ClassCastException may result. * @@ -44,11 +43,10 @@ public interface ScriptSession extends ReleasableLivenessManager, LivenessNode { T getVariable(String name, T defaultValue); /** - * A {@link VariableProvider} instance, for services like autocomplete which may want a limited - * "just the variables" view of our session state. + * A {@link VariableProvider} instance, for services like autocomplete which may want a limited "just the variables" + * view of our session state. * - * @return a VariableProvider instance backed by the global/binding context of this script - * session. + * @return a VariableProvider instance backed by the global/binding context of this script session. 
*/ VariableProvider getVariableProvider(); @@ -61,8 +59,8 @@ class Changes { } /** - * Evaluates the script and manages liveness of objects that are exported to the user. This - * method should be called from the serial executor as it manipulates static state. + * Evaluates the script and manages liveness of objects that are exported to the user. This method should be called + * from the serial executor as it manipulates static state. * * @param script the code to execute * @return the changes made to the exportable objects @@ -72,19 +70,18 @@ default Changes evaluateScript(String script) { } /** - * Evaluates the script and manages liveness of objects that are exported to the user. This - * method should be called from the serial executor as it manipulates static state. + * Evaluates the script and manages liveness of objects that are exported to the user. This method should be called + * from the serial executor as it manipulates static state. * * @param script the code to execute - * @param scriptName an optional script name, which may be ignored by the implementation, or - * used improve error messages or for other internal purposes + * @param scriptName an optional script name, which may be ignored by the implementation, or used improve error + * messages or for other internal purposes * @return the changes made to the exportable objects */ Changes evaluateScript(String script, @Nullable String scriptName); /** - * Retrieves all of the variables present in the session's scope (e.g., Groovy binding, Python - * globals()). + * Retrieves all of the variables present in the session's scope (e.g., Groovy binding, Python globals()). 
* * @return an unmodifiable map with variable names as the keys, and the Objects as the result */ @@ -119,8 +116,8 @@ default Changes evaluateScript(String script) { String scriptType(); /** - * If this script session can throw unserializable exceptions, this method is responsible for - * turning those exceptions into something suitable for sending back to a client. + * If this script session can throw unserializable exceptions, this method is responsible for turning those + * exceptions into something suitable for sending back to a client. * * @param e the exception to (possibly) sanitize * @return the sanitized exception @@ -130,11 +127,10 @@ default Throwable sanitizeThrowable(Throwable e) { } /** - * Called before Application initialization, should setup sourcing from the controller (as - * required). + * Called before Application initialization, should setup sourcing from the controller (as required). */ void onApplicationInitializationBegin(Supplier pathLoader, - ScriptPathLoaderState scriptLoaderState); + ScriptPathLoaderState scriptLoaderState); /** * Called after Application initialization. @@ -155,16 +151,15 @@ void onApplicationInitializationBegin(Supplier pathLoader, void clearScriptPathLoader(); /** - * Informs the session whether or not we should be using the original ScriptLoaderState for - * source commands. + * Informs the session whether or not we should be using the original ScriptLoaderState for source commands. * * @param useOriginal whether to use the script loader state at persistent query initialization */ boolean setUseOriginalScriptLoaderState(boolean useOriginal); /** - * Asks the session to remove any wrapping that exists on scoped objects so that clients can - * fetch them. Defaults to returning the object itself. + * Asks the session to remove any wrapping that exists on scoped objects so that clients can fetch them. Defaults to + * returning the object itself. 
* * @param object the scoped object * @return an obj which can be consumed by a client diff --git a/DB/src/main/java/io/deephaven/db/util/SortedBy.java b/DB/src/main/java/io/deephaven/db/util/SortedBy.java index 1defbf106c6..145e7bde749 100644 --- a/DB/src/main/java/io/deephaven/db/util/SortedBy.java +++ b/DB/src/main/java/io/deephaven/db/util/SortedBy.java @@ -12,9 +12,8 @@ import java.util.Collection; /** - * SortedBy operations sort the values in each of the buckets according to a specified column. The - * sortedFirstBy returns the row with the lowest value and sortedLastBy returns the row with the - * greatest value. + * SortedBy operations sort the values in each of the buckets according to a specified column. The sortedFirstBy returns + * the row with the lowest value and sortedLastBy returns the row with the greatest value. */ @ScriptApi public class SortedBy { @@ -56,12 +55,11 @@ public static Table sortedFirstBy(@NotNull Table input, @NotNull String[] sortCo * @param sortColumnName the name of the column to sort by * @param groupByColumns the columns to group by * - * @return a new table containing the rows with the lowest value of the sort column for each - * grouping key + * @return a new table containing the rows with the lowest value of the sort column for each grouping key */ @NotNull public static Table sortedFirstBy(@NotNull Table input, @NotNull String sortColumnName, - @NotNull String... groupByColumns) { + @NotNull String... 
groupByColumns) { return input.by(new SortedFirstBy(sortColumnName), groupByColumns); } @@ -72,12 +70,11 @@ public static Table sortedFirstBy(@NotNull Table input, @NotNull String sortColu * @param sortColumnNames the names of the column to sort by * @param groupByColumns the columns to group by * - * @return a new table containing the rows with the lowest value of the sort columns for each - * grouping key + * @return a new table containing the rows with the lowest value of the sort columns for each grouping key */ @NotNull public static Table sortedFirstBy(@NotNull Table input, @NotNull String[] sortColumnNames, - @NotNull String... groupByColumns) { + @NotNull String... groupByColumns) { return input.by(new SortedFirstBy(sortColumnNames), groupByColumns); } @@ -88,14 +85,12 @@ public static Table sortedFirstBy(@NotNull Table input, @NotNull String[] sortCo * @param sortColumnName the name of the column to sort by * @param groupByColumns the columns to group by * - * @return a new table containing the rows with the lowest value of the sort column for each - * grouping key + * @return a new table containing the rows with the lowest value of the sort column for each grouping key */ @NotNull public static Table sortedFirstBy(@NotNull Table input, @NotNull String sortColumnName, - @NotNull Collection groupByColumns) { - return input.by(new SortedFirstBy(sortColumnName), - SelectColumnFactory.getExpressions(groupByColumns)); + @NotNull Collection groupByColumns) { + return input.by(new SortedFirstBy(sortColumnName), SelectColumnFactory.getExpressions(groupByColumns)); } /** @@ -105,15 +100,13 @@ public static Table sortedFirstBy(@NotNull Table input, @NotNull String sortColu * @param sortColumnNames the names of the column to sort by * @param groupByColumns the columns to group by * - * @return a new table containing the rows with the lowest value of the sort columns for each - * grouping key + * @return a new table containing the rows with the lowest value of the 
sort columns for each grouping key */ @NotNull - public static Table sortedFirstBy(@NotNull Table input, - @NotNull Collection sortColumnNames, @NotNull Collection groupByColumns) { - return input.by( - new SortedFirstBy(sortColumnNames.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)), - SelectColumnFactory.getExpressions(groupByColumns)); + public static Table sortedFirstBy(@NotNull Table input, @NotNull Collection sortColumnNames, + @NotNull Collection groupByColumns) { + return input.by(new SortedFirstBy(sortColumnNames.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)), + SelectColumnFactory.getExpressions(groupByColumns)); } /** @@ -123,12 +116,11 @@ public static Table sortedFirstBy(@NotNull Table input, * @param sortColumnName the name of the column to sort by * @param groupByColumns the columns to group by * - * @return a new table containing the rows with the lowest value of the sort column for each - * grouping key + * @return a new table containing the rows with the lowest value of the sort column for each grouping key */ @NotNull public static Table sortedFirstBy(@NotNull Table input, @NotNull String sortColumnName, - @NotNull SelectColumn... groupByColumns) { + @NotNull SelectColumn... groupByColumns) { return input.by(new SortedFirstBy(sortColumnName), groupByColumns); } @@ -139,12 +131,11 @@ public static Table sortedFirstBy(@NotNull Table input, @NotNull String sortColu * @param sortColumnNames the names of the column to sort by * @param groupByColumns the columns to group by * - * @return a new table containing the rows with the lowest value of the sort columns for each - * grouping key + * @return a new table containing the rows with the lowest value of the sort columns for each grouping key */ @NotNull public static Table sortedFirstBy(@NotNull Table input, @NotNull String[] sortColumnNames, - @NotNull SelectColumn... groupByColumns) { + @NotNull SelectColumn... 
groupByColumns) { return input.by(new SortedFirstBy(sortColumnNames), groupByColumns); } @@ -181,12 +172,11 @@ public static Table sortedLastBy(@NotNull Table input, @NotNull String[] sortCol * @param sortColumnName the name of the column to sort by * @param groupByColumns the columns to group by * - * @return a new table containing the rows with the greatest value of the sort column for each - * grouping key + * @return a new table containing the rows with the greatest value of the sort column for each grouping key */ @NotNull public static Table sortedLastBy(@NotNull Table input, @NotNull String sortColumnName, - @NotNull String... groupByColumns) { + @NotNull String... groupByColumns) { return input.by(new SortedLastBy(sortColumnName), groupByColumns); } @@ -197,12 +187,11 @@ public static Table sortedLastBy(@NotNull Table input, @NotNull String sortColum * @param sortColumnNames the names of the column to sort by * @param groupByColumns the columns to group by * - * @return a new table containing the rows with the greatest value of the sort columns for each - * grouping key + * @return a new table containing the rows with the greatest value of the sort columns for each grouping key */ @NotNull public static Table sortedLastBy(@NotNull Table input, @NotNull String[] sortColumnNames, - @NotNull String... groupByColumns) { + @NotNull String... 
groupByColumns) { return input.by(new SortedLastBy(sortColumnNames), groupByColumns); } @@ -213,14 +202,12 @@ public static Table sortedLastBy(@NotNull Table input, @NotNull String[] sortCol * @param sortColumnName the name of the column to sort by * @param groupByColumns the columns to group by * - * @return a new table containing the rows with the greatest value of the sort column for each - * grouping key + * @return a new table containing the rows with the greatest value of the sort column for each grouping key */ @NotNull public static Table sortedLastBy(@NotNull Table input, @NotNull String sortColumnName, - @NotNull Collection groupByColumns) { - return input.by(new SortedLastBy(sortColumnName), - SelectColumnFactory.getExpressions(groupByColumns)); + @NotNull Collection groupByColumns) { + return input.by(new SortedLastBy(sortColumnName), SelectColumnFactory.getExpressions(groupByColumns)); } /** @@ -230,15 +217,13 @@ public static Table sortedLastBy(@NotNull Table input, @NotNull String sortColum * @param sortColumnNames the names of the column to sort by * @param groupByColumns the columns to group by * - * @return a new table containing the rows with the greatest value of the sort columns for each - * grouping key + * @return a new table containing the rows with the greatest value of the sort columns for each grouping key */ @NotNull - public static Table sortedLastBy(@NotNull Table input, - @NotNull Collection sortColumnNames, @NotNull Collection groupByColumns) { - return input.by( - new SortedLastBy(sortColumnNames.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)), - SelectColumnFactory.getExpressions(groupByColumns)); + public static Table sortedLastBy(@NotNull Table input, @NotNull Collection sortColumnNames, + @NotNull Collection groupByColumns) { + return input.by(new SortedLastBy(sortColumnNames.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)), + SelectColumnFactory.getExpressions(groupByColumns)); } /** @@ -248,12 +233,11 @@ public static Table 
sortedLastBy(@NotNull Table input, * @param sortColumnName the name of the column to sort by * @param groupByColumns the columns to group by * - * @return a new table containing the rows with the greatest value of the sort column for each - * grouping key + * @return a new table containing the rows with the greatest value of the sort column for each grouping key */ @NotNull public static Table sortedLastBy(@NotNull Table input, @NotNull String sortColumnName, - @NotNull SelectColumn... groupByColumns) { + @NotNull SelectColumn... groupByColumns) { return input.by(new SortedLastBy(sortColumnName), groupByColumns); } @@ -264,12 +248,11 @@ public static Table sortedLastBy(@NotNull Table input, @NotNull String sortColum * @param sortColumnNames the names of the column to sort by * @param groupByColumns the columns to group by * - * @return a new table containing the rows with the greatest value of the sort columns for each - * grouping key + * @return a new table containing the rows with the greatest value of the sort columns for each grouping key */ @NotNull public static Table sortedLastBy(@NotNull Table input, @NotNull String[] sortColumnNames, - @NotNull SelectColumn... groupByColumns) { + @NotNull SelectColumn... groupByColumns) { return input.by(new SortedLastBy(sortColumnNames), groupByColumns); } } diff --git a/DB/src/main/java/io/deephaven/db/util/SourceClosure.java b/DB/src/main/java/io/deephaven/db/util/SourceClosure.java index f3505f1b08b..09b12906b51 100644 --- a/DB/src/main/java/io/deephaven/db/util/SourceClosure.java +++ b/DB/src/main/java/io/deephaven/db/util/SourceClosure.java @@ -10,8 +10,8 @@ import java.util.Map; /** - * This class represents uses of the source() and sourceOnce() method calls. It will use the - * underlying scriptPathLoader to get the script text to use. + * This class represents uses of the source() and sourceOnce() method calls. It will use the underlying scriptPathLoader + * to get the script text to use. 
*/ public class SourceClosure extends Closure { private final ScriptPathLoader scriptPathLoader; @@ -21,8 +21,8 @@ public class SourceClosure extends Closure { // Attempt to cache scripts as long as memory permits. private final Map> scriptCache; - public SourceClosure(final GroovyDeephavenSession groovySession, - final ScriptPathLoader scriptPathLoader, final boolean sourceOnce, final boolean caching) { + public SourceClosure(final GroovyDeephavenSession groovySession, final ScriptPathLoader scriptPathLoader, + final boolean sourceOnce, final boolean caching) { super(groovySession, null); this.scriptPathLoader = scriptPathLoader; this.sourceOnce = sourceOnce; diff --git a/DB/src/main/java/io/deephaven/db/util/TickSuppressor.java b/DB/src/main/java/io/deephaven/db/util/TickSuppressor.java index d06ef2af8dc..f0efd424c2b 100644 --- a/DB/src/main/java/io/deephaven/db/util/TickSuppressor.java +++ b/DB/src/main/java/io/deephaven/db/util/TickSuppressor.java @@ -22,25 +22,23 @@ public class TickSuppressor { private TickSuppressor() {} // static use only /** - * For shift aware listeners, the modified column set cannot mark particular rows and columns - * dirty; only all of the columns in the modified rows. However, rows can be both removed and - * added and those rows do not affect the modified column set. + * For shift aware listeners, the modified column set cannot mark particular rows and columns dirty; only all of the + * columns in the modified rows. However, rows can be both removed and added and those rows do not affect the + * modified column set. * *

    - * If you have a table that has a small number of modified rows with many modified columns; and - * join on a right-hand side that modifies many rows, but few columns; downstream operations - * must treat all rows and columns in the cross product as modified. + * If you have a table that has a small number of modified rows with many modified columns; and join on a right-hand + * side that modifies many rows, but few columns; downstream operations must treat all rows and columns in the cross + * product as modified. *

    * *

    - * This utility function will convert all modified rows to added and removed rows, such that - * downstream operations can modify rows without additionally marking the columns of this table - * dirty. + * This utility function will convert all modified rows to added and removed rows, such that downstream operations + * can modify rows without additionally marking the columns of this table dirty. *

    * * @param input an input table - * @return an output table that will produce no modified rows, but rather adds and removes - * instead + * @return an output table that will produce no modified rows, but rather adds and removes instead */ public static Table convertModificationsToAddsAndRemoves(Table input) { if (!input.isLive()) { @@ -50,11 +48,11 @@ public static Table convertModificationsToAddsAndRemoves(Table input) { LiveTableMonitor.DEFAULT.checkInitiateTableOperation(); final QueryTable resultTable = - new QueryTable(input.getDefinition(), input.getIndex(), input.getColumnSourceMap()); + new QueryTable(input.getDefinition(), input.getIndex(), input.getColumnSourceMap()); ((BaseTable) input).copyAttributes(resultTable, BaseTable.CopyAttributeOperation.Filter); final BaseTable.ShiftAwareListenerImpl listener = new BaseTable.ShiftAwareListenerImpl( - "convertModificationsToAddsAndRemoves", (DynamicTable) input, resultTable) { + "convertModificationsToAddsAndRemoves", (DynamicTable) input, resultTable) { @Override public void onUpdate(Update upstream) { final Update downstream = upstream.copy(); @@ -73,23 +71,22 @@ public void onUpdate(Update upstream) { * Removes spurious modifications from an update. * *

    - * The Deephaven query engine guarantees that any row or column that has been modified, must be - * marked modified in an update. However, for efficiency, it does not guarantee that only rows - * with changed data are marked as modified. There are cases where a query writer would like to - * remove spurious modifications. For example if a downstream listener is sending network - * messages eliminating additional messages may be worthwhile. + * The Deephaven query engine guarantees that any row or column that has been modified, must be marked modified in + * an update. However, for efficiency, it does not guarantee that only rows with changed data are marked as + * modified. There are cases where a query writer would like to remove spurious modifications. For example if a + * downstream listener is sending network messages eliminating additional messages may be worthwhile. *

    * *

    - * This function produces a new query table with the same contents as the original query table. - * For each modified row and column, if a row has not actually been modified or a column has no - * modifications; then remove the modification from the downstream update. + * This function produces a new query table with the same contents as the original query table. For each modified + * row and column, if a row has not actually been modified or a column has no modifications; then remove the + * modification from the downstream update. *

    * * @param input an input table * - * @return an output table where the set of modified rows and columns is restricted to cells - * where current and previous values are not identical + * @return an output table where the set of modified rows and columns is restricted to cells where current and + * previous values are not identical */ public static Table removeSpuriousModifications(Table input) { if (!input.isLive()) { @@ -99,142 +96,129 @@ public static Table removeSpuriousModifications(Table input) { LiveTableMonitor.DEFAULT.checkInitiateTableOperation(); final QueryTable resultTable = - new QueryTable(input.getDefinition(), input.getIndex(), input.getColumnSourceMap()); + new QueryTable(input.getDefinition(), input.getIndex(), input.getColumnSourceMap()); ((BaseTable) input).copyAttributes(resultTable, BaseTable.CopyAttributeOperation.Filter); final String[] columnNames = input.getDefinition().getColumnNamesArray(); - final ModifiedColumnSet[] inputModifiedColumnSets = - new ModifiedColumnSet[columnNames.length]; - final ModifiedColumnSet[] outputModifiedColumnSets = - new ModifiedColumnSet[columnNames.length]; + final ModifiedColumnSet[] inputModifiedColumnSets = new ModifiedColumnSet[columnNames.length]; + final ModifiedColumnSet[] outputModifiedColumnSets = new ModifiedColumnSet[columnNames.length]; final ColumnSource[] inputSources = new ColumnSource[columnNames.length]; final ChunkEquals[] equalityKernel = new ChunkEquals[columnNames.length]; for (int cc = 0; cc < outputModifiedColumnSets.length; ++cc) { - inputModifiedColumnSets[cc] = - ((DynamicTable) input).newModifiedColumnSet(columnNames[cc]); + inputModifiedColumnSets[cc] = ((DynamicTable) input).newModifiedColumnSet(columnNames[cc]); outputModifiedColumnSets[cc] = resultTable.newModifiedColumnSet(columnNames[cc]); inputSources[cc] = input.getColumnSource(columnNames[cc]); equalityKernel[cc] = ChunkEquals.makeEqual(inputSources[cc].getChunkType()); } - final BaseTable.ShiftAwareListenerImpl 
listener = new BaseTable.ShiftAwareListenerImpl( - "removeSpuriousModifications", (DynamicTable) input, resultTable) { - final ModifiedColumnSet.Transformer identityTransformer = - ((DynamicTable) input).newModifiedColumnSetIdentityTransformer(resultTable); + final BaseTable.ShiftAwareListenerImpl listener = + new BaseTable.ShiftAwareListenerImpl("removeSpuriousModifications", (DynamicTable) input, resultTable) { + final ModifiedColumnSet.Transformer identityTransformer = + ((DynamicTable) input).newModifiedColumnSetIdentityTransformer(resultTable); - @Override - public void onUpdate(Update upstream) { - final Update downstream = upstream.copy(); - downstream.modifiedColumnSet = resultTable.getModifiedColumnSetForUpdates(); - - if (downstream.modified.isEmpty()) { - identityTransformer.clearAndTransform(upstream.modifiedColumnSet, - downstream.modifiedColumnSet); - resultTable.notifyListeners(downstream); - return; - } - - final int columnCount = resultTable.getColumnSourceMap().size(); - final int chunkSize = (int) Math.min(1 << 16, downstream.modified.size()); - - final ChunkSource.GetContext[] getContextArray = - new ChunkSource.GetContext[columnCount]; - final ChunkSource.GetContext[] prevContextArray = - new ChunkSource.GetContext[columnCount]; - final WritableBooleanChunk[] changedCellsArray = - new WritableBooleanChunk[columnCount]; - final boolean[] changedColumns = new boolean[columnCount]; - - final Index.SequentialBuilder builder = - Index.CURRENT_FACTORY.getSequentialBuilder(); - - try ( - final SafeCloseableArray ignored = - new SafeCloseableArray<>(getContextArray); - final SafeCloseableArray ignored2 = - new SafeCloseableArray<>(prevContextArray); - final SafeCloseableArray ignored3 = - new SafeCloseableArray<>(changedCellsArray); - final SharedContext currentSharedContext = SharedContext.makeSharedContext(); - final SharedContext prevSharedContext = SharedContext.makeSharedContext(); - final OrderedKeys.Iterator preOkIt = - 
upstream.getModifiedPreShift().getOrderedKeysIterator(); - final OrderedKeys.Iterator postOkIt = - upstream.modified.getOrderedKeysIterator()) { - int changedColumnCount = 0; - for (int cc = 0; cc < columnCount; cc++) { - if (upstream.modifiedColumnSet.containsAny(inputModifiedColumnSets[cc])) { - getContextArray[cc] = - inputSources[cc].makeGetContext(chunkSize, currentSharedContext); - prevContextArray[cc] = - inputSources[cc].makeGetContext(chunkSize, prevSharedContext); - changedCellsArray[cc] = - WritableBooleanChunk.makeWritableChunk(chunkSize); - changedColumnCount++; - } - } - final int[] changedColumnIndices = new int[changedColumnCount]; - int cp = 0; - for (int cc = 0; cc < columnCount; cc++) { - if (upstream.modifiedColumnSet.containsAny(inputModifiedColumnSets[cc])) { - changedColumnIndices[cp++] = cc; + @Override + public void onUpdate(Update upstream) { + final Update downstream = upstream.copy(); + downstream.modifiedColumnSet = resultTable.getModifiedColumnSetForUpdates(); + + if (downstream.modified.isEmpty()) { + identityTransformer.clearAndTransform(upstream.modifiedColumnSet, + downstream.modifiedColumnSet); + resultTable.notifyListeners(downstream); + return; } - } - while (postOkIt.hasMore()) { - try ( - final OrderedKeys postChunkOk = - postOkIt.getNextOrderedKeysWithLength(chunkSize); - final OrderedKeys preChunkOk = - preOkIt.getNextOrderedKeysWithLength(chunkSize)) { - currentSharedContext.reset(); - prevSharedContext.reset(); - - for (final int cc : changedColumnIndices) { - // noinspection unchecked - final Chunk currentValues = - inputSources[cc].getChunk(getContextArray[cc], postChunkOk); - // noinspection unchecked - final Chunk prevValues = - inputSources[cc].getPrevChunk(prevContextArray[cc], preChunkOk); - - // now we need to compare them - equalityKernel[cc].notEqual(currentValues, prevValues, - changedCellsArray[cc]); + final int columnCount = resultTable.getColumnSourceMap().size(); + final int chunkSize = (int) Math.min(1 << 
16, downstream.modified.size()); + + final ChunkSource.GetContext[] getContextArray = new ChunkSource.GetContext[columnCount]; + final ChunkSource.GetContext[] prevContextArray = new ChunkSource.GetContext[columnCount]; + final WritableBooleanChunk[] changedCellsArray = new WritableBooleanChunk[columnCount]; + final boolean[] changedColumns = new boolean[columnCount]; + + final Index.SequentialBuilder builder = Index.CURRENT_FACTORY.getSequentialBuilder(); + + try (final SafeCloseableArray ignored = + new SafeCloseableArray<>(getContextArray); + final SafeCloseableArray ignored2 = + new SafeCloseableArray<>(prevContextArray); + final SafeCloseableArray ignored3 = + new SafeCloseableArray<>(changedCellsArray); + final SharedContext currentSharedContext = SharedContext.makeSharedContext(); + final SharedContext prevSharedContext = SharedContext.makeSharedContext(); + final OrderedKeys.Iterator preOkIt = + upstream.getModifiedPreShift().getOrderedKeysIterator(); + final OrderedKeys.Iterator postOkIt = upstream.modified.getOrderedKeysIterator()) { + int changedColumnCount = 0; + for (int cc = 0; cc < columnCount; cc++) { + if (upstream.modifiedColumnSet.containsAny(inputModifiedColumnSets[cc])) { + getContextArray[cc] = + inputSources[cc].makeGetContext(chunkSize, currentSharedContext); + prevContextArray[cc] = + inputSources[cc].makeGetContext(chunkSize, prevSharedContext); + changedCellsArray[cc] = WritableBooleanChunk.makeWritableChunk(chunkSize); + changedColumnCount++; + } + } + final int[] changedColumnIndices = new int[changedColumnCount]; + int cp = 0; + for (int cc = 0; cc < columnCount; cc++) { + if (upstream.modifiedColumnSet.containsAny(inputModifiedColumnSets[cc])) { + changedColumnIndices[cp++] = cc; + } } - final MutableInt pos = new MutableInt(0); - postChunkOk.forAllLongs((idx) -> { - boolean idxChanged = false; - for (final int cc : changedColumnIndices) { - if (changedCellsArray[cc].get(pos.intValue())) { - idxChanged = changedColumns[cc] = true; + 
while (postOkIt.hasMore()) { + try (final OrderedKeys postChunkOk = postOkIt.getNextOrderedKeysWithLength(chunkSize); + final OrderedKeys preChunkOk = + preOkIt.getNextOrderedKeysWithLength(chunkSize)) { + currentSharedContext.reset(); + prevSharedContext.reset(); + + for (final int cc : changedColumnIndices) { + // noinspection unchecked + final Chunk currentValues = + inputSources[cc].getChunk(getContextArray[cc], postChunkOk); + // noinspection unchecked + final Chunk prevValues = + inputSources[cc].getPrevChunk(prevContextArray[cc], preChunkOk); + + // now we need to compare them + equalityKernel[cc].notEqual(currentValues, prevValues, changedCellsArray[cc]); } + + final MutableInt pos = new MutableInt(0); + postChunkOk.forAllLongs((idx) -> { + boolean idxChanged = false; + for (final int cc : changedColumnIndices) { + if (changedCellsArray[cc].get(pos.intValue())) { + idxChanged = changedColumns[cc] = true; + } + } + if (idxChanged) { + builder.appendKey(idx); + } + pos.increment(); + }); } - if (idxChanged) { - builder.appendKey(idx); - } - pos.increment(); - }); + } } - } - } - downstream.modified = builder.getIndex(); + downstream.modified = builder.getIndex(); - downstream.modifiedColumnSet.clear(); - if (downstream.modified.nonempty()) { - for (int cc = 0; cc < changedColumns.length; ++cc) { - if (changedColumns[cc]) { - downstream.modifiedColumnSet.setAll(outputModifiedColumnSets[cc]); + downstream.modifiedColumnSet.clear(); + if (downstream.modified.nonempty()) { + for (int cc = 0; cc < changedColumns.length; ++cc) { + if (changedColumns[cc]) { + downstream.modifiedColumnSet.setAll(outputModifiedColumnSets[cc]); + } + } } - } - } - resultTable.notifyListeners(downstream); - } - }; + resultTable.notifyListeners(downstream); + } + }; ((DynamicTable) input).listenForUpdates(listener); return resultTable; diff --git a/DB/src/main/java/io/deephaven/db/util/ToMapListener.java b/DB/src/main/java/io/deephaven/db/util/ToMapListener.java index 
df8480fd11b..f782855f931 100644 --- a/DB/src/main/java/io/deephaven/db/util/ToMapListener.java +++ b/DB/src/main/java/io/deephaven/db/util/ToMapListener.java @@ -26,13 +26,11 @@ * @param the key type * @param the value type */ -public class ToMapListener extends InstrumentedShiftAwareListenerAdapter - implements Map { +public class ToMapListener extends InstrumentedShiftAwareListenerAdapter implements Map { private static final long NO_ENTRY_VALUE = -2; private static final long DELETED_ENTRY_VALUE = -1; - private final TObjectLongHashMap baselineMap = - new TObjectLongHashMap<>(8, 0.5f, NO_ENTRY_VALUE); + private final TObjectLongHashMap baselineMap = new TObjectLongHashMap<>(8, 0.5f, NO_ENTRY_VALUE); private volatile TObjectLongHashMap currentMap; private final LongFunction keyProducer; @@ -42,46 +40,39 @@ public class ToMapListener extends InstrumentedShiftAwareListenerAdapter public static ToMapListener make(DynamicTable source, String keySourceName) { return QueryPerformanceRecorder.withNugget("ToMapListener(" + keySourceName + ")", - () -> new ToMapListener(source, keySourceName, keySourceName)); + () -> new ToMapListener(source, keySourceName, keySourceName)); } - public static ToMapListener make(DynamicTable source, String keySourceName, - String valueSourceName) { - return QueryPerformanceRecorder.withNugget( - "ToMapListener(" + keySourceName + ", " + valueSourceName + ")", - () -> new ToMapListener(source, keySourceName, valueSourceName)); + public static ToMapListener make(DynamicTable source, String keySourceName, String valueSourceName) { + return QueryPerformanceRecorder.withNugget("ToMapListener(" + keySourceName + ", " + valueSourceName + ")", + () -> new ToMapListener(source, keySourceName, valueSourceName)); } - public static ToMapListener make(DynamicTable source, - ColumnSource keySource, ColumnSource valueSource) { + public static ToMapListener make(DynamicTable source, ColumnSource keySource, + ColumnSource valueSource) { // noinspection 
unchecked return QueryPerformanceRecorder.withNugget("ToMapListener", - () -> new ToMapListener<>(source, keySource, valueSource)); + () -> new ToMapListener<>(source, keySource, valueSource)); } - public static ToMapListener make(DynamicTable source, - LongFunction keyProducer, LongFunction prevKeyProducer, - LongFunction valueProducer, LongFunction prevValueProducer) { + public static ToMapListener make(DynamicTable source, LongFunction keyProducer, + LongFunction prevKeyProducer, LongFunction valueProducer, LongFunction prevValueProducer) { // noinspection unchecked return QueryPerformanceRecorder.withNugget("ToMapListener", - () -> new ToMapListener<>(source, keyProducer, prevKeyProducer, valueProducer, - prevValueProducer)); + () -> new ToMapListener<>(source, keyProducer, prevKeyProducer, valueProducer, prevValueProducer)); } private ToMapListener(DynamicTable source, String keySourceName, String valueSourceName) { // noinspection unchecked - this(source, source.getColumnSource(keySourceName), - source.getColumnSource(valueSourceName)); + this(source, source.getColumnSource(keySourceName), source.getColumnSource(valueSourceName)); } - private ToMapListener(DynamicTable source, ColumnSource keySource, - ColumnSource valueSource) { + private ToMapListener(DynamicTable source, ColumnSource keySource, ColumnSource valueSource) { this(source, keySource::get, keySource::getPrev, valueSource::get, valueSource::getPrev); } - private ToMapListener(DynamicTable source, LongFunction keyProducer, - LongFunction prevKeyProducer, LongFunction valueProducer, - LongFunction prevValueProducer) { + private ToMapListener(DynamicTable source, LongFunction keyProducer, LongFunction prevKeyProducer, + LongFunction valueProducer, LongFunction prevValueProducer) { super(source, false); this.keyProducer = keyProducer; this.prevKeyProducer = prevKeyProducer; @@ -96,10 +87,8 @@ private ToMapListener(DynamicTable source, LongFunction keyProducer, @Override public void onUpdate(final 
Update upstream) { - final int cap = - upstream.added.intSize() + upstream.removed.intSize() + upstream.modified.intSize(); - final TObjectLongHashMap newMap = - new TObjectLongHashMap<>(cap, 0.5f, NO_ENTRY_VALUE); + final int cap = upstream.added.intSize() + upstream.removed.intSize() + upstream.modified.intSize(); + final TObjectLongHashMap newMap = new TObjectLongHashMap<>(cap, 0.5f, NO_ENTRY_VALUE); final LongConsumer remover = (final long key) -> { newMap.put(prevKeyProducer.apply(key), DELETED_ENTRY_VALUE); @@ -143,10 +132,8 @@ public V get(Object key) { return get((K) key, valueProducer, prevValueProducer); } - public T get(K key, groovy.lang.Closure valueProducer, - groovy.lang.Closure prevValueProducer) { - return get(key, (long row) -> (T) valueProducer.call(row), - (long row) -> (T) prevValueProducer.call(row)); + public T get(K key, groovy.lang.Closure valueProducer, groovy.lang.Closure prevValueProducer) { + return get(key, (long row) -> (T) valueProducer.call(row), (long row) -> (T) prevValueProducer.call(row)); } public T get(K key, ColumnSource cs) { @@ -156,8 +143,8 @@ public T get(K key, ColumnSource cs) { /** * Get but instead of applying the default value producer, use a custom value producer. * - * The intention is that you can wrap the map up with several different value producers, e.g. - * one for bid and another for ask. + * The intention is that you can wrap the map up with several different value producers, e.g. one for bid and + * another for ask. * * @param key the key to retrieve * @param valueProducer retrieve the current value out of the table @@ -186,8 +173,7 @@ public T get(K key, LongFunction valueProducer, LongFunction prevValue return null; } } - return state == LogicalClock.State.Updating ? prevValueProducer.apply(row) - : valueProducer.apply(row); + return state == LogicalClock.State.Updating ? 
prevValueProducer.apply(row) : valueProducer.apply(row); } @Nullable diff --git a/DB/src/main/java/io/deephaven/db/util/WorkerPythonEnvironment.java b/DB/src/main/java/io/deephaven/db/util/WorkerPythonEnvironment.java index 1999a08ce9d..961e8fb19aa 100644 --- a/DB/src/main/java/io/deephaven/db/util/WorkerPythonEnvironment.java +++ b/DB/src/main/java/io/deephaven/db/util/WorkerPythonEnvironment.java @@ -16,10 +16,9 @@ /** * This class is the support infrastructure for running Python remote queries. * - * It is a singleton that contains an instance of a PythonHolder. All of the specially handled db - * operations from a remote Python session should execute queries which interact wtih this class. - * The script sessions that run for PersistentQueries or consoles are handled separately by the - * {@link PythonDeephavenSession}. + * It is a singleton that contains an instance of a PythonHolder. All of the specially handled db operations from a + * remote Python session should execute queries which interact wtih this class. The script sessions that run for + * PersistentQueries or consoles are handled separately by the {@link PythonDeephavenSession}. 
*/ public enum WorkerPythonEnvironment { DEFAULT; @@ -49,21 +48,21 @@ public enum WorkerPythonEnvironment { log.info().append("Worker python version ").append(pythonVersion).endl(); } else { log.warn().append("Worker python version set as ").append(pythonVersion) - .append(" which has unexpected format (not `....` which may " + - "lead to unexpected errors") - .endl(); + .append(" which has unexpected format (not `....` which may " + + "lead to unexpected errors") + .endl(); } PythonLogAdapter.interceptOutputStreams(evaluator); final String defaultScriptPath = Configuration.getInstance() - .getProperty("WorkerPythonEnvironment.defaultScriptPath") - .replace("", Configuration.getInstance().getDevRootPath()) - .replace("", Configuration.getInstance().getWorkspacePath()); + .getProperty("WorkerPythonEnvironment.defaultScriptPath") + .replace("", Configuration.getInstance().getDevRootPath()) + .replace("", Configuration.getInstance().getWorkspacePath()); final ScriptFinder scriptFinder = new ScriptFinder(defaultScriptPath); - final String initScript = Configuration.getInstance().getStringWithDefault( - "WorkerPythonEnvironment.initScript", "core/deephaven_jpy_init.py"); + final String initScript = Configuration.getInstance().getStringWithDefault("WorkerPythonEnvironment.initScript", + "core/deephaven_jpy_init.py"); final ScriptFinder.FileOrStream file; try { @@ -97,8 +96,8 @@ public enum WorkerPythonEnvironment { * When the object is a convertible PyObject; we return the PyObject. Otherwise, we'll return a * PythonRemoteQuery.PickledResult, which is suitable for unpickling by the remote side. * - * The caller should never serialize an unconverted PyObject; it contains a raw pointer and will - * result in a Hotspot or memory corruption on the remote side. + * The caller should never serialize an unconverted PyObject; it contains a raw pointer and will result in a Hotspot + * or memory corruption on the remote side. 
* * @param name the variable to retrieve * @return the variable as a Java object; or pickled @@ -121,20 +120,18 @@ public Object fetch(String name) { // In python2, we have that str is 8 bits, and base64.b64encode produces a str evaluator.evalStatement("__resultPickled__ = base64.b64encode(__resultDill__)"); } else { - // In python3, we have that str is 32 bit unicode, and base64.b64encode produces a - // bytes (array basically) - // our next step is to cast this output as a java string, which does not work for a - // bytes. + // In python3, we have that str is 32 bit unicode, and base64.b64encode produces a bytes (array + // basically) + // our next step is to cast this output as a java string, which does not work for a bytes. // We must make it a str, via calling .decode() on it. - evaluator - .evalStatement("__resultPickled__ = base64.b64encode(__resultDill__).decode()"); + evaluator.evalStatement("__resultPickled__ = base64.b64encode(__resultDill__).decode()"); } // this is the only place a base64 encoded item is cast to a string, so only fix needed String pickled = (String) getValue("__resultPickled__"); x = new PickledResult(pickled, pythonVersion); } else { - log.info().append("Variable ").append(name).append(" is of type ") - .append(x.getClass().getCanonicalName()).endl(); + log.info().append("Variable ").append(name).append(" is of type ").append(x.getClass().getCanonicalName()) + .endl(); } return x; @@ -159,8 +156,8 @@ public void eval(String evalString) { */ Object getValue(String variable) { return scope - .getValue(variable) - .orElseThrow(() -> new QueryScope.MissingVariableException("No variable: " + variable)); + .getValue(variable) + .orElseThrow(() -> new QueryScope.MissingVariableException("No variable: " + variable)); } } diff --git a/DB/src/main/java/io/deephaven/db/util/caching/C14nUtil.java b/DB/src/main/java/io/deephaven/db/util/caching/C14nUtil.java index 2c7458c622e..74f76a77fc4 100644 --- 
a/DB/src/main/java/io/deephaven/db/util/caching/C14nUtil.java +++ b/DB/src/main/java/io/deephaven/db/util/caching/C14nUtil.java @@ -17,24 +17,21 @@ public class C14nUtil { - public static final boolean ENABLED = - Configuration.getInstance().getBooleanWithDefault("C14nUtil.enabled", false); + public static final boolean ENABLED = Configuration.getInstance().getBooleanWithDefault("C14nUtil.enabled", false); /** - * An CanonicalizationCache instance that can/should be used by "general" utilities that want to - * store canonicalized objects in memory. Shared by StringUtils. + * An CanonicalizationCache instance that can/should be used by "general" utilities that want to store canonicalized + * objects in memory. Shared by StringUtils. */ public static final OpenAddressedCanonicalizationCache CACHE = - ENABLED ? new OpenAddressedCanonicalizationCache(10000) : null; + ENABLED ? new OpenAddressedCanonicalizationCache(10000) : null; /** - * A whitelist of classes that we'll canonicalize in the maybeCanonicalize* methods. Mutable - * classes or classes with hashCode() and equals(...) definitions that violate the usual - * contracts are dangerous and unsupported. + * A whitelist of classes that we'll canonicalize in the maybeCanonicalize* methods. Mutable classes or classes with + * hashCode() and equals(...) definitions that violate the usual contracts are dangerous and unsupported. */ private static final Class[] ELIGIBLE_CLASSES = new Class[] { - // Strings/CompressedStrings are the ideal classes for this functionality. In Java 8, - // though, we might + // Strings/CompressedStrings are the ideal classes for this functionality. In Java 8, though, we might // be able to use -XX:+UseStringDeduplication with G1, which is arguably better. String.class, CompressedString.class, @@ -42,12 +39,10 @@ public class C14nUtil { // DBDateTimes used in aggregations are most likely expirations. 
// DBDateTime.class, - // If we're going to bother canonicalizing key members, we might as well do the keys - // themselves. + // If we're going to bother canonicalizing key members, we might as well do the keys themselves. // CanonicalizedSmartKey.class - // Primitive wrappers are appropriate to include, but I'm not sure the benefits outweigh - // the costs. + // Primitive wrappers are appropriate to include, but I'm not sure the benefits outweigh the costs. // Uncomment if we find otherwise. // Boolean.class, // Character.class, @@ -77,30 +72,25 @@ private static boolean eligible(final Class clazz) { * * @param item * @param - * @return null if item was null, else the canonicalized version of item, which may be the same - * instance + * @return null if item was null, else the canonicalized version of item, which may be the same instance */ private static T canonicalize(final T item) { return item == null ? null : CACHE.getCachedItem(item); } /** - * Canonicalize an object using the default CanonicalizationCache, if it's an instance of a - * known appropriate class. + * Canonicalize an object using the default CanonicalizationCache, if it's an instance of a known appropriate class. * * @param item * @param - * @return null if item was null, else the canonicalized version of item if its class was - * eligible, else item + * @return null if item was null, else the canonicalized version of item if its class was eligible, else item */ public static T maybeCanonicalize(final T item) { - return !ENABLED || item == null || !eligible(item.getClass()) ? item - : CACHE.getCachedItem(item); + return !ENABLED || item == null || !eligible(item.getClass()) ? item : CACHE.getCachedItem(item); } /** - * Canonicalizes an array of objects in-place using the default CanonicalizationCache, with no - * type checking. + * Canonicalizes an array of objects in-place using the default CanonicalizationCache, with no type checking. 
* * @param items * @param @@ -114,9 +104,8 @@ private static T[] canonicalizeAll(@NotNull final T[] items) { } /** - * Canonicalizes an array of objects in-place using the default CanonicalizationCache, if - * they're instances of known appropriate classes. May canonicalize some items without - * canonicalizing all. + * Canonicalizes an array of objects in-place using the default CanonicalizationCache, if they're instances of known + * appropriate classes. May canonicalize some items without canonicalizing all. * * @param items * @return true if all non-null items were canonicalized, else false. @@ -144,23 +133,20 @@ public static boolean maybeCanonicalizeAll(@NotNull final T[] items) { * Make a SmartKey appropriate for values. * * @param values - * @return A canonicalized CanonicalizedSmartKey if all values are canonicalizable, else a new - * SmartKey + * @return A canonicalized CanonicalizedSmartKey if all values are canonicalizable, else a new SmartKey */ public static SmartKey makeSmartKey(final Object... values) { return maybeCanonicalizeAll(values) ? /* canonicalize( */new CanonicalizedSmartKey(values) - /* ) */ : new SmartKey(values); + /* ) */ : new SmartKey(values); } - private static final CanonicalizedSmartKey SMART_KEY_SINGLE_NULL = - new CanonicalizedSmartKey(new Object[] {null}); + private static final CanonicalizedSmartKey SMART_KEY_SINGLE_NULL = new CanonicalizedSmartKey(new Object[] {null}); /** - * If there is one value and it is null, return a special singleton smart key that we have - * created for this purpose. If there is one value and it is not null, hand it to - * maybeCanonicalize, which will either make a smart key out of it or return the value itself. - * Otherwise (if there are zero values or more than one value), then hand off to makeSmartKey - * which will make a CanonicalizedSmartKey (if possible) or a SmartKey (otherwise). 
+ * If there is one value and it is null, return a special singleton smart key that we have created for this purpose. + * If there is one value and it is not null, hand it to maybeCanonicalize, which will either make a smart key out of + * it or return the value itself. Otherwise (if there are zero values or more than one value), then hand off to + * makeSmartKey which will make a CanonicalizedSmartKey (if possible) or a SmartKey (otherwise). * * @param values the value or values to turn into a key * @return a potentially canonicalized key for use in a map @@ -199,8 +185,7 @@ public boolean equals(final Object obj) { return false; } for (int vi = 0; vi < values_.length; ++vi) { - // Because the members of values are canonicalized, we can use reference equality - // here. + // Because the members of values are canonicalized, we can use reference equality here. if (values_[vi] != other.values_[vi]) { return false; } @@ -210,12 +195,10 @@ public boolean equals(final Object obj) { @Override public String toString() { - return "{CanonicalizedSmartKey: values:" + Arrays.toString(values_) + " hashCode:" - + hashCode() + "}"; + return "{CanonicalizedSmartKey: values:" + Arrays.toString(values_) + " hashCode:" + hashCode() + "}"; } - private void readObject(final ObjectInputStream in) - throws IOException, ClassNotFoundException { + private void readObject(final ObjectInputStream in) throws IOException, ClassNotFoundException { in.defaultReadObject(); canonicalizeAll(values_); } diff --git a/DB/src/main/java/io/deephaven/db/util/config/InputTableRowSetter.java b/DB/src/main/java/io/deephaven/db/util/config/InputTableRowSetter.java index 8454af72843..bc63d2edff3 100644 --- a/DB/src/main/java/io/deephaven/db/util/config/InputTableRowSetter.java +++ b/DB/src/main/java/io/deephaven/db/util/config/InputTableRowSetter.java @@ -10,10 +10,9 @@ public interface InputTableRowSetter { /** - * Set the values of the column specified by the input, filling in missing data using the - * 
parameter 'table' as the previous value source. This method will be invoked asynchronously. - * Users may use {@link #setRows(Table, int[], Map[], InputTableStatusListener)} to be notified - * of asynchronous results. + * Set the values of the column specified by the input, filling in missing data using the parameter 'table' as the + * previous value source. This method will be invoked asynchronously. Users may use + * {@link #setRows(Table, int[], Map[], InputTableStatusListener)} to be notified of asynchronous results. * * @param table The table to use as the previous value source * @param row The row index to set @@ -25,10 +24,9 @@ default void setRow(Table table, int row, Map values) { } /** - * Set the values of the columns specified by the input, filling in missing data using the - * parameter 'table' as the previous value source. This method will be invoked asynchronously. - * Users may use {@link #setRows(Table, int[], Map[], InputTableStatusListener)} to be notified - * of asynchronous results. + * Set the values of the columns specified by the input, filling in missing data using the parameter 'table' as the + * previous value source. This method will be invoked asynchronously. Users may use + * {@link #setRows(Table, int[], Map[], InputTableStatusListener)} to be notified of asynchronous results. * * @param table The table to use as the previous value source * @param rowArray The row indices to update. @@ -39,22 +37,21 @@ default void setRows(Table table, int[] rowArray, Map[] valueArr } /** - * Set the values of the columns specified by the input, filling in missing data using the - * parameter 'table' as the previous value source. This method will be invoked asynchronously. - * The input listener will be notified on success/failure + * Set the values of the columns specified by the input, filling in missing data using the parameter 'table' as the + * previous value source. This method will be invoked asynchronously. 
The input listener will be notified on + * success/failure * * @param table The table to use as the previous value source * @param rowArray The row indices to update. * @param valueArray The new values. * @param listener The listener to notify on asynchronous results. */ - void setRows(Table table, int[] rowArray, Map[] valueArray, - InputTableStatusListener listener); + void setRows(Table table, int[] rowArray, Map[] valueArray, InputTableStatusListener listener); /** - * Add the specified row to the table. Duplicate keys will be overwritten. This method will - * execute asynchronously. Users may use {@link #addRow(Map, boolean, InputTableStatusListener)} - * to handle the result of the asynchronous write. + * Add the specified row to the table. Duplicate keys will be overwritten. This method will execute asynchronously. + * Users may use {@link #addRow(Map, boolean, InputTableStatusListener)} to handle the result of the asynchronous + * write. * * @param values The values to write. */ @@ -64,9 +61,8 @@ default void addRow(Map values) { } /** - * Add the specified rows to the table. Duplicate keys will be overwritten. This method will - * execute asynchronously. Users may use - * {@link #addRows(Map[], boolean, InputTableStatusListener)} to handle the asynchronous result. + * Add the specified rows to the table. Duplicate keys will be overwritten. This method will execute asynchronously. + * Users may use {@link #addRows(Map[], boolean, InputTableStatusListener)} to handle the asynchronous result. * * @param valueArray The values to write. */ @@ -75,27 +71,25 @@ default void addRows(Map[] valueArray) { } /** - * Add the specified row to the table, optionally overwriting existing keys. This method will - * execute asynchronously, the input listener will be notified on success/failure. + * Add the specified row to the table, optionally overwriting existing keys. This method will execute + * asynchronously, the input listener will be notified on success/failure. 
* * @param valueArray The value to write. * @param allowEdits Should pre-existing keys be overwritten? * @param listener The listener to report asynchronous result to. */ - default void addRow(Map valueArray, boolean allowEdits, - InputTableStatusListener listener) { + default void addRow(Map valueArray, boolean allowEdits, InputTableStatusListener listener) { // noinspection unchecked addRows(new Map[] {valueArray}, allowEdits, listener); } /** - * Add the specified rows to the table, optionally overwriting existing keys. This method will - * execute asynchronously, the input listener will be notified on success/failure. + * Add the specified rows to the table, optionally overwriting existing keys. This method will execute + * asynchronously, the input listener will be notified on success/failure. * * @param valueArray The values to write. * @param allowEdits Should pre-existing keys be overwritten? * @param listener The listener to report asynchronous results to. */ - void addRows(Map[] valueArray, boolean allowEdits, - InputTableStatusListener listener); + void addRows(Map[] valueArray, boolean allowEdits, InputTableStatusListener listener); } diff --git a/DB/src/main/java/io/deephaven/db/util/config/MutableInputTable.java b/DB/src/main/java/io/deephaven/db/util/config/MutableInputTable.java index 27cb2d19cfb..456b23c773c 100644 --- a/DB/src/main/java/io/deephaven/db/util/config/MutableInputTable.java +++ b/DB/src/main/java/io/deephaven/db/util/config/MutableInputTable.java @@ -29,8 +29,7 @@ default String[] getKeyNames() { } /** - * Get the underlying Table definition (which includes the names and types of all of the - * columns). + * Get the underlying Table definition (which includes the names and types of all of the columns). * * @return the TableDefinition for our user-visible table */ @@ -46,8 +45,8 @@ default String[] getKeyNames() { void add(Table newData) throws IOException; /** - * Delete the keys contained in the parameter table from this input table. 
This method will - * block until rows are deleted. + * Delete the keys contained in the parameter table from this input table. This method will block until rows are + * deleted. * * @param table The rows to delete. * @throws IOException If a problem occurred while deleting the rows. @@ -57,8 +56,8 @@ default void delete(Table table) throws IOException { } /** - * Delete the keys contained in the parameter table from this input table. This method will - * block until rows are deleted. + * Delete the keys contained in the parameter table from this input table. This method will block until rows are + * deleted. * * @param table The rows to delete. * @throws IOException If a problem occurred while deleting the rows. @@ -104,9 +103,8 @@ default boolean hasColumn(String columnName) { /** * Queries whether this MutableInputTable is editable in the current context. * - * @return true if this MutableInputTable may be edited, false otherwise TODO - * (deephaven/deephaven-core/issues/255): Add AuthContext and whatever else is - * appropriate + * @return true if this MutableInputTable may be edited, false otherwise TODO (deephaven/deephaven-core/issues/255): + * Add AuthContext and whatever else is appropriate */ boolean canEdit(); } diff --git a/DB/src/main/java/io/deephaven/db/util/file/FileHandle.java b/DB/src/main/java/io/deephaven/db/util/file/FileHandle.java index 00b68dff7c0..6b70d71b9d8 100644 --- a/DB/src/main/java/io/deephaven/db/util/file/FileHandle.java +++ b/DB/src/main/java/io/deephaven/db/util/file/FileHandle.java @@ -19,48 +19,45 @@ /** *

    - * A representation of an open file. Designed to ensure predictable cleanup for open file - * descriptors. + * A representation of an open file. Designed to ensure predictable cleanup for open file descriptors. * *

    - * This class is basically just a wrapper around a {@link FileChannel} that only exposes some of its - * methods. It serves two purposes: + * This class is basically just a wrapper around a {@link FileChannel} that only exposes some of its methods. It serves + * two purposes: *

      - *
    1. It creates an extra layer of indirection between the FileChannel and application code, to - * allow for reachability-sensitive cleanup.
    2. - *
    3. It's a convenient place to add instrumentation and/or modified implementations when - * necessary.
    4. + *
    5. It creates an extra layer of indirection between the FileChannel and application code, to allow for + * reachability-sensitive cleanup.
    6. + *
    7. It's a convenient place to add instrumentation and/or modified implementations when necessary.
    8. *
    * *

    - * The current implementation adds a post-close procedure for integration with caches/trackers, and - * stats for all operations. + * The current implementation adds a post-close procedure for integration with caches/trackers, and stats for all + * operations. * *

    - * Note that positional methods, e.g. {@link #position()}, {@link #position(long)}, - * {@link #read(ByteBuffer)}, and {@link #write(ByteBuffer)} may require external synchronization if - * used concurrently by more than one thread. + * Note that positional methods, e.g. {@link #position()}, {@link #position(long)}, {@link #read(ByteBuffer)}, and + * {@link #write(ByteBuffer)} may require external synchronization if used concurrently by more than one thread. */ public final class FileHandle implements SeekableByteChannel { private static final Value SIZE_DURATION_NANOS = - Stats.makeItem("FileHandle", "sizeDurationNanos", State.FACTORY).getValue(); + Stats.makeItem("FileHandle", "sizeDurationNanos", State.FACTORY).getValue(); private static final Value GET_POSITION_DURATION_NANOS = - Stats.makeItem("FileHandle", "getPositionDurationNanos", State.FACTORY).getValue(); + Stats.makeItem("FileHandle", "getPositionDurationNanos", State.FACTORY).getValue(); private static final Value SET_POSITION_DURATION_NANOS = - Stats.makeItem("FileHandle", "setPositionDurationNanos", State.FACTORY).getValue(); + Stats.makeItem("FileHandle", "setPositionDurationNanos", State.FACTORY).getValue(); private static final Value READ_DURATION_NANOS = - Stats.makeItem("FileHandle", "readDurationNanos", State.FACTORY).getValue(); + Stats.makeItem("FileHandle", "readDurationNanos", State.FACTORY).getValue(); private static final Value READ_SIZE_BYTES = - Stats.makeItem("FileHandle", "readSizeBytes", State.FACTORY).getValue(); + Stats.makeItem("FileHandle", "readSizeBytes", State.FACTORY).getValue(); private static final Value WRITE_DURATION_NANOS = - Stats.makeItem("FileHandle", "writeDurationNanos", State.FACTORY).getValue(); + Stats.makeItem("FileHandle", "writeDurationNanos", State.FACTORY).getValue(); private static final Value WRITE_SIZE_BYTES = - Stats.makeItem("FileHandle", "writeSizeBytes", State.FACTORY).getValue(); + Stats.makeItem("FileHandle", "writeSizeBytes", 
State.FACTORY).getValue(); private static final Value TRUNCATE_DURATION_NANOS = - Stats.makeItem("FileHandle", "truncateDurationNanos", State.FACTORY).getValue(); + Stats.makeItem("FileHandle", "truncateDurationNanos", State.FACTORY).getValue(); private static final Value FORCE_DURATION_NANOS = - Stats.makeItem("FileHandle", "forceDurationNanos", State.FACTORY).getValue(); + Stats.makeItem("FileHandle", "forceDurationNanos", State.FACTORY).getValue(); private final FileChannel fileChannel; private final Procedure.Nullary postCloseProcedure; @@ -73,11 +70,10 @@ public final class FileHandle implements SeekableByteChannel { * {@link ClosedChannelException}s that trigger {@code postCloseProcedure} invocation. * * @param fileChannel The {@link FileChannel} - * @param postCloseProcedure A procedure to invoke if its detected that the {@link FileChannel} - * is closed - must be idempotent + * @param postCloseProcedure A procedure to invoke if its detected that the {@link FileChannel} is closed - must be + * idempotent */ - public FileHandle(@NotNull final FileChannel fileChannel, - @NotNull final Procedure.Nullary postCloseProcedure) { + public FileHandle(@NotNull final FileChannel fileChannel, @NotNull final Procedure.Nullary postCloseProcedure) { this.fileChannel = Require.neqNull(fileChannel, "fileChannel"); this.postCloseProcedure = Require.neqNull(postCloseProcedure, "postCloseProcedure"); } @@ -156,8 +152,7 @@ public final FileHandle position(long newPosition) throws IOException { /** *

    - * Attempt to read {@code destination.remaining()} bytes, starting from {@code position} - * (0-indexed) in the file. + * Attempt to read {@code destination.remaining()} bytes, starting from {@code position} (0-indexed) in the file. *

    * See {@link FileChannel#read(ByteBuffer, long)}. * @@ -165,8 +160,7 @@ public final FileHandle position(long newPosition) throws IOException { * @param position The position in the file to start reading from * @return The number of bytes read, or -1 if end of file is reached */ - public final int read(@NotNull final ByteBuffer destination, final long position) - throws IOException { + public final int read(@NotNull final ByteBuffer destination, final long position) throws IOException { try { final long startTimeNanos = System.nanoTime(); final int sizeBytes = destination.remaining(); @@ -184,8 +178,8 @@ public final int read(@NotNull final ByteBuffer destination, final long position /** *

    - * Attempt to read {@code destination.remaining()} bytes, beginning at the handle's current - * position and updating that position by the number of bytes read. + * Attempt to read {@code destination.remaining()} bytes, beginning at the handle's current position and updating + * that position by the number of bytes read. *

    * See {@link FileChannel#read(ByteBuffer)}. * @@ -211,8 +205,7 @@ public final int read(@NotNull final ByteBuffer destination) throws IOException /** *

    - * Attempt to write {@code source.remaining(){} bytes, starting from {@code position} - * (0-indexed) in the file. + * Attempt to write {@code source.remaining(){} bytes, starting from {@code position} (0-indexed) in the file. *

    * See {@link FileChannel#write(ByteBuffer, long)}. * @@ -221,8 +214,7 @@ public final int read(@NotNull final ByteBuffer destination) throws IOException * @param position The position in the file to start writing at * @return The number of bytes written */ - public final int write(@NotNull final ByteBuffer source, final long position) - throws IOException { + public final int write(@NotNull final ByteBuffer source, final long position) throws IOException { try { final long startTimeNanos = System.nanoTime(); final int sizeBytes = source.remaining(); @@ -240,10 +232,9 @@ public final int write(@NotNull final ByteBuffer source, final long position) /** *

    - * Attempt to write {@code source.remaining()} bytes to this file handle, beginning at the - * handle's current position (which is first advanced to the end of the file, if the underlying - * {@link FileChannel} was opened with {@link java.nio.file.StandardOpenOption#APPEND}), and - * updating that position by the number of bytes written. + * Attempt to write {@code source.remaining()} bytes to this file handle, beginning at the handle's current position + * (which is first advanced to the end of the file, if the underlying {@link FileChannel} was opened with + * {@link java.nio.file.StandardOpenOption#APPEND}), and updating that position by the number of bytes written. *

    * See {@link FileChannel#write(ByteBuffer)}. * diff --git a/DB/src/main/java/io/deephaven/db/util/file/FileHandleAccessor.java b/DB/src/main/java/io/deephaven/db/util/file/FileHandleAccessor.java index 89b5bb465e6..3701824e164 100644 --- a/DB/src/main/java/io/deephaven/db/util/file/FileHandleAccessor.java +++ b/DB/src/main/java/io/deephaven/db/util/file/FileHandleAccessor.java @@ -11,8 +11,7 @@ import java.security.PrivilegedExceptionAction; /** - * Base class for accessors that wrap a {@link FileHandle} with support for interruption and - * asynchronous close. + * Base class for accessors that wrap a {@link FileHandle} with support for interruption and asynchronous close. */ public abstract class FileHandleAccessor { @@ -27,9 +26,8 @@ public abstract class FileHandleAccessor { * @param fileHandleCreator The function used to make file handles * @param file The abstract path name to access */ - protected FileHandleAccessor( - @NotNull final FileHandleFactory.FileToHandleFunction fileHandleCreator, - @NotNull final File file) { + protected FileHandleAccessor(@NotNull final FileHandleFactory.FileToHandleFunction fileHandleCreator, + @NotNull final File file) { this.fileHandleCreator = fileHandleCreator; this.file = Utils.fileGetAbsoluteFilePrivileged(file); fileHandle = makeHandle(); @@ -54,8 +52,8 @@ private FileHandle makeHandle() { } /** - * Replace the file handle with a new one if the closed handle passed in is still current, and - * return the (possibly changed) current value. + * Replace the file handle with a new one if the closed handle passed in is still current, and return the (possibly + * changed) current value. 
* * @param previousLocalHandle The closed handle that calling code would like to replace * @return The current file handle, possibly newly created diff --git a/DB/src/main/java/io/deephaven/db/util/file/FileHandleFactory.java b/DB/src/main/java/io/deephaven/db/util/file/FileHandleFactory.java index 57e79939a2a..6c8efab6955 100644 --- a/DB/src/main/java/io/deephaven/db/util/file/FileHandleFactory.java +++ b/DB/src/main/java/io/deephaven/db/util/file/FileHandleFactory.java @@ -24,8 +24,7 @@ public interface FileHandleFactory { * @return The new file handle */ @NotNull - FileHandle makeHandle(@NotNull final File file, @NotNull final OpenOption... openOptions) - throws IOException; + FileHandle makeHandle(@NotNull final File file, @NotNull final OpenOption... openOptions) throws IOException; @FunctionalInterface interface FileToHandleFunction { @@ -34,28 +33,23 @@ interface FileToHandleFunction { FileHandle invoke(@NotNull final File file) throws IOException; } - static FileToHandleFunction toReadOnlyHandleCreator( - @NotNull final FileHandleFactory fileHandleFactory) { - return (final File file) -> fileHandleFactory.makeHandle(file, - OpenOptionsHelper.READ_ONLY_OPEN_OPTIONS); + static FileToHandleFunction toReadOnlyHandleCreator(@NotNull final FileHandleFactory fileHandleFactory) { + return (final File file) -> fileHandleFactory.makeHandle(file, OpenOptionsHelper.READ_ONLY_OPEN_OPTIONS); } - static FileToHandleFunction toReadWriteCreateHandleCreator( - @NotNull final FileHandleFactory fileHandleFactory) { + static FileToHandleFunction toReadWriteCreateHandleCreator(@NotNull final FileHandleFactory fileHandleFactory) { return (final File file) -> fileHandleFactory.makeHandle(file, - OpenOptionsHelper.READ_WRITE_CREATE_OPEN_OPTIONS); + OpenOptionsHelper.READ_WRITE_CREATE_OPEN_OPTIONS); } - static FileToHandleFunction toWriteAppendCreateHandleCreator( - @NotNull final FileHandleFactory fileHandleFactory) { + static FileToHandleFunction 
toWriteAppendCreateHandleCreator(@NotNull final FileHandleFactory fileHandleFactory) { return (final File file) -> fileHandleFactory.makeHandle(file, - OpenOptionsHelper.WRITE_APPEND_CREATE_OPEN_OPTIONS); + OpenOptionsHelper.WRITE_APPEND_CREATE_OPEN_OPTIONS); } - static FileToHandleFunction toWriteTruncateCreateHandleCreator( - @NotNull final FileHandleFactory fileHandleFactory) { + static FileToHandleFunction toWriteTruncateCreateHandleCreator(@NotNull final FileHandleFactory fileHandleFactory) { return (final File file) -> fileHandleFactory.makeHandle(file, - OpenOptionsHelper.WRITE_TRUNCATE_CREATE_OPEN_OPTIONS); + OpenOptionsHelper.WRITE_TRUNCATE_CREATE_OPEN_OPTIONS); } final class OpenOptionsHelper { @@ -63,26 +57,24 @@ final class OpenOptionsHelper { /** * Open the file for reading only. Fail if the file doesn't already exist. */ - private static final OpenOption[] READ_ONLY_OPEN_OPTIONS = - new OpenOption[] {StandardOpenOption.READ}; + private static final OpenOption[] READ_ONLY_OPEN_OPTIONS = new OpenOption[] {StandardOpenOption.READ}; /** * Open the file for reading or writing. Create the file iff it doesn't already exist. */ - private static final OpenOption[] READ_WRITE_CREATE_OPEN_OPTIONS = new OpenOption[] { - StandardOpenOption.READ, StandardOpenOption.WRITE, StandardOpenOption.CREATE}; + private static final OpenOption[] READ_WRITE_CREATE_OPEN_OPTIONS = + new OpenOption[] {StandardOpenOption.READ, StandardOpenOption.WRITE, StandardOpenOption.CREATE}; /** * Open the file for writing. If it already exists, append to it, else create it. */ - private static final OpenOption[] WRITE_APPEND_CREATE_OPEN_OPTIONS = new OpenOption[] { - StandardOpenOption.WRITE, StandardOpenOption.APPEND, StandardOpenOption.CREATE}; + private static final OpenOption[] WRITE_APPEND_CREATE_OPEN_OPTIONS = + new OpenOption[] {StandardOpenOption.WRITE, StandardOpenOption.APPEND, StandardOpenOption.CREATE}; /** * Open the file for writing. 
If it already exists truncate it, else create it. */ - private static final OpenOption[] WRITE_TRUNCATE_CREATE_OPEN_OPTIONS = - new OpenOption[] {StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING, - StandardOpenOption.CREATE}; + private static final OpenOption[] WRITE_TRUNCATE_CREATE_OPEN_OPTIONS = new OpenOption[] { + StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.CREATE}; } } diff --git a/DB/src/main/java/io/deephaven/db/util/file/TrackedFileHandleFactory.java b/DB/src/main/java/io/deephaven/db/util/file/TrackedFileHandleFactory.java index fa7bbabe3d8..3b26fa82eae 100644 --- a/DB/src/main/java/io/deephaven/db/util/file/TrackedFileHandleFactory.java +++ b/DB/src/main/java/io/deephaven/db/util/file/TrackedFileHandleFactory.java @@ -25,9 +25,9 @@ import java.util.concurrent.atomic.AtomicInteger; /** - * Simple least-recently-opened "cache" for FileHandles, to avoid running up against ulimits. Will - * probably not achieve satisfactory results if the number of file handles concurrently in active - * use exceeds capacity. Note that returned FileHandles may be closed asynchronously by the factory. + * Simple least-recently-opened "cache" for FileHandles, to avoid running up against ulimits. Will probably not achieve + * satisfactory results if the number of file handles concurrently in active use exceeds capacity. Note that returned + * FileHandles may be closed asynchronously by the factory. * * TODO: Consider adding a lookup to enable handle sharing. Not necessary for current usage. 
*/ @@ -40,10 +40,9 @@ public static TrackedFileHandleFactory getInstance() { synchronized (TrackedFileHandleFactory.class) { if (instance == null) { instance = new TrackedFileHandleFactory( - CommBase.singleThreadedScheduler( - "TrackedFileHandleFactory.CleanupScheduler", Logger.NULL).start(), - Configuration.getInstance() - .getInteger("TrackedFileHandleFactory.maxOpenFiles")); + CommBase.singleThreadedScheduler("TrackedFileHandleFactory.CleanupScheduler", Logger.NULL) + .start(), + Configuration.getInstance().getInteger("TrackedFileHandleFactory.maxOpenFiles")); } } } @@ -62,14 +61,13 @@ public static TrackedFileHandleFactory getInstance() { private final AtomicInteger size = new AtomicInteger(0); private final Queue handleReferences = new ConcurrentLinkedQueue<>(); - public final FileToHandleFunction readOnlyHandleCreator = - FileHandleFactory.toReadOnlyHandleCreator(this); + public final FileToHandleFunction readOnlyHandleCreator = FileHandleFactory.toReadOnlyHandleCreator(this); public final FileToHandleFunction readWriteCreateHandleCreator = - FileHandleFactory.toReadWriteCreateHandleCreator(this); + FileHandleFactory.toReadWriteCreateHandleCreator(this); public final FileToHandleFunction writeAppendCreateHandleCreator = - FileHandleFactory.toWriteAppendCreateHandleCreator(this); + FileHandleFactory.toWriteAppendCreateHandleCreator(this); public final FileToHandleFunction writeTruncateCreateHandleCreator = - FileHandleFactory.toWriteTruncateCreateHandleCreator(this); + FileHandleFactory.toWriteTruncateCreateHandleCreator(this); /** * Full constructor. 
@@ -80,19 +78,17 @@ public static TrackedFileHandleFactory getInstance() { * @param cleanupIntervalMillis The interval for asynchronous cleanup attempts */ public TrackedFileHandleFactory(@NotNull final Scheduler scheduler, final int capacity, - final double targetUsageRatio, final long cleanupIntervalMillis) { + final double targetUsageRatio, final long cleanupIntervalMillis) { this.scheduler = scheduler; this.capacity = Require.gtZero(capacity, "capacity"); this.targetUsageRatio = Require.inRange(targetUsageRatio, 0.1, 0.9, "targetUsageRatio"); - targetUsageThreshold = - Require.gtZero((int) (capacity * targetUsageRatio), "targetUsageThreshold"); + targetUsageThreshold = Require.gtZero((int) (capacity * targetUsageRatio), "targetUsageThreshold"); new CleanupJob(cleanupIntervalMillis).schedule(); } /** - * Constructor with default target usage ratio of 0.9 (90%) and cleanup attempts every 60 - * seconds. + * Constructor with default target usage ratio of 0.9 (90%) and cleanup attempts every 60 seconds. * * @param scheduler The scheduler to use for cleanup * @param capacity The total number of file handles to allow outstanding @@ -123,8 +119,8 @@ public int getSize() { @Override @NotNull - public final FileHandle makeHandle(@NotNull final File file, - @NotNull final OpenOption[] openOptions) throws IOException { + public final FileHandle makeHandle(@NotNull final File file, @NotNull final OpenOption[] openOptions) + throws IOException { if (size.get() >= capacity) { // Synchronous cleanup at full capacity. 
cleanup(); @@ -138,24 +134,22 @@ public final FileHandle makeHandle(@NotNull final File file, private void cleanup() { for (final Iterator handleReferenceIterator = - handleReferences.iterator(); handleReferenceIterator.hasNext();) { + handleReferences.iterator(); handleReferenceIterator.hasNext();) { final HandleReference handleReference = handleReferenceIterator.next(); final FileHandle handle = handleReference.get(); if (handle == null) { handleReference.reclaim(); handleReferenceIterator.remove(); } else if (!handle.isOpen()) { - // NB: handle.isOpen() will invoke the close recorder as a side effect, if - // necessary. + // NB: handle.isOpen() will invoke the close recorder as a side effect, if necessary. handleReferenceIterator.remove(); } } HandleReference handleReference; - while (size.get() > targetUsageThreshold - && (handleReference = handleReferences.poll()) != null) { - // NB: poll() might return null if targetUsageThreshold is very low and some thread has - // incremented size but not added its handle. + while (size.get() > targetUsageThreshold && (handleReference = handleReferences.poll()) != null) { + // NB: poll() might return null if targetUsageThreshold is very low and some thread has incremented size but + // not added its handle. 
handleReference.reclaim(); } } @@ -185,8 +179,7 @@ public void timedOut() { try { cleanup(); } catch (Exception e) { - throw new RuntimeException( - "TrackedFileHandleFactory.CleanupJob: Unexpected exception", e); + throw new RuntimeException("TrackedFileHandleFactory.CleanupJob: Unexpected exception", e); } schedule(); } @@ -213,8 +206,8 @@ private class HandleReference extends WeakReference { private final FileChannel fileChannel; private final CloseRecorder closeRecorder; - private HandleReference(@NotNull final FileHandle referent, - @NotNull final FileChannel fileChannel, @NotNull final CloseRecorder closeRecorder) { + private HandleReference(@NotNull final FileHandle referent, @NotNull final FileChannel fileChannel, + @NotNull final CloseRecorder closeRecorder) { super(referent); this.fileChannel = fileChannel; this.closeRecorder = closeRecorder; diff --git a/DB/src/main/java/io/deephaven/db/util/jpy/JpyInit.java b/DB/src/main/java/io/deephaven/db/util/jpy/JpyInit.java index 40d7833f1c8..d0b747a960a 100644 --- a/DB/src/main/java/io/deephaven/db/util/jpy/JpyInit.java +++ b/DB/src/main/java/io/deephaven/db/util/jpy/JpyInit.java @@ -19,8 +19,8 @@ public static void init(Logger log) { public static synchronized void init(Logger log, JpyConfigExt jpyConfig) { if (PyLibInitializer.isPyLibInitialized()) { log.warn().append("Skipping initialization of Jpy, already initialized").endl(); - log.warn().append("Using Python Installation ") - .append(System.getProperty("jpy.pythonLib", "(unknown)")).endl(); + log.warn().append("Using Python Installation ").append(System.getProperty("jpy.pythonLib", "(unknown)")) + .endl(); return; } @@ -29,7 +29,6 @@ public static synchronized void init(Logger log, JpyConfigExt jpyConfig) { jpyConfig.initPython(); jpyConfig.startPython(); log.info().append("Started Python interpreter").endl(); - log.info().append("Using Python Installation ") - .append(System.getProperty("jpy.pythonLib", "(unknown)")).endl(); + log.info().append("Using 
Python Installation ").append(System.getProperty("jpy.pythonLib", "(unknown)")).endl(); } } diff --git a/DB/src/main/java/io/deephaven/db/util/liveness/Liveness.java b/DB/src/main/java/io/deephaven/db/util/liveness/Liveness.java index b545fc780a4..cd39330e1a2 100644 --- a/DB/src/main/java/io/deephaven/db/util/liveness/Liveness.java +++ b/DB/src/main/java/io/deephaven/db/util/liveness/Liveness.java @@ -18,20 +18,20 @@ public final class Liveness { public static final Logger log = LoggerFactory.getLogger(Liveness.class); - static final boolean REFERENCE_TRACKING_DISABLED = Configuration.getInstance() - .getBooleanWithDefault("Liveness.referenceTrackingDisabled", false); + static final boolean REFERENCE_TRACKING_DISABLED = + Configuration.getInstance().getBooleanWithDefault("Liveness.referenceTrackingDisabled", false); public static final boolean DEBUG_MODE_ENABLED = - Configuration.getInstance().getBooleanWithDefault("Liveness.debugModeEnabled", false); + Configuration.getInstance().getBooleanWithDefault("Liveness.debugModeEnabled", false); private static final boolean COUNT_LOG_ENABLED = - Configuration.getInstance().getBooleanWithDefault("Liveness.countLogEnabled", true); + Configuration.getInstance().getBooleanWithDefault("Liveness.countLogEnabled", true); private static final boolean HEAP_DUMP_ENABLED = - Configuration.getInstance().getBooleanWithDefault("Liveness.heapDump", false); + Configuration.getInstance().getBooleanWithDefault("Liveness.heapDump", false); static final boolean CLEANUP_LOG_ENABLED = - Configuration.getInstance().getBooleanWithDefault("Liveness.cleanupLogEnabled", true); + Configuration.getInstance().getBooleanWithDefault("Liveness.cleanupLogEnabled", true); private static final long OUTSTANDING_COUNT_LOG_INTERVAL_MILLIS = 1000L; @@ -44,9 +44,8 @@ public final class Liveness { *

    * Maybe log the count of known outstanding {@link LivenessReferent}s. *

    - * Will not log unless such logs are enabled, at least - * {@value OUTSTANDING_COUNT_LOG_INTERVAL_MILLIS}ms have elapsed, and the count has changed - * since the last time it was logged. + * Will not log unless such logs are enabled, at least {@value OUTSTANDING_COUNT_LOG_INTERVAL_MILLIS}ms have + * elapsed, and the count has changed since the last time it was logged. *

    * Note that this should be guarded by the LTM lock or similar. */ @@ -68,12 +67,11 @@ private static void maybeLogOutstandingCount() { } Liveness.log.info().append("Liveness: Outstanding count=").append(outstandingCount) - .append(", intervalMin=").append(intervalMinOutstandingCount) - .append(", intervalMax=").append(intervalMaxOutstandingCount) - .endl(); + .append(", intervalMin=").append(intervalMinOutstandingCount) + .append(", intervalMax=").append(intervalMaxOutstandingCount) + .endl(); outstandingCountChanged = false; - intervalLastOutstandingCount = - intervalMinOutstandingCount = intervalMaxOutstandingCount = outstandingCount; + intervalLastOutstandingCount = intervalMinOutstandingCount = intervalMaxOutstandingCount = outstandingCount; } public static void scheduleCountReport(@NotNull final Scheduler scheduler) { @@ -81,8 +79,7 @@ public static void scheduleCountReport(@NotNull final Scheduler scheduler) { @Override public final void timedOut() { maybeLogOutstandingCount(); - scheduler.installJob(this, - scheduler.currentTimeMillis() + OUTSTANDING_COUNT_LOG_INTERVAL_MILLIS); + scheduler.installJob(this, scheduler.currentTimeMillis() + OUTSTANDING_COUNT_LOG_INTERVAL_MILLIS); } }, 0L); } @@ -91,14 +88,12 @@ private Liveness() {} /** *

    - * Determine whether a cached object should be reused, w.r.t. liveness. Null inputs are never - * safe for reuse. If the object is a {@link LivenessReferent} and not a non-refreshing - * {@link DynamicNode}, this method will return the result of trying to manage object with the - * top of the current thread's {@link LivenessScopeStack}. + * Determine whether a cached object should be reused, w.r.t. liveness. Null inputs are never safe for reuse. If the + * object is a {@link LivenessReferent} and not a non-refreshing {@link DynamicNode}, this method will return the + * result of trying to manage object with the top of the current thread's {@link LivenessScopeStack}. * * @param object The object - * @return True if the object did not need management, or if it was successfully managed, false - * otherwise + * @return True if the object did not need management, or if it was successfully managed, false otherwise */ public static boolean verifyCachedObjectForReuse(final Object object) { if (object == null) { @@ -118,8 +113,8 @@ static void maybeHeapDump(LivenessStateException lse) { return; } final String heapDumpPath = HeapDump.generateHeapDumpPath(); - log.fatal().append("LivenessStateException, generating heap dump to").append(heapDumpPath) - .append(": ").append(lse).endl(); + log.fatal().append("LivenessStateException, generating heap dump to").append(heapDumpPath).append(": ") + .append(lse).endl(); try { HeapDump.heapDump(heapDumpPath); } catch (IOException ignored) { diff --git a/DB/src/main/java/io/deephaven/db/util/liveness/LivenessArtifact.java b/DB/src/main/java/io/deephaven/db/util/liveness/LivenessArtifact.java index ca4e6874056..d7c435a868d 100644 --- a/DB/src/main/java/io/deephaven/db/util/liveness/LivenessArtifact.java +++ b/DB/src/main/java/io/deephaven/db/util/liveness/LivenessArtifact.java @@ -8,8 +8,8 @@ import java.util.Collection; /** - * A query engine artifact that is also a {@link LivenessNode}. 
These referents are added to the - * current top of the {@link LivenessScopeStack} on construction or deserialization. + * A query engine artifact that is also a {@link LivenessNode}. These referents are added to the current top of the + * {@link LivenessScopeStack} on construction or deserialization. */ public class LivenessArtifact extends ReferenceCountedLivenessNode implements Serializable { @@ -26,7 +26,7 @@ protected LivenessArtifact() { * @param objectInputStream The object input stream */ private void readObject(@NotNull final ObjectInputStream objectInputStream) - throws IOException, ClassNotFoundException { + throws IOException, ClassNotFoundException { objectInputStream.defaultReadObject(); initializeTransientFieldsForLiveness(); manageWithCurrentScope(); @@ -57,8 +57,7 @@ protected final void unmanage(@NotNull final LivenessReferent referent) { /** *

    - * If this artifact is still live and it manages referent one or more times, drop one such - * reference. + * If this artifact is still live and it manages referent one or more times, drop one such reference. * * @param referent The referent to drop */ @@ -77,8 +76,7 @@ protected final void tryUnmanage(@NotNull final LivenessReferent referent) { /** *

    - * For each referent in referents, if this manages referent one or more times, drop one such - * reference. + * For each referent in referents, if this manages referent one or more times, drop one such reference. * * @param referents The referents to drop */ @@ -92,14 +90,13 @@ protected final void unmanage(@NotNull final Collection - * For each referent in referents, if this artifact is still live and it manages referent one or - * more times, drop one such reference. + * For each referent in referents, if this artifact is still live and it manages referent one or more times, drop + * one such reference. * * @param referents The referents to drop */ @SuppressWarnings("unused") - protected final void tryUnmanage( - @NotNull final Collection referents) { + protected final void tryUnmanage(@NotNull final Collection referents) { if (Liveness.REFERENCE_TRACKING_DISABLED) { return; } diff --git a/DB/src/main/java/io/deephaven/db/util/liveness/LivenessDebugException.java b/DB/src/main/java/io/deephaven/db/util/liveness/LivenessDebugException.java index 3c212d30de7..66387c66ee0 100644 --- a/DB/src/main/java/io/deephaven/db/util/liveness/LivenessDebugException.java +++ b/DB/src/main/java/io/deephaven/db/util/liveness/LivenessDebugException.java @@ -1,8 +1,7 @@ package io.deephaven.db.util.liveness; /** - * Exception class used for getting stack traces while debugging liveness instrumentation. Should - * never be thrown. + * Exception class used for getting stack traces while debugging liveness instrumentation. Should never be thrown. 
*/ class LivenessDebugException extends RuntimeException { diff --git a/DB/src/main/java/io/deephaven/db/util/liveness/LivenessManager.java b/DB/src/main/java/io/deephaven/db/util/liveness/LivenessManager.java index 1d9e0f8b60e..eb23d4b67ae 100644 --- a/DB/src/main/java/io/deephaven/db/util/liveness/LivenessManager.java +++ b/DB/src/main/java/io/deephaven/db/util/liveness/LivenessManager.java @@ -3,8 +3,7 @@ import org.jetbrains.annotations.NotNull; /** - * Interface for objects that retainReference {@link LivenessReferent}s until such time as they are - * no longer necessary. + * Interface for objects that retainReference {@link LivenessReferent}s until such time as they are no longer necessary. */ public interface LivenessManager { @@ -15,8 +14,8 @@ public interface LivenessManager { */ default void manage(@NotNull final LivenessReferent referent) { if (!tryManage(referent)) { - throw new LivenessStateException(this + " failed to add " - + referent.getReferentDescription() + ", which is no longer live"); + throw new LivenessStateException( + this + " failed to add " + referent.getReferentDescription() + ", which is no longer live"); } } diff --git a/DB/src/main/java/io/deephaven/db/util/liveness/LivenessNode.java b/DB/src/main/java/io/deephaven/db/util/liveness/LivenessNode.java index aafa27368d3..736636ffb88 100644 --- a/DB/src/main/java/io/deephaven/db/util/liveness/LivenessNode.java +++ b/DB/src/main/java/io/deephaven/db/util/liveness/LivenessNode.java @@ -1,8 +1,7 @@ package io.deephaven.db.util.liveness; /** - * A {@link LivenessReferent} that is also a {@link LivenessManager}, transitively enforcing - * liveness on its referents. + * A {@link LivenessReferent} that is also a {@link LivenessManager}, transitively enforcing liveness on its referents. 
*/ public interface LivenessNode extends LivenessReferent, LivenessManager { } diff --git a/DB/src/main/java/io/deephaven/db/util/liveness/LivenessReferent.java b/DB/src/main/java/io/deephaven/db/util/liveness/LivenessReferent.java index 53708c75ab5..4d9009568a0 100644 --- a/DB/src/main/java/io/deephaven/db/util/liveness/LivenessReferent.java +++ b/DB/src/main/java/io/deephaven/db/util/liveness/LivenessReferent.java @@ -16,14 +16,13 @@ public interface LivenessReferent { */ default void retainReference() { if (!tryRetainReference()) { - throw new LivenessStateException( - this + " is no longer live and cannot be retained further"); + throw new LivenessStateException(this + " is no longer live and cannot be retained further"); } } /** - * If this referent is "live", behave as {@link #retainReference()} and return true. Otherwise, - * returns false rather than throwing an exception. + * If this referent is "live", behave as {@link #retainReference()} and return true. Otherwise, returns false rather + * than throwing an exception. * * @return True if this referent was retained, false otherwise */ @@ -42,8 +41,8 @@ default void retainReference() { WeakReference getWeakReference(); /** - * Get a name that is suitable for uniquely identifying this {@link LivenessReferent} in debug - * and error messages. This is usually not the same as {@link Object#toString()}. + * Get a name that is suitable for uniquely identifying this {@link LivenessReferent} in debug and error messages. + * This is usually not the same as {@link Object#toString()}. 
* * @return A unique name for this referent for debugging and error message purposes */ diff --git a/DB/src/main/java/io/deephaven/db/util/liveness/LivenessScope.java b/DB/src/main/java/io/deephaven/db/util/liveness/LivenessScope.java index 9d2f414df69..f4f81234f34 100644 --- a/DB/src/main/java/io/deephaven/db/util/liveness/LivenessScope.java +++ b/DB/src/main/java/io/deephaven/db/util/liveness/LivenessScope.java @@ -4,27 +4,25 @@ import org.jetbrains.annotations.NotNull; /** - * {@link LivenessNode} implementation for providing external scope to one or more - * {@link LivenessReferent}s. + * {@link LivenessNode} implementation for providing external scope to one or more {@link LivenessReferent}s. */ -public class LivenessScope extends ReferenceCountedLivenessNode - implements ReleasableLivenessManager { +public class LivenessScope extends ReferenceCountedLivenessNode implements ReleasableLivenessManager { /** - * Construct a new scope, which must be {@link #release()}d in order to release any subsequently - * added {@link LivenessReferent}s. Will only enforce weak reachability on its - * {@link #manage(LivenessReferent)}ed referents. + * Construct a new scope, which must be {@link #release()}d in order to release any subsequently added + * {@link LivenessReferent}s. Will only enforce weak reachability on its {@link #manage(LivenessReferent)}ed + * referents. */ public LivenessScope() { this(false); } /** - * Construct a new scope, which must be {@link #release()}d in order to release any subsequently - * added {@link LivenessReferent}s. + * Construct a new scope, which must be {@link #release()}d in order to release any subsequently added + * {@link LivenessReferent}s. 
* - * @param enforceStrongReachability Whether this {@link LivenessScope} should maintain strong - * references to its {@link #manage(LivenessReferent)}ed referents + * @param enforceStrongReachability Whether this {@link LivenessScope} should maintain strong references to its + * {@link #manage(LivenessReferent)}ed referents */ public LivenessScope(boolean enforceStrongReachability) { super(enforceStrongReachability); @@ -32,15 +30,14 @@ public LivenessScope(boolean enforceStrongReachability) { return; } if (Liveness.DEBUG_MODE_ENABLED) { - Liveness.log.info().append("LivenessDebug: Creating scope ") - .append(Utils.REFERENT_FORMATTER, this).endl(); + Liveness.log.info().append("LivenessDebug: Creating scope ").append(Utils.REFERENT_FORMATTER, this).endl(); } incrementReferenceCount(); } /** - * Transfer all retained {@link LivenessReferent}s from this {@link LivenessScope} to a - * compatible {@link LivenessManager}. Transfer support compatibility is implementation defined. + * Transfer all retained {@link LivenessReferent}s from this {@link LivenessScope} to a compatible + * {@link LivenessManager}. Transfer support compatibility is implementation defined. 
* * @param other The other {@link LivenessManager} */ @@ -50,23 +47,21 @@ public final void transferTo(@NotNull final LivenessManager other) { } if (enforceStrongReachability) { throw new UnsupportedOperationException( - "LivenessScope does not support reference transfer if enforceStrongReachability is specified"); + "LivenessScope does not support reference transfer if enforceStrongReachability is specified"); } if (other instanceof ReferenceCountedLivenessNode) { tracker.transferReferencesTo(((ReferenceCountedLivenessNode) other).tracker); } else if (other instanceof PermanentLivenessManager) { tracker.makeReferencesPermanent(); } else { - throw new UnsupportedOperationException( - "Unable to transfer to unrecognized implementation class=" + throw new UnsupportedOperationException("Unable to transfer to unrecognized implementation class=" + Utils.getSimpleNameFor(other) + ", instance=" + other); } } /** - * Release all referents previously added to this scope in its capacity as a - * {@link LivenessManager}, unless other references to this scope are retained in its capacity - * as a {@link LivenessReferent}. + * Release all referents previously added to this scope in its capacity as a {@link LivenessManager}, unless other + * references to this scope are retained in its capacity as a {@link LivenessReferent}. 
*/ @Override public final void release() { @@ -74,13 +69,13 @@ public final void release() { return; } if (Liveness.DEBUG_MODE_ENABLED) { - Liveness.log.info().append("LivenessDebug: Begin releasing scope ") - .append(Utils.REFERENT_FORMATTER, this).endl(); + Liveness.log.info().append("LivenessDebug: Begin releasing scope ").append(Utils.REFERENT_FORMATTER, this) + .endl(); } decrementReferenceCount(); if (Liveness.DEBUG_MODE_ENABLED) { - Liveness.log.info().append("LivenessDebug: End releasing scope ") - .append(Utils.REFERENT_FORMATTER, this).endl(); + Liveness.log.info().append("LivenessDebug: End releasing scope ").append(Utils.REFERENT_FORMATTER, this) + .endl(); } } } diff --git a/DB/src/main/java/io/deephaven/db/util/liveness/LivenessScopeStack.java b/DB/src/main/java/io/deephaven/db/util/liveness/LivenessScopeStack.java index 26acd10835d..6be5d65dce6 100644 --- a/DB/src/main/java/io/deephaven/db/util/liveness/LivenessScopeStack.java +++ b/DB/src/main/java/io/deephaven/db/util/liveness/LivenessScopeStack.java @@ -9,19 +9,18 @@ /** *

    - * Support for a thread-local stack of {@link LivenessScope}s to allow the preferred programming - * model for scoping of {@link LivenessArtifact}s. + * Support for a thread-local stack of {@link LivenessScope}s to allow the preferred programming model for scoping of + * {@link LivenessArtifact}s. *

    - * Instances expect to be used on exactly one thread, and hence do not take any measures to ensure - * thread safety. + * Instances expect to be used on exactly one thread, and hence do not take any measures to ensure thread safety. */ public class LivenessScopeStack { private static final ThreadLocal THREAD_STACK = - ThreadLocal.withInitial(LivenessScopeStack::new); + ThreadLocal.withInitial(LivenessScopeStack::new); private static final ThreadLocal THREAD_BASE_MANAGER = - ThreadLocal.withInitial(PermanentLivenessManager::new); + ThreadLocal.withInitial(PermanentLivenessManager::new); private final Deque stack = new ArrayDeque<>(); @@ -51,8 +50,8 @@ public static void pop(@NotNull final LivenessScope scope) { /** *

    - * Get the scope at the top of the current thread's scope stack, or the base manager if no - * scopes have been pushed but not popped on this thread. + * Get the scope at the top of the current thread's scope stack, or the base manager if no scopes have been pushed + * but not popped on this thread. *

    * This method defines the manager that should be used for all new {@link LivenessArtifact}s. * @@ -71,28 +70,25 @@ public static LivenessManager peek() { * * @param scope The scope * @param releaseOnClose Whether the scope should be released when the result is closed - * @return A {@link SafeCloseable} whose {@link SafeCloseable#close()} method invokes - * {@link #pop(LivenessScope)} for the scope (followed by - * {@link LivenessScope#release()} if releaseOnClose is true) + * @return A {@link SafeCloseable} whose {@link SafeCloseable#close()} method invokes {@link #pop(LivenessScope)} + * for the scope (followed by {@link LivenessScope#release()} if releaseOnClose is true) */ @NotNull - public static SafeCloseable open(@NotNull final LivenessScope scope, - final boolean releaseOnClose) { + public static SafeCloseable open(@NotNull final LivenessScope scope, final boolean releaseOnClose) { push(scope); return releaseOnClose ? new PopAndReleaseOnClose(scope) : new PopOnClose(scope); } /** *

    - * Push an anonymous scope onto the scope stack, and get an {@link SafeCloseable} that pops it - * and then {@link LivenessScope#release()}s it. + * Push an anonymous scope onto the scope stack, and get an {@link SafeCloseable} that pops it and then + * {@link LivenessScope#release()}s it. *

    - * This is useful enclosing a series of query engine actions whose results must be explicitly - * retained externally in order to preserve liveness. + * This is useful enclosing a series of query engine actions whose results must be explicitly retained externally in + * order to preserve liveness. * - * @return A {@link SafeCloseable} whose {@link SafeCloseable#close()} method invokes - * {@link #pop(LivenessScope)} for the scope, followed by - * {@link LivenessScope#release()} + * @return A {@link SafeCloseable} whose {@link SafeCloseable#close()} method invokes {@link #pop(LivenessScope)} + * for the scope, followed by {@link LivenessScope#release()} */ @NotNull public static SafeCloseable open() { @@ -103,21 +99,19 @@ public static SafeCloseable open() { private void pushInternal(@NotNull final LivenessScope scope) { if (Liveness.DEBUG_MODE_ENABLED) { - Liveness.log.info().append("LivenessDebug: Pushing scope ") - .append(Utils.REFERENT_FORMATTER, scope).endl(); + Liveness.log.info().append("LivenessDebug: Pushing scope ").append(Utils.REFERENT_FORMATTER, scope).endl(); } stack.push(scope); } private void popInternal(@NotNull final LivenessScope scope) { if (Liveness.DEBUG_MODE_ENABLED) { - Liveness.log.info().append("LivenessDebug: Popping scope ") - .append(Utils.REFERENT_FORMATTER, scope).endl(); + Liveness.log.info().append("LivenessDebug: Popping scope ").append(Utils.REFERENT_FORMATTER, scope).endl(); } final LivenessScope peeked = stack.peekFirst(); if (peeked != scope) { - throw new IllegalStateException("Caller requested to pop " + scope - + " but the top of the scope stack is " + peeked); + throw new IllegalStateException( + "Caller requested to pop " + scope + " but the top of the scope stack is " + peeked); } stack.pop(); } diff --git a/DB/src/main/java/io/deephaven/db/util/liveness/PermanentLivenessManager.java b/DB/src/main/java/io/deephaven/db/util/liveness/PermanentLivenessManager.java index 44957e8f643..73ab0bbb206 100644 --- 
a/DB/src/main/java/io/deephaven/db/util/liveness/PermanentLivenessManager.java +++ b/DB/src/main/java/io/deephaven/db/util/liveness/PermanentLivenessManager.java @@ -7,8 +7,7 @@ *

    * A {@link LivenessManager} implementation that will never release its referents. *

    - * Instances expect to be used on exactly one thread, and hence do not take any measures to ensure - * thread safety. + * Instances expect to be used on exactly one thread, and hence do not take any measures to ensure thread safety. */ public final class PermanentLivenessManager implements LivenessManager { @@ -24,8 +23,8 @@ public final boolean tryManage(@NotNull LivenessReferent referent) { } if (Liveness.DEBUG_MODE_ENABLED) { Liveness.log.info().append("LivenessDebug: PermanentLivenessManager managing ") - .append(Utils.REFERENT_FORMATTER, referent).append(" for ") - .append(new LivenessDebugException()).endl(); + .append(Utils.REFERENT_FORMATTER, referent).append(" for ").append(new LivenessDebugException()) + .endl(); } return true; } diff --git a/DB/src/main/java/io/deephaven/db/util/liveness/ReferenceCountedLivenessNode.java b/DB/src/main/java/io/deephaven/db/util/liveness/ReferenceCountedLivenessNode.java index 2a17c778d90..be45b628be0 100644 --- a/DB/src/main/java/io/deephaven/db/util/liveness/ReferenceCountedLivenessNode.java +++ b/DB/src/main/java/io/deephaven/db/util/liveness/ReferenceCountedLivenessNode.java @@ -18,20 +18,18 @@ abstract class ReferenceCountedLivenessNode extends ReferenceCounted implements transient RetainedReferenceTracker tracker; /** - * @param enforceStrongReachability Whether this {@link LivenessManager} should maintain strong - * references to its referents + * @param enforceStrongReachability Whether this {@link LivenessManager} should maintain strong references to its + * referents */ - @SuppressWarnings("WeakerAccess") // Needed in order to deserialize Serializable subclass - // instances + @SuppressWarnings("WeakerAccess") // Needed in order to deserialize Serializable subclass instances protected ReferenceCountedLivenessNode(final boolean enforceStrongReachability) { this.enforceStrongReachability = enforceStrongReachability; initializeTransientFieldsForLiveness(); } /** - * Package-private for {@link 
java.io.Serializable} sub-classes to use in - * readObject only. Public to allow unit tests in another package to work - * around mock issues where the constructor is never invoked. + * Package-private for {@link java.io.Serializable} sub-classes to use in readObject only. + * Public to allow unit tests in another package to work around mock issues where the constructor is never invoked. */ @VisibleForTesting public final void initializeTransientFieldsForLiveness() { @@ -40,9 +38,8 @@ public final void initializeTransientFieldsForLiveness() { } tracker = new RetainedReferenceTracker<>(this, enforceStrongReachability); if (Liveness.DEBUG_MODE_ENABLED) { - Liveness.log.info().append("LivenessDebug: Created tracker ") - .append(Utils.REFERENT_FORMATTER, tracker).append(" for ") - .append(Utils.REFERENT_FORMATTER, this).endl(); + Liveness.log.info().append("LivenessDebug: Created tracker ").append(Utils.REFERENT_FORMATTER, tracker) + .append(" for ").append(Utils.REFERENT_FORMATTER, this).endl(); } } @@ -60,12 +57,11 @@ public final void dropReference() { return; } if (Liveness.DEBUG_MODE_ENABLED) { - Liveness.log.info().append("LivenessDebug: Releasing ") - .append(Utils.REFERENT_FORMATTER, this).endl(); + Liveness.log.info().append("LivenessDebug: Releasing ").append(Utils.REFERENT_FORMATTER, this).endl(); } if (!tryDecrementReferenceCount()) { throw new LivenessStateException( - getReferentDescription() + " could not be released as it was no longer live"); + getReferentDescription() + " could not be released as it was no longer live"); } } @@ -80,8 +76,8 @@ public final boolean tryManage(@NotNull final LivenessReferent referent) { return true; } if (Liveness.DEBUG_MODE_ENABLED) { - Liveness.log.info().append("LivenessDebug: ").append(getReferentDescription()) - .append(" managing ").append(referent.getReferentDescription()).endl(); + Liveness.log.info().append("LivenessDebug: ").append(getReferentDescription()).append(" managing ") + 
.append(referent.getReferentDescription()).endl(); } if (!referent.tryRetainReference()) { return false; @@ -92,27 +88,24 @@ public final boolean tryManage(@NotNull final LivenessReferent referent) { /** *

    - * Attempt to release (destructively when necessary) resources held by this object. This may - * render the object unusable for subsequent operations. Implementations should be sure to call - * super.destroy(). + * Attempt to release (destructively when necessary) resources held by this object. This may render the object + * unusable for subsequent operations. Implementations should be sure to call super.destroy(). *

    - * This is intended to only ever be used as a side effect of decreasing the reference count to - * 0. + * This is intended to only ever be used as a side effect of decreasing the reference count to 0. */ protected void destroy() {} @Override protected final void onReferenceCountAtZero() { if (Liveness.REFERENCE_TRACKING_DISABLED) { - throw new IllegalStateException("Reference count on " + this - + " reached zero while liveness reference tracking is disabled"); + throw new IllegalStateException( + "Reference count on " + this + " reached zero while liveness reference tracking is disabled"); } try { destroy(); } catch (Exception e) { - Liveness.log.warn().append("Exception while destroying ") - .append(Utils.REFERENT_FORMATTER, this) - .append(" after reference count reached zero: ").append(e).endl(); + Liveness.log.warn().append("Exception while destroying ").append(Utils.REFERENT_FORMATTER, this) + .append(" after reference count reached zero: ").append(e).endl(); } tracker.ensureReferencesDropped(); } diff --git a/DB/src/main/java/io/deephaven/db/util/liveness/ReleasableLivenessManager.java b/DB/src/main/java/io/deephaven/db/util/liveness/ReleasableLivenessManager.java index 109e490abe3..a09890b242b 100644 --- a/DB/src/main/java/io/deephaven/db/util/liveness/ReleasableLivenessManager.java +++ b/DB/src/main/java/io/deephaven/db/util/liveness/ReleasableLivenessManager.java @@ -1,15 +1,15 @@ package io.deephaven.db.util.liveness; /** - * Interface for {@link LivenessManager} instances that support a {@link #release} method to - * initiate retained referent release callback invocation. It is the creator's responsibility to - * ensure that {@link #release()} is invoked before this manager becomes unreachable. + * Interface for {@link LivenessManager} instances that support a {@link #release} method to initiate retained referent + * release callback invocation. 
It is the creator's responsibility to ensure that {@link #release()} is invoked before + * this manager becomes unreachable. */ public interface ReleasableLivenessManager extends LivenessManager { /** - * Release ownership of this {@link ReleasableLivenessManager}, allowing any retained - * {@link LivenessReferent}s to cleanup if they no longer have outstanding references. + * Release ownership of this {@link ReleasableLivenessManager}, allowing any retained {@link LivenessReferent}s to + * cleanup if they no longer have outstanding references. */ void release(); } diff --git a/DB/src/main/java/io/deephaven/db/util/liveness/RetainedReferenceTracker.java b/DB/src/main/java/io/deephaven/db/util/liveness/RetainedReferenceTracker.java index ecc3053f627..d7685e666a8 100644 --- a/DB/src/main/java/io/deephaven/db/util/liveness/RetainedReferenceTracker.java +++ b/DB/src/main/java/io/deephaven/db/util/liveness/RetainedReferenceTracker.java @@ -18,54 +18,50 @@ /** *

    * {@link WeakCleanupReference} to a {@link LivenessManager} that tracks the manager's retained - * {@link LivenessReferent}s, in order to guarantee that they will each have their references - * dropped exactly once via an idempotent cleanup process. + * {@link LivenessReferent}s, in order to guarantee that they will each have their references dropped exactly once via + * an idempotent cleanup process. *

    * This cleanup process is initiated one of two ways: *

      - *
    1. The manager invokes it directly via {@link #ensureReferencesDropped()} because it is - * releasing all of its retained references.
    2. - *
    3. A {@link io.deephaven.util.reference.CleanupReferenceProcessor} or similar code invokes - * {@link #cleanup()} after the manager is garbage-collected.
    4. + *
    5. The manager invokes it directly via {@link #ensureReferencesDropped()} because it is releasing all of its + * retained references.
    6. + *
    7. A {@link io.deephaven.util.reference.CleanupReferenceProcessor} or similar code invokes {@link #cleanup()} after + * the manager is garbage-collected.
    8. *
    */ -final class RetainedReferenceTracker - extends WeakCleanupReference { +final class RetainedReferenceTracker extends WeakCleanupReference { @SuppressWarnings("rawtypes") private static final AtomicIntegerFieldUpdater OUTSTANDING_STATE_UPDATER = - AtomicIntegerFieldUpdater.newUpdater(RetainedReferenceTracker.class, "outstandingState"); + AtomicIntegerFieldUpdater.newUpdater(RetainedReferenceTracker.class, "outstandingState"); private static final int NOT_OUTSTANDING = 0; private static final int OUTSTANDING = 1; private static final AtomicInteger outstandingCount = new AtomicInteger(0); private static final ThreadLocal>> tlPendingDropReferences = - new ThreadLocal<>(); + new ThreadLocal<>(); private static final ThreadLocal>>> tlSavedQueueReference = - new ThreadLocal<>(); + new ThreadLocal<>(); private final Impl impl; - @SuppressWarnings("FieldMayBeFinal") // We are using an AtomicIntegerFieldUpdater (via - // reflection) to change this + @SuppressWarnings("FieldMayBeFinal") // We are using an AtomicIntegerFieldUpdater (via reflection) to change this private volatile int outstandingState = OUTSTANDING; /** * Construct a RetainedReferenceTracker. * * @param manager The {@link LivenessManager} that's using this to track its referents - * @param enforceStrongReachability Whether this tracker should maintain strong references to - * the added referents + * @param enforceStrongReachability Whether this tracker should maintain strong references to the added referents */ RetainedReferenceTracker(@NotNull final TYPE manager, final boolean enforceStrongReachability) { super(manager, CleanupReferenceProcessorInstance.LIVENESS.getReferenceQueue()); impl = enforceStrongReachability ? 
new StrongImpl() : new WeakImpl(); outstandingCount.getAndIncrement(); if (Liveness.DEBUG_MODE_ENABLED) { - ProcessEnvironment.getDefaultLog().info().append("Creating ") - .append(Utils.REFERENT_FORMATTER, this).append(" at ") - .append(new LivenessDebugException()).endl(); + ProcessEnvironment.getDefaultLog().info().append("Creating ").append(Utils.REFERENT_FORMATTER, this) + .append(" at ").append(new LivenessDebugException()).endl(); } } @@ -80,65 +76,57 @@ public final String toString() { * {@link #ensureReferencesDropped()} has already been invoked. * * @param referent The referent to drop on cleanup - * @throws LivenessStateException If {@link #cleanup()} or {@link #ensureReferencesDropped()} - * has already been invoked + * @throws LivenessStateException If {@link #cleanup()} or {@link #ensureReferencesDropped()} has already been + * invoked */ - synchronized final void addReference(@NotNull final LivenessReferent referent) - throws LivenessStateException { + synchronized final void addReference(@NotNull final LivenessReferent referent) throws LivenessStateException { checkOutstanding(); impl.add(referent); } /** *

    - * Remove at most one existing reference to referent from this tracker, so that it will no - * longer be dropped on {@link #cleanup()} or {@link #ensureReferencesDropped()}, and drop it - * immediately. + * Remove at most one existing reference to referent from this tracker, so that it will no longer be dropped on + * {@link #cleanup()} or {@link #ensureReferencesDropped()}, and drop it immediately. *

    - * This is not permitted if {@link #cleanup()} or {@link #ensureReferencesDropped()} has already - * been invoked. + * This is not permitted if {@link #cleanup()} or {@link #ensureReferencesDropped()} has already been invoked. * * @param referent The referent to remove - * @throws LivenessStateException If {@link #cleanup()} or {@link #ensureReferencesDropped()} - * has already been invoked + * @throws LivenessStateException If {@link #cleanup()} or {@link #ensureReferencesDropped()} has already been + * invoked */ - synchronized final void dropReference(@NotNull final LivenessReferent referent) - throws LivenessStateException { + synchronized final void dropReference(@NotNull final LivenessReferent referent) throws LivenessStateException { checkOutstanding(); impl.drop(referent); } /** *

    - * Remove at most one existing reference to each input referent from this tracker, so that it - * will no longer be dropped on {@link #cleanup()} or {@link #ensureReferencesDropped()}, and - * drop it immediately. + * Remove at most one existing reference to each input referent from this tracker, so that it will no longer be + * dropped on {@link #cleanup()} or {@link #ensureReferencesDropped()}, and drop it immediately. *

    - * This is not permitted if {@link #cleanup()} or {@link #ensureReferencesDropped()} has already - * been invoked. + * This is not permitted if {@link #cleanup()} or {@link #ensureReferencesDropped()} has already been invoked. * * @param referents The referents to remove - * @throws LivenessStateException If {@link #cleanup()} or {@link #ensureReferencesDropped()} - * has already been invoked + * @throws LivenessStateException If {@link #cleanup()} or {@link #ensureReferencesDropped()} has already been + * invoked */ - synchronized final void dropReferences( - @NotNull final Collection referents) - throws LivenessStateException { + synchronized final void dropReferences(@NotNull final Collection referents) + throws LivenessStateException { checkOutstanding(); impl.drop(referents); } /** *

    - * Move all {@link LivenessReferent}s previously added to this tracker to other, which becomes - * responsible for dropping them. + * Move all {@link LivenessReferent}s previously added to this tracker to other, which becomes responsible for + * dropping them. *

    - * This is not permitted if {@link #cleanup()} or {@link #ensureReferencesDropped()} has already - * been invoked. + * This is not permitted if {@link #cleanup()} or {@link #ensureReferencesDropped()} has already been invoked. * * @param other The other tracker - * @throws LivenessStateException If {@link #cleanup()} or {@link #ensureReferencesDropped()} - * has already been invoked + * @throws LivenessStateException If {@link #cleanup()} or {@link #ensureReferencesDropped()} has already been + * invoked */ synchronized final void transferReferencesTo(@NotNull final RetainedReferenceTracker other) { checkOutstanding(); @@ -152,14 +140,13 @@ synchronized final void transferReferencesTo(@NotNull final RetainedReferenceTra /** *

    - * Remove all {@link LivenessReferent}s previously added to this tracker, unless they have been - * transferred, without dropping them. Uses to make references "permanent". + * Remove all {@link LivenessReferent}s previously added to this tracker, unless they have been transferred, without + * dropping them. Uses to make references "permanent". *

    - * This is not permitted if {@link #cleanup()} or {@link #ensureReferencesDropped()} has already - * been invoked. + * This is not permitted if {@link #cleanup()} or {@link #ensureReferencesDropped()} has already been invoked. * - * @throws LivenessStateException If {@link #cleanup()} or {@link #ensureReferencesDropped()} - * has already been invoked + * @throws LivenessStateException If {@link #cleanup()} or {@link #ensureReferencesDropped()} has already been + * invoked */ synchronized void makeReferencesPermanent() { checkOutstanding(); @@ -168,8 +155,8 @@ synchronized void makeReferencesPermanent() { private void checkOutstanding() { if (outstandingState == NOT_OUTSTANDING) { - throw new LivenessStateException("RetainedReferenceTracker " + this - + " has already performed cleanup for manager " + get()); + throw new LivenessStateException( + "RetainedReferenceTracker " + this + " has already performed cleanup for manager " + get()); } } @@ -180,8 +167,8 @@ public final void cleanup() { /** *

    - * Initiate the idempotent cleanup process. This will drop all retained references if their - * referents still exist. No new references may be added to or dropped from this tracker. + * Initiate the idempotent cleanup process. This will drop all retained references if their referents still exist. + * No new references may be added to or dropped from this tracker. */ final void ensureReferencesDropped() { ensureReferencesDroppedInternal(false); @@ -193,21 +180,18 @@ private void ensureReferencesDroppedInternal(final boolean onCleanup) { } if (Liveness.DEBUG_MODE_ENABLED || (onCleanup && Liveness.CLEANUP_LOG_ENABLED)) { Liveness.log.info().append("LivenessDebug: Ensuring references dropped ") - .append(onCleanup ? "(on cleanup) " : "").append("for ") - .append(Utils.REFERENT_FORMATTER, this).endl(); + .append(onCleanup ? "(on cleanup) " : "").append("for ").append(Utils.REFERENT_FORMATTER, this) + .endl(); } outstandingCount.decrementAndGet(); - Queue> pendingDropReferences = - tlPendingDropReferences.get(); + Queue> pendingDropReferences = tlPendingDropReferences.get(); final boolean processDrops = pendingDropReferences == null; if (processDrops) { final SoftReference>> savedQueueReference = - tlSavedQueueReference.get(); - if (savedQueueReference == null - || (pendingDropReferences = savedQueueReference.get()) == null) { - tlSavedQueueReference - .set(new SoftReference<>(pendingDropReferences = new ArrayDeque<>())); + tlSavedQueueReference.get(); + if (savedQueueReference == null || (pendingDropReferences = savedQueueReference.get()) == null) { + tlSavedQueueReference.set(new SoftReference<>(pendingDropReferences = new ArrayDeque<>())); } tlPendingDropReferences.set(pendingDropReferences); } @@ -238,11 +222,11 @@ private void ensureReferencesDroppedInternal(final boolean onCleanup) { /** *

    - * Get the number of outstanding trackers (instances of RetainedReferenceTracker that have not - * had their {@link #cleanup()} or {@link #ensureReferencesDropped()} method called). + * Get the number of outstanding trackers (instances of RetainedReferenceTracker that have not had their + * {@link #cleanup()} or {@link #ensureReferencesDropped()} method called). *

    - * Note that this number represents the liveness system's current knowledge of the number of - * live references in the system. + * Note that this number represents the liveness system's current knowledge of the number of live references in the + * system. * * @return The number of outstanding trackers */ @@ -264,8 +248,7 @@ private interface Impl extends Iterable { private static final class WeakImpl implements Impl { - private final List> retainedReferences = - new ArrayList<>(); + private final List> retainedReferences = new ArrayList<>(); @Override public void add(@NotNull final LivenessReferent referent) { @@ -275,8 +258,7 @@ public void add(@NotNull final LivenessReferent referent) { @Override public void drop(@NotNull final LivenessReferent referent) { for (int rrLast = retainedReferences.size() - 1, rri = 0; rri <= rrLast;) { - final WeakReference retainedReference = - retainedReferences.get(rri); + final WeakReference retainedReference = retainedReferences.get(rri); final boolean cleared; final boolean found; { @@ -302,11 +284,10 @@ public void drop(@NotNull final LivenessReferent referent) { @Override public void drop(@NotNull final Collection referents) { final Set referentsToRemove = - new KeyedObjectHashSet<>(IdentityKeyedObjectKey.getInstance()); + new KeyedObjectHashSet<>(IdentityKeyedObjectKey.getInstance()); referentsToRemove.addAll(referents); for (int rrLast = retainedReferences.size() - 1, rri = 0; rri <= rrLast;) { - final WeakReference retainedReference = - retainedReferences.get(rri); + final WeakReference retainedReference = retainedReferences.get(rri); final boolean cleared; final boolean found; { @@ -324,9 +305,8 @@ public void drop(@NotNull final Collection referents retainedReferences.remove(rrLast--); if (found) { final LivenessReferent referent = retainedReference.get(); - if (referent != null) { // Probably unnecessary, unless the referents collection - // is engaged in some reference trickery internally, but - // better safe 
than sorry. + if (referent != null) { // Probably unnecessary, unless the referents collection is engaged in some + // reference trickery internally, but better safe than sorry. referent.dropReference(); } if (referentsToRemove.isEmpty()) { @@ -352,7 +332,7 @@ public Iterator iterator() { return new Iterator() { private final Iterator> internal = - retainedReferences.iterator(); + retainedReferences.iterator(); @Override public boolean hasNext() { @@ -369,8 +349,7 @@ public LivenessReferent next() { private static final class StrongImpl implements Impl { - private static final RetentionCache permanentReferences = - new RetentionCache<>(); + private static final RetentionCache permanentReferences = new RetentionCache<>(); private final List retained = new ArrayList<>(); @@ -401,7 +380,7 @@ public void drop(@NotNull final Collection referents return; } final Set referentsToRemove = - new KeyedObjectHashSet<>(IdentityKeyedObjectKey.getInstance()); + new KeyedObjectHashSet<>(IdentityKeyedObjectKey.getInstance()); referentsToRemove.addAll(referents); for (int rLast = retained.size() - 1, ri = 0; ri <= rLast;) { final LivenessReferent current = retained.get(ri); @@ -427,8 +406,7 @@ public void clear() { @Override public void makePermanent() { - // See LivenessScope.transferTo: This is currently unreachable code, but implemented for - // completeness + // See LivenessScope.transferTo: This is currently unreachable code, but implemented for completeness retained.forEach(permanentReferences::retain); retained.clear(); } diff --git a/DB/src/main/java/io/deephaven/db/util/liveness/SingletonLivenessManager.java b/DB/src/main/java/io/deephaven/db/util/liveness/SingletonLivenessManager.java index 1a80693e73a..e1a8350c110 100644 --- a/DB/src/main/java/io/deephaven/db/util/liveness/SingletonLivenessManager.java +++ b/DB/src/main/java/io/deephaven/db/util/liveness/SingletonLivenessManager.java @@ -6,14 +6,13 @@ import java.util.concurrent.atomic.AtomicReferenceFieldUpdater; /** - * 
{@link ReleasableLivenessManager} to manage exactly one object, passed at construction time or - * managed later. + * {@link ReleasableLivenessManager} to manage exactly one object, passed at construction time or managed later. */ public class SingletonLivenessManager implements ReleasableLivenessManager { private static final AtomicReferenceFieldUpdater RETAINED_REFERENCE_UPDATER = - AtomicReferenceFieldUpdater.newUpdater(SingletonLivenessManager.class, WeakReference.class, - "retainedReference"); + AtomicReferenceFieldUpdater.newUpdater(SingletonLivenessManager.class, WeakReference.class, + "retainedReference"); private volatile WeakReference retainedReference; @@ -29,19 +28,17 @@ public SingletonLivenessManager(@NotNull final LivenessReferent referent) { } private void initializeRetainedReference( - @NotNull final WeakReference retainedReference) { + @NotNull final WeakReference retainedReference) { this.retainedReference = retainedReference; } - private boolean setRetainedReference( - @NotNull final WeakReference retainedReference) { + private boolean setRetainedReference(@NotNull final WeakReference retainedReference) { return RETAINED_REFERENCE_UPDATER.compareAndSet(this, null, retainedReference); } private WeakReference getRetainedReference() { // noinspection unchecked - return (WeakReference) RETAINED_REFERENCE_UPDATER - .getAndSet(this, null); + return (WeakReference) RETAINED_REFERENCE_UPDATER.getAndSet(this, null); } @Override @@ -54,8 +51,7 @@ public final boolean tryManage(@NotNull LivenessReferent referent) { } if (!setRetainedReference(referent.getWeakReference())) { referent.dropReference(); - throw new UnsupportedOperationException( - "SingletonLivenessManager can only manage one referent"); + throw new UnsupportedOperationException("SingletonLivenessManager can only manage one referent"); } return true; } diff --git a/DB/src/main/java/io/deephaven/db/util/reference/CleanupReferenceProcessorInstance.java 
b/DB/src/main/java/io/deephaven/db/util/reference/CleanupReferenceProcessorInstance.java index d7258c017dd..62c64004166 100644 --- a/DB/src/main/java/io/deephaven/db/util/reference/CleanupReferenceProcessorInstance.java +++ b/DB/src/main/java/io/deephaven/db/util/reference/CleanupReferenceProcessorInstance.java @@ -14,22 +14,19 @@ public enum CleanupReferenceProcessorInstance { DEFAULT(new CleanupReferenceProcessor("default", 1000, - (l, r, e) -> l.warn().append(Thread.currentThread().getName()) - .append(": Exception thrown from cleanup of ") - .append(Utils.REFERENT_FORMATTER, r).append(": ").append(e).endl())), LIVENESS( - new CleanupReferenceProcessor("liveness", 1000, (l, r, e) -> { - if (e instanceof RuntimeException) { - throw (RuntimeException) e; - } - throw new RuntimeException(e); - })); + (l, r, e) -> l.warn().append(Thread.currentThread().getName()).append(": Exception thrown from cleanup of ") + .append(Utils.REFERENT_FORMATTER, r).append(": ").append(e).endl())), LIVENESS( + new CleanupReferenceProcessor("liveness", 1000, (l, r, e) -> { + if (e instanceof RuntimeException) { + throw (RuntimeException) e; + } + throw new RuntimeException(e); + })); private final CleanupReferenceProcessor cleanupReferenceProcessor; - CleanupReferenceProcessorInstance( - @NotNull final CleanupReferenceProcessor cleanupReferenceProcessor) { - this.cleanupReferenceProcessor = - Require.neqNull(cleanupReferenceProcessor, "cleanupReferenceProcessor"); + CleanupReferenceProcessorInstance(@NotNull final CleanupReferenceProcessor cleanupReferenceProcessor) { + this.cleanupReferenceProcessor = Require.neqNull(cleanupReferenceProcessor, "cleanupReferenceProcessor"); } public final ReferenceQueue getReferenceQueue() { diff --git a/DB/src/main/java/io/deephaven/db/util/scripts/ConsoleScriptPathLoader.java b/DB/src/main/java/io/deephaven/db/util/scripts/ConsoleScriptPathLoader.java index c9dab0fee8d..39070f7eb44 100644 --- 
a/DB/src/main/java/io/deephaven/db/util/scripts/ConsoleScriptPathLoader.java +++ b/DB/src/main/java/io/deephaven/db/util/scripts/ConsoleScriptPathLoader.java @@ -12,8 +12,8 @@ import java.util.Set; /** - * This loader loads *only* using {@link GroovyDeephavenSession#findScript(String)}. Consistency - * locking and refresh methods are no-ops. + * This loader loads *only* using {@link GroovyDeephavenSession#findScript(String)}. Consistency locking and refresh + * methods are no-ops. */ public class ConsoleScriptPathLoader implements ScriptPathLoader { @@ -34,8 +34,7 @@ public String getScriptBodyByDisplayPath(@NotNull final String displayPath) thro } @Override - public String getScriptBodyByRelativePath(@NotNull final String relativePath) - throws IOException { + public String getScriptBodyByRelativePath(@NotNull final String relativePath) throws IOException { return FileUtils.readTextFile(GroovyDeephavenSession.findScript(relativePath)); } diff --git a/DB/src/main/java/io/deephaven/db/util/scripts/MultiScriptPathLoader.java b/DB/src/main/java/io/deephaven/db/util/scripts/MultiScriptPathLoader.java index 95dfcbdb72c..95c106ecfe6 100644 --- a/DB/src/main/java/io/deephaven/db/util/scripts/MultiScriptPathLoader.java +++ b/DB/src/main/java/io/deephaven/db/util/scripts/MultiScriptPathLoader.java @@ -14,8 +14,7 @@ /** * A {@link ScriptPathLoader} that will load paths from multiple underlying loaders. */ -public class MultiScriptPathLoader - implements ScriptPathLoader { +public class MultiScriptPathLoader implements ScriptPathLoader { private final List loaders; private volatile SoftReference> availableScriptDisplayPathsReference; @@ -31,8 +30,7 @@ private static class MultiPathLoaderState implements ScriptPathLoaderState { @Override public String toAbbreviatedString() { final String stateString = StringUtils.joinStrings( - states.stream().map(state -> (state == null) ? "--" : state.toAbbreviatedString()), - " , "); + states.stream().map(state -> (state == null) ? 
"--" : state.toAbbreviatedString()), " , "); return '[' + stateString + ']'; } @@ -71,17 +69,16 @@ public Set getAvailableScriptDisplayPaths() { SoftReference> localRef; Set localSet; - if (((localRef = availableScriptDisplayPathsReference) == null) - || (localSet = localRef.get()) == null) { + if (((localRef = availableScriptDisplayPathsReference) == null) || (localSet = localRef.get()) == null) { synchronized (this) { if (((localRef = availableScriptDisplayPathsReference) == null) - || (localSet = localRef.get()) == null) { + || (localSet = localRef.get()) == null) { localSet = new HashSet<>(); for (final ScriptPathLoader loader : loaders) { localSet.addAll(loader.getAvailableScriptDisplayPaths()); } availableScriptDisplayPathsReference = - new SoftReference<>(localSet = Collections.unmodifiableSet(localSet)); + new SoftReference<>(localSet = Collections.unmodifiableSet(localSet)); } } } @@ -112,31 +109,29 @@ public String getScriptBodyByRelativePath(final String relativePath) throws IOEx } @Override - public Set getAvailableScriptDisplayPaths(final ScriptPathLoaderState state) - throws IOException { + public Set getAvailableScriptDisplayPaths(final ScriptPathLoaderState state) throws IOException { if (!hasStateInfo(state)) { return getAvailableScriptDisplayPaths(); } final Set paths = new HashSet<>(); for (int i = 0; i < loaders.size(); i++) { - paths.addAll(loaders.get(i) - .getAvailableScriptDisplayPaths(((MultiPathLoaderState) state).states.get(i))); + paths.addAll(loaders.get(i).getAvailableScriptDisplayPaths(((MultiPathLoaderState) state).states.get(i))); } return paths; } @Override - public String getScriptBodyByDisplayPath(final String displayPath, - final ScriptPathLoaderState state) throws IOException { + public String getScriptBodyByDisplayPath(final String displayPath, final ScriptPathLoaderState state) + throws IOException { if (!hasStateInfo(state)) { return getScriptBodyByDisplayPath(displayPath); } for (int i = 0; i < loaders.size(); i++) { final 
String result = loaders.get(i).getScriptBodyByDisplayPath(displayPath, - ((MultiPathLoaderState) state).states.get(i)); + ((MultiPathLoaderState) state).states.get(i)); if (result != null) { return result; } @@ -146,15 +141,15 @@ public String getScriptBodyByDisplayPath(final String displayPath, } @Override - public String getScriptBodyByRelativePath(final String relativePath, - final ScriptPathLoaderState state) throws IOException { + public String getScriptBodyByRelativePath(final String relativePath, final ScriptPathLoaderState state) + throws IOException { if (!hasStateInfo(state)) { return getScriptBodyByRelativePath(relativePath); } for (int i = 0; i < loaders.size(); i++) { final String result = loaders.get(i).getScriptBodyByRelativePath(relativePath, - ((MultiPathLoaderState) state).states.get(i)); + ((MultiPathLoaderState) state).states.get(i)); if (result != null) { return result; } @@ -170,7 +165,7 @@ private boolean hasStateInfo(final ScriptPathLoaderState state) { if (!(state instanceof MultiPathLoaderState)) { throw new IllegalArgumentException( - "Incorrect state type (" + state.getClass() + ") for MultiScriptPathLoader"); + "Incorrect state type (" + state.getClass() + ") for MultiScriptPathLoader"); } return true; diff --git a/DB/src/main/java/io/deephaven/db/util/scripts/ScriptPathLoader.java b/DB/src/main/java/io/deephaven/db/util/scripts/ScriptPathLoader.java index b75118974bf..7d32cae9cf9 100644 --- a/DB/src/main/java/io/deephaven/db/util/scripts/ScriptPathLoader.java +++ b/DB/src/main/java/io/deephaven/db/util/scripts/ScriptPathLoader.java @@ -13,8 +13,8 @@ public interface ScriptPathLoader { /** - * Acquire a read lock. Use before invoking any of the get* methods, and hold for as long as - * consistency is required for this loader. + * Acquire a read lock. Use before invoking any of the get* methods, and hold for as long as consistency is required + * for this loader. 
*/ void lock(); @@ -53,8 +53,8 @@ public interface ScriptPathLoader { String getScriptBodyByRelativePath(final String relativePath) throws IOException; /** - * Gets the display paths available from this loader when it was in the specified - * {@link ScriptPathLoaderState state}. + * Gets the display paths available from this loader when it was in the specified {@link ScriptPathLoaderState + * state}. * * @param state The state of the loader to use when retrieving the list. * @@ -62,8 +62,7 @@ public interface ScriptPathLoader { * * @throws IOException If a problem occurred loading the script. */ - default Set getAvailableScriptDisplayPaths(final ScriptPathLoaderState state) - throws IOException { + default Set getAvailableScriptDisplayPaths(final ScriptPathLoaderState state) throws IOException { return getAvailableScriptDisplayPaths(); } @@ -77,8 +76,8 @@ default Set getAvailableScriptDisplayPaths(final ScriptPathLoaderState s * * @throws IOException If a problem occurred loading the script. */ - default String getScriptBodyByDisplayPath(final String displayPath, - final ScriptPathLoaderState state) throws IOException { + default String getScriptBodyByDisplayPath(final String displayPath, final ScriptPathLoaderState state) + throws IOException { return getScriptBodyByDisplayPath(displayPath); } @@ -92,8 +91,8 @@ default String getScriptBodyByDisplayPath(final String displayPath, * * @throws IOException If a problem occurred loading the script. 
*/ - default String getScriptBodyByRelativePath(final String relativePath, - final ScriptPathLoaderState state) throws IOException { + default String getScriptBodyByRelativePath(final String relativePath, final ScriptPathLoaderState state) + throws IOException { return getScriptBodyByRelativePath(relativePath); } diff --git a/DB/src/main/java/io/deephaven/db/util/scripts/ScriptRepository.java b/DB/src/main/java/io/deephaven/db/util/scripts/ScriptRepository.java index 6a8d3790f1d..64cc66d99b5 100644 --- a/DB/src/main/java/io/deephaven/db/util/scripts/ScriptRepository.java +++ b/DB/src/main/java/io/deephaven/db/util/scripts/ScriptRepository.java @@ -38,15 +38,14 @@ * A {@link ScriptPathLoader} that loads scripts from a git repository. *

    *

    - * If this class is created with updateEnabled = false it loads scripts as if no git repository was - * present. + * If this class is created with updateEnabled = false it loads scripts as if no git repository was present. *

    */ public class ScriptRepository implements ScriptPathLoader { - private static final int gitHashDisplayLength = Configuration.getInstance() - .getIntegerWithDefault("ScriptRepository.githashdisplaylenth", 8); - private static final String[] scriptExtensions = ScriptExtensionsMap.getInstance().values() - .stream().flatMap(List::stream).map(x -> "." + x.toLowerCase()).toArray(String[]::new); + private static final int gitHashDisplayLength = + Configuration.getInstance().getIntegerWithDefault("ScriptRepository.githashdisplaylenth", 8); + private static final String[] scriptExtensions = ScriptExtensionsMap.getInstance().values().stream() + .flatMap(List::stream).map(x -> "." + x.toLowerCase()).toArray(String[]::new); private final Logger log; private final String name; @@ -104,32 +103,31 @@ public String toString() { * Constructs the script repository instance. *

    * - * Note that in the case of the Controller, a misconfiguration of a script repository (which - * will result in an exception to be thrown) will cause the Controller to fail to start. This is - * intentional: the Controller configuration is an all-or-nothing thing, and attempts to limp - * along could cause a misconfigured Controller to stay unnoticed for weeks, and would take days - * to correct. + * Note that in the case of the Controller, a misconfiguration of a script repository (which will result in an + * exception to be thrown) will cause the Controller to fail to start. This is intentional: the Controller + * configuration is an all-or-nothing thing, and attempts to limp along could cause a misconfigured Controller to + * stay unnoticed for weeks, and would take days to correct. * * @throws RuntimeException if the repository configuration is incorrect. */ - // TODO: Move most/all of the repo configuration into the ACL MySQL DB, or add runtime - // re-configuration some other way. + // TODO: Move most/all of the repo configuration into the ACL MySQL DB, or add runtime re-configuration some other + // way. ScriptRepository(@NotNull final Logger log, - @NotNull final String name, - @NotNull final Set groupNames, - @NotNull final String gitURI, - final boolean updateEnabled, - final boolean gcEnabled, - @NotNull final String remoteOrigin, - @Nullable final String branch, - final boolean prefixDisplayPathsWithRepoName, - @NotNull final Path rootPath, - final boolean resetGitLockFiles, - @NotNull final Path... searchPaths) { + @NotNull final String name, + @NotNull final Set groupNames, + @NotNull final String gitURI, + final boolean updateEnabled, + final boolean gcEnabled, + @NotNull final String remoteOrigin, + @Nullable final String branch, + final boolean prefixDisplayPathsWithRepoName, + @NotNull final Path rootPath, + final boolean resetGitLockFiles, + @NotNull final Path... 
searchPaths) { this.log = log; this.name = name; - this.groupNames = groupNames == CollectionUtil.UNIVERSAL_SET ? groupNames - : Collections.unmodifiableSet(groupNames); + this.groupNames = + groupNames == CollectionUtil.UNIVERSAL_SET ? groupNames : Collections.unmodifiableSet(groupNames); this.prefixDisplayPathsWithRepoName = prefixDisplayPathsWithRepoName; this.gcEnabled = gcEnabled; this.rootPath = rootPath; @@ -147,8 +145,7 @@ public String toString() { @Override public boolean include(TreeWalk walker) { - return walker.isSubtree() - || walker.getNameString().toLowerCase().endsWith(suffix); + return walker.isSubtree() || walker.getNameString().toLowerCase().endsWith(suffix); } @Override @@ -192,9 +189,9 @@ public TreeFilter clone() { * @throws RuntimeException if the setup failed. */ private @Nullable Git setUpGitRepository(final boolean updateEnabled, - final String gitURI, - final String branch, - final boolean resetGitLockFiles) { + final String gitURI, + final String branch, + final boolean resetGitLockFiles) { if (!updateEnabled) { return null; } @@ -210,8 +207,7 @@ public TreeFilter clone() { try { Files.delete(lockFile); } catch (IOException e) { - throw new IOException("Unable to delete git lock file " + lockFileName, - e); + throw new IOException("Unable to delete git lock file " + lockFileName, e); } } } @@ -220,9 +216,8 @@ public TreeFilter clone() { final Repository gitRepo = tempGit.getRepository(); final RepositoryState gitRepoState = gitRepo.getRepositoryState(); if (gitRepoState != RepositoryState.SAFE) { - throw new IllegalStateException( - logPrefix + "repository is not in expected state (SAFE), instead state is: " - + gitRepoState); + throw new IllegalStateException(logPrefix + + "repository is not in expected state (SAFE), instead state is: " + gitRepoState); } try { @@ -230,8 +225,8 @@ public TreeFilter clone() { lastGitRefresh = System.currentTimeMillis(); } catch (final Exception ex) { log.warn().append(logPrefix) - .append("Initial git 
fetch failed, but repository was cloned, continuing. ") - .append(ex).endl(); + .append("Initial git fetch failed, but repository was cloned, continuing. ").append(ex) + .endl(); } final List localBranches = tempGit.branchList().call(); @@ -244,15 +239,14 @@ public TreeFilter clone() { } } - final CheckoutCommand checkoutCommand = - tempGit.checkout().setName(branch).setCreateBranch(needCreate).setForce(true) - .setUpstreamMode(CreateBranchCommand.SetupUpstreamMode.SET_UPSTREAM) + final CheckoutCommand checkoutCommand = tempGit.checkout().setName(branch).setCreateBranch(needCreate) + .setForce(true).setUpstreamMode(CreateBranchCommand.SetupUpstreamMode.SET_UPSTREAM) .setStartPoint(upstreamBranch); checkoutCommand.call(); final CheckoutResult checkoutResult = checkoutCommand.getResult(); if (checkoutResult.getStatus() != CheckoutResult.Status.OK) { throw new IllegalStateException( - logPrefix + "checkout of branch " + branch + " failed: " + checkoutResult); + logPrefix + "checkout of branch " + branch + " failed: " + checkoutResult); } tempGit.reset().setMode(ResetCommand.ResetType.HARD).setRef(upstreamBranch).call(); @@ -267,8 +261,8 @@ public TreeFilter clone() { uriToClone = gitURI; } - return Git.cloneRepository().setBranch(branch).setURI(uriToClone) - .setDirectory(rootPath.toFile()).call(); + return Git.cloneRepository().setBranch(branch).setURI(uriToClone).setDirectory(rootPath.toFile()) + .call(); } } catch (Exception e) { throw new RuntimeException(logPrefix + "error setting up git repository", e); @@ -285,8 +279,7 @@ public String getName() { } /** - * Get the users allowed to access this repo, defined by the - * [prefix].scripts.repo.[name].users property. + * Get the users allowed to access this repo, defined by the [prefix].scripts.repo.[name].users property. * * @return The names of all users allowed to access the repo. 
*/ @@ -296,8 +289,7 @@ public Set getGroupNames() { } /** - * Get a {@link ScriptPathLoaderState state} object that represents the current branch HEAD - * commit. + * Get a {@link ScriptPathLoaderState state} object that represents the current branch HEAD commit. * * @return The current branch HEAD or null if updates were disabled. */ @@ -310,8 +302,7 @@ public ScriptPathLoaderState getState() { try { return new GitState(name, upstreamBranch, getCurrentRevision()); } catch (IOException e) { - throw new UncheckedIOException( - "Unable to get ref Id for " + name + ": " + upstreamBranch, e); + throw new UncheckedIOException("Unable to get ref Id for " + name + ": " + upstreamBranch, e); } } @@ -347,16 +338,15 @@ private void scanFileTree() throws IOException { for (final ScriptFileVisitor searchPathVisitor : searchPathVisitors) { if (log.isDebugEnabled()) { - log.debug().append(logPrefix).append("searching ") - .append(searchPathVisitor.searchPath.toString()).endl(); + log.debug().append(logPrefix).append("searching ").append(searchPathVisitor.searchPath.toString()) + .endl(); } try { Files.walkFileTree(searchPathVisitor.searchPath, searchPathVisitor); } catch (IOException e) { - throw new IOException( - logPrefix + "Error while searching " + searchPathVisitor.searchPath.toString(), - e); + throw new IOException(logPrefix + "Error while searching " + searchPathVisitor.searchPath.toString(), + e); } } @@ -373,23 +363,20 @@ private ScriptFileVisitor(@NotNull final Path searchPath) { @Override public FileVisitResult visitFile(final Path file, - final BasicFileAttributes attrs) throws IOException { + final BasicFileAttributes attrs) throws IOException { final FileVisitResult result = super.visitFile(file, attrs); - if (result == FileVisitResult.CONTINUE && Arrays.stream(scriptExtensions).anyMatch( - extension -> file.getFileName().toString().toLowerCase().endsWith(extension))) { - final String displayPathString = - ((prefixDisplayPathsWithRepoName ? 
name + File.separator : "") + if (result == FileVisitResult.CONTINUE && Arrays.stream(scriptExtensions) + .anyMatch(extension -> file.getFileName().toString().toLowerCase().endsWith(extension))) { + final String displayPathString = ((prefixDisplayPathsWithRepoName ? name + File.separator : "") + rootPath.relativize(file).toString()).replace('\\', '/'); if (log.isDebugEnabled()) { - log.debug().append(logPrefix).append("adding script path: display=") - .append(displayPathString).append(", absolute=").append(file.toString()) - .endl(); + log.debug().append(logPrefix).append("adding script path: display=").append(displayPathString) + .append(", absolute=").append(file.toString()).endl(); } displayPathStringToScript.put(displayPathString, file); - final String relativePathString = - searchPath.relativize(file).toString().replace('\\', '/'); + final String relativePathString = searchPath.relativize(file).toString().replace('\\', '/'); relativePathStringToScript.putIfAbsent(relativePathString, file); } return result; @@ -442,8 +429,7 @@ public String getScriptBodyByDisplayPath(@NotNull final String displayPath) thro * @throws IOException If the file is not accessible */ @Override - public String getScriptBodyByRelativePath(@NotNull final String relativePath) - throws IOException { + public String getScriptBodyByRelativePath(@NotNull final String relativePath) throws IOException { lock(); try { return getScriptBody(relativePathStringToScript.get(relativePath)); @@ -472,36 +458,34 @@ private static String getScriptBody(final Path path) throws IOException { } @Override - public Set getAvailableScriptDisplayPaths(final ScriptPathLoaderState state) - throws IOException { + public Set getAvailableScriptDisplayPaths(final ScriptPathLoaderState state) throws IOException { final Set items = new HashSet<>(); this.doTreeWalk(() -> items.addAll(getAvailableScriptDisplayPaths()), - (repository, objectId, treeWalk) -> items - .add((prefixDisplayPathsWithRepoName ? 
name + File.separator : "") - + treeWalk.getPathString()), - scriptTreeFilter, state); + (repository, objectId, treeWalk) -> items + .add((prefixDisplayPathsWithRepoName ? name + File.separator : "") + treeWalk.getPathString()), + scriptTreeFilter, state); return items; } @Override - public String getScriptBodyByRelativePath(final String relativePath, - final ScriptPathLoaderState state) throws IOException { + public String getScriptBodyByRelativePath(final String relativePath, final ScriptPathLoaderState state) + throws IOException { return getScriptBodyByCommit(relativePathStringToScript.get(relativePath), state); } @Override - public String getScriptBodyByDisplayPath(final String displayPath, - final ScriptPathLoaderState state) throws IOException { + public String getScriptBodyByDisplayPath(final String displayPath, final ScriptPathLoaderState state) + throws IOException { return getScriptBodyByCommit(displayPathStringToScript.get(displayPath), state); } /** * Use get the script body for a specific commit using a git {@link TreeWalk}. * - * @implNote If the specified commit is the same as the current HEAD, this will go to the - * filesystem instead of performing the tree walk. + * @implNote If the specified commit is the same as the current HEAD, this will go to the filesystem instead of + * performing the tree walk. * * @param path The absolute path to the file. * @param state The state containing the commit details. @@ -510,8 +494,7 @@ public String getScriptBodyByDisplayPath(final String displayPath, * * @throws IOException If there was a problem reading the file. 
*/ - private String getScriptBodyByCommit(final Path path, final ScriptPathLoaderState state) - throws IOException { + private String getScriptBodyByCommit(final Path path, final ScriptPathLoaderState state) throws IOException { if (path == null) { return null; } @@ -523,13 +506,13 @@ private String getScriptBodyByCommit(final Path path, final ScriptPathLoaderStat lock(); try { this.doTreeWalk(() -> resultHolder.setValue(getScriptBody(path)), - (repository, objectId, treeWalk) -> { - final ObjectLoader loader = repository.open(objectId); + (repository, objectId, treeWalk) -> { + final ObjectLoader loader = repository.open(objectId); - // Then grab the contents of the object found - final byte[] contents = loader.getBytes(); - resultHolder.setValue(new String(contents, 0, contents.length)); - }, PathFilter.create(repoPath.toString()), state); + // Then grab the contents of the object found + final byte[] contents = loader.getBytes(); + resultHolder.setValue(new String(contents, 0, contents.length)); + }, PathFilter.create(repoPath.toString()), state); return resultHolder.getValue(); } finally { @@ -540,8 +523,7 @@ private String getScriptBodyByCommit(final Path path, final ScriptPathLoaderStat /** * Perform a tree walk of the specified commit, using a {@link TreeFilter filter}. * - * @param fallback The method to call if There is no state, or the requested commit is the same - * as HEAD. + * @param fallback The method to call if There is no state, or the requested commit is the same as HEAD. * @param objectConsumer A consumer to handle the individual matches of the walk. * @param filter The filter to use to match items during the walk. * @param state The state object containing the commit information. @@ -550,14 +532,13 @@ private String getScriptBodyByCommit(final Path path, final ScriptPathLoaderStat * @throws E If one of the input methods throws E. * @throws IOException If there was a problem during the tree walk. 
*/ - private void doTreeWalk( - final FunctionalInterfaces.ThrowingRunnable fallback, - final FunctionalInterfaces.ThrowingTriConsumer objectConsumer, - final TreeFilter filter, - final ScriptPathLoaderState state) throws E, IOException { - - // If we are not actually using git, the requested commit is blank, or the default state, go - // ahead and invoke the fallback method + private void doTreeWalk(final FunctionalInterfaces.ThrowingRunnable fallback, + final FunctionalInterfaces.ThrowingTriConsumer objectConsumer, + final TreeFilter filter, + final ScriptPathLoaderState state) throws E, IOException { + + // If we are not actually using git, the requested commit is blank, or the default state, go ahead and invoke + // the fallback method if ((git == null) || (state == null)) { fallback.run(); return; @@ -565,8 +546,8 @@ private void doTreeWalk( // If the state object isn't a GitState then something bad(tm) happened if (!(state instanceof GitState)) { - throw new IllegalArgumentException("Repo state (" + state.getClass().getName() - + ") is incorrect for ScriptRepository"); + throw new IllegalArgumentException( + "Repo state (" + state.getClass().getName() + ") is incorrect for ScriptRepository"); } final GitState gs = (GitState) state; @@ -606,8 +587,7 @@ public void refresh() { try { if (gcEnabled && ((lastGitGc + TimeConstants.DAY) < System.currentTimeMillis())) { - log.info().append(logPrefix).append("git gc took place more than 24 hours ago") - .endl(); + log.info().append(logPrefix).append("git gc took place more than 24 hours ago").endl(); try { git.gc().call(); lastGitGc = System.currentTimeMillis(); @@ -631,23 +611,20 @@ public void refresh() { git.reset().setMode(ResetCommand.ResetType.HARD).setRef(upstreamBranch).call(); scanFileTree(); } catch (GitAPIException e) { - log.warn().append(logPrefix).append("error resetting git repository: ").append(e) - .endl(); + log.warn().append(logPrefix).append("error resetting git repository: ").append(e).endl(); 
return; } catch (IOException e) { - log.warn().append(logPrefix).append("error refreshing script paths: ").append(e) - .endl(); + log.warn().append(logPrefix).append("error refreshing script paths: ").append(e).endl(); return; } finally { consistencyLock.writeLock().unlock(); } log.info().append(logPrefix).append("Successful git refresh after ") - .append(System.currentTimeMillis() - lastGitRefresh).append("ms").endl(); + .append(System.currentTimeMillis() - lastGitRefresh).append("ms").endl(); lastGitRefresh = System.currentTimeMillis(); } catch (Exception e) { - // We are overly cautious here, to make sure that a failure to refresh the repository - // doesn't crash + // We are overly cautious here, to make sure that a failure to refresh the repository doesn't crash // the running process (in particular if it's the Controller). log.error().append(logPrefix).append("error refreshing repository: ").append(e).endl(); } @@ -667,85 +644,82 @@ public void close() { private static Path normalizeRootPath(final Configuration config, final String rootPathString) { final Path propertyRootPath = Paths.get(rootPathString); return propertyRootPath.isAbsolute() ? 
propertyRootPath - : Paths.get(config.getWorkspacePath(), rootPathString).toAbsolutePath(); + : Paths.get(config.getWorkspacePath(), rootPathString).toAbsolutePath(); } private static ScriptRepository readRepoConfig(@NotNull final Configuration config, - @NotNull final String propertyPrefix, - @NotNull final Logger log, - final boolean globalUpdateEnabled, - final boolean globalGcEnabled, - @Nullable final String defaultBranch, - final boolean resetGitLockFiles, - @NotNull final String repoName) { + @NotNull final String propertyPrefix, + @NotNull final Logger log, + final boolean globalUpdateEnabled, + final boolean globalGcEnabled, + @Nullable final String defaultBranch, + final boolean resetGitLockFiles, + @NotNull final String repoName) { final Set userNames = - config.getNameStringSetFromProperty(propertyPrefix + "repo." + repoName + ".groups"); - final boolean updateEnabled = globalUpdateEnabled - && config.getBoolean(propertyPrefix + "repo." + repoName + ".updateEnabled"); - final boolean gcEnabled = globalGcEnabled && config - .getBooleanWithDefault(propertyPrefix + "repo." + repoName + ".gcEnabled", true); + config.getNameStringSetFromProperty(propertyPrefix + "repo." + repoName + ".groups"); + final boolean updateEnabled = + globalUpdateEnabled && config.getBoolean(propertyPrefix + "repo." + repoName + ".updateEnabled"); + final boolean gcEnabled = globalGcEnabled + && config.getBooleanWithDefault(propertyPrefix + "repo." + repoName + ".gcEnabled", true); final String remoteOrigin = - config.getStringWithDefault(propertyPrefix + "repo." + repoName + ".remote", "origin"); - final String branch = config - .getStringWithDefault(propertyPrefix + "repo." + repoName + ".branch", defaultBranch); - Require.requirement(!(updateEnabled && branch == null), - "!(updateEnabled && branch == null)"); - final boolean prefixDisplayPathsWithRepoName = config - .getBoolean(propertyPrefix + "repo." 
+ repoName + ".prefixDisplayPathsWithRepoName"); - final Path rootPath = normalizeRootPath(config, - config.getProperty(propertyPrefix + "repo." + repoName + ".root")); + config.getStringWithDefault(propertyPrefix + "repo." + repoName + ".remote", "origin"); + final String branch = + config.getStringWithDefault(propertyPrefix + "repo." + repoName + ".branch", defaultBranch); + Require.requirement(!(updateEnabled && branch == null), "!(updateEnabled && branch == null)"); + final boolean prefixDisplayPathsWithRepoName = + config.getBoolean(propertyPrefix + "repo." + repoName + ".prefixDisplayPathsWithRepoName"); + final Path rootPath = + normalizeRootPath(config, config.getProperty(propertyPrefix + "repo." + repoName + ".root")); final String gitURI = config.getProperty(propertyPrefix + "repo." + repoName + ".uri"); - final String[] searchPathSuffixes = config - .getProperty(propertyPrefix + "repo." + repoName + ".paths").trim().split("[, ]+"); + final String[] searchPathSuffixes = + config.getProperty(propertyPrefix + "repo." + repoName + ".paths").trim().split("[, ]+"); final Path[] searchPaths = new Path[searchPathSuffixes.length]; for (int spi = 0; spi < searchPathSuffixes.length; ++spi) { searchPaths[spi] = rootPath.resolve(searchPathSuffixes[spi]); } log.info().append("Loading Git Repo: ").append(repoName) - .append(". Branch: ").append(branch != null ? branch : "") - .append(". Root Path: ").append(rootPath.toString()) - .append(globalUpdateEnabled ? ". Repository updates enabled" - : ". Repository updates disabled") - .append(globalGcEnabled ? ". Git GC enabled." : ". Git GC disabled.").endl(); - - return new ScriptRepository(log, repoName, userNames, gitURI, updateEnabled, gcEnabled, - remoteOrigin, branch, prefixDisplayPathsWithRepoName, rootPath, resetGitLockFiles, - searchPaths); + .append(". Branch: ").append(branch != null ? branch : "") + .append(". Root Path: ").append(rootPath.toString()) + .append(globalUpdateEnabled ? ". 
Repository updates enabled" : ". Repository updates disabled") + .append(globalGcEnabled ? ". Git GC enabled." : ". Git GC disabled.").endl(); + + return new ScriptRepository(log, repoName, userNames, gitURI, updateEnabled, gcEnabled, remoteOrigin, branch, + prefixDisplayPathsWithRepoName, rootPath, resetGitLockFiles, searchPaths); } private static List readRepoConfigs(@NotNull final Configuration config, - @NotNull final String propertyPrefix, - @NotNull final Logger log, - final boolean globalUpdateEnabled, - final boolean globalGcEnabled, - @Nullable final String defaultBranch, - final boolean resetGitLockFiles, - @NotNull final String... repoNames) { + @NotNull final String propertyPrefix, + @NotNull final Logger log, + final boolean globalUpdateEnabled, + final boolean globalGcEnabled, + @Nullable final String defaultBranch, + final boolean resetGitLockFiles, + @NotNull final String... repoNames) { final List scriptRepositories = new ArrayList<>(repoNames.length); for (final String repoName : repoNames) { if (repoName.isEmpty()) { continue; } - scriptRepositories.add(readRepoConfig(config, propertyPrefix, log, globalUpdateEnabled, - globalGcEnabled, defaultBranch, resetGitLockFiles, repoName)); + scriptRepositories.add(readRepoConfig(config, propertyPrefix, log, globalUpdateEnabled, globalGcEnabled, + defaultBranch, resetGitLockFiles, repoName)); } return scriptRepositories; } public static List readRepoConfigs(@NotNull final Configuration config, - @SuppressWarnings("SameParameterValue") @NotNull final String propertyPrefix, - @NotNull final Logger log, - final boolean globalUpdateEnabled, - final boolean globalGcEnabled, - @Nullable final String defaultBranch, - final boolean resetGitLockFiles) { + @SuppressWarnings("SameParameterValue") @NotNull final String propertyPrefix, + @NotNull final Logger log, + final boolean globalUpdateEnabled, + final boolean globalGcEnabled, + @Nullable final String defaultBranch, + final boolean resetGitLockFiles) { return 
readRepoConfigs(config, - propertyPrefix, - log, - globalUpdateEnabled, - globalGcEnabled, - defaultBranch, - resetGitLockFiles, - config.getProperty(propertyPrefix + "repos").trim().split("[, ]+")); + propertyPrefix, + log, + globalUpdateEnabled, + globalGcEnabled, + defaultBranch, + resetGitLockFiles, + config.getProperty(propertyPrefix + "repos").trim().split("[, ]+")); } } diff --git a/DB/src/main/java/io/deephaven/db/util/scripts/StateOverrideScriptPathLoader.java b/DB/src/main/java/io/deephaven/db/util/scripts/StateOverrideScriptPathLoader.java index 3d18199501e..54950cf98dc 100644 --- a/DB/src/main/java/io/deephaven/db/util/scripts/StateOverrideScriptPathLoader.java +++ b/DB/src/main/java/io/deephaven/db/util/scripts/StateOverrideScriptPathLoader.java @@ -5,8 +5,7 @@ */ public interface StateOverrideScriptPathLoader extends ScriptPathLoader { /** - * Sets a state that should be used for loading operations instead of the state the loader was - * created with. + * Sets a state that should be used for loading operations instead of the state the loader was created with. * * @param state the state to use for loading operations. 
*/ diff --git a/DB/src/main/java/io/deephaven/db/util/serialization/SerializationUtils.java b/DB/src/main/java/io/deephaven/db/util/serialization/SerializationUtils.java index aaaeadac1a8..740a318453b 100644 --- a/DB/src/main/java/io/deephaven/db/util/serialization/SerializationUtils.java +++ b/DB/src/main/java/io/deephaven/db/util/serialization/SerializationUtils.java @@ -18,8 +18,7 @@ */ public class SerializationUtils { - public interface Writer - extends FunctionalInterfaces.ThrowingConsumer { + public interface Writer extends FunctionalInterfaces.ThrowingConsumer { } /** @@ -30,7 +29,7 @@ public interface Writer * @return A new serializing consumer */ public static Writer getWriter(@NotNull final Class itemClass, - @NotNull final ObjectOutput out) { + @NotNull final ObjectOutput out) { if (itemClass == Byte.class) { return k -> out.writeByte((Byte) k); } @@ -74,8 +73,7 @@ public static Writer getWriter(@NotNull final Class - extends FunctionalInterfaces.ThrowingSupplier { + public interface Reader extends FunctionalInterfaces.ThrowingSupplier { } /** @@ -87,7 +85,7 @@ public interface Reader */ @SuppressWarnings("unchecked") public static Reader getReader(@NotNull final Class itemClass, - @NotNull final ObjectInput in) { + @NotNull final ObjectInput in) { if (itemClass == Byte.class) { return () -> (ITEM_TYPE) Byte.valueOf(in.readByte()); } @@ -126,14 +124,12 @@ public static Reader getReader(@NotNull final Class cachedReaders = new TIntObjectHashMap<>(); return () -> { - final StreamingExternalizable key = - (StreamingExternalizable) constructor.newInstance(); + final StreamingExternalizable key = (StreamingExternalizable) constructor.newInstance(); key.readExternalStreaming(in, cachedReaders); return (ITEM_TYPE) key; }; @@ -143,8 +139,7 @@ public static Reader getReader(@NotNull final Class { diff --git a/DB/src/main/java/io/deephaven/db/util/serialization/StreamingExternalizable.java 
b/DB/src/main/java/io/deephaven/db/util/serialization/StreamingExternalizable.java index d1d2f0a92a6..82e2d571c4c 100644 --- a/DB/src/main/java/io/deephaven/db/util/serialization/StreamingExternalizable.java +++ b/DB/src/main/java/io/deephaven/db/util/serialization/StreamingExternalizable.java @@ -16,23 +16,21 @@ public interface StreamingExternalizable { /** *

    - * Alternative to {@link java.io.Externalizable#writeExternal(ObjectOutput)} for use when a - * series of tuples of the same type with the same element types are being written in iterative - * fashion. + * Alternative to {@link java.io.Externalizable#writeExternal(ObjectOutput)} for use when a series of tuples of the + * same type with the same element types are being written in iterative fashion. *

    * Primitive elements should be written with their primitive write methods (e.g. * {@link ObjectOutput#writeInt(int)}). *

    - * Object elements are preceded by a boolean, true if null, false otherwise. The first non-null - * value for a given Object element is then preceded by the name of the class. All non-null - * values are then written with a writer method from - * {@link SerializationUtils#getWriter(Class, ObjectOutput)}, cached in cachedWriters. + * Object elements are preceded by a boolean, true if null, false otherwise. The first non-null value for a given + * Object element is then preceded by the name of the class. All non-null values are then written with a writer + * method from {@link SerializationUtils#getWriter(Class, ObjectOutput)}, cached in cachedWriters. * * @param out The output * @param cachedWriters The cached writers */ void writeExternalStreaming(@NotNull ObjectOutput out, - @NotNull TIntObjectMap cachedWriters) throws IOException; + @NotNull TIntObjectMap cachedWriters) throws IOException; /** * Implement the Object element writing protocol described in @@ -44,9 +42,9 @@ void writeExternalStreaming(@NotNull ObjectOutput out, * @param item The item to write */ static void writeObjectElement(@NotNull final ObjectOutput out, - @NotNull final TIntObjectMap cachedWriters, - final int itemIndex, - @Nullable ITEM_TYPE item) throws IOException { + @NotNull final TIntObjectMap cachedWriters, + final int itemIndex, + @Nullable ITEM_TYPE item) throws IOException { if (item == null) { out.writeBoolean(true); return; @@ -65,22 +63,20 @@ static void writeObjectElement(@NotNull final ObjectOutput out, /** *

    - * Alternative to {@link java.io.Externalizable#readExternal(ObjectInput)} for use when a series - * of tuples of the same type with the same element types are being read in iterative fashion. + * Alternative to {@link java.io.Externalizable#readExternal(ObjectInput)} for use when a series of tuples of the + * same type with the same element types are being read in iterative fashion. *

    - * Primitive elements should be read with their primitive read methods (e.g. - * {@link ObjectInput#readInt()}). + * Primitive elements should be read with their primitive read methods (e.g. {@link ObjectInput#readInt()}). *

    - * Object elements are preceded by a boolean, true if null, false otherwise. The first non-null - * value for a given Object element is then preceded by the name of the class. All non-null - * values are then read with a reader method from - * {@link SerializationUtils#getReader(Class, ObjectInput)}, cached in cachedReaders. + * Object elements are preceded by a boolean, true if null, false otherwise. The first non-null value for a given + * Object element is then preceded by the name of the class. All non-null values are then read with a reader method + * from {@link SerializationUtils#getReader(Class, ObjectInput)}, cached in cachedReaders. * * @param in The input * @param cachedReaders The cached readers */ - void readExternalStreaming(@NotNull ObjectInput in, - @NotNull TIntObjectMap cachedReaders) throws Exception; + void readExternalStreaming(@NotNull ObjectInput in, @NotNull TIntObjectMap cachedReaders) + throws Exception; /** * Convenience method to allow chaining of construction and calls to @@ -90,9 +86,8 @@ void readExternalStreaming(@NotNull ObjectInput in, * @param cachedReaders The cached readers * @return this */ - default TYPE initializeExternalStreaming( - @NotNull final ObjectInput in, - @NotNull final TIntObjectMap cachedReaders) throws Exception { + default TYPE initializeExternalStreaming(@NotNull final ObjectInput in, + @NotNull final TIntObjectMap cachedReaders) throws Exception { readExternalStreaming(in, cachedReaders); // noinspection unchecked return (TYPE) this; @@ -107,8 +102,8 @@ default TYPE initializeExternalStreaming( * @param itemIndex The index into the cached readers for this item */ static ITEM_TYPE readObjectElement(@NotNull final ObjectInput in, - @NotNull TIntObjectMap cachedReaders, - final int itemIndex) throws Exception { + @NotNull TIntObjectMap cachedReaders, + final int itemIndex) throws Exception { if (in.readBoolean()) { return null; } diff --git a/DB/src/main/java/io/deephaven/db/util/string/StringUtils.java 
b/DB/src/main/java/io/deephaven/db/util/string/StringUtils.java index 84c1fe0847e..20014f3ed8d 100644 --- a/DB/src/main/java/io/deephaven/db/util/string/StringUtils.java +++ b/DB/src/main/java/io/deephaven/db/util/string/StringUtils.java @@ -20,44 +20,40 @@ public class StringUtils implements Serializable { - private static final int STRING_CACHE_SIZE = - Configuration.getInstance().getInteger("StringUtils.cacheSize"); + private static final int STRING_CACHE_SIZE = Configuration.getInstance().getInteger("StringUtils.cacheSize"); // ------------------------------------------------------------------------------------------------------------------ - // A thread-safe (but not very concurrent) StringCache for use in Deephaven code that desires - // actual caching + // A thread-safe (but not very concurrent) StringCache for use in Deephaven code that desires actual caching // ------------------------------------------------------------------------------------------------------------------ public static final StringCache STRING_CACHE = - C14nUtil.ENABLED - ? new OpenAddressedWeakUnboundedStringCache<>(StringCacheTypeAdapterStringImpl.INSTANCE, - C14nUtil.CACHE) - : STRING_CACHE_SIZE == 0 ? AlwaysCreateStringCache.STRING_INSTANCE - : new ConcurrentBoundedStringCache<>(StringCacheTypeAdapterStringImpl.INSTANCE, - STRING_CACHE_SIZE, 2); + C14nUtil.ENABLED + ? new OpenAddressedWeakUnboundedStringCache<>(StringCacheTypeAdapterStringImpl.INSTANCE, + C14nUtil.CACHE) + : STRING_CACHE_SIZE == 0 ? 
AlwaysCreateStringCache.STRING_INSTANCE + : new ConcurrentBoundedStringCache<>(StringCacheTypeAdapterStringImpl.INSTANCE, + STRING_CACHE_SIZE, 2); // ------------------------------------------------------------------------------------------------------------------ - // Optional use of CompressedStrings for Symbol or String data columns (excluding partitioning - // columns) + // Optional use of CompressedStrings for Symbol or String data columns (excluding partitioning columns) // ------------------------------------------------------------------------------------------------------------------ - private static final boolean USE_COMPRESSED_STRINGS = Configuration.getInstance() - .getBooleanWithDefault("StringUtils.useCompressedStrings", false); + private static final boolean USE_COMPRESSED_STRINGS = + Configuration.getInstance().getBooleanWithDefault("StringUtils.useCompressedStrings", false); public static final StringCache COMPRESSED_STRING_CACHE = - USE_COMPRESSED_STRINGS - ? C14nUtil.ENABLED - ? new OpenAddressedWeakUnboundedStringCache<>( - StringCacheTypeAdapterCompressedStringImpl.INSTANCE, C14nUtil.CACHE) - : STRING_CACHE_SIZE == 0 ? AlwaysCreateStringCache.COMPRESSED_STRING_INSTANCE - : new ConcurrentBoundedStringCache<>( - StringCacheTypeAdapterCompressedStringImpl.INSTANCE, STRING_CACHE_SIZE, 2) - : null; + USE_COMPRESSED_STRINGS + ? C14nUtil.ENABLED + ? new OpenAddressedWeakUnboundedStringCache<>( + StringCacheTypeAdapterCompressedStringImpl.INSTANCE, C14nUtil.CACHE) + : STRING_CACHE_SIZE == 0 ? AlwaysCreateStringCache.COMPRESSED_STRING_INSTANCE + : new ConcurrentBoundedStringCache<>( + StringCacheTypeAdapterCompressedStringImpl.INSTANCE, STRING_CACHE_SIZE, 2) + : null; /** - * Re-write all non-partitioning String columns in the definition to use CompressedString data - * type. Note this should only be called when it's known that the source will only provide - * String data with single-byte encodings. 
+ * Re-write all non-partitioning String columns in the definition to use CompressedString data type. Note this + * should only be called when it's known that the source will only provide String data with single-byte encodings. * * @param tableDefinition table definition * @return the new table definition @@ -67,11 +63,11 @@ public static TableDefinition rewriteStringColumnTypes(final TableDefinition tab return tableDefinition; } final ColumnDefinition[] resultColumns = - Arrays.copyOf(tableDefinition.getColumns(), tableDefinition.getColumns().length); + Arrays.copyOf(tableDefinition.getColumns(), tableDefinition.getColumns().length); for (int ci = 0; ci < resultColumns.length; ++ci) { final ColumnDefinition column = resultColumns[ci]; if (column.getDataType() == String.class - && column.getColumnType() != ColumnDefinition.COLUMNTYPE_PARTITIONING) { + && column.getColumnType() != ColumnDefinition.COLUMNTYPE_PARTITIONING) { resultColumns[ci] = column.withDataType(CompressedString.class); } } @@ -84,8 +80,8 @@ public static TableDefinition rewriteStringColumnTypes(final TableDefinition tab public static Collection splitToCollection(String string) { return string.trim().isEmpty() ? Collections.emptyList() - : Arrays.stream(string.split(",")).map(String::trim).filter(s -> !s.isEmpty()) - .collect(Collectors.toList()); + : Arrays.stream(string.split(",")).map(String::trim).filter(s -> !s.isEmpty()) + .collect(Collectors.toList()); } /** @@ -101,15 +97,14 @@ public static boolean isNullOrEmpty(String s) { @NotNull @SuppressWarnings("unchecked") public static StringCache getStringCache( - Class dataType) { + Class dataType) { if (String.class == dataType) { return (StringCache) StringUtils.STRING_CACHE; } else if (CompressedString.class == dataType) { return (StringCache) StringUtils.COMPRESSED_STRING_CACHE; } else { // Writing code has been updated to support arbitrary CharSequences without reverting to - // Externalizable/Serializable implementations. 
Reading code doesn't know what to do - // with other + // Externalizable/Serializable implementations. Reading code doesn't know what to do with other // CharSequence types, especially given that most are mutable. throw new IllegalArgumentException("Unsupported CharSequence type " + dataType); } @@ -151,8 +146,7 @@ public static KeyedObjectKey.Basic stringKey() * * @param */ - private static class NullSafeStringKey - extends KeyedObjectKey.NullSafeBasic { + private static class NullSafeStringKey extends KeyedObjectKey.NullSafeBasic { private static KeyedObjectKey.NullSafeBasic INSTANCE = new NullSafeStringKey<>(); @@ -190,10 +184,10 @@ default String getStringRepresentation() { * @param */ private static class StringKeyedObjectKey - extends KeyedObjectKey.Basic { + extends KeyedObjectKey.Basic { private static KeyedObjectKey.Basic INSTANCE = - new StringKeyedObjectKey<>(); + new StringKeyedObjectKey<>(); @Override public String getKey(@NotNull final VALUE_TYPE value) { @@ -219,10 +213,10 @@ public static KeyedObjectKey.Basic */ private static class NullSafeStringKeyedObjectKey - extends KeyedObjectKey.NullSafeBasic { + extends KeyedObjectKey.NullSafeBasic { private static KeyedObjectKey.NullSafeBasic INSTANCE = - new NullSafeStringKeyedObjectKey<>(); + new NullSafeStringKeyedObjectKey<>(); @Override public String getKey(@NotNull final VALUE_TYPE value) { @@ -248,13 +242,12 @@ public static KeyedObjectKey.NullSafeBasi * @param */ private static class CharSequenceKey - implements KeyedObjectKey { + implements KeyedObjectKey { /** * Singleton CharSequenceKey instance. */ - private static KeyedObjectKey INSTANCE = - new CharSequenceKey<>(); + private static KeyedObjectKey INSTANCE = new CharSequenceKey<>(); @Override @@ -293,13 +286,13 @@ public static KeyedObjectKey */ private static class NullSafeCharSequenceKey - implements KeyedObjectKey { + implements KeyedObjectKey { /** * Singleton CharSequenceKey instance. 
*/ private static KeyedObjectKey INSTANCE = - new NullSafeCharSequenceKey<>(); + new NullSafeCharSequenceKey<>(); @Override @@ -334,8 +327,7 @@ public static KeyedObjectKey s.startsWith(startsWithValue)) - .findFirst() - .ifPresent(s -> { - for (int i = 0; i < toReplaceStrings.length; i++) { - if (toReplaceStrings[i].startsWith(startsWithValue)) { - toReplaceStrings[i] = s; - return; + .filter(s -> s.startsWith(startsWithValue)) + .findFirst() + .ifPresent(s -> { + for (int i = 0; i < toReplaceStrings.length; i++) { + if (toReplaceStrings[i].startsWith(startsWithValue)) { + toReplaceStrings[i] = s; + return; + } } - } - }); + }); } } diff --git a/DB/src/main/java/io/deephaven/db/util/tuples/ArrayTuple.java b/DB/src/main/java/io/deephaven/db/util/tuples/ArrayTuple.java index e902c52222a..1ebffed1943 100644 --- a/DB/src/main/java/io/deephaven/db/util/tuples/ArrayTuple.java +++ b/DB/src/main/java/io/deephaven/db/util/tuples/ArrayTuple.java @@ -15,8 +15,8 @@ *

    * N-Tuple key class backed by an array of elements. */ -public class ArrayTuple implements Comparable, Externalizable, StreamingExternalizable, - CanonicalizableTuple { +public class ArrayTuple + implements Comparable, Externalizable, StreamingExternalizable, CanonicalizableTuple { private static final long serialVersionUID = 1L; @@ -25,8 +25,8 @@ public class ArrayTuple implements Comparable, Externalizable, Strea private transient int cachedHashCode; /** - * Construct a tuple backed by the supplied array of elements. The elements array should not be - * changed after this call. + * Construct a tuple backed by the supplied array of elements. The elements array should not be changed after this + * call. * * @param elements The array to wrap */ @@ -85,14 +85,13 @@ public final int compareTo(@NotNull final ArrayTuple other) { } final int thisLength = elements.length; if (thisLength != other.elements.length) { - throw new IllegalArgumentException( - "Mismatched lengths in " + ArrayTuple.class.getSimpleName() + + throw new IllegalArgumentException("Mismatched lengths in " + ArrayTuple.class.getSimpleName() + " comparison (this.elements=" + Arrays.toString(elements) + ", other.elements=" + Arrays.toString(other.elements) + ')'); } for (int ei = 0; ei < thisLength; ++ei) { - final int comparison = DBLanguageFunctionUtil.compareTo((Comparable) elements[ei], - (Comparable) other.elements[ei]); + final int comparison = + DBLanguageFunctionUtil.compareTo((Comparable) elements[ei], (Comparable) other.elements[ei]); if (comparison != 0) { return comparison; } @@ -109,8 +108,7 @@ public void writeExternal(@NotNull final ObjectOutput out) throws IOException { } @Override - public void readExternal(@NotNull final ObjectInput in) - throws IOException, ClassNotFoundException { + public void readExternal(@NotNull final ObjectInput in) throws IOException, ClassNotFoundException { final int inLength = in.readInt(); final Object inElements[] = new Object[inLength]; for (int ei = 0; 
ei < inLength; ++ei) { @@ -121,7 +119,7 @@ public void readExternal(@NotNull final ObjectInput in) @Override public void writeExternalStreaming(@NotNull final ObjectOutput out, - @NotNull final TIntObjectMap cachedWriters) throws IOException { + @NotNull final TIntObjectMap cachedWriters) throws IOException { final int length = elements.length; out.writeInt(length); for (int ei = 0; ei < length; ++ei) { @@ -131,7 +129,7 @@ public void writeExternalStreaming(@NotNull final ObjectOutput out, @Override public void readExternalStreaming(@NotNull final ObjectInput in, - @NotNull final TIntObjectMap cachedReaders) throws Exception { + @NotNull final TIntObjectMap cachedReaders) throws Exception { final int inLength = in.readInt(); final Object inElements[] = new Object[inLength]; for (int ei = 0; ei < inLength; ++ei) { diff --git a/DB/src/main/java/io/deephaven/db/util/tuples/CanonicalizableTuple.java b/DB/src/main/java/io/deephaven/db/util/tuples/CanonicalizableTuple.java index 56e3c00c068..dfa4b37f86b 100644 --- a/DB/src/main/java/io/deephaven/db/util/tuples/CanonicalizableTuple.java +++ b/DB/src/main/java/io/deephaven/db/util/tuples/CanonicalizableTuple.java @@ -5,8 +5,8 @@ import java.util.function.UnaryOperator; /** - * Interface for immutable tuple classes that can produce a new instance of themselves with - * canonicalized object elements. + * Interface for immutable tuple classes that can produce a new instance of themselves with canonicalized object + * elements. */ public interface CanonicalizableTuple { diff --git a/DB/src/main/java/io/deephaven/db/util/tuples/EmptyTuple.java b/DB/src/main/java/io/deephaven/db/util/tuples/EmptyTuple.java index 41eb7312ba0..db369de47b4 100644 --- a/DB/src/main/java/io/deephaven/db/util/tuples/EmptyTuple.java +++ b/DB/src/main/java/io/deephaven/db/util/tuples/EmptyTuple.java @@ -9,8 +9,7 @@ *

    * 0-Tuple key class. */ -public enum EmptyTuple - implements Comparable, Serializable, CanonicalizableTuple { +public enum EmptyTuple implements Comparable, Serializable, CanonicalizableTuple { INSTANCE; diff --git a/DB/src/main/java/io/deephaven/db/util/tuples/TupleCodeGenerator.java b/DB/src/main/java/io/deephaven/db/util/tuples/TupleCodeGenerator.java index f378fba22e5..3dbeb32c554 100644 --- a/DB/src/main/java/io/deephaven/db/util/tuples/TupleCodeGenerator.java +++ b/DB/src/main/java/io/deephaven/db/util/tuples/TupleCodeGenerator.java @@ -22,16 +22,14 @@ public class TupleCodeGenerator { /* - * TODO: Support getters for tuple elements? No use case currently, and might encourage - * mutations. TODO: Generify Object elements? No use case without the above, I don't think. - * TODO: Refactor to make it possible to generate arbitrary n-tuples, and eliminate duplication - * between generateDouble and generateTriple. + * TODO: Support getters for tuple elements? No use case currently, and might encourage mutations. TODO: Generify + * Object elements? No use case without the above, I don't think. TODO: Refactor to make it possible to generate + * arbitrary n-tuples, and eliminate duplication between generateDouble and generateTriple. 
*/ - private static final String OUTPUT_PACKAGE = - TupleCodeGenerator.class.getPackage().getName() + ".generated"; + private static final String OUTPUT_PACKAGE = TupleCodeGenerator.class.getPackage().getName() + ".generated"; private static final File OUTPUT_RELATIVE_PATH = - new File(new File("DB", MAIN_SRC), OUTPUT_PACKAGE.replace('.', File.separatorChar)); + new File(new File("DB", MAIN_SRC), OUTPUT_PACKAGE.replace('.', File.separatorChar)); private static final String LHS = "$lhs$"; private static final String RHS = "$rhs$"; @@ -42,16 +40,16 @@ public class TupleCodeGenerator { private static final String NEW_LINE = System.getProperty("line.separator"); private static final String DEFAULT_IMPORTS[] = Stream.of( - CanonicalizableTuple.class, - Externalizable.class, - IOException.class, - NotNull.class, - ObjectInput.class, - ObjectOutput.class, - SerializationUtils.class, - StreamingExternalizable.class, - TIntObjectMap.class, - UnaryOperator.class).map(Class::getName).toArray(String[]::new); + CanonicalizableTuple.class, + Externalizable.class, + IOException.class, + NotNull.class, + ObjectInput.class, + ObjectOutput.class, + SerializationUtils.class, + StreamingExternalizable.class, + TIntObjectMap.class, + UnaryOperator.class).map(Class::getName).toArray(String[]::new); private static final String CLASS_NAME_SUFFIX = "Tuple"; private static final String ELEMENT1 = "element1"; @@ -88,13 +86,13 @@ private enum ElementType { private final String imports[]; ElementType(@NotNull final String nameText, - @NotNull final Class implementation, - @NotNull final String equalsText, - @NotNull final String hashCodeText, - @NotNull final String compareToText, - @NotNull final String writeExternalText, - @NotNull final String readExternalText, - @NotNull final Class... 
importClasses) { + @NotNull final Class implementation, + @NotNull final String equalsText, + @NotNull final String hashCodeText, + @NotNull final String compareToText, + @NotNull final String writeExternalText, + @NotNull final String readExternalText, + @NotNull final Class... importClasses) { this.nameText = nameText; this.implementation = implementation; this.equalsText = equalsText; @@ -125,20 +123,17 @@ private String getHashCodeText(@NotNull final String elementName) { return hashCodeText.replace(VAL, elementName); } - private String getCompareToText(@NotNull final String lhsName, - @NotNull final String rhsName) { + private String getCompareToText(@NotNull final String lhsName, @NotNull final String rhsName) { return compareToText.replace(LHS, lhsName).replace(RHS, rhsName); } - private String getWriteExternalText( - @SuppressWarnings("SameParameterValue") @NotNull final String outName, - @NotNull final String elementName) { + private String getWriteExternalText(@SuppressWarnings("SameParameterValue") @NotNull final String outName, + @NotNull final String elementName) { return writeExternalText.replace(OUT, outName).replace(VAL, elementName); } - private String getReadExternalText( - @SuppressWarnings("SameParameterValue") @NotNull final String inName, - @NotNull final String elementName) { + private String getReadExternalText(@SuppressWarnings("SameParameterValue") @NotNull final String inName, + @NotNull final String elementName) { return readExternalText.replace(IN, inName).replace(VAL, elementName); } @@ -148,9 +143,9 @@ private String[] getImports() { } private static final Map PRIMITIVE_CLASS_TO_ELEMENT_TYPE = - Collections.unmodifiableMap(Arrays.stream(ElementType.values()) - .filter(et -> et != ElementType.OBJECT) - .collect(Collectors.toMap(ElementType::getImplementation, Function.identity()))); + Collections.unmodifiableMap(Arrays.stream(ElementType.values()) + .filter(et -> et != ElementType.OBJECT) + 
.collect(Collectors.toMap(ElementType::getImplementation, Function.identity()))); /** * Get the tuple class name for the supplied array of element classes. @@ -161,13 +156,12 @@ private String[] getImports() { public static String getTupleClassName(@NotNull final Class... classes) { if (classes.length < 2) { throw new IllegalArgumentException( - "There are no tuple class names available for " + Arrays.toString(classes)); + "There are no tuple class names available for " + Arrays.toString(classes)); } if (classes.length < 4) { return Arrays.stream(classes) - .map(c -> PRIMITIVE_CLASS_TO_ELEMENT_TYPE.getOrDefault(c, ElementType.OBJECT) - .getNameText()) - .collect(Collectors.joining()) + CLASS_NAME_SUFFIX; + .map(c -> PRIMITIVE_CLASS_TO_ELEMENT_TYPE.getOrDefault(c, ElementType.OBJECT).getNameText()) + .collect(Collectors.joining()) + CLASS_NAME_SUFFIX; } return ArrayTuple.class.getSimpleName(); } @@ -183,12 +177,11 @@ public static String getTupleImport(@NotNull final String className) { } private String generateClassName(@NotNull final ElementType... 
types) { - return Arrays.stream(types).map(ElementType::getNameText).collect(Collectors.joining()) - + CLASS_NAME_SUFFIX; + return Arrays.stream(types).map(ElementType::getNameText).collect(Collectors.joining()) + CLASS_NAME_SUFFIX; } private String generateDouble(@NotNull final String className, @NotNull final ElementType type1, - @NotNull final ElementType type2) { + @NotNull final ElementType type2) { final Indenter indenter = new Indenter(); final StringBuilder code = new StringBuilder(1024); final String class1Name = type1.getImplementationName(); @@ -201,49 +194,45 @@ private String generateDouble(@NotNull final String className, @NotNull final El code.append(NEW_LINE); Stream.of(DEFAULT_IMPORTS, type1.getImports(), type2.getImports()).flatMap(Arrays::stream) - .filter(i -> !i.startsWith("java.")).sorted().distinct().forEachOrdered( - i -> code.append("import ").append(i).append(';').append(NEW_LINE)); + .filter(i -> !i.startsWith("java.")).sorted().distinct().forEachOrdered( + i -> code.append("import ").append(i).append(';').append(NEW_LINE)); code.append(NEW_LINE); Stream.of(DEFAULT_IMPORTS, type1.getImports(), type2.getImports()).flatMap(Arrays::stream) - .filter(i -> i.startsWith("java.")).sorted().distinct().forEachOrdered( - i -> code.append("import ").append(i).append(';').append(NEW_LINE)); + .filter(i -> i.startsWith("java.")).sorted().distinct().forEachOrdered( + i -> code.append("import ").append(i).append(';').append(NEW_LINE)); code.append(NEW_LINE); code.append("/**").append(NEW_LINE); - code.append(" *

    2-Tuple (double) key class composed of ").append(class1Name) - .append(" and ").append(class2Name).append(" elements.").append(NEW_LINE); - code.append(" *

    Generated by {@link ").append(TupleCodeGenerator.class.getName()) - .append("}.").append(NEW_LINE); + code.append(" *

    2-Tuple (double) key class composed of ").append(class1Name).append(" and ") + .append(class2Name).append(" elements.").append(NEW_LINE); + code.append(" *

    Generated by {@link ").append(TupleCodeGenerator.class.getName()).append("}.") + .append(NEW_LINE); code.append(" */").append(NEW_LINE); - code.append("public class ").append(className).append(" implements Comparable<") - .append(className) - .append(">, Externalizable, StreamingExternalizable, CanonicalizableTuple<") - .append(className).append("> {").append(NEW_LINE); + code.append("public class ").append(className).append(" implements Comparable<").append(className) + .append(">, Externalizable, StreamingExternalizable, CanonicalizableTuple<").append(className) + .append("> {").append(NEW_LINE); code.append(NEW_LINE); - code.append(indenter).append("private static final long serialVersionUID = 1L;") - .append(NEW_LINE); + code.append(indenter).append("private static final long serialVersionUID = 1L;").append(NEW_LINE); code.append(NEW_LINE); - code.append(indenter).append("private ").append(class1Name).append(' ').append(ELEMENT1) - .append(';').append(NEW_LINE); - code.append(indenter).append("private ").append(class2Name).append(' ').append(ELEMENT2) - .append(';').append(NEW_LINE); + code.append(indenter).append("private ").append(class1Name).append(' ').append(ELEMENT1).append(';') + .append(NEW_LINE); + code.append(indenter).append("private ").append(class2Name).append(' ').append(ELEMENT2).append(';') + .append(NEW_LINE); code.append(NEW_LINE); - code.append(indenter).append("private transient int ").append(CACHED_HASH_CODE).append(';') - .append(NEW_LINE); + code.append(indenter).append("private transient int ").append(CACHED_HASH_CODE).append(';').append(NEW_LINE); code.append(NEW_LINE); code.append(indenter).append("public ").append(className).append('(').append(NEW_LINE); indenter.increaseLevel(2); - code.append(indenter).append("final ").append(class1Name).append(' ').append(ELEMENT1) - .append(',').append(NEW_LINE); - code.append(indenter).append("final ").append(class2Name).append(' ').append(ELEMENT2) - .append(NEW_LINE); + 
code.append(indenter).append("final ").append(class1Name).append(' ').append(ELEMENT1).append(',') + .append(NEW_LINE); + code.append(indenter).append("final ").append(class2Name).append(' ').append(ELEMENT2).append(NEW_LINE); indenter.decreaseLevel(2); code.append(indenter).append(") {").append(NEW_LINE); indenter.increaseLevel(); @@ -259,8 +248,8 @@ private String generateDouble(@NotNull final String className, @NotNull final El code.append(NEW_LINE); code.append(indenter).append( - "/** Public no-arg constructor for {@link Externalizable} support only. Application code should not use this! **/") - .append(NEW_LINE); + "/** Public no-arg constructor for {@link Externalizable} support only. Application code should not use this! **/") + .append(NEW_LINE); code.append(indenter).append("public ").append(className).append("() {").append(NEW_LINE); code.append(indenter).append('}').append(NEW_LINE); @@ -268,29 +257,27 @@ private String generateDouble(@NotNull final String className, @NotNull final El code.append(indenter).append("private void initialize(").append(NEW_LINE); indenter.increaseLevel(2); - code.append(indenter).append("final ").append(class1Name).append(' ').append(ELEMENT1) - .append(',').append(NEW_LINE); - code.append(indenter).append("final ").append(class2Name).append(' ').append(ELEMENT2) - .append(NEW_LINE); + code.append(indenter).append("final ").append(class1Name).append(' ').append(ELEMENT1).append(',') + .append(NEW_LINE); + code.append(indenter).append("final ").append(class2Name).append(' ').append(ELEMENT2).append(NEW_LINE); indenter.decreaseLevel(2); code.append(indenter).append(") {").append(NEW_LINE); indenter.increaseLevel(); - code.append(indenter).append("this.").append(ELEMENT1).append(" = ").append(ELEMENT1) - .append(';').append(NEW_LINE); - code.append(indenter).append("this.").append(ELEMENT2).append(" = ").append(ELEMENT2) - .append(';').append(NEW_LINE); + code.append(indenter).append("this.").append(ELEMENT1).append(" = 
").append(ELEMENT1).append(';') + .append(NEW_LINE); + code.append(indenter).append("this.").append(ELEMENT2).append(" = ").append(ELEMENT2).append(';') + .append(NEW_LINE); code.append(indenter).append(CACHED_HASH_CODE).append(" = (31 +").append(NEW_LINE); indenter.increaseLevel(2); - code.append(indenter).append(type1.getHashCodeText(ELEMENT1)).append(") * 31 +") - .append(NEW_LINE); + code.append(indenter).append(type1.getHashCodeText(ELEMENT1)).append(") * 31 +").append(NEW_LINE); code.append(indenter).append(type2.getHashCodeText(ELEMENT2)).append(';').append(NEW_LINE); indenter.decreaseLevel(3); code.append(indenter).append('}').append(NEW_LINE); code.append(NEW_LINE); - code.append(indenter).append("public final ").append(class1Name) - .append(" getFirstElement() {").append(NEW_LINE); + code.append(indenter).append("public final ").append(class1Name).append(" getFirstElement() {") + .append(NEW_LINE); indenter.increaseLevel(); code.append(indenter).append("return ").append(ELEMENT1).append(';').append(NEW_LINE); indenter.decreaseLevel(); @@ -298,8 +285,8 @@ private String generateDouble(@NotNull final String className, @NotNull final El code.append(NEW_LINE); - code.append(indenter).append("public final ").append(class2Name) - .append(" getSecondElement() {").append(NEW_LINE); + code.append(indenter).append("public final ").append(class2Name).append(" getSecondElement() {") + .append(NEW_LINE); indenter.increaseLevel(); code.append(indenter).append("return ").append(ELEMENT2).append(';').append(NEW_LINE); indenter.decreaseLevel(); @@ -310,38 +297,34 @@ private String generateDouble(@NotNull final String className, @NotNull final El code.append(indenter).append("@Override").append(NEW_LINE); code.append(indenter).append("public final int hashCode() {").append(NEW_LINE); indenter.increaseLevel(); - code.append(indenter).append("return ").append(CACHED_HASH_CODE).append(';') - .append(NEW_LINE); + code.append(indenter).append("return 
").append(CACHED_HASH_CODE).append(';').append(NEW_LINE); indenter.decreaseLevel(); code.append(indenter).append('}').append(NEW_LINE); code.append(NEW_LINE); code.append(indenter).append("@Override").append(NEW_LINE); - code.append(indenter).append("public final boolean equals(final Object ").append(OTHER) - .append(") {").append(NEW_LINE); + code.append(indenter).append("public final boolean equals(final Object ").append(OTHER).append(") {") + .append(NEW_LINE); indenter.increaseLevel(); code.append(indenter).append("if (this == ").append(OTHER).append(") {").append(NEW_LINE); indenter.increaseLevel(); code.append(indenter).append("return true;").append(NEW_LINE); indenter.decreaseLevel(); code.append(indenter).append('}').append(NEW_LINE); - code.append(indenter).append("if (").append(OTHER) - .append(" == null || getClass() != other.getClass()) {").append(NEW_LINE); + code.append(indenter).append("if (").append(OTHER).append(" == null || getClass() != other.getClass()) {") + .append(NEW_LINE); indenter.increaseLevel(); code.append(indenter).append("return false;").append(NEW_LINE); indenter.decreaseLevel(); code.append(indenter).append('}').append(NEW_LINE); - code.append(indenter).append("final ").append(className).append(' ').append(TYPED_OTHER) - .append(" = (").append(className).append(") ").append(OTHER).append(';') - .append(NEW_LINE); + code.append(indenter).append("final ").append(className).append(' ').append(TYPED_OTHER).append(" = (") + .append(className).append(") ").append(OTHER).append(';').append(NEW_LINE); code.append(indenter).append("// @formatter:off").append(NEW_LINE); - code.append(indenter).append("return ") - .append(type1.getEqualsText(ELEMENT1, TYPED_OTHER + '.' + ELEMENT1)).append(" &&") - .append(NEW_LINE); - code.append(indenter).append(" ") - .append(type2.getEqualsText(ELEMENT2, TYPED_OTHER + '.' 
+ ELEMENT2)).append(';') - .append(NEW_LINE); + code.append(indenter).append("return ").append(type1.getEqualsText(ELEMENT1, TYPED_OTHER + '.' + ELEMENT1)) + .append(" &&").append(NEW_LINE); + code.append(indenter).append(" ").append(type2.getEqualsText(ELEMENT2, TYPED_OTHER + '.' + ELEMENT2)) + .append(';').append(NEW_LINE); code.append(indenter).append("// @formatter:on").append(NEW_LINE); indenter.decreaseLevel(); code.append(indenter).append('}').append(NEW_LINE); @@ -349,8 +332,8 @@ private String generateDouble(@NotNull final String className, @NotNull final El code.append(NEW_LINE); code.append(indenter).append("@Override").append(NEW_LINE); - code.append(indenter).append("public final int compareTo(@NotNull final ").append(className) - .append(' ').append(OTHER).append(") {").append(NEW_LINE); + code.append(indenter).append("public final int compareTo(@NotNull final ").append(className).append(' ') + .append(OTHER).append(") {").append(NEW_LINE); indenter.increaseLevel(); code.append(indenter).append("if (this == ").append(OTHER).append(") {").append(NEW_LINE); indenter.increaseLevel(); @@ -360,11 +343,10 @@ private String generateDouble(@NotNull final String className, @NotNull final El code.append(indenter).append("int comparison;").append(NEW_LINE); code.append(indenter).append("// @formatter:off").append(NEW_LINE); code.append(indenter).append("return 0 != (comparison = ") - .append(type1.getCompareToText(ELEMENT1, OTHER + '.' + ELEMENT1)) - .append(") ? comparison :").append(NEW_LINE); - code.append(indenter).append(" ") - .append(type2.getCompareToText(ELEMENT2, OTHER + '.' + ELEMENT2)).append(";") - .append(NEW_LINE); + .append(type1.getCompareToText(ELEMENT1, OTHER + '.' + ELEMENT1)).append(") ? comparison :") + .append(NEW_LINE); + code.append(indenter).append(" ").append(type2.getCompareToText(ELEMENT2, OTHER + '.' 
+ ELEMENT2)) + .append(";").append(NEW_LINE); code.append(indenter).append("// @formatter:on").append(NEW_LINE); indenter.decreaseLevel(); code.append(indenter).append('}').append(NEW_LINE); @@ -372,15 +354,11 @@ private String generateDouble(@NotNull final String className, @NotNull final El code.append(NEW_LINE); code.append(indenter).append("@Override").append(NEW_LINE); - code.append(indenter) - .append( - "public void writeExternal(@NotNull final ObjectOutput out) throws IOException {") - .append(NEW_LINE); - indenter.increaseLevel(); - code.append(indenter).append(type1.getWriteExternalText("out", ELEMENT1)).append(';') - .append(NEW_LINE); - code.append(indenter).append(type2.getWriteExternalText("out", ELEMENT2)).append(';') - .append(NEW_LINE); + code.append(indenter).append("public void writeExternal(@NotNull final ObjectOutput out) throws IOException {") + .append(NEW_LINE); + indenter.increaseLevel(); + code.append(indenter).append(type1.getWriteExternalText("out", ELEMENT1)).append(';').append(NEW_LINE); + code.append(indenter).append(type2.getWriteExternalText("out", ELEMENT2)).append(';').append(NEW_LINE); indenter.decreaseLevel(); code.append(indenter).append('}').append(NEW_LINE); @@ -388,12 +366,12 @@ private String generateDouble(@NotNull final String className, @NotNull final El code.append(indenter).append("@Override").append(NEW_LINE); code.append(indenter).append( - "public void readExternal(@NotNull final ObjectInput in) throws IOException, ClassNotFoundException {") - .append(NEW_LINE); + "public void readExternal(@NotNull final ObjectInput in) throws IOException, ClassNotFoundException {") + .append(NEW_LINE); indenter.increaseLevel(); code.append(indenter).append("initialize(").append(NEW_LINE); - code.append(indenter.increaseLevel(2)).append(type1.getReadExternalText("in", ELEMENT1)) - .append(',').append(NEW_LINE); + code.append(indenter.increaseLevel(2)).append(type1.getReadExternalText("in", ELEMENT1)).append(',') + .append(NEW_LINE); 
code.append(indenter).append(type2.getReadExternalText("in", ELEMENT2)).append(NEW_LINE); code.append(indenter.decreaseLevel(2)).append(");").append(NEW_LINE); indenter.decreaseLevel(); @@ -403,24 +381,20 @@ private String generateDouble(@NotNull final String className, @NotNull final El code.append(indenter).append("@Override").append(NEW_LINE); code.append(indenter).append( - "public void writeExternalStreaming(@NotNull final ObjectOutput out, @NotNull final TIntObjectMap cachedWriters) throws IOException {") - .append(NEW_LINE); + "public void writeExternalStreaming(@NotNull final ObjectOutput out, @NotNull final TIntObjectMap cachedWriters) throws IOException {") + .append(NEW_LINE); indenter.increaseLevel(); if (type1 != ElementType.OBJECT) { - code.append(indenter).append(type1.getWriteExternalText("out", ELEMENT1)).append(';') - .append(NEW_LINE); + code.append(indenter).append(type1.getWriteExternalText("out", ELEMENT1)).append(';').append(NEW_LINE); } else { - code.append(indenter) - .append("StreamingExternalizable.writeObjectElement(out, cachedWriters, 0, ") - .append(ELEMENT1).append(");").append(NEW_LINE); + code.append(indenter).append("StreamingExternalizable.writeObjectElement(out, cachedWriters, 0, ") + .append(ELEMENT1).append(");").append(NEW_LINE); } if (type2 != ElementType.OBJECT) { - code.append(indenter).append(type2.getWriteExternalText("out", ELEMENT2)).append(';') - .append(NEW_LINE); + code.append(indenter).append(type2.getWriteExternalText("out", ELEMENT2)).append(';').append(NEW_LINE); } else { - code.append(indenter) - .append("StreamingExternalizable.writeObjectElement(out, cachedWriters, 1, ") - .append(ELEMENT2).append(");").append(NEW_LINE); + code.append(indenter).append("StreamingExternalizable.writeObjectElement(out, cachedWriters, 1, ") + .append(ELEMENT2).append(");").append(NEW_LINE); } indenter.decreaseLevel(); code.append(indenter).append('}').append(NEW_LINE); @@ -429,25 +403,23 @@ private String generateDouble(@NotNull 
final String className, @NotNull final El code.append(indenter).append("@Override").append(NEW_LINE); code.append(indenter).append( - "public void readExternalStreaming(@NotNull final ObjectInput in, @NotNull final TIntObjectMap cachedReaders) throws Exception {") - .append(NEW_LINE); + "public void readExternalStreaming(@NotNull final ObjectInput in, @NotNull final TIntObjectMap cachedReaders) throws Exception {") + .append(NEW_LINE); indenter.increaseLevel(); code.append(indenter).append("initialize(").append(NEW_LINE); if (type1 != ElementType.OBJECT) { - code.append(indenter.increaseLevel(2)).append(type1.getReadExternalText("in", ELEMENT1)) - .append(',').append(NEW_LINE); + code.append(indenter.increaseLevel(2)).append(type1.getReadExternalText("in", ELEMENT1)).append(',') + .append(NEW_LINE); } else { code.append(indenter.increaseLevel(2)) - .append("StreamingExternalizable.readObjectElement(in, cachedReaders, 0)") - .append(',').append(NEW_LINE); + .append("StreamingExternalizable.readObjectElement(in, cachedReaders, 0)").append(',') + .append(NEW_LINE); } if (type2 != ElementType.OBJECT) { - code.append(indenter).append(type2.getReadExternalText("in", ELEMENT2)) - .append(NEW_LINE); + code.append(indenter).append(type2.getReadExternalText("in", ELEMENT2)).append(NEW_LINE); } else { - code.append(indenter) - .append("StreamingExternalizable.readObjectElement(in, cachedReaders, 1)") - .append(NEW_LINE); + code.append(indenter).append("StreamingExternalizable.readObjectElement(in, cachedReaders, 1)") + .append(NEW_LINE); } code.append(indenter.decreaseLevel(2)).append(");").append(NEW_LINE); indenter.decreaseLevel(); @@ -458,8 +430,7 @@ private String generateDouble(@NotNull final String className, @NotNull final El code.append(indenter).append("@Override").append(NEW_LINE); code.append(indenter).append("public String toString() {").append(NEW_LINE); indenter.increaseLevel(); - code.append(indenter).append("return \"").append(className).append("{\" +") - 
.append(NEW_LINE); + code.append(indenter).append("return \"").append(className).append("{\" +").append(NEW_LINE); indenter.increaseLevel(2); code.append(indenter).append(ELEMENT1).append(" + \", \" +").append(NEW_LINE); code.append(indenter).append(ELEMENT2).append(" + '}';").append(NEW_LINE); @@ -470,43 +441,36 @@ private String generateDouble(@NotNull final String className, @NotNull final El code.append(indenter).append("@Override").append(NEW_LINE); code.append(indenter).append("public ").append(className) - .append(" canonicalize(@NotNull final UnaryOperator canonicalizer) {") - .append(NEW_LINE); + .append(" canonicalize(@NotNull final UnaryOperator canonicalizer) {").append(NEW_LINE); indenter.increaseLevel(); if (firstIsObject) { - code.append(indenter).append("final ").append(class1Name).append(' ') - .append(CANONICALIZED_ELEMENT1).append(" = canonicalizer.apply(").append(ELEMENT1) - .append(");").append(NEW_LINE); + code.append(indenter).append("final ").append(class1Name).append(' ').append(CANONICALIZED_ELEMENT1) + .append(" = canonicalizer.apply(").append(ELEMENT1).append(");").append(NEW_LINE); } if (secondIsObject) { - code.append(indenter).append("final ").append(class2Name).append(' ') - .append(CANONICALIZED_ELEMENT2).append(" = canonicalizer.apply(").append(ELEMENT2) - .append(");").append(NEW_LINE); + code.append(indenter).append("final ").append(class2Name).append(' ').append(CANONICALIZED_ELEMENT2) + .append(" = canonicalizer.apply(").append(ELEMENT2).append(");").append(NEW_LINE); } if (firstIsObject && secondIsObject) { - code.append(indenter).append("return ").append(CANONICALIZED_ELEMENT1).append(" == ") - .append(ELEMENT1).append(" && ").append(CANONICALIZED_ELEMENT2).append(" == ") - .append(ELEMENT2).append(NEW_LINE); + code.append(indenter).append("return ").append(CANONICALIZED_ELEMENT1).append(" == ").append(ELEMENT1) + .append(" && ").append(CANONICALIZED_ELEMENT2).append(" == ").append(ELEMENT2).append(NEW_LINE); 
indenter.increaseLevel(2); - code.append(indenter).append("? this : new ").append(className).append('(') - .append(CANONICALIZED_ELEMENT1).append(", ").append(CANONICALIZED_ELEMENT2) - .append(");").append(NEW_LINE); + code.append(indenter).append("? this : new ").append(className).append('(').append(CANONICALIZED_ELEMENT1) + .append(", ").append(CANONICALIZED_ELEMENT2).append(");").append(NEW_LINE); indenter.decreaseLevel(2); } else if (firstIsObject) { - code.append(indenter).append("return ").append(CANONICALIZED_ELEMENT1).append(" == ") - .append(ELEMENT1).append(NEW_LINE); + code.append(indenter).append("return ").append(CANONICALIZED_ELEMENT1).append(" == ").append(ELEMENT1) + .append(NEW_LINE); indenter.increaseLevel(2); - code.append(indenter).append("? this : new ").append(className).append('(') - .append(CANONICALIZED_ELEMENT1).append(", ").append(ELEMENT2).append(");") - .append(NEW_LINE); + code.append(indenter).append("? this : new ").append(className).append('(').append(CANONICALIZED_ELEMENT1) + .append(", ").append(ELEMENT2).append(");").append(NEW_LINE); indenter.decreaseLevel(2); } else if (secondIsObject) { - code.append(indenter).append("return ").append(CANONICALIZED_ELEMENT2).append(" == ") - .append(ELEMENT2).append(NEW_LINE); + code.append(indenter).append("return ").append(CANONICALIZED_ELEMENT2).append(" == ").append(ELEMENT2) + .append(NEW_LINE); indenter.increaseLevel(2); - code.append(indenter).append("? this : new ").append(className).append('(') - .append(ELEMENT1).append(", ").append(CANONICALIZED_ELEMENT2).append(");") - .append(NEW_LINE); + code.append(indenter).append("? 
this : new ").append(className).append('(').append(ELEMENT1).append(", ") + .append(CANONICALIZED_ELEMENT2).append(");").append(NEW_LINE); indenter.decreaseLevel(2); } else { code.append(indenter).append("return this;").append(NEW_LINE); @@ -520,7 +484,7 @@ private String generateDouble(@NotNull final String className, @NotNull final El } private String generateTriple(@NotNull final String className, @NotNull final ElementType type1, - @NotNull final ElementType type2, @NotNull final ElementType type3) { + @NotNull final ElementType type2, @NotNull final ElementType type3) { final Indenter indenter = new Indenter(); final StringBuilder code = new StringBuilder(1024); final String class1Name = type1.getImplementationName(); @@ -534,57 +498,50 @@ private String generateTriple(@NotNull final String className, @NotNull final El code.append(NEW_LINE); - Stream.of(DEFAULT_IMPORTS, type1.getImports(), type2.getImports(), type3.getImports()) - .flatMap(Arrays::stream).filter(i -> !i.startsWith("java.")).sorted().distinct() - .forEachOrdered( - i -> code.append("import ").append(i).append(';').append(NEW_LINE)); + Stream.of(DEFAULT_IMPORTS, type1.getImports(), type2.getImports(), type3.getImports()).flatMap(Arrays::stream) + .filter(i -> !i.startsWith("java.")).sorted().distinct().forEachOrdered( + i -> code.append("import ").append(i).append(';').append(NEW_LINE)); code.append(NEW_LINE); - Stream.of(DEFAULT_IMPORTS, type1.getImports(), type2.getImports(), type3.getImports()) - .flatMap(Arrays::stream).filter(i -> i.startsWith("java.")).sorted().distinct() - .forEachOrdered( - i -> code.append("import ").append(i).append(';').append(NEW_LINE)); + Stream.of(DEFAULT_IMPORTS, type1.getImports(), type2.getImports(), type3.getImports()).flatMap(Arrays::stream) + .filter(i -> i.startsWith("java.")).sorted().distinct().forEachOrdered( + i -> code.append("import ").append(i).append(';').append(NEW_LINE)); code.append(NEW_LINE); code.append("/**").append(NEW_LINE); - code.append(" *

    3-Tuple (triple) key class composed of ").append(class1Name).append(", ") - .append(class2Name).append(", and ").append(class3Name).append(" elements.") - .append(NEW_LINE); - code.append(" *

    Generated by {@link ").append(TupleCodeGenerator.class.getName()) - .append("}.").append(NEW_LINE); + code.append(" *

    3-Tuple (triple) key class composed of ").append(class1Name).append(", ").append(class2Name) + .append(", and ").append(class3Name).append(" elements.").append(NEW_LINE); + code.append(" *

    Generated by {@link ").append(TupleCodeGenerator.class.getName()).append("}.") + .append(NEW_LINE); code.append(" */").append(NEW_LINE); - code.append("public class ").append(className).append(" implements Comparable<") - .append(className) - .append(">, Externalizable, StreamingExternalizable, CanonicalizableTuple<") - .append(className).append("> {").append(NEW_LINE); + code.append("public class ").append(className).append(" implements Comparable<").append(className) + .append(">, Externalizable, StreamingExternalizable, CanonicalizableTuple<").append(className) + .append("> {").append(NEW_LINE); code.append(NEW_LINE); - code.append(indenter).append("private static final long serialVersionUID = 1L;") - .append(NEW_LINE); + code.append(indenter).append("private static final long serialVersionUID = 1L;").append(NEW_LINE); code.append(NEW_LINE); - code.append(indenter).append("private ").append(class1Name).append(' ').append(ELEMENT1) - .append(';').append(NEW_LINE); - code.append(indenter).append("private ").append(class2Name).append(' ').append(ELEMENT2) - .append(';').append(NEW_LINE); - code.append(indenter).append("private ").append(class3Name).append(' ').append(ELEMENT3) - .append(';').append(NEW_LINE); + code.append(indenter).append("private ").append(class1Name).append(' ').append(ELEMENT1).append(';') + .append(NEW_LINE); + code.append(indenter).append("private ").append(class2Name).append(' ').append(ELEMENT2).append(';') + .append(NEW_LINE); + code.append(indenter).append("private ").append(class3Name).append(' ').append(ELEMENT3).append(';') + .append(NEW_LINE); code.append(NEW_LINE); - code.append(indenter).append("private transient int ").append(CACHED_HASH_CODE).append(';') - .append(NEW_LINE); + code.append(indenter).append("private transient int ").append(CACHED_HASH_CODE).append(';').append(NEW_LINE); code.append(NEW_LINE); code.append(indenter).append("public ").append(className).append('(').append(NEW_LINE); indenter.increaseLevel(2); - 
code.append(indenter).append("final ").append(class1Name).append(' ').append(ELEMENT1) - .append(',').append(NEW_LINE); - code.append(indenter).append("final ").append(class2Name).append(' ').append(ELEMENT2) - .append(',').append(NEW_LINE); - code.append(indenter).append("final ").append(class3Name).append(' ').append(ELEMENT3) - .append(NEW_LINE); + code.append(indenter).append("final ").append(class1Name).append(' ').append(ELEMENT1).append(',') + .append(NEW_LINE); + code.append(indenter).append("final ").append(class2Name).append(' ').append(ELEMENT2).append(',') + .append(NEW_LINE); + code.append(indenter).append("final ").append(class3Name).append(' ').append(ELEMENT3).append(NEW_LINE); indenter.decreaseLevel(2); code.append(indenter).append(") {").append(NEW_LINE); indenter.increaseLevel(); @@ -601,8 +558,8 @@ private String generateTriple(@NotNull final String className, @NotNull final El code.append(NEW_LINE); code.append(indenter).append( - "/** Public no-arg constructor for {@link Externalizable} support only. Application code should not use this! **/") - .append(NEW_LINE); + "/** Public no-arg constructor for {@link Externalizable} support only. Application code should not use this! 
**/") + .append(NEW_LINE); code.append(indenter).append("public ").append(className).append("() {").append(NEW_LINE); code.append(indenter).append('}').append(NEW_LINE); @@ -610,35 +567,32 @@ private String generateTriple(@NotNull final String className, @NotNull final El code.append(indenter).append("private void initialize(").append(NEW_LINE); indenter.increaseLevel(2); - code.append(indenter).append("final ").append(class1Name).append(' ').append(ELEMENT1) - .append(',').append(NEW_LINE); - code.append(indenter).append("final ").append(class2Name).append(' ').append(ELEMENT2) - .append(',').append(NEW_LINE); - code.append(indenter).append("final ").append(class3Name).append(' ').append(ELEMENT3) - .append(NEW_LINE); + code.append(indenter).append("final ").append(class1Name).append(' ').append(ELEMENT1).append(',') + .append(NEW_LINE); + code.append(indenter).append("final ").append(class2Name).append(' ').append(ELEMENT2).append(',') + .append(NEW_LINE); + code.append(indenter).append("final ").append(class3Name).append(' ').append(ELEMENT3).append(NEW_LINE); indenter.decreaseLevel(2); code.append(indenter).append(") {").append(NEW_LINE); indenter.increaseLevel(); - code.append(indenter).append("this.").append(ELEMENT1).append(" = ").append(ELEMENT1) - .append(';').append(NEW_LINE); - code.append(indenter).append("this.").append(ELEMENT2).append(" = ").append(ELEMENT2) - .append(';').append(NEW_LINE); - code.append(indenter).append("this.").append(ELEMENT3).append(" = ").append(ELEMENT3) - .append(';').append(NEW_LINE); + code.append(indenter).append("this.").append(ELEMENT1).append(" = ").append(ELEMENT1).append(';') + .append(NEW_LINE); + code.append(indenter).append("this.").append(ELEMENT2).append(" = ").append(ELEMENT2).append(';') + .append(NEW_LINE); + code.append(indenter).append("this.").append(ELEMENT3).append(" = ").append(ELEMENT3).append(';') + .append(NEW_LINE); code.append(indenter).append(CACHED_HASH_CODE).append(" = ((31 +").append(NEW_LINE); 
indenter.increaseLevel(2); - code.append(indenter).append(type1.getHashCodeText(ELEMENT1)).append(") * 31 +") - .append(NEW_LINE); - code.append(indenter).append(type2.getHashCodeText(ELEMENT2)).append(") * 31 +") - .append(NEW_LINE); + code.append(indenter).append(type1.getHashCodeText(ELEMENT1)).append(") * 31 +").append(NEW_LINE); + code.append(indenter).append(type2.getHashCodeText(ELEMENT2)).append(") * 31 +").append(NEW_LINE); code.append(indenter).append(type3.getHashCodeText(ELEMENT3)).append(';').append(NEW_LINE); indenter.decreaseLevel(3); code.append(indenter).append('}').append(NEW_LINE); code.append(NEW_LINE); - code.append(indenter).append("public final ").append(class1Name) - .append(" getFirstElement() {").append(NEW_LINE); + code.append(indenter).append("public final ").append(class1Name).append(" getFirstElement() {") + .append(NEW_LINE); indenter.increaseLevel(); code.append(indenter).append("return ").append(ELEMENT1).append(';').append(NEW_LINE); indenter.decreaseLevel(); @@ -646,8 +600,8 @@ private String generateTriple(@NotNull final String className, @NotNull final El code.append(NEW_LINE); - code.append(indenter).append("public final ").append(class2Name) - .append(" getSecondElement() {").append(NEW_LINE); + code.append(indenter).append("public final ").append(class2Name).append(" getSecondElement() {") + .append(NEW_LINE); indenter.increaseLevel(); code.append(indenter).append("return ").append(ELEMENT2).append(';').append(NEW_LINE); indenter.decreaseLevel(); @@ -655,8 +609,8 @@ private String generateTriple(@NotNull final String className, @NotNull final El code.append(NEW_LINE); - code.append(indenter).append("public final ").append(class3Name) - .append(" getThirdElement() {").append(NEW_LINE); + code.append(indenter).append("public final ").append(class3Name).append(" getThirdElement() {") + .append(NEW_LINE); indenter.increaseLevel(); code.append(indenter).append("return ").append(ELEMENT3).append(';').append(NEW_LINE); 
indenter.decreaseLevel(); @@ -667,41 +621,36 @@ private String generateTriple(@NotNull final String className, @NotNull final El code.append(indenter).append("@Override").append(NEW_LINE); code.append(indenter).append("public final int hashCode() {").append(NEW_LINE); indenter.increaseLevel(); - code.append(indenter).append("return ").append(CACHED_HASH_CODE).append(';') - .append(NEW_LINE); + code.append(indenter).append("return ").append(CACHED_HASH_CODE).append(';').append(NEW_LINE); indenter.decreaseLevel(); code.append(indenter).append('}').append(NEW_LINE); code.append(NEW_LINE); code.append(indenter).append("@Override").append(NEW_LINE); - code.append(indenter).append("public final boolean equals(final Object ").append(OTHER) - .append(") {").append(NEW_LINE); + code.append(indenter).append("public final boolean equals(final Object ").append(OTHER).append(") {") + .append(NEW_LINE); indenter.increaseLevel(); code.append(indenter).append("if (this == ").append(OTHER).append(") {").append(NEW_LINE); indenter.increaseLevel(); code.append(indenter).append("return true;").append(NEW_LINE); indenter.decreaseLevel(); code.append(indenter).append('}').append(NEW_LINE); - code.append(indenter).append("if (").append(OTHER) - .append(" == null || getClass() != other.getClass()) {").append(NEW_LINE); + code.append(indenter).append("if (").append(OTHER).append(" == null || getClass() != other.getClass()) {") + .append(NEW_LINE); indenter.increaseLevel(); code.append(indenter).append("return false;").append(NEW_LINE); indenter.decreaseLevel(); code.append(indenter).append('}').append(NEW_LINE); - code.append(indenter).append("final ").append(className).append(' ').append(TYPED_OTHER) - .append(" = (").append(className).append(") ").append(OTHER).append(';') - .append(NEW_LINE); + code.append(indenter).append("final ").append(className).append(' ').append(TYPED_OTHER).append(" = (") + .append(className).append(") ").append(OTHER).append(';').append(NEW_LINE); 
code.append(indenter).append("// @formatter:off").append(NEW_LINE); - code.append(indenter).append("return ") - .append(type1.getEqualsText(ELEMENT1, TYPED_OTHER + '.' + ELEMENT1)).append(" &&") - .append(NEW_LINE); - code.append(indenter).append(" ") - .append(type2.getEqualsText(ELEMENT2, TYPED_OTHER + '.' + ELEMENT2)).append(" &&") - .append(NEW_LINE); - code.append(indenter).append(" ") - .append(type3.getEqualsText(ELEMENT3, TYPED_OTHER + '.' + ELEMENT3)).append(';') - .append(NEW_LINE); + code.append(indenter).append("return ").append(type1.getEqualsText(ELEMENT1, TYPED_OTHER + '.' + ELEMENT1)) + .append(" &&").append(NEW_LINE); + code.append(indenter).append(" ").append(type2.getEqualsText(ELEMENT2, TYPED_OTHER + '.' + ELEMENT2)) + .append(" &&").append(NEW_LINE); + code.append(indenter).append(" ").append(type3.getEqualsText(ELEMENT3, TYPED_OTHER + '.' + ELEMENT3)) + .append(';').append(NEW_LINE); code.append(indenter).append("// @formatter:on").append(NEW_LINE); indenter.decreaseLevel(); code.append(indenter).append('}').append(NEW_LINE); @@ -709,8 +658,8 @@ private String generateTriple(@NotNull final String className, @NotNull final El code.append(NEW_LINE); code.append(indenter).append("@Override").append(NEW_LINE); - code.append(indenter).append("public final int compareTo(@NotNull final ").append(className) - .append(' ').append(OTHER).append(") {").append(NEW_LINE); + code.append(indenter).append("public final int compareTo(@NotNull final ").append(className).append(' ') + .append(OTHER).append(") {").append(NEW_LINE); indenter.increaseLevel(); code.append(indenter).append("if (this == ").append(OTHER).append(") {").append(NEW_LINE); indenter.increaseLevel(); @@ -720,14 +669,13 @@ private String generateTriple(@NotNull final String className, @NotNull final El code.append(indenter).append("int comparison;").append(NEW_LINE); code.append(indenter).append("// @formatter:off").append(NEW_LINE); code.append(indenter).append("return 0 != (comparison = ") 
- .append(type1.getCompareToText(ELEMENT1, OTHER + '.' + ELEMENT1)) - .append(") ? comparison :").append(NEW_LINE); + .append(type1.getCompareToText(ELEMENT1, OTHER + '.' + ELEMENT1)).append(") ? comparison :") + .append(NEW_LINE); code.append(indenter).append(" 0 != (comparison = ") - .append(type2.getCompareToText(ELEMENT2, OTHER + '.' + ELEMENT2)) - .append(") ? comparison :").append(NEW_LINE); - code.append(indenter).append(" ") - .append(type3.getCompareToText(ELEMENT3, OTHER + '.' + ELEMENT3)).append(";") - .append(NEW_LINE); + .append(type2.getCompareToText(ELEMENT2, OTHER + '.' + ELEMENT2)).append(") ? comparison :") + .append(NEW_LINE); + code.append(indenter).append(" ").append(type3.getCompareToText(ELEMENT3, OTHER + '.' + ELEMENT3)) + .append(";").append(NEW_LINE); code.append(indenter).append("// @formatter:on").append(NEW_LINE); indenter.decreaseLevel(); code.append(indenter).append('}').append(NEW_LINE); @@ -735,17 +683,12 @@ private String generateTriple(@NotNull final String className, @NotNull final El code.append(NEW_LINE); code.append(indenter).append("@Override").append(NEW_LINE); - code.append(indenter) - .append( - "public void writeExternal(@NotNull final ObjectOutput out) throws IOException {") - .append(NEW_LINE); - indenter.increaseLevel(); - code.append(indenter).append(type1.getWriteExternalText("out", ELEMENT1)).append(';') - .append(NEW_LINE); - code.append(indenter).append(type2.getWriteExternalText("out", ELEMENT2)).append(';') - .append(NEW_LINE); - code.append(indenter).append(type3.getWriteExternalText("out", ELEMENT3)).append(';') - .append(NEW_LINE); + code.append(indenter).append("public void writeExternal(@NotNull final ObjectOutput out) throws IOException {") + .append(NEW_LINE); + indenter.increaseLevel(); + code.append(indenter).append(type1.getWriteExternalText("out", ELEMENT1)).append(';').append(NEW_LINE); + code.append(indenter).append(type2.getWriteExternalText("out", ELEMENT2)).append(';').append(NEW_LINE); + 
code.append(indenter).append(type3.getWriteExternalText("out", ELEMENT3)).append(';').append(NEW_LINE); indenter.decreaseLevel(); code.append(indenter).append('}').append(NEW_LINE); @@ -753,15 +696,13 @@ private String generateTriple(@NotNull final String className, @NotNull final El code.append(indenter).append("@Override").append(NEW_LINE); code.append(indenter).append( - "public void readExternal(@NotNull final ObjectInput in) throws IOException, ClassNotFoundException {") - .append(NEW_LINE); + "public void readExternal(@NotNull final ObjectInput in) throws IOException, ClassNotFoundException {") + .append(NEW_LINE); indenter.increaseLevel(); code.append(indenter).append("initialize(").append(NEW_LINE); indenter.increaseLevel(2); - code.append(indenter).append(type1.getReadExternalText("in", ELEMENT1)).append(',') - .append(NEW_LINE); - code.append(indenter).append(type2.getReadExternalText("in", ELEMENT2)).append(',') - .append(NEW_LINE); + code.append(indenter).append(type1.getReadExternalText("in", ELEMENT1)).append(',').append(NEW_LINE); + code.append(indenter).append(type2.getReadExternalText("in", ELEMENT2)).append(',').append(NEW_LINE); code.append(indenter).append(type3.getReadExternalText("in", ELEMENT3)).append(NEW_LINE); indenter.decreaseLevel(2); code.append(indenter).append(");").append(NEW_LINE); @@ -772,32 +713,26 @@ private String generateTriple(@NotNull final String className, @NotNull final El code.append(indenter).append("@Override").append(NEW_LINE); code.append(indenter).append( - "public void writeExternalStreaming(@NotNull final ObjectOutput out, @NotNull final TIntObjectMap cachedWriters) throws IOException {") - .append(NEW_LINE); + "public void writeExternalStreaming(@NotNull final ObjectOutput out, @NotNull final TIntObjectMap cachedWriters) throws IOException {") + .append(NEW_LINE); indenter.increaseLevel(); if (type1 != ElementType.OBJECT) { - code.append(indenter).append(type1.getWriteExternalText("out", ELEMENT1)).append(';') - 
.append(NEW_LINE); + code.append(indenter).append(type1.getWriteExternalText("out", ELEMENT1)).append(';').append(NEW_LINE); } else { - code.append(indenter) - .append("StreamingExternalizable.writeObjectElement(out, cachedWriters, 0, ") - .append(ELEMENT1).append(");").append(NEW_LINE); + code.append(indenter).append("StreamingExternalizable.writeObjectElement(out, cachedWriters, 0, ") + .append(ELEMENT1).append(");").append(NEW_LINE); } if (type2 != ElementType.OBJECT) { - code.append(indenter).append(type2.getWriteExternalText("out", ELEMENT2)).append(';') - .append(NEW_LINE); + code.append(indenter).append(type2.getWriteExternalText("out", ELEMENT2)).append(';').append(NEW_LINE); } else { - code.append(indenter) - .append("StreamingExternalizable.writeObjectElement(out, cachedWriters, 1, ") - .append(ELEMENT2).append(");").append(NEW_LINE); + code.append(indenter).append("StreamingExternalizable.writeObjectElement(out, cachedWriters, 1, ") + .append(ELEMENT2).append(");").append(NEW_LINE); } if (type3 != ElementType.OBJECT) { - code.append(indenter).append(type3.getWriteExternalText("out", ELEMENT3)).append(';') - .append(NEW_LINE); + code.append(indenter).append(type3.getWriteExternalText("out", ELEMENT3)).append(';').append(NEW_LINE); } else { - code.append(indenter) - .append("StreamingExternalizable.writeObjectElement(out, cachedWriters, 2, ") - .append(ELEMENT3).append(");").append(NEW_LINE); + code.append(indenter).append("StreamingExternalizable.writeObjectElement(out, cachedWriters, 2, ") + .append(ELEMENT3).append(");").append(NEW_LINE); } indenter.decreaseLevel(); code.append(indenter).append('}').append(NEW_LINE); @@ -806,33 +741,29 @@ private String generateTriple(@NotNull final String className, @NotNull final El code.append(indenter).append("@Override").append(NEW_LINE); code.append(indenter).append( - "public void readExternalStreaming(@NotNull final ObjectInput in, @NotNull final TIntObjectMap cachedReaders) throws Exception {") - 
.append(NEW_LINE); + "public void readExternalStreaming(@NotNull final ObjectInput in, @NotNull final TIntObjectMap cachedReaders) throws Exception {") + .append(NEW_LINE); indenter.increaseLevel(); code.append(indenter).append("initialize(").append(NEW_LINE); if (type1 != ElementType.OBJECT) { - code.append(indenter.increaseLevel(2)).append(type1.getReadExternalText("in", ELEMENT1)) - .append(',').append(NEW_LINE); + code.append(indenter.increaseLevel(2)).append(type1.getReadExternalText("in", ELEMENT1)).append(',') + .append(NEW_LINE); } else { code.append(indenter.increaseLevel(2)) - .append("StreamingExternalizable.readObjectElement(in, cachedReaders, 0)") - .append(',').append(NEW_LINE); + .append("StreamingExternalizable.readObjectElement(in, cachedReaders, 0)").append(',') + .append(NEW_LINE); } if (type2 != ElementType.OBJECT) { - code.append(indenter).append(type2.getReadExternalText("in", ELEMENT2)).append(',') - .append(NEW_LINE); + code.append(indenter).append(type2.getReadExternalText("in", ELEMENT2)).append(',').append(NEW_LINE); } else { - code.append(indenter) - .append("StreamingExternalizable.readObjectElement(in, cachedReaders, 1)") - .append(',').append(NEW_LINE); + code.append(indenter).append("StreamingExternalizable.readObjectElement(in, cachedReaders, 1)").append(',') + .append(NEW_LINE); } if (type3 != ElementType.OBJECT) { - code.append(indenter).append(type3.getReadExternalText("in", ELEMENT3)) - .append(NEW_LINE); + code.append(indenter).append(type3.getReadExternalText("in", ELEMENT3)).append(NEW_LINE); } else { - code.append(indenter) - .append("StreamingExternalizable.readObjectElement(in, cachedReaders, 2)") - .append(NEW_LINE); + code.append(indenter).append("StreamingExternalizable.readObjectElement(in, cachedReaders, 2)") + .append(NEW_LINE); } code.append(indenter.decreaseLevel(2)).append(");").append(NEW_LINE); indenter.decreaseLevel(); @@ -843,8 +774,7 @@ private String generateTriple(@NotNull final String className, @NotNull 
final El code.append(indenter).append("@Override").append(NEW_LINE); code.append(indenter).append("public String toString() {").append(NEW_LINE); indenter.increaseLevel(); - code.append(indenter).append("return \"").append(className).append("{\" +") - .append(NEW_LINE); + code.append(indenter).append("return \"").append(className).append("{\" +").append(NEW_LINE); indenter.increaseLevel(2); code.append(indenter).append(ELEMENT1).append(" + \", \" +").append(NEW_LINE); code.append(indenter).append(ELEMENT2).append(" + \", \" +").append(NEW_LINE); @@ -856,99 +786,83 @@ private String generateTriple(@NotNull final String className, @NotNull final El code.append(indenter).append("@Override").append(NEW_LINE); code.append(indenter).append("public ").append(className) - .append(" canonicalize(@NotNull final UnaryOperator canonicalizer) {") - .append(NEW_LINE); + .append(" canonicalize(@NotNull final UnaryOperator canonicalizer) {").append(NEW_LINE); indenter.increaseLevel(); if (firstIsObject) { - code.append(indenter).append("final ").append(class1Name).append(' ') - .append(CANONICALIZED_ELEMENT1).append(" = canonicalizer.apply(").append(ELEMENT1) - .append(");").append(NEW_LINE); + code.append(indenter).append("final ").append(class1Name).append(' ').append(CANONICALIZED_ELEMENT1) + .append(" = canonicalizer.apply(").append(ELEMENT1).append(");").append(NEW_LINE); } if (secondIsObject) { - code.append(indenter).append("final ").append(class2Name).append(' ') - .append(CANONICALIZED_ELEMENT2).append(" = canonicalizer.apply(").append(ELEMENT2) - .append(");").append(NEW_LINE); + code.append(indenter).append("final ").append(class2Name).append(' ').append(CANONICALIZED_ELEMENT2) + .append(" = canonicalizer.apply(").append(ELEMENT2).append(");").append(NEW_LINE); } if (thirdIsObject) { - code.append(indenter).append("final ").append(class3Name).append(' ') - .append(CANONICALIZED_ELEMENT3).append(" = canonicalizer.apply(").append(ELEMENT3) - 
.append(");").append(NEW_LINE); + code.append(indenter).append("final ").append(class3Name).append(' ').append(CANONICALIZED_ELEMENT3) + .append(" = canonicalizer.apply(").append(ELEMENT3).append(");").append(NEW_LINE); } if (firstIsObject && secondIsObject && thirdIsObject) { - code.append(indenter).append("return ").append(CANONICALIZED_ELEMENT1).append(" == ") - .append(ELEMENT1) - .append(" && ").append(CANONICALIZED_ELEMENT2).append(" == ").append(ELEMENT2) - .append(" && ").append(CANONICALIZED_ELEMENT3).append(" == ").append(ELEMENT3) - .append(NEW_LINE); + code.append(indenter).append("return ").append(CANONICALIZED_ELEMENT1).append(" == ").append(ELEMENT1) + .append(" && ").append(CANONICALIZED_ELEMENT2).append(" == ").append(ELEMENT2) + .append(" && ").append(CANONICALIZED_ELEMENT3).append(" == ").append(ELEMENT3).append(NEW_LINE); indenter.increaseLevel(2); - code.append(indenter).append("? this : new ").append(className).append('(') - .append(CANONICALIZED_ELEMENT1) - .append(", ").append(CANONICALIZED_ELEMENT2) - .append(", ").append(CANONICALIZED_ELEMENT3).append(");").append(NEW_LINE); + code.append(indenter).append("? this : new ").append(className).append('(').append(CANONICALIZED_ELEMENT1) + .append(", ").append(CANONICALIZED_ELEMENT2) + .append(", ").append(CANONICALIZED_ELEMENT3).append(");").append(NEW_LINE); indenter.decreaseLevel(2); } else if (firstIsObject) { if (secondIsObject) { - code.append(indenter).append("return ").append(CANONICALIZED_ELEMENT1) - .append(" == ").append(ELEMENT1) - .append(" && ").append(CANONICALIZED_ELEMENT2).append(" == ").append(ELEMENT2) - .append(NEW_LINE); + code.append(indenter).append("return ").append(CANONICALIZED_ELEMENT1).append(" == ").append(ELEMENT1) + .append(" && ").append(CANONICALIZED_ELEMENT2).append(" == ").append(ELEMENT2).append(NEW_LINE); indenter.increaseLevel(2); code.append(indenter).append("? 
this : new ").append(className).append('(') - .append(CANONICALIZED_ELEMENT1) - .append(", ").append(CANONICALIZED_ELEMENT2) - .append(", ").append(ELEMENT3).append(");").append(NEW_LINE); + .append(CANONICALIZED_ELEMENT1) + .append(", ").append(CANONICALIZED_ELEMENT2) + .append(", ").append(ELEMENT3).append(");").append(NEW_LINE); indenter.decreaseLevel(2); } else if (thirdIsObject) { - code.append(indenter).append("return ").append(CANONICALIZED_ELEMENT1) - .append(" == ").append(ELEMENT1) - .append(" && ").append(CANONICALIZED_ELEMENT3).append(" == ").append(ELEMENT3) - .append(NEW_LINE); + code.append(indenter).append("return ").append(CANONICALIZED_ELEMENT1).append(" == ").append(ELEMENT1) + .append(" && ").append(CANONICALIZED_ELEMENT3).append(" == ").append(ELEMENT3).append(NEW_LINE); indenter.increaseLevel(2); code.append(indenter).append("? this : new ").append(className).append('(') - .append(CANONICALIZED_ELEMENT1) - .append(", ").append(ELEMENT2) - .append(", ").append(CANONICALIZED_ELEMENT3).append(");").append(NEW_LINE); + .append(CANONICALIZED_ELEMENT1) + .append(", ").append(ELEMENT2) + .append(", ").append(CANONICALIZED_ELEMENT3).append(");").append(NEW_LINE); indenter.decreaseLevel(2); } else { - code.append(indenter).append("return ").append(CANONICALIZED_ELEMENT1) - .append(" == ").append(ELEMENT1).append(NEW_LINE); + code.append(indenter).append("return ").append(CANONICALIZED_ELEMENT1).append(" == ").append(ELEMENT1) + .append(NEW_LINE); indenter.increaseLevel(2); code.append(indenter).append("? 
this : new ").append(className).append('(') - .append(CANONICALIZED_ELEMENT1) - .append(", ").append(ELEMENT2) - .append(", ").append(ELEMENT3).append(");").append(NEW_LINE); + .append(CANONICALIZED_ELEMENT1) + .append(", ").append(ELEMENT2) + .append(", ").append(ELEMENT3).append(");").append(NEW_LINE); indenter.decreaseLevel(2); } } else if (secondIsObject) { if (thirdIsObject) { - code.append(indenter).append("return ").append(CANONICALIZED_ELEMENT2) - .append(" == ").append(ELEMENT2) - .append(" && ").append(CANONICALIZED_ELEMENT3).append(" == ").append(ELEMENT3) - .append(NEW_LINE); + code.append(indenter).append("return ").append(CANONICALIZED_ELEMENT2).append(" == ").append(ELEMENT2) + .append(" && ").append(CANONICALIZED_ELEMENT3).append(" == ").append(ELEMENT3).append(NEW_LINE); indenter.increaseLevel(2); - code.append(indenter).append("? this : new ").append(className).append('(') - .append(ELEMENT1) - .append(", ").append(CANONICALIZED_ELEMENT2) - .append(", ").append(CANONICALIZED_ELEMENT3).append(");").append(NEW_LINE); + code.append(indenter).append("? this : new ").append(className).append('(').append(ELEMENT1) + .append(", ").append(CANONICALIZED_ELEMENT2) + .append(", ").append(CANONICALIZED_ELEMENT3).append(");").append(NEW_LINE); indenter.decreaseLevel(2); } else { - code.append(indenter).append("return ").append(CANONICALIZED_ELEMENT2) - .append(" == ").append(ELEMENT2).append(NEW_LINE); + code.append(indenter).append("return ").append(CANONICALIZED_ELEMENT2).append(" == ").append(ELEMENT2) + .append(NEW_LINE); indenter.increaseLevel(2); - code.append(indenter).append("? this : new ").append(className).append('(') - .append(ELEMENT1) - .append(", ").append(CANONICALIZED_ELEMENT2) - .append(", ").append(ELEMENT3).append(");").append(NEW_LINE); + code.append(indenter).append("? 
this : new ").append(className).append('(').append(ELEMENT1) + .append(", ").append(CANONICALIZED_ELEMENT2) + .append(", ").append(ELEMENT3).append(");").append(NEW_LINE); indenter.decreaseLevel(2); } } else if (thirdIsObject) { - code.append(indenter).append("return ").append(CANONICALIZED_ELEMENT3).append(" == ") - .append(ELEMENT3).append(NEW_LINE); + code.append(indenter).append("return ").append(CANONICALIZED_ELEMENT3).append(" == ").append(ELEMENT3) + .append(NEW_LINE); indenter.increaseLevel(2); - code.append(indenter).append("? this : new ").append(className).append('(') - .append(ELEMENT1) - .append(", ").append(ELEMENT2) - .append(", ").append(CANONICALIZED_ELEMENT3).append(");").append(NEW_LINE); + code.append(indenter).append("? this : new ").append(className).append('(').append(ELEMENT1) + .append(", ").append(ELEMENT2) + .append(", ").append(CANONICALIZED_ELEMENT3).append(");").append(NEW_LINE); indenter.decreaseLevel(2); } else { code.append(indenter).append("return this;").append(NEW_LINE); @@ -962,8 +876,8 @@ private String generateTriple(@NotNull final String className, @NotNull final El } private void writeClass(@NotNull final String className, @NotNull final String classBody) { - try (final PrintStream destination = new PrintStream( - new FileOutputStream(new File(OUTPUT_RELATIVE_PATH, className + ".java")))) { + try (final PrintStream destination = + new PrintStream(new FileOutputStream(new File(OUTPUT_RELATIVE_PATH, className + ".java")))) { destination.print(classBody); destination.flush(); } catch (FileNotFoundException e) { @@ -973,16 +887,15 @@ private void writeClass(@NotNull final String className, @NotNull final String c public static void main(@NotNull final String... 
args) { final TupleCodeGenerator generator = new TupleCodeGenerator(); - Arrays.stream(ElementType.values()) - .forEach(t1 -> Arrays.stream(ElementType.values()).forEach(t2 -> { - final String doubleName = generator.generateClassName(t1, t2); - final String doubleBody = generator.generateDouble(doubleName, t1, t2); - generator.writeClass(doubleName, doubleBody); - Arrays.stream(ElementType.values()).forEach(t3 -> { - final String tripleName = generator.generateClassName(t1, t2, t3); - final String tripleBody = generator.generateTriple(tripleName, t1, t2, t3); - generator.writeClass(tripleName, tripleBody); - }); - })); + Arrays.stream(ElementType.values()).forEach(t1 -> Arrays.stream(ElementType.values()).forEach(t2 -> { + final String doubleName = generator.generateClassName(t1, t2); + final String doubleBody = generator.generateDouble(doubleName, t1, t2); + generator.writeClass(doubleName, doubleBody); + Arrays.stream(ElementType.values()).forEach(t3 -> { + final String tripleName = generator.generateClassName(t1, t2, t3); + final String tripleBody = generator.generateTriple(tripleName, t1, t2, t3); + generator.writeClass(tripleName, tripleBody); + }); + })); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/AsOfJoinHelper.java b/DB/src/main/java/io/deephaven/db/v2/AsOfJoinHelper.java index da86391be46..4b7b1c0e34c 100644 --- a/DB/src/main/java/io/deephaven/db/v2/AsOfJoinHelper.java +++ b/DB/src/main/java/io/deephaven/db/v2/AsOfJoinHelper.java @@ -37,15 +37,13 @@ public class AsOfJoinHelper { private AsOfJoinHelper() {} // static use only static Table asOfJoin(QueryTable leftTable, QueryTable rightTable, MatchPair[] columnsToMatch, - MatchPair[] columnsToAdd, SortingOrder order, boolean disallowExactMatch) { + MatchPair[] columnsToAdd, SortingOrder order, boolean disallowExactMatch) { final JoinControl joinControl = new JoinControl(); - return asOfJoin(joinControl, leftTable, rightTable, columnsToMatch, columnsToAdd, order, - disallowExactMatch); + return 
asOfJoin(joinControl, leftTable, rightTable, columnsToMatch, columnsToAdd, order, disallowExactMatch); } - static Table asOfJoin(JoinControl control, QueryTable leftTable, QueryTable rightTable, - MatchPair[] columnsToMatch, MatchPair[] columnsToAdd, SortingOrder order, - boolean disallowExactMatch) { + static Table asOfJoin(JoinControl control, QueryTable leftTable, QueryTable rightTable, MatchPair[] columnsToMatch, + MatchPair[] columnsToAdd, SortingOrder order, boolean disallowExactMatch) { if (columnsToMatch.length == 0) { throw new IllegalArgumentException("aj() requires at least one column to match!"); } @@ -53,82 +51,75 @@ static Table asOfJoin(JoinControl control, QueryTable leftTable, QueryTable righ checkColumnConflicts(leftTable, columnsToAdd); if (!leftTable.isLive() && leftTable.size() == 0) { - return makeResult(leftTable, rightTable, - new StaticSingleValueRedirectionIndexImpl(Index.NULL_KEY), columnsToAdd, false); + return makeResult(leftTable, rightTable, new StaticSingleValueRedirectionIndexImpl(Index.NULL_KEY), + columnsToAdd, false); } final MatchPair stampPair = columnsToMatch[columnsToMatch.length - 1]; final int keyColumnCount = columnsToMatch.length - 1; - final ColumnSource[] originalLeftSources = - Arrays.stream(columnsToMatch).limit(keyColumnCount) + final ColumnSource[] originalLeftSources = Arrays.stream(columnsToMatch).limit(keyColumnCount) .map(mp -> leftTable.getColumnSource(mp.leftColumn)).toArray(ColumnSource[]::new); final ColumnSource[] leftSources = new ColumnSource[originalLeftSources.length]; for (int ii = 0; ii < leftSources.length; ++ii) { leftSources[ii] = ReinterpretUtilities.maybeConvertToPrimitive(originalLeftSources[ii]); } - final ColumnSource[] originalRightSources = - Arrays.stream(columnsToMatch).limit(keyColumnCount) + final ColumnSource[] originalRightSources = Arrays.stream(columnsToMatch).limit(keyColumnCount) .map(mp -> rightTable.getColumnSource(mp.rightColumn)).toArray(ColumnSource[]::new); final 
ColumnSource[] rightSources = new ColumnSource[originalLeftSources.length]; for (int ii = 0; ii < leftSources.length; ++ii) { - rightSources[ii] = - ReinterpretUtilities.maybeConvertToPrimitive(originalRightSources[ii]); + rightSources[ii] = ReinterpretUtilities.maybeConvertToPrimitive(originalRightSources[ii]); } - final ColumnSource leftStampSource = ReinterpretUtilities - .maybeConvertToPrimitive(leftTable.getColumnSource(stampPair.left())); + final ColumnSource leftStampSource = + ReinterpretUtilities.maybeConvertToPrimitive(leftTable.getColumnSource(stampPair.left())); final ColumnSource originalRightStampSource = rightTable.getColumnSource(stampPair.right()); - final ColumnSource rightStampSource = - ReinterpretUtilities.maybeConvertToPrimitive(originalRightStampSource); + final ColumnSource rightStampSource = ReinterpretUtilities.maybeConvertToPrimitive(originalRightStampSource); if (leftStampSource.getType() != rightStampSource.getType()) { throw new IllegalArgumentException("Can not aj() with different stamp types: left=" - + leftStampSource.getType() + ", right=" + rightStampSource.getType()); + + leftStampSource.getType() + ", right=" + rightStampSource.getType()); } - final RedirectionIndex redirectionIndex = - JoinRedirectionIndex.makeRedirectionIndex(control, leftTable); + final RedirectionIndex redirectionIndex = JoinRedirectionIndex.makeRedirectionIndex(control, leftTable); if (keyColumnCount == 0) { - return zeroKeyAj(control, leftTable, rightTable, columnsToAdd, stampPair, - leftStampSource, originalRightStampSource, rightStampSource, order, - disallowExactMatch, redirectionIndex); + return zeroKeyAj(control, leftTable, rightTable, columnsToAdd, stampPair, leftStampSource, + originalRightStampSource, rightStampSource, order, disallowExactMatch, redirectionIndex); } if (rightTable.isLive()) { if (leftTable.isLive()) { - return bothIncrementalAj(control, leftTable, rightTable, columnsToMatch, - columnsToAdd, order, disallowExactMatch, stampPair, - 
leftSources, rightSources, leftStampSource, rightStampSource, redirectionIndex); + return bothIncrementalAj(control, leftTable, rightTable, columnsToMatch, columnsToAdd, order, + disallowExactMatch, stampPair, + leftSources, rightSources, leftStampSource, rightStampSource, redirectionIndex); } - return rightTickingLeftStaticAj(control, leftTable, rightTable, columnsToMatch, - columnsToAdd, order, disallowExactMatch, stampPair, leftSources, rightSources, - leftStampSource, rightStampSource, redirectionIndex); + return rightTickingLeftStaticAj(control, leftTable, rightTable, columnsToMatch, columnsToAdd, order, + disallowExactMatch, stampPair, leftSources, rightSources, leftStampSource, rightStampSource, + redirectionIndex); } else { - return rightStaticAj(control, leftTable, rightTable, columnsToMatch, columnsToAdd, - order, disallowExactMatch, stampPair, originalLeftSources, leftSources, - rightSources, leftStampSource, originalRightStampSource, rightStampSource, - redirectionIndex); + return rightStaticAj(control, leftTable, rightTable, columnsToMatch, columnsToAdd, order, + disallowExactMatch, stampPair, originalLeftSources, leftSources, rightSources, leftStampSource, + originalRightStampSource, rightStampSource, redirectionIndex); } } @NotNull private static Table rightStaticAj(JoinControl control, - QueryTable leftTable, - Table rightTable, - MatchPair[] columnsToMatch, - MatchPair[] columnsToAdd, - SortingOrder order, - boolean disallowExactMatch, - MatchPair stampPair, - ColumnSource[] originalLeftSources, - ColumnSource[] leftSources, - ColumnSource[] rightSources, - ColumnSource leftStampSource, - ColumnSource originalRightStampSource, - ColumnSource rightStampSource, - RedirectionIndex redirectionIndex) { + QueryTable leftTable, + Table rightTable, + MatchPair[] columnsToMatch, + MatchPair[] columnsToAdd, + SortingOrder order, + boolean disallowExactMatch, + MatchPair stampPair, + ColumnSource[] originalLeftSources, + ColumnSource[] leftSources, + 
ColumnSource[] rightSources, + ColumnSource leftStampSource, + ColumnSource originalRightStampSource, + ColumnSource rightStampSource, + RedirectionIndex redirectionIndex) { final LongArraySource slots = new LongArraySource(); final int slotCount; @@ -166,15 +157,15 @@ private static Table rightStaticAj(JoinControl control, } final StaticChunkedAsOfJoinStateManager asOfJoinStateManager = - new StaticChunkedAsOfJoinStateManager(leftSources, size, originalLeftSources); + new StaticChunkedAsOfJoinStateManager(leftSources, size, originalLeftSources); final Pair, ObjectArraySource> leftGroupedSources; final int leftGroupingSize; if (leftGrouping != null) { final MutableInt groupSize = new MutableInt(); // noinspection unchecked - leftGroupedSources = AbstractColumnSource.groupingToFlatSources( - (ColumnSource) leftSources[0], leftGrouping, leftTable.getIndex(), groupSize); + leftGroupedSources = AbstractColumnSource.groupingToFlatSources((ColumnSource) leftSources[0], leftGrouping, + leftTable.getIndex(), groupSize); leftGroupingSize = groupSize.intValue(); } else { leftGroupedSources = null; @@ -186,8 +177,8 @@ private static Table rightStaticAj(JoinControl control, if (rightGrouping != null) { final MutableInt groupSize = new MutableInt(); // noinspection unchecked - rightGroupedSources = AbstractColumnSource.groupingToFlatSources( - (ColumnSource) rightSources[0], rightGrouping, rightTable.getIndex(), groupSize); + rightGroupedSources = AbstractColumnSource.groupingToFlatSources((ColumnSource) rightSources[0], + rightGrouping, rightTable.getIndex(), groupSize); rightGroupingSize = groupSize.intValue(); } else { rightGroupedSources = null; @@ -196,42 +187,37 @@ private static Table rightStaticAj(JoinControl control, if (buildLeft) { if (leftGroupedSources == null) { - slotCount = asOfJoinStateManager.buildFromLeftSide(leftTable.getIndex(), - leftSources, slots); + slotCount = asOfJoinStateManager.buildFromLeftSide(leftTable.getIndex(), leftSources, slots); } else { - 
slotCount = asOfJoinStateManager.buildFromLeftSide( - Index.CURRENT_FACTORY.getFlatIndex(leftGroupingSize), - new ColumnSource[] {leftGroupedSources.getFirst()}, slots); + slotCount = asOfJoinStateManager.buildFromLeftSide(Index.CURRENT_FACTORY.getFlatIndex(leftGroupingSize), + new ColumnSource[] {leftGroupedSources.getFirst()}, slots); } if (rightGroupedSources == null) { asOfJoinStateManager.probeRight(rightTable.getIndex(), rightSources); } else { - asOfJoinStateManager.probeRight( - Index.CURRENT_FACTORY.getFlatIndex(rightGroupingSize), - new ColumnSource[] {rightGroupedSources.getFirst()}); + asOfJoinStateManager.probeRight(Index.CURRENT_FACTORY.getFlatIndex(rightGroupingSize), + new ColumnSource[] {rightGroupedSources.getFirst()}); } } else { if (rightGroupedSources == null) { - slotCount = asOfJoinStateManager.buildFromRightSide(rightTable.getIndex(), - rightSources, slots); + slotCount = asOfJoinStateManager.buildFromRightSide(rightTable.getIndex(), rightSources, slots); } else { - slotCount = asOfJoinStateManager.buildFromRightSide( - Index.CURRENT_FACTORY.getFlatIndex(rightGroupingSize), - new ColumnSource[] {rightGroupedSources.getFirst()}, slots); + slotCount = + asOfJoinStateManager.buildFromRightSide(Index.CURRENT_FACTORY.getFlatIndex(rightGroupingSize), + new ColumnSource[] {rightGroupedSources.getFirst()}, slots); } if (leftGroupedSources == null) { asOfJoinStateManager.probeLeft(leftTable.getIndex(), leftSources); } else { asOfJoinStateManager.probeLeft(Index.CURRENT_FACTORY.getFlatIndex(leftGroupingSize), - new ColumnSource[] {leftGroupedSources.getFirst()}); + new ColumnSource[] {leftGroupedSources.getFirst()}); } } final ArrayValuesCache arrayValuesCache; if (leftTable.isLive()) { if (rightGroupedSources != null) { - asOfJoinStateManager.convertRightGrouping(slots, slotCount, - rightGroupedSources.getSecond()); + asOfJoinStateManager.convertRightGrouping(slots, slotCount, rightGroupedSources.getSecond()); } else { 
asOfJoinStateManager.convertRightBuildersToIndex(slots, slotCount); } @@ -240,18 +226,16 @@ private static Table rightStaticAj(JoinControl control, } else { arrayValuesCache = null; if (rightGroupedSources != null) { - asOfJoinStateManager.convertRightGrouping(slots, slotCount, - rightGroupedSources.getSecond()); + asOfJoinStateManager.convertRightGrouping(slots, slotCount, rightGroupedSources.getSecond()); } } - try ( - final AsOfStampContext stampContext = new AsOfStampContext(order, disallowExactMatch, - leftStampSource, rightStampSource, originalRightStampSource); - final ResettableWritableLongChunk keyChunk = - ResettableWritableLongChunk.makeResettableChunk(); - final ResettableWritableChunk valuesChunk = - rightStampSource.getChunkType().makeResettableWritableChunk()) { + try (final AsOfStampContext stampContext = new AsOfStampContext(order, disallowExactMatch, leftStampSource, + rightStampSource, originalRightStampSource); + final ResettableWritableLongChunk keyChunk = + ResettableWritableLongChunk.makeResettableChunk(); + final ResettableWritableChunk valuesChunk = + rightStampSource.getChunkType().makeResettableWritableChunk()) { for (int slotIndex = 0; slotIndex < slotCount; ++slotIndex) { final long slot = slots.getLong(slotIndex); Index leftIndex = asOfJoinStateManager.getLeftIndex(slot); @@ -266,49 +250,42 @@ private static Table rightStaticAj(JoinControl control, if (leftGroupedSources != null) { if (leftIndex.size() != 1) { - throw new IllegalStateException( - "Groupings should have exactly one index key!"); + throw new IllegalStateException("Groupings should have exactly one index key!"); } leftIndex = leftGroupedSources.getSecond().get(leftIndex.get(0)); } if (arrayValuesCache != null) { - processLeftSlotWithRightCache(stampContext, leftIndex, rightIndex, - redirectionIndex, rightStampSource, keyChunk, valuesChunk, arrayValuesCache, - slot); + processLeftSlotWithRightCache(stampContext, leftIndex, rightIndex, redirectionIndex, + rightStampSource, 
keyChunk, valuesChunk, arrayValuesCache, slot); } else { stampContext.processEntry(leftIndex, rightIndex, redirectionIndex); } } } - final QueryTable result = makeResult(leftTable, rightTable, redirectionIndex, columnsToAdd, - leftTable.isRefreshing()); + final QueryTable result = + makeResult(leftTable, rightTable, redirectionIndex, columnsToAdd, leftTable.isRefreshing()); if (!leftTable.isRefreshing()) { return result; } final ModifiedColumnSet leftKeysOrStamps = - leftTable.newModifiedColumnSet(MatchPair.getLeftColumns(columnsToMatch)); + leftTable.newModifiedColumnSet(MatchPair.getLeftColumns(columnsToMatch)); final LongArraySource updatedSlots = new LongArraySource(); - final ModifiedColumnSet allRightColumns = - result.newModifiedColumnSet(MatchPair.getLeftColumns(columnsToAdd)); + final ModifiedColumnSet allRightColumns = result.newModifiedColumnSet(MatchPair.getLeftColumns(columnsToAdd)); final ModifiedColumnSet.Transformer leftTransformer = - leftTable.newModifiedColumnSetTransformer(result, - leftTable.getDefinition().getColumnNamesArray()); + leftTable.newModifiedColumnSetTransformer(result, leftTable.getDefinition().getColumnNamesArray()); - leftTable.listenForUpdates(new BaseTable.ShiftAwareListenerImpl( - makeListenerDescription(columnsToMatch, stampPair, columnsToAdd, - order == SortingOrder.Descending, disallowExactMatch), - leftTable, result) { + leftTable.listenForUpdates(new BaseTable.ShiftAwareListenerImpl(makeListenerDescription(columnsToMatch, + stampPair, columnsToAdd, order == SortingOrder.Descending, disallowExactMatch), leftTable, result) { @Override public void onUpdate(Update upstream) { final Update downstream = upstream.copy(); upstream.removed.forAllLongs(redirectionIndex::removeVoid); - final boolean keysModified = - upstream.modifiedColumnSet.containsAny(leftKeysOrStamps); + final boolean keysModified = upstream.modifiedColumnSet.containsAny(leftKeysOrStamps); final Index restampKeys; if (keysModified) { @@ -325,38 +302,34 @@ public 
void onUpdate(Update upstream) { if (restampKeys.nonempty()) { final Index.RandomBuilder foundBuilder = Index.FACTORY.getRandomBuilder(); updatedSlots.ensureCapacity(restampKeys.size()); - final int slotCount = asOfJoinStateManager.probeLeft(restampKeys, leftSources, - updatedSlots, foundBuilder); + final int slotCount = + asOfJoinStateManager.probeLeft(restampKeys, leftSources, updatedSlots, foundBuilder); try (final Index foundKeys = foundBuilder.getIndex(); - final Index notFound = restampKeys.minus(foundKeys)) { + final Index notFound = restampKeys.minus(foundKeys)) { notFound.forAllLongs(redirectionIndex::removeVoid); } - try ( - final AsOfStampContext stampContext = - new AsOfStampContext(order, disallowExactMatch, leftStampSource, - rightStampSource, originalRightStampSource); - final ResettableWritableLongChunk keyChunk = - ResettableWritableLongChunk.makeResettableChunk(); - final ResettableWritableChunk valuesChunk = - rightStampSource.getChunkType().makeResettableWritableChunk()) { + try (final AsOfStampContext stampContext = new AsOfStampContext(order, disallowExactMatch, + leftStampSource, rightStampSource, originalRightStampSource); + final ResettableWritableLongChunk keyChunk = + ResettableWritableLongChunk.makeResettableChunk(); + final ResettableWritableChunk valuesChunk = + rightStampSource.getChunkType().makeResettableWritableChunk()) { for (int ii = 0; ii < slotCount; ++ii) { final long slot = updatedSlots.getLong(ii); final Index leftIndex = asOfJoinStateManager.getLeftIndex(slot); final Index rightIndex = asOfJoinStateManager.getRightIndex(slot); assert arrayValuesCache != null; - processLeftSlotWithRightCache(stampContext, leftIndex, rightIndex, - redirectionIndex, rightStampSource, keyChunk, valuesChunk, - arrayValuesCache, slot); + processLeftSlotWithRightCache(stampContext, leftIndex, rightIndex, redirectionIndex, + rightStampSource, keyChunk, valuesChunk, arrayValuesCache, slot); } } } downstream.modifiedColumnSet = 
result.modifiedColumnSet; - leftTransformer.clearAndTransform(upstream.modifiedColumnSet, - downstream.modifiedColumnSet); + leftTransformer.clearAndTransform(upstream.modifiedColumnSet, downstream.modifiedColumnSet); if (keysModified) { downstream.modifiedColumnSet.setAll(allRightColumns); } @@ -372,8 +345,7 @@ public void onUpdate(Update upstream) { } private static void checkColumnConflicts(QueryTable leftTable, MatchPair[] columnsToAdd) { - final Set rightColumnsToAdd = - new HashSet<>(Arrays.asList(MatchPair.getLeftColumns(columnsToAdd))); + final Set rightColumnsToAdd = new HashSet<>(Arrays.asList(MatchPair.getLeftColumns(columnsToAdd))); rightColumnsToAdd.retainAll(leftTable.getDefinition().getColumnNames()); if (!rightColumnsToAdd.isEmpty()) { throw new RuntimeException("Conflicting column names " + rightColumnsToAdd); @@ -401,7 +373,7 @@ private ArrayValuesCache(int size) { long[] getKeys(long slot) { if (StaticChunkedAsOfJoinStateManager.isOverflowLocation(slot)) { return overflowCachedStampKeys - .get(StaticChunkedAsOfJoinStateManager.hashLocationToOverflowLocation(slot)); + .get(StaticChunkedAsOfJoinStateManager.hashLocationToOverflowLocation(slot)); } else { return cacheStampKeys.get(slot); } @@ -410,7 +382,7 @@ long[] getKeys(long slot) { Object getValues(long slot) { if (StaticChunkedAsOfJoinStateManager.isOverflowLocation(slot)) { return overflowCachedStampValues - .get(StaticChunkedAsOfJoinStateManager.hashLocationToOverflowLocation(slot)); + .get(StaticChunkedAsOfJoinStateManager.hashLocationToOverflowLocation(slot)); } else { return cacheStampValues.get(slot); } @@ -418,8 +390,7 @@ Object getValues(long slot) { void setKeysAndValues(long slot, long[] keyIndices, Object StampArray) { if (StaticChunkedAsOfJoinStateManager.isOverflowLocation(slot)) { - final long overflowLocation = - StaticChunkedAsOfJoinStateManager.hashLocationToOverflowLocation(slot); + final long overflowLocation = 
StaticChunkedAsOfJoinStateManager.hashLocationToOverflowLocation(slot); overflowCachedStampKeys.ensureCapacity(overflowLocation + 1); overflowCachedStampValues.ensureCapacity(overflowLocation + 1); overflowCachedStampKeys.set(overflowLocation, keyIndices); @@ -437,12 +408,11 @@ void ensureOverflow(int overflowSize) { } private static void processLeftSlotWithRightCache(AsOfStampContext stampContext, - Index leftIndex, Index rightIndex, RedirectionIndex redirectionIndex, - ColumnSource rightStampSource, - ResettableWritableLongChunk keyChunk, - ResettableWritableChunk valuesChunk, - ArrayValuesCache arrayValuesCache, - long slot) { + Index leftIndex, Index rightIndex, RedirectionIndex redirectionIndex, + ColumnSource rightStampSource, + ResettableWritableLongChunk keyChunk, ResettableWritableChunk valuesChunk, + ArrayValuesCache arrayValuesCache, + long slot) { final long[] rightStampKeys = arrayValuesCache.getKeys(slot); if (rightStampKeys == null) { final int rightSize = rightIndex.intSize(); @@ -456,11 +426,9 @@ private static void processLeftSlotWithRightCache(AsOfStampContext stampContext, stampContext.getAndCompactStamps(rightIndex, keyChunk, valuesChunk); if (keyChunk.size() < rightSize) { - // we will hold onto these things "forever", so we would like to avoid making them - // too large + // we will hold onto these things "forever", so we would like to avoid making them too large keyIndices = Arrays.copyOf(keyIndices, keyChunk.size()); - final Object compactedRightValues = - rightStampSource.getChunkType().makeArray(keyChunk.size()); + final Object compactedRightValues = rightStampSource.getChunkType().makeArray(keyChunk.size()); // noinspection SuspiciousSystemArraycopy System.arraycopy(rightStampArray, 0, compactedRightValues, 0, keyChunk.size()); rightStampArray = compactedRightValues; @@ -479,20 +447,17 @@ private static void processLeftSlotWithRightCache(AsOfStampContext stampContext, } /** - * If the asOfJoinStateManager is null, it means we are passing 
in the leftIndex. If the - * leftIndex is null; we are passing in the asOfJoinStateManager and should obtain the leftIndex - * from the state manager if necessary. + * If the asOfJoinStateManager is null, it means we are passing in the leftIndex. If the leftIndex is null; we are + * passing in the asOfJoinStateManager and should obtain the leftIndex from the state manager if necessary. */ - private static void getCachedLeftStampsAndKeys( - RightIncrementalChunkedAsOfJoinStateManager asOfJoinStateManager, - Index leftIndex, - ColumnSource leftStampSource, - SizedSafeCloseable fillContext, - SizedSafeCloseable> sortContext, - ResettableWritableLongChunk keyChunk, - ResettableWritableChunk valuesChunk, - ArrayValuesCache arrayValuesCache, - long slot) { + private static void getCachedLeftStampsAndKeys(RightIncrementalChunkedAsOfJoinStateManager asOfJoinStateManager, + Index leftIndex, + ColumnSource leftStampSource, + SizedSafeCloseable fillContext, + SizedSafeCloseable> sortContext, + ResettableWritableLongChunk keyChunk, ResettableWritableChunk valuesChunk, + ArrayValuesCache arrayValuesCache, + long slot) { final long[] leftStampKeys = arrayValuesCache.getKeys(slot); if (leftStampKeys == null) { if (leftIndex == null) { @@ -523,36 +488,34 @@ private static void getCachedLeftStampsAndKeys( } private static Table zeroKeyAj(JoinControl control, QueryTable leftTable, QueryTable rightTable, - MatchPair[] columnsToAdd, MatchPair stampPair, ColumnSource leftStampSource, - ColumnSource originalRightStampSource, ColumnSource rightStampSource, - SortingOrder order, boolean disallowExactMatch, final RedirectionIndex redirectionIndex) { + MatchPair[] columnsToAdd, MatchPair stampPair, ColumnSource leftStampSource, + ColumnSource originalRightStampSource, ColumnSource rightStampSource, SortingOrder order, + boolean disallowExactMatch, final RedirectionIndex redirectionIndex) { if (rightTable.isLive() && leftTable.isLive()) { - return zeroKeyAjBothIncremental(control, leftTable, 
rightTable, columnsToAdd, stampPair, - leftStampSource, rightStampSource, order, disallowExactMatch, redirectionIndex); + return zeroKeyAjBothIncremental(control, leftTable, rightTable, columnsToAdd, stampPair, leftStampSource, + rightStampSource, order, disallowExactMatch, redirectionIndex); } else if (rightTable.isLive()) { - return zeroKeyAjRightIncremental(control, leftTable, rightTable, columnsToAdd, - stampPair, leftStampSource, rightStampSource, order, disallowExactMatch, - redirectionIndex); + return zeroKeyAjRightIncremental(control, leftTable, rightTable, columnsToAdd, stampPair, leftStampSource, + rightStampSource, order, disallowExactMatch, redirectionIndex); } else { - return zeroKeyAjRightStatic(leftTable, rightTable, columnsToAdd, stampPair, - leftStampSource, originalRightStampSource, rightStampSource, order, - disallowExactMatch, redirectionIndex); + return zeroKeyAjRightStatic(leftTable, rightTable, columnsToAdd, stampPair, leftStampSource, + originalRightStampSource, rightStampSource, order, disallowExactMatch, redirectionIndex); } } private static Table rightTickingLeftStaticAj(JoinControl control, - QueryTable leftTable, - QueryTable rightTable, - MatchPair[] columnsToMatch, - MatchPair[] columnsToAdd, - SortingOrder order, - boolean disallowExactMatch, - MatchPair stampPair, - ColumnSource[] leftSources, - ColumnSource[] rightSources, - ColumnSource leftStampSource, - ColumnSource rightStampSource, - RedirectionIndex redirectionIndex) { + QueryTable leftTable, + QueryTable rightTable, + MatchPair[] columnsToMatch, + MatchPair[] columnsToAdd, + SortingOrder order, + boolean disallowExactMatch, + MatchPair stampPair, + ColumnSource[] leftSources, + ColumnSource[] rightSources, + ColumnSource leftStampSource, + ColumnSource rightStampSource, + RedirectionIndex redirectionIndex) { if (leftTable.isRefreshing()) { throw new IllegalStateException(); } @@ -561,74 +524,66 @@ private static Table rightTickingLeftStaticAj(JoinControl control, final 
ChunkType stampChunkType = rightStampSource.getChunkType(); final Supplier ssaFactory = - SegmentedSortedArray.makeFactory(stampChunkType, reverse, control.rightSsaNodeSize()); + SegmentedSortedArray.makeFactory(stampChunkType, reverse, control.rightSsaNodeSize()); final ChunkSsaStamp chunkSsaStamp = ChunkSsaStamp.make(stampChunkType, reverse); final int tableSize = control.initialBuildSize(); final RightIncrementalChunkedAsOfJoinStateManager asOfJoinStateManager = - new RightIncrementalChunkedAsOfJoinStateManager(leftSources, tableSize); + new RightIncrementalChunkedAsOfJoinStateManager(leftSources, tableSize); final LongArraySource slots = new LongArraySource(); - final int slotCount = - asOfJoinStateManager.buildFromLeftSide(leftTable.getIndex(), leftSources, slots); + final int slotCount = asOfJoinStateManager.buildFromLeftSide(leftTable.getIndex(), leftSources, slots); asOfJoinStateManager.probeRightInitial(rightTable.getIndex(), rightSources); - final ArrayValuesCache leftValuesCache = - new ArrayValuesCache(asOfJoinStateManager.getTableSize()); + final ArrayValuesCache leftValuesCache = new ArrayValuesCache(asOfJoinStateManager.getTableSize()); leftValuesCache.ensureOverflow(asOfJoinStateManager.getOverflowSize()); final SizedSafeCloseable> sortContext = - new SizedSafeCloseable<>( - size -> LongSortKernel.makeContext(stampChunkType, order, size, true)); + new SizedSafeCloseable<>(size -> LongSortKernel.makeContext(stampChunkType, order, size, true)); final SizedSafeCloseable leftStampFillContext = - new SizedSafeCloseable<>(leftStampSource::makeFillContext); + new SizedSafeCloseable<>(leftStampSource::makeFillContext); final SizedSafeCloseable rightStampFillContext = - new SizedSafeCloseable<>(rightStampSource::makeFillContext); + new SizedSafeCloseable<>(rightStampSource::makeFillContext); final SizedChunk rightValues = new SizedChunk<>(stampChunkType); final SizedLongChunk rightKeyIndices = new SizedLongChunk<>(); final SizedLongChunk rightKeysForLeft = 
new SizedLongChunk<>(); // if we have an error the closeableList cleans up for us; if not they can be used later - try ( - final ResettableWritableLongChunk leftKeyChunk = + try (final ResettableWritableLongChunk leftKeyChunk = ResettableWritableLongChunk.makeResettableChunk(); - final ResettableWritableChunk leftValuesChunk = - rightStampSource.getChunkType().makeResettableWritableChunk()) { + final ResettableWritableChunk leftValuesChunk = + rightStampSource.getChunkType().makeResettableWritableChunk()) { for (int slotIndex = 0; slotIndex < slotCount; ++slotIndex) { final long slot = slots.getLong(slotIndex); final Index leftIndex = asOfJoinStateManager.getAndClearLeftIndex(slot); assert leftIndex != null; assert leftIndex.size() > 0; - final SegmentedSortedArray rightSsa = - asOfJoinStateManager.getRightSsa(slot, (rightIndex) -> { - final SegmentedSortedArray ssa = ssaFactory.get(); - final int slotSize = rightIndex.intSize(); - if (slotSize > 0) { - rightStampSource.fillChunk( - rightStampFillContext.ensureCapacity(slotSize), + final SegmentedSortedArray rightSsa = asOfJoinStateManager.getRightSsa(slot, (rightIndex) -> { + final SegmentedSortedArray ssa = ssaFactory.get(); + final int slotSize = rightIndex.intSize(); + if (slotSize > 0) { + rightStampSource.fillChunk(rightStampFillContext.ensureCapacity(slotSize), rightValues.ensureCapacity(slotSize), rightIndex); - rightIndex - .fillKeyIndicesChunk(rightKeyIndices.ensureCapacity(slotSize)); - sortContext.ensureCapacity(slotSize).sort(rightKeyIndices.get(), - rightValues.get()); - ssa.insert(rightValues.get(), rightKeyIndices.get()); - } - return ssa; - }); + rightIndex.fillKeyIndicesChunk(rightKeyIndices.ensureCapacity(slotSize)); + sortContext.ensureCapacity(slotSize).sort(rightKeyIndices.get(), rightValues.get()); + ssa.insert(rightValues.get(), rightKeyIndices.get()); + } + return ssa; + }); - getCachedLeftStampsAndKeys(null, leftIndex, leftStampSource, leftStampFillContext, - sortContext, leftKeyChunk, 
leftValuesChunk, leftValuesCache, slot); + getCachedLeftStampsAndKeys(null, leftIndex, leftStampSource, leftStampFillContext, sortContext, + leftKeyChunk, leftValuesChunk, leftValuesCache, slot); if (rightSsa.size() == 0) { continue; } final WritableLongChunk rightKeysForLeftChunk = - rightKeysForLeft.ensureCapacity(leftIndex.intSize()); + rightKeysForLeft.ensureCapacity(leftIndex.intSize()); - chunkSsaStamp.processEntry(leftValuesChunk, leftKeyChunk, rightSsa, - rightKeysForLeftChunk, disallowExactMatch); + chunkSsaStamp.processEntry(leftValuesChunk, leftKeyChunk, rightSsa, rightKeysForLeftChunk, + disallowExactMatch); for (int ii = 0; ii < leftKeyChunk.size(); ++ii) { final long index = rightKeysForLeftChunk.get(ii); @@ -640,181 +595,154 @@ private static Table rightTickingLeftStaticAj(JoinControl control, } // we will close them now, but the listener is able to resurrect them as needed - SafeCloseable.closeArray(sortContext, leftStampFillContext, rightStampFillContext, - rightValues, rightKeyIndices, rightKeysForLeft); + SafeCloseable.closeArray(sortContext, leftStampFillContext, rightStampFillContext, rightValues, rightKeyIndices, + rightKeysForLeft); - final QueryTable result = - makeResult(leftTable, rightTable, redirectionIndex, columnsToAdd, true); + final QueryTable result = makeResult(leftTable, rightTable, redirectionIndex, columnsToAdd, true); final ModifiedColumnSet rightMatchColumns = - rightTable.newModifiedColumnSet(MatchPair.getRightColumns(columnsToMatch)); - final ModifiedColumnSet rightStampColumn = - rightTable.newModifiedColumnSet(stampPair.right()); - final ModifiedColumnSet rightAddedColumns = - result.newModifiedColumnSet(MatchPair.getLeftColumns(columnsToAdd)); + rightTable.newModifiedColumnSet(MatchPair.getRightColumns(columnsToMatch)); + final ModifiedColumnSet rightStampColumn = rightTable.newModifiedColumnSet(stampPair.right()); + final ModifiedColumnSet rightAddedColumns = 
result.newModifiedColumnSet(MatchPair.getLeftColumns(columnsToAdd)); final ModifiedColumnSet.Transformer rightTransformer = - rightTable.newModifiedColumnSetTransformer(result, columnsToAdd); + rightTable.newModifiedColumnSetTransformer(result, columnsToAdd); final ObjectArraySource sequentialBuilders = - new ObjectArraySource<>(Index.SequentialBuilder.class); + new ObjectArraySource<>(Index.SequentialBuilder.class); - rightTable.listenForUpdates( - new BaseTable.ShiftAwareListenerImpl(makeListenerDescription(columnsToMatch, stampPair, - columnsToAdd, reverse, disallowExactMatch), rightTable, result) { - @Override - public void onUpdate(Update upstream) { - final Update downstream = new Update(); - downstream.added = Index.FACTORY.getEmptyIndex(); - downstream.removed = Index.FACTORY.getEmptyIndex(); - downstream.shifted = IndexShiftData.EMPTY; - downstream.modifiedColumnSet = result.modifiedColumnSet; - - final boolean keysModified = - upstream.modifiedColumnSet.containsAny(rightMatchColumns); - final boolean stampModified = - upstream.modifiedColumnSet.containsAny(rightStampColumn); - - final Index.RandomBuilder modifiedBuilder = Index.FACTORY.getRandomBuilder(); - - final Index restampRemovals; - final Index restampAdditions; - if (keysModified || stampModified) { - restampAdditions = upstream.added.union(upstream.modified); - restampRemovals = upstream.removed.union(upstream.getModifiedPreShift()); - } else { - restampAdditions = upstream.added; - restampRemovals = upstream.removed; - } + rightTable.listenForUpdates(new BaseTable.ShiftAwareListenerImpl( + makeListenerDescription(columnsToMatch, stampPair, columnsToAdd, reverse, disallowExactMatch), + rightTable, result) { + @Override + public void onUpdate(Update upstream) { + final Update downstream = new Update(); + downstream.added = Index.FACTORY.getEmptyIndex(); + downstream.removed = Index.FACTORY.getEmptyIndex(); + downstream.shifted = IndexShiftData.EMPTY; + downstream.modifiedColumnSet = 
result.modifiedColumnSet; + + final boolean keysModified = upstream.modifiedColumnSet.containsAny(rightMatchColumns); + final boolean stampModified = upstream.modifiedColumnSet.containsAny(rightStampColumn); + + final Index.RandomBuilder modifiedBuilder = Index.FACTORY.getRandomBuilder(); - sequentialBuilders - .ensureCapacity(Math.max(restampRemovals.size(), restampAdditions.size())); + final Index restampRemovals; + final Index restampAdditions; + if (keysModified || stampModified) { + restampAdditions = upstream.added.union(upstream.modified); + restampRemovals = upstream.removed.union(upstream.getModifiedPreShift()); + } else { + restampAdditions = upstream.added; + restampRemovals = upstream.removed; + } - // We first do a probe pass, adding all of the removals to a builder in the as - // of join state manager - final int removedSlotCount = asOfJoinStateManager - .markForRemoval(restampRemovals, rightSources, slots, sequentialBuilders); + sequentialBuilders.ensureCapacity(Math.max(restampRemovals.size(), restampAdditions.size())); - // Now that everything is marked, process the removals state by state, just as - // if we were doing the zero - // key case: when removing a row, record the stamp, redirection key, and prior - // redirection key. Binary search - // in the left for the removed key to find the smallest value geq the removed - // right. Update all rows - // with the removed redirection to the previous key. + // We first do a probe pass, adding all of the removals to a builder in the as of join state manager + final int removedSlotCount = + asOfJoinStateManager.markForRemoval(restampRemovals, rightSources, slots, sequentialBuilders); + // Now that everything is marked, process the removals state by state, just as if we were doing the zero + // key case: when removing a row, record the stamp, redirection key, and prior redirection key. Binary + // search + // in the left for the removed key to find the smallest value geq the removed right. 
Update all rows + // with the removed redirection to the previous key. - try ( - final ResettableWritableLongChunk leftKeyChunk = - ResettableWritableLongChunk.makeResettableChunk(); + + try (final ResettableWritableLongChunk leftKeyChunk = + ResettableWritableLongChunk.makeResettableChunk(); final ResettableWritableChunk leftValuesChunk = - rightStampSource.getChunkType().makeResettableWritableChunk(); - final SizedLongChunk priorRedirections = - new SizedLongChunk<>()) { - for (int slotIndex = 0; slotIndex < removedSlotCount; ++slotIndex) { - final long slot = slots.getLong(slotIndex); + rightStampSource.getChunkType().makeResettableWritableChunk(); + final SizedLongChunk priorRedirections = new SizedLongChunk<>()) { + for (int slotIndex = 0; slotIndex < removedSlotCount; ++slotIndex) { + final long slot = slots.getLong(slotIndex); - final SegmentedSortedArray rightSsa = - asOfJoinStateManager.getRightSsa(slot); + final SegmentedSortedArray rightSsa = asOfJoinStateManager.getRightSsa(slot); - final Index rightRemoved = sequentialBuilders.get(slotIndex).getIndex(); - sequentialBuilders.set(slotIndex, null); - final int slotSize = rightRemoved.intSize(); + final Index rightRemoved = sequentialBuilders.get(slotIndex).getIndex(); + sequentialBuilders.set(slotIndex, null); + final int slotSize = rightRemoved.intSize(); - rightStampSource.fillPrevChunk( - rightStampFillContext.ensureCapacity(slotSize), + rightStampSource.fillPrevChunk(rightStampFillContext.ensureCapacity(slotSize), rightValues.ensureCapacity(slotSize), rightRemoved); - rightRemoved - .fillKeyIndicesChunk(rightKeyIndices.ensureCapacity(slotSize)); - sortContext.ensureCapacity(slotSize).sort(rightKeyIndices.get(), - rightValues.get()); + rightRemoved.fillKeyIndicesChunk(rightKeyIndices.ensureCapacity(slotSize)); + sortContext.ensureCapacity(slotSize).sort(rightKeyIndices.get(), rightValues.get()); - getCachedLeftStampsAndKeys(asOfJoinStateManager, null, leftStampSource, - leftStampFillContext, 
sortContext, leftKeyChunk, leftValuesChunk, - leftValuesCache, slot); + getCachedLeftStampsAndKeys(asOfJoinStateManager, null, leftStampSource, leftStampFillContext, + sortContext, leftKeyChunk, leftValuesChunk, leftValuesCache, slot); - priorRedirections.ensureCapacity(slotSize).setSize(slotSize); + priorRedirections.ensureCapacity(slotSize).setSize(slotSize); - rightSsa.removeAndGetPrior(rightValues.get(), rightKeyIndices.get(), - priorRedirections.get()); + rightSsa.removeAndGetPrior(rightValues.get(), rightKeyIndices.get(), priorRedirections.get()); - chunkSsaStamp.processRemovals(leftValuesChunk, leftKeyChunk, - rightValues.get(), rightKeyIndices.get(), priorRedirections.get(), - redirectionIndex, modifiedBuilder, disallowExactMatch); + chunkSsaStamp.processRemovals(leftValuesChunk, leftKeyChunk, rightValues.get(), + rightKeyIndices.get(), priorRedirections.get(), redirectionIndex, modifiedBuilder, + disallowExactMatch); - rightRemoved.close(); - } + rightRemoved.close(); } + } - // After all the removals are done, we do the shifts - if (upstream.shifted.nonempty()) { - try (final Index fullPrevIndex = rightTable.getIndex().getPrevIndex(); + // After all the removals are done, we do the shifts + if (upstream.shifted.nonempty()) { + try (final Index fullPrevIndex = rightTable.getIndex().getPrevIndex(); final Index previousToShift = fullPrevIndex.minus(restampRemovals)) { - if (previousToShift.nonempty()) { - try ( - final ResettableWritableLongChunk leftKeyChunk = - ResettableWritableLongChunk.makeResettableChunk(); + if (previousToShift.nonempty()) { + try (final ResettableWritableLongChunk leftKeyChunk = + ResettableWritableLongChunk.makeResettableChunk(); final ResettableWritableChunk leftValuesChunk = - rightStampSource.getChunkType() - .makeResettableWritableChunk()) { - final IndexShiftData.Iterator sit = - upstream.shifted.applyIterator(); - while (sit.hasNext()) { - sit.next(); - final Index indexToShift = previousToShift - 
.subindexByKey(sit.beginRange(), sit.endRange()); - if (indexToShift.empty()) { - indexToShift.close(); - continue; - } - - final int shiftedSlots = - asOfJoinStateManager.gatherShiftIndex(indexToShift, - rightSources, slots, sequentialBuilders); + rightStampSource.getChunkType().makeResettableWritableChunk()) { + final IndexShiftData.Iterator sit = upstream.shifted.applyIterator(); + while (sit.hasNext()) { + sit.next(); + final Index indexToShift = + previousToShift.subindexByKey(sit.beginRange(), sit.endRange()); + if (indexToShift.empty()) { indexToShift.close(); + continue; + } + + final int shiftedSlots = asOfJoinStateManager.gatherShiftIndex(indexToShift, + rightSources, slots, sequentialBuilders); + indexToShift.close(); - for (int slotIndex = - 0; slotIndex < shiftedSlots; ++slotIndex) { - final long slot = slots.getLong(slotIndex); - try (final Index slotShiftIndex = + for (int slotIndex = 0; slotIndex < shiftedSlots; ++slotIndex) { + final long slot = slots.getLong(slotIndex); + try (final Index slotShiftIndex = sequentialBuilders.get(slotIndex).getIndex()) { - sequentialBuilders.set(slotIndex, null); + sequentialBuilders.set(slotIndex, null); - final int shiftSize = slotShiftIndex.intSize(); + final int shiftSize = slotShiftIndex.intSize(); - getCachedLeftStampsAndKeys(asOfJoinStateManager, - null, leftStampSource, leftStampFillContext, - sortContext, leftKeyChunk, leftValuesChunk, + getCachedLeftStampsAndKeys(asOfJoinStateManager, null, leftStampSource, + leftStampFillContext, sortContext, leftKeyChunk, leftValuesChunk, leftValuesCache, slot); - rightStampSource.fillPrevChunk( + rightStampSource.fillPrevChunk( rightStampFillContext.ensureCapacity(shiftSize), - rightValues.ensureCapacity(shiftSize), - slotShiftIndex); + rightValues.ensureCapacity(shiftSize), slotShiftIndex); - final SegmentedSortedArray rightSsa = + final SegmentedSortedArray rightSsa = asOfJoinStateManager.getRightSsa(slot); - slotShiftIndex.fillKeyIndicesChunk( - 
rightKeyIndices.ensureCapacity(shiftSize)); - sortContext.ensureCapacity(shiftSize) - .sort(rightKeyIndices.get(), rightValues.get()); + slotShiftIndex + .fillKeyIndicesChunk(rightKeyIndices.ensureCapacity(shiftSize)); + sortContext.ensureCapacity(shiftSize).sort(rightKeyIndices.get(), + rightValues.get()); - if (sit.polarityReversed()) { - chunkSsaStamp.applyShift(leftValuesChunk, - leftKeyChunk, rightValues.get(), - rightKeyIndices.get(), sit.shiftDelta(), + if (sit.polarityReversed()) { + chunkSsaStamp.applyShift(leftValuesChunk, leftKeyChunk, + rightValues.get(), rightKeyIndices.get(), sit.shiftDelta(), redirectionIndex, disallowExactMatch); - rightSsa.applyShiftReverse(rightValues.get(), - rightKeyIndices.get(), sit.shiftDelta()); - } else { - chunkSsaStamp.applyShift(leftValuesChunk, - leftKeyChunk, rightValues.get(), - rightKeyIndices.get(), sit.shiftDelta(), + rightSsa.applyShiftReverse(rightValues.get(), rightKeyIndices.get(), + sit.shiftDelta()); + } else { + chunkSsaStamp.applyShift(leftValuesChunk, leftKeyChunk, + rightValues.get(), rightKeyIndices.get(), sit.shiftDelta(), redirectionIndex, disallowExactMatch); - rightSsa.applyShift(rightValues.get(), - rightKeyIndices.get(), sit.shiftDelta()); - } + rightSsa.applyShift(rightValues.get(), rightKeyIndices.get(), + sit.shiftDelta()); } } } @@ -822,145 +750,129 @@ public void onUpdate(Update upstream) { } } } + } - // next we do the additions - final int addedSlotCount = asOfJoinStateManager.probeAdditions(restampAdditions, - rightSources, slots, sequentialBuilders); + // next we do the additions + final int addedSlotCount = + asOfJoinStateManager.probeAdditions(restampAdditions, rightSources, slots, sequentialBuilders); - try (final SizedChunk nextRightValue = new SizedChunk<>(stampChunkType); + try (final SizedChunk nextRightValue = new SizedChunk<>(stampChunkType); final SizedChunk rightStampChunk = new SizedChunk<>(stampChunkType); final SizedLongChunk insertedIndices = new SizedLongChunk<>(); final 
SizedBooleanChunk retainStamps = new SizedBooleanChunk<>(); final SizedSafeCloseable rightStampFillContext = - new SizedSafeCloseable<>(rightStampSource::makeFillContext); + new SizedSafeCloseable<>(rightStampSource::makeFillContext); final ResettableWritableLongChunk leftKeyChunk = - ResettableWritableLongChunk.makeResettableChunk(); + ResettableWritableLongChunk.makeResettableChunk(); final ResettableWritableChunk leftValuesChunk = - rightStampSource.getChunkType().makeResettableWritableChunk()) { - final ChunkEquals stampChunkEquals = ChunkEquals.makeEqual(stampChunkType); - final CompactKernel stampCompact = - CompactKernel.makeCompact(stampChunkType); - - // When adding a row to the right hand side: we need to know which left hand - // side might be - // responsive. If we are a duplicate stamp and not the last one, we ignore - // it. Next, we should binary - // search in the left for the first value >=, everything up until the next - // extant right value should be - // restamped with our value - for (int slotIndex = 0; slotIndex < addedSlotCount; ++slotIndex) { - final long slot = slots.getLong(slotIndex); + rightStampSource.getChunkType().makeResettableWritableChunk()) { + final ChunkEquals stampChunkEquals = ChunkEquals.makeEqual(stampChunkType); + final CompactKernel stampCompact = CompactKernel.makeCompact(stampChunkType); + + // When adding a row to the right hand side: we need to know which left hand side might be + // responsive. If we are a duplicate stamp and not the last one, we ignore it. 
Next, we should + // binary + // search in the left for the first value >=, everything up until the next extant right value should + // be + // restamped with our value + for (int slotIndex = 0; slotIndex < addedSlotCount; ++slotIndex) { + final long slot = slots.getLong(slotIndex); - final SegmentedSortedArray rightSsa = - asOfJoinStateManager.getRightSsa(slot); + final SegmentedSortedArray rightSsa = asOfJoinStateManager.getRightSsa(slot); - final Index rightAdded = sequentialBuilders.get(slotIndex).getIndex(); - sequentialBuilders.set(slotIndex, null); + final Index rightAdded = sequentialBuilders.get(slotIndex).getIndex(); + sequentialBuilders.set(slotIndex, null); - final int rightSize = rightAdded.intSize(); + final int rightSize = rightAdded.intSize(); - rightStampSource.fillChunk( - rightStampFillContext.ensureCapacity(rightSize), + rightStampSource.fillChunk(rightStampFillContext.ensureCapacity(rightSize), rightStampChunk.ensureCapacity(rightSize), rightAdded); - rightAdded - .fillKeyIndicesChunk(insertedIndices.ensureCapacity(rightSize)); - sortContext.ensureCapacity(rightSize).sort(insertedIndices.get(), - rightStampChunk.get()); - - final int valuesWithNext = rightSsa.insertAndGetNextValue( - rightStampChunk.get(), insertedIndices.get(), - nextRightValue.ensureCapacity(rightSize)); - - final boolean endsWithLastValue = - valuesWithNext != rightStampChunk.get().size(); - if (endsWithLastValue) { - Assert.eq(valuesWithNext, "valuesWithNext", - rightStampChunk.get().size() - 1, "rightStampChunk.size() - 1"); - rightStampChunk.get().setSize(valuesWithNext); - stampChunkEquals.notEqual(rightStampChunk.get(), - nextRightValue.get(), retainStamps.ensureCapacity(rightSize)); - stampCompact.compact(nextRightValue.get(), retainStamps.get()); - - retainStamps.get().setSize(rightSize); - retainStamps.get().set(valuesWithNext, true); - rightStampChunk.get().setSize(rightSize); - } else { - // remove duplicates - stampChunkEquals.notEqual(rightStampChunk.get(), - 
nextRightValue.get(), retainStamps.ensureCapacity(rightSize)); - stampCompact.compact(nextRightValue.get(), retainStamps.get()); - } - LongCompactKernel.compact(insertedIndices.get(), retainStamps.get()); - stampCompact.compact(rightStampChunk.get(), retainStamps.get()); + rightAdded.fillKeyIndicesChunk(insertedIndices.ensureCapacity(rightSize)); + sortContext.ensureCapacity(rightSize).sort(insertedIndices.get(), rightStampChunk.get()); + + final int valuesWithNext = rightSsa.insertAndGetNextValue(rightStampChunk.get(), + insertedIndices.get(), nextRightValue.ensureCapacity(rightSize)); + + final boolean endsWithLastValue = valuesWithNext != rightStampChunk.get().size(); + if (endsWithLastValue) { + Assert.eq(valuesWithNext, "valuesWithNext", rightStampChunk.get().size() - 1, + "rightStampChunk.size() - 1"); + rightStampChunk.get().setSize(valuesWithNext); + stampChunkEquals.notEqual(rightStampChunk.get(), nextRightValue.get(), + retainStamps.ensureCapacity(rightSize)); + stampCompact.compact(nextRightValue.get(), retainStamps.get()); + + retainStamps.get().setSize(rightSize); + retainStamps.get().set(valuesWithNext, true); + rightStampChunk.get().setSize(rightSize); + } else { + // remove duplicates + stampChunkEquals.notEqual(rightStampChunk.get(), nextRightValue.get(), + retainStamps.ensureCapacity(rightSize)); + stampCompact.compact(nextRightValue.get(), retainStamps.get()); + } + LongCompactKernel.compact(insertedIndices.get(), retainStamps.get()); + stampCompact.compact(rightStampChunk.get(), retainStamps.get()); - getCachedLeftStampsAndKeys(asOfJoinStateManager, null, leftStampSource, - leftStampFillContext, sortContext, leftKeyChunk, leftValuesChunk, - leftValuesCache, slot); + getCachedLeftStampsAndKeys(asOfJoinStateManager, null, leftStampSource, leftStampFillContext, + sortContext, leftKeyChunk, leftValuesChunk, leftValuesCache, slot); - chunkSsaStamp.processInsertion(leftValuesChunk, leftKeyChunk, - rightStampChunk.get(), insertedIndices.get(), 
nextRightValue.get(), - redirectionIndex, modifiedBuilder, endsWithLastValue, - disallowExactMatch); - } + chunkSsaStamp.processInsertion(leftValuesChunk, leftKeyChunk, rightStampChunk.get(), + insertedIndices.get(), nextRightValue.get(), redirectionIndex, modifiedBuilder, + endsWithLastValue, disallowExactMatch); + } - // and then finally we handle the case where the keys and stamps were not - // modified, but we must identify - // the responsive modifications. - if (!keysModified && !stampModified && upstream.modified.nonempty()) { - // next we do the additions - final int modifiedSlotCount = asOfJoinStateManager.gatherModifications( - upstream.modified, rightSources, slots, sequentialBuilders); + // and then finally we handle the case where the keys and stamps were not modified, but we must + // identify + // the responsive modifications. + if (!keysModified && !stampModified && upstream.modified.nonempty()) { + // next we do the additions + final int modifiedSlotCount = asOfJoinStateManager.gatherModifications(upstream.modified, + rightSources, slots, sequentialBuilders); - for (int slotIndex = 0; slotIndex < modifiedSlotCount; ++slotIndex) { - final long slot = slots.getLong(slotIndex); + for (int slotIndex = 0; slotIndex < modifiedSlotCount; ++slotIndex) { + final long slot = slots.getLong(slotIndex); - try (final Index rightModified = - sequentialBuilders.get(slotIndex).getIndex()) { - sequentialBuilders.set(slotIndex, null); - final int rightSize = rightModified.intSize(); + try (final Index rightModified = sequentialBuilders.get(slotIndex).getIndex()) { + sequentialBuilders.set(slotIndex, null); + final int rightSize = rightModified.intSize(); - rightStampSource.fillChunk( - rightStampFillContext.ensureCapacity(rightSize), + rightStampSource.fillChunk(rightStampFillContext.ensureCapacity(rightSize), rightValues.ensureCapacity(rightSize), rightModified); - rightModified.fillKeyIndicesChunk( - rightKeyIndices.ensureCapacity(rightSize)); - 
sortContext.ensureCapacity(rightSize) - .sort(rightKeyIndices.get(), rightValues.get()); - - getCachedLeftStampsAndKeys(asOfJoinStateManager, null, - leftStampSource, leftStampFillContext, sortContext, - leftKeyChunk, leftValuesChunk, leftValuesCache, slot); - - chunkSsaStamp.findModified(0, leftValuesChunk, leftKeyChunk, - redirectionIndex, rightValues.get(), rightKeyIndices.get(), - modifiedBuilder, disallowExactMatch); - } - } + rightModified.fillKeyIndicesChunk(rightKeyIndices.ensureCapacity(rightSize)); + sortContext.ensureCapacity(rightSize).sort(rightKeyIndices.get(), rightValues.get()); + + getCachedLeftStampsAndKeys(asOfJoinStateManager, null, leftStampSource, + leftStampFillContext, sortContext, leftKeyChunk, leftValuesChunk, + leftValuesCache, slot); - rightTransformer.transform(upstream.modifiedColumnSet, - downstream.modifiedColumnSet); + chunkSsaStamp.findModified(0, leftValuesChunk, leftKeyChunk, redirectionIndex, + rightValues.get(), rightKeyIndices.get(), modifiedBuilder, disallowExactMatch); + } } + + rightTransformer.transform(upstream.modifiedColumnSet, downstream.modifiedColumnSet); } + } - SafeCloseable.closeArray(sortContext, leftStampFillContext, - rightStampFillContext, rightValues, rightKeyIndices, rightKeysForLeft); + SafeCloseable.closeArray(sortContext, leftStampFillContext, rightStampFillContext, rightValues, + rightKeyIndices, rightKeysForLeft); - downstream.modified = modifiedBuilder.getIndex(); + downstream.modified = modifiedBuilder.getIndex(); - final boolean processedAdditionsOrRemovals = - removedSlotCount > 0 || addedSlotCount > 0; - if (keysModified || stampModified || processedAdditionsOrRemovals) { - downstream.modifiedColumnSet.setAll(rightAddedColumns); - } + final boolean processedAdditionsOrRemovals = removedSlotCount > 0 || addedSlotCount > 0; + if (keysModified || stampModified || processedAdditionsOrRemovals) { + downstream.modifiedColumnSet.setAll(rightAddedColumns); + } - result.notifyListeners(downstream); + 
result.notifyListeners(downstream); - if (stampModified || keysModified) { - restampAdditions.close(); - restampRemovals.close(); - } + if (stampModified || keysModified) { + restampAdditions.close(); + restampRemovals.close(); } - }); + } + }); return result; } @@ -969,44 +881,40 @@ public interface SsaFactory extends Function, SafeC } private static Table bothIncrementalAj(JoinControl control, - QueryTable leftTable, - QueryTable rightTable, - MatchPair[] columnsToMatch, - MatchPair[] columnsToAdd, - SortingOrder order, - boolean disallowExactMatch, - MatchPair stampPair, - ColumnSource[] leftSources, - ColumnSource[] rightSources, - ColumnSource leftStampSource, - ColumnSource rightStampSource, - RedirectionIndex redirectionIndex) { + QueryTable leftTable, + QueryTable rightTable, + MatchPair[] columnsToMatch, + MatchPair[] columnsToAdd, + SortingOrder order, + boolean disallowExactMatch, + MatchPair stampPair, + ColumnSource[] leftSources, + ColumnSource[] rightSources, + ColumnSource leftStampSource, + ColumnSource rightStampSource, + RedirectionIndex redirectionIndex) { final boolean reverse = order == SortingOrder.Descending; final ChunkType stampChunkType = rightStampSource.getChunkType(); final Supplier ssaFactory = - SegmentedSortedArray.makeFactory(stampChunkType, reverse, control.rightSsaNodeSize()); + SegmentedSortedArray.makeFactory(stampChunkType, reverse, control.rightSsaNodeSize()); final SsaSsaStamp ssaSsaStamp = SsaSsaStamp.make(stampChunkType, reverse); final RightIncrementalChunkedAsOfJoinStateManager asOfJoinStateManager = - new RightIncrementalChunkedAsOfJoinStateManager(leftSources, - control.initialBuildSize()); + new RightIncrementalChunkedAsOfJoinStateManager(leftSources, control.initialBuildSize()); final LongArraySource slots = new LongArraySource(); - int slotCount = - asOfJoinStateManager.buildFromLeftSide(leftTable.getIndex(), leftSources, slots); - slotCount = asOfJoinStateManager.buildFromRightSide(rightTable.getIndex(), 
rightSources, - slots, slotCount); + int slotCount = asOfJoinStateManager.buildFromLeftSide(leftTable.getIndex(), leftSources, slots); + slotCount = asOfJoinStateManager.buildFromRightSide(rightTable.getIndex(), rightSources, slots, slotCount); - // These contexts and chunks will be closed when the SSA factory itself is closed by the - // destroy function of the BucketedChunkedAjMergedListener + // These contexts and chunks will be closed when the SSA factory itself is closed by the destroy function of the + // BucketedChunkedAjMergedListener final SizedSafeCloseable rightStampFillContext = - new SizedSafeCloseable<>(rightStampSource::makeFillContext); + new SizedSafeCloseable<>(rightStampSource::makeFillContext); final SizedSafeCloseable> sortKernel = - new SizedSafeCloseable<>( - size -> LongSortKernel.makeContext(stampChunkType, order, size, true)); + new SizedSafeCloseable<>(size -> LongSortKernel.makeContext(stampChunkType, order, size, true)); final SizedSafeCloseable leftStampFillContext = - new SizedSafeCloseable<>(leftStampSource::makeFillContext); + new SizedSafeCloseable<>(leftStampSource::makeFillContext); final SizedLongChunk leftStampKeys = new SizedLongChunk<>(); final SizedChunk leftStampValues = new SizedChunk<>(stampChunkType); final SizedChunk rightStampValues = new SizedChunk<>(stampChunkType); @@ -1025,9 +933,8 @@ public SegmentedSortedArray apply(Index rightIndex) { if (slotSize > 0) { rightIndex.fillKeyIndicesChunk(rightStampKeys.ensureCapacity(slotSize)); rightStampSource.fillChunk(rightStampFillContext.ensureCapacity(slotSize), - rightStampValues.ensureCapacity(slotSize), rightIndex); - sortKernel.ensureCapacity(slotSize).sort(rightStampKeys.get(), - rightStampValues.get()); + rightStampValues.ensureCapacity(slotSize), rightIndex); + sortKernel.ensureCapacity(slotSize).sort(rightStampKeys.get(), rightStampValues.get()); ssa.insert(rightStampValues.get(), rightStampKeys.get()); } return ssa; @@ -1037,8 +944,7 @@ public SegmentedSortedArray 
apply(Index rightIndex) { final SsaFactory leftSsaFactory = new SsaFactory() { @Override public void close() { - SafeCloseable.closeArray(sortKernel, leftStampFillContext, leftStampValues, - leftStampKeys); + SafeCloseable.closeArray(sortKernel, leftStampFillContext, leftStampValues, leftStampKeys); } @Override @@ -1048,12 +954,11 @@ public SegmentedSortedArray apply(Index leftIndex) { if (slotSize > 0) { leftStampSource.fillChunk(leftStampFillContext.ensureCapacity(slotSize), - leftStampValues.ensureCapacity(slotSize), leftIndex); + leftStampValues.ensureCapacity(slotSize), leftIndex); leftIndex.fillKeyIndicesChunk(leftStampKeys.ensureCapacity(slotSize)); - sortKernel.ensureCapacity(slotSize).sort(leftStampKeys.get(), - leftStampValues.get()); + sortKernel.ensureCapacity(slotSize).sort(leftStampKeys.get(), leftStampValues.get()); ssa.insert(leftStampValues.get(), leftStampKeys.get()); } @@ -1062,31 +967,26 @@ public SegmentedSortedArray apply(Index leftIndex) { }; final QueryTable result; - // if we fail to create the table, then we should make sure to close the ssa factories, - // which contain a context. - // if we are successful, then the mergedJoinListener will own them and be responsible for - // closing them - try (final SafeCloseableList closeableList = - new SafeCloseableList(leftSsaFactory, rightSsaFactory)) { + // if we fail to create the table, then we should make sure to close the ssa factories, which contain a context. 
+ // if we are successful, then the mergedJoinListener will own them and be responsible for closing them + try (final SafeCloseableList closeableList = new SafeCloseableList(leftSsaFactory, rightSsaFactory)) { for (int slotIndex = 0; slotIndex < slotCount; ++slotIndex) { final long slot = slots.getLong(slotIndex); - // if either initial state is empty, we would prefer to leave things as an index - // rather than process them into an ssa + // if either initial state is empty, we would prefer to leave things as an index rather than process + // them into an ssa final byte state = asOfJoinStateManager.getState(slot); if ((state - & RightIncrementalChunkedAsOfJoinStateManager.ENTRY_RIGHT_MASK) == RightIncrementalChunkedAsOfJoinStateManager.ENTRY_RIGHT_IS_EMPTY) { + & RightIncrementalChunkedAsOfJoinStateManager.ENTRY_RIGHT_MASK) == RightIncrementalChunkedAsOfJoinStateManager.ENTRY_RIGHT_IS_EMPTY) { continue; } if ((state - & RightIncrementalChunkedAsOfJoinStateManager.ENTRY_LEFT_MASK) == RightIncrementalChunkedAsOfJoinStateManager.ENTRY_LEFT_IS_EMPTY) { + & RightIncrementalChunkedAsOfJoinStateManager.ENTRY_LEFT_MASK) == RightIncrementalChunkedAsOfJoinStateManager.ENTRY_LEFT_IS_EMPTY) { continue; } - final SegmentedSortedArray rightSsa = - asOfJoinStateManager.getRightSsa(slot, rightSsaFactory); - final SegmentedSortedArray leftSsa = - asOfJoinStateManager.getLeftSsa(slot, leftSsaFactory); + final SegmentedSortedArray rightSsa = asOfJoinStateManager.getRightSsa(slot, rightSsaFactory); + final SegmentedSortedArray leftSsa = asOfJoinStateManager.getLeftSsa(slot, leftSsaFactory); ssaSsaStamp.processEntry(leftSsa, rightSsa, redirectionIndex, disallowExactMatch); } @@ -1094,20 +994,20 @@ public SegmentedSortedArray apply(Index leftIndex) { closeableList.clear(); } - final String listenerDescription = makeListenerDescription(columnsToMatch, stampPair, - columnsToAdd, reverse, disallowExactMatch); + final String listenerDescription = + makeListenerDescription(columnsToMatch, 
stampPair, columnsToAdd, reverse, disallowExactMatch); final JoinListenerRecorder leftRecorder = - new JoinListenerRecorder(true, listenerDescription, leftTable, result); + new JoinListenerRecorder(true, listenerDescription, leftTable, result); final JoinListenerRecorder rightRecorder = - new JoinListenerRecorder(false, listenerDescription, rightTable, result); + new JoinListenerRecorder(false, listenerDescription, rightTable, result); final BucketedChunkedAjMergedListener mergedJoinListener = - new BucketedChunkedAjMergedListener(leftRecorder, rightRecorder, - listenerDescription, result, leftTable, rightTable, columnsToMatch, stampPair, - columnsToAdd, leftSources, - rightSources, leftStampSource, rightStampSource, - leftSsaFactory, rightSsaFactory, order, disallowExactMatch, - ssaSsaStamp, control, asOfJoinStateManager, redirectionIndex); + new BucketedChunkedAjMergedListener(leftRecorder, rightRecorder, + listenerDescription, result, leftTable, rightTable, columnsToMatch, stampPair, columnsToAdd, + leftSources, + rightSources, leftStampSource, rightStampSource, + leftSsaFactory, rightSsaFactory, order, disallowExactMatch, + ssaSsaStamp, control, asOfJoinStateManager, redirectionIndex); leftRecorder.setMergedListener(mergedJoinListener); rightRecorder.setMergedListener(mergedJoinListener); @@ -1123,19 +1023,17 @@ public SegmentedSortedArray apply(Index leftIndex) { return result; } - private static Table zeroKeyAjBothIncremental(JoinControl control, QueryTable leftTable, - QueryTable rightTable, MatchPair[] columnsToAdd, MatchPair stampPair, - ColumnSource leftStampSource, ColumnSource rightStampSource, SortingOrder order, - boolean disallowExactMatch, final RedirectionIndex redirectionIndex) { + private static Table zeroKeyAjBothIncremental(JoinControl control, QueryTable leftTable, QueryTable rightTable, + MatchPair[] columnsToAdd, MatchPair stampPair, ColumnSource leftStampSource, + ColumnSource rightStampSource, SortingOrder order, boolean 
disallowExactMatch, + final RedirectionIndex redirectionIndex) { final boolean reverse = order == SortingOrder.Descending; final ChunkType stampChunkType = rightStampSource.getChunkType(); final int leftNodeSize = control.leftSsaNodeSize(); final int rightNodeSize = control.rightSsaNodeSize(); - final SegmentedSortedArray leftSsa = - SegmentedSortedArray.make(stampChunkType, reverse, leftNodeSize); - final SegmentedSortedArray rightSsa = - SegmentedSortedArray.make(stampChunkType, reverse, rightNodeSize); + final SegmentedSortedArray leftSsa = SegmentedSortedArray.make(stampChunkType, reverse, leftNodeSize); + final SegmentedSortedArray rightSsa = SegmentedSortedArray.make(stampChunkType, reverse, rightNodeSize); fillSsaWithSort(rightTable, rightStampSource, rightNodeSize, rightSsa, order); fillSsaWithSort(leftTable, leftStampSource, leftNodeSize, leftSsa, order); @@ -1143,22 +1041,20 @@ private static Table zeroKeyAjBothIncremental(JoinControl control, QueryTable le final SsaSsaStamp ssaSsaStamp = SsaSsaStamp.make(stampChunkType, reverse); ssaSsaStamp.processEntry(leftSsa, rightSsa, redirectionIndex, disallowExactMatch); - final QueryTable result = - makeResult(leftTable, rightTable, redirectionIndex, columnsToAdd, true); + final QueryTable result = makeResult(leftTable, rightTable, redirectionIndex, columnsToAdd, true); - final String listenerDescription = - makeListenerDescription(MatchPair.ZERO_LENGTH_MATCH_PAIR_ARRAY, stampPair, columnsToAdd, - reverse, disallowExactMatch); + final String listenerDescription = makeListenerDescription(MatchPair.ZERO_LENGTH_MATCH_PAIR_ARRAY, stampPair, + columnsToAdd, reverse, disallowExactMatch); final JoinListenerRecorder leftRecorder = - new JoinListenerRecorder(true, listenerDescription, leftTable, result); + new JoinListenerRecorder(true, listenerDescription, leftTable, result); final JoinListenerRecorder rightRecorder = - new JoinListenerRecorder(false, listenerDescription, rightTable, result); + new 
JoinListenerRecorder(false, listenerDescription, rightTable, result); final ZeroKeyChunkedAjMergedListener mergedJoinListener = - new ZeroKeyChunkedAjMergedListener(leftRecorder, rightRecorder, - listenerDescription, result, leftTable, rightTable, stampPair, columnsToAdd, - leftStampSource, rightStampSource, order, disallowExactMatch, - ssaSsaStamp, leftSsa, rightSsa, redirectionIndex, control); + new ZeroKeyChunkedAjMergedListener(leftRecorder, rightRecorder, + listenerDescription, result, leftTable, rightTable, stampPair, columnsToAdd, + leftStampSource, rightStampSource, order, disallowExactMatch, + ssaSsaStamp, leftSsa, rightSsa, redirectionIndex, control); leftRecorder.setMergedListener(mergedJoinListener); rightRecorder.setMergedListener(mergedJoinListener); @@ -1173,12 +1069,11 @@ private static Table zeroKeyAjBothIncremental(JoinControl control, QueryTable le @NotNull private static String makeListenerDescription(MatchPair[] columnsToMatch, MatchPair stampPair, - MatchPair[] columnsToAdd, boolean reverse, boolean disallowExactMatch) { - final String stampString = - disallowExactMatch ? makeDisallowExactStampString(stampPair, reverse) + MatchPair[] columnsToAdd, boolean reverse, boolean disallowExactMatch) { + final String stampString = disallowExactMatch ? makeDisallowExactStampString(stampPair, reverse) : MatchPair.matchString(stampPair); - return (reverse ? "r" : "") + "aj([" + MatchPair.matchString(columnsToMatch) + ", " - + stampString + "], [" + MatchPair.matchString(columnsToAdd) + "])"; + return (reverse ? 
"r" : "") + "aj([" + MatchPair.matchString(columnsToMatch) + ", " + stampString + "], [" + + MatchPair.matchString(columnsToAdd) + "])"; } @NotNull @@ -1188,16 +1083,14 @@ private static String makeDisallowExactStampString(MatchPair stampPair, boolean } - private static void fillSsaWithSort(QueryTable rightTable, ColumnSource stampSource, - int nodeSize, SegmentedSortedArray ssa, SortingOrder order) { + private static void fillSsaWithSort(QueryTable rightTable, ColumnSource stampSource, int nodeSize, + SegmentedSortedArray ssa, SortingOrder order) { try (final ColumnSource.FillContext context = stampSource.makeFillContext(nodeSize); - final OrderedKeys.Iterator okit = rightTable.getIndex().getOrderedKeysIterator(); - final WritableChunk stampChunk = - stampSource.getChunkType().makeWritableChunk(nodeSize); - final WritableLongChunk keyChunk = - WritableLongChunk.makeWritableChunk(nodeSize); - final LongSortKernel sortKernel = - LongSortKernel.makeContext(stampSource.getChunkType(), order, nodeSize, true)) { + final OrderedKeys.Iterator okit = rightTable.getIndex().getOrderedKeysIterator(); + final WritableChunk stampChunk = stampSource.getChunkType().makeWritableChunk(nodeSize); + final WritableLongChunk keyChunk = WritableLongChunk.makeWritableChunk(nodeSize); + final LongSortKernel sortKernel = + LongSortKernel.makeContext(stampSource.getChunkType(), order, nodeSize, true)) { while (okit.hasMore()) { final OrderedKeys chunkOk = okit.getNextOrderedKeysWithLength(nodeSize); stampSource.fillChunk(context, stampChunk, chunkOk); @@ -1210,39 +1103,35 @@ private static void fillSsaWithSort(QueryTable rightTable, ColumnSource stamp } } - private static Table zeroKeyAjRightIncremental(JoinControl control, QueryTable leftTable, - QueryTable rightTable, MatchPair[] columnsToAdd, MatchPair stampPair, - ColumnSource leftStampSource, ColumnSource rightStampSource, SortingOrder order, - boolean disallowExactMatch, final RedirectionIndex redirectionIndex) { + private static 
Table zeroKeyAjRightIncremental(JoinControl control, QueryTable leftTable, QueryTable rightTable, + MatchPair[] columnsToAdd, MatchPair stampPair, ColumnSource leftStampSource, + ColumnSource rightStampSource, SortingOrder order, boolean disallowExactMatch, + final RedirectionIndex redirectionIndex) { final boolean reverse = order == SortingOrder.Descending; final ChunkType stampChunkType = rightStampSource.getChunkType(); final int rightNodeSize = control.rightSsaNodeSize(); final int rightChunkSize = control.rightChunkSize(); - final SegmentedSortedArray ssa = - SegmentedSortedArray.make(stampChunkType, reverse, rightNodeSize); + final SegmentedSortedArray ssa = SegmentedSortedArray.make(stampChunkType, reverse, rightNodeSize); fillSsaWithSort(rightTable, rightStampSource, rightChunkSize, ssa, order); final int leftSize = leftTable.intSize(); final WritableChunk leftStampValues = stampChunkType.makeWritableChunk(leftSize); - final WritableLongChunk leftStampKeys = - WritableLongChunk.makeWritableChunk(leftSize); + final WritableLongChunk leftStampKeys = WritableLongChunk.makeWritableChunk(leftSize); leftTable.getIndex().fillKeyIndicesChunk(leftStampKeys); try (final ColumnSource.FillContext context = leftStampSource.makeFillContext(leftSize)) { leftStampSource.fillChunk(context, leftStampValues, leftTable.getIndex()); } try (final LongSortKernel sortKernel = - LongSortKernel.makeContext(stampChunkType, order, leftSize, true)) { + LongSortKernel.makeContext(stampChunkType, order, leftSize, true)) { sortKernel.sort(leftStampKeys, leftStampValues); } final ChunkSsaStamp chunkSsaStamp = ChunkSsaStamp.make(stampChunkType, reverse); - try (final WritableLongChunk rightKeysForLeft = - WritableLongChunk.makeWritableChunk(leftSize)) { - chunkSsaStamp.processEntry(leftStampValues, leftStampKeys, ssa, rightKeysForLeft, - disallowExactMatch); + try (final WritableLongChunk rightKeysForLeft = WritableLongChunk.makeWritableChunk(leftSize)) { + 
chunkSsaStamp.processEntry(leftStampValues, leftStampKeys, ssa, rightKeysForLeft, disallowExactMatch); for (int ii = 0; ii < leftStampKeys.size(); ++ii) { final long index = rightKeysForLeft.get(ii); @@ -1252,232 +1141,207 @@ private static Table zeroKeyAjRightIncremental(JoinControl control, QueryTable l } } - final QueryTable result = - makeResult(leftTable, rightTable, redirectionIndex, columnsToAdd, true); - final ModifiedColumnSet rightStampColumn = - rightTable.newModifiedColumnSet(stampPair.right()); - final ModifiedColumnSet allRightColumns = - result.newModifiedColumnSet(MatchPair.getLeftColumns(columnsToAdd)); + final QueryTable result = makeResult(leftTable, rightTable, redirectionIndex, columnsToAdd, true); + final ModifiedColumnSet rightStampColumn = rightTable.newModifiedColumnSet(stampPair.right()); + final ModifiedColumnSet allRightColumns = result.newModifiedColumnSet(MatchPair.getLeftColumns(columnsToAdd)); final ModifiedColumnSet.Transformer rightTransformer = - rightTable.newModifiedColumnSetTransformer(result, columnsToAdd); + rightTable.newModifiedColumnSetTransformer(result, columnsToAdd); final ChunkEquals stampChunkEquals = ChunkEquals.makeEqual(stampChunkType); final CompactKernel stampCompact = CompactKernel.makeCompact(stampChunkType); - rightTable.listenForUpdates(new BaseTable.ShiftAwareListenerImpl( - makeListenerDescription(MatchPair.ZERO_LENGTH_MATCH_PAIR_ARRAY, stampPair, columnsToAdd, - reverse, disallowExactMatch), - rightTable, result) { - @Override - public void onUpdate(Update upstream) { - final Update downstream = new Update(); - downstream.added = Index.FACTORY.getEmptyIndex(); - downstream.removed = Index.FACTORY.getEmptyIndex(); - downstream.shifted = IndexShiftData.EMPTY; - downstream.modifiedColumnSet = result.modifiedColumnSet; - - final boolean stampModified = - upstream.modifiedColumnSet.containsAny(rightStampColumn); - - final Index.RandomBuilder modifiedBuilder = Index.FACTORY.getRandomBuilder(); + 
rightTable.listenForUpdates( + new BaseTable.ShiftAwareListenerImpl(makeListenerDescription(MatchPair.ZERO_LENGTH_MATCH_PAIR_ARRAY, + stampPair, columnsToAdd, reverse, disallowExactMatch), rightTable, result) { + @Override + public void onUpdate(Update upstream) { + final Update downstream = new Update(); + downstream.added = Index.FACTORY.getEmptyIndex(); + downstream.removed = Index.FACTORY.getEmptyIndex(); + downstream.shifted = IndexShiftData.EMPTY; + downstream.modifiedColumnSet = result.modifiedColumnSet; + + final boolean stampModified = upstream.modifiedColumnSet.containsAny(rightStampColumn); + + final Index.RandomBuilder modifiedBuilder = Index.FACTORY.getRandomBuilder(); + + try (final ColumnSource.FillContext fillContext = + rightStampSource.makeFillContext(rightChunkSize); + final LongSortKernel sortKernel = + LongSortKernel.makeContext(stampChunkType, order, rightChunkSize, true)) { + + final Index restampRemovals; + final Index restampAdditions; + if (stampModified) { + restampAdditions = upstream.added.union(upstream.modified); + restampRemovals = upstream.removed.union(upstream.getModifiedPreShift()); + } else { + restampAdditions = upstream.added; + restampRemovals = upstream.removed; + } - try ( - final ColumnSource.FillContext fillContext = - rightStampSource.makeFillContext(rightChunkSize); - final LongSortKernel sortKernel = - LongSortKernel.makeContext(stampChunkType, order, rightChunkSize, true)) { - - final Index restampRemovals; - final Index restampAdditions; - if (stampModified) { - restampAdditions = upstream.added.union(upstream.modified); - restampRemovals = upstream.removed.union(upstream.getModifiedPreShift()); - } else { - restampAdditions = upstream.added; - restampRemovals = upstream.removed; - } + // When removing a row, record the stamp, redirection key, and prior redirection key. Binary + // search + // in the left for the removed key to find the smallest value geq the removed right. 
Update + // all rows + // with the removed redirection to the previous key. + try (final OrderedKeys.Iterator removeit = restampRemovals.getOrderedKeysIterator(); + final WritableLongChunk priorRedirections = + WritableLongChunk.makeWritableChunk(rightChunkSize); + final WritableLongChunk rightKeyIndices = + WritableLongChunk.makeWritableChunk(rightChunkSize); + final WritableChunk rightStampChunk = + stampChunkType.makeWritableChunk(rightChunkSize)) { + while (removeit.hasMore()) { + final OrderedKeys chunkOk = removeit.getNextOrderedKeysWithLength(rightChunkSize); + rightStampSource.fillPrevChunk(fillContext, rightStampChunk, chunkOk); + chunkOk.fillKeyIndicesChunk(rightKeyIndices); + + sortKernel.sort(rightKeyIndices, rightStampChunk); + + ssa.removeAndGetPrior(rightStampChunk, rightKeyIndices, priorRedirections); + chunkSsaStamp.processRemovals(leftStampValues, leftStampKeys, rightStampChunk, + rightKeyIndices, priorRedirections, redirectionIndex, modifiedBuilder, + disallowExactMatch); + } + } - // When removing a row, record the stamp, redirection key, and prior redirection - // key. Binary search - // in the left for the removed key to find the smallest value geq the removed - // right. Update all rows - // with the removed redirection to the previous key. 
- try ( - final OrderedKeys.Iterator removeit = - restampRemovals.getOrderedKeysIterator(); - final WritableLongChunk priorRedirections = - WritableLongChunk.makeWritableChunk(rightChunkSize); - final WritableLongChunk rightKeyIndices = - WritableLongChunk.makeWritableChunk(rightChunkSize); - final WritableChunk rightStampChunk = - stampChunkType.makeWritableChunk(rightChunkSize)) { - while (removeit.hasMore()) { - final OrderedKeys chunkOk = - removeit.getNextOrderedKeysWithLength(rightChunkSize); - rightStampSource.fillPrevChunk(fillContext, rightStampChunk, chunkOk); - chunkOk.fillKeyIndicesChunk(rightKeyIndices); - - sortKernel.sort(rightKeyIndices, rightStampChunk); - - ssa.removeAndGetPrior(rightStampChunk, rightKeyIndices, - priorRedirections); - chunkSsaStamp.processRemovals(leftStampValues, leftStampKeys, - rightStampChunk, rightKeyIndices, priorRedirections, - redirectionIndex, modifiedBuilder, disallowExactMatch); - } - } + if (upstream.shifted.nonempty()) { + rightIncrementalApplySsaShift(upstream.shifted, ssa, sortKernel, fillContext, + restampRemovals, rightTable, rightChunkSize, rightStampSource, chunkSsaStamp, + leftStampValues, leftStampKeys, redirectionIndex, disallowExactMatch); + } - if (upstream.shifted.nonempty()) { - rightIncrementalApplySsaShift(upstream.shifted, ssa, sortKernel, - fillContext, restampRemovals, rightTable, rightChunkSize, - rightStampSource, chunkSsaStamp, leftStampValues, leftStampKeys, - redirectionIndex, disallowExactMatch); - } + // When adding a row to the right hand side: we need to know which left hand side might be + // responsive. If we are a duplicate stamp and not the last one, we ignore it. 
Next, we + // should binary + // search in the left for the first value >=, everything up until the next extant right + // value should be + // restamped with our value + try (final WritableChunk stampChunk = + stampChunkType.makeWritableChunk(rightChunkSize); + final WritableChunk nextRightValue = + stampChunkType.makeWritableChunk(rightChunkSize); + final WritableLongChunk insertedIndices = + WritableLongChunk.makeWritableChunk(rightChunkSize); + final WritableBooleanChunk retainStamps = + WritableBooleanChunk.makeWritableChunk(rightChunkSize)) { + final int chunks = (restampAdditions.intSize() + control.rightChunkSize() - 1) + / control.rightChunkSize(); + for (int ii = 0; ii < chunks; ++ii) { + final int startChunk = chunks - ii - 1; + try (final Index chunkOk = + restampAdditions.subindexByPos(startChunk * control.rightChunkSize(), + (startChunk + 1) * control.rightChunkSize())) { + rightStampSource.fillChunk(fillContext, stampChunk, chunkOk); + insertedIndices.setSize(chunkOk.intSize()); + chunkOk.fillKeyIndicesChunk(insertedIndices); + + sortKernel.sort(insertedIndices, stampChunk); + + final int valuesWithNext = + ssa.insertAndGetNextValue(stampChunk, insertedIndices, nextRightValue); + + final boolean endsWithLastValue = valuesWithNext != stampChunk.size(); + if (endsWithLastValue) { + Assert.eq(valuesWithNext, "valuesWithNext", stampChunk.size() - 1, + "stampChunk.size() - 1"); + stampChunk.setSize(valuesWithNext); + stampChunkEquals.notEqual(stampChunk, nextRightValue, retainStamps); + stampCompact.compact(nextRightValue, retainStamps); + + retainStamps.setSize(chunkOk.intSize()); + retainStamps.set(valuesWithNext, true); + stampChunk.setSize(chunkOk.intSize()); + } else { + // remove duplicates + stampChunkEquals.notEqual(stampChunk, nextRightValue, retainStamps); + stampCompact.compact(nextRightValue, retainStamps); + } + LongCompactKernel.compact(insertedIndices, retainStamps); + stampCompact.compact(stampChunk, retainStamps); - // When adding a row 
to the right hand side: we need to know which left hand - // side might be - // responsive. If we are a duplicate stamp and not the last one, we ignore it. - // Next, we should binary - // search in the left for the first value >=, everything up until the next - // extant right value should be - // restamped with our value - try ( - final WritableChunk stampChunk = - stampChunkType.makeWritableChunk(rightChunkSize); - final WritableChunk nextRightValue = - stampChunkType.makeWritableChunk(rightChunkSize); - final WritableLongChunk insertedIndices = - WritableLongChunk.makeWritableChunk(rightChunkSize); - final WritableBooleanChunk retainStamps = - WritableBooleanChunk.makeWritableChunk(rightChunkSize)) { - final int chunks = - (restampAdditions.intSize() + control.rightChunkSize() - 1) - / control.rightChunkSize(); - for (int ii = 0; ii < chunks; ++ii) { - final int startChunk = chunks - ii - 1; - try (final Index chunkOk = restampAdditions.subindexByPos( - startChunk * control.rightChunkSize(), - (startChunk + 1) * control.rightChunkSize())) { - rightStampSource.fillChunk(fillContext, stampChunk, chunkOk); - insertedIndices.setSize(chunkOk.intSize()); - chunkOk.fillKeyIndicesChunk(insertedIndices); - - sortKernel.sort(insertedIndices, stampChunk); - - final int valuesWithNext = ssa.insertAndGetNextValue(stampChunk, - insertedIndices, nextRightValue); - - final boolean endsWithLastValue = - valuesWithNext != stampChunk.size(); - if (endsWithLastValue) { - Assert.eq(valuesWithNext, "valuesWithNext", - stampChunk.size() - 1, "stampChunk.size() - 1"); - stampChunk.setSize(valuesWithNext); - stampChunkEquals.notEqual(stampChunk, nextRightValue, - retainStamps); - stampCompact.compact(nextRightValue, retainStamps); - - retainStamps.setSize(chunkOk.intSize()); - retainStamps.set(valuesWithNext, true); - stampChunk.setSize(chunkOk.intSize()); - } else { - // remove duplicates - stampChunkEquals.notEqual(stampChunk, nextRightValue, - retainStamps); - 
stampCompact.compact(nextRightValue, retainStamps); + chunkSsaStamp.processInsertion(leftStampValues, leftStampKeys, stampChunk, + insertedIndices, nextRightValue, redirectionIndex, modifiedBuilder, + endsWithLastValue, disallowExactMatch); + } } - LongCompactKernel.compact(insertedIndices, retainStamps); - stampCompact.compact(stampChunk, retainStamps); + } - chunkSsaStamp.processInsertion(leftStampValues, leftStampKeys, - stampChunk, insertedIndices, nextRightValue, redirectionIndex, - modifiedBuilder, endsWithLastValue, disallowExactMatch); + // if the stamp was not modified, then we need to figure out the responsive rows to mark as + // modified + if (!stampModified && upstream.modified.nonempty()) { + try (final OrderedKeys.Iterator modit = upstream.modified.getOrderedKeysIterator(); + final WritableLongChunk rightStampIndices = + WritableLongChunk.makeWritableChunk(rightChunkSize); + final WritableChunk rightStampChunk = + stampChunkType.makeWritableChunk(rightChunkSize)) { + while (modit.hasMore()) { + final OrderedKeys chunkOk = modit.getNextOrderedKeysWithLength(rightChunkSize); + rightStampSource.fillChunk(fillContext, rightStampChunk, chunkOk); + chunkOk.fillKeyIndicesChunk(rightStampIndices); + + sortKernel.sort(rightStampIndices, rightStampChunk); + + chunkSsaStamp.findModified(0, leftStampValues, leftStampKeys, redirectionIndex, + rightStampChunk, rightStampIndices, modifiedBuilder, + disallowExactMatch); + } + } } - } - } - // if the stamp was not modified, then we need to figure out the responsive rows - // to mark as modified - if (!stampModified && upstream.modified.nonempty()) { - try ( - final OrderedKeys.Iterator modit = - upstream.modified.getOrderedKeysIterator(); - final WritableLongChunk rightStampIndices = - WritableLongChunk.makeWritableChunk(rightChunkSize); - final WritableChunk rightStampChunk = - stampChunkType.makeWritableChunk(rightChunkSize)) { - while (modit.hasMore()) { - final OrderedKeys chunkOk = - 
modit.getNextOrderedKeysWithLength(rightChunkSize); - rightStampSource.fillChunk(fillContext, rightStampChunk, chunkOk); - chunkOk.fillKeyIndicesChunk(rightStampIndices); - - sortKernel.sort(rightStampIndices, rightStampChunk); - - chunkSsaStamp.findModified(0, leftStampValues, leftStampKeys, - redirectionIndex, rightStampChunk, rightStampIndices, - modifiedBuilder, disallowExactMatch); + if (stampModified) { + restampAdditions.close(); + restampRemovals.close(); } } - } - - if (stampModified) { - restampAdditions.close(); - restampRemovals.close(); - } - } - if (stampModified || upstream.added.nonempty() || upstream.removed.nonempty()) { - // If we kept track of whether or not something actually changed, then we could - // skip painting all - // the right columns as modified. It is not clear whether it is worth the - // additional complexity. - downstream.modifiedColumnSet.setAll(allRightColumns); - } else { - rightTransformer.transform(upstream.modifiedColumnSet, - downstream.modifiedColumnSet); - } + if (stampModified || upstream.added.nonempty() || upstream.removed.nonempty()) { + // If we kept track of whether or not something actually changed, then we could skip + // painting all + // the right columns as modified. It is not clear whether it is worth the additional + // complexity. 
+ downstream.modifiedColumnSet.setAll(allRightColumns); + } else { + rightTransformer.transform(upstream.modifiedColumnSet, downstream.modifiedColumnSet); + } - downstream.modified = modifiedBuilder.getIndex(); + downstream.modified = modifiedBuilder.getIndex(); - result.notifyListeners(downstream); - } + result.notifyListeners(downstream); + } - @Override - protected void destroy() { - super.destroy(); - leftStampKeys.close(); - leftStampValues.close(); - } - }); + @Override + protected void destroy() { + super.destroy(); + leftStampKeys.close(); + leftStampValues.close(); + } + }); return result; } - private static void rightIncrementalApplySsaShift(IndexShiftData shiftData, - SegmentedSortedArray ssa, - LongSortKernel sortKernel, ChunkSource.FillContext fillContext, - Index restampRemovals, QueryTable table, - int chunkSize, ColumnSource stampSource, ChunkSsaStamp chunkSsaStamp, - WritableChunk leftStampValues, WritableLongChunk leftStampKeys, - RedirectionIndex redirectionIndex, boolean disallowExactMatch) { + private static void rightIncrementalApplySsaShift(IndexShiftData shiftData, SegmentedSortedArray ssa, + LongSortKernel sortKernel, ChunkSource.FillContext fillContext, + Index restampRemovals, QueryTable table, + int chunkSize, ColumnSource stampSource, ChunkSsaStamp chunkSsaStamp, + WritableChunk leftStampValues, WritableLongChunk leftStampKeys, + RedirectionIndex redirectionIndex, boolean disallowExactMatch) { try (final Index fullPrevIndex = table.getIndex().getPrevIndex(); - final Index previousToShift = fullPrevIndex.minus(restampRemovals); - final SizedSafeCloseable shiftFillContext = - new SizedSafeCloseable<>(stampSource::makeFillContext); - final SizedSafeCloseable> shiftSortKernel = - new SizedSafeCloseable<>(sz -> LongSortKernel.makeContext( - stampSource.getChunkType(), - ssa.isReversed() ? 
SortingOrder.Descending : SortingOrder.Ascending, sz, true)); - final SizedChunk rightStampValues = - new SizedChunk<>(stampSource.getChunkType()); - final SizedLongChunk rightStampKeys = new SizedLongChunk<>()) { + final Index previousToShift = fullPrevIndex.minus(restampRemovals); + final SizedSafeCloseable shiftFillContext = + new SizedSafeCloseable<>(stampSource::makeFillContext); + final SizedSafeCloseable> shiftSortKernel = + new SizedSafeCloseable<>(sz -> LongSortKernel.makeContext(stampSource.getChunkType(), + ssa.isReversed() ? SortingOrder.Descending : SortingOrder.Ascending, sz, true)); + final SizedChunk rightStampValues = new SizedChunk<>(stampSource.getChunkType()); + final SizedLongChunk rightStampKeys = new SizedLongChunk<>()) { final IndexShiftData.Iterator sit = shiftData.applyIterator(); while (sit.hasNext()) { sit.next(); - try (final Index indexToShift = - previousToShift.subindexByKey(sit.beginRange(), sit.endRange())) { + try (final Index indexToShift = previousToShift.subindexByKey(sit.beginRange(), sit.endRange())) { if (indexToShift.empty()) { continue; } @@ -1487,57 +1351,47 @@ private static void rightIncrementalApplySsaShift(IndexShiftData shiftData, indexToShift.fillKeyIndicesChunk(rightStampKeys.ensureCapacity(shiftSize)); if (chunkSize >= shiftSize) { - stampSource.fillPrevChunk(fillContext, - rightStampValues.ensureCapacity(shiftSize), indexToShift); + stampSource.fillPrevChunk(fillContext, rightStampValues.ensureCapacity(shiftSize), + indexToShift); sortKernel.sort(rightStampKeys.get(), rightStampValues.get()); } else { stampSource.fillPrevChunk(shiftFillContext.ensureCapacity(shiftSize), - rightStampValues.ensureCapacity(shiftSize), indexToShift); + rightStampValues.ensureCapacity(shiftSize), indexToShift); shiftSortKernel.ensureCapacity(shiftSize).sort(rightStampKeys.get(), - rightStampValues.get()); + rightStampValues.get()); } - chunkSsaStamp.applyShift(leftStampValues, leftStampKeys, - rightStampValues.get(), 
rightStampKeys.get(), sit.shiftDelta(), - redirectionIndex, disallowExactMatch); - ssa.applyShiftReverse(rightStampValues.get(), rightStampKeys.get(), - sit.shiftDelta()); + chunkSsaStamp.applyShift(leftStampValues, leftStampKeys, rightStampValues.get(), + rightStampKeys.get(), sit.shiftDelta(), redirectionIndex, disallowExactMatch); + ssa.applyShiftReverse(rightStampValues.get(), rightStampKeys.get(), sit.shiftDelta()); } else { if (indexToShift.size() > chunkSize) { - try (final OrderedKeys.Iterator shiftIt = - indexToShift.getOrderedKeysIterator()) { + try (final OrderedKeys.Iterator shiftIt = indexToShift.getOrderedKeysIterator()) { while (shiftIt.hasMore()) { - final OrderedKeys chunkOk = - shiftIt.getNextOrderedKeysWithLength(chunkSize); - stampSource.fillPrevChunk(fillContext, - rightStampValues.ensureCapacity(chunkSize), chunkOk); + final OrderedKeys chunkOk = shiftIt.getNextOrderedKeysWithLength(chunkSize); + stampSource.fillPrevChunk(fillContext, rightStampValues.ensureCapacity(chunkSize), + chunkOk); - chunkOk.fillKeyIndicesChunk( - rightStampKeys.ensureCapacity(chunkSize)); + chunkOk.fillKeyIndicesChunk(rightStampKeys.ensureCapacity(chunkSize)); sortKernel.sort(rightStampKeys.get(), rightStampValues.get()); - ssa.applyShift(rightStampValues.get(), rightStampKeys.get(), - sit.shiftDelta()); - chunkSsaStamp.applyShift(leftStampValues, leftStampKeys, - rightStampValues.get(), rightStampKeys.get(), - sit.shiftDelta(), redirectionIndex, disallowExactMatch); + ssa.applyShift(rightStampValues.get(), rightStampKeys.get(), sit.shiftDelta()); + chunkSsaStamp.applyShift(leftStampValues, leftStampKeys, rightStampValues.get(), + rightStampKeys.get(), sit.shiftDelta(), redirectionIndex, + disallowExactMatch); } } } else { stampSource.fillPrevChunk(fillContext, - rightStampValues.ensureCapacity(indexToShift.intSize()), - indexToShift); - indexToShift.fillKeyIndicesChunk( - rightStampKeys.ensureCapacity(indexToShift.intSize())); + 
rightStampValues.ensureCapacity(indexToShift.intSize()), indexToShift); + indexToShift.fillKeyIndicesChunk(rightStampKeys.ensureCapacity(indexToShift.intSize())); sortKernel.sort(rightStampKeys.get(), rightStampValues.get()); - ssa.applyShift(rightStampValues.get(), rightStampKeys.get(), - sit.shiftDelta()); - chunkSsaStamp.applyShift(leftStampValues, leftStampKeys, - rightStampValues.get(), rightStampKeys.get(), sit.shiftDelta(), - redirectionIndex, disallowExactMatch); + ssa.applyShift(rightStampValues.get(), rightStampKeys.get(), sit.shiftDelta()); + chunkSsaStamp.applyShift(leftStampValues, leftStampKeys, rightStampValues.get(), + rightStampKeys.get(), sit.shiftDelta(), redirectionIndex, disallowExactMatch); } } } @@ -1545,124 +1399,117 @@ private static void rightIncrementalApplySsaShift(IndexShiftData shiftData, } } - private static Table zeroKeyAjRightStatic(QueryTable leftTable, Table rightTable, - MatchPair[] columnsToAdd, MatchPair stampPair, ColumnSource leftStampSource, - ColumnSource originalRightStampSource, ColumnSource rightStampSource, - SortingOrder order, boolean disallowExactMatch, final RedirectionIndex redirectionIndex) { + private static Table zeroKeyAjRightStatic(QueryTable leftTable, Table rightTable, MatchPair[] columnsToAdd, + MatchPair stampPair, ColumnSource leftStampSource, ColumnSource originalRightStampSource, + ColumnSource rightStampSource, SortingOrder order, boolean disallowExactMatch, + final RedirectionIndex redirectionIndex) { final Index rightIndex = rightTable.getIndex(); - final WritableLongChunk rightStampKeys = - WritableLongChunk.makeWritableChunk(rightIndex.intSize()); + final WritableLongChunk rightStampKeys = WritableLongChunk.makeWritableChunk(rightIndex.intSize()); final WritableChunk rightStampValues = - rightStampSource.getChunkType().makeWritableChunk(rightIndex.intSize()); + rightStampSource.getChunkType().makeWritableChunk(rightIndex.intSize()); - try (final SafeCloseableList chunksToClose = - new 
SafeCloseableList(rightStampKeys, rightStampValues)) { + try (final SafeCloseableList chunksToClose = new SafeCloseableList(rightStampKeys, rightStampValues)) { final Supplier keyStringSupplier = () -> "[] (zero key columns)"; - try (final AsOfStampContext stampContext = new AsOfStampContext(order, - disallowExactMatch, leftStampSource, rightStampSource, originalRightStampSource)) { + try (final AsOfStampContext stampContext = new AsOfStampContext(order, disallowExactMatch, leftStampSource, + rightStampSource, originalRightStampSource)) { stampContext.getAndCompactStamps(rightIndex, rightStampKeys, rightStampValues); - stampContext.processEntry(leftTable.getIndex(), rightStampValues, rightStampKeys, - redirectionIndex); + stampContext.processEntry(leftTable.getIndex(), rightStampValues, rightStampKeys, redirectionIndex); } - final QueryTable result = makeResult(leftTable, rightTable, redirectionIndex, - columnsToAdd, leftTable.isRefreshing()); + final QueryTable result = + makeResult(leftTable, rightTable, redirectionIndex, columnsToAdd, leftTable.isRefreshing()); if (!leftTable.isRefreshing()) { return result; } - final ModifiedColumnSet leftStampColumn = - leftTable.newModifiedColumnSet(stampPair.left()); + final ModifiedColumnSet leftStampColumn = leftTable.newModifiedColumnSet(stampPair.left()); final ModifiedColumnSet allRightColumns = - result.newModifiedColumnSet(MatchPair.getLeftColumns(columnsToAdd)); + result.newModifiedColumnSet(MatchPair.getLeftColumns(columnsToAdd)); final ModifiedColumnSet.Transformer leftTransformer = - leftTable.newModifiedColumnSetTransformer(result, - leftTable.getDefinition().getColumnNamesArray()); + leftTable.newModifiedColumnSetTransformer(result, leftTable.getDefinition().getColumnNamesArray()); final WritableLongChunk compactedRightStampKeys; final WritableChunk compactedRightStampValues; if (rightStampKeys.size() < rightIndex.size()) { - compactedRightStampKeys = - WritableLongChunk.makeWritableChunk(rightStampKeys.size()); 
- compactedRightStampValues = - rightStampSource.getChunkType().makeWritableChunk(rightStampKeys.size()); + compactedRightStampKeys = WritableLongChunk.makeWritableChunk(rightStampKeys.size()); + compactedRightStampValues = rightStampSource.getChunkType().makeWritableChunk(rightStampKeys.size()); rightStampKeys.copyToChunk(0, compactedRightStampKeys, 0, rightStampKeys.size()); - rightStampValues.copyToChunk(0, compactedRightStampValues, 0, - rightStampKeys.size()); + rightStampValues.copyToChunk(0, compactedRightStampValues, 0, rightStampKeys.size()); } else { chunksToClose.clear(); compactedRightStampKeys = rightStampKeys; compactedRightStampValues = rightStampValues; } - leftTable.listenForUpdates(new BaseTable.ShiftAwareListenerImpl( - makeListenerDescription(MatchPair.ZERO_LENGTH_MATCH_PAIR_ARRAY, stampPair, - columnsToAdd, order == SortingOrder.Descending, disallowExactMatch), - leftTable, result) { - @Override - public void onUpdate(Update upstream) { - final Update downstream = upstream.copy(); - - upstream.removed.forAllLongs(redirectionIndex::removeVoid); - - final boolean stampModified = upstream.modified.nonempty() - && upstream.modifiedColumnSet.containsAny(leftStampColumn); - - final Index restampKeys; - if (stampModified) { - upstream.getModifiedPreShift().forAllLongs(redirectionIndex::removeVoid); - restampKeys = upstream.modified.union(upstream.added); - } else { - restampKeys = upstream.added; - } + leftTable + .listenForUpdates( + new BaseTable.ShiftAwareListenerImpl( + makeListenerDescription(MatchPair.ZERO_LENGTH_MATCH_PAIR_ARRAY, stampPair, + columnsToAdd, order == SortingOrder.Descending, disallowExactMatch), + leftTable, result) { + @Override + public void onUpdate(Update upstream) { + final Update downstream = upstream.copy(); + + upstream.removed.forAllLongs(redirectionIndex::removeVoid); + + final boolean stampModified = upstream.modified.nonempty() + && upstream.modifiedColumnSet.containsAny(leftStampColumn); + + final Index restampKeys; 
+ if (stampModified) { + upstream.getModifiedPreShift().forAllLongs(redirectionIndex::removeVoid); + restampKeys = upstream.modified.union(upstream.added); + } else { + restampKeys = upstream.added; + } - try (final Index prevLeftIndex = leftTable.getIndex().getPrevIndex()) { - redirectionIndex.applyShift(prevLeftIndex, upstream.shifted); - } + try (final Index prevLeftIndex = leftTable.getIndex().getPrevIndex()) { + redirectionIndex.applyShift(prevLeftIndex, upstream.shifted); + } - try (final AsOfStampContext stampContext = - new AsOfStampContext(order, disallowExactMatch, leftStampSource, - rightStampSource, originalRightStampSource)) { - stampContext.processEntry(restampKeys, compactedRightStampValues, - compactedRightStampKeys, redirectionIndex); - } + try (final AsOfStampContext stampContext = + new AsOfStampContext(order, disallowExactMatch, leftStampSource, + rightStampSource, originalRightStampSource)) { + stampContext.processEntry(restampKeys, compactedRightStampValues, + compactedRightStampKeys, redirectionIndex); + } - downstream.modifiedColumnSet = result.modifiedColumnSet; - leftTransformer.clearAndTransform(upstream.modifiedColumnSet, - downstream.modifiedColumnSet); - if (stampModified) { - downstream.modifiedColumnSet.setAll(allRightColumns); - } + downstream.modifiedColumnSet = result.modifiedColumnSet; + leftTransformer.clearAndTransform(upstream.modifiedColumnSet, + downstream.modifiedColumnSet); + if (stampModified) { + downstream.modifiedColumnSet.setAll(allRightColumns); + } - result.notifyListeners(downstream); + result.notifyListeners(downstream); - if (stampModified) { - restampKeys.close(); - } - } + if (stampModified) { + restampKeys.close(); + } + } - @Override - protected void destroy() { - super.destroy(); - compactedRightStampKeys.close(); - compactedRightStampValues.close(); - } - }); + @Override + protected void destroy() { + super.destroy(); + compactedRightStampKeys.close(); + compactedRightStampValues.close(); + } + }); return 
result; } } - private static QueryTable makeResult(QueryTable leftTable, Table rightTable, - RedirectionIndex redirectionIndex, MatchPair[] columnsToAdd, boolean refreshing) { - final Map columnSources = - new LinkedHashMap<>(leftTable.getColumnSourceMap()); + private static QueryTable makeResult(QueryTable leftTable, Table rightTable, RedirectionIndex redirectionIndex, + MatchPair[] columnsToAdd, boolean refreshing) { + final Map columnSources = new LinkedHashMap<>(leftTable.getColumnSourceMap()); Arrays.stream(columnsToAdd).forEach(mp -> { // noinspection unchecked - final ReadOnlyRedirectedColumnSource rightSource = new ReadOnlyRedirectedColumnSource<>( - redirectionIndex, rightTable.getColumnSource(mp.right())); + final ReadOnlyRedirectedColumnSource rightSource = + new ReadOnlyRedirectedColumnSource<>(redirectionIndex, rightTable.getColumnSource(mp.right())); if (refreshing) { rightSource.startTrackingPrevValues(); } diff --git a/DB/src/main/java/io/deephaven/db/v2/AsOfStampContext.java b/DB/src/main/java/io/deephaven/db/v2/AsOfStampContext.java index de6d4fe621d..3c3a78662cf 100644 --- a/DB/src/main/java/io/deephaven/db/v2/AsOfStampContext.java +++ b/DB/src/main/java/io/deephaven/db/v2/AsOfStampContext.java @@ -42,9 +42,8 @@ class AsOfStampContext implements Context { private final StampKernel stampKernel; - AsOfStampContext(SortingOrder order, boolean disallowExactMatch, - ColumnSource leftStampSource, ColumnSource rightStampSource, - ColumnSource originalRightStampSource) { + AsOfStampContext(SortingOrder order, boolean disallowExactMatch, ColumnSource leftStampSource, + ColumnSource rightStampSource, ColumnSource originalRightStampSource) { this.order = order; this.leftStampSource = leftStampSource; this.rightStampSource = rightStampSource; @@ -53,12 +52,11 @@ class AsOfStampContext implements Context { final ChunkType rightType = rightStampSource.getChunkType(); if (leftType != rightType) { throw new IllegalArgumentException( - "Stamp columns must have 
the same type, left=" + leftType + ", right=" + rightType); + "Stamp columns must have the same type, left=" + leftType + ", right=" + rightType); } this.stampType = leftType; this.stampKernel = StampKernel.makeStampKernel(stampType, order, disallowExactMatch); - this.rightDupCompact = - DupCompactKernel.makeDupCompact(stampType, order == SortingOrder.Descending); + this.rightDupCompact = DupCompactKernel.makeDupCompact(stampType, order == SortingOrder.Descending); } private void ensureSortCapacity(int length) { @@ -175,15 +173,14 @@ void processEntry(Index leftIndex, Index rightIndex, RedirectionIndex redirectio } /** - * Fill and and compact the values in the right index into the rightKeyIndicesChunk and - * rightStampChunk. + * Fill and and compact the values in the right index into the rightKeyIndicesChunk and rightStampChunk. * * @param rightIndex the indices of the right values to read and compact * @param rightKeyIndicesChunk the output chunk of rightKeyIndices * @param rightStampChunk the output chunk of right stamp values */ void getAndCompactStamps(Index rightIndex, WritableLongChunk rightKeyIndicesChunk, - WritableChunk rightStampChunk) { + WritableChunk rightStampChunk) { ensureRightFillCapacity(rightIndex.intSize()); // read the right stamp column rightKeyIndicesChunk.setSize(rightIndex.intSize()); @@ -203,8 +200,8 @@ void getAndCompactStamps(Index rightIndex, WritableLongChunk rightKe * @param rightKeyIndicesChunk the right key indices (already compacted) * @param redirectionIndex the redirection index to update */ - void processEntry(Index leftIndex, Chunk rightStampChunk, - LongChunk rightKeyIndicesChunk, RedirectionIndex redirectionIndex) { + void processEntry(Index leftIndex, Chunk rightStampChunk, LongChunk rightKeyIndicesChunk, + RedirectionIndex redirectionIndex) { ensureLeftCapacity(leftIndex.intSize()); // read the left stamp column @@ -219,10 +216,9 @@ void processEntry(Index leftIndex, Chunk rightStampChunk, 
computeRedirections(redirectionIndex, rightStampChunk, rightKeyIndicesChunk); } - private void computeRedirections(RedirectionIndex redirectionIndex, - Chunk rightStampChunk, LongChunk rightKeyIndicesChunk) { - stampKernel.computeRedirections(leftStampChunk, rightStampChunk, rightKeyIndicesChunk, - leftRedirections); + private void computeRedirections(RedirectionIndex redirectionIndex, Chunk rightStampChunk, + LongChunk rightKeyIndicesChunk) { + stampKernel.computeRedirections(leftStampChunk, rightStampChunk, rightKeyIndicesChunk, leftRedirections); for (int ii = 0; ii < leftKeyIndicesChunk.size(); ++ii) { final long rightKey = leftRedirections.get(ii); // the redirection index defaults to NULL_KEY so we do not need to put it in there diff --git a/DB/src/main/java/io/deephaven/db/v2/BaseTable.java b/DB/src/main/java/io/deephaven/db/v2/BaseTable.java index 8010cb5e63c..991c96ef641 100644 --- a/DB/src/main/java/io/deephaven/db/v2/BaseTable.java +++ b/DB/src/main/java/io/deephaven/db/v2/BaseTable.java @@ -59,16 +59,16 @@ * Base abstract class all standard table implementations. 
*/ public abstract class BaseTable extends LivenessArtifact - implements DynamicTable, Serializable, NotificationStepReceiver, NotificationStepSource { + implements DynamicTable, Serializable, NotificationStepReceiver, NotificationStepSource { private static final long serialVersionUID = 1L; private static final boolean VALIDATE_UPDATE_INDICES = - Configuration.getInstance().getBooleanWithDefault("BaseTable.validateUpdateIndices", false); + Configuration.getInstance().getBooleanWithDefault("BaseTable.validateUpdateIndices", false); private static final boolean VALIDATE_UPDATE_OVERLAPS = - Configuration.getInstance().getBooleanWithDefault("BaseTable.validateUpdateOverlaps", true); - public static final boolean PRINT_SERIALIZED_UPDATE_OVERLAPS = Configuration.getInstance() - .getBooleanWithDefault("BaseTable.printSerializedUpdateOverlaps", false); + Configuration.getInstance().getBooleanWithDefault("BaseTable.validateUpdateOverlaps", true); + public static final boolean PRINT_SERIALIZED_UPDATE_OVERLAPS = + Configuration.getInstance().getBooleanWithDefault("BaseTable.printSerializedUpdateOverlaps", false); private static final Logger log = LoggerFactory.getLogger(BaseTable.class); @@ -115,15 +115,12 @@ private void initializeTransientFields() { liveTableMonitorCondition = LiveTableMonitor.DEFAULT.exclusiveLock().newCondition(); parents = new KeyedObjectHashSet<>(IdentityKeyedObjectKey.getInstance()); childListenerReferences = new SimpleReferenceManager<>(WeakSimpleReference::new, true); - directChildListenerReferences = - new SimpleReferenceManager<>(WeakSimpleReference::new, true); - childShiftAwareListenerReferences = - new SimpleReferenceManager<>(WeakSimpleReference::new, true); + directChildListenerReferences = new SimpleReferenceManager<>(WeakSimpleReference::new, true); + childShiftAwareListenerReferences = new SimpleReferenceManager<>(WeakSimpleReference::new, true); lastNotificationStep = LogicalClock.DEFAULT.currentStep(); } - private void 
readObject(ObjectInputStream objectInputStream) - throws IOException, ClassNotFoundException { + private void readObject(ObjectInputStream objectInputStream) throws IOException, ClassNotFoundException { objectInputStream.defaultReadObject(); initializeTransientFields(); } @@ -187,9 +184,9 @@ public Map getAttributes(Collection excludedAttrs) { return Collections.unmodifiableMap(attributes); } - return Collections.unmodifiableMap( - attributes.entrySet().stream().filter(ent -> !excludedAttrs.contains(ent.getKey())) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))); + return Collections + .unmodifiableMap(attributes.entrySet().stream().filter(ent -> !excludedAttrs.contains(ent.getKey())) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))); } public enum CopyAttributeOperation { @@ -216,11 +213,11 @@ public enum CopyAttributeOperation { private static final Map> attributeToCopySet; private static final EnumSet LEGACY_COPY_ATTRIBUTES = EnumSet.of( - CopyAttributeOperation.Flatten, - CopyAttributeOperation.Sort, - CopyAttributeOperation.UpdateView, - CopyAttributeOperation.Join, - CopyAttributeOperation.Filter); + CopyAttributeOperation.Flatten, + CopyAttributeOperation.Sort, + CopyAttributeOperation.UpdateView, + CopyAttributeOperation.Join, + CopyAttributeOperation.Filter); static { final HashMap> tempMap = new HashMap<>(); @@ -228,194 +225,191 @@ public enum CopyAttributeOperation { tempMap.put(INPUT_TABLE_ATTRIBUTE, LEGACY_COPY_ATTRIBUTES); tempMap.put(DO_NOT_MAKE_REMOTE_ATTRIBUTE, LEGACY_COPY_ATTRIBUTES); - // byExternal was creating the sub table with a bespoke ACL copy; we should copy ACLs there - // in addition to the legacy attributes - final EnumSet aclCopyAttributes = - EnumSet.copyOf(LEGACY_COPY_ATTRIBUTES); + // byExternal was creating the sub table with a bespoke ACL copy; we should copy ACLs there in addition to the + // legacy attributes + final EnumSet aclCopyAttributes = EnumSet.copyOf(LEGACY_COPY_ATTRIBUTES); 
aclCopyAttributes.addAll(EnumSet.of( - CopyAttributeOperation.FirstBy, - CopyAttributeOperation.LastBy, - CopyAttributeOperation.ByExternal, - CopyAttributeOperation.WouldMatch)); + CopyAttributeOperation.FirstBy, + CopyAttributeOperation.LastBy, + CopyAttributeOperation.ByExternal, + CopyAttributeOperation.WouldMatch)); - // for a merged table, we'll allow operations that keep our Index + column sources the same - // to break us down into constituent tables + // for a merged table, we'll allow operations that keep our Index + column sources the same to break us down + // into constituent tables tempMap.put(MERGED_TABLE_ATTRIBUTE, EnumSet.of( - CopyAttributeOperation.DropColumns, - CopyAttributeOperation.View)); + CopyAttributeOperation.DropColumns, + CopyAttributeOperation.View)); tempMap.put(EMPTY_SOURCE_TABLE_ATTRIBUTE, EnumSet.complementOf(EnumSet.of( - CopyAttributeOperation.Rollup, - CopyAttributeOperation.Treetable))); + CopyAttributeOperation.Rollup, + CopyAttributeOperation.Treetable))); tempMap.put(SORTABLE_COLUMNS_ATTRIBUTE, EnumSet.of( - CopyAttributeOperation.Flatten, - CopyAttributeOperation.Sort, - CopyAttributeOperation.Join, - CopyAttributeOperation.Filter, - CopyAttributeOperation.Reverse, - CopyAttributeOperation.Coalesce, - CopyAttributeOperation.ByExternal, - CopyAttributeOperation.WouldMatch, - CopyAttributeOperation.Preview)); + CopyAttributeOperation.Flatten, + CopyAttributeOperation.Sort, + CopyAttributeOperation.Join, + CopyAttributeOperation.Filter, + CopyAttributeOperation.Reverse, + CopyAttributeOperation.Coalesce, + CopyAttributeOperation.ByExternal, + CopyAttributeOperation.WouldMatch, + CopyAttributeOperation.Preview)); tempMap.put(FILTERABLE_COLUMNS_ATTRIBUTE, EnumSet.of( - CopyAttributeOperation.Flatten, - CopyAttributeOperation.Sort, - CopyAttributeOperation.Join, - CopyAttributeOperation.Filter, - CopyAttributeOperation.Reverse, - CopyAttributeOperation.Coalesce, - CopyAttributeOperation.ByExternal, - 
CopyAttributeOperation.WouldMatch, - CopyAttributeOperation.Preview)); + CopyAttributeOperation.Flatten, + CopyAttributeOperation.Sort, + CopyAttributeOperation.Join, + CopyAttributeOperation.Filter, + CopyAttributeOperation.Reverse, + CopyAttributeOperation.Coalesce, + CopyAttributeOperation.ByExternal, + CopyAttributeOperation.WouldMatch, + CopyAttributeOperation.Preview)); tempMap.put(LAYOUT_HINTS_ATTRIBUTE, EnumSet.allOf(CopyAttributeOperation.class)); tempMap.put(TOTALS_TABLE_ATTRIBUTE, EnumSet.of( - CopyAttributeOperation.Flatten, - CopyAttributeOperation.Sort, - CopyAttributeOperation.Filter, - CopyAttributeOperation.Reverse, - CopyAttributeOperation.ByExternal, - CopyAttributeOperation.Coalesce)); + CopyAttributeOperation.Flatten, + CopyAttributeOperation.Sort, + CopyAttributeOperation.Filter, + CopyAttributeOperation.Reverse, + CopyAttributeOperation.ByExternal, + CopyAttributeOperation.Coalesce)); tempMap.put(SYSTEMIC_TABLE_ATTRIBUTE, EnumSet.of(CopyAttributeOperation.None)); - // Column renderers propagate for all operations that will not rename, remove, or change - // column types + // Column renderers propagate for all operations that will not rename, remove, or change column types tempMap.put(COLUMN_RENDERERS_ATTRIBUTE, EnumSet.of( - CopyAttributeOperation.Flatten, - CopyAttributeOperation.Sort, - CopyAttributeOperation.Filter, - CopyAttributeOperation.Reverse, - CopyAttributeOperation.ByExternal, - CopyAttributeOperation.Coalesce, - CopyAttributeOperation.LastBy, - CopyAttributeOperation.FirstBy, - CopyAttributeOperation.Treetable, - CopyAttributeOperation.Preview)); + CopyAttributeOperation.Flatten, + CopyAttributeOperation.Sort, + CopyAttributeOperation.Filter, + CopyAttributeOperation.Reverse, + CopyAttributeOperation.ByExternal, + CopyAttributeOperation.Coalesce, + CopyAttributeOperation.LastBy, + CopyAttributeOperation.FirstBy, + CopyAttributeOperation.Treetable, + CopyAttributeOperation.Preview)); // Tree table attributes 
tempMap.put(HIERARCHICAL_CHILDREN_TABLE_MAP_ATTRIBUTE, EnumSet.of( - CopyAttributeOperation.DropColumns, - CopyAttributeOperation.Sort, - CopyAttributeOperation.Filter, - CopyAttributeOperation.Reverse, - CopyAttributeOperation.Coalesce, - CopyAttributeOperation.UpdateView, - CopyAttributeOperation.ByExternal, - CopyAttributeOperation.Flatten, - CopyAttributeOperation.RollupCopy)); + CopyAttributeOperation.DropColumns, + CopyAttributeOperation.Sort, + CopyAttributeOperation.Filter, + CopyAttributeOperation.Reverse, + CopyAttributeOperation.Coalesce, + CopyAttributeOperation.UpdateView, + CopyAttributeOperation.ByExternal, + CopyAttributeOperation.Flatten, + CopyAttributeOperation.RollupCopy)); tempMap.put(ROLLUP_LEAF_ATTRIBUTE, EnumSet.of( - CopyAttributeOperation.DropColumns, - CopyAttributeOperation.Sort, - CopyAttributeOperation.Filter, - CopyAttributeOperation.Reverse, - CopyAttributeOperation.Coalesce, - CopyAttributeOperation.UpdateView, - CopyAttributeOperation.ByExternal, - CopyAttributeOperation.Flatten)); + CopyAttributeOperation.DropColumns, + CopyAttributeOperation.Sort, + CopyAttributeOperation.Filter, + CopyAttributeOperation.Reverse, + CopyAttributeOperation.Coalesce, + CopyAttributeOperation.UpdateView, + CopyAttributeOperation.ByExternal, + CopyAttributeOperation.Flatten)); tempMap.put(TREE_TABLE_FILTER_REVERSE_LOOKUP_ATTRIBUTE, EnumSet.of( - CopyAttributeOperation.DropColumns, - CopyAttributeOperation.RollupCopy)); + CopyAttributeOperation.DropColumns, + CopyAttributeOperation.RollupCopy)); tempMap.put(HIERARCHICAL_SOURCE_TABLE_ATTRIBUTE, EnumSet.of( - CopyAttributeOperation.Sort, - CopyAttributeOperation.UpdateView, - CopyAttributeOperation.DropColumns, - CopyAttributeOperation.RollupCopy)); + CopyAttributeOperation.Sort, + CopyAttributeOperation.UpdateView, + CopyAttributeOperation.DropColumns, + CopyAttributeOperation.RollupCopy)); tempMap.put(HIERARCHICAL_SOURCE_INFO_ATTRIBUTE, EnumSet.of( - CopyAttributeOperation.Sort, - 
CopyAttributeOperation.DropColumns, - CopyAttributeOperation.UpdateView, - CopyAttributeOperation.RollupCopy)); + CopyAttributeOperation.Sort, + CopyAttributeOperation.DropColumns, + CopyAttributeOperation.UpdateView, + CopyAttributeOperation.RollupCopy)); tempMap.put(REVERSE_LOOKUP_ATTRIBUTE, EnumSet.of(CopyAttributeOperation.RollupCopy)); tempMap.put(PREPARED_RLL_ATTRIBUTE, EnumSet.of(CopyAttributeOperation.Filter)); tempMap.put(COLUMN_DESCRIPTIONS_ATTRIBUTE, EnumSet.of( - CopyAttributeOperation.Flatten, - CopyAttributeOperation.Sort, - CopyAttributeOperation.Filter, - CopyAttributeOperation.Reverse, - CopyAttributeOperation.ByExternal, - CopyAttributeOperation.Coalesce, - CopyAttributeOperation.FirstBy, - CopyAttributeOperation.LastBy, - CopyAttributeOperation.Treetable, - CopyAttributeOperation.TreetableCopy, - CopyAttributeOperation.Preview)); + CopyAttributeOperation.Flatten, + CopyAttributeOperation.Sort, + CopyAttributeOperation.Filter, + CopyAttributeOperation.Reverse, + CopyAttributeOperation.ByExternal, + CopyAttributeOperation.Coalesce, + CopyAttributeOperation.FirstBy, + CopyAttributeOperation.LastBy, + CopyAttributeOperation.Treetable, + CopyAttributeOperation.TreetableCopy, + CopyAttributeOperation.Preview)); tempMap.put(TABLE_DESCRIPTION_ATTRIBUTE, EnumSet.of( - CopyAttributeOperation.Flatten, - CopyAttributeOperation.Sort, - CopyAttributeOperation.Filter, - CopyAttributeOperation.Reverse, - CopyAttributeOperation.ByExternal, - CopyAttributeOperation.Coalesce, - CopyAttributeOperation.Treetable, - CopyAttributeOperation.TreetableCopy, - CopyAttributeOperation.Preview)); + CopyAttributeOperation.Flatten, + CopyAttributeOperation.Sort, + CopyAttributeOperation.Filter, + CopyAttributeOperation.Reverse, + CopyAttributeOperation.ByExternal, + CopyAttributeOperation.Coalesce, + CopyAttributeOperation.Treetable, + CopyAttributeOperation.TreetableCopy, + CopyAttributeOperation.Preview)); tempMap.put(SNAPSHOT_VIEWPORT_TYPE, 
EnumSet.allOf(CopyAttributeOperation.class)); tempMap.put(ADD_ONLY_TABLE_ATTRIBUTE, EnumSet.of( - CopyAttributeOperation.DropColumns, - CopyAttributeOperation.UpdateView, - CopyAttributeOperation.View, - CopyAttributeOperation.ByExternal, - CopyAttributeOperation.Coalesce)); + CopyAttributeOperation.DropColumns, + CopyAttributeOperation.UpdateView, + CopyAttributeOperation.View, + CopyAttributeOperation.ByExternal, + CopyAttributeOperation.Coalesce)); tempMap.put(PREDEFINED_ROLLUP_ATTRIBUTE, EnumSet.of( - CopyAttributeOperation.Flatten, - CopyAttributeOperation.Sort, - CopyAttributeOperation.Filter, - CopyAttributeOperation.Reverse, - CopyAttributeOperation.Coalesce)); + CopyAttributeOperation.Flatten, + CopyAttributeOperation.Sort, + CopyAttributeOperation.Filter, + CopyAttributeOperation.Reverse, + CopyAttributeOperation.Coalesce)); tempMap.put(PREVIEW_PARENT_TABLE, EnumSet.of(CopyAttributeOperation.Flatten)); // Key column and unique keys attributes - final EnumSet uniqueKeysCopyAttributes = - EnumSet.copyOf(LEGACY_COPY_ATTRIBUTES); + final EnumSet uniqueKeysCopyAttributes = EnumSet.copyOf(LEGACY_COPY_ATTRIBUTES); uniqueKeysCopyAttributes.add(CopyAttributeOperation.Reverse); uniqueKeysCopyAttributes.add(CopyAttributeOperation.WouldMatch); tempMap.put(UNIQUE_KEYS_ATTRIBUTE, uniqueKeysCopyAttributes); tempMap.put(KEY_COLUMNS_ATTRIBUTE, uniqueKeysCopyAttributes); tempMap.put(PLUGIN_NAME, EnumSet.of( - CopyAttributeOperation.Coalesce, - CopyAttributeOperation.Filter, - CopyAttributeOperation.Sort, - CopyAttributeOperation.Reverse, - CopyAttributeOperation.Flatten, - CopyAttributeOperation.LastBy, - CopyAttributeOperation.FirstBy, - CopyAttributeOperation.ByExternal, - CopyAttributeOperation.Preview)); + CopyAttributeOperation.Coalesce, + CopyAttributeOperation.Filter, + CopyAttributeOperation.Sort, + CopyAttributeOperation.Reverse, + CopyAttributeOperation.Flatten, + CopyAttributeOperation.LastBy, + CopyAttributeOperation.FirstBy, + 
CopyAttributeOperation.ByExternal, + CopyAttributeOperation.Preview)); tempMap.put(SORTED_COLUMNS_ATTRIBUTE, EnumSet.of( - CopyAttributeOperation.Flatten, - CopyAttributeOperation.Filter, - CopyAttributeOperation.ByExternal)); + CopyAttributeOperation.Flatten, + CopyAttributeOperation.Filter, + CopyAttributeOperation.ByExternal)); tempMap.put(STREAM_TABLE_ATTRIBUTE, EnumSet.of( - CopyAttributeOperation.Coalesce, - CopyAttributeOperation.Filter, - CopyAttributeOperation.Sort, - CopyAttributeOperation.Reverse, - CopyAttributeOperation.Flatten, - CopyAttributeOperation.ByExternal, - CopyAttributeOperation.Preview, - CopyAttributeOperation.View, // and Select, if added - CopyAttributeOperation.UpdateView, // and Update, if added - CopyAttributeOperation.DropColumns, - CopyAttributeOperation.Join, - CopyAttributeOperation.WouldMatch)); + CopyAttributeOperation.Coalesce, + CopyAttributeOperation.Filter, + CopyAttributeOperation.Sort, + CopyAttributeOperation.Reverse, + CopyAttributeOperation.Flatten, + CopyAttributeOperation.ByExternal, + CopyAttributeOperation.Preview, + CopyAttributeOperation.View, // and Select, if added + CopyAttributeOperation.UpdateView, // and Update, if added + CopyAttributeOperation.DropColumns, + CopyAttributeOperation.Join, + CopyAttributeOperation.WouldMatch)); attributeToCopySet = Collections.unmodifiableMap(tempMap); } @@ -425,8 +419,8 @@ static protected boolean shouldCopyAttribute(String attrName, CopyAttributeOpera } /** - * Copy this table's attributes to the specified table. Attributes will be copied based upon the - * input {@link CopyAttributeOperation}. + * Copy this table's attributes to the specified table. Attributes will be copied based upon the input + * {@link CopyAttributeOperation}. * * @param dest The table to copy attributes to * @param copyType The operation being performed that requires attributes to be copied. 
@@ -436,8 +430,7 @@ public void copyAttributes(Table dest, CopyAttributeOperation copyType) { } /** - * Copy this table's attributes to the specified table. Attributes are copied based on a - * predicate. + * Copy this table's attributes to the specified table. Attributes are copied based on a predicate. * * @param dest The table to copy attributes to * @param shouldCopy should we copy this attribute? @@ -447,8 +440,7 @@ public void copyAttributes(Table dest, Predicate shouldCopy) { } /** - * Copy attributes between tables. Attributes will be copied based upon the input - * {@link CopyAttributeOperation}. + * Copy attributes between tables. Attributes will be copied based upon the input {@link CopyAttributeOperation}. * * @param dest The table to copy attributes to * @param copyType The operation being performed that requires attributes to be copied. @@ -474,8 +466,8 @@ private static void copyAttributes(Table source, Table dest, Predicate s } /** - * Returns true if this table is static, or has an attribute asserting that no modifies, shifts, - * or removals are generated. + * Returns true if this table is static, or has an attribute asserting that no modifies, shifts, or removals are + * generated. 
* * @return true if this table does not produce modifications, shifts, or removals */ @@ -530,12 +522,11 @@ public boolean satisfied(final long step) { // noinspection SynchronizeOnNonFinalField synchronized (parents) { - // If we have no parents whatsoever then we are a source, and have no dependency chain - // other than the LTM itself + // If we have no parents whatsoever then we are a source, and have no dependency chain other than the LTM + // itself if (parents.isEmpty()) { if (LiveTableMonitor.DEFAULT.satisfied(step)) { - LiveTableMonitor.DEFAULT.logDependencies().append("Root node satisfied ") - .append(this).endl(); + LiveTableMonitor.DEFAULT.logDependencies().append("Root node satisfied ").append(this).endl(); return true; } return false; @@ -544,18 +535,15 @@ public boolean satisfied(final long step) { for (Object parent : parents) { if (parent instanceof NotificationQueue.Dependency) { if (!((NotificationQueue.Dependency) parent).satisfied(step)) { - LiveTableMonitor.DEFAULT.logDependencies() - .append("Parents dependencies not satisfied for ").append(this) - .append(", parent=").append((NotificationQueue.Dependency) parent) - .endl(); + LiveTableMonitor.DEFAULT.logDependencies().append("Parents dependencies not satisfied for ") + .append(this).append(", parent=").append((NotificationQueue.Dependency) parent).endl(); return false; } } } } - LiveTableMonitor.DEFAULT.logDependencies().append("All parents dependencies satisfied ") - .append(this).endl(); + LiveTableMonitor.DEFAULT.logDependencies().append("All parents dependencies satisfied ").append(this).endl(); lastSatisfiedStep = step; @@ -570,8 +558,8 @@ public void awaitUpdate() throws InterruptedException { @Override public boolean awaitUpdate(long timeout) throws InterruptedException { final MutableBoolean result = new MutableBoolean(false); - LiveTableMonitor.DEFAULT.exclusiveLock().doLocked( - () -> result.setValue(liveTableMonitorCondition.await(timeout, TimeUnit.MILLISECONDS))); + 
LiveTableMonitor.DEFAULT.exclusiveLock() + .doLocked(() -> result.setValue(liveTableMonitorCondition.await(timeout, TimeUnit.MILLISECONDS))); return result.booleanValue(); } @@ -586,8 +574,7 @@ public void listenForUpdates(final Listener listener, final boolean replayInitia } if (replayInitialImage && getIndex().nonempty()) { listener.setInitialImage(getIndex()); - listener.onUpdate(getIndex(), Index.FACTORY.getEmptyIndex(), - Index.FACTORY.getEmptyIndex()); + listener.onUpdate(getIndex(), Index.FACTORY.getEmptyIndex(), Index.FACTORY.getEmptyIndex()); } } @@ -628,7 +615,7 @@ public void removeDirectUpdateListener(final Listener listenerToRemove) { @Override public final void notifyListenersOnError(final Throwable e, - @Nullable final UpdatePerformanceTracker.Entry sourceEntry) { + @Nullable final UpdatePerformanceTracker.Entry sourceEntry) { isFailed = true; LiveTableMonitor.DEFAULT.requestSignal(liveTableMonitorCondition); @@ -641,11 +628,10 @@ public final void notifyListenersOnError(final Throwable e, lastNotificationStep = LogicalClock.DEFAULT.currentStep(); childListenerReferences.forEach((listenerRef, listener) -> LiveTableMonitor.DEFAULT - .addNotification(listener.getErrorNotification(e, sourceEntry))); + .addNotification(listener.getErrorNotification(e, sourceEntry))); // Notify ShiftAwareListeners - childShiftAwareListenerReferences - .forEach((listenerRef, listener) -> LiveTableMonitor.DEFAULT + childShiftAwareListenerReferences.forEach((listenerRef, listener) -> LiveTableMonitor.DEFAULT .addNotification(listener.getErrorNotification(e, sourceEntry))); } @@ -666,7 +652,7 @@ public boolean isFailed() { public boolean hasListeners() { return !childListenerReferences.isEmpty() || !directChildListenerReferences.isEmpty() - || !childShiftAwareListenerReferences.isEmpty(); + || !childShiftAwareListenerReferences.isEmpty(); } @Override @@ -682,8 +668,7 @@ public final void notifyListeners(final ShiftAwareListener.Update update) { final boolean hasNoListeners = 
!hasListeners(); if (hasNoListeners) { final long currentStep = LogicalClock.DEFAULT.currentStep(); - Assert.lt(lastNotificationStep, "lastNotificationStep", currentStep, - "LogicalClock.DEFAULT.currentStep()"); + Assert.lt(lastNotificationStep, "lastNotificationStep", currentStep, "LogicalClock.DEFAULT.currentStep()"); lastNotificationStep = currentStep; update.release(); return; @@ -709,8 +694,8 @@ public final void notifyListeners(final ShiftAwareListener.Update update) { update.removed.validate(); update.modified.validate(); update.shifted.validate(); - Assert.eq(update.modified.empty(), "update.modified.empty()", - update.modifiedColumnSet.empty(), "update.modifiedColumnSet.empty()"); + Assert.eq(update.modified.empty(), "update.modified.empty()", update.modifiedColumnSet.empty(), + "update.modifiedColumnSet.empty()"); } if (VALIDATE_UPDATE_OVERLAPS) { @@ -719,10 +704,10 @@ public final void notifyListeners(final ShiftAwareListener.Update update) { // Expand if we are testing or have children listening using old Listener API. final boolean childNeedsExpansion = - !directChildListenerReferences.isEmpty() || !childListenerReferences.isEmpty(); + !directChildListenerReferences.isEmpty() || !childListenerReferences.isEmpty(); final IndexShiftDataExpander shiftExpander = childNeedsExpansion - ? new IndexShiftDataExpander(update, getIndex()) - : IndexShiftDataExpander.EMPTY; + ? new IndexShiftDataExpander(update, getIndex()) + : IndexShiftDataExpander.EMPTY; if (childNeedsExpansion && VALIDATE_UPDATE_OVERLAPS) { // Check that expansion is valid w.r.t. historical expectations. 
@@ -731,27 +716,24 @@ public final void notifyListeners(final ShiftAwareListener.Update update) { // tables may only be updated once per cycle final long currentStep = LogicalClock.DEFAULT.currentStep(); - Assert.lt(lastNotificationStep, "lastNotificationStep", currentStep, - "LogicalClock.DEFAULT.currentStep()"); + Assert.lt(lastNotificationStep, "lastNotificationStep", currentStep, "LogicalClock.DEFAULT.currentStep()"); lastNotificationStep = currentStep; // notify direct children - directChildListenerReferences - .forEach((listenerRef, listener) -> listener.onUpdate(shiftExpander.getAdded(), + directChildListenerReferences.forEach((listenerRef, listener) -> listener.onUpdate(shiftExpander.getAdded(), shiftExpander.getRemoved(), shiftExpander.getModified())); // notify non-direct children final NotificationQueue notificationQueue = getNotificationQueue(); childListenerReferences.forEach((listenerRef, listener) -> { final NotificationQueue.IndexUpdateNotification notification = - listener.getNotification(shiftExpander.getAdded(), shiftExpander.getRemoved(), - shiftExpander.getModified()); + listener.getNotification(shiftExpander.getAdded(), shiftExpander.getRemoved(), + shiftExpander.getModified()); notificationQueue.addNotification(notification); }); childShiftAwareListenerReferences.forEach((listenerRef, listener) -> { - final NotificationQueue.IndexUpdateNotification notification = - listener.getNotification(update); + final NotificationQueue.IndexUpdateNotification notification = listener.getNotification(update); notificationQueue.addNotification(notification); }); @@ -781,32 +763,28 @@ private void validateUpdateOverlaps(final ShiftAwareListener.Update update) { } if (!previousMissingRemovals && !currentMissingAdds && !currentMissingModifications && - (!currentContainsRemovals || !update.shifted.empty())) { + (!currentContainsRemovals || !update.shifted.empty())) { return; } - // Excuse the sloppiness in Index closing after this point, we're planning to crash 
the - // process anyway... + // Excuse the sloppiness in Index closing after this point, we're planning to crash the process anyway... String serializedIndices = null; if (PRINT_SERIALIZED_UPDATE_OVERLAPS) { - // The indices are really rather complicated, if we fail this check let's generate a - // serialized representation - // of them that can later be loaded into a debugger. If this fails, we'll ignore it and - // continue with our + // The indices are really rather complicated, if we fail this check let's generate a serialized + // representation + // of them that can later be loaded into a debugger. If this fails, we'll ignore it and continue with our // regularly scheduled exception. try { final StringBuilder outputBuffer = new StringBuilder(); final ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(); - final ObjectOutputStream objectOutputStream = - new ObjectOutputStream(byteArrayOutputStream); + final ObjectOutputStream objectOutputStream = new ObjectOutputStream(byteArrayOutputStream); final BiConsumer append = (name, obj) -> { try { objectOutputStream.writeObject(obj); outputBuffer.append(name); - outputBuffer - .append(Base64.byteArrayToBase64(byteArrayOutputStream.toByteArray())); + outputBuffer.append(Base64.byteArrayToBase64(byteArrayOutputStream.toByteArray())); byteArrayOutputStream.reset(); objectOutputStream.reset(); } catch (final Exception ignored) { @@ -825,34 +803,27 @@ private void validateUpdateOverlaps(final ShiftAwareListener.Update update) { } } - // If we're still here, we know that things are off the rails, and we want to fire the - // assertion + // If we're still here, we know that things are off the rails, and we want to fire the assertion final Index removalsMinusPrevious = update.removed.minus(getIndex().getPrevIndex()); final Index addedMinusCurrent = update.added.minus(getIndex()); final Index removedIntersectCurrent = update.removed.intersect(getIndex()); final Index modifiedMinusCurrent = 
update.modified.minus(getIndex()); - // Everything is messed up for this table, print out the indices in an easy to understand - // way + // Everything is messed up for this table, print out the indices in an easy to understand way final LogOutput logOutput = new LogOutputStringImpl() - .append("Index update error detected: ") - .append(LogOutput::nl).append("\t previousIndex=") - .append(getIndex().getPrevIndex()) - .append(LogOutput::nl).append("\t currentIndex=").append(getIndex()) - .append(LogOutput::nl).append("\t added=").append(update.added) - .append(LogOutput::nl).append("\t removed=").append(update.removed) - .append(LogOutput::nl).append("\t modified=").append(update.modified) - .append(LogOutput::nl).append("\t shifted=") - .append(update.shifted.toString()) - .append(LogOutput::nl).append("\t removalsMinusPrevious=") - .append(removalsMinusPrevious) - .append(LogOutput::nl).append("\t addedMinusCurrent=").append(addedMinusCurrent) - .append(LogOutput::nl).append("\t modifiedMinusCurrent=") - .append(modifiedMinusCurrent); + .append("Index update error detected: ") + .append(LogOutput::nl).append("\t previousIndex=").append(getIndex().getPrevIndex()) + .append(LogOutput::nl).append("\t currentIndex=").append(getIndex()) + .append(LogOutput::nl).append("\t added=").append(update.added) + .append(LogOutput::nl).append("\t removed=").append(update.removed) + .append(LogOutput::nl).append("\t modified=").append(update.modified) + .append(LogOutput::nl).append("\t shifted=").append(update.shifted.toString()) + .append(LogOutput::nl).append("\t removalsMinusPrevious=").append(removalsMinusPrevious) + .append(LogOutput::nl).append("\t addedMinusCurrent=").append(addedMinusCurrent) + .append(LogOutput::nl).append("\t modifiedMinusCurrent=").append(modifiedMinusCurrent); if (update.shifted.empty()) { - logOutput.append(LogOutput::nl).append("\tremovedIntersectCurrent=") - .append(removedIntersectCurrent); + 
logOutput.append(LogOutput::nl).append("\tremovedIntersectCurrent=").append(removedIntersectCurrent); } final String indexUpdateErrorMessage = logOutput.toString(); @@ -860,20 +831,18 @@ private void validateUpdateOverlaps(final ShiftAwareListener.Update update) { log.error().append(indexUpdateErrorMessage).endl(); if (serializedIndices != null) { - log.error().append("Index update error detected: serialized data=") - .append(serializedIndices).endl(); + log.error().append("Index update error detected: serialized data=").append(serializedIndices).endl(); } Assert.assertion(false, "!(previousMissingRemovals || currentMissingAdds || " + - "currentMissingModifications || (currentContainsRemovals && shifted.empty()))", - indexUpdateErrorMessage); + "currentMissingModifications || (currentContainsRemovals && shifted.empty()))", + indexUpdateErrorMessage); } /** - * Get the notification queue to insert notifications into as they are generated by listeners - * during {@link #notifyListeners(Index, Index, Index)}. This method may be overridden to - * provide a different notification queue than the {@link LiveTableMonitor#DEFAULT} instance for - * more complex behavior. + * Get the notification queue to insert notifications into as they are generated by listeners during + * {@link #notifyListeners(Index, Index, Index)}. This method may be overridden to provide a different notification + * queue than the {@link LiveTableMonitor#DEFAULT} instance for more complex behavior. * * @return The {@link NotificationQueue} to add to. */ @@ -902,9 +871,9 @@ public void markSystemic() { } /** - * Simplest appropriate legacy InstrumentedListener implementation for BaseTable and - * descendants. It's expected that most use-cases will require overriding onUpdate() - the - * default implementation simply passes index updates through to the dependent's listeners. + * Simplest appropriate legacy InstrumentedListener implementation for BaseTable and descendants. 
It's expected that + * most use-cases will require overriding onUpdate() - the default implementation simply passes index updates + * through to the dependent's listeners. * * It is preferred to use {@link ShiftAwareListenerImpl} over {@link ListenerImpl} */ @@ -926,14 +895,12 @@ public ListenerImpl(String description, DynamicTable parent, DynamicTable depend @Override public void onUpdate(Index added, Index removed, Index modified) { - dependent.notifyListeners( - new ShiftAwareListener.Update(added.clone(), removed.clone(), modified.clone(), + dependent.notifyListeners(new ShiftAwareListener.Update(added.clone(), removed.clone(), modified.clone(), IndexShiftData.EMPTY, ModifiedColumnSet.ALL)); } @Override - public final void onFailureInternal(Throwable originalException, - UpdatePerformanceTracker.Entry sourceEntry) { + public final void onFailureInternal(Throwable originalException, UpdatePerformanceTracker.Entry sourceEntry) { onFailureInternalWithDependent(dependent, originalException, sourceEntry); } @@ -951,9 +918,9 @@ protected void destroy() { } /** - * Simplest appropriate InstrumentedShiftAwareListener implementation for BaseTable and - * descendants. It's expected that most use-cases will require overriding onUpdate() - the - * default implementation simply passes index updates through to the dependent's listeners. + * Simplest appropriate InstrumentedShiftAwareListener implementation for BaseTable and descendants. It's expected + * that most use-cases will require overriding onUpdate() - the default implementation simply passes index updates + * through to the dependent's listeners. 
*/ public static class ShiftAwareListenerImpl extends InstrumentedShiftAwareListener { @@ -962,8 +929,7 @@ public static class ShiftAwareListenerImpl extends InstrumentedShiftAwareListene private final DynamicTable dependent; private final boolean canReuseModifiedColumnSet; - public ShiftAwareListenerImpl(String description, DynamicTable parent, - DynamicTable dependent) { + public ShiftAwareListenerImpl(String description, DynamicTable parent, DynamicTable dependent) { super(description); this.parent = parent; this.dependent = dependent; @@ -974,11 +940,9 @@ public ShiftAwareListenerImpl(String description, DynamicTable parent, if (parent instanceof QueryTable && dependent instanceof QueryTable) { final QueryTable pqt = (QueryTable) parent; final QueryTable dqt = (QueryTable) dependent; - canReuseModifiedColumnSet = - !pqt.modifiedColumnSet.requiresTransformer(dqt.modifiedColumnSet); + canReuseModifiedColumnSet = !pqt.modifiedColumnSet.requiresTransformer(dqt.modifiedColumnSet); } else { - // We cannot reuse the modifiedColumnSet since there are no assumptions that can be - // made w.r.t. parent's + // We cannot reuse the modifiedColumnSet since there are no assumptions that can be made w.r.t. parent's // and dependent's column source mappings. canReuseModifiedColumnSet = false; } @@ -997,8 +961,7 @@ public void onUpdate(final Update upstream) { } @Override - public void onFailureInternal(Throwable originalException, - UpdatePerformanceTracker.Entry sourceEntry) { + public void onFailureInternal(Throwable originalException, UpdatePerformanceTracker.Entry sourceEntry) { onFailureInternalWithDependent(dependent, originalException, sourceEntry); } @@ -1021,13 +984,12 @@ protected DynamicTable getParent() { @Override public Table withKeys(String... 
columns) { if (columns == null || columns.length == 0) { - throw new IllegalArgumentException( - "withKeys() must be called with at least one key column"); + throw new IllegalArgumentException("withKeys() must be called with at least one key column"); } if (getAttribute(Table.KEY_COLUMNS_ATTRIBUTE) != null) { throw new IllegalStateException( - "This table already has a set of key columns. You must create a new table in order to use different keys"); + "This table already has a set of key columns. You must create a new table in order to use different keys"); } checkAvailableColumns(columns); @@ -1049,8 +1011,7 @@ public Table restrictSortTo(String... allowedSortingColumns) { Assert.neqNull(allowedSortingColumns, "allowedSortingColumns"); checkAvailableColumns(allowedSortingColumns); - setAttribute(Table.SORTABLE_COLUMNS_ATTRIBUTE, - StringUtils.joinStrings(allowedSortingColumns, ",")); + setAttribute(Table.SORTABLE_COLUMNS_ATTRIBUTE, StringUtils.joinStrings(allowedSortingColumns, ",")); return this; } @@ -1069,8 +1030,8 @@ public Table layoutHints(String hints) { private void checkAvailableColumns(String[] columns) { final Map sourceMap = getColumnSourceMap(); - final String[] missingColumns = Arrays.stream(columns) - .filter(col -> !sourceMap.containsKey(col)).toArray(String[]::new); + final String[] missingColumns = + Arrays.stream(columns).filter(col -> !sourceMap.containsKey(col)).toArray(String[]::new); if (missingColumns.length > 0) { throw new NoSuchColumnException(sourceMap.keySet(), Arrays.asList(missingColumns)); @@ -1084,7 +1045,7 @@ void maybeUpdateSortableColumns(Table destination) { } destination.restrictSortTo(Arrays.stream(currentSortableColumns.split(",")) - .filter(destination.getColumnSourceMap()::containsKey).toArray(String[]::new)); + .filter(destination.getColumnSourceMap()::containsKey).toArray(String[]::new)); } void maybeUpdateSortableColumns(Table destination, MatchPair[] renamedColumns) { @@ -1096,8 +1057,7 @@ void 
maybeUpdateSortableColumns(Table destination, MatchPair[] renamedColumns) { final BiMap columnMapping = HashBiMap.create(); // Create a bi-directional map of New -> Old column name so we can see if - // a) A column that was sortable in the old table has been renamed & we should make the new - // column sortable + // a) A column that was sortable in the old table has been renamed & we should make the new column sortable // b) The original column exists, and has not been replaced by another. For example // T1 = [ Col1, Col2, Col3 ]; T1.renameColumns(Col1=Col3, Col2]; if (renamedColumns != null) { @@ -1109,17 +1069,14 @@ void maybeUpdateSortableColumns(Table destination, MatchPair[] renamedColumns) { final Set sortableColumns = new HashSet<>(); - // Process the original set of sortable columns, adding them to the new set if one of the - // below - // 1) The column exists in the new table and was not renamed in any way but the Identity (C1 - // = C1) + // Process the original set of sortable columns, adding them to the new set if one of the below + // 1) The column exists in the new table and was not renamed in any way but the Identity (C1 = C1) // 2) The column does not exist in the new table, but was renamed to another (C2 = C1) final Map sourceMap = destination.getColumnSourceMap(); for (String col : currentSortableColumns.split(",")) { // Only add it to the set of sortable columns if it hasn't changed in an unknown way final String maybeRenamedColumn = columnMapping.get(col); - if (sourceMap.get(col) != null - && (maybeRenamedColumn == null || maybeRenamedColumn.equals(col))) { + if (sourceMap.get(col) != null && (maybeRenamedColumn == null || maybeRenamedColumn.equals(col))) { sortableColumns.add(col); } else { final String newName = columnMapping.inverse().get(col); @@ -1130,8 +1087,7 @@ void maybeUpdateSortableColumns(Table destination, MatchPair[] renamedColumns) { } // Apply the new mapping to the result table. 
- destination - .restrictSortTo(sortableColumns.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)); + destination.restrictSortTo(sortableColumns.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)); } void maybeUpdateSortableColumns(Table destination, SelectColumn[] selectCols) { @@ -1140,8 +1096,7 @@ void maybeUpdateSortableColumns(Table destination, SelectColumn[] selectCols) { return; } - final Set currentSortableSet = - CollectionUtil.setFromArray(currentSortableColumns.split(",")); + final Set currentSortableSet = CollectionUtil.setFromArray(currentSortableColumns.split(",")); final Set newSortableSet = new HashSet<>(); for (SelectColumn sc : selectCols) { @@ -1149,8 +1104,7 @@ void maybeUpdateSortableColumns(Table destination, SelectColumn[] selectCols) { if (sc instanceof SourceColumn) { realColumn = (SourceColumn) sc; - } else if (sc instanceof SwitchColumn - && ((SwitchColumn) sc).getRealColumn() instanceof SourceColumn) { + } else if (sc instanceof SwitchColumn && ((SwitchColumn) sc).getRealColumn() instanceof SourceColumn) { realColumn = (SourceColumn) ((SwitchColumn) sc).getRealColumn(); } else { newSortableSet.remove(sc.getName()); @@ -1196,47 +1150,43 @@ void assertSortable(String... columns) throws NotSortableException { sortableColSet = Arrays.asList(sortable.split(",")); } - // TODO: This is hacky. DbSortedFilteredTableModel will update the table with __ABS__ - // prefixed columns + // TODO: This is hacky. DbSortedFilteredTableModel will update the table with __ABS__ prefixed columns // TODO: when the user requests to sort absolute. final Set unsortable = Arrays.stream(columns) - .map(cn -> cn.startsWith("__ABS__") ? cn.replace("__ABS__", "") : cn) - .collect(Collectors.toSet()); + .map(cn -> cn.startsWith("__ABS__") ? 
cn.replace("__ABS__", "") : cn).collect(Collectors.toSet()); unsortable.removeAll(sortableColSet); if (unsortable.isEmpty()) { return; } - // If this is null, we never should have gotten to this point because _all_ columns are - // sortable. + // If this is null, we never should have gotten to this point because _all_ columns are sortable. Assert.neqNull(sortable, "sortable"); throw new NotSortableException(unsortable, sortableColSet); } /** - * Copy all valid column-descriptions from this table's attributes to the destination table's - * attributes + * Copy all valid column-descriptions from this table's attributes to the destination table's attributes * * @param destination the table which shall possibly have a column-description attribute created */ void maybeCopyColumnDescriptions(final Table destination) { final Map sourceDescriptions = - (Map) getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE); + (Map) getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE); maybeCopyColumnDescriptions(destination, sourceDescriptions); } /** - * Copy all valid column-descriptions from this table's attributes to the destination table's - * attributes after a `renameColumns()` operation + * Copy all valid column-descriptions from this table's attributes to the destination table's attributes after a + * `renameColumns()` operation * * @param destination the table which shall possibly have a column-description attribute created * @param renamedColumns an array of the columns which have been renamed */ void maybeCopyColumnDescriptions(final Table destination, final MatchPair[] renamedColumns) { final Map oldDescriptions = - (Map) getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE); + (Map) getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE); if (oldDescriptions == null || oldDescriptions.isEmpty()) { return; // short-circuit; there are no column-descriptions in this operation @@ -1256,17 +1206,16 @@ void maybeCopyColumnDescriptions(final Table destination, final MatchPair[] rena } /** - * 
Copy all valid column-descriptions from this table's attributes to the destination table's - * attributes after an `update()` operation. Any column which is possibly being updated as part - * of this operation will have their description invalidated + * Copy all valid column-descriptions from this table's attributes to the destination table's attributes after an + * `update()` operation. Any column which is possibly being updated as part of this operation will have their + * description invalidated * * @param destination the table which shall possibly have a column-description attribute created - * @param selectColumns columns which may be changed during this operation, and have their - * descriptions invalidated + * @param selectColumns columns which may be changed during this operation, and have their descriptions invalidated */ void maybeCopyColumnDescriptions(final Table destination, final SelectColumn[] selectColumns) { final Map oldDescriptions = - (Map) getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE); + (Map) getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE); if (oldDescriptions == null || oldDescriptions.isEmpty()) { return; // short-circuit; there are no column-descriptions in this operation @@ -1283,43 +1232,42 @@ void maybeCopyColumnDescriptions(final Table destination, final SelectColumn[] s } /** - * Copy all valid column-descriptions from this table's attributes to the destination table's - * attributes after a `join()` operation. The left-table descriptions will be left as-is, and - * the added columns from the right-table will be added to the destination-table. Joining - * column-descriptions will come from the right-table IFF there is no description for the column - * on the left-table + * Copy all valid column-descriptions from this table's attributes to the destination table's attributes after a + * `join()` operation. 
The left-table descriptions will be left as-is, and the added columns from the right-table + * will be added to the destination-table. Joining column-descriptions will come from the right-table IFF there is + * no description for the column on the left-table * * @param destination the table which shall possibly have a column-description attribute created * @param rightTable the right-side table, from where column-descriptions may be copied * @param joinedColumns the columns on which this table is being joined * @param addColumns the right-table's columns which are being added by the join operation */ - void maybeCopyColumnDescriptions(final Table destination, final Table rightTable, - final MatchPair[] joinedColumns, final MatchPair[] addColumns) { + void maybeCopyColumnDescriptions(final Table destination, final Table rightTable, final MatchPair[] joinedColumns, + final MatchPair[] addColumns) { final Map leftDescriptions = - (Map) getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE); + (Map) getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE); final Map rightDescriptions = - (Map) rightTable.getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE); + (Map) rightTable.getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE); if ((leftDescriptions == null || leftDescriptions.isEmpty()) - && (rightDescriptions == null || rightDescriptions.isEmpty())) { + && (rightDescriptions == null || rightDescriptions.isEmpty())) { return; // short-circuit; there are no column-descriptions in this operation } // start with the left-table descriptions, if any final Map sourceDescriptions = - leftDescriptions == null ? new HashMap<>() : new HashMap<>(leftDescriptions); + leftDescriptions == null ? new HashMap<>() : new HashMap<>(leftDescriptions); // and join the right-table descriptions, if any if (rightDescriptions != null && !rightDescriptions.isEmpty()) { Stream.concat(joinedColumns == null ? Stream.empty() : Arrays.stream(joinedColumns), - addColumns == null ? 
Stream.empty() : Arrays.stream(addColumns)) - .forEach(mp -> { - final String desc = rightDescriptions.get(mp.right()); - if (desc != null) { - sourceDescriptions.putIfAbsent(mp.left(), desc); - } - }); + addColumns == null ? Stream.empty() : Arrays.stream(addColumns)) + .forEach(mp -> { + final String desc = rightDescriptions.get(mp.right()); + if (desc != null) { + sourceDescriptions.putIfAbsent(mp.left(), desc); + } + }); } maybeCopyColumnDescriptions(destination, sourceDescriptions); @@ -1332,7 +1280,7 @@ void maybeCopyColumnDescriptions(final Table destination, final Table rightTable * @param sourceDescriptions column name->description mapping */ private static void maybeCopyColumnDescriptions(final Table destination, - final Map sourceDescriptions) { + final Map sourceDescriptions) { if (sourceDescriptions == null || sourceDescriptions.isEmpty()) { return; // short-circuit; there are no column-descriptions in this operation } @@ -1359,16 +1307,14 @@ public Table copy() { return QueryPerformanceRecorder.withNugget("copy()", sizeForInstrumentation(), () -> { final Mutable

    result = new MutableObject<>(); - final ShiftAwareSwapListener swapListener = - createSwapListenerIfRefreshing(ShiftAwareSwapListener::new); + final ShiftAwareSwapListener swapListener = createSwapListenerIfRefreshing(ShiftAwareSwapListener::new); initializeWithSnapshot("copy", swapListener, (usePrev, beforeClockValue) -> { final QueryTable resultTable = (QueryTable) getSubTable(getIndex()); propagateFlatness(resultTable); copyAttributes(resultTable, a -> true); if (swapListener != null) { - final ShiftAwareListenerImpl listener = - new ShiftAwareListenerImpl("copy()", this, resultTable); + final ShiftAwareListenerImpl listener = new ShiftAwareListenerImpl("copy()", this, resultTable); swapListener.setListenerAndResult(listener, resultTable); resultTable.addParentReference(swapListener); } @@ -1396,18 +1342,18 @@ public Table setColumnRenderers(ColumnRenderersBuilder builder) { } final Set existingColumns = getDefinition().getColumnNames() - .stream() - .filter(column -> !ColumnFormattingValues.isFormattingColumn(column)) - .collect(Collectors.toSet()); + .stream() + .filter(column -> !ColumnFormattingValues.isFormattingColumn(column)) + .collect(Collectors.toSet()); final String[] unknownColumns = builder.getColumnSet() - .stream() - .filter(column -> !existingColumns.contains(column)) - .toArray(String[]::new); + .stream() + .filter(column -> !existingColumns.contains(column)) + .toArray(String[]::new); if (unknownColumns.length > 0) { - throw new RuntimeException("Unknown columns: " + Arrays.toString(unknownColumns) - + ", available columns = " + existingColumns); + throw new RuntimeException( + "Unknown columns: " + Arrays.toString(unknownColumns) + ", available columns = " + existingColumns); } final Table result = copy(); @@ -1416,14 +1362,13 @@ public Table setColumnRenderers(ColumnRenderersBuilder builder) { } public void initializeWithSnapshot( - String logPrefix, SL swapListener, ConstructSnapshot.SnapshotFunction snapshotFunction) { + String 
logPrefix, SL swapListener, ConstructSnapshot.SnapshotFunction snapshotFunction) { if (swapListener == null) { Assert.eqFalse(isRefreshing(), "isRefreshing"); snapshotFunction.call(false, LogicalClock.DEFAULT.currentValue()); return; } - ConstructSnapshot.callDataSnapshotFunction(logPrefix, swapListener.makeSnapshotControl(), - snapshotFunction); + ConstructSnapshot.callDataSnapshotFunction(logPrefix, swapListener.makeSnapshotControl(), snapshotFunction); } public interface SwapListenerFactory { @@ -1438,8 +1383,7 @@ public interface SwapListenerFactory { * @return a swap listener for this table (or null) */ @Nullable - public T createSwapListenerIfRefreshing( - final SwapListenerFactory factory) { + public T createSwapListenerIfRefreshing(final SwapListenerFactory factory) { if (!isRefreshing()) { return null; } @@ -1455,8 +1399,8 @@ public T createSwapListenerIfRefreshing( *

    * *

    - * This function is for use when the result table shares an Index; such that if this table is - * flat, the result table must also be flat. + * This function is for use when the result table shares an Index; such that if this table is flat, the result table + * must also be flat. *

    * * @param result the table derived from this table @@ -1474,10 +1418,8 @@ void propagateFlatness(QueryTable result) { @Override protected void destroy() { super.destroy(); - // NB: We should not assert things about empty listener lists, here, given that listener - // cleanup might never - // happen or happen out of order if the listeners were GC'd and not explicitly left - // unmanaged. + // NB: We should not assert things about empty listener lists, here, given that listener cleanup might never + // happen or happen out of order if the listeners were GC'd and not explicitly left unmanaged. childListenerReferences.clear(); directChildListenerReferences.clear(); parents.clear(); @@ -1493,22 +1435,20 @@ public Table withTableDescription(String description) { @Override public Table withColumnDescription(Map descriptions) { if (!hasColumns(descriptions.keySet())) { - final Map existingColumns = - getDefinition().getColumnNameMap(); + final Map existingColumns = getDefinition().getColumnNameMap(); throw new IllegalArgumentException( - "Cannot set column descriptions. The table does not contain the following columns [ " - + - descriptions.keySet().stream() - .filter(col -> !existingColumns.containsKey(col)) - .collect(Collectors.joining(", ")) - + " ]"); + "Cannot set column descriptions. 
The table does not contain the following columns [ " + + descriptions.keySet().stream() + .filter(col -> !existingColumns.containsKey(col)) + .collect(Collectors.joining(", ")) + + " ]"); } final Table result = copy(); // noinspection unchecked Map existingDescriptions = - (Map) result.getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE); + (Map) result.getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE); if (existingDescriptions == null) { existingDescriptions = new HashMap<>(); result.setAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE, existingDescriptions); diff --git a/DB/src/main/java/io/deephaven/db/v2/BucketingContext.java b/DB/src/main/java/io/deephaven/db/v2/BucketingContext.java index 63ccf908803..12c950f7b9c 100644 --- a/DB/src/main/java/io/deephaven/db/v2/BucketingContext.java +++ b/DB/src/main/java/io/deephaven/db/v2/BucketingContext.java @@ -37,31 +37,27 @@ class BucketingContext implements SafeCloseable { long maximumUniqueValue = Integer.MAX_VALUE; long minimumUniqueValue = Integer.MIN_VALUE; - BucketingContext(final String listenerPrefix, final QueryTable leftTable, - final QueryTable rightTable, MatchPair[] columnsToMatch, MatchPair[] columnsToAdd, - JoinControl control) { + BucketingContext(final String listenerPrefix, final QueryTable leftTable, final QueryTable rightTable, + MatchPair[] columnsToMatch, MatchPair[] columnsToAdd, JoinControl control) { final List conflicts = Arrays.stream(columnsToAdd).map(MatchPair::left) - .filter(cn -> leftTable.getColumnSourceMap().containsKey(cn)) - .collect(Collectors.toList()); + .filter(cn -> leftTable.getColumnSourceMap().containsKey(cn)).collect(Collectors.toList()); if (!conflicts.isEmpty()) { throw new RuntimeException("Conflicting column names " + conflicts); } - listenerDescription = listenerPrefix + "(" + matchString(columnsToMatch) + ", " - + matchString(columnsToAdd) + ")"; + listenerDescription = + listenerPrefix + "(" + matchString(columnsToMatch) + ", " + matchString(columnsToAdd) + ")"; - 
leftSources = Arrays.stream(columnsToMatch) - .map(mp -> leftTable.getColumnSource(mp.leftColumn)).toArray(ColumnSource[]::new); - rightSources = Arrays.stream(columnsToMatch) - .map(mp -> rightTable.getColumnSource(mp.rightColumn)).toArray(ColumnSource[]::new); + leftSources = Arrays.stream(columnsToMatch).map(mp -> leftTable.getColumnSource(mp.leftColumn)) + .toArray(ColumnSource[]::new); + rightSources = Arrays.stream(columnsToMatch).map(mp -> rightTable.getColumnSource(mp.rightColumn)) + .toArray(ColumnSource[]::new); originalLeftSources = Arrays.copyOf(leftSources, leftSources.length); keyColumnCount = leftSources.length; useLeftGrouping = control.useGrouping(leftTable, leftSources); - // note that the naturalJoin operation ignores this field, because there is never any point - // to reading or - // processing grouping information when we have a single row on the right side. Cross join - // just doesn't support + // note that the naturalJoin operation ignores this field, because there is never any point to reading or + // processing grouping information when we have a single row on the right side. Cross join just doesn't support // grouping at all (yuck). 
useRightGrouping = control.useGrouping(rightTable, rightSources); @@ -69,8 +65,8 @@ class BucketingContext implements SafeCloseable { final Class leftType = TypeUtils.getUnboxedTypeIfBoxed(leftSources[ii].getType()); final Class rightType = TypeUtils.getUnboxedTypeIfBoxed(rightSources[ii].getType()); if (leftType != rightType) { - throw new IllegalArgumentException("Mismatched join types, " + columnsToMatch[ii] - + ": " + leftType + " != " + rightType); + throw new IllegalArgumentException( + "Mismatched join types, " + columnsToMatch[ii] + ": " + leftType + " != " + rightType); } if (leftType == DBDateTime.class) { @@ -84,59 +80,50 @@ class BucketingContext implements SafeCloseable { maximumUniqueValue = BooleanUtils.TRUE_BOOLEAN_AS_BYTE; minimumUniqueValue = BooleanUtils.NULL_BOOLEAN_AS_BYTE; uniqueFunctor = ToIntegerCast.makeToIntegerCast(ChunkType.Byte, - StaticNaturalJoinStateManager.CHUNK_SIZE, - -BooleanUtils.NULL_BOOLEAN_AS_BYTE); + StaticNaturalJoinStateManager.CHUNK_SIZE, -BooleanUtils.NULL_BOOLEAN_AS_BYTE); } } else if (leftType == String.class) { - if (control.considerSymbolTables(leftTable, rightTable, useLeftGrouping, - useRightGrouping, leftSources[ii], rightSources[ii])) { - final SymbolTableSource leftSymbolTableSource = - (SymbolTableSource) leftSources[ii]; - final SymbolTableSource rightSymbolTableSource = - (SymbolTableSource) rightSources[ii]; - - final Table leftSymbolTable = leftSymbolTableSource.getStaticSymbolTable( - leftTable.getIndex(), control.useSymbolTableLookupCaching()); - final Table rightSymbolTable = rightSymbolTableSource.getStaticSymbolTable( - rightTable.getIndex(), control.useSymbolTableLookupCaching()); - - if (control.useSymbolTables(leftTable.size(), leftSymbolTable.size(), - rightTable.size(), rightSymbolTable.size())) { - final SymbolTableCombiner symbolTableCombiner = new SymbolTableCombiner( - new ColumnSource[] {leftSources[ii]}, SymbolTableCombiner.hashTableSize( - Math.max(leftSymbolTable.size(), 
rightSymbolTable.size()))); - - final IntegerSparseArraySource leftSymbolMapper = - new IntegerSparseArraySource(); - final IntegerSparseArraySource rightSymbolMapper = - new IntegerSparseArraySource(); + if (control.considerSymbolTables(leftTable, rightTable, useLeftGrouping, useRightGrouping, + leftSources[ii], rightSources[ii])) { + final SymbolTableSource leftSymbolTableSource = (SymbolTableSource) leftSources[ii]; + final SymbolTableSource rightSymbolTableSource = (SymbolTableSource) rightSources[ii]; + + final Table leftSymbolTable = leftSymbolTableSource.getStaticSymbolTable(leftTable.getIndex(), + control.useSymbolTableLookupCaching()); + final Table rightSymbolTable = rightSymbolTableSource.getStaticSymbolTable(rightTable.getIndex(), + control.useSymbolTableLookupCaching()); + + if (control.useSymbolTables(leftTable.size(), leftSymbolTable.size(), rightTable.size(), + rightSymbolTable.size())) { + final SymbolTableCombiner symbolTableCombiner = + new SymbolTableCombiner(new ColumnSource[] {leftSources[ii]}, SymbolTableCombiner + .hashTableSize(Math.max(leftSymbolTable.size(), rightSymbolTable.size()))); + + final IntegerSparseArraySource leftSymbolMapper = new IntegerSparseArraySource(); + final IntegerSparseArraySource rightSymbolMapper = new IntegerSparseArraySource(); if (leftSymbolTable.size() < rightSymbolTable.size()) { symbolTableCombiner.addSymbols(leftSymbolTable, leftSymbolMapper); - symbolTableCombiner.lookupSymbols(rightSymbolTable, rightSymbolMapper, - Integer.MAX_VALUE); + symbolTableCombiner.lookupSymbols(rightSymbolTable, rightSymbolMapper, Integer.MAX_VALUE); } else { symbolTableCombiner.addSymbols(rightSymbolTable, rightSymbolMapper); - symbolTableCombiner.lookupSymbols(leftSymbolTable, leftSymbolMapper, - Integer.MAX_VALUE); + symbolTableCombiner.lookupSymbols(leftSymbolTable, leftSymbolMapper, Integer.MAX_VALUE); } - final ColumnSource leftSourceAsLong = - leftSources[ii].reinterpret(long.class); - final ColumnSource rightSourceAsLong = 
- rightSources[ii].reinterpret(long.class); + final ColumnSource leftSourceAsLong = leftSources[ii].reinterpret(long.class); + final ColumnSource rightSourceAsLong = rightSources[ii].reinterpret(long.class); - leftSources[ii] = new NaturalJoinHelper.SymbolTableToUniqueIdSource( - leftSourceAsLong, leftSymbolMapper); - rightSources[ii] = new NaturalJoinHelper.SymbolTableToUniqueIdSource( - rightSourceAsLong, rightSymbolMapper); + leftSources[ii] = + new NaturalJoinHelper.SymbolTableToUniqueIdSource(leftSourceAsLong, leftSymbolMapper); + rightSources[ii] = + new NaturalJoinHelper.SymbolTableToUniqueIdSource(rightSourceAsLong, rightSymbolMapper); if (leftSources.length == 1) { uniqueValues = true; maximumUniqueValue = symbolTableCombiner.getMaximumIdentifier(); minimumUniqueValue = 0; uniqueFunctor = ToIntegerCast.makeToIntegerCast(ChunkType.Int, - StaticNaturalJoinStateManager.CHUNK_SIZE, 0); + StaticNaturalJoinStateManager.CHUNK_SIZE, 0); } } } @@ -146,7 +133,7 @@ class BucketingContext implements SafeCloseable { maximumUniqueValue = Byte.MAX_VALUE; minimumUniqueValue = Byte.MIN_VALUE; uniqueFunctor = ToIntegerCast.makeToIntegerCast(ChunkType.Byte, - StaticNaturalJoinStateManager.CHUNK_SIZE, -Byte.MIN_VALUE); + StaticNaturalJoinStateManager.CHUNK_SIZE, -Byte.MIN_VALUE); } } else if (leftType == char.class) { if (leftSources.length == 1) { @@ -154,7 +141,7 @@ class BucketingContext implements SafeCloseable { maximumUniqueValue = Character.MAX_VALUE; minimumUniqueValue = Character.MIN_VALUE; uniqueFunctor = ToIntegerCast.makeToIntegerCast(ChunkType.Char, - StaticNaturalJoinStateManager.CHUNK_SIZE, -Character.MIN_VALUE); + StaticNaturalJoinStateManager.CHUNK_SIZE, -Character.MIN_VALUE); } } else if (leftType == short.class) { if (leftSources.length == 1) { @@ -162,7 +149,7 @@ class BucketingContext implements SafeCloseable { maximumUniqueValue = Short.MAX_VALUE; minimumUniqueValue = Short.MIN_VALUE; uniqueFunctor = ToIntegerCast.makeToIntegerCast(ChunkType.Short, - 
StaticNaturalJoinStateManager.CHUNK_SIZE, -Short.MIN_VALUE); + StaticNaturalJoinStateManager.CHUNK_SIZE, -Short.MIN_VALUE); } } } @@ -176,7 +163,6 @@ public void close() { } int uniqueValuesRange() { - return LongSizedDataStructure.intSize("int cast", - maximumUniqueValue - minimumUniqueValue + 1); + return LongSizedDataStructure.intSize("int cast", maximumUniqueValue - minimumUniqueValue + 1); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/ColumnComparatorFactory.java b/DB/src/main/java/io/deephaven/db/v2/ColumnComparatorFactory.java index 3411e60b675..f30e455b319 100644 --- a/DB/src/main/java/io/deephaven/db/v2/ColumnComparatorFactory.java +++ b/DB/src/main/java/io/deephaven/db/v2/ColumnComparatorFactory.java @@ -16,24 +16,21 @@ public class ColumnComparatorFactory { /** - * Produce an {@link IComparator} specialized for a given left and right {@link ColumnSource}. - * To do this we look at the underlying types of the column sources (actually we require those - * underlying types to be the same) and we use this information to call the specific primitive - * type getter method (whether {@link ColumnSource#getDouble}, {@link ColumnSource#getLong} and - * so on). This approach allows us to avoid boxing on these calls. We use a similar approach in - * order to test the null-ness of a given element. + * Produce an {@link IComparator} specialized for a given left and right {@link ColumnSource}. To do this we look at + * the underlying types of the column sources (actually we require those underlying types to be the same) and we use + * this information to call the specific primitive type getter method (whether {@link ColumnSource#getDouble}, + * {@link ColumnSource#getLong} and so on). This approach allows us to avoid boxing on these calls. We use a similar + * approach in order to test the null-ness of a given element. 
* - * This method is not especially efficient, but (because we are typically not being called from - * an inner loop), that fact is probably not relevant for performance. The point is that the - * returned IComparator *is* rather efficient. + * This method is not especially efficient, but (because we are typically not being called from an inner loop), that + * fact is probably not relevant for performance. The point is that the returned IComparator *is* rather efficient. * * @param lcs The left-hand ColumnSource (uses current values) * @param rcs The right-hand ColumnSource (uses current values) - * @return An AbstractColumnSource.IComparator designed to compare elements from the two column - * sources. + * @return An AbstractColumnSource.IComparator designed to compare elements from the two column sources. */ public static IComparator createComparator(final ColumnSource lcs, - final ColumnSource rcs) { + final ColumnSource rcs) { final Class lType = lcs.getType(); final Class rType = rcs.getType(); Assert.eq(lType, "lType", rType, "rType"); @@ -45,8 +42,7 @@ public static IComparator createComparator(final ColumnSource lcs, return (lKey, rKey) -> { final byte l = lcs.getByte(lKey); final byte r = rcs.getByte(rKey); - return l == r ? 0 - : (l == NULL_BYTE ? -1 : (r == NULL_BYTE ? 1 : Byte.compare(l, r))); + return l == r ? 0 : (l == NULL_BYTE ? -1 : (r == NULL_BYTE ? 1 : Byte.compare(l, r))); }; } if (provides.test(char.class)) { @@ -56,33 +52,28 @@ public static IComparator createComparator(final ColumnSource lcs, return (lKey, rKey) -> { final short l = lcs.getShort(lKey); final short r = rcs.getShort(rKey); - return l == r ? 0 - : (l == NULL_SHORT ? -1 : (r == NULL_SHORT ? 1 : Short.compare(l, r))); + return l == r ? 0 : (l == NULL_SHORT ? -1 : (r == NULL_SHORT ? 1 : Short.compare(l, r))); }; } if (provides.test(int.class)) { return (lKey, rKey) -> { final int l = lcs.getInt(lKey); final int r = rcs.getInt(rKey); - return l == r ? 0 - : (l == NULL_INT ? 
-1 : (r == NULL_INT ? 1 : Integer.compare(l, r))); + return l == r ? 0 : (l == NULL_INT ? -1 : (r == NULL_INT ? 1 : Integer.compare(l, r))); }; } if (provides.test(long.class)) { return (lKey, rKey) -> { final long l = lcs.getLong(lKey); final long r = rcs.getLong(rKey); - return l == r ? 0 - : (l == NULL_LONG ? -1 : (r == NULL_LONG ? 1 : Long.compare(l, r))); + return l == r ? 0 : (l == NULL_LONG ? -1 : (r == NULL_LONG ? 1 : Long.compare(l, r))); }; } if (provides.test(float.class)) { - return (lKey, rKey) -> DhFloatComparisons.compare(lcs.getFloat(lKey), - rcs.getFloat(rKey)); + return (lKey, rKey) -> DhFloatComparisons.compare(lcs.getFloat(lKey), rcs.getFloat(rKey)); } if (provides.test(double.class)) { - return (lKey, rKey) -> DhDoubleComparisons.compare(lcs.getDouble(lKey), - rcs.getDouble(rKey)); + return (lKey, rKey) -> DhDoubleComparisons.compare(lcs.getDouble(lKey), rcs.getDouble(rKey)); } // fall through to Object interface return (lKey, rKey) -> { @@ -93,24 +84,22 @@ public static IComparator createComparator(final ColumnSource lcs, } /** - * Produce an {@link IComparator} specialized for a given left and right {@link ColumnSource}. - * To do this we look at the underlying types of the column sources (actually we require those - * underlying types to be the same) and we use this information to call the specific primitive - * type getter method (whether {@link ColumnSource#getDouble}, {@link ColumnSource#getLong} and - * so on). This approach allows us to avoid boxing on these calls. We use a similar approach in - * order to test the null-ness of a given element. + * Produce an {@link IComparator} specialized for a given left and right {@link ColumnSource}. To do this we look at + * the underlying types of the column sources (actually we require those underlying types to be the same) and we use + * this information to call the specific primitive type getter method (whether {@link ColumnSource#getDouble}, + * {@link ColumnSource#getLong} and so on). 
This approach allows us to avoid boxing on these calls. We use a similar + * approach in order to test the null-ness of a given element. * - * This method is not especially efficient, but (because we are typically not being called from - * an inner loop), that fact is probably not relevant for performance. The point is that the - * returned IComparatorEnhanced *is* rather efficient. + * This method is not especially efficient, but (because we are typically not being called from an inner loop), that + * fact is probably not relevant for performance. The point is that the returned IComparatorEnhanced *is* rather + * efficient. * * @param lcs The left-hand ColumnSource (uses current values) * @param rcs The right-hand ColumnSource (uses previous values) - * @return An AbstractColumnSource.IComparator designed to compare elements from the two column - * sources. + * @return An AbstractColumnSource.IComparator designed to compare elements from the two column sources. */ public static IComparator createComparatorLeftCurrRightPrev( - final ColumnSource lcs, final ColumnSource rcs) { + final ColumnSource lcs, final ColumnSource rcs) { final Class lType = lcs.getType(); final Class rType = rcs.getType(); Assert.eq(lType, "lType", rType, "rType"); @@ -122,56 +111,49 @@ public static IComparator createComparatorLeftCurrRightPrev( return (lKey, rKey) -> { final byte l = lcs.getByte(lKey); final byte r = rcs.getPrevByte(rKey); - return l == r ? 0 - : (l == NULL_BYTE ? -1 : (r == NULL_BYTE ? 1 : Byte.compare(l, r))); + return l == r ? 0 : (l == NULL_BYTE ? -1 : (r == NULL_BYTE ? 1 : Byte.compare(l, r))); }; } if (provides.test(char.class)) { return (lKey, rKey) -> { final char l = lcs.getChar(lKey); final char r = rcs.getPrevChar(rKey); - return l == r ? 0 - : (l == NULL_CHAR ? -1 : (r == NULL_CHAR ? 1 : Character.compare(l, r))); + return l == r ? 0 : (l == NULL_CHAR ? -1 : (r == NULL_CHAR ? 
1 : Character.compare(l, r))); }; } if (provides.test(short.class)) { return (lKey, rKey) -> { final short l = lcs.getShort(lKey); final short r = rcs.getPrevShort(rKey); - return l == r ? 0 - : (l == NULL_SHORT ? -1 : (r == NULL_SHORT ? 1 : Short.compare(l, r))); + return l == r ? 0 : (l == NULL_SHORT ? -1 : (r == NULL_SHORT ? 1 : Short.compare(l, r))); }; } if (provides.test(int.class)) { return (lKey, rKey) -> { final int l = lcs.getInt(lKey); final int r = rcs.getPrevInt(rKey); - return l == r ? 0 - : (l == NULL_INT ? -1 : (r == NULL_INT ? 1 : Integer.compare(l, r))); + return l == r ? 0 : (l == NULL_INT ? -1 : (r == NULL_INT ? 1 : Integer.compare(l, r))); }; } if (provides.test(long.class)) { return (lKey, rKey) -> { final long l = lcs.getLong(lKey); final long r = rcs.getPrevLong(rKey); - return l == r ? 0 - : (l == NULL_LONG ? -1 : (r == NULL_LONG ? 1 : Long.compare(l, r))); + return l == r ? 0 : (l == NULL_LONG ? -1 : (r == NULL_LONG ? 1 : Long.compare(l, r))); }; } if (provides.test(float.class)) { return (lKey, rKey) -> { final float l = lcs.getFloat(lKey); final float r = rcs.getPrevFloat(rKey); - return l == r ? 0 - : (l == NULL_FLOAT ? -1 : (r == NULL_FLOAT ? 1 : Float.compare(l, r))); + return l == r ? 0 : (l == NULL_FLOAT ? -1 : (r == NULL_FLOAT ? 1 : Float.compare(l, r))); }; } if (provides.test(double.class)) { return (lKey, rKey) -> { final double l = lcs.getDouble(lKey); final double r = rcs.getPrevDouble(rKey); - return l == r ? 0 - : (l == NULL_DOUBLE ? -1 : (r == NULL_DOUBLE ? 1 : Double.compare(l, r))); + return l == r ? 0 : (l == NULL_DOUBLE ? -1 : (r == NULL_DOUBLE ? 
1 : Double.compare(l, r))); }; } // fall through to Object interface diff --git a/DB/src/main/java/io/deephaven/db/v2/ColumnRenderersBuilder.java b/DB/src/main/java/io/deephaven/db/v2/ColumnRenderersBuilder.java index 11a50b01b4a..3e935dee0a3 100644 --- a/DB/src/main/java/io/deephaven/db/v2/ColumnRenderersBuilder.java +++ b/DB/src/main/java/io/deephaven/db/v2/ColumnRenderersBuilder.java @@ -70,8 +70,7 @@ public ColumnRenderersBuilder setRenderer(String columnName, ColumnRendererType } @ScriptApi - public ColumnRenderersBuilder setRenderer(String columnName, - Class rendererClass) { + public ColumnRenderersBuilder setRenderer(String columnName, Class rendererClass) { return setRenderer(columnName, rendererClass.getCanonicalName()); } @@ -122,8 +121,7 @@ public String getRenderClassName(String columnName) { } /** - * Gets the column renderer type assigned to a given column name. Returns null if none is - * assigned. + * Gets the column renderer type assigned to a given column name. Returns null if none is assigned. * * @param columnName the name of the column * @return the renderer type, null if none is assigned @@ -143,8 +141,8 @@ public String getRenderClassForType(ColumnRendererType type) { } /** - * Identifies if this builder-instance contains any renderer-definitions. If not, then the - * calling method should not attempt to add our directive an an attribute to a table + * Identifies if this builder-instance contains any renderer-definitions. 
If not, then the calling method should not + * attempt to add our directive an an attribute to a table * * @return true if there are no renderers defined, else false */ @@ -153,8 +151,8 @@ public boolean isEmpty() { } /** - * Returns a Set of column-names, which may be verified as valid prior to setting our directive - * as an attribute to the table + * Returns a Set of column-names, which may be verified as valid prior to setting our directive as an attribute to + * the table * * @return An iterable Set of column-names identified by this builder-instance */ @@ -190,8 +188,8 @@ public static ColumnRenderersBuilder fromDirective(final String directive) { continue; final String[] kv = pair.split("="); if (kv.length != 2) { - throw new IllegalArgumentException("Invalid " + COLUMN_RENDERERS_ATTRIBUTE + ": " - + directive + ", bad column renderer pair " + pair); + throw new IllegalArgumentException("Invalid " + COLUMN_RENDERERS_ATTRIBUTE + ": " + directive + + ", bad column renderer pair " + pair); } builder.setRenderer(kv[0], kv[1]); } diff --git a/DB/src/main/java/io/deephaven/db/v2/ColumnSourceManager.java b/DB/src/main/java/io/deephaven/db/v2/ColumnSourceManager.java index 9f1e5e33b1b..3526f5eb184 100644 --- a/DB/src/main/java/io/deephaven/db/v2/ColumnSourceManager.java +++ b/DB/src/main/java/io/deephaven/db/v2/ColumnSourceManager.java @@ -18,16 +18,15 @@ public interface ColumnSourceManager { /** - * Get a map of name to {@link DeferredGroupingColumnSource} for the column sources maintained - * by this manager. + * Get a map of name to {@link DeferredGroupingColumnSource} for the column sources maintained by this manager. * * @return An unmodifiable view of the column source map maintained by this manager. */ Map> getColumnSources(); /** - * Turn off column grouping, and clear the groupings on all GROUPING column sources. Note that - * this does *not* affect PARTITIONING columns. + * Turn off column grouping, and clear the groupings on all GROUPING column sources. 
Note that this does *not* + * affect PARTITIONING columns. */ void disableGrouping(); @@ -39,16 +38,15 @@ public interface ColumnSourceManager { void addLocation(@NotNull TableLocation tableLocation); /** - * Observe size changes in the previously added table locations, and update the managed column - * sources accordingly. + * Observe size changes in the previously added table locations, and update the managed column sources accordingly. * * @return The index of added keys */ Index refresh(); /** - * Get the added locations, first the ones that have been "included" (found to exist with - * non-zero size) in order of inclusion, then the remainder in order of discovery. + * Get the added locations, first the ones that have been "included" (found to exist with non-zero size) in order of + * inclusion, then the remainder in order of discovery. * * @return All known locations, ordered as described */ @@ -63,8 +61,8 @@ public interface ColumnSourceManager { Collection includedLocations(); /** - * Report whether this ColumnSourceManager has no locations that have been "included" (i.e. - * found to exist with non-zero size). + * Report whether this ColumnSourceManager has no locations that have been "included" (i.e. found to exist with + * non-zero size). * * @return True if there are no included locations */ diff --git a/DB/src/main/java/io/deephaven/db/v2/CrossJoinHelper.java b/DB/src/main/java/io/deephaven/db/v2/CrossJoinHelper.java index 7db2cf5bfb9..2cfd70c811d 100644 --- a/DB/src/main/java/io/deephaven/db/v2/CrossJoinHelper.java +++ b/DB/src/main/java/io/deephaven/db/v2/CrossJoinHelper.java @@ -34,75 +34,65 @@ * Implementation for chunk-oriented aggregation operations, including {@link Table#join}. */ public class CrossJoinHelper { - // Note: This would be >= 16 to get efficient performance from Index#insert and - // Index#shiftInPlace. However, it is + // Note: This would be >= 16 to get efficient performance from Index#insert and Index#shiftInPlace. 
However, it is // very costly for joins of many small groups for the default to be so high. public static final int DEFAULT_NUM_RIGHT_BITS_TO_RESERVE = Configuration.getInstance() - .getIntegerForClassWithDefault(CrossJoinHelper.class, "numRightBitsToReserve", 10); + .getIntegerForClassWithDefault(CrossJoinHelper.class, "numRightBitsToReserve", 10); /** * Static-use only. */ private CrossJoinHelper() {} - static Table join(final QueryTable leftTable, final QueryTable rightTable, - final MatchPair[] columnsToMatch, final MatchPair[] columnsToAdd, - final int numReserveRightBits) { - return join(leftTable, rightTable, columnsToMatch, columnsToAdd, numReserveRightBits, - new JoinControl()); + static Table join(final QueryTable leftTable, final QueryTable rightTable, final MatchPair[] columnsToMatch, + final MatchPair[] columnsToAdd, final int numReserveRightBits) { + return join(leftTable, rightTable, columnsToMatch, columnsToAdd, numReserveRightBits, new JoinControl()); } - static Table join(final QueryTable leftTable, final QueryTable rightTable, - final MatchPair[] columnsToMatch, final MatchPair[] columnsToAdd, - final int numReserveRightBits, final JoinControl control) { - final Table result = internalJoin(leftTable, rightTable, columnsToMatch, columnsToAdd, - numReserveRightBits, control); + static Table join(final QueryTable leftTable, final QueryTable rightTable, final MatchPair[] columnsToMatch, + final MatchPair[] columnsToAdd, final int numReserveRightBits, final JoinControl control) { + final Table result = + internalJoin(leftTable, rightTable, columnsToMatch, columnsToAdd, numReserveRightBits, control); leftTable.maybeCopyColumnDescriptions(result, rightTable, columnsToMatch, columnsToAdd); return result; } private static Table internalJoin(final QueryTable leftTable, final QueryTable rightTable, - final MatchPair[] columnsToMatch, final MatchPair[] columnsToAdd, int numRightBitsToReserve, - final JoinControl control) { + final MatchPair[] columnsToMatch, 
final MatchPair[] columnsToAdd, int numRightBitsToReserve, + final JoinControl control) { QueryTable.checkInitiateOperation(leftTable); QueryTable.checkInitiateOperation(rightTable); - try (final BucketingContext bucketingContext = new BucketingContext("join", leftTable, - rightTable, columnsToMatch, columnsToAdd, control)) { - // TODO: if we have a single column of unique values, and the range is small, we can use - // a simplified table - // if (!rightTable.isLive() && control.useUniqueTable(uniqueValues, maximumUniqueValue, - // minumumUniqueValue)) { (etc) + try (final BucketingContext bucketingContext = + new BucketingContext("join", leftTable, rightTable, columnsToMatch, columnsToAdd, control)) { + // TODO: if we have a single column of unique values, and the range is small, we can use a simplified table + // if (!rightTable.isLive() && control.useUniqueTable(uniqueValues, maximumUniqueValue, minumumUniqueValue)) + // { (etc) if (bucketingContext.keyColumnCount == 0) { if (!leftTable.isLive() && !rightTable.isLive()) { - numRightBitsToReserve = 1; // tight computation of this is efficient and - // appropriate + numRightBitsToReserve = 1; // tight computation of this is efficient and appropriate } - return zeroKeyColumnsJoin(leftTable, rightTable, columnsToAdd, - numRightBitsToReserve, bucketingContext.listenerDescription); + return zeroKeyColumnsJoin(leftTable, rightTable, columnsToAdd, numRightBitsToReserve, + bucketingContext.listenerDescription); } final ModifiedColumnSet rightKeyColumns = - rightTable.newModifiedColumnSet(MatchPair.getRightColumns(columnsToMatch)); + rightTable.newModifiedColumnSet(MatchPair.getRightColumns(columnsToMatch)); final ModifiedColumnSet leftKeyColumns = - leftTable.newModifiedColumnSet(MatchPair.getLeftColumns(columnsToMatch)); + leftTable.newModifiedColumnSet(MatchPair.getLeftColumns(columnsToMatch)); if (!rightTable.isLive()) { // TODO: use grouping if (!leftTable.isLive()) { - final StaticChunkedCrossJoinStateManager jsm = 
- new StaticChunkedCrossJoinStateManager( - bucketingContext.leftSources, control.initialBuildSize(), control, - leftTable); + final StaticChunkedCrossJoinStateManager jsm = new StaticChunkedCrossJoinStateManager( + bucketingContext.leftSources, control.initialBuildSize(), control, leftTable); jsm.setMaximumLoadFactor(control.getMaximumLoadFactor()); jsm.setTargetLoadFactor(control.getTargetLoadFactor()); - // We can only build from right, because the left hand side does not permit us - // to nicely rehash as - // we only have the redirection index when building left and no way to reverse - // the lookup. - final Index resultIndex = jsm.buildFromRight(leftTable, - bucketingContext.leftSources, rightTable, bucketingContext.rightSources); + // We can only build from right, because the left hand side does not permit us to nicely rehash as + // we only have the redirection index when building left and no way to reverse the lookup. + final Index resultIndex = jsm.buildFromRight(leftTable, bucketingContext.leftSources, rightTable, + bucketingContext.rightSources); return makeResult(leftTable, rightTable, columnsToAdd, jsm, resultIndex, cs -> { // noinspection unchecked @@ -111,28 +101,25 @@ private static Table internalJoin(final QueryTable leftTable, final QueryTable r } final LeftOnlyIncrementalChunkedCrossJoinStateManager jsm = - new LeftOnlyIncrementalChunkedCrossJoinStateManager( - bucketingContext.leftSources, control.initialBuildSize(), leftTable, - numRightBitsToReserve); + new LeftOnlyIncrementalChunkedCrossJoinStateManager( + bucketingContext.leftSources, control.initialBuildSize(), leftTable, + numRightBitsToReserve); jsm.setMaximumLoadFactor(control.getMaximumLoadFactor()); jsm.setTargetLoadFactor(control.getTargetLoadFactor()); - final Index resultIndex = - jsm.buildLeftTicking(leftTable, rightTable, bucketingContext.rightSources); - final QueryTable resultTable = - makeResult(leftTable, rightTable, columnsToAdd, jsm, resultIndex, cs -> { - // noinspection 
unchecked - return new CrossJoinRightColumnSource<>(jsm, cs, rightTable.isLive()); - }); + final Index resultIndex = jsm.buildLeftTicking(leftTable, rightTable, bucketingContext.rightSources); + final QueryTable resultTable = makeResult(leftTable, rightTable, columnsToAdd, jsm, resultIndex, cs -> { + // noinspection unchecked + return new CrossJoinRightColumnSource<>(jsm, cs, rightTable.isLive()); + }); jsm.startTrackingPrevValues(); - final ModifiedColumnSet.Transformer leftTransformer = - leftTable.newModifiedColumnSetTransformer( - resultTable, leftTable.getColumnSourceMap().keySet() - .toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)); + final ModifiedColumnSet.Transformer leftTransformer = leftTable.newModifiedColumnSetTransformer( + resultTable, + leftTable.getColumnSourceMap().keySet().toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)); - leftTable.listenForUpdates(new BaseTable.ShiftAwareListenerImpl( - bucketingContext.listenerDescription, leftTable, resultTable) { + leftTable.listenForUpdates(new BaseTable.ShiftAwareListenerImpl(bucketingContext.listenerDescription, + leftTable, resultTable) { @Override public void onUpdate(final Update upstream) { jsm.validateKeySpaceSize(); @@ -155,8 +142,7 @@ public void onUpdate(final Update upstream) { try (final Index prevLeftIndex = leftTable.getIndex().getPrevIndex()) { prevLeftIndex.remove(upstream.removed); jsm.applyLeftShift(prevLeftIndex, upstream.shifted); - downstream.shifted = - expandLeftOnlyShift(prevLeftIndex, upstream.shifted, jsm); + downstream.shifted = expandLeftOnlyShift(prevLeftIndex, upstream.shifted, jsm); downstream.shifted.apply(resultIndex); } @@ -168,24 +154,20 @@ public void onUpdate(final Update upstream) { downstream.modifiedColumnSet = ModifiedColumnSet.EMPTY; } else { downstream.modifiedColumnSet = resultTable.modifiedColumnSet; - leftTransformer.transform(upstream.modifiedColumnSet, - resultTable.modifiedColumnSet); + leftTransformer.transform(upstream.modifiedColumnSet, 
resultTable.modifiedColumnSet); } } else if (upstream.modified.nonempty()) { - final Index.SequentialBuilder modBuilder = - Index.FACTORY.getSequentialBuilder(); + final Index.SequentialBuilder modBuilder = Index.FACTORY.getSequentialBuilder(); upstream.modified.forAllLongs(ll -> { final Index rightIndex = jsm.getRightIndexFromLeftIndex(ll); if (rightIndex.nonempty()) { final long currResultOffset = ll << jsm.getNumShiftBits(); - modBuilder.appendRange(currResultOffset, - currResultOffset + rightIndex.size() - 1); + modBuilder.appendRange(currResultOffset, currResultOffset + rightIndex.size() - 1); } }); downstream.modified = modBuilder.getIndex(); downstream.modifiedColumnSet = resultTable.modifiedColumnSet; - leftTransformer.transform(upstream.modifiedColumnSet, - resultTable.modifiedColumnSet); + leftTransformer.transform(upstream.modifiedColumnSet, resultTable.modifiedColumnSet); } else { downstream.modified = Index.FACTORY.getEmptyIndex(); downstream.modifiedColumnSet = ModifiedColumnSet.EMPTY; @@ -196,8 +178,7 @@ public void onUpdate(final Update upstream) { final Index rightIndex = jsm.getRightIndex(stateSlot); if (rightIndex.nonempty()) { final long regionStart = leftKey << jsm.getNumShiftBits(); - addBuilder.addRange(regionStart, - regionStart + rightIndex.size() - 1); + addBuilder.addRange(regionStart, regionStart + rightIndex.size() - 1); } }); try (final Index added = addBuilder.getIndex()) { @@ -211,55 +192,48 @@ public void onUpdate(final Update upstream) { return resultTable; } - final RightIncrementalChunkedCrossJoinStateManager jsm = - new RightIncrementalChunkedCrossJoinStateManager( - bucketingContext.leftSources, control.initialBuildSize(), - bucketingContext.rightSources, leftTable, numRightBitsToReserve); + final RightIncrementalChunkedCrossJoinStateManager jsm = new RightIncrementalChunkedCrossJoinStateManager( + bucketingContext.leftSources, control.initialBuildSize(), bucketingContext.rightSources, leftTable, + numRightBitsToReserve); 
jsm.setMaximumLoadFactor(control.getMaximumLoadFactor()); jsm.setTargetLoadFactor(control.getTargetLoadFactor()); final Index resultIndex = jsm.build(leftTable, rightTable); - final QueryTable resultTable = - makeResult(leftTable, rightTable, columnsToAdd, jsm, resultIndex, cs -> { - // noinspection unchecked - return new CrossJoinRightColumnSource<>(jsm, cs, rightTable.isLive()); - }); + final QueryTable resultTable = makeResult(leftTable, rightTable, columnsToAdd, jsm, resultIndex, cs -> { + // noinspection unchecked + return new CrossJoinRightColumnSource<>(jsm, cs, rightTable.isLive()); + }); final ModifiedColumnSet.Transformer rightTransformer = - rightTable.newModifiedColumnSetTransformer(resultTable, columnsToAdd); + rightTable.newModifiedColumnSetTransformer(resultTable, columnsToAdd); if (leftTable.isLive()) { // LeftIndexToSlot needs prev value tracking jsm.startTrackingPrevValues(); - final ModifiedColumnSet.Transformer leftTransformer = - leftTable.newModifiedColumnSetTransformer( - resultTable, leftTable.getColumnSourceMap().keySet() - .toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)); + final ModifiedColumnSet.Transformer leftTransformer = leftTable.newModifiedColumnSetTransformer( + resultTable, + leftTable.getColumnSourceMap().keySet().toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)); - final JoinListenerRecorder leftRecorder = new JoinListenerRecorder(true, - bucketingContext.listenerDescription, leftTable, resultTable); - final JoinListenerRecorder rightRecorder = new JoinListenerRecorder(false, - bucketingContext.listenerDescription, rightTable, resultTable); + final JoinListenerRecorder leftRecorder = + new JoinListenerRecorder(true, bucketingContext.listenerDescription, leftTable, resultTable); + final JoinListenerRecorder rightRecorder = + new JoinListenerRecorder(false, bucketingContext.listenerDescription, rightTable, resultTable); // The approach for both-sides-ticking is to: - // - Aggregate all right side changes, queued to apply at the 
right time while - // processing left update. + // - Aggregate all right side changes, queued to apply at the right time while processing left update. // - Handle left removes. // - Handle right removes (including right modified removes). // - Handle left shifts // - Handle left modifies. - // - Handle right modifies and adds (including right modified adds and all - // downstream shift data). + // - Handle right modifies and adds (including right modified adds and all downstream shift data). // - Handle left adds. // - Generate downstream MCS. // - Propagate and Profit. - final MergedListener mergedListener = new MergedListener( - Arrays.asList(leftRecorder, rightRecorder), Collections.emptyList(), - bucketingContext.listenerDescription, resultTable) { - private final CrossJoinModifiedSlotTracker tracker = - new CrossJoinModifiedSlotTracker(jsm); + final MergedListener mergedListener = new MergedListener(Arrays.asList(leftRecorder, rightRecorder), + Collections.emptyList(), bucketingContext.listenerDescription, resultTable) { + private final CrossJoinModifiedSlotTracker tracker = new CrossJoinModifiedSlotTracker(jsm); @Override protected void process() { @@ -268,8 +242,7 @@ protected void process() { final boolean leftChanged = upstreamLeft != null; final boolean rightChanged = upstreamRight != null; - final ShiftAwareListener.Update downstream = - new ShiftAwareListener.Update(); + final ShiftAwareListener.Update downstream = new ShiftAwareListener.Update(); // If there are any right changes, let's probe and aggregate them now. 
if (rightChanged) { @@ -288,16 +261,14 @@ protected void process() { } if (upstreamRight.modified.nonempty()) { jsm.rightModified(upstreamRight, - upstreamRight.modifiedColumnSet.containsAny(rightKeyColumns), - tracker); + upstreamRight.modifiedColumnSet.containsAny(rightKeyColumns), tracker); } - // space needed for right index might have changed, let's verify we have - // enough keyspace + // space needed for right index might have changed, let's verify we have enough keyspace jsm.validateKeySpaceSize(); - // We must finalize all known slots, so that left accumulation does not - // mix with right accumulation. + // We must finalize all known slots, so that left accumulation does not mix with right + // accumulation. if (upstreamRight.shifted.nonempty()) { try (final Index prevIndex = rightTable.getIndex().getPrevIndex()) { jsm.shiftRightIndexToSlot(prevIndex, upstreamRight.shifted); @@ -310,11 +281,11 @@ protected void process() { final int currRightBits = jsm.getNumShiftBits(); final boolean allRowsShift = prevRightBits != currRightBits; - final boolean leftModifiedMightReslot = leftChanged - && upstreamLeft.modifiedColumnSet.containsAny(leftKeyColumns); + final boolean leftModifiedMightReslot = + leftChanged && upstreamLeft.modifiedColumnSet.containsAny(leftKeyColumns); - // Let us gather all removes from the left. This includes aggregating the - // results of left modified. + // Let us gather all removes from the left. This includes aggregating the results of left + // modified. 
if (leftChanged) { if (upstreamLeft.removed.nonempty()) { jsm.leftRemoved(upstreamLeft.removed, tracker); @@ -323,8 +294,8 @@ protected void process() { } if (upstreamLeft.modified.nonempty()) { - // translates the left modified as rms/mods/adds and accumulates - // into tracker.{leftRemoved,leftModified,leftAdded} + // translates the left modified as rms/mods/adds and accumulates into + // tracker.{leftRemoved,leftModified,leftAdded} jsm.leftModified(upstreamLeft, leftModifiedMightReslot, tracker); } else { tracker.leftModified = Index.FACTORY.getEmptyIndex(); @@ -339,30 +310,23 @@ protected void process() { } if (rightChanged) { - // With left removes (and modified-removes) applied (yet adds and - // modified-adds pending), - // we can now easily calculate which rows are removed due to right - // removes. + // With left removes (and modified-removes) applied (yet adds and modified-adds pending), + // we can now easily calculate which rows are removed due to right removes. - try (final Index leftIndexToVisitForRightRm = - Index.FACTORY.getEmptyIndex()) { + try (final Index leftIndexToVisitForRightRm = Index.FACTORY.getEmptyIndex()) { tracker.forAllModifiedSlots(slotState -> { - if (slotState.leftIndex.size() > 0 - && slotState.rightRemoved.nonempty()) { + if (slotState.leftIndex.size() > 0 && slotState.rightRemoved.nonempty()) { leftIndexToVisitForRightRm.insert(slotState.leftIndex); } }); try (final Index toRemove = Index.FACTORY.getEmptyIndex()) { - // This could use a sequential builder, however, since we are - // always appending - // non-overlapping containers, inserting into an index is - // actually rather efficient. + // This could use a sequential builder, however, since we are always appending + // non-overlapping containers, inserting into an index is actually rather efficient. 
leftIndexToVisitForRightRm.forAllLongs(ii -> { final long prevOffset = ii << prevRightBits; - final CrossJoinModifiedSlotTracker.SlotState state = - tracker.getFinalSlotState( - jsm.getTrackerCookie(jsm.getSlotFromLeftIndex(ii))); + final CrossJoinModifiedSlotTracker.SlotState state = tracker + .getFinalSlotState(jsm.getTrackerCookie(jsm.getSlotFromLeftIndex(ii))); toRemove.insertWithShift(prevOffset, state.rightRemoved); }); downstream.removed.insert(toRemove); @@ -373,24 +337,20 @@ protected void process() { // apply left shifts to tracker (so our mods/adds are in post-shift space) if (leftChanged && upstreamLeft.shifted.nonempty()) { tracker.leftShifted = upstreamLeft.shifted; - try (final Index prevLeftMinusRemovals = - leftTable.getIndex().getPrevIndex()) { + try (final Index prevLeftMinusRemovals = leftTable.getIndex().getPrevIndex()) { prevLeftMinusRemovals.remove(upstreamLeft.removed); jsm.leftShift(prevLeftMinusRemovals, upstreamLeft.shifted, tracker); } } - // note rows to shift might have no shifts but still need result index - // updated + // note rows to shift might have no shifts but still need result index updated final Index rowsToShift; final boolean mustCloseRowsToShift; if (rightChanged) { // process right mods / adds (in post-shift space) - final Index.RandomBuilder addsToVisit = - Index.FACTORY.getRandomBuilder(); - final Index.RandomBuilder modsToVisit = - Index.FACTORY.getRandomBuilder(); + final Index.RandomBuilder addsToVisit = Index.FACTORY.getRandomBuilder(); + final Index.RandomBuilder modsToVisit = Index.FACTORY.getRandomBuilder(); tracker.forAllModifiedSlots(slotState -> { if (slotState.leftIndex.size() == 0) { return; @@ -404,31 +364,28 @@ protected void process() { }); try (final Index leftIndexesToVisitForAdds = addsToVisit.getIndex(); - final Index leftIndexesToVisitForMods = modsToVisit.getIndex(); - final Index modified = Index.FACTORY.getEmptyIndex()) { + final Index leftIndexesToVisitForMods = modsToVisit.getIndex(); + final 
Index modified = Index.FACTORY.getEmptyIndex()) { downstream.added = Index.FACTORY.getEmptyIndex(); leftIndexesToVisitForAdds.forAllLongs(ii -> { final long currOffset = ii << currRightBits; - final CrossJoinModifiedSlotTracker.SlotState state = - tracker.getFinalSlotState( - jsm.getTrackerCookie(jsm.getSlotFromLeftIndex(ii))); + final CrossJoinModifiedSlotTracker.SlotState state = tracker + .getFinalSlotState(jsm.getTrackerCookie(jsm.getSlotFromLeftIndex(ii))); downstream.added.insertWithShift(currOffset, state.rightAdded); }); leftIndexesToVisitForMods.forAllLongs(ii -> { final long currOffset = ii << currRightBits; - final CrossJoinModifiedSlotTracker.SlotState state = - tracker.getFinalSlotState( - jsm.getTrackerCookie(jsm.getSlotFromLeftIndex(ii))); + final CrossJoinModifiedSlotTracker.SlotState state = tracker + .getFinalSlotState(jsm.getTrackerCookie(jsm.getSlotFromLeftIndex(ii))); modified.insertWithShift(currOffset, state.rightModified); }); downstream.modified.insert(modified); mustCloseRowsToShift = leftChanged || !allRowsShift; if (allRowsShift) { - rowsToShift = - leftChanged ? leftTable.getIndex().minus(upstreamLeft.added) + rowsToShift = leftChanged ? 
leftTable.getIndex().minus(upstreamLeft.added) : leftTable.getIndex(); } else { rowsToShift = leftIndexesToVisitForAdds.clone(); @@ -437,11 +394,9 @@ protected void process() { if (!allRowsShift) { // removals might generate shifts, so let's add those to our index - final Index.RandomBuilder rmsToVisit = - Index.FACTORY.getRandomBuilder(); + final Index.RandomBuilder rmsToVisit = Index.FACTORY.getRandomBuilder(); tracker.forAllModifiedSlots(slotState -> { - if (slotState.leftIndex.size() > 0 - && slotState.rightRemoved.nonempty()) { + if (slotState.leftIndex.size() > 0 && slotState.rightRemoved.nonempty()) { rmsToVisit.addIndex(slotState.leftIndex); } }); @@ -458,43 +413,36 @@ protected void process() { final long prevCardinality = 1L << prevRightBits; final long currCardinality = 1L << currRightBits; final IndexShiftData.Builder shiftBuilder = new IndexShiftData.Builder(); - final Index.SequentialBuilder toRemoveFromResultIndex = - Index.FACTORY.getSequentialBuilder(); - final Index.SequentialBuilder toInsertIntoResultIndex = - Index.FACTORY.getSequentialBuilder(); + final Index.SequentialBuilder toRemoveFromResultIndex = Index.FACTORY.getSequentialBuilder(); + final Index.SequentialBuilder toInsertIntoResultIndex = Index.FACTORY.getSequentialBuilder(); - if (rowsToShift.nonempty() && leftChanged - && upstreamLeft.shifted.nonempty()) { + if (rowsToShift.nonempty() && leftChanged && upstreamLeft.shifted.nonempty()) { final MutableBoolean finishShifting = new MutableBoolean(); final MutableLong watermark = new MutableLong(0); final MutableInt currLeftShiftIdx = new MutableInt(0); - try ( - final OrderedKeys.Iterator okit = + try (final OrderedKeys.Iterator okit = allRowsShift ? 
null : resultIndex.getOrderedKeysIterator(); - final Index unshiftedRowsToShift = rowsToShift.clone()) { + final Index unshiftedRowsToShift = rowsToShift.clone()) { upstreamLeft.shifted.unapply(unshiftedRowsToShift); - final ReadOnlyIndex.SearchIterator prevIter = - unshiftedRowsToShift.searchIterator(); + final ReadOnlyIndex.SearchIterator prevIter = unshiftedRowsToShift.searchIterator(); final LongConsumer processLeftShiftsUntil = (ii) -> { - // note: if all rows shift, then each row shifts by a different - // amount and rowsToShift is inclusive - if (!finishShifting.booleanValue() - && watermark.longValue() >= ii || allRowsShift) { + // note: if all rows shift, then each row shifts by a different amount and + // rowsToShift is inclusive + if (!finishShifting.booleanValue() && watermark.longValue() >= ii || allRowsShift) { return; } - for (; currLeftShiftIdx.intValue() < upstreamLeft.shifted - .size(); currLeftShiftIdx.increment()) { + for (; currLeftShiftIdx.intValue() < upstreamLeft.shifted.size(); currLeftShiftIdx + .increment()) { final int shiftIdx = currLeftShiftIdx.intValue(); - final long beginRange = upstreamLeft.shifted - .getBeginRange(shiftIdx) << prevRightBits; + final long beginRange = + upstreamLeft.shifted.getBeginRange(shiftIdx) << prevRightBits; final long endRange = - ((upstreamLeft.shifted.getEndRange(shiftIdx) - + 1) << prevRightBits) - 1; - final long shiftDelta = upstreamLeft.shifted - .getShiftDelta(shiftIdx) << currRightBits; + ((upstreamLeft.shifted.getEndRange(shiftIdx) + 1) << prevRightBits) - 1; + final long shiftDelta = + upstreamLeft.shifted.getShiftDelta(shiftIdx) << currRightBits; if (endRange < watermark.longValue()) { continue; @@ -504,23 +452,19 @@ protected void process() { } final long maxTouched = Math.min(ii - 1, endRange); - final long minTouched = - Math.max(watermark.longValue(), beginRange); + final long minTouched = Math.max(watermark.longValue(), beginRange); if (!okit.advance(minTouched)) { break; } 
shiftBuilder.shiftRange(minTouched, maxTouched, shiftDelta); - okit.getNextOrderedKeysThrough(maxTouched) - .forAllLongRanges((s, e) -> { - toRemoveFromResultIndex.appendRange(s, e); - toInsertIntoResultIndex.appendRange(s + shiftDelta, - e + shiftDelta); - }); + okit.getNextOrderedKeysThrough(maxTouched).forAllLongRanges((s, e) -> { + toRemoveFromResultIndex.appendRange(s, e); + toInsertIntoResultIndex.appendRange(s + shiftDelta, e + shiftDelta); + }); watermark.setValue(maxTouched + 1); - if (!finishShifting.booleanValue() - && maxTouched != endRange) { + if (!finishShifting.booleanValue() && maxTouched != endRange) { break; } } @@ -536,61 +480,56 @@ protected void process() { processLeftShiftsUntil.accept(prevOffset); if (slotFromLeftIndex == RightIncrementalChunkedCrossJoinStateManager.LEFT_MAPPING_MISSING) { - // Since left rows that change key-column-groups are - // currently removed from all JSM data structures, - // they won't have a properly mapped slot. They will be - // added to their new slot after we - // generate-downstream shifts. The result index is also - // updated for these rows in - // the left-rm/left-add code paths. This code path should - // only be hit when prevRightBits != currRightBits. + // Since left rows that change key-column-groups are currently removed from all + // JSM data structures, + // they won't have a properly mapped slot. They will be added to their new slot + // after we + // generate-downstream shifts. The result index is also updated for these rows + // in + // the left-rm/left-add code paths. This code path should only be hit when + // prevRightBits != currRightBits. 
return; } - final CrossJoinModifiedSlotTracker.SlotState slotState = tracker - .getFinalSlotState(jsm.getTrackerCookie(slotFromLeftIndex)); + final CrossJoinModifiedSlotTracker.SlotState slotState = + tracker.getFinalSlotState(jsm.getTrackerCookie(slotFromLeftIndex)); if (prevRightBits != currRightBits) { - final Index rightIndex = - jsm.getRightIndex(slotFromLeftIndex); + final Index rightIndex = jsm.getRightIndex(slotFromLeftIndex); if (rightIndex.nonempty()) { toInsertIntoResultIndex.appendRange(currOffset, - currOffset + rightIndex.size() - 1); + currOffset + rightIndex.size() - 1); } } else if (slotState != null) { final long prevSize = slotState.rightIndex.sizePrev(); final long currSize = slotState.rightIndex.size(); - // note prevCardinality == currCardinality if prevRightBits - // == currRightBits + // note prevCardinality == currCardinality if prevRightBits == currRightBits if (prevOffset != currOffset) { // might be changing to an empty group if (currSize > 0) { toInsertIntoResultIndex.appendRange(currOffset, - currOffset + currSize - 1); + currOffset + currSize - 1); } // might have changed from an empty group if (prevSize > 0) { toRemoveFromResultIndex.appendRange(prevOffset, - prevOffset + currCardinality - 1); + prevOffset + currCardinality - 1); } } else if (prevSize < currSize) { - toInsertIntoResultIndex.appendRange( - currOffset + prevSize, currOffset + currSize - 1); + toInsertIntoResultIndex.appendRange(currOffset + prevSize, + currOffset + currSize - 1); } else if (currSize < prevSize && prevSize > 0) { - toRemoveFromResultIndex.appendRange( - prevOffset + currSize, - prevOffset + currCardinality - 1); + toRemoveFromResultIndex.appendRange(prevOffset + currSize, + prevOffset + currCardinality - 1); } } // propagate inner shifts if (slotState != null && slotState.innerShifted.nonempty()) { - shiftBuilder.appendShiftData(slotState.innerShifted, - prevOffset, prevCardinality, currOffset, - currCardinality); + 
shiftBuilder.appendShiftData(slotState.innerShifted, prevOffset, + prevCardinality, currOffset, currCardinality); } else if (prevOffset != currOffset) { - shiftBuilder.shiftRange(prevOffset, - prevOffset + prevCardinality - 1, - currOffset - prevOffset); + shiftBuilder.shiftRange(prevOffset, prevOffset + prevCardinality - 1, + currOffset - prevOffset); } watermark.setValue((pi + 1) << prevRightBits); }); @@ -606,76 +545,69 @@ protected void process() { final long slotFromLeftIndex = jsm.getSlotFromLeftIndex(ii); if (slotFromLeftIndex == RightIncrementalChunkedCrossJoinStateManager.LEFT_MAPPING_MISSING) { - // Since left rows that change key-column-groups are currently - // removed from all JSM data structures, - // they won't have a properly mapped slot. They will be added to - // their new slot after we - // generate-downstream shifts. The result index is also updated - // for these rows in - // the left-rm/left-add code paths. This code path should only - // be hit when prevRightBits != currRightBits. + // Since left rows that change key-column-groups are currently removed from all JSM + // data structures, + // they won't have a properly mapped slot. They will be added to their new slot + // after we + // generate-downstream shifts. The result index is also updated for these rows in + // the left-rm/left-add code paths. This code path should only be hit when + // prevRightBits != currRightBits. 
return; } - final CrossJoinModifiedSlotTracker.SlotState slotState = tracker - .getFinalSlotState(jsm.getTrackerCookie(slotFromLeftIndex)); + final CrossJoinModifiedSlotTracker.SlotState slotState = + tracker.getFinalSlotState(jsm.getTrackerCookie(slotFromLeftIndex)); // calculate modifications to result index if (prevRightBits != currRightBits) { final Index rightIndex = jsm.getRightIndex(slotFromLeftIndex); if (rightIndex.nonempty()) { toInsertIntoResultIndex.appendRange(currOffset, - currOffset + rightIndex.size() - 1); + currOffset + rightIndex.size() - 1); } } else if (slotState != null) { final long prevSize = slotState.rightIndex.sizePrev(); final long currSize = slotState.rightIndex.size(); - // note: prevOffset == currOffset (because left did not shift - // and right bits are unchanged) + // note: prevOffset == currOffset (because left did not shift and right bits are + // unchanged) if (prevSize < currSize) { toInsertIntoResultIndex.appendRange(currOffset + prevSize, - currOffset + currSize - 1); + currOffset + currSize - 1); } else if (currSize < prevSize && prevSize > 0) { - // note prevCardinality == currCardinality if prevRightBits - // == currRightBits + // note prevCardinality == currCardinality if prevRightBits == currRightBits toRemoveFromResultIndex.appendRange(prevOffset + currSize, - prevOffset + currCardinality - 1); + prevOffset + currCardinality - 1); } } // propagate inner shifts if (slotState != null && slotState.innerShifted.nonempty()) { - shiftBuilder.appendShiftData(slotState.innerShifted, prevOffset, - prevCardinality, currOffset, currCardinality); + shiftBuilder.appendShiftData(slotState.innerShifted, prevOffset, prevCardinality, + currOffset, currCardinality); } else if (prevOffset != currOffset) { - shiftBuilder.shiftRange(prevOffset, - prevOffset + prevCardinality - 1, currOffset - prevOffset); + shiftBuilder.shiftRange(prevOffset, prevOffset + prevCardinality - 1, + currOffset - prevOffset); } }); } else if (leftChanged && 
upstreamLeft.shifted.nonempty()) { // upstream-left-shift our result index, and build downstream shifts - try (final OrderedKeys.Iterator okit = - resultIndex.getOrderedKeysIterator()) { + try (final OrderedKeys.Iterator okit = resultIndex.getOrderedKeysIterator()) { for (int idx = 0; idx < upstreamLeft.shifted.size(); ++idx) { - final long beginRange = - upstreamLeft.shifted.getBeginRange(idx) << prevRightBits; - final long endRange = ((upstreamLeft.shifted.getEndRange(idx) - + 1) << prevRightBits) - 1; - final long shiftDelta = - upstreamLeft.shifted.getShiftDelta(idx) << prevRightBits; + final long beginRange = upstreamLeft.shifted.getBeginRange(idx) << prevRightBits; + final long endRange = + ((upstreamLeft.shifted.getEndRange(idx) + 1) << prevRightBits) - 1; + final long shiftDelta = upstreamLeft.shifted.getShiftDelta(idx) << prevRightBits; if (!okit.advance(beginRange)) { break; } shiftBuilder.shiftRange(beginRange, endRange, shiftDelta); - okit.getNextOrderedKeysThrough(endRange) - .forAllLongRanges((s, e) -> { - toRemoveFromResultIndex.appendRange(s, e); - toInsertIntoResultIndex.appendRange(s + shiftDelta, - e + shiftDelta); - }); + okit.getNextOrderedKeysThrough(endRange).forAllLongRanges((s, e) -> { + toRemoveFromResultIndex.appendRange(s, e); + toInsertIntoResultIndex.appendRange(s + shiftDelta, e + shiftDelta); + }); } } } @@ -683,7 +615,7 @@ protected void process() { downstream.shifted = shiftBuilder.build(); try (final Index toRemove = toRemoveFromResultIndex.getIndex(); - final Index toInsert = toInsertIntoResultIndex.getIndex()) { + final Index toInsert = toInsertIntoResultIndex.getIndex()) { if (prevRightBits != currRightBits) { // every row shifted resultIndex.clear(); @@ -725,8 +657,8 @@ protected void process() { } if (leftChanged && tracker.leftModified.nonempty()) { - // We simply exploded the left rows to include all existing right rows; - // must remove the recently added. 
+ // We simply exploded the left rows to include all existing right rows; must remove the + // recently added. downstream.modified.remove(downstream.added); } if (downstream.modified.empty()) { @@ -735,12 +667,11 @@ protected void process() { downstream.modifiedColumnSet = resultTable.modifiedColumnSet; downstream.modifiedColumnSet.clear(); if (leftChanged && tracker.hasLeftModifies) { - leftTransformer.transform(upstreamLeft.modifiedColumnSet, - downstream.modifiedColumnSet); + leftTransformer.transform(upstreamLeft.modifiedColumnSet, downstream.modifiedColumnSet); } if (rightChanged && tracker.hasRightModifies) { rightTransformer.transform(upstreamRight.modifiedColumnSet, - downstream.modifiedColumnSet); + downstream.modifiedColumnSet); } } @@ -756,10 +687,9 @@ protected void process() { rightTable.listenForUpdates(rightRecorder); resultTable.addParentReference(mergedListener); } else { - rightTable.listenForUpdates(new BaseTable.ShiftAwareListenerImpl( - bucketingContext.listenerDescription, rightTable, resultTable) { - private final CrossJoinModifiedSlotTracker tracker = - new CrossJoinModifiedSlotTracker(jsm); + rightTable.listenForUpdates(new BaseTable.ShiftAwareListenerImpl(bucketingContext.listenerDescription, + rightTable, resultTable) { + private final CrossJoinModifiedSlotTracker tracker = new CrossJoinModifiedSlotTracker(jsm); @Override public void onUpdate(Update upstream) { @@ -780,8 +710,8 @@ public void onUpdate(Update upstream) { jsm.rightAdd(upstream.added, tracker); } if (upstream.modified.nonempty()) { - jsm.rightModified(upstream, - upstream.modifiedColumnSet.containsAny(rightKeyColumns), tracker); + jsm.rightModified(upstream, upstream.modifiedColumnSet.containsAny(rightKeyColumns), + tracker); } // right changes are flushed now @@ -792,8 +722,7 @@ public void onUpdate(Update upstream) { } tracker.finalizeRightProcessing(); - // space needed for right index might have changed, let's verify we have - // enough keyspace + // space needed for 
right index might have changed, let's verify we have enough keyspace jsm.validateKeySpaceSize(); final int prevRightBits = jsm.getPrevNumShiftBits(); @@ -807,15 +736,12 @@ public void onUpdate(Update upstream) { // Must rebuild entire result index. resultIndex.clear(); } else { - final Index.RandomBuilder leftChangedBuilder = - Index.FACTORY.getRandomBuilder(); + final Index.RandomBuilder leftChangedBuilder = Index.FACTORY.getRandomBuilder(); tracker.forAllModifiedSlots(slotState -> { - // filter out slots that only have right shifts (these don't have - // downstream effects) + // filter out slots that only have right shifts (these don't have downstream effects) if (slotState.rightChanged) { - leftChangedBuilder - .addIndex(jsm.getLeftIndex(slotState.slotLocation)); + leftChangedBuilder.addIndex(jsm.getLeftIndex(slotState.slotLocation)); } }); @@ -824,15 +750,12 @@ public void onUpdate(Update upstream) { final long prevCardinality = 1L << prevRightBits; final Index.SequentialBuilder added = Index.FACTORY.getSequentialBuilder(); - final Index.SequentialBuilder removed = - Index.FACTORY.getSequentialBuilder(); - final Index.SequentialBuilder modified = - Index.FACTORY.getSequentialBuilder(); + final Index.SequentialBuilder removed = Index.FACTORY.getSequentialBuilder(); + final Index.SequentialBuilder modified = Index.FACTORY.getSequentialBuilder(); final Index.SequentialBuilder removeFromResultIndex = - numRightBitsChanged ? null : Index.FACTORY.getSequentialBuilder(); - final Index.SequentialBuilder addToResultIndex = - Index.FACTORY.getSequentialBuilder(); + numRightBitsChanged ? null : Index.FACTORY.getSequentialBuilder(); + final Index.SequentialBuilder addToResultIndex = Index.FACTORY.getSequentialBuilder(); // Accumulate all changes by left row. 
leftChanged.forAllLongs(ii -> { @@ -841,43 +764,36 @@ public void onUpdate(Update upstream) { final long slot = jsm.getSlotFromLeftIndex(ii); final CrossJoinModifiedSlotTracker.SlotState slotState = - tracker.getFinalSlotState(jsm.getTrackerCookie(slot)); - final Index rightIndex = - slotState == null ? jsm.getRightIndex(slot) : slotState.rightIndex; + tracker.getFinalSlotState(jsm.getTrackerCookie(slot)); + final Index rightIndex = slotState == null ? jsm.getRightIndex(slot) : slotState.rightIndex; if (numRightBitsChanged) { if (rightIndex.nonempty()) { - addToResultIndex.appendRange(currOffset, - currOffset + rightIndex.size() - 1); + addToResultIndex.appendRange(currOffset, currOffset + rightIndex.size() - 1); } } else if (slotState != null) { final long prevSize = slotState.rightIndex.sizePrev(); final long currSize = slotState.rightIndex.size(); if (prevSize < currSize) { - addToResultIndex.appendRange(currOffset + prevSize, - currOffset + currSize - 1); + addToResultIndex.appendRange(currOffset + prevSize, currOffset + currSize - 1); } else if (currSize < prevSize && prevSize > 0) { - // note prevCardinality == currCardinality if prevRightBits == - // currRightBits + // note prevCardinality == currCardinality if prevRightBits == currRightBits removeFromResultIndex.appendRange(prevOffset + currSize, - prevOffset + prevCardinality - 1); + prevOffset + prevCardinality - 1); } } if (slotState == null || !slotState.rightChanged) { if (prevOffset != currOffset) { - shifted.shiftRange(prevOffset, - prevOffset + rightIndex.sizePrev() - 1, - currOffset - prevOffset); + shifted.shiftRange(prevOffset, prevOffset + rightIndex.sizePrev() - 1, + currOffset - prevOffset); } return; } - final long preShiftShiftAmt = - prevOffset - (slotState.lastIndex << prevRightBits); - final long postShiftShiftAmt = - currOffset - (slotState.lastIndex << currRightBits); + final long preShiftShiftAmt = prevOffset - (slotState.lastIndex << prevRightBits); + final long postShiftShiftAmt = 
currOffset - (slotState.lastIndex << currRightBits); if (slotState.rightAdded.nonempty()) { slotState.rightAdded.shiftInPlace(postShiftShiftAmt); added.appendIndex(slotState.rightAdded); @@ -892,8 +808,8 @@ public void onUpdate(Update upstream) { } slotState.lastIndex = ii; - shifted.appendShiftData(slotState.innerShifted, prevOffset, - rightIndex.sizePrev(), currOffset, rightIndex.size()); + shifted.appendShiftData(slotState.innerShifted, prevOffset, rightIndex.sizePrev(), + currOffset, rightIndex.size()); }); downstream.added = added.getIndex(); @@ -920,7 +836,7 @@ public void onUpdate(Update upstream) { } else { downstream.modifiedColumnSet = resultTable.modifiedColumnSet; rightTransformer.clearAndTransform(upstream.modifiedColumnSet, - downstream.modifiedColumnSet); + downstream.modifiedColumnSet); } resultTable.notifyListeners(downstream); @@ -932,8 +848,8 @@ public void onUpdate(Update upstream) { } } - private static void validateZeroKeyIndexSpace(final QueryTable leftTable, - final QueryTable rightTable, int numRightBitsReserved) { + private static void validateZeroKeyIndexSpace(final QueryTable leftTable, final QueryTable rightTable, + int numRightBitsReserved) { final long leftLastKey = leftTable.getIndex().lastKey(); final long rightLastKey = rightTable.getIndex().lastKey(); final int minLeftBits = CrossJoinShiftState.getMinBits(leftLastKey); @@ -941,276 +857,246 @@ private static void validateZeroKeyIndexSpace(final QueryTable leftTable, numRightBitsReserved = Math.max(numRightBitsReserved, minRightBits); if (minLeftBits + numRightBitsReserved > 63) { throw new OutOfKeySpaceException( - "join with zero key columns out of index space (left reqBits + right reserveBits > 63); " - + "(left table: {size: " + leftTable.getIndex().size() + " maxIndex: " - + leftLastKey + " reqBits: " + minLeftBits + "}) X " - + "(right table: {size: " + rightTable.getIndex().size() + " maxIndex: " - + rightLastKey + " reqBits: " + minRightBits + " reservedBits: " - + 
numRightBitsReserved + "})" - + " exceeds Long.MAX_VALUE. Consider flattening either table or reserving fewer right bits if possible."); + "join with zero key columns out of index space (left reqBits + right reserveBits > 63); " + + "(left table: {size: " + leftTable.getIndex().size() + " maxIndex: " + leftLastKey + + " reqBits: " + minLeftBits + "}) X " + + "(right table: {size: " + rightTable.getIndex().size() + " maxIndex: " + rightLastKey + + " reqBits: " + minRightBits + " reservedBits: " + numRightBitsReserved + "})" + + " exceeds Long.MAX_VALUE. Consider flattening either table or reserving fewer right bits if possible."); } } @NotNull - private static Table zeroKeyColumnsJoin(QueryTable leftTable, QueryTable rightTable, - MatchPair[] columnsToAdd, int numRightBitsToReserve, String listenerDescription) { + private static Table zeroKeyColumnsJoin(QueryTable leftTable, QueryTable rightTable, MatchPair[] columnsToAdd, + int numRightBitsToReserve, String listenerDescription) { // we are a single value join, we do not need to do any hash-related work validateZeroKeyIndexSpace(leftTable, rightTable, numRightBitsToReserve); - final CrossJoinShiftState crossJoinState = new CrossJoinShiftState( - Math.max(numRightBitsToReserve, CrossJoinShiftState.getMinBits(rightTable))); + final CrossJoinShiftState crossJoinState = + new CrossJoinShiftState(Math.max(numRightBitsToReserve, CrossJoinShiftState.getMinBits(rightTable))); final Index resultIndex = Index.FACTORY.getEmptyIndex(); - final QueryTable result = - makeResult(leftTable, rightTable, columnsToAdd, crossJoinState, resultIndex, cs -> { - // noinspection unchecked - return new BitMaskingColumnSource<>(crossJoinState, cs); - }); + final QueryTable result = makeResult(leftTable, rightTable, columnsToAdd, crossJoinState, resultIndex, cs -> { + // noinspection unchecked + return new BitMaskingColumnSource<>(crossJoinState, cs); + }); final ModifiedColumnSet.Transformer leftTransformer = - 
leftTable.newModifiedColumnSetTransformer(result, - leftTable.getDefinition().getColumnNamesArray()); + leftTable.newModifiedColumnSetTransformer(result, leftTable.getDefinition().getColumnNamesArray()); final ModifiedColumnSet.Transformer rightTransformer = - rightTable.newModifiedColumnSetTransformer(result, columnsToAdd); + rightTable.newModifiedColumnSetTransformer(result, columnsToAdd); - final BiConsumer onUpdate = - (leftUpdate, rightUpdate) -> { + final BiConsumer onUpdate = (leftUpdate, rightUpdate) -> { - final boolean leftChanged = leftUpdate != null; - final boolean rightChanged = rightUpdate != null; + final boolean leftChanged = leftUpdate != null; + final boolean rightChanged = rightUpdate != null; - final int prevRightBits = crossJoinState.getNumShiftBits(); - final int currRightBits = - Math.max(prevRightBits, CrossJoinShiftState.getMinBits(rightTable)); - validateZeroKeyIndexSpace(leftTable, rightTable, currRightBits); - - if (currRightBits != prevRightBits) { - crossJoinState.setNumShiftBitsAndUpdatePrev(currRightBits); - } + final int prevRightBits = crossJoinState.getNumShiftBits(); + final int currRightBits = Math.max(prevRightBits, CrossJoinShiftState.getMinBits(rightTable)); + validateZeroKeyIndexSpace(leftTable, rightTable, currRightBits); - final ShiftAwareListener.Update downstream = new ShiftAwareListener.Update(); - downstream.added = Index.FACTORY.getEmptyIndex(); - downstream.removed = Index.FACTORY.getEmptyIndex(); - downstream.modified = Index.FACTORY.getEmptyIndex(); - downstream.modifiedColumnSet = result.modifiedColumnSet; - downstream.modifiedColumnSet.clear(); - - final IndexShiftData.Builder shiftBuilder = new IndexShiftData.Builder(); - - try (final SafeCloseableList closer = new SafeCloseableList()) { - if (rightChanged && rightUpdate.modified.nonempty()) { - rightTransformer.transform(rightUpdate.modifiedColumnSet, - result.modifiedColumnSet); - } - if (leftChanged && leftUpdate.modified.nonempty()) { - 
leftTransformer.transform(leftUpdate.modifiedColumnSet, - result.modifiedColumnSet); - } - - long currRightShift = 0; // how far currRight has been shifted - final Index currRight = closer.add(rightTable.getIndex().clone()); - - if (rightChanged) { - // Must touch every left row. (Note: this code is accessible iff right - // changed.) - final Index currLeft = leftTable.getIndex(); - final Index prevLeft = closer.add(currLeft.getPrevIndex()); - - long prevRightShift = 0; // how far prevRight has been shifted - final Index prevRight = closer.add(rightTable.getIndex().getPrevIndex()); - - long rmRightShift = 0; // how far rmRight has been shifted - final Index rmRight = closer.add(rightUpdate.removed.clone()); - - long addRightShift = 0; // how far addRight has been shifted - final Index addRight = closer.add(rightUpdate.added.clone()); - - long modRightShift = 0; // how far modRight has been shifted - final Index modRight = closer.add(rightUpdate.modified.clone()); - - long existingRightShift = 0; // how far existingRight has been shifted - final Index existingRight = closer.add(currRight.minus(rightUpdate.added)); + if (currRightBits != prevRightBits) { + crossJoinState.setNumShiftBitsAndUpdatePrev(currRightBits); + } - final boolean rightHasAdds = addRight.nonempty(); - final boolean rightHasRemoves = rmRight.nonempty(); - final boolean rightHasModifies = modRight.nonempty(); + final ShiftAwareListener.Update downstream = new ShiftAwareListener.Update(); + downstream.added = Index.FACTORY.getEmptyIndex(); + downstream.removed = Index.FACTORY.getEmptyIndex(); + downstream.modified = Index.FACTORY.getEmptyIndex(); + downstream.modifiedColumnSet = result.modifiedColumnSet; + downstream.modifiedColumnSet.clear(); - // Do note that add/mod's are in post-shift keyspace. - final Index.SearchIterator leftAddIter = - leftChanged ? leftUpdate.added.searchIterator() : null; - final Index.SearchIterator leftRmIter = - leftChanged ? 
leftUpdate.removed.searchIterator() : null; - final Index.SearchIterator leftModIter = - leftChanged ? leftUpdate.modified.searchIterator() : null; - boolean moreLeftAdd = leftChanged && advanceIterator(leftAddIter); - boolean moreLeftRm = leftChanged && advanceIterator(leftRmIter); - boolean moreLeftMod = leftChanged && advanceIterator(leftModIter); + final IndexShiftData.Builder shiftBuilder = new IndexShiftData.Builder(); - // Prepare left-side iterators. - final Index.SearchIterator leftPrevIter = prevLeft.searchIterator(); - final Index.SearchIterator leftCurrIter = - leftTable.getIndex().searchIterator(); - boolean moreLeftPrev = advanceIterator(leftPrevIter); - boolean moreLeftCurr = advanceIterator(leftCurrIter); + try (final SafeCloseableList closer = new SafeCloseableList()) { + if (rightChanged && rightUpdate.modified.nonempty()) { + rightTransformer.transform(rightUpdate.modifiedColumnSet, result.modifiedColumnSet); + } + if (leftChanged && leftUpdate.modified.nonempty()) { + leftTransformer.transform(leftUpdate.modifiedColumnSet, result.modifiedColumnSet); + } - // It is more efficient to completely rebuild this index, than to modify - // each row to right mapping. - resultIndex.clear(); + long currRightShift = 0; // how far currRight has been shifted + final Index currRight = closer.add(rightTable.getIndex().clone()); + + if (rightChanged) { + // Must touch every left row. (Note: this code is accessible iff right changed.) 
+ final Index currLeft = leftTable.getIndex(); + final Index prevLeft = closer.add(currLeft.getPrevIndex()); + + long prevRightShift = 0; // how far prevRight has been shifted + final Index prevRight = closer.add(rightTable.getIndex().getPrevIndex()); + + long rmRightShift = 0; // how far rmRight has been shifted + final Index rmRight = closer.add(rightUpdate.removed.clone()); + + long addRightShift = 0; // how far addRight has been shifted + final Index addRight = closer.add(rightUpdate.added.clone()); + + long modRightShift = 0; // how far modRight has been shifted + final Index modRight = closer.add(rightUpdate.modified.clone()); + + long existingRightShift = 0; // how far existingRight has been shifted + final Index existingRight = closer.add(currRight.minus(rightUpdate.added)); + + final boolean rightHasAdds = addRight.nonempty(); + final boolean rightHasRemoves = rmRight.nonempty(); + final boolean rightHasModifies = modRight.nonempty(); + + // Do note that add/mod's are in post-shift keyspace. + final Index.SearchIterator leftAddIter = leftChanged ? leftUpdate.added.searchIterator() : null; + final Index.SearchIterator leftRmIter = leftChanged ? leftUpdate.removed.searchIterator() : null; + final Index.SearchIterator leftModIter = leftChanged ? leftUpdate.modified.searchIterator() : null; + boolean moreLeftAdd = leftChanged && advanceIterator(leftAddIter); + boolean moreLeftRm = leftChanged && advanceIterator(leftRmIter); + boolean moreLeftMod = leftChanged && advanceIterator(leftModIter); + + // Prepare left-side iterators. + final Index.SearchIterator leftPrevIter = prevLeft.searchIterator(); + final Index.SearchIterator leftCurrIter = leftTable.getIndex().searchIterator(); + boolean moreLeftPrev = advanceIterator(leftPrevIter); + boolean moreLeftCurr = advanceIterator(leftCurrIter); + + // It is more efficient to completely rebuild this index, than to modify each row to right mapping. 
+ resultIndex.clear(); + + final long prevCardinality = 1L << prevRightBits; + final long currCardinality = 1L << currRightBits; + + // Note: This assumes that shifts are not allowed to re-order data. + while (moreLeftPrev) { + final long currPrevIdx = leftPrevIter.currentValue(); + final long prevResultOffset = currPrevIdx << prevRightBits; + moreLeftPrev = advanceIterator(leftPrevIter); + + if (moreLeftRm && currPrevIdx == leftRmIter.currentValue()) { + // currPrevIdx is a left remove. + moreLeftRm = advanceIterator(leftRmIter); + prevRightShift = furtherShiftIndex(prevRight, prevRightShift, prevResultOffset); + downstream.removed.insert(prevRight); + continue; + } - final long prevCardinality = 1L << prevRightBits; - final long currCardinality = 1L << currRightBits; + // Note: Pre-existing row was not removed, therefore there must be an entry in curr index. + Assert.eqTrue(moreLeftCurr, "moreLeftCurr"); + long currCurrIdx = leftCurrIter.currentValue(); + long currResultOffset = currCurrIdx << currRightBits; + moreLeftCurr = advanceIterator(leftCurrIter); - // Note: This assumes that shifts are not allowed to re-order data. - while (moreLeftPrev) { - final long currPrevIdx = leftPrevIter.currentValue(); - final long prevResultOffset = currPrevIdx << prevRightBits; - moreLeftPrev = advanceIterator(leftPrevIter); - - if (moreLeftRm && currPrevIdx == leftRmIter.currentValue()) { - // currPrevIdx is a left remove. - moreLeftRm = advanceIterator(leftRmIter); - prevRightShift = - furtherShiftIndex(prevRight, prevRightShift, prevResultOffset); - downstream.removed.insert(prevRight); - continue; - } + // Insert adds until we find our currCurrIdx that matches currPrevIdx. + while (moreLeftAdd && currCurrIdx == leftAddIter.currentValue()) { + // currCurrIdx is a left add. 
+ moreLeftAdd = advanceIterator(leftAddIter); + currRightShift = furtherShiftIndex(currRight, currRightShift, currResultOffset); + downstream.added.insert(currRight); + resultIndex.insert(currRight); - // Note: Pre-existing row was not removed, therefore there must be an - // entry in curr index. + // Advance left current iterator. Assert.eqTrue(moreLeftCurr, "moreLeftCurr"); - long currCurrIdx = leftCurrIter.currentValue(); - long currResultOffset = currCurrIdx << currRightBits; + currCurrIdx = leftCurrIter.currentValue(); + currResultOffset = currCurrIdx << currRightBits; moreLeftCurr = advanceIterator(leftCurrIter); + } - // Insert adds until we find our currCurrIdx that matches currPrevIdx. - while (moreLeftAdd && currCurrIdx == leftAddIter.currentValue()) { - // currCurrIdx is a left add. - moreLeftAdd = advanceIterator(leftAddIter); - currRightShift = - furtherShiftIndex(currRight, currRightShift, currResultOffset); - downstream.added.insert(currRight); - resultIndex.insert(currRight); - - // Advance left current iterator. 
- Assert.eqTrue(moreLeftCurr, "moreLeftCurr"); - currCurrIdx = leftCurrIter.currentValue(); - currResultOffset = currCurrIdx << currRightBits; - moreLeftCurr = advanceIterator(leftCurrIter); - } - - if (rightHasRemoves) { - rmRightShift = - furtherShiftIndex(rmRight, rmRightShift, prevResultOffset); - downstream.removed.insert(rmRight); - } + if (rightHasRemoves) { + rmRightShift = furtherShiftIndex(rmRight, rmRightShift, prevResultOffset); + downstream.removed.insert(rmRight); + } - if (rightHasAdds) { - addRightShift = - furtherShiftIndex(addRight, addRightShift, currResultOffset); - downstream.added.insert(addRight); - } + if (rightHasAdds) { + addRightShift = furtherShiftIndex(addRight, addRightShift, currResultOffset); + downstream.added.insert(addRight); + } - if (moreLeftMod && currCurrIdx == leftModIter.currentValue()) { - // currCurrIdx is modify; paint all existing rows as modified - moreLeftMod = advanceIterator(leftModIter); - existingRightShift = furtherShiftIndex(existingRight, - existingRightShift, currResultOffset); - downstream.modified.insert(existingRight); - } else if (rightHasModifies) { - modRightShift = - furtherShiftIndex(modRight, modRightShift, currResultOffset); - downstream.modified.insert(modRight); - } + if (moreLeftMod && currCurrIdx == leftModIter.currentValue()) { + // currCurrIdx is modify; paint all existing rows as modified + moreLeftMod = advanceIterator(leftModIter); + existingRightShift = furtherShiftIndex(existingRight, existingRightShift, currResultOffset); + downstream.modified.insert(existingRight); + } else if (rightHasModifies) { + modRightShift = furtherShiftIndex(modRight, modRightShift, currResultOffset); + downstream.modified.insert(modRight); + } - currRightShift = - furtherShiftIndex(currRight, currRightShift, currResultOffset); - resultIndex.insert(currRight); + currRightShift = furtherShiftIndex(currRight, currRightShift, currResultOffset); + resultIndex.insert(currRight); - if (rightUpdate.shifted.nonempty()) { 
- shiftBuilder.appendShiftData(rightUpdate.shifted, prevResultOffset, - prevCardinality, currResultOffset, currCardinality); - } else if (currResultOffset != prevResultOffset) { - final long shiftDelta = currResultOffset - prevResultOffset; - final long lastResultIdx = prevResultOffset + prevCardinality - 1; - shiftBuilder.shiftRange(prevResultOffset, lastResultIdx, - shiftDelta); - } + if (rightUpdate.shifted.nonempty()) { + shiftBuilder.appendShiftData(rightUpdate.shifted, prevResultOffset, prevCardinality, + currResultOffset, currCardinality); + } else if (currResultOffset != prevResultOffset) { + final long shiftDelta = currResultOffset - prevResultOffset; + final long lastResultIdx = prevResultOffset + prevCardinality - 1; + shiftBuilder.shiftRange(prevResultOffset, lastResultIdx, shiftDelta); } + } - // Note: Only left adds remain. - while (moreLeftCurr) { - final long currCurrIdx = leftCurrIter.currentValue(); - moreLeftCurr = advanceIterator(leftCurrIter); + // Note: Only left adds remain. 
+ while (moreLeftCurr) { + final long currCurrIdx = leftCurrIter.currentValue(); + moreLeftCurr = advanceIterator(leftCurrIter); - Assert.eqTrue(moreLeftAdd, "moreLeftAdd"); - assert leftAddIter != null; - Assert.eq(currCurrIdx, "currCurrIdx", leftAddIter.currentValue(), - "leftAddIter.currentValue()"); - moreLeftAdd = advanceIterator(leftAddIter); + Assert.eqTrue(moreLeftAdd, "moreLeftAdd"); + assert leftAddIter != null; + Assert.eq(currCurrIdx, "currCurrIdx", leftAddIter.currentValue(), "leftAddIter.currentValue()"); + moreLeftAdd = advanceIterator(leftAddIter); - final long currResultIdx = currCurrIdx << currRightBits; - currRightShift = - furtherShiftIndex(currRight, currRightShift, currResultIdx); - downstream.added.insert(currRight); - resultIndex.insert(currRight); - } + final long currResultIdx = currCurrIdx << currRightBits; + currRightShift = furtherShiftIndex(currRight, currRightShift, currResultIdx); + downstream.added.insert(currRight); + resultIndex.insert(currRight); + } - downstream.shifted = shiftBuilder.build(); - } else { - // Explode left updates to apply to all right rows. - assert leftUpdate != null; - - Index.SearchIterator iter = leftUpdate.removed.searchIterator(); - while (iter.hasNext()) { - final long currIdx = iter.nextLong(); - final long currResultIdx = currIdx << currRightBits; - currRightShift = - furtherShiftIndex(currRight, currRightShift, currResultIdx); - downstream.removed.insert(currRight); - resultIndex.removeRange(currResultIdx, - ((currIdx + 1) << currRightBits) - 1); - } + downstream.shifted = shiftBuilder.build(); + } else { + // Explode left updates to apply to all right rows. 
+ assert leftUpdate != null; + + Index.SearchIterator iter = leftUpdate.removed.searchIterator(); + while (iter.hasNext()) { + final long currIdx = iter.nextLong(); + final long currResultIdx = currIdx << currRightBits; + currRightShift = furtherShiftIndex(currRight, currRightShift, currResultIdx); + downstream.removed.insert(currRight); + resultIndex.removeRange(currResultIdx, ((currIdx + 1) << currRightBits) - 1); + } - downstream.shifted = expandLeftOnlyShift(leftTable.getIndex(), - leftUpdate.shifted, crossJoinState); - downstream.shifted.apply(resultIndex); - - iter = leftUpdate.modified.searchIterator(); - while (iter.hasNext()) { - final long currIdx = iter.nextLong(); - final long currResultIdx = currIdx << currRightBits; - currRightShift = - furtherShiftIndex(currRight, currRightShift, currResultIdx); - downstream.modified.insert(currRight); - } + downstream.shifted = expandLeftOnlyShift(leftTable.getIndex(), leftUpdate.shifted, crossJoinState); + downstream.shifted.apply(resultIndex); - iter = leftUpdate.added.searchIterator(); - while (iter.hasNext()) { - final long currIdx = iter.nextLong(); - final long currResultIdx = currIdx << currRightBits; - currRightShift = - furtherShiftIndex(currRight, currRightShift, currResultIdx); - downstream.added.insert(currRight); - resultIndex.insert(currRight); - } + iter = leftUpdate.modified.searchIterator(); + while (iter.hasNext()) { + final long currIdx = iter.nextLong(); + final long currResultIdx = currIdx << currRightBits; + currRightShift = furtherShiftIndex(currRight, currRightShift, currResultIdx); + downstream.modified.insert(currRight); + } + + iter = leftUpdate.added.searchIterator(); + while (iter.hasNext()) { + final long currIdx = iter.nextLong(); + final long currResultIdx = currIdx << currRightBits; + currRightShift = furtherShiftIndex(currRight, currRightShift, currResultIdx); + downstream.added.insert(currRight); + resultIndex.insert(currRight); } } + } - result.notifyListeners(downstream); - }; + 
result.notifyListeners(downstream); + }; if (leftTable.isLive() && rightTable.isLive()) { final JoinListenerRecorder leftRecorder = - new JoinListenerRecorder(true, listenerDescription, leftTable, result); + new JoinListenerRecorder(true, listenerDescription, leftTable, result); final JoinListenerRecorder rightRecorder = - new JoinListenerRecorder(false, listenerDescription, rightTable, result); + new JoinListenerRecorder(false, listenerDescription, rightTable, result); - final MergedListener mergedListener = - new MergedListener(Arrays.asList(leftRecorder, rightRecorder), + final MergedListener mergedListener = new MergedListener(Arrays.asList(leftRecorder, rightRecorder), Collections.emptyList(), listenerDescription, result) { - @Override - protected void process() { - onUpdate.accept(leftRecorder.getUpdate(), rightRecorder.getUpdate()); - } - }; + @Override + protected void process() { + onUpdate.accept(leftRecorder.getUpdate(), rightRecorder.getUpdate()); + } + }; leftRecorder.setMergedListener(mergedListener); rightRecorder.setMergedListener(mergedListener); @@ -1218,21 +1104,19 @@ protected void process() { rightTable.listenForUpdates(rightRecorder); result.addParentReference(mergedListener); } else if (leftTable.isLive() && rightTable.size() > 0) { - leftTable.listenForUpdates( - new BaseTable.ShiftAwareListenerImpl(listenerDescription, leftTable, result) { - @Override - public void onUpdate(final Update upstream) { - onUpdate.accept(upstream, null); - } - }); + leftTable.listenForUpdates(new BaseTable.ShiftAwareListenerImpl(listenerDescription, leftTable, result) { + @Override + public void onUpdate(final Update upstream) { + onUpdate.accept(upstream, null); + } + }); } else if (rightTable.isLive() && leftTable.size() > 0) { - rightTable.listenForUpdates( - new BaseTable.ShiftAwareListenerImpl(listenerDescription, rightTable, result) { - @Override - public void onUpdate(final Update upstream) { - onUpdate.accept(null, upstream); - } - }); + 
rightTable.listenForUpdates(new BaseTable.ShiftAwareListenerImpl(listenerDescription, rightTable, result) { + @Override + public void onUpdate(final Update upstream) { + onUpdate.accept(null, upstream); + } + }); } // Initialize result table. @@ -1240,8 +1124,7 @@ public void onUpdate(final Update upstream) { final MutableLong currRightShift = new MutableLong(); leftTable.getIndex().forAllLongs((currIdx) -> { final long currResultIdx = currIdx << crossJoinState.getNumShiftBits(); - currRightShift.setValue( - furtherShiftIndex(currRight, currRightShift.longValue(), currResultIdx)); + currRightShift.setValue(furtherShiftIndex(currRight, currRightShift.longValue(), currResultIdx)); resultIndex.insert(currRight); }); } @@ -1257,15 +1140,14 @@ private static boolean advanceIterator(final Index.SearchIterator iter) { return true; } - private static long furtherShiftIndex(final Index index, final long currShift, - final long destShift) { + private static long furtherShiftIndex(final Index index, final long currShift, final long destShift) { final long toShift = destShift - currShift; index.shiftInPlace(toShift); return destShift; } - private static IndexShiftData expandLeftOnlyShift(final Index leftIndex, - final IndexShiftData leftShifts, final CrossJoinShiftState shiftState) { + private static IndexShiftData expandLeftOnlyShift(final Index leftIndex, final IndexShiftData leftShifts, + final CrossJoinShiftState shiftState) { final int currRightBits = shiftState.getNumShiftBits(); final int prevRightBits = shiftState.getPrevNumShiftBits(); final boolean needPerRowShift = currRightBits != prevRightBits; @@ -1308,8 +1190,7 @@ public void accept(long exclusiveEnd, long extraDelta) { final long ss = leftShifts.getBeginRange(si); final long se = leftShifts.getEndRange(si); final long sd = leftShifts.getShiftDelta(si); - shiftBuilder.shiftRange(ss << currRightBits, ((se + 1) << currRightBits) - 1, - sd << currRightBits); + shiftBuilder.shiftRange(ss << currRightBits, ((se + 1) 
<< currRightBits) - 1, sd << currRightBits); } } @@ -1318,25 +1199,22 @@ public void accept(long exclusiveEnd, long extraDelta) { @NotNull private static QueryTable makeResult( - @NotNull final QueryTable leftTable, - @NotNull final Table rightTable, - @NotNull final MatchPair[] columnsToAdd, - @NotNull final CrossJoinShiftState joinState, - @NotNull final Index resultIndex, - @NotNull final Function newRightColumnSource) { + @NotNull final QueryTable leftTable, + @NotNull final Table rightTable, + @NotNull final MatchPair[] columnsToAdd, + @NotNull final CrossJoinShiftState joinState, + @NotNull final Index resultIndex, + @NotNull final Function newRightColumnSource) { final Map columnSourceMap = new LinkedHashMap<>(); - for (final Map.Entry leftColumn : leftTable.getColumnSourceMap() - .entrySet()) { + for (final Map.Entry leftColumn : leftTable.getColumnSourceMap().entrySet()) { // noinspection unchecked - final BitShiftingColumnSource wrappedSource = - new BitShiftingColumnSource(joinState, leftColumn.getValue()); + final BitShiftingColumnSource wrappedSource = new BitShiftingColumnSource(joinState, leftColumn.getValue()); columnSourceMap.put(leftColumn.getKey(), wrappedSource); } for (MatchPair mp : columnsToAdd) { - final T wrappedSource = - newRightColumnSource.apply(rightTable.getColumnSource(mp.right())); + final T wrappedSource = newRightColumnSource.apply(rightTable.getColumnSource(mp.right())); columnSourceMap.put(mp.left(), wrappedSource); } diff --git a/DB/src/main/java/io/deephaven/db/v2/CrossJoinModifiedSlotTracker.java b/DB/src/main/java/io/deephaven/db/v2/CrossJoinModifiedSlotTracker.java index 5812b0c3464..6b9e3918f96 100644 --- a/DB/src/main/java/io/deephaven/db/v2/CrossJoinModifiedSlotTracker.java +++ b/DB/src/main/java/io/deephaven/db/v2/CrossJoinModifiedSlotTracker.java @@ -21,8 +21,8 @@ /** * A tracker for modified cross join hash table slots. 
* - * After adding an entry, you get back a cookie, which must be passed in on future modification - * operations for that slot. + * After adding an entry, you get back a cookie, which must be passed in on future modification operations for that + * slot. */ class CrossJoinModifiedSlotTracker { static final long NULL_COOKIE = 0; @@ -62,8 +62,7 @@ class SlotState { final SizedLongChunk keyChunk = new SizedLongChunk<>(); Index.RandomBuilder indexBuilder = Index.FACTORY.getRandomBuilder(); - long lastIndex = 0; // if added/removed/modified have been shifted then this is the - // left-index for the shift + long lastIndex = 0; // if added/removed/modified have been shifted then this is the left-index for the shift Index rightAdded; Index rightRemoved; Index rightModified; @@ -108,8 +107,7 @@ private void ensureChunkCapacityRemaining() { } final int originalCapacity = chunkCapacity; - chunkCapacity = - (chunkCapacity >= CHUNK_SIZE) ? chunkCapacity + CHUNK_SIZE : 2 * chunkCapacity; + chunkCapacity = (chunkCapacity >= CHUNK_SIZE) ? 
chunkCapacity + CHUNK_SIZE : 2 * chunkCapacity; maxSlotChunkCapacity = Math.max(maxSlotChunkCapacity, chunkCapacity); keyChunk.ensureCapacityPreserve(chunkCapacity); flagChunk.ensureCapacityPreserve(chunkCapacity); @@ -172,7 +170,7 @@ private void doFinalizeRightState() { // make our right index be what it needs to be final long oldRightSize = rightIndex.size(); try (final Index added = innerAdded.getIndex(); - final Index removed = innerRemoved.getIndex()) { + final Index removed = innerRemoved.getIndex()) { rightIndex.remove(removed); // then we shift @@ -185,9 +183,8 @@ private void doFinalizeRightState() { jsm.onRightGroupInsertion(rightIndex, added, slotLocation); } - Assert.eq(oldRightSize + added.size() - removed.size(), - "oldRightSize + added.size() - removed.size()", rightIndex.size(), - "rightIndex.size()"); + Assert.eq(oldRightSize + added.size() - removed.size(), "oldRightSize + added.size() - removed.size()", + rightIndex.size(), "rightIndex.size()"); } // now translate added && modified; accumulate them too @@ -215,8 +212,7 @@ private void doFinalizeRightState() { modifiedBuilder.appendKey(downstreamOffset); } else { throw new IllegalStateException( - "CrossJoinModifiedSlotTracker encountered unexpected flag value: " - + flag); + "CrossJoinModifiedSlotTracker encountered unexpected flag value: " + flag); } } } @@ -236,10 +232,8 @@ private void doFinalizeRightState() { final long newRightSize = rightIndex.size(); while (postOff < newRightSize && preOff < oldRightSize) { - final long preNextOff = - (preIdx == keyChunk.size()) ? oldRightSize : keyChunk.get(preIdx); - final long postNextOff = - (postIdx == keyChunk.size()) ? newRightSize : keyChunk.get(postIdx); + final long preNextOff = (preIdx == keyChunk.size()) ? oldRightSize : keyChunk.get(preIdx); + final long postNextOff = (postIdx == keyChunk.size()) ? 
newRightSize : keyChunk.get(postIdx); final long canShift = Math.min(preNextOff - preOff, postNextOff - postOff); if (canShift > 0) { @@ -283,27 +277,23 @@ private void doFinalizeRightState() { } } - private final ObjectArraySource modifiedSlots = - new ObjectArraySource<>(SlotState.class); + private final ObjectArraySource modifiedSlots = new ObjectArraySource<>(SlotState.class); private LongSortKernel sortKernel; private void ensureSortKernel() { if (sortKernel == null) { - sortKernel = LongSortKernel.makeContext(ChunkType.Long, SortingOrder.Ascending, - maxSlotChunkCapacity, true); + sortKernel = LongSortKernel.makeContext(ChunkType.Long, SortingOrder.Ascending, maxSlotChunkCapacity, true); } } /** - * the location that we must write to in modified slots; also if we have a pointer that falls - * outside the range [0, pointer); then we know it is invalid + * the location that we must write to in modified slots; also if we have a pointer that falls outside the range [0, + * pointer); then we know it is invalid */ private long pointer; /** how many slots we have allocated */ private long allocated; - /** - * Each time we clear, we add an offset to our cookies, this prevents us from reading old values - */ + /** Each time we clear, we add an offset to our cookies, this prevents us from reading old values */ private long cookieGeneration = 128; private static final byte FLAG_ADD = 0x1; @@ -338,8 +328,7 @@ boolean clear() { leftShifted = null; rightShifted = null; - // leftAdded/leftRemoved/leftModified are used by the downstream update; so we do not free - // them + // leftAdded/leftRemoved/leftModified are used by the downstream update; so we do not free them leftAdded = null; leftRemoved = null; leftModified = null; @@ -352,13 +341,12 @@ boolean clear() { } /** - * Is this cookie within our valid range (greater than or equal to our generation, but less than - * the pointer after adjustment? 
+ * Is this cookie within our valid range (greater than or equal to our generation, but less than the pointer after + * adjustment? * * @param cookie the cookie to check for validity * - * @return true if the cookie is from the current generation, and references a valid slot in our - * table + * @return true if the cookie is from the current generation, and references a valid slot in our table */ private boolean isValidCookie(long cookie) { return cookie >= cookieGeneration && getPointerFromCookie(cookie) < pointer; @@ -401,8 +389,7 @@ private SlotState getSlotState(final long cookie, final long slot) { if (finishedRightProcessing) { state.finalizedRight = true; - state.rightAdded = - state.rightRemoved = state.rightModified = Index.FACTORY.getEmptyIndex(); + state.rightAdded = state.rightRemoved = state.rightModified = Index.FACTORY.getEmptyIndex(); state.innerShifted = IndexShiftData.EMPTY; } } else { @@ -441,15 +428,13 @@ long appendToBuilder(final long cookie, final long slot, final long leftIndex) { return getSlotState(cookie, slot).appendToBuilder(leftIndex).cookie; } - // Right shifts cannot be applied until after the removes are applied to the slot's right index. - // So, we ensure that + // Right shifts cannot be applied until after the removes are applied to the slot's right index. So, we ensure that // a tracker-slot is allocated for each slot affected by a shift, and apply the shifts later. long needsRightShift(final long cookie, final long slot) { return getSlotState(cookie, slot).needsRightShift().cookie; } - // Left shifts can actually be applied immediately (removes are already rm'd), but we need to be - // careful to only + // Left shifts can actually be applied immediately (removes are already rm'd), but we need to be careful to only // shift the leftIndex once. 
long needsLeftShift(final long cookie, final long slot) { return getSlotState(cookie, slot).applyLeftShift().cookie; diff --git a/DB/src/main/java/io/deephaven/db/v2/DeferredViewTable.java b/DB/src/main/java/io/deephaven/db/v2/DeferredViewTable.java index 303f1624ea8..58943417bbf 100644 --- a/DB/src/main/java/io/deephaven/db/v2/DeferredViewTable.java +++ b/DB/src/main/java/io/deephaven/db/v2/DeferredViewTable.java @@ -21,8 +21,7 @@ import java.util.stream.Stream; /** - * An uncoalesced table with view and where operations to be applied after {@link #coalesce()} is - * forced. + * An uncoalesced table with view and where operations to be applied after {@link #coalesce()} is forced. */ public class DeferredViewTable extends RedefinableTable { @@ -32,22 +31,18 @@ public class DeferredViewTable extends RedefinableTable { protected final SelectFilter[] deferredFilters; public DeferredViewTable(@NotNull final TableDefinition definition, - @NotNull final String description, - @NotNull final TableReference tableReference, - @Nullable final String[] deferredDropColumns, - @Nullable final SelectColumn[] deferredViewColumns, - @Nullable final SelectFilter[] deferredFilters) { + @NotNull final String description, + @NotNull final TableReference tableReference, + @Nullable final String[] deferredDropColumns, + @Nullable final SelectColumn[] deferredViewColumns, + @Nullable final SelectFilter[] deferredFilters) { super(definition, description); this.tableReference = tableReference; this.deferredDropColumns = - deferredDropColumns == null ? CollectionUtil.ZERO_LENGTH_STRING_ARRAY - : deferredDropColumns; + deferredDropColumns == null ? CollectionUtil.ZERO_LENGTH_STRING_ARRAY : deferredDropColumns; this.deferredViewColumns = - deferredViewColumns == null ? SelectColumn.ZERO_LENGTH_SELECT_COLUMN_ARRAY - : deferredViewColumns; - this.deferredFilters = - deferredFilters == null ? SelectFilter.ZERO_LENGTH_SELECT_FILTER_ARRAY - : deferredFilters; + deferredViewColumns == null ? 
SelectColumn.ZERO_LENGTH_SELECT_COLUMN_ARRAY : deferredViewColumns; + this.deferredFilters = deferredFilters == null ? SelectFilter.ZERO_LENGTH_SELECT_FILTER_ARRAY : deferredFilters; if (deferredFilters != null) { for (final SelectFilter sf : deferredFilters) { if (sf instanceof LivenessReferent) { @@ -64,12 +59,10 @@ public DeferredViewTable(@NotNull final TableDefinition definition, manage(tableReference); if (haveDrop && haveFilter) { - throw new IllegalStateException( - "Why do we have a drop and a filter all at the same time?"); + throw new IllegalStateException("Why do we have a drop and a filter all at the same time?"); } if (haveView && haveFilter) { - throw new IllegalStateException( - "Why do we have a view and a filter all at the same time?"); + throw new IllegalStateException("Why do we have a view and a filter all at the same time?"); } } @@ -83,8 +76,7 @@ private Table getResultTableWithWhere(SelectFilter... selectFilters) { return coalesce().where(selectFilters); } - final SelectFilter[] allFilters = - Stream.concat(Arrays.stream(deferredFilters), Arrays.stream(selectFilters)) + final SelectFilter[] allFilters = Stream.concat(Arrays.stream(deferredFilters), Arrays.stream(selectFilters)) .map(SelectFilter::copy).toArray(SelectFilter[]::new); TableReference.TableAndRemainingFilters tableAndRemainingFilters; @@ -104,10 +96,10 @@ private Table getResultTableWithWhere(SelectFilter... 
selectFilters) { Table localResult = tableAndRemainingFilters.table; if (localResult instanceof DeferredViewTable) { localResult = ((DeferredViewTable) localResult) - .getResultTableWithWhere(tableAndRemainingFilters.remainingFilters); + .getResultTableWithWhere(tableAndRemainingFilters.remainingFilters); } else { localResult = localResult.where(Arrays.stream(tableAndRemainingFilters.remainingFilters) - .map(SelectFilter::copy).toArray(SelectFilter[]::new)); + .map(SelectFilter::copy).toArray(SelectFilter[]::new)); } localResult = applyDeferredViews(localResult); @@ -129,8 +121,8 @@ private Table applyDeferredViews(Table result) { result = result.dropColumns(deferredDropColumns); } if (deferredViewColumns.length > 0) { - result = result.view(Arrays.stream(deferredViewColumns).map(SelectColumn::copy) - .toArray(SelectColumn[]::new)); + result = result + .view(Arrays.stream(deferredViewColumns).map(SelectColumn::copy).toArray(SelectColumn[]::new)); } return result; } @@ -170,8 +162,7 @@ private PreAndPostFilters applyFilterRenamings(SelectFilter[] filters) { // we need to rename this column if (selectColumn instanceof SourceColumn) { // this is a rename of the getSourceName to the innerName - renames.put(selectColumn.getName(), - ((SourceColumn) selectColumn).getSourceName()); + renames.put(selectColumn.getName(), ((SourceColumn) selectColumn).getSourceName()); } else { postFilter = true; break; @@ -186,9 +177,8 @@ private PreAndPostFilters applyFilterRenamings(SelectFilter[] filters) { if (!renames.isEmpty()) { if (filter instanceof io.deephaven.db.v2.select.MatchFilter) { io.deephaven.db.v2.select.MatchFilter matchFilter = - (io.deephaven.db.v2.select.MatchFilter) filter; - Assert.assertion(renames.size() == 1, - "Match Filters should only use one column!"); + (io.deephaven.db.v2.select.MatchFilter) filter; + Assert.assertion(renames.size() == 1, "Match Filters should only use one column!"); String newName = renames.get(matchFilter.getColumnName()); 
Assert.neqNull(newName, "newName"); preViewFilters.add(matchFilter.renameFilter(newName)); @@ -204,9 +194,8 @@ private PreAndPostFilters applyFilterRenamings(SelectFilter[] filters) { } } - return new PreAndPostFilters( - preViewFilters.toArray(new SelectFilter[preViewFilters.size()]), - postViewFilters.toArray(new SelectFilter[postViewFilters.size()])); + return new PreAndPostFilters(preViewFilters.toArray(new SelectFilter[preViewFilters.size()]), + postViewFilters.toArray(new SelectFilter[postViewFilters.size()])); } @Override @@ -216,7 +205,7 @@ protected DynamicTable doCoalesce() { PreAndPostFilters preAndPostFilters = applyFilterRenamings(deferredFilters); TableReference.TableAndRemainingFilters tarf = - tableReference.getWithWhere(preAndPostFilters.preViewFilters); + tableReference.getWithWhere(preAndPostFilters.preViewFilters); result = tarf.table; result = result.where(tarf.remainingFilters); result = result.where(preAndPostFilters.postViewFilters); @@ -240,21 +229,15 @@ public Table selectDistinct(SelectColumn... columns) { return coalesce().selectDistinct(columns); } - /* - * If we have changed the partitioning columns, we should perform the selectDistinct on the - * coalesced table. - */ + /* If we have changed the partitioning columns, we should perform the selectDistinct on the coalesced table. */ if (deferredViewColumns.length > 0) { - if (tableReference.getDefinition().getPartitioningColumns().stream() - .anyMatch(cd -> Arrays.stream(deferredViewColumns) - .anyMatch(dvc -> dvc.getName().equals(cd.getName())))) { + if (tableReference.getDefinition().getPartitioningColumns().stream().anyMatch( + cd -> Arrays.stream(deferredViewColumns).anyMatch(dvc -> dvc.getName().equals(cd.getName())))) { return coalesce().selectDistinct(columns); } } - /* - * If the cachedResult is not yet created, we first ask for a selectDistinct cachedResult. - */ + /* If the cachedResult is not yet created, we first ask for a selectDistinct cachedResult. 
*/ Table selectDistinct = tableReference.selectDistinct(columns); return selectDistinct == null ? coalesce().selectDistinct(columns) : selectDistinct; } @@ -266,18 +249,17 @@ protected Table redefine(TableDefinition newDefinition) { for (int cdi = 0; cdi < cDefs.length; ++cdi) { newView[cdi] = new SourceColumn(cDefs[cdi].getName()); } - DeferredViewTable deferredViewTable = new DeferredViewTable(newDefinition, - description + "-redefined", new SimpleTableReference(this), null, newView, null); + DeferredViewTable deferredViewTable = new DeferredViewTable(newDefinition, description + "-redefined", + new SimpleTableReference(this), null, newView, null); deferredViewTable.setRefreshing(isRefreshing()); return deferredViewTable; } @Override - protected Table redefine(TableDefinition newDefinitionExternal, - TableDefinition newDefinitionInternal, SelectColumn[] viewColumns, - Map> columnDependency) { - DeferredViewTable deferredViewTable = new DeferredViewTable(newDefinitionExternal, - description + "-redefined", new SimpleTableReference(this), null, viewColumns, null); + protected Table redefine(TableDefinition newDefinitionExternal, TableDefinition newDefinitionInternal, + SelectColumn[] viewColumns, Map> columnDependency) { + DeferredViewTable deferredViewTable = new DeferredViewTable(newDefinitionExternal, description + "-redefined", + new SimpleTableReference(this), null, viewColumns, null); deferredViewTable.setRefreshing(isRefreshing()); return deferredViewTable; } @@ -285,8 +267,7 @@ protected Table redefine(TableDefinition newDefinitionExternal, /** * The table reference hides the table underlying table from us. */ - public static abstract class TableReference extends LivenessArtifact - implements SimpleReference
    { + public static abstract class TableReference extends LivenessArtifact implements SimpleReference
    { TableReference(Table t) { if (t.isLive()) { @@ -295,9 +276,8 @@ public static abstract class TableReference extends LivenessArtifact } /** - * Returns the table in a form that the user can run queries on it. This may be as simple as - * returning a reference, but for amorphous tables, this means we need to do the work to - * instantiate it. + * Returns the table in a form that the user can run queries on it. This may be as simple as returning a + * reference, but for amorphous tables, this means we need to do the work to instantiate it. * * @return */ @@ -329,9 +309,9 @@ public TableAndRemainingFilters(Table table, SelectFilter[] remainingFilters) { } /** - * Get the table in a form that the user can run queries on it. All of the filters that can - * be run efficiently should be run before instantiating the full table should be run. Other - * filters are returned in the remainingFilters field. + * Get the table in a form that the user can run queries on it. All of the filters that can be run efficiently + * should be run before instantiating the full table should be run. Other filters are returned in the + * remainingFilters field. * * @param selectFilters filters to maybe apply before returning the table * @return the instantiated table and a set of filters that were not applied. @@ -341,12 +321,12 @@ public TableAndRemainingFilters getWithWhere(SelectFilter... selectFilters) { } /** - * If possible to execute a selectDistinct without instantiating the full table, then do so. - * Otherwise return null. + * If possible to execute a selectDistinct without instantiating the full table, then do so. Otherwise return + * null. * * @param columns the columns to selectDistinct - * @return null if the operation can not be performed on an uninstantiated table, otherwise - * a new table with the distinct values from strColumns. 
+ * @return null if the operation can not be performed on an uninstantiated table, otherwise a new table with the + * distinct values from strColumns. */ public Table selectDistinct(SelectColumn[] columns) { return null; diff --git a/DB/src/main/java/io/deephaven/db/v2/DynamicNode.java b/DB/src/main/java/io/deephaven/db/v2/DynamicNode.java index dc89f66af34..16824450b92 100644 --- a/DB/src/main/java/io/deephaven/db/v2/DynamicNode.java +++ b/DB/src/main/java/io/deephaven/db/v2/DynamicNode.java @@ -21,13 +21,13 @@ public interface DynamicNode { boolean setRefreshing(boolean refreshing); /** - * Called on a dependent node to ensure that a strong reference is maintained to any parent - * object that is required for the proper maintenance and functioning of the dependent. + * Called on a dependent node to ensure that a strong reference is maintained to any parent object that is required + * for the proper maintenance and functioning of the dependent. * - * In the most common case, the parent object is a child listener to a parent node. The parent - * node only keeps a weak reference to its child listener, but the listener maintains a strong - * reference to the parent node. In this scenario, the only strong reference to the listener - * (and thus indirectly to the parent node itself) is the reference kept by the dependent node. + * In the most common case, the parent object is a child listener to a parent node. The parent node only keeps a + * weak reference to its child listener, but the listener maintains a strong reference to the parent node. In this + * scenario, the only strong reference to the listener (and thus indirectly to the parent node itself) is the + * reference kept by the dependent node. * * @param parent A parent of this node */ @@ -37,8 +37,8 @@ public interface DynamicNode { * Determine if an object is a refreshing {@link DynamicNode}. 
* * @param object The object - * @return True if the object is a {@link DynamicNode} and its - * {@link DynamicNode#isRefreshing()} returns true, false otherwise + * @return True if the object is a {@link DynamicNode} and its {@link DynamicNode#isRefreshing()} returns true, + * false otherwise */ static boolean isDynamicAndIsRefreshing(final Object object) { return object instanceof DynamicNode && ((DynamicNode) object).isRefreshing(); @@ -48,8 +48,8 @@ static boolean isDynamicAndIsRefreshing(final Object object) { * Determine if an object is a {@link DynamicNode} but is not refreshing. * * @param object The object - * @return True if the object is a {@link DynamicNode} and its - * {@link DynamicNode#isRefreshing()} returns true, false otherwise + * @return True if the object is a {@link DynamicNode} and its {@link DynamicNode#isRefreshing()} returns true, + * false otherwise */ static boolean isDynamicAndNotRefreshing(final Object object) { return object instanceof DynamicNode && !((DynamicNode) object).isRefreshing(); @@ -59,20 +59,19 @@ static boolean isDynamicAndNotRefreshing(final Object object) { * Determine if an object is not a refreshing {@link DynamicNode}. * * @param object The object - * @return True if the object is not a {@link DynamicNode} or its - * {@link DynamicNode#isRefreshing()} returns false, false otherwise + * @return True if the object is not a {@link DynamicNode} or its {@link DynamicNode#isRefreshing()} returns false, + * false otherwise */ static boolean notDynamicOrNotRefreshing(final Object object) { return !(object instanceof DynamicNode) || !((DynamicNode) object).isRefreshing(); } /** - * Determine if an object is either not a {@link DynamicNode}, or is a refreshing - * {@link DynamicNode}. + * Determine if an object is either not a {@link DynamicNode}, or is a refreshing {@link DynamicNode}. 
* * @param object The object - * @return True if the object is not a {@link DynamicNode} or if its - * {@link DynamicNode#isRefreshing()} returns true, false otherwise + * @return True if the object is not a {@link DynamicNode} or if its {@link DynamicNode#isRefreshing()} returns + * true, false otherwise */ static boolean notDynamicOrIsRefreshing(final Object object) { return !(object instanceof DynamicNode) || ((DynamicNode) object).isRefreshing(); diff --git a/DB/src/main/java/io/deephaven/db/v2/DynamicTable.java b/DB/src/main/java/io/deephaven/db/v2/DynamicTable.java index f6d33499227..975948442c3 100644 --- a/DB/src/main/java/io/deephaven/db/v2/DynamicTable.java +++ b/DB/src/main/java/io/deephaven/db/v2/DynamicTable.java @@ -20,14 +20,13 @@ * * The DynamicTable interface provides support for listening for table changes and errors. */ -public interface DynamicTable - extends Table, NotificationQueue.Dependency, DynamicNode, SystemicObject { +public interface DynamicTable extends Table, NotificationQueue.Dependency, DynamicNode, SystemicObject { /** *

    * Wait for updates to this DynamicTable. *

    - * In some implementations, this call may also terminate in case of interrupt or spurious wakeup - * (see java.util.concurrent.locks.Condition#await()). + * In some implementations, this call may also terminate in case of interrupt or spurious wakeup (see + * java.util.concurrent.locks.Condition#await()). * * @throws InterruptedException In the event this thread is interrupted */ @@ -37,8 +36,8 @@ public interface DynamicTable *

    * Wait for updates to this DynamicTable. *

    - * In some implementations, this call may also terminate in case of interrupt or spurious wakeup - * (see java.util.concurrent.locks.Condition#await()). + * In some implementations, this call may also terminate in case of interrupt or spurious wakeup (see + * java.util.concurrent.locks.Condition#await()). * * @param timeout The maximum time to wait in milliseconds. * @@ -48,8 +47,8 @@ public interface DynamicTable boolean awaitUpdate(long timeout) throws InterruptedException; /** - * Subscribe for updates to this table. Listener will be invoked via the LiveTableMonitor - * notification queue associated with this DynamicTable. + * Subscribe for updates to this table. Listener will be invoked via the LiveTableMonitor notification queue + * associated with this DynamicTable. * * @param listener listener for updates */ @@ -58,26 +57,26 @@ default void listenForUpdates(Listener listener) { } /** - * Subscribe for updates to this table. After the optional initial image, listener will be - * invoked via the LiveTableMonitor notification queue associated with this DynamicTable. + * Subscribe for updates to this table. After the optional initial image, listener will be invoked via the + * LiveTableMonitor notification queue associated with this DynamicTable. * * @param listener listener for updates - * @param replayInitialImage true to process updates for all initial rows in the table plus all - * new row changes; false to only process new row changes + * @param replayInitialImage true to process updates for all initial rows in the table plus all new row changes; + * false to only process new row changes */ void listenForUpdates(Listener listener, boolean replayInitialImage); /** - * Subscribe for updates to this table. Listener will be invoked via the LiveTableMonitor - * notification queue associated with this DynamicTable. + * Subscribe for updates to this table. 
Listener will be invoked via the LiveTableMonitor notification queue + * associated with this DynamicTable. * * @param listener listener for updates */ void listenForUpdates(ShiftAwareListener listener); /** - * Subscribe for updates to this table. Direct listeners are invoked immediately when changes - * are published, rather than via a LiveTableMonitor notification queue. + * Subscribe for updates to this table. Direct listeners are invoked immediately when changes are published, rather + * than via a LiveTableMonitor notification queue. * * @param listener listener for updates */ @@ -105,33 +104,31 @@ default void listenForUpdates(Listener listener) { void removeDirectUpdateListener(final Listener listener); /** - * Initiate update delivery to this table's listeners. Will notify direct listeners before - * completing, and enqueue notifications for all other listeners. + * Initiate update delivery to this table's listeners. Will notify direct listeners before completing, and enqueue + * notifications for all other listeners. * * @param added index values added to the table * @param removed index values removed from the table * @param modified index values modified in the table. */ default void notifyListeners(Index added, Index removed, Index modified) { - notifyListeners( - new ShiftAwareListener.Update(added, removed, modified, IndexShiftData.EMPTY, + notifyListeners(new ShiftAwareListener.Update(added, removed, modified, IndexShiftData.EMPTY, modified.isEmpty() ? ModifiedColumnSet.EMPTY : ModifiedColumnSet.ALL)); } /** - * Initiate update delivery to this table's listeners. Will notify direct listeners before - * completing, and enqueue notifications for all other listeners. + * Initiate update delivery to this table's listeners. Will notify direct listeners before completing, and enqueue + * notifications for all other listeners. 
* - * @param update the set of table changes to propagate The caller gives this update object away; - * the invocation of {@code notifyListeners} takes ownership, and will call - * {@code release} on it once it is not used anymore; callers should pass a {@code clone} - * for updates they intend to further use. + * @param update the set of table changes to propagate The caller gives this update object away; the invocation of + * {@code notifyListeners} takes ownership, and will call {@code release} on it once it is not used anymore; + * callers should pass a {@code clone} for updates they intend to further use. */ void notifyListeners(ShiftAwareListener.Update update); /** - * Initiate failure delivery to this table's listeners. Will notify direct listeners before - * completing, and enqueue notifications for all other listeners. + * Initiate failure delivery to this table's listeners. Will notify direct listeners before completing, and enqueue + * notifications for all other listeners. * * @param e error * @param sourceEntry performance tracking @@ -146,8 +143,7 @@ default boolean isFailed() { } /** - * Retrieve the {@link ModifiedColumnSet} that will be used when propagating updates from this - * table. + * Retrieve the {@link ModifiedColumnSet} that will be used when propagating updates from this table. * * @param columnNames the columns that should belong to the resulting set. * @return the resulting ModifiedColumnSet for the given columnNames @@ -157,30 +153,30 @@ default ModifiedColumnSet newModifiedColumnSet(String... columnNames) { } /** - * Create a {@link ModifiedColumnSet.Transformer} that can be used to propagate dirty columns - * from this table to listeners of the table used to construct columnSets. It is an error if - * {@code columnNames} and {@code columnSets} are not the same length. The transformer will mark - * {@code columnSets[i]} as dirty if the column represented by {@code columnNames[i]} is dirty. 
+ * Create a {@link ModifiedColumnSet.Transformer} that can be used to propagate dirty columns from this table to + * listeners of the table used to construct columnSets. It is an error if {@code columnNames} and {@code columnSets} + * are not the same length. The transformer will mark {@code columnSets[i]} as dirty if the column represented by + * {@code columnNames[i]} is dirty. * * @param columnNames the source columns * @param columnSets the destination columns in the convenient ModifiedColumnSet form * @return a transformer that knows the dirty details */ default ModifiedColumnSet.Transformer newModifiedColumnSetTransformer(String[] columnNames, - ModifiedColumnSet[] columnSets) { + ModifiedColumnSet[] columnSets) { throw new UnsupportedOperationException(); } /** - * Create a {@link ModifiedColumnSet.Transformer} that can be used to propagate dirty columns - * from this table to listeners of the provided resultTable. + * Create a {@link ModifiedColumnSet.Transformer} that can be used to propagate dirty columns from this table to + * listeners of the provided resultTable. * * @param resultTable the destination table * @param columnNames the columns that map one-to-one with the result table * @return a transformer that passes dirty details via an identity mapping */ default ModifiedColumnSet.Transformer newModifiedColumnSetTransformer(DynamicTable resultTable, - String... columnNames) { + String... columnNames) { final ModifiedColumnSet[] columnSets = new ModifiedColumnSet[columnNames.length]; for (int i = 0; i < columnNames.length; ++i) { columnSets[i] = resultTable.newModifiedColumnSet(columnNames[i]); @@ -189,15 +185,15 @@ default ModifiedColumnSet.Transformer newModifiedColumnSetTransformer(DynamicTab } /** - * Create a {@link ModifiedColumnSet.Transformer} that can be used to propagate dirty columns - * from this table to listeners of the provided resultTable. 
+ * Create a {@link ModifiedColumnSet.Transformer} that can be used to propagate dirty columns from this table to + * listeners of the provided resultTable. * * @param resultTable the destination table * @param matchPairs the columns that map one-to-one with the result table * @return a transformer that passes dirty details via an identity mapping */ default ModifiedColumnSet.Transformer newModifiedColumnSetTransformer(DynamicTable resultTable, - MatchPair... matchPairs) { + MatchPair... matchPairs) { final ModifiedColumnSet[] columnSets = new ModifiedColumnSet[matchPairs.length]; for (int ii = 0; ii < matchPairs.length; ++ii) { columnSets[ii] = resultTable.newModifiedColumnSet(matchPairs[ii].left()); @@ -206,26 +202,25 @@ default ModifiedColumnSet.Transformer newModifiedColumnSetTransformer(DynamicTab } /** - * Create a transformer that uses an identity mapping from one ColumnSourceMap to another. The - * two CSMs must have equivalent column names and column ordering. + * Create a transformer that uses an identity mapping from one ColumnSourceMap to another. The two CSMs must have + * equivalent column names and column ordering. * * @param newColumns the column source map for result table * @return a simple Transformer that makes a cheap, but CSM compatible copy */ default ModifiedColumnSet.Transformer newModifiedColumnSetIdentityTransformer( - final Map newColumns) { + final Map newColumns) { throw new UnsupportedOperationException(); } /** - * Create a transformer that uses an identity mapping from one DynamicTable to another. The two - * tables must have equivalent column names and column ordering. + * Create a transformer that uses an identity mapping from one DynamicTable to another. The two tables must have + * equivalent column names and column ordering. 
* * @param other the result table * @return a simple Transformer that makes a cheap, but CSM compatible copy */ - default ModifiedColumnSet.Transformer newModifiedColumnSetIdentityTransformer( - DynamicTable other) { + default ModifiedColumnSet.Transformer newModifiedColumnSetIdentityTransformer(DynamicTable other) { throw new UnsupportedOperationException(); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/EvenlyDividedTableMap.java b/DB/src/main/java/io/deephaven/db/v2/EvenlyDividedTableMap.java index d83ab98bc31..6637ab30190 100644 --- a/DB/src/main/java/io/deephaven/db/v2/EvenlyDividedTableMap.java +++ b/DB/src/main/java/io/deephaven/db/v2/EvenlyDividedTableMap.java @@ -24,9 +24,9 @@ private Division(long start, long end) { @Override public String toString() { return "Division{" + - "start=" + start + - ", end=" + end + - '}'; + "start=" + start + + ", end=" + end + + '}'; } @Override @@ -37,7 +37,7 @@ public boolean equals(Object o) { return false; final Division division = (Division) o; return start == division.start && - end == division.end; + end == division.end; } @Override @@ -50,11 +50,10 @@ public int hashCode() { * Divide the table into even slices, inserting the results into a TableMap. * *

    - * This method is intended to be used to enable parallelism by creating a TableMap without the - * cost of a full byExternal. It is important to note that any natural boundaries in the data - * are not respected, thus when doing operations in parallel and calling - * {@link TransformableTableMap#merge()} the result is very likely not the same as doing the - * operations individually. Care must be taken to preserve your desired semantics. + * This method is intended to be used to enable parallelism by creating a TableMap without the cost of a full + * byExternal. It is important to note that any natural boundaries in the data are not respected, thus when doing + * operations in parallel and calling {@link TransformableTableMap#merge()} the result is very likely not the same + * as doing the operations individually. Care must be taken to preserve your desired semantics. *

    * * @param table the table to divide @@ -75,30 +74,27 @@ static TableMap makeEvenlyDividedTableMap(Table table, int divisions, int minimu long start; for (start = 0; start < tableSize; start += divisionSize) { - localTableMap.put(new Division(start, start + divisionSize), - queryTable.slice(start, start + divisionSize)); + localTableMap.put(new Division(start, start + divisionSize), queryTable.slice(start, start + divisionSize)); } if (queryTable.isRefreshing()) { localTableMap.setRefreshing(true); final long fStart = start; final ShiftAwareListener listener = - new InstrumentedShiftAwareListenerAdapter("tablemap division", queryTable, false) { - long currentEnd = fStart; + new InstrumentedShiftAwareListenerAdapter("tablemap division", queryTable, false) { + long currentEnd = fStart; - @Override - public void onUpdate(Update upstream) { - if (queryTable.getIndex().size() > currentEnd) { - // we should slice the table again and make a new segment for our - // tablemap - while (currentEnd < queryTable.getIndex().size()) { - localTableMap.put( - new Division(currentEnd, currentEnd + divisionSize), - queryTable.slice(currentEnd, currentEnd + divisionSize)); - currentEnd += divisionSize; + @Override + public void onUpdate(Update upstream) { + if (queryTable.getIndex().size() > currentEnd) { + // we should slice the table again and make a new segment for our tablemap + while (currentEnd < queryTable.getIndex().size()) { + localTableMap.put(new Division(currentEnd, currentEnd + divisionSize), + queryTable.slice(currentEnd, currentEnd + divisionSize)); + currentEnd += divisionSize; + } } } - } - }; + }; queryTable.listenForUpdates(listener); localTableMap.manage(listener); localTableMap.addParentReference(listener); diff --git a/DB/src/main/java/io/deephaven/db/v2/FlattenOperation.java b/DB/src/main/java/io/deephaven/db/v2/FlattenOperation.java index 7e09cd96280..10db0c4b137 100644 --- a/DB/src/main/java/io/deephaven/db/v2/FlattenOperation.java +++ 
b/DB/src/main/java/io/deephaven/db/v2/FlattenOperation.java @@ -43,8 +43,7 @@ public Result initialize(boolean usePrev, long beforeClock) { for (Map.Entry entry : parent.getColumnSourceMap().entrySet()) { // noinspection unchecked - resultColumns.put(entry.getKey(), - new ReadOnlyRedirectedColumnSource(redirectionIndex, entry.getValue())); + resultColumns.put(entry.getKey(), new ReadOnlyRedirectedColumnSource(redirectionIndex, entry.getValue())); } resultTable = new QueryTable(Index.FACTORY.getFlatIndex(size), resultColumns); @@ -53,13 +52,12 @@ public Result initialize(boolean usePrev, long beforeClock) { ShiftAwareListener resultListener = null; if (parent.isRefreshing()) { - resultListener = - new BaseTable.ShiftAwareListenerImpl(getDescription(), parent, resultTable) { - @Override - public void onUpdate(Update upstream) { - FlattenOperation.this.onUpdate(upstream); - } - }; + resultListener = new BaseTable.ShiftAwareListenerImpl(getDescription(), parent, resultTable) { + @Override + public void onUpdate(Update upstream) { + FlattenOperation.this.onUpdate(upstream); + } + }; } prevSize = size; @@ -78,8 +76,7 @@ public void onUpdate(Update upstream) { } private void onUpdate(final ShiftAwareListener.Update upstream) { - // Note: we can safely ignore shifted since shifts do not change data AND shifts are not - // allowed to reorder. + // Note: we can safely ignore shifted since shifts do not change data AND shifts are not allowed to reorder. final Index index = parent.getIndex(); final long newSize = index.size(); @@ -113,10 +110,8 @@ private void onUpdate(final ShiftAwareListener.Update upstream) { }; // Create our range iterators and prime them. 
- final MutableObject rmIt = - new MutableObject<>(downstream.removed.rangeIterator()); - final MutableObject addIt = - new MutableObject<>(downstream.added.rangeIterator()); + final MutableObject rmIt = new MutableObject<>(downstream.removed.rangeIterator()); + final MutableObject addIt = new MutableObject<>(downstream.added.rangeIterator()); updateIt.accept(rmIt); updateIt.accept(addIt); @@ -126,15 +121,13 @@ private void onUpdate(final ShiftAwareListener.Update upstream) { while (rmIt.getValue() != null || addIt.getValue() != null) { final long nextRm = rmIt.getValue() == null ? Index.NULL_KEY - : rmIt.getValue().currentRangeStart(); + : rmIt.getValue().currentRangeStart(); final long nextAdd = addIt.getValue() == null ? Index.NULL_KEY - : addIt.getValue().currentRangeStart() - currDelta; + : addIt.getValue().currentRangeStart() - currDelta; if (nextRm == nextAdd) { // note neither can be null in this case - final long dtRm = - rmIt.getValue().currentRangeEnd() - rmIt.getValue().currentRangeStart() + 1; - final long dtAdd = - addIt.getValue().currentRangeEnd() - addIt.getValue().currentRangeStart() + 1; + final long dtRm = rmIt.getValue().currentRangeEnd() - rmIt.getValue().currentRangeStart() + 1; + final long dtAdd = addIt.getValue().currentRangeEnd() - addIt.getValue().currentRangeStart() + 1; // shift only if these don't cancel each other out if (dtRm != dtAdd) { @@ -145,11 +138,9 @@ private void onUpdate(final ShiftAwareListener.Update upstream) { updateIt.accept(rmIt); updateIt.accept(addIt); - } else if (nextAdd == Index.NULL_KEY - || (nextRm != Index.NULL_KEY && nextRm < nextAdd)) { + } else if (nextAdd == Index.NULL_KEY || (nextRm != Index.NULL_KEY && nextRm < nextAdd)) { // rmIt cannot be null - final long dtRm = - rmIt.getValue().currentRangeEnd() - rmIt.getValue().currentRangeStart() + 1; + final long dtRm = rmIt.getValue().currentRangeEnd() - rmIt.getValue().currentRangeStart() + 1; outShifted.shiftRange(currMarker, nextRm - 1, currDelta); currDelta 
-= dtRm; @@ -157,8 +148,7 @@ private void onUpdate(final ShiftAwareListener.Update upstream) { updateIt.accept(rmIt); } else { // addIt cannot be null - final long dtAdd = - addIt.getValue().currentRangeEnd() - addIt.getValue().currentRangeStart() + 1; + final long dtAdd = addIt.getValue().currentRangeEnd() - addIt.getValue().currentRangeStart() + 1; outShifted.shiftRange(currMarker, nextAdd - 1, currDelta); currDelta += dtAdd; diff --git a/DB/src/main/java/io/deephaven/db/v2/HierarchicalTable.java b/DB/src/main/java/io/deephaven/db/v2/HierarchicalTable.java index 4b2a55ccc15..a5944370a6d 100644 --- a/DB/src/main/java/io/deephaven/db/v2/HierarchicalTable.java +++ b/DB/src/main/java/io/deephaven/db/v2/HierarchicalTable.java @@ -17,8 +17,8 @@ import java.util.Arrays; /** - * This class is an extension of QueryTable that overrides many methods from {@link Table} which are - * not valid to perform on Hierarchical tables (treeTables() and rollups()). + * This class is an extension of QueryTable that overrides many methods from {@link Table} which are not valid to + * perform on Hierarchical tables (treeTables() and rollups()). */ public class HierarchicalTable extends QueryTable { private final QueryTable rootTable; @@ -51,12 +51,11 @@ public Table getSourceTable() { @Override public Table copy(boolean copyAttributes) { - return QueryPerformanceRecorder.withNugget("hierarchicalTable-copy()", - sizeForInstrumentation(), () -> { - final HierarchicalTable result = createFrom((QueryTable) rootTable.copy(), info); - copyAttributes(result, a -> true); - return result; - }); + return QueryPerformanceRecorder.withNugget("hierarchicalTable-copy()", sizeForInstrumentation(), () -> { + final HierarchicalTable result = createFrom((QueryTable) rootTable.copy(), info); + copyAttributes(result, a -> true); + return result; + }); } /** @@ -71,35 +70,30 @@ public Table getRawRootTable() { @Override public Table formatColumns(String... 
columnFormats) { final HierarchicalTableInfo hierarchicalTableInfo = - (HierarchicalTableInfo) getAttribute(HIERARCHICAL_SOURCE_INFO_ATTRIBUTE); + (HierarchicalTableInfo) getAttribute(HIERARCHICAL_SOURCE_INFO_ATTRIBUTE); final String[] originalColumnFormats = hierarchicalTableInfo.getColumnFormats(); final String[] newColumnFormats; if (originalColumnFormats != null && originalColumnFormats.length > 0) { - newColumnFormats = Arrays.copyOf(originalColumnFormats, - originalColumnFormats.length + columnFormats.length); - System.arraycopy(columnFormats, 0, newColumnFormats, originalColumnFormats.length, - columnFormats.length); + newColumnFormats = + Arrays.copyOf(originalColumnFormats, originalColumnFormats.length + columnFormats.length); + System.arraycopy(columnFormats, 0, newColumnFormats, originalColumnFormats.length, columnFormats.length); } else { newColumnFormats = columnFormats; } - // Note that we are not updating the root with the 'newColumnFormats' because the original - // set of formats + // Note that we are not updating the root with the 'newColumnFormats' because the original set of formats // are already there. 
- final Table updatedRoot = - rootTable.updateView(SelectColumnFactory.getFormatExpressions(columnFormats)); - final ReverseLookup maybeRll = - (ReverseLookup) rootTable.getAttribute(REVERSE_LOOKUP_ATTRIBUTE); + final Table updatedRoot = rootTable.updateView(SelectColumnFactory.getFormatExpressions(columnFormats)); + final ReverseLookup maybeRll = (ReverseLookup) rootTable.getAttribute(REVERSE_LOOKUP_ATTRIBUTE); - // Explicitly need to copy this in case we are a rollup, in which case the RLL needs to be - // at root level + // Explicitly need to copy this in case we are a rollup, in which case the RLL needs to be at root level if (maybeRll != null) { updatedRoot.setAttribute(REVERSE_LOOKUP_ATTRIBUTE, maybeRll); } - final HierarchicalTable result = createFrom((QueryTable) updatedRoot, - hierarchicalTableInfo.withColumnFormats(newColumnFormats)); + final HierarchicalTable result = + createFrom((QueryTable) updatedRoot, hierarchicalTableInfo.withColumnFormats(newColumnFormats)); copyAttributes(result, a -> !Table.HIERARCHICAL_SOURCE_INFO_ATTRIBUTE.equals(a)); return result; @@ -142,25 +136,24 @@ public Table renameColumns(MatchPair... 
pairs) { @Override public Table aj(Table rightTable, MatchPair[] columnsToMatch, MatchPair[] columnsToAdd, - AsOfMatchRule asOfMatchRule) { + AsOfMatchRule asOfMatchRule) { return throwUnsupported("aj()"); } @Override public Table raj(Table rightTable, MatchPair[] columnsToMatch, MatchPair[] columnsToAdd, - AsOfMatchRule asOfMatchRule) { + AsOfMatchRule asOfMatchRule) { return throwUnsupported("raj()"); } @Override - public Table naturalJoin(Table rightTable, MatchPair[] columnsToMatch, - MatchPair[] columnsToAdd) { + public Table naturalJoin(Table rightTable, MatchPair[] columnsToMatch, MatchPair[] columnsToAdd) { return throwUnsupported("naturalJoin()"); } @Override public Table join(Table rightTable, MatchPair[] columnsToMatch, MatchPair[] columnsToAdd, - int numRightBitsToReserve) { + int numRightBitsToReserve) { return throwUnsupported("join()"); } @@ -211,7 +204,7 @@ public Table tailPct(double percent) { @Override public Table by(@SuppressWarnings("rawtypes") AggregationStateFactory aggregationStateFactory, - SelectColumn... groupByColumns) { + SelectColumn... groupByColumns) { return throwUnsupported("by()"); } @@ -226,8 +219,7 @@ public Table tailBy(long nRows, String... groupByColumns) { } @Override - public Table applyToAllBy(String formulaColumn, String columnParamName, - SelectColumn... groupByColumns) { + public Table applyToAllBy(String formulaColumn, String columnParamName, SelectColumn... groupByColumns) { return throwUnsupported("applyToAllBy()"); } @@ -268,7 +260,7 @@ public Table where(SelectFilter... filters) { @Override public Table whereIn(GroupStrategy groupStrategy, Table rightTable, boolean inclusion, - MatchPair... columnsToMatch) { + MatchPair... columnsToMatch) { return throwUnsupported("whereIn()"); } @@ -344,8 +336,7 @@ public Table snapshot(Table baseTable, boolean doInitialSnapshot, String... stam } @Override - public Table snapshotIncremental(Table rightTable, boolean doInitialSnapshot, - String... 
stampColumns) { + public Table snapshotIncremental(Table rightTable, boolean doInitialSnapshot, String... stampColumns) { return throwUnsupported("snapshotIncremental()"); } @@ -361,41 +352,38 @@ public QueryTable getSubTable(Index index) { private T throwUnsupported(String opName) { throw new UnsupportedOperationException("Operation " + opName - + " may not be performed on hierarchical tables. Instead, apply it to table before treeTable() or rollup()"); + + " may not be performed on hierarchical tables. Instead, apply it to table before treeTable() or rollup()"); } /** - * Create a HierarchicalTable from the specified root (top level) table and - * {@link HierarchicalTableInfo info} that describes the hierarchy type. + * Create a HierarchicalTable from the specified root (top level) table and {@link HierarchicalTableInfo info} that + * describes the hierarchy type. * * @param rootTable the root table of the hierarchy * @param info the info that describes the hierarchy type * * @return A new Hierarchical table. The table itself is a view of the root of the hierarchy. */ - static @NotNull HierarchicalTable createFrom(@NotNull QueryTable rootTable, - @NotNull HierarchicalTableInfo info) { + static @NotNull HierarchicalTable createFrom(@NotNull QueryTable rootTable, @NotNull HierarchicalTableInfo info) { final Mutable resultHolder = new MutableObject<>(); - // Create a copy of the root byExternal table as a HierarchicalTable, and wire it up for - // listeners. + // Create a copy of the root byExternal table as a HierarchicalTable, and wire it up for listeners. 
final ShiftAwareSwapListener swapListener = - rootTable.createSwapListenerIfRefreshing(ShiftAwareSwapListener::new); - rootTable.initializeWithSnapshot("-hierarchicalTable", swapListener, - (usePrev, beforeClockValue) -> { - final HierarchicalTable table = new HierarchicalTable(rootTable, info); - rootTable.copyAttributes(table, a -> true); - - if (swapListener != null) { - final ShiftAwareListenerImpl listener = + rootTable.createSwapListenerIfRefreshing(ShiftAwareSwapListener::new); + rootTable.initializeWithSnapshot("-hierarchicalTable", swapListener, (usePrev, beforeClockValue) -> { + final HierarchicalTable table = new HierarchicalTable(rootTable, info); + rootTable.copyAttributes(table, a -> true); + + if (swapListener != null) { + final ShiftAwareListenerImpl listener = new ShiftAwareListenerImpl("hierarchicalTable()", rootTable, table); - swapListener.setListenerAndResult(listener, table); - table.addParentReference(swapListener); - } + swapListener.setListenerAndResult(listener, table); + table.addParentReference(swapListener); + } - resultHolder.setValue(table); - return true; - }); + resultHolder.setValue(table); + return true; + }); return resultHolder.getValue(); } diff --git a/DB/src/main/java/io/deephaven/db/v2/InMemoryTable.java b/DB/src/main/java/io/deephaven/db/v2/InMemoryTable.java index 03b356a6be2..577331f1056 100644 --- a/DB/src/main/java/io/deephaven/db/v2/InMemoryTable.java +++ b/DB/src/main/java/io/deephaven/db/v2/InMemoryTable.java @@ -19,8 +19,8 @@ public class InMemoryTable extends QueryTable { /** - * Defers to {@link ArrayBackedColumnSource#from(io.deephaven.qst.array.Array)} to construct the - * appropriate {@link ColumnSource column sources} (this involves copying the data). + * Defers to {@link ArrayBackedColumnSource#from(io.deephaven.qst.array.Array)} to construct the appropriate + * {@link ColumnSource column sources} (this involves copying the data). 
* * @param table the new table qst * @return the in memory table @@ -32,35 +32,31 @@ public static InMemoryTable from(NewTable table) { columns.put(column.name(), source); } return new InMemoryTable( - TableDefinition.from(table.header()), - Index.FACTORY.getFlatIndex(table.size()), - columns); + TableDefinition.from(table.header()), + Index.FACTORY.getFlatIndex(table.size()), + columns); } public InMemoryTable(String columnNames[], Object arrayValues[]) { - super(Index.FACTORY.getFlatIndex(Array.getLength(arrayValues[0])), - createColumnsMap(columnNames, arrayValues)); + super(Index.FACTORY.getFlatIndex(Array.getLength(arrayValues[0])), createColumnsMap(columnNames, arrayValues)); } public InMemoryTable(TableDefinition definition, final int size) { super(Index.FACTORY.getFlatIndex(size), - createColumnsMap( - definition.getColumnNames().toArray(new String[definition.getColumnNames().size()]), - Arrays.stream(definition.getColumns()).map( - x -> Array.newInstance(x.getDataType(), size)).toArray(Object[]::new))); + createColumnsMap( + definition.getColumnNames().toArray(new String[definition.getColumnNames().size()]), + Arrays.stream(definition.getColumns()).map( + x -> Array.newInstance(x.getDataType(), size)).toArray(Object[]::new))); } - private InMemoryTable(TableDefinition definition, Index index, - Map> columns) { + private InMemoryTable(TableDefinition definition, Index index, Map> columns) { super(definition, index, columns); } - private static Map createColumnsMap(String[] columnNames, - Object[] arrayValues) { + private static Map createColumnsMap(String[] columnNames, Object[] arrayValues) { Map map = new LinkedHashMap<>(); for (int i = 0; i < columnNames.length; i++) { - map.put(columnNames[i], - ArrayBackedColumnSource.getMemoryColumnSourceUntyped((arrayValues[i]))); + map.put(columnNames[i], ArrayBackedColumnSource.getMemoryColumnSourceUntyped((arrayValues[i]))); } return map; } diff --git a/DB/src/main/java/io/deephaven/db/v2/IndexedDataColumn.java 
b/DB/src/main/java/io/deephaven/db/v2/IndexedDataColumn.java index bd6146b78cc..dd4f1a5eb12 100644 --- a/DB/src/main/java/io/deephaven/db/v2/IndexedDataColumn.java +++ b/DB/src/main/java/io/deephaven/db/v2/IndexedDataColumn.java @@ -31,8 +31,8 @@ public class IndexedDataColumn implements DataColumn { private final String name; @SuppressWarnings({"UnusedDeclaration", "FieldCanBeLocal"}) - private final Object parent; // DO NOT DELETE - This reference preserves strong-reachability of - // the owning table and its listeners. + private final Object parent; // DO NOT DELETE - This reference preserves strong-reachability of the owning table and + // its listeners. private final Index index; private final ColumnSource columnSource; @@ -42,12 +42,12 @@ public IndexedDataColumn(@NotNull final String name, @NotNull final Table table) } public IndexedDataColumn(@NotNull final String name, @NotNull final Index index, - @NotNull final ColumnSource columnSource) { + @NotNull final ColumnSource columnSource) { this(name, null, index, columnSource); } - private IndexedDataColumn(@Nullable final String name, @Nullable final Object parent, - @NotNull final Index index, @NotNull final ColumnSource columnSource) { + private IndexedDataColumn(@Nullable final String name, @Nullable final Object parent, @NotNull final Index index, + @NotNull final ColumnSource columnSource) { this.name = name; this.parent = parent; this.index = index; @@ -59,13 +59,12 @@ private IndexedDataColumn(@Nullable final String name, @Nullable final Object pa * * @param index The index * @param columnSource The column source - * @return A data column with previous values for the supplied column source, according to the - * previous version of the index + * @return A data column with previous values for the supplied column source, according to the previous version of + * the index */ public static IndexedDataColumn makePreviousColumn(@NotNull final Index index, - @NotNull final ColumnSource columnSource) { - 
return new IndexedDataColumn<>(null, null, index.getPrevIndex(), - new PrevColumnSource<>(columnSource)); + @NotNull final ColumnSource columnSource) { + return new IndexedDataColumn<>(null, null, index.getPrevIndex(), new PrevColumnSource<>(columnSource)); } @Override @@ -94,7 +93,7 @@ public long size() { private Index getSubIndexByPos(final long startPosInclusive, final long endPosExclusive) { return startPosInclusive == 0 && endPosExclusive == index.size() ? index.clone() - : index.subindexByPos(startPosInclusive, endPosExclusive); + : index.subindexByPos(startPosInclusive, endPosExclusive); } // ------------------------------------------------------------------------------------------------------------------ @@ -113,27 +112,24 @@ public TYPE get(final long pos) { @Override public TYPE[] get(final long startPosInclusive, final long endPosExclusive) { final Iterable iterable = - () -> new ColumnIterator<>(getSubIndexByPos(startPosInclusive, endPosExclusive), - columnSource); + () -> new ColumnIterator<>(getSubIndexByPos(startPosInclusive, endPosExclusive), columnSource); // noinspection unchecked return StreamSupport.stream(iterable.spliterator(), false).toArray(s -> (TYPE[]) Array - .newInstance(io.deephaven.util.type.TypeUtils.getBoxedType(columnSource.getType()), s)); + .newInstance(io.deephaven.util.type.TypeUtils.getBoxedType(columnSource.getType()), s)); } @Override public TYPE[] get(final long... positions) { // noinspection unchecked - return Arrays.stream(positions).map(index::get).mapToObj(columnSource::get) - .toArray(s -> (TYPE[]) Array.newInstance( - io.deephaven.util.type.TypeUtils.getBoxedType(columnSource.getType()), s)); + return Arrays.stream(positions).map(index::get).mapToObj(columnSource::get).toArray(s -> (TYPE[]) Array + .newInstance(io.deephaven.util.type.TypeUtils.getBoxedType(columnSource.getType()), s)); } @Override public TYPE[] get(final int... 
positions) { // noinspection unchecked - return Arrays.stream(positions).mapToLong(i -> i).map(index::get) - .mapToObj(columnSource::get).toArray( - s -> (TYPE[]) Array.newInstance(TypeUtils.getBoxedType(columnSource.getType()), s)); + return Arrays.stream(positions).mapToLong(i -> i).map(index::get).mapToObj(columnSource::get) + .toArray(s -> (TYPE[]) Array.newInstance(TypeUtils.getBoxedType(columnSource.getType()), s)); } @Override @@ -171,11 +167,10 @@ public byte getByte(final long pos) { @Override public byte[] getBytes(final long startPosInclusive, final long endPosExclusive) { try (final Index rangeIndex = getSubIndexByPos(startPosInclusive, endPosExclusive); - final ChunkSource.FillContext context = - columnSource.makeFillContext(rangeIndex.intSize("getBytes"), null)) { + final ChunkSource.FillContext context = + columnSource.makeFillContext(rangeIndex.intSize("getBytes"), null)) { final byte[] result = new byte[rangeIndex.intSize("getBytes")]; - columnSource.fillChunk(context, WritableByteChunk.writableChunkWrap(result), - rangeIndex); + columnSource.fillChunk(context, WritableByteChunk.writableChunkWrap(result), rangeIndex); return result; } } @@ -210,11 +205,10 @@ public char getChar(final long pos) { @Override public char[] getChars(final long startPosInclusive, final long endPosExclusive) { try (final Index rangeIndex = getSubIndexByPos(startPosInclusive, endPosExclusive); - final ChunkSource.FillContext context = - columnSource.makeFillContext(rangeIndex.intSize("getChars"), null)) { + final ChunkSource.FillContext context = + columnSource.makeFillContext(rangeIndex.intSize("getChars"), null)) { final char[] result = new char[rangeIndex.intSize("getChars")]; - columnSource.fillChunk(context, WritableCharChunk.writableChunkWrap(result), - rangeIndex); + columnSource.fillChunk(context, WritableCharChunk.writableChunkWrap(result), rangeIndex); return result; } } @@ -249,11 +243,10 @@ public double getDouble(final long pos) { @Override public double[] 
getDoubles(final long startPosInclusive, final long endPosExclusive) { try (final Index rangeIndex = getSubIndexByPos(startPosInclusive, endPosExclusive); - final ChunkSource.FillContext context = - columnSource.makeFillContext(rangeIndex.intSize("getDoubles"), null)) { + final ChunkSource.FillContext context = + columnSource.makeFillContext(rangeIndex.intSize("getDoubles"), null)) { final double[] result = new double[rangeIndex.intSize("getDoubles")]; - columnSource.fillChunk(context, WritableDoubleChunk.writableChunkWrap(result), - rangeIndex); + columnSource.fillChunk(context, WritableDoubleChunk.writableChunkWrap(result), rangeIndex); return result; } } @@ -288,11 +281,10 @@ public float getFloat(final long pos) { @Override public float[] getFloats(final long startPosInclusive, final long endPosExclusive) { try (final Index rangeIndex = getSubIndexByPos(startPosInclusive, endPosExclusive); - final ChunkSource.FillContext context = - columnSource.makeFillContext(rangeIndex.intSize("getFloats"), null)) { + final ChunkSource.FillContext context = + columnSource.makeFillContext(rangeIndex.intSize("getFloats"), null)) { final float[] result = new float[rangeIndex.intSize("getFloats")]; - columnSource.fillChunk(context, WritableFloatChunk.writableChunkWrap(result), - rangeIndex); + columnSource.fillChunk(context, WritableFloatChunk.writableChunkWrap(result), rangeIndex); return result; } } @@ -327,8 +319,8 @@ public int getInt(final long pos) { @Override public int[] getInts(final long startPosInclusive, final long endPosExclusive) { try (final Index rangeIndex = getSubIndexByPos(startPosInclusive, endPosExclusive); - final ChunkSource.FillContext context = - columnSource.makeFillContext(rangeIndex.intSize("getInts"), null)) { + final ChunkSource.FillContext context = + columnSource.makeFillContext(rangeIndex.intSize("getInts"), null)) { final int[] result = new int[rangeIndex.intSize("getInts")]; columnSource.fillChunk(context, 
WritableIntChunk.writableChunkWrap(result), rangeIndex); return result; @@ -365,11 +357,10 @@ public long getLong(final long pos) { @Override public long[] getLongs(final long startPosInclusive, final long endPosExclusive) { try (final Index rangeIndex = getSubIndexByPos(startPosInclusive, endPosExclusive); - final ChunkSource.FillContext context = - columnSource.makeFillContext(rangeIndex.intSize("getLongs"), null)) { + final ChunkSource.FillContext context = + columnSource.makeFillContext(rangeIndex.intSize("getLongs"), null)) { final long[] result = new long[rangeIndex.intSize("getLongs")]; - columnSource.fillChunk(context, WritableLongChunk.writableChunkWrap(result), - rangeIndex); + columnSource.fillChunk(context, WritableLongChunk.writableChunkWrap(result), rangeIndex); return result; } } @@ -404,11 +395,10 @@ public short getShort(final long pos) { @Override public short[] getShorts(final long startPosInclusive, final long endPosExclusive) { try (final Index rangeIndex = getSubIndexByPos(startPosInclusive, endPosExclusive); - final ChunkSource.FillContext context = - columnSource.makeFillContext(rangeIndex.intSize("getShorts"), null)) { + final ChunkSource.FillContext context = + columnSource.makeFillContext(rangeIndex.intSize("getShorts"), null)) { final short[] result = new short[rangeIndex.intSize("getShorts")]; - columnSource.fillChunk(context, WritableShortChunk.writableChunkWrap(result), - rangeIndex); + columnSource.fillChunk(context, WritableShortChunk.writableChunkWrap(result), rangeIndex); return result; } } diff --git a/DB/src/main/java/io/deephaven/db/v2/InstrumentedListener.java b/DB/src/main/java/io/deephaven/db/v2/InstrumentedListener.java index 40dc518c2bb..38c64299bfe 100644 --- a/DB/src/main/java/io/deephaven/db/v2/InstrumentedListener.java +++ b/DB/src/main/java/io/deephaven/db/v2/InstrumentedListener.java @@ -18,14 +18,13 @@ public InstrumentedListener(@Nullable final String description) { super(description, false); } - public 
InstrumentedListener(@Nullable final String description, - final boolean terminalListener) { + public InstrumentedListener(@Nullable final String description, final boolean terminalListener) { super(description, terminalListener); } @Override public AbstractIndexUpdateNotification getNotification(final Index added, final Index removed, - final Index modified) { + final Index modified) { return new Notification(added, removed, modified); } @@ -36,18 +35,16 @@ public class Notification extends NotificationBase { Notification(final Index added, final Index removed, final Index modified) { super(new ShiftAwareListener.Update(added.clone(), removed.clone(), modified.clone(), - IndexShiftData.EMPTY, ModifiedColumnSet.ALL)); + IndexShiftData.EMPTY, ModifiedColumnSet.ALL)); update.release(); // NotificationBase assumes it does not own the provided update. } @Override public void run() { doRun(() -> { - if (initialImage != null - && (initialImage != update.added || update.removed.nonempty() + if (initialImage != null && (initialImage != update.added || update.removed.nonempty() || update.modified.nonempty())) { - onUpdate(update.added.minus(initialImageClone), update.removed, - update.modified); + onUpdate(update.added.minus(initialImageClone), update.removed, update.modified); } else { onUpdate(update.added, update.removed, update.modified); } diff --git a/DB/src/main/java/io/deephaven/db/v2/InstrumentedListenerAdapter.java b/DB/src/main/java/io/deephaven/db/v2/InstrumentedListenerAdapter.java index 2205860c138..5e3a520a84d 100644 --- a/DB/src/main/java/io/deephaven/db/v2/InstrumentedListenerAdapter.java +++ b/DB/src/main/java/io/deephaven/db/v2/InstrumentedListenerAdapter.java @@ -29,8 +29,7 @@ */ public abstract class InstrumentedListenerAdapter extends InstrumentedListener { - private static final RetentionCache RETENTION_CACHE = - new RetentionCache<>(); + private static final RetentionCache RETENTION_CACHE = new RetentionCache<>(); private final boolean retain; @@ 
-40,35 +39,29 @@ public abstract class InstrumentedListenerAdapter extends InstrumentedListener { /** * Create an instrumented listener for source. No description is provided. * - * @param source The source table this listener will subscribe to - needed for preserving - * referential integrity - * @param retain Whether a hard reference to this listener should be maintained to prevent it - * from being collected. In most scenarios, it's better to specify {@code false} and keep - * a reference in the calling code. + * @param source The source table this listener will subscribe to - needed for preserving referential integrity + * @param retain Whether a hard reference to this listener should be maintained to prevent it from being collected. + * In most scenarios, it's better to specify {@code false} and keep a reference in the calling code. */ public InstrumentedListenerAdapter(@NotNull final DynamicTable source, final boolean retain) { this(null, source, retain); } /** - * @param description A description for the UpdatePerformanceTracker to append to its entry - * description. - * @param source The source table this listener will subscribe to - needed for preserving - * referential integrity. - * @param retain Whether a hard reference to this listener should be maintained to prevent it - * from being collected. In most scenarios, it's better to specify {@code false} and keep - * a reference in the calling code. + * @param description A description for the UpdatePerformanceTracker to append to its entry description. + * @param source The source table this listener will subscribe to - needed for preserving referential integrity. + * @param retain Whether a hard reference to this listener should be maintained to prevent it from being collected. + * In most scenarios, it's better to specify {@code false} and keep a reference in the calling code. 
*/ - public InstrumentedListenerAdapter(@Nullable final String description, - @NotNull final DynamicTable source, final boolean retain) { + public InstrumentedListenerAdapter(@Nullable final String description, @NotNull final DynamicTable source, + final boolean retain) { super(description); this.source = Require.neqNull(source, "source"); if (this.retain = retain) { RETENTION_CACHE.retain(this); if (Liveness.DEBUG_MODE_ENABLED) { Liveness.log.info().append("LivenessDebug: InstrumentedListenerAdapter ") - .append(Utils.REFERENT_FORMATTER, this) - .append(" created with retention enabled").endl(); + .append(Utils.REFERENT_FORMATTER, this).append(" created with retention enabled").endl(); } } manage(source); @@ -84,11 +77,9 @@ public InstrumentedListenerAdapter(@Nullable final String description, * @param sourceEntry the performance tracker entry that was active when the error occurred */ @Override - public void onFailureInternal(Throwable originalException, - UpdatePerformanceTracker.Entry sourceEntry) { + public void onFailureInternal(Throwable originalException, UpdatePerformanceTracker.Entry sourceEntry) { try { - AsyncErrorLogger.log(DBTimeUtils.currentTime(), sourceEntry, sourceEntry, - originalException); + AsyncErrorLogger.log(DBTimeUtils.currentTime(), sourceEntry, sourceEntry, originalException); AsyncClientErrorNotifier.reportError(originalException); } catch (IOException e) { throw new RuntimeException("Exception in " + sourceEntry.toString(), originalException); diff --git a/DB/src/main/java/io/deephaven/db/v2/InstrumentedListenerBase.java b/DB/src/main/java/io/deephaven/db/v2/InstrumentedListenerBase.java index d7be6126c2d..9cdcf1003b9 100644 --- a/DB/src/main/java/io/deephaven/db/v2/InstrumentedListenerBase.java +++ b/DB/src/main/java/io/deephaven/db/v2/InstrumentedListenerBase.java @@ -30,7 +30,7 @@ import java.io.IOException; public abstract class InstrumentedListenerBase extends LivenessArtifact - implements ListenerBase, 
NotificationQueue.Dependency { + implements ListenerBase, NotificationQueue.Dependency { private static final Logger log = LoggerFactory.getLogger(InstrumentedListener.class); @@ -39,8 +39,8 @@ public abstract class InstrumentedListenerBase extends LivenessArtifact private boolean failed = false; private static volatile boolean verboseLogging = Configuration - .getInstance() - .getBooleanWithDefault("InstrumentedListener.verboseLogging", false); + .getInstance() + .getBooleanWithDefault("InstrumentedListener.verboseLogging", false); private volatile long lastCompletedStep = NotificationStepReceiver.NULL_NOTIFICATION_STEP; private volatile long lastEnqueuedStep = NotificationStepReceiver.NULL_NOTIFICATION_STEP; @@ -67,14 +67,14 @@ public UpdatePerformanceTracker.Entry getEntry() { @Override public NotificationQueue.Notification getErrorNotification(Throwable originalException, - UpdatePerformanceTracker.Entry sourceEntry) { + UpdatePerformanceTracker.Entry sourceEntry) { return new ErrorNotification(originalException, sourceEntry == null ? 
entry : sourceEntry); } @Override public LogOutput append(@NotNull final LogOutput logOutput) { - return logOutput.append("InstrumentedListener:(identity=") - .append(System.identityHashCode(this)).append(", ").append(entry).append(")"); + return logOutput.append("InstrumentedListener:(identity=").append(System.identityHashCode(this)).append(", ") + .append(entry).append(")"); } public boolean canExecute(final long step) { @@ -84,26 +84,23 @@ public boolean canExecute(final long step) { @Override public boolean satisfied(final long step) { if (lastCompletedStep == step) { - LiveTableMonitor.DEFAULT.logDependencies().append("Already completed notification for ") - .append(this).endl(); + LiveTableMonitor.DEFAULT.logDependencies().append("Already completed notification for ").append(this) + .endl(); return true; } if (lastEnqueuedStep == step) { - LiveTableMonitor.DEFAULT.logDependencies().append("Enqueued notification for ") - .append(this).endl(); + LiveTableMonitor.DEFAULT.logDependencies().append("Enqueued notification for ").append(this).endl(); return false; } if (canExecute(step)) { - LiveTableMonitor.DEFAULT.logDependencies().append("Dependencies satisfied for ") - .append(this).endl(); + LiveTableMonitor.DEFAULT.logDependencies().append("Dependencies satisfied for ").append(this).endl(); lastCompletedStep = step; return true; } - LiveTableMonitor.DEFAULT.logDependencies().append("Dependencies not yet satisfied for ") - .append(this).endl(); + LiveTableMonitor.DEFAULT.logDependencies().append("Dependencies not yet satisfied for ").append(this).endl(); return false; } @@ -112,20 +109,15 @@ public void onFailure(Throwable originalException, UpdatePerformanceTracker.Entr onFailureInternal(originalException, sourceEntry == null ? 
entry : sourceEntry); } - protected abstract void onFailureInternal(Throwable originalException, - UpdatePerformanceTracker.Entry sourceEntry); + protected abstract void onFailureInternal(Throwable originalException, UpdatePerformanceTracker.Entry sourceEntry); - protected final void onFailureInternalWithDependent(final DynamicTable dependent, - final Throwable originalException, - final UpdatePerformanceTracker.Entry sourceEntry) { + protected final void onFailureInternalWithDependent(final DynamicTable dependent, final Throwable originalException, + final UpdatePerformanceTracker.Entry sourceEntry) { dependent.notifyListenersOnError(originalException, sourceEntry); - // although we have notified the dependent tables, we should notify the client side as well. - // In pretty - // much every case we would expect this notification to happen anyway, but in the case of a - // GuiTableMap - // from byExternal, the tables will have a hard reference, but would not actually have made - // it all the way + // although we have notified the dependent tables, we should notify the client side as well. In pretty + // much every case we would expect this notification to happen anyway, but in the case of a GuiTableMap + // from byExternal, the tables will have a hard reference, but would not actually have made it all the way // back to the client. Thus, the need for this additional reporting. 
try { if (SystemicObjectTracker.isSystemic(dependent)) { @@ -154,17 +146,14 @@ public void run() { } failed = true; try { - AsyncErrorLogger.log(DBTimeUtils.currentTime(), entry, sourceEntry, - originalException); + AsyncErrorLogger.log(DBTimeUtils.currentTime(), entry, sourceEntry, originalException); } catch (IOException e) { - log.error().append("Error logging failure from ").append(entry).append(": ") - .append(e).endl(); + log.error().append("Error logging failure from ").append(entry).append(": ").append(e).endl(); } try { onFailureInternal(originalException, sourceEntry); } catch (Exception e) { - log.error().append("Error propagating failure from ").append(sourceEntry) - .append(": ").append(e).endl(); + log.error().append("Error propagating failure from ").append(sourceEntry).append(": ").append(e).endl(); } } @@ -176,13 +165,11 @@ public boolean canExecute(final long step) { @Override public LogOutput append(LogOutput output) { return output.append("ErrorNotification{").append("originalException=") - .append(originalException.getMessage()).append(", sourceEntry=").append(sourceEntry) - .append("}"); + .append(originalException.getMessage()).append(", sourceEntry=").append(sourceEntry).append("}"); } } - protected abstract class NotificationBase extends AbstractIndexUpdateNotification - implements LogOutputAppendable { + protected abstract class NotificationBase extends AbstractIndexUpdateNotification implements LogOutputAppendable { final ShiftAwareListener.Update update; @@ -191,7 +178,7 @@ protected abstract class NotificationBase extends AbstractIndexUpdateNotificatio this.update = update.acquire(); if (lastCompletedStep == LogicalClock.DEFAULT.currentStep()) { throw Assert.statementNeverExecuted( - "Enqueued after lastCompletedStep already set to current step: " + toString()); + "Enqueued after lastCompletedStep already set to current step: " + toString()); } lastEnqueuedStep = LogicalClock.DEFAULT.currentStep(); } @@ -207,11 +194,11 @@ public final 
String toString() { @Override public final LogOutput append(LogOutput logOutput) { return logOutput.append("Notification:(step=") - .append(LogicalClock.DEFAULT.currentStep()) - .append(", listener=") - .append(System.identityHashCode(InstrumentedListenerBase.this)) - .append(")") - .append(entry); + .append(LogicalClock.DEFAULT.currentStep()) + .append(", listener=") + .append(System.identityHashCode(InstrumentedListenerBase.this)) + .append(")") + .append(entry); } @Override @@ -237,8 +224,7 @@ private void doRunInternal(final Runnable invokeOnUpdate) { try { if (lastCompletedStep == LogicalClock.DEFAULT.currentStep()) { throw new IllegalStateException( - "Executed after lastCompletedStep already set to current step: " - + toString()); + "Executed after lastCompletedStep already set to current step: " + toString()); } invokeOnUpdate.run(); @@ -253,17 +239,15 @@ private void doRunInternal(final Runnable invokeOnUpdate) { } en.append(", added.size()=").append(update.added.size()) - .append(", modified.size()=").append(update.modified.size()) - .append(", removed.size()=").append(update.removed.size()) - .append(", shifted.size()=").append(update.shifted.size()) - .append(", modifiedColumnSet=").append(update.modifiedColumnSet.toString()) - .append(":\n").append(e).endl(); + .append(", modified.size()=").append(update.modified.size()) + .append(", removed.size()=").append(update.removed.size()) + .append(", shifted.size()=").append(update.shifted.size()) + .append(", modifiedColumnSet=").append(update.modifiedColumnSet.toString()) + .append(":\n").append(e).endl(); if (useVerboseLogging) { - // This is a failure and shouldn't happen, so it is OK to be verbose here. - // Particularly as it is not - // clear what is actually going on in some cases of assertion failure related to - // the indices. + // This is a failure and shouldn't happen, so it is OK to be verbose here. 
Particularly as it is not + // clear what is actually going on in some cases of assertion failure related to the indices. log.error().append("Listener is: ").append(this.toString()).endl(); log.error().append("Added: ").append(update.added.toString()).endl(); log.error().append("Modified: ").append(update.modified.toString()).endl(); diff --git a/DB/src/main/java/io/deephaven/db/v2/InstrumentedShiftAwareListener.java b/DB/src/main/java/io/deephaven/db/v2/InstrumentedShiftAwareListener.java index 86a5df1aef9..e5438ac0441 100644 --- a/DB/src/main/java/io/deephaven/db/v2/InstrumentedShiftAwareListener.java +++ b/DB/src/main/java/io/deephaven/db/v2/InstrumentedShiftAwareListener.java @@ -7,15 +7,13 @@ import io.deephaven.db.v2.utils.AbstractIndexUpdateNotification; import org.jetbrains.annotations.Nullable; -public abstract class InstrumentedShiftAwareListener extends InstrumentedListenerBase - implements ShiftAwareListener { +public abstract class InstrumentedShiftAwareListener extends InstrumentedListenerBase implements ShiftAwareListener { public InstrumentedShiftAwareListener(@Nullable final String description) { super(description, false); } - public InstrumentedShiftAwareListener(@Nullable final String description, - final boolean terminalListener) { + public InstrumentedShiftAwareListener(@Nullable final String description, final boolean terminalListener) { super(description, terminalListener); } diff --git a/DB/src/main/java/io/deephaven/db/v2/InstrumentedShiftAwareListenerAdapter.java b/DB/src/main/java/io/deephaven/db/v2/InstrumentedShiftAwareListenerAdapter.java index 4c92a7ddbe5..54b47c37c42 100644 --- a/DB/src/main/java/io/deephaven/db/v2/InstrumentedShiftAwareListenerAdapter.java +++ b/DB/src/main/java/io/deephaven/db/v2/InstrumentedShiftAwareListenerAdapter.java @@ -19,8 +19,7 @@ import java.io.IOException; /** - * This class is used for ShiftAwareListeners that represent "leaf" nodes in the update propagation - * tree. 
+ * This class is used for ShiftAwareListeners that represent "leaf" nodes in the update propagation tree. * * It provides an optional retention cache, to prevent listeners from being garbage collected. * @@ -28,8 +27,7 @@ */ public abstract class InstrumentedShiftAwareListenerAdapter extends InstrumentedShiftAwareListener { - private static final RetentionCache RETENTION_CACHE = - new RetentionCache<>(); + private static final RetentionCache RETENTION_CACHE = new RetentionCache<>(); private final boolean retain; @@ -39,36 +37,30 @@ public abstract class InstrumentedShiftAwareListenerAdapter extends Instrumented /** * Create an instrumented listener for source. No description is provided. * - * @param source The source table this listener will subscribe to - needed for preserving - * referential integrity - * @param retain Whether a hard reference to this listener should be maintained to prevent it - * from being collected. In most scenarios, it's better to specify {@code false} and keep - * a reference in the calling code. + * @param source The source table this listener will subscribe to - needed for preserving referential integrity + * @param retain Whether a hard reference to this listener should be maintained to prevent it from being collected. + * In most scenarios, it's better to specify {@code false} and keep a reference in the calling code. */ - public InstrumentedShiftAwareListenerAdapter(@NotNull final DynamicTable source, - final boolean retain) { + public InstrumentedShiftAwareListenerAdapter(@NotNull final DynamicTable source, final boolean retain) { this(null, source, retain); } /** - * @param description A description for the UpdatePerformanceTracker to append to its entry - * description - * @param source The source table this listener will subscribe to - needed for preserving - * referential integrity - * @param retain Whether a hard reference to this listener should be maintained to prevent it - * from being collected. 
In most scenarios, it's better to specify {@code false} and keep - * a reference in the calling code. + * @param description A description for the UpdatePerformanceTracker to append to its entry description + * @param source The source table this listener will subscribe to - needed for preserving referential integrity + * @param retain Whether a hard reference to this listener should be maintained to prevent it from being collected. + * In most scenarios, it's better to specify {@code false} and keep a reference in the calling code. */ - public InstrumentedShiftAwareListenerAdapter(@Nullable final String description, - @NotNull final DynamicTable source, final boolean retain) { + public InstrumentedShiftAwareListenerAdapter(@Nullable final String description, @NotNull final DynamicTable source, + final boolean retain) { super(description); this.source = Require.neqNull(source, "source"); if (this.retain = retain) { RETENTION_CACHE.retain(this); if (Liveness.DEBUG_MODE_ENABLED) { Liveness.log.info().append("LivenessDebug: InstrumentedListenerAdapter ") - .append(Utils.REFERENT_FORMATTER, this) - .append(" created with retention enabled").endl(); + .append(Utils.REFERENT_FORMATTER, this) + .append(" created with retention enabled").endl(); } } manage(source); @@ -84,11 +76,9 @@ public InstrumentedShiftAwareListenerAdapter(@Nullable final String description, * @param sourceEntry the performance tracker entry that was active when the error occurred */ @Override - public void onFailureInternal(Throwable originalException, - UpdatePerformanceTracker.Entry sourceEntry) { + public void onFailureInternal(Throwable originalException, UpdatePerformanceTracker.Entry sourceEntry) { try { - AsyncErrorLogger.log(DBTimeUtils.currentTime(), sourceEntry, sourceEntry, - originalException); + AsyncErrorLogger.log(DBTimeUtils.currentTime(), sourceEntry, sourceEntry, originalException); AsyncClientErrorNotifier.reportError(originalException); } catch (IOException e) { throw new 
RuntimeException("Exception in " + sourceEntry.toString(), originalException); diff --git a/DB/src/main/java/io/deephaven/db/v2/JoinControl.java b/DB/src/main/java/io/deephaven/db/v2/JoinControl.java index fb9098244d0..348a2e7a4b6 100644 --- a/DB/src/main/java/io/deephaven/db/v2/JoinControl.java +++ b/DB/src/main/java/io/deephaven/db/v2/JoinControl.java @@ -39,38 +39,34 @@ int tableSizeForLeftBuild(Table leftTable) { } boolean useGrouping(Table leftTable, ColumnSource[] leftSources) { - return !leftTable.isLive() && leftSources.length == 1 - && leftTable.getIndex().hasGrouping(leftSources[0]); + return !leftTable.isLive() && leftSources.length == 1 && leftTable.getIndex().hasGrouping(leftSources[0]); } boolean buildLeft(QueryTable leftTable, Table rightTable) { return !leftTable.isRefreshing() && leftTable.size() <= rightTable.size(); } - boolean considerSymbolTables(QueryTable leftTable, - @SuppressWarnings("unused") QueryTable rightTable, boolean useLeftGrouping, - boolean useRightGrouping, ColumnSource leftSource, ColumnSource rightSource) { + boolean considerSymbolTables(QueryTable leftTable, @SuppressWarnings("unused") QueryTable rightTable, + boolean useLeftGrouping, boolean useRightGrouping, ColumnSource leftSource, + ColumnSource rightSource) { return !leftTable.isRefreshing() && !useLeftGrouping && leftSource.getType() == String.class - && !rightTable.isRefreshing() && !useRightGrouping - && rightSource.getType() == String.class - && leftSource instanceof SymbolTableSource && rightSource instanceof SymbolTableSource - && ((SymbolTableSource) leftSource).hasSymbolTable(leftTable.getIndex()) - && ((SymbolTableSource) rightSource).hasSymbolTable(rightTable.getIndex()); + && !rightTable.isRefreshing() && !useRightGrouping && rightSource.getType() == String.class + && leftSource instanceof SymbolTableSource && rightSource instanceof SymbolTableSource + && ((SymbolTableSource) leftSource).hasSymbolTable(leftTable.getIndex()) + && ((SymbolTableSource) 
rightSource).hasSymbolTable(rightTable.getIndex()); } boolean useSymbolTableLookupCaching() { return false; } - boolean useSymbolTables(long leftSize, long leftSymbolSize, long rightSize, - long rightSymbolSize) { + boolean useSymbolTables(long leftSize, long leftSymbolSize, long rightSize, long rightSymbolSize) { final long proposedSymbolSize = Math.min(rightSymbolSize, leftSymbolSize); return proposedSymbolSize <= leftSize / 2 || proposedSymbolSize <= rightSize / 2; } boolean useUniqueTable(boolean uniqueValues, long maximumUniqueValue, long minimumUniqueValue) { - // we want to have one left over value for "no good" (Integer.MAX_VALUE); and then we need - // another value to + // we want to have one left over value for "no good" (Integer.MAX_VALUE); and then we need another value to // represent that (max - min + 1) is the number of slots required. return uniqueValues && (maximumUniqueValue - minimumUniqueValue) < (Integer.MAX_VALUE - 2); } @@ -82,14 +78,11 @@ RedirectionType getRedirectionType(Table leftTable) { } else { return RedirectionType.Contiguous; } - } else if (leftTable.getIndex().getAverageRunLengthEstimate() >= Math - .min(SparseConstants.BLOCK_SIZE / 4, leftTable.getIndex().size() / 2)) { - // If we are going to use at least a quarter of a sparse array block, then it is a - // better answer than a - // hash table for redirection; because the hash table must store both the key and value, - // and then has a - // load factor of ~50%. Additionally, the sparse array source will have much faster sets - // and lookups so is + } else if (leftTable.getIndex().getAverageRunLengthEstimate() >= Math.min(SparseConstants.BLOCK_SIZE / 4, + leftTable.getIndex().size() / 2)) { + // If we are going to use at least a quarter of a sparse array block, then it is a better answer than a + // hash table for redirection; because the hash table must store both the key and value, and then has a + // load factor of ~50%. 
Additionally, the sparse array source will have much faster sets and lookups so is // a win, win, win (memory, set, lookup). return RedirectionType.Sparse; } else { diff --git a/DB/src/main/java/io/deephaven/db/v2/JoinRedirectionIndex.java b/DB/src/main/java/io/deephaven/db/v2/JoinRedirectionIndex.java index b288a814384..29559007ccc 100644 --- a/DB/src/main/java/io/deephaven/db/v2/JoinRedirectionIndex.java +++ b/DB/src/main/java/io/deephaven/db/v2/JoinRedirectionIndex.java @@ -11,12 +11,10 @@ */ public class JoinRedirectionIndex { /** - * A utility function that makes a redirection index based on the type determined by the - * JoinControl. + * A utility function that makes a redirection index based on the type determined by the JoinControl. * * @param control the JoinControl that determines the redirection type - * @param leftTable the left table of the join, which the join control examines and determines - * our result size + * @param leftTable the left table of the join, which the join control examines and determines our result size * * @return an empty RedirectionIndex */ @@ -29,12 +27,10 @@ static RedirectionIndex makeRedirectionIndex(JoinControl control, QueryTable lef redirectionIndex = new ContiguousRedirectionIndexImpl(leftTable.intSize()); break; case Sparse: - redirectionIndex = - new LongColumnSourceRedirectionIndex(new LongSparseArraySource()); + redirectionIndex = new LongColumnSourceRedirectionIndex(new LongSparseArraySource()); break; case Hash: - redirectionIndex = RedirectionIndexLockFreeImpl.FACTORY - .createRedirectionIndex(leftTable.intSize()); + redirectionIndex = RedirectionIndexLockFreeImpl.FACTORY.createRedirectionIndex(leftTable.intSize()); break; default: throw new IllegalStateException(); diff --git a/DB/src/main/java/io/deephaven/db/v2/KeyedTableListener.java b/DB/src/main/java/io/deephaven/db/v2/KeyedTableListener.java index 203f948b2e4..ff5e3063d6d 100644 --- a/DB/src/main/java/io/deephaven/db/v2/KeyedTableListener.java +++ 
b/DB/src/main/java/io/deephaven/db/v2/KeyedTableListener.java @@ -23,8 +23,7 @@ public enum KeyEvent { }; public interface KeyUpdateListener { - void update(KeyedTableListener keyedTableListener, SmartKey key, long index, - KeyEvent event); + void update(KeyedTableListener keyedTableListener, SmartKey key, long index, KeyEvent event); } private final QueryTable table; @@ -40,8 +39,7 @@ void update(KeyedTableListener keyedTableListener, SmartKey key, long index, // TODO: create an even more generic internals to handle multiple matches // TODO: Refactor with some sort of internal assistant object (unique versus generic) - // TODO: private HashMap keyToIndexObjectHashMap; // for storing multiple - // matches + // TODO: private HashMap keyToIndexObjectHashMap; // for storing multiple matches public KeyedTableListener(QueryTable table, String... keyColumnNames) { this.table = table; @@ -70,8 +68,7 @@ public void close() { this.table.removeUpdateListener(tableListener); } - private void handleUpdateFromTable(final Index added, final Index removed, - final Index modified) { + private void handleUpdateFromTable(final Index added, final Index removed, final Index modified) { // Add all the new rows to the hashmap for (Index.Iterator iterator = added.iterator(); iterator.hasNext();) { long next = iterator.nextLong(); diff --git a/DB/src/main/java/io/deephaven/db/v2/LazySnapshotTable.java b/DB/src/main/java/io/deephaven/db/v2/LazySnapshotTable.java index 053a2abc0bc..c4c61da6c4e 100644 --- a/DB/src/main/java/io/deephaven/db/v2/LazySnapshotTable.java +++ b/DB/src/main/java/io/deephaven/db/v2/LazySnapshotTable.java @@ -7,8 +7,8 @@ import io.deephaven.db.tables.Table; /** - * This interface represents a table that will not update itself on the refresh cycle, but instead - * refresh itself when the refresh for snapshot is called. 
+ * This interface represents a table that will not update itself on the refresh cycle, but instead refresh itself when + * the refresh for snapshot is called. */ public interface LazySnapshotTable extends Table { void refreshForSnapshot(); diff --git a/DB/src/main/java/io/deephaven/db/v2/LazySnapshotTableProvider.java b/DB/src/main/java/io/deephaven/db/v2/LazySnapshotTableProvider.java index ab048e4ca9b..947a8380b2d 100644 --- a/DB/src/main/java/io/deephaven/db/v2/LazySnapshotTableProvider.java +++ b/DB/src/main/java/io/deephaven/db/v2/LazySnapshotTableProvider.java @@ -5,10 +5,9 @@ package io.deephaven.db.v2; /** - * The right hand side table of a V2 snapshot must be an QueryTable (otherwise it fails with a class - * cast exception), but there are certain cases where we may not want that to be true (e.g., a - * WnUnsubscribedTable). If so, the right side table should implement this interface, and return a - * LazySnapshotTable for use with the snapshot. + * The right hand side table of a V2 snapshot must be an QueryTable (otherwise it fails with a class cast exception), + * but there are certain cases where we may not want that to be true (e.g., a WnUnsubscribedTable). If so, the right + * side table should implement this interface, and return a LazySnapshotTable for use with the snapshot. 
*/ public interface LazySnapshotTableProvider { LazySnapshotTable getLazySnapshotTable(); diff --git a/DB/src/main/java/io/deephaven/db/v2/Listener.java b/DB/src/main/java/io/deephaven/db/v2/Listener.java index 91e22c16dff..91f864313b3 100644 --- a/DB/src/main/java/io/deephaven/db/v2/Listener.java +++ b/DB/src/main/java/io/deephaven/db/v2/Listener.java @@ -28,8 +28,7 @@ public interface Listener extends ListenerBase { * @param modified rows modified * @return table change notification */ - NotificationQueue.IndexUpdateNotification getNotification(Index added, Index removed, - Index modified); + NotificationQueue.IndexUpdateNotification getNotification(Index added, Index removed, Index modified); /** * Sets the index for the initial data. diff --git a/DB/src/main/java/io/deephaven/db/v2/ListenerBase.java b/DB/src/main/java/io/deephaven/db/v2/ListenerBase.java index 7d3f8cbf169..4fc7fec4bb8 100644 --- a/DB/src/main/java/io/deephaven/db/v2/ListenerBase.java +++ b/DB/src/main/java/io/deephaven/db/v2/ListenerBase.java @@ -28,5 +28,5 @@ public interface ListenerBase extends LivenessNode { * @return exception notification */ NotificationQueue.Notification getErrorNotification(Throwable originalException, - UpdatePerformanceTracker.Entry sourceEntry); + UpdatePerformanceTracker.Entry sourceEntry); } diff --git a/DB/src/main/java/io/deephaven/db/v2/ListenerRecorder.java b/DB/src/main/java/io/deephaven/db/v2/ListenerRecorder.java index de6df68b560..5eee6721020 100644 --- a/DB/src/main/java/io/deephaven/db/v2/ListenerRecorder.java +++ b/DB/src/main/java/io/deephaven/db/v2/ListenerRecorder.java @@ -6,11 +6,10 @@ import io.deephaven.db.v2.utils.IndexShiftDataExpander; /** - * A listener recorder stores references to added, removed, modified, and shifted indices; and then - * notifies a {@link MergedListener} that a change has occurred. 
The combination of a - * {@link ListenerRecorder} and {@link MergedListener} should be used when a table has multiple - * sources, such that each table can process all of it's dependencies at once and fire a single - * notification to its children. + * A listener recorder stores references to added, removed, modified, and shifted indices; and then notifies a + * {@link MergedListener} that a change has occurred. The combination of a {@link ListenerRecorder} and + * {@link MergedListener} should be used when a table has multiple sources, such that each table can process all of it's + * dependencies at once and fire a single notification to its children. */ public class ListenerRecorder extends BaseTable.ShiftAwareListenerImpl { protected final String logPrefix; @@ -76,8 +75,7 @@ public Index getModified() { } public Index getModifiedPreShift() { - return recordedVariablesAreValid() ? update.getModifiedPreShift() - : Index.FACTORY.getEmptyIndex(); + return recordedVariablesAreValid() ? update.getModifiedPreShift() : Index.FACTORY.getEmptyIndex(); } public IndexShiftData getShifted() { @@ -95,12 +93,10 @@ public Update getUpdate() { /** * The caller is responsible for closing the {@link IndexShiftDataExpander}. * - * @return a backwards compatible version of added / removed / modified that account for - * shifting + * @return a backwards compatible version of added / removed / modified that account for shifting */ public IndexShiftDataExpander getExpandedARM() { - return recordedVariablesAreValid() - ? new IndexShiftDataExpander(update, getParent().getIndex()) - : IndexShiftDataExpander.EMPTY; + return recordedVariablesAreValid() ? 
new IndexShiftDataExpander(update, getParent().getIndex()) + : IndexShiftDataExpander.EMPTY; } } diff --git a/DB/src/main/java/io/deephaven/db/v2/LocalTableMap.java b/DB/src/main/java/io/deephaven/db/v2/LocalTableMap.java index abc880ed569..285540cca7a 100644 --- a/DB/src/main/java/io/deephaven/db/v2/LocalTableMap.java +++ b/DB/src/main/java/io/deephaven/db/v2/LocalTableMap.java @@ -43,8 +43,7 @@ /** * A TableMap implementation backed by a Map. */ -public class LocalTableMap extends TableMapImpl - implements NotificationQueue.Dependency, SystemicObject { +public class LocalTableMap extends TableMapImpl implements NotificationQueue.Dependency, SystemicObject { /** map to use for our backing store */ private final Map internalMap; @@ -75,8 +74,7 @@ public class LocalTableMap extends TableMapImpl transient boolean useGlobalTransformationThreadPool = true; /** - * If useGlobalTransformationThreadPool is false, how many threads should we use for our - * executorService? + * If useGlobalTransformationThreadPool is false, how many threads should we use for our executorService? */ transient int transformationThreads = 1; @@ -86,9 +84,9 @@ public class LocalTableMap extends TableMapImpl private transient ExecutorService executorService = null; /** - * The TableMaps don't know how to create empty tables for themselves, if the TableMap needs to - * be pre-populated, it must pass this callback into the constructor. When the populateKeys call - * on the TableMap is invoked, the populate function is called for each of the new keys. + * The TableMaps don't know how to create empty tables for themselves, if the TableMap needs to be pre-populated, it + * must pass this callback into the constructor. When the populateKeys call on the TableMap is invoked, the populate + * function is called for each of the new keys. 
*/ @FunctionalInterface public interface PopulateCallback { @@ -103,8 +101,8 @@ public interface PopulateCallback { private NotificationQueue.Dependency dependency; /** - * Cache the last satisfied step, so that we do not need to query all our dependencies if we - * have already been satisfied on a given cycle. + * Cache the last satisfied step, so that we do not need to query all our dependencies if we have already been + * satisfied on a given cycle. */ private long lastSatisfiedStep = -1; @@ -112,13 +110,12 @@ public interface PopulateCallback { * Constructor to create an instance with a specific map, which may not be populated. * * @param internalMap the map to use for our backing store - * @param populateCallback the callback that is invoked when {@link #populateKeys(Object...)} is - * called - * @param constituentDefinition the definition of the constituent tables (optional, but by - * providing it, a TableMap with no constituents can be merged) + * @param populateCallback the callback that is invoked when {@link #populateKeys(Object...)} is called + * @param constituentDefinition the definition of the constituent tables (optional, but by providing it, a TableMap + * with no constituents can be merged) */ LocalTableMap(Map internalMap, PopulateCallback populateCallback, - TableDefinition constituentDefinition) { + TableDefinition constituentDefinition) { this.populateCallback = populateCallback; this.internalMap = internalMap; this.constituentDefinition = constituentDefinition; @@ -128,8 +125,7 @@ public interface PopulateCallback { /** * Constructor to create an instance with an empty default map. 
* - * @param populateCallback the callback that is invoked when {@link #populateKeys(Object...)} is - * called + * @param populateCallback the callback that is invoked when {@link #populateKeys(Object...)} is called */ public LocalTableMap(PopulateCallback populateCallback) { this(new LinkedHashMap<>(), populateCallback, null); @@ -138,10 +134,9 @@ public LocalTableMap(PopulateCallback populateCallback) { /** * Constructor to create an instance with an empty default map. * - * @param populateCallback the callback that is invoked when {@link #populateKeys(Object...)} is - * called - * @param constituentDefinition the definition of the constituent tables (optional, but by - * providing it, a TableMap with no constituents can be merged) + * @param populateCallback the callback that is invoked when {@link #populateKeys(Object...)} is called + * @param constituentDefinition the definition of the constituent tables (optional, but by providing it, a TableMap + * with no constituents can be merged) */ public LocalTableMap(PopulateCallback populateCallback, TableDefinition constituentDefinition) { this(new LinkedHashMap<>(), populateCallback, constituentDefinition); @@ -184,10 +179,9 @@ private synchronized Table putInternal(Object key, Table table) { if (constituentDefinition != null) { if (!constituentDefinition.equalsIgnoreOrder(table.getDefinition())) { throw new IllegalStateException( - "Put table does not match expected constituent definition: " - + constituentDefinition - .getDifferenceDescription(table.getDefinition(), "existing", "new", - "\n ")); + "Put table does not match expected constituent definition: " + + constituentDefinition + .getDifferenceDescription(table.getDefinition(), "existing", "new", "\n ")); } } @@ -207,8 +201,7 @@ private synchronized Table putInternal(Object key, Table table) { return result; } - public Table computeIfAbsent(Object key, - java.util.function.Function tableFactory) { + public Table computeIfAbsent(Object key, 
java.util.function.Function tableFactory) { final Table result; synchronized (this) { final Table existing = get(key); @@ -284,8 +277,7 @@ public synchronized boolean containsKey(Object key) { } public synchronized Collection> entrySet() { - // TODO: Should we manage all entry values with current liveness scope? Do so on - // Map.Entry.getValue()? + // TODO: Should we manage all entry values with current liveness scope? Do so on Map.Entry.getValue()? return internalMap.entrySet(); } @@ -317,8 +309,7 @@ public R apply(Function.Unary function) { public TableMap transformTablesWithKey(BiFunction function) { final TableDefinition returnDefinition; if (constituentDefinition != null) { - final Table emptyTable = - new QueryTable(constituentDefinition, Index.FACTORY.getEmptyIndex(), + final Table emptyTable = new QueryTable(constituentDefinition, Index.FACTORY.getEmptyIndex(), NullValueColumnSource.createColumnSourceMap(constituentDefinition)); returnDefinition = function.apply(SENTINEL_KEY, emptyTable).getDefinition(); } else { @@ -329,7 +320,7 @@ public TableMap transformTablesWithKey(BiFunction function @Override public TableMap transformTablesWithKey(TableDefinition returnDefinition, - BiFunction function) { + BiFunction function) { final boolean shouldClear = QueryPerformanceRecorder.setCallsite(); try { final ComputedTableMap result = new ComputedTableMap(this, returnDefinition); @@ -338,13 +329,12 @@ public TableMap transformTablesWithKey(TableDefinition returnDefinition, if (executorService != null) { final boolean doCheck = LiveTableMonitor.DEFAULT.getCheckTableOperations(); final boolean hasLtm = LiveTableMonitor.DEFAULT.sharedLock().isHeldByCurrentThread() - || LiveTableMonitor.DEFAULT.exclusiveLock().isHeldByCurrentThread(); + || LiveTableMonitor.DEFAULT.exclusiveLock().isHeldByCurrentThread(); final Map> futures = new LinkedHashMap<>(); for (final Map.Entry entry : entrySet()) { futures.put(entry.getKey(), executorService.submit(() -> { if (hasLtm || 
!doCheck) { - final boolean oldCheck = - LiveTableMonitor.DEFAULT.setCheckTableOperations(false); + final boolean oldCheck = LiveTableMonitor.DEFAULT.setCheckTableOperations(false); try { return function.apply(entry.getKey(), entry.getValue()); } finally { @@ -360,8 +350,7 @@ public TableMap transformTablesWithKey(TableDefinition returnDefinition, try { table = entry.getValue().get(); } catch (InterruptedException | ExecutionException e) { - throw new UncheckedTableException( - "Failed to transform table for " + entry.getKey(), e); + throw new UncheckedTableException("Failed to transform table for " + entry.getKey(), e); } result.insertWithApply(entry.getKey(), table, (k, t) -> t); } @@ -380,12 +369,12 @@ public void handleTableAdded(Object key, Table table) { addListener(listener); if (isRefreshing()) { - // if we are refreshing, we want to add a parent reference, which turns the result - // refreshing; and ensures reachability + // if we are refreshing, we want to add a parent reference, which turns the result refreshing; and + // ensures reachability result.addParentReference(listener); } else { - // if we are static, we only want to ensure reachability, but not turn the result - // refreshing; the listener will fire for + // if we are static, we only want to ensure reachability, but not turn the result refreshing; the + // listener will fire for // populate calls, so the child map gets a value filled in properly result.setListenerReference(listener); } @@ -415,7 +404,7 @@ private synchronized ExecutorService getTransformationExecutorService() { final ThreadGroup threadGroup = new ThreadGroup("LocalTableMapTransform"); final NamingThreadFactory threadFactory = - new NamingThreadFactory(threadGroup, LocalTableMap.class, "transformExecutor", true); + new NamingThreadFactory(threadGroup, LocalTableMap.class, "transformExecutor", true); executorService = Executors.newFixedThreadPool(transformationThreads, threadFactory); return executorService; @@ -423,13 +412,12 @@ 
private synchronized ExecutorService getTransformationExecutorService() { @Override public TableMap transformTablesWithMap(TableMap other, - java.util.function.BiFunction function) { + java.util.function.BiFunction function) { final boolean shouldClear = QueryPerformanceRecorder.setCallsite(); try { final NotificationQueue.Dependency otherDependency = - other instanceof NotificationQueue.Dependency ? (NotificationQueue.Dependency) other - : null; + other instanceof NotificationQueue.Dependency ? (NotificationQueue.Dependency) other : null; final ComputedTableMap result = new ComputedTableMap(this, null); final ExecutorService executorService = getTransformationExecutorService(); @@ -438,7 +426,7 @@ public TableMap transformTablesWithMap(TableMap other, final boolean doCheck = LiveTableMonitor.DEFAULT.setCheckTableOperations(true); LiveTableMonitor.DEFAULT.setCheckTableOperations(doCheck); final boolean hasLtm = LiveTableMonitor.DEFAULT.sharedLock().isHeldByCurrentThread() - || LiveTableMonitor.DEFAULT.exclusiveLock().isHeldByCurrentThread(); + || LiveTableMonitor.DEFAULT.exclusiveLock().isHeldByCurrentThread(); final Map> futures = new LinkedHashMap<>(); @@ -447,8 +435,7 @@ public TableMap transformTablesWithMap(TableMap other, if (otherTable != null) { futures.put(entry.getKey(), executorService.submit(() -> { if (hasLtm || !doCheck) { - final boolean oldCheck = - LiveTableMonitor.DEFAULT.setCheckTableOperations(false); + final boolean oldCheck = LiveTableMonitor.DEFAULT.setCheckTableOperations(false); try { return function.apply(entry.getValue(), otherTable); } finally { @@ -465,8 +452,7 @@ public TableMap transformTablesWithMap(TableMap other, try { table = entry.getValue().get(); } catch (InterruptedException | ExecutionException e) { - throw new RuntimeException( - "Failed to transform table for " + entry.getKey(), e); + throw new RuntimeException("Failed to transform table for " + entry.getKey(), e); } result.insertWithApply(entry.getKey(), table, (k, t) -> 
t); } @@ -474,8 +460,7 @@ public TableMap transformTablesWithMap(TableMap other, for (final Map.Entry entry : entrySet()) { final Table otherTable = other.get(entry.getKey()); if (otherTable != null) { - result.insertWithApply(entry.getKey(), entry.getValue(), otherTable, - function::apply); + result.insertWithApply(entry.getKey(), entry.getValue(), otherTable, function::apply); } } } @@ -492,8 +477,7 @@ public void handleTableAdded(Object key, Table ourTable) { if (result.containsKey(key)) { return; } - result.insertWithApply(key, ourTable, otherTable, - function::apply); + result.insertWithApply(key, ourTable, otherTable, function::apply); } } }, otherDependency); @@ -504,9 +488,8 @@ public void handleTableAdded(Object key, Table ourTable) { } if (otherRefreshing) { - result.addParentReference(other); // Sets result refreshing if necessary, adds a - // hard ref from result to other, and liveness - // reference from result to other + result.addParentReference(other); // Sets result refreshing if necessary, adds a hard ref from result to + // other, and liveness reference from result to other final Listener otherListener = new LivenessListener() { @Override @@ -518,8 +501,7 @@ public void handleTableAdded(Object key, Table otherTable) { if (result.containsKey(key)) { return; } - result.insertWithApply(key, ourTable, otherTable, - function::apply); + result.insertWithApply(key, ourTable, otherTable, function::apply); } } }, otherDependency); @@ -567,7 +549,7 @@ public TableDefinition getConstituentDefinitionOrErr() { } } throw new UnsupportedOperationException( - "Can not convert TableMap with no constituents, or constituent definition, into a Table."); + "Can not convert TableMap with no constituents, or constituent definition, into a Table."); } @Override @@ -585,9 +567,9 @@ public synchronized Table merge() { } final UnionSourceManager unionSourceManager = - ((UnionColumnSource) (maybeUnionColumnSource)).getUnionSourceManager(); - 
unionSourceManager.setDisallowReinterpret(); // TODO: Skip this call if we can determine - // that our entry set is static. + ((UnionColumnSource) (maybeUnionColumnSource)).getUnionSourceManager(); + unionSourceManager.setDisallowReinterpret(); // TODO: Skip this call if we can determine that our entry set is + // static. if (isRefreshing()) { unionSourceManager.noteUsingComponentsIsUnsafe(); unionSourceManager.setRefreshing(); @@ -650,8 +632,8 @@ private ComputedTableMap(LocalTableMap parent, TableDefinition constituentDefini this.parent = parent; if (parent.isRefreshing()) { setRefreshing(true); - // NB: No need to addParentReference - we hold a strong reference to our parent - // already for populateKeys. + // NB: No need to addParentReference - we hold a strong reference to our parent already for + // populateKeys. manage(parent); } this.useGlobalTransformationThreadPool = parent.useGlobalTransformationThreadPool; @@ -669,16 +651,14 @@ public Table put(Object key, Table table) { throw new UnsupportedOperationException(); } - private void insertWithApply(Object key, Table input, - BiFunction operator) { - super.put(key, SystemicObjectTracker.executeSystemically(isSystemicObject(), - () -> operator.apply(key, input))); + private void insertWithApply(Object key, Table input, BiFunction operator) { + super.put(key, + SystemicObjectTracker.executeSystemically(isSystemicObject(), () -> operator.apply(key, input))); } - private void insertWithApply(Object key, Table ourInput, Table otherInput, - BinaryOperator
    operator) { + private void insertWithApply(Object key, Table ourInput, Table otherInput, BinaryOperator
    operator) { super.put(key, SystemicObjectTracker.executeSystemically(isSystemicObject(), - () -> operator.apply(ourInput, otherInput))); + () -> operator.apply(ourInput, otherInput))); } private void setFlat() { @@ -691,8 +671,7 @@ void setListenerReference(Listener listenerReference) { @Override public boolean satisfied(final long step) { - // if we have any pending insertions or our parent is not yet satisfied, then we are not - // satisfied + // if we have any pending insertions or our parent is not yet satisfied, then we are not satisfied if (outstandingNotifications.get() > 0) { return false; } @@ -723,8 +702,7 @@ public void run() { AsyncClientErrorNotifier.reportError(originalException); } } catch (IOException e) { - throw new RuntimeException("Exception in ComputedTableMap", - originalException); + throw new RuntimeException("Exception in ComputedTableMap", originalException); } } finally { outstandingNotifications.decrementAndGet(); @@ -738,8 +716,8 @@ public boolean canExecute(final long step) { @Override public LogOutput append(LogOutput output) { - return output.append("ComputedTableMap Notification{") - .append(System.identityHashCode(this)).append("}"); + return output.append("ComputedTableMap Notification{").append(System.identityHashCode(this)) + .append("}"); } }); } @@ -770,13 +748,12 @@ protected void destroy() { } /** - * Returns whether this LocalTableMap is configured to use the global transformation thread - * pool. + * Returns whether this LocalTableMap is configured to use the global transformation thread pool. * * Derived TableMaps will inherit this setting (but use their own thread pool). 
* - * @return true if transformTables and transformTablesWithMap will use the global thread pool; - * false if they will use a private thread pool + * @return true if transformTables and transformTablesWithMap will use the global thread pool; false if they will + * use a private thread pool */ public boolean useGlobalTransformationThreadPool() { return useGlobalTransformationThreadPool; @@ -785,14 +762,12 @@ public boolean useGlobalTransformationThreadPool() { /** * Sets whether this LocalTableMap is configured to use the global transformation thread pool. * - * When set to true, the global thread pool configured in {@link TableMapTransformThreadPool} is - * used. + * When set to true, the global thread pool configured in {@link TableMapTransformThreadPool} is used. * - * When set to false, a thread pool for this particular TableMap is used (or no thread pool if - * transformation threads is set to 1). + * When set to false, a thread pool for this particular TableMap is used (or no thread pool if transformation + * threads is set to 1). */ - public synchronized void setUseGlobalTransformationThreadPool( - boolean useGlobalTransformationThreadPool) { + public synchronized void setUseGlobalTransformationThreadPool(boolean useGlobalTransformationThreadPool) { this.useGlobalTransformationThreadPool = useGlobalTransformationThreadPool; if (useGlobalTransformationThreadPool && executorService != null) { executorService.shutdown(); @@ -801,9 +776,8 @@ public synchronized void setUseGlobalTransformationThreadPool( } /** - * Returns the number of transformation threads that will be used (if this TableMap is not - * configured to use the global thread pool). If this TableMap is configured to use the global - * thread pool, then this value is ignored. + * Returns the number of transformation threads that will be used (if this TableMap is not configured to use the + * global thread pool). If this TableMap is configured to use the global thread pool, then this value is ignored. 
* * @return the number of threads that will be used for transformations */ @@ -812,8 +786,8 @@ public int getTransformationThreads() { } /** - * Set the number of transformation threads that should be used. Additionally, the global - * transformation thread pool is disabled for this TableMap. + * Set the number of transformation threads that should be used. Additionally, the global transformation thread pool + * is disabled for this TableMap. * * Derived TableMaps will inherit this setting (but use their own thread pool). * diff --git a/DB/src/main/java/io/deephaven/db/v2/MemoizedOperationKey.java b/DB/src/main/java/io/deephaven/db/v2/MemoizedOperationKey.java index 97ae58a4979..da5c370edfa 100644 --- a/DB/src/main/java/io/deephaven/db/v2/MemoizedOperationKey.java +++ b/DB/src/main/java/io/deephaven/db/v2/MemoizedOperationKey.java @@ -19,22 +19,20 @@ /** * Keys for memoized operations on QueryTable. * - * When a null key is returned from one of the static methods; the operation will not be memoized - * (e.g., if we might depend on the query scope; we can't memoize the operation). + * When a null key is returned from one of the static methods; the operation will not be memoized (e.g., if we might + * depend on the query scope; we can't memoize the operation). */ public abstract class MemoizedOperationKey { /** - * Returns true if the attributes are compatible for this operation. If two table are identical, - * but for attributes many of the operations can be reused. In cases where they can, it would be - * wasteful to reapply them. + * Returns true if the attributes are compatible for this operation. If two table are identical, but for attributes + * many of the operations can be reused. In cases where they can, it would be wasteful to reapply them. 
* * @param oldAttributes the attributes on the table that already is memoized * @param newAttributes the attributes on the table this is not yet memoized * * @return true if the attributes are compatible for this operation. */ - boolean attributesCompatible(Map oldAttributes, - Map newAttributes) { + boolean attributesCompatible(Map oldAttributes, Map newAttributes) { // this is the safe default return false; } @@ -42,8 +40,7 @@ boolean attributesCompatible(Map oldAttributes, /** * Returns the Copy type for this operation. * - * @return the attribute copy type that should be used when transferring a memoized table across - * a copy. + * @return the attribute copy type that should be used when transferring a memoized table across a copy. */ BaseTable.CopyAttributeOperation copyType() { throw new UnsupportedOperationException(); @@ -55,8 +52,7 @@ BaseTable.CopyAttributeOperation getParentCopyType() { abstract static class AttributeAgnosticMemoizedOperationKey extends MemoizedOperationKey { @Override - boolean attributesCompatible(Map oldAttributes, - Map newAttributes) { + boolean attributesCompatible(Map oldAttributes, Map newAttributes) { return true; } @@ -69,7 +65,7 @@ public interface Provider { } static MemoizedOperationKey selectUpdateViewOrUpdateView(SelectColumn[] selectColumn, - SelectUpdateViewOrUpdateView.Flavor flavor) { + SelectUpdateViewOrUpdateView.Flavor flavor) { if (isMemoizable(selectColumn)) { return new SelectUpdateViewOrUpdateView(selectColumn, flavor); } else { @@ -105,7 +101,7 @@ public static MemoizedOperationKey treeTable(String idColumn, String parentColum } public static MemoizedOperationKey by(AggregationStateFactory aggregationStateFactory, - SelectColumn[] groupByColumns) { + SelectColumn[] groupByColumns) { if (!isMemoizable(groupByColumns)) { return null; } @@ -125,11 +121,11 @@ public static MemoizedOperationKey byExternal(boolean dropKeys, SelectColumn[] g private static boolean isMemoizable(SelectColumn[] selectColumn) { return 
Arrays.stream(selectColumn) - .allMatch(sc -> sc instanceof SourceColumn || sc instanceof ReinterpretedColumn); + .allMatch(sc -> sc instanceof SourceColumn || sc instanceof ReinterpretedColumn); } - public static MemoizedOperationKey rollup(ComboAggregateFactory comboAggregateFactory, - SelectColumn[] columns, boolean includeConstituents) { + public static MemoizedOperationKey rollup(ComboAggregateFactory comboAggregateFactory, SelectColumn[] columns, + boolean includeConstituents) { if (!isMemoizable(columns)) { return null; } @@ -182,8 +178,7 @@ public boolean equals(Object o) { final SelectUpdateViewOrUpdateView selectOrView = (SelectUpdateViewOrUpdateView) o; - return flavor == selectOrView.flavor - && Arrays.equals(selectColumns, selectOrView.selectColumns); + return flavor == selectOrView.flavor && Arrays.equals(selectColumns, selectOrView.selectColumns); } @Override @@ -201,8 +196,7 @@ BaseTable.CopyAttributeOperation copyType() { case Select: case Update: - // turns out select doesn't copy attributes, maybe we should more accurately - // codify that + // turns out select doesn't copy attributes, maybe we should more accurately codify that return BaseTable.CopyAttributeOperation.None; default: @@ -267,12 +261,9 @@ public int hashCode() { } @Override - boolean attributesCompatible(Map oldAttributes, - Map newAttributes) { - final String parentRestrictions = - (String) oldAttributes.get(Table.SORTABLE_COLUMNS_ATTRIBUTE); - final String newRestrictions = - (String) newAttributes.get(Table.SORTABLE_COLUMNS_ATTRIBUTE); + boolean attributesCompatible(Map oldAttributes, Map newAttributes) { + final String parentRestrictions = (String) oldAttributes.get(Table.SORTABLE_COLUMNS_ATTRIBUTE); + final String newRestrictions = (String) newAttributes.get(Table.SORTABLE_COLUMNS_ATTRIBUTE); return Objects.equals(parentRestrictions, newRestrictions); } @@ -350,7 +341,7 @@ public boolean equals(Object o) { return false; final TreeTable treeTable = (TreeTable) o; return 
Objects.equals(idColumn, treeTable.idColumn) && - Objects.equals(parentColumn, treeTable.parentColumn); + Objects.equals(parentColumn, treeTable.parentColumn); } @Override @@ -376,7 +367,7 @@ public boolean equals(Object o) { return false; final By by = (By) o; return Objects.equals(aggregationKey, by.aggregationKey) && - Arrays.equals(groupByColumns, by.groupByColumns); + Arrays.equals(groupByColumns, by.groupByColumns); } @Override @@ -410,7 +401,7 @@ public boolean equals(Object o) { return false; final ByExternal by = (ByExternal) o; return dropKeys == by.dropKeys && - Arrays.equals(groupByColumns, by.groupByColumns); + Arrays.equals(groupByColumns, by.groupByColumns); } @Override @@ -425,8 +416,7 @@ private static class Rollup extends AttributeAgnosticMemoizedOperationKey { private final By by; private final boolean includeConstituents; - Rollup(@NotNull AggregationMemoKey aggregationKey, SelectColumn[] groupByColumns, - boolean includeConstituents) { + Rollup(@NotNull AggregationMemoKey aggregationKey, SelectColumn[] groupByColumns, boolean includeConstituents) { this.includeConstituents = includeConstituents; this.by = new By(aggregationKey, groupByColumns); } @@ -443,8 +433,7 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; final Rollup rollup = (Rollup) o; - return Objects.equals(by, rollup.by) - && includeConstituents == rollup.includeConstituents; + return Objects.equals(by, rollup.by) && includeConstituents == rollup.includeConstituents; } @Override @@ -458,8 +447,8 @@ BaseTable.CopyAttributeOperation getParentCopyType() { } } - public static MemoizedOperationKey symbolTable( - @NotNull final SymbolTableSource symbolTableSource, final boolean useLookupCaching) { + public static MemoizedOperationKey symbolTable(@NotNull final SymbolTableSource symbolTableSource, + final boolean useLookupCaching) { return new SymbolTable(symbolTableSource, useLookupCaching); } @@ -468,8 +457,7 @@ private static final class 
SymbolTable extends MemoizedOperationKey { private final SymbolTableSource symbolTableSource; private final boolean useLookupCaching; - private SymbolTable(@NotNull final SymbolTableSource symbolTableSource, - final boolean useLookupCaching) { + private SymbolTable(@NotNull final SymbolTableSource symbolTableSource, final boolean useLookupCaching) { this.symbolTableSource = symbolTableSource; this.useLookupCaching = useLookupCaching; } @@ -484,14 +472,12 @@ public final boolean equals(final Object other) { } final SymbolTable that = (SymbolTable) other; // NB: We use the symbolTableSource's identity for comparison - return symbolTableSource == that.symbolTableSource - && useLookupCaching == that.useLookupCaching; + return symbolTableSource == that.symbolTableSource && useLookupCaching == that.useLookupCaching; } @Override public final int hashCode() { - return 31 * System.identityHashCode(symbolTableSource) - + Boolean.hashCode(useLookupCaching); + return 31 * System.identityHashCode(symbolTableSource) + Boolean.hashCode(useLookupCaching); } } @@ -535,7 +521,7 @@ private static class CrossJoin extends AttributeAgnosticMemoizedOperationKey { private final int cachedHashCode; CrossJoin(final Table rightTableCandidate, final MatchPair[] columnsToMatch, - final MatchPair[] columnsToAdd, final int numRightBitsToReserve) { + final MatchPair[] columnsToAdd, final int numRightBitsToReserve) { this.rightTableCandidate = new WeakReference<>(rightTableCandidate); this.columnsToMatch = columnsToMatch; this.columnsToAdd = columnsToAdd; @@ -561,9 +547,9 @@ public boolean equals(Object o) { if (rTable == null || oTable == null) return false; return rTable == oTable && - numRightBitsToReserve == crossJoin.numRightBitsToReserve && - Arrays.equals(columnsToMatch, crossJoin.columnsToMatch) && - Arrays.equals(columnsToAdd, crossJoin.columnsToAdd); + numRightBitsToReserve == crossJoin.numRightBitsToReserve && + Arrays.equals(columnsToMatch, crossJoin.columnsToMatch) && + 
Arrays.equals(columnsToAdd, crossJoin.columnsToAdd); } @Override @@ -577,10 +563,8 @@ BaseTable.CopyAttributeOperation copyType() { } } - public static CrossJoin crossJoin(final Table rightTableCandidate, - final MatchPair[] columnsToMatch, - final MatchPair[] columnsToAdd, final int numRightBitsToReserve) { - return new CrossJoin(rightTableCandidate, columnsToMatch, columnsToAdd, - numRightBitsToReserve); + public static CrossJoin crossJoin(final Table rightTableCandidate, final MatchPair[] columnsToMatch, + final MatchPair[] columnsToAdd, final int numRightBitsToReserve) { + return new CrossJoin(rightTableCandidate, columnsToMatch, columnsToAdd, numRightBitsToReserve); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/MergedListener.java b/DB/src/main/java/io/deephaven/db/v2/MergedListener.java index c3c212fd53f..cb7feebbb20 100644 --- a/DB/src/main/java/io/deephaven/db/v2/MergedListener.java +++ b/DB/src/main/java/io/deephaven/db/v2/MergedListener.java @@ -25,14 +25,13 @@ import java.util.stream.Stream; /** - * A merged listener has a collection of {@link ListenerRecorder}s. Each one must complete before - * the merged listener fires its sole notification for the cycle. + * A merged listener has a collection of {@link ListenerRecorder}s. Each one must complete before the merged listener + * fires its sole notification for the cycle. * - * You must use a MergedListener if your result table has multiple sources, otherwise it is possible - * for a table to produce notifications more than once in a cycle; which is an error. + * You must use a MergedListener if your result table has multiple sources, otherwise it is possible for a table to + * produce notifications more than once in a cycle; which is an error. 
*/ -public abstract class MergedListener extends LivenessArtifact - implements NotificationQueue.Dependency { +public abstract class MergedListener extends LivenessArtifact implements NotificationQueue.Dependency { private static final Logger log = LoggerFactory.getLogger(MergedListener.class); @SuppressWarnings({"FieldCanBeLocal", "unused"}) @@ -51,16 +50,14 @@ public abstract class MergedListener extends LivenessArtifact private final UpdatePerformanceTracker.Entry entry; protected MergedListener(Collection recorders, - Collection dependencies, String listenerDescription, - QueryTable result) { + Collection dependencies, String listenerDescription, QueryTable result) { this.recorders = recorders; recorders.forEach(this::manage); this.dependencies = dependencies; this.result = result; this.listenerDescription = listenerDescription; this.entry = UpdatePerformanceTracker.getInstance().getEntry(listenerDescription); - this.logPrefix = - System.identityHashCode(this) + " " + listenerDescription + " Merged Listener: "; + this.logPrefix = System.identityHashCode(this) + " " + listenerDescription + " Merged Listener: "; } private void releaseFromRecorders() { @@ -73,21 +70,20 @@ public void notifyChanges() { synchronized (this) { if (notificationClock == currentStep) { throw Assert.statementNeverExecuted( - "MergedListener was fired before both all listener records completed: listener=" - + System.identityHashCode(this) + ", currentStep=" + currentStep); + "MergedListener was fired before both all listener records completed: listener=" + + System.identityHashCode(this) + ", currentStep=" + currentStep); } - // we've already got something in the notification queue that has not yet been executed - // for the current step. + // we've already got something in the notification queue that has not yet been executed for the current + // step. if (queuedNotificationClock == currentStep) { return; } // Otherwise we should have already flushed that notification. 
Assert.assertion(queuedNotificationClock == notificationClock, - "queuedNotificationClock == notificationClock", queuedNotificationClock, - "queuedNotificationClock", notificationClock, "notificationClock", currentStep, - "currentStep", this, "MergedListener"); + "queuedNotificationClock == notificationClock", queuedNotificationClock, "queuedNotificationClock", + notificationClock, "notificationClock", currentStep, "currentStep", this, "MergedListener"); queuedNotificationClock = currentStep; } @@ -98,9 +94,8 @@ public void run() { try { if (queuedNotificationClock != LogicalClock.DEFAULT.currentStep()) { throw Assert.statementNeverExecuted("Notification step mismatch: listener=" - + System.identityHashCode(MergedListener.this) - + ": queuedNotificationClock=" + queuedNotificationClock + ", step=" - + LogicalClock.DEFAULT.currentStep()); + + System.identityHashCode(MergedListener.this) + ": queuedNotificationClock=" + + queuedNotificationClock + ", step=" + LogicalClock.DEFAULT.currentStep()); } long added = 0; @@ -121,24 +116,22 @@ public void run() { try { synchronized (MergedListener.this) { if (notificationClock == queuedNotificationClock) { - throw Assert.statementNeverExecuted( - "Multiple notifications in the same step: listener=" - + System.identityHashCode(MergedListener.this) - + ", queuedNotificationClock=" + queuedNotificationClock); + throw Assert.statementNeverExecuted("Multiple notifications in the same step: listener=" + + System.identityHashCode(MergedListener.this) + ", queuedNotificationClock=" + + queuedNotificationClock); } notificationClock = queuedNotificationClock; } process(); - LiveTableMonitor.DEFAULT.logDependencies() - .append("MergedListener has completed execution ").append(this).endl(); + LiveTableMonitor.DEFAULT.logDependencies().append("MergedListener has completed execution ") + .append(this).endl(); } finally { entry.onUpdateEnd(); lastCompletedStep = LogicalClock.DEFAULT.currentStep(); } } catch (Exception updateException) { - 
log.error().append(logPrefix).append("Uncaught exception for entry= ") - .append(entry) - .append(": ").append(updateException).endl(); + log.error().append(logPrefix).append("Uncaught exception for entry= ").append(entry) + .append(": ").append(updateException).endl(); notifyOnError(updateException); try { if (systemicResult()) { @@ -154,9 +147,8 @@ public void run() { @Override public LogOutput append(LogOutput logOutput) { - return logOutput.append("Merged Notification ") - .append(System.identityHashCode(MergedListener.this)).append(" ") - .append(listenerDescription); + return logOutput.append("Merged Notification ").append(System.identityHashCode(MergedListener.this)) + .append(" ").append(listenerDescription); } @Override @@ -183,32 +175,31 @@ protected void notifyOnError(Exception updateException, QueryTable downstream) { @Override public LogOutput append(@NotNull final LogOutput logOutput) { - return logOutput.append("MergedListener(").append(System.identityHashCode(this)) - .append(")"); + return logOutput.append("MergedListener(").append(System.identityHashCode(this)).append(")"); } protected boolean canExecute(final long step) { return Stream.concat(recorders.stream(), dependencies.stream()) - .allMatch((final NotificationQueue.Dependency dep) -> dep.satisfied(step)); + .allMatch((final NotificationQueue.Dependency dep) -> dep.satisfied(step)); } @Override public boolean satisfied(final long step) { if (lastCompletedStep == step) { - LiveTableMonitor.DEFAULT.logDependencies() - .append("MergedListener has previously been completed ").append(this).endl(); + LiveTableMonitor.DEFAULT.logDependencies().append("MergedListener has previously been completed ") + .append(this).endl(); return true; } if (queuedNotificationClock == step) { - LiveTableMonitor.DEFAULT.logDependencies() - .append("MergedListener has queued notification ").append(this).endl(); + LiveTableMonitor.DEFAULT.logDependencies().append("MergedListener has queued notification ").append(this) 
+ .endl(); return false; } if (canExecute(step)) { - LiveTableMonitor.DEFAULT.logDependencies() - .append("MergedListener has dependencies satisfied ").append(this).endl(); - // mark this node as completed, because both our parents have been satisfied; but we are - // not enqueued; so we can never actually execute + LiveTableMonitor.DEFAULT.logDependencies().append("MergedListener has dependencies satisfied ").append(this) + .endl(); + // mark this node as completed, because both our parents have been satisfied; but we are not enqueued; so we + // can never actually execute lastCompletedStep = step; return true; } diff --git a/DB/src/main/java/io/deephaven/db/v2/ModifiedColumnSet.java b/DB/src/main/java/io/deephaven/db/v2/ModifiedColumnSet.java index 169d600fca5..cd464172d13 100644 --- a/DB/src/main/java/io/deephaven/db/v2/ModifiedColumnSet.java +++ b/DB/src/main/java/io/deephaven/db/v2/ModifiedColumnSet.java @@ -24,8 +24,8 @@ private ModifiedColumnSet() { } /** - * A static 'special' ModifiedColumnSet that pretends all columns are dirty. Useful for - * backwards compatibility and convenience. + * A static 'special' ModifiedColumnSet that pretends all columns are dirty. Useful for backwards compatibility and + * convenience. */ public static final ModifiedColumnSet ALL = new ModifiedColumnSet() { @Override @@ -35,8 +35,7 @@ public Transformer newTransformer(String[] columnNames, ModifiedColumnSet[] colu @Override public BitSet extractAsBitSet() { - throw new UnsupportedOperationException( - "Cannot extract BitSet when number of columns is unknown."); + throw new UnsupportedOperationException("Cannot extract BitSet when number of columns is unknown."); } @Override @@ -205,8 +204,7 @@ public String toString(int maxColumns) { } }; - // TODO: combine TableDefinition, ColumnSourceMap, ColumnNames, and IdMap into reusable shared - // object state. + // TODO: combine TableDefinition, ColumnSourceMap, ColumnNames, and IdMap into reusable shared object state. 
// We'll use this to fail-fast when two incompatible MCSs interact. private final Map columns; private final String[] columnNames; @@ -224,21 +222,20 @@ public BitSet extractAsBitSet() { */ public interface Transformer { /** - * Propagates changes from one {@link ModifiedColumnSet} to another ModifiedColumnSet that - * contextually represent different tables. Clears the output set prior to transforming. + * Propagates changes from one {@link ModifiedColumnSet} to another ModifiedColumnSet that contextually + * represent different tables. Clears the output set prior to transforming. * * @param input source table's columns that changed * @param output result table's columns to propagate dirty columns to */ - default void clearAndTransform(final ModifiedColumnSet input, - final ModifiedColumnSet output) { + default void clearAndTransform(final ModifiedColumnSet input, final ModifiedColumnSet output) { output.clear(); transform(input, output); } /** - * Propagates changes from {@code input} {@link ModifiedColumnSet} to {@code output} - * ModifiedColumnSet. Does not clear the {@code output} before propagating. + * Propagates changes from {@code input} {@link ModifiedColumnSet} to {@code output} ModifiedColumnSet. Does not + * clear the {@code output} before propagating. * * @param input source table's columns that changed (null implies no modified columns) * @param output result table's columns to propagate dirty columns to @@ -261,8 +258,8 @@ default void transform(final ModifiedColumnSet input, final ModifiedColumnSet ou } /** - * Create an empty ModifiedColumnSet from the provided Column Source Map. Note: prefer to use - * the copy constructor on future objects that share this CSM to minimize duplicating state. + * Create an empty ModifiedColumnSet from the provided Column Source Map. Note: prefer to use the copy constructor + * on future objects that share this CSM to minimize duplicating state. 
* * @param columns The column source map this ModifiedColumnSet will use. */ @@ -277,8 +274,8 @@ public ModifiedColumnSet(final Map columns) { } /** - * Create a new ModifiedColumnSet that shares all immutable state with the provided MCS. The - * dirty set will initially be empty. + * Create a new ModifiedColumnSet that shares all immutable state with the provided MCS. The dirty set will + * initially be empty. * * @param other The ModifiedColumnSet to borrow immutable state from. */ @@ -294,26 +291,23 @@ public ModifiedColumnSet(final ModifiedColumnSet other) { } /** - * Create a transformer that is compatible with the class of ModifiedColumnSets that share a - * ColumnSourceMap. + * Create a transformer that is compatible with the class of ModifiedColumnSets that share a ColumnSourceMap. * * @param columnNames input columns to check for dirtiness * @param columnSets output columns to mark dirty when input column is dirty * @return a new Transformer instance */ - public Transformer newTransformer(final String[] columnNames, - final ModifiedColumnSet[] columnSets) { + public Transformer newTransformer(final String[] columnNames, final ModifiedColumnSet[] columnSets) { Assert.eq(columnNames.length, "columnNames.length", columnSets.length, "columnSets.length"); final int[] columnBits = new int[columnNames.length]; for (int i = 0; i < columnNames.length; ++i) { final int bitIndex = idMap.get(columnNames[i]); if (bitIndex == idMap.getNoEntryValue()) { throw new IllegalArgumentException( - "Unknown column while constructing ModifiedColumnSet: " + columnNames[i]); + "Unknown column while constructing ModifiedColumnSet: " + columnNames[i]); } columnBits[i] = bitIndex; - Assert.eq(columnSets[0].columns, "columnSets[0].columns", columnSets[i].columns, - "columnSets[i].columns"); + Assert.eq(columnSets[0].columns, "columnSets[0].columns", columnSets[i].columns, "columnSets[i].columns"); } return (input, output) -> { @@ -327,8 +321,8 @@ public Transformer newTransformer(final 
String[] columnNames, } /** - * Create a transformer that uses an identity mapping from one ColumnSourceMap to another. The - * two CSM's must have equivalent column names and column ordering. + * Create a transformer that uses an identity mapping from one ColumnSourceMap to another. The two CSM's must have + * equivalent column names and column ordering. * * @param newColumns the column source map for result table * @return a simple Transformer that makes a cheap, but CSM compatible copy @@ -336,17 +330,15 @@ public Transformer newTransformer(final String[] columnNames, public Transformer newIdentityTransformer(final Map newColumns) { if (columns == newColumns) { throw new IllegalArgumentException( - "Do not use a transformer when it is correct to pass-through the ModifiedColumnSet."); - } else if (!Iterators.elementsEqual(columns.keySet().iterator(), - newColumns.keySet().iterator())) { - throw new IllegalArgumentException( - "Result column names are incompatible with parent column names."); + "Do not use a transformer when it is correct to pass-through the ModifiedColumnSet."); + } else if (!Iterators.elementsEqual(columns.keySet().iterator(), newColumns.keySet().iterator())) { + throw new IllegalArgumentException("Result column names are incompatible with parent column names."); } return (input, output) -> { if (input.columns != columns || output.columns != newColumns) { throw new IllegalArgumentException( - "Provided ModifiedColumnSets are not compatible with this Transformer!"); + "Provided ModifiedColumnSets are not compatible with this Transformer!"); } output.dirtyColumns.or(input.dirtyColumns); }; @@ -357,36 +349,33 @@ public Transformer newIdentityTransformer(final Map newCol */ public boolean isCompatibleWith(final ModifiedColumnSet columnSet) { if (this == ModifiedColumnSet.ALL || this == ModifiedColumnSet.EMPTY - || columnSet == ModifiedColumnSet.ALL || columnSet == ModifiedColumnSet.EMPTY) { + || columnSet == ModifiedColumnSet.ALL || columnSet == 
ModifiedColumnSet.EMPTY) { return true; } - // They are compatible iff column names and column orders are identical. To be cheaper - // though, we're - // not going to compare those - we'll be stricter and require that they are both actually - // the same + // They are compatible iff column names and column orders are identical. To be cheaper though, we're + // not going to compare those - we'll be stricter and require that they are both actually the same // instance. return columns == columnSet.columns; } /** - * This method is used to determine whether or not a dependent requires a transformer to - * propagate dirty columns from its parent. If no transformer is required then it is acceptable - * to reuse any column set provided by the parent. Note this is intended to be determined during - * initialization and never during an update cycle. It is illegal to use the specialized - * ModifiedColumnSet.EMPTY / ModifiedColumnSet.ALL as their innards do not represent any table. + * This method is used to determine whether or not a dependent requires a transformer to propagate dirty columns + * from its parent. If no transformer is required then it is acceptable to reuse any column set provided by the + * parent. Note this is intended to be determined during initialization and never during an update cycle. It is + * illegal to use the specialized ModifiedColumnSet.EMPTY / ModifiedColumnSet.ALL as their innards do not represent + * any table. 
* * @param columnSet the column set for the dependent table - * @return whether or not this modified column set must use a Transformer to propagate modified - * columns + * @return whether or not this modified column set must use a Transformer to propagate modified columns */ public boolean requiresTransformer(final ModifiedColumnSet columnSet) { if (this == ModifiedColumnSet.ALL || this == ModifiedColumnSet.EMPTY - || columnSet == ModifiedColumnSet.ALL || columnSet == ModifiedColumnSet.EMPTY) { + || columnSet == ModifiedColumnSet.ALL || columnSet == ModifiedColumnSet.EMPTY) { throw new IllegalArgumentException( - "The ALL/EMPTY ModifiedColumnSets are not indicative of propagation compatibility."); + "The ALL/EMPTY ModifiedColumnSets are not indicative of propagation compatibility."); } - // They are propagation compatible iff column names and column orders are identical; - // otherwise requires transformer. + // They are propagation compatible iff column names and column orders are identical; otherwise requires + // transformer. return columns != columnSet.columns; } @@ -438,16 +427,14 @@ public boolean nonempty() { /** * @return the number of columns in this set - * @throws UnsupportedOperationException on {@link ModifiedColumnSet#ALL} and - * {@link ModifiedColumnSet#EMPTY} + * @throws UnsupportedOperationException on {@link ModifiedColumnSet#ALL} and {@link ModifiedColumnSet#EMPTY} */ public int numColumns() { return columns.size(); } /** - * Turns on all bits for these columns. Use this method to prepare pre-computed - * ModifiedColumnSets. + * Turns on all bits for these columns. Use this method to prepare pre-computed ModifiedColumnSets. * * @param columnNames the columns which need to be marked dirty */ @@ -455,8 +442,7 @@ public void setAll(final String... 
columnNames) { for (final String column : columnNames) { final int bitIndex = idMap.get(column); if (bitIndex == idMap.getNoEntryValue()) { - throw new IllegalArgumentException( - "Unknown column while constructing ModifiedColumnSet: " + column); + throw new IllegalArgumentException("Unknown column while constructing ModifiedColumnSet: " + column); } this.dirtyColumns.set(bitIndex); } @@ -489,8 +475,7 @@ public void setColumnWithIndex(int columnIndex) { } /** - * Marks specifically a range of adjacent columns. Start is inclusive, end is exclusive; like - * the BitSet API. + * Marks specifically a range of adjacent columns. Start is inclusive, end is exclusive; like the BitSet API. * * @param columnStart start column index to mark dirty * @param columnEndExclusive end column index to mark dirty @@ -500,8 +485,7 @@ public void setColumnsInIndexRange(int columnStart, int columnEndExclusive) { } /** - * Turns off all bits for these columns. Use this method to prepare pre-computed - * ModifiedColumnSets. + * Turns off all bits for these columns. Use this method to prepare pre-computed ModifiedColumnSets. * * @param columnNames the columns which need to be marked clean */ @@ -509,8 +493,7 @@ public void clearAll(final String... columnNames) { for (final String column : columnNames) { final int bitIndex = idMap.get(column); if (bitIndex == idMap.getNoEntryValue()) { - throw new IllegalArgumentException( - "Unknown column while constructing ModifiedColumnSet: " + column); + throw new IllegalArgumentException("Unknown column while constructing ModifiedColumnSet: " + column); } this.dirtyColumns.clear(bitIndex); } @@ -531,9 +514,8 @@ public void clearAll(final ModifiedColumnSet columnSet) { private void verifyCompatibilityWith(final ModifiedColumnSet columnSet) { if (!isCompatibleWith(columnSet)) { - throw new IllegalArgumentException( - "Provided ModifiedColumnSet is incompatible with this one! 
" + this.toDebugString() - + " vs " + columnSet.toDebugString()); + throw new IllegalArgumentException("Provided ModifiedColumnSet is incompatible with this one! " + + this.toDebugString() + " vs " + columnSet.toDebugString()); } } @@ -581,9 +563,8 @@ public boolean equals(final Object other) { return empty(); } if (this == ALL || columnSet == ALL) { - return (columnSet == ALL - || columnSet.dirtyColumns.cardinality() == columnSet.numColumns()) - && (this == ALL || dirtyColumns.cardinality() == numColumns()); + return (columnSet == ALL || columnSet.dirtyColumns.cardinality() == columnSet.numColumns()) + && (this == ALL || dirtyColumns.cardinality() == numColumns()); } // note: this logic is fine for EMPTY return dirtyColumns.equals(columnSet.dirtyColumns); @@ -597,8 +578,8 @@ public String toDebugString() { return "ModifiedColumnSet.ALL"; } StringBuilder sb = new StringBuilder("ModifiedColumnSet: columns=") - .append(Integer.toHexString(System.identityHashCode(this))) - .append(", {"); + .append(Integer.toHexString(System.identityHashCode(this))) + .append(", {"); for (int i = 0; i < columnNames.length; i++) { if (i != 0) { diff --git a/DB/src/main/java/io/deephaven/db/v2/NaturalJoinHelper.java b/DB/src/main/java/io/deephaven/db/v2/NaturalJoinHelper.java index 2ecea2b8367..3536ba28149 100644 --- a/DB/src/main/java/io/deephaven/db/v2/NaturalJoinHelper.java +++ b/DB/src/main/java/io/deephaven/db/v2/NaturalJoinHelper.java @@ -20,61 +20,53 @@ class NaturalJoinHelper { private NaturalJoinHelper() {} // static use only - static Table naturalJoin(QueryTable leftTable, QueryTable rightTable, - MatchPair[] columnsToMatch, MatchPair[] columnsToAdd, boolean exactMatch) { - return naturalJoin(leftTable, rightTable, columnsToMatch, columnsToAdd, exactMatch, - new JoinControl()); + static Table naturalJoin(QueryTable leftTable, QueryTable rightTable, MatchPair[] columnsToMatch, + MatchPair[] columnsToAdd, boolean exactMatch) { + return naturalJoin(leftTable, rightTable, 
columnsToMatch, columnsToAdd, exactMatch, new JoinControl()); } @VisibleForTesting - static Table naturalJoin(QueryTable leftTable, QueryTable rightTable, - MatchPair[] columnsToMatch, MatchPair[] columnsToAdd, boolean exactMatch, - JoinControl control) { - final Table result = naturalJoinInternal(leftTable, rightTable, columnsToMatch, - columnsToAdd, exactMatch, control); + static Table naturalJoin(QueryTable leftTable, QueryTable rightTable, MatchPair[] columnsToMatch, + MatchPair[] columnsToAdd, boolean exactMatch, JoinControl control) { + final Table result = + naturalJoinInternal(leftTable, rightTable, columnsToMatch, columnsToAdd, exactMatch, control); leftTable.maybeCopyColumnDescriptions(result, rightTable, columnsToMatch, columnsToAdd); leftTable.copyAttributes(result, BaseTable.CopyAttributeOperation.Join); return result; } - private static Table naturalJoinInternal(QueryTable leftTable, QueryTable rightTable, - MatchPair[] columnsToMatch, MatchPair[] columnsToAdd, boolean exactMatch, - JoinControl control) { - try (final BucketingContext bucketingContext = new BucketingContext("naturalJoin", - leftTable, rightTable, columnsToMatch, columnsToAdd, control)) { + private static Table naturalJoinInternal(QueryTable leftTable, QueryTable rightTable, MatchPair[] columnsToMatch, + MatchPair[] columnsToAdd, boolean exactMatch, JoinControl control) { + try (final BucketingContext bucketingContext = + new BucketingContext("naturalJoin", leftTable, rightTable, columnsToMatch, columnsToAdd, control)) { - // if we have a single column of unique values, and the range is small, we can use a - // simplified table + // if we have a single column of unique values, and the range is small, we can use a simplified table // TODO: SimpleUniqueStaticNaturalJoinManager, but not static! 
if (!rightTable.isLive() && control.useUniqueTable(bucketingContext.uniqueValues, - bucketingContext.maximumUniqueValue, bucketingContext.minimumUniqueValue)) { + bucketingContext.maximumUniqueValue, bucketingContext.minimumUniqueValue)) { Assert.neqNull(bucketingContext.uniqueFunctor, "uniqueFunctor"); - final SimpleUniqueStaticNaturalJoinStateManager jsm = - new SimpleUniqueStaticNaturalJoinStateManager( + final SimpleUniqueStaticNaturalJoinStateManager jsm = new SimpleUniqueStaticNaturalJoinStateManager( bucketingContext.originalLeftSources, bucketingContext.uniqueValuesRange(), bucketingContext.uniqueFunctor); jsm.setRightSide(rightTable.getIndex(), bucketingContext.rightSources[0]); final LongArraySource leftRedirections = new LongArraySource(); leftRedirections.ensureCapacity(leftTable.getIndex().size()); - jsm.decorateLeftSide(leftTable.getIndex(), bucketingContext.leftSources, - leftRedirections); + jsm.decorateLeftSide(leftTable.getIndex(), bucketingContext.leftSources, leftRedirections); - final RedirectionIndex redirectionIndex = jsm.buildRedirectionIndex(leftTable, - exactMatch, leftRedirections, control.getRedirectionType(leftTable)); + final RedirectionIndex redirectionIndex = jsm.buildRedirectionIndex(leftTable, exactMatch, + leftRedirections, control.getRedirectionType(leftTable)); - final QueryTable result = - makeResult(leftTable, rightTable, columnsToAdd, redirectionIndex, true); + final QueryTable result = makeResult(leftTable, rightTable, columnsToAdd, redirectionIndex, true); - leftTable.listenForUpdates(new LeftTickingListener( - bucketingContext.listenerDescription, columnsToMatch, columnsToAdd, leftTable, - result, redirectionIndex, jsm, bucketingContext.leftSources)); + leftTable.listenForUpdates(new LeftTickingListener(bucketingContext.listenerDescription, columnsToMatch, + columnsToAdd, leftTable, result, redirectionIndex, jsm, bucketingContext.leftSources)); return result; } if (bucketingContext.leftSources.length == 0) { return 
zeroKeyColumnsJoin(leftTable, rightTable, columnsToAdd, exactMatch, - bucketingContext.listenerDescription); + bucketingContext.listenerDescription); } final LongArraySource leftHashSlots = new LongArraySource(); @@ -85,41 +77,37 @@ private static Table naturalJoinInternal(QueryTable leftTable, QueryTable rightT if (leftTable.isLive()) { if (bucketingContext.useLeftGrouping) { throw new UnsupportedOperationException( - "Grouping is not supported with ticking chunked naturalJoin!"); + "Grouping is not supported with ticking chunked naturalJoin!"); } final int tableSize = Math.max(control.tableSizeForLeftBuild(leftTable), - control.tableSizeForRightBuild(rightTable)); + control.tableSizeForRightBuild(rightTable)); - final IncrementalChunkedNaturalJoinStateManager jsm = - new IncrementalChunkedNaturalJoinStateManager(bucketingContext.leftSources, - tableSize, bucketingContext.originalLeftSources); + final IncrementalChunkedNaturalJoinStateManager jsm = new IncrementalChunkedNaturalJoinStateManager( + bucketingContext.leftSources, tableSize, bucketingContext.originalLeftSources); jsm.buildFromRightSide(rightTable, bucketingContext.rightSources); - jsm.decorateLeftSide(leftTable.getIndex(), bucketingContext.leftSources, - leftHashSlots); + jsm.decorateLeftSide(leftTable.getIndex(), bucketingContext.leftSources, leftHashSlots); jsm.compactAll(); - redirectionIndex = jsm.buildRedirectionIndexFromRedirections(leftTable, - exactMatch, leftHashSlots, control.getRedirectionType(leftTable)); + redirectionIndex = jsm.buildRedirectionIndexFromRedirections(leftTable, exactMatch, leftHashSlots, + control.getRedirectionType(leftTable)); - final QueryTable result = - makeResult(leftTable, rightTable, columnsToAdd, redirectionIndex, true); + final QueryTable result = makeResult(leftTable, rightTable, columnsToAdd, redirectionIndex, true); - final JoinListenerRecorder leftRecorder = new JoinListenerRecorder(true, - bucketingContext.listenerDescription, leftTable, result); - final 
JoinListenerRecorder rightRecorder = new JoinListenerRecorder(false, - bucketingContext.listenerDescription, rightTable, result); + final JoinListenerRecorder leftRecorder = + new JoinListenerRecorder(true, bucketingContext.listenerDescription, leftTable, result); + final JoinListenerRecorder rightRecorder = + new JoinListenerRecorder(false, bucketingContext.listenerDescription, rightTable, result); jsm.setMaximumLoadFactor(control.getMaximumLoadFactor()); jsm.setTargetLoadFactor(control.getTargetLoadFactor()); - final ChunkedMergedJoinListener mergedJoinListener = - new ChunkedMergedJoinListener( - leftTable, rightTable, bucketingContext.leftSources, - bucketingContext.rightSources, columnsToMatch, - columnsToAdd, leftRecorder, rightRecorder, result, redirectionIndex, - jsm, exactMatch, bucketingContext.listenerDescription); + final ChunkedMergedJoinListener mergedJoinListener = new ChunkedMergedJoinListener( + leftTable, rightTable, bucketingContext.leftSources, bucketingContext.rightSources, + columnsToMatch, + columnsToAdd, leftRecorder, rightRecorder, result, redirectionIndex, jsm, exactMatch, + bucketingContext.listenerDescription); leftRecorder.setMergedListener(mergedJoinListener); rightRecorder.setMergedListener(mergedJoinListener); @@ -132,118 +120,107 @@ private static Table naturalJoinInternal(QueryTable leftTable, QueryTable rightT } else { // right is live, left is static final RightIncrementalChunkedNaturalJoinStateManager jsm = - new RightIncrementalChunkedNaturalJoinStateManager( - bucketingContext.leftSources, control.tableSizeForLeftBuild(leftTable), - bucketingContext.originalLeftSources); + new RightIncrementalChunkedNaturalJoinStateManager( + bucketingContext.leftSources, control.tableSizeForLeftBuild(leftTable), + bucketingContext.originalLeftSources); final ObjectArraySource indexSource; final MutableInt groupingSize = new MutableInt(); if (bucketingContext.useLeftGrouping) { final Map grouping = - 
bucketingContext.leftSources[0].getGroupToRange(leftTable.getIndex()); + bucketingContext.leftSources[0].getGroupToRange(leftTable.getIndex()); // noinspection unchecked final Pair, ObjectArraySource> flatResultColumnSources = - AbstractColumnSource.groupingToFlatSources( - (ColumnSource) bucketingContext.leftSources[0], grouping, - leftTable.getIndex(), groupingSize); - final ArrayBackedColumnSource groupSource = - flatResultColumnSources.getFirst(); + AbstractColumnSource.groupingToFlatSources( + (ColumnSource) bucketingContext.leftSources[0], grouping, leftTable.getIndex(), + groupingSize); + final ArrayBackedColumnSource groupSource = flatResultColumnSources.getFirst(); indexSource = flatResultColumnSources.getSecond(); final Table leftTableGrouped = - new QueryTable(Index.FACTORY.getFlatIndex(groupingSize.intValue()), - Collections.singletonMap(columnsToMatch[0].left(), groupSource)); + new QueryTable(Index.FACTORY.getFlatIndex(groupingSize.intValue()), + Collections.singletonMap(columnsToMatch[0].left(), groupSource)); final ColumnSource[] groupedSourceArray = {groupSource}; jsm.buildFromLeftSide(leftTableGrouped, groupedSourceArray, leftHashSlots); jsm.convertLeftGroups(groupingSize.intValue(), leftHashSlots, indexSource); } else { - jsm.buildFromLeftSide(leftTable, bucketingContext.leftSources, - leftHashSlots); + jsm.buildFromLeftSide(leftTable, bucketingContext.leftSources, leftHashSlots); indexSource = null; } jsm.addRightSide(rightTable.getIndex(), bucketingContext.rightSources); if (bucketingContext.useLeftGrouping) { - redirectionIndex = jsm.buildRedirectionIndexFromHashSlotGrouped(leftTable, - indexSource, groupingSize.intValue(), exactMatch, leftHashSlots, - control.getRedirectionType(leftTable)); + redirectionIndex = jsm.buildRedirectionIndexFromHashSlotGrouped(leftTable, indexSource, + groupingSize.intValue(), exactMatch, leftHashSlots, + control.getRedirectionType(leftTable)); } else { - redirectionIndex = 
jsm.buildRedirectionIndexFromHashSlot(leftTable, - exactMatch, leftHashSlots, control.getRedirectionType(leftTable)); + redirectionIndex = jsm.buildRedirectionIndexFromHashSlot(leftTable, exactMatch, leftHashSlots, + control.getRedirectionType(leftTable)); } - final QueryTable result = - makeResult(leftTable, rightTable, columnsToAdd, redirectionIndex, true); + final QueryTable result = makeResult(leftTable, rightTable, columnsToAdd, redirectionIndex, true); - rightTable.listenForUpdates( - new RightTickingListener(bucketingContext.listenerDescription, rightTable, - columnsToMatch, columnsToAdd, result, redirectionIndex, jsm, - bucketingContext.rightSources, exactMatch)); + rightTable + .listenForUpdates(new RightTickingListener(bucketingContext.listenerDescription, rightTable, + columnsToMatch, columnsToAdd, result, redirectionIndex, jsm, + bucketingContext.rightSources, exactMatch)); return result; } } else { if (bucketingContext.useLeftGrouping) { if (leftTable.isRefreshing()) { throw new UnsupportedOperationException( - "Grouping information is not supported when tables are refreshing!"); + "Grouping information is not supported when tables are refreshing!"); } final Map grouping = - bucketingContext.leftSources[0].getGroupToRange(leftTable.getIndex()); + bucketingContext.leftSources[0].getGroupToRange(leftTable.getIndex()); final MutableInt groupingSize = new MutableInt(); // noinspection unchecked final Pair, ObjectArraySource> flatResultColumnSources = - AbstractColumnSource.groupingToFlatSources( - (ColumnSource) bucketingContext.leftSources[0], grouping, - leftTable.getIndex(), groupingSize); + AbstractColumnSource.groupingToFlatSources((ColumnSource) bucketingContext.leftSources[0], + grouping, leftTable.getIndex(), groupingSize); final ArrayBackedColumnSource groupSource = flatResultColumnSources.getFirst(); - final ObjectArraySource indexSource = - flatResultColumnSources.getSecond(); + final ObjectArraySource indexSource = 
flatResultColumnSources.getSecond(); - final Table leftTableGrouped = - new QueryTable(Index.FACTORY.getFlatIndex(groupingSize.intValue()), + final Table leftTableGrouped = new QueryTable(Index.FACTORY.getFlatIndex(groupingSize.intValue()), Collections.singletonMap(columnsToMatch[0].left(), groupSource)); final ColumnSource[] groupedSourceArray = {groupSource}; final StaticChunkedNaturalJoinStateManager jsm = - new StaticChunkedNaturalJoinStateManager(groupedSourceArray, - StaticChunkedNaturalJoinStateManager - .hashTableSize(groupingSize.intValue()), - groupedSourceArray); + new StaticChunkedNaturalJoinStateManager(groupedSourceArray, + StaticChunkedNaturalJoinStateManager.hashTableSize(groupingSize.intValue()), + groupedSourceArray); jsm.buildFromLeftSide(leftTableGrouped, groupedSourceArray, leftHashSlots); jsm.decorateWithRightSide(rightTable, bucketingContext.rightSources); - redirectionIndex = jsm.buildGroupedRedirectionIndex(leftTable, exactMatch, - leftTableGrouped.size(), leftHashSlots, indexSource, - control.getRedirectionType(leftTable)); + redirectionIndex = jsm.buildGroupedRedirectionIndex(leftTable, exactMatch, leftTableGrouped.size(), + leftHashSlots, indexSource, control.getRedirectionType(leftTable)); } else if (control.buildLeft(leftTable, rightTable)) { final StaticChunkedNaturalJoinStateManager jsm = - new StaticChunkedNaturalJoinStateManager(bucketingContext.leftSources, - control.tableSizeForLeftBuild(leftTable), - bucketingContext.originalLeftSources); + new StaticChunkedNaturalJoinStateManager(bucketingContext.leftSources, + control.tableSizeForLeftBuild(leftTable), bucketingContext.originalLeftSources); jsm.buildFromLeftSide(leftTable, bucketingContext.leftSources, leftHashSlots); jsm.decorateWithRightSide(rightTable, bucketingContext.rightSources); - redirectionIndex = jsm.buildRedirectionIndexFromHashSlot(leftTable, exactMatch, - leftHashSlots, control.getRedirectionType(leftTable)); + redirectionIndex = 
jsm.buildRedirectionIndexFromHashSlot(leftTable, exactMatch, leftHashSlots, + control.getRedirectionType(leftTable)); } else { final StaticChunkedNaturalJoinStateManager jsm = - new StaticChunkedNaturalJoinStateManager(bucketingContext.leftSources, - control.tableSizeForRightBuild(rightTable), - bucketingContext.originalLeftSources); + new StaticChunkedNaturalJoinStateManager(bucketingContext.leftSources, + control.tableSizeForRightBuild(rightTable), bucketingContext.originalLeftSources); jsm.buildFromRightSide(rightTable, bucketingContext.rightSources); jsm.decorateLeftSide(leftTable, bucketingContext.leftSources, leftHashSlots); - redirectionIndex = jsm.buildRedirectionIndexFromRedirections(leftTable, - exactMatch, leftHashSlots, control.getRedirectionType(leftTable)); + redirectionIndex = jsm.buildRedirectionIndexFromRedirections(leftTable, exactMatch, leftHashSlots, + control.getRedirectionType(leftTable)); - final QueryTable result = - makeResult(leftTable, rightTable, columnsToAdd, redirectionIndex, true); + final QueryTable result = makeResult(leftTable, rightTable, columnsToAdd, redirectionIndex, true); - leftTable.listenForUpdates(new LeftTickingListener( - bucketingContext.listenerDescription, columnsToMatch, columnsToAdd, - leftTable, result, redirectionIndex, jsm, bucketingContext.leftSources)); + leftTable.listenForUpdates( + new LeftTickingListener(bucketingContext.listenerDescription, columnsToMatch, columnsToAdd, + leftTable, result, redirectionIndex, jsm, bucketingContext.leftSources)); return result; } } @@ -253,8 +230,8 @@ private static Table naturalJoinInternal(QueryTable leftTable, QueryTable rightT } @NotNull - private static Table zeroKeyColumnsJoin(QueryTable leftTable, QueryTable rightTable, - MatchPair[] columnsToAdd, boolean exactMatch, String listenerDescription) { + private static Table zeroKeyColumnsJoin(QueryTable leftTable, QueryTable rightTable, MatchPair[] columnsToAdd, + boolean exactMatch, String listenerDescription) { // we are 
a single value join, we do not need to do any work final SingleValueRedirectionIndex redirectionIndex; @@ -263,83 +240,74 @@ private static Table zeroKeyColumnsJoin(QueryTable leftTable, QueryTable rightTa if (rightTable.size() > 1) { if (leftTable.size() > 0) { throw new RuntimeException( - "naturalJoin with zero key columns may not have more than one row in the right hand side table!"); + "naturalJoin with zero key columns may not have more than one row in the right hand side table!"); } // we don't care where it goes redirectionIndex = getSingleValueRedirectionIndex(rightRefreshing, Index.NULL_KEY); } else if (rightTable.size() == 1) { - redirectionIndex = - getSingleValueRedirectionIndex(rightRefreshing, rightTable.getIndex().firstKey()); + redirectionIndex = getSingleValueRedirectionIndex(rightRefreshing, rightTable.getIndex().firstKey()); } else { if (exactMatch && leftTable.size() > 0) { throw new RuntimeException( - "exactJoin with zero key columns must have exactly one row in the right hand side table!"); + "exactJoin with zero key columns must have exactly one row in the right hand side table!"); } redirectionIndex = getSingleValueRedirectionIndex(rightRefreshing, Index.NULL_KEY); } - final QueryTable result = - makeResult(leftTable, rightTable, columnsToAdd, redirectionIndex, rightRefreshing); + final QueryTable result = makeResult(leftTable, rightTable, columnsToAdd, redirectionIndex, rightRefreshing); final ModifiedColumnSet.Transformer leftTransformer = - leftTable.newModifiedColumnSetTransformer(result, - leftTable.getDefinition().getColumnNamesArray()); + leftTable.newModifiedColumnSetTransformer(result, leftTable.getDefinition().getColumnNamesArray()); final ModifiedColumnSet.Transformer rightTransformer = - rightTable.newModifiedColumnSetTransformer(result, columnsToAdd); - final ModifiedColumnSet allRightColumns = - result.newModifiedColumnSet(MatchPair.getLeftColumns(columnsToAdd)); + rightTable.newModifiedColumnSetTransformer(result, 
columnsToAdd); + final ModifiedColumnSet allRightColumns = result.newModifiedColumnSet(MatchPair.getLeftColumns(columnsToAdd)); if (leftTable.isLive()) { if (rightTable.isLive()) { final JoinListenerRecorder leftRecorder = - new JoinListenerRecorder(true, listenerDescription, leftTable, result); + new JoinListenerRecorder(true, listenerDescription, leftTable, result); final JoinListenerRecorder rightRecorder = - new JoinListenerRecorder(false, listenerDescription, rightTable, result); + new JoinListenerRecorder(false, listenerDescription, rightTable, result); - final MergedListener mergedListener = - new MergedListener(Arrays.asList(leftRecorder, rightRecorder), + final MergedListener mergedListener = new MergedListener(Arrays.asList(leftRecorder, rightRecorder), Collections.emptyList(), listenerDescription, result) { - @Override - protected void process() { - result.modifiedColumnSet.clear(); + @Override + protected void process() { + result.modifiedColumnSet.clear(); - final boolean rightChanged = rightRecorder.recordedVariablesAreValid(); - final boolean leftChanged = leftRecorder.recordedVariablesAreValid(); + final boolean rightChanged = rightRecorder.recordedVariablesAreValid(); + final boolean leftChanged = leftRecorder.recordedVariablesAreValid(); - checkRightTableSizeZeroKeys(leftTable, rightTable, exactMatch); + checkRightTableSizeZeroKeys(leftTable, rightTable, exactMatch); - if (rightChanged) { - final boolean rightUpdated = - updateRightRedirection(rightTable, redirectionIndex); - if (rightUpdated) { - result.modifiedColumnSet.setAll(allRightColumns); - } else { - rightTransformer.transform(rightRecorder.getModifiedColumnSet(), + if (rightChanged) { + final boolean rightUpdated = updateRightRedirection(rightTable, redirectionIndex); + if (rightUpdated) { + result.modifiedColumnSet.setAll(allRightColumns); + } else { + rightTransformer.transform(rightRecorder.getModifiedColumnSet(), result.modifiedColumnSet); - } } + } - if (leftChanged) { - final Index 
modified; - if (rightChanged) { - modified = result.getIndex().minus(leftRecorder.getAdded()); - } else { - modified = leftRecorder.getModified().clone(); - } - leftTransformer.transform(leftRecorder.getModifiedColumnSet(), - result.modifiedColumnSet); - result.notifyListeners(new ShiftAwareListener.Update( - leftRecorder.getAdded().clone(), - leftRecorder.getRemoved().clone(), modified, + if (leftChanged) { + final Index modified; + if (rightChanged) { + modified = result.getIndex().minus(leftRecorder.getAdded()); + } else { + modified = leftRecorder.getModified().clone(); + } + leftTransformer.transform(leftRecorder.getModifiedColumnSet(), result.modifiedColumnSet); + result.notifyListeners(new ShiftAwareListener.Update( + leftRecorder.getAdded().clone(), leftRecorder.getRemoved().clone(), modified, leftRecorder.getShifted(), result.modifiedColumnSet)); - } else if (rightChanged) { - result.notifyListeners(new ShiftAwareListener.Update( + } else if (rightChanged) { + result.notifyListeners(new ShiftAwareListener.Update( Index.FACTORY.getEmptyIndex(), Index.FACTORY.getEmptyIndex(), - result.getIndex().clone(), IndexShiftData.EMPTY, - result.modifiedColumnSet)); - } + result.getIndex().clone(), IndexShiftData.EMPTY, result.modifiedColumnSet)); } + } - }; + }; leftRecorder.setMergedListener(mergedListener); rightRecorder.setMergedListener(mergedListener); @@ -348,52 +316,48 @@ protected void process() { result.addParentReference(mergedListener); } else { - leftTable.listenForUpdates( - new BaseTable.ShiftAwareListenerImpl(listenerDescription, leftTable, result) { - @Override - public void onUpdate(final Update upstream) { - checkRightTableSizeZeroKeys(leftTable, rightTable, exactMatch); - leftTransformer.clearAndTransform(upstream.modifiedColumnSet, - result.modifiedColumnSet); - final Update downstream = upstream.copy(); - downstream.modifiedColumnSet = result.modifiedColumnSet; - result.notifyListeners(downstream); - } - }); + leftTable + .listenForUpdates(new 
BaseTable.ShiftAwareListenerImpl(listenerDescription, leftTable, result) { + @Override + public void onUpdate(final Update upstream) { + checkRightTableSizeZeroKeys(leftTable, rightTable, exactMatch); + leftTransformer.clearAndTransform(upstream.modifiedColumnSet, result.modifiedColumnSet); + final Update downstream = upstream.copy(); + downstream.modifiedColumnSet = result.modifiedColumnSet; + result.notifyListeners(downstream); + } + }); } } else if (rightTable.isLive()) { if (leftTable.size() > 0) { rightTable.listenForUpdates( - new BaseTable.ShiftAwareListenerImpl(listenerDescription, rightTable, result) { - @Override - public void onUpdate(final Update upstream) { - checkRightTableSizeZeroKeys(leftTable, rightTable, exactMatch); - final boolean changed = - updateRightRedirection(rightTable, redirectionIndex); - if (!changed) { - rightTransformer.clearAndTransform(upstream.modifiedColumnSet, - result.modifiedColumnSet); + new BaseTable.ShiftAwareListenerImpl(listenerDescription, rightTable, result) { + @Override + public void onUpdate(final Update upstream) { + checkRightTableSizeZeroKeys(leftTable, rightTable, exactMatch); + final boolean changed = updateRightRedirection(rightTable, redirectionIndex); + if (!changed) { + rightTransformer.clearAndTransform(upstream.modifiedColumnSet, + result.modifiedColumnSet); + } + result.notifyListeners( + new Update(Index.FACTORY.getEmptyIndex(), Index.FACTORY.getEmptyIndex(), + result.getIndex().clone(), IndexShiftData.EMPTY, + changed ? allRightColumns : result.modifiedColumnSet)); } - result.notifyListeners(new Update(Index.FACTORY.getEmptyIndex(), - Index.FACTORY.getEmptyIndex(), - result.getIndex().clone(), IndexShiftData.EMPTY, - changed ? 
allRightColumns : result.modifiedColumnSet)); - } - }); + }); } } return result; } @NotNull - private static SingleValueRedirectionIndex getSingleValueRedirectionIndex(boolean refreshing, - long value) { + private static SingleValueRedirectionIndex getSingleValueRedirectionIndex(boolean refreshing, long value) { return refreshing ? new TickingSingleValueRedirectionIndexImpl(value) - : new StaticSingleValueRedirectionIndexImpl(value); + : new StaticSingleValueRedirectionIndexImpl(value); } - private static boolean updateRightRedirection(QueryTable rightTable, - SingleValueRedirectionIndex redirectionIndex) { + private static boolean updateRightRedirection(QueryTable rightTable, SingleValueRedirectionIndex redirectionIndex) { final boolean changed; if (rightTable.size() == 0) { changed = redirectionIndex.getValue() != Index.NULL_KEY; @@ -410,32 +374,29 @@ private static boolean updateRightRedirection(QueryTable rightTable, return changed; } - private static void checkRightTableSizeZeroKeys(final Table leftTable, final Table rightTable, - boolean exactMatch) { + private static void checkRightTableSizeZeroKeys(final Table leftTable, final Table rightTable, boolean exactMatch) { if (leftTable.size() != 0) { if (rightTable.size() > 1) { throw new RuntimeException( - "naturalJoin with zero key columns may not have more than one row in the right hand side table!"); + "naturalJoin with zero key columns may not have more than one row in the right hand side table!"); } else if (rightTable.size() == 0 && exactMatch) { throw new RuntimeException( - "exactJoin with zero key columns must have exactly one row in the right hand side table!"); + "exactJoin with zero key columns must have exactly one row in the right hand side table!"); } } } @NotNull private static QueryTable makeResult(@NotNull final QueryTable leftTable, - @NotNull final Table rightTable, - @NotNull final MatchPair[] columnsToAdd, - @NotNull final RedirectionIndex redirectionIndex, - final boolean 
rightRefreshingColumns) { - final Map columnSourceMap = - new LinkedHashMap<>(leftTable.getColumnSourceMap()); + @NotNull final Table rightTable, + @NotNull final MatchPair[] columnsToAdd, + @NotNull final RedirectionIndex redirectionIndex, + final boolean rightRefreshingColumns) { + final Map columnSourceMap = new LinkedHashMap<>(leftTable.getColumnSourceMap()); for (MatchPair mp : columnsToAdd) { // noinspection unchecked final ReadOnlyRedirectedColumnSource redirectedColumnSource = - new ReadOnlyRedirectedColumnSource(redirectionIndex, - rightTable.getColumnSource(mp.right())); + new ReadOnlyRedirectedColumnSource(redirectionIndex, rightTable.getColumnSource(mp.right())); if (rightRefreshingColumns) { redirectedColumnSource.startTrackingPrevValues(); } @@ -450,17 +411,16 @@ private static QueryTable makeResult(@NotNull final QueryTable leftTable, /** * This column source is used as a wrapper for the original table's symbol sources. * - * The symbol sources are reinterpreted to longs, and then the SymbolCombiner produces an - * IntegerSparseArraySource for each side. To convert from the symbol table value, we simply - * look it up in the symbolLookup source and use that as our chunked result. + * The symbol sources are reinterpreted to longs, and then the SymbolCombiner produces an IntegerSparseArraySource + * for each side. To convert from the symbol table value, we simply look it up in the symbolLookup source and use + * that as our chunked result. 
*/ static class SymbolTableToUniqueIdSource extends AbstractColumnSource - implements ImmutableColumnSourceGetDefaults.ForInt { + implements ImmutableColumnSourceGetDefaults.ForInt { private final ColumnSource symbolSource; private final IntegerSparseArraySource symbolLookup; - SymbolTableToUniqueIdSource(ColumnSource symbolSource, - IntegerSparseArraySource symbolLookup) { + SymbolTableToUniqueIdSource(ColumnSource symbolSource, IntegerSparseArraySource symbolLookup) { super(int.class); this.symbolSource = symbolSource; this.symbolLookup = symbolLookup; @@ -489,15 +449,13 @@ public void close() { } @Override - public FillContext makeFillContext(final int chunkCapacity, - final SharedContext sharedContext) { + public FillContext makeFillContext(final int chunkCapacity, final SharedContext sharedContext) { return new LongToIntFillContext(chunkCapacity, sharedContext); } @Override public void fillChunk(@NotNull final FillContext context, - @NotNull final WritableChunk destination, - @NotNull final OrderedKeys orderedKeys) { + @NotNull final WritableChunk destination, @NotNull final OrderedKeys orderedKeys) { final WritableIntChunk destAsInt = destination.asWritableIntChunk(); final LongToIntFillContext longToIntContext = (LongToIntFillContext) context; final WritableLongChunk longChunk = longToIntContext.longChunk; @@ -520,10 +478,9 @@ private static class LeftTickingListener extends BaseTable.ShiftAwareListenerImp private final ModifiedColumnSet rightModifiedColumns; private final ModifiedColumnSet.Transformer leftTransformer; - LeftTickingListener(String description, MatchPair[] columnsToMatch, - MatchPair[] columnsToAdd, QueryTable leftTable, QueryTable result, - RedirectionIndex redirectionIndex, StaticNaturalJoinStateManager jsm, - ColumnSource[] leftSources) { + LeftTickingListener(String description, MatchPair[] columnsToMatch, MatchPair[] columnsToAdd, + QueryTable leftTable, QueryTable result, RedirectionIndex redirectionIndex, + 
StaticNaturalJoinStateManager jsm, ColumnSource[] leftSources) { super(description, leftTable, result); this.result = result; this.leftTable = leftTable; @@ -531,13 +488,11 @@ private static class LeftTickingListener extends BaseTable.ShiftAwareListenerImp this.jsm = jsm; this.leftSources = leftSources; newLeftRedirections = new LongArraySource(); - leftKeyColumns = - leftTable.newModifiedColumnSet(MatchPair.getLeftColumns(columnsToMatch)); - rightModifiedColumns = - result.newModifiedColumnSet(MatchPair.getLeftColumns(columnsToAdd)); + leftKeyColumns = leftTable.newModifiedColumnSet(MatchPair.getLeftColumns(columnsToMatch)); + rightModifiedColumns = result.newModifiedColumnSet(MatchPair.getLeftColumns(columnsToAdd)); - leftTransformer = leftTable.newModifiedColumnSetTransformer(result, - leftTable.getDefinition().getColumnNamesArray()); + leftTransformer = + leftTable.newModifiedColumnSetTransformer(result, leftTable.getDefinition().getColumnNamesArray()); } @Override @@ -550,8 +505,7 @@ public void onUpdate(final Update upstream) { } downstream.modifiedColumnSet = result.modifiedColumnSet; - leftTransformer.clearAndTransform(upstream.modifiedColumnSet, - downstream.modifiedColumnSet); + leftTransformer.clearAndTransform(upstream.modifiedColumnSet, downstream.modifiedColumnSet); if (upstream.modifiedColumnSet.containsAny(leftKeyColumns)) { newLeftRedirections.ensureCapacity(downstream.modified.size()); @@ -602,13 +556,12 @@ private static class RightTickingListener extends BaseTable.ShiftAwareListenerIm private final ModifiedColumnSet allRightColumns; private final ModifiedColumnSet rightKeyColumns; private final ModifiedColumnSet.Transformer rightTransformer; - private final NaturalJoinModifiedSlotTracker modifiedSlotTracker = - new NaturalJoinModifiedSlotTracker(); + private final NaturalJoinModifiedSlotTracker modifiedSlotTracker = new NaturalJoinModifiedSlotTracker(); RightTickingListener(String description, QueryTable rightTable, MatchPair[] columnsToMatch, - 
MatchPair[] columnsToAdd, QueryTable result, RedirectionIndex redirectionIndex, - RightIncrementalChunkedNaturalJoinStateManager jsm, ColumnSource[] rightSources, - boolean exactMatch) { + MatchPair[] columnsToAdd, QueryTable result, RedirectionIndex redirectionIndex, + RightIncrementalChunkedNaturalJoinStateManager jsm, ColumnSource[] rightSources, + boolean exactMatch) { super(description, rightTable, result); this.result = result; this.redirectionIndex = redirectionIndex; @@ -616,8 +569,7 @@ private static class RightTickingListener extends BaseTable.ShiftAwareListenerIm this.rightSources = rightSources; this.exactMatch = exactMatch; - rightKeyColumns = - rightTable.newModifiedColumnSet(MatchPair.getRightColumns(columnsToMatch)); + rightKeyColumns = rightTable.newModifiedColumnSet(MatchPair.getRightColumns(columnsToMatch)); allRightColumns = result.newModifiedColumnSet(MatchPair.getLeftColumns(columnsToAdd)); rightTransformer = rightTable.newModifiedColumnSetTransformer(result, columnsToAdd); } @@ -629,19 +581,18 @@ public void onUpdate(final Update upstream) { final boolean addedRightColumnsChanged; - final int maxSize = UpdateSizeCalculator.chunkSize(upstream, - IncrementalChunkedNaturalJoinStateManager.CHUNK_SIZE); + final int maxSize = + UpdateSizeCalculator.chunkSize(upstream, IncrementalChunkedNaturalJoinStateManager.CHUNK_SIZE); if (maxSize == 0) { Assert.assertion(upstream.empty(), "upstream.empty()"); return; } try (final RightIncrementalChunkedNaturalJoinStateManager.ProbeContext pc = - jsm.makeProbeContext(rightSources, maxSize)) { + jsm.makeProbeContext(rightSources, maxSize)) { final Index modifiedPreShift; - final boolean rightKeysChanged = - upstream.modifiedColumnSet.containsAny(rightKeyColumns); + final boolean rightKeysChanged = upstream.modifiedColumnSet.containsAny(rightKeyColumns); if (rightKeysChanged) { modifiedPreShift = upstream.getModifiedPreShift(); @@ -653,35 +604,31 @@ public void onUpdate(final Update upstream) { final Index 
previousToShift; if (rightKeysChanged) { - previousToShift = getParent().getIndex().getPrevIndex() - .minus(modifiedPreShift).minus(upstream.removed); - } else { previousToShift = - getParent().getIndex().getPrevIndex().minus(upstream.removed); + getParent().getIndex().getPrevIndex().minus(modifiedPreShift).minus(upstream.removed); + } else { + previousToShift = getParent().getIndex().getPrevIndex().minus(upstream.removed); } final IndexShiftData.Iterator sit = upstream.shifted.applyIterator(); while (sit.hasNext()) { sit.next(); final Index shiftedIndex = - previousToShift.subindexByKey(sit.beginRange(), sit.endRange()) - .shift(sit.shiftDelta()); - jsm.applyRightShift(pc, rightSources, shiftedIndex, sit.shiftDelta(), - modifiedSlotTracker); + previousToShift.subindexByKey(sit.beginRange(), sit.endRange()).shift(sit.shiftDelta()); + jsm.applyRightShift(pc, rightSources, shiftedIndex, sit.shiftDelta(), modifiedSlotTracker); } } jsm.removeRight(pc, upstream.removed, rightSources, modifiedSlotTracker); - rightTransformer.clearAndTransform(upstream.modifiedColumnSet, - result.modifiedColumnSet); + rightTransformer.clearAndTransform(upstream.modifiedColumnSet, result.modifiedColumnSet); addedRightColumnsChanged = result.modifiedColumnSet.size() != 0; if (rightKeysChanged) { - // It should make us somewhat sad that we have to add/remove, because we are - // doing two hash lookups for keys that have not actually changed. - // The alternative would be to do an initial pass that would filter out key - // columns that have not actually changed. + // It should make us somewhat sad that we have to add/remove, because we are doing two hash lookups + // for keys that have not actually changed. + // The alternative would be to do an initial pass that would filter out key columns that have not + // actually changed. 
jsm.removeRight(pc, modifiedPreShift, rightSources, modifiedSlotTracker); jsm.addRightSide(pc, upstream.modified, rightSources, modifiedSlotTracker); } else { @@ -694,8 +641,8 @@ public void onUpdate(final Update upstream) { } final Index.RandomBuilder modifiedLeftBuilder = Index.FACTORY.getRandomBuilder(); - final ModifiedSlotUpdater slotUpdater = new ModifiedSlotUpdater(jsm, - modifiedLeftBuilder, redirectionIndex, exactMatch, addedRightColumnsChanged); + final ModifiedSlotUpdater slotUpdater = new ModifiedSlotUpdater(jsm, modifiedLeftBuilder, redirectionIndex, + exactMatch, addedRightColumnsChanged); modifiedSlotTracker.forAllModifiedSlots(slotUpdater); if (slotUpdater.changedRedirection) { result.modifiedColumnSet.setAll(allRightColumns); @@ -704,15 +651,13 @@ public void onUpdate(final Update upstream) { // left is static, so the only thing that can happen is modifications final Index modifiedLeft = modifiedLeftBuilder.getIndex(); - result.notifyListeners( - new Update(Index.FACTORY.getEmptyIndex(), Index.FACTORY.getEmptyIndex(), + result.notifyListeners(new Update(Index.FACTORY.getEmptyIndex(), Index.FACTORY.getEmptyIndex(), modifiedLeft, IndexShiftData.EMPTY, modifiedLeft.nonempty() ? 
result.modifiedColumnSet : ModifiedColumnSet.EMPTY)); } } - private static class ModifiedSlotUpdater - implements NaturalJoinModifiedSlotTracker.ModifiedSlotConsumer { + private static class ModifiedSlotUpdater implements NaturalJoinModifiedSlotTracker.ModifiedSlotConsumer { private final IncrementalNaturalJoinStateManager jsm; private final Index.RandomBuilder modifiedLeftBuilder; @@ -721,9 +666,8 @@ private static class ModifiedSlotUpdater private final boolean rightAddedColumnsChanged; boolean changedRedirection = false; - private ModifiedSlotUpdater(IncrementalNaturalJoinStateManager jsm, - Index.RandomBuilder modifiedLeftBuilder, RedirectionIndex redirectionIndex, - boolean exactMatch, boolean rightAddedColumnsChanged) { + private ModifiedSlotUpdater(IncrementalNaturalJoinStateManager jsm, Index.RandomBuilder modifiedLeftBuilder, + RedirectionIndex redirectionIndex, boolean exactMatch, boolean rightAddedColumnsChanged) { this.jsm = jsm; this.modifiedLeftBuilder = modifiedLeftBuilder; this.redirectionIndex = redirectionIndex; @@ -741,32 +685,28 @@ public void accept(long updatedSlot, long originalRightValue, byte flag) { final long rightIndex = jsm.getRightIndex(updatedSlot); if (rightIndex == StaticNaturalJoinStateManager.DUPLICATE_RIGHT_VALUE) { - throw new IllegalStateException( - "Duplicate right key for " + jsm.keyString(updatedSlot)); + throw new IllegalStateException("Duplicate right key for " + jsm.keyString(updatedSlot)); } final boolean unchangedRedirection = rightIndex == originalRightValue; - // if we have no right columns that have changed, and our redirection is identical we - // can quit here + // if we have no right columns that have changed, and our redirection is identical we can quit here if (unchangedRedirection && !rightAddedColumnsChanged - && (flag & NaturalJoinModifiedSlotTracker.FLAG_RIGHT_ADD) == 0) { + && (flag & NaturalJoinModifiedSlotTracker.FLAG_RIGHT_ADD) == 0) { return; } - final byte notShift = 
(~NaturalJoinModifiedSlotTracker.FLAG_RIGHT_SHIFT) - & NaturalJoinModifiedSlotTracker.FLAG_MASK; + final byte notShift = + (~NaturalJoinModifiedSlotTracker.FLAG_RIGHT_SHIFT) & NaturalJoinModifiedSlotTracker.FLAG_MASK; if ((flag & notShift) != 0) { - // we do not want to mark the state as modified if the only thing that changed was a - // shift + // we do not want to mark the state as modified if the only thing that changed was a shift // otherwise we know the left side is modified modifiedLeftBuilder.addIndex(leftIndices); } // but we might not need to update the redirection index - if (unchangedRedirection - && (flag & NaturalJoinModifiedSlotTracker.FLAG_RIGHT_ADD) == 0) { + if (unchangedRedirection && (flag & NaturalJoinModifiedSlotTracker.FLAG_RIGHT_ADD) == 0) { return; } @@ -798,20 +738,19 @@ private static class ChunkedMergedJoinListener extends MergedListener { private ChunkedMergedJoinListener(QueryTable leftTable, - QueryTable rightTable, - ColumnSource[] leftSources, - ColumnSource[] rightSources, - MatchPair[] columnsToMatch, - MatchPair[] columnsToAdd, - JoinListenerRecorder leftRecorder, - JoinListenerRecorder rightRecorder, - QueryTable result, - RedirectionIndex redirectionIndex, - IncrementalChunkedNaturalJoinStateManager jsm, - boolean exactMatch, - String listenerDescription) { - super(Arrays.asList(leftRecorder, rightRecorder), Collections.emptyList(), - listenerDescription, result); + QueryTable rightTable, + ColumnSource[] leftSources, + ColumnSource[] rightSources, + MatchPair[] columnsToMatch, + MatchPair[] columnsToAdd, + JoinListenerRecorder leftRecorder, + JoinListenerRecorder rightRecorder, + QueryTable result, + RedirectionIndex redirectionIndex, + IncrementalChunkedNaturalJoinStateManager jsm, + boolean exactMatch, + String listenerDescription) { + super(Arrays.asList(leftRecorder, rightRecorder), Collections.emptyList(), listenerDescription, result); this.leftSources = leftSources; this.rightSources = rightSources; this.leftRecorder = 
leftRecorder; @@ -820,14 +759,12 @@ private ChunkedMergedJoinListener(QueryTable leftTable, this.jsm = jsm; this.exactMatch = exactMatch; - rightKeyColumns = - rightTable.newModifiedColumnSet(MatchPair.getRightColumns(columnsToMatch)); - leftKeyColumns = - leftTable.newModifiedColumnSet(MatchPair.getLeftColumns(columnsToMatch)); + rightKeyColumns = rightTable.newModifiedColumnSet(MatchPair.getRightColumns(columnsToMatch)); + leftKeyColumns = leftTable.newModifiedColumnSet(MatchPair.getLeftColumns(columnsToMatch)); allRightColumns = result.newModifiedColumnSet(MatchPair.getLeftColumns(columnsToAdd)); - leftTransformer = leftTable.newModifiedColumnSetTransformer(result, leftTable - .getColumnSourceMap().keySet().toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)); + leftTransformer = leftTable.newModifiedColumnSetTransformer(result, + leftTable.getColumnSourceMap().keySet().toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)); rightTransformer = rightTable.newModifiedColumnSetTransformer(result, columnsToAdd); modifiedSlotTracker = new NaturalJoinModifiedSlotTracker(); } @@ -846,20 +783,18 @@ protected void process() { final Index rightModified = rightRecorder.getModified(); final ModifiedColumnSet rightModifiedColumns = rightRecorder.getModifiedColumnSet(); final boolean rightKeysModified = - rightModified.nonempty() && rightModifiedColumns.containsAny(rightKeyColumns); + rightModified.nonempty() && rightModifiedColumns.containsAny(rightKeyColumns); - final long probeSize = UpdateSizeCalculator.chunkSize( - Math.max(rightRemoved.size(), rightModified.size()), rightRecorder.getShifted(), - IncrementalChunkedNaturalJoinStateManager.CHUNK_SIZE); - final long buildSize = - Math.max(rightAdded.size(), rightKeysModified ? 
rightModified.size() : 0); + final long probeSize = + UpdateSizeCalculator.chunkSize(Math.max(rightRemoved.size(), rightModified.size()), + rightRecorder.getShifted(), IncrementalChunkedNaturalJoinStateManager.CHUNK_SIZE); + final long buildSize = Math.max(rightAdded.size(), rightKeysModified ? rightModified.size() : 0); // process right updates - try ( - final IncrementalChunkedNaturalJoinStateManager.ProbeContext pc = + try (final IncrementalChunkedNaturalJoinStateManager.ProbeContext pc = probeSize == 0 ? null : jsm.makeProbeContext(rightSources, probeSize); - final IncrementalChunkedNaturalJoinStateManager.BuildContext bc = - buildSize == 0 ? null : jsm.makeBuildContext(rightSources, buildSize)) { + final IncrementalChunkedNaturalJoinStateManager.BuildContext bc = + buildSize == 0 ? null : jsm.makeBuildContext(rightSources, buildSize)) { final Index modifiedPreShift; final IndexShiftData rightShifted = rightRecorder.getShifted(); @@ -878,16 +813,16 @@ protected void process() { addedRightColumnsChanged = result.modifiedColumnSet.size() > 0; if (rightKeysModified) { - // It should make us somewhat sad that we have to add/remove, because we are - // doing two hash lookups for keys that have not actually changed. - // The alternative would be to do an initial pass that would filter out key - // columns that have not actually changed. + // It should make us somewhat sad that we have to add/remove, because we are doing two hash + // lookups for keys that have not actually changed. + // The alternative would be to do an initial pass that would filter out key columns that have + // not actually changed. 
jsm.removeRight(pc, modifiedPreShift, rightSources, modifiedSlotTracker); } if (rightShifted.nonempty()) { final Index previousToShift = - rightRecorder.getParent().getIndex().getPrevIndex().minus(rightRemoved); + rightRecorder.getParent().getIndex().getPrevIndex().minus(rightRemoved); if (rightKeysModified) { previousToShift.remove(modifiedPreShift); @@ -896,11 +831,9 @@ protected void process() { final IndexShiftData.Iterator sit = rightShifted.applyIterator(); while (sit.hasNext()) { sit.next(); - final Index shiftedIndex = - previousToShift.subindexByKey(sit.beginRange(), sit.endRange()) + final Index shiftedIndex = previousToShift.subindexByKey(sit.beginRange(), sit.endRange()) .shift(sit.shiftDelta()); - jsm.applyRightShift(pc, rightSources, shiftedIndex, sit.shiftDelta(), - modifiedSlotTracker); + jsm.applyRightShift(pc, rightSources, shiftedIndex, sit.shiftDelta(), modifiedSlotTracker); } } @@ -927,25 +860,22 @@ protected void process() { final ModifiedColumnSet leftModifiedColumns = leftRecorder.getModifiedColumnSet(); final boolean leftAdditions = leftAdded.nonempty(); final boolean leftKeyModifications = - leftModified.nonempty() && leftModifiedColumns.containsAny(leftKeyColumns); + leftModified.nonempty() && leftModifiedColumns.containsAny(leftKeyColumns); final boolean newLeftRedirections = leftAdditions || leftKeyModifications; - final long buildSize = - Math.max(leftAdded.size(), leftKeyModifications ? leftModified.size() : 0); + final long buildSize = Math.max(leftAdded.size(), leftKeyModifications ? leftModified.size() : 0); final long probeSize = UpdateSizeCalculator.chunkSize( - Math.max(leftRemoved.size(), leftKeyModifications ? leftModified.size() : 0), - leftShifted, IncrementalChunkedNaturalJoinStateManager.CHUNK_SIZE); + Math.max(leftRemoved.size(), leftKeyModifications ? leftModified.size() : 0), leftShifted, + IncrementalChunkedNaturalJoinStateManager.CHUNK_SIZE); - final LongArraySource leftRedirections = - newLeftRedirections ? 
new LongArraySource() : null; + final LongArraySource leftRedirections = newLeftRedirections ? new LongArraySource() : null; if (leftRedirections != null) { leftRedirections.ensureCapacity(buildSize); } - try ( - final IncrementalChunkedNaturalJoinStateManager.ProbeContext pc = + try (final IncrementalChunkedNaturalJoinStateManager.ProbeContext pc = probeSize == 0 ? null : jsm.makeProbeContext(leftSources, probeSize); - final IncrementalChunkedNaturalJoinStateManager.BuildContext bc = - buildSize == 0 ? null : jsm.makeBuildContext(leftSources, buildSize)) { + final IncrementalChunkedNaturalJoinStateManager.BuildContext bc = + buildSize == 0 ? null : jsm.makeBuildContext(leftSources, buildSize)) { leftRemoved.forAllLongs(redirectionIndex::removeVoid); jsm.removeLeft(pc, leftRemoved, leftSources); @@ -966,8 +896,7 @@ protected void process() { } if (leftShifted.nonempty()) { - try (final Index prevIndex = - leftRecorder.getParent().getIndex().getPrevIndex()) { + try (final Index prevIndex = leftRecorder.getParent().getIndex().getPrevIndex()) { prevIndex.remove(leftRemoved); if (leftKeyModifications) { @@ -978,11 +907,9 @@ protected void process() { final IndexShiftData.Iterator sit = leftShifted.applyIterator(); while (sit.hasNext()) { sit.next(); - try (final Index shiftedIndex = - prevIndex.subindexByKey(sit.beginRange(), sit.endRange()) - .shift(sit.shiftDelta())) { - jsm.applyLeftShift(pc, leftSources, shiftedIndex, - sit.shiftDelta()); + try (final Index shiftedIndex = prevIndex + .subindexByKey(sit.beginRange(), sit.endRange()).shift(sit.shiftDelta())) { + jsm.applyLeftShift(pc, leftSources, shiftedIndex, sit.shiftDelta()); } } @@ -992,31 +919,26 @@ protected void process() { if (leftKeyModifications) { // add post-shift modified - jsm.addLeftSide(bc, leftModified, leftSources, leftRedirections, - modifiedSlotTracker); + jsm.addLeftSide(bc, leftModified, leftSources, leftRedirections, modifiedSlotTracker); copyRedirections(leftModified, leftRedirections); - // 
TODO: This column mask could be made better if we were to keep more - // careful track of the original left hash slots during removal. - // We are almost able to fix this, because we know the hash slot and the - // result redirection for the left modified row; which is the new value. - // We could get the hash slot from the removal, and compare them, but the - // hash slot outside of a modified slot tracker is unstable [and we don’t - // want two of them]. - // On removal, we could ask our modified slot tracker if, (i) our cookie is - // valid, and if so (ii) what the original right value was what the right - // value was - // [presuming we add that for right side point 1]. This would let us report - // our original redirection index as part of the jsm.removeLeft. We could - // then compare - // the old redirections to the new redirections, only lighting up - // allRightColumns if there was indeed a change. + // TODO: This column mask could be made better if we were to keep more careful track of the + // original left hash slots during removal. + // We are almost able to fix this, because we know the hash slot and the result redirection for + // the left modified row; which is the new value. + // We could get the hash slot from the removal, and compare them, but the hash slot outside of a + // modified slot tracker is unstable [and we don’t want two of them]. + // On removal, we could ask our modified slot tracker if, (i) our cookie is valid, and if so + // (ii) what the original right value was what the right value was + // [presuming we add that for right side point 1]. This would let us report our original + // redirection index as part of the jsm.removeLeft. We could then compare + // the old redirections to the new redirections, only lighting up allRightColumns if there was + // indeed a change. 
result.modifiedColumnSet.setAll(allRightColumns); } if (leftAdditions) { - jsm.addLeftSide(bc, leftAdded, leftSources, leftRedirections, - modifiedSlotTracker); + jsm.addLeftSide(bc, leftAdded, leftSources, leftRedirections, modifiedSlotTracker); copyRedirections(leftAdded, leftRedirections); } } @@ -1027,8 +949,8 @@ protected void process() { modifiedLeftBuilder.addIndex(leftModified); } - final ModifiedSlotUpdater slotUpdater = new ModifiedSlotUpdater(jsm, - modifiedLeftBuilder, redirectionIndex, exactMatch, addedRightColumnsChanged); + final ModifiedSlotUpdater slotUpdater = new ModifiedSlotUpdater(jsm, modifiedLeftBuilder, redirectionIndex, + exactMatch, addedRightColumnsChanged); modifiedSlotTracker.forAllModifiedSlots(slotUpdater); if (slotUpdater.changedRedirection) { result.modifiedColumnSet.setAll(allRightColumns); @@ -1038,13 +960,11 @@ protected void process() { modifiedLeft.retain(result.getIndex()); modifiedLeft.remove(leftRecorder.getAdded()); - result.notifyListeners( - new ShiftAwareListener.Update(leftAdded.clone(), leftRemoved.clone(), modifiedLeft, + result.notifyListeners(new ShiftAwareListener.Update(leftAdded.clone(), leftRemoved.clone(), modifiedLeft, leftShifted, result.modifiedColumnSet)); } - private void copyRedirections(final Index leftRows, - @NotNull final LongArraySource leftRedirections) { + private void copyRedirections(final Index leftRows, @NotNull final LongArraySource leftRedirections) { final MutableInt position = new MutableInt(0); leftRows.forAllLongs((long ll) -> { final long rightKey = leftRedirections.getLong(position.intValue()); diff --git a/DB/src/main/java/io/deephaven/db/v2/NaturalJoinModifiedSlotTracker.java b/DB/src/main/java/io/deephaven/db/v2/NaturalJoinModifiedSlotTracker.java index a67392cc1b5..d141cdeb748 100644 --- a/DB/src/main/java/io/deephaven/db/v2/NaturalJoinModifiedSlotTracker.java +++ b/DB/src/main/java/io/deephaven/db/v2/NaturalJoinModifiedSlotTracker.java @@ -5,11 +5,10 @@ /** * A tracker for modified 
join hash table slots. * - * After adding an entry, you get back a cookie, which must be passed in on future modification - * operations for that slot. + * After adding an entry, you get back a cookie, which must be passed in on future modification operations for that + * slot. * - * To process the entries after modifications are complete, call - * {@link #forAllModifiedSlots(ModifiedSlotConsumer)}. + * To process the entries after modifications are complete, call {@link #forAllModifiedSlots(ModifiedSlotConsumer)}. */ class NaturalJoinModifiedSlotTracker { private static final int CHUNK_SIZE = 4096; @@ -17,15 +16,13 @@ class NaturalJoinModifiedSlotTracker { /** the original right values, parallel to modifiedSlots. */ private final LongArraySource originalRightValues = new LongArraySource(); /** - * the location that we must write to in modified slots; also if we have a pointer that falls - * outside the range [0, pointer); then we know it is invalid + * the location that we must write to in modified slots; also if we have a pointer that falls outside the range [0, + * pointer); then we know it is invalid */ private long pointer; /** how many slots we have allocated */ private long allocated; - /** - * Each time we clear, we add an offset to our cookies, this prevents us from reading old values - */ + /** Each time we clear, we add an offset to our cookies, this prevents us from reading old values */ private long cookieGeneration; private static final int FLAG_SHIFT = 16; @@ -47,13 +44,12 @@ void clear() { } /** - * Is this cookie within our valid range (greater than or equal to our generation, but less than - * the pointer after adjustment? + * Is this cookie within our valid range (greater than or equal to our generation, but less than the pointer after + * adjustment? 
* * @param cookie the cookie to check for validity * - * @return true if the cookie is from the current generation, and references a valid slot in our - * table + * @return true if the cookie is from the current generation, and references a valid slot in our table */ private boolean isValidCookie(long cookie) { return cookie >= cookieGeneration && getPointerFromCookie(cookie) < pointer; @@ -83,8 +79,8 @@ private long getPointerFromCookie(long cookie) { * Add a slot in the main table. * * @param slot the slot to add. - * @param originalRightValue if we are the addition of the slot, what the right value was before - * our modification (otherwise ignored) + * @param originalRightValue if we are the addition of the slot, what the right value was before our modification + * (otherwise ignored) * @param flags the flags to or into our state * * @return the cookie for future access @@ -105,13 +101,12 @@ long addMain(final long cookie, final long slot, final long originalRightValue, * Add a slot in the overflow table. * * @param overflow the slot to add (0...n in the overflow table). - * @param originalRightValue if we are the addition of the slot, what the right value was before - * our modification (otherwise ignored) + * @param originalRightValue if we are the addition of the slot, what the right value was before our modification + * (otherwise ignored) * * @return the cookie for future access */ - long addOverflow(final long cookie, final long overflow, final long originalRightValue, - byte flags) { + long addOverflow(final long cookie, final long overflow, final long originalRightValue, byte flags) { final long slot = IncrementalChunkedNaturalJoinStateManager.overflowToSlot(overflow); if (originalRightValue < 0) { flags |= FLAG_RIGHT_ADD; @@ -144,8 +139,8 @@ private long updateFlags(final long cookie, byte flags) { /** * For each main and overflow value, call slotConsumer. * - * Main values are represented as values >= 0. 
Overflow values are represented as negative - * values according to {@link IncrementalChunkedNaturalJoinStateManager#overflowToSlot(long)}. + * Main values are represented as values >= 0. Overflow values are represented as negative values according to + * {@link IncrementalChunkedNaturalJoinStateManager#overflowToSlot(long)}. * * @param slotConsumer the consumer of our values */ @@ -164,8 +159,7 @@ void forAllModifiedSlots(ModifiedSlotConsumer slotConsumer) { * @param oldTableLocation the old hash slot * @param newTableLocation the new hash slot */ - void moveTableLocation(long cookie, @SuppressWarnings("unused") long oldTableLocation, - long newTableLocation) { + void moveTableLocation(long cookie, @SuppressWarnings("unused") long oldTableLocation, long newTableLocation) { if (isValidCookie(cookie)) { final long pointer = getPointerFromCookie(cookie); final long existingSlotAndFlag = modifiedSlots.getLong(pointer); @@ -181,8 +175,7 @@ void moveTableLocation(long cookie, @SuppressWarnings("unused") long oldTableLoc * @param overflowLocation the old overflow location * @param tableLocation the new table location */ - void promoteFromOverflow(long cookie, @SuppressWarnings("unused") long overflowLocation, - long tableLocation) { + void promoteFromOverflow(long cookie, @SuppressWarnings("unused") long overflowLocation, long tableLocation) { if (isValidCookie(cookie)) { final long pointer = getPointerFromCookie(cookie); final long existingSlotAndFlag = modifiedSlots.getLong(pointer); diff --git a/DB/src/main/java/io/deephaven/db/v2/NoSuchColumnException.java b/DB/src/main/java/io/deephaven/db/v2/NoSuchColumnException.java index 8ecb5464d6a..8aa3fd8d36f 100644 --- a/DB/src/main/java/io/deephaven/db/v2/NoSuchColumnException.java +++ b/DB/src/main/java/io/deephaven/db/v2/NoSuchColumnException.java @@ -15,11 +15,9 @@ public class NoSuchColumnException extends IllegalArgumentException { * @param presentColumns the column names present in the table * @param requestedColumns 
the request column names that were not found */ - public NoSuchColumnException(Collection presentColumns, - Collection requestedColumns) { + public NoSuchColumnException(Collection presentColumns, Collection requestedColumns) { super("Unknown column names [" + StringUtils.joinStrings(requestedColumns, ",") - + "], available column names are [" + StringUtils.joinStrings(presentColumns, ",") - + "]"); + + "], available column names are [" + StringUtils.joinStrings(presentColumns, ",") + "]"); } /** diff --git a/DB/src/main/java/io/deephaven/db/v2/NotificationStepReceiver.java b/DB/src/main/java/io/deephaven/db/v2/NotificationStepReceiver.java index beed5d96941..ecc29fdf736 100644 --- a/DB/src/main/java/io/deephaven/db/v2/NotificationStepReceiver.java +++ b/DB/src/main/java/io/deephaven/db/v2/NotificationStepReceiver.java @@ -1,8 +1,7 @@ package io.deephaven.db.v2; /** - * Used by {@link SwapListenerBase swap listeners} to set the notification step of elements in our - * DAG. + * Used by {@link SwapListenerBase swap listeners} to set the notification step of elements in our DAG. */ public interface NotificationStepReceiver { diff --git a/DB/src/main/java/io/deephaven/db/v2/PartitionAwareSourceTable.java b/DB/src/main/java/io/deephaven/db/v2/PartitionAwareSourceTable.java index 23557870b39..1e6269653fa 100644 --- a/DB/src/main/java/io/deephaven/db/v2/PartitionAwareSourceTable.java +++ b/DB/src/main/java/io/deephaven/db/v2/PartitionAwareSourceTable.java @@ -30,8 +30,8 @@ import java.util.stream.StreamSupport; /** - * A source table that can filter partitions before coalescing. Refer to {@link TableLocationKey} - * for an explanation of partitioning. + * A source table that can filter partitions before coalescing. Refer to {@link TableLocationKey} for an explanation of + * partitioning. 
*/ public class PartitionAwareSourceTable extends SourceTable { @@ -42,70 +42,65 @@ public class PartitionAwareSourceTable extends SourceTable { * @param tableDefinition A TableDefinition * @param description A human-readable description for this table * @param componentFactory A component factory for creating column source managers - * @param locationProvider A TableLocationProvider, for use in discovering the locations that - * compose this table - * @param liveTableRegistrar Callback for registering live tables for refreshes, null if this - * table is not live + * @param locationProvider A TableLocationProvider, for use in discovering the locations that compose this table + * @param liveTableRegistrar Callback for registering live tables for refreshes, null if this table is not live */ public PartitionAwareSourceTable(@NotNull final TableDefinition tableDefinition, - @NotNull final String description, - @NotNull final SourceTableComponentFactory componentFactory, - @NotNull final TableLocationProvider locationProvider, - @Nullable final LiveTableRegistrar liveTableRegistrar) { + @NotNull final String description, + @NotNull final SourceTableComponentFactory componentFactory, + @NotNull final TableLocationProvider locationProvider, + @Nullable final LiveTableRegistrar liveTableRegistrar) { this(tableDefinition, - description, - componentFactory, - locationProvider, - liveTableRegistrar, - extractPartitioningColumnDefinitions(tableDefinition)); + description, + componentFactory, + locationProvider, + liveTableRegistrar, + extractPartitioningColumnDefinitions(tableDefinition)); } PartitionAwareSourceTable(@NotNull final TableDefinition tableDefinition, - @NotNull final String description, - @NotNull final SourceTableComponentFactory componentFactory, - @NotNull final TableLocationProvider locationProvider, - @Nullable final LiveTableRegistrar liveTableRegistrar, - @NotNull final Map partitioningColumnDefinitions, - @Nullable final SelectFilter... 
partitioningColumnFilters) { + @NotNull final String description, + @NotNull final SourceTableComponentFactory componentFactory, + @NotNull final TableLocationProvider locationProvider, + @Nullable final LiveTableRegistrar liveTableRegistrar, + @NotNull final Map partitioningColumnDefinitions, + @Nullable final SelectFilter... partitioningColumnFilters) { super(tableDefinition, description, componentFactory, locationProvider, liveTableRegistrar); this.partitioningColumnDefinitions = partitioningColumnDefinitions; this.partitioningColumnFilters = partitioningColumnFilters; } protected PartitionAwareSourceTable newInstance(@NotNull final TableDefinition tableDefinition, - @NotNull final String description, - @NotNull final SourceTableComponentFactory componentFactory, - @NotNull final TableLocationProvider locationProvider, - @Nullable final LiveTableRegistrar liveTableRegistrar, - @NotNull final Map partitioningColumnDefinitions, - @Nullable final SelectFilter... partitioningColumnFilters) { - return new PartitionAwareSourceTable(tableDefinition, description, componentFactory, - locationProvider, liveTableRegistrar, partitioningColumnDefinitions, - partitioningColumnFilters); + @NotNull final String description, + @NotNull final SourceTableComponentFactory componentFactory, + @NotNull final TableLocationProvider locationProvider, + @Nullable final LiveTableRegistrar liveTableRegistrar, + @NotNull final Map partitioningColumnDefinitions, + @Nullable final SelectFilter... partitioningColumnFilters) { + return new PartitionAwareSourceTable(tableDefinition, description, componentFactory, locationProvider, + liveTableRegistrar, partitioningColumnDefinitions, partitioningColumnFilters); } private PartitionAwareSourceTable getFilteredTable( - @NotNull final SelectFilter... additionalPartitioningColumnFilters) { + @NotNull final SelectFilter... 
additionalPartitioningColumnFilters) { SelectFilter[] resultPartitioningColumnFilters = - new SelectFilter[partitioningColumnFilters.length - + additionalPartitioningColumnFilters.length]; + new SelectFilter[partitioningColumnFilters.length + additionalPartitioningColumnFilters.length]; System.arraycopy(partitioningColumnFilters, 0, resultPartitioningColumnFilters, 0, - partitioningColumnFilters.length); + partitioningColumnFilters.length); System.arraycopy(additionalPartitioningColumnFilters, 0, resultPartitioningColumnFilters, - partitioningColumnFilters.length, additionalPartitioningColumnFilters.length); + partitioningColumnFilters.length, additionalPartitioningColumnFilters.length); return newInstance(definition, - description + ".where(" + Arrays.deepToString(additionalPartitioningColumnFilters) - + ')', - componentFactory, locationProvider, liveTableRegistrar, partitioningColumnDefinitions, - resultPartitioningColumnFilters); + description + ".where(" + Arrays.deepToString(additionalPartitioningColumnFilters) + ')', + componentFactory, locationProvider, liveTableRegistrar, partitioningColumnDefinitions, + resultPartitioningColumnFilters); } private static Map extractPartitioningColumnDefinitions( - @NotNull final TableDefinition tableDefinition) { + @NotNull final TableDefinition tableDefinition) { return tableDefinition.getColumnStream() - .filter(ColumnDefinition::isPartitioning) - .collect(Collectors.toMap(ColumnDefinition::getName, Function.identity(), - Assert::neverInvoked, LinkedHashMap::new)); + .filter(ColumnDefinition::isPartitioning) + .collect(Collectors.toMap(ColumnDefinition::getName, Function.identity(), Assert::neverInvoked, + LinkedHashMap::new)); } private static class PartitionAwareQueryTableReference extends QueryTableReference { @@ -122,18 +117,18 @@ public TableAndRemainingFilters getWithWhere(SelectFilter... 
selectFilters) { List groupingColumns = table.getDefinition().getGroupingColumns(); Set groupingColumnNames = - groupingColumns.stream().map(ColumnDefinition::getName).collect(Collectors.toSet()); + groupingColumns.stream().map(ColumnDefinition::getName).collect(Collectors.toSet()); for (SelectFilter filter : selectFilters) { filter.init(table.definition); List columns = filter.getColumns(); if (filter instanceof ReindexingFilter) { otherFilters.add(filter); - } else if (((PartitionAwareSourceTable) table) - .isValidAgainstColumnPartitionTable(columns, filter.getColumnArrays())) { + } else if (((PartitionAwareSourceTable) table).isValidAgainstColumnPartitionTable(columns, + filter.getColumnArrays())) { partitionFilters.add(filter); } else if (filter.isSimpleFilter() && (columns.size() == 1) - && (groupingColumnNames.contains(columns.get(0)))) { + && (groupingColumnNames.contains(columns.get(0)))) { groupFilters.add(filter); } else { otherFilters.add(filter); @@ -141,22 +136,17 @@ public TableAndRemainingFilters getWithWhere(SelectFilter... selectFilters) { } final Table result = partitionFilters.isEmpty() ? table.coalesce() - : table - .where(partitionFilters.toArray(SelectFilter.ZERO_LENGTH_SELECT_FILTER_ARRAY)); - - // put the other filters onto the end of the grouping filters, this means that the group - // filters should - // go first, which should be preferable to having them second. This is basically the - // first query - // optimization that we're doing for the user, so maybe it is a good thing but maybe - // not. The reason we do - // it, is that we have deferred the filters for the users permissions, and they did not - // have the opportunity + : table.where(partitionFilters.toArray(SelectFilter.ZERO_LENGTH_SELECT_FILTER_ARRAY)); + + // put the other filters onto the end of the grouping filters, this means that the group filters should + // go first, which should be preferable to having them second. 
This is basically the first query + // optimization that we're doing for the user, so maybe it is a good thing but maybe not. The reason we do + // it, is that we have deferred the filters for the users permissions, and they did not have the opportunity // to properly filter the data yet at this point. groupFilters.addAll(otherFilters); return new TableAndRemainingFilters(result, - groupFilters.toArray(SelectFilter.ZERO_LENGTH_SELECT_FILTER_ARRAY)); + groupFilters.toArray(SelectFilter.ZERO_LENGTH_SELECT_FILTER_ARRAY)); } @Override @@ -167,8 +157,8 @@ public Table selectDistinct(SelectColumn[] selectColumns) { } catch (Exception e) { return null; } - if (!((PartitionAwareSourceTable) table).isValidAgainstColumnPartitionTable( - selectColumn.getColumns(), selectColumn.getColumnArrays())) { + if (!((PartitionAwareSourceTable) table).isValidAgainstColumnPartitionTable(selectColumn.getColumns(), + selectColumn.getColumnArrays())) { return null; } } @@ -183,101 +173,89 @@ protected final BaseTable redefine(@NotNull final TableDefinition newDefinition) // Nothing changed - we have the same columns in the same order. return this; } - if (newDefinition.getColumns().length == definition.getColumns().length || newDefinition - .getPartitioningColumns().size() == partitioningColumnDefinitions.size()) { - // Nothing changed except ordering, *or* some columns were dropped but the partitioning - // column was retained. + if (newDefinition.getColumns().length == definition.getColumns().length + || newDefinition.getPartitioningColumns().size() == partitioningColumnDefinitions.size()) { + // Nothing changed except ordering, *or* some columns were dropped but the partitioning column was retained. 
return newInstance(newDefinition, - description + "-retainColumns", - componentFactory, locationProvider, liveTableRegistrar, - partitioningColumnDefinitions, partitioningColumnFilters); + description + "-retainColumns", + componentFactory, locationProvider, liveTableRegistrar, partitioningColumnDefinitions, + partitioningColumnFilters); } // Some partitioning columns are gone - defer dropping them. - final List newColumnDefinitions = - new ArrayList<>(newDefinition.getColumnList()); + final List newColumnDefinitions = new ArrayList<>(newDefinition.getColumnList()); final Map retainedPartitioningColumnDefinitions = - extractPartitioningColumnDefinitions(newDefinition); - final Collection droppedPartitioningColumnDefinitions = - partitioningColumnDefinitions.values().stream() - .filter(cd -> !retainedPartitioningColumnDefinitions.containsKey(cd.getName())) + extractPartitioningColumnDefinitions(newDefinition); + final Collection droppedPartitioningColumnDefinitions = partitioningColumnDefinitions.values() + .stream().filter(cd -> !retainedPartitioningColumnDefinitions.containsKey(cd.getName())) .collect(Collectors.toList()); newColumnDefinitions.addAll(droppedPartitioningColumnDefinitions); - final PartitionAwareSourceTable redefined = - newInstance(new TableDefinition(newColumnDefinitions), + final PartitionAwareSourceTable redefined = newInstance(new TableDefinition(newColumnDefinitions), description + "-retainColumns", - componentFactory, locationProvider, liveTableRegistrar, - partitioningColumnDefinitions, partitioningColumnFilters); - final DeferredViewTable deferredViewTable = new DeferredViewTable(newDefinition, - description + "-retainColumns", new PartitionAwareQueryTableReference(redefined), - droppedPartitioningColumnDefinitions.stream().map(ColumnDefinition::getName) - .toArray(String[]::new), - null, null); + componentFactory, locationProvider, liveTableRegistrar, partitioningColumnDefinitions, + partitioningColumnFilters); + final DeferredViewTable 
deferredViewTable = new DeferredViewTable(newDefinition, description + "-retainColumns", + new PartitionAwareQueryTableReference(redefined), + droppedPartitioningColumnDefinitions.stream().map(ColumnDefinition::getName).toArray(String[]::new), + null, null); deferredViewTable.setRefreshing(isRefreshing()); return deferredViewTable; } @Override - protected final Table redefine(TableDefinition newDefinitionExternal, - TableDefinition newDefinitionInternal, SelectColumn[] viewColumns, - Map> columnDependency) { + protected final Table redefine(TableDefinition newDefinitionExternal, TableDefinition newDefinitionInternal, + SelectColumn[] viewColumns, Map> columnDependency) { BaseTable redefined = redefine(newDefinitionInternal); DeferredViewTable.TableReference reference = redefined instanceof PartitionAwareSourceTable - ? new PartitionAwareQueryTableReference((PartitionAwareSourceTable) redefined) - : new DeferredViewTable.SimpleTableReference(redefined); - DeferredViewTable deferredViewTable = new DeferredViewTable(newDefinitionExternal, - description + "-redefined", reference, null, viewColumns, null); + ? 
new PartitionAwareQueryTableReference((PartitionAwareSourceTable) redefined) + : new DeferredViewTable.SimpleTableReference(redefined); + DeferredViewTable deferredViewTable = new DeferredViewTable(newDefinitionExternal, description + "-redefined", + reference, null, viewColumns, null); deferredViewTable.setRefreshing(isRefreshing()); return deferredViewTable; } - private static final String LOCATION_KEY_COLUMN_NAME = - "__PartitionAwareSourceTable_TableLocationKey__"; + private static final String LOCATION_KEY_COLUMN_NAME = "__PartitionAwareSourceTable_TableLocationKey__"; @SuppressWarnings("unchecked") - private static ColumnSource makePartitionSource( - @NotNull final ColumnDefinition columnDefinition, - @NotNull final Collection locationKeys) { + private static ColumnSource makePartitionSource(@NotNull final ColumnDefinition columnDefinition, + @NotNull final Collection locationKeys) { final Class dataType = columnDefinition.getDataType(); final String partitionKey = columnDefinition.getName(); final WritableSource result = - ArrayBackedColumnSource.getMemoryColumnSource(locationKeys.size(), dataType, null); + ArrayBackedColumnSource.getMemoryColumnSource(locationKeys.size(), dataType, null); final MutableLong nextIndex = new MutableLong(0L); locationKeys.stream() - .map(lk -> (T) lk.getPartitionValue(partitionKey)) - .forEach((final T partitionValue) -> result.set(nextIndex.getAndIncrement(), - partitionValue)); + .map(lk -> (T) lk.getPartitionValue(partitionKey)) + .forEach((final T partitionValue) -> result.set(nextIndex.getAndIncrement(), partitionValue)); return result; } @Override protected final Collection filterLocationKeys( - @NotNull final Collection foundLocationKeys) { + @NotNull final Collection foundLocationKeys) { if (partitioningColumnFilters.length == 0) { return foundLocationKeys; } - // TODO (https://github.com/deephaven/deephaven-core/issues/867): Refactor around a ticking - // partition table + // TODO 
(https://github.com/deephaven/deephaven-core/issues/867): Refactor around a ticking partition table final List partitionTableColumnNames = Stream.concat( - partitioningColumnDefinitions.keySet().stream(), - Stream.of(LOCATION_KEY_COLUMN_NAME)).collect(Collectors.toList()); + partitioningColumnDefinitions.keySet().stream(), + Stream.of(LOCATION_KEY_COLUMN_NAME)).collect(Collectors.toList()); final List partitionTableColumnSources = - new ArrayList<>(partitioningColumnDefinitions.size() + 1); + new ArrayList<>(partitioningColumnDefinitions.size() + 1); for (final ColumnDefinition columnDefinition : partitioningColumnDefinitions.values()) { // noinspection unchecked - partitionTableColumnSources - .add(makePartitionSource(columnDefinition, foundLocationKeys)); + partitionTableColumnSources.add(makePartitionSource(columnDefinition, foundLocationKeys)); } - partitionTableColumnSources.add(ArrayBackedColumnSource - .getMemoryColumnSource(foundLocationKeys, ImmutableTableLocationKey.class, null)); + partitionTableColumnSources.add(ArrayBackedColumnSource.getMemoryColumnSource(foundLocationKeys, + ImmutableTableLocationKey.class, null)); final Table filteredColumnPartitionTable = TableTools - .newTable(foundLocationKeys.size(), partitionTableColumnNames, - partitionTableColumnSources) - .where(partitioningColumnFilters); + .newTable(foundLocationKeys.size(), partitionTableColumnNames, partitionTableColumnSources) + .where(partitioningColumnFilters); if (filteredColumnPartitionTable.size() == foundLocationKeys.size()) { return foundLocationKeys; } final Iterable iterable = - () -> filteredColumnPartitionTable.columnIterator(LOCATION_KEY_COLUMN_NAME); + () -> filteredColumnPartitionTable.columnIterator(LOCATION_KEY_COLUMN_NAME); return StreamSupport.stream(iterable.spliterator(), false).collect(Collectors.toList()); } @@ -293,7 +271,7 @@ public final Table where(SelectFilter... 
filters) { List groupingColumns = definition.getGroupingColumns(); Set groupingColumnNames = - groupingColumns.stream().map(ColumnDefinition::getName).collect(Collectors.toSet()); + groupingColumns.stream().map(ColumnDefinition::getName).collect(Collectors.toSet()); for (SelectFilter filter : filters) { filter.init(definition); @@ -303,30 +281,27 @@ public final Table where(SelectFilter... filters) { } else if (isValidAgainstColumnPartitionTable(columns, filter.getColumnArrays())) { partitionFilters.add(filter); } else if (filter.isSimpleFilter() && (columns.size() == 1) - && (groupingColumnNames.contains(columns.get(0)))) { + && (groupingColumnNames.contains(columns.get(0)))) { groupFilters.add(filter); } else { otherFilters.add(filter); } } - // if there was nothing that actually required the partition, defer the result. This is - // different than V1, and + // if there was nothing that actually required the partition, defer the result. This is different than V1, and // is actually different than the old behavior as well. 
if (partitionFilters.isEmpty()) { DeferredViewTable deferredViewTable = - new DeferredViewTable(definition, description + "-withDeferredFilters", - new PartitionAwareQueryTableReference(this), null, null, filters); + new DeferredViewTable(definition, description + "-withDeferredFilters", + new PartitionAwareQueryTableReference(this), null, null, filters); deferredViewTable.setRefreshing(isRefreshing()); return deferredViewTable; } - SelectFilter[] partitionFilterArray = - partitionFilters.toArray(SelectFilter.ZERO_LENGTH_SELECT_FILTER_ARRAY); - final String filteredTableDescription = - "getFilteredTable(" + Arrays.toString(partitionFilterArray) + ")"; + SelectFilter[] partitionFilterArray = partitionFilters.toArray(SelectFilter.ZERO_LENGTH_SELECT_FILTER_ARRAY); + final String filteredTableDescription = "getFilteredTable(" + Arrays.toString(partitionFilterArray) + ")"; SourceTable filteredTable = QueryPerformanceRecorder.withNugget(filteredTableDescription, - () -> getFilteredTable(partitionFilterArray)); + () -> getFilteredTable(partitionFilterArray)); copyAttributes(filteredTable, CopyAttributeOperation.Filter); @@ -334,53 +309,47 @@ public final Table where(SelectFilter... 
filters) { groupFilters.addAll(otherFilters); if (groupFilters.isEmpty()) { - return QueryPerformanceRecorder.withNugget( - description + filteredTableDescription + ".coalesce()", filteredTable::coalesce); + return QueryPerformanceRecorder.withNugget(description + filteredTableDescription + ".coalesce()", + filteredTable::coalesce); } - return QueryPerformanceRecorder.withNugget( - description + ".coalesce().where(" + groupFilters + ")", () -> filteredTable.coalesce() - .where(groupFilters.toArray(SelectFilter.ZERO_LENGTH_SELECT_FILTER_ARRAY))); + return QueryPerformanceRecorder.withNugget(description + ".coalesce().where(" + groupFilters + ")", + () -> filteredTable.coalesce() + .where(groupFilters.toArray(SelectFilter.ZERO_LENGTH_SELECT_FILTER_ARRAY))); } @Override public final Table selectDistinct(@NotNull final SelectColumn... columns) { for (SelectColumn selectColumn : columns) { selectColumn.initDef(definition.getColumnNameMap()); - if (!isValidAgainstColumnPartitionTable(selectColumn.getColumns(), - selectColumn.getColumnArrays())) { + if (!isValidAgainstColumnPartitionTable(selectColumn.getColumns(), selectColumn.getColumnArrays())) { return super.selectDistinct(columns); } } initializeAvailableLocations(); final List existingLocationKeys = - columnSourceManager.allLocations().stream().filter(tl -> { - tl.refresh(); - final long size = tl.getSize(); - // noinspection ConditionCoveredByFurtherCondition - return size != TableLocation.NULL_SIZE && size > 0; - }).map(TableLocation::getKey).collect(Collectors.toList()); - final List partitionTableColumnNames = - new ArrayList<>(partitioningColumnDefinitions.keySet()); - final List partitionTableColumnSources = - new ArrayList<>(partitioningColumnDefinitions.size()); + columnSourceManager.allLocations().stream().filter(tl -> { + tl.refresh(); + final long size = tl.getSize(); + // noinspection ConditionCoveredByFurtherCondition + return size != TableLocation.NULL_SIZE && size > 0; + 
}).map(TableLocation::getKey).collect(Collectors.toList()); + final List partitionTableColumnNames = new ArrayList<>(partitioningColumnDefinitions.keySet()); + final List partitionTableColumnSources = new ArrayList<>(partitioningColumnDefinitions.size()); for (final ColumnDefinition columnDefinition : partitioningColumnDefinitions.values()) { // noinspection unchecked - partitionTableColumnSources - .add(makePartitionSource(columnDefinition, existingLocationKeys)); + partitionTableColumnSources.add(makePartitionSource(columnDefinition, existingLocationKeys)); } return TableTools - .newTable(existingLocationKeys.size(), partitionTableColumnNames, - partitionTableColumnSources) - .selectDistinct(columns); - // TODO (https://github.com/deephaven/deephaven-core/issues/867): Refactor around a ticking - // partition table - // TODO: Maybe just get rid of this implementation and coalesce? Partitioning columns are - // automatically grouped. Needs lazy region allocation. + .newTable(existingLocationKeys.size(), partitionTableColumnNames, partitionTableColumnSources) + .selectDistinct(columns); + // TODO (https://github.com/deephaven/deephaven-core/issues/867): Refactor around a ticking partition table + // TODO: Maybe just get rid of this implementation and coalesce? Partitioning columns are automatically grouped. + // Needs lazy region allocation. } private boolean isValidAgainstColumnPartitionTable(@NotNull final List columnNames, - @NotNull final List columnArrayNames) { + @NotNull final List columnArrayNames) { if (columnArrayNames.size() > 0) { return false; } diff --git a/DB/src/main/java/io/deephaven/db/v2/QueryTable.java b/DB/src/main/java/io/deephaven/db/v2/QueryTable.java index b452a2b0c4f..e50b84742f0 100644 --- a/DB/src/main/java/io/deephaven/db/v2/QueryTable.java +++ b/DB/src/main/java/io/deephaven/db/v2/QueryTable.java @@ -77,14 +77,14 @@ public Result(final @NotNull T resultNode) { } /** - * Construct the result of an operation. 
The listener may be null if the parent is - * non-ticking and the table does not need to respond to ticks from other sources. + * Construct the result of an operation. The listener may be null if the parent is non-ticking and the table + * does not need to respond to ticks from other sources. * * @param resultNode the result of the operation * @param resultListener the listener that should be attached to the parent (or null) */ public Result(final @NotNull T resultNode, - final @Nullable ShiftAwareListener resultListener) { + final @Nullable ShiftAwareListener resultListener) { this.resultNode = resultNode; this.resultListener = resultListener; } @@ -108,16 +108,14 @@ default ShiftAwareSwapListener newSwapListener(final QueryTable queryTable) { * Initialize this operation. * * @param usePrev data from the previous cycle should be used (otherwise use this cycle) - * @param beforeClock the clock value that we captured before the function began; the - * function can use this value to bail out early if it notices something has gone - * wrong. + * @param beforeClock the clock value that we captured before the function began; the function can use this + * value to bail out early if it notices something has gone wrong. * @return the result table / listener if successful, null if it should be retried. 
*/ Result initialize(boolean usePrev, long beforeClock); } - public interface MemoizableOperation - extends Operation { + public interface MemoizableOperation extends Operation { /** * @return the key that should be used to memoize off of */ @@ -142,46 +140,45 @@ public interface MemoizableOperation> cachedOperations; @@ -195,11 +192,9 @@ public QueryTable(Index index, Map columns) { * * @param definition the definition to use for this table * @param index the index of the new table - * @param columns the column source map for the table, which will be copied into a new column - * source map + * @param columns the column source map for the table, which will be copied into a new column source map */ - public QueryTable(TableDefinition definition, Index index, - Map columns) { + public QueryTable(TableDefinition definition, Index index, Map columns) { this(definition, Require.neqNull(index, "index"), new LinkedHashMap<>(columns), null); } @@ -209,12 +204,10 @@ public QueryTable(TableDefinition definition, Index index, * @param definition the definition to use for this table * @param index the index of the new table * @param columns the column source map for the table, which is not copied. 
- * @param modifiedColumnSet optional {@link ModifiedColumnSet} that should be re-used if - * supplied + * @param modifiedColumnSet optional {@link ModifiedColumnSet} that should be re-used if supplied */ - private QueryTable(TableDefinition definition, Index index, - LinkedHashMap columns, - @Nullable ModifiedColumnSet modifiedColumnSet) { + private QueryTable(TableDefinition definition, Index index, LinkedHashMap columns, + @Nullable ModifiedColumnSet modifiedColumnSet) { super(definition, "QueryTable"); // TODO: Better descriptions composed from query chain this.index = index; this.columns = columns; @@ -226,15 +219,14 @@ private QueryTable(TableDefinition definition, Index index, } /** - * Create a new query table with the {@link ColumnDefinition ColumnDefinitions} of - * {@code template}, but in the order of {@code this}. The tables must be mutually compatible, - * as defined via {@link TableDefinition#checkCompatibility(TableDefinition)}. + * Create a new query table with the {@link ColumnDefinition ColumnDefinitions} of {@code template}, but in the + * order of {@code this}. The tables must be mutually compatible, as defined via + * {@link TableDefinition#checkCompatibility(TableDefinition)}. * * @param template the new definition template to use * @return the new query table - * @deprecated this is being used a workaround for testing purposes where previously mutations - * were being used at the {@link ColumnDefinition} level. Do not use this method - * without good reason. + * @deprecated this is being used a workaround for testing purposes where previously mutations were being used at + * the {@link ColumnDefinition} level. Do not use this method without good reason. 
*/ @Deprecated public QueryTable withDefinitionUnsafe(TableDefinition template) { @@ -251,8 +243,7 @@ private void initializeTransientFields() { } } - private void readObject(ObjectInputStream objectInputStream) - throws IOException, ClassNotFoundException { + private void readObject(ObjectInputStream objectInputStream) throws IOException, ClassNotFoundException { objectInputStream.defaultReadObject(); initializeTransientFields(); } @@ -271,8 +262,7 @@ public long size() { public ColumnSource getColumnSource(String sourceName) { final ColumnSource columnSource = columns.get(sourceName); if (columnSource == null) { - throw new NoSuchColumnException(columns.keySet(), - Collections.singletonList(sourceName)); + throw new NoSuchColumnException(columns.keySet(), Collections.singletonList(sourceName)); } return columnSource; } @@ -307,11 +297,10 @@ public ModifiedColumnSet newModifiedColumnSet(final String... columnNames) { } /** - * Producers of tables should use the modified column set embedded within the table for their - * result. + * Producers of tables should use the modified column set embedded within the table for their result. * - * You must not mutate the result of this method if you are not generating the updates for this - * table. Callers should not rely on the dirty state of this modified column set. + * You must not mutate the result of this method if you are not generating the updates for this table. Callers + * should not rely on the dirty state of this modified column set. 
* * @return the modified column set for this table */ @@ -321,19 +310,18 @@ public ModifiedColumnSet getModifiedColumnSetForUpdates() { @Override public ModifiedColumnSet.Transformer newModifiedColumnSetTransformer(final String[] columnNames, - final ModifiedColumnSet[] columnSets) { + final ModifiedColumnSet[] columnSets) { return modifiedColumnSet.newTransformer(columnNames, columnSets); } @Override public ModifiedColumnSet.Transformer newModifiedColumnSetIdentityTransformer( - final Map newColumns) { + final Map newColumns) { return modifiedColumnSet.newIdentityTransformer(newColumns); } @Override - public ModifiedColumnSet.Transformer newModifiedColumnSetIdentityTransformer( - final DynamicTable other) { + public ModifiedColumnSet.Transformer newModifiedColumnSetIdentityTransformer(final DynamicTable other) { if (other instanceof QueryTable) { return modifiedColumnSet.newIdentityTransformer(((QueryTable) other).columns); } @@ -344,7 +332,7 @@ public ModifiedColumnSet.Transformer newModifiedColumnSetIdentityTransformer( public Object[] getRecord(long rowNo, String... columnNames) { final long key = index.get(rowNo); return (columnNames.length > 0 ? Arrays.stream(columnNames).map(this::getColumnSource) - : columns.values().stream()).map(cs -> cs.get(key)).toArray(Object[]::new); + : columns.values().stream()).map(cs -> cs.get(key)).toArray(Object[]::new); } @Override @@ -353,75 +341,66 @@ public LocalTableMap byExternal(final boolean dropKeys, final String... 
keyColum throw streamUnsupported("byExternal"); } final SelectColumn[] groupByColumns = - Arrays.stream(keyColumnNames).map(SourceColumn::new).toArray(SelectColumn[]::new); + Arrays.stream(keyColumnNames).map(SourceColumn::new).toArray(SelectColumn[]::new); return memoizeResult(MemoizedOperationKey.byExternal(dropKeys, groupByColumns), - () -> QueryPerformanceRecorder.withNugget( - "byExternal(" + dropKeys + ", " + Arrays.toString(keyColumnNames) + ')', - sizeForInstrumentation(), - () -> ByExternalAggregationFactory.byExternal(this, dropKeys, - (pt, st) -> pt.copyAttributes(st, CopyAttributeOperation.ByExternal), - Collections.emptyList(), groupByColumns))); + () -> QueryPerformanceRecorder.withNugget( + "byExternal(" + dropKeys + ", " + Arrays.toString(keyColumnNames) + ')', + sizeForInstrumentation(), + () -> ByExternalAggregationFactory.byExternal(this, dropKeys, + (pt, st) -> pt.copyAttributes(st, CopyAttributeOperation.ByExternal), + Collections.emptyList(), groupByColumns))); } @Override public Table rollup(ComboAggregateFactory comboAggregateFactory, boolean includeConstituents, - SelectColumn... columns) { + SelectColumn... 
columns) { if (isStream() && includeConstituents) { throw streamUnsupported("rollup with included constituents"); } - return memoizeResult( - MemoizedOperationKey.rollup(comboAggregateFactory, columns, includeConstituents), - () -> { - final ComboAggregateFactory withRollup = - comboAggregateFactory.forRollup(includeConstituents); - ComboAggregateFactory aggregationStateFactory = withRollup; + return memoizeResult(MemoizedOperationKey.rollup(comboAggregateFactory, columns, includeConstituents), () -> { + final ComboAggregateFactory withRollup = comboAggregateFactory.forRollup(includeConstituents); + ComboAggregateFactory aggregationStateFactory = withRollup; - final QueryTable lowestLevel = byNoMemo(withRollup, columns); - // now we need to reaggregate at each of the levels, combining the results - final List reaggregateColumns = - new ArrayList<>(Arrays.asList(columns)); + final QueryTable lowestLevel = byNoMemo(withRollup, columns); + // now we need to reaggregate at each of the levels, combining the results + final List reaggregateColumns = new ArrayList<>(Arrays.asList(columns)); - final ComboAggregateFactory rollupFactory = withRollup.rollupFactory(); + final ComboAggregateFactory rollupFactory = withRollup.rollupFactory(); - final List nullColumns = new ArrayList<>(reaggregateColumns.size()); + final List nullColumns = new ArrayList<>(reaggregateColumns.size()); - QueryTable lastLevel = lowestLevel; - while (!reaggregateColumns.isEmpty()) { - final SelectColumn removedColumn = - reaggregateColumns.remove(reaggregateColumns.size() - 1); + QueryTable lastLevel = lowestLevel; + while (!reaggregateColumns.isEmpty()) { + final SelectColumn removedColumn = reaggregateColumns.remove(reaggregateColumns.size() - 1); - nullColumns.add(0, removedColumn.getName()); + nullColumns.add(0, removedColumn.getName()); - final Map nullColumnsMap = - new LinkedHashMap<>(nullColumns.size()); - final Table fLastLevel = lastLevel; - nullColumns.forEach(nc -> nullColumnsMap.put(nc, - 
fLastLevel.getDefinition().getColumn(nc).getDataType())); + final Map nullColumnsMap = new LinkedHashMap<>(nullColumns.size()); + final Table fLastLevel = lastLevel; + nullColumns + .forEach(nc -> nullColumnsMap.put(nc, fLastLevel.getDefinition().getColumn(nc).getDataType())); - aggregationStateFactory = rollupFactory.withNulls(nullColumnsMap); - lastLevel = lastLevel.byNoMemo(aggregationStateFactory, + aggregationStateFactory = rollupFactory.withNulls(nullColumnsMap); + lastLevel = lastLevel.byNoMemo(aggregationStateFactory, reaggregateColumns.toArray(SelectColumn.ZERO_LENGTH_SELECT_COLUMN_ARRAY)); - } + } - final String[] rollupsToDrop = lastLevel.getColumnSourceMap().keySet().stream() - .filter(cn -> cn.endsWith(ComboAggregateFactory.ROLLUP_COLUMN_SUFFIX)) - .toArray(String[]::new); - final QueryTable finalTable = (QueryTable) lastLevel.dropColumns(rollupsToDrop); - final Object reverseLookup = Require.neqNull( - lastLevel.getAttribute(REVERSE_LOOKUP_ATTRIBUTE), "REVERSE_LOOKUP_ATTRIBUTE"); - finalTable.setAttribute(Table.REVERSE_LOOKUP_ATTRIBUTE, reverseLookup); - - final Table result = HierarchicalTable.createFrom(finalTable, - new RollupInfo(comboAggregateFactory, columns, - includeConstituents ? 
RollupInfo.LeafType.Constituent - : RollupInfo.LeafType.Normal)); - result.setAttribute(Table.HIERARCHICAL_SOURCE_TABLE_ATTRIBUTE, QueryTable.this); - copyAttributes(result, CopyAttributeOperation.Rollup); - maybeUpdateSortableColumns(result); + final String[] rollupsToDrop = lastLevel.getColumnSourceMap().keySet().stream() + .filter(cn -> cn.endsWith(ComboAggregateFactory.ROLLUP_COLUMN_SUFFIX)).toArray(String[]::new); + final QueryTable finalTable = (QueryTable) lastLevel.dropColumns(rollupsToDrop); + final Object reverseLookup = + Require.neqNull(lastLevel.getAttribute(REVERSE_LOOKUP_ATTRIBUTE), "REVERSE_LOOKUP_ATTRIBUTE"); + finalTable.setAttribute(Table.REVERSE_LOOKUP_ATTRIBUTE, reverseLookup); - return result; - }); + final Table result = HierarchicalTable.createFrom(finalTable, new RollupInfo(comboAggregateFactory, columns, + includeConstituents ? RollupInfo.LeafType.Constituent : RollupInfo.LeafType.Normal)); + result.setAttribute(Table.HIERARCHICAL_SOURCE_TABLE_ATTRIBUTE, QueryTable.this); + copyAttributes(result, CopyAttributeOperation.Rollup); + maybeUpdateSortableColumns(result); + + return result; + }); } @Override @@ -430,12 +409,12 @@ public Table treeTable(String idColumn, String parentColumn) { throw streamUnsupported("treeTable"); } return memoizeResult(MemoizedOperationKey.treeTable(idColumn, parentColumn), () -> { - final LocalTableMap byExternalResult = ByExternalAggregationFactory.byExternal(this, - false, (pt, st) -> pt.copyAttributes(st, CopyAttributeOperation.ByExternal), - Collections.singletonList(null), parentColumn); + final LocalTableMap byExternalResult = ByExternalAggregationFactory.byExternal(this, false, + (pt, st) -> pt.copyAttributes(st, CopyAttributeOperation.ByExternal), + Collections.singletonList(null), parentColumn); final QueryTable rootTable = (QueryTable) byExternalResult.get(null); final Table result = HierarchicalTable.createFrom((QueryTable) rootTable.copy(), - new TreeTableInfo(idColumn, parentColumn)); + new 
TreeTableInfo(idColumn, parentColumn)); // If the parent table has an RLL attached to it, we can re-use it. final ReverseLookup reverseLookup; @@ -444,15 +423,14 @@ public Table treeTable(String idColumn, String parentColumn) { final String[] listenerCols = reverseLookup.getKeyColumns(); if (listenerCols.length != 1 || !listenerCols[0].equals(idColumn)) { - final String listenerColError = StringUtils - .joinStrings(Arrays.stream(listenerCols).map(col -> "'" + col + "'"), ", "); + final String listenerColError = + StringUtils.joinStrings(Arrays.stream(listenerCols).map(col -> "'" + col + "'"), ", "); throw new IllegalStateException( - "Table was prepared for Tree table with a different Id column. Expected `" - + idColumn + "`, Actual " + listenerColError); + "Table was prepared for Tree table with a different Id column. Expected `" + idColumn + + "`, Actual " + listenerColError); } } else { - reverseLookup = ReverseLookupListener - .makeReverseLookupListenerWithSnapshot(QueryTable.this, idColumn); + reverseLookup = ReverseLookupListener.makeReverseLookupListenerWithSnapshot(QueryTable.this, idColumn); } result.setAttribute(HIERARCHICAL_CHILDREN_TABLE_MAP_ATTRIBUTE, byExternalResult); @@ -470,8 +448,7 @@ public Table slice(final long firstPositionInclusive, final long lastPositionExc if (firstPositionInclusive == lastPositionExclusive) { return getSubTable(Index.FACTORY.getEmptyIndex()); } - return getResult( - SliceLikeOperation.slice(this, firstPositionInclusive, lastPositionExclusive, "slice")); + return getResult(SliceLikeOperation.slice(this, firstPositionInclusive, lastPositionExclusive, "slice")); } @Override @@ -495,142 +472,131 @@ public Table tailPct(final double percent) { } @Override - public Table leftJoin(Table table, final MatchPair[] columnsToMatch, - final MatchPair[] columnsToAdd) { - return QueryPerformanceRecorder.withNugget("leftJoin(" + table + "," - + matchString(columnsToMatch) + "," + matchString(columnsToAdd) + ")", - 
sizeForInstrumentation(), () -> { - String rightColumnKeys[] = new String[columnsToMatch.length]; - for (int i = 0; i < columnsToMatch.length; i++) { - MatchPair match = columnsToMatch[i]; - rightColumnKeys[i] = match.rightColumn; - } + public Table leftJoin(Table table, final MatchPair[] columnsToMatch, final MatchPair[] columnsToAdd) { + return QueryPerformanceRecorder.withNugget( + "leftJoin(" + table + "," + matchString(columnsToMatch) + "," + matchString(columnsToAdd) + ")", + sizeForInstrumentation(), () -> { + String rightColumnKeys[] = new String[columnsToMatch.length]; + for (int i = 0; i < columnsToMatch.length; i++) { + MatchPair match = columnsToMatch[i]; + rightColumnKeys[i] = match.rightColumn; + } - Table groupedRight = table.by(rightColumnKeys); - return naturalJoin(groupedRight, columnsToMatch, columnsToAdd); - }); + Table groupedRight = table.by(rightColumnKeys); + return naturalJoin(groupedRight, columnsToMatch, columnsToAdd); + }); } @Override public Table exactJoin(Table table, MatchPair columnsToMatch[], MatchPair[] columnsToAdd) { return QueryPerformanceRecorder.withNugget( - "exactJoin(" + table + "," + Arrays.toString(columnsToMatch) + "," - + Arrays.toString(columnsToMatch) + ")", - sizeForInstrumentation(), - () -> naturalJoinInternal(table, columnsToMatch, columnsToAdd, true)); + "exactJoin(" + table + "," + Arrays.toString(columnsToMatch) + "," + Arrays.toString(columnsToMatch) + + ")", + sizeForInstrumentation(), + () -> naturalJoinInternal(table, columnsToMatch, columnsToAdd, true)); } @Override public Table lastBy(SelectColumn... selectColumns) { return QueryPerformanceRecorder.withNugget("lastBy(" + Arrays.toString(selectColumns) + ")", - sizeForInstrumentation(), - () -> { - final Table result = by(TRACKED_LAST_BY ? 
new TrackingLastByStateFactoryImpl() - : new LastByStateFactoryImpl(), selectColumns); - copyAttributes(result, CopyAttributeOperation.LastBy); - return result; - }); + sizeForInstrumentation(), + () -> { + final Table result = + by(TRACKED_LAST_BY ? new TrackingLastByStateFactoryImpl() : new LastByStateFactoryImpl(), + selectColumns); + copyAttributes(result, CopyAttributeOperation.LastBy); + return result; + }); } @Override public Table firstBy(SelectColumn... selectColumns) { - return QueryPerformanceRecorder.withNugget( - "firstBy(" + Arrays.toString(selectColumns) + ")", sizeForInstrumentation(), - () -> { - final Table result = by(TRACKED_FIRST_BY ? new TrackingFirstByStateFactoryImpl() - : new FirstByStateFactoryImpl(), selectColumns); - copyAttributes(result, CopyAttributeOperation.FirstBy); - return result; - }); + return QueryPerformanceRecorder.withNugget("firstBy(" + Arrays.toString(selectColumns) + ")", + sizeForInstrumentation(), + () -> { + final Table result = + by(TRACKED_FIRST_BY ? new TrackingFirstByStateFactoryImpl() : new FirstByStateFactoryImpl(), + selectColumns); + copyAttributes(result, CopyAttributeOperation.FirstBy); + return result; + }); } @Override public Table minBy(SelectColumn... selectColumns) { return QueryPerformanceRecorder.withNugget("minBy(" + Arrays.toString(selectColumns) + ")", - sizeForInstrumentation(), () -> { - if (isRefreshing()) { - return by(new MinMaxByStateFactoryImpl(true), selectColumns); - } else { - return by(new AddOnlyMinMaxByStateFactoryImpl(true), selectColumns); - } - }); + sizeForInstrumentation(), () -> { + if (isRefreshing()) { + return by(new MinMaxByStateFactoryImpl(true), selectColumns); + } else { + return by(new AddOnlyMinMaxByStateFactoryImpl(true), selectColumns); + } + }); } @Override public Table maxBy(SelectColumn... 
selectColumns) { return QueryPerformanceRecorder.withNugget("maxBy(" + Arrays.toString(selectColumns) + ")", - sizeForInstrumentation(), () -> { - if (isRefreshing()) { - return by(new MinMaxByStateFactoryImpl(false), selectColumns); - } else { - return by(new AddOnlyMinMaxByStateFactoryImpl(false), selectColumns); - } - }); + sizeForInstrumentation(), () -> { + if (isRefreshing()) { + return by(new MinMaxByStateFactoryImpl(false), selectColumns); + } else { + return by(new AddOnlyMinMaxByStateFactoryImpl(false), selectColumns); + } + }); } @Override public Table medianBy(SelectColumn... selectColumns) { - return QueryPerformanceRecorder.withNugget( - "medianBy(" + Arrays.toString(selectColumns) + ")", sizeForInstrumentation(), - () -> by(new PercentileByStateFactoryImpl(0.50, true), selectColumns)); + return QueryPerformanceRecorder.withNugget("medianBy(" + Arrays.toString(selectColumns) + ")", + sizeForInstrumentation(), + () -> by(new PercentileByStateFactoryImpl(0.50, true), selectColumns)); } @Override public Table countBy(String countColumnName, SelectColumn... groupByColumns) { return QueryPerformanceRecorder.withNugget( - "countBy(" + countColumnName + "," + Arrays.toString(groupByColumns) + ")", - sizeForInstrumentation(), () -> { - if (!COLUMN_NAME.matcher(countColumnName).matches()) { // TODO: Test more columns - // this way - throw new RuntimeException(countColumnName + " is not a valid column name"); - } - return by(new CountByStateFactoryImpl(countColumnName), groupByColumns); - }); + "countBy(" + countColumnName + "," + Arrays.toString(groupByColumns) + ")", sizeForInstrumentation(), + () -> { + if (!COLUMN_NAME.matcher(countColumnName).matches()) { // TODO: Test more columns this way + throw new RuntimeException(countColumnName + " is not a valid column name"); + } + return by(new CountByStateFactoryImpl(countColumnName), groupByColumns); + }); } - public Table by(final AggregationStateFactory inputAggregationStateFactory, - final SelectColumn... 
groupByColumns) { + public Table by(final AggregationStateFactory inputAggregationStateFactory, final SelectColumn... groupByColumns) { return memoizeResult(MemoizedOperationKey.by(inputAggregationStateFactory, groupByColumns), - () -> byNoMemo(inputAggregationStateFactory, groupByColumns)); + () -> byNoMemo(inputAggregationStateFactory, groupByColumns)); } private QueryTable byNoMemo(AggregationStateFactory inputAggregationStateFactory, - final SelectColumn... groupByColumns) { - final String description = - "by(" + inputAggregationStateFactory + ", " + Arrays.toString(groupByColumns) + ")"; + final SelectColumn... groupByColumns) { + final String description = "by(" + inputAggregationStateFactory + ", " + Arrays.toString(groupByColumns) + ")"; return QueryPerformanceRecorder.withNugget(description, sizeForInstrumentation(), () -> { - final boolean isBy = - inputAggregationStateFactory.getClass() == AggregationIndexStateFactory.class; + final boolean isBy = inputAggregationStateFactory.getClass() == AggregationIndexStateFactory.class; final boolean isApplyToAllBy = - inputAggregationStateFactory.getClass() == AggregationFormulaStateFactory.class; - final boolean isNumeric = - inputAggregationStateFactory.getClass() == SumStateFactory.class || + inputAggregationStateFactory.getClass() == AggregationFormulaStateFactory.class; + final boolean isNumeric = inputAggregationStateFactory.getClass() == SumStateFactory.class || inputAggregationStateFactory.getClass() == AbsSumStateFactory.class || inputAggregationStateFactory.getClass() == AvgStateFactory.class || inputAggregationStateFactory.getClass() == VarStateFactory.class || inputAggregationStateFactory.getClass() == StdStateFactory.class; final boolean isSelectDistinct = - inputAggregationStateFactory.getClass() == SelectDistinctStateFactoryImpl.class; - final boolean isCount = - inputAggregationStateFactory.getClass() == CountByStateFactoryImpl.class; - final boolean isMinMax = - inputAggregationStateFactory 
instanceof MinMaxByStateFactoryImpl; - final boolean isPercentile = - inputAggregationStateFactory.getClass() == PercentileByStateFactoryImpl.class; + inputAggregationStateFactory.getClass() == SelectDistinctStateFactoryImpl.class; + final boolean isCount = inputAggregationStateFactory.getClass() == CountByStateFactoryImpl.class; + final boolean isMinMax = inputAggregationStateFactory instanceof MinMaxByStateFactoryImpl; + final boolean isPercentile = inputAggregationStateFactory.getClass() == PercentileByStateFactoryImpl.class; final boolean isWeightedAvg = - inputAggregationStateFactory.getClass() == WeightedAverageStateFactoryImpl.class; - final boolean isWeightedSum = - inputAggregationStateFactory.getClass() == WeightedSumStateFactoryImpl.class; - final boolean isSortedFirstOrLast = - inputAggregationStateFactory instanceof SortedFirstOrLastByFactoryImpl; - final boolean isFirst = inputAggregationStateFactory - .getClass() == FirstByStateFactoryImpl.class - || inputAggregationStateFactory.getClass() == TrackingFirstByStateFactoryImpl.class; - final boolean isLast = inputAggregationStateFactory - .getClass() == LastByStateFactoryImpl.class - || inputAggregationStateFactory.getClass() == TrackingLastByStateFactoryImpl.class; + inputAggregationStateFactory.getClass() == WeightedAverageStateFactoryImpl.class; + final boolean isWeightedSum = inputAggregationStateFactory.getClass() == WeightedSumStateFactoryImpl.class; + final boolean isSortedFirstOrLast = inputAggregationStateFactory instanceof SortedFirstOrLastByFactoryImpl; + final boolean isFirst = inputAggregationStateFactory.getClass() == FirstByStateFactoryImpl.class + || inputAggregationStateFactory.getClass() == TrackingFirstByStateFactoryImpl.class; + final boolean isLast = inputAggregationStateFactory.getClass() == LastByStateFactoryImpl.class + || inputAggregationStateFactory.getClass() == TrackingLastByStateFactoryImpl.class; final boolean isCombo = inputAggregationStateFactory instanceof 
ComboAggregateFactory; if (isBy) { @@ -645,100 +611,83 @@ private QueryTable byNoMemo(AggregationStateFactory inputAggregationStateFactory if (isStream()) { throw streamUnsupported("applyToAllBy"); } - final String formula = - ((AggregationFormulaStateFactory) inputAggregationStateFactory).getFormula(); + final String formula = ((AggregationFormulaStateFactory) inputAggregationStateFactory).getFormula(); final String columnParamName = - ((AggregationFormulaStateFactory) inputAggregationStateFactory) - .getColumnParamName(); - return FormulaAggregationFactory.applyToAllBy(this, formula, columnParamName, - groupByColumns); + ((AggregationFormulaStateFactory) inputAggregationStateFactory).getColumnParamName(); + return FormulaAggregationFactory.applyToAllBy(this, formula, columnParamName, groupByColumns); } else if (isNumeric) { - return ChunkedOperatorAggregationHelper.aggregation( - new NonKeyColumnAggregationFactory( - (IterativeChunkedOperatorFactory) inputAggregationStateFactory), - this, groupByColumns); + return ChunkedOperatorAggregationHelper.aggregation(new NonKeyColumnAggregationFactory( + (IterativeChunkedOperatorFactory) inputAggregationStateFactory), this, groupByColumns); } else if (isSortedFirstOrLast) { final boolean isSortedFirst = - ((SortedFirstOrLastByFactoryImpl) inputAggregationStateFactory).isSortedFirst(); - return ChunkedOperatorAggregationHelper - .aggregation(new SortedFirstOrLastByAggregationFactory(isSortedFirst, false, - ((SortedFirstOrLastByFactoryImpl) inputAggregationStateFactory) - .getSortColumnNames()), + ((SortedFirstOrLastByFactoryImpl) inputAggregationStateFactory).isSortedFirst(); + return ChunkedOperatorAggregationHelper.aggregation( + new SortedFirstOrLastByAggregationFactory(isSortedFirst, false, + ((SortedFirstOrLastByFactoryImpl) inputAggregationStateFactory).getSortColumnNames()), this, groupByColumns); } else if (isFirst || isLast) { - return ChunkedOperatorAggregationHelper.aggregation( - new 
FirstOrLastByAggregationFactory(isFirst), this, groupByColumns); + return ChunkedOperatorAggregationHelper.aggregation(new FirstOrLastByAggregationFactory(isFirst), this, + groupByColumns); } else if (isMinMax) { - final boolean isMin = - ((MinMaxByStateFactoryImpl) inputAggregationStateFactory).isMinimum(); + final boolean isMin = ((MinMaxByStateFactoryImpl) inputAggregationStateFactory).isMinimum(); return ChunkedOperatorAggregationHelper.aggregation( - new NonKeyColumnAggregationFactory( - new MinMaxIterativeOperatorFactory(isMin, isStream() || isAddOnly())), - this, groupByColumns); + new NonKeyColumnAggregationFactory( + new MinMaxIterativeOperatorFactory(isMin, isStream() || isAddOnly())), + this, groupByColumns); } else if (isPercentile) { - final double percentile = - ((PercentileByStateFactoryImpl) inputAggregationStateFactory).getPercentile(); + final double percentile = ((PercentileByStateFactoryImpl) inputAggregationStateFactory).getPercentile(); final boolean averageMedian = - ((PercentileByStateFactoryImpl) inputAggregationStateFactory) - .getAverageMedian(); + ((PercentileByStateFactoryImpl) inputAggregationStateFactory).getAverageMedian(); return ChunkedOperatorAggregationHelper.aggregation( - new NonKeyColumnAggregationFactory( - new PercentileIterativeOperatorFactory(percentile, averageMedian)), - this, groupByColumns); + new NonKeyColumnAggregationFactory( + new PercentileIterativeOperatorFactory(percentile, averageMedian)), + this, groupByColumns); } else if (isWeightedAvg || isWeightedSum) { final String weightName; if (isWeightedAvg) { - weightName = ((WeightedAverageStateFactoryImpl) inputAggregationStateFactory) - .getWeightName(); + weightName = ((WeightedAverageStateFactoryImpl) inputAggregationStateFactory).getWeightName(); } else { - weightName = ((WeightedSumStateFactoryImpl) inputAggregationStateFactory) - .getWeightName(); + weightName = ((WeightedSumStateFactoryImpl) inputAggregationStateFactory).getWeightName(); } return 
ChunkedOperatorAggregationHelper.aggregation( - new WeightedAverageSumAggregationFactory(weightName, isWeightedSum), this, - groupByColumns); + new WeightedAverageSumAggregationFactory(weightName, isWeightedSum), this, groupByColumns); } else if (isCount) { return ChunkedOperatorAggregationHelper.aggregation( - new CountAggregationFactory( - ((CountByStateFactoryImpl) inputAggregationStateFactory).getCountName()), - this, groupByColumns); + new CountAggregationFactory( + ((CountByStateFactoryImpl) inputAggregationStateFactory).getCountName()), + this, groupByColumns); } else if (isSelectDistinct) { if (getColumnSourceMap().isEmpty()) { - // if we have no input columns, then the only thing we can do is have an empty - // result + // if we have no input columns, then the only thing we can do is have an empty result return new QueryTable(Index.FACTORY.getEmptyIndex(), Collections.emptyMap()); } - return ChunkedOperatorAggregationHelper.aggregation(new KeyOnlyAggregationFactory(), - this, groupByColumns); + return ChunkedOperatorAggregationHelper.aggregation(new KeyOnlyAggregationFactory(), this, + groupByColumns); } else if (isCombo) { - return ChunkedOperatorAggregationHelper - .aggregation(((ComboAggregateFactory) inputAggregationStateFactory) - .makeAggregationContextFactory(), this, groupByColumns); + return ChunkedOperatorAggregationHelper.aggregation( + ((ComboAggregateFactory) inputAggregationStateFactory).makeAggregationContextFactory(), this, + groupByColumns); } - throw new RuntimeException( - "Unknown aggregation factory: " + inputAggregationStateFactory); + throw new RuntimeException("Unknown aggregation factory: " + inputAggregationStateFactory); }); } - private static UnsupportedOperationException streamUnsupported( - @NotNull final String operationName) { + private static UnsupportedOperationException streamUnsupported(@NotNull final String operationName) { return new UnsupportedOperationException("Stream tables do not support " + operationName - + "; 
use StreamTableTools.streamToAppendOnlyTable to accumulate full history"); + + "; use StreamTableTools.streamToAppendOnlyTable to accumulate full history"); } @Override public Table headBy(long nRows, String... groupByColumns) { - return QueryPerformanceRecorder.withNugget( - "headBy(" + nRows + ", " + Arrays.toString(groupByColumns) + ")", - sizeForInstrumentation(), () -> headOrTailBy(nRows, true, groupByColumns)); + return QueryPerformanceRecorder.withNugget("headBy(" + nRows + ", " + Arrays.toString(groupByColumns) + ")", + sizeForInstrumentation(), () -> headOrTailBy(nRows, true, groupByColumns)); } @Override public Table tailBy(long nRows, String... groupByColumns) { - return QueryPerformanceRecorder.withNugget( - "tailBy(" + nRows + ", " + Arrays.toString(groupByColumns) + ")", - sizeForInstrumentation(), () -> headOrTailBy(nRows, false, groupByColumns)); + return QueryPerformanceRecorder.withNugget("tailBy(" + nRows + ", " + Arrays.toString(groupByColumns) + ")", + sizeForInstrumentation(), () -> headOrTailBy(nRows, false, groupByColumns)); } private Table headOrTailBy(long nRows, boolean head, String... groupByColumns) { @@ -747,8 +696,7 @@ private Table headOrTailBy(long nRows, boolean head, String... groupByColumns) { Set groupByColsSet = new HashSet<>(Arrays.asList(groupByColumns)); // TODO: WTF? List colNames = getDefinition().getColumnNames(); - // Iterate through the columns and build updateView() arguments that will trim the columns - // to nRows rows + // Iterate through the columns and build updateView() arguments that will trim the columns to nRows rows String[] updates = new String[colNames.size() - groupByColumns.length]; String[] casting = new String[colNames.size() - groupByColumns.length]; for (int i = 0, j = 0; i < colNames.size(); i++) { @@ -758,25 +706,24 @@ private Table headOrTailBy(long nRows, boolean head, String... 
groupByColumns) { casting[j] = colName + " = " + getCastFormula(dataType) + colName; if (head) updates[j++] = - // Get the first nRows rows: - // colName = isNull(colName) ? null - // : colName.size() > nRows ? colName.subArray(0, nRows) - // : colName - colName + '=' + "isNull(" + colName + ") ? null" + - ':' + colName + ".size() > " + nRows + " ? " + colName + ".subArray(0, " - + nRows + ')' + - ':' + colName; + // Get the first nRows rows: + // colName = isNull(colName) ? null + // : colName.size() > nRows ? colName.subArray(0, nRows) + // : colName + colName + '=' + "isNull(" + colName + ") ? null" + + ':' + colName + ".size() > " + nRows + " ? " + colName + ".subArray(0, " + nRows + + ')' + + ':' + colName; else updates[j++] = - // Get the last nRows rows: - // colName = isNull(colName) ? null - // : colName.size() > nRows ? colName.subArray(colName.size() - nRows, - // colName.size()) - // : colName - colName + '=' + "isNull(" + colName + ") ? null" + - ':' + colName + ".size() > " + nRows + " ? " + colName + ".subArray(" - + colName + ".size() - " + nRows + ", " + colName + ".size())" + - ':' + colName; + // Get the last nRows rows: + // colName = isNull(colName) ? null + // : colName.size() > nRows ? colName.subArray(colName.size() - nRows, colName.size()) + // : colName + colName + '=' + "isNull(" + colName + ") ? null" + + ':' + colName + ".size() > " + nRows + " ? " + colName + ".subArray(" + colName + + ".size() - " + nRows + ", " + colName + ".size())" + + ':' + colName; } } @@ -817,69 +764,64 @@ private String getCastFormulaInternal(Class dataType) { } @Override - public Table applyToAllBy(String formulaColumn, String columnParamName, - SelectColumn... groupByColumns) { + public Table applyToAllBy(String formulaColumn, String columnParamName, SelectColumn... 
groupByColumns) { final String[] formattingColumnNames = definition.getColumnNames().stream() - .filter(name -> name.endsWith("__WTABLE_FORMAT")).toArray(String[]::new); + .filter(name -> name.endsWith("__WTABLE_FORMAT")).toArray(String[]::new); final QueryTable noFormattingColumnsTable = (QueryTable) dropColumns(formattingColumnNames); return QueryPerformanceRecorder.withNugget( - "applyToAllBy(" + formulaColumn + ',' + columnParamName + ',' - + Arrays.toString(groupByColumns) + ")", - sizeForInstrumentation(), - () -> noFormattingColumnsTable.by( - new AggregationFormulaStateFactory(formulaColumn, columnParamName), - groupByColumns)); + "applyToAllBy(" + formulaColumn + ',' + columnParamName + ',' + Arrays.toString(groupByColumns) + ")", + sizeForInstrumentation(), + () -> noFormattingColumnsTable.by(new AggregationFormulaStateFactory(formulaColumn, columnParamName), + groupByColumns)); } @Override public Table sumBy(SelectColumn... groupByColumns) { return QueryPerformanceRecorder.withNugget("sumBy(" + Arrays.toString(groupByColumns) + ")", - sizeForInstrumentation(), - () -> by(new SumStateFactory(), groupByColumns)); + sizeForInstrumentation(), + () -> by(new SumStateFactory(), groupByColumns)); } @Override public Table absSumBy(SelectColumn... groupByColumns) { - return QueryPerformanceRecorder.withNugget( - "absSumBy(" + Arrays.toString(groupByColumns) + ")", sizeForInstrumentation(), - () -> by(new AbsSumStateFactory(), groupByColumns)); + return QueryPerformanceRecorder.withNugget("absSumBy(" + Arrays.toString(groupByColumns) + ")", + sizeForInstrumentation(), + () -> by(new AbsSumStateFactory(), groupByColumns)); } @Override public Table avgBy(SelectColumn... 
groupByColumns) { return QueryPerformanceRecorder.withNugget("avgBy(" + Arrays.toString(groupByColumns) + ")", - sizeForInstrumentation(), - () -> by(new AvgStateFactory(), groupByColumns)); + sizeForInstrumentation(), + () -> by(new AvgStateFactory(), groupByColumns)); } @Override public Table wavgBy(String weightColumn, SelectColumn... groupByColumns) { return QueryPerformanceRecorder.withNugget( - "wavgBy(" + weightColumn + ", " + Arrays.toString(groupByColumns) + ")", - sizeForInstrumentation(), - () -> by(new WeightedAverageStateFactoryImpl(weightColumn), groupByColumns)); + "wavgBy(" + weightColumn + ", " + Arrays.toString(groupByColumns) + ")", sizeForInstrumentation(), + () -> by(new WeightedAverageStateFactoryImpl(weightColumn), groupByColumns)); } @Override public Table wsumBy(String weightColumn, SelectColumn... groupByColumns) { return QueryPerformanceRecorder.withNugget( - "wsumBy(" + weightColumn + ", " + Arrays.toString(groupByColumns) + ")", - sizeForInstrumentation(), - () -> by(new WeightedSumStateFactoryImpl(weightColumn), groupByColumns)); + "wsumBy(" + weightColumn + ", " + Arrays.toString(groupByColumns) + ")", sizeForInstrumentation(), + () -> by(new WeightedSumStateFactoryImpl(weightColumn), groupByColumns)); } @Override public Table stdBy(SelectColumn... groupByColumns) { return QueryPerformanceRecorder.withNugget("stdBy(" + Arrays.toString(groupByColumns) + ")", - sizeForInstrumentation(), - () -> by(new StdStateFactory(), groupByColumns)); + sizeForInstrumentation(), + () -> by(new StdStateFactory(), groupByColumns)); } @Override public Table varBy(SelectColumn... 
groupByColumns) { return QueryPerformanceRecorder.withNugget("varBy(" + Arrays.toString(groupByColumns) + ")", - sizeForInstrumentation(), - () -> by(new VarStateFactory(), groupByColumns)); + sizeForInstrumentation(), + () -> by(new VarStateFactory(), groupByColumns)); } public static class FilteredTable extends QueryTable implements SelectFilter.RecomputeListener { @@ -889,8 +831,7 @@ public static class FilteredTable extends QueryTable implements SelectFilter.Rec private boolean refilterUnmatchedRequested = false; private MergedListener whereListener; - public FilteredTable(final Index currentMapping, final QueryTable source, - final SelectFilter[] filters) { + public FilteredTable(final Index currentMapping, final QueryTable source, final SelectFilter[] filters) { super(source.getDefinition(), currentMapping, source.columns, null); this.source = source; this.filters = filters; @@ -915,8 +856,8 @@ public void requestRecomputeUnmatched() { } /** - * Called if something about the filters has changed such that all matched rows of the - * source table should be re-evaluated. + * Called if something about the filters has changed such that all matched rows of the source table should be + * re-evaluated. 
*/ @Override public void requestRecomputeMatched() { @@ -946,22 +887,18 @@ public void setIsRefreshing(boolean refreshing) { * @param upstreamRemoved index of keys that were removed * @param upstreamModified index of keys that were modified upstream * @param shiftData sequence of shifts that apply to keyspace - * @param modifiedColumnSet the set of columns that have any changes to indices in - * {@code modified} + * @param modifiedColumnSet the set of columns that have any changes to indices in {@code modified} */ - private void doRefilter(final Index upstreamAdded, final Index upstreamRemoved, - final Index upstreamModified, - final IndexShiftData shiftData, final ModifiedColumnSet modifiedColumnSet) { + private void doRefilter(final Index upstreamAdded, final Index upstreamRemoved, final Index upstreamModified, + final IndexShiftData shiftData, final ModifiedColumnSet modifiedColumnSet) { final ShiftAwareListener.Update update = new ShiftAwareListener.Update(); update.modifiedColumnSet = modifiedColumnSet; - // Remove upstream keys first, so that keys at rows that were removed and then added are - // propagated as such. - // Note that it is a failure to propagate these as modifies, since modifiedColumnSet may - // not mark that all + // Remove upstream keys first, so that keys at rows that were removed and then added are propagated as such. + // Note that it is a failure to propagate these as modifies, since modifiedColumnSet may not mark that all // columns have changed. update.removed = upstreamRemoved == null ? Index.FACTORY.getEmptyIndex() - : upstreamRemoved.intersect(getIndex()); + : upstreamRemoved.intersect(getIndex()); getIndex().remove(update.removed); // Update our index and compute removals due to splatting. 
@@ -970,8 +907,7 @@ private void doRefilter(final Index upstreamAdded, final Index upstreamRemoved, shiftData.apply((beginRange, endRange, shiftDelta) -> { final Index toShift = index.subindexByKey(beginRange, endRange); index.removeRange(beginRange, endRange); - update.removed.insert( - index.subindexByKey(beginRange + shiftDelta, endRange + shiftDelta)); + update.removed.insert(index.subindexByKey(beginRange + shiftDelta, endRange + shiftDelta)); index.removeRange(beginRange + shiftDelta, endRange + shiftDelta); index.insert(toShift.shift(shiftDelta)); }); @@ -979,8 +915,7 @@ private void doRefilter(final Index upstreamAdded, final Index upstreamRemoved, final Index newMapping; if (refilterMatchedRequested && refilterUnmatchedRequested) { - newMapping = - whereInternal(source.getIndex().clone(), source.getIndex(), false, filters); + newMapping = whereInternal(source.getIndex().clone(), source.getIndex(), false, filters); refilterMatchedRequested = refilterUnmatchedRequested = false; } else if (refilterUnmatchedRequested) { // things that are added or removed are already reflected in source.getIndex @@ -1005,8 +940,8 @@ private void doRefilter(final Index upstreamAdded, final Index upstreamRemoved, refilterUnmatchedRequested = false; } else if (refilterMatchedRequested) { - // we need to take removed rows out of our index so we do not read them; and also - // examine added or modified rows + // we need to take removed rows out of our index so we do not read them; and also examine added or + // modified rows final Index matchedRows = getIndex().clone(); if (upstreamAdded != null) { matchedRows.insert(upstreamAdded); @@ -1018,8 +953,7 @@ private void doRefilter(final Index upstreamAdded, final Index upstreamRemoved, newMapping = whereInternal(matchedClone, matchedRows, false, filters); refilterMatchedRequested = false; } else { - throw new IllegalStateException( - "Refilter called when a refilter was not requested!"); + throw new IllegalStateException("Refilter 
called when a refilter was not requested!"); } // Compute added/removed in post-shift keyspace. @@ -1054,110 +988,105 @@ private void setWhereListener(MergedListener whereListener) { @Override public Table where(final SelectFilter... filters) { - return QueryPerformanceRecorder.withNugget("where(" + Arrays.toString(filters) + ")", - sizeForInstrumentation(), () -> { - for (int fi = 0; fi < filters.length; ++fi) { - if (!(filters[fi] instanceof ReindexingFilter)) { - continue; - } - final ReindexingFilter reindexingFilter = (ReindexingFilter) filters[fi]; - final boolean first = fi == 0; - final boolean last = fi == filters.length - 1; - if (last && !reindexingFilter.requiresSorting()) { - // If this is the last (or only) filter, we can just run it as normal unless - // it requires sorting. - break; - } - Table result = this; - if (!first) { - result = result.where(Arrays.copyOf(filters, fi)); - } - if (reindexingFilter.requiresSorting()) { - result = result.sort(reindexingFilter.getSortColumns()); - reindexingFilter.sortingDone(); + return QueryPerformanceRecorder.withNugget("where(" + Arrays.toString(filters) + ")", sizeForInstrumentation(), + () -> { + for (int fi = 0; fi < filters.length; ++fi) { + if (!(filters[fi] instanceof ReindexingFilter)) { + continue; + } + final ReindexingFilter reindexingFilter = (ReindexingFilter) filters[fi]; + final boolean first = fi == 0; + final boolean last = fi == filters.length - 1; + if (last && !reindexingFilter.requiresSorting()) { + // If this is the last (or only) filter, we can just run it as normal unless it requires + // sorting. 
+ break; + } + Table result = this; + if (!first) { + result = result.where(Arrays.copyOf(filters, fi)); + } + if (reindexingFilter.requiresSorting()) { + result = result.sort(reindexingFilter.getSortColumns()); + reindexingFilter.sortingDone(); + } + result = result.where(reindexingFilter); + if (!last) { + result = result.where(Arrays.copyOfRange(filters, fi + 1, filters.length)); + } + return result; } - result = result.where(reindexingFilter); - if (!last) { - result = result.where(Arrays.copyOfRange(filters, fi + 1, filters.length)); + + for (SelectFilter filter : filters) { + filter.init(getDefinition()); } - return result; - } - for (SelectFilter filter : filters) { - filter.init(getDefinition()); - } + return memoizeResult(MemoizedOperationKey.filter(filters), () -> { + final ShiftAwareSwapListener swapListener = + createSwapListenerIfRefreshing(ShiftAwareSwapListener::new); - return memoizeResult(MemoizedOperationKey.filter(filters), () -> { - final ShiftAwareSwapListener swapListener = - createSwapListenerIfRefreshing(ShiftAwareSwapListener::new); - - final Mutable result = new MutableObject<>(); - initializeWithSnapshot("where", swapListener, - (prevRequested, beforeClock) -> { - final boolean usePrev = prevRequested && isRefreshing(); - final Index indexToUse = usePrev ? 
index.getPrevIndex() : index; - final Index currentMapping; - - currentMapping = - whereInternal(indexToUse.clone(), indexToUse, usePrev, filters); - Assert.eq(currentMapping.getPrevIndex().size(), - "currentMapping.getPrevIndex.size()", currentMapping.size(), - "currentMapping.size()"); - - final FilteredTable filteredTable = - new FilteredTable(currentMapping, this, filters); - - for (final SelectFilter filter : filters) { - filter.setRecomputeListener(filteredTable); - } + final Mutable result = new MutableObject<>(); + initializeWithSnapshot("where", swapListener, + (prevRequested, beforeClock) -> { + final boolean usePrev = prevRequested && isRefreshing(); + final Index indexToUse = usePrev ? index.getPrevIndex() : index; + final Index currentMapping; - final boolean refreshingFilters = - Arrays.stream(filters).anyMatch(SelectFilter::isRefreshing); - copyAttributes(filteredTable, CopyAttributeOperation.Filter); - if (!refreshingFilters && isAddOnly()) { - filteredTable.setAttribute(Table.ADD_ONLY_TABLE_ATTRIBUTE, - Boolean.TRUE); - } + currentMapping = whereInternal(indexToUse.clone(), indexToUse, usePrev, filters); + Assert.eq(currentMapping.getPrevIndex().size(), + "currentMapping.getPrevIndex.size()", currentMapping.size(), + "currentMapping.size()"); - final List dependencies = Stream.concat( - Stream.of(filters) - .filter(f -> f instanceof NotificationQueue.Dependency) - .map(f -> (NotificationQueue.Dependency) f), - Stream.of(filters) - .filter(f -> f instanceof DependencyStreamProvider).flatMap( - f -> ((DependencyStreamProvider) f).getDependencyStream())) - .collect(Collectors.toList()); - if (swapListener != null) { - final ListenerRecorder recorder = - new ListenerRecorder("where(" + Arrays.toString(filters) + ")", - QueryTable.this, filteredTable); - final WhereListener whereListener = - new WhereListener(recorder, dependencies, filteredTable); - filteredTable.setWhereListener(whereListener); - recorder.setMergedListener(whereListener); - 
swapListener.setListenerAndResult(recorder, filteredTable); - filteredTable.addParentReference(swapListener); - filteredTable.addParentReference(whereListener); - } else if (refreshingFilters) { - final StaticWhereListener whereListener = - new StaticWhereListener(dependencies, filteredTable); - filteredTable.setWhereListener(whereListener); - filteredTable.addParentReference(whereListener); - } + final FilteredTable filteredTable = + new FilteredTable(currentMapping, this, filters); + + for (final SelectFilter filter : filters) { + filter.setRecomputeListener(filteredTable); + } - result.setValue(filteredTable); + final boolean refreshingFilters = + Arrays.stream(filters).anyMatch(SelectFilter::isRefreshing); + copyAttributes(filteredTable, CopyAttributeOperation.Filter); + if (!refreshingFilters && isAddOnly()) { + filteredTable.setAttribute(Table.ADD_ONLY_TABLE_ATTRIBUTE, Boolean.TRUE); + } - return true; - }); + final List dependencies = Stream.concat( + Stream.of(filters).filter(f -> f instanceof NotificationQueue.Dependency) + .map(f -> (NotificationQueue.Dependency) f), + Stream.of(filters).filter(f -> f instanceof DependencyStreamProvider) + .flatMap(f -> ((DependencyStreamProvider) f).getDependencyStream())) + .collect(Collectors.toList()); + if (swapListener != null) { + final ListenerRecorder recorder = + new ListenerRecorder("where(" + Arrays.toString(filters) + ")", + QueryTable.this, filteredTable); + final WhereListener whereListener = + new WhereListener(recorder, dependencies, filteredTable); + filteredTable.setWhereListener(whereListener); + recorder.setMergedListener(whereListener); + swapListener.setListenerAndResult(recorder, filteredTable); + filteredTable.addParentReference(swapListener); + filteredTable.addParentReference(whereListener); + } else if (refreshingFilters) { + final StaticWhereListener whereListener = + new StaticWhereListener(dependencies, filteredTable); + filteredTable.setWhereListener(whereListener); + 
filteredTable.addParentReference(whereListener); + } - return result.getValue(); + result.setValue(filteredTable); + + return true; + }); + + return result.getValue(); + }); }); - }); } @SuppressWarnings("WeakerAccess") - protected Index whereInternal(Index currentMapping, Index fullSet, boolean usePrev, - SelectFilter... filters) { + protected Index whereInternal(Index currentMapping, Index fullSet, boolean usePrev, SelectFilter... filters) { for (SelectFilter filter : filters) { if (Thread.interrupted()) { throw new QueryCancellationException("interrupted while filtering"); @@ -1168,35 +1097,33 @@ protected Index whereInternal(Index currentMapping, Index fullSet, boolean usePr } @Override - public Table whereIn(final GroupStrategy groupStrategy, final Table rightTable, - final boolean inclusion, final MatchPair... columnsToMatch) { - return QueryPerformanceRecorder.withNugget("whereIn(" + groupStrategy + " , rightTable, " - + inclusion + ", " + matchString(columnsToMatch) + ")", sizeForInstrumentation(), - () -> { - checkInitiateOperation(rightTable); - - final Table distinctValues = - rightTable.selectDistinct(MatchPair.getRightColumns(columnsToMatch)); - final DynamicWhereFilter dynamicWhereFilter = new DynamicWhereFilter(groupStrategy, - distinctValues, inclusion, columnsToMatch); - final Table where = where(dynamicWhereFilter); - if (where instanceof DynamicTable) { - if (distinctValues.isLive()) { - ((DynamicTable) where).addParentReference(distinctValues); - } - if (dynamicWhereFilter.isRefreshing()) { - ((DynamicTable) where).addParentReference(dynamicWhereFilter); + public Table whereIn(final GroupStrategy groupStrategy, final Table rightTable, final boolean inclusion, + final MatchPair... 
columnsToMatch) { + return QueryPerformanceRecorder.withNugget( + "whereIn(" + groupStrategy + " , rightTable, " + inclusion + ", " + matchString(columnsToMatch) + ")", + sizeForInstrumentation(), () -> { + checkInitiateOperation(rightTable); + + final Table distinctValues = rightTable.selectDistinct(MatchPair.getRightColumns(columnsToMatch)); + final DynamicWhereFilter dynamicWhereFilter = + new DynamicWhereFilter(groupStrategy, distinctValues, inclusion, columnsToMatch); + final Table where = where(dynamicWhereFilter); + if (where instanceof DynamicTable) { + if (distinctValues.isLive()) { + ((DynamicTable) where).addParentReference(distinctValues); + } + if (dynamicWhereFilter.isRefreshing()) { + ((DynamicTable) where).addParentReference(dynamicWhereFilter); + } } - } - return where; - }); + return where; + }); } @Override public Table flatten() { if (!isFlat() && !isRefreshing() && index.size() - 1 == index.lastKey()) { - // We're already flat, and we'll never update; so we can just return ourselves, after - // setting ourselves flat + // We're already flat, and we'll never update; so we can just return ourselves, after setting ourselves flat setFlat(); } @@ -1217,8 +1144,8 @@ protected void setFlat() { @Override public boolean isFlat() { if (flat) { - Assert.assertion(index.lastKey() == index.size() - 1, - "index.lastKey() == index.size() - 1", index, "index"); + Assert.assertion(index.lastKey() == index.size() - 1, "index.lastKey() == index.size() - 1", index, + "index"); } return flat; } @@ -1232,8 +1159,7 @@ public void releaseCachedResources() { @Override public Table select(SelectColumn... 
selectColumns) { if (!isRefreshing() && !isFlat() && exceedsMaximumStaticSelectOverhead()) { - // if we are static, we will pass the select through a flatten call, to ensure that our - // result is as + // if we are static, we will pass the select through a flatten call, to ensure that our result is as // efficient in terms of memory as possible return flatten().select(selectColumns); } @@ -1248,10 +1174,8 @@ private boolean exceedsMaximumStaticSelectOverhead() { return true; } - final long requiredBlocks = - (size() + SparseConstants.BLOCK_SIZE - 1) / SparseConstants.BLOCK_SIZE; - final long acceptableBlocks = - (long) (MAXIMUM_STATIC_SELECT_MEMORY_OVERHEAD * (double) requiredBlocks); + final long requiredBlocks = (size() + SparseConstants.BLOCK_SIZE - 1) / SparseConstants.BLOCK_SIZE; + final long acceptableBlocks = (long) (MAXIMUM_STATIC_SELECT_MEMORY_OVERHEAD * (double) requiredBlocks); final MutableLong lastBlock = new MutableLong(-1L); final MutableLong usedBlocks = new MutableLong(0); return !getIndex().forEachLongRange((s, e) -> { @@ -1278,34 +1202,28 @@ public Table update(final SelectColumn... selectColumns) { } /** - * This does a certain amount of validation and can be used to get confidence that the formulas - * are valid. If it is not valid, you will get an exception. Positive test (should pass - * validation): "X = 12", "Y = X + 1") Negative test (should fail validation): "X = 12", "Y = Z - * + 1") + * This does a certain amount of validation and can be used to get confidence that the formulas are valid. If it is + * not valid, you will get an exception. Positive test (should pass validation): "X = 12", "Y = X + 1") Negative + * test (should fail validation): "X = 12", "Y = Z + 1") */ @Override public SelectValidationResult validateSelect(final SelectColumn... 
selectColumns) { - final SelectColumn[] clones = - Arrays.stream(selectColumns).map(SelectColumn::copy).toArray(SelectColumn[]::new); - SelectAndViewAnalyzer analyzer = - SelectAndViewAnalyzer.create(SelectAndViewAnalyzer.Mode.SELECT_STATIC, columns, + final SelectColumn[] clones = Arrays.stream(selectColumns).map(SelectColumn::copy).toArray(SelectColumn[]::new); + SelectAndViewAnalyzer analyzer = SelectAndViewAnalyzer.create(SelectAndViewAnalyzer.Mode.SELECT_STATIC, columns, index, modifiedColumnSet, true, clones); return new SelectValidationResult(analyzer, clones); } private Table selectOrUpdate(Flavor flavor, final SelectColumn... selectColumns) { final String humanReadablePrefix = flavor.toString(); - final String updateDescription = - humanReadablePrefix + '(' + selectColumnString(selectColumns) + ')'; - return memoizeResult( - MemoizedOperationKey.selectUpdateViewOrUpdateView(selectColumns, flavor), - () -> QueryPerformanceRecorder.withNugget(updateDescription, sizeForInstrumentation(), - () -> { + final String updateDescription = humanReadablePrefix + '(' + selectColumnString(selectColumns) + ')'; + return memoizeResult(MemoizedOperationKey.selectUpdateViewOrUpdateView(selectColumns, flavor), + () -> QueryPerformanceRecorder.withNugget(updateDescription, sizeForInstrumentation(), () -> { checkInitiateOperation(); final SelectAndViewAnalyzer.Mode mode; if (isRefreshing()) { if ((flavor == Flavor.Update && USE_REDIRECTED_COLUMNS_FOR_UPDATE) - || (flavor == Flavor.Select && USE_REDIRECTED_COLUMNS_FOR_SELECT)) { + || (flavor == Flavor.Select && USE_REDIRECTED_COLUMNS_FOR_SELECT)) { mode = SelectAndViewAnalyzer.Mode.SELECT_REDIRECTED_REFRESHING; } else { mode = SelectAndViewAnalyzer.Mode.SELECT_REFRESHING; @@ -1315,32 +1233,30 @@ private Table selectOrUpdate(Flavor flavor, final SelectColumn... 
selectColumns) } final boolean publishTheseSources = flavor == Flavor.Update; final SelectAndViewAnalyzer analyzer = - SelectAndViewAnalyzer.create(mode, columns, index, modifiedColumnSet, - publishTheseSources, selectColumns); + SelectAndViewAnalyzer.create(mode, columns, index, modifiedColumnSet, + publishTheseSources, selectColumns); // Init all the rows by cooking up a fake Update final Index emptyIndex = Index.FACTORY.getEmptyIndex(); final ShiftAwareListener.Update fakeUpdate = - new ShiftAwareListener.Update(index, emptyIndex, emptyIndex, - IndexShiftData.EMPTY, ModifiedColumnSet.ALL); + new ShiftAwareListener.Update(index, emptyIndex, emptyIndex, + IndexShiftData.EMPTY, ModifiedColumnSet.ALL); try (final SelectAndViewAnalyzer.UpdateHelper updateHelper = - new SelectAndViewAnalyzer.UpdateHelper(emptyIndex, fakeUpdate)) { + new SelectAndViewAnalyzer.UpdateHelper(emptyIndex, fakeUpdate)) { analyzer.applyUpdate(fakeUpdate, emptyIndex, updateHelper); } - final QueryTable resultTable = - new QueryTable(index, analyzer.getPublishedColumnSources()); + final QueryTable resultTable = new QueryTable(index, analyzer.getPublishedColumnSources()); if (isRefreshing()) { analyzer.startTrackingPrev(); final Map effects = analyzer.calcEffects(); final SelectOrUpdateListener soul = - new SelectOrUpdateListener(updateDescription, this, resultTable, - effects, analyzer); + new SelectOrUpdateListener(updateDescription, this, resultTable, + effects, analyzer); listenForUpdates(soul); } else { propagateGrouping(selectColumns, resultTable); - for (final ColumnSource columnSource : analyzer.getNewColumnSources() - .values()) { + for (final ColumnSource columnSource : analyzer.getNewColumnSources().values()) { ((SparseArrayColumnSource) columnSource).setImmutable(); } } @@ -1367,31 +1283,26 @@ private void propagateGrouping(SelectColumn[] selectColumns, QueryTable resultTa sourceColumn = (SourceColumn) selectColumn; } if (sourceColumn != null && 
!usedOutputColumns.contains(sourceColumn.getSourceName())) { - final ColumnSource originalColumnSource = - getColumnSource(sourceColumn.getSourceName()); - final ColumnSource selectedColumnSource = - resultTable.getColumnSource(sourceColumn.getName()); + final ColumnSource originalColumnSource = getColumnSource(sourceColumn.getSourceName()); + final ColumnSource selectedColumnSource = resultTable.getColumnSource(sourceColumn.getName()); if (originalColumnSource != selectedColumnSource) { if (originalColumnSource instanceof DeferredGroupingColumnSource) { final DeferredGroupingColumnSource deferredGroupingSelectedSource = - (DeferredGroupingColumnSource) selectedColumnSource; + (DeferredGroupingColumnSource) selectedColumnSource; final GroupingProvider groupingProvider = - ((DeferredGroupingColumnSource) originalColumnSource) - .getGroupingProvider(); + ((DeferredGroupingColumnSource) originalColumnSource).getGroupingProvider(); if (groupingProvider != null) { // noinspection unchecked deferredGroupingSelectedSource.setGroupingProvider(groupingProvider); } else if (originalColumnSource.getGroupToRange() != null) { // noinspection unchecked - deferredGroupingSelectedSource - .setGroupToRange(originalColumnSource.getGroupToRange()); + deferredGroupingSelectedSource.setGroupToRange(originalColumnSource.getGroupToRange()); } } else if (originalColumnSource.getGroupToRange() != null) { final DeferredGroupingColumnSource deferredGroupingSelectedSource = - (DeferredGroupingColumnSource) selectedColumnSource; + (DeferredGroupingColumnSource) selectedColumnSource; // noinspection unchecked - deferredGroupingSelectedSource - .setGroupToRange(originalColumnSource.getGroupToRange()); + deferredGroupingSelectedSource.setGroupToRange(originalColumnSource.getGroupToRange()); } else if (index.hasGrouping(originalColumnSource)) { index.copyImmutableGroupings(originalColumnSource, selectedColumnSource); } @@ -1428,58 +1339,54 @@ private Table viewOrUpdateView(Flavor flavor, final 
SelectColumn... viewColumns) final String humanReadablePrefix = flavor.toString(); // Assuming that the description is human-readable, we make it once here and use it twice. - final String updateDescription = - humanReadablePrefix + '(' + selectColumnString(viewColumns) + ')'; + final String updateDescription = humanReadablePrefix + '(' + selectColumnString(viewColumns) + ')'; return memoizeResult(MemoizedOperationKey.selectUpdateViewOrUpdateView(viewColumns, flavor), - () -> QueryPerformanceRecorder.withNugget( - updateDescription, sizeForInstrumentation(), () -> { - final Mutable
    result = new MutableObject<>(); - - final ShiftAwareSwapListener swapListener = - createSwapListenerIfRefreshing(ShiftAwareSwapListener::new); - initializeWithSnapshot(humanReadablePrefix, swapListener, - (usePrev, beforeClockValue) -> { - final boolean publishTheseSources = flavor == Flavor.UpdateView; - final SelectAndViewAnalyzer analyzer = - SelectAndViewAnalyzer.create(SelectAndViewAnalyzer.Mode.VIEW_EAGER, - columns, index, modifiedColumnSet, publishTheseSources, - viewColumns); - final QueryTable queryTable = - new QueryTable(index, analyzer.getPublishedColumnSources()); - if (swapListener != null) { - final Map effects = analyzer.calcEffects(); - final ShiftAwareListener listener = new ViewOrUpdateViewListener( - updateDescription, this, queryTable, effects); - swapListener.setListenerAndResult(listener, queryTable); - queryTable.addParentReference(swapListener); - } + () -> QueryPerformanceRecorder.withNugget( + updateDescription, sizeForInstrumentation(), () -> { + final Mutable
    result = new MutableObject<>(); + + final ShiftAwareSwapListener swapListener = + createSwapListenerIfRefreshing(ShiftAwareSwapListener::new); + initializeWithSnapshot(humanReadablePrefix, swapListener, (usePrev, beforeClockValue) -> { + final boolean publishTheseSources = flavor == Flavor.UpdateView; + final SelectAndViewAnalyzer analyzer = + SelectAndViewAnalyzer.create(SelectAndViewAnalyzer.Mode.VIEW_EAGER, + columns, index, modifiedColumnSet, publishTheseSources, viewColumns); + final QueryTable queryTable = + new QueryTable(index, analyzer.getPublishedColumnSources()); + if (swapListener != null) { + final Map effects = analyzer.calcEffects(); + final ShiftAwareListener listener = + new ViewOrUpdateViewListener(updateDescription, this, queryTable, effects); + swapListener.setListenerAndResult(listener, queryTable); + queryTable.addParentReference(swapListener); + } - propagateFlatness(queryTable); + propagateFlatness(queryTable); - copyAttributes(queryTable, - flavor == Flavor.UpdateView ? CopyAttributeOperation.UpdateView - : CopyAttributeOperation.View); - maybeUpdateSortableColumns(queryTable, viewColumns); - if (publishTheseSources) { - maybeCopyColumnDescriptions(queryTable, viewColumns); - } else { - maybeCopyColumnDescriptions(queryTable); - } + copyAttributes(queryTable, + flavor == Flavor.UpdateView ? CopyAttributeOperation.UpdateView + : CopyAttributeOperation.View); + maybeUpdateSortableColumns(queryTable, viewColumns); + if (publishTheseSources) { + maybeCopyColumnDescriptions(queryTable, viewColumns); + } else { + maybeCopyColumnDescriptions(queryTable); + } - result.setValue(queryTable); + result.setValue(queryTable); - return true; - }); + return true; + }); - return result.getValue(); - })); + return result.getValue(); + })); } /** - * A Shift-Aware listener for {Update,}View. 
It uses the LayeredColumnReferences class to - * calculate how columns affect other columns, then creates a column set transformer which will - * be used by onUpdate to transform updates. + * A Shift-Aware listener for {Update,}View. It uses the LayeredColumnReferences class to calculate how columns + * affect other columns, then creates a column set transformer which will be used by onUpdate to transform updates. */ private static class ViewOrUpdateViewListener extends ShiftAwareListenerImpl { private final QueryTable dependent; @@ -1492,7 +1399,7 @@ private static class ViewOrUpdateViewListener extends ShiftAwareListenerImpl { * @param effects A map from a column name to the column names that it affects */ ViewOrUpdateViewListener(String description, QueryTable parent, QueryTable dependent, - Map effects) { + Map effects) { super(description, parent, dependent); this.dependent = dependent; @@ -1518,9 +1425,8 @@ public void onUpdate(final Update upstream) { } /** - * A Shift-Aware listener for Select or Update. It uses the SelectAndViewAnalyzer to calculate - * how columns affect other columns, then creates a column set transformer which will be used by - * onUpdate to transform updates. + * A Shift-Aware listener for Select or Update. It uses the SelectAndViewAnalyzer to calculate how columns affect + * other columns, then creates a column set transformer which will be used by onUpdate to transform updates. 
*/ private static class SelectOrUpdateListener extends ShiftAwareListenerImpl { private final QueryTable dependent; @@ -1534,8 +1440,8 @@ private static class SelectOrUpdateListener extends ShiftAwareListenerImpl { * @param effects A map from a column name to the column names that it affects */ SelectOrUpdateListener(String description, QueryTable parent, QueryTable dependent, - Map effects, - SelectAndViewAnalyzer analyzer) { + Map effects, + SelectAndViewAnalyzer analyzer) { super(description, parent, dependent); this.dependent = dependent; @@ -1556,19 +1462,17 @@ private static class SelectOrUpdateListener extends ShiftAwareListenerImpl { public void onUpdate(final Update upstream) { // Attempt to minimize work by sharing computation across all columns: // - clear only the keys that no longer exist - // - create parallel arrays of pre-shift-keys and post-shift-keys so we can move them in - // chunks + // - create parallel arrays of pre-shift-keys and post-shift-keys so we can move them in chunks try (final Index toClear = dependent.index.getPrevIndex(); - final SelectAndViewAnalyzer.UpdateHelper updateHelper = - new SelectAndViewAnalyzer.UpdateHelper(dependent.index, upstream)) { + final SelectAndViewAnalyzer.UpdateHelper updateHelper = + new SelectAndViewAnalyzer.UpdateHelper(dependent.index, upstream)) { toClear.remove(dependent.index); analyzer.applyUpdate(upstream, toClear, updateHelper); final Update downstream = upstream.copy(); downstream.modifiedColumnSet = dependent.modifiedColumnSet; - transformer.clearAndTransform(upstream.modifiedColumnSet, - downstream.modifiedColumnSet); + transformer.clearAndTransform(upstream.modifiedColumnSet, downstream.modifiedColumnSet); dependent.notifyListeners(downstream); } } @@ -1576,43 +1480,38 @@ public void onUpdate(final Update upstream) { @Override public Table lazyUpdate(SelectColumn... 
selectColumns) { - return QueryPerformanceRecorder.withNugget( - "lazyUpdate(" + selectColumnString(selectColumns) + ")", sizeForInstrumentation(), - () -> { - checkInitiateOperation(); - - final SelectAndViewAnalyzer analyzer = - SelectAndViewAnalyzer.create(SelectAndViewAnalyzer.Mode.VIEW_LAZY, - columns, index, modifiedColumnSet, true, selectColumns); - final QueryTable result = - new QueryTable(index, analyzer.getPublishedColumnSources()); - if (isRefreshing()) { - listenForUpdates(new ShiftAwareListenerImpl( - "lazyUpdate(" + Arrays.deepToString(selectColumns) + ')', this, result)); - } - propagateFlatness(result); - copyAttributes(result, CopyAttributeOperation.UpdateView); - maybeUpdateSortableColumns(result, selectColumns); - maybeCopyColumnDescriptions(result, selectColumns); + return QueryPerformanceRecorder.withNugget("lazyUpdate(" + selectColumnString(selectColumns) + ")", + sizeForInstrumentation(), () -> { + checkInitiateOperation(); - return result; - }); + final SelectAndViewAnalyzer analyzer = + SelectAndViewAnalyzer.create(SelectAndViewAnalyzer.Mode.VIEW_LAZY, + columns, index, modifiedColumnSet, true, selectColumns); + final QueryTable result = new QueryTable(index, analyzer.getPublishedColumnSources()); + if (isRefreshing()) { + listenForUpdates(new ShiftAwareListenerImpl( + "lazyUpdate(" + Arrays.deepToString(selectColumns) + ')', this, result)); + } + propagateFlatness(result); + copyAttributes(result, CopyAttributeOperation.UpdateView); + maybeUpdateSortableColumns(result, selectColumns); + maybeCopyColumnDescriptions(result, selectColumns); + + return result; + }); } @Override public Table dropColumns(String... 
columnNames) { - return memoizeResult(MemoizedOperationKey.dropColumns(columnNames), - () -> QueryPerformanceRecorder.withNugget( - "dropColumns(" + Arrays.toString(columnNames) + ")", sizeForInstrumentation(), - () -> { + return memoizeResult(MemoizedOperationKey.dropColumns(columnNames), () -> QueryPerformanceRecorder + .withNugget("dropColumns(" + Arrays.toString(columnNames) + ")", sizeForInstrumentation(), () -> { final Mutable
    result = new MutableObject<>(); final Set existingColumns = new HashSet<>(definition.getColumnNames()); final Set columnNamesToDrop = new HashSet<>(Arrays.asList(columnNames)); if (!existingColumns.containsAll(columnNamesToDrop)) { columnNamesToDrop.removeAll(existingColumns); - throw new RuntimeException( - "Unknown columns: " + columnNamesToDrop.toString() + throw new RuntimeException("Unknown columns: " + columnNamesToDrop.toString() + ", available columns = " + getColumnSourceMap().keySet()); } final Map newColumns = new LinkedHashMap<>(columns); @@ -1621,52 +1520,48 @@ public Table dropColumns(String... columnNames) { } final ShiftAwareSwapListener swapListener = - createSwapListenerIfRefreshing(ShiftAwareSwapListener::new); + createSwapListenerIfRefreshing(ShiftAwareSwapListener::new); - initializeWithSnapshot("dropColumns", swapListener, - (usePrev, beforeClockValue) -> { - final QueryTable resultTable = new QueryTable(index, newColumns); - propagateFlatness(resultTable); + initializeWithSnapshot("dropColumns", swapListener, (usePrev, beforeClockValue) -> { + final QueryTable resultTable = new QueryTable(index, newColumns); + propagateFlatness(resultTable); - copyAttributes(resultTable, CopyAttributeOperation.DropColumns); - maybeUpdateSortableColumns(resultTable); - maybeCopyColumnDescriptions(resultTable); + copyAttributes(resultTable, CopyAttributeOperation.DropColumns); + maybeUpdateSortableColumns(resultTable); + maybeCopyColumnDescriptions(resultTable); - if (swapListener != null) { - final ModifiedColumnSet.Transformer mcsTransformer = + if (swapListener != null) { + final ModifiedColumnSet.Transformer mcsTransformer = newModifiedColumnSetTransformer(resultTable, - resultTable.getColumnSourceMap().keySet() - .toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)); - final ShiftAwareListenerImpl listener = new ShiftAwareListenerImpl( - "dropColumns(" + Arrays.deepToString(columnNames) + ')', this, - resultTable) { - @Override - public void 
onUpdate(final Update upstream) { - final Update downstream = upstream.copy(); - mcsTransformer.clearAndTransform(upstream.modifiedColumnSet, + resultTable.getColumnSourceMap().keySet() + .toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)); + final ShiftAwareListenerImpl listener = new ShiftAwareListenerImpl( + "dropColumns(" + Arrays.deepToString(columnNames) + ')', this, resultTable) { + @Override + public void onUpdate(final Update upstream) { + final Update downstream = upstream.copy(); + mcsTransformer.clearAndTransform(upstream.modifiedColumnSet, resultTable.modifiedColumnSet); - if (upstream.modified.empty() - || resultTable.modifiedColumnSet.empty()) { - downstream.modifiedColumnSet = ModifiedColumnSet.EMPTY; - if (downstream.modified.nonempty()) { - downstream.modified.close(); - downstream.modified = Index.FACTORY.getEmptyIndex(); - } - } else { - downstream.modifiedColumnSet = - resultTable.modifiedColumnSet; + if (upstream.modified.empty() || resultTable.modifiedColumnSet.empty()) { + downstream.modifiedColumnSet = ModifiedColumnSet.EMPTY; + if (downstream.modified.nonempty()) { + downstream.modified.close(); + downstream.modified = Index.FACTORY.getEmptyIndex(); } - resultTable.notifyListeners(downstream); + } else { + downstream.modifiedColumnSet = resultTable.modifiedColumnSet; } - }; - swapListener.setListenerAndResult(listener, resultTable); - resultTable.addParentReference(swapListener); - } + resultTable.notifyListeners(downstream); + } + }; + swapListener.setListenerAndResult(listener, resultTable); + resultTable.addParentReference(swapListener); + } - result.setValue(resultTable); + result.setValue(resultTable); - return true; - }); + return true; + }); return result.getValue(); })); @@ -1675,103 +1570,100 @@ public void onUpdate(final Update upstream) { @Override public Table renameColumns(MatchPair... 
pairs) { return QueryPerformanceRecorder.withNugget("renameColumns(" + matchString(pairs) + ")", - sizeForInstrumentation(), () -> { - if (pairs == null || pairs.length == 0) { - if (isRefreshing()) { - manageWithCurrentScope(); + sizeForInstrumentation(), () -> { + if (pairs == null || pairs.length == 0) { + if (isRefreshing()) { + manageWithCurrentScope(); + } + return this; } - return this; - } - checkInitiateOperation(); + checkInitiateOperation(); - Map pairLookup = new HashMap<>(); - for (MatchPair pair : pairs) { - if (pair.leftColumn == null || pair.leftColumn.equals("")) { - throw new IllegalArgumentException( - "Bad left column in rename pair \"" + pair.toString() + "\""); - } - if (null == columns.get(pair.rightColumn)) { - throw new IllegalArgumentException( - "Column \"" + pair.rightColumn + "\" not found"); + Map pairLookup = new HashMap<>(); + for (MatchPair pair : pairs) { + if (pair.leftColumn == null || pair.leftColumn.equals("")) { + throw new IllegalArgumentException( + "Bad left column in rename pair \"" + pair.toString() + "\""); + } + if (null == columns.get(pair.rightColumn)) { + throw new IllegalArgumentException("Column \"" + pair.rightColumn + "\" not found"); + } + pairLookup.put(pair.rightColumn, pair.leftColumn); } - pairLookup.put(pair.rightColumn, pair.leftColumn); - } - int mcsPairIdx = 0; - final MatchPair[] modifiedColumnSetPairs = new MatchPair[columns.size()]; + int mcsPairIdx = 0; + final MatchPair[] modifiedColumnSetPairs = new MatchPair[columns.size()]; - Map newColumns = new LinkedHashMap<>(); - for (Map.Entry entry : columns.entrySet()) { - String oldName = entry.getKey(); - ColumnSource columnSource = entry.getValue(); - String newName = pairLookup.get(oldName); - if (newName == null) { - newName = oldName; + Map newColumns = new LinkedHashMap<>(); + for (Map.Entry entry : columns.entrySet()) { + String oldName = entry.getKey(); + ColumnSource columnSource = entry.getValue(); + String newName = pairLookup.get(oldName); + 
if (newName == null) { + newName = oldName; + } + modifiedColumnSetPairs[mcsPairIdx++] = new MatchPair(newName, oldName); + newColumns.put(newName, columnSource); } - modifiedColumnSetPairs[mcsPairIdx++] = new MatchPair(newName, oldName); - newColumns.put(newName, columnSource); - } - final QueryTable queryTable = new QueryTable(index, newColumns); - if (isRefreshing()) { - final ModifiedColumnSet.Transformer mcsTransformer = - newModifiedColumnSetTransformer(queryTable, modifiedColumnSetPairs); - listenForUpdates(new ShiftAwareListenerImpl( - "renameColumns(" + Arrays.deepToString(pairs) + ')', this, queryTable) { - @Override - public void onUpdate(final Update upstream) { - final Update downstream = upstream.copy(); - downstream.modifiedColumnSet = queryTable.modifiedColumnSet; - if (upstream.modified.nonempty()) { - mcsTransformer.clearAndTransform(upstream.modifiedColumnSet, - downstream.modifiedColumnSet); - } else { - downstream.modifiedColumnSet.clear(); + final QueryTable queryTable = new QueryTable(index, newColumns); + if (isRefreshing()) { + final ModifiedColumnSet.Transformer mcsTransformer = + newModifiedColumnSetTransformer(queryTable, modifiedColumnSetPairs); + listenForUpdates(new ShiftAwareListenerImpl("renameColumns(" + Arrays.deepToString(pairs) + ')', + this, queryTable) { + @Override + public void onUpdate(final Update upstream) { + final Update downstream = upstream.copy(); + downstream.modifiedColumnSet = queryTable.modifiedColumnSet; + if (upstream.modified.nonempty()) { + mcsTransformer.clearAndTransform(upstream.modifiedColumnSet, + downstream.modifiedColumnSet); + } else { + downstream.modifiedColumnSet.clear(); + } + queryTable.notifyListeners(downstream); } - queryTable.notifyListeners(downstream); - } - }); - } - propagateFlatness(queryTable); + }); + } + propagateFlatness(queryTable); - maybeUpdateSortableColumns(queryTable, pairs); - maybeCopyColumnDescriptions(queryTable, pairs); + maybeUpdateSortableColumns(queryTable, pairs); + 
maybeCopyColumnDescriptions(queryTable, pairs); - return queryTable; - }); + return queryTable; + }); } @Override - public Table aj(final Table rightTable, final MatchPair[] columnsToMatch, - final MatchPair[] columnsToAdd, AsOfMatchRule asOfMatchRule) { + public Table aj(final Table rightTable, final MatchPair[] columnsToMatch, final MatchPair[] columnsToAdd, + AsOfMatchRule asOfMatchRule) { if (rightTable == null) { throw new IllegalArgumentException("aj() requires a non-null right hand side table."); } final Table rightTableCoalesced = rightTable.coalesce(); return QueryPerformanceRecorder.withNugget( - "aj(" + "rightTable, " + matchString(columnsToMatch) + ", " + matchString(columnsToAdd) - + ")", - () -> ajInternal(rightTableCoalesced, columnsToMatch, columnsToAdd, - SortingOrder.Ascending, asOfMatchRule)); + "aj(" + "rightTable, " + matchString(columnsToMatch) + ", " + matchString(columnsToAdd) + ")", + () -> ajInternal(rightTableCoalesced, columnsToMatch, columnsToAdd, SortingOrder.Ascending, + asOfMatchRule)); } @Override - public Table raj(final Table rightTable, final MatchPair[] columnsToMatch, - final MatchPair[] columnsToAdd, AsOfMatchRule asOfMatchRule) { + public Table raj(final Table rightTable, final MatchPair[] columnsToMatch, final MatchPair[] columnsToAdd, + AsOfMatchRule asOfMatchRule) { if (rightTable == null) { throw new IllegalArgumentException("raj() requires a non-null right hand side table."); } final Table rightTableCoalesced = rightTable.coalesce(); return QueryPerformanceRecorder.withNugget( - "raj(" + "rightTable, " + matchString(columnsToMatch) + ", " + matchString(columnsToAdd) - + ")", - () -> ajInternal(rightTableCoalesced.reverse(), columnsToMatch, columnsToAdd, - SortingOrder.Descending, asOfMatchRule)); + "raj(" + "rightTable, " + matchString(columnsToMatch) + ", " + matchString(columnsToAdd) + ")", + () -> ajInternal(rightTableCoalesced.reverse(), columnsToMatch, columnsToAdd, SortingOrder.Descending, + asOfMatchRule)); } 
private Table ajInternal(Table rightTable, MatchPair[] columnsToMatch, MatchPair[] columnsToAdd, - final SortingOrder order, AsOfMatchRule asOfMatchRule) { + final SortingOrder order, AsOfMatchRule asOfMatchRule) { if (rightTable == null) { throw new IllegalArgumentException("aj() requires a non-null right hand side table."); } @@ -1803,29 +1695,25 @@ private Table ajInternal(Table rightTable, MatchPair[] columnsToMatch, MatchPair switch (asOfMatchRule) { case LESS_THAN: if (order != SortingOrder.Ascending) { - throw new IllegalArgumentException( - "Invalid as of match rule for raj: " + asOfMatchRule); + throw new IllegalArgumentException("Invalid as of match rule for raj: " + asOfMatchRule); } disallowExactMatch = true; break; case LESS_THAN_EQUAL: if (order != SortingOrder.Ascending) { - throw new IllegalArgumentException( - "Invalid as of match rule for raj: " + asOfMatchRule); + throw new IllegalArgumentException("Invalid as of match rule for raj: " + asOfMatchRule); } disallowExactMatch = false; break; case GREATER_THAN: if (order != SortingOrder.Descending) { - throw new IllegalArgumentException( - "Invalid as of match rule for aj: " + asOfMatchRule); + throw new IllegalArgumentException("Invalid as of match rule for aj: " + asOfMatchRule); } disallowExactMatch = true; break; case GREATER_THAN_EQUAL: if (order != SortingOrder.Descending) { - throw new IllegalArgumentException( - "Invalid as of match rule for aj: " + asOfMatchRule); + throw new IllegalArgumentException("Invalid as of match rule for aj: " + asOfMatchRule); } disallowExactMatch = false; break; @@ -1833,37 +1721,34 @@ private Table ajInternal(Table rightTable, MatchPair[] columnsToMatch, MatchPair throw new UnsupportedOperationException(); } - return AsOfJoinHelper.asOfJoin(this, (QueryTable) rightTable, columnsToMatch, columnsToAdd, - order, disallowExactMatch); + return AsOfJoinHelper.asOfJoin(this, (QueryTable) rightTable, columnsToMatch, columnsToAdd, order, + disallowExactMatch); } @Override 
- public Table naturalJoin(final Table rightTable, final MatchPair[] columnsToMatch, - MatchPair[] columnsToAdd) { + public Table naturalJoin(final Table rightTable, final MatchPair[] columnsToMatch, MatchPair[] columnsToAdd) { return QueryPerformanceRecorder.withNugget( - "naturalJoin(" + matchString(columnsToMatch) + ", " + matchString(columnsToAdd) + ")", - () -> naturalJoinInternal(rightTable, columnsToMatch, columnsToAdd, false)); + "naturalJoin(" + matchString(columnsToMatch) + ", " + matchString(columnsToAdd) + ")", + () -> naturalJoinInternal(rightTable, columnsToMatch, columnsToAdd, false)); } private Table naturalJoinInternal(final Table rightTable, final MatchPair[] columnsToMatch, - MatchPair[] columnsToAdd, boolean exactMatch) { + MatchPair[] columnsToAdd, boolean exactMatch) { columnsToAdd = createColumnsToAddIfMissing(rightTable, columnsToMatch, columnsToAdd); final QueryTable rightTableCoalesced = (QueryTable) rightTable.coalesce(); - return NaturalJoinHelper.naturalJoin(this, rightTableCoalesced, columnsToMatch, - columnsToAdd, exactMatch); + return NaturalJoinHelper.naturalJoin(this, rightTableCoalesced, columnsToMatch, columnsToAdd, exactMatch); } private MatchPair[] createColumnsToAddIfMissing(Table rightTable, MatchPair[] columnsToMatch, - MatchPair[] columnsToAdd) { + MatchPair[] columnsToAdd) { if (columnsToAdd.length == 0) { - final Set matchColumns = - Arrays.stream(columnsToMatch).map(matchPair -> matchPair.leftColumn) + final Set matchColumns = Arrays.stream(columnsToMatch).map(matchPair -> matchPair.leftColumn) .collect(Collectors.toCollection(HashSet::new)); final List columnNames = rightTable.getDefinition().getColumnNames(); return columnNames.stream().filter((name) -> !matchColumns.contains(name)) - .map(name -> new MatchPair(name, name)).toArray(MatchPair[]::new); + .map(name -> new MatchPair(name, name)).toArray(MatchPair[]::new); } return columnsToAdd; } @@ -1871,8 +1756,8 @@ private MatchPair[] createColumnsToAddIfMissing(Table 
rightTable, MatchPair[] co private static String selectColumnString(final SelectColumn[] selectColumns) { final StringBuilder result = new StringBuilder(); result.append('['); - final Iterable scs = Arrays.stream(selectColumns).map(SelectColumn::getName) - .filter(name -> name.length() > 0)::iterator; + final Iterable scs = + Arrays.stream(selectColumns).map(SelectColumn::getName).filter(name -> name.length() > 0)::iterator; IterableUtils.appendCommaSeparatedList(result, scs); result.append("]"); return result.toString(); @@ -1883,33 +1768,31 @@ static void startTrackingPrev(Collection values) { } @Override - public Table join(final Table rightTableCandidate, MatchPair[] columnsToMatch, - MatchPair[] columnsToAdd, int numRightBitsToReserve) { + public Table join(final Table rightTableCandidate, MatchPair[] columnsToMatch, MatchPair[] columnsToAdd, + int numRightBitsToReserve) { return memoizeResult( - MemoizedOperationKey.crossJoin(rightTableCandidate, columnsToMatch, columnsToAdd, - numRightBitsToReserve), - () -> joinNoMemo(rightTableCandidate, columnsToMatch, columnsToAdd, - numRightBitsToReserve)); + MemoizedOperationKey.crossJoin(rightTableCandidate, columnsToMatch, columnsToAdd, + numRightBitsToReserve), + () -> joinNoMemo(rightTableCandidate, columnsToMatch, columnsToAdd, numRightBitsToReserve)); } - private Table joinNoMemo(final Table rightTableCandidate, MatchPair[] columnsToMatch, - MatchPair[] columnsToAdd, int numRightBitsToReserve) { + private Table joinNoMemo(final Table rightTableCandidate, MatchPair[] columnsToMatch, MatchPair[] columnsToAdd, + int numRightBitsToReserve) { final MatchPair[] realColumnsToAdd; if (columnsToAdd.length == 0) { Set columnsForMatching = - Arrays.stream(columnsToMatch).filter(mp -> mp.rightColumn.equals(mp.leftColumn)) - .map(x -> x.rightColumn).collect(Collectors.toCollection(HashSet::new)); + Arrays.stream(columnsToMatch).filter(mp -> mp.rightColumn.equals(mp.leftColumn)) + .map(x -> 
x.rightColumn).collect(Collectors.toCollection(HashSet::new)); Set rightColumnNames; try { rightColumnNames = rightTableCandidate.getColumnSourceMap().keySet(); } catch (UnsupportedOperationException uoe) { - throw new UnsupportedOperationException( - "Can not join a V2 table to a V1 table on the right side.", uoe); + throw new UnsupportedOperationException("Can not join a V2 table to a V1 table on the right side.", + uoe); } - realColumnsToAdd = - rightColumnNames.stream().filter(x -> !columnsForMatching.contains(x)) + realColumnsToAdd = rightColumnNames.stream().filter(x -> !columnsForMatching.contains(x)) .map(x -> new MatchPair(x, x)).toArray(MatchPair[]::new); } else { realColumnsToAdd = columnsToAdd; @@ -1918,14 +1801,14 @@ private Table joinNoMemo(final Table rightTableCandidate, MatchPair[] columnsToM if (USE_CHUNKED_CROSS_JOIN) { final QueryTable coalescedRightTable = (QueryTable) rightTableCandidate.coalesce(); return QueryPerformanceRecorder.withNugget( - "join(" + matchString(columnsToMatch) + ", " + matchString(realColumnsToAdd) + ", " - + numRightBitsToReserve + ")", - () -> CrossJoinHelper.join(this, coalescedRightTable, columnsToMatch, - realColumnsToAdd, numRightBitsToReserve)); + "join(" + matchString(columnsToMatch) + ", " + matchString(realColumnsToAdd) + ", " + + numRightBitsToReserve + ")", + () -> CrossJoinHelper.join(this, coalescedRightTable, columnsToMatch, realColumnsToAdd, + numRightBitsToReserve)); } - final Set columnsToMatchSet = Arrays.stream(columnsToMatch).map(MatchPair::right) - .collect(Collectors.toCollection(HashSet::new)); + final Set columnsToMatchSet = + Arrays.stream(columnsToMatch).map(MatchPair::right).collect(Collectors.toCollection(HashSet::new)); final LinkedHashSet columnsToAddSelectColumns = new LinkedHashSet<>(); final List columnsToUngroupBy = new ArrayList<>(); @@ -1934,69 +1817,61 @@ private Table joinNoMemo(final Table rightTableCandidate, MatchPair[] columnsToM rightColumnsToMatch[i] = 
columnsToMatch[i].rightColumn; columnsToAddSelectColumns.add(new SourceColumn(columnsToMatch[i].rightColumn)); } - final ArrayList columnsToAddAfterRename = - new ArrayList<>(realColumnsToAdd.length); + final ArrayList columnsToAddAfterRename = new ArrayList<>(realColumnsToAdd.length); for (MatchPair matchPair : realColumnsToAdd) { columnsToAddAfterRename.add(new MatchPair(matchPair.leftColumn, matchPair.leftColumn)); if (!columnsToMatchSet.contains(matchPair.leftColumn)) { columnsToUngroupBy.add(matchPair.leftColumn); } - columnsToAddSelectColumns - .add(new SourceColumn(matchPair.rightColumn, matchPair.leftColumn)); + columnsToAddSelectColumns.add(new SourceColumn(matchPair.rightColumn, matchPair.leftColumn)); } - return QueryPerformanceRecorder.withNugget( - "join(" + matchString(columnsToMatch) + ", " + matchString(realColumnsToAdd) + ")", - () -> { - boolean sentinelAdded = false; - final Table rightTable; - if (columnsToUngroupBy.isEmpty()) { - rightTable = rightTableCandidate.updateView("__sentinel__=null"); - columnsToUngroupBy.add("__sentinel__"); - columnsToAddSelectColumns.add(new SourceColumn("__sentinel__")); - columnsToAddAfterRename.add(new MatchPair("__sentinel__", "__sentinel__")); - sentinelAdded = true; - } else { - rightTable = rightTableCandidate; - } + return QueryPerformanceRecorder + .withNugget("join(" + matchString(columnsToMatch) + ", " + matchString(realColumnsToAdd) + ")", () -> { + boolean sentinelAdded = false; + final Table rightTable; + if (columnsToUngroupBy.isEmpty()) { + rightTable = rightTableCandidate.updateView("__sentinel__=null"); + columnsToUngroupBy.add("__sentinel__"); + columnsToAddSelectColumns.add(new SourceColumn("__sentinel__")); + columnsToAddAfterRename.add(new MatchPair("__sentinel__", "__sentinel__")); + sentinelAdded = true; + } else { + rightTable = rightTableCandidate; + } - final Table rightGrouped = - rightTable.by(rightColumnsToMatch).view(columnsToAddSelectColumns - 
.toArray(SelectColumn.ZERO_LENGTH_SELECT_COLUMN_ARRAY)); - final Table naturalJoinResult = naturalJoin(rightGrouped, columnsToMatch, - columnsToAddAfterRename.toArray(MatchPair.ZERO_LENGTH_MATCH_PAIR_ARRAY)); - final Table ungroupedResult = naturalJoinResult - .ungroup(columnsToUngroupBy.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)); + final Table rightGrouped = rightTable.by(rightColumnsToMatch) + .view(columnsToAddSelectColumns.toArray(SelectColumn.ZERO_LENGTH_SELECT_COLUMN_ARRAY)); + final Table naturalJoinResult = naturalJoin(rightGrouped, columnsToMatch, + columnsToAddAfterRename.toArray(MatchPair.ZERO_LENGTH_MATCH_PAIR_ARRAY)); + final Table ungroupedResult = naturalJoinResult + .ungroup(columnsToUngroupBy.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)); - maybeCopyColumnDescriptions(ungroupedResult, rightTable, columnsToMatch, - realColumnsToAdd); + maybeCopyColumnDescriptions(ungroupedResult, rightTable, columnsToMatch, realColumnsToAdd); - return sentinelAdded ? ungroupedResult.dropColumns("__sentinel__") - : ungroupedResult; - }); + return sentinelAdded ? ungroupedResult.dropColumns("__sentinel__") : ungroupedResult; + }); } /** - * The leftColumns are the column sources for the snapshot-triggering table. The rightColumns - * are the column sources for the table being snapshotted. The leftIndex refers to snapshots - * that we want to take. Typically this index is expected to have size 1, but in some cases it - * could be larger. The rightIndex refers to the index of the current table. Therefore we want - * to take leftIndex.size() snapshots, each of which being rightIndex.size() in size. + * The leftColumns are the column sources for the snapshot-triggering table. The rightColumns are the column sources + * for the table being snapshotted. The leftIndex refers to snapshots that we want to take. Typically this index is + * expected to have size 1, but in some cases it could be larger. The rightIndex refers to the index of the current + * table. 
Therefore we want to take leftIndex.size() snapshots, each of which being rightIndex.size() in size. * * @param leftColumns Columns making up the triggering data * @param leftIndex The currently triggering rows * @param rightColumns Columns making up the data being snapshotted * @param rightIndex The data to snapshot - * @param dest The ColumnSources in which to store the data. The keys are drawn from - * leftColumns.keys() and\ rightColumns.keys() + * @param dest The ColumnSources in which to store the data. The keys are drawn from leftColumns.keys() and\ + * rightColumns.keys() * @param destOffset The offset in the 'dest' ColumnSources at which to write data - * @return The new dest ColumnSource size, calculated as - * {@code destOffset + leftIndex.size() * rightIndex.size()} + * @return The new dest ColumnSource size, calculated as {@code destOffset + leftIndex.size() * rightIndex.size()} */ private static long snapshotHistoryInternal( - @NotNull Map leftColumns, @NotNull Index leftIndex, - @NotNull Map rightColumns, @NotNull Index rightIndex, - @NotNull Map dest, long destOffset) { + @NotNull Map leftColumns, @NotNull Index leftIndex, + @NotNull Map rightColumns, @NotNull Index rightIndex, + @NotNull Map dest, long destOffset) { assert leftColumns.size() + rightColumns.size() == dest.size(); if (leftIndex.empty() || rightIndex.empty()) { // Nothing to do. 
@@ -2015,8 +1890,7 @@ private static long snapshotHistoryInternal( leftIndex.forAllLongs(snapshotKey -> { final long doff = destOffsetHolder[0]; destOffsetHolder[0] += rightSize; - try ( - final Index destIndex = Index.FACTORY.getIndexByRange(doff, doff + rightSize - 1)) { + try (final Index destIndex = Index.FACTORY.getIndexByRange(doff, doff + rightSize - 1)) { SnapshotUtils.copyStampColumns(leftColumns, snapshotKey, dest, destIndex); SnapshotUtils.copyDataColumns(rightColumns, rightIndex, dest, destIndex, false); } @@ -2025,71 +1899,60 @@ private static long snapshotHistoryInternal( } public Table snapshotHistory(final Table rightTable) { - return QueryPerformanceRecorder.withNugget("snapshotHistory", - rightTable.sizeForInstrumentation(), () -> { - checkInitiateOperation(); - - // resultColumns initially contains the left columns, then we insert the right - // columns into it - final Map resultColumns = - SnapshotUtils.createColumnSourceMap( - this.getColumnSourceMap(), - ArrayBackedColumnSource::getMemoryColumnSource); - final Map rightColumns = - SnapshotUtils.createColumnSourceMap( - rightTable.getColumnSourceMap(), - ArrayBackedColumnSource::getMemoryColumnSource); - resultColumns.putAll(rightColumns); - - // BTW, we don't track prev because these items are never modified or removed. - final Table leftTable = this; // For readability. 
- final long initialSize = - snapshotHistoryInternal(leftTable.getColumnSourceMap(), leftTable.getIndex(), - rightTable.getColumnSourceMap(), rightTable.getIndex(), - resultColumns, 0); - final Index resultIndex = Index.FACTORY.getFlatIndex(initialSize); - final QueryTable result = new QueryTable(resultIndex, resultColumns); - if (isRefreshing()) { - listenForUpdates(new ListenerImpl( - "snapshotHistory" + resultColumns.keySet().toString(), this, result) { - private long lastKey = index.lastKey(); - - @Override - public void onUpdate(final Index added, final Index removed, - final Index modified) { - Assert.assertion(removed.size() == 0, "removed.size() == 0", + return QueryPerformanceRecorder.withNugget("snapshotHistory", rightTable.sizeForInstrumentation(), () -> { + checkInitiateOperation(); + + // resultColumns initially contains the left columns, then we insert the right columns into it + final Map resultColumns = + SnapshotUtils.createColumnSourceMap(this.getColumnSourceMap(), + ArrayBackedColumnSource::getMemoryColumnSource); + final Map rightColumns = + SnapshotUtils.createColumnSourceMap(rightTable.getColumnSourceMap(), + ArrayBackedColumnSource::getMemoryColumnSource); + resultColumns.putAll(rightColumns); + + // BTW, we don't track prev because these items are never modified or removed. + final Table leftTable = this; // For readability. 
+ final long initialSize = snapshotHistoryInternal(leftTable.getColumnSourceMap(), leftTable.getIndex(), + rightTable.getColumnSourceMap(), rightTable.getIndex(), + resultColumns, 0); + final Index resultIndex = Index.FACTORY.getFlatIndex(initialSize); + final QueryTable result = new QueryTable(resultIndex, resultColumns); + if (isRefreshing()) { + listenForUpdates(new ListenerImpl("snapshotHistory" + resultColumns.keySet().toString(), this, result) { + private long lastKey = index.lastKey(); + + @Override + public void onUpdate(final Index added, final Index removed, final Index modified) { + Assert.assertion(removed.size() == 0, "removed.size() == 0", removed, "removed"); - Assert.assertion(modified.size() == 0, "modified.size() == 0", + Assert.assertion(modified.size() == 0, "modified.size() == 0", modified, "modified"); - if (added.size() == 0 || rightTable.size() == 0) { - return; - } - Assert.assertion(added.firstKey() > lastKey, - "added.firstKey() > lastKey", + if (added.size() == 0 || rightTable.size() == 0) { + return; + } + Assert.assertion(added.firstKey() > lastKey, "added.firstKey() > lastKey", lastKey, "lastKey", added, "added"); - final long oldSize = resultIndex.size(); - final long newSize = - snapshotHistoryInternal(leftTable.getColumnSourceMap(), added, - rightTable.getColumnSourceMap(), rightTable.getIndex(), - resultColumns, oldSize); - final Index addedSnapshots = - Index.FACTORY.getIndexByRange(oldSize, newSize - 1); - resultIndex.insert(addedSnapshots); - lastKey = index.lastKey(); - result.notifyListeners(addedSnapshots, Index.FACTORY.getEmptyIndex(), + final long oldSize = resultIndex.size(); + final long newSize = snapshotHistoryInternal(leftTable.getColumnSourceMap(), added, + rightTable.getColumnSourceMap(), rightTable.getIndex(), + resultColumns, oldSize); + final Index addedSnapshots = Index.FACTORY.getIndexByRange(oldSize, newSize - 1); + resultIndex.insert(addedSnapshots); + lastKey = index.lastKey(); + 
result.notifyListeners(addedSnapshots, Index.FACTORY.getEmptyIndex(), Index.FACTORY.getEmptyIndex()); - } + } - @Override - public boolean canExecute(final long step) { - return ((NotificationQueue.Dependency) rightTable).satisfied(step) - && super.canExecute(step); - } - }); - } - result.setFlat(); - return result; - }); + @Override + public boolean canExecute(final long step) { + return ((NotificationQueue.Dependency) rightTable).satisfied(step) && super.canExecute(step); + } + }); + } + result.setFlat(); + return result; + }); } public Table silent() { @@ -2099,47 +1962,39 @@ public Table silent() { @Override @Deprecated public void addColumnGrouping(String columnName) { - // NB: This used to set the group to range map on the column source, but that's not a safe - // thing to do. + // NB: This used to set the group to range map on the column source, but that's not a safe thing to do. getIndex().getGrouping(getColumnSource(columnName)); } @Override public Table snapshot(Table baseTable, boolean doInitialSnapshot, String... stampColumns) { return QueryPerformanceRecorder.withNugget( - "snapshot(baseTable, " + doInitialSnapshot + ", " + Arrays.toString(stampColumns) + ")", - baseTable.sizeForInstrumentation(), () -> { - - // 'stampColumns' specifies a subset of this table's columns to use, but if - // stampColumns is empty, we get - // a view containing all of the columns (in that case, basically we get this table - // back). - QueryTable viewTable = (QueryTable) view(stampColumns); - // Due to the above logic, we need to pull the actual set of column names back from - // the viewTable. 
- // Whatever viewTable came back from the above, we do the snapshot - return viewTable.snapshotInternal(baseTable, doInitialSnapshot, - viewTable.getDefinition().getColumnNamesArray()); - }); + "snapshot(baseTable, " + doInitialSnapshot + ", " + Arrays.toString(stampColumns) + ")", + baseTable.sizeForInstrumentation(), () -> { + + // 'stampColumns' specifies a subset of this table's columns to use, but if stampColumns is empty, + // we get + // a view containing all of the columns (in that case, basically we get this table back). + QueryTable viewTable = (QueryTable) view(stampColumns); + // Due to the above logic, we need to pull the actual set of column names back from the viewTable. + // Whatever viewTable came back from the above, we do the snapshot + return viewTable.snapshotInternal(baseTable, doInitialSnapshot, + viewTable.getDefinition().getColumnNamesArray()); + }); } - private Table snapshotInternal(Table tableToSnapshot, boolean doInitialSnapshot, - String... stampColumns) { - // TODO: we would like to make this operation LTM safe, instead of requiring the lock here; - // there are two tables + private Table snapshotInternal(Table tableToSnapshot, boolean doInitialSnapshot, String... stampColumns) { + // TODO: we would like to make this operation LTM safe, instead of requiring the lock here; there are two tables // but we do only need to listen to one of them; however we are dependent on two of them checkInitiateOperation(); - // There are no LazySnapshotTableProviders in the system currently, but they may be used for - // multicast + // There are no LazySnapshotTableProviders in the system currently, but they may be used for multicast // distribution systems and similar integrations. - // If this table provides a lazy snapshot version, we should use that instead for the - // snapshot, this allows us - // to refresh the table only immediately before the snapshot occurs. 
Because we know that we - // are uninterested - // in things like previous values, it can save a significant amount of CPU to only refresh - // the table when needed. + // If this table provides a lazy snapshot version, we should use that instead for the snapshot, this allows us + // to refresh the table only immediately before the snapshot occurs. Because we know that we are uninterested + // in things like previous values, it can save a significant amount of CPU to only refresh the table when + // needed. final boolean lazySnapshot = tableToSnapshot instanceof LazySnapshotTableProvider; if (lazySnapshot) { tableToSnapshot = ((LazySnapshotTableProvider) tableToSnapshot).getLazySnapshotTable(); @@ -2152,23 +2007,18 @@ private Table snapshotInternal(Table tableToSnapshot, boolean doInitialSnapshot, checkInitiateOperation(tableToSnapshot); } - // Establish the "right" columns using the same names and types as the table being - // snapshotted + // Establish the "right" columns using the same names and types as the table being snapshotted final Map resultRightColumns = - SnapshotUtils.createColumnSourceMap( - tableToSnapshot.getColumnSourceMap(), - ArrayBackedColumnSource::getMemoryColumnSource); - - // Now make the "left" columns (namely, the "snapshot key" columns). Because this flavor of - // "snapshot" only - // keeps a single snapshot, each snapshot key column will have the same value in every row. - // So for efficiency we + SnapshotUtils.createColumnSourceMap( + tableToSnapshot.getColumnSourceMap(), ArrayBackedColumnSource::getMemoryColumnSource); + + // Now make the "left" columns (namely, the "snapshot key" columns). Because this flavor of "snapshot" only + // keeps a single snapshot, each snapshot key column will have the same value in every row. So for efficiency we // use a SingleValueColumnSource for these columns. 
final Map resultLeftColumns = new LinkedHashMap<>(); for (String stampColumn : stampColumns) { final Class stampColumnType = getColumnSource(stampColumn).getType(); - resultLeftColumns.put(stampColumn, - SingleValueColumnSource.getSingleValueColumnSource(stampColumnType)); + resultLeftColumns.put(stampColumn, SingleValueColumnSource.getSingleValueColumnSource(stampColumnType)); } // make our result table @@ -2180,22 +2030,19 @@ private Table snapshotInternal(Table tableToSnapshot, boolean doInitialSnapshot, final Index resultIndex = Index.FACTORY.getEmptyIndex(); final QueryTable result = new QueryTable(resultIndex, allColumns); - final SnapshotInternalListener listener = - new SnapshotInternalListener(this, lazySnapshot, tableToSnapshot, + final SnapshotInternalListener listener = new SnapshotInternalListener(this, lazySnapshot, tableToSnapshot, result, resultLeftColumns, resultRightColumns, resultIndex); if (doInitialSnapshot) { if (!isRefreshing() && tableToSnapshot.isLive() && !lazySnapshot) { - // if we are making a static copy of the table, we must ensure that it does not - // change out from under us + // if we are making a static copy of the table, we must ensure that it does not change out from under us ConstructSnapshot.callDataSnapshotFunction("snapshotInternal", - ConstructSnapshot.makeSnapshotControl(false, - (NotificationStepSource) tableToSnapshot), - (usePrev, beforeClockUnused) -> { - listener.doSnapshot(false, usePrev); - resultIndex.initializePreviousValue(); - return true; - }); + ConstructSnapshot.makeSnapshotControl(false, (NotificationStepSource) tableToSnapshot), + (usePrev, beforeClockUnused) -> { + listener.doSnapshot(false, usePrev); + resultIndex.initializePreviousValue(); + return true; + }); } else { listener.doSnapshot(false, false); @@ -2214,139 +2061,127 @@ private Table snapshotInternal(Table tableToSnapshot, boolean doInitialSnapshot, @Override public Table snapshotIncremental(final Table tableToSnapshot, final boolean 
doInitialSnapshot, - final String... stampColumns) { - return QueryPerformanceRecorder.withNugget( - "snapshotIncremental(tableToSnapshot, " + doInitialSnapshot + ", " - + Arrays.toString(stampColumns) + ")", - tableToSnapshot.sizeForInstrumentation(), () -> { - checkInitiateOperation(); - - final QueryTable rightTable = - (QueryTable) (tableToSnapshot instanceof UncoalescedTable - ? tableToSnapshot.coalesce() - : tableToSnapshot); - rightTable.checkInitiateOperation(); - - // Use the given columns (if specified); otherwise an empty array means all of my - // columns - final String[] useStampColumns = stampColumns.length == 0 - ? getColumnSourceMap().keySet().toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY) - : stampColumns; - - final Map leftColumns = new LinkedHashMap<>(); - for (String stampColumn : useStampColumns) { - final ColumnSource cs = getColumnSource(stampColumn); - leftColumns.put(stampColumn, cs); - } + final String... stampColumns) { + return QueryPerformanceRecorder.withNugget("snapshotIncremental(tableToSnapshot, " + doInitialSnapshot + ", " + + Arrays.toString(stampColumns) + ")", tableToSnapshot.sizeForInstrumentation(), () -> { + checkInitiateOperation(); - final Map resultLeftColumns = - new LinkedHashMap<>(); - for (Map.Entry entry : leftColumns.entrySet()) { - final String name = entry.getKey(); - final ColumnSource cs = entry.getValue(); - final Class type = cs.getType(); - final SparseArrayColumnSource stampDest = - DbArrayBase.class.isAssignableFrom(type) - ? SparseArrayColumnSource.getSparseMemoryColumnSource(type, - cs.getComponentType()) - : SparseArrayColumnSource.getSparseMemoryColumnSource(type); - - resultLeftColumns.put(name, stampDest); - } + final QueryTable rightTable = + (QueryTable) (tableToSnapshot instanceof UncoalescedTable ? 
tableToSnapshot.coalesce() + : tableToSnapshot); + rightTable.checkInitiateOperation(); - final Map resultRightColumns = - SnapshotUtils.createColumnSourceMap( - rightTable.getColumnSourceMap(), - SparseArrayColumnSource::getSparseMemoryColumnSource); - - final Map resultColumns = - new LinkedHashMap<>(resultRightColumns); - resultColumns.putAll(resultLeftColumns); - if (resultColumns.size() != resultLeftColumns.size() + resultRightColumns.size()) { - throwColumnConflictMessage(resultLeftColumns.keySet(), - resultRightColumns.keySet()); - } + // Use the given columns (if specified); otherwise an empty array means all of my columns + final String[] useStampColumns = stampColumns.length == 0 + ? getColumnSourceMap().keySet().toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY) + : stampColumns; - final Index resultIndex = Index.FACTORY.getEmptyIndex(); + final Map leftColumns = new LinkedHashMap<>(); + for (String stampColumn : useStampColumns) { + final ColumnSource cs = getColumnSource(stampColumn); + leftColumns.put(stampColumn, cs); + } - final QueryTable resultTable = new QueryTable(resultIndex, resultColumns); + final Map resultLeftColumns = new LinkedHashMap<>(); + for (Map.Entry entry : leftColumns.entrySet()) { + final String name = entry.getKey(); + final ColumnSource cs = entry.getValue(); + final Class type = cs.getType(); + final SparseArrayColumnSource stampDest = DbArrayBase.class.isAssignableFrom(type) + ? 
SparseArrayColumnSource.getSparseMemoryColumnSource(type, cs.getComponentType()) + : SparseArrayColumnSource.getSparseMemoryColumnSource(type); - if (isRefreshing() && rightTable.isRefreshing()) { + resultLeftColumns.put(name, stampDest); + } + + final Map resultRightColumns = + SnapshotUtils.createColumnSourceMap( + rightTable.getColumnSourceMap(), + SparseArrayColumnSource::getSparseMemoryColumnSource); - // What's happening here: the left table gets "listener" (some complicated logic - // that has access to the - // coalescer) whereas the right table (above) gets the one-liner above (but - // which also has access to the - // same coalescer). So when the right table sees updates they are simply fed to - // the coalescer. The - // coalescer's job is just to remember what rows have changed. When the *left* - // table gets updates, then - // the SnapshotIncrementalListener gets called, which does all the snapshotting - // work. + final Map resultColumns = new LinkedHashMap<>(resultRightColumns); + resultColumns.putAll(resultLeftColumns); + if (resultColumns.size() != resultLeftColumns.size() + resultRightColumns.size()) { + throwColumnConflictMessage(resultLeftColumns.keySet(), resultRightColumns.keySet()); + } - final ListenerRecorder rightListenerRecorder = new ListenerRecorder( - "snapshotIncremental (rightTable)", rightTable, resultTable); - rightTable.listenForUpdates(rightListenerRecorder); + final Index resultIndex = Index.FACTORY.getEmptyIndex(); - final ListenerRecorder leftListenerRecorder = - new ListenerRecorder("snapshotIncremental (leftTable)", this, resultTable); - listenForUpdates(leftListenerRecorder); + final QueryTable resultTable = new QueryTable(resultIndex, resultColumns); - final SnapshotIncrementalListener listener = - new SnapshotIncrementalListener(this, resultTable, resultColumns, - rightListenerRecorder, leftListenerRecorder, rightTable, leftColumns); + if (isRefreshing() && rightTable.isRefreshing()) { + // What's happening here: the 
left table gets "listener" (some complicated logic that has access + // to the + // coalescer) whereas the right table (above) gets the one-liner above (but which also has + // access to the + // same coalescer). So when the right table sees updates they are simply fed to the coalescer. + // The + // coalescer's job is just to remember what rows have changed. When the *left* table gets + // updates, then + // the SnapshotIncrementalListener gets called, which does all the snapshotting work. - rightListenerRecorder.setMergedListener(listener); - leftListenerRecorder.setMergedListener(listener); - resultTable.addParentReference(listener); + final ListenerRecorder rightListenerRecorder = + new ListenerRecorder("snapshotIncremental (rightTable)", rightTable, resultTable); + rightTable.listenForUpdates(rightListenerRecorder); - if (doInitialSnapshot) { - listener.doFirstSnapshot(true); - } + final ListenerRecorder leftListenerRecorder = + new ListenerRecorder("snapshotIncremental (leftTable)", this, resultTable); + listenForUpdates(leftListenerRecorder); - startTrackingPrev(resultColumns.values()); - resultTable.getIndex().initializePreviousValue(); - } else if (doInitialSnapshot) { - SnapshotIncrementalListener.copyRowsToResult(rightTable.getIndex(), this, - rightTable, leftColumns, resultColumns); - resultTable.getIndex().insert(rightTable.getIndex()); - resultTable.getIndex().initializePreviousValue(); - } else if (isRefreshing()) { - // we are not doing an initial snapshot, but are refreshing so need to take a - // snapshot of our (static) - // right table on the very first tick of the leftTable - listenForUpdates(new ShiftAwareListenerImpl("snapshotIncremental (leftTable)", - this, resultTable) { - @Override - public void onUpdate(Update upstream) { - SnapshotIncrementalListener.copyRowsToResult(rightTable.getIndex(), - QueryTable.this, rightTable, leftColumns, resultColumns); - resultTable.getIndex().insert(rightTable.getIndex()); - 
resultTable.notifyListeners(resultTable.getIndex(), - Index.FACTORY.getEmptyIndex(), Index.FACTORY.getEmptyIndex()); - removeUpdateListener(this); + final SnapshotIncrementalListener listener = + new SnapshotIncrementalListener(this, resultTable, resultColumns, + rightListenerRecorder, leftListenerRecorder, rightTable, leftColumns); + + + rightListenerRecorder.setMergedListener(listener); + leftListenerRecorder.setMergedListener(listener); + resultTable.addParentReference(listener); + + if (doInitialSnapshot) { + listener.doFirstSnapshot(true); } - }); - } - return resultTable; - }); + startTrackingPrev(resultColumns.values()); + resultTable.getIndex().initializePreviousValue(); + } else if (doInitialSnapshot) { + SnapshotIncrementalListener.copyRowsToResult(rightTable.getIndex(), this, rightTable, + leftColumns, resultColumns); + resultTable.getIndex().insert(rightTable.getIndex()); + resultTable.getIndex().initializePreviousValue(); + } else if (isRefreshing()) { + // we are not doing an initial snapshot, but are refreshing so need to take a snapshot of our + // (static) + // right table on the very first tick of the leftTable + listenForUpdates( + new ShiftAwareListenerImpl("snapshotIncremental (leftTable)", this, resultTable) { + @Override + public void onUpdate(Update upstream) { + SnapshotIncrementalListener.copyRowsToResult(rightTable.getIndex(), + QueryTable.this, rightTable, leftColumns, resultColumns); + resultTable.getIndex().insert(rightTable.getIndex()); + resultTable.notifyListeners(resultTable.getIndex(), + Index.FACTORY.getEmptyIndex(), Index.FACTORY.getEmptyIndex()); + removeUpdateListener(this); + } + }); + } + + return resultTable; + }); } private static void throwColumnConflictMessage(Set left, Set right) { Iterable conflicts = left.stream().filter(right::contains)::iterator; - throw new RuntimeException( - "Column name conflicts: " + IterableUtils.makeCommaSeparatedList(conflicts)); + throw new RuntimeException("Column name conflicts: " + 
IterableUtils.makeCommaSeparatedList(conflicts)); } /** - * If we have a column source that is a complex type, but can be reinterpreted or transformed - * into a simpler type, do the transformation, otherwise return the original column source. + * If we have a column source that is a complex type, but can be reinterpreted or transformed into a simpler type, + * do the transformation, otherwise return the original column source. * * @param columnSource the column source to possibly reinterpret - * @return the transformed column source, or the original column source if there is not a - * relevant transformation + * @return the transformed column source, or the original column source if there is not a relevant transformation */ static ColumnSource maybeTransformToPrimitive(final ColumnSource columnSource) { if (DBDateTime.class.isAssignableFrom(columnSource.getType())) { @@ -2354,8 +2189,7 @@ static ColumnSource maybeTransformToPrimitive(final ColumnSource columnSource return columnSource.reinterpret(long.class); } else { // noinspection unchecked - final ColumnSource columnSourceAsDateTime = - (ColumnSource) columnSource; + final ColumnSource columnSourceAsDateTime = (ColumnSource) columnSource; return new DatetimeAsLongColumnSource(columnSourceAsDateTime); } } @@ -2365,8 +2199,7 @@ static ColumnSource maybeTransformToPrimitive(final ColumnSource columnSource return columnSource.reinterpret(byte.class); } else { // noinspection unchecked - final ColumnSource columnSourceAsBoolean = - (ColumnSource) columnSource; + final ColumnSource columnSourceAsBoolean = (ColumnSource) columnSource; return new BooleanAsByteColumnSource(columnSourceAsBoolean); } } @@ -2396,10 +2229,9 @@ public Table sort(SortPair... sortPairs) { } /** - * This is the smallest "base" that is used by the ungroup function. Each row from the input - * table is allocated 2^minimumUngroupBase rows in the output table at startup. If rows are - * added to the table, this base may need to grow. 
If a single row in the input has more than - * 2^base rows, then the base must change for all of the rows. + * This is the smallest "base" that is used by the ungroup function. Each row from the input table is allocated + * 2^minimumUngroupBase rows in the output table at startup. If rows are added to the table, this base may need to + * grow. If a single row in the input has more than 2^base rows, then the base must change for all of the rows. */ private static int minimumUngroupBase = 10; @@ -2416,9 +2248,8 @@ static int setMinimumUngroupBase(int minimumUngroupBase) { } /** - * The reverse operation returns a new table that is the same as the original table, but the - * first row is last, and the last row is first. This is an internal API to be used by .raj(), - * but is accessible for unit tests. + * The reverse operation returns a new table that is the same as the original table, but the first row is last, and + * the last row is first. This is an internal API to be used by .raj(), but is accessible for unit tests. * * @return the reversed table */ @@ -2429,233 +2260,212 @@ public Table reverse() { @Override public Table ungroup(boolean nullFill, String... 
columnsToUngroupBy) { - return QueryPerformanceRecorder.withNugget( - "ungroup(" + Arrays.toString(columnsToUngroupBy) + ")", sizeForInstrumentation(), - () -> { - if (columnsToUngroupBy.length == 0) { - if (isRefreshing()) { - manageWithCurrentScope(); + return QueryPerformanceRecorder.withNugget("ungroup(" + Arrays.toString(columnsToUngroupBy) + ")", + sizeForInstrumentation(), () -> { + if (columnsToUngroupBy.length == 0) { + if (isRefreshing()) { + manageWithCurrentScope(); + } + return this; } - return this; - } - checkInitiateOperation(); + checkInitiateOperation(); - final Map arrayColumns = new HashMap<>(); - final Map dbArrayColumns = new HashMap<>(); - for (String name : columnsToUngroupBy) { - ColumnSource column = getColumnSource(name); - if (column.getType().isArray()) { - arrayColumns.put(name, column); - } else if (DbArrayBase.class.isAssignableFrom(column.getType())) { - dbArrayColumns.put(name, column); - } else { - throw new RuntimeException("Column " + name + " is not an array"); + final Map arrayColumns = new HashMap<>(); + final Map dbArrayColumns = new HashMap<>(); + for (String name : columnsToUngroupBy) { + ColumnSource column = getColumnSource(name); + if (column.getType().isArray()) { + arrayColumns.put(name, column); + } else if (DbArrayBase.class.isAssignableFrom(column.getType())) { + dbArrayColumns.put(name, column); + } else { + throw new RuntimeException("Column " + name + " is not an array"); + } } - } - final long[] sizes = new long[intSize("ungroup")]; - long maxSize = - computeMaxSize(index, arrayColumns, dbArrayColumns, null, sizes, nullFill); - final int initialBase = - Math.max(64 - Long.numberOfLeadingZeros(maxSize), minimumUngroupBase); - final CrossJoinShiftState shiftState = new CrossJoinShiftState(initialBase); - - final Map resultMap = new LinkedHashMap<>(); - for (Map.Entry es : getColumnSourceMap().entrySet()) { - final ColumnSource column = es.getValue(); - final String name = es.getKey(); - final ColumnSource result; 
- if (dbArrayColumns.containsKey(name) || arrayColumns.containsKey(name)) { - final UngroupedColumnSource ungroupedSource = - UngroupedColumnSource.getColumnSource(column); - ungroupedSource.initializeBase(initialBase); - result = ungroupedSource; - } else { - // noinspection unchecked - result = new BitShiftingColumnSource(shiftState, column); + final long[] sizes = new long[intSize("ungroup")]; + long maxSize = computeMaxSize(index, arrayColumns, dbArrayColumns, null, sizes, nullFill); + final int initialBase = Math.max(64 - Long.numberOfLeadingZeros(maxSize), minimumUngroupBase); + final CrossJoinShiftState shiftState = new CrossJoinShiftState(initialBase); + + final Map resultMap = new LinkedHashMap<>(); + for (Map.Entry es : getColumnSourceMap().entrySet()) { + final ColumnSource column = es.getValue(); + final String name = es.getKey(); + final ColumnSource result; + if (dbArrayColumns.containsKey(name) || arrayColumns.containsKey(name)) { + final UngroupedColumnSource ungroupedSource = UngroupedColumnSource.getColumnSource(column); + ungroupedSource.initializeBase(initialBase); + result = ungroupedSource; + } else { + // noinspection unchecked + result = new BitShiftingColumnSource(shiftState, column); + } + resultMap.put(name, result); } - resultMap.put(name, result); - } - final QueryTable result = new QueryTable( - getUngroupIndex(sizes, Index.FACTORY.getRandomBuilder(), initialBase, index) - .getIndex(), - resultMap); - if (isRefreshing()) { - startTrackingPrev(resultMap.values()); - - listenForUpdates(new ListenerImpl( - "ungroup(" + Arrays.deepToString(columnsToUngroupBy) + ')', this, result) { - - @Override - public void onUpdate(final Index added, final Index removed, - final Index modified) { - intSize("ungroup"); - - int newBase = shiftState.getNumShiftBits(); - Index.RandomBuilder ungroupAdded = Index.FACTORY.getRandomBuilder(); - Index.RandomBuilder ungroupModified = Index.FACTORY.getRandomBuilder(); - Index.RandomBuilder ungroupRemoved = 
Index.FACTORY.getRandomBuilder(); - newBase = evaluateIndex(added, ungroupAdded, newBase); - newBase = evaluateModified(modified, ungroupModified, ungroupAdded, - ungroupRemoved, newBase); - if (newBase > shiftState.getNumShiftBits()) { - rebase(newBase + 1); - } else { - evaluateRemovedIndex(removed, ungroupRemoved); - final Index removedIndex = ungroupRemoved.getIndex(); - final Index addedIndex = ungroupAdded.getIndex(); - result.getIndex().update(addedIndex, removedIndex); - final Index modifiedIndex = ungroupModified.getIndex(); - - if (!modifiedIndex.subsetOf(result.getIndex())) { - final Index missingModifications = - modifiedIndex.minus(result.getIndex()); - log.error().append("Result Index: ") - .append(result.getIndex().toString()).endl(); - log.error().append("Missing modifications: ") - .append(missingModifications.toString()).endl(); - log.error().append("Added: ").append(addedIndex.toString()) - .endl(); - log.error().append("Modified: ") - .append(modifiedIndex.toString()).endl(); - log.error().append("Removed: ").append(removedIndex.toString()) - .endl(); - - for (Map.Entry es : arrayColumns - .entrySet()) { - ColumnSource arrayColumn = es.getValue(); - String name = es.getKey(); - - Index.Iterator iterator = index.iterator(); - for (int i = 0; i < index.size(); i++) { - final long next = iterator.nextLong(); - int size = (arrayColumn.get(next) == null ? 0 - : Array.getLength(arrayColumn.get(next))); - int prevSize = (arrayColumn.getPrev(next) == null ? 
0 - : Array.getLength(arrayColumn.getPrev(next))); - log.error().append(name).append("[").append(i) - .append("] ").append(size).append(" -> ") - .append(prevSize).endl(); + final QueryTable result = new QueryTable( + getUngroupIndex(sizes, Index.FACTORY.getRandomBuilder(), initialBase, index).getIndex(), + resultMap); + if (isRefreshing()) { + startTrackingPrev(resultMap.values()); + + listenForUpdates(new ListenerImpl("ungroup(" + Arrays.deepToString(columnsToUngroupBy) + ')', + this, result) { + + @Override + public void onUpdate(final Index added, final Index removed, final Index modified) { + intSize("ungroup"); + + int newBase = shiftState.getNumShiftBits(); + Index.RandomBuilder ungroupAdded = Index.FACTORY.getRandomBuilder(); + Index.RandomBuilder ungroupModified = Index.FACTORY.getRandomBuilder(); + Index.RandomBuilder ungroupRemoved = Index.FACTORY.getRandomBuilder(); + newBase = evaluateIndex(added, ungroupAdded, newBase); + newBase = evaluateModified(modified, ungroupModified, ungroupAdded, ungroupRemoved, + newBase); + if (newBase > shiftState.getNumShiftBits()) { + rebase(newBase + 1); + } else { + evaluateRemovedIndex(removed, ungroupRemoved); + final Index removedIndex = ungroupRemoved.getIndex(); + final Index addedIndex = ungroupAdded.getIndex(); + result.getIndex().update(addedIndex, removedIndex); + final Index modifiedIndex = ungroupModified.getIndex(); + + if (!modifiedIndex.subsetOf(result.getIndex())) { + final Index missingModifications = modifiedIndex.minus(result.getIndex()); + log.error().append("Result Index: ").append(result.getIndex().toString()) + .endl(); + log.error().append("Missing modifications: ") + .append(missingModifications.toString()).endl(); + log.error().append("Added: ").append(addedIndex.toString()).endl(); + log.error().append("Modified: ").append(modifiedIndex.toString()).endl(); + log.error().append("Removed: ").append(removedIndex.toString()).endl(); + + for (Map.Entry es : arrayColumns.entrySet()) { + ColumnSource 
arrayColumn = es.getValue(); + String name = es.getKey(); + + Index.Iterator iterator = index.iterator(); + for (int i = 0; i < index.size(); i++) { + final long next = iterator.nextLong(); + int size = (arrayColumn.get(next) == null ? 0 + : Array.getLength(arrayColumn.get(next))); + int prevSize = (arrayColumn.getPrev(next) == null ? 0 + : Array.getLength(arrayColumn.getPrev(next))); + log.error().append(name).append("[").append(i).append("] ").append(size) + .append(" -> ").append(prevSize).endl(); + } } + + for (Map.Entry es : dbArrayColumns.entrySet()) { + ColumnSource arrayColumn = es.getValue(); + String name = es.getKey(); + Index.Iterator iterator = index.iterator(); + + for (int i = 0; i < index.size(); i++) { + final long next = iterator.nextLong(); + long size = (arrayColumn.get(next) == null ? 0 + : ((DbArrayBase) arrayColumn.get(next)).size()); + long prevSize = (arrayColumn.getPrev(next) == null ? 0 + : ((DbArrayBase) arrayColumn.getPrev(next)).size()); + log.error().append(name).append("[").append(i).append("] ").append(size) + .append(" -> ").append(prevSize).endl(); + } + } + + Assert.assertion(false, "modifiedIndex.subsetOf(result.getIndex())", + modifiedIndex, "modifiedIndex", result.getIndex(), "result.getIndex()", + shiftState.getNumShiftBits(), "shiftState.getNumShiftBits()", newBase, + "newBase"); } - for (Map.Entry es : dbArrayColumns - .entrySet()) { - ColumnSource arrayColumn = es.getValue(); - String name = es.getKey(); - Index.Iterator iterator = index.iterator(); - - for (int i = 0; i < index.size(); i++) { - final long next = iterator.nextLong(); - long size = (arrayColumn.get(next) == null ? 0 - : ((DbArrayBase) arrayColumn.get(next)).size()); - long prevSize = (arrayColumn.getPrev(next) == null ? 
0 - : ((DbArrayBase) arrayColumn.getPrev(next)).size()); - log.error().append(name).append("[").append(i) - .append("] ").append(size).append(" -> ") - .append(prevSize).endl(); + for (ColumnSource source : resultMap.values()) { + if (source instanceof UngroupedColumnSource) { + ((UngroupedColumnSource) source).setBase(newBase); } } - Assert.assertion(false, - "modifiedIndex.subsetOf(result.getIndex())", - modifiedIndex, "modifiedIndex", result.getIndex(), - "result.getIndex()", - shiftState.getNumShiftBits(), - "shiftState.getNumShiftBits()", newBase, "newBase"); + result.notifyListeners(addedIndex, removedIndex, modifiedIndex); } + } + private void rebase(final int newBase) { + final Index newIndex = getUngroupIndex( + computeSize(getIndex(), arrayColumns, dbArrayColumns, nullFill), + Index.FACTORY.getRandomBuilder(), newBase, getIndex()) + .getIndex(); + final Index index = result.getIndex(); + final Index added = newIndex.minus(index); + final Index removed = index.minus(newIndex); + final Index modified = newIndex; + modified.retain(index); + index.update(added, removed); for (ColumnSource source : resultMap.values()) { if (source instanceof UngroupedColumnSource) { ((UngroupedColumnSource) source).setBase(newBase); } } - - result.notifyListeners(addedIndex, removedIndex, modifiedIndex); + shiftState.setNumShiftBitsAndUpdatePrev(newBase); + result.notifyListeners(added, removed, modified); } - } - private void rebase(final int newBase) { - final Index newIndex = getUngroupIndex( - computeSize(getIndex(), arrayColumns, dbArrayColumns, nullFill), - Index.FACTORY.getRandomBuilder(), newBase, getIndex()) - .getIndex(); - final Index index = result.getIndex(); - final Index added = newIndex.minus(index); - final Index removed = index.minus(newIndex); - final Index modified = newIndex; - modified.retain(index); - index.update(added, removed); - for (ColumnSource source : resultMap.values()) { - if (source instanceof UngroupedColumnSource) { - 
((UngroupedColumnSource) source).setBase(newBase); + private int evaluateIndex(final Index index, final Index.RandomBuilder ungroupBuilder, + final int newBase) { + if (index.size() > 0) { + final long[] modifiedSizes = new long[index.intSize("ungroup")]; + final long maxSize = computeMaxSize(index, arrayColumns, dbArrayColumns, null, + modifiedSizes, nullFill); + final int minBase = 64 - Long.numberOfLeadingZeros(maxSize); + getUngroupIndex(modifiedSizes, ungroupBuilder, shiftState.getNumShiftBits(), index); + return Math.max(newBase, minBase); } + return newBase; } - shiftState.setNumShiftBitsAndUpdatePrev(newBase); - result.notifyListeners(added, removed, modified); - } - private int evaluateIndex(final Index index, - final Index.RandomBuilder ungroupBuilder, final int newBase) { - if (index.size() > 0) { - final long[] modifiedSizes = new long[index.intSize("ungroup")]; - final long maxSize = computeMaxSize(index, arrayColumns, - dbArrayColumns, null, modifiedSizes, nullFill); - final int minBase = 64 - Long.numberOfLeadingZeros(maxSize); - getUngroupIndex(modifiedSizes, ungroupBuilder, - shiftState.getNumShiftBits(), index); - return Math.max(newBase, minBase); - } - return newBase; - } - - private void evaluateRemovedIndex(final Index index, - final Index.RandomBuilder ungroupBuilder) { - if (index.size() > 0) { - final long[] modifiedSizes = new long[index.intSize("ungroup")]; - computePrevSize(index, arrayColumns, dbArrayColumns, modifiedSizes, - nullFill); - getUngroupIndex(modifiedSizes, ungroupBuilder, - shiftState.getNumShiftBits(), index); + private void evaluateRemovedIndex(final Index index, + final Index.RandomBuilder ungroupBuilder) { + if (index.size() > 0) { + final long[] modifiedSizes = new long[index.intSize("ungroup")]; + computePrevSize(index, arrayColumns, dbArrayColumns, modifiedSizes, nullFill); + getUngroupIndex(modifiedSizes, ungroupBuilder, shiftState.getNumShiftBits(), index); + } } - } - private int evaluateModified(final Index 
index, - final Index.RandomBuilder modifyBuilder, - final Index.RandomBuilder addedBuilded, - final Index.RandomBuilder removedBuilder, - final int newBase) { - if (index.size() > 0) { - final long maxSize = computeModifiedIndicesAndMaxSize(index, - arrayColumns, dbArrayColumns, null, modifyBuilder, addedBuilded, - removedBuilder, shiftState.getNumShiftBits(), nullFill); - final int minBase = 64 - Long.numberOfLeadingZeros(maxSize); - return Math.max(newBase, minBase); + private int evaluateModified(final Index index, + final Index.RandomBuilder modifyBuilder, + final Index.RandomBuilder addedBuilded, + final Index.RandomBuilder removedBuilder, + final int newBase) { + if (index.size() > 0) { + final long maxSize = computeModifiedIndicesAndMaxSize(index, arrayColumns, + dbArrayColumns, null, modifyBuilder, addedBuilded, removedBuilder, + shiftState.getNumShiftBits(), nullFill); + final int minBase = 64 - Long.numberOfLeadingZeros(maxSize); + return Math.max(newBase, minBase); + } + return newBase; } - return newBase; - } - }); - } - return result; - }); + }); + } + return result; + }); } - private long computeModifiedIndicesAndMaxSize(Index index, - Map arrayColumns, Map dbArrayColumns, - String referenceColumn, Index.RandomBuilder modifyBuilder, Index.RandomBuilder addedBuilded, - Index.RandomBuilder removedBuilder, long base, boolean nullFill) { + private long computeModifiedIndicesAndMaxSize(Index index, Map arrayColumns, + Map dbArrayColumns, String referenceColumn, Index.RandomBuilder modifyBuilder, + Index.RandomBuilder addedBuilded, Index.RandomBuilder removedBuilder, long base, boolean nullFill) { if (nullFill) { - return computeModifiedIndicesAndMaxSizeNullFill(index, arrayColumns, dbArrayColumns, - referenceColumn, modifyBuilder, addedBuilded, removedBuilder, base); + return computeModifiedIndicesAndMaxSizeNullFill(index, arrayColumns, dbArrayColumns, referenceColumn, + modifyBuilder, addedBuilded, removedBuilder, base); } - return 
computeModifiedIndicesAndMaxSizeNormal(index, arrayColumns, dbArrayColumns, - referenceColumn, modifyBuilder, addedBuilded, removedBuilder, base); + return computeModifiedIndicesAndMaxSizeNormal(index, arrayColumns, dbArrayColumns, referenceColumn, + modifyBuilder, addedBuilded, removedBuilder, base); } - private long computeModifiedIndicesAndMaxSizeNullFill(Index index, - Map arrayColumns, Map dbArrayColumns, - String referenceColumn, Index.RandomBuilder modifyBuilder, Index.RandomBuilder addedBuilded, - Index.RandomBuilder removedBuilder, long base) { + private long computeModifiedIndicesAndMaxSizeNullFill(Index index, Map arrayColumns, + Map dbArrayColumns, String referenceColumn, Index.RandomBuilder modifyBuilder, + Index.RandomBuilder addedBuilded, Index.RandomBuilder removedBuilder, long base) { long maxSize = 0; final Index.Iterator iterator = index.iterator(); for (int i = 0; i < index.size(); i++) { @@ -2680,16 +2490,15 @@ private long computeModifiedIndicesAndMaxSizeNullFill(Index index, final long prevSize = (prevArray == null ? 
0 : prevArray.size()); maxPrev = Math.max(maxPrev, prevSize); } - maxSize = maxAndIndexUpdateForRow(modifyBuilder, addedBuilded, removedBuilder, maxSize, - maxCur, next, maxPrev, base); + maxSize = maxAndIndexUpdateForRow(modifyBuilder, addedBuilded, removedBuilder, maxSize, maxCur, next, + maxPrev, base); } return maxSize; } - private long computeModifiedIndicesAndMaxSizeNormal(Index index, - Map arrayColumns, Map dbArrayColumns, - String referenceColumn, Index.RandomBuilder modifyBuilder, Index.RandomBuilder addedBuilded, - Index.RandomBuilder removedBuilder, long base) { + private long computeModifiedIndicesAndMaxSizeNormal(Index index, Map arrayColumns, + Map dbArrayColumns, String referenceColumn, Index.RandomBuilder modifyBuilder, + Index.RandomBuilder addedBuilded, Index.RandomBuilder removedBuilder, long base) { long maxSize = 0; boolean sizeIsInitialized = false; long sizes[] = new long[index.intSize("ungroup")]; @@ -2706,16 +2515,16 @@ private long computeModifiedIndicesAndMaxSizeNormal(Index index, sizes[i] = (array == null ? 0 : Array.getLength(array)); Object prevArray = arrayColumn.getPrev(next); int prevSize = (prevArray == null ? 0 : Array.getLength(prevArray)); - maxSize = maxAndIndexUpdateForRow(modifyBuilder, addedBuilded, removedBuilder, - maxSize, sizes[i], next, prevSize, base); + maxSize = maxAndIndexUpdateForRow(modifyBuilder, addedBuilded, removedBuilder, maxSize, sizes[i], + next, prevSize, base); } } else { Index.Iterator iterator = index.iterator(); for (int i = 0; i < index.size(); i++) { long k = iterator.nextLong(); Assert.assertion(sizes[i] == Array.getLength(arrayColumn.get(k)), - "sizes[i] == Array.getLength(arrayColumn.get(k))", - referenceColumn, "referenceColumn", name, "name", k, "row"); + "sizes[i] == Array.getLength(arrayColumn.get(k))", + referenceColumn, "referenceColumn", name, "name", k, "row"); } } @@ -2733,30 +2542,28 @@ private long computeModifiedIndicesAndMaxSizeNormal(Index index, sizes[i] = (array == null ? 
0 : array.size()); DbArrayBase prevArray = (DbArrayBase) arrayColumn.getPrev(next); long prevSize = (prevArray == null ? 0 : prevArray.size()); - maxSize = maxAndIndexUpdateForRow(modifyBuilder, addedBuilded, removedBuilder, - maxSize, sizes[i], next, prevSize, base); + maxSize = maxAndIndexUpdateForRow(modifyBuilder, addedBuilded, removedBuilder, maxSize, sizes[i], + next, prevSize, base); } } else { Index.Iterator iterator = index.iterator(); for (int i = 0; i < index.size(); i++) { final long next = iterator.nextLong(); Assert.assertion(sizes[i] == 0 && arrayColumn.get(next) == null || - sizes[i] == ((DbArrayBase) arrayColumn.get(next)).size(), - "sizes[i] == ((DbArrayBase)arrayColumn.get(i)).size()", - referenceColumn, "referenceColumn", name, "arrayColumn.getName()", i, - "row"); + sizes[i] == ((DbArrayBase) arrayColumn.get(next)).size(), + "sizes[i] == ((DbArrayBase)arrayColumn.get(i)).size()", + referenceColumn, "referenceColumn", name, "arrayColumn.getName()", i, "row"); } } } return maxSize; } - private long maxAndIndexUpdateForRow(Index.RandomBuilder modifyBuilder, - Index.RandomBuilder addedBuilded, Index.RandomBuilder removedBuilder, long maxSize, - long size, long rowKey, long prevSize, long base) { + private long maxAndIndexUpdateForRow(Index.RandomBuilder modifyBuilder, Index.RandomBuilder addedBuilded, + Index.RandomBuilder removedBuilder, long maxSize, long size, long rowKey, long prevSize, long base) { rowKey = rowKey << base; Require.requirement(rowKey >= 0 && (size == 0 || (rowKey + size - 1 >= 0)), - "rowKey >= 0 && (size == 0 || (rowKey + size - 1 >= 0))"); + "rowKey >= 0 && (size == 0 || (rowKey + size - 1 >= 0))"); if (size == prevSize) { if (size > 0) { modifyBuilder.addRange(rowKey, rowKey + size - 1); @@ -2777,8 +2584,7 @@ private long maxAndIndexUpdateForRow(Index.RandomBuilder modifyBuilder, } private static long computeMaxSize(Index index, Map arrayColumns, - Map dbArrayColumns, String referenceColumn, long[] sizes, - boolean nullFill) 
{ + Map dbArrayColumns, String referenceColumn, long[] sizes, boolean nullFill) { if (nullFill) { return computeMaxSizeNullFill(index, arrayColumns, dbArrayColumns, sizes); } @@ -2787,7 +2593,7 @@ private static long computeMaxSize(Index index, Map arrayC } private static long computeMaxSizeNullFill(Index index, Map arrayColumns, - Map dbArrayColumns, long[] sizes) { + Map dbArrayColumns, long[] sizes) { long maxSize = 0; final Index.Iterator iterator = index.iterator(); for (int i = 0; i < index.size(); i++) { @@ -2804,7 +2610,7 @@ private static long computeMaxSizeNullFill(Index index, Map es : dbArrayColumns.entrySet()) { final ColumnSource arrayColumn = es.getValue(); final boolean isUngroupable = arrayColumn instanceof UngroupableColumnSource - && ((UngroupableColumnSource) arrayColumn).isUngroupable(); + && ((UngroupableColumnSource) arrayColumn).isUngroupable(); final long size; if (isUngroupable) { size = ((UngroupableColumnSource) arrayColumn).getUngroupedSize(nextIndex); @@ -2822,7 +2628,7 @@ private static long computeMaxSizeNullFill(Index index, Map arrayColumns, - Map dbArrayColumns, String referenceColumn, long[] sizes) { + Map dbArrayColumns, String referenceColumn, long[] sizes) { long maxSize = 0; boolean sizeIsInitialized = false; for (Map.Entry es : arrayColumns.entrySet()) { @@ -2840,10 +2646,9 @@ private static long computeMaxSizeNormal(Index index, Map } else { Index.Iterator iterator = index.iterator(); for (int i = 0; i < index.size(); i++) { - Assert.assertion( - sizes[i] == Array.getLength(arrayColumn.get(iterator.nextLong())), - "sizes[i] == Array.getLength(arrayColumn.get(i))", - referenceColumn, "referenceColumn", name, "name", i, "row"); + Assert.assertion(sizes[i] == Array.getLength(arrayColumn.get(iterator.nextLong())), + "sizes[i] == Array.getLength(arrayColumn.get(i))", + referenceColumn, "referenceColumn", name, "name", i, "row"); } } @@ -2852,7 +2657,7 @@ private static long computeMaxSizeNormal(Index index, Map final 
ColumnSource arrayColumn = es.getValue(); final String name = es.getKey(); final boolean isUngroupable = arrayColumn instanceof UngroupableColumnSource - && ((UngroupableColumnSource) arrayColumn).isUngroupable(); + && ((UngroupableColumnSource) arrayColumn).isUngroupable(); if (!sizeIsInitialized) { sizeIsInitialized = true; @@ -2860,11 +2665,9 @@ private static long computeMaxSizeNormal(Index index, Map Index.Iterator iterator = index.iterator(); for (int ii = 0; ii < index.size(); ii++) { if (isUngroupable) { - sizes[ii] = ((UngroupableColumnSource) arrayColumn) - .getUngroupedSize(iterator.nextLong()); + sizes[ii] = ((UngroupableColumnSource) arrayColumn).getUngroupedSize(iterator.nextLong()); } else { - final DbArrayBase dbArrayBase = - (DbArrayBase) arrayColumn.get(iterator.nextLong()); + final DbArrayBase dbArrayBase = (DbArrayBase) arrayColumn.get(iterator.nextLong()); sizes[ii] = dbArrayBase != null ? dbArrayBase.size() : 0; } maxSize = Math.max(maxSize, sizes[ii]); @@ -2874,17 +2677,13 @@ private static long computeMaxSizeNormal(Index index, Map for (int i = 0; i < index.size(); i++) { final long expectedSize; if (isUngroupable) { - expectedSize = ((UngroupableColumnSource) arrayColumn) - .getUngroupedSize(iterator.nextLong()); + expectedSize = ((UngroupableColumnSource) arrayColumn).getUngroupedSize(iterator.nextLong()); } else { - final DbArrayBase dbArrayBase = - (DbArrayBase) arrayColumn.get(iterator.nextLong()); + final DbArrayBase dbArrayBase = (DbArrayBase) arrayColumn.get(iterator.nextLong()); expectedSize = dbArrayBase != null ? 
dbArrayBase.size() : 0; } - Assert.assertion(sizes[i] == expectedSize, - "sizes[i] == ((DbArrayBase)arrayColumn.get(i)).size()", - referenceColumn, "referenceColumn", name, "arrayColumn.getName()", i, - "row"); + Assert.assertion(sizes[i] == expectedSize, "sizes[i] == ((DbArrayBase)arrayColumn.get(i)).size()", + referenceColumn, "referenceColumn", name, "arrayColumn.getName()", i, "row"); } } } @@ -2892,7 +2691,7 @@ private static long computeMaxSizeNormal(Index index, Map } private static void computePrevSize(Index index, Map arrayColumns, - Map dbArrayColumns, long[] sizes, boolean nullFill) { + Map dbArrayColumns, long[] sizes, boolean nullFill) { if (nullFill) { computePrevSizeNullFill(index, arrayColumns, dbArrayColumns, sizes); } else { @@ -2901,7 +2700,7 @@ private static void computePrevSize(Index index, Map array } private static void computePrevSizeNullFill(Index index, Map arrayColumns, - Map dbArrayColumns, long[] sizes) { + Map dbArrayColumns, long[] sizes) { final Index.Iterator iterator = index.iterator(); for (int i = 0; i < index.size(); i++) { long localMax = 0; @@ -2916,7 +2715,7 @@ private static void computePrevSizeNullFill(Index index, Map es : dbArrayColumns.entrySet()) { final ColumnSource arrayColumn = es.getValue(); final boolean isUngroupable = arrayColumn instanceof UngroupableColumnSource - && ((UngroupableColumnSource) arrayColumn).isUngroupable(); + && ((UngroupableColumnSource) arrayColumn).isUngroupable(); final long size; if (isUngroupable) { size = ((UngroupableColumnSource) arrayColumn).getUngroupedPrevSize(nextIndex); @@ -2931,7 +2730,7 @@ private static void computePrevSizeNullFill(Index index, Map arrayColumns, - Map dbArrayColumns, long[] sizes) { + Map dbArrayColumns, long[] sizes) { for (ColumnSource arrayColumn : arrayColumns.values()) { Index.Iterator iterator = index.iterator(); for (int i = 0; i < index.size(); i++) { @@ -2942,13 +2741,12 @@ private static void computePrevSizeNormal(Index index, Map } for (ColumnSource 
arrayColumn : dbArrayColumns.values()) { final boolean isUngroupable = arrayColumn instanceof UngroupableColumnSource - && ((UngroupableColumnSource) arrayColumn).isUngroupable(); + && ((UngroupableColumnSource) arrayColumn).isUngroupable(); Index.Iterator iterator = index.iterator(); for (int i = 0; i < index.size(); i++) { if (isUngroupable) { - sizes[i] = ((UngroupableColumnSource) arrayColumn) - .getUngroupedPrevSize(iterator.nextLong()); + sizes[i] = ((UngroupableColumnSource) arrayColumn).getUngroupedPrevSize(iterator.nextLong()); } else { DbArrayBase array = (DbArrayBase) arrayColumn.getPrev(iterator.nextLong()); sizes[i] = array == null ? 0 : array.size(); @@ -2959,7 +2757,7 @@ private static void computePrevSizeNormal(Index index, Map } private static long[] computeSize(Index index, Map arrayColumns, - Map dbArrayColumns, boolean nullFill) { + Map dbArrayColumns, boolean nullFill) { if (nullFill) { return computeSizeNullFill(index, arrayColumns, dbArrayColumns); } @@ -2968,7 +2766,7 @@ private static long[] computeSize(Index index, Map arrayCo } private static long[] computeSizeNullFill(Index index, Map arrayColumns, - Map dbArrayColumns) { + Map dbArrayColumns) { final long[] sizes = new long[index.intSize("ungroup")]; final Index.Iterator iterator = index.iterator(); for (int i = 0; i < index.size(); i++) { @@ -2984,7 +2782,7 @@ private static long[] computeSizeNullFill(Index index, Map for (Map.Entry es : dbArrayColumns.entrySet()) { final ColumnSource arrayColumn = es.getValue(); final boolean isUngroupable = arrayColumn instanceof UngroupableColumnSource - && ((UngroupableColumnSource) arrayColumn).isUngroupable(); + && ((UngroupableColumnSource) arrayColumn).isUngroupable(); final long size; if (isUngroupable) { size = ((UngroupableColumnSource) arrayColumn).getUngroupedSize(nextIndex); @@ -3000,7 +2798,7 @@ private static long[] computeSizeNullFill(Index index, Map } private static long[] computeSizeNormal(Index index, Map arrayColumns, - Map 
dbArrayColumns) { + Map dbArrayColumns) { final long[] sizes = new long[index.intSize("ungroup")]; for (ColumnSource arrayColumn : arrayColumns.values()) { Index.Iterator iterator = index.iterator(); @@ -3012,13 +2810,12 @@ private static long[] computeSizeNormal(Index index, Map a } for (ColumnSource arrayColumn : dbArrayColumns.values()) { final boolean isUngroupable = arrayColumn instanceof UngroupableColumnSource - && ((UngroupableColumnSource) arrayColumn).isUngroupable(); + && ((UngroupableColumnSource) arrayColumn).isUngroupable(); Index.Iterator iterator = index.iterator(); for (int i = 0; i < index.size(); i++) { if (isUngroupable) { - sizes[i] = ((UngroupableColumnSource) arrayColumn) - .getUngroupedSize(iterator.nextLong()); + sizes[i] = ((UngroupableColumnSource) arrayColumn).getUngroupedSize(iterator.nextLong()); } else { DbArrayBase array = (DbArrayBase) arrayColumn.get(iterator.nextLong()); sizes[i] = array == null ? 0 : array.size(); @@ -3030,15 +2827,14 @@ private static long[] computeSizeNormal(Index index, Map a } private IndexBuilder getUngroupIndex( - final long[] sizes, final Index.RandomBuilder indexBuilder, final long base, - final Index index) { + final long[] sizes, final Index.RandomBuilder indexBuilder, final long base, final Index index) { Assert.assertion(base >= 0 && base <= 63, "base >= 0 && base <= 63", base, "base"); long mask = ((1L << base) - 1) << (64 - base); long lastKey = index.lastKey(); if ((lastKey > 0) && ((lastKey & mask) != 0)) { throw new IllegalStateException( - "Key overflow detected, perhaps you should flatten your table before calling ungroup. " - + ",lastKey=" + lastKey + ", base=" + base); + "Key overflow detected, perhaps you should flatten your table before calling ungroup. 
" + + ",lastKey=" + lastKey + ", base=" + base); } int pos = 0; @@ -3046,8 +2842,7 @@ private IndexBuilder getUngroupIndex( long next = iterator.nextLong(); long nextShift = next << base; if (sizes[pos] != 0) { - Assert.assertion(nextShift >= 0, "nextShift >= 0", nextShift, "nextShift", base, - "base", next, "next"); + Assert.assertion(nextShift >= 0, "nextShift >= 0", nextShift, "nextShift", base, "base", next, "next"); indexBuilder.addRange(nextShift, nextShift + sizes[pos++] - 1); } else { pos++; @@ -3058,9 +2853,9 @@ private IndexBuilder getUngroupIndex( @Override public Table selectDistinct(SelectColumn... columns) { - return QueryPerformanceRecorder.withNugget( - "selectDistinct(" + Arrays.toString(columns) + ")", sizeForInstrumentation(), - () -> by(new SelectDistinctStateFactoryImpl(), columns)); + return QueryPerformanceRecorder.withNugget("selectDistinct(" + Arrays.toString(columns) + ")", + sizeForInstrumentation(), + () -> by(new SelectDistinctStateFactoryImpl(), columns)); } @Override @@ -3068,15 +2863,12 @@ public QueryTable getSubTable(Index index) { return getSubTable(index, null, CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); } - public QueryTable getSubTable(@NotNull final Index index, - @Nullable final ModifiedColumnSet resultModifiedColumnSet, - @NotNull final Object... parents) { + public QueryTable getSubTable(@NotNull final Index index, @Nullable final ModifiedColumnSet resultModifiedColumnSet, + @NotNull final Object... parents) { return QueryPerformanceRecorder.withNugget("getSubTable", sizeForInstrumentation(), () -> { - // there is no operation check here, because byExternal calls it internally; and the - // Index results are + // there is no operation check here, because byExternal calls it internally; and the Index results are // not updated internally, but rather externally. 
- final QueryTable result = - new QueryTable(definition, index, columns, resultModifiedColumnSet); + final QueryTable result = new QueryTable(definition, index, columns, resultModifiedColumnSet); for (Object parent : parents) { result.addParentReference(parent); } @@ -3105,8 +2897,7 @@ public Table copy(TableDefinition definition, boolean copyAttributes) { return QueryPerformanceRecorder.withNugget("copy()", sizeForInstrumentation(), () -> { final Mutable
    result = new MutableObject<>(); - final ShiftAwareSwapListener swapListener = - createSwapListenerIfRefreshing(ShiftAwareSwapListener::new); + final ShiftAwareSwapListener swapListener = createSwapListenerIfRefreshing(ShiftAwareSwapListener::new); initializeWithSnapshot("copy", swapListener, (usePrev, beforeClockValue) -> { final QueryTable resultTable = new CopiedTable(definition, this); propagateFlatness(resultTable); @@ -3115,8 +2906,7 @@ public Table copy(TableDefinition definition, boolean copyAttributes) { } if (swapListener != null) { - final ShiftAwareListenerImpl listener = - new ShiftAwareListenerImpl("copy()", this, resultTable); + final ShiftAwareListenerImpl listener = new ShiftAwareListenerImpl("copy()", this, resultTable); swapListener.setListenerAndResult(listener, resultTable); resultTable.addParentReference(swapListener); } @@ -3150,14 +2940,12 @@ public R memoizeResult(MemoizedOperationKey memoKey, Supplier operation) return operation.get(); } - final boolean attributesCompatible = - memoKey.attributesCompatible(parent.attributes, attributes); + final boolean attributesCompatible = memoKey.attributesCompatible(parent.attributes, attributes); final Supplier computeCachedOperation = attributesCompatible ? 
() -> { final R parentResult = parent.memoizeResult(memoKey, operation); if (parentResult instanceof QueryTable) { final Table myResult = ((QueryTable) parentResult).copy(false); - copyAttributes((QueryTable) parentResult, myResult, - memoKey.getParentCopyType()); + copyAttributes((QueryTable) parentResult, myResult, memoKey.getParentCopyType()); copyAttributes(myResult, memoKey.copyType()); // noinspection unchecked return (R) myResult; @@ -3208,10 +2996,9 @@ public R memoizeResult(MemoizedOperationKey memoKey, Supplier operation) @NotNull private static MemoizedResult getMemoizedResult(MemoizedOperationKey memoKey, - Map> cachedOperations) { + Map> cachedOperations) { // noinspection unchecked - return (MemoizedResult) cachedOperations.computeIfAbsent(memoKey, - k -> new MemoizedResult<>()); + return (MemoizedResult) cachedOperations.computeIfAbsent(memoKey, k -> new MemoizedResult<>()); } private static class MemoizedResult { @@ -3241,8 +3028,7 @@ R getOrCompute(Supplier operation) { } private void maybeMarkSystemic(R cachedResult) { - if (cachedResult instanceof SystemicObject - && SystemicObjectTracker.isSystemicThread()) { + if (cachedResult instanceof SystemicObject && SystemicObjectTracker.isSystemicThread()) { ((SystemicObject) cachedResult).markSystemic(); } } @@ -3250,8 +3036,7 @@ private void maybeMarkSystemic(R cachedResult) { R getIfValid() { if (reference != null) { final R cachedResult = reference.get(); - if (!isFailedTable(cachedResult) - && Liveness.verifyCachedObjectForReuse(cachedResult)) { + if (!isFailedTable(cachedResult) && Liveness.verifyCachedObjectForReuse(cachedResult)) { return cachedResult; } } @@ -3263,50 +3048,44 @@ private boolean isFailedTable(R cachedResult) { } } - public T getResult( - final Operation operation) { + public T getResult(final Operation operation) { if (operation instanceof MemoizableOperation) { return memoizeResult(((MemoizableOperation) operation).getMemoizedOperationKey(), - () -> getResultNoMemo(operation)); 
+ () -> getResultNoMemo(operation)); } return getResultNoMemo(operation); } - private T getResultNoMemo( - final Operation operation) { - return QueryPerformanceRecorder.withNugget(operation.getDescription(), - sizeForInstrumentation(), () -> { - final Mutable resultTable = new MutableObject<>(); + private T getResultNoMemo(final Operation operation) { + return QueryPerformanceRecorder.withNugget(operation.getDescription(), sizeForInstrumentation(), () -> { + final Mutable resultTable = new MutableObject<>(); - final ShiftAwareSwapListener swapListener; - if (isRefreshing()) { - swapListener = operation.newSwapListener(this); - swapListener.subscribeForUpdates(); - } else { - swapListener = null; - } - - initializeWithSnapshot(operation.getLogPrefix(), swapListener, - (usePrev, beforeClockValue) -> { - final Operation.Result result = - operation.initialize(usePrev, beforeClockValue); - if (result == null) { - return false; - } + final ShiftAwareSwapListener swapListener; + if (isRefreshing()) { + swapListener = operation.newSwapListener(this); + swapListener.subscribeForUpdates(); + } else { + swapListener = null; + } - resultTable.setValue(result.resultNode); - if (swapListener != null) { - swapListener.setListenerAndResult( - Require.neqNull(result.resultListener, "resultListener"), - result.resultNode); - result.resultNode.addParentReference(swapListener); - } + initializeWithSnapshot(operation.getLogPrefix(), swapListener, (usePrev, beforeClockValue) -> { + final Operation.Result result = operation.initialize(usePrev, beforeClockValue); + if (result == null) { + return false; + } - return true; - }); + resultTable.setValue(result.resultNode); + if (swapListener != null) { + swapListener.setListenerAndResult(Require.neqNull(result.resultListener, "resultListener"), + result.resultNode); + result.resultNode.addParentReference(swapListener); + } - return resultTable.getValue(); + return true; }); + + return resultTable.getValue(); + }); } private class 
WhereListener extends MergedListener { @@ -3316,10 +3095,10 @@ private class WhereListener extends MergedListener { private final ModifiedColumnSet filterColumns; private final ListenerRecorder recorder; - private WhereListener(ListenerRecorder recorder, - Collection dependencies, FilteredTable result) { - super(Collections.singleton(recorder), dependencies, - "where(" + Arrays.toString(result.filters) + ")", result); + private WhereListener(ListenerRecorder recorder, Collection dependencies, + FilteredTable result) { + super(Collections.singleton(recorder), dependencies, "where(" + Arrays.toString(result.filters) + ")", + result); this.recorder = recorder; this.result = result; this.currentMapping = result.getIndex(); @@ -3332,8 +3111,7 @@ private WhereListener(ListenerRecorder recorder, filterColumnNames.addAll(filter.getColumns()); } this.filterColumns = hasColumnArray ? null - : newModifiedColumnSet( - filterColumnNames.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)); + : newModifiedColumnSet(filterColumnNames.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)); } @Override @@ -3345,9 +3123,9 @@ public void process() { } if (result.refilterRequested()) { - result.doRefilter(recorder.getAdded(), recorder.getRemoved(), - recorder.getModified(), recorder.getShifted(), - sourceModColumns); + result.doRefilter(recorder.getAdded(), recorder.getRemoved(), recorder.getModified(), + recorder.getShifted(), + sourceModColumns); return; } @@ -3363,16 +3141,13 @@ public void process() { // compute added against filters update.added = whereInternal(recorder.getAdded().clone(), index, false, filters); - // check which modified keys match filters (Note: filterColumns will be null if we must - // always check) - final boolean runFilters = - filterColumns == null || sourceModColumns.containsAny(filterColumns); + // check which modified keys match filters (Note: filterColumns will be null if we must always check) + final boolean runFilters = filterColumns == null || 
sourceModColumns.containsAny(filterColumns); final Index matchingModifies = !runFilters ? Index.FACTORY.getEmptyIndex() - : whereInternal(recorder.getModified(), index, false, filters); + : whereInternal(recorder.getModified(), index, false, filters); // which propagate as mods? - update.modified = - (runFilters ? matchingModifies : recorder.getModified()).intersect(currentMapping); + update.modified = (runFilters ? matchingModifies : recorder.getModified()).intersect(currentMapping); // remaining matchingModifies are adds update.added.insert(matchingModifies.minus(update.modified)); @@ -3407,10 +3182,8 @@ public void process() { private static class StaticWhereListener extends MergedListener { private final FilteredTable result; - private StaticWhereListener(Collection dependencies, - FilteredTable result) { - super(Collections.emptyList(), dependencies, - "where(" + Arrays.toString(result.filters) + ")", result); + private StaticWhereListener(Collection dependencies, FilteredTable result) { + super(Collections.emptyList(), dependencies, "where(" + Arrays.toString(result.filters) + ")", result); this.result = result; } @@ -3437,8 +3210,7 @@ static void checkInitiateOperation(Table other) { @Override public R apply(Function.Unary function) { if (function instanceof MemoizedOperationKey.Provider) { - return memoizeResult(((MemoizedOperationKey.Provider) function).getMemoKey(), - () -> super.apply(function)); + return memoizeResult(((MemoizedOperationKey.Provider) function).getMemoKey(), () -> super.apply(function)); } return super.apply(function); diff --git a/DB/src/main/java/io/deephaven/db/v2/RedefinableTable.java b/DB/src/main/java/io/deephaven/db/v2/RedefinableTable.java index 5e482f9e995..e3450cad5c0 100644 --- a/DB/src/main/java/io/deephaven/db/v2/RedefinableTable.java +++ b/DB/src/main/java/io/deephaven/db/v2/RedefinableTable.java @@ -19,8 +19,7 @@ */ public abstract class RedefinableTable extends UncoalescedTable { - protected RedefinableTable(@NotNull 
final TableDefinition definition, - @NotNull final String description) { + protected RedefinableTable(@NotNull final TableDefinition definition, @NotNull final String description) { super(definition, description); } @@ -38,29 +37,28 @@ public Table view(SelectColumn... columns) { List usedColumnNames = selectColumn.initDef(allColumns); columnDependency.put(selectColumn.getName(), new HashSet<>(usedColumnNames)); resultColumnsInternal.addAll(usedColumnNames.stream() - .filter(usedColumnName -> !resultColumnsExternal.containsKey(usedColumnName)) - .map(definition::getColumn).collect(Collectors.toList())); + .filter(usedColumnName -> !resultColumnsExternal.containsKey(usedColumnName)) + .map(definition::getColumn).collect(Collectors.toList())); final ColumnDefinition columnDef; if (selectColumn.isRetain()) { columnDef = definition.getColumn(selectColumn.getName()); } else { simpleRetain = false; // noinspection unchecked - columnDef = ColumnDefinition.fromGenericType(selectColumn.getName(), - selectColumn.getReturnedType()); + columnDef = ColumnDefinition.fromGenericType(selectColumn.getName(), selectColumn.getReturnedType()); } resultColumnsExternal.put(selectColumn.getName(), columnDef); allColumns.put(selectColumn.getName(), columnDef); } - TableDefinition newDefExternal = new TableDefinition(resultColumnsExternal.values() - .toArray(new ColumnDefinition[resultColumnsExternal.size()])); + TableDefinition newDefExternal = new TableDefinition( + resultColumnsExternal.values().toArray(new ColumnDefinition[resultColumnsExternal.size()])); if (simpleRetain) { // NB: We use the *external* TableDefinition because it's ordered appropriately. 
return redefine(newDefExternal); } - TableDefinition newDefInternal = new TableDefinition( - resultColumnsInternal.toArray(new ColumnDefinition[resultColumnsInternal.size()])); + TableDefinition newDefInternal = + new TableDefinition(resultColumnsInternal.toArray(new ColumnDefinition[resultColumnsInternal.size()])); return redefine(newDefExternal, newDefInternal, columns, columnDependency); } @@ -89,8 +87,8 @@ public Table dropColumns(final String... columnNames) { final Set existingColumns = new HashSet<>(definition.getColumnNames()); if (!existingColumns.containsAll(columnNamesToDrop)) { columnNamesToDrop.removeAll(existingColumns); - throw new RuntimeException("Unknown columns: " + columnNamesToDrop.toString() - + ", available columns = " + getColumnSourceMap().keySet()); + throw new RuntimeException("Unknown columns: " + columnNamesToDrop.toString() + ", available columns = " + + getColumnSourceMap().keySet()); } List resultColumns = new ArrayList<>(); @@ -111,16 +109,14 @@ public Table renameColumns(MatchPair... pairs) { Map pairLookup = new HashMap<>(); for (MatchPair pair : pairs) { if (pair.leftColumn == null || pair.leftColumn.equals("")) { - throw new IllegalArgumentException( - "Bad left column in rename pair \"" + pair.toString() + "\""); + throw new IllegalArgumentException("Bad left column in rename pair \"" + pair.toString() + "\""); } ColumnDefinition cDef = definition.getColumn(pair.rightColumn); if (cDef == null) { throw new IllegalArgumentException("Column \"" + pair.rightColumn + "\" not found"); } pairLookup.put(pair.rightColumn, pair.leftColumn); - columnDependency.put(pair.leftColumn, - new HashSet<>(Collections.singletonList(pair.rightColumn))); + columnDependency.put(pair.leftColumn, new HashSet<>(Collections.singletonList(pair.rightColumn))); } ColumnDefinition columnDefinitions[] = definition.getColumns(); @@ -137,33 +133,29 @@ public Table renameColumns(MatchPair... 
pairs) { viewColumns[ci] = new SourceColumn(cDef.getName(), newName); } } - return redefine(new TableDefinition(resultColumnsExternal), definition, viewColumns, - columnDependency); + return redefine(new TableDefinition(resultColumnsExternal), definition, viewColumns, columnDependency); } /** * Redefine this table with a subset of its current columns. * - * @param newDefinition A TableDefinition with a subset of this RedefinableTable's - * ColumnDefinitions. + * @param newDefinition A TableDefinition with a subset of this RedefinableTable's ColumnDefinitions. * @return */ protected abstract Table redefine(TableDefinition newDefinition); /** - * Redefine this table with a subset of its current columns, with a potentially-differing - * definition to present to external interfaces and one or more select columns to apply. + * Redefine this table with a subset of its current columns, with a potentially-differing definition to present to + * external interfaces and one or more select columns to apply. * * @param newDefinitionExternal A TableDefinition that represents the results of * redefine(newDefinitionInternal).view(viewColumns). - * @param newDefinitionInternal A TableDefinition with a subset of this RedefinableTable's - * ColumnDefinitions. - * @param viewColumns A set of SelectColumns to apply in order to transform a table with - * newDefinitionInternal to a table with newDefinitionExternal. + * @param newDefinitionInternal A TableDefinition with a subset of this RedefinableTable's ColumnDefinitions. + * @param viewColumns A set of SelectColumns to apply in order to transform a table with newDefinitionInternal to a + * table with newDefinitionExternal. 
* @param columnDependency * @return */ - protected abstract Table redefine(TableDefinition newDefinitionExternal, - TableDefinition newDefinitionInternal, SelectColumn[] viewColumns, - Map> columnDependency); + protected abstract Table redefine(TableDefinition newDefinitionExternal, TableDefinition newDefinitionInternal, + SelectColumn[] viewColumns, Map> columnDependency); } diff --git a/DB/src/main/java/io/deephaven/db/v2/ReplicateHashTable.java b/DB/src/main/java/io/deephaven/db/v2/ReplicateHashTable.java index f9021496063..0651d75ee73 100644 --- a/DB/src/main/java/io/deephaven/db/v2/ReplicateHashTable.java +++ b/DB/src/main/java/io/deephaven/db/v2/ReplicateHashTable.java @@ -28,31 +28,27 @@ /** * This code replicator is designed to operate differently than the other replication in our system. * - * It reads both the source and destination file, preserving custom code inside of the destination - * file. + * It reads both the source and destination file, preserving custom code inside of the destination file. * - * The source and destination files must provide annotations for their state column source, overflow - * column source, and empty state value. These are used to translate names, and also to determine - * the appropriate types for substitution. + * The source and destination files must provide annotations for their state column source, overflow column source, and + * empty state value. These are used to translate names, and also to determine the appropriate types for substitution. * * The source file has three kinds of structured comments that control behavior. *
      - *
    • regions, denoted by // region name and - * // endregion name are snippets of code that change between the source and - * destination. You should edit the code within a region in either the source or destination file. - * Each region that exists in the source must exist in the destination (this is a sanity check to - * prevent you from overwriting your work). Regions must have unique names.
    • + *
    • regions, denoted by // region name and // endregion name are + * snippets of code that change between the source and destination. You should edit the code within a region in either + * the source or destination file. Each region that exists in the source must exist in the destination (this is a sanity + * check to prevent you from overwriting your work). Regions must have unique names.
    • * - *
    • mixins, denoted by // mixin name and - * // mixin name are snippets of code that may not be useful in the destination - * class. Any mixins in the destination class will be overwritten! A mixin can be spread across - * multiple structured blocks, for example imports and a function definition may both use the same - * mixin name. Regions may exist inside a mixin. When mixins are excluded, the regions that exist - * within them are ignored.
    • + *
    • mixins, denoted by // mixin name and // mixin name are snippets of + * code that may not be useful in the destination class. Any mixins in the destination class will be overwritten! A + * mixin can be spread across multiple structured blocks, for example imports and a function definition may both use the + * same mixin name. Regions may exist inside a mixin. When mixins are excluded, the regions that exist within them are + * ignored.
    • * - *
    • substitutions, denoted by // @thing from literal are - * instructions to replace a particular literal with the appropriate type denoted by thing on the - * next line. Multiple substitutions may be separated using commas. The valid substitutions are: + *
    • substitutions, denoted by // @thing from literal are instructions to replace a + * particular literal with the appropriate type denoted by thing on the next line. Multiple substitutions may be + * separated using commas. The valid substitutions are: *
        *
      • StateChunkName, e.g. "LongChunk"
      • *
      • StateChunkIdentityName, e.g. "LongChunk" or "ObjectChunkIdentity"
      • @@ -70,24 +66,22 @@ */ public class ReplicateHashTable { /** - * We tag the empty state variable with this annotation, so we know what its name is in the - * source and destination. + * We tag the empty state variable with this annotation, so we know what its name is in the source and destination. */ @Retention(RetentionPolicy.RUNTIME) public @interface EmptyStateValue { } /** - * We tag the state ColumnSource with this annotation, so we know what its name is in the source - * and destination. + * We tag the state ColumnSource with this annotation, so we know what its name is in the source and destination. */ @Retention(RetentionPolicy.RUNTIME) public @interface StateColumnSource { } /** - * We tag the overflow state ColumnSource with this annotation, so we know what its name is in - * the source and destination. + * We tag the overflow state ColumnSource with this annotation, so we know what its name is in the source and + * destination. */ @Retention(RetentionPolicy.RUNTIME) public @interface OverflowStateColumnSource { @@ -97,46 +91,37 @@ public static void main(String[] args) throws IOException { final boolean allowMissingDestinations = false; doReplicate(IncrementalChunkedNaturalJoinStateManager.class, - RightIncrementalChunkedNaturalJoinStateManager.class, allowMissingDestinations, - Arrays.asList("rehash", "allowUpdateWriteThroughState", "dumpTable")); - doReplicate(IncrementalChunkedNaturalJoinStateManager.class, - StaticChunkedNaturalJoinStateManager.class, allowMissingDestinations, - Arrays.asList("rehash", "allowUpdateWriteThroughState", "dumpTable", "prev")); - doReplicate(IncrementalChunkedNaturalJoinStateManager.class, - StaticChunkedAsOfJoinStateManager.class, allowMissingDestinations, - Arrays.asList("dumpTable", "prev")); - doReplicate(IncrementalChunkedNaturalJoinStateManager.class, - RightIncrementalChunkedAsOfJoinStateManager.class, allowMissingDestinations, - Collections.singletonList("dumpTable")); + 
RightIncrementalChunkedNaturalJoinStateManager.class, allowMissingDestinations, + Arrays.asList("rehash", "allowUpdateWriteThroughState", "dumpTable")); + doReplicate(IncrementalChunkedNaturalJoinStateManager.class, StaticChunkedNaturalJoinStateManager.class, + allowMissingDestinations, Arrays.asList("rehash", "allowUpdateWriteThroughState", "dumpTable", "prev")); + doReplicate(IncrementalChunkedNaturalJoinStateManager.class, StaticChunkedAsOfJoinStateManager.class, + allowMissingDestinations, Arrays.asList("dumpTable", "prev")); + doReplicate(IncrementalChunkedNaturalJoinStateManager.class, RightIncrementalChunkedAsOfJoinStateManager.class, + allowMissingDestinations, Collections.singletonList("dumpTable")); doReplicate(IncrementalChunkedNaturalJoinStateManager.class, SymbolTableCombiner.class, - allowMissingDestinations, - Arrays.asList("overflowLocationToHashLocation", "getStateValue", "prev")); + allowMissingDestinations, Arrays.asList("overflowLocationToHashLocation", "getStateValue", "prev")); doReplicate(IncrementalChunkedNaturalJoinStateManager.class, - LeftOnlyIncrementalChunkedCrossJoinStateManager.class, allowMissingDestinations, - Collections.singletonList("dumpTable")); - doReplicate(IncrementalChunkedNaturalJoinStateManager.class, - RightIncrementalChunkedCrossJoinStateManager.class, allowMissingDestinations, - Arrays.asList("dumpTable", "allowUpdateWriteThroughState")); - doReplicate(IncrementalChunkedNaturalJoinStateManager.class, - StaticChunkedCrossJoinStateManager.class, allowMissingDestinations, - Arrays.asList("dumpTable", "prev")); + LeftOnlyIncrementalChunkedCrossJoinStateManager.class, allowMissingDestinations, + Collections.singletonList("dumpTable")); + doReplicate(IncrementalChunkedNaturalJoinStateManager.class, RightIncrementalChunkedCrossJoinStateManager.class, + allowMissingDestinations, Arrays.asList("dumpTable", "allowUpdateWriteThroughState")); + doReplicate(IncrementalChunkedNaturalJoinStateManager.class, 
StaticChunkedCrossJoinStateManager.class, + allowMissingDestinations, Arrays.asList("dumpTable", "prev")); // Incremental NJ -> Static & Incremental Operator Aggregations + doReplicate(IncrementalChunkedNaturalJoinStateManager.class, StaticChunkedOperatorAggregationStateManager.class, + allowMissingDestinations, Arrays.asList("dumpTable", "prev", "decorationProbe")); doReplicate(IncrementalChunkedNaturalJoinStateManager.class, - StaticChunkedOperatorAggregationStateManager.class, allowMissingDestinations, - Arrays.asList("dumpTable", "prev", "decorationProbe")); - doReplicate(IncrementalChunkedNaturalJoinStateManager.class, - IncrementalChunkedOperatorAggregationStateManager.class, allowMissingDestinations, - Collections.singletonList("dumpTable")); + IncrementalChunkedOperatorAggregationStateManager.class, allowMissingDestinations, + Collections.singletonList("dumpTable")); // Incremental NJ -> Incremental By -> Static By - doReplicate(IncrementalChunkedNaturalJoinStateManager.class, - IncrementalChunkedByAggregationStateManager.class, allowMissingDestinations, - Arrays.asList("dumpTable", "allowUpdateWriteThroughState")); - doReplicate(IncrementalChunkedByAggregationStateManager.class, - StaticChunkedByAggregationStateManager.class, allowMissingDestinations, - Arrays.asList("dumpTable", "prev", "decorationProbe")); + doReplicate(IncrementalChunkedNaturalJoinStateManager.class, IncrementalChunkedByAggregationStateManager.class, + allowMissingDestinations, Arrays.asList("dumpTable", "allowUpdateWriteThroughState")); + doReplicate(IncrementalChunkedByAggregationStateManager.class, StaticChunkedByAggregationStateManager.class, + allowMissingDestinations, Arrays.asList("dumpTable", "prev", "decorationProbe")); } private static class RegionedFile { @@ -163,12 +148,12 @@ String stateChunkName() { String stateChunkIdentityName() { return writableStateChunkName().replace("Writable", "") - + (stateChunkType == ChunkType.Object ? 
"Identity" : ""); + + (stateChunkType == ChunkType.Object ? "Identity" : ""); } String writableStateChunkType() { return writableStateChunkName() - + (genericDataType == null ? "" : "<" + genericDataType + ",Values>"); + + (genericDataType == null ? "" : "<" + genericDataType + ",Values>"); } String writableStateChunkName() { @@ -210,27 +195,22 @@ String getStateChunkTypeEnum() { @SuppressWarnings("SameParameterValue") private static void doReplicate(final Class sourceClass, - final Class destinationClass, - final boolean allowMissingDestinations, - Collection excludedMixins) throws IOException { + final Class destinationClass, + final boolean allowMissingDestinations, + Collection excludedMixins) throws IOException { final ColumnSourceInfo sourceColumnSourceInfo = findAnnotations(sourceClass); final ColumnSourceInfo destinationColumnSourceInfo = findAnnotations(destinationClass); - final String sourcePath = - ReplicatePrimitiveCode.pathForClass(sourceClass, ReplicatePrimitiveCode.MAIN_SRC); + final String sourcePath = ReplicatePrimitiveCode.pathForClass(sourceClass, ReplicatePrimitiveCode.MAIN_SRC); final String destinationPath = - ReplicatePrimitiveCode.pathForClass(destinationClass, ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.pathForClass(destinationClass, ReplicatePrimitiveCode.MAIN_SRC); - final List sourceLines = - FileUtils.readLines(new File(sourcePath), Charset.defaultCharset()); + final List sourceLines = FileUtils.readLines(new File(sourcePath), Charset.defaultCharset()); final File destinationFile = new File(destinationPath); - final List destLines = - FileUtils.readLines(destinationFile, Charset.defaultCharset()); + final List destLines = FileUtils.readLines(destinationFile, Charset.defaultCharset()); - final RegionedFile sourceRegioned = - makeRegionedFile(sourcePath, sourceLines, excludedMixins); - final RegionedFile destRegioned = - makeRegionedFile(destinationPath, destLines, excludedMixins); + final RegionedFile sourceRegioned = 
makeRegionedFile(sourcePath, sourceLines, excludedMixins); + final RegionedFile destRegioned = makeRegionedFile(destinationPath, destLines, excludedMixins); final Set missingInSource = new LinkedHashSet<>(destRegioned.regionNames); final Set missingInDestination = new LinkedHashSet<>(sourceRegioned.regionNames); @@ -239,40 +219,37 @@ private static void doReplicate(final Class sourceClass, missingInDestination.removeAll(destRegioned.regionNames); if (!missingInSource.isEmpty()) { - throw new IllegalStateException(destinationPath + ": Region mismatch, not in source " - + missingInSource + ", not in destination" + missingInDestination); + throw new IllegalStateException(destinationPath + ": Region mismatch, not in source " + missingInSource + + ", not in destination" + missingInDestination); } if (!missingInDestination.isEmpty()) { if (allowMissingDestinations) { - System.err - .println("Allowing missing regions in destination: " + missingInDestination); + System.err.println("Allowing missing regions in destination: " + missingInDestination); } else { - throw new IllegalStateException( - destinationPath + ": Region mismatch, not in source " + missingInSource + throw new IllegalStateException(destinationPath + ": Region mismatch, not in source " + missingInSource + ", not in destination" + missingInDestination); } } if (!allowMissingDestinations - && sourceRegioned.noRegionSegments.size() != destRegioned.noRegionSegments.size()) { + && sourceRegioned.noRegionSegments.size() != destRegioned.noRegionSegments.size()) { throw new IllegalStateException( - destinationPath + ": Number of segments outside of regions does not match!"); + destinationPath + ": Number of segments outside of regions does not match!"); } final Function replaceFunction = (sourceString) -> sourceString - .replaceAll(sourceClass.getSimpleName(), destinationClass.getSimpleName()) - .replaceAll(sourceColumnSourceInfo.stateColumnSourceName, - destinationColumnSourceInfo.stateColumnSourceName) - 
.replaceAll(sourceColumnSourceInfo.overflowStateColumnSourceName, - destinationColumnSourceInfo.overflowStateColumnSourceName) - .replaceAll(sourceColumnSourceInfo.emptyStateValue, - destinationColumnSourceInfo.emptyStateValue); + .replaceAll(sourceClass.getSimpleName(), destinationClass.getSimpleName()) + .replaceAll(sourceColumnSourceInfo.stateColumnSourceName, + destinationColumnSourceInfo.stateColumnSourceName) + .replaceAll(sourceColumnSourceInfo.overflowStateColumnSourceName, + destinationColumnSourceInfo.overflowStateColumnSourceName) + .replaceAll(sourceColumnSourceInfo.emptyStateValue, destinationColumnSourceInfo.emptyStateValue); final List rewrittenLines = new ArrayList<>(); for (int ii = 0; ii < sourceRegioned.noRegionSegments.size() - 1; ++ii) { final List unregionedSegment = sourceRegioned.noRegionSegments.get(ii); final List segmentLines = - rewriteSegment(destinationColumnSourceInfo, replaceFunction, unregionedSegment); + rewriteSegment(destinationColumnSourceInfo, replaceFunction, unregionedSegment); rewrittenLines.addAll(segmentLines); @@ -289,9 +266,8 @@ private static void doReplicate(final Class sourceClass, } } final List unregionedSegment = - sourceRegioned.noRegionSegments.get(sourceRegioned.noRegionSegments.size() - 1); - rewrittenLines.addAll( - rewriteSegment(destinationColumnSourceInfo, replaceFunction, unregionedSegment)); + sourceRegioned.noRegionSegments.get(sourceRegioned.noRegionSegments.size() - 1); + rewrittenLines.addAll(rewriteSegment(destinationColumnSourceInfo, replaceFunction, unregionedSegment)); final String sourcePackage = sourceClass.getPackage().getName(); final String destinationPackage = destinationClass.getPackage().getName(); @@ -300,14 +276,13 @@ private static void doReplicate(final Class sourceClass, for (packageLine = 0; packageLine < 10; ++packageLine) { if (rewrittenLines.get(packageLine).startsWith("package")) { final String rewritePackage = - rewrittenLines.get(packageLine).replace(sourcePackage, 
destinationPackage); + rewrittenLines.get(packageLine).replace(sourcePackage, destinationPackage); rewrittenLines.set(packageLine, rewritePackage); break; } } if (packageLine == 10) { - throw new RuntimeException( - "Could not find package line to rewrite for " + destinationClass); + throw new RuntimeException("Could not find package line to rewrite for " + destinationClass); } FileUtils.writeLines(destinationFile, rewrittenLines); @@ -316,12 +291,10 @@ private static void doReplicate(final Class sourceClass, @NotNull private static List rewriteSegment(ColumnSourceInfo destinationColumnSourceInfo, - Function replaceFunction, List unregionedSegment) { - final List segmentLines = - unregionedSegment.stream().map(replaceFunction).collect(Collectors.toList()); + Function replaceFunction, List unregionedSegment) { + final List segmentLines = unregionedSegment.stream().map(replaceFunction).collect(Collectors.toList()); - final String replacementRegex = - "@(\\S+)@\\s+from\\s+(\\S+)(\\s*,\\s*@\\S+@\\s+from\\s+\\S+)*\\s*"; + final String replacementRegex = "@(\\S+)@\\s+from\\s+(\\S+)(\\s*,\\s*@\\S+@\\s+from\\s+\\S+)*\\s*"; final Pattern controlPattern = Pattern.compile("(\\s*//\\s+)" + replacementRegex); final Pattern subsequentPattern = Pattern.compile(replacementRegex); for (int jj = 0; jj < segmentLines.size(); ++jj) { @@ -359,12 +332,10 @@ private static List rewriteSegment(ColumnSourceInfo destinationColumnSou replacementValue = destinationColumnSourceInfo.writableStateChunkType(); break; case "StateColumnSourceType": - replacementValue = - destinationColumnSourceInfo.getStateColumnSourceType(); + replacementValue = destinationColumnSourceInfo.getStateColumnSourceType(); break; case "StateColumnSourceConstructor": - replacementValue = - destinationColumnSourceInfo.getStateColumnSourceConstructor(); + replacementValue = destinationColumnSourceInfo.getStateColumnSourceConstructor(); break; case "NullStateValue": replacementValue = 
destinationColumnSourceInfo.getNullStateValue(); @@ -376,25 +347,21 @@ private static List rewriteSegment(ColumnSourceInfo destinationColumnSou replacementValue = destinationColumnSourceInfo.getStateChunkTypeEnum(); break; default: - throw new IllegalStateException( - "Unknown replacement: " + replacementType); + throw new IllegalStateException("Unknown replacement: " + replacementType); } - controlReplacement.append(firstControl ? "" : ", ").append('@') - .append(replacementType).append("@ from ") - .append(Pattern.quote(replacementValue)); + controlReplacement.append(firstControl ? "" : ", ").append('@').append(replacementType) + .append("@ from ").append(Pattern.quote(replacementValue)); firstControl = false; - final String replacementLine = - originalLine.replaceAll(fromReplacement, replacementValue); + final String replacementLine = originalLine.replaceAll(fromReplacement, replacementValue); segmentLines.set(jj + 1, replacementLine); if (subsequentReplacement == null) { break; } - final Matcher subsequentMatcher = subsequentPattern - .matcher(subsequentReplacement.replaceFirst("\\s*,\\s*", "")); + final Matcher subsequentMatcher = + subsequentPattern.matcher(subsequentReplacement.replaceFirst("\\s*,\\s*", "")); if (!subsequentMatcher.matches()) { - throw new IllegalStateException( - "Invalid subsequent replacement: " + subsequentReplacement); + throw new IllegalStateException("Invalid subsequent replacement: " + subsequentReplacement); } replacementType = subsequentMatcher.group(1); fromReplacement = subsequentMatcher.group(2); @@ -412,26 +379,22 @@ private static ColumnSourceInfo findAnnotations(Class clazz) { final Field[] fields = clazz.getDeclaredFields(); - final Field stateColumnSourceField = - findAnnotatedField(clazz, fields, StateColumnSource.class); + final Field stateColumnSourceField = findAnnotatedField(clazz, fields, StateColumnSource.class); final Class type = stateColumnSourceField.getType(); if (ColumnSource.class.isAssignableFrom(type)) { 
final Type genericType = stateColumnSourceField.getGenericType(); if (genericType instanceof ParameterizedType) { final ParameterizedType parameterizedType = (ParameterizedType) genericType; - final Class dataType = - (Class) ((ParameterizedType) genericType).getActualTypeArguments()[0]; + final Class dataType = (Class) ((ParameterizedType) genericType).getActualTypeArguments()[0]; result.genericDataType = dataType.getSimpleName(); // noinspection unchecked - final Class asColumnSource = - (Class) parameterizedType.getRawType(); + final Class asColumnSource = (Class) parameterizedType.getRawType(); try { result.stateColumnSourceRawType = asColumnSource.getSimpleName(); - final ColumnSource cs = - asColumnSource.getConstructor(Class.class).newInstance(dataType); + final ColumnSource cs = asColumnSource.getConstructor(Class.class).newInstance(dataType); result.stateChunkType = cs.getChunkType(); } catch (InstantiationException | IllegalAccessException | NoSuchMethodException - | InvocationTargetException e) { + | InvocationTargetException e) { throw new RuntimeException(e); } } else { // type instanceof Class @@ -446,13 +409,12 @@ private static ColumnSourceInfo findAnnotations(Class clazz) { } } } else { - throw new IllegalStateException( - "Not a column source: field=" + stateColumnSourceField + ", type=" + type); + throw new IllegalStateException("Not a column source: field=" + stateColumnSourceField + ", type=" + type); } result.stateColumnSourceName = stateColumnSourceField.getName(); result.overflowStateColumnSourceName = - findAnnotatedField(clazz, fields, OverflowStateColumnSource.class).getName(); + findAnnotatedField(clazz, fields, OverflowStateColumnSource.class).getName(); result.emptyStateValue = findAnnotatedField(clazz, fields, EmptyStateValue.class).getName(); return result; @@ -460,26 +422,25 @@ private static ColumnSourceInfo findAnnotations(Class clazz) { @NotNull private static Field findAnnotatedField(Class clazz, Field[] fields, - Class 
annotationClass) { - final List matchingFields = Arrays.stream(fields) - .filter(f -> f.getAnnotation(annotationClass) != null).collect(Collectors.toList()); + Class annotationClass) { + final List matchingFields = Arrays.stream(fields).filter(f -> f.getAnnotation(annotationClass) != null) + .collect(Collectors.toList()); if (matchingFields.size() > 1) { - throw new RuntimeException("Multiple fields annotated with " - + annotationClass.getSimpleName() + " annotation in " + clazz.getCanonicalName()); + throw new RuntimeException("Multiple fields annotated with " + annotationClass.getSimpleName() + + " annotation in " + clazz.getCanonicalName()); } if (matchingFields.size() < 1) { - throw new RuntimeException("Could not find annotation with " - + annotationClass.getSimpleName() + " annotation in " + clazz.getCanonicalName()); + throw new RuntimeException("Could not find annotation with " + annotationClass.getSimpleName() + + " annotation in " + clazz.getCanonicalName()); } return matchingFields.get(0); } private static RegionedFile makeRegionedFile(final String name, List lines, - Collection excludedMixins) { + Collection excludedMixins) { final Pattern startMixinPattern = Pattern.compile("\\s*//\\s+mixin\\s+(.*)?\\s*"); final Pattern endMixinPattern = Pattern.compile("\\s*//\\s+endmixin\\s+(.*)?\\s*"); - final Pattern altMixinPattern = - Pattern.compile("(\\s*)//\\s+altmixin\\s+(.*?):\\s(.*?)(\\\\)?"); + final Pattern altMixinPattern = Pattern.compile("(\\s*)//\\s+altmixin\\s+(.*?):\\s(.*?)(\\\\)?"); final Pattern startRegionPattern = Pattern.compile("\\s*//\\s+region\\s+(.*)?\\s*"); final Pattern endRegionPattern = Pattern.compile("\\s*//\\s+endregion\\s+(.*)?\\s*"); @@ -513,16 +474,16 @@ private static RegionedFile makeRegionedFile(final String name, List lin if (mixinEndMatcher.matches()) { if (currentRegion != null) { throw new IllegalStateException( - name + ":" + lineNumber + ": Can not end mixin while a region is open, " - + currentRegion + " opened at line " 
+ regionOpenLine); + name + ":" + lineNumber + ": Can not end mixin while a region is open, " + currentRegion + + " opened at line " + regionOpenLine); } if (currentMixin == null) { throw new IllegalStateException( - name + ":" + lineNumber + ": Can not end mixin without an open mixin."); + name + ":" + lineNumber + ": Can not end mixin without an open mixin."); } if (!currentMixin.equals(mixinEndMatcher.group(1))) { throw new IllegalStateException(name + ":" + lineNumber + ": ended mixin " - + mixinEndMatcher.group(1) + ", but current mixin is " + currentMixin); + + mixinEndMatcher.group(1) + ", but current mixin is " + currentMixin); } mixinStack.pop(); currentMixin = mixinStack.isEmpty() ? null : mixinStack.peek(); @@ -537,8 +498,8 @@ private static RegionedFile makeRegionedFile(final String name, List lin if (mixinStartMatcher.matches()) { if (currentRegion != null) { throw new IllegalStateException( - name + ":" + lineNumber + ": Can not start mixin while a region is open, " - + currentRegion + " opened at line " + regionOpenLine); + name + ":" + lineNumber + ": Can not start mixin while a region is open, " + currentRegion + + " opened at line " + regionOpenLine); } currentMixin = mixinStartMatcher.group(1); mixinStack.push(currentMixin); @@ -555,25 +516,25 @@ private static RegionedFile makeRegionedFile(final String name, List lin result.noRegionSegments.add(accumulated); accumulated = new ArrayList<>(); if (currentRegion != null) { - throw new IllegalStateException(name + ":" + lineNumber + ": Already in region " - + currentRegion + " opened at line" + regionOpenLine); + throw new IllegalStateException(name + ":" + lineNumber + ": Already in region " + currentRegion + + " opened at line" + regionOpenLine); } currentRegion = regionStartMatcher.group(1); regionOpenLine = lineNumber; if (result.regionText.containsKey(currentRegion)) { - throw new IllegalStateException(name + ":" + lineNumber - + ": Multiply defined region " + currentRegion + "."); + throw new 
IllegalStateException( + name + ":" + lineNumber + ": Multiply defined region " + currentRegion + "."); } } final Matcher regionEndMatcher = endRegionPattern.matcher(line); if (regionEndMatcher.matches()) { if (currentRegion == null) { throw new IllegalStateException( - name + ":" + lineNumber + ": not in region, but encountered " + line); + name + ":" + lineNumber + ": not in region, but encountered " + line); } if (!currentRegion.equals(regionEndMatcher.group(1))) { throw new IllegalStateException(name + ":" + lineNumber + ": ended region " - + regionEndMatcher.group(1) + ", but current region is " + currentRegion); + + regionEndMatcher.group(1) + ", but current region is " + currentRegion); } result.regionNames.add(currentRegion); result.regionText.put(currentRegion, accumulated); @@ -589,13 +550,13 @@ private static RegionedFile makeRegionedFile(final String name, List lin if (currentMixin != null) { throw new IllegalStateException( - "Mixin " + currentMixin + " never ended, started on line " + mixinOpenLine.peek()); + "Mixin " + currentMixin + " never ended, started on line " + mixinOpenLine.peek()); } result.noRegionSegments.add(accumulated); - Require.eq(result.noRegionSegments.size() - 1, "result.noRegionSegments.size() - 1", - result.regionText.size(), "result.regionText.size()"); + Require.eq(result.noRegionSegments.size() - 1, "result.noRegionSegments.size() - 1", result.regionText.size(), + "result.regionText.size()"); return result; } diff --git a/DB/src/main/java/io/deephaven/db/v2/ReverseLookup.java b/DB/src/main/java/io/deephaven/db/v2/ReverseLookup.java index f985f66d564..a0a0f5ba2f0 100644 --- a/DB/src/main/java/io/deephaven/db/v2/ReverseLookup.java +++ b/DB/src/main/java/io/deephaven/db/v2/ReverseLookup.java @@ -1,34 +1,31 @@ package io.deephaven.db.v2; /** - * For hierarchical table display, identify the row index key that corresponds to a given logical - * key. 
+ * For hierarchical table display, identify the row index key that corresponds to a given logical key. */ public interface ReverseLookup { /** - * Gets the index value where key exists in the table, or the no-entry-value if it is not found - * in the table. + * Gets the index value where key exists in the table, or the no-entry-value if it is not found in the table. * - * @param key a single object for a single column, or a - * {@link io.deephaven.datastructures.util.SmartKey} for multiple columns + * @param key a single object for a single column, or a {@link io.deephaven.datastructures.util.SmartKey} for + * multiple columns * @return the row index where key exists in the table */ long get(Object key); /** - * Gets the index value where key previously in the table, or the no-entry-value if it is was - * not found in the table. + * Gets the index value where key previously in the table, or the no-entry-value if it is was not found in the + * table. * - * @param key a single object for a single column, or a - * {@link io.deephaven.datastructures.util.SmartKey} for multiple columns + * @param key a single object for a single column, or a {@link io.deephaven.datastructures.util.SmartKey} for + * multiple columns * * @return the row index where key previously existed in the table */ long getPrev(Object key); /** - * Returns the value that will be returned from {@link #get} or if no entry exists for a given - * key. + * Returns the value that will be returned from {@link #get} or if no entry exists for a given key. */ long getNoEntryValue(); diff --git a/DB/src/main/java/io/deephaven/db/v2/ReverseLookupListener.java b/DB/src/main/java/io/deephaven/db/v2/ReverseLookupListener.java index 261e233fa3c..9be08e6efb3 100644 --- a/DB/src/main/java/io/deephaven/db/v2/ReverseLookupListener.java +++ b/DB/src/main/java/io/deephaven/db/v2/ReverseLookupListener.java @@ -29,13 +29,13 @@ /** * Maintains a map from key column values to their index. 
* - * This allows you to quickly find a row based on a unique key on a ticking table, without the need - * for searching the entire table. + * This allows you to quickly find a row based on a unique key on a ticking table, without the need for searching the + * entire table. * * Note: The key column values must be unique. */ public class ReverseLookupListener extends LivenessArtifact - implements ReverseLookup, DynamicNode, NotificationStepSource { + implements ReverseLookup, DynamicNode, NotificationStepSource { private static final long NO_ENTRY_VALUE = -2; private static final long REMOVED_ENTRY_VALUE = -3; @@ -46,27 +46,23 @@ public class ReverseLookupListener extends LivenessArtifact private final InternalListener listener; private class InternalListener extends InstrumentedListenerAdapter - implements NotificationStepSource, NotificationStepReceiver { + implements NotificationStepSource, NotificationStepReceiver { private final TObjectLongHashMap prevMap; private final Set modifiedThisCycle = new THashSet<>(); private volatile long lastNotificationStep = NULL_NOTIFICATION_STEP; InternalListener(String description, DynamicTable source, boolean retain) { super(description, source, retain); - prevMap = new TObjectLongHashMap<>(source.isRefreshing() ? 2 * source.intSize() : 0, - 0.75f, NO_ENTRY_VALUE); + prevMap = new TObjectLongHashMap<>(source.isRefreshing() ? 2 * source.intSize() : 0, 0.75f, NO_ENTRY_VALUE); modifiedThisCycle.clear(); } @Override public void onUpdate(final Index added, final Index removed, final Index modified) { synchronized (ReverseLookupListener.this) { - // Note that lastNotificationStep will change before we are technically satisfied, - // but it doesn't - // matter; we aren't fully updated yet, but we rely on synchronization on the - // enclosing RLL to prevent - // inconsistent data access. 
By changing the step as early as we know we can we - // allow concurrent + // Note that lastNotificationStep will change before we are technically satisfied, but it doesn't + // matter; we aren't fully updated yet, but we rely on synchronization on the enclosing RLL to prevent + // inconsistent data access. By changing the step as early as we know we can we allow concurrent // consumers to avoid using a WaitNotification and just rely on our locking. lastNotificationStep = LogicalClock.DEFAULT.currentStep(); prevMap.clear(); @@ -88,9 +84,8 @@ private void removeEntries(Index index) { final long oldRow = map.remove(keyToReverse); if (oldRow == map.getNoEntryValue()) { - throw Assert - .statementNeverExecuted("Removed value not in reverse lookup map: row=" - + row + ", key=" + keyToReverse); + throw Assert.statementNeverExecuted( + "Removed value not in reverse lookup map: row=" + row + ", key=" + keyToReverse); } setPrevious(keyToReverse, oldRow); } @@ -108,21 +103,18 @@ private void modifyEntries(Index index) { final long oldRow; // We only want to remove keys from the mapping that haven't already been modified. - if ((!ignoreNull || keyToReverse != null) - && !modifiedThisCycle.contains(keyToReverse)) { + if ((!ignoreNull || keyToReverse != null) && !modifiedThisCycle.contains(keyToReverse)) { oldRow = map.remove(keyToReverse); if (oldRow == map.getNoEntryValue()) { - throw Assert - .statementNeverExecuted("Removed value not in reverse lookup map: row=" - + row + ", key=" + keyToReverse); + throw Assert.statementNeverExecuted( + "Removed value not in reverse lookup map: row=" + row + ", key=" + keyToReverse); } } else { oldRow = NO_ENTRY_VALUE; } if (!ignoreNull || newKey != null) { - // Take into account that the newKey may already be mapped somewhere, and in - // that case + // Take into account that the newKey may already be mapped somewhere, and in that case // should be added to the previous map so we don't lose that component. 
setPrevious(newKey, map.put(newKey, row)); } @@ -165,8 +157,8 @@ long getPrev(Object key) { @Override public String toString() { return "{lastNotificationStep=" + lastNotificationStep + - ", modifiedThisCycle.size=" + modifiedThisCycle.size() + - ", prevMap.size=" + prevMap.size() + "}"; + ", modifiedThisCycle.size=" + modifiedThisCycle.size() + + ", prevMap.size=" + prevMap.size() + "}"; } } @@ -175,8 +167,7 @@ public String toString() { @ReferentialIntegrity private Object reference; - public static ReverseLookupListener makeReverseLookupListenerWithSnapshot(BaseTable source, - String... columns) { + public static ReverseLookupListener makeReverseLookupListenerWithSnapshot(BaseTable source, String... columns) { final SwapListener swapListener; if (source.isRefreshing()) { swapListener = new SwapListener(source); @@ -189,18 +180,17 @@ public static ReverseLookupListener makeReverseLookupListenerWithSnapshot(BaseTa // noinspection AutoBoxing ConstructSnapshot.callDataSnapshotFunction(System.identityHashCode(source) + ": ", - swapListener == null ? ConstructSnapshot.StaticSnapshotControl.INSTANCE - : swapListener.makeSnapshotControl(), - (usePrev, beforeClock) -> { - final ReverseLookupListener value = - new ReverseLookupListener(source, false, usePrev, columns); - if (swapListener != null) { - swapListener.setListenerAndResult(value.listener, value.listener); - value.reference = swapListener; - } - resultListener.setValue(value); - return true; - }); + swapListener == null ? 
ConstructSnapshot.StaticSnapshotControl.INSTANCE + : swapListener.makeSnapshotControl(), + (usePrev, beforeClock) -> { + final ReverseLookupListener value = new ReverseLookupListener(source, false, usePrev, columns); + if (swapListener != null) { + swapListener.setListenerAndResult(value.listener, value.listener); + value.reference = swapListener; + } + resultListener.setValue(value); + return true; + }); final ReverseLookupListener resultListenerValue = resultListener.getValue(); if (swapListener != null) { @@ -209,8 +199,7 @@ public static ReverseLookupListener makeReverseLookupListenerWithSnapshot(BaseTa return resultListenerValue; } - public static ReverseLookupListener makeReverseLookupListenerWithLock(DynamicTable source, - String... columns) { + public static ReverseLookupListener makeReverseLookupListenerWithLock(DynamicTable source, String... columns) { LiveTableMonitor.DEFAULT.checkInitiateTableOperation(); final ReverseLookupListener result = new ReverseLookupListener(source, columns); source.listenForUpdates(result.listener); @@ -221,8 +210,7 @@ public static ReverseLookupListener makeReverseLookupListenerWithLock(DynamicTab * Prepare the parameter table for use with {@link Table#treeTable(String, String) tree table} * * @param preTree The tree to prepare - * @param idColumn The column that will be used as the id for - * {@link Table#treeTable(String, String)} + * @param idColumn The column that will be used as the id for {@link Table#treeTable(String, String)} */ @ScriptApi public static void prepareForTree(BaseTable preTree, String idColumn) { @@ -233,7 +221,7 @@ public static void prepareForTree(BaseTable preTree, String idColumn) { } preTree.setAttribute(Table.PREPARED_RLL_ATTRIBUTE, - makeReverseLookupListenerWithSnapshot(preTree, idColumn)); + makeReverseLookupListenerWithSnapshot(preTree, idColumn)); } } @@ -245,12 +233,10 @@ private ReverseLookupListener(DynamicTable source, boolean ignoreNull, String... 
this(source, ignoreNull, false, columns); } - private ReverseLookupListener(DynamicTable source, boolean ignoreNull, boolean usePrev, - String... columns) { + private ReverseLookupListener(DynamicTable source, boolean ignoreNull, boolean usePrev, String... columns) { this.keyColumnNames = columns; this.ignoreNull = ignoreNull; - this.columns = - Arrays.stream(columns).map(source::getColumnSource).toArray(ColumnSource[]::new); + this.columns = Arrays.stream(columns).map(source::getColumnSource).toArray(ColumnSource[]::new); map = new TObjectLongHashMap<>(2 * source.intSize(), 0.75f, NO_ENTRY_VALUE); try (final ReadOnlyIndex prevIndex = usePrev ? source.getIndex().getPrevIndex() : null) { @@ -262,8 +248,7 @@ private ReverseLookupListener(DynamicTable source, boolean ignoreNull, boolean u } if (source.isRefreshing()) { - this.listener = new InternalListener("ReverseLookup(" + Arrays.toString(columns) + ")", - source, false); + this.listener = new InternalListener("ReverseLookup(" + Arrays.toString(columns) + ")", source, false); manage(listener); } else { this.listener = null; @@ -286,8 +271,8 @@ public long getNoEntryValue() { } /** - * Returns an iterator to the underlying map of current values. This should only be used by unit - * tests, as the iterator is not synchronized on the RLL and hence may become inconsistent. + * Returns an iterator to the underlying map of current values. This should only be used by unit tests, as the + * iterator is not synchronized on the RLL and hence may become inconsistent. * * @return an iterator to the underlying map of values. */ @@ -317,7 +302,7 @@ private Object getPrevKey(long row) { } private void addEntries(@NotNull final ReadOnlyIndex index, final boolean usePrev, - @NotNull final Runnable consistencyChecker) { + @NotNull final Runnable consistencyChecker) { for (final ReadOnlyIndex.Iterator it = index.iterator(); it.hasNext();) { final long row = it.nextLong(); final Object keyToReverse = usePrev ? 
getPrevKey(row) : getKey(row); @@ -328,8 +313,8 @@ private void addEntries(@NotNull final ReadOnlyIndex index, final boolean usePre final long oldRow = map.put(keyToReverse, row); if (oldRow != map.getNoEntryValue()) { consistencyChecker.run(); - throw Assert.statementNeverExecuted("Duplicate value in reverse lookup map: row=" - + row + ", oldRow=" + oldRow + ", key=" + keyToReverse); + throw Assert.statementNeverExecuted("Duplicate value in reverse lookup map: row=" + row + ", oldRow=" + + oldRow + ", key=" + keyToReverse); } if (listener != null) { @@ -341,9 +326,9 @@ private void addEntries(@NotNull final ReadOnlyIndex index, final boolean usePre @Override public String toString() { return "ReverseLookupListener{" + - "map={size=" + (map == null ? 0 : map.size()) + "}" + - "listener=" + listener + - '}'; + "map={size=" + (map == null ? 0 : map.size()) + "}" + + "listener=" + listener + + '}'; } @Override @@ -364,8 +349,7 @@ public boolean satisfied(final long step) { } private void assertLive() { - Assert.assertion(listener != null, - "The base table was not live, this method should not be invoked."); + Assert.assertion(listener != null, "The base table was not live, this method should not be invoked."); } @Override @@ -376,7 +360,7 @@ public boolean isRefreshing() { @Override public boolean setRefreshing(boolean refreshing) { throw new UnsupportedOperationException( - "An RLL refreshing state is tied to the table it is mapping and can not be changed."); + "An RLL refreshing state is tied to the table it is mapping and can not be changed."); } @Override diff --git a/DB/src/main/java/io/deephaven/db/v2/ReverseOperation.java b/DB/src/main/java/io/deephaven/db/v2/ReverseOperation.java index 3e31bbcfb40..f7a574eb406 100644 --- a/DB/src/main/java/io/deephaven/db/v2/ReverseOperation.java +++ b/DB/src/main/java/io/deephaven/db/v2/ReverseOperation.java @@ -22,8 +22,7 @@ public class ReverseOperation implements QueryTable.MemoizableOperation (Long.MAX_VALUE / 
PIVOT_GROWTH_FACTOR)) { return Long.MAX_VALUE; } else { - // make it big enough that we should be able to accommodate what we are adding now, plus - // a bit more + // make it big enough that we should be able to accommodate what we are adding now, plus a bit more return Math.max(highestOneBit * PIVOT_GROWTH_FACTOR - 1, MINIMUM_PIVOT); } } @@ -242,8 +232,7 @@ public Index transform(final Index indexToTransform) { } /** - * Transform an outer (reversed) index to the inner (unreversed) index as of the previous cycle, - * or vice versa. + * Transform an outer (reversed) index to the inner (unreversed) index as of the previous cycle, or vice versa. * * @param outerIndex the outer index * @return the corresponding inner index @@ -256,8 +245,7 @@ private Index transform(final Index outerIndex, final boolean usePrev) { final long pivot = usePrev ? getPivotPrev() : pivotPoint; final IndexBuilder reversedBuilder = Index.FACTORY.getRandomBuilder(); - for (final Index.RangeIterator rangeIterator = outerIndex.rangeIterator(); rangeIterator - .hasNext();) { + for (final Index.RangeIterator rangeIterator = outerIndex.rangeIterator(); rangeIterator.hasNext();) { rangeIterator.next(); final long startValue = rangeIterator.currentRangeStart(); final long endValue = rangeIterator.currentRangeEnd(); @@ -283,8 +271,7 @@ public long transform(long outerIndex) { } /** - * Transform an outer (reversed) index to the inner (unreversed) index as of the previous cycle, - * or vice versa. + * Transform an outer (reversed) index to the inner (unreversed) index as of the previous cycle, or vice versa. 
* * @param outerIndex the outer index * @return the corresponding inner index diff --git a/DB/src/main/java/io/deephaven/db/v2/RollupAttributeCopier.java b/DB/src/main/java/io/deephaven/db/v2/RollupAttributeCopier.java index fd8d1c9a55f..f054a9b86fb 100644 --- a/DB/src/main/java/io/deephaven/db/v2/RollupAttributeCopier.java +++ b/DB/src/main/java/io/deephaven/db/v2/RollupAttributeCopier.java @@ -8,16 +8,15 @@ */ public class RollupAttributeCopier { /** - * When creating constituent leaves, we set the appropriate TableMap and reverse lookup on each - * leaf we are creating. + * When creating constituent leaves, we set the appropriate TableMap and reverse lookup on each leaf we are + * creating. */ - public final static ByExternalChunkedOperator.AttributeCopier LEAF_WITHCONSTITUENTS_INSTANCE = - (pt, st) -> { - pt.copyAttributes(st, BaseTable.CopyAttributeOperation.ByExternal); - st.setAttribute(Table.ROLLUP_LEAF_ATTRIBUTE, RollupInfo.LeafType.Constituent); - st.setAttribute(Table.HIERARCHICAL_CHILDREN_TABLE_MAP_ATTRIBUTE, TableMap.emptyMap()); - st.setAttribute(Table.REVERSE_LOOKUP_ATTRIBUTE, ReverseLookup.NULL); - }; + public final static ByExternalChunkedOperator.AttributeCopier LEAF_WITHCONSTITUENTS_INSTANCE = (pt, st) -> { + pt.copyAttributes(st, BaseTable.CopyAttributeOperation.ByExternal); + st.setAttribute(Table.ROLLUP_LEAF_ATTRIBUTE, RollupInfo.LeafType.Constituent); + st.setAttribute(Table.HIERARCHICAL_CHILDREN_TABLE_MAP_ATTRIBUTE, TableMap.emptyMap()); + st.setAttribute(Table.REVERSE_LOOKUP_ATTRIBUTE, ReverseLookup.NULL); + }; /** For intermediate levels, we must copy the reverse lookup from the deeper level. 
*/ public final static ByExternalChunkedOperator.AttributeCopier DEFAULT_INSTANCE = (pt, st) -> { diff --git a/DB/src/main/java/io/deephaven/db/v2/RollupInfo.java b/DB/src/main/java/io/deephaven/db/v2/RollupInfo.java index ea52a5551fb..f2569f4a45f 100644 --- a/DB/src/main/java/io/deephaven/db/v2/RollupInfo.java +++ b/DB/src/main/java/io/deephaven/db/v2/RollupInfo.java @@ -26,19 +26,18 @@ public enum LeafType { Normal, /** - * The leaf tables are from the original table (they show constituent rows) and may have - * different column names and types + * The leaf tables are from the original table (they show constituent rows) and may have different column names + * and types */ Constituent } - public RollupInfo(ComboAggregateFactory factory, SelectColumn[] selectColumns, - LeafType leafType) { + public RollupInfo(ComboAggregateFactory factory, SelectColumn[] selectColumns, LeafType leafType) { this(factory, selectColumns, leafType, null); } - public RollupInfo(ComboAggregateFactory factory, SelectColumn[] selectColumns, - LeafType leafType, String[] columnFormats) { + public RollupInfo(ComboAggregateFactory factory, SelectColumn[] selectColumns, LeafType leafType, + String[] columnFormats) { super(columnFormats); this.factory = factory; this.selectColumns = selectColumns; @@ -46,7 +45,7 @@ public RollupInfo(ComboAggregateFactory factory, SelectColumn[] selectColumns, this.leafType = leafType; final Set tempSet = Arrays.stream(selectColumns).map(SelectColumn::getName) - .collect(Collectors.toCollection(LinkedHashSet::new)); + .collect(Collectors.toCollection(LinkedHashSet::new)); this.byColumnNames = Collections.unmodifiableSet(tempSet); } diff --git a/DB/src/main/java/io/deephaven/db/v2/ShiftAwareListener.java b/DB/src/main/java/io/deephaven/db/v2/ShiftAwareListener.java index 2d1f0de62d3..503261b6534 100644 --- a/DB/src/main/java/io/deephaven/db/v2/ShiftAwareListener.java +++ b/DB/src/main/java/io/deephaven/db/v2/ShiftAwareListener.java @@ -21,8 +21,8 @@ public 
interface ShiftAwareListener extends ListenerBase { /** - * A shift aware update structure, containing the rows and columns that were added, modified, - * removed, and shifted on a given cycle. + * A shift aware update structure, containing the rows and columns that were added, modified, removed, and shifted + * on a given cycle. */ class Update implements LogOutputAppendable { /** @@ -53,19 +53,18 @@ class Update implements LogOutputAppendable { // Cached version of prevModified index. private volatile Index prevModified; - // Field updater for refCount, so we can avoid creating an {@link - // java.util.concurrent.atomic.AtomicInteger} for each instance. + // Field updater for refCount, so we can avoid creating an {@link java.util.concurrent.atomic.AtomicInteger} for + // each instance. private static final AtomicIntegerFieldUpdater REFERENCE_COUNT_UPDATER = - AtomicIntegerFieldUpdater.newUpdater(Update.class, "refCount"); + AtomicIntegerFieldUpdater.newUpdater(Update.class, "refCount"); // Ensure that we clean up only after all copies of the update are released. 
private volatile int refCount = 1; public Update() {} - public Update(final Index added, final Index removed, final Index modified, - final IndexShiftData shifted, - final ModifiedColumnSet modifiedColumnSet) { + public Update(final Index added, final Index removed, final Index modified, final IndexShiftData shifted, + final ModifiedColumnSet modifiedColumnSet) { this.added = added; this.removed = removed; this.modified = modified; @@ -109,8 +108,7 @@ public boolean empty() { * @return true if all internal state is initialized */ public boolean valid() { - return added != null && removed != null && modified != null && shifted != null - && modifiedColumnSet != null; + return added != null && removed != null && modified != null && shifted != null && modifiedColumnSet != null; } /** @@ -118,8 +116,7 @@ public boolean valid() { */ public Update copy() { final ModifiedColumnSet newMCS; - if (modifiedColumnSet == ModifiedColumnSet.ALL - || modifiedColumnSet == ModifiedColumnSet.EMPTY) { + if (modifiedColumnSet == ModifiedColumnSet.ALL || modifiedColumnSet == ModifiedColumnSet.EMPTY) { newMCS = modifiedColumnSet; } else { newMCS = new ModifiedColumnSet(modifiedColumnSet); @@ -142,8 +139,7 @@ public Index getModifiedPreShift() { if (localPrevModified == null) { localPrevModified = modified.clone(); shifted.unapply(localPrevModified); - // this volatile write ensures prevModified is visible only after it is - // shifted + // this volatile write ensures prevModified is visible only after it is shifted prevModified = localPrevModified; } } @@ -152,8 +148,7 @@ public Index getModifiedPreShift() { } /** - * This helper iterates through the modified index and supplies both the pre-shift and - * post-shift keys per row. + * This helper iterates through the modified index and supplies both the pre-shift and post-shift keys per row. * * @param consumer a consumer to feed the modified pre-shift and post-shift key values to. 
*/ @@ -167,8 +162,7 @@ public void forAllModified(final BiConsumer consumer) { } if (it.hasNext() || pit.hasNext()) { - throw new IllegalStateException( - "IndexShiftData.forAllModified(modified) generated an invalid set."); + throw new IllegalStateException("IndexShiftData.forAllModified(modified) generated an invalid set."); } } @@ -190,8 +184,7 @@ private void reset() { } shifted = null; modifiedColumnSet = null; - // This doubles as a memory barrier write prior to the read in acquire(). It must remain - // last. + // This doubles as a memory barrier write prior to the read in acquire(). It must remain last. prevModified = null; } @@ -203,13 +196,13 @@ public String toString() { @Override public LogOutput append(LogOutput logOutput) { return logOutput.append('{') - .append("added=").append(added) - .append(", removed=").append(removed) - .append(", modified=").append(modified) - .append(", shifted=").append(shifted == null ? "{}" : shifted.toString()) - .append(", modifiedColumnSet=") - .append(modifiedColumnSet == null ? "{EMPTY}" : modifiedColumnSet.toString()) - .append("}"); + .append("added=").append(added) + .append(", removed=").append(removed) + .append(", modified=").append(modified) + .append(", shifted=").append(shifted == null ? "{}" : shifted.toString()) + .append(", modifiedColumnSet=") + .append(modifiedColumnSet == null ? 
"{EMPTY}" : modifiedColumnSet.toString()) + .append("}"); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/ShiftAwareSwapListener.java b/DB/src/main/java/io/deephaven/db/v2/ShiftAwareSwapListener.java index eedfa121924..2705100a7e2 100644 --- a/DB/src/main/java/io/deephaven/db/v2/ShiftAwareSwapListener.java +++ b/DB/src/main/java/io/deephaven/db/v2/ShiftAwareSwapListener.java @@ -2,8 +2,7 @@ import io.deephaven.db.tables.live.NotificationQueue; -public class ShiftAwareSwapListener extends SwapListenerBase - implements ShiftAwareListener { +public class ShiftAwareSwapListener extends SwapListenerBase implements ShiftAwareListener { public ShiftAwareSwapListener(final BaseTable sourceTable) { super(sourceTable); @@ -16,8 +15,7 @@ public synchronized void onUpdate(final Update upstream) { } @Override - public synchronized NotificationQueue.IndexUpdateNotification getNotification( - final Update update) { + public synchronized NotificationQueue.IndexUpdateNotification getNotification(final Update update) { return doGetNotification(() -> eventualListener.getNotification(update)); } diff --git a/DB/src/main/java/io/deephaven/db/v2/SimpleSourceTable.java b/DB/src/main/java/io/deephaven/db/v2/SimpleSourceTable.java index f9a7c5d97a9..2c78451d3d5 100644 --- a/DB/src/main/java/io/deephaven/db/v2/SimpleSourceTable.java +++ b/DB/src/main/java/io/deephaven/db/v2/SimpleSourceTable.java @@ -22,26 +22,24 @@ public class SimpleSourceTable extends SourceTable { * @param tableDefinition A TableDefinition * @param description A human-readable description for this table * @param componentFactory A component factory for creating column source managers - * @param locationProvider A TableLocationProvider, for use in discovering the locations that - * compose this table - * @param liveTableRegistrar Callback for registering live tables for refreshes, null if this - * table is not live + * @param locationProvider A TableLocationProvider, for use in discovering the locations that compose 
this table + * @param liveTableRegistrar Callback for registering live tables for refreshes, null if this table is not live */ public SimpleSourceTable(TableDefinition tableDefinition, - String description, - SourceTableComponentFactory componentFactory, - TableLocationProvider locationProvider, - LiveTableRegistrar liveTableRegistrar) { + String description, + SourceTableComponentFactory componentFactory, + TableLocationProvider locationProvider, + LiveTableRegistrar liveTableRegistrar) { super(tableDefinition, description, componentFactory, locationProvider, liveTableRegistrar); } protected SimpleSourceTable newInstance(TableDefinition tableDefinition, - String description, - SourceTableComponentFactory componentFactory, - TableLocationProvider locationProvider, - LiveTableRegistrar liveTableRegistrar) { - return new SimpleSourceTable(tableDefinition, description, componentFactory, - locationProvider, liveTableRegistrar); + String description, + SourceTableComponentFactory componentFactory, + TableLocationProvider locationProvider, + LiveTableRegistrar liveTableRegistrar) { + return new SimpleSourceTable(tableDefinition, description, componentFactory, locationProvider, + liveTableRegistrar); } @Override @@ -50,17 +48,15 @@ protected final SourceTable redefine(TableDefinition newDefinition) { // Nothing changed - we have the same columns in the same order. 
return this; } - return newInstance(newDefinition, description + "-retainColumns", componentFactory, - locationProvider, liveTableRegistrar); + return newInstance(newDefinition, description + "-retainColumns", componentFactory, locationProvider, + liveTableRegistrar); } @Override - protected final Table redefine(TableDefinition newDefinitionExternal, - TableDefinition newDefinitionInternal, SelectColumn[] viewColumns, - Map> columnDependency) { - DeferredViewTable deferredViewTable = new DeferredViewTable(newDefinitionExternal, - description + "-redefined", new QueryTableReference(redefine(newDefinitionInternal)), - new String[0], viewColumns, null); + protected final Table redefine(TableDefinition newDefinitionExternal, TableDefinition newDefinitionInternal, + SelectColumn[] viewColumns, Map> columnDependency) { + DeferredViewTable deferredViewTable = new DeferredViewTable(newDefinitionExternal, description + "-redefined", + new QueryTableReference(redefine(newDefinitionInternal)), new String[0], viewColumns, null); deferredViewTable.setRefreshing(isRefreshing()); return deferredViewTable; } diff --git a/DB/src/main/java/io/deephaven/db/v2/SliceLikeOperation.java b/DB/src/main/java/io/deephaven/db/v2/SliceLikeOperation.java index 15232a9aa66..6f39a6d6761 100644 --- a/DB/src/main/java/io/deephaven/db/v2/SliceLikeOperation.java +++ b/DB/src/main/java/io/deephaven/db/v2/SliceLikeOperation.java @@ -8,30 +8,27 @@ public class SliceLikeOperation implements QueryTable.Operation { - public static SliceLikeOperation slice(final QueryTable parent, - final long firstPositionInclusive, - final long lastPositionExclusive, final String op) { + public static SliceLikeOperation slice(final QueryTable parent, final long firstPositionInclusive, + final long lastPositionExclusive, final String op) { if (firstPositionInclusive < 0 && lastPositionExclusive > 0) { - throw new IllegalArgumentException( - "Can not slice with a negative first position (" + firstPositionInclusive + throw 
new IllegalArgumentException("Can not slice with a negative first position (" + firstPositionInclusive + ") and positive last position (" + lastPositionExclusive + ")"); } // note: first >= 0 && last < 0 is allowed, otherwise first must be less than last if ((firstPositionInclusive < 0 || lastPositionExclusive >= 0) - && lastPositionExclusive < firstPositionInclusive) { - throw new IllegalArgumentException("Can not slice with a first position (" - + firstPositionInclusive + ") after last position (" + lastPositionExclusive + ")"); + && lastPositionExclusive < firstPositionInclusive) { + throw new IllegalArgumentException("Can not slice with a first position (" + firstPositionInclusive + + ") after last position (" + lastPositionExclusive + ")"); } - return new SliceLikeOperation(op, - op + "(" + firstPositionInclusive + ", " + lastPositionExclusive + ")", - parent, firstPositionInclusive, lastPositionExclusive, firstPositionInclusive == 0); + return new SliceLikeOperation(op, op + "(" + firstPositionInclusive + ", " + lastPositionExclusive + ")", + parent, firstPositionInclusive, lastPositionExclusive, firstPositionInclusive == 0); } public static SliceLikeOperation headPct(final QueryTable parent, final double percent) { return new SliceLikeOperation("headPct", "headPct(" + percent + ")", parent, - 0, 0, true) { + 0, 0, true) { @Override protected long getLastPositionExclusive() { return (long) Math.ceil(percent * parent.size()); @@ -41,7 +38,7 @@ protected long getLastPositionExclusive() { public static SliceLikeOperation tailPct(final QueryTable parent, final double percent) { return new SliceLikeOperation("tailPct", "tailPct(" + percent + ")", parent, - 0, 0, false) { + 0, 0, false) { @Override protected long getFirstPositionInclusive() { return -(long) Math.ceil(percent * parent.size()); @@ -57,10 +54,9 @@ protected long getFirstPositionInclusive() { private final boolean isFlat; private QueryTable resultTable; - private SliceLikeOperation(final String 
operation, final String description, - final QueryTable parent, - final long firstPositionInclusive, final long lastPositionExclusive, - final boolean mayBeFlat) { + private SliceLikeOperation(final String operation, final String description, final QueryTable parent, + final long firstPositionInclusive, final long lastPositionExclusive, + final boolean mayBeFlat) { this.operation = operation; this.description = description; this.parent = parent; @@ -90,11 +86,9 @@ protected long getLastPositionExclusive() { @Override public Result initialize(boolean usePrev, long beforeClock) { final Index parentIndex = parent.getIndex(); - final Index resultIndex = - computeSliceIndex(usePrev ? parentIndex.getPrevIndex() : parentIndex); + final Index resultIndex = computeSliceIndex(usePrev ? parentIndex.getPrevIndex() : parentIndex); - // result table must be a sub-table so we can pass ModifiedColumnSet to listeners when - // possible + // result table must be a sub-table so we can pass ModifiedColumnSet to listeners when possible resultTable = parent.getSubTable(resultIndex); if (isFlat) { resultTable.setFlat(); @@ -102,13 +96,12 @@ public Result initialize(boolean usePrev, long beforeClock) { ShiftAwareListener resultListener = null; if (parent.isRefreshing()) { - resultListener = - new BaseTable.ShiftAwareListenerImpl(getDescription(), parent, resultTable) { - @Override - public void onUpdate(Update upstream) { - SliceLikeOperation.this.onUpdate(upstream); - } - }; + resultListener = new BaseTable.ShiftAwareListenerImpl(getDescription(), parent, resultTable) { + @Override + public void onUpdate(Update upstream) { + SliceLikeOperation.this.onUpdate(upstream); + } + }; } return new Result(resultTable, resultListener); diff --git a/DB/src/main/java/io/deephaven/db/v2/SortHelpers.java b/DB/src/main/java/io/deephaven/db/v2/SortHelpers.java index 2b9a75e9f5a..bdcc17c7d2b 100644 --- a/DB/src/main/java/io/deephaven/db/v2/SortHelpers.java +++ 
b/DB/src/main/java/io/deephaven/db/v2/SortHelpers.java @@ -35,35 +35,32 @@ public class SortHelpers { public static boolean sortBySymbolTable = - Configuration.getInstance().getBooleanWithDefault("QueryTable.sortBySymbolTable", true); + Configuration.getInstance().getBooleanWithDefault("QueryTable.sortBySymbolTable", true); /** - * If we have more than this many entries per group, instead of creating a large flat - * redirection Index, we create a redirection index that is composed of the group indices and an - * accumulated cardinality cache. This can save a significant amount of memory when the groups - * are large and storing them using our Index structure is more efficient. + * If we have more than this many entries per group, instead of creating a large flat redirection Index, we create a + * redirection index that is composed of the group indices and an accumulated cardinality cache. This can save a + * significant amount of memory when the groups are large and storing them using our Index structure is more + * efficient. */ - public static int groupedRedirectionThreshold = Configuration.getInstance() - .getIntegerWithDefault("SortHelpers.groupedRedirectionThreshold", 32); + public static int groupedRedirectionThreshold = + Configuration.getInstance().getIntegerWithDefault("SortHelpers.groupedRedirectionThreshold", 32); /** - * When the sort is greater than or equal to than megaSortSize, instead of sorting one large - * chunk, we will sort individual chunks of sortChunkSize and then merge them into - * ArrayBackedColumnSources with the LongMegaMergeKernel. + * When the sort is greater than or equal to than megaSortSize, instead of sorting one large chunk, we will sort + * individual chunks of sortChunkSize and then merge them into ArrayBackedColumnSources with the + * LongMegaMergeKernel. * - * There are some boundary conditions in the chunk sizing math that make Integer.MAX_VALUE fail; - * you could probably back off to Integer.MAX_VALUE - 32 safely. 
We're being very conservative - * with 1 << 30 instead. + * There are some boundary conditions in the chunk sizing math that make Integer.MAX_VALUE fail; you could probably + * back off to Integer.MAX_VALUE - 32 safely. We're being very conservative with 1 << 30 instead. */ @VisibleForTesting - static int megaSortSize = - Configuration.getInstance().getIntegerWithDefault("QueryTable.sortChunkSize", 1 << 30); + static int megaSortSize = Configuration.getInstance().getIntegerWithDefault("QueryTable.sortChunkSize", 1 << 30); /** * The size of each chunk of a sort in a mega-merge based sort. */ @VisibleForTesting - static int sortChunkSize = - Configuration.getInstance().getIntegerWithDefault("QueryTable.sortChunkSize", 1 << 30); + static int sortChunkSize = Configuration.getInstance().getIntegerWithDefault("QueryTable.sortChunkSize", 1 << 30); interface SortMapping extends LongSizedDataStructure { long size(); @@ -208,25 +205,23 @@ public RedirectionIndex makeHistoricalRedirectionIndex() { } } - static private final SortMapping EMPTY_SORT_MAPPING = - new ArraySortMapping(CollectionUtil.ZERO_LENGTH_LONG_ARRAY); + static private final SortMapping EMPTY_SORT_MAPPING = new ArraySortMapping(CollectionUtil.ZERO_LENGTH_LONG_ARRAY); /** - * Note that if usePrev is true, then indexToSort is the previous index; not the current index, - * and we should not need to call getPrevIndex. + * Note that if usePrev is true, then indexToSort is the previous index; not the current index, and we should not + * need to call getPrevIndex. 
*/ - static SortMapping getSortedKeys(SortingOrder[] order, - ColumnSource>[] columnsToSortBy, ReadOnlyIndex indexToSort, boolean usePrev) { + static SortMapping getSortedKeys(SortingOrder[] order, ColumnSource>[] columnsToSortBy, + ReadOnlyIndex indexToSort, boolean usePrev) { return getSortedKeys(order, columnsToSortBy, indexToSort, usePrev, sortBySymbolTable); } /** - * Note that if usePrev is true, then indexToSort is the previous index; not the current index, - * and we should not need to call getPrevIndex. + * Note that if usePrev is true, then indexToSort is the previous index; not the current index, and we should not + * need to call getPrevIndex. */ - static SortMapping getSortedKeys(SortingOrder[] order, - ColumnSource>[] columnsToSortBy, ReadOnlyIndex indexToSort, boolean usePrev, - boolean allowSymbolTable) { + static SortMapping getSortedKeys(SortingOrder[] order, ColumnSource>[] columnsToSortBy, + ReadOnlyIndex indexToSort, boolean usePrev, boolean allowSymbolTable) { if (indexToSort.size() == 0) { return EMPTY_SORT_MAPPING; } @@ -240,8 +235,7 @@ static SortMapping getSortedKeys(SortingOrder[] order, } } else { if (allowSymbolTable && columnsToSortBy[0] instanceof SymbolTableSource - && ((SymbolTableSource>) columnsToSortBy[0]) - .hasSymbolTable(indexToSort)) { + && ((SymbolTableSource>) columnsToSortBy[0]).hasSymbolTable(indexToSort)) { return doSymbolTableMapping(order[0], columnsToSortBy[0], indexToSort, usePrev); } else { return getSortMappingOne(order[0], columnsToSortBy[0], indexToSort, usePrev); @@ -294,8 +288,7 @@ private int doIntLookup(long symTabId) { return lookupTable[region][id]; } - private static SparseSymbolMapping createMapping(LongChunk originalSymbol, - IntChunk mappedIndex) { + private static SparseSymbolMapping createMapping(LongChunk originalSymbol, IntChunk mappedIndex) { // figure out what the maximum region is, and determine how many bits of it there are int maxUpperPart = 0; int minTrailing = 32; @@ -309,18 +302,16 @@ 
private static SparseSymbolMapping createMapping(LongChunk originalSymbol, } final int maxShiftedRegion = maxUpperPart >> minTrailing; if (minTrailing == 32) { - // this means we only found a zero region, in which case we do not want to shift by - // 64, but rather by zero to just truncate the region entirely + // this means we only found a zero region, in which case we do not want to shift by 64, but rather by + // zero to just truncate the region entirely Assert.eqZero(maxShiftedRegion, "maxShiftedRegion"); minTrailing = 0; } final int[][] lookupTable = new int[maxShiftedRegion + 1][maxSymbol + 1]; - // maxMapping ends up being the number of unique string values that we have. We compute - // it so that we can - // map symbol IDs to these unique integers using the narrowest primitive sorting kernel - // possible. + // maxMapping ends up being the number of unique string values that we have. We compute it so that we can + // map symbol IDs to these unique integers using the narrowest primitive sorting kernel possible. 
int maxMapping = 0; for (int ii = 0; ii < originalSymbol.size(); ++ii) { @@ -339,60 +330,48 @@ private static SparseSymbolMapping createMapping(LongChunk originalSymbol, private static final String SORTED_INDEX_COLUMN_NAME = "SortedIndex"; private static final String SORTED_INDEX_COLUMN_UPDATE = SORTED_INDEX_COLUMN_NAME + "=i"; - private static SortMapping doSymbolTableMapping(SortingOrder order, - ColumnSource> columnSource, ReadOnlyIndex index, boolean usePrev) { + private static SortMapping doSymbolTableMapping(SortingOrder order, ColumnSource> columnSource, + ReadOnlyIndex index, boolean usePrev) { final int sortSize = index.intSize(); final ColumnSource reinterpreted = columnSource.reinterpret(long.class); - final Table symbolTable = - ((SymbolTableSource) columnSource).getStaticSymbolTable(index, true); + final Table symbolTable = ((SymbolTableSource) columnSource).getStaticSymbolTable(index, true); if (symbolTable.size() >= sortSize) { - // the very first thing we will do is sort the symbol table, using a regular sort; if it - // is larger than the + // the very first thing we will do is sort the symbol table, using a regular sort; if it is larger than the // actual table we care to sort, then it is wasteful to use the symbol table sorting return getSortMappingOne(order, columnSource, index, usePrev); } final Table idMapping = symbolTable.sort(SymbolTableSource.SYMBOL_COLUMN_NAME) - .by(SymbolTableSource.SYMBOL_COLUMN_NAME).update(SORTED_INDEX_COLUMN_UPDATE).ungroup() - .view(SymbolTableSource.ID_COLUMN_NAME, SORTED_INDEX_COLUMN_NAME); + .by(SymbolTableSource.SYMBOL_COLUMN_NAME).update(SORTED_INDEX_COLUMN_UPDATE).ungroup() + .view(SymbolTableSource.ID_COLUMN_NAME, SORTED_INDEX_COLUMN_NAME); final int symbolEntries = idMapping.intSize(); final SparseSymbolMapping mapping; - try ( - final WritableLongChunk originalSymbol = - WritableLongChunk.makeWritableChunk(symbolEntries); - final WritableIntChunk mappedIndex = - 
WritableIntChunk.makeWritableChunk(symbolEntries)) { - final ColumnSource idSource = - idMapping.getColumnSource(SymbolTableSource.ID_COLUMN_NAME); - try (final ColumnSource.FillContext idContext = - idSource.makeFillContext(symbolEntries)) { + try (final WritableLongChunk originalSymbol = WritableLongChunk.makeWritableChunk(symbolEntries); + final WritableIntChunk mappedIndex = WritableIntChunk.makeWritableChunk(symbolEntries)) { + final ColumnSource idSource = idMapping.getColumnSource(SymbolTableSource.ID_COLUMN_NAME); + try (final ColumnSource.FillContext idContext = idSource.makeFillContext(symbolEntries)) { idSource.fillChunk(idContext, originalSymbol, idMapping.getIndex()); } - final ColumnSource sortedIndexSource = - idMapping.getColumnSource(SORTED_INDEX_COLUMN_NAME); - try (final ColumnSource.FillContext sortedIndexContext = - sortedIndexSource.makeFillContext(symbolEntries)) { + final ColumnSource sortedIndexSource = idMapping.getColumnSource(SORTED_INDEX_COLUMN_NAME); + try (final ColumnSource.FillContext sortedIndexContext = sortedIndexSource.makeFillContext(symbolEntries)) { sortedIndexSource.fillChunk(sortedIndexContext, mappedIndex, idMapping.getIndex()); } mapping = SparseSymbolMapping.createMapping(originalSymbol, mappedIndex); } - // Read the symbol table values into the unmappedValues chunk. The reinterpreted source - // provides the region and - // the symbol ID within the region as a packed long, which we then unpack in the - // type-specific loops below. + // Read the symbol table values into the unmappedValues chunk. The reinterpreted source provides the region and + // the symbol ID within the region as a packed long, which we then unpack in the type-specific loops below. 
try (final WritableLongChunk unmappedValues = - makeAndFillValues(usePrev, index, reinterpreted).asWritableLongChunk()) { + makeAndFillValues(usePrev, index, reinterpreted).asWritableLongChunk()) { - // Mapped values generic is the chunk of unique integer keys for the sort operation, - // using the narrowest + // Mapped values generic is the chunk of unique integer keys for the sort operation, using the narrowest // possible primitive type (byte, short, or int). final WritableChunk mappedValuesGeneric; final LongSortKernel sortContext; @@ -401,36 +380,31 @@ private static SortMapping doSymbolTableMapping(SortingOrder order, final WritableByteChunk mappedValues; mappedValues = WritableByteChunk.makeWritableChunk(index.intSize()); for (int ii = 0; ii < unmappedValues.size(); ++ii) { - // symTabId is the packed ID, nulls get converted to the right kind of null, - // other values are pulled - // apart into region and id; and then used as offsets in the lookup table to get - // the unique sorted id + // symTabId is the packed ID, nulls get converted to the right kind of null, other values are pulled + // apart into region and id; and then used as offsets in the lookup table to get the unique sorted + // id // and the mapped value is added to the mappedValues chunk mappedValues.set(ii, mapping.lookupByte(unmappedValues.get(ii))); } sortContext = LongSortKernel.makeContext(ChunkType.Byte, order, sortSize, false); mappedValuesGeneric = mappedValues; } else if (mapping.getMaxMapping() <= Short.MAX_VALUE) { - final WritableShortChunk mappedValues = - WritableShortChunk.makeWritableChunk(index.intSize()); + final WritableShortChunk mappedValues = WritableShortChunk.makeWritableChunk(index.intSize()); for (int ii = 0; ii < unmappedValues.size(); ++ii) { - // symTabId is the packed ID, nulls get converted to the right kind of null, - // other values are pulled - // apart into region and id; and then used as offsets in the lookup table to get - // the unique sorted id + // 
symTabId is the packed ID, nulls get converted to the right kind of null, other values are pulled + // apart into region and id; and then used as offsets in the lookup table to get the unique sorted + // id // and the mapped value is added to the mappedValues chunk mappedValues.set(ii, mapping.lookupShort(unmappedValues.get(ii))); } sortContext = LongSortKernel.makeContext(ChunkType.Short, order, sortSize, false); mappedValuesGeneric = mappedValues; } else { - final WritableIntChunk mappedValues = - WritableIntChunk.makeWritableChunk(index.intSize()); + final WritableIntChunk mappedValues = WritableIntChunk.makeWritableChunk(index.intSize()); for (int ii = 0; ii < unmappedValues.size(); ++ii) { - // symTabId is the packed ID, nulls get converted to the right kind of null, - // other values are pulled - // apart into region and id; and then used as offsets in the lookup table to get - // the unique sorted id + // symTabId is the packed ID, nulls get converted to the right kind of null, other values are pulled + // apart into region and id; and then used as offsets in the lookup table to get the unique sorted + // id // and the mapped value is added to the mappedValues chunk mappedValues.set(ii, mapping.lookupInt(unmappedValues.get(ii))); @@ -439,14 +413,12 @@ private static SortMapping doSymbolTableMapping(SortingOrder order, mappedValuesGeneric = mappedValues; } - // Fill a chunk that is Writable, and does not have an ordered tag with the index keys - // that we are sorting, the - // index would does something very similar inside of - // io.deephaven.db.v2.utils.OrderedKeys.asKeyIndicesChunk; + // Fill a chunk that is Writable, and does not have an ordered tag with the index keys that we are sorting, + // the + // index would does something very similar inside of io.deephaven.db.v2.utils.OrderedKeys.asKeyIndicesChunk; // but provides a LongChunk as its return. 
final long[] indexKeysArray = new long[sortSize]; - final WritableLongChunk indexKeys = - WritableLongChunk.writableChunkWrap(indexKeysArray); + final WritableLongChunk indexKeys = WritableLongChunk.writableChunkWrap(indexKeysArray); index.fillKeyIndicesChunk(indexKeys); sortContext.sort(indexKeys, mappedValuesGeneric); sortContext.close(); @@ -456,50 +428,46 @@ private static SortMapping doSymbolTableMapping(SortingOrder order, } } - private static SortMapping getSortMappingOne(SortingOrder order, - ColumnSource> columnSource, ReadOnlyIndex index, boolean usePrev) { + private static SortMapping getSortMappingOne(SortingOrder order, ColumnSource> columnSource, + ReadOnlyIndex index, boolean usePrev) { final long sortSize = index.size(); if (sortSize >= megaSortSize) { return doMegaSortOne(order, columnSource, index, usePrev, sortSize); } else { - return new ArraySortMapping( - doChunkSortingOne(order, columnSource, index, usePrev, (int) sortSize)); + return new ArraySortMapping(doChunkSortingOne(order, columnSource, index, usePrev, (int) sortSize)); } } @NotNull - private static SortMapping doMegaSortOne(SortingOrder order, - ColumnSource> columnSource, ReadOnlyIndex index, boolean usePrev, - long sortSize) { + private static SortMapping doMegaSortOne(SortingOrder order, ColumnSource> columnSource, + ReadOnlyIndex index, boolean usePrev, long sortSize) { final LongArraySource resultIndices = new LongArraySource(); resultIndices.ensureCapacity(sortSize, false); final ArrayBackedColumnSource valuesToMerge = - ArrayBackedColumnSource.getMemoryColumnSource(0, columnSource.getType()); + ArrayBackedColumnSource.getMemoryColumnSource(0, columnSource.getType()); valuesToMerge.ensureCapacity(sortSize, false); long accumulatedSize = 0; final LongMegaMergeKernel longMegaMergeKernel = - LongMegaMergeKernel.makeContext(columnSource.getChunkType(), order); - try ( - final LongSortKernel sortContext = + LongMegaMergeKernel.makeContext(columnSource.getChunkType(), order); + try 
(final LongSortKernel sortContext = LongSortKernel.makeContext(columnSource.getChunkType(), order, sortChunkSize, true); - final OrderedKeys.Iterator okit = index.getOrderedKeysIterator()) { + final OrderedKeys.Iterator okit = index.getOrderedKeysIterator()) { while (okit.hasMore()) { final OrderedKeys chunkOk = okit.getNextOrderedKeysWithLength(sortChunkSize); final int chunkSize = chunkOk.intSize(); - try (final WritableChunk partialValues = - makeAndFillValues(usePrev, chunkOk, columnSource)) { + try (final WritableChunk partialValues = makeAndFillValues(usePrev, chunkOk, columnSource)) { final long[] partialKeysArray = new long[chunkSize]; final WritableLongChunk partialKeys = - WritableLongChunk.writableChunkWrap(partialKeysArray); + WritableLongChunk.writableChunkWrap(partialKeysArray); chunkOk.fillKeyIndicesChunk(partialKeys); sortContext.sort(partialKeys, partialValues); - longMegaMergeKernel.merge(resultIndices, valuesToMerge, 0, accumulatedSize, - partialKeys, partialValues); + longMegaMergeKernel.merge(resultIndices, valuesToMerge, 0, accumulatedSize, partialKeys, + partialValues); accumulatedSize += chunkSize; } } @@ -509,17 +477,15 @@ private static SortMapping doMegaSortOne(SortingOrder order, } @NotNull - private static long[] doChunkSortingOne(SortingOrder order, - ColumnSource> columnSource, OrderedKeys index, boolean usePrev, - int chunkSize) { + private static long[] doChunkSortingOne(SortingOrder order, ColumnSource> columnSource, + OrderedKeys index, boolean usePrev, int chunkSize) { try (final WritableChunk values = makeAndFillValues(usePrev, index, columnSource)) { final long[] indexKeysArray = new long[chunkSize]; - final WritableLongChunk indexKeys = - WritableLongChunk.writableChunkWrap(indexKeysArray); + final WritableLongChunk indexKeys = WritableLongChunk.writableChunkWrap(indexKeysArray); index.fillKeyIndicesChunk(indexKeys); try (final LongSortKernel sortContext = - LongSortKernel.makeContext(columnSource.getChunkType(), order, 
chunkSize, false)) { + LongSortKernel.makeContext(columnSource.getChunkType(), order, chunkSize, false)) { sortContext.sort(indexKeys, values); } @@ -527,13 +493,11 @@ private static long[] doChunkSortingOne(SortingOrder order, } } - private static SortMapping getSortMappingGrouped(SortingOrder order, - ColumnSource> columnSource, ReadOnlyIndex index) { + private static SortMapping getSortMappingGrouped(SortingOrder order, ColumnSource> columnSource, + ReadOnlyIndex index) { final Map groupToRange = index.getGrouping(columnSource); - final Object[] keys = groupToRange.keySet() - .toArray((Object[]) Array.newInstance( - io.deephaven.util.type.TypeUtils.getBoxedType(columnSource.getType()), - groupToRange.size())); + final Object[] keys = groupToRange.keySet().toArray((Object[]) Array.newInstance( + io.deephaven.util.type.TypeUtils.getBoxedType(columnSource.getType()), groupToRange.size())); Arrays.sort((Comparable[]) keys, order.getComparator()); @@ -570,26 +534,23 @@ private static SortMapping getSortMappingGrouped(SortingOrder order, } } - private static SortMapping getSortMappingMulti(SortingOrder[] order, - ColumnSource>[] columnSources, ReadOnlyIndex index, boolean usePrev) { + private static SortMapping getSortMappingMulti(SortingOrder[] order, ColumnSource>[] columnSources, + ReadOnlyIndex index, boolean usePrev) { Assert.gt(columnSources.length, "columnSources.length", 1); final int sortSize = index.intSize(); final long[] indexKeysArray = new long[sortSize]; - final WritableLongChunk indexKeys = - WritableLongChunk.writableChunkWrap(indexKeysArray); + final WritableLongChunk indexKeys = WritableLongChunk.writableChunkWrap(indexKeysArray); - WritableIntChunk offsetsOut = - WritableIntChunk.makeWritableChunk((sortSize + 1) / 2); - WritableIntChunk lengthsOut = - WritableIntChunk.makeWritableChunk((sortSize + 1) / 2); + WritableIntChunk offsetsOut = WritableIntChunk.makeWritableChunk((sortSize + 1) / 2); + WritableIntChunk lengthsOut = 
WritableIntChunk.makeWritableChunk((sortSize + 1) / 2); ColumnSource> columnSource = columnSources[0]; if (index.hasGrouping(columnSources[0])) { final Map, Index> groupToRange = columnSource.getGroupToRange(); - final Object[] keys = groupToRange.keySet().toArray((Object[]) Array - .newInstance(TypeUtils.getBoxedType(columnSource.getType()), groupToRange.size())); + final Object[] keys = groupToRange.keySet().toArray( + (Object[]) Array.newInstance(TypeUtils.getBoxedType(columnSource.getType()), groupToRange.size())); Arrays.sort((Comparable[]) keys, order[0].getComparator()); @@ -616,7 +577,7 @@ private static SortMapping getSortMappingMulti(SortingOrder[] order, final WritableChunk values = makeAndFillValues(usePrev, index, columnSource); try (final LongSortKernel sortContext = - LongSortKernel.makeContext(chunkType, order[0], sortSize, true)) { + LongSortKernel.makeContext(chunkType, order[0], sortSize, true)) { sortContext.sort(indexKeys, values); } @@ -633,20 +594,18 @@ private static SortMapping getSortMappingMulti(SortingOrder[] order, final int totalRunLength = sumChunk(lengthsOut); - final WritableLongChunk indicesToFetch = - WritableLongChunk.makeWritableChunk(totalRunLength); - final WritableIntChunk originalPositions = - WritableIntChunk.makeWritableChunk(totalRunLength); + final WritableLongChunk indicesToFetch = WritableLongChunk.makeWritableChunk(totalRunLength); + final WritableIntChunk originalPositions = WritableIntChunk.makeWritableChunk(totalRunLength); final LongIntTimsortKernel.LongIntSortKernelContext sortIndexContext = - LongIntTimsortKernel.createContext(totalRunLength); + LongIntTimsortKernel.createContext(totalRunLength); ChunkType chunkType = columnSources[1].getChunkType(); - int maximumSecondarySize = computeIndicesToFetch(indexKeys, offsetsOut, lengthsOut, - indicesToFetch, originalPositions); - WritableChunk values = fetchSecondaryValues(usePrev, columnSources[1], - indicesToFetch, originalPositions, sortIndexContext, 
maximumSecondarySize); - try (final LongSortKernel sortContext = LongSortKernel - .makeContext(chunkType, order[1], indicesToFetch.size(), columnSources.length != 2)) { + int maximumSecondarySize = + computeIndicesToFetch(indexKeys, offsetsOut, lengthsOut, indicesToFetch, originalPositions); + WritableChunk values = fetchSecondaryValues(usePrev, columnSources[1], indicesToFetch, + originalPositions, sortIndexContext, maximumSecondarySize); + try (final LongSortKernel sortContext = + LongSortKernel.makeContext(chunkType, order[1], indicesToFetch.size(), columnSources.length != 2)) { // and we can sort the stuff within the run now sortContext.sort(indexKeys, values, offsetsOut, lengthsOut); } @@ -669,16 +628,15 @@ private static SortMapping getSortMappingMulti(SortingOrder[] order, chunkType = columnSource.getChunkType(); - maximumSecondarySize = computeIndicesToFetch(indexKeys, offsetsOut, lengthsOut, - indicesToFetch, originalPositions); + maximumSecondarySize = + computeIndicesToFetch(indexKeys, offsetsOut, lengthsOut, indicesToFetch, originalPositions); values.close(); - values = fetchSecondaryValues(usePrev, columnSources[columnIndex], indicesToFetch, - originalPositions, sortIndexContext, maximumSecondarySize); + values = fetchSecondaryValues(usePrev, columnSources[columnIndex], indicesToFetch, originalPositions, + sortIndexContext, maximumSecondarySize); - try (final LongSortKernel sortContext = - LongSortKernel.makeContext(chunkType, order[columnIndex], indicesToFetch.size(), - columnIndex != columnSources.length - 1)) { + try (final LongSortKernel sortContext = LongSortKernel.makeContext(chunkType, + order[columnIndex], indicesToFetch.size(), columnIndex != columnSources.length - 1)) { // and we can sort the stuff within the run now sortContext.sort(indexKeys, values, offsetsOut, lengthsOut); } @@ -706,21 +664,19 @@ private static SortMapping getSortMappingMulti(SortingOrder[] order, return new ArraySortMapping(indexKeysArray); } - private static 
WritableChunk fetchSecondaryValues(boolean usePrev, - ColumnSource columnSource, WritableLongChunk indicesToFetch, - WritableIntChunk originalPositions, - LongIntTimsortKernel.LongIntSortKernelContext sortIndexContext, - int maximumSecondarySize) { + private static WritableChunk fetchSecondaryValues(boolean usePrev, ColumnSource columnSource, + WritableLongChunk indicesToFetch, WritableIntChunk originalPositions, + LongIntTimsortKernel.LongIntSortKernelContext sortIndexContext, + int maximumSecondarySize) { sortIndexContext.sort(originalPositions, indicesToFetch); - try (final WritableChunk secondaryValues = - makeAndFillValues(usePrev, OrderedKeys.wrapKeyIndicesChunkAsOrderedKeys( - WritableLongChunk.downcast(indicesToFetch)), columnSource)) { + try (final WritableChunk secondaryValues = makeAndFillValues(usePrev, + OrderedKeys.wrapKeyIndicesChunkAsOrderedKeys(WritableLongChunk.downcast(indicesToFetch)), + columnSource)) { final ChunkType chunkType = columnSource.getChunkType(); - // make the big chunk that can hold all the relevant secondary values, in their desired - // position + // make the big chunk that can hold all the relevant secondary values, in their desired position final WritableChunk values = chunkType.makeWritableChunk(maximumSecondarySize); final PermuteKernel permuteKernel = PermuteKernel.makePermuteKernel(chunkType); @@ -730,9 +686,8 @@ private static WritableChunk fetchSecondaryValues(boolean usePrev, } private static int computeIndicesToFetch(WritableLongChunk indexKeys, - WritableIntChunk offsetsOut, WritableIntChunk lengthsOut, - WritableLongChunk indicesToFetch, - WritableIntChunk originalPositions) { + WritableIntChunk offsetsOut, WritableIntChunk lengthsOut, + WritableLongChunk indicesToFetch, WritableIntChunk originalPositions) { indicesToFetch.setSize(0); originalPositions.setSize(0); int maximumSecondarySize = 0; @@ -760,15 +715,12 @@ private static int sumChunk(IntChunk lengthsOut) { @NotNull private static WritableChunk 
makeAndFillValues(boolean usePrev, OrderedKeys ok, - ColumnSource columnSource) { - final int sortSize = - LongSizedDataStructure.intSize("SortHelper.makeAndFillValues", ok.size()); + ColumnSource columnSource) { + final int sortSize = LongSizedDataStructure.intSize("SortHelper.makeAndFillValues", ok.size()); - final WritableChunk values = - columnSource.getChunkType().makeWritableChunk(sortSize); + final WritableChunk values = columnSource.getChunkType().makeWritableChunk(sortSize); - try (final ColumnSource.FillContext primaryColumnSourceContext = - columnSource.makeFillContext(sortSize)) { + try (final ColumnSource.FillContext primaryColumnSourceContext = columnSource.makeFillContext(sortSize)) { if (usePrev) { columnSource.fillPrevChunk(primaryColumnSourceContext, values, ok); } else { diff --git a/DB/src/main/java/io/deephaven/db/v2/SortListener.java b/DB/src/main/java/io/deephaven/db/v2/SortListener.java index 6cdc959bfd2..8792c48a57e 100644 --- a/DB/src/main/java/io/deephaven/db/v2/SortListener.java +++ b/DB/src/main/java/io/deephaven/db/v2/SortListener.java @@ -33,14 +33,14 @@ public class SortListener extends BaseTable.ShiftAwareListenerImpl { private static final Logger log = LoggerFactory.getLogger(SortListener.class); // We like this key because it's in the middle of the (positive 32 bit signed integer) space. 
- public static final long REBALANCE_MIDPOINT = Configuration.getInstance() - .getLongWithDefault("QueryTable.intradaySort.rebalance.midpoint", 1L << 30); - public static final int REBALANCE_RANGE_SIZE = Configuration.getInstance() - .getIntegerWithDefault("QueryTable.intradaySort.rebalance.rangeSize", 64); - public static final int REBALANCE_GAP_SIZE = Configuration.getInstance() - .getIntegerWithDefault("QueryTable.intradaySort.rebalance.gapSize", 64); + public static final long REBALANCE_MIDPOINT = + Configuration.getInstance().getLongWithDefault("QueryTable.intradaySort.rebalance.midpoint", 1L << 30); + public static final int REBALANCE_RANGE_SIZE = + Configuration.getInstance().getIntegerWithDefault("QueryTable.intradaySort.rebalance.rangeSize", 64); + public static final int REBALANCE_GAP_SIZE = + Configuration.getInstance().getIntegerWithDefault("QueryTable.intradaySort.rebalance.gapSize", 64); public static final boolean REBALANCE_EFFORT_TRACKER_ENABLED = Configuration.getInstance() - .getBooleanWithDefault("QueryTable.intradaySort.rebalance.effortTracker.enabled", false); + .getBooleanWithDefault("QueryTable.intradaySort.rebalance.effortTracker.enabled", false); private final DynamicTable parent; private final QueryTable result; @@ -58,9 +58,9 @@ public class SortListener extends BaseTable.ShiftAwareListenerImpl { private final ModifiedColumnSet sortColumnSet; public SortListener(DynamicTable parent, QueryTable result, HashMapK4V4 reverseLookup, - ColumnSource>[] columnsToSortBy, SortingOrder[] order, - RedirectionIndex sortMapping, ColumnSource>[] sortedColumnsToSortBy, - ModifiedColumnSet.Transformer mcsTransformer, ModifiedColumnSet sortColumnSet) { + ColumnSource>[] columnsToSortBy, SortingOrder[] order, + RedirectionIndex sortMapping, ColumnSource>[] sortedColumnsToSortBy, + ModifiedColumnSet.Transformer mcsTransformer, ModifiedColumnSet sortColumnSet) { super("sortInternal", parent, result); this.parent = parent; this.result = result; @@ -72,8 +72,7 
@@ public SortListener(DynamicTable parent, QueryTable result, HashMapK4V4 reverseL this.sortedColumnsToSortBy = sortedColumnsToSortBy; this.effortTracker = REBALANCE_EFFORT_TRACKER_ENABLED ? new EffortTracker(100) : null; - // We create these comparators here so as to avoid building new ones on every call to - // doUpdate(). + // We create these comparators here so as to avoid building new ones on every call to doUpdate(). this.targetComparator = new TargetComparator(); this.mcsTransformer = mcsTransformer; @@ -82,51 +81,38 @@ public SortListener(DynamicTable parent, QueryTable result, HashMapK4V4 reverseL // The new "onUpdate" algorithm. // - // First a note about terminology: we refer to the table we are mapping as the "input" table; - // our sorted + // First a note about terminology: we refer to the table we are mapping as the "input" table; our sorted // representation of that is called the "output" table. // - // Next a note about the "modified" argument. Our algorithm computes which subset of modifies - // need to be reordered. - // These reordered modifies propagate downstream as removes plus adds. Thus, the set of modifies - // that propagate are + // Next a note about the "modified" argument. Our algorithm computes which subset of modifies need to be reordered. + // These reordered modifies propagate downstream as removes plus adds. Thus, the set of modifies that propagate are // the subset of upstream modifies that were not reordered (but may have been shifted). // // == Initialization for the removed set == // - // Allocate an array of size (removed.size() + modified.size()) and fill it with indexes (in the - // output - // coordinate space) of the 'removed' and 'reordered-modified' sets. We obtain these indexes by - // doing a reverse - // mapping lookup. Call this array 'removedOutputKeys'. Note that we must also maintain our - // redirection index states. 
+ // Allocate an array of size (removed.size() + modified.size()) and fill it with indexes (in the output + // coordinate space) of the 'removed' and 'reordered-modified' sets. We obtain these indexes by doing a reverse + // mapping lookup. Call this array 'removedOutputKeys'. Note that we must also maintain our redirection index + // states. // // == Initialization for the added set == // - // Allocate an array of size (added.size() + modified.size()) and fill it with the key indexes - // (in the input + // Allocate an array of size (added.size() + modified.size()) and fill it with the key indexes (in the input // coordinate space) of the 'added' and 'reordered-modified' sets. // - // Sort this array by key value, "ascending" (but in the sense of what the comparator thinks is - // ascending), - // breaking ties by comparing input key indices ascending (actual ascending, as in - // Long.compare). This secondary + // Sort this array by key value, "ascending" (but in the sense of what the comparator thinks is ascending), + // breaking ties by comparing input key indices ascending (actual ascending, as in Long.compare). This secondary // comparison keeps key ordering stable. // - // Make a parallel array to 'addedInputKeys'; call it 'addedOutputKeys'. The entries in this - // array indicate the - // key index in the "output" space _at_ which we want to insert an element. The calculation used - // is sensitive to + // Make a parallel array to 'addedInputKeys'; call it 'addedOutputKeys'. The entries in this array indicate the + // key index in the "output" space _at_ which we want to insert an element. The calculation used is sensitive to // whether we are operating in the forward or backward direction. The calculation used is: // - // Scanning forward, find the rightmost key value in the table that is <= the key value being - // added. If we are - // operating in the reverse direction, the index of the found key is the exact key index to use. 
- // On the other + // Scanning forward, find the rightmost key value in the table that is <= the key value being added. If we are + // operating in the reverse direction, the index of the found key is the exact key index to use. On the other // hand, if we are moving in the forward direction, we adjust it by adding 1 to that index. // - // For output indices >= the median, we want to operate in the forward direction. For output - // indices < median, + // For output indices >= the median, we want to operate in the forward direction. For output indices < median, // we want to operate in the reverse direction. // // Example of existing table: @@ -136,85 +122,65 @@ public SortListener(DynamicTable parent, QueryTable result, HashMapK4V4 reverseL // Note that the median of this table is 40. // // Values to add (note these have already been sorted thanks to the code above): - // B: highest <= key doesn't exist (start of table is a special case), so at-key-index is 9 and - // direction is reverse (this will occupy an empty slot at 9) - // C: highest <= key is C at 10, before the median, so at-key-index is 10 and dir is reverse - // (this will push the existing C to the left) + // B: highest <= key doesn't exist (start of table is a special case), so at-key-index is 9 and direction is reverse + // (this will occupy an empty slot at 9) + // C: highest <= key is C at 10, before the median, so at-key-index is 10 and dir is reverse (this will push the + // existing C to the left) // D: highest <= key is C at 10, before median, at-key-index 10, reverse, pushes C to the left // E: highest <= key is E at 20, before median, at-key-index 20, reverse, pushes E to the left - // I: highest <= key is I at 50, after median, at-key-index 51 (recall the +1 rule), forward, - // pushes O to the right + // I: highest <= key is I at 50, after median, at-key-index 51 (recall the +1 rule), forward, pushes O to the right // J: highest <= key is I at 50, after median, at-key-index 51, forward, 
pushes O to the right // O: highest <= key is O at 51, after median, at-key-index 52, forward, pushes U to the right - // Z: highest <= key is U at 52, after median, at-key-index 53, forward, occupies an empty slot - // at 53. + // Z: highest <= key is U at 52, after median, at-key-index 53, forward, occupies an empty slot at 53. // // (End example) // - // == Split the work between the 'forward' and 'reverse' direction, by splitting at the median - // == + // == Split the work between the 'forward' and 'reverse' direction, by splitting at the median == // - // For the sake of efficiency we divide our work between some items we want to insert in the - // "forward" direction - // (moving elements to the right), and other items we want to insert in the "reverse" direction - // (moving elements + // For the sake of efficiency we divide our work between some items we want to insert in the "forward" direction + // (moving elements to the right), and other items we want to insert in the "reverse" direction (moving elements // to the left). // - // Then we apply the below algorithm to each part. First, we process the "reverse" elements in - // the reverse - // direction. Then we process the "forward" elements in the forward direction. After both sides - // are done, we + // Then we apply the below algorithm to each part. First, we process the "reverse" elements in the reverse + // direction. Then we process the "forward" elements in the forward direction. After both sides are done, we // apply the changes to the output set and notify our downstream listeners. // // == Processing the elements (in a given direction) === // - // We work through the added queue. We take turns between writing as many added/modified rows as - // possible and then - // removing as many things off of the backlog as possible. The backlog is "virtual", in that we - // use the resultIndex + // We work through the added queue. 
We take turns between writing as many added/modified rows as possible and then + // removing as many things off of the backlog as possible. The backlog is "virtual", in that we use the resultIndex // to remember that we have a mapping already at a particular row. // - // The destination for these merged queue items is 'destinationSlot', which starts at the - // configured start point + // The destination for these merged queue items is 'destinationSlot', which starts at the configured start point // (probably the median) and marches "ahead" (in the direction we're operating in). Furthermore, - // 'destinationSlot' is never "before" 'desiredSlot', so it skips "ahead" as needed (again, the - // notion of + // 'destinationSlot' is never "before" 'desiredSlot', so it skips "ahead" as needed (again, the notion of // "before" and "ahead" depend on the direction we are operating in). // // The operation repeats the following steps until all rows were inserted. // - // There is one final piece to the logic. Threaded throughout the loop there is code that has to - // do with - // "spreading" elements when they get overcrowded. The general approach is to watch for a run - // greater than - // "maximumRunLength", a value defined below. (A run is a contiguous sequence in the index where - // we have had to - // move every key. For example, if there are 300 contiguous keys and we inserted a single key at - // the beginning, - // this would be a run of 300 even though the backlog never got larger than size 1). We compute - // up front whether or - // not we will have a large run, and if so, we start spreading as soon as we start placing - // elements. Additionally, + // There is one final piece to the logic. Threaded throughout the loop there is code that has to do with + // "spreading" elements when they get overcrowded. The general approach is to watch for a run greater than + // "maximumRunLength", a value defined below. 
(A run is a contiguous sequence in the index where we have had to + // move every key. For example, if there are 300 contiguous keys and we inserted a single key at the beginning, + // this would be a run of 300 even though the backlog never got larger than size 1). We compute up front whether or + // not we will have a large run, and if so, we start spreading as soon as we start placing elements. Additionally, // we always spread when we append to either end of the table. @Override public void onUpdate(final Update upstream) { try (final SafeCloseableList closer = new SafeCloseableList()) { final Update downstream = new Update(); final boolean modifiedNeedsSorting = - upstream.modifiedColumnSet.containsAny(sortColumnSet) - && upstream.modified.nonempty(); + upstream.modifiedColumnSet.containsAny(sortColumnSet) && upstream.modified.nonempty(); final long REVERSE_LOOKUP_NO_ENTRY_VALUE = reverseLookup.getNoEntryValue(); - // We use these in enough places that we might as well just grab them (and check their - // sizes) here. + // We use these in enough places that we might as well just grab them (and check their sizes) here. upstream.added.intSize("validating added elements"); final int removedSize = upstream.removed.intSize("allocating removed elements"); final int modifiedSize = - modifiedNeedsSorting ? upstream.modified.intSize("allocating modified elements") - : 0; + modifiedNeedsSorting ? 
upstream.modified.intSize("allocating modified elements") : 0; Assert.assertion((long) removedSize + (long) modifiedSize <= Integer.MAX_VALUE, - "(long)removedSize + (long)modifiedSize <= Integer.MAX_VALUE"); + "(long)removedSize + (long)modifiedSize <= Integer.MAX_VALUE"); int numRemovedKeys = removedSize; final long[] removedOutputKeys = new long[removedSize + modifiedSize]; @@ -222,14 +188,11 @@ public void onUpdate(final Update upstream) { if (numRemovedKeys > 0) { fillArray(removedOutputKeys, upstream.removed, 0, reverseLookup::remove); Arrays.sort(removedOutputKeys, 0, numRemovedKeys); - final LongChunk keyChunk = - LongChunk.chunkWrap(removedOutputKeys, 0, numRemovedKeys); - try (final OrderedKeys wrappedKeyChunk = - OrderedKeys.wrapKeyIndicesChunkAsOrderedKeys(keyChunk)) { + final LongChunk keyChunk = LongChunk.chunkWrap(removedOutputKeys, 0, numRemovedKeys); + try (final OrderedKeys wrappedKeyChunk = OrderedKeys.wrapKeyIndicesChunkAsOrderedKeys(keyChunk)) { sortMapping.removeAll(wrappedKeyChunk); } - try (final Index rmKeyIndex = - sortedArrayToIndex(removedOutputKeys, 0, numRemovedKeys)) { + try (final Index rmKeyIndex = sortedArrayToIndex(removedOutputKeys, 0, numRemovedKeys)) { resultIndex.remove(rmKeyIndex); } } @@ -247,40 +210,33 @@ public void onUpdate(final Update upstream) { } final long indexKeyForLeftmostInsert = - resultIndex.empty() ? REBALANCE_MIDPOINT : resultIndex.firstKey() - 1; + resultIndex.empty() ? REBALANCE_MIDPOINT : resultIndex.firstKey() - 1; if (indexKeyForLeftmostInsert <= 0) { // Actually we "could", but we "don't" (yet). - throw new IllegalStateException( - "Table has filled to key index 0; need to rebalance but cannot."); + throw new IllegalStateException("Table has filled to key index 0; need to rebalance but cannot."); } // Identify the location where each key needs to be inserted. int numAddedKeys = 0; int numPropagatedModdedKeys = 0; final Index addedAndModified = - modifiedNeedsSorting ? 
closer.add(upstream.added.union(upstream.modified)) - : upstream.added; + modifiedNeedsSorting ? closer.add(upstream.added.union(upstream.modified)) : upstream.added; final long[] addedInputKeys = - SortHelpers.getSortedKeys(order, columnsToSortBy, addedAndModified, false, false) - .getArrayMapping(); + SortHelpers.getSortedKeys(order, columnsToSortBy, addedAndModified, false, false).getArrayMapping(); final long[] addedOutputKeys = new long[addedInputKeys.length]; - final long[] propagatedModOutputKeys = - modifiedNeedsSorting ? new long[upstream.modified.intSize()] + final long[] propagatedModOutputKeys = modifiedNeedsSorting ? new long[upstream.modified.intSize()] : CollectionUtil.ZERO_LENGTH_LONG_ARRAY; final Index.SearchIterator ait = resultIndex.searchIterator(); for (int ii = 0; ii < addedInputKeys.length; ++ii) { targetComparator.setTarget(addedInputKeys[ii]); - final long after = - ait.binarySearchValue(targetComparator, SortingOrder.Ascending.direction); + final long after = ait.binarySearchValue(targetComparator, SortingOrder.Ascending.direction); final long outputKey = after == -1 ? indexKeyForLeftmostInsert : after; - final long curr = modifiedNeedsSorting ? reverseLookup.get(addedInputKeys[ii]) - : REVERSE_LOOKUP_NO_ENTRY_VALUE; + final long curr = + modifiedNeedsSorting ? 
reverseLookup.get(addedInputKeys[ii]) : REVERSE_LOOKUP_NO_ENTRY_VALUE; - // check if new location differs from current location or if the previous row needs - // to slot here - if (curr != outputKey - || (numAddedKeys > 0 && addedOutputKeys[numAddedKeys - 1] == curr)) { + // check if new location differs from current location or if the previous row needs to slot here + if (curr != outputKey || (numAddedKeys > 0 && addedOutputKeys[numAddedKeys - 1] == curr)) { // true for all adds and reordered mods addedInputKeys[numAddedKeys] = addedInputKeys[ii]; addedOutputKeys[numAddedKeys] = outputKey; @@ -296,19 +252,18 @@ public void onUpdate(final Update upstream) { } } - // Process downstream removed keys. Note that sortMapping cannot be modified until after - // the above loop completes + // Process downstream removed keys. Note that sortMapping cannot be modified until after the above loop + // completes // otherwise the algorithm will not be able to break ties by upstream keyspace. if (numRemovedKeys > removedSize) { Arrays.sort(removedOutputKeys, removedSize, numRemovedKeys); - final LongChunk keyChunk = LongChunk.chunkWrap(removedOutputKeys, - removedSize, numRemovedKeys - removedSize); - try (final OrderedKeys wrappedKeyChunk = - OrderedKeys.wrapKeyIndicesChunkAsOrderedKeys(keyChunk)) { + final LongChunk keyChunk = + LongChunk.chunkWrap(removedOutputKeys, removedSize, numRemovedKeys - removedSize); + try (final OrderedKeys wrappedKeyChunk = OrderedKeys.wrapKeyIndicesChunkAsOrderedKeys(keyChunk)) { sortMapping.removeAll(wrappedKeyChunk); } - try (final Index rmKeyIndex = sortedArrayToIndex(removedOutputKeys, removedSize, - numRemovedKeys - removedSize)) { + try (final Index rmKeyIndex = + sortedArrayToIndex(removedOutputKeys, removedSize, numRemovedKeys - removedSize)) { resultIndex.remove(rmKeyIndex); } Arrays.sort(removedOutputKeys, 0, numRemovedKeys); @@ -316,58 +271,51 @@ public void onUpdate(final Update upstream) { downstream.removed = 
sortedArrayToIndex(removedOutputKeys, 0, numRemovedKeys); final long medianOutputKey = - resultIndex.empty() ? REBALANCE_MIDPOINT : resultIndex.get(resultIndex.size() / 2); + resultIndex.empty() ? REBALANCE_MIDPOINT : resultIndex.get(resultIndex.size() / 2); int addedStart = findKeyStart(addedOutputKeys, medianOutputKey, numAddedKeys); - // The forward items in the add queue need to be adjusted by +1 for the logic to be - // right. + // The forward items in the add queue need to be adjusted by +1 for the logic to be right. for (int ii = addedStart; ii < numAddedKeys; ++ii) { addedOutputKeys[ii]++; } // Queues going in the reverse direction final QueueState rqs = new QueueState(-1, addedOutputKeys, addedInputKeys, - addedStart - 1, -1); + addedStart - 1, -1); // Queues going in the forward direction final QueueState fqs = new QueueState(1, addedOutputKeys, addedInputKeys, - addedStart, numAddedKeys); + addedStart, numAddedKeys); final IndexShiftData.Builder shiftBuilder = new IndexShiftData.Builder(); final Index.SequentialBuilder addedBuilder = Index.FACTORY.getSequentialBuilder(); - performUpdatesInDirection(addedBuilder, shiftBuilder, medianOutputKey - 1, rqs, - mappingChanges); - performUpdatesInDirection(addedBuilder, shiftBuilder, medianOutputKey, fqs, - mappingChanges); + performUpdatesInDirection(addedBuilder, shiftBuilder, medianOutputKey - 1, rqs, mappingChanges); + performUpdatesInDirection(addedBuilder, shiftBuilder, medianOutputKey, fqs, mappingChanges); downstream.added = addedBuilder.getIndex(); downstream.shifted = shiftBuilder.build(); mappingChanges.flush(); // Compute modified set in post-shift space. 
if (modifiedNeedsSorting && numPropagatedModdedKeys == 0 || upstream.modified.empty() - || upstream.modifiedColumnSet.empty()) { + || upstream.modifiedColumnSet.empty()) { downstream.modified = Index.FACTORY.getEmptyIndex(); } else if (modifiedNeedsSorting) { Arrays.sort(propagatedModOutputKeys, 0, numPropagatedModdedKeys); int ii, si; - final Index.SequentialBuilder modifiedBuilder = - Index.FACTORY.getSequentialBuilder(); - for (ii = 0, si = 0; ii < numPropagatedModdedKeys - && si < downstream.shifted.size(); ++si) { + final Index.SequentialBuilder modifiedBuilder = Index.FACTORY.getSequentialBuilder(); + for (ii = 0, si = 0; ii < numPropagatedModdedKeys && si < downstream.shifted.size(); ++si) { final long beginRange = downstream.shifted.getBeginRange(si); final long endRange = downstream.shifted.getEndRange(si); final long shiftDelta = downstream.shifted.getShiftDelta(si); // before the shifted range - for (; ii < numPropagatedModdedKeys - && propagatedModOutputKeys[ii] < beginRange; ++ii) { + for (; ii < numPropagatedModdedKeys && propagatedModOutputKeys[ii] < beginRange; ++ii) { modifiedBuilder.appendKey(propagatedModOutputKeys[ii]); } // the shifted range - for (; ii < numPropagatedModdedKeys - && propagatedModOutputKeys[ii] <= endRange; ++ii) { + for (; ii < numPropagatedModdedKeys && propagatedModOutputKeys[ii] <= endRange; ++ii) { modifiedBuilder.appendKey(propagatedModOutputKeys[ii] + shiftDelta); } } @@ -382,8 +330,7 @@ public void onUpdate(final Update upstream) { final long[] modifiedOutputKeys = new long[upstream.modified.intSize()]; fillArray(modifiedOutputKeys, upstream.modified, 0, reverseLookup::get); Arrays.sort(modifiedOutputKeys); - downstream.modified = - sortedArrayToIndex(modifiedOutputKeys, 0, modifiedOutputKeys.length); + downstream.modified = sortedArrayToIndex(modifiedOutputKeys, 0, modifiedOutputKeys.length); } // Calculate downstream MCS. 
@@ -391,8 +338,7 @@ public void onUpdate(final Update upstream) { downstream.modifiedColumnSet = ModifiedColumnSet.EMPTY; } else { downstream.modifiedColumnSet = result.modifiedColumnSet; - mcsTransformer.clearAndTransform(upstream.modifiedColumnSet, - downstream.modifiedColumnSet); + mcsTransformer.clearAndTransform(upstream.modifiedColumnSet, downstream.modifiedColumnSet); } // Update the final result index. @@ -414,9 +360,9 @@ private Index sortedArrayToIndex(long[] arr, int offset, int length) { * @param start Start position * @param qs Queue state -- containing the view on the various keys arrays, directions, etc. */ - private void performUpdatesInDirection(final Index.SequentialBuilder added, - final IndexShiftData.Builder shifted, final long start, - final QueueState qs, final SortMappingAggregator mappingChanges) { + private void performUpdatesInDirection(final Index.SequentialBuilder added, final IndexShiftData.Builder shifted, + final long start, + final QueueState qs, final SortMappingAggregator mappingChanges) { final long numRequestedAdds = (qs.addedEnd - qs.addedCurrent) * qs.direction; if (numRequestedAdds == 0) { @@ -425,33 +371,29 @@ private void performUpdatesInDirection(final Index.SequentialBuilder added, long numWrites = 0; final DirectionalResettableSequentialBuilder resultAdded = - new DirectionalResettableSequentialBuilder(qs.direction); + new DirectionalResettableSequentialBuilder(qs.direction); final DirectionalResettableIndexShiftDataBuilder resultShifted = - new DirectionalResettableIndexShiftDataBuilder(qs.direction); + new DirectionalResettableIndexShiftDataBuilder(qs.direction); final DirectionalResettableSequentialBuilder modRemoved = - new DirectionalResettableSequentialBuilder(qs.direction); + new DirectionalResettableSequentialBuilder(qs.direction); final DirectionalResettableSequentialBuilder modAdded = - new DirectionalResettableSequentialBuilder(qs.direction); + new DirectionalResettableSequentialBuilder(qs.direction); - // 
When we notice that there is a long run length of contiguous mapping, we enter into - // spread mode which leaves + // When we notice that there is a long run length of contiguous mapping, we enter into spread mode which leaves // gaps for future incremental updates to use. - // We use the sqrt of the resultIndex.size(), but we pick a reasonable minimum size. By the - // way, + // We use the sqrt of the resultIndex.size(), but we pick a reasonable minimum size. By the way, // integer overflow on cast-to-int wouldn't be a problem until - // resultIndex.size() >= Integer.MAX_VALUE^2, which we won't reach because we'll run out of - // memory long + // resultIndex.size() >= Integer.MAX_VALUE^2, which we won't reach because we'll run out of memory long // before. final int minimumRunLength = REBALANCE_RANGE_SIZE + REBALANCE_GAP_SIZE; - final int maximumRunLength = - Math.max(minimumRunLength, (int) Math.sqrt(resultIndex.size())); + final int maximumRunLength = Math.max(minimumRunLength, (int) Math.sqrt(resultIndex.size())); final Index.SearchIterator gapEvictionIter = - (qs.direction == -1) ? resultIndex.reverseIterator() : resultIndex.searchIterator(); + (qs.direction == -1) ? resultIndex.reverseIterator() : resultIndex.searchIterator(); final Index.SearchIterator backlogIter = - (qs.direction == -1) ? resultIndex.reverseIterator() : resultIndex.searchIterator(); + (qs.direction == -1) ? 
resultIndex.reverseIterator() : resultIndex.searchIterator(); long destKey = qs.addedOutputKeys[qs.addedCurrent]; while (qs.hasMoreToAdd()) { @@ -468,8 +410,7 @@ private void performUpdatesInDirection(final Index.SequentialBuilder added, long writesUntilGap = Math.max(1, REBALANCE_RANGE_SIZE / 2); do { - // insert extra space at the end of the table; because it's the right thing to - // do + // insert extra space at the end of the table; because it's the right thing to do if (--writesUntilGap == 0) { destKey = insertAGap(destKey, qs, modRemoved, mappingChanges, null); writesUntilGap = REBALANCE_RANGE_SIZE; @@ -487,42 +428,37 @@ private void performUpdatesInDirection(final Index.SequentialBuilder added, // determine if we must be in spreading mode final long maxRunKey = desiredOutputKey + maximumRunLength * qs.direction; - // note: this is an (over) approximation of cardinality since binarySearch will give any - // index if exists + // note: this is an (over) approximation of cardinality since binarySearch will give any index if exists long addedMaxIdx; if (qs.direction == -1) { - addedMaxIdx = qs.twiddleIfNegative( - Arrays.binarySearch(qs.addedOutputKeys, 0, qs.addedCurrent, maxRunKey)); + addedMaxIdx = + qs.twiddleIfNegative(Arrays.binarySearch(qs.addedOutputKeys, 0, qs.addedCurrent, maxRunKey)); } else { - addedMaxIdx = qs.twiddleIfNegative(Arrays.binarySearch(qs.addedOutputKeys, - qs.addedCurrent, qs.addedEnd, maxRunKey)); + addedMaxIdx = qs.twiddleIfNegative( + Arrays.binarySearch(qs.addedOutputKeys, qs.addedCurrent, qs.addedEnd, maxRunKey)); } - // note: if Index.SearchIterator had an O(1) method to get pos we should prefer that - // over Index#find, - // turn maxRunKey into an advancing iterator (similar to gapEvictionIter), and also use - // that method to compute sizeToShift + // note: if Index.SearchIterator had an O(1) method to get pos we should prefer that over Index#find, + // turn maxRunKey into an advancing iterator (similar to gapEvictionIter), and 
also use that method to + // compute sizeToShift final long backMaxIdx = qs.twiddleIfNegative(resultIndex.find(maxRunKey)); long sizeToAdd = qs.direction * (addedMaxIdx - qs.addedCurrent); - long sizeToShift = - qs.direction * (backMaxIdx - resultIndex.find(backlogIter.currentValue())); + long sizeToShift = qs.direction * (backMaxIdx - resultIndex.find(backlogIter.currentValue())); final boolean spreadMode = sizeToAdd + sizeToShift >= maximumRunLength; long writesUntilGap = REBALANCE_RANGE_SIZE; boolean backlogged = false; - // stay in this loop until we might need to enable spreading; don't leave this loop - // while backlog is non-empty - while (!tableEmpty && (backlogged || sizeToAdd > 0 && (spreadMode - || (sizeToAdd + sizeToShift) <= qs.direction * (maxRunKey - destKey)))) { + // stay in this loop until we might need to enable spreading; don't leave this loop while backlog is + // non-empty + while (!tableEmpty && (backlogged || sizeToAdd > 0 + && (spreadMode || (sizeToAdd + sizeToShift) <= qs.direction * (maxRunKey - destKey)))) { // Add anything prior to the next possible backlog item. - while (qs.hasMoreToAdd() - && qs.isBefore(desiredOutputKey, backlogIter.currentValue() + qs.direction)) { + while (qs.hasMoreToAdd() && qs.isBefore(desiredOutputKey, backlogIter.currentValue() + qs.direction)) { if (spreadMode && --writesUntilGap == 0) { - destKey = - insertAGap(destKey, qs, modRemoved, mappingChanges, gapEvictionIter); + destKey = insertAGap(destKey, qs, modRemoved, mappingChanges, gapEvictionIter); writesUntilGap = REBALANCE_RANGE_SIZE; } @@ -540,15 +476,13 @@ private void performUpdatesInDirection(final Index.SequentialBuilder added, } } - // Either, all items have been added, or next item comes after the backlog item(s) - // is(are) processed. + // Either, all items have been added, or next item comes after the backlog item(s) is(are) processed. 
long backlogKey = backlogIter.currentValue(); final boolean writesPending = qs.hasMoreToAdd(); while (((!writesPending || qs.isBefore(backlogKey, desiredOutputKey)) - && qs.isBefore(backlogKey, destKey))) { + && qs.isBefore(backlogKey, destKey))) { if (spreadMode && --writesUntilGap == 0) { - destKey = - insertAGap(destKey, qs, modRemoved, mappingChanges, gapEvictionIter); + destKey = insertAGap(destKey, qs, modRemoved, mappingChanges, gapEvictionIter); writesUntilGap = REBALANCE_RANGE_SIZE; } @@ -568,10 +502,9 @@ private void performUpdatesInDirection(final Index.SequentialBuilder added, backlogKey = backlogIter.nextLong(); } - // must disable shift coalescing if any keys between last shift and next shift are - // not shifted + // must disable shift coalescing if any keys between last shift and next shift are not shifted backlogged = (writesPending && qs.isBefore(desiredOutputKey, destKey)) || - (!tableEmpty && qs.isBefore(backlogIter.currentValue(), destKey)); + (!tableEmpty && qs.isBefore(backlogIter.currentValue(), destKey)); if (!backlogged) { // note that we don't bother counting the number of shifted rows we're skipping resultShifted.noteBacklogNowEmpty(); @@ -597,9 +530,9 @@ private void performUpdatesInDirection(final Index.SequentialBuilder added, } private long insertAGap(final long destinationSlot, final QueueState qs, - final DirectionalResettableSequentialBuilder modRemoved, - final SortMappingAggregator mappingChanges, - final Index.SearchIterator gapEvictionIter) { + final DirectionalResettableSequentialBuilder modRemoved, + final SortMappingAggregator mappingChanges, + final Index.SearchIterator gapEvictionIter) { final long gapEnd = destinationSlot + REBALANCE_GAP_SIZE * qs.direction; // exclusive checkDestinationSlotOk(gapEnd); @@ -621,8 +554,8 @@ private long insertAGap(final long destinationSlot, final QueueState qs, } /** - * The following may clarify what we are doing: lKey (the "target") is in input coordinates rKey - * (the "probe") is in 
output coordinates + * The following may clarify what we are doing: lKey (the "target") is in input coordinates rKey (the "probe") is in + * output coordinates */ private class TargetComparator implements Index.TargetComparator { private long lKey; @@ -630,11 +563,11 @@ private class TargetComparator implements Index.TargetComparator { TargetComparator() { Assert.eq(columnsToSortBy.length, "columnsToSortBy.length", - sortedColumnsToSortBy.length, "sortedColumnsToSortBy.length"); + sortedColumnsToSortBy.length, "sortedColumnsToSortBy.length"); this.comparators = new ColumnComparatorFactory.IComparator[columnsToSortBy.length]; for (int ii = 0; ii < columnsToSortBy.length; ii++) { - comparators[ii] = ColumnComparatorFactory.createComparatorLeftCurrRightPrev( - columnsToSortBy[ii], sortedColumnsToSortBy[ii]); + comparators[ii] = ColumnComparatorFactory.createComparatorLeftCurrRightPrev(columnsToSortBy[ii], + sortedColumnsToSortBy[ii]); } setTarget(-1); } @@ -680,8 +613,7 @@ private class SortMappingAggregator implements SafeCloseable { keysChunk = WritableLongChunk.makeWritableChunk(chunkSize); valuesChunk = WritableLongChunk.makeWritableChunk(chunkSize); fillFromContext = sortMapping.makeFillFromContext(chunkSize); - sortKernel = - LongSortKernel.makeContext(ChunkType.Long, SortingOrder.Ascending, chunkSize, true); + sortKernel = LongSortKernel.makeContext(ChunkType.Long, SortingOrder.Ascending, chunkSize, true); } @Override @@ -708,8 +640,7 @@ public void flush() { // noinspection unchecked sortKernel.sort(valuesChunk, keysChunk); - try (final OrderedKeys orderedKeys = - OrderedKeys.wrapKeyIndicesChunkAsOrderedKeys(keysChunk)) { + try (final OrderedKeys orderedKeys = OrderedKeys.wrapKeyIndicesChunkAsOrderedKeys(keysChunk)) { // noinspection unchecked sortMapping.fillFromChunk(fillFromContext, valuesChunk, orderedKeys); } @@ -741,13 +672,11 @@ public int checkpoint() { private static int findKeyStart(long[] array, long key, int arrayLen) { int index = 
Arrays.binarySearch(array, 0, arrayLen, key); if (index < 0) { - // If the key was not found, then return the hypothetical insertion point (the first - // element > key) + // If the key was not found, then return the hypothetical insertion point (the first element > key) return -index - 1; } - // If the key was found, then there might be multiple keys with the same value. If so, walk - // backwards to the + // If the key was found, then there might be multiple keys with the same value. If so, walk backwards to the // first such key. while (index > 0 && array[index - 1] == key) { --index; @@ -781,11 +710,10 @@ public void add(long numWrites, long numRequestedAdds) { } String summarize() { - final double workRatio = totalNumRequestedAdds == 0 ? 0 - : (double) totalNumWrites / (double) totalNumRequestedAdds; - return String.format( - "Sort Effort Summary: samples=%d, writes=%d, requested=%d, ratio=%g", - writes.size(), totalNumWrites, totalNumRequestedAdds, workRatio); + final double workRatio = + totalNumRequestedAdds == 0 ? 0 : (double) totalNumWrites / (double) totalNumRequestedAdds; + return String.format("Sort Effort Summary: samples=%d, writes=%d, requested=%d, ratio=%g", + writes.size(), totalNumWrites, totalNumRequestedAdds, workRatio); } } @@ -796,11 +724,10 @@ private static class DirectionalResettableSequentialBuilder implements Index.Seq private DirectionalResettableSequentialBuilder(int direction, long[] initialItems) { this(direction, initialItems, direction > 0 ? 0 : initialItems.length - 1, - direction > 0 ? initialItems.length : -1); + direction > 0 ? 
initialItems.length : -1); } - private DirectionalResettableSequentialBuilder(int direction, long[] initialItems, - int begin, int end) { + private DirectionalResettableSequentialBuilder(int direction, long[] initialItems, int begin, int end) { this(direction); while (begin != end) { appendKey(initialItems[begin]); @@ -831,10 +758,8 @@ public void appendRange(long firstKey, long lastKey) { // if direction == -1, then lastKey must be <= firstKey final int rangeDirection = -Long.compare(firstKey, lastKey); if (rangeDirection * direction < 0) { - Assert.assertion(rangeDirection * direction >= 0, - "Range must be compatible with direction", - (Object) firstKey, "firstKey", (Object) lastKey, "lastKey", direction, - "direction"); + Assert.assertion(rangeDirection * direction >= 0, "Range must be compatible with direction", + (Object) firstKey, "firstKey", (Object) lastKey, "lastKey", direction, "direction"); } final int lSize = lasts.size(); @@ -842,8 +767,8 @@ public void appendRange(long firstKey, long lastKey) { final long lastLast = lasts.get(lSize - 1); Assert.assertion(Long.compare(lastKey, lastLast) * direction > 0, - "Long.compare(lastKey, lastLast) * direction > 0", - "New key not being added in the right direction"); + "Long.compare(lastKey, lastLast) * direction > 0", + "New key not being added in the right direction"); if (lastLast + direction == firstKey) { lasts.set(lSize - 1, lastKey); return; @@ -892,9 +817,8 @@ private DirectionalResettableIndexShiftDataBuilder(int direction) { private void noteRequiredShift(final long key, final long delta) { if (delta * direction <= 0) { - Assert.assertion(delta * direction > 0, - "Shift delta must be compatible with direction", - (Object) key, "key", (Object) delta, "delta", direction, "direction"); + Assert.assertion(delta * direction > 0, "Shift delta must be compatible with direction", + (Object) key, "key", (Object) delta, "delta", direction, "direction"); } final int lSize = lasts.size(); @@ -943,8 +867,7 @@ 
private static class QueueState { // The exclusive end index for the added(Input,Output)Keys final int addedEnd; - QueueState(int direction, long[] addedOutputKeys, long[] addedInputKeys, int addedCurrent, - int addedEnd) { + QueueState(int direction, long[] addedOutputKeys, long[] addedInputKeys, int addedCurrent, int addedEnd) { this.direction = direction; this.addedOutputKeys = addedOutputKeys; this.addedInputKeys = addedInputKeys; @@ -975,13 +898,13 @@ private long twiddleIfNegative(long a) { private static void checkDestinationSlotOk(long destinationSlot) { if (destinationSlot <= 0 || destinationSlot == Long.MAX_VALUE) { throw new IllegalStateException( - String.format("While updating index, the destination slot %d reached its limit", - destinationSlot)); + String.format("While updating index, the destination slot %d reached its limit", + destinationSlot)); } } private static void fillArray(final long[] dest, final Index src, final int destIndex, - final LongUnaryOperator transformer) { + final LongUnaryOperator transformer) { final MutableInt pos = new MutableInt(destIndex); src.forAllLongs((final long v) -> { dest[pos.intValue()] = transformer.applyAsLong(v); @@ -1000,10 +923,9 @@ private static void showGaps(Index index) { long usedEnd = i.currentRangeEnd(); long usedSize = usedEnd - usedStart + 1; - System.out.printf( - "free %14d [%14d..%14d] [0x%10x..0x%10x] used %14d [%14d..%14d] [0x%10x..0x%10x]%n", - freeSize, freeStart, freeEnd, freeStart, freeEnd, - usedSize, usedStart, usedEnd, usedStart, usedEnd); + System.out.printf("free %14d [%14d..%14d] [0x%10x..0x%10x] used %14d [%14d..%14d] [0x%10x..0x%10x]%n", + freeSize, freeStart, freeEnd, freeStart, freeEnd, + usedSize, usedStart, usedEnd, usedStart, usedEnd); freeStart = usedEnd + 1; } } diff --git a/DB/src/main/java/io/deephaven/db/v2/SortOperation.java b/DB/src/main/java/io/deephaven/db/v2/SortOperation.java index bb14132cf49..b644cc3f6bd 100644 --- 
a/DB/src/main/java/io/deephaven/db/v2/SortOperation.java +++ b/DB/src/main/java/io/deephaven/db/v2/SortOperation.java @@ -41,25 +41,21 @@ public class SortOperation implements QueryTable.MemoizableOperation public SortOperation(QueryTable parent, SortPair[] sortPairs) { this.parent = parent; this.sortPairs = sortPairs; - this.sortOrder = - Arrays.stream(sortPairs).map(SortPair::getOrder).toArray(SortingOrder[]::new); - this.sortColumnNames = - Arrays.stream(sortPairs).map(SortPair::getColumn).toArray(String[]::new); + this.sortOrder = Arrays.stream(sortPairs).map(SortPair::getOrder).toArray(SortingOrder[]::new); + this.sortColumnNames = Arrays.stream(sortPairs).map(SortPair::getColumn).toArray(String[]::new); // noinspection unchecked sortColumns = new ColumnSource[sortColumnNames.length]; for (int ii = 0; ii < sortColumnNames.length; ++ii) { // noinspection unchecked - sortColumns[ii] = - QueryTable.maybeTransformToPrimitive(parent.getColumnSource(sortColumnNames[ii])); + sortColumns[ii] = QueryTable.maybeTransformToPrimitive(parent.getColumnSource(sortColumnNames[ii])); Require.requirement( - Comparable.class.isAssignableFrom(sortColumns[ii].getType()) - || sortColumns[ii].getType().isPrimitive(), - "Comparable.class.isAssignableFrom(sortColumns[ii].getType()) || sortColumns[ii].getType().isPrimitive()", - sortColumnNames[ii], "sortColumnNames[ii]", sortColumns[ii].getType(), - "sortColumns[ii].getType()"); + Comparable.class.isAssignableFrom(sortColumns[ii].getType()) + || sortColumns[ii].getType().isPrimitive(), + "Comparable.class.isAssignableFrom(sortColumns[ii].getType()) || sortColumns[ii].getType().isPrimitive()", + sortColumnNames[ii], "sortColumnNames[ii]", sortColumns[ii].getType(), "sortColumns[ii].getType()"); } parent.assertSortable(sortColumnNames); @@ -95,8 +91,7 @@ public synchronized boolean end(long clockCycle) { }; } - private static boolean alreadySorted(final QueryTable parent, - @NotNull final SortHelpers.SortMapping sortedKeys) { + private 
static boolean alreadySorted(final QueryTable parent, @NotNull final SortHelpers.SortMapping sortedKeys) { if (sortedKeys.size() == 0) { return true; } @@ -115,11 +110,10 @@ private QueryTable historicalSort(SortHelpers.SortMapping sortedKeys) { final Index resultIndex = Index.FACTORY.getFlatIndex(sortedKeys.size()); final Map> resultMap = new LinkedHashMap<>(); - for (Map.Entry stringColumnSourceEntry : this.parent - .getColumnSourceMap().entrySet()) { + for (Map.Entry stringColumnSourceEntry : this.parent.getColumnSourceMap().entrySet()) { // noinspection unchecked - resultMap.put(stringColumnSourceEntry.getKey(), new ReadOnlyRedirectedColumnSource<>( - sortMapping, stringColumnSourceEntry.getValue())); + resultMap.put(stringColumnSourceEntry.getKey(), + new ReadOnlyRedirectedColumnSource<>(sortMapping, stringColumnSourceEntry.getValue())); } resultTable = new QueryTable(resultIndex, resultMap); @@ -130,20 +124,16 @@ private QueryTable historicalSort(SortHelpers.SortMapping sortedKeys) { } @NotNull - private Result streamSort( - @NotNull final SortHelpers.SortMapping initialSortedKeys) { + private Result streamSort(@NotNull final SortHelpers.SortMapping initialSortedKeys) { final LongChunkColumnSource initialInnerRedirectionSource = new LongChunkColumnSource(); if (initialSortedKeys.size() > 0) { initialInnerRedirectionSource - .addChunk(WritableLongChunk.writableChunkWrap(initialSortedKeys.getArrayMapping())); + .addChunk(WritableLongChunk.writableChunkWrap(initialSortedKeys.getArrayMapping())); } - final MutableObject recycledInnerRedirectionSource = - new MutableObject<>(); - final SwitchColumnSource redirectionSource = - new SwitchColumnSource<>(initialInnerRedirectionSource, + final MutableObject recycledInnerRedirectionSource = new MutableObject<>(); + final SwitchColumnSource redirectionSource = new SwitchColumnSource<>(initialInnerRedirectionSource, (final ColumnSource previousInnerRedirectionSource) -> { - final LongChunkColumnSource recycled = - 
(LongChunkColumnSource) previousInnerRedirectionSource; + final LongChunkColumnSource recycled = (LongChunkColumnSource) previousInnerRedirectionSource; recycled.clear(); recycledInnerRedirectionSource.setValue(recycled); }); @@ -152,11 +142,10 @@ private Result streamSort( final Index resultIndex = Index.FACTORY.getFlatIndex(initialSortedKeys.size()); final Map> resultMap = new LinkedHashMap<>(); - for (Map.Entry stringColumnSourceEntry : parent.getColumnSourceMap() - .entrySet()) { + for (Map.Entry stringColumnSourceEntry : parent.getColumnSourceMap().entrySet()) { // noinspection unchecked - resultMap.put(stringColumnSourceEntry.getKey(), new ReadOnlyRedirectedColumnSource<>( - sortMapping, stringColumnSourceEntry.getValue())); + resultMap.put(stringColumnSourceEntry.getKey(), + new ReadOnlyRedirectedColumnSource<>(sortMapping, stringColumnSourceEntry.getValue())); } resultTable = new QueryTable(resultIndex, resultMap); @@ -165,43 +154,40 @@ private Result streamSort( setSorted(resultTable); final ShiftAwareListener resultListener = - new BaseTable.ShiftAwareListenerImpl("Stream sort listener", parent, resultTable) { - @Override - public void onUpdate(@NotNull final Update upstream) { - Assert.assertion(upstream.modified.empty() && upstream.shifted.empty(), - "upstream.modified.empty() && upstream.shifted.empty()"); - Assert.eq(resultIndex.size(), "resultIndex.size()", upstream.removed.size(), - "upstream.removed.size()"); - if (upstream.empty()) { - return; + new BaseTable.ShiftAwareListenerImpl("Stream sort listener", parent, resultTable) { + @Override + public void onUpdate(@NotNull final Update upstream) { + Assert.assertion(upstream.modified.empty() && upstream.shifted.empty(), + "upstream.modified.empty() && upstream.shifted.empty()"); + Assert.eq(resultIndex.size(), "resultIndex.size()", upstream.removed.size(), + "upstream.removed.size()"); + if (upstream.empty()) { + return; + } + + final SortHelpers.SortMapping updateSortedKeys = + 
SortHelpers.getSortedKeys(sortOrder, sortColumns, upstream.added, false); + final LongChunkColumnSource recycled = recycledInnerRedirectionSource.getValue(); + recycledInnerRedirectionSource.setValue(null); + final LongChunkColumnSource updateInnerRedirectSource = + recycled == null ? new LongChunkColumnSource() : recycled; + if (updateSortedKeys.size() > 0) { + updateInnerRedirectSource + .addChunk(WritableLongChunk.writableChunkWrap(updateSortedKeys.getArrayMapping())); + } + redirectionSource.setNewCurrent(updateInnerRedirectSource); + + final Index added = Index.CURRENT_FACTORY.getFlatIndex(upstream.added.size()); + final Index removed = Index.CURRENT_FACTORY.getFlatIndex(upstream.removed.size()); + if (added.size() > removed.size()) { + resultIndex.insertRange(removed.size(), added.size() - 1); + } else if (removed.size() > added.size()) { + resultIndex.removeRange(added.size(), removed.size() - 1); + } + resultTable.notifyListeners(new Update(added, removed, Index.CURRENT_FACTORY.getEmptyIndex(), + IndexShiftData.EMPTY, ModifiedColumnSet.EMPTY)); } - - final SortHelpers.SortMapping updateSortedKeys = - SortHelpers.getSortedKeys(sortOrder, sortColumns, upstream.added, false); - final LongChunkColumnSource recycled = - recycledInnerRedirectionSource.getValue(); - recycledInnerRedirectionSource.setValue(null); - final LongChunkColumnSource updateInnerRedirectSource = - recycled == null ? 
new LongChunkColumnSource() : recycled; - if (updateSortedKeys.size() > 0) { - updateInnerRedirectSource.addChunk(WritableLongChunk - .writableChunkWrap(updateSortedKeys.getArrayMapping())); - } - redirectionSource.setNewCurrent(updateInnerRedirectSource); - - final Index added = Index.CURRENT_FACTORY.getFlatIndex(upstream.added.size()); - final Index removed = - Index.CURRENT_FACTORY.getFlatIndex(upstream.removed.size()); - if (added.size() > removed.size()) { - resultIndex.insertRange(removed.size(), added.size() - 1); - } else if (removed.size() > added.size()) { - resultIndex.removeRange(added.size(), removed.size() - 1); - } - resultTable.notifyListeners( - new Update(added, removed, Index.CURRENT_FACTORY.getEmptyIndex(), - IndexShiftData.EMPTY, ModifiedColumnSet.EMPTY)); - } - }; + }; return new Result<>(resultTable, resultListener); } @@ -215,15 +201,14 @@ private void setSorted(QueryTable table) { public Result initialize(boolean usePrev, long beforeClock) { if (!parent.isRefreshing()) { final SortHelpers.SortMapping sortedKeys = - SortHelpers.getSortedKeys(sortOrder, sortColumns, parent.getIndex(), false); + SortHelpers.getSortedKeys(sortOrder, sortColumns, parent.getIndex(), false); return new Result<>(historicalSort(sortedKeys)); } if (parent.isStream()) { - try ( - final ReadOnlyIndex prevIndex = usePrev ? parent.getIndex().getPrevIndex() : null) { + try (final ReadOnlyIndex prevIndex = usePrev ? parent.getIndex().getPrevIndex() : null) { final ReadOnlyIndex indexToUse = usePrev ? 
prevIndex : parent.getIndex(); final SortHelpers.SortMapping sortedKeys = - SortHelpers.getSortedKeys(sortOrder, sortColumns, indexToUse, usePrev); + SortHelpers.getSortedKeys(sortOrder, sortColumns, indexToUse, usePrev); return streamSort(sortedKeys); } } @@ -232,17 +217,15 @@ public Result initialize(boolean usePrev, long beforeClock) { // reset the sort data structures that we share between invocations final Map> resultMap = new LinkedHashMap<>(); - final Index indexToSort = - usePrev ? closer.add(parent.getIndex().getPrevIndex()) : parent.getIndex(); + final Index indexToSort = usePrev ? closer.add(parent.getIndex().getPrevIndex()) : parent.getIndex(); if (indexToSort.size() >= Integer.MAX_VALUE) { - throw new UnsupportedOperationException( - "Can not perform ticking sort for table larger than " + Integer.MAX_VALUE - + " rows, table is" + indexToSort.size()); + throw new UnsupportedOperationException("Can not perform ticking sort for table larger than " + + Integer.MAX_VALUE + " rows, table is" + indexToSort.size()); } - final long[] sortedKeys = SortHelpers - .getSortedKeys(sortOrder, sortColumns, indexToSort, usePrev).getArrayMapping(); + final long[] sortedKeys = + SortHelpers.getSortedKeys(sortOrder, sortColumns, indexToSort, usePrev).getArrayMapping(); final HashMapK4V4 reverseLookup = new HashMapLockFreeK4V4(sortedKeys.length, .75f, -3); sortMapping = SortHelpers.createSortRedirectionIndex(); @@ -250,46 +233,40 @@ public Result initialize(boolean usePrev, long beforeClock) { // Center the keys around middleKeyToUse final long offset = SortListener.REBALANCE_MIDPOINT - sortedKeys.length / 2; final Index resultIndex = sortedKeys.length == 0 ? 
Index.FACTORY.getEmptyIndex() - : Index.FACTORY.getIndexByRange(offset, offset + sortedKeys.length - 1); + : Index.FACTORY.getIndexByRange(offset, offset + sortedKeys.length - 1); for (int i = 0; i < sortedKeys.length; i++) { reverseLookup.put(sortedKeys[i], i + offset); } - // fillFromChunk may convert the provided OrderedKeys to a KeyRanges (or KeyIndices) - // chunk that is owned by + // fillFromChunk may convert the provided OrderedKeys to a KeyRanges (or KeyIndices) chunk that is owned by // the Index and is not closed until the index is closed. WritableChunkSink.FillFromContext fillFromContext = - closer.add(sortMapping.makeFillFromContext(sortedKeys.length)); + closer.add(sortMapping.makeFillFromContext(sortedKeys.length)); sortMapping.fillFromChunk(fillFromContext, LongChunk.chunkWrap(sortedKeys), - closer.add(resultIndex.clone())); + closer.add(resultIndex.clone())); - for (Map.Entry stringColumnSourceEntry : parent - .getColumnSourceMap().entrySet()) { + for (Map.Entry stringColumnSourceEntry : parent.getColumnSourceMap().entrySet()) { // noinspection unchecked resultMap.put(stringColumnSourceEntry.getKey(), - new ReadOnlyRedirectedColumnSource<>(sortMapping, - stringColumnSourceEntry.getValue())); + new ReadOnlyRedirectedColumnSource<>(sortMapping, stringColumnSourceEntry.getValue())); } // noinspection unchecked final ColumnSource>[] sortedColumnsToSortBy = - Arrays.stream(sortColumnNames).map(resultMap::get).toArray(ColumnSource[]::new); - // we also reinterpret our sortedColumnsToSortBy, which are guaranteed to be redirected - // sources of the inner source + Arrays.stream(sortColumnNames).map(resultMap::get).toArray(ColumnSource[]::new); + // we also reinterpret our sortedColumnsToSortBy, which are guaranteed to be redirected sources of the inner + // source for (int ii = 0; ii < sortedColumnsToSortBy.length; ++ii) { // noinspection unchecked - sortedColumnsToSortBy[ii] = - QueryTable.maybeTransformToPrimitive(sortedColumnsToSortBy[ii]); + 
sortedColumnsToSortBy[ii] = QueryTable.maybeTransformToPrimitive(sortedColumnsToSortBy[ii]); } resultTable = new QueryTable(resultIndex, resultMap); parent.copyAttributes(resultTable, BaseTable.CopyAttributeOperation.Sort); - final SortListener listener = - new SortListener(parent, resultTable, reverseLookup, sortColumns, sortOrder, - sortMapping, sortedColumnsToSortBy, - parent.newModifiedColumnSetIdentityTransformer(resultTable), + final SortListener listener = new SortListener(parent, resultTable, reverseLookup, sortColumns, sortOrder, + sortMapping, sortedColumnsToSortBy, parent.newModifiedColumnSetIdentityTransformer(resultTable), parent.newModifiedColumnSet(sortColumnNames)); setSorted(resultTable); diff --git a/DB/src/main/java/io/deephaven/db/v2/SortedColumnsAttribute.java b/DB/src/main/java/io/deephaven/db/v2/SortedColumnsAttribute.java index fd95663ab93..b7b445798af 100644 --- a/DB/src/main/java/io/deephaven/db/v2/SortedColumnsAttribute.java +++ b/DB/src/main/java/io/deephaven/db/v2/SortedColumnsAttribute.java @@ -33,8 +33,7 @@ public static Optional getOrderForColumn(String attribute, String * @return an optional containing the SortingOrder of the column if defined, empty otherwise */ public static Optional getOrderForColumn(Table table, String columnName) { - return getOrderForColumn((String) table.getAttribute(Table.SORTED_COLUMNS_ATTRIBUTE), - columnName); + return getOrderForColumn((String) table.getAttribute(Table.SORTED_COLUMNS_ATTRIBUTE), columnName); } /** @@ -59,8 +58,7 @@ public static boolean isSortedBy(Table table, String columnName, SortingOrder or * @param order the order that the column is sorted in * @return a String suitable for use as a {@link Table#SORTED_COLUMNS_ATTRIBUTE} value. 
*/ - public static String setOrderForColumn(String attribute, String columnName, - SortingOrder order) { + public static String setOrderForColumn(String attribute, String columnName, SortingOrder order) { Map map = stringToMap(attribute, true); map.put(columnName, order); return stringFromMap(map); @@ -86,7 +84,7 @@ private static Map stringToMap(String attribute, boolean w final String[] columnAttrs = attribute.split(","); Map map = Arrays.stream(columnAttrs).map(s -> s.split("=")) - .collect(Collectors.toMap(a -> a[0], a -> SortingOrder.valueOf(a[1]))); + .collect(Collectors.toMap(a -> a[0], a -> SortingOrder.valueOf(a[1]))); if (writable) { return map; } else { @@ -98,7 +96,6 @@ private static String stringFromMap(Map map) { if (map.isEmpty()) { return null; } - return map.entrySet().stream().map(x -> x.getKey() + "=" + x.getValue()) - .collect(Collectors.joining(",")); + return map.entrySet().stream().map(x -> x.getKey() + "=" + x.getValue()).collect(Collectors.joining(",")); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/SourceTable.java b/DB/src/main/java/io/deephaven/db/v2/SourceTable.java index 11823a229dc..10f031f76bf 100644 --- a/DB/src/main/java/io/deephaven/db/v2/SourceTable.java +++ b/DB/src/main/java/io/deephaven/db/v2/SourceTable.java @@ -34,8 +34,7 @@ public abstract class SourceTable extends RedefinableTable { /** * Static log instance for use in trace. */ - private static final Logger log = - ProcessEnvironment.tryGet() == null ? LoggerFactory.getLogger(SourceTable.class) + private static final Logger log = ProcessEnvironment.tryGet() == null ? 
LoggerFactory.getLogger(SourceTable.class) : ProcessEnvironment.getGlobalLog(); /** @@ -84,16 +83,14 @@ public abstract class SourceTable extends RedefinableTable { * @param tableDefinition A TableDefinition * @param description A human-readable description for this table * @param componentFactory A component factory for creating column source managers - * @param locationProvider A TableLocationProvider, for use in discovering the locations that - * compose this table - * @param liveTableRegistrar Callback for registering live tables for refreshes, null if this - * table is not live + * @param locationProvider A TableLocationProvider, for use in discovering the locations that compose this table + * @param liveTableRegistrar Callback for registering live tables for refreshes, null if this table is not live */ SourceTable(@NotNull final TableDefinition tableDefinition, - @NotNull final String description, - @NotNull final SourceTableComponentFactory componentFactory, - @NotNull final TableLocationProvider locationProvider, - final LiveTableRegistrar liveTableRegistrar) { + @NotNull final String description, + @NotNull final SourceTableComponentFactory componentFactory, + @NotNull final TableLocationProvider locationProvider, + final LiveTableRegistrar liveTableRegistrar) { super(tableDefinition, description); this.componentFactory = Require.neqNull(componentFactory, "componentFactory"); @@ -101,12 +98,8 @@ public abstract class SourceTable extends RedefinableTable { this.liveTableRegistrar = liveTableRegistrar; final boolean isLive = liveTableRegistrar != null; - columnSourceManager = - componentFactory.createColumnSourceManager(isLive, ColumnToCodecMappings.EMPTY, - definition.getColumns() /* - * NB: this is the *re-written* definition passed to the - * super-class constructor. 
- */); + columnSourceManager = componentFactory.createColumnSourceManager(isLive, ColumnToCodecMappings.EMPTY, definition + .getColumns() /* NB: this is the *re-written* definition passed to the super-class constructor. */); if (isLive) { // NB: There's no reason to start out trying to group, if this is a live table. columnSourceManager.disableGrouping(); @@ -117,8 +110,7 @@ public abstract class SourceTable extends RedefinableTable { } /** - * Force this table to determine its initial state (available locations, size, index) if it - * hasn't already done so. + * Force this table to determine its initial state (available locations, size, index) if it hasn't already done so. */ private void initialize() { initializeAvailableLocations(); @@ -144,30 +136,27 @@ protected final void initializeAvailableLocations() { if (locationsInitialized) { return; } - QueryPerformanceRecorder.withNugget(description + ".initializeAvailableLocations()", - () -> { - if (isRefreshing()) { - final TableLocationSubscriptionBuffer locationBuffer = + QueryPerformanceRecorder.withNugget(description + ".initializeAvailableLocations()", () -> { + if (isRefreshing()) { + final TableLocationSubscriptionBuffer locationBuffer = new TableLocationSubscriptionBuffer(locationProvider); - maybeAddLocations(locationBuffer.processPending()); - liveTableRegistrar.addTable( - locationChangePoller = new LocationChangePoller(locationBuffer)); - } else { - locationProvider.refresh(); - maybeAddLocations(locationProvider.getTableLocationKeys()); - } - }); + maybeAddLocations(locationBuffer.processPending()); + liveTableRegistrar.addTable(locationChangePoller = new LocationChangePoller(locationBuffer)); + } else { + locationProvider.refresh(); + maybeAddLocations(locationProvider.getTableLocationKeys()); + } + }); locationsInitialized = true; } } - private void maybeAddLocations( - @NotNull final Collection locationKeys) { + private void maybeAddLocations(@NotNull final Collection locationKeys) { if 
(locationKeys.isEmpty()) { return; } filterLocationKeys(locationKeys) - .forEach(lk -> columnSourceManager.addLocation(locationProvider.getTableLocation(lk))); + .forEach(lk -> columnSourceManager.addLocation(locationProvider.getTableLocation(lk))); } private void initializeLocationSizes() { @@ -179,21 +168,20 @@ private void initializeLocationSizes() { if (locationSizesInitialized) { return; } - QueryPerformanceRecorder.withNugget(description + ".initializeLocationSizes()", - sizeForInstrumentation(), () -> { - Assert.eqNull(index, "index"); - index = refreshLocationSizes(); - setAttribute(EMPTY_SOURCE_TABLE_ATTRIBUTE, index.empty()); - if (!isRefreshing()) { - return; - } - index.initializePreviousValue(); - final long currentClockValue = LogicalClock.DEFAULT.currentValue(); - setLastNotificationStep( - LogicalClock.getState(currentClockValue) == LogicalClock.State.Updating - ? LogicalClock.getStep(currentClockValue) - 1 - : LogicalClock.getStep(currentClockValue)); - }); + QueryPerformanceRecorder.withNugget(description + ".initializeLocationSizes()", sizeForInstrumentation(), + () -> { + Assert.eqNull(index, "index"); + index = refreshLocationSizes(); + setAttribute(EMPTY_SOURCE_TABLE_ATTRIBUTE, index.empty()); + if (!isRefreshing()) { + return; + } + index.initializePreviousValue(); + final long currentClockValue = LogicalClock.DEFAULT.currentValue(); + setLastNotificationStep(LogicalClock.getState(currentClockValue) == LogicalClock.State.Updating + ? 
LogicalClock.getStep(currentClockValue) - 1 + : LogicalClock.getStep(currentClockValue)); + }); locationSizesInitialized = true; } } @@ -210,8 +198,7 @@ private class LocationChangePoller extends InstrumentedLiveTable { private final TableLocationSubscriptionBuffer locationBuffer; - private LocationChangePoller( - @NotNull final TableLocationSubscriptionBuffer locationBuffer) { + private LocationChangePoller(@NotNull final TableLocationSubscriptionBuffer locationBuffer) { super(description + ".indexUpdateLiveTable"); this.locationBuffer = locationBuffer; } @@ -221,11 +208,9 @@ protected void instrumentedRefresh() { try { maybeAddLocations(locationBuffer.processPending()); // NB: The availableLocationsLiveTable previously had functionality to notify - // "location listeners", but it was never used - resurrect from git history if - // needed. + // "location listeners", but it was never used - resurrect from git history if needed. if (!locationSizesInitialized) { - // We don't want to start polling size changes until the initial Index has been - // computed. + // We don't want to start polling size changes until the initial Index has been computed. return; } final boolean wasEmpty = index.empty(); @@ -237,27 +222,24 @@ protected void instrumentedRefresh() { setAttribute(EMPTY_SOURCE_TABLE_ATTRIBUTE, false); } index.insert(added); - notifyListeners(added, Index.FACTORY.getEmptyIndex(), - Index.FACTORY.getEmptyIndex()); + notifyListeners(added, Index.FACTORY.getEmptyIndex(), Index.FACTORY.getEmptyIndex()); } catch (Exception e) { - // Notify listeners to the SourceTable when we had an issue refreshing available - // locations. + // Notify listeners to the SourceTable when we had an issue refreshing available locations. notifyListenersOnError(e, null); } } } /** - * Hook to allow found location keys to be filtered (e.g. according to a where-clause on the - * partitioning columns of a {@link PartitionAwareSourceTable}. 
The default implementation - * returns its input unmolested. + * Hook to allow found location keys to be filtered (e.g. according to a where-clause on the partitioning columns of + * a {@link PartitionAwareSourceTable}. The default implementation returns its input unmolested. * * @param foundLocationKeys A collection of {@link ImmutableTableLocationKey}s fetched from the * {@link TableLocationProvider}, but not yet incorporated into the table * @return A sub-collection of the input */ protected Collection filterLocationKeys( - @NotNull final Collection foundLocationKeys) { + @NotNull final Collection foundLocationKeys) { return foundLocationKeys; } @@ -265,50 +247,46 @@ protected Collection filterLocationKeys( protected final QueryTable doCoalesce() { initialize(); - final ShiftAwareSwapListener swapListener = createSwapListenerIfRefreshing( - (final BaseTable parent) -> new ShiftAwareSwapListener(parent) { - - @Override - public final void destroy() { - // NB: We can't call super.destroy() because we don't want to try to remove - // ourselves from the - // coalesced table (see override for removeUpdateListener), but we are probably - // not missing - // anything by not having super.destroy() invoke its own super.destroy(). - removeUpdateListenerUncoalesced(this); - } + final ShiftAwareSwapListener swapListener = + createSwapListenerIfRefreshing((final BaseTable parent) -> new ShiftAwareSwapListener(parent) { - @Override - public final void subscribeForUpdates() { - listenForUpdatesUncoalesced(this); - } - }); + @Override + public final void destroy() { + // NB: We can't call super.destroy() because we don't want to try to remove ourselves from the + // coalesced table (see override for removeUpdateListener), but we are probably not missing + // anything by not having super.destroy() invoke its own super.destroy(). 
+ removeUpdateListenerUncoalesced(this); + } + + @Override + public final void subscribeForUpdates() { + listenForUpdatesUncoalesced(this); + } + }); final Mutable result = new MutableObject<>(); - initializeWithSnapshot("SourceTable.coalesce", swapListener, - (usePrev, beforeClockValue) -> { - final QueryTable resultTable = - new QueryTable(definition, index, columnSourceManager.getColumnSources()); - copyAttributes(resultTable, CopyAttributeOperation.Coalesce); - - if (swapListener != null) { - final ShiftAwareListenerImpl listener = + initializeWithSnapshot("SourceTable.coalesce", swapListener, (usePrev, beforeClockValue) -> { + final QueryTable resultTable = new QueryTable(definition, index, columnSourceManager.getColumnSources()); + copyAttributes(resultTable, CopyAttributeOperation.Coalesce); + + if (swapListener != null) { + final ShiftAwareListenerImpl listener = new ShiftAwareListenerImpl("SourceTable.coalesce", this, resultTable) { @Override protected final void destroy() { - // NB: This implementation cannot call super.destroy() for the same - // reason as the swap listener + // NB: This implementation cannot call super.destroy() for the same reason as the swap + // listener removeUpdateListenerUncoalesced(this); } }; - swapListener.setListenerAndResult(listener, resultTable); - resultTable.addParentReference(swapListener); - } + swapListener.setListenerAndResult(listener, resultTable); + resultTable.addParentReference(swapListener); + } - result.setValue(resultTable); - return true; - }); + result.setValue(resultTable); + return true; + }); return result.getValue(); } diff --git a/DB/src/main/java/io/deephaven/db/v2/SourceTableComponentFactory.java b/DB/src/main/java/io/deephaven/db/v2/SourceTableComponentFactory.java index 91fa2e83741..a9846605b12 100644 --- a/DB/src/main/java/io/deephaven/db/v2/SourceTableComponentFactory.java +++ b/DB/src/main/java/io/deephaven/db/v2/SourceTableComponentFactory.java @@ -12,7 +12,7 @@ public interface 
SourceTableComponentFactory { ColumnSourceManager createColumnSourceManager( - boolean isRefreshing, - ColumnToCodecMappings codecMappings, - ColumnDefinition... columnDefinitions); + boolean isRefreshing, + ColumnToCodecMappings codecMappings, + ColumnDefinition... columnDefinitions); } diff --git a/DB/src/main/java/io/deephaven/db/v2/SourceTableMap.java b/DB/src/main/java/io/deephaven/db/v2/SourceTableMap.java index 32c2a71d3ca..51536bb72ef 100644 --- a/DB/src/main/java/io/deephaven/db/v2/SourceTableMap.java +++ b/DB/src/main/java/io/deephaven/db/v2/SourceTableMap.java @@ -24,8 +24,8 @@ import java.util.stream.Stream; /** - * {@link LocalTableMap} of single-location {@link SourceTable}s keyed by {@link TableLocationKey}. - * Refer to {@link TableLocationKey} for an explanation of partitioning. + * {@link LocalTableMap} of single-location {@link SourceTable}s keyed by {@link TableLocationKey}. Refer to + * {@link TableLocationKey} for an explanation of partitioning. */ public class SourceTableMap extends LocalTableMap { @@ -38,8 +38,7 @@ public class SourceTableMap extends LocalTableMap { private final TableLocationSubscriptionBuffer subscriptionBuffer; private final IntrusiveDoublyLinkedQueue pendingLocationStates; private final IntrusiveDoublyLinkedQueue readyLocationStates; - @SuppressWarnings("FieldCanBeLocal") // We need to hold onto this reference for reachability - // purposes. + @SuppressWarnings("FieldCanBeLocal") // We need to hold onto this reference for reachability purposes. private final LiveTable processNewLocationsLiveTable; /** @@ -47,32 +46,30 @@ public class SourceTableMap extends LocalTableMap { * Construct a {@link SourceTableMap} from the supplied parameters. * *

        - * Note that refreshLocations and refreshSizes are distinct because there are use cases that - * supply an external index and hence don't require size refreshes. Others might care for size - * refreshes, but only the initially-available set of locations. + * Note that refreshLocations and refreshSizes are distinct because there are use cases that supply an external + * index and hence don't require size refreshes. Others might care for size refreshes, but only the + * initially-available set of locations. * * @param tableDefinition The table definition - * @param applyTablePermissions Function to apply in order to correctly restrict the visible - * result rows + * @param applyTablePermissions Function to apply in order to correctly restrict the visible result rows * @param tableLocationProvider Source for table locations * @param refreshLocations Whether the set of locations should be refreshed * @param refreshSizes Whether the locations found should be refreshed * @param locationKeyMatcher Function to filter desired location keys */ public SourceTableMap(@NotNull final TableDefinition tableDefinition, - @NotNull final UnaryOperator

    applyTablePermissions, - @NotNull final TableLocationProvider tableLocationProvider, - final boolean refreshLocations, - final boolean refreshSizes, - @NotNull final Predicate locationKeyMatcher) { + @NotNull final UnaryOperator
    applyTablePermissions, + @NotNull final TableLocationProvider tableLocationProvider, + final boolean refreshLocations, + final boolean refreshSizes, + @NotNull final Predicate locationKeyMatcher) { super(null, Objects.requireNonNull(tableDefinition)); this.applyTablePermissions = applyTablePermissions; this.tableLocationProvider = tableLocationProvider; this.refreshSizes = refreshSizes; this.locationKeyMatcher = locationKeyMatcher; - final boolean needToRefreshLocations = - refreshLocations && tableLocationProvider.supportsSubscriptions(); + final boolean needToRefreshLocations = refreshLocations && tableLocationProvider.supportsSubscriptions(); if (needToRefreshLocations || refreshSizes) { setRefreshing(true); @@ -85,12 +82,12 @@ public SourceTableMap(@NotNull final TableDefinition tableDefinition, if (needToRefreshLocations) { subscriptionBuffer = new TableLocationSubscriptionBuffer(tableLocationProvider); pendingLocationStates = new IntrusiveDoublyLinkedQueue<>( - IntrusiveDoublyLinkedNode.Adapter.getInstance()); + IntrusiveDoublyLinkedNode.Adapter.getInstance()); readyLocationStates = new IntrusiveDoublyLinkedQueue<>( - IntrusiveDoublyLinkedNode.Adapter.getInstance()); + IntrusiveDoublyLinkedNode.Adapter.getInstance()); processNewLocationsLiveTable = new InstrumentedLiveTable( - SourceTableMap.class.getSimpleName() + '[' + tableLocationProvider + ']' - + "-processPendingLocations") { + SourceTableMap.class.getSimpleName() + '[' + tableLocationProvider + ']' + + "-processPendingLocations") { @Override protected void instrumentedRefresh() { processPendingLocations(); @@ -104,8 +101,8 @@ protected void instrumentedRefresh() { readyLocationStates = null; processNewLocationsLiveTable = null; tableLocationProvider.refresh(); - sortAndAddLocations(tableLocationProvider.getTableLocationKeys().stream() - .filter(locationKeyMatcher).map(tableLocationProvider::getTableLocation)); + 
sortAndAddLocations(tableLocationProvider.getTableLocationKeys().stream().filter(locationKeyMatcher) + .map(tableLocationProvider::getTableLocation)); } if (isRefreshing()) { @@ -127,46 +124,37 @@ private void sortAndAddLocations(@NotNull final Stream locations) if (!observeCreation.getValue()) { // we have a duplicate location - not allowed final TableLocation previousLocation = - ((PartitionAwareSourceTable) previousTable).locationProvider - .getTableLocation(tl.getKey()); + ((PartitionAwareSourceTable) previousTable).locationProvider.getTableLocation(tl.getKey()); throw new TableDataException( - "Data Routing Configuration error: TableDataService elements overlap at location " - + - tl.toGenericString() + - ". Duplicate locations are " + previousLocation.toStringDetailed() + " and " - + tl.toStringDetailed()); + "Data Routing Configuration error: TableDataService elements overlap at location " + + tl.toGenericString() + + ". Duplicate locations are " + previousLocation.toStringDetailed() + " and " + + tl.toStringDetailed()); } }); } private Table makeTable(@NotNull final TableLocation tableLocation) { return applyTablePermissions.apply(new PartitionAwareSourceTable( - getConstituentDefinition().orElseThrow(IllegalStateException::new), - "SingleLocationSourceTable-" + tableLocation, - RegionedTableComponentFactoryImpl.INSTANCE, - new SingleTableLocationProvider(tableLocation), - refreshSizes ? refreshCombiner : null)); + getConstituentDefinition().orElseThrow(IllegalStateException::new), + "SingleLocationSourceTable-" + tableLocation, + RegionedTableComponentFactoryImpl.INSTANCE, + new SingleTableLocationProvider(tableLocation), + refreshSizes ? refreshCombiner : null)); } private void processPendingLocations() { - // This block of code is unfortunate, because it largely duplicates the intent and effort of - // similar code in - // RegionedColumnSourceManager. 
I think that the RegionedColumnSourceManager could be - // changed to intermediate - // between TableLocationProvider and SourceTable or SourceTableMap, allowing for much - // cleaner code in all three. - // The RCSM could then populate STM nodes or ST regions. We could also add a "RegionManager" - // to - // RegionedColumnSources, in order to eliminate the unnecessary post-initialization array - // population in STM + // This block of code is unfortunate, because it largely duplicates the intent and effort of similar code in + // RegionedColumnSourceManager. I think that the RegionedColumnSourceManager could be changed to intermediate + // between TableLocationProvider and SourceTable or SourceTableMap, allowing for much cleaner code in all three. + // The RCSM could then populate STM nodes or ST regions. We could also add a "RegionManager" to + // RegionedColumnSources, in order to eliminate the unnecessary post-initialization array population in STM // ColumnSources. - // TODO (https://github.com/deephaven/deephaven-core/issues/867): Refactor around a ticking - // partition table + // TODO (https://github.com/deephaven/deephaven-core/issues/867): Refactor around a ticking partition table subscriptionBuffer.processPending().stream().filter(locationKeyMatcher) - .map(tableLocationProvider::getTableLocation).map(PendingLocationState::new) - .forEach(pendingLocationStates::offer); - for (final Iterator iter = pendingLocationStates.iterator(); iter - .hasNext();) { + .map(tableLocationProvider::getTableLocation).map(PendingLocationState::new) + .forEach(pendingLocationStates::offer); + for (final Iterator iter = pendingLocationStates.iterator(); iter.hasNext();) { final PendingLocationState pendingLocationState = iter.next(); if (pendingLocationState.exists()) { iter.remove(); @@ -177,8 +165,7 @@ private void processPendingLocations() { readyLocationStates.clearFast(); } - private static class PendingLocationState - extends IntrusiveDoublyLinkedNode.Impl { + private 
static class PendingLocationState extends IntrusiveDoublyLinkedNode.Impl { private final TableLocation location; @@ -190,9 +177,9 @@ private PendingLocationState(@NotNull final TableLocation location) { } /** - * Test if the pending location is ready for inclusion in the table map. This means it must - * have non-null, non-zero size. We expect that this means the location will be immediately - * included in the resulting table's {@link ColumnSourceManager}, which is a + * Test if the pending location is ready for inclusion in the table map. This means it must have non-null, + * non-zero size. We expect that this means the location will be immediately included in the resulting table's + * {@link ColumnSourceManager}, which is a * {@link io.deephaven.db.v2.sources.regioned.RegionedColumnSourceManager} in all cases. * * @return Whether this location exists for purposes of inclusion in the table map diff --git a/DB/src/main/java/io/deephaven/db/v2/SparseSelect.java b/DB/src/main/java/io/deephaven/db/v2/SparseSelect.java index 542e5a9fb8b..320afd7bcf4 100644 --- a/DB/src/main/java/io/deephaven/db/v2/SparseSelect.java +++ b/DB/src/main/java/io/deephaven/db/v2/SparseSelect.java @@ -25,42 +25,40 @@ import java.util.concurrent.Future; /** - * A simpler version of {@link Table#select} that is guaranteed to preserve the original table's - * index. + * A simpler version of {@link Table#select} that is guaranteed to preserve the original table's index. * *

    - * Like select, the sparseSelected table's columns will be materialized in memory. Unlike select(), - * sparseSelect guarantees the original Table's index is preserved. Formula columns are not - * supported, only the names of columns to copy into the output table. This means that each output - * column is independent of every other output column, which enables column-level parallelism. + * Like select, the sparseSelected table's columns will be materialized in memory. Unlike select(), sparseSelect + * guarantees the original Table's index is preserved. Formula columns are not supported, only the names of columns to + * copy into the output table. This means that each output column is independent of every other output column, which + * enables column-level parallelism. *

    */ public class SparseSelect { /** - * How many threads should be used for sparse select? All concurrent sparseSelect operations use - * the same thread pool. + * How many threads should be used for sparse select? All concurrent sparseSelect operations use the same thread + * pool. * *

    * Configured using the {@code SparseSelect.threads} property. Defaults to 1. *

    */ private final static int SPARSE_SELECT_THREADS = - Configuration.getInstance().getIntegerWithDefault("SparseSelect.threads", 1); + Configuration.getInstance().getIntegerWithDefault("SparseSelect.threads", 1); /** - * What size chunk (in bytes) should be read from the input column sources and written to the - * output column sources? + * What size chunk (in bytes) should be read from the input column sources and written to the output column sources? * *

    * Configured using the {@code SparseSelect.chunkSize} property. Defaults to 2^16. *

    */ private final static int SPARSE_SELECT_CHUNK_SIZE = - Configuration.getInstance().getIntegerWithDefault("SparseSelect.chunkSize", 1 << 16); + Configuration.getInstance().getIntegerWithDefault("SparseSelect.chunkSize", 1 << 16); private final static ExecutorService executor = SPARSE_SELECT_THREADS == 1 ? null - : Executors.newFixedThreadPool(SPARSE_SELECT_THREADS, - new NamingThreadFactory(SparseSelect.class, "copyThread", true)); + : Executors.newFixedThreadPool(SPARSE_SELECT_THREADS, + new NamingThreadFactory(SparseSelect.class, "copyThread", true)); private SparseSelect() {} // static use only @@ -73,7 +71,7 @@ private SparseSelect() {} // static use only */ public static Table sparseSelect(Table source) { return sparseSelect(source, - source.getColumnSourceMap().keySet().toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)); + source.getColumnSourceMap().keySet().toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)); } /** @@ -98,7 +96,7 @@ public static Table sparseSelect(Table source, String... columnNames) { */ public static Table sparseSelect(Table source, Collection columnNames) { return sparseSelect(source, CollectionUtil.ZERO_LENGTH_STRING_ARRAY, - columnNames.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)); + columnNames.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)); } /** @@ -112,8 +110,7 @@ public static Table sparseSelect(Table source, Collection columnNames) { * @return a copy of the source table with materialized columns */ public static Table partialSparseSelect(Table source, Collection columnNames) { - return partialSparseSelect(source, - columnNames.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)); + return partialSparseSelect(source, columnNames.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)); } /** @@ -129,158 +126,141 @@ public static Table partialSparseSelect(Table source, Collection columnN public static Table partialSparseSelect(Table source, String... 
columnNames) { final Set columnsToCopy = new HashSet<>(Arrays.asList(columnNames)); final String[] preserveColumns = source.getColumnSourceMap().keySet().stream() - .filter(x -> !columnsToCopy.contains(x)).toArray(String[]::new); + .filter(x -> !columnsToCopy.contains(x)).toArray(String[]::new); return sparseSelect(source, preserveColumns, columnNames); } - private static Table sparseSelect(Table source, String[] preserveColumns, - String[] columnNames) { - return QueryPerformanceRecorder.withNugget( - "sparseSelect(" + Arrays.toString(columnNames) + ")", source.sizeForInstrumentation(), - () -> { - if (source.isLive()) { - LiveTableMonitor.DEFAULT.checkInitiateTableOperation(); - } + private static Table sparseSelect(Table source, String[] preserveColumns, String[] columnNames) { + return QueryPerformanceRecorder.withNugget("sparseSelect(" + Arrays.toString(columnNames) + ")", + source.sizeForInstrumentation(), () -> { + if (source.isLive()) { + LiveTableMonitor.DEFAULT.checkInitiateTableOperation(); + } - final Map resultColumns = new LinkedHashMap<>(); + final Map resultColumns = new LinkedHashMap<>(); - // we copy the preserve columns to the map without changes - for (final String preserveColumn : preserveColumns) { - resultColumns.put(preserveColumn, source.getColumnSource(preserveColumn)); - } + // we copy the preserve columns to the map without changes + for (final String preserveColumn : preserveColumns) { + resultColumns.put(preserveColumn, source.getColumnSource(preserveColumn)); + } - final List inputSourcesList = new ArrayList<>(columnNames.length); - final List outputSourcesList = - new ArrayList<>(columnNames.length); - final List modifiedColumnSets = - new ArrayList<>(columnNames.length); - - for (final String columnName : columnNames) { - final ColumnSource inputSource = source.getColumnSource(columnName); - if (inputSource instanceof SparseArrayColumnSource - || inputSource instanceof ArrayBackedColumnSource) { - resultColumns.put(columnName, 
inputSource); - } else { - inputSourcesList.add(inputSource); - final SparseArrayColumnSource outputSource = - SparseArrayColumnSource.getSparseMemoryColumnSource( - inputSource.getType(), inputSource.getComponentType()); - outputSourcesList.add(outputSource); - resultColumns.put(columnName, outputSource); - modifiedColumnSets - .add(((DynamicTable) source).newModifiedColumnSet(columnName)); + final List inputSourcesList = new ArrayList<>(columnNames.length); + final List outputSourcesList = new ArrayList<>(columnNames.length); + final List modifiedColumnSets = new ArrayList<>(columnNames.length); + + for (final String columnName : columnNames) { + final ColumnSource inputSource = source.getColumnSource(columnName); + if (inputSource instanceof SparseArrayColumnSource + || inputSource instanceof ArrayBackedColumnSource) { + resultColumns.put(columnName, inputSource); + } else { + inputSourcesList.add(inputSource); + final SparseArrayColumnSource outputSource = SparseArrayColumnSource + .getSparseMemoryColumnSource(inputSource.getType(), inputSource.getComponentType()); + outputSourcesList.add(outputSource); + resultColumns.put(columnName, outputSource); + modifiedColumnSets.add(((DynamicTable) source).newModifiedColumnSet(columnName)); + } } - } - final ColumnSource[] inputSources = - inputSourcesList.toArray(ColumnSource.ZERO_LENGTH_COLUMN_SOURCE_ARRAY); - final SparseArrayColumnSource[] outputSources = outputSourcesList - .toArray(SparseArrayColumnSource.ZERO_LENGTH_SPARSE_ARRAY_COLUMN_SOURCE_ARRAY); - - doCopy(source.getIndex(), inputSources, outputSources, null); - - final QueryTable resultTable = new QueryTable(source.getIndex(), resultColumns); - - if (source.isLive()) { - outputSourcesList.forEach(ColumnSource::startTrackingPrevValues); - final ObjectSparseArraySource[] sparseObjectSources = - outputSourcesList.stream().filter(x -> x instanceof ObjectSparseArraySource) - .map(x -> (ObjectSparseArraySource) x) - .toArray(ObjectSparseArraySource[]::new); - 
((DynamicTable) source).listenForUpdates(new BaseTable.ShiftAwareListenerImpl( - "sparseSelect(" + Arrays.toString(columnNames) + ")", (DynamicTable) source, - resultTable) { - private final ModifiedColumnSet modifiedColumnSetForUpdates = - resultTable.getModifiedColumnSetForUpdates(); - private final ModifiedColumnSet.Transformer transformer = - ((BaseTable) source).newModifiedColumnSetTransformer(resultTable, - resultTable.getDefinition().getColumnNamesArray()); - - @Override - public void onUpdate(Update upstream) { - final Update downstream = upstream.copy(); - downstream.modifiedColumnSet = modifiedColumnSetForUpdates; - if (sparseObjectSources.length > 0) { - try (final Index removedOnly = - upstream.removed.minus(upstream.added)) { - for (final ObjectSparseArraySource objectSparseArraySource : sparseObjectSources) { - objectSparseArraySource.remove(removedOnly); + final ColumnSource[] inputSources = + inputSourcesList.toArray(ColumnSource.ZERO_LENGTH_COLUMN_SOURCE_ARRAY); + final SparseArrayColumnSource[] outputSources = outputSourcesList + .toArray(SparseArrayColumnSource.ZERO_LENGTH_SPARSE_ARRAY_COLUMN_SOURCE_ARRAY); + + doCopy(source.getIndex(), inputSources, outputSources, null); + + final QueryTable resultTable = new QueryTable(source.getIndex(), resultColumns); + + if (source.isLive()) { + outputSourcesList.forEach(ColumnSource::startTrackingPrevValues); + final ObjectSparseArraySource[] sparseObjectSources = + outputSourcesList.stream().filter(x -> x instanceof ObjectSparseArraySource) + .map(x -> (ObjectSparseArraySource) x).toArray(ObjectSparseArraySource[]::new); + ((DynamicTable) source).listenForUpdates(new BaseTable.ShiftAwareListenerImpl( + "sparseSelect(" + Arrays.toString(columnNames) + ")", (DynamicTable) source, + resultTable) { + private final ModifiedColumnSet modifiedColumnSetForUpdates = + resultTable.getModifiedColumnSetForUpdates(); + private final ModifiedColumnSet.Transformer transformer = + ((BaseTable) 
source).newModifiedColumnSetTransformer(resultTable, + resultTable.getDefinition().getColumnNamesArray()); + + @Override + public void onUpdate(Update upstream) { + final Update downstream = upstream.copy(); + downstream.modifiedColumnSet = modifiedColumnSetForUpdates; + if (sparseObjectSources.length > 0) { + try (final Index removedOnly = upstream.removed.minus(upstream.added)) { + for (final ObjectSparseArraySource objectSparseArraySource : sparseObjectSources) { + objectSparseArraySource.remove(removedOnly); + } } } - } - boolean anyModified = false; - boolean allModified = true; + boolean anyModified = false; + boolean allModified = true; - final boolean[] modifiedColumns = new boolean[inputSources.length]; + final boolean[] modifiedColumns = new boolean[inputSources.length]; - for (int cc = 0; cc < inputSources.length; ++cc) { - final boolean columnModified = upstream.modifiedColumnSet - .containsAny(modifiedColumnSets.get(cc)); - modifiedColumns[cc] = columnModified; - anyModified |= columnModified; - allModified &= columnModified; - } + for (int cc = 0; cc < inputSources.length; ++cc) { + final boolean columnModified = + upstream.modifiedColumnSet.containsAny(modifiedColumnSets.get(cc)); + modifiedColumns[cc] = columnModified; + anyModified |= columnModified; + allModified &= columnModified; + } + + if (anyModified) { + try (final Index addedAndModified = upstream.added.union(upstream.modified)) { + if (upstream.shifted.nonempty()) { + try (final Index currentWithoutAddsOrModifies = + source.getIndex().minus(addedAndModified); + final SafeCloseablePair shifts = upstream.shifted + .extractParallelShiftedRowsFromPostShiftIndex( + currentWithoutAddsOrModifies)) { + doShift(shifts, outputSources, modifiedColumns); + } + } + + doCopy(addedAndModified, inputSources, outputSources, modifiedColumns); + } + } + + if (!allModified) { + invert(modifiedColumns); - if (anyModified) { - try (final Index addedAndModified = - upstream.added.union(upstream.modified)) { if 
(upstream.shifted.nonempty()) { - try ( - final Index currentWithoutAddsOrModifies = - source.getIndex().minus(addedAndModified); - final SafeCloseablePair shifts = - upstream.shifted - .extractParallelShiftedRowsFromPostShiftIndex( - currentWithoutAddsOrModifies)) { + try (final Index currentWithoutAdds = source.getIndex().minus(upstream.added); + final SafeCloseablePair shifts = + upstream.shifted.extractParallelShiftedRowsFromPostShiftIndex( + currentWithoutAdds)) { doShift(shifts, outputSources, modifiedColumns); } } - doCopy(addedAndModified, inputSources, outputSources, - modifiedColumns); + doCopy(upstream.added, inputSources, outputSources, modifiedColumns); } - } - if (!allModified) { - invert(modifiedColumns); - - if (upstream.shifted.nonempty()) { - try ( - final Index currentWithoutAdds = - source.getIndex().minus(upstream.added); - final SafeCloseablePair shifts = - upstream.shifted - .extractParallelShiftedRowsFromPostShiftIndex( - currentWithoutAdds)) { - doShift(shifts, outputSources, modifiedColumns); - } - } + transformer.transform(upstream.modifiedColumnSet, downstream.modifiedColumnSet); - doCopy(upstream.added, inputSources, outputSources, - modifiedColumns); + resultTable.notifyListeners(downstream); } - transformer.transform(upstream.modifiedColumnSet, - downstream.modifiedColumnSet); - - resultTable.notifyListeners(downstream); - } - - private void invert(boolean[] modifiedColumns) { - for (int cc = 0; cc < modifiedColumns.length; ++cc) { - modifiedColumns[cc] = !modifiedColumns[cc]; + private void invert(boolean[] modifiedColumns) { + for (int cc = 0; cc < modifiedColumns.length; ++cc) { + modifiedColumns[cc] = !modifiedColumns[cc]; + } } - } - }); - } + }); + } - return resultTable; - }); + return resultTable; + }); } - private static void doShift(SafeCloseablePair shifts, - SparseArrayColumnSource[] outputSources, boolean[] toShift) { + private static void doShift(SafeCloseablePair shifts, SparseArrayColumnSource[] outputSources, + 
boolean[] toShift) { if (executor == null) { doShiftSingle(shifts, outputSources, toShift); } else { @@ -288,8 +268,8 @@ private static void doShift(SafeCloseablePair shifts, } } - private static void doCopy(Index addedAndModified, ColumnSource[] inputSources, - WritableSource[] outputSources, boolean[] toCopy) { + private static void doCopy(Index addedAndModified, ColumnSource[] inputSources, WritableSource[] outputSources, + boolean[] toCopy) { if (executor == null) { doCopySingle(addedAndModified, inputSources, outputSources, toCopy); } else { @@ -298,28 +278,22 @@ private static void doCopy(Index addedAndModified, ColumnSource[] inputSources, } private static void doCopySingle(Index addedAndModified, ColumnSource[] inputSources, - WritableSource[] outputSources, boolean[] toCopy) { + WritableSource[] outputSources, boolean[] toCopy) { final ChunkSource.GetContext[] gcs = new ChunkSource.GetContext[inputSources.length]; - final WritableChunkSink.FillFromContext[] ffcs = - new WritableChunkSink.FillFromContext[inputSources.length]; - try ( - final SafeCloseableArray ignored = - new SafeCloseableArray<>(gcs); - final SafeCloseableArray ignored2 = - new SafeCloseableArray<>(ffcs); - final OrderedKeys.Iterator okit = addedAndModified.getOrderedKeysIterator(); - final SharedContext sharedContext = SharedContext.makeSharedContext()) { + final WritableChunkSink.FillFromContext[] ffcs = new WritableChunkSink.FillFromContext[inputSources.length]; + try (final SafeCloseableArray ignored = new SafeCloseableArray<>(gcs); + final SafeCloseableArray ignored2 = new SafeCloseableArray<>(ffcs); + final OrderedKeys.Iterator okit = addedAndModified.getOrderedKeysIterator(); + final SharedContext sharedContext = SharedContext.makeSharedContext()) { for (int cc = 0; cc < inputSources.length; cc++) { if (toCopy == null || toCopy[cc]) { - gcs[cc] = - inputSources[cc].makeGetContext(SPARSE_SELECT_CHUNK_SIZE, sharedContext); + gcs[cc] = 
inputSources[cc].makeGetContext(SPARSE_SELECT_CHUNK_SIZE, sharedContext); ffcs[cc] = outputSources[cc].makeFillFromContext(SPARSE_SELECT_CHUNK_SIZE); } } while (okit.hasMore()) { sharedContext.reset(); - final OrderedKeys chunkOk = - okit.getNextOrderedKeysWithLength(SPARSE_SELECT_CHUNK_SIZE); + final OrderedKeys chunkOk = okit.getNextOrderedKeysWithLength(SPARSE_SELECT_CHUNK_SIZE); for (int cc = 0; cc < inputSources.length; cc++) { if (toCopy == null || toCopy[cc]) { // noinspection unchecked @@ -332,13 +306,13 @@ private static void doCopySingle(Index addedAndModified, ColumnSource[] inputSou } private static void doCopyThreads(Index addedAndModified, ColumnSource[] inputSources, - WritableSource[] outputSources, boolean[] toCopy) { + WritableSource[] outputSources, boolean[] toCopy) { final Future[] futures = new Future[inputSources.length]; for (int columnIndex = 0; columnIndex < inputSources.length; columnIndex++) { if (toCopy == null || toCopy[columnIndex]) { final int cc = columnIndex; - futures[cc] = executor.submit( - () -> doCopySource(addedAndModified, outputSources[cc], inputSources[cc])); + futures[cc] = + executor.submit(() -> doCopySource(addedAndModified, outputSources[cc], inputSources[cc])); } } for (int columnIndex = 0; columnIndex < inputSources.length; columnIndex++) { @@ -354,56 +328,44 @@ private static void doCopyThreads(Index addedAndModified, ColumnSource[] inputSo } } - private static void doCopySource(Index addedAndModified, WritableSource outputSource, - ColumnSource inputSource) { + private static void doCopySource(Index addedAndModified, WritableSource outputSource, ColumnSource inputSource) { try (final OrderedKeys.Iterator okit = addedAndModified.getOrderedKeysIterator(); - final WritableChunkSink.FillFromContext ffc = - outputSource.makeFillFromContext(SPARSE_SELECT_CHUNK_SIZE); - final ChunkSource.GetContext gc = - inputSource.makeGetContext(SPARSE_SELECT_CHUNK_SIZE)) { + final WritableChunkSink.FillFromContext ffc = + 
outputSource.makeFillFromContext(SPARSE_SELECT_CHUNK_SIZE); + final ChunkSource.GetContext gc = inputSource.makeGetContext(SPARSE_SELECT_CHUNK_SIZE)) { while (okit.hasMore()) { - final OrderedKeys chunkOk = - okit.getNextOrderedKeysWithLength(SPARSE_SELECT_CHUNK_SIZE); + final OrderedKeys chunkOk = okit.getNextOrderedKeysWithLength(SPARSE_SELECT_CHUNK_SIZE); final Chunk values = inputSource.getChunk(gc, chunkOk); outputSource.fillFromChunk(ffc, values, chunkOk); } } } - private static void doShiftSingle(SafeCloseablePair shifts, - SparseArrayColumnSource[] outputSources, boolean[] toShift) { + private static void doShiftSingle(SafeCloseablePair shifts, SparseArrayColumnSource[] outputSources, + boolean[] toShift) { // noinspection unchecked final WritableChunk[] values = new WritableChunk[outputSources.length]; final ChunkSource.FillContext[] fcs = new ChunkSource.FillContext[outputSources.length]; - final WritableChunkSink.FillFromContext[] ffcs = - new WritableChunkSink.FillFromContext[outputSources.length]; - - try ( - final SafeCloseableArray> ignored = - new SafeCloseableArray<>(values); - final SafeCloseableArray ignored2 = - new SafeCloseableArray<>(fcs); - final SafeCloseableArray ignored3 = - new SafeCloseableArray<>(ffcs); - final SharedContext sharedContext = SharedContext.makeSharedContext(); - final OrderedKeys.Iterator preIt = shifts.first.getOrderedKeysIterator(); - final OrderedKeys.Iterator postIt = shifts.second.getOrderedKeysIterator()) { + final WritableChunkSink.FillFromContext[] ffcs = new WritableChunkSink.FillFromContext[outputSources.length]; + + try (final SafeCloseableArray> ignored = new SafeCloseableArray<>(values); + final SafeCloseableArray ignored2 = new SafeCloseableArray<>(fcs); + final SafeCloseableArray ignored3 = new SafeCloseableArray<>(ffcs); + final SharedContext sharedContext = SharedContext.makeSharedContext(); + final OrderedKeys.Iterator preIt = shifts.first.getOrderedKeysIterator(); + final OrderedKeys.Iterator postIt = 
shifts.second.getOrderedKeysIterator()) { for (int cc = 0; cc < outputSources.length; cc++) { if (toShift == null || toShift[cc]) { - fcs[cc] = - outputSources[cc].makeFillContext(SPARSE_SELECT_CHUNK_SIZE, sharedContext); + fcs[cc] = outputSources[cc].makeFillContext(SPARSE_SELECT_CHUNK_SIZE, sharedContext); ffcs[cc] = outputSources[cc].makeFillFromContext(SPARSE_SELECT_CHUNK_SIZE); - values[cc] = outputSources[cc].getChunkType() - .makeWritableChunk(SPARSE_SELECT_CHUNK_SIZE); + values[cc] = outputSources[cc].getChunkType().makeWritableChunk(SPARSE_SELECT_CHUNK_SIZE); } } while (preIt.hasMore()) { - final OrderedKeys preChunkOk = - preIt.getNextOrderedKeysWithLength(SPARSE_SELECT_CHUNK_SIZE); - final OrderedKeys postChunkOk = - postIt.getNextOrderedKeysWithLength(SPARSE_SELECT_CHUNK_SIZE); + final OrderedKeys preChunkOk = preIt.getNextOrderedKeysWithLength(SPARSE_SELECT_CHUNK_SIZE); + final OrderedKeys postChunkOk = postIt.getNextOrderedKeysWithLength(SPARSE_SELECT_CHUNK_SIZE); for (int cc = 0; cc < outputSources.length; cc++) { if (toShift == null || toShift[cc]) { outputSources[cc].fillPrevChunk(fcs[cc], values[cc], preChunkOk); @@ -414,8 +376,8 @@ private static void doShiftSingle(SafeCloseablePair shifts, } } - private static void doShiftThreads(SafeCloseablePair shifts, - SparseArrayColumnSource[] outputSources, boolean[] toShift) { + private static void doShiftThreads(SafeCloseablePair shifts, SparseArrayColumnSource[] outputSources, + boolean[] toShift) { final Future[] futures = new Future[outputSources.length]; for (int columnIndex = 0; columnIndex < outputSources.length; columnIndex++) { if (toShift == null || toShift[columnIndex]) { @@ -436,21 +398,17 @@ private static void doShiftThreads(SafeCloseablePair shifts, } } - private static void doShiftSource(SafeCloseablePair shifts, - SparseArrayColumnSource outputSource) { + private static void doShiftSource(SafeCloseablePair shifts, SparseArrayColumnSource outputSource) { try (final OrderedKeys.Iterator 
preIt = shifts.first.getOrderedKeysIterator(); - final OrderedKeys.Iterator postIt = shifts.second.getOrderedKeysIterator(); - final WritableChunkSink.FillFromContext ffc = - outputSource.makeFillFromContext(SPARSE_SELECT_CHUNK_SIZE); - final ChunkSource.FillContext fc = - outputSource.makeFillContext(SPARSE_SELECT_CHUNK_SIZE); - final WritableChunk values = - outputSource.getChunkType().makeWritableChunk(SPARSE_SELECT_CHUNK_SIZE)) { + final OrderedKeys.Iterator postIt = shifts.second.getOrderedKeysIterator(); + final WritableChunkSink.FillFromContext ffc = + outputSource.makeFillFromContext(SPARSE_SELECT_CHUNK_SIZE); + final ChunkSource.FillContext fc = outputSource.makeFillContext(SPARSE_SELECT_CHUNK_SIZE); + final WritableChunk values = + outputSource.getChunkType().makeWritableChunk(SPARSE_SELECT_CHUNK_SIZE)) { while (preIt.hasMore()) { - final OrderedKeys preChunkOk = - preIt.getNextOrderedKeysWithLength(SPARSE_SELECT_CHUNK_SIZE); - final OrderedKeys postChunkOk = - postIt.getNextOrderedKeysWithLength(SPARSE_SELECT_CHUNK_SIZE); + final OrderedKeys preChunkOk = preIt.getNextOrderedKeysWithLength(SPARSE_SELECT_CHUNK_SIZE); + final OrderedKeys postChunkOk = postIt.getNextOrderedKeysWithLength(SPARSE_SELECT_CHUNK_SIZE); outputSource.fillPrevChunk(fc, values, preChunkOk); outputSource.fillFromChunk(ffc, values, postChunkOk); } diff --git a/DB/src/main/java/io/deephaven/db/v2/StreamTableTools.java b/DB/src/main/java/io/deephaven/db/v2/StreamTableTools.java index d9c9a6984f6..d269261b44a 100644 --- a/DB/src/main/java/io/deephaven/db/v2/StreamTableTools.java +++ b/DB/src/main/java/io/deephaven/db/v2/StreamTableTools.java @@ -35,93 +35,86 @@ public static Table streamToAppendOnlyTable(Table streamTable) { final BaseTable baseStreamTable = (BaseTable) streamTable.coalesce(); final ShiftAwareSwapListener swapListener = - baseStreamTable.createSwapListenerIfRefreshing(ShiftAwareSwapListener::new); + 
baseStreamTable.createSwapListenerIfRefreshing(ShiftAwareSwapListener::new); // stream tables must tick Assert.neqNull(swapListener, "swapListener"); final Mutable resultHolder = new MutableObject<>(); - ConstructSnapshot.callDataSnapshotFunction("streamToAppendOnlyTable", - swapListener.makeSnapshotControl(), (boolean usePrev, long beforeClockValue) -> { - final Map columns = new LinkedHashMap<>(); - final Map columnSourceMap = - streamTable.getColumnSourceMap(); - final int columnCount = columnSourceMap.size(); - final ColumnSource[] sourceColumns = new ColumnSource[columnCount]; - final WritableSource[] destColumns = new WritableSource[columnCount]; - int colIdx = 0; - for (Map.Entry nameColumnSourceEntry : columnSourceMap - .entrySet()) { - final ColumnSource existingColumn = nameColumnSourceEntry.getValue(); - final ArrayBackedColumnSource newColumn = - ArrayBackedColumnSource.getMemoryColumnSource(0, - existingColumn.getType(), existingColumn.getComponentType()); - columns.put(nameColumnSourceEntry.getKey(), newColumn); - // for the source columns, we would like to read primitives instead of - // objects in cases where it is possible - sourceColumns[colIdx] = - ReinterpretUtilities.maybeConvertToPrimitive(existingColumn); - // for the destination sources, we know they are array backed sources that - // will actually store primitives and we can fill efficiently - destColumns[colIdx++] = (WritableSource) ReinterpretUtilities - .maybeConvertToPrimitive(newColumn); - } - - - final Index index; - if (usePrev) { - try (final Index useIndex = baseStreamTable.getIndex().getPrevIndex()) { - index = Index.FACTORY.getFlatIndex(useIndex.size()); - ChunkUtils.copyData(sourceColumns, useIndex, destColumns, index, - usePrev); + ConstructSnapshot.callDataSnapshotFunction("streamToAppendOnlyTable", swapListener.makeSnapshotControl(), + (boolean usePrev, long beforeClockValue) -> { + final Map columns = new LinkedHashMap<>(); + final Map columnSourceMap = 
streamTable.getColumnSourceMap(); + final int columnCount = columnSourceMap.size(); + final ColumnSource[] sourceColumns = new ColumnSource[columnCount]; + final WritableSource[] destColumns = new WritableSource[columnCount]; + int colIdx = 0; + for (Map.Entry nameColumnSourceEntry : columnSourceMap + .entrySet()) { + final ColumnSource existingColumn = nameColumnSourceEntry.getValue(); + final ArrayBackedColumnSource newColumn = ArrayBackedColumnSource.getMemoryColumnSource( + 0, existingColumn.getType(), existingColumn.getComponentType()); + columns.put(nameColumnSourceEntry.getKey(), newColumn); + // for the source columns, we would like to read primitives instead of objects in cases + // where it is possible + sourceColumns[colIdx] = ReinterpretUtilities.maybeConvertToPrimitive(existingColumn); + // for the destination sources, we know they are array backed sources that will actually + // store primitives and we can fill efficiently + destColumns[colIdx++] = + (WritableSource) ReinterpretUtilities.maybeConvertToPrimitive(newColumn); } - } else { - index = Index.FACTORY.getFlatIndex(baseStreamTable.getIndex().size()); - ChunkUtils.copyData(sourceColumns, baseStreamTable.getIndex(), destColumns, - index, usePrev); - } - - final QueryTable result = new QueryTable(index, columns); - result.setRefreshing(true); - result.setAttribute(Table.ADD_ONLY_TABLE_ATTRIBUTE, true); - result.setFlat(); - result.addParentReference(swapListener); - resultHolder.setValue(result); - - swapListener.setListenerAndResult(new BaseTable.ShiftAwareListenerImpl( - "streamToAppendOnly", (DynamicTable) streamTable, result) { - @Override - public void onUpdate(Update upstream) { - if (upstream.modified.nonempty() || upstream.shifted.nonempty()) { - throw new IllegalArgumentException( - "Stream tables should not modify or shift!"); - } - final long newRows = upstream.added.size(); - if (newRows == 0) { - return; + + + final Index index; + if (usePrev) { + try (final Index useIndex = 
baseStreamTable.getIndex().getPrevIndex()) { + index = Index.FACTORY.getFlatIndex(useIndex.size()); + ChunkUtils.copyData(sourceColumns, useIndex, destColumns, index, usePrev); } - final long currentSize = index.size(); - columns.values().forEach(c -> c.ensureCapacity(currentSize + newRows)); - - final Index newRange = Index.CURRENT_FACTORY - .getIndexByRange(currentSize, currentSize + newRows - 1); - - ChunkUtils.copyData(sourceColumns, upstream.added, destColumns, - newRange, false); - index.insertRange(currentSize, currentSize + newRows - 1); - - final Update downstream = new Update(); - downstream.added = newRange; - downstream.modified = Index.CURRENT_FACTORY.getEmptyIndex(); - downstream.removed = Index.CURRENT_FACTORY.getEmptyIndex(); - downstream.modifiedColumnSet = ModifiedColumnSet.EMPTY; - downstream.shifted = IndexShiftData.EMPTY; - result.notifyListeners(downstream); + } else { + index = Index.FACTORY.getFlatIndex(baseStreamTable.getIndex().size()); + ChunkUtils.copyData(sourceColumns, baseStreamTable.getIndex(), destColumns, index, usePrev); } - }, result); - return true; - }); + final QueryTable result = new QueryTable(index, columns); + result.setRefreshing(true); + result.setAttribute(Table.ADD_ONLY_TABLE_ATTRIBUTE, true); + result.setFlat(); + result.addParentReference(swapListener); + resultHolder.setValue(result); + + swapListener.setListenerAndResult(new BaseTable.ShiftAwareListenerImpl("streamToAppendOnly", + (DynamicTable) streamTable, result) { + @Override + public void onUpdate(Update upstream) { + if (upstream.modified.nonempty() || upstream.shifted.nonempty()) { + throw new IllegalArgumentException("Stream tables should not modify or shift!"); + } + final long newRows = upstream.added.size(); + if (newRows == 0) { + return; + } + final long currentSize = index.size(); + columns.values().forEach(c -> c.ensureCapacity(currentSize + newRows)); + + final Index newRange = + Index.CURRENT_FACTORY.getIndexByRange(currentSize, currentSize + 
newRows - 1); + + ChunkUtils.copyData(sourceColumns, upstream.added, destColumns, newRange, false); + index.insertRange(currentSize, currentSize + newRows - 1); + + final Update downstream = new Update(); + downstream.added = newRange; + downstream.modified = Index.CURRENT_FACTORY.getEmptyIndex(); + downstream.removed = Index.CURRENT_FACTORY.getEmptyIndex(); + downstream.modifiedColumnSet = ModifiedColumnSet.EMPTY; + downstream.shifted = IndexShiftData.EMPTY; + result.notifyListeners(downstream); + } + }, result); + + return true; + }); return resultHolder.getValue(); }); diff --git a/DB/src/main/java/io/deephaven/db/v2/SwapListener.java b/DB/src/main/java/io/deephaven/db/v2/SwapListener.java index 9ada3592a51..2f2163b8c3b 100644 --- a/DB/src/main/java/io/deephaven/db/v2/SwapListener.java +++ b/DB/src/main/java/io/deephaven/db/v2/SwapListener.java @@ -10,24 +10,21 @@ public SwapListener(BaseTable sourceTable) { } @Override - public synchronized void onUpdate(final Index added, final Index removed, - final Index modified) { + public synchronized void onUpdate(final Index added, final Index removed, final Index modified) { // not a direct listener throw new UnsupportedOperationException(); } @Override public synchronized NotificationQueue.IndexUpdateNotification getNotification( - final Index added, final Index removed, final Index modified) { + final Index added, final Index removed, final Index modified) { return doGetNotification(() -> eventualListener.getNotification(added, removed, modified)); } @Override public void setInitialImage(Index initialImage) { - // we should never use an initialImage, because the swapListener listens to the table before - // we are confident - // that we'll get a good snapshot, and if we get a bad snapshot, it will never get updated - // appropriately + // we should never use an initialImage, because the swapListener listens to the table before we are confident + // that we'll get a good snapshot, and if we get a bad snapshot, it will 
never get updated appropriately throw new IllegalStateException(); } diff --git a/DB/src/main/java/io/deephaven/db/v2/SwapListenerBase.java b/DB/src/main/java/io/deephaven/db/v2/SwapListenerBase.java index b33c34c2a58..5e108ad3735 100644 --- a/DB/src/main/java/io/deephaven/db/v2/SwapListenerBase.java +++ b/DB/src/main/java/io/deephaven/db/v2/SwapListenerBase.java @@ -16,32 +16,29 @@ /** * Watch for ticks and when initialization is complete forward to the eventual listener. * - * The SwapListenerBase is attached to a table so that we can listen for updates during the LTM - * cycle; and if any updates occur, we'll be able to notice them and retry initialization. If no - * ticks were received before the result is ready, then we should forward all calls to our eventual - * listener. + * The SwapListenerBase is attached to a table so that we can listen for updates during the LTM cycle; and if any + * updates occur, we'll be able to notice them and retry initialization. If no ticks were received before the result is + * ready, then we should forward all calls to our eventual listener. * - * Callers should use our start and end functions. The start function is called at the beginning of - * a data snapshot; and allows us to setup our state variables. At the end of the snapshot attempt, - * end() is called; and if there were no clock changes, we were not gotNotification, and no - * notifications were enqueued; then we have a successful snapshot and can return true. We then set - * the currentListener, so that all future calls are forwarded to the listener. + * Callers should use our start and end functions. The start function is called at the beginning of a data snapshot; and + * allows us to setup our state variables. At the end of the snapshot attempt, end() is called; and if there were no + * clock changes, we were not gotNotification, and no notifications were enqueued; then we have a successful snapshot + * and can return true. 
We then set the currentListener, so that all future calls are forwarded to the listener. * - * Use either {@link SwapListener} or {@link ShiftAwareSwapListener} depending on which Listener - * interface you are using. + * Use either {@link SwapListener} or {@link ShiftAwareSwapListener} depending on which Listener interface you are + * using. */ -public abstract class SwapListenerBase extends LivenessArtifact - implements ListenerBase { +public abstract class SwapListenerBase extends LivenessArtifact implements ListenerBase { protected static final boolean DEBUG = - Configuration.getInstance().getBooleanWithDefault("SwapListener.debug", false); + Configuration.getInstance().getBooleanWithDefault("SwapListener.debug", false); static final boolean DEBUG_NOTIFICATIONS = - Configuration.getInstance().getBooleanWithDefault("SwapListener.debugNotifications", false); + Configuration.getInstance().getBooleanWithDefault("SwapListener.debugNotifications", false); private static final Logger log = LoggerFactory.getLogger(SwapListenerBase.class); /** - * The listener that will be called if this operation is successful. If we have a successful - * snapshot, then success is set to true. + * The listener that will be called if this operation is successful. If we have a successful snapshot, then success + * is set to true. 
*/ T eventualListener; private NotificationStepReceiver eventualResult; @@ -65,10 +62,9 @@ public SwapListenerBase(final BaseTable sourceTable) { public ConstructSnapshot.SnapshotControl makeSnapshotControl() { // noinspection AutoBoxing return ConstructSnapshot.makeSnapshotControl( - this::start, - (final long currentClockValue, - final boolean usingPreviousValues) -> isInInitialNotificationWindow(), - (final long afterClockValue, final boolean usedPreviousValues) -> end(afterClockValue)); + this::start, + (final long currentClockValue, final boolean usingPreviousValues) -> isInInitialNotificationWindow(), + (final long afterClockValue, final boolean usedPreviousValues) -> end(afterClockValue)); } /** @@ -85,18 +81,18 @@ protected synchronized boolean start(final long clockCycle) { final boolean updating = LogicalClock.getState(clockCycle) == LogicalClock.State.Updating; if (DEBUG) { log.info().append("Swap Listener source=") - .append(System.identityHashCode(sourceTable)) - .append(" swap=") - .append(System.identityHashCode(this)) - .append(" start: ") - .append(currentStep) - .append(" ") - .append(LogicalClock.getState(clockCycle).toString()) - .append(", last=").append(lastNotificationStep) - .append(", updating=") - .append(updating) - .append(", updatedOnThisCycle=") - .append(updatedOnThisCycle).endl(); + .append(System.identityHashCode(sourceTable)) + .append(" swap=") + .append(System.identityHashCode(this)) + .append(" start: ") + .append(currentStep) + .append(" ") + .append(LogicalClock.getState(clockCycle).toString()) + .append(", last=").append(lastNotificationStep) + .append(", updating=") + .append(updating) + .append(", updatedOnThisCycle=") + .append(updatedOnThisCycle).endl(); } return updating && !updatedOnThisCycle; } @@ -106,8 +102,8 @@ protected synchronized boolean start(final long clockCycle) { * * @param clockCycle The {@link LogicalClock logical clock} cycle we are ending a snapshot on * @return true if the snapshot was successful, 
false if we should try again. - * @throws IllegalStateException If the snapshot was successful (consistent), but the snapshot - * function failed to set the eventual listener or eventual result + * @throws IllegalStateException If the snapshot was successful (consistent), but the snapshot function failed to + * set the eventual listener or eventual result */ protected synchronized boolean end(@SuppressWarnings("unused") final long clockCycle) { if (isInInitialNotificationWindow()) { @@ -124,13 +120,13 @@ protected synchronized boolean end(@SuppressWarnings("unused") final long clockC if (DEBUG) { log.info().append("Swap Listener ") - .append(System.identityHashCode(sourceTable)) - .append(" swap=") - .append(System.identityHashCode(this)) - .append(" End: success=") - .append(success) - .append(", last=") - .append(lastNotificationStep).endl(); + .append(System.identityHashCode(sourceTable)) + .append(" swap=") + .append(System.identityHashCode(this)) + .append(" End: success=") + .append(success) + .append(", last=") + .append(lastNotificationStep).endl(); } if (success) { @@ -142,14 +138,14 @@ protected synchronized boolean end(@SuppressWarnings("unused") final long clockC @Override public synchronized void onFailure( - final Throwable originalException, final UpdatePerformanceTracker.Entry sourceEntry) { + final Throwable originalException, final UpdatePerformanceTracker.Entry sourceEntry) { // not a direct listener throw new UnsupportedOperationException(); } @Override public synchronized NotificationQueue.Notification getErrorNotification( - final Throwable originalException, final UpdatePerformanceTracker.Entry sourceEntry) { + final Throwable originalException, final UpdatePerformanceTracker.Entry sourceEntry) { if (success && !isInInitialNotificationWindow()) { return eventualListener.getErrorNotification(originalException, sourceEntry); } else { @@ -169,21 +165,20 @@ boolean isInInitialNotificationWindow() { * @param resultTable The table that will result 
from this operation */ public synchronized void setListenerAndResult(@NotNull final T listener, - @NotNull final NotificationStepReceiver resultTable) { + @NotNull final NotificationStepReceiver resultTable) { eventualListener = listener; eventualResult = resultTable; if (DEBUG) { log.info().append("SwapListener source=") - .append(System.identityHashCode(sourceTable)) - .append(", swap=") - .append(System.identityHashCode(this)).append(", result=") - .append(System.identityHashCode(resultTable)).endl(); + .append(System.identityHashCode(sourceTable)) + .append(", swap=") + .append(System.identityHashCode(this)).append(", result=") + .append(System.identityHashCode(resultTable)).endl(); } } /** - * Invoke {@link QueryTable#listenForUpdates(Listener)} for the appropriate subclass of - * {@link SwapListenerBase}. + * Invoke {@link QueryTable#listenForUpdates(Listener)} for the appropriate subclass of {@link SwapListenerBase}. */ public abstract void subscribeForUpdates(); @@ -211,27 +206,27 @@ public boolean canExecute(final long step) { @Override public LogOutput append(final LogOutput logOutput) { return logOutput.append("Wrapped(ShiftAwareSwapListener=") - .append(System.identityHashCode(sourceTable)) - .append(" swap=") - .append(System.identityHashCode(SwapListenerBase.this)) - .append("){") - .append(notification) - .append("}"); + .append(System.identityHashCode(sourceTable)) + .append(" swap=") + .append(System.identityHashCode(SwapListenerBase.this)) + .append("){") + .append(notification) + .append("}"); } @Override public void run() { log.info().append("ShiftAwareSwapListener: Firing notification ") - .append(System.identityHashCode(sourceTable)) - .append(" swap=") - .append(System.identityHashCode(SwapListenerBase.this)) - .append(", clock=") - .append(LogicalClock.DEFAULT.currentStep()).endl(); + .append(System.identityHashCode(sourceTable)) + .append(" swap=") + .append(System.identityHashCode(SwapListenerBase.this)) + .append(", clock=") + 
.append(LogicalClock.DEFAULT.currentStep()).endl(); notification.run(); log.info().append("ShiftAwareSwapListener: Complete notification ") - .append(System.identityHashCode(sourceTable)) - .append(" swap=") - .append(System.identityHashCode(SwapListenerBase.this)).endl(); + .append(System.identityHashCode(sourceTable)) + .append(" swap=") + .append(System.identityHashCode(SwapListenerBase.this)).endl(); } }; } diff --git a/DB/src/main/java/io/deephaven/db/v2/TableKeyStateRegistry.java b/DB/src/main/java/io/deephaven/db/v2/TableKeyStateRegistry.java index 8fee5dcdbfe..8f192d2af99 100644 --- a/DB/src/main/java/io/deephaven/db/v2/TableKeyStateRegistry.java +++ b/DB/src/main/java/io/deephaven/db/v2/TableKeyStateRegistry.java @@ -16,7 +16,7 @@ public class TableKeyStateRegistry { private final KeyedObjectHashMap> registeredTableMaps = - new KeyedObjectHashMap<>(StateKey.getInstance()); + new KeyedObjectHashMap<>(StateKey.getInstance()); /** * Get (or create if none exists) a value for the supplied {@link TableKey}. 
@@ -25,7 +25,7 @@ public class TableKeyStateRegistry { * @return The associated value */ public VALUE_TYPE computeIfAbsent(@NotNull final TableKey tableKey, - @NotNull final Function valueFactory) { + @NotNull final Function valueFactory) { return registeredTableMaps.putIfAbsent(tableKey, State::new, valueFactory).value; } @@ -43,15 +43,13 @@ private static class State { private final VALUE_TYPE value; - private State(@NotNull final TableKey key, - @NotNull final Function valueFactory) { + private State(@NotNull final TableKey key, @NotNull final Function valueFactory) { this.key = key.makeImmutable(); value = valueFactory.apply(key); } } - private static class StateKey - extends KeyedObjectKey.Basic> { + private static class StateKey extends KeyedObjectKey.Basic> { @SuppressWarnings("rawtypes") private static final StateKey INSTANCE = new StateKey(); diff --git a/DB/src/main/java/io/deephaven/db/v2/TableMap.java b/DB/src/main/java/io/deephaven/db/v2/TableMap.java index ffd59aa0481..a63877d5486 100644 --- a/DB/src/main/java/io/deephaven/db/v2/TableMap.java +++ b/DB/src/main/java/io/deephaven/db/v2/TableMap.java @@ -31,8 +31,7 @@ static TableMap emptyMap() { } /** - * Check the key passed to the function in {@link #transformTablesWithKey(BiFunction)}, to see - * if it's the sentinel. + * Check the key passed to the function in {@link #transformTablesWithKey(BiFunction)}, to see if it's the sentinel. * * @param key the object * @return true if the key is the sentinel @@ -88,10 +87,9 @@ static boolean isSentinel(Object key) { int size(); /** - * When creating the table map, some of the keys that we would like to be there eventually may - * not exist. This call lets you pre-populate keys, so that at initialization time you can - * perform the appropriate joins, etc., on empty tables that you expect to be populated in the - * future. + * When creating the table map, some of the keys that we would like to be there eventually may not exist. 
This call + * lets you pre-populate keys, so that at initialization time you can perform the appropriate joins, etc., on empty + * tables that you expect to be populated in the future. * * @param keys the keys to add to the map * @return this TableMap @@ -134,9 +132,8 @@ static boolean isSentinel(Object key) { /** * Applies a function to this tableMap. * - * This is useful if you have a reference to a tableMap and want to run a series of operations - * against the table map without each individual operation resulting in a remote method - * invocation. + * This is useful if you have a reference to a tableMap and want to run a series of operations against the table map + * without each individual operation resulting in a remote method invocation. * * @param function the function to run, its single argument will be this table map. * @param the return type of function @@ -145,8 +142,8 @@ static boolean isSentinel(Object key) { R apply(Function.Unary function); /** - * Applies a transformation function on all tables in the TableMap, producing a new TableMap - * which will update as new keys are added. + * Applies a transformation function on all tables in the TableMap, producing a new TableMap which will update as + * new keys are added. * * @param function the function to apply to each table in this TableMap * @return a new TableMap where each table has had function applied @@ -156,26 +153,25 @@ default TableMap transformTables(java.util.function.Function funct } /** - * Applies a transformation function on all tables in the TableMap, producing a new TableMap - * which will update as new keys are added. + * Applies a transformation function on all tables in the TableMap, producing a new TableMap which will update as + * new keys are added. 
* * @param returnDefinition the table definition for the tables the function will return * @param function the function to apply to each table in this TableMap * @return a new TableMap where each table has had function applied */ default TableMap transformTables(TableDefinition returnDefinition, - java.util.function.Function function) { + java.util.function.Function function) { return transformTablesWithKey(returnDefinition, TableMapFunctionAdapter.of(function)); } /** - * Applies a transformation function on all tables in the TableMap, producing a new TableMap - * which will update as new keys are added. + * Applies a transformation function on all tables in the TableMap, producing a new TableMap which will update as + * new keys are added. *

    - * The function may be passed a sentinel key, which can be checked with - * {@link TableMap#isSentinel(Object)}. On the sentinel key, the function will be passed in an - * empty table, and is expected to return an empty table of the proper definition. To avoid this - * sentinel invocation, callers can be explicit and use + * The function may be passed a sentinel key, which can be checked with {@link TableMap#isSentinel(Object)}. On the + * sentinel key, the function will be passed in an empty table, and is expected to return an empty table of the + * proper definition. To avoid this sentinel invocation, callers can be explicit and use * {@link #transformTablesWithKey(TableDefinition, BiFunction)}. * * @param function the bifunction to apply to each table in this TableMap @@ -184,28 +180,27 @@ default TableMap transformTables(TableDefinition returnDefinition, TableMap transformTablesWithKey(java.util.function.BiFunction function); /** - * Applies a transformation function on all tables in the TableMap, producing a new TableMap - * which will update as new keys are added. + * Applies a transformation function on all tables in the TableMap, producing a new TableMap which will update as + * new keys are added. * * @param returnDefinition the table definition for the tables the function will return * @param function the bifunction to apply to each table in this TableMap * @return a new TableMap where each table has had function applied */ TableMap transformTablesWithKey(TableDefinition returnDefinition, - java.util.function.BiFunction function); + java.util.function.BiFunction function); /** - * Applies a BiFunction function on all tables in this TableMap and otherMap that have matching - * keys, producing a new TableMap which will update as new keys are added. Only applies the - * function to tables which exist in both maps. 
+ * Applies a BiFunction function on all tables in this TableMap and otherMap that have matching keys, producing a + * new TableMap which will update as new keys are added. Only applies the function to tables which exist in both + * maps. * * @param otherMap the other TableMap - * @param function the function to apply to each table in this TableMap, the tables in this map - * are the first argument the tables in the other map are the second argument. + * @param function the function to apply to each table in this TableMap, the tables in this map are the first + * argument the tables in the other map are the second argument. * @return a new TableMap where each table has had function applied */ - TableMap transformTablesWithMap(TableMap otherMap, - java.util.function.BiFunction function); + TableMap transformTablesWithMap(TableMap otherMap, java.util.function.BiFunction function); /** * Table map change listener. diff --git a/DB/src/main/java/io/deephaven/db/v2/TableMapImpl.java b/DB/src/main/java/io/deephaven/db/v2/TableMapImpl.java index e5969b0a229..0b547f889e2 100644 --- a/DB/src/main/java/io/deephaven/db/v2/TableMapImpl.java +++ b/DB/src/main/java/io/deephaven/db/v2/TableMapImpl.java @@ -21,8 +21,7 @@ public abstract class TableMapImpl extends LivenessArtifact implements TableMap, private final WeakReferenceManager keyListeners = new WeakReferenceManager<>(true); @ReferentialIntegrity - private final Collection parents = - new KeyedObjectHashSet<>(IdentityKeyedObjectKey.getInstance()); + private final Collection parents = new KeyedObjectHashSet<>(IdentityKeyedObjectKey.getInstance()); private boolean refreshing; @Override @@ -65,8 +64,7 @@ protected void notifyKeyListeners(Object key) { } /** - * Returns true if there are any {@link io.deephaven.db.v2.TableMap.Listener} for table - * additions. + * Returns true if there are any {@link io.deephaven.db.v2.TableMap.Listener} for table additions. * *

    * Note that this function returns false if there are only KeyListeners. diff --git a/DB/src/main/java/io/deephaven/db/v2/TableMapProxyHandler.java b/DB/src/main/java/io/deephaven/db/v2/TableMapProxyHandler.java index 08e0e9dce56..cbe1466048c 100644 --- a/DB/src/main/java/io/deephaven/db/v2/TableMapProxyHandler.java +++ b/DB/src/main/java/io/deephaven/db/v2/TableMapProxyHandler.java @@ -29,53 +29,45 @@ public class TableMapProxyHandler extends LivenessArtifact implements Invocation private static final Set AJ_METHOD_NAMES = new HashSet<>(); static { try { - HIJACKED_DELEGATIONS.put(Table.class.getMethod("size"), (proxy, method, - args) -> ((TableMapProxyHandler) Proxy.getInvocationHandler(proxy)).size()); - HIJACKED_DELEGATIONS.put(Table.class.getMethod("coalesce"), (proxy, method, - args) -> ((TableMapProxyHandler) Proxy.getInvocationHandler(proxy)).coalesce()); - HIJACKED_DELEGATIONS.put(Table.class.getMethod("getDefinition"), - (proxy, method, args) -> ((TableMapProxyHandler) Proxy.getInvocationHandler(proxy)) - .getDefinition()); + HIJACKED_DELEGATIONS.put(Table.class.getMethod("size"), + (proxy, method, args) -> ((TableMapProxyHandler) Proxy.getInvocationHandler(proxy)).size()); + HIJACKED_DELEGATIONS.put(Table.class.getMethod("coalesce"), + (proxy, method, args) -> ((TableMapProxyHandler) Proxy.getInvocationHandler(proxy)).coalesce()); + HIJACKED_DELEGATIONS.put(Table.class.getMethod("getDefinition"), (proxy, method, + args) -> ((TableMapProxyHandler) Proxy.getInvocationHandler(proxy)).getDefinition()); HIJACKED_DELEGATIONS.put( - TransformableTableMap.class.getMethod("asTable", boolean.class, boolean.class, - boolean.class), - (proxy, method, args) -> ((TableMapProxyHandler) Proxy.getInvocationHandler(proxy)) - .asTable((boolean) args[0], (boolean) args[1], (boolean) args[2])); - HIJACKED_DELEGATIONS.put(TransformableTableMap.class.getMethod("merge"), (proxy, method, - args) -> ((TableMapProxyHandler) Proxy.getInvocationHandler(proxy)).merge()); + 
TransformableTableMap.class.getMethod("asTable", boolean.class, boolean.class, boolean.class), + (proxy, method, args) -> ((TableMapProxyHandler) Proxy.getInvocationHandler(proxy)) + .asTable((boolean) args[0], (boolean) args[1], (boolean) args[2])); + HIJACKED_DELEGATIONS.put(TransformableTableMap.class.getMethod("merge"), + (proxy, method, args) -> ((TableMapProxyHandler) Proxy.getInvocationHandler(proxy)).merge()); HIJACKED_DELEGATIONS.put(TransformableTableMap.class.getMethod("asTableMap"), - (proxy, method, args) -> ((TableMapProxyHandler) Proxy.getInvocationHandler(proxy)) - .asTableMap()); - HIJACKED_DELEGATIONS.put(TransformableTableMap.class.getMethod("asTableBuilder"), - (proxy, method, args) -> ((TableMapProxyHandler) Proxy.getInvocationHandler(proxy)) - .asTableBuilder()); + (proxy, method, args) -> ((TableMapProxyHandler) Proxy.getInvocationHandler(proxy)).asTableMap()); + HIJACKED_DELEGATIONS.put(TransformableTableMap.class.getMethod("asTableBuilder"), (proxy, method, + args) -> ((TableMapProxyHandler) Proxy.getInvocationHandler(proxy)).asTableBuilder()); HIJACKED_DELEGATIONS.put(Object.class.getMethod("toString"), - (proxy, method, args) -> Proxy.getInvocationHandler(proxy).toString()); - - HIJACKED_DELEGATIONS.put(Table.class.getMethod("getAttribute", String.class), - (proxy, method, args) -> ((TableMapProxyHandler) Proxy.getInvocationHandler(proxy)) - .getAttribute((String) args[0])); - HIJACKED_DELEGATIONS.put(Table.class.getMethod("getAttributeNames"), - (proxy, method, args) -> ((TableMapProxyHandler) Proxy.getInvocationHandler(proxy)) - .getAttributeNames()); - HIJACKED_DELEGATIONS.put(Table.class.getMethod("hasAttribute", String.class), - (proxy, method, args) -> ((TableMapProxyHandler) Proxy.getInvocationHandler(proxy)) - .hasAttribute((String) args[0])); + (proxy, method, args) -> Proxy.getInvocationHandler(proxy).toString()); + + HIJACKED_DELEGATIONS.put(Table.class.getMethod("getAttribute", String.class), (proxy, method, + args) -> 
((TableMapProxyHandler) Proxy.getInvocationHandler(proxy)).getAttribute((String) args[0])); + HIJACKED_DELEGATIONS.put(Table.class.getMethod("getAttributeNames"), (proxy, method, + args) -> ((TableMapProxyHandler) Proxy.getInvocationHandler(proxy)).getAttributeNames()); + HIJACKED_DELEGATIONS.put(Table.class.getMethod("hasAttribute", String.class), (proxy, method, + args) -> ((TableMapProxyHandler) Proxy.getInvocationHandler(proxy)).hasAttribute((String) args[0])); HIJACKED_DELEGATIONS.put(Table.class.getMethod("getAttributes"), - (proxy, method, args) -> ((TableMapProxyHandler) Proxy.getInvocationHandler(proxy)) - .getAttributes(true, Collections.emptySet())); + (proxy, method, args) -> ((TableMapProxyHandler) Proxy.getInvocationHandler(proxy)) + .getAttributes(true, Collections.emptySet())); // noinspection unchecked HIJACKED_DELEGATIONS.put(Table.class.getMethod("getAttributes", Collection.class), - (proxy, method, args) -> ((TableMapProxyHandler) Proxy.getInvocationHandler(proxy)) - .getAttributes(true, (Collection) args[0])); + (proxy, method, args) -> ((TableMapProxyHandler) Proxy.getInvocationHandler(proxy)) + .getAttributes(true, (Collection) args[0])); COALESCING_METHODS.add(Table.class.getMethod("getIndex")); COALESCING_METHODS.add(Table.class.getMethod("getColumnSource", String.class)); COALESCING_METHODS.add(Table.class.getMethod("getColumnSourceMap")); COALESCING_METHODS.add(Table.class.getMethod("getColumn", int.class)); COALESCING_METHODS.add(Table.class.getMethod("getColumn", String.class)); - COALESCING_METHODS - .add(Table.class.getMethod("setAttribute", String.class, Object.class)); + COALESCING_METHODS.add(Table.class.getMethod("setAttribute", String.class, Object.class)); JOIN_METHOD_NAMES.add("join"); JOIN_METHOD_NAMES.add("naturalJoin"); @@ -95,17 +87,17 @@ public class TableMapProxyHandler extends LivenessArtifact implements Invocation private Table coalesced; public static Table makeProxy(TableMap localTableMap, boolean strictKeys, boolean 
allowCoalesce, - boolean sanityCheckJoins) { + boolean sanityCheckJoins) { return (Table) Proxy.newProxyInstance(TableMapProxyHandler.class.getClassLoader(), - new Class[] {TableMapProxy.class}, - new TableMapProxyHandler(localTableMap, strictKeys, allowCoalesce, sanityCheckJoins)); + new Class[] {TableMapProxy.class}, + new TableMapProxyHandler(localTableMap, strictKeys, allowCoalesce, sanityCheckJoins)); } public interface TableMapProxy extends Table, TransformableTableMap { } - private TableMapProxyHandler(TableMap underlyingTableMap, boolean strictKeys, - boolean allowCoalesce, boolean sanityCheckJoins) { + private TableMapProxyHandler(TableMap underlyingTableMap, boolean strictKeys, boolean allowCoalesce, + boolean sanityCheckJoins) { this.underlyingTableMap = underlyingTableMap; this.strictKeys = strictKeys; this.allowCoalesce = allowCoalesce; @@ -126,8 +118,7 @@ public Object invoke(Object proxy, Method method, Object[] args) throws Throwabl } if (method.getReturnType() != Table.class) { - throw new UnsupportedOperationException( - "Method is not supported by TableMapProxyHandler: " + method); + throw new UnsupportedOperationException("Method is not supported by TableMapProxyHandler: " + method); } final Class[] parameterTypes = method.getParameterTypes(); @@ -138,8 +129,7 @@ public Object invoke(Object proxy, Method method, Object[] args) throws Throwabl for (int ii = 0; ii < parameterTypes.length; ii++) { if (Table.class.isAssignableFrom(parameterTypes[ii])) { if (tableArgument >= 0) { - throw new UnsupportedOperationException( - "Can not handle methods with multiple Table arguments!"); + throw new UnsupportedOperationException("Can not handle methods with multiple Table arguments!"); } tableArgument = ii; if (args[ii] instanceof TableMapProxy) { @@ -149,18 +139,16 @@ public Object invoke(Object proxy, Method method, Object[] args) throws Throwabl } if (tableArgument < 0 || !isTableMapProxy) { - return 
QueryPerformanceRecorder.withNugget("TableMapProxyHandler-" + method.getName(), - () -> { - final TableMap resultMap = underlyingTableMap.transformTables(x -> { - try { - return (Table) method.invoke(x, args); - } catch (IllegalAccessException | InvocationTargetException e) { - throw new RuntimeException( - "Error invoking method on TableMapProxy: " + method, e); - } - }); - return makeProxy(resultMap, strictKeys, allowCoalesce, sanityCheckJoins); + return QueryPerformanceRecorder.withNugget("TableMapProxyHandler-" + method.getName(), () -> { + final TableMap resultMap = underlyingTableMap.transformTables(x -> { + try { + return (Table) method.invoke(x, args); + } catch (IllegalAccessException | InvocationTargetException e) { + throw new RuntimeException("Error invoking method on TableMapProxy: " + method, e); + } }); + return makeProxy(resultMap, strictKeys, allowCoalesce, sanityCheckJoins); + }); } @@ -170,125 +158,114 @@ public Object invoke(Object proxy, Method method, Object[] args) throws Throwabl final List references = new ArrayList<>(); final int finalArgument = tableArgument; - return QueryPerformanceRecorder.withNugget("TableMapProxyHandler-" + method.getName(), - () -> { - if (strictKeys) { - final Set otherKeys = - new HashSet<>(Arrays.asList(otherMap.getKeySet())); - final Set ourKeys = - new HashSet<>(Arrays.asList(underlyingTableMap.getKeySet())); - - if (!otherKeys.containsAll(ourKeys) || !ourKeys.containsAll(otherKeys)) { - final Set tempOther = new HashSet<>(otherKeys); - tempOther.removeAll(ourKeys); - ourKeys.removeAll(otherKeys); - - throw new IllegalArgumentException( - "Strict keys is set, but key sets differ left missing=" - + ourKeys.toString() + ", right missing=" + tempOther.toString()); - } + return QueryPerformanceRecorder.withNugget("TableMapProxyHandler-" + method.getName(), () -> { + if (strictKeys) { + final Set otherKeys = new HashSet<>(Arrays.asList(otherMap.getKeySet())); + final Set ourKeys = new 
HashSet<>(Arrays.asList(underlyingTableMap.getKeySet())); - // if we are strict and a key is added, we know something has gone wrong; - // because they can't be added at exactly the same time - final TableMap.KeyListener enforceStrictListener = key -> { - throw new IllegalStateException( - "When operating on a TableMapProxy, keys may not be added after the initial merge() call when strictKeys is set, key=" - + key); - }; - underlyingTableMap.addKeyListener(enforceStrictListener); - otherMap.addKeyListener(enforceStrictListener); - references.add(enforceStrictListener); + if (!otherKeys.containsAll(ourKeys) || !ourKeys.containsAll(otherKeys)) { + final Set tempOther = new HashSet<>(otherKeys); + tempOther.removeAll(ourKeys); + ourKeys.removeAll(otherKeys); + + throw new IllegalArgumentException("Strict keys is set, but key sets differ left missing=" + + ourKeys.toString() + ", right missing=" + tempOther.toString()); } - if (sanityCheckJoins) { - int keyArgument = -1; - boolean skipLast = false; - if (JOIN_METHOD_NAMES.contains(method.getName())) { - // we need to figure out the keys, from our first argument that is after the - // right table - keyArgument = finalArgument + 1; - } else if (AJ_METHOD_NAMES.contains(method.getName())) { - keyArgument = finalArgument + 1; - // we don't use the last key, because it is not used for exact match - skipLast = true; - } + // if we are strict and a key is added, we know something has gone wrong; because they can't be added at + // exactly the same time + final TableMap.KeyListener enforceStrictListener = key -> { + throw new IllegalStateException( + "When operating on a TableMapProxy, keys may not be added after the initial merge() call when strictKeys is set, key=" + + key); + }; + underlyingTableMap.addKeyListener(enforceStrictListener); + otherMap.addKeyListener(enforceStrictListener); + references.add(enforceStrictListener); + } - if (keyArgument > 0) { - final List keyColumns = new ArrayList<>(); + if (sanityCheckJoins) 
{ + int keyArgument = -1; + boolean skipLast = false; + if (JOIN_METHOD_NAMES.contains(method.getName())) { + // we need to figure out the keys, from our first argument that is after the right table + keyArgument = finalArgument + 1; + } else if (AJ_METHOD_NAMES.contains(method.getName())) { + keyArgument = finalArgument + 1; + // we don't use the last key, because it is not used for exact match + skipLast = true; + } - final Object keyValue = args[keyArgument]; - if (keyValue == null) { - throw new IllegalArgumentException( - "Join Keys Value is null for Join operation!"); - } + if (keyArgument > 0) { + final List keyColumns = new ArrayList<>(); - final Class keyClass = keyValue.getClass(); - if (Collection.class.isAssignableFrom(keyClass)) { - // we should have a collection of Strings - // noinspection unchecked - keyColumns.addAll(Arrays.asList( - MatchPairFactory.getExpressions((Collection) (keyValue)))); - } else if (String.class.isAssignableFrom(keyClass)) { - // we need to turn into MatchPairs - keyColumns.addAll(Arrays.asList(MatchPairFactory - .getExpressions(StringUtils.splitToCollection((String) keyValue)))); - } else if (MatchPair[].class.isAssignableFrom(keyClass)) { - keyColumns.addAll(Arrays.asList((MatchPair[]) keyValue)); - } + final Object keyValue = args[keyArgument]; + if (keyValue == null) { + throw new IllegalArgumentException("Join Keys Value is null for Join operation!"); + } - final String description = method.getName() + "(" + MatchPair.matchString( - keyColumns.toArray(new MatchPair[keyColumns.size()])) + ")"; + final Class keyClass = keyValue.getClass(); + if (Collection.class.isAssignableFrom(keyClass)) { + // we should have a collection of Strings + // noinspection unchecked + keyColumns.addAll( + Arrays.asList(MatchPairFactory.getExpressions((Collection) (keyValue)))); + } else if (String.class.isAssignableFrom(keyClass)) { + // we need to turn into MatchPairs + keyColumns.addAll(Arrays.asList( + 
MatchPairFactory.getExpressions(StringUtils.splitToCollection((String) keyValue)))); + } else if (MatchPair[].class.isAssignableFrom(keyClass)) { + keyColumns.addAll(Arrays.asList((MatchPair[]) keyValue)); + } - if (skipLast) { - keyColumns.remove(keyColumns.size() - 1); - } + final String description = method.getName() + "(" + + MatchPair.matchString(keyColumns.toArray(new MatchPair[keyColumns.size()])) + ")"; - final Map joinKeyToTableKey = new HashMap<>(); + if (skipLast) { + keyColumns.remove(keyColumns.size() - 1); + } - final String[] leftKeyNames = - keyColumns.stream().map(MatchPair::left).toArray(String[]::new); - final String[] rightKeyNames = - keyColumns.stream().map(MatchPair::right).toArray(String[]::new); + final Map joinKeyToTableKey = new HashMap<>(); - for (Object tableKey : underlyingTableMap.getKeySet()) { - final Table leftTable = underlyingTableMap.get(tableKey); - final Table rightTable = otherMap.get(tableKey); + final String[] leftKeyNames = keyColumns.stream().map(MatchPair::left).toArray(String[]::new); + final String[] rightKeyNames = keyColumns.stream().map(MatchPair::right).toArray(String[]::new); - final Table leftKeyTable = leftTable.selectDistinct(leftKeyNames); - references.add(verifyDisjointJoinKeys(description + " Left", - joinKeyToTableKey, tableKey, leftKeyNames, leftKeyTable)); + for (Object tableKey : underlyingTableMap.getKeySet()) { + final Table leftTable = underlyingTableMap.get(tableKey); + final Table rightTable = otherMap.get(tableKey); - final Table rightKeyTable = rightTable.selectDistinct(rightKeyNames); - references.add(verifyDisjointJoinKeys(description + " Right", - joinKeyToTableKey, tableKey, rightKeyNames, rightKeyTable)); - } + final Table leftKeyTable = leftTable.selectDistinct(leftKeyNames); + references.add(verifyDisjointJoinKeys(description + " Left", joinKeyToTableKey, tableKey, + leftKeyNames, leftKeyTable)); + + final Table rightKeyTable = rightTable.selectDistinct(rightKeyNames); + 
references.add(verifyDisjointJoinKeys(description + " Right", joinKeyToTableKey, tableKey, + rightKeyNames, rightKeyTable)); } } + } - final TableMap resultMap = - this.underlyingTableMap.transformTablesWithMap(otherMap, (x, y) -> { - final Object[] rewrittenArgs = Arrays.copyOf(args, args.length); - rewrittenArgs[indexOfProxy] = y; - try { - return (Table) method.invoke(x, rewrittenArgs); - } catch (IllegalAccessException | InvocationTargetException e) { - throw new RuntimeException( - "Error invoking method in TableMap: " + method, e); - } - }); + final TableMap resultMap = this.underlyingTableMap.transformTablesWithMap(otherMap, (x, y) -> { + final Object[] rewrittenArgs = Arrays.copyOf(args, args.length); + rewrittenArgs[indexOfProxy] = y; + try { + return (Table) method.invoke(x, rewrittenArgs); + } catch (IllegalAccessException | InvocationTargetException e) { + throw new RuntimeException("Error invoking method in TableMap: " + method, e); + } + }); - references.removeIf(Objects::isNull); - references.forEach(((TableMapImpl) resultMap)::addParentReference); + references.removeIf(Objects::isNull); + references.forEach(((TableMapImpl) resultMap)::addParentReference); - return makeProxy(resultMap, strictKeys, allowCoalesce, sanityCheckJoins); - }); + return makeProxy(resultMap, strictKeys, allowCoalesce, sanityCheckJoins); + }); } - private Object verifyDisjointJoinKeys(final String description, - final Map joinKeyToTableKey, final Object tableKey, final String[] keyNames, - final Table keyTable) { + private Object verifyDisjointJoinKeys(final String description, final Map joinKeyToTableKey, + final Object tableKey, final String[] keyNames, final Table keyTable) { final JoinSanityListener listener = - new JoinSanityListener(description, joinKeyToTableKey, tableKey, keyNames, keyTable); + new JoinSanityListener(description, joinKeyToTableKey, tableKey, keyNames, keyTable); listener.checkSanity(keyTable.getIndex()); if (((DynamicTable) keyTable).isRefreshing()) { 
@@ -315,8 +292,7 @@ private synchronized Table merge() { if (!Liveness.verifyCachedObjectForReuse(coalesced)) { coalesced = underlyingTableMap.merge(); - final Map consistentAttributes = - getAttributes(false, Collections.emptySet()); + final Map consistentAttributes = getAttributes(false, Collections.emptySet()); consistentAttributes.forEach(coalesced::setAttribute); } return coalesced; @@ -334,8 +310,8 @@ private Table asTable(boolean strictKeys, boolean allowCoalesce, boolean sanityC } private TransformableTableMap.AsTableBuilder asTableBuilder() { - return new TransformableTableMap.AsTableBuilder(underlyingTableMap) - .allowCoalesce(allowCoalesce).sanityCheckJoin(sanityCheckJoins).strictKeys(strictKeys); + return new TransformableTableMap.AsTableBuilder(underlyingTableMap).allowCoalesce(allowCoalesce) + .sanityCheckJoin(sanityCheckJoins).strictKeys(strictKeys); } @Override @@ -349,8 +325,7 @@ public TableDefinition getDefinition() { return it.next().getDefinition(); } // TODO: maybe the TableMap should actually know it's definition - throw new IllegalArgumentException( - "No tables exist in the table map, can not determine the definition."); + throw new IllegalArgumentException("No tables exist in the table map, can not determine the definition."); } private boolean hasAttribute(String name) { @@ -364,7 +339,7 @@ private boolean hasAttribute(String name) { final boolean hasAttribute = it.next().hasAttribute(name); if (hasAttribute != expected) { throw new IllegalArgumentException( - "Underlying tables do not have consistent presence for attribute " + name); + "Underlying tables do not have consistent presence for attribute " + name); } } return expected; @@ -381,7 +356,7 @@ private Object getAttribute(String name) { final Object thisAttribute = it.next().getAttribute(name); if (!(Objects.equals(thisAttribute, expected))) { throw new IllegalArgumentException( - "Underlying tables do not have consistent value for attribute " + name); + "Underlying tables do not 
have consistent value for attribute " + name); } } return expected; @@ -390,13 +365,12 @@ private Object getAttribute(String name) { /** * Get the common attributes for the merged table. * - * @param assertConsistency if true, throw an IllegalArgumentException if the attributes are not - * consistent; otherwise return only the attributes which are consistent + * @param assertConsistency if true, throw an IllegalArgumentException if the attributes are not consistent; + * otherwise return only the attributes which are consistent * @param excluded a set of attributes to exclude from the result * @return the set of common attributes for the merged table */ - private Map getAttributes(boolean assertConsistency, - Collection excluded) { + private Map getAttributes(boolean assertConsistency, Collection excluded) { final Collection

    underlyingTables = underlyingTableMap.values(); if (underlyingTables.isEmpty()) { return Collections.emptyMap(); @@ -409,14 +383,13 @@ private Map getAttributes(boolean assertConsistency, if (assertConsistency) { // if we have no consistency, we should bomb if (!theseAttributes.equals(expected)) { - throw new IllegalArgumentException( - "Underlying tables do not have consistent attributes."); + throw new IllegalArgumentException("Underlying tables do not have consistent attributes."); } } else { boolean expectedCopied = false; // make a set of consistent attributes - for (final Iterator> expectedIt = - expected.entrySet().iterator(); expectedIt.hasNext();) { + for (final Iterator> expectedIt = expected.entrySet().iterator(); expectedIt + .hasNext();) { final Map.Entry expectedEntry = expectedIt.next(); final Object expectedValue = expectedEntry.getValue(); final Object thisValue = theseAttributes.get(expectedEntry.getKey()); @@ -455,8 +428,7 @@ private Set getAttributeNames() { while (it.hasNext()) { final Set theseAttributes = it.next().getAttributeNames(); if (!theseAttributes.equals(expected)) { - throw new IllegalArgumentException( - "Underlying tables do not have consistent attribute sets."); + throw new IllegalArgumentException("Underlying tables do not have consistent attribute sets."); } } @@ -469,10 +441,9 @@ private static class JoinSanityListener extends InstrumentedShiftAwareListenerAd private final String description; private final Map joinKeyToTableKey; - private JoinSanityListener(String description, Map joinKeyToTableKey, - Object tableKey, String[] keyNames, Table keyTable) { - super("TableMapProxy JoinSanityListener-" + description, (DynamicTable) keyTable, - false); + private JoinSanityListener(String description, Map joinKeyToTableKey, Object tableKey, + String[] keyNames, Table keyTable) { + super("TableMapProxy JoinSanityListener-" + description, (DynamicTable) keyTable, false); this.description = description; this.joinKeyToTableKey = 
joinKeyToTableKey; keyColumns = keyTable.getColumnSources().toArray(new ColumnSource[keyNames.length]); @@ -493,8 +464,7 @@ private void checkSanity(Index index) { final Object existing = joinKeyToTableKey.putIfAbsent(joinKey, tableKey); if (existing != null && !Objects.equals(existing, tableKey)) { throw new IllegalArgumentException(description + " join key \"" + joinKey - + "\" exists in multiple TableMap keys, \"" + existing + "\" and \"" - + tableKey + "\""); + + "\" exists in multiple TableMap keys, \"" + existing + "\" and \"" + tableKey + "\""); } } } diff --git a/DB/src/main/java/io/deephaven/db/v2/TableMapSupplier.java b/DB/src/main/java/io/deephaven/db/v2/TableMapSupplier.java index 9ffc8716cdc..43843ada54e 100644 --- a/DB/src/main/java/io/deephaven/db/v2/TableMapSupplier.java +++ b/DB/src/main/java/io/deephaven/db/v2/TableMapSupplier.java @@ -27,8 +27,7 @@ public class TableMapSupplier implements TableMap { // This list of functions is for table map transformations private final List functions; - private final WeakReferenceManager internalListeners = - new WeakReferenceManager<>(true); + private final WeakReferenceManager internalListeners = new WeakReferenceManager<>(true); @ReferentialIntegrity private final Listener internalListener = (key, table) -> { @@ -42,18 +41,21 @@ public class TableMapSupplier implements TableMap { }); }; - public TableMapSupplier(TableMap sourceMap, - List> functions) { + public TableMapSupplier(TableMap sourceMap, List> functions) { this.sourceMap = sourceMap; this.functions = functions.stream() - .map(TableMapFunctionAdapter::of) - .map(f -> new TransformTablesFunction(null, f)) - .collect(Collectors.toCollection(ArrayList::new)); + .map(TableMapFunctionAdapter::of) + .map(f -> new TransformTablesFunction(null, f)) + .collect(Collectors.toCollection(ArrayList::new)); sourceMap.addListener(internalListener); } - private TableMapSupplier(TableMap sourceMap, List functions, - boolean sentinel) { // sentinel forces new 
type-signature for constructor + private TableMapSupplier(TableMap sourceMap, List functions, boolean sentinel) { // sentinel + // forces + // new + // type-signature + // for + // constructor this.sourceMap = sourceMap; this.functions = new ArrayList<>(functions); sourceMap.addListener(internalListener); @@ -105,8 +107,7 @@ public Collection> entrySet() { @Override public Collection
    values() { - return sourceMap.entrySet().stream().map(this::applyOperations) - .collect(Collectors.toList()); + return sourceMap.entrySet().stream().map(this::applyOperations).collect(Collectors.toList()); } @Override @@ -158,17 +159,15 @@ public TableMap transformTablesWithKey(BiFunction function @Override public TableMap transformTablesWithKey(TableDefinition returnDefinition, - BiFunction function) { + BiFunction function) { final TableMapSupplier copy = new TableMapSupplier(sourceMap, functions, true); copy.functions.add(new TransformTablesFunction(returnDefinition, function)); return copy; } @Override - public TableMap transformTablesWithMap(TableMap otherMap, - BiFunction function) { - throw new UnsupportedOperationException( - "TableSupplierMap does not support transformTablesWithMap"); + public TableMap transformTablesWithMap(TableMap otherMap, BiFunction function) { + throw new UnsupportedOperationException("TableSupplierMap does not support transformTablesWithMap"); } @Override @@ -193,15 +192,13 @@ public WeakReference getWeakReference() { @Override public Table merge() { - // note: this is different than the previous logic - we are doing are operations on the - // inner tables first + // note: this is different than the previous logic - we are doing are operations on the inner tables first return applyFunctionsToSourceMap().merge(); } @Override public Table asTable(boolean strictKeys, boolean allowCoalesce, boolean sanityCheckJoins) { - // note: this is different than the previous logic - we are doing are operations on the - // inner tables first + // note: this is different than the previous logic - we are doing are operations on the inner tables first return applyFunctionsToSourceMap().asTable(strictKeys, allowCoalesce, sanityCheckJoins); } diff --git a/DB/src/main/java/io/deephaven/db/v2/TableMapTransformThreadPool.java b/DB/src/main/java/io/deephaven/db/v2/TableMapTransformThreadPool.java index 6a1f05a1724..3d68267e421 100644 --- 
a/DB/src/main/java/io/deephaven/db/v2/TableMapTransformThreadPool.java +++ b/DB/src/main/java/io/deephaven/db/v2/TableMapTransformThreadPool.java @@ -8,13 +8,13 @@ public class TableMapTransformThreadPool { final static int TRANSFORM_THREADS = - Configuration.getInstance().getIntegerWithDefault("TableMap.transformThreads", 1); + Configuration.getInstance().getIntegerWithDefault("TableMap.transformThreads", 1); final static ExecutorService executorService; static { final ThreadGroup threadGroup = new ThreadGroup("TableMapTransformThreadPool"); - final NamingThreadFactory threadFactory = new NamingThreadFactory(threadGroup, - TableMapProxyHandler.class, "transformExecutor", true); + final NamingThreadFactory threadFactory = + new NamingThreadFactory(threadGroup, TableMapProxyHandler.class, "transformExecutor", true); executorService = Executors.newFixedThreadPool(TRANSFORM_THREADS, threadFactory); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/TableSupplier.java b/DB/src/main/java/io/deephaven/db/v2/TableSupplier.java index 921933f65b0..369a69ef180 100644 --- a/DB/src/main/java/io/deephaven/db/v2/TableSupplier.java +++ b/DB/src/main/java/io/deephaven/db/v2/TableSupplier.java @@ -27,8 +27,8 @@ import org.jetbrains.annotations.NotNull; /** - * TableSupplier creates a Proxy to a Table with a list of Table operations to be applied when a - * filter method is called.
    + * TableSupplier creates a Proxy to a Table with a list of Table operations to be applied when a filter method is + * called.
    */ @ScriptApi public class TableSupplier extends LivenessArtifact implements InvocationHandler { @@ -37,38 +37,32 @@ public class TableSupplier extends LivenessArtifact implements InvocationHandler private static final Map HIJACKED_DELEGATIONS = new HashMap<>(); static { try { - HIJACKED_DELEGATIONS.put(Table.class.getMethod("coalesce"), (proxy, method, - args) -> ((TableSupplier) Proxy.getInvocationHandler(proxy)).coalesce()); + HIJACKED_DELEGATIONS.put(Table.class.getMethod("coalesce"), + (proxy, method, args) -> ((TableSupplier) Proxy.getInvocationHandler(proxy)).coalesce()); HIJACKED_DELEGATIONS.put(Table.class.getMethod("hasColumns", Collection.class), - (proxy, method, args) -> ((TableSupplier) Proxy.getInvocationHandler(proxy)) - .hasColumns((Collection) args[0])); - HIJACKED_DELEGATIONS.put(Table.class.getMethod("hasColumns", String[].class), - (proxy, method, args) -> ((TableSupplier) Proxy.getInvocationHandler(proxy)) - .hasColumns((String[]) args[0])); - HIJACKED_DELEGATIONS.put(Table.class.getMethod("byExternal", String[].class), - (proxy, method, args) -> ((TableSupplier) Proxy.getInvocationHandler(proxy)) - .byExternal((String[]) args[0])); - HIJACKED_DELEGATIONS.put( - Table.class.getMethod("byExternal", boolean.class, String[].class), - (proxy, method, args) -> ((TableSupplier) Proxy.getInvocationHandler(proxy)) - .byExternal((Boolean) args[0], (String[]) args[1])); + (proxy, method, args) -> ((TableSupplier) Proxy.getInvocationHandler(proxy)) + .hasColumns((Collection) args[0])); + HIJACKED_DELEGATIONS.put(Table.class.getMethod("hasColumns", String[].class), (proxy, method, + args) -> ((TableSupplier) Proxy.getInvocationHandler(proxy)).hasColumns((String[]) args[0])); + HIJACKED_DELEGATIONS.put(Table.class.getMethod("byExternal", String[].class), (proxy, method, + args) -> ((TableSupplier) Proxy.getInvocationHandler(proxy)).byExternal((String[]) args[0])); + HIJACKED_DELEGATIONS.put(Table.class.getMethod("byExternal", boolean.class, 
String[].class), + (proxy, method, args) -> ((TableSupplier) Proxy.getInvocationHandler(proxy)) + .byExternal((Boolean) args[0], (String[]) args[1])); HIJACKED_DELEGATIONS.put(Table.class.getMethod("apply", Function.Unary.class), - (proxy, method, args) -> ((TableSupplier) Proxy.getInvocationHandler(proxy)) - .apply((Function.Unary) args[0], (Table) proxy)); - HIJACKED_DELEGATIONS.put( - Table.class.getMethod("setAttribute", String.class, Object.class), - (proxy, method, args) -> ((TableSupplier) Proxy.getInvocationHandler(proxy)) - .setAttribute((String) args[0], args[1])); - HIJACKED_DELEGATIONS.put(Table.class.getMethod("getAttribute", String.class), - (proxy, method, args) -> ((TableSupplier) Proxy.getInvocationHandler(proxy)) - .getAttribute((String) args[0])); - HIJACKED_DELEGATIONS.put(Table.class.getMethod("getAttributeNames"), (proxy, method, - args) -> ((TableSupplier) Proxy.getInvocationHandler(proxy)).getAttributeNames()); - HIJACKED_DELEGATIONS.put(Table.class.getMethod("hasAttribute", String.class), - (proxy, method, args) -> ((TableSupplier) Proxy.getInvocationHandler(proxy)) - .hasAttribute((String) args[0])); - HIJACKED_DELEGATIONS.put(Table.class.getMethod("getAttributes"), (proxy, method, - args) -> ((TableSupplier) Proxy.getInvocationHandler(proxy)).getAttributes()); + (proxy, method, args) -> ((TableSupplier) Proxy.getInvocationHandler(proxy)) + .apply((Function.Unary) args[0], (Table) proxy)); + HIJACKED_DELEGATIONS.put(Table.class.getMethod("setAttribute", String.class, Object.class), + (proxy, method, args) -> ((TableSupplier) Proxy.getInvocationHandler(proxy)) + .setAttribute((String) args[0], args[1])); + HIJACKED_DELEGATIONS.put(Table.class.getMethod("getAttribute", String.class), (proxy, method, + args) -> ((TableSupplier) Proxy.getInvocationHandler(proxy)).getAttribute((String) args[0])); + HIJACKED_DELEGATIONS.put(Table.class.getMethod("getAttributeNames"), + (proxy, method, args) -> ((TableSupplier) 
Proxy.getInvocationHandler(proxy)).getAttributeNames()); + HIJACKED_DELEGATIONS.put(Table.class.getMethod("hasAttribute", String.class), (proxy, method, + args) -> ((TableSupplier) Proxy.getInvocationHandler(proxy)).hasAttribute((String) args[0])); + HIJACKED_DELEGATIONS.put(Table.class.getMethod("getAttributes"), + (proxy, method, args) -> ((TableSupplier) Proxy.getInvocationHandler(proxy)).getAttributes()); } catch (NoSuchMethodException e) { throw new RuntimeException(e); } @@ -121,16 +115,14 @@ private static boolean isFilterOperation(Method method) { */ @ScriptApi public static Table build(Table sourceTable) { - return (Table) Proxy.newProxyInstance(TableSupplier.class.getClassLoader(), - PROXY_INTERFACES, - new TableSupplier(sourceTable, TableTools.newTable(sourceTable.getDefinition()), - Collections.emptyList(), false)); + return (Table) Proxy.newProxyInstance(TableSupplier.class.getClassLoader(), PROXY_INTERFACES, + new TableSupplier(sourceTable, TableTools.newTable(sourceTable.getDefinition()), + Collections.emptyList(), false)); } /** - * Sets a Table Supplier to be complete. This means that the supplier will generate a table the - * next time a filter operation is called. This method has no affect on Tables that are not - * suppliers. + * Sets a Table Supplier to be complete. This means that the supplier will generate a table the next time a filter + * operation is called. This method has no affect on Tables that are not suppliers. * * @param maybeSupplier a Table that may be a supplier * @return a completed Supplier or unaltered Table @@ -141,8 +133,8 @@ public static Table complete(Table maybeSupplier) { } /** - * Gets an empty version of the supplied table with all current operations applied to it. If the - * Table is not a Table Supplier then this will return the table unaltered. + * Gets an empty version of the supplied table with all current operations applied to it. 
If the Table is not a + * Table Supplier then this will return the table unaltered. * * @param maybeSupplier a Table that may be a supplier * @return an applied empty table or an unaltered table @@ -153,7 +145,7 @@ public static Table getAppliedEmptyTable(Table maybeSupplier) { } private static Table callTableSupplierMethod(Table maybeSupplier, - java.util.function.Function method) { + java.util.function.Function method) { if (maybeSupplier == null) { return null; } @@ -169,8 +161,8 @@ private static Table callTableSupplierMethod(Table maybeSupplier, } } - private TableSupplier(Table sourceTable, Table appliedEmptyTable, - List tableOperations, boolean isComplete) { + private TableSupplier(Table sourceTable, Table appliedEmptyTable, List tableOperations, + boolean isComplete) { this.sourceTable = sourceTable; this.appliedEmptyTable = appliedEmptyTable; // This is intended to be a copy @@ -181,10 +173,8 @@ private TableSupplier(Table sourceTable, Table appliedEmptyTable, private Table complete() { log.info().append("TableSupplier setting complete").endl(); - final TableSupplier copy = - new TableSupplier(sourceTable, appliedEmptyTable, tableOperations, true); - return (Table) Proxy.newProxyInstance(TableSupplier.class.getClassLoader(), - PROXY_INTERFACES, copy); + final TableSupplier copy = new TableSupplier(sourceTable, appliedEmptyTable, tableOperations, true); + return (Table) Proxy.newProxyInstance(TableSupplier.class.getClassLoader(), PROXY_INTERFACES, copy); } private Table getAppliedEmptyTable() { @@ -207,19 +197,17 @@ public Object invoke(Object proxy, Method method, Object[] args) throws Throwabl // All TableMap operations should be hijacked if (TableMap.class.isAssignableFrom(method.getReturnType())) { throw new IllegalStateException( - "TableSupplier byExternal methods should be hijacked but invoked " - + method.getName()); + "TableSupplier byExternal methods should be hijacked but invoked " + method.getName()); } - log.info().append("TableSupplier 
invoking on applied empty table ").append(method.getName()) - .endl(); + log.info().append("TableSupplier invoking on applied empty table ").append(method.getName()).endl(); // Let the source table handle everything else return method.invoke(appliedEmptyTable, args); } private Table deferOrExecute(Method method, Object[] args) - throws InvocationTargetException, IllegalAccessException { + throws InvocationTargetException, IllegalAccessException { if (isComplete && isFilterOperation(method)) { return execute(method, args); } else { @@ -227,15 +215,13 @@ private Table deferOrExecute(Method method, Object[] args) } } - private Table defer(Method method, Object[] args) - throws InvocationTargetException, IllegalAccessException { + private Table defer(Method method, Object[] args) throws InvocationTargetException, IllegalAccessException { log.info().append("TableSupplier defer ").append(method.getName()).endl(); // Defer the table operation by adding to a copy of this table - final TableSupplier copy = new TableSupplier(sourceTable, - (Table) method.invoke(appliedEmptyTable, args), tableOperations, isComplete); + final TableSupplier copy = new TableSupplier(sourceTable, (Table) method.invoke(appliedEmptyTable, args), + tableOperations, isComplete); copy.tableOperations.add(new Operation(method, args)); - return (Table) Proxy.newProxyInstance(TableSupplier.class.getClassLoader(), - PROXY_INTERFACES, copy); + return (Table) Proxy.newProxyInstance(TableSupplier.class.getClassLoader(), PROXY_INTERFACES, copy); } private Table execute(Method method, Object[] args) { @@ -249,8 +235,7 @@ private Table execute(Method method, Object[] args) { } } - private Table applyOperations(Table table) - throws IllegalAccessException, InvocationTargetException { + private Table applyOperations(Table table) throws IllegalAccessException, InvocationTargetException { for (Operation operation : tableOperations) { table = (Table) operation.method.invoke(table, operation.args); } @@ -260,8 +245,8 
@@ private Table applyOperations(Table table) // region Hijacked Operations /** - * Coalesce will apply all of the table operations at any point in the suppliers construction. - * The supplier need not be complete nor does coalesce require a filter operation. + * Coalesce will apply all of the table operations at any point in the suppliers construction. The supplier need not + * be complete nor does coalesce require a filter operation. * * @return a coalesced Table from the supplier */ @@ -275,13 +260,12 @@ private Table coalesce() { } /** - * This hasColumns implementation is intentionally permissive. It returns true if the table - * supplier a column prior to applying operations or after applying operations. This allows - * various one click implementations to succeed when they check for columns. + * This hasColumns implementation is intentionally permissive. It returns true if the table supplier a column prior + * to applying operations or after applying operations. This allows various one click implementations to succeed + * when they check for columns. * * @param columnNames the column names to check - * @return true if the table supplier has each column either before or after operations, false - * otherwise + * @return true if the table supplier has each column either before or after operations, false otherwise */ private boolean hasColumns(Collection columnNames) { // Check that either the "before table" or "after table" has the column @@ -303,7 +287,7 @@ private boolean hasColumns(final String... columnNames) { private TableMap byExternal(boolean dropKeys, String... columnNames) { return new TableMapSupplier(sourceTable.byExternal(dropKeys, columnNames), tableOperations, - Collections.emptyList()); + Collections.emptyList()); } private TableMap byExternal(String... 
columnNames) { @@ -366,8 +350,7 @@ private static class TableMapSupplier implements TableMap { // This list of functions is for table map transformations private final List functions; - TableMapSupplier(TableMap sourceMap, List tableOperations, - List functions) { + TableMapSupplier(TableMap sourceMap, List tableOperations, List functions) { this.sourceMap = sourceMap; this.tableOperations = new ArrayList<>(tableOperations); this.functions = new ArrayList<>(functions); @@ -383,7 +366,7 @@ public Table get(Object key) { } private Table applyOperations(Object key, Table table) - throws IllegalAccessException, InvocationTargetException { + throws IllegalAccessException, InvocationTargetException { // Apply operations from the supplier for (Operation operation : tableOperations) { table = (Table) operation.method.invoke(table, operation.args); @@ -398,8 +381,7 @@ private Table applyOperations(Object key, Table table) } @Override - public Table getWithTransform(Object key, - java.util.function.Function transform) { + public Table getWithTransform(Object key, java.util.function.Function transform) { return transform.apply(get(key)); } @@ -460,26 +442,22 @@ public R apply(Function.Unary function) { @Override public TableMap transformTablesWithKey(BiFunction function) { - final TableMapSupplier copy = - new TableMapSupplier(sourceMap, tableOperations, functions); + final TableMapSupplier copy = new TableMapSupplier(sourceMap, tableOperations, functions); copy.functions.add(new TransformTablesFunction(function)); return copy; } @Override public TableMap transformTablesWithKey(TableDefinition returnDefinition, - BiFunction function) { - final TableMapSupplier copy = - new TableMapSupplier(sourceMap, tableOperations, functions); + BiFunction function) { + final TableMapSupplier copy = new TableMapSupplier(sourceMap, tableOperations, functions); copy.functions.add(new TransformTablesFunction(returnDefinition, function)); return copy; } @Override - public TableMap 
transformTablesWithMap(TableMap otherMap, - BiFunction function) { - throw new UnsupportedOperationException( - "TableSupplierMap does not support transformTablesWithMap"); + public TableMap transformTablesWithMap(TableMap otherMap, BiFunction function) { + throw new UnsupportedOperationException("TableSupplierMap does not support transformTablesWithMap"); } @Override diff --git a/DB/src/main/java/io/deephaven/db/v2/TableUpdateValidator.java b/DB/src/main/java/io/deephaven/db/v2/TableUpdateValidator.java index 7448175c17d..6a30871d255 100644 --- a/DB/src/main/java/io/deephaven/db/v2/TableUpdateValidator.java +++ b/DB/src/main/java/io/deephaven/db/v2/TableUpdateValidator.java @@ -24,24 +24,20 @@ public class TableUpdateValidator implements QueryTable.Operation { private static final boolean useSharedContext = Configuration.getInstance() - .getBooleanForClassWithDefault(TableUpdateValidator.class, "useSharedContext", true); - private static final boolean aggressiveUpdateValidation = - Configuration.getInstance().getBooleanForClassWithDefault(TableUpdateValidator.class, - "aggressiveUpdateValidation", false); + .getBooleanForClassWithDefault(TableUpdateValidator.class, "useSharedContext", true); + private static final boolean aggressiveUpdateValidation = Configuration.getInstance() + .getBooleanForClassWithDefault(TableUpdateValidator.class, "aggressiveUpdateValidation", false); private static final int CHUNK_SIZE = 4096; public static TableUpdateValidator make(final QueryTable tableToValidate) { return make(null, tableToValidate); } - public static TableUpdateValidator make(final String description, - final QueryTable tableToValidate) { + public static TableUpdateValidator make(final String description, final QueryTable tableToValidate) { if (!tableToValidate.isRefreshing()) { - throw new IllegalArgumentException( - "Validator has nothing to validate if input table is not refreshing."); + throw new IllegalArgumentException("Validator has nothing to validate if input 
table is not refreshing."); } - final TableUpdateValidator validator = - new TableUpdateValidator(description, tableToValidate); + final TableUpdateValidator validator = new TableUpdateValidator(description, tableToValidate); tableToValidate.getResult(validator); return validator; } @@ -59,15 +55,13 @@ private TableUpdateValidator(final String description, final DynamicTable tableT this.description = description == null ? tableToValidate.getDescription() : description; this.tableToValidate = tableToValidate; this.validationMCS = tableToValidate.newModifiedColumnSet( - tableToValidate.getColumnSourceMap().keySet() - .toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)); + tableToValidate.getColumnSourceMap().keySet().toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)); Assert.neq(validationMCS, "validationMCS", ModifiedColumnSet.ALL, "ModifiedColumnSet.ALL"); - Assert.neq(validationMCS, "validationMCS", ModifiedColumnSet.EMPTY, - "ModifiedColumnSet.EMPTY"); + Assert.neq(validationMCS, "validationMCS", ModifiedColumnSet.EMPTY, "ModifiedColumnSet.EMPTY"); columnInfos = tableToValidate.getColumnSourceMap().keySet().stream() - .map((name) -> new ColumnInfo(tableToValidate, name)) - .toArray(ColumnInfo[]::new); + .map((name) -> new ColumnInfo(tableToValidate, name)) + .toArray(ColumnInfo[]::new); } private SafeCloseable maybeOpenSharedContext() { @@ -101,19 +95,17 @@ public String getLogPrefix() { @Override public Result initialize(boolean usePrev, long beforeClock) { - trackingIndex = usePrev ? tableToValidate.getIndex().getPrevIndex() - : tableToValidate.getIndex().clone(); + trackingIndex = usePrev ? 
tableToValidate.getIndex().getPrevIndex() : tableToValidate.getIndex().clone(); resultTable = new QueryTable(Index.FACTORY.getEmptyIndex(), Collections.emptyMap()); resultTable.setFlat(); final ShiftAwareListener listener; try (final SafeCloseable ignored1 = maybeOpenSharedContext(); - final SafeCloseable ignored2 = new SafeCloseableList(columnInfos)) { + final SafeCloseable ignored2 = new SafeCloseableList(columnInfos)) { updateValues(ModifiedColumnSet.ALL, trackingIndex, usePrev); - listener = new BaseTable.ShiftAwareListenerImpl(getDescription(), tableToValidate, - resultTable) { + listener = new BaseTable.ShiftAwareListenerImpl(getDescription(), tableToValidate, resultTable) { @Override public void onUpdate(final Update upstream) { TableUpdateValidator.this.onUpdate(upstream); @@ -129,20 +121,18 @@ public QueryTable getResultTable() { } public void validate() { - Assert.equals(trackingIndex, "trackingIndex", tableToValidate.getIndex(), - "tableToValidate.getIndex()"); + Assert.equals(trackingIndex, "trackingIndex", tableToValidate.getIndex(), "tableToValidate.getIndex()"); } public void deepValidation() { try (final SafeCloseable ignored1 = maybeOpenSharedContext(); - final SafeCloseable ignored2 = new SafeCloseableList(columnInfos)) { + final SafeCloseable ignored2 = new SafeCloseableList(columnInfos)) { validate(); - validateValues("EndOfTickValidation", ModifiedColumnSet.ALL, trackingIndex, false, - false); + validateValues("EndOfTickValidation", ModifiedColumnSet.ALL, trackingIndex, false, false); if (!issues.isEmpty()) { - final StringBuilder result = new StringBuilder( - "Table to validate " + getDescription() + " has inconsistent state:"); + final StringBuilder result = + new StringBuilder("Table to validate " + getDescription() + " has inconsistent state:"); for (final String issue : issues) { result.append("\n - ").append(issue); } @@ -158,25 +148,23 @@ private void onUpdate(final ShiftAwareListener.Update upstream) { } try (final SafeCloseable 
ignored1 = maybeOpenSharedContext(); - final SafeCloseable ignored2 = new SafeCloseableList(columnInfos)) { + final SafeCloseable ignored2 = new SafeCloseableList(columnInfos)) { if (!upstream.modifiedColumnSet.isCompatibleWith(validationMCS)) { noteIssue( - () -> "upstream.modifiedColumnSet is not compatible with table.newModifiedColumnSet(...): upstream=" - + upstream.modifiedColumnSet + " initialized=" + validationMCS); + () -> "upstream.modifiedColumnSet is not compatible with table.newModifiedColumnSet(...): upstream=" + + upstream.modifiedColumnSet + " initialized=" + validationMCS); } // remove if (aggressiveUpdateValidation) { validateValues("pre-update", ModifiedColumnSet.ALL, trackingIndex, true, false); } else { - validateValues("pre-update removed", ModifiedColumnSet.ALL, upstream.removed, true, - false); - validateValues("pre-update modified", upstream.modifiedColumnSet, - upstream.getModifiedPreShift(), true, false); + validateValues("pre-update removed", ModifiedColumnSet.ALL, upstream.removed, true, false); + validateValues("pre-update modified", upstream.modifiedColumnSet, upstream.getModifiedPreShift(), true, + false); } - validateIndexesEqual("pre-update index", trackingIndex, - tableToValidate.getIndex().getPrevIndex()); + validateIndexesEqual("pre-update index", trackingIndex, tableToValidate.getIndex().getPrevIndex()); trackingIndex.remove(upstream.removed); Arrays.stream(columnInfos).forEach((ci) -> ci.remove(upstream.removed)); @@ -186,16 +174,15 @@ private void onUpdate(final ShiftAwareListener.Update upstream) { if (aggressiveUpdateValidation) { final Index unmodified = trackingIndex.minus(upstream.modified); - validateValues("post-shift unmodified", ModifiedColumnSet.ALL, unmodified, false, - false); - validateValues("post-shift unmodified columns", upstream.modifiedColumnSet, - upstream.modified, false, true); + validateValues("post-shift unmodified", ModifiedColumnSet.ALL, unmodified, false, false); + validateValues("post-shift 
unmodified columns", upstream.modifiedColumnSet, upstream.modified, false, + true); } // added if (trackingIndex.overlaps(upstream.added)) { noteIssue(() -> "post-shift index contains rows that are added: " - + trackingIndex.intersect(upstream.added)); + + trackingIndex.intersect(upstream.added)); } trackingIndex.insert(upstream.added); validateIndexesEqual("post-update index", trackingIndex, tableToValidate.getIndex()); @@ -205,8 +192,8 @@ private void onUpdate(final ShiftAwareListener.Update upstream) { updateValues(upstream.modifiedColumnSet, upstream.modified, false); if (!issues.isEmpty()) { - StringBuilder result = new StringBuilder( - "Table to validate " + getDescription() + " generated an erroneous update:"); + StringBuilder result = + new StringBuilder("Table to validate " + getDescription() + " generated an erroneous update:"); for (String issue : issues) { result.append("\n - ").append(issue); } @@ -231,8 +218,8 @@ private void validateIndexesEqual(final String what, final Index expected, final } } - // TODO: Should this string array actually just be the table output? with columns like - // 'expected', 'actual', 'row', 'cycle', etc? + // TODO: Should this string array actually just be the table output? with columns like 'expected', 'actual', 'row', + // 'cycle', etc? 
private final int MAX_ISSUES = 10; private final ArrayList issues = new ArrayList<>(); @@ -242,9 +229,8 @@ private void noteIssue(Supplier issue) { } } - private void validateValues(final String what, final ModifiedColumnSet columnsToCheck, - final Index toValidate, - final boolean usePrev, final boolean invertMCS) { + private void validateValues(final String what, final ModifiedColumnSet columnsToCheck, final Index toValidate, + final boolean usePrev, final boolean invertMCS) { try (final OrderedKeys.Iterator it = toValidate.getOrderedKeysIterator()) { while (it.hasMore()) { final OrderedKeys subKeys = it.getNextOrderedKeysWithLength(CHUNK_SIZE); @@ -258,8 +244,7 @@ private void validateValues(final String what, final ModifiedColumnSet columnsTo } } - private void updateValues(final ModifiedColumnSet columnsToUpdate, final Index toUpdate, - final boolean usePrev) { + private void updateValues(final ModifiedColumnSet columnsToUpdate, final Index toUpdate, final boolean usePrev) { try (final OrderedKeys.Iterator it = toUpdate.getOrderedKeysIterator()) { while (it.hasMore()) { final OrderedKeys subKeys = it.getNextOrderedKeysWithLength(CHUNK_SIZE); @@ -274,8 +259,8 @@ private void updateValues(final ModifiedColumnSet columnsToUpdate, final Index t } /** - * Some things won't last forever, like a DbArray that is really a column wrapper. We need to - * turn those into something that will persist properly until the next clock cycle. + * Some things won't last forever, like a DbArray that is really a column wrapper. We need to turn those into + * something that will persist properly until the next clock cycle. * * @param fromSource * @return a version of fromSource that does not reference ephemeral stuff. 
@@ -336,8 +321,8 @@ private ColumnInfo(DynamicTable tableToValidate, String columnName) { this.source = tableToValidate.getColumnSource(columnName); this.isPrimitive = source.getType().isPrimitive(); - this.expectedSource = SparseArrayColumnSource - .getSparseMemoryColumnSource(source.getType(), source.getComponentType()); + this.expectedSource = + SparseArrayColumnSource.getSparseMemoryColumnSource(source.getType(), source.getComponentType()); this.chunkEquals = ChunkEquals.makeEqual(source.getChunkType()); } @@ -351,16 +336,14 @@ private ColumnSource.GetContext sourceGetContext() { private ColumnSource.FillContext sourceFillContext() { if (sourceFillContext == null) { - sourceFillContext = - isPrimitive ? null : this.source.makeFillContext(CHUNK_SIZE, sharedContext); + sourceFillContext = isPrimitive ? null : this.source.makeFillContext(CHUNK_SIZE, sharedContext); } return sourceFillContext; } private WritableObjectChunk sourceFillChunk() { if (sourceFillChunk == null) { - sourceFillChunk = - isPrimitive ? null : WritableObjectChunk.makeWritableChunk(CHUNK_SIZE); + sourceFillChunk = isPrimitive ? 
null : WritableObjectChunk.makeWritableChunk(CHUNK_SIZE); } return sourceFillChunk; } @@ -397,8 +380,7 @@ public void remove(final Index toRemove) { private void updateValues(final OrderedKeys toUpdate, final boolean usePrev) { if (isPrimitive) { - expectedSource.fillFromChunk(expectedFillFromContext(), - getSourceChunk(toUpdate, usePrev), toUpdate); + expectedSource.fillFromChunk(expectedFillFromContext(), getSourceChunk(toUpdate, usePrev), toUpdate); return; } @@ -420,11 +402,10 @@ private void updateValues(final OrderedKeys toUpdate, final boolean usePrev) { expectedSource.fillFromChunk(expectedFillFromContext(), sourceFillChunk(), toUpdate); } - public void validateValues(final String what, final OrderedKeys toValidate, - final boolean usePrev) { + public void validateValues(final String what, final OrderedKeys toValidate, final boolean usePrev) { Assert.leq(toValidate.size(), "toValidate.size()", CHUNK_SIZE, "CHUNK_SIZE"); final Chunk expected = - expectedSource.getChunk(expectedGetContext(), toValidate); + expectedSource.getChunk(expectedGetContext(), toValidate); final Chunk actual = getSourceChunk(toValidate, usePrev); chunkEquals.equal(expected, actual, equalValuesDest()); MutableInt off = new MutableInt(); @@ -437,22 +418,21 @@ public void validateValues(final String what, final OrderedKeys toValidate, noteIssue(() -> { Object eValue = expectedSource.get(i); Object aValue = usePrev ? source.getPrev(i) : source.get(i); - String chunkEValue = ChunkUtils.extractKeyStringFromChunk( - expectedSource.getChunkType(), expected, off.intValue() - 1); - String chunkAValue = ChunkUtils.extractKeyStringFromChunk(source.getChunkType(), - actual, off.intValue() - 1); + String chunkEValue = ChunkUtils.extractKeyStringFromChunk(expectedSource.getChunkType(), expected, + off.intValue() - 1); + String chunkAValue = + ChunkUtils.extractKeyStringFromChunk(source.getChunkType(), actual, off.intValue() - 1); return what + (usePrev ? 
" (previous)" : "") + - " columnName=" + name + " k=" + i + - " (from source) expected=" + eValue + " actual=" + aValue + - " (from chunk) expected=" + chunkEValue + " actual=" + chunkAValue; + " columnName=" + name + " k=" + i + + " (from source) expected=" + eValue + " actual=" + aValue + + " (from chunk) expected=" + chunkEValue + " actual=" + chunkAValue; }); }); } - private Chunk getSourceChunk(OrderedKeys orderedKeys, - boolean usePrev) { + private Chunk getSourceChunk(OrderedKeys orderedKeys, boolean usePrev) { return usePrev ? source.getPrevChunk(sourceGetContext(), orderedKeys) - : source.getChunk(sourceGetContext(), orderedKeys); + : source.getChunk(sourceGetContext(), orderedKeys); } @Override diff --git a/DB/src/main/java/io/deephaven/db/v2/TimeTable.java b/DB/src/main/java/io/deephaven/db/v2/TimeTable.java index 678d3f4dd3d..09a53a212a7 100644 --- a/DB/src/main/java/io/deephaven/db/v2/TimeTable.java +++ b/DB/src/main/java/io/deephaven/db/v2/TimeTable.java @@ -24,8 +24,8 @@ /** * A TimeTable adds rows at a fixed interval with a single column named "Timestamp". * - * To create a TimeTable, you should use the - * {@link io.deephaven.db.tables.utils.TableTools#timeTable} family of methods. + * To create a TimeTable, you should use the {@link io.deephaven.db.tables.utils.TableTools#timeTable} family of + * methods. */ public class TimeTable extends QueryTable implements LiveTable { private static final Logger log = LoggerFactory.getLogger(TimeTable.class); @@ -48,8 +48,7 @@ public TimeTable(TimeProvider timeProvider, DBDateTime firstTime, long dbPeriod) if (dbPeriod <= 0) { throw new IllegalArgumentException("Invalid time period: " + dbPeriod + " nanoseconds"); } - this.entry = UpdatePerformanceTracker.getInstance() - .getEntry("TimeTable(" + firstTime + "," + dbPeriod + ")"); + this.entry = UpdatePerformanceTracker.getInstance().getEntry("TimeTable(" + firstTime + "," + dbPeriod + ")"); this.lastTime = firstTime == null ? 
null : new DBDateTime(firstTime.getNanos() - dbPeriod); binOffset = firstTime == null ? 0 : lastTime.getNanos() % dbPeriod; dateTimeArraySource = (DateTimeArraySource) getColumnSourceMap().get(TIMESTAMP); @@ -76,8 +75,7 @@ private void refresh(final boolean notifyListeners) { try { final DBDateTime dateTime = timeProvider.currentTime(); DBDateTime currentBinnedTime = new DBDateTime( - LongNumericPrimitives.lowerBin(dateTime.getNanos() - binOffset, dbPeriod) - + binOffset); + LongNumericPrimitives.lowerBin(dateTime.getNanos() - binOffset, dbPeriod) + binOffset); long rangeStart = lastIndex + 1; if (lastTime == null) { lastIndex = 0; @@ -92,12 +90,9 @@ private void refresh(final boolean notifyListeners) { dateTimeArraySource.set(lastIndex, lastTime); } if (rangeStart <= lastIndex) { - // If we have a period longer than 10s, print out that the timetable has been - // updated. This can be - // useful when analyzing what's gone wrong in the logs. It is capped at periods of - // 5s, so we don't - // end up with too much log spam for short interval time tables. 5s is not so - // coincidentally the period + // If we have a period longer than 10s, print out that the timetable has been updated. This can be + // useful when analyzing what's gone wrong in the logs. It is capped at periods of 5s, so we don't + // end up with too much log spam for short interval time tables. 5s is not so coincidentally the period // of the Jvm Heap: messages. 
if (dbPeriod >= 5_000_000_000L) { log.info().append("TimeTable updated to ").append(lastTime.toString()).endl(); @@ -105,8 +100,7 @@ private void refresh(final boolean notifyListeners) { final Index range = Index.FACTORY.getIndexByRange(rangeStart, lastIndex); getIndex().insert(range); if (notifyListeners) { - notifyListeners(range, Index.FACTORY.getEmptyIndex(), - Index.FACTORY.getEmptyIndex()); + notifyListeners(range, Index.FACTORY.getEmptyIndex(), Index.FACTORY.getEmptyIndex()); } } } finally { diff --git a/DB/src/main/java/io/deephaven/db/v2/TotalsTableBuilder.java b/DB/src/main/java/io/deephaven/db/v2/TotalsTableBuilder.java index 8830a0ebe36..9981cc49013 100644 --- a/DB/src/main/java/io/deephaven/db/v2/TotalsTableBuilder.java +++ b/DB/src/main/java/io/deephaven/db/v2/TotalsTableBuilder.java @@ -25,8 +25,8 @@ * Defines the default aggregations and display for a totals table. * *

    - * The builder is intended to be passed to the {@link Table#setTotalsTable(TotalsTableBuilder)} - * operation after the operations are applied. + * The builder is intended to be passed to the {@link Table#setTotalsTable(TotalsTableBuilder)} operation after the + * operations are applied. *

    */ @ScriptApi @@ -105,8 +105,7 @@ public TotalsTableBuilder setDefaultOperation(AggType defaultOperation) { */ @ScriptApi public TotalsTableBuilder setDefaultOperation(String defaultOperation) { - return setDefaultOperation( - EnumValue.caseInsensitiveValueOf(AggType.class, defaultOperation)); + return setDefaultOperation(EnumValue.caseInsensitiveValueOf(AggType.class, defaultOperation)); } /** @@ -174,8 +173,8 @@ public TotalsTableBuilder setOperation(String column, String operation, String f * Adds an operation for a column. * *

    - * The add method is used instead of the {@link #setOperation(String, String)} method when more - * than one aggregation per input column is desired. + * The add method is used instead of the {@link #setOperation(String, String)} method when more than one aggregation + * per input column is desired. *

    * * @param column the name of the column to operate on @@ -192,8 +191,8 @@ public TotalsTableBuilder addOperation(String column, AggType operation) { * Adds an operation for a column. * *

    - * The add method is used instead of the {@link #setOperation(String, AggType, String)} method - * when more than one aggregation per input column is desired. + * The add method is used instead of the {@link #setOperation(String, AggType, String)} method when more than one + * aggregation per input column is desired. *

    * * @param column the name of the column to operate on @@ -215,8 +214,8 @@ public TotalsTableBuilder addOperation(String column, AggType operation, String * Adds an operation for a column. * *

    - * The add method is used instead of the {@link #setOperation(String, String, String)} method - * when more than one aggregation per input column is desired. + * The add method is used instead of the {@link #setOperation(String, String, String)} method when more than one + * aggregation per input column is desired. *

    * * @param column the name of the column to operate on @@ -235,8 +234,8 @@ public TotalsTableBuilder addOperation(String column, String operation, String f * Adds an operation for a column. * *

    - * The add method is used instead of the {@link #setOperation(String, String)} method when more - * than one aggregation per input column is desired. + * The add method is used instead of the {@link #setOperation(String, String)} method when more than one aggregation + * per input column is desired. *

    * * @param column the name of the column to operate on @@ -288,8 +287,7 @@ public TotalsTableBuilder setFormat(String column, AggType agg, String format) { * Sets the format of a column. * * @param column the column to set the format for - * @param agg the aggregation type the format is relevant for, "*" for all - * aggregations + * @param agg the aggregation type the format is relevant for, "*" for all aggregations * @param format the format string * @return this TotalsTableBuilder */ @@ -297,8 +295,8 @@ public TotalsTableBuilder setFormat(String column, AggType agg, String format) { public TotalsTableBuilder setFormat(String column, String agg, String format) { if ("*".equals(agg)) { Arrays.stream(AggType.values()) - .filter(op -> op != AggType.Skip && op != AggType.Array) - .forEach(op -> setFormat(column, op, format)); + .filter(op -> op != AggType.Skip && op != AggType.Array) + .forEach(op -> setFormat(column, op, format)); return this; } @@ -325,16 +323,14 @@ public Map getFormats(String column) { public String buildDirective() { final StringBuilder builder = new StringBuilder(); builder.append(Boolean.toString(showTotalsByDefault)).append(',') - .append(Boolean.toString(showGrandTotalsByDefault)).append(',').append(defaultOperation) - .append(';'); + .append(Boolean.toString(showGrandTotalsByDefault)).append(',').append(defaultOperation).append(';'); operationMap.forEach((k, v) -> builder.append(k).append('=') - .append(v.stream().map(Object::toString).collect(Collectors.joining(":"))).append(',')); + .append(v.stream().map(Object::toString).collect(Collectors.joining(":"))).append(',')); builder.append(';'); formatMap.forEach((k, v) -> builder.append(k).append('=') - .append(v.entrySet().stream() - .map(ent -> ent.getKey().toString() + ':' + encodeFormula(ent.getValue())) - .collect(Collectors.joining("&"))) - .append(',')); + .append(v.entrySet().stream().map(ent -> ent.getKey().toString() + ':' + encodeFormula(ent.getValue())) + 
.collect(Collectors.joining("&"))) + .append(',')); return builder.toString(); } @@ -364,8 +360,8 @@ public String toString() { * Create a totals table from a source table. * *

    - * Given a source table that has had a TotalsTableBuilder applied, create a new totals table - * from the table. If no TotalsTableBuilder has been applied, then the columns are summed. + * Given a source table that has had a TotalsTableBuilder applied, create a new totals table from the table. If no + * TotalsTableBuilder has been applied, then the columns are summed. *

    * * @param source the source table @@ -389,8 +385,8 @@ public static TotalsTableBuilder get(Table source) { * Produce a TotalsTableBuilder from a directive string. * *

    - * The {@link #buildDirective()} method produces a String representation of a - * TotalsTableBuilder, this function is its inverse. + * The {@link #buildDirective()} method produces a String representation of a TotalsTableBuilder, this function is + * its inverse. *

    * * @param directive the directive. @@ -406,8 +402,7 @@ public static TotalsTableBuilder fromDirective(final String directive) { final String[] frontMatter = splitSemi[0].split(","); if (frontMatter.length < 3) { - throw new IllegalArgumentException( - "Invalid " + TOTALS_TABLE_ATTRIBUTE + ": " + directive); + throw new IllegalArgumentException("Invalid " + TOTALS_TABLE_ATTRIBUTE + ": " + directive); } builder.setShowTotalsByDefault(Boolean.parseBoolean(frontMatter[0])); builder.setShowGrandTotalsByDefault(Boolean.parseBoolean(frontMatter[1])); @@ -420,8 +415,8 @@ public static TotalsTableBuilder fromDirective(final String directive) { continue; final String[] kv = columnDirective.split("="); if (kv.length != 2) { - throw new IllegalArgumentException("Invalid " + TOTALS_TABLE_ATTRIBUTE + ": " - + directive + ", bad column " + columnDirective); + throw new IllegalArgumentException( + "Invalid " + TOTALS_TABLE_ATTRIBUTE + ": " + directive + ", bad column " + columnDirective); } final String[] operations = kv[1].split(":"); for (final String op : operations) { @@ -442,22 +437,21 @@ public static TotalsTableBuilder fromDirective(final String directive) { final String[] colAndFormats = formatDirective.split("="); if (colAndFormats.length != 2) { - throw new IllegalArgumentException("Invalid " + TOTALS_TABLE_ATTRIBUTE + ": " - + directive + ", bad format " + formatDirective); + throw new IllegalArgumentException( + "Invalid " + TOTALS_TABLE_ATTRIBUTE + ": " + directive + ", bad format " + formatDirective); } final String[] formatsByAgg = colAndFormats[1].split("&"); for (final String formatForAgg : formatsByAgg) { final String[] aggAndFormat = formatForAgg.split(":"); if (aggAndFormat.length != 2) { - throw new IllegalArgumentException( - "Invalid " + TOTALS_TABLE_ATTRIBUTE + ": " + directive + throw new IllegalArgumentException("Invalid " + TOTALS_TABLE_ATTRIBUTE + ": " + directive + ", bad format for agg" + formatForAgg + " in " + formatDirective); } 
builder.setFormat(colAndFormats[0], - EnumValue.caseInsensitiveValueOf(AggType.class, aggAndFormat[0]), - decodeFormula(aggAndFormat[1])); + EnumValue.caseInsensitiveValueOf(AggType.class, aggAndFormat[0]), + decodeFormula(aggAndFormat[1])); } } } @@ -484,8 +478,7 @@ static Table makeTotalsTable(Table source, String aggregationDirective) { } /** - * Given a source table, builder and aggregation columns build a totals table with multiple - * rows. + * Given a source table, builder and aggregation columns build a totals table with multiple rows. * * @param source the source table * @param builder the TotalsTableBuilder @@ -493,8 +486,7 @@ static Table makeTotalsTable(Table source, String aggregationDirective) { * * @return an aggregated totals table */ - public static Table makeTotalsTable(Table source, TotalsTableBuilder builder, - String... groupByColumns) { + public static Table makeTotalsTable(Table source, TotalsTableBuilder builder, String... groupByColumns) { final ComboAggregateFactory aggregationFactory = makeAggregationFactory(source, builder); final String[] formatSpecs = makeColumnFormats(source, builder); @@ -510,8 +502,8 @@ private static void ensureColumnsExist(Table source, Set columns) { if (!source.getColumnSourceMap().keySet().containsAll(columns)) { final Set missing = new LinkedHashSet<>(columns); missing.removeAll(source.getColumnSourceMap().keySet()); - throw new IllegalArgumentException("Missing columns for totals table " + missing - + ", available columns " + source.getColumnSourceMap().keySet()); + throw new IllegalArgumentException("Missing columns for totals table " + missing + ", available columns " + + source.getColumnSourceMap().keySet()); } } @@ -544,7 +536,7 @@ private static String[] makeColumnFormats(Table source, TotalsTableBuilder build } final String formatSpec = (aggsForCol.size() == 1) ? 
col + '=' + aggFormat - : col + "__" + agg + '=' + aggFormat; + : col + "__" + agg + '=' + aggFormat; formatSpecs.add(formatSpec); } } @@ -562,21 +554,18 @@ private static String[] makeColumnFormats(Table source, TotalsTableBuilder build * * @return the {@link ComboAggregateFactory} described by source and builder. */ - public static ComboAggregateFactory makeAggregationFactory(Table source, - TotalsTableBuilder builder) { + public static ComboAggregateFactory makeAggregationFactory(Table source, TotalsTableBuilder builder) { ensureColumnsExist(source, builder.operationMap.keySet()); final Set defaultOperations = EnumSet.of(builder.defaultOperation); final Map> columnsByType = new LinkedHashMap<>(); - for (final Map.Entry entry : source.getColumnSourceMap() - .entrySet()) { + for (final Map.Entry entry : source.getColumnSourceMap().entrySet()) { final String columnName = entry.getKey(); if (ColumnFormattingValues.isFormattingColumn(columnName)) { continue; } - final Set operations = - builder.operationMap.getOrDefault(columnName, defaultOperations); + final Set operations = builder.operationMap.getOrDefault(columnName, defaultOperations); final Class type = entry.getValue().getType(); for (final AggType op : operations) { @@ -598,54 +587,46 @@ public static ComboAggregateFactory makeAggregationFactory(Table source, final List aggregations = new ArrayList<>(); columnsByType.entrySet().stream().flatMap(e -> makeOperation(e.getKey(), e.getValue())) - .forEach(aggregations::add); + .forEach(aggregations::add); return new ComboAggregateFactory(aggregations); } - private static Stream makeOperation(AggType operation, - List values) { + private static Stream makeOperation(AggType operation, List values) { switch (operation) { case Array: - throw new IllegalArgumentException( - "Can not use Array aggregation in totals table."); + throw new IllegalArgumentException("Can not use Array aggregation in totals table."); case Count: return 
values.stream().map(ComboAggregateFactory::AggCount); case Min: - return Stream.of(ComboAggregateFactory - .AggMin(values.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY))); + return Stream.of(ComboAggregateFactory.AggMin(values.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY))); case Max: - return Stream.of(ComboAggregateFactory - .AggMax(values.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY))); + return Stream.of(ComboAggregateFactory.AggMax(values.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY))); case First: - return Stream.of(ComboAggregateFactory - .AggFirst(values.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY))); + return Stream + .of(ComboAggregateFactory.AggFirst(values.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY))); case Last: - return Stream.of(ComboAggregateFactory - .AggLast(values.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY))); + return Stream + .of(ComboAggregateFactory.AggLast(values.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY))); case Sum: - return Stream.of(ComboAggregateFactory - .AggSum(values.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY))); + return Stream.of(ComboAggregateFactory.AggSum(values.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY))); case AbsSum: - return Stream.of(ComboAggregateFactory - .AggAbsSum(values.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY))); + return Stream + .of(ComboAggregateFactory.AggAbsSum(values.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY))); case Avg: - return Stream.of(ComboAggregateFactory - .AggAvg(values.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY))); + return Stream.of(ComboAggregateFactory.AggAvg(values.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY))); case Std: - return Stream.of(ComboAggregateFactory - .AggStd(values.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY))); + return Stream.of(ComboAggregateFactory.AggStd(values.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY))); case Var: - return Stream.of(ComboAggregateFactory - 
.AggVar(values.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY))); + return Stream.of(ComboAggregateFactory.AggVar(values.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY))); case Unique: - return Stream.of(ComboAggregateFactory - .AggUnique(values.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY))); + return Stream + .of(ComboAggregateFactory.AggUnique(values.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY))); case CountDistinct: return Stream.of(ComboAggregateFactory - .AggCountDistinct(values.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY))); + .AggCountDistinct(values.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY))); case Distinct: - return Stream.of(ComboAggregateFactory - .AggDistinct(values.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY))); + return Stream + .of(ComboAggregateFactory.AggDistinct(values.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY))); default: throw new IllegalStateException(); } diff --git a/DB/src/main/java/io/deephaven/db/v2/TransformTablesFunction.java b/DB/src/main/java/io/deephaven/db/v2/TransformTablesFunction.java index dc842c2583e..5927421c6de 100644 --- a/DB/src/main/java/io/deephaven/db/v2/TransformTablesFunction.java +++ b/DB/src/main/java/io/deephaven/db/v2/TransformTablesFunction.java @@ -19,8 +19,7 @@ public TransformTablesFunction(BiFunction function) { this.isExplicit = false; } - public TransformTablesFunction(TableDefinition returnDefinition, - BiFunction function) { + public TransformTablesFunction(TableDefinition returnDefinition, BiFunction function) { this.returnDefinition = returnDefinition; this.function = Objects.requireNonNull(function); this.isExplicit = true; diff --git a/DB/src/main/java/io/deephaven/db/v2/TransformableTableMap.java b/DB/src/main/java/io/deephaven/db/v2/TransformableTableMap.java index 0ea8b6519cf..25ba84dc4d1 100644 --- a/DB/src/main/java/io/deephaven/db/v2/TransformableTableMap.java +++ b/DB/src/main/java/io/deephaven/db/v2/TransformableTableMap.java @@ -7,8 +7,8 @@ /** * This object can 
be merged to produce a single coalesced Table. *

    - * This is used by TableMap and TableMapProxyHandlers to expose the - * {@link TransformableTableMap#merge} operation to users. + * This is used by TableMap and TableMapProxyHandlers to expose the {@link TransformableTableMap#merge} operation to + * users. */ public interface TransformableTableMap { @@ -31,35 +31,30 @@ public interface TransformableTableMap { * Create a Table out of this TableMap's values. * *

    - * Creates a proxy object that in many respects acts like a Table, you can perform many of the - * table operations on it, which are then applied using - * {@link TableMap#transformTables(java.util.function.Function)} or - * {@link TableMap#transformTablesWithMap(TableMap, BiFunction)} if the right hand side of an - * operation is another TableMap. + * Creates a proxy object that in many respects acts like a Table, you can perform many of the table operations on + * it, which are then applied using {@link TableMap#transformTables(java.util.function.Function)} or + * {@link TableMap#transformTablesWithMap(TableMap, BiFunction)} if the right hand side of an operation is another + * TableMap. *

    * *

    - * The returned table acts as if it were an uncoalesced table; when two of our Proxy objects are - * operated on together, e.g., by a {@link Table#join}) operation, then tables with identical - * keys are used. If strictKeys is set, an error occurs if the two TableMaps do not have - * identical keySets. + * The returned table acts as if it were an uncoalesced table; when two of our Proxy objects are operated on + * together, e.g., by a {@link Table#join}) operation, then tables with identical keys are used. If strictKeys is + * set, an error occurs if the two TableMaps do not have identical keySets. *

    * *

    - * Supported operations include those which return a {@link io.deephaven.db.tables.Table}, - * {@link Table#size()}, {@link Table#getDefinition()} and operations to retrieve attributes. - * Operations which retrieve data (such as {@link Table#getIndex()}} or - * {@link Table#getColumn(int)} require a coalesce operation. If allowCoalesce is not set to - * true, then the coalescing operations will fail with an {@link IllegalArgumentException}. + * Supported operations include those which return a {@link io.deephaven.db.tables.Table}, {@link Table#size()}, + * {@link Table#getDefinition()} and operations to retrieve attributes. Operations which retrieve data (such as + * {@link Table#getIndex()}} or {@link Table#getColumn(int)} require a coalesce operation. If allowCoalesce is not + * set to true, then the coalescing operations will fail with an {@link IllegalArgumentException}. *

    * * @param strictKeys if we should fail when our RHS TableMap does not have the same keySet - * @param allowCoalesce if we should allow this TableMap to be automatically coalesced into a - * table - * @param sanityCheckJoins if we should sanity check join keys, meaning that we should refuse to - * perform any joins if the join keys would span two segments of the TableMap. This - * option is safer, but requires additional work on the query engine to perform the - * safety checks. + * @param allowCoalesce if we should allow this TableMap to be automatically coalesced into a table + * @param sanityCheckJoins if we should sanity check join keys, meaning that we should refuse to perform any joins + * if the join keys would span two segments of the TableMap. This option is safer, but requires additional + * work on the query engine to perform the safety checks. * @return a Table object that performs operations by segment */ Table asTable(boolean strictKeys, boolean allowCoalesce, boolean sanityCheckJoins); @@ -85,8 +80,8 @@ default AsTableBuilder asTableBuilder() { /** * Builder object for a TableMapProxy. *

    - * By default strict keys and join sanity check are enabled; but coalescing is not. This gives - * you the safest possible asTable call. + * By default strict keys and join sanity check are enabled; but coalescing is not. This gives you the safest + * possible asTable call. */ class AsTableBuilder { private final TransformableTableMap transformableTableMap; @@ -120,8 +115,7 @@ public Table build() { * True by default. *

    * - * @param strictKeys if operations should fail when our RHS TableMap does not have the same - * keySet + * @param strictKeys if operations should fail when our RHS TableMap does not have the same keySet * @return this builder */ public AsTableBuilder strictKeys(boolean strictKeys) { @@ -136,8 +130,7 @@ public AsTableBuilder strictKeys(boolean strictKeys) { * False by default. *

    * - * @param allowCoalesce if operations should allow this TableMap to be automatically - * coalesced into a table + * @param allowCoalesce if operations should allow this TableMap to be automatically coalesced into a table * @return this builder */ public AsTableBuilder allowCoalesce(boolean allowCoalesce) { @@ -152,10 +145,9 @@ public AsTableBuilder allowCoalesce(boolean allowCoalesce) { * True by default. *

    * - * @param sanityCheckJoins if we should sanity check join keys, meaning that we should - * refuse to perform any joins if the join keys would span two segments of the - * TableMap. This option is safer, but requires additional work on the query engine - * to perform the safety checks. + * @param sanityCheckJoins if we should sanity check join keys, meaning that we should refuse to perform any + * joins if the join keys would span two segments of the TableMap. This option is safer, but requires + * additional work on the query engine to perform the safety checks. * @return this builder */ public AsTableBuilder sanityCheckJoin(boolean sanityCheckJoins) { diff --git a/DB/src/main/java/io/deephaven/db/v2/TreeTableFilter.java b/DB/src/main/java/io/deephaven/db/v2/TreeTableFilter.java index 60dff170355..12e6f35387f 100644 --- a/DB/src/main/java/io/deephaven/db/v2/TreeTableFilter.java +++ b/DB/src/main/java/io/deephaven/db/v2/TreeTableFilter.java @@ -30,14 +30,13 @@ /** * Apply filters, preserving parents. * - * The TreeTableFilter takes a TreeTable and SelectFilters as input. The original source table is - * filtered and any matching rows are included; as well as their ancestors. The result table is then - * converted into a tree table using the original parameters. + * The TreeTableFilter takes a TreeTable and SelectFilters as input. The original source table is filtered and any + * matching rows are included; as well as their ancestors. The result table is then converted into a tree table using + * the original parameters. 
*/ -public class TreeTableFilter - implements Function.Unary, MemoizedOperationKey.Provider { +public class TreeTableFilter implements Function.Unary, MemoizedOperationKey.Provider { private static final boolean DEBUG = io.deephaven.configuration.Configuration.getInstance() - .getBooleanWithDefault("TreeTableFilter.debug", false); + .getBooleanWithDefault("TreeTableFilter.debug", false); private static final Logger log = LoggerFactory.getLogger(TreeTableFilter.class); @@ -113,14 +112,13 @@ private State(Table table, TableDefinition origTableDefinition) { throw new IllegalArgumentException("Table is not a treeTable"); } treeTableInfo = (TreeTableInfo) sourceInfo; - reverseLookupListener = Objects.requireNonNull( - (ReverseLookupListener) table.getAttribute(Table.REVERSE_LOOKUP_ATTRIBUTE)); + reverseLookupListener = + Objects.requireNonNull((ReverseLookupListener) table.getAttribute(Table.REVERSE_LOOKUP_ATTRIBUTE)); filters = TreeTableFilter.this.filters; // The filters have already been inited by here. 
- Assert.eq(table.getDefinition(), "Applied table.definition", origTableDefinition, - "Original definition"); + Assert.eq(table.getDefinition(), "Applied table.definition", origTableDefinition, "Original definition"); parentSource = source.getColumnSource(treeTableInfo.parentColumn); idSource = source.getColumnSource(treeTableInfo.idColumn); @@ -129,11 +127,11 @@ private State(Table table, TableDefinition origTableDefinition) { swapListener = new SwapListenerWithRLL(source, reverseLookupListener); source.listenForUpdates(swapListener); ConstructSnapshot.callDataSnapshotFunction(System.identityHashCode(source) + ": ", - swapListener.makeSnapshotControl(), - (usePrev, beforeClockValue) -> { - doInitialFilter(usePrev); - return true; - }); + swapListener.makeSnapshotControl(), + (usePrev, beforeClockValue) -> { + doInitialFilter(usePrev); + return true; + }); } else { swapListener = null; doInitialFilter(false); @@ -158,10 +156,8 @@ private void doInitialFilter(final boolean usePrev) { filteredRaw.addParentReference(treeListener); } - // We can re-use the RLL when filtering as long as we are sure to check for existence in - // the - // sub table indices. Sticking this annotation here will let QueryTable know it can - // re-use it. + // We can re-use the RLL when filtering as long as we are sure to check for existence in the + // sub table indices. Sticking this annotation here will let QueryTable know it can re-use it. 
filteredRaw.setAttribute(PREPARED_RLL_ATTRIBUTE, reverseLookupListener); } @@ -191,35 +187,31 @@ private void validateState(final boolean usePrev) { if (!expectedIndex.equals(valuesIndex)) { final Index missing = expectedIndex.minus(valuesIndex); final Index extraValues = valuesIndex.minus(expectedIndex); - throw new IllegalStateException( - "Inconsistent included Values: missing=" + missing + ", extra=" + extraValues - + ", expected=" + expectedIndex + ", valuesIndex=" + valuesIndex); + throw new IllegalStateException("Inconsistent included Values: missing=" + missing + ", extra=" + + extraValues + ", expected=" + expectedIndex + ", valuesIndex=" + valuesIndex); } TLongArrayList parentsToProcess = new TLongArrayList(); expectedIndex.forEach(parentsToProcess::add); - final Index sourceIndex = - usePrev ? source.getIndex().getPrevIndex() : source.getIndex(); + final Index sourceIndex = usePrev ? source.getIndex().getPrevIndex() : source.getIndex(); do { final TLongArrayList newParentKeys = new TLongArrayList(); for (final TLongIterator it = parentsToProcess.iterator(); it.hasNext();) { final long row = it.next(); - final Object parent = - usePrev ? parentSource.getPrev(row) : parentSource.get(row); + final Object parent = usePrev ? parentSource.getPrev(row) : parentSource.get(row); if (parent == null) { continue; } expectedParents.computeIfAbsent(parent, x -> new TLongHashSet()).add(row); - final long parentRow = usePrev ? reverseLookupListener.getPrev(parent) - : reverseLookupListener.get(parent); + final long parentRow = + usePrev ? 
reverseLookupListener.getPrev(parent) : reverseLookupListener.get(parent); if (parentRow == reverseLookupListener.getNoEntryValue()) { continue; } if (sourceIndex.find(parentRow) < 0) { - throw new IllegalStateException( - "Reverse Lookup Listener points at row " + parentRow + " for " + parent - + ", but the row is not in the index=" + source.getIndex()); + throw new IllegalStateException("Reverse Lookup Listener points at row " + parentRow + " for " + + parent + ", but the row is not in the index=" + source.getIndex()); } newParentKeys.add(parentRow); } @@ -232,19 +224,19 @@ private void validateState(final boolean usePrev) { final TLongSet actualSet = parentReferences.get(parentValue); final TLongSet expectedSet = expectedParents.get(parentValue); if (!actualSet.equals(expectedSet)) { - throw new IllegalStateException("Parent set mismatch " + parentValue - + ", expected=" + expectedSet + ", actual=" + actualSet); + throw new IllegalStateException("Parent set mismatch " + parentValue + ", expected=" + expectedSet + + ", actual=" + actualSet); } - final long parentKey = usePrev ? reverseLookupListener.getPrev(parentValue) - : reverseLookupListener.get(parentValue); + final long parentKey = + usePrev ? 
reverseLookupListener.getPrev(parentValue) : reverseLookupListener.get(parentValue); if (parentKey != reverseLookupListener.getNoEntryValue()) { // then we should have it in our index builder.addKey(parentKey); final long position = parentIndex.find(parentKey); if (position < 0) { - throw new IllegalStateException("Could not find parent in our result: " - + parentValue + ", key=" + parentKey); + throw new IllegalStateException( + "Could not find parent in our result: " + parentValue + ", key=" + parentKey); } } }); @@ -261,13 +253,11 @@ private void removeValues(Index rowsToRemove) { } private void removeParents(Index rowsToRemove) { - final Map parents = - generateParentReferenceMap(rowsToRemove, parentSource::getPrev); + final Map parents = generateParentReferenceMap(rowsToRemove, parentSource::getPrev); final IndexBuilder builder = Index.FACTORY.getRandomBuilder(); while (!parents.isEmpty()) { - final Iterator> iterator = - parents.entrySet().iterator(); + final Iterator> iterator = parents.entrySet().iterator(); final Map.Entry entry = iterator.next(); final Object parent = entry.getKey(); final TLongSet references = entry.getValue(); @@ -287,8 +277,7 @@ private void removeParents(Index rowsToRemove) { if (valuesIndex.find(parentKey) < 0) { final Object grandParentId = parentSource.getPrev(parentKey); if (grandParentId != null) { - parents.computeIfAbsent(grandParentId, x -> new TLongHashSet()) - .add(parentKey); + parents.computeIfAbsent(grandParentId, x -> new TLongHashSet()).add(parentKey); } } } @@ -323,40 +312,36 @@ private Index checkForResurrectedParent(Index rowsToCheck) { } private Index computeParents(final boolean usePrev, @NotNull final Index rowsToParent) { - final Map parents = generateParentReferenceMap(rowsToParent, - usePrev ? parentSource::getPrev : parentSource::get); + final Map parents = + generateParentReferenceMap(rowsToParent, usePrev ? 
parentSource::getPrev : parentSource::get); final IndexBuilder builder = Index.FACTORY.getRandomBuilder(); while (!parents.isEmpty()) { - final Iterator> iterator = - parents.entrySet().iterator(); + final Iterator> iterator = parents.entrySet().iterator(); final Map.Entry entry = iterator.next(); final Object parent = entry.getKey(); final TLongSet references = entry.getValue(); iterator.remove(); - final long parentKey = usePrev ? reverseLookupListener.getPrev(parent) - : reverseLookupListener.get(parent); + final long parentKey = + usePrev ? reverseLookupListener.getPrev(parent) : reverseLookupListener.get(parent); if (parentKey != reverseLookupListener.getNoEntryValue()) { builder.addKey(parentKey); final Object grandParentId = - usePrev ? parentSource.getPrev(parentKey) : parentSource.get(parentKey); + usePrev ? parentSource.getPrev(parentKey) : parentSource.get(parentKey); if (grandParentId != null) { - parents.computeIfAbsent(grandParentId, x -> new TLongHashSet()) - .add(parentKey); + parents.computeIfAbsent(grandParentId, x -> new TLongHashSet()).add(parentKey); } } - parentReferences.computeIfAbsent(parent, x -> new TLongHashSet()) - .addAll(references); + parentReferences.computeIfAbsent(parent, x -> new TLongHashSet()).addAll(references); } return builder.getIndex(); } @NotNull - private Map generateParentReferenceMap(Index rowsToParent, - LongFunction getValue) { + private Map generateParentReferenceMap(Index rowsToParent, LongFunction getValue) { final Map parents = new LinkedHashMap<>(rowsToParent.intSize()); for (final Index.Iterator it = rowsToParent.iterator(); it.hasNext();) { final long row = it.nextLong(); @@ -373,8 +358,7 @@ private class TreeTableFilterListener extends BaseTable.ShiftAwareListenerImpl { TreeTableFilterListener(String description, DynamicTable parent, QueryTable dependent) { super(description, parent, dependent); - inputColumns = - source.newModifiedColumnSet(treeTableInfo.idColumn, treeTableInfo.parentColumn); + inputColumns 
= source.newModifiedColumnSet(treeTableInfo.idColumn, treeTableInfo.parentColumn); Arrays.stream(filters).forEach(filter -> { for (final String column : filter.getColumns()) { inputColumns.setAll(column); @@ -392,24 +376,22 @@ public void onUpdate(final Update upstream) { final long sourceLastStep = source.getLastNotificationStep(); if (rllLastStep != sourceLastStep) { - throw new IllegalStateException("RLL was updated in a different cycle! Rll: " - + rllLastStep + " source: " + sourceLastStep); + throw new IllegalStateException( + "RLL was updated in a different cycle! Rll: " + rllLastStep + " source: " + sourceLastStep); } // We can ignore modified while updating if columns we care about were not touched. final boolean useModified = upstream.modifiedColumnSet.containsAny(inputColumns); - // Must take care of removed here, because these rows are not valid in post shift - // space. + // Must take care of removed here, because these rows are not valid in post shift space. downstream.removed = resultIndex.extract(upstream.removed); - try ( - final Index allRemoved = + try (final Index allRemoved = useModified ? upstream.removed.union(upstream.getModifiedPreShift()) : null; - final Index valuesToRemove = - (useModified ? allRemoved : upstream.removed).intersect(valuesIndex); - final Index removedParents = - (useModified ? allRemoved : upstream.removed).intersect(parentIndex)) { + final Index valuesToRemove = + (useModified ? allRemoved : upstream.removed).intersect(valuesIndex); + final Index removedParents = + (useModified ? allRemoved : upstream.removed).intersect(parentIndex)) { removeValues(valuesToRemove); parentIndex.remove(removedParents); @@ -436,10 +418,10 @@ public void onUpdate(final Update upstream) { // Finally handle added sets. 
try (final Index addedAndModified = upstream.added.union(upstream.modified); - final Index newFiltered = doValueFilter(false, addedAndModified); - final Index resurrectedParents = checkForResurrectedParent(addedAndModified); - final Index newParents = computeParents(false, newFiltered); - final Index newResurrectedParents = computeParents(false, resurrectedParents)) { + final Index newFiltered = doValueFilter(false, addedAndModified); + final Index resurrectedParents = checkForResurrectedParent(addedAndModified); + final Index newParents = computeParents(false, newFiltered); + final Index newResurrectedParents = computeParents(false, resurrectedParents)) { valuesIndex.insert(newFiltered); @@ -450,22 +432,20 @@ public void onUpdate(final Update upstream) { // Compute expected results and the sets we will propagate to child listeners. try (final Index result = valuesIndex.union(parentIndex); - final Index resultRemovals = resultIndex.minus(result)) { + final Index resultRemovals = resultIndex.minus(result)) { downstream.added = result.minus(resultIndex); resultIndex.update(downstream.added, resultRemovals); downstream.modified = upstream.modified.intersect(resultIndex); downstream.modified.remove(downstream.added); - // convert post filter removals into pre-shift space -- note these rows must - // have previously existed + // convert post filter removals into pre-shift space -- note these rows must have previously existed upstream.shifted.unapply(resultRemovals); downstream.removed.insert(resultRemovals); } downstream.shifted = upstream.shifted; - downstream.modifiedColumnSet = upstream.modifiedColumnSet; // note that dependent is - // a subTable + downstream.modifiedColumnSet = upstream.modifiedColumnSet; // note that dependent is a subTable filteredRaw.notifyListeners(downstream); @@ -515,13 +495,12 @@ private static final class SwapListenerWithRLL extends ShiftAwareSwapListener { @Override public ConstructSnapshot.SnapshotControl makeSnapshotControl() { return 
ConstructSnapshot.makeSnapshotControl( - this::startWithRLL, - (final long currentClockValue, - final boolean usingPreviousValues) -> rll - .getLastNotificationStep() == rllLastNotificationStep - && isInInitialNotificationWindow(), - (final long afterClockValue, - final boolean usedPreviousValues) -> end(afterClockValue)); + this::startWithRLL, + (final long currentClockValue, + final boolean usingPreviousValues) -> rll + .getLastNotificationStep() == rllLastNotificationStep + && isInInitialNotificationWindow(), + (final long afterClockValue, final boolean usedPreviousValues) -> end(afterClockValue)); } @SuppressWarnings("AutoBoxing") @@ -562,15 +541,14 @@ public synchronized Boolean startWithRLL(final long beforeClockValue) { } if (DEBUG) { log.info().append("SwapListenerWithRLL start() source=") - .append(System.identityHashCode(sourceTable)) - .append(". swap=") - .append(System.identityHashCode(this)) - .append(", start={").append(beforeStep).append(",") - .append(beforeState.toString()) - .append("}, last=").append(lastNotificationStep) - .append(", rllLast=").append(rllLastNotificationStep) - .append(", result=").append(result) - .endl(); + .append(System.identityHashCode(sourceTable)) + .append(". 
swap=") + .append(System.identityHashCode(this)) + .append(", start={").append(beforeStep).append(",").append(beforeState.toString()) + .append("}, last=").append(lastNotificationStep) + .append(", rllLast=").append(rllLastNotificationStep) + .append(", result=").append(result) + .endl(); } return result; } @@ -583,26 +561,23 @@ public boolean start(final long beforeClockValue) { @Override public synchronized boolean end(final long afterClockValue) { if (SwapListener.DEBUG) { - log.info().append("SwapListenerWithRLL end() swap=") - .append(System.identityHashCode(this)) - .append(", end={").append(LogicalClock.getStep(afterClockValue)).append(",") - .append(LogicalClock.getState(afterClockValue).toString()) - .append("}, last=").append(sourceTable.getLastNotificationStep()) - .append(", rllLast=").append(rll.getLastNotificationStep()) - .endl(); + log.info().append("SwapListenerWithRLL end() swap=").append(System.identityHashCode(this)) + .append(", end={").append(LogicalClock.getStep(afterClockValue)).append(",") + .append(LogicalClock.getState(afterClockValue).toString()) + .append("}, last=").append(sourceTable.getLastNotificationStep()) + .append(", rllLast=").append(rll.getLastNotificationStep()) + .endl(); } - return rll.getLastNotificationStep() == rllLastNotificationStep - && super.end(afterClockValue); + return rll.getLastNotificationStep() == rllLastNotificationStep && super.end(afterClockValue); } @Override public synchronized void setListenerAndResult(@NotNull final ShiftAwareListener listener, - @NotNull final NotificationStepReceiver resultTable) { + @NotNull final NotificationStepReceiver resultTable) { super.setListenerAndResult(listener, resultTable); if (SwapListener.DEBUG) { - log.info().append("SwapListenerWithRLL swap=") - .append(System.identityHashCode(SwapListenerWithRLL.this)).append(", result=") - .append(System.identityHashCode(resultTable)).endl(); + log.info().append("SwapListenerWithRLL 
swap=").append(System.identityHashCode(SwapListenerWithRLL.this)) + .append(", result=").append(System.identityHashCode(resultTable)).endl(); } } } diff --git a/DB/src/main/java/io/deephaven/db/v2/TreeTableOrphanPromoter.java b/DB/src/main/java/io/deephaven/db/v2/TreeTableOrphanPromoter.java index 2bd0cf0b22f..c494690317d 100644 --- a/DB/src/main/java/io/deephaven/db/v2/TreeTableOrphanPromoter.java +++ b/DB/src/main/java/io/deephaven/db/v2/TreeTableOrphanPromoter.java @@ -22,14 +22,14 @@ /** *

    - * Identify orphan rows in a table destined for conversion into a tree table, and mask their parent - * column value to null, so that they show up at the top level of the hierarchy. + * Identify orphan rows in a table destined for conversion into a tree table, and mask their parent column value to + * null, so that they show up at the top level of the hierarchy. *

    * *

    - * This is useful if your data contains values which you can not identify as top-level rows; or if - * you would like to filter your tree table source, excluding parents which do not meet your filter - * criteria, but do not want to orphan the matches. + * This is useful if your data contains values which you can not identify as top-level rows; or if you would like to + * filter your tree table source, excluding parents which do not meet your filter criteria, but do not want to orphan + * the matches. *

    * *

    @@ -69,158 +69,156 @@ public State(Table table) { } public Table invoke() { - final Map nameToColumns = - new LinkedHashMap<>(source.getColumnSourceMap()); + final Map nameToColumns = new LinkedHashMap<>(source.getColumnSourceMap()); // noinspection unchecked - final ColumnSource parentView = - new AbstractColumnSource.DefaultedMutable(parentSource.getType()) { - @Override - public Object get(long index) { - if (hasParent(index)) { - return parentSource.get(index); - } - return null; + final ColumnSource parentView = new AbstractColumnSource.DefaultedMutable(parentSource.getType()) { + @Override + public Object get(long index) { + if (hasParent(index)) { + return parentSource.get(index); } + return null; + } - @Override - public Object getPrev(long index) { - if (hadParent(index)) { - return parentSource.getPrev(index); - } - return null; + @Override + public Object getPrev(long index) { + if (hadParent(index)) { + return parentSource.getPrev(index); } + return null; + } - @Override - public Boolean getPrevBoolean(long index) { - if (hadParent(index)) { - return parentSource.getPrevBoolean(index); - } - return null; + @Override + public Boolean getPrevBoolean(long index) { + if (hadParent(index)) { + return parentSource.getPrevBoolean(index); } + return null; + } - @Override - public byte getPrevByte(long index) { - if (hadParent(index)) { - return parentSource.getPrevByte(index); - } - return QueryConstants.NULL_BYTE; + @Override + public byte getPrevByte(long index) { + if (hadParent(index)) { + return parentSource.getPrevByte(index); } + return QueryConstants.NULL_BYTE; + } - @Override - public char getPrevChar(long index) { - if (hadParent(index)) { - return parentSource.getPrevChar(index); - } - return QueryConstants.NULL_CHAR; + @Override + public char getPrevChar(long index) { + if (hadParent(index)) { + return parentSource.getPrevChar(index); } + return QueryConstants.NULL_CHAR; + } - @Override - public double getPrevDouble(long index) { - if 
(hadParent(index)) { - return parentSource.getPrevDouble(index); - } - return QueryConstants.NULL_DOUBLE; + @Override + public double getPrevDouble(long index) { + if (hadParent(index)) { + return parentSource.getPrevDouble(index); } + return QueryConstants.NULL_DOUBLE; + } - @Override - public float getPrevFloat(long index) { - if (hadParent(index)) { - return parentSource.getPrevFloat(index); - } - return QueryConstants.NULL_FLOAT; + @Override + public float getPrevFloat(long index) { + if (hadParent(index)) { + return parentSource.getPrevFloat(index); } + return QueryConstants.NULL_FLOAT; + } - @Override - public int getPrevInt(long index) { - if (hadParent(index)) { - return parentSource.getPrevInt(index); - } - return QueryConstants.NULL_INT; + @Override + public int getPrevInt(long index) { + if (hadParent(index)) { + return parentSource.getPrevInt(index); } + return QueryConstants.NULL_INT; + } - @Override - public long getPrevLong(long index) { - if (hadParent(index)) { - return parentSource.getPrevLong(index); - } - return QueryConstants.NULL_LONG; + @Override + public long getPrevLong(long index) { + if (hadParent(index)) { + return parentSource.getPrevLong(index); } + return QueryConstants.NULL_LONG; + } - @Override - public short getPrevShort(long index) { - if (hadParent(index)) { - return parentSource.getPrevShort(index); - } - return QueryConstants.NULL_SHORT; + @Override + public short getPrevShort(long index) { + if (hadParent(index)) { + return parentSource.getPrevShort(index); } + return QueryConstants.NULL_SHORT; + } - @Override - public Boolean getBoolean(long index) { - if (hasParent(index)) { - return parentSource.getBoolean(index); - } - return null; + @Override + public Boolean getBoolean(long index) { + if (hasParent(index)) { + return parentSource.getBoolean(index); } + return null; + } - @Override - public byte getByte(long index) { - if (hasParent(index)) { - return parentSource.getByte(index); - } - return QueryConstants.NULL_BYTE; + 
@Override + public byte getByte(long index) { + if (hasParent(index)) { + return parentSource.getByte(index); } + return QueryConstants.NULL_BYTE; + } - @Override - public char getChar(long index) { - if (hasParent(index)) { - return parentSource.getChar(index); - } - return QueryConstants.NULL_CHAR; + @Override + public char getChar(long index) { + if (hasParent(index)) { + return parentSource.getChar(index); } + return QueryConstants.NULL_CHAR; + } - @Override - public double getDouble(long index) { - if (hasParent(index)) { - return parentSource.getDouble(index); - } - return QueryConstants.NULL_DOUBLE; + @Override + public double getDouble(long index) { + if (hasParent(index)) { + return parentSource.getDouble(index); } + return QueryConstants.NULL_DOUBLE; + } - @Override - public float getFloat(long index) { - if (hasParent(index)) { - return parentSource.getFloat(index); - } - return QueryConstants.NULL_FLOAT; - + @Override + public float getFloat(long index) { + if (hasParent(index)) { + return parentSource.getFloat(index); } + return QueryConstants.NULL_FLOAT; - @Override - public int getInt(long index) { - if (hasParent(index)) { - return parentSource.getInt(index); - } - return QueryConstants.NULL_INT; + } + @Override + public int getInt(long index) { + if (hasParent(index)) { + return parentSource.getInt(index); } + return QueryConstants.NULL_INT; + + } - @Override - public long getLong(long index) { - if (hasParent(index)) { - return parentSource.getLong(index); - } - return QueryConstants.NULL_LONG; + @Override + public long getLong(long index) { + if (hasParent(index)) { + return parentSource.getLong(index); } + return QueryConstants.NULL_LONG; + } - @Override - public short getShort(long index) { - if (hasParent(index)) { - return parentSource.getShort(index); - } - return QueryConstants.NULL_SHORT; + @Override + public short getShort(long index) { + if (hasParent(index)) { + return parentSource.getShort(index); } - }; + return 
QueryConstants.NULL_SHORT; + } + }; nameToColumns.put(parentColumn, parentView); @@ -230,178 +228,169 @@ public short getShort(long index) { if (source.isRefreshing()) { result.addParentReference(reverseLookupListener); - final ModifiedColumnSet inputColumns = - source.newModifiedColumnSet(idColumn, parentColumn); + final ModifiedColumnSet inputColumns = source.newModifiedColumnSet(idColumn, parentColumn); - final String[] columnNames = source.getColumnSourceMap().keySet() - .toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + final String[] columnNames = + source.getColumnSourceMap().keySet().toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY); final ModifiedColumnSet.Transformer mcsTransformer = - source.newModifiedColumnSetTransformer(result, columnNames); + source.newModifiedColumnSetTransformer(result, columnNames); final ModifiedColumnSet mcsParentColumn = result.newModifiedColumnSet(parentColumn); final ShiftAwareListener listener = - new BaseTable.ShiftAwareListenerImpl("Orphan Promoter", source, result) { - final Map parentToChildMap = new HashMap<>(); - - { - addChildren(source.getIndex()); - } - - private void addChildren(Index index) { - for (final Index.Iterator it = index.iterator(); it.hasNext();) { - final long key = it.nextLong(); - final Object parent = parentSource.get(key); - if (parent != null) { - parentToChildMap - .computeIfAbsent(parent, x -> new TLongHashSet()).add(key); - } + new BaseTable.ShiftAwareListenerImpl("Orphan Promoter", source, result) { + final Map parentToChildMap = new HashMap<>(); + + { + addChildren(source.getIndex()); } - } - - private void removeChildren(Index index) { - for (final Index.Iterator it = index.iterator(); it.hasNext();) { - final long key = it.nextLong(); - final Object oldParent = parentSource.getPrev(key); - if (oldParent != null) { - removeFromParent(oldParent, parentToChildMap.get(oldParent), - key); + + private void addChildren(Index index) { + for (final Index.Iterator it = index.iterator(); 
it.hasNext();) { + final long key = it.nextLong(); + final Object parent = parentSource.get(key); + if (parent != null) { + parentToChildMap.computeIfAbsent(parent, x -> new TLongHashSet()).add(key); + } } } - } - private void removeFromParent(final Object oldParent, - final TLongSet oldParentSet, final long keyToRemove) { - if (oldParentSet == null) { - throw new IllegalStateException( - "Could not find set for parent: " + oldParent); - } - if (!oldParentSet.remove(keyToRemove)) { - throw new IllegalStateException("key=" + keyToRemove - + " was not in parent=" + oldParent + " set=" + oldParentSet); + private void removeChildren(Index index) { + for (final Index.Iterator it = index.iterator(); it.hasNext();) { + final long key = it.nextLong(); + final Object oldParent = parentSource.getPrev(key); + if (oldParent != null) { + removeFromParent(oldParent, parentToChildMap.get(oldParent), key); + } + } } - } - - @Override - public void onUpdate(final Update upstream) { - final Update downstream = upstream.copy(); - downstream.modifiedColumnSet = result.modifiedColumnSet; - - final boolean modifiedInputColumns = - upstream.modifiedColumnSet.containsAny(inputColumns); - if (upstream.added.empty() && upstream.removed.empty() - && upstream.shifted.empty() && !modifiedInputColumns) { - mcsTransformer.clearAndTransform(upstream.modifiedColumnSet, - downstream.modifiedColumnSet); - result.notifyListeners(downstream); - return; + + private void removeFromParent(final Object oldParent, final TLongSet oldParentSet, + final long keyToRemove) { + if (oldParentSet == null) { + throw new IllegalStateException("Could not find set for parent: " + oldParent); + } + if (!oldParentSet.remove(keyToRemove)) { + throw new IllegalStateException("key=" + keyToRemove + " was not in parent=" + + oldParent + " set=" + oldParentSet); + } } - // Collect removed / added parent objects. 
- final Set removedIds = new HashSet<>(); - final Set addedIds = new HashSet<>(); + @Override + public void onUpdate(final Update upstream) { + final Update downstream = upstream.copy(); + downstream.modifiedColumnSet = result.modifiedColumnSet; + + final boolean modifiedInputColumns = + upstream.modifiedColumnSet.containsAny(inputColumns); + if (upstream.added.empty() && upstream.removed.empty() && upstream.shifted.empty() + && !modifiedInputColumns) { + mcsTransformer.clearAndTransform(upstream.modifiedColumnSet, + downstream.modifiedColumnSet); + result.notifyListeners(downstream); + return; + } - upstream.removed.forAllLongs((final long v) -> { - final Object id = idSource.getPrev(v); - removedIds.add(id); - }); + // Collect removed / added parent objects. + final Set removedIds = new HashSet<>(); + final Set addedIds = new HashSet<>(); - upstream.added.forAllLongs((final long v) -> { - final Object id = idSource.get(v); - if (!removedIds.remove(id)) { - addedIds.add(id); - } - }); - - if (modifiedInputColumns) { - // account for any rows with modified ids - upstream.forAllModified((preIndex, postIndex) -> { - final Object prevId = idSource.getPrev(preIndex); - final Object id = idSource.get(postIndex); - if (!Objects.equals(id, prevId)) { - if (!addedIds.contains(prevId)) { - removedIds.add(prevId); - } - removedIds.remove(id); + upstream.removed.forAllLongs((final long v) -> { + final Object id = idSource.getPrev(v); + removedIds.add(id); + }); + + upstream.added.forAllLongs((final long v) -> { + final Object id = idSource.get(v); + if (!removedIds.remove(id)) { addedIds.add(id); } }); - } - // Process upstream changes and modify our state. 
- removeChildren(upstream.removed); - if (modifiedInputColumns) { - removeChildren(upstream.getModifiedPreShift()); - } + if (modifiedInputColumns) { + // account for any rows with modified ids + upstream.forAllModified((preIndex, postIndex) -> { + final Object prevId = idSource.getPrev(preIndex); + final Object id = idSource.get(postIndex); + if (!Objects.equals(id, prevId)) { + if (!addedIds.contains(prevId)) { + removedIds.add(prevId); + } + removedIds.remove(id); + addedIds.add(id); + } + }); + } - try (final Index prevIndex = source.getIndex().getPrevIndex()) { - prevIndex.remove(upstream.removed); + // Process upstream changes and modify our state. + removeChildren(upstream.removed); if (modifiedInputColumns) { - prevIndex.remove(upstream.getModifiedPreShift()); + removeChildren(upstream.getModifiedPreShift()); } - upstream.shifted.forAllInIndex(prevIndex, (key, shiftDelta) -> { - final Object oldParent = parentSource.getPrev(key); - final Object newParent = parentSource.get(key + shiftDelta); - if (oldParent != null && Objects.equals(oldParent, newParent)) { - final TLongSet set = parentToChildMap.get(oldParent); - removeFromParent(oldParent, set, key); - set.add(key + shiftDelta); - } else { - if (oldParent != null) { - removeFromParent(oldParent, - parentToChildMap.get(oldParent), key); - } - if (newParent != null) { - parentToChildMap - .computeIfAbsent(newParent, x -> new TLongHashSet()) - .add(key + shiftDelta); - } + try (final Index prevIndex = source.getIndex().getPrevIndex()) { + prevIndex.remove(upstream.removed); + if (modifiedInputColumns) { + prevIndex.remove(upstream.getModifiedPreShift()); } - }); - } - if (modifiedInputColumns) { - addChildren(upstream.modified); - } - addChildren(upstream.added); + upstream.shifted.forAllInIndex(prevIndex, (key, shiftDelta) -> { + final Object oldParent = parentSource.getPrev(key); + final Object newParent = parentSource.get(key + shiftDelta); + if (oldParent != null && Objects.equals(oldParent, newParent)) 
{ + final TLongSet set = parentToChildMap.get(oldParent); + removeFromParent(oldParent, set, key); + set.add(key + shiftDelta); + } else { + if (oldParent != null) { + removeFromParent(oldParent, parentToChildMap.get(oldParent), key); + } + if (newParent != null) { + parentToChildMap.computeIfAbsent(newParent, x -> new TLongHashSet()) + .add(key + shiftDelta); + } + } + }); + } - final TLongList modifiedKeys = new TLongArrayList(); - Stream.concat(removedIds.stream(), addedIds.stream()) - .map(parentToChildMap::get).filter(Objects::nonNull) - .forEach(x -> x.forEach(value -> { - modifiedKeys.add(value); + if (modifiedInputColumns) { + addChildren(upstream.modified); + } + addChildren(upstream.added); + + final TLongList modifiedKeys = new TLongArrayList(); + Stream.concat(removedIds.stream(), addedIds.stream()).map(parentToChildMap::get) + .filter(Objects::nonNull).forEach(x -> x.forEach(value -> { + modifiedKeys.add(value); + return true; + })); + modifiedKeys.sort(); + + final Index.SequentialBuilder builder = Index.FACTORY.getSequentialBuilder(); + // TODO: Modify this such that we don't actually ever add the keys to the builder if + // they exist + // within added; this would be made easier/more efficient if Index.Iterator exposed the + // advance() operation. + modifiedKeys.forEach(x -> { + builder.appendKey(x); return true; - })); - modifiedKeys.sort(); - - final Index.SequentialBuilder builder = - Index.FACTORY.getSequentialBuilder(); - // TODO: Modify this such that we don't actually ever add the keys to - // the builder if they exist - // within added; this would be made easier/more efficient if - // Index.Iterator exposed the - // advance() operation. 
- modifiedKeys.forEach(x -> { - builder.appendKey(x); - return true; - }); - - downstream.modified.insert(builder.getIndex()); - downstream.modified.remove(upstream.added); - - if (downstream.modified.nonempty()) { - mcsTransformer.clearAndTransform(upstream.modifiedColumnSet, - downstream.modifiedColumnSet); - if (!modifiedKeys.isEmpty()) { - downstream.modifiedColumnSet.setAll(mcsParentColumn); + }); + + downstream.modified.insert(builder.getIndex()); + downstream.modified.remove(upstream.added); + + if (downstream.modified.nonempty()) { + mcsTransformer.clearAndTransform(upstream.modifiedColumnSet, + downstream.modifiedColumnSet); + if (!modifiedKeys.isEmpty()) { + downstream.modifiedColumnSet.setAll(mcsParentColumn); + } + } else { + downstream.modifiedColumnSet.clear(); } - } else { - downstream.modifiedColumnSet.clear(); - } - result.notifyListeners(downstream); - } - }; + result.notifyListeners(downstream); + } + }; source.listenForUpdates(listener); } @@ -416,8 +405,7 @@ private boolean hasParent(long key) { private boolean hadParent(long key) { final Object parentKey = parentSource.getPrev(key); - return reverseLookupListener.getPrev(parentKey) != reverseLookupListener - .getNoEntryValue(); + return reverseLookupListener.getPrev(parentKey) != reverseLookupListener.getNoEntryValue(); } } @@ -436,8 +424,7 @@ public static Table promoteOrphans(Table table, String idColumn, String parentCo static ReverseLookup getReverseLookupListener(DynamicTable source, String idColumn) { // noinspection unchecked - Map> rllMap = - (Map>) source + Map> rllMap = (Map>) source .getAttribute(TREE_TABLE_FILTER_REVERSE_LOOKUP_ATTRIBUTE); if (rllMap == null) { rllMap = new HashMap<>(); @@ -449,8 +436,7 @@ static ReverseLookup getReverseLookupListener(DynamicTable source, String idColu return cachedRll; } - final ReverseLookupListener result = - ReverseLookupListener.makeReverseLookupListenerWithLock(source, idColumn); + final ReverseLookupListener result = 
ReverseLookupListener.makeReverseLookupListenerWithLock(source, idColumn); rllMap.put(idColumn, new WeakReference<>(result)); return result; diff --git a/DB/src/main/java/io/deephaven/db/v2/UncoalescedTable.java b/DB/src/main/java/io/deephaven/db/v2/UncoalescedTable.java index e900ad396fb..e43af639c6a 100644 --- a/DB/src/main/java/io/deephaven/db/v2/UncoalescedTable.java +++ b/DB/src/main/java/io/deephaven/db/v2/UncoalescedTable.java @@ -23,8 +23,8 @@ import java.util.Map; /** - * Abstract class for uncoalesced tables. These tables have deferred work that must be done before - * data can be operated on. + * Abstract class for uncoalesced tables. These tables have deferred work that must be done before data can be operated + * on. */ public abstract class UncoalescedTable extends BaseTable implements Table { @@ -32,16 +32,15 @@ public abstract class UncoalescedTable extends BaseTable implements Table { private volatile DynamicTable coalesced; - public UncoalescedTable(@NotNull final TableDefinition definition, - @NotNull final String description) { + public UncoalescedTable(@NotNull final TableDefinition definition, @NotNull final String description) { super(definition, description); } /** * Produce the actual coalesced result table, suitable for caching. *

    - * Note that if this table must have listeners registered, etc, setting these up is the - * implementation's responsibility. + * Note that if this table must have listeners registered, etc, setting these up is the implementation's + * responsibility. *

    * Also note that the implementation should copy attributes, as in * {@code copyAttributes(resultTable, CopyAttributeOperation.Coalesce)}. @@ -64,8 +63,8 @@ public final Table coalesce() { } /** - * Proactively set the coalesced result table. See {@link #doCoalesce()} for the caller's - * responsibilities. Note that it is an error to call this more than once with a non-null input. + * Proactively set the coalesced result table. See {@link #doCoalesce()} for the caller's responsibilities. Note + * that it is an error to call this more than once with a non-null input. * * @param coalesced The coalesced result table, suitable for caching */ @@ -109,8 +108,7 @@ public void removeUpdateListener(ShiftAwareListener listener) { ((DynamicTable) coalesce()).removeUpdateListener(listener); } - protected final void removeUpdateListenerUncoalesced( - @NotNull final ShiftAwareListener listener) { + protected final void removeUpdateListenerUncoalesced(@NotNull final ShiftAwareListener listener) { super.removeUpdateListener(listener); } @@ -171,7 +169,7 @@ public Table where(SelectFilter... filters) { @Override public Table whereIn(GroupStrategy groupStrategy, Table rightTable, boolean inclusion, - MatchPair... columnsToMatch) { + MatchPair... 
columnsToMatch) { return coalesce().whereIn(groupStrategy, rightTable, inclusion, columnsToMatch); } @@ -262,31 +260,29 @@ public Table exactJoin(Table table, MatchPair[] columnsToMatch, MatchPair[] colu @Override public Table aj(Table rightTable, MatchPair[] columnsToMatch, MatchPair[] columnsToAdd, - AsOfMatchRule asOfMatchRule) { + AsOfMatchRule asOfMatchRule) { return coalesce().aj(rightTable, columnsToMatch, columnsToAdd, asOfMatchRule); } @Override public Table raj(Table rightTable, MatchPair[] columnsToMatch, MatchPair[] columnsToAdd, - AsOfMatchRule asOfMatchRule) { + AsOfMatchRule asOfMatchRule) { return coalesce().raj(rightTable, columnsToMatch, columnsToAdd, asOfMatchRule); } @Override - public Table naturalJoin(Table rightTable, MatchPair[] columnsToMatch, - MatchPair[] columnsToAdd) { + public Table naturalJoin(Table rightTable, MatchPair[] columnsToMatch, MatchPair[] columnsToAdd) { return coalesce().naturalJoin(rightTable, columnsToMatch, columnsToAdd); } @Override public Table join(Table rightTable, MatchPair[] columnsToMatch, MatchPair[] columnsToAdd, - int numRightBitsToReserve) { + int numRightBitsToReserve) { return coalesce().join(rightTable, columnsToMatch, columnsToAdd, numRightBitsToReserve); } @Override - public Table by(AggregationStateFactory aggregationStateFactory, - SelectColumn... groupByColumns) { + public Table by(AggregationStateFactory aggregationStateFactory, SelectColumn... groupByColumns) { return coalesce().by(aggregationStateFactory, groupByColumns); } @@ -301,8 +297,7 @@ public Table tailBy(long nRows, String... groupByColumns) { } @Override - public Table applyToAllBy(String formulaColumn, String columnParamName, - SelectColumn... groupByColumns) { + public Table applyToAllBy(String formulaColumn, String columnParamName, SelectColumn... groupByColumns) { return coalesce().applyToAllBy(formulaColumn, columnParamName, groupByColumns); } @@ -383,7 +378,7 @@ public TableMap byExternal(boolean dropKeys, String... 
keyColumnNames) { @Override public Table rollup(ComboAggregateFactory comboAggregateFactory, boolean includeConstituents, - SelectColumn... columns) { + SelectColumn... columns) { return coalesce().rollup(comboAggregateFactory, includeConstituents, columns); } @@ -408,8 +403,7 @@ public Table snapshot(Table baseTable, boolean doInitialSnapshot, String... stam } @Override - public Table snapshotIncremental(Table rightTable, boolean doInitialSnapshot, - String... stampColumns) { + public Table snapshotIncremental(Table rightTable, boolean doInitialSnapshot, String... stampColumns) { return coalesce().snapshotIncremental(rightTable, doInitialSnapshot, stampColumns); } diff --git a/DB/src/main/java/io/deephaven/db/v2/UpdatableTable.java b/DB/src/main/java/io/deephaven/db/v2/UpdatableTable.java index f3547eef51a..e81d062c4ae 100644 --- a/DB/src/main/java/io/deephaven/db/v2/UpdatableTable.java +++ b/DB/src/main/java/io/deephaven/db/v2/UpdatableTable.java @@ -22,30 +22,27 @@ public class UpdatableTable extends QueryTable implements LiveTable { /** - * Interface provided to updater functions that allows index changes to be recorded for - * propagation. + * Interface provided to updater functions that allows index changes to be recorded for propagation. */ public interface IndexChangeRecorder { /** - * Flag key as an addition (or a modification if previously removed in this cycle). Must - * only be called in an updater function. + * Flag key as an addition (or a modification if previously removed in this cycle). Must only be called in an + * updater function. * * @param key The key */ void addIndex(long key); /** - * Flag key as a removal (if it wasn't added on this cycle). Must only be called in an - * updater function. + * Flag key as a removal (if it wasn't added on this cycle). Must only be called in an updater function. * * @param key The key */ void removeIndex(long key); /** - * Flag key as an modification (unless it was added this cycle). 
Must only be called in an - * updater function. + * Flag key as an modification (unless it was added this cycle). Must only be called in an updater function. * * @param key The key */ @@ -64,15 +61,15 @@ public interface Updater extends Consumer { private final IndexChangeRecorder indexChangeRecorder = new IndexChangeRecorderImpl(); private final TLongSet addedSet = - new TLongHashSet(Constants.DEFAULT_CAPACITY, Constants.DEFAULT_LOAD_FACTOR, NULL_LONG); + new TLongHashSet(Constants.DEFAULT_CAPACITY, Constants.DEFAULT_LOAD_FACTOR, NULL_LONG); private final TLongSet removedSet = - new TLongHashSet(Constants.DEFAULT_CAPACITY, Constants.DEFAULT_LOAD_FACTOR, NULL_LONG); + new TLongHashSet(Constants.DEFAULT_CAPACITY, Constants.DEFAULT_LOAD_FACTOR, NULL_LONG); private final TLongSet modifiedSet = - new TLongHashSet(Constants.DEFAULT_CAPACITY, Constants.DEFAULT_LOAD_FACTOR, NULL_LONG); + new TLongHashSet(Constants.DEFAULT_CAPACITY, Constants.DEFAULT_LOAD_FACTOR, NULL_LONG); public UpdatableTable(@NotNull final Index index, - @NotNull final Map nameToColumnSource, - @NotNull final Updater updater) { + @NotNull final Map nameToColumnSource, + @NotNull final Updater updater) { super(index, nameToColumnSource); this.updater = updater; } @@ -81,8 +78,7 @@ private class IndexChangeRecorderImpl implements IndexChangeRecorder { @Override public void addIndex(final long key) { - // if a key is removed and then added back before a refresh, it looks like it was - // modified + // if a key is removed and then added back before a refresh, it looks like it was modified if (removedSet.remove(key)) { modifiedSet.add(key); } else { diff --git a/DB/src/main/java/io/deephaven/db/v2/WouldMatchOperation.java b/DB/src/main/java/io/deephaven/db/v2/WouldMatchOperation.java index f6eb9ed9348..073478e9e21 100644 --- a/DB/src/main/java/io/deephaven/db/v2/WouldMatchOperation.java +++ b/DB/src/main/java/io/deephaven/db/v2/WouldMatchOperation.java @@ -26,10 +26,9 @@ import java.util.stream.Collectors; 
/** - * A table operation similar to {@link io.deephaven.db.tables.Table#where(String...)} except that - * instead of filtering the rows in the table, it appends new columns containing the result of the - * filter evaluation on each row of the table. It will re-evaluate cell values if any of the - * underlying filters are dynamic, and change. + * A table operation similar to {@link io.deephaven.db.tables.Table#where(String...)} except that instead of filtering + * the rows in the table, it appends new columns containing the result of the filter evaluation on each row of the + * table. It will re-evaluate cell values if any of the underlying filters are dynamic, and change. */ public class WouldMatchOperation implements QueryTable.MemoizableOperation { private static final ReadOnlyIndex EMPTY_INDEX = Index.FACTORY.getEmptyIndex(); @@ -65,13 +64,13 @@ SelectFilter getFilter() { final List parentColumns = parent.getDefinition().getColumnNames(); final List collidingColumns = matchColumns.stream() - .map(ColumnHolder::getColumnName) - .filter(parentColumns::contains) - .collect(Collectors.toList()); + .map(ColumnHolder::getColumnName) + .filter(parentColumns::contains) + .collect(Collectors.toList()); if (!collidingColumns.isEmpty()) { - throw new UncheckedTableException("The table already contains the following columns: " - + String.join(", ", collidingColumns)); + throw new UncheckedTableException( + "The table already contains the following columns: " + String.join(", ", collidingColumns)); } } @@ -91,32 +90,28 @@ public Result initialize(boolean usePrev, long beforeClock) { // TODO: Do I need a closer for getPrevIndex? Some ops have it.... try (final SafeCloseableList closer = new SafeCloseableList()) { - final Index fullIndex = - usePrev ? closer.add(parent.getIndex().getPrevIndex()) : parent.getIndex(); + final Index fullIndex = usePrev ? 
closer.add(parent.getIndex().getPrevIndex()) : parent.getIndex(); final Index indexToUse = closer.add(fullIndex.clone()); final List dependencies = new ArrayList<>(); - final Map newColumns = - new LinkedHashMap<>(parent.getColumnSourceMap()); + final Map newColumns = new LinkedHashMap<>(parent.getColumnSourceMap()); matchColumns.forEach(holder -> { final SelectFilter filter = holder.getFilter(); filter.init(parent.getDefinition()); final Index result = filter.filter(indexToUse, fullIndex, parent, usePrev); - holder.column = - new IndexWrapperColumnSource(holder.getColumnName(), parent, result, filter); + holder.column = new IndexWrapperColumnSource(holder.getColumnName(), parent, result, filter); if (newColumns.put(holder.getColumnName(), holder.column) != null) { // This should never happen or the check in the constructor has failed. - throw new UncheckedTableException("In match(), column " + holder.getColumnName() - + " already exists in the table."); + throw new UncheckedTableException( + "In match(), column " + holder.getColumnName() + " already exists in the table."); } // Accumulate dependencies if (filter instanceof NotificationQueue.Dependency) { dependencies.add((NotificationQueue.Dependency) filter); } else if (filter instanceof DependencyStreamProvider) { - ((DependencyStreamProvider) filter).getDependencyStream() - .forEach(dependencies::add); + ((DependencyStreamProvider) filter).getDependencyStream().forEach(dependencies::add); } if (filter.isRefreshing()) { @@ -126,8 +121,8 @@ public Result initialize(boolean usePrev, long beforeClock) { this.resultTable = new QueryTable(parent.getIndex(), newColumns); - transformer = parent.newModifiedColumnSetTransformer(resultTable, - parent.getDefinition().getColumnNamesArray()); + transformer = + parent.newModifiedColumnSetTransformer(resultTable, parent.getDefinition().getColumnNamesArray()); // Set up the column to be a listener for recomputes matchColumns.forEach(mc -> { @@ -141,20 +136,17 @@ public Result 
initialize(boolean usePrev, long beforeClock) { ShiftAwareListener eventualListener = null; MergedListener eventualMergedListener = null; if (parent.isRefreshing()) { - // If we're refreshing, our final listener needs to handle upstream updates from a - // recorder. + // If we're refreshing, our final listener needs to handle upstream updates from a recorder. final ListenerRecorder recorder = - new ListenerRecorder("where(" + makeDescription() + ")", parent, resultTable); + new ListenerRecorder("where(" + makeDescription() + ")", parent, resultTable); final Listener listener = new Listener(recorder, dependencies); recorder.setMergedListener(listener); eventualMergedListener = listener; eventualListener = recorder; } else if (anyRefreshing.isTrue()) { - // If not, then we still need to update if any of our filters request updates. We'll - // use the - // merge listener to handle that. Note that the filters themselves should set the - // table to + // If not, then we still need to update if any of our filters request updates. We'll use the + // merge listener to handle that. Note that the filters themselves should set the table to // refreshing. eventualMergedListener = new StaticListener(dependencies); } @@ -175,64 +167,61 @@ public MemoizedOperationKey getMemoizedOperationKey() { } /** - * A {@link MergedListener} implementation for - * {@link io.deephaven.db.tables.Table#wouldMatch(WouldMatchPair...)} when the parent table is - * ticking. + * A {@link MergedListener} implementation for {@link io.deephaven.db.tables.Table#wouldMatch(WouldMatchPair...)} + * when the parent table is ticking. 
*/ private class Listener extends MergedListener { final ListenerRecorder recorder; Listener(@NotNull ListenerRecorder recorder, - @NotNull List dependencies) { + @NotNull List dependencies) { super(Collections.singletonList(recorder), - dependencies, - "merge(" + makeDescription() + ")", - resultTable); + dependencies, + "merge(" + makeDescription() + ")", + resultTable); this.recorder = recorder; } @Override protected void process() { final ShiftAwareListener.Update downstream = new ShiftAwareListener.Update( - recorder.getAdded().clone(), - recorder.getRemoved().clone(), - recorder.getModified().clone(), - recorder.getShifted(), - resultTable.modifiedColumnSet); + recorder.getAdded().clone(), + recorder.getRemoved().clone(), + recorder.getModified().clone(), + recorder.getShifted(), + resultTable.modifiedColumnSet); - transformer.clearAndTransform(recorder.getModifiedColumnSet(), - downstream.modifiedColumnSet); + transformer.clearAndTransform(recorder.getModifiedColumnSet(), downstream.modifiedColumnSet); - // Propagate the updates to each column, inserting any additional modified rows - // post-shift that were produced + // Propagate the updates to each column, inserting any additional modified rows post-shift that were + // produced // by each column (ie. 
if a filter required a recompute matchColumns.stream() - .map(vc -> vc.column.update(recorder.getAdded(), - recorder.getRemoved(), - recorder.getModified(), - recorder.getModifiedPreShift(), - recorder.getShifted(), - recorder.getModifiedColumnSet(), - downstream.modifiedColumnSet, - parent)) - .filter(Objects::nonNull) - .forEach(downstream.modified::insert); + .map(vc -> vc.column.update(recorder.getAdded(), + recorder.getRemoved(), + recorder.getModified(), + recorder.getModifiedPreShift(), + recorder.getShifted(), + recorder.getModifiedColumnSet(), + downstream.modifiedColumnSet, + parent)) + .filter(Objects::nonNull) + .forEach(downstream.modified::insert); resultTable.notifyListeners(downstream); } } /** - * A {@link MergedListener} implementation for - * {@link io.deephaven.db.tables.Table#wouldMatch(WouldMatchPair...)} when * the parent table is - * static (not ticking). + * A {@link MergedListener} implementation for {@link io.deephaven.db.tables.Table#wouldMatch(WouldMatchPair...)} + * when * the parent table is static (not ticking). 
*/ private class StaticListener extends MergedListener { StaticListener(@NotNull List dependencies) { super(Collections.emptyList(), - dependencies, - "wouldMatch(" + makeDescription() + ")", - resultTable); + dependencies, + "wouldMatch(" + makeDescription() + ")", + resultTable); } @Override @@ -242,10 +231,10 @@ protected void process() { if (holder.column.recomputeRequested()) { if (downstream == null) { downstream = new ShiftAwareListener.Update(Index.FACTORY.getEmptyIndex(), - Index.FACTORY.getEmptyIndex(), - Index.FACTORY.getEmptyIndex(), - IndexShiftData.EMPTY, - resultTable.modifiedColumnSet); + Index.FACTORY.getEmptyIndex(), + Index.FACTORY.getEmptyIndex(), + IndexShiftData.EMPTY, + resultTable.modifiedColumnSet); } downstream.modifiedColumnSet.setAll(holder.getColumnName()); @@ -272,7 +261,7 @@ private String makeDescription() { } private static class IndexWrapperColumnSource extends AbstractColumnSource - implements MutableColumnSourceGetDefaults.ForBoolean, SelectFilter.RecomputeListener { + implements MutableColumnSourceGetDefaults.ForBoolean, SelectFilter.RecomputeListener { private Index source; private final SelectFilter filter; private boolean doRecompute = false; @@ -281,14 +270,13 @@ private static class IndexWrapperColumnSource extends AbstractColumnSource destination, - @NotNull OrderedKeys orderedKeys) { + @NotNull WritableChunk destination, @NotNull OrderedKeys orderedKeys) { try (final SafeCloseableList closer = new SafeCloseableList()) { final Index keysToCheck = closer.add(orderedKeys.asIndex()); final Index intersection = closer.add(keysToCheck.intersect(source)); @@ -314,19 +301,16 @@ public void fillChunk(@NotNull FillContext context, @Override public void fillPrevChunk(@NotNull FillContext context, - @NotNull WritableChunk destination, - @NotNull OrderedKeys orderedKeys) { + @NotNull WritableChunk destination, @NotNull OrderedKeys orderedKeys) { try (final SafeCloseableList closer = new SafeCloseableList()) { final Index keysToCheck = 
closer.add(orderedKeys.asIndex()); - final Index intersection = - closer.add(keysToCheck.getPrevIndex().intersect(source.getPrevIndex())); + final Index intersection = closer.add(keysToCheck.getPrevIndex().intersect(source.getPrevIndex())); fillChunkInternal(keysToCheck, intersection, orderedKeys.intSize(), destination); } } /** - * Fill a chunk by walking the set of requested keys and the intersection of these keys - * together. + * Fill a chunk by walking the set of requested keys and the intersection of these keys together. * * @param keysToCheck the requested set of keys for the chunk * @param intersection the intersection of keys to check with the current source @@ -334,9 +318,9 @@ public void fillPrevChunk(@NotNull FillContext context, * @param destination the destination chunk */ private void fillChunkInternal(Index keysToCheck, Index intersection, int orderedKeysSize, - @NotNull WritableChunk destination) { + @NotNull WritableChunk destination) { final WritableObjectChunk writeable = - destination.asWritableObjectChunk(); + destination.asWritableObjectChunk(); if (intersection.empty()) { writeable.fillWithValue(0, orderedKeysSize, false); return; @@ -365,8 +349,7 @@ private void fillChunkInternal(Index keysToCheck, Index intersection, int ordere } } - // If this is true, we've bailed out of the lockstep iteration because there are no more - // intersections, + // If this is true, we've bailed out of the lockstep iteration because there are no more intersections, // so we can fill the rest with false. if (keysIterator.hasNext()) { writeable.fillWithValue(chunkIndex, orderedKeysSize - chunkIndex, false); @@ -407,9 +390,8 @@ public void setIsRefreshing(boolean refreshing) { } /** - * Update the internal index with the upstream - * {@link io.deephaven.db.v2.ShiftAwareListener.Update}. If the column was recomputed, - * return an optional containing rows that were modified. 
+ * Update the internal index with the upstream {@link io.deephaven.db.v2.ShiftAwareListener.Update}. If the + * column was recomputed, return an optional containing rows that were modified. * * @param added the set of added rows in the update * @param removed the set of removed rows in the update @@ -423,18 +405,15 @@ public void setIsRefreshing(boolean refreshing) { * @return an Optional containing rows modified to add to the downstream update */ @Nullable - private Index update(Index added, Index removed, Index modified, Index modPreShift, - IndexShiftData shift, - ModifiedColumnSet upstreamModified, ModifiedColumnSet downstreamModified, - QueryTable table) { - final boolean affected = - upstreamModified != null && upstreamModified.containsAny(possibleUpstreamModified); + private Index update(Index added, Index removed, Index modified, Index modPreShift, IndexShiftData shift, + ModifiedColumnSet upstreamModified, ModifiedColumnSet downstreamModified, + QueryTable table) { + final boolean affected = upstreamModified != null && upstreamModified.containsAny(possibleUpstreamModified); // Remove the removed keys, and pre-shift modifieds source.remove(removed); - // If this column is affected, removed the modifieds pre-shift. We will refilter and add - // back + // If this column is affected, removed the modifieds pre-shift. We will refilter and add back // ones that match. If not, just leave them, the shift will preserve them. 
if (affected) { source.remove(modPreShift); @@ -469,14 +448,13 @@ private Index update(Index added, Index removed, Index modified, Index modPreShi private Index recompute(QueryTable table, ReadOnlyIndex upstreamAdded) { doRecompute = false; - final Index refiltered = - filter.filter(table.getIndex().clone(), table.getIndex(), table, false); + final Index refiltered = filter.filter(table.getIndex().clone(), table.getIndex(), table, false); // This is just Xor, but there is no Index op for that final Index newlySet = refiltered.minus(source); final Index justCleared = source.minus(refiltered); final Index rowsChanged = newlySet.union(justCleared) - .minus(upstreamAdded); + .minus(upstreamAdded); source.update(newlySet, justCleared); diff --git a/DB/src/main/java/io/deephaven/db/v2/by/AbsSumStateFactory.java b/DB/src/main/java/io/deephaven/db/v2/by/AbsSumStateFactory.java index 7c23a1a31da..1c43d76d098 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/AbsSumStateFactory.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/AbsSumStateFactory.java @@ -13,7 +13,7 @@ public AbsSumStateFactory() {} @Override public IterativeChunkedAggregationOperator getChunkedOperator(Class type, String name, - boolean exposeInternalColumns) { + boolean exposeInternalColumns) { return getAbsSumChunked(type, name); } diff --git a/DB/src/main/java/io/deephaven/db/v2/by/AddOnlyFirstOrLastChunkedOperator.java b/DB/src/main/java/io/deephaven/db/v2/by/AddOnlyFirstOrLastChunkedOperator.java index 20ea5fa4e1c..e19d0870baa 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/AddOnlyFirstOrLastChunkedOperator.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/AddOnlyFirstOrLastChunkedOperator.java @@ -15,15 +15,15 @@ public class AddOnlyFirstOrLastChunkedOperator extends BaseAddOnlyFirstOrLastChunkedOperator { AddOnlyFirstOrLastChunkedOperator(boolean isFirst, MatchPair[] resultPairs, Table originalTable, - String exposeRedirectionAs) { + String exposeRedirectionAs) { super(isFirst, resultPairs, 
originalTable, exposeRedirectionAs); } @Override public void addChunk(BucketedContext bucketedContext, Chunk values, - LongChunk inputIndices, IntChunk destinations, - IntChunk startPositions, IntChunk length, - WritableBooleanChunk stateModified) { + LongChunk inputIndices, IntChunk destinations, + IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { for (int ii = 0; ii < startPositions.size(); ++ii) { final int startPosition = startPositions.get(ii); final int runLength = length.get(ii); @@ -34,14 +34,12 @@ public void addChunk(BucketedContext bucketedContext, Chunk va } @Override - public boolean addChunk(SingletonContext singletonContext, int chunkSize, - Chunk values, LongChunk inputIndices, - long destination) { + public boolean addChunk(SingletonContext singletonContext, int chunkSize, Chunk values, + LongChunk inputIndices, long destination) { return addChunk(inputIndices, 0, inputIndices.size(), destination); } - private boolean addChunk(LongChunk indices, int start, int length, - long destination) { + private boolean addChunk(LongChunk indices, int start, int length, long destination) { if (length == 0) { return false; } diff --git a/DB/src/main/java/io/deephaven/db/v2/by/AddOnlyMinMaxByStateFactoryImpl.java b/DB/src/main/java/io/deephaven/db/v2/by/AddOnlyMinMaxByStateFactoryImpl.java index a4b8f4c0a7d..7d014dbf240 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/AddOnlyMinMaxByStateFactoryImpl.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/AddOnlyMinMaxByStateFactoryImpl.java @@ -5,9 +5,8 @@ package io.deephaven.db.v2.by; /** - * If you've got a table that is grow only, this will do a min/max calculation without requiring any - * state. The limitation is that if you modify or remove a row it will throw an - * UnsupportedOperationException. + * If you've got a table that is grow only, this will do a min/max calculation without requiring any state. 
The + * limitation is that if you modify or remove a row it will throw an UnsupportedOperationException. */ public class AddOnlyMinMaxByStateFactoryImpl extends MinMaxByStateFactoryImpl { public AddOnlyMinMaxByStateFactoryImpl(boolean minimum) { diff --git a/DB/src/main/java/io/deephaven/db/v2/by/AggType.java b/DB/src/main/java/io/deephaven/db/v2/by/AggType.java index 023c001c75c..32cb436d0b4 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/AggType.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/AggType.java @@ -32,8 +32,8 @@ public enum AggType { /** Collect the distinct items from the column */ Distinct, /** - * Display the singular value from the column if it is unique, or a default value if none are - * present, or it is not unique + * Display the singular value from the column if it is unique, or a default value if none are present, or it is not + * unique */ Unique, /** Only valid in a TotalsTableBuilder to indicate we should not perform any aggregation. */ diff --git a/DB/src/main/java/io/deephaven/db/v2/by/AggregationContext.java b/DB/src/main/java/io/deephaven/db/v2/by/AggregationContext.java index 4f9953a5971..6467b1a2ae4 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/AggregationContext.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/AggregationContext.java @@ -25,8 +25,8 @@ /** * Encapsulates the operators and inputs for an aggregation operation. *

    - * Provides utility methods for the ChunkedOperationAggregationHelper to manipulate and interrogate - * the operators, inputs and outputs. + * Provides utility methods for the ChunkedOperationAggregationHelper to manipulate and interrogate the operators, + * inputs and outputs. */ class AggregationContext { /** @@ -35,8 +35,7 @@ class AggregationContext { final IterativeChunkedAggregationOperator[] operators; /** - * Our input columns (currently one per operator, but that will change to support zero or - * multiple input operators). + * Our input columns (currently one per operator, but that will change to support zero or multiple input operators). */ final ChunkSource.WithPrev[] inputColumns; @@ -46,8 +45,7 @@ class AggregationContext { final String[][] inputNames; /** - * Does any operator require indices? See - * {@link IterativeChunkedAggregationOperator#requiresIndices()}. + * Does any operator require indices? See {@link IterativeChunkedAggregationOperator#requiresIndices()}. */ private final boolean requiresIndices; @@ -74,39 +72,36 @@ class AggregationContext { private final AggregationContextTransformer[] transformers; /** - * For a given operator index, the first slot that the column exists in. If the value in index - * oi is equal to oi, then we will read from this column. If the value is not equal to oi - * (because it is -1 for a null column or a value less than oi), we will reuse an already read - * value. + * For a given operator index, the first slot that the column exists in. If the value in index oi is equal to oi, + * then we will read from this column. If the value is not equal to oi (because it is -1 for a null column or a + * value less than oi), we will reuse an already read value. 
*/ private final int[] inputSlots; AggregationContext(IterativeChunkedAggregationOperator[] operators, String[][] inputNames, - ChunkSource.WithPrev[] inputColumns) { + ChunkSource.WithPrev[] inputColumns) { this(operators, inputNames, inputColumns, true); } AggregationContext(IterativeChunkedAggregationOperator[] operators, String[][] inputNames, - ChunkSource.WithPrev[] inputColumns, boolean addedBackModified) { + ChunkSource.WithPrev[] inputColumns, boolean addedBackModified) { this(operators, inputNames, inputColumns, null, true); } AggregationContext(IterativeChunkedAggregationOperator[] operators, String[][] inputNames, - ChunkSource.WithPrev[] inputColumns, AggregationContextTransformer[] transformers, - boolean addedBackModified) { + ChunkSource.WithPrev[] inputColumns, AggregationContextTransformer[] transformers, + boolean addedBackModified) { this.operators = operators; this.inputNames = inputNames; this.inputColumns = inputColumns; this.transformers = transformers; this.addedBackModified = addedBackModified; - requiresIndices = Arrays.stream(this.operators) - .anyMatch(IterativeChunkedAggregationOperator::requiresIndices); + requiresIndices = Arrays.stream(this.operators).anyMatch(IterativeChunkedAggregationOperator::requiresIndices); requiresInputs = Arrays.stream(this.inputColumns).anyMatch(Objects::nonNull); - unchunkedIndices = Arrays.stream(this.operators) - .allMatch(IterativeChunkedAggregationOperator::unchunkedIndex); + unchunkedIndices = Arrays.stream(this.operators).allMatch(IterativeChunkedAggregationOperator::unchunkedIndex); // noinspection unchecked - resultColumns = merge(Arrays.stream(this.operators) - .map(IterativeChunkedAggregationOperator::getResultColumns).toArray(Map[]::new)); + resultColumns = merge(Arrays.stream(this.operators).map(IterativeChunkedAggregationOperator::getResultColumns) + .toArray(Map[]::new)); this.inputSlots = new int[inputColumns.length]; for (int currentSlot = 0; currentSlot < inputSlots.length; ++currentSlot) 
{ @@ -128,8 +123,7 @@ class AggregationContext { } } - private static Map> merge( - Map>[] operatorResultColumns) { + private static Map> merge(Map>[] operatorResultColumns) { final Map> mergedResult = new LinkedHashMap<>(); for (final Map> operatorColumns : operatorResultColumns) { for (final Map.Entry> entry : operatorColumns.entrySet()) { @@ -183,9 +177,8 @@ void startTrackingPrevValues() { } /** - * The helper passes in the result column source map, which contains the key columns if any. The - * context is responsible for filling in the columns generated by the operators or - * transformations. + * The helper passes in the result column source map, which contains the key columns if any. The context is + * responsible for filling in the columns generated by the operators or transformations. * * @param keyColumns the keyColumns as input, the result column source map as output. */ @@ -216,8 +209,7 @@ QueryTable transformResult(QueryTable table) { * @return an array, parallel to operators, of the input column sets for each operator */ ModifiedColumnSet[] getInputModifiedColumnSets(QueryTable input) { - final ModifiedColumnSet[] inputModifiedColumnSet = - new ModifiedColumnSet[inputColumns.length]; + final ModifiedColumnSet[] inputModifiedColumnSet = new ModifiedColumnSet[inputColumns.length]; for (int ii = 0; ii < inputColumns.length; ++ii) { inputModifiedColumnSet[ii] = input.newModifiedColumnSet(inputNames[ii]); } @@ -225,8 +217,8 @@ ModifiedColumnSet[] getInputModifiedColumnSets(QueryTable input) { } /** - * Allow all operators to perform any internal state keeping needed for destinations that were - * added during initialization. + * Allow all operators to perform any internal state keeping needed for destinations that were added during + * initialization. 
* * @param resultTable The result {@link QueryTable} after initialization */ @@ -237,32 +229,30 @@ void propagateInitialStateToOperators(@NotNull final QueryTable resultTable) { } /** - * Initialize refreshing result support for all operators. As a side effect, get an array of - * factories to produce result modified column sets for each operator from the upstream modified - * column set. Each factory is used in turn when its operator reports a modification, in order - * to produce a final result modified column set. + * Initialize refreshing result support for all operators. As a side effect, get an array of factories to produce + * result modified column sets for each operator from the upstream modified column set. Each factory is used in turn + * when its operator reports a modification, in order to produce a final result modified column set. * * @param resultTable The result table - * @param aggregationUpdateListener The aggregation update listener, which may be needed for - * referential integrity - * @return An array, parallel to operators, of factories that produce a result modified column - * set from the upstream modified column set + * @param aggregationUpdateListener The aggregation update listener, which may be needed for referential integrity + * @return An array, parallel to operators, of factories that produce a result modified column set from the upstream + * modified column set */ UnaryOperator[] initializeRefreshing(@NotNull final QueryTable resultTable, - @NotNull final LivenessReferent aggregationUpdateListener) { + @NotNull final LivenessReferent aggregationUpdateListener) { // noinspection unchecked final UnaryOperator[] resultModifiedColumnSetFactories = - new UnaryOperator[inputColumns.length]; + new UnaryOperator[inputColumns.length]; for (int ii = 0; ii < inputColumns.length; ++ii) { resultModifiedColumnSetFactories[ii] = - operators[ii].initializeRefreshing(resultTable, aggregationUpdateListener); + 
operators[ii].initializeRefreshing(resultTable, aggregationUpdateListener); } return resultModifiedColumnSetFactories; } /** - * Allow all operators to reset any per-step internal state. Note that the arguments to this - * method should not be mutated in any way. + * Allow all operators to reset any per-step internal state. Note that the arguments to this method should not be + * mutated in any way. * * @param upstream The upstream {@link ShiftAwareListener.Update} */ @@ -273,17 +263,16 @@ void resetOperatorsForStep(@NotNull final ShiftAwareListener.Update upstream) { } /** - * Allow all operators to perform any internal state keeping needed for destinations that were - * added (went from 0 keys to > 0), removed (went from > 0 keys to 0), or modified (keys - * added or removed, or keys modified) by this iteration. Note that the arguments to this method - * should not be mutated in any way. + * Allow all operators to perform any internal state keeping needed for destinations that were added (went from 0 + * keys to > 0), removed (went from > 0 keys to 0), or modified (keys added or removed, or keys modified) by + * this iteration. Note that the arguments to this method should not be mutated in any way. 
* - * @param downstream The downstream {@link ShiftAwareListener.Update} (which does not - * have its {@link ModifiedColumnSet} finalized yet) + * @param downstream The downstream {@link ShiftAwareListener.Update} (which does not have its + * {@link ModifiedColumnSet} finalized yet) * @param newDestinations New destinations added on this update */ void propagateChangesToOperators(@NotNull final ShiftAwareListener.Update downstream, - @NotNull final ReadOnlyIndex newDestinations) { + @NotNull final ReadOnlyIndex newDestinations) { for (final IterativeChunkedAggregationOperator operator : operators) { operator.propagateUpdates(downstream, newDestinations); } @@ -296,7 +285,7 @@ void propagateChangesToOperators(@NotNull final ShiftAwareListener.Update downst * @param sourceEntry The {@link UpdatePerformanceTracker.Entry} for the failed listener */ void propagateFailureToOperators(@NotNull final Throwable originalException, - @NotNull final UpdatePerformanceTracker.Entry sourceEntry) { + @NotNull final UpdatePerformanceTracker.Entry sourceEntry) { for (final IterativeChunkedAggregationOperator operator : operators) { operator.propagateFailure(originalException, sourceEntry); } @@ -309,8 +298,7 @@ void propagateFailureToOperators(@NotNull final Throwable originalException, * @param getContexts the array to initialize with getContexts * @param maxSize maximum size to allocate */ - void initializeGetContexts(SharedContext sharedContext, ChunkSource.GetContext[] getContexts, - long maxSize) { + void initializeGetContexts(SharedContext sharedContext, ChunkSource.GetContext[] getContexts, long maxSize) { final int chunkSize = ChunkedOperatorAggregationHelper.chunkSize(maxSize); for (int oi = 0; oi < size(); ++oi) { if (inputSlot(oi) != oi) { @@ -328,8 +316,8 @@ void initializeGetContexts(SharedContext sharedContext, ChunkSource.GetContext[] * @param maxSize maximum size to allocate * @param mask only initialize getContexts[i] if mask[i] is true */ - void 
initializeGetContexts(SharedContext sharedContext, ChunkSource.GetContext[] getContexts, - long maxSize, boolean[] mask) { + void initializeGetContexts(SharedContext sharedContext, ChunkSource.GetContext[] getContexts, long maxSize, + boolean[] mask) { final int chunkSize = ChunkedOperatorAggregationHelper.chunkSize(maxSize); for (int oi = 0; oi < size(); ++oi) { if (!mask[oi]) @@ -337,8 +325,7 @@ void initializeGetContexts(SharedContext sharedContext, ChunkSource.GetContext[] final int inputSlot = inputSlot(oi); if (inputSlot < 0 || getContexts[inputSlot] != null) continue; - getContexts[inputSlot] = - inputColumns[inputSlot].makeGetContext(chunkSize, sharedContext); + getContexts[inputSlot] = inputColumns[inputSlot].makeGetContext(chunkSize, sharedContext); } } @@ -347,8 +334,7 @@ void initializeGetContexts(SharedContext sharedContext, ChunkSource.GetContext[] * * @param contexts the array to initialize with getContexts */ - void initializeSingletonContexts( - IterativeChunkedAggregationOperator.SingletonContext[] contexts, long maxSize) { + void initializeSingletonContexts(IterativeChunkedAggregationOperator.SingletonContext[] contexts, long maxSize) { final int chunkSize = ChunkedOperatorAggregationHelper.chunkSize(maxSize); for (int oi = 0; oi < size(); ++oi) { contexts[oi] = operators[oi].makeSingletonContext(chunkSize); @@ -361,9 +347,8 @@ void initializeSingletonContexts( * @param contexts the array to initialize with getContexts * @param mask the columns to initialize */ - private void initializeSingletonContexts( - IterativeChunkedAggregationOperator.SingletonContext[] contexts, long maxSize, - boolean[] mask) { + private void initializeSingletonContexts(IterativeChunkedAggregationOperator.SingletonContext[] contexts, + long maxSize, boolean[] mask) { final int chunkSize = ChunkedOperatorAggregationHelper.chunkSize(maxSize); for (int oi = 0; oi < size(); ++oi) { if (mask[oi]) { @@ -375,23 +360,20 @@ private void initializeSingletonContexts( /** * 
Initialize an array of singleton contexts based on an upstream update. */ - void initializeSingletonContexts( - IterativeChunkedAggregationOperator.SingletonContext[] opContexts, - ShiftAwareListener.Update upstream, boolean[] modifiedColumns) { - final long maxSize = - UpdateSizeCalculator.chunkSize(upstream, ChunkedOperatorAggregationHelper.CHUNK_SIZE); + void initializeSingletonContexts(IterativeChunkedAggregationOperator.SingletonContext[] opContexts, + ShiftAwareListener.Update upstream, boolean[] modifiedColumns) { + final long maxSize = UpdateSizeCalculator.chunkSize(upstream, ChunkedOperatorAggregationHelper.CHUNK_SIZE); if (upstream.removed.nonempty() || upstream.added.nonempty()) { initializeSingletonContexts(opContexts, maxSize); return; } - final boolean[] toInitialize = - computeInitializationMaskFromUpdate(upstream, modifiedColumns); + final boolean[] toInitialize = computeInitializationMaskFromUpdate(upstream, modifiedColumns); initializeSingletonContexts(opContexts, maxSize, toInitialize); } private boolean[] computeInitializationMaskFromUpdate(ShiftAwareListener.Update upstream, - boolean[] modifiedColumns) { + boolean[] modifiedColumns) { final boolean[] toInitialize = new boolean[size()]; if (requiresIndices() && upstream.shifted.nonempty()) { for (int ii = 0; ii < size(); ++ii) { @@ -418,8 +400,7 @@ private boolean[] computeInitializationMaskFromUpdate(ShiftAwareListener.Update * * @param contexts the array to initialize with getContexts */ - void initializeBucketedContexts(IterativeChunkedAggregationOperator.BucketedContext[] contexts, - long maxSize) { + void initializeBucketedContexts(IterativeChunkedAggregationOperator.BucketedContext[] contexts, long maxSize) { final int chunkSize = ChunkedOperatorAggregationHelper.chunkSize(maxSize); for (int oi = 0; oi < size(); ++oi) { contexts[oi] = operators[oi].makeBucketedContext(chunkSize); @@ -430,21 +411,18 @@ void initializeBucketedContexts(IterativeChunkedAggregationOperator.BucketedCont * 
Initialize an array of singleton contexts based on an upstream update. */ void initializeBucketedContexts(IterativeChunkedAggregationOperator.BucketedContext[] contexts, - ShiftAwareListener.Update upstream, boolean keysModified, boolean[] modifiedColumns) { - final long maxSize = - UpdateSizeCalculator.chunkSize(upstream, ChunkedOperatorAggregationHelper.CHUNK_SIZE); + ShiftAwareListener.Update upstream, boolean keysModified, boolean[] modifiedColumns) { + final long maxSize = UpdateSizeCalculator.chunkSize(upstream, ChunkedOperatorAggregationHelper.CHUNK_SIZE); if (upstream.added.nonempty() || upstream.removed.nonempty() || keysModified) { initializeBucketedContexts(contexts, maxSize); return; } - final boolean[] toInitialize = - computeInitializationMaskFromUpdate(upstream, modifiedColumns); + final boolean[] toInitialize = computeInitializationMaskFromUpdate(upstream, modifiedColumns); initializeBucketedContexts(contexts, maxSize, toInitialize); } - private void initializeBucketedContexts( - IterativeChunkedAggregationOperator.BucketedContext[] contexts, long maxSize, - boolean[] mask) { + private void initializeBucketedContexts(IterativeChunkedAggregationOperator.BucketedContext[] contexts, + long maxSize, boolean[] mask) { final int chunkSize = ChunkedOperatorAggregationHelper.chunkSize(maxSize); for (int oi = 0; oi < size(); ++oi) { if (mask[oi]) { @@ -474,16 +452,14 @@ PermuteKernel[] makePermuteKernels() { final PermuteKernel[] permuteKernels = new PermuteKernel[size()]; for (int oi = 0; oi < size(); ++oi) { if (inputSlot(oi) == oi) { - permuteKernels[oi] = - PermuteKernel.makePermuteKernel(inputColumns[oi].getChunkType()); + permuteKernels[oi] = PermuteKernel.makePermuteKernel(inputColumns[oi].getChunkType()); } } return permuteKernels; } /** - * Returns true if slots that are removed and then reincarnated on the same cycle should be - * marked as modified. 
+ * Returns true if slots that are removed and then reincarnated on the same cycle should be marked as modified. */ boolean addedBackModified() { return addedBackModified; diff --git a/DB/src/main/java/io/deephaven/db/v2/by/AggregationContextFactory.java b/DB/src/main/java/io/deephaven/db/v2/by/AggregationContextFactory.java index 08fcf4818d2..2277822d865 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/AggregationContextFactory.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/AggregationContextFactory.java @@ -4,18 +4,17 @@ import org.jetbrains.annotations.NotNull; /** - * Produces an AggregationContext for aggregations given a table and the names of the group by - * columns. + * Produces an AggregationContext for aggregations given a table and the names of the group by columns. */ public interface AggregationContextFactory { /** - * Should we allow substitution with a {@link KeyOnlyAggregationFactory} (e.g. selectDistinct) - * when there are only key columns? Instances whose operators could have side effects or are - * already {@link KeyOnlyAggregationFactory} should return false. + * Should we allow substitution with a {@link KeyOnlyAggregationFactory} (e.g. selectDistinct) when there are only + * key columns? Instances whose operators could have side effects or are already {@link KeyOnlyAggregationFactory} + * should return false. * - * @return Whether to allow a {@link KeyOnlyAggregationFactory} to be substituted for this when - * there are only key columns + * @return Whether to allow a {@link KeyOnlyAggregationFactory} to be substituted for this when there are only key + * columns */ default boolean allowKeyOnlySubstitution() { return false; @@ -28,6 +27,5 @@ default boolean allowKeyOnlySubstitution() { * @param groupByColumns The key column names * @return A new or safely reusable {@link AggregationContext} */ - AggregationContext makeAggregationContext(@NotNull Table table, - @NotNull String... 
groupByColumns); + AggregationContext makeAggregationContext(@NotNull Table table, @NotNull String... groupByColumns); } diff --git a/DB/src/main/java/io/deephaven/db/v2/by/AggregationContextTransformer.java b/DB/src/main/java/io/deephaven/db/v2/by/AggregationContextTransformer.java index cd6e08a8880..09f5a233e3f 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/AggregationContextTransformer.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/AggregationContextTransformer.java @@ -12,18 +12,16 @@ public interface AggregationContextTransformer { AggregationContextTransformer[] ZERO_LENGTH_AGGREGATION_CONTEXT_TRANSFORMER_ARRAY = - new AggregationContextTransformer[0]; + new AggregationContextTransformer[0]; /** - * After we have created the key columns, and the default result columns, allow each transformer - * to add additional columns to the result set that are not handled by the regular modified - * column set transformer, etc. logic. + * After we have created the key columns, and the default result columns, allow each transformer to add additional + * columns to the result set that are not handled by the regular modified column set transformer, etc. logic. */ default void resultColumnFixup(Map> resultColumns) {} /** - * Before we return the result, each transformer has a chance to replace it or change it as it - * sees fit. + * Before we return the result, each transformer has a chance to replace it or change it as it sees fit. * * Practically this is used to change the attributes for rollups. */ @@ -34,8 +32,7 @@ default QueryTable transformResult(QueryTable table) { /** * The helper calls the transformer with a suitable reverse lookup function for this table. * - * @param reverseLookup a function that translates an object to an integer position in our - * output. + * @param reverseLookup a function that translates an object to an integer position in our output. 
*/ default void setReverseLookupFunction(ToIntFunction reverseLookup) {} } diff --git a/DB/src/main/java/io/deephaven/db/v2/by/AggregationControl.java b/DB/src/main/java/io/deephaven/db/v2/by/AggregationControl.java index 8c8a0fbb90c..cac8613fae4 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/AggregationControl.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/AggregationControl.java @@ -6,8 +6,8 @@ import org.jetbrains.annotations.NotNull; /** - * Stateless "control" class for giving external code (e.g. unit tests) knobs to turn w.r.t. to how - * aggregations should be processed. + * Stateless "control" class for giving external code (e.g. unit tests) knobs to turn w.r.t. to how aggregations should + * be processed. */ @VisibleForTesting public class AggregationControl { @@ -33,8 +33,7 @@ public double getMaximumLoadFactor() { return IncrementalChunkedByAggregationStateManager.DEFAULT_MAX_LOAD_FACTOR; } - public boolean considerGrouping(@NotNull final Table inputTable, - @NotNull final ColumnSource[] sources) { + public boolean considerGrouping(@NotNull final Table inputTable, @NotNull final ColumnSource[] sources) { return !inputTable.isLive() && sources.length == 1; } @@ -42,10 +41,9 @@ public boolean shouldProbeShift(final long shiftSize, final int numStates) { return shiftSize <= numStates * 2; } - // boolean considerSymbolTables(@NotNull final Table inputTable, final boolean useGrouping, - // @NotNull final ColumnSource[] sources) { - // return !inputTable.isLive() && !useGrouping && sources.length == 1 && sources[0] instanceof - // SymbolTableSource; + // boolean considerSymbolTables(@NotNull final Table inputTable, final boolean useGrouping, @NotNull final + // ColumnSource[] sources) { + // return !inputTable.isLive() && !useGrouping && sources.length == 1 && sources[0] instanceof SymbolTableSource; // } // // boolean useSymbolTableLookupCaching() { @@ -56,10 +54,9 @@ public boolean shouldProbeShift(final long shiftSize, final int numStates) { // 
return symbolTableSize <= inputTableSize / 2; // } // - // boolean useUniqueTable(final boolean uniqueValues, final long maximumUniqueValue, final long - // minimumUniqueValue) { - // // We want to have one left over value for "no good" (Integer.MAX_VALUE), and then we need - // another value to + // boolean useUniqueTable(final boolean uniqueValues, final long maximumUniqueValue, final long minimumUniqueValue) + // { + // // We want to have one left over value for "no good" (Integer.MAX_VALUE), and then we need another value to // // represent that (max - min + 1) is the number of slots required. // return uniqueValues && (maximumUniqueValue - minimumUniqueValue) < (Integer.MAX_VALUE - 2); // } diff --git a/DB/src/main/java/io/deephaven/db/v2/by/AggregationFormulaStateFactory.java b/DB/src/main/java/io/deephaven/db/v2/by/AggregationFormulaStateFactory.java index 540406edc6d..74c5b504f14 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/AggregationFormulaStateFactory.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/AggregationFormulaStateFactory.java @@ -41,7 +41,7 @@ public boolean equals(Object o) { return false; final MemoKey memoKey = (MemoKey) o; return Objects.equals(formula, memoKey.formula) && - Objects.equals(columnParamName, memoKey.columnParamName); + Objects.equals(columnParamName, memoKey.columnParamName); } @Override diff --git a/DB/src/main/java/io/deephaven/db/v2/by/AggregationHelper.java b/DB/src/main/java/io/deephaven/db/v2/by/AggregationHelper.java index ef9ac0f1671..327dcf1ec95 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/AggregationHelper.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/AggregationHelper.java @@ -26,8 +26,7 @@ import static io.deephaven.datastructures.util.CollectionUtil.ZERO_LENGTH_STRING_ARRAY; /** - * Implementation for chunk-oriented aggregation operations, including {@link Table#by} and - * {@link Table#byExternal}. 
+ * Implementation for chunk-oriented aggregation operations, including {@link Table#by} and {@link Table#byExternal}. */ @SuppressWarnings("rawtypes") public class AggregationHelper { @@ -38,14 +37,14 @@ public class AggregationHelper { private AggregationHelper() {} public static QueryTable by(@NotNull final QueryTable inputTable, - @NotNull final SelectColumn... keyColumns) { + @NotNull final SelectColumn... keyColumns) { return by(AggregationControl.DEFAULT, inputTable, keyColumns); } @VisibleForTesting public static QueryTable by(@NotNull final AggregationControl aggregationControl, - @NotNull final QueryTable inputTable, - @NotNull final SelectColumn... keyColumns) { + @NotNull final QueryTable inputTable, + @NotNull final SelectColumn... keyColumns) { // If we have no key columns aggregate all columns with no hashing if (keyColumns.length == 0) { return noKeyBy(inputTable); @@ -58,10 +57,8 @@ public static QueryTable by(@NotNull final AggregationControl aggregationControl final String[] aggregatedColumnNames; final ColumnSource[] keyColumnSources; { - final Map keyColumnSourceMap = - new LinkedHashMap<>(keyColumns.length); - final Map fullColumnSourceMap = - new LinkedHashMap<>(existingColumnSourceMap); + final Map keyColumnSourceMap = new LinkedHashMap<>(keyColumns.length); + final Map fullColumnSourceMap = new LinkedHashMap<>(existingColumnSourceMap); Arrays.stream(keyColumns).forEachOrdered((final SelectColumn keyColumn) -> { keyColumn.initInputs(inputTable.getIndex(), fullColumnSourceMap); @@ -79,185 +76,166 @@ public static QueryTable by(@NotNull final AggregationControl aggregationControl }); keyColumnNames = keyColumnSourceMap.keySet().toArray(ZERO_LENGTH_STRING_ARRAY); aggregatedColumnNames = existingColumnSourceMap.keySet().stream() - .filter(columnSource -> !keyColumnSourceMap.containsKey(columnSource)) - .toArray(String[]::new); - keyColumnSources = - keyColumnSourceMap.values().toArray(ColumnSource.ZERO_LENGTH_COLUMN_SOURCE_ARRAY); + 
.filter(columnSource -> !keyColumnSourceMap.containsKey(columnSource)).toArray(String[]::new); + keyColumnSources = keyColumnSourceMap.values().toArray(ColumnSource.ZERO_LENGTH_COLUMN_SOURCE_ARRAY); } // If we can use an existing static grouping, convert that to a table final Map groupingForAggregation = - maybeGetGroupingForAggregation(aggregationControl, inputTable, keyColumnSources); + maybeGetGroupingForAggregation(aggregationControl, inputTable, keyColumnSources); if (groupingForAggregation != null) { return staticGroupedBy(existingColumnSourceMap, keyColumnNames[0], keyColumnSources[0], - groupingForAggregation); + groupingForAggregation); } // Perform a full hashtable backed aggregation if (inputTable.isRefreshing()) { - return incrementalHashedBy(aggregationControl, inputTable, existingColumnSourceMap, - keyColumnNames, aggregatedColumnNames, keyColumnSources, - keyColumnUpstreamInputColumnNames); + return incrementalHashedBy(aggregationControl, inputTable, existingColumnSourceMap, keyColumnNames, + aggregatedColumnNames, keyColumnSources, keyColumnUpstreamInputColumnNames); } - return staticHashedBy(aggregationControl, inputTable, existingColumnSourceMap, - keyColumnNames, aggregatedColumnNames, keyColumnSources); + return staticHashedBy(aggregationControl, inputTable, existingColumnSourceMap, keyColumnNames, + aggregatedColumnNames, keyColumnSources); } @NotNull private static QueryTable noKeyBy(@NotNull final QueryTable inputTable) { final Mutable resultHolder = new MutableObject<>(); final ShiftAwareSwapListener swapListener = - inputTable.createSwapListenerIfRefreshing(ShiftAwareSwapListener::new); + inputTable.createSwapListenerIfRefreshing(ShiftAwareSwapListener::new); inputTable.initializeWithSnapshot("by()-Snapshot", swapListener, - (final boolean usePrev, final long beforeClockValue) -> { - final ColumnSource resultIndexColumnSource = - new SingleValueObjectColumnSource<>(inputTable.getIndex()); - final boolean empty = - usePrev ? 
inputTable.getIndex().firstKeyPrev() == Index.NULL_KEY - : inputTable.isEmpty(); - final QueryTable resultTable = new QueryTable( - Index.FACTORY.getFlatIndex(empty ? 0 : 1), - inputTable.getColumnSourceMap().entrySet().stream().collect(Collectors.toMap( - Map.Entry::getKey, - (final Map.Entry columnNameToSourceEntry) -> { - // noinspection unchecked - final AggregateColumnSource aggregateColumnSource = - AggregateColumnSource.make(columnNameToSourceEntry.getValue(), - resultIndexColumnSource); - aggregateColumnSource.startTrackingPrevValues(); - return aggregateColumnSource; - }, - Assert::neverInvoked, - LinkedHashMap::new))); - if (swapListener != null) { - final ModifiedColumnSet.Transformer transformer = - inputTable.newModifiedColumnSetTransformer( - inputTable.getDefinition().getColumnNamesArray(), - resultTable.getDefinition().getColumnNames().stream() - .map(resultTable::newModifiedColumnSet) - .toArray(ModifiedColumnSet[]::new)); - final ShiftAwareListener aggregationUpdateListener = - new BaseTable.ShiftAwareListenerImpl("by()", inputTable, resultTable) { - @Override - public final void onUpdate(@NotNull final Update upstream) { - final boolean wasEmpty = - inputTable.getIndex().firstKeyPrev() == Index.NULL_KEY; - final boolean isEmpty = inputTable.getIndex().empty(); - final Index added; - final Index removed; - final Index modified; - final ModifiedColumnSet modifiedColumnSet; - if (wasEmpty) { - if (isEmpty) { - // empty -> empty: No change to report, we probably - // shouldn't even be notified - return; + (final boolean usePrev, final long beforeClockValue) -> { + final ColumnSource resultIndexColumnSource = + new SingleValueObjectColumnSource<>(inputTable.getIndex()); + final boolean empty = + usePrev ? inputTable.getIndex().firstKeyPrev() == Index.NULL_KEY : inputTable.isEmpty(); + final QueryTable resultTable = new QueryTable( + Index.FACTORY.getFlatIndex(empty ? 
0 : 1), + inputTable.getColumnSourceMap().entrySet().stream().collect(Collectors.toMap( + Map.Entry::getKey, + (final Map.Entry columnNameToSourceEntry) -> { + // noinspection unchecked + final AggregateColumnSource aggregateColumnSource = AggregateColumnSource + .make(columnNameToSourceEntry.getValue(), resultIndexColumnSource); + aggregateColumnSource.startTrackingPrevValues(); + return aggregateColumnSource; + }, + Assert::neverInvoked, + LinkedHashMap::new))); + if (swapListener != null) { + final ModifiedColumnSet.Transformer transformer = inputTable.newModifiedColumnSetTransformer( + inputTable.getDefinition().getColumnNamesArray(), + resultTable.getDefinition().getColumnNames().stream() + .map(resultTable::newModifiedColumnSet).toArray(ModifiedColumnSet[]::new)); + final ShiftAwareListener aggregationUpdateListener = + new BaseTable.ShiftAwareListenerImpl("by()", inputTable, resultTable) { + @Override + public final void onUpdate(@NotNull final Update upstream) { + final boolean wasEmpty = inputTable.getIndex().firstKeyPrev() == Index.NULL_KEY; + final boolean isEmpty = inputTable.getIndex().empty(); + final Index added; + final Index removed; + final Index modified; + final ModifiedColumnSet modifiedColumnSet; + if (wasEmpty) { + if (isEmpty) { + // empty -> empty: No change to report, we probably shouldn't even be + // notified + return; + } + resultTable.getIndex().insert(0); + added = Index.FACTORY.getFlatIndex(1); + removed = Index.FACTORY.getEmptyIndex(); + modified = Index.FACTORY.getEmptyIndex(); + modifiedColumnSet = ModifiedColumnSet.EMPTY; + } else if (isEmpty) { + resultTable.getIndex().remove(0); + added = Index.FACTORY.getEmptyIndex(); + removed = Index.FACTORY.getFlatIndex(1); + modified = Index.FACTORY.getEmptyIndex(); + modifiedColumnSet = ModifiedColumnSet.EMPTY; + } else if (upstream.added.nonempty() || upstream.removed.nonempty()) { + added = Index.FACTORY.getEmptyIndex(); + removed = Index.FACTORY.getEmptyIndex(); + modified = 
Index.FACTORY.getFlatIndex(1); + modifiedColumnSet = ModifiedColumnSet.ALL; + } else if (upstream.modified.nonempty()) { + added = Index.FACTORY.getEmptyIndex(); + removed = Index.FACTORY.getEmptyIndex(); + modified = Index.FACTORY.getFlatIndex(1); + transformer.clearAndTransform(upstream.modifiedColumnSet, + modifiedColumnSet = resultTable.getModifiedColumnSetForUpdates()); + } else { + // Only shifts: Nothing to report downstream, our data has not changed + return; + } + final Update downstream = new Update(added, removed, modified, + IndexShiftData.EMPTY, modifiedColumnSet); + resultTable.notifyListeners(downstream); } - resultTable.getIndex().insert(0); - added = Index.FACTORY.getFlatIndex(1); - removed = Index.FACTORY.getEmptyIndex(); - modified = Index.FACTORY.getEmptyIndex(); - modifiedColumnSet = ModifiedColumnSet.EMPTY; - } else if (isEmpty) { - resultTable.getIndex().remove(0); - added = Index.FACTORY.getEmptyIndex(); - removed = Index.FACTORY.getFlatIndex(1); - modified = Index.FACTORY.getEmptyIndex(); - modifiedColumnSet = ModifiedColumnSet.EMPTY; - } else if (upstream.added.nonempty() - || upstream.removed.nonempty()) { - added = Index.FACTORY.getEmptyIndex(); - removed = Index.FACTORY.getEmptyIndex(); - modified = Index.FACTORY.getFlatIndex(1); - modifiedColumnSet = ModifiedColumnSet.ALL; - } else if (upstream.modified.nonempty()) { - added = Index.FACTORY.getEmptyIndex(); - removed = Index.FACTORY.getEmptyIndex(); - modified = Index.FACTORY.getFlatIndex(1); - transformer.clearAndTransform(upstream.modifiedColumnSet, - modifiedColumnSet = - resultTable.getModifiedColumnSetForUpdates()); - } else { - // Only shifts: Nothing to report downstream, our data has not - // changed - return; - } - final Update downstream = new Update(added, removed, modified, - IndexShiftData.EMPTY, modifiedColumnSet); - resultTable.notifyListeners(downstream); - } - }; - swapListener.setListenerAndResult(aggregationUpdateListener, resultTable); - 
resultTable.addParentReference(swapListener); - } - resultHolder.setValue(resultTable); - return true; - }); + }; + swapListener.setListenerAndResult(aggregationUpdateListener, resultTable); + resultTable.addParentReference(swapListener); + } + resultHolder.setValue(resultTable); + return true; + }); return resultHolder.getValue(); } @NotNull - private static QueryTable staticGroupedBy( - @NotNull final Map existingColumnSourceMap, - @NotNull final String keyColumnName, - @NotNull final ColumnSource keyColumnSource, - @NotNull final Map groupToIndex) { + private static QueryTable staticGroupedBy(@NotNull final Map existingColumnSourceMap, + @NotNull final String keyColumnName, + @NotNull final ColumnSource keyColumnSource, + @NotNull final Map groupToIndex) { // noinspection unchecked final Pair, ObjectArraySource> flatResultColumnSources = - AbstractColumnSource.groupingToFlatSources(keyColumnSource, groupToIndex); + AbstractColumnSource.groupingToFlatSources(keyColumnSource, groupToIndex); final ArrayBackedColumnSource resultKeyColumnSource = flatResultColumnSources.getFirst(); - final ObjectArraySource resultIndexColumnSource = - flatResultColumnSources.getSecond(); + final ObjectArraySource resultIndexColumnSource = flatResultColumnSources.getSecond(); final Index resultIndex = Index.FACTORY.getFlatIndex(groupToIndex.size()); final Map> resultColumnSourceMap = new LinkedHashMap<>(); resultColumnSourceMap.put(keyColumnName, resultKeyColumnSource); // noinspection unchecked existingColumnSourceMap.entrySet().stream() - .filter(( - final Map.Entry columnNameToSourceEntry) -> !columnNameToSourceEntry - .getKey().equals(keyColumnName)) - .forEachOrdered(( - final Map.Entry columnNameToSourceEntry) -> resultColumnSourceMap - .put(columnNameToSourceEntry.getKey(), AggregateColumnSource - .make(columnNameToSourceEntry.getValue(), resultIndexColumnSource))); + .filter((final Map.Entry columnNameToSourceEntry) -> !columnNameToSourceEntry + 
.getKey().equals(keyColumnName)) + .forEachOrdered((final Map.Entry columnNameToSourceEntry) -> resultColumnSourceMap + .put(columnNameToSourceEntry.getKey(), AggregateColumnSource + .make(columnNameToSourceEntry.getValue(), resultIndexColumnSource))); return new QueryTable(resultIndex, resultColumnSourceMap); } @NotNull private static QueryTable staticHashedBy(@NotNull final AggregationControl aggregationControl, - @NotNull final QueryTable inputTable, - @NotNull final Map existingColumnSourceMap, - @NotNull final String[] keyColumnNames, - @NotNull final String[] aggregatedColumnNames, - @NotNull final ColumnSource[] keyColumnSources) { + @NotNull final QueryTable inputTable, + @NotNull final Map existingColumnSourceMap, + @NotNull final String[] keyColumnNames, + @NotNull final String[] aggregatedColumnNames, + @NotNull final ColumnSource[] keyColumnSources) { // Reinterpret key column sources as primitives where possible - final ColumnSource[] maybeReinterpretedKeyColumnSources = - maybeReinterpretKeyColumnSources(keyColumnSources); + final ColumnSource[] maybeReinterpretedKeyColumnSources = maybeReinterpretKeyColumnSources(keyColumnSources); // Prepare our state manager final StaticChunkedByAggregationStateManager stateManager = - new StaticChunkedByAggregationStateManager(maybeReinterpretedKeyColumnSources, - aggregationControl.initialHashTableSize(inputTable), - aggregationControl.getTargetLoadFactor(), - aggregationControl.getMaximumLoadFactor()); + new StaticChunkedByAggregationStateManager(maybeReinterpretedKeyColumnSources, + aggregationControl.initialHashTableSize(inputTable), aggregationControl.getTargetLoadFactor(), + aggregationControl.getMaximumLoadFactor()); // Do the actual aggregation hashing and convert the results final IntegerArraySource groupIndexToHashSlot = new IntegerArraySource(); - final int numGroups = stateManager.buildTable(inputTable, - maybeReinterpretedKeyColumnSources, groupIndexToHashSlot); + final int numGroups = + 
stateManager.buildTable(inputTable, maybeReinterpretedKeyColumnSources, groupIndexToHashSlot); stateManager.convertBuildersToIndexes(groupIndexToHashSlot, numGroups); - // TODO: Consider selecting the hash inputTable sources, in order to truncate them to size - // and improve density + // TODO: Consider selecting the hash inputTable sources, in order to truncate them to size and improve density // Compute result index and redirection to hash slots final Index resultIndex = Index.FACTORY.getFlatIndex(numGroups); - final RedirectionIndex resultIndexToHashSlot = - new IntColumnSourceRedirectionIndex(groupIndexToHashSlot); + final RedirectionIndex resultIndexToHashSlot = new IntColumnSourceRedirectionIndex(groupIndexToHashSlot); // Construct result column sources final ColumnSource[] keyHashTableSources = stateManager.getKeyHashTableSources(); @@ -267,114 +245,108 @@ private static QueryTable staticHashedBy(@NotNull final AggregationControl aggre for (int kci = 0; kci < keyHashTableSources.length; ++kci) { ColumnSource resultKeyColumnSource = keyHashTableSources[kci]; if (keyColumnSources[kci] != maybeReinterpretedKeyColumnSources[kci]) { - resultKeyColumnSource = ReinterpretUtilities - .convertToOriginal(keyColumnSources[kci].getType(), resultKeyColumnSource); + resultKeyColumnSource = + ReinterpretUtilities.convertToOriginal(keyColumnSources[kci].getType(), resultKeyColumnSource); } resultColumnSourceMap.put(keyColumnNames[kci], - new ReadOnlyRedirectedColumnSource<>(resultIndexToHashSlot, resultKeyColumnSource)); + new ReadOnlyRedirectedColumnSource<>(resultIndexToHashSlot, resultKeyColumnSource)); } // Gather the result aggregate columns - final ColumnSource resultIndexColumnSource = new ReadOnlyRedirectedColumnSource<>( - resultIndexToHashSlot, stateManager.getIndexHashTableSource()); + final ColumnSource resultIndexColumnSource = + new ReadOnlyRedirectedColumnSource<>(resultIndexToHashSlot, stateManager.getIndexHashTableSource()); // noinspection unchecked 
Arrays.stream(aggregatedColumnNames) - .forEachOrdered((final String aggregatedColumnName) -> resultColumnSourceMap - .put(aggregatedColumnName, AggregateColumnSource.make( - existingColumnSourceMap.get(aggregatedColumnName), resultIndexColumnSource))); + .forEachOrdered((final String aggregatedColumnName) -> resultColumnSourceMap.put(aggregatedColumnName, + AggregateColumnSource.make(existingColumnSourceMap.get(aggregatedColumnName), + resultIndexColumnSource))); // Construct the result table return new QueryTable(resultIndex, resultColumnSourceMap); } @NotNull - private static QueryTable incrementalHashedBy( - @NotNull final AggregationControl aggregationControl, - @NotNull final QueryTable inputTable, - @NotNull final Map existingColumnSourceMap, - @NotNull final String[] keyColumnNames, - @NotNull final String[] aggregatedColumnNames, - @NotNull final ColumnSource[] keyColumnSources, - @NotNull final Set keyColumnUpstreamInputColumnNames) { + private static QueryTable incrementalHashedBy(@NotNull final AggregationControl aggregationControl, + @NotNull final QueryTable inputTable, + @NotNull final Map existingColumnSourceMap, + @NotNull final String[] keyColumnNames, + @NotNull final String[] aggregatedColumnNames, + @NotNull final ColumnSource[] keyColumnSources, + @NotNull final Set keyColumnUpstreamInputColumnNames) { final Mutable resultHolder = new MutableObject<>(); final ShiftAwareSwapListener swapListener = - inputTable.createSwapListenerIfRefreshing(ShiftAwareSwapListener::new); + inputTable.createSwapListenerIfRefreshing(ShiftAwareSwapListener::new); assert swapListener != null; - inputTable.initializeWithSnapshot("by(" + String.join(",", keyColumnNames) + "-Snapshot", - swapListener, (final boolean usePrev, final long beforeClockValue) -> { - // Reinterpret key column sources as primitives where possible - final ColumnSource[] maybeReinterpretedKeyColumnSources = - maybeReinterpretKeyColumnSources(keyColumnSources); - - // Prepare our state manager - 
final IncrementalChunkedByAggregationStateManager stateManager = - new IncrementalChunkedByAggregationStateManager( - maybeReinterpretedKeyColumnSources, - aggregationControl.initialHashTableSize(inputTable), - aggregationControl.getTargetLoadFactor(), - aggregationControl.getMaximumLoadFactor()); - - // Prepare our update tracker - final IncrementalByAggregationUpdateTracker updateTracker = - new IncrementalByAggregationUpdateTracker(); - - // Perform the initial aggregation pass - if (usePrev) { - stateManager.buildInitialTableFromPrevious(inputTable, - maybeReinterpretedKeyColumnSources, updateTracker); - } else { - stateManager.buildInitialTableFromCurrent(inputTable, - maybeReinterpretedKeyColumnSources, updateTracker); - - } - // Compute result index and redirection to hash slots - final RedirectionIndex resultIndexToHashSlot = RedirectionIndexLockFreeImpl.FACTORY - .createRedirectionIndex(updateTracker.size()); - final Index resultIndex = - updateTracker.applyAddsAndMakeInitialIndex(stateManager.getIndexSource(), - stateManager.getOverflowIndexSource(), resultIndexToHashSlot); - - // Construct result column sources - final Map> resultColumnSourceMap = new LinkedHashMap<>(); - - // Gather the result key columns - for (int kci = 0; kci < keyColumnSources.length; ++kci) { - resultColumnSourceMap.put(keyColumnNames[kci], keyColumnSources[kci]); - } - - // Gather the result aggregate columns - final ColumnSource resultIndexColumnSource = - new ReadOnlyRedirectedColumnSource<>(resultIndexToHashSlot, - stateManager.getIndexHashTableSource()); - Arrays.stream(aggregatedColumnNames) - .forEachOrdered((final String aggregatedColumnName) -> { - // noinspection unchecked - final AggregateColumnSource aggregatedColumnSource = AggregateColumnSource - .make(existingColumnSourceMap.get(aggregatedColumnName), - resultIndexColumnSource); - aggregatedColumnSource.startTrackingPrevValues(); - resultColumnSourceMap.put(aggregatedColumnName, aggregatedColumnSource); - }); - - // 
Construct the result table - final QueryTable resultTable = new QueryTable(resultIndex, resultColumnSourceMap); - resultIndexToHashSlot.startTrackingPrevValues(); - - // Categorize modified column sets - final ModifiedColumnSet upstreamKeyColumnInputs = inputTable.newModifiedColumnSet( - keyColumnUpstreamInputColumnNames.toArray(ZERO_LENGTH_STRING_ARRAY)); - final ModifiedColumnSet downstreamAllAggregatedColumns = - resultTable.newModifiedColumnSet(aggregatedColumnNames); - final ModifiedColumnSet.Transformer aggregatedColumnsTransformer = - inputTable.newModifiedColumnSetTransformer( - aggregatedColumnNames, - Arrays.stream(aggregatedColumnNames).map(resultTable::newModifiedColumnSet) - .toArray(ModifiedColumnSet[]::new)); - - // Handle updates - final ShiftAwareListener aggregationUpdateListener = - new BaseTable.ShiftAwareListenerImpl( - "by(" + String.join(",", keyColumnNames) + ')', inputTable, resultTable) { + inputTable.initializeWithSnapshot("by(" + String.join(",", keyColumnNames) + "-Snapshot", swapListener, + (final boolean usePrev, final long beforeClockValue) -> { + // Reinterpret key column sources as primitives where possible + final ColumnSource[] maybeReinterpretedKeyColumnSources = + maybeReinterpretKeyColumnSources(keyColumnSources); + + // Prepare our state manager + final IncrementalChunkedByAggregationStateManager stateManager = + new IncrementalChunkedByAggregationStateManager(maybeReinterpretedKeyColumnSources, + aggregationControl.initialHashTableSize(inputTable), + aggregationControl.getTargetLoadFactor(), + aggregationControl.getMaximumLoadFactor()); + + // Prepare our update tracker + final IncrementalByAggregationUpdateTracker updateTracker = + new IncrementalByAggregationUpdateTracker(); + + // Perform the initial aggregation pass + if (usePrev) { + stateManager.buildInitialTableFromPrevious(inputTable, maybeReinterpretedKeyColumnSources, + updateTracker); + } else { + stateManager.buildInitialTableFromCurrent(inputTable, 
maybeReinterpretedKeyColumnSources, + updateTracker); + + } + // Compute result index and redirection to hash slots + final RedirectionIndex resultIndexToHashSlot = + RedirectionIndexLockFreeImpl.FACTORY.createRedirectionIndex(updateTracker.size()); + final Index resultIndex = updateTracker.applyAddsAndMakeInitialIndex(stateManager.getIndexSource(), + stateManager.getOverflowIndexSource(), resultIndexToHashSlot); + + // Construct result column sources + final Map> resultColumnSourceMap = new LinkedHashMap<>(); + + // Gather the result key columns + for (int kci = 0; kci < keyColumnSources.length; ++kci) { + resultColumnSourceMap.put(keyColumnNames[kci], keyColumnSources[kci]); + } + + // Gather the result aggregate columns + final ColumnSource resultIndexColumnSource = new ReadOnlyRedirectedColumnSource<>( + resultIndexToHashSlot, stateManager.getIndexHashTableSource()); + Arrays.stream(aggregatedColumnNames) + .forEachOrdered((final String aggregatedColumnName) -> { + // noinspection unchecked + final AggregateColumnSource aggregatedColumnSource = AggregateColumnSource.make( + existingColumnSourceMap.get(aggregatedColumnName), resultIndexColumnSource); + aggregatedColumnSource.startTrackingPrevValues(); + resultColumnSourceMap.put(aggregatedColumnName, aggregatedColumnSource); + }); + + // Construct the result table + final QueryTable resultTable = new QueryTable(resultIndex, resultColumnSourceMap); + resultIndexToHashSlot.startTrackingPrevValues(); + + // Categorize modified column sets + final ModifiedColumnSet upstreamKeyColumnInputs = inputTable + .newModifiedColumnSet(keyColumnUpstreamInputColumnNames.toArray(ZERO_LENGTH_STRING_ARRAY)); + final ModifiedColumnSet downstreamAllAggregatedColumns = + resultTable.newModifiedColumnSet(aggregatedColumnNames); + final ModifiedColumnSet.Transformer aggregatedColumnsTransformer = + inputTable.newModifiedColumnSetTransformer( + aggregatedColumnNames, + 
Arrays.stream(aggregatedColumnNames).map(resultTable::newModifiedColumnSet) + .toArray(ModifiedColumnSet[]::new)); + + // Handle updates + final ShiftAwareListener aggregationUpdateListener = new BaseTable.ShiftAwareListenerImpl( + "by(" + String.join(",", keyColumnNames) + ')', inputTable, resultTable) { @Override public final void onUpdate(@NotNull final Update upstream) { if (updateTracker.clear()) { @@ -382,195 +354,172 @@ public final void onUpdate(@NotNull final Update upstream) { } final boolean keyColumnsModified = - upstream.modifiedColumnSet.containsAny(upstreamKeyColumnInputs); + upstream.modifiedColumnSet.containsAny(upstreamKeyColumnInputs); if (keyColumnsModified) { - try (final Index toRemove = - upstream.removed.union(upstream.getModifiedPreShift())) { - stateManager.processRemoves(maybeReinterpretedKeyColumnSources, - toRemove, updateTracker); + try (final Index toRemove = upstream.removed.union(upstream.getModifiedPreShift())) { + stateManager.processRemoves(maybeReinterpretedKeyColumnSources, toRemove, + updateTracker); } } else { - stateManager.processRemoves(maybeReinterpretedKeyColumnSources, - upstream.removed, updateTracker); + stateManager.processRemoves(maybeReinterpretedKeyColumnSources, upstream.removed, + updateTracker); } updateTracker.applyRemovesToStates(stateManager.getIndexSource(), - stateManager.getOverflowIndexSource()); + stateManager.getOverflowIndexSource()); if (upstream.shifted.nonempty()) { - upstream.shifted.apply((final long beginRange, final long endRange, - final long shiftDelta) -> { - final Index shiftedPreviousIndex; - try (final Index previousIndex = - inputTable.getIndex().getPrevIndex()) { - shiftedPreviousIndex = - previousIndex.subindexByKey(beginRange, endRange); - } - try { - if (aggregationControl.shouldProbeShift( - shiftedPreviousIndex.size(), resultIndex.intSize())) { - stateManager.processShift( - maybeReinterpretedKeyColumnSources, - shiftedPreviousIndex, updateTracker); - 
updateTracker.applyShiftToStates( - stateManager.getIndexSource(), - stateManager.getOverflowIndexSource(), beginRange, - endRange, shiftDelta); - } else { - resultIndex.forAllLongs((final long stateKey) -> { - final int stateSlot = - (int) resultIndexToHashSlot.get(stateKey); - stateManager.applyShift(stateSlot, beginRange, - endRange, shiftDelta, - updateTracker::processAppliedShift); - }); - } - } finally { - shiftedPreviousIndex.close(); - } - }); + upstream.shifted + .apply((final long beginRange, final long endRange, final long shiftDelta) -> { + final Index shiftedPreviousIndex; + try (final Index previousIndex = inputTable.getIndex().getPrevIndex()) { + shiftedPreviousIndex = + previousIndex.subindexByKey(beginRange, endRange); + } + try { + if (aggregationControl.shouldProbeShift(shiftedPreviousIndex.size(), + resultIndex.intSize())) { + stateManager.processShift(maybeReinterpretedKeyColumnSources, + shiftedPreviousIndex, updateTracker); + updateTracker.applyShiftToStates(stateManager.getIndexSource(), + stateManager.getOverflowIndexSource(), beginRange, endRange, + shiftDelta); + } else { + resultIndex.forAllLongs((final long stateKey) -> { + final int stateSlot = (int) resultIndexToHashSlot.get(stateKey); + stateManager.applyShift(stateSlot, beginRange, endRange, + shiftDelta, updateTracker::processAppliedShift); + }); + } + } finally { + shiftedPreviousIndex.close(); + } + }); } if (keyColumnsModified) { try (final Index toAdd = upstream.added.union(upstream.modified)) { - stateManager.processAdds(maybeReinterpretedKeyColumnSources, - toAdd, updateTracker); + stateManager.processAdds(maybeReinterpretedKeyColumnSources, toAdd, updateTracker); } } else { - stateManager.processModifies(maybeReinterpretedKeyColumnSources, - upstream.modified, updateTracker); - stateManager.processAdds(maybeReinterpretedKeyColumnSources, - upstream.added, updateTracker); + stateManager.processModifies(maybeReinterpretedKeyColumnSources, upstream.modified, + updateTracker); + 
stateManager.processAdds(maybeReinterpretedKeyColumnSources, upstream.added, + updateTracker); } updateTracker.applyAddsToStates(stateManager.getIndexSource(), - stateManager.getOverflowIndexSource()); + stateManager.getOverflowIndexSource()); final Update downstream = updateTracker.makeUpdateFromStates( - stateManager.getIndexSource(), - stateManager.getOverflowIndexSource(), resultIndex, - resultIndexToHashSlot, - (final boolean someKeyHasAddsOrRemoves, - final boolean someKeyHasModifies) -> { - if (someKeyHasAddsOrRemoves) { - return downstreamAllAggregatedColumns; - } - if (someKeyHasModifies) { - aggregatedColumnsTransformer.clearAndTransform( - upstream.modifiedColumnSet, - resultTable.getModifiedColumnSetForUpdates()); - return resultTable.getModifiedColumnSetForUpdates(); - } - return ModifiedColumnSet.EMPTY; - }); + stateManager.getIndexSource(), stateManager.getOverflowIndexSource(), resultIndex, + resultIndexToHashSlot, + (final boolean someKeyHasAddsOrRemoves, final boolean someKeyHasModifies) -> { + if (someKeyHasAddsOrRemoves) { + return downstreamAllAggregatedColumns; + } + if (someKeyHasModifies) { + aggregatedColumnsTransformer.clearAndTransform(upstream.modifiedColumnSet, + resultTable.getModifiedColumnSetForUpdates()); + return resultTable.getModifiedColumnSetForUpdates(); + } + return ModifiedColumnSet.EMPTY; + }); resultTable.notifyListeners(downstream); } }; - swapListener.setListenerAndResult(aggregationUpdateListener, resultTable); - resultTable.addParentReference(swapListener); + swapListener.setListenerAndResult(aggregationUpdateListener, resultTable); + resultTable.addParentReference(swapListener); - resultHolder.setValue(resultTable); - return true; - }); + resultHolder.setValue(resultTable); + return true; + }); return resultHolder.getValue(); } public static LocalTableMap byExternal(@NotNull final QueryTable inputTable, - final boolean dropKeyColumns, - @NotNull final String... 
keyColumnNames) { + final boolean dropKeyColumns, + @NotNull final String... keyColumnNames) { return byExternal(AggregationControl.DEFAULT, inputTable, dropKeyColumns, keyColumnNames); } @VisibleForTesting public static LocalTableMap byExternal(@NotNull final AggregationControl aggregationControl, - @NotNull final QueryTable inputTable, - final boolean dropKeyColumns, - @NotNull final String... keyColumnNames) { - // If there are no key columns, return a map with just the input table; there's nothing to - // be aggregated + @NotNull final QueryTable inputTable, + final boolean dropKeyColumns, + @NotNull final String... keyColumnNames) { + // If there are no key columns, return a map with just the input table; there's nothing to be aggregated if (keyColumnNames.length == 0) { final LocalTableMap noKeyResult = new LocalTableMap(null, inputTable.getDefinition()); noKeyResult.put(SmartKey.EMPTY, inputTable); return noKeyResult; } - final ColumnSource[] keyColumnSources = Arrays.stream(keyColumnNames) - .map(inputTable::getColumnSource).toArray(ColumnSource[]::new); + final ColumnSource[] keyColumnSources = + Arrays.stream(keyColumnNames).map(inputTable::getColumnSource).toArray(ColumnSource[]::new); final QueryTable subTableSource = - dropKeyColumns ? (QueryTable) inputTable.dropColumns(keyColumnNames) : inputTable; + dropKeyColumns ? 
(QueryTable) inputTable.dropColumns(keyColumnNames) : inputTable; // If we can use an existing static grouping, trivially convert that to a table map final Map groupingForAggregation = - maybeGetGroupingForAggregation(aggregationControl, inputTable, keyColumnSources); + maybeGetGroupingForAggregation(aggregationControl, inputTable, keyColumnSources); if (groupingForAggregation != null) { - final LocalTableMap staticGroupedResult = - new LocalTableMap(null, inputTable.getDefinition()); - AbstractColumnSource.forEachResponsiveGroup(groupingForAggregation, - inputTable.getIndex(), - (final Object key, final Index index) -> staticGroupedResult.put(key, - subTableSource.getSubTable(index))); + final LocalTableMap staticGroupedResult = new LocalTableMap(null, inputTable.getDefinition()); + AbstractColumnSource.forEachResponsiveGroup(groupingForAggregation, inputTable.getIndex(), + (final Object key, final Index index) -> staticGroupedResult.put(key, + subTableSource.getSubTable(index))); return staticGroupedResult; } if (inputTable.isRefreshing()) { - return incrementalHashedByExternal(aggregationControl, inputTable, keyColumnSources, - subTableSource); + return incrementalHashedByExternal(aggregationControl, inputTable, keyColumnSources, subTableSource); } - return staticHashedByExternal(aggregationControl, inputTable, keyColumnSources, - subTableSource); + return staticHashedByExternal(aggregationControl, inputTable, keyColumnSources, subTableSource); } @NotNull - private static LocalTableMap staticHashedByExternal( - @NotNull final AggregationControl aggregationControl, - @NotNull final QueryTable inputTable, - @NotNull final ColumnSource[] keyColumnSources, - @NotNull final QueryTable subTableSource) { + private static LocalTableMap staticHashedByExternal(@NotNull final AggregationControl aggregationControl, + @NotNull final QueryTable inputTable, + @NotNull final ColumnSource[] keyColumnSources, + @NotNull final QueryTable subTableSource) { // Reinterpret key column 
sources as primitives where possible - final ColumnSource[] maybeReinterpretedKeyColumnSources = - maybeReinterpretKeyColumnSources(keyColumnSources); + final ColumnSource[] maybeReinterpretedKeyColumnSources = maybeReinterpretKeyColumnSources(keyColumnSources); // Prepare our state manager final StaticChunkedByAggregationStateManager stateManager = - new StaticChunkedByAggregationStateManager(maybeReinterpretedKeyColumnSources, - aggregationControl.initialHashTableSize(inputTable), - aggregationControl.getTargetLoadFactor(), - aggregationControl.getMaximumLoadFactor()); + new StaticChunkedByAggregationStateManager(maybeReinterpretedKeyColumnSources, + aggregationControl.initialHashTableSize(inputTable), aggregationControl.getTargetLoadFactor(), + aggregationControl.getMaximumLoadFactor()); // Do the actual aggregation hashing and convert the results final IntegerArraySource groupIndexToHashSlot = new IntegerArraySource(); - final int numGroups = stateManager.buildTable(inputTable, - maybeReinterpretedKeyColumnSources, groupIndexToHashSlot); + final int numGroups = + stateManager.buildTable(inputTable, maybeReinterpretedKeyColumnSources, groupIndexToHashSlot); stateManager.convertBuildersToIndexes(groupIndexToHashSlot, numGroups); // Build our table map - final LocalTableMap staticHashedResult = - new LocalTableMap(null, inputTable.getDefinition()); + final LocalTableMap staticHashedResult = new LocalTableMap(null, inputTable.getDefinition()); final TupleSource inputKeyIndexToMapKeySource = - keyColumnSources.length == 1 ? keyColumnSources[0] - : new SmartKeySource(keyColumnSources); + keyColumnSources.length == 1 ? 
keyColumnSources[0] : new SmartKeySource(keyColumnSources); final ColumnSource hashSlotToIndexSource = stateManager.getIndexHashTableSource(); - final int chunkSize = - Math.min(numGroups, IncrementalChunkedByAggregationStateManager.CHUNK_SIZE); + final int chunkSize = Math.min(numGroups, IncrementalChunkedByAggregationStateManager.CHUNK_SIZE); try (final OrderedKeys groupIndices = CurrentOnlyIndex.FACTORY.getFlatIndex(numGroups); - final OrderedKeys.Iterator groupIndicesIterator = groupIndices.getOrderedKeysIterator(); - final ChunkSource.GetContext hashSlotGetContext = - groupIndexToHashSlot.makeGetContext(chunkSize); - final WritableObjectChunk aggregatedIndexes = - WritableObjectChunk.makeWritableChunk(chunkSize); - final WritableLongChunk mapKeySourceIndices = - WritableLongChunk.makeWritableChunk(chunkSize); - final ChunkSource.GetContext mapKeyGetContext = - inputKeyIndexToMapKeySource.makeGetContext(chunkSize)) { + final OrderedKeys.Iterator groupIndicesIterator = groupIndices.getOrderedKeysIterator(); + final ChunkSource.GetContext hashSlotGetContext = groupIndexToHashSlot.makeGetContext(chunkSize); + final WritableObjectChunk aggregatedIndexes = + WritableObjectChunk.makeWritableChunk(chunkSize); + final WritableLongChunk mapKeySourceIndices = + WritableLongChunk.makeWritableChunk(chunkSize); + final ChunkSource.GetContext mapKeyGetContext = inputKeyIndexToMapKeySource.makeGetContext(chunkSize)) { while (groupIndicesIterator.hasMore()) { final OrderedKeys groupIndexesForThisChunk = - groupIndicesIterator.getNextOrderedKeysWithLength(chunkSize); + groupIndicesIterator.getNextOrderedKeysWithLength(chunkSize); final int groupsInThisChunk = groupIndexesForThisChunk.intSize(); - final LongChunk hashSlots = groupIndexToHashSlot - .getChunk(hashSlotGetContext, groupIndexesForThisChunk).asLongChunk(); + final LongChunk hashSlots = + groupIndexToHashSlot.getChunk(hashSlotGetContext, groupIndexesForThisChunk).asLongChunk(); for (int gi = 0; gi < groupsInThisChunk; 
++gi) { final Index index = hashSlotToIndexSource.get(hashSlots.get(gi)); aggregatedIndexes.set(gi, index); @@ -580,13 +529,11 @@ private static LocalTableMap staticHashedByExternal( mapKeySourceIndices.setSize(groupsInThisChunk); final ObjectChunk mapKeys; try (final OrderedKeys inputKeyIndices = - OrderedKeys.wrapKeyIndicesChunkAsOrderedKeys(mapKeySourceIndices)) { - mapKeys = inputKeyIndexToMapKeySource - .getChunk(mapKeyGetContext, inputKeyIndices).asObjectChunk(); + OrderedKeys.wrapKeyIndicesChunkAsOrderedKeys(mapKeySourceIndices)) { + mapKeys = inputKeyIndexToMapKeySource.getChunk(mapKeyGetContext, inputKeyIndices).asObjectChunk(); } for (int gi = 0; gi < groupsInThisChunk; ++gi) { - staticHashedResult.put(mapKeys.get(gi), - subTableSource.getSubTable(aggregatedIndexes.get(gi))); + staticHashedResult.put(mapKeys.get(gi), subTableSource.getSubTable(aggregatedIndexes.get(gi))); } } } @@ -594,21 +541,19 @@ private static LocalTableMap staticHashedByExternal( } @NotNull - private static LocalTableMap incrementalHashedByExternal( - @NotNull final AggregationControl aggregationControl, - @NotNull final QueryTable inputTable, - @NotNull final ColumnSource[] keyColumnSources, - @NotNull final QueryTable subTableSource) { + private static LocalTableMap incrementalHashedByExternal(@NotNull final AggregationControl aggregationControl, + @NotNull final QueryTable inputTable, + @NotNull final ColumnSource[] keyColumnSources, + @NotNull final QueryTable subTableSource) { throw new UnsupportedOperationException("Never developed"); } @Nullable private static Map maybeGetGroupingForAggregation( - @NotNull final AggregationControl aggregationControl, - @NotNull final QueryTable inputTable, - @NotNull final ColumnSource[] keyColumnSources) { - // If we have one grouped key column and the input table is not refreshing use the existing - // grouping + @NotNull final AggregationControl aggregationControl, + @NotNull final QueryTable inputTable, + @NotNull final ColumnSource[] 
keyColumnSources) { + // If we have one grouped key column and the input table is not refreshing use the existing grouping if (!aggregationControl.considerGrouping(inputTable, keyColumnSources)) { return null; } @@ -621,14 +566,12 @@ private static Map maybeGetGroupingForAggregation( } @NotNull - private static ColumnSource[] maybeReinterpretKeyColumnSources( - @NotNull final ColumnSource[] keyColumnSources) { + private static ColumnSource[] maybeReinterpretKeyColumnSources(@NotNull final ColumnSource[] keyColumnSources) { // TODO: Support symbol tables in reinterpret and re-boxing - final ColumnSource[] maybeReinterpretedKeyColumnSources = - new ColumnSource[keyColumnSources.length]; + final ColumnSource[] maybeReinterpretedKeyColumnSources = new ColumnSource[keyColumnSources.length]; for (int kci = 0; kci < keyColumnSources.length; ++kci) { maybeReinterpretedKeyColumnSources[kci] = - ReinterpretUtilities.maybeConvertToPrimitive(keyColumnSources[kci]); + ReinterpretUtilities.maybeConvertToPrimitive(keyColumnSources[kci]); } return maybeReinterpretedKeyColumnSources; } diff --git a/DB/src/main/java/io/deephaven/db/v2/by/AggregationIndexStateFactory.java b/DB/src/main/java/io/deephaven/db/v2/by/AggregationIndexStateFactory.java index 432763b491a..a15932d3565 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/AggregationIndexStateFactory.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/AggregationIndexStateFactory.java @@ -5,8 +5,7 @@ package io.deephaven.db.v2.by; public class AggregationIndexStateFactory implements AggregationStateFactory { - private static final AggregationMemoKey AGGREGATION_INDEX_INSTANCE = - new AggregationMemoKey() {}; + private static final AggregationMemoKey AGGREGATION_INDEX_INSTANCE = new AggregationMemoKey() {}; @Override public AggregationMemoKey getMemoKey() { diff --git a/DB/src/main/java/io/deephaven/db/v2/by/AggregationStateFactory.java b/DB/src/main/java/io/deephaven/db/v2/by/AggregationStateFactory.java index 
b7a16ee1739..409d2c8cce0 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/AggregationStateFactory.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/AggregationStateFactory.java @@ -7,9 +7,8 @@ import io.deephaven.db.tables.Table; /** - * The aggregation state factory is passed to the by operation, and is used to create new - * aggregation states for each of the output rows (or in case of byExternal, each of the output - * tables). + * The aggregation state factory is passed to the by operation, and is used to create new aggregation states for each of + * the output rows (or in case of byExternal, each of the output tables). * * The factory also returns the result column sources. */ @@ -18,9 +17,8 @@ public interface AggregationStateFactory { * Produces a MemoKey for this aggregation state factory. * *

    - * If two AggregationStateFactories have equal memoKeys, then {@link Table#by} operations that - * have the same group by columns may be memoized. In that case instead of recomputing the - * result; the original result will be used. + * If two AggregationStateFactories have equal memoKeys, then {@link Table#by} operations that have the same group + * by columns may be memoized. In that case instead of recomputing the result; the original result will be used. *

    * *

    diff --git a/DB/src/main/java/io/deephaven/db/v2/by/ApproximatePercentile.java b/DB/src/main/java/io/deephaven/db/v2/by/ApproximatePercentile.java index c984e1fe96e..679fbf8e236 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/ApproximatePercentile.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/ApproximatePercentile.java @@ -18,11 +18,10 @@ * Generate approximate percentile aggregations of a table. * *

    - * The underlying data structure and algorithm used is a t-digest as described at - * https://github.com/tdunning/t-digest, which has a "compression" parameter that determines the - * size of the retained values. From the t-digest documentation, "100 is a common value for - * normal uses. 1000 is extremely large. The number of centroids retained will be a smallish - * (usually less than 10) multiple of this number."e; + * The underlying data structure and algorithm used is a t-digest as described at https://github.com/tdunning/t-digest, + * which has a "compression" parameter that determines the size of the retained values. From the t-digest documentation, + * "100 is a common value for normal uses. 1000 is extremely large. The number of centroids retained will be a + * smallish (usually less than 10) multiple of this number."e; *

    * *

    @@ -30,14 +29,14 @@ *

    * *

    - * The input table must be add only, if modifications or removals take place; then an - * UnsupportedOperationException is thrown. For tables with adds and removals you must use exact - * percentiles with {@link ComboAggregateFactory#AggPct(double, java.lang.String...)}. + * The input table must be add only, if modifications or removals take place; then an UnsupportedOperationException is + * thrown. For tables with adds and removals you must use exact percentiles with + * {@link ComboAggregateFactory#AggPct(double, java.lang.String...)}. *

    * *

    - * You may compute either one approximate percentile or several approximate percentiles at once. For - * example, to compute the 95th percentile of all other columns, by the "Sym" column you would call: + * You may compute either one approximate percentile or several approximate percentiles at once. For example, to compute + * the 95th percentile of all other columns, by the "Sym" column you would call: * *

      * ApproximatePercentile.approximatePercentile(input, 0.95, "Sym")
    @@ -45,31 +44,29 @@
      * 

    * *

    - * If you need to compute several percentiles, it is more efficient to compute them simultaneously. - * For example, this example computes the 75th, 95th, and 99th percentiles of the "Latency" column - * using a builder pattern, and the 95th and 99th percentiles of the "Size" column by "Sym": + * If you need to compute several percentiles, it is more efficient to compute them simultaneously. For example, this + * example computes the 75th, 95th, and 99th percentiles of the "Latency" column using a builder pattern, and the 95th + * and 99th percentiles of the "Size" column by "Sym": * *

    - * new ApproximatePercentile.PercentileDefinition("Latency").add(0.75, "L75").add(0.95, "L95")
    - *     .add(0.99, "L99").nextColumn("Size").add(0.95, "S95").add(0.99, "S99");
    + * new ApproximatePercentile.PercentileDefinition("Latency").add(0.75, "L75").add(0.95, "L95").add(0.99, "L99")
    + *         .nextColumn("Size").add(0.95, "S95").add(0.99, "S99");
      * final Table aggregated = ApproximatePercentile.approximatePercentiles(input, definition);
      * 
    *

    * *

    - * When parallelizing a workload, you may want to divide it based on natural partitioning and then - * compute an overall percentile. In these cases, you should use the - * {@link PercentileDefinition#exposeDigest} method to expose the internal t-digest structure as a - * column. If you then perform an array aggregation ({@link Table#by}), you can call the - * {@link #accumulateDigests} function to produce a single digest that represents all of the - * constituent digests. The amount of error introduced is related to the compression factor that you - * have selected for the digests. Once you have a combined digest object, you can call the quantile - * or other functions to extract the desired percentile. + * When parallelizing a workload, you may want to divide it based on natural partitioning and then compute an overall + * percentile. In these cases, you should use the {@link PercentileDefinition#exposeDigest} method to expose the + * internal t-digest structure as a column. If you then perform an array aggregation ({@link Table#by}), you can call + * the {@link #accumulateDigests} function to produce a single digest that represents all of the constituent digests. + * The amount of error introduced is related to the compression factor that you have selected for the digests. Once you + * have a combined digest object, you can call the quantile or other functions to extract the desired percentile. *

    */ public class ApproximatePercentile { - public static double DEFAULT_COMPRESSION = Configuration.getInstance() - .getDoubleWithDefault("ApproximatePercentile.defaultCompression", 100.0); + public static double DEFAULT_COMPRESSION = + Configuration.getInstance().getDoubleWithDefault("ApproximatePercentile.defaultCompression", 100.0); // static usage only private ApproximatePercentile() {} @@ -79,12 +76,12 @@ private ApproximatePercentile() {} * * @param input the input table * @param percentile the percentile to compute for each column - * @return a single row table with double columns representing the approximate percentile for - * each column of the input table + * @return a single row table with double columns representing the approximate percentile for each column of the + * input table */ public static Table approximatePercentile(Table input, double percentile) { return approximatePercentile(input, DEFAULT_COMPRESSION, percentile, - SelectColumn.ZERO_LENGTH_SELECT_COLUMN_ARRAY); + SelectColumn.ZERO_LENGTH_SELECT_COLUMN_ARRAY); } /** @@ -93,13 +90,12 @@ public static Table approximatePercentile(Table input, double percentile) { * @param input the input table * @param percentile the percentile to compute for each column * @param groupByColumns the columns to group by - * @return a with the groupByColumns and double columns representing the approximate percentile - * for each remaining column of the input table + * @return a with the groupByColumns and double columns representing the approximate percentile for each remaining + * column of the input table */ - public static Table approximatePercentile(Table input, double percentile, - String... groupByColumns) { + public static Table approximatePercentile(Table input, double percentile, String... 
groupByColumns) { return approximatePercentile(input, DEFAULT_COMPRESSION, percentile, - SelectColumnFactory.getExpressions(groupByColumns)); + SelectColumnFactory.getExpressions(groupByColumns)); } /** @@ -108,11 +104,10 @@ public static Table approximatePercentile(Table input, double percentile, * @param input the input table * @param percentile the percentile to compute for each column * @param groupByColumns the columns to group by - * @return a with the groupByColumns and double columns representing the approximate percentile - * for each remaining column of the input table + * @return a with the groupByColumns and double columns representing the approximate percentile for each remaining + * column of the input table */ - public static Table approximatePercentile(Table input, double percentile, - SelectColumn... groupByColumns) { + public static Table approximatePercentile(Table input, double percentile, SelectColumn... groupByColumns) { return approximatePercentile(input, DEFAULT_COMPRESSION, percentile, groupByColumns); } @@ -123,26 +118,24 @@ public static Table approximatePercentile(Table input, double percentile, * @param compression the t-digest compression parameter * @param percentile the percentile to compute for each column * @param groupByColumns the columns to group by - * @return a with the groupByColumns and double columns representing the approximate percentile - * for each remaining column of the input table + * @return a with the groupByColumns and double columns representing the approximate percentile for each remaining + * column of the input table */ public static Table approximatePercentile(Table input, double compression, double percentile, - SelectColumn... 
groupByColumns) { - final NonKeyColumnAggregationFactory aggregationContextFactory = - new NonKeyColumnAggregationFactory( - (type, resultName, exposeInternalColumns) -> new TDigestPercentileOperator(type, - compression, percentile, resultName)); - return ChunkedOperatorAggregationHelper.aggregation(aggregationContextFactory, - (QueryTable) input, groupByColumns); + SelectColumn... groupByColumns) { + final NonKeyColumnAggregationFactory aggregationContextFactory = new NonKeyColumnAggregationFactory( + (type, resultName, exposeInternalColumns) -> new TDigestPercentileOperator(type, compression, + percentile, resultName)); + return ChunkedOperatorAggregationHelper.aggregation(aggregationContextFactory, (QueryTable) input, + groupByColumns); } /** - * A builder class for an approximate percentile definition to be used with - * {@link #approximatePercentiles}. + * A builder class for an approximate percentile definition to be used with {@link #approximatePercentiles}. */ public static class PercentileDefinition { private final static PercentileDefinition[] ZERO_LENGTH_PERCENTILE_DEFINITION_ARRAY = - new PercentileDefinition[0]; + new PercentileDefinition[0]; private final PercentileDefinition prior; private final PercentileDefinition first; @@ -191,8 +184,7 @@ public PercentileDefinition add(double percentile, String resultName) { /** * Sets the name of the inputColumn * - * @param inputColumn the name of the input column that subsequent calls to {@link #add} - * operate on. + * @param inputColumn the name of the input column that subsequent calls to {@link #add} operate on. * * @return a (possibly new) PercentileDefinition */ @@ -239,88 +231,74 @@ private void flattenInto(List result) { } /** - * Compute a set of approximate percentiles for input according to the definitions in - * percentileDefinitions. + * Compute a set of approximate percentiles for input according to the definitions in percentileDefinitions. 
* * @param input the table to compute approximate percentiles for - * @param percentileDefinitions the compression factor, and map of input columns to output - * columns + * @param percentileDefinitions the compression factor, and map of input columns to output columns * @param groupByColumns the columns to group by * @return a table containing the groupByColumns and the approximate percentiles */ - public static Table approximatePercentiles(Table input, - PercentileDefinition percentileDefinitions, SelectColumn... groupByColumns) { - final List flatDefs = - PercentileDefinition.flatten(percentileDefinitions); + public static Table approximatePercentiles(Table input, PercentileDefinition percentileDefinitions, + SelectColumn... groupByColumns) { + final List flatDefs = PercentileDefinition.flatten(percentileDefinitions); if (flatDefs.isEmpty()) { throw new IllegalArgumentException("No percentile columns defined!"); } final double compression = flatDefs.get(0).compression; final NonKeyColumnAggregationFactory aggregationContextFactory = - new NonKeyColumnAggregationFactory((type, resultName, exposeInternalColumns) -> { - for (final PercentileDefinition percentileDefinition : flatDefs) { - if (percentileDefinition.inputColumn.equals(resultName)) { - return new TDigestPercentileOperator(type, compression, - percentileDefinition.digestColumnName, - percentileDefinition.percentiles.toArray(), - percentileDefinition.resultNames - .toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)); + new NonKeyColumnAggregationFactory((type, resultName, exposeInternalColumns) -> { + for (final PercentileDefinition percentileDefinition : flatDefs) { + if (percentileDefinition.inputColumn.equals(resultName)) { + return new TDigestPercentileOperator(type, compression, + percentileDefinition.digestColumnName, percentileDefinition.percentiles.toArray(), + percentileDefinition.resultNames.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)); + } } - } - return null; - }); - return 
ChunkedOperatorAggregationHelper.aggregation(aggregationContextFactory, - (QueryTable) input, groupByColumns); + return null; + }); + return ChunkedOperatorAggregationHelper.aggregation(aggregationContextFactory, (QueryTable) input, + groupByColumns); } /** - * Compute a set of approximate percentiles for input according to the definitions in - * percentileDefinitions. + * Compute a set of approximate percentiles for input according to the definitions in percentileDefinitions. * * @param input the table to compute approximate percentiles for - * @param percentileDefinitions the compression factor, and map of input columns to output - * columns + * @param percentileDefinitions the compression factor, and map of input columns to output columns * @param groupByColumns the columns to group by * @return a table containing the groupByColumns and the approximate percentiles */ - public static Table approximatePercentiles(Table input, - PercentileDefinition percentileDefinitions, String... groupByColumns) { - return approximatePercentiles(input, percentileDefinitions, - SelectColumnFactory.getExpressions(groupByColumns)); + public static Table approximatePercentiles(Table input, PercentileDefinition percentileDefinitions, + String... groupByColumns) { + return approximatePercentiles(input, percentileDefinitions, SelectColumnFactory.getExpressions(groupByColumns)); } /** - * Compute a set of approximate percentiles for input according to the definitions in - * percentileDefinitions. + * Compute a set of approximate percentiles for input according to the definitions in percentileDefinitions. 
* * @param input the table to compute approximate percentiles for - * @param percentileDefinitions the compression factor, and map of input columns to output - * columns + * @param percentileDefinitions the compression factor, and map of input columns to output columns * @return a table containing a single row with the the approximate percentiles */ - public static Table approximatePercentiles(Table input, - PercentileDefinition percentileDefinitions) { - return approximatePercentiles(input, percentileDefinitions, - SelectColumn.ZERO_LENGTH_SELECT_COLUMN_ARRAY); + public static Table approximatePercentiles(Table input, PercentileDefinition percentileDefinitions) { + return approximatePercentiles(input, percentileDefinitions, SelectColumn.ZERO_LENGTH_SELECT_COLUMN_ARRAY); } /** * Accumulate an DbArray of TDigests into a single new TDigest. * *

    - * Accumulate the digests within the DbArray into a single TDigest. The compression factor is - * one third of the compression factor of the first digest within the array. If the array has - * only a single element, then that element is returned. If a null array is passed in, null is - * returned. + * Accumulate the digests within the DbArray into a single TDigest. The compression factor is one third of the + * compression factor of the first digest within the array. If the array has only a single element, then that + * element is returned. If a null array is passed in, null is returned. *

    * *

    - * This function is intended to be used for parallelization. The first step is to independently - * compute approximate percentiles with an exposed digest column using your desired buckets. - * Next, call {@link Table#by(String...)} to produce arrays of Digests for each relevant bucket. - * Once the arrays are created, use this function to accumulate the arrays of digests within an - * {@link Table#update(String...)} statement. Finally, you may call the TDigest quantile - * function (or others) to produce the desired approximate percentile. + * This function is intended to be used for parallelization. The first step is to independently compute approximate + * percentiles with an exposed digest column using your desired buckets. Next, call {@link Table#by(String...)} to + * produce arrays of Digests for each relevant bucket. Once the arrays are created, use this function to accumulate + * the arrays of digests within an {@link Table#update(String...)} statement. Finally, you may call the TDigest + * quantile function (or others) to produce the desired approximate percentile. *

    * * @param array an array of TDigests diff --git a/DB/src/main/java/io/deephaven/db/v2/by/AvgState.java b/DB/src/main/java/io/deephaven/db/v2/by/AvgState.java index b53a49b34cd..e4b75c8179c 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/AvgState.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/AvgState.java @@ -53,8 +53,8 @@ void savePrevious() { @Override public String toString() { return "Avg{" + - "sum=" + runningSum + - ", nonNull=" + nonNullCount + - '}'; + "sum=" + runningSum + + ", nonNull=" + nonNullCount + + '}'; } } diff --git a/DB/src/main/java/io/deephaven/db/v2/by/AvgStateFactory.java b/DB/src/main/java/io/deephaven/db/v2/by/AvgStateFactory.java index 29bab1b1a87..f4c4c0f991d 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/AvgStateFactory.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/AvgStateFactory.java @@ -12,7 +12,7 @@ public AvgStateFactory() {} @Override public IterativeChunkedAggregationOperator getChunkedOperator(Class type, String name, - boolean exposeInternalColumns) { + boolean exposeInternalColumns) { return getAvgChunked(type, name, exposeInternalColumns); } diff --git a/DB/src/main/java/io/deephaven/db/v2/by/AvgStateWithNan.java b/DB/src/main/java/io/deephaven/db/v2/by/AvgStateWithNan.java index 94dd35702df..31cdcc86763 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/AvgStateWithNan.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/AvgStateWithNan.java @@ -33,9 +33,9 @@ void savePrevious() { @Override public String toString() { return "Avg{" + - "sum=" + runningSum + - ", nonNull=" + nonNullCount + - ", nan=" + nanCount + - '}'; + "sum=" + runningSum + + ", nonNull=" + nonNullCount + + ", nan=" + nanCount + + '}'; } } diff --git a/DB/src/main/java/io/deephaven/db/v2/by/BaseAddOnlyFirstOrLastChunkedOperator.java b/DB/src/main/java/io/deephaven/db/v2/by/BaseAddOnlyFirstOrLastChunkedOperator.java index fa4198d8116..371b3bffed1 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/BaseAddOnlyFirstOrLastChunkedOperator.java 
+++ b/DB/src/main/java/io/deephaven/db/v2/by/BaseAddOnlyFirstOrLastChunkedOperator.java @@ -18,15 +18,14 @@ import java.util.LinkedHashMap; import java.util.Map; -abstract class BaseAddOnlyFirstOrLastChunkedOperator - implements IterativeChunkedAggregationOperator { +abstract class BaseAddOnlyFirstOrLastChunkedOperator implements IterativeChunkedAggregationOperator { final boolean isFirst; final LongArraySource redirections; private final LongColumnSourceRedirectionIndex redirectionIndex; private final Map> resultColumns; - BaseAddOnlyFirstOrLastChunkedOperator(boolean isFirst, MatchPair[] resultPairs, - Table originalTable, String exposeRedirectionAs) { + BaseAddOnlyFirstOrLastChunkedOperator(boolean isFirst, MatchPair[] resultPairs, Table originalTable, + String exposeRedirectionAs) { this.isFirst = isFirst; this.redirections = new LongArraySource(); this.redirectionIndex = new LongColumnSourceRedirectionIndex(redirections); @@ -34,8 +33,8 @@ abstract class BaseAddOnlyFirstOrLastChunkedOperator this.resultColumns = new LinkedHashMap<>(resultPairs.length); for (final MatchPair mp : resultPairs) { // noinspection unchecked - resultColumns.put(mp.left(), new ReadOnlyRedirectedColumnSource(redirectionIndex, - originalTable.getColumnSource(mp.right()))); + resultColumns.put(mp.left(), + new ReadOnlyRedirectedColumnSource(redirectionIndex, originalTable.getColumnSource(mp.right()))); } if (exposeRedirectionAs != null) { resultColumns.put(exposeRedirectionAs, redirections); @@ -44,63 +43,59 @@ abstract class BaseAddOnlyFirstOrLastChunkedOperator @Override public void removeChunk(BucketedContext bucketedContext, Chunk values, - LongChunk inputIndices, IntChunk destinations, - IntChunk startPositions, IntChunk length, - WritableBooleanChunk stateModified) { + LongChunk inputIndices, IntChunk destinations, + IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { throw new UnsupportedOperationException(); } @Override public void 
modifyChunk(BucketedContext bucketedContext, Chunk previousValues, - Chunk newValues, LongChunk postShiftIndices, - IntChunk destinations, IntChunk startPositions, - IntChunk length, WritableBooleanChunk stateModified) { + Chunk newValues, LongChunk postShiftIndices, + IntChunk destinations, IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { // we have no inputs, so should never get here throw new IllegalStateException(); } @Override public void shiftChunk(BucketedContext bucketedContext, Chunk previousValues, - Chunk newValues, LongChunk preShiftIndices, - LongChunk postShiftIndices, IntChunk destinations, - IntChunk startPositions, IntChunk length, - WritableBooleanChunk stateModified) { + Chunk newValues, LongChunk preShiftIndices, + LongChunk postShiftIndices, IntChunk destinations, + IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { throw new UnsupportedOperationException(); } @Override public void modifyIndices(BucketedContext context, LongChunk inputIndices, - IntChunk destinations, IntChunk startPositions, - IntChunk length, WritableBooleanChunk stateModified) { + IntChunk destinations, IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { throw new UnsupportedOperationException(); } @Override - public boolean removeChunk(SingletonContext singletonContext, int chunkSize, - Chunk values, LongChunk inputIndices, - long destination) { + public boolean removeChunk(SingletonContext singletonContext, int chunkSize, Chunk values, + LongChunk inputIndices, long destination) { throw new UnsupportedOperationException(); } @Override - public boolean modifyChunk(SingletonContext singletonContext, int chunkSize, - Chunk previousValues, Chunk newValues, - LongChunk postShiftIndices, long destination) { + public boolean modifyChunk(SingletonContext singletonContext, int chunkSize, Chunk previousValues, + Chunk newValues, LongChunk postShiftIndices, long destination) { // we have no 
inputs, so should never get here throw new IllegalStateException(); } @Override - public boolean shiftChunk(SingletonContext singletonContext, - Chunk previousValues, Chunk newValues, - LongChunk preInputIndices, - LongChunk postInputIndices, long destination) { + public boolean shiftChunk(SingletonContext singletonContext, Chunk previousValues, + Chunk newValues, LongChunk preInputIndices, + LongChunk postInputIndices, long destination) { throw new UnsupportedOperationException(); } @Override - public boolean modifyIndices(SingletonContext context, LongChunk indices, - long destination) { + public boolean modifyIndices(SingletonContext context, LongChunk indices, long destination) { throw new UnsupportedOperationException(); } diff --git a/DB/src/main/java/io/deephaven/db/v2/by/BaseStreamFirstOrLastChunkedOperator.java b/DB/src/main/java/io/deephaven/db/v2/by/BaseStreamFirstOrLastChunkedOperator.java index 14afbf0b259..af335721b01 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/BaseStreamFirstOrLastChunkedOperator.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/BaseStreamFirstOrLastChunkedOperator.java @@ -15,17 +15,15 @@ import java.util.Map; /** - * Base class with shared boilerplate for {@link StreamFirstChunkedOperator} and - * {@link StreamLastChunkedOperator}. + * Base class with shared boilerplate for {@link StreamFirstChunkedOperator} and {@link StreamLastChunkedOperator}. */ -public abstract class BaseStreamFirstOrLastChunkedOperator - implements IterativeChunkedAggregationOperator { +public abstract class BaseStreamFirstOrLastChunkedOperator implements IterativeChunkedAggregationOperator { protected static final int COPY_CHUNK_SIZE = ArrayBackedColumnSource.BLOCK_SIZE; /** - * The number of result columns. This is the size of {@link #resultColumns} and the length of - * {@link #inputColumns} and {@link #outputColumns}. + * The number of result columns. 
This is the size of {@link #resultColumns} and the length of {@link #inputColumns} + * and {@link #outputColumns}. */ protected final int numResultColumns; /** @@ -36,8 +34,7 @@ public abstract class BaseStreamFirstOrLastChunkedOperator *

    * Input columns, parallel to {@link #outputColumns} and {@link #resultColumns}. *

    - * These are the source columns from the upstream table, reinterpreted to primitives where - * applicable. + * These are the source columns from the upstream table, reinterpreted to primitives where applicable. */ protected final ColumnSource[] inputColumns; /** @@ -52,31 +49,28 @@ public abstract class BaseStreamFirstOrLastChunkedOperator */ protected SoftReference cachedRedirections; /** - * Map from destination slot to first key. Only used during a step to keep track of the - * appropriate rows to copy into the output columns. + * Map from destination slot to first key. Only used during a step to keep track of the appropriate rows to copy + * into the output columns. */ protected LongArraySource redirections; protected BaseStreamFirstOrLastChunkedOperator(@NotNull final MatchPair[] resultPairs, - @NotNull final Table streamTable) { + @NotNull final Table streamTable) { numResultColumns = resultPairs.length; inputColumns = new ColumnSource[numResultColumns]; outputColumns = new WritableSource[numResultColumns]; - final Map> resultColumnsMutable = - new LinkedHashMap<>(numResultColumns); + final Map> resultColumnsMutable = new LinkedHashMap<>(numResultColumns); for (int ci = 0; ci < numResultColumns; ++ci) { final MatchPair resultPair = resultPairs[ci]; final ColumnSource streamSource = streamTable.getColumnSource(resultPair.right()); - final ArrayBackedColumnSource resultSource = ArrayBackedColumnSource - .getMemoryColumnSource(0, streamSource.getType(), streamSource.getComponentType()); + final ArrayBackedColumnSource resultSource = ArrayBackedColumnSource.getMemoryColumnSource(0, + streamSource.getType(), streamSource.getComponentType()); resultColumnsMutable.put(resultPair.left(), resultSource); inputColumns[ci] = ReinterpretUtilities.maybeConvertToPrimitive(streamSource); - // Note that ArrayBackedColumnSources implementations reinterpret very efficiently where - // applicable. 
- outputColumns[ci] = - (WritableSource) ReinterpretUtilities.maybeConvertToPrimitive(resultSource); + // Note that ArrayBackedColumnSources implementations reinterpret very efficiently where applicable. + outputColumns[ci] = (WritableSource) ReinterpretUtilities.maybeConvertToPrimitive(resultSource); Assert.eq(inputColumns[ci].getChunkType(), "inputColumns[ci].getChunkType()", - outputColumns[ci].getChunkType(), "outputColumns[ci].getChunkType()"); + outputColumns[ci].getChunkType(), "outputColumns[ci].getChunkType()"); } resultColumns = Collections.unmodifiableMap(resultColumnsMutable); cachedRedirections = new SoftReference<>(redirections = new LongArraySource()); @@ -105,78 +99,62 @@ public void resetForStep(@NotNull final ShiftAwareListener.Update upstream) { // ----------------------------------------------------------------------------------------------------------------- @Override - public final void removeChunk(BucketedContext bucketedContext, - Chunk values, - LongChunk inputIndices, - IntChunk destinations, - IntChunk startPositions, - IntChunk length, - WritableBooleanChunk stateModified) { + public final void removeChunk(BucketedContext bucketedContext, Chunk values, + LongChunk inputIndices, IntChunk destinations, + IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { throw new UnsupportedOperationException(); } @Override - public final void modifyChunk(BucketedContext bucketedContext, - Chunk previousValues, - Chunk newValues, - LongChunk postShiftIndices, - IntChunk destinations, - IntChunk startPositions, - IntChunk length, - WritableBooleanChunk stateModified) { + public final void modifyChunk(BucketedContext bucketedContext, Chunk previousValues, + Chunk newValues, LongChunk postShiftIndices, + IntChunk destinations, IntChunk startPositions, + IntChunk length, WritableBooleanChunk stateModified) { throw new IllegalStateException(); } @Override - public final void shiftChunk(BucketedContext bucketedContext, - Chunk 
previousValues, - Chunk newValues, - LongChunk preShiftIndices, - LongChunk postShiftIndices, - IntChunk destinations, - IntChunk startPositions, - IntChunk length, - WritableBooleanChunk stateModified) { + public final void shiftChunk(BucketedContext bucketedContext, Chunk previousValues, + Chunk newValues, LongChunk preShiftIndices, + LongChunk postShiftIndices, IntChunk destinations, + IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { throw new UnsupportedOperationException(); } @Override - public final void modifyIndices(BucketedContext context, - LongChunk inputIndices, - IntChunk destinations, - IntChunk startPositions, - IntChunk length, - WritableBooleanChunk stateModified) { + public final void modifyIndices(BucketedContext context, LongChunk inputIndices, + IntChunk destinations, IntChunk startPositions, + IntChunk length, WritableBooleanChunk stateModified) { throw new UnsupportedOperationException(); } @Override public final boolean removeChunk(SingletonContext singletonContext, int chunkSize, - Chunk values, - LongChunk inputIndices, long destination) { + Chunk values, LongChunk inputIndices, + long destination) { throw new UnsupportedOperationException(); } @Override public final boolean modifyChunk(SingletonContext singletonContext, int chunkSize, - Chunk previousValues, - Chunk newValues, - LongChunk postShiftIndices, long destination) { + Chunk previousValues, Chunk newValues, + LongChunk postShiftIndices, long destination) { throw new IllegalStateException(); } @Override public final boolean shiftChunk(SingletonContext singletonContext, - Chunk previousValues, - Chunk newValues, - LongChunk preInputIndices, - LongChunk postInputIndices, long destination) { + Chunk previousValues, Chunk newValues, + LongChunk preInputIndices, + LongChunk postInputIndices, long destination) { throw new UnsupportedOperationException(); } @Override - public final boolean modifyIndices(SingletonContext context, - LongChunk indices, long 
destination) { + public final boolean modifyIndices(SingletonContext context, LongChunk indices, + long destination) { throw new UnsupportedOperationException(); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/by/BigDecimalChunkedAvgOperator.java b/DB/src/main/java/io/deephaven/db/v2/by/BigDecimalChunkedAvgOperator.java index 7d6a74bc65e..7a9d29a19bd 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/BigDecimalChunkedAvgOperator.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/BigDecimalChunkedAvgOperator.java @@ -20,10 +20,8 @@ class BigDecimalChunkedAvgOperator implements IterativeChunkedAggregationOperator { private final String name; private final boolean exposeInternalColumns; - private final ObjectArraySource resultColumn = - new ObjectArraySource<>(BigDecimal.class); - private final ObjectArraySource runningSum = - new ObjectArraySource<>(BigDecimal.class); + private final ObjectArraySource resultColumn = new ObjectArraySource<>(BigDecimal.class); + private final ObjectArraySource runningSum = new ObjectArraySource<>(BigDecimal.class); private final NonNullCounter nonNullCount = new NonNullCounter(); BigDecimalChunkedAvgOperator(String name, boolean exposeInternalColumns) { @@ -33,57 +31,53 @@ class BigDecimalChunkedAvgOperator implements IterativeChunkedAggregationOperato @Override public void addChunk(BucketedContext context, Chunk values, - LongChunk inputIndices, IntChunk destinations, - IntChunk startPositions, IntChunk length, - WritableBooleanChunk stateModified) { + LongChunk inputIndices, IntChunk destinations, + IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { final ObjectChunk asObjectChunk = values.asObjectChunk(); for (int ii = 0; ii < startPositions.size(); ++ii) { final int startPosition = startPositions.get(ii); final long destination = destinations.get(startPosition); - stateModified.set(ii, - addChunk(asObjectChunk, destination, startPosition, length.get(ii))); + stateModified.set(ii, 
addChunk(asObjectChunk, destination, startPosition, length.get(ii))); } } @Override public void removeChunk(BucketedContext context, Chunk values, - LongChunk inputIndices, IntChunk destinations, - IntChunk startPositions, IntChunk length, - WritableBooleanChunk stateModified) { + LongChunk inputIndices, IntChunk destinations, + IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { final ObjectChunk asObjectChunk = values.asObjectChunk(); for (int ii = 0; ii < startPositions.size(); ++ii) { final int startPosition = startPositions.get(ii); final long destination = destinations.get(startPosition); - stateModified.set(ii, - removeChunk(asObjectChunk, destination, startPosition, length.get(ii))); + stateModified.set(ii, removeChunk(asObjectChunk, destination, startPosition, length.get(ii))); } } @Override public boolean addChunk(SingletonContext context, int chunkSize, Chunk values, - LongChunk inputIndices, long destination) { + LongChunk inputIndices, long destination) { return addChunk(values.asObjectChunk(), destination, 0, values.size()); } @Override - public boolean removeChunk(SingletonContext context, int chunkSize, - Chunk values, LongChunk inputIndices, - long destination) { + public boolean removeChunk(SingletonContext context, int chunkSize, Chunk values, + LongChunk inputIndices, long destination) { return removeChunk(values.asObjectChunk(), destination, 0, values.size()); } - public boolean addChunk(ObjectChunk values, long destination, - int chunkStart, int chunkSize) { + public boolean addChunk(ObjectChunk values, long destination, int chunkStart, + int chunkSize) { final MutableInt chunkNonNullCount = new MutableInt(); final BigDecimal chunkSum = - SumBigDecimalChunk.sumBigDecimalChunk(values, chunkStart, chunkSize, chunkNonNullCount); + SumBigDecimalChunk.sumBigDecimalChunk(values, chunkStart, chunkSize, chunkNonNullCount); if (chunkNonNullCount.intValue() <= 0) { return false; } - final long newCount = - 
nonNullCount.addNonNullUnsafe(destination, chunkNonNullCount.intValue()); + final long newCount = nonNullCount.addNonNullUnsafe(destination, chunkNonNullCount.intValue()); final BigDecimal newSum; final BigDecimal oldSum = runningSum.getUnsafe(destination); if (oldSum == null) { @@ -92,24 +86,22 @@ public boolean addChunk(ObjectChunk values, long d newSum = oldSum.add(chunkSum); } runningSum.set(destination, newSum); - resultColumn.set(destination, - newSum.divide(BigDecimal.valueOf(newCount), BigDecimal.ROUND_HALF_UP)); + resultColumn.set(destination, newSum.divide(BigDecimal.valueOf(newCount), BigDecimal.ROUND_HALF_UP)); return true; } - public boolean removeChunk(ObjectChunk values, long destination, - int chunkStart, int chunkSize) { + public boolean removeChunk(ObjectChunk values, long destination, int chunkStart, + int chunkSize) { final MutableInt chunkNonNullCount = new MutableInt(); final BigDecimal chunkSum = - SumBigDecimalChunk.sumBigDecimalChunk(values, chunkStart, chunkSize, chunkNonNullCount); + SumBigDecimalChunk.sumBigDecimalChunk(values, chunkStart, chunkSize, chunkNonNullCount); if (chunkNonNullCount.intValue() <= 0) { return false; } - final long newCount = - nonNullCount.addNonNullUnsafe(destination, -chunkNonNullCount.intValue()); + final long newCount = nonNullCount.addNonNullUnsafe(destination, -chunkNonNullCount.intValue()); if (newCount == 0) { resultColumn.set(destination, null); runningSum.set(destination, null); @@ -117,8 +109,7 @@ public boolean removeChunk(ObjectChunk values, lon final BigDecimal oldSum = runningSum.getUnsafe(destination); final BigDecimal newSum = oldSum.subtract(chunkSum); runningSum.set(destination, newSum); - resultColumn.set(destination, - newSum.divide(BigDecimal.valueOf(newCount), BigDecimal.ROUND_HALF_UP)); + resultColumn.set(destination, newSum.divide(BigDecimal.valueOf(newCount), BigDecimal.ROUND_HALF_UP)); } return true; @@ -137,8 +128,7 @@ public void ensureCapacity(long tableSize) { final Map> results = 
new LinkedHashMap<>(); results.put(name, resultColumn); results.put(name + ROLLUP_RUNNING_SUM_COLUMN_ID + ROLLUP_COLUMN_SUFFIX, runningSum); - results.put(name + ROLLUP_NONNULL_COUNT_COLUMN_ID + ROLLUP_COLUMN_SUFFIX, - nonNullCount.getColumnSource()); + results.put(name + ROLLUP_NONNULL_COUNT_COLUMN_ID + ROLLUP_COLUMN_SUFFIX, nonNullCount.getColumnSource()); return results; } else { return Collections.singletonMap(name, resultColumn); diff --git a/DB/src/main/java/io/deephaven/db/v2/by/BigDecimalChunkedReAvgOperator.java b/DB/src/main/java/io/deephaven/db/v2/by/BigDecimalChunkedReAvgOperator.java index 70d1b244703..c98a8e3cba5 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/BigDecimalChunkedReAvgOperator.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/BigDecimalChunkedReAvgOperator.java @@ -23,8 +23,7 @@ class BigDecimalChunkedReAvgOperator implements IterativeChunkedAggregationOpera private final BigDecimalChunkedSumOperator sumSum; private final LongChunkedSumOperator nncSum; - BigDecimalChunkedReAvgOperator(String name, BigDecimalChunkedSumOperator sumSum, - LongChunkedSumOperator nncSum) { + BigDecimalChunkedReAvgOperator(String name, BigDecimalChunkedSumOperator sumSum, LongChunkedSumOperator nncSum) { this.name = name; this.sumSum = sumSum; this.nncSum = nncSum; @@ -33,58 +32,55 @@ class BigDecimalChunkedReAvgOperator implements IterativeChunkedAggregationOpera @Override public void addChunk(BucketedContext context, Chunk values, - LongChunk inputIndices, IntChunk destinations, - IntChunk startPositions, IntChunk length, - WritableBooleanChunk stateModified) { + LongChunk inputIndices, IntChunk destinations, + IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { doBucketedUpdate((ReAvgContext) context, destinations, startPositions, stateModified); } @Override public void removeChunk(BucketedContext context, Chunk values, - LongChunk inputIndices, IntChunk destinations, - IntChunk startPositions, IntChunk length, - 
WritableBooleanChunk stateModified) { + LongChunk inputIndices, IntChunk destinations, + IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { doBucketedUpdate((ReAvgContext) context, destinations, startPositions, stateModified); } @Override public void modifyChunk(BucketedContext context, Chunk previousValues, - Chunk newValues, LongChunk postShiftIndices, - IntChunk destinations, IntChunk startPositions, - IntChunk length, WritableBooleanChunk stateModified) { + Chunk newValues, LongChunk postShiftIndices, + IntChunk destinations, IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { doBucketedUpdate((ReAvgContext) context, destinations, startPositions, stateModified); } private void doBucketedUpdate(ReAvgContext context, IntChunk destinations, - IntChunk startPositions, WritableBooleanChunk stateModified) { + IntChunk startPositions, WritableBooleanChunk stateModified) { context.keyIndices.setSize(startPositions.size()); for (int ii = 0; ii < startPositions.size(); ++ii) { final int startPosition = startPositions.get(ii); context.keyIndices.set(ii, destinations.get(startPosition)); } - try (final OrderedKeys destinationOk = - OrderedKeys.wrapKeyIndicesChunkAsOrderedKeys(context.keyIndices)) { + try (final OrderedKeys destinationOk = OrderedKeys.wrapKeyIndicesChunkAsOrderedKeys(context.keyIndices)) { updateResult(context, destinationOk, stateModified); } } @Override public boolean addChunk(SingletonContext context, int chunkSize, Chunk values, - LongChunk inputIndices, long destination) { + LongChunk inputIndices, long destination) { return updateResult(destination); } @Override - public boolean removeChunk(SingletonContext context, int chunkSize, - Chunk values, LongChunk inputIndices, - long destination) { + public boolean removeChunk(SingletonContext context, int chunkSize, Chunk values, + LongChunk inputIndices, long destination) { return updateResult(destination); } @Override - public boolean 
modifyChunk(SingletonContext context, int chunkSize, - Chunk previousValues, Chunk newValues, - LongChunk postShiftIndices, long destination) { + public boolean modifyChunk(SingletonContext context, int chunkSize, Chunk previousValues, + Chunk newValues, LongChunk postShiftIndices, long destination) { return updateResult(destination); } @@ -96,22 +92,21 @@ private boolean updateResult(long destination) { } private void updateResult(ReAvgContext reAvgContext, OrderedKeys destinationOk, - WritableBooleanChunk stateModified) { + WritableBooleanChunk stateModified) { final ObjectChunk sumSumChunk = - sumSum.getChunk(reAvgContext.sumSumContext, destinationOk).asObjectChunk(); + sumSum.getChunk(reAvgContext.sumSumContext, destinationOk).asObjectChunk(); final LongChunk nncSumChunk = - nncSum.getChunk(reAvgContext.nncSumContext, destinationOk).asLongChunk(); + nncSum.getChunk(reAvgContext.nncSumContext, destinationOk).asLongChunk(); final int size = reAvgContext.keyIndices.size(); for (int ii = 0; ii < size; ++ii) { - stateModified.set(ii, updateResult(reAvgContext.keyIndices.get(ii), sumSumChunk.get(ii), - nncSumChunk.get(ii))); + stateModified.set(ii, + updateResult(reAvgContext.keyIndices.get(ii), sumSumChunk.get(ii), nncSumChunk.get(ii))); } } private boolean updateResult(long destination, BigDecimal sumSumValue, long nncValue) { if (nncValue > 0) { - final BigDecimal newValue = - sumSumValue.divide(BigDecimal.valueOf(nncValue), BigDecimal.ROUND_HALF_UP); + final BigDecimal newValue = sumSumValue.divide(BigDecimal.valueOf(nncValue), BigDecimal.ROUND_HALF_UP); return !newValue.equals(resultColumn.getAndSetUnsafe(destination, newValue)); } else { return null != resultColumn.getAndSetUnsafe(destination, null); diff --git a/DB/src/main/java/io/deephaven/db/v2/by/BigDecimalChunkedReVarOperator.java b/DB/src/main/java/io/deephaven/db/v2/by/BigDecimalChunkedReVarOperator.java index 1987bf7582a..f1a4613ed26 100644 --- 
a/DB/src/main/java/io/deephaven/db/v2/by/BigDecimalChunkedReVarOperator.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/BigDecimalChunkedReVarOperator.java @@ -24,7 +24,7 @@ */ class BigDecimalChunkedReVarOperator implements IterativeChunkedAggregationOperator { private static final int SCALE = - Configuration.getInstance().getIntegerWithDefault("BigDecimalStdOperator.scale", 10); + Configuration.getInstance().getIntegerWithDefault("BigDecimalStdOperator.scale", 10); private final ObjectArraySource resultColumn; private final String name; private final boolean std; @@ -33,7 +33,7 @@ class BigDecimalChunkedReVarOperator implements IterativeChunkedAggregationOpera private final LongChunkedSumOperator nncSum; BigDecimalChunkedReVarOperator(String name, boolean std, BigDecimalChunkedSumOperator sumSum, - BigDecimalChunkedSumOperator sum2sum, LongChunkedSumOperator nncSum) { + BigDecimalChunkedSumOperator sum2sum, LongChunkedSumOperator nncSum) { this.name = name; this.std = std; this.sumSum = sumSum; @@ -44,73 +44,70 @@ class BigDecimalChunkedReVarOperator implements IterativeChunkedAggregationOpera @Override public void addChunk(BucketedContext context, Chunk values, - LongChunk inputIndices, IntChunk destinations, - IntChunk startPositions, IntChunk length, - WritableBooleanChunk stateModified) { + LongChunk inputIndices, IntChunk destinations, + IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { doBucketedUpdate((ReVarContext) context, destinations, startPositions, stateModified); } @Override public void removeChunk(BucketedContext context, Chunk values, - LongChunk inputIndices, IntChunk destinations, - IntChunk startPositions, IntChunk length, - WritableBooleanChunk stateModified) { + LongChunk inputIndices, IntChunk destinations, + IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { doBucketedUpdate((ReVarContext) context, destinations, startPositions, stateModified); } @Override public void 
modifyChunk(BucketedContext context, Chunk previousValues, - Chunk newValues, LongChunk postShiftIndices, - IntChunk destinations, IntChunk startPositions, - IntChunk length, WritableBooleanChunk stateModified) { + Chunk newValues, LongChunk postShiftIndices, + IntChunk destinations, IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { doBucketedUpdate((ReVarContext) context, destinations, startPositions, stateModified); } @Override public boolean addChunk(SingletonContext context, int chunkSize, Chunk values, - LongChunk inputIndices, long destination) { + LongChunk inputIndices, long destination) { return updateResult(destination); } @Override - public boolean removeChunk(SingletonContext context, int chunkSize, - Chunk values, LongChunk inputIndices, - long destination) { + public boolean removeChunk(SingletonContext context, int chunkSize, Chunk values, + LongChunk inputIndices, long destination) { return updateResult(destination); } @Override - public boolean modifyChunk(SingletonContext context, int chunkSize, - Chunk previousValues, Chunk newValues, - LongChunk postShiftIndices, long destination) { + public boolean modifyChunk(SingletonContext context, int chunkSize, Chunk previousValues, + Chunk newValues, LongChunk postShiftIndices, long destination) { return updateResult(destination); } private void doBucketedUpdate(ReVarContext context, IntChunk destinations, - IntChunk startPositions, WritableBooleanChunk stateModified) { + IntChunk startPositions, WritableBooleanChunk stateModified) { context.keyIndices.setSize(startPositions.size()); for (int ii = 0; ii < startPositions.size(); ++ii) { final int startPosition = startPositions.get(ii); context.keyIndices.set(ii, destinations.get(startPosition)); } - try (final OrderedKeys destinationOk = - OrderedKeys.wrapKeyIndicesChunkAsOrderedKeys(context.keyIndices)) { + try (final OrderedKeys destinationOk = OrderedKeys.wrapKeyIndicesChunkAsOrderedKeys(context.keyIndices)) { 
updateResult(context, destinationOk, stateModified); } } private void updateResult(ReVarContext reVarContext, OrderedKeys destinationOk, - WritableBooleanChunk stateModified) { + WritableBooleanChunk stateModified) { final ObjectChunk sumSumChunk = - sumSum.getChunk(reVarContext.sumSumContext, destinationOk).asObjectChunk(); + sumSum.getChunk(reVarContext.sumSumContext, destinationOk).asObjectChunk(); final ObjectChunk sum2SumChunk = - sum2Sum.getChunk(reVarContext.sum2SumContext, destinationOk).asObjectChunk(); + sum2Sum.getChunk(reVarContext.sum2SumContext, destinationOk).asObjectChunk(); final LongChunk nncSumChunk = - nncSum.getChunk(reVarContext.nncSumContext, destinationOk).asLongChunk(); + nncSum.getChunk(reVarContext.nncSumContext, destinationOk).asLongChunk(); final int size = reVarContext.keyIndices.size(); for (int ii = 0; ii < size; ++ii) { stateModified.set(ii, updateResult(reVarContext.keyIndices.get(ii), sumSumChunk.get(ii), - sum2SumChunk.get(ii), nncSumChunk.get(ii))); + sum2SumChunk.get(ii), nncSumChunk.get(ii))); } } @@ -122,8 +119,7 @@ private boolean updateResult(long destination) { return updateResult(destination, newSum, newSum2, nonNullCount); } - private boolean updateResult(long destination, BigDecimal newSum, BigDecimal newSum2, - long nonNullCount) { + private boolean updateResult(long destination, BigDecimal newSum, BigDecimal newSum2, long nonNullCount) { if (nonNullCount <= 1) { return null == resultColumn.getAndSetUnsafe(destination, null); } else { @@ -134,9 +130,9 @@ private boolean updateResult(long destination, BigDecimal newSum, BigDecimal new newSum2 = BigDecimal.ZERO; } final BigDecimal countMinus1 = BigDecimal.valueOf(nonNullCount - 1); - final BigDecimal variance = newSum2.subtract( - newSum.pow(2).divide(BigDecimal.valueOf(nonNullCount), BigDecimal.ROUND_HALF_UP)) - .divide(countMinus1, BigDecimal.ROUND_HALF_UP); + final BigDecimal variance = + newSum2.subtract(newSum.pow(2).divide(BigDecimal.valueOf(nonNullCount), 
BigDecimal.ROUND_HALF_UP)) + .divide(countMinus1, BigDecimal.ROUND_HALF_UP); final BigDecimal result = std ? BigDecimalUtils.sqrt(variance, SCALE) : variance; return !result.equals(resultColumn.getAndSetUnsafe(destination, result)); } diff --git a/DB/src/main/java/io/deephaven/db/v2/by/BigDecimalChunkedSumOperator.java b/DB/src/main/java/io/deephaven/db/v2/by/BigDecimalChunkedSumOperator.java index c62b5305752..32ad0cfaf34 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/BigDecimalChunkedSumOperator.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/BigDecimalChunkedSumOperator.java @@ -16,11 +16,9 @@ import java.util.Collections; import java.util.Map; -public class BigDecimalChunkedSumOperator - implements IterativeChunkedAggregationOperator, ChunkSource { +public class BigDecimalChunkedSumOperator implements IterativeChunkedAggregationOperator, ChunkSource { private final String name; - private final ObjectArraySource resultColumn = - new ObjectArraySource<>(BigDecimal.class); + private final ObjectArraySource resultColumn = new ObjectArraySource<>(BigDecimal.class); private final NonNullCounter nonNullCount = new NonNullCounter(); private final boolean isAbsolute; @@ -39,72 +37,66 @@ public static BigDecimal minus(BigDecimal a, BigDecimal b) { @Override public void addChunk(BucketedContext context, Chunk values, - LongChunk inputIndices, IntChunk destinations, - IntChunk startPositions, IntChunk length, - WritableBooleanChunk stateModified) { + LongChunk inputIndices, IntChunk destinations, + IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { final ObjectChunk asObjectChunk = values.asObjectChunk(); for (int ii = 0; ii < startPositions.size(); ++ii) { final int startPosition = startPositions.get(ii); final long destination = destinations.get(startPosition); - stateModified.set(ii, - addChunk(asObjectChunk, destination, startPosition, length.get(ii))); + stateModified.set(ii, addChunk(asObjectChunk, destination, startPosition, 
length.get(ii))); } } @Override public void removeChunk(BucketedContext context, Chunk values, - LongChunk inputIndices, IntChunk destinations, - IntChunk startPositions, IntChunk length, - WritableBooleanChunk stateModified) { + LongChunk inputIndices, IntChunk destinations, + IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { final ObjectChunk asObjectChunk = values.asObjectChunk(); for (int ii = 0; ii < startPositions.size(); ++ii) { final int startPosition = startPositions.get(ii); final long destination = destinations.get(startPosition); - stateModified.set(ii, - removeChunk(asObjectChunk, destination, startPosition, length.get(ii))); + stateModified.set(ii, removeChunk(asObjectChunk, destination, startPosition, length.get(ii))); } } @Override public void modifyChunk(BucketedContext context, Chunk previousValues, - Chunk newValues, LongChunk postShiftIndices, - IntChunk destinations, IntChunk startPositions, - IntChunk length, WritableBooleanChunk stateModified) { - final ObjectChunk preAsObjectChunk = - previousValues.asObjectChunk(); - final ObjectChunk postAsObjectChunk = - newValues.asObjectChunk(); + Chunk newValues, LongChunk postShiftIndices, + IntChunk destinations, IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { + final ObjectChunk preAsObjectChunk = previousValues.asObjectChunk(); + final ObjectChunk postAsObjectChunk = newValues.asObjectChunk(); for (int ii = 0; ii < startPositions.size(); ++ii) { final int startPosition = startPositions.get(ii); final long destination = destinations.get(startPosition); - stateModified.set(ii, modifyChunk(preAsObjectChunk, postAsObjectChunk, destination, - startPosition, length.get(ii))); + stateModified.set(ii, + modifyChunk(preAsObjectChunk, postAsObjectChunk, destination, startPosition, length.get(ii))); } } @Override public boolean addChunk(SingletonContext context, int chunkSize, Chunk values, - LongChunk inputIndices, long destination) { + 
LongChunk inputIndices, long destination) { return addChunk(values.asObjectChunk(), destination, 0, values.size()); } @Override - public boolean removeChunk(SingletonContext context, int chunkSize, - Chunk values, LongChunk inputIndices, - long destination) { + public boolean removeChunk(SingletonContext context, int chunkSize, Chunk values, + LongChunk inputIndices, long destination) { return removeChunk(values.asObjectChunk(), destination, 0, values.size()); } @Override - public boolean modifyChunk(SingletonContext context, int chunkSize, - Chunk previousValues, Chunk newValues, - LongChunk postShiftIndices, long destination) { - return modifyChunk(previousValues.asObjectChunk(), newValues.asObjectChunk(), destination, - 0, previousValues.size()); + public boolean modifyChunk(SingletonContext context, int chunkSize, Chunk previousValues, + Chunk newValues, LongChunk postShiftIndices, long destination) { + return modifyChunk(previousValues.asObjectChunk(), newValues.asObjectChunk(), destination, 0, + previousValues.size()); } - private boolean addChunk(ObjectChunk values, long destination, - int chunkStart, int chunkSize) { + private boolean addChunk(ObjectChunk values, long destination, int chunkStart, + int chunkSize) { final MutableInt chunkNonNull = new MutableInt(0); final BigDecimal partialSum = doSum(values, chunkStart, chunkSize, chunkNonNull); @@ -120,8 +112,8 @@ private boolean addChunk(ObjectChunk values, long return changed; } - private boolean removeChunk(ObjectChunk values, long destination, - int chunkStart, int chunkSize) { + private boolean removeChunk(ObjectChunk values, long destination, int chunkStart, + int chunkSize) { final MutableInt chunkNonNull = new MutableInt(0); final BigDecimal partialSum = doSum(values, chunkStart, chunkSize, chunkNonNull); @@ -139,13 +131,11 @@ private boolean removeChunk(ObjectChunk values, lo } private boolean modifyChunk(ObjectChunk preValues, - ObjectChunk postValues, long destination, int chunkStart, - int 
chunkSize) { + ObjectChunk postValues, long destination, int chunkStart, int chunkSize) { final MutableInt preChunkNonNull = new MutableInt(0); final MutableInt postChunkNonNull = new MutableInt(0); final BigDecimal prePartialSum = doSum(preValues, chunkStart, chunkSize, preChunkNonNull); - final BigDecimal postPartialSum = - doSum(postValues, chunkStart, chunkSize, postChunkNonNull); + final BigDecimal postPartialSum = doSum(postValues, chunkStart, chunkSize, postChunkNonNull); final int nullDifference = postChunkNonNull.intValue() - preChunkNonNull.intValue(); @@ -166,14 +156,12 @@ private boolean modifyChunk(ObjectChunk preValues, return true; } - private BigDecimal doSum(ObjectChunk values, int chunkStart, - int chunkSize, MutableInt chunkNonNull) { + private BigDecimal doSum(ObjectChunk values, int chunkStart, int chunkSize, + MutableInt chunkNonNull) { if (isAbsolute) { - return SumBigDecimalChunk.sumBigDecimalChunkAbs(values, chunkStart, chunkSize, - chunkNonNull); + return SumBigDecimalChunk.sumBigDecimalChunkAbs(values, chunkStart, chunkSize, chunkNonNull); } else { - return SumBigDecimalChunk.sumBigDecimalChunk(values, chunkStart, chunkSize, - chunkNonNull); + return SumBigDecimalChunk.sumBigDecimalChunk(values, chunkStart, chunkSize, chunkNonNull); } } @@ -203,20 +191,18 @@ public ChunkType getChunkType() { } @Override - public Chunk getChunk(@NotNull GetContext context, - @NotNull OrderedKeys orderedKeys) { + public Chunk getChunk(@NotNull GetContext context, @NotNull OrderedKeys orderedKeys) { return resultColumn.getChunk(context, orderedKeys); } @Override - public Chunk getChunk(@NotNull GetContext context, long firstKey, - long lastKey) { + public Chunk getChunk(@NotNull GetContext context, long firstKey, long lastKey) { return resultColumn.getChunk(context, firstKey, lastKey); } @Override - public void fillChunk(@NotNull FillContext context, - @NotNull WritableChunk destination, @NotNull OrderedKeys orderedKeys) { + public void fillChunk(@NotNull 
FillContext context, @NotNull WritableChunk destination, + @NotNull OrderedKeys orderedKeys) { resultColumn.fillChunk(context, destination, orderedKeys); } diff --git a/DB/src/main/java/io/deephaven/db/v2/by/BigDecimalChunkedVarOperator.java b/DB/src/main/java/io/deephaven/db/v2/by/BigDecimalChunkedVarOperator.java index 87adc63489a..4d43286d513 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/BigDecimalChunkedVarOperator.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/BigDecimalChunkedVarOperator.java @@ -25,18 +25,15 @@ */ class BigDecimalChunkedVarOperator implements IterativeChunkedAggregationOperator { private final static int SCALE = - Configuration.getInstance().getIntegerWithDefault("BigDecimalStdOperator.scale", 10); + Configuration.getInstance().getIntegerWithDefault("BigDecimalStdOperator.scale", 10); private final String name; private final boolean exposeInternalColumns; private final boolean std; private final NonNullCounter nonNullCounter = new NonNullCounter(); - private final ObjectArraySource resultColumn = - new ObjectArraySource<>(BigDecimal.class); - private final ObjectArraySource sumSource = - new ObjectArraySource<>(BigDecimal.class); - private final ObjectArraySource sum2Source = - new ObjectArraySource<>(BigDecimal.class); + private final ObjectArraySource resultColumn = new ObjectArraySource<>(BigDecimal.class); + private final ObjectArraySource sumSource = new ObjectArraySource<>(BigDecimal.class); + private final ObjectArraySource sum2Source = new ObjectArraySource<>(BigDecimal.class); BigDecimalChunkedVarOperator(boolean std, String name, boolean exposeInternalColumns) { this.std = std; @@ -59,51 +56,48 @@ private BigDecimal plus(BigDecimal a, BigDecimal b) { @Override public void addChunk(BucketedContext context, Chunk values, - LongChunk inputIndices, IntChunk destinations, - IntChunk startPositions, IntChunk length, - WritableBooleanChunk stateModified) { + LongChunk inputIndices, IntChunk destinations, + IntChunk 
startPositions, IntChunk length, + WritableBooleanChunk stateModified) { final ObjectChunk asObjectChunk = values.asObjectChunk(); for (int ii = 0; ii < startPositions.size(); ++ii) { final int startPosition = startPositions.get(ii); final long destination = destinations.get(startPosition); - stateModified.set(ii, - addChunk(asObjectChunk, destination, startPosition, length.get(ii))); + stateModified.set(ii, addChunk(asObjectChunk, destination, startPosition, length.get(ii))); } } @Override public void removeChunk(BucketedContext context, Chunk values, - LongChunk inputIndices, IntChunk destinations, - IntChunk startPositions, IntChunk length, - WritableBooleanChunk stateModified) { + LongChunk inputIndices, IntChunk destinations, + IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { final ObjectChunk asObjectChunk = values.asObjectChunk(); for (int ii = 0; ii < startPositions.size(); ++ii) { final int startPosition = startPositions.get(ii); final long destination = destinations.get(startPosition); - stateModified.set(ii, - removeChunk(asObjectChunk, destination, startPosition, length.get(ii))); + stateModified.set(ii, removeChunk(asObjectChunk, destination, startPosition, length.get(ii))); } } @Override public boolean addChunk(SingletonContext context, int chunkSize, Chunk values, - LongChunk inputIndices, long destination) { + LongChunk inputIndices, long destination) { return addChunk(values.asObjectChunk(), destination, 0, values.size()); } @Override - public boolean removeChunk(SingletonContext context, int chunkSize, - Chunk values, LongChunk inputIndices, - long destination) { + public boolean removeChunk(SingletonContext context, int chunkSize, Chunk values, + LongChunk inputIndices, long destination) { return removeChunk(values.asObjectChunk(), destination, 0, values.size()); } - private boolean addChunk(ObjectChunk values, long destination, - int chunkStart, int chunkSize) { + private boolean addChunk(ObjectChunk values, long 
destination, int chunkStart, + int chunkSize) { final MutableObject sum2 = new MutableObject<>(); final MutableInt chunkNonNull = new MutableInt(0); - final BigDecimal sum = SumBigDecimalChunk.sum2BigDecimalChunk(values, chunkStart, chunkSize, - chunkNonNull, sum2); + final BigDecimal sum = + SumBigDecimalChunk.sum2BigDecimalChunk(values, chunkStart, chunkSize, chunkNonNull, sum2); if (chunkNonNull.intValue() <= 0) { return false; @@ -111,33 +105,30 @@ private boolean addChunk(ObjectChunk values, long final BigDecimal newSum = plus(sumSource.getUnsafe(destination), sum); final BigDecimal newSum2 = plus(sum2Source.getUnsafe(destination), sum2.getValue()); - final long nonNullCount = - nonNullCounter.addNonNullUnsafe(destination, chunkNonNull.intValue()); + final long nonNullCount = nonNullCounter.addNonNullUnsafe(destination, chunkNonNull.intValue()); doUpdate(destination, newSum, newSum2, nonNullCount); return true; } - private boolean removeChunk(ObjectChunk values, long destination, - int chunkStart, int chunkSize) { + private boolean removeChunk(ObjectChunk values, long destination, int chunkStart, + int chunkSize) { final MutableObject sum2 = new MutableObject<>(); final MutableInt chunkNonNull = new MutableInt(0); - final BigDecimal sum = SumBigDecimalChunk.sum2BigDecimalChunk(values, chunkStart, chunkSize, - chunkNonNull, sum2); + final BigDecimal sum = + SumBigDecimalChunk.sum2BigDecimalChunk(values, chunkStart, chunkSize, chunkNonNull, sum2); if (chunkNonNull.intValue() <= 0) { return false; } final BigDecimal newSum = plus(sumSource.getUnsafe(destination), sum.negate()); - final BigDecimal newSum2 = - plus(sum2Source.getUnsafe(destination), sum2.getValue().negate()); + final BigDecimal newSum2 = plus(sum2Source.getUnsafe(destination), sum2.getValue().negate()); final long nonNullCount = nonNullCounter.addNonNull(destination, -chunkNonNull.intValue()); doUpdate(destination, newSum, newSum2, nonNullCount); return true; } - private void doUpdate(long 
destination, BigDecimal newSum, BigDecimal newSum2, - long nonNullCount) { + private void doUpdate(long destination, BigDecimal newSum, BigDecimal newSum2, long nonNullCount) { if (nonNullCount == 0) { sumSource.set(destination, null); sum2Source.set(destination, null); @@ -150,9 +141,9 @@ private void doUpdate(long destination, BigDecimal newSum, BigDecimal newSum2, resultColumn.set(destination, null); } else { final BigDecimal countMinus1 = BigDecimal.valueOf(nonNullCount - 1); - final BigDecimal variance = newSum2.subtract( - newSum.pow(2).divide(BigDecimal.valueOf(nonNullCount), BigDecimal.ROUND_HALF_UP)) - .divide(countMinus1, BigDecimal.ROUND_HALF_UP); + final BigDecimal variance = + newSum2.subtract(newSum.pow(2).divide(BigDecimal.valueOf(nonNullCount), BigDecimal.ROUND_HALF_UP)) + .divide(countMinus1, BigDecimal.ROUND_HALF_UP); if (std) { resultColumn.set(destination, BigDecimalUtils.sqrt(variance, SCALE)); } else { @@ -177,8 +168,7 @@ public void ensureCapacity(long tableSize) { results.put(name, resultColumn); results.put(name + ROLLUP_RUNNING_SUM_COLUMN_ID + ROLLUP_COLUMN_SUFFIX, sumSource); results.put(name + ROLLUP_RUNNING_SUM2_COLUMN_ID + ROLLUP_COLUMN_SUFFIX, sum2Source); - results.put(name + ROLLUP_NONNULL_COUNT_COLUMN_ID + ROLLUP_COLUMN_SUFFIX, - nonNullCounter.getColumnSource()); + results.put(name + ROLLUP_NONNULL_COUNT_COLUMN_ID + ROLLUP_COLUMN_SUFFIX, nonNullCounter.getColumnSource()); return results; } else { return Collections.singletonMap(name, resultColumn); diff --git a/DB/src/main/java/io/deephaven/db/v2/by/ByAggregationFactory.java b/DB/src/main/java/io/deephaven/db/v2/by/ByAggregationFactory.java index 50526553ae1..22b80d40814 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/ByAggregationFactory.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/ByAggregationFactory.java @@ -14,8 +14,7 @@ import java.util.stream.Collectors; /** - * An {@link AggregationContextFactory} used in the implementation of - * {@link 
io.deephaven.db.tables.Table#by}. + * An {@link AggregationContextFactory} used in the implementation of {@link io.deephaven.db.tables.Table#by}. */ public class ByAggregationFactory implements AggregationContextFactory { @@ -34,17 +33,16 @@ public boolean allowKeyOnlySubstitution() { @Override public AggregationContext makeAggregationContext(@NotNull final Table inputTable, - @NotNull final String... groupByColumnNames) { - final Set groupByColumnNameSet = - Arrays.stream(groupByColumnNames).collect(Collectors.toSet()); + @NotNull final String... groupByColumnNames) { + final Set groupByColumnNameSet = Arrays.stream(groupByColumnNames).collect(Collectors.toSet()); final String[] resultColumnNames = inputTable.getDefinition().getColumnNames().stream() - .filter(cn -> !groupByColumnNameSet.contains(cn)).toArray(String[]::new); + .filter(cn -> !groupByColumnNameSet.contains(cn)).toArray(String[]::new); // noinspection unchecked return new AggregationContext( - new IterativeChunkedAggregationOperator[] {new ByChunkedOperator( - (QueryTable) inputTable, true, MatchPairFactory.getExpressions(resultColumnNames))}, - new String[][] {CollectionUtil.ZERO_LENGTH_STRING_ARRAY}, - new ChunkSource.WithPrev[] {null}); + new IterativeChunkedAggregationOperator[] {new ByChunkedOperator((QueryTable) inputTable, true, + MatchPairFactory.getExpressions(resultColumnNames))}, + new String[][] {CollectionUtil.ZERO_LENGTH_STRING_ARRAY}, + new ChunkSource.WithPrev[] {null}); } @Override @@ -53,26 +51,25 @@ public String toString() { } public static QueryTable by(@NotNull final QueryTable inputTable, - @NotNull final String... groupByColumnNames) { + @NotNull final String... 
groupByColumnNames) { return by(AggregationControl.DEFAULT_FOR_OPERATOR, inputTable, groupByColumnNames); } public static QueryTable by(@NotNull final QueryTable inputTable, - @NotNull final SelectColumn[] groupByColumns) { + @NotNull final SelectColumn[] groupByColumns) { return by(AggregationControl.DEFAULT_FOR_OPERATOR, inputTable, groupByColumns); } public static QueryTable by(@NotNull final AggregationControl aggregationControl, - @NotNull final QueryTable inputTable, - @NotNull final String... groupByColumnNames) { - return by(aggregationControl, inputTable, - SelectColumnFactory.getExpressions(groupByColumnNames)); + @NotNull final QueryTable inputTable, + @NotNull final String... groupByColumnNames) { + return by(aggregationControl, inputTable, SelectColumnFactory.getExpressions(groupByColumnNames)); } public static QueryTable by(@NotNull final AggregationControl aggregationControl, - @NotNull final QueryTable inputTable, - @NotNull final SelectColumn[] groupByColumns) { - return ChunkedOperatorAggregationHelper.aggregation(aggregationControl, getInstance(), - inputTable, groupByColumns); + @NotNull final QueryTable inputTable, + @NotNull final SelectColumn[] groupByColumns) { + return ChunkedOperatorAggregationHelper.aggregation(aggregationControl, getInstance(), inputTable, + groupByColumns); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/by/ByChunkedOperator.java b/DB/src/main/java/io/deephaven/db/v2/by/ByChunkedOperator.java index a991cc5f7d3..5678dd2368b 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/ByChunkedOperator.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/ByChunkedOperator.java @@ -26,8 +26,7 @@ import static io.deephaven.db.v2.sources.ArrayBackedColumnSource.BLOCK_SIZE; /** - * An {@link IterativeChunkedAggregationOperator} used in the implementation of - * {@link io.deephaven.db.tables.Table#by}. + * An {@link IterativeChunkedAggregationOperator} used in the implementation of {@link io.deephaven.db.tables.Table#by}. 
*/ public final class ByChunkedOperator implements IterativeChunkedAggregationOperator { @@ -44,16 +43,16 @@ public final class ByChunkedOperator implements IterativeChunkedAggregationOpera private boolean someKeyHasModifies; ByChunkedOperator(@NotNull final QueryTable inputTable, final boolean registeredWithHelper, - @NotNull final MatchPair... resultColumnPairs) { + @NotNull final MatchPair... resultColumnPairs) { this.inputTable = inputTable; this.registeredWithHelper = registeredWithHelper; live = inputTable.isRefreshing(); indices = new ObjectArraySource<>(Index.class); // noinspection unchecked resultColumns = Arrays.stream(resultColumnPairs).collect(Collectors.toMap(MatchPair::left, - matchPair -> (AggregateColumnSource) AggregateColumnSource - .make(inputTable.getColumnSource(matchPair.right()), indices), - Assert::neverInvoked, LinkedHashMap::new)); + matchPair -> (AggregateColumnSource) AggregateColumnSource + .make(inputTable.getColumnSource(matchPair.right()), indices), + Assert::neverInvoked, LinkedHashMap::new)); inputColumnNames = MatchPair.getRightColumns(resultColumnPairs); if (live) { resultInputsModifiedColumnSet = inputTable.newModifiedColumnSet(inputColumnNames); @@ -63,18 +62,14 @@ public final class ByChunkedOperator implements IterativeChunkedAggregationOpera } @Override - public void addChunk(final BucketedContext bucketedContext, - final Chunk values, - @NotNull final LongChunk inputIndices, - @NotNull final IntChunk destinations, - @NotNull final IntChunk startPositions, - @NotNull final IntChunk length, - @NotNull final WritableBooleanChunk stateModified) { + public void addChunk(final BucketedContext bucketedContext, final Chunk values, + @NotNull final LongChunk inputIndices, + @NotNull final IntChunk destinations, @NotNull final IntChunk startPositions, + @NotNull final IntChunk length, @NotNull final WritableBooleanChunk stateModified) { Assert.eqNull(values, "values"); someKeyHasAddsOrRemoves |= startPositions.size() > 0; // 
noinspection unchecked - final LongChunk inputIndicesAsOrdered = - (LongChunk) inputIndices; + final LongChunk inputIndicesAsOrdered = (LongChunk) inputIndices; for (int ii = 0; ii < startPositions.size(); ++ii) { final int startPosition = startPositions.get(ii); final int runLength = length.get(ii); @@ -86,18 +81,14 @@ public void addChunk(final BucketedContext bucketedContext, } @Override - public void removeChunk(final BucketedContext bucketedContext, - final Chunk values, - @NotNull final LongChunk inputIndices, - @NotNull final IntChunk destinations, - @NotNull final IntChunk startPositions, - @NotNull final IntChunk length, - @NotNull final WritableBooleanChunk stateModified) { + public void removeChunk(final BucketedContext bucketedContext, final Chunk values, + @NotNull final LongChunk inputIndices, + @NotNull final IntChunk destinations, @NotNull final IntChunk startPositions, + @NotNull final IntChunk length, @NotNull final WritableBooleanChunk stateModified) { Assert.eqNull(values, "values"); someKeyHasAddsOrRemoves |= startPositions.size() > 0; // noinspection unchecked - final LongChunk inputIndicesAsOrdered = - (LongChunk) inputIndices; + final LongChunk inputIndicesAsOrdered = (LongChunk) inputIndices; for (int ii = 0; ii < startPositions.size(); ++ii) { final int startPosition = startPositions.get(ii); final int runLength = length.get(ii); @@ -109,52 +100,43 @@ public void removeChunk(final BucketedContext bucketedContext, } @Override - public void modifyChunk(final BucketedContext bucketedContext, - final Chunk previousValues, final Chunk newValues, - @NotNull final LongChunk postShiftIndices, - @NotNull final IntChunk destinations, - @NotNull final IntChunk startPositions, - @NotNull final IntChunk length, - @NotNull final WritableBooleanChunk stateModified) { + public void modifyChunk(final BucketedContext bucketedContext, final Chunk previousValues, + final Chunk newValues, + @NotNull final LongChunk postShiftIndices, + @NotNull final IntChunk 
destinations, @NotNull final IntChunk startPositions, + @NotNull final IntChunk length, @NotNull final WritableBooleanChunk stateModified) { // We have no inputs, so we should never get here. throw new IllegalStateException(); } @Override - public void shiftChunk(final BucketedContext bucketedContext, - final Chunk previousValues, final Chunk newValues, - @NotNull final LongChunk preShiftIndices, - @NotNull final LongChunk postShiftIndices, - @NotNull final IntChunk destinations, - @NotNull final IntChunk startPositions, - @NotNull final IntChunk length, - @NotNull final WritableBooleanChunk stateModified) { + public void shiftChunk(final BucketedContext bucketedContext, final Chunk previousValues, + final Chunk newValues, + @NotNull final LongChunk preShiftIndices, + @NotNull final LongChunk postShiftIndices, + @NotNull final IntChunk destinations, @NotNull final IntChunk startPositions, + @NotNull final IntChunk length, @NotNull final WritableBooleanChunk stateModified) { Assert.eqNull(previousValues, "previousValues"); Assert.eqNull(newValues, "newValues"); // noinspection unchecked - final LongChunk preShiftIndicesAsOrdered = - (LongChunk) preShiftIndices; + final LongChunk preShiftIndicesAsOrdered = (LongChunk) preShiftIndices; // noinspection unchecked - final LongChunk postShiftIndicesAsOrdered = - (LongChunk) postShiftIndices; + final LongChunk postShiftIndicesAsOrdered = (LongChunk) postShiftIndices; for (int ii = 0; ii < startPositions.size(); ++ii) { final int startPosition = startPositions.get(ii); final int runLength = length.get(ii); final long destination = destinations.get(startPosition); - doShift(preShiftIndicesAsOrdered, postShiftIndicesAsOrdered, startPosition, runLength, - destination); + doShift(preShiftIndicesAsOrdered, postShiftIndicesAsOrdered, startPosition, runLength, destination); } } @Override public void modifyIndices(final BucketedContext context, - @NotNull final LongChunk inputIndices, - @NotNull final IntChunk destinations, - 
@NotNull final IntChunk startPositions, - @NotNull final IntChunk length, - @NotNull final WritableBooleanChunk stateModified) { + @NotNull final LongChunk inputIndices, + @NotNull final IntChunk destinations, @NotNull final IntChunk startPositions, + @NotNull final IntChunk length, @NotNull final WritableBooleanChunk stateModified) { if (!stepValuesModified) { return; } @@ -164,8 +146,8 @@ public void modifyIndices(final BucketedContext context, @Override public boolean addChunk(final SingletonContext singletonContext, final int chunkSize, - final Chunk values, - @NotNull final LongChunk inputIndices, final long destination) { + final Chunk values, + @NotNull final LongChunk inputIndices, final long destination) { Assert.eqNull(values, "values"); someKeyHasAddsOrRemoves |= chunkSize > 0; // noinspection unchecked @@ -182,8 +164,8 @@ public boolean addIndex(SingletonContext context, Index index, long destination) @Override public boolean removeChunk(final SingletonContext singletonContext, final int chunkSize, - final Chunk values, - @NotNull final LongChunk inputIndices, final long destination) { + final Chunk values, + @NotNull final LongChunk inputIndices, final long destination) { Assert.eqNull(values, "values"); someKeyHasAddsOrRemoves |= chunkSize > 0; // noinspection unchecked @@ -193,31 +175,30 @@ public boolean removeChunk(final SingletonContext singletonContext, final int ch @Override public boolean modifyChunk(final SingletonContext singletonContext, final int chunkSize, - final Chunk previousValues, final Chunk newValues, - @NotNull final LongChunk postShiftIndices, - final long destination) { + final Chunk previousValues, final Chunk newValues, + @NotNull final LongChunk postShiftIndices, + final long destination) { // We have no inputs, so we should never get here. 
throw new IllegalStateException(); } @Override - public boolean shiftChunk(final SingletonContext singletonContext, - final Chunk previousValues, final Chunk newValues, - @NotNull final LongChunk preInputIndices, - @NotNull final LongChunk postInputIndices, - final long destination) { + public boolean shiftChunk(final SingletonContext singletonContext, final Chunk previousValues, + final Chunk newValues, + @NotNull final LongChunk preInputIndices, + @NotNull final LongChunk postInputIndices, + final long destination) { Assert.eqNull(previousValues, "previousValues"); Assert.eqNull(newValues, "newValues"); // noinspection unchecked - doShift((LongChunk) preInputIndices, - (LongChunk) postInputIndices, 0, preInputIndices.size(), - destination); + doShift((LongChunk) preInputIndices, (LongChunk) postInputIndices, 0, + preInputIndices.size(), destination); return false; } @Override - public boolean modifyIndices(final SingletonContext context, - @NotNull final LongChunk indices, final long destination) { + public boolean modifyIndices(final SingletonContext context, @NotNull final LongChunk indices, + final long destination) { if (!stepValuesModified) { return false; } @@ -225,8 +206,8 @@ public boolean modifyIndices(final SingletonContext context, return indices.size() != 0; } - private void addChunk(@NotNull final LongChunk indices, final int start, - final int length, final long destination) { + private void addChunk(@NotNull final LongChunk indices, final int start, final int length, + final long destination) { final Index index = indexForSlot(destination); index.insert(indices, start, length); } @@ -235,15 +216,15 @@ private void addIndex(@NotNull final Index addIndex, final long destination) { indexForSlot(destination).insert(addIndex); } - private void removeChunk(@NotNull final LongChunk indices, final int start, - final int length, final long destination) { + private void removeChunk(@NotNull final LongChunk indices, final int start, final int length, + final 
long destination) { final Index index = indexForSlot(destination); index.remove(indices, start, length); } private void doShift(@NotNull final LongChunk preShiftIndices, - @NotNull final LongChunk postShiftIndices, - final int startPosition, final int runLength, final long destination) { + @NotNull final LongChunk postShiftIndices, + final int startPosition, final int runLength, final long destination) { final Index index = indexForSlot(destination); index.remove(preShiftIndices, startPosition, runLength); index.insert(postShiftIndices, startPosition, runLength); @@ -252,8 +233,7 @@ private void doShift(@NotNull final LongChunk preShiftIndices private Index indexForSlot(final long destination) { Index index = indices.getUnsafe(destination); if (index == null) { - indices.set(destination, - index = (live ? Index.FACTORY : Index.CURRENT_FACTORY).getEmptyIndex()); + indices.set(destination, index = (live ? Index.FACTORY : Index.CURRENT_FACTORY).getEmptyIndex()); } return index; } @@ -270,8 +250,7 @@ public void ensureCapacity(final long tableSize) { @Override public void startTrackingPrevValues() { - // NB: We don't need previous tracking on the indices ColumnSource. It's in destination - // space, and we never move + // NB: We don't need previous tracking on the indices ColumnSource. It's in destination space, and we never move // anything. Nothing should be asking for previous values if they didn't exist previously. 
// indices.startTrackingPrevValues(); // NB: These are usually (always, as of now) instances of AggregateColumnSource, meaning @@ -280,46 +259,41 @@ public void startTrackingPrevValues() { } @Override - public UnaryOperator initializeRefreshing( - @NotNull final QueryTable resultTable, - @NotNull final LivenessReferent aggregationUpdateListener) { + public UnaryOperator initializeRefreshing(@NotNull final QueryTable resultTable, + @NotNull final LivenessReferent aggregationUpdateListener) { initializeNewIndexPreviousValues(resultTable.getIndex()); return registeredWithHelper - ? new InputToResultModifiedColumnSetFactory(resultTable, - resultColumns.keySet().toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)) - : null; + ? new InputToResultModifiedColumnSetFactory(resultTable, + resultColumns.keySet().toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)) + : null; } /** - * Make a factory that reads an upstream {@link ModifiedColumnSet} and produces a result - * {@link ModifiedColumnSet}. + * Make a factory that reads an upstream {@link ModifiedColumnSet} and produces a result {@link ModifiedColumnSet}. 
* * @param resultTable The result {@link QueryTable} - * @param resultColumnNames The result column names, which must be parallel to this operator's - * input column names + * @param resultColumnNames The result column names, which must be parallel to this operator's input column names * @return The factory */ - UnaryOperator makeInputToResultModifiedColumnSetFactory( - @NotNull final QueryTable resultTable, @NotNull final String[] resultColumnNames) { + UnaryOperator makeInputToResultModifiedColumnSetFactory(@NotNull final QueryTable resultTable, + @NotNull final String[] resultColumnNames) { return new InputToResultModifiedColumnSetFactory(resultTable, resultColumnNames); } - private class InputToResultModifiedColumnSetFactory - implements UnaryOperator { + private class InputToResultModifiedColumnSetFactory implements UnaryOperator { private final ModifiedColumnSet updateModifiedColumnSet; private final ModifiedColumnSet allAggregatedColumns; private final ModifiedColumnSet.Transformer aggregatedColumnsTransformer; private InputToResultModifiedColumnSetFactory(@NotNull final QueryTable resultTable, - @NotNull final String[] resultColumnNames) { - updateModifiedColumnSet = - new ModifiedColumnSet(resultTable.getModifiedColumnSetForUpdates()); + @NotNull final String[] resultColumnNames) { + updateModifiedColumnSet = new ModifiedColumnSet(resultTable.getModifiedColumnSetForUpdates()); allAggregatedColumns = resultTable.newModifiedColumnSet(resultColumnNames); aggregatedColumnsTransformer = inputTable.newModifiedColumnSetTransformer( - inputColumnNames, - Arrays.stream(resultColumnNames).map(resultTable::newModifiedColumnSet) - .toArray(ModifiedColumnSet[]::new)); + inputColumnNames, + Arrays.stream(resultColumnNames).map(resultTable::newModifiedColumnSet) + .toArray(ModifiedColumnSet[]::new)); } @Override @@ -328,8 +302,7 @@ public ModifiedColumnSet apply(@NotNull final ModifiedColumnSet upstreamModified return allAggregatedColumns; } if (someKeyHasModifies) { - 
aggregatedColumnsTransformer.clearAndTransform(upstreamModifiedColumnSet, - updateModifiedColumnSet); + aggregatedColumnsTransformer.clearAndTransform(upstreamModifiedColumnSet, updateModifiedColumnSet); return updateModifiedColumnSet; } return ModifiedColumnSet.EMPTY; @@ -339,14 +312,14 @@ public ModifiedColumnSet apply(@NotNull final ModifiedColumnSet upstreamModified @Override public void resetForStep(@NotNull final ShiftAwareListener.Update upstream) { stepValuesModified = upstream.modified.nonempty() && upstream.modifiedColumnSet.nonempty() - && upstream.modifiedColumnSet.containsAny(resultInputsModifiedColumnSet); + && upstream.modifiedColumnSet.containsAny(resultInputsModifiedColumnSet); someKeyHasAddsOrRemoves = false; someKeyHasModifies = false; } @Override public void propagateUpdates(@NotNull final ShiftAwareListener.Update downstream, - @NotNull final ReadOnlyIndex newDestinations) { + @NotNull final ReadOnlyIndex newDestinations) { initializeNewIndexPreviousValues(newDestinations); } @@ -355,19 +328,16 @@ private void initializeNewIndexPreviousValues(@NotNull final OrderedKeys newDest return; } try (final ChunkSource.GetContext indicesGetContext = indices.makeGetContext(BLOCK_SIZE); - final OrderedKeys.Iterator newDestinationsIterator = - newDestinations.getOrderedKeysIterator()) { + final OrderedKeys.Iterator newDestinationsIterator = newDestinations.getOrderedKeysIterator()) { while (newDestinationsIterator.hasMore()) { final long nextDestination = newDestinationsIterator.peekNextKey(); - final long nextBlockEnd = - (nextDestination / BLOCK_SIZE) * BLOCK_SIZE + BLOCK_SIZE - 1; - // This OrderedKeys slice should be exactly aligned to a slice of a single data - // block in indices (since + final long nextBlockEnd = (nextDestination / BLOCK_SIZE) * BLOCK_SIZE + BLOCK_SIZE - 1; + // This OrderedKeys slice should be exactly aligned to a slice of a single data block in indices (since // it is an ArrayBackedColumnSource), allowing getChunk to skip a copy. 
final OrderedKeys newDestinationsSlice = - newDestinationsIterator.getNextOrderedKeysThrough(nextBlockEnd); + newDestinationsIterator.getNextOrderedKeysThrough(nextBlockEnd); final ObjectChunk indicesChunk = - indices.getChunk(indicesGetContext, newDestinationsSlice).asObjectChunk(); + indices.getChunk(indicesGetContext, newDestinationsSlice).asObjectChunk(); final int indicesChunkSize = indicesChunk.size(); for (int ii = 0; ii < indicesChunkSize; ++ii) { indicesChunk.get(ii).initializePreviousValue(); diff --git a/DB/src/main/java/io/deephaven/db/v2/by/ByExternalAggregationFactory.java b/DB/src/main/java/io/deephaven/db/v2/by/ByExternalAggregationFactory.java index 04944df6b33..6d971f2e731 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/ByExternalAggregationFactory.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/ByExternalAggregationFactory.java @@ -12,8 +12,7 @@ import java.util.List; /** - * An {@link AggregationContextFactory} used in the implementation of - * {@link io.deephaven.db.tables.Table#byExternal}. + * An {@link AggregationContextFactory} used in the implementation of {@link io.deephaven.db.tables.Table#byExternal}. */ public class ByExternalAggregationFactory implements AggregationContextFactory { @@ -24,8 +23,8 @@ public class ByExternalAggregationFactory implements AggregationContextFactory { private ByExternalChunkedOperator operator; private ByExternalAggregationFactory(final boolean dropKeys, - @NotNull final ByExternalChunkedOperator.AttributeCopier attributeCopier, - @NotNull final List keysToPrepopulate) { + @NotNull final ByExternalChunkedOperator.AttributeCopier attributeCopier, + @NotNull final List keysToPrepopulate) { this.dropKeys = dropKeys; this.attributeCopier = attributeCopier; this.keysToPrepopulate = keysToPrepopulate; @@ -33,21 +32,21 @@ private ByExternalAggregationFactory(final boolean dropKeys, @Override public AggregationContext makeAggregationContext(@NotNull final Table inputTable, - @NotNull final String... 
groupByColumnNames) { + @NotNull final String... groupByColumnNames) { final QueryTable adjustedInputTable = - (QueryTable) (dropKeys ? inputTable.dropColumns(groupByColumnNames) : inputTable); + (QueryTable) (dropKeys ? inputTable.dropColumns(groupByColumnNames) : inputTable); // noinspection unchecked return new AggregationContext( - new IterativeChunkedAggregationOperator[] { - operator = new ByExternalChunkedOperator( - (QueryTable) inputTable, - adjustedInputTable, - attributeCopier, - keysToPrepopulate, - groupByColumnNames) - }, - new String[][] {CollectionUtil.ZERO_LENGTH_STRING_ARRAY}, - new ChunkSource.WithPrev[] {null}); + new IterativeChunkedAggregationOperator[] { + operator = new ByExternalChunkedOperator( + (QueryTable) inputTable, + adjustedInputTable, + attributeCopier, + keysToPrepopulate, + groupByColumnNames) + }, + new String[][] {CollectionUtil.ZERO_LENGTH_STRING_ARRAY}, + new ChunkSource.WithPrev[] {null}); } @Override @@ -56,46 +55,46 @@ public String toString() { } public static LocalTableMap byExternal(@NotNull final QueryTable inputTable, - final boolean dropKeys, - @NotNull final ByExternalChunkedOperator.AttributeCopier attributeCopier, - @NotNull final List keysToPrepopulate, - @NotNull final String... groupByColumnNames) { - return byExternal(AggregationControl.DEFAULT_FOR_OPERATOR, inputTable, dropKeys, - attributeCopier, keysToPrepopulate, groupByColumnNames); + final boolean dropKeys, + @NotNull final ByExternalChunkedOperator.AttributeCopier attributeCopier, + @NotNull final List keysToPrepopulate, + @NotNull final String... 
groupByColumnNames) { + return byExternal(AggregationControl.DEFAULT_FOR_OPERATOR, inputTable, dropKeys, attributeCopier, + keysToPrepopulate, groupByColumnNames); } public static LocalTableMap byExternal(@NotNull final QueryTable inputTable, - final boolean dropKeys, - @NotNull final ByExternalChunkedOperator.AttributeCopier attributeCopier, - @NotNull final List keysToPrepopulate, - @NotNull final SelectColumn[] groupByColumns) { - return byExternal(AggregationControl.DEFAULT_FOR_OPERATOR, inputTable, dropKeys, - attributeCopier, keysToPrepopulate, groupByColumns); + final boolean dropKeys, + @NotNull final ByExternalChunkedOperator.AttributeCopier attributeCopier, + @NotNull final List keysToPrepopulate, + @NotNull final SelectColumn[] groupByColumns) { + return byExternal(AggregationControl.DEFAULT_FOR_OPERATOR, inputTable, dropKeys, attributeCopier, + keysToPrepopulate, groupByColumns); } public static LocalTableMap byExternal(@NotNull final AggregationControl aggregationControl, - @NotNull final QueryTable inputTable, - final boolean dropKeys, - @NotNull final ByExternalChunkedOperator.AttributeCopier attributeCopier, - @NotNull final List keysToPrepopulate, - @NotNull final String... groupByColumnNames) { - return byExternal(aggregationControl, inputTable, dropKeys, attributeCopier, - keysToPrepopulate, SelectColumnFactory.getExpressions(groupByColumnNames)); + @NotNull final QueryTable inputTable, + final boolean dropKeys, + @NotNull final ByExternalChunkedOperator.AttributeCopier attributeCopier, + @NotNull final List keysToPrepopulate, + @NotNull final String... 
groupByColumnNames) { + return byExternal(aggregationControl, inputTable, dropKeys, attributeCopier, keysToPrepopulate, + SelectColumnFactory.getExpressions(groupByColumnNames)); } public static LocalTableMap byExternal(@NotNull final AggregationControl aggregationControl, - @NotNull final QueryTable inputTable, - final boolean dropKeys, - @NotNull final ByExternalChunkedOperator.AttributeCopier attributeCopier, - @NotNull final List keysToPrepopulate, - @NotNull final SelectColumn[] groupByColumns) { + @NotNull final QueryTable inputTable, + final boolean dropKeys, + @NotNull final ByExternalChunkedOperator.AttributeCopier attributeCopier, + @NotNull final List keysToPrepopulate, + @NotNull final SelectColumn[] groupByColumns) { if (groupByColumns.length == 0) { return noKeyByExternal(inputTable); } final ByExternalAggregationFactory aggregationFactory = - new ByExternalAggregationFactory(dropKeys, attributeCopier, keysToPrepopulate); - ChunkedOperatorAggregationHelper.aggregation(aggregationControl, aggregationFactory, - inputTable, groupByColumns); + new ByExternalAggregationFactory(dropKeys, attributeCopier, keysToPrepopulate); + ChunkedOperatorAggregationHelper.aggregation(aggregationControl, aggregationFactory, inputTable, + groupByColumns); return aggregationFactory.operator.getTableMap(); } diff --git a/DB/src/main/java/io/deephaven/db/v2/by/ByExternalChunkedOperator.java b/DB/src/main/java/io/deephaven/db/v2/by/ByExternalChunkedOperator.java index 82fcd028511..71c881f8ed2 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/ByExternalChunkedOperator.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/ByExternalChunkedOperator.java @@ -32,9 +32,8 @@ public final class ByExternalChunkedOperator implements IterativeChunkedAggregat private static final Index NONEXISTENT_TABLE_INDEX = Index.FACTORY.getEmptyIndex(); private static final IndexShiftData.SmartCoalescingBuilder NONEXISTENT_TABLE_SHIFT_BUILDER = - new 
IndexShiftData.SmartCoalescingBuilder(NONEXISTENT_TABLE_INDEX.clone()); - private static final QueryTable NONEXISTENT_TABLE = - new QueryTable(NONEXISTENT_TABLE_INDEX, Collections.emptyMap()); + new IndexShiftData.SmartCoalescingBuilder(NONEXISTENT_TABLE_INDEX.clone()); + private static final QueryTable NONEXISTENT_TABLE = new QueryTable(NONEXISTENT_TABLE_INDEX, Collections.emptyMap()); private static final int WRITE_THROUGH_CHUNK_SIZE = ArrayBackedColumnSource.BLOCK_SIZE; @@ -47,8 +46,8 @@ public interface AttributeCopier { private final List keysToPrepopulate; private final String[] keyColumnNames; - private final LocalTableMap tableMap; // Consider making this optional, in which case we should - // expose the tables column. + private final LocalTableMap tableMap; // Consider making this optional, in which case we should expose the tables + // column. private final String callSite; private final ObjectArraySource tables; @@ -70,11 +69,10 @@ public interface AttributeCopier { * {@link IterativeChunkedAggregationOperator#resetForStep(ShiftAwareListener.Update)} and * {@link IterativeChunkedAggregationOperator#propagateUpdates(ShiftAwareListener.Update, ReadOnlyIndex)} *

    - * If this ever becomes necessary in other operators, it could be moved out to the helper the - * way modified destination tracking already is. + * If this ever becomes necessary in other operators, it could be moved out to the helper the way modified + * destination tracking already is. *

    - * We should consider whether to instead use a random builder, but the current approach seemed - * reasonable for now. + * We should consider whether to instead use a random builder, but the current approach seemed reasonable for now. */ private Index stepShiftedDestinations; private boolean stepValuesModified; @@ -82,21 +80,19 @@ public interface AttributeCopier { /** * Construct a new operator. * - * @param unadjustedParentTable The parent table for all sub-tables, without any key-column - * dropping or similar already applied - * @param parentTable The parent table for all sub-tables, with any key-column dropping or - * similar already applied - * @param attributeCopier A procedure that copies attributes or similar from its first argument - * (the parent table) to its second (the sub-table) - * @param keysToPrepopulate A list of keys to be pre-populated safely before the operation - * completes. + * @param unadjustedParentTable The parent table for all sub-tables, without any key-column dropping or similar + * already applied + * @param parentTable The parent table for all sub-tables, with any key-column dropping or similar already applied + * @param attributeCopier A procedure that copies attributes or similar from its first argument (the parent table) + * to its second (the sub-table) + * @param keysToPrepopulate A list of keys to be pre-populated safely before the operation completes. * @param keyColumnNames The key columns */ ByExternalChunkedOperator(@NotNull final QueryTable unadjustedParentTable, - @NotNull final QueryTable parentTable, - @NotNull final AttributeCopier attributeCopier, - @NotNull final List keysToPrepopulate, - @NotNull final String... keyColumnNames) { + @NotNull final QueryTable parentTable, + @NotNull final AttributeCopier attributeCopier, + @NotNull final List keysToPrepopulate, + @NotNull final String... 
keyColumnNames) { this.parentTable = parentTable; this.attributeCopier = attributeCopier; this.keysToPrepopulate = keysToPrepopulate; @@ -109,33 +105,27 @@ public interface AttributeCopier { tables = new ObjectArraySource<>(QueryTable.class); addedIndices = new ObjectArraySource<>(Index.class); - // Note: Sub-tables always share their ColumnSource map with the parent table, so they can - // all use this result MCS. - resultModifiedColumnSet = - new ModifiedColumnSet(parentTable.getModifiedColumnSetForUpdates()); + // Note: Sub-tables always share their ColumnSource map with the parent table, so they can all use this result + // MCS. + resultModifiedColumnSet = new ModifiedColumnSet(parentTable.getModifiedColumnSetForUpdates()); if (parentTable.isRefreshing()) { removedIndices = new ObjectArraySource<>(Index.class); modifiedIndices = new ObjectArraySource<>(Index.class); - shiftDataBuilders = - new ObjectArraySource<>(IndexShiftData.SmartCoalescingBuilder.class); + shiftDataBuilders = new ObjectArraySource<>(IndexShiftData.SmartCoalescingBuilder.class); - final Set keyColumnNameSet = - Arrays.stream(keyColumnNames).collect(Collectors.toSet()); + final Set keyColumnNameSet = Arrays.stream(keyColumnNames).collect(Collectors.toSet()); final Set unadjustedParentColumnNameSet = - new LinkedHashSet<>(unadjustedParentTable.getDefinition().getColumnNames()); + new LinkedHashSet<>(unadjustedParentTable.getDefinition().getColumnNames()); final String[] retainedResultColumnNames = parentTable.getDefinition().getColumnStream() - .map(ColumnDefinition::getName) - .filter(cn -> !keyColumnNameSet.contains(cn)) - .filter(unadjustedParentColumnNameSet::contains) - .toArray(String[]::new); - final ModifiedColumnSet[] retainedResultModifiedColumnSets = - Arrays.stream(retainedResultColumnNames) - .map(parentTable::newModifiedColumnSet) // This is safe because we're not giving - // empty input + .map(ColumnDefinition::getName) + .filter(cn -> !keyColumnNameSet.contains(cn)) + 
.filter(unadjustedParentColumnNameSet::contains) + .toArray(String[]::new); + final ModifiedColumnSet[] retainedResultModifiedColumnSets = Arrays.stream(retainedResultColumnNames) + .map(parentTable::newModifiedColumnSet) // This is safe because we're not giving empty input .toArray(ModifiedColumnSet[]::new); - upstreamToResultTransformer = - unadjustedParentTable.getModifiedColumnSetForUpdates().newTransformer( + upstreamToResultTransformer = unadjustedParentTable.getModifiedColumnSetForUpdates().newTransformer( retainedResultColumnNames, retainedResultModifiedColumnSets); } else { @@ -151,121 +141,103 @@ LocalTableMap getTableMap() { } @Override - public void addChunk(final BucketedContext bucketedContext, - final Chunk values, - @NotNull final LongChunk inputIndices, - @NotNull final IntChunk destinations, - @NotNull final IntChunk startPositions, - @NotNull final IntChunk length, - @NotNull final WritableBooleanChunk stateModified) { + public void addChunk(final BucketedContext bucketedContext, final Chunk values, + @NotNull final LongChunk inputIndices, + @NotNull final IntChunk destinations, @NotNull final IntChunk startPositions, + @NotNull final IntChunk length, @NotNull final WritableBooleanChunk stateModified) { Assert.eqNull(values, "values"); // noinspection unchecked - final LongChunk inputIndicesAsOrdered = - (LongChunk) inputIndices; + final LongChunk inputIndicesAsOrdered = (LongChunk) inputIndices; for (int ii = 0; ii < startPositions.size(); ++ii) { final int startPosition = startPositions.get(ii); final int runLength = length.get(ii); final long destination = destinations.get(startPosition); - stateModified.set(ii, accumulateToIndex(addedIndices, inputIndicesAsOrdered, - startPosition, runLength, destination)); + stateModified.set(ii, + accumulateToIndex(addedIndices, inputIndicesAsOrdered, startPosition, runLength, destination)); } } @Override - public void removeChunk(final BucketedContext bucketedContext, - final Chunk values, - @NotNull final 
LongChunk inputIndices, - @NotNull final IntChunk destinations, - @NotNull final IntChunk startPositions, - @NotNull final IntChunk length, - @NotNull final WritableBooleanChunk stateModified) { + public void removeChunk(final BucketedContext bucketedContext, final Chunk values, + @NotNull final LongChunk inputIndices, + @NotNull final IntChunk destinations, @NotNull final IntChunk startPositions, + @NotNull final IntChunk length, @NotNull final WritableBooleanChunk stateModified) { Assert.eqNull(values, "values"); // noinspection unchecked - final LongChunk inputIndicesAsOrdered = - (LongChunk) inputIndices; + final LongChunk inputIndicesAsOrdered = (LongChunk) inputIndices; for (int ii = 0; ii < startPositions.size(); ++ii) { final int startPosition = startPositions.get(ii); final int runLength = length.get(ii); final long destination = destinations.get(startPosition); - stateModified.set(ii, accumulateToIndex(removedIndices, inputIndicesAsOrdered, - startPosition, runLength, destination)); + stateModified.set(ii, + accumulateToIndex(removedIndices, inputIndicesAsOrdered, startPosition, runLength, destination)); } } @Override - public void modifyChunk(final BucketedContext bucketedContext, - final Chunk previousValues, final Chunk newValues, - @NotNull final LongChunk postShiftIndices, - @NotNull final IntChunk destinations, - @NotNull final IntChunk startPositions, - @NotNull final IntChunk length, - @NotNull final WritableBooleanChunk stateModified) { + public void modifyChunk(final BucketedContext bucketedContext, final Chunk previousValues, + final Chunk newValues, + @NotNull final LongChunk postShiftIndices, + @NotNull final IntChunk destinations, @NotNull final IntChunk startPositions, + @NotNull final IntChunk length, @NotNull final WritableBooleanChunk stateModified) { // We have no inputs, so we should never get here. 
throw new IllegalStateException(); } @Override - public void shiftChunk(final BucketedContext bucketedContext, - final Chunk previousValues, final Chunk newValues, - @NotNull final LongChunk preShiftIndices, - @NotNull final LongChunk postShiftIndices, - @NotNull final IntChunk destinations, - @NotNull final IntChunk startPositions, - @NotNull final IntChunk length, - @NotNull final WritableBooleanChunk stateModified) { + public void shiftChunk(final BucketedContext bucketedContext, final Chunk previousValues, + final Chunk newValues, + @NotNull final LongChunk preShiftIndices, + @NotNull final LongChunk postShiftIndices, + @NotNull final IntChunk destinations, @NotNull final IntChunk startPositions, + @NotNull final IntChunk length, @NotNull final WritableBooleanChunk stateModified) { Assert.eqNull(previousValues, "previousValues"); Assert.eqNull(newValues, "newValues"); - final TreeIndexImplSequentialBuilder chunkDestinationBuilder = - new TreeIndexImplSequentialBuilder(true); + final TreeIndexImplSequentialBuilder chunkDestinationBuilder = new TreeIndexImplSequentialBuilder(true); for (int ii = 0; ii < startPositions.size(); ++ii) { final int startPosition = startPositions.get(ii); final int runLength = length.get(ii); final long destination = destinations.get(startPosition); - if (appendShifts(preShiftIndices, postShiftIndices, startPosition, runLength, - destination)) { + if (appendShifts(preShiftIndices, postShiftIndices, startPosition, runLength, destination)) { chunkDestinationBuilder.appendKey(destination); } } try (final ReadOnlyIndex chunkDestinationsShifted = - new CurrentOnlyIndex(chunkDestinationBuilder.getTreeIndexImpl())) { + new CurrentOnlyIndex(chunkDestinationBuilder.getTreeIndexImpl())) { stepShiftedDestinations.insert(chunkDestinationsShifted); } } @Override public void modifyIndices(final BucketedContext context, - @NotNull final LongChunk inputIndices, - @NotNull final IntChunk destinations, - @NotNull final IntChunk startPositions, - 
@NotNull final IntChunk length, - @NotNull final WritableBooleanChunk stateModified) { + @NotNull final LongChunk inputIndices, + @NotNull final IntChunk destinations, @NotNull final IntChunk startPositions, + @NotNull final IntChunk length, @NotNull final WritableBooleanChunk stateModified) { if (!stepValuesModified) { return; } // noinspection unchecked - final LongChunk inputIndicesAsOrdered = - (LongChunk) inputIndices; + final LongChunk inputIndicesAsOrdered = (LongChunk) inputIndices; for (int ii = 0; ii < startPositions.size(); ++ii) { final int startPosition = startPositions.get(ii); final int runLength = length.get(ii); final long destination = destinations.get(startPosition); - stateModified.set(ii, accumulateToIndex(modifiedIndices, inputIndicesAsOrdered, - startPosition, runLength, destination)); + stateModified.set(ii, + accumulateToIndex(modifiedIndices, inputIndicesAsOrdered, startPosition, runLength, destination)); } } @Override public boolean addChunk(final SingletonContext singletonContext, final int chunkSize, - final Chunk values, - @NotNull final LongChunk inputIndices, final long destination) { + final Chunk values, + @NotNull final LongChunk inputIndices, final long destination) { Assert.eqNull(values, "values"); // noinspection unchecked - final LongChunk inputIndicesAsOrdered = - (LongChunk) inputIndices; + final LongChunk inputIndicesAsOrdered = (LongChunk) inputIndices; return accumulateToIndex(addedIndices, inputIndicesAsOrdered, 0, chunkSize, destination); } @@ -276,61 +248,58 @@ public boolean addIndex(SingletonContext context, Index index, long destination) @Override public boolean removeChunk(final SingletonContext singletonContext, final int chunkSize, - final Chunk values, - @NotNull final LongChunk inputIndices, final long destination) { + final Chunk values, + @NotNull final LongChunk inputIndices, final long destination) { Assert.eqNull(values, "values"); // noinspection unchecked - final LongChunk inputIndicesAsOrdered = - 
(LongChunk) inputIndices; + final LongChunk inputIndicesAsOrdered = (LongChunk) inputIndices; return accumulateToIndex(removedIndices, inputIndicesAsOrdered, 0, chunkSize, destination); } @Override public boolean modifyChunk(final SingletonContext singletonContext, final int chunkSize, - final Chunk previousValues, final Chunk newValues, - @NotNull final LongChunk postShiftIndices, - final long destination) { + final Chunk previousValues, final Chunk newValues, + @NotNull final LongChunk postShiftIndices, + final long destination) { // We have no inputs, so we should never get here. throw new IllegalStateException(); } @Override - public boolean shiftChunk(final SingletonContext singletonContext, - final Chunk previousValues, final Chunk newValues, - @NotNull final LongChunk preInputIndices, - @NotNull final LongChunk postInputIndices, - final long destination) { + public boolean shiftChunk(final SingletonContext singletonContext, final Chunk previousValues, + final Chunk newValues, + @NotNull final LongChunk preInputIndices, + @NotNull final LongChunk postInputIndices, + final long destination) { Assert.eqNull(previousValues, "previousValues"); Assert.eqNull(newValues, "newValues"); - if (appendShifts(preInputIndices, postInputIndices, 0, preInputIndices.size(), - destination)) { + if (appendShifts(preInputIndices, postInputIndices, 0, preInputIndices.size(), destination)) { stepShiftedDestinations.insert(destination); } return false; } @Override - public boolean modifyIndices(final SingletonContext context, - @NotNull final LongChunk indices, final long destination) { + public boolean modifyIndices(final SingletonContext context, @NotNull final LongChunk indices, + final long destination) { if (!stepValuesModified) { return false; } // noinspection unchecked - final LongChunk indicesAsOrdered = - (LongChunk) indices; + final LongChunk indicesAsOrdered = (LongChunk) indices; return accumulateToIndex(modifiedIndices, indicesAsOrdered, 0, indices.size(), 
destination); } private static boolean accumulateToIndex(@NotNull final ObjectArraySource indexColumn, - @NotNull final LongChunk indicesToAdd, final int start, final int length, - final long destination) { + @NotNull final LongChunk indicesToAdd, final int start, final int length, + final long destination) { final Index index = indexColumn.getUnsafe(destination); if (index == NONEXISTENT_TABLE_INDEX) { return false; } if (index == null) { indexColumn.set(destination, - new CurrentOnlyIndex(TreeIndexImpl.fromChunk(indicesToAdd, start, length, false))); + new CurrentOnlyIndex(TreeIndexImpl.fromChunk(indicesToAdd, start, length, false))); } else { index.insert(indicesToAdd, start, length); } @@ -338,7 +307,7 @@ private static boolean accumulateToIndex(@NotNull final ObjectArraySource } private static boolean accumulateToIndex(@NotNull final ObjectArraySource indexColumn, - @NotNull final Index indicesToAdd, final long destination) { + @NotNull final Index indicesToAdd, final long destination) { final Index index = indexColumn.getUnsafe(destination); if (index == NONEXISTENT_TABLE_INDEX) { return false; @@ -354,8 +323,8 @@ private static boolean accumulateToIndex(@NotNull final ObjectArraySource } private boolean appendShifts(@NotNull final LongChunk preShiftIndices, - @NotNull final LongChunk postShiftIndices, - final int startPosition, final int runLength, final long destination) { + @NotNull final LongChunk postShiftIndices, + final int startPosition, final int runLength, final long destination) { IndexShiftData.SmartCoalescingBuilder builder = shiftDataBuilders.getUnsafe(destination); if (builder == NONEXISTENT_TABLE_SHIFT_BUILDER) { return false; @@ -369,11 +338,10 @@ private boolean appendShifts(@NotNull final LongChunk preS } else { preShiftKeys = tableIndex.minus(removedIndex); } - shiftDataBuilders.set(destination, - builder = new IndexShiftData.SmartCoalescingBuilder(preShiftKeys)); + shiftDataBuilders.set(destination, builder = new 
IndexShiftData.SmartCoalescingBuilder(preShiftKeys)); } - // the polarity must be the same for shifted index in our chunk, so we use the first one to - // identify the proper polarity + // the polarity must be the same for shifted index in our chunk, so we use the first one to identify the proper + // polarity final boolean reversedPolarity = preShiftIndices.get(0) < postShiftIndices.get(0); if (reversedPolarity) { for (int ki = runLength - 1; ki >= 0; --ki) { @@ -417,81 +385,71 @@ public void startTrackingPrevValues() {} @Override public void propagateInitialState(@NotNull final QueryTable resultTable) { tableMapKeysSource = keyColumnNames.length == 1 - ? resultTable.getColumnSource(keyColumnNames[0]) - : new SmartKeySource(Arrays.stream(keyColumnNames).map(resultTable::getColumnSource) - .toArray(ColumnSource[]::new)); + ? resultTable.getColumnSource(keyColumnNames[0]) + : new SmartKeySource( + Arrays.stream(keyColumnNames).map(resultTable::getColumnSource).toArray(ColumnSource[]::new)); final ReadOnlyIndex initialDestinations = resultTable.getIndex(); if (initialDestinations.nonempty()) { - // At this point, we cannot have had any tables pre-populated because the table map has - // not been exposed + // At this point, we cannot have had any tables pre-populated because the table map has not been exposed // externally. - // The table map is still managed by its creating scope, and so does not need extra - // steps to ensure liveness. + // The table map is still managed by its creating scope, and so does not need extra steps to ensure + // liveness. // There's also no aggregation update listener to retain yet. 
final boolean setCallSite = QueryPerformanceRecorder.setCallsite(callSite); - try ( - final ChunkSource.GetContext tableMapKeysGetContext = + try (final ChunkSource.GetContext tableMapKeysGetContext = tableMapKeysSource.makeGetContext(WRITE_THROUGH_CHUNK_SIZE); - final ChunkBoxer.BoxerKernel tableMapKeysBoxer = ChunkBoxer - .getBoxer(tableMapKeysSource.getChunkType(), WRITE_THROUGH_CHUNK_SIZE); - final ResettableWritableObjectChunk tablesResettableChunk = - ResettableWritableObjectChunk.makeResettableChunk(); - final ResettableWritableObjectChunk addedIndicesResettableChunk = - ResettableWritableObjectChunk.makeResettableChunk(); - final OrderedKeys.Iterator initialDestinationsIterator = - initialDestinations.getOrderedKeysIterator()) { + final ChunkBoxer.BoxerKernel tableMapKeysBoxer = + ChunkBoxer.getBoxer(tableMapKeysSource.getChunkType(), WRITE_THROUGH_CHUNK_SIZE); + final ResettableWritableObjectChunk tablesResettableChunk = + ResettableWritableObjectChunk.makeResettableChunk(); + final ResettableWritableObjectChunk addedIndicesResettableChunk = + ResettableWritableObjectChunk.makeResettableChunk(); + final OrderedKeys.Iterator initialDestinationsIterator = + initialDestinations.getOrderedKeysIterator()) { // noinspection unchecked final WritableObjectChunk tablesBackingChunk = - tablesResettableChunk.asWritableObjectChunk(); + tablesResettableChunk.asWritableObjectChunk(); // noinspection unchecked final WritableObjectChunk addedIndicesBackingChunk = - addedIndicesResettableChunk.asWritableObjectChunk(); + addedIndicesResettableChunk.asWritableObjectChunk(); while (initialDestinationsIterator.hasMore()) { final long firstSliceDestination = initialDestinationsIterator.peekNextKey(); final long firstBackingChunkDestination = - tables.resetWritableChunkToBackingStore(tablesResettableChunk, - firstSliceDestination); - addedIndices.resetWritableChunkToBackingStore(addedIndicesResettableChunk, - firstSliceDestination); + 
tables.resetWritableChunkToBackingStore(tablesResettableChunk, firstSliceDestination); + addedIndices.resetWritableChunkToBackingStore(addedIndicesResettableChunk, firstSliceDestination); final long lastBackingChunkDestination = - firstBackingChunkDestination + tablesBackingChunk.size() - 1; - final OrderedKeys initialDestinationsSlice = initialDestinationsIterator - .getNextOrderedKeysThrough(lastBackingChunkDestination); + firstBackingChunkDestination + tablesBackingChunk.size() - 1; + final OrderedKeys initialDestinationsSlice = + initialDestinationsIterator.getNextOrderedKeysThrough(lastBackingChunkDestination); - final ObjectChunk tableMapKeyChunk = - tableMapKeysBoxer.box(tableMapKeysSource.getChunk(tableMapKeysGetContext, - initialDestinationsSlice)); + final ObjectChunk tableMapKeyChunk = tableMapKeysBoxer + .box(tableMapKeysSource.getChunk(tableMapKeysGetContext, initialDestinationsSlice)); final MutableInt tableMapKeyOffset = new MutableInt(); initialDestinationsSlice.forAllLongs((final long destinationToInitialize) -> { - final Object tableMapKey = - tableMapKeyChunk.get(tableMapKeyOffset.intValue()); + final Object tableMapKey = tableMapKeyChunk.get(tableMapKeyOffset.intValue()); tableMapKeyOffset.increment(); final int backingChunkOffset = - Math.toIntExact(destinationToInitialize - firstBackingChunkDestination); - final QueryTable unexpectedExistingTable = - tablesBackingChunk.get(backingChunkOffset); + Math.toIntExact(destinationToInitialize - firstBackingChunkDestination); + final QueryTable unexpectedExistingTable = tablesBackingChunk.get(backingChunkOffset); if (unexpectedExistingTable != null) { - throw new IllegalStateException("Found unexpected existing table " - + unexpectedExistingTable + " in initial slot " - + destinationToInitialize + " for key " + tableMapKey); + throw new IllegalStateException("Found unexpected existing table " + unexpectedExistingTable + + " in initial slot " + destinationToInitialize + " for key " + tableMapKey); } - 
final Index initialIndex = - extractAndClearIndex(addedIndicesBackingChunk, backingChunkOffset); + final Index initialIndex = extractAndClearIndex(addedIndicesBackingChunk, backingChunkOffset); initialIndex.compact(); final QueryTable newTable = makeSubTable(initialIndex); tablesBackingChunk.set(backingChunkOffset, newTable); - final Table unexpectedPrepopulatedTable = - tableMap.put(tableMapKey, newTable); + final Table unexpectedPrepopulatedTable = tableMap.put(tableMapKey, newTable); if (unexpectedPrepopulatedTable != null) { throw new IllegalStateException("Found unexpected prepopulated table " - + unexpectedPrepopulatedTable + " after setting initial slot " - + destinationToInitialize + " for key " + tableMapKey); + + unexpectedPrepopulatedTable + " after setting initial slot " + + destinationToInitialize + " for key " + tableMapKey); } }); } @@ -506,29 +464,24 @@ public void propagateInitialState(@NotNull final QueryTable resultTable) { } @Override - public UnaryOperator initializeRefreshing( - @NotNull final QueryTable resultTable, - @NotNull final LivenessReferent aggregationUpdateListener) { + public UnaryOperator initializeRefreshing(@NotNull final QueryTable resultTable, + @NotNull final LivenessReferent aggregationUpdateListener) { this.aggregationUpdateListener = aggregationUpdateListener; if (aggregationUpdateListener instanceof NotificationQueue.Dependency) { tableMap.setDependency((NotificationQueue.Dependency) aggregationUpdateListener); } tableMap.addParentReference(aggregationUpdateListener); - tableMap.values() - .forEach(st -> ((DynamicNode) st).addParentReference(aggregationUpdateListener)); - return IterativeChunkedAggregationOperator.super.initializeRefreshing(resultTable, - aggregationUpdateListener); + tableMap.values().forEach(st -> ((DynamicNode) st).addParentReference(aggregationUpdateListener)); + return IterativeChunkedAggregationOperator.super.initializeRefreshing(resultTable, aggregationUpdateListener); } @Override public void 
resetForStep(@NotNull final ShiftAwareListener.Update upstream) { stepShiftedDestinations = Index.CURRENT_FACTORY.getEmptyIndex(); - final boolean upstreamModified = - upstream.modified.nonempty() && upstream.modifiedColumnSet.nonempty(); + final boolean upstreamModified = upstream.modified.nonempty() && upstream.modifiedColumnSet.nonempty(); if (upstreamModified) { // We re-use this for all sub-tables that have modifies. - upstreamToResultTransformer.clearAndTransform(upstream.modifiedColumnSet, - resultModifiedColumnSet); + upstreamToResultTransformer.clearAndTransform(upstream.modifiedColumnSet, resultModifiedColumnSet); stepValuesModified = resultModifiedColumnSet.nonempty(); } else { stepValuesModified = false; @@ -537,80 +490,71 @@ public void resetForStep(@NotNull final ShiftAwareListener.Update upstream) { @Override public void propagateUpdates(@NotNull final ShiftAwareListener.Update downstream, - @NotNull final ReadOnlyIndex newDestinations) { - if (downstream.added.isEmpty() && downstream.removed.isEmpty() - && downstream.modified.isEmpty() && stepShiftedDestinations.isEmpty()) { + @NotNull final ReadOnlyIndex newDestinations) { + if (downstream.added.isEmpty() && downstream.removed.isEmpty() && downstream.modified.isEmpty() + && stepShiftedDestinations.isEmpty()) { stepShiftedDestinations = null; return; } if (downstream.added.nonempty()) { - try (final OrderedKeys resurrectedDestinations = - downstream.added.minus(newDestinations)) { + try (final OrderedKeys resurrectedDestinations = downstream.added.minus(newDestinations)) { propagateResurrectedDestinations(resurrectedDestinations); propagateNewDestinations(newDestinations); } } propagateUpdatesToRemovedDestinations(downstream.removed); - try (final OrderedKeys modifiedOrShiftedDestinations = - downstream.modified.union(stepShiftedDestinations)) { + try (final OrderedKeys modifiedOrShiftedDestinations = downstream.modified.union(stepShiftedDestinations)) { stepShiftedDestinations = null; 
propagateUpdatesToModifiedDestinations(modifiedOrShiftedDestinations); } } - private void propagateResurrectedDestinations( - @NotNull final OrderedKeys resurrectedDestinations) { + private void propagateResurrectedDestinations(@NotNull final OrderedKeys resurrectedDestinations) { if (resurrectedDestinations.isEmpty()) { return; } - try ( - final ResettableWritableObjectChunk tablesResettableChunk = + try (final ResettableWritableObjectChunk tablesResettableChunk = ResettableWritableObjectChunk.makeResettableChunk(); - final ResettableWritableObjectChunk addedIndicesResettableChunk = - ResettableWritableObjectChunk.makeResettableChunk(); - final OrderedKeys.Iterator resurrectedDestinationsIterator = - resurrectedDestinations.getOrderedKeysIterator()) { + final ResettableWritableObjectChunk addedIndicesResettableChunk = + ResettableWritableObjectChunk.makeResettableChunk(); + final OrderedKeys.Iterator resurrectedDestinationsIterator = + resurrectedDestinations.getOrderedKeysIterator()) { // Destinations that were added can't have any removals, modifications, or shifts. 
// noinspection unchecked - final ObjectChunk tablesBackingChunk = - tablesResettableChunk.asObjectChunk(); + final ObjectChunk tablesBackingChunk = tablesResettableChunk.asObjectChunk(); // noinspection unchecked final WritableObjectChunk addedIndicesBackingChunk = - addedIndicesResettableChunk.asWritableObjectChunk(); + addedIndicesResettableChunk.asWritableObjectChunk(); while (resurrectedDestinationsIterator.hasMore()) { final long firstSliceDestination = resurrectedDestinationsIterator.peekNextKey(); - final long firstBackingChunkDestination = tables - .resetWritableChunkToBackingStore(tablesResettableChunk, firstSliceDestination); - addedIndices.resetWritableChunkToBackingStore(addedIndicesResettableChunk, - firstSliceDestination); - final long lastBackingChunkDestination = - firstBackingChunkDestination + tablesBackingChunk.size() - 1; - final OrderedKeys resurrectedDestinationsSlice = resurrectedDestinationsIterator - .getNextOrderedKeysThrough(lastBackingChunkDestination); + final long firstBackingChunkDestination = + tables.resetWritableChunkToBackingStore(tablesResettableChunk, firstSliceDestination); + addedIndices.resetWritableChunkToBackingStore(addedIndicesResettableChunk, firstSliceDestination); + final long lastBackingChunkDestination = firstBackingChunkDestination + tablesBackingChunk.size() - 1; + final OrderedKeys resurrectedDestinationsSlice = + resurrectedDestinationsIterator.getNextOrderedKeysThrough(lastBackingChunkDestination); resurrectedDestinationsSlice.forAllLongs((final long resurrectedDestination) -> { final int backingChunkOffset = - Math.toIntExact(resurrectedDestination - firstBackingChunkDestination); + Math.toIntExact(resurrectedDestination - firstBackingChunkDestination); final QueryTable resurrectedTable = tablesBackingChunk.get(backingChunkOffset); if (resurrectedTable == NONEXISTENT_TABLE) { return; } if (resurrectedTable == null) { - throw new IllegalStateException("Missing resurrected table in slot " - + 
resurrectedDestination + " for table map key " - + tableMapKeysSource.get(resurrectedDestination)); + throw new IllegalStateException("Missing resurrected table in slot " + resurrectedDestination + + " for table map key " + tableMapKeysSource.get(resurrectedDestination)); } - // This table existed already, and has been "resurrected" after becoming empty - // previously. We must notify. + // This table existed already, and has been "resurrected" after becoming empty previously. We must + // notify. final ShiftAwareListener.Update downstream = new ShiftAwareListener.Update(); - downstream.added = nullToEmpty( - extractAndClearIndex(addedIndicesBackingChunk, backingChunkOffset)); + downstream.added = nullToEmpty(extractAndClearIndex(addedIndicesBackingChunk, backingChunkOffset)); downstream.removed = Index.CURRENT_FACTORY.getEmptyIndex(); downstream.modified = Index.CURRENT_FACTORY.getEmptyIndex(); downstream.shifted = IndexShiftData.EMPTY; @@ -618,8 +562,7 @@ private void propagateResurrectedDestinations( resurrectedTable.getIndex().compact(); - Assert.assertion(resurrectedTable.getIndex().isEmpty(), - "resurrectedTable.getIndex().isEmpty()"); + Assert.assertion(resurrectedTable.getIndex().isEmpty(), "resurrectedTable.getIndex().isEmpty()"); resurrectedTable.getIndex().insert(downstream.added); resurrectedTable.notifyListeners(downstream); }); @@ -632,78 +575,70 @@ private void propagateNewDestinations(@NotNull final OrderedKeys newDestinations return; } final boolean retainedTableMap = tableMap.tryRetainReference(); - final boolean retainedAggregationUpdateListener = - aggregationUpdateListener.tryRetainReference(); + final boolean retainedAggregationUpdateListener = aggregationUpdateListener.tryRetainReference(); final boolean allowCreation = retainedTableMap && retainedAggregationUpdateListener; final boolean setCallSite = QueryPerformanceRecorder.setCallsite(callSite); - try ( - final ChunkSource.GetContext tableMapKeysGetContext = + try (final 
ChunkSource.GetContext tableMapKeysGetContext = tableMapKeysSource.makeGetContext(WRITE_THROUGH_CHUNK_SIZE); - final ChunkBoxer.BoxerKernel tableMapKeysBoxer = - ChunkBoxer.getBoxer(tableMapKeysSource.getChunkType(), WRITE_THROUGH_CHUNK_SIZE); - final ResettableWritableObjectChunk tablesResettableChunk = - ResettableWritableObjectChunk.makeResettableChunk(); - final ResettableWritableObjectChunk addedIndicesResettableChunk = - ResettableWritableObjectChunk.makeResettableChunk(); - final ResettableWritableObjectChunk removedIndicesResettableChunk = - allowCreation ? null : ResettableWritableObjectChunk.makeResettableChunk(); - final ResettableWritableObjectChunk modifiedIndicesResettableChunk = - allowCreation ? null : ResettableWritableObjectChunk.makeResettableChunk(); - final ResettableWritableObjectChunk shiftDataBuildersResettableChunk = - allowCreation ? null : ResettableWritableObjectChunk.makeResettableChunk(); - final OrderedKeys.Iterator newDestinationsIterator = - newDestinations.getOrderedKeysIterator()) { + final ChunkBoxer.BoxerKernel tableMapKeysBoxer = + ChunkBoxer.getBoxer(tableMapKeysSource.getChunkType(), WRITE_THROUGH_CHUNK_SIZE); + final ResettableWritableObjectChunk tablesResettableChunk = + ResettableWritableObjectChunk.makeResettableChunk(); + final ResettableWritableObjectChunk addedIndicesResettableChunk = + ResettableWritableObjectChunk.makeResettableChunk(); + final ResettableWritableObjectChunk removedIndicesResettableChunk = + allowCreation ? null : ResettableWritableObjectChunk.makeResettableChunk(); + final ResettableWritableObjectChunk modifiedIndicesResettableChunk = + allowCreation ? null : ResettableWritableObjectChunk.makeResettableChunk(); + final ResettableWritableObjectChunk shiftDataBuildersResettableChunk = + allowCreation ? 
null : ResettableWritableObjectChunk.makeResettableChunk(); + final OrderedKeys.Iterator newDestinationsIterator = newDestinations.getOrderedKeysIterator()) { // noinspection unchecked final WritableObjectChunk tablesBackingChunk = - tablesResettableChunk.asWritableObjectChunk(); + tablesResettableChunk.asWritableObjectChunk(); // noinspection unchecked final WritableObjectChunk addedIndicesBackingChunk = - addedIndicesResettableChunk.asWritableObjectChunk(); + addedIndicesResettableChunk.asWritableObjectChunk(); // noinspection unchecked final WritableObjectChunk removedIndicesBackingChunk = - allowCreation ? null : removedIndicesResettableChunk.asWritableObjectChunk(); + allowCreation ? null : removedIndicesResettableChunk.asWritableObjectChunk(); // noinspection unchecked final WritableObjectChunk modifiedIndicesBackingChunk = - allowCreation ? null : modifiedIndicesResettableChunk.asWritableObjectChunk(); + allowCreation ? null : modifiedIndicesResettableChunk.asWritableObjectChunk(); // noinspection unchecked final WritableObjectChunk shiftDataBuildersBackingChunk = - allowCreation ? null : shiftDataBuildersResettableChunk.asWritableObjectChunk(); + allowCreation ? 
null : shiftDataBuildersResettableChunk.asWritableObjectChunk(); while (newDestinationsIterator.hasMore()) { final long firstSliceDestination = newDestinationsIterator.peekNextKey(); - final long firstBackingChunkDestination = tables - .resetWritableChunkToBackingStore(tablesResettableChunk, firstSliceDestination); - addedIndices.resetWritableChunkToBackingStore(addedIndicesResettableChunk, - firstSliceDestination); + final long firstBackingChunkDestination = + tables.resetWritableChunkToBackingStore(tablesResettableChunk, firstSliceDestination); + addedIndices.resetWritableChunkToBackingStore(addedIndicesResettableChunk, firstSliceDestination); if (!allowCreation) { removedIndices.resetWritableChunkToBackingStore(removedIndicesResettableChunk, - firstSliceDestination); + firstSliceDestination); modifiedIndices.resetWritableChunkToBackingStore(modifiedIndicesResettableChunk, - firstSliceDestination); - shiftDataBuilders.resetWritableChunkToBackingStore( - shiftDataBuildersResettableChunk, firstSliceDestination); + firstSliceDestination); + shiftDataBuilders.resetWritableChunkToBackingStore(shiftDataBuildersResettableChunk, + firstSliceDestination); } - final long lastBackingChunkDestination = - firstBackingChunkDestination + tablesBackingChunk.size() - 1; + final long lastBackingChunkDestination = firstBackingChunkDestination + tablesBackingChunk.size() - 1; final OrderedKeys newDestinationsSlice = - newDestinationsIterator.getNextOrderedKeysThrough(lastBackingChunkDestination); + newDestinationsIterator.getNextOrderedKeysThrough(lastBackingChunkDestination); final ObjectChunk tableMapKeyChunk = tableMapKeysBoxer - .box(tableMapKeysSource.getChunk(tableMapKeysGetContext, newDestinationsSlice)); + .box(tableMapKeysSource.getChunk(tableMapKeysGetContext, newDestinationsSlice)); final MutableInt tableMapKeyOffset = new MutableInt(); newDestinationsSlice.forAllLongs((final long newDestination) -> { final Object tableMapKey = 
tableMapKeyChunk.get(tableMapKeyOffset.intValue()); tableMapKeyOffset.increment(); - final int backingChunkOffset = - Math.toIntExact(newDestination - firstBackingChunkDestination); - final QueryTable unexpectedExistingTable = - tablesBackingChunk.get(backingChunkOffset); + final int backingChunkOffset = Math.toIntExact(newDestination - firstBackingChunkDestination); + final QueryTable unexpectedExistingTable = tablesBackingChunk.get(backingChunkOffset); if (unexpectedExistingTable != null) { - throw new IllegalStateException( - "Found unexpected existing table " + unexpectedExistingTable + throw new IllegalStateException("Found unexpected existing table " + unexpectedExistingTable + " in new slot " + newDestination + " for key " + tableMapKey); } @@ -712,17 +647,16 @@ private void propagateNewDestinations(@NotNull final OrderedKeys newDestinations if (allowCreation) { final MutableBoolean newTableAllocated = new MutableBoolean(); final QueryTable newOrPrepopulatedTable = - (QueryTable) tableMap.computeIfAbsent(tableMapKey, (unused) -> { - final Index newIndex = extractAndClearIndex( - addedIndicesBackingChunk, backingChunkOffset); - newIndex.compact(); - final QueryTable newTable = makeSubTable(newIndex); - tablesBackingChunk.set(backingChunkOffset, newTable); - newTableAllocated.setTrue(); - return newTable; - }); - prepopulatedTable = - newTableAllocated.booleanValue() ? null : newOrPrepopulatedTable; + (QueryTable) tableMap.computeIfAbsent(tableMapKey, (unused) -> { + final Index newIndex = + extractAndClearIndex(addedIndicesBackingChunk, backingChunkOffset); + newIndex.compact(); + final QueryTable newTable = makeSubTable(newIndex); + tablesBackingChunk.set(backingChunkOffset, newTable); + newTableAllocated.setTrue(); + return newTable; + }); + prepopulatedTable = newTableAllocated.booleanValue() ? 
null : newOrPrepopulatedTable; } else { prepopulatedTable = (QueryTable) tableMap.get(tableMapKey); } @@ -730,14 +664,13 @@ private void propagateNewDestinations(@NotNull final OrderedKeys newDestinations tablesBackingChunk.set(backingChunkOffset, prepopulatedTable); // "New" table already existed due to TableMap.populateKeys. - // We can ignore allowCreation; the table exists already, and must already - // retain appropriate referents. + // We can ignore allowCreation; the table exists already, and must already retain appropriate + // referents. // Additionally, we must notify of added rows. - final ShiftAwareListener.Update downstream = - new ShiftAwareListener.Update(); + final ShiftAwareListener.Update downstream = new ShiftAwareListener.Update(); - downstream.added = nullToEmpty( - extractAndClearIndex(addedIndicesBackingChunk, backingChunkOffset)); + downstream.added = + nullToEmpty(extractAndClearIndex(addedIndicesBackingChunk, backingChunkOffset)); downstream.removed = Index.CURRENT_FACTORY.getEmptyIndex(); downstream.modified = Index.CURRENT_FACTORY.getEmptyIndex(); downstream.shifted = IndexShiftData.EMPTY; @@ -746,15 +679,12 @@ private void propagateNewDestinations(@NotNull final OrderedKeys newDestinations prepopulatedTable.getIndex().insert(downstream.added); prepopulatedTable.notifyListeners(downstream); } else if (!allowCreation) { - // We will never try to create this table again, or accumulate further state - // for it. + // We will never try to create this table again, or accumulate further state for it. 
tablesBackingChunk.set(backingChunkOffset, NONEXISTENT_TABLE); addedIndicesBackingChunk.set(backingChunkOffset, NONEXISTENT_TABLE_INDEX); removedIndicesBackingChunk.set(backingChunkOffset, NONEXISTENT_TABLE_INDEX); - modifiedIndicesBackingChunk.set(backingChunkOffset, - NONEXISTENT_TABLE_INDEX); - shiftDataBuildersBackingChunk.set(backingChunkOffset, - NONEXISTENT_TABLE_SHIFT_BUILDER); + modifiedIndicesBackingChunk.set(backingChunkOffset, NONEXISTENT_TABLE_INDEX); + shiftDataBuildersBackingChunk.set(backingChunkOffset, NONEXISTENT_TABLE_SHIFT_BUILDER); } }); } @@ -783,10 +713,8 @@ private void populate(final Object key) { private void populateInternal(final Object key) { // We don't bother with complicated retention or non-existent result handling, here. - // If the user is calling TableMap.populateKeys (the only way to get here) they'd better be - // sure of liveness - // already, and they won't thank us for adding non-existent table tombstones rather than - // blowing up. + // If the user is calling TableMap.populateKeys (the only way to get here) they'd better be sure of liveness + // already, and they won't thank us for adding non-existent table tombstones rather than blowing up. final boolean setCallSite = QueryPerformanceRecorder.setCallsite(callSite); try { tableMap.computeIfAbsent(key, (unused) -> makeSubTable(null)); @@ -799,8 +727,7 @@ private void populateInternal(final Object key) { private QueryTable makeSubTable(@Nullable final Index initialIndexToInsert) { // We don't start from initialIndexToInsert because it is expected to be a CurrentOnlyIndex. 
- final QueryTable subTable = - parentTable.getSubTable(Index.FACTORY.getEmptyIndex(), resultModifiedColumnSet); + final QueryTable subTable = parentTable.getSubTable(Index.FACTORY.getEmptyIndex(), resultModifiedColumnSet); subTable.setRefreshing(parentTable.isRefreshing()); if (aggregationUpdateListener != null) { subTable.addParentReference(aggregationUpdateListener); @@ -813,155 +740,132 @@ private QueryTable makeSubTable(@Nullable final Index initialIndexToInsert) { return subTable; } - private void propagateUpdatesToRemovedDestinations( - @NotNull final OrderedKeys removedDestinations) { + private void propagateUpdatesToRemovedDestinations(@NotNull final OrderedKeys removedDestinations) { if (removedDestinations.isEmpty()) { return; } - try ( - final ResettableWritableObjectChunk tablesResettableChunk = - ResettableWritableObjectChunk.makeResettableChunk(); - final ResettableWritableObjectChunk removedIndicesResettableChunk = + try (final ResettableWritableObjectChunk tablesResettableChunk = ResettableWritableObjectChunk.makeResettableChunk(); - final OrderedKeys.Iterator removedDestinationsIterator = - removedDestinations.getOrderedKeysIterator()) { - // Destinations that were completely removed can't have any additions, modifications, or - // shifts. + final ResettableWritableObjectChunk removedIndicesResettableChunk = + ResettableWritableObjectChunk.makeResettableChunk(); + final OrderedKeys.Iterator removedDestinationsIterator = removedDestinations.getOrderedKeysIterator()) { + // Destinations that were completely removed can't have any additions, modifications, or shifts. 
// noinspection unchecked - final ObjectChunk tablesBackingChunk = - tablesResettableChunk.asObjectChunk(); + final ObjectChunk tablesBackingChunk = tablesResettableChunk.asObjectChunk(); // noinspection unchecked final WritableObjectChunk removedIndicesBackingChunk = - removedIndicesResettableChunk.asWritableObjectChunk(); + removedIndicesResettableChunk.asWritableObjectChunk(); while (removedDestinationsIterator.hasMore()) { final long firstSliceDestination = removedDestinationsIterator.peekNextKey(); - final long firstBackingChunkDestination = tables - .resetWritableChunkToBackingStore(tablesResettableChunk, firstSliceDestination); - removedIndices.resetWritableChunkToBackingStore(removedIndicesResettableChunk, - firstSliceDestination); - final long lastBackingChunkDestination = - firstBackingChunkDestination + tablesBackingChunk.size() - 1; - final OrderedKeys removedDestinationsSlice = removedDestinationsIterator - .getNextOrderedKeysThrough(lastBackingChunkDestination); + final long firstBackingChunkDestination = + tables.resetWritableChunkToBackingStore(tablesResettableChunk, firstSliceDestination); + removedIndices.resetWritableChunkToBackingStore(removedIndicesResettableChunk, firstSliceDestination); + final long lastBackingChunkDestination = firstBackingChunkDestination + tablesBackingChunk.size() - 1; + final OrderedKeys removedDestinationsSlice = + removedDestinationsIterator.getNextOrderedKeysThrough(lastBackingChunkDestination); removedDestinationsSlice.forAllLongs((final long removedDestination) -> { - final int backingChunkOffset = - Math.toIntExact(removedDestination - firstBackingChunkDestination); + final int backingChunkOffset = Math.toIntExact(removedDestination - firstBackingChunkDestination); final QueryTable removedTable = tablesBackingChunk.get(backingChunkOffset); if (removedTable == NONEXISTENT_TABLE) { return; } if (removedTable == null) { - throw new IllegalStateException("Missing removed table in slot " - + removedDestination + " for 
table map key " - + tableMapKeysSource.get(removedDestination)); + throw new IllegalStateException("Missing removed table in slot " + removedDestination + + " for table map key " + tableMapKeysSource.get(removedDestination)); } final ShiftAwareListener.Update downstream = new ShiftAwareListener.Update(); downstream.added = Index.CURRENT_FACTORY.getEmptyIndex(); - downstream.removed = nullToEmpty( - extractAndClearIndex(removedIndicesBackingChunk, backingChunkOffset)); + downstream.removed = + nullToEmpty(extractAndClearIndex(removedIndicesBackingChunk, backingChunkOffset)); downstream.modified = Index.CURRENT_FACTORY.getEmptyIndex(); downstream.shifted = IndexShiftData.EMPTY; downstream.modifiedColumnSet = ModifiedColumnSet.EMPTY; removedTable.getIndex().remove(downstream.removed); removedTable.getIndex().compact(); - Assert.assertion(removedTable.getIndex().isEmpty(), - "removedTable.getIndex().isEmpty()"); + Assert.assertion(removedTable.getIndex().isEmpty(), "removedTable.getIndex().isEmpty()"); removedTable.notifyListeners(downstream); }); } } } - private void propagateUpdatesToModifiedDestinations( - @NotNull final OrderedKeys modifiedDestinations) { + private void propagateUpdatesToModifiedDestinations(@NotNull final OrderedKeys modifiedDestinations) { if (modifiedDestinations.isEmpty()) { return; } - try ( - final ResettableWritableObjectChunk tablesResettableChunk = - ResettableWritableObjectChunk.makeResettableChunk(); - final ResettableWritableObjectChunk addedIndicesResettableChunk = + try (final ResettableWritableObjectChunk tablesResettableChunk = ResettableWritableObjectChunk.makeResettableChunk(); - final ResettableWritableObjectChunk removedIndicesResettableChunk = - ResettableWritableObjectChunk.makeResettableChunk(); - final ResettableWritableObjectChunk modifiedIndicesResettableChunk = - ResettableWritableObjectChunk.makeResettableChunk(); - final ResettableWritableObjectChunk shiftDataBuildersResettableChunk = - 
ResettableWritableObjectChunk.makeResettableChunk(); - final OrderedKeys.Iterator modifiedDestinationsIterator = - modifiedDestinations.getOrderedKeysIterator()) { + final ResettableWritableObjectChunk addedIndicesResettableChunk = + ResettableWritableObjectChunk.makeResettableChunk(); + final ResettableWritableObjectChunk removedIndicesResettableChunk = + ResettableWritableObjectChunk.makeResettableChunk(); + final ResettableWritableObjectChunk modifiedIndicesResettableChunk = + ResettableWritableObjectChunk.makeResettableChunk(); + final ResettableWritableObjectChunk shiftDataBuildersResettableChunk = + ResettableWritableObjectChunk.makeResettableChunk(); + final OrderedKeys.Iterator modifiedDestinationsIterator = + modifiedDestinations.getOrderedKeysIterator()) { // noinspection unchecked - final ObjectChunk tablesBackingChunk = - tablesResettableChunk.asObjectChunk(); + final ObjectChunk tablesBackingChunk = tablesResettableChunk.asObjectChunk(); // noinspection unchecked final WritableObjectChunk addedIndicesBackingChunk = - addedIndicesResettableChunk.asWritableObjectChunk(); + addedIndicesResettableChunk.asWritableObjectChunk(); // noinspection unchecked final WritableObjectChunk removedIndicesBackingChunk = - removedIndicesResettableChunk.asWritableObjectChunk(); + removedIndicesResettableChunk.asWritableObjectChunk(); // noinspection unchecked final WritableObjectChunk modifiedIndicesBackingChunk = - modifiedIndicesResettableChunk.asWritableObjectChunk(); + modifiedIndicesResettableChunk.asWritableObjectChunk(); // noinspection unchecked final WritableObjectChunk shiftDataBuildersBackingChunk = - shiftDataBuildersResettableChunk.asWritableObjectChunk(); + shiftDataBuildersResettableChunk.asWritableObjectChunk(); while (modifiedDestinationsIterator.hasMore()) { final long firstSliceDestination = modifiedDestinationsIterator.peekNextKey(); - final long firstBackingChunkDestination = tables - .resetWritableChunkToBackingStore(tablesResettableChunk, 
firstSliceDestination); - // The (valid) assumption is that the other write-through resets will address the - // same range. - addedIndices.resetWritableChunkToBackingStore(addedIndicesResettableChunk, - firstSliceDestination); - removedIndices.resetWritableChunkToBackingStore(removedIndicesResettableChunk, - firstSliceDestination); - modifiedIndices.resetWritableChunkToBackingStore(modifiedIndicesResettableChunk, - firstSliceDestination); + final long firstBackingChunkDestination = + tables.resetWritableChunkToBackingStore(tablesResettableChunk, firstSliceDestination); + // The (valid) assumption is that the other write-through resets will address the same range. + addedIndices.resetWritableChunkToBackingStore(addedIndicesResettableChunk, firstSliceDestination); + removedIndices.resetWritableChunkToBackingStore(removedIndicesResettableChunk, firstSliceDestination); + modifiedIndices.resetWritableChunkToBackingStore(modifiedIndicesResettableChunk, firstSliceDestination); shiftDataBuilders.resetWritableChunkToBackingStore(shiftDataBuildersResettableChunk, - firstSliceDestination); - final long lastBackingChunkDestination = - firstBackingChunkDestination + tablesBackingChunk.size() - 1; - final OrderedKeys modifiedDestinationsSlice = modifiedDestinationsIterator - .getNextOrderedKeysThrough(lastBackingChunkDestination); + firstSliceDestination); + final long lastBackingChunkDestination = firstBackingChunkDestination + tablesBackingChunk.size() - 1; + final OrderedKeys modifiedDestinationsSlice = + modifiedDestinationsIterator.getNextOrderedKeysThrough(lastBackingChunkDestination); modifiedDestinationsSlice.forAllLongs((final long modifiedDestination) -> { - final int backingChunkOffset = - Math.toIntExact(modifiedDestination - firstBackingChunkDestination); + final int backingChunkOffset = Math.toIntExact(modifiedDestination - firstBackingChunkDestination); final QueryTable modifiedTable = tablesBackingChunk.get(backingChunkOffset); if (modifiedTable == 
NONEXISTENT_TABLE) { return; } if (modifiedTable == null) { - throw new IllegalStateException("Missing modified table in slot " - + modifiedDestination + " for table map key " - + tableMapKeysSource.get(modifiedDestination)); + throw new IllegalStateException("Missing modified table in slot " + modifiedDestination + + " for table map key " + tableMapKeysSource.get(modifiedDestination)); } final ShiftAwareListener.Update downstream = new ShiftAwareListener.Update(); - downstream.added = nullToEmpty( - extractAndClearIndex(addedIndicesBackingChunk, backingChunkOffset)); - downstream.removed = nullToEmpty( - extractAndClearIndex(removedIndicesBackingChunk, backingChunkOffset)); + downstream.added = nullToEmpty(extractAndClearIndex(addedIndicesBackingChunk, backingChunkOffset)); + downstream.removed = + nullToEmpty(extractAndClearIndex(removedIndicesBackingChunk, backingChunkOffset)); downstream.modified = stepValuesModified - ? nullToEmpty( - extractAndClearIndex(modifiedIndicesBackingChunk, backingChunkOffset)) - : Index.CURRENT_FACTORY.getEmptyIndex(); - downstream.shifted = extractAndClearShiftDataBuilder( - shiftDataBuildersBackingChunk, backingChunkOffset); + ? nullToEmpty(extractAndClearIndex(modifiedIndicesBackingChunk, backingChunkOffset)) + : Index.CURRENT_FACTORY.getEmptyIndex(); + downstream.shifted = + extractAndClearShiftDataBuilder(shiftDataBuildersBackingChunk, backingChunkOffset); downstream.modifiedColumnSet = - downstream.modified.empty() ? ModifiedColumnSet.EMPTY - : resultModifiedColumnSet; + downstream.modified.empty() ? 
ModifiedColumnSet.EMPTY : resultModifiedColumnSet; if (downstream.removed.nonempty()) { modifiedTable.getIndex().remove(downstream.removed); @@ -981,8 +885,8 @@ private void propagateUpdatesToModifiedDestinations( } } - private static Index extractAndClearIndex( - @NotNull final WritableObjectChunk indicesChunk, final int offset) { + private static Index extractAndClearIndex(@NotNull final WritableObjectChunk indicesChunk, + final int offset) { final Index index = indicesChunk.get(offset); Assert.neq(index, "index", NONEXISTENT_TABLE_INDEX, "NONEXISTENT_TABLE_INDEX"); if (index != null) { @@ -996,12 +900,11 @@ private static Index nullToEmpty(@Nullable final Index index) { } private static IndexShiftData extractAndClearShiftDataBuilder( - @NotNull final WritableObjectChunk shiftDataBuildersChunk, - final int offset) { - final IndexShiftData.SmartCoalescingBuilder shiftDataBuilder = - shiftDataBuildersChunk.get(offset); + @NotNull final WritableObjectChunk shiftDataBuildersChunk, + final int offset) { + final IndexShiftData.SmartCoalescingBuilder shiftDataBuilder = shiftDataBuildersChunk.get(offset); Assert.neq(shiftDataBuilder, "shiftDataBuilder", NONEXISTENT_TABLE_SHIFT_BUILDER, - "NONEXISTENT_TABLE_SHIFT_BUILDER"); + "NONEXISTENT_TABLE_SHIFT_BUILDER"); if (shiftDataBuilder == null) { return IndexShiftData.EMPTY; } @@ -1011,9 +914,8 @@ private static IndexShiftData extractAndClearShiftDataBuilder( @Override public void propagateFailure(@NotNull final Throwable originalException, - @NotNull UpdatePerformanceTracker.Entry sourceEntry) { - tableMap.values().forEach( - st -> ((DynamicTable) st).notifyListenersOnError(originalException, sourceEntry)); + @NotNull UpdatePerformanceTracker.Entry sourceEntry) { + tableMap.values().forEach(st -> ((DynamicTable) st).notifyListenersOnError(originalException, sourceEntry)); } @Override diff --git a/DB/src/main/java/io/deephaven/db/v2/by/ChunkedOperatorAggregationHelper.java 
b/DB/src/main/java/io/deephaven/db/v2/by/ChunkedOperatorAggregationHelper.java index 371ed21d686..5d86f8b6cce 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/ChunkedOperatorAggregationHelper.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/ChunkedOperatorAggregationHelper.java @@ -40,30 +40,28 @@ public class ChunkedOperatorAggregationHelper { @VisibleForTesting public static boolean KEY_ONLY_SUBSTITUTION_ENABLED = - Configuration.getInstance().getBooleanWithDefault( - "ChunkedOperatorAggregationHelper.enableKeyOnlySubstitution", true); + Configuration.getInstance() + .getBooleanWithDefault("ChunkedOperatorAggregationHelper.enableKeyOnlySubstitution", true); static final int CHUNK_SIZE = 1 << 12; - public static QueryTable aggregation(AggregationContextFactory aggregationContextFactory, - QueryTable queryTable, SelectColumn[] groupByColumns) { - return aggregation(AggregationControl.DEFAULT_FOR_OPERATOR, aggregationContextFactory, - queryTable, groupByColumns); + public static QueryTable aggregation(AggregationContextFactory aggregationContextFactory, QueryTable queryTable, + SelectColumn[] groupByColumns) { + return aggregation(AggregationControl.DEFAULT_FOR_OPERATOR, aggregationContextFactory, queryTable, + groupByColumns); } @VisibleForTesting public static QueryTable aggregation(AggregationControl control, - AggregationContextFactory aggregationContextFactory, QueryTable queryTable, - SelectColumn[] groupByColumns) { + AggregationContextFactory aggregationContextFactory, QueryTable queryTable, SelectColumn[] groupByColumns) { final boolean viewRequired = groupByColumns.length > 0 - && Arrays.stream(groupByColumns).anyMatch(selectColumn -> !selectColumn.isRetain()); - final QueryTable withView = - !viewRequired ? queryTable : (QueryTable) queryTable.updateView(groupByColumns); + && Arrays.stream(groupByColumns).anyMatch(selectColumn -> !selectColumn.isRetain()); + final QueryTable withView = !viewRequired ? 
queryTable : (QueryTable) queryTable.updateView(groupByColumns); final AggregationContextFactory aggregationContextFactoryToUse; if (KEY_ONLY_SUBSTITUTION_ENABLED - && withView.getDefinition().getColumns().length == groupByColumns.length - && aggregationContextFactory.allowKeyOnlySubstitution()) { + && withView.getDefinition().getColumns().length == groupByColumns.length + && aggregationContextFactory.allowKeyOnlySubstitution()) { aggregationContextFactoryToUse = new KeyOnlyAggregationFactory(); } else { aggregationContextFactoryToUse = aggregationContextFactory; @@ -71,38 +69,36 @@ public static QueryTable aggregation(AggregationControl control, if (queryTable.hasAttribute(Table.REVERSE_LOOKUP_ATTRIBUTE)) { withView.setAttribute(Table.REVERSE_LOOKUP_ATTRIBUTE, - queryTable.getAttribute(Table.REVERSE_LOOKUP_ATTRIBUTE)); + queryTable.getAttribute(Table.REVERSE_LOOKUP_ATTRIBUTE)); } final Mutable resultHolder = new MutableObject<>(); final ShiftAwareSwapListener swapListener = - withView.createSwapListenerIfRefreshing(ShiftAwareSwapListener::new); + withView.createSwapListenerIfRefreshing(ShiftAwareSwapListener::new); withView.initializeWithSnapshot( - "by(" + aggregationContextFactoryToUse + ", " + Arrays.toString(groupByColumns) + ")", - swapListener, (usePrev, beforeClockValue) -> { - resultHolder.setValue(aggregation(control, swapListener, - aggregationContextFactoryToUse, withView, groupByColumns, usePrev)); - return true; - }); + "by(" + aggregationContextFactoryToUse + ", " + Arrays.toString(groupByColumns) + ")", swapListener, + (usePrev, beforeClockValue) -> { + resultHolder.setValue(aggregation(control, swapListener, aggregationContextFactoryToUse, withView, + groupByColumns, usePrev)); + return true; + }); return resultHolder.getValue(); } - private static QueryTable aggregation(AggregationControl control, - ShiftAwareSwapListener swapListener, AggregationContextFactory aggregationContextFactory, - QueryTable withView, SelectColumn[] groupByColumns, 
boolean usePrev) { + private static QueryTable aggregation(AggregationControl control, ShiftAwareSwapListener swapListener, + AggregationContextFactory aggregationContextFactory, QueryTable withView, SelectColumn[] groupByColumns, + boolean usePrev) { if (groupByColumns.length == 0) { return noKeyAggregation(swapListener, aggregationContextFactory, withView, usePrev); } - final String[] keyNames = - Arrays.stream(groupByColumns).map(SelectColumn::getName).toArray(String[]::new); + final String[] keyNames = Arrays.stream(groupByColumns).map(SelectColumn::getName).toArray(String[]::new); final ColumnSource[] keySources = - Arrays.stream(keyNames).map(withView::getColumnSource).toArray(ColumnSource[]::new); + Arrays.stream(keyNames).map(withView::getColumnSource).toArray(ColumnSource[]::new); final ColumnSource[] reinterpretedKeySources = Arrays.stream(keySources) - .map(ReinterpretUtilities::maybeConvertToPrimitive).toArray(ColumnSource[]::new); + .map(ReinterpretUtilities::maybeConvertToPrimitive).toArray(ColumnSource[]::new); - final AggregationContext ac = - aggregationContextFactory.makeAggregationContext(withView, keyNames); + final AggregationContext ac = aggregationContextFactory.makeAggregationContext(withView, keyNames); final PermuteKernel[] permuteKernels = ac.makePermuteKernels(); @@ -115,8 +111,7 @@ private static QueryTable aggregation(AggregationControl control, return staticGroupedAggregation(withView, keyNames[0], keySources[0], ac); } // we have no hasPrevGrouping method - useGrouping = - !usePrev && hasGrouping && Arrays.equals(reinterpretedKeySources, keySources); + useGrouping = !usePrev && hasGrouping && Arrays.equals(reinterpretedKeySources, keySources); } else { useGrouping = false; } @@ -124,14 +119,13 @@ private static QueryTable aggregation(AggregationControl control, final ChunkedOperatorAggregationStateManager stateManager; final IncrementalChunkedOperatorAggregationStateManager incrementalStateManager; if (withView.isRefreshing()) { - 
stateManager = - incrementalStateManager = new IncrementalChunkedOperatorAggregationStateManager( - reinterpretedKeySources, control.initialHashTableSize(withView), - control.getMaximumLoadFactor(), control.getTargetLoadFactor()); + stateManager = incrementalStateManager = new IncrementalChunkedOperatorAggregationStateManager( + reinterpretedKeySources, control.initialHashTableSize(withView), control.getMaximumLoadFactor(), + control.getTargetLoadFactor()); } else { stateManager = new StaticChunkedOperatorAggregationStateManager(reinterpretedKeySources, - control.initialHashTableSize(withView), control.getMaximumLoadFactor(), - control.getTargetLoadFactor()); + control.initialHashTableSize(withView), control.getMaximumLoadFactor(), + control.getTargetLoadFactor()); incrementalStateManager = null; } setReverseLookupFunction(keySources, ac, stateManager); @@ -140,11 +134,11 @@ private static QueryTable aggregation(AggregationControl control, if (useGrouping) { // This must be incremental, otherwise we would have done this earlier - initialGroupedKeyAddition(withView, reinterpretedKeySources, ac, - incrementalStateManager, outputPosition, usePrev); + initialGroupedKeyAddition(withView, reinterpretedKeySources, ac, incrementalStateManager, outputPosition, + usePrev); } else { - initialBucketedKeyAddition(withView, reinterpretedKeySources, ac, permuteKernels, - stateManager, outputPosition, usePrev); + initialBucketedKeyAddition(withView, reinterpretedKeySources, ac, permuteKernels, stateManager, + outputPosition, usePrev); } // Construct and return result table @@ -154,19 +148,18 @@ private static QueryTable aggregation(AggregationControl control, // Gather the result key columns final ColumnSource[] keyColumnsRaw = new ColumnSource[keyHashTableSources.length]; final ArrayBackedColumnSource[] keyColumnsCopied = - withView.isRefreshing() ? new ArrayBackedColumnSource[keyHashTableSources.length] - : null; + withView.isRefreshing() ? 
new ArrayBackedColumnSource[keyHashTableSources.length] : null; for (int kci = 0; kci < keyHashTableSources.length; ++kci) { ColumnSource resultKeyColumnSource = keyHashTableSources[kci]; if (keySources[kci] != reinterpretedKeySources[kci]) { - resultKeyColumnSource = ReinterpretUtilities - .convertToOriginal(keySources[kci].getType(), resultKeyColumnSource); + resultKeyColumnSource = + ReinterpretUtilities.convertToOriginal(keySources[kci].getType(), resultKeyColumnSource); } keyColumnsRaw[kci] = resultKeyColumnSource; if (withView.isRefreshing()) { // noinspection ConstantConditions,unchecked - keyColumnsCopied[kci] = ArrayBackedColumnSource - .getMemoryColumnSource(outputPosition.intValue(), keyColumnsRaw[kci].getType()); + keyColumnsCopied[kci] = ArrayBackedColumnSource.getMemoryColumnSource(outputPosition.intValue(), + keyColumnsRaw[kci].getType()); resultColumnSourceMap.put(keyNames[kci], keyColumnsCopied[kci]); } else { resultColumnSourceMap.put(keyNames[kci], keyColumnsRaw[kci]); @@ -191,57 +184,50 @@ private static QueryTable aggregation(AggregationControl control, incrementalStateManager.startTrackingPrevValues(); final boolean isStream = withView.isStream(); - final ShiftAwareListener listener = new BaseTable.ShiftAwareListenerImpl( - "by(" + aggregationContextFactory + ")", withView, result) { - @ReferentialIntegrity - final ShiftAwareSwapListener swapListenerHardReference = swapListener; - - final ModifiedColumnSet keysUpstreamModifiedColumnSet = - withView.newModifiedColumnSet(keyNames); - final ModifiedColumnSet[] operatorInputModifiedColumnSets = - ac.getInputModifiedColumnSets(withView); - final UnaryOperator[] resultModifiedColumnSetFactories = - ac.initializeRefreshing(result, this); - - @Override - public void onUpdate(@NotNull final Update upstream) { - final Update upstreamToUse = isStream ? 
adjustForStreaming(upstream) : upstream; - if (upstreamToUse.empty()) { - return; - } - final Update downstream; - try (final KeyedUpdateContext kuc = - new KeyedUpdateContext(ac, incrementalStateManager, - reinterpretedKeySources, permuteKernels, keysUpstreamModifiedColumnSet, - operatorInputModifiedColumnSets, - upstreamToUse, outputPosition)) { - downstream = kuc.computeDownstreamIndicesAndCopyKeys(withView.getIndex(), - keyColumnsRaw, keyColumnsCopied, - result.getModifiedColumnSetForUpdates(), - resultModifiedColumnSetFactories); - } - result.getIndex().update(downstream.added, downstream.removed); - result.notifyListeners(downstream); - } + final ShiftAwareListener listener = + new BaseTable.ShiftAwareListenerImpl("by(" + aggregationContextFactory + ")", withView, result) { + @ReferentialIntegrity + final ShiftAwareSwapListener swapListenerHardReference = swapListener; + + final ModifiedColumnSet keysUpstreamModifiedColumnSet = withView.newModifiedColumnSet(keyNames); + final ModifiedColumnSet[] operatorInputModifiedColumnSets = + ac.getInputModifiedColumnSets(withView); + final UnaryOperator[] resultModifiedColumnSetFactories = + ac.initializeRefreshing(result, this); + + @Override + public void onUpdate(@NotNull final Update upstream) { + final Update upstreamToUse = isStream ? 
adjustForStreaming(upstream) : upstream; + if (upstreamToUse.empty()) { + return; + } + final Update downstream; + try (final KeyedUpdateContext kuc = new KeyedUpdateContext(ac, incrementalStateManager, + reinterpretedKeySources, permuteKernels, keysUpstreamModifiedColumnSet, + operatorInputModifiedColumnSets, + upstreamToUse, outputPosition)) { + downstream = kuc.computeDownstreamIndicesAndCopyKeys(withView.getIndex(), keyColumnsRaw, + keyColumnsCopied, + result.getModifiedColumnSetForUpdates(), resultModifiedColumnSetFactories); + } + result.getIndex().update(downstream.added, downstream.removed); + result.notifyListeners(downstream); + } - @Override - public void onFailureInternal(@NotNull final Throwable originalException, - final UpdatePerformanceTracker.Entry sourceEntry) { - ac.propagateFailureToOperators(originalException, sourceEntry); - super.onFailureInternal(originalException, sourceEntry); - } - }; + @Override + public void onFailureInternal(@NotNull final Throwable originalException, + final UpdatePerformanceTracker.Entry sourceEntry) { + ac.propagateFailureToOperators(originalException, sourceEntry); + super.onFailureInternal(originalException, sourceEntry); + } + }; swapListener.setListenerAndResult(listener, result); result.addParentReference(swapListener); - // In general, result listeners depend on the swap listener for continued liveness, but - // most - // operations handle this by having the result table depend on both (in both a - // reachability sense - // and a liveness sense). That said, it is arguably very natural for the result listener - // to manage - // the swap listener. We do so in this case because byExternal requires it in order for - // the + // In general, result listeners depend on the swap listener for continued liveness, but most + // operations handle this by having the result table depend on both (in both a reachability sense + // and a liveness sense). 
That said, it is arguably very natural for the result listener to manage + // the swap listener. We do so in this case because byExternal requires it in order for the // sub-tables to continue ticking if the result Table and TableMap are released. listener.manage(swapListener); } @@ -252,24 +238,24 @@ public void onFailureInternal(@NotNull final Throwable originalException, private static Update adjustForStreaming(@NotNull final Update upstream) { // Streaming aggregations never have modifies or shifts from their parent: Assert.assertion(upstream.modified.empty() && upstream.shifted.empty(), - "upstream.modified.empty() && upstream.shifted.empty()"); + "upstream.modified.empty() && upstream.shifted.empty()"); // Streaming aggregations ignore removes: if (upstream.removed.empty()) { return upstream; } - return new Update(upstream.added, Index.CURRENT_FACTORY.getEmptyIndex(), upstream.modified, - upstream.shifted, upstream.modifiedColumnSet); + return new Update(upstream.added, Index.CURRENT_FACTORY.getEmptyIndex(), upstream.modified, upstream.shifted, + upstream.modifiedColumnSet); } - private static void setReverseLookupFunction(ColumnSource[] keySources, - AggregationContext ac, ChunkedOperatorAggregationStateManager stateManager) { + private static void setReverseLookupFunction(ColumnSource[] keySources, AggregationContext ac, + ChunkedOperatorAggregationStateManager stateManager) { if (keySources.length == 1) { if (keySources[0].getType() == DBDateTime.class) { ac.setReverseLookupFunction(key -> stateManager - .findPositionForKey(key == null ? null : DBTimeUtils.nanos((DBDateTime) key))); + .findPositionForKey(key == null ? 
null : DBTimeUtils.nanos((DBDateTime) key))); } else if (keySources[0].getType() == Boolean.class) { - ac.setReverseLookupFunction(key -> stateManager - .findPositionForKey(BooleanUtils.booleanAsByte((Boolean) key))); + ac.setReverseLookupFunction( + key -> stateManager.findPositionForKey(BooleanUtils.booleanAsByte((Boolean) key))); } else { ac.setReverseLookupFunction(stateManager::findPositionForKey); } @@ -278,23 +264,20 @@ private static void setReverseLookupFunction(ColumnSource[] keySources, for (int ii = 0; ii < keySources.length; ++ii) { if (keySources[ii].getType() == DBDateTime.class) { final int fii = ii; - transformers - .add(reinterpreted -> reinterpreted[fii] = reinterpreted[fii] == null ? null - : DBTimeUtils.nanos((DBDateTime) reinterpreted[fii])); + transformers.add(reinterpreted -> reinterpreted[fii] = + reinterpreted[fii] == null ? null : DBTimeUtils.nanos((DBDateTime) reinterpreted[fii])); } else if (keySources[ii].getType() == Boolean.class) { final int fii = ii; transformers.add(reinterpreted -> reinterpreted[fii] = - BooleanUtils.booleanAsByte((Boolean) reinterpreted[fii])); + BooleanUtils.booleanAsByte((Boolean) reinterpreted[fii])); } } if (transformers.isEmpty()) { - ac.setReverseLookupFunction( - sk -> stateManager.findPositionForKey(((SmartKey) sk).values_)); + ac.setReverseLookupFunction(sk -> stateManager.findPositionForKey(((SmartKey) sk).values_)); } else { ac.setReverseLookupFunction(key -> { final SmartKey smartKey = (SmartKey) key; - final Object[] reinterpreted = - Arrays.copyOf(smartKey.values_, smartKey.values_.length); + final Object[] reinterpreted = Arrays.copyOf(smartKey.values_, smartKey.values_.length); for (final Consumer transform : transformers) { transform.accept(reinterpreted); } @@ -329,15 +312,14 @@ private static class KeyedUpdateContext implements SafeCloseable { private final IterativeChunkedAggregationOperator.BucketedContext[] bucketedContexts; private final IntIntTimsortKernel.IntIntSortKernelContext 
sortKernelContext; - // These are used for all access when only pre- or post-shift (or previous or current) are - // needed, else for pre-shift/previous + // These are used for all access when only pre- or post-shift (or previous or current) are needed, else for + // pre-shift/previous private final SharedContext sharedContext; private final ChunkSource.GetContext[] getContexts; private final WritableChunk[] workingChunks; private final WritableLongChunk permutedKeyIndices; - // These are used when post-shift/current values are needed concurrently with - // pre-shift/previous + // These are used when post-shift/current values are needed concurrently with pre-shift/previous private final SharedContext postSharedContext; private final ChunkSource.GetContext[] postGetContexts; private final WritableChunk[] postWorkingChunks; @@ -357,13 +339,13 @@ private static class KeyedUpdateContext implements SafeCloseable { private final WritableIntChunk emptiedSlots; private KeyedUpdateContext(@NotNull final AggregationContext ac, - @NotNull final IncrementalChunkedOperatorAggregationStateManager incrementalStateManager, - @NotNull final ColumnSource[] reinterpretedKeySources, - @NotNull final PermuteKernel[] permuteKernels, - @NotNull final ModifiedColumnSet keysUpstreamModifiedColumnSet, - @NotNull final ModifiedColumnSet[] operatorInputUpstreamModifiedColumnSets, - @NotNull final Update upstream, - @NotNull final MutableInt outputPosition) { + @NotNull final IncrementalChunkedOperatorAggregationStateManager incrementalStateManager, + @NotNull final ColumnSource[] reinterpretedKeySources, + @NotNull final PermuteKernel[] permuteKernels, + @NotNull final ModifiedColumnSet keysUpstreamModifiedColumnSet, + @NotNull final ModifiedColumnSet[] operatorInputUpstreamModifiedColumnSets, + @NotNull final Update upstream, + @NotNull final MutableInt outputPosition) { this.ac = ac; this.incrementalStateManager = incrementalStateManager; this.reinterpretedKeySources = 
reinterpretedKeySources; @@ -372,26 +354,22 @@ private KeyedUpdateContext(@NotNull final AggregationContext ac, this.outputPosition = outputPosition; updateUpstreamModifiedColumnSet = - upstream.modified.isEmpty() ? ModifiedColumnSet.EMPTY : upstream.modifiedColumnSet; - keysModified = - updateUpstreamModifiedColumnSet.containsAny(keysUpstreamModifiedColumnSet); + upstream.modified.isEmpty() ? ModifiedColumnSet.EMPTY : upstream.modifiedColumnSet; + keysModified = updateUpstreamModifiedColumnSet.containsAny(keysUpstreamModifiedColumnSet); shifted = upstream.shifted.nonempty(); processShifts = ac.requiresIndices() && shifted; - od = new OperatorDivision(ac, upstream.modified.nonempty(), - updateUpstreamModifiedColumnSet, operatorInputUpstreamModifiedColumnSets); + od = new OperatorDivision(ac, upstream.modified.nonempty(), updateUpstreamModifiedColumnSet, + operatorInputUpstreamModifiedColumnSets); - final long buildSize = - Math.max(upstream.added.size(), keysModified ? upstream.modified.size() : 0); + final long buildSize = Math.max(upstream.added.size(), keysModified ? upstream.modified.size() : 0); final long probeSizeForModifies = - (keysModified || od.anyOperatorHasModifiedInputColumns || ac.requiresIndices()) - ? upstream.modified.size() - : 0; - final long probeSizeWithoutShifts = - Math.max(upstream.removed.size(), probeSizeForModifies); - final long probeSize = processShifts - ? UpdateSizeCalculator.chunkSize(probeSizeWithoutShifts, upstream.shifted, - CHUNK_SIZE) - : probeSizeWithoutShifts; + (keysModified || od.anyOperatorHasModifiedInputColumns || ac.requiresIndices()) + ? upstream.modified.size() + : 0; + final long probeSizeWithoutShifts = Math.max(upstream.removed.size(), probeSizeForModifies); + final long probeSize = + processShifts ? 
UpdateSizeCalculator.chunkSize(probeSizeWithoutShifts, upstream.shifted, CHUNK_SIZE) + : probeSizeWithoutShifts; final int buildChunkSize = chunkSize(buildSize); final int probeChunkSize = chunkSize(probeSize); final int chunkSize = Math.max(buildChunkSize, probeChunkSize); @@ -403,10 +381,9 @@ private KeyedUpdateContext(@NotNull final AggregationContext ac, toClose = new SafeCloseableList(); - bucketedContexts = toClose - .addArray(new IterativeChunkedAggregationOperator.BucketedContext[ac.size()]); + bucketedContexts = toClose.addArray(new IterativeChunkedAggregationOperator.BucketedContext[ac.size()]); ac.initializeBucketedContexts(bucketedContexts, upstream, keysModified, - od.operatorsWithModifiedInputColumns); + od.operatorsWithModifiedInputColumns); sortKernelContext = toClose.add(IntIntTimsortKernel.createContext(chunkSize)); sharedContext = toClose.add(SharedContext.makeSharedContext()); @@ -415,15 +392,14 @@ private KeyedUpdateContext(@NotNull final AggregationContext ac, // noinspection unchecked workingChunks = toClose.addArray(new WritableChunk[ac.size()]); ac.initializeWorkingChunks(workingChunks, chunkSize); - permutedKeyIndices = ac.requiresIndices() || keysModified - ? toClose.add(WritableLongChunk.makeWritableChunk(chunkSize)) - : null; + permutedKeyIndices = + ac.requiresIndices() || keysModified ? toClose.add(WritableLongChunk.makeWritableChunk(chunkSize)) + : null; - postPermutedKeyIndices = processShifts || keysModified // Note that we need this for - // modified keys because we use - // it to hold removed key indices - ? toClose.add(WritableLongChunk.makeWritableChunk(chunkSize)) - : null; + postPermutedKeyIndices = processShifts || keysModified // Note that we need this for modified keys because + // we use it to hold removed key indices + ? 
toClose.add(WritableLongChunk.makeWritableChunk(chunkSize)) + : null; if (od.anyOperatorHasModifiedInputColumns || processShifts) { postSharedContext = toClose.add(SharedContext.makeSharedContext()); @@ -443,20 +419,17 @@ private KeyedUpdateContext(@NotNull final AggregationContext ac, chunkPositions = toClose.add(WritableIntChunk.makeWritableChunk(chunkSize)); slots = toClose.add(WritableIntChunk.makeWritableChunk(chunkSize)); modifiedSlots = toClose.add(WritableBooleanChunk.makeWritableChunk(chunkSize)); - slotsModifiedByOperator = - toClose.add(WritableBooleanChunk.makeWritableChunk(chunkSize)); + slotsModifiedByOperator = toClose.add(WritableBooleanChunk.makeWritableChunk(chunkSize)); if (buildSize > 0) { - bc = toClose.add( - incrementalStateManager.makeBuildContext(reinterpretedKeySources, buildSize)); + bc = toClose.add(incrementalStateManager.makeBuildContext(reinterpretedKeySources, buildSize)); reincarnatedSlots = toClose.add(WritableIntChunk.makeWritableChunk(buildChunkSize)); } else { bc = null; reincarnatedSlots = null; } if (probeSize > 0) { - pc = toClose.add( - incrementalStateManager.makeProbeContext(reinterpretedKeySources, probeSize)); + pc = toClose.add(incrementalStateManager.makeProbeContext(reinterpretedKeySources, probeSize)); } else { pc = null; } @@ -473,11 +446,11 @@ public final void close() { } private Update computeDownstreamIndicesAndCopyKeys( - @NotNull final ReadOnlyIndex upstreamIndex, - @NotNull final ColumnSource[] keyColumnsRaw, - @NotNull final WritableSource[] keyColumnsCopied, - @NotNull final ModifiedColumnSet resultModifiedColumnSet, - @NotNull final UnaryOperator[] resultModifiedColumnSetFactories) { + @NotNull final ReadOnlyIndex upstreamIndex, + @NotNull final ColumnSource[] keyColumnsRaw, + @NotNull final WritableSource[] keyColumnsCopied, + @NotNull final ModifiedColumnSet resultModifiedColumnSet, + @NotNull final UnaryOperator[] resultModifiedColumnSetFactories) { final int previousLastState = 
outputPosition.intValue(); ac.resetOperatorsForStep(upstream); @@ -485,64 +458,58 @@ private Update computeDownstreamIndicesAndCopyKeys( doRemoves(upstream.removed); } if (upstream.modified.nonempty() && (od.anyOperatorHasModifiedInputColumns - || od.anyOperatorWithoutModifiedInputColumnsRequiresIndices || keysModified)) { + || od.anyOperatorWithoutModifiedInputColumnsRequiresIndices || keysModified)) { try (final ModifySplitResult split = - keysModified ? splitKeyModificationsAndDoKeyChangeRemoves() : null) { + keysModified ? splitKeyModificationsAndDoKeyChangeRemoves() : null) { if (processShifts) { try (final Index postShiftIndex = upstreamIndex.minus(upstream.added)) { if (keysModified) { postShiftIndex.remove(split.keyChangeIndicesPostShift); } - doShifts(postShiftIndex); // Also handles shifted same-key modifications - // for modified-input operators that require - // indices (if any) + doShifts(postShiftIndex); // Also handles shifted same-key modifications for modified-input + // operators that require indices (if any) } try (final ReadOnlyIndex keysSameUnshiftedModifies = - keysModified ? null : getUnshiftedModifies()) { + keysModified ? null : getUnshiftedModifies()) { // Do unshifted modifies for everyone assert !keysModified || split.unshiftedSameSlotIndices != null; final ReadOnlyIndex unshiftedSameSlotModifies = - keysModified ? split.unshiftedSameSlotIndices - : keysSameUnshiftedModifies; - doSameSlotModifies(unshiftedSameSlotModifies, unshiftedSameSlotModifies, - true /* - * We don't process shifts unless some operator requires - * indices - */, - od.operatorsWithModifiedInputColumns, - od.operatorsWithoutModifiedInputColumnsThatRequireIndices); + keysModified ? 
split.unshiftedSameSlotIndices : keysSameUnshiftedModifies; + doSameSlotModifies(unshiftedSameSlotModifies, unshiftedSameSlotModifies, true /* + * We don't + * process + * shifts + * unless some + * operator + * requires + * indices + */, + od.operatorsWithModifiedInputColumns, + od.operatorsWithoutModifiedInputColumnsThatRequireIndices); if (od.anyOperatorWithModifiedInputColumnsIgnoresIndices) { - // Do shifted same-key modifies for index-only and modified-input - // operators that don't require indices - try ( - final ReadOnlyIndex removeIndex = - keysModified - ? unshiftedSameSlotModifies - .union(split.keyChangeIndicesPostShift) - : null; - final ReadOnlyIndex shiftedSameSlotModifiesPost = - upstream.modified - .minus(removeIndex == null ? unshiftedSameSlotModifies - : removeIndex); - final Index shiftedSameSlotModifiesPre = - shiftedSameSlotModifiesPost.clone()) { + // Do shifted same-key modifies for index-only and modified-input operators that don't + // require indices + try (final ReadOnlyIndex removeIndex = + keysModified ? unshiftedSameSlotModifies.union(split.keyChangeIndicesPostShift) + : null; + final ReadOnlyIndex shiftedSameSlotModifiesPost = upstream.modified + .minus(removeIndex == null ? 
unshiftedSameSlotModifies : removeIndex); + final Index shiftedSameSlotModifiesPre = shiftedSameSlotModifiesPost.clone()) { upstream.shifted.unapply(shiftedSameSlotModifiesPre); - doSameSlotModifies(shiftedSameSlotModifiesPre, - shiftedSameSlotModifiesPost, true, - od.operatorsWithModifiedInputColumnsThatIgnoreIndices, - od.operatorsThatRequireIndices); + doSameSlotModifies(shiftedSameSlotModifiesPre, shiftedSameSlotModifiesPost, true, + od.operatorsWithModifiedInputColumnsThatIgnoreIndices, + od.operatorsThatRequireIndices); } } else if (ac.requiresIndices()) { // Do shifted same-key modifies for index-only operators try (final Index shiftedSameSlotModifiesPost = - upstream.modified.minus(unshiftedSameSlotModifies)) { + upstream.modified.minus(unshiftedSameSlotModifies)) { if (keysModified) { - shiftedSameSlotModifiesPost - .remove(split.keyChangeIndicesPostShift); + shiftedSameSlotModifiesPost.remove(split.keyChangeIndicesPostShift); } doSameSlotModifyIndicesOnly(shiftedSameSlotModifiesPost, - od.operatorsThatRequireIndices); + od.operatorsThatRequireIndices); } } } @@ -550,18 +517,17 @@ private Update computeDownstreamIndicesAndCopyKeys( assert !keysModified || split.sameSlotIndicesPreShift != null; assert !keysModified || split.sameSlotIndicesPostShift != null; doSameSlotModifies( - keysModified ? split.sameSlotIndicesPreShift - : upstream.getModifiedPreShift(), - keysModified ? split.sameSlotIndicesPostShift : upstream.modified, - ac.requiresIndices(), - od.operatorsWithModifiedInputColumns, - od.operatorsWithoutModifiedInputColumnsThatRequireIndices); + keysModified ? split.sameSlotIndicesPreShift : upstream.getModifiedPreShift(), + keysModified ? split.sameSlotIndicesPostShift : upstream.modified, + ac.requiresIndices(), + od.operatorsWithModifiedInputColumns, + od.operatorsWithoutModifiedInputColumnsThatRequireIndices); } else { assert !keysModified || split.sameSlotIndicesPostShift != null; doSameSlotModifyIndicesOnly( - keysModified ? 
split.sameSlotIndicesPostShift : upstream.modified, - od.operatorsWithoutModifiedInputColumnsThatRequireIndices); + keysModified ? split.sameSlotIndicesPostShift : upstream.modified, + od.operatorsWithoutModifiedInputColumnsThatRequireIndices); } if (keysModified) { doInserts(split.keyChangeIndicesPostShift, false); @@ -579,8 +545,7 @@ private Update computeDownstreamIndicesAndCopyKeys( final Update downstream = new Update(); downstream.shifted = IndexShiftData.EMPTY; - try (final ReadOnlyIndex newStates = - makeNewStatesIndex(previousLastState, outputPosition.intValue() - 1)) { + try (final ReadOnlyIndex newStates = makeNewStatesIndex(previousLastState, outputPosition.intValue() - 1)) { downstream.added = reincarnatedStatesBuilder.getIndex(); downstream.removed = emptiedStatesBuilder.getIndex(); @@ -604,9 +569,8 @@ private Update computeDownstreamIndicesAndCopyKeys( ac.propagateChangesToOperators(downstream, newStates); } - extractDownstreamModifiedColumnSet(downstream, resultModifiedColumnSet, - modifiedOperators, updateUpstreamModifiedColumnSet, - resultModifiedColumnSetFactories); + extractDownstreamModifiedColumnSet(downstream, resultModifiedColumnSet, modifiedOperators, + updateUpstreamModifiedColumnSet, resultModifiedColumnSetFactories); return downstream; } @@ -615,32 +579,27 @@ private void doRemoves(@NotNull final OrderedKeys keyIndicesToRemove) { if (keyIndicesToRemove.isEmpty()) { return; } - try (final OrderedKeys.Iterator keyIndicesToRemoveIterator = - keyIndicesToRemove.getOrderedKeysIterator()) { + try (final OrderedKeys.Iterator keyIndicesToRemoveIterator = keyIndicesToRemove.getOrderedKeysIterator()) { while (keyIndicesToRemoveIterator.hasMore()) { - doRemovesForChunk( - keyIndicesToRemoveIterator.getNextOrderedKeysWithLength(CHUNK_SIZE)); + doRemovesForChunk(keyIndicesToRemoveIterator.getNextOrderedKeysWithLength(CHUNK_SIZE)); } } } private void doRemovesForChunk(@NotNull final OrderedKeys keyIndicesToRemoveChunk) { - 
incrementalStateManager.remove(pc, keyIndicesToRemoveChunk, reinterpretedKeySources, - slots, emptiedSlots); + incrementalStateManager.remove(pc, keyIndicesToRemoveChunk, reinterpretedKeySources, slots, emptiedSlots); emptiedStatesBuilder.addKeyIndicesChunk(emptiedSlots); propagateRemovesToOperators(keyIndicesToRemoveChunk, slots); } private void propagateRemovesToOperators(@NotNull final OrderedKeys keyIndicesToRemoveChunk, - @NotNull final WritableIntChunk slotsToRemoveFrom) { - findSlotRuns(sortKernelContext, runStarts, runLengths, chunkPositions, - slotsToRemoveFrom); + @NotNull final WritableIntChunk slotsToRemoveFrom) { + findSlotRuns(sortKernelContext, runStarts, runLengths, chunkPositions, slotsToRemoveFrom); if (ac.requiresIndices()) { - final LongChunk keyIndices = - keyIndicesToRemoveChunk.asKeyIndicesChunk(); + final LongChunk keyIndices = keyIndicesToRemoveChunk.asKeyIndicesChunk(); LongPermuteKernel.permuteInput(keyIndices, chunkPositions, permutedKeyIndices); } @@ -656,17 +615,15 @@ private void propagateRemovesToOperators(@NotNull final OrderedKeys keyIndicesTo final int inputSlot = ac.inputSlot(oi); if (oi == inputSlot) { - getAndPermuteChunk(ac.inputColumns[oi], getContexts[oi], - keyIndicesToRemoveChunk, true, permuteKernels[oi], chunkPositions, - workingChunks[oi]); + getAndPermuteChunk(ac.inputColumns[oi], getContexts[oi], keyIndicesToRemoveChunk, true, + permuteKernels[oi], chunkPositions, workingChunks[oi]); } - ac.operators[oi].removeChunk(bucketedContexts[oi], - inputSlot >= 0 ? workingChunks[inputSlot] : null, permutedKeyIndices, - slotsToRemoveFrom, runStarts, runLengths, - firstOperator ? modifiedSlots : slotsModifiedByOperator); + ac.operators[oi].removeChunk(bucketedContexts[oi], inputSlot >= 0 ? workingChunks[inputSlot] : null, + permutedKeyIndices, slotsToRemoveFrom, runStarts, runLengths, + firstOperator ? 
modifiedSlots : slotsModifiedByOperator); - anyOperatorModified = updateModificationState(modifiedOperators, modifiedSlots, - slotsModifiedByOperator, anyOperatorModified, firstOperator, oi); + anyOperatorModified = updateModificationState(modifiedOperators, modifiedSlots, slotsModifiedByOperator, + anyOperatorModified, firstOperator, oi); firstOperator = false; } @@ -675,44 +632,39 @@ private void propagateRemovesToOperators(@NotNull final OrderedKeys keyIndicesTo } } - private void doInserts(@NotNull final OrderedKeys keyIndicesToInsert, - final boolean addToStateManager) { + private void doInserts(@NotNull final OrderedKeys keyIndicesToInsert, final boolean addToStateManager) { if (keyIndicesToInsert.isEmpty()) { return; } - try (final OrderedKeys.Iterator keyIndicesToInsertIterator = - keyIndicesToInsert.getOrderedKeysIterator()) { + try (final OrderedKeys.Iterator keyIndicesToInsertIterator = keyIndicesToInsert.getOrderedKeysIterator()) { while (keyIndicesToInsertIterator.hasMore()) { - doInsertsForChunk( - keyIndicesToInsertIterator.getNextOrderedKeysWithLength(CHUNK_SIZE), - addToStateManager); + doInsertsForChunk(keyIndicesToInsertIterator.getNextOrderedKeysWithLength(CHUNK_SIZE), + addToStateManager); } } } private void doInsertsForChunk(@NotNull final OrderedKeys keyIndicesToInsertChunk, - final boolean addToStateManager) { + final boolean addToStateManager) { if (addToStateManager) { - incrementalStateManager.addForUpdate(bc, keyIndicesToInsertChunk, - reinterpretedKeySources, outputPosition, slots, reincarnatedSlots); + incrementalStateManager.addForUpdate(bc, keyIndicesToInsertChunk, reinterpretedKeySources, + outputPosition, slots, reincarnatedSlots); reincarnatedStatesBuilder.addKeyIndicesChunk(reincarnatedSlots); } else { - incrementalStateManager.findModifications(pc, keyIndicesToInsertChunk, - reinterpretedKeySources, slots); + incrementalStateManager.findModifications(pc, keyIndicesToInsertChunk, reinterpretedKeySources, slots); } 
propagateInsertsToOperators(keyIndicesToInsertChunk, slots); } private void propagateInsertsToOperators(@NotNull final OrderedKeys keyIndicesToInsertChunk, - @NotNull final WritableIntChunk slotsToAddTo) { + @NotNull final WritableIntChunk slotsToAddTo) { ac.ensureCapacity(outputPosition.intValue()); findSlotRuns(sortKernelContext, runStarts, runLengths, chunkPositions, slotsToAddTo); if (ac.requiresIndices()) { - final LongChunk keyIndices = - keyIndicesToInsertChunk.asKeyIndicesChunk(); + final LongChunk keyIndices = keyIndicesToInsertChunk.asKeyIndicesChunk(); permutedKeyIndices.setSize(keyIndices.size()); LongPermuteKernel.permuteInput(keyIndices, chunkPositions, permutedKeyIndices); } @@ -729,17 +681,15 @@ private void propagateInsertsToOperators(@NotNull final OrderedKeys keyIndicesTo final int inputSlot = ac.inputSlot(oi); if (inputSlot == oi) { - getAndPermuteChunk(ac.inputColumns[oi], getContexts[oi], - keyIndicesToInsertChunk, false, permuteKernels[oi], chunkPositions, - workingChunks[oi]); + getAndPermuteChunk(ac.inputColumns[oi], getContexts[oi], keyIndicesToInsertChunk, false, + permuteKernels[oi], chunkPositions, workingChunks[oi]); } - ac.operators[oi].addChunk(bucketedContexts[oi], - inputSlot >= 0 ? workingChunks[inputSlot] : null, permutedKeyIndices, - slotsToAddTo, runStarts, runLengths, - firstOperator ? modifiedSlots : slotsModifiedByOperator); + ac.operators[oi].addChunk(bucketedContexts[oi], inputSlot >= 0 ? workingChunks[inputSlot] : null, + permutedKeyIndices, slotsToAddTo, runStarts, runLengths, + firstOperator ? 
modifiedSlots : slotsModifiedByOperator); - anyOperatorModified = updateModificationState(modifiedOperators, modifiedSlots, - slotsModifiedByOperator, anyOperatorModified, firstOperator, oi); + anyOperatorModified = updateModificationState(modifiedOperators, modifiedSlots, slotsModifiedByOperator, + anyOperatorModified, firstOperator, oi); firstOperator = false; } @@ -752,43 +702,37 @@ private void doShifts(@NotNull final ReadOnlyIndex postShiftIndexToProcess) { if (postShiftIndexToProcess.isEmpty()) { return; } - try ( - final WritableLongChunk preKeyIndices = + try (final WritableLongChunk preKeyIndices = WritableLongChunk.makeWritableChunk(pc.chunkSize); - final WritableLongChunk postKeyIndices = - WritableLongChunk.makeWritableChunk(pc.chunkSize)) { - final Runnable applyChunkedShift = - () -> doProcessShiftBucketed(preKeyIndices, postKeyIndices); - processUpstreamShifts(upstream, postShiftIndexToProcess, preKeyIndices, - postKeyIndices, applyChunkedShift); + final WritableLongChunk postKeyIndices = + WritableLongChunk.makeWritableChunk(pc.chunkSize)) { + final Runnable applyChunkedShift = () -> doProcessShiftBucketed(preKeyIndices, postKeyIndices); + processUpstreamShifts(upstream, postShiftIndexToProcess, preKeyIndices, postKeyIndices, + applyChunkedShift); } } - private void doProcessShiftBucketed( - @NotNull final WritableLongChunk preKeyIndices, - @NotNull final WritableLongChunk postKeyIndices) { + private void doProcessShiftBucketed(@NotNull final WritableLongChunk preKeyIndices, + @NotNull final WritableLongChunk postKeyIndices) { final boolean[] chunkInitialized = new boolean[ac.size()]; - try ( - final OrderedKeys preShiftChunkKeys = OrderedKeys - .wrapKeyIndicesChunkAsOrderedKeys(WritableLongChunk.downcast(preKeyIndices)); - final OrderedKeys postShiftChunkKeys = OrderedKeys - .wrapKeyIndicesChunkAsOrderedKeys(WritableLongChunk.downcast(postKeyIndices))) { + try (final OrderedKeys preShiftChunkKeys = + 
OrderedKeys.wrapKeyIndicesChunkAsOrderedKeys(WritableLongChunk.downcast(preKeyIndices)); + final OrderedKeys postShiftChunkKeys = + OrderedKeys.wrapKeyIndicesChunkAsOrderedKeys(WritableLongChunk.downcast(postKeyIndices))) { sharedContext.reset(); postSharedContext.reset(); Arrays.fill(chunkInitialized, false); - incrementalStateManager.findModifications(pc, postShiftChunkKeys, - reinterpretedKeySources, slots); + incrementalStateManager.findModifications(pc, postShiftChunkKeys, reinterpretedKeySources, slots); findSlotRuns(sortKernelContext, runStarts, runLengths, chunkPositions, slots); permutedKeyIndices.setSize(preKeyIndices.size()); postPermutedKeyIndices.setSize(postKeyIndices.size()); LongPermuteKernel.permuteInput(preKeyIndices, chunkPositions, permutedKeyIndices); - LongPermuteKernel.permuteInput(postKeyIndices, chunkPositions, - postPermutedKeyIndices); + LongPermuteKernel.permuteInput(postKeyIndices, chunkPositions, postPermutedKeyIndices); boolean anyOperatorModified = false; boolean firstOperator = true; @@ -803,21 +747,18 @@ private void doProcessShiftBucketed( } final int inputSlot = ac.inputSlot(oi); if (inputSlot >= 0 && !chunkInitialized[inputSlot]) { - getAndPermuteChunk(ac.inputColumns[inputSlot], getContexts[inputSlot], - preShiftChunkKeys, true, permuteKernels[inputSlot], chunkPositions, - workingChunks[inputSlot]); - getAndPermuteChunk(ac.inputColumns[inputSlot], postGetContexts[inputSlot], - postShiftChunkKeys, false, permuteKernels[inputSlot], chunkPositions, - postWorkingChunks[inputSlot]); + getAndPermuteChunk(ac.inputColumns[inputSlot], getContexts[inputSlot], preShiftChunkKeys, true, + permuteKernels[inputSlot], chunkPositions, workingChunks[inputSlot]); + getAndPermuteChunk(ac.inputColumns[inputSlot], postGetContexts[inputSlot], postShiftChunkKeys, + false, permuteKernels[inputSlot], chunkPositions, postWorkingChunks[inputSlot]); chunkInitialized[inputSlot] = true; } - ac.operators[oi].shiftChunk(bucketedContexts[oi], - inputSlot >= 0 ? 
workingChunks[inputSlot] : null, - inputSlot >= 0 ? postWorkingChunks[inputSlot] : null, permutedKeyIndices, - postPermutedKeyIndices, slots, runStarts, runLengths, - firstOperator ? modifiedSlots : slotsModifiedByOperator); + ac.operators[oi].shiftChunk(bucketedContexts[oi], inputSlot >= 0 ? workingChunks[inputSlot] : null, + inputSlot >= 0 ? postWorkingChunks[inputSlot] : null, permutedKeyIndices, + postPermutedKeyIndices, slots, runStarts, runLengths, + firstOperator ? modifiedSlots : slotsModifiedByOperator); anyOperatorModified = updateModificationState(modifiedOperators, modifiedSlots, - slotsModifiedByOperator, anyOperatorModified, firstOperator, oi); + slotsModifiedByOperator, anyOperatorModified, firstOperator, oi); firstOperator = false; } @@ -828,36 +769,33 @@ private void doProcessShiftBucketed( } private void doSameSlotModifies(@NotNull final OrderedKeys preShiftKeyIndicesToModify, - @NotNull final OrderedKeys postShiftKeyIndicesToModify, - final boolean supplyPostIndices, @NotNull final boolean[] operatorsToProcess, - @NotNull final boolean[] operatorsToProcessIndicesOnly) { + @NotNull final OrderedKeys postShiftKeyIndicesToModify, + final boolean supplyPostIndices, @NotNull final boolean[] operatorsToProcess, + @NotNull final boolean[] operatorsToProcessIndicesOnly) { final boolean shifted = preShiftKeyIndicesToModify != postShiftKeyIndicesToModify; - try ( - final OrderedKeys.Iterator preShiftIterator = - preShiftKeyIndicesToModify.getOrderedKeysIterator(); - final OrderedKeys.Iterator postShiftIterator = - shifted ? postShiftKeyIndicesToModify.getOrderedKeysIterator() : null) { + try (final OrderedKeys.Iterator preShiftIterator = preShiftKeyIndicesToModify.getOrderedKeysIterator(); + final OrderedKeys.Iterator postShiftIterator = + shifted ? 
postShiftKeyIndicesToModify.getOrderedKeysIterator() : null) { final boolean[] chunkInitialized = new boolean[ac.size()]; while (preShiftIterator.hasMore()) { final OrderedKeys preShiftKeyIndicesChunk = - preShiftIterator.getNextOrderedKeysWithLength(CHUNK_SIZE); + preShiftIterator.getNextOrderedKeysWithLength(CHUNK_SIZE); final OrderedKeys postShiftKeyIndicesChunk = - shifted ? postShiftIterator.getNextOrderedKeysWithLength(CHUNK_SIZE) - : preShiftKeyIndicesChunk; + shifted ? postShiftIterator.getNextOrderedKeysWithLength(CHUNK_SIZE) + : preShiftKeyIndicesChunk; sharedContext.reset(); postSharedContext.reset(); Arrays.fill(chunkInitialized, false); - incrementalStateManager.findModifications(pc, postShiftKeyIndicesChunk, - reinterpretedKeySources, slots); + incrementalStateManager.findModifications(pc, postShiftKeyIndicesChunk, reinterpretedKeySources, + slots); findSlotRuns(sortKernelContext, runStarts, runLengths, chunkPositions, slots); if (supplyPostIndices) { final LongChunk postKeyIndices = - postShiftKeyIndicesChunk.asKeyIndicesChunk(); + postShiftKeyIndicesChunk.asKeyIndicesChunk(); permutedKeyIndices.setSize(postKeyIndices.size()); - LongPermuteKernel.permuteInput(postKeyIndices, chunkPositions, - permutedKeyIndices); + LongPermuteKernel.permuteInput(postKeyIndices, chunkPositions, permutedKeyIndices); } boolean anyOperatorModified = false; @@ -874,32 +812,27 @@ private void doSameSlotModifies(@NotNull final OrderedKeys preShiftKeyIndicesToM } if (operatorsToProcessIndicesOnly[oi]) { - ac.operators[oi].modifyIndices(bucketedContexts[oi], permutedKeyIndices, - slots, runStarts, runLengths, - firstOperator ? modifiedSlots : slotsModifiedByOperator); + ac.operators[oi].modifyIndices(bucketedContexts[oi], permutedKeyIndices, slots, runStarts, + runLengths, firstOperator ? 
modifiedSlots : slotsModifiedByOperator); } else /* operatorsToProcess[oi] */ { final int inputSlot = ac.inputSlot(oi); if (inputSlot >= 0 && !chunkInitialized[inputSlot]) { - getAndPermuteChunk(ac.inputColumns[inputSlot], - getContexts[inputSlot], preShiftKeyIndicesChunk, true, - permuteKernels[inputSlot], chunkPositions, - workingChunks[inputSlot]); - getAndPermuteChunk(ac.inputColumns[inputSlot], - postGetContexts[inputSlot], postShiftKeyIndicesChunk, false, - permuteKernels[inputSlot], chunkPositions, - postWorkingChunks[inputSlot]); + getAndPermuteChunk(ac.inputColumns[inputSlot], getContexts[inputSlot], + preShiftKeyIndicesChunk, true, permuteKernels[inputSlot], chunkPositions, + workingChunks[inputSlot]); + getAndPermuteChunk(ac.inputColumns[inputSlot], postGetContexts[inputSlot], + postShiftKeyIndicesChunk, false, permuteKernels[inputSlot], chunkPositions, + postWorkingChunks[inputSlot]); chunkInitialized[inputSlot] = true; } ac.operators[oi].modifyChunk(bucketedContexts[oi], - inputSlot >= 0 ? workingChunks[inputSlot] : null, - inputSlot >= 0 ? postWorkingChunks[inputSlot] : null, - permutedKeyIndices, slots, runStarts, runLengths, - firstOperator ? modifiedSlots : slotsModifiedByOperator); + inputSlot >= 0 ? workingChunks[inputSlot] : null, + inputSlot >= 0 ? postWorkingChunks[inputSlot] : null, permutedKeyIndices, slots, + runStarts, runLengths, firstOperator ? 
modifiedSlots : slotsModifiedByOperator); } - anyOperatorModified = - updateModificationState(modifiedOperators, modifiedSlots, + anyOperatorModified = updateModificationState(modifiedOperators, modifiedSlots, slotsModifiedByOperator, anyOperatorModified, firstOperator, oi); firstOperator = false; } @@ -911,24 +844,20 @@ private void doSameSlotModifies(@NotNull final OrderedKeys preShiftKeyIndicesToM } } - private void doSameSlotModifyIndicesOnly( - @NotNull final OrderedKeys postShiftKeyIndicesToModify, - @NotNull final boolean[] operatorsToProcessIndicesOnly) { - try (final OrderedKeys.Iterator postShiftIterator = - postShiftKeyIndicesToModify.getOrderedKeysIterator()) { + private void doSameSlotModifyIndicesOnly(@NotNull final OrderedKeys postShiftKeyIndicesToModify, + @NotNull final boolean[] operatorsToProcessIndicesOnly) { + try (final OrderedKeys.Iterator postShiftIterator = postShiftKeyIndicesToModify.getOrderedKeysIterator()) { while (postShiftIterator.hasMore()) { final OrderedKeys postShiftKeyIndicesChunk = - postShiftIterator.getNextOrderedKeysWithLength(CHUNK_SIZE); + postShiftIterator.getNextOrderedKeysWithLength(CHUNK_SIZE); - incrementalStateManager.findModifications(pc, postShiftKeyIndicesChunk, - reinterpretedKeySources, slots); + incrementalStateManager.findModifications(pc, postShiftKeyIndicesChunk, reinterpretedKeySources, + slots); findSlotRuns(sortKernelContext, runStarts, runLengths, chunkPositions, slots); - final LongChunk postKeyIndices = - postShiftKeyIndicesChunk.asKeyIndicesChunk(); + final LongChunk postKeyIndices = postShiftKeyIndicesChunk.asKeyIndicesChunk(); permutedKeyIndices.setSize(postKeyIndices.size()); - LongPermuteKernel.permuteInput(postKeyIndices, chunkPositions, - permutedKeyIndices); + LongPermuteKernel.permuteInput(postKeyIndices, chunkPositions, permutedKeyIndices); boolean anyOperatorModified = false; boolean firstOperator = true; @@ -943,12 +872,10 @@ private void doSameSlotModifyIndicesOnly( 
setFalse(slotsModifiedByOperator, runStarts.size()); } - ac.operators[oi].modifyIndices(bucketedContexts[oi], permutedKeyIndices, - slots, runStarts, runLengths, - firstOperator ? modifiedSlots : slotsModifiedByOperator); + ac.operators[oi].modifyIndices(bucketedContexts[oi], permutedKeyIndices, slots, runStarts, + runLengths, firstOperator ? modifiedSlots : slotsModifiedByOperator); - anyOperatorModified = - updateModificationState(modifiedOperators, modifiedSlots, + anyOperatorModified = updateModificationState(modifiedOperators, modifiedSlots, slotsModifiedByOperator, anyOperatorModified, firstOperator, oi); firstOperator = false; } @@ -963,36 +890,36 @@ private void doSameSlotModifyIndicesOnly( private static class ModifySplitResult implements SafeCloseable { /** - * This is a partition of same-slot modifies for index keys that were not shifted. - * Needed for modifyChunk of input-modified operators that require indices, since they - * handle the shifted same-slot modifies in shiftChunk. + * This is a partition of same-slot modifies for index keys that were not shifted. Needed for modifyChunk of + * input-modified operators that require indices, since they handle the shifted same-slot modifies in + * shiftChunk. */ @Nullable private final ReadOnlyIndex unshiftedSameSlotIndices; /** - * This is all of same-slot modified, with index keys in pre-shift space. Needed for - * modifyChunk of input-modified operators that don't require indices. + * This is all of same-slot modified, with index keys in pre-shift space. Needed for modifyChunk of + * input-modified operators that don't require indices. */ @Nullable private final ReadOnlyIndex sameSlotIndicesPreShift; /** - * This is all of same-slot modified, with index keys in post-shift space. Needed for - * modifyChunk of input-modified operators that don't require indices, and for - * modifyIndices of operators that require indices but don't have any inputs modified. 
+ * This is all of same-slot modified, with index keys in post-shift space. Needed for modifyChunk of + * input-modified operators that don't require indices, and for modifyIndices of operators that require + * indices but don't have any inputs modified. */ @Nullable private final ReadOnlyIndex sameSlotIndicesPostShift; /** - * This is all key change modifies, with index keys in post-shift space. Needed for - * addChunk to process key changes for all operators. + * This is all key change modifies, with index keys in post-shift space. Needed for addChunk to process key + * changes for all operators. */ @NotNull private final ReadOnlyIndex keyChangeIndicesPostShift; private ModifySplitResult(@Nullable final ReadOnlyIndex unshiftedSameSlotIndices, - @Nullable final ReadOnlyIndex sameSlotIndicesPreShift, - @Nullable final ReadOnlyIndex sameSlotIndicesPostShift, - @NotNull final ReadOnlyIndex keyChangeIndicesPostShift) { + @Nullable final ReadOnlyIndex sameSlotIndicesPreShift, + @Nullable final ReadOnlyIndex sameSlotIndicesPostShift, + @NotNull final ReadOnlyIndex keyChangeIndicesPostShift) { this.unshiftedSameSlotIndices = unshiftedSameSlotIndices; this.sameSlotIndicesPreShift = sameSlotIndicesPreShift; this.sameSlotIndicesPostShift = sameSlotIndicesPostShift; @@ -1018,52 +945,47 @@ private ModifySplitResult splitKeyModificationsAndDoKeyChangeRemoves() { Require.requirement(keysModified, "keysModified"); final boolean needUnshiftedSameSlotIndices = processShifts; - final boolean needSameSlotIndicesPreShift = - !processShifts && od.anyOperatorHasModifiedInputColumns; - final boolean needSameSlotIndicesPostShift = - !processShifts && (od.anyOperatorHasModifiedInputColumns + final boolean needSameSlotIndicesPreShift = !processShifts && od.anyOperatorHasModifiedInputColumns; + final boolean needSameSlotIndicesPostShift = !processShifts && (od.anyOperatorHasModifiedInputColumns || od.anyOperatorWithoutModifiedInputColumnsRequiresIndices || keysModified); final 
Index.SequentialBuilder unshiftedSameSlotIndicesBuilder = - needUnshiftedSameSlotIndices ? Index.CURRENT_FACTORY.getSequentialBuilder() : null; + needUnshiftedSameSlotIndices ? Index.CURRENT_FACTORY.getSequentialBuilder() : null; final Index.SequentialBuilder sameSlotIndicesPreShiftBuilder = - needSameSlotIndicesPreShift ? Index.CURRENT_FACTORY.getSequentialBuilder() : null; + needSameSlotIndicesPreShift ? Index.CURRENT_FACTORY.getSequentialBuilder() : null; final Index.SequentialBuilder sameSlotIndicesPostShiftBuilder = - needSameSlotIndicesPostShift ? Index.CURRENT_FACTORY.getSequentialBuilder() : null; + needSameSlotIndicesPostShift ? Index.CURRENT_FACTORY.getSequentialBuilder() : null; final Index.SequentialBuilder keyChangeIndicesPostShiftBuilder = - Index.CURRENT_FACTORY.getSequentialBuilder(); + Index.CURRENT_FACTORY.getSequentialBuilder(); - try ( - final OrderedKeys.Iterator modifiedPreShiftIterator = + try (final OrderedKeys.Iterator modifiedPreShiftIterator = upstream.getModifiedPreShift().getOrderedKeysIterator(); - final OrderedKeys.Iterator modifiedPostShiftIterator = - shifted ? upstream.modified.getOrderedKeysIterator() : null; - final WritableIntChunk postSlots = - WritableIntChunk.makeWritableChunk(bc.chunkSize)) { + final OrderedKeys.Iterator modifiedPostShiftIterator = + shifted ? upstream.modified.getOrderedKeysIterator() : null; + final WritableIntChunk postSlots = WritableIntChunk.makeWritableChunk(bc.chunkSize)) { - // Hijacking postPermutedKeyIndices because it's not used in this loop; the rename - // hopefully makes the code much clearer! + // Hijacking postPermutedKeyIndices because it's not used in this loop; the rename hopefully makes the + // code much clearer! 
final WritableLongChunk removedKeyIndices = - WritableLongChunk.downcast(postPermutedKeyIndices); + WritableLongChunk.downcast(postPermutedKeyIndices); while (modifiedPreShiftIterator.hasMore()) { final OrderedKeys modifiedPreShiftChunk = - modifiedPreShiftIterator.getNextOrderedKeysWithLength(CHUNK_SIZE); + modifiedPreShiftIterator.getNextOrderedKeysWithLength(CHUNK_SIZE); final OrderedKeys modifiedPostShiftChunk = - shifted ? modifiedPostShiftIterator.getNextOrderedKeysWithLength(CHUNK_SIZE) - : modifiedPreShiftChunk; + shifted ? modifiedPostShiftIterator.getNextOrderedKeysWithLength(CHUNK_SIZE) + : modifiedPreShiftChunk; - incrementalStateManager.remove(pc, modifiedPreShiftChunk, - reinterpretedKeySources, slots, emptiedSlots); + incrementalStateManager.remove(pc, modifiedPreShiftChunk, reinterpretedKeySources, slots, + emptiedSlots); emptiedStatesBuilder.addKeyIndicesChunk(emptiedSlots); - incrementalStateManager.addForUpdate(bc, modifiedPostShiftChunk, - reinterpretedKeySources, outputPosition, postSlots, reincarnatedSlots); + incrementalStateManager.addForUpdate(bc, modifiedPostShiftChunk, reinterpretedKeySources, + outputPosition, postSlots, reincarnatedSlots); reincarnatedStatesBuilder.addKeyIndicesChunk(reincarnatedSlots); - final LongChunk preShiftIndices = - modifiedPreShiftChunk.asKeyIndicesChunk(); + final LongChunk preShiftIndices = modifiedPreShiftChunk.asKeyIndicesChunk(); final LongChunk postShiftIndices = - shifted ? modifiedPostShiftChunk.asKeyIndicesChunk() : preShiftIndices; + shifted ? 
modifiedPostShiftChunk.asKeyIndicesChunk() : preShiftIndices; final int chunkSize = slots.size(); int numKeyChanges = 0; @@ -1092,17 +1014,17 @@ private ModifySplitResult splitKeyModificationsAndDoKeyChangeRemoves() { slots.setSize(numKeyChanges); removedKeyIndices.setSize(numKeyChanges); try (final OrderedKeys keyIndicesToRemoveChunk = - OrderedKeys.wrapKeyIndicesChunkAsOrderedKeys(removedKeyIndices)) { + OrderedKeys.wrapKeyIndicesChunkAsOrderedKeys(removedKeyIndices)) { propagateRemovesToOperators(keyIndicesToRemoveChunk, slots); } } } return new ModifySplitResult( - needUnshiftedSameSlotIndices ? unshiftedSameSlotIndicesBuilder.getIndex() : null, - needSameSlotIndicesPreShift ? sameSlotIndicesPreShiftBuilder.getIndex() : null, - needSameSlotIndicesPostShift ? sameSlotIndicesPostShiftBuilder.getIndex() : null, - keyChangeIndicesPostShiftBuilder.getIndex()); + needUnshiftedSameSlotIndices ? unshiftedSameSlotIndicesBuilder.getIndex() : null, + needSameSlotIndicesPreShift ? sameSlotIndicesPreShiftBuilder.getIndex() : null, + needSameSlotIndicesPostShift ? 
sameSlotIndicesPostShiftBuilder.getIndex() : null, + keyChangeIndicesPostShiftBuilder.getIndex()); } private ReadOnlyIndex getUnshiftedModifies() { @@ -1112,9 +1034,9 @@ private ReadOnlyIndex getUnshiftedModifies() { } private static boolean updateModificationState(@NotNull final boolean[] modifiedOperators, - @NotNull final WritableBooleanChunk modifiedSlots, - @NotNull final BooleanChunk slotsModifiedByOperator, boolean operatorModified, - final boolean firstOperator, final int operatorIndex) { + @NotNull final WritableBooleanChunk modifiedSlots, + @NotNull final BooleanChunk slotsModifiedByOperator, boolean operatorModified, + final boolean firstOperator, final int operatorIndex) { final boolean chunkModifiedSlots; if (firstOperator) { chunkModifiedSlots = anyTrue(modifiedSlots); @@ -1128,26 +1050,20 @@ private static boolean updateModificationState(@NotNull final boolean[] modified } } - private static ReadOnlyIndex extractUnshiftedModifiesFromUpstream( - @NotNull final Update upstream) { - final Index.SequentialBuilder unshiftedModifiesBuilder = - Index.CURRENT_FACTORY.getSequentialBuilder(); + private static ReadOnlyIndex extractUnshiftedModifiesFromUpstream(@NotNull final Update upstream) { + final Index.SequentialBuilder unshiftedModifiesBuilder = Index.CURRENT_FACTORY.getSequentialBuilder(); - try ( - final OrderedKeys.Iterator modifiedPreShiftIterator = + try (final OrderedKeys.Iterator modifiedPreShiftIterator = upstream.getModifiedPreShift().getOrderedKeysIterator(); - final OrderedKeys.Iterator modifiedPostShiftIterator = - upstream.modified.getOrderedKeysIterator()) { + final OrderedKeys.Iterator modifiedPostShiftIterator = upstream.modified.getOrderedKeysIterator()) { while (modifiedPreShiftIterator.hasMore()) { final OrderedKeys modifiedPreShiftChunk = - modifiedPreShiftIterator.getNextOrderedKeysWithLength(CHUNK_SIZE); + modifiedPreShiftIterator.getNextOrderedKeysWithLength(CHUNK_SIZE); final OrderedKeys modifiedPostShiftChunk = - 
modifiedPostShiftIterator.getNextOrderedKeysWithLength(CHUNK_SIZE); + modifiedPostShiftIterator.getNextOrderedKeysWithLength(CHUNK_SIZE); - final LongChunk preShiftIndices = - modifiedPreShiftChunk.asKeyIndicesChunk(); - final LongChunk postShiftIndices = - modifiedPostShiftChunk.asKeyIndicesChunk(); + final LongChunk preShiftIndices = modifiedPreShiftChunk.asKeyIndicesChunk(); + final LongChunk postShiftIndices = modifiedPostShiftChunk.asKeyIndicesChunk(); final int chunkSize = preShiftIndices.size(); for (int ki = 0; ki < chunkSize; ++ki) { @@ -1165,17 +1081,17 @@ private static ReadOnlyIndex extractUnshiftedModifiesFromUpstream( } private static void extractDownstreamModifiedColumnSet(@NotNull final Update downstream, - @NotNull final ModifiedColumnSet resultModifiedColumnSet, - @NotNull final boolean[] modifiedOperators, - @NotNull final ModifiedColumnSet updateUpstreamModifiedColumnSet, - @NotNull final UnaryOperator[] resultModifiedColumnSetFactories) { + @NotNull final ModifiedColumnSet resultModifiedColumnSet, + @NotNull final boolean[] modifiedOperators, + @NotNull final ModifiedColumnSet updateUpstreamModifiedColumnSet, + @NotNull final UnaryOperator[] resultModifiedColumnSetFactories) { if (downstream.modified.nonempty()) { downstream.modifiedColumnSet = resultModifiedColumnSet; downstream.modifiedColumnSet.clear(); for (int oi = 0; oi < modifiedOperators.length; ++oi) { if (modifiedOperators[oi]) { - downstream.modifiedColumnSet.setAll(resultModifiedColumnSetFactories[oi] - .apply(updateUpstreamModifiedColumnSet)); + downstream.modifiedColumnSet + .setAll(resultModifiedColumnSetFactories[oi].apply(updateUpstreamModifiedColumnSet)); } } } else { @@ -1201,9 +1117,9 @@ private static class OperatorDivision { private final boolean[] operatorsThatRequireIndices; private OperatorDivision(@NotNull final AggregationContext ac, - final boolean upstreamModified, - @NotNull final ModifiedColumnSet updateUpstreamModifiedColumnSet, - @NotNull final 
ModifiedColumnSet[] operatorInputUpstreamModifiedColumnSets) { + final boolean upstreamModified, + @NotNull final ModifiedColumnSet updateUpstreamModifiedColumnSet, + @NotNull final ModifiedColumnSet[] operatorInputUpstreamModifiedColumnSets) { operatorsThatRequireIndices = new boolean[ac.size()]; for (int oi = 0; oi < ac.size(); ++oi) { operatorsThatRequireIndices[oi] = ac.operators[oi].requiresIndices(); @@ -1218,8 +1134,7 @@ private OperatorDivision(@NotNull final AggregationContext ac, if (upstreamModified) { for (int oi = 0; oi < ac.size(); ++oi) { - if (updateUpstreamModifiedColumnSet - .containsAny(operatorInputUpstreamModifiedColumnSets[oi])) { + if (updateUpstreamModifiedColumnSet.containsAny(operatorInputUpstreamModifiedColumnSets[oi])) { operatorsWithModifiedInputColumns[oi] = true; anyOperatorHasModifiedInputColumnsTemp = true; if (!ac.operators[oi].requiresIndices()) { @@ -1234,21 +1149,20 @@ private OperatorDivision(@NotNull final AggregationContext ac, } anyOperatorHasModifiedInputColumns = anyOperatorHasModifiedInputColumnsTemp; - anyOperatorWithModifiedInputColumnsIgnoresIndices = - anyOperatorWithModifiedInputColumnsIgnoresIndicesTemp; + anyOperatorWithModifiedInputColumnsIgnoresIndices = anyOperatorWithModifiedInputColumnsIgnoresIndicesTemp; anyOperatorWithoutModifiedInputColumnsRequiresIndices = - anyOperatorWithoutModifiedInputColumnsRequiresIndicesTemp; + anyOperatorWithoutModifiedInputColumnsRequiresIndicesTemp; } } private static void processUpstreamShifts(Update upstream, ReadOnlyIndex useIndex, - WritableLongChunk preKeyIndices, - WritableLongChunk postKeyIndices, Runnable applyChunkedShift) { + WritableLongChunk preKeyIndices, WritableLongChunk postKeyIndices, + Runnable applyChunkedShift) { Index.SearchIterator postOkForward = null; Index.SearchIterator postOkReverse = null; - boolean lastPolarityReversed = false; // the initial value doesn't matter, because we'll - // just have a noop apply in the worst case + boolean 
lastPolarityReversed = false; // the initial value doesn't matter, because we'll just have a noop apply + // in the worst case int writePosition = resetWritePosition(lastPolarityReversed, preKeyIndices, postKeyIndices); final IndexShiftData.Iterator shiftIt = upstream.shifted.applyIterator(); @@ -1258,8 +1172,8 @@ private static void processUpstreamShifts(Update upstream, ReadOnlyIndex useInde final boolean polarityReversed = shiftIt.polarityReversed(); if (polarityReversed != lastPolarityReversed) { // if our polarity changed, we must flush out the shifts that are pending - maybeApplyChunkedShift(applyChunkedShift, preKeyIndices, postKeyIndices, - lastPolarityReversed, writePosition); + maybeApplyChunkedShift(applyChunkedShift, preKeyIndices, postKeyIndices, lastPolarityReversed, + writePosition); writePosition = resetWritePosition(polarityReversed, preKeyIndices, postKeyIndices); } @@ -1281,10 +1195,9 @@ private static void processUpstreamShifts(Update upstream, ReadOnlyIndex useInde if (writePosition == 0) { // once we fill a chunk, we must process the shifts - maybeApplyChunkedShift(applyChunkedShift, preKeyIndices, postKeyIndices, - polarityReversed, writePosition); - writePosition = - resetWritePosition(polarityReversed, preKeyIndices, postKeyIndices); + maybeApplyChunkedShift(applyChunkedShift, preKeyIndices, postKeyIndices, polarityReversed, + writePosition); + writePosition = resetWritePosition(polarityReversed, preKeyIndices, postKeyIndices); } if (postOkReverse.hasNext()) { @@ -1302,8 +1215,7 @@ private static void processUpstreamShifts(Update upstream, ReadOnlyIndex useInde if (postOkForward == null) { postOkForward = useIndex.searchIterator(); } - // we can apply these in a forward direction as normal, we just need to accumulate - // into our key chunks + // we can apply these in a forward direction as normal, we just need to accumulate into our key chunks if (postOkForward.advance(beginRange)) { long idx; while ((idx = postOkForward.currentValue()) <= 
endRange) { @@ -1312,10 +1224,9 @@ private static void processUpstreamShifts(Update upstream, ReadOnlyIndex useInde if (postKeyIndices.size() == postKeyIndices.capacity()) { // once we fill a chunk, we must process the shifts - maybeApplyChunkedShift(applyChunkedShift, preKeyIndices, postKeyIndices, - polarityReversed, writePosition); - writePosition = - resetWritePosition(polarityReversed, preKeyIndices, postKeyIndices); + maybeApplyChunkedShift(applyChunkedShift, preKeyIndices, postKeyIndices, polarityReversed, + writePosition); + writePosition = resetWritePosition(polarityReversed, preKeyIndices, postKeyIndices); } if (postOkForward.hasNext()) { @@ -1330,8 +1241,7 @@ private static void processUpstreamShifts(Update upstream, ReadOnlyIndex useInde lastPolarityReversed = polarityReversed; } // after we are done, we should process the shift - maybeApplyChunkedShift(applyChunkedShift, preKeyIndices, postKeyIndices, - lastPolarityReversed, writePosition); + maybeApplyChunkedShift(applyChunkedShift, preKeyIndices, postKeyIndices, lastPolarityReversed, writePosition); // close our iterators if (postOkReverse != null) { postOkReverse.close(); @@ -1341,9 +1251,8 @@ private static void processUpstreamShifts(Update upstream, ReadOnlyIndex useInde } } - private static int resetWritePosition(boolean polarityReversed, - WritableLongChunk preKeyIndices, - WritableLongChunk postKeyIndices) { + private static int resetWritePosition(boolean polarityReversed, WritableLongChunk preKeyIndices, + WritableLongChunk postKeyIndices) { if (polarityReversed) { postKeyIndices.setSize(postKeyIndices.capacity()); if (preKeyIndices != null) { @@ -1360,21 +1269,18 @@ private static int resetWritePosition(boolean polarityReversed, } private static void maybeApplyChunkedShift(Runnable applyChunkedShift, - WritableLongChunk preKeyIndices, - WritableLongChunk postKeyIndices, boolean polarityReversed, - int writePosition) { + WritableLongChunk preKeyIndices, WritableLongChunk postKeyIndices, + 
boolean polarityReversed, int writePosition) { if (polarityReversed) { int chunkSize = postKeyIndices.capacity(); if (writePosition == chunkSize) { return; } if (writePosition > 0) { - postKeyIndices.copyFromTypedChunk(postKeyIndices, writePosition, 0, - chunkSize - writePosition); + postKeyIndices.copyFromTypedChunk(postKeyIndices, writePosition, 0, chunkSize - writePosition); postKeyIndices.setSize(chunkSize - writePosition); if (preKeyIndices != null) { - preKeyIndices.copyFromTypedChunk(preKeyIndices, writePosition, 0, - chunkSize - writePosition); + preKeyIndices.copyFromTypedChunk(preKeyIndices, writePosition, 0, chunkSize - writePosition); preKeyIndices.setSize(chunkSize - writePosition); } } @@ -1412,9 +1318,9 @@ private static boolean anyTrue(BooleanChunk operatorSlots) { } private static void findSlotRuns( - IntIntTimsortKernel.IntIntSortKernelContext sortKernelContext, - WritableIntChunk runStarts, WritableIntChunk runLengths, - WritableIntChunk chunkPosition, WritableIntChunk slots) { + IntIntTimsortKernel.IntIntSortKernelContext sortKernelContext, + WritableIntChunk runStarts, WritableIntChunk runLengths, + WritableIntChunk chunkPosition, WritableIntChunk slots) { chunkPosition.setSize(slots.size()); ChunkUtils.fillInOrder(chunkPosition); IntIntTimsortKernel.sort(sortKernelContext, chunkPosition, slots); @@ -1424,10 +1330,9 @@ private static void findSlotRuns( /** * Get values from the inputColumn, and permute them into workingChunk. 
*/ - private static void getAndPermuteChunk(ChunkSource.WithPrev inputColumn, - ChunkSource.GetContext getContext, OrderedKeys chunkOk, boolean usePrev, - PermuteKernel permuteKernel, IntChunk chunkPosition, - WritableChunk workingChunk) { + private static void getAndPermuteChunk(ChunkSource.WithPrev inputColumn, ChunkSource.GetContext getContext, + OrderedKeys chunkOk, boolean usePrev, PermuteKernel permuteKernel, IntChunk chunkPosition, + WritableChunk workingChunk) { final Chunk values; if (inputColumn == null) { values = null; @@ -1444,9 +1349,8 @@ private static void getAndPermuteChunk(ChunkSource.WithPrev inputColumn, } } - private static void modifySlots(Index.RandomBuilder modifiedBuilder, - IntChunk runStarts, WritableIntChunk slots, - BooleanChunk modified) { + private static void modifySlots(Index.RandomBuilder modifiedBuilder, IntChunk runStarts, + WritableIntChunk slots, BooleanChunk modified) { int outIndex = 0; for (int runIndex = 0; runIndex < runStarts.size(); ++runIndex) { if (modified.get(runIndex)) { @@ -1460,13 +1364,12 @@ private static void modifySlots(Index.RandomBuilder modifiedBuilder, } @NotNull - private static QueryTable staticGroupedAggregation(QueryTable withView, String keyName, - ColumnSource keySource, AggregationContext ac) { + private static QueryTable staticGroupedAggregation(QueryTable withView, String keyName, ColumnSource keySource, + AggregationContext ac) { final Pair> groupKeyIndexTable; final Map grouping = withView.getIndex().getGrouping(keySource); // noinspection unchecked - groupKeyIndexTable = - AbstractColumnSource.groupingToFlatSources((ColumnSource) keySource, grouping); + groupKeyIndexTable = AbstractColumnSource.groupingToFlatSources((ColumnSource) keySource, grouping); final int responsiveGroups = grouping.size(); final Map> resultColumnSourceMap = new LinkedHashMap<>(); @@ -1475,28 +1378,25 @@ private static QueryTable staticGroupedAggregation(QueryTable withView, String k doGroupedAddition(ac, 
groupKeyIndexTable, responsiveGroups); - final QueryTable result = - new QueryTable(Index.FACTORY.getFlatIndex(responsiveGroups), resultColumnSourceMap); + final QueryTable result = new QueryTable(Index.FACTORY.getFlatIndex(responsiveGroups), resultColumnSourceMap); ac.propagateInitialStateToOperators(result); - final ReverseLookupListener rll = - ReverseLookupListener.makeReverseLookupListenerWithSnapshot(result, keyName); + final ReverseLookupListener rll = ReverseLookupListener.makeReverseLookupListenerWithSnapshot(result, keyName); ac.setReverseLookupFunction(k -> (int) rll.get(k)); return ac.transformResult(result); } private static void doGroupedAddition(AggregationContext ac, - Pair> groupKeyIndexTable, - int responsiveGroups) { + Pair> groupKeyIndexTable, int responsiveGroups) { final boolean indicesRequired = ac.requiresIndices(); final ColumnSource.GetContext[] getContexts = new ColumnSource.GetContext[ac.size()]; final IterativeChunkedAggregationOperator.SingletonContext[] operatorContexts = - new IterativeChunkedAggregationOperator.SingletonContext[ac.size()]; + new IterativeChunkedAggregationOperator.SingletonContext[ac.size()]; try (final SafeCloseableArray ignored = new SafeCloseableArray<>(getContexts); - final SafeCloseable ignored2 = new SafeCloseableArray<>(operatorContexts); - final SharedContext sharedContext = SharedContext.makeSharedContext()) { + final SafeCloseable ignored2 = new SafeCloseableArray<>(operatorContexts); + final SharedContext sharedContext = SharedContext.makeSharedContext()) { ac.ensureCapacity(responsiveGroups); // we don't know how many things are in the groups, so we have to allocate a large chunk ac.initializeGetContexts(sharedContext, getContexts, CHUNK_SIZE); @@ -1519,11 +1419,10 @@ private static void doGroupedAddition(AggregationContext ac, final Chunk[] workingChunks = new Chunk[ac.size()]; while (okit.hasMore()) { - final OrderedKeys chunkOk = - okit.getNextOrderedKeysWithLength(CHUNK_SIZE); + final OrderedKeys 
chunkOk = okit.getNextOrderedKeysWithLength(CHUNK_SIZE); final int chunkSize = chunkOk.intSize(); final LongChunk keyIndices = - indicesRequired ? chunkOk.asKeyIndicesChunk() : null; + indicesRequired ? chunkOk.asKeyIndicesChunk() : null; sharedContext.reset(); Arrays.fill(workingChunks, null); @@ -1532,11 +1431,10 @@ private static void doGroupedAddition(AggregationContext ac, final int inputSlot = ac.inputSlot(oi); if (inputSlot == oi) { workingChunks[inputSlot] = ac.inputColumns[oi] == null ? null - : ac.inputColumns[oi].getChunk(getContexts[oi], chunkOk); + : ac.inputColumns[oi].getChunk(getContexts[oi], chunkOk); } ac.operators[oi].addChunk(operatorContexts[oi], chunkSize, - inputSlot < 0 ? null : workingChunks[inputSlot], keyIndices, - ii); + inputSlot < 0 ? null : workingChunks[inputSlot], keyIndices, ii); } } } @@ -1546,23 +1444,22 @@ private static void doGroupedAddition(AggregationContext ac, } private static void initialBucketedKeyAddition(QueryTable withView, - ColumnSource[] reinterpretedKeySources, - AggregationContext ac, - PermuteKernel[] permuteKernels, - ChunkedOperatorAggregationStateManager stateManager, - MutableInt outputPosition, - boolean usePrev) { + ColumnSource[] reinterpretedKeySources, + AggregationContext ac, + PermuteKernel[] permuteKernels, + ChunkedOperatorAggregationStateManager stateManager, + MutableInt outputPosition, + boolean usePrev) { final ChunkSource.GetContext[] getContexts = new ChunkSource.GetContext[ac.size()]; // noinspection unchecked final WritableChunk[] workingChunks = new WritableChunk[ac.size()]; final IterativeChunkedAggregationOperator.BucketedContext[] bucketedContexts = - new IterativeChunkedAggregationOperator.BucketedContext[ac.size()]; + new IterativeChunkedAggregationOperator.BucketedContext[ac.size()]; final ColumnSource[] buildSources; if (usePrev) { buildSources = Arrays.stream(reinterpretedKeySources) - .map((UnaryOperator>) PrevColumnSource::new) - .toArray(ColumnSource[]::new); + 
.map((UnaryOperator>) PrevColumnSource::new).toArray(ColumnSource[]::new); } else { buildSources = reinterpretedKeySources; } @@ -1575,29 +1472,23 @@ private static void initialBucketedKeyAddition(QueryTable withView, final int chunkSize = chunkSize(index.size()); - try ( - final SafeCloseable bc = - stateManager.makeAggregationStateBuildContext(buildSources, chunkSize); - final SafeCloseable ignored1 = usePrev ? index : null; - final SafeCloseable ignored2 = new SafeCloseableArray<>(getContexts); - final SafeCloseable ignored3 = new SafeCloseableArray<>(workingChunks); - final SafeCloseable ignored4 = new SafeCloseableArray<>(bucketedContexts); - final OrderedKeys.Iterator okIt = index.getOrderedKeysIterator(); - final WritableIntChunk outputPositions = - WritableIntChunk.makeWritableChunk(chunkSize); - final WritableIntChunk chunkPosition = - WritableIntChunk.makeWritableChunk(chunkSize); - final SharedContext sharedContext = SharedContext.makeSharedContext(); - final IntIntTimsortKernel.IntIntSortKernelContext sortKernelContext = - IntIntTimsortKernel.createContext(chunkSize); - final WritableIntChunk runStarts = - WritableIntChunk.makeWritableChunk(chunkSize); - final WritableIntChunk runLengths = - WritableIntChunk.makeWritableChunk(chunkSize); - final WritableLongChunk permutedKeyIndices = - ac.requiresIndices() ? WritableLongChunk.makeWritableChunk(chunkSize) : null; - final WritableBooleanChunk unusedModifiedSlots = - WritableBooleanChunk.makeWritableChunk(chunkSize)) { + try (final SafeCloseable bc = stateManager.makeAggregationStateBuildContext(buildSources, chunkSize); + final SafeCloseable ignored1 = usePrev ? 
index : null; + final SafeCloseable ignored2 = new SafeCloseableArray<>(getContexts); + final SafeCloseable ignored3 = new SafeCloseableArray<>(workingChunks); + final SafeCloseable ignored4 = new SafeCloseableArray<>(bucketedContexts); + final OrderedKeys.Iterator okIt = index.getOrderedKeysIterator(); + final WritableIntChunk outputPositions = WritableIntChunk.makeWritableChunk(chunkSize); + final WritableIntChunk chunkPosition = WritableIntChunk.makeWritableChunk(chunkSize); + final SharedContext sharedContext = SharedContext.makeSharedContext(); + final IntIntTimsortKernel.IntIntSortKernelContext sortKernelContext = + IntIntTimsortKernel.createContext(chunkSize); + final WritableIntChunk runStarts = WritableIntChunk.makeWritableChunk(chunkSize); + final WritableIntChunk runLengths = WritableIntChunk.makeWritableChunk(chunkSize); + final WritableLongChunk permutedKeyIndices = + ac.requiresIndices() ? WritableLongChunk.makeWritableChunk(chunkSize) : null; + final WritableBooleanChunk unusedModifiedSlots = + WritableBooleanChunk.makeWritableChunk(chunkSize)) { ac.initializeGetContexts(sharedContext, getContexts, chunkSize); ac.initializeWorkingChunks(workingChunks, chunkSize); ac.initializeBucketedContexts(bucketedContexts, chunkSize); @@ -1610,8 +1501,7 @@ private static void initialBucketedKeyAddition(QueryTable withView, ac.ensureCapacity(outputPosition.intValue()); - findSlotRuns(sortKernelContext, runStarts, runLengths, chunkPosition, - outputPositions); + findSlotRuns(sortKernelContext, runStarts, runLengths, chunkPosition, outputPositions); if (permutedKeyIndices != null) { final LongChunk keyIndices = chunkOk.asKeyIndicesChunk(); @@ -1621,30 +1511,28 @@ private static void initialBucketedKeyAddition(QueryTable withView, for (int ii = 0; ii < ac.size(); ++ii) { final int inputSlot = ac.inputSlot(ii); if (ii == inputSlot) { - getAndPermuteChunk(ac.inputColumns[ii], getContexts[ii], chunkOk, usePrev, - permuteKernels[ii], chunkPosition, workingChunks[ii]); + 
getAndPermuteChunk(ac.inputColumns[ii], getContexts[ii], chunkOk, usePrev, permuteKernels[ii], + chunkPosition, workingChunks[ii]); } - ac.operators[ii].addChunk(bucketedContexts[ii], - inputSlot >= 0 ? workingChunks[inputSlot] : null, permutedKeyIndices, - outputPositions, runStarts, runLengths, unusedModifiedSlots); + ac.operators[ii].addChunk(bucketedContexts[ii], inputSlot >= 0 ? workingChunks[inputSlot] : null, + permutedKeyIndices, outputPositions, runStarts, runLengths, unusedModifiedSlots); } } } } private static void initialGroupedKeyAddition(QueryTable withView, - ColumnSource[] reinterpretedKeySources, - AggregationContext ac, - IncrementalChunkedOperatorAggregationStateManager stateManager, - MutableInt outputPosition, - boolean usePrev) { + ColumnSource[] reinterpretedKeySources, + AggregationContext ac, + IncrementalChunkedOperatorAggregationStateManager stateManager, + MutableInt outputPosition, + boolean usePrev) { final Pair> groupKeyIndexTable; - final Map grouping = - usePrev ? withView.getIndex().getPrevGrouping(reinterpretedKeySources[0]) + final Map grouping = usePrev ? 
withView.getIndex().getPrevGrouping(reinterpretedKeySources[0]) : withView.getIndex().getGrouping(reinterpretedKeySources[0]); // noinspection unchecked - groupKeyIndexTable = AbstractColumnSource - .groupingToFlatSources((ColumnSource) reinterpretedKeySources[0], grouping); + groupKeyIndexTable = + AbstractColumnSource.groupingToFlatSources((ColumnSource) reinterpretedKeySources[0], grouping); final int responsiveGroups = grouping.size(); if (responsiveGroups == 0) { @@ -1655,20 +1543,17 @@ private static void initialGroupedKeyAddition(QueryTable withView, final ColumnSource[] groupedFlatKeySource = {groupKeyIndexTable.first}; - try ( - final SafeCloseable bc = stateManager - .makeAggregationStateBuildContext(groupedFlatKeySource, responsiveGroups); - final OrderedKeys ok = OrderedKeys.forRange(0, responsiveGroups - 1); - final OrderedKeys.Iterator okIt = ok.getOrderedKeysIterator(); - final WritableIntChunk outputPositions = - WritableIntChunk.makeWritableChunk(responsiveGroups)) { + try (final SafeCloseable bc = + stateManager.makeAggregationStateBuildContext(groupedFlatKeySource, responsiveGroups); + final OrderedKeys ok = OrderedKeys.forRange(0, responsiveGroups - 1); + final OrderedKeys.Iterator okIt = ok.getOrderedKeysIterator(); + final WritableIntChunk outputPositions = + WritableIntChunk.makeWritableChunk(responsiveGroups)) { while (okIt.hasMore()) { final OrderedKeys chunkOk = okIt.getNextOrderedKeysWithLength(CHUNK_SIZE); - stateManager.add(bc, chunkOk, groupedFlatKeySource, outputPosition, - outputPositions); + stateManager.add(bc, chunkOk, groupedFlatKeySource, outputPosition, outputPositions); } - Assert.eq(outputPosition.intValue(), "outputPosition.intValue()", responsiveGroups, - "responsiveGroups"); + Assert.eq(outputPosition.intValue(), "outputPosition.intValue()", responsiveGroups, "responsiveGroups"); } for (int ii = 0; ii < responsiveGroups; ++ii) { @@ -1682,23 +1567,22 @@ private static void initialGroupedKeyAddition(QueryTable withView, 
private static ReadOnlyIndex makeNewStatesIndex(final int first, final int last) { return first > last ? Index.CURRENT_FACTORY.getEmptyIndex() - : Index.CURRENT_FACTORY.getIndexByRange(first, last); + : Index.CURRENT_FACTORY.getIndexByRange(first, last); } - private static void copyKeyColumns(ColumnSource[] keyColumnsRaw, - WritableSource[] keyColumnsCopied, final ReadOnlyIndex copyValues) { + private static void copyKeyColumns(ColumnSource[] keyColumnsRaw, WritableSource[] keyColumnsCopied, + final ReadOnlyIndex copyValues) { if (copyValues.isEmpty()) { return; } final int chunkSize = chunkSize(copyValues.size()); - final ColumnSource.GetContext[] getContext = - new ColumnSource.GetContext[keyColumnsRaw.length]; + final ColumnSource.GetContext[] getContext = new ColumnSource.GetContext[keyColumnsRaw.length]; final WritableChunkSink.FillFromContext[] fillFromContexts = - new WritableChunkSink.FillFromContext[keyColumnsRaw.length]; + new WritableChunkSink.FillFromContext[keyColumnsRaw.length]; try (final OrderedKeys.Iterator okit = copyValues.getOrderedKeysIterator(); - final SharedContext sharedContext = SharedContext.makeSharedContext(); - final SafeCloseableArray ignored = new SafeCloseableArray<>(getContext); - final SafeCloseableArray ignored2 = new SafeCloseableArray<>(fillFromContexts)) { + final SharedContext sharedContext = SharedContext.makeSharedContext(); + final SafeCloseableArray ignored = new SafeCloseableArray<>(getContext); + final SafeCloseableArray ignored2 = new SafeCloseableArray<>(fillFromContexts)) { for (int ii = 0; ii < keyColumnsRaw.length; ++ii) { getContext[ii] = keyColumnsRaw[ii].makeGetContext(chunkSize, sharedContext); fillFromContexts[ii] = keyColumnsCopied[ii].makeFillFromContext(chunkSize); @@ -1709,8 +1593,7 @@ private static void copyKeyColumns(ColumnSource[] keyColumnsRaw, final OrderedKeys chunkOk = okit.getNextOrderedKeysWithLength(chunkSize); sharedContext.reset(); for (int ci = 0; ci < keyColumnsRaw.length; ci++) { - final 
Chunk values = - keyColumnsRaw[ci].getChunk(getContext[ci], chunkOk); + final Chunk values = keyColumnsRaw[ci].getChunk(getContext[ci], chunkOk); keyColumnsCopied[ci].fillFromChunk(fillFromContexts[ci], values, chunkOk); } } @@ -1718,7 +1601,7 @@ private static void copyKeyColumns(ColumnSource[] keyColumnsRaw, } private static QueryTable noKeyAggregation(ShiftAwareSwapListener swapListener, - AggregationContextFactory aggregationContextFactory, QueryTable table, boolean usePrev) { + AggregationContextFactory aggregationContextFactory, QueryTable table, boolean usePrev) { final AggregationContext ac = aggregationContextFactory.makeAggregationContext(table); final Map> resultColumnSourceMap = new LinkedHashMap<>(); @@ -1727,188 +1610,175 @@ private static QueryTable noKeyAggregation(ShiftAwareSwapListener swapListener, final boolean[] allColumns = new boolean[ac.size()]; Arrays.fill(allColumns, true); - // We intentionally update the 0 key even if we have no rows, so we'd better have capacity - // for it. + // We intentionally update the 0 key even if we have no rows, so we'd better have capacity for it. ac.ensureCapacity(1); - // we don't actually care about the modified columns here and it will never set anything to - // false, so it is safe + // we don't actually care about the modified columns here and it will never set anything to false, so it is safe // to use allColumns as the modified columns parameter final IterativeChunkedAggregationOperator.SingletonContext[] opContexts = - new IterativeChunkedAggregationOperator.SingletonContext[ac.size()]; + new IterativeChunkedAggregationOperator.SingletonContext[ac.size()]; final Index index = usePrev ? table.getIndex().getPrevIndex() : table.getIndex(); final int initialResultSize; try (final SafeCloseable ignored1 = new SafeCloseableArray<>(opContexts); - final SafeCloseable ignored2 = usePrev ? index : null) { + final SafeCloseable ignored2 = usePrev ? index : null) { initialResultSize = index.size() == 0 ? 
0 : 1; ac.initializeSingletonContexts(opContexts, index.size()); doNoKeyAddition(index, ac, opContexts, allColumns, usePrev, allColumns); } - final QueryTable result = - new QueryTable(Index.FACTORY.getFlatIndex(initialResultSize), resultColumnSourceMap); + final QueryTable result = new QueryTable(Index.FACTORY.getFlatIndex(initialResultSize), resultColumnSourceMap); ac.propagateInitialStateToOperators(result); if (table.isRefreshing()) { ac.startTrackingPrevValues(); final boolean isStream = table.isStream(); - final ShiftAwareListener listener = new BaseTable.ShiftAwareListenerImpl( - "by(" + aggregationContextFactory + ")", table, result) { - - final ModifiedColumnSet[] inputModifiedColumnSet = - ac.getInputModifiedColumnSets(table); - final UnaryOperator[] resultModifiedColumnSetFactories = - ac.initializeRefreshing(result, this); - - int lastSize = initialResultSize; - int statesCreated = initialResultSize; - - @Override - public void onUpdate(@NotNull final Update upstream) { - final Update upstreamToUse = isStream ? adjustForStreaming(upstream) : upstream; - if (upstreamToUse.empty()) { - return; - } - processNoKeyUpdate(upstreamToUse); - } + final ShiftAwareListener listener = + new BaseTable.ShiftAwareListenerImpl("by(" + aggregationContextFactory + ")", table, result) { + + final ModifiedColumnSet[] inputModifiedColumnSet = ac.getInputModifiedColumnSets(table); + final UnaryOperator[] resultModifiedColumnSetFactories = + ac.initializeRefreshing(result, this); + + int lastSize = initialResultSize; + int statesCreated = initialResultSize; - private void processNoKeyUpdate(@NotNull final Update upstream) { - ac.resetOperatorsForStep(upstream); - - final ModifiedColumnSet upstreamModifiedColumnSet = - upstream.modified.isEmpty() ? 
ModifiedColumnSet.EMPTY - : upstream.modifiedColumnSet; - - final IterativeChunkedAggregationOperator.SingletonContext[] opContexts = - new IterativeChunkedAggregationOperator.SingletonContext[ac.size()]; - try (final SafeCloseable ignored = new SafeCloseableArray<>(opContexts)) { - final OperatorDivision od = - new OperatorDivision(ac, upstream.modified.nonempty(), - upstreamModifiedColumnSet, inputModifiedColumnSet); - ac.initializeSingletonContexts(opContexts, upstream, - od.operatorsWithModifiedInputColumns); - - final boolean[] modifiedOperators = new boolean[ac.size()]; - // remove all the removals - if (upstream.removed.nonempty()) { - doNoKeyRemoval(upstream.removed, ac, opContexts, allColumns, - modifiedOperators); + @Override + public void onUpdate(@NotNull final Update upstream) { + final Update upstreamToUse = isStream ? adjustForStreaming(upstream) : upstream; + if (upstreamToUse.empty()) { + return; + } + processNoKeyUpdate(upstreamToUse); } - final boolean processShifts = - upstream.shifted.nonempty() && ac.requiresIndices(); - - if (upstream.modified.nonempty() && (od.anyOperatorHasModifiedInputColumns - || od.anyOperatorWithoutModifiedInputColumnsRequiresIndices)) { - if (processShifts) { - // Also handles shifted modifications for modified-input operators - // that require indices (if any) - doNoKeyShifts(table, upstream, ac, opContexts, - od.operatorsThatRequireIndices, modifiedOperators); - - try (final ReadOnlyIndex unshiftedModifies = - extractUnshiftedModifiesFromUpstream(upstream)) { - // Do unshifted modifies for everyone - doNoKeyModifications(unshiftedModifies, unshiftedModifies, ac, - opContexts, true, - od.operatorsWithModifiedInputColumns, - od.operatorsWithoutModifiedInputColumnsThatRequireIndices, - modifiedOperators); - - if (od.anyOperatorWithModifiedInputColumnsIgnoresIndices) { - // Do shifted modifies for index-only and modified-input - // operators that don't require indices - try ( - final ReadOnlyIndex shiftedModifiesPost = - 
upstream.modified.minus(unshiftedModifies); - final Index shiftedModifiesPre = - shiftedModifiesPost.clone()) { - upstream.shifted.unapply(shiftedModifiesPre); - doNoKeyModifications(shiftedModifiesPre, - shiftedModifiesPost, ac, opContexts, true, - od.operatorsWithModifiedInputColumnsThatIgnoreIndices, - od.operatorsThatRequireIndices, + private void processNoKeyUpdate(@NotNull final Update upstream) { + ac.resetOperatorsForStep(upstream); + + final ModifiedColumnSet upstreamModifiedColumnSet = + upstream.modified.isEmpty() ? ModifiedColumnSet.EMPTY : upstream.modifiedColumnSet; + + final IterativeChunkedAggregationOperator.SingletonContext[] opContexts = + new IterativeChunkedAggregationOperator.SingletonContext[ac.size()]; + try (final SafeCloseable ignored = new SafeCloseableArray<>(opContexts)) { + final OperatorDivision od = new OperatorDivision(ac, upstream.modified.nonempty(), + upstreamModifiedColumnSet, inputModifiedColumnSet); + ac.initializeSingletonContexts(opContexts, upstream, + od.operatorsWithModifiedInputColumns); + + final boolean[] modifiedOperators = new boolean[ac.size()]; + // remove all the removals + if (upstream.removed.nonempty()) { + doNoKeyRemoval(upstream.removed, ac, opContexts, allColumns, modifiedOperators); + } + + final boolean processShifts = upstream.shifted.nonempty() && ac.requiresIndices(); + + if (upstream.modified.nonempty() && (od.anyOperatorHasModifiedInputColumns + || od.anyOperatorWithoutModifiedInputColumnsRequiresIndices)) { + if (processShifts) { + // Also handles shifted modifications for modified-input operators that require + // indices (if any) + doNoKeyShifts(table, upstream, ac, opContexts, od.operatorsThatRequireIndices, modifiedOperators); + + try (final ReadOnlyIndex unshiftedModifies = + extractUnshiftedModifiesFromUpstream(upstream)) { + // Do unshifted modifies for everyone + doNoKeyModifications(unshiftedModifies, unshiftedModifies, ac, opContexts, + true, + od.operatorsWithModifiedInputColumns, + 
od.operatorsWithoutModifiedInputColumnsThatRequireIndices, + modifiedOperators); + + if (od.anyOperatorWithModifiedInputColumnsIgnoresIndices) { + // Do shifted modifies for index-only and modified-input operators that + // don't require indices + try (final ReadOnlyIndex shiftedModifiesPost = + upstream.modified.minus(unshiftedModifies); + final Index shiftedModifiesPre = shiftedModifiesPost.clone()) { + upstream.shifted.unapply(shiftedModifiesPre); + doNoKeyModifications(shiftedModifiesPre, shiftedModifiesPost, ac, + opContexts, true, + od.operatorsWithModifiedInputColumnsThatIgnoreIndices, + od.operatorsThatRequireIndices, + modifiedOperators); + } + } else if (ac.requiresIndices()) { + // Do shifted modifies for index-only operators + try (final ReadOnlyIndex shiftedModifiesPost = + upstream.modified.minus(unshiftedModifies)) { + doIndexOnlyNoKeyModifications(shiftedModifiesPost, ac, opContexts, + od.operatorsThatRequireIndices, modifiedOperators); + } + } } - } else if (ac.requiresIndices()) { - // Do shifted modifies for index-only operators - try (final ReadOnlyIndex shiftedModifiesPost = - upstream.modified.minus(unshiftedModifies)) { - doIndexOnlyNoKeyModifications(shiftedModifiesPost, ac, - opContexts, - od.operatorsThatRequireIndices, modifiedOperators); - } + } else if (od.anyOperatorHasModifiedInputColumns) { + doNoKeyModifications(upstream.getModifiedPreShift(), upstream.modified, ac, + opContexts, ac.requiresIndices(), + od.operatorsWithModifiedInputColumns, + od.operatorsWithoutModifiedInputColumnsThatRequireIndices, + modifiedOperators); + + } else { + doIndexOnlyNoKeyModifications(upstream.modified, ac, opContexts, + od.operatorsWithoutModifiedInputColumnsThatRequireIndices, + modifiedOperators); } + } else if (processShifts) { + doNoKeyShifts(table, upstream, ac, opContexts, od.operatorsThatRequireIndices, + modifiedOperators); } - } else if (od.anyOperatorHasModifiedInputColumns) { - doNoKeyModifications(upstream.getModifiedPreShift(), - 
upstream.modified, ac, opContexts, ac.requiresIndices(), - od.operatorsWithModifiedInputColumns, - od.operatorsWithoutModifiedInputColumnsThatRequireIndices, - modifiedOperators); - } else { - doIndexOnlyNoKeyModifications(upstream.modified, ac, opContexts, - od.operatorsWithoutModifiedInputColumnsThatRequireIndices, - modifiedOperators); - } - } else if (processShifts) { - doNoKeyShifts(table, upstream, ac, opContexts, - od.operatorsThatRequireIndices, modifiedOperators); - } + if (upstream.added.nonempty()) { + doNoKeyAddition(upstream.added, ac, opContexts, allColumns, false, + modifiedOperators); + } - if (upstream.added.nonempty()) { - doNoKeyAddition(upstream.added, ac, opContexts, allColumns, false, - modifiedOperators); - } + final int newResultSize = (!isStream || lastSize == 0) && table.size() == 0 ? 0 : 1; + final Update downstream = new Update(); + downstream.shifted = IndexShiftData.EMPTY; + if ((lastSize == 0 && newResultSize == 1)) { + downstream.added = Index.FACTORY.getIndexByValues(0); + downstream.removed = Index.FACTORY.getEmptyIndex(); + downstream.modified = Index.FACTORY.getEmptyIndex(); + result.getIndex().insert(0); + } else if (lastSize == 1 && newResultSize == 0) { + downstream.added = Index.FACTORY.getEmptyIndex(); + downstream.removed = Index.FACTORY.getIndexByValues(0); + downstream.modified = Index.FACTORY.getEmptyIndex(); + result.getIndex().remove(0); + } else { + if (!anyTrue(BooleanChunk.chunkWrap(modifiedOperators))) { + return; + } + downstream.added = Index.FACTORY.getEmptyIndex(); + downstream.removed = Index.FACTORY.getEmptyIndex(); + downstream.modified = Index.FACTORY.getIndexByValues(0); + } + lastSize = newResultSize; - final int newResultSize = - (!isStream || lastSize == 0) && table.size() == 0 ? 
0 : 1; - final Update downstream = new Update(); - downstream.shifted = IndexShiftData.EMPTY; - if ((lastSize == 0 && newResultSize == 1)) { - downstream.added = Index.FACTORY.getIndexByValues(0); - downstream.removed = Index.FACTORY.getEmptyIndex(); - downstream.modified = Index.FACTORY.getEmptyIndex(); - result.getIndex().insert(0); - } else if (lastSize == 1 && newResultSize == 0) { - downstream.added = Index.FACTORY.getEmptyIndex(); - downstream.removed = Index.FACTORY.getIndexByValues(0); - downstream.modified = Index.FACTORY.getEmptyIndex(); - result.getIndex().remove(0); - } else { - if (!anyTrue(BooleanChunk.chunkWrap(modifiedOperators))) { - return; + final int newStatesCreated = Math.max(statesCreated, newResultSize); + try (final ReadOnlyIndex newStates = + makeNewStatesIndex(statesCreated, newStatesCreated - 1)) { + ac.propagateChangesToOperators(downstream, newStates); + } + statesCreated = newStatesCreated; + + extractDownstreamModifiedColumnSet(downstream, result.getModifiedColumnSetForUpdates(), + modifiedOperators, upstreamModifiedColumnSet, resultModifiedColumnSetFactories); + + result.notifyListeners(downstream); } - downstream.added = Index.FACTORY.getEmptyIndex(); - downstream.removed = Index.FACTORY.getEmptyIndex(); - downstream.modified = Index.FACTORY.getIndexByValues(0); } - lastSize = newResultSize; - final int newStatesCreated = Math.max(statesCreated, newResultSize); - try (final ReadOnlyIndex newStates = - makeNewStatesIndex(statesCreated, newStatesCreated - 1)) { - ac.propagateChangesToOperators(downstream, newStates); + @Override + public void onFailureInternal(@NotNull final Throwable originalException, + final UpdatePerformanceTracker.Entry sourceEntry) { + ac.propagateFailureToOperators(originalException, sourceEntry); + super.onFailureInternal(originalException, sourceEntry); } - statesCreated = newStatesCreated; - - extractDownstreamModifiedColumnSet(downstream, - result.getModifiedColumnSetForUpdates(), modifiedOperators, - 
upstreamModifiedColumnSet, resultModifiedColumnSetFactories); - - result.notifyListeners(downstream); - } - } - - @Override - public void onFailureInternal(@NotNull final Throwable originalException, - final UpdatePerformanceTracker.Entry sourceEntry) { - ac.propagateFailureToOperators(originalException, sourceEntry); - super.onFailureInternal(originalException, sourceEntry); - } - }; + }; swapListener.setListenerAndResult(listener, result); result.addParentReference(swapListener); listener.manage(swapListener); // See note on keyed version @@ -1920,35 +1790,32 @@ public void onFailureInternal(@NotNull final Throwable originalException, } private static void doNoKeyAddition(OrderedKeys index, AggregationContext ac, - IterativeChunkedAggregationOperator.SingletonContext[] opContexts, - boolean[] operatorsToProcess, boolean usePrev, boolean[] modifiedOperators) { + IterativeChunkedAggregationOperator.SingletonContext[] opContexts, boolean[] operatorsToProcess, + boolean usePrev, boolean[] modifiedOperators) { doNoKeyUpdate(index, ac, opContexts, operatorsToProcess, usePrev, false, modifiedOperators); } private static void doNoKeyRemoval(OrderedKeys index, AggregationContext ac, - IterativeChunkedAggregationOperator.SingletonContext[] opContexts, - boolean[] operatorsToProcess, boolean[] modifiedOperators) { + IterativeChunkedAggregationOperator.SingletonContext[] opContexts, boolean[] operatorsToProcess, + boolean[] modifiedOperators) { doNoKeyUpdate(index, ac, opContexts, operatorsToProcess, true, true, modifiedOperators); } - private static void doNoKeyModifications(OrderedKeys preIndex, OrderedKeys postIndex, - AggregationContext ac, IterativeChunkedAggregationOperator.SingletonContext[] opContexts, - final boolean supplyPostIndices, @NotNull final boolean[] operatorsToProcess, - @NotNull final boolean[] operatorsToProcessIndicesOnly, - @NotNull final boolean[] modifiedOperators) { + private static void doNoKeyModifications(OrderedKeys preIndex, OrderedKeys 
postIndex, AggregationContext ac, + IterativeChunkedAggregationOperator.SingletonContext[] opContexts, + final boolean supplyPostIndices, @NotNull final boolean[] operatorsToProcess, + @NotNull final boolean[] operatorsToProcessIndicesOnly, @NotNull final boolean[] modifiedOperators) { final ColumnSource.GetContext[] preGetContexts = new ColumnSource.GetContext[ac.size()]; final ColumnSource.GetContext[] postGetContexts = new ColumnSource.GetContext[ac.size()]; try (final SafeCloseableArray ignored = new SafeCloseableArray<>(preGetContexts); - final SafeCloseableArray ignored2 = new SafeCloseableArray<>(postGetContexts); - final SharedContext preSharedContext = SharedContext.makeSharedContext(); - final SharedContext postSharedContext = SharedContext.makeSharedContext(); - final OrderedKeys.Iterator preIt = preIndex.getOrderedKeysIterator(); - final OrderedKeys.Iterator postIt = postIndex.getOrderedKeysIterator()) { - ac.initializeGetContexts(preSharedContext, preGetContexts, preIndex.size(), - operatorsToProcess); - ac.initializeGetContexts(postSharedContext, postGetContexts, postIndex.size(), - operatorsToProcess); + final SafeCloseableArray ignored2 = new SafeCloseableArray<>(postGetContexts); + final SharedContext preSharedContext = SharedContext.makeSharedContext(); + final SharedContext postSharedContext = SharedContext.makeSharedContext(); + final OrderedKeys.Iterator preIt = preIndex.getOrderedKeysIterator(); + final OrderedKeys.Iterator postIt = postIndex.getOrderedKeysIterator()) { + ac.initializeGetContexts(preSharedContext, preGetContexts, preIndex.size(), operatorsToProcess); + ac.initializeGetContexts(postSharedContext, postGetContexts, postIndex.size(), operatorsToProcess); final Chunk[] workingPreChunks = new Chunk[ac.size()]; final Chunk[] workingPostChunks = new Chunk[ac.size()]; @@ -1963,15 +1830,14 @@ private static void doNoKeyModifications(OrderedKeys preIndex, OrderedKeys postI postSharedContext.reset(); final LongChunk postKeyIndices = - 
supplyPostIndices ? postChunkOk.asKeyIndicesChunk() : null; + supplyPostIndices ? postChunkOk.asKeyIndicesChunk() : null; Arrays.fill(workingPreChunks, null); Arrays.fill(workingPostChunks, null); for (int ii = 0; ii < ac.size(); ++ii) { if (operatorsToProcessIndicesOnly[ii]) { - modifiedOperators[ii] |= - ac.operators[ii].modifyIndices(opContexts[ii], postKeyIndices, 0); + modifiedOperators[ii] |= ac.operators[ii].modifyIndices(opContexts[ii], postKeyIndices, 0); continue; } if (operatorsToProcess[ii]) { @@ -1982,16 +1848,16 @@ private static void doNoKeyModifications(OrderedKeys preIndex, OrderedKeys postI } else { final int inputSlot = ac.inputSlot(ii); if (workingPreChunks[inputSlot] == null) { - workingPreChunks[inputSlot] = ac.inputColumns[inputSlot] - .getPrevChunk(preGetContexts[inputSlot], preChunkOk); - workingPostChunks[inputSlot] = ac.inputColumns[inputSlot] - .getChunk(postGetContexts[inputSlot], postChunkOk); + workingPreChunks[inputSlot] = + ac.inputColumns[inputSlot].getPrevChunk(preGetContexts[inputSlot], preChunkOk); + workingPostChunks[inputSlot] = + ac.inputColumns[inputSlot].getChunk(postGetContexts[inputSlot], postChunkOk); } preValues = workingPreChunks[inputSlot]; postValues = workingPostChunks[inputSlot]; } - modifiedOperators[ii] |= ac.operators[ii].modifyChunk(opContexts[ii], - chunkSize, preValues, postValues, postKeyIndices, 0); + modifiedOperators[ii] |= ac.operators[ii].modifyChunk(opContexts[ii], chunkSize, preValues, + postValues, postKeyIndices, 0); } } } @@ -1999,18 +1865,16 @@ private static void doNoKeyModifications(OrderedKeys preIndex, OrderedKeys postI } private static void doIndexOnlyNoKeyModifications(@NotNull final OrderedKeys postIndex, - @NotNull final AggregationContext ac, - @NotNull final IterativeChunkedAggregationOperator.SingletonContext[] opContexts, - @NotNull final boolean[] operatorsToProcessIndicesOnly, - @NotNull final boolean[] modifiedOperators) { + @NotNull final AggregationContext ac, + @NotNull final 
IterativeChunkedAggregationOperator.SingletonContext[] opContexts, + @NotNull final boolean[] operatorsToProcessIndicesOnly, @NotNull final boolean[] modifiedOperators) { try (final OrderedKeys.Iterator postIt = postIndex.getOrderedKeysIterator()) { while (postIt.hasMore()) { final OrderedKeys postChunkOk = postIt.getNextOrderedKeysWithLength(CHUNK_SIZE); final LongChunk postKeyIndices = postChunkOk.asKeyIndicesChunk(); for (int ii = 0; ii < ac.size(); ++ii) { if (operatorsToProcessIndicesOnly[ii]) { - modifiedOperators[ii] |= - ac.operators[ii].modifyIndices(opContexts[ii], postKeyIndices, 0); + modifiedOperators[ii] |= ac.operators[ii].modifyIndices(opContexts[ii], postKeyIndices, 0); } } } @@ -2018,28 +1882,26 @@ private static void doIndexOnlyNoKeyModifications(@NotNull final OrderedKeys pos } private static void doNoKeyUpdate(OrderedKeys index, AggregationContext ac, - IterativeChunkedAggregationOperator.SingletonContext[] opContexts, - boolean[] operatorsToProcess, boolean usePrev, boolean remove, - boolean[] modifiedOperators) { + IterativeChunkedAggregationOperator.SingletonContext[] opContexts, + boolean[] operatorsToProcess, boolean usePrev, boolean remove, boolean[] modifiedOperators) { final ColumnSource.GetContext[] getContexts = new ColumnSource.GetContext[ac.size()]; final boolean indicesRequired = ac.requiresIndices(operatorsToProcess); try (final SafeCloseableArray ignored = new SafeCloseableArray<>(getContexts); - final SharedContext sharedContext = SharedContext.makeSharedContext(); - final OrderedKeys.Iterator okIt = index.getOrderedKeysIterator()) { + final SharedContext sharedContext = SharedContext.makeSharedContext(); + final OrderedKeys.Iterator okIt = index.getOrderedKeysIterator()) { ac.initializeGetContexts(sharedContext, getContexts, index.size(), operatorsToProcess); // noinspection unchecked final Chunk[] workingChunks = new Chunk[ac.size()]; - // on an empty initial pass we want to go through the operator anyway, so that we - // 
initialize things correctly for the aggregation of zero keys + // on an empty initial pass we want to go through the operator anyway, so that we initialize things + // correctly for the aggregation of zero keys do { final OrderedKeys chunkOk = okIt.getNextOrderedKeysWithLength(CHUNK_SIZE); sharedContext.reset(); - final LongChunk keyIndices = - indicesRequired ? chunkOk.asKeyIndicesChunk() : null; + final LongChunk keyIndices = indicesRequired ? chunkOk.asKeyIndicesChunk() : null; Arrays.fill(workingChunks, null); @@ -2050,21 +1912,21 @@ private static void doNoKeyUpdate(OrderedKeys index, AggregationContext ac, final int inputSlot = ac.inputSlot(ii); if (inputSlot >= 0 && workingChunks[inputSlot] == null) { - workingChunks[inputSlot] = fetchValues(usePrev, chunkOk, - ac.inputColumns[inputSlot], getContexts[inputSlot]); + workingChunks[inputSlot] = + fetchValues(usePrev, chunkOk, ac.inputColumns[inputSlot], getContexts[inputSlot]); } - modifiedOperators[ii] |= processColumnNoKey(remove, chunkOk, - inputSlot >= 0 ? workingChunks[inputSlot] : null, ac.operators[ii], - opContexts[ii], keyIndices); + modifiedOperators[ii] |= + processColumnNoKey(remove, chunkOk, inputSlot >= 0 ? 
workingChunks[inputSlot] : null, + ac.operators[ii], opContexts[ii], keyIndices); } } while (okIt.hasMore()); } } private static void doNoKeyShifts(QueryTable source, Update upstream, AggregationContext ac, - final IterativeChunkedAggregationOperator.SingletonContext[] opContexts, - boolean[] operatorsToShift, boolean[] modifiedOperators) { + final IterativeChunkedAggregationOperator.SingletonContext[] opContexts, + boolean[] operatorsToShift, boolean[] modifiedOperators) { final ColumnSource.GetContext[] getContexts = new ColumnSource.GetContext[ac.size()]; final ColumnSource.GetContext[] postGetContexts = new ColumnSource.GetContext[ac.size()]; @@ -2074,43 +1936,37 @@ private static void doNoKeyShifts(QueryTable source, Update upstream, Aggregatio } final int chunkSize = chunkSize(useIndex.size()); try (final SafeCloseableArray ignored = new SafeCloseableArray<>(getContexts); - final SafeCloseableArray ignored2 = new SafeCloseableArray<>(postGetContexts); - final SharedContext sharedContext = SharedContext.makeSharedContext(); - final SharedContext postSharedContext = SharedContext.makeSharedContext(); - final WritableLongChunk preKeyIndices = - WritableLongChunk.makeWritableChunk(chunkSize); - final WritableLongChunk postKeyIndices = - WritableLongChunk.makeWritableChunk(chunkSize)) { + final SafeCloseableArray ignored2 = new SafeCloseableArray<>(postGetContexts); + final SharedContext sharedContext = SharedContext.makeSharedContext(); + final SharedContext postSharedContext = SharedContext.makeSharedContext(); + final WritableLongChunk preKeyIndices = + WritableLongChunk.makeWritableChunk(chunkSize); + final WritableLongChunk postKeyIndices = + WritableLongChunk.makeWritableChunk(chunkSize)) { ac.initializeGetContexts(sharedContext, getContexts, chunkSize, operatorsToShift); - ac.initializeGetContexts(postSharedContext, postGetContexts, chunkSize, - operatorsToShift); - - final Runnable applyChunkedShift = () -> doProcessShiftNoKey(ac, opContexts, - 
operatorsToShift, sharedContext, postSharedContext, - getContexts, postGetContexts, preKeyIndices, postKeyIndices, modifiedOperators); - processUpstreamShifts(upstream, useIndex, preKeyIndices, postKeyIndices, - applyChunkedShift); + ac.initializeGetContexts(postSharedContext, postGetContexts, chunkSize, operatorsToShift); + + final Runnable applyChunkedShift = + () -> doProcessShiftNoKey(ac, opContexts, operatorsToShift, sharedContext, postSharedContext, + getContexts, postGetContexts, preKeyIndices, postKeyIndices, modifiedOperators); + processUpstreamShifts(upstream, useIndex, preKeyIndices, postKeyIndices, applyChunkedShift); } } } private static void doProcessShiftNoKey(AggregationContext ac, - IterativeChunkedAggregationOperator.SingletonContext[] opContexts, - boolean[] operatorsToShift, SharedContext sharedContext, SharedContext postSharedContext, - ColumnSource.GetContext[] getContexts, ColumnSource.GetContext[] postGetContexts, - WritableLongChunk preKeyIndices, - WritableLongChunk postKeyIndices, - boolean[] modifiedOperators) { + IterativeChunkedAggregationOperator.SingletonContext[] opContexts, + boolean[] operatorsToShift, SharedContext sharedContext, SharedContext postSharedContext, + ColumnSource.GetContext[] getContexts, ColumnSource.GetContext[] postGetContexts, + WritableLongChunk preKeyIndices, WritableLongChunk postKeyIndices, + boolean[] modifiedOperators) { // noinspection unchecked final Chunk[] workingPreChunks = new Chunk[ac.size()]; // noinspection unchecked final Chunk[] workingPostChunks = new Chunk[ac.size()]; - try ( - final OrderedKeys preChunkOk = - OrderedKeys.wrapKeyIndicesChunkAsOrderedKeys(preKeyIndices); - final OrderedKeys postChunkOk = - OrderedKeys.wrapKeyIndicesChunkAsOrderedKeys(postKeyIndices)) { + try (final OrderedKeys preChunkOk = OrderedKeys.wrapKeyIndicesChunkAsOrderedKeys(preKeyIndices); + final OrderedKeys postChunkOk = OrderedKeys.wrapKeyIndicesChunkAsOrderedKeys(postKeyIndices)) { sharedContext.reset(); 
postSharedContext.reset(); Arrays.fill(workingPreChunks, null); @@ -2126,26 +1982,25 @@ private static void doProcessShiftNoKey(AggregationContext ac, previousValues = newValues = null; } else { if (workingPreChunks[inputSlot] == null) { - workingPreChunks[inputSlot] = - ac.inputColumns[ii].getPrevChunk(getContexts[ii], preChunkOk); + workingPreChunks[inputSlot] = ac.inputColumns[ii].getPrevChunk(getContexts[ii], preChunkOk); workingPostChunks[inputSlot] = - ac.inputColumns[ii].getChunk(postGetContexts[ii], postChunkOk); + ac.inputColumns[ii].getChunk(postGetContexts[ii], postChunkOk); } previousValues = workingPreChunks[inputSlot]; newValues = workingPostChunks[inputSlot]; } - modifiedOperators[ii] |= ac.operators[ii].shiftChunk(opContexts[ii], - previousValues, newValues, preKeyIndices, postKeyIndices, 0); + modifiedOperators[ii] |= ac.operators[ii].shiftChunk(opContexts[ii], previousValues, newValues, + preKeyIndices, postKeyIndices, 0); } } } } - private static boolean processColumnNoKey(boolean remove, OrderedKeys chunkOk, - Chunk values, IterativeChunkedAggregationOperator operator, - IterativeChunkedAggregationOperator.SingletonContext opContext, - LongChunk keyIndices) { + private static boolean processColumnNoKey(boolean remove, OrderedKeys chunkOk, Chunk values, + IterativeChunkedAggregationOperator operator, + IterativeChunkedAggregationOperator.SingletonContext opContext, + LongChunk keyIndices) { if (remove) { return operator.removeChunk(opContext, chunkOk.intSize(), values, keyIndices, 0); } else { @@ -2155,7 +2010,7 @@ private static boolean processColumnNoKey(boolean remove, OrderedKeys chunkOk, @Nullable private static Chunk fetchValues(boolean usePrev, OrderedKeys chunkOk, - ChunkSource.WithPrev inputColumn, ChunkSource.GetContext getContext) { + ChunkSource.WithPrev inputColumn, ChunkSource.GetContext getContext) { final Chunk values; if (inputColumn == null) { values = null; diff --git 
a/DB/src/main/java/io/deephaven/db/v2/by/ChunkedWeightedAverageOperator.java b/DB/src/main/java/io/deephaven/db/v2/by/ChunkedWeightedAverageOperator.java index 95a027124dd..b302ff353ea 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/ChunkedWeightedAverageOperator.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/ChunkedWeightedAverageOperator.java @@ -25,8 +25,8 @@ class ChunkedWeightedAverageOperator implements IterativeChunkedAggregationOpera private final DoubleArraySource weightedSum; private final DoubleArraySource resultColumn; - ChunkedWeightedAverageOperator(ChunkType chunkType, - DoubleWeightRecordingInternalOperator weightOperator, String name) { + ChunkedWeightedAverageOperator(ChunkType chunkType, DoubleWeightRecordingInternalOperator weightOperator, + String name) { this.chunkType = chunkType; this.weightOperator = weightOperator; this.resultName = name; @@ -40,58 +40,56 @@ class ChunkedWeightedAverageOperator implements IterativeChunkedAggregationOpera @Override public void addChunk(BucketedContext bucketedContext, Chunk values, - LongChunk inputIndices, IntChunk destinations, - IntChunk startPositions, IntChunk length, - WritableBooleanChunk stateModified) { + LongChunk inputIndices, IntChunk destinations, + IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { final Context context = (Context) bucketedContext; final DoubleChunk doubleValues = context.toDoubleCast.cast(values); final DoubleChunk weightValues = weightOperator.getAddedWeights(); Assert.neqNull(weightValues, "weightValues"); for (int ii = 0; ii < startPositions.size(); ++ii) { final int startPosition = startPositions.get(ii); - stateModified.set(ii, addChunk(doubleValues, weightValues, startPosition, - length.get(ii), destinations.get(startPosition))); + stateModified.set(ii, addChunk(doubleValues, weightValues, startPosition, length.get(ii), + destinations.get(startPosition))); } } @Override public void removeChunk(BucketedContext bucketedContext, Chunk 
values, - LongChunk inputIndices, IntChunk destinations, - IntChunk startPositions, IntChunk length, - WritableBooleanChunk stateModified) { + LongChunk inputIndices, IntChunk destinations, + IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { final Context context = (Context) bucketedContext; final DoubleChunk doubleValues = context.prevToDoubleCast.cast(values); final DoubleChunk weightValues = weightOperator.getRemovedWeights(); Assert.neqNull(weightValues, "weightValues"); for (int ii = 0; ii < startPositions.size(); ++ii) { final int startPosition = startPositions.get(ii); - stateModified.set(ii, removeChunk(doubleValues, weightValues, startPosition, - length.get(ii), destinations.get(startPosition))); + stateModified.set(ii, removeChunk(doubleValues, weightValues, startPosition, length.get(ii), + destinations.get(startPosition))); } } @Override public void modifyChunk(BucketedContext bucketedContext, Chunk previousValues, - Chunk newValues, LongChunk postShiftIndices, - IntChunk destinations, IntChunk startPositions, - IntChunk length, WritableBooleanChunk stateModified) { + Chunk newValues, LongChunk postShiftIndices, + IntChunk destinations, IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { final Context context = (Context) bucketedContext; - final DoubleChunk prevDoubleValues = - context.prevToDoubleCast.cast(previousValues); + final DoubleChunk prevDoubleValues = context.prevToDoubleCast.cast(previousValues); final DoubleChunk prevWeightValues = weightOperator.getRemovedWeights(); final DoubleChunk newDoubleValues = context.toDoubleCast.cast(newValues); final DoubleChunk newWeightValues = weightOperator.getAddedWeights(); for (int ii = 0; ii < startPositions.size(); ++ii) { final int startPosition = startPositions.get(ii); - stateModified.set(ii, modifyChunk(prevDoubleValues, prevWeightValues, newDoubleValues, - newWeightValues, startPosition, length.get(ii), destinations.get(startPosition))); 
+ stateModified.set(ii, modifyChunk(prevDoubleValues, prevWeightValues, newDoubleValues, newWeightValues, + startPosition, length.get(ii), destinations.get(startPosition))); } } @Override - public boolean addChunk(SingletonContext singletonContext, int chunkSize, - Chunk values, LongChunk inputIndices, - long destination) { + public boolean addChunk(SingletonContext singletonContext, int chunkSize, Chunk values, + LongChunk inputIndices, long destination) { final Context context = (Context) singletonContext; final DoubleChunk doubleValues = context.toDoubleCast.cast(values); final DoubleChunk weightValues = weightOperator.getAddedWeights(); @@ -99,9 +97,8 @@ public boolean addChunk(SingletonContext singletonContext, int chunkSize, } @Override - public boolean removeChunk(SingletonContext singletonContext, int chunkSize, - Chunk values, LongChunk inputIndices, - long destination) { + public boolean removeChunk(SingletonContext singletonContext, int chunkSize, Chunk values, + LongChunk inputIndices, long destination) { final Context context = (Context) singletonContext; final DoubleChunk doubleValues = context.prevToDoubleCast.cast(values); final DoubleChunk weightValues = weightOperator.getRemovedWeights(); @@ -109,29 +106,27 @@ public boolean removeChunk(SingletonContext singletonContext, int chunkSize, } @Override - public boolean modifyChunk(SingletonContext singletonContext, int chunkSize, - Chunk previousValues, Chunk newValues, - LongChunk postShiftIndices, long destination) { + public boolean modifyChunk(SingletonContext singletonContext, int chunkSize, Chunk previousValues, + Chunk newValues, LongChunk postShiftIndices, long destination) { final Context context = (Context) singletonContext; final DoubleChunk newDoubleValues = context.toDoubleCast.cast(newValues); final DoubleChunk newWeightValues = weightOperator.getAddedWeights(); - final DoubleChunk prevDoubleValues = - context.prevToDoubleCast.cast(previousValues); + final DoubleChunk prevDoubleValues = 
context.prevToDoubleCast.cast(previousValues); final DoubleChunk prevWeightValues = weightOperator.getRemovedWeights(); return modifyChunk(prevDoubleValues, prevWeightValues, newDoubleValues, newWeightValues, 0, - newDoubleValues.size(), destination); + newDoubleValues.size(), destination); } private static void sumChunks(DoubleChunk doubleValues, - DoubleChunk weightValues, - int start, - int length, - MutableInt nansOut, - MutableInt normalOut, - MutableDouble sumOfWeightsOut, - MutableDouble weightedSumOut) { + DoubleChunk weightValues, + int start, + int length, + MutableInt nansOut, + MutableInt normalOut, + MutableDouble sumOfWeightsOut, + MutableDouble weightedSumOut) { long nans = 0; long normal = 0; double sumOfWeights = 0.0; @@ -166,15 +161,14 @@ private static void sumChunks(DoubleChunk doubleValues, weightedSumOut.setValue(weightedSum); } - private boolean addChunk(DoubleChunk doubleValues, - DoubleChunk weightValues, int start, int length, long destination) { + private boolean addChunk(DoubleChunk doubleValues, DoubleChunk weightValues, + int start, int length, long destination) { final MutableInt nanOut = new MutableInt(); final MutableInt normalOut = new MutableInt(); final MutableDouble sumOfWeightsOut = new MutableDouble(); final MutableDouble weightedSumOut = new MutableDouble(); - sumChunks(doubleValues, weightValues, start, length, nanOut, normalOut, sumOfWeightsOut, - weightedSumOut); + sumChunks(doubleValues, weightValues, start, length, nanOut, normalOut, sumOfWeightsOut, weightedSumOut); final long newNans = nanOut.intValue(); final long newNormal = normalOut.intValue(); @@ -206,10 +200,8 @@ private boolean addChunk(DoubleChunk doubleValues, final double existingSumOfWeights = sumOfWeights.getUnsafe(destination); final double existingWeightedSum = weightedSum.getUnsafe(destination); - final double totalWeightedSum = - NullSafeAddition.plusDouble(existingWeightedSum, newWeightedSum); - final double totalSumOfWeights = - 
NullSafeAddition.plusDouble(existingSumOfWeights, newSumOfWeights); + final double totalWeightedSum = NullSafeAddition.plusDouble(existingWeightedSum, newWeightedSum); + final double totalSumOfWeights = NullSafeAddition.plusDouble(existingSumOfWeights, newSumOfWeights); if (newNormal > 0) { weightedSum.set(destination, totalWeightedSum); @@ -243,15 +235,14 @@ private long allocateNans(long destination, long newNans) { return newNans; } - private boolean removeChunk(DoubleChunk doubleValues, - DoubleChunk weightValues, int start, int length, long destination) { + private boolean removeChunk(DoubleChunk doubleValues, DoubleChunk weightValues, + int start, int length, long destination) { final MutableInt nanOut = new MutableInt(); final MutableInt normalOut = new MutableInt(); final MutableDouble sumOfWeightsOut = new MutableDouble(); final MutableDouble weightedSumOut = new MutableDouble(); - sumChunks(doubleValues, weightValues, start, length, nanOut, normalOut, sumOfWeightsOut, - weightedSumOut); + sumChunks(doubleValues, weightValues, start, length, nanOut, normalOut, sumOfWeightsOut, weightedSumOut); final int newNans = nanOut.intValue(); final int newNormal = normalOut.intValue(); @@ -315,24 +306,22 @@ private boolean removeChunk(DoubleChunk doubleValues, } private boolean modifyChunk(DoubleChunk prevDoubleValues, - DoubleChunk prevWeightValues, - DoubleChunk newDoubleValues, - DoubleChunk newWeightValues, int start, int length, long destination) { + DoubleChunk prevWeightValues, DoubleChunk newDoubleValues, + DoubleChunk newWeightValues, int start, int length, long destination) { final MutableInt nanOut = new MutableInt(); final MutableInt normalOut = new MutableInt(); final MutableDouble sumOfWeightsOut = new MutableDouble(); final MutableDouble weightedSumOut = new MutableDouble(); - sumChunks(prevDoubleValues, prevWeightValues, start, length, nanOut, normalOut, - sumOfWeightsOut, weightedSumOut); + sumChunks(prevDoubleValues, prevWeightValues, start, length, 
nanOut, normalOut, sumOfWeightsOut, + weightedSumOut); final int prevNans = nanOut.intValue(); final int prevNormal = normalOut.intValue(); final double prevSumOfWeights = sumOfWeightsOut.doubleValue(); final double prevWeightedSum = weightedSumOut.doubleValue(); - sumChunks(newDoubleValues, newWeightValues, start, length, nanOut, normalOut, - sumOfWeightsOut, weightedSumOut); + sumChunks(newDoubleValues, newWeightValues, start, length, nanOut, normalOut, sumOfWeightsOut, weightedSumOut); final int newNans = nanOut.intValue(); final int newNormal = normalOut.intValue(); @@ -366,9 +355,9 @@ private boolean modifyChunk(DoubleChunk prevDoubleValues, final double existingWeightedSum = weightedSum.getUnsafe(destination); final double totalWeightedSum = - NullSafeAddition.plusDouble(existingWeightedSum, newWeightedSum - prevWeightedSum); - final double totalSumOfWeights = NullSafeAddition.plusDouble(existingSumOfWeights, - newSumOfWeights - prevSumOfWeights); + NullSafeAddition.plusDouble(existingWeightedSum, newWeightedSum - prevWeightedSum); + final double totalSumOfWeights = + NullSafeAddition.plusDouble(existingSumOfWeights, newSumOfWeights - prevSumOfWeights); if (totalWeightedSum != existingWeightedSum) { weightedSum.set(destination, totalWeightedSum); diff --git a/DB/src/main/java/io/deephaven/db/v2/by/ComboAggregateFactory.java b/DB/src/main/java/io/deephaven/db/v2/by/ComboAggregateFactory.java index 949032070f7..1c9e5116464 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/ComboAggregateFactory.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/ComboAggregateFactory.java @@ -78,8 +78,8 @@ * {@link Table#by(AggregationStateFactory)}. * *

    - * The intended use of this class is to call the {@link #AggCombo(ComboBy...)} method with a set of - * aggregations defined by: + * The intended use of this class is to call the {@link #AggCombo(ComboBy...)} method with a set of aggregations defined + * by: *

      *
    • {@link #AggMin}
    • *
    • {@link #AggMax}
    • @@ -123,8 +123,7 @@ public class ComboAggregateFactory implements AggregationStateFactory { public static final String ROLLUP_COLUMN_SUFFIX = "__ROLLUP__"; /** - * Create a new ComboAggregateFactory suitable for passing to - * {@link Table#by(AggregationStateFactory, String...)}. + * Create a new ComboAggregateFactory suitable for passing to {@link Table#by(AggregationStateFactory, String...)}. * * @param aggregations the aggregations to compute * @@ -139,21 +138,19 @@ public static ComboAggregateFactory AggCombo(ComboBy... aggregations) { * * @param formula the formula to apply to each group * @param formulaParam the parameter name within the formula - * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the - * Output and Input have the same name, then the column name can be specified. + * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the Output and Input have + * the same name, then the column name can be specified. * @return a ComboBy object suitable for passing to {@link #AggCombo(ComboBy...)} */ - public static ComboBy AggFormula(String formula, String formulaParam, - final String... matchPairs) { - return new ComboByImpl(new AggregationFormulaStateFactory(formula, formulaParam), - matchPairs); + public static ComboBy AggFormula(String formula, String formulaParam, final String... matchPairs) { + return new ComboByImpl(new AggregationFormulaStateFactory(formula, formulaParam), matchPairs); } /** * Create a minimum aggregation, equivalent to {@link Table#minBy(String...)}. * - * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the - * Output and Input have the same name, then the column name can be specified. + * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the Output and Input have + * the same name, then the column name can be specified. 
* @return a ComboBy object suitable for passing to {@link #AggCombo(ComboBy...)} */ public static ComboBy AggMin(final String... matchPairs) { @@ -163,8 +160,8 @@ public static ComboBy AggMin(final String... matchPairs) { /** * Create a maximum aggregation, equivalent to {@link Table#maxBy(String...)}. * - * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the - * Output and Input have the same name, then the column name can be specified. + * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the Output and Input have + * the same name, then the column name can be specified. * @return a ComboBy object suitable for passing to {@link #AggCombo(ComboBy...)} */ public static ComboBy AggMax(final String... matchPairs) { @@ -174,8 +171,8 @@ public static ComboBy AggMax(final String... matchPairs) { /** * Create a summation aggregation, equivalent to {@link Table#sumBy(String...)}. * - * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the - * Output and Input have the same name, then the column name can be specified. + * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the Output and Input have + * the same name, then the column name can be specified. * @return a ComboBy object suitable for passing to {@link #AggCombo(ComboBy...)} */ public static ComboBy AggSum(final String... matchPairs) { @@ -185,8 +182,8 @@ public static ComboBy AggSum(final String... matchPairs) { /** * Create an absolute sum aggregation, equivalent to {@link Table#absSumBy(String...)}. * - * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the - * Output and Input have the same name, then the column name can be specified. + * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the Output and Input have + * the same name, then the column name can be specified. 
* @return a ComboBy object suitable for passing to {@link #AggCombo(ComboBy...)} */ public static ComboBy AggAbsSum(final String... matchPairs) { @@ -196,8 +193,8 @@ public static ComboBy AggAbsSum(final String... matchPairs) { /** * Create a variance aggregation, equivalent to {@link Table#varBy(String...)}. * - * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the - * Output and Input have the same name, then the column name can be specified. + * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the Output and Input have + * the same name, then the column name can be specified. * @return a ComboBy object suitable for passing to {@link #AggCombo(ComboBy...)} */ public static ComboBy AggVar(final String... matchPairs) { @@ -207,8 +204,8 @@ public static ComboBy AggVar(final String... matchPairs) { /** * Create an average aggregation, equivalent to {@link Table#avgBy(String...)}. * - * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the - * Output and Input have the same name, then the column name can be specified. + * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the Output and Input have + * the same name, then the column name can be specified. * @return a ComboBy object suitable for passing to {@link #AggCombo(ComboBy...)} */ public static ComboBy AggAvg(final String... matchPairs) { @@ -219,8 +216,8 @@ public static ComboBy AggAvg(final String... matchPairs) { * Create a weighted average aggregation, equivalent to {@link Table#wavgBy(String, String...)}. * * @param weight the name of the column to use as the weight for the average - * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the - * Output and Input have the same name, then the column name can be specified. 
+ * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the Output and Input have + * the same name, then the column name can be specified. * @return a ComboBy object suitable for passing to {@link #AggCombo(ComboBy...)} */ public static ComboBy AggWAvg(final String weight, final String... matchPairs) { @@ -231,8 +228,8 @@ public static ComboBy AggWAvg(final String weight, final String... matchPairs) { * Create a weighted sum aggregation, equivalent to {@link Table#wsumBy(String, String...)}. * * @param weight the name of the column to use as the weight for the sum - * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the - * Output and Input have the same name, then the column name can be specified. + * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the Output and Input have + * the same name, then the column name can be specified. * @return a ComboBy object suitable for passing to {@link #AggCombo(ComboBy...)} */ public static ComboBy AggWSum(final String weight, final String... matchPairs) { @@ -242,8 +239,8 @@ public static ComboBy AggWSum(final String weight, final String... matchPairs) { /** * Create a median aggregation, equivalent to {@link Table#medianBy(String...)}. * - * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the - * Output and Input have the same name, then the column name can be specified. + * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the Output and Input have + * the same name, then the column name can be specified. * @return a ComboBy object suitable for passing to {@link #AggCombo(ComboBy...)} */ public static ComboBy AggMed(final String... matchPairs) { @@ -253,8 +250,8 @@ public static ComboBy AggMed(final String... matchPairs) { /** * Create a standard deviation aggregation, equivalent to {@link Table#stdBy(String...)}. 
* - * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the - * Output and Input have the same name, then the column name can be specified. + * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the Output and Input have + * the same name, then the column name can be specified. * @return a ComboBy object suitable for passing to {@link #AggCombo(ComboBy...)} */ public static ComboBy AggStd(final String... matchPairs) { @@ -264,8 +261,8 @@ public static ComboBy AggStd(final String... matchPairs) { /** * Create a first aggregation, equivalent to {@link Table#firstBy(String...)}. * - * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the - * Output and Input have the same name, then the column name can be specified. + * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the Output and Input have + * the same name, then the column name can be specified. * @return a ComboBy object suitable for passing to {@link #AggCombo(ComboBy...)} */ public static ComboBy AggFirst(final String... matchPairs) { @@ -275,8 +272,8 @@ public static ComboBy AggFirst(final String... matchPairs) { /** * Create a last aggregation, equivalent to {@link Table#lastBy(String...)}. * - * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the - * Output and Input have the same name, then the column name can be specified. + * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the Output and Input have + * the same name, then the column name can be specified. * @return a ComboBy object suitable for passing to {@link #AggCombo(ComboBy...)} */ public static ComboBy AggLast(final String... matchPairs) { @@ -284,12 +281,11 @@ public static ComboBy AggLast(final String... 
matchPairs) { } /** - * Create a sorted first aggregation, equivalent to - * {@link io.deephaven.db.util.SortedBy#sortedFirstBy}. + * Create a sorted first aggregation, equivalent to {@link io.deephaven.db.util.SortedBy#sortedFirstBy}. * * @param sortColumn the column to sort by - * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the - * Output and Input have the same name, then the column name can be specified. + * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the Output and Input have + * the same name, then the column name can be specified. * @return a ComboBy object suitable for passing to {@link #AggCombo(ComboBy...)} */ public static ComboBy AggSortedFirst(final String sortColumn, final String... matchPairs) { @@ -297,12 +293,11 @@ public static ComboBy AggSortedFirst(final String sortColumn, final String... ma } /** - * Create a sorted last aggregation, equivalent to - * {@link io.deephaven.db.util.SortedBy#sortedLastBy}. + * Create a sorted last aggregation, equivalent to {@link io.deephaven.db.util.SortedBy#sortedLastBy}. * * @param sortColumn the column to sort by - * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the - * Output and Input have the same name, then the column name can be specified. + * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the Output and Input have + * the same name, then the column name can be specified. * @return a ComboBy object suitable for passing to {@link #AggCombo(ComboBy...)} */ public static ComboBy AggSortedLast(final String sortColumn, final String... matchPairs) { @@ -310,12 +305,11 @@ public static ComboBy AggSortedLast(final String sortColumn, final String... mat } /** - * Create a sorted first aggregation, equivalent to - * {@link io.deephaven.db.util.SortedBy#sortedFirstBy}. 
+ * Create a sorted first aggregation, equivalent to {@link io.deephaven.db.util.SortedBy#sortedFirstBy}. * * @param sortColumns the column to sort by - * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the - * Output and Input have the same name, then the column name can be specified. + * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the Output and Input have + * the same name, then the column name can be specified. * @return a ComboBy object suitable for passing to {@link #AggCombo(ComboBy...)} */ public static ComboBy AggSortedFirst(final String[] sortColumns, final String... matchPairs) { @@ -323,12 +317,11 @@ public static ComboBy AggSortedFirst(final String[] sortColumns, final String... } /** - * Create a sorted last aggregation, equivalent to - * {@link io.deephaven.db.util.SortedBy#sortedLastBy}. + * Create a sorted last aggregation, equivalent to {@link io.deephaven.db.util.SortedBy#sortedLastBy}. * * @param sortColumns the columns to sort by - * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the - * Output and Input have the same name, then the column name can be specified. + * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the Output and Input have + * the same name, then the column name can be specified. * @return a ComboBy object suitable for passing to {@link #AggCombo(ComboBy...)} */ public static ComboBy AggSortedLast(final String[] sortColumns, final String... matchPairs) { @@ -338,8 +331,8 @@ public static ComboBy AggSortedLast(final String[] sortColumns, final String... /** * Create an array aggregation, equivalent to {@link Table#by(String...)}. * - * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the - * Output and Input have the same name, then the column name can be specified. 
+ * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the Output and Input have + * the same name, then the column name can be specified. * @return a ComboBy object suitable for passing to {@link #AggCombo(ComboBy...)} */ public static ComboBy AggArray(final String... matchPairs) { @@ -362,10 +355,9 @@ public static ComboBy AggCount(final String resultColumn) { * * The output column contains the number of distinct values for the input column in that group. * - * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the - * Output and Input have the same name, then the column name can be specified. - * @return a ComboBy object suitable for passing to {@link #AggCombo(ComboBy...)}. Null values - * are not counted. + * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the Output and Input have + * the same name, then the column name can be specified. + * @return a ComboBy object suitable for passing to {@link #AggCombo(ComboBy...)}. Null values are not counted. */ public static ComboBy AggCountDistinct(final String... matchPairs) { return AggCountDistinct(false, matchPairs); @@ -376,10 +368,9 @@ public static ComboBy AggCountDistinct(final String... matchPairs) { * * The output column contains the number of distinct values for the input column in that group. * - * @param countNulls if true null values are counted as a distinct value, otherwise null values - * are ignored - * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the - * Output and Input have the same name, then the column name can be specified. + * @param countNulls if true null values are counted as a distinct value, otherwise null values are ignored + * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the Output and Input have + * the same name, then the column name can be specified. 
* @return a ComboBy object suitable for passing to {@link #AggCombo(ComboBy...)} */ public static ComboBy AggCountDistinct(boolean countNulls, final String... matchPairs) { @@ -389,13 +380,12 @@ public static ComboBy AggCountDistinct(boolean countNulls, final String... match /** * Create a distinct aggregation. * - * The output column contains a {@link io.deephaven.db.tables.dbarrays.DbArrayBase} with the - * distinct values for the input column within the group. + * The output column contains a {@link io.deephaven.db.tables.dbarrays.DbArrayBase} with the distinct values for the + * input column within the group. * - * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the - * Output and Input have the same name, then the column name can be specified. - * @return a ComboBy object suitable for passing to {@link #AggCombo(ComboBy...)}. Null values - * are ignored. + * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the Output and Input have + * the same name, then the column name can be specified. + * @return a ComboBy object suitable for passing to {@link #AggCombo(ComboBy...)}. Null values are ignored. */ public static ComboBy AggDistinct(final String... matchPairs) { return AggDistinct(false, matchPairs); @@ -405,13 +395,12 @@ public static ComboBy AggDistinct(final String... matchPairs) { /** * Create a distinct aggregation. * - * The output column contains a {@link io.deephaven.db.tables.dbarrays.DbArrayBase} with the - * distinct values for the input column within the group. + * The output column contains a {@link io.deephaven.db.tables.dbarrays.DbArrayBase} with the distinct values for the + * input column within the group. 
* - * @param countNulls if true, then null values are included in the result, otherwise null values - * are ignored - * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the - * Output and Input have the same name, then the column name can be specified. + * @param countNulls if true, then null values are included in the result, otherwise null values are ignored + * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the Output and Input have + * the same name, then the column name can be specified. * @return a ComboBy object suitable for passing to {@link #AggCombo(ComboBy...)} */ public static ComboBy AggDistinct(boolean countNulls, final String... matchPairs) { @@ -428,8 +417,8 @@ public static ComboBy AggDistinct(boolean countNulls, final String... matchPairs *
    • The "non unique value" - if there are more than 1 distinct values present
    • *
    * - * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the - * Output and Input have the same name, then the column name can be specified. + * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the Output and Input have + * the same name, then the column name can be specified. * @return a ComboBy object suitable for passing to {@link #AggCombo(ComboBy...)} */ public static ComboBy AggUnique(final String... matchPairs) { @@ -446,13 +435,11 @@ public static ComboBy AggUnique(final String... matchPairs) { *
  • The "non unique value" - if there are more than 1 distinct values present
  • * * - * @param countNulls if true, then null values are included in the result, otherwise null values - * are ignored - * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the - * Output and Input have the same name, then the column name can be specified. - * @return a ComboBy object suitable for passing to {@link #AggCombo(ComboBy...)}. Output - * columns contain null if there are no values present or there are more than 1 distinct - * values present. + * @param countNulls if true, then null values are included in the result, otherwise null values are ignored + * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the Output and Input have + * the same name, then the column name can be specified. + * @return a ComboBy object suitable for passing to {@link #AggCombo(ComboBy...)}. Output columns contain null if + * there are no values present or there are more than 1 distinct values present. */ public static ComboBy AggUnique(boolean countNulls, final String... matchPairs) { return AggUnique(countNulls, null, null, matchPairs); @@ -468,16 +455,15 @@ public static ComboBy AggUnique(boolean countNulls, final String... matchPairs) *
  • The "non unique value" - if there are more than 1 distinct values present
  • * * - * @param countNulls if true, then null values are included in the result, otherwise null values - * are ignored + * @param countNulls if true, then null values are included in the result, otherwise null values are ignored * @param noKeyValue the value to use if there are no values present * @param nonUniqueValue the value to use if there are more than 1 values present - * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the - * Output and Input have the same name, then the column name can be specified. + * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the Output and Input have + * the same name, then the column name can be specified. * @return a ComboBy object suitable for passing to {@link #AggCombo(ComboBy...)} */ public static ComboBy AggUnique(boolean countNulls, Object noKeyValue, Object nonUniqueValue, - final String... matchPairs) { + final String... matchPairs) { return Agg(new UniqueStateFactory(countNulls, noKeyValue, nonUniqueValue), matchPairs); } @@ -485,8 +471,8 @@ public static ComboBy AggUnique(boolean countNulls, Object noKeyValue, Object no * Create a percentile aggregation. * * @param percentile the percentile to calculate - * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the - * Output and Input have the same name, then the column name can be specified. + * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the Output and Input have + * the same name, then the column name can be specified. * @return a ComboBy object suitable for passing to {@link #AggCombo(ComboBy...)} */ public static ComboBy AggPct(double percentile, final String... matchPairs) { @@ -497,15 +483,13 @@ public static ComboBy AggPct(double percentile, final String... matchPairs) { * Create a percentile aggregation. 
* * @param percentile the percentile to calculate - * @param averageMedian if true, then when the upper values and lower values have an equal size; - * average the highest lower value and lowest upper value to produce the median value for - * integers, longs, doubles, and floats - * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the - * Output and Input have the same name, then the column name can be specified. + * @param averageMedian if true, then when the upper values and lower values have an equal size; average the highest + * lower value and lowest upper value to produce the median value for integers, longs, doubles, and floats + * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the Output and Input have + * the same name, then the column name can be specified. * @return a ComboBy object suitable for passing to {@link #AggCombo(ComboBy...)} */ - public static ComboBy AggPct(double percentile, boolean averageMedian, - final String... matchPairs) { + public static ComboBy AggPct(double percentile, boolean averageMedian, final String... matchPairs) { return Agg(new PercentileByStateFactoryImpl(percentile, averageMedian), matchPairs); } @@ -513,8 +497,8 @@ public static ComboBy AggPct(double percentile, boolean averageMedian, * Create an aggregation. * * @param factory aggregation factory. - * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the - * Output and Input have the same name, then the column name can be specified. + * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the Output and Input have + * the same name, then the column name can be specified. * @return a ComboBy object suitable for passing to {@link #AggCombo(ComboBy...)} */ public static ComboBy Agg(AggregationStateFactory factory, final String... 
matchPairs) { @@ -536,8 +520,8 @@ public static ComboBy Agg(AggregationStateFactory factory, final MatchPair... ma * Create an aggregation. * * @param factoryType aggregation factory type. - * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the - * Output and Input have the same name, then the column name can be specified. + * @param matchPairs the columns to apply the aggregation to in the form Output=Input, if the Output and Input have + * the same name, then the column name can be specified. * @return a ComboBy object suitable for passing to {@link #AggCombo(ComboBy...)} */ public static ComboBy Agg(AggType factoryType, final String... matchPairs) { @@ -594,8 +578,7 @@ public static ComboBy Agg(AggType factoryType, final MatchPair... matchPairs) { factory = new UniqueStateFactory(); break; case Skip: - throw new IllegalArgumentException( - "Skip is not a valid aggregation type for AggCombo."); + throw new IllegalArgumentException("Skip is not a valid aggregation type for AggCombo."); default: throw new UnsupportedOperationException("Unknown AggType: " + factoryType); } @@ -606,38 +589,35 @@ public static ComboBy Agg(AggType factoryType, final MatchPair... matchPairs) { * Create a factory for performing rollups. 
*/ public ComboAggregateFactory rollupFactory() { - // we want to leave off the null value column source for children; but add a by external - // combo for the rollup - return new ComboAggregateFactory(Stream.concat( - underlyingAggregations.subList(0, underlyingAggregations.size() - 1).stream().map(x -> { - final AggregationStateFactory underlyingStateFactory = - x.getUnderlyingStateFactory(); - Assert.assertion(underlyingStateFactory instanceof ReaggregatableStatefactory, - "underlyingStateFactory instanceof ReaggregatableStatefactory", - underlyingStateFactory, "UnderlyingStateFactory"); - - // noinspection ConstantConditions - final ReaggregatableStatefactory reaggregatableStatefactory = - (ReaggregatableStatefactory) underlyingStateFactory; - - Assert.assertion(reaggregatableStatefactory.supportsRollup(), - "((ReaggregatableStatefactory)x.getUnderlyingStateFactory()).supportsRollup()", - underlyingStateFactory, "UnderlyingStateFactory"); - final ReaggregatableStatefactory factory = - reaggregatableStatefactory.rollupFactory(); - - final List leftColumns = new ArrayList<>(); - Collections.addAll(leftColumns, MatchPair.getLeftColumns(x.getResultPairs())); - - return Agg(factory, leftColumns.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)); - }), Stream.of(new ExternalComboBy(false))).collect(Collectors.toList()), true, true); + // we want to leave off the null value column source for children; but add a by external combo for the rollup + return new ComboAggregateFactory( + Stream.concat(underlyingAggregations.subList(0, underlyingAggregations.size() - 1).stream().map(x -> { + final AggregationStateFactory underlyingStateFactory = x.getUnderlyingStateFactory(); + Assert.assertion(underlyingStateFactory instanceof ReaggregatableStatefactory, + "underlyingStateFactory instanceof ReaggregatableStatefactory", underlyingStateFactory, + "UnderlyingStateFactory"); + + // noinspection ConstantConditions + final ReaggregatableStatefactory reaggregatableStatefactory = + 
(ReaggregatableStatefactory) underlyingStateFactory; + + Assert.assertion(reaggregatableStatefactory.supportsRollup(), + "((ReaggregatableStatefactory)x.getUnderlyingStateFactory()).supportsRollup()", + underlyingStateFactory, "UnderlyingStateFactory"); + final ReaggregatableStatefactory factory = reaggregatableStatefactory.rollupFactory(); + + final List leftColumns = new ArrayList<>(); + Collections.addAll(leftColumns, MatchPair.getLeftColumns(x.getResultPairs())); + + return Agg(factory, leftColumns.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)); + }), Stream.of(new ExternalComboBy(false))).collect(Collectors.toList()), true, true); } public ComboAggregateFactory forRollup(boolean includeConstituents) { final List newUnderliers = - underlyingAggregations.stream().map(ComboBy::forRollup).collect(Collectors.toList()); + underlyingAggregations.stream().map(ComboBy::forRollup).collect(Collectors.toList()); newUnderliers.add(includeConstituents ? new ExternalComboBy(true) - : new NullComboBy(Collections.singletonMap(RollupInfo.ROLLUP_COLUMN, Object.class))); + : new NullComboBy(Collections.singletonMap(RollupInfo.ROLLUP_COLUMN, Object.class))); return new ComboAggregateFactory(newUnderliers, true, false); } @@ -677,8 +657,8 @@ static List optimize(Aggregation aggregation) { } /** - * Optimizes the aggregations, collapsing relevant aggregations into single {@link ComboBy - * comboBys} where applicable. + * Optimizes the aggregations, collapsing relevant aggregations into single {@link ComboBy comboBys} where + * applicable. * *

    * Note: due to the optimization, the combo bys may not be in the same order as specified in @@ -711,19 +691,16 @@ static public class ComboByImpl implements ComboBy { private final String[] rightColumns; private final AggregationStateFactory underlyingStateFactory; - public ComboByImpl(final AggregationStateFactory underlyingStateFactory, - final String... matchPairs) { + public ComboByImpl(final AggregationStateFactory underlyingStateFactory, final String... matchPairs) { this(underlyingStateFactory, MatchPairFactory.getExpressions(matchPairs)); } @SuppressWarnings("unused") - public ComboByImpl(final AggregationStateFactory underlyingStateFactory, - final Collection matchPairs) { + public ComboByImpl(final AggregationStateFactory underlyingStateFactory, final Collection matchPairs) { this(underlyingStateFactory, MatchPairFactory.getExpressions(matchPairs)); } - ComboByImpl(final AggregationStateFactory underlyingStateFactory, - final MatchPair... matchPairs) { + ComboByImpl(final AggregationStateFactory underlyingStateFactory, final MatchPair... 
matchPairs) { this.matchPairs = matchPairs; this.underlyingStateFactory = underlyingStateFactory; this.rightColumns = new String[matchPairs.length]; @@ -751,14 +728,13 @@ public MatchPair[] getResultPairs() { public ComboBy forRollup() { if (!(underlyingStateFactory instanceof ReaggregatableStatefactory)) { throw new UnsupportedOperationException( - "Not a reaggregatable state factory: " + underlyingStateFactory); + "Not a reaggregatable state factory: " + underlyingStateFactory); } if (!((ReaggregatableStatefactory) underlyingStateFactory).supportsRollup()) { throw new UnsupportedOperationException( - "Underlying state factory does not support rollup: " + underlyingStateFactory); + "Underlying state factory does not support rollup: " + underlyingStateFactory); } - return new ComboByImpl( - ((ReaggregatableStatefactory) underlyingStateFactory).forRollup(), matchPairs); + return new ComboByImpl(((ReaggregatableStatefactory) underlyingStateFactory).forRollup(), matchPairs); } @Override @@ -769,9 +745,9 @@ public AggregationMemoKey getMemoKey() { @Override public String toString() { return "ComboByImpl{" + - "matchPairs=" + Arrays.toString(matchPairs) + - ", underlyingStateFactory=" + underlyingStateFactory + - '}'; + "matchPairs=" + Arrays.toString(matchPairs) + + ", underlyingStateFactory=" + underlyingStateFactory + + '}'; } } @@ -837,8 +813,7 @@ public String[] getSourceColumns() { @Override public MatchPair[] getResultPairs() { - return resultColumns.keySet().stream().map(rc -> new MatchPair(rc, rc)) - .toArray(MatchPair[]::new); + return resultColumns.keySet().stream().map(rc -> new MatchPair(rc, rc)).toArray(MatchPair[]::new); } @Override @@ -876,8 +851,7 @@ public String[] getSourceColumns() { @Override public MatchPair[] getResultPairs() { - return new MatchPair[] { - new MatchPair(RollupInfo.ROLLUP_COLUMN, RollupInfo.ROLLUP_COLUMN)}; + return new MatchPair[] {new MatchPair(RollupInfo.ROLLUP_COLUMN, RollupInfo.ROLLUP_COLUMN)}; } @Override @@ -902,8 +876,7 @@ 
public ComboAggregateFactory(Collection aggregations) { this(aggregations, false, false); } - public ComboAggregateFactory(Collection aggregations, boolean isRollup, - boolean secondLevelRollup) { + public ComboAggregateFactory(Collection aggregations, boolean isRollup, boolean secondLevelRollup) { this.isRollup = isRollup; this.secondLevel = secondLevelRollup; underlyingAggregations.addAll(aggregations); @@ -911,18 +884,18 @@ public ComboAggregateFactory(Collection aggregations, boolean isRollup, final Map> usedColumns = new LinkedHashMap<>(); for (final ComboBy comboBy : underlyingAggregations) { - Stream.of(comboBy.getResultPairs()).map(MatchPair::left).forEach( - rl -> usedColumns.computeIfAbsent(rl, x -> new ArrayList<>()).add(comboBy)); + Stream.of(comboBy.getResultPairs()).map(MatchPair::left) + .forEach(rl -> usedColumns.computeIfAbsent(rl, x -> new ArrayList<>()).add(comboBy)); } final Map> duplicates = - usedColumns.entrySet().stream().filter(kv -> kv.getValue().size() > 1) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + usedColumns.entrySet().stream().filter(kv -> kv.getValue().size() > 1) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); if (!duplicates.isEmpty()) { - final String errors = duplicates.entrySet().stream() - .map(kv -> kv.getKey() + " used " + kv.getValue().size() + " times") - .collect(Collectors.joining(", ")); + final String errors = + duplicates.entrySet().stream().map(kv -> kv.getKey() + " used " + kv.getValue().size() + " times") + .collect(Collectors.joining(", ")); throw new IllegalArgumentException("Duplicate output columns: " + errors); } } @@ -933,16 +906,14 @@ public ComboAggregateFactory(ComboBy... 
aggregations) { @Override public AggregationMemoKey getMemoKey() { - final UnderlyingMemoKey[] underlyingMemoKeys = - new UnderlyingMemoKey[underlyingAggregations.size()]; + final UnderlyingMemoKey[] underlyingMemoKeys = new UnderlyingMemoKey[underlyingAggregations.size()]; for (int ii = 0; ii < underlyingMemoKeys.length; ++ii) { final ComboBy comboBy = underlyingAggregations.get(ii); final AggregationMemoKey key = comboBy.getMemoKey(); if (key == null) { return null; } - underlyingMemoKeys[ii] = - new UnderlyingMemoKey(key, comboBy.getSourceColumns(), comboBy.getResultPairs()); + underlyingMemoKeys[ii] = new UnderlyingMemoKey(key, comboBy.getSourceColumns(), comboBy.getResultPairs()); } return new ComboByMemoKey(underlyingMemoKeys); @@ -955,7 +926,7 @@ private static class UnderlyingMemoKey { private UnderlyingMemoKey(AggregationMemoKey componentMemoKey, String[] sourceColumns, - MatchPair[] resultPairs) { + MatchPair[] resultPairs) { this.componentMemoKey = componentMemoKey; this.sourceColumns = sourceColumns; this.resultPairs = resultPairs; @@ -969,8 +940,8 @@ public boolean equals(Object o) { return false; final UnderlyingMemoKey that = (UnderlyingMemoKey) o; return Objects.equals(componentMemoKey, that.componentMemoKey) && - Arrays.equals(sourceColumns, that.sourceColumns) && - Arrays.equals(resultPairs, that.resultPairs); + Arrays.equals(sourceColumns, that.sourceColumns) && + Arrays.equals(resultPairs, that.resultPairs); } @Override @@ -997,7 +968,7 @@ public int hashCode() { @Override public boolean equals(Object obj) { return obj instanceof ComboByMemoKey - && Arrays.equals(underlyingMemoKeys, ((ComboByMemoKey) obj).underlyingMemoKeys); + && Arrays.equals(underlyingMemoKeys, ((ComboByMemoKey) obj).underlyingMemoKeys); } } @@ -1008,7 +979,7 @@ public String toString() { public List getMatchPairs() { return underlyingAggregations.stream().flatMap(c -> Arrays.stream(c.getResultPairs())) - .collect(Collectors.toList()); + .collect(Collectors.toList()); } 
public AggregationContextFactory makeAggregationContextFactory() { @@ -1027,122 +998,98 @@ public AggregationContextFactory makeAggregationContextFactory() { final boolean isAddOnly = ((BaseTable) table).isAddOnly(); if (comboBy instanceof CountComboBy) { - operators - .add(new CountAggregationOperator(((CountComboBy) comboBy).resultColumn)); + operators.add(new CountAggregationOperator(((CountComboBy) comboBy).resultColumn)); inputColumns.add(null); inputNames.add(CollectionUtil.ZERO_LENGTH_STRING_ARRAY); } else if (comboBy instanceof ComboByImpl) { - final AggregationStateFactory inputAggregationStateFactory = - comboBy.getUnderlyingStateFactory(); + final AggregationStateFactory inputAggregationStateFactory = comboBy.getUnderlyingStateFactory(); - final boolean isNumeric = - inputAggregationStateFactory.getClass() == SumStateFactory.class || + final boolean isNumeric = inputAggregationStateFactory.getClass() == SumStateFactory.class || inputAggregationStateFactory.getClass() == AbsSumStateFactory.class || inputAggregationStateFactory.getClass() == AvgStateFactory.class || inputAggregationStateFactory.getClass() == VarStateFactory.class || inputAggregationStateFactory.getClass() == StdStateFactory.class; final boolean isCountDistinct = - inputAggregationStateFactory.getClass() == CountDistinctStateFactory.class; - final boolean isDistinct = - inputAggregationStateFactory.getClass() == DistinctStateFactory.class; - final boolean isSelectDistinct = inputAggregationStateFactory - .getClass() == SelectDistinctStateFactoryImpl.class; - final boolean isAggUnique = - inputAggregationStateFactory.getClass() == UniqueStateFactory.class; - final boolean isMinMax = - inputAggregationStateFactory instanceof MinMaxByStateFactoryImpl; - final boolean isPercentile = inputAggregationStateFactory - .getClass() == PercentileByStateFactoryImpl.class; + inputAggregationStateFactory.getClass() == CountDistinctStateFactory.class; + final boolean isDistinct = 
inputAggregationStateFactory.getClass() == DistinctStateFactory.class; + final boolean isSelectDistinct = + inputAggregationStateFactory.getClass() == SelectDistinctStateFactoryImpl.class; + final boolean isAggUnique = inputAggregationStateFactory.getClass() == UniqueStateFactory.class; + final boolean isMinMax = inputAggregationStateFactory instanceof MinMaxByStateFactoryImpl; + final boolean isPercentile = + inputAggregationStateFactory.getClass() == PercentileByStateFactoryImpl.class; final boolean isSortedFirstOrLastBy = - inputAggregationStateFactory instanceof SortedFirstOrLastByFactoryImpl; - final boolean isFirst = - inputAggregationStateFactory.getClass() == FirstByStateFactoryImpl.class || - inputAggregationStateFactory - .getClass() == TrackingFirstByStateFactoryImpl.class - || - (inputAggregationStateFactory - .getClass() == KeyOnlyFirstOrLastByStateFactory.class && - !((KeyOnlyFirstOrLastByStateFactory) inputAggregationStateFactory) - .isLast()); - final boolean isLast = - inputAggregationStateFactory.getClass() == LastByStateFactoryImpl.class || - inputAggregationStateFactory - .getClass() == TrackingLastByStateFactoryImpl.class - || - (inputAggregationStateFactory - .getClass() == KeyOnlyFirstOrLastByStateFactory.class && - ((KeyOnlyFirstOrLastByStateFactory) inputAggregationStateFactory) - .isLast()); - final boolean isWeightedAverage = inputAggregationStateFactory - .getClass() == WeightedAverageStateFactoryImpl.class; - final boolean isWeightedSum = inputAggregationStateFactory - .getClass() == WeightedSumStateFactoryImpl.class; - final boolean isAggArray = inputAggregationStateFactory - .getClass() == AggregationIndexStateFactory.class; - final boolean isFormula = inputAggregationStateFactory - .getClass() == AggregationFormulaStateFactory.class; + inputAggregationStateFactory instanceof SortedFirstOrLastByFactoryImpl; + final boolean isFirst = inputAggregationStateFactory.getClass() == FirstByStateFactoryImpl.class || + 
inputAggregationStateFactory.getClass() == TrackingFirstByStateFactoryImpl.class || + (inputAggregationStateFactory.getClass() == KeyOnlyFirstOrLastByStateFactory.class && + !((KeyOnlyFirstOrLastByStateFactory) inputAggregationStateFactory).isLast()); + final boolean isLast = inputAggregationStateFactory.getClass() == LastByStateFactoryImpl.class || + inputAggregationStateFactory.getClass() == TrackingLastByStateFactoryImpl.class || + (inputAggregationStateFactory.getClass() == KeyOnlyFirstOrLastByStateFactory.class && + ((KeyOnlyFirstOrLastByStateFactory) inputAggregationStateFactory).isLast()); + final boolean isWeightedAverage = + inputAggregationStateFactory.getClass() == WeightedAverageStateFactoryImpl.class; + final boolean isWeightedSum = + inputAggregationStateFactory.getClass() == WeightedSumStateFactoryImpl.class; + final boolean isAggArray = + inputAggregationStateFactory.getClass() == AggregationIndexStateFactory.class; + final boolean isFormula = + inputAggregationStateFactory.getClass() == AggregationFormulaStateFactory.class; // noinspection StatementWithEmptyBody if (isSelectDistinct) { - // Select-distinct is accomplished as a side effect of aggregating on the - // group-by columns. + // Select-distinct is accomplished as a side effect of aggregating on the group-by columns. 
} else { final MatchPair[] comboMatchPairs = ((ComboByImpl) comboBy).matchPairs; if (isSortedFirstOrLastBy) { final SortedFirstOrLastByFactoryImpl sortedFirstOrLastByFactory = - (SortedFirstOrLastByFactoryImpl) inputAggregationStateFactory; - final boolean isSortedFirstBy = - sortedFirstOrLastByFactory.isSortedFirst(); + (SortedFirstOrLastByFactoryImpl) inputAggregationStateFactory; + final boolean isSortedFirstBy = sortedFirstOrLastByFactory.isSortedFirst(); final MatchPair[] updatedMatchPairs; if (sortedFirstOrLastByFactory.secondRollup - && sortedFirstOrLastByFactory.getSortColumnNames().length == 1 - && sortedFirstOrLastByFactory.getSortColumnNames()[0] - .endsWith(ROLLUP_COLUMN_SUFFIX)) { - updatedMatchPairs = - Arrays.copyOf(comboMatchPairs, comboMatchPairs.length + 1); - final String redirectionName = - sortedFirstOrLastByFactory.getSortColumnNames()[0]; + && sortedFirstOrLastByFactory.getSortColumnNames().length == 1 + && sortedFirstOrLastByFactory.getSortColumnNames()[0] + .endsWith(ROLLUP_COLUMN_SUFFIX)) { + updatedMatchPairs = Arrays.copyOf(comboMatchPairs, comboMatchPairs.length + 1); + final String redirectionName = sortedFirstOrLastByFactory.getSortColumnNames()[0]; updatedMatchPairs[updatedMatchPairs.length - 1] = - new MatchPair(redirectionName, redirectionName); + new MatchPair(redirectionName, redirectionName); } else { updatedMatchPairs = comboMatchPairs; } - final AggregationContext sflac = - SortedFirstOrLastByAggregationFactory.getAggregationContext(table, - sortedFirstOrLastByFactory.getSortColumnNames(), - isSortedFirstBy, true, updatedMatchPairs); + final AggregationContext sflac = SortedFirstOrLastByAggregationFactory + .getAggregationContext(table, sortedFirstOrLastByFactory.getSortColumnNames(), + isSortedFirstBy, true, updatedMatchPairs); Assert.eq(sflac.operators.length, "sflac.operators.length", 1); Assert.eq(sflac.inputColumns.length, "sflac.operators.length", 1); Assert.eq(sflac.inputNames.length, "sflac.operators.length", 1); 
operators.add(sflac.operators[0]); inputColumns.add(sflac.inputColumns[0]); inputNames.add(sflac.inputNames[0]); - } else if (isNumeric || isMinMax || isPercentile || isCountDistinct - || isDistinct || isAggUnique) { + } else if (isNumeric || isMinMax || isPercentile || isCountDistinct || isDistinct + || isAggUnique) { // add the stuff Arrays.stream(comboMatchPairs).forEach(mp -> { if (isRollup && secondLevel) { - final boolean isAverage = inputAggregationStateFactory - .getClass() == AvgStateFactory.class; - final boolean isStd = inputAggregationStateFactory - .getClass() == StdStateFactory.class; - final boolean isVar = inputAggregationStateFactory - .getClass() == VarStateFactory.class; + final boolean isAverage = + inputAggregationStateFactory.getClass() == AvgStateFactory.class; + final boolean isStd = + inputAggregationStateFactory.getClass() == StdStateFactory.class; + final boolean isVar = + inputAggregationStateFactory.getClass() == VarStateFactory.class; final boolean isStdVar = isStd || isVar; if (isAverage || isStdVar) { - final String runningSumName = mp.left() - + ROLLUP_RUNNING_SUM_COLUMN_ID + ROLLUP_COLUMN_SUFFIX; - final String runningSum2Name = mp.left() - + ROLLUP_RUNNING_SUM2_COLUMN_ID + ROLLUP_COLUMN_SUFFIX; - final String nonNullName = mp.left() - + ROLLUP_NONNULL_COUNT_COLUMN_ID + ROLLUP_COLUMN_SUFFIX; - final String nanName = - mp.left() + ROLLUP_NAN_COLUMN_ID + ROLLUP_COLUMN_SUFFIX; - final String picName = - mp.left() + ROLLUP_PIC_COLUMN_ID + ROLLUP_COLUMN_SUFFIX; - final String nicName = - mp.left() + ROLLUP_NIC_COLUMN_ID + ROLLUP_COLUMN_SUFFIX; + final String runningSumName = + mp.left() + ROLLUP_RUNNING_SUM_COLUMN_ID + ROLLUP_COLUMN_SUFFIX; + final String runningSum2Name = + mp.left() + ROLLUP_RUNNING_SUM2_COLUMN_ID + ROLLUP_COLUMN_SUFFIX; + final String nonNullName = + mp.left() + ROLLUP_NONNULL_COUNT_COLUMN_ID + ROLLUP_COLUMN_SUFFIX; + final String nanName = mp.left() + ROLLUP_NAN_COLUMN_ID + ROLLUP_COLUMN_SUFFIX; + final String 
picName = mp.left() + ROLLUP_PIC_COLUMN_ID + ROLLUP_COLUMN_SUFFIX; + final String nicName = mp.left() + ROLLUP_NIC_COLUMN_ID + ROLLUP_COLUMN_SUFFIX; final boolean isFloatingPoint = table.hasColumns(nanName); @@ -1158,8 +1105,7 @@ public AggregationContextFactory makeAggregationContextFactory() { if (isStdVar) { // noinspection unchecked - inputColumns - .add(table.getColumnSource(runningSum2Name)); + inputColumns.add(table.getColumnSource(runningSum2Name)); inputNames.add(new String[] {runningSum2Name}); } @@ -1176,70 +1122,63 @@ public AggregationContextFactory makeAggregationContextFactory() { inputNames.add(new String[] {nicName}); } - // then the input column for the updater (reavg/revar) - // operator + // then the input column for the updater (reavg/revar) operator inputColumns.add(null); - // now add add the operators, and the final inputNames that - // matches the updating operator + // now add add the operators, and the final inputNames that matches the updating + // operator final LongChunkedSumOperator nonNull = - new LongChunkedSumOperator(false, nonNullName); + new LongChunkedSumOperator(false, nonNullName); operators.add(nonNull); if (isFloatingPoint) { final DoubleChunkedSumOperator runningSum = - new DoubleChunkedSumOperator(false, runningSumName); + new DoubleChunkedSumOperator(false, runningSumName); operators.add(runningSum); final DoubleChunkedSumOperator runningSum2; if (isStdVar) { - runningSum2 = new DoubleChunkedSumOperator(false, - runningSum2Name); + runningSum2 = new DoubleChunkedSumOperator(false, runningSum2Name); operators.add(runningSum2); } else { runningSum2 = null; } final LongChunkedSumOperator nanSum = - new LongChunkedSumOperator(false, nanName); + new LongChunkedSumOperator(false, nanName); operators.add(nanSum); final LongChunkedSumOperator picSum = - new LongChunkedSumOperator(false, picName); + new LongChunkedSumOperator(false, picName); operators.add(picSum); final LongChunkedSumOperator nicSum = - new 
LongChunkedSumOperator(false, nicName); + new LongChunkedSumOperator(false, nicName); operators.add(nicSum); if (isAverage) { if (table.getColumnSource(mp.left()) - .getChunkType() == ChunkType.Float) { - operators.add(new FloatChunkedReAvgOperator( - mp.left(), runningSum, nonNull, nanSum, - picSum, nicSum)); + .getChunkType() == ChunkType.Float) { + operators.add(new FloatChunkedReAvgOperator(mp.left(), runningSum, + nonNull, nanSum, picSum, nicSum)); } else if (table.getColumnSource(mp.left()) - .getChunkType() == ChunkType.Double) { - operators.add(new DoubleChunkedReAvgOperator( - mp.left(), runningSum, nonNull, nanSum, - picSum, nicSum)); + .getChunkType() == ChunkType.Double) { + operators.add(new DoubleChunkedReAvgOperator(mp.left(), runningSum, + nonNull, nanSum, picSum, nicSum)); } else { throw new UnsupportedOperationException(); } } else { - if (table.getColumnSource(mp.left()) - .getChunkType() == ChunkType.Float - || table.getColumnSource(mp.left()) - .getChunkType() == ChunkType.Double) { - operators.add(new FloatChunkedReVarOperator( - mp.left(), isStd, runningSum, runningSum2, - nonNull, nanSum, picSum, nicSum)); + if (table.getColumnSource(mp.left()).getChunkType() == ChunkType.Float + || table.getColumnSource(mp.left()) + .getChunkType() == ChunkType.Double) { + operators.add(new FloatChunkedReVarOperator(mp.left(), isStd, + runningSum, runningSum2, nonNull, nanSum, picSum, nicSum)); } else { throw new UnsupportedOperationException(); } } // our final operator is updated if any input changes - final String[] inputNamesForColumn = - new String[isStdVar ? 6 : 5]; + final String[] inputNamesForColumn = new String[isStdVar ? 
6 : 5]; inputNamesForColumn[0] = nonNullName; inputNamesForColumn[1] = runningSumName; inputNamesForColumn[2] = nanName; @@ -1250,126 +1189,100 @@ public AggregationContextFactory makeAggregationContextFactory() { } inputNames.add(inputNamesForColumn); } else if (isStdVar) { - final boolean isBigInteger = - BigInteger.class.isAssignableFrom(table - .getColumnSource(runningSumName).getType()); - final boolean isBigDecimal = - BigDecimal.class.isAssignableFrom(table - .getColumnSource(runningSumName).getType()); + final boolean isBigInteger = BigInteger.class + .isAssignableFrom(table.getColumnSource(runningSumName).getType()); + final boolean isBigDecimal = BigDecimal.class + .isAssignableFrom(table.getColumnSource(runningSumName).getType()); if (isBigInteger) { final BigIntegerChunkedSumOperator runningSum = - new BigIntegerChunkedSumOperator(false, - runningSumName); + new BigIntegerChunkedSumOperator(false, runningSumName); operators.add(runningSum); final BigIntegerChunkedSumOperator runningSum2 = - new BigIntegerChunkedSumOperator(false, - runningSum2Name); + new BigIntegerChunkedSumOperator(false, runningSum2Name); operators.add(runningSum2); - operators.add( - new BigIntegerChunkedReVarOperator(mp.left(), - isStd, runningSum, runningSum2, nonNull)); + operators.add(new BigIntegerChunkedReVarOperator(mp.left(), isStd, + runningSum, runningSum2, nonNull)); } else if (isBigDecimal) { final BigDecimalChunkedSumOperator runningSum = - new BigDecimalChunkedSumOperator(false, - runningSumName); + new BigDecimalChunkedSumOperator(false, runningSumName); operators.add(runningSum); final BigDecimalChunkedSumOperator runningSum2 = - new BigDecimalChunkedSumOperator(false, - runningSum2Name); + new BigDecimalChunkedSumOperator(false, runningSum2Name); operators.add(runningSum2); - operators.add( - new BigDecimalChunkedReVarOperator(mp.left(), - isStd, runningSum, runningSum2, nonNull)); + operators.add(new BigDecimalChunkedReVarOperator(mp.left(), isStd, + runningSum, 
runningSum2, nonNull)); } else { final DoubleChunkedSumOperator runningSum = - new DoubleChunkedSumOperator(false, - runningSumName); + new DoubleChunkedSumOperator(false, runningSumName); operators.add(runningSum); final DoubleChunkedSumOperator runningSum2 = - new DoubleChunkedSumOperator(false, - runningSum2Name); + new DoubleChunkedSumOperator(false, runningSum2Name); operators.add(runningSum2); - operators - .add(new IntegralChunkedReVarOperator(mp.left(), - isStd, runningSum, runningSum2, nonNull)); + operators.add(new IntegralChunkedReVarOperator(mp.left(), isStd, + runningSum, runningSum2, nonNull)); } // our final operator is updated if any input changes - inputNames.add(new String[] {nonNullName, - runningSumName, runningSum2Name}); + inputNames.add(new String[] {nonNullName, runningSumName, runningSum2Name}); } else { // is an average and not floating point - final boolean isBigDecimal = - BigDecimal.class.isAssignableFrom(table - .getColumnSource(runningSumName).getType()); - final boolean isBigInteger = - BigInteger.class.isAssignableFrom(table - .getColumnSource(runningSumName).getType()); + final boolean isBigDecimal = BigDecimal.class + .isAssignableFrom(table.getColumnSource(runningSumName).getType()); + final boolean isBigInteger = BigInteger.class + .isAssignableFrom(table.getColumnSource(runningSumName).getType()); if (isBigInteger) { final BigIntegerChunkedSumOperator runningSum = - new BigIntegerChunkedSumOperator(false, - runningSumName); + new BigIntegerChunkedSumOperator(false, runningSumName); operators.add(runningSum); - operators.add(new BigIntegerChunkedReAvgOperator( - mp.left(), runningSum, nonNull)); + operators.add(new BigIntegerChunkedReAvgOperator(mp.left(), runningSum, + nonNull)); } else if (isBigDecimal) { final BigDecimalChunkedSumOperator runningSum = - new BigDecimalChunkedSumOperator(false, - runningSumName); + new BigDecimalChunkedSumOperator(false, runningSumName); operators.add(runningSum); - operators.add(new 
BigDecimalChunkedReAvgOperator( - mp.left(), runningSum, nonNull)); + operators.add(new BigDecimalChunkedReAvgOperator(mp.left(), runningSum, + nonNull)); } else { final LongChunkedSumOperator runningSum = - new LongChunkedSumOperator(false, - runningSumName); + new LongChunkedSumOperator(false, runningSumName); operators.add(runningSum); - operators.add(new IntegralChunkedReAvgOperator( - mp.left(), runningSum, nonNull)); + operators.add(new IntegralChunkedReAvgOperator(mp.left(), runningSum, + nonNull)); } // our final operator is updated if any input changes - inputNames - .add(new String[] {nonNullName, runningSumName}); + inputNames.add(new String[] {nonNullName, runningSumName}); } return; } else if (isCountDistinct || isDistinct || isAggUnique) { - final String ssmColName = mp.left() - + ROLLUP_DISTINCT_SSM_COLUMN_ID + ROLLUP_COLUMN_SUFFIX; + final String ssmColName = + mp.left() + ROLLUP_DISTINCT_SSM_COLUMN_ID + ROLLUP_COLUMN_SUFFIX; final ObjectArraySource ssmSource = - (ObjectArraySource) table - .getColumnSource(ssmColName); - final ColumnSource lastLevelResult = - table.getColumnSource(mp.left()); + (ObjectArraySource) table + .getColumnSource(ssmColName); + final ColumnSource lastLevelResult = table.getColumnSource(mp.left()); final boolean countNulls; final IterativeChunkedAggregationOperator op; if (isDistinct) { countNulls = - ((DistinctStateFactory) inputAggregationStateFactory) - .countNulls(); + ((DistinctStateFactory) inputAggregationStateFactory).countNulls(); op = IterativeOperatorStateFactory.getDistinctChunked( - lastLevelResult.getComponentType(), mp.left(), - countNulls, true, true); + lastLevelResult.getComponentType(), mp.left(), countNulls, true, + true); } else if (isCountDistinct) { - countNulls = - ((CountDistinctStateFactory) inputAggregationStateFactory) + countNulls = ((CountDistinctStateFactory) inputAggregationStateFactory) .countNulls(); - op = IterativeOperatorStateFactory - .getCountDistinctChunked( - 
ssmSource.getComponentType(), mp.left(), - countNulls, true, true); + op = IterativeOperatorStateFactory.getCountDistinctChunked( + ssmSource.getComponentType(), mp.left(), countNulls, true, true); } else { countNulls = - ((UniqueStateFactory) inputAggregationStateFactory) - .countNulls(); + ((UniqueStateFactory) inputAggregationStateFactory).countNulls(); op = IterativeOperatorStateFactory.getUniqueChunked( - lastLevelResult.getType(), mp.left(), countNulls, - true, - ((UniqueStateFactory) inputAggregationStateFactory) - .getNoKeyValue(), - ((UniqueStateFactory) inputAggregationStateFactory) - .getNonUniqueValue(), - true); + lastLevelResult.getType(), mp.left(), countNulls, true, + ((UniqueStateFactory) inputAggregationStateFactory).getNoKeyValue(), + ((UniqueStateFactory) inputAggregationStateFactory) + .getNonUniqueValue(), + true); } inputColumns.add(ssmSource); @@ -1382,8 +1295,7 @@ public AggregationContextFactory makeAggregationContextFactory() { final ColumnSource columnSource = table.getColumnSource(mp.right()); final Class type = columnSource.getType(); - final ColumnSource inputSource = - columnSource.getType() == DBDateTime.class + final ColumnSource inputSource = columnSource.getType() == DBDateTime.class ? 
ReinterpretUtilities.dateTimeToLongSource(columnSource) : columnSource; @@ -1391,43 +1303,36 @@ public AggregationContextFactory makeAggregationContextFactory() { final boolean hasSource; if (isMinMax) { final boolean isMinimum = - ((MinMaxByStateFactoryImpl) inputAggregationStateFactory) - .isMinimum(); - final OptionalInt priorMinMax = - IntStream.range(0, inputColumns.size()) + ((MinMaxByStateFactoryImpl) inputAggregationStateFactory).isMinimum(); + final OptionalInt priorMinMax = IntStream.range(0, inputColumns.size()) .filter(idx -> (inputColumns.get(idx) == inputSource) - && (operators - .get(idx) instanceof SsmChunkedMinMaxOperator)) + && (operators.get(idx) instanceof SsmChunkedMinMaxOperator)) .findFirst(); if (priorMinMax.isPresent()) { final SsmChunkedMinMaxOperator ssmChunkedMinMaxOperator = - (SsmChunkedMinMaxOperator) operators - .get(priorMinMax.getAsInt()); - operators.add(ssmChunkedMinMaxOperator - .makeSecondaryOperator(isMinimum, resultName)); + (SsmChunkedMinMaxOperator) operators.get(priorMinMax.getAsInt()); + operators.add( + ssmChunkedMinMaxOperator.makeSecondaryOperator(isMinimum, resultName)); hasSource = false; } else { - operators.add( - IterativeOperatorStateFactory.getMinMaxChunked(type, - isMinimum, isStream || isAddOnly, resultName)); + operators.add(IterativeOperatorStateFactory.getMinMaxChunked(type, isMinimum, + isStream || isAddOnly, resultName)); hasSource = true; } } else if (isPercentile) { if (isRollup) { throw new UnsupportedOperationException( - "Percentile or Median can not be used in a rollup!"); + "Percentile or Median can not be used in a rollup!"); } - operators.add( - IterativeOperatorStateFactory.getPercentileChunked(type, + operators.add(IterativeOperatorStateFactory.getPercentileChunked(type, ((PercentileByStateFactoryImpl) inputAggregationStateFactory) - .getPercentile(), + .getPercentile(), ((PercentileByStateFactoryImpl) inputAggregationStateFactory) - .getAverageMedian(), + .getAverageMedian(), resultName)); 
hasSource = true; } else { - operators.add( - ((IterativeOperatorStateFactory) inputAggregationStateFactory) + operators.add(((IterativeOperatorStateFactory) inputAggregationStateFactory) .getChunkedOperator(type, resultName, isRollup)); hasSource = true; } @@ -1444,11 +1349,10 @@ public AggregationContextFactory makeAggregationContextFactory() { inputColumns.add(null); final String exposeRedirectionAs; if (isRollup) { - exposeRedirectionAs = makeRedirectionName( - (IterativeIndexStateFactory) inputAggregationStateFactory); - } else if (inputAggregationStateFactory instanceof KeyOnlyFirstOrLastByStateFactory) { exposeRedirectionAs = - ((KeyOnlyFirstOrLastByStateFactory) inputAggregationStateFactory) + makeRedirectionName((IterativeIndexStateFactory) inputAggregationStateFactory); + } else if (inputAggregationStateFactory instanceof KeyOnlyFirstOrLastByStateFactory) { + exposeRedirectionAs = ((KeyOnlyFirstOrLastByStateFactory) inputAggregationStateFactory) .getResultColumn(); } else { exposeRedirectionAs = null; @@ -1456,30 +1360,28 @@ public AggregationContextFactory makeAggregationContextFactory() { if (table.isLive()) { if (isStream) { - operators.add(isFirst - ? new StreamFirstChunkedOperator(comboMatchPairs, table) - : new StreamLastChunkedOperator(comboMatchPairs, table)); + operators.add(isFirst ? 
new StreamFirstChunkedOperator(comboMatchPairs, table) + : new StreamLastChunkedOperator(comboMatchPairs, table)); } else if (isAddOnly) { - operators.add(new AddOnlyFirstOrLastChunkedOperator(isFirst, - comboMatchPairs, table, exposeRedirectionAs)); + operators.add(new AddOnlyFirstOrLastChunkedOperator(isFirst, comboMatchPairs, table, + exposeRedirectionAs)); } else { if (trackedFirstOrLastIndex >= 0) { final IterativeChunkedAggregationOperator operator = - operators.get(trackedFirstOrLastIndex); + operators.get(trackedFirstOrLastIndex); final FirstOrLastChunkedOperator firstOrLastChunkedOperator = - (FirstOrLastChunkedOperator) operator; - operators.add(firstOrLastChunkedOperator - .makeSecondaryOperator(isFirst, comboMatchPairs, table, - exposeRedirectionAs)); + (FirstOrLastChunkedOperator) operator; + operators.add(firstOrLastChunkedOperator.makeSecondaryOperator(isFirst, + comboMatchPairs, table, exposeRedirectionAs)); } else { - operators.add(new FirstOrLastChunkedOperator(isFirst, - comboMatchPairs, table, exposeRedirectionAs)); + operators.add(new FirstOrLastChunkedOperator(isFirst, comboMatchPairs, table, + exposeRedirectionAs)); trackedFirstOrLastIndex = operators.size() - 1; } } } else { - operators.add(new StaticFirstOrLastChunkedOperator(isFirst, - comboMatchPairs, table, exposeRedirectionAs)); + operators.add(new StaticFirstOrLastChunkedOperator(isFirst, comboMatchPairs, table, + exposeRedirectionAs)); } inputNames.add(CollectionUtil.ZERO_LENGTH_STRING_ARRAY); } else if (isAggArray) { @@ -1487,23 +1389,19 @@ public AggregationContextFactory makeAggregationContextFactory() { throw streamUnsupported("AggArray"); } inputColumns.add(null); - operators.add( - new ByChunkedOperator((QueryTable) table, true, comboMatchPairs)); + operators.add(new ByChunkedOperator((QueryTable) table, true, comboMatchPairs)); inputNames.add(CollectionUtil.ZERO_LENGTH_STRING_ARRAY); } else if (isFormula) { if (isStream) { throw streamUnsupported("AggFormula"); } final 
AggregationFormulaStateFactory formulaStateFactory = - (AggregationFormulaStateFactory) inputAggregationStateFactory; - final ByChunkedOperator byChunkedOperator = - new ByChunkedOperator((QueryTable) table, false, + (AggregationFormulaStateFactory) inputAggregationStateFactory; + final ByChunkedOperator byChunkedOperator = new ByChunkedOperator((QueryTable) table, false, Arrays.stream(comboMatchPairs).map(MatchPair::right) - .map(MatchPairFactory::getExpression) - .toArray(MatchPair[]::new)); - final FormulaChunkedOperator formulaChunkedOperator = - new FormulaChunkedOperator(byChunkedOperator, true, - formulaStateFactory.getFormula(), + .map(MatchPairFactory::getExpression).toArray(MatchPair[]::new)); + final FormulaChunkedOperator formulaChunkedOperator = new FormulaChunkedOperator( + byChunkedOperator, true, formulaStateFactory.getFormula(), formulaStateFactory.getColumnParamName(), comboMatchPairs); inputColumns.add(null); operators.add(formulaChunkedOperator); @@ -1512,52 +1410,45 @@ public AggregationContextFactory makeAggregationContextFactory() { final String weightName; if (isWeightedAverage) { - weightName = - ((WeightedAverageStateFactoryImpl) inputAggregationStateFactory) + weightName = ((WeightedAverageStateFactoryImpl) inputAggregationStateFactory) .getWeightName(); } else { weightName = - ((WeightedSumStateFactoryImpl) inputAggregationStateFactory) - .getWeightName(); + ((WeightedSumStateFactoryImpl) inputAggregationStateFactory).getWeightName(); } final ColumnSource weightSource = table.getColumnSource(weightName); final DoubleWeightRecordingInternalOperator weightOperator = - new DoubleWeightRecordingInternalOperator( - weightSource.getChunkType()); + new DoubleWeightRecordingInternalOperator(weightSource.getChunkType()); inputColumns.add(weightSource); operators.add(weightOperator); inputNames.add(Stream - .concat(Stream.of(weightName), - Arrays.stream(comboMatchPairs).map(MatchPair::right)) - .toArray(String[]::new)); + 
.concat(Stream.of(weightName), Arrays.stream(comboMatchPairs).map(MatchPair::right)) + .toArray(String[]::new)); Arrays.stream(comboMatchPairs).forEach(mp -> { - final ColumnSource columnSource = - table.getColumnSource(mp.right()); + final ColumnSource columnSource = table.getColumnSource(mp.right()); inputColumns.add(columnSource); inputNames.add(new String[] {weightName, mp.right()}); if (isWeightedAverage) { - operators.add(new ChunkedWeightedAverageOperator( - columnSource.getChunkType(), weightOperator, mp.left())); + operators.add(new ChunkedWeightedAverageOperator(columnSource.getChunkType(), + weightOperator, mp.left())); } else { - operators.add(new DoubleChunkedWeightedSumOperator( - columnSource.getChunkType(), weightOperator, mp.left())); + operators.add(new DoubleChunkedWeightedSumOperator(columnSource.getChunkType(), + weightOperator, mp.left())); } }); } else { throw new UnsupportedOperationException( - "Unknown ComboByImpl: " + inputAggregationStateFactory.getClass()); + "Unknown ComboByImpl: " + inputAggregationStateFactory.getClass()); } } } else if (comboBy instanceof NullComboBy) { - transformers.add(new NullColumnAggregationTransformer( - ((NullComboBy) comboBy).resultColumns)); + transformers.add(new NullColumnAggregationTransformer(((NullComboBy) comboBy).resultColumns)); } else if (comboBy instanceof ExternalComboBy) { if (!isRollup) { - throw new IllegalStateException( - "ExternalComboBy must be used only with rollups."); + throw new IllegalStateException("ExternalComboBy must be used only with rollups."); } inputColumns.add(null); inputNames.add(CollectionUtil.ZERO_LENGTH_STRING_ARRAY); @@ -1572,23 +1463,20 @@ public AggregationContextFactory makeAggregationContextFactory() { final QueryTable parentTable = (QueryTable) table; final QueryTable adjustedTable; final List columnsToDrop = - parentTable.getDefinition().getColumnStream().map(ColumnDefinition::getName) - .filter(cn -> cn.endsWith(ROLLUP_COLUMN_SUFFIX)) - 
.collect(Collectors.toList()); + parentTable.getDefinition().getColumnStream().map(ColumnDefinition::getName) + .filter(cn -> cn.endsWith(ROLLUP_COLUMN_SUFFIX)).collect(Collectors.toList()); if (!columnsToDrop.isEmpty()) { adjustedTable = (QueryTable) parentTable.dropColumns(columnsToDrop); } else { if (includeConstituents) { - adjustedTable = (QueryTable) parentTable - .updateView(RollupInfo.ROLLUP_COLUMN + "=" + null); + adjustedTable = (QueryTable) parentTable.updateView(RollupInfo.ROLLUP_COLUMN + "=" + null); } else { adjustedTable = parentTable; } } - if (adjustedTable != parentTable - && parentTable.hasAttribute(Table.REVERSE_LOOKUP_ATTRIBUTE)) { + if (adjustedTable != parentTable && parentTable.hasAttribute(Table.REVERSE_LOOKUP_ATTRIBUTE)) { adjustedTable.setAttribute(Table.REVERSE_LOOKUP_ATTRIBUTE, - parentTable.getAttribute(Table.REVERSE_LOOKUP_ATTRIBUTE)); + parentTable.getAttribute(Table.REVERSE_LOOKUP_ATTRIBUTE)); } final ByExternalChunkedOperator.AttributeCopier copier; if (includeConstituents) { @@ -1596,29 +1484,25 @@ public AggregationContextFactory makeAggregationContextFactory() { } else { copier = RollupAttributeCopier.DEFAULT_INSTANCE; } - final ByExternalChunkedOperator tableMapOperator = - new ByExternalChunkedOperator(parentTable, adjustedTable, copier, - Collections.emptyList(), groupByColumns); + final ByExternalChunkedOperator tableMapOperator = new ByExternalChunkedOperator(parentTable, + adjustedTable, copier, Collections.emptyList(), groupByColumns); operators.add(tableMapOperator); if (groupByColumns.length == 0) { transformers.add(new StaticColumnSourceTransformer(RollupInfo.ROLLUP_COLUMN, - new SingleValueObjectColumnSource<>(SmartKey.EMPTY))); + new SingleValueObjectColumnSource<>(SmartKey.EMPTY))); } else if (groupByColumns.length == 1) { - transformers - .add(new RollupKeyColumnDuplicationTransformer(groupByColumns[0])); + transformers.add(new RollupKeyColumnDuplicationTransformer(groupByColumns[0])); } else { - transformers - 
.add(new RollupSmartKeyColumnDuplicationTransformer(groupByColumns)); + transformers.add(new RollupSmartKeyColumnDuplicationTransformer(groupByColumns)); } - transformers.add(new RollupTableMapAndReverseLookupAttributeSetter( - tableMapOperator, this, secondLevel, includeConstituents)); + transformers.add(new RollupTableMapAndReverseLookupAttributeSetter(tableMapOperator, this, + secondLevel, includeConstituents)); externalFound = true; } else { - throw new UnsupportedOperationException( - "Unknown ComboBy: " + comboBy.getClass()); + throw new UnsupportedOperationException("Unknown ComboBy: " + comboBy.getClass()); } } @@ -1627,35 +1511,30 @@ public AggregationContextFactory makeAggregationContextFactory() { } final IterativeChunkedAggregationOperator[] operatorsArray = operators.toArray( - IterativeChunkedAggregationOperator.ZERO_LENGTH_ITERATIVE_CHUNKED_AGGREGATION_OPERATOR_ARRAY); - final AggregationContextTransformer[] transformersArray = transformers.toArray( - AggregationContextTransformer.ZERO_LENGTH_AGGREGATION_CONTEXT_TRANSFORMER_ARRAY); - final String[][] inputNamesArray = - inputNames.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY_ARRAY); + IterativeChunkedAggregationOperator.ZERO_LENGTH_ITERATIVE_CHUNKED_AGGREGATION_OPERATOR_ARRAY); + final AggregationContextTransformer[] transformersArray = transformers + .toArray(AggregationContextTransformer.ZERO_LENGTH_AGGREGATION_CONTEXT_TRANSFORMER_ARRAY); + final String[][] inputNamesArray = inputNames.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY_ARRAY); // noinspection unchecked final ChunkSource.WithPrev[] inputColumnsArray = - inputColumns.toArray(ChunkSource.WithPrev.ZERO_LENGTH_CHUNK_SOURCE_WITH_PREV_ARRAY); + inputColumns.toArray(ChunkSource.WithPrev.ZERO_LENGTH_CHUNK_SOURCE_WITH_PREV_ARRAY); - return new AggregationContext(operatorsArray, inputNamesArray, inputColumnsArray, - transformersArray, true); + return new AggregationContext(operatorsArray, inputNamesArray, inputColumnsArray, transformersArray, 
true); }; } - private static UnsupportedOperationException streamUnsupported( - @NotNull final String operatorTypeName) { + private static UnsupportedOperationException streamUnsupported(@NotNull final String operatorTypeName) { return new UnsupportedOperationException("Stream tables do not support " + operatorTypeName - + "; use StreamTableTools.streamToAppendOnlyTable to accumulate full history"); + + "; use StreamTableTools.streamToAppendOnlyTable to accumulate full history"); } @NotNull - private static String makeRedirectionName( - IterativeIndexStateFactory inputAggregationStateFactory) { - return IterativeIndexStateFactory.REDIRECTION_INDEX_PREFIX - + inputAggregationStateFactory.rollupColumnIdentifier + ROLLUP_COLUMN_SUFFIX; + private static String makeRedirectionName(IterativeIndexStateFactory inputAggregationStateFactory) { + return IterativeIndexStateFactory.REDIRECTION_INDEX_PREFIX + inputAggregationStateFactory.rollupColumnIdentifier + + ROLLUP_COLUMN_SUFFIX; } - private static class RollupTableMapAndReverseLookupAttributeSetter - implements AggregationContextTransformer { + private static class RollupTableMapAndReverseLookupAttributeSetter implements AggregationContextTransformer { private final ByExternalChunkedOperator tableMapOperator; private final ComboAggregateFactory factory; private final boolean secondLevel; @@ -1663,7 +1542,7 @@ private static class RollupTableMapAndReverseLookupAttributeSetter private ReverseLookup reverseLookup; RollupTableMapAndReverseLookupAttributeSetter(ByExternalChunkedOperator tableMapOperator, - ComboAggregateFactory factory, boolean secondLevel, boolean includeConstituents) { + ComboAggregateFactory factory, boolean secondLevel, boolean includeConstituents) { this.tableMapOperator = tableMapOperator; this.factory = factory; this.secondLevel = secondLevel; @@ -1672,8 +1551,7 @@ private static class RollupTableMapAndReverseLookupAttributeSetter @Override public QueryTable transformResult(QueryTable table) { - 
table.setAttribute(QueryTable.HIERARCHICAL_CHILDREN_TABLE_MAP_ATTRIBUTE, - tableMapOperator.getTableMap()); + table.setAttribute(QueryTable.HIERARCHICAL_CHILDREN_TABLE_MAP_ATTRIBUTE, tableMapOperator.getTableMap()); if (secondLevel || includeConstituents) { table.setAttribute(Table.REVERSE_LOOKUP_ATTRIBUTE, reverseLookup); } else { @@ -1754,16 +1632,14 @@ private static class ComboByAggregationAdapterOptimizer implements Aggregation.V private final Map> wSums = new HashMap<>(); /** - * We'll do our best to maintain the original combo ordering. This will maintain the - * user-specified order as long as the user aggregation types were all next to each other. + * We'll do our best to maintain the original combo ordering. This will maintain the user-specified order as + * long as the user aggregation types were all next to each other. * * ie: * - * {@code by(..., [ Sum.of(A), Sum.of(B), Avg.of(C), Avg.of(D) ] )} will not need to be - * re-ordered + * {@code by(..., [ Sum.of(A), Sum.of(B), Avg.of(C), Avg.of(D) ] )} will not need to be re-ordered * - * {@code by(..., [ Sum.of(A), Avg.of(C), Avg.of(D), Sum.of(B) ] )} will need to be - * re-ordered + * {@code by(..., [ Sum.of(A), Avg.of(C), Avg.of(D), Sum.of(B) ] )} will need to be re-ordered */ private final LinkedHashSet buildOrder = new LinkedHashSet<>(); @@ -1772,8 +1648,7 @@ private interface BuildLogic { void appendTo(List outs); } - // Unfortunately, it doesn't look like we can add ad-hoc lambdas to buildOrder, they don't - // appear to be equal + // Unfortunately, it doesn't look like we can add ad-hoc lambdas to buildOrder, they don't appear to be equal // across multiple constructions. 
private final BuildLogic buildAbsSums = this::buildAbsSums; private final BuildLogic buildArrays = this::buildArrays; @@ -1807,15 +1682,14 @@ List build() { private void buildWSums(List combos) { for (Map.Entry> e : wSums.entrySet()) { - combos.add(Agg(new WeightedSumStateFactoryImpl(e.getKey().name()), - MatchPair.fromPairs(e.getValue()))); + combos.add(Agg(new WeightedSumStateFactoryImpl(e.getKey().name()), MatchPair.fromPairs(e.getValue()))); } } private void buildWAvgs(List combos) { for (Map.Entry> e : wAvgs.entrySet()) { - combos.add(Agg(new WeightedAverageStateFactoryImpl(e.getKey().name()), - MatchPair.fromPairs(e.getValue()))); + combos.add( + Agg(new WeightedAverageStateFactoryImpl(e.getKey().name()), MatchPair.fromPairs(e.getValue()))); } } @@ -1827,8 +1701,7 @@ private void buildVars(List combos) { private void buildUniques(List combos) { for (Map.Entry> e : uniques.entrySet()) { - combos.add( - Agg(new UniqueStateFactory(e.getKey()), MatchPair.fromPairs(e.getValue()))); + combos.add(Agg(new UniqueStateFactory(e.getKey()), MatchPair.fromPairs(e.getValue()))); } } @@ -1846,20 +1719,18 @@ private void buildStds(List combos) { private void buildSortedLasts(List combos) { for (Map.Entry, List> e : sortedLasts.entrySet()) { - // TODO(deephaven-core#821): SortedFirst / SortedLast aggregations with sort - // direction - String[] columns = e.getKey().stream().map(SortColumn::column).map(ColumnName::name) - .toArray(String[]::new); + // TODO(deephaven-core#821): SortedFirst / SortedLast aggregations with sort direction + String[] columns = + e.getKey().stream().map(SortColumn::column).map(ColumnName::name).toArray(String[]::new); combos.add(Agg(new SortedLastBy(columns), MatchPair.fromPairs(e.getValue()))); } } private void buildSortedFirsts(List combos) { for (Map.Entry, List> e : sortedFirsts.entrySet()) { - // TODO(deephaven-core#821): SortedFirst / SortedLast aggregations with sort - // direction - String[] columns = 
e.getKey().stream().map(SortColumn::column).map(ColumnName::name) - .toArray(String[]::new); + // TODO(deephaven-core#821): SortedFirst / SortedLast aggregations with sort direction + String[] columns = + e.getKey().stream().map(SortColumn::column).map(ColumnName::name).toArray(String[]::new); combos.add(Agg(new SortedFirstBy(columns), MatchPair.fromPairs(e.getValue()))); } } @@ -1867,7 +1738,7 @@ private void buildSortedFirsts(List combos) { private void buildPcts(List combos) { for (Map.Entry> e : pcts.entrySet()) { combos.add(Agg(new PercentileByStateFactoryImpl(e.getKey().getSecondElement(), - e.getKey().getFirstElement() != 0), MatchPair.fromPairs(e.getValue()))); + e.getKey().getFirstElement() != 0), MatchPair.fromPairs(e.getValue()))); } } @@ -1879,8 +1750,7 @@ private void buildMins(List combos) { private void buildMedians(List combos) { for (Map.Entry> e : medians.entrySet()) { - combos.add(Agg(new PercentileByStateFactoryImpl(0.50d, e.getKey()), - MatchPair.fromPairs(e.getValue()))); + combos.add(Agg(new PercentileByStateFactoryImpl(0.50d, e.getKey()), MatchPair.fromPairs(e.getValue()))); } } @@ -1904,15 +1774,13 @@ private void buildFirsts(List combos) { private void buildDistincts(List combos) { for (Map.Entry> e : distincts.entrySet()) { - combos.add( - Agg(new DistinctStateFactory(e.getKey()), MatchPair.fromPairs(e.getValue()))); + combos.add(Agg(new DistinctStateFactory(e.getKey()), MatchPair.fromPairs(e.getValue()))); } } private void buildCountDistincts(List combos) { for (Map.Entry> e : countDistincts.entrySet()) { - combos.add(Agg(new CountDistinctStateFactory(e.getKey()), - MatchPair.fromPairs(e.getValue()))); + combos.add(Agg(new CountDistinctStateFactory(e.getKey()), MatchPair.fromPairs(e.getValue()))); } } @@ -1967,14 +1835,13 @@ public void visit(Count count) { @Override public void visit(CountDistinct countDistinct) { countDistincts.computeIfAbsent(countDistinct.countNulls(), b -> new ArrayList<>()) - .add(countDistinct.pair()); + 
.add(countDistinct.pair()); buildOrder.add(buildCountDistincts); } @Override public void visit(Distinct distinct) { - distincts.computeIfAbsent(distinct.includeNulls(), b -> new ArrayList<>()) - .add(distinct.pair()); + distincts.computeIfAbsent(distinct.includeNulls(), b -> new ArrayList<>()).add(distinct.pair()); buildOrder.add(buildDistincts); } @@ -2017,23 +1884,20 @@ public void visit(Multi multi) { @Override public void visit(Pct pct) { - pcts.computeIfAbsent( - new ByteDoubleTuple(pct.averageMedian() ? (byte) 1 : (byte) 0, pct.percentile()), - b -> new ArrayList<>()).add(pct.pair()); + pcts.computeIfAbsent(new ByteDoubleTuple(pct.averageMedian() ? (byte) 1 : (byte) 0, pct.percentile()), + b -> new ArrayList<>()).add(pct.pair()); buildOrder.add(buildPcts); } @Override public void visit(SortedFirst sortedFirst) { - sortedFirsts.computeIfAbsent(sortedFirst.columns(), b -> new ArrayList<>()) - .add(sortedFirst.pair()); + sortedFirsts.computeIfAbsent(sortedFirst.columns(), b -> new ArrayList<>()).add(sortedFirst.pair()); buildOrder.add(buildSortedFirsts); } @Override public void visit(SortedLast sortedLast) { - sortedLasts.computeIfAbsent(sortedLast.columns(), b -> new ArrayList<>()) - .add(sortedLast.pair()); + sortedLasts.computeIfAbsent(sortedLast.columns(), b -> new ArrayList<>()).add(sortedLast.pair()); buildOrder.add(buildSortedLasts); } @@ -2051,8 +1915,7 @@ public void visit(Sum sum) { @Override public void visit(Unique unique) { - uniques.computeIfAbsent(unique.includeNulls(), b -> new ArrayList<>()) - .add(unique.pair()); + uniques.computeIfAbsent(unique.includeNulls(), b -> new ArrayList<>()).add(unique.pair()); buildOrder.add(buildUniques); } diff --git a/DB/src/main/java/io/deephaven/db/v2/by/CopyingPermutedStreamFirstOrLastChunkedOperator.java b/DB/src/main/java/io/deephaven/db/v2/by/CopyingPermutedStreamFirstOrLastChunkedOperator.java index d0e92c8b14f..27d2a30b2db 100644 --- 
a/DB/src/main/java/io/deephaven/db/v2/by/CopyingPermutedStreamFirstOrLastChunkedOperator.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/CopyingPermutedStreamFirstOrLastChunkedOperator.java @@ -16,18 +16,17 @@ import java.util.Arrays; /** - * Base-class for stream first/last-by chunked operators that need to copy data from source columns - * to result columns with a permutation on the redirected indices. + * Base-class for stream first/last-by chunked operators that need to copy data from source columns to result columns + * with a permutation on the redirected indices. */ -public abstract class CopyingPermutedStreamFirstOrLastChunkedOperator - extends BaseStreamFirstOrLastChunkedOperator { +public abstract class CopyingPermutedStreamFirstOrLastChunkedOperator extends BaseStreamFirstOrLastChunkedOperator { /** * Permute kernels, parallel to {@link #outputColumns}. */ protected final PermuteKernel[] permuteKernels; public CopyingPermutedStreamFirstOrLastChunkedOperator(@NotNull final MatchPair[] resultPairs, - @NotNull final Table streamTable) { + @NotNull final Table streamTable) { super(resultPairs, streamTable); permuteKernels = new PermuteKernel[numResultColumns]; for (int ci = 0; ci < numResultColumns; ++ci) { @@ -47,8 +46,8 @@ public final void startTrackingPrevValues() { /** *

    - * For each destination slot, map to the latest source index key and copy source values to - * destination slots for all result columns. + * For each destination slot, map to the latest source index key and copy source values to destination slots for all + * result columns. * *

    * This implementation proceeds chunk-wise in the following manner: @@ -56,37 +55,34 @@ public final void startTrackingPrevValues() { *

  • Get a chunk of destination slots *
  • Fill a chunk of source indices
  • *
  • Sort the chunk of source indices
  • - *
  • For each input column: get a chunk of input values, permute it into a chunk of - * destination values, and then fill the output column
  • + *
  • For each input column: get a chunk of input values, permute it into a chunk of destination values, and then + * fill the output column
  • * * - * @param destinations The changed (added or modified) destination slots as an - * {@link OrderedKeys} + * @param destinations The changed (added or modified) destination slots as an {@link OrderedKeys} */ protected void copyStreamToResult(@NotNull final OrderedKeys destinations) { try (final SafeCloseableList toClose = new SafeCloseableList()) { - final OrderedKeys.Iterator destinationsIterator = - toClose.add(destinations.getOrderedKeysIterator()); + final OrderedKeys.Iterator destinationsIterator = toClose.add(destinations.getOrderedKeysIterator()); final ChunkSource.FillContext redirectionsContext = - toClose.add(redirections.makeFillContext(COPY_CHUNK_SIZE)); + toClose.add(redirections.makeFillContext(COPY_CHUNK_SIZE)); final WritableLongChunk sourceIndices = - toClose.add(WritableLongChunk.makeWritableChunk(COPY_CHUNK_SIZE)); + toClose.add(WritableLongChunk.makeWritableChunk(COPY_CHUNK_SIZE)); final WritableIntChunk sourceIndicesOrder = - toClose.add(WritableIntChunk.makeWritableChunk(COPY_CHUNK_SIZE)); + toClose.add(WritableIntChunk.makeWritableChunk(COPY_CHUNK_SIZE)); final LongIntTimsortKernel.LongIntSortKernelContext sortKernelContext = - toClose.add(LongIntTimsortKernel.createContext(COPY_CHUNK_SIZE)); + toClose.add(LongIntTimsortKernel.createContext(COPY_CHUNK_SIZE)); final SharedContext inputSharedContext = toClose.add(SharedContext.makeSharedContext()); final ChunkSource.GetContext[] inputContexts = - toClose.addArray(new ChunkSource.GetContext[numResultColumns]); + toClose.addArray(new ChunkSource.GetContext[numResultColumns]); final WritableChunkSink.FillFromContext[] outputContexts = - toClose.addArray(new WritableChunkSink.FillFromContext[numResultColumns]); + toClose.addArray(new WritableChunkSink.FillFromContext[numResultColumns]); // noinspection unchecked final WritableChunk[] outputChunks = - toClose.addArray(new WritableChunk[numResultColumns]); + toClose.addArray(new WritableChunk[numResultColumns]); for (int ci = 0; ci < 
numResultColumns; ++ci) { - inputContexts[ci] = - inputColumns[ci].makeGetContext(COPY_CHUNK_SIZE, inputSharedContext); + inputContexts[ci] = inputColumns[ci].makeGetContext(COPY_CHUNK_SIZE, inputSharedContext); final WritableSource outputColumn = outputColumns[ci]; outputContexts[ci] = outputColumn.makeFillFromContext(COPY_CHUNK_SIZE); outputChunks[ci] = outputColumn.getChunkType().makeWritableChunk(COPY_CHUNK_SIZE); @@ -95,22 +91,19 @@ protected void copyStreamToResult(@NotNull final OrderedKeys destinations) { while (destinationsIterator.hasMore()) { final OrderedKeys sliceDestinations = - destinationsIterator.getNextOrderedKeysWithLength(COPY_CHUNK_SIZE); - redirections.fillChunk(redirectionsContext, WritableLongChunk.upcast(sourceIndices), - sliceDestinations); + destinationsIterator.getNextOrderedKeysWithLength(COPY_CHUNK_SIZE); + redirections.fillChunk(redirectionsContext, WritableLongChunk.upcast(sourceIndices), sliceDestinations); sourceIndicesOrder.setSize(sourceIndices.size()); ChunkUtils.fillInOrder(sourceIndicesOrder); LongIntTimsortKernel.sort(sortKernelContext, sourceIndicesOrder, sourceIndices); - try (final OrderedKeys sliceSources = OrderedKeys - .wrapKeyIndicesChunkAsOrderedKeys(WritableLongChunk.downcast(sourceIndices))) { + try (final OrderedKeys sliceSources = + OrderedKeys.wrapKeyIndicesChunkAsOrderedKeys(WritableLongChunk.downcast(sourceIndices))) { for (int ci = 0; ci < numResultColumns; ++ci) { final Chunk inputChunk = - inputColumns[ci].getChunk(inputContexts[ci], sliceSources); - permuteKernels[ci].permute(inputChunk, sourceIndicesOrder, - outputChunks[ci]); - outputColumns[ci].fillFromChunk(outputContexts[ci], outputChunks[ci], - sliceDestinations); + inputColumns[ci].getChunk(inputContexts[ci], sliceSources); + permuteKernels[ci].permute(inputChunk, sourceIndicesOrder, outputChunks[ci]); + outputColumns[ci].fillFromChunk(outputContexts[ci], outputChunks[ci], sliceDestinations); } inputSharedContext.reset(); } diff --git 
a/DB/src/main/java/io/deephaven/db/v2/by/CountAggregationFactory.java b/DB/src/main/java/io/deephaven/db/v2/by/CountAggregationFactory.java index 7eb2e0085be..66e4fc25036 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/CountAggregationFactory.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/CountAggregationFactory.java @@ -16,9 +16,8 @@ public CountAggregationFactory(final String resultName) { @Override public AggregationContext makeAggregationContext(@NotNull final Table table, - @NotNull final String... groupByColumns) { - final IterativeChunkedAggregationOperator[] countOperator = - new IterativeChunkedAggregationOperator[1]; + @NotNull final String... groupByColumns) { + final IterativeChunkedAggregationOperator[] countOperator = new IterativeChunkedAggregationOperator[1]; countOperator[0] = new CountAggregationOperator(resultName); final String[][] inputNameArray = new String[1][0]; @@ -26,8 +25,8 @@ public AggregationContext makeAggregationContext(@NotNull final Table table, // noinspection unchecked return new AggregationContext(countOperator, - inputNameArray, - nullSourceArray); + inputNameArray, + nullSourceArray); } @Override diff --git a/DB/src/main/java/io/deephaven/db/v2/by/CountAggregationOperator.java b/DB/src/main/java/io/deephaven/db/v2/by/CountAggregationOperator.java index 19040b09c82..014714a8292 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/CountAggregationOperator.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/CountAggregationOperator.java @@ -20,9 +20,9 @@ class CountAggregationOperator implements IterativeChunkedAggregationOperator { @Override public void addChunk(BucketedContext context, Chunk values, - LongChunk inputIndices, IntChunk destinations, - IntChunk startPositions, IntChunk length, - WritableBooleanChunk stateModified) { + LongChunk inputIndices, IntChunk destinations, + IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { for (int ii = 0; ii < startPositions.size(); ++ii) { final int 
startPosition = startPositions.get(ii); final long destination = destinations.get(startPosition); @@ -35,9 +35,9 @@ public void addChunk(BucketedContext context, Chunk values, @Override public void removeChunk(BucketedContext context, Chunk values, - LongChunk inputIndices, IntChunk destinations, - IntChunk startPositions, IntChunk length, - WritableBooleanChunk stateModified) { + LongChunk inputIndices, IntChunk destinations, + IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { for (int ii = 0; ii < startPositions.size(); ++ii) { final int startPosition = startPositions.get(ii); final long destination = destinations.get(startPosition); @@ -50,16 +50,15 @@ public void removeChunk(BucketedContext context, Chunk values, @Override public boolean addChunk(SingletonContext context, int chunkSize, Chunk values, - LongChunk inputIndices, long destination) { + LongChunk inputIndices, long destination) { final long oldCount = countColumnSource.getUnsafe(destination); countColumnSource.set(destination, NullSafeAddition.plusLong(oldCount, chunkSize)); return true; } @Override - public boolean removeChunk(SingletonContext context, int chunkSize, - Chunk values, LongChunk inputIndices, - long destination) { + public boolean removeChunk(SingletonContext context, int chunkSize, Chunk values, + LongChunk inputIndices, long destination) { final long oldCount = countColumnSource.getUnsafe(destination); countColumnSource.set(destination, NullSafeAddition.minusLong(oldCount, chunkSize)); return true; @@ -67,16 +66,15 @@ public boolean removeChunk(SingletonContext context, int chunkSize, @Override public void modifyChunk(BucketedContext context, Chunk previousValues, - Chunk newValues, LongChunk postShiftIndices, - IntChunk destinations, IntChunk startPositions, - IntChunk length, WritableBooleanChunk stateModified) { + Chunk newValues, LongChunk postShiftIndices, + IntChunk destinations, IntChunk startPositions, IntChunk length, + WritableBooleanChunk 
stateModified) { stateModified.fillWithValue(0, startPositions.size(), false); } @Override - public boolean modifyChunk(SingletonContext context, int chunkSize, - Chunk previousValues, Chunk newValues, - LongChunk postShiftIndices, long destination) { + public boolean modifyChunk(SingletonContext context, int chunkSize, Chunk previousValues, + Chunk newValues, LongChunk postShiftIndices, long destination) { return false; } diff --git a/DB/src/main/java/io/deephaven/db/v2/by/CountDistinctStateFactory.java b/DB/src/main/java/io/deephaven/db/v2/by/CountDistinctStateFactory.java index dd1068d1955..486fe0d3464 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/CountDistinctStateFactory.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/CountDistinctStateFactory.java @@ -44,7 +44,7 @@ CountDistinctStateFactory rollupFactory() { @Override public IterativeChunkedAggregationOperator getChunkedOperator(Class type, String name, - boolean exposeInternalColumns) { + boolean exposeInternalColumns) { return getCountDistinctChunked(type, name, countNulls, exposeInternalColumns, secondRollup); } diff --git a/DB/src/main/java/io/deephaven/db/v2/by/DistinctStateFactory.java b/DB/src/main/java/io/deephaven/db/v2/by/DistinctStateFactory.java index ca33a1d9c84..a908d3eccf6 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/DistinctStateFactory.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/DistinctStateFactory.java @@ -1,8 +1,8 @@ package io.deephaven.db.v2.by; /** - * An Iterative state factory that computes the count of distinct items within a particular state. - * It supports rollups, and converts itself into a Sum at the second level of the rollup. + * An Iterative state factory that computes the count of distinct items within a particular state. It supports rollups, + * and converts itself into a Sum at the second level of the rollup. 
*/ public class DistinctStateFactory extends IterativeOperatorStateFactory { private static final AggregationMemoKey NO_NULLS_INSTANCE = new AggregationMemoKey() {}; @@ -45,7 +45,7 @@ DistinctStateFactory rollupFactory() { @Override public IterativeChunkedAggregationOperator getChunkedOperator(Class type, String name, - boolean exposeInternalColumns) { + boolean exposeInternalColumns) { return getDistinctChunked(type, name, countNulls, exposeInternalColumns, secondRollup); } diff --git a/DB/src/main/java/io/deephaven/db/v2/by/FirstByStateFactoryImpl.java b/DB/src/main/java/io/deephaven/db/v2/by/FirstByStateFactoryImpl.java index 2f46d52e3ff..78632e30634 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/FirstByStateFactoryImpl.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/FirstByStateFactoryImpl.java @@ -12,8 +12,7 @@ public FirstByStateFactoryImpl() { this(false, false, 0); } - private FirstByStateFactoryImpl(boolean lowestRollup, boolean secondRollup, - int rollupColumnIdentifier) { + private FirstByStateFactoryImpl(boolean lowestRollup, boolean secondRollup, int rollupColumnIdentifier) { super(lowestRollup, secondRollup, rollupColumnIdentifier); } @@ -35,8 +34,7 @@ ReaggregatableStatefactory forRollup() { @Override ReaggregatableStatefactory rollupFactory() { return new SortedFirstOrLastByFactoryImpl(true, false, true, rollupColumnIdentifier, - REDIRECTION_INDEX_PREFIX + rollupColumnIdentifier - + ComboAggregateFactory.ROLLUP_COLUMN_SUFFIX); + REDIRECTION_INDEX_PREFIX + rollupColumnIdentifier + ComboAggregateFactory.ROLLUP_COLUMN_SUFFIX); } @Override @@ -45,10 +43,10 @@ public String toString() { return "FirstByStateFactory"; } else { return "FirstByStateFactory{" + - "lowestRollup=" + lowestRollup + - ", secondRollup=" + secondRollup + - ", rollupColumnIdentifier=" + rollupColumnIdentifier + - '}'; + "lowestRollup=" + lowestRollup + + ", secondRollup=" + secondRollup + + ", rollupColumnIdentifier=" + rollupColumnIdentifier + + '}'; } } } diff --git 
a/DB/src/main/java/io/deephaven/db/v2/by/FirstOrLastByAggregationFactory.java b/DB/src/main/java/io/deephaven/db/v2/by/FirstOrLastByAggregationFactory.java index 9fa7e461719..19177bf88c8 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/FirstOrLastByAggregationFactory.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/FirstOrLastByAggregationFactory.java @@ -23,8 +23,7 @@ public FirstOrLastByAggregationFactory(boolean isFirst) { public FirstOrLastByAggregationFactory(boolean isFirst, String exposeRedirection) { this.isFirst = isFirst; - this.exposeRedirection = - exposeRedirection == null ? null : NameValidator.validateColumnName(exposeRedirection); + this.exposeRedirection = exposeRedirection == null ? null : NameValidator.validateColumnName(exposeRedirection); } @Override @@ -34,35 +33,30 @@ public boolean allowKeyOnlySubstitution() { @Override public AggregationContext makeAggregationContext(@NotNull final Table table, - @NotNull final String... groupByColumns) { + @NotNull final String... groupByColumns) { // noinspection unchecked final ChunkSource.WithPrev[] inputSource = new ChunkSource.WithPrev[1]; inputSource[0] = null; - final IterativeChunkedAggregationOperator[] operator = - new IterativeChunkedAggregationOperator[1]; + final IterativeChunkedAggregationOperator[] operator = new IterativeChunkedAggregationOperator[1]; final String[][] name = new String[1][0]; name[0] = CollectionUtil.ZERO_LENGTH_STRING_ARRAY; final Set groupBySet = new HashSet<>(Arrays.asList(groupByColumns)); - final MatchPair[] matchPairs = - table.getDefinition().getColumnNames().stream().filter(col -> !groupBySet.contains(col)) - .map(col -> new MatchPair(col, col)).toArray(MatchPair[]::new); + final MatchPair[] matchPairs = table.getDefinition().getColumnNames().stream() + .filter(col -> !groupBySet.contains(col)).map(col -> new MatchPair(col, col)).toArray(MatchPair[]::new); if (table.isLive()) { if (((BaseTable) table).isStream()) { operator[0] = isFirst ? 
new StreamFirstChunkedOperator(matchPairs, table) - : new StreamLastChunkedOperator(matchPairs, table); + : new StreamLastChunkedOperator(matchPairs, table); } else if (((BaseTable) table).isAddOnly()) { - operator[0] = new AddOnlyFirstOrLastChunkedOperator(isFirst, matchPairs, table, - exposeRedirection); + operator[0] = new AddOnlyFirstOrLastChunkedOperator(isFirst, matchPairs, table, exposeRedirection); } else { - operator[0] = - new FirstOrLastChunkedOperator(isFirst, matchPairs, table, exposeRedirection); + operator[0] = new FirstOrLastChunkedOperator(isFirst, matchPairs, table, exposeRedirection); } } else { - operator[0] = - new StaticFirstOrLastChunkedOperator(isFirst, matchPairs, table, exposeRedirection); + operator[0] = new StaticFirstOrLastChunkedOperator(isFirst, matchPairs, table, exposeRedirection); } return new AggregationContext(operator, name, inputSource); diff --git a/DB/src/main/java/io/deephaven/db/v2/by/FirstOrLastChunkedOperator.java b/DB/src/main/java/io/deephaven/db/v2/by/FirstOrLastChunkedOperator.java index 141dbd322ef..0050f04692b 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/FirstOrLastChunkedOperator.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/FirstOrLastChunkedOperator.java @@ -24,7 +24,7 @@ public class FirstOrLastChunkedOperator implements IterativeChunkedAggregationOp private final boolean exposeRedirections; FirstOrLastChunkedOperator(boolean isFirst, MatchPair[] resultPairs, Table originalTable, - String exposeRedirectionAs) { + String exposeRedirectionAs) { this.isFirst = isFirst; this.redirections = new LongArraySource(); this.redirectionIndex = new LongColumnSourceRedirectionIndex(redirections); @@ -33,8 +33,8 @@ public class FirstOrLastChunkedOperator implements IterativeChunkedAggregationOp this.resultColumns = new LinkedHashMap<>(resultPairs.length); for (final MatchPair mp : resultPairs) { // noinspection unchecked - resultColumns.put(mp.left(), new ReadOnlyRedirectedColumnSource(redirectionIndex, - 
originalTable.getColumnSource(mp.right()))); + resultColumns.put(mp.left(), + new ReadOnlyRedirectedColumnSource(redirectionIndex, originalTable.getColumnSource(mp.right()))); } exposeRedirections = exposeRedirectionAs != null; if (exposeRedirectionAs != null) { @@ -44,69 +44,63 @@ public class FirstOrLastChunkedOperator implements IterativeChunkedAggregationOp @Override public void addChunk(BucketedContext bucketedContext, Chunk values, - LongChunk inputIndices, IntChunk destinations, - IntChunk startPositions, IntChunk length, - WritableBooleanChunk stateModified) { + LongChunk inputIndices, IntChunk destinations, + IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { // noinspection unchecked - final LongChunk inputIndicesAsOrdered = - (LongChunk) inputIndices; + final LongChunk inputIndicesAsOrdered = (LongChunk) inputIndices; for (int ii = 0; ii < startPositions.size(); ++ii) { final int startPosition = startPositions.get(ii); final int runLength = length.get(ii); final long destination = destinations.get(startPosition); - stateModified.set(ii, - addChunk(inputIndicesAsOrdered, startPosition, runLength, destination)); + stateModified.set(ii, addChunk(inputIndicesAsOrdered, startPosition, runLength, destination)); } } @Override public void removeChunk(BucketedContext bucketedContext, Chunk values, - LongChunk inputIndices, IntChunk destinations, - IntChunk startPositions, IntChunk length, - WritableBooleanChunk stateModified) { + LongChunk inputIndices, IntChunk destinations, + IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { // noinspection unchecked - final LongChunk inputIndicesAsOrdered = - (LongChunk) inputIndices; + final LongChunk inputIndicesAsOrdered = (LongChunk) inputIndices; for (int ii = 0; ii < startPositions.size(); ++ii) { final int startPosition = startPositions.get(ii); final int runLength = length.get(ii); final long destination = destinations.get(startPosition); - 
stateModified.set(ii, - removeChunk(inputIndicesAsOrdered, startPosition, runLength, destination)); + stateModified.set(ii, removeChunk(inputIndicesAsOrdered, startPosition, runLength, destination)); } } @Override public void modifyChunk(BucketedContext bucketedContext, Chunk previousValues, - Chunk newValues, LongChunk postShiftIndices, - IntChunk destinations, IntChunk startPositions, - IntChunk length, WritableBooleanChunk stateModified) { + Chunk newValues, LongChunk postShiftIndices, + IntChunk destinations, IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { // we have no inputs, so should never get here throw new IllegalStateException(); } @Override public void shiftChunk(BucketedContext bucketedContext, Chunk previousValues, - Chunk newValues, LongChunk preShiftIndices, - LongChunk postShiftIndices, IntChunk destinations, - IntChunk startPositions, IntChunk length, - WritableBooleanChunk stateModified) { + Chunk newValues, LongChunk preShiftIndices, + LongChunk postShiftIndices, IntChunk destinations, + IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { // noinspection unchecked - final LongChunk preShiftIndicesAsOrdered = - (LongChunk) preShiftIndices; + final LongChunk preShiftIndicesAsOrdered = (LongChunk) preShiftIndices; // noinspection unchecked - final LongChunk postShiftIndicesAsOrdered = - (LongChunk) postShiftIndices; + final LongChunk postShiftIndicesAsOrdered = (LongChunk) postShiftIndices; for (int ii = 0; ii < startPositions.size(); ++ii) { final int startPosition = startPositions.get(ii); final int runLength = length.get(ii); final long destination = destinations.get(startPosition); - final long newValue = doShift(preShiftIndicesAsOrdered, postShiftIndicesAsOrdered, - startPosition, runLength, destination); + final long newValue = + doShift(preShiftIndicesAsOrdered, postShiftIndicesAsOrdered, startPosition, runLength, destination); if (exposeRedirections) { final long oldValue = 
redirections.getAndSetUnsafe(destination, newValue); if (oldValue != newValue) { @@ -118,9 +112,8 @@ public void shiftChunk(BucketedContext bucketedContext, Chunk } } - private long doShift(LongChunk preShiftIndices, - LongChunk postShiftIndices, int startPosition, int runLength, - long destination) { + private long doShift(LongChunk preShiftIndices, LongChunk postShiftIndices, + int startPosition, int runLength, long destination) { final Index index = indexForSlot(destination); index.remove(preShiftIndices, startPosition, runLength); index.insert(postShiftIndices, startPosition, runLength); @@ -129,54 +122,47 @@ private long doShift(LongChunk preShiftIndices, @Override public void modifyIndices(BucketedContext context, LongChunk inputIndices, - IntChunk destinations, IntChunk startPositions, - IntChunk length, WritableBooleanChunk stateModified) { + IntChunk destinations, IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { for (int ii = 0; ii < startPositions.size(); ++ii) { final int startPosition = startPositions.get(ii); final int runLength = length.get(ii); final long destination = destinations.get(startPosition); final long redirectedRow = redirections.getUnsafe(destination); - stateModified.set(ii, hasRedirection(inputIndices, redirectedRow, startPosition, - startPosition + runLength)); + stateModified.set(ii, + hasRedirection(inputIndices, redirectedRow, startPosition, startPosition + runLength)); } } @Override - public boolean addChunk(SingletonContext singletonContext, int chunkSize, - Chunk values, LongChunk inputIndices, - long destination) { + public boolean addChunk(SingletonContext singletonContext, int chunkSize, Chunk values, + LongChunk inputIndices, long destination) { // noinspection unchecked - return addChunk((LongChunk) inputIndices, 0, inputIndices.size(), - destination); + return addChunk((LongChunk) inputIndices, 0, inputIndices.size(), destination); } @Override - public boolean removeChunk(SingletonContext 
singletonContext, int chunkSize, - Chunk values, LongChunk inputIndices, - long destination) { + public boolean removeChunk(SingletonContext singletonContext, int chunkSize, Chunk values, + LongChunk inputIndices, long destination) { // noinspection unchecked - return removeChunk((LongChunk) inputIndices, 0, inputIndices.size(), - destination); + return removeChunk((LongChunk) inputIndices, 0, inputIndices.size(), destination); } @Override - public boolean modifyChunk(SingletonContext singletonContext, int chunkSize, - Chunk previousValues, Chunk newValues, - LongChunk postShiftIndices, long destination) { + public boolean modifyChunk(SingletonContext singletonContext, int chunkSize, Chunk previousValues, + Chunk newValues, LongChunk postShiftIndices, long destination) { // we have no inputs, so should never get here throw new IllegalStateException(); } @Override - public boolean shiftChunk(SingletonContext singletonContext, - Chunk previousValues, Chunk newValues, - LongChunk preInputIndices, - LongChunk postInputIndices, long destination) { + public boolean shiftChunk(SingletonContext singletonContext, Chunk previousValues, + Chunk newValues, LongChunk preInputIndices, + LongChunk postInputIndices, long destination) { // noinspection unchecked final long newValue = doShift((LongChunk) preInputIndices, - (LongChunk) postInputIndices, 0, preInputIndices.size(), - destination); + (LongChunk) postInputIndices, 0, preInputIndices.size(), destination); if (exposeRedirections) { final long oldValue = redirections.getAndSetUnsafe(destination, newValue); return oldValue != newValue; @@ -187,8 +173,7 @@ public boolean shiftChunk(SingletonContext singletonContext, } @Override - public boolean modifyIndices(SingletonContext context, LongChunk indices, - long destination) { + public boolean modifyIndices(SingletonContext context, LongChunk indices, long destination) { if (indices.size() == 0) { return false; } @@ -197,8 +182,8 @@ public boolean modifyIndices(SingletonContext 
context, LongChunk indices, long redirectedRow, - int startInclusive, int endExclusive) { + private boolean hasRedirection(LongChunk indices, long redirectedRow, int startInclusive, + int endExclusive) { if (isFirst) { return indices.get(startInclusive) == redirectedRow; } else { @@ -206,8 +191,7 @@ private boolean hasRedirection(LongChunk indices, long red } } - private boolean addChunk(LongChunk indices, int start, int length, - long destination) { + private boolean addChunk(LongChunk indices, int start, int length, long destination) { final Index index = indexForSlot(destination); index.insert(indices, start, length); @@ -234,8 +218,7 @@ private Index indexForSlot(long destination) { return index; } - private boolean removeChunk(LongChunk indices, int start, int length, - long destination) { + private boolean removeChunk(LongChunk indices, int start, int length, long destination) { final Index index = indexForSlot(destination); index.remove(indices, start, length); @@ -274,11 +257,10 @@ public boolean requiresIndices() { return true; } - IterativeChunkedAggregationOperator makeSecondaryOperator(boolean isFirst, - MatchPair[] comboMatchPairs, Table table, String exposeRedirectionAs) { + IterativeChunkedAggregationOperator makeSecondaryOperator(boolean isFirst, MatchPair[] comboMatchPairs, Table table, + String exposeRedirectionAs) { if (this.isFirst == isFirst) { - // we only need more output columns, the redirectionIndex and redirections column are - // totally fine + // we only need more output columns, the redirectionIndex and redirections column are totally fine return new DuplicateOperator(comboMatchPairs, table, exposeRedirectionAs); } else { return new ComplementaryOperator(isFirst, comboMatchPairs, table, exposeRedirectionAs); @@ -288,12 +270,11 @@ IterativeChunkedAggregationOperator makeSecondaryOperator(boolean isFirst, private class DuplicateOperator implements IterativeChunkedAggregationOperator { private final Map> resultColumns = new 
LinkedHashMap<>(); - private DuplicateOperator(MatchPair[] resultPairs, Table table, - String exposeRedirectionAs) { + private DuplicateOperator(MatchPair[] resultPairs, Table table, String exposeRedirectionAs) { for (final MatchPair mp : resultPairs) { // noinspection unchecked - resultColumns.put(mp.left(), new ReadOnlyRedirectedColumnSource(redirectionIndex, - table.getColumnSource(mp.right()))); + resultColumns.put(mp.left(), + new ReadOnlyRedirectedColumnSource(redirectionIndex, table.getColumnSource(mp.right()))); } if (exposeRedirectionAs != null) { resultColumns.put(exposeRedirectionAs, redirections); @@ -302,14 +283,14 @@ private DuplicateOperator(MatchPair[] resultPairs, Table table, @Override public void addChunk(BucketedContext context, Chunk values, - LongChunk inputIndices, IntChunk destinations, - IntChunk startPositions, IntChunk length, - WritableBooleanChunk stateModified) { + LongChunk inputIndices, IntChunk destinations, + IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { checkForChangedRedirections(startPositions, destinations, stateModified); } private void checkForChangedRedirections(IntChunk startPositions, - IntChunk destinations, WritableBooleanChunk stateModified) { + IntChunk destinations, WritableBooleanChunk stateModified) { for (int ii = 0; ii < startPositions.size(); ++ii) { final int startPosition = startPositions.get(ii); final long destination = destinations.get(startPosition); @@ -322,14 +303,14 @@ private void checkForChangedRedirections(IntChunk startPositions } private void checkForMatchingRedirections(IntChunk startPositions, - IntChunk lengths, LongChunk postKeyIndices, - IntChunk destinations, WritableBooleanChunk stateModified) { + IntChunk lengths, LongChunk postKeyIndices, + IntChunk destinations, WritableBooleanChunk stateModified) { for (int ii = 0; ii < startPositions.size(); ++ii) { final int startPosition = startPositions.get(ii); final long destination = 
destinations.get(startPosition); final long redirection = redirections.getUnsafe(destination); final long chunkKey = isFirst ? postKeyIndices.get(startPosition) - : postKeyIndices.get(startPosition + lengths.get(ii) - 1); + : postKeyIndices.get(startPosition + lengths.get(ii) - 1); if (chunkKey == redirection) { stateModified.set(ii, true); } @@ -338,64 +319,58 @@ private void checkForMatchingRedirections(IntChunk startPosition @Override public void removeChunk(BucketedContext context, Chunk values, - LongChunk inputIndices, IntChunk destinations, - IntChunk startPositions, IntChunk length, - WritableBooleanChunk stateModified) { + LongChunk inputIndices, IntChunk destinations, + IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { checkForChangedRedirections(startPositions, destinations, stateModified); } @Override public void modifyChunk(BucketedContext context, Chunk previousValues, - Chunk newValues, LongChunk postShiftIndices, - IntChunk destinations, IntChunk startPositions, - IntChunk length, WritableBooleanChunk stateModified) { - checkForMatchingRedirections(startPositions, length, postShiftIndices, destinations, - stateModified); + Chunk newValues, LongChunk postShiftIndices, + IntChunk destinations, IntChunk startPositions, + IntChunk length, WritableBooleanChunk stateModified) { + checkForMatchingRedirections(startPositions, length, postShiftIndices, destinations, stateModified); } @Override public void shiftChunk(BucketedContext context, Chunk previousValues, - Chunk newValues, LongChunk preShiftIndices, - LongChunk postShiftIndices, IntChunk destinations, - IntChunk startPositions, IntChunk length, - WritableBooleanChunk stateModified) { + Chunk newValues, LongChunk preShiftIndices, + LongChunk postShiftIndices, IntChunk destinations, + IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { checkForChangedRedirections(startPositions, destinations, stateModified); } @Override - public void 
modifyIndices(BucketedContext context, - LongChunk inputIndices, IntChunk destinations, - IntChunk startPositions, IntChunk length, - WritableBooleanChunk stateModified) { - checkForMatchingRedirections(startPositions, length, inputIndices, destinations, - stateModified); + public void modifyIndices(BucketedContext context, LongChunk inputIndices, + IntChunk destinations, IntChunk startPositions, + IntChunk length, WritableBooleanChunk stateModified) { + checkForMatchingRedirections(startPositions, length, inputIndices, destinations, stateModified); } @Override - public boolean addChunk(SingletonContext context, int chunkSize, - Chunk values, LongChunk inputIndices, - long destination) { + public boolean addChunk(SingletonContext context, int chunkSize, Chunk values, + LongChunk inputIndices, long destination) { return redirectionModified(destination); } @Override - public boolean removeChunk(SingletonContext context, int chunkSize, - Chunk values, LongChunk inputIndices, - long destination) { + public boolean removeChunk(SingletonContext context, int chunkSize, Chunk values, + LongChunk inputIndices, long destination) { return redirectionModified(destination); } @Override - public boolean modifyChunk(SingletonContext context, int chunkSize, - Chunk previousValues, Chunk newValues, - LongChunk postShiftIndices, long destination) { + public boolean modifyChunk(SingletonContext context, int chunkSize, Chunk previousValues, + Chunk newValues, LongChunk postShiftIndices, long destination) { return checkSingletonModification(postShiftIndices, destination); } @Override public boolean shiftChunk(SingletonContext context, Chunk previousValues, - Chunk newValues, LongChunk preShiftIndices, - LongChunk postShiftIndices, long destination) { + Chunk newValues, LongChunk preShiftIndices, + LongChunk postShiftIndices, long destination) { if (exposeRedirections) { return checkSingletonModification(postShiftIndices, destination); } else { @@ -407,23 +382,20 @@ private boolean 
redirectionModified(long destination) { return redirections.getUnsafe(destination) != redirections.getPrevLong(destination); } - private boolean checkSingletonModification(LongChunk postShiftIndices, - long destination) { + private boolean checkSingletonModification(LongChunk postShiftIndices, long destination) { return redirections.getUnsafe(destination) == (isFirst ? postShiftIndices.get(0) - : postShiftIndices.get(postShiftIndices.size() - 1)); + : postShiftIndices.get(postShiftIndices.size() - 1)); } @Override - public boolean modifyIndices(SingletonContext context, - LongChunk indices, long destination) { - return redirections.getUnsafe( - destination) == (isFirst ? indices.get(0) : indices.get(indices.size() - 1)); + public boolean modifyIndices(SingletonContext context, LongChunk indices, + long destination) { + return redirections.getUnsafe(destination) == (isFirst ? indices.get(0) : indices.get(indices.size() - 1)); } @Override public boolean addIndex(SingletonContext context, Index index, long destination) { - return redirections - .getUnsafe(destination) == (isFirst ? index.firstKey() : index.lastKey()); + return redirections.getUnsafe(destination) == (isFirst ? 
index.firstKey() : index.lastKey()); } @Override @@ -460,7 +432,7 @@ private class ComplementaryOperator implements IterativeChunkedAggregationOperat private final boolean exposeRedirections; private ComplementaryOperator(boolean isFirst, MatchPair[] resultPairs, Table table, - String exposeRedirectionAs) { + String exposeRedirectionAs) { this.isFirst = isFirst; redirections = new LongArraySource(); @@ -469,8 +441,8 @@ private ComplementaryOperator(boolean isFirst, MatchPair[] resultPairs, Table ta this.resultColumns = new LinkedHashMap<>(resultPairs.length); for (final MatchPair mp : resultPairs) { // noinspection unchecked - resultColumns.put(mp.left(), new ReadOnlyRedirectedColumnSource(redirectionIndex, - table.getColumnSource(mp.right()))); + resultColumns.put(mp.left(), + new ReadOnlyRedirectedColumnSource(redirectionIndex, table.getColumnSource(mp.right()))); } exposeRedirections = exposeRedirectionAs != null; if (exposeRedirections) { @@ -480,37 +452,36 @@ private ComplementaryOperator(boolean isFirst, MatchPair[] resultPairs, Table ta @Override public void addChunk(BucketedContext context, Chunk values, - LongChunk inputIndices, IntChunk destinations, - IntChunk startPositions, IntChunk length, - WritableBooleanChunk stateModified) { + LongChunk inputIndices, IntChunk destinations, + IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { updateBucketed(startPositions, destinations, stateModified); } @Override public void removeChunk(BucketedContext context, Chunk values, - LongChunk inputIndices, IntChunk destinations, - IntChunk startPositions, IntChunk length, - WritableBooleanChunk stateModified) { + LongChunk inputIndices, IntChunk destinations, + IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { updateBucketed(startPositions, destinations, stateModified); } @Override public void shiftChunk(BucketedContext context, Chunk previousValues, - Chunk newValues, LongChunk preShiftIndices, - LongChunk 
postShiftIndices, IntChunk destinations, - IntChunk startPositions, IntChunk length, - WritableBooleanChunk stateModified) { + Chunk newValues, LongChunk preShiftIndices, + LongChunk postShiftIndices, IntChunk destinations, + IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { updateBucketed(startPositions, destinations, stateModified); } - private void updateBucketed(IntChunk startPositions, - IntChunk destinations, WritableBooleanChunk stateModified) { + private void updateBucketed(IntChunk startPositions, IntChunk destinations, + WritableBooleanChunk stateModified) { for (int ii = 0; ii < startPositions.size(); ++ii) { final int startPosition = startPositions.get(ii); final int destination = destinations.get(startPosition); final Index trackingIndex = indices.getUnsafe(destination); - final long trackingKey = - isFirst ? trackingIndex.firstKey() : trackingIndex.lastKey(); + final long trackingKey = isFirst ? trackingIndex.firstKey() : trackingIndex.lastKey(); if (redirections.getUnsafe(destination) != trackingKey) { redirections.set(destination, trackingKey); stateModified.set(ii, true); @@ -520,30 +491,26 @@ private void updateBucketed(IntChunk startPositions, @Override public void modifyChunk(BucketedContext context, Chunk previousValues, - Chunk newValues, LongChunk postShiftIndices, - IntChunk destinations, IntChunk startPositions, - IntChunk length, WritableBooleanChunk stateModified) { - checkForModifications(postShiftIndices, destinations, startPositions, length, - stateModified); + Chunk newValues, LongChunk postShiftIndices, + IntChunk destinations, IntChunk startPositions, + IntChunk length, WritableBooleanChunk stateModified) { + checkForModifications(postShiftIndices, destinations, startPositions, length, stateModified); } @Override - public void modifyIndices(BucketedContext context, - LongChunk inputIndices, IntChunk destinations, - IntChunk startPositions, IntChunk length, - WritableBooleanChunk stateModified) { - 
checkForModifications(inputIndices, destinations, startPositions, length, - stateModified); + public void modifyIndices(BucketedContext context, LongChunk inputIndices, + IntChunk destinations, IntChunk startPositions, + IntChunk length, WritableBooleanChunk stateModified) { + checkForModifications(inputIndices, destinations, startPositions, length, stateModified); } private void checkForModifications(LongChunk inputIndices, - IntChunk destinations, IntChunk startPositions, - IntChunk length, WritableBooleanChunk stateModified) { + IntChunk destinations, IntChunk startPositions, + IntChunk length, WritableBooleanChunk stateModified) { for (int ii = 0; ii < startPositions.size(); ++ii) { final int startPosition = startPositions.get(ii); final long redirection = redirections.getUnsafe(destinations.get(startPosition)); - final int modifiedChunkPosition = - startPosition + (isFirst ? 0 : (length.get(ii) - 1)); + final int modifiedChunkPosition = startPosition + (isFirst ? 0 : (length.get(ii) - 1)); if (inputIndices.get(modifiedChunkPosition) == redirection) { stateModified.set(ii, true); } @@ -551,23 +518,21 @@ private void checkForModifications(LongChunk inputIndices, } @Override - public boolean addChunk(SingletonContext context, int chunkSize, - Chunk values, LongChunk inputIndices, - long destination) { + public boolean addChunk(SingletonContext context, int chunkSize, Chunk values, + LongChunk inputIndices, long destination) { return updateSingleton(destination); } @Override - public boolean removeChunk(SingletonContext context, int chunkSize, - Chunk values, LongChunk inputIndices, - long destination) { + public boolean removeChunk(SingletonContext context, int chunkSize, Chunk values, + LongChunk inputIndices, long destination) { return updateSingleton(destination); } @Override public boolean shiftChunk(SingletonContext context, Chunk previousValues, - Chunk newValues, LongChunk preShiftIndices, - LongChunk postShiftIndices, long destination) { + Chunk 
newValues, LongChunk preShiftIndices, + LongChunk postShiftIndices, long destination) { final boolean changed = updateSingleton(destination); return exposeRedirections && changed; } @@ -578,28 +543,24 @@ public boolean addIndex(SingletonContext context, Index index, long destination) } private boolean updateSingleton(long destination) { - final Index trackedIndex = - Require.neqNull(indices.getUnsafe(destination), "indices.get(destination)"); + final Index trackedIndex = Require.neqNull(indices.getUnsafe(destination), "indices.get(destination)"); final long trackedKey = isFirst ? trackedIndex.firstKey() : trackedIndex.lastKey(); return trackedKey != redirections.getAndSetUnsafe(destination, trackedKey); } @Override - public boolean modifyChunk(SingletonContext context, int chunkSize, - Chunk previousValues, Chunk newValues, - LongChunk postShiftIndices, long destination) { - return checkSingletonModification(postShiftIndices, - redirections.getUnsafe(destination)); + public boolean modifyChunk(SingletonContext context, int chunkSize, Chunk previousValues, + Chunk newValues, LongChunk postShiftIndices, long destination) { + return checkSingletonModification(postShiftIndices, redirections.getUnsafe(destination)); } @Override - public boolean modifyIndices(SingletonContext context, - LongChunk indices, long destination) { + public boolean modifyIndices(SingletonContext context, LongChunk indices, + long destination) { return checkSingletonModification(indices, redirections.getUnsafe(destination)); } - private boolean checkSingletonModification(LongChunk postShiftIndices, - long redirection) { + private boolean checkSingletonModification(LongChunk postShiftIndices, long redirection) { if (isFirst) { return redirection == postShiftIndices.get(0); } else { diff --git a/DB/src/main/java/io/deephaven/db/v2/by/FormulaAggregationFactory.java b/DB/src/main/java/io/deephaven/db/v2/by/FormulaAggregationFactory.java index 50d4f04ea15..f387a914b70 100644 --- 
a/DB/src/main/java/io/deephaven/db/v2/by/FormulaAggregationFactory.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/FormulaAggregationFactory.java @@ -15,16 +15,14 @@ import java.util.stream.Collectors; /** - * An {@link AggregationContextFactory} used in the implementation of - * {@link io.deephaven.db.tables.Table#applyToAllBy}. + * An {@link AggregationContextFactory} used in the implementation of {@link io.deephaven.db.tables.Table#applyToAllBy}. */ public class FormulaAggregationFactory implements AggregationContextFactory { private final String formula; private final String columnParamName; - public FormulaAggregationFactory(@NotNull final String formula, - @NotNull final String columnParamName) { + public FormulaAggregationFactory(@NotNull final String formula, @NotNull final String columnParamName) { this.formula = formula; this.columnParamName = columnParamName; } @@ -36,23 +34,22 @@ public boolean allowKeyOnlySubstitution() { @Override public AggregationContext makeAggregationContext(@NotNull final Table inputTable, - @NotNull final String... groupByColumnNames) { - final Set groupByColumnNameSet = - Arrays.stream(groupByColumnNames).collect(Collectors.toSet()); - final MatchPair[] resultColumns = inputTable.getDefinition().getColumnNames().stream() - .filter(cn -> !groupByColumnNameSet.contains(cn)).map(MatchPairFactory::getExpression) - .toArray(MatchPair[]::new); + @NotNull final String... 
groupByColumnNames) { + final Set groupByColumnNameSet = Arrays.stream(groupByColumnNames).collect(Collectors.toSet()); + final MatchPair[] resultColumns = + inputTable.getDefinition().getColumnNames().stream().filter(cn -> !groupByColumnNameSet.contains(cn)) + .map(MatchPairFactory::getExpression).toArray(MatchPair[]::new); final ByChunkedOperator byChunkedOperator = - new ByChunkedOperator((QueryTable) inputTable, false, resultColumns); - final FormulaChunkedOperator formulaChunkedOperator = new FormulaChunkedOperator( - byChunkedOperator, true, formula, columnParamName, resultColumns); + new ByChunkedOperator((QueryTable) inputTable, false, resultColumns); + final FormulaChunkedOperator formulaChunkedOperator = + new FormulaChunkedOperator(byChunkedOperator, true, formula, columnParamName, resultColumns); // noinspection unchecked return new AggregationContext( - new IterativeChunkedAggregationOperator[] {formulaChunkedOperator}, - new String[][] {CollectionUtil.ZERO_LENGTH_STRING_ARRAY}, - new ChunkSource.WithPrev[] {null}); + new IterativeChunkedAggregationOperator[] {formulaChunkedOperator}, + new String[][] {CollectionUtil.ZERO_LENGTH_STRING_ARRAY}, + new ChunkSource.WithPrev[] {null}); } @Override @@ -61,36 +58,36 @@ public String toString() { } public static QueryTable applyToAllBy(@NotNull final QueryTable inputTable, - @NotNull final String formula, - @NotNull final String columnParamName, - @NotNull final String... groupByColumnNames) { - return applyToAllBy(AggregationControl.DEFAULT_FOR_OPERATOR, inputTable, formula, - columnParamName, groupByColumnNames); + @NotNull final String formula, + @NotNull final String columnParamName, + @NotNull final String... 
groupByColumnNames) { + return applyToAllBy(AggregationControl.DEFAULT_FOR_OPERATOR, inputTable, formula, columnParamName, + groupByColumnNames); } public static QueryTable applyToAllBy(@NotNull final QueryTable inputTable, - @NotNull final String formula, - @NotNull final String columnParamName, - @NotNull final SelectColumn[] groupByColumns) { - return applyToAllBy(AggregationControl.DEFAULT_FOR_OPERATOR, inputTable, formula, - columnParamName, groupByColumns); + @NotNull final String formula, + @NotNull final String columnParamName, + @NotNull final SelectColumn[] groupByColumns) { + return applyToAllBy(AggregationControl.DEFAULT_FOR_OPERATOR, inputTable, formula, columnParamName, + groupByColumns); } public static QueryTable applyToAllBy(@NotNull final AggregationControl aggregationControl, - @NotNull final QueryTable inputTable, - @NotNull final String formula, - @NotNull final String columnParamName, - @NotNull final String... groupByColumnNames) { + @NotNull final QueryTable inputTable, + @NotNull final String formula, + @NotNull final String columnParamName, + @NotNull final String... 
groupByColumnNames) { return applyToAllBy(aggregationControl, inputTable, formula, columnParamName, - SelectColumnFactory.getExpressions(groupByColumnNames)); + SelectColumnFactory.getExpressions(groupByColumnNames)); } public static QueryTable applyToAllBy(@NotNull final AggregationControl aggregationControl, - @NotNull final QueryTable inputTable, - @NotNull final String formula, - @NotNull final String columnParamName, - @NotNull final SelectColumn[] groupByColumns) { + @NotNull final QueryTable inputTable, + @NotNull final String formula, + @NotNull final String columnParamName, + @NotNull final SelectColumn[] groupByColumns) { return ChunkedOperatorAggregationHelper.aggregation(aggregationControl, - new FormulaAggregationFactory(formula, columnParamName), inputTable, groupByColumns); + new FormulaAggregationFactory(formula, columnParamName), inputTable, groupByColumns); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/by/FormulaChunkedOperator.java b/DB/src/main/java/io/deephaven/db/v2/by/FormulaChunkedOperator.java index 1e871306772..1c70c3762b1 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/FormulaChunkedOperator.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/FormulaChunkedOperator.java @@ -63,19 +63,18 @@ class FormulaChunkedOperator implements IterativeChunkedAggregationOperator { * Construct an operator for applying a formula to a set of aggregation result columns. * * @param by The {@link ByChunkedOperator} to use for tracking indices - * @param delegateToBy Whether this operator is responsible for passing methods through to - * {@code by}. Should be false if {@code by} is updated by the helper (and {@code by} - * must come before this operator if so), or if this is not the first operator sharing - * {@code by}. + * @param delegateToBy Whether this operator is responsible for passing methods through to {@code by}. 
Should be + * false if {@code by} is updated by the helper (and {@code by} must come before this operator if so), or if + * this is not the first operator sharing {@code by}. * @param formula The formula, before any column name substitutions * @param columnParamName The token to substitute column names for * @param resultColumnPairs The names for formula input and result columns */ FormulaChunkedOperator(@NotNull final ByChunkedOperator by, - final boolean delegateToBy, - @NotNull final String formula, - @NotNull final String columnParamName, - @NotNull final MatchPair... resultColumnPairs) { + final boolean delegateToBy, + @NotNull final String formula, + @NotNull final String columnParamName, + @NotNull final MatchPair... resultColumnPairs) { this.by = by; this.delegateToBy = delegateToBy; this.inputColumnNames = MatchPair.getRightColumns(resultColumnPairs); @@ -83,105 +82,85 @@ class FormulaChunkedOperator implements IterativeChunkedAggregationOperator { formulaColumns = new DhFormulaColumn[resultColumnPairs.length]; // noinspection unchecked - formulaDataSources = new ChunkSource[resultColumnPairs.length]; // Not populated until - // propagateInitialState + formulaDataSources = new ChunkSource[resultColumnPairs.length]; // Not populated until propagateInitialState resultColumns = new WritableSource[resultColumnPairs.length]; - resultColumnModifiedColumnSets = new ModifiedColumnSet[resultColumnPairs.length]; // Not - // populated - // until + resultColumnModifiedColumnSets = new ModifiedColumnSet[resultColumnPairs.length]; // Not populated until // initializeRefreshing final Map> byResultColumns = by.getResultColumns(); for (int ci = 0; ci < resultColumnPairs.length; ++ci) { final String inputColumnName = inputColumnNames[ci]; final String outputColumnName = resultColumnNames[ci]; - final FormulaColumn formulaColumn = - formulaColumns[ci] = FormulaColumn.createFormulaColumn(outputColumnName, + final FormulaColumn formulaColumn = formulaColumns[ci] = 
FormulaColumn.createFormulaColumn(outputColumnName, Utils.replaceFormulaTokens(formula, columnParamName, inputColumnName)); final ColumnSource inputColumnSource = byResultColumns.get(inputColumnName); final ColumnDefinition inputColumnDefinition = ColumnDefinition - .fromGenericType(inputColumnName, inputColumnSource.getType(), - inputColumnSource.getComponentType()); + .fromGenericType(inputColumnName, inputColumnSource.getType(), + inputColumnSource.getComponentType()); formulaColumn.initDef(Collections.singletonMap(inputColumnName, inputColumnDefinition)); // noinspection unchecked - resultColumns[ci] = - ArrayBackedColumnSource.getMemoryColumnSource(0, formulaColumn.getReturnedType()); + resultColumns[ci] = ArrayBackedColumnSource.getMemoryColumnSource(0, formulaColumn.getReturnedType()); } } @Override - public void addChunk(final BucketedContext bucketedContext, - final Chunk values, - @NotNull final LongChunk inputIndices, - @NotNull final IntChunk destinations, - @NotNull final IntChunk startPositions, - @NotNull final IntChunk length, - @NotNull final WritableBooleanChunk stateModified) { + public void addChunk(final BucketedContext bucketedContext, final Chunk values, + @NotNull final LongChunk inputIndices, + @NotNull final IntChunk destinations, @NotNull final IntChunk startPositions, + @NotNull final IntChunk length, @NotNull final WritableBooleanChunk stateModified) { if (delegateToBy) { - by.addChunk(bucketedContext, values, inputIndices, destinations, startPositions, length, - stateModified); + by.addChunk(bucketedContext, values, inputIndices, destinations, startPositions, length, stateModified); } } @Override - public void removeChunk(final BucketedContext bucketedContext, - final Chunk values, - @NotNull final LongChunk inputIndices, - @NotNull final IntChunk destinations, - @NotNull final IntChunk startPositions, - @NotNull final IntChunk length, - @NotNull final WritableBooleanChunk stateModified) { + public void removeChunk(final 
BucketedContext bucketedContext, final Chunk values, + @NotNull final LongChunk inputIndices, + @NotNull final IntChunk destinations, @NotNull final IntChunk startPositions, + @NotNull final IntChunk length, @NotNull final WritableBooleanChunk stateModified) { if (delegateToBy) { - by.removeChunk(bucketedContext, values, inputIndices, destinations, startPositions, - length, stateModified); + by.removeChunk(bucketedContext, values, inputIndices, destinations, startPositions, length, stateModified); } } @Override - public void modifyChunk(final BucketedContext bucketedContext, - final Chunk previousValues, final Chunk newValues, - @NotNull final LongChunk postShiftIndices, - @NotNull final IntChunk destinations, - @NotNull final IntChunk startPositions, - @NotNull final IntChunk length, - @NotNull final WritableBooleanChunk stateModified) { + public void modifyChunk(final BucketedContext bucketedContext, final Chunk previousValues, + final Chunk newValues, + @NotNull final LongChunk postShiftIndices, + @NotNull final IntChunk destinations, @NotNull final IntChunk startPositions, + @NotNull final IntChunk length, @NotNull final WritableBooleanChunk stateModified) { if (delegateToBy) { - by.modifyChunk(bucketedContext, previousValues, newValues, postShiftIndices, - destinations, startPositions, length, stateModified); + by.modifyChunk(bucketedContext, previousValues, newValues, postShiftIndices, destinations, startPositions, + length, stateModified); } } @Override - public void shiftChunk(final BucketedContext bucketedContext, - final Chunk previousValues, final Chunk newValues, - @NotNull final LongChunk preShiftIndices, - @NotNull final LongChunk postShiftIndices, - @NotNull final IntChunk destinations, - @NotNull final IntChunk startPositions, - @NotNull final IntChunk length, - @NotNull final WritableBooleanChunk stateModified) { + public void shiftChunk(final BucketedContext bucketedContext, final Chunk previousValues, + final Chunk newValues, + @NotNull final 
LongChunk preShiftIndices, + @NotNull final LongChunk postShiftIndices, + @NotNull final IntChunk destinations, @NotNull final IntChunk startPositions, + @NotNull final IntChunk length, @NotNull final WritableBooleanChunk stateModified) { if (delegateToBy) { - by.shiftChunk(bucketedContext, previousValues, newValues, preShiftIndices, - postShiftIndices, destinations, startPositions, length, stateModified); + by.shiftChunk(bucketedContext, previousValues, newValues, preShiftIndices, postShiftIndices, destinations, + startPositions, length, stateModified); } } @Override public void modifyIndices(final BucketedContext context, - @NotNull final LongChunk inputIndices, - @NotNull final IntChunk destinations, - @NotNull final IntChunk startPositions, - @NotNull final IntChunk length, - @NotNull final WritableBooleanChunk stateModified) { + @NotNull final LongChunk inputIndices, + @NotNull final IntChunk destinations, @NotNull final IntChunk startPositions, + @NotNull final IntChunk length, @NotNull final WritableBooleanChunk stateModified) { if (delegateToBy) { - by.modifyIndices(context, inputIndices, destinations, startPositions, length, - stateModified); + by.modifyIndices(context, inputIndices, destinations, startPositions, length, stateModified); } } @Override public boolean addChunk(final SingletonContext singletonContext, final int chunkSize, - final Chunk values, - @NotNull final LongChunk inputIndices, final long destination) { + final Chunk values, + @NotNull final LongChunk inputIndices, final long destination) { if (delegateToBy) { return by.addChunk(singletonContext, chunkSize, values, inputIndices, destination); } else { @@ -191,8 +170,8 @@ public boolean addChunk(final SingletonContext singletonContext, final int chunk @Override public boolean removeChunk(final SingletonContext singletonContext, final int chunkSize, - final Chunk values, - @NotNull final LongChunk inputIndices, final long destination) { + final Chunk values, + @NotNull final LongChunk 
inputIndices, final long destination) { if (delegateToBy) { return by.removeChunk(singletonContext, chunkSize, values, inputIndices, destination); } else { @@ -202,34 +181,34 @@ public boolean removeChunk(final SingletonContext singletonContext, final int ch @Override public boolean modifyChunk(final SingletonContext singletonContext, final int chunkSize, - final Chunk previousValues, final Chunk newValues, - @NotNull final LongChunk postShiftIndices, - final long destination) { + final Chunk previousValues, final Chunk newValues, + @NotNull final LongChunk postShiftIndices, + final long destination) { if (delegateToBy) { - return by.modifyChunk(singletonContext, chunkSize, previousValues, newValues, - postShiftIndices, destination); + return by.modifyChunk(singletonContext, chunkSize, previousValues, newValues, postShiftIndices, + destination); } else { return false; } } @Override - public boolean shiftChunk(final SingletonContext singletonContext, - final Chunk previousValues, final Chunk newValues, - @NotNull final LongChunk preInputIndices, - @NotNull final LongChunk postInputIndices, - final long destination) { + public boolean shiftChunk(final SingletonContext singletonContext, final Chunk previousValues, + final Chunk newValues, + @NotNull final LongChunk preInputIndices, + @NotNull final LongChunk postInputIndices, + final long destination) { if (delegateToBy) { - return by.shiftChunk(singletonContext, previousValues, newValues, preInputIndices, - postInputIndices, destination); + return by.shiftChunk(singletonContext, previousValues, newValues, preInputIndices, postInputIndices, + destination); } else { return false; } } @Override - public boolean modifyIndices(final SingletonContext context, - @NotNull final LongChunk indices, final long destination) { + public boolean modifyIndices(final SingletonContext context, @NotNull final LongChunk indices, + final long destination) { if (delegateToBy) { return by.modifyIndices(context, indices, destination); } 
else { @@ -274,14 +253,13 @@ public void propagateInitialState(@NotNull final QueryTable resultTable) { final FormulaColumn formulaColumn = formulaColumns[ci]; final ColumnSource inputColumnSource = byResultColumns.get(inputColumnName); formulaColumn.initInputs(resultTable.getIndex(), - Collections.singletonMap(inputColumnName, inputColumnSource)); + Collections.singletonMap(inputColumnName, inputColumnSource)); // noinspection unchecked formulaDataSources[ci] = formulaColumn.getDataView(); } final boolean[] allColumnsMask = makeAllColumnsMask(); - try (final DataCopyContext dataCopyContext = - new DataCopyContext(allColumnsMask, allColumnsMask)) { + try (final DataCopyContext dataCopyContext = new DataCopyContext(allColumnsMask, allColumnsMask)) { dataCopyContext.copyData(resultTable.getIndex()); } } @@ -298,22 +276,19 @@ public void startTrackingPrevValues() { } @Override - public UnaryOperator initializeRefreshing( - @NotNull final QueryTable resultTable, - @NotNull final LivenessReferent aggregationUpdateListener) { + public UnaryOperator initializeRefreshing(@NotNull final QueryTable resultTable, + @NotNull final LivenessReferent aggregationUpdateListener) { for (int ci = 0; ci < resultColumnNames.length; ++ci) { - resultColumnModifiedColumnSets[ci] = - resultTable.newModifiedColumnSet(resultColumnNames[ci]); + resultColumnModifiedColumnSets[ci] = resultTable.newModifiedColumnSet(resultColumnNames[ci]); } if (delegateToBy) { - // We cannot use the by's result MCS factory, because the result column names are not - // guaranteed to be the same. + // We cannot use the by's result MCS factory, because the result column names are not guaranteed to be the + // same. by.initializeRefreshing(resultTable, aggregationUpdateListener); } - // Note that we also use the factory in propagateUpdates to identify the set of modified - // columns to handle. + // Note that we also use the factory in propagateUpdates to identify the set of modified columns to handle. 
return inputToResultModifiedColumnSetFactory = - by.makeInputToResultModifiedColumnSetFactory(resultTable, resultColumnNames); + by.makeInputToResultModifiedColumnSetFactory(resultTable, resultColumnNames); } @Override @@ -322,22 +297,21 @@ public void resetForStep(@NotNull final ShiftAwareListener.Update upstream) { by.resetForStep(upstream); } updateUpstreamModifiedColumnSet = - upstream.modified.empty() ? ModifiedColumnSet.EMPTY : upstream.modifiedColumnSet; + upstream.modified.empty() ? ModifiedColumnSet.EMPTY : upstream.modifiedColumnSet; } @Override public void propagateUpdates(@NotNull final ShiftAwareListener.Update downstream, - @NotNull final ReadOnlyIndex newDestinations) { + @NotNull final ReadOnlyIndex newDestinations) { if (delegateToBy) { by.propagateUpdates(downstream, newDestinations); } final ModifiedColumnSet resultModifiedColumnSet = - inputToResultModifiedColumnSetFactory.apply(updateUpstreamModifiedColumnSet); + inputToResultModifiedColumnSetFactory.apply(updateUpstreamModifiedColumnSet); updateUpstreamModifiedColumnSet = null; final boolean addsToProcess = downstream.added.nonempty(); - final boolean modifiesToProcess = - downstream.modified.nonempty() && resultModifiedColumnSet.nonempty(); + final boolean modifiesToProcess = downstream.modified.nonempty() && resultModifiedColumnSet.nonempty(); final boolean removesToProcess = downstream.removed.nonempty(); if (!addsToProcess && !modifiesToProcess && !removesToProcess) { @@ -346,8 +320,7 @@ public void propagateUpdates(@NotNull final ShiftAwareListener.Update downstream // Now we know we have some removes. 
if (!addsToProcess && !modifiesToProcess) { - try (final DataFillerContext dataFillerContext = - new DataFillerContext(makeObjectColumnsMask())) { + try (final DataFillerContext dataFillerContext = new DataFillerContext(makeObjectColumnsMask())) { dataFillerContext.clearObjectColumnData(downstream.removed); } return; @@ -355,15 +328,15 @@ public void propagateUpdates(@NotNull final ShiftAwareListener.Update downstream // Now we know we have some adds or modifies. final boolean[] modifiedColumnsMask = - modifiesToProcess ? makeModifiedColumnsMask(resultModifiedColumnSet) : null; + modifiesToProcess ? makeModifiedColumnsMask(resultModifiedColumnSet) : null; final boolean[] columnsToFillMask = addsToProcess ? makeAllColumnsMask() - : removesToProcess ? makeObjectOrModifiedColumnsMask(resultModifiedColumnSet) - : modifiedColumnsMask; - final boolean[] columnsToGetMask = addsToProcess ? columnsToFillMask - /* This is the result of makeAllColumnsMask() on the line above */ : modifiedColumnsMask; + : removesToProcess ? makeObjectOrModifiedColumnsMask(resultModifiedColumnSet) : modifiedColumnsMask; + final boolean[] columnsToGetMask = addsToProcess ? 
columnsToFillMask /* + * This is the result of + * makeAllColumnsMask() on the line above + */ : modifiedColumnsMask; - try (final DataCopyContext dataCopyContext = - new DataCopyContext(columnsToFillMask, columnsToGetMask)) { + try (final DataCopyContext dataCopyContext = new DataCopyContext(columnsToFillMask, columnsToGetMask)) { if (removesToProcess) { dataCopyContext.clearObjectColumnData(downstream.removed); } @@ -379,7 +352,7 @@ public void propagateUpdates(@NotNull final ShiftAwareListener.Update downstream @Override public void propagateFailure(@NotNull final Throwable originalException, - @NotNull final UpdatePerformanceTracker.Entry sourceEntry) { + @NotNull final UpdatePerformanceTracker.Entry sourceEntry) { if (delegateToBy) { by.propagateFailure(originalException, sourceEntry); } @@ -411,22 +384,18 @@ private DataFillerContext(@NotNull final boolean[] columnsToFillMask) { } void clearObjectColumnData(@NotNull final OrderedKeys orderedKeys) { - try ( - final OrderedKeys.Iterator orderedKeysIterator = - orderedKeys.getOrderedKeysIterator(); - final WritableObjectChunk nullValueChunk = - WritableObjectChunk.makeWritableChunk(BLOCK_SIZE)) { + try (final OrderedKeys.Iterator orderedKeysIterator = orderedKeys.getOrderedKeysIterator(); + final WritableObjectChunk nullValueChunk = + WritableObjectChunk.makeWritableChunk(BLOCK_SIZE)) { nullValueChunk.fillWithNullValue(0, BLOCK_SIZE); while (orderedKeysIterator.hasMore()) { - final OrderedKeys orderedKeysSlice = - orderedKeysIterator.getNextOrderedKeysThrough( + final OrderedKeys orderedKeysSlice = orderedKeysIterator.getNextOrderedKeysThrough( calculateContainingBlockLastKey(orderedKeysIterator.peekNextKey())); nullValueChunk.setSize(orderedKeysSlice.intSize()); for (int ci = 0; ci < columnsToFillMask.length; ++ci) { final WritableSource resultColumn = resultColumns[ci]; if (columnsToFillMask[ci] && !resultColumn.getType().isPrimitive()) { - resultColumn.fillFromChunk(fillFromContexts[ci], nullValueChunk, - 
orderedKeysSlice); + resultColumn.fillFromChunk(fillFromContexts[ci], nullValueChunk, orderedKeysSlice); } } } @@ -445,16 +414,14 @@ private class DataCopyContext extends DataFillerContext { private final SharedContext sharedContext; private final GetContext[] getContexts; - private DataCopyContext(@NotNull final boolean[] columnsToFillMask, - @NotNull final boolean[] columnsToGetMask) { + private DataCopyContext(@NotNull final boolean[] columnsToFillMask, @NotNull final boolean[] columnsToGetMask) { super(columnsToFillMask); this.columnsToGetMask = columnsToGetMask; sharedContext = SharedContext.makeSharedContext(); getContexts = new GetContext[resultColumnNames.length]; for (int ci = 0; ci < resultColumnNames.length; ++ci) { if (columnsToGetMask[ci]) { - getContexts[ci] = - formulaDataSources[ci].makeGetContext(BLOCK_SIZE, sharedContext); + getContexts[ci] = formulaDataSources[ci].makeGetContext(BLOCK_SIZE, sharedContext); } } } @@ -463,19 +430,16 @@ private void copyData(@NotNull final OrderedKeys orderedKeys) { copyData(orderedKeys, columnsToGetMask); } - private void copyData(@NotNull final OrderedKeys orderedKeys, - @NotNull final boolean[] columnsMask) { - try (final OrderedKeys.Iterator orderedKeysIterator = - orderedKeys.getOrderedKeysIterator()) { + private void copyData(@NotNull final OrderedKeys orderedKeys, @NotNull final boolean[] columnsMask) { + try (final OrderedKeys.Iterator orderedKeysIterator = orderedKeys.getOrderedKeysIterator()) { while (orderedKeysIterator.hasMore()) { - final OrderedKeys orderedKeysSlice = - orderedKeysIterator.getNextOrderedKeysThrough( + final OrderedKeys orderedKeysSlice = orderedKeysIterator.getNextOrderedKeysThrough( calculateContainingBlockLastKey(orderedKeysIterator.peekNextKey())); for (int ci = 0; ci < columnsToGetMask.length; ++ci) { if (columnsMask[ci]) { resultColumns[ci].fillFromChunk(fillFromContexts[ci], - formulaDataSources[ci].getChunk(getContexts[ci], orderedKeysSlice), - orderedKeysSlice); + 
formulaDataSources[ci].getChunk(getContexts[ci], orderedKeysSlice), + orderedKeysSlice); } } } @@ -508,22 +472,19 @@ private boolean[] makeObjectColumnsMask() { return columnsMask; } - private boolean[] makeModifiedColumnsMask( - @NotNull final ModifiedColumnSet resultModifiedColumnSet) { + private boolean[] makeModifiedColumnsMask(@NotNull final ModifiedColumnSet resultModifiedColumnSet) { final boolean[] columnsMask = new boolean[resultColumnModifiedColumnSets.length]; for (int ci = 0; ci < resultColumnModifiedColumnSets.length; ++ci) { - columnsMask[ci] = - resultModifiedColumnSet.containsAny(resultColumnModifiedColumnSets[ci]); + columnsMask[ci] = resultModifiedColumnSet.containsAny(resultColumnModifiedColumnSets[ci]); } return columnsMask; } - private boolean[] makeObjectOrModifiedColumnsMask( - @NotNull final ModifiedColumnSet resultModifiedColumnSet) { + private boolean[] makeObjectOrModifiedColumnsMask(@NotNull final ModifiedColumnSet resultModifiedColumnSet) { final boolean[] columnsMask = new boolean[resultColumns.length]; for (int ci = 0; ci < resultColumns.length; ++ci) { columnsMask[ci] = !resultColumns[ci].getType().isPrimitive() - || resultModifiedColumnSet.containsAny(resultColumnModifiedColumnSets[ci]); + || resultModifiedColumnSet.containsAny(resultColumnModifiedColumnSets[ci]); } return columnsMask; } diff --git a/DB/src/main/java/io/deephaven/db/v2/by/FpChunkedNonNormalCounter.java b/DB/src/main/java/io/deephaven/db/v2/by/FpChunkedNonNormalCounter.java index 3a0833d24c8..a5161103376 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/FpChunkedNonNormalCounter.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/FpChunkedNonNormalCounter.java @@ -67,8 +67,7 @@ final long updateNanCount(long destination, int oldNans, int newNans) { } final long totalNanCount; - totalNanCount = - NullSafeAddition.plusLong(nanCount.getUnsafe(destination), newNans - oldNans); + totalNanCount = NullSafeAddition.plusLong(nanCount.getUnsafe(destination), newNans - 
oldNans); nanCount.set(destination, totalNanCount); return totalNanCount; } @@ -84,8 +83,8 @@ final long updatePositiveInfinityCount(long destination, int newPositiveInfinity } final long totalPositiveInfinityCount; if (hasPositiveInfinities) { - totalPositiveInfinityCount = NullSafeAddition - .plusLong(positiveInfinityCount.getUnsafe(destination), newPositiveInfinity); + totalPositiveInfinityCount = + NullSafeAddition.plusLong(positiveInfinityCount.getUnsafe(destination), newPositiveInfinity); if (newPositiveInfinity != 0) { positiveInfinityCount.set(destination, totalPositiveInfinityCount); } @@ -96,8 +95,7 @@ final long updatePositiveInfinityCount(long destination, int newPositiveInfinity } - final long updatePositiveInfinityCount(long destination, int oldPositiveInfinities, - int newPositiveInfinities) { + final long updatePositiveInfinityCount(long destination, int oldPositiveInfinities, int newPositiveInfinities) { if (newPositiveInfinities == oldPositiveInfinities) { if (hasPositiveInfinities) { return positiveInfinityCount.getUnsafe(destination); @@ -116,8 +114,7 @@ final long updatePositiveInfinityCount(long destination, int oldPositiveInfiniti } final long totalPositiveInfinityCount; - totalPositiveInfinityCount = - NullSafeAddition.plusLong(positiveInfinityCount.getUnsafe(destination), + totalPositiveInfinityCount = NullSafeAddition.plusLong(positiveInfinityCount.getUnsafe(destination), newPositiveInfinities - oldPositiveInfinities); positiveInfinityCount.set(destination, totalPositiveInfinityCount); return totalPositiveInfinityCount; @@ -134,8 +131,8 @@ final long updateNegativeInfinityCount(long destination, int newNegativeInfinity } final long totalNegativeInfinityCount; if (hasNegativeInfinities) { - totalNegativeInfinityCount = NullSafeAddition - .plusLong(negativeInfinityCount.getUnsafe(destination), newNegativeInfinity); + totalNegativeInfinityCount = + NullSafeAddition.plusLong(negativeInfinityCount.getUnsafe(destination), newNegativeInfinity); 
if (newNegativeInfinity != 0) { negativeInfinityCount.set(destination, totalNegativeInfinityCount); } @@ -145,8 +142,7 @@ final long updateNegativeInfinityCount(long destination, int newNegativeInfinity return totalNegativeInfinityCount; } - final long updateNegativeInfinityCount(long destination, int oldNegativeInfinities, - int newNegativeInfinities) { + final long updateNegativeInfinityCount(long destination, int oldNegativeInfinities, int newNegativeInfinities) { if (newNegativeInfinities == oldNegativeInfinities) { if (hasNegativeInfinities) { return negativeInfinityCount.getUnsafe(destination); @@ -165,8 +161,7 @@ final long updateNegativeInfinityCount(long destination, int oldNegativeInfiniti } final long totalNegativeInfinityCount; - totalNegativeInfinityCount = - NullSafeAddition.plusLong(negativeInfinityCount.getUnsafe(destination), + totalNegativeInfinityCount = NullSafeAddition.plusLong(negativeInfinityCount.getUnsafe(destination), newNegativeInfinities - oldNegativeInfinities); negativeInfinityCount.set(destination, totalNegativeInfinityCount); return totalNegativeInfinityCount; @@ -203,26 +198,25 @@ Map> fpInternalColumnSources(final String name) { if (nanCount != null) { results.put(name + ROLLUP_NAN_COLUMN_ID + ROLLUP_COLUMN_SUFFIX, nanCount); } else { - results.put(name + ROLLUP_NAN_COLUMN_ID + ROLLUP_COLUMN_SUFFIX, - new WrappedLongArraySource(() -> nanCount)); + results.put(name + ROLLUP_NAN_COLUMN_ID + ROLLUP_COLUMN_SUFFIX, new WrappedLongArraySource(() -> nanCount)); } if (positiveInfinityCount != null) { results.put(name + ROLLUP_PIC_COLUMN_ID + ROLLUP_COLUMN_SUFFIX, positiveInfinityCount); } else { results.put(name + ROLLUP_PIC_COLUMN_ID + ROLLUP_COLUMN_SUFFIX, - new WrappedLongArraySource(() -> positiveInfinityCount)); + new WrappedLongArraySource(() -> positiveInfinityCount)); } if (negativeInfinityCount != null) { results.put(name + ROLLUP_NIC_COLUMN_ID + ROLLUP_COLUMN_SUFFIX, negativeInfinityCount); } else { results.put(name + 
ROLLUP_NIC_COLUMN_ID + ROLLUP_COLUMN_SUFFIX, - new WrappedLongArraySource(() -> negativeInfinityCount)); + new WrappedLongArraySource(() -> negativeInfinityCount)); } return results; } private static class WrappedLongArraySource extends AbstractColumnSource - implements MutableColumnSourceGetDefaults.ForLong { + implements MutableColumnSourceGetDefaults.ForLong { final Supplier sourceSupplier; private WrappedLongArraySource(Supplier sourceSupplier) { @@ -272,8 +266,7 @@ public FillContext makeFillContext(int chunkCapacity, SharedContext sharedContex @Override public void fillChunk(@NotNull FillContext context, - @NotNull WritableChunk destination, - @NotNull OrderedKeys orderedKeys) { + @NotNull WritableChunk destination, @NotNull OrderedKeys orderedKeys) { final LongArraySource longArraySource = sourceSupplier.get(); if (longArraySource == null) { destination.fillWithNullValue(0, orderedKeys.intSize()); @@ -284,8 +277,7 @@ public void fillChunk(@NotNull FillContext context, @Override public void fillPrevChunk(@NotNull FillContext context, - @NotNull WritableChunk destination, - @NotNull OrderedKeys orderedKeys) { + @NotNull WritableChunk destination, @NotNull OrderedKeys orderedKeys) { final LongArraySource longArraySource = sourceSupplier.get(); if (longArraySource == null) { destination.fillWithNullValue(0, orderedKeys.intSize()); @@ -296,7 +288,7 @@ public void fillPrevChunk(@NotNull FillContext context, @Override public Chunk getChunk(@NotNull GetContext context, - @NotNull OrderedKeys orderedKeys) { + @NotNull OrderedKeys orderedKeys) { final LongArraySource longArraySource = sourceSupplier.get(); if (longArraySource == null) { return doNullFill((DefaultGetContext) context, orderedKeys.intSize()); @@ -306,12 +298,11 @@ public Chunk getChunk(@NotNull GetContext context, } @Override - public Chunk getChunk(@NotNull GetContext context, - long firstKey, long lastKey) { + public Chunk getChunk(@NotNull GetContext context, long firstKey, long lastKey) { final 
LongArraySource longArraySource = sourceSupplier.get(); if (longArraySource == null) { return doNullFill((DefaultGetContext) context, - LongSizedDataStructure.intSize("getChunk", lastKey - firstKey + 1)); + LongSizedDataStructure.intSize("getChunk", lastKey - firstKey + 1)); } else { return longArraySource.getChunk(context, firstKey, lastKey); } @@ -328,7 +319,7 @@ private Chunk doNullFill(@NotNull DefaultGetContext context, @Override public Chunk getPrevChunk(@NotNull GetContext context, - @NotNull OrderedKeys orderedKeys) { + @NotNull OrderedKeys orderedKeys) { final LongArraySource longArraySource = sourceSupplier.get(); if (longArraySource == null) { return doNullFill((DefaultGetContext) context, orderedKeys.intSize()); @@ -338,12 +329,12 @@ public Chunk getPrevChunk(@NotNull GetContext conte } @Override - public Chunk getPrevChunk(@NotNull GetContext context, - long firstKey, long lastKey) { + public Chunk getPrevChunk(@NotNull GetContext context, long firstKey, + long lastKey) { final LongArraySource longArraySource = sourceSupplier.get(); if (longArraySource == null) { return doNullFill((DefaultGetContext) context, - LongSizedDataStructure.intSize("getPrevChunk", lastKey - firstKey + 1)); + LongSizedDataStructure.intSize("getPrevChunk", lastKey - firstKey + 1)); } else { return longArraySource.getPrevChunk(context, firstKey, lastKey); } diff --git a/DB/src/main/java/io/deephaven/db/v2/by/FreezeByAggregationFactory.java b/DB/src/main/java/io/deephaven/db/v2/by/FreezeByAggregationFactory.java index d5eeee4707b..19a6a527527 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/FreezeByAggregationFactory.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/FreezeByAggregationFactory.java @@ -25,12 +25,11 @@ public boolean allowKeyOnlySubstitution() { @Override public AggregationContext makeAggregationContext(@NotNull final Table table, - @NotNull final String... groupByColumns) { + @NotNull final String... 
groupByColumns) { return getAllColumnOperators(table, groupByColumns); } - private static AggregationContext getAllColumnOperators(Table withView, - String[] groupByNameArray) { + private static AggregationContext getAllColumnOperators(Table withView, String[] groupByNameArray) { final Set groupByNames = new HashSet<>(Arrays.asList(groupByNameArray)); final int operatorCount = withView.getColumnSourceMap().size() - groupByNames.size() + 1; @@ -49,11 +48,11 @@ private static AggregationContext getAllColumnOperators(Table withView, final Class type = columnSource.getType(); - // For DBDateTime columns, the in-memory source uses longs internally, and all supported - // aggregations (i.e. min and max) work correctly against longs. - final ColumnSource inputSource = columnSource.getType() == DBDateTime.class - ? ReinterpretUtilities.dateTimeToLongSource(columnSource) - : columnSource; + // For DBDateTime columns, the in-memory source uses longs internally, and all supported aggregations (i.e. + // min and max) work correctly against longs. + final ColumnSource inputSource = + columnSource.getType() == DBDateTime.class ? 
ReinterpretUtilities.dateTimeToLongSource(columnSource) + : columnSource; // noinspection unchecked inputColumns.add(inputSource); @@ -68,10 +67,11 @@ private static AggregationContext getAllColumnOperators(Table withView, } // noinspection unchecked - return new AggregationContext(operators.toArray( - IterativeChunkedAggregationOperator.ZERO_LENGTH_ITERATIVE_CHUNKED_AGGREGATION_OPERATOR_ARRAY), - inputNameArray, - inputColumns.toArray(ChunkSource.WithPrev.ZERO_LENGTH_CHUNK_SOURCE_WITH_PREV_ARRAY)); + return new AggregationContext( + operators.toArray( + IterativeChunkedAggregationOperator.ZERO_LENGTH_ITERATIVE_CHUNKED_AGGREGATION_OPERATOR_ARRAY), + inputNameArray, + inputColumns.toArray(ChunkSource.WithPrev.ZERO_LENGTH_CHUNK_SOURCE_WITH_PREV_ARRAY)); } @Override diff --git a/DB/src/main/java/io/deephaven/db/v2/by/HashTableColumnSource.java b/DB/src/main/java/io/deephaven/db/v2/by/HashTableColumnSource.java index 52160375d02..626ceed3977 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/HashTableColumnSource.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/HashTableColumnSource.java @@ -10,26 +10,25 @@ import org.jetbrains.annotations.NotNull; /** - * {@link ColumnSource} implementation that delegates to the main and overflow sources for a hash - * table column. + * {@link ColumnSource} implementation that delegates to the main and overflow sources for a hash table column. 
*/ public class HashTableColumnSource extends AbstractColumnSource - implements ColumnSource { + implements ColumnSource { public static final int MINIMUM_OVERFLOW_HASH_SLOT = 1 << 30; private final ColumnSource mainSource; private final ColumnSource overflowSource; HashTableColumnSource(@NotNull final Class dataType, - @NotNull final ColumnSource mainSource, - @NotNull final ColumnSource overflowSource) { + @NotNull final ColumnSource mainSource, + @NotNull final ColumnSource overflowSource) { super(dataType); this.mainSource = mainSource; this.overflowSource = overflowSource; } public HashTableColumnSource(@NotNull final ColumnSource mainSource, - @NotNull final ColumnSource overflowSource) { + @NotNull final ColumnSource overflowSource) { this(mainSource.getType(), mainSource, overflowSource); } @@ -41,13 +40,11 @@ private static class HashTableFillContext implements FillContext { final ResettableWritableChunk overflowDestinationSlice; private HashTableFillContext(@NotNull final ColumnSource mainSource, - @NotNull final ColumnSource overflowSource, - final int chunkCapacity, - final SharedContext sharedContext) { - // TODO: Implement a proper shareable context to use when combining fills from the main - // and overflow - // sources. Current usage is "safe" because sources are only exposed through this - // wrapper, and all + @NotNull final ColumnSource overflowSource, + final int chunkCapacity, + final SharedContext sharedContext) { + // TODO: Implement a proper shareable context to use when combining fills from the main and overflow + // sources. Current usage is "safe" because sources are only exposed through this wrapper, and all // sources at a given level will split their keys the same, but this is not ideal. 
mainFillContext = mainSource.makeFillContext(chunkCapacity, sharedContext); overflowFillContext = overflowSource.makeFillContext(chunkCapacity, sharedContext); @@ -64,17 +61,16 @@ public void close() { } } - private static final class HashTableGetContext extends HashTableFillContext - implements GetContext { + private static final class HashTableGetContext extends HashTableFillContext implements GetContext { private final GetContext mainGetContext; private final GetContext overflowGetContext; private final WritableChunk mergeChunk; private HashTableGetContext(@NotNull final ColumnSource mainSource, - @NotNull final ColumnSource overflowSource, - final int chunkCapacity, - final SharedContext sharedContext) { + @NotNull final ColumnSource overflowSource, + final int chunkCapacity, + final SharedContext sharedContext) { super(mainSource, overflowSource, chunkCapacity, sharedContext); mainGetContext = mainSource.makeGetContext(chunkCapacity, sharedContext); overflowGetContext = overflowSource.makeGetContext(chunkCapacity, sharedContext); @@ -91,66 +87,58 @@ public final void close() { } @Override - public final FillContext makeFillContext(final int chunkCapacity, - final SharedContext sharedContext) { + public final FillContext makeFillContext(final int chunkCapacity, final SharedContext sharedContext) { return new HashTableFillContext(mainSource, overflowSource, chunkCapacity, sharedContext); } @Override - public final GetContext makeGetContext(final int chunkCapacity, - final SharedContext sharedContext) { + public final GetContext makeGetContext(final int chunkCapacity, final SharedContext sharedContext) { return new HashTableGetContext(mainSource, overflowSource, chunkCapacity, sharedContext); } @Override public final void fillChunk(@NotNull final FillContext context, - @NotNull final WritableChunk destination, - @NotNull final OrderedKeys orderedKeys) { + @NotNull final WritableChunk destination, @NotNull final OrderedKeys orderedKeys) { final 
HashTableFillContext typedContext = (HashTableFillContext) context; if (!isOverflowLocation(orderedKeys.lastKey())) { - // Overflow locations are always after main locations, so there are no responsive - // overflow locations + // Overflow locations are always after main locations, so there are no responsive overflow locations mainSource.fillChunk(typedContext.mainFillContext, destination, orderedKeys); return; } if (isOverflowLocation(orderedKeys.firstKey())) { - // Main locations are always before overflow locations, so there are no responsive main - // locations + // Main locations are always before overflow locations, so there are no responsive main locations typedContext.overflowShiftedOrderedKeys.reset(orderedKeys, -MINIMUM_OVERFLOW_HASH_SLOT); overflowSource.fillChunk(typedContext.overflowFillContext, destination, - typedContext.overflowShiftedOrderedKeys); + typedContext.overflowShiftedOrderedKeys); typedContext.overflowShiftedOrderedKeys.clear(); return; } - // We're going to have to mix main and overflow locations in a single destination chunk, so - // delegate to fill + // We're going to have to mix main and overflow locations in a single destination chunk, so delegate to fill mergedFillChunk(typedContext, destination, orderedKeys); } private void mergedFillChunk(@NotNull final HashTableFillContext typedContext, - @NotNull final WritableChunk destination, - @NotNull final OrderedKeys orderedKeys) { + @NotNull final WritableChunk destination, @NotNull final OrderedKeys orderedKeys) { final int totalSize = orderedKeys.intSize(); final int firstOverflowChunkPosition; try (final OrderedKeys mainOrderedKeysSlice = - orderedKeys.getOrderedKeysByKeyRange(0, MINIMUM_OVERFLOW_HASH_SLOT - 1)) { + orderedKeys.getOrderedKeysByKeyRange(0, MINIMUM_OVERFLOW_HASH_SLOT - 1)) { firstOverflowChunkPosition = mainOrderedKeysSlice.intSize(); mainSource.fillChunk(typedContext.mainFillContext, destination, mainOrderedKeysSlice); } final int sizeFromOverflow = totalSize - 
firstOverflowChunkPosition; - // Set destination size ahead of time, so that resetting our overflow destination slice - // doesn't run into bounds issues. + // Set destination size ahead of time, so that resetting our overflow destination slice doesn't run into bounds + // issues. destination.setSize(totalSize); try (final OrderedKeys overflowOrderedKeysSlice = - orderedKeys.getOrderedKeysByPosition(firstOverflowChunkPosition, sizeFromOverflow)) { - typedContext.overflowShiftedOrderedKeys.reset(overflowOrderedKeysSlice, - -MINIMUM_OVERFLOW_HASH_SLOT); + orderedKeys.getOrderedKeysByPosition(firstOverflowChunkPosition, sizeFromOverflow)) { + typedContext.overflowShiftedOrderedKeys.reset(overflowOrderedKeysSlice, -MINIMUM_OVERFLOW_HASH_SLOT); overflowSource.fillChunk(typedContext.overflowFillContext, - typedContext.overflowDestinationSlice.resetFromChunk(destination, - firstOverflowChunkPosition, sizeFromOverflow), - typedContext.overflowShiftedOrderedKeys); + typedContext.overflowDestinationSlice.resetFromChunk(destination, firstOverflowChunkPosition, + sizeFromOverflow), + typedContext.overflowShiftedOrderedKeys); } typedContext.overflowDestinationSlice.clear(); typedContext.overflowShiftedOrderedKeys.clear(); @@ -158,54 +146,47 @@ private void mergedFillChunk(@NotNull final HashTableFillContext typedContext, @Override public final void fillPrevChunk(@NotNull final FillContext context, - @NotNull final WritableChunk destination, - @NotNull final OrderedKeys orderedKeys) { + @NotNull final WritableChunk destination, @NotNull final OrderedKeys orderedKeys) { final HashTableFillContext typedContext = (HashTableFillContext) context; if (!isOverflowLocation(orderedKeys.lastKey())) { - // Overflow locations are always after main locations, so there are no responsive - // overflow locations + // Overflow locations are always after main locations, so there are no responsive overflow locations mainSource.fillPrevChunk(typedContext.mainFillContext, destination, orderedKeys); 
return; } if (isOverflowLocation(orderedKeys.firstKey())) { - // Main locations are always before overflow locations, so there are no responsive main - // locations + // Main locations are always before overflow locations, so there are no responsive main locations typedContext.overflowShiftedOrderedKeys.reset(orderedKeys, -MINIMUM_OVERFLOW_HASH_SLOT); overflowSource.fillPrevChunk(typedContext.overflowFillContext, destination, - typedContext.overflowShiftedOrderedKeys); + typedContext.overflowShiftedOrderedKeys); typedContext.overflowShiftedOrderedKeys.clear(); return; } - // We're going to have to mix main and overflow locations in a single destination chunk, so - // delegate to fill + // We're going to have to mix main and overflow locations in a single destination chunk, so delegate to fill mergedFillPrevChunk(typedContext, destination, orderedKeys); } private void mergedFillPrevChunk(@NotNull final HashTableFillContext typedContext, - @NotNull final WritableChunk destination, - @NotNull final OrderedKeys orderedKeys) { + @NotNull final WritableChunk destination, @NotNull final OrderedKeys orderedKeys) { final int totalSize = orderedKeys.intSize(); final int firstOverflowChunkPosition; try (final OrderedKeys mainOrderedKeysSlice = - orderedKeys.getOrderedKeysByKeyRange(0, MINIMUM_OVERFLOW_HASH_SLOT - 1)) { + orderedKeys.getOrderedKeysByKeyRange(0, MINIMUM_OVERFLOW_HASH_SLOT - 1)) { firstOverflowChunkPosition = mainOrderedKeysSlice.intSize(); - mainSource.fillPrevChunk(typedContext.mainFillContext, destination, - mainOrderedKeysSlice); + mainSource.fillPrevChunk(typedContext.mainFillContext, destination, mainOrderedKeysSlice); } final int sizeFromOverflow = totalSize - firstOverflowChunkPosition; - // Set destination size ahead of time, so that resetting our overflow destination slice - // doesn't run into bounds issues. + // Set destination size ahead of time, so that resetting our overflow destination slice doesn't run into bounds + // issues. 
destination.setSize(totalSize); try (final OrderedKeys overflowOrderedKeysSlice = - orderedKeys.getOrderedKeysByPosition(firstOverflowChunkPosition, sizeFromOverflow)) { - typedContext.overflowShiftedOrderedKeys.reset(overflowOrderedKeysSlice, - -MINIMUM_OVERFLOW_HASH_SLOT); + orderedKeys.getOrderedKeysByPosition(firstOverflowChunkPosition, sizeFromOverflow)) { + typedContext.overflowShiftedOrderedKeys.reset(overflowOrderedKeysSlice, -MINIMUM_OVERFLOW_HASH_SLOT); overflowSource.fillPrevChunk(typedContext.overflowFillContext, - typedContext.overflowDestinationSlice.resetFromChunk(destination, - firstOverflowChunkPosition, sizeFromOverflow), - typedContext.overflowShiftedOrderedKeys); + typedContext.overflowDestinationSlice.resetFromChunk(destination, firstOverflowChunkPosition, + sizeFromOverflow), + typedContext.overflowShiftedOrderedKeys); typedContext.overflowDestinationSlice.clear(); typedContext.overflowShiftedOrderedKeys.clear(); } @@ -213,44 +194,37 @@ private void mergedFillPrevChunk(@NotNull final HashTableFillContext typedContex @Override public final Chunk getChunk(@NotNull final GetContext context, - @NotNull final OrderedKeys orderedKeys) { + @NotNull final OrderedKeys orderedKeys) { final HashTableGetContext typedContext = (HashTableGetContext) context; if (!isOverflowLocation(orderedKeys.lastKey())) { - // Overflow locations are always after main locations, so there are no responsive - // overflow locations + // Overflow locations are always after main locations, so there are no responsive overflow locations return mainSource.getChunk(typedContext.mainGetContext, orderedKeys); } if (isOverflowLocation(orderedKeys.firstKey())) { - // Main locations are always before overflow locations, so there are no responsive main - // locations + // Main locations are always before overflow locations, so there are no responsive main locations typedContext.overflowShiftedOrderedKeys.reset(orderedKeys, -MINIMUM_OVERFLOW_HASH_SLOT); - return 
overflowSource.getChunk(typedContext.overflowGetContext, - typedContext.overflowShiftedOrderedKeys); + return overflowSource.getChunk(typedContext.overflowGetContext, typedContext.overflowShiftedOrderedKeys); } - // We're going to have to mix main and overflow locations in a single destination chunk, so - // delegate to fill + // We're going to have to mix main and overflow locations in a single destination chunk, so delegate to fill mergedFillChunk(typedContext, typedContext.mergeChunk, orderedKeys); return typedContext.mergeChunk; } @Override public final Chunk getPrevChunk(@NotNull final GetContext context, - @NotNull final OrderedKeys orderedKeys) { + @NotNull final OrderedKeys orderedKeys) { final HashTableGetContext typedContext = (HashTableGetContext) context; if (!isOverflowLocation(orderedKeys.lastKey())) { - // Overflow locations are always after main locations, so there are no responsive - // overflow locations + // Overflow locations are always after main locations, so there are no responsive overflow locations return mainSource.getPrevChunk(typedContext.mainGetContext, orderedKeys); } if (isOverflowLocation(orderedKeys.firstKey())) { - // Main locations are always before overflow locations, so there are no responsive main - // locations + // Main locations are always before overflow locations, so there are no responsive main locations typedContext.overflowShiftedOrderedKeys.reset(orderedKeys, -MINIMUM_OVERFLOW_HASH_SLOT); return overflowSource.getPrevChunk(typedContext.overflowGetContext, - typedContext.overflowShiftedOrderedKeys); + typedContext.overflowShiftedOrderedKeys); } - // We're going to have to mix main and overflow locations in a single destination chunk, so - // delegate to fill + // We're going to have to mix main and overflow locations in a single destination chunk, so delegate to fill mergedFillPrevChunk(typedContext, typedContext.mergeChunk, orderedKeys); return typedContext.mergeChunk; } @@ -258,126 +232,109 @@ public final Chunk 
getPrevChunk(@NotNull final GetContext cont @Override public final DATA_TYPE get(final long index) { return isOverflowLocation(index) ? overflowSource.get(hashLocationToOverflowLocation(index)) - : mainSource.get(index); + : mainSource.get(index); } @Override public final Boolean getBoolean(final long index) { - return isOverflowLocation(index) - ? overflowSource.getBoolean(hashLocationToOverflowLocation(index)) - : mainSource.getBoolean(index); + return isOverflowLocation(index) ? overflowSource.getBoolean(hashLocationToOverflowLocation(index)) + : mainSource.getBoolean(index); } @Override public final byte getByte(final long index) { - return isOverflowLocation(index) - ? overflowSource.getByte(hashLocationToOverflowLocation(index)) - : mainSource.getByte(index); + return isOverflowLocation(index) ? overflowSource.getByte(hashLocationToOverflowLocation(index)) + : mainSource.getByte(index); } @Override public final char getChar(final long index) { - return isOverflowLocation(index) - ? overflowSource.getChar(hashLocationToOverflowLocation(index)) - : mainSource.getChar(index); + return isOverflowLocation(index) ? overflowSource.getChar(hashLocationToOverflowLocation(index)) + : mainSource.getChar(index); } @Override public final double getDouble(final long index) { - return isOverflowLocation(index) - ? overflowSource.getDouble(hashLocationToOverflowLocation(index)) - : mainSource.getDouble(index); + return isOverflowLocation(index) ? overflowSource.getDouble(hashLocationToOverflowLocation(index)) + : mainSource.getDouble(index); } @Override public final float getFloat(final long index) { - return isOverflowLocation(index) - ? overflowSource.getFloat(hashLocationToOverflowLocation(index)) - : mainSource.getFloat(index); + return isOverflowLocation(index) ? overflowSource.getFloat(hashLocationToOverflowLocation(index)) + : mainSource.getFloat(index); } @Override public final int getInt(final long index) { - return isOverflowLocation(index) - ? 
overflowSource.getInt(hashLocationToOverflowLocation(index)) - : mainSource.getInt(index); + return isOverflowLocation(index) ? overflowSource.getInt(hashLocationToOverflowLocation(index)) + : mainSource.getInt(index); } @Override public final long getLong(final long index) { - return isOverflowLocation(index) - ? overflowSource.getLong(hashLocationToOverflowLocation(index)) - : mainSource.getLong(index); + return isOverflowLocation(index) ? overflowSource.getLong(hashLocationToOverflowLocation(index)) + : mainSource.getLong(index); } @Override public final short getShort(final long index) { - return isOverflowLocation(index) - ? overflowSource.getShort(hashLocationToOverflowLocation(index)) - : mainSource.getShort(index); + return isOverflowLocation(index) ? overflowSource.getShort(hashLocationToOverflowLocation(index)) + : mainSource.getShort(index); } @Override public final DATA_TYPE getPrev(final long index) { - return isOverflowLocation(index) - ? overflowSource.getPrev(hashLocationToOverflowLocation(index)) - : mainSource.getPrev(index); + return isOverflowLocation(index) ? overflowSource.getPrev(hashLocationToOverflowLocation(index)) + : mainSource.getPrev(index); } @Override public final Boolean getPrevBoolean(final long index) { - return isOverflowLocation(index) - ? overflowSource.getPrevBoolean(hashLocationToOverflowLocation(index)) - : mainSource.getPrevBoolean(index); + return isOverflowLocation(index) ? overflowSource.getPrevBoolean(hashLocationToOverflowLocation(index)) + : mainSource.getPrevBoolean(index); } @Override public final byte getPrevByte(final long index) { - return isOverflowLocation(index) - ? overflowSource.getPrevByte(hashLocationToOverflowLocation(index)) - : mainSource.getPrevByte(index); + return isOverflowLocation(index) ? overflowSource.getPrevByte(hashLocationToOverflowLocation(index)) + : mainSource.getPrevByte(index); } @Override public final char getPrevChar(final long index) { - return isOverflowLocation(index) - ? 
overflowSource.getPrevChar(hashLocationToOverflowLocation(index)) - : mainSource.getPrevChar(index); + return isOverflowLocation(index) ? overflowSource.getPrevChar(hashLocationToOverflowLocation(index)) + : mainSource.getPrevChar(index); } @Override public final double getPrevDouble(final long index) { - return isOverflowLocation(index) - ? overflowSource.getPrevDouble(hashLocationToOverflowLocation(index)) - : mainSource.getPrevDouble(index); + return isOverflowLocation(index) ? overflowSource.getPrevDouble(hashLocationToOverflowLocation(index)) + : mainSource.getPrevDouble(index); } @Override public final float getPrevFloat(final long index) { - return isOverflowLocation(index) - ? overflowSource.getPrevFloat(hashLocationToOverflowLocation(index)) - : mainSource.getPrevFloat(index); + return isOverflowLocation(index) ? overflowSource.getPrevFloat(hashLocationToOverflowLocation(index)) + : mainSource.getPrevFloat(index); } @Override public final int getPrevInt(final long index) { - return isOverflowLocation(index) - ? overflowSource.getPrevInt(hashLocationToOverflowLocation(index)) - : mainSource.getPrevInt(index); + return isOverflowLocation(index) ? overflowSource.getPrevInt(hashLocationToOverflowLocation(index)) + : mainSource.getPrevInt(index); } @Override public final long getPrevLong(final long index) { - return isOverflowLocation(index) - ? overflowSource.getPrevLong(hashLocationToOverflowLocation(index)) - : mainSource.getPrevLong(index); + return isOverflowLocation(index) ? overflowSource.getPrevLong(hashLocationToOverflowLocation(index)) + : mainSource.getPrevLong(index); } @Override public final short getPrevShort(final long index) { - return isOverflowLocation(index) - ? overflowSource.getPrevShort(hashLocationToOverflowLocation(index)) - : mainSource.getPrevShort(index); + return isOverflowLocation(index) ? 
overflowSource.getPrevShort(hashLocationToOverflowLocation(index)) + : mainSource.getPrevShort(index); } @Override @@ -393,9 +350,8 @@ public final boolean isImmutable() { @Override public boolean allowsReinterpret( - @NotNull final Class alternateDataType) { - return mainSource.allowsReinterpret(alternateDataType) - && overflowSource.allowsReinterpret(alternateDataType); + @NotNull final Class alternateDataType) { + return mainSource.allowsReinterpret(alternateDataType) && overflowSource.allowsReinterpret(alternateDataType); } private static final class Reinterpreted extends HashTableColumnSource { @@ -403,21 +359,20 @@ private static final class Reinterpreted extends HashTableColumnSourc private final HashTableColumnSource original; private Reinterpreted(@NotNull final Class dataType, - @NotNull final HashTableColumnSource original) { - super(dataType, original.mainSource.reinterpret(dataType), - original.overflowSource.reinterpret(dataType)); + @NotNull final HashTableColumnSource original) { + super(dataType, original.mainSource.reinterpret(dataType), original.overflowSource.reinterpret(dataType)); this.original = original; } @Override public final boolean allowsReinterpret( - @NotNull final Class alternateDataType) { + @NotNull final Class alternateDataType) { return original.getType() == alternateDataType; } @Override protected final ColumnSource doReinterpret( - @NotNull final Class alternateDataType) { + @NotNull final Class alternateDataType) { // noinspection unchecked return (ColumnSource) original; } @@ -425,7 +380,7 @@ protected final ColumnSource doReinte @Override protected ColumnSource doReinterpret( - @NotNull final Class alternateDataType) { + @NotNull final Class alternateDataType) { return new Reinterpreted<>(alternateDataType, this); } diff --git a/DB/src/main/java/io/deephaven/db/v2/by/IncrementalByAggregationUpdateTracker.java b/DB/src/main/java/io/deephaven/db/v2/by/IncrementalByAggregationUpdateTracker.java index 230b321e320..5b5b66d2d29 
100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/IncrementalByAggregationUpdateTracker.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/IncrementalByAggregationUpdateTracker.java @@ -16,47 +16,40 @@ /** *

    - * A tracker for accumulating changes to aggregation states for - * {@link io.deephaven.db.tables.Table#by}. + * A tracker for accumulating changes to aggregation states for {@link io.deephaven.db.tables.Table#by}. * *

    - * The tracker is used in the initial (insert only) build phase, as well as in subsequent update - * passes. + * The tracker is used in the initial (insert only) build phase, as well as in subsequent update passes. * *

    - * Update processing is performed as follows (note that flags are accumulated across steps - * 1-4 and used in step 5): + * Update processing is performed as follows (note that flags are accumulated across steps 1-4 and used in + * step 5): *

      - *
    1. Probe and accumulate removes (including modified-pre-shift when key columns are modified) in - * sequential builders per state, then build the removed {@link Index} for each state and remove it - * from the state's {@link Index}
    2. - *
    3. Probe shifts and apply them as they are found to impact a given state's {@link Index}, - * writing down the total number of states with shifts as the chunk size for accumulating shifts in - * step 5
    4. + *
    5. Probe and accumulate removes (including modified-pre-shift when key columns are modified) in sequential builders + * per state, then build the removed {@link Index} for each state and remove it from the state's {@link Index}
    6. + *
    7. Probe shifts and apply them as they are found to impact a given state's {@link Index}, writing down the total + * number of states with shifts as the chunk size for accumulating shifts in step 5
    8. *
    9. Probe non-key modifies and flag impacted states
    10. - *
    11. Build and accumulate adds (including modified-post-shift when key columns are modified) in - * sequential builders per state, then build the added {@link Index} for each state and add it to - * the state's {@link Index}
    12. - *
    13. Update redirections from the previous {@link Index} first key to the current {@link Index} - * first key, and from old slot to new slot where a state was moved or promoted in rehash, - * accumulating index keys in 3 random builders (for added, removed, and modified) and shifts in a - * pair of parallel {@link WritableLongChunk}s for previous and current, using the following logic: + *
    14. Build and accumulate adds (including modified-post-shift when key columns are modified) in sequential builders + * per state, then build the added {@link Index} for each state and add it to the state's {@link Index}
    15. + *
    16. Update redirections from the previous {@link Index} first key to the current {@link Index} first key, and from + * old slot to new slot where a state was moved or promoted in rehash, accumulating index keys in 3 random builders (for + * added, removed, and modified) and shifts in a pair of parallel {@link WritableLongChunk}s for previous and current, + * using the following logic: *
        *
      1. Non-empty to empty transitions as removes of the previous first key
      2. *
      3. Empty or null placeholder to non-empty transitions as adds of the current first key
      4. - *
      5. Shifted-only states as shifts from previous first key to current first key, appended to the - * paired shift chunks
      6. - *
      7. All other changes as modifies if first key is unchanged, else paired removes and adds if - * first key changed
      8. + *
      9. Shifted-only states as shifts from previous first key to current first key, appended to the paired shift + * chunks
      10. + *
      11. All other changes as modifies if first key is unchanged, else paired removes and adds if first key changed
      12. *
      *
    17. - *
    18. Sort the shift chunks by the previous keys, accumulate shifts into an - * {@link IndexShiftData.Builder}
    19. + *
    20. Sort the shift chunks by the previous keys, accumulate shifts into an {@link IndexShiftData.Builder}
    21. *
    * *

    - * In each phase, the initial addition of a state to the tracker will return a cookie, which must be - * passed to subsequent updates to the tracker for that state. + * In each phase, the initial addition of a state to the tracker will return a cookie, which must be passed to + * subsequent updates to the tracker for that state. * *

    * To process results after steps 1, 4, and 5, the caller uses @@ -74,29 +67,26 @@ class IncrementalByAggregationUpdateTracker { private static final int ALLOCATION_UNIT = 4096; /** - * For each updated state, store the slot its in (regardless of whether main or overflow) in the - * higher 7 bytes, and flags in the lower 1 byte. Note that flags only use 5 bits currently, but - * it seems reasonable to reserve a whole byte. + * For each updated state, store the slot its in (regardless of whether main or overflow) in the higher 7 bytes, and + * flags in the lower 1 byte. Note that flags only use 5 bits currently, but it seems reasonable to reserve a whole + * byte. */ private final LongArraySource updatedStateSlotAndFlags = new LongArraySource(); /** - * Builders (used in remove processing and add processing), parallel to - * {@code updatedStateSlotAndFlags}. + * Builders (used in remove processing and add processing), parallel to {@code updatedStateSlotAndFlags}. */ private final ObjectArraySource builders = - new ObjectArraySource<>(Index.SequentialBuilder.class); + new ObjectArraySource<>(Index.SequentialBuilder.class); /** - * Each time we clear, we add an offset to our cookies, this prevents us from reading old - * values. + * Each time we clear, we add an offset to our cookies, this prevents us from reading old values. */ private long cookieGeneration = MINIMUM_COOKIE; /** - * The number of updated states, which is also the next position we will use in - * {@code updateStateSlotAndFlags} and {@code builders}. Note that cookies with implied pointers - * outside of {@code [0, size)} are known to be invalid. + * The number of updated states, which is also the next position we will use in {@code updateStateSlotAndFlags} and + * {@code builders}. Note that cookies with implied pointers outside of {@code [0, size)} are known to be invalid. */ private int size; @@ -107,9 +97,9 @@ class IncrementalByAggregationUpdateTracker { /** *

    - * The set of positions in {@link #updatedStateSlotAndFlags} (and possibly {@link #builders}) - * that have been updated in the current pass. Each corresponding "slot and flags" value will - * have the {@link #FLAG_STATE_IN_CURRENT_PASS} bit set. + * The set of positions in {@link #updatedStateSlotAndFlags} (and possibly {@link #builders}) that have been updated + * in the current pass. Each corresponding "slot and flags" value will have the {@link #FLAG_STATE_IN_CURRENT_PASS} + * bit set. *

    * Note that current pass membership is recorded by {@link #processShift(long, int, long)} and * {@link #processAdd(long, int, long)}, only, and cleared in the following @@ -119,8 +109,7 @@ class IncrementalByAggregationUpdateTracker { private final IntegerArraySource currentPassPositions = new IntegerArraySource(); /** - * The number of states whose "slot and flags" position can be found in in - * {@link #currentPassPositions}. + * The number of states whose "slot and flags" position can be found in in {@link #currentPassPositions}. */ private int currentPassSize; @@ -165,12 +154,11 @@ int size() { } /** - * Is this cookie within our valid range (greater than or equal to our generation, but less than - * the size after adjustment)? + * Is this cookie within our valid range (greater than or equal to our generation, but less than the size after + * adjustment)? * * @param cookie The cookie to check for validity - * @return true if the cookie is from the current generation,and references a valid tracker - * position + * @return true if the cookie is from the current generation,and references a valid tracker position */ private boolean isValidCookie(final long cookie) { return cookie >= cookieGeneration && cookieToPosition(cookie) < size; @@ -228,14 +216,13 @@ final long processRemove(final long cookie, final int stateSlot, final long remo * * @param cookie The last known cookie for the state * @param stateSlot The state's slot (in main table space) - * @param unusedShiftedIndex Unused shifted index argument, so we can use a method reference - * with the right signature + * @param unusedShiftedIndex Unused shifted index argument, so we can use a method reference with the right + * signature * @return The new cookie for the state if it has changed */ final long processShift(final long cookie, final int stateSlot, - @SuppressWarnings("unused") final long unusedShiftedIndex) { - return setFlags(cookie, stateSlot, - (byte) (FLAG_STATE_HAS_SHIFTS | 
FLAG_STATE_IN_CURRENT_PASS)); + @SuppressWarnings("unused") final long unusedShiftedIndex) { + return setFlags(cookie, stateSlot, (byte) (FLAG_STATE_HAS_SHIFTS | FLAG_STATE_IN_CURRENT_PASS)); } /** @@ -254,12 +241,12 @@ final long processAppliedShift(final long cookie, final int stateSlot) { * * @param cookie The last known cookie for the state * @param stateSlot The state's slot (in main table space) - * @param unusedModifiedIndex Unused modified index argument, so we can use a method reference - * with the right signature + * @param unusedModifiedIndex Unused modified index argument, so we can use a method reference with the right + * signature * @return The new cookie for the state if it has changed */ final long processModify(final long cookie, final int stateSlot, - @SuppressWarnings("unused") final long unusedModifiedIndex) { + @SuppressWarnings("unused") final long unusedModifiedIndex) { return setFlags(cookie, stateSlot, FLAG_STATE_HAS_MODIFIES); } @@ -273,8 +260,8 @@ final long processModify(final long cookie, final int stateSlot, * @return The new cookie for the state if it has changed */ final long processAdd(final long cookie, final int stateSlot, final long addedIndex) { - return setFlagsAndBuild(cookie, stateSlot, - (byte) (FLAG_STATE_HAS_ADDS | FLAG_STATE_IN_CURRENT_PASS), addedIndex); + return setFlagsAndBuild(cookie, stateSlot, (byte) (FLAG_STATE_HAS_ADDS | FLAG_STATE_IN_CURRENT_PASS), + addedIndex); } /** @@ -287,14 +274,12 @@ final void processStateMove(final long cookie, final int newStateSlot) { if (isValidCookie(cookie)) { final long position = cookieToPosition(cookie); final long currentSlotAndFlags = updatedStateSlotAndFlags.getLong(position); - final long resultSlotAndFlags = - ((long) newStateSlot << FLAG_SHIFT) | (currentSlotAndFlags & FLAG_MASK); + final long resultSlotAndFlags = ((long) newStateSlot << FLAG_SHIFT) | (currentSlotAndFlags & FLAG_MASK); updatedStateSlotAndFlags.set(position, resultSlotAndFlags); } } - private long 
setFlagsAndBuild(final long cookie, final int stateSlot, final byte flags, - final long index) { + private long setFlagsAndBuild(final long cookie, final int stateSlot, final byte flags, final long index) { final int position; final long resultCookie; final long currentSlotAndFlags; @@ -309,12 +294,10 @@ private long setFlagsAndBuild(final long cookie, final int stateSlot, final byte currentSlotAndFlags = 0L; } final Index.SequentialBuilder builder; - final long resultSlotAndFlags = - ((long) stateSlot << FLAG_SHIFT) | (currentSlotAndFlags & FLAG_MASK | flags); + final long resultSlotAndFlags = ((long) stateSlot << FLAG_SHIFT) | (currentSlotAndFlags & FLAG_MASK | flags); if (currentSlotAndFlags != resultSlotAndFlags) { updatedStateSlotAndFlags.set(position, resultSlotAndFlags); - if ((flags & FLAG_STATE_IN_CURRENT_PASS) != 0 - && (currentSlotAndFlags & FLAG_STATE_IN_CURRENT_PASS) == 0) { + if ((flags & FLAG_STATE_IN_CURRENT_PASS) != 0 && (currentSlotAndFlags & FLAG_STATE_IN_CURRENT_PASS) == 0) { checkCurrentPassCapacity(); currentPassPositions.set(currentPassSize++, position); } @@ -341,12 +324,10 @@ private long setFlags(final long cookie, final int stateSlot, final byte flags) resultCookie = positionToCookie(position); currentSlotAndFlags = 0L; } - final long resultSlotAndFlags = - ((long) stateSlot << FLAG_SHIFT) | (currentSlotAndFlags & FLAG_MASK | flags); + final long resultSlotAndFlags = ((long) stateSlot << FLAG_SHIFT) | (currentSlotAndFlags & FLAG_MASK | flags); if (currentSlotAndFlags != resultSlotAndFlags) { updatedStateSlotAndFlags.set(position, resultSlotAndFlags); - if ((flags & FLAG_STATE_IN_CURRENT_PASS) != 0 - && (currentSlotAndFlags & FLAG_STATE_IN_CURRENT_PASS) == 0) { + if ((flags & FLAG_STATE_IN_CURRENT_PASS) != 0 && (currentSlotAndFlags & FLAG_STATE_IN_CURRENT_PASS) == 0) { checkCurrentPassCapacity(); currentPassPositions.set(currentPassSize++, position); } @@ -370,18 +351,17 @@ private void checkCurrentPassCapacity() { } /** - * Apply 
accumulated adds to their states, populate the result {@link RedirectionIndex}, and - * build the initial result {@link Index}. + * Apply accumulated adds to their states, populate the result {@link RedirectionIndex}, and build the initial + * result {@link Index}. * * @param indexSource The {@link Index} column source for the main table * @param overflowIndexSource The {@link Index} column source for the overflow table - * @param redirectionIndex The result {@link RedirectionIndex} (from state first keys to state - * slots) to populate + * @param redirectionIndex The result {@link RedirectionIndex} (from state first keys to state slots) to populate * @return The result {@link Index} */ final Index applyAddsAndMakeInitialIndex(@NotNull final ObjectArraySource indexSource, - @NotNull final ObjectArraySource overflowIndexSource, - @NotNull final RedirectionIndex redirectionIndex) { + @NotNull final ObjectArraySource overflowIndexSource, + @NotNull final RedirectionIndex redirectionIndex) { final Index.RandomBuilder resultBuilder = Index.FACTORY.getRandomBuilder(); for (long trackerIndex = 0; trackerIndex < size; ++trackerIndex) { final long slotAndFlags = updatedStateSlotAndFlags.getLong(trackerIndex); @@ -401,8 +381,7 @@ final Index applyAddsAndMakeInitialIndex(@NotNull final ObjectArraySource redirectionIndex.putVoid(stateFirstKey, slot); resultBuilder.addKey(stateFirstKey); } - // NB: We should not need to initialize previous value here, as the result index was - // computed with no mutations. + // NB: We should not need to initialize previous value here, as the result index was computed with no mutations. 
return resultBuilder.getIndex(); } @@ -413,7 +392,7 @@ final Index applyAddsAndMakeInitialIndex(@NotNull final ObjectArraySource * @param overflowIndexSource The {@link Index} column source for the overflow table */ final void applyRemovesToStates(@NotNull final ObjectArraySource indexSource, - @NotNull final ObjectArraySource overflowIndexSource) { + @NotNull final ObjectArraySource overflowIndexSource) { for (long trackerIndex = 0; trackerIndex < size; ++trackerIndex) { final long slotAndFlags = updatedStateSlotAndFlags.getLong(trackerIndex); // Since removes are always done first, we need not check the flags here. @@ -438,20 +417,18 @@ final void applyRemovesToStates(@NotNull final ObjectArraySource indexSou * @param shiftDelta See {@link IndexShiftData#applyShift(Index, long, long, long)} */ final void applyShiftToStates(@NotNull final ObjectArraySource indexSource, - @NotNull final ObjectArraySource overflowIndexSource, - final long beginRange, - final long endRange, - final long shiftDelta) { - for (int currentPositionIndex = - 0; currentPositionIndex < currentPassSize; ++currentPositionIndex) { + @NotNull final ObjectArraySource overflowIndexSource, + final long beginRange, + final long endRange, + final long shiftDelta) { + for (int currentPositionIndex = 0; currentPositionIndex < currentPassSize; ++currentPositionIndex) { final int trackerIndex = currentPassPositions.getInt(currentPositionIndex); final long slotAndFlags = updatedStateSlotAndFlags.getLong(trackerIndex); - // Since the current pass is only states responsive to the current shift, we need not - // check the flags here. + // Since the current pass is only states responsive to the current shift, we need not check the flags here. 
final int slot = (int) (slotAndFlags >> FLAG_SHIFT); - IndexShiftData.applyShift(slotToIndex(indexSource, overflowIndexSource, slot), - beginRange, endRange, shiftDelta); + IndexShiftData.applyShift(slotToIndex(indexSource, overflowIndexSource, slot), beginRange, endRange, + shiftDelta); updatedStateSlotAndFlags.set(trackerIndex, slotAndFlags ^ FLAG_STATE_IN_CURRENT_PASS); } @@ -465,9 +442,8 @@ final void applyShiftToStates(@NotNull final ObjectArraySource indexSourc * @param overflowIndexSource The {@link Index} column source for the overflow table */ final void applyAddsToStates(@NotNull final ObjectArraySource indexSource, - @NotNull final ObjectArraySource overflowIndexSource) { - for (int currentPositionIndex = - 0; currentPositionIndex < currentPassSize; ++currentPositionIndex) { + @NotNull final ObjectArraySource overflowIndexSource) { + for (int currentPositionIndex = 0; currentPositionIndex < currentPassSize; ++currentPositionIndex) { final int trackerIndex = currentPassPositions.getInt(currentPositionIndex); final long slotAndFlags = updatedStateSlotAndFlags.getLong(trackerIndex); // Since the current pass is only states with adds, we need not check the flags here. @@ -493,26 +469,23 @@ interface ModifiedColumnSetProducer { } /** - * Build an {@link ShiftAwareListener.Update} for this tracker's updated states, and update the - * result {@link Index} and {@link RedirectionIndex}. + * Build an {@link ShiftAwareListener.Update} for this tracker's updated states, and update the result {@link Index} + * and {@link RedirectionIndex}. 
* * @param indexSource The {@link Index} column source for the main table * @param overflowIndexSource The {@link Index} column source for the overflow table * @param index The result {@link Index} of visible keys to update - * @param redirectionIndex The result {@link RedirectionIndex} (from state first keys to state - * slots) to update - * @param modifiedColumnSetProducer The {@link ModifiedColumnSetProducer} to use for computing - * the downstream {@link ModifiedColumnSet} + * @param redirectionIndex The result {@link RedirectionIndex} (from state first keys to state slots) to update + * @param modifiedColumnSetProducer The {@link ModifiedColumnSetProducer} to use for computing the downstream + * {@link ModifiedColumnSet} * @return The result {@link ShiftAwareListener.Update} */ - final ShiftAwareListener.Update makeUpdateFromStates( - @NotNull final ObjectArraySource indexSource, - @NotNull final ObjectArraySource overflowIndexSource, - @NotNull final Index index, - @NotNull final RedirectionIndex redirectionIndex, - @NotNull final ModifiedColumnSetProducer modifiedColumnSetProducer) { - // First pass: Removes are handled on their own, because if the key moved to a new state we - // may reinsert it + final ShiftAwareListener.Update makeUpdateFromStates(@NotNull final ObjectArraySource indexSource, + @NotNull final ObjectArraySource overflowIndexSource, + @NotNull final Index index, + @NotNull final RedirectionIndex redirectionIndex, + @NotNull final ModifiedColumnSetProducer modifiedColumnSetProducer) { + // First pass: Removes are handled on their own, because if the key moved to a new state we may reinsert it final Index.RandomBuilder removedBuilder = Index.FACTORY.getRandomBuilder(); int numStatesWithShifts = 0; for (long ti = 0; ti < size; ++ti) { @@ -550,11 +523,10 @@ final ShiftAwareListener.Update makeUpdateFromStates( boolean someKeyHasAddsOrRemoves = false; boolean someKeyHasModifies = false; final IndexShiftData shiftData; - try ( - final 
WritableLongChunk previousShiftedFirstKeys = + try (final WritableLongChunk previousShiftedFirstKeys = WritableLongChunk.makeWritableChunk(numStatesWithShifts); - final WritableLongChunk currentShiftedFirstKeys = - WritableLongChunk.makeWritableChunk(numStatesWithShifts)) { + final WritableLongChunk currentShiftedFirstKeys = + WritableLongChunk.makeWritableChunk(numStatesWithShifts)) { int shiftChunkPosition = 0; for (long ti = 0; ti < size; ++ti) { final long slotAndFlags = updatedStateSlotAndFlags.getLong(ti); @@ -594,16 +566,13 @@ final ShiftAwareListener.Update makeUpdateFromStates( } // Now sort shifts and build the shift data - Assert.eq(numStatesWithShifts, "numStatesWithShift", shiftChunkPosition, - "shiftedChunkPosition"); + Assert.eq(numStatesWithShifts, "numStatesWithShift", shiftChunkPosition, "shiftedChunkPosition"); if (numStatesWithShifts > 0) { previousShiftedFirstKeys.setSize(numStatesWithShifts); currentShiftedFirstKeys.setSize(numStatesWithShifts); - try ( - final LongLongTimsortKernel.LongLongSortKernelContext sortKernelContext = + try (final LongLongTimsortKernel.LongLongSortKernelContext sortKernelContext = LongLongTimsortKernel.createContext(numStatesWithShifts)) { - LongLongTimsortKernel.sort(sortKernelContext, currentShiftedFirstKeys, - previousShiftedFirstKeys); + LongLongTimsortKernel.sort(sortKernelContext, currentShiftedFirstKeys, previousShiftedFirstKeys); } final IndexShiftData.Builder shiftBuilder = new IndexShiftData.Builder(); for (int si = 0; si < numStatesWithShifts; ++si) { @@ -629,15 +598,15 @@ final ShiftAwareListener.Update makeUpdateFromStates( // Build and return the update return new ShiftAwareListener.Update(added, removed, modified, shiftData, - modifiedColumnSetProducer.produce(someKeyHasAddsOrRemoves, someKeyHasModifies)); + modifiedColumnSetProducer.produce(someKeyHasAddsOrRemoves, someKeyHasModifies)); } private static Index slotToIndex(@NotNull final ObjectArraySource indexSource, - @NotNull final ObjectArraySource 
overflowIndexSource, - final int slot) { + @NotNull final ObjectArraySource overflowIndexSource, + final int slot) { return IncrementalChunkedByAggregationStateManager.isOverflowLocation(slot) - ? overflowIndexSource.get( - IncrementalChunkedByAggregationStateManager.hashLocationToOverflowLocation(slot)) - : indexSource.get(slot); + ? overflowIndexSource + .get(IncrementalChunkedByAggregationStateManager.hashLocationToOverflowLocation(slot)) + : indexSource.get(slot); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/by/IterativeChunkedAggregationOperator.java b/DB/src/main/java/io/deephaven/db/v2/by/IterativeChunkedAggregationOperator.java index fc50e903456..99a4a40bd2e 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/IterativeChunkedAggregationOperator.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/IterativeChunkedAggregationOperator.java @@ -26,13 +26,13 @@ import java.util.function.UnaryOperator; /** - * A chunked, iterative operator that processes indices and/or data from one input column to produce - * one or more output columns. + * A chunked, iterative operator that processes indices and/or data from one input column to produce one or more output + * columns. */ public interface IterativeChunkedAggregationOperator { IterativeChunkedAggregationOperator[] ZERO_LENGTH_ITERATIVE_CHUNKED_AGGREGATION_OPERATOR_ARRAY = - new IterativeChunkedAggregationOperator[0]; + new IterativeChunkedAggregationOperator[0]; /** * Aggregate a chunk of data into the result columns. 
@@ -40,18 +40,15 @@ public interface IterativeChunkedAggregationOperator { * @param context the operator-specific context * @param values a chunk of values to aggregate * @param inputIndices the input indices, in post-shift space - * @param destinations the destinations in resultColumn to aggregate into, parallel with - * startPositions and length + * @param destinations the destinations in resultColumn to aggregate into, parallel with startPositions and length * @param startPositions the starting positions in the chunk for each destination * @param length the number of values in the chunk for each destination - * @param stateModified a boolean output array, parallel to destinations, which is set to true - * if the corresponding destination has been modified + * @param stateModified a boolean output array, parallel to destinations, which is set to true if the corresponding + * destination has been modified */ - void addChunk(BucketedContext context, Chunk values, - LongChunk inputIndices, - IntChunk destinations, IntChunk startPositions, - IntChunk length, - WritableBooleanChunk stateModified); + void addChunk(BucketedContext context, Chunk values, LongChunk inputIndices, + IntChunk destinations, IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified); /** * Remove a chunk of data previously aggregated into the result columns. @@ -59,50 +56,45 @@ void addChunk(BucketedContext context, Chunk values, * @param context the operator-specific context * @param values a chunk of values that have been previously aggregated. 
* @param inputIndices the input indices, in pre-shift space - * @param destinations the destinations in resultColumn to remove the values from, parallel with - * startPositions and length + * @param destinations the destinations in resultColumn to remove the values from, parallel with startPositions and + * length * @param startPositions the starting positions in the chunk for each destination * @param length the number of values in the chunk for each destination - * @param stateModified a boolean output array, parallel to destinations, which is set to true - * if the corresponding destination has been modified + * @param stateModified a boolean output array, parallel to destinations, which is set to true if the corresponding + * destination has been modified */ void removeChunk(BucketedContext context, Chunk values, - LongChunk inputIndices, - IntChunk destinations, IntChunk startPositions, - IntChunk length, - WritableBooleanChunk stateModified); + LongChunk inputIndices, + IntChunk destinations, IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified); /** - * Modify a chunk of data previously aggregated into the result columns using a parallel chunk - * of new values. Never includes modifies that have been shifted if {@link #requiresIndices()} - * returns true - those are handled in + * Modify a chunk of data previously aggregated into the result columns using a parallel chunk of new values. Never + * includes modifies that have been shifted if {@link #requiresIndices()} returns true - those are handled in * {@link #shiftChunk(BucketedContext, Chunk, Chunk, LongChunk, LongChunk, IntChunk, IntChunk, IntChunk, WritableBooleanChunk)}. 
* * @param context the operator-specific context * @param previousValues a chunk of values that have been previously aggregated * @param newValues a chunk of values to aggregate * @param postShiftIndices the input indices, in post-shift space - * @param destinations the destinations in resultColumn to remove the values from, parallel with - * startPositions and length + * @param destinations the destinations in resultColumn to remove the values from, parallel with startPositions and + * length * @param startPositions the starting positions in the chunk for each destination * @param length the number of values in the chunk for each destination - * @param stateModified a boolean output array, parallel to destinations, which is set to true - * if the corresponding destination has been modified + * @param stateModified a boolean output array, parallel to destinations, which is set to true if the corresponding + * destination has been modified */ default void modifyChunk(BucketedContext context, Chunk previousValues, - Chunk newValues, - LongChunk postShiftIndices, - IntChunk destinations, IntChunk startPositions, - IntChunk length, - WritableBooleanChunk stateModified) { + Chunk newValues, + LongChunk postShiftIndices, + IntChunk destinations, IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { try (final WritableBooleanChunk addModified = - WritableBooleanChunk.makeWritableChunk(stateModified.size())) { - // There are no shifted indices here for any operators that care about indices, hence it - // is safe to remove in "post-shift" space. 
- removeChunk(context, previousValues, postShiftIndices, destinations, startPositions, - length, stateModified); - addChunk(context, newValues, postShiftIndices, destinations, startPositions, length, - addModified); + WritableBooleanChunk.makeWritableChunk(stateModified.size())) { + // There are no shifted indices here for any operators that care about indices, hence it is safe to remove + // in "post-shift" space. + removeChunk(context, previousValues, postShiftIndices, destinations, startPositions, length, stateModified); + addChunk(context, newValues, postShiftIndices, destinations, startPositions, length, addModified); for (int ii = 0; ii < stateModified.size(); ++ii) { stateModified.set(ii, stateModified.get(ii) || addModified.get(ii)); } @@ -110,49 +102,42 @@ default void modifyChunk(BucketedContext context, Chunk previo } /** - * Called with shifted indices when {@link #requiresIndices()} returns true, including shifted - * same-slot modifies. + * Called with shifted indices when {@link #requiresIndices()} returns true, including shifted same-slot modifies. * * @param context the operator-specific context * @param previousValues a chunk of values that have been previously aggregated. 
* @param newValues a chunk of values to aggregate * @param preShiftIndices the input indices, in pre-shift space * @param postShiftIndices the input indices, in post-shift space - * @param destinations the destinations in resultColumn to aggregate into, parallel with - * startPositions and length + * @param destinations the destinations in resultColumn to aggregate into, parallel with startPositions and length * @param startPositions the starting positions in the chunk for each destination * @param length the number of values in the chunk for each destination - * @param stateModified a boolean output array, parallel to destinations, which is set to true - * if the corresponding destination has been modified + * @param stateModified a boolean output array, parallel to destinations, which is set to true if the corresponding + * destination has been modified */ default void shiftChunk(BucketedContext context, Chunk previousValues, - Chunk newValues, - LongChunk preShiftIndices, - LongChunk postShiftIndices, - IntChunk destinations, IntChunk startPositions, - IntChunk length, - WritableBooleanChunk stateModified) { + Chunk newValues, + LongChunk preShiftIndices, LongChunk postShiftIndices, + IntChunk destinations, IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { // we don't actually care } /** - * Called with the modified indices when {@link #requiresIndices()} returns true if our input - * columns have not changed (or we have none). + * Called with the modified indices when {@link #requiresIndices()} returns true if our input columns have not + * changed (or we have none). 
* * @param context the operator-specific context * @param inputIndices the input indices, in post-shift space - * @param destinations the destinations in resultColumn to aggregate into, parallel with - * startPositions and length + * @param destinations the destinations in resultColumn to aggregate into, parallel with startPositions and length * @param startPositions the starting positions in the chunk for each destination * @param length the number of values in the chunk for each destination - * @param stateModified a boolean output array, parallel to destinations, which is set to true - * if the corresponding destination has been modified + * @param stateModified a boolean output array, parallel to destinations, which is set to true if the corresponding + * destination has been modified */ - default void modifyIndices(BucketedContext context, - LongChunk inputIndices, - IntChunk destinations, IntChunk startPositions, - IntChunk length, - WritableBooleanChunk stateModified) { + default void modifyIndices(BucketedContext context, LongChunk inputIndices, + IntChunk destinations, IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { // we don't actually care } @@ -167,7 +152,7 @@ default void modifyIndices(BucketedContext context, * @return true if the state was modified, false otherwise */ boolean addChunk(SingletonContext context, int chunkSize, Chunk values, - LongChunk inputIndices, long destination); + LongChunk inputIndices, long destination); /** * Remove a chunk of data previously aggregated into the result columns. @@ -180,12 +165,11 @@ boolean addChunk(SingletonContext context, int chunkSize, Chunk values, - LongChunk inputIndices, long destination); + LongChunk inputIndices, long destination); /** - * Modify a chunk of data previously aggregated into the result columns using a parallel chunk - * of new values. 
Never includes modifies that have been shifted if {@link #requiresIndices()} - * returns true - those are handled in + * Modify a chunk of data previously aggregated into the result columns using a parallel chunk of new values. Never + * includes modifies that have been shifted if {@link #requiresIndices()} returns true - those are handled in * {@link #shiftChunk(SingletonContext, Chunk, Chunk, LongChunk, LongChunk, long)}. * * @param context the operator-specific context @@ -195,21 +179,18 @@ boolean removeChunk(SingletonContext context, int chunkSize, Chunk previousValues, Chunk newValues, - LongChunk postShiftIndices, long destination) { - // There are no shifted indices here for any operators that care about indices, hence it is - // safe to remove in "post-shift" space. - final boolean modifiedOld = - removeChunk(context, chunkSize, previousValues, postShiftIndices, destination); - final boolean modifiedNew = - addChunk(context, chunkSize, newValues, postShiftIndices, destination); + default boolean modifyChunk(SingletonContext context, int chunkSize, Chunk previousValues, + Chunk newValues, + LongChunk postShiftIndices, long destination) { + // There are no shifted indices here for any operators that care about indices, hence it is safe to remove in + // "post-shift" space. + final boolean modifiedOld = removeChunk(context, chunkSize, previousValues, postShiftIndices, destination); + final boolean modifiedNew = addChunk(context, chunkSize, newValues, postShiftIndices, destination); return modifiedOld || modifiedNew; } /** - * Shift a chunk of data previously aggregated into the result columns, including shifted - * same-slot modifies.. + * Shift a chunk of data previously aggregated into the result columns, including shifted same-slot modifies.. * * @param context the operator-specific context * @param previousValues a chunk of values that have been previously aggregated. 
@@ -220,31 +201,30 @@ default boolean modifyChunk(SingletonContext context, int chunkSize, * @return true if the result should be considered modified */ default boolean shiftChunk(SingletonContext context, Chunk previousValues, - Chunk newValues, - LongChunk preShiftIndices, - LongChunk postShiftIndices, long destination) { + Chunk newValues, + LongChunk preShiftIndices, LongChunk postShiftIndices, + long destination) { // we don't actually care return false; } /** - * Called with the modified indices when {@link #requiresIndices()} returns true if our input - * columns have not changed (or we have none). + * Called with the modified indices when {@link #requiresIndices()} returns true if our input columns have not + * changed (or we have none). * * @param context the operator-specific context * @param indices the modified indices for a given destination, in post-shift space * @param destination the destination that was modified * @return true if the result should be considered modified */ - default boolean modifyIndices(SingletonContext context, LongChunk indices, - long destination) { + default boolean modifyIndices(SingletonContext context, LongChunk indices, long destination) { return false; } /** - * Whether the operator requires indices. This implies that the operator must process shifts - * (i.e. {@link #shiftChunk}), and must observe modifications even when its input columns (if - * any) are not modified (i.e. {@link #modifyIndices}). + * Whether the operator requires indices. This implies that the operator must process shifts (i.e. + * {@link #shiftChunk}), and must observe modifications even when its input columns (if any) are not modified (i.e. + * {@link #modifyIndices}). 
* * @return true if the operator requires indices, false otherwise */ @@ -281,59 +261,51 @@ default boolean addIndex(SingletonContext context, Index index, long destination Map> getResultColumns(); /** - * Perform any internal state keeping needed for destinations that were added during - * initialization. + * Perform any internal state keeping needed for destinations that were added during initialization. * * @param resultTable The result {@link QueryTable} after initialization */ default void propagateInitialState(@NotNull final QueryTable resultTable) {} /** - * Called after initialization; when the operator's result columns must have previous tracking - * enabled. + * Called after initialization; when the operator's result columns must have previous tracking enabled. */ void startTrackingPrevValues(); /** - * Initialize refreshing result support for this operator. As a side effect, make a factory - * method for converting upstream modified column sets to result modified column sets, to be - * invoked whenever this operator reports a modification in order to determine the operator's - * contribution to the final result modified column set. + * Initialize refreshing result support for this operator. As a side effect, make a factory method for converting + * upstream modified column sets to result modified column sets, to be invoked whenever this operator reports a + * modification in order to determine the operator's contribution to the final result modified column set. 
* * @param resultTable The result {@link QueryTable} after initialization - * @param aggregationUpdateListener The aggregation update listener, which may be needed for - * referential integrity - * @return A factory that produces a result modified column set from the upstream modified - * column set + * @param aggregationUpdateListener The aggregation update listener, which may be needed for referential integrity + * @return A factory that produces a result modified column set from the upstream modified column set */ - default UnaryOperator initializeRefreshing( - @NotNull final QueryTable resultTable, - @NotNull final LivenessReferent aggregationUpdateListener) { - final ModifiedColumnSet resultModifiedColumnSet = resultTable.newModifiedColumnSet( - getResultColumns().keySet().toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)); + default UnaryOperator initializeRefreshing(@NotNull final QueryTable resultTable, + @NotNull final LivenessReferent aggregationUpdateListener) { + final ModifiedColumnSet resultModifiedColumnSet = resultTable + .newModifiedColumnSet(getResultColumns().keySet().toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)); return upstreamModifiedColumnSet -> resultModifiedColumnSet; } /** - * Reset any per-step internal state. Note that the arguments to this method should not be - * mutated in any way. + * Reset any per-step internal state. Note that the arguments to this method should not be mutated in any way. * * @param upstream The upstream ShiftAwareListener.Update */ default void resetForStep(@NotNull final ShiftAwareListener.Update upstream) {} /** - * Perform any internal state keeping needed for destinations that were added (went from 0 keys - * to > 0), removed (went from > 0 keys to 0), or modified (keys added or removed, or keys - * modified) by this iteration. Note that the arguments to this method should not be mutated in - * any way. 
+ * Perform any internal state keeping needed for destinations that were added (went from 0 keys to > 0), removed + * (went from > 0 keys to 0), or modified (keys added or removed, or keys modified) by this iteration. Note that + * the arguments to this method should not be mutated in any way. * * @param downstream The downstream ShiftAwareListener.Update (which does not have its * {@link ModifiedColumnSet} finalized yet) * @param newDestinations New destinations added on this update */ default void propagateUpdates(@NotNull final ShiftAwareListener.Update downstream, - @NotNull final ReadOnlyIndex newDestinations) {} + @NotNull final ReadOnlyIndex newDestinations) {} /** * Called on error to propagate listener failure to this operator. @@ -342,7 +314,7 @@ default void propagateUpdates(@NotNull final ShiftAwareListener.Update downstrea * @param sourceEntry The UpdatePerformanceTracker.Entry for the failed listener */ default void propagateFailure(@NotNull final Throwable originalException, - @NotNull final UpdatePerformanceTracker.Entry sourceEntry) {} + @NotNull final UpdatePerformanceTracker.Entry sourceEntry) {} /** * Make a {@link BucketedContext} suitable for this operator if necessary. 
diff --git a/DB/src/main/java/io/deephaven/db/v2/by/IterativeChunkedOperatorFactory.java b/DB/src/main/java/io/deephaven/db/v2/by/IterativeChunkedOperatorFactory.java index d6376ea278b..54cacd93c97 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/IterativeChunkedOperatorFactory.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/IterativeChunkedOperatorFactory.java @@ -2,5 +2,5 @@ public interface IterativeChunkedOperatorFactory { IterativeChunkedAggregationOperator getChunkedOperator(Class type, String resultName, - boolean exposeInternalColumns); + boolean exposeInternalColumns); } diff --git a/DB/src/main/java/io/deephaven/db/v2/by/IterativeIndexStateFactory.java b/DB/src/main/java/io/deephaven/db/v2/by/IterativeIndexStateFactory.java index 23a1bf9cad1..2e8d3b4d7c7 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/IterativeIndexStateFactory.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/IterativeIndexStateFactory.java @@ -15,19 +15,17 @@ public abstract class IterativeIndexStateFactory extends ReaggregatableStatefactory { static final String REDIRECTION_INDEX_PREFIX = "RedirectionIndex_"; private final Map nameToDestColumns = new LinkedHashMap<>(); - final protected RedirectionIndex redirectionIndex = - RedirectionIndex.FACTORY.createRedirectionIndex(8); + final protected RedirectionIndex redirectionIndex = RedirectionIndex.FACTORY.createRedirectionIndex(8); private boolean firstTime; final boolean lowestRollup; final boolean secondRollup; final int rollupColumnIdentifier; - // if we have multiple aggregations, we do not want them to have conflicting column names, so we - // use an identifier to find them + // if we have multiple aggregations, we do not want them to have conflicting column names, so we use an identifier + // to find them private final static AtomicInteger nextRollupColumnIdentifier = new AtomicInteger(1); - IterativeIndexStateFactory(boolean lowestRollup, boolean secondRollup, - int rollupColumnIdentifier) { + 
IterativeIndexStateFactory(boolean lowestRollup, boolean secondRollup, int rollupColumnIdentifier) { firstTime = true; this.lowestRollup = lowestRollup; this.secondRollup = secondRollup; @@ -40,8 +38,7 @@ public abstract class IterativeIndexStateFactory extends ReaggregatableStatefact @NotNull private String getRedirectionName() { - return REDIRECTION_INDEX_PREFIX + rollupColumnIdentifier - + ComboAggregateFactory.ROLLUP_COLUMN_SUFFIX; + return REDIRECTION_INDEX_PREFIX + rollupColumnIdentifier + ComboAggregateFactory.ROLLUP_COLUMN_SUFFIX; } @Override @@ -60,7 +57,7 @@ ReaggregatableStatefactory rollupFactory() { } class RedirectionValueColumnSource extends AbstractColumnSource - implements MutableColumnSourceGetDefaults.ForLong { + implements MutableColumnSourceGetDefaults.ForLong { RedirectionValueColumnSource() { super(Long.class); } diff --git a/DB/src/main/java/io/deephaven/db/v2/by/IterativeOperatorStateFactory.java b/DB/src/main/java/io/deephaven/db/v2/by/IterativeOperatorStateFactory.java index 7b8e9fdc625..9706daaf73b 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/IterativeOperatorStateFactory.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/IterativeOperatorStateFactory.java @@ -16,13 +16,13 @@ * Creates iterative operators for the supplied type. 
*/ public abstract class IterativeOperatorStateFactory extends ReaggregatableStatefactory - implements IterativeChunkedOperatorFactory { + implements IterativeChunkedOperatorFactory { IterativeOperatorStateFactory() {} @Override public abstract IterativeChunkedAggregationOperator getChunkedOperator(Class type, String name, - boolean exposeInternalColumns); + boolean exposeInternalColumns); static IterativeChunkedAggregationOperator getSumChunked(Class type, String name) { if (type == Boolean.class || type == boolean.class) { @@ -49,8 +49,8 @@ static IterativeChunkedAggregationOperator getSumChunked(Class type, String name throw new UnsupportedOperationException("Unsupported type " + type); } - static IterativeChunkedAggregationOperator getMinMaxChunked(Class type, boolean minimum, - boolean isStreamOrAddOnly, String name) { + static IterativeChunkedAggregationOperator getMinMaxChunked(Class type, boolean minimum, boolean isStreamOrAddOnly, + String name) { if (!isStreamOrAddOnly) { return new SsmChunkedMinMaxOperator(type, minimum, name); } else { @@ -77,27 +77,24 @@ static IterativeChunkedAggregationOperator getMinMaxChunked(Class type, boolean } static IterativeChunkedAggregationOperator getPercentileChunked(Class type, double percentile, - boolean averageMedian, String name) { + boolean averageMedian, String name) { return new SsmChunkedPercentileOperator(type, percentile, averageMedian, name); } - static IterativeChunkedAggregationOperator getCountDistinctChunked(Class type, String name, - boolean countNulls, boolean exposeInternal, boolean isRollup) { - return DistinctOperatorFactory.createCountDistinct(type, name, countNulls, exposeInternal, - isRollup); + static IterativeChunkedAggregationOperator getCountDistinctChunked(Class type, String name, boolean countNulls, + boolean exposeInternal, boolean isRollup) { + return DistinctOperatorFactory.createCountDistinct(type, name, countNulls, exposeInternal, isRollup); } - static IterativeChunkedAggregationOperator 
getDistinctChunked(Class type, String name, - boolean countNulls, boolean exposeInternal, boolean isRollup) { - return DistinctOperatorFactory.createDistinct(type, name, countNulls, exposeInternal, - isRollup); + static IterativeChunkedAggregationOperator getDistinctChunked(Class type, String name, boolean countNulls, + boolean exposeInternal, boolean isRollup) { + return DistinctOperatorFactory.createDistinct(type, name, countNulls, exposeInternal, isRollup); } - static IterativeChunkedAggregationOperator getUniqueChunked(Class type, String name, - boolean countNulls, boolean exposeInternal, Object noKeyValue, Object nonUniqueValue, - boolean isRollup) { - return DistinctOperatorFactory.createUnique(type, name, countNulls, exposeInternal, - noKeyValue, nonUniqueValue, isRollup); + static IterativeChunkedAggregationOperator getUniqueChunked(Class type, String name, boolean countNulls, + boolean exposeInternal, Object noKeyValue, Object nonUniqueValue, boolean isRollup) { + return DistinctOperatorFactory.createUnique(type, name, countNulls, exposeInternal, noKeyValue, nonUniqueValue, + isRollup); } static IterativeChunkedAggregationOperator getAbsSumChunked(Class type, String name) { @@ -125,8 +122,7 @@ static IterativeChunkedAggregationOperator getAbsSumChunked(Class type, String n throw new UnsupportedOperationException("Unsupported type " + type); } - static IterativeChunkedAggregationOperator getAvgChunked(Class type, String name, - boolean exposeInternalColumns) { + static IterativeChunkedAggregationOperator getAvgChunked(Class type, String name, boolean exposeInternalColumns) { if (type == Byte.class || type == byte.class) { return new ByteChunkedAvgOperator(name, exposeInternalColumns); } else if (type == Character.class || type == char.class) { @@ -154,7 +150,7 @@ static IterativeChunkedAggregationOperator getAvgChunked(Class type, String name } static IterativeChunkedAggregationOperator getVarChunked(Class type, boolean std, String name, - boolean 
exposeInternalColumns) { + boolean exposeInternalColumns) { if (type == Byte.class || type == byte.class) { return new ByteChunkedVarOperator(std, name, exposeInternalColumns); } else if (type == Character.class || type == char.class) { diff --git a/DB/src/main/java/io/deephaven/db/v2/by/KeyOnlyAggregationFactory.java b/DB/src/main/java/io/deephaven/db/v2/by/KeyOnlyAggregationFactory.java index f51f6b51d33..ae203a81530 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/KeyOnlyAggregationFactory.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/KeyOnlyAggregationFactory.java @@ -11,12 +11,12 @@ public class KeyOnlyAggregationFactory implements AggregationContextFactory { @Override public AggregationContext makeAggregationContext(@NotNull final Table table, - @NotNull final String... groupByColumns) { + @NotNull final String... groupByColumns) { // noinspection unchecked return new AggregationContext( - IterativeChunkedAggregationOperator.ZERO_LENGTH_ITERATIVE_CHUNKED_AGGREGATION_OPERATOR_ARRAY, - CollectionUtil.ZERO_LENGTH_STRING_ARRAY_ARRAY, - ChunkSource.WithPrev.ZERO_LENGTH_CHUNK_SOURCE_WITH_PREV_ARRAY, - false); + IterativeChunkedAggregationOperator.ZERO_LENGTH_ITERATIVE_CHUNKED_AGGREGATION_OPERATOR_ARRAY, + CollectionUtil.ZERO_LENGTH_STRING_ARRAY_ARRAY, + ChunkSource.WithPrev.ZERO_LENGTH_CHUNK_SOURCE_WITH_PREV_ARRAY, + false); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/by/KeyOnlyFirstOrLastByStateFactory.java b/DB/src/main/java/io/deephaven/db/v2/by/KeyOnlyFirstOrLastByStateFactory.java index 7c110c1f899..79fa03fc996 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/KeyOnlyFirstOrLastByStateFactory.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/KeyOnlyFirstOrLastByStateFactory.java @@ -3,8 +3,7 @@ import java.util.Objects; /** - * A Flavor of FirstBy that produces no values from the original table, only a named column of - * source keys. + * A Flavor of FirstBy that produces no values from the original table, only a named column of source keys. 
*/ public class KeyOnlyFirstOrLastByStateFactory extends IterativeIndexStateFactory { @@ -27,8 +26,7 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; MemoKey memoKey = (MemoKey) o; - return Objects.equals(resultColumnName, memoKey.resultColumnName) - && type == memoKey.type; + return Objects.equals(resultColumnName, memoKey.resultColumnName) && type == memoKey.type; } @Override @@ -44,7 +42,7 @@ public KeyOnlyFirstOrLastByStateFactory(String resultColumn, AggType type) { if (type != AggType.First && type != AggType.Last) { throw new IllegalArgumentException( - "KeyOnlyFirstOrLastByStateFactory only support AggType.First and AggType.Last"); + "KeyOnlyFirstOrLastByStateFactory only support AggType.First and AggType.Last"); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/by/LastByStateFactoryImpl.java b/DB/src/main/java/io/deephaven/db/v2/by/LastByStateFactoryImpl.java index 3f2166e8ac1..05b2ec8ed45 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/LastByStateFactoryImpl.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/LastByStateFactoryImpl.java @@ -9,8 +9,7 @@ public LastByStateFactoryImpl() { this(false, false, 0); } - private LastByStateFactoryImpl(boolean lowestRollup, boolean secondRollup, - int rollupColumnIdentifier) { + private LastByStateFactoryImpl(boolean lowestRollup, boolean secondRollup, int rollupColumnIdentifier) { super(lowestRollup, secondRollup, rollupColumnIdentifier); } @@ -33,8 +32,7 @@ ReaggregatableStatefactory forRollup() { @Override ReaggregatableStatefactory rollupFactory() { return new SortedFirstOrLastByFactoryImpl(false, false, true, rollupColumnIdentifier, - REDIRECTION_INDEX_PREFIX + rollupColumnIdentifier - + ComboAggregateFactory.ROLLUP_COLUMN_SUFFIX); + REDIRECTION_INDEX_PREFIX + rollupColumnIdentifier + ComboAggregateFactory.ROLLUP_COLUMN_SUFFIX); } @Override @@ -43,10 +41,10 @@ public String toString() { return "LastByStateFactory"; } else { return "LastByStateFactory{" + - 
"lowestRollup=" + lowestRollup + - ", secondRollup=" + secondRollup + - ", rollupColumnIdentifier=" + rollupColumnIdentifier + - '}'; + "lowestRollup=" + lowestRollup + + ", secondRollup=" + secondRollup + + ", rollupColumnIdentifier=" + rollupColumnIdentifier + + '}'; } } } diff --git a/DB/src/main/java/io/deephaven/db/v2/by/MinMaxByStateFactoryImpl.java b/DB/src/main/java/io/deephaven/db/v2/by/MinMaxByStateFactoryImpl.java index 5e7d87b63e1..a5ebe850a13 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/MinMaxByStateFactoryImpl.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/MinMaxByStateFactoryImpl.java @@ -7,11 +7,10 @@ /** * Minimum and Maximum aggregation factory. * - * Operates in two modes, for non-refreshing tables it requires very little state (just the current - * minimum or maximum). + * Operates in two modes, for non-refreshing tables it requires very little state (just the current minimum or maximum). * - * For refreshing tables, it requires maintaining a TreeMap of values to counts; so that if the - * min/max value is removed we are able to identify the next lowest/highest value. + * For refreshing tables, it requires maintaining a TreeMap of values to counts; so that if the min/max value is removed + * we are able to identify the next lowest/highest value. * * You can use {@link AddOnlyMinMaxByStateFactoryImpl} if you want to force add-only behavior. * @@ -33,9 +32,8 @@ public MinMaxByStateFactoryImpl(boolean minimum) { * Create a minBy or maxBy factory. * * @param minimum true if selecting the minimum value, false if selecting the maximum value. 
- * @param addOnly if true create a factory only suitable for add-only tables, if false the - * add-only factory will be created for non-refreshing tables and the general factory is - * created for refreshing tables + * @param addOnly if true create a factory only suitable for add-only tables, if false the add-only factory will be + * created for non-refreshing tables and the general factory is created for refreshing tables */ MinMaxByStateFactoryImpl(boolean minimum, boolean addOnly) { this.minimum = minimum; diff --git a/DB/src/main/java/io/deephaven/db/v2/by/MinMaxIterativeOperatorFactory.java b/DB/src/main/java/io/deephaven/db/v2/by/MinMaxIterativeOperatorFactory.java index 368fc9dd775..5fc50ea8379 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/MinMaxIterativeOperatorFactory.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/MinMaxIterativeOperatorFactory.java @@ -20,7 +20,7 @@ public MinMaxIterativeOperatorFactory(boolean minimum, boolean isAddOnly) { @Override public IterativeChunkedAggregationOperator getChunkedOperator(Class type, String name, - boolean exposeInternalColumns) { + boolean exposeInternalColumns) { return getMinMaxChunked(type, minimum, isAddOnly, name); } diff --git a/DB/src/main/java/io/deephaven/db/v2/by/NonKeyColumnAggregationFactory.java b/DB/src/main/java/io/deephaven/db/v2/by/NonKeyColumnAggregationFactory.java index ad0eb5f0158..eeec597042e 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/NonKeyColumnAggregationFactory.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/NonKeyColumnAggregationFactory.java @@ -13,8 +13,7 @@ public class NonKeyColumnAggregationFactory implements AggregationContextFactory { private final IterativeChunkedOperatorFactory iterativeChunkedOperatorFactory; - public NonKeyColumnAggregationFactory( - IterativeChunkedOperatorFactory iterativeChunkedOperatorFactory) { + public NonKeyColumnAggregationFactory(IterativeChunkedOperatorFactory iterativeChunkedOperatorFactory) { this.iterativeChunkedOperatorFactory 
= iterativeChunkedOperatorFactory; } @@ -25,19 +24,17 @@ public boolean allowKeyOnlySubstitution() { @Override public AggregationContext makeAggregationContext(@NotNull final Table table, - @NotNull final String... groupByColumns) { + @NotNull final String... groupByColumns) { return getAllColumnOperators(table, groupByColumns, iterativeChunkedOperatorFactory); } - private static AggregationContext getAllColumnOperators(Table withView, - String[] groupByNameArray, IterativeChunkedOperatorFactory iterativeOperatorStateFactory) { + private static AggregationContext getAllColumnOperators(Table withView, String[] groupByNameArray, + IterativeChunkedOperatorFactory iterativeOperatorStateFactory) { final Set groupByNames = new HashSet<>(Arrays.asList(groupByNameArray)); final int operatorColumnCount = withView.getColumnSourceMap().size() - groupByNames.size(); - final List operators = - new ArrayList<>(operatorColumnCount); - final List> inputColumns = - new ArrayList<>(operatorColumnCount); + final List operators = new ArrayList<>(operatorColumnCount); + final List> inputColumns = new ArrayList<>(operatorColumnCount); final List inputNames = new ArrayList<>(operatorColumnCount); withView.getColumnSourceMap().forEach((name, columnSource) -> { @@ -47,14 +44,14 @@ private static AggregationContext getAllColumnOperators(Table withView, final Class type = columnSource.getType(); - // For DBDateTime columns, the in-memory source uses longs internally, and all supported - // aggregations (i.e. min and max) work correctly against longs. - final ColumnSource inputSource = columnSource.getType() == DBDateTime.class - ? ReinterpretUtilities.dateTimeToLongSource(columnSource) - : columnSource; + // For DBDateTime columns, the in-memory source uses longs internally, and all supported aggregations (i.e. + // min and max) work correctly against longs. + final ColumnSource inputSource = + columnSource.getType() == DBDateTime.class ? 
ReinterpretUtilities.dateTimeToLongSource(columnSource) + : columnSource; final IterativeChunkedAggregationOperator chunkedOperator = - iterativeOperatorStateFactory.getChunkedOperator(type, name, false); + iterativeOperatorStateFactory.getChunkedOperator(type, name, false); if (chunkedOperator != null) { // noinspection unchecked inputColumns.add(inputSource); @@ -69,10 +66,11 @@ private static AggregationContext getAllColumnOperators(Table withView, } // noinspection unchecked - return new AggregationContext(operators.toArray( - IterativeChunkedAggregationOperator.ZERO_LENGTH_ITERATIVE_CHUNKED_AGGREGATION_OPERATOR_ARRAY), - inputNameArray, - inputColumns.toArray(ChunkSource.WithPrev.ZERO_LENGTH_CHUNK_SOURCE_WITH_PREV_ARRAY)); + return new AggregationContext( + operators.toArray( + IterativeChunkedAggregationOperator.ZERO_LENGTH_ITERATIVE_CHUNKED_AGGREGATION_OPERATOR_ARRAY), + inputNameArray, + inputColumns.toArray(ChunkSource.WithPrev.ZERO_LENGTH_CHUNK_SOURCE_WITH_PREV_ARRAY)); } @Override diff --git a/DB/src/main/java/io/deephaven/db/v2/by/NullColumnAggregationTransformer.java b/DB/src/main/java/io/deephaven/db/v2/by/NullColumnAggregationTransformer.java index 803df9cda8f..92cfdf0dd13 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/NullColumnAggregationTransformer.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/NullColumnAggregationTransformer.java @@ -18,8 +18,8 @@ public void resultColumnFixup(Map> resultColumns) { final Map> savedColumns = new LinkedHashMap<>(resultColumns); resultColumns.clear(); // noinspection unchecked - resultColumnTypes.forEach( - (key, value) -> resultColumns.put(key, NullValueColumnSource.getInstance(value, null))); + resultColumnTypes + .forEach((key, value) -> resultColumns.put(key, NullValueColumnSource.getInstance(value, null))); resultColumns.putAll(savedColumns); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/by/PercentileByStateFactoryImpl.java 
b/DB/src/main/java/io/deephaven/db/v2/by/PercentileByStateFactoryImpl.java index 1d5e4da5de2..b542a16c6f3 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/PercentileByStateFactoryImpl.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/PercentileByStateFactoryImpl.java @@ -40,7 +40,7 @@ public boolean equals(Object o) { return false; final MemoKey memoKey = (MemoKey) o; return Double.compare(memoKey.percentile, percentile) == 0 && - averageMedian == memoKey.averageMedian; + averageMedian == memoKey.averageMedian; } @Override diff --git a/DB/src/main/java/io/deephaven/db/v2/by/PercentileIterativeOperatorFactory.java b/DB/src/main/java/io/deephaven/db/v2/by/PercentileIterativeOperatorFactory.java index 4d824771f99..b52b977bf14 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/PercentileIterativeOperatorFactory.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/PercentileIterativeOperatorFactory.java @@ -18,9 +18,8 @@ public PercentileIterativeOperatorFactory(double percentile, boolean averageMedi @Override public IterativeChunkedAggregationOperator getChunkedOperator(Class type, String name, - boolean exposeInternalColumns) { - return IterativeOperatorStateFactory.getPercentileChunked(type, percentile, averageMedian, - name); + boolean exposeInternalColumns) { + return IterativeOperatorStateFactory.getPercentileChunked(type, percentile, averageMedian, name); } @Override diff --git a/DB/src/main/java/io/deephaven/db/v2/by/ReaggregatableStatefactory.java b/DB/src/main/java/io/deephaven/db/v2/by/ReaggregatableStatefactory.java index b89d8c40980..3f732f3e019 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/ReaggregatableStatefactory.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/ReaggregatableStatefactory.java @@ -4,8 +4,7 @@ public abstract class ReaggregatableStatefactory implements AggregationStateFact /** * Returns true if this state factory supports rollup. 
* - * If this factory does not support rollup, calling forRollup and rollupFactory produce - * undefined results. + * If this factory does not support rollup, calling forRollup and rollupFactory produce undefined results. * * @return true if forRollup() and rollupFactory() are implemented. */ @@ -14,14 +13,13 @@ public abstract class ReaggregatableStatefactory implements AggregationStateFact /** * Returns the lowest level state factory for rollup. * - * This may differ from the regular factory in that often the result column is insufficient to - * perform a rollup (for example an average needs not just the result, but the count and sum). + * This may differ from the regular factory in that often the result column is insufficient to perform a rollup (for + * example an average needs not just the result, but the count and sum). */ abstract ReaggregatableStatefactory forRollup(); /** - * Returns the factory used to reaggregate the lowest or intermediate levels into the next - * level. + * Returns the factory used to reaggregate the lowest or intermediate levels into the next level. * * For example, a count factory should return a sum factory to roll up the counts by summation. 
*/ diff --git a/DB/src/main/java/io/deephaven/db/v2/by/ReplicateOperators.java b/DB/src/main/java/io/deephaven/db/v2/by/ReplicateOperators.java index 04dc7ead3ad..336af1446b7 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/ReplicateOperators.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/ReplicateOperators.java @@ -22,84 +22,72 @@ public class ReplicateOperators { public static void main(String[] args) throws IOException { - ReplicatePrimitiveCode.charToAllButBooleanAndFloats(SumCharChunk.class, - ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.charToAllButBooleanAndFloats(SumCharChunk.class, ReplicatePrimitiveCode.MAIN_SRC); ReplicatePrimitiveCode.charToAllButBooleanAndFloats(CharChunkedSumOperator.class, - ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.MAIN_SRC); ReplicatePrimitiveCode.charToAllButBooleanAndFloats(CharChunkedAvgOperator.class, - ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.MAIN_SRC); ReplicatePrimitiveCode.charToAllButBooleanAndFloats(CharChunkedVarOperator.class, - ReplicatePrimitiveCode.MAIN_SRC); - ReplicatePrimitiveCode.floatToAllFloatingPoints(SumFloatChunk.class, - ReplicatePrimitiveCode.MAIN_SRC); - ReplicatePrimitiveCode.floatToAllFloatingPoints(FloatChunkedSumOperator.class, - ReplicatePrimitiveCode.MAIN_SRC); - ReplicatePrimitiveCode.floatToAllFloatingPoints(FloatChunkedAvgOperator.class, - ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.floatToAllFloatingPoints(SumFloatChunk.class, ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.floatToAllFloatingPoints(FloatChunkedSumOperator.class, ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.floatToAllFloatingPoints(FloatChunkedAvgOperator.class, ReplicatePrimitiveCode.MAIN_SRC); ReplicatePrimitiveCode.floatToAllFloatingPoints(FloatChunkedReAvgOperator.class, - ReplicatePrimitiveCode.MAIN_SRC); - ReplicatePrimitiveCode.floatToAllFloatingPoints(FloatChunkedVarOperator.class, - 
ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.floatToAllFloatingPoints(FloatChunkedVarOperator.class, ReplicatePrimitiveCode.MAIN_SRC); ReplicatePrimitiveCode.charToAllButBoolean(CharChunkedAddOnlyMinMaxOperator.class, - ReplicatePrimitiveCode.MAIN_SRC); - ReplicatePrimitiveCode.charToAllButBoolean(CharToDoubleCast.class, - ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.charToAllButBoolean(CharToDoubleCast.class, ReplicatePrimitiveCode.MAIN_SRC); replicateObjectAddOnlyMinMax(); fixupLongAddOnlyMinMax(); - ReplicatePrimitiveCode.charToAllButBoolean( - CharAddOnlySortedFirstOrLastChunkedOperator.class, ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.charToAllButBoolean(CharAddOnlySortedFirstOrLastChunkedOperator.class, + ReplicatePrimitiveCode.MAIN_SRC); ReplicatePrimitiveCode.charToAllButBoolean(CharStreamSortedFirstOrLastChunkedOperator.class, - ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.MAIN_SRC); replicateObjectAddOnlyAndStreamSortedFirstLast(); } private static void replicateObjectAddOnlyMinMax() throws IOException { - final String objectAddOnlyMinMax = ReplicatePrimitiveCode - .charToObject(CharChunkedAddOnlyMinMaxOperator.class, ReplicatePrimitiveCode.MAIN_SRC); + final String objectAddOnlyMinMax = ReplicatePrimitiveCode.charToObject(CharChunkedAddOnlyMinMaxOperator.class, + ReplicatePrimitiveCode.MAIN_SRC); final File objectAddOnlyMinMaxFile = new File(objectAddOnlyMinMax); - List lines = ReplicateUtilities.fixupChunkAttributes( - FileUtils.readLines(objectAddOnlyMinMaxFile, Charset.defaultCharset())); - lines = ReplicateUtilities.globalReplacements(lines, "QueryConstants.NULL_OBJECT", "null", - "getObject", "get"); + List lines = ReplicateUtilities + .fixupChunkAttributes(FileUtils.readLines(objectAddOnlyMinMaxFile, Charset.defaultCharset())); + lines = ReplicateUtilities.globalReplacements(lines, "QueryConstants.NULL_OBJECT", "null", 
"getObject", "get"); lines = ReplicateUtilities.removeImport(lines, QueryConstants.class); lines = ReplicateUtilities.replaceRegion(lines, "extra constructor params", - Collections.singletonList(" Class type,")); + Collections.singletonList(" Class type,")); lines = ReplicateUtilities.replaceRegion(lines, "resultColumn initialization", - Collections.singletonList(" resultColumn = new ObjectArraySource<>(type);")); + Collections.singletonList(" resultColumn = new ObjectArraySource<>(type);")); FileUtils.writeLines(objectAddOnlyMinMaxFile, lines); } private static void fixupLongAddOnlyMinMax() throws IOException { - final String longBasePath = ReplicatePrimitiveCode.basePathForClass( - LongChunkedAddOnlyMinMaxOperator.class, ReplicatePrimitiveCode.MAIN_SRC); - final File longAddOnlyMinMaxFile = new File(longBasePath, - LongChunkedAddOnlyMinMaxOperator.class.getSimpleName() + ".java"); - List lines = ReplicateUtilities.fixupChunkAttributes( - FileUtils.readLines(longAddOnlyMinMaxFile, Charset.defaultCharset())); - lines = ReplicateUtilities.globalReplacements(lines, "LongArraySource", - "AbstractLongArraySource"); + final String longBasePath = ReplicatePrimitiveCode.basePathForClass(LongChunkedAddOnlyMinMaxOperator.class, + ReplicatePrimitiveCode.MAIN_SRC); + final File longAddOnlyMinMaxFile = + new File(longBasePath, LongChunkedAddOnlyMinMaxOperator.class.getSimpleName() + ".java"); + List lines = ReplicateUtilities + .fixupChunkAttributes(FileUtils.readLines(longAddOnlyMinMaxFile, Charset.defaultCharset())); + lines = ReplicateUtilities.globalReplacements(lines, "LongArraySource", "AbstractLongArraySource"); lines = ReplicateUtilities.replaceRegion(lines, "extra constructor params", - Collections.singletonList(" Class type,")); - lines = ReplicateUtilities.replaceRegion(lines, "resultColumn initialization", - Collections.singletonList( + Collections.singletonList(" Class type,")); + lines = ReplicateUtilities.replaceRegion(lines, "resultColumn initialization", 
Collections.singletonList( " resultColumn = type == DBDateTime.class ? new DateTimeArraySource() : new LongArraySource();")); - lines = ReplicateUtilities.addImport(lines, DBDateTime.class, DateTimeArraySource.class, - LongArraySource.class); + lines = ReplicateUtilities.addImport(lines, DBDateTime.class, DateTimeArraySource.class, LongArraySource.class); FileUtils.writeLines(longAddOnlyMinMaxFile, lines); } private static void replicateObjectAddOnlyAndStreamSortedFirstLast() throws IOException { - for (final Class charClass : Arrays.asList( - CharAddOnlySortedFirstOrLastChunkedOperator.class, - CharStreamSortedFirstOrLastChunkedOperator.class)) { + for (final Class charClass : Arrays.asList(CharAddOnlySortedFirstOrLastChunkedOperator.class, + CharStreamSortedFirstOrLastChunkedOperator.class)) { final String objectClassName = - ReplicatePrimitiveCode.charToObject(charClass, ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.charToObject(charClass, ReplicatePrimitiveCode.MAIN_SRC); final File objectClassFile = new File(objectClassName); - List lines = ReplicateUtilities.fixupChunkAttributes( - FileUtils.readLines(objectClassFile, Charset.defaultCharset())); + List lines = ReplicateUtilities + .fixupChunkAttributes(FileUtils.readLines(objectClassFile, Charset.defaultCharset())); lines = ReplicateUtilities.replaceRegion(lines, "sortColumnValues initialization", - Collections.singletonList( - " sortColumnValues = new ObjectArraySource<>(Object.class);")); + Collections.singletonList(" sortColumnValues = new ObjectArraySource<>(Object.class);")); FileUtils.writeLines(objectClassFile, lines); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/by/RollupSmartKeyColumnDuplicationTransformer.java b/DB/src/main/java/io/deephaven/db/v2/by/RollupSmartKeyColumnDuplicationTransformer.java index 4033872b8c3..8a79704c670 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/RollupSmartKeyColumnDuplicationTransformer.java +++ 
b/DB/src/main/java/io/deephaven/db/v2/by/RollupSmartKeyColumnDuplicationTransformer.java @@ -17,8 +17,7 @@ class RollupSmartKeyColumnDuplicationTransformer implements AggregationContextTr @Override public void resultColumnFixup(Map> resultColumns) { - final ColumnSource[] keySources = - Arrays.stream(names).map(resultColumns::get).toArray(ColumnSource[]::new); + final ColumnSource[] keySources = Arrays.stream(names).map(resultColumns::get).toArray(ColumnSource[]::new); resultColumns.put(RollupInfo.ROLLUP_COLUMN, new SmartKeySource(keySources)); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/by/SortedFirstOrLastByAggregationFactory.java b/DB/src/main/java/io/deephaven/db/v2/by/SortedFirstOrLastByAggregationFactory.java index b44a47ffeff..5d509c1226d 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/SortedFirstOrLastByAggregationFactory.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/SortedFirstOrLastByAggregationFactory.java @@ -22,7 +22,7 @@ public class SortedFirstOrLastByAggregationFactory implements AggregationContext private final String[] sortColumns; public SortedFirstOrLastByAggregationFactory(final boolean isFirst, final boolean isCombo, - final String... sortColumns) { + final String... sortColumns) { this.isFirst = isFirst; this.isCombo = isCombo; this.sortColumns = sortColumns; @@ -35,23 +35,22 @@ public boolean allowKeyOnlySubstitution() { @Override public AggregationContext makeAggregationContext(@NotNull final Table table, - @NotNull final String... groupByColumns) { + @NotNull final String... 
groupByColumns) { final Set groupBySet = new HashSet<>(Arrays.asList(groupByColumns)); return getAggregationContext(table, sortColumns, isFirst, isCombo, - table.getDefinition().getColumnNames().stream().filter(col -> !groupBySet.contains(col)) - .map(name -> new MatchPair(name, name)).toArray(MatchPair[]::new)); + table.getDefinition().getColumnNames().stream().filter(col -> !groupBySet.contains(col)) + .map(name -> new MatchPair(name, name)).toArray(MatchPair[]::new)); } @NotNull static AggregationContext getAggregationContext(@NotNull final Table table, - @NotNull final String[] sortColumns, - final boolean isFirst, - final boolean isCombo, - @NotNull final MatchPair[] resultNames) { + @NotNull final String[] sortColumns, + final boolean isFirst, + final boolean isCombo, + @NotNull final MatchPair[] resultNames) { // noinspection unchecked final ChunkSource.WithPrev[] inputSource = new ChunkSource.WithPrev[1]; - final IterativeChunkedAggregationOperator[] operator = - new IterativeChunkedAggregationOperator[1]; + final IterativeChunkedAggregationOperator[] operator = new IterativeChunkedAggregationOperator[1]; final String[][] name = new String[1][]; if (sortColumns.length == 1) { @@ -59,8 +58,7 @@ static AggregationContext getAggregationContext(@NotNull final Table table, // noinspection unchecked inputSource[0] = columnSource; } else { - // create a tuple source, because our underlying SSA does not handle multiple sort - // columns + // create a tuple source, because our underlying SSA does not handle multiple sort columns final ColumnSource[] sortColumnSources = new ColumnSource[sortColumns.length]; for (int ii = 0; ii < sortColumnSources.length; ++ii) { sortColumnSources[ii] = table.getColumnSource(sortColumns[ii]); @@ -70,8 +68,7 @@ static AggregationContext getAggregationContext(@NotNull final Table table, } name[0] = sortColumns; - operator[0] = - makeOperator(inputSource[0].getChunkType(), isFirst, isCombo, resultNames, table); + operator[0] = 
makeOperator(inputSource[0].getChunkType(), isFirst, isCombo, resultNames, table); return new AggregationContext(operator, name, inputSource); } @@ -81,12 +78,11 @@ public String toString() { return (isFirst ? "SortedFirstBy" : "SortedLastBy") + Arrays.toString(sortColumns); } - private static IterativeChunkedAggregationOperator makeOperator( - @NotNull final ChunkType chunkType, - final boolean isFirst, - final boolean isCombo, - @NotNull final MatchPair[] resultPairs, - @NotNull final Table sourceTable) { + private static IterativeChunkedAggregationOperator makeOperator(@NotNull final ChunkType chunkType, + final boolean isFirst, + final boolean isCombo, + @NotNull final MatchPair[] resultPairs, + @NotNull final Table sourceTable) { final boolean isAddOnly = ((BaseTable) sourceTable).isAddOnly(); final boolean isStream = StreamTableTools.isStream(sourceTable); if (isAddOnly) { diff --git a/DB/src/main/java/io/deephaven/db/v2/by/SortedFirstOrLastByFactoryImpl.java b/DB/src/main/java/io/deephaven/db/v2/by/SortedFirstOrLastByFactoryImpl.java index cac90f6b7e2..1395d692f90 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/SortedFirstOrLastByFactoryImpl.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/SortedFirstOrLastByFactoryImpl.java @@ -10,8 +10,7 @@ import java.util.Arrays; import java.util.Objects; -public class SortedFirstOrLastByFactoryImpl extends IterativeIndexStateFactory - implements Serializable { +public class SortedFirstOrLastByFactoryImpl extends IterativeIndexStateFactory implements Serializable { final private String[] sortColumnNames; final private boolean minimum; @@ -20,8 +19,8 @@ public class SortedFirstOrLastByFactoryImpl extends IterativeIndexStateFactory this(minimum, false, false, 0, sortColumnNames); } - SortedFirstOrLastByFactoryImpl(boolean minimum, boolean firstRollup, boolean secondRollup, - int rollupIdentifier, String... 
sortColumnNames) { + SortedFirstOrLastByFactoryImpl(boolean minimum, boolean firstRollup, boolean secondRollup, int rollupIdentifier, + String... sortColumnNames) { super(firstRollup, secondRollup, rollupIdentifier); Require.gtZero(sortColumnNames.length, "sortColumnNames.length"); this.sortColumnNames = sortColumnNames; @@ -49,7 +48,7 @@ public boolean equals(Object o) { return false; final MemoKey memoKey = (MemoKey) o; return minimum == memoKey.minimum && - Arrays.equals(sortColumnNames, memoKey.sortColumnNames); + Arrays.equals(sortColumnNames, memoKey.sortColumnNames); } @Override diff --git a/DB/src/main/java/io/deephaven/db/v2/by/SortedFirstOrLastChunkedOperator.java b/DB/src/main/java/io/deephaven/db/v2/by/SortedFirstOrLastChunkedOperator.java index c472e75ec46..2fa2b423831 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/SortedFirstOrLastChunkedOperator.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/SortedFirstOrLastChunkedOperator.java @@ -30,7 +30,7 @@ public class SortedFirstOrLastChunkedOperator implements IterativeChunkedAggrega private final ObjectArraySource ssas; SortedFirstOrLastChunkedOperator(ChunkType chunkType, boolean isFirst, MatchPair[] resultNames, - Table originalTable) { + Table originalTable) { this.chunkType = chunkType; this.isFirst = isFirst; this.ssaFactory = SegmentedSortedArray.makeFactory(chunkType, false, 1024); @@ -41,35 +41,33 @@ public class SortedFirstOrLastChunkedOperator implements IterativeChunkedAggrega this.resultColumns = new LinkedHashMap<>(); for (final MatchPair mp : resultNames) { // noinspection unchecked,rawtypes - resultColumns.put(mp.left(), new ReadOnlyRedirectedColumnSource(redirectionIndex, - originalTable.getColumnSource(mp.right()))); + resultColumns.put(mp.left(), + new ReadOnlyRedirectedColumnSource(redirectionIndex, originalTable.getColumnSource(mp.right()))); } } @Override public void addChunk(BucketedContext bucketedContext, Chunk values, - LongChunk inputIndices, IntChunk destinations, - 
IntChunk startPositions, IntChunk length, - WritableBooleanChunk stateModified) { - final SortedFirstOrLastBucketedContext context = - (SortedFirstOrLastBucketedContext) bucketedContext; + LongChunk inputIndices, IntChunk destinations, + IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { + final SortedFirstOrLastBucketedContext context = (SortedFirstOrLastBucketedContext) bucketedContext; final int inputSize = inputIndices.size(); context.sortedIndices.setSize(inputSize); context.sortedIndices.copyFromTypedChunk(inputIndices, 0, 0, inputSize); context.sortedValues.setSize(inputSize); context.sortedValues.copyFromChunk(values, 0, 0, inputSize); - context.longSortKernel.sort(context.sortedIndices, context.sortedValues, startPositions, - length); + context.longSortKernel.sort(context.sortedIndices, context.sortedValues, startPositions, length); for (int ii = 0; ii < startPositions.size(); ++ii) { final int startPosition = startPositions.get(ii); final long destination = destinations.get(startPosition); - final LongChunk indexSlice = context.indexResettable - .resetFromTypedChunk(context.sortedIndices, startPosition, length.get(ii)); - final Chunk valuesSlice = context.valuesResettable - .resetFromChunk(context.sortedValues, startPosition, length.get(ii)); + final LongChunk indexSlice = + context.indexResettable.resetFromTypedChunk(context.sortedIndices, startPosition, length.get(ii)); + final Chunk valuesSlice = + context.valuesResettable.resetFromChunk(context.sortedValues, startPosition, length.get(ii)); stateModified.set(ii, addSortedChunk(valuesSlice, indexSlice, destination)); } @@ -77,28 +75,26 @@ public void addChunk(BucketedContext bucketedContext, Chunk va @Override public void removeChunk(BucketedContext bucketedContext, Chunk values, - LongChunk inputIndices, IntChunk destinations, - IntChunk startPositions, IntChunk length, - WritableBooleanChunk stateModified) { - final SortedFirstOrLastBucketedContext context = - 
(SortedFirstOrLastBucketedContext) bucketedContext; + LongChunk inputIndices, IntChunk destinations, + IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { + final SortedFirstOrLastBucketedContext context = (SortedFirstOrLastBucketedContext) bucketedContext; final int inputSize = inputIndices.size(); context.sortedIndices.setSize(inputSize); context.sortedIndices.copyFromTypedChunk(inputIndices, 0, 0, inputSize); context.sortedValues.setSize(inputSize); context.sortedValues.copyFromChunk(values, 0, 0, inputSize); - context.longSortKernel.sort(context.sortedIndices, context.sortedValues, startPositions, - length); + context.longSortKernel.sort(context.sortedIndices, context.sortedValues, startPositions, length); for (int ii = 0; ii < startPositions.size(); ++ii) { final int startPosition = startPositions.get(ii); final long destination = destinations.get(startPosition); - final LongChunk indexSlice = context.indexResettable - .resetFromTypedChunk(context.sortedIndices, startPosition, length.get(ii)); - final Chunk valuesSlice = context.valuesResettable - .resetFromChunk(context.sortedValues, startPosition, length.get(ii)); + final LongChunk indexSlice = + context.indexResettable.resetFromTypedChunk(context.sortedIndices, startPosition, length.get(ii)); + final Chunk valuesSlice = + context.valuesResettable.resetFromChunk(context.sortedValues, startPosition, length.get(ii)); stateModified.set(ii, removeSortedChunk(valuesSlice, indexSlice, destination)); } @@ -106,29 +102,27 @@ public void removeChunk(BucketedContext bucketedContext, Chunk @Override public void modifyChunk(BucketedContext bucketedContext, Chunk previousValues, - Chunk newValues, LongChunk postShiftIndices, - IntChunk destinations, IntChunk startPositions, - IntChunk length, WritableBooleanChunk stateModified) { - final SortedFirstOrLastBucketedContext context = - (SortedFirstOrLastBucketedContext) bucketedContext; + Chunk newValues, LongChunk postShiftIndices, + IntChunk 
destinations, IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { + final SortedFirstOrLastBucketedContext context = (SortedFirstOrLastBucketedContext) bucketedContext; final int inputSize = postShiftIndices.size(); context.sortedIndices.setSize(inputSize); context.sortedIndices.copyFromTypedChunk(postShiftIndices, 0, 0, inputSize); context.sortedValues.setSize(inputSize); context.sortedValues.copyFromChunk(previousValues, 0, 0, inputSize); - context.longSortKernel.sort(context.sortedIndices, context.sortedValues, startPositions, - length); + context.longSortKernel.sort(context.sortedIndices, context.sortedValues, startPositions, length); for (int ii = 0; ii < startPositions.size(); ++ii) { final int startPosition = startPositions.get(ii); final long destination = destinations.get(startPosition); final int runLength = length.get(ii); - final LongChunk indexSlice = context.indexResettable - .resetFromTypedChunk(context.sortedIndices, startPosition, runLength); - final Chunk valuesSlice = context.valuesResettable - .resetFromChunk(context.sortedValues, startPosition, runLength); + final LongChunk indexSlice = + context.indexResettable.resetFromTypedChunk(context.sortedIndices, startPosition, runLength); + final Chunk valuesSlice = + context.valuesResettable.resetFromChunk(context.sortedValues, startPosition, runLength); final SegmentedSortedArray ssa = ssaForSlot(destination); ssa.remove(valuesSlice, indexSlice); @@ -136,18 +130,17 @@ public void modifyChunk(BucketedContext bucketedContext, Chunk context.sortedIndices.copyFromTypedChunk(postShiftIndices, 0, 0, inputSize); context.sortedValues.copyFromChunk(newValues, 0, 0, inputSize); - context.longSortKernel.sort(context.sortedIndices, context.sortedValues, startPositions, - length); + context.longSortKernel.sort(context.sortedIndices, context.sortedValues, startPositions, length); for (int ii = 0; ii < startPositions.size(); ++ii) { final int startPosition = startPositions.get(ii); 
final long destination = destinations.get(startPosition); final int runLength = length.get(ii); - final LongChunk indexSlice = context.indexResettable - .resetFromTypedChunk(context.sortedIndices, startPosition, runLength); - final Chunk valuesSlice = context.valuesResettable - .resetFromChunk(context.sortedValues, startPosition, runLength); + final LongChunk indexSlice = + context.indexResettable.resetFromTypedChunk(context.sortedIndices, startPosition, runLength); + final Chunk valuesSlice = + context.valuesResettable.resetFromChunk(context.sortedValues, startPosition, runLength); final SegmentedSortedArray ssa = ssaForSlot(destination); ssa.insert(valuesSlice, indexSlice); @@ -158,20 +151,19 @@ public void modifyChunk(BucketedContext bucketedContext, Chunk if (oldValue != newValue) { stateModified.set(ii, true); } else { - stateModified.set(ii, hasRedirection(postShiftIndices, newValue, startPosition, - startPosition + runLength)); + stateModified.set(ii, + hasRedirection(postShiftIndices, newValue, startPosition, startPosition + runLength)); } } } @Override public void shiftChunk(BucketedContext bucketedContext, Chunk previousValues, - Chunk newValues, LongChunk preShiftIndices, - LongChunk postShiftIndices, IntChunk destinations, - IntChunk startPositions, IntChunk length, - WritableBooleanChunk stateModified) { - final SortedFirstOrLastBucketedContext context = - (SortedFirstOrLastBucketedContext) bucketedContext; + Chunk newValues, LongChunk preShiftIndices, + LongChunk postShiftIndices, IntChunk destinations, + IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { + final SortedFirstOrLastBucketedContext context = (SortedFirstOrLastBucketedContext) bucketedContext; final int inputSize = newValues.size(); final WritableLongChunk sortedPreIndices = context.sortedIndices; @@ -181,8 +173,7 @@ public void shiftChunk(BucketedContext bucketedContext, Chunk ChunkUtils.fillInOrder(context.sortedPositions); 
context.sortedValues.setSize(inputSize); context.sortedValues.copyFromChunk(previousValues, 0, 0, inputSize); - context.intSortKernel.sort(context.sortedPositions, context.sortedValues, startPositions, - length); + context.intSortKernel.sort(context.sortedPositions, context.sortedValues, startPositions, length); // now permute the indices according to sortedPosition LongPermuteKernel.permuteInput(preShiftIndices, context.sortedPositions, sortedPreIndices); @@ -192,48 +183,44 @@ public void shiftChunk(BucketedContext bucketedContext, Chunk final long destination = destinations.get(startPosition); final int runLength = length.get(ii); - final Chunk valuesSlice = context.valuesResettable - .resetFromChunk(context.sortedValues, startPosition, runLength); + final Chunk valuesSlice = + context.valuesResettable.resetFromChunk(context.sortedValues, startPosition, runLength); final SegmentedSortedArray ssa = ssaForSlot(destination); - ssa.remove(valuesSlice, context.indexResettable.resetFromTypedChunk(sortedPreIndices, - startPosition, runLength)); + ssa.remove(valuesSlice, + context.indexResettable.resetFromTypedChunk(sortedPreIndices, startPosition, runLength)); } ChunkUtils.fillInOrder(context.sortedPositions); context.sortedValues.copyFromChunk(newValues, 0, 0, inputSize); - context.intSortKernel.sort(context.sortedPositions, context.sortedValues, startPositions, - length); - LongPermuteKernel.permuteInput(postShiftIndices, context.sortedPositions, - context.sortedPostIndices); + context.intSortKernel.sort(context.sortedPositions, context.sortedValues, startPositions, length); + LongPermuteKernel.permuteInput(postShiftIndices, context.sortedPositions, context.sortedPostIndices); for (int ii = 0; ii < startPositions.size(); ++ii) { final int startPosition = startPositions.get(ii); final long destination = destinations.get(startPosition); final int runLength = length.get(ii); - final Chunk valuesSlice = context.valuesResettable - .resetFromChunk(context.sortedValues, 
startPosition, runLength); + final Chunk valuesSlice = + context.valuesResettable.resetFromChunk(context.sortedValues, startPosition, runLength); final SegmentedSortedArray ssa = ssaForSlot(destination); - ssa.insert(valuesSlice, context.indexResettable - .resetFromTypedChunk(context.sortedPostIndices, startPosition, runLength)); + ssa.insert(valuesSlice, + context.indexResettable.resetFromTypedChunk(context.sortedPostIndices, startPosition, runLength)); final long newValue = isFirst ? ssa.getFirst() : ssa.getLast(); final long oldValue = redirections.getAndSetUnsafe(destination, newValue); final boolean changed = newValue != oldValue; - // if we just shifted something, then this is not a true modification (and modifyIndices - // will catch it later); + // if we just shifted something, then this is not a true modification (and modifyIndices will catch it + // later); // if on the other hand, our index changed, then we must mark the state as modified - final int chunkLocationOfRelevance = - isFirst ? startPosition : startPosition + runLength - 1; + final int chunkLocationOfRelevance = isFirst ? 
startPosition : startPosition + runLength - 1; final long chunkNewValue = context.sortedPostIndices.get(chunkLocationOfRelevance); if (chunkNewValue == newValue) { - final int chunkIndex = binarySearch(postShiftIndices, chunkNewValue, startPosition, - startPosition + runLength); + final int chunkIndex = + binarySearch(postShiftIndices, chunkNewValue, startPosition, startPosition + runLength); final long chunkOldValue = preShiftIndices.get(chunkIndex); - // if the index was modified, then we must set modification to true; otherwise we - // depend on the + // if the index was modified, then we must set modification to true; otherwise we depend on the // modifyIndices call to catch if the row was modified if (chunkOldValue != oldValue) { stateModified.set(ii, true); @@ -246,25 +233,22 @@ public void shiftChunk(BucketedContext bucketedContext, Chunk @Override public void modifyIndices(BucketedContext context, LongChunk inputIndices, - IntChunk destinations, IntChunk startPositions, - IntChunk length, WritableBooleanChunk stateModified) { + IntChunk destinations, IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { for (int ii = 0; ii < startPositions.size(); ++ii) { final int startPosition = startPositions.get(ii); final int slotSize = length.get(ii); final long destination = destinations.get(startPosition); final long redirectedRow = redirections.getUnsafe(destination); - stateModified.set(ii, hasRedirection(inputIndices, redirectedRow, startPosition, - startPosition + slotSize)); + stateModified.set(ii, hasRedirection(inputIndices, redirectedRow, startPosition, startPosition + slotSize)); } } @Override - public boolean addChunk(SingletonContext singletonContext, int chunkSize, - Chunk values, LongChunk inputIndices, - long destination) { - final SortedFirstOrLastSingletonContext context = - (SortedFirstOrLastSingletonContext) singletonContext; + public boolean addChunk(SingletonContext singletonContext, int chunkSize, Chunk values, + 
LongChunk inputIndices, long destination) { + final SortedFirstOrLastSingletonContext context = (SortedFirstOrLastSingletonContext) singletonContext; final int inputSize = inputIndices.size(); context.sortedIndices.copyFromTypedChunk(inputIndices, 0, 0, inputSize); @@ -277,11 +261,9 @@ public boolean addChunk(SingletonContext singletonContext, int chunkSize, } @Override - public boolean removeChunk(SingletonContext singletonContext, int chunkSize, - Chunk values, LongChunk inputIndices, - long destination) { - final SortedFirstOrLastSingletonContext context = - (SortedFirstOrLastSingletonContext) singletonContext; + public boolean removeChunk(SingletonContext singletonContext, int chunkSize, Chunk values, + LongChunk inputIndices, long destination) { + final SortedFirstOrLastSingletonContext context = (SortedFirstOrLastSingletonContext) singletonContext; final int inputSize = inputIndices.size(); context.sortedIndices.copyFromTypedChunk(inputIndices, 0, 0, inputSize); @@ -294,11 +276,9 @@ public boolean removeChunk(SingletonContext singletonContext, int chunkSize, } @Override - public boolean modifyChunk(SingletonContext singletonContext, int chunkSize, - Chunk previousValues, Chunk newValues, - LongChunk postShiftIndices, long destination) { - final SortedFirstOrLastSingletonContext context = - (SortedFirstOrLastSingletonContext) singletonContext; + public boolean modifyChunk(SingletonContext singletonContext, int chunkSize, Chunk previousValues, + Chunk newValues, LongChunk postShiftIndices, long destination) { + final SortedFirstOrLastSingletonContext context = (SortedFirstOrLastSingletonContext) singletonContext; final int inputSize = postShiftIndices.size(); context.sortedIndices.copyFromTypedChunk(postShiftIndices, 0, 0, inputSize); @@ -333,12 +313,10 @@ public boolean modifyChunk(SingletonContext singletonContext, int chunkSize, } @Override - public boolean shiftChunk(SingletonContext singletonContext, - Chunk previousValues, Chunk newValues, - LongChunk 
preInputIndices, - LongChunk postInputIndices, long destination) { - final SortedFirstOrLastSingletonContext context = - (SortedFirstOrLastSingletonContext) singletonContext; + public boolean shiftChunk(SingletonContext singletonContext, Chunk previousValues, + Chunk newValues, LongChunk preInputIndices, + LongChunk postInputIndices, long destination) { + final SortedFirstOrLastSingletonContext context = (SortedFirstOrLastSingletonContext) singletonContext; final int inputSize = preInputIndices.size(); context.sortedPositions.setSize(inputSize); @@ -349,8 +327,7 @@ public boolean shiftChunk(SingletonContext singletonContext, // now permute the indices according to sortedPosition context.sortedIndices.setSize(inputSize); - LongPermuteKernel.permuteInput(preInputIndices, context.sortedPositions, - context.sortedIndices); + LongPermuteKernel.permuteInput(preInputIndices, context.sortedPositions, context.sortedIndices); final SegmentedSortedArray ssa = ssaForSlot(destination); ssa.remove(context.sortedValues, context.sortedIndices); @@ -360,8 +337,7 @@ public boolean shiftChunk(SingletonContext singletonContext, context.intSortKernel.sort(context.sortedPositions, context.sortedValues); // now permute the indices according to sortedPosition - LongPermuteKernel.permuteInput(postInputIndices, context.sortedPositions, - context.sortedIndices); + LongPermuteKernel.permuteInput(postInputIndices, context.sortedPositions, context.sortedIndices); ssa.insert(context.sortedValues, context.sortedIndices); final long newValue = isFirst ? 
ssa.getFirst() : ssa.getLast(); @@ -378,20 +354,17 @@ public boolean shiftChunk(SingletonContext singletonContext, // We are the new value; we need to determine if we were also the old value final int newChunkIndex = binarySearch(postInputIndices, chunkNewValue, 0, inputSize); final long oldChunkValue = preInputIndices.get(newChunkIndex); - // if the index changed, then we are modified; for cases where the index did not change, - // then we are + // if the index changed, then we are modified; for cases where the index did not change, then we are // depending on the modifyIndices call to catch this row's modification return oldChunkValue != oldValue; } - // our new value was not the chunk's value so any change is not just shifting our new value - // somewhere + // our new value was not the chunk's value so any change is not just shifting our new value somewhere return oldValue != newValue; } @Override - public boolean modifyIndices(SingletonContext context, LongChunk indices, - long destination) { + public boolean modifyIndices(SingletonContext context, LongChunk indices, long destination) { if (indices.size() == 0) { return false; } @@ -400,8 +373,7 @@ public boolean modifyIndices(SingletonContext context, LongChunk indices, - long redirectedRow, int lo, int hi) { + private static boolean hasRedirection(LongChunk indices, long redirectedRow, int lo, int hi) { while (lo < hi) { final int mid = (lo + hi) / 2; final long candidate = indices.get(mid); @@ -417,8 +389,7 @@ private static boolean hasRedirection(LongChunk indices, return false; } - private static int binarySearch(LongChunk indices, long searchValue, - int lo, int hi) { + private static int binarySearch(LongChunk indices, long searchValue, int lo, int hi) { while (lo < hi) { final int mid = (lo + hi) / 2; final long candidate = indices.get(mid); @@ -434,8 +405,7 @@ private static int binarySearch(LongChunk indices, long se throw new IllegalStateException(); } - private boolean addSortedChunk(Chunk values, 
LongChunk indices, - long destination) { + private boolean addSortedChunk(Chunk values, LongChunk indices, long destination) { final SegmentedSortedArray ssa = ssaForSlot(destination); ssa.insert(values, indices); final long newValue = isFirst ? ssa.getFirst() : ssa.getLast(); @@ -451,8 +421,7 @@ private SegmentedSortedArray ssaForSlot(long destination) { return ssa; } - private boolean removeSortedChunk(Chunk values, LongChunk indices, - long destination) { + private boolean removeSortedChunk(Chunk values, LongChunk indices, long destination) { final SegmentedSortedArray ssa = ssaForSlot(destination); ssa.remove(values, indices); final long newValue = isFirst ? ssa.getFirst() : ssa.getLast(); @@ -492,10 +461,8 @@ private SortedFirstOrLastSingletonContext(ChunkType chunkType, int size) { sortedIndices = WritableLongChunk.makeWritableChunk(size); sortedValues = chunkType.makeWritableChunk(size); sortedPositions = WritableIntChunk.makeWritableChunk(size); - longSortKernel = - LongSortKernel.makeContext(chunkType, SortingOrder.Ascending, size, true); - intSortKernel = - IntSortKernel.makeContext(chunkType, SortingOrder.Ascending, size, true); + longSortKernel = LongSortKernel.makeContext(chunkType, SortingOrder.Ascending, size, true); + intSortKernel = IntSortKernel.makeContext(chunkType, SortingOrder.Ascending, size, true); } @Override @@ -529,10 +496,8 @@ private SortedFirstOrLastBucketedContext(ChunkType chunkType, int size) { sortedValues = chunkType.makeWritableChunk(size); indexResettable = ResettableLongChunk.makeResettableChunk(); valuesResettable = chunkType.makeResettableReadOnlyChunk(); - longSortKernel = - LongSortKernel.makeContext(chunkType, SortingOrder.Ascending, size, true); - intSortKernel = - IntSortKernel.makeContext(chunkType, SortingOrder.Ascending, size, true); + longSortKernel = LongSortKernel.makeContext(chunkType, SortingOrder.Ascending, size, true); + intSortKernel = IntSortKernel.makeContext(chunkType, SortingOrder.Ascending, size, true); 
sortedPositions = WritableIntChunk.makeWritableChunk(size); } diff --git a/DB/src/main/java/io/deephaven/db/v2/by/StaticFirstOrLastChunkedOperator.java b/DB/src/main/java/io/deephaven/db/v2/by/StaticFirstOrLastChunkedOperator.java index 0356109b1ff..5acd3f3c2ce 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/StaticFirstOrLastChunkedOperator.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/StaticFirstOrLastChunkedOperator.java @@ -15,15 +15,15 @@ public class StaticFirstOrLastChunkedOperator extends BaseAddOnlyFirstOrLastChunkedOperator { StaticFirstOrLastChunkedOperator(boolean isFirst, MatchPair[] resultPairs, Table originalTable, - String exposeRedirectionAs) { + String exposeRedirectionAs) { super(isFirst, resultPairs, originalTable, exposeRedirectionAs); } @Override public void addChunk(BucketedContext bucketedContext, Chunk values, - LongChunk inputIndices, IntChunk destinations, - IntChunk startPositions, IntChunk length, - WritableBooleanChunk stateModified) { + LongChunk inputIndices, IntChunk destinations, + IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { for (int ii = 0; ii < startPositions.size(); ++ii) { final int startPosition = startPositions.get(ii); final int runLength = length.get(ii); @@ -34,14 +34,12 @@ public void addChunk(BucketedContext bucketedContext, Chunk va } @Override - public boolean addChunk(SingletonContext singletonContext, int chunkSize, - Chunk values, LongChunk inputIndices, - long destination) { + public boolean addChunk(SingletonContext singletonContext, int chunkSize, Chunk values, + LongChunk inputIndices, long destination) { return addChunk(inputIndices, 0, inputIndices.size(), destination); } - private boolean addChunk(LongChunk indices, int start, int length, - long destination) { + private boolean addChunk(LongChunk indices, int start, int length, long destination) { if (length == 0) { return false; } diff --git a/DB/src/main/java/io/deephaven/db/v2/by/StdState.java 
b/DB/src/main/java/io/deephaven/db/v2/by/StdState.java index 277fc3a1af7..6be15fe5397 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/StdState.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/StdState.java @@ -55,9 +55,9 @@ void savePrevious() { @Override public String toString() { return "Std{" + - "sum=" + sum + - ", sum2=" + sum2 + - ", nonNull=" + nonNullCount + - '}'; + "sum=" + sum + + ", sum2=" + sum2 + + ", nonNull=" + nonNullCount + + '}'; } } diff --git a/DB/src/main/java/io/deephaven/db/v2/by/StdStateFactory.java b/DB/src/main/java/io/deephaven/db/v2/by/StdStateFactory.java index 40a2e11b2d0..09244d103e2 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/StdStateFactory.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/StdStateFactory.java @@ -12,7 +12,7 @@ public StdStateFactory() {} @Override public IterativeChunkedAggregationOperator getChunkedOperator(Class type, String name, - boolean exposeInternalColumns) { + boolean exposeInternalColumns) { return getVarChunked(type, true, name, exposeInternalColumns); } diff --git a/DB/src/main/java/io/deephaven/db/v2/by/StdStateWithNan.java b/DB/src/main/java/io/deephaven/db/v2/by/StdStateWithNan.java index 7de95b35be2..48c7d2c4bbf 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/StdStateWithNan.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/StdStateWithNan.java @@ -32,7 +32,6 @@ void savePrevious() { @Override public String toString() { - return "Std{" + "sum=" + sum + ", sum2=" + sum2 + ", nan=" + nanCount + ", nonNull=" - + nonNullCount + '}'; + return "Std{" + "sum=" + sum + ", sum2=" + sum2 + ", nan=" + nanCount + ", nonNull=" + nonNullCount + '}'; } } diff --git a/DB/src/main/java/io/deephaven/db/v2/by/StreamFirstChunkedOperator.java b/DB/src/main/java/io/deephaven/db/v2/by/StreamFirstChunkedOperator.java index 3bc5ab85377..9faa84eeb31 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/StreamFirstChunkedOperator.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/StreamFirstChunkedOperator.java 
@@ -27,26 +27,24 @@ public class StreamFirstChunkedOperator extends BaseStreamFirstOrLastChunkedOper *

    * The next destination slot that we expect to be used. *

    - * Any destination less than this one can safely be ignored while processing adds since the - * first row can never change once a destination has been created given that we ignore removes. + * Any destination less than this one can safely be ignored while processing adds since the first row can never + * change once a destination has been created given that we ignore removes. */ private long nextDestination; /** *

    - * The first destination that we used on the current step (if we used any). At the very - * beginning of a step, this is equivalent to {@link #nextDestination} and also the result - * table's size. + * The first destination that we used on the current step (if we used any). At the very beginning of a step, this is + * equivalent to {@link #nextDestination} and also the result table's size. *

    - * We use this as an offset shift for {@code redirections}, so that {@code redirections} only - * needs to hold first source keys for newly-added destinations, rather than the entire space. + * We use this as an offset shift for {@code redirections}, so that {@code redirections} only needs to hold first + * source keys for newly-added destinations, rather than the entire space. *

    * At the end of a step, this is updated to prepare for the next step. */ private long firstDestinationThisStep; - StreamFirstChunkedOperator(@NotNull final MatchPair[] resultPairs, - @NotNull final Table streamTable) { + StreamFirstChunkedOperator(@NotNull final MatchPair[] resultPairs, @NotNull final Table streamTable) { super(resultPairs, streamTable); } @@ -57,8 +55,7 @@ public final boolean unchunkedIndex() { @Override public final void startTrackingPrevValues() { - // We never change the value at any key in outputColumns since there are no removes; - // consequently there's no + // We never change the value at any key in outputColumns since there are no removes; consequently there's no // need to enable previous value tracking. } @@ -69,12 +66,12 @@ public void ensureCapacity(final long tableSize) { @Override public void addChunk(final BucketedContext context, // Unused - final Chunk values, // Unused - @NotNull final LongChunk inputIndices, - @NotNull final IntChunk destinations, - @NotNull final IntChunk startPositions, - final IntChunk length, // Unused - @NotNull final WritableBooleanChunk stateModified) { + final Chunk values, // Unused + @NotNull final LongChunk inputIndices, + @NotNull final IntChunk destinations, + @NotNull final IntChunk startPositions, + final IntChunk length, // Unused + @NotNull final WritableBooleanChunk stateModified) { for (int ii = 0; ii < startPositions.size(); ++ii) { final int startPosition = startPositions.get(ii); final long destination = destinations.get(startPosition); @@ -86,10 +83,10 @@ public void addChunk(final BucketedContext context, // Unused @Override public boolean addChunk(final SingletonContext context, // Unused - final int chunkSize, - final Chunk values, // Unused - @NotNull final LongChunk inputIndices, - final long destination) { + final int chunkSize, + final Chunk values, // Unused + @NotNull final LongChunk inputIndices, + final long destination) { if (chunkSize == 0) { return false; } @@ -98,8 
+95,8 @@ public boolean addChunk(final SingletonContext context, // Unused @Override public boolean addIndex(final SingletonContext context, - @NotNull final Index index, - final long destination) { + @NotNull final Index index, + final long destination) { if (index.isEmpty()) { return false; } @@ -116,7 +113,7 @@ private boolean maybeAssignFirst(final long destination, final long sourceIndexK } else { // noinspection ThrowableNotThrown Assert.statementNeverExecuted( - "Destination " + destination + " greater than next destination " + nextDestination); + "Destination " + destination + " greater than next destination " + nextDestination); } return true; } @@ -131,25 +128,24 @@ public void propagateInitialState(@NotNull final QueryTable resultTable) { @Override public void propagateUpdates(@NotNull final ShiftAwareListener.Update downstream, - @NotNull final ReadOnlyIndex newDestinations) { - // NB: We cannot assert no modifies; other operators in the same aggregation might modify - // columns not in our + @NotNull final ReadOnlyIndex newDestinations) { + // NB: We cannot assert no modifies; other operators in the same aggregation might modify columns not in our // result set. Assert.assertion(downstream.removed.empty() && downstream.shifted.empty(), - "downstream.removed.empty() && downstream.shifted.empty()"); + "downstream.removed.empty() && downstream.shifted.empty()"); copyStreamToResult(downstream.added); redirections = null; if (downstream.added.nonempty()) { - Assert.eq(downstream.added.lastKey() + 1, "downstream.added.lastKey() + 1", - nextDestination, "nextDestination"); + Assert.eq(downstream.added.lastKey() + 1, "downstream.added.lastKey() + 1", nextDestination, + "nextDestination"); firstDestinationThisStep = nextDestination; } } /** *

    - * For each destination slot, map to the (first) source index key and copy source values to - * destination slots for all result columns. + * For each destination slot, map to the (first) source index key and copy source values to destination slots for + * all result columns. * *

    * This implementation proceeds chunk-wise in the following manner: @@ -163,21 +159,18 @@ public void propagateUpdates(@NotNull final ShiftAwareListener.Update downstream */ private void copyStreamToResult(@NotNull final OrderedKeys destinations) { try (final SafeCloseableList toClose = new SafeCloseableList()) { - final OrderedKeys.Iterator destinationsIterator = - toClose.add(destinations.getOrderedKeysIterator()); - final ShiftedOrderedKeys shiftedSliceDestinations = - toClose.add(new ShiftedOrderedKeys()); + final OrderedKeys.Iterator destinationsIterator = toClose.add(destinations.getOrderedKeysIterator()); + final ShiftedOrderedKeys shiftedSliceDestinations = toClose.add(new ShiftedOrderedKeys()); final ChunkSource.GetContext redirectionsContext = - toClose.add(redirections.makeGetContext(COPY_CHUNK_SIZE)); + toClose.add(redirections.makeGetContext(COPY_CHUNK_SIZE)); final SharedContext inputSharedContext = toClose.add(SharedContext.makeSharedContext()); final ChunkSource.GetContext[] inputContexts = - toClose.addArray(new ChunkSource.GetContext[numResultColumns]); + toClose.addArray(new ChunkSource.GetContext[numResultColumns]); final WritableChunkSink.FillFromContext[] outputContexts = - toClose.addArray(new WritableChunkSink.FillFromContext[numResultColumns]); + toClose.addArray(new WritableChunkSink.FillFromContext[numResultColumns]); for (int ci = 0; ci < numResultColumns; ++ci) { - inputContexts[ci] = - inputColumns[ci].makeGetContext(COPY_CHUNK_SIZE, inputSharedContext); + inputContexts[ci] = inputColumns[ci].makeGetContext(COPY_CHUNK_SIZE, inputSharedContext); final WritableSource outputColumn = outputColumns[ci]; outputContexts[ci] = outputColumn.makeFillFromContext(COPY_CHUNK_SIZE); outputColumn.ensureCapacity(destinations.lastKey() + 1, false); @@ -185,20 +178,16 @@ private void copyStreamToResult(@NotNull final OrderedKeys destinations) { while (destinationsIterator.hasMore()) { final OrderedKeys sliceDestinations = - 
destinationsIterator.getNextOrderedKeysWithLength(COPY_CHUNK_SIZE); + destinationsIterator.getNextOrderedKeysWithLength(COPY_CHUNK_SIZE); shiftedSliceDestinations.reset(sliceDestinations, -firstDestinationThisStep); - final LongChunk sourceIndices = Chunk - .downcast( - redirections.getChunk(redirectionsContext, shiftedSliceDestinations)) - .asLongChunk(); + final LongChunk sourceIndices = Chunk.downcast( + redirections.getChunk(redirectionsContext, shiftedSliceDestinations)).asLongChunk(); - try (final OrderedKeys sliceSources = - OrderedKeys.wrapKeyIndicesChunkAsOrderedKeys(sourceIndices)) { + try (final OrderedKeys sliceSources = OrderedKeys.wrapKeyIndicesChunkAsOrderedKeys(sourceIndices)) { for (int ci = 0; ci < numResultColumns; ++ci) { final Chunk inputChunk = - inputColumns[ci].getChunk(inputContexts[ci], sliceSources); - outputColumns[ci].fillFromChunk(outputContexts[ci], inputChunk, - sliceDestinations); + inputColumns[ci].getChunk(inputContexts[ci], sliceSources); + outputColumns[ci].fillFromChunk(outputContexts[ci], inputChunk, sliceDestinations); } inputSharedContext.reset(); } diff --git a/DB/src/main/java/io/deephaven/db/v2/by/StreamLastChunkedOperator.java b/DB/src/main/java/io/deephaven/db/v2/by/StreamLastChunkedOperator.java index b83d9b8eb91..c6ac14371f6 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/StreamLastChunkedOperator.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/StreamLastChunkedOperator.java @@ -26,8 +26,7 @@ */ public class StreamLastChunkedOperator extends CopyingPermutedStreamFirstOrLastChunkedOperator { - StreamLastChunkedOperator(@NotNull final MatchPair[] resultPairs, - @NotNull final Table streamTable) { + StreamLastChunkedOperator(@NotNull final MatchPair[] resultPairs, @NotNull final Table streamTable) { super(resultPairs, streamTable); } @@ -38,12 +37,12 @@ public final boolean unchunkedIndex() { @Override public void addChunk(final BucketedContext context, // Unused - final Chunk values, // Unused - @NotNull final 
LongChunk inputIndices, - @NotNull final IntChunk destinations, - @NotNull final IntChunk startPositions, - @NotNull final IntChunk length, - @NotNull final WritableBooleanChunk stateModified) { + final Chunk values, // Unused + @NotNull final LongChunk inputIndices, + @NotNull final IntChunk destinations, + @NotNull final IntChunk startPositions, + @NotNull final IntChunk length, + @NotNull final WritableBooleanChunk stateModified) { for (int ii = 0; ii < startPositions.size(); ++ii) { final int startPosition = startPositions.get(ii); final int runLength = length.get(ii); @@ -55,10 +54,10 @@ public void addChunk(final BucketedContext context, // Unused @Override public boolean addChunk(final SingletonContext context, // Unused - final int chunkSize, - final Chunk values, // Unused - @NotNull final LongChunk inputIndices, - final long destination) { + final int chunkSize, + final Chunk values, // Unused + @NotNull final LongChunk inputIndices, + final long destination) { if (chunkSize == 0) { return false; } @@ -68,8 +67,8 @@ public boolean addChunk(final SingletonContext context, // Unused @Override public boolean addIndex(final SingletonContext context, - @NotNull final Index index, - final long destination) { + @NotNull final Index index, + final long destination) { if (index.isEmpty()) { return false; } @@ -85,9 +84,9 @@ public void propagateInitialState(@NotNull final QueryTable resultTable) { @Override public void propagateUpdates(@NotNull final ShiftAwareListener.Update downstream, - @NotNull final ReadOnlyIndex newDestinations) { + @NotNull final ReadOnlyIndex newDestinations) { Assert.assertion(downstream.removed.empty() && downstream.shifted.empty(), - "downstream.removed.empty() && downstream.shifted.empty()"); + "downstream.removed.empty() && downstream.shifted.empty()"); try (final OrderedKeys changedDestinations = downstream.modified.union(downstream.added)) { copyStreamToResult(changedDestinations); } diff --git 
a/DB/src/main/java/io/deephaven/db/v2/by/SumBigDecimalChunk.java b/DB/src/main/java/io/deephaven/db/v2/by/SumBigDecimalChunk.java index 194235b97ee..cab07ed040b 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/SumBigDecimalChunk.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/SumBigDecimalChunk.java @@ -10,9 +10,8 @@ class SumBigDecimalChunk { private SumBigDecimalChunk() {} // static use only - static BigDecimal sumBigDecimalChunk( - ObjectChunk values, int chunkStart, int chunkSize, - MutableInt chunkNonNull) { + static BigDecimal sumBigDecimalChunk(ObjectChunk values, int chunkStart, + int chunkSize, MutableInt chunkNonNull) { BigDecimal partialSum = BigDecimal.ZERO; for (int ii = chunkStart; ii < chunkStart + chunkSize; ++ii) { final BigDecimal value = values.get(ii); @@ -24,9 +23,8 @@ static BigDecimal sumBigDecimalChunk( return partialSum; } - static BigDecimal sumBigDecimalChunkAbs( - ObjectChunk values, int chunkStart, int chunkSize, - MutableInt chunkNonNull) { + static BigDecimal sumBigDecimalChunkAbs(ObjectChunk values, int chunkStart, + int chunkSize, MutableInt chunkNonNull) { BigDecimal partialSum = BigDecimal.ZERO; for (int ii = chunkStart; ii < chunkStart + chunkSize; ++ii) { final BigDecimal value = values.get(ii); @@ -38,9 +36,8 @@ static BigDecimal sumBigDecimalChunkAbs( return partialSum; } - static BigDecimal sum2BigDecimalChunk( - ObjectChunk values, int chunkStart, int chunkSize, - MutableInt chunkNonNull, MutableObject sum2out) { + static BigDecimal sum2BigDecimalChunk(ObjectChunk values, int chunkStart, + int chunkSize, MutableInt chunkNonNull, MutableObject sum2out) { final int end = chunkStart + chunkSize; BigDecimal sum = BigDecimal.ZERO; BigDecimal sum2 = BigDecimal.ZERO; diff --git a/DB/src/main/java/io/deephaven/db/v2/by/SumStateFactory.java b/DB/src/main/java/io/deephaven/db/v2/by/SumStateFactory.java index 96ad48b11d1..10033018ac4 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/SumStateFactory.java +++ 
b/DB/src/main/java/io/deephaven/db/v2/by/SumStateFactory.java @@ -12,7 +12,7 @@ public SumStateFactory() {} @Override public IterativeChunkedAggregationOperator getChunkedOperator(Class type, String name, - boolean exposeInternalColumns) { + boolean exposeInternalColumns) { return getSumChunked(type, name); } diff --git a/DB/src/main/java/io/deephaven/db/v2/by/TDigestPercentileOperator.java b/DB/src/main/java/io/deephaven/db/v2/by/TDigestPercentileOperator.java index 6f327a6105e..9c603b0912a 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/TDigestPercentileOperator.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/TDigestPercentileOperator.java @@ -36,16 +36,15 @@ public class TDigestPercentileOperator implements IterativeChunkedAggregationOpe private final String digestColumnName; public TDigestPercentileOperator(@NotNull Class type, double compression, double percentile, - @NotNull String name) { + @NotNull String name) { this(type, compression, null, new double[] {percentile}, new String[] {name}); } - public TDigestPercentileOperator(@NotNull Class type, double compression, - String digestColumnName, @NotNull double[] percentiles, @NotNull String[] resultNames) { + public TDigestPercentileOperator(@NotNull Class type, double compression, String digestColumnName, + @NotNull double[] percentiles, @NotNull String[] resultNames) { if (resultNames.length != percentiles.length) { - throw new IllegalArgumentException( - "Percentile length and resultName length must be identical:" + resultNames.length - + " (resultNames) != " + percentiles.length + " (percentiles)"); + throw new IllegalArgumentException("Percentile length and resultName length must be identical:" + + resultNames.length + " (resultNames) != " + percentiles.length + " (percentiles)"); } this.percentiles = percentiles; this.digestColumnName = digestColumnName; @@ -53,8 +52,7 @@ public TDigestPercentileOperator(@NotNull Class type, double compression, this.digests = new 
ObjectArraySource<>(TDigest.class); final boolean isDateTime = type == DBDateTime.class; if (isDateTime) { - throw new UnsupportedOperationException( - "DateTime is not supported for approximate percentiles."); + throw new UnsupportedOperationException("DateTime is not supported for approximate percentiles."); } chunkType = ChunkType.fromElementType(type); resultColumns = new DoubleArraySource[percentiles.length]; @@ -66,9 +64,9 @@ public TDigestPercentileOperator(@NotNull Class type, double compression, @Override public void addChunk(BucketedContext bucketedContext, Chunk values, - LongChunk inputIndices, IntChunk destinations, - IntChunk startPositions, IntChunk length, - WritableBooleanChunk stateModified) { + LongChunk inputIndices, IntChunk destinations, + IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { final TDigestContext tDigestContext = (TDigestContext) bucketedContext; final DoubleChunk doubleValues = tDigestContext.toDoubleCast.cast(values); @@ -99,26 +97,23 @@ public void addChunk(BucketedContext bucketedContext, Chunk va @Override public void removeChunk(BucketedContext bucketedContext, Chunk values, - LongChunk inputIndices, IntChunk destinations, - IntChunk startPositions, IntChunk length, - WritableBooleanChunk stateModified) { - throw new UnsupportedOperationException( - "t-digest Approximate percentiles do not support data removal."); + LongChunk inputIndices, IntChunk destinations, + IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { + throw new UnsupportedOperationException("t-digest Approximate percentiles do not support data removal."); } @Override public void modifyChunk(BucketedContext bucketedContext, Chunk preValues, - Chunk postValues, LongChunk postShiftIndices, - IntChunk destinations, IntChunk startPositions, - IntChunk length, WritableBooleanChunk stateModified) { - throw new UnsupportedOperationException( - "t-digest Approximate percentiles do not support data 
modification."); + Chunk postValues, LongChunk postShiftIndices, + IntChunk destinations, IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { + throw new UnsupportedOperationException("t-digest Approximate percentiles do not support data modification."); } @Override - public boolean addChunk(SingletonContext singletonContext, int chunkSize, - Chunk values, LongChunk inputIndices, - long destination) { + public boolean addChunk(SingletonContext singletonContext, int chunkSize, Chunk values, + LongChunk inputIndices, long destination) { final TDigestContext tDigestContext = (TDigestContext) singletonContext; final TDigest digest = digestForSlot(destination); final DoubleChunk doubleValues = tDigestContext.toDoubleCast.cast(values); @@ -150,17 +145,15 @@ private void updateDestination(long destination) { @Override public void propagateUpdates(@NotNull ShiftAwareListener.Update downstream, - @NotNull ReadOnlyIndex newDestinations) { + @NotNull ReadOnlyIndex newDestinations) { downstream.added.forAllLongs(this::updateDestination); downstream.modified.forAllLongs(this::updateDestination); } @Override - public boolean removeChunk(SingletonContext singletonContext, int chunkSize, - Chunk values, LongChunk inputIndices, - long destination) { - throw new UnsupportedOperationException( - "t-digest Approximate percentiles do not support data removal."); + public boolean removeChunk(SingletonContext singletonContext, int chunkSize, Chunk values, + LongChunk inputIndices, long destination) { + throw new UnsupportedOperationException("t-digest Approximate percentiles do not support data removal."); } private TDigest digestForSlot(long slot) { diff --git a/DB/src/main/java/io/deephaven/db/v2/by/TrackingFirstByStateFactoryImpl.java b/DB/src/main/java/io/deephaven/db/v2/by/TrackingFirstByStateFactoryImpl.java index 0de0496f03b..40b3e06e0e9 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/TrackingFirstByStateFactoryImpl.java +++ 
b/DB/src/main/java/io/deephaven/db/v2/by/TrackingFirstByStateFactoryImpl.java @@ -9,8 +9,7 @@ public TrackingFirstByStateFactoryImpl() { this(false, false, 0); } - private TrackingFirstByStateFactoryImpl(boolean lowestRollup, boolean secondRollup, - int rollupColumnIdentifier) { + private TrackingFirstByStateFactoryImpl(boolean lowestRollup, boolean secondRollup, int rollupColumnIdentifier) { super(lowestRollup, secondRollup, rollupColumnIdentifier); } @@ -33,8 +32,7 @@ ReaggregatableStatefactory forRollup() { @Override ReaggregatableStatefactory rollupFactory() { return new SortedFirstOrLastByFactoryImpl(true, false, true, rollupColumnIdentifier, - REDIRECTION_INDEX_PREFIX + rollupColumnIdentifier - + ComboAggregateFactory.ROLLUP_COLUMN_SUFFIX); + REDIRECTION_INDEX_PREFIX + rollupColumnIdentifier + ComboAggregateFactory.ROLLUP_COLUMN_SUFFIX); } @Override @@ -43,10 +41,10 @@ public String toString() { return "TrackingFirstByStateFactory"; } else { return "TrackingFirstByStateFactory{" + - "lowestRollup=" + lowestRollup + - ", secondRollup=" + secondRollup + - ", rollupColumnIdentifier=" + rollupColumnIdentifier + - '}'; + "lowestRollup=" + lowestRollup + + ", secondRollup=" + secondRollup + + ", rollupColumnIdentifier=" + rollupColumnIdentifier + + '}'; } } } diff --git a/DB/src/main/java/io/deephaven/db/v2/by/TrackingLastByStateFactoryImpl.java b/DB/src/main/java/io/deephaven/db/v2/by/TrackingLastByStateFactoryImpl.java index 71d28970bb0..47bbcb16b20 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/TrackingLastByStateFactoryImpl.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/TrackingLastByStateFactoryImpl.java @@ -9,8 +9,7 @@ public TrackingLastByStateFactoryImpl() { this(false, false, 0); } - private TrackingLastByStateFactoryImpl(boolean lowestRollup, boolean secondRollup, - int rollupColumnIdentifier) { + private TrackingLastByStateFactoryImpl(boolean lowestRollup, boolean secondRollup, int rollupColumnIdentifier) { super(lowestRollup, secondRollup, 
rollupColumnIdentifier); } @@ -33,8 +32,7 @@ ReaggregatableStatefactory forRollup() { @Override ReaggregatableStatefactory rollupFactory() { return new SortedFirstOrLastByFactoryImpl(false, false, true, rollupColumnIdentifier, - REDIRECTION_INDEX_PREFIX + rollupColumnIdentifier - + ComboAggregateFactory.ROLLUP_COLUMN_SUFFIX); + REDIRECTION_INDEX_PREFIX + rollupColumnIdentifier + ComboAggregateFactory.ROLLUP_COLUMN_SUFFIX); } @Override @@ -43,10 +41,10 @@ public String toString() { return "TrackingLastByStateFactory"; } else { return "TrackingLastByStateFactory{" + - "lowestRollup=" + lowestRollup + - ", secondRollup=" + secondRollup + - ", rollupColumnIdentifier=" + rollupColumnIdentifier + - '}'; + "lowestRollup=" + lowestRollup + + ", secondRollup=" + secondRollup + + ", rollupColumnIdentifier=" + rollupColumnIdentifier + + '}'; } } } diff --git a/DB/src/main/java/io/deephaven/db/v2/by/UniqueStateFactory.java b/DB/src/main/java/io/deephaven/db/v2/by/UniqueStateFactory.java index db32aeb9778..5bdd1e3758f 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/UniqueStateFactory.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/UniqueStateFactory.java @@ -3,8 +3,8 @@ import java.util.Objects; /** - * An Iterative state factory that displays the singular unique value of the items within a - * particular state, or default values if none are present, or the values are not unique. + * An Iterative state factory that displays the singular unique value of the items within a particular state, or default + * values if none are present, or the values are not unique. 
*/ public class UniqueStateFactory extends IterativeOperatorStateFactory { private final boolean secondRollup; @@ -31,7 +31,7 @@ public boolean equals(Object o) { return false; AggUniqueMemoKey that = (AggUniqueMemoKey) o; return countNulls == that.countNulls && Objects.equals(noKeyValue, that.noKeyValue) - && Objects.equals(nonUniqueValue, that.nonUniqueValue); + && Objects.equals(nonUniqueValue, that.nonUniqueValue); } @Override @@ -52,8 +52,7 @@ public int hashCode() { this(false, countNulls, noKeyValue, nonUniqueValue); } - private UniqueStateFactory(boolean secondRollup, boolean countNulls, Object noKeyValue, - Object nonUniqueValue) { + private UniqueStateFactory(boolean secondRollup, boolean countNulls, Object noKeyValue, Object nonUniqueValue) { this.secondRollup = secondRollup; this.countNulls = countNulls; this.noKeyValue = noKeyValue; @@ -90,9 +89,9 @@ UniqueStateFactory rollupFactory() { @Override public IterativeChunkedAggregationOperator getChunkedOperator(Class type, String name, - boolean exposeInternalColumns) { - return getUniqueChunked(type, name, countNulls, exposeInternalColumns, noKeyValue, - nonUniqueValue, secondRollup); + boolean exposeInternalColumns) { + return getUniqueChunked(type, name, countNulls, exposeInternalColumns, noKeyValue, nonUniqueValue, + secondRollup); } public boolean countNulls() { diff --git a/DB/src/main/java/io/deephaven/db/v2/by/VarState.java b/DB/src/main/java/io/deephaven/db/v2/by/VarState.java index 6d25d4590ac..2530966695b 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/VarState.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/VarState.java @@ -55,9 +55,9 @@ void savePrevious() { @Override public String toString() { return "Var{" + - "sum=" + sum + - ", sum2=" + sum2 + - ", nonNull=" + nonNullCount + - '}'; + "sum=" + sum + + ", sum2=" + sum2 + + ", nonNull=" + nonNullCount + + '}'; } } diff --git a/DB/src/main/java/io/deephaven/db/v2/by/VarStateFactory.java 
b/DB/src/main/java/io/deephaven/db/v2/by/VarStateFactory.java index 18ddd50e5ac..9908ea8a65e 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/VarStateFactory.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/VarStateFactory.java @@ -12,7 +12,7 @@ public VarStateFactory() {} @Override public IterativeChunkedAggregationOperator getChunkedOperator(Class type, String name, - boolean exposeInternalColumns) { + boolean exposeInternalColumns) { return getVarChunked(type, false, name, exposeInternalColumns); } diff --git a/DB/src/main/java/io/deephaven/db/v2/by/VarStateWithNan.java b/DB/src/main/java/io/deephaven/db/v2/by/VarStateWithNan.java index 77a047a01ce..f70d0841c9c 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/VarStateWithNan.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/VarStateWithNan.java @@ -32,7 +32,6 @@ void savePrevious() { @Override public String toString() { - return "Var{" + "sum=" + sum + ", sum2=" + sum2 + ", nan=" + nanCount + ", nonNull=" - + nonNullCount + '}'; + return "Var{" + "sum=" + sum + ", sum2=" + sum2 + ", nan=" + nanCount + ", nonNull=" + nonNullCount + '}'; } } diff --git a/DB/src/main/java/io/deephaven/db/v2/by/WeightedAverageOperator.java b/DB/src/main/java/io/deephaven/db/v2/by/WeightedAverageOperator.java index 5d3ffc2b286..18fddfc9832 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/WeightedAverageOperator.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/WeightedAverageOperator.java @@ -26,8 +26,7 @@ interface State { @SuppressWarnings("unchecked") static Operator getOperator(ColumnSource components, ColumnSource weights) { - Class componentType = - io.deephaven.util.type.TypeUtils.getBoxedType(components.getType()); + Class componentType = io.deephaven.util.type.TypeUtils.getBoxedType(components.getType()); Class weightType = TypeUtils.getBoxedType(weights.getType()); if (componentType == Double.class) @@ -46,12 +45,11 @@ static Operator getOperator(ColumnSource components, ColumnSource w return 
getLongOperator(weightType, (ColumnSource) components, weights); throw new UnsupportedOperationException( - "Can not perform a weighted average with component type: " + componentType); + "Can not perform a weighted average with component type: " + componentType); } @SuppressWarnings("unchecked") - private static Operator getDoubleOperator(Class weightType, ColumnSource components, - ColumnSource weights) { + private static Operator getDoubleOperator(Class weightType, ColumnSource components, ColumnSource weights) { if (weightType == Double.class) return new OperatorImpl(new DoubleGetter(components), new DoubleGetter(weights)); if (weightType == Float.class) @@ -67,13 +65,11 @@ private static Operator getDoubleOperator(Class weightType, ColumnSource if (weightType == Long.class) return new OperatorImpl(new DoubleGetter(components), new LongGetter(weights)); - throw new UnsupportedOperationException( - "Can not perform a weighted average with weight type: " + weightType); + throw new UnsupportedOperationException("Can not perform a weighted average with weight type: " + weightType); } @SuppressWarnings("unchecked") - private static Operator getFloatOperator(Class weightType, ColumnSource components, - ColumnSource weights) { + private static Operator getFloatOperator(Class weightType, ColumnSource components, ColumnSource weights) { if (weightType == Double.class) return new OperatorImpl(new FloatGetter(components), new DoubleGetter(weights)); if (weightType == Float.class) @@ -89,13 +85,12 @@ private static Operator getFloatOperator(Class weightType, ColumnSource c if (weightType == Long.class) return new OperatorImpl(new FloatGetter(components), new LongGetter(weights)); - throw new UnsupportedOperationException( - "Can not perform a weighted average with weight type: " + weightType); + throw new UnsupportedOperationException("Can not perform a weighted average with weight type: " + weightType); } @SuppressWarnings("unchecked") private static Operator 
getCharOperator(Class weightType, ColumnSource components, - ColumnSource weights) { + ColumnSource weights) { if (weightType == Double.class) return new OperatorImpl(new CharGetter(components), new DoubleGetter(weights)); if (weightType == Float.class) @@ -111,13 +106,11 @@ private static Operator getCharOperator(Class weightType, ColumnSource components, - ColumnSource weights) { + private static Operator getByteOperator(Class weightType, ColumnSource components, ColumnSource weights) { if (weightType == Double.class) return new OperatorImpl(new ByteGetter(components), new DoubleGetter(weights)); if (weightType == Float.class) @@ -133,13 +126,11 @@ private static Operator getByteOperator(Class weightType, ColumnSource com if (weightType == Long.class) return new OperatorImpl(new ByteGetter(components), new LongGetter(weights)); - throw new UnsupportedOperationException( - "Can not perform a weighted average with weight type: " + weightType); + throw new UnsupportedOperationException("Can not perform a weighted average with weight type: " + weightType); } @SuppressWarnings("unchecked") - private static Operator getShortOperator(Class weightType, ColumnSource components, - ColumnSource weights) { + private static Operator getShortOperator(Class weightType, ColumnSource components, ColumnSource weights) { if (weightType == Double.class) return new OperatorImpl(new ShortGetter(components), new DoubleGetter(weights)); if (weightType == Float.class) @@ -155,13 +146,12 @@ private static Operator getShortOperator(Class weightType, ColumnSource c if (weightType == Long.class) return new OperatorImpl(new ShortGetter(components), new LongGetter(weights)); - throw new UnsupportedOperationException( - "Can not perform a weighted average with weight type: " + weightType); + throw new UnsupportedOperationException("Can not perform a weighted average with weight type: " + weightType); } @SuppressWarnings("unchecked") private static Operator getIntegerOperator(Class weightType, 
ColumnSource components, - ColumnSource weights) { + ColumnSource weights) { if (weightType == Double.class) return new OperatorImpl(new IntegerGetter(components), new DoubleGetter(weights)); if (weightType == Float.class) @@ -177,13 +167,11 @@ private static Operator getIntegerOperator(Class weightType, ColumnSource components, - ColumnSource weights) { + private static Operator getLongOperator(Class weightType, ColumnSource components, ColumnSource weights) { if (weightType == Double.class) return new OperatorImpl(new LongGetter(components), new DoubleGetter(weights)); if (weightType == Float.class) @@ -199,8 +187,7 @@ private static Operator getLongOperator(Class weightType, ColumnSource com if (weightType == Long.class) return new OperatorImpl(new LongGetter(components), new LongGetter(weights)); - throw new UnsupportedOperationException( - "Can not perform a weighted average with weight type: " + weightType); + throw new UnsupportedOperationException("Can not perform a weighted average with weight type: " + weightType); } private interface ValueGetter { @@ -261,8 +248,7 @@ private void doAdd(double component, double weight) { nanCount++; return; } - if (component == QueryConstants.NULL_DOUBLE - || weight == QueryConstants.NULL_DOUBLE) { + if (component == QueryConstants.NULL_DOUBLE || weight == QueryConstants.NULL_DOUBLE) { return; } weightedSum += (component * weight); @@ -279,8 +265,7 @@ public void removeValue(long key) { nanCount--; return; } - if (component == QueryConstants.NULL_DOUBLE - || weight == QueryConstants.NULL_DOUBLE) { + if (component == QueryConstants.NULL_DOUBLE || weight == QueryConstants.NULL_DOUBLE) { return; } weightedSum -= (component * weight); diff --git a/DB/src/main/java/io/deephaven/db/v2/by/WeightedAverageSumAggregationFactory.java b/DB/src/main/java/io/deephaven/db/v2/by/WeightedAverageSumAggregationFactory.java index 157ac7adfff..3970b2f467a 100644 --- 
a/DB/src/main/java/io/deephaven/db/v2/by/WeightedAverageSumAggregationFactory.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/WeightedAverageSumAggregationFactory.java @@ -24,41 +24,35 @@ public boolean allowKeyOnlySubstitution() { @Override public AggregationContext makeAggregationContext(@NotNull final Table table, - @NotNull final String... groupByColumns) { + @NotNull final String... groupByColumns) { return getAllColumnOperators(table, groupByColumns); } private AggregationContext getAllColumnOperators(Table withView, String[] groupByNameArray) { final Set groupByNames = new HashSet<>(Arrays.asList(groupByNameArray)); - final int operatorColumnCount = - withView.getColumnSourceMap().size() - groupByNames.size() - 1; + final int operatorColumnCount = withView.getColumnSourceMap().size() - groupByNames.size() - 1; - final List operators = - new ArrayList<>(operatorColumnCount + 1); - final List> inputColumns = - new ArrayList<>(operatorColumnCount + 1); + final List operators = new ArrayList<>(operatorColumnCount + 1); + final List> inputColumns = new ArrayList<>(operatorColumnCount + 1); final List inputNames = new ArrayList<>(operatorColumnCount); final List isIntegerResult = new ArrayList<>(operatorColumnCount); final List floatColumnNames = new ArrayList<>(operatorColumnCount); final List integerColumnNames = new ArrayList<>(operatorColumnCount); final ColumnSource weightSource = withView.getColumnSource(weightName); - boolean weightSourceIsFloatingPoint = weightSource.getChunkType() == ChunkType.Double - || weightSource.getChunkType() == ChunkType.Float; + boolean weightSourceIsFloatingPoint = + weightSource.getChunkType() == ChunkType.Double || weightSource.getChunkType() == ChunkType.Float; boolean anyIntegerResults = !weightSourceIsFloatingPoint && isSum - && withView.getColumnSourceMap().values().stream() - .anyMatch(cs -> cs.getChunkType() == ChunkType.Long - || cs.getChunkType() == ChunkType.Int || cs.getChunkType() == ChunkType.Short - || 
cs.getChunkType() == ChunkType.Byte || cs.getChunkType() == ChunkType.Char); - boolean anyFloatResults = weightSourceIsFloatingPoint || !isSum - || withView.getColumnSourceMap().values().stream() - .anyMatch(cs -> cs.getChunkType() == ChunkType.Float - || cs.getChunkType() == ChunkType.Double); + && withView.getColumnSourceMap().values().stream() + .anyMatch(cs -> cs.getChunkType() == ChunkType.Long || cs.getChunkType() == ChunkType.Int + || cs.getChunkType() == ChunkType.Short || cs.getChunkType() == ChunkType.Byte + || cs.getChunkType() == ChunkType.Char); + boolean anyFloatResults = weightSourceIsFloatingPoint || !isSum || withView.getColumnSourceMap().values() + .stream().anyMatch(cs -> cs.getChunkType() == ChunkType.Float || cs.getChunkType() == ChunkType.Double); final DoubleWeightRecordingInternalOperator doubleWeightOperator; if (anyFloatResults) { - doubleWeightOperator = - new DoubleWeightRecordingInternalOperator(weightSource.getChunkType()); + doubleWeightOperator = new DoubleWeightRecordingInternalOperator(weightSource.getChunkType()); // noinspection unchecked inputColumns.add(weightSource); operators.add(doubleWeightOperator); @@ -68,8 +62,7 @@ private AggregationContext getAllColumnOperators(Table withView, String[] groupB final LongWeightRecordingInternalOperator longWeightOperator; if (anyIntegerResults) { - longWeightOperator = - new LongWeightRecordingInternalOperator(weightSource.getChunkType()); + longWeightOperator = new LongWeightRecordingInternalOperator(weightSource.getChunkType()); // noinspection unchecked inputColumns.add(weightSource); operators.add(longWeightOperator); @@ -108,26 +101,25 @@ private AggregationContext getAllColumnOperators(Table withView, String[] groupB break; default: throw new UnsupportedOperationException( - "Invalid chunk type for weightedSum: " - + columnSource.getChunkType()); + "Invalid chunk type for weightedSum: " + columnSource.getChunkType()); } } isIntegerResult.add(isInteger); if (isInteger) { 
integerColumnNames.add(name); - operators.add(new LongChunkedWeightedSumOperator(columnSource.getChunkType(), - longWeightOperator, name)); + operators.add( + new LongChunkedWeightedSumOperator(columnSource.getChunkType(), longWeightOperator, name)); } else { floatColumnNames.add(name); operators.add(new DoubleChunkedWeightedSumOperator(columnSource.getChunkType(), - doubleWeightOperator, name)); + doubleWeightOperator, name)); } } else { isIntegerResult.add(false); floatColumnNames.add(name); - operators.add(new ChunkedWeightedAverageOperator(columnSource.getChunkType(), - doubleWeightOperator, name)); + operators.add( + new ChunkedWeightedAverageOperator(columnSource.getChunkType(), doubleWeightOperator, name)); } }); @@ -160,10 +152,11 @@ private AggregationContext getAllColumnOperators(Table withView, String[] groupB } // noinspection unchecked - return new AggregationContext(operators.toArray( - IterativeChunkedAggregationOperator.ZERO_LENGTH_ITERATIVE_CHUNKED_AGGREGATION_OPERATOR_ARRAY), - inputNameArray, - inputColumns.toArray(ChunkSource.WithPrev.ZERO_LENGTH_CHUNK_SOURCE_WITH_PREV_ARRAY)); + return new AggregationContext( + operators.toArray( + IterativeChunkedAggregationOperator.ZERO_LENGTH_ITERATIVE_CHUNKED_AGGREGATION_OPERATOR_ARRAY), + inputNameArray, + inputColumns.toArray(ChunkSource.WithPrev.ZERO_LENGTH_CHUNK_SOURCE_WITH_PREV_ARRAY)); } @Override diff --git a/DB/src/main/java/io/deephaven/db/v2/by/WeightedSumStateFactoryImpl.java b/DB/src/main/java/io/deephaven/db/v2/by/WeightedSumStateFactoryImpl.java index 13049d9af70..7bf84222c3f 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/WeightedSumStateFactoryImpl.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/WeightedSumStateFactoryImpl.java @@ -20,7 +20,7 @@ public String getWeightName() { @Override public IterativeChunkedAggregationOperator getChunkedOperator(Class type, String name, - boolean exposeInternalColumns) { + boolean exposeInternalColumns) { return null; } diff --git 
a/DB/src/main/java/io/deephaven/db/v2/by/ssmcountdistinct/BucketSsmDistinctContext.java b/DB/src/main/java/io/deephaven/db/v2/by/ssmcountdistinct/BucketSsmDistinctContext.java index ac6647140a1..2af603d6147 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/ssmcountdistinct/BucketSsmDistinctContext.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/ssmcountdistinct/BucketSsmDistinctContext.java @@ -4,7 +4,7 @@ import io.deephaven.db.v2.sources.chunk.*; public class BucketSsmDistinctContext extends SsmDistinctContext - implements IterativeChunkedAggregationOperator.BucketedContext { + implements IterativeChunkedAggregationOperator.BucketedContext { public final WritableIntChunk lengthCopy; public final ResettableWritableChunk valueResettable; public final ResettableWritableIntChunk countResettable; diff --git a/DB/src/main/java/io/deephaven/db/v2/by/ssmcountdistinct/BucketSsmDistinctRollupContext.java b/DB/src/main/java/io/deephaven/db/v2/by/ssmcountdistinct/BucketSsmDistinctRollupContext.java index a8ecf181f9f..b32bf74e048 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/ssmcountdistinct/BucketSsmDistinctRollupContext.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/ssmcountdistinct/BucketSsmDistinctRollupContext.java @@ -4,7 +4,7 @@ import io.deephaven.db.v2.sources.chunk.*; public class BucketSsmDistinctRollupContext extends SsmDistinctRollupContext - implements IterativeChunkedAggregationOperator.BucketedContext { + implements IterativeChunkedAggregationOperator.BucketedContext { public final WritableIntChunk lengthCopy; final WritableIntChunk countCopy; public final WritableIntChunk starts; diff --git a/DB/src/main/java/io/deephaven/db/v2/by/ssmcountdistinct/DbDateTimeSsmSourceWrapper.java b/DB/src/main/java/io/deephaven/db/v2/by/ssmcountdistinct/DbDateTimeSsmSourceWrapper.java index 369c696e579..fbf50806f4b 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/ssmcountdistinct/DbDateTimeSsmSourceWrapper.java +++ 
b/DB/src/main/java/io/deephaven/db/v2/by/ssmcountdistinct/DbDateTimeSsmSourceWrapper.java @@ -23,8 +23,8 @@ */ @SuppressWarnings("rawtypes") public class DbDateTimeSsmSourceWrapper extends AbstractColumnSource - implements ColumnSourceGetDefaults.ForObject, - MutableColumnSourceGetDefaults.ForObject { + implements ColumnSourceGetDefaults.ForObject, + MutableColumnSourceGetDefaults.ForObject { private final LongSsmBackedSource underlying; public DbDateTimeSsmSourceWrapper(@NotNull final LongSsmBackedSource underlying) { diff --git a/DB/src/main/java/io/deephaven/db/v2/by/ssmcountdistinct/DistinctOperatorFactory.java b/DB/src/main/java/io/deephaven/db/v2/by/ssmcountdistinct/DistinctOperatorFactory.java index 36fc93f31ea..905d8b882d1 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/ssmcountdistinct/DistinctOperatorFactory.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/ssmcountdistinct/DistinctOperatorFactory.java @@ -15,13 +15,11 @@ /** * A factory interface to create {@link IterativeChunkedAggregationOperator operators} for the - * {@link io.deephaven.db.v2.by.AggType#Distinct} - * {@link io.deephaven.db.v2.by.AggType#CountDistinct}, {@link io.deephaven.db.v2.by.AggType#Unique} - * and their rollup counterparts + * {@link io.deephaven.db.v2.by.AggType#Distinct} {@link io.deephaven.db.v2.by.AggType#CountDistinct}, + * {@link io.deephaven.db.v2.by.AggType#Unique} and their rollup counterparts */ public interface DistinctOperatorFactory { - int NODE_SIZE = - Configuration.getInstance().getIntegerWithDefault("DistinctOperatorFactory.nodeSize", 4096); + int NODE_SIZE = Configuration.getInstance().getIntegerWithDefault("DistinctOperatorFactory.nodeSize", 4096); /** * Create an {@link IterativeChunkedAggregationOperator operator} for the @@ -29,42 +27,39 @@ public interface DistinctOperatorFactory { * * @param type the type of the column * @param resultName the name of the result column - * @param countNulls true if null values should be counted as important values, 
or false if they - * should be ignored. - * @param exposeInternal true if the underlying SSM state should be exposed as a column (for use - * with rollups) - * @param isRollup true if the returned operator should be suitable for second or higher levels - * of rollup aggregation + * @param countNulls true if null values should be counted as important values, or false if they should be ignored. + * @param exposeInternal true if the underlying SSM state should be exposed as a column (for use with rollups) + * @param isRollup true if the returned operator should be suitable for second or higher levels of rollup + * aggregation * * @return an appropriate operator. */ - static IterativeChunkedAggregationOperator createCountDistinct(Class type, String resultName, - boolean countNulls, boolean exposeInternal, boolean isRollup) { + static IterativeChunkedAggregationOperator createCountDistinct(Class type, String resultName, boolean countNulls, + boolean exposeInternal, boolean isRollup) { if (type == Byte.class || type == byte.class) { return isRollup ? new ByteRollupCountDistinctOperator(resultName, countNulls) - : new ByteChunkedCountDistinctOperator(resultName, countNulls, exposeInternal); + : new ByteChunkedCountDistinctOperator(resultName, countNulls, exposeInternal); } else if (type == Character.class || type == char.class) { return isRollup ? new CharRollupCountDistinctOperator(resultName, countNulls) - : new CharChunkedCountDistinctOperator(resultName, countNulls, exposeInternal); + : new CharChunkedCountDistinctOperator(resultName, countNulls, exposeInternal); } else if (type == Double.class || type == double.class) { return isRollup ? new DoubleRollupCountDistinctOperator(resultName, countNulls) - : new DoubleChunkedCountDistinctOperator(resultName, countNulls, exposeInternal); + : new DoubleChunkedCountDistinctOperator(resultName, countNulls, exposeInternal); } else if (type == Float.class || type == float.class) { return isRollup ? 
new FloatRollupCountDistinctOperator(resultName, countNulls) - : new FloatChunkedCountDistinctOperator(resultName, countNulls, exposeInternal); + : new FloatChunkedCountDistinctOperator(resultName, countNulls, exposeInternal); } else if (type == Integer.class || type == int.class) { return isRollup ? new IntRollupCountDistinctOperator(resultName, countNulls) - : new IntChunkedCountDistinctOperator(resultName, countNulls, exposeInternal); + : new IntChunkedCountDistinctOperator(resultName, countNulls, exposeInternal); } else if (type == Long.class || type == long.class || type == DBDateTime.class) { return isRollup ? new LongRollupCountDistinctOperator(resultName, countNulls) - : new LongChunkedCountDistinctOperator(resultName, countNulls, exposeInternal); + : new LongChunkedCountDistinctOperator(resultName, countNulls, exposeInternal); } else if (type == Short.class || type == short.class) { return isRollup ? new ShortRollupCountDistinctOperator(resultName, countNulls) - : new ShortChunkedCountDistinctOperator(resultName, countNulls, exposeInternal); + : new ShortChunkedCountDistinctOperator(resultName, countNulls, exposeInternal); } else { return isRollup ? new ObjectRollupCountDistinctOperator(type, resultName, countNulls) - : new ObjectChunkedCountDistinctOperator(type, resultName, countNulls, - exposeInternal); + : new ObjectChunkedCountDistinctOperator(type, resultName, countNulls, exposeInternal); } } @@ -74,41 +69,39 @@ static IterativeChunkedAggregationOperator createCountDistinct(Class type, St * * @param type the type of the column * @param resultName the name of the result column - * @param countNulls true if null values should be counted as important values, or false if they - * should be ignored. 
- * @param exposeInternal true if the underlying SSM state should be exposed as a column (for use - * with rollups) - * @param isRollup true if the returned operator should be suitable for second or higher levels - * of rollup aggregation + * @param countNulls true if null values should be counted as important values, or false if they should be ignored. + * @param exposeInternal true if the underlying SSM state should be exposed as a column (for use with rollups) + * @param isRollup true if the returned operator should be suitable for second or higher levels of rollup + * aggregation * * @return an appropriate operator. */ - static IterativeChunkedAggregationOperator createDistinct(Class type, String resultName, - boolean countNulls, boolean exposeInternal, boolean isRollup) { + static IterativeChunkedAggregationOperator createDistinct(Class type, String resultName, boolean countNulls, + boolean exposeInternal, boolean isRollup) { if (type == Byte.class || type == byte.class) { return isRollup ? new ByteRollupDistinctOperator(resultName, countNulls) - : new ByteChunkedDistinctOperator(resultName, countNulls, exposeInternal); + : new ByteChunkedDistinctOperator(resultName, countNulls, exposeInternal); } else if (type == Character.class || type == char.class) { return isRollup ? new CharRollupDistinctOperator(resultName, countNulls) - : new CharChunkedDistinctOperator(resultName, countNulls, exposeInternal); + : new CharChunkedDistinctOperator(resultName, countNulls, exposeInternal); } else if (type == Double.class || type == double.class) { return isRollup ? new DoubleRollupDistinctOperator(resultName, countNulls) - : new DoubleChunkedDistinctOperator(resultName, countNulls, exposeInternal); + : new DoubleChunkedDistinctOperator(resultName, countNulls, exposeInternal); } else if (type == Float.class || type == float.class) { return isRollup ? 
new FloatRollupDistinctOperator(resultName, countNulls) - : new FloatChunkedDistinctOperator(resultName, countNulls, exposeInternal); + : new FloatChunkedDistinctOperator(resultName, countNulls, exposeInternal); } else if (type == Integer.class || type == int.class) { return isRollup ? new IntRollupDistinctOperator(resultName, countNulls) - : new IntChunkedDistinctOperator(resultName, countNulls, exposeInternal); + : new IntChunkedDistinctOperator(resultName, countNulls, exposeInternal); } else if (type == Long.class || type == long.class || type == DBDateTime.class) { return isRollup ? new LongRollupDistinctOperator(type, resultName, countNulls) - : new LongChunkedDistinctOperator(type, resultName, countNulls, exposeInternal); + : new LongChunkedDistinctOperator(type, resultName, countNulls, exposeInternal); } else if (type == Short.class || type == short.class) { return isRollup ? new ShortRollupDistinctOperator(resultName, countNulls) - : new ShortChunkedDistinctOperator(resultName, countNulls, exposeInternal); + : new ShortChunkedDistinctOperator(resultName, countNulls, exposeInternal); } else { return isRollup ? new ObjectRollupDistinctOperator(type, resultName, countNulls) - : new ObjectChunkedDistinctOperator(type, resultName, countNulls, exposeInternal); + : new ObjectChunkedDistinctOperator(type, resultName, countNulls, exposeInternal); } } @@ -118,119 +111,86 @@ static IterativeChunkedAggregationOperator createDistinct(Class type, String * * @param type the type of the column * @param resultName the name of the result column - * @param countNulls true if null values should be counted as important values, or false if they - * should be ignored. 
- * @param exposeInternal true if the underlying SSM state should be exposed as a column (for use - * with rollups) - * @param isRollup true if the returned operator should be suitable for second or higher levels - * of rollup aggregation + * @param countNulls true if null values should be counted as important values, or false if they should be ignored. + * @param exposeInternal true if the underlying SSM state should be exposed as a column (for use with rollups) + * @param isRollup true if the returned operator should be suitable for second or higher levels of rollup + * aggregation * * @return an appropriate operator. */ - static IterativeChunkedAggregationOperator createUnique(Class type, String resultName, - boolean countNulls, boolean exposeInternal, Object noKeyValue, Object nonUniqueValue, - boolean isRollup) { + static IterativeChunkedAggregationOperator createUnique(Class type, String resultName, boolean countNulls, + boolean exposeInternal, Object noKeyValue, Object nonUniqueValue, boolean isRollup) { checkType(resultName, "No Key Value", type, noKeyValue); checkType(resultName, "Non Unique Value", type, nonUniqueValue); if (type == Byte.class || type == byte.class) { - final byte nkvAsType = - (noKeyValue == null) ? NULL_BYTE : ((Number) noKeyValue).byteValue(); - final byte nuvAsType = - (nonUniqueValue == null) ? NULL_BYTE : ((Number) nonUniqueValue).byteValue(); - return isRollup - ? new ByteRollupUniqueOperator(resultName, countNulls, nkvAsType, nuvAsType) - : new ByteChunkedUniqueOperator(resultName, countNulls, exposeInternal, nkvAsType, - nuvAsType); + final byte nkvAsType = (noKeyValue == null) ? NULL_BYTE : ((Number) noKeyValue).byteValue(); + final byte nuvAsType = (nonUniqueValue == null) ? NULL_BYTE : ((Number) nonUniqueValue).byteValue(); + return isRollup ? 
new ByteRollupUniqueOperator(resultName, countNulls, nkvAsType, nuvAsType) + : new ByteChunkedUniqueOperator(resultName, countNulls, exposeInternal, nkvAsType, nuvAsType); } else if (type == Character.class || type == char.class) { return isRollup - ? new CharRollupUniqueOperator(resultName, countNulls, - io.deephaven.util.type.TypeUtils.unbox((Character) noKeyValue), - io.deephaven.util.type.TypeUtils.unbox((Character) nonUniqueValue)) - : new CharChunkedUniqueOperator(resultName, countNulls, exposeInternal, - io.deephaven.util.type.TypeUtils.unbox((Character) noKeyValue), - io.deephaven.util.type.TypeUtils.unbox((Character) nonUniqueValue)); + ? new CharRollupUniqueOperator(resultName, countNulls, + io.deephaven.util.type.TypeUtils.unbox((Character) noKeyValue), + io.deephaven.util.type.TypeUtils.unbox((Character) nonUniqueValue)) + : new CharChunkedUniqueOperator(resultName, countNulls, exposeInternal, + io.deephaven.util.type.TypeUtils.unbox((Character) noKeyValue), + io.deephaven.util.type.TypeUtils.unbox((Character) nonUniqueValue)); } else if (type == Double.class || type == double.class) { - final double nkvAsType = - (noKeyValue == null) ? NULL_DOUBLE : ((Number) noKeyValue).doubleValue(); - final double nuvAsType = - (nonUniqueValue == null) ? NULL_DOUBLE : ((Number) nonUniqueValue).doubleValue(); - return isRollup - ? new DoubleRollupUniqueOperator(resultName, countNulls, nkvAsType, nuvAsType) - : new DoubleChunkedUniqueOperator(resultName, countNulls, exposeInternal, nkvAsType, - nuvAsType); + final double nkvAsType = (noKeyValue == null) ? NULL_DOUBLE : ((Number) noKeyValue).doubleValue(); + final double nuvAsType = (nonUniqueValue == null) ? NULL_DOUBLE : ((Number) nonUniqueValue).doubleValue(); + return isRollup ? 
new DoubleRollupUniqueOperator(resultName, countNulls, nkvAsType, nuvAsType) + : new DoubleChunkedUniqueOperator(resultName, countNulls, exposeInternal, nkvAsType, nuvAsType); } else if (type == Float.class || type == float.class) { - final float nkvAsType = - (noKeyValue == null) ? NULL_FLOAT : ((Number) noKeyValue).floatValue(); - final float nuvAsType = - (nonUniqueValue == null) ? NULL_FLOAT : ((Number) nonUniqueValue).floatValue(); - return isRollup - ? new FloatRollupUniqueOperator(resultName, countNulls, nkvAsType, nuvAsType) - : new FloatChunkedUniqueOperator(resultName, countNulls, exposeInternal, nkvAsType, - nuvAsType); + final float nkvAsType = (noKeyValue == null) ? NULL_FLOAT : ((Number) noKeyValue).floatValue(); + final float nuvAsType = (nonUniqueValue == null) ? NULL_FLOAT : ((Number) nonUniqueValue).floatValue(); + return isRollup ? new FloatRollupUniqueOperator(resultName, countNulls, nkvAsType, nuvAsType) + : new FloatChunkedUniqueOperator(resultName, countNulls, exposeInternal, nkvAsType, nuvAsType); } else if (type == Integer.class || type == int.class) { - final int nkvAsType = - (noKeyValue == null) ? NULL_INT : ((Number) noKeyValue).intValue(); - final int nuvAsType = - (nonUniqueValue == null) ? NULL_INT : ((Number) nonUniqueValue).intValue(); - return isRollup - ? new IntRollupUniqueOperator(resultName, countNulls, nkvAsType, nuvAsType) - : new IntChunkedUniqueOperator(resultName, countNulls, exposeInternal, nkvAsType, - nuvAsType); + final int nkvAsType = (noKeyValue == null) ? NULL_INT : ((Number) noKeyValue).intValue(); + final int nuvAsType = (nonUniqueValue == null) ? NULL_INT : ((Number) nonUniqueValue).intValue(); + return isRollup ? 
new IntRollupUniqueOperator(resultName, countNulls, nkvAsType, nuvAsType) + : new IntChunkedUniqueOperator(resultName, countNulls, exposeInternal, nkvAsType, nuvAsType); } else if (type == Long.class || type == long.class || type == DBDateTime.class) { final long nkvAsType; final long nuvAsType; if (type == DBDateTime.class) { nkvAsType = (noKeyValue == null) ? NULL_LONG : ((DBDateTime) noKeyValue).getNanos(); - nuvAsType = - (nonUniqueValue == null) ? NULL_LONG : ((DBDateTime) nonUniqueValue).getNanos(); + nuvAsType = (nonUniqueValue == null) ? NULL_LONG : ((DBDateTime) nonUniqueValue).getNanos(); } else { nkvAsType = (noKeyValue == null) ? NULL_LONG : ((Number) noKeyValue).longValue(); - nuvAsType = - (nonUniqueValue == null) ? NULL_LONG : ((Number) nonUniqueValue).longValue(); + nuvAsType = (nonUniqueValue == null) ? NULL_LONG : ((Number) nonUniqueValue).longValue(); } - return isRollup - ? new LongRollupUniqueOperator(type, resultName, countNulls, nkvAsType, nuvAsType) - : new LongChunkedUniqueOperator(type, resultName, countNulls, exposeInternal, - nkvAsType, nuvAsType); + return isRollup ? new LongRollupUniqueOperator(type, resultName, countNulls, nkvAsType, nuvAsType) + : new LongChunkedUniqueOperator(type, resultName, countNulls, exposeInternal, nkvAsType, nuvAsType); } else if (type == Short.class || type == short.class) { - final short nkvAsType = - (noKeyValue == null) ? NULL_SHORT : ((Number) noKeyValue).shortValue(); - final short nuvAsType = - (nonUniqueValue == null) ? NULL_SHORT : ((Number) nonUniqueValue).shortValue(); - return isRollup - ? new ShortRollupUniqueOperator(resultName, countNulls, nkvAsType, nuvAsType) - : new ShortChunkedUniqueOperator(resultName, countNulls, exposeInternal, nkvAsType, - nuvAsType); + final short nkvAsType = (noKeyValue == null) ? NULL_SHORT : ((Number) noKeyValue).shortValue(); + final short nuvAsType = (nonUniqueValue == null) ? NULL_SHORT : ((Number) nonUniqueValue).shortValue(); + return isRollup ? 
new ShortRollupUniqueOperator(resultName, countNulls, nkvAsType, nuvAsType) + : new ShortChunkedUniqueOperator(resultName, countNulls, exposeInternal, nkvAsType, nuvAsType); } else { - return isRollup - ? new ObjectRollupUniqueOperator(type, resultName, countNulls, noKeyValue, - nonUniqueValue) - : new ObjectChunkedUniqueOperator(type, resultName, countNulls, exposeInternal, - noKeyValue, nonUniqueValue); + return isRollup ? new ObjectRollupUniqueOperator(type, resultName, countNulls, noKeyValue, nonUniqueValue) + : new ObjectChunkedUniqueOperator(type, resultName, countNulls, exposeInternal, noKeyValue, + nonUniqueValue); } } - static void checkType(String resultColName, String valueIntent, Class expected, - Object value) { + static void checkType(String resultColName, String valueIntent, Class expected, Object value) { expected = io.deephaven.util.type.TypeUtils.getBoxedType(expected); if (value != null && !expected.isAssignableFrom(value.getClass())) { - if (io.deephaven.util.type.TypeUtils.isNumeric(expected) - && TypeUtils.isNumeric(value.getClass())) { + if (io.deephaven.util.type.TypeUtils.isNumeric(expected) && TypeUtils.isNumeric(value.getClass())) { if (checkNumericCompatibility((Number) value, expected)) { return; } - throw new IllegalArgumentException( - "For result column `" + resultColName + "' the " + valueIntent + " '" + value - + "' is larger than can be represented with a " + expected.getName()); + throw new IllegalArgumentException("For result column `" + resultColName + "' the " + valueIntent + " '" + + value + "' is larger than can be represented with a " + expected.getName()); } - throw new IllegalArgumentException( - "For result column `" + resultColName + "' the " + valueIntent + " must be of type " - + expected.getName() + " but is " + value.getClass().getName()); + throw new IllegalArgumentException("For result column `" + resultColName + "' the " + valueIntent + + " must be of type " + expected.getName() + " but is " + 
value.getClass().getName()); } } @@ -242,9 +202,8 @@ static boolean checkNumericCompatibility(Number value, Class expected) { } else if (expected == Integer.class) { return Integer.MIN_VALUE <= value.longValue() && value.longValue() <= Integer.MAX_VALUE; } else if (expected == Long.class) { - return new BigInteger(value.toString()) - .compareTo(BigInteger.valueOf(Long.MIN_VALUE)) >= 0 && - new BigInteger(value.toString()).compareTo(BigInteger.valueOf(Long.MAX_VALUE)) <= 0; + return new BigInteger(value.toString()).compareTo(BigInteger.valueOf(Long.MIN_VALUE)) >= 0 && + new BigInteger(value.toString()).compareTo(BigInteger.valueOf(Long.MAX_VALUE)) <= 0; } else if (expected == Float.class) { return value.getClass() != Double.class; } else if (expected == Double.class) { diff --git a/DB/src/main/java/io/deephaven/db/v2/by/ssmcountdistinct/SsmBackedColumnSource.java b/DB/src/main/java/io/deephaven/db/v2/by/ssmcountdistinct/SsmBackedColumnSource.java index 4faf2273796..9a4476d4f42 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/ssmcountdistinct/SsmBackedColumnSource.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/ssmcountdistinct/SsmBackedColumnSource.java @@ -8,14 +8,14 @@ import org.jetbrains.annotations.NotNull; /** - * A {@link ColumnSource} that provides {@link DbArrayBase DBArrays} of type T, backed by a same - * typed {@link SegmentedSortedMultiSet}. + * A {@link ColumnSource} that provides {@link DbArrayBase DBArrays} of type T, backed by a same typed + * {@link SegmentedSortedMultiSet}. 
* * @param The SSM Type * @param The provided Array type */ public interface SsmBackedColumnSource - extends ColumnSource { + extends ColumnSource { /** * Create an appropriate instance for the specified type @@ -28,14 +28,12 @@ static SsmBackedColumnSource create(@NotNull final Class type) { if (type == char.class || type == Character.class) { return new CharSsmBackedSource(); } /* - * else if(type == byte.class || type == Byte.class) { return new ByteSsmBackedSource(); } - * else if(type == short.class || type == Short.class) { return new - * ShortSsmBackedSource(); } else if(type == int.class || type == Integer.class) { return - * new IntSsmBackedSource(); } else if(type == long.class || type == Long.class || type == - * DBDateTime.class) { return new LongSsmBackedSource(); } else if(type == float.class || - * type == Float.class) { return new FloatSsmBackedSource(); } else if(type == - * double.class || type == Double.class) { return new DoubleSsmBackedSource(); } else { - * return new ObjectSsmBackedSource(type); } + * else if(type == byte.class || type == Byte.class) { return new ByteSsmBackedSource(); } else if(type == + * short.class || type == Short.class) { return new ShortSsmBackedSource(); } else if(type == int.class || + * type == Integer.class) { return new IntSsmBackedSource(); } else if(type == long.class || type == + * Long.class || type == DBDateTime.class) { return new LongSsmBackedSource(); } else if(type == float.class + * || type == Float.class) { return new FloatSsmBackedSource(); } else if(type == double.class || type == + * Double.class) { return new DoubleSsmBackedSource(); } else { return new ObjectSsmBackedSource(type); } */ throw new IllegalStateException("NOPE"); } @@ -51,8 +49,8 @@ static SsmBackedColumnSource create(@NotNull final Class type) { K getCurrentSsm(long key); /** - * Get the ssm at the specified key, creating one if none existed. This method will update the - * current previous tracking state of the SSM. 
+ * Get the ssm at the specified key, creating one if none existed. This method will update the current previous + * tracking state of the SSM. * * @param key the key to get the ssm for. * @return the SSM at the key, or a new one. diff --git a/DB/src/main/java/io/deephaven/db/v2/by/ssmcountdistinct/SsmDistinctContext.java b/DB/src/main/java/io/deephaven/db/v2/by/ssmcountdistinct/SsmDistinctContext.java index 744bf77142d..1dee57abdfd 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/ssmcountdistinct/SsmDistinctContext.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/ssmcountdistinct/SsmDistinctContext.java @@ -9,7 +9,7 @@ public class SsmDistinctContext implements IterativeChunkedAggregationOperator.SingletonContext { public final SegmentedSortedMultiSet.RemoveContext removeContext = - SegmentedSortedMultiSet.makeRemoveContext(DistinctOperatorFactory.NODE_SIZE); + SegmentedSortedMultiSet.makeRemoveContext(DistinctOperatorFactory.NODE_SIZE); public final WritableChunk valueCopy; public final WritableIntChunk counts; diff --git a/DB/src/main/java/io/deephaven/db/v2/by/ssmcountdistinct/SsmDistinctRollupContext.java b/DB/src/main/java/io/deephaven/db/v2/by/ssmcountdistinct/SsmDistinctRollupContext.java index 8a25af2f24f..5bee32eca0e 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/ssmcountdistinct/SsmDistinctRollupContext.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/ssmcountdistinct/SsmDistinctRollupContext.java @@ -7,10 +7,9 @@ import io.deephaven.db.v2.sources.chunk.sized.SizedIntChunk; import io.deephaven.db.v2.ssms.SegmentedSortedMultiSet; -public class SsmDistinctRollupContext - implements IterativeChunkedAggregationOperator.SingletonContext { +public class SsmDistinctRollupContext implements IterativeChunkedAggregationOperator.SingletonContext { public final SegmentedSortedMultiSet.RemoveContext removeContext = - SegmentedSortedMultiSet.makeRemoveContext(DistinctOperatorFactory.NODE_SIZE); + 
SegmentedSortedMultiSet.makeRemoveContext(DistinctOperatorFactory.NODE_SIZE); public final SizedChunk valueCopy; public final SizedIntChunk counts; diff --git a/DB/src/main/java/io/deephaven/db/v2/by/ssmminmax/SsmChunkedMinMaxOperator.java b/DB/src/main/java/io/deephaven/db/v2/by/ssmminmax/SsmChunkedMinMaxOperator.java index 50657a7d2ef..4c39eaa5232 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/ssmminmax/SsmChunkedMinMaxOperator.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/ssmminmax/SsmChunkedMinMaxOperator.java @@ -23,8 +23,8 @@ * Iterative average operator. */ public class SsmChunkedMinMaxOperator implements IterativeChunkedAggregationOperator { - private static final int NODE_SIZE = Configuration.getInstance() - .getIntegerWithDefault("SsmChunkedMinMaxOperator.nodeSize", 4096); + private static final int NODE_SIZE = + Configuration.getInstance().getIntegerWithDefault("SsmChunkedMinMaxOperator.nodeSize", 4096); private final ArrayBackedColumnSource resultColumn; private final ObjectArraySource ssms; private final String name; @@ -35,10 +35,10 @@ public class SsmChunkedMinMaxOperator implements IterativeChunkedAggregationOper private final SetResult setResult; public SsmChunkedMinMaxOperator( - // region extra constructor params - Class type, - // endregion extra constructor params - boolean minimum, String name) { + // region extra constructor params + Class type, + // endregion extra constructor params + boolean minimum, String name) { this.name = name; this.ssms = new ObjectArraySource<>(SegmentedSortedMultiSet.class); // region resultColumn initialization @@ -56,7 +56,7 @@ public SsmChunkedMinMaxOperator( } private static SetResult makeSetResult(ChunkType chunkType, Class type, boolean minimum, - ArrayBackedColumnSource resultColumn) { + ArrayBackedColumnSource resultColumn) { if (type == DBDateTime.class) { return new DateTimeSetResult(minimum, resultColumn); } else if (type == Boolean.class) { @@ -93,9 +93,9 @@ interface SetResult { @Override 
public void addChunk(BucketedContext bucketedContext, Chunk values, - LongChunk inputIndices, IntChunk destinations, - IntChunk startPositions, IntChunk length, - WritableBooleanChunk stateModified) { + LongChunk inputIndices, IntChunk destinations, + IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { final BucketSsmMinMaxContext context = (BucketSsmMinMaxContext) bucketedContext; context.valueCopy.setSize(values.size()); @@ -104,8 +104,7 @@ public void addChunk(BucketedContext bucketedContext, Chunk va context.lengthCopy.setSize(length.size()); context.lengthCopy.copyFromChunk(length, 0, 0, length.size()); - compactAndCountKernel.compactAndCount(context.valueCopy, context.counts, startPositions, - context.lengthCopy); + compactAndCountKernel.compactAndCount(context.valueCopy, context.counts, startPositions, context.lengthCopy); for (int ii = 0; ii < startPositions.size(); ++ii) { final int runLength = context.lengthCopy.get(ii); @@ -118,9 +117,9 @@ public void addChunk(BucketedContext bucketedContext, Chunk va final SegmentedSortedMultiSet ssm = ssmForSlot(destination); final WritableChunk valueSlice = - context.valueResettable.resetFromChunk(context.valueCopy, startPosition, runLength); + context.valueResettable.resetFromChunk(context.valueCopy, startPosition, runLength); final WritableIntChunk countSlice = - context.countResettable.resetFromChunk(context.counts, startPosition, runLength); + context.countResettable.resetFromChunk(context.counts, startPosition, runLength); ssm.insert(valueSlice, countSlice); stateModified.set(ii, setResult.setResult(ssm, destination)); @@ -129,9 +128,9 @@ public void addChunk(BucketedContext bucketedContext, Chunk va @Override public void removeChunk(BucketedContext bucketedContext, Chunk values, - LongChunk inputIndices, IntChunk destinations, - IntChunk startPositions, IntChunk length, - WritableBooleanChunk stateModified) { + LongChunk inputIndices, IntChunk destinations, + IntChunk 
startPositions, IntChunk length, + WritableBooleanChunk stateModified) { final BucketSsmMinMaxContext context = (BucketSsmMinMaxContext) bucketedContext; context.valueCopy.setSize(values.size()); @@ -140,8 +139,7 @@ public void removeChunk(BucketedContext bucketedContext, Chunk context.lengthCopy.setSize(length.size()); context.lengthCopy.copyFromChunk(length, 0, 0, length.size()); - compactAndCountKernel.compactAndCount(context.valueCopy, context.counts, startPositions, - context.lengthCopy); + compactAndCountKernel.compactAndCount(context.valueCopy, context.counts, startPositions, context.lengthCopy); final SegmentedSortedMultiSet.RemoveContext removeContext = removeContextFactory.get(); for (int ii = 0; ii < startPositions.size(); ++ii) { @@ -154,9 +152,9 @@ public void removeChunk(BucketedContext bucketedContext, Chunk final SegmentedSortedMultiSet ssm = ssmForSlot(destination); final WritableChunk valueSlice = - context.valueResettable.resetFromChunk(context.valueCopy, startPosition, runLength); + context.valueResettable.resetFromChunk(context.valueCopy, startPosition, runLength); final WritableIntChunk countSlice = - context.countResettable.resetFromChunk(context.counts, startPosition, runLength); + context.countResettable.resetFromChunk(context.counts, startPosition, runLength); ssm.remove(removeContext, valueSlice, countSlice); if (ssm.size() == 0) { clearSsm(destination); @@ -168,9 +166,9 @@ public void removeChunk(BucketedContext bucketedContext, Chunk @Override public void modifyChunk(BucketedContext bucketedContext, Chunk preValues, - Chunk postValues, LongChunk postShiftIndices, - IntChunk destinations, IntChunk startPositions, - IntChunk length, WritableBooleanChunk stateModified) { + Chunk postValues, LongChunk postShiftIndices, + IntChunk destinations, IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { final BucketSsmMinMaxContext context = (BucketSsmMinMaxContext) bucketedContext; 
context.valueCopy.setSize(preValues.size()); @@ -179,8 +177,7 @@ public void modifyChunk(BucketedContext bucketedContext, Chunk context.lengthCopy.setSize(length.size()); context.lengthCopy.copyFromChunk(length, 0, 0, length.size()); - compactAndCountKernel.compactAndCount(context.valueCopy, context.counts, startPositions, - context.lengthCopy); + compactAndCountKernel.compactAndCount(context.valueCopy, context.counts, startPositions, context.lengthCopy); final SegmentedSortedMultiSet.RemoveContext removeContext = removeContextFactory.get(); context.ssmsToMaybeClear.fillWithValue(0, destinations.size(), false); @@ -194,9 +191,9 @@ public void modifyChunk(BucketedContext bucketedContext, Chunk final SegmentedSortedMultiSet ssm = ssmForSlot(destination); final WritableChunk valueSlice = - context.valueResettable.resetFromChunk(context.valueCopy, startPosition, runLength); + context.valueResettable.resetFromChunk(context.valueCopy, startPosition, runLength); final WritableIntChunk countSlice = - context.countResettable.resetFromChunk(context.counts, startPosition, runLength); + context.countResettable.resetFromChunk(context.counts, startPosition, runLength); ssm.remove(removeContext, valueSlice, countSlice); if (ssm.size() == 0) { context.ssmsToMaybeClear.set(ii, true); @@ -209,8 +206,7 @@ public void modifyChunk(BucketedContext bucketedContext, Chunk context.lengthCopy.setSize(length.size()); context.lengthCopy.copyFromChunk(length, 0, 0, length.size()); - compactAndCountKernel.compactAndCount(context.valueCopy, context.counts, startPositions, - context.lengthCopy); + compactAndCountKernel.compactAndCount(context.valueCopy, context.counts, startPositions, context.lengthCopy); for (int ii = 0; ii < startPositions.size(); ++ii) { final int runLength = context.lengthCopy.get(ii); final int startPosition = startPositions.get(ii); @@ -221,26 +217,24 @@ public void modifyChunk(BucketedContext bucketedContext, Chunk clearSsm(destination); stateModified.set(ii, 
setResult.setResultNull(destination)); } else { - stateModified.set(ii, - setResult.setResult(ssmForSlot(destination), destination)); + stateModified.set(ii, setResult.setResult(ssmForSlot(destination), destination)); } continue; } final SegmentedSortedMultiSet ssm = ssmForSlot(destination); final WritableChunk valueSlice = - context.valueResettable.resetFromChunk(context.valueCopy, startPosition, runLength); + context.valueResettable.resetFromChunk(context.valueCopy, startPosition, runLength); final WritableIntChunk countSlice = - context.countResettable.resetFromChunk(context.counts, startPosition, runLength); + context.countResettable.resetFromChunk(context.counts, startPosition, runLength); ssm.insert(valueSlice, countSlice); stateModified.set(ii, setResult.setResult(ssm, destination)); } } @Override - public boolean addChunk(SingletonContext singletonContext, int chunkSize, - Chunk values, LongChunk inputIndices, - long destination) { + public boolean addChunk(SingletonContext singletonContext, int chunkSize, Chunk values, + LongChunk inputIndices, long destination) { final SsmMinMaxContext context = (SsmMinMaxContext) singletonContext; context.valueCopy.setSize(values.size()); @@ -254,9 +248,8 @@ public boolean addChunk(SingletonContext singletonContext, int chunkSize, } @Override - public boolean removeChunk(SingletonContext singletonContext, int chunkSize, - Chunk values, LongChunk inputIndices, - long destination) { + public boolean removeChunk(SingletonContext singletonContext, int chunkSize, Chunk values, + LongChunk inputIndices, long destination) { final SsmMinMaxContext context = (SsmMinMaxContext) singletonContext; context.valueCopy.setSize(values.size()); @@ -274,9 +267,8 @@ public boolean removeChunk(SingletonContext singletonContext, int chunkSize, } @Override - public boolean modifyChunk(SingletonContext singletonContext, int chunkSize, - Chunk preValues, Chunk postValues, - LongChunk postShiftIndices, long destination) { + public boolean 
modifyChunk(SingletonContext singletonContext, int chunkSize, Chunk preValues, + Chunk postValues, LongChunk postShiftIndices, long destination) { final SsmMinMaxContext context = (SsmMinMaxContext) singletonContext; context.valueCopy.setSize(preValues.size()); @@ -347,7 +339,7 @@ public SingletonContext makeSingletonContext(int size) { private static class SsmMinMaxContext implements SingletonContext { final SegmentedSortedMultiSet.RemoveContext removeContext = - SegmentedSortedMultiSet.makeRemoveContext(NODE_SIZE); + SegmentedSortedMultiSet.makeRemoveContext(NODE_SIZE); final WritableChunk valueCopy; final WritableIntChunk counts; @@ -363,8 +355,7 @@ public void close() { } } - private static class BucketSsmMinMaxContext extends SsmMinMaxContext - implements BucketedContext { + private static class BucketSsmMinMaxContext extends SsmMinMaxContext implements BucketedContext { final WritableIntChunk lengthCopy; final ResettableWritableChunk valueResettable; final ResettableWritableIntChunk countResettable; @@ -388,8 +379,7 @@ public void close() { } } - public IterativeChunkedAggregationOperator makeSecondaryOperator(boolean isMinimum, - String resultName) { + public IterativeChunkedAggregationOperator makeSecondaryOperator(boolean isMinimum, String resultName) { return new SecondaryOperator(isMinimum, resultName); } @@ -401,37 +391,37 @@ private class SecondaryOperator implements IterativeChunkedAggregationOperator { private SecondaryOperator(boolean isMinimum, String resultName) { // noinspection unchecked this.resultColumn = ArrayBackedColumnSource.getMemoryColumnSource(0, - SsmChunkedMinMaxOperator.this.resultColumn.getType()); + SsmChunkedMinMaxOperator.this.resultColumn.getType()); setResult = makeSetResult(chunkType, resultColumn.getType(), isMinimum, resultColumn); this.resultName = resultName; } @Override public void addChunk(BucketedContext context, Chunk values, - LongChunk inputIndices, IntChunk destinations, - IntChunk startPositions, IntChunk length, - 
WritableBooleanChunk stateModified) { + LongChunk inputIndices, IntChunk destinations, + IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { updateBucketed(destinations, startPositions, stateModified); } @Override public void removeChunk(BucketedContext context, Chunk values, - LongChunk inputIndices, IntChunk destinations, - IntChunk startPositions, IntChunk length, - WritableBooleanChunk stateModified) { + LongChunk inputIndices, IntChunk destinations, + IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { updateBucketed(destinations, startPositions, stateModified); } @Override public void modifyChunk(BucketedContext context, Chunk previousValues, - Chunk newValues, LongChunk postShiftIndices, - IntChunk destinations, IntChunk startPositions, - IntChunk length, WritableBooleanChunk stateModified) { + Chunk newValues, LongChunk postShiftIndices, + IntChunk destinations, IntChunk startPositions, + IntChunk length, WritableBooleanChunk stateModified) { updateBucketed(destinations, startPositions, stateModified); } - private void updateBucketed(IntChunk destinations, - IntChunk startPositions, WritableBooleanChunk stateModified) { + private void updateBucketed(IntChunk destinations, IntChunk startPositions, + WritableBooleanChunk stateModified) { for (int ii = 0; ii < startPositions.size(); ++ii) { final int startPosition = startPositions.get(ii); final long destination = destinations.get(startPosition); @@ -441,24 +431,21 @@ private void updateBucketed(IntChunk destinations, } @Override - public boolean addChunk(SingletonContext context, int chunkSize, - Chunk values, LongChunk inputIndices, - long destination) { + public boolean addChunk(SingletonContext context, int chunkSize, Chunk values, + LongChunk inputIndices, long destination) { return updateSingleton(destination); } @Override - public boolean removeChunk(SingletonContext context, int chunkSize, - Chunk values, LongChunk inputIndices, - long 
destination) { + public boolean removeChunk(SingletonContext context, int chunkSize, Chunk values, + LongChunk inputIndices, long destination) { return updateSingleton(destination); } @Override - public boolean modifyChunk(SingletonContext context, int chunkSize, - Chunk previousValues, Chunk newValues, - LongChunk postShiftIndices, long destination) { + public boolean modifyChunk(SingletonContext context, int chunkSize, Chunk previousValues, + Chunk newValues, LongChunk postShiftIndices, long destination) { return updateSingleton(destination); } diff --git a/DB/src/main/java/io/deephaven/db/v2/by/ssmpercentile/DateTimePercentileTypeHelper.java b/DB/src/main/java/io/deephaven/db/v2/by/ssmpercentile/DateTimePercentileTypeHelper.java index f246a34e03a..00c8ae08d44 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/ssmpercentile/DateTimePercentileTypeHelper.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/ssmpercentile/DateTimePercentileTypeHelper.java @@ -13,8 +13,7 @@ import org.apache.commons.lang3.mutable.MutableInt; -public class DateTimePercentileTypeHelper - implements SsmChunkedPercentileOperator.PercentileTypeHelper { +public class DateTimePercentileTypeHelper implements SsmChunkedPercentileOperator.PercentileTypeHelper { private final double percentile; private final DateTimeArraySource resultColumn; @@ -26,8 +25,7 @@ public class DateTimePercentileTypeHelper } @Override - public boolean setResult(SegmentedSortedMultiSet ssmLo, SegmentedSortedMultiSet ssmHi, - long destination) { + public boolean setResult(SegmentedSortedMultiSet ssmLo, SegmentedSortedMultiSet ssmHi, long destination) { final long loSize = ssmLo.totalSize(); final long hiSize = ssmHi.totalSize(); final long totalSize = loSize + hiSize; @@ -57,20 +55,16 @@ private boolean setResult(long destination, long newResult) { } @Override - public int pivot(SegmentedSortedMultiSet segmentedSortedMultiSet, - Chunk valueCopy, IntChunk counts, - int startPosition, int runLength, MutableInt leftOvers) { 
+ public int pivot(SegmentedSortedMultiSet segmentedSortedMultiSet, Chunk valueCopy, + IntChunk counts, int startPosition, int runLength, MutableInt leftOvers) { final LongChunk asLongChunk = valueCopy.asLongChunk(); - final LongSegmentedSortedMultiset ssmLo = - (LongSegmentedSortedMultiset) segmentedSortedMultiSet; + final LongSegmentedSortedMultiset ssmLo = (LongSegmentedSortedMultiset) segmentedSortedMultiSet; final long hiValue = ssmLo.getMaxLong(); - final int result = - upperBound(asLongChunk, startPosition, startPosition + runLength, hiValue); + final int result = upperBound(asLongChunk, startPosition, startPosition + runLength, hiValue); final long hiCount = ssmLo.getMaxCount(); - if (result > startPosition && asLongChunk.get(result - 1) == hiValue - && counts.get(result - 1) > hiCount) { + if (result > startPosition && asLongChunk.get(result - 1) == hiValue && counts.get(result - 1) > hiCount) { leftOvers.setValue((int) (counts.get(result - 1) - hiCount)); } else { leftOvers.setValue(0); @@ -80,16 +74,13 @@ public int pivot(SegmentedSortedMultiSet segmentedSortedMultiSet, } @Override - public int pivot(SegmentedSortedMultiSet segmentedSortedMultiSet, - Chunk valueCopy, IntChunk counts, - int startPosition, int runLength) { + public int pivot(SegmentedSortedMultiSet segmentedSortedMultiSet, Chunk valueCopy, + IntChunk counts, int startPosition, int runLength) { final LongChunk asLongChunk = valueCopy.asLongChunk(); - final LongSegmentedSortedMultiset ssmLo = - (LongSegmentedSortedMultiset) segmentedSortedMultiSet; + final LongSegmentedSortedMultiset ssmLo = (LongSegmentedSortedMultiset) segmentedSortedMultiSet; final long hiValue = ssmLo.getMaxLong(); - final int result = - upperBound(asLongChunk, startPosition, startPosition + runLength, hiValue); + final int result = upperBound(asLongChunk, startPosition, startPosition + runLength, hiValue); return result - startPosition; } @@ -103,8 +94,8 @@ public int pivot(SegmentedSortedMultiSet 
segmentedSortedMultiSet, * @param searchValue the value to find * @return the highest index that is less than or equal to valuesToSearch */ - private static int upperBound(LongChunk valuesToSearch, int lo, - int hi, long searchValue) { + private static int upperBound(LongChunk valuesToSearch, int lo, int hi, + long searchValue) { while (lo < hi) { final int mid = (lo + hi) >>> 1; final long testValue = valuesToSearch.get(mid); diff --git a/DB/src/main/java/io/deephaven/db/v2/by/ssmpercentile/SsmChunkedPercentileOperator.java b/DB/src/main/java/io/deephaven/db/v2/by/ssmpercentile/SsmChunkedPercentileOperator.java index e0556a3baf6..15868f0e2e3 100644 --- a/DB/src/main/java/io/deephaven/db/v2/by/ssmpercentile/SsmChunkedPercentileOperator.java +++ b/DB/src/main/java/io/deephaven/db/v2/by/ssmpercentile/SsmChunkedPercentileOperator.java @@ -24,8 +24,8 @@ * Iterative average operator. */ public class SsmChunkedPercentileOperator implements IterativeChunkedAggregationOperator { - private static final int NODE_SIZE = Configuration.getInstance() - .getIntegerWithDefault("SsmChunkedMinMaxOperator.nodeSize", 4096); + private static final int NODE_SIZE = + Configuration.getInstance().getIntegerWithDefault("SsmChunkedMinMaxOperator.nodeSize", 4096); private final ArrayBackedColumnSource internalResult; private final ColumnSource externalResult; /** @@ -39,8 +39,7 @@ public class SsmChunkedPercentileOperator implements IterativeChunkedAggregation private final ChunkType chunkType; private final PercentileTypeHelper percentileTypeHelper; - public SsmChunkedPercentileOperator(Class type, double percentile, boolean averageMedian, - String name) { + public SsmChunkedPercentileOperator(Class type, double percentile, boolean averageMedian, String name) { this.name = name; this.ssms = new ObjectArraySource<>(SegmentedSortedMultiSet.class); final boolean isDateTime = type == DBDateTime.class; @@ -66,10 +65,8 @@ public SsmChunkedPercentileOperator(Class type, double percentile, boolean 
av internalResult = new FloatArraySource(); break; default: - // for things that are not int, long, double, or float we do not actually - // average the median; - // we just do the standard 50-%tile thing. It might be worth defining this - // to be friendlier. + // for things that are not int, long, double, or float we do not actually average the median; + // we just do the standard 50-%tile thing. It might be worth defining this to be friendlier. internalResult = ArrayBackedColumnSource.getMemoryColumnSource(0, type); } } else { @@ -80,18 +77,15 @@ public SsmChunkedPercentileOperator(Class type, double percentile, boolean av compactAndCountKernel = CompactKernel.makeCompact(chunkType); ssmFactory = SegmentedSortedMultiSet.makeFactory(chunkType, NODE_SIZE, type); removeContextFactory = SegmentedSortedMultiSet.makeRemoveContextFactory(NODE_SIZE); - percentileTypeHelper = - makeTypeHelper(chunkType, type, percentile, averageMedian, internalResult); + percentileTypeHelper = makeTypeHelper(chunkType, type, percentile, averageMedian, internalResult); } - private static PercentileTypeHelper makeTypeHelper(ChunkType chunkType, Class type, - double percentile, boolean averageMedian, ArrayBackedColumnSource resultColumn) { + private static PercentileTypeHelper makeTypeHelper(ChunkType chunkType, Class type, double percentile, + boolean averageMedian, ArrayBackedColumnSource resultColumn) { if (averageMedian) { switch (chunkType) { - // for things that are not int, long, double, or float we do not actually average - // the median; - // we just do the standard 50-%tile thing. It might be worth defining this to be - // friendlier. + // for things that are not int, long, double, or float we do not actually average the median; + // we just do the standard 50-%tile thing. It might be worth defining this to be friendlier. 
case Char: return new CharPercentileTypeHelper(percentile, resultColumn); case Byte: @@ -100,8 +94,8 @@ private static PercentileTypeHelper makeTypeHelper(ChunkType chunkType, Class return new ShortPercentileTypeHelper(percentile, resultColumn); case Object: return makeObjectHelper(type, percentile, resultColumn); - // For the int, long, float, and double types we actually average the adjacent - // values to compute the median + // For the int, long, float, and double types we actually average the adjacent values to compute the + // median case Int: return new IntPercentileTypeMedianHelper(percentile, resultColumn); case Long: @@ -141,7 +135,7 @@ private static PercentileTypeHelper makeTypeHelper(ChunkType chunkType, Class @NotNull private static PercentileTypeHelper makeObjectHelper(Class type, double percentile, - ArrayBackedColumnSource resultColumn) { + ArrayBackedColumnSource resultColumn) { if (type == Boolean.class) { return new BooleanPercentileTypeHelper(percentile, resultColumn); } else if (type == DBDateTime.class) { @@ -152,24 +146,22 @@ private static PercentileTypeHelper makeObjectHelper(Class type, double perce } interface PercentileTypeHelper { - boolean setResult(SegmentedSortedMultiSet ssmLo, SegmentedSortedMultiSet ssmHi, - long destination); + boolean setResult(SegmentedSortedMultiSet ssmLo, SegmentedSortedMultiSet ssmHi, long destination); boolean setResultNull(long destination); - int pivot(SegmentedSortedMultiSet ssmLo, Chunk valueCopy, - IntChunk counts, int startPosition, int runLength, MutableInt leftOvers); + int pivot(SegmentedSortedMultiSet ssmLo, Chunk valueCopy, IntChunk counts, + int startPosition, int runLength, MutableInt leftOvers); - int pivot(SegmentedSortedMultiSet segmentedSortedMultiSet, - Chunk valueCopy, IntChunk counts, int startPosition, - int runLength); + int pivot(SegmentedSortedMultiSet segmentedSortedMultiSet, Chunk valueCopy, + IntChunk counts, int startPosition, int runLength); } @Override public void 
addChunk(BucketedContext bucketedContext, Chunk values, - LongChunk inputIndices, IntChunk destinations, - IntChunk startPositions, IntChunk length, - WritableBooleanChunk stateModified) { + LongChunk inputIndices, IntChunk destinations, + IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { final BucketSsmMinMaxContext context = (BucketSsmMinMaxContext) bucketedContext; context.valueCopy.setSize(values.size()); @@ -179,8 +171,7 @@ public void addChunk(BucketedContext bucketedContext, Chunk va context.lengthCopy.setSize(length.size()); context.lengthCopy.copyFromChunk(length, 0, 0, length.size()); - compactAndCountKernel.compactAndCount(context.valueCopy, context.counts, startPositions, - context.lengthCopy); + compactAndCountKernel.compactAndCount(context.valueCopy, context.counts, startPositions, context.lengthCopy); for (int ii = 0; ii < startPositions.size(); ++ii) { final int runLength = context.lengthCopy.get(ii); @@ -193,8 +184,7 @@ public void addChunk(BucketedContext bucketedContext, Chunk va final SegmentedSortedMultiSet ssmLo = ssmLoForSlot(destination); final SegmentedSortedMultiSet ssmHi = ssmHiForSlot(destination); - pivotedInsertion(context, ssmLo, ssmHi, startPosition, runLength, context.valueCopy, - context.counts); + pivotedInsertion(context, ssmLo, ssmHi, startPosition, runLength, context.valueCopy, context.counts); stateModified.set(ii, percentileTypeHelper.setResult(ssmLo, ssmHi, destination)); } @@ -202,9 +192,9 @@ public void addChunk(BucketedContext bucketedContext, Chunk va @Override public void removeChunk(BucketedContext bucketedContext, Chunk values, - LongChunk inputIndices, IntChunk destinations, - IntChunk startPositions, IntChunk length, - WritableBooleanChunk stateModified) { + LongChunk inputIndices, IntChunk destinations, + IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { final BucketSsmMinMaxContext context = (BucketSsmMinMaxContext) bucketedContext; 
context.valueCopy.setSize(values.size()); @@ -214,8 +204,7 @@ public void removeChunk(BucketedContext bucketedContext, Chunk context.lengthCopy.setSize(length.size()); context.lengthCopy.copyFromChunk(length, 0, 0, length.size()); - compactAndCountKernel.compactAndCount(context.valueCopy, context.counts, startPositions, - context.lengthCopy); + compactAndCountKernel.compactAndCount(context.valueCopy, context.counts, startPositions, context.lengthCopy); final SegmentedSortedMultiSet.RemoveContext removeContext = removeContextFactory.get(); for (int ii = 0; ii < startPositions.size(); ++ii) { @@ -228,8 +217,8 @@ public void removeChunk(BucketedContext bucketedContext, Chunk final SegmentedSortedMultiSet ssmLo = ssmLoForSlot(destination); final SegmentedSortedMultiSet ssmHi = ssmHiForSlot(destination); - pivotedRemoval(context, removeContext, startPosition, runLength, ssmLo, ssmHi, - context.valueCopy, context.counts); + pivotedRemoval(context, removeContext, startPosition, runLength, ssmLo, ssmHi, context.valueCopy, + context.counts); final boolean modified = percentileTypeHelper.setResult(ssmLo, ssmHi, destination); if (ssmLo.size() == 0) { @@ -242,32 +231,27 @@ public void removeChunk(BucketedContext bucketedContext, Chunk } } - private void pivotedRemoval(SsmMinMaxContext context, - SegmentedSortedMultiSet.RemoveContext removeContext, int startPosition, int runLength, - SegmentedSortedMultiSet ssmLo, SegmentedSortedMultiSet ssmHi, - WritableChunk valueCopy, WritableIntChunk counts) { - // We have no choice but to split this chunk, and furthermore to make sure that we do not - // remove more + private void pivotedRemoval(SsmMinMaxContext context, SegmentedSortedMultiSet.RemoveContext removeContext, + int startPosition, int runLength, SegmentedSortedMultiSet ssmLo, SegmentedSortedMultiSet ssmHi, + WritableChunk valueCopy, WritableIntChunk counts) { + // We have no choice but to split this chunk, and furthermore to make sure that we do not remove more // of the 
maximum lo value than actually exist within ssmLo. final MutableInt leftOvers = new MutableInt(); int loPivot; if (ssmLo.size() > 0) { - loPivot = percentileTypeHelper.pivot(ssmLo, valueCopy, counts, startPosition, runLength, - leftOvers); - Assert.leq(leftOvers.intValue(), "leftOvers.intValue()", ssmHi.totalSize(), - "ssmHi.totalSize()"); + loPivot = percentileTypeHelper.pivot(ssmLo, valueCopy, counts, startPosition, runLength, leftOvers); + Assert.leq(leftOvers.intValue(), "leftOvers.intValue()", ssmHi.totalSize(), "ssmHi.totalSize()"); } else { loPivot = 0; } if (loPivot > 0) { final WritableChunk loValueSlice = - context.valueResettable.resetFromChunk(valueCopy, startPosition, loPivot); + context.valueResettable.resetFromChunk(valueCopy, startPosition, loPivot); final WritableIntChunk loCountSlice = - context.countResettable.resetFromChunk(counts, startPosition, loPivot); + context.countResettable.resetFromChunk(counts, startPosition, loPivot); if (leftOvers.intValue() > 0) { - counts.set(startPosition + loPivot - 1, - counts.get(startPosition + loPivot - 1) - leftOvers.intValue()); + counts.set(startPosition + loPivot - 1, counts.get(startPosition + loPivot - 1) - leftOvers.intValue()); } ssmLo.remove(removeContext, loValueSlice, loCountSlice); } @@ -278,38 +262,37 @@ private void pivotedRemoval(SsmMinMaxContext context, } if (loPivot < runLength) { - final WritableChunk hiValueSlice = context.valueResettable - .resetFromChunk(valueCopy, startPosition + loPivot, runLength - loPivot); - final WritableIntChunk hiCountSlice = context.countResettable - .resetFromChunk(counts, startPosition + loPivot, runLength - loPivot); + final WritableChunk hiValueSlice = + context.valueResettable.resetFromChunk(valueCopy, startPosition + loPivot, runLength - loPivot); + final WritableIntChunk hiCountSlice = + context.countResettable.resetFromChunk(counts, startPosition + loPivot, runLength - loPivot); ssmHi.remove(removeContext, hiValueSlice, hiCountSlice); } } private void 
pivotedInsertion(SsmMinMaxContext context, SegmentedSortedMultiSet ssmLo, - SegmentedSortedMultiSet ssmHi, int startPosition, int runLength, - WritableChunk valueCopy, WritableIntChunk counts) { + SegmentedSortedMultiSet ssmHi, int startPosition, int runLength, WritableChunk valueCopy, + WritableIntChunk counts) { final int loPivot; if (ssmLo.size() > 0) { - loPivot = - percentileTypeHelper.pivot(ssmLo, valueCopy, counts, startPosition, runLength); + loPivot = percentileTypeHelper.pivot(ssmLo, valueCopy, counts, startPosition, runLength); } else { loPivot = 0; } if (loPivot > 0) { final WritableChunk loValueSlice = - context.valueResettable.resetFromChunk(valueCopy, startPosition, loPivot); + context.valueResettable.resetFromChunk(valueCopy, startPosition, loPivot); final WritableIntChunk loCountSlice = - context.countResettable.resetFromChunk(counts, startPosition, loPivot); + context.countResettable.resetFromChunk(counts, startPosition, loPivot); ssmLo.insert(loValueSlice, loCountSlice); } if (loPivot < runLength) { - final WritableChunk hiValueSlice = context.valueResettable - .resetFromChunk(valueCopy, startPosition + loPivot, runLength - loPivot); - final WritableIntChunk hiCountSlice = context.countResettable - .resetFromChunk(counts, startPosition + loPivot, runLength - loPivot); + final WritableChunk hiValueSlice = + context.valueResettable.resetFromChunk(valueCopy, startPosition + loPivot, runLength - loPivot); + final WritableIntChunk hiCountSlice = + context.countResettable.resetFromChunk(counts, startPosition + loPivot, runLength - loPivot); ssmHi.insert(hiValueSlice, hiCountSlice); } } @@ -317,9 +300,9 @@ private void pivotedInsertion(SsmMinMaxContext context, SegmentedSortedMultiSet @Override public void modifyChunk(BucketedContext bucketedContext, Chunk preValues, - Chunk postValues, LongChunk postShiftIndices, - IntChunk destinations, IntChunk startPositions, - IntChunk length, WritableBooleanChunk stateModified) { + Chunk postValues, LongChunk 
postShiftIndices, + IntChunk destinations, IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { final BucketSsmMinMaxContext context = (BucketSsmMinMaxContext) bucketedContext; context.valueCopy.setSize(preValues.size()); @@ -329,8 +312,7 @@ public void modifyChunk(BucketedContext bucketedContext, Chunk context.lengthCopy.setSize(length.size()); context.lengthCopy.copyFromChunk(length, 0, 0, length.size()); - compactAndCountKernel.compactAndCount(context.valueCopy, context.counts, startPositions, - context.lengthCopy); + compactAndCountKernel.compactAndCount(context.valueCopy, context.counts, startPositions, context.lengthCopy); final SegmentedSortedMultiSet.RemoveContext removeContext = removeContextFactory.get(); context.ssmsToMaybeClear.fillWithValue(0, destinations.size(), false); @@ -345,8 +327,8 @@ public void modifyChunk(BucketedContext bucketedContext, Chunk final SegmentedSortedMultiSet ssmLo = ssmLoForSlot(destination); final SegmentedSortedMultiSet ssmHi = ssmHiForSlot(destination); - pivotedRemoval(context, removeContext, startPosition, runLength, ssmLo, ssmHi, - context.valueCopy, context.counts); + pivotedRemoval(context, removeContext, startPosition, runLength, ssmLo, ssmHi, context.valueCopy, + context.counts); if (ssmLo.size() == 0 && ssmHi.size() == 0) { context.ssmsToMaybeClear.set(ii, true); } @@ -359,8 +341,7 @@ public void modifyChunk(BucketedContext bucketedContext, Chunk context.lengthCopy.setSize(length.size()); context.lengthCopy.copyFromChunk(length, 0, 0, length.size()); - compactAndCountKernel.compactAndCount(context.valueCopy, context.counts, startPositions, - context.lengthCopy); + compactAndCountKernel.compactAndCount(context.valueCopy, context.counts, startPositions, context.lengthCopy); for (int ii = 0; ii < startPositions.size(); ++ii) { final int runLength = context.lengthCopy.get(ii); final int startPosition = startPositions.get(ii); @@ -373,7 +354,7 @@ public void modifyChunk(BucketedContext 
bucketedContext, Chunk stateModified.set(ii, percentileTypeHelper.setResultNull(destination)); } else { stateModified.set(ii, percentileTypeHelper.setResult(ssmLoForSlot(destination), - ssmHiForSlot(destination), destination)); + ssmHiForSlot(destination), destination)); } continue; } @@ -381,17 +362,15 @@ public void modifyChunk(BucketedContext bucketedContext, Chunk final SegmentedSortedMultiSet ssmLo = ssmLoForSlot(destination); final SegmentedSortedMultiSet ssmHi = ssmHiForSlot(destination); - pivotedInsertion(context, ssmLo, ssmHi, startPosition, runLength, context.valueCopy, - context.counts); + pivotedInsertion(context, ssmLo, ssmHi, startPosition, runLength, context.valueCopy, context.counts); stateModified.set(ii, percentileTypeHelper.setResult(ssmLo, ssmHi, destination)); } } @Override - public boolean addChunk(SingletonContext singletonContext, int chunkSize, - Chunk values, LongChunk inputIndices, - long destination) { + public boolean addChunk(SingletonContext singletonContext, int chunkSize, Chunk values, + LongChunk inputIndices, long destination) { final SsmMinMaxContext context = (SsmMinMaxContext) singletonContext; context.valueCopy.setSize(values.size()); @@ -401,16 +380,14 @@ public boolean addChunk(SingletonContext singletonContext, int chunkSize, final SegmentedSortedMultiSet ssmLo = ssmLoForSlot(destination); final SegmentedSortedMultiSet ssmHi = ssmHiForSlot(destination); if (context.valueCopy.size() > 0) { - pivotedInsertion(context, ssmLo, ssmHi, 0, context.valueCopy.size(), context.valueCopy, - context.counts); + pivotedInsertion(context, ssmLo, ssmHi, 0, context.valueCopy.size(), context.valueCopy, context.counts); } return percentileTypeHelper.setResult(ssmLo, ssmHi, destination); } @Override - public boolean removeChunk(SingletonContext singletonContext, int chunkSize, - Chunk values, LongChunk inputIndices, - long destination) { + public boolean removeChunk(SingletonContext singletonContext, int chunkSize, Chunk values, + LongChunk 
inputIndices, long destination) { final SsmMinMaxContext context = (SsmMinMaxContext) singletonContext; context.valueCopy.setSize(values.size()); @@ -423,8 +400,8 @@ public boolean removeChunk(SingletonContext singletonContext, int chunkSize, final SegmentedSortedMultiSet ssmLo = ssmLoForSlot(destination); final SegmentedSortedMultiSet ssmHi = ssmHiForSlot(destination); - pivotedRemoval(context, context.removeContext, 0, context.valueCopy.size(), ssmLo, ssmHi, - context.valueCopy, context.counts); + pivotedRemoval(context, context.removeContext, 0, context.valueCopy.size(), ssmLo, ssmHi, context.valueCopy, + context.counts); final boolean modified = percentileTypeHelper.setResult(ssmLo, ssmHi, destination); if (ssmLo.size() == 0) { clearSsm(destination, 0); @@ -436,9 +413,8 @@ public boolean removeChunk(SingletonContext singletonContext, int chunkSize, } @Override - public boolean modifyChunk(SingletonContext singletonContext, int chunkSize, - Chunk preValues, Chunk postValues, - LongChunk postShiftIndices, long destination) { + public boolean modifyChunk(SingletonContext singletonContext, int chunkSize, Chunk preValues, + Chunk postValues, LongChunk postShiftIndices, long destination) { final SsmMinMaxContext context = (SsmMinMaxContext) singletonContext; context.valueCopy.setSize(preValues.size()); @@ -451,8 +427,8 @@ public boolean modifyChunk(SingletonContext singletonContext, int chunkSize, ssmLo = ssmLoForSlot(destination); ssmHi = ssmHiForSlot(destination); - pivotedRemoval(context, context.removeContext, 0, context.valueCopy.size(), ssmLo, - ssmHi, context.valueCopy, context.counts); + pivotedRemoval(context, context.removeContext, 0, context.valueCopy.size(), ssmLo, ssmHi, context.valueCopy, + context.counts); } context.valueCopy.setSize(postValues.size()); @@ -464,8 +440,7 @@ public boolean modifyChunk(SingletonContext singletonContext, int chunkSize, ssmLo = ssmLoForSlot(destination); ssmHi = ssmHiForSlot(destination); } - pivotedInsertion(context, 
ssmLo, ssmHi, 0, context.valueCopy.size(), context.valueCopy, - context.counts); + pivotedInsertion(context, ssmLo, ssmHi, 0, context.valueCopy.size(), context.valueCopy, context.counts); return percentileTypeHelper.setResult(ssmLo, ssmHi, destination); } else if (ssmLo != null && ssmLo.size() == 0 && ssmHi.size() == 0) { clearSsm(destination, 0); @@ -528,7 +503,7 @@ public SingletonContext makeSingletonContext(int size) { private static class SsmMinMaxContext implements SingletonContext { final SegmentedSortedMultiSet.RemoveContext removeContext = - SegmentedSortedMultiSet.makeRemoveContext(NODE_SIZE); + SegmentedSortedMultiSet.makeRemoveContext(NODE_SIZE); final WritableChunk valueCopy; final WritableIntChunk counts; final ResettableWritableChunk valueResettable; @@ -550,8 +525,7 @@ public void close() { } } - private static class BucketSsmMinMaxContext extends SsmMinMaxContext - implements BucketedContext { + private static class BucketSsmMinMaxContext extends SsmMinMaxContext implements BucketedContext { final WritableIntChunk lengthCopy; final WritableBooleanChunk ssmsToMaybeClear; diff --git a/DB/src/main/java/io/deephaven/db/v2/dbarrays/DbArrayColumnWrapper.java b/DB/src/main/java/io/deephaven/db/v2/dbarrays/DbArrayColumnWrapper.java index 460689dad4b..f4c635e850e 100644 --- a/DB/src/main/java/io/deephaven/db/v2/dbarrays/DbArrayColumnWrapper.java +++ b/DB/src/main/java/io/deephaven/db/v2/dbarrays/DbArrayColumnWrapper.java @@ -27,14 +27,12 @@ public class DbArrayColumnWrapper extends DbArray.Indirect { private final long startPadding; private final long endPadding; - public DbArrayColumnWrapper(@NotNull final ColumnSource columnSource, - @NotNull final Index index) { + public DbArrayColumnWrapper(@NotNull final ColumnSource columnSource, @NotNull final Index index) { this(columnSource, index, 0, 0); } - public DbArrayColumnWrapper(@NotNull final ColumnSource columnSource, - @NotNull final Index index, - final long startPadding, final long endPadding) { + 
public DbArrayColumnWrapper(@NotNull final ColumnSource columnSource, @NotNull final Index index, + final long startPadding, final long endPadding) { Assert.neqNull(index, "index"); this.columnSource = columnSource; this.index = index; @@ -61,14 +59,13 @@ public DbArray subArray(long fromIndexInclusive, long toIndexExclusive) { final long realFrom = ClampUtil.clampLong(0, index.size(), fromIndexInclusive); final long realTo = ClampUtil.clampLong(0, index.size(), toIndexExclusive); - long newStartPadding = toIndexExclusive < 0 ? toIndexExclusive - fromIndexInclusive - : Math.max(0, -fromIndexInclusive); - long newEndPadding = - fromIndexInclusive >= index.size() ? toIndexExclusive - fromIndexInclusive + long newStartPadding = + toIndexExclusive < 0 ? toIndexExclusive - fromIndexInclusive : Math.max(0, -fromIndexInclusive); + long newEndPadding = fromIndexInclusive >= index.size() ? toIndexExclusive - fromIndexInclusive : (int) Math.max(0, toIndexExclusive - index.size()); - return new DbArrayColumnWrapper<>(columnSource, index.subindexByPos(realFrom, realTo), - newStartPadding, newEndPadding); + return new DbArrayColumnWrapper<>(columnSource, index.subindexByPos(realFrom, realTo), newStartPadding, + newEndPadding); } public DbArray subArrayByPositions(long[] positions) { @@ -99,7 +96,7 @@ public T[] toArray(boolean shouldBeNullIfOutofBounds, long maxSize) { @SuppressWarnings("unchecked") final T result[] = (T[]) Array.newInstance(TypeUtils.getBoxedType(columnSource.getType()), - LongSizedDataStructure.intSize("toArray", sz)); + LongSizedDataStructure.intSize("toArray", sz)); for (int i = 0; i < sz; i++) { result[i] = get(i); } @@ -123,8 +120,8 @@ public DbArray getDirect() { // recursion! 
final long size = size(); // noinspection unchecked - final T[] array = (T[]) Array.newInstance(getComponentType(), - LongSizedDataStructure.intSize("toArray", size)); + final T[] array = + (T[]) Array.newInstance(getComponentType(), LongSizedDataStructure.intSize("toArray", size)); for (int ii = 0; ii < size; ++ii) { final T arrayBase = get(ii); if (arrayBase == null) { diff --git a/DB/src/main/java/io/deephaven/db/v2/dbarrays/DbPrevArrayColumnWrapper.java b/DB/src/main/java/io/deephaven/db/v2/dbarrays/DbPrevArrayColumnWrapper.java index d96998cf611..9701b7d01f8 100644 --- a/DB/src/main/java/io/deephaven/db/v2/dbarrays/DbPrevArrayColumnWrapper.java +++ b/DB/src/main/java/io/deephaven/db/v2/dbarrays/DbPrevArrayColumnWrapper.java @@ -27,20 +27,17 @@ public class DbPrevArrayColumnWrapper extends DbArray.Indirect { private final long startPadding; private final long endPadding; - public DbPrevArrayColumnWrapper(@NotNull final ColumnSource columnSource, - @NotNull final Index index) { + public DbPrevArrayColumnWrapper(@NotNull final ColumnSource columnSource, @NotNull final Index index) { this(columnSource, index, 0, 0); } - public DbPrevArrayColumnWrapper(@NotNull final ColumnSource columnSource, - @NotNull final Index index, - final long startPadding, final long endPadding) { + public DbPrevArrayColumnWrapper(@NotNull final ColumnSource columnSource, @NotNull final Index index, + final long startPadding, final long endPadding) { this(columnSource, index, startPadding, endPadding, false); } - private DbPrevArrayColumnWrapper(@NotNull final ColumnSource columnSource, - @NotNull final Index index, - final long startPadding, final long endPadding, final boolean alreadyPrevIndex) { + private DbPrevArrayColumnWrapper(@NotNull final ColumnSource columnSource, @NotNull final Index index, + final long startPadding, final long endPadding, final boolean alreadyPrevIndex) { Assert.neqNull(index, "index"); this.columnSource = columnSource; this.index = alreadyPrevIndex ? 
index : index.getPrevIndex(); @@ -66,14 +63,13 @@ public DbArray subArray(long fromIndexInclusive, long toIndexExclusive) { final long realFrom = ClampUtil.clampLong(0, index.size(), fromIndexInclusive); final long realTo = ClampUtil.clampLong(0, index.size(), toIndexExclusive); - long newStartPadding = toIndexExclusive < 0 ? toIndexExclusive - fromIndexInclusive - : Math.max(0, -fromIndexInclusive); - long newEndPadding = - fromIndexInclusive >= index.size() ? toIndexExclusive - fromIndexInclusive + long newStartPadding = + toIndexExclusive < 0 ? toIndexExclusive - fromIndexInclusive : Math.max(0, -fromIndexInclusive); + long newEndPadding = fromIndexInclusive >= index.size() ? toIndexExclusive - fromIndexInclusive : (int) Math.max(0, toIndexExclusive - index.size()); - return new DbPrevArrayColumnWrapper<>(columnSource, index.subindexByPos(realFrom, realTo), - newStartPadding, newEndPadding, true); + return new DbPrevArrayColumnWrapper<>(columnSource, index.subindexByPos(realFrom, realTo), newStartPadding, + newEndPadding, true); } @Override @@ -105,7 +101,7 @@ public T[] toArray(boolean shouldBeNullIfOutofBounds, int maxSize) { @SuppressWarnings("unchecked") T result[] = (T[]) Array.newInstance(TypeUtils.getBoxedType(columnSource.getType()), - LongSizedDataStructure.intSize("toArray", sz)); + LongSizedDataStructure.intSize("toArray", sz)); for (int i = 0; i < sz; i++) { result[i] = get(i); } diff --git a/DB/src/main/java/io/deephaven/db/v2/dbarrays/ReplicateDbArrayColumnWrappers.java b/DB/src/main/java/io/deephaven/db/v2/dbarrays/ReplicateDbArrayColumnWrappers.java index e576ee0ffa1..46ec0170440 100644 --- a/DB/src/main/java/io/deephaven/db/v2/dbarrays/ReplicateDbArrayColumnWrappers.java +++ b/DB/src/main/java/io/deephaven/db/v2/dbarrays/ReplicateDbArrayColumnWrappers.java @@ -11,9 +11,7 @@ public class ReplicateDbArrayColumnWrappers { public static void main(String[] args) throws IOException { - 
ReplicatePrimitiveCode.charToAllButBoolean(DbCharArrayColumnWrapper.class, - ReplicatePrimitiveCode.MAIN_SRC); - ReplicatePrimitiveCode.charToAllButBoolean(DbPrevCharArrayColumnWrapper.class, - ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.charToAllButBoolean(DbCharArrayColumnWrapper.class, ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.charToAllButBoolean(DbPrevCharArrayColumnWrapper.class, ReplicatePrimitiveCode.MAIN_SRC); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/hashing/ChunkEquals.java b/DB/src/main/java/io/deephaven/db/v2/hashing/ChunkEquals.java index f30f2b21ae3..ea5ace3ee1c 100644 --- a/DB/src/main/java/io/deephaven/db/v2/hashing/ChunkEquals.java +++ b/DB/src/main/java/io/deephaven/db/v2/hashing/ChunkEquals.java @@ -6,8 +6,7 @@ public interface ChunkEquals { /** - * Returns true iff the chunks have the same size() and each corresponding element of the chunk - * compares equal. + * Returns true iff the chunks have the same size() and each corresponding element of the chunk compares equal. * * @param lhs the left-hand side of the comparison * @param rhs the right-hand side of the comparison @@ -15,19 +14,18 @@ public interface ChunkEquals { boolean equalReduce(Chunk lhs, Chunk rhs); /** - * Called for the first (or only) pair of chunks, sets the corresponding destination entry to - * true if the values are equal, or false otherwise + * Called for the first (or only) pair of chunks, sets the corresponding destination entry to true if the values are + * equal, or false otherwise * * @param lhs the left-hand side of the comparison * @param rhs the right-hand side of the comparison * @param destination the chunk to write equality values into */ - void equal(Chunk lhs, Chunk rhs, - WritableBooleanChunk destination); + void equal(Chunk lhs, Chunk rhs, WritableBooleanChunk destination); /** - * For each pair of indices i and i + 1 in chunk; write true to destination[i] if they are - * equal, otherwise write false. 
+ * For each pair of indices i and i + 1 in chunk; write true to destination[i] if they are equal, otherwise write + * false. * * @param chunk the chunk to compare subsequent values in * @param destination the chunk to write equality values into, size is chunk.size() - 1 @@ -35,8 +33,8 @@ void equal(Chunk lhs, Chunk rhs, void equalNext(Chunk chunk, WritableBooleanChunk destination); /** - * Called for the first (or only) pair of chunks, sets the corresponding destination entry to - * true if lhs[lhsPositions] == rhs[rhsPositions]. + * Called for the first (or only) pair of chunks, sets the corresponding destination entry to true if + * lhs[lhsPositions] == rhs[rhsPositions]. * * @param lhsPositions the positions within left-hand side of the comparison * @param rhsPositions the positions within the right-hand side of the comparison @@ -45,34 +43,33 @@ void equal(Chunk lhs, Chunk rhs, * @param destination the chunk to write equality values into */ void equalPermuted(IntChunk lhsPositions, IntChunk rhsPositions, - Chunk lhs, Chunk rhs, WritableBooleanChunk destination); + Chunk lhs, Chunk rhs, WritableBooleanChunk destination); /** - * Called for the first (or only) pair of chunks, sets the corresponding destination entry to - * true if lhs[lhsPositions] == rhs. + * Called for the first (or only) pair of chunks, sets the corresponding destination entry to true if + * lhs[lhsPositions] == rhs. * * @param lhsPositions the positions within left-hand side of the comparison * @param lhs the left-hand side of the comparison * @param rhs the right-hand side of the comparison * @param destination the chunk to write equality values into */ - void equalLhsPermuted(IntChunk lhsPositions, Chunk lhs, - Chunk rhs, WritableBooleanChunk destination); + void equalLhsPermuted(IntChunk lhsPositions, Chunk lhs, Chunk rhs, + WritableBooleanChunk destination); /** - * Called for subsequent pair of chunks, if the corresponding destination entry is false, do - * nothing. 
If true, then set to false if the corresponding values are not equal. + * Called for subsequent pair of chunks, if the corresponding destination entry is false, do nothing. If true, then + * set to false if the corresponding values are not equal. * * @param lhs the left-hand side of the comparison * @param rhs the right-hand side of the comparison * @param destination the chunk to write equality values into */ - void andEqual(Chunk lhs, Chunk rhs, - WritableBooleanChunk destination); + void andEqual(Chunk lhs, Chunk rhs, WritableBooleanChunk destination); /** - * For each pair of indices i and i + 1 in chunk; if destination[i] is false do nothing, - * otherwise write true to destination[i] if they are equal. + * For each pair of indices i and i + 1 in chunk; if destination[i] is false do nothing, otherwise write true to + * destination[i] if they are equal. * * @param chunk the chunk to compare subsequent values in * @param destination the chunk to write equality values into, size is chunk.size() - 1 @@ -80,8 +77,8 @@ void andEqual(Chunk lhs, Chunk rhs, void andEqualNext(Chunk chunk, WritableBooleanChunk destination); /** - * If destination[i] is false do nothing, otherwise, sets the corresponding destination entry to - * true if lhs[lhsPositions] == rhs[rhsPositions]. + * If destination[i] is false do nothing, otherwise, sets the corresponding destination entry to true if + * lhs[lhsPositions] == rhs[rhsPositions]. 
* * @param lhsPositions the positions within left-hand side of the comparison * @param rhsPositions the positions within the right-hand side of the comparison @@ -89,43 +86,40 @@ void andEqual(Chunk lhs, Chunk rhs, * @param rhs the right-hand side of the comparison * @param destination the chunk to write equality values into */ - void andEqualPermuted(IntChunk lhsPositions, - IntChunk rhsPositions, Chunk lhs, Chunk rhs, - WritableBooleanChunk destination); + void andEqualPermuted(IntChunk lhsPositions, IntChunk rhsPositions, + Chunk lhs, Chunk rhs, WritableBooleanChunk destination); /** - * If destination[i] is false do nothing, otherwise, sets the corresponding destination entry to - * true if lhs[lhsPositions] == rhs. + * If destination[i] is false do nothing, otherwise, sets the corresponding destination entry to true if + * lhs[lhsPositions] == rhs. * * @param lhsPositions the positions within left-hand side of the comparison * @param lhs the left-hand side of the comparison * @param rhs the right-hand side of the comparison * @param destination the chunk to write equality values into */ - void andEqualLhsPermuted(IntChunk lhsPositions, Chunk lhs, - Chunk rhs, WritableBooleanChunk destination); + void andEqualLhsPermuted(IntChunk lhsPositions, Chunk lhs, Chunk rhs, + WritableBooleanChunk destination); /** - * Called for the first (or only) pair of chunks, sets the corresponding destination entry to - * true if the values are not equal, or false otherwise + * Called for the first (or only) pair of chunks, sets the corresponding destination entry to true if the values are + * not equal, or false otherwise * * @param lhs the left-hand side of the comparison * @param rhs the right-hand side of the comparison * @param destination the chunk to write equality values into */ - void notEqual(Chunk lhs, Chunk rhs, - WritableBooleanChunk destination); + void notEqual(Chunk lhs, Chunk rhs, WritableBooleanChunk destination); /** - * Called for subsequent pair of chunks, if 
the corresponding destination entry is false, do - * nothing. If true, then set to false if the corresponding values are equal. + * Called for subsequent pair of chunks, if the corresponding destination entry is false, do nothing. If true, then + * set to false if the corresponding values are equal. * * @param lhs the left-hand side of the comparison * @param rhs the right-hand side of the comparison * @param destination the chunk to write equality values into */ - void andNotEqual(Chunk lhs, Chunk rhs, - WritableBooleanChunk destination); + void andNotEqual(Chunk lhs, Chunk rhs, WritableBooleanChunk destination); /** @@ -137,8 +131,8 @@ void andNotEqual(Chunk lhs, Chunk rhs, * @param valuesChunk the chunk of values we are interested in * @param destinations the destination chunk to write equality values into */ - void equalPairs(IntChunk chunkPositionsToCheckForEquality, - Chunk valuesChunk, WritableBooleanChunk destinations); + void equalPairs(IntChunk chunkPositionsToCheckForEquality, Chunk valuesChunk, + WritableBooleanChunk destinations); /** * Compares valuesChunk[chunkPositionsToCheckForEquality[pp * 2]] and @@ -149,8 +143,8 @@ void equalPairs(IntChunk chunkPositionsToCheckForEquality, * @param valuesChunk the chunk of values we are interested in * @param destinations the destination chunk to write equality values into */ - void andEqualPairs(IntChunk chunkPositionsToCheckForEquality, - Chunk valuesChunk, WritableBooleanChunk destinations); + void andEqualPairs(IntChunk chunkPositionsToCheckForEquality, Chunk valuesChunk, + WritableBooleanChunk destinations); static ChunkEquals makeEqual(ChunkType chunkType) { switch (chunkType) { diff --git a/DB/src/main/java/io/deephaven/db/v2/hashing/ChunkHasher.java b/DB/src/main/java/io/deephaven/db/v2/hashing/ChunkHasher.java index d1bda7a3434..80d459d562f 100644 --- a/DB/src/main/java/io/deephaven/db/v2/hashing/ChunkHasher.java +++ b/DB/src/main/java/io/deephaven/db/v2/hashing/ChunkHasher.java @@ -8,8 +8,7 @@ public 
interface ChunkHasher { /** - * Called for the first (or only) hash value, sets the hash codes in destination corresponding - * to values. + * Called for the first (or only) hash value, sets the hash codes in destination corresponding to values. * * @param values the values to hash * @param destination the chunk to write hash values into @@ -17,8 +16,7 @@ public interface ChunkHasher { void hashInitial(Chunk values, WritableIntChunk destination); /** - * Called for subsequent hash values, updates the hash codes in destination corresponding to - * values. + * Called for subsequent hash values, updates the hash codes in destination corresponding to values. * * @param values the values to hash * @param destination the chunk to update hash values into diff --git a/DB/src/main/java/io/deephaven/db/v2/hashing/HashMapBase.java b/DB/src/main/java/io/deephaven/db/v2/hashing/HashMapBase.java index fe20fae1a3c..16bde77d6e4 100644 --- a/DB/src/main/java/io/deephaven/db/v2/hashing/HashMapBase.java +++ b/DB/src/main/java/io/deephaven/db/v2/hashing/HashMapBase.java @@ -26,48 +26,42 @@ public abstract class HashMapBase implements TNullableLongLongMap { static final long DEFAULT_NO_ENTRY_VALUE = -1; static final float DEFAULT_LOAD_FACTOR = 0.5f; - // There are three "special keys" removed from the range of valid keys that are used to - // represent various slot + // There are three "special keys" removed from the range of valid keys that are used to represent various slot // states: // 1. SPECIAL_KEY_FOR_EMPTY_SLOT is used to represent a slot that has never been used. - // 2. SPECIAL_KEY_FOR_DELETED_SLOT is used to represent a slot that was once in use, but the key - // that was formerly + // 2. SPECIAL_KEY_FOR_DELETED_SLOT is used to represent a slot that was once in use, but the key that was formerly // present there has been deleted. // 3. NULL_LONG is used to represent the null key. // // These values must all be distinct. 
// - // For the sake of efficiency, we define SPECIAL_KEY_FOR_EMPTY_SLOT to be 0. This means that our - // arrays are ready to - // go after being allocated, and we don't need to fill them with a special value. However, we - // are aware that our - // callers may wish to use 0 as a key. To support this, we remap key=0 on get, put, remove, and - // iteration operations + // For the sake of efficiency, we define SPECIAL_KEY_FOR_EMPTY_SLOT to be 0. This means that our arrays are ready to + // go after being allocated, and we don't need to fill them with a special value. However, we are aware that our + // callers may wish to use 0 as a key. To support this, we remap key=0 on get, put, remove, and iteration operations // to our special value called REDIRECTED_KEY_FOR_EMPTY_SLOT. static final long SPECIAL_KEY_FOR_EMPTY_SLOT = 0; static final long REDIRECTED_KEY_FOR_EMPTY_SLOT = NULL_LONG + 1; static final long SPECIAL_KEY_FOR_DELETED_SLOT = NULL_LONG + 2; /** - * This is the load factor we use as the hashtable nears its maximum size, in order to try to - * keep functioning (albeit with reduced performance) rather than failing. + * This is the load factor we use as the hashtable nears its maximum size, in order to try to keep functioning + * (albeit with reduced performance) rather than failing. */ private static final float NEARLY_FULL_LOAD_FACTOR = 0.9f; /** - * This is the fraction of the maximum possible size at which we just give up and throw an - * exception. It is kept slightly smaller than the NEARLY_FULL_LOAD_FACTOR (otherwise, we might - * end up in a situation where every put caused a rehash). Additionally, for some reason K2V2 is - * much less tolerant of getting full than the other two (it gets very slow as it approaches the - * max). For this reason, until we figure it out, we maintain individual size factors for each - * KnVn. + * This is the fraction of the maximum possible size at which we just give up and throw an exception. 
It is kept + * slightly smaller than the NEARLY_FULL_LOAD_FACTOR (otherwise, we might end up in a situation where every put + * caused a rehash). Additionally, for some reason K2V2 is much less tolerant of getting full than the other two (it + * gets very slow as it approaches the max). For this reason, until we figure it out, we maintain individual size + * factors for each KnVn. */ private static final float SIZE_LIMIT_FACTOR1 = 0.85f; private static final float SIZE_LIMIT_FACTOR2 = 0.75f; private static final float SIZE_LIMIT_FACTOR4 = 0.85f; /** - * This is the size at which we just give up and throw an exception rather than do a new put. It - * is number of entries (aka number of longs / 2) * SIZE_LIMIT_FACTORn. + * This is the size at which we just give up and throw an exception rather than do a new put. It is number of + * entries (aka number of longs / 2) * SIZE_LIMIT_FACTORn. */ static final int SIZE_LIMIT1 = (int) (Integer.MAX_VALUE / 2 * SIZE_LIMIT_FACTOR1); static final int SIZE_LIMIT2 = (int) (Integer.MAX_VALUE / 2 * SIZE_LIMIT_FACTOR2); @@ -86,20 +80,15 @@ public abstract class HashMapBase implements TNullableLongLongMap { private final int desiredInitialCapacity; private final float loadFactor; final long noEntryValue; - // There are three kinds of slots: empty, holding a value, and deleted (formerly holding a - // value). + // There are three kinds of slots: empty, holding a value, and deleted (formerly holding a value). // 'size' is the number of slots holding a value. int size; - // 'nonEmptySlots' is the number of slots either holding a value or deleted. It is an invariant - // that + // 'nonEmptySlots' is the number of slots either holding a value or deleted. It is an invariant that // nonEmptySlots >= size. int nonEmptySlots; - // The threshold (generally loadFactor * capacity) at which a rehash is triggered. This happens - // when nonEmptySlots - // meets or exceeds rehashThreshold. 
There is a decision to make about whether to rehash at the - // same capacity or a - // larger capacity. The heuristic we use is that if size >= (2/3) * nonEmptySlots we rehash to a - // larger capacity. + // The threshold (generally loadFactor * capacity) at which a rehash is triggered. This happens when nonEmptySlots + // meets or exceeds rehashThreshold. There is a decision to make about whether to rehash at the same capacity or a + // larger capacity. The heuristic we use is that if size >= (2/3) * nonEmptySlots we rehash to a larger capacity. int rehashThreshold; // In various places in the code, we will be dealing with three kinds of units: // - How many buckets in the array (this is always a prime number) @@ -125,15 +114,13 @@ static long fixKey(long key) { long[] allocateKeysAndValuesArray(int entriesPerBucket) { // DesiredInitialCapacity is in units of 'entries'. // Ceiling(desiredInitialCapacity / entriesPerBucket) - final int desiredNumBuckets = - (desiredInitialCapacity + entriesPerBucket - 1) / entriesPerBucket; + final int desiredNumBuckets = (desiredInitialCapacity + entriesPerBucket - 1) / entriesPerBucket; // Because we want the number of buckets to be prime final int proposedBucketCapacity = PrimeFinder.nextPrime(desiredNumBuckets); final int maxBucketCapacity = getMaxBucketCapacity(entriesPerBucket); final int newBucketCapacity = Math.min(proposedBucketCapacity, maxBucketCapacity); - Assert.leq((long) newBucketCapacity * entriesPerBucket * 2, - "(long)newBucketCapacity * entriesPerBucket * 2", - Integer.MAX_VALUE, "Integer.MAX_VALUE"); + Assert.leq((long) newBucketCapacity * entriesPerBucket * 2, "(long)newBucketCapacity * entriesPerBucket * 2", + Integer.MAX_VALUE, "Integer.MAX_VALUE"); final int entryCapacity = newBucketCapacity * entriesPerBucket; final int longCapacity = entryCapacity * 2; rehashThreshold = (int) (entryCapacity * loadFactor); @@ -152,15 +139,13 @@ void rehash(long[] oldKeysAndValues, boolean wantResize, int entriesPerBucket) 
{ final int maxBucketCapacity = getMaxBucketCapacity(entriesPerBucket); final int newBucketCapacity = Math.min(proposedBucketCapacity, maxBucketCapacity); Assert.leq((long) newBucketCapacity * entriesPerBucket * 2, - "(long)newBucketCapacity * entriesPerBucket * 2", - Integer.MAX_VALUE, "Integer.MAX_VALUE"); + "(long)newBucketCapacity * entriesPerBucket * 2", + Integer.MAX_VALUE, "Integer.MAX_VALUE"); final int newEntryCapacity = newBucketCapacity * entriesPerBucket; newNumLongs = newEntryCapacity * 2; - // If we reach the max bucket capacity, then force the rehash threshold to a high number - // like 90%. - final float loadFactorToUse = - newBucketCapacity < maxBucketCapacity ? loadFactor : NEARLY_FULL_LOAD_FACTOR; + // If we reach the max bucket capacity, then force the rehash threshold to a high number like 90%. + final float loadFactorToUse = newBucketCapacity < maxBucketCapacity ? loadFactor : NEARLY_FULL_LOAD_FACTOR; rehashThreshold = (int) (newEntryCapacity * loadFactorToUse); } else { newNumLongs = oldNumLongs; @@ -185,14 +170,13 @@ void checkSize(int sizeLimit) { // If the size reaches the max allowed value, then throw an exception. if (size >= sizeLimit) { throw new UnsupportedOperationException( - String.format( - "The Hashtable has exceeded its maximum capacity of %d elements. To get more space you can use the other hashtable implementation by setting the property %s to false", - sizeLimit, RedirectionIndex.USE_LOCK_FREE_IMPL_PROPERTY_NAME)); + String.format( + "The Hashtable has exceeded its maximum capacity of %d elements. 
To get more space you can use the other hashtable implementation by setting the property %s to false", + sizeLimit, RedirectionIndex.USE_LOCK_FREE_IMPL_PROPERTY_NAME)); } } - protected abstract long putImplNoTranslate(long[] kvs, long key, long value, - boolean insertOnly); + protected abstract long putImplNoTranslate(long[] kvs, long key, long value, boolean insertOnly); protected abstract void setKeysAndValues(long[] keysAndValues); @@ -213,8 +197,7 @@ final int capacityImpl(long[] keysAndValues) { final void clearImpl(long[] keysAndValues) { size = 0; nonEmptySlots = 0; - // We leave rehashThreshold alone because the array size (and therefore the hashtable - // capacity) isn't changing. + // We leave rehashThreshold alone because the array size (and therefore the hashtable capacity) isn't changing. Arrays.fill(keysAndValues, SPECIAL_KEY_FOR_EMPTY_SLOT); } @@ -226,8 +209,7 @@ final void resetToNullImpl() { @Override public final long getNoEntryKey() { - // It doesn't make sense to call this method because the caller can't observe our - // "noEntryKey" anyway. + // It doesn't make sense to call this method because the caller can't observe our "noEntryKey" anyway. throw new UnsupportedOperationException(); } @@ -239,8 +221,7 @@ public final long getNoEntryValue() { /** * @param kv Our keys and values array * @param space The array to populate (if {@code array} is not null and {@code array.length} >= - * {@link HashMapBase#size()}, otherwise an array of length {@link HashMapBase#size()} - * will be allocated. + * {@link HashMapBase#size()}, otherwise an array of length {@link HashMapBase#size()} will be allocated. * @param wantValues false to return keys; true to return values * @return The passed-in or newly-allocated array of (keys or values). */ @@ -248,10 +229,8 @@ final long[] keysOrValuesImpl(final long[] kv, final long[] space, final boolean final int sz = size; final long[] result = space != null && space.length >= sz ? 
space : new long[sz]; int nextIndex = 0; - // In a single-threaded case, we would not need the 'nextIndex < sz' part of the - // conjunction. But in the - // unsynchronized concurrent case, we might encounter more keys than would fit in the array. - // To avoid an index + // In a single-threaded case, we would not need the 'nextIndex < sz' part of the conjunction. But in the + // unsynchronized concurrent case, we might encounter more keys than would fit in the array. To avoid an index // range exception, we do the 'nextIndex < sz' test both here and in the loop below. for (int ii = 0; ii < kv.length && nextIndex < sz; ii += 2) { final long key = kv[ii]; @@ -262,8 +241,7 @@ final long[] keysOrValuesImpl(final long[] kv, final long[] space, final boolean if (wantValues) { resultEntry = kv[ii + 1]; } else { - resultEntry = - key == REDIRECTED_KEY_FOR_EMPTY_SLOT ? SPECIAL_KEY_FOR_EMPTY_SLOT : key; + resultEntry = key == REDIRECTED_KEY_FOR_EMPTY_SLOT ? SPECIAL_KEY_FOR_EMPTY_SLOT : key; } result[nextIndex++] = resultEntry; } @@ -307,22 +285,20 @@ public void remove() { } /* - * The strategy used in this class is to keep track of: - The current position (which could be - * any valid position as well as one before the start) - The next position (which could be any - * valid position as well as one after the end). Java iterator semantics makes this annoying. - * Because it's Java!™ We also have to make sure we un-redirect the - * REDIRECTED_KEY_FOR_EMPTY_SLOT back to 0. + * The strategy used in this class is to keep track of: - The current position (which could be any valid position as + * well as one before the start) - The next position (which could be any valid position as well as one after the + * end). Java iterator semantics makes this annoying. Because it's Java!™ We also have to make sure we un-redirect + * the REDIRECTED_KEY_FOR_EMPTY_SLOT back to 0. 
*/ private static class Iterator implements TLongLongIterator { - // We keep a local reference to this array so we can avoid crashing if there's an - // unprotected concurrent write + // We keep a local reference to this array so we can avoid crashing if there's an unprotected concurrent write // (e.g. a rehash that reallocates the owning array). private final long[] keysAndValues; private long currentKey; private long currentValue; /** - * nextIndex points to the next occupied slot (or the first occupied slot if we have just - * been constructed), or keysAndValues.length if there is no next occupied slot. + * nextIndex points to the next occupied slot (or the first occupied slot if we have just been constructed), or + * keysAndValues.length if there is no next occupied slot. */ private int nextIndex; @@ -338,10 +314,8 @@ public boolean hasNext() { @Override public void advance() { - // nextIndex points to some valid key and value (it cannot point past the end of the - // array, because you're - // not supposed to call advance() if hasNext() is false). So set current{Key,Value} from - // the array at + // nextIndex points to some valid key and value (it cannot point past the end of the array, because you're + // not supposed to call advance() if hasNext() is false). So set current{Key,Value} from the array at // nextIndex and then advance nextIndex to the next item or to the end of the array. Assert.lt(nextIndex, "nextIndex", keysAndValues.length, "keysAndValues.length"); final long key = keysAndValues[nextIndex]; @@ -390,17 +364,15 @@ public long setValue(long val) { } } - // Run this at class load time to confirm that the values returned by getMaxBucketCapacity - // aren't too large. + // Run this at class load time to confirm that the values returned by getMaxBucketCapacity aren't too large. 
// (It would be nice to also confirm that they are prime, but there's no easy way to do that) static { final int longsPerEntry = 2; for (int entriesPerBucket : new int[] {1, 2, 4}) { final long mbc = getMaxBucketCapacity(entriesPerBucket); // Assert.isPrime(mbc); - Assert.leq(mbc * entriesPerBucket * longsPerEntry, - "mbc * entriesPerBucket * longsPerEntry", - Integer.MAX_VALUE, "Integer.MAX_VALUE"); + Assert.leq(mbc * entriesPerBucket * longsPerEntry, "mbc * entriesPerBucket * longsPerEntry", + Integer.MAX_VALUE, "Integer.MAX_VALUE"); } } @@ -417,13 +389,11 @@ private static int getMaxBucketCapacity(int entriesPerBucket) { case 4: return 268435399; default: - throw new UnsupportedOperationException( - "Unexpected entriesPerBucket " + entriesPerBucket); + throw new UnsupportedOperationException("Unexpected entriesPerBucket " + entriesPerBucket); } } - // We don't currently call any of these methods, so I'm not going to bother implementing them. - // This is not a value + // We don't currently call any of these methods, so I'm not going to bother implementing them. This is not a value // judgment: if we want these methods, we can implement them later. @Override diff --git a/DB/src/main/java/io/deephaven/db/v2/hashing/HashMapK1V1.java b/DB/src/main/java/io/deephaven/db/v2/hashing/HashMapK1V1.java index d346a8b783d..699332207fa 100644 --- a/DB/src/main/java/io/deephaven/db/v2/hashing/HashMapK1V1.java +++ b/DB/src/main/java/io/deephaven/db/v2/hashing/HashMapK1V1.java @@ -19,8 +19,7 @@ final long putImpl(long[] kvs, long key, long value, boolean insertOnly) { @Override protected final long putImplNoTranslate(long[] kvs, long key, long value, boolean insertOnly) { - // To minimize possible painful effects of nonsynchronized access to our array, we get the - // reference once. + // To minimize possible painful effects of nonsynchronized access to our array, we get the reference once. 
int location = getLocationFor(kvs, key); if (location >= 0) { // Item found, so replace it (unless 'insertOnly' is set). @@ -35,8 +34,7 @@ protected final long putImplNoTranslate(long[] kvs, long key, long value, boolea location = -location - 1; ++size; checkSize(SIZE_LIMIT1); - // The slot is either empty or removed. If we're about to consume an empty slot, then update - // our counter. + // The slot is either empty or removed. If we're about to consume an empty slot, then update our counter. if (kvs[location] == SPECIAL_KEY_FOR_EMPTY_SLOT) { ++nonEmptySlots; } @@ -45,14 +43,10 @@ protected final long putImplNoTranslate(long[] kvs, long key, long value, boolea // Did we run out of empty slots? if (nonEmptySlots >= rehashThreshold) { - // This means we're low on empty slots. We might be low on empty slots because we've - // done a lot of - // deletions of previous items (in this case 'size' could be small), or because we've - // done a lot of - // insertions (in this case 'size' would be close to 'nonEmptySlots'). In the former - // case we would rather - // rehash to the same size. In the latter case we would like to grow the hash table. The - // heuristic we use to + // This means we're low on empty slots. We might be low on empty slots because we've done a lot of + // deletions of previous items (in this case 'size' could be small), or because we've done a lot of + // insertions (in this case 'size' would be close to 'nonEmptySlots'). In the former case we would rather + // rehash to the same size. In the latter case we would like to grow the hash table. The heuristic we use to // make this decision is if size exceeds 2/3 of the nonEmptySlots. boolean wantResize = size >= nonEmptySlots * 2 / 3; rehash(kvs, wantResize, 1); @@ -66,8 +60,7 @@ final long getImpl(long[] kvs, long key) { return noEntryValue; } key = fixKey(key); - // To minimize possible painful effects of nonsynchronized access to our array, we get the - // reference once. 
+ // To minimize possible painful effects of nonsynchronized access to our array, we get the reference once. final int location = getLocationFor(kvs, key); if (location < 0) { return noEntryValue; @@ -80,8 +73,7 @@ final long removeImpl(long[] kvs, long key) { return noEntryValue; } key = fixKey(key); - // To minimize possible painful effects of nonsynchronized access to our array, we get the - // reference once. + // To minimize possible painful effects of nonsynchronized access to our array, we get the reference once. final int location = getLocationFor(kvs, key); if (location < 0) { return noEntryValue; @@ -113,8 +105,7 @@ private static int getLocationFor(long[] kvs, long target) { return -probe - 1; } - // These slots might also have been deleted slots. If so, we need to keep searching (until - // key found or the + // These slots might also have been deleted slots. If so, we need to keep searching (until key found or the // first empty slot), but we remember the first deleted slot. int priorDeletedSlot; if (cKey0 == SPECIAL_KEY_FOR_DELETED_SLOT) { @@ -132,10 +123,8 @@ private static int getLocationFor(long[] kvs, long target) { throw new IllegalStateException("Wrapped around? Impossible."); } - // Same logic as the above. Looking for the specific key and aborting if the empty slot - // is found. - // (But, if the empty slot is found, and if there was an earlier deleted slot, we need - // to return the + // Same logic as the above. Looking for the specific key and aborting if the empty slot is found. 
+ // (But, if the empty slot is found, and if there was an earlier deleted slot, we need to return the // earlier deleted slot) cKey0 = kvs[probe]; if (cKey0 == target) { diff --git a/DB/src/main/java/io/deephaven/db/v2/hashing/HashMapK2V2.java b/DB/src/main/java/io/deephaven/db/v2/hashing/HashMapK2V2.java index 380a9d340c4..d118ef3f494 100644 --- a/DB/src/main/java/io/deephaven/db/v2/hashing/HashMapK2V2.java +++ b/DB/src/main/java/io/deephaven/db/v2/hashing/HashMapK2V2.java @@ -19,8 +19,7 @@ final long putImpl(long[] kvs, long key, long value, boolean insertOnly) { @Override protected final long putImplNoTranslate(long[] kvs, long key, long value, boolean insertOnly) { - // To minimize possible painful effects of nonsynchronized access to our array, we get the - // reference once. + // To minimize possible painful effects of nonsynchronized access to our array, we get the reference once. int location = getLocationFor(kvs, key); if (location >= 0) { // Item found, so replace it (unless 'insertOnly' is set). @@ -35,8 +34,7 @@ protected final long putImplNoTranslate(long[] kvs, long key, long value, boolea location = -location - 1; ++size; checkSize(SIZE_LIMIT2); - // The slot is either empty or removed. If we're about to consume an empty slot, then update - // our counter. + // The slot is either empty or removed. If we're about to consume an empty slot, then update our counter. if (kvs[location] == SPECIAL_KEY_FOR_EMPTY_SLOT) { ++nonEmptySlots; } @@ -45,14 +43,10 @@ protected final long putImplNoTranslate(long[] kvs, long key, long value, boolea // Did we run out of empty slots? if (nonEmptySlots >= rehashThreshold) { - // This means we're low on empty slots. We might be low on empty slots because we've - // done a lot of - // deletions of previous items (in this case 'size' could be small), or because we've - // done a lot of - // insertions (in this case 'size' would be close to 'nonEmptySlots'). 
In the former - // case we would rather - // rehash to the same size. In the latter case we would like to grow the hash table. The - // heuristic we use to + // This means we're low on empty slots. We might be low on empty slots because we've done a lot of + // deletions of previous items (in this case 'size' could be small), or because we've done a lot of + // insertions (in this case 'size' would be close to 'nonEmptySlots'). In the former case we would rather + // rehash to the same size. In the latter case we would like to grow the hash table. The heuristic we use to // make this decision is if size exceeds 2/3 of the nonEmptySlots. boolean wantResize = size >= nonEmptySlots * 2 / 3; rehash(kvs, wantResize, 2); @@ -66,8 +60,7 @@ final long getImpl(long[] kvs, long key) { return noEntryValue; } key = fixKey(key); - // To minimize possible painful effects of nonsynchronized access to our array, we get the - // reference once. + // To minimize possible painful effects of nonsynchronized access to our array, we get the reference once. final int location = getLocationFor(kvs, key); if (location < 0) { return noEntryValue; @@ -80,8 +73,7 @@ final long removeImpl(long[] kvs, long key) { return noEntryValue; } key = fixKey(key); - // To minimize possible painful effects of nonsynchronized access to our array, we get the - // reference once. + // To minimize possible painful effects of nonsynchronized access to our array, we get the reference once. final int location = getLocationFor(kvs, key); if (location < 0) { return noEntryValue; @@ -120,8 +112,7 @@ private static int getLocationFor(long[] kvs, long target) { return -(probe + 2) - 1; } - // These slots might also have been deleted slots. If so, we need to keep searching (until - // key found or the + // These slots might also have been deleted slots. If so, we need to keep searching (until key found or the // first empty slot), but we remember the first deleted slot. 
int priorDeletedSlot; if (cKey0 == SPECIAL_KEY_FOR_DELETED_SLOT) { @@ -141,10 +132,8 @@ private static int getLocationFor(long[] kvs, long target) { throw new IllegalStateException("Wrapped around? Impossible."); } - // Same logic as the above. Looking for the specific key and aborting if the empty slot - // is found. - // (But, if the empty slot is found, and if there was an earlier deleted slot, we need - // to return the + // Same logic as the above. Looking for the specific key and aborting if the empty slot is found. + // (But, if the empty slot is found, and if there was an earlier deleted slot, we need to return the // earlier deleted slot) cKey0 = kvs[probe]; if (cKey0 == target) { diff --git a/DB/src/main/java/io/deephaven/db/v2/hashing/HashMapK4V4.java b/DB/src/main/java/io/deephaven/db/v2/hashing/HashMapK4V4.java index 9cf47351022..f0091300fdd 100644 --- a/DB/src/main/java/io/deephaven/db/v2/hashing/HashMapK4V4.java +++ b/DB/src/main/java/io/deephaven/db/v2/hashing/HashMapK4V4.java @@ -18,8 +18,7 @@ final long putImpl(long[] kvs, long key, long value, boolean insertOnly) { } protected final long putImplNoTranslate(long[] kvs, long key, long value, boolean insertOnly) { - // To minimize possible painful effects of nonsynchronized access to our array, we get the - // reference once. + // To minimize possible painful effects of nonsynchronized access to our array, we get the reference once. int location = getLocationFor(kvs, key); if (location >= 0) { // Item found, so replace it (unless 'insertOnly' is set). @@ -34,8 +33,7 @@ protected final long putImplNoTranslate(long[] kvs, long key, long value, boolea location = -location - 1; ++size; checkSize(SIZE_LIMIT4); - // The slot is either empty or removed. If we're about to consume an empty slot, then update - // our counter. + // The slot is either empty or removed. If we're about to consume an empty slot, then update our counter. 
if (kvs[location] == SPECIAL_KEY_FOR_EMPTY_SLOT) { ++nonEmptySlots; } @@ -44,14 +42,10 @@ protected final long putImplNoTranslate(long[] kvs, long key, long value, boolea // Did we run out of empty slots? if (nonEmptySlots >= rehashThreshold) { - // This means we're low on empty slots. We might be low on empty slots because we've - // done a lot of - // deletions of previous items (in this case 'size' could be small), or because we've - // done a lot of - // insertions (in this case 'size' would be close to 'nonEmptySlots'). In the former - // case we would rather - // rehash to the same size. In the latter case we would like to grow the hash table. The - // heuristic we use to + // This means we're low on empty slots. We might be low on empty slots because we've done a lot of + // deletions of previous items (in this case 'size' could be small), or because we've done a lot of + // insertions (in this case 'size' would be close to 'nonEmptySlots'). In the former case we would rather + // rehash to the same size. In the latter case we would like to grow the hash table. The heuristic we use to // make this decision is if size exceeds 2/3 of the nonEmptySlots. boolean wantResize = size >= nonEmptySlots * 2 / 3; rehash(kvs, wantResize, 4); @@ -65,8 +59,7 @@ final long getImpl(long[] kvs, long key) { return noEntryValue; } key = fixKey(key); - // To minimize possible painful effects of nonsynchronized access to our array, we get the - // reference once. + // To minimize possible painful effects of nonsynchronized access to our array, we get the reference once. final int location = getLocationFor(kvs, key); if (location < 0) { return noEntryValue; @@ -79,8 +72,7 @@ final long removeImpl(long[] kvs, long key) { return noEntryValue; } key = fixKey(key); - // To minimize possible painful effects of nonsynchronized access to our array, we get the - // reference once. + // To minimize possible painful effects of nonsynchronized access to our array, we get the reference once. 
final int location = getLocationFor(kvs, key); if (location < 0) { return noEntryValue; @@ -133,8 +125,7 @@ private static int getLocationFor(long[] kvs, long target) { return -(probe + 6) - 1; } - // These slots might also have been deleted slots. If so, we need to keep searching (until - // key found or the + // These slots might also have been deleted slots. If so, we need to keep searching (until key found or the // first empty slot), but we remember the first deleted slot. int priorDeletedSlot; if (cKey0 == SPECIAL_KEY_FOR_DELETED_SLOT) { @@ -158,10 +149,8 @@ private static int getLocationFor(long[] kvs, long target) { throw new IllegalStateException("Wrapped around? Impossible."); } - // Same logic as the above. Looking for the specific key and aborting if the empty slot - // is found. - // (But, if the empty slot is found, and if there was an earlier deleted slot, we need - // to return the + // Same logic as the above. Looking for the specific key and aborting if the empty slot is found. 
+ // (But, if the empty slot is found, and if there was an earlier deleted slot, we need to return the // earlier deleted slot) cKey0 = kvs[probe]; if (cKey0 == target) { diff --git a/DB/src/main/java/io/deephaven/db/v2/hashing/ReplicateHashing.java b/DB/src/main/java/io/deephaven/db/v2/hashing/ReplicateHashing.java index 785c6424605..f5a12eed852 100644 --- a/DB/src/main/java/io/deephaven/db/v2/hashing/ReplicateHashing.java +++ b/DB/src/main/java/io/deephaven/db/v2/hashing/ReplicateHashing.java @@ -19,58 +19,50 @@ public class ReplicateHashing { public static void main(String[] args) throws IOException { ReplicatePrimitiveCode.charToAll(CharChunkHasher.class, ReplicatePrimitiveCode.MAIN_SRC); - final String objectHasher = ReplicatePrimitiveCode.charToObject(CharChunkHasher.class, - ReplicatePrimitiveCode.MAIN_SRC); + final String objectHasher = + ReplicatePrimitiveCode.charToObject(CharChunkHasher.class, ReplicatePrimitiveCode.MAIN_SRC); fixupObjectChunkHasher(objectHasher); - ReplicatePrimitiveCode.charToIntegers(CharToIntegerCast.class, - ReplicatePrimitiveCode.MAIN_SRC); - ReplicatePrimitiveCode.charToIntegers(CharToLongCast.class, - ReplicatePrimitiveCode.MAIN_SRC); - ReplicatePrimitiveCode.charToIntegers(CharToIntegerCastWithOffset.class, - ReplicatePrimitiveCode.MAIN_SRC); - ReplicatePrimitiveCode.charToIntegers(CharToLongCastWithOffset.class, - ReplicatePrimitiveCode.MAIN_SRC); - - final List paths = ReplicatePrimitiveCode.charToAll(CharChunkEquals.class, - ReplicatePrimitiveCode.MAIN_SRC); - final String floatPath = paths.stream().filter(p -> p.contains("Float")).findFirst() - .orElseThrow(FileNotFoundException::new); - final String doublePath = paths.stream().filter(p -> p.contains("Double")).findFirst() - .orElseThrow(FileNotFoundException::new); + ReplicatePrimitiveCode.charToIntegers(CharToIntegerCast.class, ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.charToIntegers(CharToLongCast.class, ReplicatePrimitiveCode.MAIN_SRC); + 
ReplicatePrimitiveCode.charToIntegers(CharToIntegerCastWithOffset.class, ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.charToIntegers(CharToLongCastWithOffset.class, ReplicatePrimitiveCode.MAIN_SRC); + + final List paths = + ReplicatePrimitiveCode.charToAll(CharChunkEquals.class, ReplicatePrimitiveCode.MAIN_SRC); + final String floatPath = + paths.stream().filter(p -> p.contains("Float")).findFirst().orElseThrow(FileNotFoundException::new); + final String doublePath = + paths.stream().filter(p -> p.contains("Double")).findFirst().orElseThrow(FileNotFoundException::new); fixupFloatChunkEquals(floatPath); fixupDoubleChunkEquals(doublePath); - final String objectIdentityEquals = ReplicatePrimitiveCode - .charToObject(CharChunkEquals.class, ReplicatePrimitiveCode.MAIN_SRC); + final String objectIdentityEquals = + ReplicatePrimitiveCode.charToObject(CharChunkEquals.class, ReplicatePrimitiveCode.MAIN_SRC); fixupObjectChunkIdentityEquals(objectIdentityEquals); - final String objectEquals = ReplicatePrimitiveCode.charToObject(CharChunkEquals.class, - ReplicatePrimitiveCode.MAIN_SRC); + final String objectEquals = + ReplicatePrimitiveCode.charToObject(CharChunkEquals.class, ReplicatePrimitiveCode.MAIN_SRC); fixupObjectChunkEquals(objectEquals); - final List compactKernels = ReplicatePrimitiveCode - .charToAll(CharCompactKernel.class, ReplicatePrimitiveCode.MAIN_SRC); - final String objectCompact = ReplicatePrimitiveCode.charToObject(CharCompactKernel.class, - ReplicatePrimitiveCode.MAIN_SRC); + final List compactKernels = + ReplicatePrimitiveCode.charToAll(CharCompactKernel.class, ReplicatePrimitiveCode.MAIN_SRC); + final String objectCompact = + ReplicatePrimitiveCode.charToObject(CharCompactKernel.class, ReplicatePrimitiveCode.MAIN_SRC); fixupObjectCompact(objectCompact); // noinspection OptionalGetWithoutIsPresent - fixupBooleanCompact( - compactKernels.stream().filter(x -> x.contains("Boolean")).findFirst().get()); - fixupFloatCompact( - 
compactKernels.stream().filter(x -> x.contains("Double")).findFirst().get(), "Double"); - fixupFloatCompact( - compactKernels.stream().filter(x -> x.contains("Float")).findFirst().get(), "Float"); + fixupBooleanCompact(compactKernels.stream().filter(x -> x.contains("Boolean")).findFirst().get()); + fixupFloatCompact(compactKernels.stream().filter(x -> x.contains("Double")).findFirst().get(), "Double"); + fixupFloatCompact(compactKernels.stream().filter(x -> x.contains("Float")).findFirst().get(), "Float"); } private static void fixupObjectChunkHasher(String objectPath) throws IOException { final File objectFile = new File(objectPath); List lines = FileUtils.readLines(objectFile, Charset.defaultCharset()); lines = addImport(lines, Objects.class); - FileUtils.writeLines(objectFile, - globalReplacements(fixupChunkAttributes(lines), "Object.hashCode", "Objects.hashCode", - "TypeUtils.unbox\\(\\(Object\\)value\\)", "value")); + FileUtils.writeLines(objectFile, globalReplacements(fixupChunkAttributes(lines), "Object.hashCode", + "Objects.hashCode", "TypeUtils.unbox\\(\\(Object\\)value\\)", "value")); } private static void fixupObjectChunkEquals(String objectPath) throws IOException { @@ -78,7 +70,7 @@ private static void fixupObjectChunkEquals(String objectPath) throws IOException List lines = FileUtils.readLines(objectFile, Charset.defaultCharset()); lines = addImport(lines, Objects.class); FileUtils.writeLines(objectFile, simpleFixup(fixupChunkAttributes(lines), - "eq", "lhs == rhs", "Objects.equals(lhs, rhs)")); + "eq", "lhs == rhs", "Objects.equals(lhs, rhs)")); } private static void fixupBooleanCompact(String booleanPath) throws IOException { @@ -86,29 +78,28 @@ private static void fixupBooleanCompact(String booleanPath) throws IOException { List lines = FileUtils.readLines(objectFile, Charset.defaultCharset()); lines = replaceRegion(lines, "compactAndCount", Arrays.asList( - " int trueValues = 0;" + - " int falseValues = 0;" + - " final int end = start + length;" + 
- " for (int rpos = start; rpos < end; ++rpos) {" + - " final boolean nextValue = valueChunk.get(rpos);" + - " if (nextValue) {" + - " trueValues++;" + - " }" + - " else {" + - " falseValues++;" + - " }" + + " int trueValues = 0;" + + " int falseValues = 0;" + + " final int end = start + length;" + + " for (int rpos = start; rpos < end; ++rpos) {" + + " final boolean nextValue = valueChunk.get(rpos);" + + " if (nextValue) {" + + " trueValues++;" + + " }" + + " else {" + + " falseValues++;" + + " }" + + " }", + " if (trueValues > 0) {", + " valueChunk.set(++wpos, true);", + " counts.set(wpos, trueValues);", " }", - " if (trueValues > 0) {", - " valueChunk.set(++wpos, true);", - " counts.set(wpos, trueValues);", - " }", - " if (falseValues > 0) {", - " valueChunk.set(++wpos, false);", - " counts.set(wpos, falseValues);", - " }")); - - lines = replaceRegion(lines, "shouldIgnore", - Collections.singletonList(" return false;")); + " if (falseValues > 0) {", + " valueChunk.set(++wpos, false);", + " counts.set(wpos, falseValues);", + " }")); + + lines = replaceRegion(lines, "shouldIgnore", Collections.singletonList(" return false;")); lines = removeImport(lines, "\\s*import io.deephaven.db.util.DhBooleanComparisons;"); lines = removeImport(lines, "\\s*import static.*QueryConstants.*;"); @@ -121,50 +112,46 @@ private static void fixupObjectCompact(String objectPath) throws IOException { List lines = FileUtils.readLines(objectFile, Charset.defaultCharset()); lines = fixupChunkAttributes(lines, "T"); lines = globalReplacements(lines, "public static void", "public static void", - "private static void", "private static void", - "public static int", "public static int", - "final Object nextValue", "final T nextValue"); + "private static void", "private static void", + "public static int", "public static int", + "final Object nextValue", "final T nextValue"); lines = globalReplacements(lines, "NULL_OBJECT", "null"); lines = removeImport(lines, "\\s*import 
static.*QueryConstants.*;"); FileUtils.writeLines(objectFile, lines); } - private static void fixupFloatCompact(String doublePath, String typeOfFloat) - throws IOException { + private static void fixupFloatCompact(String doublePath, String typeOfFloat) throws IOException { final File objectFile = new File(doublePath); List lines = FileUtils.readLines(objectFile, Charset.defaultCharset()); - lines = replaceRegion(lines, "shouldIgnore", - Collections.singletonList(" return value == NULL_" + typeOfFloat.toUpperCase() - + " || " + typeOfFloat + ".isNaN(value);")); + lines = replaceRegion(lines, "shouldIgnore", Collections.singletonList( + " return value == NULL_" + typeOfFloat.toUpperCase() + " || " + typeOfFloat + ".isNaN(value);")); FileUtils.writeLines(objectFile, lines); } private static void fixupObjectChunkIdentityEquals(String objectPath) throws IOException { final File objectChunkEqualsFileName = new File(objectPath); final File objectChunkIdentifyEqualsFileName = - new File(objectChunkEqualsFileName.getParent(), "ObjectChunkIdentityEquals.java"); + new File(objectChunkEqualsFileName.getParent(), "ObjectChunkIdentityEquals.java"); Assert.eqTrue(objectChunkEqualsFileName.renameTo(objectChunkIdentifyEqualsFileName), - "objectChunkEqualsFileName.renameTo(objectChunkIdentifyEqualsFileName)"); + "objectChunkEqualsFileName.renameTo(objectChunkIdentifyEqualsFileName)"); - final List lines = - FileUtils.readLines(objectChunkIdentifyEqualsFileName, Charset.defaultCharset()); - FileUtils.writeLines(objectChunkIdentifyEqualsFileName, - simpleFixup(fixupChunkAttributes(lines), + final List lines = FileUtils.readLines(objectChunkIdentifyEqualsFileName, Charset.defaultCharset()); + FileUtils.writeLines(objectChunkIdentifyEqualsFileName, simpleFixup(fixupChunkAttributes(lines), "name", "ObjectChunkEquals", "ObjectChunkIdentityEquals")); } private static void fixupDoubleChunkEquals(String doublePath) throws IOException { final File objectFile = new File(doublePath); final List 
lines = FileUtils.readLines(objectFile, Charset.defaultCharset()); - FileUtils.writeLines(objectFile, simpleFixup(lines, "eq", "lhs == rhs", - "((Double.isNaN(lhs) && Double.isNaN(rhs)) || lhs == rhs)")); + FileUtils.writeLines(objectFile, + simpleFixup(lines, "eq", "lhs == rhs", "((Double.isNaN(lhs) && Double.isNaN(rhs)) || lhs == rhs)")); } private static void fixupFloatChunkEquals(String floatPath) throws IOException { final File objectFile = new File(floatPath); final List lines = FileUtils.readLines(objectFile, Charset.defaultCharset()); - FileUtils.writeLines(objectFile, simpleFixup(lines, "eq", "lhs == rhs", - "((Float.isNaN(lhs) && Float.isNaN(rhs)) || lhs == rhs)")); + FileUtils.writeLines(objectFile, + simpleFixup(lines, "eq", "lhs == rhs", "((Float.isNaN(lhs) && Float.isNaN(rhs)) || lhs == rhs)")); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/iterators/ColumnIterator.java b/DB/src/main/java/io/deephaven/db/v2/iterators/ColumnIterator.java index bc9fcad3db1..22d5db4a4c4 100644 --- a/DB/src/main/java/io/deephaven/db/v2/iterators/ColumnIterator.java +++ b/DB/src/main/java/io/deephaven/db/v2/iterators/ColumnIterator.java @@ -26,8 +26,7 @@ public class ColumnIterator implements Iterator { * @param index index for the column source * @param columnSource column source */ - public ColumnIterator(@NotNull final Index index, - @NotNull final ColumnSource columnSource) { + public ColumnIterator(@NotNull final Index index, @NotNull final ColumnSource columnSource) { this.columnSource = columnSource; indexIterator = index.iterator(); } diff --git a/DB/src/main/java/io/deephaven/db/v2/join/BucketedChunkedAjMergedListener.java b/DB/src/main/java/io/deephaven/db/v2/join/BucketedChunkedAjMergedListener.java index dad1d706e25..e8b8b74fa80 100644 --- a/DB/src/main/java/io/deephaven/db/v2/join/BucketedChunkedAjMergedListener.java +++ b/DB/src/main/java/io/deephaven/db/v2/join/BucketedChunkedAjMergedListener.java @@ -65,30 +65,29 @@ public class 
BucketedChunkedAjMergedListener extends MergedListener { private final ModifiedColumnSet resultModifiedColumnSet; private final ObjectArraySource sequentialBuilders = - new ObjectArraySource<>(Index.SequentialBuilder.class); + new ObjectArraySource<>(Index.SequentialBuilder.class); private final LongArraySource slots = new LongArraySource(); public BucketedChunkedAjMergedListener(JoinListenerRecorder leftRecorder, - JoinListenerRecorder rightRecorder, - String listenerDescription, - QueryTable result, - QueryTable leftTable, - QueryTable rightTable, - MatchPair[] columnsToMatch, - MatchPair stampPair, - MatchPair[] columnsToAdd, - ColumnSource[] leftSources, - ColumnSource[] rightSources, - ColumnSource leftStampSource, - ColumnSource rightStampSource, - AsOfJoinHelper.SsaFactory leftSsaFactory, AsOfJoinHelper.SsaFactory rightSsaFactory, - SortingOrder order, - boolean disallowExactMatch, - SsaSsaStamp ssaSsaStamp, - JoinControl control, RightIncrementalChunkedAsOfJoinStateManager asOfJoinStateManager, - RedirectionIndex redirectionIndex) { - super(Arrays.asList(leftRecorder, rightRecorder), Collections.emptyList(), - listenerDescription, result); + JoinListenerRecorder rightRecorder, + String listenerDescription, + QueryTable result, + QueryTable leftTable, + QueryTable rightTable, + MatchPair[] columnsToMatch, + MatchPair stampPair, + MatchPair[] columnsToAdd, + ColumnSource[] leftSources, + ColumnSource[] rightSources, + ColumnSource leftStampSource, + ColumnSource rightStampSource, + AsOfJoinHelper.SsaFactory leftSsaFactory, AsOfJoinHelper.SsaFactory rightSsaFactory, + SortingOrder order, + boolean disallowExactMatch, + SsaSsaStamp ssaSsaStamp, + JoinControl control, RightIncrementalChunkedAsOfJoinStateManager asOfJoinStateManager, + RedirectionIndex redirectionIndex) { + super(Arrays.asList(leftRecorder, rightRecorder), Collections.emptyList(), listenerDescription, result); this.leftRecorder = leftRecorder; this.rightRecorder = rightRecorder; this.leftTable = 
leftTable; @@ -116,18 +115,16 @@ public BucketedChunkedAjMergedListener(JoinListenerRecorder leftRecorder, leftStampColumn = leftTable.newModifiedColumnSet(stampPair.left()); rightStampColumn = rightTable.newModifiedColumnSet(stampPair.right()); leftKeyColumns = leftTable.newModifiedColumnSet(MatchPair.getLeftColumns(columnsToMatch)); - rightKeyColumns = - rightTable.newModifiedColumnSet(MatchPair.getRightColumns(columnsToMatch)); + rightKeyColumns = rightTable.newModifiedColumnSet(MatchPair.getRightColumns(columnsToMatch)); allRightColumns = result.newModifiedColumnSet(MatchPair.getLeftColumns(columnsToAdd)); - leftTransformer = leftTable.newModifiedColumnSetTransformer(result, - leftTable.getDefinition().getColumnNamesArray()); + leftTransformer = + leftTable.newModifiedColumnSetTransformer(result, leftTable.getDefinition().getColumnNamesArray()); rightTransformer = rightTable.newModifiedColumnSetTransformer(result, columnsToAdd); leftChunkSize = control.leftChunkSize(); rightChunkSize = control.rightChunkSize(); - resultModifiedColumnSet = - result.newModifiedColumnSet(result.getDefinition().getColumnNamesArray()); + resultModifiedColumnSet = result.newModifiedColumnSet(result.getDefinition().getColumnNamesArray()); } @Override @@ -140,32 +137,30 @@ public void process() { final boolean rightTicked = rightRecorder.recordedVariablesAreValid(); final boolean leftStampModified = leftTicked && leftRecorder.getModified().nonempty() - && leftRecorder.getModifiedColumnSet().containsAny(leftStampColumn); + && leftRecorder.getModifiedColumnSet().containsAny(leftStampColumn); final boolean leftKeysModified = leftTicked && leftRecorder.getModified().nonempty() - && leftRecorder.getModifiedColumnSet().containsAny(leftKeyColumns); - final boolean leftAdditionsOrRemovals = leftKeysModified || leftStampModified || (leftTicked - && (leftRecorder.getAdded().nonempty() || leftRecorder.getRemoved().nonempty())); + && leftRecorder.getModifiedColumnSet().containsAny(leftKeyColumns); 
+ final boolean leftAdditionsOrRemovals = leftKeysModified || leftStampModified + || (leftTicked && (leftRecorder.getAdded().nonempty() || leftRecorder.getRemoved().nonempty())); final ColumnSource.FillContext leftFillContext = - leftAdditionsOrRemovals ? leftStampSource.makeFillContext(leftChunkSize) : null; + leftAdditionsOrRemovals ? leftStampSource.makeFillContext(leftChunkSize) : null; final WritableChunk leftStampValues = - leftAdditionsOrRemovals ? stampChunkType.makeWritableChunk(leftChunkSize) : null; + leftAdditionsOrRemovals ? stampChunkType.makeWritableChunk(leftChunkSize) : null; final WritableLongChunk leftStampKeys = - leftAdditionsOrRemovals ? WritableLongChunk.makeWritableChunk(leftChunkSize) : null; - final LongSortKernel sortKernel = LongSortKernel - .makeContext(stampChunkType, order, Math.max(leftChunkSize, rightChunkSize), true); + leftAdditionsOrRemovals ? WritableLongChunk.makeWritableChunk(leftChunkSize) : null; + final LongSortKernel sortKernel = + LongSortKernel.makeContext(stampChunkType, order, Math.max(leftChunkSize, rightChunkSize), true); final Index.RandomBuilder modifiedBuilder = Index.FACTORY.getRandomBuilder(); - // first we remove anything that is not of interest from the left hand side, because we - // don't want to + // first we remove anything that is not of interest from the left hand side, because we don't want to // process the relevant right hand side changes if (leftTicked) { final Index leftRestampRemovals; if (leftStampModified || leftKeysModified) { - leftRestampRemovals = - leftRecorder.getRemoved().union(leftRecorder.getModifiedPreShift()); + leftRestampRemovals = leftRecorder.getRemoved().union(leftRecorder.getModifiedPreShift()); } else { leftRestampRemovals = leftRecorder.getRemoved(); } @@ -176,10 +171,9 @@ public void process() { if (leftRestampRemovals.nonempty()) { leftRestampRemovals.forAllLongs(redirectionIndex::removeVoid); - // We first do a probe pass, adding all of the removals to a builder in the as of 
- // join state manager - final int removedSlotCount = asOfJoinStateManager - .markForRemoval(leftRestampRemovals, leftKeySources, slots, sequentialBuilders); + // We first do a probe pass, adding all of the removals to a builder in the as of join state manager + final int removedSlotCount = asOfJoinStateManager.markForRemoval(leftRestampRemovals, leftKeySources, + slots, sequentialBuilders); final MutableObject leftIndexOutput = new MutableObject<>(); @@ -190,25 +184,21 @@ public void process() { leftRemoved.forAllLongs(redirectionIndex::removeVoid); - final SegmentedSortedArray leftSsa = - asOfJoinStateManager.getLeftSsaOrIndex(slot, leftIndexOutput); + final SegmentedSortedArray leftSsa = asOfJoinStateManager.getLeftSsaOrIndex(slot, leftIndexOutput); if (leftSsa == null) { leftIndexOutput.getValue().remove(leftRemoved); leftIndexOutput.setValue(null); continue; } - try (final OrderedKeys.Iterator leftOkIt = - leftRemoved.getOrderedKeysIterator()) { + try (final OrderedKeys.Iterator leftOkIt = leftRemoved.getOrderedKeysIterator()) { while (leftOkIt.hasMore()) { assert leftFillContext != null; assert leftStampValues != null; - final OrderedKeys chunkOk = - leftOkIt.getNextOrderedKeysWithLength(leftChunkSize); + final OrderedKeys chunkOk = leftOkIt.getNextOrderedKeysWithLength(leftChunkSize); - leftStampSource.fillPrevChunk(leftFillContext, leftStampValues, - chunkOk); + leftStampSource.fillPrevChunk(leftFillContext, leftStampValues, chunkOk); chunkOk.fillKeyIndicesChunk(leftStampKeys); sortKernel.sort(leftStampKeys, leftStampValues); @@ -228,40 +218,36 @@ public void process() { if (leftShifted.nonempty()) { try (final Index fullPrevIndex = leftTable.getIndex().getPrevIndex(); - final Index previousToShift = fullPrevIndex.minus(leftRestampRemovals); - final Index relevantShift = getRelevantShifts(leftShifted, previousToShift)) { - // now we apply the left shifts, so that anything in our SSA is a relevant thing - // to stamp + final Index previousToShift = 
fullPrevIndex.minus(leftRestampRemovals); + final Index relevantShift = getRelevantShifts(leftShifted, previousToShift)) { + // now we apply the left shifts, so that anything in our SSA is a relevant thing to stamp redirectionIndex.applyShift(previousToShift, leftShifted); if (relevantShift.nonempty()) { - try ( - final SizedSafeCloseable leftShiftFillContext = + try (final SizedSafeCloseable leftShiftFillContext = new SizedSafeCloseable<>(leftStampSource::makeFillContext); - final SizedSafeCloseable> shiftSortContext = - new SizedSafeCloseable<>(size -> LongSortKernel - .makeContext(stampChunkType, order, size, true)); - final SizedLongChunk stampKeys = new SizedLongChunk<>(); - final SizedChunk stampValues = - new SizedChunk<>(stampChunkType)) { + final SizedSafeCloseable> shiftSortContext = + new SizedSafeCloseable<>( + size -> LongSortKernel.makeContext(stampChunkType, order, size, true)); + final SizedLongChunk stampKeys = new SizedLongChunk<>(); + final SizedChunk stampValues = new SizedChunk<>(stampChunkType)) { sequentialBuilders.ensureCapacity(relevantShift.size()); slots.ensureCapacity(relevantShift.size()); - final int shiftedSlotCount = asOfJoinStateManager.gatherShiftIndex( - relevantShift, leftKeySources, slots, sequentialBuilders); + final int shiftedSlotCount = asOfJoinStateManager.gatherShiftIndex(relevantShift, + leftKeySources, slots, sequentialBuilders); for (int slotIndex = 0; slotIndex < shiftedSlotCount; ++slotIndex) { final Index shiftedIndex = indexFromBuilder(slotIndex); final long slot = slots.getLong(slotIndex); final byte state = asOfJoinStateManager.getState(slot); - final IndexShiftData shiftDataForSlot = - leftShifted.intersect(shiftedIndex); + final IndexShiftData shiftDataForSlot = leftShifted.intersect(shiftedIndex); if ((state & ENTRY_RIGHT_MASK) == ENTRY_RIGHT_IS_EMPTY) { - // if the left is empty, we should be an index entry rather than - // an SSA, and we can not be empty, because we are responsive + // if the left is empty, 
we should be an index entry rather than an SSA, and we can + // not be empty, because we are responsive final Index leftIndex = asOfJoinStateManager.getLeftIndex(slot); shiftDataForSlot.apply(leftIndex); shiftedIndex.close(); @@ -269,19 +255,17 @@ public void process() { continue; } - final SegmentedSortedArray leftSsa = - asOfJoinStateManager.getLeftSsa(slot); + final SegmentedSortedArray leftSsa = asOfJoinStateManager.getLeftSsa(slot); - final IndexShiftData.Iterator slotSit = - shiftDataForSlot.applyIterator(); + final IndexShiftData.Iterator slotSit = shiftDataForSlot.applyIterator(); while (slotSit.hasNext()) { slotSit.next(); - final Index indexToShift = shiftedIndex - .subindexByKey(slotSit.beginRange(), slotSit.endRange()); - ChunkedAjUtilities.applyOneShift(leftSsa, leftChunkSize, - leftStampSource, leftShiftFillContext, shiftSortContext, - stampKeys, stampValues, slotSit, indexToShift); + final Index indexToShift = + shiftedIndex.subindexByKey(slotSit.beginRange(), slotSit.endRange()); + ChunkedAjUtilities.applyOneShift(leftSsa, leftChunkSize, leftStampSource, + leftShiftFillContext, shiftSortContext, stampKeys, stampValues, slotSit, + indexToShift); indexToShift.close(); } } @@ -298,47 +282,39 @@ public void process() { if (rightTicked) { // next we remove and add things from the right hand side - final boolean rightKeysModified = - rightRecorder.getModifiedColumnSet().containsAny(rightKeyColumns); - final boolean rightStampModified = - rightRecorder.getModifiedColumnSet().containsAny(rightStampColumn); + final boolean rightKeysModified = rightRecorder.getModifiedColumnSet().containsAny(rightKeyColumns); + final boolean rightStampModified = rightRecorder.getModifiedColumnSet().containsAny(rightStampColumn); final Index rightRestampRemovals; final Index rightRestampAdditions; if (rightKeysModified || rightStampModified) { rightRestampAdditions = rightRecorder.getAdded().union(rightRecorder.getModified()); - rightRestampRemovals = - 
rightRecorder.getRemoved().union(rightRecorder.getModifiedPreShift()); + rightRestampRemovals = rightRecorder.getRemoved().union(rightRecorder.getModifiedPreShift()); } else { rightRestampAdditions = rightRecorder.getAdded(); rightRestampRemovals = rightRecorder.getRemoved(); } - // We first do a probe pass, adding all of the removals to a builder in the as of join - // state manager - final long requiredCapacity = - Math.max(rightRestampRemovals.size(), rightRestampAdditions.size()); + // We first do a probe pass, adding all of the removals to a builder in the as of join state manager + final long requiredCapacity = Math.max(rightRestampRemovals.size(), rightRestampAdditions.size()); sequentialBuilders.ensureCapacity(requiredCapacity); slots.ensureCapacity(requiredCapacity); - final int removedSlotCount = asOfJoinStateManager.markForRemoval(rightRestampRemovals, - rightKeySources, slots, sequentialBuilders); + final int removedSlotCount = asOfJoinStateManager.markForRemoval(rightRestampRemovals, rightKeySources, + slots, sequentialBuilders); final MutableObject indexOutput = new MutableObject<>(); - try ( - final WritableLongChunk priorRedirections = + try (final WritableLongChunk priorRedirections = WritableLongChunk.makeWritableChunk(rightChunkSize); - final ColumnSource.FillContext fillContext = - rightStampSource.makeFillContext(rightChunkSize); - final WritableChunk rightStampValues = - stampChunkType.makeWritableChunk(rightChunkSize); - final WritableLongChunk rightStampKeys = - WritableLongChunk.makeWritableChunk(rightChunkSize)) { + final ColumnSource.FillContext fillContext = rightStampSource.makeFillContext(rightChunkSize); + final WritableChunk rightStampValues = stampChunkType.makeWritableChunk(rightChunkSize); + final WritableLongChunk rightStampKeys = + WritableLongChunk.makeWritableChunk(rightChunkSize)) { for (int slotIndex = 0; slotIndex < removedSlotCount; ++slotIndex) { final long slot = slots.getLong(slotIndex); try (final Index rightRemoved = 
indexFromBuilder(slotIndex)) { final SegmentedSortedArray rightSsa = - asOfJoinStateManager.getRightSsaOrIndex(slot, indexOutput); + asOfJoinStateManager.getRightSsaOrIndex(slot, indexOutput); if (rightSsa == null) { indexOutput.getValue().remove(rightRemoved); continue; @@ -350,24 +326,19 @@ public void process() { } final SegmentedSortedArray leftSsa = asOfJoinStateManager.getLeftSsa(slot); - try (final OrderedKeys.Iterator removeIt = - rightRemoved.getOrderedKeysIterator()) { + try (final OrderedKeys.Iterator removeIt = rightRemoved.getOrderedKeysIterator()) { while (removeIt.hasMore()) { - final OrderedKeys chunkOk = - removeIt.getNextOrderedKeysWithLength(rightChunkSize); + final OrderedKeys chunkOk = removeIt.getNextOrderedKeysWithLength(rightChunkSize); - rightStampSource.fillPrevChunk(fillContext, rightStampValues, - chunkOk); + rightStampSource.fillPrevChunk(fillContext, rightStampValues, chunkOk); chunkOk.fillKeyIndicesChunk(rightStampKeys); sortKernel.sort(rightStampKeys, rightStampValues); priorRedirections.setSize(rightChunkSize); - rightSsa.removeAndGetPrior(rightStampValues, rightStampKeys, - priorRedirections); + rightSsa.removeAndGetPrior(rightStampValues, rightStampKeys, priorRedirections); - ssaSsaStamp.processRemovals(leftSsa, rightStampValues, - rightStampKeys, priorRedirections, redirectionIndex, - modifiedBuilder, disallowExactMatch); + ssaSsaStamp.processRemovals(leftSsa, rightStampValues, rightStampKeys, + priorRedirections, redirectionIndex, modifiedBuilder, disallowExactMatch); } } } @@ -378,26 +349,23 @@ public void process() { if (rightShifted.nonempty()) { try (final Index fullPrevIndex = rightTable.getIndex().getPrevIndex(); - final Index previousToShift = fullPrevIndex.minus(rightRestampRemovals); - final Index relevantShift = getRelevantShifts(rightShifted, previousToShift)) { + final Index previousToShift = fullPrevIndex.minus(rightRestampRemovals); + final Index relevantShift = getRelevantShifts(rightShifted, previousToShift)) { 
if (relevantShift.nonempty()) { - try ( - final SizedSafeCloseable rightShiftFillContext = + try (final SizedSafeCloseable rightShiftFillContext = new SizedSafeCloseable<>(rightStampSource::makeFillContext); - final SizedSafeCloseable> shiftSortKernel = - new SizedSafeCloseable<>(sz -> LongSortKernel - .makeContext(stampChunkType, order, sz, true)); - final SizedLongChunk rightStampKeys = - new SizedLongChunk<>(); - final SizedChunk rightStampValues = - new SizedChunk<>(stampChunkType)) { + final SizedSafeCloseable> shiftSortKernel = + new SizedSafeCloseable<>( + sz -> LongSortKernel.makeContext(stampChunkType, order, sz, true)); + final SizedLongChunk rightStampKeys = new SizedLongChunk<>(); + final SizedChunk rightStampValues = new SizedChunk<>(stampChunkType)) { sequentialBuilders.ensureCapacity(relevantShift.size()); slots.ensureCapacity(relevantShift.size()); - final int shiftedSlotCount = asOfJoinStateManager.gatherShiftIndex( - relevantShift, rightKeySources, slots, sequentialBuilders); + final int shiftedSlotCount = asOfJoinStateManager.gatherShiftIndex(relevantShift, + rightKeySources, slots, sequentialBuilders); for (int slotIndex = 0; slotIndex < shiftedSlotCount; ++slotIndex) { final Index shiftedIndex = indexFromBuilder(slotIndex); @@ -411,70 +379,59 @@ public void process() { leftSsa = asOfJoinStateManager.getLeftSsa(slot); } - final IndexShiftData shiftDataForSlot = - rightShifted.intersect(shiftedIndex); + final IndexShiftData shiftDataForSlot = rightShifted.intersect(shiftedIndex); if (leftSsa == null) { - // if the left is empty, we should be an index entry rather than - // an SSA, and we can not be empty, because we are responsive - final Index rightIndex = - asOfJoinStateManager.getRightIndex(slot); + // if the left is empty, we should be an index entry rather than an SSA, and we can + // not be empty, because we are responsive + final Index rightIndex = asOfJoinStateManager.getRightIndex(slot); shiftDataForSlot.apply(rightIndex); 
shiftedIndex.close(); rightIndex.compact(); continue; } - final SegmentedSortedArray rightSsa = - asOfJoinStateManager.getRightSsa(slot); + final SegmentedSortedArray rightSsa = asOfJoinStateManager.getRightSsa(slot); - final IndexShiftData.Iterator slotSit = - shiftDataForSlot.applyIterator(); + final IndexShiftData.Iterator slotSit = shiftDataForSlot.applyIterator(); while (slotSit.hasNext()) { slotSit.next(); - try (final Index indexToShift = shiftedIndex - .subindexByKey(slotSit.beginRange(), slotSit.endRange())) { + try (final Index indexToShift = + shiftedIndex.subindexByKey(slotSit.beginRange(), slotSit.endRange())) { if (slotSit.polarityReversed()) { final int shiftSize = indexToShift.intSize(); rightStampSource.fillPrevChunk( - rightShiftFillContext.ensureCapacity(shiftSize), - rightStampValues.ensureCapacity(shiftSize), - indexToShift); - indexToShift.fillKeyIndicesChunk( - rightStampKeys.ensureCapacity(shiftSize)); - shiftSortKernel.ensureCapacity(shiftSize) - .sort(rightStampKeys.get(), rightStampValues.get()); + rightShiftFillContext.ensureCapacity(shiftSize), + rightStampValues.ensureCapacity(shiftSize), indexToShift); + indexToShift.fillKeyIndicesChunk(rightStampKeys.ensureCapacity(shiftSize)); + shiftSortKernel.ensureCapacity(shiftSize).sort(rightStampKeys.get(), + rightStampValues.get()); ssaSsaStamp.applyShift(leftSsa, rightStampValues.get(), - rightStampKeys.get(), slotSit.shiftDelta(), - redirectionIndex, disallowExactMatch); - rightSsa.applyShiftReverse(rightStampValues.get(), - rightStampKeys.get(), slotSit.shiftDelta()); + rightStampKeys.get(), slotSit.shiftDelta(), redirectionIndex, + disallowExactMatch); + rightSsa.applyShiftReverse(rightStampValues.get(), rightStampKeys.get(), + slotSit.shiftDelta()); } else { try (final OrderedKeys.Iterator shiftIt = - indexToShift.getOrderedKeysIterator()) { + indexToShift.getOrderedKeysIterator()) { while (shiftIt.hasMore()) { final OrderedKeys chunkOk = - shiftIt.getNextOrderedKeysWithLength( - 
rightChunkSize); + shiftIt.getNextOrderedKeysWithLength(rightChunkSize); final int shiftSize = chunkOk.intSize(); chunkOk.fillKeyIndicesChunk( - rightStampKeys.ensureCapacity(shiftSize)); + rightStampKeys.ensureCapacity(shiftSize)); rightStampSource.fillPrevChunk( - rightShiftFillContext - .ensureCapacity(shiftSize), - rightStampValues.ensureCapacity(shiftSize), - chunkOk); - sortKernel.sort(rightStampKeys.get(), - rightStampValues.get()); - - rightSsa.applyShift(rightStampValues.get(), - rightStampKeys.get(), slotSit.shiftDelta()); - ssaSsaStamp.applyShift(leftSsa, - rightStampValues.get(), - rightStampKeys.get(), slotSit.shiftDelta(), - redirectionIndex, disallowExactMatch); + rightShiftFillContext.ensureCapacity(shiftSize), + rightStampValues.ensureCapacity(shiftSize), chunkOk); + sortKernel.sort(rightStampKeys.get(), rightStampValues.get()); + + rightSsa.applyShift(rightStampValues.get(), rightStampKeys.get(), + slotSit.shiftDelta()); + ssaSsaStamp.applyShift(leftSsa, rightStampValues.get(), + rightStampKeys.get(), slotSit.shiftDelta(), + redirectionIndex, disallowExactMatch); } } } @@ -487,20 +444,16 @@ public void process() { } // next we do the additions - final int addedSlotCount = asOfJoinStateManager.buildAdditions(false, - rightRestampAdditions, rightKeySources, slots, sequentialBuilders); - - try ( - final ColumnSource.FillContext rightFillContext = - rightStampSource.makeFillContext(rightChunkSize); - final WritableChunk stampChunk = - stampChunkType.makeWritableChunk(rightChunkSize); - final WritableChunk nextRightValue = - stampChunkType.makeWritableChunk(rightChunkSize); - final WritableLongChunk insertedIndices = - WritableLongChunk.makeWritableChunk(rightChunkSize); - final WritableBooleanChunk retainStamps = - WritableBooleanChunk.makeWritableChunk(rightChunkSize)) { + final int addedSlotCount = asOfJoinStateManager.buildAdditions(false, rightRestampAdditions, + rightKeySources, slots, sequentialBuilders); + + try (final ColumnSource.FillContext 
rightFillContext = rightStampSource.makeFillContext(rightChunkSize); + final WritableChunk stampChunk = stampChunkType.makeWritableChunk(rightChunkSize); + final WritableChunk nextRightValue = stampChunkType.makeWritableChunk(rightChunkSize); + final WritableLongChunk insertedIndices = + WritableLongChunk.makeWritableChunk(rightChunkSize); + final WritableBooleanChunk retainStamps = + WritableBooleanChunk.makeWritableChunk(rightChunkSize)) { for (int slotIndex = 0; slotIndex < addedSlotCount; ++slotIndex) { final long slot = slots.getLong(slotIndex); @@ -554,14 +507,11 @@ public void process() { } - final SegmentedSortedArray rightSsa = - asOfJoinStateManager.getRightSsa(slot, rightSsaFactory); - final SegmentedSortedArray leftSsa = - asOfJoinStateManager.getLeftSsa(slot, leftSsaFactory); + final SegmentedSortedArray rightSsa = asOfJoinStateManager.getRightSsa(slot, rightSsaFactory); + final SegmentedSortedArray leftSsa = asOfJoinStateManager.getLeftSsa(slot, leftSsaFactory); if (processInitial) { - ssaSsaStamp.processEntry(leftSsa, rightSsa, redirectionIndex, - disallowExactMatch); + ssaSsaStamp.processEntry(leftSsa, rightSsa, redirectionIndex, disallowExactMatch); // we've modified everything in the leftssa leftSsa.forAllKeys(modifiedBuilder::addKey); } @@ -569,21 +519,21 @@ public void process() { final int chunks = (rightAdded.intSize() + rightChunkSize - 1) / rightChunkSize; for (int ii = 0; ii < chunks; ++ii) { final int startChunk = chunks - ii - 1; - try (final Index chunkOk = rightAdded.subindexByPos( - startChunk * rightChunkSize, (startChunk + 1) * rightChunkSize)) { + try (final Index chunkOk = rightAdded.subindexByPos(startChunk * rightChunkSize, + (startChunk + 1) * rightChunkSize)) { rightStampSource.fillChunk(rightFillContext, stampChunk, chunkOk); insertedIndices.setSize(chunkOk.intSize()); chunkOk.fillKeyIndicesChunk(insertedIndices); sortKernel.sort(insertedIndices, stampChunk); - final int valuesWithNext = 
rightSsa.insertAndGetNextValue(stampChunk, - insertedIndices, nextRightValue); + final int valuesWithNext = + rightSsa.insertAndGetNextValue(stampChunk, insertedIndices, nextRightValue); final boolean endsWithLastValue = valuesWithNext != stampChunk.size(); if (endsWithLastValue) { Assert.eq(valuesWithNext, "valuesWithNext", stampChunk.size() - 1, - "stampChunk.size() - 1"); + "stampChunk.size() - 1"); stampChunk.setSize(valuesWithNext); stampChunkEquals.notEqual(stampChunk, nextRightValue, retainStamps); stampCompact.compact(nextRightValue, retainStamps); @@ -599,31 +549,25 @@ public void process() { LongCompactKernel.compact(insertedIndices, retainStamps); stampCompact.compact(stampChunk, retainStamps); - ssaSsaStamp.processInsertion(leftSsa, stampChunk, insertedIndices, - nextRightValue, redirectionIndex, modifiedBuilder, - endsWithLastValue, disallowExactMatch); + ssaSsaStamp.processInsertion(leftSsa, stampChunk, insertedIndices, nextRightValue, + redirectionIndex, modifiedBuilder, endsWithLastValue, disallowExactMatch); } } } } - // if the stamp was not modified, then we need to figure out the responsive rows to mark - // as modified - if (!rightStampModified && !rightKeysModified - && rightRecorder.getModified().nonempty()) { + // if the stamp was not modified, then we need to figure out the responsive rows to mark as modified + if (!rightStampModified && !rightKeysModified && rightRecorder.getModified().nonempty()) { slots.ensureCapacity(rightRecorder.getModified().size()); sequentialBuilders.ensureCapacity(rightRecorder.getModified().size()); - final int modifiedSlotCount = asOfJoinStateManager.gatherModifications( - rightRecorder.getModified(), rightKeySources, slots, sequentialBuilders); + final int modifiedSlotCount = asOfJoinStateManager.gatherModifications(rightRecorder.getModified(), + rightKeySources, slots, sequentialBuilders); - try ( - final ColumnSource.FillContext fillContext = - rightStampSource.makeFillContext(rightChunkSize); - final 
WritableChunk rightStampChunk = - stampChunkType.makeWritableChunk(rightChunkSize); - final WritableLongChunk rightStampIndices = - WritableLongChunk.makeWritableChunk(rightChunkSize)) { + try (final ColumnSource.FillContext fillContext = rightStampSource.makeFillContext(rightChunkSize); + final WritableChunk rightStampChunk = stampChunkType.makeWritableChunk(rightChunkSize); + final WritableLongChunk rightStampIndices = + WritableLongChunk.makeWritableChunk(rightChunkSize)) { for (int slotIndex = 0; slotIndex < modifiedSlotCount; ++slotIndex) { final long slot = slots.getLong(slotIndex); @@ -634,21 +578,18 @@ public void process() { continue; } - // if we are not empty on the left, then we must already have created the - // SSA + // if we are not empty on the left, then we must already have created the SSA final SegmentedSortedArray leftSsa = asOfJoinStateManager.getLeftSsa(slot); - try (final OrderedKeys.Iterator modit = - rightModified.getOrderedKeysIterator()) { + try (final OrderedKeys.Iterator modit = rightModified.getOrderedKeysIterator()) { while (modit.hasMore()) { - final OrderedKeys chunkOk = - modit.getNextOrderedKeysWithLength(rightChunkSize); + final OrderedKeys chunkOk = modit.getNextOrderedKeysWithLength(rightChunkSize); rightStampSource.fillChunk(fillContext, rightStampChunk, chunkOk); chunkOk.fillKeyIndicesChunk(rightStampIndices); sortKernel.sort(rightStampIndices, rightStampChunk); - ssaSsaStamp.findModified(leftSsa, redirectionIndex, rightStampChunk, - rightStampIndices, modifiedBuilder, disallowExactMatch); + ssaSsaStamp.findModified(leftSsa, redirectionIndex, rightStampChunk, rightStampIndices, + modifiedBuilder, disallowExactMatch); } } } @@ -661,11 +602,10 @@ public void process() { } if (rightStampModified || rightKeysModified || rightRecorder.getAdded().nonempty() - || rightRecorder.getRemoved().nonempty()) { + || rightRecorder.getRemoved().nonempty()) { downstream.modifiedColumnSet.setAll(allRightColumns); } else { - 
rightTransformer.transform(rightRecorder.getModifiedColumnSet(), - downstream.modifiedColumnSet); + rightTransformer.transform(rightRecorder.getModifiedColumnSet(), downstream.modifiedColumnSet); } } @@ -680,8 +620,8 @@ public void process() { sequentialBuilders.ensureCapacity(leftRestampAdditions.size()); slots.ensureCapacity(leftRestampAdditions.size()); - final int addedSlotCount = asOfJoinStateManager.buildAdditions(true, - leftRestampAdditions, leftKeySources, slots, sequentialBuilders); + final int addedSlotCount = asOfJoinStateManager.buildAdditions(true, leftRestampAdditions, leftKeySources, + slots, sequentialBuilders); for (int slotIndex = 0; slotIndex < addedSlotCount; ++slotIndex) { final long slot = slots.getLong(slotIndex); @@ -718,7 +658,7 @@ public void process() { case ENTRY_LEFT_IS_BUILDER | ENTRY_RIGHT_IS_SSA: case ENTRY_LEFT_IS_INDEX | ENTRY_RIGHT_IS_SSA: throw new IllegalStateException( - "Bad state: " + state + ", slot=" + slot + ", slotIndex=" + slotIndex); + "Bad state: " + state + ", slot=" + slot + ", slotIndex=" + slotIndex); case ENTRY_LEFT_IS_SSA | ENTRY_RIGHT_IS_SSA: break; @@ -739,27 +679,23 @@ public void process() { } - final SegmentedSortedArray rightSsa = - asOfJoinStateManager.getRightSsa(slot, rightSsaFactory); - final SegmentedSortedArray leftSsa = - asOfJoinStateManager.getLeftSsa(slot, leftSsaFactory); + final SegmentedSortedArray rightSsa = asOfJoinStateManager.getRightSsa(slot, rightSsaFactory); + final SegmentedSortedArray leftSsa = asOfJoinStateManager.getLeftSsa(slot, leftSsaFactory); if (processInitial) { - ssaSsaStamp.processEntry(leftSsa, rightSsa, redirectionIndex, - disallowExactMatch); + ssaSsaStamp.processEntry(leftSsa, rightSsa, redirectionIndex, disallowExactMatch); leftSsa.forAllKeys(modifiedBuilder::addKey); } try (final OrderedKeys.Iterator leftOkIt = leftAdded.getOrderedKeysIterator(); - final WritableLongChunk rightKeysForLeft = - WritableLongChunk.makeWritableChunk(leftChunkSize)) { + final 
WritableLongChunk rightKeysForLeft = + WritableLongChunk.makeWritableChunk(leftChunkSize)) { assert leftFillContext != null; assert leftStampValues != null; while (leftOkIt.hasMore()) { - final OrderedKeys chunkOk = - leftOkIt.getNextOrderedKeysWithLength(leftChunkSize); + final OrderedKeys chunkOk = leftOkIt.getNextOrderedKeysWithLength(leftChunkSize); leftStampSource.fillChunk(leftFillContext, leftStampValues, chunkOk); chunkOk.fillKeyIndicesChunk(leftStampKeys); @@ -767,8 +703,8 @@ public void process() { leftSsa.insert(leftStampValues, leftStampKeys); - chunkSsaStamp.processEntry(leftStampValues, leftStampKeys, rightSsa, - rightKeysForLeft, disallowExactMatch); + chunkSsaStamp.processEntry(leftStampValues, leftStampKeys, rightSsa, rightKeysForLeft, + disallowExactMatch); for (int ii = 0; ii < leftStampKeys.size(); ++ii) { final long leftKey = leftStampKeys.get(ii); @@ -784,8 +720,7 @@ public void process() { leftAdded.close(); } - leftTransformer.transform(leftRecorder.getModifiedColumnSet(), - downstream.modifiedColumnSet); + leftTransformer.transform(leftRecorder.getModifiedColumnSet(), downstream.modifiedColumnSet); if (leftKeysModified || leftStampModified) { downstream.modifiedColumnSet.setAll(allRightColumns); } @@ -794,8 +729,8 @@ public void process() { downstream.shifted = leftRecorder.getShifted(); } - SafeCloseable.closeArray(sortKernel, leftStampKeys, leftStampValues, leftFillContext, - leftSsaFactory, rightSsaFactory); + SafeCloseable.closeArray(sortKernel, leftStampKeys, leftStampValues, leftFillContext, leftSsaFactory, + rightSsaFactory); downstream.modified = leftRecorder.getModified().union(modifiedBuilder.getIndex()); @@ -807,8 +742,7 @@ private Index getRelevantShifts(IndexShiftData shifted, Index previousToShift) { final IndexShiftData.Iterator sit = shifted.applyIterator(); while (sit.hasNext()) { sit.next(); - final Index indexToShift = - previousToShift.subindexByKey(sit.beginRange(), sit.endRange()); + final Index indexToShift = 
previousToShift.subindexByKey(sit.beginRange(), sit.endRange()); if (!indexToShift.empty()) { relevantShiftKeys.addIndex(indexToShift); } diff --git a/DB/src/main/java/io/deephaven/db/v2/join/ChunkedAjUtilities.java b/DB/src/main/java/io/deephaven/db/v2/join/ChunkedAjUtilities.java index 917da7b38e5..9191335731e 100644 --- a/DB/src/main/java/io/deephaven/db/v2/join/ChunkedAjUtilities.java +++ b/DB/src/main/java/io/deephaven/db/v2/join/ChunkedAjUtilities.java @@ -19,48 +19,46 @@ class ChunkedAjUtilities { static void bothIncrementalLeftSsaShift(IndexShiftData shiftData, SegmentedSortedArray leftSsa, - Index restampRemovals, QueryTable table, - int nodeSize, ColumnSource stampSource) { + Index restampRemovals, QueryTable table, + int nodeSize, ColumnSource stampSource) { final ChunkType stampChunkType = stampSource.getChunkType(); - final SortingOrder sortOrder = - leftSsa.isReversed() ? SortingOrder.Descending : SortingOrder.Ascending; + final SortingOrder sortOrder = leftSsa.isReversed() ? 
SortingOrder.Descending : SortingOrder.Ascending; try (final Index fullPrevIndex = table.getIndex().getPrevIndex(); - final Index previousToShift = fullPrevIndex.minus(restampRemovals); - final SizedSafeCloseable shiftFillContext = - new SizedSafeCloseable<>(stampSource::makeFillContext); - final SizedSafeCloseable> shiftSortContext = - new SizedSafeCloseable<>( - size -> LongSortKernel.makeContext(stampChunkType, sortOrder, size, true)); - final SizedLongChunk stampKeys = new SizedLongChunk<>(); - final SizedChunk stampValues = new SizedChunk<>(stampChunkType)) { + final Index previousToShift = fullPrevIndex.minus(restampRemovals); + final SizedSafeCloseable shiftFillContext = + new SizedSafeCloseable<>(stampSource::makeFillContext); + final SizedSafeCloseable> shiftSortContext = + new SizedSafeCloseable<>( + size -> LongSortKernel.makeContext(stampChunkType, sortOrder, size, true)); + final SizedLongChunk stampKeys = new SizedLongChunk<>(); + final SizedChunk stampValues = new SizedChunk<>(stampChunkType)) { final IndexShiftData.Iterator sit = shiftData.applyIterator(); while (sit.hasNext()) { sit.next(); - final Index indexToShift = - previousToShift.subindexByKey(sit.beginRange(), sit.endRange()); + final Index indexToShift = previousToShift.subindexByKey(sit.beginRange(), sit.endRange()); if (indexToShift.empty()) { indexToShift.close(); continue; } - applyOneShift(leftSsa, nodeSize, stampSource, shiftFillContext, shiftSortContext, - stampKeys, stampValues, sit, indexToShift); + applyOneShift(leftSsa, nodeSize, stampSource, shiftFillContext, shiftSortContext, stampKeys, + stampValues, sit, indexToShift); indexToShift.close(); } } } - static void applyOneShift(SegmentedSortedArray leftSsa, int nodeSize, - ColumnSource stampSource, SizedSafeCloseable shiftFillContext, - SizedSafeCloseable> shiftSortContext, - SizedLongChunk stampKeys, SizedChunk stampValues, - IndexShiftData.Iterator sit, Index indexToShift) { + static void applyOneShift(SegmentedSortedArray 
leftSsa, int nodeSize, ColumnSource stampSource, + SizedSafeCloseable shiftFillContext, + SizedSafeCloseable> shiftSortContext, + SizedLongChunk stampKeys, SizedChunk stampValues, IndexShiftData.Iterator sit, + Index indexToShift) { if (sit.polarityReversed()) { final int shiftSize = indexToShift.intSize(); - stampSource.fillPrevChunk(shiftFillContext.ensureCapacity(shiftSize), - stampValues.ensureCapacity(shiftSize), indexToShift); + stampSource.fillPrevChunk(shiftFillContext.ensureCapacity(shiftSize), stampValues.ensureCapacity(shiftSize), + indexToShift); indexToShift.fillKeyIndicesChunk(stampKeys.ensureCapacity(shiftSize)); shiftSortContext.ensureCapacity(shiftSize).sort(stampKeys.get(), stampValues.get()); diff --git a/DB/src/main/java/io/deephaven/db/v2/join/JoinKeyState.java b/DB/src/main/java/io/deephaven/db/v2/join/JoinKeyState.java index c0bc8ae8c82..8f583b807d6 100644 --- a/DB/src/main/java/io/deephaven/db/v2/join/JoinKeyState.java +++ b/DB/src/main/java/io/deephaven/db/v2/join/JoinKeyState.java @@ -15,13 +15,12 @@ interface JoinKeyState { void removeRightIndices(Index rightIndices); - void modifyByRightIndices(Index rightIndex);// Informs the state the right index at that - // position was modified + void modifyByRightIndices(Index rightIndex);// Informs the state the right index at that position was modified /** - * After the right side has been changed (all additions, modifications, removals, etc.) have - * been completed; each state is visited calling propagateRightUpdates to update its - * RedirectionIndex and the list of left indicesthat have been modified by right changes. + * After the right side has been changed (all additions, modifications, removals, etc.) have been completed; each + * state is visited calling propagateRightUpdates to update its RedirectionIndex and the list of left indicesthat + * have been modified by right changes. 
*/ void propagateRightUpdates(); @@ -39,8 +38,8 @@ interface JoinKeyState { /** * Intrusive set for touchedStates or statesTouchedByRight. * - * The sets can swap back and forth; so rather than having to remove things from one set and - * enter them into another; we swap which of the two intrusive references we use. + * The sets can swap back and forth; so rather than having to remove things from one set and enter them into + * another; we swap which of the two intrusive references we use. */ int getSlot1(); diff --git a/DB/src/main/java/io/deephaven/db/v2/join/JoinListenerRecorder.java b/DB/src/main/java/io/deephaven/db/v2/join/JoinListenerRecorder.java index bca6daa3a48..3e1554de898 100644 --- a/DB/src/main/java/io/deephaven/db/v2/join/JoinListenerRecorder.java +++ b/DB/src/main/java/io/deephaven/db/v2/join/JoinListenerRecorder.java @@ -8,18 +8,16 @@ import io.deephaven.db.v2.ListenerRecorder; /** - * This is a listener that writes down the indices that were updated on a given clock cycle, and - * then notifies another listener. The intention is that you would have two of these, one for the - * left side and another for the right side of the join. The ListenerRecorders are created before - * the MergedJoinListener, so that they are always fired first in the priority queue. Once the - * MergedJoinListener is fired, it can examine the indices that were recorded into added and - * removed, and thus know what has changed on the left, and also what has changed on the right at - * the same time to produce a consistent output table. + * This is a listener that writes down the indices that were updated on a given clock cycle, and then notifies another + * listener. The intention is that you would have two of these, one for the left side and another for the right side of + * the join. The ListenerRecorders are created before the MergedJoinListener, so that they are always fired first in the + * priority queue. 
Once the MergedJoinListener is fired, it can examine the indices that were recorded into added and + * removed, and thus know what has changed on the left, and also what has changed on the right at the same time to + * produce a consistent output table. */ public class JoinListenerRecorder extends ListenerRecorder { - public JoinListenerRecorder(boolean isLeft, final String description, DynamicTable parent, - DynamicTable dependent) { + public JoinListenerRecorder(boolean isLeft, final String description, DynamicTable parent, DynamicTable dependent) { super(isLeft ? "leftTable: " : "rightTable: " + description, parent, dependent); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/join/ZeroKeyChunkedAjMergedListener.java b/DB/src/main/java/io/deephaven/db/v2/join/ZeroKeyChunkedAjMergedListener.java index 924011c944b..49b0667f99d 100644 --- a/DB/src/main/java/io/deephaven/db/v2/join/ZeroKeyChunkedAjMergedListener.java +++ b/DB/src/main/java/io/deephaven/db/v2/join/ZeroKeyChunkedAjMergedListener.java @@ -52,24 +52,23 @@ public class ZeroKeyChunkedAjMergedListener extends MergedListener { private final ModifiedColumnSet resultModifiedColumnSet; public ZeroKeyChunkedAjMergedListener(JoinListenerRecorder leftRecorder, - JoinListenerRecorder rightRecorder, - String listenerDescription, - QueryTable result, - QueryTable leftTable, - QueryTable rightTable, - MatchPair stampPair, - MatchPair[] columnsToAdd, - ColumnSource leftStampSource, - ColumnSource rightStampSource, - SortingOrder order, - boolean disallowExactMatch, - SsaSsaStamp ssaSsaStamp, - SegmentedSortedArray leftSsa, - SegmentedSortedArray rightSsa, - RedirectionIndex redirectionIndex, - JoinControl joinControl) { - super(Arrays.asList(leftRecorder, rightRecorder), Collections.emptyList(), - listenerDescription, result); + JoinListenerRecorder rightRecorder, + String listenerDescription, + QueryTable result, + QueryTable leftTable, + QueryTable rightTable, + MatchPair stampPair, + MatchPair[] columnsToAdd, + 
ColumnSource leftStampSource, + ColumnSource rightStampSource, + SortingOrder order, + boolean disallowExactMatch, + SsaSsaStamp ssaSsaStamp, + SegmentedSortedArray leftSsa, + SegmentedSortedArray rightSsa, + RedirectionIndex redirectionIndex, + JoinControl joinControl) { + super(Arrays.asList(leftRecorder, rightRecorder), Collections.emptyList(), listenerDescription, result); this.leftRecorder = leftRecorder; this.rightRecorder = rightRecorder; this.leftTable = leftTable; @@ -94,12 +93,11 @@ public ZeroKeyChunkedAjMergedListener(JoinListenerRecorder leftRecorder, leftStampColumn = leftTable.newModifiedColumnSet(stampPair.left()); rightStampColumn = rightTable.newModifiedColumnSet(stampPair.right()); allRightColumns = result.newModifiedColumnSet(MatchPair.getLeftColumns(columnsToAdd)); - leftTransformer = leftTable.newModifiedColumnSetTransformer(result, - leftTable.getDefinition().getColumnNamesArray()); + leftTransformer = + leftTable.newModifiedColumnSetTransformer(result, leftTable.getDefinition().getColumnNamesArray()); rightTransformer = rightTable.newModifiedColumnSetTransformer(result, columnsToAdd); - resultModifiedColumnSet = - result.newModifiedColumnSet(result.getDefinition().getColumnNamesArray()); + resultModifiedColumnSet = result.newModifiedColumnSet(result.getDefinition().getColumnNamesArray()); } @Override @@ -112,23 +110,21 @@ public void process() { final boolean rightTicked = rightRecorder.recordedVariablesAreValid(); final boolean leftStampModified = leftTicked && leftRecorder.getModified().nonempty() - && leftRecorder.getModifiedColumnSet().containsAny(leftStampColumn); - final boolean leftAdditionsOrRemovals = leftStampModified || (leftTicked - && (leftRecorder.getAdded().nonempty() || leftRecorder.getRemoved().nonempty())); + && leftRecorder.getModifiedColumnSet().containsAny(leftStampColumn); + final boolean leftAdditionsOrRemovals = leftStampModified + || (leftTicked && (leftRecorder.getAdded().nonempty() || 
leftRecorder.getRemoved().nonempty())); - try ( - final ColumnSource.FillContext leftFillContext = + try (final ColumnSource.FillContext leftFillContext = leftAdditionsOrRemovals ? leftStampSource.makeFillContext(leftChunkSize) : null; - final WritableChunk leftStampValues = - leftAdditionsOrRemovals ? stampChunkType.makeWritableChunk(leftChunkSize) : null; - final WritableLongChunk leftStampKeys = - leftAdditionsOrRemovals ? WritableLongChunk.makeWritableChunk(leftChunkSize) : null; - final LongSortKernel sortKernel = LongSortKernel.makeContext( - stampChunkType, order, Math.max(leftChunkSize, rightChunkSize), true)) { + final WritableChunk leftStampValues = + leftAdditionsOrRemovals ? stampChunkType.makeWritableChunk(leftChunkSize) : null; + final WritableLongChunk leftStampKeys = + leftAdditionsOrRemovals ? WritableLongChunk.makeWritableChunk(leftChunkSize) : null; + final LongSortKernel sortKernel = LongSortKernel.makeContext(stampChunkType, order, + Math.max(leftChunkSize, rightChunkSize), true)) { final Index.RandomBuilder modifiedBuilder = Index.FACTORY.getRandomBuilder(); - // first we remove anything that is not of interest from the left hand side, because we - // don't want to + // first we remove anything that is not of interest from the left hand side, because we don't want to // process the relevant right hand side changes if (leftTicked) { final Index leftRemoved = leftRecorder.getRemoved(); @@ -143,17 +139,14 @@ public void process() { if (leftRestampRemovals.nonempty()) { leftRestampRemovals.forAllLongs(redirectionIndex::removeVoid); - try (final OrderedKeys.Iterator leftOkIt = - leftRestampRemovals.getOrderedKeysIterator()) { + try (final OrderedKeys.Iterator leftOkIt = leftRestampRemovals.getOrderedKeysIterator()) { assert leftFillContext != null; assert leftStampValues != null; while (leftOkIt.hasMore()) { - final OrderedKeys chunkOk = - leftOkIt.getNextOrderedKeysWithLength(leftChunkSize); + final OrderedKeys chunkOk = 
leftOkIt.getNextOrderedKeysWithLength(leftChunkSize); - leftStampSource.fillPrevChunk(leftFillContext, leftStampValues, - chunkOk); + leftStampSource.fillPrevChunk(leftFillContext, leftStampValues, chunkOk); chunkOk.fillKeyIndicesChunk(leftStampKeys); sortKernel.sort(leftStampKeys, leftStampValues); @@ -170,13 +163,12 @@ public void process() { final IndexShiftData leftShifted = leftRecorder.getShifted(); if (leftShifted.nonempty()) { - // now we apply the left shifts, so that anything in our SSA is a relevant thing - // to stamp + // now we apply the left shifts, so that anything in our SSA is a relevant thing to stamp try (final Index prevIndex = leftTable.getIndex().getPrevIndex()) { redirectionIndex.applyShift(prevIndex, leftShifted); } - ChunkedAjUtilities.bothIncrementalLeftSsaShift(leftShifted, leftSsa, - leftRestampRemovals, leftTable, leftChunkSize, leftStampSource); + ChunkedAjUtilities.bothIncrementalLeftSsaShift(leftShifted, leftSsa, leftRestampRemovals, leftTable, + leftChunkSize, leftStampSource); } } else { downstream.added = Index.FACTORY.getEmptyIndex(); @@ -187,71 +179,57 @@ public void process() { if (rightTicked) { // next we remove and add things from the right hand side - final boolean rightStampModified = - rightRecorder.getModifiedColumnSet().containsAny(rightStampColumn); + final boolean rightStampModified = rightRecorder.getModifiedColumnSet().containsAny(rightStampColumn); - try ( - final ColumnSource.FillContext fillContext = - rightStampSource.makeFillContext(rightChunkSize); - final WritableChunk rightStampValues = - stampChunkType.makeWritableChunk(rightChunkSize); - final WritableLongChunk rightStampKeys = - WritableLongChunk.makeWritableChunk(rightChunkSize)) { + try (final ColumnSource.FillContext fillContext = rightStampSource.makeFillContext(rightChunkSize); + final WritableChunk rightStampValues = stampChunkType.makeWritableChunk(rightChunkSize); + final WritableLongChunk rightStampKeys = + 
WritableLongChunk.makeWritableChunk(rightChunkSize)) { final Index rightRestampRemovals; final Index rightRestampAdditions; final Index rightModified = rightRecorder.getModified(); if (rightStampModified) { rightRestampAdditions = rightRecorder.getAdded().union(rightModified); - rightRestampRemovals = - rightRecorder.getRemoved().union(rightRecorder.getModifiedPreShift()); + rightRestampRemovals = rightRecorder.getRemoved().union(rightRecorder.getModifiedPreShift()); } else { rightRestampAdditions = rightRecorder.getAdded(); rightRestampRemovals = rightRecorder.getRemoved(); } - // When removing a row, record the stamp, redirection key, and prior redirection - // key. Binary search - // in the left for the removed key to find the smallest value geq the removed - // right. Update all rows + // When removing a row, record the stamp, redirection key, and prior redirection key. Binary search + // in the left for the removed key to find the smallest value geq the removed right. Update all rows // with the removed redirection to the previous key. 
- try ( - final OrderedKeys.Iterator removeit = - rightRestampRemovals.getOrderedKeysIterator(); - final WritableLongChunk priorRedirections = - WritableLongChunk.makeWritableChunk(rightChunkSize)) { + try (final OrderedKeys.Iterator removeit = rightRestampRemovals.getOrderedKeysIterator(); + final WritableLongChunk priorRedirections = + WritableLongChunk.makeWritableChunk(rightChunkSize)) { while (removeit.hasMore()) { - final OrderedKeys chunkOk = - removeit.getNextOrderedKeysWithLength(rightChunkSize); + final OrderedKeys chunkOk = removeit.getNextOrderedKeysWithLength(rightChunkSize); rightStampSource.fillPrevChunk(fillContext, rightStampValues, chunkOk); chunkOk.fillKeyIndicesChunk(rightStampKeys); sortKernel.sort(rightStampKeys, rightStampValues); - rightSsa.removeAndGetPrior(rightStampValues, rightStampKeys, - priorRedirections); - ssaSsaStamp.processRemovals(leftSsa, rightStampValues, rightStampKeys, - priorRedirections, redirectionIndex, modifiedBuilder, - disallowExactMatch); + rightSsa.removeAndGetPrior(rightStampValues, rightStampKeys, priorRedirections); + ssaSsaStamp.processRemovals(leftSsa, rightStampValues, rightStampKeys, priorRedirections, + redirectionIndex, modifiedBuilder, disallowExactMatch); } } final IndexShiftData rightShifted = rightRecorder.getShifted(); if (rightShifted.nonempty()) { try (final Index fullPrevIndex = rightTable.getIndex().getPrevIndex(); - final Index previousToShift = fullPrevIndex.minus(rightRestampRemovals); - final SizedSafeCloseable shiftFillContext = - new SizedSafeCloseable<>(rightStampSource::makeFillContext); - final SizedSafeCloseable> shiftSortContext = - new SizedSafeCloseable<>(sz -> LongSortKernel - .makeContext(stampChunkType, order, sz, true)); - final SizedChunk shiftRightStampValues = - new SizedChunk<>(stampChunkType); - final SizedLongChunk shiftRightStampKeys = - new SizedLongChunk<>()) { + final Index previousToShift = fullPrevIndex.minus(rightRestampRemovals); + final SizedSafeCloseable 
shiftFillContext = + new SizedSafeCloseable<>(rightStampSource::makeFillContext); + final SizedSafeCloseable> shiftSortContext = + new SizedSafeCloseable<>( + sz -> LongSortKernel.makeContext(stampChunkType, order, sz, true)); + final SizedChunk shiftRightStampValues = new SizedChunk<>(stampChunkType); + final SizedLongChunk shiftRightStampKeys = new SizedLongChunk<>()) { final IndexShiftData.Iterator sit = rightShifted.applyIterator(); while (sit.hasNext()) { sit.next(); final Index indexToShift = - previousToShift.subindexByKey(sit.beginRange(), sit.endRange()); + previousToShift.subindexByKey(sit.beginRange(), sit.endRange()); if (indexToShift.empty()) { indexToShift.close(); continue; @@ -260,36 +238,29 @@ public void process() { if (sit.polarityReversed()) { final int shiftSize = indexToShift.intSize(); - rightStampSource.fillPrevChunk( - shiftFillContext.ensureCapacity(shiftSize), - shiftRightStampValues.ensureCapacity(shiftSize), - indexToShift); - indexToShift.fillKeyIndicesChunk( - shiftRightStampKeys.ensureCapacity(shiftSize)); - shiftSortContext.ensureCapacity(shiftSize).sort( - shiftRightStampKeys.get(), shiftRightStampValues.get()); + rightStampSource.fillPrevChunk(shiftFillContext.ensureCapacity(shiftSize), + shiftRightStampValues.ensureCapacity(shiftSize), indexToShift); + indexToShift.fillKeyIndicesChunk(shiftRightStampKeys.ensureCapacity(shiftSize)); + shiftSortContext.ensureCapacity(shiftSize).sort(shiftRightStampKeys.get(), + shiftRightStampValues.get()); ssaSsaStamp.applyShift(leftSsa, shiftRightStampValues.get(), - shiftRightStampKeys.get(), sit.shiftDelta(), - redirectionIndex, disallowExactMatch); - rightSsa.applyShiftReverse(shiftRightStampValues.get(), - shiftRightStampKeys.get(), sit.shiftDelta()); + shiftRightStampKeys.get(), sit.shiftDelta(), redirectionIndex, + disallowExactMatch); + rightSsa.applyShiftReverse(shiftRightStampValues.get(), shiftRightStampKeys.get(), + sit.shiftDelta()); } else { - try (final OrderedKeys.Iterator shiftIt 
= - indexToShift.getOrderedKeysIterator()) { + try (final OrderedKeys.Iterator shiftIt = indexToShift.getOrderedKeysIterator()) { while (shiftIt.hasMore()) { - final OrderedKeys chunkOk = shiftIt - .getNextOrderedKeysWithLength(rightChunkSize); - rightStampSource.fillPrevChunk(fillContext, - rightStampValues, chunkOk); + final OrderedKeys chunkOk = + shiftIt.getNextOrderedKeysWithLength(rightChunkSize); + rightStampSource.fillPrevChunk(fillContext, rightStampValues, chunkOk); chunkOk.fillKeyIndicesChunk(rightStampKeys); sortKernel.sort(rightStampKeys, rightStampValues); - rightSsa.applyShift(rightStampValues, rightStampKeys, - sit.shiftDelta()); - ssaSsaStamp.applyShift(leftSsa, rightStampValues, - rightStampKeys, sit.shiftDelta(), redirectionIndex, - disallowExactMatch); + rightSsa.applyShift(rightStampValues, rightStampKeys, sit.shiftDelta()); + ssaSsaStamp.applyShift(leftSsa, rightStampValues, rightStampKeys, + sit.shiftDelta(), redirectionIndex, disallowExactMatch); } } } @@ -300,28 +271,24 @@ public void process() { } - // When adding a row to the right hand side: we need to know which left hand - // side might be - // responsive. If we are a duplicate stamp and not the last one, we ignore it. - // Next, we should binary - // search in the left for the first value >=, everything up until the next - // extant right value should be + // When adding a row to the right hand side: we need to know which left hand side might be + // responsive. If we are a duplicate stamp and not the last one, we ignore it. 
Next, we should + // binary + // search in the left for the first value >=, everything up until the next extant right value should + // be // restamped with our value - try ( - final WritableChunk stampChunk = - stampChunkType.makeWritableChunk(rightChunkSize); - final WritableChunk nextRightValue = - stampChunkType.makeWritableChunk(rightChunkSize); - final WritableLongChunk insertedIndices = - WritableLongChunk.makeWritableChunk(rightChunkSize); - final WritableBooleanChunk retainStamps = - WritableBooleanChunk.makeWritableChunk(rightChunkSize)) { - final int chunks = - (rightRestampAdditions.intSize() + rightChunkSize - 1) / rightChunkSize; + try (final WritableChunk stampChunk = stampChunkType.makeWritableChunk(rightChunkSize); + final WritableChunk nextRightValue = + stampChunkType.makeWritableChunk(rightChunkSize); + final WritableLongChunk insertedIndices = + WritableLongChunk.makeWritableChunk(rightChunkSize); + final WritableBooleanChunk retainStamps = + WritableBooleanChunk.makeWritableChunk(rightChunkSize)) { + final int chunks = (rightRestampAdditions.intSize() + rightChunkSize - 1) / rightChunkSize; for (int ii = 0; ii < chunks; ++ii) { final int startChunk = chunks - ii - 1; - try (final Index chunkOk = rightRestampAdditions.subindexByPos( - startChunk * rightChunkSize, (startChunk + 1) * rightChunkSize)) { + try (final Index chunkOk = rightRestampAdditions.subindexByPos(startChunk * rightChunkSize, + (startChunk + 1) * rightChunkSize)) { final int chunkSize = chunkOk.intSize(); rightStampSource.fillChunk(fillContext, stampChunk, chunkOk); insertedIndices.setSize(chunkSize); @@ -329,15 +296,13 @@ public void process() { sortKernel.sort(insertedIndices, stampChunk); - final int valuesWithNext = rightSsa.insertAndGetNextValue( - stampChunk, insertedIndices, nextRightValue); + final int valuesWithNext = + rightSsa.insertAndGetNextValue(stampChunk, insertedIndices, nextRightValue); - final boolean endsWithLastValue = - valuesWithNext != stampChunk.size(); 
+ final boolean endsWithLastValue = valuesWithNext != stampChunk.size(); if (endsWithLastValue) { stampChunk.setSize(valuesWithNext); - stampChunkEquals.notEqual(stampChunk, nextRightValue, - retainStamps); + stampChunkEquals.notEqual(stampChunk, nextRightValue, retainStamps); stampCompact.compact(nextRightValue, retainStamps); retainStamps.setSize(chunkSize); @@ -345,34 +310,28 @@ public void process() { stampChunk.setSize(chunkSize); } else { // remove duplicates - stampChunkEquals.notEqual(stampChunk, nextRightValue, - retainStamps); + stampChunkEquals.notEqual(stampChunk, nextRightValue, retainStamps); stampCompact.compact(nextRightValue, retainStamps); } LongCompactKernel.compact(insertedIndices, retainStamps); stampCompact.compact(stampChunk, retainStamps); - ssaSsaStamp.processInsertion(leftSsa, stampChunk, insertedIndices, - nextRightValue, redirectionIndex, modifiedBuilder, - endsWithLastValue, disallowExactMatch); + ssaSsaStamp.processInsertion(leftSsa, stampChunk, insertedIndices, nextRightValue, + redirectionIndex, modifiedBuilder, endsWithLastValue, disallowExactMatch); } } } - // if the stamp was not modified, then we need to figure out the responsive rows - // to mark as modified + // if the stamp was not modified, then we need to figure out the responsive rows to mark as modified if (!rightStampModified && rightModified.nonempty()) { - try (final OrderedKeys.Iterator modit = - rightModified.getOrderedKeysIterator()) { + try (final OrderedKeys.Iterator modit = rightModified.getOrderedKeysIterator()) { while (modit.hasMore()) { - final OrderedKeys chunkOk = - modit.getNextOrderedKeysWithLength(rightChunkSize); + final OrderedKeys chunkOk = modit.getNextOrderedKeysWithLength(rightChunkSize); rightStampSource.fillChunk(fillContext, rightStampValues, chunkOk); chunkOk.fillKeyIndicesChunk(rightStampKeys); sortKernel.sort(rightStampKeys, rightStampValues); - ssaSsaStamp.findModified(leftSsa, redirectionIndex, - rightStampValues, rightStampKeys, 
modifiedBuilder, - disallowExactMatch); + ssaSsaStamp.findModified(leftSsa, redirectionIndex, rightStampValues, rightStampKeys, + modifiedBuilder, disallowExactMatch); } } } @@ -383,11 +342,10 @@ public void process() { } if (rightStampModified || rightRecorder.getAdded().nonempty() - || rightRecorder.getRemoved().nonempty()) { + || rightRecorder.getRemoved().nonempty()) { downstream.modifiedColumnSet.setAll(allRightColumns); } else { - rightTransformer.transform(rightRecorder.getModifiedColumnSet(), - downstream.modifiedColumnSet); + rightTransformer.transform(rightRecorder.getModifiedColumnSet(), downstream.modifiedColumnSet); } } } @@ -396,31 +354,27 @@ public void process() { // we add the left side values now final Index leftRestampAdditions; if (leftStampModified) { - leftRestampAdditions = - leftRecorder.getAdded().union(leftRecorder.getModified()); + leftRestampAdditions = leftRecorder.getAdded().union(leftRecorder.getModified()); } else { leftRestampAdditions = leftRecorder.getAdded(); } - try ( - final OrderedKeys.Iterator leftOkIt = - leftRestampAdditions.getOrderedKeysIterator(); - final WritableLongChunk rightKeysForLeft = - WritableLongChunk.makeWritableChunk(leftChunkSize)) { + try (final OrderedKeys.Iterator leftOkIt = leftRestampAdditions.getOrderedKeysIterator(); + final WritableLongChunk rightKeysForLeft = + WritableLongChunk.makeWritableChunk(leftChunkSize)) { while (leftOkIt.hasMore()) { assert leftFillContext != null; assert leftStampValues != null; - final OrderedKeys chunkOk = - leftOkIt.getNextOrderedKeysWithLength(leftChunkSize); + final OrderedKeys chunkOk = leftOkIt.getNextOrderedKeysWithLength(leftChunkSize); leftStampSource.fillChunk(leftFillContext, leftStampValues, chunkOk); chunkOk.fillKeyIndicesChunk(leftStampKeys); sortKernel.sort(leftStampKeys, leftStampValues); leftSsa.insert(leftStampValues, leftStampKeys); - chunkSsaStamp.processEntry(leftStampValues, leftStampKeys, rightSsa, - rightKeysForLeft, disallowExactMatch); + 
chunkSsaStamp.processEntry(leftStampValues, leftStampKeys, rightSsa, rightKeysForLeft, + disallowExactMatch); for (int ii = 0; ii < leftStampKeys.size(); ++ii) { final long leftKey = leftStampKeys.get(ii); @@ -434,8 +388,7 @@ public void process() { } } - leftTransformer.transform(leftRecorder.getModifiedColumnSet(), - downstream.modifiedColumnSet); + leftTransformer.transform(leftRecorder.getModifiedColumnSet(), downstream.modifiedColumnSet); if (leftStampModified) { downstream.modifiedColumnSet.setAll(allRightColumns); leftRestampAdditions.close(); diff --git a/DB/src/main/java/io/deephaven/db/v2/join/dupcompact/DupCompactKernel.java b/DB/src/main/java/io/deephaven/db/v2/join/dupcompact/DupCompactKernel.java index 2191412d63f..34aaa22cbad 100644 --- a/DB/src/main/java/io/deephaven/db/v2/join/dupcompact/DupCompactKernel.java +++ b/DB/src/main/java/io/deephaven/db/v2/join/dupcompact/DupCompactKernel.java @@ -52,12 +52,12 @@ static DupCompactKernel makeDupCompact(ChunkType chunkType, boolean reverse) { } /** - * Remove all adjacent values from chunkToCompact, except the last value in any adjacent run. - * The keyIndices are parallel to the chunkToCompact; and when a value is removed from - * chunkToCompact it is also removed from keyIndices + * Remove all adjacent values from chunkToCompact, except the last value in any adjacent run. The keyIndices are + * parallel to the chunkToCompact; and when a value is removed from chunkToCompact it is also removed from + * keyIndices * - * Additionally, verify that the elements are properly ordered; returning the first position of - * an out of order element. + * Additionally, verify that the elements are properly ordered; returning the first position of an out of order + * element. 
* * @param chunkToCompact the values to remove duplicates from * @param keyIndices the key indices parallel to chunkToCompact @@ -65,5 +65,5 @@ static DupCompactKernel makeDupCompact(ChunkType chunkType, boolean reverse) { * @return the first position of an out-of-order element, or -1 if all elements are in order */ int compactDuplicates(WritableChunk chunkToCompact, - WritableLongChunk keyIndices); + WritableLongChunk keyIndices); } diff --git a/DB/src/main/java/io/deephaven/db/v2/join/dupcompact/ReplicateDupCompactKernel.java b/DB/src/main/java/io/deephaven/db/v2/join/dupcompact/ReplicateDupCompactKernel.java index 401370de8b4..fd3efc9942f 100644 --- a/DB/src/main/java/io/deephaven/db/v2/join/dupcompact/ReplicateDupCompactKernel.java +++ b/DB/src/main/java/io/deephaven/db/v2/join/dupcompact/ReplicateDupCompactKernel.java @@ -19,31 +19,28 @@ public class ReplicateDupCompactKernel { public static void main(String[] args) throws IOException { - final List kernelsToInvert = ReplicatePrimitiveCode - .charToAllButBoolean(CharDupCompactKernel.class, ReplicatePrimitiveCode.MAIN_SRC); - final String objectDupCompact = ReplicatePrimitiveCode - .charToObject(CharDupCompactKernel.class, ReplicatePrimitiveCode.MAIN_SRC); + final List kernelsToInvert = + ReplicatePrimitiveCode.charToAllButBoolean(CharDupCompactKernel.class, ReplicatePrimitiveCode.MAIN_SRC); + final String objectDupCompact = + ReplicatePrimitiveCode.charToObject(CharDupCompactKernel.class, ReplicatePrimitiveCode.MAIN_SRC); fixupObjectDupCompact(objectDupCompact); - kernelsToInvert.add(ReplicatePrimitiveCode.pathForClass(CharDupCompactKernel.class, - ReplicatePrimitiveCode.MAIN_SRC)); + kernelsToInvert + .add(ReplicatePrimitiveCode.pathForClass(CharDupCompactKernel.class, ReplicatePrimitiveCode.MAIN_SRC)); kernelsToInvert.add(objectDupCompact); for (String kernel : kernelsToInvert) { - final String dupCompactReversePath = - kernel.replaceAll("DupCompactKernel", "ReverseDupCompactKernel"); + final String 
dupCompactReversePath = kernel.replaceAll("DupCompactKernel", "ReverseDupCompactKernel"); invertSense(kernel, dupCompactReversePath); if (kernel.contains("Char")) { - final String nullAwarePath = - kernel.replace("CharDupCompactKernel", "NullAwareCharDupCompactKernel"); - fixupCharNullComparisons(CharDupCompactKernel.class, kernel, nullAwarePath, - "CharDupCompactKernel", "NullAwareCharDupCompactKernel", true); - - final String nullAwareDescendingPath = nullAwarePath - .replaceAll("NullAwareCharDupCompact", "NullAwareCharReverseDupCompact"); - fixupCharNullComparisons(CharDupCompactKernel.class, kernel, - nullAwareDescendingPath, "CharDupCompactKernel", - "NullAwareCharReverseDupCompactKernel", false); + final String nullAwarePath = kernel.replace("CharDupCompactKernel", "NullAwareCharDupCompactKernel"); + fixupCharNullComparisons(CharDupCompactKernel.class, kernel, nullAwarePath, "CharDupCompactKernel", + "NullAwareCharDupCompactKernel", true); + + final String nullAwareDescendingPath = + nullAwarePath.replaceAll("NullAwareCharDupCompact", "NullAwareCharReverseDupCompact"); + fixupCharNullComparisons(CharDupCompactKernel.class, kernel, nullAwareDescendingPath, + "CharDupCompactKernel", "NullAwareCharReverseDupCompactKernel", false); } else if (kernel.contains("Float")) { nanFixup(kernel, "Float", true); nanFixup(dupCompactReversePath, "Float", false); @@ -54,16 +51,14 @@ public static void main(String[] args) throws IOException { } } - public static String fixupCharNullComparisons(Class sourceClass, String kernel) - throws IOException { + public static String fixupCharNullComparisons(Class sourceClass, String kernel) throws IOException { final String nullAwarePath = kernel.replace("Char", "NullAwareChar"); - return fixupCharNullComparisons(sourceClass, kernel, nullAwarePath, - sourceClass.getSimpleName(), - sourceClass.getSimpleName().replace("Char", "NullAwareChar"), true); + return fixupCharNullComparisons(sourceClass, kernel, nullAwarePath, 
sourceClass.getSimpleName(), + sourceClass.getSimpleName().replace("Char", "NullAwareChar"), true); } - private static String fixupCharNullComparisons(Class sourceClass, String path, String newPath, - String oldName, String newName, boolean ascending) throws IOException { + private static String fixupCharNullComparisons(Class sourceClass, String path, String newPath, String oldName, + String newName, boolean ascending) throws IOException { final File file = new File(path); List lines = FileUtils.readLines(file, Charset.defaultCharset()); @@ -71,18 +66,17 @@ private static String fixupCharNullComparisons(Class sourceClass, String path, S lines = ReplicateUtilities.addImport(lines, QueryConstants.class, DhCharComparisons.class); // we always replicate ascending then invert - lines = globalReplacements(ReplicateSortKernel.fixupCharNullComparisons(lines, true), - oldName, newName); + lines = globalReplacements(ReplicateSortKernel.fixupCharNullComparisons(lines, true), oldName, newName); if (!ascending) { lines = ReplicateSortKernel.invertComparisons(lines); } lines.addAll(0, Arrays.asList( - "/* ---------------------------------------------------------------------------------------------------------------------", - " * AUTO-GENERATED CLASS - DO NOT EDIT MANUALLY - for any changes edit " - + sourceClass.getSimpleName() + " and regenerate", - " * ------------------------------------------------------------------------------------------------------------------ */")); + "/* ---------------------------------------------------------------------------------------------------------------------", + " * AUTO-GENERATED CLASS - DO NOT EDIT MANUALLY - for any changes edit " + sourceClass.getSimpleName() + + " and regenerate", + " * ------------------------------------------------------------------------------------------------------------------ */")); FileUtils.writeLines(new File(newPath), lines); @@ -93,10 +87,9 @@ private static String fixupCharNullComparisons(Class sourceClass, 
String path, S private static void invertSense(String path, String descendingPath) throws IOException { final File file = new File(path); - List lines = simpleFixup( - ascendingNameToDescendingName(path, - FileUtils.readLines(file, Charset.defaultCharset())), - "initialize last", "MIN_VALUE", "MAX_VALUE"); + List lines = + simpleFixup(ascendingNameToDescendingName(path, FileUtils.readLines(file, Charset.defaultCharset())), + "initialize last", "MIN_VALUE", "MAX_VALUE"); if (path.contains("Object")) { lines = ReplicateSortKernel.fixupObjectComparisons(lines, false); @@ -130,14 +123,12 @@ private static List ascendingNameToDescendingName(String path, List lines = FileUtils.readLines(objectFile, Charset.defaultCharset()); - FileUtils.writeLines(objectFile, - ReplicateSortKernel.fixupObjectComparisons(fixupChunkAttributes(lines))); + FileUtils.writeLines(objectFile, ReplicateSortKernel.fixupObjectComparisons(fixupChunkAttributes(lines))); } @NotNull private static List fixupChunkAttributes(List lines) { - lines = - lines.stream().map(x -> x.replaceAll("ObjectChunk<([^>]*)>", "ObjectChunk")) + lines = lines.stream().map(x -> x.replaceAll("ObjectChunk<([^>]*)>", "ObjectChunk")) .collect(Collectors.toList()); return lines; } diff --git a/DB/src/main/java/io/deephaven/db/v2/join/dupexpand/DupExpandKernel.java b/DB/src/main/java/io/deephaven/db/v2/join/dupexpand/DupExpandKernel.java index 8c2042cc558..2bb0a6fe902 100644 --- a/DB/src/main/java/io/deephaven/db/v2/join/dupexpand/DupExpandKernel.java +++ b/DB/src/main/java/io/deephaven/db/v2/join/dupexpand/DupExpandKernel.java @@ -30,15 +30,13 @@ static DupExpandKernel makeDupExpand(ChunkType chunkType) { } /** - * Expands entries values from chunkToExpand in-place, using keyRunLengths to determine how many - * copies. The keyRunLengths chunk is parallel to the original chunkToExpand; it is never - * modified. + * Expands entries values from chunkToExpand in-place, using keyRunLengths to determine how many copies. 
The + * keyRunLengths chunk is parallel to the original chunkToExpand; it is never modified. * * @param expandedSize the sum of all entries in keyRunLengths - * @param chunkToExpand the values to expand in-place (this writable chunk must have capacity >= - * expandedSize) + * @param chunkToExpand the values to expand in-place (this writable chunk must have capacity >= expandedSize) * @param keyRunLengths the key run-lengths parallel to chunkToExpand */ void expandDuplicates(int expandedSize, WritableChunk chunkToExpand, - IntChunk keyRunLengths); + IntChunk keyRunLengths); } diff --git a/DB/src/main/java/io/deephaven/db/v2/join/dupexpand/ReplicateDupExpandKernel.java b/DB/src/main/java/io/deephaven/db/v2/join/dupexpand/ReplicateDupExpandKernel.java index 48c68c16382..0d65b0a3429 100644 --- a/DB/src/main/java/io/deephaven/db/v2/join/dupexpand/ReplicateDupExpandKernel.java +++ b/DB/src/main/java/io/deephaven/db/v2/join/dupexpand/ReplicateDupExpandKernel.java @@ -12,16 +12,15 @@ public class ReplicateDupExpandKernel { public static void main(String[] args) throws IOException { - ReplicatePrimitiveCode.charToAll(CharDupExpandKernel.class, - ReplicatePrimitiveCode.MAIN_SRC); - fixupObjectDupCompact(ReplicatePrimitiveCode.charToObject(CharDupExpandKernel.class, - ReplicatePrimitiveCode.MAIN_SRC)); + ReplicatePrimitiveCode.charToAll(CharDupExpandKernel.class, ReplicatePrimitiveCode.MAIN_SRC); + fixupObjectDupCompact( + ReplicatePrimitiveCode.charToObject(CharDupExpandKernel.class, ReplicatePrimitiveCode.MAIN_SRC)); } private static void fixupObjectDupCompact(String objectPath) throws IOException { final File objectFile = new File(objectPath); final List lines = FileUtils.readLines(objectFile, Charset.defaultCharset()); - FileUtils.writeLines(objectFile, ReplicateSortKernel - .fixupObjectComparisons(ReplicateUtilities.fixupChunkAttributes(lines))); + FileUtils.writeLines(objectFile, + 
ReplicateSortKernel.fixupObjectComparisons(ReplicateUtilities.fixupChunkAttributes(lines))); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/join/stamp/ReplicateStampKernel.java b/DB/src/main/java/io/deephaven/db/v2/join/stamp/ReplicateStampKernel.java index 31c9bdf0eea..182d20562d3 100644 --- a/DB/src/main/java/io/deephaven/db/v2/join/stamp/ReplicateStampKernel.java +++ b/DB/src/main/java/io/deephaven/db/v2/join/stamp/ReplicateStampKernel.java @@ -16,37 +16,35 @@ public class ReplicateStampKernel { public static void main(String[] args) throws IOException { - final List stampKernels = ReplicatePrimitiveCode - .charToAllButBoolean(CharStampKernel.class, ReplicatePrimitiveCode.MAIN_SRC); + final List stampKernels = + ReplicatePrimitiveCode.charToAllButBoolean(CharStampKernel.class, ReplicatePrimitiveCode.MAIN_SRC); final List noExactStampKernels = ReplicatePrimitiveCode - .charToAllButBoolean(CharNoExactStampKernel.class, ReplicatePrimitiveCode.MAIN_SRC); + .charToAllButBoolean(CharNoExactStampKernel.class, ReplicatePrimitiveCode.MAIN_SRC); stampKernels.addAll(noExactStampKernels); - final String charStampPath = ReplicatePrimitiveCode.pathForClass(CharStampKernel.class, - ReplicatePrimitiveCode.MAIN_SRC); + final String charStampPath = + ReplicatePrimitiveCode.pathForClass(CharStampKernel.class, ReplicatePrimitiveCode.MAIN_SRC); stampKernels.add(charStampPath); - final String charNoExactStampPath = ReplicatePrimitiveCode - .pathForClass(CharNoExactStampKernel.class, ReplicatePrimitiveCode.MAIN_SRC); + final String charNoExactStampPath = + ReplicatePrimitiveCode.pathForClass(CharNoExactStampKernel.class, ReplicatePrimitiveCode.MAIN_SRC); stampKernels.add(charNoExactStampPath); - final String objectStamp = ReplicatePrimitiveCode.charToObject(CharStampKernel.class, - ReplicatePrimitiveCode.MAIN_SRC); + final String objectStamp = + ReplicatePrimitiveCode.charToObject(CharStampKernel.class, ReplicatePrimitiveCode.MAIN_SRC); fixupObjectStamp(objectStamp); - final 
String objectNoExactStamp = ReplicatePrimitiveCode - .charToObject(CharNoExactStampKernel.class, ReplicatePrimitiveCode.MAIN_SRC); + final String objectNoExactStamp = + ReplicatePrimitiveCode.charToObject(CharNoExactStampKernel.class, ReplicatePrimitiveCode.MAIN_SRC); fixupObjectStamp(objectNoExactStamp); stampKernels.add(objectStamp); stampKernels.add(objectNoExactStamp); - stampKernels.add(ReplicateDupCompactKernel.fixupCharNullComparisons(CharStampKernel.class, - charStampPath)); - stampKernels.add(ReplicateDupCompactKernel - .fixupCharNullComparisons(CharNoExactStampKernel.class, charNoExactStampPath)); + stampKernels.add(ReplicateDupCompactKernel.fixupCharNullComparisons(CharStampKernel.class, charStampPath)); + stampKernels.add( + ReplicateDupCompactKernel.fixupCharNullComparisons(CharNoExactStampKernel.class, charNoExactStampPath)); for (String stampKernel : stampKernels) { - final String stampReversePath = - stampKernel.replaceAll("StampKernel", "ReverseStampKernel"); + final String stampReversePath = stampKernel.replaceAll("StampKernel", "ReverseStampKernel"); invertSense(stampKernel, stampReversePath); if (stampKernel.contains("Double")) { @@ -62,8 +60,7 @@ public static void main(String[] args) throws IOException { private static void invertSense(String path, String descendingPath) throws IOException { final File file = new File(path); - List lines = ascendingNameToDescendingName(path, - FileUtils.readLines(file, Charset.defaultCharset())); + List lines = ascendingNameToDescendingName(path, FileUtils.readLines(file, Charset.defaultCharset())); if (path.contains("Object")) { lines = ReplicateSortKernel.fixupObjectComparisons(lines, false); @@ -85,14 +82,12 @@ private static List ascendingNameToDescendingName(String path, List lines = FileUtils.readLines(objectFile, Charset.defaultCharset()); - FileUtils.writeLines(objectFile, - ReplicateSortKernel.fixupObjectComparisons(fixupChunkAttributes(lines))); + FileUtils.writeLines(objectFile, 
ReplicateSortKernel.fixupObjectComparisons(fixupChunkAttributes(lines))); } @NotNull private static List fixupChunkAttributes(List lines) { - lines = - lines.stream().map(x -> x.replaceAll("ObjectChunk<([^>]*)>", "ObjectChunk")) + lines = lines.stream().map(x -> x.replaceAll("ObjectChunk<([^>]*)>", "ObjectChunk")) .collect(Collectors.toList()); return lines; } diff --git a/DB/src/main/java/io/deephaven/db/v2/join/stamp/StampKernel.java b/DB/src/main/java/io/deephaven/db/v2/join/stamp/StampKernel.java index f2cc367a5e9..cbc489caaa7 100644 --- a/DB/src/main/java/io/deephaven/db/v2/join/stamp/StampKernel.java +++ b/DB/src/main/java/io/deephaven/db/v2/join/stamp/StampKernel.java @@ -7,8 +7,7 @@ import org.jetbrains.annotations.NotNull; public interface StampKernel extends Context { - static StampKernel makeStampKernel(ChunkType type, SortingOrder order, - boolean disallowExactMatch) { + static StampKernel makeStampKernel(ChunkType type, SortingOrder order, boolean disallowExactMatch) { if (disallowExactMatch) { if (order == SortingOrder.Descending) { return makeReverseStampKernelNoExact(type); @@ -127,14 +126,14 @@ static StampKernel makeReverseStampKernelNoExact(ChunkType type) { /** * Stamps the left-hand-side values with the corresponding right hand side. * - * The rightKeyIndices are parallel to the stamp values in rightStamps; and used to compute a - * new chunk of redirections parallel to leftStamps. + * The rightKeyIndices are parallel to the stamp values in rightStamps; and used to compute a new chunk of + * redirections parallel to leftStamps. 
* * @param leftStamps the input lhs stamp values * @param rightStamps the input rhs stamp values * @param rightKeyIndices the input rhs stamp indices * @param leftRedirections the resulting redirections from the stamping operation */ - void computeRedirections(Chunk leftStamps, Chunk rightStamps, - LongChunk rightKeyIndices, WritableLongChunk leftRedirections); + void computeRedirections(Chunk leftStamps, Chunk rightStamps, LongChunk rightKeyIndices, + WritableLongChunk leftRedirections); } diff --git a/DB/src/main/java/io/deephaven/db/v2/locations/BasicTableDataListener.java b/DB/src/main/java/io/deephaven/db/v2/locations/BasicTableDataListener.java index 7e02a0d08f6..7b55fbf1e2c 100644 --- a/DB/src/main/java/io/deephaven/db/v2/locations/BasicTableDataListener.java +++ b/DB/src/main/java/io/deephaven/db/v2/locations/BasicTableDataListener.java @@ -8,10 +8,9 @@ public interface BasicTableDataListener { /** - * Notify the listener that an exception was encountered while initiating or maintaining the - * subscription. Delivery of an exception implies that the subscription is no longer valid. This - * might happen during subscription establishment, and consequently should be checked for - * after subscribe completes. + * Notify the listener that an exception was encountered while initiating or maintaining the subscription. Delivery + * of an exception implies that the subscription is no longer valid. This might happen during subscription + * establishment, and consequently should be checked for after subscribe completes. * * @param exception The exception */ diff --git a/DB/src/main/java/io/deephaven/db/v2/locations/ColumnLocation.java b/DB/src/main/java/io/deephaven/db/v2/locations/ColumnLocation.java index 30e8a14df51..0bb633cf9c0 100644 --- a/DB/src/main/java/io/deephaven/db/v2/locations/ColumnLocation.java +++ b/DB/src/main/java/io/deephaven/db/v2/locations/ColumnLocation.java @@ -18,8 +18,7 @@ /** * Per-TableLocation, per-column key, state, and factory object. 
*/ -public interface ColumnLocation - extends StringUtils.StringKeyedObject, NamedImplementation, LogOutputAppendable { +public interface ColumnLocation extends StringUtils.StringKeyedObject, NamedImplementation, LogOutputAppendable { /** * Get the {@link TableLocation} enclosing this ColumnLocation. @@ -48,9 +47,9 @@ public interface ColumnLocation *

    * Get the metadata object stored with this column, or null if no such data exists. *

    - * This is typically a value to range map (grouping metadata). The value to range map, if - * non-null, is a map from unique (boxed) column values for this location to the associated - * ranges in which they occur. Ranges are either 2-element int[]s, or 2-element long[]s. + * This is typically a value to range map (grouping metadata). The value to range map, if non-null, is a map from + * unique (boxed) column values for this location to the associated ranges in which they occur. Ranges are either + * 2-element int[]s, or 2-element long[]s. * * @return The metadata stored with this column, or null if no such data exists */ @@ -108,16 +107,14 @@ public interface ColumnLocation * @return A {@link ColumnRegionDouble} for reading data from this ColumnLocation * @throws UnsupportedOperationException If this ColumnLocation does not contain double data */ - ColumnRegionDouble makeColumnRegionDouble( - @NotNull ColumnDefinition columnDefinition); + ColumnRegionDouble makeColumnRegionDouble(@NotNull ColumnDefinition columnDefinition); /** * @param columnDefinition The {@link ColumnDefinition} used to lookup type information * @return A {@link ColumnRegionObject} for reading data from this ColumnLocation * @throws UnsupportedOperationException If this ColumnLocation does not contain object data */ - ColumnRegionObject makeColumnRegionObject( - @NotNull ColumnDefinition columnDefinition); + ColumnRegionObject makeColumnRegionObject(@NotNull ColumnDefinition columnDefinition); // ------------------------------------------------------------------------------------------------------------------ // StringKeyedObject implementation @@ -135,15 +132,15 @@ default String getStringRepresentation() { @Override default LogOutput append(@NotNull final LogOutput logOutput) { return logOutput.append(getTableLocation()) - .append(':').append(getImplementationName()) - .append('[').append(getName()) - .append(']'); + .append(':').append(getImplementationName()) + 
.append('[').append(getName()) + .append(']'); } default String toStringHelper() { return getTableLocation().toString() - + ':' + getImplementationName() - + '[' + getName() - + ']'; + + ':' + getImplementationName() + + '[' + getName() + + ']'; } } diff --git a/DB/src/main/java/io/deephaven/db/v2/locations/GroupingProvider.java b/DB/src/main/java/io/deephaven/db/v2/locations/GroupingProvider.java index 950f3611471..e5a10d79c74 100644 --- a/DB/src/main/java/io/deephaven/db/v2/locations/GroupingProvider.java +++ b/DB/src/main/java/io/deephaven/db/v2/locations/GroupingProvider.java @@ -19,15 +19,14 @@ public interface GroupingProvider { /** - * Make a new {@link GroupingProvider} for the specified {@link ColumnDefinition} and current - * global configuration. + * Make a new {@link GroupingProvider} for the specified {@link ColumnDefinition} and current global configuration. * * @param columnDefinition The column definition * @return A new {@link GroupingProvider} */ @NotNull static GroupingProvider makeGroupingProvider( - @NotNull final ColumnDefinition columnDefinition) { + @NotNull final ColumnDefinition columnDefinition) { return new ParallelDeferredGroupingProvider<>(columnDefinition); } @@ -39,17 +38,16 @@ static GroupingProvider makeGroupingProvider( Map getGroupToRange(); /** - * Returns a grouping structure, possibly constructed on-demand; the grouping is only required - * to include groupings for values that exist within the hint Index; but it may include more. - * The hint allows the underlying implementation to optionally optimize out groupings that do - * not overlap hint. + * Returns a grouping structure, possibly constructed on-demand; the grouping is only required to include groupings + * for values that exist within the hint Index; but it may include more. The hint allows the underlying + * implementation to optionally optimize out groupings that do not overlap hint. *

    - * The return value is a pair, containing a "complete" indicator. If the complete indicator is - * true, then the caller may safely cache the resultant Map. + * The return value is a pair, containing a "complete" indicator. If the complete indicator is true, then the caller + * may safely cache the resultant Map. * * @param hint required indices within the resultant Map - * @return a Pair containing a Map from grouping keys to Indices, which includes at least the - * hint indices; and a Boolean which indicates that the grouping is complete + * @return a Pair containing a Map from grouping keys to Indices, which includes at least the hint indices; and a + * Boolean which indicates that the grouping is complete */ Pair, Boolean> getGroupToRange(Index hint); } diff --git a/DB/src/main/java/io/deephaven/db/v2/locations/KeyRangeGroupingProvider.java b/DB/src/main/java/io/deephaven/db/v2/locations/KeyRangeGroupingProvider.java index 331f36e7bce..522c5b91e54 100644 --- a/DB/src/main/java/io/deephaven/db/v2/locations/KeyRangeGroupingProvider.java +++ b/DB/src/main/java/io/deephaven/db/v2/locations/KeyRangeGroupingProvider.java @@ -4,8 +4,8 @@ import org.jetbrains.annotations.NotNull; /** - * Interface for {@link GroupingProvider}s that operate on metadata derived from a - * {@link ColumnLocation} for a given {@link io.deephaven.db.v2.utils.Index} key range. + * Interface for {@link GroupingProvider}s that operate on metadata derived from a {@link ColumnLocation} for a given + * {@link io.deephaven.db.v2.utils.Index} key range. */ public interface KeyRangeGroupingProvider extends GroupingProvider { @@ -15,6 +15,5 @@ public interface KeyRangeGroupingProvider extends GroupingProvider - * Subscribe to pushed location updates. Subscribing more than once with the same listener - * without an intervening unsubscribe is an error, and may result in undefined behavior. + * Subscribe to pushed location updates. 
Subscribing more than once with the same listener without an intervening + * unsubscribe is an error, and may result in undefined behavior. *

    - * This is a possibly asynchronous operation - listener will receive 1 or more handleUpdate - * callbacks, followed by 0 or 1 handleException callbacks during invocation and continuing - * after completion, on a thread determined by the implementation. Don't hold a lock that - * prevents notification delivery while subscribing! + * This is a possibly asynchronous operation - listener will receive 1 or more handleUpdate callbacks, followed by 0 + * or 1 handleException callbacks during invocation and continuing after completion, on a thread determined by the + * implementation. Don't hold a lock that prevents notification delivery while subscribing! *

    - * This method only guarantees eventually consistent state. To force a state update, use - * refresh() after subscription completes. + * This method only guarantees eventually consistent state. To force a state update, use refresh() after + * subscription completes. * * @param listener A listener */ @@ -97,9 +94,9 @@ interface Listener extends BasicTableDataListener { @FinalDefault default LogOutput append(@NotNull final LogOutput logOutput) { return logOutput.append(getTableKey()) - .append(':').append(getImplementationName()) - .append('[').append(getKey()) - .append(']'); + .append(':').append(getImplementationName()) + .append('[').append(getKey()) + .append(']'); } @FinalDefault diff --git a/DB/src/main/java/io/deephaven/db/v2/locations/TableLocationKey.java b/DB/src/main/java/io/deephaven/db/v2/locations/TableLocationKey.java index 040c295c1bf..420cc8edf30 100644 --- a/DB/src/main/java/io/deephaven/db/v2/locations/TableLocationKey.java +++ b/DB/src/main/java/io/deephaven/db/v2/locations/TableLocationKey.java @@ -8,28 +8,26 @@ /** *

    - * Interface for opaque table location keys for use in {@link TableLocationProvider} - * implementations. Note that implementations are generally only comparable to other implementations - * intended for use in the same provider and discovery framework. + * Interface for opaque table location keys for use in {@link TableLocationProvider} implementations. Note that + * implementations are generally only comparable to other implementations intended for use in the same provider and + * discovery framework. * *

    - * This interface also provides a mechanism for communicating partition information from a - * discovery framework to the table engine. A partition of a table represents some sub-range of the - * overall available data, but can always be thought of as a table in its own right. By representing - * partition membership as an ordered set of key-value pairs with mutually-comparable values, we - * make it possible to: + * This interface also provides a mechanism for communicating partition information from a discovery framework + * to the table engine. A partition of a table represents some sub-range of the overall available data, but can always + * be thought of as a table in its own right. By representing partition membership as an ordered set of key-value pairs + * with mutually-comparable values, we make it possible to: *

      *
    1. Totally order the set of partitions belonging to a table, and thus all rows of the table
    2. - *
    3. Refer to partitions via columns of the data, allowing vast savings in filtering efficiency - * for filters that only need evaluate one or more partitioning columns
    4. + *
    5. Refer to partitions via columns of the data, allowing vast savings in filtering efficiency for filters that only + * need evaluate one or more partitioning columns
    6. *
    * *

    - * Generally, only {@link io.deephaven.db.v2.PartitionAwareSourceTable PartitionAwareSourceTable} - * and {@link io.deephaven.db.v2.SourceTableMap SourceTableMap} are properly partition-aware. + * Generally, only {@link io.deephaven.db.v2.PartitionAwareSourceTable PartitionAwareSourceTable} and + * {@link io.deephaven.db.v2.SourceTableMap SourceTableMap} are properly partition-aware. */ -public interface TableLocationKey - extends Comparable, NamedImplementation, LogOutputAppendable { +public interface TableLocationKey extends Comparable, NamedImplementation, LogOutputAppendable { /** * Lookup the value of one of the table partitions enclosing the location keyed by {@code this}. @@ -40,7 +38,7 @@ public interface TableLocationKey * @throws UnknownPartitionKeyException If the partition cannot be found */ > PARTITION_VALUE_TYPE getPartitionValue( - @NotNull final String partitionKey); + @NotNull final String partitionKey); /** * Get the set of available partition keys. diff --git a/DB/src/main/java/io/deephaven/db/v2/locations/TableLocationProvider.java b/DB/src/main/java/io/deephaven/db/v2/locations/TableLocationProvider.java index 3ef90f36a91..0d5366b7af7 100644 --- a/DB/src/main/java/io/deephaven/db/v2/locations/TableLocationProvider.java +++ b/DB/src/main/java/io/deephaven/db/v2/locations/TableLocationProvider.java @@ -24,9 +24,9 @@ public interface TableLocationProvider extends NamedImplementation { interface Listener extends BasicTableDataListener { /** - * Notify the listener of a {@link TableLocationKey} encountered while initiating or - * maintaining the location subscription. This should occur at most once per location, but - * the order of delivery is not guaranteed. + * Notify the listener of a {@link TableLocationKey} encountered while initiating or maintaining the location + * subscription. This should occur at most once per location, but the order of delivery is not + * guaranteed. 
* * @param tableLocationKey The new table location key */ @@ -42,19 +42,18 @@ interface Listener extends BasicTableDataListener { /** *

    - * Subscribe to pushed location additions. Subscribing more than once with the same listener - * without an intervening unsubscribe is an error, and may result in undefined behavior. + * Subscribe to pushed location additions. Subscribing more than once with the same listener without an intervening + * unsubscribe is an error, and may result in undefined behavior. *

    - * This is a possibly asynchronous operation - listener will receive 0 or more - * handleTableLocationKey callbacks, followed by 0 or 1 handleException callbacks during - * invocation and continuing after completion, on a thread determined by the implementation. As - * noted in {@link Listener#handleException(TableDataException)}, an exception callback - * signifies that the subscription is no longer valid, and no unsubscribe is required in that - * case. Callers must not hold any lock that prevents notification delivery while - * subscribing. Callers must guard against duplicate notifications. + * This is a possibly asynchronous operation - listener will receive 0 or more handleTableLocationKey callbacks, + * followed by 0 or 1 handleException callbacks during invocation and continuing after completion, on a thread + * determined by the implementation. As noted in {@link Listener#handleException(TableDataException)}, an exception + * callback signifies that the subscription is no longer valid, and no unsubscribe is required in that case. Callers + * must not hold any lock that prevents notification delivery while subscribing. Callers must guard + * against duplicate notifications. *

    - * This method only guarantees eventually consistent state. To force a state update, use - * refresh() after subscription completes. + * This method only guarantees eventually consistent state. To force a state update, use refresh() after + * subscription completes. * * @param listener A listener. */ @@ -73,18 +72,18 @@ interface Listener extends BasicTableDataListener { void refresh(); /** - * Ensure that this location provider is initialized. Mainly useful in cases where it cannot be - * otherwise guaranteed that {@link #refresh()} or {@link #subscribe(Listener)} has been called - * prior to calls to the various table location fetch methods. + * Ensure that this location provider is initialized. Mainly useful in cases where it cannot be otherwise guaranteed + * that {@link #refresh()} or {@link #subscribe(Listener)} has been called prior to calls to the various table + * location fetch methods. * * @return this, to allow method chaining */ TableLocationProvider ensureInitialized(); /** - * Get this provider's currently known location keys. The locations specified by the keys - * returned may have null size - that is, they may not "exist" for application purposes. - * {@link #getTableLocation(TableLocationKey)} is guaranteed to succeed for all results. + * Get this provider's currently known location keys. The locations specified by the keys returned may have null + * size - that is, they may not "exist" for application purposes. {@link #getTableLocation(TableLocationKey)} is + * guaranteed to succeed for all results. 
* * @return A collection of keys for locations available from this provider */ diff --git a/DB/src/main/java/io/deephaven/db/v2/locations/TableLocationState.java b/DB/src/main/java/io/deephaven/db/v2/locations/TableLocationState.java index a0d0500435c..84fbca393c8 100644 --- a/DB/src/main/java/io/deephaven/db/v2/locations/TableLocationState.java +++ b/DB/src/main/java/io/deephaven/db/v2/locations/TableLocationState.java @@ -6,9 +6,8 @@ import org.jetbrains.annotations.NotNull; /** - * Interface for the mutable fields of a table location. Supports multi-value copy methods, so that - * applications needing a consistent view of all fields can work with a local copy while only - * locking this object for a short while. + * Interface for the mutable fields of a table location. Supports multi-value copy methods, so that applications needing + * a consistent view of all fields can work with a local copy while only locking this object for a short while. */ public interface TableLocationState { @@ -16,37 +15,34 @@ public interface TableLocationState { long NULL_TIME = Long.MIN_VALUE; /** - * @return The Object that accessors should synchronize on if they want to invoke multiple - * getters with consistent results. + * @return The Object that accessors should synchronize on if they want to invoke multiple getters with consistent + * results. */ @NotNull Object getStateLock(); /** - * @return The (possibly-empty) {@link ReadOnlyIndex index} of a table location, or {@code null} - * if index information is unknown or does not exist for this table location. + * @return The (possibly-empty) {@link ReadOnlyIndex index} of a table location, or {@code null} if index + * information is unknown or does not exist for this table location. * @implNote This index must not have any key larger than - * {@link RegionedColumnSource#ELEMENT_INDEX_TO_SUB_REGION_ELEMENT_INDEX_MASK the - * region mask}. 
- * @apiNote The returned index will be a "clone", meaning the caller must - * {@link ReadOnlyIndex#close()} it when finished. + * {@link RegionedColumnSource#ELEMENT_INDEX_TO_SUB_REGION_ELEMENT_INDEX_MASK the region mask}. + * @apiNote The returned index will be a "clone", meaning the caller must {@link ReadOnlyIndex#close()} it when + * finished. */ ReadOnlyIndex getIndex(); /** * @return The size of a table location:
    - * {@link #NULL_SIZE NULL_SIZE}: Size information is unknown or does not exist for this - * location
    + * {@link #NULL_SIZE NULL_SIZE}: Size information is unknown or does not exist for this location
    * {@code >= 0}: The table location exists and has (possibly empty) data */ long getSize(); /** * @return The last modified time for a table location, in milliseconds from the epoch:
    - * {@link #NULL_TIME NULL_TIME}: Modification time information is unknown or does not - * exist for this location
    - * {@code >= 0}: The time this table was last modified, in milliseconds from the UTC - * epoch + * {@link #NULL_TIME NULL_TIME}: Modification time information is unknown or does not exist for this + * location
    + * {@code >= 0}: The time this table was last modified, in milliseconds from the UTC epoch */ long getLastModifiedTimeMillis(); diff --git a/DB/src/main/java/io/deephaven/db/v2/locations/TableLocationStateHolder.java b/DB/src/main/java/io/deephaven/db/v2/locations/TableLocationStateHolder.java index 4496eba8237..bf8f65dc288 100644 --- a/DB/src/main/java/io/deephaven/db/v2/locations/TableLocationStateHolder.java +++ b/DB/src/main/java/io/deephaven/db/v2/locations/TableLocationStateHolder.java @@ -5,16 +5,15 @@ import org.jetbrains.annotations.Nullable; /** - * Tool for generic multi-field "atomic" get/set of state values for a table location. NB: - * Possibly-concurrent usages should be externally synchronized. + * Tool for generic multi-field "atomic" get/set of state values for a table location. NB: Possibly-concurrent usages + * should be externally synchronized. */ public class TableLocationStateHolder implements TableLocationState { private ReadOnlyIndex index; private volatile long lastModifiedTimeMillis; - private TableLocationStateHolder(@Nullable final ReadOnlyIndex index, - final long lastModifiedTimeMillis) { + private TableLocationStateHolder(@Nullable final ReadOnlyIndex index, final long lastModifiedTimeMillis) { this.index = index; this.lastModifiedTimeMillis = lastModifiedTimeMillis; } @@ -62,18 +61,17 @@ protected final void clearValues() { /** * Set all state values. * - * @param index The new index. Ownership passes to this holder; callers should - * {@link ReadOnlyIndex#clone() clone} it if necessary. + * @param index The new index. Ownership passes to this holder; callers should {@link ReadOnlyIndex#clone() clone} + * it if necessary. 
* @param lastModifiedTimeMillis The new modification time * @return Whether any of the values changed */ public final synchronized boolean setValues(@Nullable final ReadOnlyIndex index, - final long lastModifiedTimeMillis) { + final long lastModifiedTimeMillis) { boolean changed = false; if (index != this.index) { - // Currently, locations *must* be add-only. Consequently, we assume that a size check is - // sufficient. + // Currently, locations *must* be add-only. Consequently, we assume that a size check is sufficient. changed = (index == null || this.index == null || index.size() != this.index.size()); if (this.index != null) { this.index.close(); diff --git a/DB/src/main/java/io/deephaven/db/v2/locations/UnknownPartitionKeyException.java b/DB/src/main/java/io/deephaven/db/v2/locations/UnknownPartitionKeyException.java index 39737efaa1b..19cbd313ef1 100644 --- a/DB/src/main/java/io/deephaven/db/v2/locations/UnknownPartitionKeyException.java +++ b/DB/src/main/java/io/deephaven/db/v2/locations/UnknownPartitionKeyException.java @@ -4,8 +4,7 @@ import org.jetbrains.annotations.NotNull; /** - * Exception thrown when a requested partition value cannot be found because the partition key is - * unknown. + * Exception thrown when a requested partition value cannot be found because the partition key is unknown. 
*/ public class UnknownPartitionKeyException extends UncheckedDeephavenException { @@ -14,7 +13,7 @@ public UnknownPartitionKeyException(@NotNull final String partitionKey) { } public UnknownPartitionKeyException(@NotNull final String partitionKey, - @NotNull final TableLocationKey locationKey) { + @NotNull final TableLocationKey locationKey) { super("Unknown partition key " + partitionKey + " for table location key " + locationKey); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/locations/impl/AbstractColumnLocation.java b/DB/src/main/java/io/deephaven/db/v2/locations/impl/AbstractColumnLocation.java index 6ec461d68cc..5cbe27c8371 100644 --- a/DB/src/main/java/io/deephaven/db/v2/locations/impl/AbstractColumnLocation.java +++ b/DB/src/main/java/io/deephaven/db/v2/locations/impl/AbstractColumnLocation.java @@ -13,8 +13,7 @@ public abstract class AbstractColumnLocation implements ColumnLocation { private final TableLocation tableLocation; private final String name; - protected AbstractColumnLocation(@NotNull final TableLocation tableLocation, - @NotNull final String name) { + protected AbstractColumnLocation(@NotNull final TableLocation tableLocation, @NotNull final String name) { this.tableLocation = Require.neqNull(tableLocation, "tableLocation"); this.name = Require.neqNull(name, "name"); } diff --git a/DB/src/main/java/io/deephaven/db/v2/locations/impl/AbstractTableDataService.java b/DB/src/main/java/io/deephaven/db/v2/locations/impl/AbstractTableDataService.java index a1a23967460..0e86e5a1ed7 100644 --- a/DB/src/main/java/io/deephaven/db/v2/locations/impl/AbstractTableDataService.java +++ b/DB/src/main/java/io/deephaven/db/v2/locations/impl/AbstractTableDataService.java @@ -17,7 +17,7 @@ public abstract class AbstractTableDataService implements TableDataService, Name private final String name; private final KeyedObjectHashMap tableLocationProviders = - new KeyedObjectHashMap<>(ProviderKeyDefinition.INSTANCE); + new 
KeyedObjectHashMap<>(ProviderKeyDefinition.INSTANCE); /** * Construct an AbstractTableDataService. @@ -59,11 +59,9 @@ public String getName() { /** * Key definition for {@link TableLocationProvider} lookup by {@link TableKey}. */ - private static final class ProviderKeyDefinition - extends KeyedObjectKey.Basic { + private static final class ProviderKeyDefinition extends KeyedObjectKey.Basic { - private static final KeyedObjectKey INSTANCE = - new ProviderKeyDefinition(); + private static final KeyedObjectKey INSTANCE = new ProviderKeyDefinition(); private ProviderKeyDefinition() {} diff --git a/DB/src/main/java/io/deephaven/db/v2/locations/impl/AbstractTableLocation.java b/DB/src/main/java/io/deephaven/db/v2/locations/impl/AbstractTableLocation.java index 2575dc2d81a..c053cbd7718 100644 --- a/DB/src/main/java/io/deephaven/db/v2/locations/impl/AbstractTableLocation.java +++ b/DB/src/main/java/io/deephaven/db/v2/locations/impl/AbstractTableLocation.java @@ -15,15 +15,15 @@ * Partial TableLocation implementation for use by TableDataService implementations. 
*/ public abstract class AbstractTableLocation - extends SubscriptionAggregator - implements TableLocation { + extends SubscriptionAggregator + implements TableLocation { private final ImmutableTableKey tableKey; private final ImmutableTableLocationKey tableLocationKey; private final TableLocationStateHolder state = new TableLocationStateHolder(); private final KeyedObjectHashMap columnLocations = - new KeyedObjectHashMap<>(StringUtils.charSequenceKey()); + new KeyedObjectHashMap<>(StringUtils.charSequenceKey()); /** * @param tableKey Table key for the table this location belongs to @@ -31,12 +31,11 @@ public abstract class AbstractTableLocation * @param supportsSubscriptions Whether subscriptions are to be supported */ protected AbstractTableLocation(@NotNull final TableKey tableKey, - @NotNull final TableLocationKey tableLocationKey, - final boolean supportsSubscriptions) { + @NotNull final TableLocationKey tableLocationKey, + final boolean supportsSubscriptions) { super(supportsSubscriptions); this.tableKey = Require.neqNull(tableKey, "tableKey").makeImmutable(); - this.tableLocationKey = - Require.neqNull(tableLocationKey, "tableLocationKey").makeImmutable(); + this.tableLocationKey = Require.neqNull(tableLocationKey, "tableLocationKey").makeImmutable(); } @Override @@ -94,8 +93,8 @@ protected final void deliverInitialSnapshot(@NotNull final Listener listener) { /** * See TableLocationState for documentation of values. * - * @param index The new index. Ownership passes to this location; callers should - * {@link ReadOnlyIndex#clone() clone} it if necessary. + * @param index The new index. Ownership passes to this location; callers should {@link ReadOnlyIndex#clone() clone} + * it if necessary. 
* @param lastModifiedTimeMillis The new lastModificationTimeMillis */ public final void handleUpdate(final ReadOnlyIndex index, final long lastModifiedTimeMillis) { @@ -105,9 +104,8 @@ public final void handleUpdate(final ReadOnlyIndex index, final long lastModifie } /** - * Update all state fields from source's values, as in - * {@link #handleUpdate(ReadOnlyIndex, long)}. See {@link TableLocationState} for documentation - * of values. + * Update all state fields from source's values, as in {@link #handleUpdate(ReadOnlyIndex, long)}. See + * {@link TableLocationState} for documentation of values. * * @param source The source to copy state values from */ diff --git a/DB/src/main/java/io/deephaven/db/v2/locations/impl/AbstractTableLocationProvider.java b/DB/src/main/java/io/deephaven/db/v2/locations/impl/AbstractTableLocationProvider.java index d3a94597b77..b76171062f0 100644 --- a/DB/src/main/java/io/deephaven/db/v2/locations/impl/AbstractTableLocationProvider.java +++ b/DB/src/main/java/io/deephaven/db/v2/locations/impl/AbstractTableLocationProvider.java @@ -10,24 +10,23 @@ import java.util.Collections; /** - * Partial {@link TableLocationProvider} implementation for standalone use or as part of a - * {@link TableDataService}. + * Partial {@link TableLocationProvider} implementation for standalone use or as part of a {@link TableDataService}. *

    - * Presents an interface similar to {@link TableLocationProvider.Listener} for subclasses to use - * when communicating with the parent; see {@link #handleTableLocationKey(TableLocationKey)}. + * Presents an interface similar to {@link TableLocationProvider.Listener} for subclasses to use when communicating with + * the parent; see {@link #handleTableLocationKey(TableLocationKey)}. *

    - * Note that subclasses are responsible for determining when it's appropriate to call - * {@link #setInitialized()} and/or override {@link #doInitialization()}. + * Note that subclasses are responsible for determining when it's appropriate to call {@link #setInitialized()} and/or + * override {@link #doInitialization()}. */ public abstract class AbstractTableLocationProvider - extends SubscriptionAggregator - implements TableLocationProvider { + extends SubscriptionAggregator + implements TableLocationProvider { private final ImmutableTableKey tableKey; /** - * Map from {@link TableLocationKey} to itself, or to a {@link TableLocation}. The values are - * {@link TableLocation}s if: + * Map from {@link TableLocationKey} to itself, or to a {@link TableLocation}. The values are {@link TableLocation}s + * if: *

      *
    1. The location has been requested via {@link #getTableLocation(TableLocationKey)} or * {@link #getTableLocationIfPresent(TableLocationKey)}
    2. @@ -35,11 +34,11 @@ public abstract class AbstractTableLocationProvider *
    */ private final KeyedObjectHashMap tableLocations = - new KeyedObjectHashMap<>(LocationKeyDefinition.INSTANCE); + new KeyedObjectHashMap<>(LocationKeyDefinition.INSTANCE); @SuppressWarnings("unchecked") private final Collection unmodifiableTableLocationKeys = - (Collection) (Collection) Collections - .unmodifiableCollection(tableLocations.keySet()); + (Collection) (Collection) Collections + .unmodifiableCollection(tableLocations.keySet()); private volatile boolean initialized; @@ -51,8 +50,7 @@ public abstract class AbstractTableLocationProvider * @param tableKey A key that will be used by this provider * @param supportsSubscriptions Whether this provider should support subscriptions */ - protected AbstractTableLocationProvider(@NotNull final TableKey tableKey, - final boolean supportsSubscriptions) { + protected AbstractTableLocationProvider(@NotNull final TableKey tableKey, final boolean supportsSubscriptions) { super(supportsSubscriptions); this.tableKey = tableKey.makeImmutable(); } @@ -80,8 +78,7 @@ public final ImmutableTableKey getKey() { // ------------------------------------------------------------------------------------------------------------------ @Override - protected final void deliverInitialSnapshot( - @NotNull final TableLocationProvider.Listener listener) { + protected final void deliverInitialSnapshot(@NotNull final TableLocationProvider.Listener listener) { unmodifiableTableLocationKeys.forEach(listener::handleTableLocationKey); } @@ -89,8 +86,7 @@ protected final void deliverInitialSnapshot( * Deliver a possibly-new key. * * @param locationKey The new key - * @apiNote This method is intended to be used by subclasses or by tightly-coupled discovery - * tools. + * @apiNote This method is intended to be used by subclasses or by tightly-coupled discovery tools. 
*/ protected final void handleTableLocationKey(@NotNull final TableLocationKey locationKey) { if (!supportsSubscriptions()) { @@ -98,17 +94,14 @@ protected final void handleTableLocationKey(@NotNull final TableLocationKey loca return; } synchronized (subscriptions) { - // Since we're holding the lock on subscriptions, the following code is overly - // complicated - we could - // certainly just deliver the notification in observeInsert. That said, I'm happier with - // this approach, - // as it minimizes lock duration for tableLocations, exemplifies correct use of - // putIfAbsent, and keeps + // Since we're holding the lock on subscriptions, the following code is overly complicated - we could + // certainly just deliver the notification in observeInsert. That said, I'm happier with this approach, + // as it minimizes lock duration for tableLocations, exemplifies correct use of putIfAbsent, and keeps // observeInsert out of the business of subscription processing. locationCreatedRecorder = false; final Object result = tableLocations.putIfAbsent(locationKey, this::observeInsert); - if (locationCreatedRecorder && subscriptions.deliverNotification( - Listener::handleTableLocationKey, toKeyImmutable(result), true)) { + if (locationCreatedRecorder && subscriptions.deliverNotification(Listener::handleTableLocationKey, + toKeyImmutable(result), true)) { onEmpty(); } } @@ -139,9 +132,8 @@ public final TableLocationProvider ensureInitialized() { } /** - * Internal method for subclasses to call to determine if they need to call - * {@link #ensureInitialized()}, if doing so might entail extra work (e.g. enqueueing an - * asynchronous job). + * Internal method for subclasses to call to determine if they need to call {@link #ensureInitialized()}, if doing + * so might entail extra work (e.g. enqueueing an asynchronous job). 
* * @return Whether {@link #setInitialized()} has been called */ @@ -150,17 +142,16 @@ protected final boolean isInitialized() { } /** - * Internal method for subclasses to call when they consider themselves to have been - * initialized. + * Internal method for subclasses to call when they consider themselves to have been initialized. */ protected final void setInitialized() { initialized = true; } /** - * Initialization method for subclasses to override, in case simply calling {@link #refresh()} - * is inappropriate. This is *not* guaranteed to be called only once. It should internally call - * {@link #setInitialized()} upon successful initialization. + * Initialization method for subclasses to override, in case simply calling {@link #refresh()} is inappropriate. + * This is *not* guaranteed to be called only once. It should internally call {@link #setInitialized()} upon + * successful initialization. */ protected void doInitialization() { refresh(); @@ -180,15 +171,13 @@ public final boolean hasTableLocationKey(@NotNull final TableLocationKey tableLo @Override @Nullable - public TableLocation getTableLocationIfPresent( - @NotNull final TableLocationKey tableLocationKey) { + public TableLocation getTableLocationIfPresent(@NotNull final TableLocationKey tableLocationKey) { Object current = tableLocations.get(tableLocationKey); if (current == null) { return null; } // See JavaDoc on tableLocations for background. - // The intent is to create a TableLocation exactly once to replace the TableLocationKey - // placeholder that was + // The intent is to create a TableLocation exactly once to replace the TableLocationKey placeholder that was // added in handleTableLocationKey. if (current instanceof TableLocation) { return (TableLocation) current; @@ -205,14 +194,11 @@ public TableLocation getTableLocationIfPresent( } /** - * Key definition for {@link TableLocation} or {@link TableLocationKey} lookup by - * {@link TableLocationKey}. 
+ * Key definition for {@link TableLocation} or {@link TableLocationKey} lookup by {@link TableLocationKey}. */ - private static final class LocationKeyDefinition - extends KeyedObjectKey.Basic { + private static final class LocationKeyDefinition extends KeyedObjectKey.Basic { - private static final KeyedObjectKey INSTANCE = - new LocationKeyDefinition(); + private static final KeyedObjectKey INSTANCE = new LocationKeyDefinition(); private LocationKeyDefinition() {} @@ -230,8 +216,7 @@ private static TableLocationKey toKey(@NotNull final Object keyOrLocation) { return ((TableLocationKey) keyOrLocation); } throw new IllegalArgumentException( - "toKey expects a TableLocation or a TableLocationKey, instead received a " - + keyOrLocation.getClass()); + "toKey expects a TableLocation or a TableLocationKey, instead received a " + keyOrLocation.getClass()); } private static ImmutableTableLocationKey toKeyImmutable(@NotNull final Object keyOrLocation) { diff --git a/DB/src/main/java/io/deephaven/db/v2/locations/impl/CompositeTableDataService.java b/DB/src/main/java/io/deephaven/db/v2/locations/impl/CompositeTableDataService.java index d556b3a41b2..ed0cc06d0a8 100644 --- a/DB/src/main/java/io/deephaven/db/v2/locations/impl/CompositeTableDataService.java +++ b/DB/src/main/java/io/deephaven/db/v2/locations/impl/CompositeTableDataService.java @@ -17,14 +17,12 @@ import java.util.stream.Collectors; /** - * Routing {@link TableDataService} that applies a selector function to pick service(s) for each - * request. It is assumed that each service will provide access to a non-overlapping set of table - * locations for any table key. + * Routing {@link TableDataService} that applies a selector function to pick service(s) for each request. It is assumed + * that each service will provide access to a non-overlapping set of table locations for any table key. 
*/ public class CompositeTableDataService extends AbstractTableDataService { - private static final String IMPLEMENTATION_NAME = - CompositeTableDataService.class.getSimpleName(); + private static final String IMPLEMENTATION_NAME = CompositeTableDataService.class.getSimpleName(); private final ServiceSelector serviceSelector; @@ -49,11 +47,9 @@ default String describe() { /** * @param name optional name for this service - * @param serviceSelector Function to map a table key to a set of services that should be - * queried. + * @param serviceSelector Function to map a table key to a set of services that should be queried. */ - public CompositeTableDataService(@NotNull String name, - @NotNull final ServiceSelector serviceSelector) { + public CompositeTableDataService(@NotNull String name, @NotNull final ServiceSelector serviceSelector) { super(name); this.serviceSelector = Require.neqNull(serviceSelector, "serviceSelector"); } @@ -75,8 +71,7 @@ public void reset(@NotNull final TableKey key) { protected TableLocationProvider makeTableLocationProvider(@NotNull final TableKey tableKey) { final TableDataService[] services = serviceSelector.call(tableKey); if (services == null || services.length == 0) { - throw new TableDataException( - "No services found for " + tableKey + " in " + serviceSelector); + throw new TableDataException("No services found for " + tableKey + " in " + serviceSelector); } if (services.length == 1) { return services[0].getTableLocationProvider(tableKey); @@ -92,10 +87,10 @@ private class TableLocationProviderImpl implements TableLocationProvider { private final String implementationName; private TableLocationProviderImpl(@NotNull final TableDataService[] inputServices, - @NotNull final TableKey tableKey) { + @NotNull final TableKey tableKey) { this.tableKey = tableKey.makeImmutable(); - inputProviders = Arrays.stream(inputServices) - .map(s -> s.getTableLocationProvider(this.tableKey)).collect(Collectors.toList()); + inputProviders = 
Arrays.stream(inputServices).map(s -> s.getTableLocationProvider(this.tableKey)) + .collect(Collectors.toList()); implementationName = "Composite-" + inputProviders; } @@ -149,30 +144,26 @@ public TableLocationProvider ensureInitialized() { @Override @NotNull public Collection getTableLocationKeys() { - final Set locationKeys = - new KeyedObjectHashSet<>(KeyKeyDefinition.INSTANCE); - try (final SafeCloseable ignored = - CompositeTableDataServiceConsistencyMonitor.INSTANCE.start()) { + final Set locationKeys = new KeyedObjectHashSet<>(KeyKeyDefinition.INSTANCE); + try (final SafeCloseable ignored = CompositeTableDataServiceConsistencyMonitor.INSTANCE.start()) { inputProviders.stream() - .map(TableLocationProvider::getTableLocationKeys) - .flatMap(Collection::stream) - .filter(x -> !locationKeys.add(x)) - .findFirst() - .ifPresent(duplicateLocationKey -> { - final String overlappingProviders = inputProviders.stream() - .filter(inputProvider -> inputProvider - .hasTableLocationKey(duplicateLocationKey)) - .map(TableLocationProvider::getName) - .collect(Collectors.joining(",")); - throw new TableDataException( - "Data Routing Configuration error: TableDataService elements overlap at location " - + - duplicateLocationKey + - " in providers " + overlappingProviders + - ". 
Full TableDataService configuration:\n" + - Formatter.formatTableDataService( - CompositeTableDataService.this.toString())); - }); + .map(TableLocationProvider::getTableLocationKeys) + .flatMap(Collection::stream) + .filter(x -> !locationKeys.add(x)) + .findFirst() + .ifPresent(duplicateLocationKey -> { + final String overlappingProviders = inputProviders.stream() + .filter(inputProvider -> inputProvider.hasTableLocationKey(duplicateLocationKey)) + .map(TableLocationProvider::getName) + .collect(Collectors.joining(",")); + throw new TableDataException( + "Data Routing Configuration error: TableDataService elements overlap at location " + + duplicateLocationKey + + " in providers " + overlappingProviders + + ". Full TableDataService configuration:\n" + + Formatter + .formatTableDataService(CompositeTableDataService.this.toString())); + }); return Collections.unmodifiableCollection(locationKeys); } } @@ -180,31 +171,25 @@ public Collection getTableLocationKeys() { @Override public boolean hasTableLocationKey(@NotNull final TableLocationKey tableLocationKey) { return inputProviders.stream() - .anyMatch(inputProvider -> inputProvider.hasTableLocationKey(tableLocationKey)); + .anyMatch(inputProvider -> inputProvider.hasTableLocationKey(tableLocationKey)); } @Override @Nullable - public TableLocation getTableLocationIfPresent( - @NotNull final TableLocationKey tableLocationKey) { + public TableLocation getTableLocationIfPresent(@NotNull final TableLocationKey tableLocationKey) { // hang onto the first location and provider, so we can report well on any duplicates TableLocation location = null; TableLocationProvider provider = null; - try (final SafeCloseable ignored = - CompositeTableDataServiceConsistencyMonitor.INSTANCE.start()) { + try (final SafeCloseable ignored = CompositeTableDataServiceConsistencyMonitor.INSTANCE.start()) { for (final TableLocationProvider tlp : inputProviders) { - final TableLocation candidateLocation = - 
tlp.getTableLocationIfPresent(tableLocationKey); + final TableLocation candidateLocation = tlp.getTableLocationIfPresent(tableLocationKey); if (candidateLocation != null) { if (location != null) { - throw new TableDataException( - "TableDataService elements " + provider.getName() + - " and " + tlp.getName() + " overlap at location " - + location.toGenericString() + + throw new TableDataException("TableDataService elements " + provider.getName() + + " and " + tlp.getName() + " overlap at location " + location.toGenericString() + ". Full TableDataService configuration:\n" + - Formatter.formatTableDataService( - CompositeTableDataService.this.toString())); + Formatter.formatTableDataService(CompositeTableDataService.this.toString())); } location = candidateLocation; provider = tlp; @@ -223,17 +208,17 @@ public String getImplementationName() { @Override public String toString() { return getImplementationName() + '{' + - (getName() == null ? "" : "name=" + getName() + ", ") + - "serviceSelector=" + serviceSelector + - '}'; + (getName() == null ? "" : "name=" + getName() + ", ") + + "serviceSelector=" + serviceSelector + + '}'; } @Override public String describe() { return getImplementationName() + '{' + - (getName() == null ? "" : "name=" + getName() + ", ") + - "serviceSelector=" + serviceSelector.describe() + - '}'; + (getName() == null ? 
"" : "name=" + getName() + ", ") + + "serviceSelector=" + serviceSelector.describe() + + '}'; } // ------------------------------------------------------------------------------------------------------------------ @@ -241,16 +226,15 @@ public String describe() { // ------------------------------------------------------------------------------------------------------------------ private static final class KeyKeyDefinition - extends KeyedObjectKey.Basic { + extends KeyedObjectKey.Basic { private static final KeyedObjectKey INSTANCE = - new KeyKeyDefinition(); + new KeyKeyDefinition(); private KeyKeyDefinition() {} @Override - public ImmutableTableLocationKey getKey( - @NotNull final ImmutableTableLocationKey tableLocationKey) { + public ImmutableTableLocationKey getKey(@NotNull final ImmutableTableLocationKey tableLocationKey) { return tableLocationKey; } } diff --git a/DB/src/main/java/io/deephaven/db/v2/locations/impl/CompositeTableDataServiceConsistencyMonitor.java b/DB/src/main/java/io/deephaven/db/v2/locations/impl/CompositeTableDataServiceConsistencyMonitor.java index ae09c5f5bb6..9c36d4ce718 100644 --- a/DB/src/main/java/io/deephaven/db/v2/locations/impl/CompositeTableDataServiceConsistencyMonitor.java +++ b/DB/src/main/java/io/deephaven/db/v2/locations/impl/CompositeTableDataServiceConsistencyMonitor.java @@ -13,19 +13,17 @@ public class CompositeTableDataServiceConsistencyMonitor { */ static final FunctionConsistencyMonitor INSTANCE = new FunctionConsistencyMonitor(); - public static class ConsistentSupplier - extends FunctionConsistencyMonitor.ConsistentSupplier { + public static class ConsistentSupplier extends FunctionConsistencyMonitor.ConsistentSupplier { public ConsistentSupplier(Supplier underlyingSupplier) { super(CompositeTableDataServiceConsistencyMonitor.INSTANCE, underlyingSupplier); } } private final static ConsistentSupplier defaultCurrentDateNySupplier = - new ConsistentSupplier<>(DBTimeUtils::currentDateNy); + new 
ConsistentSupplier<>(DBTimeUtils::currentDateNy); /** - * Get the consistent value of currentDateNy() as defined by - * {@link DBTimeUtils#currentDateNy()}. + * Get the consistent value of currentDateNy() as defined by {@link DBTimeUtils#currentDateNy()}. * * @return the current date in the New York time zone. */ @@ -34,8 +32,8 @@ public static String currentDateNy() { } /** - * The same thing as {@link #currentDateNy()}, but with a different name so you can import it - * more easily and be sure you are getting the right value. + * The same thing as {@link #currentDateNy()}, but with a different name so you can import it more easily and be + * sure you are getting the right value. */ public static String consistentDateNy() { return defaultCurrentDateNySupplier.get(); diff --git a/DB/src/main/java/io/deephaven/db/v2/locations/impl/FilteredTableDataService.java b/DB/src/main/java/io/deephaven/db/v2/locations/impl/FilteredTableDataService.java index 6ea5b83d046..97f5e78fb1b 100644 --- a/DB/src/main/java/io/deephaven/db/v2/locations/impl/FilteredTableDataService.java +++ b/DB/src/main/java/io/deephaven/db/v2/locations/impl/FilteredTableDataService.java @@ -16,13 +16,11 @@ import java.util.stream.Collectors; /** - * {@link TableDataService} implementation with support to filter the provided - * {@link TableLocation}s. + * {@link TableDataService} implementation with support to filter the provided {@link TableLocation}s. 
*/ public class FilteredTableDataService extends AbstractTableDataService { - private static final String IMPLEMENTATION_NAME = - FilteredTableDataService.class.getSimpleName(); + private static final String IMPLEMENTATION_NAME = FilteredTableDataService.class.getSimpleName(); private final TableDataService serviceToFilter; private final LocationKeyFilter locationKeyFilter; @@ -44,7 +42,7 @@ public interface LocationKeyFilter { * @param locationKeyFilter The filter function */ public FilteredTableDataService(@NotNull final TableDataService serviceToFilter, - @NotNull final LocationKeyFilter locationKeyFilter) { + @NotNull final LocationKeyFilter locationKeyFilter) { super("Filtered-" + Require.neqNull(serviceToFilter, "serviceToFilter").getName()); this.serviceToFilter = Require.neqNull(serviceToFilter, "serviceToFilter"); this.locationKeyFilter = Require.neqNull(locationKeyFilter, "locationKeyFilter"); @@ -129,13 +127,12 @@ public TableLocationProvider ensureInitialized() { @Override public @NotNull Collection getTableLocationKeys() { return inputProvider.getTableLocationKeys().stream().filter(locationKeyFilter::accept) - .collect(Collectors.toList()); + .collect(Collectors.toList()); } @Override public boolean hasTableLocationKey(@NotNull final TableLocationKey tableLocationKey) { - return locationKeyFilter.accept(tableLocationKey) - && inputProvider.hasTableLocationKey(tableLocationKey); + return locationKeyFilter.accept(tableLocationKey) && inputProvider.hasTableLocationKey(tableLocationKey); } @Nullable @@ -154,18 +151,17 @@ public String getName() { } private class FilteringListener extends WeakReferenceWrapper - implements TableLocationProvider.Listener { + implements TableLocationProvider.Listener { private FilteringListener(@NotNull final TableLocationProvider.Listener outputListener) { super(outputListener); } @Override - public void handleTableLocationKey( - @NotNull final ImmutableTableLocationKey tableLocationKey) { + public void 
handleTableLocationKey(@NotNull final ImmutableTableLocationKey tableLocationKey) { final TableLocationProvider.Listener outputListener = getWrapped(); - // We can't try to clean up null listeners here, the underlying implementation may not - // allow concurrent unsubscribe operations. + // We can't try to clean up null listeners here, the underlying implementation may not allow concurrent + // unsubscribe operations. if (outputListener != null && locationKeyFilter.accept(tableLocationKey)) { outputListener.handleTableLocationKey(tableLocationKey); } @@ -194,18 +190,18 @@ public String getImplementationName() { @Override public String toString() { return getImplementationName() + '{' + - (getName() != null ? "name=" + getName() + ", " : "") + - "locationKeyFilter=" + locationKeyFilter + - ", serviceToFilter=" + serviceToFilter + - '}'; + (getName() != null ? "name=" + getName() + ", " : "") + + "locationKeyFilter=" + locationKeyFilter + + ", serviceToFilter=" + serviceToFilter + + '}'; } @Override public String describe() { return getImplementationName() + '{' + - (getName() != null ? "name=" + getName() + ", " : "") + - "locationKeyFilter=" + locationKeyFilter + - ", serviceToFilter=" + serviceToFilter.describe() + - '}'; + (getName() != null ? 
"name=" + getName() + ", " : "") + + "locationKeyFilter=" + locationKeyFilter + + ", serviceToFilter=" + serviceToFilter.describe() + + '}'; } } diff --git a/DB/src/main/java/io/deephaven/db/v2/locations/impl/FunctionConsistencyMonitor.java b/DB/src/main/java/io/deephaven/db/v2/locations/impl/FunctionConsistencyMonitor.java index 6a22e48d5b9..782fa7ae4c5 100644 --- a/DB/src/main/java/io/deephaven/db/v2/locations/impl/FunctionConsistencyMonitor.java +++ b/DB/src/main/java/io/deephaven/db/v2/locations/impl/FunctionConsistencyMonitor.java @@ -8,8 +8,8 @@ public class FunctionConsistencyMonitor { /** - * If a user function returns null, we store this value; which enables us to return null for - * real and distinguish from a null because the function has not been called yet. + * If a user function returns null, we store this value; which enables us to return null for real and distinguish + * from a null because the function has not been called yet. */ private static final Object NULL_SENTINEL_OBJECT = new Object(); @@ -19,8 +19,7 @@ public class FunctionConsistencyMonitor { private final AtomicInteger functionCount = new AtomicInteger(0); /** - * The currentValues, initialized by the {@link #startConsistentBlock()} when performing a - * series of computations. + * The currentValues, initialized by the {@link #startConsistentBlock()} when performing a series of computations. */ private final ThreadLocal currentValues = new ThreadLocal<>(); @@ -41,9 +40,8 @@ private int registerFunction() { /** * Called before a user initiates a series of consistent functions. * - * The primary use case is the CompositeTableDataService needs to determine which location - * providers are responsive. Each provider must have a consistent value for formulas, such as - * the currentDate. + * The primary use case is the CompositeTableDataService needs to determine which location providers are responsive. + * Each provider must have a consistent value for formulas, such as the currentDate. 
* * @return true if a consistent block was started (and thus must be closed); false otherwise */ @@ -80,11 +78,10 @@ private void endConsistentBlock() { } /** - * Compute the value for the function at location. If are outside a consistent block, just - * return the function. + * Compute the value for the function at location. If are outside a consistent block, just return the function. * - * The first time we compute a function within a consistent block, call function; otherwise - * return the previously computed value. + * The first time we compute a function within a consistent block, call function; otherwise return the previously + * computed value. * * @param location the location returned from {@link #registerFunction()} * @param function the function to compute @@ -102,11 +99,10 @@ private T computeValue(int location, Supplier function) { if (values.length <= location) { final int currentCount = functionCount.get(); if (location >= currentCount) { - throw new IllegalStateException( - "Location was not registered with this monitor " + location); + throw new IllegalStateException("Location was not registered with this monitor " + location); } - // we registered the function after creating the array for consistent invocation, update - // the size of our slots array + // we registered the function after creating the array for consistent invocation, update the size of our + // slots array values = Arrays.copyOf(values, currentCount); currentValues.set(values); } @@ -148,9 +144,8 @@ public void close() { } /** - * A supplier that uses a FunctionConsistencyMonitor to ensure that multiple invocations to the - * same function always return the same value, even if underlying conditions (like the date) - * change. + * A supplier that uses a FunctionConsistencyMonitor to ensure that multiple invocations to the same function always + * return the same value, even if underlying conditions (like the date) change. 
* * @param the return type of this supplier */ @@ -159,8 +154,7 @@ public static class ConsistentSupplier implements Supplier { private final Supplier underlyingSupplier; private final int id; - public ConsistentSupplier(FunctionConsistencyMonitor monitor, - Supplier underlyingSupplier) { + public ConsistentSupplier(FunctionConsistencyMonitor monitor, Supplier underlyingSupplier) { this.monitor = monitor; this.underlyingSupplier = underlyingSupplier; this.id = monitor.registerFunction(); diff --git a/DB/src/main/java/io/deephaven/db/v2/locations/impl/KnownLocationKeyFinder.java b/DB/src/main/java/io/deephaven/db/v2/locations/impl/KnownLocationKeyFinder.java index 086f51ad116..897ab0b5e51 100644 --- a/DB/src/main/java/io/deephaven/db/v2/locations/impl/KnownLocationKeyFinder.java +++ b/DB/src/main/java/io/deephaven/db/v2/locations/impl/KnownLocationKeyFinder.java @@ -10,11 +10,10 @@ import java.util.function.Consumer; /** - * {@link TableLocationKeyFinder Location finder} that delivers a known set of - * {@link ImmutableTableLocationKey keys}. + * {@link TableLocationKeyFinder Location finder} that delivers a known set of {@link ImmutableTableLocationKey keys}. */ public final class KnownLocationKeyFinder - implements TableLocationKeyFinder { + implements TableLocationKeyFinder { private final List knownKeys; @@ -22,11 +21,11 @@ public final class KnownLocationKeyFinder public KnownLocationKeyFinder(@NotNull final TLK... knownKeys) { Require.elementsNeqNull(knownKeys, "knownKeys"); this.knownKeys = knownKeys.length == 0 - ? Collections.emptyList() - : Collections.unmodifiableList( - knownKeys.length == 1 - ? Collections.singletonList(knownKeys[0]) - : Arrays.asList(knownKeys)); + ? Collections.emptyList() + : Collections.unmodifiableList( + knownKeys.length == 1 + ? 
Collections.singletonList(knownKeys[0]) + : Arrays.asList(knownKeys)); } /** diff --git a/DB/src/main/java/io/deephaven/db/v2/locations/impl/NonexistentTableLocation.java b/DB/src/main/java/io/deephaven/db/v2/locations/impl/NonexistentTableLocation.java index 79b9a233f02..c39e6a98490 100644 --- a/DB/src/main/java/io/deephaven/db/v2/locations/impl/NonexistentTableLocation.java +++ b/DB/src/main/java/io/deephaven/db/v2/locations/impl/NonexistentTableLocation.java @@ -8,16 +8,14 @@ import org.jetbrains.annotations.NotNull; /** - * {@link TableLocation} implementation for locations that are found to not actually exist when - * accessed. + * {@link TableLocation} implementation for locations that are found to not actually exist when accessed. */ public final class NonexistentTableLocation extends AbstractTableLocation { - private static final String IMPLEMENTATION_NAME = - NonexistentTableLocation.class.getSimpleName(); + private static final String IMPLEMENTATION_NAME = NonexistentTableLocation.class.getSimpleName(); public NonexistentTableLocation(@NotNull final TableKey tableKey, - @NotNull final TableLocationKey tableLocationKey) { + @NotNull final TableLocationKey tableLocationKey) { super(tableKey, tableLocationKey, false); handleUpdate(Index.CURRENT_FACTORY.getEmptyIndex(), NULL_TIME); } diff --git a/DB/src/main/java/io/deephaven/db/v2/locations/impl/ParallelDeferredGroupingProvider.java b/DB/src/main/java/io/deephaven/db/v2/locations/impl/ParallelDeferredGroupingProvider.java index ba034ca61a3..1efc22f8fae 100644 --- a/DB/src/main/java/io/deephaven/db/v2/locations/impl/ParallelDeferredGroupingProvider.java +++ b/DB/src/main/java/io/deephaven/db/v2/locations/impl/ParallelDeferredGroupingProvider.java @@ -21,18 +21,15 @@ import java.util.stream.Collectors; /** - * Grouping provider that loads column location metadata and assembles grouping indexes lazily on - * demand. 
+ * Grouping provider that loads column location metadata and assembles grouping indexes lazily on demand. */ -public class ParallelDeferredGroupingProvider - implements KeyRangeGroupingProvider { +public class ParallelDeferredGroupingProvider implements KeyRangeGroupingProvider { private static final boolean SORT_RANGES = false; private final ColumnDefinition columnDefinition; - public ParallelDeferredGroupingProvider( - @NotNull final ColumnDefinition columnDefinition) { + public ParallelDeferredGroupingProvider(@NotNull final ColumnDefinition columnDefinition) { this.columnDefinition = columnDefinition; } @@ -81,15 +78,14 @@ private static class Source { private final long lastKey; /** - * We a reference to the resultant grouping information. This allows us to avoid re-reading - * grouping information. + * We a reference to the resultant grouping information. This allows us to avoid re-reading grouping + * information. *

    * The reference is soft to avoid having the cache result in an OOM. */ private SoftReference> cachedResult; - private Source(@NotNull final ColumnLocation columnLocation, final long firstKey, - final long lastKey) { + private Source(@NotNull final ColumnLocation columnLocation, final long firstKey, final long lastKey) { Require.neqNull(columnLocation, "columnLocation"); Require.leq(firstKey, "firstKey", lastKey, "lastKey"); this.columnLocation = columnLocation; @@ -100,19 +96,18 @@ private Source(@NotNull final ColumnLocation columnLocation, final long firstKey /** * Load the grouping metadata for from this source. *

    - * If the grouping information was already read, get it from the cached results map. Any - * grouping information read by this method will be stored in the cached results map. + * If the grouping information was already read, get it from the cached results map. Any grouping information + * read by this method will be stored in the cached results map. * * @param columnDefinition The definition of this column - * @return Grouping metadata as a map from value to position range within this source, or - * null if the grouping information was not present + * @return Grouping metadata as a map from value to position range within this source, or null if the grouping + * information was not present */ - private Map loadMetadata( - @NotNull final ColumnDefinition columnDefinition) { + private Map loadMetadata(@NotNull final ColumnDefinition columnDefinition) { if (!columnLocation.exists()) { // noinspection unchecked return (Map) Collections.singletonMap(null, - new long[] {0, lastKey - firstKey + 1}); + new long[] {0, lastKey - firstKey + 1}); } Map valuesToLocationIndexRange = null; @@ -122,8 +117,8 @@ private Map loadMetadata( if (valuesToLocationIndexRange == null) { // noinspection unchecked - if ((valuesToLocationIndexRange = (Map) columnLocation - .getMetadata(columnDefinition)) != null) { + if ((valuesToLocationIndexRange = + (Map) columnLocation.getMetadata(columnDefinition)) != null) { cachedResult = new SoftReference<>(valuesToLocationIndexRange); } } @@ -134,22 +129,21 @@ private Map loadMetadata( /** * Apply validation and transformation steps for this source's result. * - * @param valuesToLocationIndexRange The metadata as a map from value to position range - * within this source + * @param valuesToLocationIndexRange The metadata as a map from value to position range within this source * @return A list of grouping items to be applied to grouping builder. 
*/ private List> validateAndTransformMetadata( - @NotNull final Map valuesToLocationIndexRange) { + @NotNull final Map valuesToLocationIndexRange) { final long locationSize = lastKey - firstKey + 1; if (valuesToLocationIndexRange.isEmpty()) { - // NB: It's impossible for the location to be legitimately empty, since the - // constructor validates that firstKey <= lastKey. - throw new IllegalStateException("Invalid empty grouping for " + columnLocation - + ": expected " + locationSize + " rows"); + // NB: It's impossible for the location to be legitimately empty, since the constructor validates that + // firstKey <= lastKey. + throw new IllegalStateException( + "Invalid empty grouping for " + columnLocation + ": expected " + locationSize + " rows"); } - final Object indicativeValue = valuesToLocationIndexRange.values().stream().findAny() - .orElseThrow(IllegalStateException::new); + final Object indicativeValue = + valuesToLocationIndexRange.values().stream().findAny().orElseThrow(IllegalStateException::new); final RangeAccessor rangeAccessor; if (indicativeValue.getClass() == int[].class) { // noinspection unchecked @@ -158,59 +152,52 @@ private List> validateAndTransformMetadata( // noinspection unchecked rangeAccessor = (RangeAccessor) LONG_RANGE_ACCESSOR; } else { - throw new UnsupportedOperationException("Unexpected range type " - + indicativeValue.getClass() + " in grouping metadata for " + columnLocation); + throw new UnsupportedOperationException("Unexpected range type " + indicativeValue.getClass() + + " in grouping metadata for " + columnLocation); } - final List> result = - new ArrayList<>(valuesToLocationIndexRange.size()); + final List> result = new ArrayList<>(valuesToLocationIndexRange.size()); long totalRangeSize = 0; if (SORT_RANGES) { final Map reversedMap = new TreeMap<>(rangeAccessor); - for (final Map.Entry entry : valuesToLocationIndexRange - .entrySet()) { + for (final Map.Entry entry : valuesToLocationIndexRange.entrySet()) { 
reversedMap.put(entry.getValue(), entry.getKey()); } for (final Map.Entry entry : reversedMap.entrySet()) { - final long firstPositionInclusive = - rangeAccessor.getRangeStartInclusive(entry.getKey()); - final long lastPositionInclusive = - rangeAccessor.getRangeEndInclusive(entry.getKey()); - result.add(new GroupingItem<>(entry.getValue(), - firstPositionInclusive + firstKey, lastPositionInclusive + firstKey)); + final long firstPositionInclusive = rangeAccessor.getRangeStartInclusive(entry.getKey()); + final long lastPositionInclusive = rangeAccessor.getRangeEndInclusive(entry.getKey()); + result.add(new GroupingItem<>(entry.getValue(), firstPositionInclusive + firstKey, + lastPositionInclusive + firstKey)); totalRangeSize += lastPositionInclusive - firstPositionInclusive + 1; } } else { - for (final Map.Entry entry : valuesToLocationIndexRange - .entrySet()) { - final long firstPositionInclusive = - rangeAccessor.getRangeStartInclusive(entry.getValue()); - final long lastPositionInclusive = - rangeAccessor.getRangeEndInclusive(entry.getValue()); + for (final Map.Entry entry : valuesToLocationIndexRange.entrySet()) { + final long firstPositionInclusive = rangeAccessor.getRangeStartInclusive(entry.getValue()); + final long lastPositionInclusive = rangeAccessor.getRangeEndInclusive(entry.getValue()); result.add(new GroupingItem<>(entry.getKey(), firstPositionInclusive + firstKey, - lastPositionInclusive + firstKey)); + lastPositionInclusive + firstKey)); totalRangeSize += lastPositionInclusive - firstPositionInclusive + 1; } } if (locationSize != totalRangeSize) { - throw new IllegalStateException("Invalid grouping for " + columnLocation - + ": found " + totalRangeSize + " rows, but expected " + locationSize); + throw new IllegalStateException("Invalid grouping for " + columnLocation + ": found " + totalRangeSize + + " rows, but expected " + locationSize); } return result; } /** - * Get a list of grouping items that represent the grouping information from this 
source, or - * null if grouping information was not present. + * Get a list of grouping items that represent the grouping information from this source, or null if grouping + * information was not present. *

    - * If the grouping information was already read, get it from the cached results map. Any - * grouping information read by this method will be stored in the cached results map. + * If the grouping information was already read, get it from the cached results map. Any grouping information + * read by this method will be stored in the cached results map. * * @param columnDefinition The definition of this column * @return A list of grouping items on success, else null */ private List> getTransformedMetadata( - @NotNull final ColumnDefinition columnDefinition) { + @NotNull final ColumnDefinition columnDefinition) { final Map metadata = loadMetadata(columnDefinition); return metadata == null ? null : validateAndTransformMetadata(metadata); } @@ -228,10 +215,9 @@ private GroupingItem(final DATA_TYPE value, final long firstKey, final long last this.lastKey = lastKey; } - private void updateBuilder( - @NotNull final Map valueToBuilder) { - valueToBuilder.computeIfAbsent(value, v -> Index.FACTORY.getSequentialBuilder()) - .appendRange(firstKey, lastKey); + private void updateBuilder(@NotNull final Map valueToBuilder) { + valueToBuilder.computeIfAbsent(value, v -> Index.FACTORY.getSequentialBuilder()).appendRange(firstKey, + lastKey); } } @@ -239,53 +225,51 @@ private void updateBuilder( @Override public void addSource(@NotNull final ColumnLocation columnLocation, - @NotNull final ReadOnlyIndex locationIndexInTable) { + @NotNull final ReadOnlyIndex locationIndexInTable) { final long firstKey = locationIndexInTable.firstKey(); final long lastKey = locationIndexInTable.lastKey(); if (lastKey - firstKey + 1 != locationIndexInTable.size()) { /* - * TODO (https://github.com/deephaven/deephaven-core/issues/816): This constraint is - * valid for all existing formats that support grouping. Address when we integrate - * grouping/index tables. 
+ * TODO (https://github.com/deephaven/deephaven-core/issues/816): This constraint is valid for all existing + * formats that support grouping. Address when we integrate grouping/index tables. */ - throw new IllegalArgumentException(ParallelDeferredGroupingProvider.class - + " only supports a single range per location"); + throw new IllegalArgumentException( + ParallelDeferredGroupingProvider.class + " only supports a single range per location"); } sources.add(new Source<>(columnLocation, firstKey, lastKey)); } - private Map buildGrouping( - @NotNull final List> includedSources) { + private Map buildGrouping(@NotNull final List> includedSources) { return QueryPerformanceRecorder.withNugget("Build deferred grouping", () -> { // noinspection unchecked final List>[] perSourceGroupingLists = - QueryPerformanceRecorder.withNugget("Read and transform grouping metadata", - () -> includedSources.parallelStream() - .map(source -> source.getTransformedMetadata(columnDefinition)) - .toArray(List[]::new)); + QueryPerformanceRecorder.withNugget("Read and transform grouping metadata", + () -> includedSources.parallelStream() + .map(source -> source.getTransformedMetadata(columnDefinition)) + .toArray(List[]::new)); final Map valueToBuilder = - QueryPerformanceRecorder.withNugget("Integrate grouping metadata", () -> { - final Map result = new LinkedHashMap<>(); - for (final List> groupingList : perSourceGroupingLists) { - if (groupingList == null) { - return null; + QueryPerformanceRecorder.withNugget("Integrate grouping metadata", () -> { + final Map result = new LinkedHashMap<>(); + for (final List> groupingList : perSourceGroupingLists) { + if (groupingList == null) { + return null; + } + for (final GroupingItem grouping : groupingList) { + grouping.updateBuilder(result); + } } - for (final GroupingItem grouping : groupingList) { - grouping.updateBuilder(result); - } - } - return result; - }); + return result; + }); if (valueToBuilder == null) { return null; } return 
QueryPerformanceRecorder.withNugget("Build and aggregate group indexes", - () -> valueToBuilder.entrySet().parallelStream() - .map(e -> new Pair<>(e.getKey(), e.getValue().getIndex())) - .collect(Collectors.toMap(Pair::getFirst, Pair::getSecond, Assert::neverInvoked, - LinkedHashMap::new))); + () -> valueToBuilder.entrySet().parallelStream() + .map(e -> new Pair<>(e.getKey(), e.getValue().getIndex())) + .collect(Collectors.toMap(Pair::getFirst, Pair::getSecond, Assert::neverInvoked, + LinkedHashMap::new))); }); } @@ -297,10 +281,9 @@ public Map getGroupToRange() { @Override public Pair, Boolean> getGroupToRange(@NotNull final Index hint) { final List> includedSources = sources.stream() - .filter( - source -> CurrentOnlyIndex.FACTORY.getIndexByRange(source.firstKey, source.lastKey) - .overlaps(hint)) - .collect(Collectors.toList()); + .filter(source -> CurrentOnlyIndex.FACTORY.getIndexByRange(source.firstKey, source.lastKey) + .overlaps(hint)) + .collect(Collectors.toList()); return new Pair<>(buildGrouping(includedSources), includedSources.size() == sources.size()); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/locations/impl/PartitionedTableLocationKey.java b/DB/src/main/java/io/deephaven/db/v2/locations/impl/PartitionedTableLocationKey.java index 9ac708fa59c..77853ff8616 100644 --- a/DB/src/main/java/io/deephaven/db/v2/locations/impl/PartitionedTableLocationKey.java +++ b/DB/src/main/java/io/deephaven/db/v2/locations/impl/PartitionedTableLocationKey.java @@ -11,38 +11,35 @@ import java.util.*; /** - * Base {@link ImmutableTableLocationKey} implementation for table locations that may be enclosed by - * partitions. Sub-classes should be sure to invoke the partition-map comparator at higher priority - * than other comparisons when implementing {@link #compareTo(Object)}, and to include the - * partitions in their {@link #equals(Object)} implementations. 
+ * Base {@link ImmutableTableLocationKey} implementation for table locations that may be enclosed by partitions. + * Sub-classes should be sure to invoke the partition-map comparator at higher priority than other comparisons when + * implementing {@link #compareTo(Object)}, and to include the partitions in their {@link #equals(Object)} + * implementations. */ public abstract class PartitionedTableLocationKey implements ImmutableTableLocationKey { @SuppressWarnings("StringOperationCanBeSimplified") - private static final Comparable MISSING_PARTITION_VALUE = - new String("MISSING PARTITION VALUE"); + private static final Comparable MISSING_PARTITION_VALUE = new String("MISSING PARTITION VALUE"); protected final Map> partitions; /** * Construct a new PartitionedTableLocationKey for the supplied {@code partitions}. * - * @param partitions The table partitions enclosing the table location keyed by {@code this}. - * Note that if this parameter is {@code null}, the location will be a member of no - * partitions. An ordered copy of the map will be made, so the calling code is free to - * mutate the map after this call completes, but the partition keys and values themselves - * must be effectively immutable. + * @param partitions The table partitions enclosing the table location keyed by {@code this}. Note that if this + * parameter is {@code null}, the location will be a member of no partitions. An ordered copy of the map will + * be made, so the calling code is free to mutate the map after this call completes, but the partition keys + * and values themselves must be effectively immutable. */ protected PartitionedTableLocationKey(@Nullable final Map> partitions) { this.partitions = partitions == null || partitions.isEmpty() ? 
Collections.emptyMap() - : Collections.unmodifiableMap(new LinkedHashMap<>(partitions)); + : Collections.unmodifiableMap(new LinkedHashMap<>(partitions)); } @Override public final > PARTITION_VALUE_TYPE getPartitionValue( - @NotNull final String partitionKey) { - final Object partitionValue = - partitions.getOrDefault(partitionKey, MISSING_PARTITION_VALUE); + @NotNull final String partitionKey) { + final Object partitionValue = partitions.getOrDefault(partitionKey, MISSING_PARTITION_VALUE); if (partitionValue == MISSING_PARTITION_VALUE) { throw new UnknownPartitionKeyException(partitionKey, this); } @@ -58,17 +55,14 @@ public final Set getPartitionKeys() { /** * Formats a map of partitions as key-value pairs. */ - protected static final class PartitionsFormatter - implements LogOutput.ObjFormatter>> { + protected static final class PartitionsFormatter implements LogOutput.ObjFormatter>> { - public static final LogOutput.ObjFormatter>> INSTANCE = - new PartitionsFormatter(); + public static final LogOutput.ObjFormatter>> INSTANCE = new PartitionsFormatter(); private PartitionsFormatter() {} @Override - public void format(@NotNull final LogOutput logOutput, - @NotNull final Map> partitions) { + public void format(@NotNull final LogOutput logOutput, @NotNull final Map> partitions) { if (partitions.isEmpty()) { logOutput.append("{}"); return; @@ -76,8 +70,8 @@ public void format(@NotNull final LogOutput logOutput, logOutput.append('{'); - for (final Iterator>> pi = - partitions.entrySet().iterator(); pi.hasNext();) { + for (final Iterator>> pi = partitions.entrySet().iterator(); pi + .hasNext();) { final Map.Entry> partition = pi.next(); final String partitionKey = partition.getKey(); final Comparable partitionValue = partition.getValue(); @@ -88,7 +82,7 @@ public void format(@NotNull final LogOutput logOutput, } else if (partitionValue instanceof CharSequence) { logOutput.append((CharSequence) partitionValue); } else if (partitionValue instanceof Long || 
partitionValue instanceof Integer - || partitionValue instanceof Short || partitionValue instanceof Byte) { + || partitionValue instanceof Short || partitionValue instanceof Byte) { logOutput.append(((Number) partitionValue).longValue()); } else if (partitionValue instanceof Double || partitionValue instanceof Float) { logOutput.appendDouble(((Number) partitionValue).doubleValue()); @@ -107,33 +101,30 @@ public void format(@NotNull final LogOutput logOutput, } /** - * Compares two maps of partitions by key-value pairs. {@code p1}'s entry order determines the - * priority of each partition in the comparison, and it's assumed that {@code p2} will have the - * same entry order. {@link #compare(Map, Map)} will throw an - * {@link UnknownPartitionKeyException} if one of the maps is missing keys found in the other. + * Compares two maps of partitions by key-value pairs. {@code p1}'s entry order determines the priority of each + * partition in the comparison, and it's assumed that {@code p2} will have the same entry order. + * {@link #compare(Map, Map)} will throw an {@link UnknownPartitionKeyException} if one of the maps is missing keys + * found in the other. 
*/ - protected static final class PartitionsComparator - implements Comparator>> { + protected static final class PartitionsComparator implements Comparator>> { - public static final Comparator>> INSTANCE = - new PartitionsComparator(); + public static final Comparator>> INSTANCE = new PartitionsComparator(); private PartitionsComparator() {} private static void checkSizeMismatch(@NotNull final Map> p1, - final int p1Size, - @NotNull final Map> p2, - final int p2Size) { + final int p1Size, + @NotNull final Map> p2, + final int p2Size) { if (p1Size > p2Size) { // noinspection OptionalGetWithoutIsPresent throw new UnknownPartitionKeyException( - p1.keySet().stream().filter(pk -> !p2.containsKey(pk)).findFirst().get()); + p1.keySet().stream().filter(pk -> !p2.containsKey(pk)).findFirst().get()); } } @Override - public int compare(final Map> p1, - final Map> p2) { + public int compare(final Map> p1, final Map> p2) { final int p1Size = Objects.requireNonNull(p1).size(); final int p2Size = Objects.requireNonNull(p2).size(); checkSizeMismatch(p1, p1Size, p2, p2Size); @@ -142,8 +133,7 @@ public int compare(final Map> p1, for (final Map.Entry> p1Entry : p1.entrySet()) { final String partitionKey = p1Entry.getKey(); final Comparable p1Value = p1Entry.getValue(); - final Comparable p2Value = - p2.getOrDefault(partitionKey, MISSING_PARTITION_VALUE); + final Comparable p2Value = p2.getOrDefault(partitionKey, MISSING_PARTITION_VALUE); if (p2Value == MISSING_PARTITION_VALUE) { throw new UnknownPartitionKeyException(partitionKey); } diff --git a/DB/src/main/java/io/deephaven/db/v2/locations/impl/PollingTableLocationProvider.java b/DB/src/main/java/io/deephaven/db/v2/locations/impl/PollingTableLocationProvider.java index 91b9e697a54..3b6f4428f89 100644 --- a/DB/src/main/java/io/deephaven/db/v2/locations/impl/PollingTableLocationProvider.java +++ b/DB/src/main/java/io/deephaven/db/v2/locations/impl/PollingTableLocationProvider.java @@ -9,15 +9,14 @@ import 
org.jetbrains.annotations.Nullable; /** - * Polling-driven {@link TableLocationProvider} implementation that delegates - * {@link TableLocationKey location key} discovery to a {@link TableLocationKeyFinder} and - * {@link TableLocation location} creation to a {@link TableLocationFactory}. + * Polling-driven {@link TableLocationProvider} implementation that delegates {@link TableLocationKey location key} + * discovery to a {@link TableLocationKeyFinder} and {@link TableLocation location} creation to a + * {@link TableLocationFactory}. */ public class PollingTableLocationProvider - extends AbstractTableLocationProvider { + extends AbstractTableLocationProvider { - private static final String IMPLEMENTATION_NAME = - PollingTableLocationProvider.class.getSimpleName(); + private static final String IMPLEMENTATION_NAME = PollingTableLocationProvider.class.getSimpleName(); private final TableLocationKeyFinder locationKeyFinder; private final TableLocationFactory locationFactory; @@ -26,9 +25,9 @@ public class PollingTableLocationProvider locationKeyFinder, - @NotNull final TableLocationFactory locationFactory, - @Nullable final TableDataRefreshService refreshService) { + @NotNull final TableLocationKeyFinder locationKeyFinder, + @NotNull final TableLocationFactory locationFactory, + @Nullable final TableDataRefreshService refreshService) { super(tableKey, refreshService != null); this.locationKeyFinder = locationKeyFinder; this.locationFactory = locationFactory; diff --git a/DB/src/main/java/io/deephaven/db/v2/locations/impl/RandomGroupingBuilder.java b/DB/src/main/java/io/deephaven/db/v2/locations/impl/RandomGroupingBuilder.java index 148ea2fe756..4d629a07040 100644 --- a/DB/src/main/java/io/deephaven/db/v2/locations/impl/RandomGroupingBuilder.java +++ b/DB/src/main/java/io/deephaven/db/v2/locations/impl/RandomGroupingBuilder.java @@ -12,8 +12,8 @@ import java.util.Map; /** - * Builder for constructing groupings from one or more {@code } pairs, with no - * requirement 
that key ranges be sequential. + * Builder for constructing groupings from one or more {@code } pairs, with no requirement that key + * ranges be sequential. */ public class RandomGroupingBuilder { @@ -29,19 +29,17 @@ public class RandomGroupingBuilder { * @param lastKey The last key in the range */ public void addGrouping(@Nullable DATA_TYPE value, long firstKey, long lastKey) { - // if we've already created the groupToIndex, then our groupToIndexBuilder is going to be in - // a bad state + // if we've already created the groupToIndex, then our groupToIndexBuilder is going to be in a bad state Require.eqNull(groupToIndex, "groupToIndex"); Require.neqNull(groupToIndexBuilder, "groupToIndexBuilder"); final Index.RandomBuilder indexBuilder = - groupToIndexBuilder.computeIfAbsent(value, (k) -> Index.FACTORY.getRandomBuilder()); + groupToIndexBuilder.computeIfAbsent(value, (k) -> Index.FACTORY.getRandomBuilder()); indexBuilder.addRange(firstKey, lastKey); } /** - * Get the groupings under construction in a form usable by AbstractColumnSource - * implementations. + * Get the groupings under construction in a form usable by AbstractColumnSource implementations. * * @return A mapping from grouping value to its matching Index */ diff --git a/DB/src/main/java/io/deephaven/db/v2/locations/impl/RecordingLocationKeyFinder.java b/DB/src/main/java/io/deephaven/db/v2/locations/impl/RecordingLocationKeyFinder.java index 526c0b82dad..cd314829f20 100644 --- a/DB/src/main/java/io/deephaven/db/v2/locations/impl/RecordingLocationKeyFinder.java +++ b/DB/src/main/java/io/deephaven/db/v2/locations/impl/RecordingLocationKeyFinder.java @@ -8,11 +8,11 @@ import java.util.function.Consumer; /** - * {@link TableLocationKeyFinder Location finder} that will record and expose the output of another - * for subsequent delivery to an observer. + * {@link TableLocationKeyFinder Location finder} that will record and expose the output of another for subsequent + * delivery to an observer. 
*/ public final class RecordingLocationKeyFinder - implements TableLocationKeyFinder, Consumer { + implements TableLocationKeyFinder, Consumer { private final List recordedKeys = new ArrayList<>(); diff --git a/DB/src/main/java/io/deephaven/db/v2/locations/impl/SingleTableLocationProvider.java b/DB/src/main/java/io/deephaven/db/v2/locations/impl/SingleTableLocationProvider.java index 0b2d59872d6..c95c6e58075 100644 --- a/DB/src/main/java/io/deephaven/db/v2/locations/impl/SingleTableLocationProvider.java +++ b/DB/src/main/java/io/deephaven/db/v2/locations/impl/SingleTableLocationProvider.java @@ -8,13 +8,11 @@ import java.util.Collections; /** - * A {@link TableLocationProvider} that provides access to exactly one, previously-known - * {@link TableLocation}. + * A {@link TableLocationProvider} that provides access to exactly one, previously-known {@link TableLocation}. */ public final class SingleTableLocationProvider implements TableLocationProvider { - private static final String IMPLEMENTATION_NAME = - SingleTableLocationProvider.class.getSimpleName(); + private static final String IMPLEMENTATION_NAME = SingleTableLocationProvider.class.getSimpleName(); private final TableLocation tableLocation; @@ -71,8 +69,7 @@ public boolean hasTableLocationKey(@NotNull final TableLocationKey tableLocation @Nullable @Override - public TableLocation getTableLocationIfPresent( - @NotNull final TableLocationKey tableLocationKey) { + public TableLocation getTableLocationIfPresent(@NotNull final TableLocationKey tableLocationKey) { return hasTableLocationKey(tableLocationKey) ? 
tableLocation : null; } } diff --git a/DB/src/main/java/io/deephaven/db/v2/locations/impl/StandaloneTableKey.java b/DB/src/main/java/io/deephaven/db/v2/locations/impl/StandaloneTableKey.java index 5046afe0461..77d93182cc8 100644 --- a/DB/src/main/java/io/deephaven/db/v2/locations/impl/StandaloneTableKey.java +++ b/DB/src/main/java/io/deephaven/db/v2/locations/impl/StandaloneTableKey.java @@ -9,8 +9,7 @@ import javax.annotation.concurrent.Immutable; /** - * {@link TableKey} implementation for standalone tables that are created without a - * {@link TableDataService}. + * {@link TableKey} implementation for standalone tables that are created without a {@link TableDataService}. */ @Immutable public final class StandaloneTableKey implements ImmutableTableKey { diff --git a/DB/src/main/java/io/deephaven/db/v2/locations/impl/StandaloneTableLocationKey.java b/DB/src/main/java/io/deephaven/db/v2/locations/impl/StandaloneTableLocationKey.java index b3c8c1ef2f9..ad01e95af1c 100644 --- a/DB/src/main/java/io/deephaven/db/v2/locations/impl/StandaloneTableLocationKey.java +++ b/DB/src/main/java/io/deephaven/db/v2/locations/impl/StandaloneTableLocationKey.java @@ -60,7 +60,7 @@ public boolean equals(@Nullable Object other) { @Override public > PARTITION_VALUE_TYPE getPartitionValue( - @NotNull final String partitionKey) { + @NotNull final String partitionKey) { throw new UnknownPartitionKeyException(partitionKey, this); } diff --git a/DB/src/main/java/io/deephaven/db/v2/locations/impl/SubscriptionAggregator.java b/DB/src/main/java/io/deephaven/db/v2/locations/impl/SubscriptionAggregator.java index 5eddcf700c7..32c3017159d 100644 --- a/DB/src/main/java/io/deephaven/db/v2/locations/impl/SubscriptionAggregator.java +++ b/DB/src/main/java/io/deephaven/db/v2/locations/impl/SubscriptionAggregator.java @@ -9,8 +9,7 @@ /** * Implement optional subscription support suitable for multiple TableDataService components. 
* - * @param A bound on the type of listener supported by this aggregator's - * subscriptions + * @param A bound on the type of listener supported by this aggregator's subscriptions */ public abstract class SubscriptionAggregator { @@ -95,9 +94,9 @@ public final void unsubscribe(@NotNull final LISTENER_TYPE listener) { } /** - * Check if this subscription aggregator still has any valid subscribers - useful if there may - * have been no notifications delivered for some time, as a test to determine whether work - * should be done to maintain the underlying subscription. + * Check if this subscription aggregator still has any valid subscribers - useful if there may have been no + * notifications delivered for some time, as a test to determine whether work should be done to maintain the + * underlying subscription. * * @return true if there are valid subscribers, else false */ @@ -116,17 +115,16 @@ public boolean checkHasSubscribers() { *

    * Refresh and activate update pushing from the implementing class. *

    - * If the implementation will deliver notifications in a different thread than the one that - * calls this method, then this method must be asynchronous - that is, it must not block pending - * delivery of results. This requirement holds even if that other thread has nothing to do - * with the initial activation request! + * If the implementation will deliver notifications in a different thread than the one that calls this method, then + * this method must be asynchronous - that is, it must not block pending delivery of results. This requirement + * holds even if that other thread has nothing to do with the initial activation request! * *

    - * Listeners should guard against duplicate notifications, especially if the implementation - * delivers synchronous notifications. + * Listeners should guard against duplicate notifications, especially if the implementation delivers synchronous + * notifications. *

    - * The implementation should call activationSuccessful() when done activating and delivering - * initial refresh results, unless activationFailed() was called instead. + * The implementation should call activationSuccessful() when done activating and delivering initial refresh + * results, unless activationFailed() was called instead. *

    * Must be called under the subscription lock. */ @@ -135,16 +133,16 @@ protected void activateUnderlyingDataSource() { } /** - * Notify the implementation that activation has completed. This may be invoked upon - * "re-activation" of an existing subscription, in which case it is effectively a no-op. This is - * public because it is called externally by services implementing subscriptions. + * Notify the implementation that activation has completed. This may be invoked upon "re-activation" of an existing + * subscription, in which case it is effectively a no-op. This is public because it is called externally by services + * implementing subscriptions. * * @param token A subscription-related object that the subclass can use to match a notification */ public final void activationSuccessful(@Nullable final T token) { if (!supportsSubscriptions()) { - throw new IllegalStateException(this - + ": completed activations are unexpected when subscriptions aren't supported"); + throw new IllegalStateException( + this + ": completed activations are unexpected when subscriptions aren't supported"); } synchronized (subscriptions) { if (!matchSubscriptionToken(token)) { @@ -158,31 +156,27 @@ public final void activationSuccessful(@Nullable final T token) { } /** - * Deliver an exception triggered while activating or maintaining the underlying data source. - * The underlying data source is implicitly deactivated. This is public because it is called - * externally by services implementing subscriptions. + * Deliver an exception triggered while activating or maintaining the underlying data source. The underlying data + * source is implicitly deactivated. This is public because it is called externally by services implementing + * subscriptions. 
* * @param token A subscription-related object that the subclass can use to match a notification * @param exception The exception */ - public final void activationFailed(@Nullable final T token, - @NotNull final TableDataException exception) { + public final void activationFailed(@Nullable final T token, @NotNull final TableDataException exception) { if (!supportsSubscriptions()) { - throw new IllegalStateException(this - + ": asynchronous exceptions are unexpected when subscriptions aren't supported", - exception); + throw new IllegalStateException( + this + ": asynchronous exceptions are unexpected when subscriptions aren't supported", exception); } synchronized (subscriptions) { if (!matchSubscriptionToken(token)) { return; } if (activationState == ActivationState.PENDING) { - onActivationDone(ActivationState.FAILED); // NB: This can be done before or after - // the notification delivery, since we're - // holding the lock. + onActivationDone(ActivationState.FAILED); // NB: This can be done before or after the notification + // delivery, since we're holding the lock. } - subscriptions.deliverNotification(BasicTableDataListener::handleException, exception, - false); + subscriptions.deliverNotification(BasicTableDataListener::handleException, exception, false); if (!subscriptions.isEmpty()) { subscriptions.clear(); } @@ -190,16 +184,15 @@ public final void activationFailed(@Nullable final T token, } /** - * Deactivate pushed updates from the implementing class. Must be called under the subscription - * lock. + * Deactivate pushed updates from the implementing class. Must be called under the subscription lock. */ protected void deactivateUnderlyingDataSource() { throw new UnsupportedOperationException(); } /** - * Verify that a notification pertains to a currently-active subscription. Must be called under - * the subscription lock. + * Verify that a notification pertains to a currently-active subscription. Must be called under the subscription + * lock. 
* * @param token A subscription-related object that the subclass can use to match a notification * @return True iff notification delivery should proceed diff --git a/DB/src/main/java/io/deephaven/db/v2/locations/impl/TableLocationFactory.java b/DB/src/main/java/io/deephaven/db/v2/locations/impl/TableLocationFactory.java index b321b723d17..0cd442c4289 100644 --- a/DB/src/main/java/io/deephaven/db/v2/locations/impl/TableLocationFactory.java +++ b/DB/src/main/java/io/deephaven/db/v2/locations/impl/TableLocationFactory.java @@ -17,13 +17,13 @@ public interface TableLocationFactory EMPTY_TABLE_LOCATION_KEYS = - Collections.emptySet(); + private static final Set EMPTY_TABLE_LOCATION_KEYS = Collections.emptySet(); private final TableLocationProvider tableLocationProvider; @@ -29,18 +27,15 @@ public class TableLocationSubscriptionBuffer implements TableLocationProvider.Li private Set pendingLocationKeys = EMPTY_TABLE_LOCATION_KEYS; private TableDataException pendingException = null; - public TableLocationSubscriptionBuffer( - @NotNull final TableLocationProvider tableLocationProvider) { - this.tableLocationProvider = - Require.neqNull(tableLocationProvider, "tableLocationProvider"); + public TableLocationSubscriptionBuffer(@NotNull final TableLocationProvider tableLocationProvider) { + this.tableLocationProvider = Require.neqNull(tableLocationProvider, "tableLocationProvider"); } /** - * Subscribe if needed, and return any pending location keys (or throw a pending exception) from - * the table location provider. A given location key will only be returned by a single call to - * processPending() (unless state is reset). No order is maintained internally. If a pending - * exception is thrown, this signals that the subscription is no longer valid and no subsequent - * location keys will be returned. + * Subscribe if needed, and return any pending location keys (or throw a pending exception) from the table location + * provider. 
A given location key will only be returned by a single call to processPending() (unless state is + * reset). No order is maintained internally. If a pending exception is thrown, this signals that the subscription + * is no longer valid and no subsequent location keys will be returned. * * @return The collection of pending location keys */ @@ -50,8 +45,8 @@ public synchronized Collection processPending() { if (tableLocationProvider.supportsSubscriptions()) { tableLocationProvider.subscribe(this); } else { - // NB: Providers that don't support subscriptions don't tick - this single call to - // refresh is sufficient. + // NB: Providers that don't support subscriptions don't tick - this single call to refresh is + // sufficient. tableLocationProvider.refresh(); tableLocationProvider.getTableLocationKeys().forEach(this::handleTableLocationKey); } diff --git a/DB/src/main/java/io/deephaven/db/v2/locations/impl/TableLocationUpdateSubscriptionBuffer.java b/DB/src/main/java/io/deephaven/db/v2/locations/impl/TableLocationUpdateSubscriptionBuffer.java index 27c903eab90..cf39c2f5285 100644 --- a/DB/src/main/java/io/deephaven/db/v2/locations/impl/TableLocationUpdateSubscriptionBuffer.java +++ b/DB/src/main/java/io/deephaven/db/v2/locations/impl/TableLocationUpdateSubscriptionBuffer.java @@ -11,8 +11,7 @@ import org.jetbrains.annotations.NotNull; /** - * Intermediates between push-based subscription to a TableLocation and polling on LiveTable - * refresh. + * Intermediates between push-based subscription to a TableLocation and polling on LiveTable refresh. */ public class TableLocationUpdateSubscriptionBuffer implements TableLocation.Listener { @@ -30,9 +29,9 @@ public TableLocationUpdateSubscriptionBuffer(@NotNull final TableLocation tableL } /** - * Subscribe if needed, and return whether there was a pending update to the table location, or - * throw a pending exception. 
If a pending exception is thrown, this signals that the - * subscription is no longer valid and no subsequent pending updates will be returned. + * Subscribe if needed, and return whether there was a pending update to the table location, or throw a pending + * exception. If a pending exception is thrown, this signals that the subscription is no longer valid and no + * subsequent pending updates will be returned. * * @return Whether there was a pending update */ @@ -41,8 +40,8 @@ public synchronized boolean processPending() { if (tableLocation.supportsSubscriptions()) { tableLocation.subscribe(this); } else { - // NB: Locations that don't support subscriptions don't tick - this single call to - // refresh is sufficient. + // NB: Locations that don't support subscriptions don't tick - this single call to refresh is + // sufficient. tableLocation.refresh(); handleUpdate(); } @@ -88,12 +87,10 @@ public void handleUpdate() { synchronized (updateLock) { if (observedNonNullSize) { if (tableLocation.getSize() == TableLocationState.NULL_SIZE) { - pendingException = new TableDataException("Location " + tableLocation - + " is no longer available, data has been removed or replaced"); - // No need to bother unsubscribing - the consumer will either leak (and allow - // asynchronous cleanup) - // or unsubscribe all of its locations as a result of handling this exception - // when it polls. + pendingException = new TableDataException( + "Location " + tableLocation + " is no longer available, data has been removed or replaced"); + // No need to bother unsubscribing - the consumer will either leak (and allow asynchronous cleanup) + // or unsubscribe all of its locations as a result of handling this exception when it polls. 
} } else if (tableLocation.getSize() != TableLocationState.NULL_SIZE) { observedNonNullSize = true; diff --git a/DB/src/main/java/io/deephaven/db/v2/locations/local/DeephavenNestedPartitionLayout.java b/DB/src/main/java/io/deephaven/db/v2/locations/local/DeephavenNestedPartitionLayout.java index 846291ecdb0..056ec5df91c 100644 --- a/DB/src/main/java/io/deephaven/db/v2/locations/local/DeephavenNestedPartitionLayout.java +++ b/DB/src/main/java/io/deephaven/db/v2/locations/local/DeephavenNestedPartitionLayout.java @@ -20,34 +20,34 @@ import java.util.function.Predicate; /** - * {@link TableLocationKeyFinder} that will traverse a directory hierarchy laid out in Deephaven's - * "nested-partitioned" format, e.g. + * {@link TableLocationKeyFinder} that will traverse a directory hierarchy laid out in Deephaven's "nested-partitioned" + * format, e.g. * *

      * tableRootDirectory/internalPartitionValue/columnPartitionValue/tableName/...
      * 
    * - * , producing {@link FileTableLocationKey}'s with two partitions, for keys - * {@value INTERNAL_PARTITION_KEY} and the specified {@code columnPartitionKey}. + * , producing {@link FileTableLocationKey}'s with two partitions, for keys {@value INTERNAL_PARTITION_KEY} and the + * specified {@code columnPartitionKey}. */ public abstract class DeephavenNestedPartitionLayout - implements TableLocationKeyFinder { + implements TableLocationKeyFinder { @VisibleForTesting public static final String PARQUET_FILE_NAME = "table.parquet"; public static DeephavenNestedPartitionLayout forParquet( - @NotNull final File tableRootDirectory, - @NotNull final String tableName, - @NotNull final String columnPartitionKey, - @Nullable final Predicate internalPartitionValueFilter) { - return new DeephavenNestedPartitionLayout(tableRootDirectory, - tableName, columnPartitionKey, internalPartitionValueFilter) { + @NotNull final File tableRootDirectory, + @NotNull final String tableName, + @NotNull final String columnPartitionKey, + @Nullable final Predicate internalPartitionValueFilter) { + return new DeephavenNestedPartitionLayout(tableRootDirectory, tableName, + columnPartitionKey, internalPartitionValueFilter) { @Override protected ParquetTableLocationKey makeKey(@NotNull Path tableLeafDirectory, - @NotNull Map> partitions) { - return new ParquetTableLocationKey( - tableLeafDirectory.resolve(PARQUET_FILE_NAME).toFile(), 0, partitions); + @NotNull Map> partitions) { + return new ParquetTableLocationKey(tableLeafDirectory.resolve(PARQUET_FILE_NAME).toFile(), 0, + partitions); } }; } @@ -63,13 +63,13 @@ protected ParquetTableLocationKey makeKey(@NotNull Path tableLeafDirectory, * @param tableRootDirectory The directory to traverse from * @param tableName The table name * @param columnPartitionKey The partitioning column name - * @param internalPartitionValueFilter Filter to control which internal partitions are included, - * {@code null} for all + * @param 
internalPartitionValueFilter Filter to control which internal partitions are included, {@code null} for + * all */ protected DeephavenNestedPartitionLayout(@NotNull final File tableRootDirectory, - @NotNull final String tableName, - @NotNull final String columnPartitionKey, - @Nullable final Predicate internalPartitionValueFilter) { + @NotNull final String tableName, + @NotNull final String columnPartitionKey, + @Nullable final Predicate internalPartitionValueFilter) { this.tableRootDirectory = tableRootDirectory; this.tableName = tableName; this.columnPartitionKey = columnPartitionKey; @@ -77,8 +77,7 @@ protected DeephavenNestedPartitionLayout(@NotNull final File tableRootDirectory, } public String toString() { - return DeephavenNestedPartitionLayout.class.getSimpleName() + '[' + tableRootDirectory + ',' - + tableName + ']'; + return DeephavenNestedPartitionLayout.class.getSimpleName() + '[' + tableRootDirectory + ',' + tableName + ']'; } @Override @@ -86,28 +85,25 @@ public final void findKeys(@NotNull final Consumer locationKeyObserver) { final Map> partitions = new LinkedHashMap<>(); PrivilegedFileAccessUtil.doFilesystemAction(() -> { try (final DirectoryStream internalPartitionStream = - Files.newDirectoryStream(tableRootDirectory.toPath(), Files::isDirectory)) { + Files.newDirectoryStream(tableRootDirectory.toPath(), Files::isDirectory)) { for (final Path internalPartition : internalPartitionStream) { - final String internalPartitionValue = - internalPartition.getFileName().toString(); + final String internalPartitionValue = internalPartition.getFileName().toString(); if (internalPartitionValueFilter != null - && !internalPartitionValueFilter.test(internalPartitionValue)) { + && !internalPartitionValueFilter.test(internalPartitionValue)) { continue; } boolean needToUpdateInternalPartitionValue = true; try (final DirectoryStream columnPartitionStream = - Files.newDirectoryStream(internalPartition, Files::isDirectory)) { + 
Files.newDirectoryStream(internalPartition, Files::isDirectory)) { for (final Path columnPartition : columnPartitionStream) { - partitions.put(columnPartitionKey, - columnPartition.getFileName().toString()); + partitions.put(columnPartitionKey, columnPartition.getFileName().toString()); if (needToUpdateInternalPartitionValue) { - // Partition order dictates comparison priority, so we need to - // insert the internal partition after the column partition. + // Partition order dictates comparison priority, so we need to insert the internal + // partition after the column partition. partitions.put(INTERNAL_PARTITION_KEY, internalPartitionValue); needToUpdateInternalPartitionValue = false; } - locationKeyObserver - .accept(makeKey(columnPartition.resolve(tableName), partitions)); + locationKeyObserver.accept(makeKey(columnPartition.resolve(tableName), partitions)); } } } @@ -115,11 +111,11 @@ public final void findKeys(@NotNull final Consumer locationKeyObserver) { // If we found nothing at all, then there's nothing to be done at this level. 
} catch (final IOException e) { throw new TableDataException( - "Error finding locations for " + tableName + " under " + tableRootDirectory, e); + "Error finding locations for " + tableName + " under " + tableRootDirectory, e); } }); } protected abstract TLK makeKey(@NotNull final Path tableLeafDirectory, - @NotNull final Map> partitions); + @NotNull final Map> partitions); } diff --git a/DB/src/main/java/io/deephaven/db/v2/locations/local/FileTableLocationKey.java b/DB/src/main/java/io/deephaven/db/v2/locations/local/FileTableLocationKey.java index 666ef209f7d..900f06f3a74 100644 --- a/DB/src/main/java/io/deephaven/db/v2/locations/local/FileTableLocationKey.java +++ b/DB/src/main/java/io/deephaven/db/v2/locations/local/FileTableLocationKey.java @@ -12,10 +12,10 @@ import java.util.Map; /** - * Base {@link ImmutableTableLocationKey} implementation for table locations that may be enclosed by - * partitions and described by a {@link File}. Sub-classes should override - * {@link #compareTo(TableLocationKey)} and {@link #equals(Object)} only if they need to prevent - * equality with other {@link FileTableLocationKey} implementations. + * Base {@link ImmutableTableLocationKey} implementation for table locations that may be enclosed by partitions and + * described by a {@link File}. Sub-classes should override {@link #compareTo(TableLocationKey)} and + * {@link #equals(Object)} only if they need to prevent equality with other {@link FileTableLocationKey} + * implementations. */ public class FileTableLocationKey extends PartitionedTableLocationKey { @@ -29,20 +29,17 @@ public class FileTableLocationKey extends PartitionedTableLocationKey { /** * Construct a new FileTableLocationKey for the supplied {@code file} and {@code partitions}. * - * @param file The file (or directory) that backs the keyed location. Will be adjusted to an - * absolute path. - * @param order Explicit ordering value for this location key. 
- * {@link Comparable#compareTo(Object)} will sort FileTableLocationKeys with a lower - * {@code order} before other keys. Comparing this ordering value takes precedence over - * other fields. - * @param partitions The table partitions enclosing the table location keyed by {@code this}. - * Note that if this parameter is {@code null}, the location will be a member of no - * partitions. An ordered copy of the map will be made, so the calling code is free to - * mutate the map after this call completes, but the partition keys and values themselves - * must be effectively immutable. + * @param file The file (or directory) that backs the keyed location. Will be adjusted to an absolute path. + * @param order Explicit ordering value for this location key. {@link Comparable#compareTo(Object)} will sort + * FileTableLocationKeys with a lower {@code order} before other keys. Comparing this ordering value takes + * precedence over other fields. + * @param partitions The table partitions enclosing the table location keyed by {@code this}. Note that if this + * parameter is {@code null}, the location will be a member of no partitions. An ordered copy of the map will + * be made, so the calling code is free to mutate the map after this call completes, but the partition keys + * and values themselves must be effectively immutable. 
*/ public FileTableLocationKey(@NotNull final File file, final int order, - @Nullable final Map> partitions) { + @Nullable final Map> partitions) { super(partitions); this.file = file.getAbsoluteFile(); this.order = order; @@ -55,9 +52,9 @@ public final File getFile() { @Override public LogOutput append(@NotNull final LogOutput logOutput) { return logOutput.append(getImplementationName()) - .append(":[file=").append(file.getPath()) - .append(",partitions=").append(PartitionsFormatter.INSTANCE, partitions) - .append(']'); + .append(":[file=").append(file.getPath()) + .append(",partitions=").append(PartitionsFormatter.INSTANCE, partitions) + .append(']'); } @Override @@ -66,8 +63,8 @@ public String toString() { } /** - * Precedence-wise this implementation compares {@code order}, then applies a - * {@link PartitionsComparator} to {@code partitions}, then compares {@code file}. + * Precedence-wise this implementation compares {@code order}, then applies a {@link PartitionsComparator} to + * {@code partitions}, then compares {@code file}. 
* * @inheritDoc */ @@ -80,7 +77,7 @@ public int compareTo(@NotNull final TableLocationKey other) { return orderingComparisonResult; } final int partitionComparisonResult = - PartitionsComparator.INSTANCE.compare(partitions, otherTyped.partitions); + PartitionsComparator.INSTANCE.compare(partitions, otherTyped.partitions); if (partitionComparisonResult != 0) { return partitionComparisonResult; } @@ -93,8 +90,7 @@ public int compareTo(@NotNull final TableLocationKey other) { public int hashCode() { if (cachedHashCode == 0) { final int computedHashCode = 31 * partitions.hashCode() + file.hashCode(); - // Don't use 0; that's used by StandaloneTableLocationKey, and also our sentinel for the - // need to compute + // Don't use 0; that's used by StandaloneTableLocationKey, and also our sentinel for the need to compute if (computedHashCode == 0) { final int fallbackHashCode = FileTableLocationKey.class.hashCode(); cachedHashCode = fallbackHashCode == 0 ? 1 : fallbackHashCode; diff --git a/DB/src/main/java/io/deephaven/db/v2/locations/local/FlatParquetLayout.java b/DB/src/main/java/io/deephaven/db/v2/locations/local/FlatParquetLayout.java index 8dbe26a4481..1de15168f68 100644 --- a/DB/src/main/java/io/deephaven/db/v2/locations/local/FlatParquetLayout.java +++ b/DB/src/main/java/io/deephaven/db/v2/locations/local/FlatParquetLayout.java @@ -15,8 +15,7 @@ import static io.deephaven.db.v2.parquet.ParquetTableWriter.PARQUET_FILE_EXTENSION; /** - * Parquet {@link TableLocationKeyFinder location finder} that will discover multiple files in a - * single directory. + * Parquet {@link TableLocationKeyFinder location finder} that will discover multiple files in a single directory. 
*/ public final class FlatParquetLayout implements TableLocationKeyFinder { @@ -36,15 +35,13 @@ public String toString() { @Override public void findKeys(@NotNull final Consumer locationKeyObserver) { PrivilegedFileAccessUtil.doFilesystemAction(() -> { - try (final DirectoryStream parquetFileStream = Files - .newDirectoryStream(tableRootDirectory.toPath(), "*" + PARQUET_FILE_EXTENSION)) { + try (final DirectoryStream parquetFileStream = + Files.newDirectoryStream(tableRootDirectory.toPath(), "*" + PARQUET_FILE_EXTENSION)) { for (final Path parquetFilePath : parquetFileStream) { - locationKeyObserver - .accept(new ParquetTableLocationKey(parquetFilePath.toFile(), 0, null)); + locationKeyObserver.accept(new ParquetTableLocationKey(parquetFilePath.toFile(), 0, null)); } } catch (final IOException e) { - throw new TableDataException( - "Error finding parquet locations under " + tableRootDirectory, e); + throw new TableDataException("Error finding parquet locations under " + tableRootDirectory, e); } }); } diff --git a/DB/src/main/java/io/deephaven/db/v2/locations/local/KeyValuePartitionLayout.java b/DB/src/main/java/io/deephaven/db/v2/locations/local/KeyValuePartitionLayout.java index ae0613193a6..55735114d81 100644 --- a/DB/src/main/java/io/deephaven/db/v2/locations/local/KeyValuePartitionLayout.java +++ b/DB/src/main/java/io/deephaven/db/v2/locations/local/KeyValuePartitionLayout.java @@ -26,8 +26,8 @@ import java.util.stream.Collectors; /** - * {@link TableLocationKeyFinder Location finder} that will traverse a directory hierarchy and infer - * partitions from key-value pairs in the directory names, for example: + * {@link TableLocationKeyFinder Location finder} that will traverse a directory hierarchy and infer partitions from + * key-value pairs in the directory names, for example: * *
      * tableRootDirectory/Country=France/City=Paris/parisData.parquet
    @@ -35,23 +35,19 @@
      * 
      * Traversal is depth-first, and assumes that target files will only be found at a single depth.
      *
    - * @implNote Type inference uses {@link TableTools#readCsv(java.io.InputStream)} as a conversion
    - *           tool, and hence follows the same rules.
    - * @implNote Column names will be legalized via
    - *           {@link NameValidator#legalizeColumnName(String, Set)}.
    + * @implNote Type inference uses {@link TableTools#readCsv(java.io.InputStream)} as a conversion tool, and hence follows
    + *           the same rules.
    + * @implNote Column names will be legalized via {@link NameValidator#legalizeColumnName(String, Set)}.
      */
    -public final class KeyValuePartitionLayout
    -    implements TableLocationKeyFinder {
    +public final class KeyValuePartitionLayout implements TableLocationKeyFinder {
     
    -    public static TableLocationKeyFinder forParquet(
    -        @NotNull final File tableRootDirectory,
    -        final int maxPartitioningLevels) {
    +    public static TableLocationKeyFinder forParquet(@NotNull final File tableRootDirectory,
    +            final int maxPartitioningLevels) {
             return new KeyValuePartitionLayout<>(
    -            tableRootDirectory,
    -            path -> path.getFileName().toString()
    -                .endsWith(ParquetTableWriter.PARQUET_FILE_EXTENSION),
    -            (path, partitions) -> new ParquetTableLocationKey(path.toFile(), 0, partitions),
    -            maxPartitioningLevels);
    +                tableRootDirectory,
    +                path -> path.getFileName().toString().endsWith(ParquetTableWriter.PARQUET_FILE_EXTENSION),
    +                (path, partitions) -> new ParquetTableLocationKey(path.toFile(), 0, partitions),
    +                maxPartitioningLevels);
         }
     
         private final File tableRootDirectory;
    @@ -63,19 +59,17 @@ public static TableLocationKeyFinder forParquet(
          * @param tableRootDirectory The directory to traverse from
          * @param pathFilter Filter to determine whether a regular file should be used to create a key
          * @param keyFactory Key factory function
    -     * @param maxPartitioningLevels Maximum partitioning levels to traverse. Must be {@code >= 0}.
    -     *        {@code 0} means only look at files in {@code tableRootDirectory} and find no
    -     *        partitions.
    +     * @param maxPartitioningLevels Maximum partitioning levels to traverse. Must be {@code >= 0}. {@code 0} means only
    +     *        look at files in {@code tableRootDirectory} and find no partitions.
          */
         public KeyValuePartitionLayout(@NotNull final File tableRootDirectory,
    -        @NotNull final Predicate pathFilter,
    -        @NotNull final BiFunction>, TLK> keyFactory,
    -        final int maxPartitioningLevels) {
    +            @NotNull final Predicate pathFilter,
    +            @NotNull final BiFunction>, TLK> keyFactory,
    +            final int maxPartitioningLevels) {
             this.tableRootDirectory = tableRootDirectory;
             this.pathFilter = pathFilter;
             this.keyFactory = keyFactory;
    -        this.maxPartitioningLevels =
    -            Require.geqZero(maxPartitioningLevels, "maxPartitioningLevels");
    +        this.maxPartitioningLevels = Require.geqZero(maxPartitioningLevels, "maxPartitioningLevels");
         }
     
         public String toString() {
    @@ -88,93 +82,85 @@ public void findKeys(@NotNull final Consumer locationKeyObserver) {
             final Deque targetFiles = new ArrayDeque<>();
     
             try {
    -            Files.walkFileTree(tableRootDirectory.toPath(),
    -                EnumSet.of(FileVisitOption.FOLLOW_LINKS), maxPartitioningLevels + 1,
    -                new SimpleFileVisitor() {
    -                    final String ls = System.lineSeparator();
    -                    final Set takenNames = new HashSet<>();
    -                    final List columnKeys = new ArrayList<>();
    -                    final List rowValues = new ArrayList<>();
    -                    String row;
    -                    int columnCount = -1;
    +            Files.walkFileTree(tableRootDirectory.toPath(), EnumSet.of(FileVisitOption.FOLLOW_LINKS),
    +                    maxPartitioningLevels + 1, new SimpleFileVisitor() {
    +                        final String ls = System.lineSeparator();
    +                        final Set takenNames = new HashSet<>();
    +                        final List columnKeys = new ArrayList<>();
    +                        final List rowValues = new ArrayList<>();
    +                        String row;
    +                        int columnCount = -1;
     
    -                    @Override
    -                    public FileVisitResult preVisitDirectory(@NotNull final Path dir,
    -                        @NotNull final BasicFileAttributes attrs) {
    -                        if (++columnCount > 0) {
    -                            // We're descending and past the root
    -                            final String[] components = dir.getFileName().toString().split("=", 2);
    -                            if (components.length != 2) {
    -                                throw new TableDataException(
    -                                    "Unexpected directory name format (not key=value) at " + dir);
    -                            }
    -                            final String columnKey =
    -                                NameValidator.legalizeColumnName(components[0], takenNames);
    -                            final int columnIndex = columnCount - 1;
    -                            if (columnCount > columnKeys.size()) {
    -                                columnKeys.add(columnKey);
    -                            } else if (!columnKeys.get(columnIndex).equals(columnKey)) {
    -                                throw new TableDataException(
    -                                    "Column name mismatch at index " + columnIndex
    -                                        + ": expected " + columnKeys.get(columnIndex) + " found "
    -                                        + columnKey + " at " + dir);
    +                        @Override
    +                        public FileVisitResult preVisitDirectory(@NotNull final Path dir,
    +                                @NotNull final BasicFileAttributes attrs) {
    +                            if (++columnCount > 0) {
    +                                // We're descending and past the root
    +                                final String[] components = dir.getFileName().toString().split("=", 2);
    +                                if (components.length != 2) {
    +                                    throw new TableDataException(
    +                                            "Unexpected directory name format (not key=value) at " + dir);
    +                                }
    +                                final String columnKey = NameValidator.legalizeColumnName(components[0], takenNames);
    +                                final int columnIndex = columnCount - 1;
    +                                if (columnCount > columnKeys.size()) {
    +                                    columnKeys.add(columnKey);
    +                                } else if (!columnKeys.get(columnIndex).equals(columnKey)) {
    +                                    throw new TableDataException("Column name mismatch at index " + columnIndex
    +                                            + ": expected " + columnKeys.get(columnIndex) + " found " + columnKey
    +                                            + " at " + dir);
    +                                }
    +                                final String columnValue = components[1];
    +                                rowValues.add(columnValue);
                                 }
    -                            final String columnValue = components[1];
    -                            rowValues.add(columnValue);
    +                            return FileVisitResult.CONTINUE;
                             }
    -                        return FileVisitResult.CONTINUE;
    -                    }
     
    -                    @Override
    -                    public FileVisitResult visitFile(@NotNull final Path file,
    -                        @NotNull final BasicFileAttributes attrs) {
    -                        if (attrs.isRegularFile() && pathFilter.test(file)) {
    -                            if (!columnKeys.isEmpty()) {
    -                                if (csvBuilder.length() == 0) {
    -                                    csvBuilder.append(listToCsvRow(columnKeys)).append(ls);
    +                        @Override
    +                        public FileVisitResult visitFile(@NotNull final Path file,
    +                                @NotNull final BasicFileAttributes attrs) {
    +                            if (attrs.isRegularFile() && pathFilter.test(file)) {
    +                                if (!columnKeys.isEmpty()) {
    +                                    if (csvBuilder.length() == 0) {
    +                                        csvBuilder.append(listToCsvRow(columnKeys)).append(ls);
    +                                    }
    +                                    if (row == null) {
    +                                        row = listToCsvRow(rowValues);
    +                                    }
    +                                    csvBuilder.append(row).append(ls);
                                     }
    -                                if (row == null) {
    -                                    row = listToCsvRow(rowValues);
    -                                }
    -                                csvBuilder.append(row).append(ls);
    +                                targetFiles.add(file);
                                 }
    -                            targetFiles.add(file);
    +                            return FileVisitResult.CONTINUE;
                             }
    -                        return FileVisitResult.CONTINUE;
    -                    }
     
    -                    @Override
    -                    public FileVisitResult postVisitDirectory(@NotNull final Path dir,
    -                        @Nullable final IOException exc) throws IOException {
    -                        if (--columnCount >= 0) {
    -                            row = null;
    -                            rowValues.remove(columnCount);
    +                        @Override
    +                        public FileVisitResult postVisitDirectory(@NotNull final Path dir,
    +                                @Nullable final IOException exc) throws IOException {
    +                            if (--columnCount >= 0) {
    +                                row = null;
    +                                rowValues.remove(columnCount);
    +                            }
    +                            return super.postVisitDirectory(dir, exc);
                             }
    -                        return super.postVisitDirectory(dir, exc);
    -                    }
    -                });
    +                    });
             } catch (IOException e) {
    -            throw new TableDataException("Error finding locations for under " + tableRootDirectory,
    -                e);
    +            throw new TableDataException("Error finding locations for under " + tableRootDirectory, e);
             }
     
             final Table partitioningColumnTable;
             try {
    -            partitioningColumnTable = csvBuilder.length() == 0
    -                ? TableTools.emptyTable(targetFiles.size())
    -                : TableTools.readCsv(new ByteArrayInputStream(csvBuilder.toString().getBytes()));
    +            partitioningColumnTable = csvBuilder.length() == 0 ? TableTools.emptyTable(targetFiles.size())
    +                    : TableTools.readCsv(new ByteArrayInputStream(csvBuilder.toString().getBytes()));
             } catch (IOException e) {
    -            throw new TableDataException(
    -                "Failed converting partition CSV to table for " + tableRootDirectory, e);
    +            throw new TableDataException("Failed converting partition CSV to table for " + tableRootDirectory, e);
             }
     
             final Map> partitions = new LinkedHashMap<>();
    -        final String[] partitionKeys =
    -            partitioningColumnTable.getDefinition().getColumnNamesArray();
    +        final String[] partitionKeys = partitioningColumnTable.getDefinition().getColumnNamesArray();
             // noinspection unchecked
             final ColumnSource>[] partitionValueSources =
    -            partitioningColumnTable.getColumnSources()
    -                .toArray(ColumnSource.ZERO_LENGTH_COLUMN_SOURCE_ARRAY);
    +                partitioningColumnTable.getColumnSources().toArray(ColumnSource.ZERO_LENGTH_COLUMN_SOURCE_ARRAY);
             final int numColumns = partitionValueSources.length;
             partitioningColumnTable.getIndex().forAllLongs((final long indexKey) -> {
                 for (int ci = 0; ci < numColumns; ++ci) {
    diff --git a/DB/src/main/java/io/deephaven/db/v2/locations/local/ParquetMetadataFileLayout.java b/DB/src/main/java/io/deephaven/db/v2/locations/local/ParquetMetadataFileLayout.java
    index b425e69a75c..792c4e0c7f0 100644
    --- a/DB/src/main/java/io/deephaven/db/v2/locations/local/ParquetMetadataFileLayout.java
    +++ b/DB/src/main/java/io/deephaven/db/v2/locations/local/ParquetMetadataFileLayout.java
    @@ -36,19 +36,17 @@
     
     /**
      * 

    - * {@link TableLocationKeyFinder Location finder} that will examine a parquet metadata file to - * discover locations. + * {@link TableLocationKeyFinder Location finder} that will examine a parquet metadata file to discover locations. * *

    * Note that we expect to find the following files: *

      - *
    • {@code _metadata} - A file containing Parquet metadata for all {@link RowGroup row groups} in - * all {@code .parquet} files for the entire data set, including schema information non-partitioning - * columns and key-value metadata
    • - *
    • {@code _common_metadata} (optional) - A file containing Parquet metadata with schema - * information that applies to the entire data set, including partitioning columns that are inferred - * from file paths rather than explicitly written in {@link org.apache.parquet.format.ColumnChunk - * column chunks} within {@code .parquet} files
    • + *
    • {@code _metadata} - A file containing Parquet metadata for all {@link RowGroup row groups} in all + * {@code .parquet} files for the entire data set, including schema information non-partitioning columns and key-value + * metadata
    • + *
    • {@code _common_metadata} (optional) - A file containing Parquet metadata with schema information that + * applies to the entire data set, including partitioning columns that are inferred from file paths rather than + * explicitly written in {@link org.apache.parquet.format.ColumnChunk column chunks} within {@code .parquet} files
    • *
    */ public class ParquetMetadataFileLayout implements TableLocationKeyFinder { @@ -68,65 +66,55 @@ public ParquetMetadataFileLayout(@NotNull final File directory) { } public ParquetMetadataFileLayout(@NotNull final File directory, - @NotNull final ParquetInstructions inputInstructions) { - this(new File(directory, METADATA_FILE_NAME), - new File(directory, COMMON_METADATA_FILE_NAME), inputInstructions); + @NotNull final ParquetInstructions inputInstructions) { + this(new File(directory, METADATA_FILE_NAME), new File(directory, COMMON_METADATA_FILE_NAME), + inputInstructions); } public ParquetMetadataFileLayout(@NotNull final File metadataFile, - @Nullable final File commonMetadataFile) { + @Nullable final File commonMetadataFile) { this(metadataFile, commonMetadataFile, ParquetInstructions.EMPTY); } public ParquetMetadataFileLayout(@NotNull final File metadataFile, - @Nullable final File commonMetadataFile, - @NotNull final ParquetInstructions inputInstructions) { + @Nullable final File commonMetadataFile, + @NotNull final ParquetInstructions inputInstructions) { this.metadataFile = metadataFile; this.commonMetadataFile = commonMetadataFile; if (!metadataFile.exists()) { - throw new TableDataException( - "Parquet metadata file " + metadataFile + " does not exist"); + throw new TableDataException("Parquet metadata file " + metadataFile + " does not exist"); } - final ParquetFileReader metadataFileReader = - ParquetTools.getParquetFileReader(metadataFile); + final ParquetFileReader metadataFileReader = ParquetTools.getParquetFileReader(metadataFile); final ParquetMetadataConverter converter = new ParquetMetadataConverter(); - final ParquetMetadata metadataFileMetadata = - convertMetadata(metadataFile, metadataFileReader, converter); - final Pair, ParquetInstructions> leafSchemaInfo = - ParquetTools.convertSchema( + final ParquetMetadata metadataFileMetadata = convertMetadata(metadataFile, metadataFileReader, converter); + final Pair, ParquetInstructions> 
leafSchemaInfo = ParquetTools.convertSchema( metadataFileReader.getSchema(), metadataFileMetadata.getFileMetaData().getKeyValueMetaData(), inputInstructions); if (commonMetadataFile != null && commonMetadataFile.exists()) { - final ParquetFileReader commonMetadataFileReader = - ParquetTools.getParquetFileReader(commonMetadataFile); - final Pair, ParquetInstructions> fullSchemaInfo = - ParquetTools.convertSchema( + final ParquetFileReader commonMetadataFileReader = ParquetTools.getParquetFileReader(commonMetadataFile); + final Pair, ParquetInstructions> fullSchemaInfo = ParquetTools.convertSchema( commonMetadataFileReader.getSchema(), - convertMetadata(commonMetadataFile, commonMetadataFileReader, converter) - .getFileMetaData().getKeyValueMetaData(), + convertMetadata(commonMetadataFile, commonMetadataFileReader, converter).getFileMetaData() + .getKeyValueMetaData(), leafSchemaInfo.getSecond()); final List adjustedColumnDefinitions = new ArrayList<>(); - final Map leafDefinitionsMap = leafSchemaInfo.getFirst() - .stream().collect(toMap(ColumnDefinition::getName, Function.identity())); + final Map leafDefinitionsMap = + leafSchemaInfo.getFirst().stream().collect(toMap(ColumnDefinition::getName, Function.identity())); for (final ColumnDefinition fullDefinition : fullSchemaInfo.getFirst()) { - final ColumnDefinition leafDefinition = - leafDefinitionsMap.get(fullDefinition.getName()); + final ColumnDefinition leafDefinition = leafDefinitionsMap.get(fullDefinition.getName()); if (leafDefinition == null) { adjustedColumnDefinitions.add(adjustPartitionDefinition(fullDefinition)); } else if (fullDefinition.equals(leafDefinition)) { - adjustedColumnDefinitions.add(fullDefinition); // No adjustments to apply in - // this case + adjustedColumnDefinitions.add(fullDefinition); // No adjustments to apply in this case } else { final List differences = new ArrayList<>(); // noinspection unchecked - fullDefinition.describeDifferences(differences, leafDefinition, "full schema", - 
"file schema", ""); - throw new TableDataException(String.format( - "Schema mismatch between %s and %s for column %s: %s", - metadataFile, commonMetadataFile, fullDefinition.getName(), differences)); + fullDefinition.describeDifferences(differences, leafDefinition, "full schema", "file schema", ""); + throw new TableDataException(String.format("Schema mismatch between %s and %s for column %s: %s", + metadataFile, commonMetadataFile, fullDefinition.getName(), differences)); } } definition = new TableDefinition(adjustedColumnDefinitions); @@ -137,18 +125,15 @@ public ParquetMetadataFileLayout(@NotNull final File metadataFile, } final List partitioningColumns = definition.getPartitioningColumns(); - final Map partitioningColumnsMap = - partitioningColumns.stream().collect( - toMap(ColumnDefinition::getName, Function.identity(), Assert::neverInvoked, - LinkedHashMap::new)); + final Map partitioningColumnsMap = partitioningColumns.stream().collect( + toMap(ColumnDefinition::getName, Function.identity(), Assert::neverInvoked, LinkedHashMap::new)); final Map fileNameToRowGroupIndices = new LinkedHashMap<>(); final List rowGroups = metadataFileReader.fileMetaData.getRow_groups(); final int numRowGroups = rowGroups.size(); for (int rgi = 0; rgi < numRowGroups; ++rgi) { fileNameToRowGroupIndices - .computeIfAbsent(rowGroups.get(rgi).getColumns().get(0).getFile_path(), - fn -> new TIntArrayList()) - .add(rgi); + .computeIfAbsent(rowGroups.get(rgi).getColumns().get(0).getFile_path(), fn -> new TIntArrayList()) + .add(rgi); } final File directory = metadataFile.getParentFile(); final MutableInt partitionOrder = new MutableInt(0); @@ -158,16 +143,16 @@ public ParquetMetadataFileLayout(@NotNull final File metadataFile, if (filePathString == null || filePathString.isEmpty()) { throw new TableDataException("Missing parquet file name for row groups " - + Arrays.toString(rowGroupIndices) + " in " + metadataFile); + + Arrays.toString(rowGroupIndices) + " in " + metadataFile); } final 
LinkedHashMap> partitions = - partitioningColumns.isEmpty() ? null : new LinkedHashMap<>(); + partitioningColumns.isEmpty() ? null : new LinkedHashMap<>(); if (partitions != null) { final Path filePath = Paths.get(filePathString); final int numPartitions = filePath.getNameCount() - 1; if (numPartitions != partitioningColumns.size()) { - throw new TableDataException("Unexpected number of path elements in " - + filePathString + " for partitions " + partitions.keySet()); + throw new TableDataException("Unexpected number of path elements in " + filePathString + + " for partitions " + partitions.keySet()); } final boolean useHiveStyle = filePath.getName(0).toString().contains("="); for (int pi = 0; pi < numPartitions; ++pi) { @@ -179,11 +164,10 @@ public ParquetMetadataFileLayout(@NotNull final File metadataFile, final String[] pathComponents = pathElement.split("=", 2); if (pathComponents.length != 2) { throw new TableDataException( - "Unexpected path format found for hive-style partitioning from " - + filePathString + " for " + metadataFile); + "Unexpected path format found for hive-style partitioning from " + filePathString + + " for " + metadataFile); } - partitionKey = instructions - .getColumnNameFromParquetColumnNameOrDefault(pathComponents[0]); + partitionKey = instructions.getColumnNameFromParquetColumnNameOrDefault(pathComponents[0]); columnDefinition = partitioningColumnsMap.get(partitionKey); partitionValueRaw = pathComponents[1]; } else { @@ -191,18 +175,17 @@ public ParquetMetadataFileLayout(@NotNull final File metadataFile, partitionKey = columnDefinition.getName(); partitionValueRaw = pathElement; } - final Comparable partitionValue = CONVERSION_FUNCTIONS - .get(columnDefinition.getDataType()).apply(partitionValueRaw); + final Comparable partitionValue = + CONVERSION_FUNCTIONS.get(columnDefinition.getDataType()).apply(partitionValueRaw); if (partitions.containsKey(partitionKey)) { - throw new TableDataException( - "Unexpected duplicate partition key " + 
partitionKey + " when parsing " - + filePathString + " for " + metadataFile); + throw new TableDataException("Unexpected duplicate partition key " + partitionKey + + " when parsing " + filePathString + " for " + metadataFile); } partitions.put(partitionKey, partitionValue); } } - final ParquetTableLocationKey tlk = new ParquetTableLocationKey( - new File(directory, filePathString), partitionOrder.getAndIncrement(), partitions); + final ParquetTableLocationKey tlk = new ParquetTableLocationKey(new File(directory, filePathString), + partitionOrder.getAndIncrement(), partitions); tlk.setFileReader(metadataFileReader); tlk.setMetadata(metadataFileMetadata); tlk.setRowGroupIndices(rowGroupIndices); @@ -211,13 +194,12 @@ public ParquetMetadataFileLayout(@NotNull final File metadataFile, } public String toString() { - return ParquetMetadataFileLayout.class.getSimpleName() + '[' + metadataFile + ',' - + commonMetadataFile + ']'; + return ParquetMetadataFileLayout.class.getSimpleName() + '[' + metadataFile + ',' + commonMetadataFile + ']'; } private static ParquetMetadata convertMetadata(@NotNull final File file, - @NotNull final ParquetFileReader fileReader, - @NotNull final ParquetMetadataConverter converter) { + @NotNull final ParquetFileReader fileReader, + @NotNull final ParquetMetadataConverter converter) { try { return converter.fromParquetMetadata(fileReader.fileMetaData); } catch (IOException e) { @@ -225,16 +207,15 @@ private static ParquetMetadata convertMetadata(@NotNull final File file, } } - private static ColumnDefinition adjustPartitionDefinition( - @NotNull final ColumnDefinition columnDefinition) { + private static ColumnDefinition adjustPartitionDefinition(@NotNull final ColumnDefinition columnDefinition) { if (columnDefinition.getComponentType() != null) { return ColumnDefinition.fromGenericType(columnDefinition.getName(), String.class, - ColumnDefinition.COLUMNTYPE_PARTITIONING, null); + ColumnDefinition.COLUMNTYPE_PARTITIONING, null); } final Class 
dataType = columnDefinition.getDataType(); if (dataType == boolean.class) { return ColumnDefinition.fromGenericType(columnDefinition.getName(), Boolean.class, - ColumnDefinition.COLUMNTYPE_PARTITIONING, null); + ColumnDefinition.COLUMNTYPE_PARTITIONING, null); } if (dataType.isPrimitive()) { return columnDefinition.withPartitioning(); @@ -242,17 +223,16 @@ private static ColumnDefinition adjustPartitionDefinition( final Class unboxedType = TypeUtils.getUnboxedType(dataType); if (unboxedType != null && unboxedType.isPrimitive()) { return ColumnDefinition.fromGenericType(columnDefinition.getName(), unboxedType, - ColumnDefinition.COLUMNTYPE_PARTITIONING, null); + ColumnDefinition.COLUMNTYPE_PARTITIONING, null); } if (dataType == Boolean.class || dataType == String.class || dataType == BigDecimal.class - || dataType == BigInteger.class) { + || dataType == BigInteger.class) { return columnDefinition.withPartitioning(); } - // NB: This fallback includes any kind of timestamp; we don't have a strong grasp of - // required parsing support at + // NB: This fallback includes any kind of timestamp; we don't have a strong grasp of required parsing support at // this time, and preserving the contents as a String allows the user full control. return ColumnDefinition.fromGenericType(columnDefinition.getName(), String.class, - ColumnDefinition.COLUMNTYPE_PARTITIONING, null); + ColumnDefinition.COLUMNTYPE_PARTITIONING, null); } private static final Map>> CONVERSION_FUNCTIONS; diff --git a/DB/src/main/java/io/deephaven/db/v2/locations/local/SingleParquetFileLayout.java b/DB/src/main/java/io/deephaven/db/v2/locations/local/SingleParquetFileLayout.java index 6ff1379ed28..499e3132198 100644 --- a/DB/src/main/java/io/deephaven/db/v2/locations/local/SingleParquetFileLayout.java +++ b/DB/src/main/java/io/deephaven/db/v2/locations/local/SingleParquetFileLayout.java @@ -10,8 +10,7 @@ /** * Parquet {@link TableLocationKeyFinder location finder} that will discover a single file. 
*/ -public final class SingleParquetFileLayout - implements TableLocationKeyFinder { +public final class SingleParquetFileLayout implements TableLocationKeyFinder { private final File parquetFile; @@ -29,6 +28,6 @@ public String toString() { @Override public void findKeys(@NotNull final Consumer locationKeyObserver) { PrivilegedFileAccessUtil.doFilesystemAction( - () -> locationKeyObserver.accept(new ParquetTableLocationKey(parquetFile, 0, null))); + () -> locationKeyObserver.accept(new ParquetTableLocationKey(parquetFile, 0, null))); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/locations/parquet/ColumnChunkPageStore.java b/DB/src/main/java/io/deephaven/db/v2/locations/parquet/ColumnChunkPageStore.java index 2022ec09fdd..a65c12a8969 100644 --- a/DB/src/main/java/io/deephaven/db/v2/locations/parquet/ColumnChunkPageStore.java +++ b/DB/src/main/java/io/deephaven/db/v2/locations/parquet/ColumnChunkPageStore.java @@ -22,11 +22,10 @@ import java.lang.ref.WeakReference; public abstract class ColumnChunkPageStore - implements PageStore>, Page, SafeCloseable, Releasable { + implements PageStore>, Page, SafeCloseable, Releasable { private static final int CACHE_SIZE = - Configuration.getInstance().getIntegerWithDefault("ColumnChunkPageStore.cacheSize", - 1 << 13); + Configuration.getInstance().getIntegerWithDefault("ColumnChunkPageStore.cacheSize", 1 << 13); private static final WeakReference NULL_PAGE = new WeakReference<>(null); private final ColumnChunkReader columnChunkReader; @@ -37,16 +36,14 @@ public abstract class ColumnChunkPageStore final ColumnChunkReader.ColumnPageReaderIterator columnPageReaderIterator; final IntrusiveSoftLRU> intrusiveSoftLRU = - new IntrusiveSoftLRU<>(IntrusiveSoftLRU.Node.Adapter.>getInstance(), - CACHE_SIZE); + new IntrusiveSoftLRU<>(IntrusiveSoftLRU.Node.Adapter.>getInstance(), CACHE_SIZE); static WeakReference> getNullPage() { // noinspection unchecked return (WeakReference>) NULL_PAGE; } - static class IntrusivePage - extends 
IntrusiveSoftLRU.Node.Impl> { + static class IntrusivePage extends IntrusiveSoftLRU.Node.Impl> { private final ChunkPage page; @@ -66,37 +63,32 @@ public static class CreatorResult { public final ColumnChunkPageStore dictionaryKeysPageStore; private CreatorResult(@NotNull final ColumnChunkPageStore pageStore, - final Chunk dictionary, - final ColumnChunkPageStore dictionaryKeysPageStore) { + final Chunk dictionary, + final ColumnChunkPageStore dictionaryKeysPageStore) { this.pageStore = pageStore; this.dictionary = dictionary; this.dictionaryKeysPageStore = dictionaryKeysPageStore; } } - public static CreatorResult create( - @NotNull final ColumnChunkReader columnChunkReader, - final long mask, - @NotNull final ToPage toPage) throws IOException { + public static CreatorResult create(@NotNull final ColumnChunkReader columnChunkReader, + final long mask, + @NotNull final ToPage toPage) throws IOException { final boolean fixedSizePages = columnChunkReader.getPageFixedSize() >= 1; final ColumnChunkPageStore columnChunkPageStore = fixedSizePages - ? new FixedPageSizeColumnChunkPageStore<>(columnChunkReader, mask, toPage) - : new VariablePageSizeColumnChunkPageStore<>(columnChunkReader, mask, toPage); - final ToPage dictionaryKeysToPage = - toPage.getDictionaryKeysToPage(); - final ColumnChunkPageStore dictionaryKeysColumnChunkPageStore = - dictionaryKeysToPage == null ? null + ? new FixedPageSizeColumnChunkPageStore<>(columnChunkReader, mask, toPage) + : new VariablePageSizeColumnChunkPageStore<>(columnChunkReader, mask, toPage); + final ToPage dictionaryKeysToPage = toPage.getDictionaryKeysToPage(); + final ColumnChunkPageStore dictionaryKeysColumnChunkPageStore = dictionaryKeysToPage == null + ? null : fixedSizePages - ? 
new FixedPageSizeColumnChunkPageStore<>(columnChunkReader, mask, - dictionaryKeysToPage) - : new VariablePageSizeColumnChunkPageStore<>(columnChunkReader, mask, - dictionaryKeysToPage); - return new CreatorResult<>(columnChunkPageStore, toPage.getDictionary(), - dictionaryKeysColumnChunkPageStore); + ? new FixedPageSizeColumnChunkPageStore<>(columnChunkReader, mask, dictionaryKeysToPage) + : new VariablePageSizeColumnChunkPageStore<>(columnChunkReader, mask, dictionaryKeysToPage); + return new CreatorResult<>(columnChunkPageStore, toPage.getDictionary(), dictionaryKeysColumnChunkPageStore); } ColumnChunkPageStore(@NotNull final ColumnChunkReader columnChunkReader, final long mask, - final ToPage toPage) throws IOException { + final ToPage toPage) throws IOException { Require.requirement(((mask + 1) & mask) == 0, "mask is one less than a power of two"); this.columnChunkReader = columnChunkReader; @@ -107,8 +99,7 @@ public static CreatorResult create( this.columnPageReaderIterator = columnChunkReader.getPageIterator(); } - ChunkPage toPage(final long offset, @NotNull final ColumnPageReader columnPageReader) - throws IOException { + ChunkPage toPage(final long offset, @NotNull final ColumnPageReader columnPageReader) throws IOException { return toPage.toPage(offset, columnPageReader, mask); } @@ -133,8 +124,7 @@ public ChunkType getChunkType() { } /** - * These implementations don't use the FillContext parameter, so we're create a helper method to - * ignore it. + * These implementations don't use the FillContext parameter, so we're create a helper method to ignore it. 
*/ @NotNull public ChunkPage getPageContaining(final long row) { diff --git a/DB/src/main/java/io/deephaven/db/v2/locations/parquet/FixedPageSizeColumnChunkPageStore.java b/DB/src/main/java/io/deephaven/db/v2/locations/parquet/FixedPageSizeColumnChunkPageStore.java index 24bb1a108d9..4a9acdfdc48 100644 --- a/DB/src/main/java/io/deephaven/db/v2/locations/parquet/FixedPageSizeColumnChunkPageStore.java +++ b/DB/src/main/java/io/deephaven/db/v2/locations/parquet/FixedPageSizeColumnChunkPageStore.java @@ -21,8 +21,8 @@ class FixedPageSizeColumnChunkPageStore extends ColumnChunkPag private final ColumnPageReader[] columnPageReaders; private final WeakReference>[] pages; - FixedPageSizeColumnChunkPageStore(@NotNull final ColumnChunkReader columnChunkReader, - final long mask, @NotNull final ToPage toPage) throws IOException { + FixedPageSizeColumnChunkPageStore(@NotNull final ColumnChunkReader columnChunkReader, final long mask, + @NotNull final ToPage toPage) throws IOException { super(columnChunkReader, mask, toPage); this.pageFixedSize = columnChunkReader.getPageFixedSize(); @@ -42,9 +42,8 @@ private void fillToPage(final int pageNum) { while (numPages <= pageNum) { synchronized (this) { if (numPages <= pageNum) { - Assert.assertion(columnPageReaderIterator.hasNext(), - "columnPageReaderIterator.hasNext()", - "Parquet fixed page size and page iterator don't match, not enough pages."); + Assert.assertion(columnPageReaderIterator.hasNext(), "columnPageReaderIterator.hasNext()", + "Parquet fixed page size and page iterator don't match, not enough pages."); columnPageReaders[numPages++] = columnPageReaderIterator.next(); } } @@ -60,8 +59,7 @@ private ChunkPage getPage(final int pageNum) { if (page == null) { try { - page = new IntrusivePage<>( - toPage((long) pageNum * pageFixedSize, columnPageReaders[pageNum])); + page = new IntrusivePage<>(toPage((long) pageNum * pageFixedSize, columnPageReaders[pageNum])); } catch (IOException except) { throw new 
UncheckedIOException(except); } @@ -76,8 +74,7 @@ private ChunkPage getPage(final int pageNum) { } @Override - public @NotNull ChunkPage getPageContaining(FillContext fillContext, - final long elementIndex) { + public @NotNull ChunkPage getPageContaining(FillContext fillContext, final long elementIndex) { final long row = elementIndex & mask(); Require.inRange(row, "row", size(), "numRows"); diff --git a/DB/src/main/java/io/deephaven/db/v2/locations/parquet/VariablePageSizeColumnChunkPageStore.java b/DB/src/main/java/io/deephaven/db/v2/locations/parquet/VariablePageSizeColumnChunkPageStore.java index 346ceed3cf3..b2a007b9a7c 100644 --- a/DB/src/main/java/io/deephaven/db/v2/locations/parquet/VariablePageSizeColumnChunkPageStore.java +++ b/DB/src/main/java/io/deephaven/db/v2/locations/parquet/VariablePageSizeColumnChunkPageStore.java @@ -16,20 +16,17 @@ class VariablePageSizeColumnChunkPageStore extends ColumnChunkPageStore { - // We will set numPages after changing all of these arrays in place and/or setting additional - // elements to the - // end of the array. Thus, for i < numPages, array[i] will always have the same value, and be - // valid to use, as - // long as we fetch numPages before accessing the arrays. This is the thread-safe pattern used - // throughout. + // We will set numPages after changing all of these arrays in place and/or setting additional elements to the + // end of the array. Thus, for i < numPages, array[i] will always have the same value, and be valid to use, as + // long as we fetch numPages before accessing the arrays. This is the thread-safe pattern used throughout. 
private volatile int numPages = 0; private volatile long[] pageRowOffsets; private volatile ColumnPageReader[] columnPageReaders; private volatile WeakReference>[] pages; - VariablePageSizeColumnChunkPageStore(@NotNull final ColumnChunkReader columnChunkReader, - final long mask, @NotNull final ToPage toPage) throws IOException { + VariablePageSizeColumnChunkPageStore(@NotNull final ColumnChunkReader columnChunkReader, final long mask, + @NotNull final ToPage toPage) throws IOException { super(columnChunkReader, mask, toPage); final int INIT_ARRAY_SIZE = 15; @@ -49,9 +46,8 @@ private void extendOnePage(final int prevNumPages) { // Make sure that no one has has already extended to this page yet. if (localNumPages == prevNumPages) { - Assert.assertion(columnPageReaderIterator.hasNext(), - "columnPageReaderIterator.hasNext()", - "Parquet num rows and page iterator don't match, not enough pages."); + Assert.assertion(columnPageReaderIterator.hasNext(), "columnPageReaderIterator.hasNext()", + "Parquet num rows and page iterator don't match, not enough pages."); if (columnPageReaders.length == localNumPages) { int newSize = 2 * localNumPages; @@ -113,8 +109,7 @@ private ChunkPage getPage(final int pageNum) { if (page == null) { try { - page = new IntrusivePage<>( - toPage(pageRowOffsets[pageNum], columnPageReaders[pageNum])); + page = new IntrusivePage<>(toPage(pageRowOffsets[pageNum], columnPageReaders[pageNum])); } catch (IOException except) { throw new UncheckedIOException(except); } diff --git a/DB/src/main/java/io/deephaven/db/v2/locations/parquet/local/ParquetColumnLocation.java b/DB/src/main/java/io/deephaven/db/v2/locations/parquet/local/ParquetColumnLocation.java index b6a95743437..7c6fff2750e 100644 --- a/DB/src/main/java/io/deephaven/db/v2/locations/parquet/local/ParquetColumnLocation.java +++ b/DB/src/main/java/io/deephaven/db/v2/locations/parquet/local/ParquetColumnLocation.java @@ -59,15 +59,15 @@ final class ParquetColumnLocation extends AbstractColumnLoc 
private static final String IMPLEMENTATION_NAME = ParquetColumnLocation.class.getSimpleName(); - private static final int CHUNK_SIZE = Configuration.getInstance() - .getIntegerForClassWithDefault(ParquetColumnLocation.class, "chunkSize", 4096); + private static final int CHUNK_SIZE = + Configuration.getInstance().getIntegerForClassWithDefault(ParquetColumnLocation.class, "chunkSize", 4096); private static final Logger log = LoggerFactory.getLogger(ParquetColumnLocation.class); private final String parquetColumnName; /** - * Factory object needed for deferred initialization of the remaining fields. Reference serves - * as a barrier to ensure visibility of the derived fields. + * Factory object needed for deferred initialization of the remaining fields. Reference serves as a barrier to + * ensure visibility of the derived fields. */ private volatile ColumnChunkReader[] columnChunkReaders; private final boolean hasGroupingTable; @@ -77,20 +77,18 @@ final class ParquetColumnLocation extends AbstractColumnLoc private ColumnChunkPageStore[] dictionaryKeysPageStores; /** - * Construct a new {@link ParquetColumnLocation} for the specified {@link ParquetTableLocation} - * and column name. + * Construct a new {@link ParquetColumnLocation} for the specified {@link ParquetTableLocation} and column name. 
* * @param tableLocation The table location enclosing this column location * @param parquetColumnName The Parquet file column name - * @param columnChunkReaders The {@link ColumnChunkReader column chunk readers} for this - * location + * @param columnChunkReaders The {@link ColumnChunkReader column chunk readers} for this location * @param hasGroupingTable Whether this column has an associated grouping table file */ ParquetColumnLocation(@NotNull final ParquetTableLocation tableLocation, - @NotNull final String columnName, - @NotNull final String parquetColumnName, - @Nullable final ColumnChunkReader[] columnChunkReaders, - final boolean hasGroupingTable) { + @NotNull final String columnName, + @NotNull final String parquetColumnName, + @Nullable final ColumnChunkReader[] columnChunkReaders, + final boolean hasGroupingTable) { super(tableLocation, columnName); this.parquetColumnName = parquetColumnName; this.columnChunkReaders = columnChunkReaders; @@ -108,8 +106,7 @@ public String getImplementationName() { @Override public final boolean exists() { - // If we see a null columnChunkReaders array, either we don't exist or we are guaranteed to - // see a non-null + // If we see a null columnChunkReaders array, either we don't exist or we are guaranteed to see a non-null // pageStores array return columnChunkReaders != null || pageStores != null; } @@ -118,201 +115,185 @@ private ParquetTableLocation tl() { return (ParquetTableLocation) getTableLocation(); } - private static final ColumnDefinition FIRST_KEY_COL_DEF = - ColumnDefinition.ofLong("__firstKey__"); - private static final ColumnDefinition LAST_KEY_COL_DEF = - ColumnDefinition.ofLong("__lastKey__"); + private static final ColumnDefinition FIRST_KEY_COL_DEF = ColumnDefinition.ofLong("__firstKey__"); + private static final ColumnDefinition LAST_KEY_COL_DEF = ColumnDefinition.ofLong("__lastKey__"); @Override @Nullable - public final METADATA_TYPE getMetadata( - @NotNull final ColumnDefinition columnDefinition) { 
+ public final METADATA_TYPE getMetadata(@NotNull final ColumnDefinition columnDefinition) { if (!hasGroupingTable) { return null; } final Function defaultGroupingFilenameByColumnName = - ParquetTableWriter.defaultGroupingFileName(tl().getParquetFile().getAbsolutePath()); + ParquetTableWriter.defaultGroupingFileName(tl().getParquetFile().getAbsolutePath()); try { - final GroupingColumnInfo groupingColumnInfo = - tl().getGroupingColumns().get(parquetColumnName); + final GroupingColumnInfo groupingColumnInfo = tl().getGroupingColumns().get(parquetColumnName); final ParquetFileReader parquetFileReader; final String groupingFileName = groupingColumnInfo == null - ? defaultGroupingFilenameByColumnName.apply(parquetColumnName) - : tl().getParquetFile().toPath().getParent() - .resolve(groupingColumnInfo.groupingTablePath()).toString(); + ? defaultGroupingFilenameByColumnName.apply(parquetColumnName) + : tl().getParquetFile().toPath().getParent().resolve(groupingColumnInfo.groupingTablePath()) + .toString(); try { - parquetFileReader = - new ParquetFileReader(groupingFileName, tl().getChannelProvider(), -1); + parquetFileReader = new ParquetFileReader(groupingFileName, tl().getChannelProvider(), -1); } catch (Exception e) { log.warn().append("Failed to read expected grouping file ").append(groupingFileName) - .append(" for table location ").append(tl()).append(", column ") - .append(getName()); + .append(" for table location ").append(tl()).append(", column ").append(getName()); return null; } final Map columnTypes = ParquetSchemaReader.parseMetadata( - new ParquetMetadataConverter().fromParquetMetadata(parquetFileReader.fileMetaData) - .getFileMetaData().getKeyValueMetaData()) - .map(TableInfo::columnTypeMap).orElse(Collections.emptyMap()); + new ParquetMetadataConverter().fromParquetMetadata(parquetFileReader.fileMetaData).getFileMetaData() + .getKeyValueMetaData()) + .map(TableInfo::columnTypeMap).orElse(Collections.emptyMap()); final RowGroupReader rowGroupReader = 
parquetFileReader.getRowGroup(0); final ColumnChunkReader groupingKeyReader = - rowGroupReader.getColumnChunk(Collections.singletonList(GROUPING_KEY)); + rowGroupReader.getColumnChunk(Collections.singletonList(GROUPING_KEY)); final ColumnChunkReader beginPosReader = - rowGroupReader.getColumnChunk(Collections.singletonList(BEGIN_POS)); - final ColumnChunkReader endPosReader = - rowGroupReader.getColumnChunk(Collections.singletonList(END_POS)); + rowGroupReader.getColumnChunk(Collections.singletonList(BEGIN_POS)); + final ColumnChunkReader endPosReader = rowGroupReader.getColumnChunk(Collections.singletonList(END_POS)); if (groupingKeyReader == null || beginPosReader == null || endPosReader == null) { log.warn().append("Grouping file ").append(groupingFileName) - .append(" is missing one or more expected columns for table location ") - .append(tl()).append(", column ").append(getName()); + .append(" is missing one or more expected columns for table location ").append(tl()) + .append(", column ").append(getName()); return null; } // noinspection unchecked return (METADATA_TYPE) new MetaDataTableFactory( - ColumnChunkPageStore.create(groupingKeyReader, - ELEMENT_INDEX_TO_SUB_REGION_ELEMENT_INDEX_MASK, - makeToPage(columnTypes.get(GROUPING_KEY), ParquetInstructions.EMPTY, - GROUPING_KEY, groupingKeyReader, columnDefinition)).pageStore, - ColumnChunkPageStore.create(beginPosReader, - ELEMENT_INDEX_TO_SUB_REGION_ELEMENT_INDEX_MASK, - makeToPage(columnTypes.get(BEGIN_POS), ParquetInstructions.EMPTY, BEGIN_POS, - beginPosReader, FIRST_KEY_COL_DEF)).pageStore, - ColumnChunkPageStore.create(endPosReader, - ELEMENT_INDEX_TO_SUB_REGION_ELEMENT_INDEX_MASK, - makeToPage(columnTypes.get(END_POS), ParquetInstructions.EMPTY, END_POS, - beginPosReader, LAST_KEY_COL_DEF)).pageStore).get(); + ColumnChunkPageStore.create(groupingKeyReader, + ELEMENT_INDEX_TO_SUB_REGION_ELEMENT_INDEX_MASK, + makeToPage(columnTypes.get(GROUPING_KEY), ParquetInstructions.EMPTY, GROUPING_KEY, + 
groupingKeyReader, columnDefinition)).pageStore, + ColumnChunkPageStore.create(beginPosReader, + ELEMENT_INDEX_TO_SUB_REGION_ELEMENT_INDEX_MASK, + makeToPage(columnTypes.get(BEGIN_POS), ParquetInstructions.EMPTY, BEGIN_POS, beginPosReader, + FIRST_KEY_COL_DEF)).pageStore, + ColumnChunkPageStore.create(endPosReader, + ELEMENT_INDEX_TO_SUB_REGION_ELEMENT_INDEX_MASK, + makeToPage(columnTypes.get(END_POS), ParquetInstructions.EMPTY, END_POS, beginPosReader, + LAST_KEY_COL_DEF)).pageStore).get(); } catch (IOException e) { throw new UncheckedIOException(e); } } private REGION_TYPE makeColumnRegion( - @NotNull final Function, SOURCE[]> sourceArrayFactory, - @NotNull final ColumnDefinition columnDefinition, - @NotNull final LongFunction nullRegionFactory, - @NotNull final Function singleRegionFactory, - @NotNull final Function, REGION_TYPE> multiRegionFactory) { + @NotNull final Function, SOURCE[]> sourceArrayFactory, + @NotNull final ColumnDefinition columnDefinition, + @NotNull final LongFunction nullRegionFactory, + @NotNull final Function singleRegionFactory, + @NotNull final Function, REGION_TYPE> multiRegionFactory) { final SOURCE[] sources = sourceArrayFactory.apply(columnDefinition); return sources.length == 1 - ? makeSingleColumnRegion(sources[0], nullRegionFactory, singleRegionFactory) - : multiRegionFactory.apply(Arrays.stream(sources).map( - source -> makeSingleColumnRegion(source, nullRegionFactory, singleRegionFactory))); + ? makeSingleColumnRegion(sources[0], nullRegionFactory, singleRegionFactory) + : multiRegionFactory.apply(Arrays.stream(sources) + .map(source -> makeSingleColumnRegion(source, nullRegionFactory, singleRegionFactory))); } private REGION_TYPE makeSingleColumnRegion(final SOURCE source, - @NotNull final LongFunction nullRegionFactory, - @NotNull final Function singleRegionFactory) { + @NotNull final LongFunction nullRegionFactory, + @NotNull final Function singleRegionFactory) { return source == null ? 
nullRegionFactory.apply(tl().getRegionParameters().regionMask) - : singleRegionFactory.apply(source); + : singleRegionFactory.apply(source); } @Override - public ColumnRegionChar makeColumnRegionChar( - @NotNull final ColumnDefinition columnDefinition) { + public ColumnRegionChar makeColumnRegionChar(@NotNull final ColumnDefinition columnDefinition) { // noinspection unchecked return (ColumnRegionChar) makeColumnRegion(this::getPageStores, columnDefinition, - ColumnRegionChar::createNull, ParquetColumnRegionChar::new, - rs -> new ColumnRegionChar.StaticPageStore(tl().getRegionParameters(), - rs.toArray(ColumnRegionChar[]::new))); + ColumnRegionChar::createNull, ParquetColumnRegionChar::new, + rs -> new ColumnRegionChar.StaticPageStore(tl().getRegionParameters(), + rs.toArray(ColumnRegionChar[]::new))); } @Override - public ColumnRegionByte makeColumnRegionByte( - @NotNull final ColumnDefinition columnDefinition) { + public ColumnRegionByte makeColumnRegionByte(@NotNull final ColumnDefinition columnDefinition) { // noinspection unchecked return (ColumnRegionByte) makeColumnRegion(this::getPageStores, columnDefinition, - ColumnRegionByte::createNull, ParquetColumnRegionByte::new, - rs -> new ColumnRegionByte.StaticPageStore(tl().getRegionParameters(), - rs.toArray(ColumnRegionByte[]::new))); + ColumnRegionByte::createNull, ParquetColumnRegionByte::new, + rs -> new ColumnRegionByte.StaticPageStore(tl().getRegionParameters(), + rs.toArray(ColumnRegionByte[]::new))); } @Override - public ColumnRegionShort makeColumnRegionShort( - @NotNull final ColumnDefinition columnDefinition) { + public ColumnRegionShort makeColumnRegionShort(@NotNull final ColumnDefinition columnDefinition) { // noinspection unchecked return (ColumnRegionShort) makeColumnRegion(this::getPageStores, columnDefinition, - ColumnRegionShort::createNull, ParquetColumnRegionShort::new, - rs -> new ColumnRegionShort.StaticPageStore(tl().getRegionParameters(), - rs.toArray(ColumnRegionShort[]::new))); + 
ColumnRegionShort::createNull, ParquetColumnRegionShort::new, + rs -> new ColumnRegionShort.StaticPageStore(tl().getRegionParameters(), + rs.toArray(ColumnRegionShort[]::new))); } @Override - public ColumnRegionInt makeColumnRegionInt( - @NotNull final ColumnDefinition columnDefinition) { + public ColumnRegionInt makeColumnRegionInt(@NotNull final ColumnDefinition columnDefinition) { // noinspection unchecked return (ColumnRegionInt) makeColumnRegion(this::getPageStores, columnDefinition, - ColumnRegionInt::createNull, ParquetColumnRegionInt::new, - rs -> new ColumnRegionInt.StaticPageStore(tl().getRegionParameters(), - rs.toArray(ColumnRegionInt[]::new))); + ColumnRegionInt::createNull, ParquetColumnRegionInt::new, + rs -> new ColumnRegionInt.StaticPageStore(tl().getRegionParameters(), + rs.toArray(ColumnRegionInt[]::new))); } @Override - public ColumnRegionLong makeColumnRegionLong( - @NotNull final ColumnDefinition columnDefinition) { + public ColumnRegionLong makeColumnRegionLong(@NotNull final ColumnDefinition columnDefinition) { // noinspection unchecked return (ColumnRegionLong) makeColumnRegion(this::getPageStores, columnDefinition, - ColumnRegionLong::createNull, ParquetColumnRegionLong::new, - rs -> new ColumnRegionLong.StaticPageStore(tl().getRegionParameters(), - rs.toArray(ColumnRegionLong[]::new))); + ColumnRegionLong::createNull, ParquetColumnRegionLong::new, + rs -> new ColumnRegionLong.StaticPageStore(tl().getRegionParameters(), + rs.toArray(ColumnRegionLong[]::new))); } @Override - public ColumnRegionFloat makeColumnRegionFloat( - @NotNull final ColumnDefinition columnDefinition) { + public ColumnRegionFloat makeColumnRegionFloat(@NotNull final ColumnDefinition columnDefinition) { // noinspection unchecked return (ColumnRegionFloat) makeColumnRegion(this::getPageStores, columnDefinition, - ColumnRegionFloat::createNull, ParquetColumnRegionFloat::new, - rs -> new ColumnRegionFloat.StaticPageStore(tl().getRegionParameters(), - 
rs.toArray(ColumnRegionFloat[]::new))); + ColumnRegionFloat::createNull, ParquetColumnRegionFloat::new, + rs -> new ColumnRegionFloat.StaticPageStore(tl().getRegionParameters(), + rs.toArray(ColumnRegionFloat[]::new))); } @Override - public ColumnRegionDouble makeColumnRegionDouble( - @NotNull final ColumnDefinition columnDefinition) { + public ColumnRegionDouble makeColumnRegionDouble(@NotNull final ColumnDefinition columnDefinition) { // noinspection unchecked return (ColumnRegionDouble) makeColumnRegion(this::getPageStores, columnDefinition, - ColumnRegionDouble::createNull, ParquetColumnRegionDouble::new, - rs -> new ColumnRegionDouble.StaticPageStore(tl().getRegionParameters(), - rs.toArray(ColumnRegionDouble[]::new))); + ColumnRegionDouble::createNull, ParquetColumnRegionDouble::new, + rs -> new ColumnRegionDouble.StaticPageStore(tl().getRegionParameters(), + rs.toArray(ColumnRegionDouble[]::new))); } @Override public ColumnRegionObject makeColumnRegionObject( - @NotNull final ColumnDefinition columnDefinition) { + @NotNull final ColumnDefinition columnDefinition) { // noinspection unchecked final Class dataType = columnDefinition.getDataType(); final ColumnChunkPageStore[] sources = getPageStores(columnDefinition); - final ColumnChunkPageStore[] dictKeySources = - getDictionaryKeysPageStores(columnDefinition); + final ColumnChunkPageStore[] dictKeySources = getDictionaryKeysPageStores(columnDefinition); final Chunk[] dicts = getDictionaries(columnDefinition); if (sources.length == 1) { // noinspection unchecked - return (ColumnRegionObject) makeSingleColumnRegionObject(dataType, - sources[0], dictKeySources[0], dicts[0]); + return (ColumnRegionObject) makeSingleColumnRegionObject(dataType, sources[0], + dictKeySources[0], dicts[0]); } // noinspection unchecked return (ColumnRegionObject) new ColumnRegionObject.StaticPageStore( - tl().getRegionParameters(), - IntStream.range(0, sources.length) - .mapToObj(ri -> makeSingleColumnRegionObject(dataType, 
sources[ri], - dictKeySources[ri], dicts[ri])) - .toArray(ColumnRegionObject[]::new)); + tl().getRegionParameters(), + IntStream.range(0, sources.length) + .mapToObj(ri -> makeSingleColumnRegionObject(dataType, sources[ri], dictKeySources[ri], + dicts[ri])) + .toArray(ColumnRegionObject[]::new)); } - private ColumnRegionObject makeSingleColumnRegionObject( - @NotNull final Class dataType, - @Nullable final ColumnChunkPageStore source, - @Nullable final ColumnChunkPageStore dictKeySource, - @Nullable final Chunk dict) { + private ColumnRegionObject makeSingleColumnRegionObject(@NotNull final Class dataType, + @Nullable final ColumnChunkPageStore source, + @Nullable final ColumnChunkPageStore dictKeySource, + @Nullable final Chunk dict) { if (source == null) { return ColumnRegionObject.createNull(tl().getRegionParameters().regionMask); } return new ParquetColumnRegionObject<>(source, - () -> new ParquetColumnRegionLong<>(Require.neqNull(dictKeySource, "dictKeySource")), - () -> ColumnRegionChunkDictionary.create(tl().getRegionParameters().regionMask, - dataType, Require.neqNull(dict, "dict"))); + () -> new ParquetColumnRegionLong<>(Require.neqNull(dictKeySource, "dictKeySource")), + () -> ColumnRegionChunkDictionary.create(tl().getRegionParameters().regionMask, dataType, + Require.neqNull(dict, "dict"))); } /** @@ -322,8 +303,7 @@ private ColumnRegionObject makeSingleColumnRegionObject( * @return The page stores */ @NotNull - public final ColumnChunkPageStore[] getPageStores( - @NotNull final ColumnDefinition columnDefinition) { + public final ColumnChunkPageStore[] getPageStores(@NotNull final ColumnDefinition columnDefinition) { fetchValues(columnDefinition); return pageStores; } @@ -340,14 +320,14 @@ public Chunk[] getDictionaries(@NotNull final ColumnDefinition columnDe } /** - * Get the {@link ColumnChunkPageStore page stores} backing the indices for this column - * location. Only usable when there are dictionaries. 
+ * Get the {@link ColumnChunkPageStore page stores} backing the indices for this column location. Only usable when + * there are dictionaries. * * @param columnDefinition The {@link ColumnDefinition} used to lookup type information * @return The page stores */ private ColumnChunkPageStore[] getDictionaryKeysPageStores( - @NotNull final ColumnDefinition columnDefinition) { + @NotNull final ColumnDefinition columnDefinition) { fetchValues(columnDefinition); return dictionaryKeysPageStores; } @@ -369,19 +349,16 @@ private void fetchValues(@NotNull final ColumnDefinition columnDefinition) { for (int psi = 0; psi < pageStoreCount; ++psi) { final ColumnChunkReader columnChunkReader = columnChunkReaders[psi]; try { - final ColumnChunkPageStore.CreatorResult creatorResult = - ColumnChunkPageStore.create( + final ColumnChunkPageStore.CreatorResult creatorResult = ColumnChunkPageStore.create( columnChunkReader, tl().getRegionParameters().regionMask, - makeToPage(tl().getColumnTypes().get(parquetColumnName), - tl().getReadInstructions(), parquetColumnName, columnChunkReader, - columnDefinition)); + makeToPage(tl().getColumnTypes().get(parquetColumnName), tl().getReadInstructions(), + parquetColumnName, columnChunkReader, columnDefinition)); pageStores[psi] = creatorResult.pageStore; dictionaries[psi] = creatorResult.dictionary; dictionaryKeysPageStores[psi] = creatorResult.dictionaryKeysPageStore; } catch (IOException e) { - throw new TableDataException( - "Failed to read parquet file for " + this + ", row group " + psi, e); + throw new TableDataException("Failed to read parquet file for " + this + ", row group " + psi, e); } } @@ -398,8 +375,8 @@ private static final class MetaDataTableFactory { private volatile Object metaData; private MetaDataTableFactory(@NotNull final ColumnChunkPageStore keyColumn, - @NotNull final ColumnChunkPageStore firstColumn, - @NotNull final ColumnChunkPageStore lastColumn) { + @NotNull final ColumnChunkPageStore firstColumn, + @NotNull final 
ColumnChunkPageStore lastColumn) { this.keyColumn = Require.neqNull(keyColumn, "keyColumn"); this.firstColumn = Require.neqNull(firstColumn, "firstColumn"); this.lastColumn = Require.neqNull(lastColumn, "lastColumn"); @@ -415,27 +392,21 @@ public Object get() { } final int numRows = (int) keyColumn.size(); - try ( - final ChunkBoxer.BoxerKernel boxerKernel = + try (final ChunkBoxer.BoxerKernel boxerKernel = ChunkBoxer.getBoxer(keyColumn.getChunkType(), CHUNK_SIZE); - final BuildGrouping buildGrouping = - BuildGrouping.builder(firstColumn.getChunkType(), numRows); - final ChunkSource.GetContext keyContext = keyColumn.makeGetContext(CHUNK_SIZE); - final ChunkSource.GetContext firstContext = - firstColumn.makeGetContext(CHUNK_SIZE); - final ChunkSource.GetContext lastContext = - lastColumn.makeGetContext(CHUNK_SIZE); - final OrderedKeys rows = OrderedKeys.forRange(0, numRows - 1); - final OrderedKeys.Iterator rowsIterator = rows.getOrderedKeysIterator()) { + final BuildGrouping buildGrouping = BuildGrouping.builder(firstColumn.getChunkType(), numRows); + final ChunkSource.GetContext keyContext = keyColumn.makeGetContext(CHUNK_SIZE); + final ChunkSource.GetContext firstContext = firstColumn.makeGetContext(CHUNK_SIZE); + final ChunkSource.GetContext lastContext = lastColumn.makeGetContext(CHUNK_SIZE); + final OrderedKeys rows = OrderedKeys.forRange(0, numRows - 1); + final OrderedKeys.Iterator rowsIterator = rows.getOrderedKeysIterator()) { while (rowsIterator.hasMore()) { - final OrderedKeys chunkRows = - rowsIterator.getNextOrderedKeysWithLength(CHUNK_SIZE); + final OrderedKeys chunkRows = rowsIterator.getNextOrderedKeysWithLength(CHUNK_SIZE); - buildGrouping.build( - boxerKernel.box(keyColumn.getChunk(keyContext, chunkRows)), - firstColumn.getChunk(firstContext, chunkRows), - lastColumn.getChunk(lastContext, chunkRows)); + buildGrouping.build(boxerKernel.box(keyColumn.getChunk(keyContext, chunkRows)), + firstColumn.getChunk(firstContext, chunkRows), + 
lastColumn.getChunk(lastContext, chunkRows)); } metaData = buildGrouping.getGrouping(); @@ -446,8 +417,8 @@ public Object get() { private interface BuildGrouping extends Context { void build(@NotNull ObjectChunk keyChunk, - @NotNull Chunk firstChunk, - @NotNull Chunk lastChunk); + @NotNull Chunk firstChunk, + @NotNull Chunk lastChunk); Object getGrouping(); @@ -458,8 +429,7 @@ static BuildGrouping builder(@NotNull final ChunkType chunkType, final int numRo case Long: return new LongBuildGrouping(numRows); default: - throw new IllegalArgumentException( - "Unknown type for an index: " + chunkType); + throw new IllegalArgumentException("Unknown type for an index: " + chunkType); } } @@ -473,12 +443,10 @@ final class IntBuildGrouping implements BuildGrouping { @Override public void build(@NotNull final ObjectChunk keyChunk, - @NotNull final Chunk firstChunk, - @NotNull final Chunk lastChunk) { - final IntChunk firstIntChunk = - firstChunk.asIntChunk(); - final IntChunk lastIntChunk = - lastChunk.asIntChunk(); + @NotNull final Chunk firstChunk, + @NotNull final Chunk lastChunk) { + final IntChunk firstIntChunk = firstChunk.asIntChunk(); + final IntChunk lastIntChunk = lastChunk.asIntChunk(); for (int ki = 0; ki < keyChunk.size(); ++ki) { final int[] range = new int[2]; @@ -506,12 +474,10 @@ final class LongBuildGrouping implements BuildGrouping { @Override public void build(@NotNull final ObjectChunk keyChunk, - @NotNull final Chunk firstChunk, - @NotNull final Chunk lastChunk) { - final LongChunk firstLongChunk = - firstChunk.asLongChunk(); - final LongChunk lastLongChunk = - lastChunk.asLongChunk(); + @NotNull final Chunk firstChunk, + @NotNull final Chunk lastChunk) { + final LongChunk firstLongChunk = firstChunk.asLongChunk(); + final LongChunk lastLongChunk = lastChunk.asLongChunk(); for (int ki = 0; ki < keyChunk.size(); ++ki) { final long[] range = new long[2]; @@ -532,21 +498,19 @@ public Object getGrouping() { } private static ToPage makeToPage( - @Nullable 
final ColumnTypeInfo columnTypeInfo, - @NotNull final ParquetInstructions readInstructions, - @NotNull final String parquetColumnName, - @NotNull final ColumnChunkReader columnChunkReader, - @NotNull final ColumnDefinition columnDefinition) { + @Nullable final ColumnTypeInfo columnTypeInfo, + @NotNull final ParquetInstructions readInstructions, + @NotNull final String parquetColumnName, + @NotNull final ColumnChunkReader columnChunkReader, + @NotNull final ColumnDefinition columnDefinition) { final PrimitiveType type = columnChunkReader.getType(); final LogicalTypeAnnotation logicalTypeAnnotation = type.getLogicalTypeAnnotation(); - final String codecFromInstructions = - readInstructions.getCodecName(columnDefinition.getName()); + final String codecFromInstructions = readInstructions.getCodecName(columnDefinition.getName()); final String codecName = (codecFromInstructions != null) - ? codecFromInstructions - : columnTypeInfo == null ? null - : columnTypeInfo.codec().map(CodecInfo::codecName).orElse(null); + ? codecFromInstructions + : columnTypeInfo == null ? null : columnTypeInfo.codec().map(CodecInfo::codecName).orElse(null); final ColumnTypeInfo.SpecialType specialTypeName = - columnTypeInfo == null ? null : columnTypeInfo.specialType().orElse(null); + columnTypeInfo == null ? null : columnTypeInfo.specialType().orElse(null); final boolean isArray = columnChunkReader.getMaxRl() > 0; final boolean isCodec = CodecLookup.explicitCodecPresent(codecName); @@ -556,8 +520,8 @@ private static ToPage makeToPage( } try { - // Note that componentType is null for a StringSet. ToStringSetPage.create specifically - // doesn't take this parameter. + // Note that componentType is null for a StringSet. ToStringSetPage.create specifically doesn't take this + // parameter. final Class dataType = columnDefinition.getDataType(); final Class componentType = columnDefinition.getComponentType(); final Class pageType = isArray ? 
componentType : dataType; @@ -565,9 +529,9 @@ private static ToPage makeToPage( ToPage toPage = null; if (logicalTypeAnnotation != null) { - toPage = logicalTypeAnnotation.accept( - new LogicalTypeVisitor(parquetColumnName, columnChunkReader, pageType)) - .orElse(null); + toPage = logicalTypeAnnotation + .accept(new LogicalTypeVisitor(parquetColumnName, columnChunkReader, pageType)) + .orElse(null); } if (toPage == null) { @@ -597,28 +561,24 @@ private static ToPage makeToPage( final ObjectCodec codec; if (isCodec) { final String codecArgs = codecFromInstructions != null - ? readInstructions.getCodecArgs(columnDefinition.getName()) - : columnTypeInfo.codec().flatMap(CodecInfo::codecArg).orElse(null); + ? readInstructions.getCodecArgs(columnDefinition.getName()) + : columnTypeInfo.codec().flatMap(CodecInfo::codecArg).orElse(null); codec = CodecCache.DEFAULT.getCodec(codecName, codecArgs); } else { - final String codecArgs = - (typeName == PrimitiveType.PrimitiveTypeName.FIXED_LEN_BYTE_ARRAY) + final String codecArgs = (typeName == PrimitiveType.PrimitiveTypeName.FIXED_LEN_BYTE_ARRAY) ? 
Integer.toString(type.getTypeLength()) : null; - codec = CodecCache.DEFAULT - .getCodec(SimpleByteArrayCodec.class.getName(), codecArgs); + codec = CodecCache.DEFAULT.getCodec(SimpleByteArrayCodec.class.getName(), codecArgs); } // noinspection unchecked - toPage = - ToObjectPage.create(dataType, codec, columnChunkReader.getDictionary()); + toPage = ToObjectPage.create(dataType, codec, columnChunkReader.getDictionary()); break; default: } } if (toPage == null) { - throw new TableDataException( - "Unsupported parquet column type " + type.getPrimitiveTypeName() + + throw new TableDataException("Unsupported parquet column type " + type.getPrimitiveTypeName() + " with logical type " + logicalTypeAnnotation); } @@ -638,34 +598,29 @@ private static ToPage makeToPage( return (ToPage) toPage; } catch (IOException except) { - throw new TableDataException("IO exception accessing column " + parquetColumnName, - except); + throw new TableDataException("IO exception accessing column " + parquetColumnName, except); } catch (RuntimeException except) { - throw new TableDataException( - "Unexpected exception accessing column " + parquetColumnName, except); + throw new TableDataException("Unexpected exception accessing column " + parquetColumnName, except); } } private static class LogicalTypeVisitor - implements LogicalTypeAnnotation.LogicalTypeAnnotationVisitor> { + implements LogicalTypeAnnotation.LogicalTypeAnnotationVisitor> { private final String name; private final ColumnChunkReader columnChunkReader; private final Class componentType; - LogicalTypeVisitor(@NotNull String name, @NotNull ColumnChunkReader columnChunkReader, - Class componentType) { + LogicalTypeVisitor(@NotNull String name, @NotNull ColumnChunkReader columnChunkReader, Class componentType) { this.name = name; this.columnChunkReader = columnChunkReader; this.componentType = componentType; } @Override - public Optional> visit( - LogicalTypeAnnotation.StringLogicalTypeAnnotation stringLogicalType) { + public 
Optional> visit(LogicalTypeAnnotation.StringLogicalTypeAnnotation stringLogicalType) { try { - return Optional - .of(ToStringPage.create(componentType, columnChunkReader.getDictionary())); + return Optional.of(ToStringPage.create(componentType, columnChunkReader.getDictionary())); } catch (IOException except) { throw new TableDataException("Failure accessing string column " + name, except); } @@ -673,19 +628,16 @@ private static class LogicalTypeVisitor @Override public Optional> visit( - LogicalTypeAnnotation.TimestampLogicalTypeAnnotation timestampLogicalType) { + LogicalTypeAnnotation.TimestampLogicalTypeAnnotation timestampLogicalType) { if (timestampLogicalType.isAdjustedToUTC()) { - return Optional - .of(ToDBDateTimePage.create(componentType, timestampLogicalType.getUnit())); + return Optional.of(ToDBDateTimePage.create(componentType, timestampLogicalType.getUnit())); } - throw new TableDataException( - "Timestamp column is not UTC or is not nanoseconds " + name); + throw new TableDataException("Timestamp column is not UTC or is not nanoseconds " + name); } @Override - public Optional> visit( - LogicalTypeAnnotation.IntLogicalTypeAnnotation intLogicalType) { + public Optional> visit(LogicalTypeAnnotation.IntLogicalTypeAnnotation intLogicalType) { if (intLogicalType.isSigned()) { switch (intLogicalType.getBitWidth()) { diff --git a/DB/src/main/java/io/deephaven/db/v2/locations/parquet/local/ParquetTableLocation.java b/DB/src/main/java/io/deephaven/db/v2/locations/parquet/local/ParquetTableLocation.java index ddfef3193f5..8b893641c83 100644 --- a/DB/src/main/java/io/deephaven/db/v2/locations/parquet/local/ParquetTableLocation.java +++ b/DB/src/main/java/io/deephaven/db/v2/locations/parquet/local/ParquetTableLocation.java @@ -42,8 +42,8 @@ class ParquetTableLocation extends AbstractTableLocation { private volatile RowGroupReader[] rowGroupReaders; ParquetTableLocation(@NotNull final TableKey tableKey, - @NotNull final ParquetTableLocationKey tableLocationKey, - 
@NotNull final ParquetInstructions readInstructions) { + @NotNull final ParquetTableLocationKey tableLocationKey, + @NotNull final ParquetInstructions readInstructions) { super(tableKey, tableLocationKey, false); this.readInstructions = readInstructions; final ParquetMetadata parquetMetadata; @@ -56,14 +56,12 @@ class ParquetTableLocation extends AbstractTableLocation { final int rowGroupCount = rowGroupIndices.length; rowGroups = IntStream.of(rowGroupIndices) - .mapToObj(rgi -> parquetFileReader.fileMetaData.getRow_groups().get(rgi)) - .sorted(Comparator.comparingInt(RowGroup::getOrdinal)) - .toArray(RowGroup[]::new); - final long maxRowCount = - Arrays.stream(rowGroups).mapToLong(RowGroup::getNum_rows).max().orElse(0L); + .mapToObj(rgi -> parquetFileReader.fileMetaData.getRow_groups().get(rgi)) + .sorted(Comparator.comparingInt(RowGroup::getOrdinal)) + .toArray(RowGroup[]::new); + final long maxRowCount = Arrays.stream(rowGroups).mapToLong(RowGroup::getNum_rows).max().orElse(0L); regionParameters = new RegionedPageStore.Parameters( - RegionedColumnSource.ELEMENT_INDEX_TO_SUB_REGION_ELEMENT_INDEX_MASK, rowGroupCount, - maxRowCount); + RegionedColumnSource.ELEMENT_INDEX_TO_SUB_REGION_ELEMENT_INDEX_MASK, rowGroupCount, maxRowCount); parquetColumnNameToPath = new HashMap<>(); for (final ColumnDescriptor column : parquetFileReader.getSchema().getColumns()) { @@ -74,17 +72,13 @@ class ParquetTableLocation extends AbstractTableLocation { } // TODO (https://github.com/deephaven/deephaven-core/issues/958): - // When/if we support _metadata files for Deephaven-written Parquet tables, we may need to - // revise this - // in order to read *this* file's metadata, rather than inheriting file metadata from the - // _metadata file. + // When/if we support _metadata files for Deephaven-written Parquet tables, we may need to revise this + // in order to read *this* file's metadata, rather than inheriting file metadata from the _metadata file. 
// Obvious issues included grouping table paths, codecs, etc. - // Presumably, we could store per-file instances of the metadata in the _metadata file's - // map. - final Optional tableInfo = ParquetSchemaReader - .parseMetadata(parquetMetadata.getFileMetaData().getKeyValueMetaData()); - groupingColumns = - tableInfo.map(TableInfo::groupingColumnMap).orElse(Collections.emptyMap()); + // Presumably, we could store per-file instances of the metadata in the _metadata file's map. + final Optional tableInfo = + ParquetSchemaReader.parseMetadata(parquetMetadata.getFileMetaData().getKeyValueMetaData()); + groupingColumns = tableInfo.map(TableInfo::groupingColumnMap).orElse(Collections.emptyMap()); columnTypes = tableInfo.map(TableInfo::columnTypeMap).orElse(Collections.emptyMap()); handleUpdate(computeIndex(), tableLocationKey.getFile().lastModified()); @@ -132,33 +126,29 @@ private RowGroupReader[] getRowGroupReaders() { return local; } return rowGroupReaders = IntStream.of(rowGroupIndices) - .mapToObj(parquetFileReader::getRowGroup) - .sorted(Comparator.comparingInt(rgr -> rgr.getRowGroup().getOrdinal())) - .toArray(RowGroupReader[]::new); + .mapToObj(parquetFileReader::getRowGroup) + .sorted(Comparator.comparingInt(rgr -> rgr.getRowGroup().getOrdinal())) + .toArray(RowGroupReader[]::new); } } @NotNull @Override protected ParquetColumnLocation makeColumnLocation(@NotNull final String columnName) { - final String parquetColumnName = - readInstructions.getParquetColumnNameFromColumnNameOrDefault(columnName); + final String parquetColumnName = readInstructions.getParquetColumnNameFromColumnNameOrDefault(columnName); final String[] columnPath = parquetColumnNameToPath.get(parquetColumnName); final List nameList = - columnPath == null ? Collections.singletonList(parquetColumnName) - : Arrays.asList(columnPath); + columnPath == null ? 
Collections.singletonList(parquetColumnName) : Arrays.asList(columnPath); final ColumnChunkReader[] columnChunkReaders = Arrays.stream(getRowGroupReaders()) - .map(rgr -> rgr.getColumnChunk(nameList)).toArray(ColumnChunkReader[]::new); - final boolean exists = - Arrays.stream(columnChunkReaders).anyMatch(ccr -> ccr != null && ccr.numRows() > 0); + .map(rgr -> rgr.getColumnChunk(nameList)).toArray(ColumnChunkReader[]::new); + final boolean exists = Arrays.stream(columnChunkReaders).anyMatch(ccr -> ccr != null && ccr.numRows() > 0); return new ParquetColumnLocation<>(this, columnName, parquetColumnName, - exists ? columnChunkReaders : null, - exists && groupingColumns.containsKey(parquetColumnName)); + exists ? columnChunkReaders : null, + exists && groupingColumns.containsKey(parquetColumnName)); } private CurrentOnlyIndex computeIndex() { - final CurrentOnlyIndex.SequentialBuilder sequentialBuilder = - Index.CURRENT_FACTORY.getSequentialBuilder(); + final CurrentOnlyIndex.SequentialBuilder sequentialBuilder = Index.CURRENT_FACTORY.getSequentialBuilder(); for (int rgi = 0; rgi < rowGroups.length; ++rgi) { final long subRegionSize = rowGroups[rgi].getNum_rows(); diff --git a/DB/src/main/java/io/deephaven/db/v2/locations/parquet/local/ParquetTableLocationFactory.java b/DB/src/main/java/io/deephaven/db/v2/locations/parquet/local/ParquetTableLocationFactory.java index 933cf29d519..a088c520364 100644 --- a/DB/src/main/java/io/deephaven/db/v2/locations/parquet/local/ParquetTableLocationFactory.java +++ b/DB/src/main/java/io/deephaven/db/v2/locations/parquet/local/ParquetTableLocationFactory.java @@ -15,8 +15,7 @@ /** * {@link TableLocationFactory} for {@link ParquetTableLocation}s. 
*/ -public final class ParquetTableLocationFactory - implements TableLocationFactory { +public final class ParquetTableLocationFactory implements TableLocationFactory { private final ParquetInstructions readInstructions; @@ -27,8 +26,8 @@ public ParquetTableLocationFactory(@NotNull final ParquetInstructions readInstru @Override @NotNull public TableLocation makeLocation(@NotNull final TableKey tableKey, - @NotNull final ParquetTableLocationKey locationKey, - @Nullable final TableDataRefreshService refreshService) { + @NotNull final ParquetTableLocationKey locationKey, + @Nullable final TableDataRefreshService refreshService) { final File parquetFile = locationKey.getFile(); if (Utils.fileExistsPrivileged(parquetFile)) { return new ParquetTableLocation(tableKey, locationKey, readInstructions); diff --git a/DB/src/main/java/io/deephaven/db/v2/locations/parquet/local/ParquetTableLocationKey.java b/DB/src/main/java/io/deephaven/db/v2/locations/parquet/local/ParquetTableLocationKey.java index 979dff56d50..fb7ecda7700 100644 --- a/DB/src/main/java/io/deephaven/db/v2/locations/parquet/local/ParquetTableLocationKey.java +++ b/DB/src/main/java/io/deephaven/db/v2/locations/parquet/local/ParquetTableLocationKey.java @@ -32,23 +32,20 @@ public final class ParquetTableLocationKey extends FileTableLocationKey { /** * Construct a new ParquetTableLocationKey for the supplied {@code file} and {@code partitions}. * - * @param file The parquet file that backs the keyed location. Will be adjusted to an absolute - * path. + * @param file The parquet file that backs the keyed location. Will be adjusted to an absolute path. * @param order Explicit ordering index, taking precedence over other fields - * @param partitions The table partitions enclosing the table location keyed by {@code this}. - * Note that if this parameter is {@code null}, the location will be a member of no - * partitions. 
An ordered copy of the map will be made, so the calling code is free to - * mutate the map after this call + * @param partitions The table partitions enclosing the table location keyed by {@code this}. Note that if this + * parameter is {@code null}, the location will be a member of no partitions. An ordered copy of the map will + * be made, so the calling code is free to mutate the map after this call */ public ParquetTableLocationKey(@NotNull final File file, final int order, - @Nullable final Map> partitions) { + @Nullable final Map> partitions) { super(validateParquetFile(file), order, partitions); } private static File validateParquetFile(@NotNull final File file) { if (!file.getName().endsWith(ParquetTableWriter.PARQUET_FILE_EXTENSION)) { - throw new IllegalArgumentException( - "Parquet file must end in " + ParquetTableWriter.PARQUET_FILE_EXTENSION); + throw new IllegalArgumentException("Parquet file must end in " + ParquetTableWriter.PARQUET_FILE_EXTENSION); } return file; } @@ -60,8 +57,8 @@ public String getImplementationName() { /** - * Get a previously-{@link #setFileReader(ParquetFileReader) set} or on-demand created - * {@link ParquetFileReader} for this location key's {@code file}. + * Get a previously-{@link #setFileReader(ParquetFileReader) set} or on-demand created {@link ParquetFileReader} for + * this location key's {@code file}. * * @return A {@link ParquetFileReader} for this location key's {@code file}. */ @@ -73,9 +70,9 @@ public synchronized ParquetFileReader getFileReader() { } /** - * Set the {@link ParquetFileReader} that will be returned by {@link #getFileReader()}. Pass - * {@code null} to force on-demand construction at the next invocation. Always clears cached - * {@link ParquetMetadata} and {@link RowGroup} indices. + * Set the {@link ParquetFileReader} that will be returned by {@link #getFileReader()}. Pass {@code null} to force + * on-demand construction at the next invocation. 
Always clears cached {@link ParquetMetadata} and {@link RowGroup} + * indices. * * @param fileReader The new {@link ParquetFileReader} */ @@ -86,8 +83,8 @@ public synchronized void setFileReader(final ParquetFileReader fileReader) { } /** - * Get a previously-{@link #setMetadata(ParquetMetadata) set} or on-demand created - * {@link ParquetMetadata} for this location key's {@code file}. + * Get a previously-{@link #setMetadata(ParquetMetadata) set} or on-demand created {@link ParquetMetadata} for this + * location key's {@code file}. * * @return A {@link ParquetMetadata} for this location key's {@code file}. */ @@ -96,17 +93,15 @@ public synchronized ParquetMetadata getMetadata() { return metadata; } try { - return metadata = - new ParquetMetadataConverter().fromParquetMetadata(getFileReader().fileMetaData); + return metadata = new ParquetMetadataConverter().fromParquetMetadata(getFileReader().fileMetaData); } catch (IOException e) { - throw new TableDataException("Failed to convert Parquet file metadata: " + getFile(), - e); + throw new TableDataException("Failed to convert Parquet file metadata: " + getFile(), e); } } /** - * Set the {@link ParquetMetadata} that will be returned by {@link #getMetadata()} ()}. Pass - * {@code null} to force on-demand construction at the next invocation. + * Set the {@link ParquetMetadata} that will be returned by {@link #getMetadata()} ()}. Pass {@code null} to force + * on-demand construction at the next invocation. * * @param metadata The new {@link ParquetMetadata} */ @@ -115,8 +110,8 @@ public synchronized void setMetadata(final ParquetMetadata metadata) { } /** - * Get previously-{@link #setRowGroupIndices(int[]) set} or on-demand created {@link RowGroup} - * indices for this location key's current {@link ParquetFileReader}. + * Get previously-{@link #setRowGroupIndices(int[]) set} or on-demand created {@link RowGroup} indices for this + * location key's current {@link ParquetFileReader}. 
* * @return {@link RowGroup} indices for this location key's current {@link ParquetFileReader}. */ @@ -126,17 +121,13 @@ public synchronized int[] getRowGroupIndices() { } final List rowGroups = getFileReader().fileMetaData.getRow_groups(); return rowGroupIndices = IntStream.range(0, rowGroups.size()).filter(rgi -> { - // 1. We can safely assume there's always at least one column. Our tools will refuse to - // write a + // 1. We can safely assume there's always at least one column. Our tools will refuse to write a // column-less table, and other readers we've tested fail catastrophically. - // 2. null file path means the column is local to the file the metadata was read from - // (which had + // 2. null file path means the column is local to the file the metadata was read from (which had // better be this file, in that case). // 3. We're assuming row groups are contained within a single file. - // While it seems that row group *could* have column chunks splayed out into multiple - // files, - // we're not expecting that in this code path. To support it, discovery tools should - // figure out + // While it seems that row group *could* have column chunks splayed out into multiple files, + // we're not expecting that in this code path. To support it, discovery tools should figure out // the row groups for a partition themselves and call setRowGroupReaders. 
final String filePath = rowGroups.get(rgi).getColumns().get(0).getFile_path(); return filePath == null || new File(filePath).getAbsoluteFile().equals(file); diff --git a/DB/src/main/java/io/deephaven/db/v2/locations/parquet/local/TrackedSeekableChannelsProvider.java b/DB/src/main/java/io/deephaven/db/v2/locations/parquet/local/TrackedSeekableChannelsProvider.java index 8f8827e249b..0545fcb4723 100644 --- a/DB/src/main/java/io/deephaven/db/v2/locations/parquet/local/TrackedSeekableChannelsProvider.java +++ b/DB/src/main/java/io/deephaven/db/v2/locations/parquet/local/TrackedSeekableChannelsProvider.java @@ -14,8 +14,7 @@ import java.util.concurrent.atomic.AtomicIntegerFieldUpdater; /** - * {@link SeekableChannelsProvider} implementation that is constrained by a Deephaven - * {@link TrackedFileHandleFactory}. + * {@link SeekableChannelsProvider} implementation that is constrained by a Deephaven {@link TrackedFileHandleFactory}. */ public class TrackedSeekableChannelsProvider implements SeekableChannelsProvider { @@ -25,8 +24,7 @@ public static SeekableChannelsProvider getInstance() { if (instance == null) { synchronized (TrackedSeekableChannelsProvider.class) { if (instance == null) { - return instance = - new TrackedSeekableChannelsProvider(TrackedFileHandleFactory.getInstance()); + return instance = new TrackedSeekableChannelsProvider(TrackedFileHandleFactory.getInstance()); } } } @@ -35,33 +33,27 @@ public static SeekableChannelsProvider getInstance() { private final TrackedFileHandleFactory fileHandleFactory; - public TrackedSeekableChannelsProvider( - @NotNull final TrackedFileHandleFactory fileHandleFactory) { + public TrackedSeekableChannelsProvider(@NotNull final TrackedFileHandleFactory fileHandleFactory) { this.fileHandleFactory = fileHandleFactory; } @Override public final SeekableByteChannel getReadChannel(@NotNull final Path path) throws IOException { - return new TrackedSeekableByteChannel(fileHandleFactory.readOnlyHandleCreator, - path.toFile()); + 
return new TrackedSeekableByteChannel(fileHandleFactory.readOnlyHandleCreator, path.toFile()); } @Override - public final SeekableByteChannel getWriteChannel(@NotNull final Path filePath, - final boolean append) throws IOException { - // NB: I'm not sure this is actually the intended behavior; the "truncate-once" is - // per-handle, not per file. - return new TrackedSeekableByteChannel( - append ? fileHandleFactory.writeAppendCreateHandleCreator - : new TruncateOnceFileCreator(fileHandleFactory), - filePath.toFile()); + public final SeekableByteChannel getWriteChannel(@NotNull final Path filePath, final boolean append) + throws IOException { + // NB: I'm not sure this is actually the intended behavior; the "truncate-once" is per-handle, not per file. + return new TrackedSeekableByteChannel(append ? fileHandleFactory.writeAppendCreateHandleCreator + : new TruncateOnceFileCreator(fileHandleFactory), filePath.toFile()); } - private static final class TruncateOnceFileCreator - implements FileHandleFactory.FileToHandleFunction { + private static final class TruncateOnceFileCreator implements FileHandleFactory.FileToHandleFunction { private static final AtomicIntegerFieldUpdater FIRST_TIME_UPDATER = - AtomicIntegerFieldUpdater.newUpdater(TruncateOnceFileCreator.class, "firstTime"); + AtomicIntegerFieldUpdater.newUpdater(TruncateOnceFileCreator.class, "firstTime"); private static final int FIRST_TIME_TRUE = 1; private static final int FIRST_TIME_FALSE = 0; diff --git a/DB/src/main/java/io/deephaven/db/v2/locations/parquet/topage/Dictionary.java b/DB/src/main/java/io/deephaven/db/v2/locations/parquet/topage/Dictionary.java index c4f8d0d9e7b..39f966b253a 100644 --- a/DB/src/main/java/io/deephaven/db/v2/locations/parquet/topage/Dictionary.java +++ b/DB/src/main/java/io/deephaven/db/v2/locations/parquet/topage/Dictionary.java @@ -8,8 +8,7 @@ import java.util.function.IntFunction; -public class Dictionary - implements StringSetImpl.ReversibleLookup { +public class Dictionary 
implements StringSetImpl.ReversibleLookup { private final ObjectChunk objects; private volatile TObjectIntMap reverseMap = null; diff --git a/DB/src/main/java/io/deephaven/db/v2/locations/parquet/topage/ReplicateToPage.java b/DB/src/main/java/io/deephaven/db/v2/locations/parquet/topage/ReplicateToPage.java index d9cc6101a80..c077e99614d 100644 --- a/DB/src/main/java/io/deephaven/db/v2/locations/parquet/topage/ReplicateToPage.java +++ b/DB/src/main/java/io/deephaven/db/v2/locations/parquet/topage/ReplicateToPage.java @@ -10,9 +10,8 @@ public class ReplicateToPage { public static void main(String... args) throws IOException { - ReplicatePrimitiveCode.intToLongAndFloatingPoints(ToIntPage.class, - ReplicatePrimitiveCode.MAIN_SRC, "interface"); - ReplicatePrimitiveCode.charToShortAndByte(ToCharPageFromInt.class, - ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.intToLongAndFloatingPoints(ToIntPage.class, ReplicatePrimitiveCode.MAIN_SRC, + "interface"); + ReplicatePrimitiveCode.charToShortAndByte(ToCharPageFromInt.class, ReplicatePrimitiveCode.MAIN_SRC); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/locations/parquet/topage/ToArrayPage.java b/DB/src/main/java/io/deephaven/db/v2/locations/parquet/topage/ToArrayPage.java index 869d6fcc3ee..496ebf0a09f 100644 --- a/DB/src/main/java/io/deephaven/db/v2/locations/parquet/topage/ToArrayPage.java +++ b/DB/src/main/java/io/deephaven/db/v2/locations/parquet/topage/ToArrayPage.java @@ -10,23 +10,21 @@ import java.nio.IntBuffer; public class ToArrayPage - extends ToPage.Wrap { + extends ToPage.Wrap { private final Class nativeType; public static ToPage create( - @NotNull final Class nativeType, - @NotNull final Class componentType, - @NotNull final ToPage toPage) { + @NotNull final Class nativeType, + @NotNull final Class componentType, + @NotNull final ToPage toPage) { if (!nativeType.isArray()) { - throw new IllegalArgumentException( - "Native type " + nativeType + " is not an array type."); + throw new 
IllegalArgumentException("Native type " + nativeType + " is not an array type."); } final Class columnComponentType = toPage.getNativeComponentType(); if (!componentType.isAssignableFrom(columnComponentType)) { - throw new IllegalArgumentException( - "The component type " + componentType.getCanonicalName() + " for the" + + throw new IllegalArgumentException("The component type " + componentType.getCanonicalName() + " for the" + " array type " + nativeType.getCanonicalName() + " is not compatible with the column's component type " + columnComponentType); } @@ -34,8 +32,7 @@ public static ToPage(nativeType, toPage); } - private ToArrayPage(@NotNull final Class nativeType, - @NotNull final ToPage toPage) { + private ToArrayPage(@NotNull final Class nativeType, @NotNull final ToPage toPage) { super(toPage); this.nativeType = nativeType; } @@ -57,8 +54,7 @@ public final ChunkType getChunkType() { public final ARRAY_TYPE[] convertResult(Object object) { final DataWithOffsets dataWithOffsets = (DataWithOffsets) object; - final DbArrayBase dataWrapper = - toPage.makeDbArray(toPage.convertResult(dataWithOffsets.materializeResult)); + final DbArrayBase dataWrapper = toPage.makeDbArray(toPage.convertResult(dataWithOffsets.materializeResult)); final IntBuffer offsets = dataWithOffsets.offsets; // noinspection unchecked diff --git a/DB/src/main/java/io/deephaven/db/v2/locations/parquet/topage/ToDBDateTimePage.java b/DB/src/main/java/io/deephaven/db/v2/locations/parquet/topage/ToDBDateTimePage.java index 22a6cc1b215..7c327187bde 100644 --- a/DB/src/main/java/io/deephaven/db/v2/locations/parquet/topage/ToDBDateTimePage.java +++ b/DB/src/main/java/io/deephaven/db/v2/locations/parquet/topage/ToDBDateTimePage.java @@ -21,8 +21,8 @@ public abstract class ToDBDateTimePage extends ToLo private static final ToDBDateTimePage NANOS_INSTANCE = new ToDBDateTimePageFromNanos(); @SuppressWarnings("unchecked") - public static ToPage create( - @NotNull final Class nativeType, final 
LogicalTypeAnnotation.TimeUnit unit) { + public static ToPage create(@NotNull final Class nativeType, + final LogicalTypeAnnotation.TimeUnit unit) { if (DBDateTime.class.equals(nativeType)) { switch (unit) { case MILLIS: @@ -37,13 +37,13 @@ public static ToPage create( } throw new IllegalArgumentException( - "The native type for a DBDateTime column is " + nativeType.getCanonicalName()); + "The native type for a DBDateTime column is " + nativeType.getCanonicalName()); } protected ToDBDateTimePage() {} protected static DbArray makeDbArrayHelper(final long[] result, - final LongFunction unitToTime) { + final LongFunction unitToTime) { DBDateTime[] to = new DBDateTime[result.length]; for (int i = 0; i < result.length; ++i) { @@ -52,8 +52,7 @@ protected static DbArray makeDbArrayHelper(final long[] result, return new DbArrayDirect<>(to); } - protected static long[] convertResultHelper(@NotNull final Object result, - final LongUnaryOperator unitToNanos) { + protected static long[] convertResultHelper(@NotNull final Object result, final LongUnaryOperator unitToNanos) { final long[] resultLongs = (long[]) result; final int resultLength = resultLongs.length; for (int ri = 0; ri < resultLength; ++ri) { @@ -68,8 +67,7 @@ public final Class getNativeComponentType() { return DBDateTime.class; } - private static final class ToDBDateTimePageFromNanos - extends ToDBDateTimePage { + private static final class ToDBDateTimePageFromNanos extends ToDBDateTimePage { @Override @NotNull public DbArray makeDbArray(long[] result) { @@ -77,8 +75,7 @@ public DbArray makeDbArray(long[] result) { } } - private static final class ToDBDateTimePageFromMicros - extends ToDBDateTimePage { + private static final class ToDBDateTimePageFromMicros extends ToDBDateTimePage { @Override @NotNull public DbArray makeDbArray(long[] result) { @@ -91,8 +88,7 @@ public final long[] convertResult(@NotNull final Object result) { } } - private static final class ToDBDateTimePageFromMillis - extends ToDBDateTimePage 
{ + private static final class ToDBDateTimePageFromMillis extends ToDBDateTimePage { @Override @NotNull public DbArray makeDbArray(long[] result) { diff --git a/DB/src/main/java/io/deephaven/db/v2/locations/parquet/topage/ToDbArrayPage.java b/DB/src/main/java/io/deephaven/db/v2/locations/parquet/topage/ToDbArrayPage.java index bad344a0b23..36b3e5d2612 100644 --- a/DB/src/main/java/io/deephaven/db/v2/locations/parquet/topage/ToDbArrayPage.java +++ b/DB/src/main/java/io/deephaven/db/v2/locations/parquet/topage/ToDbArrayPage.java @@ -10,23 +10,20 @@ import java.nio.IntBuffer; public class ToDbArrayPage> - extends ToPage.Wrap { + extends ToPage.Wrap { private final Class nativeType; - public static ToPage create( - @NotNull final Class nativeType, - @NotNull final Class componentType, - @NotNull final ToPage toPage) { + public static ToPage create(@NotNull final Class nativeType, + @NotNull final Class componentType, + @NotNull final ToPage toPage) { if (!DbArrayBase.class.isAssignableFrom(nativeType)) { - throw new IllegalArgumentException( - "Native type " + nativeType + " is not a DbArray type."); + throw new IllegalArgumentException("Native type " + nativeType + " is not a DbArray type."); } final Class columnComponentType = toPage.getNativeComponentType(); if (!componentType.isAssignableFrom(columnComponentType)) { - throw new IllegalArgumentException( - "The component type " + componentType.getCanonicalName() + " for the" + + throw new IllegalArgumentException("The component type " + componentType.getCanonicalName() + " for the" + " array type " + nativeType.getCanonicalName() + " is not compatible with the column's component type " + columnComponentType); } @@ -35,8 +32,7 @@ public class ToDbArrayPage nativeType, - @NotNull final ToPage toPage) { + private ToDbArrayPage(@NotNull final Class nativeType, @NotNull final ToPage toPage) { super(toPage); this.nativeType = nativeType; } @@ -59,8 +55,8 @@ public final ARRAY_TYPE[] convertResult(final Object object) { 
final DataWithOffsets dataWithOffsets = (DataWithOffsets) object; // noinspection unchecked - final ARRAY_TYPE dataWrapper = (ARRAY_TYPE) toPage - .makeDbArray(toPage.convertResult(dataWithOffsets.materializeResult)); + final ARRAY_TYPE dataWrapper = + (ARRAY_TYPE) toPage.makeDbArray(toPage.convertResult(dataWithOffsets.materializeResult)); final IntBuffer offsets = dataWithOffsets.offsets; // noinspection unchecked diff --git a/DB/src/main/java/io/deephaven/db/v2/locations/parquet/topage/ToPage.java b/DB/src/main/java/io/deephaven/db/v2/locations/parquet/topage/ToPage.java index 0ad373a99cc..d3ff2e9f9cd 100644 --- a/DB/src/main/java/io/deephaven/db/v2/locations/parquet/topage/ToPage.java +++ b/DB/src/main/java/io/deephaven/db/v2/locations/parquet/topage/ToPage.java @@ -68,13 +68,12 @@ default DbArrayBase makeDbArray(RESULT result) { } /** - * Produce the appropriately typed chunk page for the page read by the columnPageReader. The is - * the expected entry point for the ColumnChunkPageStore. + * Produce the appropriately typed chunk page for the page read by the columnPageReader. The is the expected entry + * point for the ColumnChunkPageStore. */ @NotNull @FinalDefault - default ChunkPage toPage(long offset, ColumnPageReader columnPageReader, long mask) - throws IOException { + default ChunkPage toPage(long offset, ColumnPageReader columnPageReader, long mask) throws IOException { return getChunkType().pageWrap(offset, convertResult(getResult(columnPageReader)), mask); } @@ -87,9 +86,9 @@ default Chunk getDictionary() { } /** - * @return an object implementing ToChunk which will read the integral Dictionary Keys when - * there's a dictionary for this column (as opposed to the values, which this object's - * toChunk will return.). This will return null iff getDictionary returns null. 
+ * @return an object implementing ToChunk which will read the integral Dictionary Keys when there's a dictionary for + * this column (as opposed to the values, which this object's toChunk will return.). This will return null + * iff getDictionary returns null. * @apiNote null iff {@link #getDictionary()} is null. */ default ToPage getDictionaryKeysToPage() { @@ -104,8 +103,7 @@ default StringSetImpl.ReversibleLookup getReversibleLookup() { return null; } - abstract class Wrap - implements ToPage { + abstract class Wrap implements ToPage { final ToPage toPage; diff --git a/DB/src/main/java/io/deephaven/db/v2/locations/parquet/topage/ToPageWithDictionary.java b/DB/src/main/java/io/deephaven/db/v2/locations/parquet/topage/ToPageWithDictionary.java index 1f10e1be3ff..40749a0a9f7 100644 --- a/DB/src/main/java/io/deephaven/db/v2/locations/parquet/topage/ToPageWithDictionary.java +++ b/DB/src/main/java/io/deephaven/db/v2/locations/parquet/topage/ToPageWithDictionary.java @@ -17,17 +17,16 @@ import static io.deephaven.util.QueryConstants.NULL_INT; import static io.deephaven.util.QueryConstants.NULL_LONG; -public class ToPageWithDictionary - implements ToPage { +public class ToPageWithDictionary implements ToPage { private final Class nativeType; private final Dictionary dictionary; private final Function convertResultFallbackFun; ToPageWithDictionary( - @NotNull final Class nativeType, - @NotNull final Dictionary dictionary, - @NotNull final Function convertResultFallbackFun) { + @NotNull final Class nativeType, + @NotNull final Dictionary dictionary, + @NotNull final Function convertResultFallbackFun) { this.nativeType = nativeType; this.dictionary = dictionary; this.convertResultFallbackFun = convertResultFallbackFun; @@ -47,8 +46,7 @@ public final ChunkType getChunkType() { @Override @NotNull - public final Object getResult(@NotNull final ColumnPageReader columnPageReader) - throws IOException { + public final Object getResult(@NotNull final ColumnPageReader 
columnPageReader) throws IOException { if (columnPageReader.getDictionary() == null) { return ToPage.super.getResult(columnPageReader); } @@ -111,8 +109,7 @@ public Object nullValue() { } @Override - public Object getResult(@NotNull final ColumnPageReader columnPageReader) - throws IOException { + public Object getResult(@NotNull final ColumnPageReader columnPageReader) throws IOException { return ToPageWithDictionary.this.getResult(columnPageReader); } diff --git a/DB/src/main/java/io/deephaven/db/v2/locations/parquet/topage/ToStringPage.java b/DB/src/main/java/io/deephaven/db/v2/locations/parquet/topage/ToStringPage.java index 09552b74166..da520fca856 100644 --- a/DB/src/main/java/io/deephaven/db/v2/locations/parquet/topage/ToStringPage.java +++ b/DB/src/main/java/io/deephaven/db/v2/locations/parquet/topage/ToStringPage.java @@ -12,23 +12,21 @@ public class ToStringPage implements ToPage ToPage create( - final Class nativeType, final org.apache.parquet.column.Dictionary dictionary) { + public static ToPage create(final Class nativeType, + final org.apache.parquet.column.Dictionary dictionary) { if (nativeType == null || String.class.equals(nativeType)) { // noinspection unchecked return dictionary == null - ? INSTANCE - : new ToPageWithDictionary<>( - String.class, - new Dictionary<>( - dictionaryKey -> dictionary.decodeToBinary(dictionaryKey) - .toStringUsingUTF8(), - dictionary.getMaxId() + 1), - INSTANCE::convertResult); + ? 
INSTANCE + : new ToPageWithDictionary<>( + String.class, + new Dictionary<>( + dictionaryKey -> dictionary.decodeToBinary(dictionaryKey).toStringUsingUTF8(), + dictionary.getMaxId() + 1), + INSTANCE::convertResult); } - throw new IllegalArgumentException( - "The native type for a String column is " + nativeType.getCanonicalName()); + throw new IllegalArgumentException("The native type for a String column is " + nativeType.getCanonicalName()); } private ToStringPage() {} diff --git a/DB/src/main/java/io/deephaven/db/v2/locations/parquet/topage/ToStringSetPage.java b/DB/src/main/java/io/deephaven/db/v2/locations/parquet/topage/ToStringSetPage.java index 7b397a3a29b..42d70163f80 100644 --- a/DB/src/main/java/io/deephaven/db/v2/locations/parquet/topage/ToStringSetPage.java +++ b/DB/src/main/java/io/deephaven/db/v2/locations/parquet/topage/ToStringSetPage.java @@ -14,28 +14,25 @@ import java.nio.IntBuffer; public class ToStringSetPage - extends ToPage.Wrap { + extends ToPage.Wrap { - public static ToPage create( - @NotNull Class nativeType, @NotNull ToPage toPage) { + public static ToPage create(@NotNull Class nativeType, + @NotNull ToPage toPage) { Class columnComponentType = toPage.getNativeType(); if (!StringSet.class.isAssignableFrom(nativeType)) { - throw new IllegalArgumentException( - "Native type " + nativeType + " is not a StringSet type."); + throw new IllegalArgumentException("Native type " + nativeType + " is not a StringSet type."); } if (!String.class.isAssignableFrom(columnComponentType)) { - throw new IllegalArgumentException( - "The column's component type " + columnComponentType + + throw new IllegalArgumentException("The column's component type " + columnComponentType + "is not compatible with String"); } Chunk dictionary = toPage.getDictionary(); - return dictionary != null && dictionary.size() <= 64 - ? new ToStringSetPage.WithShortDictionary<>(toPage) - : new ToStringSetPage<>(toPage); + return dictionary != null && dictionary.size() <= 64 ? 
new ToStringSetPage.WithShortDictionary<>(toPage) + : new ToStringSetPage<>(toPage); } private ToStringSetPage(ToPage toPage) { @@ -78,7 +75,7 @@ public final StringSet[] convertResult(Object result) { } private static final class WithShortDictionary - extends ToPage.Wrap { + extends ToPage.Wrap { WithShortDictionary(ToPage toPage) { super(toPage); diff --git a/DB/src/main/java/io/deephaven/db/v2/locations/util/ExecutorTableDataRefreshService.java b/DB/src/main/java/io/deephaven/db/v2/locations/util/ExecutorTableDataRefreshService.java index e6c7ca0c6a8..d25196b717b 100644 --- a/DB/src/main/java/io/deephaven/db/v2/locations/util/ExecutorTableDataRefreshService.java +++ b/DB/src/main/java/io/deephaven/db/v2/locations/util/ExecutorTableDataRefreshService.java @@ -38,35 +38,30 @@ public class ExecutorTableDataRefreshService implements TableDataRefreshService private final Value locationSubscriptionRefreshDurationNanos; public ExecutorTableDataRefreshService(@NotNull final String name, - final long tableLocationProviderRefreshIntervalMillis, - final long tableLocationRefreshIntervalMillis, - final int threadPoolSize) { + final long tableLocationProviderRefreshIntervalMillis, + final long tableLocationRefreshIntervalMillis, + final int threadPoolSize) { this.name = Require.neqNull(name, "name"); - this.tableLocationProviderRefreshIntervalMillis = - Require.gtZero(tableLocationProviderRefreshIntervalMillis, + this.tableLocationProviderRefreshIntervalMillis = Require.gtZero(tableLocationProviderRefreshIntervalMillis, "tableLocationProviderRefreshIntervalMillis"); - this.tableLocationRefreshIntervalMillis = Require.gtZero(tableLocationRefreshIntervalMillis, - "tableLocationRefreshIntervalMillis"); + this.tableLocationRefreshIntervalMillis = + Require.gtZero(tableLocationRefreshIntervalMillis, "tableLocationRefreshIntervalMillis"); - scheduler = new ScheduledThreadPoolExecutor(threadPoolSize, this::makeThread, - new ThreadPoolExecutor.AbortPolicy()); + scheduler = + new 
ScheduledThreadPoolExecutor(threadPoolSize, this::makeThread, new ThreadPoolExecutor.AbortPolicy()); scheduler.setRemoveOnCancelPolicy(true); - providerSubscriptions = - Stats.makeItem(NAME_PREFIX + name, "providerSubscriptions", Counter.FACTORY).getValue(); + providerSubscriptions = Stats.makeItem(NAME_PREFIX + name, "providerSubscriptions", Counter.FACTORY).getValue(); providerSubscriptionRefreshDurationNanos = Stats - .makeItem(NAME_PREFIX + name, "providerSubscriptionRefreshDurationNanos", State.FACTORY) - .getValue(); - locationSubscriptions = - Stats.makeItem(NAME_PREFIX + name, "locationSubscriptions", Counter.FACTORY).getValue(); + .makeItem(NAME_PREFIX + name, "providerSubscriptionRefreshDurationNanos", State.FACTORY).getValue(); + locationSubscriptions = Stats.makeItem(NAME_PREFIX + name, "locationSubscriptions", Counter.FACTORY).getValue(); locationSubscriptionRefreshDurationNanos = Stats - .makeItem(NAME_PREFIX + name, "locationSubscriptionRefreshDurationNanos", State.FACTORY) - .getValue(); + .makeItem(NAME_PREFIX + name, "locationSubscriptionRefreshDurationNanos", State.FACTORY).getValue(); } private Thread makeThread(final Runnable runnable) { - final Thread thread = new Thread(runnable, - NAME_PREFIX + name + "-refreshThread-" + threadCount.incrementAndGet()); + final Thread thread = + new Thread(runnable, NAME_PREFIX + name + "-refreshThread-" + threadCount.incrementAndGet()); thread.setDaemon(true); return thread; } @@ -77,7 +72,7 @@ public void submitOneTimeAsyncTask(@NotNull final Runnable task) { } private abstract class ScheduledSubscriptionTask - implements CancellableSubscriptionToken { + implements CancellableSubscriptionToken { final TYPE subscriptionAggregator; @@ -86,10 +81,9 @@ private abstract class ScheduledSubscriptionTask { + extends ScheduledSubscriptionTask { - private ScheduledTableLocationProviderRefresh( - @NotNull AbstractTableLocationProvider tableLocationProvider) { + private ScheduledTableLocationProviderRefresh(@NotNull 
AbstractTableLocationProvider tableLocationProvider) { super(tableLocationProvider, tableLocationProviderRefreshIntervalMillis); providerSubscriptions.increment(1); } @@ -138,8 +131,7 @@ public void cancel() { } } - private class ScheduledTableLocationRefresh - extends ScheduledSubscriptionTask { + private class ScheduledTableLocationRefresh extends ScheduledSubscriptionTask { private ScheduledTableLocationRefresh(@NotNull AbstractTableLocation tableLocation) { super(tableLocation, tableLocationRefreshIntervalMillis); @@ -162,13 +154,13 @@ public void cancel() { @Override public CancellableSubscriptionToken scheduleTableLocationProviderRefresh( - @NotNull final AbstractTableLocationProvider tableLocationProvider) { + @NotNull final AbstractTableLocationProvider tableLocationProvider) { return new ScheduledTableLocationProviderRefresh(tableLocationProvider); } @Override public CancellableSubscriptionToken scheduleTableLocationRefresh( - @NotNull final AbstractTableLocation tableLocation) { + @NotNull final AbstractTableLocation tableLocation) { return new ScheduledTableLocationRefresh(tableLocation); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/locations/util/TableDataRefreshService.java b/DB/src/main/java/io/deephaven/db/v2/locations/util/TableDataRefreshService.java index e0691a55518..c1a13e24253 100644 --- a/DB/src/main/java/io/deephaven/db/v2/locations/util/TableDataRefreshService.java +++ b/DB/src/main/java/io/deephaven/db/v2/locations/util/TableDataRefreshService.java @@ -6,8 +6,7 @@ import org.jetbrains.annotations.NotNull; /** - * For TableDataService and related components, this allows a simple implementation for subscription - * support. + * For TableDataService and related components, this allows a simple implementation for subscription support. 
*/ public interface TableDataRefreshService { @@ -36,7 +35,7 @@ interface CancellableSubscriptionToken { * @return A subscription token to be used for matching, which also supports cancellation */ CancellableSubscriptionToken scheduleTableLocationProviderRefresh( - @NotNull AbstractTableLocationProvider tableLocationProvider); + @NotNull AbstractTableLocationProvider tableLocationProvider); /** * Schedule refresh for an AbstractTableLocation. @@ -44,8 +43,7 @@ CancellableSubscriptionToken scheduleTableLocationProviderRefresh( * @param tableLocation The table location * @return A subscription token to be used for matching, which also supports cancellation */ - CancellableSubscriptionToken scheduleTableLocationRefresh( - @NotNull AbstractTableLocation tableLocation); + CancellableSubscriptionToken scheduleTableLocationRefresh(@NotNull AbstractTableLocation tableLocation); /** * Get (and possibly construct) a shared instance. @@ -62,20 +60,18 @@ static TableDataRefreshService getSharedRefreshService() { final class Helper { // region Property names - private static final String TABLE_LOCATION_REFRESH_MILLIS_PROP = - "tableLocationsRefreshMillis"; + private static final String TABLE_LOCATION_REFRESH_MILLIS_PROP = "tableLocationsRefreshMillis"; private static final String TABLE_SIZE_REFRESH_MILLIS_PROP = "tableSizeRefreshMillis"; private static final String REFRESH_THREAD_POOL_SIZE_PROP = "refreshThreadPoolSize"; // endregion - // region Global properties retrieved from Configuration; used only for static - // TableDataRefreshService uses + // region Global properties retrieved from Configuration; used only for static TableDataRefreshService uses private static final String GLOBAL_TABLE_LOCATION_REFRESH_MILLIS_PROP = - "TableDataRefreshService." + TABLE_LOCATION_REFRESH_MILLIS_PROP; + "TableDataRefreshService." + TABLE_LOCATION_REFRESH_MILLIS_PROP; private static final String GLOBAL_TABLE_SIZE_REFRESH_MILLIS_PROP = - "TableDataRefreshService." 
+ TABLE_SIZE_REFRESH_MILLIS_PROP; + "TableDataRefreshService." + TABLE_SIZE_REFRESH_MILLIS_PROP; private static final String GLOBAL_REFRESH_THREAD_POOL_SIZE_PROP = - "TableDataRefreshService." + REFRESH_THREAD_POOL_SIZE_PROP; + "TableDataRefreshService." + REFRESH_THREAD_POOL_SIZE_PROP; // endregion // region Shared property default values @@ -93,15 +89,13 @@ private static TableDataRefreshService getSharedRefreshService() { synchronized (Helper.class) { if (sharedRefreshService == null) { sharedRefreshService = new ExecutorTableDataRefreshService("Local", - Configuration.getInstance().getLongWithDefault( - GLOBAL_TABLE_LOCATION_REFRESH_MILLIS_PROP, - DEFAULT_TABLE_LOCATION_REFRESH_MILLIS), - Configuration.getInstance().getLongWithDefault( - GLOBAL_TABLE_SIZE_REFRESH_MILLIS_PROP, - DEFAULT_TABLE_SIZE_REFRESH_MILLIS), - Configuration.getInstance().getIntegerWithDefault( - GLOBAL_REFRESH_THREAD_POOL_SIZE_PROP, - DEFAULT_REFRESH_THREAD_POOL_SIZE)); + Configuration.getInstance().getLongWithDefault( + GLOBAL_TABLE_LOCATION_REFRESH_MILLIS_PROP, + DEFAULT_TABLE_LOCATION_REFRESH_MILLIS), + Configuration.getInstance().getLongWithDefault(GLOBAL_TABLE_SIZE_REFRESH_MILLIS_PROP, + DEFAULT_TABLE_SIZE_REFRESH_MILLIS), + Configuration.getInstance().getIntegerWithDefault(GLOBAL_REFRESH_THREAD_POOL_SIZE_PROP, + DEFAULT_REFRESH_THREAD_POOL_SIZE)); } } } @@ -125,13 +119,13 @@ public void submitOneTimeAsyncTask(@NotNull final Runnable task) { @Override public CancellableSubscriptionToken scheduleTableLocationProviderRefresh( - @NotNull final AbstractTableLocationProvider tableLocationProvider) { + @NotNull final AbstractTableLocationProvider tableLocationProvider) { throw new UnsupportedOperationException(); } @Override public CancellableSubscriptionToken scheduleTableLocationRefresh( - @NotNull final AbstractTableLocation tableLocation) { + @NotNull final AbstractTableLocation tableLocation) { throw new UnsupportedOperationException(); } } diff --git 
a/DB/src/main/java/io/deephaven/db/v2/parquet/DictionaryAdapter.java b/DB/src/main/java/io/deephaven/db/v2/parquet/DictionaryAdapter.java index 6c1bd89e982..2e5a4eb987d 100644 --- a/DB/src/main/java/io/deephaven/db/v2/parquet/DictionaryAdapter.java +++ b/DB/src/main/java/io/deephaven/db/v2/parquet/DictionaryAdapter.java @@ -7,23 +7,17 @@ public abstract class DictionaryAdapter { static DictionaryAdapter getAdapter(Dictionary dictionary, Object nullValue) { if (dictionary instanceof PlainValuesDictionary.PlainLongDictionary) { - return new LongAdapter((PlainValuesDictionary.PlainLongDictionary) dictionary, - nullValue); + return new LongAdapter((PlainValuesDictionary.PlainLongDictionary) dictionary, nullValue); } else if (dictionary instanceof PlainValuesDictionary.PlainFloatDictionary) { - return new FloatAdapter((PlainValuesDictionary.PlainFloatDictionary) dictionary, - nullValue); + return new FloatAdapter((PlainValuesDictionary.PlainFloatDictionary) dictionary, nullValue); } else if (dictionary instanceof PlainValuesDictionary.PlainIntegerDictionary) { - return new IntegerAdapter((PlainValuesDictionary.PlainIntegerDictionary) dictionary, - nullValue); + return new IntegerAdapter((PlainValuesDictionary.PlainIntegerDictionary) dictionary, nullValue); } else if (dictionary instanceof PlainValuesDictionary.PlainDoubleDictionary) { - return new DoubleAdapter((PlainValuesDictionary.PlainDoubleDictionary) dictionary, - nullValue); + return new DoubleAdapter((PlainValuesDictionary.PlainDoubleDictionary) dictionary, nullValue); } else if (dictionary instanceof PlainValuesDictionary.PlainBinaryDictionary) { - return new BinaryAdapter((PlainValuesDictionary.PlainBinaryDictionary) dictionary, - nullValue); + return new BinaryAdapter((PlainValuesDictionary.PlainBinaryDictionary) dictionary, nullValue); } - throw new UnsupportedOperationException( - "No adapter available for " + dictionary.getClass().getSimpleName()); + throw new UnsupportedOperationException("No adapter 
available for " + dictionary.getClass().getSimpleName()); } public abstract void apply(RESULT_ARRAY result, int destIndex, int keyIndex); @@ -56,8 +50,7 @@ public long[] createResult(int numValues) { private static class FloatAdapter extends DictionaryAdapter { private final float[] dictionaryMapping; - public FloatAdapter(PlainValuesDictionary.PlainFloatDictionary dictionary, - Object nullValue) { + public FloatAdapter(PlainValuesDictionary.PlainFloatDictionary dictionary, Object nullValue) { super(); dictionaryMapping = new float[dictionary.getMaxId() + 2]; for (int i = 0; i < dictionaryMapping.length - 1; i++) { @@ -80,8 +73,7 @@ public float[] createResult(int numValues) { private static class IntegerAdapter extends DictionaryAdapter { private final int[] dictionaryMapping; - public IntegerAdapter(PlainValuesDictionary.PlainIntegerDictionary dictionary, - Object nullValue) { + public IntegerAdapter(PlainValuesDictionary.PlainIntegerDictionary dictionary, Object nullValue) { super(); dictionaryMapping = new int[dictionary.getMaxId() + 2]; for (int i = 0; i < dictionaryMapping.length - 1; i++) { @@ -104,8 +96,7 @@ public int[] createResult(int numValues) { private static class DoubleAdapter extends DictionaryAdapter { private final double[] dictionaryMapping; - public DoubleAdapter(PlainValuesDictionary.PlainDoubleDictionary dictionary, - Object nullValue) { + public DoubleAdapter(PlainValuesDictionary.PlainDoubleDictionary dictionary, Object nullValue) { super(); dictionaryMapping = new double[dictionary.getMaxId() + 2]; for (int i = 0; i < dictionaryMapping.length - 1; i++) { @@ -128,8 +119,7 @@ public double[] createResult(int numValues) { private static class BinaryAdapter extends DictionaryAdapter { private final String[] dictionaryMapping; - public BinaryAdapter(PlainValuesDictionary.PlainBinaryDictionary dictionary, - Object nullValue) { + public BinaryAdapter(PlainValuesDictionary.PlainBinaryDictionary dictionary, Object nullValue) { super(); 
dictionaryMapping = new String[dictionary.getMaxId() + 2]; for (int i = 0; i < dictionaryMapping.length - 1; i++) { diff --git a/DB/src/main/java/io/deephaven/db/v2/parquet/MappedSchema.java b/DB/src/main/java/io/deephaven/db/v2/parquet/MappedSchema.java index 8566b848634..33ed416b601 100644 --- a/DB/src/main/java/io/deephaven/db/v2/parquet/MappedSchema.java +++ b/DB/src/main/java/io/deephaven/db/v2/parquet/MappedSchema.java @@ -10,15 +10,14 @@ import static io.deephaven.db.v2.parquet.TypeInfos.getTypeInfo; /** - * Represents the results of a successful mapping between a {@link TableDefinition} and a - * {@link MessageType}. + * Represents the results of a successful mapping between a {@link TableDefinition} and a {@link MessageType}. */ class MappedSchema { static MappedSchema create( - final TableDefinition definition, - final ParquetInstructions instructions, - final ColumnDefinition... extraColumns) { + final TableDefinition definition, + final ParquetInstructions instructions, + final ColumnDefinition... 
extraColumns) { final MessageTypeBuilder builder = Types.buildMessage(); for (final ColumnDefinition columnDefinition : definition.getColumns()) { TypeInfos.TypeInfo typeInfo = getTypeInfo(columnDefinition, instructions); @@ -26,8 +25,7 @@ static MappedSchema create( builder.addField(schemaType); } for (final ColumnDefinition extraColumn : extraColumns) { - builder.addField( - getTypeInfo(extraColumn, instructions).createSchemaType(extraColumn, instructions)); + builder.addField(getTypeInfo(extraColumn, instructions).createSchemaType(extraColumn, instructions)); } MessageType schema = builder.named("root"); return new MappedSchema(definition, schema); diff --git a/DB/src/main/java/io/deephaven/db/v2/parquet/ParquetInstructions.java b/DB/src/main/java/io/deephaven/db/v2/parquet/ParquetInstructions.java index 472e7bd2522..3a5930d05e6 100644 --- a/DB/src/main/java/io/deephaven/db/v2/parquet/ParquetInstructions.java +++ b/DB/src/main/java/io/deephaven/db/v2/parquet/ParquetInstructions.java @@ -15,14 +15,13 @@ import java.util.function.Predicate; /** - * This class provides instructions intended for read and write parquet operations (which take it as - * an optional argument) specifying desired transformations. Examples are mapping column names and - * use of specific codecs during (de)serialization. + * This class provides instructions intended for read and write parquet operations (which take it as an optional + * argument) specifying desired transformations. Examples are mapping column names and use of specific codecs during + * (de)serialization. */ public abstract class ParquetInstructions implements ColumnToCodecMappings { - private static volatile String defaultCompressionCodecName = - CompressionCodecName.SNAPPY.toString(); + private static volatile String defaultCompressionCodecName = CompressionCodecName.SNAPPY.toString(); /** * Set the default for {@link #getCompressionCodecName()}. 
@@ -50,8 +49,7 @@ public static String getDefaultCompressionCodecName() { * @see Builder#setMaximumDictionaryKeys(int) */ public static void setDefaultMaximumDictionaryKeys(final int maximumDictionaryKeys) { - defaultMaximumDictionaryKeys = - Require.geqZero(maximumDictionaryKeys, "maximumDictionaryKeys"); + defaultMaximumDictionaryKeys = Require.geqZero(maximumDictionaryKeys, "maximumDictionaryKeys"); } /** @@ -63,8 +61,7 @@ public static int getDefaultMaximumDictionaryKeys() { public ParquetInstructions() {} - public final String getColumnNameFromParquetColumnNameOrDefault( - final String parquetColumnName) { + public final String getColumnNameFromParquetColumnNameOrDefault(final String parquetColumnName) { final String mapped = getColumnNameFromParquetColumnName(parquetColumnName); return (mapped != null) ? mapped : parquetColumnName; } @@ -80,25 +77,24 @@ public final String getColumnNameFromParquetColumnNameOrDefault( public abstract String getCodecArgs(final String columnName); /** - * @return A hint that the writer should use dictionary-based encoding for writing this column; - * never evaluated for non-String columns, defaults to false + * @return A hint that the writer should use dictionary-based encoding for writing this column; never evaluated for + * non-String columns, defaults to false */ public abstract boolean useDictionary(String columnName); public abstract String getCompressionCodecName(); /** - * @return The maximum number of unique keys the writer should add to a dictionary page before - * switching to non-dictionary encoding; never evaluated for non-String columns, ignored - * if {@link #useDictionary(String)} + * @return The maximum number of unique keys the writer should add to a dictionary page before switching to + * non-dictionary encoding; never evaluated for non-String columns, ignored if + * {@link #useDictionary(String)} */ public abstract int getMaximumDictionaryKeys(); public abstract boolean isLegacyParquet(); @VisibleForTesting - 
public static boolean sameColumnNamesAndCodecMappings(final ParquetInstructions i1, - final ParquetInstructions i2) { + public static boolean sameColumnNamesAndCodecMappings(final ParquetInstructions i1, final ParquetInstructions i2) { if (i1 == EMPTY) { if (i2 == EMPTY) { return true; @@ -207,9 +203,9 @@ public void useDictionary(final boolean useDictionary) { private static final class ReadOnly extends ParquetInstructions { private final KeyedObjectHashMap columnNameToInstructions; /** - * Note parquetColumnNameToInstructions may be null while columnNameToInstructions is not - * null; We only store entries in parquetColumnNameToInstructions when the parquetColumnName - * is different than the columnName (ie, the column name mapping is not the default mapping) + * Note parquetColumnNameToInstructions may be null while columnNameToInstructions is not null; We only store + * entries in parquetColumnNameToInstructions when the parquetColumnName is different than the columnName (ie, + * the column name mapping is not the default mapping) */ private final KeyedObjectHashMap parquetColumnNameToInstructions; private final String compressionCodecName; @@ -217,11 +213,11 @@ private static final class ReadOnly extends ParquetInstructions { private final boolean isLegacyParquet; protected ReadOnly( - final KeyedObjectHashMap columnNameToInstructions, - final KeyedObjectHashMap parquetColumnNameToColumnName, - final String compressionCodecName, - final int maximumDictionaryKeys, - final boolean isLegacyParquet) { + final KeyedObjectHashMap columnNameToInstructions, + final KeyedObjectHashMap parquetColumnNameToColumnName, + final String compressionCodecName, + final int maximumDictionaryKeys, + final boolean isLegacyParquet) { this.columnNameToInstructions = columnNameToInstructions; this.parquetColumnNameToInstructions = parquetColumnNameToColumnName; this.compressionCodecName = compressionCodecName; @@ -230,7 +226,7 @@ protected ReadOnly( } private String getOrDefault(final 
String columnName, final String defaultValue, - final Function fun) { + final Function fun) { if (columnNameToInstructions == null) { return defaultValue; } @@ -242,7 +238,7 @@ private String getOrDefault(final String columnName, final String defaultValue, } private boolean getOrDefault(final String columnName, final boolean defaultValue, - final Predicate fun) { + final Predicate fun) { if (columnNameToInstructions == null) { return defaultValue; } @@ -303,16 +299,15 @@ public boolean isLegacyParquet() { KeyedObjectHashMap copyColumnNameToInstructions() { // noinspection unchecked return (columnNameToInstructions == null) - ? null - : (KeyedObjectHashMap) columnNameToInstructions.clone(); + ? null + : (KeyedObjectHashMap) columnNameToInstructions.clone(); } KeyedObjectHashMap copyParquetColumnNameToInstructions() { // noinspection unchecked return (parquetColumnNameToInstructions == null) - ? null - : (KeyedObjectHashMap) parquetColumnNameToInstructions - .clone(); + ? null + : (KeyedObjectHashMap) parquetColumnNameToInstructions.clone(); } private static boolean sameCodecMappings(final ReadOnly r1, final ReadOnly r2) { @@ -341,8 +336,7 @@ private static boolean sameCodecMappings(final ReadOnly r1, final ReadOnly r2) { public static class Builder { private KeyedObjectHashMap columnNameToInstructions; - // Note parquetColumnNameToInstructions may be null while columnNameToInstructions is not - // null; + // Note parquetColumnNameToInstructions may be null while columnNameToInstructions is not null; // We only store entries in parquetColumnNameToInstructions when the parquetColumnName is // different than the columnName (ie, the column name mapping is not the default mapping) private KeyedObjectHashMap parquetColumnNameToInstructions; @@ -358,32 +352,29 @@ public Builder(final ParquetInstructions parquetInstructions) { } final ReadOnly readOnlyParquetInstructions = (ReadOnly) parquetInstructions; columnNameToInstructions = 
readOnlyParquetInstructions.copyColumnNameToInstructions(); - parquetColumnNameToInstructions = - readOnlyParquetInstructions.copyParquetColumnNameToInstructions(); + parquetColumnNameToInstructions = readOnlyParquetInstructions.copyParquetColumnNameToInstructions(); } private void newColumnNameToInstructionsMap() { - columnNameToInstructions = - new KeyedObjectHashMap<>(new KeyedObjectKey.Basic() { - @Override - public String getKey(@NotNull final ColumnInstructions value) { - return value.getColumnName(); - } - }); + columnNameToInstructions = new KeyedObjectHashMap<>(new KeyedObjectKey.Basic() { + @Override + public String getKey(@NotNull final ColumnInstructions value) { + return value.getColumnName(); + } + }); } private void newParquetColumnNameToInstructionsMap() { parquetColumnNameToInstructions = - new KeyedObjectHashMap<>(new KeyedObjectKey.Basic() { - @Override - public String getKey(@NotNull final ColumnInstructions value) { - return value.getParquetColumnName(); - } - }); + new KeyedObjectHashMap<>(new KeyedObjectKey.Basic() { + @Override + public String getKey(@NotNull final ColumnInstructions value) { + return value.getParquetColumnName(); + } + }); } - public Builder addColumnNameMapping(final String parquetColumnName, - final String columnName) { + public Builder addColumnNameMapping(final String parquetColumnName, final String columnName) { if (parquetColumnName.equals(columnName)) { return this; } @@ -404,9 +395,9 @@ public Builder addColumnNameMapping(final String parquetColumnName, return this; } throw new IllegalArgumentException( - "Cannot add a mapping from parquetColumnName=" + parquetColumnName - + ": columnName=" + columnName + " already mapped to parquetColumnName=" - + ci.parquetColumnName); + "Cannot add a mapping from parquetColumnName=" + parquetColumnName + + ": columnName=" + columnName + " already mapped to parquetColumnName=" + + ci.parquetColumnName); } } else { ci = new ColumnInstructions(columnName); @@ -420,16 +411,16 @@ 
public Builder addColumnNameMapping(final String parquetColumnName, } final ColumnInstructions fromParquetColumnNameInstructions = - parquetColumnNameToInstructions.get(parquetColumnName); + parquetColumnNameToInstructions.get(parquetColumnName); if (fromParquetColumnNameInstructions != null) { if (fromParquetColumnNameInstructions == ci) { return this; } throw new IllegalArgumentException( - "Cannot add new mapping from parquetColumnName=" + parquetColumnName - + " to columnName=" + columnName - + ": already mapped to columnName=" - + fromParquetColumnNameInstructions.getColumnName()); + "Cannot add new mapping from parquetColumnName=" + parquetColumnName + " to columnName=" + + columnName + + ": already mapped to columnName=" + + fromParquetColumnNameInstructions.getColumnName()); } ci.setParquetColumnName(parquetColumnName); parquetColumnNameToInstructions.put(parquetColumnName, ci); @@ -437,16 +428,14 @@ public Builder addColumnNameMapping(final String parquetColumnName, } public Set getTakenNames() { - return (columnNameToInstructions == null) ? Collections.emptySet() - : columnNameToInstructions.keySet(); + return (columnNameToInstructions == null) ? Collections.emptySet() : columnNameToInstructions.keySet(); } public Builder addColumnCodec(final String columnName, final String codecName) { return addColumnCodec(columnName, codecName, null); } - public Builder addColumnCodec(final String columnName, final String codecName, - final String codecArgs) { + public Builder addColumnCodec(final String columnName, final String codecName, final String codecArgs) { final ColumnInstructions ci = getColumnInstructions(columnName); ci.setCodecName(codecName); ci.setCodecArgs(codecArgs); @@ -454,8 +443,8 @@ public Builder addColumnCodec(final String columnName, final String codecName, } /** - * Set a hint that the writer should use dictionary-based encoding for writing this column; - * never evaluated for non-String columns. 
+ * Set a hint that the writer should use dictionary-based encoding for writing this column; never evaluated for + * non-String columns. * * @param columnName The column name * @param useDictionary The hint value @@ -484,15 +473,14 @@ public Builder setCompressionCodecName(final String compressionCodecName) { } /** - * Set the maximum number of unique keys the writer should add to a dictionary page before - * switching to non-dictionary encoding; never evaluated for non-String columns, ignored if - * {@link #useDictionary(String) use dictionary} is set for the column. + * Set the maximum number of unique keys the writer should add to a dictionary page before switching to + * non-dictionary encoding; never evaluated for non-String columns, ignored if {@link #useDictionary(String) use + * dictionary} is set for the column. * * @param maximumDictionaryKeys The maximum number of dictionary keys; must be {@code >= 0} */ public Builder setMaximumDictionaryKeys(final int maximumDictionaryKeys) { - this.maximumDictionaryKeys = - Require.geqZero(maximumDictionaryKeys, "maximumDictionaryKeys"); + this.maximumDictionaryKeys = Require.geqZero(maximumDictionaryKeys, "maximumDictionaryKeys"); return this; } @@ -502,14 +490,13 @@ public Builder setIsLegacyParquet(final boolean isLegacyParquet) { } public ParquetInstructions build() { - final KeyedObjectHashMap columnNameToInstructionsOut = - columnNameToInstructions; + final KeyedObjectHashMap columnNameToInstructionsOut = columnNameToInstructions; columnNameToInstructions = null; final KeyedObjectHashMap parquetColumnNameToColumnNameOut = - parquetColumnNameToInstructions; + parquetColumnNameToInstructions; parquetColumnNameToInstructions = null; - return new ReadOnly(columnNameToInstructionsOut, parquetColumnNameToColumnNameOut, - compressionCodecName, maximumDictionaryKeys, isLegacyParquet); + return new ReadOnly(columnNameToInstructionsOut, parquetColumnNameToColumnNameOut, compressionCodecName, + maximumDictionaryKeys, 
isLegacyParquet); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/parquet/ParquetSchemaReader.java b/DB/src/main/java/io/deephaven/db/v2/parquet/ParquetSchemaReader.java index 329b6fb4f95..022192977ec 100644 --- a/DB/src/main/java/io/deephaven/db/v2/parquet/ParquetSchemaReader.java +++ b/DB/src/main/java/io/deephaven/db/v2/parquet/ParquetSchemaReader.java @@ -39,17 +39,15 @@ public static final class ParquetMessageDefinition { /** The parquet type. */ public Class baseType; /** - * Some types require special annotations to support regular parquet tools and efficient DH - * handling. Examples are StringSet and DbArray; a parquet file with a DbIntArray special - * type metadata annotation, but storing types as repeated int, can be loaded both by other - * parquet tools and efficiently by DH. + * Some types require special annotations to support regular parquet tools and efficient DH handling. Examples + * are StringSet and DbArray; a parquet file with a DbIntArray special type metadata annotation, but storing + * types as repeated int, can be loaded both by other parquet tools and efficiently by DH. */ public ColumnTypeInfo.SpecialType dhSpecialType; /** - * Parquet 1.0 did not support logical types; if we encounter a type like this is true. For - * example, in parquet 1.0 binary columns with no annotation are used to represent strings. - * They are also used to represent other things that are not strings. Good luck, may the - * force be with you. + * Parquet 1.0 did not support logical types; if we encounter a type like this is true. For example, in parquet + * 1.0 binary columns with no annotation are used to represent strings. They are also used to represent other + * things that are not strings. Good luck, may the force be with you. */ public boolean noLogicalType; /** Your guess is good here */ @@ -57,19 +55,18 @@ public static final class ParquetMessageDefinition { /** Your guess is good here. 
*/ public boolean isGrouping; /** - * When codec metadata is present (which will be returned as modified read instructions - * below for actual codec name and args), we expect codec type and component type to be - * present. When they are present, codecType and codecComponentType take precedence over any - * other DH type deducing heuristics (and thus baseType in this structure can be ignored). + * When codec metadata is present (which will be returned as modified read instructions below for actual codec + * name and args), we expect codec type and component type to be present. When they are present, codecType and + * codecComponentType take precedence over any other DH type deducing heuristics (and thus baseType in this + * structure can be ignored). */ public String codecType; public String codecComponentType; /** - * We reuse the guts of this poor object between calls to avoid allocating. Like prometheus - * nailed to a mountain, this poor object has to suffer his guts being eaten forever. Or not - * forever but at least for one stack frame activation of readParquetSchema and as many - * columns that function finds in the file. + * We reuse the guts of this poor object between calls to avoid allocating. Like prometheus nailed to a + * mountain, this poor object has to suffer his guts being eaten forever. Or not forever but at least for one + * stack frame activation of readParquetSchema and as many columns that function finds in the file. */ void reset() { name = null; @@ -84,32 +81,26 @@ void reset() { * Obtain schema information from a parquet file * * @param filePath Location for input parquet file - * @param readInstructions Parquet read instructions specifying transformations like column - * mappings and codecs. Note a new read instructions based on this one may be returned by - * this method to provide necessary transformations, eg, replacing unsupported characters - * like ' ' (space) in column names. 
- * @param consumer A ColumnDefinitionConsumer whose accept method would be called for each - * column in the file - * @return Parquet read instructions, either the ones supplied or a new object based on the - * supplied with necessary transformations added. + * @param readInstructions Parquet read instructions specifying transformations like column mappings and codecs. + * Note a new read instructions based on this one may be returned by this method to provide necessary + * transformations, eg, replacing unsupported characters like ' ' (space) in column names. + * @param consumer A ColumnDefinitionConsumer whose accept method would be called for each column in the file + * @return Parquet read instructions, either the ones supplied or a new object based on the supplied with necessary + * transformations added. */ public static ParquetInstructions readParquetSchema( - @NotNull final String filePath, - @NotNull final ParquetInstructions readInstructions, - @NotNull final ColumnDefinitionConsumer consumer, - @NotNull final BiFunction, String> legalizeColumnNameFunc) - throws IOException { - final ParquetFileReader parquetFileReader = - ParquetTools.getParquetFileReader(new File(filePath)); + @NotNull final String filePath, + @NotNull final ParquetInstructions readInstructions, + @NotNull final ColumnDefinitionConsumer consumer, + @NotNull final BiFunction, String> legalizeColumnNameFunc) throws IOException { + final ParquetFileReader parquetFileReader = ParquetTools.getParquetFileReader(new File(filePath)); final ParquetMetadata parquetMetadata = - new ParquetMetadataConverter().fromParquetMetadata(parquetFileReader.fileMetaData); - return readParquetSchema(parquetFileReader.getSchema(), - parquetMetadata.getFileMetaData().getKeyValueMetaData(), readInstructions, consumer, - legalizeColumnNameFunc); + new ParquetMetadataConverter().fromParquetMetadata(parquetFileReader.fileMetaData); + return readParquetSchema(parquetFileReader.getSchema(), 
parquetMetadata.getFileMetaData().getKeyValueMetaData(), + readInstructions, consumer, legalizeColumnNameFunc); } - public static Optional parseMetadata( - @NotNull final Map keyValueMetadata) { + public static Optional parseMetadata(@NotNull final Map keyValueMetadata) { final String tableInfoRaw = keyValueMetadata.get(ParquetTableWriter.METADATA_KEY); if (tableInfoRaw == null) { return Optional.empty(); @@ -117,8 +108,7 @@ public static Optional parseMetadata( try { return Optional.of(TableInfo.deserializeFromJSON(tableInfoRaw)); } catch (JsonProcessingException e) { - throw new TableDataException( - "Failed to parse " + ParquetTableWriter.METADATA_KEY + " metadata", e); + throw new TableDataException("Failed to parse " + ParquetTableWriter.METADATA_KEY + " metadata", e); } } @@ -128,33 +118,30 @@ public static Optional parseMetadata( * @param schema Parquet schema. DO NOT RELY ON {@link ParquetMetadataConverter} FOR THIS! USE * {@link ParquetFileReader}! * @param keyValueMetadata Parquet key-value metadata map - * @param readInstructions Parquet read instructions specifying transformations like column - * mappings and codecs. Note a new read instructions based on this one may be returned by - * this method to provide necessary transformations, eg, replacing unsupported characters - * like ' ' (space) in column names. - * @param consumer A ColumnDefinitionConsumer whose accept method would be called for each - * column in the file - * @return Parquet read instructions, either the ones supplied or a new object based on the - * supplied with necessary transformations added. + * @param readInstructions Parquet read instructions specifying transformations like column mappings and codecs. + * Note a new read instructions based on this one may be returned by this method to provide necessary + * transformations, eg, replacing unsupported characters like ' ' (space) in column names. 
+ * @param consumer A ColumnDefinitionConsumer whose accept method would be called for each column in the file + * @return Parquet read instructions, either the ones supplied or a new object based on the supplied with necessary + * transformations added. */ public static ParquetInstructions readParquetSchema( - @NotNull final MessageType schema, - @NotNull final Map keyValueMetadata, - @NotNull final ParquetInstructions readInstructions, - @NotNull final ColumnDefinitionConsumer consumer, - @NotNull final BiFunction, String> legalizeColumnNameFunc) { + @NotNull final MessageType schema, + @NotNull final Map keyValueMetadata, + @NotNull final ParquetInstructions readInstructions, + @NotNull final ColumnDefinitionConsumer consumer, + @NotNull final BiFunction, String> legalizeColumnNameFunc) { final MutableObject errorString = new MutableObject<>(); final MutableObject currentColumn = new MutableObject<>(); final Optional tableInfo = parseMetadata(keyValueMetadata); final Set groupingColumnNames = - tableInfo.map(TableInfo::groupingColumnNames).orElse(Collections.emptySet()); + tableInfo.map(TableInfo::groupingColumnNames).orElse(Collections.emptySet()); final Map nonDefaultTypeColumns = - tableInfo.map(TableInfo::columnTypeMap).orElse(Collections.emptyMap()); + tableInfo.map(TableInfo::columnTypeMap).orElse(Collections.emptyMap()); final LogicalTypeAnnotation.LogicalTypeAnnotationVisitor> visitor = - getVisitor(nonDefaultTypeColumns, errorString, currentColumn); + getVisitor(nonDefaultTypeColumns, errorString, currentColumn); - final MutableObject instructionsBuilder = - new MutableObject<>(); + final MutableObject instructionsBuilder = new MutableObject<>(); final Supplier builderSupplier = () -> { if (instructionsBuilder.getValue() == null) { instructionsBuilder.setValue(new ParquetInstructions.Builder(readInstructions)); @@ -167,40 +154,34 @@ public static ParquetInstructions readParquetSchema( if (column.getMaxRepetitionLevel() > 1) { // TODO 
(https://github.com/deephaven/deephaven-core/issues/871): Support this throw new UnsupportedOperationException("Unsupported maximum repetition level " - + column.getMaxRepetitionLevel() + " in column " - + String.join("/", column.getPath())); + + column.getMaxRepetitionLevel() + " in column " + String.join("/", column.getPath())); } colDef.reset(); currentColumn.setValue(column); final PrimitiveType primitiveType = column.getPrimitiveType(); - final LogicalTypeAnnotation logicalTypeAnnotation = - primitiveType.getLogicalTypeAnnotation(); + final LogicalTypeAnnotation logicalTypeAnnotation = primitiveType.getLogicalTypeAnnotation(); final String parquetColumnName = column.getPath()[0]; - parquetColumnNameToFirstPath.compute(parquetColumnName, - (final String pcn, final String[] oldPath) -> { - if (oldPath != null) { - // TODO (https://github.com/deephaven/deephaven-core/issues/871): Support - // this - throw new UnsupportedOperationException( - "Encountered unsupported multi-column field " - + parquetColumnName + ": found columns " + String.join("/", oldPath) - + " and " + String.join("/", column.getPath())); - } - return column.getPath(); - }); + parquetColumnNameToFirstPath.compute(parquetColumnName, (final String pcn, final String[] oldPath) -> { + if (oldPath != null) { + // TODO (https://github.com/deephaven/deephaven-core/issues/871): Support this + throw new UnsupportedOperationException("Encountered unsupported multi-column field " + + parquetColumnName + ": found columns " + String.join("/", oldPath) + " and " + + String.join("/", column.getPath())); + } + return column.getPath(); + }); final String colName; - final String mappedName = - readInstructions.getColumnNameFromParquetColumnName(parquetColumnName); + final String mappedName = readInstructions.getColumnNameFromParquetColumnName(parquetColumnName); if (mappedName != null) { colName = mappedName; } else { final String legalized = legalizeColumnNameFunc.apply( - parquetColumnName, - 
(instructionsBuilder.getValue() == null) - ? Collections.emptySet() - : instructionsBuilder.getValue().getTakenNames()); + parquetColumnName, + (instructionsBuilder.getValue() == null) + ? Collections.emptySet() + : instructionsBuilder.getValue().getTakenNames()); if (!legalized.equals(parquetColumnName)) { colName = legalized; builderSupplier.get().addColumnNameMapping(parquetColumnName, colName); @@ -208,8 +189,7 @@ public static ParquetInstructions readParquetSchema( colName = parquetColumnName; } } - final Optional columnTypeInfo = - Optional.ofNullable(nonDefaultTypeColumns.get(colName)); + final Optional columnTypeInfo = Optional.ofNullable(nonDefaultTypeColumns.get(colName)); colDef.name = colName; colDef.dhSpecialType = columnTypeInfo.flatMap(ColumnTypeInfo::specialType).orElse(null); @@ -223,15 +203,13 @@ public static ParquetInstructions readParquetSchema( } colDef.isArray = column.getMaxRepetitionLevel() > 0; if (colDef.codecType != null && !colDef.codecType.isEmpty()) { - colDef.codecComponentType = - codecInfo.flatMap(CodecInfo::componentType).orElse(null); + colDef.codecComponentType = codecInfo.flatMap(CodecInfo::componentType).orElse(null); consumer.accept(colDef); continue; } if (logicalTypeAnnotation == null) { colDef.noLogicalType = true; - final PrimitiveType.PrimitiveTypeName typeName = - primitiveType.getPrimitiveTypeName(); + final PrimitiveType.PrimitiveTypeName typeName = primitiveType.getPrimitiveTypeName(); switch (typeName) { case BOOLEAN: colDef.baseType = boolean.class; @@ -255,20 +233,17 @@ public static ParquetInstructions readParquetSchema( case FIXED_LEN_BYTE_ARRAY: if (colDef.dhSpecialType != null) { if (colDef.dhSpecialType == ColumnTypeInfo.SpecialType.StringSet) { - colDef.baseType = null; // when dhSpecialType is set, it takes - // precedence. + colDef.baseType = null; // when dhSpecialType is set, it takes precedence. colDef.isArray = true; } else { // We don't expect to see any other special types here. 
throw new UncheckedDeephavenException( - "BINARY or FIXED_LEN_BYTE_ARRAY type " - + column.getPrimitiveType() - + " for column " + Arrays.toString(column.getPath()) - + " with unknown special type " + colDef.dhSpecialType); + "BINARY or FIXED_LEN_BYTE_ARRAY type " + column.getPrimitiveType() + + " for column " + Arrays.toString(column.getPath()) + + " with unknown special type " + colDef.dhSpecialType); } } else if (codecName == null || codecName.isEmpty()) { - codecArgs = - (typeName == PrimitiveType.PrimitiveTypeName.FIXED_LEN_BYTE_ARRAY) + codecArgs = (typeName == PrimitiveType.PrimitiveTypeName.FIXED_LEN_BYTE_ARRAY) ? Integer.toString(primitiveType.getTypeLength()) : null; if (readInstructions.isLegacyParquet()) { @@ -284,99 +259,91 @@ public static ParquetInstructions readParquetSchema( break; default: throw new UncheckedDeephavenException( - "Unhandled type " + column.getPrimitiveType() - + " for column " + Arrays.toString(column.getPath())); + "Unhandled type " + column.getPrimitiveType() + + " for column " + Arrays.toString(column.getPath())); } } else { colDef.baseType = logicalTypeAnnotation.accept(visitor).orElseThrow(() -> { final String logicalTypeString = errorString.getValue(); - String msg = - "Unable to read column " + Arrays.toString(column.getPath()) + ": "; + String msg = "Unable to read column " + Arrays.toString(column.getPath()) + ": "; msg += (logicalTypeString != null) - ? (logicalTypeString + " not supported") - : "no mappable logical type annotation found"; + ? (logicalTypeString + " not supported") + : "no mappable logical type annotation found"; return new UncheckedDeephavenException(msg); }); } consumer.accept(colDef); } return (instructionsBuilder.getValue() == null) - ? readInstructions - : instructionsBuilder.getValue().build(); + ? 
readInstructions + : instructionsBuilder.getValue().build(); } private static LogicalTypeAnnotation.LogicalTypeAnnotationVisitor> getVisitor( - final Map nonDefaultTypeColumns, - final MutableObject errorString, - final MutableObject currentColumn) { + final Map nonDefaultTypeColumns, + final MutableObject errorString, + final MutableObject currentColumn) { return new LogicalTypeAnnotation.LogicalTypeAnnotationVisitor>() { @Override - public Optional> visit( - final LogicalTypeAnnotation.StringLogicalTypeAnnotation stringLogicalType) { + public Optional> visit(final LogicalTypeAnnotation.StringLogicalTypeAnnotation stringLogicalType) { final ColumnDescriptor column = currentColumn.getValue(); final String columnName = column.getPath()[0]; final ColumnTypeInfo columnTypeInfo = nonDefaultTypeColumns.get(columnName); final ColumnTypeInfo.SpecialType specialType = - columnTypeInfo == null ? null : columnTypeInfo.specialType().orElse(null); + columnTypeInfo == null ? null : columnTypeInfo.specialType().orElse(null); if (specialType != null) { if (specialType == ColumnTypeInfo.SpecialType.StringSet) { return Optional.of(StringSet.class); } if (specialType != ColumnTypeInfo.SpecialType.Vector) { throw new UncheckedDeephavenException("Type " + column.getPrimitiveType() - + " for column " + Arrays.toString(column.getPath()) - + " with unknown or incompatible special type " + specialType); + + " for column " + Arrays.toString(column.getPath()) + + " with unknown or incompatible special type " + specialType); } } return Optional.of(String.class); } @Override - public Optional> visit( - final LogicalTypeAnnotation.MapLogicalTypeAnnotation mapLogicalType) { + public Optional> visit(final LogicalTypeAnnotation.MapLogicalTypeAnnotation mapLogicalType) { errorString.setValue("MapLogicalType"); return Optional.empty(); } @Override - public Optional> visit( - final LogicalTypeAnnotation.ListLogicalTypeAnnotation listLogicalType) { + public Optional> visit(final 
LogicalTypeAnnotation.ListLogicalTypeAnnotation listLogicalType) { errorString.setValue("ListLogicalType"); return Optional.empty(); } @Override - public Optional> visit( - final LogicalTypeAnnotation.EnumLogicalTypeAnnotation enumLogicalType) { + public Optional> visit(final LogicalTypeAnnotation.EnumLogicalTypeAnnotation enumLogicalType) { errorString.setValue("EnumLogicalType"); return Optional.empty(); } @Override public Optional> visit( - final LogicalTypeAnnotation.DecimalLogicalTypeAnnotation decimalLogicalType) { + final LogicalTypeAnnotation.DecimalLogicalTypeAnnotation decimalLogicalType) { errorString.setValue("DecimalLogicalType"); return Optional.empty(); } @Override - public Optional> visit( - final LogicalTypeAnnotation.DateLogicalTypeAnnotation dateLogicalType) { + public Optional> visit(final LogicalTypeAnnotation.DateLogicalTypeAnnotation dateLogicalType) { errorString.setValue("DateLogicalType"); return Optional.empty(); } @Override - public Optional> visit( - final LogicalTypeAnnotation.TimeLogicalTypeAnnotation timeLogicalType) { - errorString.setValue( - "TimeLogicalType, isAdjustedToUTC=" + timeLogicalType.isAdjustedToUTC()); + public Optional> visit(final LogicalTypeAnnotation.TimeLogicalTypeAnnotation timeLogicalType) { + errorString.setValue("TimeLogicalType, isAdjustedToUTC=" + timeLogicalType.isAdjustedToUTC()); return Optional.empty(); } @Override public Optional> visit( - final LogicalTypeAnnotation.TimestampLogicalTypeAnnotation timestampLogicalType) { + final LogicalTypeAnnotation.TimestampLogicalTypeAnnotation timestampLogicalType) { if (timestampLogicalType.isAdjustedToUTC()) { switch (timestampLogicalType.getUnit()) { case MILLIS: @@ -385,15 +352,13 @@ public Optional> visit( return Optional.of(io.deephaven.db.tables.utils.DBDateTime.class); } } - errorString.setValue("TimestampLogicalType, isAdjustedToUTC=" - + timestampLogicalType.isAdjustedToUTC() + ", unit=" - + timestampLogicalType.getUnit()); + 
errorString.setValue("TimestampLogicalType, isAdjustedToUTC=" + timestampLogicalType.isAdjustedToUTC() + + ", unit=" + timestampLogicalType.getUnit()); return Optional.empty(); } @Override - public Optional> visit( - final LogicalTypeAnnotation.IntLogicalTypeAnnotation intLogicalType) { + public Optional> visit(final LogicalTypeAnnotation.IntLogicalTypeAnnotation intLogicalType) { // Ensure this stays in sync with ReadOnlyParquetTableLocation.LogicalTypeVisitor. if (intLogicalType.isSigned()) { switch (intLogicalType.getBitWidth()) { @@ -421,42 +386,39 @@ public Optional> visit( // fallthrough. } } - errorString.setValue("IntLogicalType, isSigned=" + intLogicalType.isSigned() - + ", bitWidth=" + intLogicalType.getBitWidth()); + errorString.setValue("IntLogicalType, isSigned=" + intLogicalType.isSigned() + ", bitWidth=" + + intLogicalType.getBitWidth()); return Optional.empty(); } @Override - public Optional> visit( - final LogicalTypeAnnotation.JsonLogicalTypeAnnotation jsonLogicalType) { + public Optional> visit(final LogicalTypeAnnotation.JsonLogicalTypeAnnotation jsonLogicalType) { errorString.setValue("JsonLogicalType"); return Optional.empty(); } @Override - public Optional> visit( - final LogicalTypeAnnotation.BsonLogicalTypeAnnotation bsonLogicalType) { + public Optional> visit(final LogicalTypeAnnotation.BsonLogicalTypeAnnotation bsonLogicalType) { errorString.setValue("BsonLogicalType"); return Optional.empty(); } @Override - public Optional> visit( - final LogicalTypeAnnotation.UUIDLogicalTypeAnnotation uuidLogicalType) { + public Optional> visit(final LogicalTypeAnnotation.UUIDLogicalTypeAnnotation uuidLogicalType) { errorString.setValue("UUIDLogicalType"); return Optional.empty(); } @Override public Optional> visit( - final LogicalTypeAnnotation.IntervalLogicalTypeAnnotation intervalLogicalType) { + final LogicalTypeAnnotation.IntervalLogicalTypeAnnotation intervalLogicalType) { errorString.setValue("IntervalLogicalType"); return Optional.empty(); } 
@Override public Optional> visit( - final LogicalTypeAnnotation.MapKeyValueTypeAnnotation mapKeyValueLogicalType) { + final LogicalTypeAnnotation.MapKeyValueTypeAnnotation mapKeyValueLogicalType) { errorString.setValue("MapKeyValueType"); return Optional.empty(); } diff --git a/DB/src/main/java/io/deephaven/db/v2/parquet/ParquetTableWriter.java b/DB/src/main/java/io/deephaven/db/v2/parquet/ParquetTableWriter.java index 425e6106f58..464fe6d17db 100644 --- a/DB/src/main/java/io/deephaven/db/v2/parquet/ParquetTableWriter.java +++ b/DB/src/main/java/io/deephaven/db/v2/parquet/ParquetTableWriter.java @@ -85,8 +85,8 @@ public static Function defaultGroupingFileName(final String path /** *

    - * Information about a writing destination (e.g. a particular output partition). Couples - * destination path, input table data, and grouping information. + * Information about a writing destination (e.g. a particular output partition). Couples destination path, input + * table data, and grouping information. */ public static final class DestinationInfo { @@ -95,8 +95,8 @@ public static final class DestinationInfo { private final Map> columnNameToGroupToRange; public DestinationInfo(@NotNull final String outputPath, - @NotNull final Table inputTable, - @NotNull final Map> columnNameToGroupToRange) { + @NotNull final Table inputTable, + @NotNull final Map> columnNameToGroupToRange) { this.outputPath = outputPath; this.inputTable = inputTable; this.columnNameToGroupToRange = columnNameToGroupToRange; @@ -137,22 +137,21 @@ public Map> getColumnNameToGroupToRange() { * @param path The destination path * @param incomingMeta A map of metadata values to be stores in the file footer * @param groupingPathFactory - * @param groupingColumns List of columns the tables are grouped by (the write operation will - * store the grouping info) - * @throws SchemaMappingException Error creating a parquet table schema for the given table - * (likely due to unsupported types) + * @param groupingColumns List of columns the tables are grouped by (the write operation will store the grouping + * info) + * @throws SchemaMappingException Error creating a parquet table schema for the given table (likely due to + * unsupported types) * @throws IOException For file writing related errors */ public static void write( - Table t, String path, Map incomingMeta, - Function groupingPathFactory, String... groupingColumns) - throws SchemaMappingException, IOException { - write(t, t.getDefinition(), ParquetInstructions.EMPTY, path, incomingMeta, - groupingPathFactory, groupingColumns); + Table t, String path, Map incomingMeta, Function groupingPathFactory, + String... 
groupingColumns) throws SchemaMappingException, IOException { + write(t, t.getDefinition(), ParquetInstructions.EMPTY, path, incomingMeta, groupingPathFactory, + groupingColumns); } - public static void write(Table t, String path, Map incomingMeta, - String... groupingColumns) throws SchemaMappingException, IOException { + public static void write(Table t, String path, Map incomingMeta, String... groupingColumns) + throws SchemaMappingException, IOException { write(t, path, incomingMeta, defaultGroupingFileName(path), groupingColumns); } @@ -165,35 +164,35 @@ public static void write(Table t, String path, Map incomingMeta, * @param destPathName The destination path * @param incomingMeta A map of metadata values to be stores in the file footer * @param groupingPathFactory - * @param groupingColumns List of columns the tables are grouped by (the write operation will - * store the grouping info) - * @throws SchemaMappingException Error creating a parquet table schema for the given table - * (likely due to unsupported types) + * @param groupingColumns List of columns the tables are grouped by (the write operation will store the grouping + * info) + * @throws SchemaMappingException Error creating a parquet table schema for the given table (likely due to + * unsupported types) * @throws IOException For file writing related errors */ public static void write( - final Table t, - final TableDefinition definition, - final ParquetInstructions writeInstructions, - final String destPathName, - final Map incomingMeta, - final Function groupingPathFactory, - final String... groupingColumns) throws SchemaMappingException, IOException { + final Table t, + final TableDefinition definition, + final ParquetInstructions writeInstructions, + final String destPathName, + final Map incomingMeta, + final Function groupingPathFactory, + final String... 
groupingColumns) throws SchemaMappingException, IOException { final TableInfo.Builder tableInfoBuilder = TableInfo.builder(); ArrayList cleanupPaths = null; try { if (groupingColumns.length > 0) { cleanupPaths = new ArrayList<>(groupingColumns.length); final Table[] auxiliaryTables = Arrays.stream(groupingColumns) - .map(columnName -> groupingAsTable(t, columnName)).toArray(Table[]::new); + .map(columnName -> groupingAsTable(t, columnName)).toArray(Table[]::new); final Path destDirPath = Paths.get(destPathName).getParent(); for (int gci = 0; gci < auxiliaryTables.length; ++gci) { final String groupingPath = groupingPathFactory.apply(groupingColumns[gci]); cleanupPaths.add(groupingPath); tableInfoBuilder.addGroupingColumns(GroupingColumnInfo.of(groupingColumns[gci], - destDirPath.relativize(Paths.get(groupingPath)).toString())); - write(auxiliaryTables[gci], auxiliaryTables[gci].getDefinition(), - writeInstructions, groupingPath, Collections.emptyMap()); + destDirPath.relativize(Paths.get(groupingPath)).toString())); + write(auxiliaryTables[gci], auxiliaryTables[gci].getDefinition(), writeInstructions, groupingPath, + Collections.emptyMap()); } } write(t, definition, writeInstructions, destPathName, incomingMeta, tableInfoBuilder); @@ -212,12 +211,11 @@ public static void write( } public static void write( - final Table t, final TableDefinition definition, - final ParquetInstructions writeInstructions, final String path, - final Map incomingMeta, final String... groupingColumns) - throws SchemaMappingException, IOException { - write(t, definition, writeInstructions, path, incomingMeta, defaultGroupingFileName(path), - groupingColumns); + final Table t, final TableDefinition definition, final ParquetInstructions writeInstructions, + final String path, + final Map incomingMeta, final String... 
groupingColumns) + throws SchemaMappingException, IOException { + write(t, definition, writeInstructions, path, incomingMeta, defaultGroupingFileName(path), groupingColumns); } /** @@ -229,35 +227,34 @@ public static void write( * @param path The destination path * @param tableMeta A map of metadata values to be stores in the file footer * @param tableInfoBuilder A partially-constructed builder for the metadata object - * @throws SchemaMappingException Error creating a parquet table schema for the given table - * (likely due to unsupported types) + * @throws SchemaMappingException Error creating a parquet table schema for the given table (likely due to + * unsupported types) * @throws IOException For file writing related errors */ public static void write( - final Table table, - final TableDefinition definition, - final ParquetInstructions writeInstructions, - final String path, - final Map tableMeta, - final TableInfo.Builder tableInfoBuilder) throws SchemaMappingException, IOException { + final Table table, + final TableDefinition definition, + final ParquetInstructions writeInstructions, + final String path, + final Map tableMeta, + final TableInfo.Builder tableInfoBuilder) throws SchemaMappingException, IOException { final CompressionCodecName compressionCodecName = - CompressionCodecName.valueOf(writeInstructions.getCompressionCodecName()); - ParquetFileWriter parquetFileWriter = getParquetFileWriter(definition, path, - writeInstructions, tableMeta, tableInfoBuilder, compressionCodecName); + CompressionCodecName.valueOf(writeInstructions.getCompressionCodecName()); + ParquetFileWriter parquetFileWriter = getParquetFileWriter(definition, path, writeInstructions, tableMeta, + tableInfoBuilder, compressionCodecName); final Table t = pretransformTable(table, definition); final long nrows = t.size(); if (nrows > 0) { RowGroupWriter rowGroupWriter = parquetFileWriter.addRowGroup(nrows); // noinspection rawtypes - for (Map.Entry nameToSource : t.getColumnSourceMap() 
- .entrySet()) { + for (Map.Entry nameToSource : t.getColumnSourceMap().entrySet()) { String name = nameToSource.getKey(); ColumnSource columnSource = nameToSource.getValue(); try { - writeColumnSource(t.getIndex(), rowGroupWriter, name, columnSource, - definition.getColumn(name), writeInstructions); + writeColumnSource(t.getIndex(), rowGroupWriter, name, columnSource, definition.getColumn(name), + writeInstructions); } catch (IllegalAccessException e) { throw new RuntimeException("Failed to write column " + name, e); } @@ -275,44 +272,40 @@ private static Table pretransformTable(final Table table, final TableDefinition final String colName = column.getName(); if (t.hasColumns(colName)) { if (StringSet.class.isAssignableFrom(column.getDataType())) { - updateViewColumnsTransform - .add(FormulaColumn.createFormulaColumn(colName, colName + ".values()")); + updateViewColumnsTransform.add(FormulaColumn.createFormulaColumn(colName, colName + ".values()")); } viewColumnsTransform.add(new SourceColumn(colName)); } else { // noinspection unchecked - viewColumnsTransform.add( - new NullSelectColumn(column.getDataType(), column.getComponentType(), colName)); + viewColumnsTransform + .add(new NullSelectColumn(column.getDataType(), column.getComponentType(), colName)); } } if (viewColumnsTransform.size() > 0) { - t = t - .view(viewColumnsTransform.toArray((SelectColumn.ZERO_LENGTH_SELECT_COLUMN_ARRAY))); + t = t.view(viewColumnsTransform.toArray((SelectColumn.ZERO_LENGTH_SELECT_COLUMN_ARRAY))); } if (updateViewColumnsTransform.size() > 0) { - t = t.updateView( - updateViewColumnsTransform.toArray(SelectColumn.ZERO_LENGTH_SELECT_COLUMN_ARRAY)); + t = t.updateView(updateViewColumnsTransform.toArray(SelectColumn.ZERO_LENGTH_SELECT_COLUMN_ARRAY)); } return t; } @NotNull private static ParquetFileWriter getParquetFileWriter( - final TableDefinition definition, - final String path, - final ParquetInstructions writeInstructions, - final Map tableMeta, - final TableInfo.Builder 
tableInfoBuilder, - final CompressionCodecName codecName) throws IOException { + final TableDefinition definition, + final String path, + final ParquetInstructions writeInstructions, + final Map tableMeta, + final TableInfo.Builder tableInfoBuilder, + final CompressionCodecName codecName) throws IOException { final MappedSchema mappedSchema = MappedSchema.create(definition, writeInstructions); final Map extraMetaData = new HashMap<>(tableMeta); for (final ColumnDefinition column : definition.getColumns()) { final String colName = column.getName(); final ColumnTypeInfo.Builder columnInfoBuilder = ColumnTypeInfo.builder() - .columnName(writeInstructions.getParquetColumnNameFromColumnNameOrDefault(colName)); + .columnName(writeInstructions.getParquetColumnNameFromColumnNameOrDefault(colName)); boolean usedColumnInfo = false; - final Pair codecData = - TypeInfos.getCodecAndArgs(column, writeInstructions); + final Pair codecData = TypeInfos.getCodecAndArgs(column, writeInstructions); if (codecData != null) { final CodecInfo.Builder codecInfoBuilder = CodecInfo.builder(); codecInfoBuilder.codecName(codecData.getLeft()); @@ -341,17 +334,16 @@ private static ParquetFileWriter getParquetFileWriter( } extraMetaData.put(METADATA_KEY, tableInfoBuilder.build().serializeToJSON()); return new ParquetFileWriter(path, TrackedSeekableChannelsProvider.getInstance(), PAGE_SIZE, - new HeapByteBufferAllocator(), mappedSchema.getParquetSchema(), codecName, - extraMetaData); + new HeapByteBufferAllocator(), mappedSchema.getParquetSchema(), codecName, extraMetaData); } private static void writeColumnSource( - final Index tableIndex, - final RowGroupWriter rowGroupWriter, - final String name, - final ColumnSource columnSourceIn, - final ColumnDefinition columnDefinition, - final ParquetInstructions writeInstructions) throws IllegalAccessException, IOException { + final Index tableIndex, + final RowGroupWriter rowGroupWriter, + final String name, + final ColumnSource columnSourceIn, + final 
ColumnDefinition columnDefinition, + final ParquetInstructions writeInstructions) throws IllegalAccessException, IOException { Index index = tableIndex; ColumnSource columnSource = columnSourceIn; ColumnSource lengthSource = null; @@ -361,33 +353,29 @@ private static void writeColumnSource( Supplier valuesStepGetter; int stepsCount; if (columnSource.getComponentType() != null - && !CodecLookup - .explicitCodecPresent(writeInstructions.getCodecName(columnDefinition.getName())) - && !CodecLookup.codecRequired(columnDefinition)) { + && !CodecLookup.explicitCodecPresent(writeInstructions.getCodecName(columnDefinition.getName())) + && !CodecLookup.codecRequired(columnDefinition)) { targetSize = getTargetSize(columnSource.getComponentType()); HashMap columns = new HashMap<>(); columns.put("array", columnSource); Table t = new QueryTable(index, columns); - lengthSource = t.view("len= ((Object)array) == null?null:(int)array." - + (DbArrayBase.class.isAssignableFrom(columnSource.getType()) ? "size()" - : "length")) - .getColumnSource("len"); + lengthSource = t + .view("len= ((Object)array) == null?null:(int)array." + + (DbArrayBase.class.isAssignableFrom(columnSource.getType()) ? 
"size()" : "length")) + .getColumnSource("len"); lengthIndex = index; List valueChunkSize = new ArrayList<>(); List originalChunkSize = new ArrayList<>(); int runningSize = 0; int originalRowsCount = 0; - try ( - final ChunkSource.GetContext context = - lengthSource.makeGetContext(LOCAL_CHUNK_SIZE); - final OrderedKeys.Iterator it = index.getOrderedKeysIterator()) { + try (final ChunkSource.GetContext context = lengthSource.makeGetContext(LOCAL_CHUNK_SIZE); + final OrderedKeys.Iterator it = index.getOrderedKeysIterator()) { while (it.hasMore()) { OrderedKeys ok = it.getNextOrderedKeysWithLength(LOCAL_CHUNK_SIZE); IntChunk chunk = (IntChunk) lengthSource.getChunk(context, ok); for (int i = 0; i < chunk.size(); i++) { if (chunk.get(i) != Integer.MIN_VALUE) { - if (runningSize + chunk.get(i) > targetSize - || originalRowsCount + 1 > targetSize) { + if (runningSize + chunk.get(i) > targetSize || originalRowsCount + 1 > targetSize) { if (runningSize > targetSize) { targetSize = chunk.get(i); } @@ -429,8 +417,7 @@ public Integer get() { } else { int finalTargetSize = targetSize; rowStepGetter = valuesStepGetter = () -> finalTargetSize; - stepsCount = (int) (index.size() / finalTargetSize - + ((index.size() % finalTargetSize) == 0 ? 0 : 1)); + stepsCount = (int) (index.size() / finalTargetSize + ((index.size() % finalTargetSize) == 0 ? 0 : 1)); } Class columnType = columnSource.getType(); if (columnType == DBDateTime.class) { @@ -444,31 +431,26 @@ public Integer get() { boolean usedDictionary = false; if (supportsDictionary(columnSource.getType())) { - final boolean useDictionaryHint = - writeInstructions.useDictionary(columnDefinition.getName()); - final int maxKeys = useDictionaryHint ? Integer.MAX_VALUE - : writeInstructions.getMaximumDictionaryKeys(); + final boolean useDictionaryHint = writeInstructions.useDictionary(columnDefinition.getName()); + final int maxKeys = useDictionaryHint ? 
Integer.MAX_VALUE : writeInstructions.getMaximumDictionaryKeys(); final class DictionarySizeExceededException extends RuntimeException { } try { final List buffersPerPage = new ArrayList<>(); - final Function keyArrayBuilder = - getKeyArrayBuilder(columnSource.getType()); - final Function toParquetPrimitive = - getToParquetConversion(columnSource.getType()); - final MutableObject keys = new MutableObject<>( - keyArrayBuilder.apply(Math.min(INITIAL_DICTIONARY_SIZE, maxKeys))); + final Function keyArrayBuilder = getKeyArrayBuilder(columnSource.getType()); + final Function toParquetPrimitive = getToParquetConversion(columnSource.getType()); + final MutableObject keys = + new MutableObject<>(keyArrayBuilder.apply(Math.min(INITIAL_DICTIONARY_SIZE, maxKeys))); final Map keyToPos = new HashMap<>(); final MutableInt keyCount = new MutableInt(0); final MutableBoolean hasNulls = new MutableBoolean(false); try (final ChunkSource.GetContext context = columnSource.makeGetContext(targetSize); - final OrderedKeys.Iterator it = index.getOrderedKeysIterator()) { + final OrderedKeys.Iterator it = index.getOrderedKeysIterator()) { for (int step = 0; step < stepsCount; step++) { - final OrderedKeys ok = - it.getNextOrderedKeysWithLength(valuesStepGetter.get()); + final OrderedKeys ok = it.getNextOrderedKeysWithLength(valuesStepGetter.get()); // noinspection unchecked final ObjectChunk chunk = - (ObjectChunk) columnSource.getChunk(context, ok); + (ObjectChunk) columnSource.getChunk(context, ok); final IntBuffer posInDictionary = IntBuffer.allocate((int) ok.size()); for (int vi = 0; vi < chunk.size(); ++vi) { posInDictionary.put(keyToPos.computeIfAbsent(chunk.get(vi), o -> { @@ -481,7 +463,7 @@ final class DictionarySizeExceededException extends RuntimeException { throw new DictionarySizeExceededException(); } keys.setValue(Arrays.copyOf(keys.getValue(), - (int) Math.min(keyCount.intValue() * 2L, maxKeys))); + (int) Math.min(keyCount.intValue() * 2L, maxKeys))); } 
keys.getValue()[keyCount.intValue()] = toParquetPrimitive.apply(o); Integer result = keyCount.getValue(); @@ -496,13 +478,10 @@ final class DictionarySizeExceededException extends RuntimeException { List repeatCount = null; if (lengthSource != null) { repeatCount = new ArrayList<>(); - try ( - final ChunkSource.GetContext context = - lengthSource.makeGetContext(targetSize); - final OrderedKeys.Iterator it = lengthIndex.getOrderedKeysIterator()) { + try (final ChunkSource.GetContext context = lengthSource.makeGetContext(targetSize); + final OrderedKeys.Iterator it = lengthIndex.getOrderedKeysIterator()) { while (it.hasMore()) { - final OrderedKeys ok = - it.getNextOrderedKeysWithLength(rowStepGetter.get()); + final OrderedKeys ok = it.getNextOrderedKeysWithLength(rowStepGetter.get()); final IntChunk chunk = (IntChunk) lengthSource.getChunk(context, ok); final IntBuffer newBuffer = IntBuffer.allocate(chunk.size()); chunk.copyToTypedBuffer(0, newBuffer, 0, chunk.size()); @@ -512,13 +491,12 @@ final class DictionarySizeExceededException extends RuntimeException { } } columnWriter.addDictionaryPage(keys.getValue(), keyCount.intValue()); - final Iterator repeatCountIt = - repeatCount == null ? null : repeatCount.iterator(); + final Iterator repeatCountIt = repeatCount == null ? 
null : repeatCount.iterator(); for (final IntBuffer intBuffer : buffersPerPage) { intBuffer.flip(); if (lengthSource != null) { - columnWriter.addVectorPage(intBuffer, repeatCountIt.next(), - intBuffer.remaining(), Integer.MIN_VALUE); + columnWriter.addVectorPage(intBuffer, repeatCountIt.next(), intBuffer.remaining(), + Integer.MIN_VALUE); } else if (hasNulls.getValue()) { columnWriter.addPage(intBuffer, Integer.MIN_VALUE, intBuffer.remaining()); } else { @@ -531,36 +509,31 @@ final class DictionarySizeExceededException extends RuntimeException { } if (!usedDictionary) { // noinspection unchecked - try (final TransferObject transferObject = getDestinationBuffer(columnSource, - columnDefinition, targetSize, columnType, writeInstructions)) { + try (final TransferObject transferObject = + getDestinationBuffer(columnSource, columnDefinition, targetSize, columnType, writeInstructions)) { final boolean supportNulls = supportNulls(columnType); final Object bufferToWrite = transferObject.getBuffer(); final Object nullValue = getNullValue(columnType); - try ( - final OrderedKeys.Iterator lengthIndexIt = + try (final OrderedKeys.Iterator lengthIndexIt = lengthIndex != null ? lengthIndex.getOrderedKeysIterator() : null; - final ChunkSource.GetContext lengthSourceContext = - lengthSource != null ? lengthSource.makeGetContext(targetSize) : null; - final OrderedKeys.Iterator it = index.getOrderedKeysIterator()) { - final IntBuffer repeatCount = - lengthSource != null ? IntBuffer.allocate(targetSize) : null; + final ChunkSource.GetContext lengthSourceContext = + lengthSource != null ? lengthSource.makeGetContext(targetSize) : null; + final OrderedKeys.Iterator it = index.getOrderedKeysIterator()) { + final IntBuffer repeatCount = lengthSource != null ? 
IntBuffer.allocate(targetSize) : null; for (int step = 0; step < stepsCount; ++step) { - final OrderedKeys ok = - it.getNextOrderedKeysWithLength(valuesStepGetter.get()); + final OrderedKeys ok = it.getNextOrderedKeysWithLength(valuesStepGetter.get()); transferObject.fetchData(ok); transferObject.propagateChunkData(); if (lengthIndexIt != null) { - final IntChunk lenChunk = (IntChunk) lengthSource.getChunk( - lengthSourceContext, - lengthIndexIt.getNextOrderedKeysWithLength(rowStepGetter.get())); + final IntChunk lenChunk = (IntChunk) lengthSource.getChunk(lengthSourceContext, + lengthIndexIt.getNextOrderedKeysWithLength(rowStepGetter.get())); lenChunk.copyToTypedBuffer(0, repeatCount, 0, lenChunk.size()); repeatCount.limit(lenChunk.size()); - columnWriter.addVectorPage(bufferToWrite, repeatCount, - transferObject.rowCount(), nullValue); + columnWriter.addVectorPage(bufferToWrite, repeatCount, transferObject.rowCount(), + nullValue); repeatCount.clear(); } else if (supportNulls) { - columnWriter.addPage(bufferToWrite, nullValue, - transferObject.rowCount()); + columnWriter.addPage(bufferToWrite, nullValue, transferObject.rowCount()); } else { columnWriter.addPageNoNulls(bufferToWrite, transferObject.rowCount()); } @@ -636,11 +609,11 @@ private static int getTargetSize(Class columnType) throws IllegalAccessException private static TransferObject getDestinationBuffer( - final ColumnSource columnSource, - final ColumnDefinition columnDefinition, - final int targetSize, - final Class columnType, - final ParquetInstructions instructions) { + final ColumnSource columnSource, + final ColumnDefinition columnDefinition, + final int targetSize, + final Class columnType, + final ParquetInstructions instructions) { if (int.class.equals(columnType)) { int[] array = new int[targetSize]; WritableIntChunk chunk = WritableIntChunk.writableChunkWrap(array); @@ -652,13 +625,11 @@ private static TransferObject getDestinationBuffer( } else if (double.class.equals(columnType)) { 
double[] array = new double[targetSize]; WritableDoubleChunk chunk = WritableDoubleChunk.writableChunkWrap(array); - return new PrimitiveTransfer<>(columnSource, chunk, DoubleBuffer.wrap(array), - targetSize); + return new PrimitiveTransfer<>(columnSource, chunk, DoubleBuffer.wrap(array), targetSize); } else if (float.class.equals(columnType)) { float[] array = new float[targetSize]; WritableFloatChunk chunk = WritableFloatChunk.writableChunkWrap(array); - return new PrimitiveTransfer<>(columnSource, chunk, FloatBuffer.wrap(array), - targetSize); + return new PrimitiveTransfer<>(columnSource, chunk, FloatBuffer.wrap(array), targetSize); } else if (Boolean.class.equals(columnType)) { byte[] array = new byte[targetSize]; WritableByteChunk chunk = WritableByteChunk.writableChunkWrap(array); @@ -687,8 +658,7 @@ interface TransferObject extends Context { void fetchData(OrderedKeys ok); } - static class PrimitiveTransfer, B extends Buffer> - implements TransferObject { + static class PrimitiveTransfer, B extends Buffer> implements TransferObject { private final C chunk; private final B buffer; @@ -925,8 +895,7 @@ static class CodecTransfer implements TransferObject { public void propagateChunkData() { for (int i = 0; i < chunk.size(); i++) { Object value = chunk.get(i); - buffer[i] = - value == null ? null : Binary.fromConstantByteArray(codec.encode(value)); + buffer[i] = value == null ? 
null : Binary.fromConstantByteArray(codec.encode(value)); } } @@ -996,23 +965,21 @@ public long[] endPos() { private static Table groupingAsTable(Table tableToSave, String columnName) { - Map grouping = - tableToSave.getIndex().getGrouping(tableToSave.getColumnSource(columnName)); + Map grouping = tableToSave.getIndex().getGrouping(tableToSave.getColumnSource(columnName)); RangeCollector collector; - QueryScope.getScope().putParam("__range_collector_" + columnName + "__", - collector = new RangeCollector()); - Table firstOfTheKey = tableToSave.view(columnName) - .where("__range_collector_" + columnName + "__.next(" + columnName + ")"); + QueryScope.getScope().putParam("__range_collector_" + columnName + "__", collector = new RangeCollector()); + Table firstOfTheKey = + tableToSave.view(columnName).where("__range_collector_" + columnName + "__.next(" + columnName + ")"); Table contiguousOccurrences = firstOfTheKey.countBy("c", columnName).where("c != 1"); if (contiguousOccurrences.size() != 0) { throw new RuntimeException( - "Disk grouping is not possible for column because some indices are not contiguous"); + "Disk grouping is not possible for column because some indices are not contiguous"); } Object columnValues = firstOfTheKey.getColumn(columnName).getDirect(); collector.close(); return new InMemoryTable(new String[] {GROUPING_KEY, BEGIN_POS, END_POS}, - new Object[] {columnValues, collector.beginPos(), collector.endPos()}); + new Object[] {columnValues, collector.beginPos(), collector.endPos()}); } public static class SomeSillyTest implements Serializable { @@ -1026,8 +993,8 @@ public SomeSillyTest(int value) { @Override public String toString() { return "SomeSillyTest{" + - "value=" + value + - '}'; + "value=" + value + + '}'; } @Override @@ -1042,28 +1009,28 @@ public boolean equals(Object obj) { private static Table getTableFlat() { QueryLibrary.importClass(SomeSillyTest.class); return TableTools.emptyTable(10).select( - "someStringColumn = i % 10 == 
0?null:(`` + (i % 101))", - "nonNullString = `` + (i % 60)", - "someIntColumn = i", - "someNullableInts = i%5 != 0?i:null", - "someLongColumn = ii", - "someDoubleColumn = i*1.1", - "someFloatColumn = (float)(i*1.1)", - "someBoolColum = i % 3 == 0?true:i%3 == 1?false:null", - "someShortColumn = (short)i", - "someByteColumn = (byte)i", - "someCharColumn = (char)i", - "someTime = DBDateTime.now() + i", - "someKey = `` + (int)(i /100)", - "nullKey = i < -1?`123`:null", - "someSerializable = new SomeSillyTest(i)"); + "someStringColumn = i % 10 == 0?null:(`` + (i % 101))", + "nonNullString = `` + (i % 60)", + "someIntColumn = i", + "someNullableInts = i%5 != 0?i:null", + "someLongColumn = ii", + "someDoubleColumn = i*1.1", + "someFloatColumn = (float)(i*1.1)", + "someBoolColum = i % 3 == 0?true:i%3 == 1?false:null", + "someShortColumn = (short)i", + "someByteColumn = (byte)i", + "someCharColumn = (char)i", + "someTime = DBDateTime.now() + i", + "someKey = `` + (int)(i /100)", + "nullKey = i < -1?`123`:null", + "someSerializable = new SomeSillyTest(i)"); } private static Table getGroupedTable() { Table t = getTableFlat(); QueryLibrary.importClass(StringSetArrayWrapper.class); Table result = t.by("groupKey = i % 100 + (int)(i/10)") - .update("someStringSet = new StringSetArrayWrapper(nonNullString)"); + .update("someStringSet = new StringSetArrayWrapper(nonNullString)"); TableTools.show(result); return result; } diff --git a/DB/src/main/java/io/deephaven/db/v2/parquet/TypeInfos.java b/DB/src/main/java/io/deephaven/db/v2/parquet/TypeInfos.java index b33067eb23a..42440b70e55 100644 --- a/DB/src/main/java/io/deephaven/db/v2/parquet/TypeInfos.java +++ b/DB/src/main/java/io/deephaven/db/v2/parquet/TypeInfos.java @@ -20,8 +20,8 @@ import java.util.*; /** - * Contains the necessary information to convert a Deephaven table into a Parquet table. Both the - * schema translation, and the data translation. 
+ * Contains the necessary information to convert a Deephaven table into a Parquet table. Both the schema translation, + * and the data translation. */ class TypeInfos { @@ -55,10 +55,10 @@ private static Optional lookupTypeInfo(@NotNull final Class clazz) } private static TypeInfo lookupTypeInfo( - @NotNull final ColumnDefinition column, - @NotNull final ParquetInstructions instructions) { + @NotNull final ColumnDefinition column, + @NotNull final ParquetInstructions instructions) { if (CodecLookup.codecRequired(column) - || CodecLookup.explicitCodecPresent(instructions.getCodecName(column.getName()))) { + || CodecLookup.explicitCodecPresent(instructions.getCodecName(column.getName()))) { return new CodecType<>(); } final Class componentType = column.getComponentType(); @@ -73,14 +73,13 @@ private static TypeInfo lookupTypeInfo( } static Pair getCodecAndArgs( - @NotNull final ColumnDefinition columnDefinition, - @NotNull final ParquetInstructions instructions) { + @NotNull final ColumnDefinition columnDefinition, + @NotNull final ParquetInstructions instructions) { // Explicit codecs always take precedence final String colName = columnDefinition.getName(); final String codecNameFromInstructions = instructions.getCodecName(colName); if (CodecLookup.explicitCodecPresent(codecNameFromInstructions)) { - return new ImmutablePair<>(codecNameFromInstructions, - instructions.getCodecArgs(colName)); + return new ImmutablePair<>(codecNameFromInstructions, instructions.getCodecArgs(colName)); } // No need to impute a codec for any basic formats we already understand if (!CodecLookup.codecRequired(columnDefinition)) { @@ -95,8 +94,8 @@ static Pair getCodecAndArgs( } static TypeInfo getTypeInfo( - @NotNull final ColumnDefinition column, - @NotNull final ParquetInstructions instructions) { + @NotNull final ColumnDefinition column, + @NotNull final ParquetInstructions instructions) { return lookupTypeInfo(column, instructions); } @@ -104,17 +103,15 @@ private static boolean 
isRequired(ColumnDefinition columnDefinition) { return false;// TODO change this when adding optionals support } - private static PrimitiveBuilder type(PrimitiveTypeName type, boolean required, - boolean repeating) { - return repeating ? Types.repeated(type) - : (required ? Types.required(type) : Types.optional(type)); + private static PrimitiveBuilder type(PrimitiveTypeName type, boolean required, boolean repeating) { + return repeating ? Types.repeated(type) : (required ? Types.required(type) : Types.optional(type)); } private enum IntType implements TypeInfo { INSTANCE; private static final Set> clazzes = Collections - .unmodifiableSet(new HashSet<>(Arrays.asList(int.class, Integer.class))); + .unmodifiableSet(new HashSet<>(Arrays.asList(int.class, Integer.class))); @Override public Set> getTypes() { @@ -122,13 +119,11 @@ public Set> getTypes() { } @Override - public PrimitiveBuilder getBuilder(boolean required, boolean repeating, - Class dataType) { + public PrimitiveBuilder getBuilder(boolean required, boolean repeating, Class dataType) { if (!isValidFor(dataType)) { throw new IllegalArgumentException("Invalid data type " + dataType); } - return type(PrimitiveTypeName.INT32, required, repeating) - .as(LogicalTypeAnnotation.intType(32, true)); + return type(PrimitiveTypeName.INT32, required, repeating).as(LogicalTypeAnnotation.intType(32, true)); } } @@ -136,7 +131,7 @@ private enum LongType implements TypeInfo { INSTANCE; private static final Set> clazzes = Collections - .unmodifiableSet(new HashSet<>(Arrays.asList(long.class, Long.class))); + .unmodifiableSet(new HashSet<>(Arrays.asList(long.class, Long.class))); @Override public Set> getTypes() { @@ -144,8 +139,7 @@ public Set> getTypes() { } @Override - public PrimitiveBuilder getBuilder(boolean required, boolean repeating, - Class dataType) { + public PrimitiveBuilder getBuilder(boolean required, boolean repeating, Class dataType) { if (!isValidFor(dataType)) { throw new IllegalArgumentException("Invalid 
data type " + dataType); } @@ -157,7 +151,7 @@ private enum ShortType implements TypeInfo { INSTANCE; private static final Set> clazzes = Collections - .unmodifiableSet(new HashSet<>(Arrays.asList(short.class, Short.class))); + .unmodifiableSet(new HashSet<>(Arrays.asList(short.class, Short.class))); @Override public Set> getTypes() { @@ -165,13 +159,11 @@ public Set> getTypes() { } @Override - public PrimitiveBuilder getBuilder(boolean required, boolean repeating, - Class dataType) { + public PrimitiveBuilder getBuilder(boolean required, boolean repeating, Class dataType) { if (!isValidFor(dataType)) { throw new IllegalArgumentException("Invalid data type " + dataType); } - return type(PrimitiveTypeName.INT32, required, repeating) - .as(LogicalTypeAnnotation.intType(16, true)); + return type(PrimitiveTypeName.INT32, required, repeating).as(LogicalTypeAnnotation.intType(16, true)); } } @@ -179,7 +171,7 @@ private enum BooleanType implements TypeInfo { INSTANCE; private static final Set> clazzes = Collections - .unmodifiableSet(new HashSet<>(Arrays.asList(boolean.class, Boolean.class))); + .unmodifiableSet(new HashSet<>(Arrays.asList(boolean.class, Boolean.class))); @Override public Set> getTypes() { @@ -187,8 +179,7 @@ public Set> getTypes() { } @Override - public PrimitiveBuilder getBuilder(boolean required, boolean repeating, - Class dataType) { + public PrimitiveBuilder getBuilder(boolean required, boolean repeating, Class dataType) { if (!isValidFor(dataType)) { throw new IllegalArgumentException("Invalid data type " + dataType); } @@ -200,7 +191,7 @@ private enum FloatType implements TypeInfo { INSTANCE; private static final Set> clazzes = Collections - .unmodifiableSet(new HashSet<>(Arrays.asList(float.class, Float.class))); + .unmodifiableSet(new HashSet<>(Arrays.asList(float.class, Float.class))); @Override public Set> getTypes() { @@ -208,8 +199,7 @@ public Set> getTypes() { } @Override - public PrimitiveBuilder getBuilder(boolean required, boolean 
repeating, - Class dataType) { + public PrimitiveBuilder getBuilder(boolean required, boolean repeating, Class dataType) { if (!isValidFor(dataType)) { throw new IllegalArgumentException("Invalid data type " + dataType); } @@ -221,7 +211,7 @@ private enum DoubleType implements TypeInfo { INSTANCE; private static final Set> clazzes = Collections - .unmodifiableSet(new HashSet<>(Arrays.asList(double.class, Double.class))); + .unmodifiableSet(new HashSet<>(Arrays.asList(double.class, Double.class))); @Override public Set> getTypes() { @@ -229,8 +219,7 @@ public Set> getTypes() { } @Override - public PrimitiveBuilder getBuilder(boolean required, boolean repeating, - Class dataType) { + public PrimitiveBuilder getBuilder(boolean required, boolean repeating, Class dataType) { if (!isValidFor(dataType)) { throw new IllegalArgumentException("Invalid data type " + dataType); } @@ -242,7 +231,7 @@ private enum CharType implements TypeInfo { INSTANCE; private static final Set> clazzes = Collections - .unmodifiableSet(new HashSet<>(Arrays.asList(char.class, Character.class))); + .unmodifiableSet(new HashSet<>(Arrays.asList(char.class, Character.class))); @Override public Set> getTypes() { @@ -250,13 +239,11 @@ public Set> getTypes() { } @Override - public PrimitiveBuilder getBuilder(boolean required, boolean repeating, - Class dataType) { + public PrimitiveBuilder getBuilder(boolean required, boolean repeating, Class dataType) { if (!isValidFor(dataType)) { throw new IllegalArgumentException("Invalid data type " + dataType); } - return type(PrimitiveTypeName.INT32, required, repeating) - .as(LogicalTypeAnnotation.intType(16, false)); + return type(PrimitiveTypeName.INT32, required, repeating).as(LogicalTypeAnnotation.intType(16, false)); } } @@ -264,7 +251,7 @@ private enum ByteType implements TypeInfo { INSTANCE; private static final Set> clazzes = Collections - .unmodifiableSet(new HashSet<>(Arrays.asList(byte.class, Byte.class))); + .unmodifiableSet(new 
HashSet<>(Arrays.asList(byte.class, Byte.class))); @Override public Set> getTypes() { @@ -272,13 +259,11 @@ public Set> getTypes() { } @Override - public PrimitiveBuilder getBuilder(boolean required, boolean repeating, - Class dataType) { + public PrimitiveBuilder getBuilder(boolean required, boolean repeating, Class dataType) { if (!isValidFor(dataType)) { throw new IllegalArgumentException("Invalid data type " + dataType); } - return type(PrimitiveTypeName.INT32, required, repeating) - .as(LogicalTypeAnnotation.intType(8, true)); + return type(PrimitiveTypeName.INT32, required, repeating).as(LogicalTypeAnnotation.intType(8, true)); } } @@ -293,19 +278,17 @@ public Set> getTypes() { } @Override - public PrimitiveBuilder getBuilder(boolean required, boolean repeating, - Class dataType) { + public PrimitiveBuilder getBuilder(boolean required, boolean repeating, Class dataType) { if (!isValidFor(dataType)) { throw new IllegalArgumentException("Invalid data type " + dataType); } return type(PrimitiveTypeName.BINARY, required, repeating) - .as(LogicalTypeAnnotation.stringType()); + .as(LogicalTypeAnnotation.stringType()); } } /** - * TODO: newer versions of parquet seem to support NANOS, but this version seems to only support - * MICROS + * TODO: newer versions of parquet seem to support NANOS, but this version seems to only support MICROS */ private enum DBDateTimeType implements TypeInfo { INSTANCE; @@ -318,14 +301,12 @@ public Set> getTypes() { } @Override - public PrimitiveBuilder getBuilder(boolean required, boolean repeating, - Class dataType) { + public PrimitiveBuilder getBuilder(boolean required, boolean repeating, Class dataType) { if (!isValidFor(dataType)) { throw new IllegalArgumentException("Invalid data type " + dataType); } return type(PrimitiveTypeName.INT64, required, repeating) - .as(LogicalTypeAnnotation.timestampType(true, - LogicalTypeAnnotation.TimeUnit.NANOS)); + .as(LogicalTypeAnnotation.timestampType(true, 
LogicalTypeAnnotation.TimeUnit.NANOS)); } } @@ -338,16 +319,15 @@ default boolean isValidFor(Class clazz) { } default Type createSchemaType( - @NotNull final ColumnDefinition columnDefinition, - @NotNull final ParquetInstructions instructions) { + @NotNull final ColumnDefinition columnDefinition, + @NotNull final ParquetInstructions instructions) { final Class dataType = columnDefinition.getDataType(); final Class componentType = columnDefinition.getComponentType(); final PrimitiveBuilder builder; final boolean isRepeating; - if (CodecLookup - .explicitCodecPresent(instructions.getCodecName(columnDefinition.getName())) - || CodecLookup.codecRequired(columnDefinition)) { + if (CodecLookup.explicitCodecPresent(instructions.getCodecName(columnDefinition.getName())) + || CodecLookup.codecRequired(columnDefinition)) { builder = getBuilder(isRequired(columnDefinition), false, dataType); isRepeating = false; } else if (componentType != null) { @@ -364,13 +344,12 @@ default Type createSchemaType( return builder.named(columnDefinition.getName()); } return Types.buildGroup(Type.Repetition.OPTIONAL).addField( - Types.buildGroup(Type.Repetition.REPEATED).addField( - builder.named("item")).named(columnDefinition.getName())) - .as(LogicalTypeAnnotation.listType()).named(columnDefinition.getName()); + Types.buildGroup(Type.Repetition.REPEATED).addField( + builder.named("item")).named(columnDefinition.getName())) + .as(LogicalTypeAnnotation.listType()).named(columnDefinition.getName()); } - PrimitiveBuilder getBuilder(boolean required, boolean repeating, - Class dataType); + PrimitiveBuilder getBuilder(boolean required, boolean repeating, Class dataType); } private static class CodecType implements TypeInfo { @@ -383,8 +362,7 @@ public Set> getTypes() { } @Override - public PrimitiveBuilder getBuilder(boolean required, boolean repeating, - Class dataType) { + public PrimitiveBuilder getBuilder(boolean required, boolean repeating, Class dataType) { return 
type(PrimitiveTypeName.BINARY, required, repeating); } diff --git a/DB/src/main/java/io/deephaven/db/v2/parquet/metadata/CodecInfo.java b/DB/src/main/java/io/deephaven/db/v2/parquet/metadata/CodecInfo.java index c36e99e4348..ac80b74d2b6 100644 --- a/DB/src/main/java/io/deephaven/db/v2/parquet/metadata/CodecInfo.java +++ b/DB/src/main/java/io/deephaven/db/v2/parquet/metadata/CodecInfo.java @@ -9,8 +9,7 @@ import java.util.Optional; /** - * Representation class for codec information stored in key-value metadata for Deephaven-written - * Parquet files. + * Representation class for codec information stored in key-value metadata for Deephaven-written Parquet files. */ @Value.Immutable @BuildableStyle diff --git a/DB/src/main/java/io/deephaven/db/v2/parquet/metadata/ColumnTypeInfo.java b/DB/src/main/java/io/deephaven/db/v2/parquet/metadata/ColumnTypeInfo.java index dc1cfd1d3d9..ae6cdb6efc0 100644 --- a/DB/src/main/java/io/deephaven/db/v2/parquet/metadata/ColumnTypeInfo.java +++ b/DB/src/main/java/io/deephaven/db/v2/parquet/metadata/ColumnTypeInfo.java @@ -10,8 +10,8 @@ /** *

    - * Representation class for per-column type information stored in key-value metadata for - * Deephaven-written Parquet files. + * Representation class for per-column type information stored in key-value metadata for Deephaven-written Parquet + * files. *

    * Currently only used for columns requiring non-default deserialization or type selection */ diff --git a/DB/src/main/java/io/deephaven/db/v2/parquet/metadata/GroupingColumnInfo.java b/DB/src/main/java/io/deephaven/db/v2/parquet/metadata/GroupingColumnInfo.java index 863cfbcb56e..bbdaa601621 100644 --- a/DB/src/main/java/io/deephaven/db/v2/parquet/metadata/GroupingColumnInfo.java +++ b/DB/src/main/java/io/deephaven/db/v2/parquet/metadata/GroupingColumnInfo.java @@ -7,8 +7,8 @@ import org.immutables.value.Value; /** - * Representation class for grouping column information stored in key-value metadata for - * Deephaven-written Parquet files. + * Representation class for grouping column information stored in key-value metadata for Deephaven-written Parquet + * files. */ @Value.Immutable @SimpleStyle diff --git a/DB/src/main/java/io/deephaven/db/v2/parquet/metadata/TableInfo.java b/DB/src/main/java/io/deephaven/db/v2/parquet/metadata/TableInfo.java index 55927b3388d..57dfd34846e 100644 --- a/DB/src/main/java/io/deephaven/db/v2/parquet/metadata/TableInfo.java +++ b/DB/src/main/java/io/deephaven/db/v2/parquet/metadata/TableInfo.java @@ -17,8 +17,7 @@ import java.util.stream.Collectors; /** - * Representation class for per-table information stored in key-value metadata for Deephaven-written - * Parquet files. + * Representation class for per-table information stored in key-value metadata for Deephaven-written Parquet files. 
*/ @Value.Immutable @BuildableStyle @@ -39,24 +38,21 @@ public final String serializeToJSON() throws JsonProcessingException { return OBJECT_MAPPER.writeValueAsString(this); } - public static TableInfo deserializeFromJSON(@NotNull final String tableInfoRaw) - throws JsonProcessingException { + public static TableInfo deserializeFromJSON(@NotNull final String tableInfoRaw) throws JsonProcessingException { return OBJECT_MAPPER.readValue(tableInfoRaw, ImmutableTableInfo.class); } public final Set groupingColumnNames() { - return groupingColumns().stream().map(GroupingColumnInfo::columnName) - .collect(Collectors.toSet()); + return groupingColumns().stream().map(GroupingColumnInfo::columnName).collect(Collectors.toSet()); } public final Map groupingColumnMap() { return groupingColumns().stream() - .collect(Collectors.toMap(GroupingColumnInfo::columnName, Function.identity())); + .collect(Collectors.toMap(GroupingColumnInfo::columnName, Function.identity())); } public final Map columnTypeMap() { - return columnTypes().stream() - .collect(Collectors.toMap(ColumnTypeInfo::columnName, Function.identity())); + return columnTypes().stream().collect(Collectors.toMap(ColumnTypeInfo::columnName, Function.identity())); } /** @@ -73,8 +69,8 @@ public String version() { public abstract List groupingColumns(); /** - * @return List of {@link ColumnTypeInfo column types} for columns requiring non-default - * deserialization or type selection + * @return List of {@link ColumnTypeInfo column types} for columns requiring non-default deserialization or type + * selection */ public abstract List columnTypes(); diff --git a/DB/src/main/java/io/deephaven/db/v2/remote/ConstructSnapshot.java b/DB/src/main/java/io/deephaven/db/v2/remote/ConstructSnapshot.java index c13294a2b79..42c966db609 100644 --- a/DB/src/main/java/io/deephaven/db/v2/remote/ConstructSnapshot.java +++ b/DB/src/main/java/io/deephaven/db/v2/remote/ConstructSnapshot.java @@ -49,8 +49,8 @@ import static 
io.deephaven.db.v2.sources.chunk.Attributes.Values; /** - * A Set of static utilities for computing values from a table while avoiding the use of the LTM - * lock. This class supports snapshots in both position space and key space. + * A Set of static utilities for computing values from a table while avoiding the use of the LTM lock. This class + * supports snapshots in both position space and key space. */ public class ConstructSnapshot { @@ -64,24 +64,22 @@ public NoSnapshotAllowedException(String reason) { } } - private static final io.deephaven.io.logger.Logger log = - LoggerFactory.getLogger(InstrumentedListener.class); + private static final io.deephaven.io.logger.Logger log = LoggerFactory.getLogger(InstrumentedListener.class); /** - * The maximum number of allowed attempts to construct a snapshot concurrently with - * {@link LiveTableMonitor} refresh processing. After this many attempts, we fall back and wait - * until we can block refreshes. + * The maximum number of allowed attempts to construct a snapshot concurrently with {@link LiveTableMonitor} refresh + * processing. After this many attempts, we fall back and wait until we can block refreshes. */ - private static final int MAX_CONCURRENT_ATTEMPTS = Configuration.getInstance() - .getIntegerWithDefault("ConstructSnapshot.maxConcurrentAttempts", 2); + private static final int MAX_CONCURRENT_ATTEMPTS = + Configuration.getInstance().getIntegerWithDefault("ConstructSnapshot.maxConcurrentAttempts", 2); /** - * The maximum duration of an attempt to construct a snapshot concurrently with - * {@link LiveTableMonitor} refresh processing. If an unsuccessful attempt takes longer than - * this timeout, we will fall back and wait until we can block refreshes. + * The maximum duration of an attempt to construct a snapshot concurrently with {@link LiveTableMonitor} refresh + * processing. If an unsuccessful attempt takes longer than this timeout, we will fall back and wait until we can + * block refreshes. 
*/ private static final int MAX_CONCURRENT_ATTEMPT_DURATION_MILLIS = Configuration.getInstance() - .getIntegerWithDefault("ConstructSnapshot.maxConcurrentAttemptDurationMillis", 5000); + .getIntegerWithDefault("ConstructSnapshot.maxConcurrentAttemptDurationMillis", 5000); /** * Holder for thread-local state. @@ -123,8 +121,8 @@ private static class ConcurrentAttemptParameters { private final boolean usingPreviousValues; private ConcurrentAttemptParameters(@NotNull final SnapshotControl control, - final long beforeClockValue, - final boolean usingPreviousValues) { + final long beforeClockValue, + final boolean usingPreviousValues) { this.control = control; this.beforeClockValue = beforeClockValue; this.usingPreviousValues = usingPreviousValues; @@ -132,23 +130,21 @@ private ConcurrentAttemptParameters(@NotNull final SnapshotControl control, } /** - * {@link ConcurrentAttemptParameters Per-snapshot attempt parameters} for the lowest-depth - * concurrent snapshot on this thread. {@code null} if there are no concurrent snapshots in - * progress at any depth on this thread. + * {@link ConcurrentAttemptParameters Per-snapshot attempt parameters} for the lowest-depth concurrent snapshot + * on this thread. {@code null} if there are no concurrent snapshots in progress at any depth on this thread. */ private ConcurrentAttemptParameters activeConcurrentAttempt; /** - * The depth of nested concurrent snapshots. Used to avoid releasing the LTM lock if it's - * acquired by a nested snapshot. Zero if there are no concurrent snapshots in progress. + * The depth of nested concurrent snapshots. Used to avoid releasing the LTM lock if it's acquired by a nested + * snapshot. Zero if there are no concurrent snapshots in progress. */ private int concurrentSnapshotDepth; /** - * The depth of nested locked snapshots. Used to treat nested locked snapshots as - * non-concurrent for purposes of consistency checks and to avoid releasing the LTM lock if - * it's acquired by a nested snapshot. 
Zero if there are no concurrent snapshots in - * progress. + * The depth of nested locked snapshots. Used to treat nested locked snapshots as non-concurrent for purposes of + * consistency checks and to avoid releasing the LTM lock if it's acquired by a nested snapshot. Zero if there + * are no concurrent snapshots in progress. */ private int lockedSnapshotDepth; @@ -163,8 +159,8 @@ private ConcurrentAttemptParameters(@NotNull final SnapshotControl control, private long lastObservedClockValue; /** - * Called before starting a concurrent snapshot in order to increase depth and record - * per-snapshot attempt parameters. + * Called before starting a concurrent snapshot in order to increase depth and record per-snapshot attempt + * parameters. * * @param control The {@link SnapshotControl control} to record * @param beforeClockValue The "before" clock value to record @@ -173,20 +169,19 @@ private ConcurrentAttemptParameters(@NotNull final SnapshotControl control, * {@link #endConcurrentSnapshot(Object)} */ private Object startConcurrentSnapshot(@NotNull final SnapshotControl control, - final long beforeClockValue, - final boolean usingPreviousValues) { + final long beforeClockValue, + final boolean usingPreviousValues) { Assert.assertion(!locked() && !acquiredLock, "!locked() && !acquiredLock"); final Object enclosingAttemptState = activeConcurrentAttempt; - activeConcurrentAttempt = - new ConcurrentAttemptParameters(control, beforeClockValue, usingPreviousValues); + activeConcurrentAttempt = new ConcurrentAttemptParameters(control, beforeClockValue, usingPreviousValues); ++concurrentSnapshotDepth; lastObservedClockValue = beforeClockValue; return enclosingAttemptState; } /** - * Called after finishing a concurrent snapshot attempt in order to record the decrease in - * depth and restore the enclosing attempt's parameters. 
+ * Called after finishing a concurrent snapshot attempt in order to record the decrease in depth and restore the + * enclosing attempt's parameters. * * @param enclosingAttemptParameters The opaque state object returned from * {@link #startConcurrentSnapshot(SnapshotControl, long, boolean)} @@ -197,8 +192,7 @@ private void endConcurrentSnapshot(final Object enclosingAttemptParameters) { } /** - * Called before starting a locked snapshot in order to increase depth and acquire the LTM - * lock if needed. + * Called before starting a locked snapshot in order to increase depth and acquire the LTM lock if needed. */ private void startLockedSnapshot() { ++lockedSnapshotDepth; @@ -206,8 +200,7 @@ private void startLockedSnapshot() { } /** - * Called after finishing a concurrent snapshot in order to decrease depth and release the - * LTM lock if needed. + * Called after finishing a concurrent snapshot in order to decrease depth and release the LTM lock if needed. */ private void endLockedSnapshot() { --lockedSnapshotDepth; @@ -224,37 +217,35 @@ private boolean concurrentAttemptActive() { } /** - * Test that determines whether the currently-active concurrent snapshot attempt has become - * inconsistent. Always returns {@code false} if there is no snapshot attempt active, or if - * there is a locked attempt active (necessarily at lower depth than the lowest concurrent - * attempt). + * Test that determines whether the currently-active concurrent snapshot attempt has become inconsistent. Always + * returns {@code false} if there is no snapshot attempt active, or if there is a locked attempt active + * (necessarily at lower depth than the lowest concurrent attempt). 
* - * @return Whether the clock or sources have changed in such a way as to make the - * currently-active concurrent snapshot attempt inconsistent + * @return Whether the clock or sources have changed in such a way as to make the currently-active concurrent + * snapshot attempt inconsistent */ private boolean concurrentAttemptInconsistent() { if (!concurrentAttemptActive()) { return false; } if (!clockConsistent( - activeConcurrentAttempt.beforeClockValue, - lastObservedClockValue = LogicalClock.DEFAULT.currentValue(), - activeConcurrentAttempt.usingPreviousValues)) { + activeConcurrentAttempt.beforeClockValue, + lastObservedClockValue = LogicalClock.DEFAULT.currentValue(), + activeConcurrentAttempt.usingPreviousValues)) { return true; } return !activeConcurrentAttempt.control.snapshotConsistent( - lastObservedClockValue, - activeConcurrentAttempt.usingPreviousValues); + lastObservedClockValue, + activeConcurrentAttempt.usingPreviousValues); } /** - * Check that fails if the currently-active concurrent snapshot attempt has become - * inconsistent. source. This is a no-op if there is no snapshot attempt active, or if there - * is a locked attempt active (necessarily at lower depth than the lowest concurrent - * attempt). + * Check that fails if the currently-active concurrent snapshot attempt has become inconsistent. source. This is + * a no-op if there is no snapshot attempt active, or if there is a locked attempt active (necessarily at lower + * depth than the lowest concurrent attempt). 
* - * @throws SnapshotInconsistentException If the currently-active concurrent snapshot attempt - * has become inconsistent + * @throws SnapshotInconsistentException If the currently-active concurrent snapshot attempt has become + * inconsistent */ private void failIfConcurrentAttemptInconsistent() { if (concurrentAttemptInconsistent()) @@ -262,30 +253,26 @@ private void failIfConcurrentAttemptInconsistent() { } /** - * Wait for a dependency to become satisfied on the current cycle if we're trying to use - * current values for the currently-active concurrent snapshot attempt. This is a no-op if - * there is no snapshot attempt active, or if there is a locked attempt active (necessarily - * at lower depth than the lowest concurrent attempt). + * Wait for a dependency to become satisfied on the current cycle if we're trying to use current values for the + * currently-active concurrent snapshot attempt. This is a no-op if there is no snapshot attempt active, or if + * there is a locked attempt active (necessarily at lower depth than the lowest concurrent attempt). 
* - * @param dependency The dependency, which may be null in order to avoid redundant checks in - * calling code - * @throws SnapshotInconsistentException If we cannot wait for this dependency on the - * current step because the step changed + * @param dependency The dependency, which may be null in order to avoid redundant checks in calling code + * @throws SnapshotInconsistentException If we cannot wait for this dependency on the current step because the + * step changed */ - private void maybeWaitForSatisfaction( - @Nullable final NotificationQueue.Dependency dependency) { + private void maybeWaitForSatisfaction(@Nullable final NotificationQueue.Dependency dependency) { if (!concurrentAttemptActive() - || dependency == null - || activeConcurrentAttempt.usingPreviousValues - || LogicalClock.getState( - activeConcurrentAttempt.beforeClockValue) == LogicalClock.State.Idle) { + || dependency == null + || activeConcurrentAttempt.usingPreviousValues + || LogicalClock.getState(activeConcurrentAttempt.beforeClockValue) == LogicalClock.State.Idle) { // No cycle or dependency to wait for. return; } final long beforeStep = LogicalClock.getStep(activeConcurrentAttempt.beforeClockValue); // Wait for satisfaction if necessary if (dependency.satisfied(beforeStep) - || WaitNotification.waitForSatisfaction(beforeStep, dependency)) { + || WaitNotification.waitForSatisfaction(beforeStep, dependency)) { return; } lastObservedClockValue = LogicalClock.DEFAULT.currentValue(); @@ -296,8 +283,8 @@ private void maybeWaitForSatisfaction( } /** - * Return the currently-active concurrent snapshot attempt's "before" clock value, or zero - * if there is no concurrent attempt active. + * Return the currently-active concurrent snapshot attempt's "before" clock value, or zero if there is no + * concurrent attempt active. 
* * @return The concurrent snapshot attempt's "before" clock value, or zero */ @@ -314,10 +301,8 @@ private long getConcurrentAttemptClockValue() { private LogOutput appendConcurrentAttemptClockInfo(@NotNull final LogOutput logOutput) { logOutput.append("concurrent snapshot state: "); if (concurrentAttemptActive()) { - logOutput.append("active, beforeClockValue=") - .append(activeConcurrentAttempt.beforeClockValue) - .append(", usingPreviousValues=") - .append(activeConcurrentAttempt.usingPreviousValues); + logOutput.append("active, beforeClockValue=").append(activeConcurrentAttempt.beforeClockValue) + .append(", usingPreviousValues=").append(activeConcurrentAttempt.usingPreviousValues); } else { logOutput.append("inactive"); } @@ -331,7 +316,7 @@ private LogOutput appendConcurrentAttemptClockInfo(@NotNull final LogOutput logO */ private boolean locked() { return LiveTableMonitor.DEFAULT.sharedLock().isHeldByCurrentThread() - || LiveTableMonitor.DEFAULT.exclusiveLock().isHeldByCurrentThread(); + || LiveTableMonitor.DEFAULT.exclusiveLock().isHeldByCurrentThread(); } /** @@ -363,10 +348,9 @@ public static class SnapshotInconsistentException extends UncheckedDeephavenExce } /** - * Test whether the logical clock has remained sufficiently consistent to allow a snapshot - * function to succeed. Note that this is a necessary but not sufficient test in most cases; - * this test is invoked (in short-circuit fashion) before invoking @link - * SnapshotControl#snapshotConsistent(long, boolean)} (long, boolean)}. + * Test whether the logical clock has remained sufficiently consistent to allow a snapshot function to succeed. Note + * that this is a necessary but not sufficient test in most cases; this test is invoked (in short-circuit fashion) + * before invoking @link SnapshotControl#snapshotConsistent(long, boolean)} (long, boolean)}. 
* * @param beforeClockValue The clock value from before the snapshot was attempted * @param afterClockValue The clock value from after the snapshot was attempted @@ -374,58 +358,51 @@ public static class SnapshotInconsistentException extends UncheckedDeephavenExce * @return Whether the snapshot succeeded (based on clock changes) */ private static boolean clockConsistent(final long beforeClockValue, final long afterClockValue, - final boolean usedPrev) { - final boolean stepSame = - LogicalClock.getStep(beforeClockValue) == LogicalClock.getStep(afterClockValue); - final boolean stateSame = - LogicalClock.getState(beforeClockValue) == LogicalClock.getState(afterClockValue); + final boolean usedPrev) { + final boolean stepSame = LogicalClock.getStep(beforeClockValue) == LogicalClock.getStep(afterClockValue); + final boolean stateSame = LogicalClock.getState(beforeClockValue) == LogicalClock.getState(afterClockValue); return stepSame && (stateSame || !usedPrev); } /** - * Test that determines whether the currently-active concurrent snapshot attempt has become - * inconsistent. Always returns {@code false} if there is no snapshot attempt active, or if - * there is a locked attempt active (necessarily at lower depth than the lowest concurrent - * attempt). + * Test that determines whether the currently-active concurrent snapshot attempt has become inconsistent. Always + * returns {@code false} if there is no snapshot attempt active, or if there is a locked attempt active (necessarily + * at lower depth than the lowest concurrent attempt). 
* - * @return Whether the clock or sources have changed in such a way as to make the - * currently-active concurrent snapshot attempt inconsistent + * @return Whether the clock or sources have changed in such a way as to make the currently-active concurrent + * snapshot attempt inconsistent */ public static boolean concurrentAttemptInconsistent() { return State.get().concurrentAttemptInconsistent(); } /** - * Check that fails if the currently-active concurrent snapshot attempt has become inconsistent. - * source. This is a no-op if there is no snapshot attempt active, or if there is a locked - * attempt active (necessarily at lower depth than the lowest concurrent attempt). + * Check that fails if the currently-active concurrent snapshot attempt has become inconsistent. source. This is a + * no-op if there is no snapshot attempt active, or if there is a locked attempt active (necessarily at lower depth + * than the lowest concurrent attempt). * - * @throws SnapshotInconsistentException If the currently-active concurrent snapshot attempt has - * become inconsistent + * @throws SnapshotInconsistentException If the currently-active concurrent snapshot attempt has become inconsistent */ public static void failIfConcurrentAttemptInconsistent() { State.get().failIfConcurrentAttemptInconsistent(); } /** - * Wait for a dependency to become satisfied on the current cycle if we're trying to use current - * values for the currently-active concurrent snapshot attempt. This is a no-op if there is no - * snapshot attempt active, or if there is a locked attempt active (necessarily at lower depth - * than the lowest concurrent attempt). + * Wait for a dependency to become satisfied on the current cycle if we're trying to use current values for the + * currently-active concurrent snapshot attempt. This is a no-op if there is no snapshot attempt active, or if there + * is a locked attempt active (necessarily at lower depth than the lowest concurrent attempt). 
* - * @param dependency The dependency, which may be null in order to avoid redundant checks in - * calling code - * @throws SnapshotInconsistentException If we cannot wait for this dependency on the current - * step because the step changed + * @param dependency The dependency, which may be null in order to avoid redundant checks in calling code + * @throws SnapshotInconsistentException If we cannot wait for this dependency on the current step because the step + * changed */ - public static void maybeWaitForSatisfaction( - @Nullable final NotificationQueue.Dependency dependency) { + public static void maybeWaitForSatisfaction(@Nullable final NotificationQueue.Dependency dependency) { State.get().maybeWaitForSatisfaction(dependency); } /** - * Return the currently-active concurrent snapshot attempt's "before" clock value, or zero if - * there is no concurrent attempt active. + * Return the currently-active concurrent snapshot attempt's "before" clock value, or zero if there is no concurrent + * attempt active. * * @return The concurrent snapshot attempt's "before" clock value, or zero */ @@ -445,23 +422,22 @@ public static LogOutput appendConcurrentAttemptClockInfo(@NotNull final LogOutpu } /** - * Create a {@link InitialSnapshot snapshot} of the entire table specified. Note that this - * method is notification-oblivious, i.e. it makes no attempt to ensure that notifications are - * not missed. + * Create a {@link InitialSnapshot snapshot} of the entire table specified. Note that this method is + * notification-oblivious, i.e. it makes no attempt to ensure that notifications are not missed. * * @param logIdentityObject An object used to prepend to log rows. * @param table the table to snapshot. * @return a snapshot of the entire base table. 
*/ public static InitialSnapshot constructInitialSnapshot(final Object logIdentityObject, - @NotNull final BaseTable table) { + @NotNull final BaseTable table) { return constructInitialSnapshot(logIdentityObject, table, null, null); } /** - * Create a {@link InitialSnapshot snapshot} of the specified table using a set of requested - * columns and keys. Note that this method uses an index that is in Key space, and that it is - * notification-oblivious, i.e. it makes no attempt to ensure that notifications are not missed. + * Create a {@link InitialSnapshot snapshot} of the specified table using a set of requested columns and keys. Note + * that this method uses an index that is in Key space, and that it is notification-oblivious, i.e. it makes no + * attempt to ensure that notifications are not missed. * * @param logIdentityObject An object used to prepend to log rows. * @param table the table to snapshot. @@ -470,35 +446,32 @@ public static InitialSnapshot constructInitialSnapshot(final Object logIdentityO * @return a snapshot of the entire base table. 
*/ public static InitialSnapshot constructInitialSnapshot(final Object logIdentityObject, - @NotNull final BaseTable table, - @Nullable final BitSet columnsToSerialize, - @Nullable final Index keysToSnapshot) { - return constructInitialSnapshot(logIdentityObject, table, columnsToSerialize, - keysToSnapshot, makeSnapshotControl(false, table)); + @NotNull final BaseTable table, + @Nullable final BitSet columnsToSerialize, + @Nullable final Index keysToSnapshot) { + return constructInitialSnapshot(logIdentityObject, table, columnsToSerialize, keysToSnapshot, + makeSnapshotControl(false, table)); } static InitialSnapshot constructInitialSnapshot(final Object logIdentityObject, - @NotNull final BaseTable table, - @Nullable final BitSet columnsToSerialize, - @Nullable final Index keysToSnapshot, - @NotNull final SnapshotControl control) { + @NotNull final BaseTable table, + @Nullable final BitSet columnsToSerialize, + @Nullable final Index keysToSnapshot, + @NotNull final SnapshotControl control) { final InitialSnapshot snapshot = new InitialSnapshot(); - final SnapshotFunction doSnapshot = - (usePrev, beforeClockValue) -> serializeAllTable(usePrev, snapshot, table, + final SnapshotFunction doSnapshot = (usePrev, beforeClockValue) -> serializeAllTable(usePrev, snapshot, table, logIdentityObject, columnsToSerialize, keysToSnapshot); - snapshot.step = callDataSnapshotFunction(System.identityHashCode(logIdentityObject), - control, doSnapshot); + snapshot.step = callDataSnapshotFunction(System.identityHashCode(logIdentityObject), control, doSnapshot); return snapshot; } /** - * Create a {@link InitialSnapshot snapshot} of the specified table using a set of requested - * columns and positions. Note that this method uses an index that is in Position space, and - * that it is notification-oblivious, i.e. it makes no attempt to ensure that notifications are - * not missed. 
+ * Create a {@link InitialSnapshot snapshot} of the specified table using a set of requested columns and positions. + * Note that this method uses an index that is in Position space, and that it is notification-oblivious, i.e. it + * makes no attempt to ensure that notifications are not missed. * * @param logIdentityObject An object used to prepend to log rows. * @param table the table to snapshot. @@ -506,20 +479,19 @@ static InitialSnapshot constructInitialSnapshot(final Object logIdentityObject, * @param positionsToSnapshot An Index of positions within the table to include, null for all * @return a snapshot of the entire base table. */ - public static InitialSnapshot constructInitialSnapshotInPositionSpace( - final Object logIdentityObject, - @NotNull final BaseTable table, - @Nullable final BitSet columnsToSerialize, - @Nullable final Index positionsToSnapshot) { + public static InitialSnapshot constructInitialSnapshotInPositionSpace(final Object logIdentityObject, + @NotNull final BaseTable table, + @Nullable final BitSet columnsToSerialize, + @Nullable final Index positionsToSnapshot) { return constructInitialSnapshotInPositionSpace(logIdentityObject, table, columnsToSerialize, - positionsToSnapshot, makeSnapshotControl(false, table)); + positionsToSnapshot, makeSnapshotControl(false, table)); } static InitialSnapshot constructInitialSnapshotInPositionSpace(final Object logIdentityObject, - @NotNull final BaseTable table, - @Nullable final BitSet columnsToSerialize, - @Nullable final Index positionsToSnapshot, - @NotNull final SnapshotControl control) { + @NotNull final BaseTable table, + @Nullable final BitSet columnsToSerialize, + @Nullable final Index positionsToSnapshot, + @NotNull final SnapshotControl control) { final InitialSnapshot snapshot = new InitialSnapshot(); final SnapshotFunction doSnapshot = (usePrev, beforeClockValue) -> { @@ -533,34 +505,30 @@ static InitialSnapshot constructInitialSnapshotInPositionSpace(final Object logI } else { 
keysToSnapshot = table.getIndex().subindexByPos(positionsToSnapshot); } - return serializeAllTable(usePrev, snapshot, table, logIdentityObject, - columnsToSerialize, keysToSnapshot); + return serializeAllTable(usePrev, snapshot, table, logIdentityObject, columnsToSerialize, keysToSnapshot); }; - snapshot.step = callDataSnapshotFunction(System.identityHashCode(logIdentityObject), - control, doSnapshot); + snapshot.step = callDataSnapshotFunction(System.identityHashCode(logIdentityObject), control, doSnapshot); return snapshot; } /** - * Create a {@link BarrageMessage snapshot} of the specified table including all columns and - * rows. Note that this method is notification-oblivious, i.e. it makes no attempt to ensure - * that notifications are not missed. + * Create a {@link BarrageMessage snapshot} of the specified table including all columns and rows. Note that this + * method is notification-oblivious, i.e. it makes no attempt to ensure that notifications are not missed. * * @param logIdentityObject An object used to prepend to log rows. * @param table the table to snapshot. * @return a snapshot of the entire base table. */ public static BarrageMessage constructBackplaneSnapshot(final Object logIdentityObject, - final BaseTable table) { + final BaseTable table) { return constructBackplaneSnapshotInPositionSpace(logIdentityObject, table, null, null); } /** - * Create a {@link BarrageMessage snapshot} of the specified table using a set of requested - * columns and positions. Note that this method uses an index that is in Position space, and - * that it is notification-oblivious, i.e. it makes no attempt to ensure that notifications are - * not missed. + * Create a {@link BarrageMessage snapshot} of the specified table using a set of requested columns and positions. + * Note that this method uses an index that is in Position space, and that it is notification-oblivious, i.e. it + * makes no attempt to ensure that notifications are not missed. 
* * @param logIdentityObject An object used to prepend to log rows. * @param table the table to snapshot. @@ -568,33 +536,30 @@ public static BarrageMessage constructBackplaneSnapshot(final Object logIdentity * @param positionsToSnapshot An Index of positions within the table to include, null for all * @return a snapshot of the entire base table. */ - public static BarrageMessage constructBackplaneSnapshotInPositionSpace( - final Object logIdentityObject, - final BaseTable table, - @Nullable final BitSet columnsToSerialize, - @Nullable final Index positionsToSnapshot) { - return constructBackplaneSnapshotInPositionSpace(logIdentityObject, table, - columnsToSerialize, positionsToSnapshot, makeSnapshotControl(false, table)); + public static BarrageMessage constructBackplaneSnapshotInPositionSpace(final Object logIdentityObject, + final BaseTable table, + @Nullable final BitSet columnsToSerialize, + @Nullable final Index positionsToSnapshot) { + return constructBackplaneSnapshotInPositionSpace(logIdentityObject, table, columnsToSerialize, + positionsToSnapshot, makeSnapshotControl(false, table)); } /** - * Create a {@link BarrageMessage snapshot} of the specified table using a set of requested - * columns and positions. Note that this method uses an index that is in Position space. + * Create a {@link BarrageMessage snapshot} of the specified table using a set of requested columns and positions. + * Note that this method uses an index that is in Position space. * * @param logIdentityObject An object used to prepend to log rows. * @param table the table to snapshot. 
* @param columnsToSerialize A {@link BitSet} of columns to include, null for all * @param positionsToSnapshot An Index of positions within the table to include, null for all - * @param control A {@link SnapshotControl} to define the parameters and consistency for this - * snapshot + * @param control A {@link SnapshotControl} to define the parameters and consistency for this snapshot * @return a snapshot of the entire base table. */ - public static BarrageMessage constructBackplaneSnapshotInPositionSpace( - final Object logIdentityObject, - @NotNull final BaseTable table, - @Nullable final BitSet columnsToSerialize, - @Nullable final Index positionsToSnapshot, - @NotNull final SnapshotControl control) { + public static BarrageMessage constructBackplaneSnapshotInPositionSpace(final Object logIdentityObject, + @NotNull final BaseTable table, + @Nullable final BitSet columnsToSerialize, + @Nullable final Index positionsToSnapshot, + @NotNull final SnapshotControl control) { final BarrageMessage snapshot = new BarrageMessage(); snapshot.isSnapshot = true; @@ -611,12 +576,10 @@ public static BarrageMessage constructBackplaneSnapshotInPositionSpace( } else { keysToSnapshot = table.getIndex().subindexByPos(positionsToSnapshot); } - return serializeAllTable(usePrev, snapshot, table, logIdentityObject, - columnsToSerialize, keysToSnapshot); + return serializeAllTable(usePrev, snapshot, table, logIdentityObject, columnsToSerialize, keysToSnapshot); }; - snapshot.step = callDataSnapshotFunction(System.identityHashCode(logIdentityObject), - control, doSnapshot); + snapshot.step = callDataSnapshotFunction(System.identityHashCode(logIdentityObject), control, doSnapshot); snapshot.firstSeq = snapshot.lastSeq = snapshot.step; return snapshot; @@ -631,17 +594,16 @@ public static BarrageMessage constructBackplaneSnapshotInPositionSpace( * @return list of the resulting {@link InitialSnapshot}s */ public static List constructInitialSnapshots(final Object logIdentityObject, - final 
BaseTable... tables) { + final BaseTable... tables) { final List snapshots = new ArrayList<>(); final NotificationObliviousMultipleSourceSnapshotControl snapshotControl = - new NotificationObliviousMultipleSourceSnapshotControl(tables); + new NotificationObliviousMultipleSourceSnapshotControl(tables); - final SnapshotFunction doSnapshot = (usePrev, - beforeClockValue) -> serializeAllTables(usePrev, snapshots, tables, logIdentityObject); + final SnapshotFunction doSnapshot = + (usePrev, beforeClockValue) -> serializeAllTables(usePrev, snapshots, tables, logIdentityObject); - callDataSnapshotFunction(System.identityHashCode(logIdentityObject), snapshotControl, - doSnapshot); + callDataSnapshotFunction(System.identityHashCode(logIdentityObject), snapshotControl, doSnapshot); return snapshots; } @@ -649,14 +611,11 @@ public static List constructInitialSnapshots(final Object logId @FunctionalInterface public interface SnapshotFunction { /** - * A function that would like to take a snapshot of data guarded by a retry loop with data - * consistency tests. + * A function that would like to take a snapshot of data guarded by a retry loop with data consistency tests. * - * @param usePrev Whether data from the previous cycle should be used (otherwise use this - * cycle) - * @param beforeClockValue The clock value that we captured before the function began; the - * function can use this value to bail out early if it notices something has gone - * wrong + * @param usePrev Whether data from the previous cycle should be used (otherwise use this cycle) + * @param beforeClockValue The clock value that we captured before the function began; the function can use this + * value to bail out early if it notices something has gone wrong * @return true if the function was successful, false if it should be retried */ boolean call(boolean usePrev, long beforeClockValue); @@ -669,19 +628,17 @@ public interface SnapshotFunction { public interface UsePreviousValues { /** *

    - * Determine if previous values should be used in table data access for the given - * {@link LogicalClock clock} value. + * Determine if previous values should be used in table data access for the given {@link LogicalClock clock} + * value. *

    * Expected to never request previous values during the idle phase of a cycle. *

    - * Must never request previous values for a source that has already been updated on the - * current cycle, unless it can be proven that that source was not instantiated on the - * current cycle. + * Must never request previous values for a source that has already been updated on the current cycle, unless it + * can be proven that that source was not instantiated on the current cycle. *

    * Must be safe to call more than once, exactly once per snapshot attempt. * - * @param beforeClockValue The current clock value before the snapshot function will be - * invoked + * @param beforeClockValue The current clock value before the snapshot function will be invoked * @return A {@link Boolean} with the following meaning: *

      *
    • {@code true} if previous values should be used
    • @@ -700,21 +657,17 @@ public interface UsePreviousValues { public interface SnapshotConsistent { /** *

      - * Determine (from within a snapshot function) if the snapshot appears to still be - * consistent. + * Determine (from within a snapshot function) if the snapshot appears to still be consistent. *

      - * This should be no more restrictive than the associated - * {@link SnapshotCompletedConsistently}. + * This should be no more restrictive than the associated {@link SnapshotCompletedConsistently}. *

      - * Can assume as a precondition that the clock step has not been observed to change since - * the last time the associated {@link UsePreviousValues#usePreviousValues(long)} was - * invoked, and that the clock state has not been observed to change if previous values were - * used. See {@link #clockConsistent(long, long, boolean)}. + * Can assume as a precondition that the clock step has not been observed to change since the last time the + * associated {@link UsePreviousValues#usePreviousValues(long)} was invoked, and that the clock state has not + * been observed to change if previous values were used. See {@link #clockConsistent(long, long, boolean)}. * * @param currentClockValue The current clock value * @param usingPreviousValues Whether the snapshot function is using previous values - * @return True if we can no longer expect that the snapshot function's result will be - * consistent + * @return True if we can no longer expect that the snapshot function's result will be consistent */ boolean snapshotConsistent(long currentClockValue, boolean usingPreviousValues); } @@ -726,38 +679,32 @@ public interface SnapshotConsistent { public interface SnapshotCompletedConsistently { /** *

      - * Determine if a snapshot was consistent according to the clock cycle. Intended to be - * paired with a {@link UsePreviousValues} function. + * Determine if a snapshot was consistent according to the clock cycle. Intended to be paired with a + * {@link UsePreviousValues} function. *

      - * Can assume as a precondition that the clock step has not been observed to change since - * the last time the associated {@link UsePreviousValues#usePreviousValues(long)} was - * invoked, and that the clock state has not been observed to change if previous values were - * used. See {@link #clockConsistent(long, long, boolean)}. + * Can assume as a precondition that the clock step has not been observed to change since the last time the + * associated {@link UsePreviousValues#usePreviousValues(long)} was invoked, and that the clock state has not + * been observed to change if previous values were used. See {@link #clockConsistent(long, long, boolean)}. *

      - * Will be called at most once per snapshot attempt. Will be called for all - * possibly-successful snapshot attempts. May be called after unsuccessful concurrent - * snapshot attempts. + * Will be called at most once per snapshot attempt. Will be called for all possibly-successful snapshot + * attempts. May be called after unsuccessful concurrent snapshot attempts. * * @param afterClockValue The current clock value after the snapshot function was invoked * @param usedPreviousValues If previous values were used * @return Whether the snapshot is provably consistent - * @throws RuntimeException If the snapshot was consistent but the snapshot function failed - * to satisfy this function's expectations; this will be treated as a failure of the - * snapshot function + * @throws RuntimeException If the snapshot was consistent but the snapshot function failed to satisfy this + * function's expectations; this will be treated as a failure of the snapshot function */ boolean snapshotCompletedConsistently(long afterClockValue, boolean usedPreviousValues); } /** - * Interface used to control snapshot behavior, including previous value usage and consistency - * testing. + * Interface used to control snapshot behavior, including previous value usage and consistency testing. 
*/ - public interface SnapshotControl - extends UsePreviousValues, SnapshotConsistent, SnapshotCompletedConsistently { + public interface SnapshotControl extends UsePreviousValues, SnapshotConsistent, SnapshotCompletedConsistently { @Override - default boolean snapshotCompletedConsistently(final long afterClockValue, - final boolean usedPreviousValues) { + default boolean snapshotCompletedConsistently(final long afterClockValue, final boolean usedPreviousValues) { return snapshotConsistent(afterClockValue, usedPreviousValues); } } @@ -790,22 +737,19 @@ public boolean snapshotConsistent(long currentClockValue, boolean usingPreviousV * * @param snapshotConsistent The {@link SnapshotConsistent} to use * - * @param snapshotCompletedConsistently The {@link SnapshotCompletedConsistently} to use, or - * null to use * {@code snapshotConsistent} + * @param snapshotCompletedConsistently The {@link SnapshotCompletedConsistently} to use, or null to use * {@code + * snapshotConsistent} */ - public static SnapshotControl makeSnapshotControl( - @NotNull final UsePreviousValues usePreviousValues, - @NotNull final SnapshotConsistent snapshotConsistent, - @Nullable final SnapshotCompletedConsistently snapshotCompletedConsistently) { + public static SnapshotControl makeSnapshotControl(@NotNull final UsePreviousValues usePreviousValues, + @NotNull final SnapshotConsistent snapshotConsistent, + @Nullable final SnapshotCompletedConsistently snapshotCompletedConsistently) { return snapshotCompletedConsistently == null - ? new SnapshotControlAdapter(usePreviousValues, snapshotConsistent) - : new SnapshotControlAdapter(usePreviousValues, snapshotConsistent, - snapshotCompletedConsistently); + ? new SnapshotControlAdapter(usePreviousValues, snapshotConsistent) + : new SnapshotControlAdapter(usePreviousValues, snapshotConsistent, snapshotCompletedConsistently); } /** - * Adapter to combine the individual component functions of {@link SnapshotControl} into a valid - * snapshot control. 
+ * Adapter to combine the individual component functions of {@link SnapshotControl} into a valid snapshot control. */ private static class SnapshotControlAdapter implements SnapshotControl { @@ -821,23 +765,22 @@ private static class SnapshotControlAdapter implements SnapshotControl { * @param snapshotCompletedConsistently The {@link SnapshotCompletedConsistently} to use */ private SnapshotControlAdapter(@NotNull final UsePreviousValues usePreviousValues, - @NotNull final SnapshotConsistent snapshotConsistent, - @NotNull final SnapshotCompletedConsistently snapshotCompletedConsistently) { + @NotNull final SnapshotConsistent snapshotConsistent, + @NotNull final SnapshotCompletedConsistently snapshotCompletedConsistently) { this.usePreviousValues = usePreviousValues; this.snapshotConsistent = snapshotConsistent; this.snapshotCompletedConsistently = snapshotCompletedConsistently; } /** - * Make a SnapshotControlAdapter without a dedicated {@link SnapshotCompletedConsistently} - * implementation. The {@link SnapshotConsistent} will be used as the - * {@link SnapshotCompletedConsistently} implementation. + * Make a SnapshotControlAdapter without a dedicated {@link SnapshotCompletedConsistently} implementation. The + * {@link SnapshotConsistent} will be used as the {@link SnapshotCompletedConsistently} implementation. 
* * @param usePreviousValues The {@link UsePreviousValues} to use * @param snapshotConsistent The {@link SnapshotCompletedConsistently} to use */ private SnapshotControlAdapter(@NotNull final UsePreviousValues usePreviousValues, - @NotNull final SnapshotConsistent snapshotConsistent) { + @NotNull final SnapshotConsistent snapshotConsistent) { this.usePreviousValues = usePreviousValues; this.snapshotConsistent = snapshotConsistent; this.snapshotCompletedConsistently = snapshotConsistent::snapshotConsistent; @@ -849,63 +792,56 @@ public Boolean usePreviousValues(final long beforeClockValue) { } @Override - public boolean snapshotConsistent(final long currentClockValue, - final boolean usingPreviousValues) { + public boolean snapshotConsistent(final long currentClockValue, final boolean usingPreviousValues) { return snapshotConsistent.snapshotConsistent(currentClockValue, usingPreviousValues); } @Override - public boolean snapshotCompletedConsistently(final long afterClockValue, - final boolean usedPreviousValues) { - return snapshotCompletedConsistently.snapshotCompletedConsistently(afterClockValue, - usedPreviousValues); + public boolean snapshotCompletedConsistently(final long afterClockValue, final boolean usedPreviousValues) { + return snapshotCompletedConsistently.snapshotCompletedConsistently(afterClockValue, usedPreviousValues); } } /** * Make a default {@link SnapshotControl} for a single source. * - * @param notificationAware Whether the result should be concerned with not missing - * notifications + * @param notificationAware Whether the result should be concerned with not missing notifications * @param source The source * @return An appropriate {@link SnapshotControl} */ public static SnapshotControl makeSnapshotControl(final boolean notificationAware, - @NotNull final NotificationStepSource source) { + @NotNull final NotificationStepSource source) { return notificationAware - ? 
new NotificationAwareSingleSourceSnapshotControl(source) - : new NotificationObliviousSingleSourceSnapshotControl(source); + ? new NotificationAwareSingleSourceSnapshotControl(source) + : new NotificationObliviousSingleSourceSnapshotControl(source); } /** * Make a default {@link SnapshotControl} for one or more sources. * - * @param notificationAware Whether the result should be concerned with not missing - * notifications + * @param notificationAware Whether the result should be concerned with not missing notifications * @param sources The sources * @return An appropriate {@link SnapshotControl} */ public static SnapshotControl makeSnapshotControl(final boolean notificationAware, - @NotNull final NotificationStepSource... sources) { + @NotNull final NotificationStepSource... sources) { if (sources.length == 1) { return makeSnapshotControl(notificationAware, sources[0]); } return notificationAware - ? new NotificationAwareMultipleSourceSnapshotControl(sources) - : new NotificationObliviousMultipleSourceSnapshotControl(sources); + ? new NotificationAwareMultipleSourceSnapshotControl(sources) + : new NotificationObliviousMultipleSourceSnapshotControl(sources); } /** - * A SnapshotControl implementation driven by a single data source for use cases when the - * snapshot function must not miss a notification. For use when instantiating concurrent - * consumers of all updates from a source. + * A SnapshotControl implementation driven by a single data source for use cases when the snapshot function must not + * miss a notification. For use when instantiating concurrent consumers of all updates from a source. 
*/ private static class NotificationAwareSingleSourceSnapshotControl implements SnapshotControl { private final NotificationStepSource source; - private NotificationAwareSingleSourceSnapshotControl( - @NotNull final NotificationStepSource source) { + private NotificationAwareSingleSourceSnapshotControl(@NotNull final NotificationStepSource source) { this.source = source; } @@ -913,36 +849,31 @@ private NotificationAwareSingleSourceSnapshotControl( public Boolean usePreviousValues(final long beforeClockValue) { // noinspection AutoBoxing return LogicalClock.getState(beforeClockValue) == LogicalClock.State.Updating && - source.getLastNotificationStep() != LogicalClock.getStep(beforeClockValue); + source.getLastNotificationStep() != LogicalClock.getStep(beforeClockValue); } @Override - public boolean snapshotConsistent(final long currentClockValue, - final boolean usingPreviousValues) { + public boolean snapshotConsistent(final long currentClockValue, final boolean usingPreviousValues) { if (!usingPreviousValues) { - // Cycle was Idle or we had already ticked, and so we're using current values on the - // current cycle: Success + // Cycle was Idle or we had already ticked, and so we're using current values on the current cycle: + // Success return true; } - // If we didn't miss an update then we're succeeding using previous values, else we've - // failed + // If we didn't miss an update then we're succeeding using previous values, else we've failed return source.getLastNotificationStep() != LogicalClock.getStep(currentClockValue); } } /** - * A SnapshotControl implementation driven by a single data source for use cases when the - * snapshot function doesn't care if it misses a notification. For use by consistent consumers - * of consistent current state. + * A SnapshotControl implementation driven by a single data source for use cases when the snapshot function doesn't + * care if it misses a notification. For use by consistent consumers of consistent current state. 
*/ - private static class NotificationObliviousSingleSourceSnapshotControl - implements SnapshotControl { + private static class NotificationObliviousSingleSourceSnapshotControl implements SnapshotControl { private final NotificationStepSource source; - private NotificationObliviousSingleSourceSnapshotControl( - @NotNull final NotificationStepSource source) { + private NotificationObliviousSingleSourceSnapshotControl(@NotNull final NotificationStepSource source) { this.source = source; } @@ -950,28 +881,25 @@ private NotificationObliviousSingleSourceSnapshotControl( public Boolean usePreviousValues(final long beforeClockValue) { // noinspection AutoBoxing return LogicalClock.getState(beforeClockValue) == LogicalClock.State.Updating && - source.getLastNotificationStep() != LogicalClock.getStep(beforeClockValue); + source.getLastNotificationStep() != LogicalClock.getStep(beforeClockValue); } @Override - public boolean snapshotConsistent(final long currentClockValue, - final boolean usingPreviousValues) { + public boolean snapshotConsistent(final long currentClockValue, final boolean usingPreviousValues) { return true; } } /** - * A SnapshotControl implementation driven by multiple data sources for use cases when the - * snapshot function must not miss a notification. Waits for all sources to be notified on this - * cycle if any has been notified on this cycle. For use when instantiating concurrent consumers - * of all updates from a set of sources. + * A SnapshotControl implementation driven by multiple data sources for use cases when the snapshot function must + * not miss a notification. Waits for all sources to be notified on this cycle if any has been notified on this + * cycle. For use when instantiating concurrent consumers of all updates from a set of sources. 
*/ private static class NotificationAwareMultipleSourceSnapshotControl implements SnapshotControl { private final NotificationStepSource[] sources; - private NotificationAwareMultipleSourceSnapshotControl( - @NotNull final NotificationStepSource... sources) { + private NotificationAwareMultipleSourceSnapshotControl(@NotNull final NotificationStepSource... sources) { this.sources = sources; } @@ -983,19 +911,18 @@ public Boolean usePreviousValues(final long beforeClockValue) { } final long beforeStep = LogicalClock.getStep(beforeClockValue); final NotificationStepSource[] notYetNotified = Stream.of(sources) - .filter((final NotificationStepSource source) -> source - .getLastNotificationStep() != beforeStep) - .toArray(NotificationStepSource[]::new); + .filter((final NotificationStepSource source) -> source.getLastNotificationStep() != beforeStep) + .toArray(NotificationStepSource[]::new); if (notYetNotified.length == sources.length) { return true; } if (notYetNotified.length > 0) { final NotificationStepSource[] notYetSatisfied = Stream.of(sources) - .filter((final NotificationQueue.Dependency dep) -> !dep.satisfied(beforeStep)) - .toArray(NotificationStepSource[]::new); + .filter((final NotificationQueue.Dependency dep) -> !dep.satisfied(beforeStep)) + .toArray(NotificationStepSource[]::new); if (notYetSatisfied.length > 0 - && !WaitNotification.waitForSatisfaction(beforeStep, notYetSatisfied) - && LogicalClock.DEFAULT.currentStep() != beforeStep) { + && !WaitNotification.waitForSatisfaction(beforeStep, notYetSatisfied) + && LogicalClock.DEFAULT.currentStep() != beforeStep) { // If we missed a step change, we've already failed, request a do-over. 
return null; } @@ -1004,35 +931,29 @@ public Boolean usePreviousValues(final long beforeClockValue) { } @Override - public boolean snapshotConsistent(final long currentClockValue, - final boolean usingPreviousValues) { + public boolean snapshotConsistent(final long currentClockValue, final boolean usingPreviousValues) { if (!usingPreviousValues) { - // Cycle was Idle or we had already ticked, and so we're using current values on the - // current cycle: Success + // Cycle was Idle or we had already ticked, and so we're using current values on the current cycle: + // Success return true; } - // If we didn't miss an update then we're succeeding using previous values, else we've - // failed + // If we didn't miss an update then we're succeeding using previous values, else we've failed final long currentStep = LogicalClock.getStep(currentClockValue); return Stream.of(sources) - .allMatch((final NotificationStepSource source) -> source - .getLastNotificationStep() != currentStep); + .allMatch((final NotificationStepSource source) -> source.getLastNotificationStep() != currentStep); } } /** - * A SnapshotControl implementation driven by multiple data sources for use cases when the - * snapshot function must not miss a notification. Waits for all sources to be notified on this - * cycle if any has been notified on this cycle. For use by consistent consumers of consistent - * current state. + * A SnapshotControl implementation driven by multiple data sources for use cases when the snapshot function must + * not miss a notification. Waits for all sources to be notified on this cycle if any has been notified on this + * cycle. For use by consistent consumers of consistent current state. 
*/ - private static class NotificationObliviousMultipleSourceSnapshotControl - implements SnapshotControl { + private static class NotificationObliviousMultipleSourceSnapshotControl implements SnapshotControl { private final NotificationStepSource[] sources; - private NotificationObliviousMultipleSourceSnapshotControl( - @NotNull final NotificationStepSource... sources) { + private NotificationObliviousMultipleSourceSnapshotControl(@NotNull final NotificationStepSource... sources) { this.sources = sources; } @@ -1044,19 +965,18 @@ public Boolean usePreviousValues(final long beforeClockValue) { } final long beforeStep = LogicalClock.getStep(beforeClockValue); final NotificationStepSource[] notYetNotified = Stream.of(sources) - .filter((final NotificationStepSource source) -> source - .getLastNotificationStep() != beforeStep) - .toArray(NotificationStepSource[]::new); + .filter((final NotificationStepSource source) -> source.getLastNotificationStep() != beforeStep) + .toArray(NotificationStepSource[]::new); if (notYetNotified.length == sources.length) { return true; } if (notYetNotified.length > 0) { final NotificationStepSource[] notYetSatisfied = Stream.of(sources) - .filter((final NotificationQueue.Dependency dep) -> !dep.satisfied(beforeStep)) - .toArray(NotificationStepSource[]::new); + .filter((final NotificationQueue.Dependency dep) -> !dep.satisfied(beforeStep)) + .toArray(NotificationStepSource[]::new); if (notYetSatisfied.length > 0 - && !WaitNotification.waitForSatisfaction(beforeStep, notYetSatisfied) - && LogicalClock.DEFAULT.currentStep() != beforeStep) { + && !WaitNotification.waitForSatisfaction(beforeStep, notYetSatisfied) + && LogicalClock.DEFAULT.currentStep() != beforeStep) { // If we missed a step change, we've already failed, request a do-over. 
return null; } @@ -1065,50 +985,46 @@ public Boolean usePreviousValues(final long beforeClockValue) { } @Override - public boolean snapshotConsistent(final long currentClockValue, - final boolean usingPreviousValues) { + public boolean snapshotConsistent(final long currentClockValue, final boolean usingPreviousValues) { return true; } } private static long callDataSnapshotFunction(final int logInt, - @NotNull final SnapshotControl control, - @NotNull final SnapshotFunction function) { + @NotNull final SnapshotControl control, + @NotNull final SnapshotFunction function) { return callDataSnapshotFunction(logOutput -> logOutput.append(logInt), control, function); } /** - * Invokes the snapshot function in a loop until it succeeds with provably consistent results, - * or until {@code MAX_CONCURRENT_ATTEMPTS} or {@code MAX_CONCURRENT_ATTEMPT_DURATION_MILLIS} - * are exceeded. Falls back to acquiring a shared LTM lock for a final attempt. + * Invokes the snapshot function in a loop until it succeeds with provably consistent results, or until + * {@code MAX_CONCURRENT_ATTEMPTS} or {@code MAX_CONCURRENT_ATTEMPT_DURATION_MILLIS} are exceeded. Falls back to + * acquiring a shared LTM lock for a final attempt. 
* * @param logPrefix A prefix for our log messages - * @param control A {@link SnapshotControl} to define the parameters and consistency for this - * snapshot + * @param control A {@link SnapshotControl} to define the parameters and consistency for this snapshot * @param function The function to execute * @return The logical clock step that applied to this snapshot */ public static long callDataSnapshotFunction(@NotNull final String logPrefix, - @NotNull final SnapshotControl control, - @NotNull final SnapshotFunction function) { - return callDataSnapshotFunction((final LogOutput logOutput) -> logOutput.append(logPrefix), - control, function); + @NotNull final SnapshotControl control, + @NotNull final SnapshotFunction function) { + return callDataSnapshotFunction((final LogOutput logOutput) -> logOutput.append(logPrefix), control, function); } /** - * Invokes the snapshot function in a loop until it succeeds with provably consistent results, - * or until {@code MAX_CONCURRENT_ATTEMPTS} or {@code MAX_CONCURRENT_ATTEMPT_DURATION_MILLIS} - * are exceeded. Falls back to acquiring a shared LTM lock for a final attempt. + * Invokes the snapshot function in a loop until it succeeds with provably consistent results, or until + * {@code MAX_CONCURRENT_ATTEMPTS} or {@code MAX_CONCURRENT_ATTEMPT_DURATION_MILLIS} are exceeded. Falls back to + * acquiring a shared LTM lock for a final attempt. 
* * @param logPrefix A prefix for our log messages - * @param control A {@link SnapshotControl} to define the parameters and consistency for this - * snapshot + * @param control A {@link SnapshotControl} to define the parameters and consistency for this snapshot * @param function The function to execute * @return The logical clock step that applied to this snapshot */ public static long callDataSnapshotFunction(@NotNull final LogOutputAppendable logPrefix, - @NotNull final SnapshotControl control, - @NotNull final SnapshotFunction function) { + @NotNull final SnapshotControl control, + @NotNull final SnapshotFunction function) { final long overallStart = System.currentTimeMillis(); final State state = State.get(); @@ -1133,38 +1049,30 @@ public static long callDataSnapshotFunction(@NotNull final LogOutputAppendable l // noinspection AutoUnboxing final boolean usePrev = previousValuesRequested; if (LogicalClock.getState(beforeClockValue) == LogicalClock.State.Idle && usePrev) { - Assert.statementNeverExecuted( - "Previous values requested while not updating: " + beforeClockValue); + Assert.statementNeverExecuted("Previous values requested while not updating: " + beforeClockValue); } if (LiveTableMonitor.DEFAULT.isRefreshThread() && usePrev) { - Assert.statementNeverExecuted( - "Previous values requested from a refresh thread: " + beforeClockValue); + Assert.statementNeverExecuted("Previous values requested from a refresh thread: " + beforeClockValue); } final long attemptDurationMillis; final LivenessScope snapshotLivenessScope = new LivenessScope(); - try (final SafeCloseable ignored = - LivenessScopeStack.open(snapshotLivenessScope, true)) { - final Object startObject = - state.startConcurrentSnapshot(control, beforeClockValue, usePrev); + try (final SafeCloseable ignored = LivenessScopeStack.open(snapshotLivenessScope, true)) { + final Object startObject = state.startConcurrentSnapshot(control, beforeClockValue, usePrev); try { functionSuccessful = 
function.call(usePrev, beforeClockValue); } catch (NoSnapshotAllowedException ex) { // Breaking here will force an LTM acquire. - // TODO: Optimization. If this exception is only used for cases when we can't - // use previous values, - // then we could simply wait for the source to become satisfied on this cycle, - // rather than - // waiting for the LTM lock. Likely requires work for all code that uses this - // pattern. - log.debug().append(logPrefix) - .append(" Disallowed LTM-less Snapshot Function took ") - .append(System.currentTimeMillis() - attemptStart).append("ms") - .append(", beforeClockValue=").append(beforeClockValue) - .append(", afterClockValue=").append(LogicalClock.DEFAULT.currentValue()) - .append(", usePrev=").append(usePrev) - .endl(); + // TODO: Optimization. If this exception is only used for cases when we can't use previous values, + // then we could simply wait for the source to become satisfied on this cycle, rather than + // waiting for the LTM lock. Likely requires work for all code that uses this pattern. 
+ log.debug().append(logPrefix).append(" Disallowed LTM-less Snapshot Function took ") + .append(System.currentTimeMillis() - attemptStart).append("ms") + .append(", beforeClockValue=").append(beforeClockValue) + .append(", afterClockValue=").append(LogicalClock.DEFAULT.currentValue()) + .append(", usePrev=").append(usePrev) + .endl(); break; } catch (Exception e) { functionSuccessful = false; @@ -1176,43 +1084,40 @@ public static long callDataSnapshotFunction(@NotNull final LogOutputAppendable l final long afterClockValue = LogicalClock.DEFAULT.currentValue(); try { snapshotSuccessful = clockConsistent(beforeClockValue, afterClockValue, usePrev) - && control.snapshotCompletedConsistently(afterClockValue, usePrev); + && control.snapshotCompletedConsistently(afterClockValue, usePrev); } catch (Exception e) { if (functionSuccessful) { - // Treat this exception as a snapshot function failure despite consistent - // snapshot + // Treat this exception as a snapshot function failure despite consistent snapshot functionSuccessful = false; caughtException = e; snapshotSuccessful = true; } else { - log.debug().append(logPrefix) - .append(" Suppressed exception from snapshot success function: ") - .append(e).endl(); + log.debug().append(logPrefix).append(" Suppressed exception from snapshot success function: ") + .append(e).endl(); } } attemptDurationMillis = System.currentTimeMillis() - attemptStart; log.debug().append(logPrefix).append(" LTM-less Snapshot Function took ") - .append(attemptDurationMillis).append("ms") - .append(", snapshotSuccessful=").append(snapshotSuccessful) - .append(", functionSuccessful=").append(functionSuccessful) - .append(", beforeClockValue=").append(beforeClockValue) - .append(", afterClockValue=").append(afterClockValue) - .append(", usePrev=").append(usePrev) - .endl(); + .append(attemptDurationMillis).append("ms") + .append(", snapshotSuccessful=").append(snapshotSuccessful) + .append(", functionSuccessful=").append(functionSuccessful) + 
.append(", beforeClockValue=").append(beforeClockValue) + .append(", afterClockValue=").append(afterClockValue) + .append(", usePrev=").append(usePrev) + .endl(); if (snapshotSuccessful) { if (functionSuccessful) { step = usePrev ? LogicalClock.getStep(beforeClockValue) - 1 - : LogicalClock.getStep(beforeClockValue); + : LogicalClock.getStep(beforeClockValue); snapshotLivenessScope.transferTo(initialLivenessManager); } break; } } if (attemptDurationMillis > MAX_CONCURRENT_ATTEMPT_DURATION_MILLIS) { - log.info().append(logPrefix) - .append(" Failed concurrent execution exceeded maximum duration (") - .append(attemptDurationMillis).append(" ms > ") - .append(MAX_CONCURRENT_ATTEMPT_DURATION_MILLIS).append(" ms)").endl(); + log.info().append(logPrefix).append(" Failed concurrent execution exceeded maximum duration (") + .append(attemptDurationMillis).append(" ms > ") + .append(MAX_CONCURRENT_ATTEMPT_DURATION_MILLIS).append(" ms)").endl(); break; } else { try { @@ -1229,22 +1134,18 @@ public static long callDataSnapshotFunction(@NotNull final LogOutputAppendable l state.maybeReleaseLock(); if (!functionSuccessful) { if (caughtException != null) { - throw new UncheckedDeephavenException( - "Failure to execute snapshot function with unchanged clock", - caughtException); + throw new UncheckedDeephavenException("Failure to execute snapshot function with unchanged clock", + caughtException); } else { - throw new UncheckedDeephavenException( - "Failure to execute snapshot function with unchanged clock"); + throw new UncheckedDeephavenException("Failure to execute snapshot function with unchanged clock"); } } } else { if (numConcurrentAttempts == 0) { - log.info().append(logPrefix) - .append(" Already held lock, proceeding to locked snapshot").endl(); + log.info().append(logPrefix).append(" Already held lock, proceeding to locked snapshot").endl(); } else { log.info().append(logPrefix) - .append(" Failed to obtain clean execution without blocking refresh processing") - 
.endl(); + .append(" Failed to obtain clean execution without blocking refresh processing").endl(); } state.startLockedSnapshot(); @@ -1254,9 +1155,8 @@ public static long callDataSnapshotFunction(@NotNull final LogOutputAppendable l final Boolean previousValuesRequested = control.usePreviousValues(beforeClockValue); if (!Boolean.FALSE.equals(previousValuesRequested)) { Assert.statementNeverExecuted( - "Previous values requested or inconsistent while blocking refresh processing: beforeClockValue=" - + beforeClockValue + ", previousValuesRequested=" - + previousValuesRequested); + "Previous values requested or inconsistent while blocking refresh processing: beforeClockValue=" + + beforeClockValue + ", previousValuesRequested=" + previousValuesRequested); } final long attemptStart = System.currentTimeMillis(); @@ -1267,15 +1167,14 @@ public static long callDataSnapshotFunction(@NotNull final LogOutputAppendable l Assert.eq(beforeClockValue, "beforeClockValue", afterClockValue, "afterClockValue"); - final boolean consistent = - control.snapshotCompletedConsistently(afterClockValue, false); + final boolean consistent = control.snapshotCompletedConsistently(afterClockValue, false); if (!consistent) { Assert.statementNeverExecuted( - "Consistent snapshot not generated despite blocking refresh processing!"); + "Consistent snapshot not generated despite blocking refresh processing!"); } log.info().append(logPrefix).append(" non-concurrent Snapshot Function took ") - .append(System.currentTimeMillis() - attemptStart).append("ms").endl(); + .append(System.currentTimeMillis() - attemptStart).append("ms").endl(); step = LogicalClock.getStep(afterClockValue); } finally { state.endLockedSnapshot(); @@ -1283,11 +1182,11 @@ public static long callDataSnapshotFunction(@NotNull final LogOutputAppendable l } final long duration = System.currentTimeMillis() - overallStart; if (duration > 10) { - log.info().append(logPrefix).append(" Snapshot Function elapsed time ").append(duration) - 
.append(" ms").append(", step=").append(step).endl(); + log.info().append(logPrefix).append(" Snapshot Function elapsed time ").append(duration).append(" ms") + .append(", step=").append(step).endl(); } else { - log.debug().append(logPrefix).append(" Snapshot Function elapsed time ") - .append(duration).append(" ms").append(", step=").append(step).endl(); + log.debug().append(logPrefix).append(" Snapshot Function elapsed time ").append(duration).append(" ms") + .append(", step=").append(step).endl(); } return step; } @@ -1296,10 +1195,10 @@ public static long callDataSnapshotFunction(@NotNull final LogOutputAppendable l *

      * Populate an {@link InitialSnapshot} with the specified keys and columns to snapshot. *

      - * Note that care must be taken while using this method to ensure the underlying table is locked - * or does not change, otherwise the resulting snapshot may be inconsistent. In general users - * should instead use {@link #constructInitialSnapshot} for simple use cases or - * {@link #callDataSnapshotFunction} for more advanced uses. + * Note that care must be taken while using this method to ensure the underlying table is locked or does not change, + * otherwise the resulting snapshot may be inconsistent. In general users should instead use + * {@link #constructInitialSnapshot} for simple use cases or {@link #callDataSnapshotFunction} for more advanced + * uses. * * @param usePrev Whether to use previous values * @param snapshot The snapshot to populate @@ -1310,11 +1209,11 @@ public static long callDataSnapshotFunction(@NotNull final LogOutputAppendable l * @return Whether the snapshot succeeded */ public static boolean serializeAllTable(boolean usePrev, - InitialSnapshot snapshot, - BaseTable table, - Object logIdentityObject, - BitSet columnsToSerialize, - Index keysToSnapshot) { + InitialSnapshot snapshot, + BaseTable table, + Object logIdentityObject, + BitSet columnsToSerialize, + Index keysToSnapshot) { snapshot.index = (usePrev ? table.getIndex().getPrevIndex() : table.getIndex()).clone(); if (keysToSnapshot != null) { @@ -1326,20 +1225,18 @@ public static boolean serializeAllTable(boolean usePrev, LongSizedDataStructure.intSize("construct snapshot", snapshot.rowsIncluded.size()); final Map sourceMap = table.getColumnSourceMap(); - final String[] columnSources = - sourceMap.keySet().toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + final String[] columnSources = sourceMap.keySet().toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY); snapshot.dataColumns = new Object[columnSources.length]; try (final SharedContext sharedContext = - (columnSources.length > 1) ? SharedContext.makeSharedContext() : null) { + (columnSources.length > 1) ? 
SharedContext.makeSharedContext() : null) { for (int ii = 0; ii < columnSources.length; ii++) { if (columnsToSerialize != null && !columnsToSerialize.get(ii)) { continue; } if (concurrentAttemptInconsistent()) { - final LogEntry logEntry = - log.info().append(System.identityHashCode(logIdentityObject)) + final LogEntry logEntry = log.info().append(System.identityHashCode(logIdentityObject)) .append(" Bad snapshot before column ").append(ii); appendConcurrentAttemptClockInfo(logEntry); logEntry.endl(); @@ -1347,19 +1244,18 @@ public static boolean serializeAllTable(boolean usePrev, } final ColumnSource columnSource = table.getColumnSource(columnSources[ii]); - snapshot.dataColumns[ii] = - getSnapshotData(columnSource, sharedContext, snapshot.rowsIncluded, usePrev); + snapshot.dataColumns[ii] = getSnapshotData(columnSource, sharedContext, snapshot.rowsIncluded, usePrev); } } log.info().append(System.identityHashCode(logIdentityObject)) - .append(": Snapshot candidate step=") - .append((usePrev ? -1 : 0) + LogicalClock.getStep(getConcurrentAttemptClockValue())) - .append(", rows=").append(snapshot.rowsIncluded).append("/").append(keysToSnapshot) - .append(", cols=").append(FormatBitSet.arrayToLog(snapshot.dataColumns)).append("/") - .append((columnsToSerialize != null) ? FormatBitSet.formatBitSet(columnsToSerialize) - : FormatBitSet.arrayToLog(snapshot.dataColumns)) - .append(", usePrev=").append(usePrev).endl(); + .append(": Snapshot candidate step=") + .append((usePrev ? -1 : 0) + LogicalClock.getStep(getConcurrentAttemptClockValue())) + .append(", rows=").append(snapshot.rowsIncluded).append("/").append(keysToSnapshot) + .append(", cols=").append(FormatBitSet.arrayToLog(snapshot.dataColumns)).append("/") + .append((columnsToSerialize != null) ? 
FormatBitSet.formatBitSet(columnsToSerialize) + : FormatBitSet.arrayToLog(snapshot.dataColumns)) + .append(", usePrev=").append(usePrev).endl(); return true; } @@ -1367,10 +1263,10 @@ public static boolean serializeAllTable(boolean usePrev, *

      * Populate a BarrageMessage with the specified positions to snapshot and columns. *

      - * >Note that care must be taken while using this method to ensure the underlying table is - * locked or does not change, otherwise the resulting snapshot may be inconsistent. In general - * users should instead use {@link #constructBackplaneSnapshot} for simple use cases or - * {@link #callDataSnapshotFunction} for more advanced uses. + * >Note that care must be taken while using this method to ensure the underlying table is locked or does not + * change, otherwise the resulting snapshot may be inconsistent. In general users should instead use + * {@link #constructBackplaneSnapshot} for simple use cases or {@link #callDataSnapshotFunction} for more advanced + * uses. * * @param usePrev Use previous values? * @param snapshot The snapshot to populate @@ -1381,11 +1277,11 @@ public static boolean serializeAllTable(boolean usePrev, * @return true if the snapshot was computed with an unchanged clock, false otherwise. */ public static boolean serializeAllTable(final boolean usePrev, - final BarrageMessage snapshot, - final BaseTable table, - final Object logIdentityObject, - final BitSet columnsToSerialize, - final Index positionsToSnapshot) { + final BarrageMessage snapshot, + final BaseTable table, + final Object logIdentityObject, + final BitSet columnsToSerialize, + final Index positionsToSnapshot) { snapshot.rowsAdded = (usePrev ? 
table.getIndex().getPrevIndex() : table.getIndex()).clone(); snapshot.rowsRemoved = Index.CURRENT_FACTORY.getEmptyIndex(); snapshot.addColumnData = new BarrageMessage.AddColumnData[table.getColumnSources().size()]; @@ -1402,15 +1298,13 @@ public static boolean serializeAllTable(final boolean usePrev, LongSizedDataStructure.intSize("construct snapshot", snapshot.rowsIncluded.size()); final Map sourceMap = table.getColumnSourceMap(); - final String[] columnSources = - sourceMap.keySet().toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + final String[] columnSources = sourceMap.keySet().toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY); try (final SharedContext sharedContext = - (columnSources.length > 1) ? SharedContext.makeSharedContext() : null) { + (columnSources.length > 1) ? SharedContext.makeSharedContext() : null) { for (int ii = 0; ii < columnSources.length; ++ii) { if (concurrentAttemptInconsistent()) { - final LogEntry logEntry = - log.info().append(System.identityHashCode(logIdentityObject)) + final LogEntry logEntry = log.info().append(System.identityHashCode(logIdentityObject)) .append(" Bad snapshot before column ").append(ii); appendConcurrentAttemptClockInfo(logEntry); logEntry.endl(); @@ -1421,32 +1315,28 @@ public static boolean serializeAllTable(final boolean usePrev, final BarrageMessage.AddColumnData acd = new BarrageMessage.AddColumnData(); snapshot.addColumnData[ii] = acd; - final boolean columnIsEmpty = - columnsToSerialize != null && !columnsToSerialize.get(ii); - final Index rows = - columnIsEmpty ? Index.FACTORY.getEmptyIndex() : snapshot.rowsIncluded; - // Note: cannot use shared context across several calls of differing lengths and no - // sharing necessary when empty - acd.data = getSnapshotDataAsChunk(columnSource, - columnIsEmpty ? null : sharedContext, rows, usePrev); + final boolean columnIsEmpty = columnsToSerialize != null && !columnsToSerialize.get(ii); + final Index rows = columnIsEmpty ? 
Index.FACTORY.getEmptyIndex() : snapshot.rowsIncluded; + // Note: cannot use shared context across several calls of differing lengths and no sharing necessary + // when empty + acd.data = getSnapshotDataAsChunk(columnSource, columnIsEmpty ? null : sharedContext, rows, usePrev); acd.type = columnSource.getType(); acd.componentType = columnSource.getComponentType(); final BarrageMessage.ModColumnData mcd = new BarrageMessage.ModColumnData(); snapshot.modColumnData[ii] = mcd; mcd.rowsModified = Index.CURRENT_FACTORY.getEmptyIndex(); - mcd.data = getSnapshotDataAsChunk(columnSource, null, Index.FACTORY.getEmptyIndex(), - usePrev); + mcd.data = getSnapshotDataAsChunk(columnSource, null, Index.FACTORY.getEmptyIndex(), usePrev); mcd.type = acd.type; mcd.componentType = acd.componentType; } } final LogEntry infoEntry = log.info().append(System.identityHashCode(logIdentityObject)) - .append(": Snapshot candidate step=") - .append((usePrev ? -1 : 0) + LogicalClock.getStep(getConcurrentAttemptClockValue())) - .append(", rows=").append(snapshot.rowsIncluded).append("/").append(positionsToSnapshot) - .append(", cols="); + .append(": Snapshot candidate step=") + .append((usePrev ? 
-1 : 0) + LogicalClock.getStep(getConcurrentAttemptClockValue())) + .append(", rows=").append(snapshot.rowsIncluded).append("/").append(positionsToSnapshot) + .append(", cols="); if (columnsToSerialize == null) { infoEntry.append("ALL"); } else { @@ -1457,8 +1347,8 @@ public static boolean serializeAllTable(final boolean usePrev, return true; } - private static boolean serializeAllTables(boolean usePrev, List snapshots, - BaseTable[] tables, Object logIdentityObject) { + private static boolean serializeAllTables(boolean usePrev, List snapshots, BaseTable[] tables, + Object logIdentityObject) { snapshots.clear(); for (final BaseTable table : tables) { @@ -1473,14 +1363,12 @@ private static boolean serializeAllTables(boolean usePrev, List return true; } - private static Object getSnapshotData(final ColumnSource columnSource, - final SharedContext sharedContext, final Index index, final boolean usePrev) { - final ColumnSource sourceToUse = - ReinterpretUtilities.maybeConvertToPrimitive(columnSource); + private static Object getSnapshotData(final ColumnSource columnSource, final SharedContext sharedContext, + final Index index, final boolean usePrev) { + final ColumnSource sourceToUse = ReinterpretUtilities.maybeConvertToPrimitive(columnSource); final Class type = sourceToUse.getType(); final int size = index.intSize(); - try (final ColumnSource.FillContext context = - sourceToUse.makeFillContext(size, sharedContext)) { + try (final ColumnSource.FillContext context = sourceToUse.makeFillContext(size, sharedContext)) { final ChunkType chunkType = sourceToUse.getChunkType(); final Object resultArray = chunkType.makeArray(size); final WritableChunk result = chunkType.writableChunkWrap(resultArray, 0, size); @@ -1503,15 +1391,13 @@ private static Object getSnapshotData(final ColumnSource columnSource, } } - private static WritableChunk getSnapshotDataAsChunk( - final ColumnSource columnSource, final SharedContext sharedContext, final Index index, - final boolean usePrev) { 
- final ColumnSource sourceToUse = - ReinterpretUtilities.maybeConvertToPrimitive(columnSource); + private static WritableChunk getSnapshotDataAsChunk(final ColumnSource columnSource, + final SharedContext sharedContext, final Index index, final boolean usePrev) { + final ColumnSource sourceToUse = ReinterpretUtilities.maybeConvertToPrimitive(columnSource); final int size = index.intSize(); try (final ColumnSource.FillContext context = sharedContext != null - ? sourceToUse.makeFillContext(size, sharedContext) - : sourceToUse.makeFillContext(size)) { + ? sourceToUse.makeFillContext(size, sharedContext) + : sourceToUse.makeFillContext(size)) { final ChunkType chunkType = sourceToUse.getChunkType(); final WritableChunk result = chunkType.makeWritableChunk(size); if (usePrev) { @@ -1536,17 +1422,15 @@ public static long estimateSnapshotSize(Table table) { } /** - * Make a rough guess at the size of a snapshot, using the column types and common column names. - * The use case is when a user requests something from the GUI; we'd like to know if it is - * ridiculous before actually doing it. + * Make a rough guess at the size of a snapshot, using the column types and common column names. The use case is + * when a user requests something from the GUI; we'd like to know if it is ridiculous before actually doing it. 
* * @param tableDefinition the table definitionm * @param columns a bitset indicating which columns are included * @param rowCount how many rows of this data we'll be snapshotting * @return the estimated size of the snapshot */ - public static long estimateSnapshotSize(TableDefinition tableDefinition, BitSet columns, - long rowCount) { + public static long estimateSnapshotSize(TableDefinition tableDefinition, BitSet columns, long rowCount) { long sizePerRow = 0; long totalSize = 0; @@ -1560,16 +1444,14 @@ public static long estimateSnapshotSize(TableDefinition tableDefinition, BitSet final ColumnDefinition definition = columnDefinitions[ii]; if (definition.getDataType() == byte.class || definition.getDataType() == char.class - || definition.getDataType() == Boolean.class) { + || definition.getDataType() == Boolean.class) { sizePerRow += 1; } else if (definition.getDataType() == short.class) { sizePerRow += 2; - } else if (definition.getDataType() == int.class - || definition.getDataType() == float.class) { + } else if (definition.getDataType() == int.class || definition.getDataType() == float.class) { sizePerRow += 4; - } else if (definition.getDataType() == long.class - || definition.getDataType() == double.class - || definition.getDataType() == DBDateTime.class) { + } else if (definition.getDataType() == long.class || definition.getDataType() == double.class + || definition.getDataType() == DBDateTime.class) { sizePerRow += 8; } else { switch (definition.getName()) { diff --git a/DB/src/main/java/io/deephaven/db/v2/remote/InitialSnapshot.java b/DB/src/main/java/io/deephaven/db/v2/remote/InitialSnapshot.java index 677a14afe78..594ab5832b9 100644 --- a/DB/src/main/java/io/deephaven/db/v2/remote/InitialSnapshot.java +++ b/DB/src/main/java/io/deephaven/db/v2/remote/InitialSnapshot.java @@ -11,9 +11,8 @@ import java.io.Serializable; /** - * A Raw table snapshot. 
Users may use - * {@link InitialSnapshotTable#setupInitialSnapshotTable(Table, InitialSnapshot)} to convert this - * into a {@link io.deephaven.db.v2.QueryTable} + * A Raw table snapshot. Users may use {@link InitialSnapshotTable#setupInitialSnapshotTable(Table, InitialSnapshot)} to + * convert this into a {@link io.deephaven.db.v2.QueryTable} */ public class InitialSnapshot implements Serializable, Cloneable { static final long serialVersionUID = 4380513367437361741L; @@ -47,12 +46,11 @@ public InitialSnapshot setViewport(Index viewport) { @Override public String toString() { return "InitialSnapshot{" + - "type=" + type + - ", rows=" + rowsIncluded + (index == null ? "" : "/" + index) + - ", columns=" - + FormatBitSet.formatBitSetAsString(FormatBitSet.arrayToBitSet(dataColumns)) + - ", deltaSequence=" + deltaSequence + - ", step=" + step + - '}'; + "type=" + type + + ", rows=" + rowsIncluded + (index == null ? "" : "/" + index) + + ", columns=" + FormatBitSet.formatBitSetAsString(FormatBitSet.arrayToBitSet(dataColumns)) + + ", deltaSequence=" + deltaSequence + + ", step=" + step + + '}'; } } diff --git a/DB/src/main/java/io/deephaven/db/v2/remote/InitialSnapshotTable.java b/DB/src/main/java/io/deephaven/db/v2/remote/InitialSnapshotTable.java index 566789c689b..d7ee26bc67d 100644 --- a/DB/src/main/java/io/deephaven/db/v2/remote/InitialSnapshotTable.java +++ b/DB/src/main/java/io/deephaven/db/v2/remote/InitialSnapshotTable.java @@ -32,9 +32,8 @@ public class InitialSnapshotTable extends QueryTable { private final BitSet subscribedColumns; - protected InitialSnapshotTable(Map result, - WritableSource[] writableSources, RedirectionIndex redirectionIndex, - BitSet subscribedColumns) { + protected InitialSnapshotTable(Map result, WritableSource[] writableSources, + RedirectionIndex redirectionIndex, BitSet subscribedColumns) { super(Index.FACTORY.getEmptyIndex(), result); this.subscribedColumns = subscribedColumns; this.writableSources = writableSources; @@ -58,29 
+57,21 @@ public boolean isSubscribedColumn(int column) { protected Setter getSetter(final WritableSource source) { if (source.getType() == byte.class) { - return (Setter) (array, arrayIndex, destIndex) -> source.set(destIndex, - array[arrayIndex]); + return (Setter) (array, arrayIndex, destIndex) -> source.set(destIndex, array[arrayIndex]); } else if (source.getType() == char.class) { - return (Setter) (array, arrayIndex, destIndex) -> source.set(destIndex, - array[arrayIndex]); + return (Setter) (array, arrayIndex, destIndex) -> source.set(destIndex, array[arrayIndex]); } else if (source.getType() == double.class) { - return (Setter) (array, arrayIndex, destIndex) -> source.set(destIndex, - array[arrayIndex]); + return (Setter) (array, arrayIndex, destIndex) -> source.set(destIndex, array[arrayIndex]); } else if (source.getType() == float.class) { - return (Setter) (array, arrayIndex, destIndex) -> source.set(destIndex, - array[arrayIndex]); + return (Setter) (array, arrayIndex, destIndex) -> source.set(destIndex, array[arrayIndex]); } else if (source.getType() == int.class) { - return (Setter) (array, arrayIndex, destIndex) -> source.set(destIndex, - array[arrayIndex]); + return (Setter) (array, arrayIndex, destIndex) -> source.set(destIndex, array[arrayIndex]); } else if (source.getType() == long.class || source.getType() == DBDateTime.class) { - return (Setter) (array, arrayIndex, destIndex) -> source.set(destIndex, - array[arrayIndex]); + return (Setter) (array, arrayIndex, destIndex) -> source.set(destIndex, array[arrayIndex]); } else if (source.getType() == short.class) { - return (Setter) (array, arrayIndex, destIndex) -> source.set(destIndex, - array[arrayIndex]); + return (Setter) (array, arrayIndex, destIndex) -> source.set(destIndex, array[arrayIndex]); } else if (source.getType() == Boolean.class) { - return (Setter) (array, arrayIndex, destIndex) -> source.set(destIndex, - array[arrayIndex]); + return (Setter) (array, arrayIndex, destIndex) -> 
source.set(destIndex, array[arrayIndex]); } else { return (Setter) (array, arrayIndex, destIndex) -> { // noinspection unchecked @@ -92,8 +83,7 @@ protected Setter getSetter(final WritableSource source) { protected void processInitialSnapshot(InitialSnapshot snapshot) { final Index viewPort = snapshot.viewport; final Index addedIndex = snapshot.rowsIncluded; - final Index newlyPopulated = - viewPort == null ? addedIndex : snapshot.index.subindexByPos(viewPort); + final Index newlyPopulated = viewPort == null ? addedIndex : snapshot.index.subindexByPos(viewPort); if (viewPort != null) { newlyPopulated.retain(addedIndex); } @@ -177,43 +167,40 @@ protected interface Setter { void set(T array, int arrayIndex, long destIndex); } - public static InitialSnapshotTable setupInitialSnapshotTable(Table originalTable, - InitialSnapshot snapshot) { + public static InitialSnapshotTable setupInitialSnapshotTable(Table originalTable, InitialSnapshot snapshot) { return setupInitialSnapshotTable(originalTable.getDefinition(), snapshot); } - public static InitialSnapshotTable setupInitialSnapshotTable(Table originalTable, - InitialSnapshot snapshot, BitSet subscribedColumns) { - return setupInitialSnapshotTable(originalTable.getDefinition(), snapshot, - subscribedColumns); + public static InitialSnapshotTable setupInitialSnapshotTable(Table originalTable, InitialSnapshot snapshot, + BitSet subscribedColumns) { + return setupInitialSnapshotTable(originalTable.getDefinition(), snapshot, subscribedColumns); } - public static InitialSnapshotTable setupInitialSnapshotTable(TableDefinition definition, - InitialSnapshot snapshot) { + public static InitialSnapshotTable setupInitialSnapshotTable(TableDefinition definition, InitialSnapshot snapshot) { BitSet allColumns = new BitSet(definition.getColumns().length); allColumns.set(0, definition.getColumns().length); return setupInitialSnapshotTable(definition, snapshot, allColumns); } - public static InitialSnapshotTable 
setupInitialSnapshotTable(TableDefinition definition, - InitialSnapshot snapshot, BitSet subscribedColumns) { + public static InitialSnapshotTable setupInitialSnapshotTable(TableDefinition definition, InitialSnapshot snapshot, + BitSet subscribedColumns) { final ColumnDefinition[] columns = definition.getColumns(); WritableSource writableSources[] = new WritableSource[columns.length]; RedirectionIndex redirectionIndex = RedirectionIndex.FACTORY.createRedirectionIndex(8); LinkedHashMap finalColumns = new LinkedHashMap<>(); for (int i = 0; i < columns.length; i++) { - writableSources[i] = ArrayBackedColumnSource.getMemoryColumnSource(0, - columns[i].getDataType(), columns[i].getComponentType()); + writableSources[i] = ArrayBackedColumnSource.getMemoryColumnSource(0, columns[i].getDataType(), + columns[i].getComponentType()); // noinspection unchecked finalColumns.put(columns[i].getName(), - new RedirectedColumnSource<>(redirectionIndex, writableSources[i], 0)); + new RedirectedColumnSource<>(redirectionIndex, writableSources[i], 0)); } - // This table does not refresh, so we don't need to tell our redirection index or column - // source to start tracking + // This table does not refresh, so we don't need to tell our redirection index or column source to start + // tracking // prev values. 
- InitialSnapshotTable initialSnapshotTable = new InitialSnapshotTable(finalColumns, - writableSources, redirectionIndex, subscribedColumns); + InitialSnapshotTable initialSnapshotTable = + new InitialSnapshotTable(finalColumns, writableSources, redirectionIndex, subscribedColumns); initialSnapshotTable.processInitialSnapshot(snapshot); return initialSnapshotTable; } diff --git a/DB/src/main/java/io/deephaven/db/v2/remote/WrappedDelegatingTable.java b/DB/src/main/java/io/deephaven/db/v2/remote/WrappedDelegatingTable.java index 7e0a1ce4237..49fec055ce2 100644 --- a/DB/src/main/java/io/deephaven/db/v2/remote/WrappedDelegatingTable.java +++ b/DB/src/main/java/io/deephaven/db/v2/remote/WrappedDelegatingTable.java @@ -26,8 +26,8 @@ public abstract class WrappedDelegatingTable extends BaseTable { /** - * Marks a {@link io.deephaven.base.Function.Unary#call(Object)} method as opting out of being - * re-wrapped, as a WrappedDelegatingTable would normally do. + * Marks a {@link io.deephaven.base.Function.Unary#call(Object)} method as opting out of being re-wrapped, as a + * WrappedDelegatingTable would normally do. 
*/ @Target(ElementType.METHOD) @Retention(RetentionPolicy.RUNTIME) @@ -38,7 +38,7 @@ public abstract class WrappedDelegatingTable extends BaseTable { Function wrapTable; public WrappedDelegatingTable(final Table parent, final String wrapperSuffix, - final Function wrapTable) { + final Function wrapTable) { super(parent.getDefinition(), parent.getDescription() + "-" + wrapperSuffix); this.parent = parent; this.wrapTable = wrapTable; @@ -49,18 +49,17 @@ public R apply(io.deephaven.base.Function.Unary function) { final R result = parent.apply(function); if (result instanceof Table) { try { - if (function.getClass().getMethod("call", Object.class) - .getAnnotation(DoNotWrap.class) != null) { + if (function.getClass().getMethod("call", Object.class).getAnnotation(DoNotWrap.class) != null) { return result; } } catch (final NoSuchMethodException e) { // Function.Unary no longer has a call() method? throw new IllegalStateException("Function.Unary.call() method is missing?", e); } - // We can't reflectively check if R is Table or is some unexpected subclass that doesn't - // match what wrapTable - // returns, so we just have to "cast to R" and let the calling code potentially fail - // with a ClassCastException. + // We can't reflectively check if R is Table or is some unexpected subclass that doesn't match what + // wrapTable + // returns, so we just have to "cast to R" and let the calling code potentially fail with a + // ClassCastException. return (R) wrapTable.apply((Table) result); } return result; @@ -103,9 +102,8 @@ public Table wouldMatch(WouldMatchPair... matchers) { @Override public Table whereIn(GroupStrategy groupStrategy, Table rightTable, boolean inclusion, - MatchPair... columnsToMatch) { - return wrapTable - .apply(parent.whereIn(groupStrategy, rightTable, inclusion, columnsToMatch)); + MatchPair... 
columnsToMatch) { + return wrapTable.apply(parent.whereIn(groupStrategy, rightTable, inclusion, columnsToMatch)); } @Override @@ -190,32 +188,29 @@ public Table exactJoin(Table rightTable, MatchPair[] columnsToMatch, MatchPair[] @Override public Table aj(Table rightTable, MatchPair[] columnsToMatch, MatchPair[] columnsToAdd, - AsOfMatchRule asOfMatchRule) { + AsOfMatchRule asOfMatchRule) { return wrapTable.apply(parent.aj(rightTable, columnsToMatch, columnsToAdd, asOfMatchRule)); } @Override public Table raj(Table rightTable, MatchPair[] columnsToMatch, MatchPair[] columnsToAdd, - AsOfMatchRule asOfMatchRule) { + AsOfMatchRule asOfMatchRule) { return wrapTable.apply(parent.raj(rightTable, columnsToMatch, columnsToAdd, asOfMatchRule)); } @Override - public Table naturalJoin(Table rightTable, MatchPair[] columnsToMatch, - MatchPair[] columnsToAdd) { + public Table naturalJoin(Table rightTable, MatchPair[] columnsToMatch, MatchPair[] columnsToAdd) { return wrapTable.apply(parent.naturalJoin(rightTable, columnsToMatch, columnsToAdd)); } @Override public Table join(Table rightTable, MatchPair[] columnsToMatch, MatchPair[] columnsToAdd, - int numRightBitsToReserve) { - return wrapTable - .apply(parent.join(rightTable, columnsToMatch, columnsToAdd, numRightBitsToReserve)); + int numRightBitsToReserve) { + return wrapTable.apply(parent.join(rightTable, columnsToMatch, columnsToAdd, numRightBitsToReserve)); } @Override - public Table by(AggregationStateFactory aggregationStateFactory, - SelectColumn... groupByColumns) { + public Table by(AggregationStateFactory aggregationStateFactory, SelectColumn... groupByColumns) { return wrapTable.apply(parent.by(aggregationStateFactory, groupByColumns)); } @@ -230,8 +225,7 @@ public Table tailBy(long nRows, String... groupByColumns) { } @Override - public Table applyToAllBy(String formulaColumn, String columnParamName, - SelectColumn... 
groupByColumns) { + public Table applyToAllBy(String formulaColumn, String columnParamName, SelectColumn... groupByColumns) { return wrapTable.apply(parent.applyToAllBy(formulaColumn, columnParamName, groupByColumns)); } @@ -312,7 +306,7 @@ public TableMap byExternal(boolean dropKeys, String... keyColumnNames) { @Override public Table rollup(ComboAggregateFactory comboAggregateFactory, boolean includeConstituents, - SelectColumn... columns) { + SelectColumn... columns) { return wrapTable.apply(parent.rollup(comboAggregateFactory, includeConstituents, columns)); } @@ -337,10 +331,8 @@ public Table snapshot(Table baseTable, boolean doInitialSnapshot, String... stam } @Override - public Table snapshotIncremental(Table rightTable, boolean doInitialSnapshot, - String... stampColumns) { - return wrapTable - .apply(parent.snapshotIncremental(rightTable, doInitialSnapshot, stampColumns)); + public Table snapshotIncremental(Table rightTable, boolean doInitialSnapshot, String... stampColumns) { + return wrapTable.apply(parent.snapshotIncremental(rightTable, doInitialSnapshot, stampColumns)); } @Override diff --git a/DB/src/main/java/io/deephaven/db/v2/replay/QueryReplayGroupedTable.java b/DB/src/main/java/io/deephaven/db/v2/replay/QueryReplayGroupedTable.java index 00d7432e414..b2acef54616 100644 --- a/DB/src/main/java/io/deephaven/db/v2/replay/QueryReplayGroupedTable.java +++ b/DB/src/main/java/io/deephaven/db/v2/replay/QueryReplayGroupedTable.java @@ -28,13 +28,12 @@ public abstract class QueryReplayGroupedTable extends QueryTable implements Live final Replayer replayer; protected PriorityQueue allIterators = new PriorityQueue<>(); - private static Map getResultSources( - Map input, RedirectionIndex redirectionIndex) { + private static Map getResultSources(Map input, + RedirectionIndex redirectionIndex) { Map result = new LinkedHashMap<>(); for (Map.Entry stringEntry : input.entrySet()) { ColumnSource value = stringEntry.getValue(); - result.put(stringEntry.getKey(), - new 
ReadOnlyRedirectedColumnSource<>(redirectionIndex, value)); + result.put(stringEntry.getKey(), new ReadOnlyRedirectedColumnSource<>(redirectionIndex, value)); } return result; } @@ -47,8 +46,7 @@ static class IteratorsAndNextTime implements Comparable { long lastIndex; public final long pos; - private IteratorsAndNextTime(Index.Iterator iterator, ColumnSource columnSource, - long pos) { + private IteratorsAndNextTime(Index.Iterator iterator, ColumnSource columnSource, long pos) { this.iterator = iterator; this.columnSource = columnSource; this.pos = pos; @@ -76,15 +74,14 @@ public int compareTo(Object o) { } protected QueryReplayGroupedTable(Index index, Map input, - String timeColumn, Replayer replayer, RedirectionIndex redirectionIndex, - String[] groupingColumns) { + String timeColumn, Replayer replayer, RedirectionIndex redirectionIndex, String[] groupingColumns) { super(Index.FACTORY.getIndexByValues(), getResultSources(input, redirectionIndex)); this.redirectionIndex = redirectionIndex; Map grouping; final ColumnSource[] columnSources = - Arrays.stream(groupingColumns).map(gc -> input.get(gc)).toArray(ColumnSource[]::new); + Arrays.stream(groupingColumns).map(gc -> input.get(gc)).toArray(ColumnSource[]::new); final TupleSource tupleSource = TupleSourceFactory.makeTupleSource(columnSources); grouping = index.getGrouping(tupleSource); diff --git a/DB/src/main/java/io/deephaven/db/v2/replay/ReplayGroupedFullTable.java b/DB/src/main/java/io/deephaven/db/v2/replay/ReplayGroupedFullTable.java index d3d7c8b4862..07c9c4c753d 100644 --- a/DB/src/main/java/io/deephaven/db/v2/replay/ReplayGroupedFullTable.java +++ b/DB/src/main/java/io/deephaven/db/v2/replay/ReplayGroupedFullTable.java @@ -14,14 +14,12 @@ public class ReplayGroupedFullTable extends QueryReplayGroupedTable { private int redirIndexSize; - public ReplayGroupedFullTable(Index index, Map input, - String timeColumn, Replayer replayer, String groupingColumn) { - super(index, input, timeColumn, replayer, - 
RedirectionIndex.FACTORY.createRedirectionIndex((int) index.size()), - new String[] {groupingColumn}); + public ReplayGroupedFullTable(Index index, Map input, String timeColumn, + Replayer replayer, String groupingColumn) { + super(index, input, timeColumn, replayer, RedirectionIndex.FACTORY.createRedirectionIndex((int) index.size()), + new String[] {groupingColumn}); redirIndexSize = 0; - // We do not modify existing entries in the RedirectionIndex (we only add at the end), so - // there's no need to + // We do not modify existing entries in the RedirectionIndex (we only add at the end), so there's no need to // ask the RedirectionIndex to track previous values. } @@ -31,8 +29,7 @@ public void refresh() { return; } IndexBuilder indexBuilder = Index.FACTORY.getBuilder(); - while (!allIterators.isEmpty() - && allIterators.peek().lastTime.getNanos() < replayer.currentTimeNanos()) { + while (!allIterators.isEmpty() && allIterators.peek().lastTime.getNanos() < replayer.currentTimeNanos()) { IteratorsAndNextTime currentIt = allIterators.poll(); final long key = redirIndexSize++; redirectionIndex.put(key, currentIt.lastIndex); diff --git a/DB/src/main/java/io/deephaven/db/v2/replay/ReplayLastByGroupedTable.java b/DB/src/main/java/io/deephaven/db/v2/replay/ReplayLastByGroupedTable.java index e10a2023d6b..c030e65c967 100644 --- a/DB/src/main/java/io/deephaven/db/v2/replay/ReplayLastByGroupedTable.java +++ b/DB/src/main/java/io/deephaven/db/v2/replay/ReplayLastByGroupedTable.java @@ -13,10 +13,10 @@ public class ReplayLastByGroupedTable extends QueryReplayGroupedTable { - public ReplayLastByGroupedTable(Index index, Map input, - String timeColumn, Replayer replayer, String[] groupingColumns) { - super(index, input, timeColumn, replayer, - RedirectionIndex.FACTORY.createRedirectionIndex(100), groupingColumns); + public ReplayLastByGroupedTable(Index index, Map input, String timeColumn, + Replayer replayer, String[] groupingColumns) { + super(index, input, timeColumn, 
replayer, RedirectionIndex.FACTORY.createRedirectionIndex(100), + groupingColumns); replayer.registerTimeSource(index, input.get(timeColumn)); } @@ -28,8 +28,7 @@ public void refresh() { IndexBuilder addedBuilder = Index.FACTORY.getBuilder(); IndexBuilder modifiedBuilder = Index.FACTORY.getBuilder(); // List iteratorsToAddBack = new ArrayList<>(allIterators.size()); - while (!allIterators.isEmpty() - && allIterators.peek().lastTime.getNanos() < replayer.currentTimeNanos()) { + while (!allIterators.isEmpty() && allIterators.peek().lastTime.getNanos() < replayer.currentTimeNanos()) { IteratorsAndNextTime currentIt = allIterators.poll(); redirectionIndex.put(currentIt.pos, currentIt.lastIndex); if (getIndex().find(currentIt.pos) >= 0) { @@ -39,8 +38,7 @@ public void refresh() { } do { currentIt = currentIt.next(); - } while (currentIt != null - && currentIt.lastTime.getNanos() < replayer.currentTimeNanos()); + } while (currentIt != null && currentIt.lastTime.getNanos() < replayer.currentTimeNanos()); if (currentIt != null) { allIterators.add(currentIt); } diff --git a/DB/src/main/java/io/deephaven/db/v2/replay/ReplayTable.java b/DB/src/main/java/io/deephaven/db/v2/replay/ReplayTable.java index 1f11a43ecca..b18e478cbb3 100644 --- a/DB/src/main/java/io/deephaven/db/v2/replay/ReplayTable.java +++ b/DB/src/main/java/io/deephaven/db/v2/replay/ReplayTable.java @@ -23,8 +23,7 @@ public class ReplayTable extends QueryTable implements LiveTable { private boolean done; private final Replayer replayer; - public ReplayTable(Index index, Map result, String timeColumn, - Replayer replayer) { + public ReplayTable(Index index, Map result, String timeColumn, Replayer replayer) { super(Index.FACTORY.getIndexByValues(), result); Require.requirement(replayer != null, "replayer != null"); replayer.registerTimeSource(index, result.get(timeColumn)); diff --git a/DB/src/main/java/io/deephaven/db/v2/replay/Replayer.java b/DB/src/main/java/io/deephaven/db/v2/replay/Replayer.java index 
0ed701cd53f..a4c7de532d6 100644 --- a/DB/src/main/java/io/deephaven/db/v2/replay/Replayer.java +++ b/DB/src/main/java/io/deephaven/db/v2/replay/Replayer.java @@ -66,8 +66,7 @@ public Replayer(DBDateTime startTime, DBDateTime endTime) { */ @Override public void start() { - delta = nanosToTime(millisToNanos(System.currentTimeMillis())).getNanos() - - startTime.getNanos(); + delta = nanosToTime(millisToNanos(System.currentTimeMillis())).getNanos() - startTime.getNanos(); for (LiveTable currentTable : currentTables) { LiveTableMonitor.DEFAULT.addTable(currentTable); } @@ -103,8 +102,8 @@ public void shutdown() throws IOException { } /** - * Wait a specified interval for the replayer to complete. If the replayer has not completed by - * the end of the interval, the method returns. + * Wait a specified interval for the replayer to complete. If the replayer has not completed by the end of the + * interval, the method returns. * * @param maxTimeMillis maximum number of milliseconds to wait. * @throws QueryCancellationException thread was interrupted. 
@@ -118,8 +117,7 @@ public void waitDone(long maxTimeMillis) { LiveTableMonitor.DEFAULT.exclusiveLock().doLocked(() -> { while (!done && expiryTime > System.currentTimeMillis()) { try { - ltmCondition.await(expiryTime - System.currentTimeMillis(), - TimeUnit.MILLISECONDS); + ltmCondition.await(expiryTime - System.currentTimeMillis(), TimeUnit.MILLISECONDS); } catch (InterruptedException interruptIsCancel) { throw new QueryCancellationException("Interrupt detected", interruptIsCancel); } @@ -181,8 +179,7 @@ public long currentTimeNanos() { public DBDateTime currentTime() { if (delta == Long.MAX_VALUE) return startTime; - final DBDateTime result = - DBTimeUtils.minus(nanosToTime(millisToNanos(System.currentTimeMillis())), delta); + final DBDateTime result = DBTimeUtils.minus(nanosToTime(millisToNanos(System.currentTimeMillis())), delta); if (result.getNanos() > endTime.getNanos()) { return endTime; } @@ -215,8 +212,8 @@ public void setTime(long updatedTime) { */ @Override public DynamicTable replay(Table dataSource, String timeColumn) { - final ReplayTable result = new ReplayTable(dataSource.getIndex(), - dataSource.getColumnSourceMap(), timeColumn, this); + final ReplayTable result = + new ReplayTable(dataSource.getIndex(), dataSource.getColumnSourceMap(), timeColumn, this); currentTables.add(result); if (delta < Long.MAX_VALUE) { LiveTableMonitor.DEFAULT.addTable(result); @@ -225,9 +222,9 @@ public DynamicTable replay(Table dataSource, String timeColumn) { } /** - * Prepares a grouped historical table for replaying. This method can be faster than the - * ungrouped replay, but the performance increase comes with a cost. Within a group, the data - * ordering is maintained. Between groups, data ordering is not maintained for a time interval. + * Prepares a grouped historical table for replaying. This method can be faster than the ungrouped replay, but the + * performance increase comes with a cost. Within a group, the data ordering is maintained. 
Between groups, data + * ordering is not maintained for a time interval. * * @param dataSource historical table to replay * @param timeColumn column in the table containing timestamps @@ -236,7 +233,7 @@ public DynamicTable replay(Table dataSource, String timeColumn) { @Override public DynamicTable replayGrouped(Table dataSource, String timeColumn, String groupingColumn) { final ReplayGroupedFullTable result = new ReplayGroupedFullTable(dataSource.getIndex(), - dataSource.getColumnSourceMap(), timeColumn, this, groupingColumn); + dataSource.getColumnSourceMap(), timeColumn, this, groupingColumn); currentTables.add(result); if (delta < Long.MAX_VALUE) { LiveTableMonitor.DEFAULT.addTable(result); @@ -253,10 +250,9 @@ public DynamicTable replayGrouped(Table dataSource, String timeColumn, String gr * @return dynamic, replayed version of the last-by table. */ @Override - public DynamicTable replayGroupedLastBy(Table dataSource, String timeColumn, - String... groupingColumns) { + public DynamicTable replayGroupedLastBy(Table dataSource, String timeColumn, String... groupingColumns) { final ReplayLastByGroupedTable result = new ReplayLastByGroupedTable(dataSource.getIndex(), - dataSource.getColumnSourceMap(), timeColumn, this, groupingColumns); + dataSource.getColumnSourceMap(), timeColumn, this, groupingColumns); currentTables.add(result); if (delta < Long.MAX_VALUE) { LiveTableMonitor.DEFAULT.addTable(result); @@ -265,9 +261,8 @@ public DynamicTable replayGroupedLastBy(Table dataSource, String timeColumn, } /** - * Register the time column and index from a new table to replay. Most users will use - * replay, replayGrouped, or replayGroupedLastBy instead - * of this function. + * Register the time column and index from a new table to replay. Most users will use replay, + * replayGrouped, or replayGroupedLastBy instead of this function. * * @param index table index * @param timestampSource column source containing time information. 
diff --git a/DB/src/main/java/io/deephaven/db/v2/select/AbstractConditionFilter.java b/DB/src/main/java/io/deephaven/db/v2/select/AbstractConditionFilter.java index a24a2a91dd6..9a9ce640315 100644 --- a/DB/src/main/java/io/deephaven/db/v2/select/AbstractConditionFilter.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/AbstractConditionFilter.java @@ -24,8 +24,7 @@ import static io.deephaven.db.v2.select.DhFormulaColumn.COLUMN_SUFFIX; public abstract class AbstractConditionFilter extends SelectFilterImpl { - private static final Logger log = - ProcessEnvironment.getDefaultLog(AbstractConditionFilter.class); + private static final Logger log = ProcessEnvironment.getDefaultLog(AbstractConditionFilter.class); final Map outerToInnerNames; final Map innerToOuterNames; @NotNull @@ -47,8 +46,7 @@ protected AbstractConditionFilter(@NotNull String formula, boolean unboxArgument this.innerToOuterNames = Collections.emptyMap(); } - protected AbstractConditionFilter(@NotNull String formula, Map renames, - boolean unboxArguments) { + protected AbstractConditionFilter(@NotNull String formula, Map renames, boolean unboxArguments) { this.formula = formula; this.outerToInnerNames = renames; this.unboxArguments = unboxArguments; @@ -93,8 +91,7 @@ public synchronized void init(TableDefinition tableDefinition) { Class compType; for (ColumnDefinition column : tableDefinition.getColumns()) { final Class dbArrayType = DhFormulaColumn.getDbArrayType(column.getDataType()); - final String columnName = - innerToOuterNames.getOrDefault(column.getName(), column.getName()); + final String columnName = innerToOuterNames.getOrDefault(column.getName(), column.getName()); possibleVariables.put(columnName, column.getDataType()); possibleVariables.put(columnName + COLUMN_SUFFIX, dbArrayType); @@ -105,7 +102,7 @@ public synchronized void init(TableDefinition tableDefinition) { } if (dbArrayType == DbArray.class) { possibleVariableParameterizedTypes.put(columnName + COLUMN_SUFFIX, - new Class[] 
{column.getDataType()}); + new Class[] {column.getDataType()}); } } @@ -113,19 +110,15 @@ public synchronized void init(TableDefinition tableDefinition) { final DBTimeUtils.Result timeConversionResult = DBTimeUtils.convertExpression(formula); - log.debug("Expression (after time conversion) : " - + timeConversionResult.getConvertedFormula()); + log.debug("Expression (after time conversion) : " + timeConversionResult.getConvertedFormula()); possibleVariables.putAll(timeConversionResult.getNewVariables()); - final DBLanguageParser.Result result = - new DBLanguageParser(timeConversionResult.getConvertedFormula(), - QueryLibrary.getPackageImports(), QueryLibrary.getClassImports(), - QueryLibrary.getStaticImports(), possibleVariables, - possibleVariableParameterizedTypes, unboxArguments).getResult(); + final DBLanguageParser.Result result = new DBLanguageParser(timeConversionResult.getConvertedFormula(), + QueryLibrary.getPackageImports(), QueryLibrary.getClassImports(), QueryLibrary.getStaticImports(), + possibleVariables, possibleVariableParameterizedTypes, unboxArguments).getResult(); - log.debug( - "Expression (after language conversion) : " + result.getConvertedExpression()); + log.debug("Expression (after language conversion) : " + result.getConvertedExpression()); usedColumns = new ArrayList<>(); usedColumnArrays = new ArrayList<>(); @@ -135,8 +128,7 @@ public synchronized void init(TableDefinition tableDefinition) { final String columnToFind = outerToInnerNames.getOrDefault(variable, variable); final String arrayColumnToFind; if (variable.endsWith(COLUMN_SUFFIX)) { - final String originalName = - variable.substring(0, variable.length() - COLUMN_SUFFIX.length()); + final String originalName = variable.substring(0, variable.length() - COLUMN_SUFFIX.length()); arrayColumnToFind = outerToInnerNames.getOrDefault(originalName, originalName); } else { arrayColumnToFind = null; @@ -150,8 +142,7 @@ public synchronized void init(TableDefinition tableDefinition) { usesK = 
true; } else if (tableDefinition.getColumn(columnToFind) != null) { usedColumns.add(columnToFind); - } else if (arrayColumnToFind != null - && tableDefinition.getColumn(arrayColumnToFind) != null) { + } else if (arrayColumnToFind != null && tableDefinition.getColumn(arrayColumnToFind) != null) { usedColumnArrays.add(arrayColumnToFind); } else if (possibleParams.containsKey(variable)) { paramsList.add(possibleParams.get(variable)); @@ -160,29 +151,27 @@ public synchronized void init(TableDefinition tableDefinition) { params = paramsList.toArray(Param.ZERO_LENGTH_PARAM_ARRAY); // check if this is a filter that uses a numba vectorized function - Optional paramOptional = Arrays.stream(params) - .filter(p -> p.getValue() instanceof NumbaCallableWrapper).findFirst(); + Optional paramOptional = + Arrays.stream(params).filter(p -> p.getValue() instanceof NumbaCallableWrapper).findFirst(); if (paramOptional.isPresent()) { /* - * numba vectorized function must be used alone as an entire expression, and that - * should have been checked in the DBLanguageParser already, this is a sanity check + * numba vectorized function must be used alone as an entire expression, and that should have been + * checked in the DBLanguageParser already, this is a sanity check */ if (params.length != 1) { throw new UncheckedDeephavenException( - "internal error - misuse of numba vectorized functions wasn't detected."); + "internal error - misuse of numba vectorized functions wasn't detected."); } - NumbaCallableWrapper numbaCallableWrapper = - (NumbaCallableWrapper) paramOptional.get().getValue(); - DeephavenCompatibleFunction dcf = - DeephavenCompatibleFunction.create(numbaCallableWrapper.getPyObject(), + NumbaCallableWrapper numbaCallableWrapper = (NumbaCallableWrapper) paramOptional.get().getValue(); + DeephavenCompatibleFunction dcf = DeephavenCompatibleFunction.create(numbaCallableWrapper.getPyObject(), numbaCallableWrapper.getReturnType(), usedColumns.toArray(new String[0]), true); 
checkReturnType(result, dcf.getReturnedType()); setFilter(new ConditionFilter.ChunkFilter( - dcf.toFilterKernel(), - dcf.getColumnNames().toArray(new String[0]), - ConditionFilter.CHUNK_SIZE)); + dcf.toFilterKernel(), + dcf.getColumnNames().toArray(new String[0]), + ConditionFilter.CHUNK_SIZE)); initialized = true; return; } @@ -199,24 +188,20 @@ public synchronized void init(TableDefinition tableDefinition) { private void checkReturnType(DBLanguageParser.Result result, Class resultType) { if (!Boolean.class.equals(resultType) && !boolean.class.equals(resultType)) { - throw new RuntimeException( - "Invalid condition filter expression type: boolean required.\n" + + throw new RuntimeException("Invalid condition filter expression type: boolean required.\n" + "Formula : " + truncateLongFormula(formula) + '\n' + - "Converted Expression : " + truncateLongFormula(result.getConvertedExpression()) - + '\n' + + "Converted Expression : " + truncateLongFormula(result.getConvertedExpression()) + '\n' + "Expression Type : " + resultType.getName()); } } - protected abstract void generateFilterCode(TableDefinition tableDefinition, - DBTimeUtils.Result timeConversionResult, DBLanguageParser.Result result) - throws MalformedURLException, ClassNotFoundException; + protected abstract void generateFilterCode(TableDefinition tableDefinition, DBTimeUtils.Result timeConversionResult, + DBLanguageParser.Result result) throws MalformedURLException, ClassNotFoundException; @Override public Index filter(Index selection, Index fullSet, Table table, boolean usePrev) { if (usePrev && params.length > 0) { - throw new PreviousFilteringNotSupported( - "Previous filter with parameters not supported."); + throw new PreviousFilteringNotSupported("Previous filter with parameters not supported."); } final Filter filter; @@ -228,14 +213,13 @@ public Index filter(Index selection, Index fullSet, Table table, boolean usePrev return filter.filter(selection, fullSet, table, usePrev, formula, params); } - 
protected abstract Filter getFilter(Table table, Index fullSet) throws InstantiationException, - IllegalAccessException, NoSuchMethodException, InvocationTargetException; + protected abstract Filter getFilter(Table table, Index fullSet) + throws InstantiationException, IllegalAccessException, NoSuchMethodException, InvocationTargetException; /** - * When numba vectorized functions are used to evaluate query filters, we need to create a - * special ChunkFilter that can handle packing and unpacking arrays required/returned by the - * vectorized function, essentially bypassing the regular code generation process which isn't - * able to support such use cases without needing some major rework. + * When numba vectorized functions are used to evaluate query filters, we need to create a special ChunkFilter that + * can handle packing and unpacking arrays required/returned by the vectorized function, essentially bypassing the + * regular code generation process which isn't able to support such use cases without needing some major rework. * * @param filter */ @@ -261,12 +245,12 @@ public boolean isSimpleFilter() { public interface Filter { Index filter( - Index selection, - Index fullSet, - Table table, - boolean usePrev, - String formula, - Param... params); + Index selection, + Index fullSet, + Table table, + boolean usePrev, + String formula, + Param... params); } static String truncateLongFormula(String formula) { diff --git a/DB/src/main/java/io/deephaven/db/v2/select/AbstractFormulaColumn.java b/DB/src/main/java/io/deephaven/db/v2/select/AbstractFormulaColumn.java index 8cb59e49999..da922b1118d 100644 --- a/DB/src/main/java/io/deephaven/db/v2/select/AbstractFormulaColumn.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/AbstractFormulaColumn.java @@ -28,8 +28,7 @@ import java.util.*; /** - * A SelectColumn that implements a formula computed from the existing columns in the table and a - * query scope. 
+ * A SelectColumn that implements a formula computed from the existing columns in the table and a query scope. */ public abstract class AbstractFormulaColumn implements FormulaColumn { private static final Logger log = LoggerFactory.getLogger(AbstractFormulaColumn.class); @@ -37,9 +36,8 @@ public abstract class AbstractFormulaColumn implements FormulaColumn { private final boolean useKernelFormulas; - private static final boolean ALLOW_UNSAFE_REFRESHING_FORMULAS = - Configuration.getInstance().getBooleanForClassWithDefault(AbstractFormulaColumn.class, - "allowUnsafeRefreshingFormulas", false); + private static final boolean ALLOW_UNSAFE_REFRESHING_FORMULAS = Configuration.getInstance() + .getBooleanForClassWithDefault(AbstractFormulaColumn.class, "allowUnsafeRefreshingFormulas", false); protected String formulaString; @@ -69,8 +67,7 @@ public abstract class AbstractFormulaColumn implements FormulaColumn { * @param formulaString the formula string to be parsed by the DBLanguageParser * @param useKernelFormulas */ - protected AbstractFormulaColumn(String columnName, String formulaString, - boolean useKernelFormulas) { + protected AbstractFormulaColumn(String columnName, String formulaString, boolean useKernelFormulas) { this.formulaString = Require.neqNull(formulaString, "formulaString"); this.columnName = NameValidator.validateColumnName(columnName); this.useKernelFormulas = useKernelFormulas; @@ -87,8 +84,7 @@ public Class getReturnedType() { } @Override - public List initInputs(Index index, - Map columnsOfInterest) { + public List initInputs(Index index, Map columnsOfInterest) { if (this.index != null) { Assert.eq(this.index, "this.index", index, "index"); } @@ -101,8 +97,7 @@ public List initInputs(Index index, return initDef(extractDefinitions(columnsOfInterest)); } - protected void applyUsedVariables(Map columnDefinitionMap, - Set variablesUsed) { + protected void applyUsedVariables(Map columnDefinitionMap, Set variablesUsed) { final Map possibleParams = new 
HashMap<>(); final QueryScope queryScope = QueryScope.getScope(); for (Param param : queryScope.getParams(queryScope.getParamNames())) { @@ -123,9 +118,8 @@ protected void applyUsedVariables(Map columnDefinition } else if (columnDefinitionMap.get(variable) != null) { usedColumns.add(variable); } else if (variable.endsWith(COLUMN_SUFFIX) && columnDefinitionMap - .get(variable.substring(0, variable.length() - COLUMN_SUFFIX.length())) != null) { - usedColumnArrays - .add(variable.substring(0, variable.length() - COLUMN_SUFFIX.length())); + .get(variable.substring(0, variable.length() - COLUMN_SUFFIX.length())) != null) { + usedColumnArrays.add(variable.substring(0, variable.length() - COLUMN_SUFFIX.length())); } else if (possibleParams.containsKey(variable)) { paramsList.add(possibleParams.get(variable)); userParams.add(variable); @@ -135,8 +129,7 @@ protected void applyUsedVariables(Map columnDefinition params = paramsList.toArray(Param.ZERO_LENGTH_PARAM_ARRAY); for (Param param : paramsList) { try { - // noinspection ResultOfMethodCallIgnored, we only care that we can get the value - // here not what it is + // noinspection ResultOfMethodCallIgnored, we only care that we can get the value here not what it is param.getValue(); } catch (RuntimeException e) { throw new RuntimeException("Error retrieving " + param.getName(), e); @@ -156,14 +149,14 @@ public List getColumnArrays() { } private static Map extractDefinitions( - Map columnsOfInterest) { + Map columnsOfInterest) { final Map result = new LinkedHashMap<>(); for (Map.Entry entry : columnsOfInterest.entrySet()) { final String name = entry.getKey(); final Class type = entry.getValue().getType(); // noinspection unchecked final ColumnDefinition definition = - ColumnDefinition.fromGenericType(name, type, entry.getValue().getComponentType()); + ColumnDefinition.fromGenericType(name, type, entry.getValue().getComponentType()); result.put(name, definition); } return result; @@ -177,11 +170,11 @@ public ColumnSource 
updateData(WritableSource result, long destPos, long sourceP } /** - * Creates a {@link ColumnSource} that will evaluate the result of the {@link #formula} for a - * given row on demand when it is accessed. + * Creates a {@link ColumnSource} that will evaluate the result of the {@link #formula} for a given row on demand + * when it is accessed. *

      - * The result of this is the column source produced by calling {@link Table#updateView} or - * {@link Table#view} on a {@link Table}. + * The result of this is the column source produced by calling {@link Table#updateView} or {@link Table#view} on a + * {@link Table}. */ @NotNull @Override @@ -190,8 +183,8 @@ public ColumnSource getDataView() { } /** - * Creates a {@link ColumnSource} that will evaluate the result of the {@link #formula} for a - * given row on demand when it is accessed and cache the result + * Creates a {@link ColumnSource} that will evaluate the result of the {@link #formula} for a given row on demand + * when it is accessed and cache the result * * @return the column source produced by calling {@link Table#lazyUpdate} on a {@link Table}. */ @@ -205,16 +198,14 @@ public ColumnSource getLazyView() { private ColumnSource getViewColumnSource(boolean lazy) { final SecurityManager sm = System.getSecurityManager(); if (sm != null) { - // We explicitly want all Groovy commands to run under the 'file:/groovy/shell' source, - // so explicitly create that. + // We explicitly want all Groovy commands to run under the 'file:/groovy/shell' source, so explicitly create + // that. 
AccessControlContext context = null; try { final URL urlSource = new URL("file:/groovy/shell"); - final CodeSource codeSource = - new CodeSource(urlSource, (java.security.cert.Certificate[]) null); + final CodeSource codeSource = new CodeSource(urlSource, (java.security.cert.Certificate[]) null); final PermissionCollection perms = Policy.getPolicy().getPermissions(codeSource); - context = new AccessControlContext( - new ProtectionDomain[] {new ProtectionDomain(codeSource, perms)}); + context = new AccessControlContext(new ProtectionDomain[] {new ProtectionDomain(codeSource, perms)}); } catch (MalformedURLException e) { throw new RuntimeException("Invalid file path in groovy url source", e); } @@ -222,23 +213,20 @@ private ColumnSource getViewColumnSource(boolean lazy) { return AccessController.doPrivileged((PrivilegedAction) () -> { final Formula formula = getFormula(lazy, columnSources, params); // noinspection unchecked - return new ViewColumnSource( - (returnedType == boolean.class ? Boolean.class : returnedType), formula); + return new ViewColumnSource((returnedType == boolean.class ? Boolean.class : returnedType), formula); }, context); } else { final Formula formula = getFormula(lazy, columnSources, params); // noinspection unchecked - return new ViewColumnSource( - (returnedType == boolean.class ? Boolean.class : returnedType), formula); + return new ViewColumnSource((returnedType == boolean.class ? Boolean.class : returnedType), formula); } } private Formula getFormula(boolean initLazyMap, - Map columnsToData, - Param... params) { + Map columnsToData, + Param... params) { if (formulaFactory == null) { - formulaFactory = - useKernelFormulas ? createKernelFormulaFactory() : createFormulaFactory(); + formulaFactory = useKernelFormulas ? 
createKernelFormulaFactory() : createFormulaFactory(); } formula = formulaFactory.createFormula(index, initLazyMap, columnsToData, params); return formula; @@ -283,8 +271,7 @@ private FormulaFactory createKernelFormulaFactory() { return (index, lazy, columnsToData, params) -> { if (lazy) { - // Maybe warn that we ignore "lazy". By the way, "lazy" is the wrong term anyway. - // "lazy" doesn't mean + // Maybe warn that we ignore "lazy". By the way, "lazy" is the wrong term anyway. "lazy" doesn't mean // "cached", which is how we are using it. } final Map netColumnSources = new HashMap<>(); @@ -314,8 +301,8 @@ public String getName() { @Override public MatchPair getMatchPair() { - throw new UnsupportedOperationException("Formula " + columnName + " =" + formulaString - + " cannot be interpreted as a name value pair"); + throw new UnsupportedOperationException( + "Formula " + columnName + " =" + formulaString + " cannot be interpreted as a name value pair"); } @Override @@ -334,8 +321,7 @@ public WritableSource newDestInstance(long size) { @Override public boolean disallowRefresh() { - return !ALLOW_UNSAFE_REFRESHING_FORMULAS && !usesI && !usesII && !usesK - && usedColumnArrays.isEmpty(); + return !ALLOW_UNSAFE_REFRESHING_FORMULAS && !usesI && !usesII && !usesK && usedColumnArrays.isEmpty(); } static class ColumnArrayParameter { @@ -347,7 +333,7 @@ static class ColumnArrayParameter { final ColumnSource columnSource; public ColumnArrayParameter(String name, String bareName, Class dataType, Class dbArrayType, - String dbArrayTypeString, ColumnSource columnSource) { + String dbArrayTypeString, ColumnSource columnSource) { this.name = name; this.bareName = bareName; this.dataType = dataType; diff --git a/DB/src/main/java/io/deephaven/db/v2/select/AbstractRangeFilter.java b/DB/src/main/java/io/deephaven/db/v2/select/AbstractRangeFilter.java index 8350e14cdc2..3e8c2d01a3c 100644 --- a/DB/src/main/java/io/deephaven/db/v2/select/AbstractRangeFilter.java +++ 
b/DB/src/main/java/io/deephaven/db/v2/select/AbstractRangeFilter.java @@ -14,8 +14,8 @@ import java.util.regex.Pattern; /** - * A filter that determines if a column value is between an upper and lower bound (which each may - * either be inclusive or exclusive). + * A filter that determines if a column value is between an upper and lower bound (which each may either be inclusive or + * exclusive). */ public abstract class AbstractRangeFilter extends SelectFilterImpl { private static final Pattern decimalPattern = Pattern.compile("(-)?\\d+(?:\\.((\\d+)0*)?)?"); @@ -31,8 +31,7 @@ public abstract class AbstractRangeFilter extends SelectFilterImpl { */ ChunkFilter chunkFilter; /** - * If the column can be be reinterpreted to a long, then we should prefer to use the longFilter - * instead. + * If the column can be be reinterpreted to a long, then we should prefer to use the longFilter instead. * * In practice, this is used for reinterpretable DBDateTimes. */ @@ -51,8 +50,7 @@ public static SelectFilter makeBigDecimalRange(String columnName, String val) { final boolean positiveOrZero = parsed.signum() >= 0; return new ComparableRangeFilter(columnName, parsed, - positiveOrZero ? parsed.add(offset) : parsed.subtract(offset), positiveOrZero, - !positiveOrZero); + positiveOrZero ? 
parsed.add(offset) : parsed.subtract(offset), positiveOrZero, !positiveOrZero); } static int findPrecision(String val) { @@ -78,22 +76,18 @@ public List getColumnArrays() { @Override public Index filter(Index selection, Index fullSet, Table table, boolean usePrev) { final ColumnSource columnSource = table.getColumnSource(columnName); - final Optional orderForColumn = - SortedColumnsAttribute.getOrderForColumn(table, columnName); + final Optional orderForColumn = SortedColumnsAttribute.getOrderForColumn(table, columnName); if (orderForColumn.isPresent()) { // do binary search for value - return binarySearch(selection, columnSource, usePrev, - orderForColumn.get().isDescending()); + return binarySearch(selection, columnSource, usePrev, orderForColumn.get().isDescending()); } if (longFilter != null && columnSource.allowsReinterpret(long.class)) { - return ChunkFilter.applyChunkFilter(selection, columnSource.reinterpret(long.class), - usePrev, longFilter); + return ChunkFilter.applyChunkFilter(selection, columnSource.reinterpret(long.class), usePrev, longFilter); } return ChunkFilter.applyChunkFilter(selection, columnSource, usePrev, chunkFilter); } - abstract Index binarySearch(Index selection, ColumnSource columnSource, boolean usePrev, - boolean reverse); + abstract Index binarySearch(Index selection, ColumnSource columnSource, boolean usePrev, boolean reverse); @Override public boolean isSimpleFilter() { diff --git a/DB/src/main/java/io/deephaven/db/v2/select/AutoTuningIncrementalReleaseFilter.java b/DB/src/main/java/io/deephaven/db/v2/select/AutoTuningIncrementalReleaseFilter.java index 65abc9ef28b..452ab315669 100644 --- a/DB/src/main/java/io/deephaven/db/v2/select/AutoTuningIncrementalReleaseFilter.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/AutoTuningIncrementalReleaseFilter.java @@ -20,26 +20,22 @@ /** * Filter that releases the required number of rows from a table to saturate the LTM cycle. *

      - * The table has an initial size, which can be thought of as the size during query initialization. - * There is an initial number of rows that are released, which is then used to tune the number of - * rows to release on the subsequent cycle. + * The table has an initial size, which can be thought of as the size during query initialization. There is an initial + * number of rows that are released, which is then used to tune the number of rows to release on the subsequent cycle. *

      - * The targetFactor parameter is multiplied by the LTM's targetCycle. This allows you to determine - * how busy you want the LTM to be. For example a factor of 1, will attempt to hit the target cycle - * exactly. A target of 0.5 should result an LTM ratio of about 50%. A factor of 10 would mean that - * the system will extend beyond the target cycle time, coalesce updates accordingly and have a - * ratio that is nearly 100%. + * The targetFactor parameter is multiplied by the LTM's targetCycle. This allows you to determine how busy you want the + * LTM to be. For example a factor of 1, will attempt to hit the target cycle exactly. A target of 0.5 should result an + * LTM ratio of about 50%. A factor of 10 would mean that the system will extend beyond the target cycle time, coalesce + * updates accordingly and have a ratio that is nearly 100%. *

      - * The time the rows are released is recorded, and a terminal notification is enqueued to record the - * end of the cycle. On each cycle, the number of rows per second is computed; and then the number - * of rows released is the LTM's target cycle multiplied by the rows per second multiplied by the - * target factor. + * The time the rows are released is recorded, and a terminal notification is enqueued to record the end of the cycle. + * On each cycle, the number of rows per second is computed; and then the number of rows released is the LTM's target + * cycle multiplied by the rows per second multiplied by the target factor. * * *

      - * The AutotuningIncrementalReleaseFilter can be used to benchmark how many rows of data a query can - * process. In its simplest form we can measure how many rows a lastBy statement can process. For - * example: + * The AutotuningIncrementalReleaseFilter can be used to benchmark how many rows of data a query can process. In its + * simplest form we can measure how many rows a lastBy statement can process. For example: *

      * *
      @@ -51,8 +47,8 @@
        * currentQuote = quotesFiltered.lastBy("LocalCodeStr").update("Mid=(Bid + Ask)/2")
        * 
      * - * Produces a currentQuote table, and you can view the Log tab to determine how many rows per second - * were processed. The summary is sent to the WARN level: + * Produces a currentQuote table, and you can view the Log tab to determine how many rows per second were processed. The + * summary is sent to the WARN level: * *
        * 12:55:49.985 WARN Completed release 6.97 seconds, rows=19630961, rows/second=2,817,053.86
      @@ -79,13 +75,12 @@
        * 

      * *

      - * The AutotuningIncrementalReleaseFilter is best suited for queries that have a single source table - * with arbitrary amounts of processing on that table. Multiple incremental release filters may be - * combined, and each filter will report the number of rows that were released per second, however - * the data is not synchronized between tables and it is not possible to differentiate which table - * is contributing more to the query's load without examining the performance tables. You may need - * to adjust the initial size parameters so that one table does not complete processing before - * another. + * The AutotuningIncrementalReleaseFilter is best suited for queries that have a single source table with arbitrary + * amounts of processing on that table. Multiple incremental release filters may be combined, and each filter will + * report the number of rows that were released per second, however the data is not synchronized between tables and it + * is not possible to differentiate which table is contributing more to the query's load without examining the + * performance tables. You may need to adjust the initial size parameters so that one table does not complete processing + * before another. * *

        * import io.deephaven.db.v2.select.AutoTuningIncrementalReleaseFilter
      @@ -119,14 +114,12 @@ public class AutoTuningIncrementalReleaseFilter extends BaseIncrementalReleaseFi
            * Create an auto tuning release filter using a real time clock, without printing on each cycle.
            *
            * @param initialSize the initial table size
      -     * @param initialRelease the initial incremental update; after the first cycle the rows per
      -     *        second is calculated based on the duration of the last cycle and the number of rows
      -     *        released by this filter
      +     * @param initialRelease the initial incremental update; after the first cycle the rows per second is calculated
      +     *        based on the duration of the last cycle and the number of rows released by this filter
            * @param targetFactor the multiple of the LTM cycle we should aim for
            */
           @ScriptApi
      -    public AutoTuningIncrementalReleaseFilter(long initialSize, long initialRelease,
      -        double targetFactor) {
      +    public AutoTuningIncrementalReleaseFilter(long initialSize, long initialRelease, double targetFactor) {
               this(initialSize, initialRelease, targetFactor, false);
           }
       
      @@ -135,14 +128,13 @@ public AutoTuningIncrementalReleaseFilter(long initialSize, long initialRelease,
            *
            * @param logger the logger the final row/second calculations to
            * @param initialSize the initial table size
      -     * @param initialRelease the initial incremental update; after the first cycle the rows per
      -     *        second is calculated based on the duration of the last cycle and the number of rows
      -     *        released by this filter
      +     * @param initialRelease the initial incremental update; after the first cycle the rows per second is calculated
      +     *        based on the duration of the last cycle and the number of rows released by this filter
            * @param targetFactor the multiple of the LTM cycle we should aim for
            */
           @ScriptApi
           public AutoTuningIncrementalReleaseFilter(Logger logger, long initialSize, long initialRelease,
      -        double targetFactor) {
      +            double targetFactor) {
               this(logger, initialSize, initialRelease, targetFactor, false);
           }
       
      @@ -150,35 +142,32 @@ public AutoTuningIncrementalReleaseFilter(Logger logger, long initialSize, long
            * Create an auto tuning release filter using a real time clock.
            *
            * @param initialSize the initial table size
      -     * @param initialRelease the initial incremental update; after the first cycle the rows per
      -     *        second is calculated based on the duration of the last cycle and the number of rows
      -     *        released by this filter
      +     * @param initialRelease the initial incremental update; after the first cycle the rows per second is calculated
      +     *        based on the duration of the last cycle and the number of rows released by this filter
            * @param targetFactor the multiple of the LTM cycle we should aim for
      -     * @param verbose whether information should be printed on each LTM cycle describing the current
      -     *        rate and number of rows released
      +     * @param verbose whether information should be printed on each LTM cycle describing the current rate and number of
      +     *        rows released
            */
           @ScriptApi
      -    public AutoTuningIncrementalReleaseFilter(long initialSize, long initialRelease,
      -        double targetFactor, boolean verbose) {
      +    public AutoTuningIncrementalReleaseFilter(long initialSize, long initialRelease, double targetFactor,
      +            boolean verbose) {
               this(initialSize, initialRelease, targetFactor, verbose, getRealTimeProvider());
           }
       
           /**
            * Create an auto tuning release filter using the provided {@link TimeProvider}.
            *
      -     * @param logger the logger to report progress (if verbose is set) and the final row/second
      -     *        calculations
      +     * @param logger the logger to report progress (if verbose is set) and the final row/second calculations
            * @param initialSize the initial table size
      -     * @param initialRelease the initial incremental update; after the first cycle the rows per
      -     *        second is calculated based on the duration of the last cycle and the number of rows
      -     *        released by this filter
      +     * @param initialRelease the initial incremental update; after the first cycle the rows per second is calculated
      +     *        based on the duration of the last cycle and the number of rows released by this filter
            * @param targetFactor the multiple of the LTM cycle we should aim for
      -     * @param verbose whether information should be printed on each LTM cycle describing the current
      -     *        rate and number of rows released
      +     * @param verbose whether information should be printed on each LTM cycle describing the current rate and number of
      +     *        rows released
            */
           @ScriptApi
      -    public AutoTuningIncrementalReleaseFilter(Logger logger, long initialSize, long initialRelease,
      -        double targetFactor, boolean verbose) {
      +    public AutoTuningIncrementalReleaseFilter(Logger logger, long initialSize, long initialRelease, double targetFactor,
      +            boolean verbose) {
               this(logger, initialSize, initialRelease, targetFactor, verbose, getRealTimeProvider());
           }
       
      @@ -191,40 +180,34 @@ private static ClockTimeProvider getRealTimeProvider() {
            * Create an auto tuning release filter using the provided {@link TimeProvider}.
            *
            * @param initialSize the initial table size
      -     * @param initialRelease the initial incremental update; after the first cycle the rows per
      -     *        second is calculated based on the duration of the last cycle and the number of rows
      -     *        released by this filter
      +     * @param initialRelease the initial incremental update; after the first cycle the rows per second is calculated
      +     *        based on the duration of the last cycle and the number of rows released by this filter
            * @param targetFactor the multiple of the LTM cycle we should aim for
      -     * @param verbose whether information should be printed on each LTM cycle describing the current
      -     *        rate and number of rows released
      -     * @param timeProvider the time provider, which is used to determine the start and end of each
      -     *        cycle
      +     * @param verbose whether information should be printed on each LTM cycle describing the current rate and number of
      +     *        rows released
      +     * @param timeProvider the time provider, which is used to determine the start and end of each cycle
            */
           @ScriptApi
      -    public AutoTuningIncrementalReleaseFilter(long initialSize, long initialRelease,
      -        double targetFactor, boolean verbose, TimeProvider timeProvider) {
      -        this(ProcessEnvironment.getDefaultLog(), initialSize, initialRelease, targetFactor, verbose,
      -            timeProvider);
      +    public AutoTuningIncrementalReleaseFilter(long initialSize, long initialRelease, double targetFactor,
      +            boolean verbose, TimeProvider timeProvider) {
      +        this(ProcessEnvironment.getDefaultLog(), initialSize, initialRelease, targetFactor, verbose, timeProvider);
           }
       
           /**
            * Create an auto tuning release filter using the provided {@link TimeProvider}.
            *
      -     * @param logger the logger to report progress (if verbose is set) and the final row/second
      -     *        calculations
      +     * @param logger the logger to report progress (if verbose is set) and the final row/second calculations
            * @param initialSize the initial table size
      -     * @param initialRelease the initial incremental update; after the first cycle the rows per
      -     *        second is calculated based on the duration of the last cycle and the number of rows
      -     *        released by this filter
      +     * @param initialRelease the initial incremental update; after the first cycle the rows per second is calculated
      +     *        based on the duration of the last cycle and the number of rows released by this filter
            * @param targetFactor the multiple of the LTM cycle we should aim for
      -     * @param verbose whether information should be printed on each LTM cycle describing the current
      -     *        rate and number of rows released
      -     * @param timeProvider the time provider, which is used to determine the start and end of each
      -     *        cycle
      +     * @param verbose whether information should be printed on each LTM cycle describing the current rate and number of
      +     *        rows released
      +     * @param timeProvider the time provider, which is used to determine the start and end of each cycle
            */
           @ScriptApi
      -    public AutoTuningIncrementalReleaseFilter(Logger logger, long initialSize, long initialRelease,
      -        double targetFactor, boolean verbose, TimeProvider timeProvider) {
      +    public AutoTuningIncrementalReleaseFilter(Logger logger, long initialSize, long initialRelease, double targetFactor,
      +            boolean verbose, TimeProvider timeProvider) {
               super(initialSize);
               this.logger = logger;
               this.targetFactor = targetFactor;
      @@ -261,13 +244,11 @@ public void refresh() {
                       final double totalRowsPerNano = totalRows / totalNanos;
                       final double totalRowsPerSecond = totalRowsPerNano * 1_000_000_000L;
                       final double eta = (remaining / totalRowsPerSecond);
      -                logger.info().append("Releasing: ").append(nextSize)
      -                    .append(" rows, last rows/second: ")
      -                    .append(decimalFormat.format(rowsPerNanoSecond * 1_000_000_000L))
      -                    .append(", duration=").append(cycleDuration / 1000000L)
      -                    .append(" ms, total rows/second=")
      -                    .append(decimalFormat.format(totalRowsPerSecond)).append(", ETA ")
      -                    .append(decimalFormat.format(eta)).append(" sec").endl();
      +                logger.info().append("Releasing: ").append(nextSize).append(" rows, last rows/second: ")
      +                        .append(decimalFormat.format(rowsPerNanoSecond * 1_000_000_000L)).append(", duration=")
      +                        .append(cycleDuration / 1000000L).append(" ms, total rows/second=")
      +                        .append(decimalFormat.format(totalRowsPerSecond)).append(", ETA ")
      +                        .append(decimalFormat.format(eta)).append(" sec").endl();
                   }
               }
               LiveTableMonitor.DEFAULT.addNotification(new TerminalNotification() {
      @@ -282,10 +263,9 @@ public void run() {
                           final double durationSeconds = (double) durationNanos / (double) 1_000_000_000L;
                           final long rows = getReleasedSize();
                           final double rowsPerSecond = (double) rows / durationSeconds;
      -                    logger.warn().append("Completed release ")
      -                        .append(decimalFormat.format(durationSeconds)).append(" seconds, rows=")
      -                        .append(rows).append(", rows/second=")
      -                        .append(decimalFormat.format(rowsPerSecond)).endl();
      +                    logger.warn().append("Completed release ").append(decimalFormat.format(durationSeconds))
      +                            .append(" seconds, rows=").append(rows).append(", rows/second=")
      +                            .append(decimalFormat.format(rowsPerSecond)).endl();
                       }
                   }
               });
      @@ -300,7 +280,7 @@ void onReleaseAll() {
       
           @Override
           public AutoTuningIncrementalReleaseFilter copy() {
      -        return new AutoTuningIncrementalReleaseFilter(getInitialSize(), initialRelease,
      -            targetFactor, verbose, timeProvider);
      +        return new AutoTuningIncrementalReleaseFilter(getInitialSize(), initialRelease, targetFactor, verbose,
      +                timeProvider);
           }
       }
      diff --git a/DB/src/main/java/io/deephaven/db/v2/select/BaseIncrementalReleaseFilter.java b/DB/src/main/java/io/deephaven/db/v2/select/BaseIncrementalReleaseFilter.java
      index 6110641fde7..658825bac2e 100644
      --- a/DB/src/main/java/io/deephaven/db/v2/select/BaseIncrementalReleaseFilter.java
      +++ b/DB/src/main/java/io/deephaven/db/v2/select/BaseIncrementalReleaseFilter.java
      @@ -17,11 +17,10 @@
       /**
        * Base class for filters that will release more rows of a table on each LTM cycle.
        *
      - * The use case is for benchmarks that want to replay a table in order to better understand
      - * incremental processing capacity.
      + * The use case is for benchmarks that want to replay a table in order to better understand incremental processing
      + * capacity.
        */
      -public abstract class BaseIncrementalReleaseFilter extends SelectFilterLivenessArtifactImpl
      -    implements LiveTable {
      +public abstract class BaseIncrementalReleaseFilter extends SelectFilterLivenessArtifactImpl implements LiveTable {
           private final long initialSize;
           private long releasedSize;
           private long expectedSize;
      @@ -93,10 +92,7 @@ public long getExpectedSize() {
       
           @Override
           public boolean isSimpleFilter() {
      -        /*
      -         * This doesn't execute any user code, so it should be safe to execute it against untrusted
      -         * data.
      -         */
      +        /* This doesn't execute any user code, so it should be safe to execute it against untrusted data. */
               return true;
           }
       
      diff --git a/DB/src/main/java/io/deephaven/db/v2/select/ChunkFilter.java b/DB/src/main/java/io/deephaven/db/v2/select/ChunkFilter.java
      index ca228d1814b..462da12046b 100644
      --- a/DB/src/main/java/io/deephaven/db/v2/select/ChunkFilter.java
      +++ b/DB/src/main/java/io/deephaven/db/v2/select/ChunkFilter.java
      @@ -13,92 +13,91 @@ public interface ChunkFilter {
           /**
            * Filter a chunk of values, setting parallel values in results to "true" or "false".
            *
      -     * The results chunk must have capacity at least as large as values.size(); and the result size
      -     * will be set to values.size() on return.
      +     * The results chunk must have capacity at least as large as values.size(); and the result size will be set to
      +     * values.size() on return.
            * 
            * @param values the values to filter
      -     * @param results a boolean chunk with true values for items that match the filter, and false
      -     *        otherwise
      +     * @param results a boolean chunk with true values for items that match the filter, and false otherwise
            */
           void filter(Chunk values, LongChunk keys,
      -        WritableLongChunk results);
      +            WritableLongChunk results);
       
           interface CharChunkFilter extends ChunkFilter {
               void filter(CharChunk values, LongChunk keys,
      -            WritableLongChunk results);
      +                WritableLongChunk results);
       
               default void filter(Chunk values, LongChunk keys,
      -            WritableLongChunk results) {
      +                WritableLongChunk results) {
                   filter(values.asCharChunk(), keys, results);
               }
           }
       
           interface ByteChunkFilter extends ChunkFilter {
               void filter(ByteChunk values, LongChunk keys,
      -            WritableLongChunk results);
      +                WritableLongChunk results);
       
               default void filter(Chunk values, LongChunk keys,
      -            WritableLongChunk results) {
      +                WritableLongChunk results) {
                   filter(values.asByteChunk(), keys, results);
               }
           }
       
           interface ShortChunkFilter extends ChunkFilter {
               void filter(ShortChunk values, LongChunk keys,
      -            WritableLongChunk results);
      +                WritableLongChunk results);
       
               default void filter(Chunk values, LongChunk keys,
      -            WritableLongChunk results) {
      +                WritableLongChunk results) {
                   filter(values.asShortChunk(), keys, results);
               }
           }
       
           interface IntChunkFilter extends ChunkFilter {
               void filter(IntChunk values, LongChunk keys,
      -            WritableLongChunk results);
      +                WritableLongChunk results);
       
               default void filter(Chunk values, LongChunk keys,
      -            WritableLongChunk results) {
      +                WritableLongChunk results) {
                   filter(values.asIntChunk(), keys, results);
               }
           }
       
           interface LongChunkFilter extends ChunkFilter {
               void filter(LongChunk values, LongChunk keys,
      -            WritableLongChunk results);
      +                WritableLongChunk results);
       
               default void filter(Chunk values, LongChunk keys,
      -            WritableLongChunk results) {
      +                WritableLongChunk results) {
                   filter(values.asLongChunk(), keys, results);
               }
           }
       
           interface FloatChunkFilter extends ChunkFilter {
               void filter(FloatChunk values, LongChunk keys,
      -            WritableLongChunk results);
      +                WritableLongChunk results);
       
               default void filter(Chunk values, LongChunk keys,
      -            WritableLongChunk results) {
      +                WritableLongChunk results) {
                   filter(values.asFloatChunk(), keys, results);
               }
           }
       
           interface DoubleChunkFilter extends ChunkFilter {
               void filter(DoubleChunk values, LongChunk keys,
      -            WritableLongChunk results);
      +                WritableLongChunk results);
       
               default void filter(Chunk values, LongChunk keys,
      -            WritableLongChunk results) {
      +                WritableLongChunk results) {
                   filter(values.asDoubleChunk(), keys, results);
               }
           }
       
           interface ObjectChunkFilter extends ChunkFilter {
               void filter(ObjectChunk values, LongChunk keys,
      -            WritableLongChunk results);
      +                WritableLongChunk results);
       
               default void filter(Chunk values, LongChunk keys,
      -            WritableLongChunk results) {
      +                WritableLongChunk results) {
                   filter(values.asObjectChunk(), keys, results);
               }
           }
      @@ -121,20 +120,18 @@ default void filter(Chunk values, LongChunk
            */
           int FILTER_CHUNK_SIZE = 2048;
           /**
      -     * How many values we wait for before checking for interruption and throwing a cancellation
      -     * exception
      +     * How many values we wait for before checking for interruption and throwing a cancellation exception
            */
      -    long INITIAL_INTERRUPTION_SIZE = Configuration.getInstance()
      -        .getLongWithDefault("ChunkFilter.initialInterruptionSize", 1 << 20);
      +    long INITIAL_INTERRUPTION_SIZE =
      +            Configuration.getInstance().getLongWithDefault("ChunkFilter.initialInterruptionSize", 1 << 20);
           /**
            * How long we would like to take, in milliseconds between interruption checks
            */
           long INTERRUPTION_GOAL_MILLIS =
      -        Configuration.getInstance().getLongWithDefault("ChunkFilter.interruptionGoalMillis", 100);
      +            Configuration.getInstance().getLongWithDefault("ChunkFilter.interruptionGoalMillis", 100);
       
           /**
      -     * Apply a chunk filter to an Index and column source, producing a new Index that is responsive
      -     * to the filter.
      +     * Apply a chunk filter to an Index and column source, producing a new Index that is responsive to the filter.
            *
            * @param selection the Index to filter
            * @param columnSource the column source to filter
      @@ -144,7 +141,7 @@ default void filter(Chunk values, LongChunk
            * @return a new Index representing the filtered values
            */
           static Index applyChunkFilter(Index selection, ColumnSource columnSource, boolean usePrev,
      -        ChunkFilter chunkFilter) {
      +            ChunkFilter chunkFilter) {
               final Index.SequentialBuilder builder = Index.FACTORY.getSequentialBuilder();
       
               final int contextSize = (int) Math.min(FILTER_CHUNK_SIZE, selection.size());
      @@ -153,9 +150,8 @@ static Index applyChunkFilter(Index selection, ColumnSource columnSource, boo
               long lastInterruptCheck = System.currentTimeMillis();
       
               try (final ColumnSource.GetContext getContext = columnSource.makeGetContext(contextSize);
      -            final WritableLongChunk longChunk =
      -                WritableLongChunk.makeWritableChunk(contextSize);
      -            final OrderedKeys.Iterator okIt = selection.getOrderedKeysIterator()) {
      +                final WritableLongChunk longChunk = WritableLongChunk.makeWritableChunk(contextSize);
      +                final OrderedKeys.Iterator okIt = selection.getOrderedKeysIterator()) {
                   while (okIt.hasMore()) {
                       if (filteredChunks++ == chunksBetweenChecks) {
                           if (Thread.interrupted()) {
      @@ -166,8 +162,7 @@ static Index applyChunkFilter(Index selection, ColumnSource columnSource, boo
                           final long checkDuration = now - lastInterruptCheck;
       
                           // tune so that we check at the desired interval, never less than one chunk
      -                    chunksBetweenChecks =
      -                        Math.max(1, Math.min(1, checkDuration <= 0 ? chunksBetweenChecks * 2
      +                    chunksBetweenChecks = Math.max(1, Math.min(1, checkDuration <= 0 ? chunksBetweenChecks * 2
                                   : chunksBetweenChecks * INTERRUPTION_GOAL_MILLIS / checkDuration));
                           lastInterruptCheck = now;
                           filteredChunks = 0;
      diff --git a/DB/src/main/java/io/deephaven/db/v2/select/ClockFilter.java b/DB/src/main/java/io/deephaven/db/v2/select/ClockFilter.java
      index ffc8b252c0b..1356c89fb84 100644
      --- a/DB/src/main/java/io/deephaven/db/v2/select/ClockFilter.java
      +++ b/DB/src/main/java/io/deephaven/db/v2/select/ClockFilter.java
      @@ -25,8 +25,7 @@
       /**
        * Boilerplate super-class for various clock-oriented filters.
        */
      -public abstract class ClockFilter extends SelectFilterLivenessArtifactImpl
      -    implements ReindexingFilter, LiveTable {
      +public abstract class ClockFilter extends SelectFilterLivenessArtifactImpl implements ReindexingFilter, LiveTable {
       
           protected final String columnName;
           protected final Clock clock;
      @@ -36,8 +35,7 @@ public abstract class ClockFilter extends SelectFilterLivenessArtifactImpl
           private QueryTable resultTable;
       
           @SuppressWarnings("WeakerAccess")
      -    public ClockFilter(@NotNull final String columnName, @NotNull final Clock clock,
      -        final boolean live) {
      +    public ClockFilter(@NotNull final String columnName, @NotNull final Clock clock, final boolean live) {
               this.columnName = columnName;
               this.clock = clock;
               this.live = live;
      @@ -57,39 +55,38 @@ public final List getColumnArrays() {
           }
       
           @Override
      -    public final Index filter(@NotNull final Index selection, @NotNull final Index fullSet,
      -        @NotNull final Table table, boolean usePrev) {
      +    public final Index filter(@NotNull final Index selection, @NotNull final Index fullSet, @NotNull final Table table,
      +            boolean usePrev) {
               if (usePrev) {
                   throw new PreviousFilteringNotSupported();
               }
       
               // We have no support for refreshing tables, nor any known use cases for that support.
               Require.requirement(DynamicNode.notDynamicOrNotRefreshing(table),
      -            "DynamicNode.notDynamicOrNotRefreshing(table)");
      +                "DynamicNode.notDynamicOrNotRefreshing(table)");
       
               // noinspection unchecked
               final ColumnSource dateTimeColumnSource = table.getColumnSource(columnName);
               // Obviously, column needs to be of date-time values.
               Require.requirement(DBDateTime.class.isAssignableFrom(dateTimeColumnSource.getType()),
      -            "DBDateTime.class.isAssignableFrom(dateTimeColumnSource.getType())");
      +                "DBDateTime.class.isAssignableFrom(dateTimeColumnSource.getType())");
       
               // noinspection unchecked
               nanosColumnSource = dateTimeColumnSource.allowsReinterpret(long.class)
      -            ? table.dateTimeColumnAsNanos(columnName).getColumnSource(columnName)
      -            : table.view(columnName + " = isNull(" + columnName + ") ? NULL_LONG : " + columnName
      -                + ".getNanos()").getColumnSource(columnName);
      +                ? table.dateTimeColumnAsNanos(columnName).getColumnSource(columnName)
      +                : table.view(columnName + " = isNull(" + columnName + ") ? NULL_LONG : " + columnName + ".getNanos()")
      +                        .getColumnSource(columnName);
       
               final Index initial = initializeAndGetInitialIndex(selection, fullSet, table);
               return initial == null ? Index.FACTORY.getEmptyIndex() : initial;
           }
       
           protected abstract @Nullable Index initializeAndGetInitialIndex(@NotNull final Index selection,
      -        @NotNull final Index fullSet, @NotNull final Table table);
      +            @NotNull final Index fullSet, @NotNull final Table table);
       
           @Override
           public final boolean isSimpleFilter() {
      -        // This doesn't execute any user code, so it should be safe to execute it before ACL filters
      -        // are applied.
      +        // This doesn't execute any user code, so it should be safe to execute it before ACL filters are applied.
               return true;
           }
       
      @@ -119,8 +116,7 @@ public final void refresh() {
               final Index added = updateAndGetAddedIndex();
               if (added != null && !added.empty()) {
                   resultTable.getIndex().insert(added);
      -            resultTable.notifyListeners(added, Index.FACTORY.getEmptyIndex(),
      -                Index.FACTORY.getEmptyIndex());
      +            resultTable.notifyListeners(added, Index.FACTORY.getEmptyIndex(), Index.FACTORY.getEmptyIndex());
               }
           }
       
      @@ -149,12 +145,12 @@ protected boolean isEmpty() {
       
               @Nullable
               Index.RandomBuilder consumeKeysAndAppendAdded(final ColumnSource nanosColumnSource,
      -            final long nowNanos,
      -            @Nullable Index.RandomBuilder addedBuilder) {
      +                final long nowNanos,
      +                @Nullable Index.RandomBuilder addedBuilder) {
                   final long firstKeyAdded = nextKey;
                   long lastKeyAdded = -1L;
      -            while (nextKey <= lastKey && DBLanguageFunctionUtil
      -                .lessEquals(nanosColumnSource.getLong(nextKey), nowNanos)) {
      +            while (nextKey <= lastKey
      +                    && DBLanguageFunctionUtil.lessEquals(nanosColumnSource.getLong(nextKey), nowNanos)) {
                       lastKeyAdded = nextKey++;
                   }
                   if (lastKeyAdded == -1L) {
      diff --git a/DB/src/main/java/io/deephaven/db/v2/select/ComparableRangeFilter.java b/DB/src/main/java/io/deephaven/db/v2/select/ComparableRangeFilter.java
      index 366b64eb17b..e1bbec46c09 100644
      --- a/DB/src/main/java/io/deephaven/db/v2/select/ComparableRangeFilter.java
      +++ b/DB/src/main/java/io/deephaven/db/v2/select/ComparableRangeFilter.java
      @@ -15,8 +15,8 @@ public class ComparableRangeFilter extends AbstractRangeFilter {
           private final Comparable upper;
           private final Comparable lower;
       
      -    ComparableRangeFilter(String columnName, Comparable val1, Comparable val2,
      -        boolean lowerInclusive, boolean upperInclusive) {
      +    ComparableRangeFilter(String columnName, Comparable val1, Comparable val2, boolean lowerInclusive,
      +            boolean upperInclusive) {
               super(columnName, lowerInclusive, upperInclusive);
       
               if (DhObjectComparisons.compare(val1, val2) > 0) {
      @@ -29,8 +29,8 @@ public class ComparableRangeFilter extends AbstractRangeFilter {
           }
       
           @TestUseOnly
      -    public static ComparableRangeFilter makeForTest(String columnName, Comparable lower,
      -        Comparable upper, boolean lowerInclusive, boolean upperInclusive) {
      +    public static ComparableRangeFilter makeForTest(String columnName, Comparable lower, Comparable upper,
      +            boolean lowerInclusive, boolean upperInclusive) {
               return new ComparableRangeFilter(columnName, lower, upper, lowerInclusive, upperInclusive);
           }
       
      @@ -42,20 +42,18 @@ public void init(TableDefinition tableDefinition) {
       
               final ColumnDefinition def = tableDefinition.getColumn(columnName);
               if (def == null) {
      -            throw new RuntimeException(
      -                "Column \"" + columnName + "\" doesn't exist in this table, available columns: "
      +            throw new RuntimeException("Column \"" + columnName + "\" doesn't exist in this table, available columns: "
                           + tableDefinition.getColumnNames());
               }
       
               Assert.assertion(Comparable.class.isAssignableFrom(def.getDataType()),
      -            "Comparable.class.isAssignableFrom(def.getDataType())", def.getDataType(),
      -            "def.getDataType()");
      +                "Comparable.class.isAssignableFrom(def.getDataType())", def.getDataType(), "def.getDataType()");
       
               chunkFilter = makeComparableChunkFilter(lower, upper, lowerInclusive, upperInclusive);
           }
       
      -    public static ChunkFilter makeComparableChunkFilter(Comparable lower, Comparable upper,
      -        boolean lowerInclusive, boolean upperInclusive) {
      +    public static ChunkFilter makeComparableChunkFilter(Comparable lower, Comparable upper, boolean lowerInclusive,
      +            boolean upperInclusive) {
               if (lowerInclusive) {
                   if (upperInclusive) {
                       return new InclusiveInclusiveComparableChunkFilter(lower, upper);
      @@ -79,8 +77,8 @@ public SelectFilter copy() {
           @Override
           public String toString() {
               return "ComparableRangeFilter(" + columnName + " in " +
      -            (lowerInclusive ? "[" : "(") + lower + "," + upper +
      -            (upperInclusive ? "]" : ")") + ")";
      +                (lowerInclusive ? "[" : "(") + lower + "," + upper +
      +                (upperInclusive ? "]" : ")") + ")";
           }
       
           private static class InclusiveInclusiveComparableChunkFilter implements ChunkFilter {
      @@ -94,9 +92,8 @@ private InclusiveInclusiveComparableChunkFilter(Comparable lower, Comparable upp
       
               @Override
               public void filter(Chunk values, LongChunk keys,
      -            WritableLongChunk results) {
      -            final ObjectChunk objectChunk =
      -                values.asObjectChunk();
      +                WritableLongChunk results) {
      +            final ObjectChunk objectChunk = values.asObjectChunk();
       
                   results.setSize(0);
                   for (int ii = 0; ii < values.size(); ++ii) {
      @@ -128,9 +125,8 @@ private InclusiveExclusiveComparableChunkFilter(Comparable lower, Comparable upp
       
               @Override
               public void filter(Chunk values, LongChunk keys,
      -            WritableLongChunk results) {
      -            final ObjectChunk objectChunk =
      -                values.asObjectChunk();
      +                WritableLongChunk results) {
      +            final ObjectChunk objectChunk = values.asObjectChunk();
       
                   results.setSize(0);
                   for (int ii = 0; ii < values.size(); ++ii) {
      @@ -162,9 +158,8 @@ private ExclusiveInclusiveComparableChunkFilter(Comparable lower, Comparable upp
       
               @Override
               public void filter(Chunk values, LongChunk keys,
      -            WritableLongChunk results) {
      -            final ObjectChunk objectChunk =
      -                values.asObjectChunk();
      +                WritableLongChunk results) {
      +            final ObjectChunk objectChunk = values.asObjectChunk();
       
                   results.setSize(0);
                   for (int ii = 0; ii < values.size(); ++ii) {
      @@ -198,9 +193,8 @@ private ExclusiveExclusiveComparableChunkFilter(Comparable lower, Comparable upp
       
               @Override
               public void filter(Chunk values, LongChunk keys,
      -            WritableLongChunk results) {
      -            final ObjectChunk objectChunk =
      -                values.asObjectChunk();
      +                WritableLongChunk results) {
      +            final ObjectChunk objectChunk = values.asObjectChunk();
       
                   results.setSize(0);
                   for (int ii = 0; ii < values.size(); ++ii) {
      @@ -223,15 +217,13 @@ boolean meetsUpperBound(Comparable value) {
           }
       
           @Override
      -    Index binarySearch(Index selection, ColumnSource columnSource, boolean usePrev,
      -        boolean reverse) {
      +    Index binarySearch(Index selection, ColumnSource columnSource, boolean usePrev, boolean reverse) {
               if (selection.isEmpty()) {
                   return selection;
               }
       
               // noinspection unchecked
      -        final ColumnSource comparableColumnSource =
      -            (ColumnSource) columnSource;
      +        final ColumnSource comparableColumnSource = (ColumnSource) columnSource;
       
               final Comparable startValue = reverse ? upper : lower;
               final Comparable endValue = reverse ? lower : upper;
      @@ -239,26 +231,25 @@ Index binarySearch(Index selection, ColumnSource columnSource, boolean usePrev,
               final boolean endInclusive = reverse ? lowerInclusive : upperInclusive;
               final int compareSign = reverse ? -1 : 1;
       
      -        long lowerBoundMin = bound(selection, usePrev, comparableColumnSource, 0, selection.size(),
      -            startValue, startInclusive, compareSign, false);
      -        long upperBoundMin = bound(selection, usePrev, comparableColumnSource, lowerBoundMin,
      -            selection.size(), endValue, endInclusive, compareSign, true);
      +        long lowerBoundMin = bound(selection, usePrev, comparableColumnSource, 0, selection.size(), startValue,
      +                startInclusive, compareSign, false);
      +        long upperBoundMin = bound(selection, usePrev, comparableColumnSource, lowerBoundMin, selection.size(),
      +                endValue, endInclusive, compareSign, true);
       
               return selection.subindexByPos(lowerBoundMin, upperBoundMin);
           }
       
       
      -    static long bound(Index selection, boolean usePrev,
      -        ColumnSource comparableColumnSource, long minPosition, long maxPosition,
      -        Comparable targetValue, boolean inclusive, int compareSign, boolean end) {
      +    static long bound(Index selection, boolean usePrev, ColumnSource comparableColumnSource,
      +            long minPosition, long maxPosition, Comparable targetValue, boolean inclusive, int compareSign,
      +            boolean end) {
               while (minPosition < maxPosition) {
                   final long midPos = (minPosition + maxPosition) / 2;
                   final long midIdx = selection.get(midPos);
       
      -            final Comparable compareValue = usePrev ? comparableColumnSource.getPrev(midIdx)
      -                : comparableColumnSource.get(midIdx);
      -            final int compareResult =
      -                compareSign * DhObjectComparisons.compare(compareValue, targetValue);
      +            final Comparable compareValue =
      +                    usePrev ? comparableColumnSource.getPrev(midIdx) : comparableColumnSource.get(midIdx);
      +            final int compareResult = compareSign * DhObjectComparisons.compare(compareValue, targetValue);
       
                   if (compareResult < 0) {
                       minPosition = midPos + 1;
      diff --git a/DB/src/main/java/io/deephaven/db/v2/select/ComposedFilter.java b/DB/src/main/java/io/deephaven/db/v2/select/ComposedFilter.java
      index 78c33661b85..42683da4c8d 100644
      --- a/DB/src/main/java/io/deephaven/db/v2/select/ComposedFilter.java
      +++ b/DB/src/main/java/io/deephaven/db/v2/select/ComposedFilter.java
      @@ -13,15 +13,14 @@
       import java.util.*;
       import java.util.stream.Stream;
       
      -public abstract class ComposedFilter extends SelectFilterLivenessArtifactImpl
      -    implements DependencyStreamProvider {
      +public abstract class ComposedFilter extends SelectFilterLivenessArtifactImpl implements DependencyStreamProvider {
           final SelectFilter[] componentFilters;
       
           ComposedFilter(SelectFilter[] componentFilters) {
               for (final SelectFilter componentFilter : componentFilters) {
                   if (componentFilter instanceof ReindexingFilter) {
                       throw new UnsupportedOperationException(
      -                    "ComposedFilters do not support ReindexingFilters: " + componentFilter);
      +                        "ComposedFilters do not support ReindexingFilters: " + componentFilter);
                   }
               }
               this.componentFilters = componentFilters;
      @@ -32,8 +31,7 @@ public abstract class ComposedFilter extends SelectFilterLivenessArtifactImpl
                   }
               }
       
      -        setAutomatedFilter(
      -            Arrays.stream(componentFilters).allMatch(SelectFilter::isAutomatedFilter));
      +        setAutomatedFilter(Arrays.stream(componentFilters).allMatch(SelectFilter::isAutomatedFilter));
           }
       
           @Override
      @@ -100,10 +98,10 @@ public boolean isRefreshing() {
           @Override
           public Stream getDependencyStream() {
               return Stream.concat(
      -            Arrays.stream(componentFilters).filter(f -> f instanceof NotificationQueue.Dependency)
      -                .map(f -> (NotificationQueue.Dependency) f),
      -            Arrays.stream(componentFilters).filter(f -> f instanceof DependencyStreamProvider)
      -                .flatMap(f -> ((DependencyStreamProvider) f).getDependencyStream()));
      +                Arrays.stream(componentFilters).filter(f -> f instanceof NotificationQueue.Dependency)
      +                        .map(f -> (NotificationQueue.Dependency) f),
      +                Arrays.stream(componentFilters).filter(f -> f instanceof DependencyStreamProvider)
      +                        .flatMap(f -> ((DependencyStreamProvider) f).getDependencyStream()));
           }
       
           @Override
      diff --git a/DB/src/main/java/io/deephaven/db/v2/select/ConditionFilter.java b/DB/src/main/java/io/deephaven/db/v2/select/ConditionFilter.java
      index 53aae57dd92..c21da03e5a9 100644
      --- a/DB/src/main/java/io/deephaven/db/v2/select/ConditionFilter.java
      +++ b/DB/src/main/java/io/deephaven/db/v2/select/ConditionFilter.java
      @@ -56,14 +56,12 @@ private ConditionFilter(@NotNull String formula, Map renames) {
               super(formula, renames, false);
           }
       
      -    public static SelectFilter createConditionFilter(@NotNull String formula,
      -        FormulaParserConfiguration parser) {
      +    public static SelectFilter createConditionFilter(@NotNull String formula, FormulaParserConfiguration parser) {
               switch (parser) {
                   case Deephaven:
                       return new ConditionFilter(formula);
                   case Numba:
      -                throw new UnsupportedOperationException(
      -                    "Python condition filter should be created from python");
      +                throw new UnsupportedOperationException("Python condition filter should be created from python");
                   default:
                       throw new UnsupportedOperationException("Unknow parser type " + parser);
               }
      @@ -95,7 +93,7 @@ public void close() {
               CONTEXT getContext(int maxChunkSize);
       
               LongChunk filter(CONTEXT context, LongChunk indices,
      -            Chunk... inputChunks);
      +                Chunk... inputChunks);
           }
       
       
      @@ -141,10 +139,9 @@ public Context getContext(int chunkSize) {
       
               @Override
               public Chunk getChunk(@NotNull io.deephaven.db.v2.sources.chunk.Context context,
      -            @NotNull OrderedKeys orderedKeys) {
      +                @NotNull OrderedKeys orderedKeys) {
                   final WritableLongChunk wlc = ((Context) context).chunk;
      -            final OrderedKeys valuesForChunk =
      -                invertedIterator.getNextOrderedKeysWithLength(orderedKeys.size());
      +            final OrderedKeys valuesForChunk = invertedIterator.getNextOrderedKeysWithLength(orderedKeys.size());
                   valuesForChunk.fillKeyIndicesChunk(wlc);
                   return wlc;
               }
      @@ -178,7 +175,7 @@ public Context getContext(int chunkSize) {
       
               @Override
               public Chunk getChunk(@NotNull io.deephaven.db.v2.sources.chunk.Context context,
      -            @NotNull OrderedKeys orderedKeys) {
      +                @NotNull OrderedKeys orderedKeys) {
                   final LongChunk lc = super.getChunk(context, orderedKeys).asLongChunk();
                   final WritableIntChunk wic = ((IntegerContext) context).intChunk;
                   wic.setSize(lc.size());
      @@ -224,7 +221,7 @@ public static final class ColumnIICount extends IndexCount {
       
               @Override
               public Chunk getChunk(@NotNull io.deephaven.db.v2.sources.chunk.Context context,
      -            @NotNull OrderedKeys orderedKeys) {
      +                @NotNull OrderedKeys orderedKeys) {
                   final Context ctx = (Context) context;
                   final WritableLongChunk wlc = ctx.chunk.asWritableLongChunk();
                   for (int i = 0; i < orderedKeys.size(); i++) {
      @@ -242,7 +239,7 @@ public static final class ColumnICount extends IndexCount {
       
               @Override
               public Chunk getChunk(@NotNull io.deephaven.db.v2.sources.chunk.Context context,
      -            @NotNull OrderedKeys orderedKeys) {
      +                @NotNull OrderedKeys orderedKeys) {
                   final Context ctx = (Context) context;
                   final WritableIntChunk wic = ctx.chunk.asWritableIntChunk();
                   for (int ii = 0; ii < orderedKeys.size(); ii++) {
      @@ -279,23 +276,22 @@ public ChunkFilter(FilterKernel filterKernel, String[] columnNames, int chunkSiz
               }
       
               private SharedContext populateChunkGettersAndContexts(
      -            final Index selection, final Index fullSet, final Table table, final boolean usePrev,
      -            final ChunkGetter[] chunkGetters, final Context[] sourceContexts) {
      -            final SharedContext sharedContext =
      -                (columnNames.length > 1) ? SharedContext.makeSharedContext() : null;
      +                final Index selection, final Index fullSet, final Table table, final boolean usePrev,
      +                final ChunkGetter[] chunkGetters, final Context[] sourceContexts) {
      +            final SharedContext sharedContext = (columnNames.length > 1) ? SharedContext.makeSharedContext() : null;
                   for (int i = 0; i < columnNames.length; i++) {
                       final String columnName = columnNames[i];
                       final ChunkGetterWithContext chunkGetterWithContext;
                       switch (columnName) {
                           case "i":
      -                        chunkGetterWithContext = (selection == fullSet ? new ColumnICount()
      -                            : new ColumnILookup(fullSet, selection));
      +                        chunkGetterWithContext =
      +                                (selection == fullSet ? new ColumnICount() : new ColumnILookup(fullSet, selection));
                               chunkGetters[i] = chunkGetterWithContext;
                               sourceContexts[i] = chunkGetterWithContext.getContext(chunkSize);
                               break;
                           case "ii":
      -                        chunkGetterWithContext = (selection == fullSet ? new ColumnIICount()
      -                            : new IndexLookup(fullSet, selection));
      +                        chunkGetterWithContext =
      +                                (selection == fullSet ? new ColumnIICount() : new IndexLookup(fullSet, selection));
                               chunkGetters[i] = chunkGetterWithContext;
                               sourceContexts[i] = chunkGetterWithContext.getContext(chunkSize);
                               break;
      @@ -307,10 +303,10 @@ private SharedContext populateChunkGettersAndContexts(
                           default: {
                               final ColumnSource columnSource = table.getColumnSource(columnName);
                               chunkGetters[i] = usePrev
      -                            ? (context, orderedKeys) -> columnSource
      -                                .getPrevChunk((ColumnSource.GetContext) context, orderedKeys)
      -                            : (context, orderedKeys) -> columnSource
      -                                .getChunk((ColumnSource.GetContext) context, orderedKeys);
      +                                ? (context, orderedKeys) -> columnSource.getPrevChunk((ColumnSource.GetContext) context,
      +                                        orderedKeys)
      +                                : (context, orderedKeys) -> columnSource.getChunk((ColumnSource.GetContext) context,
      +                                        orderedKeys);
                               sourceContexts[i] = columnSource.makeGetContext(chunkSize, sharedContext);
                           }
                       }
      @@ -319,36 +315,32 @@ private SharedContext populateChunkGettersAndContexts(
               }
       
               @Override
      -        public Index filter(final Index selection, final Index fullSet, final Table table,
      -            final boolean usePrev, String formula, final Param... params) {
      +        public Index filter(final Index selection, final Index fullSet, final Table table, final boolean usePrev,
      +                String formula, final Param... params) {
                   try (final FilterKernel.Context context = filterKernel.getContext(chunkSize);
      -                final OrderedKeys.Iterator okIterator = selection.getOrderedKeysIterator()) {
      +                    final OrderedKeys.Iterator okIterator = selection.getOrderedKeysIterator()) {
                       final ChunkGetter[] chunkGetters = new ChunkGetter[columnNames.length];
                       final Context sourceContexts[] = new Context[columnNames.length];
      -                final SharedContext sharedContext = populateChunkGettersAndContexts(selection,
      -                    fullSet, table, usePrev, chunkGetters, sourceContexts);
      +                final SharedContext sharedContext = populateChunkGettersAndContexts(selection, fullSet, table, usePrev,
      +                        chunkGetters, sourceContexts);
                       final SequentialBuilder resultBuilder = Index.FACTORY.getSequentialBuilder();
                       final Chunk inputChunks[] = new Chunk[columnNames.length];
                       while (okIterator.hasMore()) {
      -                    final OrderedKeys currentChunkOrderedKeys =
      -                        okIterator.getNextOrderedKeysWithLength(chunkSize);
      +                    final OrderedKeys currentChunkOrderedKeys = okIterator.getNextOrderedKeysWithLength(chunkSize);
                           for (int i = 0; i < chunkGetters.length; i++) {
                               final ChunkGetter chunkFiller = chunkGetters[i];
      -                        inputChunks[i] =
      -                            chunkFiller.getChunk(sourceContexts[i], currentChunkOrderedKeys);
      +                        inputChunks[i] = chunkFiller.getChunk(sourceContexts[i], currentChunkOrderedKeys);
                           }
                           if (sharedContext != null) {
                               sharedContext.reset();
                           }
                           try {
      -                        final LongChunk matchedIndices = filterKernel.filter(
      -                            context, currentChunkOrderedKeys.asKeyIndicesChunk(), inputChunks);
      +                        final LongChunk matchedIndices =
      +                                filterKernel.filter(context, currentChunkOrderedKeys.asKeyIndicesChunk(), inputChunks);
                               resultBuilder.appendOrderedKeyIndicesChunk(matchedIndices);
                           } catch (Exception e) {
      -                        throw new FormulaEvaluationException(
      -                            e.getClass().getName() + " encountered in filter={ "
      -                                + StringEscapeUtils.escapeJava(truncateLongFormula(formula)) + " }",
      -                            e);
      +                        throw new FormulaEvaluationException(e.getClass().getName() + " encountered in filter={ "
      +                                + StringEscapeUtils.escapeJava(truncateLongFormula(formula)) + " }", e);
                           }
                       }
                       SafeCloseable.closeArray(sourceContexts);
      @@ -367,14 +359,12 @@ private static String toTitleCase(String input) {
           }
       
           @Override
      -    protected void generateFilterCode(TableDefinition tableDefinition,
      -        DBTimeUtils.Result timeConversionResult, DBLanguageParser.Result result)
      -        throws MalformedURLException, ClassNotFoundException {
      +    protected void generateFilterCode(TableDefinition tableDefinition, DBTimeUtils.Result timeConversionResult,
      +            DBLanguageParser.Result result) throws MalformedURLException, ClassNotFoundException {
               final StringBuilder classBody = getClassBody(tableDefinition, timeConversionResult, result);
               if (classBody == null)
                   return;
      -        final QueryPerformanceNugget nugget =
      -            QueryPerformanceRecorder.getInstance().getNugget("Compile:" + formula);
      +        final QueryPerformanceNugget nugget = QueryPerformanceRecorder.getInstance().getNugget("Compile:" + formula);
               try {
                   final List> paramClasses = new ArrayList<>();
                   final Consumer> addParamClass = (cls) -> {
      @@ -396,17 +386,16 @@ protected void generateFilterCode(TableDefinition tableDefinition,
                       addParamClass.accept(param.getDeclaredType());
                   }
       
      -            filterKernelClass = CompilerTools.compile("GeneratedFilterKernel",
      -                this.classBody = classBody.toString(), CompilerTools.FORMULA_PREFIX,
      -                Param.expandParameterClasses(paramClasses));
      +            filterKernelClass = CompilerTools.compile("GeneratedFilterKernel", this.classBody = classBody.toString(),
      +                    CompilerTools.FORMULA_PREFIX, Param.expandParameterClasses(paramClasses));
               } finally {
                   nugget.done();
               }
           }
       
           @Nullable
      -    private StringBuilder getClassBody(TableDefinition tableDefinition,
      -        DBTimeUtils.Result timeConversionResult, DBLanguageParser.Result result) {
      +    private StringBuilder getClassBody(TableDefinition tableDefinition, DBTimeUtils.Result timeConversionResult,
      +            DBLanguageParser.Result result) {
               if (filterKernelClass != null) {
                   return null;
               }
      @@ -428,28 +417,25 @@ private StringBuilder getClassBody(TableDefinition tableDefinition,
               }
               final StringBuilder classBody = new StringBuilder();
               classBody.append(QueryLibrary.getImportStatement().build()).append(
      -            "\n\n" +
      -                "public class $CLASSNAME$ implements ")
      -            .append(FilterKernel.class.getCanonicalName()).append("{\n");
      -        classBody.append("\n").append(timeConversionResult.getInstanceVariablesString())
      -            .append("\n");
      +                "\n\n" +
      +                        "public class $CLASSNAME$ implements ")
      +                .append(FilterKernel.class.getCanonicalName()).append("{\n");
      +        classBody.append("\n").append(timeConversionResult.getInstanceVariablesString()).append("\n");
               final Indenter indenter = new Indenter();
               for (Param param : params) {
                   /*
      -             * adding context param fields like: "            final int p1;\n" +
      -             * "            final float p2;\n" + "            final String p3;\n" +
      +             * adding context param fields like: "            final int p1;\n" + "            final float p2;\n" +
      +             * "            final String p3;\n" +
                    */
      -            classBody.append(indenter).append("private final ")
      -                .append(param.getPrimitiveTypeNameIfAvailable()).append(" ").append(param.getName())
      -                .append(";\n");
      +            classBody.append(indenter).append("private final ").append(param.getPrimitiveTypeNameIfAvailable())
      +                    .append(" ").append(param.getName()).append(";\n");
               }
               if (!usedColumnArrays.isEmpty()) {
                   classBody.append(indenter).append("// Array Column Variables\n");
                   for (String columnName : usedColumnArrays) {
                       final ColumnDefinition column = tableDefinition.getColumn(columnName);
                       if (column == null) {
      -                    throw new RuntimeException(
      -                        "Column \"" + columnName + "\" doesn't exist in this table");
      +                    throw new RuntimeException("Column \"" + columnName + "\" doesn't exist in this table");
                       }
                       final Class dataType = column.getDataType();
                       final Class columnType = DhFormulaColumn.getDbArrayType(dataType);
      @@ -457,28 +443,26 @@ private StringBuilder getClassBody(TableDefinition tableDefinition,
                       /*
                        * Adding array column fields.
                        */
      -                classBody.append(indenter).append("private final ")
      -                    .append(columnType.getCanonicalName())
      -                    .append(TypeUtils.isConvertibleToPrimitive(dataType) ? ""
      -                        : "<" + dataType.getCanonicalName() + ">")
      -                    .append(" ").append(column.getName()).append(COLUMN_SUFFIX).append(";\n");
      +                classBody.append(indenter).append("private final ").append(columnType.getCanonicalName())
      +                        .append(TypeUtils.isConvertibleToPrimitive(dataType) ? ""
      +                                : "<" + dataType.getCanonicalName() + ">")
      +                        .append(" ").append(column.getName()).append(COLUMN_SUFFIX).append(";\n");
                   }
                   classBody.append("\n");
               }
       
               classBody.append("\n").append(indenter)
      -            .append("public $CLASSNAME$(Table table, Index fullSet, Param... params) {\n");
      +                .append("public $CLASSNAME$(Table table, Index fullSet, Param... params) {\n");
               indenter.increaseLevel();
               for (int i = 0; i < params.length; i++) {
                   final Param param = params[i];
                   /*
      -             * Initializing context parameters this.p1 = (Integer) params[0].getValue(); this.p2 =
      -             * (Float) params[1].getValue(); this.p3 = (String) params[2].getValue();
      +             * Initializing context parameters this.p1 = (Integer) params[0].getValue(); this.p2 = (Float)
      +             * params[1].getValue(); this.p3 = (String) params[2].getValue();
                    */
                   final String name = param.getName();
      -            classBody.append(indenter).append("this.").append(name).append(" = (")
      -                .append(param.getDeclaredTypeName()).append(") params[").append(i)
      -                .append("].getValue();\n");
      +            classBody.append(indenter).append("this.").append(name).append(" = (").append(param.getDeclaredTypeName())
      +                    .append(") params[").append(i).append("].getValue();\n");
               }
       
               if (!usedColumnArrays.isEmpty()) {
      @@ -487,35 +471,34 @@ private StringBuilder getClassBody(TableDefinition tableDefinition,
                   for (String columnName : usedColumnArrays) {
                       final ColumnDefinition column = tableDefinition.getColumn(columnName);
                       if (column == null) {
      -                    throw new RuntimeException(
      -                        "Column \"" + columnName + "\" doesn't exist in this table");
      +                    throw new RuntimeException("Column \"" + columnName + "\" doesn't exist in this table");
                       }
                       final Class dataType = column.getDataType();
                       final Class columnType = DhFormulaColumn.getDbArrayType(dataType);
       
                       final String arrayType = columnType.getCanonicalName().replace(
      -                    "io.deephaven.db.tables.dbarrays",
      -                    "io.deephaven.db.v2.dbarrays") + "ColumnWrapper";
      +                        "io.deephaven.db.tables.dbarrays",
      +                        "io.deephaven.db.v2.dbarrays") + "ColumnWrapper";
       
                       /*
                        * Adding array column fields.
                        */
      -                classBody.append(indenter).append(column.getName()).append(COLUMN_SUFFIX)
      -                    .append(" = new ").append(arrayType).append("(table.getColumnSource(\"")
      -                    .append(columnName).append("\"), fullSet);\n");
      +                classBody.append(indenter).append(column.getName()).append(COLUMN_SUFFIX).append(" = new ")
      +                        .append(arrayType).append("(table.getColumnSource(\"").append(columnName)
      +                        .append("\"), fullSet);\n");
                   }
               }
       
               indenter.decreaseLevel();
       
               indenter.indent(classBody, "}\n" +
      -            "@Override\n" +
      -            "public Context getContext(int maxChunkSize) {\n" +
      -            "    return new Context(maxChunkSize);\n" +
      -            "}\n" +
      -            "\n" +
      -            "@Override\n" +
      -            "public LongChunk filter(Context context, LongChunk indices, Chunk... inputChunks) {\n");
      +                "@Override\n" +
      +                "public Context getContext(int maxChunkSize) {\n" +
      +                "    return new Context(maxChunkSize);\n" +
      +                "}\n" +
      +                "\n" +
      +                "@Override\n" +
      +                "public LongChunk filter(Context context, LongChunk indices, Chunk... inputChunks) {\n");
               indenter.increaseLevel();
               for (int i = 0; i < usedInputs.size(); i++) {
                   final Class columnType = usedInputs.get(i).second;
      @@ -526,42 +509,40 @@ private StringBuilder getClassBody(TableDefinition tableDefinition,
                       // TODO: Reinterpret Boolean and DBDateTime to byte and long
                       chunkType = "ObjectChunk";
                   }
      -            classBody.append(indenter).append("final ").append(chunkType).append(" __columnChunk")
      -                .append(i).append(" = inputChunks[").append(i).append("].as").append(chunkType)
      -                .append("();\n");
      +            classBody.append(indenter).append("final ").append(chunkType).append(" __columnChunk").append(i)
      +                    .append(" = inputChunks[").append(i).append("].as").append(chunkType).append("();\n");
               }
               indenter.indent(classBody, "final int size = indices.size();\n" +
      -            "context.resultChunk.setSize(0);\n" +
      -            "for (int __my_i__ = 0; __my_i__ < size; __my_i__++) {\n");
      +                "context.resultChunk.setSize(0);\n" +
      +                "for (int __my_i__ = 0; __my_i__ < size; __my_i__++) {\n");
               indenter.increaseLevel();
               for (int i = 0; i < usedInputs.size(); i++) {
                   final Pair usedInput = usedInputs.get(i);
                   final Class columnType = usedInput.second;
                   final String canonicalName = columnType.getCanonicalName();
      -            classBody.append(indenter).append("final ").append(canonicalName).append(" ")
      -                .append(usedInput.first).append(" =  (").append(canonicalName)
      -                .append(")__columnChunk").append(i).append(".get(__my_i__);\n");
      +            classBody.append(indenter).append("final ").append(canonicalName).append(" ").append(usedInput.first)
      +                    .append(" =  (").append(canonicalName).append(")__columnChunk").append(i)
      +                    .append(".get(__my_i__);\n");
               }
               classBody.append(
      -            "            if (").append(result.getConvertedExpression()).append(") {\n" +
      -                "                context.resultChunk.add(indices.get(__my_i__));\n" +
      -                "            }\n" +
      -                "        }\n" +
      -                "        return context.resultChunk;\n" +
      -                "    }\n" +
      -                "}");
      +                "            if (").append(result.getConvertedExpression()).append(") {\n" +
      +                        "                context.resultChunk.add(indices.get(__my_i__));\n" +
      +                        "            }\n" +
      +                        "        }\n" +
      +                        "        return context.resultChunk;\n" +
      +                        "    }\n" +
      +                        "}");
               return classBody;
           }
       
           @Override
      -    protected Filter getFilter(Table table, Index fullSet) throws InstantiationException,
      -        IllegalAccessException, NoSuchMethodException, InvocationTargetException {
      +    protected Filter getFilter(Table table, Index fullSet)
      +            throws InstantiationException, IllegalAccessException, NoSuchMethodException, InvocationTargetException {
               if (filter != null) {
                   return filter;
               }
      -        final FilterKernel filterKernel =
      -            (FilterKernel) filterKernelClass.getConstructor(Table.class, Index.class, Param[].class)
      -                .newInstance(table, fullSet, (Object) params);
      +        final FilterKernel filterKernel = (FilterKernel) filterKernelClass
      +                .getConstructor(Table.class, Index.class, Param[].class).newInstance(table, fullSet, (Object) params);
               final String[] columnNames = usedInputs.stream().map(p -> p.first).toArray(String[]::new);
               return new ChunkFilter(filterKernel, columnNames, CHUNK_SIZE);
           }
      diff --git a/DB/src/main/java/io/deephaven/db/v2/select/ConjunctiveFilter.java b/DB/src/main/java/io/deephaven/db/v2/select/ConjunctiveFilter.java
      index 16660349ed4..bcf58607b73 100644
      --- a/DB/src/main/java/io/deephaven/db/v2/select/ConjunctiveFilter.java
      +++ b/DB/src/main/java/io/deephaven/db/v2/select/ConjunctiveFilter.java
      @@ -27,15 +27,13 @@ public static SelectFilter makeConjunctiveFilter(SelectFilter... componentFilter
               final List rawComponents = new ArrayList<>();
               for (int ii = 0; ii < componentFilters.length; ++ii) {
                   if (componentFilters[ii] instanceof ConjunctiveFilter) {
      -                rawComponents.addAll(Arrays
      -                    .asList(((ConjunctiveFilter) componentFilters[ii]).getComponentFilters()));
      +                rawComponents.addAll(Arrays.asList(((ConjunctiveFilter) componentFilters[ii]).getComponentFilters()));
                   } else {
                       rawComponents.add(componentFilters[ii]);
                   }
               }
       
      -        return new ConjunctiveFilter(
      -            rawComponents.toArray(SelectFilter.ZERO_LENGTH_SELECT_FILTER_ARRAY));
      +        return new ConjunctiveFilter(rawComponents.toArray(SelectFilter.ZERO_LENGTH_SELECT_FILTER_ARRAY));
           }
       
           @Override
      @@ -55,8 +53,8 @@ public Index filter(Index selection, Index fullSet, Table table, boolean usePrev
       
           @Override
           public ConjunctiveFilter copy() {
      -        return new ConjunctiveFilter(Arrays.stream(getComponentFilters()).map(SelectFilter::copy)
      -            .toArray(SelectFilter[]::new));
      +        return new ConjunctiveFilter(
      +                Arrays.stream(getComponentFilters()).map(SelectFilter::copy).toArray(SelectFilter[]::new));
           }
       
           @Override
      diff --git a/DB/src/main/java/io/deephaven/db/v2/select/DateTimeRangeFilter.java b/DB/src/main/java/io/deephaven/db/v2/select/DateTimeRangeFilter.java
      index e2e7ee39d46..5df0a95c82b 100644
      --- a/DB/src/main/java/io/deephaven/db/v2/select/DateTimeRangeFilter.java
      +++ b/DB/src/main/java/io/deephaven/db/v2/select/DateTimeRangeFilter.java
      @@ -17,13 +17,13 @@ public DateTimeRangeFilter(String columnName, DBDateTime val1, DBDateTime val2)
               super(columnName, val1.getNanos(), val2.getNanos(), true, true);
           }
       
      -    public DateTimeRangeFilter(String columnName, DBDateTime val1, DBDateTime val2,
      -        boolean lowerInclusive, boolean upperInclusive) {
      +    public DateTimeRangeFilter(String columnName, DBDateTime val1, DBDateTime val2, boolean lowerInclusive,
      +            boolean upperInclusive) {
               super(columnName, val1.getNanos(), val2.getNanos(), lowerInclusive, upperInclusive);
           }
       
           public DateTimeRangeFilter(String columnName, long val1, long val2, boolean lowerInclusive,
      -        boolean upperInclusive) {
      +            boolean upperInclusive) {
               super(columnName, val1, val2, lowerInclusive, upperInclusive);
           }
       
      @@ -35,8 +35,7 @@ public void init(TableDefinition tableDefinition) {
       
               final ColumnDefinition def = tableDefinition.getColumn(columnName);
               if (def == null) {
      -            throw new RuntimeException(
      -                "Column \"" + columnName + "\" doesn't exist in this table, available columns: "
      +            throw new RuntimeException("Column \"" + columnName + "\" doesn't exist in this table, available columns: "
                           + tableDefinition.getColumnNames());
               }
       
      @@ -55,36 +54,33 @@ public DateTimeRangeFilter copy() {
           @Override
           public String toString() {
               return "DateTimeRangeFilter(" + columnName + " in " +
      -            (lowerInclusive ? "[" : "(") + new DBDateTime(lower) + "," + new DBDateTime(upper) +
      -            (upperInclusive ? "]" : ")") + ")";
      +                (lowerInclusive ? "[" : "(") + new DBDateTime(lower) + "," + new DBDateTime(upper) +
      +                (upperInclusive ? "]" : ")") + ")";
           }
       
           @Override
      -    Index binarySearch(Index selection, ColumnSource columnSource, boolean usePrev,
      -        boolean reverse) {
      +    Index binarySearch(Index selection, ColumnSource columnSource, boolean usePrev, boolean reverse) {
               if (selection.isEmpty()) {
                   return selection;
               }
       
               // noinspection unchecked
               final ColumnSource dateTimeColumnSource =
      -            ReinterpretUtilities.dateTimeToLongSource((ColumnSource) columnSource);
      +                ReinterpretUtilities.dateTimeToLongSource((ColumnSource) columnSource);
               return super.binarySearch(selection, dateTimeColumnSource, usePrev, reverse);
           }
       
           private class DateTimeLongChunkFilterAdapter implements ChunkFilter {
               @Override
               public void filter(Chunk values, LongChunk keys,
      -            WritableLongChunk results) {
      +                WritableLongChunk results) {
                   try (final WritableLongChunk writableLongChunk =
      -                WritableLongChunk.makeWritableChunk(values.size())) {
      +                    WritableLongChunk.makeWritableChunk(values.size())) {
       
      -                final ObjectChunk objectValues =
      -                    values.asObjectChunk();
      +                final ObjectChunk objectValues = values.asObjectChunk();
                       for (int ii = 0; ii < values.size(); ++ii) {
                           final DBDateTime dbDateTime = objectValues.get(ii);
      -                    writableLongChunk.set(ii,
      -                        dbDateTime == null ? QueryConstants.NULL_LONG : dbDateTime.getNanos());
      +                    writableLongChunk.set(ii, dbDateTime == null ? QueryConstants.NULL_LONG : dbDateTime.getNanos());
                       }
                       writableLongChunk.setSize(values.size());
                       longFilter.filter(writableLongChunk, keys, results);
      diff --git a/DB/src/main/java/io/deephaven/db/v2/select/DbArrayChunkAdapter.java b/DB/src/main/java/io/deephaven/db/v2/select/DbArrayChunkAdapter.java
      index 975bbee9231..67179e973c7 100644
      --- a/DB/src/main/java/io/deephaven/db/v2/select/DbArrayChunkAdapter.java
      +++ b/DB/src/main/java/io/deephaven/db/v2/select/DbArrayChunkAdapter.java
      @@ -7,13 +7,12 @@
       import org.jetbrains.annotations.NotNull;
       
       /**
      - * This class wraps an inner ChunkSource holding a DbArray. The purpose of doing so is to apply
      - * DbArray#getDirect to the underlying DbArray values returned by the underlying ChunkSource. This
      - * is the strategy for implementing this class: makeGetContext() - doesn't need to change. The
      - * default implementation in our parent, namely DefaultChunkSource#makeGetContext, already does the
      - * right thing. getChunk() - likewise. makeFillContext() - We don't need to add anything to the
      - * "inner" context, so we just delegate to inner and return its context fillContext() - We first let
      - * the inner fill the chunk, then we overwrite each value (where non-null) with the result of
      + * This class wraps an inner ChunkSource holding a DbArray. The purpose of doing so is to apply DbArray#getDirect to the
      + * underlying DbArray values returned by the underlying ChunkSource. This is the strategy for implementing this class:
      + * makeGetContext() - doesn't need to change. The default implementation in our parent, namely
      + * DefaultChunkSource#makeGetContext, already does the right thing. getChunk() - likewise. makeFillContext() - We don't
      + * need to add anything to the "inner" context, so we just delegate to inner and return its context fillContext() - We
      + * first let the inner fill the chunk, then we overwrite each value (where non-null) with the result of
        * DbArrayBase#getDirect() invoked on that value.
        */
       public class DbArrayChunkAdapter implements DefaultChunkSource {
      @@ -29,15 +28,12 @@ public ChunkType getChunkType() {
           }
       
           @Override
      -    public void fillChunk(@NotNull FillContext context,
      -        @NotNull WritableChunk destination,
      -        @NotNull OrderedKeys orderedKeys) {
      -        // First let the underlying ChunkSource fill the chunk, and then we overwrite the values
      -        // with the result
      +    public void fillChunk(@NotNull FillContext context, @NotNull WritableChunk destination,
      +            @NotNull OrderedKeys orderedKeys) {
      +        // First let the underlying ChunkSource fill the chunk, and then we overwrite the values with the result
               // of applying DbArray#getDirect to each element.
               underlying.fillChunk(context, destination, orderedKeys);
      -        final WritableObjectChunk typedDest =
      -            destination.asWritableObjectChunk();
      +        final WritableObjectChunk typedDest = destination.asWritableObjectChunk();
               for (int ii = 0; ii < destination.size(); ++ii) {
                   final DbArrayBase dbArray = typedDest.get(ii);
                   if (dbArray != null) {
      diff --git a/DB/src/main/java/io/deephaven/db/v2/select/DhFormulaColumn.java b/DB/src/main/java/io/deephaven/db/v2/select/DhFormulaColumn.java
      index d0bea0418e1..7185131c1ed 100644
      --- a/DB/src/main/java/io/deephaven/db/v2/select/DhFormulaColumn.java
      +++ b/DB/src/main/java/io/deephaven/db/v2/select/DhFormulaColumn.java
      @@ -48,10 +48,9 @@ public class DhFormulaColumn extends AbstractFormulaColumn {
           private static final String LAZY_RESULT_CACHE_NAME = "__lazyResultCache";
           private static final String FORMULA_FACTORY_NAME = "__FORMULA_FACTORY";
           private static final String PARAM_CLASSNAME = Param.class.getCanonicalName();
      -    private static final String EVALUATION_EXCEPTION_CLASSNAME =
      -        FormulaEvaluationException.class.getCanonicalName();
      -    public static boolean useKernelFormulasProperty = Configuration.getInstance()
      -        .getBooleanWithDefault("FormulaColumn.useKernelFormulasProperty", false);
      +    private static final String EVALUATION_EXCEPTION_CLASSNAME = FormulaEvaluationException.class.getCanonicalName();
      +    public static boolean useKernelFormulasProperty =
      +            Configuration.getInstance().getBooleanWithDefault("FormulaColumn.useKernelFormulasProperty", false);
       
           private FormulaAnalyzer.Result analyzedFormula;
           private String timeInstanceVariables;
      @@ -76,22 +75,20 @@ public FormulaColumnPython getFormulaColumnPython() {
           }
       
           /**
      -     * Returns the name of a primitive-type-specific getter, e.g. {@code getDouble()},
      -     * {@code getPrevDouble()}, {@code getInt()}, {@code getPrevInt()}, etc.
      +     * Returns the name of a primitive-type-specific getter, e.g. {@code getDouble()}, {@code getPrevDouble()},
      +     * {@code getInt()}, {@code getPrevInt()}, etc.
            *
            * @param type The return type
            * @param prev {@code true} for get(), {@code false} for getPrev()
            * @return An appropriate name for a getter
            */
           private static String getGetterName(Class type, boolean prev) {
      -        final Class unboxedType =
      -            (type.isPrimitive() ? type : io.deephaven.util.type.TypeUtils.getUnboxedType(type));
      +        final Class unboxedType = (type.isPrimitive() ? type : io.deephaven.util.type.TypeUtils.getUnboxedType(type));
               final String get = prev ? "getPrev" : "get";
               if (unboxedType == null) {
                   return get;
               } else {
      -            return get + Character.toUpperCase(unboxedType.getName().charAt(0))
      -                + unboxedType.getName().substring(1);
      +            return get + Character.toUpperCase(unboxedType.getName().charAt(0)) + unboxedType.getName().substring(1);
               }
           }
       
      @@ -109,15 +106,14 @@ private static String columnSourceGetMethodReturnType(ColumnSource cs) {
               }
               sb.append(columnType.getCanonicalName());
               final Class componentType = cs.getComponentType();
      -        if (componentType != null && !componentType.isPrimitive()
      -            && columnType.getTypeParameters().length == 1) {
      +        if (componentType != null && !componentType.isPrimitive() && columnType.getTypeParameters().length == 1) {
                   sb.append("<").append(componentType.getCanonicalName()).append(">");
               }
               return sb.toString();
           }
       
           private static Map makeNameToRichTypeDict(final String[] names,
      -        final Map columnSources) {
      +            final Map columnSources) {
               final Map result = new HashMap<>();
               for (final String s : names) {
                   final RichType richType;
      @@ -133,7 +129,7 @@ private static Map makeNameToRichTypeDict(final String[] names
                       }
                       final Class componentType = cs.getComponentType();
                       if (componentType != null && !componentType.isPrimitive()
      -                    && columnType.getTypeParameters().length == 1) {
      +                        && columnType.getTypeParameters().length == 1) {
                           richType = RichType.createGeneric(columnType, componentType);
                       } else {
                           richType = RichType.createNonGeneric(columnType);
      @@ -145,7 +141,7 @@ private static Map makeNameToRichTypeDict(final String[] names
           }
       
           private static Map makeNameToTypeDict(final String[] names,
      -        final Map columnSources) {
      +            final Map columnSources) {
               final Map result = new HashMap<>();
               for (final String s : names) {
                   final ColumnSource cs = columnSources.get(s);
      @@ -155,18 +151,17 @@ private static Map makeNameToTypeDict(final String[] names,
           }
       
           public static Class getDbArrayType(Class declaredType) {
      -        if (!io.deephaven.util.type.TypeUtils.isConvertibleToPrimitive(declaredType)
      -            || declaredType == boolean.class || declaredType == Boolean.class) {
      +        if (!io.deephaven.util.type.TypeUtils.isConvertibleToPrimitive(declaredType) || declaredType == boolean.class
      +                || declaredType == Boolean.class) {
                   return DbArray.class;
               } else {
                   try {
                       return Class.forName(DbArray.class.getPackage().getName() + ".Db"
      -                    + Character.toUpperCase(io.deephaven.util.type.TypeUtils
      -                        .getUnboxedType(declaredType).getSimpleName().charAt(0))
      -                    +
      -                    io.deephaven.util.type.TypeUtils.getUnboxedType(declaredType).getSimpleName()
      -                        .substring(1)
      -                    + "Array");
      +                        + Character.toUpperCase(
      +                                io.deephaven.util.type.TypeUtils.getUnboxedType(declaredType).getSimpleName().charAt(0))
      +                        +
      +                        io.deephaven.util.type.TypeUtils.getUnboxedType(declaredType).getSimpleName().substring(1)
      +                        + "Array");
                   } catch (ClassNotFoundException e) {
                       throw new RuntimeException("Unexpected exception for type " + declaredType, e);
                   }
      @@ -176,28 +171,22 @@ public static Class getDbArrayType(Class declaredType) {
           @Override
           public List initDef(Map columnDefinitionMap) {
               try {
      -            analyzedFormula =
      -                FormulaAnalyzer.analyze(formulaString, columnDefinitionMap, timeNewVariables);
      -            final DBTimeUtils.Result timeConversionResult =
      -                DBTimeUtils.convertExpression(formulaString);
      -            final DBLanguageParser.Result result =
      -                FormulaAnalyzer.getCompiledFormula(columnDefinitionMap,
      +            analyzedFormula = FormulaAnalyzer.analyze(formulaString, columnDefinitionMap, timeNewVariables);
      +            final DBTimeUtils.Result timeConversionResult = DBTimeUtils.convertExpression(formulaString);
      +            final DBLanguageParser.Result result = FormulaAnalyzer.getCompiledFormula(columnDefinitionMap,
                           timeConversionResult, timeNewVariables);
       
      -            log.debug().append("Expression (after language conversion) : ")
      -                .append(result.getConvertedExpression()).endl();
      +            log.debug().append("Expression (after language conversion) : ").append(result.getConvertedExpression())
      +                    .endl();
       
                   applyUsedVariables(columnDefinitionMap, result.getVariablesUsed());
                   returnedType = result.getType();
                   if (returnedType == boolean.class) {
                       returnedType = Boolean.class;
                   }
      -            // The first time we do an initDef, we allow the formulaString to be transformed by
      -            // DBTimeUtils,
      -            // possibly with the side effect of creating 'timeInstanceVariables' and
      -            // 'timeNewVariables'.
      -            // However, we should not do this on subsequent calls because the answer is not expected
      -            // to
      +            // The first time we do an initDef, we allow the formulaString to be transformed by DBTimeUtils,
      +            // possibly with the side effect of creating 'timeInstanceVariables' and 'timeNewVariables'.
      +            // However, we should not do this on subsequent calls because the answer is not expected to
                   // change further, and we don't want to overwrite our 'timeInstanceVariables'.
                   if (timeNewVariables == null) {
                       formulaString = result.getConvertedExpression();
      @@ -205,8 +194,7 @@ public List initDef(Map columnDefinitionMap) {
                       timeNewVariables = timeConversionResult.getNewVariables();
                   }
               } catch (Exception e) {
      -            throw new FormulaCompilationException("Formula compilation error for: " + formulaString,
      -                e);
      +            throw new FormulaCompilationException("Formula compilation error for: " + formulaString, e);
               }
       
               // check if this is a column to be created with a numba vectorized function
      @@ -214,10 +202,9 @@ public List initDef(Map columnDefinitionMap) {
                   if (param.getValue().getClass() == NumbaCallableWrapper.class) {
                       NumbaCallableWrapper numbaCallableWrapper = (NumbaCallableWrapper) param.getValue();
                       formulaColumnPython = FormulaColumnPython.create(this.columnName,
      -                    DeephavenCompatibleFunction.create(numbaCallableWrapper.getPyObject(),
      -                        numbaCallableWrapper.getReturnType(),
      -                        this.analyzedFormula.sourceDescriptor.sources,
      -                        true));
      +                        DeephavenCompatibleFunction.create(numbaCallableWrapper.getPyObject(),
      +                                numbaCallableWrapper.getReturnType(), this.analyzedFormula.sourceDescriptor.sources,
      +                                true));
                       formulaColumnPython.initDef(columnDefinitionMap);
                       return formulaColumnPython.usedColumns;
                   }
      @@ -235,53 +222,53 @@ String generateClassBody() {
               final TypeAnalyzer ta = TypeAnalyzer.create(returnedType);
       
               final CodeGenerator g = CodeGenerator.create(
      -            QueryLibrary.getImportStatement(), "",
      -            "public class $CLASSNAME$ extends [[FORMULA_CLASS_NAME]]", CodeGenerator.block(
      -                generateFormulaFactoryLambda(), "",
      -                CodeGenerator.repeated("instanceVar", "private final [[TYPE]] [[NAME]];"),
      -                "private final Map [[LAZY_RESULT_CACHE_NAME]];",
      -                timeInstanceVariables, "",
      -                generateConstructor(), "",
      -                generateAppropriateGetMethod(ta, false), "",
      -                generateAppropriateGetMethod(ta, true), "",
      -                generateOptionalObjectGetMethod(ta, false),
      -                generateOptionalObjectGetMethod(ta, true),
      -                generateGetChunkType(ta), "",
      -                generateFillChunk(false), "",
      -                generateFillChunk(true), "",
      -                generateFillChunkHelper(ta), "",
      -                generateApplyFormulaPerItem(ta), "",
      -                generateMakeFillContext(), "",
      -                generateNormalContextClass(), "",
      -                generateIntSize()),
      -            "");
      +                QueryLibrary.getImportStatement(), "",
      +                "public class $CLASSNAME$ extends [[FORMULA_CLASS_NAME]]", CodeGenerator.block(
      +                        generateFormulaFactoryLambda(), "",
      +                        CodeGenerator.repeated("instanceVar", "private final [[TYPE]] [[NAME]];"),
      +                        "private final Map [[LAZY_RESULT_CACHE_NAME]];",
      +                        timeInstanceVariables, "",
      +                        generateConstructor(), "",
      +                        generateAppropriateGetMethod(ta, false), "",
      +                        generateAppropriateGetMethod(ta, true), "",
      +                        generateOptionalObjectGetMethod(ta, false),
      +                        generateOptionalObjectGetMethod(ta, true),
      +                        generateGetChunkType(ta), "",
      +                        generateFillChunk(false), "",
      +                        generateFillChunk(true), "",
      +                        generateFillChunkHelper(ta), "",
      +                        generateApplyFormulaPerItem(ta), "",
      +                        generateMakeFillContext(), "",
      +                        generateNormalContextClass(), "",
      +                        generateIntSize()),
      +                "");
               g.replace("FORMULA_CLASS_NAME", Formula.class.getCanonicalName());
               g.replace("LAZY_RESULT_CACHE_NAME", LAZY_RESULT_CACHE_NAME);
               visitFormulaParameters(null,
      -            cs -> {
      -                final CodeGenerator fc = g.instantiateNewRepeated("instanceVar");
      -                fc.replace("TYPE", cs.columnSourceGetTypeString);
      -                fc.replace("NAME", cs.name);
      -                return null;
      -            },
      -            ca -> {
      -                final CodeGenerator fc = g.instantiateNewRepeated("instanceVar");
      -                fc.replace("TYPE", ca.dbArrayTypeString);
      -                fc.replace("NAME", ca.name);
      -                return null;
      -            },
      -            p -> {
      -                final CodeGenerator fc = g.instantiateNewRepeated("instanceVar");
      -                fc.replace("TYPE", p.typeString);
      -                fc.replace("NAME", p.name);
      -                return null;
      -            });
      +                cs -> {
      +                    final CodeGenerator fc = g.instantiateNewRepeated("instanceVar");
      +                    fc.replace("TYPE", cs.columnSourceGetTypeString);
      +                    fc.replace("NAME", cs.name);
      +                    return null;
      +                },
      +                ca -> {
      +                    final CodeGenerator fc = g.instantiateNewRepeated("instanceVar");
      +                    fc.replace("TYPE", ca.dbArrayTypeString);
      +                    fc.replace("NAME", ca.name);
      +                    return null;
      +                },
      +                p -> {
      +                    final CodeGenerator fc = g.instantiateNewRepeated("instanceVar");
      +                    fc.replace("TYPE", p.typeString);
      +                    fc.replace("NAME", p.name);
      +                    return null;
      +                });
               return g.build();
           }
       
           private CodeGenerator generateFormulaFactoryLambda() {
               final CodeGenerator g = CodeGenerator.create(
      -            "public static final [[FORMULA_FACTORY]] [[FORMULA_FACTORY_NAME]] = $CLASSNAME$::new;");
      +                "public static final [[FORMULA_FACTORY]] [[FORMULA_FACTORY_NAME]] = $CLASSNAME$::new;");
               g.replace("FORMULA_FACTORY", FormulaFactory.class.getCanonicalName());
               g.replace("FORMULA_FACTORY_NAME", FORMULA_FACTORY_NAME);
               return g.freeze();
      @@ -289,64 +276,63 @@ private CodeGenerator generateFormulaFactoryLambda() {
       
           private CodeGenerator generateConstructor() {
               final CodeGenerator g = CodeGenerator.create(
      -            "public $CLASSNAME$(final Index index,", CodeGenerator.indent(
      -                "final boolean __lazy,",
      -                "final java.util.Map __columnsToData,",
      -                "final [[PARAM_CLASSNAME]]... __params)"),
      -            CodeGenerator.block(
      -                "super(index);",
      -                CodeGenerator.repeated("initColumn",
      -                    "[[COLUMN_NAME]] = __columnsToData.get(\"[[COLUMN_NAME]]\");"),
      -                CodeGenerator.repeated("initNormalColumnArray",
      -                    "[[COLUMN_ARRAY_NAME]] = new [[DB_ARRAY_TYPE_PREFIX]]ColumnWrapper(__columnsToData.get(\"[[COLUMN_NAME]]\"), __index);"),
      -                CodeGenerator.repeated("initParam",
      -                    "[[PARAM_NAME]] = ([[PARAM_TYPE]]) __params[[[PARAM_INDEX]]].getValue();"),
      -                "[[LAZY_RESULT_CACHE_NAME]] = __lazy ? new ConcurrentHashMap<>() : null;"));
      +                "public $CLASSNAME$(final Index index,", CodeGenerator.indent(
      +                        "final boolean __lazy,",
      +                        "final java.util.Map __columnsToData,",
      +                        "final [[PARAM_CLASSNAME]]... __params)"),
      +                CodeGenerator.block(
      +                        "super(index);",
      +                        CodeGenerator.repeated("initColumn",
      +                                "[[COLUMN_NAME]] = __columnsToData.get(\"[[COLUMN_NAME]]\");"),
      +                        CodeGenerator.repeated("initNormalColumnArray",
      +                                "[[COLUMN_ARRAY_NAME]] = new [[DB_ARRAY_TYPE_PREFIX]]ColumnWrapper(__columnsToData.get(\"[[COLUMN_NAME]]\"), __index);"),
      +                        CodeGenerator.repeated("initParam",
      +                                "[[PARAM_NAME]] = ([[PARAM_TYPE]]) __params[[[PARAM_INDEX]]].getValue();"),
      +                        "[[LAZY_RESULT_CACHE_NAME]] = __lazy ? new ConcurrentHashMap<>() : null;"));
       
               g.replace("LAZY_RESULT_CACHE_NAME", LAZY_RESULT_CACHE_NAME);
               g.replace("COLUMN_SOURCE_CLASSNAME", COLUMN_SOURCE_CLASSNAME);
               g.replace("PARAM_CLASSNAME", PARAM_CLASSNAME);
               visitFormulaParameters(null,
      -            cs -> {
      -                final CodeGenerator fc = g.instantiateNewRepeated("initColumn");
      -                fc.replace("COLUMN_NAME", cs.name);
      -                return null;
      -            },
      -            ac -> {
      -                final CodeGenerator fc = g.instantiateNewRepeated("initNormalColumnArray");
      -                fc.replace("COLUMN_ARRAY_NAME", ac.name);
      -                fc.replace("COLUMN_NAME", ac.bareName);
      -
      -                final String datp =
      -                    getDbArrayType(ac.columnSource.getType()).getCanonicalName().replace(
      -                        "io.deephaven.db.tables.dbarrays",
      -                        "io.deephaven.db.v2.dbarrays");
      -                fc.replace("DB_ARRAY_TYPE_PREFIX", datp);
      -                return null;
      -            },
      -            p -> {
      -                final CodeGenerator fc = g.instantiateNewRepeated("initParam");
      -                fc.replace("PARAM_NAME", p.name);
      -                fc.replace("PARAM_TYPE", p.typeString);
      -                fc.replace("PARAM_INDEX", p.index + "");
      -                return null;
      -            });
      +                cs -> {
      +                    final CodeGenerator fc = g.instantiateNewRepeated("initColumn");
      +                    fc.replace("COLUMN_NAME", cs.name);
      +                    return null;
      +                },
      +                ac -> {
      +                    final CodeGenerator fc = g.instantiateNewRepeated("initNormalColumnArray");
      +                    fc.replace("COLUMN_ARRAY_NAME", ac.name);
      +                    fc.replace("COLUMN_NAME", ac.bareName);
      +
      +                    final String datp = getDbArrayType(ac.columnSource.getType()).getCanonicalName().replace(
      +                            "io.deephaven.db.tables.dbarrays",
      +                            "io.deephaven.db.v2.dbarrays");
      +                    fc.replace("DB_ARRAY_TYPE_PREFIX", datp);
      +                    return null;
      +                },
      +                p -> {
      +                    final CodeGenerator fc = g.instantiateNewRepeated("initParam");
      +                    fc.replace("PARAM_NAME", p.name);
      +                    fc.replace("PARAM_TYPE", p.typeString);
      +                    fc.replace("PARAM_INDEX", p.index + "");
      +                    return null;
      +                });
       
               return g.freeze();
           }
       
           private CodeGenerator generateApplyFormulaPerItem(final TypeAnalyzer ta) {
               final CodeGenerator g = CodeGenerator.create(
      -            "private [[RETURN_TYPE]] applyFormulaPerItem([[ARGS]])", CodeGenerator.block(
      -                "try", CodeGenerator.block(
      -                    "return [[FORMULA_STRING]];"),
      -                CodeGenerator.samelineBlock("catch (java.lang.Exception __e)",
      -                    "throw new [[EXCEPTION_TYPE]](\"In formula: [[COLUMN_NAME]] = \" + [[JOINED_FORMULA_STRING]], __e);")));
      +                "private [[RETURN_TYPE]] applyFormulaPerItem([[ARGS]])", CodeGenerator.block(
      +                        "try", CodeGenerator.block(
      +                                "return [[FORMULA_STRING]];"),
      +                        CodeGenerator.samelineBlock("catch (java.lang.Exception __e)",
      +                                "throw new [[EXCEPTION_TYPE]](\"In formula: [[COLUMN_NAME]] = \" + [[JOINED_FORMULA_STRING]], __e);")));
               g.replace("RETURN_TYPE", ta.typeString);
               final List args = visitFormulaParameters(n -> n.typeString + " " + n.name,
      -            n -> n.typeString + " " + n.name,
      -            null,
      -            null);
      +                n -> n.typeString + " " + n.name,
      +                null,
      +                null);
               g.replace("ARGS", makeCommaSeparatedList(args));
               g.replace("FORMULA_STRING", ta.wrapWithCastIfNecessary(formulaString));
               g.replace("COLUMN_NAME", StringEscapeUtils.escapeJava(columnName));
      @@ -359,25 +345,24 @@ private CodeGenerator generateApplyFormulaPerItem(final TypeAnalyzer ta) {
           @NotNull
           private CodeGenerator generateAppropriateGetMethod(TypeAnalyzer ta, boolean usePrev) {
               final CodeGenerator g = CodeGenerator.create(
      -            "@Override",
      -            "public [[RETURN_TYPE]] [[GETTER_NAME]](final long k)", CodeGenerator.block(
      -                (usePrev
      -                    ? CodeGenerator.optional("maybeCreateIorII",
      -                        "final long findResult;",
      -                        "try (final Index prev = __index.getPrevIndex())", CodeGenerator.block(
      -                            "findResult = prev.find(k);"))
      -                    : CodeGenerator.optional("maybeCreateIorII",
      -                        "final long findResult = __index.find(k);")),
      -                CodeGenerator.optional("maybeCreateI",
      -                    "final int i = __intSize(findResult);"),
      -                CodeGenerator.optional("maybeCreateII",
      -                    "final long ii = findResult;"),
      -                CodeGenerator.repeated("cacheColumnSourceGet",
      -                    "final [[TYPE]] [[VAR]] = [[GET_EXPRESSION]];"),
      -                "if ([[LAZY_RESULT_CACHE_NAME]] != null)", CodeGenerator.block(
      -                    "final Object __lazyKey = [[C14NUTIL_CLASSNAME]].maybeMakeSmartKey([[FORMULA_ARGS]]);",
      -                    "return ([[RESULT_TYPE]])[[LAZY_RESULT_CACHE_NAME]].computeIfAbsent(__lazyKey, __unusedKey -> applyFormulaPerItem([[FORMULA_ARGS]]));"),
      -                "return applyFormulaPerItem([[FORMULA_ARGS]]);"));
      +                "@Override",
      +                "public [[RETURN_TYPE]] [[GETTER_NAME]](final long k)", CodeGenerator.block(
      +                        (usePrev
      +                                ? CodeGenerator.optional("maybeCreateIorII",
      +                                        "final long findResult;",
      +                                        "try (final Index prev = __index.getPrevIndex())", CodeGenerator.block(
      +                                                "findResult = prev.find(k);"))
      +                                : CodeGenerator.optional("maybeCreateIorII",
      +                                        "final long findResult = __index.find(k);")),
      +                        CodeGenerator.optional("maybeCreateI",
      +                                "final int i = __intSize(findResult);"),
      +                        CodeGenerator.optional("maybeCreateII",
      +                                "final long ii = findResult;"),
      +                        CodeGenerator.repeated("cacheColumnSourceGet", "final [[TYPE]] [[VAR]] = [[GET_EXPRESSION]];"),
      +                        "if ([[LAZY_RESULT_CACHE_NAME]] != null)", CodeGenerator.block(
      +                                "final Object __lazyKey = [[C14NUTIL_CLASSNAME]].maybeMakeSmartKey([[FORMULA_ARGS]]);",
      +                                "return ([[RESULT_TYPE]])[[LAZY_RESULT_CACHE_NAME]].computeIfAbsent(__lazyKey, __unusedKey -> applyFormulaPerItem([[FORMULA_ARGS]]));"),
      +                        "return applyFormulaPerItem([[FORMULA_ARGS]]);"));
               final String returnTypeString;
               final String resultTypeString;
               if (ta.dbPrimitiveType != null) {
      @@ -401,21 +386,20 @@ private CodeGenerator generateAppropriateGetMethod(TypeAnalyzer ta, boolean useP
                   g.activateOptional("maybeCreateII");
               }
       
      -        // This visitor initializes variables for the column source gets, and also puts together
      -        // (via the lambda return
      +        // This visitor initializes variables for the column source gets, and also puts together (via the lambda return
               // values), the names of all the arguments.
               final int[] nextId = {0};
               final List formulaArgs = visitFormulaParameters(idx -> idx.name,
      -            cs -> {
      -                final String cachedName = "__temp" + nextId[0]++;
      -                final CodeGenerator cc = g.instantiateNewRepeated("cacheColumnSourceGet");
      -                cc.replace("TYPE", cs.typeString);
      -                cc.replace("VAR", cachedName);
      -                cc.replace("GET_EXPRESSION", cs.makeGetExpression(usePrev));
      -                return cachedName;
      -            },
      -            null,
      -            null);
      +                cs -> {
      +                    final String cachedName = "__temp" + nextId[0]++;
      +                    final CodeGenerator cc = g.instantiateNewRepeated("cacheColumnSourceGet");
      +                    cc.replace("TYPE", cs.typeString);
      +                    cc.replace("VAR", cachedName);
      +                    cc.replace("GET_EXPRESSION", cs.makeGetExpression(usePrev));
      +                    return cachedName;
      +                },
      +                null,
      +                null);
       
               g.replace("FORMULA_ARGS", makeCommaSeparatedList(formulaArgs));
               g.replace("LAZY_RESULT_CACHE_NAME", LAZY_RESULT_CACHE_NAME);
      @@ -430,10 +414,10 @@ private CodeGenerator generateOptionalObjectGetMethod(TypeAnalyzer ta, boolean u
                   return CodeGenerator.create(); // empty
               }
               final CodeGenerator g = CodeGenerator.create(
      -            "@Override",
      -            "public Object [[GETTER_NAME]](final long k)", CodeGenerator.block(
      -                "return TypeUtils.box([[DELEGATED_GETTER_NAME]](k));"),
      -            "" // Extra spacing to get spacing right for my caller (because I am optional)
      +                "@Override",
      +                "public Object [[GETTER_NAME]](final long k)", CodeGenerator.block(
      +                        "return TypeUtils.box([[DELEGATED_GETTER_NAME]](k));"),
      +                "" // Extra spacing to get spacing right for my caller (because I am optional)
               );
               final String getterName = usePrev ? "getPrev" : "get";
               final String delegatedGetterName = getGetterName(ta.dbPrimitiveType, usePrev);
      @@ -445,32 +429,30 @@ private CodeGenerator generateOptionalObjectGetMethod(TypeAnalyzer ta, boolean u
           @NotNull
           private CodeGenerator generateNormalContextClass() {
               final CodeGenerator g = CodeGenerator.create(
      -            "private class FormulaFillContext implements [[FILL_CONTEXT_CANONICAL]]",
      -            CodeGenerator.block(
      -                // The optional i chunk
      -                CodeGenerator.optional("needsIChunk",
      -                    "private final WritableIntChunk __iChunk;"),
      -                // The optional ii chunk
      -                CodeGenerator.optional("needsIIChunk",
      -                    "private final WritableLongChunk __iiChunk;"),
      -                // fields
      -                CodeGenerator.repeated("defineField",
      -                    "private final ColumnSource.GetContext __subContext[[COL_SOURCE_NAME]];"),
      -                // constructor
      -                "FormulaFillContext(int __chunkCapacity)", CodeGenerator.block(
      -                    CodeGenerator.optional("needsIChunk",
      -                        "__iChunk = WritableIntChunk.makeWritableChunk(__chunkCapacity);"),
      -                    CodeGenerator.optional("needsIIChunk",
      -                        "__iiChunk = WritableLongChunk.makeWritableChunk(__chunkCapacity);"),
      -                    CodeGenerator.repeated("initField",
      -                        "__subContext[[COL_SOURCE_NAME]] = [[COL_SOURCE_NAME]].makeGetContext(__chunkCapacity);")),
      -                "",
      -                "@Override",
      -                "public void close()", CodeGenerator.block(
      -                    CodeGenerator.optional("needsIChunk", "__iChunk.close();"),
      -                    CodeGenerator.optional("needsIIChunk", "__iiChunk.close();"),
      -                    CodeGenerator.repeated("closeField",
      -                        "__subContext[[COL_SOURCE_NAME]].close();"))));
      +                "private class FormulaFillContext implements [[FILL_CONTEXT_CANONICAL]]", CodeGenerator.block(
      +                        // The optional i chunk
      +                        CodeGenerator.optional("needsIChunk",
      +                                "private final WritableIntChunk __iChunk;"),
      +                        // The optional ii chunk
      +                        CodeGenerator.optional("needsIIChunk",
      +                                "private final WritableLongChunk __iiChunk;"),
      +                        // fields
      +                        CodeGenerator.repeated("defineField",
      +                                "private final ColumnSource.GetContext __subContext[[COL_SOURCE_NAME]];"),
      +                        // constructor
      +                        "FormulaFillContext(int __chunkCapacity)", CodeGenerator.block(
      +                                CodeGenerator.optional("needsIChunk",
      +                                        "__iChunk = WritableIntChunk.makeWritableChunk(__chunkCapacity);"),
      +                                CodeGenerator.optional("needsIIChunk",
      +                                        "__iiChunk = WritableLongChunk.makeWritableChunk(__chunkCapacity);"),
      +                                CodeGenerator.repeated("initField",
      +                                        "__subContext[[COL_SOURCE_NAME]] = [[COL_SOURCE_NAME]].makeGetContext(__chunkCapacity);")),
      +                        "",
      +                        "@Override",
      +                        "public void close()", CodeGenerator.block(
      +                                CodeGenerator.optional("needsIChunk", "__iChunk.close();"),
      +                                CodeGenerator.optional("needsIIChunk", "__iiChunk.close();"),
      +                                CodeGenerator.repeated("closeField", "__subContext[[COL_SOURCE_NAME]].close();"))));
               g.replace("FILL_CONTEXT_CANONICAL", Formula.FillContext.class.getCanonicalName());
               if (usesI) {
                   g.activateAllOptionals("needsIChunk");
      @@ -479,34 +461,33 @@ private CodeGenerator generateNormalContextClass() {
                   g.activateAllOptionals("needsIIChunk");
               }
               visitFormulaParameters(null,
      -            cs -> {
      -                final CodeGenerator defineField = g.instantiateNewRepeated("defineField");
      -                final CodeGenerator initField = g.instantiateNewRepeated("initField");
      -                final CodeGenerator closeField = g.instantiateNewRepeated("closeField");
      -                defineField.replace("COL_SOURCE_NAME", cs.name);
      -                initField.replace("COL_SOURCE_NAME", cs.name);
      -                closeField.replace("COL_SOURCE_NAME", cs.name);
      -                return null;
      -            }, null, null);
      +                cs -> {
      +                    final CodeGenerator defineField = g.instantiateNewRepeated("defineField");
      +                    final CodeGenerator initField = g.instantiateNewRepeated("initField");
      +                    final CodeGenerator closeField = g.instantiateNewRepeated("closeField");
      +                    defineField.replace("COL_SOURCE_NAME", cs.name);
      +                    initField.replace("COL_SOURCE_NAME", cs.name);
      +                    closeField.replace("COL_SOURCE_NAME", cs.name);
      +                    return null;
      +                }, null, null);
               return g.freeze();
           }
       
           @NotNull
           private CodeGenerator generateMakeFillContext() {
               final CodeGenerator g = CodeGenerator.create(
      -            "@Override",
      -            "public FormulaFillContext makeFillContext(final int __chunkCapacity)",
      -            CodeGenerator.block(
      -                "return new FormulaFillContext(__chunkCapacity);"));
      +                "@Override",
      +                "public FormulaFillContext makeFillContext(final int __chunkCapacity)", CodeGenerator.block(
      +                        "return new FormulaFillContext(__chunkCapacity);"));
               return g.freeze();
           }
       
           @NotNull
           private CodeGenerator generateGetChunkType(TypeAnalyzer ta) {
               final CodeGenerator g = CodeGenerator.create(
      -            "@Override",
      -            "protected [[CHUNK_TYPE_CLASSNAME]] getChunkType()", CodeGenerator.block(
      -                "return [[CHUNK_TYPE_CLASSNAME]].[[CHUNK_TYPE]];"));
      +                "@Override",
      +                "protected [[CHUNK_TYPE_CLASSNAME]] getChunkType()", CodeGenerator.block(
      +                        "return [[CHUNK_TYPE_CLASSNAME]].[[CHUNK_TYPE]];"));
               g.replace("CHUNK_TYPE_CLASSNAME", ChunkType.class.getCanonicalName());
               g.replace("CHUNK_TYPE", ta.chunkTypeString);
               return g.freeze();
      @@ -515,32 +496,31 @@ private CodeGenerator generateGetChunkType(TypeAnalyzer ta) {
           @NotNull
           private CodeGenerator generateFillChunk(boolean usePrev) {
               final CodeGenerator g = CodeGenerator.create(
      -            "@Override",
      -            "public void [[FILL_METHOD]](final FillContext __context, final WritableChunk __destination, final OrderedKeys __orderedKeys)",
      -            CodeGenerator.block(
      -                "final FormulaFillContext __typedContext = (FormulaFillContext)__context;",
      -                CodeGenerator.repeated("getChunks",
      -                    "final [[CHUNK_TYPE]] __chunk__col__[[COL_SOURCE_NAME]] = this.[[COL_SOURCE_NAME]].[[GET_CURR_OR_PREV_CHUNK]]("
      -                        +
      -                        "__typedContext.__subContext[[COL_SOURCE_NAME]], __orderedKeys).[[AS_CHUNK_METHOD]]();"),
      -                "fillChunkHelper(" + Boolean.toString(usePrev)
      -                    + ", __typedContext, __destination, __orderedKeys[[ADDITIONAL_CHUNK_ARGS]]);"));
      +                "@Override",
      +                "public void [[FILL_METHOD]](final FillContext __context, final WritableChunk __destination, final OrderedKeys __orderedKeys)",
      +                CodeGenerator.block(
      +                        "final FormulaFillContext __typedContext = (FormulaFillContext)__context;",
      +                        CodeGenerator.repeated("getChunks",
      +                                "final [[CHUNK_TYPE]] __chunk__col__[[COL_SOURCE_NAME]] = this.[[COL_SOURCE_NAME]].[[GET_CURR_OR_PREV_CHUNK]]("
      +                                        +
      +                                        "__typedContext.__subContext[[COL_SOURCE_NAME]], __orderedKeys).[[AS_CHUNK_METHOD]]();"),
      +                        "fillChunkHelper(" + Boolean.toString(usePrev)
      +                                + ", __typedContext, __destination, __orderedKeys[[ADDITIONAL_CHUNK_ARGS]]);"));
       
               final String fillMethodName = String.format("fill%sChunk", usePrev ? "Prev" : "");
               g.replace("FILL_METHOD", fillMethodName);
               List chunkList = visitFormulaParameters(null,
      -            cs -> {
      -                final CodeGenerator getChunks = g.instantiateNewRepeated("getChunks");
      -                getChunks.replace("COL_SOURCE_NAME", cs.name);
      -                getChunks.replace("GET_CURR_OR_PREV_CHUNK", usePrev ? "getPrevChunk" : "getChunk");
      -                final TypeAnalyzer tm = TypeAnalyzer.create(cs.columnSource.getType());
      -                getChunks.replace("CHUNK_TYPE", tm.readChunkVariableType);
      -                getChunks.replace("AS_CHUNK_METHOD", tm.asReadChunkMethodName);
      -                return "__chunk__col__" + cs.name;
      -            },
      -            null, null);
      -        final String additionalChunkArgs =
      -            chunkList.isEmpty() ? "" : ", " + makeCommaSeparatedList(chunkList);
      +                cs -> {
      +                    final CodeGenerator getChunks = g.instantiateNewRepeated("getChunks");
      +                    getChunks.replace("COL_SOURCE_NAME", cs.name);
      +                    getChunks.replace("GET_CURR_OR_PREV_CHUNK", usePrev ? "getPrevChunk" : "getChunk");
      +                    final TypeAnalyzer tm = TypeAnalyzer.create(cs.columnSource.getType());
      +                    getChunks.replace("CHUNK_TYPE", tm.readChunkVariableType);
      +                    getChunks.replace("AS_CHUNK_METHOD", tm.asReadChunkMethodName);
      +                    return "__chunk__col__" + cs.name;
      +                },
      +                null, null);
      +        final String additionalChunkArgs = chunkList.isEmpty() ? "" : ", " + makeCommaSeparatedList(chunkList);
               g.replace("ADDITIONAL_CHUNK_ARGS", additionalChunkArgs);
               return g.freeze();
           }
      @@ -548,61 +528,60 @@ private CodeGenerator generateFillChunk(boolean usePrev) {
           @NotNull
           private CodeGenerator generateFillChunkHelper(TypeAnalyzer ta) {
               final CodeGenerator g = CodeGenerator.create(
      -            "private void fillChunkHelper(final boolean __usePrev, final FormulaFillContext __context,",
      -            CodeGenerator.indent(
      -                "final WritableChunk __destination,",
      -                "final OrderedKeys __orderedKeys[[ADDITIONAL_CHUNK_ARGS]])"),
      -            CodeGenerator.block(
      -                "final [[DEST_CHUNK_TYPE]] __typedDestination = __destination.[[DEST_AS_CHUNK_METHOD]]();",
      -                CodeGenerator.optional("maybeCreateIOrII",
      -                    "try (final Index prev = __usePrev ? __index.getPrevIndex() : null;",
      -                    CodeGenerator.indent(
      -                        "final Index inverted = ((prev != null) ? prev : __index).invert(__orderedKeys.asIndex()))"),
      -                    CodeGenerator.block(
      -                        CodeGenerator.optional("maybeCreateI",
      -                            "__context.__iChunk.setSize(0);",
      -                            "inverted.forAllLongs(l -> __context.__iChunk.add(__intSize(l)));"),
      -                        CodeGenerator.optional("maybeCreateII",
      -                            "inverted.fillKeyIndicesChunk(__context.__iiChunk);"))),
      -                CodeGenerator.repeated("getChunks",
      -                    "final [[CHUNK_TYPE]] __chunk__col__[[COL_SOURCE_NAME]] = __sources[[[SOURCE_INDEX]]].[[AS_CHUNK_METHOD]]();"),
      -                "final int[] __chunkPosHolder = new int[] {0};",
      -                "if ([[LAZY_RESULT_CACHE_NAME]] != null)", CodeGenerator.block(
      -                    "__orderedKeys.forAllLongs(k ->", CodeGenerator.block(
      -                        "final int __chunkPos = __chunkPosHolder[0]++;",
      -                        CodeGenerator.optional("maybeCreateI",
      -                            "final int i = __context.__iChunk.get(__chunkPos);"),
      -                        CodeGenerator.optional("maybeCreateII",
      -                            "final long ii = __context.__iiChunk.get(__chunkPos);"),
      -                        "final Object __lazyKey = [[C14NUTIL_CLASSNAME]].maybeMakeSmartKey([[APPLY_FORMULA_ARGS]]);",
      -                        "__typedDestination.set(__chunkPos, ([[RESULT_TYPE]])[[LAZY_RESULT_CACHE_NAME]].computeIfAbsent(__lazyKey, __unusedKey -> applyFormulaPerItem([[APPLY_FORMULA_ARGS]])));"),
      -                    ");" // close the lambda
      -                ), CodeGenerator.samelineBlock("else",
      -                    "__orderedKeys.forAllLongs(k ->", CodeGenerator.block(
      -                        "final int __chunkPos = __chunkPosHolder[0]++;",
      -                        CodeGenerator.optional("maybeCreateI",
      -                            "final int i = __context.__iChunk.get(__chunkPos);"),
      -                        CodeGenerator.optional("maybeCreateII",
      -                            "final long ii = __context.__iiChunk.get(__chunkPos);"),
      -                        "__typedDestination.set(__chunkPos, applyFormulaPerItem([[APPLY_FORMULA_ARGS]]));"),
      -                    ");" // close the lambda
      -                ),
      -                "__typedDestination.setSize(__chunkPosHolder[0]);"
      -
      -            ));
      +                "private void fillChunkHelper(final boolean __usePrev, final FormulaFillContext __context,",
      +                CodeGenerator.indent(
      +                        "final WritableChunk __destination,",
      +                        "final OrderedKeys __orderedKeys[[ADDITIONAL_CHUNK_ARGS]])"),
      +                CodeGenerator.block(
      +                        "final [[DEST_CHUNK_TYPE]] __typedDestination = __destination.[[DEST_AS_CHUNK_METHOD]]();",
      +                        CodeGenerator.optional("maybeCreateIOrII",
      +                                "try (final Index prev = __usePrev ? __index.getPrevIndex() : null;",
      +                                CodeGenerator.indent(
      +                                        "final Index inverted = ((prev != null) ? prev : __index).invert(__orderedKeys.asIndex()))"),
      +                                CodeGenerator.block(
      +                                        CodeGenerator.optional("maybeCreateI",
      +                                                "__context.__iChunk.setSize(0);",
      +                                                "inverted.forAllLongs(l -> __context.__iChunk.add(__intSize(l)));"),
      +                                        CodeGenerator.optional("maybeCreateII",
      +                                                "inverted.fillKeyIndicesChunk(__context.__iiChunk);"))),
      +                        CodeGenerator.repeated("getChunks",
      +                                "final [[CHUNK_TYPE]] __chunk__col__[[COL_SOURCE_NAME]] = __sources[[[SOURCE_INDEX]]].[[AS_CHUNK_METHOD]]();"),
      +                        "final int[] __chunkPosHolder = new int[] {0};",
      +                        "if ([[LAZY_RESULT_CACHE_NAME]] != null)", CodeGenerator.block(
      +                                "__orderedKeys.forAllLongs(k ->", CodeGenerator.block(
      +                                        "final int __chunkPos = __chunkPosHolder[0]++;",
      +                                        CodeGenerator.optional("maybeCreateI",
      +                                                "final int i = __context.__iChunk.get(__chunkPos);"),
      +                                        CodeGenerator.optional("maybeCreateII",
      +                                                "final long ii = __context.__iiChunk.get(__chunkPos);"),
      +                                        "final Object __lazyKey = [[C14NUTIL_CLASSNAME]].maybeMakeSmartKey([[APPLY_FORMULA_ARGS]]);",
      +                                        "__typedDestination.set(__chunkPos, ([[RESULT_TYPE]])[[LAZY_RESULT_CACHE_NAME]].computeIfAbsent(__lazyKey, __unusedKey -> applyFormulaPerItem([[APPLY_FORMULA_ARGS]])));"),
      +                                ");" // close the lambda
      +                        ), CodeGenerator.samelineBlock("else",
      +                                "__orderedKeys.forAllLongs(k ->", CodeGenerator.block(
      +                                        "final int __chunkPos = __chunkPosHolder[0]++;",
      +                                        CodeGenerator.optional("maybeCreateI",
      +                                                "final int i = __context.__iChunk.get(__chunkPos);"),
      +                                        CodeGenerator.optional("maybeCreateII",
      +                                                "final long ii = __context.__iiChunk.get(__chunkPos);"),
      +                                        "__typedDestination.set(__chunkPos, applyFormulaPerItem([[APPLY_FORMULA_ARGS]]));"),
      +                                ");" // close the lambda
      +                        ),
      +                        "__typedDestination.setSize(__chunkPosHolder[0]);"
      +
      +                ));
       
               g.replace("DEST_CHUNK_TYPE", ta.writableChunkVariableType);
               g.replace("DEST_AS_CHUNK_METHOD", ta.asWritableChunkMethodName);
               final List chunkArgs = visitFormulaParameters(null,
      -            cs -> {
      -                final String name = "__chunk__col__" + cs.name;
      -                final TypeAnalyzer t2 = TypeAnalyzer.create(cs.columnSource.getType());
      -                return t2.readChunkVariableType + " " + name;
      -            },
      -            null,
      -            null);
      -        final String additionalChunkArgs =
      -            chunkArgs.isEmpty() ? "" : ", " + makeCommaSeparatedList(chunkArgs);
      +                cs -> {
      +                    final String name = "__chunk__col__" + cs.name;
      +                    final TypeAnalyzer t2 = TypeAnalyzer.create(cs.columnSource.getType());
      +                    return t2.readChunkVariableType + " " + name;
      +                },
      +                null,
      +                null);
      +        final String additionalChunkArgs = chunkArgs.isEmpty() ? "" : ", " + makeCommaSeparatedList(chunkArgs);
               g.replace("ADDITIONAL_CHUNK_ARGS", additionalChunkArgs);
               if (usesI || usesII) {
                   g.activateOptional("maybeCreateIOrII");
      @@ -614,13 +593,12 @@ private CodeGenerator generateFillChunkHelper(TypeAnalyzer ta) {
                   g.activateAllOptionals("maybeCreateII");
               }
               final List applyFormulaArgs = visitFormulaParameters(ix -> ix.name,
      -            p -> String.format("__chunk__col__%s.get(%s)", p.name, "__chunkPos"),
      -            null,
      -            null);
      +                p -> String.format("__chunk__col__%s.get(%s)", p.name, "__chunkPos"),
      +                null,
      +                null);
               g.replace("APPLY_FORMULA_ARGS", makeCommaSeparatedList(applyFormulaArgs));
       
      -        g.replace("RESULT_TYPE",
      -            ta.dbPrimitiveType != null ? ta.dbPrimitiveType.getName() : ta.typeString);
      +        g.replace("RESULT_TYPE", ta.dbPrimitiveType != null ? ta.dbPrimitiveType.getName() : ta.typeString);
               g.replace("LAZY_RESULT_CACHE_NAME", LAZY_RESULT_CACHE_NAME);
               g.replace("C14NUTIL_CLASSNAME", C14NUTIL_CLASSNAME);
       
      @@ -629,16 +607,16 @@ private CodeGenerator generateFillChunkHelper(TypeAnalyzer ta) {
       
           private CodeGenerator generateIntSize() {
               final CodeGenerator g = CodeGenerator.create(
      -            "private int __intSize(final long l)", CodeGenerator.block(
      -                "return LongSizedDataStructure.intSize(\"FormulaColumn ii usage\", l);"));
      +                "private int __intSize(final long l)", CodeGenerator.block(
      +                        "return LongSizedDataStructure.intSize(\"FormulaColumn ii usage\", l);"));
               return g.freeze();
           }
       
           private  List visitFormulaParameters(
      -        Function indexLambda,
      -        Function columnSourceLambda,
      -        Function columnArrayLambda,
      -        Function paramLambda) {
      +            Function indexLambda,
      +            Function columnSourceLambda,
      +            Function columnArrayLambda,
      +            Function paramLambda) {
               final List results = new ArrayList<>();
               if (indexLambda != null) {
                   if (usesI) {
      @@ -663,10 +641,8 @@ private  List visitFormulaParameters(
                       final String columnSourceGetType = columnSourceGetMethodReturnType(cs);
                       final Class csType = cs.getType();
                       final String csTypeString = COLUMN_SOURCE_CLASSNAME + '<'
      -                    + io.deephaven.util.type.TypeUtils.getBoxedType(cs.getType()).getCanonicalName()
      -                    + '>';
      -                final ColumnSourceParameter csp =
      -                    new ColumnSourceParameter(usedColumn, csType, columnSourceGetType,
      +                        + io.deephaven.util.type.TypeUtils.getBoxedType(cs.getType()).getCanonicalName() + '>';
      +                final ColumnSourceParameter csp = new ColumnSourceParameter(usedColumn, csType, columnSourceGetType,
                               cs, csTypeString);
                       addIfNotNull(results, columnSourceLambda.apply(csp));
                   }
      @@ -678,10 +654,9 @@ private  List visitFormulaParameters(
                       final Class dataType = cs.getType();
                       final Class dbArrayType = getDbArrayType(dataType);
                       final String dbArrayTypeAsString = dbArrayType.getCanonicalName() +
      -                    (TypeUtils.isConvertibleToPrimitive(dataType) ? ""
      -                        : "<" + dataType.getCanonicalName() + ">");
      +                        (TypeUtils.isConvertibleToPrimitive(dataType) ? "" : "<" + dataType.getCanonicalName() + ">");
                       final ColumnArrayParameter cap = new ColumnArrayParameter(uca + COLUMN_SUFFIX, uca,
      -                    dataType, dbArrayType, dbArrayTypeAsString, cs);
      +                        dataType, dbArrayType, dbArrayTypeAsString, cs);
                       addIfNotNull(results, columnArrayLambda.apply(cap));
                   }
               }
      @@ -690,7 +665,7 @@ private  List visitFormulaParameters(
                   for (int ii = 0; ii < params.length; ++ii) {
                       final Param p = params[ii];
                       final ParamParameter pp = new ParamParameter(ii, p.getName(), p.getDeclaredType(),
      -                    p.getDeclaredTypeName());
      +                        p.getDeclaredTypeName());
                       addIfNotNull(results, paramLambda.apply(pp));
                   }
               }
      @@ -718,9 +693,8 @@ private JavaKernelBuilder.Result invokeKernelBuilder() {
               for (final String p : sd.params) {
                   paramDict.put(p, allParamDict.get(p));
               }
      -        return JavaKernelBuilder.create(af.cookedFormulaString, sd.returnType,
      -            af.timeInstanceVariables, columnDict,
      -            arrayDict, paramDict);
      +        return JavaKernelBuilder.create(af.cookedFormulaString, sd.returnType, af.timeInstanceVariables, columnDict,
      +                arrayDict, paramDict);
           }
       
           /**
      @@ -747,13 +721,11 @@ protected FormulaFactory createFormulaFactory() {
               }
           }
       
      -    private Class compileFormula(final String what, final String classBody,
      -        final String className) {
      +    private Class compileFormula(final String what, final String classBody, final String className) {
               // System.out.printf("compileFormula: what is %s. Code is...%n%s%n", what, classBody);
               try (final QueryPerformanceNugget nugget =
      -            QueryPerformanceRecorder.getInstance().getNugget("Compile:" + what)) {
      -            // Compilation needs to take place with elevated privileges, but the created object
      -            // should not have them.
      +                QueryPerformanceRecorder.getInstance().getNugget("Compile:" + what)) {
      +            // Compilation needs to take place with elevated privileges, but the created object should not have them.
       
                   final List> paramClasses = new ArrayList<>();
                   final Consumer> addParamClass = (cls) -> {
      @@ -762,27 +734,26 @@ private Class compileFormula(final String what, final String classBody,
                       }
                   };
                   visitFormulaParameters(null,
      -                csp -> {
      -                    addParamClass.accept(csp.type);
      -                    addParamClass.accept(csp.columnSource.getComponentType());
      -                    return null;
      -                },
      -                cap -> {
      -                    addParamClass.accept(cap.dataType);
      -                    addParamClass.accept(cap.columnSource.getComponentType());
      -                    return null;
      -                },
      -                p -> {
      -                    addParamClass.accept(p.type);
      -                    return null;
      -                });
      -            return AccessController.doPrivileged(
      -                (PrivilegedExceptionAction) () -> CompilerTools.compile(className, classBody,
      -                    CompilerTools.FORMULA_PREFIX,
      -                    Param.expandParameterClasses(paramClasses)));
      +                    csp -> {
      +                        addParamClass.accept(csp.type);
      +                        addParamClass.accept(csp.columnSource.getComponentType());
      +                        return null;
      +                    },
      +                    cap -> {
      +                        addParamClass.accept(cap.dataType);
      +                        addParamClass.accept(cap.columnSource.getComponentType());
      +                        return null;
      +                    },
      +                    p -> {
      +                        addParamClass.accept(p.type);
      +                        return null;
      +                    });
      +            return AccessController
      +                    .doPrivileged((PrivilegedExceptionAction) () -> CompilerTools.compile(className, classBody,
      +                            CompilerTools.FORMULA_PREFIX,
      +                            Param.expandParameterClasses(paramClasses)));
               } catch (PrivilegedActionException pae) {
      -            throw new FormulaCompilationException("Formula compilation error for: " + what,
      -                pae.getException());
      +            throw new FormulaCompilationException("Formula compilation error for: " + what, pae.getException());
               }
           }
       
      @@ -805,9 +776,8 @@ private static class ColumnSourceParameter {
               final ColumnSource columnSource;
               final String columnSourceGetTypeString;
       
      -        public ColumnSourceParameter(String name, Class type, String typeString,
      -            ColumnSource columnSource,
      -            String columnSourceGetTypeString) {
      +        public ColumnSourceParameter(String name, Class type, String typeString, ColumnSource columnSource,
      +                String columnSourceGetTypeString) {
                   this.name = name;
                   this.type = type;
                   this.typeString = typeString;
      diff --git a/DB/src/main/java/io/deephaven/db/v2/select/DisjunctiveFilter.java b/DB/src/main/java/io/deephaven/db/v2/select/DisjunctiveFilter.java
      index 41782bf7480..b6bf6da78ec 100644
      --- a/DB/src/main/java/io/deephaven/db/v2/select/DisjunctiveFilter.java
      +++ b/DB/src/main/java/io/deephaven/db/v2/select/DisjunctiveFilter.java
      @@ -24,15 +24,13 @@ public static SelectFilter makeDisjunctiveFilter(SelectFilter... componentFilter
               final List rawComponents = new ArrayList<>();
               for (int ii = 0; ii < componentFilters.length; ++ii) {
                   if (componentFilters[ii] instanceof DisjunctiveFilter) {
      -                rawComponents.addAll(Arrays
      -                    .asList(((DisjunctiveFilter) componentFilters[ii]).getComponentFilters()));
      +                rawComponents.addAll(Arrays.asList(((DisjunctiveFilter) componentFilters[ii]).getComponentFilters()));
                   } else {
                       rawComponents.add(componentFilters[ii]);
                   }
               }
       
      -        return new DisjunctiveFilter(
      -            rawComponents.toArray(SelectFilter.ZERO_LENGTH_SELECT_FILTER_ARRAY));
      +        return new DisjunctiveFilter(rawComponents.toArray(SelectFilter.ZERO_LENGTH_SELECT_FILTER_ARRAY));
           }
       
           @Override
      @@ -67,15 +65,14 @@ public Index filter(Index selection, Index fullSet, Table table, boolean usePrev
               }
       
               final Index result = matched == null ? selection.clone() : matched.clone();
      -        Assert.eq(result.size(), "result.size()", result.getPrevIndex().size(),
      -            "result.getPrevIndex.size()");
      +        Assert.eq(result.size(), "result.size()", result.getPrevIndex().size(), "result.getPrevIndex.size()");
               return result;
           }
       
           @Override
           public DisjunctiveFilter copy() {
      -        return new DisjunctiveFilter(Arrays.stream(getComponentFilters()).map(SelectFilter::copy)
      -            .toArray(SelectFilter[]::new));
      +        return new DisjunctiveFilter(
      +                Arrays.stream(getComponentFilters()).map(SelectFilter::copy).toArray(SelectFilter[]::new));
           }
       
           @Override
      diff --git a/DB/src/main/java/io/deephaven/db/v2/select/DownsampledWhereFilter.java b/DB/src/main/java/io/deephaven/db/v2/select/DownsampledWhereFilter.java
      index 75e0649455c..d7a2cc39133 100644
      --- a/DB/src/main/java/io/deephaven/db/v2/select/DownsampledWhereFilter.java
      +++ b/DB/src/main/java/io/deephaven/db/v2/select/DownsampledWhereFilter.java
      @@ -16,8 +16,8 @@
       import java.util.List;
       
       /**
      - * Utilities for downsampling non-ticking time series data within a query. The input table must be
      - * sorted by the {@link DBDateTime} column to be used for binning rows.
      + * Utilities for downsampling non-ticking time series data within a query. The input table must be sorted by the
      + * {@link DBDateTime} column to be used for binning rows.
        * 

      *

      *

      @@ -45,12 +45,11 @@ public enum SampleOrder { } /** - * Creates a {@link DownsampledWhereFilter} which can be used in a .where clause to downsample - * time series rows. + * Creates a {@link DownsampledWhereFilter} which can be used in a .where clause to downsample time series rows. * * @param column {@link DBDateTime} column to use for filtering. - * @param binSize Size in nanoseconds for the time bins. Constants like - * {@link DBTimeUtils#MINUTE} are typically used. + * @param binSize Size in nanoseconds for the time bins. Constants like {@link DBTimeUtils#MINUTE} are typically + * used. * @param order {@link SampleOrder} to set desired behavior. */ public DownsampledWhereFilter(String column, long binSize, SampleOrder order) { @@ -60,12 +59,11 @@ public DownsampledWhereFilter(String column, long binSize, SampleOrder order) { } /** - * Creates a {@link DownsampledWhereFilter} which can be used in a .where clause to downsample - * time series rows. + * Creates a {@link DownsampledWhereFilter} which can be used in a .where clause to downsample time series rows. * * @param column {@link DBDateTime} column to use for filtering. - * @param binSize Size in nanoseconds for the time bins. Constants like - * {@link DBTimeUtils#MINUTE} are typically used. + * @param binSize Size in nanoseconds for the time bins. Constants like {@link DBTimeUtils#MINUTE} are typically + * used. 
*/ public DownsampledWhereFilter(String column, long binSize) { this.column = column; @@ -89,8 +87,7 @@ public void init(TableDefinition tableDefinition) {} @Override public Index filter(Index selection, Index fullSet, Table table, boolean usePrev) { if (DynamicNode.isDynamicAndIsRefreshing(table)) { - throw new UnsupportedOperationException( - "Can not do a DownsampledWhereFilter on a refreshing table!"); + throw new UnsupportedOperationException("Can not do a DownsampledWhereFilter on a refreshing table!"); } // NB: because our source is not refreshing, we don't care about the previous values @@ -112,8 +109,7 @@ public Index filter(Index selection, Index fullSet, Table table, boolean usePrev hasNext = it.hasNext(); DBDateTime timestamp = timestampColumn.get(next); - DBDateTime bin = - (order == SampleOrder.UPPERLAST) ? DBTimeUtils.upperBin(timestamp, binSize) + DBDateTime bin = (order == SampleOrder.UPPERLAST) ? DBTimeUtils.upperBin(timestamp, binSize) : DBTimeUtils.lowerBin(timestamp, binSize); if (!hasNext) { if (order == SampleOrder.UPPERLAST) { diff --git a/DB/src/main/java/io/deephaven/db/v2/select/DynamicWhereFilter.java b/DB/src/main/java/io/deephaven/db/v2/select/DynamicWhereFilter.java index 405cb649c3f..64f6adda46b 100644 --- a/DB/src/main/java/io/deephaven/db/v2/select/DynamicWhereFilter.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/DynamicWhereFilter.java @@ -34,8 +34,7 @@ * * Each time the set table ticks, the entire where filter is recalculated. 
*/ -public class DynamicWhereFilter extends SelectFilterLivenessArtifactImpl - implements NotificationQueue.Dependency { +public class DynamicWhereFilter extends SelectFilterLivenessArtifactImpl implements NotificationQueue.Dependency { private static final int CHUNK_SIZE = 1 << 16; private final MatchPair[] matchPairs; @@ -60,13 +59,12 @@ public class DynamicWhereFilter extends SelectFilterLivenessArtifactImpl private RecomputeListener listener; private QueryTable resultTable; - public DynamicWhereFilter(final Table setTable, final boolean inclusion, - final MatchPair... setColumnsNames) { + public DynamicWhereFilter(final Table setTable, final boolean inclusion, final MatchPair... setColumnsNames) { this(Table.GroupStrategy.DEFAULT, setTable, inclusion, setColumnsNames); } - public DynamicWhereFilter(final Table.GroupStrategy groupStrategy, final Table setTable, - final boolean inclusion, final MatchPair... setColumnsNames) { + public DynamicWhereFilter(final Table.GroupStrategy groupStrategy, final Table setTable, final boolean inclusion, + final MatchPair... 
setColumnsNames) { if (setTable.isLive()) { LiveTableMonitor.DEFAULT.checkInitiateTableOperation(); } @@ -76,25 +74,22 @@ public DynamicWhereFilter(final Table.GroupStrategy groupStrategy, final Table s this.inclusion = inclusion; this.setTable = setTable; - final ColumnSource[] setColumns = Arrays.stream(matchPairs) - .map(mp -> setTable.getColumnSource(mp.right())).toArray(ColumnSource[]::new); + final ColumnSource[] setColumns = + Arrays.stream(matchPairs).map(mp -> setTable.getColumnSource(mp.right())).toArray(ColumnSource[]::new); setTupleSource = TupleSourceFactory.makeTupleSource(setColumns); setTable.getIndex().forAllLongs((final long v) -> addKey(makeKey(v))); if (DynamicNode.isDynamicAndIsRefreshing(setTable)) { - final String[] columnNames = - Arrays.stream(matchPairs).map(MatchPair::right).toArray(String[]::new); - final ModifiedColumnSet modTokenSet = - ((DynamicTable) setTable).newModifiedColumnSet(columnNames); + final String[] columnNames = Arrays.stream(matchPairs).map(MatchPair::right).toArray(String[]::new); + final ModifiedColumnSet modTokenSet = ((DynamicTable) setTable).newModifiedColumnSet(columnNames); setUpdateListener = new InstrumentedShiftAwareListenerAdapter( - "DynamicWhereFilter(" + Arrays.toString(setColumnsNames) + ")", - (DynamicTable) setTable, false) { + "DynamicWhereFilter(" + Arrays.toString(setColumnsNames) + ")", (DynamicTable) setTable, false) { @Override public void onUpdate(final Update upstream) { if (upstream.added.empty() && upstream.removed.empty() - && !upstream.modifiedColumnSet.containsAny(modTokenSet)) { + && !upstream.modifiedColumnSet.containsAny(modTokenSet)) { return; } @@ -113,8 +108,7 @@ public void onUpdate(final Update upstream) { } }); - // Pretend every row of the original table was modified, this is essential so - // that the where clause + // Pretend every row of the original table was modified, this is essential so that the where clause // can be re-evaluated based on the updated live set. 
if (listener != null) { if (upstream.added.nonempty() || trueModification.booleanValue()) { @@ -135,8 +129,7 @@ public void onUpdate(final Update upstream) { } @Override - public void onFailureInternal(Throwable originalException, - UpdatePerformanceTracker.Entry sourceEntry) { + public void onFailureInternal(Throwable originalException, UpdatePerformanceTracker.Entry sourceEntry) { if (listener != null) { resultTable.notifyListenersOnError(originalException, sourceEntry); } @@ -195,8 +188,8 @@ public Index filter(Index selection, Index fullSet, Table table, boolean usePrev throw new PreviousFilteringNotSupported(); } - final ColumnSource[] keyColumns = Arrays.stream(matchPairs) - .map(mp -> table.getColumnSource(mp.left())).toArray(ColumnSource[]::new); + final ColumnSource[] keyColumns = + Arrays.stream(matchPairs).map(mp -> table.getColumnSource(mp.left())).toArray(ColumnSource[]::new); final TupleSource tupleSource = TupleSourceFactory.makeTupleSource(keyColumns); switch (groupStrategy) { @@ -204,12 +197,11 @@ public Index filter(Index selection, Index fullSet, Table table, boolean usePrev if (matchPairs.length == 1) { // this is just a single column filter so it will actually be exactly right if (!liveValuesArrayValid) { - liveValuesArray = - liveValues.toArray(CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); + liveValuesArray = liveValues.toArray(CollectionUtil.ZERO_LENGTH_OBJECT_ARRAY); liveValuesArrayValid = true; } - return table.getColumnSource(matchPairs[0].left()).match(!inclusion, false, - false, selection, liveValuesArray); + return table.getColumnSource(matchPairs[0].left()).match(!inclusion, false, false, selection, + liveValuesArray); } // pick something sensible @@ -220,18 +212,16 @@ public Index filter(Index selection, Index fullSet, Table table, boolean usePrev return filterLinear(selection, keyColumns, tupleSource); } } - final boolean allGrouping = - Arrays.stream(keyColumns).allMatch(selection::hasGrouping); + final boolean allGrouping = 
Arrays.stream(keyColumns).allMatch(selection::hasGrouping); if (allGrouping) { return filterGrouping(selection, tupleSource); } - final ColumnSource[] sourcesWithGroupings = Arrays.stream(keyColumns) - .filter(selection::hasGrouping).toArray(ColumnSource[]::new); - final OptionalInt minGroupCount = Arrays.stream(sourcesWithGroupings) - .mapToInt(x -> selection.getGrouping(x).size()).min(); - if (minGroupCount.isPresent() - && (minGroupCount.getAsInt() * 4) < selection.size()) { + final ColumnSource[] sourcesWithGroupings = + Arrays.stream(keyColumns).filter(selection::hasGrouping).toArray(ColumnSource[]::new); + final OptionalInt minGroupCount = + Arrays.stream(sourcesWithGroupings).mapToInt(x -> selection.getGrouping(x).size()).min(); + if (minGroupCount.isPresent() && (minGroupCount.getAsInt() * 4) < selection.size()) { return filterGrouping(selection, tupleSource); } return filterLinear(selection, keyColumns, tupleSource); @@ -256,14 +246,13 @@ private Index filterGrouping(Index selection, TupleSource tupleSource) { } private Index filterGrouping(Index selection, Table table) { - final ColumnSource[] keyColumns = Arrays.stream(matchPairs) - .map(mp -> table.getColumnSource(mp.left())).toArray(ColumnSource[]::new); + final ColumnSource[] keyColumns = + Arrays.stream(matchPairs).map(mp -> table.getColumnSource(mp.left())).toArray(ColumnSource[]::new); final TupleSource tupleSource = TupleSourceFactory.makeTupleSource(keyColumns); return filterGrouping(selection, tupleSource); } - private Index filterLinear(Index selection, ColumnSource[] keyColumns, - TupleSource tupleSource) { + private Index filterLinear(Index selection, ColumnSource[] keyColumns, TupleSource tupleSource) { if (keyColumns.length == 1) { return filterLinearOne(selection, keyColumns[0]); } else { @@ -277,19 +266,17 @@ private Index filterLinearOne(Index selection, ColumnSource keyColumn) { } if (!kernelValid) { - setInclusionKernel = - SetInclusionKernel.makeKernel(keyColumn.getChunkType(), 
liveValues, inclusion); + setInclusionKernel = SetInclusionKernel.makeKernel(keyColumn.getChunkType(), liveValues, inclusion); kernelValid = true; } final Index.SequentialBuilder indexBuilder = Index.FACTORY.getSequentialBuilder(); try (final ColumnSource.GetContext getContext = keyColumn.makeGetContext(CHUNK_SIZE); - final OrderedKeys.Iterator okIt = selection.getOrderedKeysIterator()) { + final OrderedKeys.Iterator okIt = selection.getOrderedKeysIterator()) { final WritableLongChunk keyIndices = - WritableLongChunk.makeWritableChunk(CHUNK_SIZE); - final WritableBooleanChunk matches = - WritableBooleanChunk.makeWritableChunk(CHUNK_SIZE); + WritableLongChunk.makeWritableChunk(CHUNK_SIZE); + final WritableBooleanChunk matches = WritableBooleanChunk.makeWritableChunk(CHUNK_SIZE); while (okIt.hasMore()) { final OrderedKeys chunkOk = okIt.getNextOrderedKeysWithLength(CHUNK_SIZE); @@ -328,10 +315,7 @@ private Index filterLinearTuple(Index selection, TupleSource tupleSource) { @Override public boolean isSimpleFilter() { - /* - * This doesn't execute any user code, so it should be safe to execute it against untrusted - * data. - */ + /* This doesn't execute any user code, so it should be safe to execute it against untrusted data. 
*/ return true; } @@ -361,8 +345,8 @@ public boolean satisfied(final long step) { @Override public LogOutput append(LogOutput logOutput) { - return logOutput.append("DynamicWhereFilter(") - .append(MatchPair.MATCH_PAIR_ARRAY_FORMATTER, matchPairs).append(")"); + return logOutput.append("DynamicWhereFilter(").append(MatchPair.MATCH_PAIR_ARRAY_FORMATTER, matchPairs) + .append(")"); } @Override diff --git a/DB/src/main/java/io/deephaven/db/v2/select/Formula.java b/DB/src/main/java/io/deephaven/db/v2/select/Formula.java index 0a5b0bb448b..872cc193c84 100644 --- a/DB/src/main/java/io/deephaven/db/v2/select/Formula.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/Formula.java @@ -134,8 +134,7 @@ private static class FormulaGetContext implements GetContext { final WritableChunk sourceChunk; final FillContext fillContext; - FormulaGetContext(final ChunkType chunkType, final FillContext fillContext, - final int chunkCapacity) { + FormulaGetContext(final ChunkType chunkType, final FillContext fillContext, final int chunkCapacity) { this.sourceChunk = chunkType.makeWritableChunk(chunkCapacity); this.fillContext = fillContext; } @@ -153,8 +152,7 @@ public GetContext makeGetContext(final int chunkCapacity) { public abstract FillContext makeFillContext(final int chunkCapacity); - public Chunk getChunk(@NotNull final GetContext context, - @NotNull final OrderedKeys orderedKeys) { + public Chunk getChunk(@NotNull final GetContext context, @NotNull final OrderedKeys orderedKeys) { final FormulaGetContext formulaGetContext = (FormulaGetContext) context; final WritableChunk sourceChunk = formulaGetContext.sourceChunk; fillChunk(formulaGetContext.fillContext, sourceChunk, orderedKeys); @@ -163,7 +161,7 @@ public Chunk getChunk(@NotNull final GetContext context, public Chunk getPrevChunk(@NotNull final GetContext context, - @NotNull final OrderedKeys orderedKeys) { + @NotNull final OrderedKeys orderedKeys) { final FormulaGetContext formulaGetContext = (FormulaGetContext) context; 
final WritableChunk sourceChunk = formulaGetContext.sourceChunk; fillPrevChunk(formulaGetContext.fillContext, sourceChunk, orderedKeys); @@ -171,12 +169,12 @@ public Chunk getPrevChunk(@NotNull final GetContext context, } public abstract void fillChunk(@NotNull final FillContext context, - @NotNull final WritableChunk destination, - @NotNull final OrderedKeys orderedKeys); + @NotNull final WritableChunk destination, + @NotNull final OrderedKeys orderedKeys); public abstract void fillPrevChunk(@NotNull final FillContext context, - @NotNull final WritableChunk destination, - @NotNull final OrderedKeys orderedKeys); + @NotNull final WritableChunk destination, + @NotNull final OrderedKeys orderedKeys); protected abstract ChunkType getChunkType(); } diff --git a/DB/src/main/java/io/deephaven/db/v2/select/FormulaColumn.java b/DB/src/main/java/io/deephaven/db/v2/select/FormulaColumn.java index d7ed278dff6..fa46d213abb 100644 --- a/DB/src/main/java/io/deephaven/db/v2/select/FormulaColumn.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/FormulaColumn.java @@ -6,16 +6,14 @@ public interface FormulaColumn extends SelectColumn { static FormulaColumn createFormulaColumn(String columnName, String formulaString, - FormulaParserConfiguration parser) { + FormulaParserConfiguration parser) { switch (parser) { case Deephaven: return new DhFormulaColumn(columnName, formulaString); case Numba: - throw new UnsupportedOperationException( - "Python formula columns must be created from python"); + throw new UnsupportedOperationException("Python formula columns must be created from python"); default: - throw new UnsupportedOperationException( - "Parser support not implemented for " + parser); + throw new UnsupportedOperationException("Parser support not implemented for " + parser); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/select/FormulaGenerator.java b/DB/src/main/java/io/deephaven/db/v2/select/FormulaGenerator.java index 2ade85f5670..c5ce453fe2f 100644 --- 
a/DB/src/main/java/io/deephaven/db/v2/select/FormulaGenerator.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/FormulaGenerator.java @@ -9,12 +9,12 @@ public interface FormulaGenerator { List initDef(Map columnDefinitionMap, - Map columnsOverride); + Map columnsOverride); Formula getFormula(Index index, Class returnType, boolean initLazyMap, - Map columnsToData, - Map fallThroughColumns, boolean fallThroughContiguous, - Map columnsOverride, Index overrideIndex); + Map columnsToData, + Map fallThroughColumns, boolean fallThroughContiguous, + Map columnsOverride, Index overrideIndex); Class getReturnedType(); diff --git a/DB/src/main/java/io/deephaven/db/v2/select/FormulaKernelTypedBase.java b/DB/src/main/java/io/deephaven/db/v2/select/FormulaKernelTypedBase.java index 6215b8d047b..7a6336cd376 100644 --- a/DB/src/main/java/io/deephaven/db/v2/select/FormulaKernelTypedBase.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/FormulaKernelTypedBase.java @@ -27,63 +27,62 @@ import io.deephaven.db.v2.sources.chunk.WritableShortChunk; /** - * Extends {@link FormulaKernel} for specifically typed destination {@link WritableChunk - * WritableChunks}. + * Extends {@link FormulaKernel} for specifically typed destination {@link WritableChunk WritableChunks}. 
*/ public abstract class FormulaKernelTypedBase implements FormulaKernel { @Override public final void applyFormulaChunk( - FillContext __context, - WritableChunk __destination, - Chunk[] __sources) { + FillContext __context, + WritableChunk __destination, + Chunk[] __sources) { __destination.walk(new ToTypedMethod<>(__context, __sources)); } public abstract void applyFormulaChunk( - Formula.FillContext __context, - WritableByteChunk __destination, - Chunk[] __sources); + Formula.FillContext __context, + WritableByteChunk __destination, + Chunk[] __sources); public abstract void applyFormulaChunk( - Formula.FillContext __context, - WritableBooleanChunk __destination, - Chunk[] __sources); + Formula.FillContext __context, + WritableBooleanChunk __destination, + Chunk[] __sources); public abstract void applyFormulaChunk( - Formula.FillContext __context, - WritableCharChunk __destination, - Chunk[] __sources); + Formula.FillContext __context, + WritableCharChunk __destination, + Chunk[] __sources); public abstract void applyFormulaChunk( - Formula.FillContext __context, - WritableShortChunk __destination, - Chunk[] __sources); + Formula.FillContext __context, + WritableShortChunk __destination, + Chunk[] __sources); public abstract void applyFormulaChunk( - Formula.FillContext __context, - WritableIntChunk __destination, - Chunk[] __sources); + Formula.FillContext __context, + WritableIntChunk __destination, + Chunk[] __sources); public abstract void applyFormulaChunk( - Formula.FillContext __context, - WritableLongChunk __destination, - Chunk[] __sources); + Formula.FillContext __context, + WritableLongChunk __destination, + Chunk[] __sources); public abstract void applyFormulaChunk( - Formula.FillContext __context, - WritableFloatChunk __destination, - Chunk[] __sources); + Formula.FillContext __context, + WritableFloatChunk __destination, + Chunk[] __sources); public abstract void applyFormulaChunk( - Formula.FillContext __context, - WritableDoubleChunk 
__destination, - Chunk[] __sources); + Formula.FillContext __context, + WritableDoubleChunk __destination, + Chunk[] __sources); public abstract void applyFormulaChunk( - Formula.FillContext __context, - WritableObjectChunk __destination, - Chunk[] __sources); + Formula.FillContext __context, + WritableObjectChunk __destination, + Chunk[] __sources); private class ToTypedMethod implements Visitor { private final FillContext __context; @@ -98,81 +97,81 @@ private class ToTypedMethod implements Visitor { public void visit(ByteChunk chunk) { // noinspection unchecked,rawtypes applyFormulaChunk( - __context, - ((WritableChunk) chunk).asWritableByteChunk(), - __sources); + __context, + ((WritableChunk) chunk).asWritableByteChunk(), + __sources); } @Override public void visit(BooleanChunk chunk) { // noinspection unchecked,rawtypes applyFormulaChunk( - __context, - ((WritableChunk) chunk).asWritableBooleanChunk(), - __sources); + __context, + ((WritableChunk) chunk).asWritableBooleanChunk(), + __sources); } @Override public void visit(CharChunk chunk) { // noinspection unchecked,rawtypes applyFormulaChunk( - __context, - ((WritableChunk) chunk).asWritableCharChunk(), - __sources); + __context, + ((WritableChunk) chunk).asWritableCharChunk(), + __sources); } @Override public void visit(ShortChunk chunk) { // noinspection unchecked,rawtypes applyFormulaChunk( - __context, - ((WritableChunk) chunk).asWritableShortChunk(), - __sources); + __context, + ((WritableChunk) chunk).asWritableShortChunk(), + __sources); } @Override public void visit(IntChunk chunk) { // noinspection unchecked,rawtypes applyFormulaChunk( - __context, - ((WritableChunk) chunk).asWritableIntChunk(), - __sources); + __context, + ((WritableChunk) chunk).asWritableIntChunk(), + __sources); } @Override public void visit(LongChunk chunk) { // noinspection unchecked,rawtypes applyFormulaChunk( - __context, - ((WritableChunk) chunk).asWritableLongChunk(), - __sources); + __context, + ((WritableChunk) 
chunk).asWritableLongChunk(), + __sources); } @Override public void visit(FloatChunk chunk) { // noinspection unchecked,rawtypes applyFormulaChunk( - __context, - ((WritableChunk) chunk).asWritableFloatChunk(), - __sources); + __context, + ((WritableChunk) chunk).asWritableFloatChunk(), + __sources); } @Override public void visit(DoubleChunk chunk) { // noinspection unchecked,rawtypes applyFormulaChunk( - __context, - ((WritableChunk) chunk).asWritableDoubleChunk(), - __sources); + __context, + ((WritableChunk) chunk).asWritableDoubleChunk(), + __sources); } @Override public void visit(ObjectChunk chunk) { // noinspection unchecked,rawtypes applyFormulaChunk( - __context, - ((WritableChunk) chunk).asWritableObjectChunk(), - __sources); + __context, + ((WritableChunk) chunk).asWritableObjectChunk(), + __sources); } } } diff --git a/DB/src/main/java/io/deephaven/db/v2/select/FormulaParserConfiguration.java b/DB/src/main/java/io/deephaven/db/v2/select/FormulaParserConfiguration.java index a2c33004c0b..9b5894666ff 100644 --- a/DB/src/main/java/io/deephaven/db/v2/select/FormulaParserConfiguration.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/FormulaParserConfiguration.java @@ -8,20 +8,19 @@ public enum FormulaParserConfiguration { public static final String OBFUSCATED_PARSER_ANNOTATION = ""; - public static FormulaParserConfiguration parser = FormulaParserConfiguration.valueOf( - Configuration.getInstance().getStringWithDefault("default.parser", Deephaven.name())); + public static FormulaParserConfiguration parser = FormulaParserConfiguration + .valueOf(Configuration.getInstance().getStringWithDefault("default.parser", Deephaven.name())); public static void setParser(FormulaParserConfiguration parser) { FormulaParserConfiguration.parser = parser; } - public static Pair extractParserAndExpression( - String expression) { + public static Pair extractParserAndExpression(String expression) { if 
(expression.startsWith(FormulaParserConfiguration.OBFUSCATED_PARSER_ANNOTATION)) { expression = expression.substring(OBFUSCATED_PARSER_ANNOTATION.length()); int endOfTag = expression.indexOf(':'); FormulaParserConfiguration parserConfig = - FormulaParserConfiguration.valueOf(expression.substring(0, endOfTag)); + FormulaParserConfiguration.valueOf(expression.substring(0, endOfTag)); return new Pair<>(parserConfig, expression.substring(endOfTag + 1)); } @@ -29,13 +28,13 @@ public static Pair extractParserAndExpressio } public static String nb(String expression) { - return new StringBuilder().append(OBFUSCATED_PARSER_ANNOTATION).append(Numba.name()) - .append(":").append(expression).toString(); + return new StringBuilder().append(OBFUSCATED_PARSER_ANNOTATION).append(Numba.name()).append(":") + .append(expression).toString(); } public static String dh(String expression) { - return new StringBuilder().append(OBFUSCATED_PARSER_ANNOTATION).append(Deephaven.name()) - .append(":").append(expression).toString(); + return new StringBuilder().append(OBFUSCATED_PARSER_ANNOTATION).append(Deephaven.name()).append(":") + .append(expression).toString(); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/select/FunctionalColumn.java b/DB/src/main/java/io/deephaven/db/v2/select/FunctionalColumn.java index 15469652c03..36cd5311abe 100644 --- a/DB/src/main/java/io/deephaven/db/v2/select/FunctionalColumn.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/FunctionalColumn.java @@ -45,37 +45,36 @@ public class FunctionalColumn implements SelectColumn { private ColumnSource sourceColumnSource; public FunctionalColumn(@NotNull String sourceName, - @NotNull Class sourceDataType, - @NotNull String destName, - @NotNull Class destDataType, - @NotNull Function function) { + @NotNull Class sourceDataType, + @NotNull String destName, + @NotNull Class destDataType, + @NotNull Function function) { this(sourceName, sourceDataType, destName, destDataType, (l, v) -> function.apply(v)); } public 
FunctionalColumn(@NotNull String sourceName, - @NotNull Class sourceDataType, - @NotNull String destName, - @NotNull Class destDataType, - @NotNull Class componentType, - @NotNull Function function) { - this(sourceName, sourceDataType, destName, destDataType, componentType, - (l, v) -> function.apply(v)); + @NotNull Class sourceDataType, + @NotNull String destName, + @NotNull Class destDataType, + @NotNull Class componentType, + @NotNull Function function) { + this(sourceName, sourceDataType, destName, destDataType, componentType, (l, v) -> function.apply(v)); } public FunctionalColumn(@NotNull String sourceName, - @NotNull Class sourceDataType, - @NotNull String destName, - @NotNull Class destDataType, - @NotNull BiFunction function) { + @NotNull Class sourceDataType, + @NotNull String destName, + @NotNull Class destDataType, + @NotNull BiFunction function) { this(sourceName, sourceDataType, destName, destDataType, Object.class, function); } public FunctionalColumn(@NotNull String sourceName, - @NotNull Class sourceDataType, - @NotNull String destName, - @NotNull Class destDataType, - @NotNull Class componentType, - @NotNull BiFunction function) { + @NotNull Class sourceDataType, + @NotNull String destName, + @NotNull Class destDataType, + @NotNull Class componentType, + @NotNull BiFunction function) { this.sourceName = NameValidator.validateColumnName(sourceName); this.sourceDataType = Require.neqNull(sourceDataType, "sourceDataType"); this.destName = NameValidator.validateColumnName(destName); @@ -96,18 +95,15 @@ public List initInputs(Table table) { } @Override - public List initInputs(Index index, - Map columnsOfInterest) { + public List initInputs(Index index, Map columnsOfInterest) { // noinspection unchecked final ColumnSource localSourceColumnSource = columnsOfInterest.get(sourceName); if (localSourceColumnSource == null) { throw new NoSuchColumnException(columnsOfInterest.keySet(), sourceName); } - if 
(!(sourceDataType.isAssignableFrom(localSourceColumnSource.getType()) - || sourceDataType.isAssignableFrom(io.deephaven.util.type.TypeUtils - .getBoxedType(localSourceColumnSource.getType())))) { - throw new IllegalArgumentException( - "Source column " + sourceName + " has wrong data type " + if (!(sourceDataType.isAssignableFrom(localSourceColumnSource.getType()) || sourceDataType + .isAssignableFrom(io.deephaven.util.type.TypeUtils.getBoxedType(localSourceColumnSource.getType())))) { + throw new IllegalArgumentException("Source column " + sourceName + " has wrong data type " + localSourceColumnSource.getType() + ", expected " + sourceDataType); } // noinspection unchecked @@ -123,10 +119,8 @@ public List initDef(Map columnDefinitionMap) { throw new NoSuchColumnException(columnDefinitionMap.keySet(), sourceName); } if (!(sourceDataType.isAssignableFrom(sourceColumnDefinition.getDataType()) - || sourceDataType - .isAssignableFrom(TypeUtils.getBoxedType(sourceColumnDefinition.getDataType())))) { - throw new IllegalArgumentException( - "Source column " + sourceName + " has wrong data type " + || sourceDataType.isAssignableFrom(TypeUtils.getBoxedType(sourceColumnDefinition.getDataType())))) { + throw new IllegalArgumentException("Source column " + sourceName + " has wrong data type " + sourceColumnDefinition.getDataType() + ", expected " + sourceDataType); } return getColumns(); @@ -174,16 +168,16 @@ public FillContext makeFillContext(int chunkCapacity) { @Override public void fillChunk(@NotNull FillContext fillContext, - @NotNull final WritableChunk destination, - @NotNull final OrderedKeys orderedKeys) { + @NotNull final WritableChunk destination, + @NotNull final OrderedKeys orderedKeys) { final FunctionalColumnFillContext ctx = (FunctionalColumnFillContext) fillContext; ctx.chunkFiller.fillByIndices(this, orderedKeys, destination); } @Override public void fillPrevChunk(@NotNull FillContext fillContext, - @NotNull final WritableChunk destination, - @NotNull final 
OrderedKeys orderedKeys) { + @NotNull final WritableChunk destination, + @NotNull final OrderedKeys orderedKeys) { final FunctionalColumnFillContext ctx = (FunctionalColumnFillContext) fillContext; ctx.chunkFiller.fillByIndices(this, orderedKeys, destination); } diff --git a/DB/src/main/java/io/deephaven/db/v2/select/IncrementalReleaseFilter.java b/DB/src/main/java/io/deephaven/db/v2/select/IncrementalReleaseFilter.java index 13d89a6ee37..1a6279a57a5 100644 --- a/DB/src/main/java/io/deephaven/db/v2/select/IncrementalReleaseFilter.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/IncrementalReleaseFilter.java @@ -5,15 +5,13 @@ package io.deephaven.db.v2.select; /** - * This will filter a table starting off with the first N rows, and then adding new rows to the - * table on each refresh. + * This will filter a table starting off with the first N rows, and then adding new rows to the table on each refresh. */ public class IncrementalReleaseFilter extends BaseIncrementalReleaseFilter { private final long sizeIncrement; /** - * Create an incremental release filter with an initial size that will release sizeIncrement - * rows each cycle. + * Create an incremental release filter with an initial size that will release sizeIncrement rows each cycle. * * @param initialSize how many rows should be in the table initially * @param sizeIncrement how many rows to release at the beginning of each LTM cycle. diff --git a/DB/src/main/java/io/deephaven/db/v2/select/MatchFilter.java b/DB/src/main/java/io/deephaven/db/v2/select/MatchFilter.java index 191007feb6b..8c195481c10 100644 --- a/DB/src/main/java/io/deephaven/db/v2/select/MatchFilter.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/MatchFilter.java @@ -26,8 +26,7 @@ public class MatchFilter extends SelectFilterImpl { @NotNull private final String columnName; - private Object[] values; // TODO: Does values need to be declared volatile (if we go back to the - // double-check)? 
+ private Object[] values; // TODO: Does values need to be declared volatile (if we go back to the double-check)? private final String[] strValues; private final boolean invertMatch; private final boolean caseInsensitive; @@ -57,8 +56,7 @@ public MatchFilter(CaseSensitivity sensitivity, String columnName, String... str this(sensitivity, MatchType.Regular, columnName, strValues); } - public MatchFilter(CaseSensitivity sensitivity, MatchType matchType, String columnName, - String... strValues) { + public MatchFilter(CaseSensitivity sensitivity, MatchType matchType, String columnName, String... strValues) { this.columnName = columnName; this.strValues = strValues; this.caseInsensitive = (sensitivity == CaseSensitivity.IgnoreCase); @@ -67,10 +65,9 @@ public MatchFilter(CaseSensitivity sensitivity, MatchType matchType, String colu public MatchFilter renameFilter(String newName) { io.deephaven.db.v2.select.MatchFilter.MatchType matchType = - invertMatch ? io.deephaven.db.v2.select.MatchFilter.MatchType.Inverted - : io.deephaven.db.v2.select.MatchFilter.MatchType.Regular; - CaseSensitivity sensitivity = - (caseInsensitive) ? CaseSensitivity.IgnoreCase : CaseSensitivity.MatchCase; + invertMatch ? io.deephaven.db.v2.select.MatchFilter.MatchType.Inverted + : io.deephaven.db.v2.select.MatchFilter.MatchType.Regular; + CaseSensitivity sensitivity = (caseInsensitive) ? 
CaseSensitivity.IgnoreCase : CaseSensitivity.MatchCase; if (strValues == null) { return new MatchFilter(matchType, newName, values); } else { @@ -112,14 +109,13 @@ public void init(TableDefinition tableDefinition) { } ColumnDefinition column = tableDefinition.getColumn(columnName); if (column == null) { - throw new RuntimeException( - "Column \"" + columnName + "\" doesn't exist in this table, available columns: " - + tableDefinition.getColumnNames()); + throw new RuntimeException("Column \"" + columnName + + "\" doesn't exist in this table, available columns: " + tableDefinition.getColumnNames()); } final List valueList = new ArrayList<>(); final QueryScope queryScope = QueryScope.getScope(); final ColumnTypeConvertor convertor = - ColumnTypeConvertorFactory.getConvertor(column.getDataType(), column.getName()); + ColumnTypeConvertorFactory.getConvertor(column.getDataType(), column.getName()); for (int valIdx = 0; valIdx < strValues.length; ++valIdx) { if (queryScope.hasParamName(strValues[valIdx])) { Object paramValue = queryScope.readParamValue(strValues[valIdx]); @@ -128,8 +124,7 @@ public void init(TableDefinition tableDefinition) { for (int ai = 0; ai < accessor.length(); ++ai) { valueList.add(convertor.convertParamValue(accessor.get(ai))); } - } else if (paramValue != null - && Collection.class.isAssignableFrom(paramValue.getClass())) { + } else if (paramValue != null && Collection.class.isAssignableFrom(paramValue.getClass())) { for (final Object paramValueMember : (Collection) paramValue) { valueList.add(convertor.convertParamValue(paramValueMember)); } @@ -141,17 +136,13 @@ public void init(TableDefinition tableDefinition) { try { convertedValue = convertor.convertStringLiteral(strValues[valIdx]); } catch (Throwable t) { - throw new IllegalArgumentException( - "Failed to convert literal value <" + strValues[valIdx] + - "> for column \"" + columnName + "\" of type " - + column.getDataType().getName(), - t); + throw new IllegalArgumentException("Failed to 
convert literal value <" + strValues[valIdx] + + "> for column \"" + columnName + "\" of type " + column.getDataType().getName(), t); } valueList.add(convertedValue); } } - // values = (Object[])ArrayUtils.toArray(valueList, - // TypeUtils.getBoxedType(theColumn.getDataType())); + // values = (Object[])ArrayUtils.toArray(valueList, TypeUtils.getBoxedType(theColumn.getDataType())); values = valueList.toArray(); initialized = true; } @@ -234,8 +225,7 @@ Object convertStringLiteral(String str) { return new ColumnTypeConvertor() { @Override Object convertStringLiteral(String str) { - // NB: Boolean.parseBoolean(str) doesn't do what we want here - anything not - // true is false. + // NB: Boolean.parseBoolean(str) doesn't do what we want here - anything not true is false. if (str.equalsIgnoreCase("true")) { return Boolean.TRUE; } @@ -243,7 +233,7 @@ Object convertStringLiteral(String str) { return Boolean.FALSE; } throw new IllegalArgumentException("String " + str - + " isn't a valid boolean value (!str.equalsIgnoreCase(\"true\") && !str.equalsIgnoreCase(\"false\"))"); + + " isn't a valid boolean value (!str.equalsIgnoreCase(\"true\") && !str.equalsIgnoreCase(\"false\"))"); } }; } else if (cls == char.class) { @@ -251,12 +241,11 @@ Object convertStringLiteral(String str) { @Override Object convertStringLiteral(String str) { if (str.length() > 1) { - if (str.length() == 3 && str.charAt(0) == '\'' - && str.charAt(2) == '\'') { + if (str.length() == 3 && str.charAt(0) == '\'' && str.charAt(2) == '\'') { return str.charAt(1); } else { throw new IllegalArgumentException( - "String " + str + " has length greater than one for column "); + "String " + str + " has length greater than one for column "); } } return str.charAt(0); @@ -270,11 +259,10 @@ Object convertStringLiteral(String str) { return null; } if ((str.charAt(0) != '"' && str.charAt(0) != '\'' && str.charAt(0) != '`') - || (str.charAt(str.length() - 1) != '"' - && str.charAt(str.length() - 1) != '\'' - && 
str.charAt(str.length() - 1) != '`')) { + || (str.charAt(str.length() - 1) != '"' && str.charAt(str.length() - 1) != '\'' + && str.charAt(str.length() - 1) != '`')) { throw new IllegalArgumentException( - "String literal not enclosed in quotes (\"" + str + "\")"); + "String literal not enclosed in quotes (\"" + str + "\")"); } return str.substring(1, str.length() - 1); } @@ -301,11 +289,9 @@ Object convertStringLiteral(String str) { return null; } if ((str.charAt(0) != '"' && str.charAt(0) != '\'' && str.charAt(0) != '`') - || (str.charAt(str.length() - 1) != '"' - && str.charAt(str.length() - 1) != '\'' - && str.charAt(str.length() - 1) != '`')) { - throw new IllegalArgumentException( - "String literal not enclosed in quotes"); + || (str.charAt(str.length() - 1) != '"' && str.charAt(str.length() - 1) != '\'' + && str.charAt(str.length() - 1) != '`')) { + throw new IllegalArgumentException("String literal not enclosed in quotes"); } return new CompressedString(str.substring(1, str.length() - 1)); } @@ -313,8 +299,7 @@ Object convertStringLiteral(String str) { @Override Object convertParamValue(Object paramValue) { if (paramValue instanceof String) { - System.out.println("MatchFilter debug: Converting " + paramValue - + " to CompressedString"); + System.out.println("MatchFilter debug: Converting " + paramValue + " to CompressedString"); return new CompressedString((String) paramValue); } if (paramValue instanceof PyObject && ((PyObject) paramValue).isString()) { @@ -332,8 +317,7 @@ Object convertParamValue(Object paramValue) { Object convertStringLiteral(String str) { if (str.charAt(0) != '\'' || str.charAt(str.length() - 1) != '\'') { throw new IllegalArgumentException( - "DBDateTime literal not enclosed in single-quotes (\"" + str - + "\")"); + "DBDateTime literal not enclosed in single-quotes (\"" + str + "\")"); } return DBTimeUtils.convertDateTime(str.substring(1, str.length() - 1)); } @@ -374,7 +358,7 @@ Object convertStringLiteral(String str) { }; } else { 
throw new IllegalArgumentException( - "Unknown type " + cls.getName() + " for MatchFilter value auto-conversion"); + "Unknown type " + cls.getName() + " for MatchFilter value auto-conversion"); } } } @@ -395,10 +379,10 @@ public boolean equals(Object o) { return false; final MatchFilter that = (MatchFilter) o; return invertMatch == that.invertMatch && - caseInsensitive == that.caseInsensitive && - Objects.equals(columnName, that.columnName) && - Arrays.equals(values, that.values) && - Arrays.equals(strValues, that.strValues); + caseInsensitive == that.caseInsensitive && + Objects.equals(columnName, that.columnName) && + Arrays.equals(values, that.values) && + Arrays.equals(strValues, that.strValues); } @Override @@ -418,9 +402,8 @@ public boolean canMemoize() { @Override public SelectFilter copy() { if (strValues != null) { - return new MatchFilter( - caseInsensitive ? CaseSensitivity.IgnoreCase : CaseSensitivity.MatchCase, - getMatchType(), columnName, strValues); + return new MatchFilter(caseInsensitive ? 
CaseSensitivity.IgnoreCase : CaseSensitivity.MatchCase, + getMatchType(), columnName, strValues); } else { return new MatchFilter(getMatchType(), columnName, values); diff --git a/DB/src/main/java/io/deephaven/db/v2/select/MultiSourceFunctionalColumn.java b/DB/src/main/java/io/deephaven/db/v2/select/MultiSourceFunctionalColumn.java index de50c7413b4..a4a3fb3a215 100644 --- a/DB/src/main/java/io/deephaven/db/v2/select/MultiSourceFunctionalColumn.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/MultiSourceFunctionalColumn.java @@ -46,20 +46,20 @@ public class MultiSourceFunctionalColumn implements SelectColumn { private final Class componentType; public MultiSourceFunctionalColumn(@NotNull List sourceNames, - @NotNull String destName, - @NotNull Class destDataType, - @NotNull BiFunction function) { + @NotNull String destName, + @NotNull Class destDataType, + @NotNull BiFunction function) { this(sourceNames, destName, destDataType, Object.class, function); } public MultiSourceFunctionalColumn(@NotNull List sourceNames, - @NotNull String destName, - @NotNull Class destDataType, - @NotNull Class componentType, - @NotNull BiFunction function) { + @NotNull String destName, + @NotNull Class destDataType, + @NotNull Class componentType, + @NotNull BiFunction function) { this.sourceNames = sourceNames.stream() - .map(NameValidator::validateColumnName) - .collect(Collectors.toList()); + .map(NameValidator::validateColumnName) + .collect(Collectors.toList()); this.destName = NameValidator.validateColumnName(destName); this.destDataType = Require.neqNull(destDataType, "destDataType"); @@ -79,16 +79,14 @@ public List initInputs(Table table) { } @Override - public List initInputs(Index index, - Map columnsOfInterest) { + public List initInputs(Index index, Map columnsOfInterest) { if (sourceColumns == null) { final List> localSources = new ArrayList<>(sourceNames.size()); final List> localPrev = new ArrayList<>(sourceNames.size()); - // the column overrides occur when we are 
in the midst of an update; but we only - // reinterpret columns with an - // updateView, not as part of a generalized update. Thus if this is happening our - // assumptions have been violated + // the column overrides occur when we are in the midst of an update; but we only reinterpret columns with an + // updateView, not as part of a generalized update. Thus if this is happening our assumptions have been + // violated // and we could provide the wrong answer by not paying attention to the columnsOverride sourceNames.forEach(name -> { final ColumnSource localSourceColumnSource = columnsOfInterest.get(name); @@ -123,8 +121,7 @@ public List initDef(Map columnDefinitionMap) { }); if (missingColumnsHolder.getValue() != null) { - throw new NoSuchColumnException(columnDefinitionMap.keySet(), - missingColumnsHolder.getValue()); + throw new NoSuchColumnException(columnDefinitionMap.keySet(), missingColumnsHolder.getValue()); } return getColumns(); @@ -172,16 +169,16 @@ public FillContext makeFillContext(int chunkCapacity) { @Override public void fillChunk(@NotNull FillContext fillContext, - @NotNull final WritableChunk destination, - @NotNull final OrderedKeys orderedKeys) { + @NotNull final WritableChunk destination, + @NotNull final OrderedKeys orderedKeys) { final FunctionalColumnFillContext ctx = (FunctionalColumnFillContext) fillContext; ctx.chunkFiller.fillByIndices(this, orderedKeys, destination); } @Override public void fillPrevChunk(@NotNull FillContext fillContext, - @NotNull final WritableChunk destination, - @NotNull final OrderedKeys orderedKeys) { + @NotNull final WritableChunk destination, + @NotNull final OrderedKeys orderedKeys) { final FunctionalColumnFillContext ctx = (FunctionalColumnFillContext) fillContext; ctx.chunkFiller.fillByIndices(this, orderedKeys, destination); } diff --git a/DB/src/main/java/io/deephaven/db/v2/select/NullSelectColumn.java b/DB/src/main/java/io/deephaven/db/v2/select/NullSelectColumn.java index f7851372cf2..4f433a10342 100644 
--- a/DB/src/main/java/io/deephaven/db/v2/select/NullSelectColumn.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/NullSelectColumn.java @@ -12,8 +12,7 @@ import java.util.Map; /** - * A SelectColumn implementation that can be used to replace columns with - * {@link NullValueColumnSource}s + * A SelectColumn implementation that can be used to replace columns with {@link NullValueColumnSource}s */ public class NullSelectColumn implements SelectColumn { private final String name; @@ -30,8 +29,7 @@ public List initInputs(final Table table) { } @Override - public List initInputs(final Index index, - final Map columnsOfInterest) { + public List initInputs(final Index index, final Map columnsOfInterest) { return Collections.emptyList(); } diff --git a/DB/src/main/java/io/deephaven/db/v2/select/PatternFilter.java b/DB/src/main/java/io/deephaven/db/v2/select/PatternFilter.java index d772bef9c15..1869d0c3d76 100644 --- a/DB/src/main/java/io/deephaven/db/v2/select/PatternFilter.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/PatternFilter.java @@ -42,8 +42,7 @@ public PatternFilter(String columnName, String value) { this(MatchType.Regular, columnName, value); } - public PatternFilter(CaseSensitivity sensitivity, MatchType matchType, - @NotNull String columnName, String value) { + public PatternFilter(CaseSensitivity sensitivity, MatchType matchType, @NotNull String columnName, String value) { this.columnName = columnName; this.value = value; this.caseInsensitive = (sensitivity == CaseSensitivity.IgnoreCase); @@ -84,9 +83,8 @@ public void init(TableDefinition tableDefinition) { final ColumnDefinition column = tableDefinition.getColumn(columnName); if (column == null) { - throw new RuntimeException( - "Column \"" + columnName + "\" doesn't exist in this table, available columns: " - + tableDefinition.getColumnNames()); + throw new RuntimeException("Column \"" + columnName + + "\" doesn't exist in this table, available columns: " + tableDefinition.getColumnNames()); 
} pattern = compile(value, caseInsensitive ? Pattern.CASE_INSENSITIVE : 0); } @@ -101,34 +99,34 @@ public Index filter(Index selection, Index fullSet, Table table, boolean usePrev if (invertMatch) { return ChunkFilter.applyChunkFilter(selection, columnSource, usePrev, - (ChunkFilter.ObjectChunkFilter) (values, keys, results) -> { - results.setSize(0); - for (int ii = 0; ii < values.size(); ++ii) { - final String columnValue = (String) values.get(ii); - if (columnValue == null) { - continue; + (ChunkFilter.ObjectChunkFilter) (values, keys, results) -> { + results.setSize(0); + for (int ii = 0; ii < values.size(); ++ii) { + final String columnValue = (String) values.get(ii); + if (columnValue == null) { + continue; + } + + if (!match(columnValue)) { + results.add(keys.get(ii)); + } } - - if (!match(columnValue)) { - results.add(keys.get(ii)); - } - } - }); + }); } else { return ChunkFilter.applyChunkFilter(selection, columnSource, usePrev, - (ChunkFilter.ObjectChunkFilter) (values, keys, results) -> { - results.setSize(0); - for (int ii = 0; ii < values.size(); ++ii) { - final String columnValue = (String) values.get(ii); - if (columnValue == null) { - continue; - } - - if (match(columnValue)) { - results.add(keys.get(ii)); + (ChunkFilter.ObjectChunkFilter) (values, keys, results) -> { + results.setSize(0); + for (int ii = 0; ii < values.size(); ++ii) { + final String columnValue = (String) values.get(ii); + if (columnValue == null) { + continue; + } + + if (match(columnValue)) { + results.add(keys.get(ii)); + } } - } - }); + }); } } @@ -151,9 +149,9 @@ public boolean equals(Object o) { return false; final PatternFilter that = (PatternFilter) o; return invertMatch == that.invertMatch && - caseInsensitive == that.caseInsensitive && - Objects.equals(columnName, that.columnName) && - Objects.equals(value, that.value); + caseInsensitive == that.caseInsensitive && + Objects.equals(columnName, that.columnName) && + Objects.equals(value, that.value); } @Override diff --git 
a/DB/src/main/java/io/deephaven/db/v2/select/RangeConditionFilter.java b/DB/src/main/java/io/deephaven/db/v2/select/RangeConditionFilter.java index 57af6b68c3d..5ce20cb40c0 100644 --- a/DB/src/main/java/io/deephaven/db/v2/select/RangeConditionFilter.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/RangeConditionFilter.java @@ -43,8 +43,8 @@ public class RangeConditionFilter extends SelectFilterImpl { * @param expression the original expression prior to being parsed * @param parserConfiguration */ - public RangeConditionFilter(String columnName, Condition condition, String value, - String expression, FormulaParserConfiguration parserConfiguration) { + public RangeConditionFilter(String columnName, Condition condition, String value, String expression, + FormulaParserConfiguration parserConfiguration) { this(columnName, condition, value, expression, null, parserConfiguration); } @@ -57,17 +57,15 @@ public RangeConditionFilter(String columnName, Condition condition, String value * @param expression the original expression prior to being parsed * @param parserConfiguration */ - public RangeConditionFilter(String columnName, String conditionString, String value, - String expression, FormulaParserConfiguration parserConfiguration) { - this(columnName, conditionFromString(conditionString), value, expression, - parserConfiguration); + public RangeConditionFilter(String columnName, String conditionString, String value, String expression, + FormulaParserConfiguration parserConfiguration) { + this(columnName, conditionFromString(conditionString), value, expression, parserConfiguration); } // Used for copy method - private RangeConditionFilter(String columnName, Condition condition, String value, - String expression, SelectFilter filter, FormulaParserConfiguration parserConfiguration) { - Assert.eqTrue(conditionSupported(condition), - condition + " is not supported by RangeConditionFilter"); + private RangeConditionFilter(String columnName, Condition condition, String 
value, String expression, + SelectFilter filter, FormulaParserConfiguration parserConfiguration) { + Assert.eqTrue(conditionSupported(condition), condition + " is not supported by RangeConditionFilter"); this.columnName = columnName; this.condition = condition; this.value = value; @@ -99,8 +97,7 @@ private static Condition conditionFromString(String conditionString) { case ">=": return Condition.GREATER_THAN_OR_EQUAL; default: - throw new IllegalArgumentException( - conditionString + " is not supported by RangeConditionFilter"); + throw new IllegalArgumentException(conditionString + " is not supported by RangeConditionFilter"); } } @@ -122,8 +119,7 @@ public void init(TableDefinition tableDefinition) { final ColumnDefinition def = tableDefinition.getColumn(columnName); if (def == null) { - throw new RuntimeException( - "Column \"" + columnName + "\" doesn't exist in this table, available columns: " + throw new RuntimeException("Column \"" + columnName + "\" doesn't exist in this table, available columns: " + tableDefinition.getColumnNames()); } @@ -150,8 +146,8 @@ public void init(TableDefinition tableDefinition) { } else if (BigInteger.class.isAssignableFrom(colClass)) { filter = makeComparableRangeFilter(columnName, condition, new BigInteger(value)); } else if (io.deephaven.util.type.TypeUtils.isString(colClass)) { - final String stringValue = MatchFilter.ColumnTypeConvertorFactory - .getConvertor(String.class, columnName).convertStringLiteral(value).toString(); + final String stringValue = MatchFilter.ColumnTypeConvertorFactory.getConvertor(String.class, columnName) + .convertStringLiteral(value).toString(); filter = makeComparableRangeFilter(columnName, condition, stringValue); } else if (TypeUtils.isBoxedBoolean(colClass) || colClass == boolean.class) { filter = makeComparableRangeFilter(columnName, condition, Boolean.valueOf(value)); @@ -162,7 +158,7 @@ public void init(TableDefinition tableDefinition) { filter = 
ConditionFilter.createConditionFilter(expression, parserConfiguration); } else { throw new IllegalArgumentException("RangeConditionFilter does not support type " - + colClass.getSimpleName() + " for column " + columnName); + + colClass.getSimpleName() + " for column " + columnName); } } @@ -195,24 +191,18 @@ public static long parseLongFilter(String value) { return Long.parseLong(value); } - private static LongRangeFilter makeDateTimeRangeFilter(String columnName, Condition condition, - String value) { + private static LongRangeFilter makeDateTimeRangeFilter(String columnName, Condition condition, String value) { switch (condition) { case LESS_THAN: - return new DateTimeRangeFilter(columnName, parseDateTimeNanos(value), - Long.MIN_VALUE, true, false); + return new DateTimeRangeFilter(columnName, parseDateTimeNanos(value), Long.MIN_VALUE, true, false); case LESS_THAN_OR_EQUAL: - return new DateTimeRangeFilter(columnName, parseDateTimeNanos(value), - Long.MIN_VALUE, true, true); + return new DateTimeRangeFilter(columnName, parseDateTimeNanos(value), Long.MIN_VALUE, true, true); case GREATER_THAN: - return new DateTimeRangeFilter(columnName, parseDateTimeNanos(value), - Long.MAX_VALUE, false, true); + return new DateTimeRangeFilter(columnName, parseDateTimeNanos(value), Long.MAX_VALUE, false, true); case GREATER_THAN_OR_EQUAL: - return new DateTimeRangeFilter(columnName, parseDateTimeNanos(value), - Long.MAX_VALUE, true, true); + return new DateTimeRangeFilter(columnName, parseDateTimeNanos(value), Long.MAX_VALUE, true, true); default: - throw new IllegalArgumentException( - "RangeConditionFilter does not support condition " + condition); + throw new IllegalArgumentException("RangeConditionFilter does not support condition " + condition); } } @@ -223,8 +213,8 @@ private static long parseDateTimeNanos(String value) { return Long.parseLong(value); } - private static SingleSidedComparableRangeFilter makeComparableRangeFilter(String columnName, - Condition condition, 
Comparable comparable) { + private static SingleSidedComparableRangeFilter makeComparableRangeFilter(String columnName, Condition condition, + Comparable comparable) { switch (condition) { case LESS_THAN: return new SingleSidedComparableRangeFilter(columnName, comparable, false, false); @@ -235,8 +225,7 @@ private static SingleSidedComparableRangeFilter makeComparableRangeFilter(String case GREATER_THAN_OR_EQUAL: return new SingleSidedComparableRangeFilter(columnName, comparable, true, true); default: - throw new IllegalArgumentException( - "RangeConditionFilter does not support condition " + condition); + throw new IllegalArgumentException("RangeConditionFilter does not support condition " + condition); } } @@ -255,13 +244,11 @@ public void setRecomputeListener(RecomputeListener listener) {} @Override public SelectFilter copy() { - return new RangeConditionFilter(columnName, condition, value, expression, filter, - parserConfiguration); + return new RangeConditionFilter(columnName, condition, value, expression, filter, parserConfiguration); } @Override public String toString() { - return "RangeConditionFilter(" + columnName + " " + condition.description + " " + value - + ")"; + return "RangeConditionFilter(" + columnName + " " + condition.description + " " + value + ")"; } } diff --git a/DB/src/main/java/io/deephaven/db/v2/select/RegexFilter.java b/DB/src/main/java/io/deephaven/db/v2/select/RegexFilter.java index 04ae9ef45c0..b2b73793037 100644 --- a/DB/src/main/java/io/deephaven/db/v2/select/RegexFilter.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/RegexFilter.java @@ -26,8 +26,7 @@ public RegexFilter(String columnName, String value) { this(MatchType.Regular, columnName, value); } - public RegexFilter(CaseSensitivity sensitivity, MatchType matchType, @NotNull String columnName, - String value) { + public RegexFilter(CaseSensitivity sensitivity, MatchType matchType, @NotNull String columnName, String value) { super(sensitivity, matchType, columnName, 
value); } @@ -47,8 +46,7 @@ public RegexFilter renameFilter(String newName) { @Override public String toString() { - return (invertMatch ? "!" : "") + columnName + " =~ /" + value + "/" - + (caseInsensitive ? "i" : ""); + return (invertMatch ? "!" : "") + columnName + " =~ /" + value + "/" + (caseInsensitive ? "i" : ""); } @Override diff --git a/DB/src/main/java/io/deephaven/db/v2/select/ReindexingFilter.java b/DB/src/main/java/io/deephaven/db/v2/select/ReindexingFilter.java index 0fd7ebf4242..0461ae09e97 100644 --- a/DB/src/main/java/io/deephaven/db/v2/select/ReindexingFilter.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/ReindexingFilter.java @@ -7,16 +7,14 @@ import org.jetbrains.annotations.Nullable; /** - * This interface marks SelectFilters that return a result set that should be the full set for - * subsequent filters. Said another way, subsequent filters need to see the Index selected by this - * filter for purposes of determining i values. ReindexingFilters may also optionally specify a - * re-sorting of the table to be input. + * This interface marks SelectFilters that return a result set that should be the full set for subsequent filters. Said + * another way, subsequent filters need to see the Index selected by this filter for purposes of determining i values. + * ReindexingFilters may also optionally specify a re-sorting of the table to be input. */ public interface ReindexingFilter extends SelectFilter { /** - * @return True iff getSortColumns will return a non-null, non-empty array of column names to - * sort on. + * @return True iff getSortColumns will return a non-null, non-empty array of column names to sort on. */ boolean requiresSorting(); @@ -29,8 +27,7 @@ public interface ReindexingFilter extends SelectFilter { String[] getSortColumns(); /** - * Advise this filter that sorting has been performed. requiresSorting must return false - * hereafter. + * Advise this filter that sorting has been performed. 
requiresSorting must return false hereafter. */ void sortingDone(); } diff --git a/DB/src/main/java/io/deephaven/db/v2/select/ReinterpretedColumn.java b/DB/src/main/java/io/deephaven/db/v2/select/ReinterpretedColumn.java index 76cae10afbb..b9f6d2b9831 100644 --- a/DB/src/main/java/io/deephaven/db/v2/select/ReinterpretedColumn.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/ReinterpretedColumn.java @@ -20,18 +20,18 @@ import java.util.Map; /** - * Allows us to support ColumnSource reinterpretation via view-type Table operations. Currently, - * this is only exposed in V2 tables' support for dateTimeColumnAsNanos(). + * Allows us to support ColumnSource reinterpretation via view-type Table operations. Currently, this is only exposed in + * V2 tables' support for dateTimeColumnAsNanos(). * - * TODO: If we come up with other valid, useful reinterpretations, it would be trivial to create a - * general purpose syntax for use in view()/updateView() column expressions. + * TODO: If we come up with other valid, useful reinterpretations, it would be trivial to create a general purpose + * syntax for use in view()/updateView() column expressions. * * The syntax I have in mind is: "<ColumnNameB>=<ColumnNameA>.as(<ClassName>)" * "<ColumnName>.as(<ClassName>)" * - * Making this work would consist of any one of: 1. Adding a V1 version and updating - * SelectColumnFactory and SelectColumnAdaptor 2. Adding the appropriate if-regex-matches to - * realColumn selection in V2 SwitchColumn 3. Creating a V2-native SelectColumnFactory + * Making this work would consist of any one of: 1. Adding a V1 version and updating SelectColumnFactory and + * SelectColumnAdaptor 2. Adding the appropriate if-regex-matches to realColumn selection in V2 SwitchColumn 3. 
Creating + * a V2-native SelectColumnFactory */ public class ReinterpretedColumn implements SelectColumn { @@ -46,8 +46,7 @@ public class ReinterpretedColumn implements SelectColumn { private ColumnSource sourceColumnSource; - public ReinterpretedColumn(String sourceName, Class sourceDataType, String destName, - Class destDataType) { + public ReinterpretedColumn(String sourceName, Class sourceDataType, String destName, Class destDataType) { this.sourceName = NameValidator.validateColumnName(sourceName); this.sourceDataType = Require.neqNull(sourceDataType, "sourceDataType"); this.destName = NameValidator.validateColumnName(destName); @@ -67,22 +66,19 @@ public List initInputs(Table table) { } @Override - public List initInputs(Index index, - Map columnsOfInterest) { + public List initInputs(Index index, Map columnsOfInterest) { // noinspection unchecked final ColumnSource localSourceColumnSource = columnsOfInterest.get(sourceName); if (localSourceColumnSource == null) { throw new NoSuchColumnException(columnsOfInterest.keySet(), sourceName); } if (!localSourceColumnSource.getType().equals(sourceDataType)) { - throw new IllegalArgumentException( - "Source column " + sourceName + " has wrong data type " + throw new IllegalArgumentException("Source column " + sourceName + " has wrong data type " + localSourceColumnSource.getType() + ", expected " + sourceDataType); } if (!(localSourceColumnSource.allowsReinterpret(destDataType))) { - throw new IllegalArgumentException( - "Source column " + sourceName + " (Class=" + localSourceColumnSource.getClass() - + ") - cannot be reinterpreted as " + destDataType); + throw new IllegalArgumentException("Source column " + sourceName + " (Class=" + + localSourceColumnSource.getClass() + ") - cannot be reinterpreted as " + destDataType); } // noinspection unchecked sourceColumnSource = (ColumnSource) columnsOfInterest.get(sourceName); @@ -97,8 +93,7 @@ public List initDef(Map columnDefinitionMap) { throw new 
NoSuchColumnException(columnDefinitionMap.keySet(), sourceName); } if (!sourceColumnDefinition.getDataType().equals(sourceDataType)) { - throw new IllegalArgumentException( - "Source column " + sourceName + " has wrong data type " + throw new IllegalArgumentException("Source column " + sourceName + " has wrong data type " + sourceColumnDefinition.getDataType() + ", expected " + sourceDataType); } return getColumns(); @@ -124,8 +119,8 @@ public List getColumnArrays() { public ColumnSource getDataView() { final ColumnSource result = sourceColumnSource.reinterpret(destDataType); if (!result.getType().equals(destDataType)) { - throw new IllegalArgumentException("Reinterpreted column from " + sourceName - + " has wrong data type " + result.getType() + ", expected " + destDataType); + throw new IllegalArgumentException("Reinterpreted column from " + sourceName + " has wrong data type " + + result.getType() + ", expected " + destDataType); } return result; } @@ -166,7 +161,7 @@ public boolean equals(Object o) { ReinterpretedColumn that = (ReinterpretedColumn) o; return sourceName.equals(that.sourceName) && sourceDataType.equals(that.sourceDataType) - && destName.equals(that.destName) && destDataType.equals(that.destDataType); + && destName.equals(that.destName) && destDataType.equals(that.destDataType); } @Override diff --git a/DB/src/main/java/io/deephaven/db/v2/select/RollingReleaseFilter.java b/DB/src/main/java/io/deephaven/db/v2/select/RollingReleaseFilter.java index a4ee83d8c83..f142f3a8686 100644 --- a/DB/src/main/java/io/deephaven/db/v2/select/RollingReleaseFilter.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/RollingReleaseFilter.java @@ -14,8 +14,7 @@ import java.util.List; /** - * This will filter a table starting off with the first N rows, and then adding new rows to the - * table on each refresh. + * This will filter a table starting off with the first N rows, and then adding new rows to the table on each refresh. 
*/ public class RollingReleaseFilter extends SelectFilterLivenessArtifactImpl implements LiveTable { private final long workingSize; @@ -78,10 +77,7 @@ public Index filter(Index selection, Index fullSet, Table table, boolean usePrev @Override public boolean isSimpleFilter() { - /* - * This doesn't execute any user code, so it should be safe to execute it against untrusted - * data. - */ + /* This doesn't execute any user code, so it should be safe to execute it against untrusted data. */ return true; } diff --git a/DB/src/main/java/io/deephaven/db/v2/select/SelectColumn.java b/DB/src/main/java/io/deephaven/db/v2/select/SelectColumn.java index 3c7d8190306..5c720ac048b 100644 --- a/DB/src/main/java/io/deephaven/db/v2/select/SelectColumn.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/SelectColumn.java @@ -24,8 +24,7 @@ import java.util.Objects; /** - * The interface for a query table to perform retrieve values from a column for select like - * operations. + * The interface for a query table to perform retrieve values from a column for select like operations. */ public interface SelectColumn { @@ -38,14 +37,13 @@ static SelectColumn[] from(Collection selectables) { } /** - * Convenient static final instance of a zero length Array of SelectColumns for use in toArray - * calls. + * Convenient static final instance of a zero length Array of SelectColumns for use in toArray calls. */ SelectColumn[] ZERO_LENGTH_SELECT_COLUMN_ARRAY = new SelectColumn[0]; /** - * Initialize the SelectColumn using the input table and return a list of underlying columns - * that this SelectColumn is dependent upon. + * Initialize the SelectColumn using the input table and return a list of underlying columns that this SelectColumn + * is dependent upon. 
* * @param table the table to initialize internals from * @return a list containing all columns from 'table' that the result depends on @@ -79,16 +77,16 @@ static SelectColumn[] from(Collection selectables) { Class getReturnedType(); /** - * Get a list of the names of columns used in this SelectColumn. Behavior is undefined if none - * of the init* methods have been called yet. + * Get a list of the names of columns used in this SelectColumn. Behavior is undefined if none of the init* methods + * have been called yet. * * @return the columns used in this SelectColumn */ List getColumns(); /** - * Get a list of the names of column arrays used in this SelectColumn. Behavior is undefined if - * none of the init* methods have been called yet. + * Get a list of the names of column arrays used in this SelectColumn. Behavior is undefined if none of the init* + * methods have been called yet. * * @return the list of column arrays used */ @@ -142,8 +140,8 @@ static SelectColumn[] from(Collection selectables) { /** * Should we disallow use of this column for refreshing tables? * - * Some formulas can not be reliably computed with a refreshing table, therefore we will refuse - * to compute those values. + * Some formulas can not be reliably computed with a refreshing table, therefore we will refuse to compute those + * values. 
*/ boolean disallowRefresh(); @@ -178,8 +176,7 @@ public void visit(ColumnName rhs) { @Override public void visit(RawString rhs) { - out = - SelectColumnFactory.getExpression(String.format("%s=%s", lhs.name(), rhs.value())); + out = SelectColumnFactory.getExpression(String.format("%s=%s", lhs.name(), rhs.value())); } @Override diff --git a/DB/src/main/java/io/deephaven/db/v2/select/SelectFilter.java b/DB/src/main/java/io/deephaven/db/v2/select/SelectFilter.java index 4cf039eb21c..d501f25a8c5 100644 --- a/DB/src/main/java/io/deephaven/db/v2/select/SelectFilter.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/SelectFilter.java @@ -51,8 +51,7 @@ static SelectFilter[] fromInverted(Collection filters) { } /** - * Users of SelectFilter may implement this interface if they must react to the filter - * fundamentally changing. + * Users of SelectFilter may implement this interface if they must react to the filter fundamentally changing. * * @see DynamicWhereFilter */ @@ -63,14 +62,14 @@ interface RecomputeListener { void requestRecompute(); /** - * Notify the something about the filters has changed such that all unmatched rows of the - * source table should be re-evaluated. + * Notify the something about the filters has changed such that all unmatched rows of the source table should be + * re-evaluated. */ void requestRecomputeUnmatched(); /** - * Notify the something about the filters has changed such that all matched rows of the - * source table should be re-evaluated. + * Notify the something about the filters has changed such that all matched rows of the source table should be + * re-evaluated. */ void requestRecomputeMatched(); @@ -114,24 +113,22 @@ interface RecomputeListener { /** * Filter selection to only matching rows. * - * @param selection the indices that should be filtered. The selection must be a subset of - * fullSet; and may not include rows that the engine determines need not be evaluated to - * produce the result. 
- * @param fullSet the complete Index of the table to filter. The fullSet is used for calculating - * variables like "i" or "ii". + * @param selection the indices that should be filtered. The selection must be a subset of fullSet; and may not + * include rows that the engine determines need not be evaluated to produce the result. + * @param fullSet the complete Index of the table to filter. The fullSet is used for calculating variables like "i" + * or "ii". * @param table the table to filter - * @param usePrev true if previous values should be used. Implementing previous value filtering - * is optional, and a {@link PreviousFilteringNotSupported} exception may be thrown. If a - * PreviousFiltering exception is thrown, then the caller must acquire the - * LiveTableMonitor lock. + * @param usePrev true if previous values should be used. Implementing previous value filtering is optional, and a + * {@link PreviousFilteringNotSupported} exception may be thrown. If a PreviousFiltering exception is thrown, + * then the caller must acquire the LiveTableMonitor lock. * * @return the subset of selection accepted by this filter */ Index filter(Index selection, Index fullSet, Table table, boolean usePrev); /** - * @return true if this is a filter that does not require any code execution, but rather is - * handled entirely within the database engine. + * @return true if this is a filter that does not require any code execution, but rather is handled entirely within + * the database engine. */ boolean isSimpleFilter(); @@ -152,24 +149,22 @@ default boolean isRefreshing() { void setRecomputeListener(RecomputeListener result); /** - * The database system may automatically generate a filter, for example, when applying an ACL to - * a table. There are certain operations which may bypass these filters. + * The database system may automatically generate a filter, for example, when applying an ACL to a table. There are + * certain operations which may bypass these filters. 
* * This function returns whether or not this filter is automated. * - * @return true if this filter was automatically applied by the database system. False - * otherwise. + * @return true if this filter was automatically applied by the database system. False otherwise. */ boolean isAutomatedFilter(); /** - * The database system may automatically generate a filter, for example, when applying an ACL to - * a table. There are certain operations which may bypass these filters. + * The database system may automatically generate a filter, for example, when applying an ACL to a table. There are + * certain operations which may bypass these filters. * * This function indicates that this filter is automated. * - * @param value true if this filter was automatically applied by the database system. False - * otherwise. + * @param value true if this filter was automatically applied by the database system. False otherwise. */ void setAutomatedFilter(boolean value); @@ -190,8 +185,8 @@ default boolean canMemoize() { SelectFilter copy(); /** - * This exception is thrown when a where() filter is incapable of handling previous values, and - * thus needs to be executed while under the LTM lock. + * This exception is thrown when a where() filter is incapable of handling previous values, and thus needs to be + * executed while under the LTM lock. 
*/ class PreviousFilteringNotSupported extends ConstructSnapshot.NoSnapshotAllowedException { public PreviousFilteringNotSupported() { @@ -286,8 +281,7 @@ private static class FilterConditionAdapter implements Value.Visitor { public static SelectFilter of(FilterCondition condition) { FilterCondition preferred = condition.maybeTranspose(); - return preferred.lhs().walk(new FilterConditionAdapter(condition, preferred)) - .getOut(); + return preferred.lhs().walk(new FilterConditionAdapter(condition, preferred)).getOut(); } private final FilterCondition original; @@ -316,20 +310,16 @@ public void visit(ColumnName rhs) { public void visit(long rhs) { switch (preferred.operator()) { case LESS_THAN: - out = new LongRangeFilter(lhs.name(), Long.MIN_VALUE, rhs, true, - false); + out = new LongRangeFilter(lhs.name(), Long.MIN_VALUE, rhs, true, false); break; case LESS_THAN_OR_EQUAL: - out = new LongRangeFilter(lhs.name(), Long.MIN_VALUE, rhs, true, - true); + out = new LongRangeFilter(lhs.name(), Long.MIN_VALUE, rhs, true, true); break; case GREATER_THAN: - out = new LongRangeFilter(lhs.name(), rhs, Long.MAX_VALUE, false, - true); + out = new LongRangeFilter(lhs.name(), rhs, Long.MAX_VALUE, false, true); break; case GREATER_THAN_OR_EQUAL: - out = new LongRangeFilter(lhs.name(), rhs, Long.MAX_VALUE, true, - true); + out = new LongRangeFilter(lhs.name(), rhs, Long.MAX_VALUE, true, true); break; case EQUALS: out = new MatchFilter(lhs.name(), rhs); @@ -338,15 +328,13 @@ public void visit(long rhs) { out = new MatchFilter(MatchType.Inverted, lhs.name(), rhs); break; default: - throw new IllegalStateException( - "Unexpected operator " + original.operator()); + throw new IllegalStateException("Unexpected operator " + original.operator()); } } }); } - // Note for all remaining cases: since we are walking the preferred object, we know we - // don't have to handle + // Note for all remaining cases: since we are walking the preferred object, we know we don't have to handle // the case 
where rhs is column name. @Override diff --git a/DB/src/main/java/io/deephaven/db/v2/select/SelectFilterLivenessArtifactImpl.java b/DB/src/main/java/io/deephaven/db/v2/select/SelectFilterLivenessArtifactImpl.java index 9c3bb2c93a4..051fc8f7040 100644 --- a/DB/src/main/java/io/deephaven/db/v2/select/SelectFilterLivenessArtifactImpl.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/SelectFilterLivenessArtifactImpl.java @@ -5,8 +5,7 @@ import java.io.Serializable; -public abstract class SelectFilterLivenessArtifactImpl extends LivenessArtifact - implements SelectFilter, Serializable { +public abstract class SelectFilterLivenessArtifactImpl extends LivenessArtifact implements SelectFilter, Serializable { private boolean isAutomatedFilter = false; @Override diff --git a/DB/src/main/java/io/deephaven/db/v2/select/SimulationClock.java b/DB/src/main/java/io/deephaven/db/v2/select/SimulationClock.java index 767e29d9337..53968094ec9 100644 --- a/DB/src/main/java/io/deephaven/db/v2/select/SimulationClock.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/SimulationClock.java @@ -21,8 +21,7 @@ public class SimulationClock implements Clock { private final DBDateTime endTime; private final long stepNanos; - private final LiveTable refreshTask = this::advance; // Save this in a reference so we can - // deregister it. + private final LiveTable refreshTask = this::advance; // Save this in a reference so we can deregister it. private enum State { NOT_STARTED, STARTED, DONE @@ -37,32 +36,29 @@ private enum State { * Create a simulation clock for the specified time range and step. 
* * @param startTime The initial time that will be returned by this clock, before it is started - * @param endTime The final time that will be returned by this clock, when the simulation has - * completed + * @param endTime The final time that will be returned by this clock, when the simulation has completed * @param stepSize The time to "elapse" in each refresh loop */ public SimulationClock(@NotNull final String startTime, - @NotNull final String endTime, - @NotNull final String stepSize) { + @NotNull final String endTime, + @NotNull final String stepSize) { this(DBTimeUtils.convertDateTime(startTime), DBTimeUtils.convertDateTime(endTime), - DBTimeUtils.convertTime(stepSize)); + DBTimeUtils.convertTime(stepSize)); } /** * Create a simulation clock for the specified time range and step. * * @param startTime The initial time that will be returned by this clock, before it is started - * @param endTime The final time that will be returned by this clock, when the simulation has - * completed + * @param endTime The final time that will be returned by this clock, when the simulation has completed * @param stepNanos The number of nanoseconds to "elapse" in each refresh loop */ public SimulationClock(@NotNull final DBDateTime startTime, - @NotNull final DBDateTime endTime, - final long stepNanos) { + @NotNull final DBDateTime endTime, + final long stepNanos) { Require.neqNull(startTime, "startTime"); this.endTime = Require.neqNull(endTime, "endTime"); - Require.requirement(DBTimeUtils.isBefore(startTime, endTime), - "DBTimeUtils.isBefore(startTime, endTime)"); + Require.requirement(DBTimeUtils.isBefore(startTime, endTime), "DBTimeUtils.isBefore(startTime, endTime)"); this.stepNanos = Require.gtZero(stepNanos, "stepNanos"); now = startTime; } @@ -106,7 +102,7 @@ void advance() { Assert.eq(state.get(), "state.get()", State.STARTED); if (now.getNanos() == endTime.getNanos()) { Assert.assertion(state.compareAndSet(State.STARTED, State.DONE), - 
"state.compareAndSet(State.STARTED, State.DONE)"); + "state.compareAndSet(State.STARTED, State.DONE)"); LiveTableMonitor.DEFAULT.removeTable(refreshTask); LiveTableMonitor.DEFAULT.requestSignal(ltmCondition); return; // This return is not strictly necessary, but it seems clearer this way. diff --git a/DB/src/main/java/io/deephaven/db/v2/select/SingleSidedComparableRangeFilter.java b/DB/src/main/java/io/deephaven/db/v2/select/SingleSidedComparableRangeFilter.java index 661bfa06a50..e593c487a27 100644 --- a/DB/src/main/java/io/deephaven/db/v2/select/SingleSidedComparableRangeFilter.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/SingleSidedComparableRangeFilter.java @@ -18,16 +18,15 @@ public class SingleSidedComparableRangeFilter extends AbstractRangeFilter { private final Comparable pivot; private final boolean isGreaterThan; - SingleSidedComparableRangeFilter(String columnName, Comparable val, boolean inclusive, - boolean isGreaterThan) { + SingleSidedComparableRangeFilter(String columnName, Comparable val, boolean inclusive, boolean isGreaterThan) { super(columnName, inclusive, inclusive); this.isGreaterThan = isGreaterThan; pivot = val; } @TestUseOnly - public static SingleSidedComparableRangeFilter makeForTest(String columnName, - Comparable value, boolean inclusive, boolean isGreaterThan) { + public static SingleSidedComparableRangeFilter makeForTest(String columnName, Comparable value, + boolean inclusive, boolean isGreaterThan) { return new SingleSidedComparableRangeFilter(columnName, value, inclusive, isGreaterThan); } @@ -39,20 +38,17 @@ public void init(TableDefinition tableDefinition) { final ColumnDefinition def = tableDefinition.getColumn(columnName); if (def == null) { - throw new RuntimeException( - "Column \"" + columnName + "\" doesn't exist in this table, available columns: " + throw new RuntimeException("Column \"" + columnName + "\" doesn't exist in this table, available columns: " + tableDefinition.getColumnNames()); } 
Assert.assertion(Comparable.class.isAssignableFrom(def.getDataType()), - "Comparable.class.isAssignableFrom(def.getDataType())", def.getDataType(), - "def.getDataType()"); + "Comparable.class.isAssignableFrom(def.getDataType())", def.getDataType(), "def.getDataType()"); chunkFilter = makeComparableChunkFilter(pivot, lowerInclusive, isGreaterThan); } - public static ChunkFilter makeComparableChunkFilter(Comparable pivot, boolean inclusive, - boolean isGreaterThan) { + public static ChunkFilter makeComparableChunkFilter(Comparable pivot, boolean inclusive, boolean isGreaterThan) { if (inclusive) { if (isGreaterThan) { return new GeqComparableChunkFilter(pivot); @@ -70,14 +66,13 @@ public static ChunkFilter makeComparableChunkFilter(Comparable pivot, boolean @Override public SelectFilter copy() { - return new SingleSidedComparableRangeFilter(columnName, pivot, lowerInclusive, - upperInclusive); + return new SingleSidedComparableRangeFilter(columnName, pivot, lowerInclusive, upperInclusive); } @Override public String toString() { return "SingleSidedComparableRangeFilter(" + columnName + (isGreaterThan ? '>' : '>') - + (lowerInclusive ? "=" : "") + pivot + ")"; + + (lowerInclusive ? "=" : "") + pivot + ")"; } private static class GeqComparableChunkFilter implements ChunkFilter { @@ -89,9 +84,8 @@ private GeqComparableChunkFilter(Comparable pivot) { @Override public void filter(Chunk values, LongChunk keys, - WritableLongChunk results) { - final ObjectChunk, ? extends Values> objectChunk = - values.asObjectChunk(); + WritableLongChunk results) { + final ObjectChunk, ? extends Values> objectChunk = values.asObjectChunk(); results.setSize(0); for (int ii = 0; ii < values.size(); ++ii) { @@ -112,9 +106,8 @@ private LeqComparableChunkFilter(Comparable pivot) { @Override public void filter(Chunk values, LongChunk keys, - WritableLongChunk results) { - final ObjectChunk, ? 
extends Values> objectChunk = - values.asObjectChunk(); + WritableLongChunk results) { + final ObjectChunk, ? extends Values> objectChunk = values.asObjectChunk(); results.setSize(0); for (int ii = 0; ii < values.size(); ++ii) { @@ -135,9 +128,8 @@ private GtComparableChunkFilter(Comparable pivot) { @Override public void filter(Chunk values, LongChunk keys, - WritableLongChunk results) { - final ObjectChunk, ? extends Values> objectChunk = - values.asObjectChunk(); + WritableLongChunk results) { + final ObjectChunk, ? extends Values> objectChunk = values.asObjectChunk(); results.setSize(0); for (int ii = 0; ii < values.size(); ++ii) { @@ -158,9 +150,8 @@ private LtComparableChunkFilter(Comparable pivot) { @Override public void filter(Chunk values, LongChunk keys, - WritableLongChunk results) { - final ObjectChunk, ? extends Values> objectChunk = - values.asObjectChunk(); + WritableLongChunk results) { + final ObjectChunk, ? extends Values> objectChunk = values.asObjectChunk(); results.setSize(0); for (int ii = 0; ii < values.size(); ++ii) { @@ -173,19 +164,17 @@ public void filter(Chunk values, LongChunk } @Override - Index binarySearch(Index selection, ColumnSource columnSource, boolean usePrev, - boolean reverse) { + Index binarySearch(Index selection, ColumnSource columnSource, boolean usePrev, boolean reverse) { if (selection.isEmpty()) { return selection; } // noinspection unchecked - final ColumnSource comparableColumnSource = - (ColumnSource) columnSource; + final ColumnSource comparableColumnSource = (ColumnSource) columnSource; final int compareSign = reverse ? 
-1 : 1; - long lowerBoundMin = ComparableRangeFilter.bound(selection, usePrev, comparableColumnSource, - 0, selection.size(), pivot, lowerInclusive, compareSign, isGreaterThan == reverse); + long lowerBoundMin = ComparableRangeFilter.bound(selection, usePrev, comparableColumnSource, 0, + selection.size(), pivot, lowerInclusive, compareSign, isGreaterThan == reverse); if (isGreaterThan == reverse) { return selection.subindexByPos(0, lowerBoundMin); diff --git a/DB/src/main/java/io/deephaven/db/v2/select/SortedClockFilter.java b/DB/src/main/java/io/deephaven/db/v2/select/SortedClockFilter.java index 19058928039..2038c18001d 100644 --- a/DB/src/main/java/io/deephaven/db/v2/select/SortedClockFilter.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/SortedClockFilter.java @@ -12,10 +12,10 @@ import org.jetbrains.annotations.Nullable; /** - * This will filter a table on a DBDateTime column for all rows greater than "now" according to a - * supplied clock. It requires sorting of the input table according to the specified timestamp - * column, leveraging this for a very efficient implementation (albeit one that requires sorting - * first) and an output sequence that is monotonically nondecreasing in the specified column. + * This will filter a table on a DBDateTime column for all rows greater than "now" according to a supplied clock. It + * requires sorting of the input table according to the specified timestamp column, leveraging this for a very efficient + * implementation (albeit one that requires sorting first) and an output sequence that is monotonically nondecreasing in + * the specified column. 
*/ public class SortedClockFilter extends ClockFilter { @@ -23,8 +23,8 @@ public class SortedClockFilter extends ClockFilter { private Range range; public SortedClockFilter(@NotNull final String columnName, - @NotNull final Clock clock, - final boolean live) { + @NotNull final Clock clock, + final boolean live) { super(columnName, clock, live); } @@ -49,23 +49,20 @@ public void sortingDone() { } @Override - protected @Nullable Index initializeAndGetInitialIndex(@NotNull final Index selection, - @NotNull final Index fullSet, @NotNull final Table table) { - // External code is required to have sorted according to column before calling this, so we - // expect the input to - // be flat. This is not actually a guarantee of the sort() method, but is something that - // happens to be true + protected @Nullable Index initializeAndGetInitialIndex(@NotNull final Index selection, @NotNull final Index fullSet, + @NotNull final Table table) { + // External code is required to have sorted according to column before calling this, so we expect the input to + // be flat. This is not actually a guarantee of the sort() method, but is something that happens to be true // because the input table must be historical, and the historical sort implementation uses a // ContiguousRedirectionIndex. Require.requirement(table.isFlat(), "table.isFlat()"); - // This must be the first filter in a where-clause of its own, again because of the sort, - // hence selection must + // This must be the first filter in a where-clause of its own, again because of the sort, hence selection must // be equal to fullSet. // This test as implemented only works because the table is flat. 
Require.requirement(selection.size() == fullSet.size() - && selection.size() == selection.lastKey() - selection.firstKey() + 1 - && fullSet.size() == fullSet.lastKey() - fullSet.firstKey() + 1, - "selection.size() == fullSet.size() && selection.size() == selection.lastKey() - selection.firstKey() + 1 && fullSet.size() == fullSet.lastKey() - fullSet.firstKey() + 1"); + && selection.size() == selection.lastKey() - selection.firstKey() + 1 + && fullSet.size() == fullSet.lastKey() - fullSet.firstKey() + 1, + "selection.size() == fullSet.size() && selection.size() == selection.lastKey() - selection.firstKey() + 1 && fullSet.size() == fullSet.lastKey() - fullSet.firstKey() + 1"); range = new Range(selection.firstKey(), selection.lastKey()); return updateAndGetAddedIndex(); @@ -76,8 +73,8 @@ public void sortingDone() { if (range.isEmpty()) { return null; } - final Index.RandomBuilder addedBuilder = range.consumeKeysAndAppendAdded(nanosColumnSource, - clock.currentTimeMicros() * 1000L, null); + final Index.RandomBuilder addedBuilder = + range.consumeKeysAndAppendAdded(nanosColumnSource, clock.currentTimeMicros() * 1000L, null); return addedBuilder == null ? 
null : addedBuilder.getIndex(); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/select/SourceColumn.java b/DB/src/main/java/io/deephaven/db/v2/select/SourceColumn.java index dbd65f836dd..3e9c66824eb 100644 --- a/DB/src/main/java/io/deephaven/db/v2/select/SourceColumn.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/SourceColumn.java @@ -49,8 +49,7 @@ public List initInputs(Table table) { } @Override - public List initInputs(Index index, - Map columnsOfInterest) { + public List initInputs(Index index, Map columnsOfInterest) { this.sourceColumn = columnsOfInterest.get(sourceName); if (sourceColumn == null) { throw new NoSuchColumnException(columnsOfInterest.keySet(), sourceName); @@ -120,8 +119,7 @@ public MatchPair getMatchPair() { public WritableSource newDestInstance(long size) { Class type = sourceColumn.getType(); if (DbArrayBase.class.isAssignableFrom(type)) { - return SparseArrayColumnSource.getSparseMemoryColumnSource(size, type, - sourceColumn.getComponentType()); + return SparseArrayColumnSource.getSparseMemoryColumnSource(size, type, sourceColumn.getComponentType()); } else { return SparseArrayColumnSource.getSparseMemoryColumnSource(size, type); } diff --git a/DB/src/main/java/io/deephaven/db/v2/select/StringContainsFilter.java b/DB/src/main/java/io/deephaven/db/v2/select/StringContainsFilter.java index 7b6f9012dd8..0de5bfaa96e 100644 --- a/DB/src/main/java/io/deephaven/db/v2/select/StringContainsFilter.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/StringContainsFilter.java @@ -16,8 +16,8 @@ import static io.deephaven.db.v2.select.MatchFilter.MatchType; /** - * SelectFilter that implements String.contains(valueToMatch), for use in QuickFilter so that we can - * bypass compilation on small tables. + * SelectFilter that implements String.contains(valueToMatch), for use in QuickFilter so that we can bypass compilation + * on small tables. 
*/ public class StringContainsFilter extends PatternFilter { private static final long serialVersionUID = 2L; @@ -33,52 +33,49 @@ public StringContainsFilter(String columnName, String... values) { this(MatchType.Regular, columnName, values); } - public StringContainsFilter(CaseSensitivity sensitivity, MatchType matchType, - @NotNull String columnName, String... values) { + public StringContainsFilter(CaseSensitivity sensitivity, MatchType matchType, @NotNull String columnName, + String... values) { this(sensitivity, matchType, columnName, true, false, values); } - public StringContainsFilter(CaseSensitivity sensitivity, MatchType matchType, - @NotNull String columnName, boolean internalDisjuntive, boolean removeQuotes, - String... values) { + public StringContainsFilter(CaseSensitivity sensitivity, MatchType matchType, @NotNull String columnName, + boolean internalDisjuntive, boolean removeQuotes, String... values) { super(sensitivity, matchType, columnName, - constructRegex(values, matchType, internalDisjuntive, removeQuotes, columnName)); + constructRegex(values, matchType, internalDisjuntive, removeQuotes, columnName)); this.internalDisjunctive = internalDisjuntive; this.values = values; } - private static String constructRegex(String[] values, MatchType matchType, - boolean internalDisjunctive, boolean removeQuotes, String columnName) { + private static String constructRegex(String[] values, MatchType matchType, boolean internalDisjunctive, + boolean removeQuotes, String columnName) { if (values == null || values.length == 0) { throw new IllegalArgumentException( - "StringContainsFilter must be created with at least one value parameter"); + "StringContainsFilter must be created with at least one value parameter"); } - final MatchFilter.ColumnTypeConvertor converter = removeQuotes - ? MatchFilter.ColumnTypeConvertorFactory.getConvertor(String.class, columnName) - : null; + final MatchFilter.ColumnTypeConvertor converter = + removeQuotes ? 
MatchFilter.ColumnTypeConvertorFactory.getConvertor(String.class, columnName) : null; final String regex; final Stream valueStream = Arrays.stream(values) - .map(val -> { - if (StringUtils.isNullOrEmpty(val)) { - throw new IllegalArgumentException( - "Parameters to StringContainsFilter must not be null or empty"); - } - return Pattern.quote( - converter == null ? val : converter.convertStringLiteral(val).toString()); - }); + .map(val -> { + if (StringUtils.isNullOrEmpty(val)) { + throw new IllegalArgumentException( + "Parameters to StringContainsFilter must not be null or empty"); + } + return Pattern.quote(converter == null ? val : converter.convertStringLiteral(val).toString()); + }); // If the match is simple, includes -any- or includes -none- we can just use a simple // regex of or'd values if ((matchType == MatchType.Regular && internalDisjunctive) || - (matchType == MatchType.Inverted && !internalDisjunctive)) { + (matchType == MatchType.Inverted && !internalDisjunctive)) { regex = valueStream.collect(Collectors.joining("|")); } else { // Note that internalDisjunctive is -always- false here. // If we need to match -all of- or -not one of- then we must use forward matching regex = valueStream.map(item -> "(?=.*" + item + ")") - .collect(Collectors.joining()) + ".*"; + .collect(Collectors.joining()) + ".*"; } return regex; @@ -101,8 +98,8 @@ public StringContainsFilter renameFilter(String newName) { @Override public String toString() { return (invertMatch ? "!" : "") + columnName + ".contains" - + ((values.length == 1) ? "" : internalDisjunctive ? "Any" : "All") - + (caseInsensitive ? "IgnoreCase" : "") + "(\"" + value + "\")"; + + ((values.length == 1) ? "" : internalDisjunctive ? "Any" : "All") + + (caseInsensitive ? 
"IgnoreCase" : "") + "(\"" + value + "\")"; } @Override diff --git a/DB/src/main/java/io/deephaven/db/v2/select/SwitchColumn.java b/DB/src/main/java/io/deephaven/db/v2/select/SwitchColumn.java index b9e98358edc..cb2a5e658ea 100644 --- a/DB/src/main/java/io/deephaven/db/v2/select/SwitchColumn.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/SwitchColumn.java @@ -28,8 +28,7 @@ public class SwitchColumn implements SelectColumn { private final FormulaParserConfiguration parser; - public SwitchColumn(String columnName, String expression, - FormulaParserConfiguration parserConfiguration) { + public SwitchColumn(String columnName, String expression, FormulaParserConfiguration parserConfiguration) { this.expression = Require.neqNull(expression, "expression"); this.columnName = NameValidator.validateColumnName(columnName); this.parser = parserConfiguration; @@ -48,8 +47,7 @@ public List initInputs(Table table) { } @Override - public List initInputs(Index index, - Map columnsOfInterest) { + public List initInputs(Index index, Map columnsOfInterest) { if (realColumn == null) { if (columnsOfInterest.get(expression) != null) { realColumn = new SourceColumn(expression, columnName); @@ -71,8 +69,7 @@ public List initDef(Map columnDefinitionMap) { } List usedColumns = realColumn.initDef(columnDefinitionMap); if (realColumn instanceof DhFormulaColumn) { - FormulaColumnPython formulaColumnPython = - ((DhFormulaColumn) realColumn).getFormulaColumnPython(); + FormulaColumnPython formulaColumnPython = ((DhFormulaColumn) realColumn).getFormulaColumnPython(); realColumn = formulaColumnPython != null ? formulaColumnPython : realColumn; } return usedColumns; @@ -122,8 +119,7 @@ public WritableSource newDestInstance(long size) { @Override public boolean isRetain() { - return false; // We use SourceColumns if there's no "=", so there's no need for something - // more complicated here. 
+ return false; // We use SourceColumns if there's no "=", so there's no need for something more complicated here. } @Override diff --git a/DB/src/main/java/io/deephaven/db/v2/select/TimeSeriesFilter.java b/DB/src/main/java/io/deephaven/db/v2/select/TimeSeriesFilter.java index e2758f804b0..e947aacce5c 100644 --- a/DB/src/main/java/io/deephaven/db/v2/select/TimeSeriesFilter.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/TimeSeriesFilter.java @@ -90,10 +90,7 @@ protected DBDateTime getNow() { @Override public boolean isSimpleFilter() { - /* - * This doesn't execute any user code, so it should be safe to execute it against untrusted - * data. - */ + /* This doesn't execute any user code, so it should be safe to execute it against untrusted data. */ return true; } diff --git a/DB/src/main/java/io/deephaven/db/v2/select/UnsortedClockFilter.java b/DB/src/main/java/io/deephaven/db/v2/select/UnsortedClockFilter.java index fa2a23b6d8a..04171f9656a 100644 --- a/DB/src/main/java/io/deephaven/db/v2/select/UnsortedClockFilter.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/UnsortedClockFilter.java @@ -15,12 +15,11 @@ import java.util.*; /** - * This will filter a table on a DBDateTime column for all rows greater than "now" according to a - * supplied clock. It does not require any pre-sorting of the input table, instead preserving - * relative order in the initial output and each subsequent refresh. Relative to SortedClockFilter, - * this implementation may require less overall storage and do less overall work for tables with - * relatively few monotonically nondecreasing ranges (that is, m (number of ranges) <<< n - * (size in rows)), but it will do more work on refresh(). + * This will filter a table on a DBDateTime column for all rows greater than "now" according to a supplied clock. It + * does not require any pre-sorting of the input table, instead preserving relative order in the initial output and each + * subsequent refresh. 
Relative to SortedClockFilter, this implementation may require less overall storage and do less + * overall work for tables with relatively few monotonically nondecreasing ranges (that is, m (number of ranges) + * <<< n (size in rows)), but it will do more work on refresh(). */ public class UnsortedClockFilter extends ClockFilter { @@ -29,8 +28,8 @@ public class UnsortedClockFilter extends ClockFilter { private Queue rangesByNextTime; public UnsortedClockFilter(@NotNull final String columnName, - @NotNull final Clock clock, - final boolean live) { + @NotNull final Clock clock, + final boolean live) { super(columnName, clock, live); } @@ -59,13 +58,13 @@ public int compare(final Range r1, final Range r2) { Assert.assertion(!r1.isEmpty(), "!r1.isEmpty()"); Assert.assertion(!r2.isEmpty(), "!r2.isEmpty()"); return DBLanguageFunctionUtil.compareTo(nanosColumnSource.getLong(r1.nextKey), - nanosColumnSource.getLong(r2.nextKey)); + nanosColumnSource.getLong(r2.nextKey)); } } @Override - protected @Nullable Index initializeAndGetInitialIndex(@NotNull final Index selection, - @NotNull final Index fullSet, @NotNull final Table table) { + protected @Nullable Index initializeAndGetInitialIndex(@NotNull final Index selection, @NotNull final Index fullSet, + @NotNull final Table table) { rangesByNextTime = new PriorityQueue<>(INITIAL_RANGE_QUEUE_CAPACITY, new RangeComparator()); if (selection.empty()) { @@ -77,8 +76,7 @@ public int compare(final Range r1, final Range r2) { final long nowNanos = clock.currentTimeMicros() * 1000L; final Index.Iterator selectionIterator = selection.iterator(); - // Initial current range begins and ends at the first key in the selection (which must exist - // because we've + // Initial current range begins and ends at the first key in the selection (which must exist because we've // already tested non-emptiness). 
long activeRangeFirstKey = selectionIterator.nextLong(); long activeRangeLastKey = activeRangeFirstKey; @@ -88,13 +86,12 @@ public int compare(final Range r1, final Range r2) { while (selectionIterator.hasNext()) { final long currentKey = selectionIterator.nextLong(); final long currentValue = nanosColumnSource.getLong(currentKey); - final boolean currentIsDeferred = - DBLanguageFunctionUtil.greater(currentValue, nowNanos); + final boolean currentIsDeferred = DBLanguageFunctionUtil.greater(currentValue, nowNanos); - // If we observe a change in deferral status, a discontinuity in the keys, or a decrease - // in the values, we have entered a new range + // If we observe a change in deferral status, a discontinuity in the keys, or a decrease in the values, we + // have entered a new range if (currentIsDeferred != activeRangeIsDeferred || currentKey != activeRangeLastKey + 1 - || DBLanguageFunctionUtil.less(currentValue, previousValue)) { + || DBLanguageFunctionUtil.less(currentValue, previousValue)) { // Add the current range, as appropriate if (activeRangeIsDeferred) { rangesByNextTime.add(new Range(activeRangeFirstKey, activeRangeLastKey)); @@ -129,11 +126,10 @@ protected Index updateAndGetAddedIndex() { Index.RandomBuilder addedBuilder = null; Range nextRange; Index.RandomBuilder resultBuilder; - while ((nextRange = rangesByNextTime.peek()) != null && (resultBuilder = nextRange - .consumeKeysAndAppendAdded(nanosColumnSource, nowNanos, addedBuilder)) != null) { + while ((nextRange = rangesByNextTime.peek()) != null && (resultBuilder = + nextRange.consumeKeysAndAppendAdded(nanosColumnSource, nowNanos, addedBuilder)) != null) { addedBuilder = resultBuilder; - Assert.eq(nextRange, "nextRange", rangesByNextTime.remove(), - "rangesByNextTime.remove()"); + Assert.eq(nextRange, "nextRange", rangesByNextTime.remove(), "rangesByNextTime.remove()"); if (!nextRange.isEmpty()) { rangesByNextTime.add(nextRange); } diff --git 
a/DB/src/main/java/io/deephaven/db/v2/select/analyzers/BaseLayer.java b/DB/src/main/java/io/deephaven/db/v2/select/analyzers/BaseLayer.java index 68fec011199..a4d3452390e 100644 --- a/DB/src/main/java/io/deephaven/db/v2/select/analyzers/BaseLayer.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/analyzers/BaseLayer.java @@ -19,8 +19,7 @@ public class BaseLayer extends SelectAndViewAnalyzer { } @Override - void populateModifiedColumnSetRecurse(ModifiedColumnSet mcsBuilder, - Set remainingDepsToSatisfy) { + void populateModifiedColumnSetRecurse(ModifiedColumnSet mcsBuilder, Set remainingDepsToSatisfy) { mcsBuilder.setAll(remainingDepsToSatisfy.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)); } @@ -35,21 +34,18 @@ final Map getColumnSourcesRecurse(GetMode mode) { } @Override - public void updateColumnDefinitionsFromTopLayer( - Map columnDefinitions) { + public void updateColumnDefinitionsFromTopLayer(Map columnDefinitions) { for (Map.Entry entry : sources.entrySet()) { final String name = entry.getKey(); final ColumnSource cs = entry.getValue(); // noinspection unchecked - final ColumnDefinition cd = - ColumnDefinition.fromGenericType(name, cs.getType(), cs.getComponentType()); + final ColumnDefinition cd = ColumnDefinition.fromGenericType(name, cs.getType(), cs.getComponentType()); columnDefinitions.put(name, cd); } } @Override - public void applyUpdate(ShiftAwareListener.Update upstream, ReadOnlyIndex toClear, - UpdateHelper helper) { + public void applyUpdate(ShiftAwareListener.Update upstream, ReadOnlyIndex toClear, UpdateHelper helper) { // nothing to do at the base layer } diff --git a/DB/src/main/java/io/deephaven/db/v2/select/analyzers/DependencyLayerBase.java b/DB/src/main/java/io/deephaven/db/v2/select/analyzers/DependencyLayerBase.java index b5400f5a1d8..f317c0ef8fd 100644 --- a/DB/src/main/java/io/deephaven/db/v2/select/analyzers/DependencyLayerBase.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/analyzers/DependencyLayerBase.java @@ -21,14 
+21,12 @@ public abstract class DependencyLayerBase extends SelectAndViewAnalyzer { private final String[] dependencies; final ModifiedColumnSet myModifiedColumnSet; - DependencyLayerBase(SelectAndViewAnalyzer inner, String name, SelectColumn selectColumn, - ColumnSource columnSource, - String[] dependencies, ModifiedColumnSet mcsBuilder) { + DependencyLayerBase(SelectAndViewAnalyzer inner, String name, SelectColumn selectColumn, ColumnSource columnSource, + String[] dependencies, ModifiedColumnSet mcsBuilder) { this.inner = inner; this.name = name; this.selectColumn = selectColumn; - selectColumnHoldsDbArray = - DbArrayBase.class.isAssignableFrom(selectColumn.getReturnedType()); + selectColumnHoldsDbArray = DbArrayBase.class.isAssignableFrom(selectColumn.getReturnedType()); this.columnSource = columnSource; this.dependencies = dependencies; final Set remainingDepsToSatisfy = new HashSet<>(Arrays.asList(dependencies)); @@ -38,19 +36,16 @@ public abstract class DependencyLayerBase extends SelectAndViewAnalyzer { @Override - public void updateColumnDefinitionsFromTopLayer( - Map columnDefinitions) { + public void updateColumnDefinitionsFromTopLayer(Map columnDefinitions) { // noinspection unchecked - final ColumnDefinition cd = ColumnDefinition.fromGenericType(name, columnSource.getType(), - columnSource.getComponentType()); + final ColumnDefinition cd = + ColumnDefinition.fromGenericType(name, columnSource.getType(), columnSource.getComponentType()); columnDefinitions.put(name, cd); } @Override - void populateModifiedColumnSetRecurse(ModifiedColumnSet mcsBuilder, - Set remainingDepsToSatisfy) { - // Later-defined columns override earlier-defined columns. So we satisfy column dependencies - // "on the way + void populateModifiedColumnSetRecurse(ModifiedColumnSet mcsBuilder, Set remainingDepsToSatisfy) { + // Later-defined columns override earlier-defined columns. So we satisfy column dependencies "on the way // down" the recursion. 
if (remainingDepsToSatisfy.remove(name)) { // Caller had a depenency on us, so caller gets our dependencies diff --git a/DB/src/main/java/io/deephaven/db/v2/select/analyzers/PreserveColumnLayer.java b/DB/src/main/java/io/deephaven/db/v2/select/analyzers/PreserveColumnLayer.java index 8f9e6f87a00..884c59cc298 100644 --- a/DB/src/main/java/io/deephaven/db/v2/select/analyzers/PreserveColumnLayer.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/analyzers/PreserveColumnLayer.java @@ -14,24 +14,21 @@ * {@implNote This class is part of the Deephaven engine, and not intended for direct use.} */ final public class PreserveColumnLayer extends DependencyLayerBase { - PreserveColumnLayer(SelectAndViewAnalyzer inner, String name, SelectColumn sc, ColumnSource cs, - String[] deps, - ModifiedColumnSet mcsBuilder) { + PreserveColumnLayer(SelectAndViewAnalyzer inner, String name, SelectColumn sc, ColumnSource cs, String[] deps, + ModifiedColumnSet mcsBuilder) { super(inner, name, sc, cs, deps, mcsBuilder); } @Override - public void applyUpdate(ShiftAwareListener.Update upstream, ReadOnlyIndex toClear, - UpdateHelper helper) { - // Nothing to do at this level, but need to recurse because my inner layers might need to be - // called (e.g. because they are SelectColumnLayers) + public void applyUpdate(ShiftAwareListener.Update upstream, ReadOnlyIndex toClear, UpdateHelper helper) { + // Nothing to do at this level, but need to recurse because my inner layers might need to be called (e.g. 
+ // because they are SelectColumnLayers) inner.applyUpdate(upstream, toClear, helper); } @Override Map getColumnSourcesRecurse(GetMode mode) { - // our column is not a new column, so we need to make sure that we do not double enable - // previous tracking + // our column is not a new column, so we need to make sure that we do not double enable previous tracking final Map result = inner.getColumnSourcesRecurse(mode); switch (mode) { case New: diff --git a/DB/src/main/java/io/deephaven/db/v2/select/analyzers/RedirectionLayer.java b/DB/src/main/java/io/deephaven/db/v2/select/analyzers/RedirectionLayer.java index 3de592e5598..02c8296b6e0 100644 --- a/DB/src/main/java/io/deephaven/db/v2/select/analyzers/RedirectionLayer.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/analyzers/RedirectionLayer.java @@ -24,8 +24,7 @@ final public class RedirectionLayer extends SelectAndViewAnalyzer { private final Index freeValues = Index.CURRENT_FACTORY.getEmptyIndex(); private long maxInnerIndex; - RedirectionLayer(SelectAndViewAnalyzer inner, Index resultIndex, - RedirectionIndex redirectionIndex) { + RedirectionLayer(SelectAndViewAnalyzer inner, Index resultIndex, RedirectionIndex redirectionIndex) { this.inner = inner; this.resultIndex = resultIndex; this.redirectionIndex = redirectionIndex; @@ -33,8 +32,7 @@ final public class RedirectionLayer extends SelectAndViewAnalyzer { } @Override - public void populateModifiedColumnSetRecurse(ModifiedColumnSet mcsBuilder, - Set remainingDepsToSatisfy) { + public void populateModifiedColumnSetRecurse(ModifiedColumnSet mcsBuilder, Set remainingDepsToSatisfy) { inner.populateModifiedColumnSetRecurse(mcsBuilder, remainingDepsToSatisfy); } @@ -44,24 +42,20 @@ public Map getColumnSourcesRecurse(GetMode mode) { } @Override - public void applyUpdate(ShiftAwareListener.Update upstream, ReadOnlyIndex toClear, - UpdateHelper helper) { + public void applyUpdate(ShiftAwareListener.Update upstream, ReadOnlyIndex toClear, UpdateHelper helper) { 
inner.applyUpdate(upstream, toClear, helper); - // we need to remove the removed values from our redirection index, and add them to our free - // index; so that + // we need to remove the removed values from our redirection index, and add them to our free index; so that // updating tables will not consume more space over the course of a day for abandoned rows final Index.RandomBuilder innerToFreeBuilder = Index.CURRENT_FACTORY.getRandomBuilder(); - upstream.removed - .forAllLongs(key -> innerToFreeBuilder.addKey(redirectionIndex.remove(key))); + upstream.removed.forAllLongs(key -> innerToFreeBuilder.addKey(redirectionIndex.remove(key))); freeValues.insert(innerToFreeBuilder.getIndex()); - // we have to shift things that have not been removed, this handles the unmodified rows; but - // also the + // we have to shift things that have not been removed, this handles the unmodified rows; but also the // modified rows need to have their redirections updated for subsequent modified columns if (upstream.shifted.nonempty()) { try (final Index prevIndex = resultIndex.getPrevIndex(); - final Index prevNoRemovals = prevIndex.minus(upstream.removed)) { + final Index prevNoRemovals = prevIndex.minus(upstream.removed)) { final MutableObject forwardIt = new MutableObject<>(); upstream.shifted.intersect(prevNoRemovals).apply((begin, end, delta) -> { @@ -71,8 +65,8 @@ public void applyUpdate(ShiftAwareListener.Update upstream, ReadOnlyIndex toClea } final Index.SearchIterator localForwardIt = forwardIt.getValue(); if (localForwardIt.advance(begin)) { - for (long key = localForwardIt.currentValue(); localForwardIt - .currentValue() <= end; key = localForwardIt.nextLong()) { + for (long key = localForwardIt.currentValue(); localForwardIt.currentValue() <= end; key = + localForwardIt.nextLong()) { final long inner = redirectionIndex.remove(key); if (inner != Index.NULL_KEY) { redirectionIndex.put(key + delta, inner); @@ -83,11 +77,10 @@ public void applyUpdate(ShiftAwareListener.Update 
upstream, ReadOnlyIndex toClea } } } else { - try (final Index.SearchIterator reverseIt = - prevNoRemovals.reverseIterator()) { + try (final Index.SearchIterator reverseIt = prevNoRemovals.reverseIterator()) { if (reverseIt.advance(end)) { - for (long key = reverseIt.currentValue(); reverseIt - .currentValue() >= begin; key = reverseIt.nextLong()) { + for (long key = reverseIt.currentValue(); reverseIt.currentValue() >= begin; key = + reverseIt.nextLong()) { final long inner = redirectionIndex.remove(key); if (inner != Index.NULL_KEY) { redirectionIndex.put(key + delta, inner); @@ -108,10 +101,8 @@ public void applyUpdate(ShiftAwareListener.Update upstream, ReadOnlyIndex toClea } if (upstream.added.nonempty()) { - // added is non-empty, so can always remove at least one value from the index (which - // must be >= 0); - // if there is no freeValue, this is safe because we'll just remove something from an - // empty index + // added is non-empty, so can always remove at least one value from the index (which must be >= 0); + // if there is no freeValue, this is safe because we'll just remove something from an empty index // if there is a freeValue, we'll remove up to that // if there are not enough free values, we'll remove all the free values then beyond final MutableLong lastAllocated = new MutableLong(0); @@ -136,8 +127,7 @@ public SelectAndViewAnalyzer getInner() { } @Override - public void updateColumnDefinitionsFromTopLayer( - Map columnDefinitions) { + public void updateColumnDefinitionsFromTopLayer(Map columnDefinitions) { inner.updateColumnDefinitionsFromTopLayer(columnDefinitions); } diff --git a/DB/src/main/java/io/deephaven/db/v2/select/analyzers/SelectAndViewAnalyzer.java b/DB/src/main/java/io/deephaven/db/v2/select/analyzers/SelectAndViewAnalyzer.java index 5fc209874de..7321658da08 100644 --- a/DB/src/main/java/io/deephaven/db/v2/select/analyzers/SelectAndViewAnalyzer.java +++ 
b/DB/src/main/java/io/deephaven/db/v2/select/analyzers/SelectAndViewAnalyzer.java @@ -25,8 +25,7 @@ public enum Mode { } public static SelectAndViewAnalyzer create(Mode mode, Map columnSources, - Index index, ModifiedColumnSet parentMcs, boolean publishTheseSources, - SelectColumn... selectColumns) { + Index index, ModifiedColumnSet parentMcs, boolean publishTheseSources, SelectColumn... selectColumns) { SelectAndViewAnalyzer analyzer = createBaseLayer(columnSources, publishTheseSources); final Map columnDefinitions = new LinkedHashMap<>(); final RedirectionIndex redirectionIndex; @@ -43,18 +42,17 @@ public static SelectAndViewAnalyzer create(Mode mode, Map sc.initDef(columnDefinitions); sc.initInputs(index, columnsOfInterest); final Stream allDependencies = - Stream.concat(sc.getColumns().stream(), sc.getColumnArrays().stream()); + Stream.concat(sc.getColumns().stream(), sc.getColumnArrays().stream()); final String[] distinctDeps = allDependencies.distinct().toArray(String[]::new); final ModifiedColumnSet mcsBuilder = new ModifiedColumnSet(parentMcs); - if (sc instanceof SourceColumn || (sc instanceof SwitchColumn - && ((SwitchColumn) sc).getRealColumn() instanceof SourceColumn)) { + if (sc instanceof SourceColumn + || (sc instanceof SwitchColumn && ((SwitchColumn) sc).getRealColumn() instanceof SourceColumn)) { final ColumnSource sccs = sc.getDataView(); - if ((sccs instanceof SparseArrayColumnSource - || sccs instanceof ArrayBackedColumnSource) - && !DbArrayBase.class.isAssignableFrom(sc.getReturnedType())) { - analyzer = analyzer.createLayerForPreserve(sc.getName(), sc, sc.getDataView(), - distinctDeps, mcsBuilder); + if ((sccs instanceof SparseArrayColumnSource || sccs instanceof ArrayBackedColumnSource) + && !DbArrayBase.class.isAssignableFrom(sc.getReturnedType())) { + analyzer = analyzer.createLayerForPreserve(sc.getName(), sc, sc.getDataView(), distinctDeps, + mcsBuilder); continue; } } @@ -63,40 +61,35 @@ public static SelectAndViewAnalyzer create(Mode 
mode, Map switch (mode) { case VIEW_LAZY: { final ColumnSource viewCs = sc.getLazyView(); - analyzer = analyzer.createLayerForView(sc.getName(), sc, viewCs, distinctDeps, - mcsBuilder); + analyzer = analyzer.createLayerForView(sc.getName(), sc, viewCs, distinctDeps, mcsBuilder); break; } case VIEW_EAGER: { final ColumnSource viewCs = sc.getDataView(); - analyzer = analyzer.createLayerForView(sc.getName(), sc, viewCs, distinctDeps, - mcsBuilder); + analyzer = analyzer.createLayerForView(sc.getName(), sc, viewCs, distinctDeps, mcsBuilder); break; } case SELECT_STATIC: { - // We need to call newDestInstance because only newDestInstance has the - // knowledge to endow our + // We need to call newDestInstance because only newDestInstance has the knowledge to endow our // created array with the proper componentType (in the case of DbArrays). final WritableSource scs = sc.newDestInstance(targetSize); - analyzer = analyzer.createLayerForSelect(sc.getName(), sc, scs, null, - distinctDeps, mcsBuilder, false); + analyzer = + analyzer.createLayerForSelect(sc.getName(), sc, scs, null, distinctDeps, mcsBuilder, false); break; } case SELECT_REDIRECTED_REFRESHING: case SELECT_REFRESHING: { - // We need to call newDestInstance because only newDestInstance has the - // knowledge to endow our + // We need to call newDestInstance because only newDestInstance has the knowledge to endow our // created array with the proper componentType (in the case of DbArrays). 
// TODO(kosak): use DeltaAwareColumnSource WritableSource scs = sc.newDestInstance(targetSize); WritableSource underlyingSource = null; if (redirectionIndex != null) { underlyingSource = scs; - scs = new RedirectedColumnSource(redirectionIndex, underlyingSource, - index.intSize()); + scs = new RedirectedColumnSource(redirectionIndex, underlyingSource, index.intSize()); } - analyzer = analyzer.createLayerForSelect(sc.getName(), sc, scs, - underlyingSource, distinctDeps, mcsBuilder, redirectionIndex != null); + analyzer = analyzer.createLayerForSelect(sc.getName(), sc, scs, underlyingSource, distinctDeps, + mcsBuilder, redirectionIndex != null); break; } default: @@ -107,35 +100,32 @@ public static SelectAndViewAnalyzer create(Mode mode, Map } private static SelectAndViewAnalyzer createBaseLayer(Map sources, - boolean publishTheseSources) { + boolean publishTheseSources) { return new BaseLayer(sources, publishTheseSources); } - private RedirectionLayer createRedirectionLayer(Index resultIndex, - RedirectionIndex redirectionIndex) { + private RedirectionLayer createRedirectionLayer(Index resultIndex, RedirectionIndex redirectionIndex) { return new RedirectionLayer(this, resultIndex, redirectionIndex); } private SelectAndViewAnalyzer createLayerForSelect(String name, SelectColumn sc, - WritableSource cs, WritableSource underlyingSource, - String[] parentColumnDependencies, ModifiedColumnSet mcsBuilder, boolean isRedirected) { - return new SelectColumnLayer(this, name, sc, cs, underlyingSource, parentColumnDependencies, - mcsBuilder, isRedirected); + WritableSource cs, WritableSource underlyingSource, + String[] parentColumnDependencies, ModifiedColumnSet mcsBuilder, boolean isRedirected) { + return new SelectColumnLayer(this, name, sc, cs, underlyingSource, parentColumnDependencies, mcsBuilder, + isRedirected); } private SelectAndViewAnalyzer createLayerForView(String name, SelectColumn sc, ColumnSource cs, - String[] parentColumnDependencies, ModifiedColumnSet 
mcsBuilder) { + String[] parentColumnDependencies, ModifiedColumnSet mcsBuilder) { return new ViewColumnLayer(this, name, sc, cs, parentColumnDependencies, mcsBuilder); } - private SelectAndViewAnalyzer createLayerForPreserve(String name, SelectColumn sc, - ColumnSource cs, - String[] parentColumnDependencies, ModifiedColumnSet mcsBuilder) { + private SelectAndViewAnalyzer createLayerForPreserve(String name, SelectColumn sc, ColumnSource cs, + String[] parentColumnDependencies, ModifiedColumnSet mcsBuilder) { return new PreserveColumnLayer(this, name, sc, cs, parentColumnDependencies, mcsBuilder); } - abstract void populateModifiedColumnSetRecurse(ModifiedColumnSet mcsBuilder, - Set remainingDepsToSatisfy); + abstract void populateModifiedColumnSetRecurse(ModifiedColumnSet mcsBuilder, Set remainingDepsToSatisfy); enum GetMode { All, New, Published @@ -177,12 +167,12 @@ private Index getExisting() { private void ensure(boolean withModifies) { if (withModifies && shiftedWithModifies == null) { - shiftedWithModifies = SafeCloseablePair.downcast( - upstream.shifted.extractParallelShiftedRowsFromPostShiftIndex(getExisting())); + shiftedWithModifies = SafeCloseablePair + .downcast(upstream.shifted.extractParallelShiftedRowsFromPostShiftIndex(getExisting())); } else if (!withModifies && shiftedWithoutModifies == null) { try (final Index candidates = getExisting().minus(upstream.modified)) { - shiftedWithoutModifies = SafeCloseablePair.downcast( - upstream.shifted.extractParallelShiftedRowsFromPostShiftIndex(candidates)); + shiftedWithoutModifies = SafeCloseablePair + .downcast(upstream.shifted.extractParallelShiftedRowsFromPostShiftIndex(candidates)); } } } @@ -227,13 +217,12 @@ public void close() { * @param toClear rows that used to exist and no longer exist * @param helper convenience class that memoizes reusable calculations for this update */ - public abstract void applyUpdate(ShiftAwareListener.Update upstream, ReadOnlyIndex toClear, - UpdateHelper helper); + public 
abstract void applyUpdate(ShiftAwareListener.Update upstream, ReadOnlyIndex toClear, UpdateHelper helper); /** - * Our job here is to calculate the effects: a map from incoming column to a list of columns - * that it effects. We do this in two stages. In the first stage we create a map from column to - * (set of dependent columns). In the second stage we reverse that map. + * Our job here is to calculate the effects: a map from incoming column to a list of columns that it effects. We do + * this in two stages. In the first stage we create a map from column to (set of dependent columns). In the second + * stage we reverse that map. */ public final Map calcEffects() { final Map> dependsOn = calcDependsOnRecurse(); @@ -250,8 +239,7 @@ public final Map calcEffects() { // Convert effects type into result type final Map result = new HashMap<>(); for (Map.Entry> entry : effects.entrySet()) { - final String[] value = - entry.getValue().toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + final String[] value = entry.getValue().toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY); result.put(entry.getKey(), value); } return result; @@ -261,8 +249,7 @@ public final Map calcEffects() { public abstract SelectAndViewAnalyzer getInner(); - public abstract void updateColumnDefinitionsFromTopLayer( - Map columnDefinitions); + public abstract void updateColumnDefinitionsFromTopLayer(Map columnDefinitions); public abstract void startTrackingPrev(); } diff --git a/DB/src/main/java/io/deephaven/db/v2/select/analyzers/SelectColumnLayer.java b/DB/src/main/java/io/deephaven/db/v2/select/analyzers/SelectColumnLayer.java index 612d25d4c2c..ef776b8cada 100644 --- a/DB/src/main/java/io/deephaven/db/v2/select/analyzers/SelectColumnLayer.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/analyzers/SelectColumnLayer.java @@ -25,14 +25,13 @@ final public class SelectColumnLayer extends SelectOrViewColumnLayer { private final boolean isRedirected; /** - * A memoized copy of selectColumn's data 
view. Use {@link SelectColumnLayer#getChunkSource()} - * to access. + * A memoized copy of selectColumn's data view. Use {@link SelectColumnLayer#getChunkSource()} to access. */ private ChunkSource chunkSource; SelectColumnLayer(SelectAndViewAnalyzer inner, String name, SelectColumn sc, - WritableSource ws, WritableSource underlying, - String[] deps, ModifiedColumnSet mcsBuilder, boolean isRedirected) { + WritableSource ws, WritableSource underlying, + String[] deps, ModifiedColumnSet mcsBuilder, boolean isRedirected) { super(inner, name, sc, ws, underlying, deps, mcsBuilder); this.writableSource = ws; this.isRedirected = isRedirected; @@ -51,10 +50,9 @@ private ChunkSource getChunkSource() { @Override public void applyUpdate(final ShiftAwareListener.Update upstream, final ReadOnlyIndex toClear, - final UpdateHelper helper) { + final UpdateHelper helper) { final int PAGE_SIZE = 4096; - final LongToIntFunction contextSize = - (long size) -> size > PAGE_SIZE ? PAGE_SIZE : (int) size; + final LongToIntFunction contextSize = (long size) -> size > PAGE_SIZE ? PAGE_SIZE : (int) size; if (isRedirected && upstream.removed.nonempty()) { clearObjectsAtThisLevel(upstream.removed); @@ -64,59 +62,49 @@ public void applyUpdate(final ShiftAwareListener.Update upstream, final ReadOnly inner.applyUpdate(upstream, toClear, helper); final boolean modifiesAffectUs = - upstream.modified.nonempty() - && upstream.modifiedColumnSet.containsAny(myModifiedColumnSet); + upstream.modified.nonempty() && upstream.modifiedColumnSet.containsAny(myModifiedColumnSet); // We include modifies in our shifted sets if we are not going to process them separately. final ReadOnlyIndex preMoveKeys = helper.getPreShifted(!modifiesAffectUs); final ReadOnlyIndex postMoveKeys = helper.getPostShifted(!modifiesAffectUs); final long lastKey = Math.max(postMoveKeys.empty() ? -1 : postMoveKeys.lastKey(), - upstream.added.empty() ? -1 : upstream.added.lastKey()); + upstream.added.empty() ? 
-1 : upstream.added.lastKey()); if (lastKey != -1) { writableSource.ensureCapacity(lastKey + 1); } - // Note that applyUpdate is called during initialization. If the table begins empty, we - // still want to force that - // an initial call to getDataView() (via getChunkSource()) or else the formula will only be - // computed later when - // data begins to flow; start-of-day is likely a bad time to find formula errors for our - // customers. + // Note that applyUpdate is called during initialization. If the table begins empty, we still want to force that + // an initial call to getDataView() (via getChunkSource()) or else the formula will only be computed later when + // data begins to flow; start-of-day is likely a bad time to find formula errors for our customers. final ChunkSource chunkSource = getChunkSource(); final boolean needGetContext = upstream.added.nonempty() || modifiesAffectUs; final boolean needDestContext = preMoveKeys.nonempty() || needGetContext; final int chunkSourceContextSize = - contextSize.applyAsInt(Math.max(upstream.added.size(), upstream.modified.size())); - final int destContextSize = - contextSize.applyAsInt(Math.max(preMoveKeys.size(), chunkSourceContextSize)); + contextSize.applyAsInt(Math.max(upstream.added.size(), upstream.modified.size())); + final int destContextSize = contextSize.applyAsInt(Math.max(preMoveKeys.size(), chunkSourceContextSize)); - try ( - final WritableChunkSink.FillFromContext destContext = + try (final WritableChunkSink.FillFromContext destContext = needDestContext ? writableSource.makeFillFromContext(destContextSize) : null; - final ChunkSource.GetContext chunkSourceContext = - needGetContext ? chunkSource.makeGetContext(chunkSourceContextSize) : null) { + final ChunkSource.GetContext chunkSourceContext = + needGetContext ? chunkSource.makeGetContext(chunkSourceContextSize) : null) { // apply shifts! 
if (!isRedirected && preMoveKeys.nonempty()) { assert destContext != null; // note: we cannot use a get context here as destination is identical to source final int shiftContextSize = contextSize.applyAsInt(preMoveKeys.size()); - try ( - final ChunkSource.FillContext srcContext = - writableSource.makeFillContext(shiftContextSize); - final WritableChunk chunk = - writableSource.getChunkType().makeWritableChunk(shiftContextSize); - final OrderedKeys.Iterator srcIter = preMoveKeys.getOrderedKeysIterator(); - final OrderedKeys.Iterator destIter = postMoveKeys.getOrderedKeysIterator()) { + try (final ChunkSource.FillContext srcContext = writableSource.makeFillContext(shiftContextSize); + final WritableChunk chunk = + writableSource.getChunkType().makeWritableChunk(shiftContextSize); + final OrderedKeys.Iterator srcIter = preMoveKeys.getOrderedKeysIterator(); + final OrderedKeys.Iterator destIter = postMoveKeys.getOrderedKeysIterator()) { while (srcIter.hasMore()) { final OrderedKeys srcKeys = srcIter.getNextOrderedKeysWithLength(PAGE_SIZE); - final OrderedKeys destKeys = - destIter.getNextOrderedKeysWithLength(PAGE_SIZE); - Assert.eq(srcKeys.size(), "srcKeys.size()", destKeys.size(), - "destKeys.size()"); + final OrderedKeys destKeys = destIter.getNextOrderedKeysWithLength(PAGE_SIZE); + Assert.eq(srcKeys.size(), "srcKeys.size()", destKeys.size(), "destKeys.size()"); writableSource.fillPrevChunk(srcContext, chunk, srcKeys); writableSource.fillFromChunk(destContext, chunk, destKeys); } @@ -130,8 +118,7 @@ public void applyUpdate(final ShiftAwareListener.Update upstream, final ReadOnly try (final OrderedKeys.Iterator keyIter = upstream.added.getOrderedKeysIterator()) { while (keyIter.hasMore()) { final OrderedKeys keys = keyIter.getNextOrderedKeysWithLength(PAGE_SIZE); - writableSource.fillFromChunk(destContext, - chunkSource.getChunk(chunkSourceContext, keys), keys); + writableSource.fillFromChunk(destContext, chunkSource.getChunk(chunkSourceContext, keys), keys); } } } 
@@ -139,12 +126,10 @@ public void applyUpdate(final ShiftAwareListener.Update upstream, final ReadOnly // apply modifies! if (modifiesAffectUs) { assert chunkSourceContext != null; - try (final OrderedKeys.Iterator keyIter = - upstream.modified.getOrderedKeysIterator()) { + try (final OrderedKeys.Iterator keyIter = upstream.modified.getOrderedKeysIterator()) { while (keyIter.hasMore()) { final OrderedKeys keys = keyIter.getNextOrderedKeysWithLength(PAGE_SIZE); - writableSource.fillFromChunk(destContext, - chunkSource.getChunk(chunkSourceContext, keys), keys); + writableSource.fillFromChunk(destContext, chunkSource.getChunk(chunkSourceContext, keys), keys); } } } @@ -157,8 +142,7 @@ public void applyUpdate(final ShiftAwareListener.Update upstream, final ReadOnly private void clearObjectsAtThisLevel(ReadOnlyIndex keys) { // Only bother doing this if we're holding on to references. - if (!writableSource.getType().isPrimitive() - && (writableSource.getType() != DBDateTime.class)) { + if (!writableSource.getType().isPrimitive() && (writableSource.getType() != DBDateTime.class)) { ChunkUtils.fillWithNullValue(writableSource, keys); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/select/analyzers/SelectOrViewColumnLayer.java b/DB/src/main/java/io/deephaven/db/v2/select/analyzers/SelectOrViewColumnLayer.java index b4168f41412..58d50319925 100644 --- a/DB/src/main/java/io/deephaven/db/v2/select/analyzers/SelectOrViewColumnLayer.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/analyzers/SelectOrViewColumnLayer.java @@ -11,8 +11,8 @@ public abstract class SelectOrViewColumnLayer extends DependencyLayerBase { private final ColumnSource optionalUnderlying; SelectOrViewColumnLayer(SelectAndViewAnalyzer inner, String name, SelectColumn sc, - ColumnSource ws, ColumnSource optionalUnderlying, - String[] deps, ModifiedColumnSet mcsBuilder) { + ColumnSource ws, ColumnSource optionalUnderlying, + String[] deps, ModifiedColumnSet mcsBuilder) { super(inner, name, sc, ws, deps, 
mcsBuilder); this.optionalUnderlying = optionalUnderlying; } diff --git a/DB/src/main/java/io/deephaven/db/v2/select/analyzers/ViewColumnLayer.java b/DB/src/main/java/io/deephaven/db/v2/select/analyzers/ViewColumnLayer.java index 49af6b015bc..40517066c78 100644 --- a/DB/src/main/java/io/deephaven/db/v2/select/analyzers/ViewColumnLayer.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/analyzers/ViewColumnLayer.java @@ -7,17 +7,14 @@ import io.deephaven.db.v2.utils.ReadOnlyIndex; final public class ViewColumnLayer extends SelectOrViewColumnLayer { - ViewColumnLayer(SelectAndViewAnalyzer inner, String name, SelectColumn sc, ColumnSource cs, - String[] deps, - ModifiedColumnSet mcsBuilder) { + ViewColumnLayer(SelectAndViewAnalyzer inner, String name, SelectColumn sc, ColumnSource cs, String[] deps, + ModifiedColumnSet mcsBuilder) { super(inner, name, sc, cs, null, deps, mcsBuilder); } @Override - public void applyUpdate(ShiftAwareListener.Update upstream, ReadOnlyIndex toClear, - UpdateHelper helper) { - // To be parallel with SelectColumnLayer, we would recurse here, but since this is - // ViewColumnLayer + public void applyUpdate(ShiftAwareListener.Update upstream, ReadOnlyIndex toClear, UpdateHelper helper) { + // To be parallel with SelectColumnLayer, we would recurse here, but since this is ViewColumnLayer // (and all my inner layers are ViewColumnLayer), there's nothing to do. 
} } diff --git a/DB/src/main/java/io/deephaven/db/v2/select/chunkfilters/ChunkMatchFilterFactory.java b/DB/src/main/java/io/deephaven/db/v2/select/chunkfilters/ChunkMatchFilterFactory.java index d69c0d67089..b91b743ab6c 100644 --- a/DB/src/main/java/io/deephaven/db/v2/select/chunkfilters/ChunkMatchFilterFactory.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/chunkfilters/ChunkMatchFilterFactory.java @@ -6,8 +6,8 @@ public class ChunkMatchFilterFactory { private ChunkMatchFilterFactory() {} // static only - public static ChunkFilter getChunkFilter(Class type, boolean caseInsensitive, - boolean invertMatch, final Object... keys) { + public static ChunkFilter getChunkFilter(Class type, boolean caseInsensitive, boolean invertMatch, + final Object... keys) { if (keys.length == 0) { if (invertMatch) { return ChunkFilter.TRUE_FILTER_INSTANCE; diff --git a/DB/src/main/java/io/deephaven/db/v2/select/chunkfilters/ReplicateChunkFilters.java b/DB/src/main/java/io/deephaven/db/v2/select/chunkfilters/ReplicateChunkFilters.java index 509eaee9106..a6798f4ecc5 100644 --- a/DB/src/main/java/io/deephaven/db/v2/select/chunkfilters/ReplicateChunkFilters.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/chunkfilters/ReplicateChunkFilters.java @@ -10,23 +10,18 @@ public class ReplicateChunkFilters { public static void main(String[] args) throws IOException { - ReplicatePrimitiveCode.charToShortAndByte(CharRangeComparator.class, - ReplicatePrimitiveCode.MAIN_SRC); - ReplicatePrimitiveCode.charToInteger(CharRangeComparator.class, - ReplicatePrimitiveCode.MAIN_SRC, Collections.emptyMap()); + ReplicatePrimitiveCode.charToShortAndByte(CharRangeComparator.class, ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.charToInteger(CharRangeComparator.class, ReplicatePrimitiveCode.MAIN_SRC, + Collections.emptyMap()); - ReplicatePrimitiveCode.charToShortAndByte(CharRangeFilter.class, - ReplicatePrimitiveCode.MAIN_SRC); + 
ReplicatePrimitiveCode.charToShortAndByte(CharRangeFilter.class, ReplicatePrimitiveCode.MAIN_SRC); ReplicatePrimitiveCode.charToInteger(CharRangeFilter.class, ReplicatePrimitiveCode.MAIN_SRC, - Collections.emptyMap()); + Collections.emptyMap()); ReplicatePrimitiveCode.charToLong(CharRangeFilter.class, ReplicatePrimitiveCode.MAIN_SRC); - ReplicatePrimitiveCode.floatToAllFloatingPoints(FloatRangeComparator.class, - ReplicatePrimitiveCode.MAIN_SRC); - ReplicatePrimitiveCode.floatToAllFloatingPoints(FloatRangeFilter.class, - ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.floatToAllFloatingPoints(FloatRangeComparator.class, ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.floatToAllFloatingPoints(FloatRangeFilter.class, ReplicatePrimitiveCode.MAIN_SRC); - ReplicatePrimitiveCode.charToAllButBoolean(CharChunkMatchFilterFactory.class, - ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.charToAllButBoolean(CharChunkMatchFilterFactory.class, ReplicatePrimitiveCode.MAIN_SRC); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/select/chunkfilters/StringChunkMatchFilterFactory.java b/DB/src/main/java/io/deephaven/db/v2/select/chunkfilters/StringChunkMatchFilterFactory.java index 2f8718f9be3..d6a5cbb0ec9 100644 --- a/DB/src/main/java/io/deephaven/db/v2/select/chunkfilters/StringChunkMatchFilterFactory.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/chunkfilters/StringChunkMatchFilterFactory.java @@ -30,8 +30,7 @@ public boolean equalKey(String s, String s2) { private StringChunkMatchFilterFactory() {} // static use only - static ChunkFilter.ObjectChunkFilter makeCaseInsensitiveFilter(boolean invert, - Object... values) { + static ChunkFilter.ObjectChunkFilter makeCaseInsensitiveFilter(boolean invert, Object... 
values) { if (invert) { if (values.length == 1) { return new InverseSingleValueStringChunkFilter((String) values[0]); @@ -40,8 +39,8 @@ static ChunkFilter.ObjectChunkFilter makeCaseInsensitiveFilter(boolean invert, return new InverseTwoValueStringChunkFilter((String) values[0], (String) values[1]); } if (values.length == 3) { - return new InverseThreeValueStringChunkFilter((String) values[0], - (String) values[1], (String) values[2]); + return new InverseThreeValueStringChunkFilter((String) values[0], (String) values[1], + (String) values[2]); } return new InverseMultiValueStringChunkFilter(values); } else { @@ -52,15 +51,13 @@ static ChunkFilter.ObjectChunkFilter makeCaseInsensitiveFilter(boolean invert, return new TwoValueStringChunkFilter((String) values[0], (String) values[1]); } if (values.length == 3) { - return new ThreeValueStringChunkFilter((String) values[0], (String) values[1], - (String) values[2]); + return new ThreeValueStringChunkFilter((String) values[0], (String) values[1], (String) values[2]); } return new MultiValueStringChunkFilter(values); } } - private static class SingleValueStringChunkFilter - implements ChunkFilter.ObjectChunkFilter { + private static class SingleValueStringChunkFilter implements ChunkFilter.ObjectChunkFilter { private final String value; private SingleValueStringChunkFilter(String value) { @@ -68,8 +65,8 @@ private SingleValueStringChunkFilter(String value) { } @Override - public void filter(ObjectChunk values, - LongChunk keys, WritableLongChunk results) { + public void filter(ObjectChunk values, LongChunk keys, + WritableLongChunk results) { final ObjectChunk stringChunk = values.asTypedObjectChunk(); results.setSize(0); for (int ii = 0; ii < stringChunk.size(); ++ii) { @@ -81,8 +78,7 @@ public void filter(ObjectChunk values, } } - private static class InverseSingleValueStringChunkFilter - implements ChunkFilter.ObjectChunkFilter { + private static class InverseSingleValueStringChunkFilter implements 
ChunkFilter.ObjectChunkFilter { private final String value; private InverseSingleValueStringChunkFilter(String value) { @@ -90,8 +86,8 @@ private InverseSingleValueStringChunkFilter(String value) { } @Override - public void filter(ObjectChunk values, - LongChunk keys, WritableLongChunk results) { + public void filter(ObjectChunk values, LongChunk keys, + WritableLongChunk results) { final ObjectChunk stringChunk = values.asTypedObjectChunk(); results.setSize(0); for (int ii = 0; ii < stringChunk.size(); ++ii) { @@ -103,8 +99,7 @@ public void filter(ObjectChunk values, } } - private static class TwoValueStringChunkFilter - implements ChunkFilter.ObjectChunkFilter { + private static class TwoValueStringChunkFilter implements ChunkFilter.ObjectChunkFilter { private final String value1; private final String value2; @@ -114,8 +109,8 @@ private TwoValueStringChunkFilter(String value1, String value2) { } @Override - public void filter(ObjectChunk values, - LongChunk keys, WritableLongChunk results) { + public void filter(ObjectChunk values, LongChunk keys, + WritableLongChunk results) { final ObjectChunk stringChunk = values.asTypedObjectChunk(); results.setSize(0); for (int ii = 0; ii < stringChunk.size(); ++ii) { @@ -127,8 +122,7 @@ public void filter(ObjectChunk values, } } - private static class InverseTwoValueStringChunkFilter - implements ChunkFilter.ObjectChunkFilter { + private static class InverseTwoValueStringChunkFilter implements ChunkFilter.ObjectChunkFilter { private final String value1; private final String value2; @@ -138,22 +132,20 @@ private InverseTwoValueStringChunkFilter(String value1, String value2) { } @Override - public void filter(ObjectChunk values, - LongChunk keys, WritableLongChunk results) { + public void filter(ObjectChunk values, LongChunk keys, + WritableLongChunk results) { final ObjectChunk stringChunk = values.asTypedObjectChunk(); results.setSize(0); for (int ii = 0; ii < stringChunk.size(); ++ii) { final String checkString = 
stringChunk.get(ii); - if (!(value1.equalsIgnoreCase(checkString) - || value2.equalsIgnoreCase(checkString))) { + if (!(value1.equalsIgnoreCase(checkString) || value2.equalsIgnoreCase(checkString))) { results.add(keys.get(ii)); } } } } - private static class ThreeValueStringChunkFilter - implements ChunkFilter.ObjectChunkFilter { + private static class ThreeValueStringChunkFilter implements ChunkFilter.ObjectChunkFilter { private final String value1; private final String value2; private final String value3; @@ -165,22 +157,21 @@ private ThreeValueStringChunkFilter(String value1, String value2, String value3) } @Override - public void filter(ObjectChunk values, - LongChunk keys, WritableLongChunk results) { + public void filter(ObjectChunk values, LongChunk keys, + WritableLongChunk results) { final ObjectChunk stringChunk = values.asTypedObjectChunk(); results.setSize(0); for (int ii = 0; ii < stringChunk.size(); ++ii) { final String checkString = stringChunk.get(ii); if (value1.equalsIgnoreCase(checkString) || value2.equalsIgnoreCase(checkString) - || value3.equalsIgnoreCase(checkString)) { + || value3.equalsIgnoreCase(checkString)) { results.add(keys.get(ii)); } } } } - private static class InverseThreeValueStringChunkFilter - implements ChunkFilter.ObjectChunkFilter { + private static class InverseThreeValueStringChunkFilter implements ChunkFilter.ObjectChunkFilter { private final String value1; private final String value2; private final String value3; @@ -192,22 +183,21 @@ private InverseThreeValueStringChunkFilter(String value1, String value2, String } @Override - public void filter(ObjectChunk values, - LongChunk keys, WritableLongChunk results) { + public void filter(ObjectChunk values, LongChunk keys, + WritableLongChunk results) { final ObjectChunk stringChunk = values.asTypedObjectChunk(); results.setSize(0); for (int ii = 0; ii < stringChunk.size(); ++ii) { final String checkString = stringChunk.get(ii); if (!(value1.equalsIgnoreCase(checkString) || 
value2.equalsIgnoreCase(checkString) - || value3.equalsIgnoreCase(checkString))) { + || value3.equalsIgnoreCase(checkString))) { results.add(keys.get(ii)); } } } } - private static class MultiValueStringChunkFilter - implements ChunkFilter.ObjectChunkFilter { + private static class MultiValueStringChunkFilter implements ChunkFilter.ObjectChunkFilter { private final KeyedObjectHashSet values; private MultiValueStringChunkFilter(Object... values) { @@ -218,8 +208,8 @@ private MultiValueStringChunkFilter(Object... values) { } @Override - public void filter(ObjectChunk values, - LongChunk keys, WritableLongChunk results) { + public void filter(ObjectChunk values, LongChunk keys, + WritableLongChunk results) { final ObjectChunk stringChunk = values.asTypedObjectChunk(); results.setSize(0); for (int ii = 0; ii < stringChunk.size(); ++ii) { @@ -231,8 +221,7 @@ public void filter(ObjectChunk values, } } - private static class InverseMultiValueStringChunkFilter - implements ChunkFilter.ObjectChunkFilter { + private static class InverseMultiValueStringChunkFilter implements ChunkFilter.ObjectChunkFilter { private final KeyedObjectHashSet values; private InverseMultiValueStringChunkFilter(Object... values) { @@ -243,8 +232,8 @@ private InverseMultiValueStringChunkFilter(Object... 
values) { } @Override - public void filter(ObjectChunk values, - LongChunk keys, WritableLongChunk results) { + public void filter(ObjectChunk values, LongChunk keys, + WritableLongChunk results) { final ObjectChunk stringChunk = values.asTypedObjectChunk(); results.setSize(0); for (int ii = 0; ii < stringChunk.size(); ++ii) { diff --git a/DB/src/main/java/io/deephaven/db/v2/select/codegen/FormulaAnalyzer.java b/DB/src/main/java/io/deephaven/db/v2/select/codegen/FormulaAnalyzer.java index 77ad507b4fd..2b3af59415a 100644 --- a/DB/src/main/java/io/deephaven/db/v2/select/codegen/FormulaAnalyzer.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/codegen/FormulaAnalyzer.java @@ -22,33 +22,29 @@ public class FormulaAnalyzer { private static final Logger log = LoggerFactory.getLogger(FormulaAnalyzer.class); public static Result analyze(final String rawFormulaString, - final Map columnDefinitionMap, - Map otherVariables) { + final Map columnDefinitionMap, + Map otherVariables) { try { return analyzeHelper(rawFormulaString, columnDefinitionMap, otherVariables); } catch (Exception e) { - throw new FormulaCompilationException( - "Formula compilation error for: " + rawFormulaString, e); + throw new FormulaCompilationException("Formula compilation error for: " + rawFormulaString, e); } } private static Result analyzeHelper(final String rawFormulaString, - final Map columnDefinitionMap, - Map otherVariables) throws Exception { + final Map columnDefinitionMap, + Map otherVariables) throws Exception { final Map possibleParams = new HashMap<>(); final QueryScope queryScope = QueryScope.getScope(); for (Param param : queryScope.getParams(queryScope.getParamNames())) { possibleParams.put(param.getName(), param); } - final DBTimeUtils.Result timeConversionResult = - DBTimeUtils.convertExpression(rawFormulaString); - final DBLanguageParser.Result result = - getCompiledFormula(columnDefinitionMap, timeConversionResult, + final DBTimeUtils.Result timeConversionResult = 
DBTimeUtils.convertExpression(rawFormulaString); + final DBLanguageParser.Result result = getCompiledFormula(columnDefinitionMap, timeConversionResult, otherVariables); - log.debug().append("Expression (after language conversion) : ") - .append(result.getConvertedExpression()).endl(); + log.debug().append("Expression (after language conversion) : ").append(result.getConvertedExpression()).endl(); final List usedColumns = new ArrayList<>(); final List userParams = new ArrayList<>(); @@ -61,8 +57,8 @@ private static Result analyzeHelper(final String rawFormulaString, } else if (columnDefinitionMap.get(variable) != null) { usedColumns.add(variable); } else if (variable.endsWith(colSuffix) && - null != columnDefinitionMap.get( - bareName = variable.substring(0, variable.length() - colSuffix.length()))) { + null != columnDefinitionMap + .get(bareName = variable.substring(0, variable.length() - colSuffix.length()))) { usedColumnArrays.add(bareName); } else if (possibleParams.containsKey(variable)) { userParams.add(variable); @@ -75,16 +71,15 @@ private static Result analyzeHelper(final String rawFormulaString, final String cookedFormulaString = result.getConvertedExpression(); final String timeInstanceVariables = timeConversionResult.getInstanceVariablesString(); return new Result(returnedType, - usedColumns.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY), - usedColumnArrays.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY), - userParams.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY), - rawFormulaString, cookedFormulaString, timeInstanceVariables); + usedColumns.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY), + usedColumnArrays.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY), + userParams.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY), + rawFormulaString, cookedFormulaString, timeInstanceVariables); } - public static DBLanguageParser.Result getCompiledFormula( - Map availableColumns, - DBTimeUtils.Result timeConversionResult, - Map otherVariables) throws Exception 
{ + public static DBLanguageParser.Result getCompiledFormula(Map availableColumns, + DBTimeUtils.Result timeConversionResult, + Map otherVariables) throws Exception { final Map possibleVariables = new HashMap<>(); possibleVariables.put("i", int.class); possibleVariables.put("ii", long.class); @@ -94,14 +89,13 @@ public static DBLanguageParser.Result getCompiledFormula( for (ColumnDefinition columnDefinition : availableColumns.values()) { final String columnSuffix = DhFormulaColumn.COLUMN_SUFFIX; - final Class dbArrayType = - DhFormulaColumn.getDbArrayType(columnDefinition.getDataType()); + final Class dbArrayType = DhFormulaColumn.getDbArrayType(columnDefinition.getDataType()); possibleVariables.put(columnDefinition.getName() + columnSuffix, dbArrayType); if (dbArrayType == DbArray.class) { possibleVariableParameterizedTypes.put(columnDefinition.getName() + columnSuffix, - new Class[] {columnDefinition.getDataType()}); + new Class[] {columnDefinition.getDataType()}); } } @@ -114,15 +108,14 @@ public static DBLanguageParser.Result getCompiledFormula( possibleVariables.put(columnDefinition.getName(), columnDefinition.getDataType()); final Class compType = columnDefinition.getComponentType(); if (compType != null && !compType.isPrimitive()) { - possibleVariableParameterizedTypes.put(columnDefinition.getName(), - new Class[] {compType}); + possibleVariableParameterizedTypes.put(columnDefinition.getName(), new Class[] {compType}); } } // log.debug().append("Expression (before) : ").append(formulaString).endl(); - log.debug().append("Expression (after time conversion) : ") - .append(timeConversionResult.getConvertedFormula()).endl(); + log.debug().append("Expression (after time conversion) : ").append(timeConversionResult.getConvertedFormula()) + .endl(); possibleVariables.putAll(timeConversionResult.getNewVariables()); if (otherVariables != null) { @@ -132,10 +125,9 @@ public static DBLanguageParser.Result getCompiledFormula( final Set classImports = new 
HashSet<>(QueryLibrary.getClassImports()); classImports.add(Index.class); classImports.add(WritableSource.class); - return new DBLanguageParser(timeConversionResult.getConvertedFormula(), - QueryLibrary.getPackageImports(), - classImports, QueryLibrary.getStaticImports(), possibleVariables, - possibleVariableParameterizedTypes).getResult(); + return new DBLanguageParser(timeConversionResult.getConvertedFormula(), QueryLibrary.getPackageImports(), + classImports, QueryLibrary.getStaticImports(), possibleVariables, possibleVariableParameterizedTypes) + .getResult(); } public static class Result { @@ -144,11 +136,9 @@ public static class Result { public final String cookedFormulaString; public final String timeInstanceVariables; - public Result(Class returnedType, String[] usedColumns, String[] usedArrays, - String[] usedParams, - String rawFormulaString, String cookedFormulaString, String timeInstanceVariables) { - this.sourceDescriptor = - new FormulaSourceDescriptor(returnedType, usedColumns, usedArrays, usedParams); + public Result(Class returnedType, String[] usedColumns, String[] usedArrays, String[] usedParams, + String rawFormulaString, String cookedFormulaString, String timeInstanceVariables) { + this.sourceDescriptor = new FormulaSourceDescriptor(returnedType, usedColumns, usedArrays, usedParams); this.rawFormulaString = rawFormulaString; this.cookedFormulaString = cookedFormulaString; this.timeInstanceVariables = timeInstanceVariables; diff --git a/DB/src/main/java/io/deephaven/db/v2/select/codegen/JavaKernelBuilder.java b/DB/src/main/java/io/deephaven/db/v2/select/codegen/JavaKernelBuilder.java index 507863d0b1a..a18557c046e 100644 --- a/DB/src/main/java/io/deephaven/db/v2/select/codegen/JavaKernelBuilder.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/codegen/JavaKernelBuilder.java @@ -30,11 +30,9 @@ public class JavaKernelBuilder { private static final String FORMULA_KERNEL_FACTORY_NAME = "__FORMULA_KERNEL_FACTORY"; - public static Result 
create(String cookedFormulaString, Class returnedType, - String timeInstanceVariables, - Map columns, Map arrays, Map params) { - final JavaKernelBuilder jkf = - new JavaKernelBuilder(cookedFormulaString, returnedType, timeInstanceVariables, + public static Result create(String cookedFormulaString, Class returnedType, String timeInstanceVariables, + Map columns, Map arrays, Map params) { + final JavaKernelBuilder jkf = new JavaKernelBuilder(cookedFormulaString, returnedType, timeInstanceVariables, columns, arrays, params); final String classBody = jkf.generateKernelClassBody(); final Class clazz = compileFormula(cookedFormulaString, classBody, "Formula"); @@ -42,8 +40,7 @@ public static Result create(String cookedFormulaString, Class returnedType, try { fkf = (FormulaKernelFactory) clazz.getField(FORMULA_KERNEL_FACTORY_NAME).get(null); } catch (ReflectiveOperationException e) { - throw new FormulaCompilationException( - "Formula compilation error for: " + cookedFormulaString, e); + throw new FormulaCompilationException("Formula compilation error for: " + cookedFormulaString, e); } return new Result(classBody, clazz, fkf); } @@ -65,9 +62,8 @@ public static Result create(String cookedFormulaString, Class returnedType, */ private final Map params; - private JavaKernelBuilder(String cookedFormulaString, Class returnedType, - String timeInstanceVariables, - Map columns, Map arrays, Map params) { + private JavaKernelBuilder(String cookedFormulaString, Class returnedType, String timeInstanceVariables, + Map columns, Map arrays, Map params) { this.cookedFormulaString = cookedFormulaString; this.returnedType = returnedType; this.timeInstanceVariables = timeInstanceVariables; @@ -83,80 +79,76 @@ private String generateKernelClassBody() { final TypeAnalyzer ta = TypeAnalyzer.create(returnedType); final CodeGenerator g = CodeGenerator.create( - QueryLibrary.getImportStatement(), "", - "public class $CLASSNAME$ implements [[FORMULA_KERNEL_INTERFACE_CANONICAL]]", - 
CodeGenerator.block( - generateFactoryLambda(), "", - CodeGenerator.repeated("instanceVar", "private final [[TYPE]] [[NAME]];"), - timeInstanceVariables, - generateKernelConstructor(), "", - generateMakeFillContext(), "", - generateApplyFormulaChunk(ta), "", - generateApplyFormulaPerItem(ta), "", - generateKernelContextClass(), "")); + QueryLibrary.getImportStatement(), "", + "public class $CLASSNAME$ implements [[FORMULA_KERNEL_INTERFACE_CANONICAL]]", CodeGenerator.block( + generateFactoryLambda(), "", + CodeGenerator.repeated("instanceVar", "private final [[TYPE]] [[NAME]];"), + timeInstanceVariables, + generateKernelConstructor(), "", + generateMakeFillContext(), "", + generateApplyFormulaChunk(ta), "", + generateApplyFormulaPerItem(ta), "", + generateKernelContextClass(), "")); g.replace("FORMULA_KERNEL_INTERFACE_CANONICAL", FormulaKernel.class.getCanonicalName()); visitFormulaParameters(null, - ca -> { - final CodeGenerator fc = g.instantiateNewRepeated("instanceVar"); - fc.replace("TYPE", ca.arrayTypeAsString); - fc.replace("NAME", ca.name); - return null; - }, - p -> { - final CodeGenerator fc = g.instantiateNewRepeated("instanceVar"); - fc.replace("TYPE", p.typeString); - fc.replace("NAME", p.name); - return null; - }); + ca -> { + final CodeGenerator fc = g.instantiateNewRepeated("instanceVar"); + fc.replace("TYPE", ca.arrayTypeAsString); + fc.replace("NAME", ca.name); + return null; + }, + p -> { + final CodeGenerator fc = g.instantiateNewRepeated("instanceVar"); + fc.replace("TYPE", p.typeString); + fc.replace("NAME", p.name); + return null; + }); return g.build(); } private CodeGenerator generateFactoryLambda() { final CodeGenerator g = CodeGenerator.create( - "public static final [[FORMULA_KERNEL_FACTORY_CANONICAL]] [[FORMULA_KERNEL_FACTORY_NAME]] = $CLASSNAME$::new;"); - g.replace("FORMULA_KERNEL_FACTORY_CANONICAL", - FormulaKernelFactory.class.getCanonicalName()); + "public static final [[FORMULA_KERNEL_FACTORY_CANONICAL]] 
[[FORMULA_KERNEL_FACTORY_NAME]] = $CLASSNAME$::new;"); + g.replace("FORMULA_KERNEL_FACTORY_CANONICAL", FormulaKernelFactory.class.getCanonicalName()); g.replace("FORMULA_KERNEL_FACTORY_NAME", FORMULA_KERNEL_FACTORY_NAME); return g.freeze(); } private CodeGenerator generateKernelConstructor() { final CodeGenerator g = CodeGenerator.create( - "public $CLASSNAME$([[DBARRAYBASE_CANONICAL]][] __dbArrays,", CodeGenerator.indent( - "[[PARAM_CANONICAL]][] __params)"), - CodeGenerator.block( - CodeGenerator.repeated("getDbArray", "[[NAME]] = ([[TYPE]])__dbArrays[[[INDEX]]];"), - CodeGenerator.repeated("getParam", - "[[NAME]] = ([[TYPE]])__params[[[INDEX]]].getValue();"))); + "public $CLASSNAME$([[DBARRAYBASE_CANONICAL]][] __dbArrays,", CodeGenerator.indent( + "[[PARAM_CANONICAL]][] __params)"), + CodeGenerator.block( + CodeGenerator.repeated("getDbArray", "[[NAME]] = ([[TYPE]])__dbArrays[[[INDEX]]];"), + CodeGenerator.repeated("getParam", "[[NAME]] = ([[TYPE]])__params[[[INDEX]]].getValue();"))); g.replace("DBARRAYBASE_CANONICAL", DbArrayBase.class.getCanonicalName()); g.replace("PARAM_CANONICAL", Param.class.getCanonicalName()); final int[] nextArrayIndex = {0}; final int[] nextParamIndex = {0}; visitFormulaParameters(null, - ap -> { - final CodeGenerator ag = g.instantiateNewRepeated("getDbArray"); - ag.replace("NAME", ap.name); - ag.replace("TYPE", ap.arrayTypeAsString); - ag.replace("INDEX", "" + nextArrayIndex[0]++); - return null; - }, - pp -> { - final CodeGenerator pg = g.instantiateNewRepeated("getParam"); - pg.replace("NAME", pp.name); - pg.replace("TYPE", pp.typeString); - pg.replace("INDEX", "" + nextParamIndex[0]++); - return null; - }); + ap -> { + final CodeGenerator ag = g.instantiateNewRepeated("getDbArray"); + ag.replace("NAME", ap.name); + ag.replace("TYPE", ap.arrayTypeAsString); + ag.replace("INDEX", "" + nextArrayIndex[0]++); + return null; + }, + pp -> { + final CodeGenerator pg = g.instantiateNewRepeated("getParam"); + pg.replace("NAME", pp.name); + 
pg.replace("TYPE", pp.typeString); + pg.replace("INDEX", "" + nextParamIndex[0]++); + return null; + }); return g.freeze(); } @NotNull private CodeGenerator generateKernelContextClass() { final CodeGenerator g = CodeGenerator.create( - "private class FormulaFillContext implements [[FILL_CONTEXT_CANONICAL]]", - CodeGenerator.block( - // constructor - "FormulaFillContext(int __chunkCapacity)", CodeGenerator.block())); + "private class FormulaFillContext implements [[FILL_CONTEXT_CANONICAL]]", CodeGenerator.block( + // constructor + "FormulaFillContext(int __chunkCapacity)", CodeGenerator.block())); g.replace("FILL_CONTEXT_CANONICAL", Formula.FillContext.class.getCanonicalName()); return g.freeze(); } @@ -164,84 +156,80 @@ private CodeGenerator generateKernelContextClass() { @NotNull private CodeGenerator generateMakeFillContext() { final CodeGenerator g = CodeGenerator.create( - "@Override", - "public FormulaFillContext makeFillContext(final int __chunkCapacity)", - CodeGenerator.block( - "return new FormulaFillContext(__chunkCapacity);")); + "@Override", + "public FormulaFillContext makeFillContext(final int __chunkCapacity)", CodeGenerator.block( + "return new FormulaFillContext(__chunkCapacity);")); return g.freeze(); } @NotNull private CodeGenerator generateApplyFormulaChunk(TypeAnalyzer ta) { final CodeGenerator g = CodeGenerator.create( - "@Override", - "public void applyFormulaChunk([[CANONICAL_FORMULA_FILLCONTEXT]] __context,", - CodeGenerator.indent( - "final WritableChunk __destination,", - "Chunk[] __sources)"), - CodeGenerator.block( - "final [[DEST_CHUNK_TYPE]] __typedDestination = __destination.[[DEST_AS_CHUNK_METHOD]]();", - CodeGenerator.repeated("getChunks", - "final [[CHUNK_TYPE]] [[CHUNK_NAME]] = __sources[[[SOURCE_INDEX]]].[[AS_CHUNK_METHOD]]();"), - "final int __size = __typedDestination.size();", - "for (int __chunkPos = 0; __chunkPos < __size; ++__chunkPos)", CodeGenerator.block( - CodeGenerator.repeated("setLocalVars", - "final [[VAR_TYPE]] 
[[VAR_NAME]] = [[VAR_INITIALIZER]];"), - "__typedDestination.set(__chunkPos, applyFormulaPerItem([[APPLY_FORMULA_ARGS]]));"))); + "@Override", + "public void applyFormulaChunk([[CANONICAL_FORMULA_FILLCONTEXT]] __context,", CodeGenerator.indent( + "final WritableChunk __destination,", + "Chunk[] __sources)"), + CodeGenerator.block( + "final [[DEST_CHUNK_TYPE]] __typedDestination = __destination.[[DEST_AS_CHUNK_METHOD]]();", + CodeGenerator.repeated("getChunks", + "final [[CHUNK_TYPE]] [[CHUNK_NAME]] = __sources[[[SOURCE_INDEX]]].[[AS_CHUNK_METHOD]]();"), + "final int __size = __typedDestination.size();", + "for (int __chunkPos = 0; __chunkPos < __size; ++__chunkPos)", CodeGenerator.block( + CodeGenerator.repeated("setLocalVars", + "final [[VAR_TYPE]] [[VAR_NAME]] = [[VAR_INITIALIZER]];"), + "__typedDestination.set(__chunkPos, applyFormulaPerItem([[APPLY_FORMULA_ARGS]]));"))); g.replace("CANONICAL_FORMULA_FILLCONTEXT", Formula.FillContext.class.getCanonicalName()); g.replace("DEST_CHUNK_TYPE", ta.writableChunkVariableType); g.replace("DEST_AS_CHUNK_METHOD", ta.asWritableChunkMethodName); final int[] chunkIndexHolder = {0}; final List args = visitFormulaParameters( - cs -> { - final TypeAnalyzer tm = TypeAnalyzer.create(cs.type); - final String chunkName = "__chunk__col__" + cs.name; - final CodeGenerator getChunks = g.instantiateNewRepeated("getChunks"); - getChunks.replace("CHUNK_NAME", chunkName); - getChunks.replace("SOURCE_INDEX", "" + chunkIndexHolder[0]++); - getChunks.replace("CHUNK_TYPE", tm.readChunkVariableType); - getChunks.replace("AS_CHUNK_METHOD", tm.asReadChunkMethodName); - return chunkName + ".get(__chunkPos)"; - }, - null, - null); + cs -> { + final TypeAnalyzer tm = TypeAnalyzer.create(cs.type); + final String chunkName = "__chunk__col__" + cs.name; + final CodeGenerator getChunks = g.instantiateNewRepeated("getChunks"); + getChunks.replace("CHUNK_NAME", chunkName); + getChunks.replace("SOURCE_INDEX", "" + chunkIndexHolder[0]++); + 
getChunks.replace("CHUNK_TYPE", tm.readChunkVariableType); + getChunks.replace("AS_CHUNK_METHOD", tm.asReadChunkMethodName); + return chunkName + ".get(__chunkPos)"; + }, + null, + null); g.replace("APPLY_FORMULA_ARGS", makeCommaSeparatedList(args)); return g.freeze(); } private CodeGenerator generateApplyFormulaPerItem(final TypeAnalyzer ta) { final CodeGenerator g = CodeGenerator.create( - "private [[RETURN_TYPE]] applyFormulaPerItem([[ARGS]])", CodeGenerator.block( - "try", CodeGenerator.block( - "return [[FORMULA_STRING]];"), - CodeGenerator.samelineBlock("catch (java.lang.Exception __e)", - "throw new [[EXCEPTION_TYPE]](\"In formula: \" + [[JOINED_FORMULA_STRING]], __e);"))); + "private [[RETURN_TYPE]] applyFormulaPerItem([[ARGS]])", CodeGenerator.block( + "try", CodeGenerator.block( + "return [[FORMULA_STRING]];"), + CodeGenerator.samelineBlock("catch (java.lang.Exception __e)", + "throw new [[EXCEPTION_TYPE]](\"In formula: \" + [[JOINED_FORMULA_STRING]], __e);"))); g.replace("RETURN_TYPE", ta.typeString); final List args = visitFormulaParameters( - n -> n.typeString + " " + n.name, - null, - null); + n -> n.typeString + " " + n.name, + null, + null); g.replace("ARGS", makeCommaSeparatedList(args)); g.replace("FORMULA_STRING", ta.wrapWithCastIfNecessary(cookedFormulaString)); - final String joinedFormulaString = - CompilerTools.createEscapedJoinedString(cookedFormulaString); + final String joinedFormulaString = CompilerTools.createEscapedJoinedString(cookedFormulaString); g.replace("JOINED_FORMULA_STRING", joinedFormulaString); g.replace("EXCEPTION_TYPE", FormulaEvaluationException.class.getCanonicalName()); return g.freeze(); } private List visitFormulaParameters( - Function chunkLambda, - Function columnArrayLambda, - Function paramLambda) { + Function chunkLambda, + Function columnArrayLambda, + Function paramLambda) { final List results = new ArrayList<>(); if (chunkLambda != null) { for (Map.Entry entry : columns.entrySet()) { final String name = 
entry.getKey(); final RichType rt = entry.getValue(); - final ChunkParameter cp = - new ChunkParameter(name, rt.getBareType(), rt.getCanonicalName()); + final ChunkParameter cp = new ChunkParameter(name, rt.getBareType(), rt.getCanonicalName()); addIfNotNull(results, chunkLambda.apply(cp)); } } @@ -252,10 +240,8 @@ private List visitFormulaParameters( final Class dataType = entry.getValue(); final Class dbArrayType = DhFormulaColumn.getDbArrayType(dataType); final String dbArrayTypeAsString = dbArrayType.getCanonicalName() + - (TypeUtils.isConvertibleToPrimitive(dataType) ? "" - : "<" + dataType.getCanonicalName() + ">"); - final ColumnArrayParameter cap = - new ColumnArrayParameter(name, dbArrayType, dbArrayTypeAsString); + (TypeUtils.isConvertibleToPrimitive(dataType) ? "" : "<" + dataType.getCanonicalName() + ">"); + final ColumnArrayParameter cap = new ColumnArrayParameter(name, dbArrayType, dbArrayTypeAsString); addIfNotNull(results, columnArrayLambda.apply(cap)); } } @@ -271,20 +257,15 @@ private List visitFormulaParameters( return results; } - private static Class compileFormula(final String what, final String classBody, - final String className) { - // System.out.printf("compileFormula: formulaString is %s. Code is...%n%s%n", what, - // classBody); + private static Class compileFormula(final String what, final String classBody, final String className) { + // System.out.printf("compileFormula: formulaString is %s. Code is...%n%s%n", what, classBody); try (final QueryPerformanceNugget nugget = - QueryPerformanceRecorder.getInstance().getNugget("Compile:" + what)) { - // Compilation needs to take place with elevated privileges, but the created object - // should not have them. - return AccessController - .doPrivileged((PrivilegedExceptionAction) () -> CompilerTools + QueryPerformanceRecorder.getInstance().getNugget("Compile:" + what)) { + // Compilation needs to take place with elevated privileges, but the created object should not have them. 
+ return AccessController.doPrivileged((PrivilegedExceptionAction) () -> CompilerTools .compile(className, classBody, CompilerTools.FORMULA_PREFIX)); } catch (PrivilegedActionException pae) { - throw new FormulaCompilationException("Formula compilation error for: " + what, - pae.getException()); + throw new FormulaCompilationException("Formula compilation error for: " + what, pae.getException()); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/select/codegen/RichType.java b/DB/src/main/java/io/deephaven/db/v2/select/codegen/RichType.java index 945eb3e162a..3e39e95f880 100644 --- a/DB/src/main/java/io/deephaven/db/v2/select/codegen/RichType.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/codegen/RichType.java @@ -39,8 +39,7 @@ public String getCanonicalName() { sb.append(bareType.getCanonicalName()); if (isGeneric) { sb.append('<'); - sb.append(IterableUtils.makeSeparatedList(Arrays.asList(typeAttributes), ", ", - Class::getCanonicalName)); + sb.append(IterableUtils.makeSeparatedList(Arrays.asList(typeAttributes), ", ", Class::getCanonicalName)); sb.append('>'); } return sb.toString(); diff --git a/DB/src/main/java/io/deephaven/db/v2/select/formula/FormulaFactory.java b/DB/src/main/java/io/deephaven/db/v2/select/formula/FormulaFactory.java index 3d47512ac26..48956375237 100644 --- a/DB/src/main/java/io/deephaven/db/v2/select/formula/FormulaFactory.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/formula/FormulaFactory.java @@ -8,7 +8,6 @@ import java.util.Map; public interface FormulaFactory { - Formula createFormula(Index index, boolean initLazyMap, - Map columnsToData, - Param... params); + Formula createFormula(Index index, boolean initLazyMap, Map columnsToData, + Param... 
params); } diff --git a/DB/src/main/java/io/deephaven/db/v2/select/formula/FormulaKernel.java b/DB/src/main/java/io/deephaven/db/v2/select/formula/FormulaKernel.java index 25292bd9bc8..90d0deba722 100644 --- a/DB/src/main/java/io/deephaven/db/v2/select/formula/FormulaKernel.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/formula/FormulaKernel.java @@ -8,7 +8,6 @@ public interface FormulaKernel { Formula.FillContext makeFillContext(final int __chunkCapacity); - void applyFormulaChunk(Formula.FillContext __context, - final WritableChunk __destination, - Chunk[] __sources); + void applyFormulaChunk(Formula.FillContext __context, final WritableChunk __destination, + Chunk[] __sources); } diff --git a/DB/src/main/java/io/deephaven/db/v2/select/formula/FormulaKernelAdapter.java b/DB/src/main/java/io/deephaven/db/v2/select/formula/FormulaKernelAdapter.java index d98f2e9f754..a9949494fa2 100644 --- a/DB/src/main/java/io/deephaven/db/v2/select/formula/FormulaKernelAdapter.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/formula/FormulaKernelAdapter.java @@ -20,8 +20,8 @@ public class FormulaKernelAdapter extends io.deephaven.db.v2.select.Formula { private final GetHandler getHandler; public FormulaKernelAdapter(final Index index, final FormulaSourceDescriptor sourceDescriptor, - final Map columnSources, - final FormulaKernel kernel) { + final Map columnSources, + final FormulaKernel kernel) { super(index); this.sourceDescriptor = sourceDescriptor; this.columnSources = columnSources; @@ -32,25 +32,19 @@ public FormulaKernelAdapter(final Index index, final FormulaSourceDescriptor sou } this.chunkType = ChunkType.fromElementType(rt); if (rt == byte.class) { - getHandler = - (key, prev) -> io.deephaven.util.type.TypeUtils.box(handleGetByte(key, prev)); + getHandler = (key, prev) -> io.deephaven.util.type.TypeUtils.box(handleGetByte(key, prev)); } else if (rt == Boolean.class) { getHandler = this::handleGetBoolean; } else if (rt == char.class) { - getHandler = - (key, 
prev) -> io.deephaven.util.type.TypeUtils.box(handleGetChar(key, prev)); + getHandler = (key, prev) -> io.deephaven.util.type.TypeUtils.box(handleGetChar(key, prev)); } else if (rt == double.class) { - getHandler = - (key, prev) -> io.deephaven.util.type.TypeUtils.box(handleGetDouble(key, prev)); + getHandler = (key, prev) -> io.deephaven.util.type.TypeUtils.box(handleGetDouble(key, prev)); } else if (rt == float.class) { - getHandler = - (key, prev) -> io.deephaven.util.type.TypeUtils.box(handleGetFloat(key, prev)); + getHandler = (key, prev) -> io.deephaven.util.type.TypeUtils.box(handleGetFloat(key, prev)); } else if (rt == int.class) { - getHandler = - (key, prev) -> io.deephaven.util.type.TypeUtils.box(handleGetInt(key, prev)); + getHandler = (key, prev) -> io.deephaven.util.type.TypeUtils.box(handleGetInt(key, prev)); } else if (rt == long.class) { - getHandler = - (key, prev) -> io.deephaven.util.type.TypeUtils.box(handleGetLong(key, prev)); + getHandler = (key, prev) -> io.deephaven.util.type.TypeUtils.box(handleGetLong(key, prev)); } else if (rt == short.class) { getHandler = (key, prev) -> TypeUtils.box(handleGetShort(key, prev)); } else { @@ -148,72 +142,63 @@ public short getPrevShort(final long k) { } private Object handleGetObject(final long k, boolean usePrev) { - try (final WritableObjectChunk __dest = - WritableObjectChunk.makeWritableChunk(1)) { + try (final WritableObjectChunk __dest = WritableObjectChunk.makeWritableChunk(1)) { commonGetLogic(__dest, k, usePrev); return __dest.get(0); } } private Boolean handleGetBoolean(final long k, boolean usePrev) { - try (final WritableObjectChunk __dest = - WritableObjectChunk.makeWritableChunk(1)) { + try (final WritableObjectChunk __dest = WritableObjectChunk.makeWritableChunk(1)) { commonGetLogic(__dest, k, usePrev); return __dest.get(0); } } private byte handleGetByte(final long k, boolean usePrev) { - try (final WritableByteChunk __dest = - WritableByteChunk.makeWritableChunk(1)) { + try (final 
WritableByteChunk __dest = WritableByteChunk.makeWritableChunk(1)) { commonGetLogic(__dest, k, usePrev); return __dest.get(0); } } private char handleGetChar(final long k, boolean usePrev) { - try (final WritableCharChunk __dest = - WritableCharChunk.makeWritableChunk(1)) { + try (final WritableCharChunk __dest = WritableCharChunk.makeWritableChunk(1)) { commonGetLogic(__dest, k, usePrev); return __dest.get(0); } } private double handleGetDouble(final long k, boolean usePrev) { - try (final WritableDoubleChunk __dest = - WritableDoubleChunk.makeWritableChunk(1)) { + try (final WritableDoubleChunk __dest = WritableDoubleChunk.makeWritableChunk(1)) { commonGetLogic(__dest, k, usePrev); return __dest.get(0); } } private float handleGetFloat(final long k, boolean usePrev) { - try (final WritableFloatChunk __dest = - WritableFloatChunk.makeWritableChunk(1)) { + try (final WritableFloatChunk __dest = WritableFloatChunk.makeWritableChunk(1)) { commonGetLogic(__dest, k, usePrev); return __dest.get(0); } } private int handleGetInt(final long k, boolean usePrev) { - try (final WritableIntChunk __dest = - WritableIntChunk.makeWritableChunk(1)) { + try (final WritableIntChunk __dest = WritableIntChunk.makeWritableChunk(1)) { commonGetLogic(__dest, k, usePrev); return __dest.get(0); } } private long handleGetLong(final long k, boolean usePrev) { - try (final WritableLongChunk __dest = - WritableLongChunk.makeWritableChunk(1)) { + try (final WritableLongChunk __dest = WritableLongChunk.makeWritableChunk(1)) { commonGetLogic(__dest, k, usePrev); return __dest.get(0); } } private short handleGetShort(final long k, boolean usePrev) { - try (final WritableShortChunk __dest = - WritableShortChunk.makeWritableChunk(1)) { + try (final WritableShortChunk __dest = WritableShortChunk.makeWritableChunk(1)) { commonGetLogic(__dest, k, usePrev); return __dest.get(0); } @@ -234,21 +219,21 @@ protected ChunkType getChunkType() { @Override public void fillChunk(@NotNull final FillContext 
__context, - @NotNull final WritableChunk __destination, - @NotNull final OrderedKeys __orderedKeys) { + @NotNull final WritableChunk __destination, + @NotNull final OrderedKeys __orderedKeys) { fillChunkHelper(__context, __destination, __orderedKeys, false, true); } @Override public void fillPrevChunk(@NotNull final FillContext __context, - @NotNull final WritableChunk __destination, - @NotNull final OrderedKeys __orderedKeys) { + @NotNull final WritableChunk __destination, + @NotNull final OrderedKeys __orderedKeys) { fillChunkHelper(__context, __destination, __orderedKeys, true, true); } private void fillChunkHelper(@NotNull final FillContext __context, - @NotNull final WritableChunk __destination, - @NotNull final OrderedKeys __orderedKeys, final boolean usePrev, final boolean lookupI) { + @NotNull final WritableChunk __destination, + @NotNull final OrderedKeys __orderedKeys, final boolean usePrev, final boolean lookupI) { final int orderedKeysSize = __orderedKeys.intSize(); __destination.setSize(orderedKeysSize); // Shortcut if __orderedKeys is empty @@ -256,8 +241,7 @@ private void fillChunkHelper(@NotNull final FillContext __context, return; } final AdapterContext __typedContext = (AdapterContext) __context; - final Chunk[] sourceChunks = - new Chunk[sourceDescriptor.sources.length]; + final Chunk[] sourceChunks = new Chunk[sourceDescriptor.sources.length]; try (final OrderedKeys flat = Index.FACTORY.getFlatIndex(__orderedKeys.size())) { for (int ii = 0; ii < sourceDescriptor.sources.length; ++ii) { final String name = sourceDescriptor.sources[ii]; @@ -267,8 +251,7 @@ private void fillChunkHelper(@NotNull final FillContext __context, // Potentially repeated work w.r.t. "ii". 
__typedContext.iChunk.setSize(0); __index.invert(__orderedKeys.asIndex()).forAllLongs(longVal -> { - final int i = LongSizedDataStructure - .intSize("FormulaNubbin i usage", longVal); + final int i = LongSizedDataStructure.intSize("FormulaNubbin i usage", longVal); __typedContext.iChunk.add(i); }); } else { @@ -283,8 +266,7 @@ private void fillChunkHelper(@NotNull final FillContext __context, } case "ii": { if (lookupI) { - __index.invert(__orderedKeys.asIndex()) - .fillKeyIndicesChunk(__typedContext.iiChunk); + __index.invert(__orderedKeys.asIndex()).fillKeyIndicesChunk(__typedContext.iiChunk); } else { // sequential ii for (int pos = 0; pos < orderedKeysSize; ++pos) { @@ -303,8 +285,8 @@ private void fillChunkHelper(@NotNull final FillContext __context, default: { final ColumnSource cs = columnSources.get(name); final ColumnSource.GetContext ctx = __typedContext.sourceContexts[ii]; - sourceChunks[ii] = usePrev ? cs.getPrevChunk(ctx, __orderedKeys) - : cs.getChunk(ctx, __orderedKeys); + sourceChunks[ii] = + usePrev ? cs.getPrevChunk(ctx, __orderedKeys) : cs.getChunk(ctx, __orderedKeys); } } } @@ -335,8 +317,7 @@ public AdapterContext makeFillContext(final int chunkCapacity) { } // Make contexts -- we leave nulls in the slots where i, ii, or k would be. 
- final ColumnSource.GetContext[] sourceContexts = - new ColumnSource.GetContext[sources.length]; + final ColumnSource.GetContext[] sourceContexts = new ColumnSource.GetContext[sources.length]; for (int ii = 0; ii < sources.length; ++ii) { final String name = sources[ii]; if (name.equals("i") || name.equals("ii") || name.equals("k")) { @@ -357,10 +338,10 @@ private static class AdapterContext implements FillContext { final FillContext kernelContext; AdapterContext(WritableIntChunk iChunk, - WritableLongChunk iiChunk, - WritableLongChunk kChunk, - ColumnSource.GetContext[] sourceContexts, - FillContext kernelContext) { + WritableLongChunk iiChunk, + WritableLongChunk kChunk, + ColumnSource.GetContext[] sourceContexts, + FillContext kernelContext) { this.iChunk = iChunk; this.iiChunk = iiChunk; this.kChunk = kChunk; diff --git a/DB/src/main/java/io/deephaven/db/v2/select/formula/FormulaSourceDescriptor.java b/DB/src/main/java/io/deephaven/db/v2/select/formula/FormulaSourceDescriptor.java index 5e7c25c2275..45b9131b151 100644 --- a/DB/src/main/java/io/deephaven/db/v2/select/formula/FormulaSourceDescriptor.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/formula/FormulaSourceDescriptor.java @@ -6,8 +6,7 @@ public class FormulaSourceDescriptor { public final String[] arrays; public final String[] params; - public FormulaSourceDescriptor(Class returnType, String[] sources, String[] arrays, - String[] params) { + public FormulaSourceDescriptor(Class returnType, String[] sources, String[] arrays, String[] params) { this.returnType = returnType; this.sources = sources; this.arrays = arrays; diff --git a/DB/src/main/java/io/deephaven/db/v2/select/python/ArgumentsChunked.java b/DB/src/main/java/io/deephaven/db/v2/select/python/ArgumentsChunked.java index 639393c0a15..5ed0fb0f8b4 100644 --- a/DB/src/main/java/io/deephaven/db/v2/select/python/ArgumentsChunked.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/python/ArgumentsChunked.java @@ -113,8 +113,7 @@ public void 
visit(DoubleChunk chunk) { @Override public void visit(ObjectChunk chunk) { - // this is LESS THAN IDEAL - it would be much better if ObjectChunk would be able to - // return + // this is LESS THAN IDEAL - it would be much better if ObjectChunk would be able to return // the array type arrayType = Object[].class; final Object[] out = new Object[chunk.size()]; diff --git a/DB/src/main/java/io/deephaven/db/v2/select/python/ArgumentsSingular.java b/DB/src/main/java/io/deephaven/db/v2/select/python/ArgumentsSingular.java index 2611d85b6e7..7b8fa7a1860 100644 --- a/DB/src/main/java/io/deephaven/db/v2/select/python/ArgumentsSingular.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/python/ArgumentsSingular.java @@ -12,16 +12,16 @@ class ArgumentsSingular { static Class[] buildParamTypes(Chunk[] __sources) { return Stream.of(__sources) - .map(c -> c.walk(new ChunkToSingularType<>())) - .map(ChunkToSingularType::getOut) - .toArray(Class[]::new); + .map(c -> c.walk(new ChunkToSingularType<>())) + .map(ChunkToSingularType::getOut) + .toArray(Class[]::new); } static Object[] buildArguments(Chunk[] __sources, int index) { return Stream.of(__sources) - .map(c -> c.walk(new ChunkIndexToObject<>(index))) - .map(ChunkIndexToObject::getOut) - .toArray(); + .map(c -> c.walk(new ChunkIndexToObject<>(index))) + .map(ChunkIndexToObject::getOut) + .toArray(); } private static class ChunkIndexToObject implements Visitor { @@ -133,8 +133,7 @@ public void visit(DoubleChunk chunk) { @Override public void visit(ObjectChunk chunk) { - // this is LESS THAN IDEAL - it would be much better if ObjectChunk would be able to - // return + // this is LESS THAN IDEAL - it would be much better if ObjectChunk would be able to return // the item type out = Object.class; } diff --git a/DB/src/main/java/io/deephaven/db/v2/select/python/DeephavenCompatibleFunction.java b/DB/src/main/java/io/deephaven/db/v2/select/python/DeephavenCompatibleFunction.java index 9a8461e1d0a..75483851332 100644 --- 
a/DB/src/main/java/io/deephaven/db/v2/select/python/DeephavenCompatibleFunction.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/python/DeephavenCompatibleFunction.java @@ -10,27 +10,25 @@ import java.util.Objects; /** - * A Deephaven-compatible functions holds a native python function with associated typing - * information, used to help implement {@link io.deephaven.db.v2.select.AbstractConditionFilter} and - * {@link FormulaColumnPython}. + * A Deephaven-compatible functions holds a native python function with associated typing information, used to help + * implement {@link io.deephaven.db.v2.select.AbstractConditionFilter} and {@link FormulaColumnPython}. */ public class DeephavenCompatibleFunction { @SuppressWarnings("unused") // called from python public static DeephavenCompatibleFunction create( - PyObject function, + PyObject function, - // todo: python can't convert from java type to Class (ie, - // java_func_on_type(jpy.get_type('...'))) - // but it *will* match on object, and unwrap the actual java type... - Object returnedType, + // todo: python can't convert from java type to Class (ie, java_func_on_type(jpy.get_type('...'))) + // but it *will* match on object, and unwrap the actual java type... + Object returnedType, - // todo: python can't convert from list of strings to List - // but it can convert from list of strings to String[]... - String[] columnNames, - boolean isVectorized) { - return new DeephavenCompatibleFunction(function, (Class) returnedType, - Arrays.asList(columnNames), isVectorized); + // todo: python can't convert from list of strings to List + // but it can convert from list of strings to String[]... 
+ String[] columnNames, + boolean isVectorized) { + return new DeephavenCompatibleFunction(function, (Class) returnedType, Arrays.asList(columnNames), + isVectorized); } private final PyObject function; @@ -39,10 +37,10 @@ public static DeephavenCompatibleFunction create( private final boolean isVectorized; private DeephavenCompatibleFunction( - PyObject function, - Class returnedType, - List columnNames, - boolean isVectorized) { + PyObject function, + Class returnedType, + List columnNames, + boolean isVectorized) { this.function = Objects.requireNonNull(function, "function"); this.returnedType = Objects.requireNonNull(returnedType, "returnedType"); this.columnNames = Objects.requireNonNull(columnNames, "columnNames"); @@ -51,16 +49,15 @@ private DeephavenCompatibleFunction( public FormulaKernel toFormulaKernel() { return isVectorized ? new FormulaKernelPythonChunkedFunction(function) - : new io.deephaven.db.v2.select.python.FormulaKernelPythonSingularFunction(function); + : new io.deephaven.db.v2.select.python.FormulaKernelPythonSingularFunction(function); } public FilterKernel toFilterKernel() { if (returnedType != boolean.class) { - throw new IllegalStateException( - "FilterKernel functions must be annotated with a boolean return type"); + throw new IllegalStateException("FilterKernel functions must be annotated with a boolean return type"); } return isVectorized ? 
new FilterKernelPythonChunkedFunction(function) - : new FilterKernelPythonSingularFunction(function); + : new FilterKernelPythonSingularFunction(function); } public PyObject getFunction() { diff --git a/DB/src/main/java/io/deephaven/db/v2/select/python/FilterKernelPythonChunkedFunction.java b/DB/src/main/java/io/deephaven/db/v2/select/python/FilterKernelPythonChunkedFunction.java index 27e59f6f33a..9333e5cefa1 100644 --- a/DB/src/main/java/io/deephaven/db/v2/select/python/FilterKernelPythonChunkedFunction.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/python/FilterKernelPythonChunkedFunction.java @@ -9,8 +9,7 @@ import java.util.Objects; /** - * A python filter kernel which is implemented by passing the chunks as arrays into the python - * function. + * A python filter kernel which is implemented by passing the chunks as arrays into the python function. * * @see io.deephaven.db.v2.select.python.FilterKernelPythonSingularFunction */ @@ -32,17 +31,17 @@ public Context getContext(int maxChunkSize) { @Override public LongChunk filter( - Context context, - LongChunk indices, - Chunk... inputChunks) { + Context context, + LongChunk indices, + Chunk... 
inputChunks) { final int size = indices.size(); final io.deephaven.db.v2.select.python.ArgumentsChunked arguments = - io.deephaven.db.v2.select.python.ArgumentsChunked.buildArguments(inputChunks); + io.deephaven.db.v2.select.python.ArgumentsChunked.buildArguments(inputChunks); final boolean[] results = function - .call(boolean[].class, CALL_METHOD, arguments.getParamTypes(), arguments.getParams()); + .call(boolean[].class, CALL_METHOD, arguments.getParamTypes(), arguments.getParams()); if (size != results.length) { throw new IllegalStateException( - "FilterKernelPythonChunkedFunction returned results are not the proper size"); + "FilterKernelPythonChunkedFunction returned results are not the proper size"); } context.resultChunk.setSize(0); for (int i = 0; i < size; ++i) { diff --git a/DB/src/main/java/io/deephaven/db/v2/select/python/FilterKernelPythonSingularFunction.java b/DB/src/main/java/io/deephaven/db/v2/select/python/FilterKernelPythonSingularFunction.java index 41a1c14affa..43e0fa7a368 100644 --- a/DB/src/main/java/io/deephaven/db/v2/select/python/FilterKernelPythonSingularFunction.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/python/FilterKernelPythonSingularFunction.java @@ -9,8 +9,8 @@ import java.util.Objects; /** - * A python filter kernel which is implemented by iterating over the input chunks and calling the - * python function N times. + * A python filter kernel which is implemented by iterating over the input chunks and calling the python function N + * times. * * @see FilterKernelPythonChunkedFunction */ @@ -31,9 +31,9 @@ public Context getContext(int maxChunkSize) { @Override public LongChunk filter( - Context context, - LongChunk indices, - Chunk... inputChunks) { + Context context, + LongChunk indices, + Chunk... 
inputChunks) { final int size = indices.size(); final Class[] paramTypes = ArgumentsSingular.buildParamTypes(inputChunks); context.resultChunk.setSize(0); diff --git a/DB/src/main/java/io/deephaven/db/v2/select/python/FormulaColumnPython.java b/DB/src/main/java/io/deephaven/db/v2/select/python/FormulaColumnPython.java index c4d84b7a1bf..e0062516d6f 100644 --- a/DB/src/main/java/io/deephaven/db/v2/select/python/FormulaColumnPython.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/python/FormulaColumnPython.java @@ -25,7 +25,7 @@ public class FormulaColumnPython extends AbstractFormulaColumn implements Formul @SuppressWarnings("unused") // called from python public static FormulaColumnPython create(String columnName, - io.deephaven.db.v2.select.python.DeephavenCompatibleFunction dcf) { + io.deephaven.db.v2.select.python.DeephavenCompatibleFunction dcf) { return new FormulaColumnPython(columnName, dcf); } @@ -33,8 +33,7 @@ public static FormulaColumnPython create(String columnName, private boolean initialized; - private FormulaColumnPython(String columnName, - io.deephaven.db.v2.select.python.DeephavenCompatibleFunction dcf) { + private FormulaColumnPython(String columnName, io.deephaven.db.v2.select.python.DeephavenCompatibleFunction dcf) { super(columnName, "", true); this.dcf = Objects.requireNonNull(dcf); } @@ -67,10 +66,10 @@ protected final FormulaSourceDescriptor getSourceDescriptor() { throw new IllegalStateException("Must be initialized first"); } return new FormulaSourceDescriptor( - returnedType, - dcf.getColumnNames().toArray(new String[0]), - ZERO_LENGTH_STRING_ARRAY, - ZERO_LENGTH_STRING_ARRAY); + returnedType, + dcf.getColumnNames().toArray(new String[0]), + ZERO_LENGTH_STRING_ARRAY, + ZERO_LENGTH_STRING_ARRAY); } @Override diff --git a/DB/src/main/java/io/deephaven/db/v2/select/python/FormulaKernelPythonChunkedFunction.java b/DB/src/main/java/io/deephaven/db/v2/select/python/FormulaKernelPythonChunkedFunction.java index 1f04585bed8..4706610a657 
100644 --- a/DB/src/main/java/io/deephaven/db/v2/select/python/FormulaKernelPythonChunkedFunction.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/python/FormulaKernelPythonChunkedFunction.java @@ -10,8 +10,7 @@ import java.util.Objects; /** - * A python formula kernel which is implemented by passing the chunks as arrays into the python - * function. + * A python formula kernel which is implemented by passing the chunks as arrays into the python function. * * @see io.deephaven.db.v2.select.python.FormulaKernelPythonSingularFunction */ @@ -28,112 +27,112 @@ class FormulaKernelPythonChunkedFunction extends FormulaKernelTypedBase implemen @Override public void applyFormulaChunk( - FillContext __context, - WritableByteChunk __destination, - Chunk[] __sources) { + FillContext __context, + WritableByteChunk __destination, + Chunk[] __sources) { final io.deephaven.db.v2.select.python.ArgumentsChunked args = - io.deephaven.db.v2.select.python.ArgumentsChunked.buildArguments(__sources); + io.deephaven.db.v2.select.python.ArgumentsChunked.buildArguments(__sources); final byte[] output = function - .call(byte[].class, CALL_METHOD, args.getParamTypes(), args.getParams()); + .call(byte[].class, CALL_METHOD, args.getParamTypes(), args.getParams()); __destination.copyFromTypedArray(output, 0, 0, output.length); } @Override public void applyFormulaChunk( - FillContext __context, - WritableBooleanChunk __destination, - Chunk[] __sources) { + FillContext __context, + WritableBooleanChunk __destination, + Chunk[] __sources) { final io.deephaven.db.v2.select.python.ArgumentsChunked args = - io.deephaven.db.v2.select.python.ArgumentsChunked.buildArguments(__sources); + io.deephaven.db.v2.select.python.ArgumentsChunked.buildArguments(__sources); final boolean[] output = function - .call(boolean[].class, CALL_METHOD, args.getParamTypes(), args.getParams()); + .call(boolean[].class, CALL_METHOD, args.getParamTypes(), args.getParams()); __destination.copyFromTypedArray(output, 0, 0, 
output.length); } @Override public void applyFormulaChunk( - FillContext __context, - WritableCharChunk __destination, - Chunk[] __sources) { + FillContext __context, + WritableCharChunk __destination, + Chunk[] __sources) { final io.deephaven.db.v2.select.python.ArgumentsChunked args = - io.deephaven.db.v2.select.python.ArgumentsChunked.buildArguments(__sources); + io.deephaven.db.v2.select.python.ArgumentsChunked.buildArguments(__sources); final char[] output = function - .call(char[].class, CALL_METHOD, args.getParamTypes(), args.getParams()); + .call(char[].class, CALL_METHOD, args.getParamTypes(), args.getParams()); __destination.copyFromTypedArray(output, 0, 0, output.length); } @Override public void applyFormulaChunk( - FillContext __context, - WritableShortChunk __destination, - Chunk[] __sources) { + FillContext __context, + WritableShortChunk __destination, + Chunk[] __sources) { final io.deephaven.db.v2.select.python.ArgumentsChunked args = - io.deephaven.db.v2.select.python.ArgumentsChunked.buildArguments(__sources); + io.deephaven.db.v2.select.python.ArgumentsChunked.buildArguments(__sources); final short[] output = function - .call(short[].class, CALL_METHOD, args.getParamTypes(), args.getParams()); + .call(short[].class, CALL_METHOD, args.getParamTypes(), args.getParams()); __destination.copyFromTypedArray(output, 0, 0, output.length); } @Override public void applyFormulaChunk( - FillContext __context, - WritableIntChunk __destination, - Chunk[] __sources) { + FillContext __context, + WritableIntChunk __destination, + Chunk[] __sources) { final io.deephaven.db.v2.select.python.ArgumentsChunked args = - io.deephaven.db.v2.select.python.ArgumentsChunked.buildArguments(__sources); + io.deephaven.db.v2.select.python.ArgumentsChunked.buildArguments(__sources); final int[] output = function - .call(int[].class, CALL_METHOD, args.getParamTypes(), args.getParams()); + .call(int[].class, CALL_METHOD, args.getParamTypes(), args.getParams()); 
__destination.copyFromTypedArray(output, 0, 0, output.length); } @Override public void applyFormulaChunk( - FillContext __context, - WritableLongChunk __destination, - Chunk[] __sources) { + FillContext __context, + WritableLongChunk __destination, + Chunk[] __sources) { final io.deephaven.db.v2.select.python.ArgumentsChunked args = - io.deephaven.db.v2.select.python.ArgumentsChunked.buildArguments(__sources); + io.deephaven.db.v2.select.python.ArgumentsChunked.buildArguments(__sources); final long[] output = function - .call(long[].class, CALL_METHOD, args.getParamTypes(), args.getParams()); + .call(long[].class, CALL_METHOD, args.getParamTypes(), args.getParams()); __destination.copyFromTypedArray(output, 0, 0, output.length); } @Override public void applyFormulaChunk( - FillContext __context, - WritableFloatChunk __destination, - Chunk[] __sources) { + FillContext __context, + WritableFloatChunk __destination, + Chunk[] __sources) { final io.deephaven.db.v2.select.python.ArgumentsChunked args = - io.deephaven.db.v2.select.python.ArgumentsChunked.buildArguments(__sources); + io.deephaven.db.v2.select.python.ArgumentsChunked.buildArguments(__sources); final float[] output = function - .call(float[].class, CALL_METHOD, args.getParamTypes(), args.getParams()); + .call(float[].class, CALL_METHOD, args.getParamTypes(), args.getParams()); __destination.copyFromTypedArray(output, 0, 0, output.length); } @Override public void applyFormulaChunk( - FillContext __context, - WritableDoubleChunk __destination, - Chunk[] __sources) { + FillContext __context, + WritableDoubleChunk __destination, + Chunk[] __sources) { final io.deephaven.db.v2.select.python.ArgumentsChunked args = - io.deephaven.db.v2.select.python.ArgumentsChunked.buildArguments(__sources); + io.deephaven.db.v2.select.python.ArgumentsChunked.buildArguments(__sources); final double[] output = function - .call(double[].class, CALL_METHOD, args.getParamTypes(), args.getParams()); + .call(double[].class, 
CALL_METHOD, args.getParamTypes(), args.getParams()); __destination.copyFromTypedArray(output, 0, 0, output.length); } @Override public void applyFormulaChunk( - FillContext __context, - WritableObjectChunk __destination, - Chunk[] __sources) { + FillContext __context, + WritableObjectChunk __destination, + Chunk[] __sources) { final io.deephaven.db.v2.select.python.ArgumentsChunked args = - io.deephaven.db.v2.select.python.ArgumentsChunked.buildArguments(__sources); + io.deephaven.db.v2.select.python.ArgumentsChunked.buildArguments(__sources); // this is LESS THAN IDEAL - it would be much better if ObjectChunk would be able to return // the array type final Object[] output = function - .call(Object[].class, CALL_METHOD, args.getParamTypes(), args.getParams()); + .call(Object[].class, CALL_METHOD, args.getParamTypes(), args.getParams()); // noinspection unchecked __destination.copyFromTypedArray((T[]) output, 0, 0, output.length); diff --git a/DB/src/main/java/io/deephaven/db/v2/select/python/FormulaKernelPythonSingularFunction.java b/DB/src/main/java/io/deephaven/db/v2/select/python/FormulaKernelPythonSingularFunction.java index c0ed3c10273..8a57521452f 100644 --- a/DB/src/main/java/io/deephaven/db/v2/select/python/FormulaKernelPythonSingularFunction.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/python/FormulaKernelPythonSingularFunction.java @@ -10,8 +10,8 @@ import java.util.Objects; /** - * A python formula kernel which is implemented by iterating over the input chunks and calling the - * python function N times. + * A python formula kernel which is implemented by iterating over the input chunks and calling the python function N + * times. 
* * @see FormulaKernelPythonChunkedFunction */ @@ -28,138 +28,129 @@ class FormulaKernelPythonSingularFunction extends FormulaKernelTypedBase impleme @Override public void applyFormulaChunk( - FillContext __context, - WritableByteChunk __destination, - Chunk[] __sources) { + FillContext __context, + WritableByteChunk __destination, + Chunk[] __sources) { final Class[] types = ArgumentsSingular.buildParamTypes(__sources); final int L = __destination.size(); for (int i = 0; i < L; i++) { final byte output = function - .call(byte.class, CALL_METHOD, types, - ArgumentsSingular.buildArguments(__sources, i)); + .call(byte.class, CALL_METHOD, types, ArgumentsSingular.buildArguments(__sources, i)); __destination.set(i, output); } } @Override public void applyFormulaChunk( - FillContext __context, - WritableBooleanChunk __destination, - Chunk[] __sources) { + FillContext __context, + WritableBooleanChunk __destination, + Chunk[] __sources) { final Class[] types = ArgumentsSingular.buildParamTypes(__sources); final int L = __destination.size(); for (int i = 0; i < L; i++) { final boolean output = function - .call(boolean.class, CALL_METHOD, types, - ArgumentsSingular.buildArguments(__sources, i)); + .call(boolean.class, CALL_METHOD, types, ArgumentsSingular.buildArguments(__sources, i)); __destination.set(i, output); } } @Override public void applyFormulaChunk( - FillContext __context, - WritableCharChunk __destination, - Chunk[] __sources) { + FillContext __context, + WritableCharChunk __destination, + Chunk[] __sources) { final Class[] types = ArgumentsSingular.buildParamTypes(__sources); final int L = __destination.size(); for (int i = 0; i < L; i++) { final char output = function - .call(char.class, CALL_METHOD, types, - ArgumentsSingular.buildArguments(__sources, i)); + .call(char.class, CALL_METHOD, types, ArgumentsSingular.buildArguments(__sources, i)); __destination.set(i, output); } } @Override public void applyFormulaChunk( - FillContext __context, - 
WritableShortChunk __destination, - Chunk[] __sources) { + FillContext __context, + WritableShortChunk __destination, + Chunk[] __sources) { final Class[] types = ArgumentsSingular.buildParamTypes(__sources); final int L = __destination.size(); for (int i = 0; i < L; i++) { final short output = function - .call(short.class, CALL_METHOD, types, - ArgumentsSingular.buildArguments(__sources, i)); + .call(short.class, CALL_METHOD, types, ArgumentsSingular.buildArguments(__sources, i)); __destination.set(i, output); } } @Override public void applyFormulaChunk( - FillContext __context, - WritableIntChunk __destination, - Chunk[] __sources) { + FillContext __context, + WritableIntChunk __destination, + Chunk[] __sources) { final Class[] types = ArgumentsSingular.buildParamTypes(__sources); final int L = __destination.size(); for (int i = 0; i < L; i++) { final int output = function - .call(int.class, CALL_METHOD, types, - ArgumentsSingular.buildArguments(__sources, i)); + .call(int.class, CALL_METHOD, types, ArgumentsSingular.buildArguments(__sources, i)); __destination.set(i, output); } } @Override public void applyFormulaChunk( - FillContext __context, - WritableLongChunk __destination, - Chunk[] __sources) { + FillContext __context, + WritableLongChunk __destination, + Chunk[] __sources) { final Class[] types = ArgumentsSingular.buildParamTypes(__sources); final int L = __destination.size(); for (int i = 0; i < L; i++) { final long output = function - .call(long.class, CALL_METHOD, types, - ArgumentsSingular.buildArguments(__sources, i)); + .call(long.class, CALL_METHOD, types, ArgumentsSingular.buildArguments(__sources, i)); __destination.set(i, output); } } @Override public void applyFormulaChunk( - FillContext __context, - WritableFloatChunk __destination, - Chunk[] __sources) { + FillContext __context, + WritableFloatChunk __destination, + Chunk[] __sources) { final Class[] types = ArgumentsSingular.buildParamTypes(__sources); final int L = __destination.size(); 
for (int i = 0; i < L; i++) { final float output = function - .call(float.class, CALL_METHOD, types, - ArgumentsSingular.buildArguments(__sources, i)); + .call(float.class, CALL_METHOD, types, ArgumentsSingular.buildArguments(__sources, i)); __destination.set(i, output); } } @Override public void applyFormulaChunk( - FillContext __context, - WritableDoubleChunk __destination, - Chunk[] __sources) { + FillContext __context, + WritableDoubleChunk __destination, + Chunk[] __sources) { final Class[] types = ArgumentsSingular.buildParamTypes(__sources); final int L = __destination.size(); for (int i = 0; i < L; i++) { final double output = function - .call(double.class, CALL_METHOD, types, - ArgumentsSingular.buildArguments(__sources, i)); + .call(double.class, CALL_METHOD, types, ArgumentsSingular.buildArguments(__sources, i)); __destination.set(i, output); } } @Override public void applyFormulaChunk( - FillContext __context, - WritableObjectChunk __destination, - Chunk[] __sources) { + FillContext __context, + WritableObjectChunk __destination, + Chunk[] __sources) { final Class[] types = ArgumentsSingular.buildParamTypes(__sources); final int L = __destination.size(); for (int i = 0; i < L; i++) { - // this is LESS THAN IDEAL - it would be much better if ObjectChunk would be able to - // return + // this is LESS THAN IDEAL - it would be much better if ObjectChunk would be able to return // the non-array type final Object output = function - .call(Object.class, CALL_METHOD, types, ArgumentsSingular - .buildArguments(__sources, i)); + .call(Object.class, CALL_METHOD, types, ArgumentsSingular + .buildArguments(__sources, i)); // noinspection unchecked __destination.set(i, (T) output); diff --git a/DB/src/main/java/io/deephaven/db/v2/select/setinclusion/ReplicateSetInclusionKernel.java b/DB/src/main/java/io/deephaven/db/v2/select/setinclusion/ReplicateSetInclusionKernel.java index 80659c62848..90348c5ab27 100644 --- 
a/DB/src/main/java/io/deephaven/db/v2/select/setinclusion/ReplicateSetInclusionKernel.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/setinclusion/ReplicateSetInclusionKernel.java @@ -6,7 +6,6 @@ public class ReplicateSetInclusionKernel { public static void main(String[] args) throws IOException { - ReplicatePrimitiveCode.charToAllButBoolean(CharSetInclusionKernel.class, - ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.charToAllButBoolean(CharSetInclusionKernel.class, ReplicatePrimitiveCode.MAIN_SRC); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/select/setinclusion/SetInclusionKernel.java b/DB/src/main/java/io/deephaven/db/v2/select/setinclusion/SetInclusionKernel.java index bffd1ae9129..79dce0d0839 100644 --- a/DB/src/main/java/io/deephaven/db/v2/select/setinclusion/SetInclusionKernel.java +++ b/DB/src/main/java/io/deephaven/db/v2/select/setinclusion/SetInclusionKernel.java @@ -10,8 +10,7 @@ public interface SetInclusionKernel { void matchValues(Chunk values, WritableBooleanChunk matches); - static SetInclusionKernel makeKernel(ChunkType type, Collection values, - boolean inclusion) { + static SetInclusionKernel makeKernel(ChunkType type, Collection values, boolean inclusion) { switch (type) { case Object: return new ObjectSetInclusionKernel(values, inclusion); diff --git a/DB/src/main/java/io/deephaven/db/v2/snapshot/SnapshotIncrementalListener.java b/DB/src/main/java/io/deephaven/db/v2/snapshot/SnapshotIncrementalListener.java index df850a03771..af68f7ea9c8 100644 --- a/DB/src/main/java/io/deephaven/db/v2/snapshot/SnapshotIncrementalListener.java +++ b/DB/src/main/java/io/deephaven/db/v2/snapshot/SnapshotIncrementalListener.java @@ -26,11 +26,10 @@ public class SnapshotIncrementalListener extends MergedListener { private boolean firstSnapshot = true; public SnapshotIncrementalListener(QueryTable triggerTable, QueryTable resultTable, - Map resultColumns, - ListenerRecorder rightListener, ListenerRecorder leftListener, QueryTable 
rightTable, - Map leftColumns) { - super(Arrays.asList(rightListener, leftListener), Collections.emptyList(), - "snapshotIncremental", resultTable); + Map resultColumns, + ListenerRecorder rightListener, ListenerRecorder leftListener, QueryTable rightTable, + Map leftColumns) { + super(Arrays.asList(rightListener, leftListener), Collections.emptyList(), "snapshotIncremental", resultTable); this.triggerTable = triggerTable; this.resultTable = resultTable; this.resultColumns = resultColumns; @@ -45,8 +44,7 @@ public SnapshotIncrementalListener(QueryTable triggerTable, QueryTable resultTab protected void process() { if (!firstSnapshot && rightListener.recordedVariablesAreValid()) { if (rightUpdates == null) { - rightUpdates = new Index.IndexUpdateCoalescer(rightTable.getIndex(), - rightListener.getUpdate()); + rightUpdates = new Index.IndexUpdateCoalescer(rightTable.getIndex(), rightListener.getUpdate()); } else { rightUpdates.update(rightListener.getUpdate()); } @@ -67,7 +65,7 @@ public void doFirstSnapshot(boolean initial) { resultTable.getIndex().insert(rightTable.getIndex()); if (!initial) { resultTable.notifyListeners(resultTable.getIndex(), Index.FACTORY.getEmptyIndex(), - Index.FACTORY.getEmptyIndex()); + Index.FACTORY.getEmptyIndex()); } firstSnapshot = false; } @@ -76,7 +74,7 @@ public void doSnapshot() { lastRightIndex.clear(); lastRightIndex.insert(rightTable.getIndex()); try (final IndexShiftDataExpander expander = - new IndexShiftDataExpander(rightUpdates.coalesce(), lastRightIndex)) { + new IndexShiftDataExpander(rightUpdates.coalesce(), lastRightIndex)) { final Index rightAdded = expander.getAdded(); final Index rightModified = expander.getModified(); final Index rightRemoved = expander.getRemoved(); @@ -93,15 +91,12 @@ private void doRowCopy(Index index) { copyRowsToResult(index, triggerTable, rightTable, leftColumns, resultColumns); } - public static void copyRowsToResult(Index rowsToCopy, QueryTable triggerTable, - QueryTable rightTable, Map 
leftColumns, - Map resultColumns) { + public static void copyRowsToResult(Index rowsToCopy, QueryTable triggerTable, QueryTable rightTable, + Map leftColumns, Map resultColumns) { final Index qtIndex = triggerTable.getIndex(); if (!qtIndex.empty()) { - SnapshotUtils.copyStampColumns(leftColumns, qtIndex.lastKey(), resultColumns, - rowsToCopy); + SnapshotUtils.copyStampColumns(leftColumns, qtIndex.lastKey(), resultColumns, rowsToCopy); } - SnapshotUtils.copyDataColumns(rightTable.getColumnSourceMap(), rowsToCopy, resultColumns, - rowsToCopy, false); + SnapshotUtils.copyDataColumns(rightTable.getColumnSourceMap(), rowsToCopy, resultColumns, rowsToCopy, false); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/snapshot/SnapshotUtils.java b/DB/src/main/java/io/deephaven/db/v2/snapshot/SnapshotUtils.java index 7dc705c8320..15c905f8804 100644 --- a/DB/src/main/java/io/deephaven/db/v2/snapshot/SnapshotUtils.java +++ b/DB/src/main/java/io/deephaven/db/v2/snapshot/SnapshotUtils.java @@ -15,9 +15,8 @@ public class SnapshotUtils { /** * Creates a new columnSourceMap based on 'columns'. */ - public static Map createColumnSourceMap( - Map columns, - BiFunction factory) { + public static Map createColumnSourceMap(Map columns, + BiFunction factory) { final Map result = new LinkedHashMap<>(); for (final Map.Entry entry : columns.entrySet()) { final String key = entry.getKey(); @@ -29,19 +28,18 @@ public static Map createColumnSourceMap( } /** - * For each name in stampColumns: - identify a stamp source (namely 'stampColumns.get(name)') - - * a row in that stamp source (namely 'stampKey') - a stamp dest (namely the column identified - * by 'destColumns.get(name)') - a bunch of destination rows (namely all the rows defined in - * 'destIndex') Then "spray" that single source value over those destination values. 
+ * For each name in stampColumns: - identify a stamp source (namely 'stampColumns.get(name)') - a row in that stamp + * source (namely 'stampKey') - a stamp dest (namely the column identified by 'destColumns.get(name)') - a bunch of + * destination rows (namely all the rows defined in 'destIndex') Then "spray" that single source value over those + * destination values. * * @param stampColumns The stamp columns that serve as the source data * @param stampKey The source key * @param destColumns The destination columns we are "spraying" to * @param destIndex The keys in destColumns we want to write to */ - public static void copyStampColumns(@NotNull Map stampColumns, - long stampKey, - @NotNull Map destColumns, @NotNull Index destIndex) { + public static void copyStampColumns(@NotNull Map stampColumns, long stampKey, + @NotNull Map destColumns, @NotNull Index destIndex) { for (Map.Entry entry : stampColumns.entrySet()) { final String name = entry.getKey(); final ColumnSource src = entry.getValue(); @@ -54,19 +52,18 @@ public static void copyStampColumns(@NotNull Map } /** - * Like the above, but with a singleton destination. For each name in stampColumns: - identify a - * stamp source (namely 'stampColumns.get(name)') - a row in that stamp source (namely - * 'stampKey') - a stamp dest (namely the column identified by 'destColumns.get(name)') - a row - * in the destination (namely 'destKey') Then copy the source value to the destination value. + * Like the above, but with a singleton destination. For each name in stampColumns: - identify a stamp source + * (namely 'stampColumns.get(name)') - a row in that stamp source (namely 'stampKey') - a stamp dest (namely the + * column identified by 'destColumns.get(name)') - a row in the destination (namely 'destKey') Then copy the source + * value to the destination value. 
* * @param stampColumns The stamp columns that serve as the source data * @param stampKey The source key * @param destColumns The destination columns we are writing to to * @param destKey The key in destColumns we want to write to */ - public static void copyStampColumns(@NotNull Map stampColumns, - long stampKey, - @NotNull Map destColumns, long destKey) { + public static void copyStampColumns(@NotNull Map stampColumns, long stampKey, + @NotNull Map destColumns, long destKey) { for (Map.Entry entry : stampColumns.entrySet()) { final String name = entry.getKey(); final ColumnSource src = entry.getValue(); @@ -78,9 +75,8 @@ public static void copyStampColumns(@NotNull Map } /** - * For each name in srcColumns, copy all the data at srcColumns.get(name) (with a range of rows - * defined by srcIndex) to a column indicated by destColumns.get(name) (with a range of rows - * defined by destIndex). + * For each name in srcColumns, copy all the data at srcColumns.get(name) (with a range of rows defined by srcIndex) + * to a column indicated by destColumns.get(name) (with a range of rows defined by destIndex). 
* * @param srcColumns The stamp columns that serve as the source data * @param srcIndex The keys in the srcColumns we are reading from @@ -88,9 +84,9 @@ public static void copyStampColumns(@NotNull Map * @param destIndex The keys in destColumns we want to write to */ public static void copyDataColumns(@NotNull Map srcColumns, - @NotNull Index srcIndex, @NotNull Map destColumns, - @NotNull Index destIndex, - boolean usePrev) { + @NotNull Index srcIndex, @NotNull Map destColumns, + @NotNull Index destIndex, + boolean usePrev) { assert srcIndex.size() == destIndex.size(); if (srcIndex.empty()) { return; diff --git a/DB/src/main/java/io/deephaven/db/v2/sort/ReplicateSortKernel.java b/DB/src/main/java/io/deephaven/db/v2/sort/ReplicateSortKernel.java index 9e02f1e827e..003c88d0360 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sort/ReplicateSortKernel.java +++ b/DB/src/main/java/io/deephaven/db/v2/sort/ReplicateSortKernel.java @@ -34,36 +34,30 @@ public static void main(String[] args) throws IOException { doCharMegaMergeReplication(CharLongMegaMergeKernel.class); - ReplicatePrimitiveCode.charToAllButBoolean(CharFindRunsKernel.class, - ReplicatePrimitiveCode.MAIN_SRC); - final String objectRunPath = ReplicatePrimitiveCode.charToObject(CharFindRunsKernel.class, - ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.charToAllButBoolean(CharFindRunsKernel.class, ReplicatePrimitiveCode.MAIN_SRC); + final String objectRunPath = + ReplicatePrimitiveCode.charToObject(CharFindRunsKernel.class, ReplicatePrimitiveCode.MAIN_SRC); fixupObjectRuns(objectRunPath); - ReplicatePrimitiveCode.charToAllButBoolean(CharPartitionKernel.class, - ReplicatePrimitiveCode.MAIN_SRC); - final String objectPartitionPath = ReplicatePrimitiveCode - .charToObject(CharPartitionKernel.class, ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.charToAllButBoolean(CharPartitionKernel.class, ReplicatePrimitiveCode.MAIN_SRC); + final String objectPartitionPath = + 
ReplicatePrimitiveCode.charToObject(CharPartitionKernel.class, ReplicatePrimitiveCode.MAIN_SRC); fixupObjectPartition(objectPartitionPath); - ReplicatePrimitiveCode.charToAllButBoolean(CharPermuteKernel.class, - ReplicatePrimitiveCode.MAIN_SRC); - fixupObjectPermute(ReplicatePrimitiveCode.charToObject(CharPermuteKernel.class, - ReplicatePrimitiveCode.MAIN_SRC)); + ReplicatePrimitiveCode.charToAllButBoolean(CharPermuteKernel.class, ReplicatePrimitiveCode.MAIN_SRC); + fixupObjectPermute( + ReplicatePrimitiveCode.charToObject(CharPermuteKernel.class, ReplicatePrimitiveCode.MAIN_SRC)); } private static void doCharReplication(Class sourceClass) throws IOException { // replicate char to each of the other types - final List timsortPaths = ReplicatePrimitiveCode.charToAllButBoolean(sourceClass, - ReplicatePrimitiveCode.MAIN_SRC); - final String objectSortPath = - ReplicatePrimitiveCode.charToObject(sourceClass, ReplicatePrimitiveCode.MAIN_SRC); - timsortPaths - .add(ReplicatePrimitiveCode.pathForClass(sourceClass, ReplicatePrimitiveCode.MAIN_SRC)); + final List timsortPaths = + ReplicatePrimitiveCode.charToAllButBoolean(sourceClass, ReplicatePrimitiveCode.MAIN_SRC); + final String objectSortPath = ReplicatePrimitiveCode.charToObject(sourceClass, ReplicatePrimitiveCode.MAIN_SRC); + timsortPaths.add(ReplicatePrimitiveCode.pathForClass(sourceClass, ReplicatePrimitiveCode.MAIN_SRC)); timsortPaths.add(objectSortPath); - // now replicate each type to a descending kernel, and swap the sense of gt, lt, geq, and - // leq + // now replicate each type to a descending kernel, and swap the sense of gt, lt, geq, and leq for (final String path : timsortPaths) { final String descendingPath = path.replace("TimsortKernel", "TimsortDescendingKernel"); @@ -83,13 +77,12 @@ private static void doCharReplication(Class sourceClass) throws IOException { final String nullAwareAscendingName = "NullAware" + sourceName; final String nullAwarePath = path.replace(sourceName, nullAwareAscendingName); 
final String nullAwareDescendingPath = - nullAwarePath.replaceAll("TimsortKernel", "TimsortDescendingKernel"); + nullAwarePath.replaceAll("TimsortKernel", "TimsortDescendingKernel"); - fixupCharNullComparisons(sourceClass, path, nullAwarePath, sourceName, - nullAwareAscendingName, true); + fixupCharNullComparisons(sourceClass, path, nullAwarePath, sourceName, nullAwareAscendingName, true); // we are going to fix it up ascending, then follow it up with a sense inversion - fixupCharNullComparisons(sourceClass, path, nullAwareDescendingPath, sourceName, - nullAwareAscendingName, true); + fixupCharNullComparisons(sourceClass, path, nullAwareDescendingPath, sourceName, nullAwareAscendingName, + true); invertSense(nullAwareDescendingPath, nullAwareDescendingPath); } else if (path.contains("Object")) { FileUtils.copyFile(new File(path), new File(descendingPath)); @@ -106,19 +99,15 @@ private static void doCharReplication(Class sourceClass) throws IOException { private static void doCharMegaMergeReplication(Class sourceClass) throws IOException { // replicate char to each of the other types - final List megaMergePaths = ReplicatePrimitiveCode.charToAllButBoolean(sourceClass, - ReplicatePrimitiveCode.MAIN_SRC); - final String objectSortPath = - ReplicatePrimitiveCode.charToObject(sourceClass, ReplicatePrimitiveCode.MAIN_SRC); - megaMergePaths - .add(ReplicatePrimitiveCode.pathForClass(sourceClass, ReplicatePrimitiveCode.MAIN_SRC)); + final List megaMergePaths = + ReplicatePrimitiveCode.charToAllButBoolean(sourceClass, ReplicatePrimitiveCode.MAIN_SRC); + final String objectSortPath = ReplicatePrimitiveCode.charToObject(sourceClass, ReplicatePrimitiveCode.MAIN_SRC); + megaMergePaths.add(ReplicatePrimitiveCode.pathForClass(sourceClass, ReplicatePrimitiveCode.MAIN_SRC)); megaMergePaths.add(objectSortPath); - // now replicate each type to a descending kernel, and swap the sense of gt, lt, geq, and - // leq + // now replicate each type to a descending kernel, and swap the sense 
of gt, lt, geq, and leq for (final String path : megaMergePaths) { - final String descendingPath = - path.replace("LongMegaMergeKernel", "LongMegaMergeDescendingKernel"); + final String descendingPath = path.replace("LongMegaMergeKernel", "LongMegaMergeDescendingKernel"); if (path.contains("Object")) { FileUtils.copyFile(new File(path), new File(descendingPath)); fixupObjectMegaMerge(objectSortPath, true); @@ -131,21 +120,19 @@ private static void doCharMegaMergeReplication(Class sourceClass) throws IOEx private static void replicateLongToInt() throws IOException { final String intSortKernelPath = - ReplicatePrimitiveCode.longToInt(LongSortKernel.class, ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.longToInt(LongSortKernel.class, ReplicatePrimitiveCode.MAIN_SRC); fixupIntSortKernel(intSortKernelPath); - ReplicatePrimitiveCode.longToInt(CharLongTimsortKernel.class, - ReplicatePrimitiveCode.MAIN_SRC); - ReplicatePrimitiveCode.longToInt(BooleanLongRadixSortKernel.class, - ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.longToInt(CharLongTimsortKernel.class, ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.longToInt(BooleanLongRadixSortKernel.class, ReplicatePrimitiveCode.MAIN_SRC); } private static void fixupIntSortKernel(String intSortKernelPath) throws IOException { final List longCase = Arrays.asList("case Long:", - "if (order == SortingOrder.Ascending) {", - " return LongIntTimsortKernel.createContext(size);", - "} else {", - " return LongIntTimsortDescendingKernel.createContext(size);", - "}"); + "if (order == SortingOrder.Ascending) {", + " return LongIntTimsortKernel.createContext(size);", + "} else {", + " return LongIntTimsortDescendingKernel.createContext(size);", + "}"); final File file = new File(intSortKernelPath); final List lines = FileUtils.readLines(file, Charset.defaultCharset()); @@ -155,22 +142,20 @@ private static void fixupIntSortKernel(String intSortKernelPath) throws IOExcept private static void 
replicateLongInt() throws IOException { // our special fancy LongInt sort kernel for use in a multicolumn sort - final String targetName = ReplicatePrimitiveCode - .charLongToLongInt(CharLongTimsortKernel.class, ReplicatePrimitiveCode.MAIN_SRC); + final String targetName = + ReplicatePrimitiveCode.charLongToLongInt(CharLongTimsortKernel.class, ReplicatePrimitiveCode.MAIN_SRC); fixupLongInt(targetName); - final File longIntDest = - new File(targetName.replace("LongTimsortKernel", "LongIntTimsortKernel")); + final File longIntDest = new File(targetName.replace("LongTimsortKernel", "LongIntTimsortKernel")); // noinspection ResultOfMethodCallIgnored longIntDest.delete(); FileUtils.moveFile(new File(targetName), longIntDest); } private static void replicateIntInt() throws IOException { - final String targetName = ReplicatePrimitiveCode - .charLongToIntInt(CharLongTimsortKernel.class, ReplicatePrimitiveCode.MAIN_SRC); + final String targetName = + ReplicatePrimitiveCode.charLongToIntInt(CharLongTimsortKernel.class, ReplicatePrimitiveCode.MAIN_SRC); fixupIntInt(targetName); - final File intIntDest = - new File(targetName.replace("IntTimsortKernel", "IntIntTimsortKernel")); + final File intIntDest = new File(targetName.replace("IntTimsortKernel", "IntIntTimsortKernel")); // noinspection ResultOfMethodCallIgnored intIntDest.delete(); FileUtils.moveFile(new File(targetName), intIntDest); @@ -179,8 +164,7 @@ private static void replicateIntInt() throws IOException { private static void invertSense(String path, String descendingPath) throws IOException { final File file = new File(path); - final List lines = - ascendingNameToDescendingName(FileUtils.readLines(file, Charset.defaultCharset())); + final List lines = ascendingNameToDescendingName(FileUtils.readLines(file, Charset.defaultCharset())); FileUtils.writeLines(new File(descendingPath), invertComparisons(lines)); } @@ -188,12 +172,11 @@ private static void invertSense(String path, String descendingPath) throws IOExc 
@NotNull private static List ascendingNameToDescendingName(List lines) { // we should skip the replicate header - return globalReplacements(3, lines, "TimsortKernel", "TimsortDescendingKernel", - "\\BLongMegaMergeKernel", "LongMegaMergeDescendingKernel"); + return globalReplacements(3, lines, "TimsortKernel", "TimsortDescendingKernel", "\\BLongMegaMergeKernel", + "LongMegaMergeDescendingKernel"); } - private static void fixupObjectTimSort(String objectPath, boolean ascending) - throws IOException { + private static void fixupObjectTimSort(String objectPath, boolean ascending) throws IOException { final File objectFile = new File(objectPath); List lines = FileUtils.readLines(objectFile, Charset.defaultCharset()); @@ -206,8 +189,7 @@ private static void fixupObjectTimSort(String objectPath, boolean ascending) FileUtils.writeLines(objectFile, fixupObjectComparisons(lines, ascending)); } - private static void fixupObjectMegaMerge(String objectPath, boolean ascending) - throws IOException { + private static void fixupObjectMegaMerge(String objectPath, boolean ascending) throws IOException { final File objectFile = new File(objectPath); List lines = FileUtils.readLines(objectFile, Charset.defaultCharset()); @@ -231,10 +213,9 @@ private static void fixupObjectPermute(String objectPath) throws IOException { lines = fixupTypedChunkAttributes(lines); lines = lines.stream() - .map(x -> x.replaceAll("asObjectChunk\\(\\)", "asObjectChunk()")) - .map( - x -> x.replaceAll("asWritableObjectChunk\\(\\)", "asWritableObjectChunk()")) - .collect(Collectors.toList()); + .map(x -> x.replaceAll("asObjectChunk\\(\\)", "asObjectChunk()")) + .map(x -> x.replaceAll("asWritableObjectChunk\\(\\)", "asWritableObjectChunk()")) + .collect(Collectors.toList()); FileUtils.writeLines(objectFile, lines); } @@ -242,9 +223,9 @@ private static void fixupObjectPermute(String objectPath) throws IOException { @NotNull private static List fixupTypedChunkAttributes(List lines) { lines = lines.stream() - 
.map(x -> x.replaceAll("static ", "static")) - .map(x -> x.replaceAll("ObjectChunk<([^>]*)>", "ObjectChunk")) - .collect(Collectors.toList()); + .map(x -> x.replaceAll("static ", "static")) + .map(x -> x.replaceAll("ObjectChunk<([^>]*)>", "ObjectChunk")) + .collect(Collectors.toList()); return lines; } @@ -274,12 +255,12 @@ private static void fixupLongInt(String path) throws IOException { lines = removeImport(lines, LongSortKernel.class); lines = lines.stream().map(x -> x.replaceAll("LongTimsortKernel", "LongIntTimsortKernel")) - .map(x -> x.replaceAll("LongSortKernelContext", "LongIntSortKernelContext")) - .map(x -> x.replaceAll( - "static class LongIntSortKernelContext implements SortKernel", - "static class LongIntSortKernelContext implements AutoCloseable")) - .map(x -> x.replaceAll("IntChunk", "IntChunk")) - .collect(Collectors.toList()); + .map(x -> x.replaceAll("LongSortKernelContext", "LongIntSortKernelContext")) + .map(x -> x.replaceAll( + "static class LongIntSortKernelContext implements SortKernel", + "static class LongIntSortKernelContext implements AutoCloseable")) + .map(x -> x.replaceAll("IntChunk", "IntChunk")) + .collect(Collectors.toList()); lines = applyFixup(lines, "Context", "\\s+@Override", (m) -> Collections.singletonList("")); @@ -294,12 +275,12 @@ private static void fixupIntInt(String path) throws IOException { lines = removeImport(lines, LongSortKernel.class); lines = lines.stream().map(x -> x.replaceAll("IntTimsortKernel", "IntIntTimsortKernel")) - .map(x -> x.replaceAll("IntSortKernelContext", "IntIntSortKernelContext")) - .map(x -> x.replaceAll( - "static class IntIntSortKernelContext implements SortKernel", - "static class IntIntSortKernelContext implements AutoCloseable")) - .map(x -> x.replaceAll("IntChunk", "IntChunk")) - .collect(Collectors.toList()); + .map(x -> x.replaceAll("IntSortKernelContext", "IntIntSortKernelContext")) + .map(x -> x.replaceAll( + "static class IntIntSortKernelContext implements SortKernel", + "static 
class IntIntSortKernelContext implements AutoCloseable")) + .map(x -> x.replaceAll("IntChunk", "IntChunk")) + .collect(Collectors.toList()); lines = applyFixup(lines, "Context", "\\s+@Override", (m) -> Collections.singletonList("")); @@ -313,28 +294,24 @@ public static void fixupNanComparisons(String path, boolean ascending) throws IO final List lines = FileUtils.readLines(file, Charset.defaultCharset()); FileUtils.writeLines(new File(path), - fixupNanComparisons(lines, path.contains("Double") ? "Double" : "Float", ascending)); + fixupNanComparisons(lines, path.contains("Double") ? "Double" : "Float", ascending)); } - public static List fixupNanComparisons(List lines, String type, - boolean ascending) { + public static List fixupNanComparisons(List lines, String type, boolean ascending) { final String lcType = type.toLowerCase(); - lines = ReplicateUtilities.addImport(lines, - "import io.deephaven.db.util.Dh" + type + "Comparisons;"); + lines = ReplicateUtilities.addImport(lines, "import io.deephaven.db.util.Dh" + type + "Comparisons;"); lines = replaceRegion(lines, "comparison functions", - Arrays.asList( - " private static int doComparison(" + lcType + " lhs, " + lcType + " rhs) {", - " return " + (ascending ? "" : "-1 * ") + "Dh" + type - + "Comparisons.compare(lhs, rhs);", - " }")); + Arrays.asList(" private static int doComparison(" + lcType + " lhs, " + lcType + " rhs) {", + " return " + (ascending ? 
"" : "-1 * ") + "Dh" + type + "Comparisons.compare(lhs, rhs);", + " }")); return lines; } @SuppressWarnings("SameParameterValue") - private static void fixupCharNullComparisons(Class sourceClass, String path, String newPath, - String oldName, String newName, boolean ascending) throws IOException { + private static void fixupCharNullComparisons(Class sourceClass, String path, String newPath, String oldName, + String newName, boolean ascending) throws IOException { final File file = new File(path); List lines = FileUtils.readLines(file, Charset.defaultCharset()); @@ -344,20 +321,19 @@ private static void fixupCharNullComparisons(Class sourceClass, String path, Str lines = globalReplacements(fixupCharNullComparisons(lines, ascending), oldName, newName); lines.addAll(0, Arrays.asList( - "/* ---------------------------------------------------------------------------------------------------------------------", - " * AUTO-GENERATED CLASS - DO NOT EDIT MANUALLY - for any changes edit " - + sourceClass.getSimpleName() + " and regenerate", - " * ------------------------------------------------------------------------------------------------------------------ */")); + "/* ---------------------------------------------------------------------------------------------------------------------", + " * AUTO-GENERATED CLASS - DO NOT EDIT MANUALLY - for any changes edit " + sourceClass.getSimpleName() + + " and regenerate", + " * ------------------------------------------------------------------------------------------------------------------ */")); FileUtils.writeLines(new File(newPath), lines); } public static List fixupCharNullComparisons(List lines, boolean ascending) { lines = replaceRegion(lines, "comparison functions", - Arrays.asList(" private static int doComparison(char lhs, char rhs) {", - " return " + (ascending ? 
"" : "-1 * ") - + "DhCharComparisons.compare(lhs, rhs);", - " }")); + Arrays.asList(" private static int doComparison(char lhs, char rhs) {", + " return " + (ascending ? "" : "-1 * ") + "DhCharComparisons.compare(lhs, rhs);", + " }")); return lines; } @@ -367,55 +343,54 @@ public static List fixupObjectComparisons(List lines) { public static List fixupObjectComparisons(List lines, boolean ascending) { final List ascendingComparision = Arrays.asList( - " // ascending comparison", - " private static int doComparison(Object lhs, Object rhs) {", - " if (lhs == rhs) {", - " return 0;", - " }", - " if (lhs == null) {", - " return -1;", - " }", - " if (rhs == null) {", - " return 1;", - " }", - " //noinspection unchecked", - " return ((Comparable)lhs).compareTo(rhs);", - " }", - ""); + " // ascending comparison", + " private static int doComparison(Object lhs, Object rhs) {", + " if (lhs == rhs) {", + " return 0;", + " }", + " if (lhs == null) {", + " return -1;", + " }", + " if (rhs == null) {", + " return 1;", + " }", + " //noinspection unchecked", + " return ((Comparable)lhs).compareTo(rhs);", + " }", + ""); final List descendingComparision = Arrays.asList( - " // descending comparison", - " private static int doComparison(Object lhs, Object rhs) {", - " if (lhs == rhs) {", - " return 0;", - " }", - " if (lhs == null) {", - " return 1;", - " }", - " if (rhs == null) {", - " return -1;", - " }", - " //noinspection unchecked", - " return ((Comparable)rhs).compareTo(lhs);", - " }"); + " // descending comparison", + " private static int doComparison(Object lhs, Object rhs) {", + " if (lhs == rhs) {", + " return 0;", + " }", + " if (lhs == null) {", + " return 1;", + " }", + " if (rhs == null) {", + " return -1;", + " }", + " //noinspection unchecked", + " return ((Comparable)rhs).compareTo(lhs);", + " }"); return addImport(simpleFixup( - replaceRegion(lines, "comparison functions", - ascending ? 
ascendingComparision : descendingComparision), - "equality function", "lhs == rhs", "Objects.equals(lhs, rhs)"), Objects.class); + replaceRegion(lines, "comparison functions", ascending ? ascendingComparision : descendingComparision), + "equality function", "lhs == rhs", "Objects.equals(lhs, rhs)"), Objects.class); } public static List invertComparisons(List lines) { final List descendingComment = Collections.singletonList( - " // note that this is a descending kernel, thus the comparisons here are backwards (e.g., the lt function is in terms of the sort direction, so is implemented by gt)"); + " // note that this is a descending kernel, thus the comparisons here are backwards (e.g., the lt function is in terms of the sort direction, so is implemented by gt)"); return insertRegion( - applyFixup(lines, "comparison functions", "(\\s+return )(.*compare.*;)", - m -> Collections.singletonList(m.group(1) + "-1 * " + m.group(2))), - "comparison functions", descendingComment); + applyFixup(lines, "comparison functions", "(\\s+return )(.*compare.*;)", + m -> Collections.singletonList(m.group(1) + "-1 * " + m.group(2))), + "comparison functions", descendingComment); } private static List fixupNeq(List lines) { return applyFixup(lines, "neq", "\\s+return next != last;", - m -> Collections.singletonList(" return !Objects.equals(next, last);")); + m -> Collections.singletonList(" return !Objects.equals(next, last);")); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/sort/findruns/FindRunsKernel.java b/DB/src/main/java/io/deephaven/db/v2/sort/findruns/FindRunsKernel.java index 5cffa342a2d..5458c17f5b1 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sort/findruns/FindRunsKernel.java +++ b/DB/src/main/java/io/deephaven/db/v2/sort/findruns/FindRunsKernel.java @@ -26,9 +26,8 @@ static FindRunsKernel makeContext(ChunkType chunkType) { } /** - * Find runs of identical values in a sorted chunk. 
This is used as part of an overall sort, - * after the timsort (or other sorting) kernel to identify the runs that must be sorted - * according to secondary keys. + * Find runs of identical values in a sorted chunk. This is used as part of an overall sort, after the timsort (or + * other sorting) kernel to identify the runs that must be sorted according to secondary keys. * * Runs with only a single value are not included. * @@ -37,7 +36,7 @@ static FindRunsKernel makeContext(ChunkType chunkType) { * @param lengthsOut an output chunk, parallel to offsetsOut, with the lengths of found runs */ void findRuns(Chunk sortedValues, WritableIntChunk offsetsOut, - WritableIntChunk lengthsOut); + WritableIntChunk lengthsOut); /** * Find runs of identical values in a sorted chunk. @@ -49,12 +48,11 @@ void findRuns(Chunk sortedValues, WritableIntChunk offsetsOut, * @param lengthsOut an output chunk, parallel to offsetsOut, with the lengths of found runs */ void findRunsSingles(Chunk sortedValues, WritableIntChunk offsetsOut, - WritableIntChunk lengthsOut); + WritableIntChunk lengthsOut); /** - * Find runs of identical values in a sorted chunk. This is used as part of an overall sort, - * after the timsort (or other sorting) kernel to identify the runs that must be sorted - * according to secondary keys. + * Find runs of identical values in a sorted chunk. This is used as part of an overall sort, after the timsort (or + * other sorting) kernel to identify the runs that must be sorted according to secondary keys. 
* * @param sortedValues a chunk of sorted values * @param offsetsIn the offsets within the chunk to check for runs @@ -62,10 +60,9 @@ void findRunsSingles(Chunk sortedValues, WritableIntChunk offset * @param offsetsOut an output chunk, with offsets of starting locations that a run occurred * @param lengthsOut an output chunk, parallel to offsetsOut, with the lengths of found runs * - * Note, that lengthsIn must contain values greater than 1, and lengthsOut additionally - * only contain values greater than one + * Note, that lengthsIn must contain values greater than 1, and lengthsOut additionally only contain values + * greater than one */ - void findRuns(Chunk sortedValues, IntChunk offsetsIn, - IntChunk lengthsIn, WritableIntChunk offsetsOut, - WritableIntChunk lengthsOut); + void findRuns(Chunk sortedValues, IntChunk offsetsIn, IntChunk lengthsIn, + WritableIntChunk offsetsOut, WritableIntChunk lengthsOut); } diff --git a/DB/src/main/java/io/deephaven/db/v2/sort/partition/PartitionUtilities.java b/DB/src/main/java/io/deephaven/db/v2/sort/partition/PartitionUtilities.java index 0d727bf8b5d..4b877b39dd1 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sort/partition/PartitionUtilities.java +++ b/DB/src/main/java/io/deephaven/db/v2/sort/partition/PartitionUtilities.java @@ -14,8 +14,8 @@ class PartitionUtilities { * http://www.nowherenearithaca.com/2013/05/robert-floyds-tiny-and-beautiful.html */ static void sampleIndexKeys( - final long seed, final Index index, final int sampleSize, - final WritableLongChunk sampledKeys) { + final long seed, final Index index, final int sampleSize, + final WritableLongChunk sampledKeys) { final Random random = new Random(seed); final TLongHashSet sample = new TLongHashSet(sampleSize); final long maxValue = index.size(); @@ -36,8 +36,7 @@ static void sampleIndexKeys( } } - // using the java array sort or our own timsort would be nice, though it is only suitable - // for parallel arrays + // using the java array sort or our own 
timsort would be nice, though it is only suitable for parallel arrays final TLongArrayList array = new TLongArrayList(sampleSize); sample.forEach(key -> { array.add(index.get(key - 1)); diff --git a/DB/src/main/java/io/deephaven/db/v2/sort/permute/PermuteKernel.java b/DB/src/main/java/io/deephaven/db/v2/sort/permute/PermuteKernel.java index 1b8ffd5ba0b..528bf7034c4 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sort/permute/PermuteKernel.java +++ b/DB/src/main/java/io/deephaven/db/v2/sort/permute/PermuteKernel.java @@ -34,42 +34,36 @@ static PermuteKernel makePermuteKernel(ChunkType chunkType) { * Permute the inputValues into outputValues according to the positions in outputPositions. * * @param inputValues a chunk of values, which must have the same size as outputPositions - * @param outputPositions a chunk of positions, parallel to inputValues, that indicates the - * position in outputValues for the corresponding inputValues value - * @param outputValues an output chunk, which must be at least as big as the largest value in - * outputPositions + * @param outputPositions a chunk of positions, parallel to inputValues, that indicates the position in outputValues + * for the corresponding inputValues value + * @param outputValues an output chunk, which must be at least as big as the largest value in outputPositions */ - void permute(Chunk inputValues, - IntChunk outputPositions, WritableChunk outputValues); + void permute(Chunk inputValues, IntChunk outputPositions, + WritableChunk outputValues); /** - * Permute the inputValues into outputValues according to positions in inputPositions and - * outputPositions. + * Permute the inputValues into outputValues according to positions in inputPositions and outputPositions. *

      * outputValues[outputPositions] = inputValues[inputPositions] * - * @param inputPositions a chunk of positions that indicates the position in inputValues to copy - * to the outputValues chunk - * @param inputValues a chunk of values, which must be at least as large as the largest value in - * inputPositions - * @param outputPositions a chunk of positions, parallel to inputPositions, that indicates the - * position in outputValues for the corresponding inputValues value - * @param outputValues an output chunk, which must be at least as big as the largest value in - * outputPositions + * @param inputPositions a chunk of positions that indicates the position in inputValues to copy to the outputValues + * chunk + * @param inputValues a chunk of values, which must be at least as large as the largest value in inputPositions + * @param outputPositions a chunk of positions, parallel to inputPositions, that indicates the position in + * outputValues for the corresponding inputValues value + * @param outputValues an output chunk, which must be at least as big as the largest value in outputPositions */ - void permute(IntChunk inputPositions, - Chunk inputValues, IntChunk outputPositions, - WritableChunk outputValues); + void permute(IntChunk inputPositions, Chunk inputValues, + IntChunk outputPositions, WritableChunk outputValues); /** * Permute the inputValues into outputValues according to the positions in inputPositions. 
* - * @param inputValues a chunk of values, which must be at least as big as the largest value in - * inputPositions - * @param inputPositions a chunk of positions, parallel to outputValues, that indicates the - * position in inputValues for the corresponding outputValues value + * @param inputValues a chunk of values, which must be at least as big as the largest value in inputPositions + * @param inputPositions a chunk of positions, parallel to outputValues, that indicates the position in inputValues + * for the corresponding outputValues value * @param outputValues an output chunk, which must have the same size as inputPositions */ - void permuteInput(Chunk inputValues, - IntChunk inputPositions, WritableChunk outputValues); + void permuteInput(Chunk inputValues, IntChunk inputPositions, + WritableChunk outputValues); } diff --git a/DB/src/main/java/io/deephaven/db/v2/sort/timsort/TimsortUtilities.java b/DB/src/main/java/io/deephaven/db/v2/sort/timsort/TimsortUtilities.java index 5069570dd69..70952cee440 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sort/timsort/TimsortUtilities.java +++ b/DB/src/main/java/io/deephaven/db/v2/sort/timsort/TimsortUtilities.java @@ -2,17 +2,16 @@ public class TimsortUtilities { /** - * The initial setting for the number of consecutive values taken from either run1 or run2 - * before we switch to galloping mode. + * The initial setting for the number of consecutive values taken from either run1 or run2 before we switch to + * galloping mode. */ public static final int INITIAL_GALLOP = 7; /** - * Given a length of our input, we should pick a number that is between 32 and 64; ideally such - * that (length / run) is just under 2^n < length, but only by a little bit. + * Given a length of our input, we should pick a number that is between 32 and 64; ideally such that (length / run) + * is just under 2^n < length, but only by a little bit. 
* - * Take the six most significant bits of length; if the remaining bits have any ones, then add - * one. + * Take the six most significant bits of length; if the remaining bits have any ones, then add one. * * @param length the length of the values within a chunk to sort * @return the run length diff --git a/DB/src/main/java/io/deephaven/db/v2/sortcheck/ReplicateSortCheck.java b/DB/src/main/java/io/deephaven/db/v2/sortcheck/ReplicateSortCheck.java index 9943dbe436e..4d2ef712295 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sortcheck/ReplicateSortCheck.java +++ b/DB/src/main/java/io/deephaven/db/v2/sortcheck/ReplicateSortCheck.java @@ -19,12 +19,11 @@ public class ReplicateSortCheck { public static void main(String[] args) throws IOException { final List invertList = new ArrayList<>(); - invertList.add(ReplicatePrimitiveCode.pathForClass(CharSortCheck.class, - ReplicatePrimitiveCode.MAIN_SRC)); - invertList.addAll(ReplicatePrimitiveCode.charToAllButBoolean(CharSortCheck.class, - ReplicatePrimitiveCode.MAIN_SRC)); - final String objectPath = ReplicatePrimitiveCode.charToObject(CharSortCheck.class, - ReplicatePrimitiveCode.MAIN_SRC); + invertList.add(ReplicatePrimitiveCode.pathForClass(CharSortCheck.class, ReplicatePrimitiveCode.MAIN_SRC)); + invertList.addAll( + ReplicatePrimitiveCode.charToAllButBoolean(CharSortCheck.class, ReplicatePrimitiveCode.MAIN_SRC)); + final String objectPath = + ReplicatePrimitiveCode.charToObject(CharSortCheck.class, ReplicatePrimitiveCode.MAIN_SRC); invertList.add(objectPath); ReplicateUtilities.fixupChunkAttributes(objectPath); @@ -36,10 +35,9 @@ public static void main(String[] args) throws IOException { private static void invertSense(String path, String descendingPath) throws IOException { final File file = new File(path); - List lines = simpleFixup( - ascendingNameToDescendingName(path, - FileUtils.readLines(file, Charset.defaultCharset())), - "initialize last", "MIN_VALUE", "MAX_VALUE"); + List lines = + 
simpleFixup(ascendingNameToDescendingName(path, FileUtils.readLines(file, Charset.defaultCharset())), + "initialize last", "MIN_VALUE", "MAX_VALUE"); if (path.contains("Object")) { lines = ReplicateSortKernel.fixupObjectComparisons(lines, false); diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/AbstractColumnSource.java b/DB/src/main/java/io/deephaven/db/v2/sources/AbstractColumnSource.java index a2c379ab44e..24d6c312979 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/AbstractColumnSource.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/AbstractColumnSource.java @@ -51,8 +51,7 @@ protected AbstractColumnSource(@NotNull final Class type) { this(type, Object.class); } - public AbstractColumnSource(@NotNull final Class type, - @Nullable final Class elementType) { + public AbstractColumnSource(@NotNull final Class type, @Nullable final Class elementType) { if (type == boolean.class) { // noinspection unchecked this.type = (Class) Boolean.class; @@ -117,9 +116,8 @@ public final void setGroupToRange(@Nullable Map groupToRange) { @Override public Index match(boolean invertMatch, boolean usePrev, boolean caseInsensitive, Index mapper, - final Object... keys) { - final Map groupToRange = - (isImmutable() || !usePrev) ? getGroupToRange(mapper) : null; + final Object... keys) { + final Map groupToRange = (isImmutable() || !usePrev) ? 
getGroupToRange(mapper) : null; if (groupToRange != null) { IndexBuilder allInMatchingGroups = Index.FACTORY.getRandomBuilder(); @@ -150,7 +148,7 @@ public Index match(boolean invertMatch, boolean usePrev, boolean caseInsensitive return matchingValues; } else { return ChunkFilter.applyChunkFilter(mapper, this, usePrev, - ChunkMatchFilterFactory.getChunkFilter(type, caseInsensitive, invertMatch, keys)); + ChunkMatchFilterFactory.getChunkFilter(type, caseInsensitive, invertMatch, keys)); } } @@ -176,14 +174,10 @@ public Map getValuesMapping(Index subRange) { Map result = new LinkedHashMap<>(); final Map groupToRange = getGroupToRange(); - // if we have a grouping we can use it to avoid iterating the entire subRange. The issue is - // that our grouping - // could be bigger than the index we care about, by a very large margin. In this case we - // could be spinning - // on Index intersect operations that are actually useless. This check says that if our - // subRange is smaller - // than the number of keys in our grouping, we should just fetch the keys instead and - // generate the grouping + // if we have a grouping we can use it to avoid iterating the entire subRange. The issue is that our grouping + // could be bigger than the index we care about, by a very large margin. In this case we could be spinning + // on Index intersect operations that are actually useless. This check says that if our subRange is smaller + // than the number of keys in our grouping, we should just fetch the keys instead and generate the grouping // from scratch. boolean useGroupToRange = (groupToRange != null) && (groupToRange.size() < subRange.size()); if (useGroupToRange) { @@ -214,10 +208,9 @@ public Map getValuesMapping(Index subRange) { } /** - * We have a fair bit of internal state that must be serialized, but not all of our descendants - * in the class hierarchy should actually be sent over the wire. 
If you are implementing a class - * that should allow this to be serialized, then you must annotate it with an IsSerializable - * annotation, containing a value of true. + * We have a fair bit of internal state that must be serialized, but not all of our descendants in the class + * hierarchy should actually be sent over the wire. If you are implementing a class that should allow this to be + * serialized, then you must annotate it with an IsSerializable annotation, containing a value of true. */ @Retention(RetentionPolicy.RUNTIME) public @interface IsSerializable { @@ -229,17 +222,16 @@ private void writeObject(ObjectOutputStream oos) throws IOException { oos.defaultWriteObject(); else throw new UnsupportedOperationException( - "AbstractColumnSources are not all serializable, you may be missing a select() call."); + "AbstractColumnSources are not all serializable, you may be missing a select() call."); } /** - * Finds the most derived class that has an IsSerializable annotation, and returns its value. If - * no annotation is found, then returns false. + * Finds the most derived class that has an IsSerializable annotation, and returns its value. If no annotation is + * found, then returns false. */ private boolean isSerializable() { for (Class clazz = getClass(); clazz != null; clazz = clazz.getSuperclass()) { - IsSerializable isSerializable = - (IsSerializable) clazz.getAnnotation(IsSerializable.class); + IsSerializable isSerializable = (IsSerializable) clazz.getAnnotation(IsSerializable.class); if (isSerializable != null) { return isSerializable.value(); } @@ -255,31 +247,30 @@ private boolean isSerializable() { * @return A new value to range map (i.e. 
grouping metadata) */ public static Map getValueToRangeMap(@NotNull final Index index, - @Nullable final ColumnSource columnSource) { + @Nullable final ColumnSource columnSource) { final long size = index.size(); if (columnSource == null) { return Collections.singletonMap(null, new long[] {0, size}); } // noinspection unchecked return ((Map) index.getGrouping(columnSource)).entrySet().stream() - .sorted(java.util.Comparator.comparingLong(e -> e.getValue().firstKey())) - .collect(Collectors.toMap( - Map.Entry::getKey, - new Function, long[]>() { - private long prevLastKey = -1L; - private long currentSize = 0; - - @Override - public long[] apply(@NotNull final Map.Entry entry) { - final Index index = entry.getValue(); - Assert.instanceOf(index, "index", SortedIndex.class); - Assert.gt(index.firstKey(), "index.firstKey()", prevLastKey, "prevLastKey"); - prevLastKey = index.lastKey(); - return new long[] {currentSize, currentSize += index.size()}; - } - }, - Assert::neverInvoked, - LinkedHashMap::new)); + .sorted(java.util.Comparator.comparingLong(e -> e.getValue().firstKey())).collect(Collectors.toMap( + Map.Entry::getKey, + new Function, long[]>() { + private long prevLastKey = -1L; + private long currentSize = 0; + + @Override + public long[] apply(@NotNull final Map.Entry entry) { + final Index index = entry.getValue(); + Assert.instanceOf(index, "index", SortedIndex.class); + Assert.gt(index.firstKey(), "index.firstKey()", prevLastKey, "prevLastKey"); + prevLastKey = index.lastKey(); + return new long[] {currentSize, currentSize += index.size()}; + } + }, + Assert::neverInvoked, + LinkedHashMap::new)); } /** @@ -289,32 +280,29 @@ public long[] apply(@NotNull final Map.Entry entry) { * @param groupConsumer Consumer for responsive groups */ public static void forEachGroup(@NotNull final Map groupToIndex, - @NotNull final BiConsumer groupConsumer) { + @NotNull final BiConsumer groupConsumer) { groupToIndex.entrySet().stream() - .filter(kie -> 
kie.getValue().nonempty()) - .sorted(java.util.Comparator.comparingLong(kie -> kie.getValue().firstKey())) - .forEachOrdered(kie -> groupConsumer.accept(kie.getKey(), kie.getValue())); + .filter(kie -> kie.getValue().nonempty()) + .sorted(java.util.Comparator.comparingLong(kie -> kie.getValue().firstKey())) + .forEachOrdered(kie -> groupConsumer.accept(kie.getKey(), kie.getValue())); } /** - * Convert a group-to-index map to a pair of flat in-memory column sources, one for the keys and - * one for the indexes. + * Convert a group-to-index map to a pair of flat in-memory column sources, one for the keys and one for the + * indexes. * - * @param originalKeyColumnSource The key column source whose contents are reflected by the - * group-to-index map (used for typing, only) + * @param originalKeyColumnSource The key column source whose contents are reflected by the group-to-index map (used + * for typing, only) * @param groupToIndex The group-to-index map to convert * @return A pair of a flat key column source and a flat index column source */ @SuppressWarnings("unused") public static Pair, ObjectArraySource> groupingToFlatSources( - @NotNull final ColumnSource originalKeyColumnSource, - @NotNull final Map groupToIndex) { + @NotNull final ColumnSource originalKeyColumnSource, @NotNull final Map groupToIndex) { final int numGroups = groupToIndex.size(); - final ArrayBackedColumnSource resultKeyColumnSource = - ArrayBackedColumnSource.getMemoryColumnSource(numGroups, - originalKeyColumnSource.getType(), originalKeyColumnSource.getComponentType()); - final ObjectArraySource resultIndexColumnSource = - new ObjectArraySource<>(Index.class); + final ArrayBackedColumnSource resultKeyColumnSource = ArrayBackedColumnSource.getMemoryColumnSource( + numGroups, originalKeyColumnSource.getType(), originalKeyColumnSource.getComponentType()); + final ObjectArraySource resultIndexColumnSource = new ObjectArraySource<>(Index.class); resultIndexColumnSource.ensureCapacity(numGroups); 
final MutableInt processedGroupCount = new MutableInt(0); @@ -324,8 +312,7 @@ public static Pair, ObjectArraySource(resultKeyColumnSource, resultIndexColumnSource); } @@ -333,42 +320,39 @@ public static Pair, ObjectArraySource void forEachResponsiveGroup(@NotNull final Map groupToIndex, - @NotNull final Index intersect, - @NotNull final BiConsumer groupConsumer) { + @NotNull final Index intersect, + @NotNull final BiConsumer groupConsumer) { groupToIndex.entrySet().stream() - .map(kie -> new Pair<>(kie.getKey(), kie.getValue().intersect(intersect))) - .filter(kip -> kip.getSecond().nonempty()) - .sorted(java.util.Comparator.comparingLong(kip -> kip.getSecond().firstKey())) - .forEachOrdered(kip -> groupConsumer.accept(kip.getFirst(), kip.getSecond())); + .map(kie -> new Pair<>(kie.getKey(), kie.getValue().intersect(intersect))) + .filter(kip -> kip.getSecond().nonempty()) + .sorted(java.util.Comparator.comparingLong(kip -> kip.getSecond().firstKey())) + .forEachOrdered(kip -> groupConsumer.accept(kip.getFirst(), kip.getSecond())); } /** - * Convert a group-to-index map to a pair of flat in-memory column sources, one for the keys and - * one for the indexes. + * Convert a group-to-index map to a pair of flat in-memory column sources, one for the keys and one for the + * indexes. 
* - * @param originalKeyColumnSource The key column source whose contents are reflected by the - * group-to-index map (used for typing, only) + * @param originalKeyColumnSource The key column source whose contents are reflected by the group-to-index map (used + * for typing, only) * @param groupToIndex The group-to-index map to convert * @param intersect Limit returned indices to values contained within intersect * @param responsiveGroups Set to the number of responsive groups on exit * @return A pair of a flat key column source and a flat index column source */ public static Pair, ObjectArraySource> groupingToFlatSources( - @NotNull final ColumnSource originalKeyColumnSource, - @NotNull final Map groupToIndex, - @NotNull final Index intersect, - @NotNull final MutableInt responsiveGroups) { + @NotNull final ColumnSource originalKeyColumnSource, + @NotNull final Map groupToIndex, + @NotNull final Index intersect, + @NotNull final MutableInt responsiveGroups) { final int numGroups = groupToIndex.size(); - final ArrayBackedColumnSource resultKeyColumnSource = - ArrayBackedColumnSource.getMemoryColumnSource(numGroups, - originalKeyColumnSource.getType(), originalKeyColumnSource.getComponentType()); - final ObjectArraySource resultIndexColumnSource = - new ObjectArraySource<>(Index.class); + final ArrayBackedColumnSource resultKeyColumnSource = ArrayBackedColumnSource.getMemoryColumnSource( + numGroups, originalKeyColumnSource.getType(), originalKeyColumnSource.getComponentType()); + final ObjectArraySource resultIndexColumnSource = new ObjectArraySource<>(Index.class); resultIndexColumnSource.ensureCapacity(numGroups); responsiveGroups.setValue(0); @@ -383,17 +367,15 @@ public static Pair, ObjectArraySource destination, - @NotNull final OrderedKeys orderedKeys) { + public void fillChunk(@NotNull final FillContext context, @NotNull final WritableChunk destination, + @NotNull final OrderedKeys orderedKeys) { defaultFillChunk(context, destination, orderedKeys); } 
@VisibleForTesting - public final void defaultFillChunk( - @SuppressWarnings("unused") @NotNull final FillContext context, - @NotNull final WritableChunk destination, - @NotNull final OrderedKeys orderedKeys) { + public final void defaultFillChunk(@SuppressWarnings("unused") @NotNull final FillContext context, + @NotNull final WritableChunk destination, + @NotNull final OrderedKeys orderedKeys) { final ChunkFiller filler = destination.getChunkFiller(); if (orderedKeys.getAverageRunLengthEstimate() >= USE_RANGES_AVERAGE_RUN_LENGTH) { filler.fillByRanges(this, orderedKeys, destination); @@ -404,14 +386,13 @@ public final void defaultFillChunk( @Override public void fillPrevChunk(@NotNull final FillContext context, - @NotNull final WritableChunk destination, - @NotNull final OrderedKeys orderedKeys) { + @NotNull final WritableChunk destination, @NotNull final OrderedKeys orderedKeys) { defaultFillPrevChunk(context, destination, orderedKeys); } final void defaultFillPrevChunk(@SuppressWarnings("unused") @NotNull final FillContext context, - @NotNull final WritableChunk destination, - @NotNull final OrderedKeys orderedKeys) { + @NotNull final WritableChunk destination, + @NotNull final OrderedKeys orderedKeys) { final ChunkFiller filler = destination.getChunkFiller(); if (orderedKeys.getAverageRunLengthEstimate() >= USE_RANGES_AVERAGE_RUN_LENGTH) { filler.fillPrevByRanges(this, orderedKeys, destination); @@ -422,17 +403,15 @@ final void defaultFillPrevChunk(@SuppressWarnings("unused") @NotNull final FillC @Override public boolean allowsReinterpret( - @NotNull final Class alternateDataType) { + @NotNull final Class alternateDataType) { return false; } @Override public final ColumnSource reinterpret( - @NotNull final Class alternateDataType) - throws IllegalArgumentException { + @NotNull final Class alternateDataType) throws IllegalArgumentException { if (!allowsReinterpret(alternateDataType)) { - throw new IllegalArgumentException( - "Unsupported reinterpret for " + 
getClass().getSimpleName() + throw new IllegalArgumentException("Unsupported reinterpret for " + getClass().getSimpleName() + ": type=" + getType() + ", alternateDataType=" + alternateDataType); } @@ -440,44 +419,40 @@ public final ColumnSource reinterpret } /** - * Supply allowed reinterpret results. The default implementation handles the most common case - * to avoid code duplication. + * Supply allowed reinterpret results. The default implementation handles the most common case to avoid code + * duplication. * * @param alternateDataType The alternate data type * @return The resulting {@link ColumnSource} */ protected ColumnSource doReinterpret( - @NotNull final Class alternateDataType) { + @NotNull final Class alternateDataType) { Assert.eq(getType(), "getType()", DBDateTime.class); Assert.eq(alternateDataType, "alternateDataType", long.class); // noinspection unchecked - return (ColumnSource) new UnboxedDateTimeWritableSource( - (WritableSource) this); + return (ColumnSource) new UnboxedDateTimeWritableSource((WritableSource) this); } public static abstract class DefaultedMutable extends AbstractColumnSource - implements MutableColumnSourceGetDefaults.ForObject { + implements MutableColumnSourceGetDefaults.ForObject { protected DefaultedMutable(@NotNull final Class type) { super(type); } - protected DefaultedMutable(@NotNull final Class type, - @Nullable final Class elementType) { + protected DefaultedMutable(@NotNull final Class type, @Nullable final Class elementType) { super(type, elementType); } } - public static abstract class DefaultedImmutable - extends AbstractColumnSource - implements ImmutableColumnSourceGetDefaults.ForObject { + public static abstract class DefaultedImmutable extends AbstractColumnSource + implements ImmutableColumnSourceGetDefaults.ForObject { protected DefaultedImmutable(@NotNull final Class type) { super(type); } - protected DefaultedImmutable(@NotNull final Class type, - @Nullable final Class elementType) { + protected 
DefaultedImmutable(@NotNull final Class type, @Nullable final Class elementType) { super(type, elementType); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/AbstractDeferredGroupingColumnSource.java b/DB/src/main/java/io/deephaven/db/v2/sources/AbstractDeferredGroupingColumnSource.java index 1d7e3e0df3a..6d6c2bc7c79 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/AbstractDeferredGroupingColumnSource.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/AbstractDeferredGroupingColumnSource.java @@ -15,7 +15,7 @@ * Adds deferred grouping support to {@link AbstractColumnSource}. */ public abstract class AbstractDeferredGroupingColumnSource extends AbstractColumnSource - implements DeferredGroupingColumnSource { + implements DeferredGroupingColumnSource { private transient volatile GroupingProvider groupingProvider; diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/ArrayBackedColumnSource.java b/DB/src/main/java/io/deephaven/db/v2/sources/ArrayBackedColumnSource.java index 2550e67cce0..c33bdf48bf8 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/ArrayBackedColumnSource.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/ArrayBackedColumnSource.java @@ -67,14 +67,13 @@ /** * A ColumnSource backed by in-memory arrays of data. *

      - * The column source is dense with keys from 0 to capacity, there can be no holes. Arrays are - * divided into blocks so that the column source can be incrementally expanded without copying data - * from one array to another. + * The column source is dense with keys from 0 to capacity, there can be no holes. Arrays are divided into blocks so + * that the column source can be incrementally expanded without copying data from one array to another. */ @AbstractColumnSource.IsSerializable(value = true) public abstract class ArrayBackedColumnSource - extends AbstractDeferredGroupingColumnSource - implements FillUnordered, ShiftData.ShiftCallback, WritableSource, Serializable { + extends AbstractDeferredGroupingColumnSource + implements FillUnordered, ShiftData.ShiftCallback, WritableSource, Serializable { private static final long serialVersionUID = -7823391894248382929L; static final int DEFAULT_RECYCLER_CAPACITY = 1024; @@ -103,8 +102,7 @@ public abstract class ArrayBackedColumnSource static final long INDEX_MASK = (1 << LOG_BLOCK_SIZE) - 1; public static final int BLOCK_SIZE = 1 << LOG_BLOCK_SIZE; - // The inUse calculations are confusing because there are actually three levels of indexing - // (where the third level + // The inUse calculations are confusing because there are actually three levels of indexing (where the third level // is really an index into a bitmask). In pseudocode: // bool inUse = prevInUse[block][indexWithinInUse][inUseBitIndex] // Or, in actual code @@ -120,14 +118,14 @@ public abstract class ArrayBackedColumnSource static final int IN_USE_MASK = (1 << LOG_INUSE_BITSET_SIZE) - 1; /** - * Minimum average run length in an {@link OrderedKeys} that should trigger - * {@link Chunk}-filling by key ranges instead of individual keys. + * Minimum average run length in an {@link OrderedKeys} that should trigger {@link Chunk}-filling by key ranges + * instead of individual keys. 
*/ static final long USE_RANGES_AVERAGE_RUN_LENGTH = 5; static final SoftRecycler inUseRecycler = new SoftRecycler<>(DEFAULT_RECYCLER_CAPACITY, - () -> new long[IN_USE_BLOCK_SIZE], - block -> Arrays.fill(block, 0)); + () -> new long[IN_USE_BLOCK_SIZE], + block -> Arrays.fill(block, 0)); public static ArrayBackedColumnSource from(Array array) { return array.walk(new ArrayAdapter<>()).getOut(); @@ -147,9 +145,9 @@ public static ArrayBackedColumnSource from(PrimitiveArray array) { long maxIndex; /** - * A bitset with the same two-level structure as the array-backed data, except that the inner - * array is interpreted as a bitset (and is thus not very big... its length is blockSize / 64, - * and because blockSize is currently 256, the size of the inner array is 4. + * A bitset with the same two-level structure as the array-backed data, except that the inner array is interpreted + * as a bitset (and is thus not very big... its length is blockSize / 64, and because blockSize is currently 256, + * the size of the inner array is 4. 
*/ transient long[][] prevInUse; @@ -204,12 +202,10 @@ public void set(long key, short value) { * @param componentType the component type of the resulting column source * @return an in-memory column source with the requested data */ - public static ArrayBackedColumnSource getMemoryColumnSource( - @NotNull final Collection data, - @NotNull final Class dataType, - @Nullable final Class componentType) { - final ArrayBackedColumnSource result = - getMemoryColumnSource(data.size(), dataType, componentType); + public static ArrayBackedColumnSource getMemoryColumnSource(@NotNull final Collection data, + @NotNull final Class dataType, + @Nullable final Class componentType) { + final ArrayBackedColumnSource result = getMemoryColumnSource(data.size(), dataType, componentType); long i = 0; for (T o : data) { result.set(i++, o); @@ -226,12 +222,11 @@ public static ArrayBackedColumnSource getMemoryColumnSource( * @return an in-memory column source with the requested data */ public static ArrayBackedColumnSource getMemoryColumnSource(@NotNull final T[] data, - @NotNull final Class dataType, - @Nullable final Class componentType) { - final ArrayBackedColumnSource result = - getMemoryColumnSource(data.length, dataType, componentType); + @NotNull final Class dataType, + @Nullable final Class componentType) { + final ArrayBackedColumnSource result = getMemoryColumnSource(data.length, dataType, componentType); try (final FillFromContext context = result.makeFillFromContext(data.length); - final OrderedKeys range = OrderedKeys.forRange(0, data.length - 1)) { + final OrderedKeys range = OrderedKeys.forRange(0, data.length - 1)) { result.fillFromChunk(context, ObjectChunk.chunkWrap(data), range); } return result; @@ -247,7 +242,7 @@ public static ArrayBackedColumnSource getMemoryColumnSource(@NotNull final final ArrayBackedColumnSource result = new ByteArraySource(); result.ensureCapacity(data.length); try (final FillFromContext context = result.makeFillFromContext(data.length); - final 
OrderedKeys range = OrderedKeys.forRange(0, data.length - 1)) { + final OrderedKeys range = OrderedKeys.forRange(0, data.length - 1)) { result.fillFromChunk(context, ByteChunk.chunkWrap(data), range); } return result; @@ -259,13 +254,12 @@ public static ArrayBackedColumnSource getMemoryColumnSource(@NotNull final * @param data an array containing the data to insert into the ColumnSource. * @return an in-memory column source with the requested data */ - public static ArrayBackedColumnSource getBooleanMemoryColumnSource( - @NotNull final byte[] data) { + public static ArrayBackedColumnSource getBooleanMemoryColumnSource(@NotNull final byte[] data) { final ArrayBackedColumnSource result = new BooleanArraySource(); final WritableSource dest = (WritableSource) result.reinterpret(byte.class); result.ensureCapacity(data.length); try (final FillFromContext context = dest.makeFillFromContext(data.length); - final OrderedKeys range = OrderedKeys.forRange(0, data.length - 1)) { + final OrderedKeys range = OrderedKeys.forRange(0, data.length - 1)) { dest.fillFromChunk(context, ByteChunk.chunkWrap(data), range); } return result; @@ -277,12 +271,11 @@ public static ArrayBackedColumnSource getBooleanMemoryColumnSource( * @param data an array containing the data to insert into the ColumnSource. 
* @return an in-memory column source with the requested data */ - public static ArrayBackedColumnSource getMemoryColumnSource( - @NotNull final char[] data) { + public static ArrayBackedColumnSource getMemoryColumnSource(@NotNull final char[] data) { final ArrayBackedColumnSource result = new CharacterArraySource(); result.ensureCapacity(data.length); try (final FillFromContext context = result.makeFillFromContext(data.length); - final OrderedKeys range = OrderedKeys.forRange(0, data.length - 1)) { + final OrderedKeys range = OrderedKeys.forRange(0, data.length - 1)) { result.fillFromChunk(context, CharChunk.chunkWrap(data), range); } return result; @@ -294,12 +287,11 @@ public static ArrayBackedColumnSource getMemoryColumnSource( * @param data an array containing the data to insert into the ColumnSource. * @return an in-memory column source with the requested data */ - public static ArrayBackedColumnSource getMemoryColumnSource( - @NotNull final double[] data) { + public static ArrayBackedColumnSource getMemoryColumnSource(@NotNull final double[] data) { final ArrayBackedColumnSource result = new DoubleArraySource(); result.ensureCapacity(data.length); try (final FillFromContext context = result.makeFillFromContext(data.length); - final OrderedKeys range = OrderedKeys.forRange(0, data.length - 1)) { + final OrderedKeys range = OrderedKeys.forRange(0, data.length - 1)) { result.fillFromChunk(context, DoubleChunk.chunkWrap(data), range); } return result; @@ -311,12 +303,11 @@ public static ArrayBackedColumnSource getMemoryColumnSource( * @param data an array containing the data to insert into the ColumnSource. 
* @return an in-memory column source with the requested data */ - public static ArrayBackedColumnSource getMemoryColumnSource( - @NotNull final float[] data) { + public static ArrayBackedColumnSource getMemoryColumnSource(@NotNull final float[] data) { final ArrayBackedColumnSource result = new FloatArraySource(); result.ensureCapacity(data.length); try (final FillFromContext context = result.makeFillFromContext(data.length); - final OrderedKeys range = OrderedKeys.forRange(0, data.length - 1)) { + final OrderedKeys range = OrderedKeys.forRange(0, data.length - 1)) { result.fillFromChunk(context, FloatChunk.chunkWrap(data), range); } return result; @@ -328,12 +319,11 @@ public static ArrayBackedColumnSource getMemoryColumnSource( * @param data an array containing the data to insert into the ColumnSource. * @return an in-memory column source with the requested data */ - public static ArrayBackedColumnSource getMemoryColumnSource( - @NotNull final int[] data) { + public static ArrayBackedColumnSource getMemoryColumnSource(@NotNull final int[] data) { final ArrayBackedColumnSource result = new IntegerArraySource(); result.ensureCapacity(data.length); try (final FillFromContext context = result.makeFillFromContext(data.length); - final OrderedKeys range = OrderedKeys.forRange(0, data.length - 1)) { + final OrderedKeys range = OrderedKeys.forRange(0, data.length - 1)) { result.fillFromChunk(context, IntChunk.chunkWrap(data), range); } return result; @@ -349,7 +339,7 @@ public static ArrayBackedColumnSource getMemoryColumnSource(@NotNull final final ArrayBackedColumnSource result = new LongArraySource(); result.ensureCapacity(data.length); try (final FillFromContext context = result.makeFillFromContext(data.length); - final OrderedKeys range = OrderedKeys.forRange(0, data.length - 1)) { + final OrderedKeys range = OrderedKeys.forRange(0, data.length - 1)) { result.fillFromChunk(context, LongChunk.chunkWrap(data), range); } return result; @@ -358,17 +348,16 @@ public 
static ArrayBackedColumnSource getMemoryColumnSource(@NotNull final /** * Produces an DateTimeArraySource with the given data. * - * @param data an array containing the data to insert into the ColumnSource, represented as long - * nanoseconds since the epoch + * @param data an array containing the data to insert into the ColumnSource, represented as long nanoseconds since + * the epoch * @return an in-memory column source with the requested data */ - public static ArrayBackedColumnSource getDateTimeMemoryColumnSource( - @NotNull final long[] data) { + public static ArrayBackedColumnSource getDateTimeMemoryColumnSource(@NotNull final long[] data) { final ArrayBackedColumnSource result = new DateTimeArraySource(); result.ensureCapacity(data.length); final WritableSource asLong = (WritableSource) result.reinterpret(long.class); try (final FillFromContext context = asLong.makeFillFromContext(data.length); - final OrderedKeys range = OrderedKeys.forRange(0, data.length - 1)) { + final OrderedKeys range = OrderedKeys.forRange(0, data.length - 1)) { asLong.fillFromChunk(context, LongChunk.chunkWrap(data), range); } return result; @@ -380,12 +369,11 @@ public static ArrayBackedColumnSource getDateTimeMemoryColumnSource( * @param data an array containing the data to insert into the ColumnSource. 
* @return an in-memory column source with the requested data */ - public static ArrayBackedColumnSource getMemoryColumnSource( - @NotNull final short[] data) { + public static ArrayBackedColumnSource getMemoryColumnSource(@NotNull final short[] data) { final ArrayBackedColumnSource result = new ShortArraySource(); result.ensureCapacity(data.length); try (final FillFromContext context = result.makeFillFromContext(data.length); - final OrderedKeys range = OrderedKeys.forRange(0, data.length - 1)) { + final OrderedKeys range = OrderedKeys.forRange(0, data.length - 1)) { result.fillFromChunk(context, ShortChunk.chunkWrap(data), range); } return result; @@ -400,12 +388,12 @@ public static ArrayBackedColumnSource getMemoryColumnSource( * @return an in-memory column source of the requested type */ public static ArrayBackedColumnSource getMemoryColumnSource(final long size, - @NotNull final Class dataType) { + @NotNull final Class dataType) { return getMemoryColumnSource(size, dataType, null); } - public static ArrayBackedColumnSource getMemoryColumnSource( - @NotNull final Class dataType, @Nullable final Class componentType) { + public static ArrayBackedColumnSource getMemoryColumnSource(@NotNull final Class dataType, + @Nullable final Class componentType) { return getMemoryColumnSource(0, dataType, componentType); } @@ -419,7 +407,7 @@ public static ArrayBackedColumnSource getMemoryColumnSource( * @return an in-memory column source of the requested type */ public static ArrayBackedColumnSource getMemoryColumnSource(final long size, - @NotNull final Class dataType, @Nullable final Class componentType) { + @NotNull final Class dataType, @Nullable final Class componentType) { final ArrayBackedColumnSource result; if (dataType == byte.class || dataType == Byte.class) { result = new ByteArraySource(); @@ -471,34 +459,32 @@ public void shift(final long start, final long end, final long offset) { } /** - * Creates an in-memory ColumnSource from the supplied dataArray, using 
instanceof checks to - * determine the appropriate type of column source to produce. + * Creates an in-memory ColumnSource from the supplied dataArray, using instanceof checks to determine the + * appropriate type of column source to produce. * * @param dataArray the data to insert into the new column source * @return a ColumnSource with the supplied data. */ public static WritableSource getMemoryColumnSourceUntyped(@NotNull final Object dataArray) { return getMemoryColumnSourceUntyped(dataArray, dataArray.getClass().getComponentType(), - dataArray.getClass().getComponentType().getComponentType()); + dataArray.getClass().getComponentType().getComponentType()); } /** - * Creates an in-memory ColumnSource from the supplied dataArray, using instanceof checks to - * determine the appropriate type of column source to produce. + * Creates an in-memory ColumnSource from the supplied dataArray, using instanceof checks to determine the + * appropriate type of column source to produce. * * @param dataArray the data to insert into the new column source * @param dataType the data type of the resultant column source * @param componentType the component type for column sources of arrays or DbArrays * @return a ColumnSource with the supplied data. 
*/ - public static WritableSource getMemoryColumnSourceUntyped( - @NotNull final Object dataArray, - @NotNull final Class dataType, - @Nullable final Class componentType) { + public static WritableSource getMemoryColumnSourceUntyped(@NotNull final Object dataArray, + @NotNull final Class dataType, + @Nullable final Class componentType) { final WritableSource result; if (dataArray instanceof boolean[]) { - result = getMemoryColumnSource(ArrayUtils.getBoxedArray((boolean[]) dataArray), - Boolean.class, null); + result = getMemoryColumnSource(ArrayUtils.getBoxedArray((boolean[]) dataArray), Boolean.class, null); } else if (dataArray instanceof byte[]) { result = getMemoryColumnSource((byte[]) dataArray); } else if (dataArray instanceof char[]) { @@ -538,8 +524,8 @@ public static WritableSource getMemoryColumnSourceUntyped( } /** - * Wrap the input array in an immutable {@link ColumnSource}. This method will unbox any boxed - * values, and directly use the result array. + * Wrap the input array in an immutable {@link ColumnSource}. This method will unbox any boxed values, and directly + * use the result array. * * @param dataArray The array to turn into a ColumnSource * @return An Immutable ColumnSource that directly wraps the input array. @@ -547,34 +533,31 @@ public static WritableSource getMemoryColumnSourceUntyped( public static ColumnSource getImmutableMemoryColumnSource(@NotNull final Object dataArray) { final Class arrayType = dataArray.getClass().getComponentType(); if (arrayType == null) { - throw new IllegalArgumentException( - "Input value was not an array, was " + dataArray.getClass().getName()); + throw new IllegalArgumentException("Input value was not an array, was " + dataArray.getClass().getName()); } return getImmutableMemoryColumnSource(dataArray, arrayType, arrayType.getComponentType()); } /** - * Wrap the input array in an immutable {@link ColumnSource}. This method will unbox any boxed - * values, and directly use the result array. 
This version allows the user to specify the column - * data type. It will automatically map column type Boolean/boolean with input array types - * byte[] to {@link ImmutableBooleanArraySource} and columnType DBDateTime / array type long[] - * to {@link ImmutableDateTimeArraySource} + * Wrap the input array in an immutable {@link ColumnSource}. This method will unbox any boxed values, and directly + * use the result array. This version allows the user to specify the column data type. It will automatically map + * column type Boolean/boolean with input array types byte[] to {@link ImmutableBooleanArraySource} and columnType + * DBDateTime / array type long[] to {@link ImmutableDateTimeArraySource} * * @param dataArray The array to turn into a ColumnSource * @param dataType the data type of the resultant column source * @param componentType the component type for column sources of arrays or DbArrays * @return An Immutable ColumnSource that directly wraps the input array. */ - public static ColumnSource getImmutableMemoryColumnSource( - @NotNull final Object dataArray, - @NotNull final Class dataType, - @Nullable final Class componentType) { + public static ColumnSource getImmutableMemoryColumnSource(@NotNull final Object dataArray, + @NotNull final Class dataType, + @Nullable final Class componentType) { final ColumnSource result; if (dataType == boolean.class) { result = (dataArray instanceof byte[]) - ? new ImmutableBooleanArraySource((byte[]) dataArray) - : new ImmutableBooleanArraySource((boolean[]) dataArray); + ? 
new ImmutableBooleanArraySource((byte[]) dataArray) + : new ImmutableBooleanArraySource((boolean[]) dataArray); } else if (dataType == byte.class) { result = new ImmutableByteArraySource((byte[]) dataArray); } else if (dataType == char.class) { @@ -591,16 +574,14 @@ public static ColumnSource getImmutableMemoryColumnSource( result = new ImmutableShortArraySource((short[]) dataArray); } else if (dataType == Boolean.class) { result = (dataArray instanceof byte[]) - ? new ImmutableBooleanArraySource((byte[]) dataArray) - : new ImmutableBooleanArraySource((Boolean[]) dataArray); + ? new ImmutableBooleanArraySource((byte[]) dataArray) + : new ImmutableBooleanArraySource((Boolean[]) dataArray); } else if (dataType == Byte.class) { result = new ImmutableByteArraySource(ArrayUtils.getUnboxedArray((Byte[]) dataArray)); } else if (dataType == Character.class) { - result = - new ImmutableCharArraySource(ArrayUtils.getUnboxedArray((Character[]) dataArray)); + result = new ImmutableCharArraySource(ArrayUtils.getUnboxedArray((Character[]) dataArray)); } else if (dataType == Double.class) { - result = - new ImmutableDoubleArraySource(ArrayUtils.getUnboxedArray((Double[]) dataArray)); + result = new ImmutableDoubleArraySource(ArrayUtils.getUnboxedArray((Double[]) dataArray)); } else if (dataType == Float.class) { result = new ImmutableFloatArraySource(ArrayUtils.getUnboxedArray((Float[]) dataArray)); } else if (dataType == Integer.class) { @@ -633,9 +614,8 @@ static int getBlockNo(final long from) { abstract Object getPrevBlock(int blockIndex); @Override - public void fillChunk(@NotNull final FillContext context, - @NotNull final WritableChunk destination, - @NotNull final OrderedKeys orderedKeys) { + public void fillChunk(@NotNull final FillContext context, @NotNull final WritableChunk destination, + @NotNull final OrderedKeys orderedKeys) { if (orderedKeys.getAverageRunLengthEstimate() < USE_RANGES_AVERAGE_RUN_LENGTH) { fillSparseChunk(destination, orderedKeys); return; @@ 
-647,17 +627,14 @@ public void fillChunk(@NotNull final FillContext context, final int fromOffsetInBlock = (int) (from & INDEX_MASK); if (fromBlock == toBlock) { final int sz = LongSizedDataStructure.intSize("int cast", to - from + 1); - destination.copyFromArray(getBlock(fromBlock), fromOffsetInBlock, - destOffset.intValue(), sz); + destination.copyFromArray(getBlock(fromBlock), fromOffsetInBlock, destOffset.intValue(), sz); destOffset.add(sz); } else { final int sz = BLOCK_SIZE - fromOffsetInBlock; - destination.copyFromArray(getBlock(fromBlock), fromOffsetInBlock, - destOffset.intValue(), sz); + destination.copyFromArray(getBlock(fromBlock), fromOffsetInBlock, destOffset.intValue(), sz); destOffset.add(sz); for (int blockNo = fromBlock + 1; blockNo < toBlock; ++blockNo) { - destination.copyFromArray(getBlock(blockNo), 0, destOffset.intValue(), - BLOCK_SIZE); + destination.copyFromArray(getBlock(blockNo), 0, destOffset.intValue(), BLOCK_SIZE); destOffset.add(BLOCK_SIZE); } int restSz = (int) (to & INDEX_MASK) + 1; @@ -670,15 +647,15 @@ public void fillChunk(@NotNull final FillContext context, @Override public void fillChunkUnordered(@NotNull final FillContext context, - @NotNull final WritableChunk destination, - @NotNull final LongChunk keyIndices) { + @NotNull final WritableChunk destination, + @NotNull final LongChunk keyIndices) { fillSparseChunkUnordered(destination, keyIndices); } @Override public void fillPrevChunkUnordered(@NotNull final FillContext context, - @NotNull final WritableChunk destination, - @NotNull final LongChunk keyIndices) { + @NotNull final WritableChunk destination, + @NotNull final LongChunk keyIndices) { fillSparsePrevChunkUnordered(destination, keyIndices); } @@ -691,37 +668,33 @@ public void fillPrevChunkUnordered(@NotNull final FillContext context, * @param position position that we require * @return the first position addressable by the chunk */ - public abstract long resetWritableChunkToBackingStore( - @NotNull final 
ResettableWritableChunk chunk, long position); + public abstract long resetWritableChunkToBackingStore(@NotNull final ResettableWritableChunk chunk, + long position); protected abstract void fillSparseChunk(@NotNull WritableChunk destination, - @NotNull OrderedKeys indices); + @NotNull OrderedKeys indices); protected abstract void fillSparsePrevChunk(@NotNull WritableChunk destination, - @NotNull OrderedKeys indices); + @NotNull OrderedKeys indices); - protected abstract void fillSparseChunkUnordered( - @NotNull WritableChunk destination, - @NotNull LongChunk indices); + protected abstract void fillSparseChunkUnordered(@NotNull WritableChunk destination, + @NotNull LongChunk indices); - protected abstract void fillSparsePrevChunkUnordered( - @NotNull WritableChunk destination, - @NotNull LongChunk indices); + protected abstract void fillSparsePrevChunkUnordered(@NotNull WritableChunk destination, + @NotNull LongChunk indices); @Override - public Chunk getChunk(@NotNull final GetContext context, - @NotNull final OrderedKeys orderedKeys) { + public Chunk getChunk(@NotNull final GetContext context, @NotNull final OrderedKeys orderedKeys) { final LongChunk ranges = orderedKeys.asKeyRangesChunk(); if (ranges.size() == 2) { final long first = ranges.get(0); final long last = ranges.get(1); if (getBlockNo(first) == getBlockNo(last)) { - // Optimization if the caller requests a single range which happens to fall within a - // single block + // Optimization if the caller requests a single range which happens to fall within a single block final int blockPos = getBlockNo(first); return DefaultGetContext.resetChunkFromArray(context, - getBlock(blockPos), (int) (first & INDEX_MASK), (int) (last - first + 1)); + getBlock(blockPos), (int) (first & INDEX_MASK), (int) (last - first + 1)); } } @@ -785,8 +758,8 @@ public void visit(GenericArray generic) { generic.componentType().walk(new Visitor() { @Override public void visit(StringType stringType) { - out = 
ArrayBackedColumnSource.getMemoryColumnSource( - generic.cast(stringType).values(), String.class, null); + out = ArrayBackedColumnSource.getMemoryColumnSource(generic.cast(stringType).values(), String.class, + null); } @Override @@ -798,8 +771,8 @@ public void visit(InstantType instantType) { if (value == null) { source.set(ix++, NULL_LONG); } else { - long nanos = Math.addExact( - TimeUnit.SECONDS.toNanos(value.getEpochSecond()), value.getNano()); + long nanos = + Math.addExact(TimeUnit.SECONDS.toNanos(value.getEpochSecond()), value.getNano()); source.set(ix++, nanos); } } @@ -810,17 +783,16 @@ public void visit(InstantType instantType) { public void visit(ArrayType arrayType) { // noinspection unchecked ArrayType tType = (ArrayType) arrayType; - out = - ArrayBackedColumnSource.getMemoryColumnSource(generic.cast(tType).values(), - tType.clazz(), arrayType.componentType().clazz()); + out = ArrayBackedColumnSource.getMemoryColumnSource(generic.cast(tType).values(), tType.clazz(), + arrayType.componentType().clazz()); } @Override public void visit(CustomType customType) { // noinspection unchecked CustomType tType = (CustomType) customType; - out = ArrayBackedColumnSource - .getMemoryColumnSource(generic.cast(tType).values(), tType.clazz(), null); + out = ArrayBackedColumnSource.getMemoryColumnSource(generic.cast(tType).values(), tType.clazz(), + null); } }); } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/ArraySourceHelper.java b/DB/src/main/java/io/deephaven/db/v2/sources/ArraySourceHelper.java index 35c942bb4fc..dfc6cf9402f 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/ArraySourceHelper.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/ArraySourceHelper.java @@ -19,9 +19,9 @@ abstract class ArraySourceHelper extends ArrayBackedColumnSource { /** - * The presence of a prevFlusher means that this ArraySource wants to track previous values. 
If - * prevFlusher is null, the ArraySource does not want (or does not yet want) to track previous - * values. Deserialized ArraySources never track previous values. + * The presence of a prevFlusher means that this ArraySource wants to track previous values. If prevFlusher is null, + * the ArraySource does not want (or does not yet want) to track previous values. Deserialized ArraySources never + * track previous values. */ protected transient UpdateCommitter> prevFlusher = null; private transient TIntArrayList prevAllocated = null; @@ -58,9 +58,9 @@ private interface CopyFromBlockFunctor { @Override public void fillPrevChunk( - @NotNull final ColumnSource.FillContext context, - @NotNull final WritableChunk destination, - @NotNull final OrderedKeys orderedKeys) { + @NotNull final ColumnSource.FillContext context, + @NotNull final WritableChunk destination, + @NotNull final OrderedKeys orderedKeys) { if (prevFlusher == null) { fillChunk(context, destination, orderedKeys); return; @@ -77,11 +77,10 @@ public void fillPrevChunk( CopyFromBlockFunctor lambda = (blockNo, srcOffset, length) -> { final long[] inUse = prevInUse[blockNo]; if (inUse != null) { - effectiveContext.copyKernel.conditionalCopy(destination, getBlock(blockNo), - getPrevBlock(blockNo), inUse, srcOffset, destOffset.intValue(), length); + effectiveContext.copyKernel.conditionalCopy(destination, getBlock(blockNo), getPrevBlock(blockNo), + inUse, srcOffset, destOffset.intValue(), length); } else { - destination.copyFromArray(getBlock(blockNo), srcOffset, destOffset.intValue(), - length); + destination.copyFromArray(getBlock(blockNo), srcOffset, destOffset.intValue(), length); } destOffset.add(length); }; @@ -109,8 +108,8 @@ public void fillPrevChunk( } /** - * Get the capacity of this column source. This number is one higher than the highest key that - * may be accessed (read or written). + * Get the capacity of this column source. 
This number is one higher than the highest key that may be accessed (read + * or written). * * @return The capacity of this column source */ @@ -125,10 +124,8 @@ final void ensureCapacity(final long capacity, UArray[] blocks, UArray[] prevBlo /** * This method supports the 'ensureCapacity' method for all of this class' inheritors. */ - final void ensureCapacity(final long capacity, UArray[] blocks, UArray[] prevBlocks, - boolean nullFilled) { - // Convert requested capacity to requestedMaxIndex and requestedNumBlocks, but leave early - // if the requested + final void ensureCapacity(final long capacity, UArray[] blocks, UArray[] prevBlocks, boolean nullFilled) { + // Convert requested capacity to requestedMaxIndex and requestedNumBlocks, but leave early if the requested // maxIndex is <= the current maxIndex. // // Rationale for this formula: @@ -138,8 +135,8 @@ final void ensureCapacity(final long capacity, UArray[] blocks, UArray[] prevBlo return; } final long requestedNumBlocksLong = (requestedMaxIndex + 1) >> LOG_BLOCK_SIZE; - final int requestedNumBlocks = LongSizedDataStructure - .intSize("ArrayBackedColumnSource block allocation", requestedNumBlocksLong); + final int requestedNumBlocks = + LongSizedDataStructure.intSize("ArrayBackedColumnSource block allocation", requestedNumBlocksLong); // If we don't have enough blocks, reallocate the array if (blocks.length < requestedNumBlocks) { @@ -155,17 +152,13 @@ final void ensureCapacity(final long capacity, UArray[] blocks, UArray[] prevBlo resetBlocks(blocks, prevBlocks); } - // We know how many blocks we have allocated by looking at maxIndex. This may well be less - // than the size of the + // We know how many blocks we have allocated by looking at maxIndex. This may well be less than the size of the // 'blocks' array because we only allocate blocks as needed. final int allocatedNumBlocks = (int) ((maxIndex + 1) >> LOG_BLOCK_SIZE); - // Allocate storage up to 'requestedNumBlocks' (not roundedNumBlocks). 
The difference is - // that the array size may - // double, but we only allocate the minimum number of blocks needed. Put another way, we - // only allocate blocks up - // to the requested capacity, not all the way up to (the capacity rounded to the next power - // of two). + // Allocate storage up to 'requestedNumBlocks' (not roundedNumBlocks). The difference is that the array size may + // double, but we only allocate the minimum number of blocks needed. Put another way, we only allocate blocks up + // to the requested capacity, not all the way up to (the capacity rounded to the next power of two). for (int ii = allocatedNumBlocks; ii < requestedNumBlocks; ++ii) { if (nullFilled) { blocks[ii] = allocateNullFilledBlock(BLOCK_SIZE); @@ -173,25 +166,23 @@ final void ensureCapacity(final long capacity, UArray[] blocks, UArray[] prevBlo blocks[ii] = allocateBlock(BLOCK_SIZE); } } - // Note: if we get this far, requestedMaxIndex > maxIndex, so this will always increase - // maxIndex. + // Note: if we get this far, requestedMaxIndex > maxIndex, so this will always increase maxIndex. maxIndex = requestedMaxIndex; } /** - * This method supports the 'set' method for its inheritors, doing some of the 'inUse' - * housekeeping that is common to all inheritors. + * This method supports the 'set' method for its inheritors, doing some of the 'inUse' housekeeping that is common + * to all inheritors. * - * @return true if the inheritor should copy a value from current to prev before setting - * current; false if it should just set a current value without touching prev. + * @return true if the inheritor should copy a value from current to prev before setting current; false if it should + * just set a current value without touching prev. 
*/ final boolean shouldRecordPrevious(final long key, final UArray[] prevBlocks, - final SoftRecycler recycler) { + final SoftRecycler recycler) { if (prevFlusher == null) { return false; } - // If we want to track previous values, we make sure we are registered with the - // LiveTableMonitor. + // If we want to track previous values, we make sure we are registered with the LiveTableMonitor. prevFlusher.maybeActivate(); final int block = (int) (key >> LOG_BLOCK_SIZE); @@ -231,18 +222,18 @@ final boolean shouldRecordPrevious(final long key, final UArray[] prevBlocks, final void startTrackingPrev(int numBlocks) { if (prevFlusher != null) { throw new IllegalStateException("Can't call startTrackingPrevValues() twice: " + - this.getClass().getCanonicalName()); + this.getClass().getCanonicalName()); } prevFlusher = new UpdateCommitter<>(this, ArraySourceHelper::commitBlocks); prevInUse = new long[numBlocks][]; } /** - * This method supports the 'getPrev' method for its inheritors, doing some of the 'inUse' - * housekeeping that is common to all inheritors. + * This method supports the 'getPrev' method for its inheritors, doing some of the 'inUse' housekeeping that is + * common to all inheritors. * - * @return true if the inheritor should return a value from its "prev" data structure; false if - * it should return a value from its "current" data structure. + * @return true if the inheritor should return a value from its "prev" data structure; false if it should return a + * value from its "current" data structure. 
*/ final boolean shouldUsePrevious(final long index) { if (prevFlusher == null) { @@ -289,9 +280,8 @@ public FillFromContext makeFillFromContext(int chunkCapacity) { } @Override - public void fillFromChunk(@NotNull FillFromContext context, - @NotNull Chunk src, - @NotNull OrderedKeys orderedKeys) { + public void fillFromChunk(@NotNull FillFromContext context, @NotNull Chunk src, + @NotNull OrderedKeys orderedKeys) { if (orderedKeys.getAverageRunLengthEstimate() < USE_RANGES_AVERAGE_RUN_LENGTH) { fillFromChunkByKeys(orderedKeys, src); } else { @@ -299,11 +289,9 @@ public void fillFromChunk(@NotNull FillFromContext context, } } - abstract void fillFromChunkByRanges(@NotNull OrderedKeys orderedKeys, - Chunk src); + abstract void fillFromChunkByRanges(@NotNull OrderedKeys orderedKeys, Chunk src); - abstract void fillFromChunkByKeys(@NotNull OrderedKeys orderedKeys, - Chunk src); + abstract void fillFromChunkByKeys(@NotNull OrderedKeys orderedKeys, Chunk src); abstract UArray allocateNullFilledBlock(int size); diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/BitMaskingColumnSource.java b/DB/src/main/java/io/deephaven/db/v2/sources/BitMaskingColumnSource.java index 86c5cc857b1..79248da9287 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/BitMaskingColumnSource.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/BitMaskingColumnSource.java @@ -12,14 +12,12 @@ import static io.deephaven.db.v2.sources.chunk.Attributes.Values; import static io.deephaven.util.QueryConstants.*; -public class BitMaskingColumnSource extends AbstractColumnSource - implements UngroupableColumnSource { +public class BitMaskingColumnSource extends AbstractColumnSource implements UngroupableColumnSource { private final CrossJoinShiftState shiftState; private final ColumnSource innerSource; - public BitMaskingColumnSource(final CrossJoinShiftState shiftState, - @NotNull final ColumnSource innerSource) { + public BitMaskingColumnSource(final CrossJoinShiftState shiftState, @NotNull 
final ColumnSource innerSource) { super(innerSource.getType()); this.shiftState = shiftState; this.innerSource = innerSource; @@ -185,129 +183,120 @@ public boolean isImmutable() { @Override public boolean isUngroupable() { return innerSource instanceof UngroupableColumnSource - && ((UngroupableColumnSource) innerSource).isUngroupable(); + && ((UngroupableColumnSource) innerSource).isUngroupable(); } @Override public long getUngroupedSize(long columnIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedSize(shiftState.getMasked(columnIndex)); + return ((UngroupableColumnSource) innerSource).getUngroupedSize(shiftState.getMasked(columnIndex)); } @Override public long getUngroupedPrevSize(long columnIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedPrevSize(shiftState.getPrevMasked(columnIndex)); + return ((UngroupableColumnSource) innerSource).getUngroupedPrevSize(shiftState.getPrevMasked(columnIndex)); } @Override public T getUngrouped(long columnIndex, int arrayIndex) { // noinspection unchecked - return (T) ((UngroupableColumnSource) innerSource) - .getUngrouped(shiftState.getMasked(columnIndex), arrayIndex); + return (T) ((UngroupableColumnSource) innerSource).getUngrouped(shiftState.getMasked(columnIndex), arrayIndex); } @Override public T getUngroupedPrev(long columnIndex, int arrayIndex) { // noinspection unchecked - return (T) ((UngroupableColumnSource) innerSource) - .getUngroupedPrev(shiftState.getPrevMasked(columnIndex), arrayIndex); + return (T) ((UngroupableColumnSource) innerSource).getUngroupedPrev(shiftState.getPrevMasked(columnIndex), + arrayIndex); } @Override public Boolean getUngroupedBoolean(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedBoolean(shiftState.getMasked(columnIndex), arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedBoolean(shiftState.getMasked(columnIndex), + arrayIndex); } @Override public Boolean 
getUngroupedPrevBoolean(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedPrevBoolean(shiftState.getPrevMasked(columnIndex), arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedPrevBoolean(shiftState.getPrevMasked(columnIndex), + arrayIndex); } @Override public double getUngroupedDouble(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedDouble(shiftState.getMasked(columnIndex), arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedDouble(shiftState.getMasked(columnIndex), + arrayIndex); } @Override public double getUngroupedPrevDouble(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedPrevDouble(shiftState.getPrevMasked(columnIndex), arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedPrevDouble(shiftState.getPrevMasked(columnIndex), + arrayIndex); } @Override public float getUngroupedFloat(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedFloat(shiftState.getMasked(columnIndex), arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedFloat(shiftState.getMasked(columnIndex), arrayIndex); } @Override public float getUngroupedPrevFloat(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedPrevFloat(shiftState.getPrevMasked(columnIndex), arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedPrevFloat(shiftState.getPrevMasked(columnIndex), + arrayIndex); } @Override public byte getUngroupedByte(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedByte(shiftState.getMasked(columnIndex), arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedByte(shiftState.getMasked(columnIndex), arrayIndex); } @Override public byte getUngroupedPrevByte(long columnIndex, int 
arrayIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedPrevByte(shiftState.getPrevMasked(columnIndex), arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedPrevByte(shiftState.getPrevMasked(columnIndex), + arrayIndex); } @Override public char getUngroupedChar(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedChar(shiftState.getMasked(columnIndex), arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedChar(shiftState.getMasked(columnIndex), arrayIndex); } @Override public char getUngroupedPrevChar(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedPrevChar(shiftState.getPrevMasked(columnIndex), arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedPrevChar(shiftState.getPrevMasked(columnIndex), + arrayIndex); } @Override public short getUngroupedShort(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedShort(shiftState.getMasked(columnIndex), arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedShort(shiftState.getMasked(columnIndex), arrayIndex); } @Override public short getUngroupedPrevShort(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedPrevShort(shiftState.getPrevMasked(columnIndex), arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedPrevShort(shiftState.getPrevMasked(columnIndex), + arrayIndex); } @Override public int getUngroupedInt(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedInt(shiftState.getMasked(columnIndex), arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedInt(shiftState.getMasked(columnIndex), arrayIndex); } @Override public int getUngroupedPrevInt(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource) - 
.getUngroupedPrevInt(shiftState.getPrevMasked(columnIndex), arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedPrevInt(shiftState.getPrevMasked(columnIndex), + arrayIndex); } @Override public long getUngroupedLong(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedLong(shiftState.getMasked(columnIndex), arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedLong(shiftState.getMasked(columnIndex), arrayIndex); } @Override public long getUngroupedPrevLong(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedPrevLong(shiftState.getPrevMasked(columnIndex), arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedPrevLong(shiftState.getPrevMasked(columnIndex), + arrayIndex); } @Override @@ -318,22 +307,21 @@ public void releaseCachedResources() { @Override public boolean allowsReinterpret( - @NotNull final Class alternateDataType) { + @NotNull final Class alternateDataType) { return innerSource.allowsReinterpret(alternateDataType); } @Override protected ColumnSource doReinterpret( - @NotNull Class alternateDataType) { + @NotNull Class alternateDataType) { // noinspection unchecked return new ReinterpretToOriginal(alternateDataType); } - private class ReinterpretToOriginal - extends BitMaskingColumnSource { + private class ReinterpretToOriginal extends BitMaskingColumnSource { private ReinterpretToOriginal(Class alternateDataType) { super(BitMaskingColumnSource.this.shiftState, - BitMaskingColumnSource.this.innerSource.reinterpret(alternateDataType)); + BitMaskingColumnSource.this.innerSource.reinterpret(alternateDataType)); } @Override @@ -343,7 +331,7 @@ public boolean allowsReinterpret(@NotNull Class alternateDataType) { @Override protected ColumnSource doReinterpret( - @NotNull Class alternateDataType) { + @NotNull Class alternateDataType) { // noinspection unchecked return (ColumnSource) 
BitMaskingColumnSource.this; } @@ -355,10 +343,10 @@ private static class FillContext implements ColumnSource.FillContext { private final ColumnSource.FillContext innerFillContext; private FillContext(final BitMaskingColumnSource cs, final int chunkCapacity, - final SharedContext sharedContext) { + final SharedContext sharedContext) { shareable = sharedContext == null ? new Shareable(false, chunkCapacity) - : sharedContext.getOrCreate(new SharingKey(cs.shiftState), - () -> new Shareable(true, chunkCapacity)); + : sharedContext.getOrCreate(new SharingKey(cs.shiftState), + () -> new Shareable(true, chunkCapacity)); if (cs.innerSource instanceof FillUnordered) { innerFillContext = cs.innerSource.makeFillContext(chunkCapacity, shareable); } else { @@ -376,8 +364,7 @@ public void close() { } } - private static final class SharingKey - extends SharedContext.ExactReferenceSharingKey { + private static final class SharingKey extends SharedContext.ExactReferenceSharingKey { private SharingKey(@NotNull final CrossJoinShiftState crossJoinShiftState) { super(crossJoinShiftState); @@ -398,7 +385,7 @@ private Shareable(final boolean shared, final int chunkCapacity) { } private void ensureMaskedKeysInitialized(@NotNull final CrossJoinShiftState shiftState, - final boolean usePrev, @NotNull final OrderedKeys orderedKeys) { + final boolean usePrev, @NotNull final OrderedKeys orderedKeys) { if (maskedKeysReusable) { return; } @@ -408,8 +395,8 @@ private void ensureMaskedKeysInitialized(@NotNull final CrossJoinShiftState shif maskedKeys.setSize(0); orderedKeys.forAllLongs((final long indexKey) -> { - final long innerIndexKey = usePrev ? shiftState.getPrevMasked(indexKey) - : shiftState.getMasked(indexKey); + final long innerIndexKey = + usePrev ? 
shiftState.getPrevMasked(indexKey) : shiftState.getMasked(indexKey); maskedKeys.add(innerIndexKey); }); @@ -437,24 +424,23 @@ public FillContext makeFillContext(final int chunkCapacity, final SharedContext @Override public void fillChunk(@NotNull final ColumnSource.FillContext context, - @NotNull final WritableChunk destination, - @NotNull final OrderedKeys orderedKeys) { + @NotNull final WritableChunk destination, + @NotNull final OrderedKeys orderedKeys) { doFillChunk(context, destination, orderedKeys, false); } @Override public void fillPrevChunk(@NotNull final ColumnSource.FillContext context, - @NotNull final WritableChunk destination, - @NotNull final OrderedKeys orderedKeys) { + @NotNull final WritableChunk destination, + @NotNull final OrderedKeys orderedKeys) { doFillChunk(context, destination, orderedKeys, true); } private void doFillChunk(@NotNull final ColumnSource.FillContext context, - @NotNull final WritableChunk destination, - @NotNull final OrderedKeys orderedKeys, - boolean usePrev) { - // TODO (nate): revisit and decide if it is worth generating all right-side indexes, - // sorting, compacting, + @NotNull final WritableChunk destination, + @NotNull final OrderedKeys orderedKeys, + boolean usePrev) { + // TODO (nate): revisit and decide if it is worth generating all right-side indexes, sorting, compacting, // and then permuting back. (Note: fillChunk takes orderedKeys which are unique.) 
final long sz = orderedKeys.size(); if (sz <= 0) { @@ -469,8 +455,7 @@ private void doFillChunk(@NotNull final ColumnSource.FillContext context, if (innerSource instanceof FillUnordered) { final FillUnordered cs = (FillUnordered) innerSource; if (usePrev) { - cs.fillPrevChunkUnordered(effectiveContext.innerFillContext, destination, - maskedKeys); + cs.fillPrevChunkUnordered(effectiveContext.innerFillContext, destination, maskedKeys); } else { cs.fillChunkUnordered(effectiveContext.innerFillContext, destination, maskedKeys); } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/BitShiftingColumnSource.java b/DB/src/main/java/io/deephaven/db/v2/sources/BitShiftingColumnSource.java index 18d8c1ab571..e6e6ca96f74 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/BitShiftingColumnSource.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/BitShiftingColumnSource.java @@ -15,14 +15,12 @@ import static io.deephaven.db.v2.sources.chunk.Attributes.*; import static io.deephaven.util.QueryConstants.*; -public class BitShiftingColumnSource extends AbstractColumnSource - implements UngroupableColumnSource { +public class BitShiftingColumnSource extends AbstractColumnSource implements UngroupableColumnSource { private final CrossJoinShiftState shiftState; private final ColumnSource innerSource; - public BitShiftingColumnSource(final CrossJoinShiftState shiftState, - @NotNull final ColumnSource innerSource) { + public BitShiftingColumnSource(final CrossJoinShiftState shiftState, @NotNull final ColumnSource innerSource) { super(innerSource.getType()); this.shiftState = shiftState; this.innerSource = innerSource; @@ -188,129 +186,122 @@ public boolean isImmutable() { @Override public boolean isUngroupable() { return innerSource instanceof UngroupableColumnSource - && ((UngroupableColumnSource) innerSource).isUngroupable(); + && ((UngroupableColumnSource) innerSource).isUngroupable(); } @Override public long getUngroupedSize(long columnIndex) { - return 
((UngroupableColumnSource) innerSource) - .getUngroupedSize(shiftState.getShifted(columnIndex)); + return ((UngroupableColumnSource) innerSource).getUngroupedSize(shiftState.getShifted(columnIndex)); } @Override public long getUngroupedPrevSize(long columnIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedPrevSize(shiftState.getPrevShifted(columnIndex)); + return ((UngroupableColumnSource) innerSource).getUngroupedPrevSize(shiftState.getPrevShifted(columnIndex)); } @Override public T getUngrouped(long columnIndex, int arrayIndex) { // noinspection unchecked - return (T) ((UngroupableColumnSource) innerSource) - .getUngrouped(shiftState.getShifted(columnIndex), arrayIndex); + return (T) ((UngroupableColumnSource) innerSource).getUngrouped(shiftState.getShifted(columnIndex), arrayIndex); } @Override public T getUngroupedPrev(long columnIndex, int arrayIndex) { // noinspection unchecked - return (T) ((UngroupableColumnSource) innerSource) - .getUngroupedPrev(shiftState.getPrevShifted(columnIndex), arrayIndex); + return (T) ((UngroupableColumnSource) innerSource).getUngroupedPrev(shiftState.getPrevShifted(columnIndex), + arrayIndex); } @Override public Boolean getUngroupedBoolean(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedBoolean(shiftState.getShifted(columnIndex), arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedBoolean(shiftState.getShifted(columnIndex), + arrayIndex); } @Override public Boolean getUngroupedPrevBoolean(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedPrevBoolean(shiftState.getPrevShifted(columnIndex), arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedPrevBoolean(shiftState.getPrevShifted(columnIndex), + arrayIndex); } @Override public double getUngroupedDouble(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource) - 
.getUngroupedDouble(shiftState.getShifted(columnIndex), arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedDouble(shiftState.getShifted(columnIndex), + arrayIndex); } @Override public double getUngroupedPrevDouble(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedPrevDouble(shiftState.getPrevShifted(columnIndex), arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedPrevDouble(shiftState.getPrevShifted(columnIndex), + arrayIndex); } @Override public float getUngroupedFloat(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedFloat(shiftState.getShifted(columnIndex), arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedFloat(shiftState.getShifted(columnIndex), + arrayIndex); } @Override public float getUngroupedPrevFloat(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedPrevFloat(shiftState.getPrevShifted(columnIndex), arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedPrevFloat(shiftState.getPrevShifted(columnIndex), + arrayIndex); } @Override public byte getUngroupedByte(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedByte(shiftState.getShifted(columnIndex), arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedByte(shiftState.getShifted(columnIndex), arrayIndex); } @Override public byte getUngroupedPrevByte(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedPrevByte(shiftState.getPrevShifted(columnIndex), arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedPrevByte(shiftState.getPrevShifted(columnIndex), + arrayIndex); } @Override public char getUngroupedChar(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource) - 
.getUngroupedChar(shiftState.getShifted(columnIndex), arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedChar(shiftState.getShifted(columnIndex), arrayIndex); } @Override public char getUngroupedPrevChar(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedPrevChar(shiftState.getPrevShifted(columnIndex), arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedPrevChar(shiftState.getPrevShifted(columnIndex), + arrayIndex); } @Override public short getUngroupedShort(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedShort(shiftState.getShifted(columnIndex), arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedShort(shiftState.getShifted(columnIndex), + arrayIndex); } @Override public short getUngroupedPrevShort(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedPrevShort(shiftState.getPrevShifted(columnIndex), arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedPrevShort(shiftState.getPrevShifted(columnIndex), + arrayIndex); } @Override public int getUngroupedInt(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedInt(shiftState.getShifted(columnIndex), arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedInt(shiftState.getShifted(columnIndex), arrayIndex); } @Override public int getUngroupedPrevInt(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedPrevInt(shiftState.getPrevShifted(columnIndex), arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedPrevInt(shiftState.getPrevShifted(columnIndex), + arrayIndex); } @Override public long getUngroupedLong(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedLong(shiftState.getShifted(columnIndex), arrayIndex); + 
return ((UngroupableColumnSource) innerSource).getUngroupedLong(shiftState.getShifted(columnIndex), arrayIndex); } @Override public long getUngroupedPrevLong(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedPrevLong(shiftState.getPrevShifted(columnIndex), arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedPrevLong(shiftState.getPrevShifted(columnIndex), + arrayIndex); } @Override @@ -321,22 +312,21 @@ public void releaseCachedResources() { @Override public boolean allowsReinterpret( - @NotNull final Class alternateDataType) { + @NotNull final Class alternateDataType) { return innerSource.allowsReinterpret(alternateDataType); } @Override protected ColumnSource doReinterpret( - @NotNull Class alternateDataType) { + @NotNull Class alternateDataType) { // noinspection unchecked return new ReinterpretToOriginal(alternateDataType); } - private class ReinterpretToOriginal - extends BitShiftingColumnSource { + private class ReinterpretToOriginal extends BitShiftingColumnSource { private ReinterpretToOriginal(Class alternateDataType) { super(BitShiftingColumnSource.this.shiftState, - BitShiftingColumnSource.this.innerSource.reinterpret(alternateDataType)); + BitShiftingColumnSource.this.innerSource.reinterpret(alternateDataType)); } @Override @@ -346,7 +336,7 @@ public boolean allowsReinterpret(@NotNull Class alternateDataType) { @Override protected ColumnSource doReinterpret( - @NotNull Class alternateDataType) { + @NotNull Class alternateDataType) { // noinspection unchecked return (ColumnSource) BitShiftingColumnSource.this; } @@ -359,10 +349,10 @@ private static class FillContext implements ColumnSource.FillContext { private final DupExpandKernel dupExpandKernel; private FillContext(final BitShiftingColumnSource cs, final int chunkCapacity, - final SharedContext sharedContext) { + final SharedContext sharedContext) { shareable = sharedContext == null ? 
new Shareable(false, chunkCapacity) - : sharedContext.getOrCreate(new SharingKey(cs.shiftState), - () -> new Shareable(true, chunkCapacity)); + : sharedContext.getOrCreate(new SharingKey(cs.shiftState), + () -> new Shareable(true, chunkCapacity)); innerFillContext = cs.innerSource.makeFillContext(chunkCapacity, shareable); dupExpandKernel = DupExpandKernel.makeDupExpand(cs.getChunkType()); } @@ -375,8 +365,7 @@ public void close() { } } - private static final class SharingKey - extends SharedContext.ExactReferenceSharingKey { + private static final class SharingKey extends SharedContext.ExactReferenceSharingKey { private SharingKey(@NotNull final CrossJoinShiftState crossJoinShiftState) { super(crossJoinShiftState); @@ -401,9 +390,8 @@ private Shareable(final boolean shared, final int chunkCapacity) { uniqueIndices = WritableLongChunk.makeWritableChunk(chunkCapacity); } - private void ensureKeysAndLengthsInitialized( - @NotNull final CrossJoinShiftState shiftState, final boolean usePrev, - @NotNull final OrderedKeys orderedKeys) { + private void ensureKeysAndLengthsInitialized(@NotNull final CrossJoinShiftState shiftState, + final boolean usePrev, @NotNull final OrderedKeys orderedKeys) { if (keysAndLengthsReusable) { return; } @@ -417,13 +405,12 @@ private void ensureKeysAndLengthsInitialized( orderedKeys.forAllLongs((final long indexKey) -> { final long lastInnerIndexKey = currentRunInnerIndexKey.longValue(); - final long innerIndexKey = usePrev ? shiftState.getPrevShifted(indexKey) - : shiftState.getShifted(indexKey); + final long innerIndexKey = + usePrev ? 
shiftState.getPrevShifted(indexKey) : shiftState.getShifted(indexKey); if (innerIndexKey != lastInnerIndexKey) { if (lastInnerIndexKey != Index.NULL_KEY) { uniqueIndices.set(currentRunPosition.intValue(), lastInnerIndexKey); - runLengths.set(currentRunPosition.intValue(), - currentRunLength.intValue()); + runLengths.set(currentRunPosition.intValue(), currentRunLength.intValue()); currentRunPosition.increment(); } currentRunLength.setValue(1); @@ -433,8 +420,7 @@ private void ensureKeysAndLengthsInitialized( } }); - uniqueIndices.set(currentRunPosition.intValue(), - currentRunInnerIndexKey.longValue()); + uniqueIndices.set(currentRunPosition.intValue(), currentRunInnerIndexKey.longValue()); runLengths.set(currentRunPosition.intValue(), currentRunLength.intValue()); uniqueIndices.setSize(currentRunPosition.intValue() + 1); runLengths.setSize(currentRunPosition.intValue() + 1); @@ -464,22 +450,22 @@ public FillContext makeFillContext(final int chunkCapacity, final SharedContext @Override public void fillChunk(@NotNull final ColumnSource.FillContext context, - @NotNull final WritableChunk destination, - @NotNull final OrderedKeys orderedKeys) { + @NotNull final WritableChunk destination, + @NotNull final OrderedKeys orderedKeys) { doFillChunk(context, destination, orderedKeys, false); } @Override public void fillPrevChunk(@NotNull final ColumnSource.FillContext context, - @NotNull final WritableChunk destination, - @NotNull final OrderedKeys orderedKeys) { + @NotNull final WritableChunk destination, + @NotNull final OrderedKeys orderedKeys) { doFillChunk(context, destination, orderedKeys, true); } private void doFillChunk(@NotNull final ColumnSource.FillContext context, - @NotNull final WritableChunk destination, - @NotNull final OrderedKeys orderedKeys, - boolean usePrev) { + @NotNull final WritableChunk destination, + @NotNull final OrderedKeys orderedKeys, + boolean usePrev) { final long sz = orderedKeys.size(); if (sz <= 0) { destination.setSize(0); @@ -487,11 
+473,10 @@ private void doFillChunk(@NotNull final ColumnSource.FillContext context, } final FillContext effectiveContext = (FillContext) context; - effectiveContext.shareable.ensureKeysAndLengthsInitialized(shiftState, usePrev, - orderedKeys); + effectiveContext.shareable.ensureKeysAndLengthsInitialized(shiftState, usePrev, orderedKeys); - try (final OrderedKeys innerOK = OrderedKeys - .wrapKeyIndicesChunkAsOrderedKeys(effectiveContext.shareable.uniqueIndices)) { + try (final OrderedKeys innerOK = + OrderedKeys.wrapKeyIndicesChunkAsOrderedKeys(effectiveContext.shareable.uniqueIndices)) { if (usePrev) { innerSource.fillPrevChunk(effectiveContext.innerFillContext, destination, innerOK); } else { @@ -500,6 +485,6 @@ private void doFillChunk(@NotNull final ColumnSource.FillContext context, } effectiveContext.dupExpandKernel.expandDuplicates(orderedKeys.intSize(), destination, - effectiveContext.shareable.runLengths); + effectiveContext.shareable.runLengths); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/BoxedColumnSource.java b/DB/src/main/java/io/deephaven/db/v2/sources/BoxedColumnSource.java index bec6317f7a7..a9678be13ec 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/BoxedColumnSource.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/BoxedColumnSource.java @@ -10,16 +10,15 @@ import org.jetbrains.annotations.NotNull; /** - * {@link ColumnSource} implementation for explicitly boxing a primitive into a more complex type, - * e.g. {@code byte} as {@link Boolean} or {@code long} as {@link DBDateTime}. + * {@link ColumnSource} implementation for explicitly boxing a primitive into a more complex type, e.g. {@code byte} as + * {@link Boolean} or {@code long} as {@link DBDateTime}. 
*/ public abstract class BoxedColumnSource extends AbstractColumnSource - implements MutableColumnSourceGetDefaults.ForObject { + implements MutableColumnSourceGetDefaults.ForObject { final ColumnSource originalSource; - BoxedColumnSource(@NotNull final Class dataType, - @NotNull final ColumnSource originalSource) { + BoxedColumnSource(@NotNull final Class dataType, @NotNull final ColumnSource originalSource) { super(dataType); this.originalSource = originalSource; } @@ -31,14 +30,14 @@ public abstract class BoxedColumnSource extends AbstractColumnSource< public abstract DATA_TYPE getPrev(long index); abstract void transformChunk(@NotNull final Chunk source, - @NotNull final WritableChunk destination); + @NotNull final WritableChunk destination); private static final class BoxedFillContext implements FillContext { private final GetContext originalGetContext; - private BoxedFillContext(@NotNull final ColumnSource originalSource, - final int chunkCapacity, final SharedContext sharedContext) { + private BoxedFillContext(@NotNull final ColumnSource originalSource, final int chunkCapacity, + final SharedContext sharedContext) { originalGetContext = originalSource.makeGetContext(chunkCapacity, sharedContext); } @@ -49,26 +48,23 @@ public final void close() { } @Override - public final FillContext makeFillContext(final int chunkCapacity, - final SharedContext sharedContext) { + public final FillContext makeFillContext(final int chunkCapacity, final SharedContext sharedContext) { return new BoxedFillContext(originalSource, chunkCapacity, sharedContext); } @Override public final void fillChunk(@NotNull final FillContext context, - @NotNull final WritableChunk destination, - @NotNull final OrderedKeys orderedKeys) { + @NotNull final WritableChunk destination, @NotNull final OrderedKeys orderedKeys) { final Chunk originalChunk = - originalSource.getChunk(((BoxedFillContext) context).originalGetContext, orderedKeys); + originalSource.getChunk(((BoxedFillContext) 
context).originalGetContext, orderedKeys); transformChunk(originalChunk, destination); } @Override public final void fillPrevChunk(@NotNull final FillContext context, - @NotNull final WritableChunk destination, - @NotNull final OrderedKeys orderedKeys) { - final Chunk originalChunk = originalSource - .getPrevChunk(((BoxedFillContext) context).originalGetContext, orderedKeys); + @NotNull final WritableChunk destination, @NotNull final OrderedKeys orderedKeys) { + final Chunk originalChunk = + originalSource.getPrevChunk(((BoxedFillContext) context).originalGetContext, orderedKeys); transformChunk(originalChunk, destination); } @@ -79,13 +75,13 @@ public final boolean isImmutable() { @Override public final boolean allowsReinterpret( - @NotNull final Class alternateDataType) { + @NotNull final Class alternateDataType) { return originalSource.getType() == alternateDataType; } @Override protected final ColumnSource doReinterpret( - @NotNull final Class alternateDataType) { + @NotNull final Class alternateDataType) { // noinspection unchecked return (ColumnSource) originalSource; } @@ -108,10 +104,9 @@ public final Boolean getPrev(final long index) { @Override final void transformChunk(@NotNull final Chunk source, - @NotNull final WritableChunk destination) { + @NotNull final WritableChunk destination) { final ByteChunk typedSource = source.asByteChunk(); - final WritableObjectChunk typedDestination = - destination.asWritableObjectChunk(); + final WritableObjectChunk typedDestination = destination.asWritableObjectChunk(); final int sourceSize = typedSource.size(); for (int pi = 0; pi < sourceSize; ++pi) { @@ -140,10 +135,10 @@ public final DBDateTime getPrev(final long index) { @Override final void transformChunk(@NotNull final Chunk source, - @NotNull final WritableChunk destination) { + @NotNull final WritableChunk destination) { final LongChunk typedSource = source.asLongChunk(); final WritableObjectChunk typedDestination = - destination.asWritableObjectChunk(); + 
destination.asWritableObjectChunk(); final int sourceSize = typedSource.size(); for (int pi = 0; pi < sourceSize; ++pi) { diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/ColumnSource.java b/DB/src/main/java/io/deephaven/db/v2/sources/ColumnSource.java index 7b4ce2998f9..1217fb9c11e 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/ColumnSource.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/ColumnSource.java @@ -22,11 +22,11 @@ * A "source" for column data - allows cell values to be looked up by (long) keys. * *

      - * Note for implementors: All {@link ColumnSource} implementations must map {@link Index#NULL_KEY} - * to a null value for all {@code get} and {@code getPrev} methods. + * Note for implementors: All {@link ColumnSource} implementations must map {@link Index#NULL_KEY} to a null value for + * all {@code get} and {@code getPrev} methods. */ public interface ColumnSource - extends DefaultChunkSource.WithPrev, ElementSource, TupleSource, Releasable { + extends DefaultChunkSource.WithPrev, ElementSource, TupleSource, Releasable { ColumnSource[] ZERO_LENGTH_COLUMN_SOURCE_ARRAY = new ColumnSource[0]; @@ -43,20 +43,17 @@ default ChunkType getChunkType() { return ChunkType.fromElementType(dataType); } - Index match(boolean invertMatch, boolean usePrev, boolean caseInsensitive, Index mapper, - final Object... keys); + Index match(boolean invertMatch, boolean usePrev, boolean caseInsensitive, Index mapper, final Object... keys); Map getValuesMapping(Index subRange); /** - * ColumnSource implementations that track previous values have the option to not actually start - * tracking previous values until this method is called. This is an option, not an obligation: - * some simple ColumnSource implementations (like TSingleValueSource for various T) always track - * previous values; other implementations (like PrevColumnSource) never do; some (like - * TArrayColumnSource) only start tracking once this method is called. + * ColumnSource implementations that track previous values have the option to not actually start tracking previous + * values until this method is called. This is an option, not an obligation: some simple ColumnSource + * implementations (like TSingleValueSource for various T) always track previous values; other implementations (like + * PrevColumnSource) never do; some (like TArrayColumnSource) only start tracking once this method is called. * - * An immutable column source can not have distinct prev values; therefore it is implemented as - * a no-op. 
+ * An immutable column source can not have distinct prev values; therefore it is implemented as a no-op. */ default void startTrackingPrevValues() { if (!isImmutable()) { @@ -80,18 +77,15 @@ default void startTrackingPrevValues() { Map getGroupToRange(Index index); /** - * Determine if this column source is immutable, meaning that the values at a given index key - * never change. + * Determine if this column source is immutable, meaning that the values at a given index key never change. * - * @return true if the values at a given index of the column source never change, false - * otherwise + * @return true if the values at a given index of the column source never change, false otherwise */ boolean isImmutable(); /** - * Release any resources held for caching purposes. Implementations need not guarantee that - * concurrent accesses are correct, as the purpose of this method is to ensure cleanup for - * column sources that will no longer be used. + * Release any resources held for caching purposes. Implementations need not guarantee that concurrent accesses are + * correct, as the purpose of this method is to ensure cleanup for column sources that will no longer be used. */ @Override @OverridingMethodsMustInvokeSuper @@ -103,11 +97,9 @@ default void releaseCachedResources() { * Test if a reinterpret call will succeed. * * @param alternateDataType The alternative type to consider - * @return If a reinterpret on this column source with the supplied alternateDataType will - * succeed. + * @return If a reinterpret on this column source with the supplied alternateDataType will succeed. */ - boolean allowsReinterpret( - @NotNull final Class alternateDataType); + boolean allowsReinterpret(@NotNull final Class alternateDataType); /** * Provide an alternative view into the data underlying this column source. 
@@ -117,8 +109,7 @@ boolean allowsReinterpret( * @throws IllegalArgumentException If the alternativeDataType supplied is not supported */ ColumnSource reinterpret( - @NotNull final Class alternateDataType) - throws IllegalArgumentException; + @NotNull final Class alternateDataType) throws IllegalArgumentException; @Override default List getColumnSources() { @@ -143,8 +134,7 @@ default T createTupleFromValues(@NotNull final Object... values) { @Override default void exportElement(final T tuple, final int elementIndex, - @NotNull final WritableSource writableSource, - final long destinationIndexKey) { + @NotNull final WritableSource writableSource, final long destinationIndexKey) { // noinspection unchecked writableSource.set(destinationIndexKey, (ELEMENT_TYPE) tuple); } @@ -166,24 +156,22 @@ default ChunkSource getPrevSource() { } /** - * Returns this {@code ColumnSource}, parameterized by {@code }, if the data type of this - * column (as given by {@link #getType()}) can be cast to {@code clazz}. This is analogous to - * casting the objects provided by this column source to {@code clazz}. + * Returns this {@code ColumnSource}, parameterized by {@code }, if the data type of this column (as given by + * {@link #getType()}) can be cast to {@code clazz}. This is analogous to casting the objects provided by this + * column source to {@code clazz}. *

      - * For example, the following code will throw an exception if the "MyString" column does not - * actually contain {@code String} data: + * For example, the following code will throw an exception if the "MyString" column does not actually contain + * {@code String} data: * *

            *     ColumnSource<String> colSource = table.getColumnSource("MyString").getParameterized(String.class)
            * 
      *

      - * Due to the nature of type erasure, the JVM will still insert an additional cast to - * {@code TYPE} when elements are retrieved from the column source, such as with - * {@code String myStr = colSource.get(0)}. + * Due to the nature of type erasure, the JVM will still insert an additional cast to {@code TYPE} when elements are + * retrieved from the column source, such as with {@code String myStr = colSource.get(0)}. * * @param clazz The target type. - * @param The target type, as a type parameter. Intended to be inferred from - * {@code clazz}. + * @param The target type, as a type parameter. Intended to be inferred from {@code clazz}. * @return A {@code ColumnSource} parameterized by {@code TYPE}. */ default ColumnSource cast(Class clazz) { @@ -191,8 +179,8 @@ default ColumnSource cast(Class clazz) { final Class columnSourceType = getType(); if (!clazz.isAssignableFrom(columnSourceType)) { throw new ClassCastException( - "Cannot convert column source for type " + columnSourceType.getName() + " to " + - "type " + clazz.getName()); + "Cannot convert column source for type " + columnSourceType.getName() + " to " + + "type " + clazz.getName()); } // noinspection unchecked return (ColumnSource) this; diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/ColumnSourceGetDefaults.java b/DB/src/main/java/io/deephaven/db/v2/sources/ColumnSourceGetDefaults.java index db0783fa964..4550ce14a99 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/ColumnSourceGetDefaults.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/ColumnSourceGetDefaults.java @@ -6,8 +6,8 @@ import static io.deephaven.util.type.TypeUtils.box; /** - * Defaulted interfaces for various base {@link ColumnSource} types, in order to avoid having - * defaults at higher levels in the class hierarchy. + * Defaulted interfaces for various base {@link ColumnSource} types, in order to avoid having defaults at higher levels + * in the class hierarchy. 
*/ public final class ColumnSourceGetDefaults { diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/CrossJoinRightColumnSource.java b/DB/src/main/java/io/deephaven/db/v2/sources/CrossJoinRightColumnSource.java index 3f707b4a28e..08b43f8f39b 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/CrossJoinRightColumnSource.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/CrossJoinRightColumnSource.java @@ -25,15 +25,14 @@ import static io.deephaven.util.QueryConstants.*; -public class CrossJoinRightColumnSource extends AbstractColumnSource - implements UngroupableColumnSource { +public class CrossJoinRightColumnSource extends AbstractColumnSource implements UngroupableColumnSource { private final boolean rightIsLive; private final CrossJoinStateManager crossJoinManager; protected final ColumnSource innerSource; public CrossJoinRightColumnSource(@NotNull final CrossJoinStateManager crossJoinManager, - @NotNull final ColumnSource innerSource, boolean rightIsLive) { + @NotNull final ColumnSource innerSource, boolean rightIsLive) { super(innerSource.getType()); this.rightIsLive = rightIsLive; this.crossJoinManager = crossJoinManager; @@ -200,7 +199,7 @@ public boolean isImmutable() { @Override public boolean isUngroupable() { return innerSource instanceof UngroupableColumnSource - && ((UngroupableColumnSource) innerSource).isUngroupable(); + && ((UngroupableColumnSource) innerSource).isUngroupable(); } @Override @@ -210,118 +209,99 @@ public long getUngroupedSize(long columnIndex) { @Override public long getUngroupedPrevSize(long columnIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedPrevSize(redirectPrev(columnIndex)); + return ((UngroupableColumnSource) innerSource).getUngroupedPrevSize(redirectPrev(columnIndex)); } @Override public T getUngrouped(long columnIndex, int arrayIndex) { // noinspection unchecked - return (T) ((UngroupableColumnSource) innerSource).getUngrouped(redirect(columnIndex), - arrayIndex); + return (T) 
((UngroupableColumnSource) innerSource).getUngrouped(redirect(columnIndex), arrayIndex); } @Override public T getUngroupedPrev(long columnIndex, int arrayIndex) { // noinspection unchecked - return (T) ((UngroupableColumnSource) innerSource) - .getUngroupedPrev(redirectPrev(columnIndex), arrayIndex); + return (T) ((UngroupableColumnSource) innerSource).getUngroupedPrev(redirectPrev(columnIndex), arrayIndex); } @Override public Boolean getUngroupedBoolean(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource).getUngroupedBoolean(redirect(columnIndex), - arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedBoolean(redirect(columnIndex), arrayIndex); } @Override public Boolean getUngroupedPrevBoolean(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedPrevBoolean(redirectPrev(columnIndex), arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedPrevBoolean(redirectPrev(columnIndex), arrayIndex); } @Override public double getUngroupedDouble(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource).getUngroupedDouble(redirect(columnIndex), - arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedDouble(redirect(columnIndex), arrayIndex); } @Override public double getUngroupedPrevDouble(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedPrevDouble(redirectPrev(columnIndex), arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedPrevDouble(redirectPrev(columnIndex), arrayIndex); } @Override public float getUngroupedFloat(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource).getUngroupedFloat(redirect(columnIndex), - arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedFloat(redirect(columnIndex), arrayIndex); } @Override public float getUngroupedPrevFloat(long columnIndex, int arrayIndex) { - 
return ((UngroupableColumnSource) innerSource) - .getUngroupedPrevFloat(redirectPrev(columnIndex), arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedPrevFloat(redirectPrev(columnIndex), arrayIndex); } @Override public byte getUngroupedByte(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource).getUngroupedByte(redirect(columnIndex), - arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedByte(redirect(columnIndex), arrayIndex); } @Override public byte getUngroupedPrevByte(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedPrevByte(redirectPrev(columnIndex), arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedPrevByte(redirectPrev(columnIndex), arrayIndex); } @Override public char getUngroupedChar(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource).getUngroupedChar(redirect(columnIndex), - arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedChar(redirect(columnIndex), arrayIndex); } @Override public char getUngroupedPrevChar(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedPrevChar(redirectPrev(columnIndex), arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedPrevChar(redirectPrev(columnIndex), arrayIndex); } @Override public short getUngroupedShort(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource).getUngroupedShort(redirect(columnIndex), - arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedShort(redirect(columnIndex), arrayIndex); } @Override public short getUngroupedPrevShort(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedPrevShort(redirectPrev(columnIndex), arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedPrevShort(redirectPrev(columnIndex), arrayIndex); } 
@Override public int getUngroupedInt(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource).getUngroupedInt(redirect(columnIndex), - arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedInt(redirect(columnIndex), arrayIndex); } @Override public int getUngroupedPrevInt(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedPrevInt(redirectPrev(columnIndex), arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedPrevInt(redirectPrev(columnIndex), arrayIndex); } @Override public long getUngroupedLong(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource).getUngroupedLong(redirect(columnIndex), - arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedLong(redirect(columnIndex), arrayIndex); } @Override public long getUngroupedPrevLong(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedPrevLong(redirectPrev(columnIndex), arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedPrevLong(redirectPrev(columnIndex), arrayIndex); } @Override @@ -332,23 +312,21 @@ public void releaseCachedResources() { @Override public boolean allowsReinterpret( - @NotNull final Class alternateDataType) { + @NotNull final Class alternateDataType) { return innerSource.allowsReinterpret(alternateDataType); } @Override protected ColumnSource doReinterpret( - @NotNull Class alternateDataType) { + @NotNull Class alternateDataType) { // noinspection unchecked return new ReinterpretToOriginal(alternateDataType); } - private class ReinterpretToOriginal - extends CrossJoinRightColumnSource { + private class ReinterpretToOriginal extends CrossJoinRightColumnSource { private ReinterpretToOriginal(Class alternateDataType) { super(CrossJoinRightColumnSource.this.crossJoinManager, - CrossJoinRightColumnSource.this.innerSource.reinterpret(alternateDataType), - rightIsLive); + 
CrossJoinRightColumnSource.this.innerSource.reinterpret(alternateDataType), rightIsLive); } @Override @@ -358,7 +336,7 @@ public boolean allowsReinterpret(@NotNull Class alternateDataType) { @Override protected ColumnSource doReinterpret( - @NotNull Class alternateDataType) { + @NotNull Class alternateDataType) { // noinspection unchecked return (ColumnSource) CrossJoinRightColumnSource.this; } @@ -371,15 +349,15 @@ public FillContext makeFillContext(final int chunkCapacity, final SharedContext @Override public void fillChunk(@NotNull final ColumnSource.FillContext context, - @NotNull final WritableChunk destination, - @NotNull final OrderedKeys orderedKeys) { + @NotNull final WritableChunk destination, + @NotNull final OrderedKeys orderedKeys) { doFillChunk(context, destination, orderedKeys, false); } @Override public void fillPrevChunk(@NotNull final ColumnSource.FillContext context, - @NotNull final WritableChunk destination, - @NotNull final OrderedKeys orderedKeys) { + @NotNull final WritableChunk destination, + @NotNull final OrderedKeys orderedKeys) { doFillChunk(context, destination, orderedKeys, true); } @@ -398,9 +376,9 @@ private long redirectPrev(long outerKey) { } private void doFillChunk(@NotNull final ColumnSource.FillContext context, - @NotNull final WritableChunk destination, - @NotNull final OrderedKeys orderedKeys, - final boolean usePrev) { + @NotNull final WritableChunk destination, + @NotNull final OrderedKeys orderedKeys, + final boolean usePrev) { final int size = orderedKeys.intSize(); if (size <= 0) { destination.setSize(0); @@ -408,8 +386,7 @@ private void doFillChunk(@NotNull final ColumnSource.FillContext context, } final FillContext effectiveContext = (FillContext) context; - effectiveContext.shareable.ensureMappedKeysInitialized(crossJoinManager, usePrev, - orderedKeys); + effectiveContext.shareable.ensureMappedKeysInitialized(crossJoinManager, usePrev, orderedKeys); if (innerSource instanceof FillUnordered) { 
effectiveContext.doUnorderedFill((FillUnordered) innerSource, usePrev, destination); @@ -429,13 +406,12 @@ private static class FillContext implements ColumnSource.FillContext { private final DupExpandKernel dupExpandKernel; private final PermuteKernel permuteKernel; - FillContext(final CrossJoinRightColumnSource cs, final int chunkCapacity, - final SharedContext sharedContext) { + FillContext(final CrossJoinRightColumnSource cs, final int chunkCapacity, final SharedContext sharedContext) { if (sharedContext == null) { shareable = new Shareable(cs.rightIsLive, false, chunkCapacity); } else { shareable = sharedContext.getOrCreate(new SharingKey(cs.crossJoinManager), - () -> new Shareable(cs.rightIsLive, true, chunkCapacity)); + () -> new Shareable(cs.rightIsLive, true, chunkCapacity)); } innerFillContext = cs.innerSource.makeFillContext(chunkCapacity, shareable); @@ -466,8 +442,7 @@ public void close() { } } - private static final class SharingKey - extends SharedContext.ExactReferenceSharingKey { + private static final class SharingKey extends SharedContext.ExactReferenceSharingKey { private SharingKey(@NotNull final CrossJoinStateManager crossJoinManager) { super(crossJoinManager); @@ -496,28 +471,24 @@ private static final class Shareable extends SharedContext { private boolean hasNulls; private OrderedKeys innerOrderedKeys; - private Shareable(final boolean rightIsLive, final boolean shared, - final int chunkCapacity) { + private Shareable(final boolean rightIsLive, final boolean shared, final int chunkCapacity) { this.rightIsLive = rightIsLive; this.shared = shared; mappedKeys = WritableLongChunk.makeWritableChunk(chunkCapacity); sortKernelContext = LongIntTimsortKernel.createContext(chunkCapacity); - sortedMappedKeys = - shared ? WritableLongChunk.makeWritableChunk(chunkCapacity) : mappedKeys; + sortedMappedKeys = shared ? 
WritableLongChunk.makeWritableChunk(chunkCapacity) : mappedKeys; mappedKeysOrder = WritableIntChunk.makeWritableChunk(chunkCapacity); - // Note that we can't just compact mappedKeys in place, in case we're sharing with - // another + // Note that we can't just compact mappedKeys in place, in case we're sharing with another // source with an inner source that is a FillUnordered. compactedMappedKeys = WritableLongChunk.makeWritableChunk(chunkCapacity); nonNullCompactedMappedKeys = ResettableWritableLongChunk.makeResettableChunk(); runLengths = WritableIntChunk.makeWritableChunk(chunkCapacity); } - private void ensureMappedKeysInitialized( - @NotNull final CrossJoinStateManager crossJoinManager, final boolean usePrev, - @NotNull final OrderedKeys orderedKeys) { + private void ensureMappedKeysInitialized(@NotNull final CrossJoinStateManager crossJoinManager, + final boolean usePrev, @NotNull final OrderedKeys orderedKeys) { if (mappedKeysReusable) { return; } @@ -540,40 +511,36 @@ private void ensureMappedKeysInitialized( Index rightGroup; if (usePrev) { - rightGroup = crossJoinManager - .getRightIndexFromPrevLeftIndex(lastLeftIndex.getValue()); + rightGroup = crossJoinManager.getRightIndexFromPrevLeftIndex(lastLeftIndex.getValue()); if (rightIsLive) { rightGroup = rightGroup.getPrevIndex(); } } else { - rightGroup = - crossJoinManager.getRightIndexFromLeftIndex(lastLeftIndex.getValue()); + rightGroup = crossJoinManager.getRightIndexFromLeftIndex(lastLeftIndex.getValue()); } final int alreadyWritten = postMapOffset.intValue(); final int inRightGroup = preMapOffset.intValue(); rightGroup.getKeysForPositions( - ChunkStream.of(mappedKeys, alreadyWritten, inRightGroup - alreadyWritten) - .iterator(), - destKey -> { - mappedKeys.set(postMapOffset.intValue(), destKey); - postMapOffset.increment(); - }); + ChunkStream.of(mappedKeys, alreadyWritten, inRightGroup - alreadyWritten).iterator(), + destKey -> { + mappedKeys.set(postMapOffset.intValue(), destKey); + 
postMapOffset.increment(); + }); if (usePrev && rightIsLive) { rightGroup.close(); } }; orderedKeys.forAllLongs(ii -> { - final long leftIndex = usePrev ? crossJoinManager.getPrevShifted(ii) - : crossJoinManager.getShifted(ii); + final long leftIndex = + usePrev ? crossJoinManager.getPrevShifted(ii) : crossJoinManager.getShifted(ii); if (leftIndex != lastLeftIndex.longValue()) { flush.run(); lastLeftIndex.setValue(leftIndex); } mappedKeys.set(preMapOffset.intValue(), - usePrev ? crossJoinManager.getPrevMasked(ii) - : crossJoinManager.getMasked(ii)); + usePrev ? crossJoinManager.getPrevMasked(ii) : crossJoinManager.getMasked(ii)); preMapOffset.increment(); }); flush.run(); @@ -622,8 +589,8 @@ private void ensureSortedFillContextInitialized() { hasNulls = compactedMappedKeys.get(0) == Index.NULL_KEY; final int keysToSkip = hasNulls ? 1 : 0; innerOrderedKeys = OrderedKeys.wrapKeyIndicesChunkAsOrderedKeys( - LongChunk.downcast(nonNullCompactedMappedKeys.resetFromTypedChunk( - compactedMappedKeys, keysToSkip, uniqueKeyCount - keysToSkip))); + LongChunk.downcast(nonNullCompactedMappedKeys.resetFromTypedChunk(compactedMappedKeys, + keysToSkip, uniqueKeyCount - keysToSkip))); sortedFillContextReusable = shared; } @@ -666,19 +633,18 @@ public void close() { } } - private void doUnorderedFill(@NotNull final FillUnordered innerSource, - final boolean usePrev, @NotNull final WritableChunk destination) { + private void doUnorderedFill(@NotNull final FillUnordered innerSource, final boolean usePrev, + @NotNull final WritableChunk destination) { if (usePrev) { - innerSource.fillPrevChunkUnordered(innerFillContext, destination, - shareable.mappedKeys); + innerSource.fillPrevChunkUnordered(innerFillContext, destination, shareable.mappedKeys); } else { innerSource.fillChunkUnordered(innerFillContext, destination, shareable.mappedKeys); } destination.setSize(shareable.totalKeyCount); } - private void doOrderedFillAndPermute(@NotNull final ColumnSource innerSource, - final boolean 
usePrev, @NotNull final WritableChunk destination) { + private void doOrderedFillAndPermute(@NotNull final ColumnSource innerSource, final boolean usePrev, + @NotNull final WritableChunk destination) { shareable.ensureSortedFillContextInitialized(); innerOrderedValues.setSize(shareable.uniqueKeyCount); @@ -686,8 +652,8 @@ private void doOrderedFillAndPermute(@NotNull final ColumnSource innerSource, final WritableChunk compactedOrderedValuesDestination; if (shareable.hasNulls) { innerOrderedValues.fillWithNullValue(0, 1); - compactedOrderedValuesDestination = innerOrderedValuesSlice - .resetFromChunk(innerOrderedValues, 1, shareable.uniqueKeyCount - 1); + compactedOrderedValuesDestination = + innerOrderedValuesSlice.resetFromChunk(innerOrderedValues, 1, shareable.uniqueKeyCount - 1); } else { compactedOrderedValuesDestination = innerOrderedValues; } @@ -695,16 +661,14 @@ private void doOrderedFillAndPermute(@NotNull final ColumnSource innerSource, // Read compacted, ordered keys if (usePrev) { innerSource.fillPrevChunk(innerFillContext, compactedOrderedValuesDestination, - shareable.innerOrderedKeys); + shareable.innerOrderedKeys); } else { - innerSource.fillChunk(innerFillContext, compactedOrderedValuesDestination, - shareable.innerOrderedKeys); + innerSource.fillChunk(innerFillContext, compactedOrderedValuesDestination, shareable.innerOrderedKeys); } // Expand unique values if necessary if (shareable.uniqueKeyCount != shareable.totalKeyCount) { - dupExpandKernel.expandDuplicates(shareable.totalKeyCount, innerOrderedValues, - shareable.runLengths); + dupExpandKernel.expandDuplicates(shareable.totalKeyCount, innerOrderedValues, shareable.runLengths); innerOrderedValues.setSize(shareable.totalKeyCount); } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/DateTimeArraySource.java b/DB/src/main/java/io/deephaven/db/v2/sources/DateTimeArraySource.java index ee0dfdfec8e..30d54fbe6da 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/DateTimeArraySource.java 
+++ b/DB/src/main/java/io/deephaven/db/v2/sources/DateTimeArraySource.java @@ -48,14 +48,14 @@ public void copy(ColumnSource sourceColumn, long sourceKey, long des @Override public boolean allowsReinterpret( - @NotNull final Class alternateDataType) { + @NotNull final Class alternateDataType) { return alternateDataType == long.class; } // the ArrayBackedColumnSource fillChunk can't handle changing the type @Override - public void fillChunk(@NotNull ColumnSource.FillContext context, - @NotNull WritableChunk dest, @NotNull OrderedKeys orderedKeys) { + public void fillChunk(@NotNull ColumnSource.FillContext context, @NotNull WritableChunk dest, + @NotNull OrderedKeys orderedKeys) { final ChunkFiller filler = dest.getChunkFiller(); if (orderedKeys.getAverageRunLengthEstimate() > USE_RANGES_AVERAGE_RUN_LENGTH) { filler.fillByRanges(this, orderedKeys, dest); @@ -65,8 +65,8 @@ public void fillChunk(@NotNull ColumnSource.FillContext context, } @Override - public void fillPrevChunk(@NotNull ColumnSource.FillContext context, - @NotNull WritableChunk dest, @NotNull OrderedKeys orderedKeys) { + public void fillPrevChunk(@NotNull ColumnSource.FillContext context, @NotNull WritableChunk dest, + @NotNull OrderedKeys orderedKeys) { final ChunkFiller filler = dest.getChunkFiller(); if (orderedKeys.getAverageRunLengthEstimate() > USE_RANGES_AVERAGE_RUN_LENGTH) { filler.fillPrevByRanges(this, orderedKeys, dest); @@ -81,40 +81,36 @@ public Chunk getChunk(@NotNull GetContext context, @NotNull OrderedKeys } @Override - public Chunk getPrevChunk(@NotNull GetContext context, - @NotNull OrderedKeys orderedKeys) { + public Chunk getPrevChunk(@NotNull GetContext context, @NotNull OrderedKeys orderedKeys) { return getPrevChunkByFilling(context, orderedKeys); } @Override protected void fillSparseChunk(@NotNull final WritableChunk destGeneric, - @NotNull final OrderedKeys indices) { + @NotNull final OrderedKeys indices) { super.fillSparseChunk(destGeneric, indices, DBTimeUtils::nanosToTime); } 
@Override protected void fillSparsePrevChunk(@NotNull final WritableChunk destGeneric, - @NotNull final OrderedKeys indices) { + @NotNull final OrderedKeys indices) { super.fillSparsePrevChunk(destGeneric, indices, DBTimeUtils::nanosToTime); } @Override - protected void fillSparseChunkUnordered( - @NotNull final WritableChunk destGeneric, - @NotNull final LongChunk indices) { + protected void fillSparseChunkUnordered(@NotNull final WritableChunk destGeneric, + @NotNull final LongChunk indices) { super.fillSparseChunkUnordered(destGeneric, indices, DBTimeUtils::nanosToTime); } @Override - protected void fillSparsePrevChunkUnordered( - @NotNull final WritableChunk destGeneric, - @NotNull final LongChunk indices) { + protected void fillSparsePrevChunkUnordered(@NotNull final WritableChunk destGeneric, + @NotNull final LongChunk indices) { super.fillSparsePrevChunkUnordered(destGeneric, indices, DBTimeUtils::nanosToTime); } @Override - public void fillFromChunkByRanges(@NotNull OrderedKeys orderedKeys, - Chunk src) { + public void fillFromChunkByRanges(@NotNull OrderedKeys orderedKeys, Chunk src) { super.fillFromChunkByRanges(orderedKeys, src, DBTimeUtils::nanos); } @@ -124,20 +120,20 @@ void fillFromChunkByKeys(@NotNull OrderedKeys orderedKeys, Chunk src, @NotNull LongChunk keys) { + public void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Chunk src, + @NotNull LongChunk keys) { super.fillFromChunkUnordered(src, keys, DBTimeUtils::nanos); } @Override protected ColumnSource doReinterpret( - @NotNull Class alternateDataType) { + @NotNull Class alternateDataType) { // noinspection unchecked return (ColumnSource) new ReinterpretedAsLong(); } private class ReinterpretedAsLong extends AbstractColumnSource - implements MutableColumnSourceGetDefaults.ForLong, FillUnordered, WritableSource { + implements MutableColumnSourceGetDefaults.ForLong, FillUnordered, WritableSource { private ReinterpretedAsLong() { super(long.class); } @@ -158,14 +154,13 @@ public 
long getPrevLong(long index) { } @Override - public boolean allowsReinterpret( - @NotNull Class alternateDataType) { + public boolean allowsReinterpret(@NotNull Class alternateDataType) { return alternateDataType == DBDateTime.class; } @Override protected ColumnSource doReinterpret( - @NotNull Class alternateDataType) { + @NotNull Class alternateDataType) { return (ColumnSource) DateTimeArraySource.this; } @@ -176,10 +171,9 @@ public FillContext makeFillContext(int chunkCapacity, SharedContext sharedContex @Override public void fillChunk(@NotNull final ColumnSource.FillContext context, - @NotNull final WritableChunk destination, - @NotNull final OrderedKeys orderedKeys) { - // can't defer this case to super as they will ultimately call a method on - // DateTimeArraySource instead of AbstractLongArraySource + @NotNull final WritableChunk destination, @NotNull final OrderedKeys orderedKeys) { + // can't defer this case to super as they will ultimately call a method on DateTimeArraySource instead of + // AbstractLongArraySource if (orderedKeys.getAverageRunLengthEstimate() < USE_RANGES_AVERAGE_RUN_LENGTH) { fillSparseLongChunk(destination, orderedKeys); } else { @@ -189,10 +183,9 @@ public void fillChunk(@NotNull final ColumnSource.FillContext context, @Override public void fillPrevChunk(@NotNull final ColumnSource.FillContext context, - @NotNull final WritableChunk destination, - @NotNull final OrderedKeys orderedKeys) { - // can't defer these two cases to super as they will ultimately call a method on - // DateTimeArraySource instead of AbstractLongArraySource + @NotNull final WritableChunk destination, @NotNull final OrderedKeys orderedKeys) { + // can't defer these two cases to super as they will ultimately call a method on DateTimeArraySource instead + // of AbstractLongArraySource if (prevFlusher == null) { fillChunk(context, destination, orderedKeys); return; @@ -208,15 +201,15 @@ public void fillPrevChunk(@NotNull final ColumnSource.FillContext context, 
@Override public void fillChunkUnordered(@NotNull final FillContext context, - @NotNull final WritableChunk destination, - @NotNull final LongChunk keyIndices) { + @NotNull final WritableChunk destination, + @NotNull final LongChunk keyIndices) { fillSparseLongChunkUnordered(destination, keyIndices); } @Override public void fillPrevChunkUnordered(@NotNull final FillContext context, - @NotNull final WritableChunk destination, - @NotNull final LongChunk keyIndices) { + @NotNull final WritableChunk destination, + @NotNull final LongChunk keyIndices) { fillSparsePrevLongChunkUnordered(destination, keyIndices); } @@ -240,11 +233,10 @@ public FillFromContext makeFillFromContext(int chunkCapacity) { } @Override - public void fillFromChunk(@NotNull FillFromContext context, - @NotNull Chunk src, - @NotNull OrderedKeys orderedKeys) { - // Note: we cannot call super.fillFromChunk here as that method will call the override - // versions that expect ObjectChunks. + public void fillFromChunk(@NotNull FillFromContext context, @NotNull Chunk src, + @NotNull OrderedKeys orderedKeys) { + // Note: we cannot call super.fillFromChunk here as that method will call the override versions that expect + // ObjectChunks. 
if (orderedKeys.getAverageRunLengthEstimate() < USE_RANGES_AVERAGE_RUN_LENGTH) { DateTimeArraySource.super.fillFromChunkByKeys(orderedKeys, src); } else { @@ -253,8 +245,8 @@ public void fillFromChunk(@NotNull FillFromContext context, } @Override - public void fillFromChunkUnordered(@NotNull FillFromContext context, - @NotNull Chunk src, @NotNull LongChunk keys) { + public void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Chunk src, + @NotNull LongChunk keys) { DateTimeArraySource.super.fillFromChunkUnordered(context, src, keys); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/DateTimeSparseArraySource.java b/DB/src/main/java/io/deephaven/db/v2/sources/DateTimeSparseArraySource.java index c385580d972..4dbd2923114 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/DateTimeSparseArraySource.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/DateTimeSparseArraySource.java @@ -21,7 +21,7 @@ * Array-backed ColumnSource for DBDateTimes. Allows reinterpret as long. 
*/ public class DateTimeSparseArraySource extends AbstractSparseLongArraySource - implements MutableColumnSourceGetDefaults.ForLongAsDateTime, DefaultChunkSource { + implements MutableColumnSourceGetDefaults.ForLongAsDateTime, DefaultChunkSource { public DateTimeSparseArraySource() { super(DBDateTime.class); @@ -44,7 +44,7 @@ public void copy(ColumnSource sourceColumn, long sourceKey, long des @Override public boolean allowsReinterpret( - @NotNull final Class alternateDataType) { + @NotNull final Class alternateDataType) { return alternateDataType == long.class; } @@ -52,7 +52,7 @@ public boolean allowsReinterpret( // the ArrayBackedColumnSource fillChunk can't handle changing the type @Override public void fillChunk(@NotNull FillContext context, @NotNull WritableChunk dest, - @NotNull OrderedKeys orderedKeys) { + @NotNull OrderedKeys orderedKeys) { final ChunkFiller filler = dest.getChunkFiller(); if (orderedKeys.getAverageRunLengthEstimate() > USE_RANGES_AVERAGE_RUN_LENGTH) { filler.fillByRanges(this, orderedKeys, dest); @@ -62,8 +62,8 @@ public void fillChunk(@NotNull FillContext context, @NotNull WritableChunk dest, @NotNull OrderedKeys orderedKeys) { + public void fillPrevChunk(@NotNull FillContext context, @NotNull WritableChunk dest, + @NotNull OrderedKeys orderedKeys) { final ChunkFiller filler = dest.getChunkFiller(); if (orderedKeys.getAverageRunLengthEstimate() > USE_RANGES_AVERAGE_RUN_LENGTH) { filler.fillPrevByRanges(this, orderedKeys, dest); @@ -78,16 +78,14 @@ public Chunk getChunk(@NotNull GetContext context, @NotNull OrderedKeys } @Override - public Chunk getPrevChunk(@NotNull GetContext context, - @NotNull OrderedKeys orderedKeys) { + public Chunk getPrevChunk(@NotNull GetContext context, @NotNull OrderedKeys orderedKeys) { return getPrevChunkByFilling(context, orderedKeys); } @Override void fillByUnorderedKeys(@NotNull WritableChunk dest, - @NotNull LongChunk keys) { - final WritableObjectChunk objectChunk = - dest.asWritableObjectChunk(); + 
@NotNull LongChunk keys) { + final WritableObjectChunk objectChunk = dest.asWritableObjectChunk(); for (int ii = 0; ii < keys.size();) { final long firstKey = keys.get(ii); if (firstKey == Index.NULL_KEY) { @@ -121,9 +119,8 @@ void fillByUnorderedKeys(@NotNull WritableChunk dest, } void fillPrevByUnorderedKeys(@NotNull WritableChunk dest, - @NotNull LongChunk keys) { - final WritableObjectChunk objectChunk = - dest.asWritableObjectChunk(); + @NotNull LongChunk keys) { + final WritableObjectChunk objectChunk = dest.asWritableObjectChunk(); for (int ii = 0; ii < keys.size();) { final long firstKey = keys.get(ii); if (firstKey == Index.NULL_KEY) { @@ -150,18 +147,15 @@ void fillPrevByUnorderedKeys(@NotNull WritableChunk dest, } final long[] prevInUse = (prevFlusher == null || this.prevInUse == null) ? null - : this.prevInUse.getInnermostBlockByKeyOrNull(firstKey); - final long[] prevBlock = - prevInUse == null ? null : prevBlocks.getInnermostBlockByKeyOrNull(firstKey); + : this.prevInUse.getInnermostBlockByKeyOrNull(firstKey); + final long[] prevBlock = prevInUse == null ? null : prevBlocks.getInnermostBlockByKeyOrNull(firstKey); while (ii <= lastII) { final int indexWithinBlock = (int) (keys.get(ii) & INDEX_MASK); final int indexWithinInUse = indexWithinBlock >> LOG_INUSE_BITSET_SIZE; final long maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK); final long[] blockToUse = - (prevInUse != null && (prevInUse[indexWithinInUse] & maskWithinInUse) != 0) - ? prevBlock - : block; + (prevInUse != null && (prevInUse[indexWithinInUse] & maskWithinInUse) != 0) ? prevBlock : block; final long nanos = blockToUse == null ? NULL_LONG : blockToUse[indexWithinBlock]; objectChunk.set(ii++, nanos == NULL_LONG ? 
null : new DBDateTime(nanos)); } @@ -170,8 +164,7 @@ void fillPrevByUnorderedKeys(@NotNull WritableChunk dest, } @Override - public void fillFromChunkByRanges(@NotNull OrderedKeys orderedKeys, - Chunk src) { + public void fillFromChunkByRanges(@NotNull OrderedKeys orderedKeys, Chunk src) { final ObjectChunk chunk = src.asObjectChunk(); final LongChunk ranges = orderedKeys.asKeyRangesChunk(); int offset = 0; @@ -237,8 +230,8 @@ public void fillFromChunkByKeys(@NotNull OrderedKeys orderedKeys, Chunk src, @NotNull LongChunk keys) { + public void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Chunk src, + @NotNull LongChunk keys) { if (keys.size() == 0) { return; } @@ -264,8 +257,7 @@ public void fillFromChunkUnordered(@NotNull FillFromContext context, throw new UnsupportedOperationException("Source chunk is an alias for target data"); } - // This conditional with its constant condition should be very friendly to the branch - // predictor. + // This conditional with its constant condition should be very friendly to the branch predictor. long key = keys.get(ii); do { @@ -279,8 +271,7 @@ public void fillFromChunkUnordered(@NotNull FillFromContext context, } final DBDateTime time = chunk.get(ii++); block[indexWithinBlock] = (time == null) ? 
NULL_LONG : time.getNanos(); - } while (ii < keys.size() && (key = keys.get(ii)) >= minKeyInCurrentBlock - && key <= maxKeyInCurrentBlock); + } while (ii < keys.size() && (key = keys.get(ii)) >= minKeyInCurrentBlock && key <= maxKeyInCurrentBlock); } } } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/FillFromUnordered.java b/DB/src/main/java/io/deephaven/db/v2/sources/FillFromUnordered.java index 51761cfa889..3ed8bfe0112 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/FillFromUnordered.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/FillFromUnordered.java @@ -8,16 +8,15 @@ public interface FillFromUnordered { /** - * Populates a contiguous portion of the given destination chunk with data corresponding to the - * keys from the given {@link LongChunk}. + * Populates a contiguous portion of the given destination chunk with data corresponding to the keys from the given + * {@link LongChunk}. * - * @param context A context containing all mutable/state related data used in retrieving the - * Chunk. + * @param context A context containing all mutable/state related data used in retrieving the Chunk. 
* @param dest The chunk to be populated according to {@code keys} * @param keys A chunk of individual, not assumed to be ordered keys to be fetched */ void fillFromChunkUnordered( - @NotNull WritableChunkSink.FillFromContext context, - @NotNull WritableChunk dest, - @NotNull LongChunk keys); + @NotNull WritableChunkSink.FillFromContext context, + @NotNull WritableChunk dest, + @NotNull LongChunk keys); } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/FillUnordered.java b/DB/src/main/java/io/deephaven/db/v2/sources/FillUnordered.java index 83511411ca3..8b7fdf28649 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/FillUnordered.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/FillUnordered.java @@ -8,30 +8,28 @@ public interface FillUnordered { /** - * Populates a contiguous portion of the given destination chunk with data corresponding to the - * keys from the given {@link LongChunk}. + * Populates a contiguous portion of the given destination chunk with data corresponding to the keys from the given + * {@link LongChunk}. * - * @param context A context containing all mutable/state related data used in retrieving the - * Chunk. + * @param context A context containing all mutable/state related data used in retrieving the Chunk. * @param dest The chunk to be populated according to {@code keys} * @param keys A chunk of individual, not assumed to be ordered keys to be fetched */ void fillChunkUnordered( - @NotNull ColumnSource.FillContext context, - @NotNull WritableChunk dest, - @NotNull LongChunk keys); + @NotNull ColumnSource.FillContext context, + @NotNull WritableChunk dest, + @NotNull LongChunk keys); /** - * Populates a contiguous portion of the given destination chunk with prev data corresponding to - * the keys from the given {@link LongChunk}. + * Populates a contiguous portion of the given destination chunk with prev data corresponding to the keys from the + * given {@link LongChunk}. 
* - * @param context A context containing all mutable/state related data used in retrieving the - * Chunk. + * @param context A context containing all mutable/state related data used in retrieving the Chunk. * @param dest The chunk to be populated according to {@code keys} * @param keys A chunk of individual, not assumed to be ordered keys to be fetched */ void fillPrevChunkUnordered( - @NotNull ColumnSource.FillContext context, - @NotNull WritableChunk dest, - @NotNull LongChunk keys); + @NotNull ColumnSource.FillContext context, + @NotNull WritableChunk dest, + @NotNull LongChunk keys); } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/ImmutableColumnSource.java b/DB/src/main/java/io/deephaven/db/v2/sources/ImmutableColumnSource.java index dcc3b4afc44..e2161040fe5 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/ImmutableColumnSource.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/ImmutableColumnSource.java @@ -1,9 +1,8 @@ package io.deephaven.db.v2.sources; /** - * Sub-interface of {@link ColumnSource} for implementations that always use return {@code true} - * from {@link #isImmutable()} and delegate all {@code getPrev*} methods to their current - * (non-previous) equivalents. + * Sub-interface of {@link ColumnSource} for implementations that always use return {@code true} from + * {@link #isImmutable()} and delegate all {@code getPrev*} methods to their current (non-previous) equivalents. 
*/ public interface ImmutableColumnSource extends ColumnSource { diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/ImmutableColumnSourceGetDefaults.java b/DB/src/main/java/io/deephaven/db/v2/sources/ImmutableColumnSourceGetDefaults.java index a7c57375c75..c4a50e64436 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/ImmutableColumnSourceGetDefaults.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/ImmutableColumnSourceGetDefaults.java @@ -3,8 +3,8 @@ import io.deephaven.db.tables.utils.DBDateTime; /** - * Defaulted interfaces for various immutable {@link ColumnSource} types, in order to avoid having - * defaults at higher levels in the class hierarchy. + * Defaulted interfaces for various immutable {@link ColumnSource} types, in order to avoid having defaults at higher + * levels in the class hierarchy. */ public final class ImmutableColumnSourceGetDefaults { @@ -12,14 +12,13 @@ public final class ImmutableColumnSourceGetDefaults { * Default interface for immutable Object {@link ColumnSource} implementations. */ public interface ForObject - extends ColumnSourceGetDefaults.ForObject, ImmutableColumnSource { + extends ColumnSourceGetDefaults.ForObject, ImmutableColumnSource { } /** * Default interface for immutable Boolean {@link ColumnSource} implementations. */ - public interface ForBoolean - extends ColumnSourceGetDefaults.ForBoolean, ImmutableColumnSource { + public interface ForBoolean extends ColumnSourceGetDefaults.ForBoolean, ImmutableColumnSource { } /** @@ -31,22 +30,19 @@ public interface ForByte extends ColumnSourceGetDefaults.ForByte, ImmutableColum /** * Default interface for immutable char {@link ColumnSource} implementations. */ - public interface ForChar - extends ColumnSourceGetDefaults.ForChar, ImmutableColumnSource { + public interface ForChar extends ColumnSourceGetDefaults.ForChar, ImmutableColumnSource { } /** * Default interface for immutable double {@link ColumnSource} implementations. 
*/ - public interface ForDouble - extends ColumnSourceGetDefaults.ForDouble, ImmutableColumnSource { + public interface ForDouble extends ColumnSourceGetDefaults.ForDouble, ImmutableColumnSource { } /** * Default interface for immutable float {@link ColumnSource} implementations. */ - public interface ForFloat - extends ColumnSourceGetDefaults.ForFloat, ImmutableColumnSource { + public interface ForFloat extends ColumnSourceGetDefaults.ForFloat, ImmutableColumnSource { } /** @@ -59,7 +55,7 @@ public interface ForInt extends ColumnSourceGetDefaults.ForInt, ImmutableColumnS * Default interface for immutable long-backed {@link ColumnSource} implementations. */ public interface LongBacked - extends ColumnSourceGetDefaults.LongBacked, ImmutableColumnSource { + extends ColumnSourceGetDefaults.LongBacked, ImmutableColumnSource { } /** @@ -72,13 +68,12 @@ public interface ForLong extends ColumnSourceGetDefaults.ForLong, ImmutableColum * Default interface for immutable {@link DBDateTime} {@link ColumnSource} implementations. */ public interface ForLongAsDateTime - extends ColumnSourceGetDefaults.ForLongAsDateTime, ImmutableColumnSource { + extends ColumnSourceGetDefaults.ForLongAsDateTime, ImmutableColumnSource { } /** * Default interface for immutable short {@link ColumnSource} implementations. */ - public interface ForShort - extends ColumnSourceGetDefaults.ForShort, ImmutableColumnSource { + public interface ForShort extends ColumnSourceGetDefaults.ForShort, ImmutableColumnSource { } } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/LogicalClock.java b/DB/src/main/java/io/deephaven/db/v2/sources/LogicalClock.java index f7b2c40fb47..2774bae6aaa 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/LogicalClock.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/LogicalClock.java @@ -16,8 +16,8 @@ *

      * *

      - * Each time {@link #startUpdateCycle()} is called, the clock transitions to the Updating state and - * the current {@link #currentValue() value} is incremented by one. + * Each time {@link #startUpdateCycle()} is called, the clock transitions to the Updating state and the current + * {@link #currentValue() value} is incremented by one. *

      * *

      @@ -34,14 +34,14 @@ public enum LogicalClock { public enum State { /** - * Clock state for logical timestamps when the associated - * {@link io.deephaven.db.tables.live.LiveTableMonitor} is propagating updates. + * Clock state for logical timestamps when the associated {@link io.deephaven.db.tables.live.LiveTableMonitor} + * is propagating updates. */ Updating, /** - * Clock state for logical timestamps when the associated - * {@link io.deephaven.db.tables.live.LiveTableMonitor} is not propagating updates. + * Clock state for logical timestamps when the associated {@link io.deephaven.db.tables.live.LiveTableMonitor} + * is not propagating updates. */ Idle } @@ -114,28 +114,25 @@ public final long startUpdateCycle() { /** * Increment the current step and set the clock state to {@link State#Idle idle}. * - * @implNote The clock must have been {@link State#Updating updating} before this method is - * called. + * @implNote The clock must have been {@link State#Updating updating} before this method is called. */ public final void completeUpdateCycle() { final long value = currentValue.get(); Assert.eq(getState(value), "getState(value)", State.Updating); - Assert.eq(currentValue.incrementAndGet(), "currentValue.incrementAndGet()", value + 1, - "value + 1"); + Assert.eq(currentValue.incrementAndGet(), "currentValue.incrementAndGet()", value + 1, "value + 1"); } /** * After we complete a table refresh, we must ensure that the logical clock is idle. * *

      - * The only valid possibilities are (1) we have completed the cycle, in which case we return; or - * (2) we have terminated the cycle early and have the same value as at the start of our - * updating cycle, in which case we complete the cycle. + * The only valid possibilities are (1) we have completed the cycle, in which case we return; or (2) we have + * terminated the cycle early and have the same value as at the start of our updating cycle, in which case we + * complete the cycle. *

      * *

      - * If our clock is any other value; then it was changed out from under us and we throw an - * exception. + * If our clock is any other value; then it was changed out from under us and we throw an exception. *

      * * @param updatingCycleValue the clock value at the end of {@link #startUpdateCycle} @@ -147,13 +144,12 @@ public final void ensureUpdateCycleCompleted(final long updatingCycleValue) { } if (value == updatingCycleValue) { ProcessEnvironment.getDefaultLog(LogicalClock.class).warn() - .append("LogicalClock cycle was not completed in normal operation, value=") - .append(value).endl(); + .append("LogicalClock cycle was not completed in normal operation, value=").append(value).endl(); completeUpdateCycle(); return; } throw new IllegalStateException("Inconsistent LogicalClock value at end of cycle, expected " - + (updatingCycleValue + 1) + ", encountered " + value); + + (updatingCycleValue + 1) + ", encountered " + value); } /** diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/MutableColumnSource.java b/DB/src/main/java/io/deephaven/db/v2/sources/MutableColumnSource.java index bd5ed3c604f..772d10611a0 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/MutableColumnSource.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/MutableColumnSource.java @@ -1,8 +1,8 @@ package io.deephaven.db.v2.sources; /** - * Sub-interface of {@link ColumnSource} for implementations that always use return {@code false} - * from {@link #isImmutable()}. + * Sub-interface of {@link ColumnSource} for implementations that always use return {@code false} from + * {@link #isImmutable()}. 
*/ public interface MutableColumnSource extends ColumnSource { diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/MutableColumnSourceGetDefaults.java b/DB/src/main/java/io/deephaven/db/v2/sources/MutableColumnSourceGetDefaults.java index 7535d625794..8c32eacd6c3 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/MutableColumnSourceGetDefaults.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/MutableColumnSourceGetDefaults.java @@ -6,8 +6,8 @@ import static io.deephaven.util.type.TypeUtils.box; /** - * Defaulted interfaces for various mutable {@link ColumnSource} types, in order to avoid having - * defaults at higher levels in the class hierarchy. + * Defaulted interfaces for various mutable {@link ColumnSource} types, in order to avoid having defaults at higher + * levels in the class hierarchy. */ public final class MutableColumnSourceGetDefaults { @@ -15,7 +15,7 @@ public final class MutableColumnSourceGetDefaults { * Default interface for mutable Object {@link ColumnSource} implementations. */ public interface ForObject - extends ColumnSourceGetDefaults.ForObject, MutableColumnSource { + extends ColumnSourceGetDefaults.ForObject, MutableColumnSource { @Override default Boolean getPrevBoolean(final long index) { @@ -61,8 +61,7 @@ default short getPrevShort(final long index) { /** * Default interface for mutable Boolean {@link ColumnSource} implementations. */ - public interface ForBoolean - extends ColumnSourceGetDefaults.ForBoolean, MutableColumnSource { + public interface ForBoolean extends ColumnSourceGetDefaults.ForBoolean, MutableColumnSource { @Override default Boolean getPrevBoolean(final long index) { @@ -154,8 +153,7 @@ default short getPrevShort(final long index) { /** * Default interface for mutable char {@link ColumnSource} implementations. 
*/ - public interface ForChar - extends ColumnSourceGetDefaults.ForChar, MutableColumnSource { + public interface ForChar extends ColumnSourceGetDefaults.ForChar, MutableColumnSource { @Override default Character getPrev(final long index) { @@ -201,8 +199,7 @@ default short getPrevShort(final long index) { /** * Default interface for mutable double {@link ColumnSource} implementations. */ - public interface ForDouble - extends ColumnSourceGetDefaults.ForDouble, MutableColumnSource { + public interface ForDouble extends ColumnSourceGetDefaults.ForDouble, MutableColumnSource { @Override default Double getPrev(final long index) { @@ -341,7 +338,7 @@ default short getPrevShort(final long index) { * Default interface for mutable long-backed {@link ColumnSource} implementations. */ public interface LongBacked - extends ColumnSourceGetDefaults.LongBacked, MutableColumnSource { + extends ColumnSourceGetDefaults.LongBacked, MutableColumnSource { default Boolean getPrevBoolean(final long index) { throw new UnsupportedOperationException(); @@ -392,8 +389,7 @@ default Long getPrev(final long index) { /** * Default interface for mutable {@link DBDateTime} {@link ColumnSource} implementations. */ - public interface ForLongAsDateTime - extends ColumnSourceGetDefaults.ForLongAsDateTime, LongBacked { + public interface ForLongAsDateTime extends ColumnSourceGetDefaults.ForLongAsDateTime, LongBacked { @Override default DBDateTime getPrev(final long index) { diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/NullValueColumnSource.java b/DB/src/main/java/io/deephaven/db/v2/sources/NullValueColumnSource.java index afb31846f91..8b44ba1073b 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/NullValueColumnSource.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/NullValueColumnSource.java @@ -28,36 +28,34 @@ * A column source that returns null for all keys. 
*/ @AbstractColumnSource.IsSerializable(value = true) -public class NullValueColumnSource extends AbstractColumnSource - implements ShiftData.ShiftCallback { +public class NullValueColumnSource extends AbstractColumnSource implements ShiftData.ShiftCallback { private static final KeyedObjectKey.Basic, Class>, NullValueColumnSource> KEY_TYPE = - new KeyedObjectKey.Basic, Class>, NullValueColumnSource>() { - @Override - public Pair, Class> getKey(NullValueColumnSource columnSource) { - // noinspection unchecked,rawtypes - return new Pair<>(columnSource.getType(), columnSource.getComponentType()); - } - }; + new KeyedObjectKey.Basic, Class>, NullValueColumnSource>() { + @Override + public Pair, Class> getKey(NullValueColumnSource columnSource) { + // noinspection unchecked,rawtypes + return new Pair<>(columnSource.getType(), columnSource.getComponentType()); + } + }; private static final KeyedObjectHashMap, Class>, NullValueColumnSource> INSTANCES = - new KeyedObjectHashMap<>(KEY_TYPE); + new KeyedObjectHashMap<>(KEY_TYPE); private static final ColumnSource BOOL_AS_BYTE_SOURCE = - new BooleanAsByteColumnSource(getInstance(Boolean.class, null)); + new BooleanAsByteColumnSource(getInstance(Boolean.class, null)); - public static NullValueColumnSource getInstance(Class clazz, - @Nullable final Class elementType) { + public static NullValueColumnSource getInstance(Class clazz, @Nullable final Class elementType) { // noinspection unchecked,rawtypes return (NullValueColumnSource) INSTANCES.putIfAbsent(new Pair<>(clazz, elementType), - p -> new NullValueColumnSource(clazz, elementType)); + p -> new NullValueColumnSource(clazz, elementType)); } public static Map> createColumnSourceMap(TableDefinition definition) { // noinspection unchecked return Arrays.stream(definition.getColumns()).collect(Collectors.toMap( - ColumnDefinition::getName, - c -> getInstance(c.getDataType(), c.getComponentType()), - (BinaryOperator>) Assert::neverInvoked, - LinkedHashMap::new)); + 
ColumnDefinition::getName, + c -> getInstance(c.getDataType(), c.getComponentType()), + (BinaryOperator>) Assert::neverInvoked, + LinkedHashMap::new)); } private NullValueColumnSource(Class type, @Nullable final Class elementType) { @@ -169,15 +167,15 @@ public boolean isImmutable() { @Override public boolean allowsReinterpret( - @NotNull final Class alternateDataType) { + @NotNull final Class alternateDataType) { return true; } @Override protected ColumnSource doReinterpret( - @NotNull final Class alternateDataType) { + @NotNull final Class alternateDataType) { if ((type == Boolean.class || type == boolean.class) && - (alternateDataType == byte.class || alternateDataType == Byte.class)) { + (alternateDataType == byte.class || alternateDataType == Byte.class)) { // noinspection unchecked return (ColumnSource) BOOL_AS_BYTE_SOURCE; } @@ -186,17 +184,15 @@ protected ColumnSource doReinterpret( } @Override - public void fillChunk(@NotNull FillContext context, - @NotNull WritableChunk destination, - @NotNull OrderedKeys orderedKeys) { + public void fillChunk(@NotNull FillContext context, @NotNull WritableChunk destination, + @NotNull OrderedKeys orderedKeys) { destination.setSize(orderedKeys.intSize()); destination.fillWithNullValue(0, orderedKeys.intSize()); } @Override public void fillPrevChunk(@NotNull FillContext context, - @NotNull WritableChunk destination, - @NotNull OrderedKeys orderedKeys) { + @NotNull WritableChunk destination, @NotNull OrderedKeys orderedKeys) { fillChunk(context, destination, orderedKeys); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/PrevColumnSource.java b/DB/src/main/java/io/deephaven/db/v2/sources/PrevColumnSource.java index 6fb4db2fa0d..dc6b6cf8b17 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/PrevColumnSource.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/PrevColumnSource.java @@ -10,8 +10,8 @@ import org.jetbrains.annotations.NotNull; /** - * Wrapper {@link ColumnSource} that maps current data 
accessors to previous data accessors (and - * disables previous data accessors). + * Wrapper {@link ColumnSource} that maps current data accessors to previous data accessors (and disables previous data + * accessors). */ @AbstractColumnSource.IsSerializable(value = true) public final class PrevColumnSource extends AbstractColumnSource { @@ -134,40 +134,36 @@ public final ChunkType getChunkType() { } @Override - public final GetContext makeGetContext(final int chunkCapacity, - final SharedContext sharedContext) { + public final GetContext makeGetContext(final int chunkCapacity, final SharedContext sharedContext) { return originalSource.makeGetContext(chunkCapacity, sharedContext); } @Override public final Chunk getChunk(@NotNull final GetContext context, - @NotNull final OrderedKeys orderedKeys) { + @NotNull final OrderedKeys orderedKeys) { return originalSource.getPrevChunk(context, orderedKeys); } @Override public final Chunk getPrevChunk(@NotNull final GetContext context, - @NotNull final OrderedKeys orderedKeys) { + @NotNull final OrderedKeys orderedKeys) { throw new UnsupportedOperationException(); } @Override - public final FillContext makeFillContext(final int chunkCapacity, - final SharedContext sharedContext) { + public final FillContext makeFillContext(final int chunkCapacity, final SharedContext sharedContext) { return originalSource.makeFillContext(chunkCapacity, sharedContext); } @Override public final void fillChunk(@NotNull final FillContext context, - @NotNull final WritableChunk destination, - @NotNull final OrderedKeys orderedKeys) { + @NotNull final WritableChunk destination, @NotNull final OrderedKeys orderedKeys) { originalSource.fillPrevChunk(context, destination, orderedKeys); } @Override public final void fillPrevChunk(@NotNull final FillContext context, - @NotNull final WritableChunk destination, - @NotNull final OrderedKeys orderedKeys) { + @NotNull final WritableChunk destination, @NotNull final OrderedKeys orderedKeys) { throw new 
UnsupportedOperationException(); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/ReadOnlyRedirectedColumnSource.java b/DB/src/main/java/io/deephaven/db/v2/sources/ReadOnlyRedirectedColumnSource.java index 264a1fff11f..a95ad33901d 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/ReadOnlyRedirectedColumnSource.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/ReadOnlyRedirectedColumnSource.java @@ -18,13 +18,12 @@ import static io.deephaven.db.v2.sources.chunk.Attributes.Values; import static io.deephaven.util.QueryConstants.*; -public class ReadOnlyRedirectedColumnSource extends AbstractColumnSource - implements UngroupableColumnSource { +public class ReadOnlyRedirectedColumnSource extends AbstractColumnSource implements UngroupableColumnSource { protected final RedirectionIndex redirectionIndex; protected final ColumnSource innerSource; public ReadOnlyRedirectedColumnSource(@NotNull final RedirectionIndex redirectionIndex, - @NotNull final ColumnSource innerSource) { + @NotNull final ColumnSource innerSource) { super(innerSource.getType()); this.redirectionIndex = redirectionIndex; this.innerSource = innerSource; @@ -194,129 +193,120 @@ public boolean isImmutable() { @Override public boolean isUngroupable() { return innerSource instanceof UngroupableColumnSource - && ((UngroupableColumnSource) innerSource).isUngroupable(); + && ((UngroupableColumnSource) innerSource).isUngroupable(); } @Override public long getUngroupedSize(long columnIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedSize(redirectionIndex.get(columnIndex)); + return ((UngroupableColumnSource) innerSource).getUngroupedSize(redirectionIndex.get(columnIndex)); } @Override public long getUngroupedPrevSize(long columnIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedPrevSize(redirectionIndex.getPrev(columnIndex)); + return ((UngroupableColumnSource) innerSource).getUngroupedPrevSize(redirectionIndex.getPrev(columnIndex)); } 
@Override public T getUngrouped(long columnIndex, int arrayIndex) { // noinspection unchecked - return (T) ((UngroupableColumnSource) innerSource) - .getUngrouped(redirectionIndex.get(columnIndex), arrayIndex); + return (T) ((UngroupableColumnSource) innerSource).getUngrouped(redirectionIndex.get(columnIndex), arrayIndex); } @Override public T getUngroupedPrev(long columnIndex, int arrayIndex) { // noinspection unchecked - return (T) ((UngroupableColumnSource) innerSource) - .getUngroupedPrev(redirectionIndex.getPrev(columnIndex), arrayIndex); + return (T) ((UngroupableColumnSource) innerSource).getUngroupedPrev(redirectionIndex.getPrev(columnIndex), + arrayIndex); } @Override public Boolean getUngroupedBoolean(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedBoolean(redirectionIndex.get(columnIndex), arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedBoolean(redirectionIndex.get(columnIndex), + arrayIndex); } @Override public Boolean getUngroupedPrevBoolean(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedPrevBoolean(redirectionIndex.getPrev(columnIndex), arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedPrevBoolean(redirectionIndex.getPrev(columnIndex), + arrayIndex); } @Override public double getUngroupedDouble(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedDouble(redirectionIndex.get(columnIndex), arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedDouble(redirectionIndex.get(columnIndex), + arrayIndex); } @Override public double getUngroupedPrevDouble(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedPrevDouble(redirectionIndex.getPrev(columnIndex), arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedPrevDouble(redirectionIndex.getPrev(columnIndex), + arrayIndex); } 
@Override public float getUngroupedFloat(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedFloat(redirectionIndex.get(columnIndex), arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedFloat(redirectionIndex.get(columnIndex), arrayIndex); } @Override public float getUngroupedPrevFloat(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedPrevFloat(redirectionIndex.getPrev(columnIndex), arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedPrevFloat(redirectionIndex.getPrev(columnIndex), + arrayIndex); } @Override public byte getUngroupedByte(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedByte(redirectionIndex.get(columnIndex), arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedByte(redirectionIndex.get(columnIndex), arrayIndex); } @Override public byte getUngroupedPrevByte(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedPrevByte(redirectionIndex.getPrev(columnIndex), arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedPrevByte(redirectionIndex.getPrev(columnIndex), + arrayIndex); } @Override public char getUngroupedChar(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedChar(redirectionIndex.get(columnIndex), arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedChar(redirectionIndex.get(columnIndex), arrayIndex); } @Override public char getUngroupedPrevChar(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedPrevChar(redirectionIndex.getPrev(columnIndex), arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedPrevChar(redirectionIndex.getPrev(columnIndex), + arrayIndex); } @Override public short getUngroupedShort(long columnIndex, int arrayIndex) { 
- return ((UngroupableColumnSource) innerSource) - .getUngroupedShort(redirectionIndex.get(columnIndex), arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedShort(redirectionIndex.get(columnIndex), arrayIndex); } @Override public short getUngroupedPrevShort(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedPrevShort(redirectionIndex.getPrev(columnIndex), arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedPrevShort(redirectionIndex.getPrev(columnIndex), + arrayIndex); } @Override public int getUngroupedInt(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedInt(redirectionIndex.get(columnIndex), arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedInt(redirectionIndex.get(columnIndex), arrayIndex); } @Override public int getUngroupedPrevInt(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedPrevInt(redirectionIndex.getPrev(columnIndex), arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedPrevInt(redirectionIndex.getPrev(columnIndex), + arrayIndex); } @Override public long getUngroupedLong(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedLong(redirectionIndex.get(columnIndex), arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedLong(redirectionIndex.get(columnIndex), arrayIndex); } @Override public long getUngroupedPrevLong(long columnIndex, int arrayIndex) { - return ((UngroupableColumnSource) innerSource) - .getUngroupedPrevLong(redirectionIndex.getPrev(columnIndex), arrayIndex); + return ((UngroupableColumnSource) innerSource).getUngroupedPrevLong(redirectionIndex.getPrev(columnIndex), + arrayIndex); } @Override @@ -327,15 +317,14 @@ public void releaseCachedResources() { @Override public boolean allowsReinterpret( - @NotNull final Class alternateDataType) { + 
@NotNull final Class alternateDataType) { return innerSource.allowsReinterpret(alternateDataType); } @Override protected ColumnSource doReinterpret( - @NotNull Class alternateDataType) { - if (TypeUtils.getUnboxedTypeIfBoxed(alternateDataType) == byte.class - && getType() == Boolean.class) { + @NotNull Class alternateDataType) { + if (TypeUtils.getUnboxedTypeIfBoxed(alternateDataType) == byte.class && getType() == Boolean.class) { return new ReinterpretToOriginalForBoolean<>(alternateDataType); } // noinspection unchecked @@ -343,10 +332,10 @@ && getType() == Boolean.class) { } private class ReinterpretToOriginal - extends ReadOnlyRedirectedColumnSource { + extends ReadOnlyRedirectedColumnSource { private ReinterpretToOriginal(Class alternateDataType) { super(ReadOnlyRedirectedColumnSource.this.redirectionIndex, - ReadOnlyRedirectedColumnSource.this.innerSource.reinterpret(alternateDataType)); + ReadOnlyRedirectedColumnSource.this.innerSource.reinterpret(alternateDataType)); } @Override @@ -356,14 +345,14 @@ public boolean allowsReinterpret(@NotNull Class alternateDataType) { @Override protected ColumnSource doReinterpret( - @NotNull Class alternateDataType) { + @NotNull Class alternateDataType) { // noinspection unchecked return (ColumnSource) ReadOnlyRedirectedColumnSource.this; } } private class ReinterpretToOriginalForBoolean - extends ReinterpretToOriginal { + extends ReinterpretToOriginal { private ReinterpretToOriginalForBoolean(Class alternateDataType) { super(alternateDataType); } @@ -397,21 +386,19 @@ public FillContext makeFillContext(final int chunkCapacity, final SharedContext @Override public void fillChunk(@NotNull final ColumnSource.FillContext context, - @NotNull final WritableChunk destination, - @NotNull final OrderedKeys orderedKeys) { + @NotNull final WritableChunk destination, @NotNull final OrderedKeys orderedKeys) { doFillChunk(context, destination, orderedKeys, false); } @Override public void fillPrevChunk(@NotNull final 
ColumnSource.FillContext context, - @NotNull final WritableChunk destination, - @NotNull final OrderedKeys orderedKeys) { + @NotNull final WritableChunk destination, @NotNull final OrderedKeys orderedKeys) { doFillChunk(context, destination, orderedKeys, true); } private void doFillChunk(@NotNull final ColumnSource.FillContext context, - @NotNull final WritableChunk destination, - @NotNull final OrderedKeys orderedKeys, final boolean usePrev) { + @NotNull final WritableChunk destination, @NotNull final OrderedKeys orderedKeys, + final boolean usePrev) { final int size = orderedKeys.intSize(); if (size <= 0) { destination.setSize(0); @@ -419,8 +406,7 @@ private void doFillChunk(@NotNull final ColumnSource.FillContext context, } final FillContext effectiveContext = (FillContext) context; - effectiveContext.shareable.ensureMappedKeysInitialized(redirectionIndex, usePrev, - orderedKeys); + effectiveContext.shareable.ensureMappedKeysInitialized(redirectionIndex, usePrev, orderedKeys); if (innerSource instanceof FillUnordered) { effectiveContext.doUnorderedFill((FillUnordered) innerSource, usePrev, destination); @@ -441,12 +427,12 @@ private static class FillContext implements ColumnSource.FillContext { private final PermuteKernel permuteKernel; private final boolean booleanNullByte; - FillContext(final ReadOnlyRedirectedColumnSource cs, final int chunkCapacity, - final SharedContext sharedContext, boolean booleanNullByte) { + FillContext(final ReadOnlyRedirectedColumnSource cs, final int chunkCapacity, final SharedContext sharedContext, + boolean booleanNullByte) { this.booleanNullByte = booleanNullByte; shareable = sharedContext == null ? 
new Shareable(false, cs, chunkCapacity) - : sharedContext.getOrCreate(new SharingKey(cs.redirectionIndex), - () -> new Shareable(true, cs, chunkCapacity)); + : sharedContext.getOrCreate(new SharingKey(cs.redirectionIndex), + () -> new Shareable(true, cs, chunkCapacity)); innerFillContext = cs.innerSource.makeFillContext(chunkCapacity, shareable); if (cs.innerSource instanceof FillUnordered) { @@ -476,8 +462,7 @@ public void close() { } } - private static final class SharingKey - extends SharedContext.ExactReferenceSharingKey { + private static final class SharingKey extends SharedContext.ExactReferenceSharingKey { private SharingKey(@NotNull final RedirectionIndex redirectionIndex) { super(redirectionIndex); @@ -506,29 +491,24 @@ private static final class Shareable extends SharedContext { private boolean hasNulls; private OrderedKeys innerOrderedKeys; - private Shareable(final boolean shared, final ReadOnlyRedirectedColumnSource cs, - final int chunkCapacity) { + private Shareable(final boolean shared, final ReadOnlyRedirectedColumnSource cs, final int chunkCapacity) { this.shared = shared; - redirectionIndexFillContext = - cs.redirectionIndex.makeFillContext(chunkCapacity, this); + redirectionIndexFillContext = cs.redirectionIndex.makeFillContext(chunkCapacity, this); mappedKeys = WritableLongChunk.makeWritableChunk(chunkCapacity); sortKernelContext = LongIntTimsortKernel.createContext(chunkCapacity); - sortedMappedKeys = - shared ? WritableLongChunk.makeWritableChunk(chunkCapacity) : mappedKeys; + sortedMappedKeys = shared ? WritableLongChunk.makeWritableChunk(chunkCapacity) : mappedKeys; mappedKeysOrder = WritableIntChunk.makeWritableChunk(chunkCapacity); - // Note that we can't just compact mappedKeys in place, in case we're sharing with - // another + // Note that we can't just compact mappedKeys in place, in case we're sharing with another // source with an inner source that is a FillUnordered. 
compactedMappedKeys = WritableLongChunk.makeWritableChunk(chunkCapacity); nonNullCompactedMappedKeys = ResettableWritableLongChunk.makeResettableChunk(); runLengths = WritableIntChunk.makeWritableChunk(chunkCapacity); } - private void ensureMappedKeysInitialized( - @NotNull final RedirectionIndex redirectionIndex, final boolean usePrev, - @NotNull final OrderedKeys orderedKeys) { + private void ensureMappedKeysInitialized(@NotNull final RedirectionIndex redirectionIndex, + final boolean usePrev, @NotNull final OrderedKeys orderedKeys) { if (mappedKeysReusable) { return; } @@ -540,11 +520,9 @@ private void ensureMappedKeysInitialized( Assert.gtZero(totalKeyCount, "totalKeyCount"); if (usePrev) { - redirectionIndex.fillPrevChunk(redirectionIndexFillContext, mappedKeys, - orderedKeys); + redirectionIndex.fillPrevChunk(redirectionIndexFillContext, mappedKeys, orderedKeys); } else { - redirectionIndex.fillChunk(redirectionIndexFillContext, mappedKeys, - orderedKeys); + redirectionIndex.fillChunk(redirectionIndexFillContext, mappedKeys, orderedKeys); } mappedKeysReusable = shared; @@ -591,8 +569,8 @@ private void ensureSortedFillContextInitialized() { hasNulls = compactedMappedKeys.get(0) == Index.NULL_KEY; final int keysToSkip = hasNulls ? 
1 : 0; innerOrderedKeys = OrderedKeys.wrapKeyIndicesChunkAsOrderedKeys( - LongChunk.downcast(nonNullCompactedMappedKeys.resetFromTypedChunk( - compactedMappedKeys, keysToSkip, uniqueKeyCount - keysToSkip))); + LongChunk.downcast(nonNullCompactedMappedKeys.resetFromTypedChunk(compactedMappedKeys, + keysToSkip, uniqueKeyCount - keysToSkip))); sortedFillContextReusable = shared; } @@ -636,19 +614,18 @@ public void close() { } } - private void doUnorderedFill(@NotNull final FillUnordered innerSource, - final boolean usePrev, @NotNull final WritableChunk destination) { + private void doUnorderedFill(@NotNull final FillUnordered innerSource, final boolean usePrev, + @NotNull final WritableChunk destination) { if (usePrev) { - innerSource.fillPrevChunkUnordered(innerFillContext, destination, - shareable.mappedKeys); + innerSource.fillPrevChunkUnordered(innerFillContext, destination, shareable.mappedKeys); } else { innerSource.fillChunkUnordered(innerFillContext, destination, shareable.mappedKeys); } destination.setSize(shareable.totalKeyCount); } - private void doOrderedFillAndPermute(@NotNull final ColumnSource innerSource, - final boolean usePrev, @NotNull final WritableChunk destination) { + private void doOrderedFillAndPermute(@NotNull final ColumnSource innerSource, final boolean usePrev, + @NotNull final WritableChunk destination) { shareable.ensureSortedFillContextInitialized(); innerOrderedValues.setSize(shareable.uniqueKeyCount); @@ -656,13 +633,12 @@ private void doOrderedFillAndPermute(@NotNull final ColumnSource innerSource, final WritableChunk compactedOrderedValuesDestination; if (shareable.hasNulls) { if (booleanNullByte) { - innerOrderedValues.asWritableByteChunk().fillWithValue(0, 1, - BooleanUtils.NULL_BOOLEAN_AS_BYTE); + innerOrderedValues.asWritableByteChunk().fillWithValue(0, 1, BooleanUtils.NULL_BOOLEAN_AS_BYTE); } else { innerOrderedValues.fillWithNullValue(0, 1); } - compactedOrderedValuesDestination = innerOrderedValuesSlice - 
.resetFromChunk(innerOrderedValues, 1, shareable.uniqueKeyCount - 1); + compactedOrderedValuesDestination = + innerOrderedValuesSlice.resetFromChunk(innerOrderedValues, 1, shareable.uniqueKeyCount - 1); } else { compactedOrderedValuesDestination = innerOrderedValues; } @@ -670,16 +646,14 @@ private void doOrderedFillAndPermute(@NotNull final ColumnSource innerSource, // Read compacted, ordered keys if (usePrev) { innerSource.fillPrevChunk(innerFillContext, compactedOrderedValuesDestination, - shareable.innerOrderedKeys); + shareable.innerOrderedKeys); } else { - innerSource.fillChunk(innerFillContext, compactedOrderedValuesDestination, - shareable.innerOrderedKeys); + innerSource.fillChunk(innerFillContext, compactedOrderedValuesDestination, shareable.innerOrderedKeys); } // Expand unique values if necessary if (shareable.uniqueKeyCount != shareable.totalKeyCount) { - dupExpandKernel.expandDuplicates(shareable.totalKeyCount, innerOrderedValues, - shareable.runLengths); + dupExpandKernel.expandDuplicates(shareable.totalKeyCount, innerOrderedValues, shareable.runLengths); innerOrderedValues.setSize(shareable.totalKeyCount); } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/RedirectedColumnSource.java b/DB/src/main/java/io/deephaven/db/v2/sources/RedirectedColumnSource.java index 8b2ab2877a3..805f2d41b2c 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/RedirectedColumnSource.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/RedirectedColumnSource.java @@ -11,24 +11,23 @@ import org.jetbrains.annotations.NotNull; /** - * A {@link ColumnSource} that provides a redirected view into another {@link ColumnSource} by - * mapping keys using a {@link RedirectionIndex}. + * A {@link ColumnSource} that provides a redirected view into another {@link ColumnSource} by mapping keys using a + * {@link RedirectionIndex}. 
*/ -public class RedirectedColumnSource extends ReadOnlyRedirectedColumnSource - implements WritableSource { +public class RedirectedColumnSource extends ReadOnlyRedirectedColumnSource implements WritableSource { private long maxInnerIndex; /** - * Create a type-appropriate RedirectedColumnSource for the supplied {@link RedirectionIndex} - * and inner {@link ColumnSource}. + * Create a type-appropriate RedirectedColumnSource for the supplied {@link RedirectionIndex} and inner + * {@link ColumnSource}. * * @param redirectionIndex The redirection index to use * @param innerSource The column source to redirect * @param maxInnerIndex The maximum index key available in innerSource */ public RedirectedColumnSource(@NotNull final RedirectionIndex redirectionIndex, - @NotNull final ColumnSource innerSource, - final long maxInnerIndex) { + @NotNull final ColumnSource innerSource, + final long maxInnerIndex) { super(redirectionIndex, innerSource); this.maxInnerIndex = maxInnerIndex; } @@ -101,8 +100,7 @@ private class RedirectionFillFrom implements FillFromContext { private RedirectionFillFrom(int chunkCapacity) { this.redirectionFillContext = redirectionIndex.makeFillContext(chunkCapacity, null); - this.innerFillFromContext = - ((WritableSource) innerSource).makeFillFromContext(chunkCapacity); + this.innerFillFromContext = ((WritableSource) innerSource).makeFillFromContext(chunkCapacity); this.redirections = WritableLongChunk.makeWritableChunk(chunkCapacity); } @@ -120,24 +118,24 @@ public FillFromContext makeFillFromContext(int chunkCapacity) { } @Override - public void fillFromChunk(@NotNull FillFromContext context, - @NotNull Chunk src, @NotNull OrderedKeys orderedKeys) { + public void fillFromChunk(@NotNull FillFromContext context, @NotNull Chunk src, + @NotNull OrderedKeys orderedKeys) { // noinspection unchecked final RedirectionFillFrom redirectionFillFrom = (RedirectionFillFrom) context; - redirectionIndex.fillChunk(redirectionFillFrom.redirectionFillContext, - 
redirectionFillFrom.redirections, orderedKeys); - ((WritableSource) innerSource).fillFromChunkUnordered( - redirectionFillFrom.innerFillFromContext, src, redirectionFillFrom.redirections); + redirectionIndex.fillChunk(redirectionFillFrom.redirectionFillContext, redirectionFillFrom.redirections, + orderedKeys); + ((WritableSource) innerSource).fillFromChunkUnordered(redirectionFillFrom.innerFillFromContext, src, + redirectionFillFrom.redirections); } @Override public void fillFromChunkUnordered(@NotNull FillFromContext context, - @NotNull Chunk src, @NotNull LongChunk keys) { + @NotNull Chunk src, @NotNull LongChunk keys) { // noinspection unchecked final RedirectionFillFrom redirectionFillFrom = (RedirectionFillFrom) context; redirectionIndex.fillChunkUnordered(redirectionFillFrom.redirectionFillContext, - redirectionFillFrom.redirections, keys); - ((WritableSource) innerSource).fillFromChunkUnordered( - redirectionFillFrom.innerFillFromContext, src, redirectionFillFrom.redirections); + redirectionFillFrom.redirections, keys); + ((WritableSource) innerSource).fillFromChunkUnordered(redirectionFillFrom.innerFillFromContext, src, + redirectionFillFrom.redirections); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/ReinterpretUtilities.java b/DB/src/main/java/io/deephaven/db/v2/sources/ReinterpretUtilities.java index 899d556d913..6ab6020cef8 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/ReinterpretUtilities.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/ReinterpretUtilities.java @@ -6,8 +6,7 @@ public class ReinterpretUtilities { /** - * Given a DateTime column source turn it into a long column source, either via reinterpretation - * or wrapping. + * Given a DateTime column source turn it into a long column source, either via reinterpretation or wrapping. 
* * @param source the source to turn into a long source * @@ -23,8 +22,7 @@ public static ColumnSource dateTimeToLongSource(ColumnSource source) { } /** - * Given a Boolean column source turn it into a byte column source, either via reinterpretation - * or wrapping. + * Given a Boolean column source turn it into a byte column source, either via reinterpretation or wrapping. * * @param source the source to turn into a byte source * @@ -40,8 +38,7 @@ public static ColumnSource booleanToByteSource(ColumnSource source) { } /** - * If source is something that we prefer to handle as a primitive, do the appropriate - * conversion. + * If source is something that we prefer to handle as a primitive, do the appropriate conversion. * * @param source The source to convert * @return If possible, the source converted to a primitive, otherwise the source @@ -64,24 +61,24 @@ public static ColumnSource maybeConvertToPrimitive(ColumnSource source) { * @return Reinterpret or box source back to the original type if possible */ public static ColumnSource convertToOriginal(@NotNull final Class originalType, - @NotNull final ColumnSource source) { + @NotNull final ColumnSource source) { if (originalType == Boolean.class) { if (source.getType() != byte.class) { throw new UnsupportedOperationException( - "Cannot convert column of type " + source.getType() + " to Boolean"); + "Cannot convert column of type " + source.getType() + " to Boolean"); } // noinspection unchecked return source.allowsReinterpret(Boolean.class) ? 
source.reinterpret(Boolean.class) - : new BoxedColumnSource.OfBoolean((ColumnSource) source); + : new BoxedColumnSource.OfBoolean((ColumnSource) source); } if (originalType == DBDateTime.class) { if (source.getType() != long.class) { throw new UnsupportedOperationException( - "Cannot convert column of type " + source.getType() + " to DBDateTime"); + "Cannot convert column of type " + source.getType() + " to DBDateTime"); } // noinspection unchecked return source.allowsReinterpret(DBDateTime.class) ? source.reinterpret(DBDateTime.class) - : new BoxedColumnSource.OfDateTime((ColumnSource) source); + : new BoxedColumnSource.OfDateTime((ColumnSource) source); } throw new UnsupportedOperationException("Unsupported original type " + originalType); } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/Releasable.java b/DB/src/main/java/io/deephaven/db/v2/sources/Releasable.java index f8cfa7c00d6..f20fc01f638 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/Releasable.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/Releasable.java @@ -8,8 +8,8 @@ public interface Releasable { /** - * Release any resources held for caching purposes. Implementations need not guarantee that they - * are safe for normal use concurrently with invocations of this method. + * Release any resources held for caching purposes. Implementations need not guarantee that they are safe for normal + * use concurrently with invocations of this method. */ @OverridingMethodsMustInvokeSuper default void releaseCachedResources() {} diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/ReplicateSourcesAndChunks.java b/DB/src/main/java/io/deephaven/db/v2/sources/ReplicateSourcesAndChunks.java index 6240892f9f1..1d7bc1fc70a 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/ReplicateSourcesAndChunks.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/ReplicateSourcesAndChunks.java @@ -39,20 +39,17 @@ public static void main(String... 
args) throws IOException { replicateSparseArraySources(); replicateSingleValues(); - ReplicatePrimitiveCode.charToAllButBooleanAndLong(CharacterArraySource.class, - ReplicatePrimitiveCode.MAIN_SRC); - ReplicatePrimitiveCode.charToAllButBoolean(CharAggregateColumnSource.class, - ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.charToAllButBooleanAndLong(CharacterArraySource.class, ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.charToAllButBoolean(CharAggregateColumnSource.class, ReplicatePrimitiveCode.MAIN_SRC); ReplicatePrimitiveCode.charToAllButBoolean(UngroupedCharArrayColumnSource.class, - ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.MAIN_SRC); ReplicatePrimitiveCode.charToAllButBoolean(UngroupedCharDbArrayColumnSource.class, - ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.MAIN_SRC); ReplicatePrimitiveCode.charToAllButBoolean(UngroupedBoxedCharDbArrayColumnSource.class, - ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.MAIN_SRC); ReplicatePrimitiveCode.charToAllButBoolean(UngroupedBoxedCharArrayColumnSource.class, - ReplicatePrimitiveCode.MAIN_SRC); - ReplicatePrimitiveCode.charToAllButBoolean(ImmutableCharArraySource.class, - ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.charToAllButBoolean(ImmutableCharArraySource.class, ReplicatePrimitiveCode.MAIN_SRC); ReplicatePrimitiveCode.charToAll(SizedCharChunk.class, ReplicatePrimitiveCode.MAIN_SRC); replicateChunks(); @@ -78,68 +75,65 @@ public static void main(String... 
args) throws IOException { } private static void replicateSingleValues() throws IOException { - ReplicatePrimitiveCode.charToAllButBoolean(CharacterSingleValueSource.class, - ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.charToAllButBoolean(CharacterSingleValueSource.class, ReplicatePrimitiveCode.MAIN_SRC); replicateObjectSingleValue(); } private static void replicateObjectSingleValue() throws IOException { - final String className = ReplicatePrimitiveCode - .charToObject(CharacterSingleValueSource.class, ReplicatePrimitiveCode.MAIN_SRC); + final String className = + ReplicatePrimitiveCode.charToObject(CharacterSingleValueSource.class, ReplicatePrimitiveCode.MAIN_SRC); final File classFile = new File(className); List lines = FileUtils.readLines(classFile, Charset.defaultCharset()); lines = globalReplacements(lines, - "class ObjectSingleValueSource", "class ObjectSingleValueSource", - "", "", - "ForObject", "ForObject", - "Object getObject", "T get", - "Object getPrevObject", "T getPrev", - "Object current", "T current", - "Object prev", "T prev", - "set\\(Object", "set(T", - "set\\(long key, Object", "set(long key, T", - "final ObjectChunk", "final ObjectChunk", - "unbox\\((.*)\\)", "$1"); + "class ObjectSingleValueSource", "class ObjectSingleValueSource", + "", "", + "ForObject", "ForObject", + "Object getObject", "T get", + "Object getPrevObject", "T getPrev", + "Object current", "T current", + "Object prev", "T prev", + "set\\(Object", "set(T", + "set\\(long key, Object", "set(long key, T", + "final ObjectChunk", "final ObjectChunk", + "unbox\\((.*)\\)", "$1"); lines = ReplicateUtilities.removeRegion(lines, "UnboxedSetter"); lines = ReplicateUtilities.replaceRegion(lines, "Constructor", Arrays.asList( - " public ObjectSingleValueSource(Class type) {", - " super(type);", - " current = null;", - " prev = null;", - " }")); + " public ObjectSingleValueSource(Class type) {", + " super(type);", + " current = null;", + " prev = null;", + " }")); 
FileUtils.writeLines(classFile, lines); } private static void replicateChunkColumnSource() throws IOException { - ReplicatePrimitiveCode.charToAllButBoolean(CharChunkColumnSource.class, - ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.charToAllButBoolean(CharChunkColumnSource.class, ReplicatePrimitiveCode.MAIN_SRC); replicateObjectChunkColumnSource(); } private static void replicateObjectChunkColumnSource() throws IOException { - final String className = ReplicatePrimitiveCode.charToObject(CharChunkColumnSource.class, - ReplicatePrimitiveCode.MAIN_SRC); + final String className = + ReplicatePrimitiveCode.charToObject(CharChunkColumnSource.class, ReplicatePrimitiveCode.MAIN_SRC); final File classFile = new File(className); List lines = FileUtils.readLines(classFile, Charset.defaultCharset()); lines = globalReplacements(lines, - "class ObjectChunkColumnSource", "class ObjectChunkColumnSource", - "", "", - "ForObject", "ForObject", - "Object getObject", "T get", - "Object current", "T current", - "ObjectChunk<\\? extends Attributes.Values>", - "ObjectChunk", - "QueryConstants.NULL_OBJECT", "null"); + "class ObjectChunkColumnSource", "class ObjectChunkColumnSource", + "", "", + "ForObject", "ForObject", + "Object getObject", "T get", + "Object current", "T current", + "ObjectChunk<\\? 
extends Attributes.Values>", "ObjectChunk", + "QueryConstants.NULL_OBJECT", "null"); lines = ReplicateUtilities.replaceRegion(lines, "constructor", Arrays.asList( - " protected ObjectChunkColumnSource(Class type, Class componentType) {", - " this(type, componentType, new TLongArrayList());", - " }", - "", - " protected ObjectChunkColumnSource(Class type, Class componentType, final TLongArrayList firstOffsetForData) {", - " super(type, componentType);", - " this.firstOffsetForData = firstOffsetForData;", - " }" + " protected ObjectChunkColumnSource(Class type, Class componentType) {", + " this(type, componentType, new TLongArrayList());", + " }", + "", + " protected ObjectChunkColumnSource(Class type, Class componentType, final TLongArrayList firstOffsetForData) {", + " super(type, componentType);", + " this.firstOffsetForData = firstOffsetForData;", + " }" )); @@ -150,7 +144,7 @@ private static void replicateSparseArraySources() throws IOException { replicateOneOrN(); ReplicatePrimitiveCode.charToAllButBooleanAndLong(CharacterSparseArraySource.class, - ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.MAIN_SRC); replicateSparseLongSource(); replicateSparseBooleanSource(); @@ -158,37 +152,34 @@ private static void replicateSparseArraySources() throws IOException { } private static void replicateChunks() throws IOException { - ReplicatePrimitiveCode.charToAllButBooleanAndByte(CharChunk.class, - ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.charToAllButBooleanAndByte(CharChunk.class, ReplicatePrimitiveCode.MAIN_SRC); replicateByteChunks(); replicateBooleanChunks(); replicateObjectChunks(); } private static void replicateByteChunks() throws IOException { - final String className = - ReplicatePrimitiveCode.charToByte(CharChunk.class, ReplicatePrimitiveCode.MAIN_SRC); + final String className = ReplicatePrimitiveCode.charToByte(CharChunk.class, ReplicatePrimitiveCode.MAIN_SRC); final File classFile = new File(className); List classLines = 
FileUtils.readLines(classFile, Charset.defaultCharset()); classLines = replaceRegion(classLines, "ApplyDecoder", Arrays.asList( - " public final T applyDecoder(ObjectDecoder decoder) {", - " return decoder.decode(data, offset, size);", - " }", - "", - " public final T applyDecoder(ObjectDecoder decoder, int offsetSrc, int length) {", - " return decoder.decode(data, offset + offsetSrc, length);", - " }")); + " public final T applyDecoder(ObjectDecoder decoder) {", + " return decoder.decode(data, offset, size);", + " }", + "", + " public final T applyDecoder(ObjectDecoder decoder, int offsetSrc, int length) {", + " return decoder.decode(data, offset + offsetSrc, length);", + " }")); classLines = replaceRegion(classLines, "ApplyDecoderImports", Collections.singletonList( - "import io.deephaven.util.codec.ObjectDecoder;")); + "import io.deephaven.util.codec.ObjectDecoder;")); FileUtils.writeLines(classFile, classLines); } private static void replicateBooleanChunks() throws IOException { - final String className = - ReplicatePrimitiveCode.charToBoolean(CharChunk.class, ReplicatePrimitiveCode.MAIN_SRC); + final String className = ReplicatePrimitiveCode.charToBoolean(CharChunk.class, ReplicatePrimitiveCode.MAIN_SRC); final File classFile = new File(className); List classLines = FileUtils.readLines(classFile, Charset.defaultCharset()); classLines = ReplicateUtilities.removeRegion(classLines, "BufferImports"); @@ -197,29 +188,28 @@ private static void replicateBooleanChunks() throws IOException { } private static void replicateObjectChunks() throws IOException { - final String className = - ReplicatePrimitiveCode.charToObject(CharChunk.class, ReplicatePrimitiveCode.MAIN_SRC); + final String className = ReplicatePrimitiveCode.charToObject(CharChunk.class, ReplicatePrimitiveCode.MAIN_SRC); final File classFile = new File(className); List lines = FileUtils.readLines(classFile, Charset.defaultCharset()); lines = globalReplacements(lines, - "Object\\[\\]", "T[]", - 
"ObjectChunk.downcast", "ObjectChunk.downcastTypeAndAttr", - "Object get", "T get", - " EMPTY", "ObjectChunk EMPTY", - "static T\\[\\] makeArray", "static T[] makeArray"); + "Object\\[\\]", "T[]", + "ObjectChunk.downcast", "ObjectChunk.downcastTypeAndAttr", + "Object get", "T get", + " EMPTY", "ObjectChunk EMPTY", + "static T\\[\\] makeArray", "static T[] makeArray"); lines = replaceRegion(lines, "makeArray", Arrays.asList( - " public static T[] makeArray(int capacity) {", - " if (capacity == 0) {", - " //noinspection unchecked", - " return (T[]) ArrayUtils.EMPTY_OBJECT_ARRAY;", - " }", - " //noinspection unchecked", - " return (T[])new Object[capacity];", - " }")); + " public static T[] makeArray(int capacity) {", + " if (capacity == 0) {", + " //noinspection unchecked", + " return (T[]) ArrayUtils.EMPTY_OBJECT_ARRAY;", + " }", + " //noinspection unchecked", + " return (T[])new Object[capacity];", + " }")); lines = ReplicateUtilities.removeRegion(lines, "BufferImports"); lines = ReplicateUtilities.removeRegion(lines, "CopyToBuffer"); lines = expandDowncast(lines, "ObjectChunk"); @@ -232,28 +222,28 @@ private static void replicateChunkChunks() throws IOException { } private static void replicateObjectChunkChunks() throws IOException { - final String className = ReplicatePrimitiveCode.charToObject(CharChunkChunk.class, - ReplicatePrimitiveCode.MAIN_SRC); + final String className = + ReplicatePrimitiveCode.charToObject(CharChunkChunk.class, ReplicatePrimitiveCode.MAIN_SRC); final File classFile = new File(className); List lines = FileUtils.readLines(classFile, Charset.defaultCharset()); lines = globalReplacements(lines, - "", "", - "ObjectChunkChunk EMPTY", "ObjectChunkChunk EMPTY", - "ObjectChunk\\[]", "ObjectChunk[]", - "Object\\[]\\[]", "T[][]", - "Object get", "T get", - "( +)innerData = ", "$1//noinspection unchecked" + "\n$1innerData = (T[][])"); + "", "", + "ObjectChunkChunk EMPTY", "ObjectChunkChunk EMPTY", + "ObjectChunk\\[]", "ObjectChunk[]", + 
"Object\\[]\\[]", "T[][]", + "Object get", "T get", + "( +)innerData = ", "$1//noinspection unchecked" + "\n$1innerData = (T[][])"); lines = expandDowncast(lines, "ObjectChunkChunk"); FileUtils.writeLines(classFile, lines); } private static void replicateWritableChunks() throws IOException { - final List files = ReplicatePrimitiveCode - .charToAllButBoolean(WritableCharChunk.class, ReplicatePrimitiveCode.MAIN_SRC); + final List files = + ReplicatePrimitiveCode.charToAllButBoolean(WritableCharChunk.class, ReplicatePrimitiveCode.MAIN_SRC); for (String fileName : files) { final File classFile = new File(fileName); List lines = FileUtils.readLines(classFile, Charset.defaultCharset()); @@ -265,97 +255,94 @@ private static void replicateWritableChunks() throws IOException { } private static void replicateWritableBooleanChunks() throws IOException { - final String writableBooleanChunkClassName = ReplicatePrimitiveCode - .charToBoolean(WritableCharChunk.class, ReplicatePrimitiveCode.MAIN_SRC); + final String writableBooleanChunkClassName = + ReplicatePrimitiveCode.charToBoolean(WritableCharChunk.class, ReplicatePrimitiveCode.MAIN_SRC); final File writableBooleanChunkClassFile = new File(writableBooleanChunkClassName); List writableBooleanChunkClassLines = - FileUtils.readLines(writableBooleanChunkClassFile, Charset.defaultCharset()); + FileUtils.readLines(writableBooleanChunkClassFile, Charset.defaultCharset()); + writableBooleanChunkClassLines = + ReplicateUtilities.removeRegion(writableBooleanChunkClassLines, "BufferImports"); writableBooleanChunkClassLines = - ReplicateUtilities.removeRegion(writableBooleanChunkClassLines, "BufferImports"); + ReplicateUtilities.removeRegion(writableBooleanChunkClassLines, "CopyFromBuffer"); writableBooleanChunkClassLines = - ReplicateUtilities.removeRegion(writableBooleanChunkClassLines, "CopyFromBuffer"); - writableBooleanChunkClassLines = ReplicateUtilities - .removeRegion(writableBooleanChunkClassLines, "FillWithNullValueImports"); - 
writableBooleanChunkClassLines = ReplicateUtilities - .removeRegion(writableBooleanChunkClassLines, "FillWithNullValueImpl"); + ReplicateUtilities.removeRegion(writableBooleanChunkClassLines, "FillWithNullValueImports"); writableBooleanChunkClassLines = - ReplicateUtilities.removeRegion(writableBooleanChunkClassLines, "sort"); + ReplicateUtilities.removeRegion(writableBooleanChunkClassLines, "FillWithNullValueImpl"); + writableBooleanChunkClassLines = ReplicateUtilities.removeRegion(writableBooleanChunkClassLines, "sort"); FileUtils.writeLines(writableBooleanChunkClassFile, writableBooleanChunkClassLines); } private static void replicateWritableObjectChunks() throws IOException { - final String className = ReplicatePrimitiveCode.charToObject(WritableCharChunk.class, - ReplicatePrimitiveCode.MAIN_SRC); + final String className = + ReplicatePrimitiveCode.charToObject(WritableCharChunk.class, ReplicatePrimitiveCode.MAIN_SRC); final File classFile = new File(className); List lines = FileUtils.readLines(classFile, Charset.defaultCharset()); lines = globalReplacements(lines, - "", "", - "Object\\[]", "T[]", - "ObjectChunk", "", + "Object\\[]", "T[]", + "ObjectChunk writableBooleanChunkClassLines = - FileUtils.readLines(writableBooleanChunkClassFile, Charset.defaultCharset()); + FileUtils.readLines(writableBooleanChunkClassFile, Charset.defaultCharset()); + writableBooleanChunkClassLines = + ReplicateUtilities.removeRegion(writableBooleanChunkClassLines, "BufferImports"); writableBooleanChunkClassLines = - ReplicateUtilities.removeRegion(writableBooleanChunkClassLines, "BufferImports"); + ReplicateUtilities.removeRegion(writableBooleanChunkClassLines, "CopyFromBuffer"); writableBooleanChunkClassLines = - ReplicateUtilities.removeRegion(writableBooleanChunkClassLines, "CopyFromBuffer"); - writableBooleanChunkClassLines = ReplicateUtilities - .removeRegion(writableBooleanChunkClassLines, "FillWithNullValueImports"); - writableBooleanChunkClassLines = ReplicateUtilities - 
.removeRegion(writableBooleanChunkClassLines, "FillWithNullValueImpl"); + ReplicateUtilities.removeRegion(writableBooleanChunkClassLines, "FillWithNullValueImports"); writableBooleanChunkClassLines = - ReplicateUtilities.removeRegion(writableBooleanChunkClassLines, "sort"); + ReplicateUtilities.removeRegion(writableBooleanChunkClassLines, "FillWithNullValueImpl"); + writableBooleanChunkClassLines = ReplicateUtilities.removeRegion(writableBooleanChunkClassLines, "sort"); FileUtils.writeLines(writableBooleanChunkClassFile, writableBooleanChunkClassLines); } private static void replicateWritableObjectChunkChunks() throws IOException { - final String className = ReplicatePrimitiveCode.charToObject(WritableCharChunkChunk.class, - ReplicatePrimitiveCode.MAIN_SRC); + final String className = + ReplicatePrimitiveCode.charToObject(WritableCharChunkChunk.class, ReplicatePrimitiveCode.MAIN_SRC); final File classFile = new File(className); List lines = FileUtils.readLines(classFile, Charset.defaultCharset()); lines = globalReplacements(lines, - "", "", - "Object\\[]", "T[]", - "ObjectChunkChunk", "", + "Object\\[]", "T[]", + "ObjectChunkChunk lines = FileUtils.readLines(classFile, Charset.defaultCharset()); lines = globalReplacements(lines, - " lines = FileUtils.readLines(classFile, Charset.defaultCharset()); lines = globalReplacements(lines, - " lines = FileUtils.readLines(classFile, Charset.defaultCharset()); lines = globalReplacements(lines, - "", "", - "WritableObjectChunkChunk", "", + "WritableObjectChunkChunk lines = FileUtils.readLines(classFile, Charset.defaultCharset()); lines = globalReplacements(lines, - "", "", - "ResettableObjectChunkChunk", "", + "ResettableObjectChunkChunk classLines = FileUtils.readLines(classFile, Charset.defaultCharset()); classLines = ReplicateUtilities.replaceRegion(classLines, "dbArrayWrap", Arrays.asList( - " @NotNull", - " @Override", - " public final DbBooleanArrayDirect dbArrayWrap(Object array) {", - " throw new 
UnsupportedOperationException(\"No boolean primitive DbArray.\");", - " }", - "", - " @NotNull", - " @Override", - " public DbBooleanArraySlice dbArrayWrap(Object array, int offset, int capacity) {", - " throw new UnsupportedOperationException(\"No boolean primitive DbArray.\");", - " }")); + " @NotNull", + " @Override", + " public final DbBooleanArrayDirect dbArrayWrap(Object array) {", + " throw new UnsupportedOperationException(\"No boolean primitive DbArray.\");", + " }", + "", + " @NotNull", + " @Override", + " public DbBooleanArraySlice dbArrayWrap(Object array, int offset, int capacity) {", + " throw new UnsupportedOperationException(\"No boolean primitive DbArray.\");", + " }")); FileUtils.writeLines(classFile, classLines); } @@ -486,408 +468,395 @@ private static void replicateChunkFillers() throws IOException { } private static void replicateObjectChunkFiller() throws IOException { - final String className = ReplicatePrimitiveCode.charToObject(CharChunkFiller.class, - ReplicatePrimitiveCode.MAIN_SRC); + final String className = + ReplicatePrimitiveCode.charToObject(CharChunkFiller.class, ReplicatePrimitiveCode.MAIN_SRC); final File classFile = new File(className); List lines = FileUtils.readLines(classFile, Charset.defaultCharset()); lines = globalReplacements(lines, - "ObjectChunk<\\?", "ObjectChunk abstractLines = - FileUtils.readLines(abstractLongSparseArraySourceFile, Charset.defaultCharset()); - abstractLines = globalReplacements(abstractLines, "LongSparseArraySource", - "AbstractSparseLongArraySource", - "public class AbstractSparseLongArraySource extends SparseArrayColumnSource implements MutableColumnSourceGetDefaults.ForLong", - "abstract public class AbstractSparseLongArraySource extends SparseArrayColumnSource implements MutableColumnSourceGetDefaults.LongBacked", - "ColumnSource", "ColumnSource"); + List abstractLines = FileUtils.readLines(abstractLongSparseArraySourceFile, Charset.defaultCharset()); + abstractLines = 
globalReplacements(abstractLines, "LongSparseArraySource", "AbstractSparseLongArraySource", + "public class AbstractSparseLongArraySource extends SparseArrayColumnSource implements MutableColumnSourceGetDefaults.ForLong", + "abstract public class AbstractSparseLongArraySource extends SparseArrayColumnSource implements MutableColumnSourceGetDefaults.LongBacked", + "ColumnSource", "ColumnSource"); abstractLines = replaceRegion(abstractLines, "constructor", Arrays.asList( - " AbstractSparseLongArraySource(Class type) {", - " super(type);", - " blocks = new LongOneOrN.Block0();", " }")); + " AbstractSparseLongArraySource(Class type) {", + " super(type);", + " blocks = new LongOneOrN.Block0();", " }")); abstractLines = replaceRegion(abstractLines, "boxed methods", Collections.emptyList()); abstractLines = replaceRegion(abstractLines, "copy method", Collections.emptyList()); - abstractLines = simpleFixup(abstractLines, "getChunk", "LongChunk getChunk", - "Chunk getChunk"); + abstractLines = simpleFixup(abstractLines, "getChunk", "LongChunk getChunk", "Chunk getChunk"); abstractLines = simpleFixup(abstractLines, "getPrevChunk", "LongChunk getPrevChunk", - "Chunk getPrevChunk"); + "Chunk getPrevChunk"); abstractLines = standardCleanups(abstractLines); FileUtils.writeLines(abstractLongSparseArraySourceFile, abstractLines); } private static void replicateSparseBooleanSource() throws IOException { - final String booleanPath = - ReplicatePrimitiveCode.charToBooleanAsByte(CharacterSparseArraySource.class, + final String booleanPath = ReplicatePrimitiveCode.charToBooleanAsByte(CharacterSparseArraySource.class, ReplicatePrimitiveCode.MAIN_SRC, Collections.emptyMap()); final File booleanFile = new File(booleanPath); List lines = FileUtils.readLines(booleanFile, Charset.defaultCharset()); - lines = addImport(lines, - "import static " + BooleanUtils.class.getCanonicalName() + ".NULL_BOOLEAN_AS_BYTE;\n"); + lines = addImport(lines, "import static " + 
BooleanUtils.class.getCanonicalName() + ".NULL_BOOLEAN_AS_BYTE;\n"); lines = addImport(lines, BooleanUtils.class); lines = globalReplacements(lines, "BooleanOneOrN", "ByteOneOrN"); lines = globalReplacements(lines, "WritableBooleanChunk", "WritableObjectChunk", - "asBooleanChunk", "asObjectChunk", - "BooleanChunk", "ObjectChunk", - "ObjectChunk", "ObjectChunk", - "BooleanChunk<[?] extends Values>", "ObjectChunk", - "ObjectChunk<[?] extends Values>", "ObjectChunk", - "BooleanChunk<[?] super Values>", "ObjectChunk", - "ObjectChunk<[?] super Values>", "ObjectChunk"); - lines = simpleFixup(lines, "primitive get", "NULL_BOOLEAN", "NULL_BOOLEAN_AS_BYTE", - "getBoolean", "getByte", "getPrevBoolean", "getPrevByte"); + "asBooleanChunk", "asObjectChunk", + "BooleanChunk", "ObjectChunk", + "ObjectChunk", "ObjectChunk", + "BooleanChunk<[?] extends Values>", "ObjectChunk", + "ObjectChunk<[?] extends Values>", "ObjectChunk", + "BooleanChunk<[?] super Values>", "ObjectChunk", + "ObjectChunk<[?] super Values>", "ObjectChunk"); + lines = simpleFixup(lines, "primitive get", "NULL_BOOLEAN", "NULL_BOOLEAN_AS_BYTE", "getBoolean", "getByte", + "getPrevBoolean", "getPrevByte"); lines = replaceRegion(lines, "copyFromTypedArray", Arrays.asList( - " for (int jj = 0; jj < length; ++jj) {", - " chunk.set(jj + ctx.offset, BooleanUtils.byteAsBoolean(ctx.block[sIndexWithinBlock + jj]));", - " }")); + " for (int jj = 0; jj < length; ++jj) {", + " chunk.set(jj + ctx.offset, BooleanUtils.byteAsBoolean(ctx.block[sIndexWithinBlock + jj]));", + " }")); lines = replaceRegion(lines, "copyToTypedArray", Arrays.asList( - " for (int jj = 0; jj < length; ++jj) {", - " block[sIndexWithinBlock + jj] = BooleanUtils.booleanAsByte(chunk.get(offset + jj));", - " }")); - - lines = applyFixup(lines, "fillByKeys", "(.*chunk.set\\(.*, )(ctx\\.block.*)(\\);.*)", - m -> Collections.singletonList( - m.group(1) + "BooleanUtils.byteAsBoolean(" + m.group(2) + ")" + m.group(3))); - lines = applyFixup(lines, 
"fillByUnorderedKeys", - "(.*byteChunk.set\\(.*, )(block.*)(\\);.*)", m -> Collections.singletonList( - m.group(1) + "BooleanUtils.byteAsBoolean(" + m.group(2) + ")" + m.group(3))); - lines = - applyFixup(lines, "fillFromChunkByKeys", "(.*)(chunk.get\\(.*\\));", m -> Collections - .singletonList(m.group(1) + "BooleanUtils.booleanAsByte(" + m.group(2) + ");")); - lines = - applyFixup(lines, "fillFromChunkUnordered", "(.*)(chunk.get\\(.*\\));", m -> Collections - .singletonList(m.group(1) + "BooleanUtils.booleanAsByte(" + m.group(2) + ");")); - - lines = - simpleFixup(lines, "allocateNullFilledBlock", "NULL_BOOLEAN", "NULL_BOOLEAN_AS_BYTE"); - lines = simpleFixup(lines, "boxed methods", "box\\(getBoolean\\(", - "BooleanUtils.byteAsBoolean(getByte(", "box\\(getPrevBoolean\\(", - "BooleanUtils.byteAsBoolean(getPrevByte("); + " for (int jj = 0; jj < length; ++jj) {", + " block[sIndexWithinBlock + jj] = BooleanUtils.booleanAsByte(chunk.get(offset + jj));", + " }")); + + lines = applyFixup(lines, "fillByKeys", "(.*chunk.set\\(.*, )(ctx\\.block.*)(\\);.*)", m -> Collections + .singletonList(m.group(1) + "BooleanUtils.byteAsBoolean(" + m.group(2) + ")" + m.group(3))); + lines = applyFixup(lines, "fillByUnorderedKeys", "(.*byteChunk.set\\(.*, )(block.*)(\\);.*)", m -> Collections + .singletonList(m.group(1) + "BooleanUtils.byteAsBoolean(" + m.group(2) + ")" + m.group(3))); + lines = applyFixup(lines, "fillFromChunkByKeys", "(.*)(chunk.get\\(.*\\));", + m -> Collections.singletonList(m.group(1) + "BooleanUtils.booleanAsByte(" + m.group(2) + ");")); + lines = applyFixup(lines, "fillFromChunkUnordered", "(.*)(chunk.get\\(.*\\));", + m -> Collections.singletonList(m.group(1) + "BooleanUtils.booleanAsByte(" + m.group(2) + ");")); + + lines = simpleFixup(lines, "allocateNullFilledBlock", "NULL_BOOLEAN", "NULL_BOOLEAN_AS_BYTE"); + lines = simpleFixup(lines, "boxed methods", "box\\(getBoolean\\(", "BooleanUtils.byteAsBoolean(getByte(", + "box\\(getPrevBoolean\\(", 
"BooleanUtils.byteAsBoolean(getPrevByte("); lines = simpleFixup(lines, "boxed methods", "unbox", "BooleanUtils.booleanAsByte"); lines = applyFixup(lines, "constructor", "(.*super\\().*(\\);.*)", - m -> Collections.singletonList(m.group(1) + "Boolean.class" + m.group(2))); + m -> Collections.singletonList(m.group(1) + "Boolean.class" + m.group(2))); lines = removeRegion(removeRegion(lines, "getChunk"), "getPrevChunk"); lines = replaceRegion(lines, "getChunk", Arrays.asList( - " @Override", - " public ObjectChunk getChunk(@NotNull GetContext context, @NotNull OrderedKeys orderedKeys) {", - " return getChunkByFilling(context, orderedKeys).asObjectChunk();", - " }")); + " @Override", + " public ObjectChunk getChunk(@NotNull GetContext context, @NotNull OrderedKeys orderedKeys) {", + " return getChunkByFilling(context, orderedKeys).asObjectChunk();", + " }")); lines = replaceRegion(lines, "getPrevChunk", Arrays.asList( - " @Override", - " public ObjectChunk getPrevChunk(@NotNull GetContext context, @NotNull OrderedKeys orderedKeys) {", - " return getPrevChunkByFilling(context, orderedKeys).asObjectChunk();", - " }")); + " @Override", + " public ObjectChunk getPrevChunk(@NotNull GetContext context, @NotNull OrderedKeys orderedKeys) {", + " return getPrevChunkByFilling(context, orderedKeys).asObjectChunk();", + " }")); lines = simpleFixup(lines, "fillByUnorderedKeys", "WritableObjectChunk byteChunk", - "WritableObjectChunk byteChunk"); + "WritableObjectChunk byteChunk"); lines = simpleFixup(lines, "fillByUnorderedKeys", "byteChunk", "booleanObjectChunk"); lines = simpleFixup(lines, "fillByUnorderedKeys", - "BooleanUtils\\.byteAsBoolean\\(blockToUse == null \\? NULL_BOOLEAN : blockToUse\\[indexWithinBlock\\]\\)", - "blockToUse == null ? NULL_BOOLEAN : BooleanUtils.byteAsBoolean(blockToUse[indexWithinBlock])"); + "BooleanUtils\\.byteAsBoolean\\(blockToUse == null \\? NULL_BOOLEAN : blockToUse\\[indexWithinBlock\\]\\)", + "blockToUse == null ? 
NULL_BOOLEAN : BooleanUtils.byteAsBoolean(blockToUse[indexWithinBlock])"); lines = simpleFixup(lines, "serialization", - "NULL_BOOLEAN", "NULL_BOOLEAN_AS_BYTE", - "ObjectChunk", "ByteChunk", - "BooleanChunk", "ByteChunk", - "", "", - "", ""); + "NULL_BOOLEAN", "NULL_BOOLEAN_AS_BYTE", + "ObjectChunk", "ByteChunk", + "BooleanChunk", "ByteChunk", + "", "", + "", ""); lines = insertRegion(lines, "serialization", Arrays.asList( - " WritableSource reinterpretForSerialization() {", - " return (WritableSource)reinterpret(byte.class);", - " }", - "")); + " WritableSource reinterpretForSerialization() {", + " return (WritableSource)reinterpret(byte.class);", + " }", + "")); lines = simpleFixup(lines, "reinterpretForSerialization", - "return this;", "return (WritableSource)reinterpret(byte.class);"); + "return this;", "return (WritableSource)reinterpret(byte.class);"); // AND SO IT BEGINS lines = replaceRegion(lines, "reinterpretation", Arrays.asList( - " @Override", - " public boolean allowsReinterpret(@NotNull Class alternateDataType) {", - " return alternateDataType.equals(byte.class);", - " }", - "", - " @Override", - " protected ColumnSource doReinterpret(@NotNull Class alternateDataType) {", - " //noinspection unchecked", - " return (ColumnSource) new BooleanSparseArraySource.ReinterpretedAsByte(this);", - " }", - "", - " private static class ReinterpretedAsByte extends AbstractColumnSource implements MutableColumnSourceGetDefaults.ForByte, FillUnordered, WritableSource {", - " private final BooleanSparseArraySource wrapped;", - "", - " private ReinterpretedAsByte(BooleanSparseArraySource wrapped) {", - " super(byte.class);", - " this.wrapped = wrapped;", - " }", - "", - " @Override", - " public byte getByte(long index) {", - " return wrapped.getByte(index);", - " }", - "", - " @Override", - " public byte getPrevByte(long index) {", - " return wrapped.getPrevByte(index);", - " }", - "", - " @Override", - " public void set(long key, Byte value) {", - " wrapped.set(key, 
value);", - " }", - "", - " @Override", - " public void set(long key, byte value) {", - " wrapped.set(key, value);", - " }", - "", - " @Override", - " public void ensureCapacity(long capacity, boolean nullFilled) {", - " wrapped.ensureCapacity(capacity, nullFilled);", - " }", - "", - " @Override", - " public void copy(ColumnSource sourceColumn, long sourceKey, long destKey) {", - " set(destKey, sourceColumn.getByte(sourceKey));", - " }", - "", - " @Override", - " public boolean allowsReinterpret(@NotNull Class alternateDataType) {", - " return alternateDataType == Boolean.class;", - " }", - "", - " @Override", - " protected ColumnSource doReinterpret(@NotNull Class alternateDataType) {", - " // noinspection unchecked", - " return (ColumnSource)wrapped;", - " }", - "", - " @Override", - " public void fillChunk(@NotNull final ColumnSource.FillContext context, @NotNull final WritableChunk destination, @NotNull final OrderedKeys orderedKeys) {", - " fillSparseChunk(destination, orderedKeys);", - " }", - "", - " @Override", - " public void fillPrevChunk(@NotNull final ColumnSource.FillContext context, @NotNull final WritableChunk destination, @NotNull final OrderedKeys orderedKeys) {", - " fillSparsePrevChunk(destination, orderedKeys);", - " }", - "", - " @Override", - " public void fillChunkUnordered(@NotNull final FillContext context, @NotNull final WritableChunk destination, @NotNull final LongChunk keyIndices) {", - " fillSparseChunkUnordered(destination, keyIndices);", - " }", - "", - " @Override", - " public void fillPrevChunkUnordered(@NotNull final FillContext context, @NotNull final WritableChunk destination, @NotNull final LongChunk keyIndices) {", - " fillSparsePrevChunkUnordered(destination, keyIndices);", - " }", - "", - " private void fillSparseChunk(@NotNull final WritableChunk destGeneric, @NotNull final OrderedKeys indices) {", - " if (indices.size() == 0) {", - " destGeneric.setSize(0);", - " return;", - " }", - " // This implementation is in \"key\" 
style (rather than range style).", - " final WritableByteChunk chunk = destGeneric.asWritableByteChunk();", - " final FillByContext ctx = new FillByContext<>();", - " indices.forEachLong((final long v) -> {", - " if (v > ctx.maxKeyInCurrentBlock) {", - " ctx.block = wrapped.blocks.getInnermostBlockByKeyOrNull(v);", - " ctx.maxKeyInCurrentBlock = v | INDEX_MASK;", - " }", - " if (ctx.block == null) {", - " chunk.fillWithNullValue(ctx.offset, 1);", - " } else {", - " chunk.set(ctx.offset, ctx.block[(int) (v & INDEX_MASK)]);", - " }", - " ++ctx.offset;", - " return true;", - " });", - " destGeneric.setSize(ctx.offset);", - " }", - "", - " private void fillSparsePrevChunk(@NotNull final WritableChunk destGeneric, @NotNull final OrderedKeys indices) {", - " final long sz = indices.size();", - " if (sz == 0) {", - " destGeneric.setSize(0);", - " return;", - " }", - "", - " if (wrapped.prevFlusher == null) {", - " fillSparseChunk(destGeneric, indices);", - " return;", - " }", - " fillSparsePrevChunkUnordered(destGeneric, indices.asKeyIndicesChunk());", - " }", - "", - " private void fillSparseChunkUnordered(@NotNull final WritableChunk destGeneric, @NotNull final LongChunk indices) {", - " final WritableByteChunk chunk = destGeneric.asWritableByteChunk();", - " // This implementation is in \"key\" style (rather than range style).", - " for (int ii = 0; ii < indices.size(); ) {", - " final long firstKey = indices.get(ii);", - " if (firstKey == Index.NULL_KEY) {", - " chunk.set(ii++, NULL_BOOLEAN_AS_BYTE);", - " continue;", - " }", - " final long masked = firstKey & ~INDEX_MASK;", - " int lastII = ii;", - " while (lastII + 1 < indices.size()) {", - " final int nextII = lastII + 1;", - " final long nextKey = indices.get(nextII);", - " final long nextMasked = nextKey & ~INDEX_MASK;", - " if (nextMasked != masked) {", - " break;", - " }", - " lastII = nextII;", - " }", - " final byte [] block = wrapped.blocks.getInnermostBlockByKeyOrNull(firstKey);", - " if (block == null) {", 
- " chunk.fillWithNullValue(ii, lastII - ii + 1);", - " ii = lastII + 1;", - " continue;", - " }", - " while (ii <= lastII) {", - " final int indexWithinBlock = (int) (indices.get(ii) & INDEX_MASK);", - " chunk.set(ii++, block[indexWithinBlock]);", - " }", - " }", - " destGeneric.setSize(indices.size());", - " }", - "", - " private void fillSparsePrevChunkUnordered(@NotNull final WritableChunk destGeneric, @NotNull final LongChunk indices) {", - " final WritableByteChunk booleanObjectChunk = destGeneric.asWritableByteChunk();", - " for (int ii = 0; ii < indices.size(); ) {", - " final long firstKey = indices.get(ii);", - " if (firstKey == Index.NULL_KEY) {", - " booleanObjectChunk.set(ii++, NULL_BOOLEAN_AS_BYTE);", - " continue;", - " }", - " final long masked = firstKey & ~INDEX_MASK;", - " int lastII = ii;", - " while (lastII + 1 < indices.size()) {", - " final int nextII = lastII + 1;", - " final long nextKey = indices.get(nextII);", - " final long nextMasked = nextKey & ~INDEX_MASK;", - " if (nextMasked != masked) {", - " break;", - " }", - " lastII = nextII;", - " }", - "", - " final byte [] block = wrapped.blocks.getInnermostBlockByKeyOrNull(firstKey);", - " if (block == null) {", - " booleanObjectChunk.fillWithNullValue(ii, lastII - ii + 1);", - " ii = lastII + 1;", - " continue;", - " }", - "", - " final long [] prevInUse = (wrapped.prevFlusher == null || wrapped.prevInUse == null) ? null :", - " wrapped.prevInUse.getInnermostBlockByKeyOrNull(firstKey);", - " final byte [] prevBlock = prevInUse == null ? null : wrapped.prevBlocks.getInnermostBlockByKeyOrNull(firstKey);", - " while (ii <= lastII) {", - " final int indexWithinBlock = (int) (indices.get(ii) & INDEX_MASK);", - " final int indexWithinInUse = indexWithinBlock >> LOG_INUSE_BITSET_SIZE;", - " final long maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK);", - "", - " final byte[] blockToUse = (prevInUse != null && (prevInUse[indexWithinInUse] & maskWithinInUse) != 0) ? 
prevBlock : block;", - " booleanObjectChunk.set(ii++, blockToUse == null ? NULL_BOOLEAN_AS_BYTE : blockToUse[indexWithinBlock]);", - " }", - " }", - " destGeneric.setSize(indices.size());", - " }", - "", - " @Override", - " public void fillFromChunk(@NotNull FillFromContext context_unused, @NotNull Chunk src, @NotNull OrderedKeys orderedKeys) {", - " // This implementation is in \"key\" style (rather than range style).", - " if (orderedKeys.size() == 0) {", - " return;", - " }", - " final ByteChunk chunk = src.asByteChunk();", - " final LongChunk keys = orderedKeys.asKeyIndicesChunk();", - "", - " final boolean hasPrev = wrapped.prevFlusher != null;", - "", - " if (hasPrev) {", - " wrapped.prevFlusher.maybeActivate();", - " }", - "", - " for (int ii = 0; ii < keys.size(); ) {", - " final long firstKey = keys.get(ii);", - " final long maxKeyInCurrentBlock = firstKey | INDEX_MASK;", - " int lastII = ii;", - " while (lastII + 1 < keys.size() && keys.get(lastII + 1) <= maxKeyInCurrentBlock) {", - " ++lastII;", - " }", - "", - " final int block0 = (int) (firstKey >> BLOCK0_SHIFT) & BLOCK0_MASK;", - " final int block1 = (int) (firstKey >> BLOCK1_SHIFT) & BLOCK1_MASK;", - " final int block2 = (int) (firstKey >> BLOCK2_SHIFT) & BLOCK2_MASK;", - " final byte [] block = wrapped.ensureBlock(block0, block1, block2);", - "", - " if (chunk.isAlias(block)) {", - " throw new UnsupportedOperationException(\"Source chunk is an alias for target data\");", - " }", - "", - " // This conditional with its constant condition should be very friendly to the branch predictor.", - " final byte[] prevBlock = hasPrev ? wrapped.ensurePrevBlock(firstKey, block0, block1, block2) : null;", - " final long[] inUse = hasPrev ? 
wrapped.prevInUse.get(block0).get(block1).get(block2) : null;", - "", - " while (ii <= lastII) {", - " final int indexWithinBlock = (int) (keys.get(ii) & INDEX_MASK);", - " // This 'if' with its constant condition should be very friendly to the branch predictor.", - " if (hasPrev) {", - " assert inUse != null;", - " assert prevBlock != null;", - "", - " final int indexWithinInUse = indexWithinBlock >> LOG_INUSE_BITSET_SIZE;", - " final long maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK);", - "", - " if ((inUse[indexWithinInUse] & maskWithinInUse) == 0) {", - " prevBlock[indexWithinBlock] = block[indexWithinBlock];", - " inUse[indexWithinInUse] |= maskWithinInUse;", - " }", - " }", - " block[indexWithinBlock] = chunk.get(ii);", - " ++ii;", - " }", - " }", - " }", - " }")); + " @Override", + " public boolean allowsReinterpret(@NotNull Class alternateDataType) {", + " return alternateDataType.equals(byte.class);", + " }", + "", + " @Override", + " protected ColumnSource doReinterpret(@NotNull Class alternateDataType) {", + " //noinspection unchecked", + " return (ColumnSource) new BooleanSparseArraySource.ReinterpretedAsByte(this);", + " }", + "", + " private static class ReinterpretedAsByte extends AbstractColumnSource implements MutableColumnSourceGetDefaults.ForByte, FillUnordered, WritableSource {", + " private final BooleanSparseArraySource wrapped;", + "", + " private ReinterpretedAsByte(BooleanSparseArraySource wrapped) {", + " super(byte.class);", + " this.wrapped = wrapped;", + " }", + "", + " @Override", + " public byte getByte(long index) {", + " return wrapped.getByte(index);", + " }", + "", + " @Override", + " public byte getPrevByte(long index) {", + " return wrapped.getPrevByte(index);", + " }", + "", + " @Override", + " public void set(long key, Byte value) {", + " wrapped.set(key, value);", + " }", + "", + " @Override", + " public void set(long key, byte value) {", + " wrapped.set(key, value);", + " }", + "", + " @Override", + " public void 
ensureCapacity(long capacity, boolean nullFilled) {", + " wrapped.ensureCapacity(capacity, nullFilled);", + " }", + "", + " @Override", + " public void copy(ColumnSource sourceColumn, long sourceKey, long destKey) {", + " set(destKey, sourceColumn.getByte(sourceKey));", + " }", + "", + " @Override", + " public boolean allowsReinterpret(@NotNull Class alternateDataType) {", + " return alternateDataType == Boolean.class;", + " }", + "", + " @Override", + " protected ColumnSource doReinterpret(@NotNull Class alternateDataType) {", + " // noinspection unchecked", + " return (ColumnSource)wrapped;", + " }", + "", + " @Override", + " public void fillChunk(@NotNull final ColumnSource.FillContext context, @NotNull final WritableChunk destination, @NotNull final OrderedKeys orderedKeys) {", + " fillSparseChunk(destination, orderedKeys);", + " }", + "", + " @Override", + " public void fillPrevChunk(@NotNull final ColumnSource.FillContext context, @NotNull final WritableChunk destination, @NotNull final OrderedKeys orderedKeys) {", + " fillSparsePrevChunk(destination, orderedKeys);", + " }", + "", + " @Override", + " public void fillChunkUnordered(@NotNull final FillContext context, @NotNull final WritableChunk destination, @NotNull final LongChunk keyIndices) {", + " fillSparseChunkUnordered(destination, keyIndices);", + " }", + "", + " @Override", + " public void fillPrevChunkUnordered(@NotNull final FillContext context, @NotNull final WritableChunk destination, @NotNull final LongChunk keyIndices) {", + " fillSparsePrevChunkUnordered(destination, keyIndices);", + " }", + "", + " private void fillSparseChunk(@NotNull final WritableChunk destGeneric, @NotNull final OrderedKeys indices) {", + " if (indices.size() == 0) {", + " destGeneric.setSize(0);", + " return;", + " }", + " // This implementation is in \"key\" style (rather than range style).", + " final WritableByteChunk chunk = destGeneric.asWritableByteChunk();", + " final FillByContext ctx = new FillByContext<>();", + 
" indices.forEachLong((final long v) -> {", + " if (v > ctx.maxKeyInCurrentBlock) {", + " ctx.block = wrapped.blocks.getInnermostBlockByKeyOrNull(v);", + " ctx.maxKeyInCurrentBlock = v | INDEX_MASK;", + " }", + " if (ctx.block == null) {", + " chunk.fillWithNullValue(ctx.offset, 1);", + " } else {", + " chunk.set(ctx.offset, ctx.block[(int) (v & INDEX_MASK)]);", + " }", + " ++ctx.offset;", + " return true;", + " });", + " destGeneric.setSize(ctx.offset);", + " }", + "", + " private void fillSparsePrevChunk(@NotNull final WritableChunk destGeneric, @NotNull final OrderedKeys indices) {", + " final long sz = indices.size();", + " if (sz == 0) {", + " destGeneric.setSize(0);", + " return;", + " }", + "", + " if (wrapped.prevFlusher == null) {", + " fillSparseChunk(destGeneric, indices);", + " return;", + " }", + " fillSparsePrevChunkUnordered(destGeneric, indices.asKeyIndicesChunk());", + " }", + "", + " private void fillSparseChunkUnordered(@NotNull final WritableChunk destGeneric, @NotNull final LongChunk indices) {", + " final WritableByteChunk chunk = destGeneric.asWritableByteChunk();", + " // This implementation is in \"key\" style (rather than range style).", + " for (int ii = 0; ii < indices.size(); ) {", + " final long firstKey = indices.get(ii);", + " if (firstKey == Index.NULL_KEY) {", + " chunk.set(ii++, NULL_BOOLEAN_AS_BYTE);", + " continue;", + " }", + " final long masked = firstKey & ~INDEX_MASK;", + " int lastII = ii;", + " while (lastII + 1 < indices.size()) {", + " final int nextII = lastII + 1;", + " final long nextKey = indices.get(nextII);", + " final long nextMasked = nextKey & ~INDEX_MASK;", + " if (nextMasked != masked) {", + " break;", + " }", + " lastII = nextII;", + " }", + " final byte [] block = wrapped.blocks.getInnermostBlockByKeyOrNull(firstKey);", + " if (block == null) {", + " chunk.fillWithNullValue(ii, lastII - ii + 1);", + " ii = lastII + 1;", + " continue;", + " }", + " while (ii <= lastII) {", + " final int indexWithinBlock = 
(int) (indices.get(ii) & INDEX_MASK);", + " chunk.set(ii++, block[indexWithinBlock]);", + " }", + " }", + " destGeneric.setSize(indices.size());", + " }", + "", + " private void fillSparsePrevChunkUnordered(@NotNull final WritableChunk destGeneric, @NotNull final LongChunk indices) {", + " final WritableByteChunk booleanObjectChunk = destGeneric.asWritableByteChunk();", + " for (int ii = 0; ii < indices.size(); ) {", + " final long firstKey = indices.get(ii);", + " if (firstKey == Index.NULL_KEY) {", + " booleanObjectChunk.set(ii++, NULL_BOOLEAN_AS_BYTE);", + " continue;", + " }", + " final long masked = firstKey & ~INDEX_MASK;", + " int lastII = ii;", + " while (lastII + 1 < indices.size()) {", + " final int nextII = lastII + 1;", + " final long nextKey = indices.get(nextII);", + " final long nextMasked = nextKey & ~INDEX_MASK;", + " if (nextMasked != masked) {", + " break;", + " }", + " lastII = nextII;", + " }", + "", + " final byte [] block = wrapped.blocks.getInnermostBlockByKeyOrNull(firstKey);", + " if (block == null) {", + " booleanObjectChunk.fillWithNullValue(ii, lastII - ii + 1);", + " ii = lastII + 1;", + " continue;", + " }", + "", + " final long [] prevInUse = (wrapped.prevFlusher == null || wrapped.prevInUse == null) ? null :", + " wrapped.prevInUse.getInnermostBlockByKeyOrNull(firstKey);", + " final byte [] prevBlock = prevInUse == null ? null : wrapped.prevBlocks.getInnermostBlockByKeyOrNull(firstKey);", + " while (ii <= lastII) {", + " final int indexWithinBlock = (int) (indices.get(ii) & INDEX_MASK);", + " final int indexWithinInUse = indexWithinBlock >> LOG_INUSE_BITSET_SIZE;", + " final long maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK);", + "", + " final byte[] blockToUse = (prevInUse != null && (prevInUse[indexWithinInUse] & maskWithinInUse) != 0) ? prevBlock : block;", + " booleanObjectChunk.set(ii++, blockToUse == null ? 
NULL_BOOLEAN_AS_BYTE : blockToUse[indexWithinBlock]);", + " }", + " }", + " destGeneric.setSize(indices.size());", + " }", + "", + " @Override", + " public void fillFromChunk(@NotNull FillFromContext context_unused, @NotNull Chunk src, @NotNull OrderedKeys orderedKeys) {", + " // This implementation is in \"key\" style (rather than range style).", + " if (orderedKeys.size() == 0) {", + " return;", + " }", + " final ByteChunk chunk = src.asByteChunk();", + " final LongChunk keys = orderedKeys.asKeyIndicesChunk();", + "", + " final boolean hasPrev = wrapped.prevFlusher != null;", + "", + " if (hasPrev) {", + " wrapped.prevFlusher.maybeActivate();", + " }", + "", + " for (int ii = 0; ii < keys.size(); ) {", + " final long firstKey = keys.get(ii);", + " final long maxKeyInCurrentBlock = firstKey | INDEX_MASK;", + " int lastII = ii;", + " while (lastII + 1 < keys.size() && keys.get(lastII + 1) <= maxKeyInCurrentBlock) {", + " ++lastII;", + " }", + "", + " final int block0 = (int) (firstKey >> BLOCK0_SHIFT) & BLOCK0_MASK;", + " final int block1 = (int) (firstKey >> BLOCK1_SHIFT) & BLOCK1_MASK;", + " final int block2 = (int) (firstKey >> BLOCK2_SHIFT) & BLOCK2_MASK;", + " final byte [] block = wrapped.ensureBlock(block0, block1, block2);", + "", + " if (chunk.isAlias(block)) {", + " throw new UnsupportedOperationException(\"Source chunk is an alias for target data\");", + " }", + "", + " // This conditional with its constant condition should be very friendly to the branch predictor.", + " final byte[] prevBlock = hasPrev ? wrapped.ensurePrevBlock(firstKey, block0, block1, block2) : null;", + " final long[] inUse = hasPrev ? 
wrapped.prevInUse.get(block0).get(block1).get(block2) : null;", + "", + " while (ii <= lastII) {", + " final int indexWithinBlock = (int) (keys.get(ii) & INDEX_MASK);", + " // This 'if' with its constant condition should be very friendly to the branch predictor.", + " if (hasPrev) {", + " assert inUse != null;", + " assert prevBlock != null;", + "", + " final int indexWithinInUse = indexWithinBlock >> LOG_INUSE_BITSET_SIZE;", + " final long maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK);", + "", + " if ((inUse[indexWithinInUse] & maskWithinInUse) == 0) {", + " prevBlock[indexWithinBlock] = block[indexWithinBlock];", + " inUse[indexWithinInUse] |= maskWithinInUse;", + " }", + " }", + " block[indexWithinBlock] = chunk.get(ii);", + " ++ii;", + " }", + " }", + " }", + " }")); FileUtils.writeLines(booleanFile, lines); } private static void replicateSparseObjectSource() throws IOException { - final String objectPath = ReplicatePrimitiveCode - .charToObject(CharacterSparseArraySource.class, ReplicatePrimitiveCode.MAIN_SRC); + final String objectPath = + ReplicatePrimitiveCode.charToObject(CharacterSparseArraySource.class, ReplicatePrimitiveCode.MAIN_SRC); final File objectFile = new File(objectPath); List lines = FileUtils.readLines(objectFile, Charset.defaultCharset()); @@ -897,77 +866,76 @@ private static void replicateSparseObjectSource() throws IOException { lines = globalReplacements(lines, "ObjectOneOrN.Block([0-2])", "ObjectOneOrN.Block$1"); lines = globalReplacements(lines, - "public class ObjectSparseArraySource extends SparseArrayColumnSource implements MutableColumnSourceGetDefaults.ForObject", - "public class ObjectSparseArraySource extends SparseArrayColumnSource implements MutableColumnSourceGetDefaults.ForObject", - "Object[ ]?\\[\\]", "T []", - "NULL_OBJECT", "null", - "getObject", "get", - "getPrevObject", "getPrev", - "ColumnSource", "ColumnSource", - "ObjectChunk", "ObjectChunk", - "ObjectChunk<[?] 
super Values>", "ObjectChunk", - "ObjectChunk<[?] extends Values>", "ObjectChunk", - "Object get", "T get", - "recycler.borrowItem\\(\\)", "(T[])recycler.borrowItem()", - "recycler2.borrowItem\\(\\)", "(T[][])recycler2.borrowItem()", - "recycler1.borrowItem\\(\\)", "(T[][][])recycler1.borrowItem()", - "recycler0.borrowItem\\(\\)", "(T[][][][])recycler0.borrowItem()", - "public final void set\\(long key, Object value\\) \\{", - "public final void set(long key, T value) {"); + "public class ObjectSparseArraySource extends SparseArrayColumnSource implements MutableColumnSourceGetDefaults.ForObject", + "public class ObjectSparseArraySource extends SparseArrayColumnSource implements MutableColumnSourceGetDefaults.ForObject", + "Object[ ]?\\[\\]", "T []", + "NULL_OBJECT", "null", + "getObject", "get", + "getPrevObject", "getPrev", + "ColumnSource", "ColumnSource", + "ObjectChunk", "ObjectChunk", + "ObjectChunk<[?] super Values>", "ObjectChunk", + "ObjectChunk<[?] extends Values>", "ObjectChunk", + "Object get", "T get", + "recycler.borrowItem\\(\\)", "(T[])recycler.borrowItem()", + "recycler2.borrowItem\\(\\)", "(T[][])recycler2.borrowItem()", + "recycler1.borrowItem\\(\\)", "(T[][][])recycler1.borrowItem()", + "recycler0.borrowItem\\(\\)", "(T[][][][])recycler0.borrowItem()", + "public final void set\\(long key, Object value\\) \\{", "public final void set(long key, T value) {"); lines = replaceRegion(lines, "recyclers", Arrays.asList( - " private static final SoftRecycler recycler = new SoftRecycler<>(DEFAULT_RECYCLER_CAPACITY,", - " () -> new Object[BLOCK_SIZE], block -> Arrays.fill(block, null)); // we'll hold onto previous values, fix that", - " private static final SoftRecycler recycler2 = new SoftRecycler<>(DEFAULT_RECYCLER_CAPACITY,", - " () -> new Object[BLOCK2_SIZE][], null);", - " private static final SoftRecycler recycler1 = new SoftRecycler<>(DEFAULT_RECYCLER_CAPACITY,", - " () -> new ObjectOneOrN.Block2[BLOCK1_SIZE], null);", - " private static final 
SoftRecycler recycler0 = new SoftRecycler<>(DEFAULT_RECYCLER_CAPACITY,", - " () -> new ObjectOneOrN.Block1[BLOCK0_SIZE], null);")); + " private static final SoftRecycler recycler = new SoftRecycler<>(DEFAULT_RECYCLER_CAPACITY,", + " () -> new Object[BLOCK_SIZE], block -> Arrays.fill(block, null)); // we'll hold onto previous values, fix that", + " private static final SoftRecycler recycler2 = new SoftRecycler<>(DEFAULT_RECYCLER_CAPACITY,", + " () -> new Object[BLOCK2_SIZE][], null);", + " private static final SoftRecycler recycler1 = new SoftRecycler<>(DEFAULT_RECYCLER_CAPACITY,", + " () -> new ObjectOneOrN.Block2[BLOCK1_SIZE], null);", + " private static final SoftRecycler recycler0 = new SoftRecycler<>(DEFAULT_RECYCLER_CAPACITY,", + " () -> new ObjectOneOrN.Block1[BLOCK0_SIZE], null);")); lines = replaceRegion(lines, "constructor", Arrays.asList( - " private final boolean isArrayType;", - "", - " ObjectSparseArraySource(Class type) {", - " super(type);", - " blocks = new ObjectOneOrN.Block0<>();", - " isArrayType = DbArrayBase.class.isAssignableFrom(type);", - " }", - "", - " ObjectSparseArraySource(Class type, Class componentType) {", - " super(type, componentType);", - " blocks = new ObjectOneOrN.Block0<>();", - " isArrayType = DbArrayBase.class.isAssignableFrom(type);", - " }")); + " private final boolean isArrayType;", + "", + " ObjectSparseArraySource(Class type) {", + " super(type);", + " blocks = new ObjectOneOrN.Block0<>();", + " isArrayType = DbArrayBase.class.isAssignableFrom(type);", + " }", + "", + " ObjectSparseArraySource(Class type, Class componentType) {", + " super(type, componentType);", + " blocks = new ObjectOneOrN.Block0<>();", + " isArrayType = DbArrayBase.class.isAssignableFrom(type);", + " }")); lines = replaceRegion(lines, "move method", Arrays.asList( - " @Override", - " public void move(long sourceKey, long destKey) {", - " final T value = get(sourceKey);", - " set(destKey, value);", - " if (value != null) {", - " set(sourceKey, 
null);", - " }", - " }")); + " @Override", + " public void move(long sourceKey, long destKey) {", + " final T value = get(sourceKey);", + " set(destKey, value);", + " if (value != null) {", + " set(sourceKey, null);", + " }", + " }")); lines = replaceRegion(lines, "allocateNullFilledBlock", Arrays.asList( - " final T[] allocateNullFilledBlock(int size){", - " //noinspection unchecked", - " return (T[]) new Object[size];", - " }")); + " final T[] allocateNullFilledBlock(int size){", + " //noinspection unchecked", + " return (T[]) new Object[size];", + " }")); lines = replaceRegion(lines, "copy method", Arrays.asList( - " @Override", - " public void copy(ColumnSource sourceColumn, long sourceKey, long destKey) {", - " final T value = sourceColumn.get(sourceKey);", - "", - " if (isArrayType && value instanceof DbArrayBase) {", - " final DbArrayBase dbArray = (DbArrayBase) value;", - " set(destKey, (T) dbArray.getDirect());", - " } else {", - " set(destKey, value);", - " }", - " }")); + " @Override", + " public void copy(ColumnSource sourceColumn, long sourceKey, long destKey) {", + " final T value = sourceColumn.get(sourceKey);", + "", + " if (isArrayType && value instanceof DbArrayBase) {", + " final DbArrayBase dbArray = (DbArrayBase) value;", + " set(destKey, (T) dbArray.getDirect());", + " } else {", + " set(destKey, value);", + " }", + " }")); lines = addImport(lines, DbArrayBase.class); @@ -977,23 +945,23 @@ private static void replicateSparseObjectSource() throws IOException { private static void replicateOneOrN() throws IOException { ReplicatePrimitiveCode.charToAll(CharOneOrN.class, ReplicatePrimitiveCode.MAIN_SRC); final String objectOneOrNPath = - ReplicatePrimitiveCode.charToObject(CharOneOrN.class, ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.charToObject(CharOneOrN.class, ReplicatePrimitiveCode.MAIN_SRC); final File oneOrNFile = new File(objectOneOrNPath); List lines = FileUtils.readLines(oneOrNFile, Charset.defaultCharset()); lines = 
globalReplacements(lines, - "class Block([0-2])", "class Block$1", - "Object \\[\\]", "T []", "SoftRecycler", + "Object \\[\\]", "T []", "SoftRecycler", "Block2", "Block2"); lines = simpleFixup(lines, "Block1", "Block2", "Block2"); lines = globalReplacements(lines, - "new Block2\\[BLOCK1_SIZE\\]", "new Block2[BLOCK1_SIZE]", - "new Block1\\[BLOCK0_SIZE\\]", "new Block1[BLOCK0_SIZE]"); + "new Block2\\[BLOCK1_SIZE\\]", "new Block2[BLOCK1_SIZE]", + "new Block1\\[BLOCK0_SIZE\\]", "new Block1[BLOCK0_SIZE]"); FileUtils.writeLines(oneOrNFile, lines); } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/ReverseLookupColumnSource.java b/DB/src/main/java/io/deephaven/db/v2/sources/ReverseLookupColumnSource.java index f269a3472ad..db514956485 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/ReverseLookupColumnSource.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/ReverseLookupColumnSource.java @@ -7,20 +7,18 @@ import java.util.function.ToIntFunction; /** - * Common interface for column sources that provide a reverse-lookup function (value to int key). - * Note that int keys are used because this is intended for column sources with a small, contiguous - * key range starting from 0 and well shorter than Integer.MAX_VALUE. + * Common interface for column sources that provide a reverse-lookup function (value to int key). Note that int keys are + * used because this is intended for column sources with a small, contiguous key range starting from 0 and well shorter + * than Integer.MAX_VALUE. */ -public interface ReverseLookupColumnSource - extends ColumnSource, - StringSetImpl.ReversibleLookup { +public interface ReverseLookupColumnSource extends ColumnSource, + StringSetImpl.ReversibleLookup { /** * Get a reverse-lookup function for all non-null values stored in this column source at * {@code keys <= highestKeyNeeded}. 
* * @param highestKeyNeeded The highest key needed in the result map - * @return A reverse-lookup function that has all values defined for keys in [0, - * highestKeyNeeded] + * @return A reverse-lookup function that has all values defined for keys in [0, highestKeyNeeded] */ ToIntFunction getReverseLookup(final int highestKeyNeeded); @@ -34,8 +32,8 @@ public interface ReverseLookupColumnSource * * @param highestIndex The highest key needed for the lookup * @param value The value we are looking up - * @return The key, between 0 and highestIndex, for the value. A value outside this range if the - * value has no mapping in the range. + * @return The key, between 0 and highestIndex, for the value. A value outside this range if the value has no + * mapping in the range. */ default int rget(int highestIndex, DATA_TYPE value) { diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/ReversedColumnSource.java b/DB/src/main/java/io/deephaven/db/v2/sources/ReversedColumnSource.java index 7f7eadf0a65..63a97d0685f 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/ReversedColumnSource.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/ReversedColumnSource.java @@ -13,9 +13,9 @@ import org.jetbrains.annotations.NotNull; /** - * This column source wraps another column source, and returns the values in the opposite order. It - * must be paired with a ReverseOperation (that can be shared among reversed column sources) that - * implements the index transformations for this source. + * This column source wraps another column source, and returns the values in the opposite order. It must be paired with + * a ReverseOperation (that can be shared among reversed column sources) that implements the index transformations for + * this source. 
*/ public class ReversedColumnSource extends AbstractColumnSource { private final ColumnSource innerSource; @@ -27,8 +27,7 @@ public Class getComponentType() { return innerSource.getComponentType(); } - public ReversedColumnSource(@NotNull ColumnSource innerSource, - @NotNull ReverseOperation indexReverser) { + public ReversedColumnSource(@NotNull ColumnSource innerSource, @NotNull ReverseOperation indexReverser) { super(innerSource.getType()); this.innerSource = innerSource; this.indexReverser = indexReverser; @@ -155,8 +154,8 @@ public FillContext makeFillContext(final int chunkCapacity, final SharedContext @Override public void fillChunk(@NotNull ColumnSource.FillContext _context, - @NotNull WritableChunk destination, - @NotNull OrderedKeys orderedKeys) { + @NotNull WritableChunk destination, + @NotNull OrderedKeys orderedKeys) { // noinspection unchecked final FillContext context = (FillContext) _context; final OrderedKeys reversedIndex = indexReverser.transform(orderedKeys.asIndex()); @@ -166,8 +165,8 @@ public void fillChunk(@NotNull ColumnSource.FillContext _context, @Override public void fillPrevChunk(@NotNull ColumnSource.FillContext _context, - @NotNull WritableChunk destination, - @NotNull OrderedKeys orderedKeys) { + @NotNull WritableChunk destination, + @NotNull OrderedKeys orderedKeys) { // noinspection unchecked final FillContext context = (FillContext) _context; final OrderedKeys reversedIndex = indexReverser.transformPrev(orderedKeys.asIndex()); diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/RowIdSource.java b/DB/src/main/java/io/deephaven/db/v2/sources/RowIdSource.java index e4eb644b749..724ebb47202 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/RowIdSource.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/RowIdSource.java @@ -5,8 +5,7 @@ package io.deephaven.db.v2.sources; @AbstractColumnSource.IsSerializable(value = true) -public class RowIdSource extends AbstractColumnSource - implements 
ImmutableColumnSourceGetDefaults.ForLong { +public class RowIdSource extends AbstractColumnSource implements ImmutableColumnSourceGetDefaults.ForLong { public static final RowIdSource INSTANCE = new RowIdSource(); public RowIdSource() { diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/SingleValueColumnSource.java b/DB/src/main/java/io/deephaven/db/v2/sources/SingleValueColumnSource.java index d5f8f779ca8..bfd17cd8788 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/SingleValueColumnSource.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/SingleValueColumnSource.java @@ -13,8 +13,7 @@ @AbstractColumnSource.IsSerializable(value = true) public abstract class SingleValueColumnSource extends AbstractColumnSource - implements WritableSource, WritableChunkSink, ShiftData.ShiftCallback, - Serializable { + implements WritableSource, WritableChunkSink, ShiftData.ShiftCallback, Serializable { protected transient long changeTime; protected boolean isTrackingPrevValues; diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/SinkFiller.java b/DB/src/main/java/io/deephaven/db/v2/sources/SinkFiller.java index f54a1cf3b07..d3f6e93b8a3 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/SinkFiller.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/SinkFiller.java @@ -4,8 +4,7 @@ import io.deephaven.db.v2.sources.chunk.Attributes.Values; import io.deephaven.db.v2.utils.LongAbortableConsumer; -public abstract class SinkFiller - implements WritableChunkSink.FillFromContext, LongAbortableConsumer { +public abstract class SinkFiller implements WritableChunkSink.FillFromContext, LongAbortableConsumer { public static SinkFiller create(final ChunkType chunkType) { switch (chunkType) { case Byte: @@ -25,8 +24,7 @@ public static SinkFiller create(final ChunkType chunkType) { case Object: return ObjectFiller.INSTANCE; - // Boolean Chunks will be passing in chunkType = Object, so there is no use case for - // passing in + // Boolean Chunks will be passing in 
chunkType = Object, so there is no use case for passing in // ChunkType.Boolean. case Boolean: default: diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/SizedColumnSource.java b/DB/src/main/java/io/deephaven/db/v2/sources/SizedColumnSource.java index 7f17d6aec79..d93aba01cc5 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/SizedColumnSource.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/SizedColumnSource.java @@ -5,6 +5,5 @@ /** * Interface for {@link ColumnSource}s that know their size. */ -public interface SizedColumnSource - extends ColumnSource, LongSizedDataStructure { +public interface SizedColumnSource extends ColumnSource, LongSizedDataStructure { } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/SparseArrayColumnSource.java b/DB/src/main/java/io/deephaven/db/v2/sources/SparseArrayColumnSource.java index 625e8ff699a..dea8bf85957 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/SparseArrayColumnSource.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/SparseArrayColumnSource.java @@ -29,9 +29,8 @@ * A column source backed by arrays that may not be filled in all blocks. * *

      - * To store the blocks, we use a multi-level page table like structure. Each entry that exists is - * complete, i.e. we never reallocate partial blocks, we always allocate the complete block. The - * index key is divided as follows: + * To store the blocks, we use a multi-level page table like structure. Each entry that exists is complete, i.e. we + * never reallocate partial blocks, we always allocate the complete block. The index key is divided as follows: *

      *

    * @@ -61,21 +60,20 @@ * *
    *

    - * Bit 63, the sign bit, is used to indicate null (that is, all negative numbers are defined to be - * null) + * Bit 63, the sign bit, is used to indicate null (that is, all negative numbers are defined to be null) *

    *

    - * Parallel structures are used for previous values and prevInUse. We recycle all levels of the - * previous blocks, so that the previous structure takes up memory only while it is in use. + * Parallel structures are used for previous values and prevInUse. We recycle all levels of the previous blocks, so that + * the previous structure takes up memory only while it is in use. *

    *

    */ @AbstractColumnSource.IsSerializable(value = true) public abstract class SparseArrayColumnSource - extends AbstractDeferredGroupingColumnSource - implements FillUnordered, WritableSource { + extends AbstractDeferredGroupingColumnSource + implements FillUnordered, WritableSource { public static final SparseArrayColumnSource[] ZERO_LENGTH_SPARSE_ARRAY_COLUMN_SOURCE_ARRAY = - new SparseArrayColumnSource[0]; + new SparseArrayColumnSource[0]; static final int DEFAULT_RECYCLER_CAPACITY = 1024; static final int INITIAL_NUMBER_OF_BLOCKS = 4; @@ -89,16 +87,12 @@ public abstract class SparseArrayColumnSource // final int indexWithinBlock = (int) (key & INDEX_MASK); // data = blocks[block0][block1][block2][indexWithinBlock]; // - // To access a "previous" data element: the structure is identical, except you refer to the prev - // structure: + // To access a "previous" data element: the structure is identical, except you refer to the prev structure: // prevData = prevBlocks[block0][block1][block2][indexWithinBlock]; // - // To access a true/false entry from the "prevInUse" data structure: the structure is similar, - // except that the - // innermost array is logically is a two-level structure: it is an array of "bitsets", where - // each "bitset" is a - // 64-element "array" of bits, in reality a 64-bit long. If we were able to access the bitset as - // an array, the code + // To access a true/false entry from the "prevInUse" data structure: the structure is similar, except that the + // innermost array is logically is a two-level structure: it is an array of "bitsets", where each "bitset" is a + // 64-element "array" of bits, in reality a 64-bit long. 
If we were able to access the bitset as an array, the code // would be: // bool inUse = prevInUse[block0][block1][block2][indexWithinInUse][inUseBitIndex] // The actual code is: @@ -111,52 +105,45 @@ public abstract class SparseArrayColumnSource // // and, if an inUse block is null (at any level), then the inUse result is defined as false. // - // In the code below we do all the calculations in the "log" space so, in actuality it's more - // like + // In the code below we do all the calculations in the "log" space so, in actuality it's more like // indexWithinInUse = indexWithinBlock >> LOG_INUSE_BITSET_SIZE; // maskWithinInUse = 1L << (indexWithinBlock & IN_USE_MASK); // - // Finally, this bitset manipulation logic only really makes sense if the innermost data block - // size is larger than - // the bitset size (64), so we have the additional constraint that LOG_BLOCK_SIZE >= - // LOG_INUSE_BITSET_SIZE. + // Finally, this bitset manipulation logic only really makes sense if the innermost data block size is larger than + // the bitset size (64), so we have the additional constraint that LOG_BLOCK_SIZE >= LOG_INUSE_BITSET_SIZE. 
static { - // we must completely use the 63-bit address space of index keys (negative numbers are - // defined to be null) + // we must completely use the 63-bit address space of index keys (negative numbers are defined to be null) Assert.eq(LOG_BLOCK_SIZE + LOG_BLOCK0_SIZE + LOG_BLOCK1_SIZE + LOG_BLOCK2_SIZE, - "LOG_BLOCK_SIZE + LOG_BLOCK0_SIZE + LOG_BLOCK1_SIZE + LOG_BLOCK2_SIZE", 63); + "LOG_BLOCK_SIZE + LOG_BLOCK0_SIZE + LOG_BLOCK1_SIZE + LOG_BLOCK2_SIZE", 63); Assert.geq(LOG_BLOCK_SIZE, "LOG_BLOCK_SIZE", LOG_INUSE_BITSET_SIZE); } // the lowest level inUse bitmap recycle static final SoftRecycler inUseRecycler = new SoftRecycler<>(DEFAULT_RECYCLER_CAPACITY, - () -> new long[IN_USE_BLOCK_SIZE], - block -> Arrays.fill(block, 0)); + () -> new long[IN_USE_BLOCK_SIZE], + block -> Arrays.fill(block, 0)); // the recycler for blocks of bitmaps - static final SoftRecycler inUse2Recycler = - new SoftRecycler<>(DEFAULT_RECYCLER_CAPACITY, + static final SoftRecycler inUse2Recycler = new SoftRecycler<>(DEFAULT_RECYCLER_CAPACITY, () -> new long[BLOCK2_SIZE][], null); // the recycler for blocks of blocks of bitmaps - static final SoftRecycler inUse1Recycler = - new SoftRecycler<>(DEFAULT_RECYCLER_CAPACITY, + static final SoftRecycler inUse1Recycler = new SoftRecycler<>(DEFAULT_RECYCLER_CAPACITY, () -> new LongOneOrN.Block2[BLOCK1_SIZE], null); // the highest level block of blocks of blocks of inUse bitmaps - static final SoftRecycler inUse0Recycler = - new SoftRecycler<>(DEFAULT_RECYCLER_CAPACITY, + static final SoftRecycler inUse0Recycler = new SoftRecycler<>(DEFAULT_RECYCLER_CAPACITY, () -> new LongOneOrN.Block1[BLOCK0_SIZE], null); transient LongOneOrN.Block0 prevInUse; /* - * Normally the SparseArrayColumnSource can be changed, but if we are looking a static select, - * for example, we know that the values are never going to actually change. 
+ * Normally the SparseArrayColumnSource can be changed, but if we are looking a static select, for example, we know + * that the values are never going to actually change. */ boolean immutable = false; @@ -168,10 +155,8 @@ public abstract class SparseArrayColumnSource super(type); } - // This is customized in two different classes: In BooleanSparseArraySource it is special-cased - // by the - // Replicator. In DateTimeSparseArraySource (a non-replicated class), the humans have overridden - // it manually. + // This is customized in two different classes: In BooleanSparseArraySource it is special-cased by the + // Replicator. In DateTimeSparseArraySource (a non-replicated class), the humans have overridden it manually. WritableSource reinterpretForSerialization() { return this; } @@ -219,8 +204,7 @@ public void remove(Index toRemove) { throw new UnsupportedOperationException(); } - public static SparseArrayColumnSource getSparseMemoryColumnSource(Collection data, - Class type) { + public static SparseArrayColumnSource getSparseMemoryColumnSource(Collection data, Class type) { final SparseArrayColumnSource result = getSparseMemoryColumnSource(data.size(), type); long i = 0; for (T o : data) { @@ -229,8 +213,7 @@ public static SparseArrayColumnSource getSparseMemoryColumnSource(Collect return result; } - private static SparseArrayColumnSource getSparseMemoryColumnSource(T[] data, - Class type) { + private static SparseArrayColumnSource getSparseMemoryColumnSource(T[] data, Class type) { final SparseArrayColumnSource result = getSparseMemoryColumnSource(data.length, type); long i = 0; for (T o : data) { @@ -323,18 +306,16 @@ public static SparseArrayColumnSource getSparseMemoryColumnSource(Class SparseArrayColumnSource getSparseMemoryColumnSource(Class type, - Class componentType) { + public static SparseArrayColumnSource getSparseMemoryColumnSource(Class type, Class componentType) { return getSparseMemoryColumnSource(0, type, componentType); } - public static 
SparseArrayColumnSource getSparseMemoryColumnSource(long size, - Class type) { + public static SparseArrayColumnSource getSparseMemoryColumnSource(long size, Class type) { return getSparseMemoryColumnSource(size, type, null); } - public static SparseArrayColumnSource getSparseMemoryColumnSource(long size, - Class type, @Nullable Class componentType) { + public static SparseArrayColumnSource getSparseMemoryColumnSource(long size, Class type, + @Nullable Class componentType) { final SparseArrayColumnSource result; if (type == byte.class || type == Byte.class) { result = new ByteSparseArraySource(); @@ -370,8 +351,7 @@ public static SparseArrayColumnSource getSparseMemoryColumnSource(long si public static ColumnSource getSparseMemoryColumnSource(Object dataArray) { if (dataArray instanceof boolean[]) { - return getSparseMemoryColumnSource(ArrayUtils.getBoxedArray((boolean[]) dataArray), - Boolean.class); + return getSparseMemoryColumnSource(ArrayUtils.getBoxedArray((boolean[]) dataArray), Boolean.class); } else if (dataArray instanceof byte[]) { return getSparseMemoryColumnSource((byte[]) dataArray); } else if (dataArray instanceof char[]) { @@ -405,13 +385,13 @@ public static ColumnSource getSparseMemoryColumnSource(Object dataArray) { } else { // noinspection unchecked return getSparseMemoryColumnSource((Object[]) dataArray, - (Class) dataArray.getClass().getComponentType()); + (Class) dataArray.getClass().getComponentType()); } } /** - * Using a preferred chunk size of BLOCK_SIZE gives us the opportunity to directly return chunks - * from our data structure rather than copying data. + * Using a preferred chunk size of BLOCK_SIZE gives us the opportunity to directly return chunks from our data + * structure rather than copying data. 
*/ public int getPreferredChunkSize() { return BLOCK_SIZE; @@ -420,7 +400,7 @@ public int getPreferredChunkSize() { // region fillChunk @Override public void fillChunk(@NotNull FillContext context, @NotNull WritableChunk dest, - @NotNull OrderedKeys orderedKeys) { + @NotNull OrderedKeys orderedKeys) { if (orderedKeys.getAverageRunLengthEstimate() < USE_RANGES_AVERAGE_RUN_LENGTH) { fillByKeys(dest, orderedKeys); } else { @@ -431,31 +411,29 @@ public void fillChunk(@NotNull FillContext context, @NotNull WritableChunk dest, - @NotNull LongChunk keys) { + @NotNull final FillContext context, + @NotNull final WritableChunk dest, + @NotNull LongChunk keys) { fillByUnorderedKeys(dest, keys); } @Override public void fillPrevChunkUnordered( - @NotNull final FillContext context, - @NotNull final WritableChunk dest, - @NotNull LongChunk keys) { + @NotNull final FillContext context, + @NotNull final WritableChunk dest, + @NotNull LongChunk keys) { fillPrevByUnorderedKeys(dest, keys); } - abstract void fillByRanges(@NotNull WritableChunk dest, - @NotNull OrderedKeys orderedKeys); + abstract void fillByRanges(@NotNull WritableChunk dest, @NotNull OrderedKeys orderedKeys); - abstract void fillByKeys(@NotNull WritableChunk dest, - @NotNull OrderedKeys orderedKeys); + abstract void fillByKeys(@NotNull WritableChunk dest, @NotNull OrderedKeys orderedKeys); abstract void fillByUnorderedKeys(@NotNull WritableChunk dest, - @NotNull LongChunk keyIndices); + @NotNull LongChunk keyIndices); abstract void fillPrevByUnorderedKeys(@NotNull WritableChunk dest, - @NotNull LongChunk keyIndices); + @NotNull LongChunk keyIndices); private static final FillFromContext FILL_FROM_CONTEXT_INSTANCE = new FillFromContext() {}; @@ -465,8 +443,8 @@ public FillFromContext makeFillFromContext(int chunkCapacity) { } @Override - public void fillFromChunk(@NotNull FillFromContext context, - @NotNull Chunk src, @NotNull OrderedKeys orderedKeys) { + public void fillFromChunk(@NotNull FillFromContext context, 
@NotNull Chunk src, + @NotNull OrderedKeys orderedKeys) { if (orderedKeys.getAverageRunLengthEstimate() < USE_RANGES_AVERAGE_RUN_LENGTH) { fillFromChunkByKeys(orderedKeys, src); } else { @@ -474,11 +452,9 @@ public void fillFromChunk(@NotNull FillFromContext context, } } - abstract void fillFromChunkByRanges(@NotNull OrderedKeys orderedKeys, - Chunk src); + abstract void fillFromChunkByRanges(@NotNull OrderedKeys orderedKeys, Chunk src); - abstract void fillFromChunkByKeys(@NotNull OrderedKeys orderedKeys, - Chunk src); + abstract void fillFromChunkByKeys(@NotNull OrderedKeys orderedKeys, Chunk src); @Override public boolean isImmutable() { diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/StringSetImpl.java b/DB/src/main/java/io/deephaven/db/v2/sources/StringSetImpl.java index b9c1a91cc92..98729a5d29c 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/StringSetImpl.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/StringSetImpl.java @@ -25,35 +25,33 @@ public class StringSetImpl implements StringSet, Serializable { - public final static KeyedLongObjectKey CACHE_KEY = - new KeyedLongObjectKey.Strict() { + public final static KeyedLongObjectKey CACHE_KEY = new KeyedLongObjectKey.Strict() { - @Override - public long getLongKey(@NotNull final StringSetImpl stringSet) { - return stringSet.valueBitSet; - } + @Override + public long getLongKey(@NotNull final StringSetImpl stringSet) { + return stringSet.valueBitSet; + } - @Override - public int hashLongKey(final long valueBitSet) { - return HashCodeUtil.toHashCode(valueBitSet); - } + @Override + public int hashLongKey(final long valueBitSet) { + return HashCodeUtil.toHashCode(valueBitSet); + } - @Override - public boolean equalLongKey(final long valueBitSet, - @NotNull final StringSetImpl stringSet) { - return valueBitSet == stringSet.valueBitSet; - } - }; + @Override + public boolean equalLongKey(final long valueBitSet, @NotNull final StringSetImpl stringSet) { + return valueBitSet == 
stringSet.valueBitSet; + } + }; public final static KeyedLongObjectHash.ValueFactoryT> VALUE_FACTORY = - new KeyedLongObjectHash.ValueFactoryT.Strict>() { + new KeyedLongObjectHash.ValueFactoryT.Strict>() { - @Override - public StringSetImpl newValue(final long valueBitSet, - @NotNull final ReverseLookupColumnSource columnSource) { - return new StringSetImpl(columnSource, valueBitSet); - } - }; + @Override + public StringSetImpl newValue(final long valueBitSet, + @NotNull final ReverseLookupColumnSource columnSource) { + return new StringSetImpl(columnSource, valueBitSet); + } + }; private final ReversibleLookup reversibleLookup; private final long valueBitSet; @@ -67,16 +65,14 @@ public StringSetImpl newValue(final long valueBitSet, // TODO: If we start caching values, consider changing getEncoding to use values(). // TODO: On this note, look at LongBitmapIndexedImmutableSetFactory. - public StringSetImpl(@NotNull final ReversibleLookup reversibleLookup, - final long valueBitSet) { + public StringSetImpl(@NotNull final ReversibleLookup reversibleLookup, final long valueBitSet) { this.reversibleLookup = reversibleLookup; this.valueBitSet = valueBitSet; } private int getHighestOneBitIndex() { - return highestOneBitIndex == Integer.MIN_VALUE - ? (highestOneBitIndex = MathUtil.floorLog2(valueBitSet)) - : highestOneBitIndex; + return highestOneBitIndex == Integer.MIN_VALUE ? 
(highestOneBitIndex = MathUtil.floorLog2(valueBitSet)) + : highestOneBitIndex; } private int getBitCount() { @@ -96,7 +92,7 @@ private Object writeReplace() { private void readObject(ObjectInputStream ois) throws ClassNotFoundException, IOException { throw new UnsupportedOperationException( - "StringSetImpl should never be deserialized - it uses writeReplace() to serialize itself as different class entirely."); + "StringSetImpl should never be deserialized - it uses writeReplace() to serialize itself as different class entirely."); } @Override @@ -166,7 +162,7 @@ public long getEncoding(@NotNull final ToIntFunction toOffset) { final int keyBitIndex = toOffset.applyAsInt(value); if (keyBitIndex >= Long.SIZE) { throw new RuntimeException("Symbol manager returned an index " + keyBitIndex - + " greater than the maximum, for symbol " + value); + + " greater than the maximum, for symbol " + value); } encoding |= (1L << keyBitIndex); } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/SwitchColumnSource.java b/DB/src/main/java/io/deephaven/db/v2/sources/SwitchColumnSource.java index 1e4a015f209..6fbe013eb3b 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/SwitchColumnSource.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/SwitchColumnSource.java @@ -25,7 +25,7 @@ public SwitchColumnSource(ColumnSource currentSource) { } public SwitchColumnSource(@NotNull final ColumnSource currentSource, - @Nullable final Consumer> onPreviousCommitted) { + @Nullable final Consumer> onPreviousCommitted) { super(currentSource.getType(), currentSource.getComponentType()); this.updateCommitter = new UpdateCommitter<>(this, SwitchColumnSource::clearPrevious); this.onPreviousCommitted = onPreviousCommitted; @@ -43,8 +43,8 @@ private void clearPrevious() { public void setNewCurrent(ColumnSource newCurrent) { Assert.eq(newCurrent.getType(), "newCurrent.getType()", getType(), "getType()"); - Assert.eq(newCurrent.getComponentType(), "newCurrent.getComponentType()", - 
getComponentType(), "getComponentType()"); + Assert.eq(newCurrent.getComponentType(), "newCurrent.getComponentType()", getComponentType(), + "getComponentType()"); prevSource = currentSource; prevValidityStep = LogicalClock.DEFAULT.currentStep(); currentSource = newCurrent; @@ -73,23 +73,23 @@ private SwitchContext(final int chunkCapacity, final SharedContext sharedContext public CT getCurrentContext() { return currentContext == null - ? currentContext = makeContext(currentSource) - : currentContext; + ? currentContext = makeContext(currentSource) + : currentContext; } public CT getPrevContext() { return prevInvalid() - ? getCurrentContext() - : prevContext == null - ? prevContext = makeContext(prevSource) - : prevContext; + ? getCurrentContext() + : prevContext == null + ? prevContext = makeContext(prevSource) + : prevContext; } @Override public void close() { // noinspection EmptyTryBlock try (final SafeCloseable ignored1 = currentContext; - final SafeCloseable ignored2 = prevContext) { + final SafeCloseable ignored2 = prevContext) { } } } @@ -113,26 +113,23 @@ public FillContext makeFillContext(final int chunkCapacity, final SharedContext @Override public void fillChunk(@NotNull final FillContext context, - @NotNull final WritableChunk destination, - @NotNull final OrderedKeys orderedKeys) { + @NotNull final WritableChunk destination, + @NotNull final OrderedKeys orderedKeys) { // noinspection unchecked - currentSource.fillChunk(((SwitchFillContext) context).getCurrentContext(), destination, - orderedKeys); + currentSource.fillChunk(((SwitchFillContext) context).getCurrentContext(), destination, orderedKeys); } @Override public void fillPrevChunk(@NotNull final FillContext context, - @NotNull final WritableChunk destination, - @NotNull final OrderedKeys orderedKeys) { + @NotNull final WritableChunk destination, + @NotNull final OrderedKeys orderedKeys) { if (prevInvalid()) { // noinspection unchecked - currentSource.fillPrevChunk(((SwitchFillContext) 
context).getCurrentContext(), - destination, orderedKeys); + currentSource.fillPrevChunk(((SwitchFillContext) context).getCurrentContext(), destination, orderedKeys); return; } // noinspection unchecked - prevSource.fillPrevChunk(((SwitchFillContext) context).getPrevContext(), destination, - orderedKeys); + prevSource.fillPrevChunk(((SwitchFillContext) context).getPrevContext(), destination, orderedKeys); } private class SwitchGetContext extends SwitchContext implements GetContext { @@ -154,19 +151,17 @@ public GetContext makeGetContext(final int chunkCapacity, final SharedContext sh @Override public Chunk getChunk(@NotNull final GetContext context, - @NotNull final OrderedKeys orderedKeys) { + @NotNull final OrderedKeys orderedKeys) { // noinspection unchecked - return currentSource.getChunk(((SwitchGetContext) context).getCurrentContext(), - orderedKeys); + return currentSource.getChunk(((SwitchGetContext) context).getCurrentContext(), orderedKeys); } @Override public Chunk getPrevChunk(@NotNull final GetContext context, - @NotNull final OrderedKeys orderedKeys) { + @NotNull final OrderedKeys orderedKeys) { if (prevInvalid()) { // noinspection unchecked - return currentSource.getPrevChunk(((SwitchGetContext) context).getCurrentContext(), - orderedKeys); + return currentSource.getPrevChunk(((SwitchGetContext) context).getCurrentContext(), orderedKeys); } // noinspection unchecked return prevSource.getPrevChunk(((SwitchGetContext) context).getPrevContext(), orderedKeys); diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/UnboxedDateTimeColumnSource.java b/DB/src/main/java/io/deephaven/db/v2/sources/UnboxedDateTimeColumnSource.java index 489f88d409b..e327e002260 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/UnboxedDateTimeColumnSource.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/UnboxedDateTimeColumnSource.java @@ -8,12 +8,12 @@ import org.jetbrains.annotations.NotNull; /** - * Reinterpret result for many {@link ColumnSource} 
implementations that internally represent - * {@link DBDateTime} values as {@code long} values. + * Reinterpret result for many {@link ColumnSource} implementations that internally represent {@link DBDateTime} values + * as {@code long} values. */ @AbstractColumnSource.IsSerializable(value = true) public class UnboxedDateTimeColumnSource extends AbstractColumnSource - implements MutableColumnSourceGetDefaults.ForLong { + implements MutableColumnSourceGetDefaults.ForLong { private final ColumnSource alternateColumnSource; @@ -39,14 +39,13 @@ public boolean isImmutable() { @Override public boolean allowsReinterpret( - @NotNull final Class alternateDataType) { + @NotNull final Class alternateDataType) { return alternateDataType == DBDateTime.class; } @Override public ColumnSource doReinterpret( - @NotNull final Class alternateDataType) - throws IllegalArgumentException { + @NotNull final Class alternateDataType) throws IllegalArgumentException { // noinspection unchecked return (ColumnSource) alternateColumnSource; } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/UnboxedDateTimeWritableSource.java b/DB/src/main/java/io/deephaven/db/v2/sources/UnboxedDateTimeWritableSource.java index 709a766e932..b40bad7821d 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/UnboxedDateTimeWritableSource.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/UnboxedDateTimeWritableSource.java @@ -2,8 +2,7 @@ import io.deephaven.db.tables.utils.DBDateTime; -public class UnboxedDateTimeWritableSource extends UnboxedDateTimeColumnSource - implements WritableSource { +public class UnboxedDateTimeWritableSource extends UnboxedDateTimeColumnSource implements WritableSource { private final WritableSource alternateWritableSource; public UnboxedDateTimeWritableSource(WritableSource alternateWritableSource) { diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/UngroupedArrayColumnSource.java b/DB/src/main/java/io/deephaven/db/v2/sources/UngroupedArrayColumnSource.java index 
2aa23583d2c..e07c919d782 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/UngroupedArrayColumnSource.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/UngroupedArrayColumnSource.java @@ -5,7 +5,7 @@ package io.deephaven.db.v2.sources; public class UngroupedArrayColumnSource extends UngroupedColumnSource - implements MutableColumnSourceGetDefaults.ForObject { + implements MutableColumnSourceGetDefaults.ForObject { private ColumnSource innerSource; @Override diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/UngroupedColumnSource.java b/DB/src/main/java/io/deephaven/db/v2/sources/UngroupedColumnSource.java index a6db004d148..b30348687cf 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/UngroupedColumnSource.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/UngroupedColumnSource.java @@ -55,51 +55,37 @@ public static UngroupedColumnSource getColumnSource(ColumnSource column) { return ((AggregateColumnSource) column).ungrouped(); } if (DbArray.class.isAssignableFrom(column.getType())) { - if (column.getComponentType() == Byte.class - || column.getComponentType() == byte.class) { + if (column.getComponentType() == Byte.class || column.getComponentType() == byte.class) { return new UngroupedBoxedByteDbArrayColumnSource(column); - } else if (column.getComponentType() == Character.class - || column.getComponentType() == char.class) { + } else if (column.getComponentType() == Character.class || column.getComponentType() == char.class) { return new UngroupedBoxedCharDbArrayColumnSource(column); - } else if (column.getComponentType() == Double.class - || column.getComponentType() == double.class) { + } else if (column.getComponentType() == Double.class || column.getComponentType() == double.class) { return new UngroupedBoxedDoubleDbArrayColumnSource(column); - } else if (column.getComponentType() == Float.class - || column.getComponentType() == float.class) { + } else if (column.getComponentType() == Float.class || column.getComponentType() == 
float.class) { return new UngroupedBoxedFloatDbArrayColumnSource(column); - } else if (column.getComponentType() == Integer.class - || column.getComponentType() == int.class) { + } else if (column.getComponentType() == Integer.class || column.getComponentType() == int.class) { return new UngroupedBoxedIntDbArrayColumnSource(column); - } else if (column.getComponentType() == Long.class - || column.getComponentType() == long.class) { + } else if (column.getComponentType() == Long.class || column.getComponentType() == long.class) { return new UngroupedBoxedLongDbArrayColumnSource(column); - } else if (column.getComponentType() == Short.class - || column.getComponentType() == short.class) { + } else if (column.getComponentType() == Short.class || column.getComponentType() == short.class) { return new UngroupedBoxedShortDbArrayColumnSource(column); } else { return new UngroupedDbArrayColumnSource(column); } } else if (DbArrayBase.class.isAssignableFrom(column.getType())) { - if (column.getComponentType() == Byte.class - || column.getComponentType() == byte.class) { + if (column.getComponentType() == Byte.class || column.getComponentType() == byte.class) { return new UngroupedByteDbArrayColumnSource(column); - } else if (column.getComponentType() == Character.class - || column.getComponentType() == char.class) { + } else if (column.getComponentType() == Character.class || column.getComponentType() == char.class) { return new UngroupedCharDbArrayColumnSource(column); - } else if (column.getComponentType() == Double.class - || column.getComponentType() == double.class) { + } else if (column.getComponentType() == Double.class || column.getComponentType() == double.class) { return new UngroupedDoubleDbArrayColumnSource(column); - } else if (column.getComponentType() == Float.class - || column.getComponentType() == float.class) { + } else if (column.getComponentType() == Float.class || column.getComponentType() == float.class) { return new 
UngroupedFloatDbArrayColumnSource(column); - } else if (column.getComponentType() == Integer.class - || column.getComponentType() == int.class) { + } else if (column.getComponentType() == Integer.class || column.getComponentType() == int.class) { return new UngroupedIntDbArrayColumnSource(column); - } else if (column.getComponentType() == Long.class - || column.getComponentType() == long.class) { + } else if (column.getComponentType() == Long.class || column.getComponentType() == long.class) { return new UngroupedLongDbArrayColumnSource(column); - } else if (column.getComponentType() == Short.class - || column.getComponentType() == short.class) { + } else if (column.getComponentType() == Short.class || column.getComponentType() == short.class) { return new UngroupedShortDbArrayColumnSource(column); } else { return new UngroupedDbArrayColumnSource(column); @@ -142,6 +128,6 @@ public static UngroupedColumnSource getColumnSource(ColumnSource column) { } } throw new UnsupportedOperationException( - "column.getType() = " + column.getType() + " column.getClass() = " + column.getClass()); + "column.getType() = " + column.getType() + " column.getClass() = " + column.getClass()); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/UngroupedDbArrayColumnSource.java b/DB/src/main/java/io/deephaven/db/v2/sources/UngroupedDbArrayColumnSource.java index e2921430154..3c6bcaf9d72 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/UngroupedDbArrayColumnSource.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/UngroupedDbArrayColumnSource.java @@ -8,7 +8,7 @@ import io.deephaven.db.tables.dbarrays.*; public class UngroupedDbArrayColumnSource extends UngroupedColumnSource - implements MutableColumnSourceGetDefaults.ForObject { + implements MutableColumnSourceGetDefaults.ForObject { private final ColumnSource> innerSource; private final boolean isUngroupable; @@ -22,7 +22,7 @@ public UngroupedDbArrayColumnSource(ColumnSource> innerSource) { super((Class) 
innerSource.getComponentType()); this.innerSource = innerSource; this.isUngroupable = innerSource instanceof UngroupableColumnSource - && ((UngroupableColumnSource) innerSource).isUngroupable(); + && ((UngroupableColumnSource) innerSource).isUngroupable(); } @Override @@ -50,8 +50,7 @@ public T getPrev(long index) { if (isUngroupable) { // noinspection unchecked - return (T) ((UngroupableColumnSource) innerSource).getUngroupedPrev(segment, - (int) offset); + return (T) ((UngroupableColumnSource) innerSource).getUngroupedPrev(segment, (int) offset); } else { Assert.neqNull(innerSource, "innerSource"); DbArray prevArray = innerSource.getPrev(segment); diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/UnionColumnSource.java b/DB/src/main/java/io/deephaven/db/v2/sources/UnionColumnSource.java index 8800e2e6ba3..6e94aa43475 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/UnionColumnSource.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/UnionColumnSource.java @@ -36,19 +36,19 @@ public class UnionColumnSource extends AbstractColumnSource { private Map reinterpretedSources; UnionColumnSource(@NotNull final Class type, - @Nullable final Class componentType, - @NotNull final UnionRedirection unionRedirection, - @NotNull final UnionSourceManager unionSourceManager) { + @Nullable final Class componentType, + @NotNull final UnionRedirection unionRedirection, + @NotNull final UnionSourceManager unionSourceManager) { // noinspection unchecked this(type, componentType, unionRedirection, unionSourceManager, 0, new ColumnSource[8]); } private UnionColumnSource(@NotNull final Class type, - @Nullable final Class componentType, - @NotNull final UnionRedirection unionRedirection, - @NotNull final UnionSourceManager unionSourceManager, - final int numSources, - @NotNull final ColumnSource[] subSources) { + @Nullable final Class componentType, + @NotNull final UnionRedirection unionRedirection, + @NotNull final UnionSourceManager unionSourceManager, + final int 
numSources, + @NotNull final ColumnSource[] subSources) { super(type, componentType); this.unionRedirection = unionRedirection; this.unionSourceManager = unionSourceManager; @@ -166,7 +166,7 @@ public T get(long index) { private void checkPos(long index, int pos) { if (pos >= subSources.length) { throw Assert.statementNeverExecuted( - "index: " + index + ", pos: " + pos + ", subSources.length: " + subSources.length); + "index: " + index + ", pos: " + pos + ", subSources.length: " + subSources.length); } } @@ -301,53 +301,49 @@ public void close() { } @Override - public ColumnSource.FillContext makeFillContext(int chunkCapacity, - final SharedContext sharedContext) { + public ColumnSource.FillContext makeFillContext(int chunkCapacity, final SharedContext sharedContext) { return new FillContext(chunkCapacity); } @Override public void fillChunk(@NotNull ColumnSource.FillContext _context, - @NotNull WritableChunk destination, - @NotNull OrderedKeys orderedKeys) { + @NotNull WritableChunk destination, + @NotNull OrderedKeys orderedKeys) { final FillContext context = (FillContext) _context; doFillChunk(context, destination, orderedKeys, false); } @Override public void fillPrevChunk(@NotNull ColumnSource.FillContext _context, - @NotNull WritableChunk destination, - @NotNull OrderedKeys orderedKeys) { + @NotNull WritableChunk destination, + @NotNull OrderedKeys orderedKeys) { final FillContext context = (FillContext) _context; doFillChunk(context, destination, orderedKeys, true); } private void doFillChunk(@NotNull FillContext context, - @NotNull WritableChunk destination, - @NotNull OrderedKeys orderedKeys, - boolean usePrev) { + @NotNull WritableChunk destination, + @NotNull OrderedKeys orderedKeys, + boolean usePrev) { final int okSize = orderedKeys.intSize(); // Safe to assume destination has sufficient size for the request. destination.setSize(okSize); - // To compute startTid and lastTid we assume at least one element is in the provided - // orderedKeys. 
+ // To compute startTid and lastTid we assume at least one element is in the provided orderedKeys. if (okSize == 0) { return; } final int startTid = usePrev ? unionRedirection.tidForPrevIndex(orderedKeys.firstKey()) - : unionRedirection.tidForIndex(orderedKeys.firstKey()); + : unionRedirection.tidForIndex(orderedKeys.firstKey()); final int lastTid = usePrev ? unionRedirection.tidForPrevIndex(orderedKeys.lastKey()) - : unionRedirection.tidForIndex(orderedKeys.lastKey()); - final long[] startOfIndices = - usePrev ? unionRedirection.prevStartOfIndices : unionRedirection.startOfIndices; + : unionRedirection.tidForIndex(orderedKeys.lastKey()); + final long[] startOfIndices = usePrev ? unionRedirection.prevStartOfIndices : unionRedirection.startOfIndices; try (final OrderedKeys.Iterator okit = orderedKeys.getOrderedKeysIterator(); - final ResettableWritableChunk resettableDestination = - getChunkType().makeResettableWritableChunk(); - final ShiftedOrderedKeys okHelper = new ShiftedOrderedKeys()) { + final ResettableWritableChunk resettableDestination = getChunkType().makeResettableWritableChunk(); + final ShiftedOrderedKeys okHelper = new ShiftedOrderedKeys()) { int offset = 0; for (int tid = startTid; tid <= lastTid; ++tid) { final int capacityRemaining = Math.min(context.chunkCapacity, okSize - offset); @@ -355,8 +351,7 @@ private void doFillChunk(@NotNull FillContext context, break; } - okHelper.reset(okit.getNextOrderedKeysThrough(startOfIndices[tid + 1] - 1), - -startOfIndices[tid]); + okHelper.reset(okit.getNextOrderedKeysThrough(startOfIndices[tid + 1] - 1), -startOfIndices[tid]); if (okHelper.intSize() <= 0) { // we do not need to invoke fillChunk on this subSource continue; @@ -375,11 +370,9 @@ private void doFillChunk(@NotNull FillContext context, resettableDestination.resetFromChunk(destination, offset, capacityRemaining); if (usePrev) { - subSources[tid].fillPrevChunk(context.lastTableContext, resettableDestination, - okHelper); + 
subSources[tid].fillPrevChunk(context.lastTableContext, resettableDestination, okHelper); } else { - subSources[tid].fillChunk(context.lastTableContext, resettableDestination, - okHelper); + subSources[tid].fillChunk(context.lastTableContext, resettableDestination, okHelper); } offset += resettableDestination.size(); } @@ -396,8 +389,8 @@ void appendColumnSource(ColumnSource sourceToAdd) { return; } - for (final Iterator> it = - reinterpretedSources.entrySet().iterator(); it.hasNext();) { + for (final Iterator> it = reinterpretedSources.entrySet().iterator(); it + .hasNext();) { final Map.Entry entry = it.next(); final WeakReference weakReference = entry.getValue(); final ReinterpretToOriginal reinterpretToOriginal = weakReference.get(); @@ -434,17 +427,17 @@ public ColumnSource getSubSource(int i) { @Override public boolean allowsReinterpret( - @NotNull final Class alternateDataType) { + @NotNull final Class alternateDataType) { return unionSourceManager.allowsReinterpret() - && Arrays.stream(subSources).filter(Objects::nonNull) - .allMatch(cs -> cs.allowsReinterpret(alternateDataType)); + && Arrays.stream(subSources).filter(Objects::nonNull) + .allMatch(cs -> cs.allowsReinterpret(alternateDataType)); } @Override protected ColumnSource doReinterpret( - @NotNull Class alternateDataType) { + @NotNull Class alternateDataType) { final WeakReference reinterpretedSourceWeakReference = - reinterpretedSources == null ? null : reinterpretedSources.get(alternateDataType); + reinterpretedSources == null ? 
null : reinterpretedSources.get(alternateDataType); if (reinterpretedSourceWeakReference != null) { final UnionColumnSource cachedValue = reinterpretedSourceWeakReference.get(); if (cachedValue != null) { @@ -454,8 +447,7 @@ protected ColumnSource doReinterpret( } // noinspection unchecked - final ColumnSource[] reinterpretedSubSources = - new ColumnSource[subSources.length]; + final ColumnSource[] reinterpretedSubSources = new ColumnSource[subSources.length]; for (int ii = 0; ii < subSources.length; ++ii) { if (subSources[ii] == null) { continue; @@ -465,14 +457,13 @@ protected ColumnSource doReinterpret( // noinspection unchecked final ReinterpretToOriginal reinterpretedSource = - new ReinterpretToOriginal(alternateDataType, numSources, reinterpretedSubSources, this); + new ReinterpretToOriginal(alternateDataType, numSources, reinterpretedSubSources, this); if (reinterpretedSources == null) { reinterpretedSources = new KeyedObjectHashMap<>(REINTERPRETED_CLASS_KEY_INSTANCE); } - reinterpretedSources.put(alternateDataType, - new ReinterpretReference(reinterpretedSource, alternateDataType)); + reinterpretedSources.put(alternateDataType, new ReinterpretReference(reinterpretedSource, alternateDataType)); return reinterpretedSource; } @@ -490,9 +481,9 @@ private static class ReinterpretToOriginal extends UnionColumnSource< private final UnionColumnSource originalSource; private ReinterpretToOriginal(Class alternateDataType, int numSources, - ColumnSource[] reinterpretedSubSources, UnionColumnSource originalSource) { + ColumnSource[] reinterpretedSubSources, UnionColumnSource originalSource) { super(alternateDataType, null, originalSource.unionRedirection, - originalSource.unionSourceManager, numSources, reinterpretedSubSources); + originalSource.unionSourceManager, numSources, reinterpretedSubSources); this.originalSource = originalSource; } @@ -503,20 +494,18 @@ public boolean allowsReinterpret(@NotNull Class alternateDataType) { @Override protected ColumnSource 
doReinterpret( - @NotNull Class alternateDataType) { + @NotNull Class alternateDataType) { // noinspection unchecked return (ColumnSource) originalSource; } } - private static class ReinterpretedClassKey - extends KeyedObjectKey.Basic { + private static class ReinterpretedClassKey extends KeyedObjectKey.Basic { @Override public Class getKey(ReinterpretReference reinterpretReference) { return reinterpretReference.alternateDataType; } } - private static final ReinterpretedClassKey REINTERPRETED_CLASS_KEY_INSTANCE = - new ReinterpretedClassKey(); + private static final ReinterpretedClassKey REINTERPRETED_CLASS_KEY_INSTANCE = new ReinterpretedClassKey(); } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/UnionRedirection.java b/DB/src/main/java/io/deephaven/db/v2/sources/UnionRedirection.java index 980e33c3a12..739727db89e 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/UnionRedirection.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/UnionRedirection.java @@ -10,45 +10,42 @@ import java.util.Arrays; /** - * This class manages the constituent Tables for a UnionColumnSource, so that we can map from an - * outer (merged) index into the appropriate segment of a component table. + * This class manages the constituent Tables for a UnionColumnSource, so that we can map from an outer (merged) index + * into the appropriate segment of a component table. */ public class UnionRedirection implements Serializable { /** * What do we tell users when they try to insert into a full Redirection index. */ private static final String INDEX_OVERFLOW_MESSAGE = - "Failure to insert index into UnionRedirection, Index values exceed long. If you have several recursive merges, consider rewriting your query to do a single merge of many tables."; + "Failure to insert index into UnionRedirection, Index values exceed long. 
If you have several recursive merges, consider rewriting your query to do a single merge of many tables."; /** * This is the minimum size of an initial allocation of a region. */ public static final long CHUNK_MULTIPLE = - Configuration.getInstance().getLongWithDefault("UnionRedirection.chunkMultiple", 1 << 16); + Configuration.getInstance().getLongWithDefault("UnionRedirection.chunkMultiple", 1 << 16); /** * How many slots do we allocate for tables (one slot per table). */ private static final int INITIAL_SIZE = 8; - // cached last position, used to avoid the binary search for the table id, when requesting it - // for consecutive indices + // cached last position, used to avoid the binary search for the table id, when requesting it for consecutive + // indices private final ThreadLocal lastPos = ThreadLocal.withInitial(() -> 0); private final ThreadLocal prevLastPos = ThreadLocal.withInitial(() -> 0); // how many tables have been added to this redirection private int size = 0; - // the start of our outer index for this entry, the end of the current entry (+ 1) is in the - // next table + // the start of our outer index for this entry, the end of the current entry (+ 1) is in the next table long[] startOfIndices = new long[INITIAL_SIZE]; - // the start of our outer prev index for this entry, the end of the current entry (+ 1) is in - // the next table + // the start of our outer prev index for this entry, the end of the current entry (+ 1) is in the next table long[] prevStartOfIndices = new long[INITIAL_SIZE]; - // copy of prevStartOfIndices to be updated during the LTM cycle and swapped as a terminal - // notification + // copy of prevStartOfIndices to be updated during the LTM cycle and swapped as a terminal notification long[] prevStartOfIndicesAlt = new long[INITIAL_SIZE]; /** @@ -114,14 +111,13 @@ private long roundToRegionBoundary(long key) { throw new UnsupportedOperationException(INDEX_OVERFLOW_MESSAGE); } - // Require empty tables have non-empty keyspace 
so that we can binary search on key to find - // source table. + // Require empty tables have non-empty keyspace so that we can binary search on key to find source table. return Math.max(1, numChunks) * CHUNK_MULTIPLE; } /** - * Append a new table at the end of this union with the given maxKey. It is expected that tables - * will be added in tableId order. + * Append a new table at the end of this union with the given maxKey. It is expected that tables will be added in + * tableId order. * * @param maxKey the maximum key of the table */ @@ -139,8 +135,7 @@ public void appendTable(long maxKey) { prevStartOfIndices[size] = prevStartOfIndices[size - 1] + keySpace; prevStartOfIndicesAlt[size] = prevStartOfIndicesAlt[size - 1] + keySpace; - if (startOfIndices[size] < 0 || prevStartOfIndices[size] < 0 - || prevStartOfIndicesAlt[size] < 0) { + if (startOfIndices[size] < 0 || prevStartOfIndices[size] < 0 || prevStartOfIndicesAlt[size] < 0) { throw new UnsupportedOperationException(INDEX_OVERFLOW_MESSAGE); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/UnionSourceManager.java b/DB/src/main/java/io/deephaven/db/v2/sources/UnionSourceManager.java index f035181154d..82fcbf4ef6b 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/UnionSourceManager.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/UnionSourceManager.java @@ -36,8 +36,7 @@ public class UnionSourceManager { private final NotificationQueue.Dependency parentDependency; private final UnionRedirection unionRedirection = new UnionRedirection(); private final List tables = new ArrayList<>(); - private final List listeners = - Collections.synchronizedList(new ArrayList<>()); + private final List listeners = Collections.synchronizedList(new ArrayList<>()); private final MergedListener mergedListener; private final QueryTable result; @@ -48,14 +47,12 @@ public class UnionSourceManager { private UpdateCommitter prevFlusher = null; public UnionSourceManager(TableDefinition tableDefinition, - 
@Nullable NotificationQueue.Dependency parentDependency) { + @Nullable NotificationQueue.Dependency parentDependency) { // noinspection unchecked sources = tableDefinition.getColumnList().stream() - .map((cd) -> new UnionColumnSource(cd.getDataType(), cd.getComponentType(), - unionRedirection, this)) - .toArray(UnionColumnSource[]::new); - names = tableDefinition.getColumnList().stream().map(ColumnDefinition::getName) - .toArray(String[]::new); + .map((cd) -> new UnionColumnSource(cd.getDataType(), cd.getComponentType(), unionRedirection, this)) + .toArray(UnionColumnSource[]::new); + names = tableDefinition.getColumnList().stream().map(ColumnDefinition::getName).toArray(String[]::new); this.parentDependency = parentDependency; index = Index.FACTORY.getEmptyIndex(); @@ -66,9 +63,9 @@ public UnionSourceManager(TableDefinition tableDefinition, } /** - * Ensure that this UnionSourceManager will be refreshing. Should be called proactively if it is - * expected that refreshing DynamicTables may be added *after* the initial set, in order to - * ensure that children of the result table are correctly setup to listen and refresh. + * Ensure that this UnionSourceManager will be refreshing. Should be called proactively if it is expected that + * refreshing DynamicTables may be added *after* the initial set, in order to ensure that children of the result + * table are correctly setup to listen and refresh. */ public void setRefreshing() { if (refreshing) { @@ -96,16 +93,15 @@ public boolean allowsReinterpret() { } /** - * Note that this UnionSourceManager might have tables added dynamically throughout its - * lifetime. + * Note that this UnionSourceManager might have tables added dynamically throughout its lifetime. */ public void noteUsingComponentsIsUnsafe() { isUsingComponentsSafe = false; } /** - * Determine whether using the component tables directly in a subsequent merge will affect the - * correctness of the merge. 
+ * Determine whether using the component tables directly in a subsequent merge will affect the correctness of the + * merge. * * @return If using the component tables is allowed. */ @@ -125,14 +121,14 @@ public synchronized void addTable(@NotNull final Table table, final boolean onNe Require.requirement(!isUsingComponentsSafe(), "!isUsingComponentsSafe()"); } Require.requirement(sources.size() == this.sources.length, - "sources.size() == this.sources.length", sources.size(), - "sources.size()", this.sources.length, "this.sources.length"); + "sources.size() == this.sources.length", sources.size(), + "sources.size()", this.sources.length, "this.sources.length"); unionRedirection.appendTable(table.getIndex().lastKey()); for (int i = 0; i < this.sources.length; i++) { final ColumnSource sourceToAdd = sources.get(names[i]); Assert.assertion(sourceToAdd != null, "sources.get(names[i]) != null", names[i], - "names[i]"); + "names[i]"); // noinspection unchecked this.sources[i].appendColumnSource(sourceToAdd); } @@ -140,18 +136,16 @@ public synchronized void addTable(@NotNull final Table table, final boolean onNe tables.add(table); if (onNewTableMapKey && !disallowReinterpret) { - // if we allow new tables to be added, then we have concurrency concerns about doing - // reinterpretations off + // if we allow new tables to be added, then we have concurrency concerns about doing reinterpretations off // of the LTM thread - throw new IllegalStateException( - "Can not add new tables when reinterpretation is enabled!"); + throw new IllegalStateException("Can not add new tables when reinterpretation is enabled!"); } if (table.isLive()) { setRefreshing(); final DynamicTable dynTable = (DynamicTable) table; final UnionListenerRecorder listener = new UnionListenerRecorder("TableTools.merge", - dynTable, tableId); + dynTable, tableId); listeners.add(listener); modColumnTransformers.add(dynTable.newModifiedColumnSetTransformer(result, names)); @@ -160,9 +154,8 @@ public synchronized 
void addTable(@NotNull final Table table, final boolean onNe if (onNewTableMapKey) { // synthetically invoke onUpdate lest our MergedUnionListener#process never fires. final ShiftAwareListener.Update update = new ShiftAwareListener.Update( - table.getIndex().clone(), Index.FACTORY.getEmptyIndex(), - Index.FACTORY.getEmptyIndex(), - IndexShiftData.EMPTY, ModifiedColumnSet.ALL); + table.getIndex().clone(), Index.FACTORY.getEmptyIndex(), Index.FACTORY.getEmptyIndex(), + IndexShiftData.EMPTY, ModifiedColumnSet.ALL); listener.onUpdate(update); update.release(); } @@ -216,7 +209,7 @@ class UnionListenerRecorder extends ListenerRecorder { class MergedUnionListener extends MergedListener { MergedUnionListener(Collection recorders, String listenerDescription, - QueryTable result) { + QueryTable result) { super(recorders, Collections.emptyList(), listenerDescription, result); } @@ -231,27 +224,24 @@ protected void process() { long accumulatedShift = 0; int firstShiftingTable = tables.size(); for (int tableId = 0; tableId < tables.size(); ++tableId) { - final long newShift = unionRedirection.computeShiftIfNeeded(tableId, - tables.get(tableId).getIndex().lastKey()); + final long newShift = + unionRedirection.computeShiftIfNeeded(tableId, tables.get(tableId).getIndex().lastKey()); unionRedirection.prevStartOfIndicesAlt[tableId] = - unionRedirection.startOfIndices[tableId] += accumulatedShift; + unionRedirection.startOfIndices[tableId] += accumulatedShift; accumulatedShift += newShift; if (newShift > 0 && tableId + 1 < firstShiftingTable) { firstShiftingTable = tableId + 1; } } - // note: prevStart must be set irregardless of whether accumulatedShift is non-zero or - // not. + // note: prevStart must be set irregardless of whether accumulatedShift is non-zero or not. 
unionRedirection.prevStartOfIndicesAlt[tables.size()] = - unionRedirection.startOfIndices[tables.size()] += accumulatedShift; + unionRedirection.startOfIndices[tables.size()] += accumulatedShift; if (accumulatedShift > 0) { final int maxTableId = tables.size() - 1; - final Index.SequentialBuilder builder = - Index.CURRENT_FACTORY.getSequentialBuilder(); - index.removeRange(unionRedirection.prevStartOfIndices[firstShiftingTable], - Long.MAX_VALUE); + final Index.SequentialBuilder builder = Index.CURRENT_FACTORY.getSequentialBuilder(); + index.removeRange(unionRedirection.prevStartOfIndices[firstShiftingTable], Long.MAX_VALUE); for (int tableId = firstShiftingTable; tableId <= maxTableId; ++tableId) { final long startOfShift = unionRedirection.startOfIndices[tableId]; @@ -264,80 +254,67 @@ protected void process() { final Index.SequentialBuilder updateAddedBuilder = Index.FACTORY.getSequentialBuilder(); final Index.SequentialBuilder shiftAddedBuilder = Index.FACTORY.getSequentialBuilder(); final Index.SequentialBuilder shiftRemoveBuilder = Index.FACTORY.getSequentialBuilder(); - final Index.SequentialBuilder updateRemovedBuilder = - Index.FACTORY.getSequentialBuilder(); - final Index.SequentialBuilder updateModifiedBuilder = - Index.FACTORY.getSequentialBuilder(); + final Index.SequentialBuilder updateRemovedBuilder = Index.FACTORY.getSequentialBuilder(); + final Index.SequentialBuilder updateModifiedBuilder = Index.FACTORY.getSequentialBuilder(); - // listeners should be quiescent by the time we are processing this notification, - // because of the dependency tracking + // listeners should be quiescent by the time we are processing this notification, because of the dependency + // tracking int nextListenerId = 0; for (int tableId = 0; tableId < tables.size(); ++tableId) { final long offset = unionRedirection.prevStartOfIndices[tableId]; final long currOffset = unionRedirection.startOfIndices[tableId]; final long shiftDelta = currOffset - offset; - // Listeners only 
contains ticking tables. However, we might need to shift tables - // that do not tick. + // Listeners only contains ticking tables. However, we might need to shift tables that do not tick. final ListenerRecorder listener = - (nextListenerId < listeners.size() - && listeners.get(nextListenerId).tableId == tableId) - ? listeners.get(nextListenerId++) - : null; + (nextListenerId < listeners.size() && listeners.get(nextListenerId).tableId == tableId) + ? listeners.get(nextListenerId++) + : null; if (listener == null || listener.getNotificationStep() != currentStep) { if (shiftDelta != 0) { shiftedBuilder.shiftRange(unionRedirection.prevStartOfIndices[tableId], - unionRedirection.prevStartOfIndices[tableId + 1] - 1, shiftDelta); + unionRedirection.prevStartOfIndices[tableId + 1] - 1, shiftDelta); } continue; } // Mark all dirty columns in this source table as dirty in aggregate. - modColumnTransformers.get(nextListenerId - 1) - .transform(listener.getModifiedColumnSet(), modifiedColumnSet); + modColumnTransformers.get(nextListenerId - 1).transform(listener.getModifiedColumnSet(), + modifiedColumnSet); final IndexShiftData shiftData = listener.getShifted(); - updateAddedBuilder.appendIndexWithOffset(listener.getAdded(), - unionRedirection.startOfIndices[tableId]); + updateAddedBuilder.appendIndexWithOffset(listener.getAdded(), unionRedirection.startOfIndices[tableId]); updateModifiedBuilder.appendIndexWithOffset(listener.getModified(), - unionRedirection.startOfIndices[tableId]); + unionRedirection.startOfIndices[tableId]); if (shiftDelta == 0) { - try (final Index newRemoved = - getShiftedPrevIndex(listener.getRemoved(), tableId)) { + try (final Index newRemoved = getShiftedPrevIndex(listener.getRemoved(), tableId)) { updateRemovedBuilder.appendIndex(newRemoved); index.remove(newRemoved); } } else { - // If the shiftDelta is non-zero we have already updated the index above - // (because we used the new index), - // otherwise we need to apply the removals (adjusted by 
the table's starting - // key) + // If the shiftDelta is non-zero we have already updated the index above (because we used the new + // index), + // otherwise we need to apply the removals (adjusted by the table's starting key) updateRemovedBuilder.appendIndexWithOffset(listener.getRemoved(), - unionRedirection.prevStartOfIndices[tableId]); + unionRedirection.prevStartOfIndices[tableId]); } // Apply and process shifts. final long firstTableKey = unionRedirection.startOfIndices[tableId]; final long lastTableKey = unionRedirection.startOfIndices[tableId + 1] - 1; if (shiftData.nonempty() && index.overlapsRange(firstTableKey, lastTableKey)) { - final long prevCardinality = - unionRedirection.prevStartOfIndices[tableId + 1] - offset; - final long currCardinality = - unionRedirection.startOfIndices[tableId + 1] - currOffset; - shiftedBuilder.appendShiftData(shiftData, offset, prevCardinality, currOffset, - currCardinality); + final long prevCardinality = unionRedirection.prevStartOfIndices[tableId + 1] - offset; + final long currCardinality = unionRedirection.startOfIndices[tableId + 1] - currOffset; + shiftedBuilder.appendShiftData(shiftData, offset, prevCardinality, currOffset, currCardinality); // if the entire table was shifted, we've already applied the index update if (shiftDelta == 0) { - // it is possible that shifts occur outside of our reserved keyspace for - // this table; we must - // protect from shifting keys that belong to other tables by clipping the - // shift space - final long lastLegalKey = - unionRedirection.prevStartOfIndices[tableId + 1] - 1; + // it is possible that shifts occur outside of our reserved keyspace for this table; we must + // protect from shifting keys that belong to other tables by clipping the shift space + final long lastLegalKey = unionRedirection.prevStartOfIndices[tableId + 1] - 1; try (OrderedKeys.Iterator okIt = index.getOrderedKeysIterator()) { for (int idx = 0; idx < shiftData.size(); ++idx) { @@ -345,8 +322,7 @@ protected 
void process() { if (beginRange > lastLegalKey) { break; } - final long endRange = - Math.min(shiftData.getEndRange(idx) + offset, lastLegalKey); + final long endRange = Math.min(shiftData.getEndRange(idx) + offset, lastLegalKey); final long rangeDelta = shiftData.getShiftDelta(idx); if (!okIt.advance(beginRange)) { @@ -354,16 +330,15 @@ protected void process() { } Assert.leq(beginRange, "beginRange", endRange, "endRange"); shiftRemoveBuilder.appendRange(beginRange, endRange); - okIt.getNextOrderedKeysThrough(endRange) - .forAllLongRanges((s, e) -> shiftAddedBuilder - .appendRange(s + rangeDelta, e + rangeDelta)); + okIt.getNextOrderedKeysThrough(endRange).forAllLongRanges( + (s, e) -> shiftAddedBuilder.appendRange(s + rangeDelta, e + rangeDelta)); } } } } else if (shiftDelta != 0) { // shift entire thing shiftedBuilder.shiftRange(unionRedirection.prevStartOfIndices[tableId], - unionRedirection.prevStartOfIndices[tableId + 1] - 1, shiftDelta); + unionRedirection.prevStartOfIndices[tableId + 1] - 1, shiftDelta); } } @@ -379,7 +354,7 @@ protected void process() { // Finally add the new keys to the index in post-shift key-space. 
try (Index shiftRemoveIndex = shiftRemoveBuilder.getIndex(); - Index shiftAddedIndex = shiftAddedBuilder.getIndex()) { + Index shiftAddedIndex = shiftAddedBuilder.getIndex()) { index.remove(shiftRemoveIndex); index.insert(shiftAddedIndex); } @@ -394,8 +369,7 @@ protected boolean canExecute(final long step) { return false; } synchronized (listeners) { - return listeners.stream() - .allMatch((final UnionListenerRecorder recorder) -> recorder.satisfied(step)); + return listeners.stream().allMatch((final UnionListenerRecorder recorder) -> recorder.satisfied(step)); } } } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/ViewColumnSource.java b/DB/src/main/java/io/deephaven/db/v2/sources/ViewColumnSource.java index 18418d81718..1f1f89f8922 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/ViewColumnSource.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/ViewColumnSource.java @@ -20,8 +20,7 @@ public class ViewColumnSource extends AbstractColumnSource { private final Formula formula; - // We explicitly want all Groovy commands to run under the 'file:/groovy/shell' source, so - // explicitly create that. + // We explicitly want all Groovy commands to run under the 'file:/groovy/shell' source, so explicitly create that. private static URL groovyShellUrl; static { try { @@ -33,15 +32,14 @@ public class ViewColumnSource extends AbstractColumnSource { } private static final CodeSource codeSource = - new CodeSource(groovyShellUrl, (java.security.cert.Certificate[]) null); - // The permission collection should not be static, because the class loader might take place - // before the + new CodeSource(groovyShellUrl, (java.security.cert.Certificate[]) null); + // The permission collection should not be static, because the class loader might take place before the // custom policy object is assigned. 
private final PermissionCollection perms = Policy.getPolicy().getPermissions(codeSource); - private final AccessControlContext context = AccessController - .doPrivileged((PrivilegedAction) () -> new AccessControlContext( - new ProtectionDomain[] {new ProtectionDomain( - new CodeSource(groovyShellUrl, (java.security.cert.Certificate[]) null), perms)})); + private final AccessControlContext context = + AccessController.doPrivileged((PrivilegedAction) () -> new AccessControlContext( + new ProtectionDomain[] {new ProtectionDomain( + new CodeSource(groovyShellUrl, (java.security.cert.Certificate[]) null), perms)})); public ViewColumnSource(Class type, Formula formula) { super(type); @@ -229,33 +227,31 @@ public FillContext makeFillContext(final int chunkCapacity, final SharedContext @Override public Chunk getChunk(@NotNull final GetContext context, - @NotNull final OrderedKeys orderedKeys) { + @NotNull final OrderedKeys orderedKeys) { return formula.getChunk(((VCSGetContext) context).underlyingGetContext, orderedKeys); } @Override public Chunk getPrevChunk(@NotNull final GetContext context, - @NotNull final OrderedKeys orderedKeys) { + @NotNull final OrderedKeys orderedKeys) { return formula.getPrevChunk(((VCSGetContext) context).underlyingGetContext, orderedKeys); } @Override public void fillChunk(@NotNull final FillContext context, - @NotNull final WritableChunk destination, - @NotNull final OrderedKeys orderedKeys) { - formula.fillChunk(((VCSFillContext) context).underlyingFillContext, destination, - orderedKeys); + @NotNull final WritableChunk destination, + @NotNull final OrderedKeys orderedKeys) { + formula.fillChunk(((VCSFillContext) context).underlyingFillContext, destination, orderedKeys); } @Override public void fillPrevChunk(@NotNull final FillContext context, - @NotNull final WritableChunk destination, - @NotNull final OrderedKeys orderedKeys) { - formula.fillPrevChunk(((VCSFillContext) context).underlyingFillContext, destination, - orderedKeys); + @NotNull 
final WritableChunk destination, + @NotNull final OrderedKeys orderedKeys) { + formula.fillPrevChunk(((VCSFillContext) context).underlyingFillContext, destination, orderedKeys); } public static class VCSGetContext implements GetContext { diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/WritableChunkSink.java b/DB/src/main/java/io/deephaven/db/v2/sources/WritableChunkSink.java index 592d1827199..5967417743b 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/WritableChunkSink.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/WritableChunkSink.java @@ -12,26 +12,25 @@ interface FillFromContext extends Context { } /** - * Fills the ChunkSink with data from the source, with data corresponding to the keys from the - * given {@link OrderedKeys}. + * Fills the ChunkSink with data from the source, with data corresponding to the keys from the given + * {@link OrderedKeys}. * * @param context A context containing all mutable/state related data used in writing the Chunk. * @param src The source of the data {@code orderedKeys} * @param orderedKeys An {@link OrderedKeys} representing the keys to be written */ void fillFromChunk(@NotNull FillFromContext context, @NotNull Chunk src, - @NotNull OrderedKeys orderedKeys); + @NotNull OrderedKeys orderedKeys); /** - * Fills the ChunkSink with data from the source, with data corresponding to the keys from the - * given key chunk. + * Fills the ChunkSink with data from the source, with data corresponding to the keys from the given key chunk. * * @param context A context containing all mutable/state related data used in writing the Chunk. 
* @param src The source of the data {@code orderedKeys} * @param keys A {@link LongChunk} representing the keys to be written */ - void fillFromChunkUnordered(@NotNull FillFromContext context, - @NotNull Chunk src, @NotNull LongChunk keys); + void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Chunk src, + @NotNull LongChunk keys); /** * Make a context suitable for the {@link WritableChunkSink#fillFromChunk} method. diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/WritableSource.java b/DB/src/main/java/io/deephaven/db/v2/sources/WritableSource.java index bd9aa88b858..a81551255b3 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/WritableSource.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/WritableSource.java @@ -57,19 +57,16 @@ default void ensureCapacity(long capacity) { * Ensure that this WritableSource can accept index keys in range {@code [0, capacity)}. * * @param capacity The new minimum capacity - * @param nullFilled Whether data should be "null-filled". If true, get operations at index keys - * that have not been set will return the appropriate null value; otherwise such gets - * produce undefined results. + * @param nullFilled Whether data should be "null-filled". If true, get operations at index keys that have not been + * set will return the appropriate null value; otherwise such gets produce undefined results. */ void ensureCapacity(long capacity, boolean nullFilled); - // WritableSource provides a slow, default implementation of fillFromChunk. Inheritors who care - // should provide + // WritableSource provides a slow, default implementation of fillFromChunk. Inheritors who care should provide // something more efficient. /** - * Provide a default, empty {@link FillFromContext} for use with our default - * {@link WritableSource#fillFromChunk}. + * Provide a default, empty {@link FillFromContext} for use with our default {@link WritableSource#fillFromChunk}. 
*/ @Override default FillFromContext makeFillFromContext(int chunkCapacity) { @@ -78,21 +75,19 @@ default FillFromContext makeFillFromContext(int chunkCapacity) { } /** - * Our default, inefficient, implementation. Inheritors who care should provide a better - * implementation. + * Our default, inefficient, implementation. Inheritors who care should provide a better implementation. */ @Override - default void fillFromChunk(@NotNull FillFromContext context, - @NotNull Chunk src, - @NotNull OrderedKeys orderedKeys) { + default void fillFromChunk(@NotNull FillFromContext context, @NotNull Chunk src, + @NotNull OrderedKeys orderedKeys) { final SinkFiller filler = (SinkFiller) context; filler.reset(this, src); orderedKeys.forEachLong(filler); } @Override - default void fillFromChunkUnordered(@NotNull FillFromContext context, - @NotNull Chunk src, @NotNull LongChunk keys) { + default void fillFromChunkUnordered(@NotNull FillFromContext context, @NotNull Chunk src, + @NotNull LongChunk keys) { final SinkFiller filler = (SinkFiller) context; filler.reset(this, src); for (int ii = 0; ii < keys.size(); ++ii) { diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/aggregate/AggregateColumnSource.java b/DB/src/main/java/io/deephaven/db/v2/sources/aggregate/AggregateColumnSource.java index ca8fcea90bf..cf3a59b186f 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/aggregate/AggregateColumnSource.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/aggregate/AggregateColumnSource.java @@ -12,20 +12,19 @@ import java.util.function.BiFunction; /** - * {@link ColumnSource} and {@link UngroupableColumnSource} interface for aggregation result - * columns. + * {@link ColumnSource} and {@link UngroupableColumnSource} interface for aggregation result columns. 
*/ public interface AggregateColumnSource - extends UngroupableColumnSource, MutableColumnSourceGetDefaults.ForObject { + extends UngroupableColumnSource, MutableColumnSourceGetDefaults.ForObject { UngroupedColumnSource ungrouped(); static AggregateColumnSource make( - @NotNull final ColumnSource aggregatedSource, - @NotNull final ColumnSource indexSource) { + @NotNull final ColumnSource aggregatedSource, + @NotNull final ColumnSource indexSource) { // noinspection unchecked return (AggregateColumnSource) FactoryHelper.TYPE_TO_CONSTRUCTOR - .get(aggregatedSource.getType()).apply(aggregatedSource, indexSource); + .get(aggregatedSource.getType()).apply(aggregatedSource, indexSource); } final class FactoryHelper { @@ -34,8 +33,8 @@ private FactoryHelper() {} @SuppressWarnings({"unchecked", "AutoUnboxing"}) private static final SimpleTypeMap, ColumnSource, AggregateColumnSource>> TYPE_TO_CONSTRUCTOR = - SimpleTypeMap.create( - // @formatter:off + SimpleTypeMap.create( + // @formatter:off (final ColumnSource aggregatedSource, final ColumnSource indexSource) -> { throw new UnsupportedOperationException("Cannot create a primitive boolean ColumnSource"); }, @@ -48,6 +47,6 @@ private FactoryHelper() {} (final ColumnSource aggregatedSource, final ColumnSource indexSource) -> new DoubleAggregateColumnSource((ColumnSource ) aggregatedSource, indexSource), (final ColumnSource aggregatedSource, final ColumnSource indexSource) -> new ObjectAggregateColumnSource<>((ColumnSource ) aggregatedSource, indexSource) // @formatter:on - ); + ); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/aggregate/BaseAggregateColumnSource.java b/DB/src/main/java/io/deephaven/db/v2/sources/aggregate/BaseAggregateColumnSource.java index 1f14eabc6c3..5fc4ab6a61c 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/aggregate/BaseAggregateColumnSource.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/aggregate/BaseAggregateColumnSource.java @@ -18,15 +18,14 @@ * Base {@link 
ColumnSource} implementation for aggregation result columns. */ abstract class BaseAggregateColumnSource - extends AbstractColumnSource - implements AggregateColumnSource { + extends AbstractColumnSource implements AggregateColumnSource { final ColumnSource aggregatedSource; final ColumnSource indexSource; BaseAggregateColumnSource(@NotNull final Class dbArrayType, - @NotNull final ColumnSource aggregatedSource, - @NotNull final ColumnSource indexSource) { + @NotNull final ColumnSource aggregatedSource, + @NotNull final ColumnSource indexSource) { super(dbArrayType, aggregatedSource.getType()); this.aggregatedSource = aggregatedSource; this.indexSource = indexSource; @@ -44,14 +43,11 @@ static final class AggregateFillContext implements FillContext { final GetContext indexGetContext; - private AggregateFillContext(@NotNull final ColumnSource indexSource, - final int chunkCapacity, final SharedContext sharedContext) { - // TODO: Implement a proper shareable context to use with other instances that share an - // index source. - // Current usage is "safe" because index sources are only exposed through this wrapper, - // and all - // sources at a given level will pass through their ordered keys to the index source - // unchanged. + private AggregateFillContext(@NotNull final ColumnSource indexSource, final int chunkCapacity, + final SharedContext sharedContext) { + // TODO: Implement a proper shareable context to use with other instances that share an index source. + // Current usage is "safe" because index sources are only exposed through this wrapper, and all + // sources at a given level will pass through their ordered keys to the index source unchanged. 
indexGetContext = indexSource.makeGetContext(chunkCapacity, sharedContext); } @@ -62,8 +58,7 @@ public final void close() { } @Override - public final FillContext makeFillContext(final int chunkCapacity, - final SharedContext sharedContext) { + public final FillContext makeFillContext(final int chunkCapacity, final SharedContext sharedContext) { return new AggregateFillContext(indexSource, chunkCapacity, sharedContext); } @@ -101,8 +96,7 @@ public final Object getUngroupedPrev(final long groupIndexKey, final int offsetI if (groupIndexKey == Index.NULL_KEY) { return null; } - return aggregatedSource - .getPrev(indexSource.getPrev(groupIndexKey).getPrevIndex().get(offsetInGroup)); + return aggregatedSource.getPrev(indexSource.getPrev(groupIndexKey).getPrevIndex().get(offsetInGroup)); } @Override @@ -114,13 +108,11 @@ public final Boolean getUngroupedBoolean(final long groupIndexKey, final int off } @Override - public final Boolean getUngroupedPrevBoolean(final long groupIndexKey, - final int offsetInGroup) { + public final Boolean getUngroupedPrevBoolean(final long groupIndexKey, final int offsetInGroup) { if (groupIndexKey == Index.NULL_KEY) { return NULL_BOOLEAN; } - return aggregatedSource - .getPrevBoolean(indexSource.getPrev(groupIndexKey).getPrevIndex().get(offsetInGroup)); + return aggregatedSource.getPrevBoolean(indexSource.getPrev(groupIndexKey).getPrevIndex().get(offsetInGroup)); } @Override @@ -136,8 +128,7 @@ public final double getUngroupedPrevDouble(final long groupIndexKey, final int o if (groupIndexKey == Index.NULL_KEY) { return NULL_DOUBLE; } - return aggregatedSource - .getPrevDouble(indexSource.getPrev(groupIndexKey).getPrevIndex().get(offsetInGroup)); + return aggregatedSource.getPrevDouble(indexSource.getPrev(groupIndexKey).getPrevIndex().get(offsetInGroup)); } @Override @@ -153,8 +144,7 @@ public final float getUngroupedPrevFloat(final long groupIndexKey, final int off if (groupIndexKey == Index.NULL_KEY) { return NULL_FLOAT; } - return 
aggregatedSource - .getPrevFloat(indexSource.getPrev(groupIndexKey).getPrevIndex().get(offsetInGroup)); + return aggregatedSource.getPrevFloat(indexSource.getPrev(groupIndexKey).getPrevIndex().get(offsetInGroup)); } @Override @@ -170,8 +160,7 @@ public final byte getUngroupedPrevByte(final long groupIndexKey, final int offse if (groupIndexKey == Index.NULL_KEY) { return NULL_BYTE; } - return aggregatedSource - .getPrevByte(indexSource.getPrev(groupIndexKey).getPrevIndex().get(offsetInGroup)); + return aggregatedSource.getPrevByte(indexSource.getPrev(groupIndexKey).getPrevIndex().get(offsetInGroup)); } @Override @@ -187,8 +176,7 @@ public final char getUngroupedPrevChar(final long groupIndexKey, final int offse if (groupIndexKey == Index.NULL_KEY) { return NULL_CHAR; } - return aggregatedSource - .getPrevChar(indexSource.getPrev(groupIndexKey).getPrevIndex().get(offsetInGroup)); + return aggregatedSource.getPrevChar(indexSource.getPrev(groupIndexKey).getPrevIndex().get(offsetInGroup)); } @Override @@ -204,8 +192,7 @@ public final short getUngroupedPrevShort(final long groupIndexKey, final int off if (groupIndexKey == Index.NULL_KEY) { return NULL_SHORT; } - return aggregatedSource - .getPrevShort(indexSource.getPrev(groupIndexKey).getPrevIndex().get(offsetInGroup)); + return aggregatedSource.getPrevShort(indexSource.getPrev(groupIndexKey).getPrevIndex().get(offsetInGroup)); } @Override @@ -221,8 +208,7 @@ public final int getUngroupedPrevInt(final long groupIndexKey, final int offsetI if (groupIndexKey == Index.NULL_KEY) { return NULL_INT; } - return aggregatedSource - .getPrevInt(indexSource.getPrev(groupIndexKey).getPrevIndex().get(offsetInGroup)); + return aggregatedSource.getPrevInt(indexSource.getPrev(groupIndexKey).getPrevIndex().get(offsetInGroup)); } @Override @@ -238,7 +224,6 @@ public final long getUngroupedPrevLong(final long groupIndexKey, final int offse if (groupIndexKey == Index.NULL_KEY) { return NULL_LONG; } - return aggregatedSource - 
.getPrevLong(indexSource.getPrev(groupIndexKey).getPrevIndex().get(offsetInGroup)); + return aggregatedSource.getPrevLong(indexSource.getPrev(groupIndexKey).getPrevIndex().get(offsetInGroup)); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/aggregate/UngroupedAggregateColumnSource.java b/DB/src/main/java/io/deephaven/db/v2/sources/aggregate/UngroupedAggregateColumnSource.java index fae98db0c7d..bd8d9a7b16c 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/aggregate/UngroupedAggregateColumnSource.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/aggregate/UngroupedAggregateColumnSource.java @@ -25,8 +25,7 @@ final class UngroupedAggregateColumnSource extends UngroupedColumnSou private final BaseAggregateColumnSource aggregateColumnSource; - UngroupedAggregateColumnSource( - @NotNull final BaseAggregateColumnSource aggregateColumnSource) { + UngroupedAggregateColumnSource(@NotNull final BaseAggregateColumnSource aggregateColumnSource) { super(aggregateColumnSource.aggregatedSource.getType()); this.aggregateColumnSource = aggregateColumnSource; } @@ -131,8 +130,7 @@ public final DATA_TYPE getPrev(final long keyIndex) { final long groupIndexKey = getGroupIndexKey(keyIndex, prevBase); final long offsetInGroup = getOffsetInGroup(keyIndex, prevBase); // noinspection unchecked - return (DATA_TYPE) aggregateColumnSource.getUngroupedPrev(groupIndexKey, - (int) offsetInGroup); + return (DATA_TYPE) aggregateColumnSource.getUngroupedPrev(groupIndexKey, (int) offsetInGroup); } @Override @@ -230,25 +228,23 @@ private static final class UngroupedFillContext implements FillContext { private final FillContext aggregatedFillContext; private final ResettableWritableChunk destinationSlice; - private UngroupedFillContext( - @NotNull final BaseAggregateColumnSource aggregateColumnSource, - final int chunkCapacity, - final SharedContext sharedContext) { + private UngroupedFillContext(@NotNull final BaseAggregateColumnSource aggregateColumnSource, + final int 
chunkCapacity, + final SharedContext sharedContext) { final ColumnSource indexSource = aggregateColumnSource.indexSource; final ColumnSource aggregatedSource = aggregateColumnSource.aggregatedSource; shareable = sharedContext == null ? new Shareable(false, indexSource, chunkCapacity) - : sharedContext.getOrCreate(new SharingKey(indexSource), - () -> new Shareable(true, indexSource, chunkCapacity)); + : sharedContext.getOrCreate(new SharingKey(indexSource), + () -> new Shareable(true, indexSource, chunkCapacity)); - // NB: There's no reason to use a shared context for the values source. We'd have to - // reset it between each sub-fill. + // NB: There's no reason to use a shared context for the values source. We'd have to reset it between each + // sub-fill. aggregatedFillContext = aggregatedSource.makeFillContext(chunkCapacity); destinationSlice = aggregatedSource.getChunkType().makeResettableWritableChunk(); } - private static final class SharingKey - extends SharedContext.ExactReferenceSharingKey { + private static final class SharingKey extends SharedContext.ExactReferenceSharingKey { private SharingKey(@NotNull final ColumnSource indexSource) { super(indexSource); @@ -270,8 +266,8 @@ private static final class Shareable extends SharedContext { private int currentIndexPosition; private Shareable(final boolean shared, - @NotNull final ColumnSource indexSource, - final int chunkCapacity) { + @NotNull final ColumnSource indexSource, + final int chunkCapacity) { this.shared = shared; indexGetContext = indexSource.makeGetContext(chunkCapacity, this); @@ -281,9 +277,8 @@ private Shareable(final boolean shared, componentKeyIndicesSlice = ResettableWritableLongChunk.makeResettableChunk(); } - private void extractFillChunkInformation( - @NotNull final ColumnSource indexSource, final long base, - final boolean usePrev, @NotNull final OrderedKeys orderedKeys) { + private void extractFillChunkInformation(@NotNull final ColumnSource indexSource, + final long base, final 
boolean usePrev, @NotNull final OrderedKeys orderedKeys) { if (stateReusable) { return; } @@ -295,14 +290,13 @@ private void extractFillChunkInformation( componentKeyIndices.setSize(0); orderedKeys.forAllLongs((final long keyIndex) -> { final long indexKeyIndex = getGroupIndexKey(keyIndex, base); - if (currentIndexPosition == -1 - || indexKeyIndex != indexKeyIndices.get(currentIndexPosition)) { + if (currentIndexPosition == -1 || indexKeyIndex != indexKeyIndices.get(currentIndexPosition)) { ++currentIndexPosition; indexKeyIndices.set(currentIndexPosition, indexKeyIndex); sameIndexRunLengths.set(currentIndexPosition, 1); } else { sameIndexRunLengths.set(currentIndexPosition, - sameIndexRunLengths.get(currentIndexPosition) + 1); + sameIndexRunLengths.get(currentIndexPosition) + 1); } final long componentKeyIndex = getOffsetInGroup(keyIndex, base); componentKeyIndices.add(componentKeyIndex); @@ -312,13 +306,11 @@ private void extractFillChunkInformation( final ObjectChunk indexes; try (final OrderedKeys indexOrderedKeys = - OrderedKeys.wrapKeyIndicesChunkAsOrderedKeys(indexKeyIndices)) { + OrderedKeys.wrapKeyIndicesChunkAsOrderedKeys(indexKeyIndices)) { if (usePrev) { - indexes = indexSource.getPrevChunk(indexGetContext, indexOrderedKeys) - .asObjectChunk(); + indexes = indexSource.getPrevChunk(indexGetContext, indexOrderedKeys).asObjectChunk(); } else { - indexes = - indexSource.getChunk(indexGetContext, indexOrderedKeys).asObjectChunk(); + indexes = indexSource.getChunk(indexGetContext, indexOrderedKeys).asObjectChunk(); } } @@ -326,17 +318,16 @@ private void extractFillChunkInformation( for (int ii = 0; ii < indexes.size(); ++ii) { final Index currIndex = indexes.get(ii); Assert.neqNull(currIndex, "currIndex"); - final boolean usePrevIndex = - usePrev && !(currIndex instanceof CurrentOnlyIndex); + final boolean usePrevIndex = usePrev && !(currIndex instanceof CurrentOnlyIndex); final Index index = usePrevIndex ? 
currIndex.getPrevIndex() : currIndex; try { final int lengthFromThisIndex = sameIndexRunLengths.get(ii); final WritableLongChunk remappedComponentKeys = - componentKeyIndicesSlice.resetFromTypedChunk(componentKeyIndices, - componentKeyIndicesPosition, lengthFromThisIndex); + componentKeyIndicesSlice.resetFromTypedChunk(componentKeyIndices, + componentKeyIndicesPosition, lengthFromThisIndex); index.getKeysForPositions(new LongChunkIterator(componentKeyIndicesSlice), - new LongChunkAppender(remappedComponentKeys)); + new LongChunkAppender(remappedComponentKeys)); componentKeyIndicesPosition += lengthFromThisIndex; } finally { @@ -368,28 +359,23 @@ public final void close() { } private void doFillChunk(@NotNull final ColumnSource valueSource, final boolean usePrev, - @NotNull final WritableChunk destination) { + @NotNull final WritableChunk destination) { int componentKeyIndicesPosition = 0; for (int ii = 0; ii < shareable.sameIndexRunLengths.size(); ++ii) { final int lengthFromThisIndex = shareable.sameIndexRunLengths.get(ii); final WritableLongChunk remappedComponentKeys = - shareable.componentKeyIndicesSlice.resetFromTypedChunk( - shareable.componentKeyIndices, componentKeyIndicesPosition, - lengthFromThisIndex); + shareable.componentKeyIndicesSlice.resetFromTypedChunk(shareable.componentKeyIndices, + componentKeyIndicesPosition, lengthFromThisIndex); try (final OrderedKeys componentOrderedKeys = - OrderedKeys.wrapKeyIndicesChunkAsOrderedKeys(remappedComponentKeys)) { + OrderedKeys.wrapKeyIndicesChunkAsOrderedKeys(remappedComponentKeys)) { if (usePrev) { - valueSource.fillPrevChunk(aggregatedFillContext, - destinationSlice.resetFromChunk(destination, - componentKeyIndicesPosition, lengthFromThisIndex), - componentOrderedKeys); + valueSource.fillPrevChunk(aggregatedFillContext, destinationSlice.resetFromChunk(destination, + componentKeyIndicesPosition, lengthFromThisIndex), componentOrderedKeys); } else { - valueSource.fillChunk(aggregatedFillContext, - 
destinationSlice.resetFromChunk(destination, - componentKeyIndicesPosition, lengthFromThisIndex), - componentOrderedKeys); + valueSource.fillChunk(aggregatedFillContext, destinationSlice.resetFromChunk(destination, + componentKeyIndicesPosition, lengthFromThisIndex), componentOrderedKeys); } } @@ -409,36 +395,31 @@ public final void close() { } @Override - public final FillContext makeFillContext(final int chunkCapacity, - final SharedContext sharedContext) { + public final FillContext makeFillContext(final int chunkCapacity, final SharedContext sharedContext) { return new UngroupedFillContext(aggregateColumnSource, chunkCapacity, sharedContext); } @Override public final void fillChunk(@NotNull final FillContext context, - @NotNull final WritableChunk destination, - @NotNull final OrderedKeys orderedKeys) { + @NotNull final WritableChunk destination, @NotNull final OrderedKeys orderedKeys) { destination.setSize(orderedKeys.intSize()); if (destination.size() == 0) { return; } final UngroupedFillContext tc = (UngroupedFillContext) context; - tc.shareable.extractFillChunkInformation(aggregateColumnSource.indexSource, base, false, - orderedKeys); + tc.shareable.extractFillChunkInformation(aggregateColumnSource.indexSource, base, false, orderedKeys); tc.doFillChunk(aggregateColumnSource.aggregatedSource, false, destination); } @Override public final void fillPrevChunk(@NotNull final FillContext context, - @NotNull final WritableChunk destination, - @NotNull final OrderedKeys orderedKeys) { + @NotNull final WritableChunk destination, @NotNull final OrderedKeys orderedKeys) { destination.setSize(orderedKeys.intSize()); if (destination.size() == 0) { return; } final UngroupedFillContext tc = (UngroupedFillContext) context; - tc.shareable.extractFillChunkInformation(aggregateColumnSource.indexSource, getPrevBase(), - true, orderedKeys); + tc.shareable.extractFillChunkInformation(aggregateColumnSource.indexSource, getPrevBase(), true, orderedKeys); 
tc.doFillChunk(aggregateColumnSource.aggregatedSource, true, destination); } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/Attributes.java b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/Attributes.java index 39d75fd1f29..e7b7b447948 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/Attributes.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/Attributes.java @@ -43,8 +43,8 @@ public interface OrderedKeyIndices extends KeyIndices { /** * The chunk contains index ranges. * - * These are to be represented as pairs of an inclusive start and an inclusive end in even and - * odd slots, respectively. + * These are to be represented as pairs of an inclusive start and an inclusive end in even and odd slots, + * respectively. */ public interface OrderedKeyRanges extends Keys { } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/Chunk.java b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/Chunk.java index 0d57ae0f78c..82ee9634925 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/Chunk.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/Chunk.java @@ -45,15 +45,12 @@ interface Visitor { } /** - * Make a new Chunk that represents either exactly the same view on the underlying data as this - * Chunk, or a subrange of that view. The view is defined as [0..size) (in the coordinate space - * of this Chunk). + * Make a new Chunk that represents either exactly the same view on the underlying data as this Chunk, or a subrange + * of that view. The view is defined as [0..size) (in the coordinate space of this Chunk). * * @param offset Offset of the new Chunk, relative to this Chunk. 0 ≤ offset ≤ this.size - * @param capacity Capacity and initial size of the new Chunk. 0 ≤ capacity ≤ this.size - - * {@code offset}. - * @return The new Chunk. A new Chunk will always be returned, even if the Chunks represent the - * same view. + * @param capacity Capacity and initial size of the new Chunk. 
0 ≤ capacity ≤ this.size - {@code offset}. + * @return The new Chunk. A new Chunk will always be returned, even if the Chunks represent the same view. */ Chunk slice(int offset, int capacity); @@ -80,29 +77,27 @@ interface Visitor { /** *

    - * Copy a sub-range of this chunk to a {@link Buffer}. This is an optional method, as some chunk - * types do not have a corresponding buffer type. + * Copy a sub-range of this chunk to a {@link Buffer}. This is an optional method, as some chunk types do not have a + * corresponding buffer type. * *

    - * Implementations are free to copy data as efficiently as they may, and will use absolute - * rather than positional access where possible. To facilitate this pattern, {@code destOffset} - * is an absolute offset from position 0, rather than a relative offset from - * {@code destBuffer.position()}. + * Implementations are free to copy data as efficiently as they may, and will use absolute rather than positional + * access where possible. To facilitate this pattern, {@code destOffset} is an absolute offset from position 0, + * rather than a relative offset from {@code destBuffer.position()}. * *

    * - * {@code destBuffer}'s position may be modified, but will always be restored to its initial - * value upon successful return. + * {@code destBuffer}'s position may be modified, but will always be restored to its initial value upon successful + * return. * * @param srcOffset The offset into this chunk to start copying from * @param destBuffer The destination {@link Buffer} * @param destOffset The absolute offset into {@code destBuffer} to start copying to * @param length The number of elements to copy */ - default void copyToBuffer(int srcOffset, @NotNull Buffer destBuffer, int destOffset, - int length) { + default void copyToBuffer(int srcOffset, @NotNull Buffer destBuffer, int destOffset, int length) { throw new UnsupportedOperationException(); } @@ -120,7 +115,7 @@ default void checkChunkType(ChunkType expected) { final ChunkType actual = getChunkType(); if (actual != expected) { throw new IllegalArgumentException( - String.format("Expected chunk type '%s', but is '%s'.", expected, actual)); + String.format("Expected chunk type '%s', but is '%s'.", expected, actual)); } } @@ -176,17 +171,15 @@ default ObjectChunk asObjectChunk() { /** * Downcast the attribute. * - * When you know the data in this chunk which you plan to read is a more specific sub-type, you - * can downcast the attribute with this helper method. This might be necessary, for instance, - * when you have a KeyIndices chunk which you sort, and now want to treat it as an - * OrderedKeyIndices. + * When you know the data in this chunk which you plan to read is a more specific sub-type, you can downcast the + * attribute with this helper method. This might be necessary, for instance, when you have a KeyIndices chunk which + * you sort, and now want to treat it as an OrderedKeyIndices. * - * @apiNote Upcast should not be necessary on read-only chunks, as a read-only chunk method - * should accept an upper bound wildcard. 
+ * @apiNote Upcast should not be necessary on read-only chunks, as a read-only chunk method should accept an upper + * bound wildcard. */ - static Chunk downcast( - Chunk self) { + static Chunk downcast(Chunk self) { // noinspection unchecked return (Chunk) self; } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/ChunkBase.java b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/ChunkBase.java index 3b5b1624699..723245750b6 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/ChunkBase.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/ChunkBase.java @@ -8,14 +8,14 @@ */ public abstract class ChunkBase implements Chunk { /** - * The Chunk's storage is the sub-range of the underlying array defined by [offset, offset + - * capacity). It is illegal to access the underlying array outside of this range. + * The Chunk's storage is the sub-range of the underlying array defined by [offset, offset + capacity). It is + * illegal to access the underlying array outside of this range. */ int offset; int capacity; /** - * Useful data data in the chunk is in the sub-range of the underlying array defined by [offset, - * offset + size). It is illegal to set size < 0 or size > capacity. + * Useful data data in the chunk is in the sub-range of the underlying array defined by [offset, offset + size). It + * is illegal to set size < 0 or size > capacity. */ int size; @@ -32,31 +32,30 @@ public final int size() { } /** - * DO NOT CALL THIS INTERNAL METHOD. If you want to set a size, call - * {@link WritableChunk#setSize}. That method is the only legal caller of this method in the - * entire system. + * DO NOT CALL THIS INTERNAL METHOD. If you want to set a size, call {@link WritableChunk#setSize}. That method is + * the only legal caller of this method in the entire system. 
*/ public final void internalSetSize(int newSize, long password) { if (password != -7025656774858671822L) { throw new UnsupportedOperationException( - "DO NOT CALL THIS INTERNAL METHOD. Instead call WritableChunk.setSize()"); + "DO NOT CALL THIS INTERNAL METHOD. Instead call WritableChunk.setSize()"); } if (newSize < 0 || newSize > capacity) { throw new IllegalArgumentException( - String.format("size %d is incompatible with capacity %d", newSize, capacity)); + String.format("size %d is incompatible with capacity %d", newSize, capacity)); } this.size = newSize; } /** - * DO NOT CALL THIS INTERNAL METHOD. Call {@link WritableChunk#capacity()} That method is the - * only legal caller of this method in the entire system. + * DO NOT CALL THIS INTERNAL METHOD. Call {@link WritableChunk#capacity()} That method is the only legal caller of + * this method in the entire system. */ public final int internalCapacity(long password) { if (password != 1837055652467547514L) { throw new UnsupportedOperationException( - "DO NOT CALL THIS INTERNAL METHOD. Instead call WritableChunk.capacity()"); + "DO NOT CALL THIS INTERNAL METHOD. Instead call WritableChunk.capacity()"); } return capacity; } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/ChunkChunkBase.java b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/ChunkChunkBase.java index 519fbbad212..2615ae64ef5 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/ChunkChunkBase.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/ChunkChunkBase.java @@ -8,14 +8,14 @@ */ public abstract class ChunkChunkBase implements ChunkChunk { /** - * The Chunk-of-Chunk's storage is the sub-range of the underlying array defined by [offset, - * offset + capacity). It is illegal to access the underlying array outside of this range. + * The Chunk-of-Chunk's storage is the sub-range of the underlying array defined by [offset, offset + capacity). It + * is illegal to access the underlying array outside of this range. 
*/ int offset; int capacity; /** - * Useful data in the chunk-of-chunks is in the sub-range of the underlying array defined by - * [offset, offset + size). It is illegal to set size < 0 or size > capacity. + * Useful data in the chunk-of-chunks is in the sub-range of the underlying array defined by [offset, offset + + * size). It is illegal to set size < 0 or size > capacity. */ int size; diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/ChunkSource.java b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/ChunkSource.java index 31838bb9848..d5511a37d6a 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/ChunkSource.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/ChunkSource.java @@ -8,8 +8,7 @@ * * @param the attribute describing what kind of chunks are produced by this source */ -public interface ChunkSource - extends FillContextMaker, GetContextMaker { +public interface ChunkSource extends FillContextMaker, GetContextMaker { ChunkSource[] ZERO_LENGTH_CHUNK_SOURCE_ARRAY = new ChunkSource[0]; @@ -25,17 +24,16 @@ public interface ChunkSource /** * Returns a chunk of data corresponding to the keys from the given {@link OrderedKeys}. * - * @param context A context containing all mutable/state related data used in retrieving the - * Chunk. In particular, the Context may be used to provide a Chunk data pool + * @param context A context containing all mutable/state related data used in retrieving the Chunk. In particular, + * the Context may be used to provide a Chunk data pool * @param orderedKeys An {@link OrderedKeys} representing the keys to be fetched * @return A chunk of data corresponding to the keys from the given {@link OrderedKeys} * * @apiNote *

    - * The returned chunk belongs to the ColumnSource and may be mutated as result of - * calling getChunk again under the same context or as a result of the column source - * itself being mutated. The callee is not supposed to keep references to the chunk - * beyond the scope of the call. + * The returned chunk belongs to the ColumnSource and may be mutated as result of calling getChunk again + * under the same context or as a result of the column source itself being mutated. The callee is not + * supposed to keep references to the chunk beyond the scope of the call. *

    *

    * Post-condition: The retrieved values start at position 0 in the chunk. @@ -47,22 +45,20 @@ public interface ChunkSource Chunk getChunk(@NotNull GetContext context, @NotNull OrderedKeys orderedKeys); /** - * Same as {@link #getChunk(GetContext, OrderedKeys)}, except that you pass in the begin and - * last keys representing the begin and last (inclusive) keys of a single range rather than an - * {@link OrderedKeys}. Typically you want to call this only if you don't have an - * {@link OrderedKeys}, such as during an - * {@link OrderedKeys#forAllLongRanges(LongRangeConsumer)} call. In this case, it allows you to - * avoid creating an intermediary {@link OrderedKeys} object. + * Same as {@link #getChunk(GetContext, OrderedKeys)}, except that you pass in the begin and last keys representing + * the begin and last (inclusive) keys of a single range rather than an {@link OrderedKeys}. Typically you want to + * call this only if you don't have an {@link OrderedKeys}, such as during an + * {@link OrderedKeys#forAllLongRanges(LongRangeConsumer)} call. In this case, it allows you to avoid creating an + * intermediary {@link OrderedKeys} object. * - * @param context A context containing all mutable/state related data used in retrieving the - * Chunk. In particular, the Context may be used to provide a Chunk data pool + * @param context A context containing all mutable/state related data used in retrieving the Chunk. In particular, + * the Context may be used to provide a Chunk data pool * @param firstKey The beginning key (inclusive) of the range to fetch in the chunk * @param lastKey The last key (inclusive) of the range to fetch in the chunk * * @apiNote *

    - * [beginKey,lastKey] must be a range that exists in this ChunkSource. This is - * unchecked. + * [beginKey,lastKey] must be a range that exists in this ChunkSource. This is unchecked. *

    *

    * Post-condition: The retrieved values start at position 0 in the chunk. @@ -74,14 +70,11 @@ public interface ChunkSource Chunk getChunk(@NotNull GetContext context, long firstKey, long lastKey); /** - * Populates the given destination chunk with data corresponding to the keys from the given - * {@link OrderedKeys}. + * Populates the given destination chunk with data corresponding to the keys from the given {@link OrderedKeys}. * - * @param context A context containing all mutable/state related data used in retrieving the - * Chunk. - * @param destination The chunk to be populated according to {@code orderedKeys}. No assumptions - * shall be made about the size of the chunk shall be made. The chunk will be populated - * from position [0,orderedKeys.size()). + * @param context A context containing all mutable/state related data used in retrieving the Chunk. + * @param destination The chunk to be populated according to {@code orderedKeys}. No assumptions shall be made about + * the size of the chunk shall be made. The chunk will be populated from position [0,orderedKeys.size()). * @param orderedKeys An {@link OrderedKeys} representing the keys to be fetched * @apiNote *

    @@ -92,11 +85,10 @@ public interface ChunkSource *

    */ void fillChunk(@NotNull FillContext context, @NotNull WritableChunk destination, - @NotNull OrderedKeys orderedKeys); + @NotNull OrderedKeys orderedKeys); /** - * Marker interface for {@link Context}s that are used in - * {@link #getChunk(GetContext, OrderedKeys)}. + * Marker interface for {@link Context}s that are used in {@link #getChunk(GetContext, OrderedKeys)}. */ interface GetContext extends Context { } @@ -116,49 +108,41 @@ interface FillContext extends Context { interface WithPrev extends ChunkSource { WithPrev[] ZERO_LENGTH_CHUNK_SOURCE_WITH_PREV_ARRAY = new WithPrev[0]; - // TODO: Deprecate or remove getPrevChunk and fillPrevChunk if/when we do away with getPrev - // methods + // TODO: Deprecate or remove getPrevChunk and fillPrevChunk if/when we do away with getPrev methods /** - * Returns a chunk of previous data corresponding to the keys from the given - * {@link OrderedKeys}. + * Returns a chunk of previous data corresponding to the keys from the given {@link OrderedKeys}. * - * @param context A context containing all mutable/state related data used in retrieving the - * Chunk. In particular, the Context may be used to provide a Chunk data pool + * @param context A context containing all mutable/state related data used in retrieving the Chunk. In + * particular, the Context may be used to provide a Chunk data pool * @param orderedKeys An {@link OrderedKeys} representing the keys to be fetched * @return A chunk of data corresponding to the keys from the given {@link OrderedKeys} * @apiNote *

    - * The returned chunk belongs to the ColumnSource and may be mutated as result of - * calling getChunk again under the same context or as a result of the column - * source itself being mutated. The callee is not supposed to keep references to - * the chunk beyond the scope of the call. + * The returned chunk belongs to the ColumnSource and may be mutated as result of calling getChunk + * again under the same context or as a result of the column source itself being mutated. The callee is + * not supposed to keep references to the chunk beyond the scope of the call. *

    *

    * Post-condition: The retrieved values start at position 0 in the chunk. *

    * Post-condition: destination.size() will be equal to orderedKeys.size() */ - Chunk getPrevChunk(@NotNull GetContext context, - @NotNull OrderedKeys orderedKeys); + Chunk getPrevChunk(@NotNull GetContext context, @NotNull OrderedKeys orderedKeys); /** - * Same as {@link #getPrevChunk(GetContext, OrderedKeys)}, except that you pass in the begin - * and last keys representing the begin and last (inclusive) keys of a single range rather - * than an {@link OrderedKeys}. + * Same as {@link #getPrevChunk(GetContext, OrderedKeys)}, except that you pass in the begin and last keys + * representing the begin and last (inclusive) keys of a single range rather than an {@link OrderedKeys}. */ - Chunk getPrevChunk(@NotNull GetContext context, long firstKey, - long lastKey); + Chunk getPrevChunk(@NotNull GetContext context, long firstKey, long lastKey); /** - * Populates the given destination chunk with data corresponding to the keys from the given - * {@link OrderedKeys}. + * Populates the given destination chunk with data corresponding to the keys from the given {@link OrderedKeys}. * - * @param context A context containing all mutable/state related data used in retrieving the - * Chunk. - * @param destination The chunk to be populated according to {@code orderedKeys}. No - * assumptions shall be made about the size of the chunk shall be made. The chunk - * will be populated from position [0,orderedKeys.size()). + * @param context A context containing all mutable/state related data used in retrieving the Chunk. + * @param destination The chunk to be populated according to {@code orderedKeys}. No assumptions shall be made + * about the size of the chunk shall be made. The chunk will be populated from position + * [0,orderedKeys.size()). * @param orderedKeys An {@link OrderedKeys} representing the keys to be fetched * @apiNote *

    @@ -168,8 +152,8 @@ Chunk getPrevChunk(@NotNull GetContext context, long firstKey, * Post-condition: destination.size() will be equal to orderedKeys.size() *

    */ - void fillPrevChunk(@NotNull FillContext context, - @NotNull WritableChunk destination, @NotNull OrderedKeys orderedKeys); + void fillPrevChunk(@NotNull FillContext context, @NotNull WritableChunk destination, + @NotNull OrderedKeys orderedKeys); /** * @return a chunk source which accesses the previous values. diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/ChunkStream.java b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/ChunkStream.java index 0635a3ade44..ddb8f939113 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/ChunkStream.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/ChunkStream.java @@ -8,8 +8,7 @@ import java.util.stream.LongStream; public class ChunkStream { - public static DoubleStream of(DoubleChunk chunk, int offset, - int capacity) { + public static DoubleStream of(DoubleChunk chunk, int offset, int capacity) { ChunkUtils.checkSliceArgs(chunk.size, offset, capacity); return Arrays.stream(chunk.data, chunk.offset + offset, chunk.offset + offset + capacity); } @@ -19,8 +18,7 @@ public static IntStream of(IntChunk chunk, int offset, return Arrays.stream(chunk.data, chunk.offset + offset, chunk.offset + offset + capacity); } - public static LongStream of(LongChunk chunk, int offset, - int capacity) { + public static LongStream of(LongChunk chunk, int offset, int capacity) { ChunkUtils.checkSliceArgs(chunk.size, offset, capacity); return Arrays.stream(chunk.data, chunk.offset + offset, chunk.offset + offset + capacity); } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/ChunkType.java b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/ChunkType.java index e617caad669..46033c303b2 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/ChunkType.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/ChunkType.java @@ -10,14 +10,13 @@ import java.util.function.IntFunction; public enum ChunkType implements ChunkFactory { - Boolean(new BooleanChunkFactory()), Char(new 
CharChunkFactory()), Byte( - new ByteChunkFactory()), Short(new ShortChunkFactory()), Int(new IntChunkFactory()), Long( - new LongChunkFactory()), Float(new FloatChunkFactory()), Double( - new DoubleChunkFactory()), Object(new ObjectChunkFactory()); + Boolean(new BooleanChunkFactory()), Char(new CharChunkFactory()), Byte(new ByteChunkFactory()), Short( + new ShortChunkFactory()), Int(new IntChunkFactory()), Long(new LongChunkFactory()), Float( + new FloatChunkFactory()), Double(new DoubleChunkFactory()), Object(new ObjectChunkFactory()); private static final SimpleTypeMap fromElementTypeMap = SimpleTypeMap.create( - ChunkType.Boolean, ChunkType.Char, ChunkType.Byte, ChunkType.Short, ChunkType.Int, - ChunkType.Long, ChunkType.Float, ChunkType.Double, ChunkType.Object); + ChunkType.Boolean, ChunkType.Char, ChunkType.Byte, ChunkType.Short, ChunkType.Int, + ChunkType.Long, ChunkType.Float, ChunkType.Double, ChunkType.Object); public static ChunkType fromElementType(Class elementType) { return fromElementTypeMap.get(elementType); @@ -74,22 +73,20 @@ public final ChunkChunk chunkChunkWrap(Chunk[] ar @NotNull @Override - public final ChunkChunk chunkChunkWrap(Chunk[] array, int offset, - int capacity) { + public final ChunkChunk chunkChunkWrap(Chunk[] array, int offset, int capacity) { return factory.chunkChunkWrap(array, offset, capacity); } @NotNull @Override - public final ChunkPage pageWrap(long beginRow, Object array, - long mask) { + public final ChunkPage pageWrap(long beginRow, Object array, long mask) { return factory.pageWrap(beginRow, array, mask); } @NotNull @Override - public final ChunkPage pageWrap(long beginRow, Object array, - int offset, int capacity, long mask) { + public final ChunkPage pageWrap(long beginRow, Object array, int offset, int capacity, + long mask) { return factory.pageWrap(beginRow, array, offset, capacity, mask); } @@ -131,15 +128,14 @@ public final WritableChunkChunk makeWritableChunkChunk( @NotNull @Override - public final 
WritableChunk writableChunkWrap(Object array, int offset, - int capacity) { + public final WritableChunk writableChunkWrap(Object array, int offset, int capacity) { return factory.writableChunkWrap(array, offset, capacity); } @NotNull @Override - public final WritableChunkChunk writableChunkChunkWrap( - WritableChunk[] array, int offset, int capacity) { + public final WritableChunkChunk writableChunkChunkWrap(WritableChunk[] array, + int offset, int capacity) { return factory.writableChunkChunkWrap(array, offset, capacity); } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/Context.java b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/Context.java index 0b6dd8ad885..e49b52563be 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/Context.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/Context.java @@ -3,13 +3,12 @@ import io.deephaven.util.SafeCloseable; /** - * Base interface for state/mutable data that needs to be kept over the course of an evaluation - * session for a Chunk Source, Functor or Sink. + * Base interface for state/mutable data that needs to be kept over the course of an evaluation session for a Chunk + * Source, Functor or Sink. */ public interface Context extends SafeCloseable { /** - * Release any resources associated with this context. The context should not be used - * afterwards. + * Release any resources associated with this context. The context should not be used afterwards. 
*/ default void close() {} diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/ContextWithChunk.java b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/ContextWithChunk.java index e1e19554360..f14ee9664b1 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/ContextWithChunk.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/ContextWithChunk.java @@ -3,8 +3,7 @@ import io.deephaven.util.SafeCloseable; import org.jetbrains.annotations.NotNull; -public class ContextWithChunk - implements Context { +public class ContextWithChunk implements Context { private final CONTEXT context; private WritableChunk writableChunk; @@ -27,9 +26,8 @@ public void close() { /** * @return a {@link WritableChunk} which you can use for results * - * @apiNote the chunk is valid until the next call to this function, - * {@link #getResettableChunk()}, {@link #getWritableChunk(Context)}, - * {@link #getResettableChunk(Context)}, or + * @apiNote the chunk is valid until the next call to this function, {@link #getResettableChunk()}, + * {@link #getWritableChunk(Context)}, {@link #getResettableChunk(Context)}, or * {@link #resetChunkFromArray(Context, Object, int, int)} for this context. */ public WritableChunk getWritableChunk() { @@ -37,12 +35,11 @@ public WritableChunk getWritableChunk() { } /** - * @return a {@link ResettableChunk} chunk which you can use for results by calling one of its - * various reset methods. + * @return a {@link ResettableChunk} chunk which you can use for results by calling one of its various reset + * methods. 
* - * @apiNote the chunk is valid until the next call to this function, - * {@link #getWritableChunk()}, {@link #getWritableChunk(Context)}, - * {@link #getResettableChunk(Context)}, or + * @apiNote the chunk is valid until the next call to this function, {@link #getWritableChunk()}, + * {@link #getWritableChunk(Context)}, {@link #getResettableChunk(Context)}, or * {@link #resetChunkFromArray(Context, Object, int, int)} for this context. */ public ResettableChunk getResettableChunk() { @@ -83,16 +80,14 @@ public void ensureLength(final int length) { * * @return a {@link WritableChunk} which you can use for results. The size will be set to 0. * - * @apiNote the chunk is valid until the next call to this function, - * {@link #getWritableChunk()}, {@link #getResettableChunk()}, - * {@link #getResettableChunk(Context)}, or + * @apiNote the chunk is valid until the next call to this function, {@link #getWritableChunk()}, + * {@link #getResettableChunk()}, {@link #getResettableChunk(Context)}, or * {@link #resetChunkFromArray(Context, Object, int, int)} for this context. 
*/ public static > WRITABLE_CHUNK getWritableChunk( - @NotNull Context context) { + @NotNull Context context) { // noinspection unchecked - WRITABLE_CHUNK writableChunk = - (WRITABLE_CHUNK) ((ContextWithChunk) context).getWritableChunk(); + WRITABLE_CHUNK writableChunk = (WRITABLE_CHUNK) ((ContextWithChunk) context).getWritableChunk(); writableChunk.setSize(0); return writableChunk; } @@ -100,20 +95,18 @@ public static > RESETTABLE_WRITABLE_CHUNK getResettableChunk( - @NotNull Context context) { + @NotNull Context context) { // noinspection unchecked - return (RESETTABLE_WRITABLE_CHUNK) ((ContextWithChunk) context) - .getResettableChunk(); + return (RESETTABLE_WRITABLE_CHUNK) ((ContextWithChunk) context).getResettableChunk(); } /** @@ -124,13 +117,12 @@ public static > CHUNK resetChunkFromArray( - @NotNull Context context, Object array, int offset, int length) { + @NotNull Context context, Object array, int offset, int length) { // noinspection unchecked ContextWithChunk getContext = (ContextWithChunk) context; @@ -149,27 +141,24 @@ public static > CHUNK res /** - * Checks if this chunk is the result of a call to {@link #getWritableChunk()} or - * {@link #getWritableChunk(Context)} with this context. This is primarily intended for testing - * and verification code. + * Checks if this chunk is the result of a call to {@link #getWritableChunk()} or {@link #getWritableChunk(Context)} + * with this context. This is primarily intended for testing and verification code. 
*/ - public static boolean isMyWritableChunk(@NotNull Context context, - Chunk chunk) { + public static boolean isMyWritableChunk(@NotNull Context context, Chunk chunk) { // noinspection unchecked return chunk.isAlias(((ContextWithChunk) context).writableChunk); } /** * Checks if this chunk is the result of a call to {@link #getResettableChunk()} or - * {@link #getResettableChunk(Context)} with this context, followed by a some reset call, - * including the result of a call to {@link #resetChunkFromArray(Context, Object, int, int)}. - * This is primarily intended for testing and verification code. + * {@link #getResettableChunk(Context)} with this context, followed by a some reset call, including the result of a + * call to {@link #resetChunkFromArray(Context, Object, int, int)}. This is primarily intended for testing and + * verification code. */ - public static boolean isMyResettableChunk( - @NotNull Context context, Chunk chunk) { + public static boolean isMyResettableChunk(@NotNull Context context, + Chunk chunk) { // noinspection unchecked ContextWithChunk getContext = (ContextWithChunk) context; - return !chunk.isAlias(getContext.writableChunk) - && chunk.isAlias(getContext.resettableWritableChunk); + return !chunk.isAlias(getContext.writableChunk) && chunk.isAlias(getContext.resettableWritableChunk); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/DefaultChunkSource.java b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/DefaultChunkSource.java index bc3eb12c2f8..e2bfa46f437 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/DefaultChunkSource.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/DefaultChunkSource.java @@ -16,45 +16,39 @@ default GetContext makeGetContext(final int chunkCapacity, final SharedContext s } @Override - default FillContext makeFillContext(final int chunkCapacity, - final SharedContext sharedContext) { + default FillContext makeFillContext(final int chunkCapacity, final SharedContext 
sharedContext) { return DEFAULT_FILL_INSTANCE; } @Override - default Chunk getChunk(@NotNull final GetContext context, - @NotNull final OrderedKeys orderedKeys) { + default Chunk getChunk(@NotNull final GetContext context, @NotNull final OrderedKeys orderedKeys) { return getChunkByFilling(context, orderedKeys); } @Override - default Chunk getChunk(@NotNull final GetContext context, long firstKey, - long lastKey) { + default Chunk getChunk(@NotNull final GetContext context, long firstKey, long lastKey) { try (OrderedKeys orderedKeys = OrderedKeys.forRange(firstKey, lastKey)) { return getChunk(context, orderedKeys); } } @FinalDefault - default Chunk getChunkByFilling(@NotNull final GetContext context, - @NotNull final OrderedKeys orderedKeys) { + default Chunk getChunkByFilling(@NotNull final GetContext context, @NotNull final OrderedKeys orderedKeys) { WritableChunk chunk = DefaultGetContext.getWritableChunk(context); fillChunk(DefaultGetContext.getFillContext(context), chunk, orderedKeys); return chunk; } - interface WithPrev - extends DefaultChunkSource, ChunkSource.WithPrev { + interface WithPrev extends DefaultChunkSource, ChunkSource.WithPrev { @Override default Chunk getPrevChunk(@NotNull final GetContext context, - @NotNull final OrderedKeys orderedKeys) { + @NotNull final OrderedKeys orderedKeys) { return getPrevChunkByFilling(context, orderedKeys); } @Override - default Chunk getPrevChunk(@NotNull final GetContext context, long firstKey, - long lastKey) { + default Chunk getPrevChunk(@NotNull final GetContext context, long firstKey, long lastKey) { try (OrderedKeys orderedKeys = OrderedKeys.forRange(firstKey, lastKey)) { return getPrevChunk(context, orderedKeys); } @@ -62,7 +56,7 @@ default Chunk getPrevChunk(@NotNull final GetContext context, lo @FinalDefault default Chunk getPrevChunkByFilling(@NotNull final GetContext context, - @NotNull final OrderedKeys orderedKeys) { + @NotNull final OrderedKeys orderedKeys) { WritableChunk chunk = 
DefaultGetContext.getWritableChunk(context); fillPrevChunk(DefaultGetContext.getFillContext(context), chunk, orderedKeys); return chunk; @@ -79,21 +73,18 @@ public ChunkType getChunkType() { } @Override - public Chunk getChunk(@NotNull GetContext context, - @NotNull OrderedKeys orderedKeys) { + public Chunk getChunk(@NotNull GetContext context, @NotNull OrderedKeys orderedKeys) { return chunkSource.getPrevChunk(context, orderedKeys); } @Override - public Chunk getChunk(@NotNull GetContext context, long firstKey, - long lastKey) { + public Chunk getChunk(@NotNull GetContext context, long firstKey, long lastKey) { return chunkSource.getPrevChunk(context, firstKey, lastKey); } @Override - public void fillChunk(@NotNull FillContext context, - @NotNull WritableChunk destination, - @NotNull OrderedKeys orderedKeys) { + public void fillChunk(@NotNull FillContext context, @NotNull WritableChunk destination, + @NotNull OrderedKeys orderedKeys) { chunkSource.fillPrevChunk(context, destination, orderedKeys); } @@ -121,21 +112,18 @@ public FillContext makeFillContext(int chunkCapacity) { } /** - * An alternative set of defaults which may typically be used by {@link ChunkSource}s which - * support a get method which only works for contiguous ranges. They should just implement - * {@link #getChunk(GetContext, long, long)}. + * An alternative set of defaults which may typically be used by {@link ChunkSource}s which support a get method + * which only works for contiguous ranges. They should just implement {@link #getChunk(GetContext, long, long)}. */ interface SupportsContiguousGet extends DefaultChunkSource { @Override default Chunk getChunk(@NotNull final GetContext context, - @NotNull final OrderedKeys orderedKeys) { - return orderedKeys.isContiguous() - ? getChunk(context, orderedKeys.firstKey(), orderedKeys.lastKey()) - : getChunkByFilling(context, orderedKeys); + @NotNull final OrderedKeys orderedKeys) { + return orderedKeys.isContiguous() ? 
getChunk(context, orderedKeys.firstKey(), orderedKeys.lastKey()) + : getChunkByFilling(context, orderedKeys); } @Override - Chunk getChunk(@NotNull final GetContext context, long firstKey, - long lastKey); + Chunk getChunk(@NotNull final GetContext context, long firstKey, long lastKey); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/DefaultGetContext.java b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/DefaultGetContext.java index 9fba9ebb489..24dcca9072b 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/DefaultGetContext.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/DefaultGetContext.java @@ -2,18 +2,15 @@ import org.jetbrains.annotations.NotNull; -public class DefaultGetContext - extends ContextWithChunk implements ChunkSource.GetContext { +public class DefaultGetContext extends ContextWithChunk + implements ChunkSource.GetContext { - public DefaultGetContext(ChunkSource.FillContext fillContext, ChunkType chunkType, - int chunkCapacity) { + public DefaultGetContext(ChunkSource.FillContext fillContext, ChunkType chunkType, int chunkCapacity) { super(fillContext, chunkType, chunkCapacity); } - public DefaultGetContext(ChunkSource chunkSource, int chunkCapacity, - SharedContext sharedContext) { - super(chunkSource.makeFillContext(chunkCapacity, sharedContext), chunkSource.getChunkType(), - chunkCapacity); + public DefaultGetContext(ChunkSource chunkSource, int chunkCapacity, SharedContext sharedContext) { + super(chunkSource.makeFillContext(chunkCapacity, sharedContext), chunkSource.getChunkType(), chunkCapacity); } public ChunkSource.FillContext getFillContext() { diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/FillContextMaker.java b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/FillContextMaker.java index 7bda37d1c00..1881374aee4 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/FillContextMaker.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/FillContextMaker.java @@ 
-4,22 +4,20 @@ public interface FillContextMaker { /** - * Allocate a new {@link ChunkSource.FillContext} for filling chunks from this - * {@code FillContextMaker}, typically a {@code ChunkSource}. + * Allocate a new {@link ChunkSource.FillContext} for filling chunks from this {@code FillContextMaker}, typically a + * {@code ChunkSource}. * - * @param chunkCapacity The maximum size of any {@link WritableChunk} that will be filled with - * this context + * @param chunkCapacity The maximum size of any {@link WritableChunk} that will be filled with this context * @param sharedContext Shared store of intermediate results. * @return A context for use with fill operations */ ChunkSource.FillContext makeFillContext(int chunkCapacity, SharedContext sharedContext); /** - * Allocate a new {@link ChunkSource.FillContext} for filling chunks from this - * {@code FillContextMaker}, typically a {@link ChunkSource}, without a {@link SharedContext}. + * Allocate a new {@link ChunkSource.FillContext} for filling chunks from this {@code FillContextMaker}, typically a + * {@link ChunkSource}, without a {@link SharedContext}. * - * @param chunkCapacity The maximum size of any {@link WritableChunk} that will be filled with - * this context + * @param chunkCapacity The maximum size of any {@link WritableChunk} that will be filled with this context * @return A context for use with fill operations */ @FinalDefault diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/GetContextMaker.java b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/GetContextMaker.java index 0490d6c8f36..6b7f1de5090 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/GetContextMaker.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/GetContextMaker.java @@ -4,22 +4,20 @@ public interface GetContextMaker { /** - * Allocate a new {@link ChunkSource.GetContext} for retrieving chunks from this - * {@code GetContextMaker}, typically a {@code ChunkSource}. 
+ * Allocate a new {@link ChunkSource.GetContext} for retrieving chunks from this {@code GetContextMaker}, typically + * a {@code ChunkSource}. * - * @param chunkCapacity The maximum size required for any {@link WritableChunk} allocated as - * part of the result. + * @param chunkCapacity The maximum size required for any {@link WritableChunk} allocated as part of the result. * @param sharedContext Shared store of intermediate results. * @return A context for use with get operations */ ChunkSource.GetContext makeGetContext(int chunkCapacity, SharedContext sharedContext); /** - * Allocate a new {@link ChunkSource.GetContext} for retrieving chunks from this - * {@code FillContextMaker}, typically a {@code ChunkSource} without a {@link SharedContext}. + * Allocate a new {@link ChunkSource.GetContext} for retrieving chunks from this {@code FillContextMaker}, typically + * a {@code ChunkSource} without a {@link SharedContext}. * - * @param chunkCapacity The maximum size required for any {@link WritableChunk} allocated as - * part of the result. + * @param chunkCapacity The maximum size required for any {@link WritableChunk} allocated as part of the result. * @return A context for use with get operations */ @FinalDefault diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/OrderedChunkUtils.java b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/OrderedChunkUtils.java index e31769ce049..292d60d8077 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/OrderedChunkUtils.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/OrderedChunkUtils.java @@ -13,8 +13,7 @@ public class OrderedChunkUtils { * @param value the value to find * @return The insertion point in {@code [0, chunk.size())}. 
*/ - public static int findInChunk(final LongChunk chunk, - final long value) { + public static int findInChunk(final LongChunk chunk, final long value) { return findInChunk(chunk, value, 0, chunk.size()); } @@ -28,9 +27,9 @@ public static int findInChunk(final LongChunk chunk, * @return The insertion point in {@code [startOffset, endOffsetExclusive)}. */ public static int findInChunk(final LongChunk chunk, final long value, - final int startOffset, final int endOffsetExclusive) { + final int startOffset, final int endOffsetExclusive) { int retVal = Arrays.binarySearch(chunk.data, chunk.offset + startOffset, - chunk.offset + endOffsetExclusive, value); + chunk.offset + endOffsetExclusive, value); // Note that Arrays.binarySearch returns `-i + 1` if element not found. retVal = (retVal < 0) ? ~retVal : retVal; return Math.min(retVal - chunk.offset, chunk.size); diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/ResettableChunk.java b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/ResettableChunk.java index af066165adb..e64435ff834 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/ResettableChunk.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/ResettableChunk.java @@ -3,14 +3,13 @@ import io.deephaven.db.v2.sources.chunk.Attributes.Any; /** - * {@link WritableChunk} that may have its backing storage reset to a slice of that belonging to - * another {@link WritableChunk} or a native array. + * {@link WritableChunk} that may have its backing storage reset to a slice of that belonging to another + * {@link WritableChunk} or a native array. */ public interface ResettableChunk extends Chunk { /** - * Reset the data and bounds of this chunk to a range or sub-range of the specified - * {@link WritableChunk}. + * Reset the data and bounds of this chunk to a range or sub-range of the specified {@link WritableChunk}. 
* * @param other The other {@link WritableChunk} * @param offset The offset into other @@ -18,8 +17,7 @@ public interface ResettableChunk extends Chunk { * * @return this */ - Chunk resetFromChunk(WritableChunk other, int offset, - int capacity); + Chunk resetFromChunk(WritableChunk other, int offset, int capacity); /** * Reset the data and bounds of this chunk to a range or sub-range of the specified array. diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/ResettableChunkChunk.java b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/ResettableChunkChunk.java index 7adeb5b088a..c085d152be9 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/ResettableChunkChunk.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/ResettableChunkChunk.java @@ -3,14 +3,13 @@ import io.deephaven.db.v2.sources.chunk.Attributes.Any; /** - * {@link ChunkChunk} that may have its backing storage reset to a slice of that belonging to - * another {@link ChunkChunk} or a native array. + * {@link ChunkChunk} that may have its backing storage reset to a slice of that belonging to another {@link ChunkChunk} + * or a native array. */ public interface ResettableChunkChunk extends ChunkChunk { /** - * Reset the data and bounds of this chunk to a range or sub-range of the specified - * {@link ChunkChunk}. + * Reset the data and bounds of this chunk to a range or sub-range of the specified {@link ChunkChunk}. 
* * @param other The other {@link ChunkChunk} * @param offset The offset into other diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/ResettableContext.java b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/ResettableContext.java index de55a168787..54de3d5707c 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/ResettableContext.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/ResettableContext.java @@ -1,8 +1,8 @@ package io.deephaven.db.v2.sources.chunk; /** - * Resettable {@link Context} interface, for contexts that must be reset between steps of an - * operation (e.g. when advancing to a new region, or a new chunk of ordered keys). + * Resettable {@link Context} interface, for contexts that must be reset between steps of an operation (e.g. when + * advancing to a new region, or a new chunk of ordered keys). */ public interface ResettableContext extends Context { diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/ResettableReadOnlyChunk.java b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/ResettableReadOnlyChunk.java index 3239cc75cb6..af7243be1e0 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/ResettableReadOnlyChunk.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/ResettableReadOnlyChunk.java @@ -4,26 +4,22 @@ import io.deephaven.db.v2.sources.chunk.util.pools.PoolableChunk; /** - * {@link Chunk} that may have its backing storage reset to a slice of that belonging to another - * {@link Chunk} or a native array. + * {@link Chunk} that may have its backing storage reset to a slice of that belonging to another {@link Chunk} or a + * native array. */ -public interface ResettableReadOnlyChunk - extends ResettableChunk, PoolableChunk { +public interface ResettableReadOnlyChunk extends ResettableChunk, PoolableChunk { /** - * Reset the data and bounds of this chunk to a range or sub-range of the specified - * {@link Chunk}. 
+ * Reset the data and bounds of this chunk to a range or sub-range of the specified {@link Chunk}. * * @param other The other {@link Chunk} * @param offset The offset into other * @param capacity The capacity this should have after reset */ - Chunk resetFromChunk(Chunk other, int offset, - int capacity); + Chunk resetFromChunk(Chunk other, int offset, int capacity); @Override - default Chunk resetFromChunk(WritableChunk other, - int offset, int capacity) { + default Chunk resetFromChunk(WritableChunk other, int offset, int capacity) { return resetFromChunk((Chunk) other, offset, capacity); } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/ResettableWritableChunk.java b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/ResettableWritableChunk.java index 248db242538..7fd82d0a61a 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/ResettableWritableChunk.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/ResettableWritableChunk.java @@ -4,19 +4,17 @@ import io.deephaven.db.v2.sources.chunk.util.pools.PoolableChunk; /** - * {@link WritableChunk} that may have its backing storage reset to a slice of that belonging to - * another {@link WritableChunk} or a native array. + * {@link WritableChunk} that may have its backing storage reset to a slice of that belonging to another + * {@link WritableChunk} or a native array. 
*/ public interface ResettableWritableChunk - extends ResettableChunk, WritableChunk, PoolableChunk { + extends ResettableChunk, WritableChunk, PoolableChunk { @Override - WritableChunk resetFromChunk(WritableChunk other, - int offset, int capacity); + WritableChunk resetFromChunk(WritableChunk other, int offset, int capacity); @Override - WritableChunk resetFromArray(Object array, int offset, - int capacity); + WritableChunk resetFromArray(Object array, int offset, int capacity); @Override WritableChunk resetFromArray(Object array); diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/ResettableWritableChunkChunk.java b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/ResettableWritableChunkChunk.java index 0b742063f85..51413567502 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/ResettableWritableChunkChunk.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/ResettableWritableChunkChunk.java @@ -3,14 +3,13 @@ import io.deephaven.db.v2.sources.chunk.Attributes.Any; /** - * {@link WritableChunkChunk} that may have its backing storage reset to a slice of that belonging - * to another {@link WritableChunkChunk} or a native array. + * {@link WritableChunkChunk} that may have its backing storage reset to a slice of that belonging to another + * {@link WritableChunkChunk} or a native array. */ public interface ResettableWritableChunkChunk extends WritableChunkChunk { /** - * Reset the data and bounds of this chunk to a range or sub-range of the specified - * {@link WritableChunkChunk}. + * Reset the data and bounds of this chunk to a range or sub-range of the specified {@link WritableChunkChunk}. 
* * @param other The other {@link WritableChunkChunk} * @param offset The offset into other diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/SharedContext.java b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/SharedContext.java index 7adf29822fe..057063ad982 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/SharedContext.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/SharedContext.java @@ -10,24 +10,22 @@ /** *

    - * {@link ResettableContext} used as a holder for other {@link ResettableContext}s that may be - * shared across components. + * {@link ResettableContext} used as a holder for other {@link ResettableContext}s that may be shared across components. * *

    - * This serves as a place to cache re-usable computations or resources, but must be {@link #reset()} - * for every step of an operation (usually a chunk of ordered keys). + * This serves as a place to cache re-usable computations or resources, but must be {@link #reset()} for every step of + * an operation (usually a chunk of ordered keys). * *

    - * For example, {@link io.deephaven.db.v2.sources.ReadOnlyRedirectedColumnSource}s that share the - * same {@link io.deephaven.db.v2.utils.RedirectionIndex} cache a chunk of redirections for the most - * recent chunk of ordered keys they have been handed. + * For example, {@link io.deephaven.db.v2.sources.ReadOnlyRedirectedColumnSource}s that share the same + * {@link io.deephaven.db.v2.utils.RedirectionIndex} cache a chunk of redirections for the most recent chunk of ordered + * keys they have been handed. * *

    - * It's important that "nested" usage follows the convention of creating a new instance and passing - * that instance to context creation methods. Said nested instance should be (or be attached to) an - * entry in the parent context, and reset/closed when said entry is. It should always be safe to - * skip nested SharedContext creation if all sources that may be using a given instance will be - * passed the same ordered keys. + * It's important that "nested" usage follows the convention of creating a new instance and passing that instance to + * context creation methods. Said nested instance should be (or be attached to) an entry in the parent context, and + * reset/closed when said entry is. It should always be safe to skip nested SharedContext creation if all sources that + * may be using a given instance will be passed the same ordered keys. */ public class SharedContext implements ResettableContext { @@ -46,15 +44,14 @@ protected SharedContext() { * @param The type of the context that should be associated with this key type */ @SuppressWarnings("unused") - // The VALUE_TYPE parameter is in fact used to produce a compile-time association between a key - // class and its associated value class + // The VALUE_TYPE parameter is in fact used to produce a compile-time association between a key class and its + // associated value class public interface Key { } /** - * Get or create the {@link ResettableContext} value for a {@link Key} key. If the value is - * computed, the result value will be associated with the {@code key} until the - * {@link SharedContext} is {@link #close()}ed. + * Get or create the {@link ResettableContext} value for a {@link Key} key. If the value is computed, the result + * value will be associated with the {@code key} until the {@link SharedContext} is {@link #close()}ed. 
* * @param key The key * @param valueFactory The value factory, to be invoked if {@code key} is not found within this @@ -62,15 +59,15 @@ public interface Key { * @return The value associated with {@code key}, possibly newly-created */ public final > V getOrCreate(final K key, - @NotNull final Supplier valueFactory) { + @NotNull final Supplier valueFactory) { // noinspection unchecked return (V) entries.computeIfAbsent(key, k -> valueFactory.get()); } /** *

    - * Reset implementation which invokes {@link ResettableContext#reset()} on all values registered - * via {@link #getOrCreate(Key, Supplier)}. + * Reset implementation which invokes {@link ResettableContext#reset()} on all values registered via + * {@link #getOrCreate(Key, Supplier)}. * *

    * Sub-classes should be sure to call {@code super.reset()}. @@ -104,11 +101,11 @@ public static SharedContext makeSharedContext() { } /** - * Abstract {@link Key} implementation for use when a simple Object reference coupled with - * sub-class identity can determine equality for sharing purposes. + * Abstract {@link Key} implementation for use when a simple Object reference coupled with sub-class identity can + * determine equality for sharing purposes. */ public static abstract class ExactReferenceSharingKey - implements Key { + implements Key { private final Object differentiator; @@ -131,8 +128,8 @@ public final boolean equals(final Object other) { @Override public final int hashCode() { return 31 - + 31 * getClass().hashCode() - + 31 * differentiator.hashCode(); + + 31 * getClass().hashCode() + + 31 * differentiator.hashCode(); } } } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/WritableChunk.java b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/WritableChunk.java index c1b43fba821..5a80cad7ddf 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/WritableChunk.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/WritableChunk.java @@ -17,8 +17,7 @@ public interface WritableChunk extends Chunk, PoolableCh WritableChunk slice(int offset, int capacity); /** - * Fill a sub-range of this writable chunk with the appropriate Deephaven null value for the - * type. + * Fill a sub-range of this writable chunk with the appropriate Deephaven null value for the type. * * @param offset Starting offset * @param size Number of values to fill @@ -45,25 +44,24 @@ default void fillWithBoxedValue(int offset, int size, Object value) { /** *

    - * Fill a sub-range of this writable chunk with values from a {@link Buffer}. This is an - * optional method, as some chunk types do not have a corresponding buffer type. + * Fill a sub-range of this writable chunk with values from a {@link Buffer}. This is an optional method, as some + * chunk types do not have a corresponding buffer type. * *

    - * Implementations are free to copy data as efficiently as they may, and will use absolute - * rather than positional access where possible. To facilitate this pattern, {@code srcOffset} - * is an absolute offset from position 0, rather than a relative offset from - * {@code srcBuffer.position()}. + * Implementations are free to copy data as efficiently as they may, and will use absolute rather than positional + * access where possible. To facilitate this pattern, {@code srcOffset} is an absolute offset from position 0, + * rather than a relative offset from {@code srcBuffer.position()}. * *

    * - * {@code srcBuffer}'s position may be modified, but will always be restored to its initial - * value upon successful return. + * {@code srcBuffer}'s position may be modified, but will always be restored to its initial value upon successful + * return. * * @param srcBuffer The source buffer, which will be cast to the appropriate type for this chunk - * @param srcOffset The offset into {@code srcBuffer} (from position 0, not - * {@code srcBuffer.position()}) to start copying from + * @param srcOffset The offset into {@code srcBuffer} (from position 0, not {@code srcBuffer.position()}) + * to start copying from * @param destOffset The offset into this chunk to start copying to * @param length The number of elements to copy */ @@ -76,9 +74,8 @@ default void setSize(int newSize) { } /** - * DO NOT CALL THIS INTERNAL METHOD. If you want to set a size, call - * {@link WritableChunk#setSize}. That method is the only legal caller of this method in the - * entire system. + * DO NOT CALL THIS INTERNAL METHOD. If you want to set a size, call {@link WritableChunk#setSize}. That method is + * the only legal caller of this method in the entire system. */ void internalSetSize(int newSize, long password); @@ -87,8 +84,8 @@ default int capacity() { } /** - * DO NOT CALL THIS INTERNAL METHOD. Call {@link WritableChunk#capacity()} That method is the - * only legal caller of this method in the entire system. + * DO NOT CALL THIS INTERNAL METHOD. Call {@link WritableChunk#capacity()} That method is the only legal caller of + * this method in the entire system. */ int internalCapacity(long password); @@ -152,15 +149,14 @@ default WritableObjectChunk asWritableObjectChunk() { /** * Upcast the attribute. * - * When you know the data you will receive in this chunk from another source is a more specific - * suptype than the source provides, you can upcast the attribute with this helper method (such - * as reading KeyIndices from a ColumnSource which thinks they are just Values.) 
+ * When you know the data you will receive in this chunk from another source is a more specific suptype than the + * source provides, you can upcast the attribute with this helper method (such as reading KeyIndices from a + * ColumnSource which thinks they are just Values.) * - * @apiNote Downcast should not be necessary on WritableChunks, as a WritableChunk filling - * method should accept an lower bound wildcard. + * @apiNote Downcast should not be necessary on WritableChunks, as a WritableChunk filling method should accept an + * lower bound wildcard. */ - static WritableChunk upcast( - WritableChunk self) { + static WritableChunk upcast(WritableChunk self) { // noinspection unchecked return (WritableChunk) self; } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/page/ChunkPage.java b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/page/ChunkPage.java index d46e3bb854b..7888263cc66 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/page/ChunkPage.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/page/ChunkPage.java @@ -7,9 +7,8 @@ import io.deephaven.util.annotations.FinalDefault; import org.jetbrains.annotations.NotNull; -public interface ChunkPage - extends Page.WithDefaults, Chunk, - DefaultChunkSource.SupportsContiguousGet { +public interface ChunkPage extends Page.WithDefaults, Chunk, + DefaultChunkSource.SupportsContiguousGet { @Override ChunkType getChunkType(); @@ -30,10 +29,9 @@ default long maxRow(final long row) { /** * @return The offset into the chunk for this row. - * @apiNote This function is for convenience over {@link #getRowOffset(long)}, so the caller - * doesn't have to cast to an int. - * @implNote This page is known to be a chunk, so {@link #size()} is an int, and so is the - * offset. + * @apiNote This function is for convenience over {@link #getRowOffset(long)}, so the caller doesn't have to cast to + * an int. 
+ * @implNote This page is known to be a chunk, so {@link #size()} is an int, and so is the offset. */ @FinalDefault default int getChunkOffset(final long row) { @@ -42,8 +40,7 @@ default int getChunkOffset(final long row) { @FinalDefault @Override - default Chunk getChunk(@NotNull final GetContext context, final long firstKey, - final long lastKey) { + default Chunk getChunk(@NotNull final GetContext context, final long firstKey, final long lastKey) { return slice(getChunkOffset(firstKey), Math.toIntExact(lastKey - firstKey + 1)); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/page/Page.java b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/page/Page.java index 950e0651dca..06a46d51be0 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/page/Page.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/page/Page.java @@ -11,34 +11,30 @@ import org.jetbrains.annotations.NotNull; /** - * This provides the {@link ChunkSource} interface to a contiguous block of data beginning at - * {@link #firstRowOffset()} and continuing to some row less than or equal to - * {@link #firstRowOffset()} + {@link #maxRow(long)}. + * This provides the {@link ChunkSource} interface to a contiguous block of data beginning at {@link #firstRowOffset()} + * and continuing to some row less than or equal to {@link #firstRowOffset()} + {@link #maxRow(long)}. *

    - * Non overlapping pages can be collected together in a {@link PageStore}, which provides the - * {@link ChunkSource} interface to the collection of all of its Pages. + * Non overlapping pages can be collected together in a {@link PageStore}, which provides the {@link ChunkSource} + * interface to the collection of all of its Pages. *

    - * There are two distinct use cases/types of pages. The first use case are {@code Page}s which - * always have a length() > 0. These store length() values, which can be assessed via the - * {@link ChunkSource} methods. Valid {@link OrderedKeys} passed to those methods will have their - * offset in the range [firstRowOffset(), firstRowOffset() + length()). Passing OrderKeys with - * offsets outside of this range will have undefined results. + * There are two distinct use cases/types of pages. The first use case are {@code Page}s which always have a length() > + * 0. These store length() values, which can be assessed via the {@link ChunkSource} methods. Valid {@link OrderedKeys} + * passed to those methods will have their offset in the range [firstRowOffset(), firstRowOffset() + length()). Passing + * OrderKeys with offsets outside of this range will have undefined results. *

    - * The second use case will always have length() == 0 and firstRowOffset() == 0. These represent - * "Null" regions which return a fixed value, typically a null value, for every {@link OrderedKeys} - * passed into the {@link ChunkSource} methods. In order to have this use case, override - * {@code length} and override {@code lastRow} as {@code maxRow}. + * The second use case will always have length() == 0 and firstRowOffset() == 0. These represent "Null" regions which + * return a fixed value, typically a null value, for every {@link OrderedKeys} passed into the {@link ChunkSource} + * methods. In order to have this use case, override {@code length} and override {@code lastRow} as {@code maxRow}. *

    - * Though the {@link ChunkSource} methods ignore the non-offset portion of the rows in the - * {@link OrderedKeys}, they can assume they are identical for all the passed in elements of the - * {@link OrderedKeys}. For instance, they can use the simple difference between the complete row - * value to determine a length. + * Though the {@link ChunkSource} methods ignore the non-offset portion of the rows in the {@link OrderedKeys}, they can + * assume they are identical for all the passed in elements of the {@link OrderedKeys}. For instance, they can use the + * simple difference between the complete row value to determine a length. */ public interface Page extends PagingChunkSource { /** - * @return the first row of this page, after applying the {@link #mask()}, which refers to the - * first row of this page. + * @return the first row of this page, after applying the {@link #mask()}, which refers to the first row of this + * page. */ long firstRowOffset(); @@ -67,51 +63,46 @@ interface WithDefaults extends Page, DefaultChunkSource< @Override @FinalDefault default void fillChunkAppend(@NotNull final FillContext context, - @NotNull final WritableChunk destination, - @NotNull final OrderedKeys.Iterator orderedKeysIterator) { - fillChunkAppend(context, destination, orderedKeysIterator - .getNextOrderedKeysThrough(maxRow(orderedKeysIterator.peekNextKey()))); + @NotNull final WritableChunk destination, + @NotNull final OrderedKeys.Iterator orderedKeysIterator) { + fillChunkAppend(context, destination, + orderedKeysIterator.getNextOrderedKeysThrough(maxRow(orderedKeysIterator.peekNextKey()))); } @Override @FinalDefault default void fillChunk(@NotNull final FillContext context, - @NotNull final WritableChunk destination, - @NotNull final OrderedKeys orderedKeys) { + @NotNull final WritableChunk destination, @NotNull final OrderedKeys orderedKeys) { destination.setSize(0); fillChunkAppend(context, destination, orderedKeys); } /** - * Appends the values referenced by 
{@code orderKeys} onto {@code destination}. - * {@code orderKeys} are assumed to be entirely contained on this {@code Page}. + * Appends the values referenced by {@code orderKeys} onto {@code destination}. {@code orderKeys} are assumed to + * be entirely contained on this {@code Page}. */ - void fillChunkAppend(@NotNull FillContext context, - @NotNull WritableChunk destination, @NotNull OrderedKeys orderedKeys); + void fillChunkAppend(@NotNull FillContext context, @NotNull WritableChunk destination, + @NotNull OrderedKeys orderedKeys); } /** - * Helper defaults for pages that represent a repeating value, e.g. null or partitioning column - * regions. + * Helper defaults for pages that represent a repeating value, e.g. null or partitioning column regions. */ - interface WithDefaultsForRepeatingValues - extends Page, DefaultChunkSource { + interface WithDefaultsForRepeatingValues extends Page, DefaultChunkSource { @Override @FinalDefault default void fillChunkAppend(@NotNull final FillContext context, - @NotNull final WritableChunk destination, - @NotNull final OrderedKeys.Iterator orderedKeysIterator) { + @NotNull final WritableChunk destination, + @NotNull final OrderedKeys.Iterator orderedKeysIterator) { fillChunkAppend(context, destination, LongSizedDataStructure.intSize("fillChunkAppend", - orderedKeysIterator - .advanceAndGetPositionDistance(maxRow(orderedKeysIterator.peekNextKey()) + 1))); + orderedKeysIterator.advanceAndGetPositionDistance(maxRow(orderedKeysIterator.peekNextKey()) + 1))); } @Override @FinalDefault default void fillChunk(@NotNull final FillContext context, - @NotNull final WritableChunk destination, - @NotNull final OrderedKeys orderedKeys) { + @NotNull final WritableChunk destination, @NotNull final OrderedKeys orderedKeys) { destination.setSize(0); fillChunkAppend(context, destination, orderedKeys.intSize()); } @@ -119,13 +110,13 @@ default void fillChunk(@NotNull final FillContext context, /** * Appends the values repeating value {@code 
length} times to {@code destination}. */ - void fillChunkAppend(@NotNull FillContext context, - @NotNull WritableChunk destination, int length); + void fillChunkAppend(@NotNull FillContext context, @NotNull WritableChunk destination, + int length); } /** - * Assuming {@code orderedKeysIterator} is position at its first index key on this page, consume - * all keys on this page. + * Assuming {@code orderedKeysIterator} is position at its first index key on this page, consume all keys on this + * page. * * @param orderedKeysIterator The iterator to advance */ @@ -135,25 +126,21 @@ default void advanceToNextPage(@NotNull final OrderedKeys.Iterator orderedKeysIt } /** - * Assuming {@code orderedKeysIterator} is position at its first index key on this page, consume - * all keys on this page and return the number of keys consumed. + * Assuming {@code orderedKeysIterator} is position at its first index key on this page, consume all keys on this + * page and return the number of keys consumed. * * @param orderedKeysIterator The iterator to advance */ @FinalDefault - default long advanceToNextPageAndGetPositionDistance( - @NotNull final OrderedKeys.Iterator orderedKeysIterator) { - return orderedKeysIterator - .advanceAndGetPositionDistance(maxRow(orderedKeysIterator.peekNextKey()) + 1); + default long advanceToNextPageAndGetPositionDistance(@NotNull final OrderedKeys.Iterator orderedKeysIterator) { + return orderedKeysIterator.advanceAndGetPositionDistance(maxRow(orderedKeysIterator.peekNextKey()) + 1); } /** - * Assuming {@code searchIterator} is position at its first index key on this page, consume all - * keys on this page. + * Assuming {@code searchIterator} is position at its first index key on this page, consume all keys on this page. 
* * @param searchIterator The iterator to advance - * @return The result of - * {@link io.deephaven.db.v2.utils.ReadOnlyIndex.SearchIterator#advance(long)} + * @return The result of {@link io.deephaven.db.v2.utils.ReadOnlyIndex.SearchIterator#advance(long)} */ @FinalDefault default boolean advanceToNextPage(@NotNull final ReadOnlyIndex.SearchIterator searchIterator) { diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/page/PageStore.java b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/page/PageStore.java index d0761c995fa..64db4082d35 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/page/PageStore.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/page/PageStore.java @@ -6,11 +6,11 @@ import org.jetbrains.annotations.NotNull; /** - * PageStores are a collection of non-overlapping pages, which provides a single {@link ChunkSource} - * interface across all the pages. + * PageStores are a collection of non-overlapping pages, which provides a single {@link ChunkSource} interface across + * all the pages. */ public interface PageStore> - extends PagingChunkSource, DefaultChunkSource.SupportsContiguousGet { + extends PagingChunkSource, DefaultChunkSource.SupportsContiguousGet { /** * @return The page containing row, after applying {@link #mask()}. 
@@ -19,8 +19,7 @@ public interface PageStore getChunk(@NotNull final GetContext context, - @NotNull final OrderedKeys orderedKeys) { + default Chunk getChunk(@NotNull final GetContext context, @NotNull final OrderedKeys orderedKeys) { if (orderedKeys.size() == 0) { return getChunkType().getEmptyChunk(); } @@ -41,8 +40,7 @@ default Chunk getChunk(@NotNull final GetContext context, @Override @NotNull - default Chunk getChunk(@NotNull final GetContext context, final long firstKey, - final long lastKey) { + default Chunk getChunk(@NotNull final GetContext context, final long firstKey, final long lastKey) { final FillContext fillContext = DefaultGetContext.getFillContext(context); final PAGE page = getPageContaining(fillContext, firstKey); final long pageMaxRow = page.maxRow(firstKey); @@ -59,9 +57,8 @@ default Chunk getChunk(@NotNull final GetContext context, final } @Override - default void fillChunk(@NotNull final FillContext context, - @NotNull final WritableChunk destination, - @NotNull final OrderedKeys orderedKeys) { + default void fillChunk(@NotNull final FillContext context, @NotNull final WritableChunk destination, + @NotNull final OrderedKeys orderedKeys) { if (orderedKeys.size() == 0) { return; } @@ -79,8 +76,8 @@ default void fillChunk(@NotNull final FillContext context, @Override default void fillChunkAppend(@NotNull final FillContext context, - @NotNull final WritableChunk destination, - @NotNull final OrderedKeys.Iterator orderedKeysIterator) { + @NotNull final WritableChunk destination, + @NotNull final OrderedKeys.Iterator orderedKeysIterator) { long firstKey = orderedKeysIterator.peekNextKey(); final long pageStoreMaxKey = maxRow(firstKey); @@ -88,23 +85,21 @@ default void fillChunkAppend(@NotNull final FillContext context, final PAGE page = getPageContaining(context, firstKey); page.fillChunkAppend(context, destination, orderedKeysIterator); } while (orderedKeysIterator.hasMore() && - (firstKey = orderedKeysIterator.peekNextKey()) <= 
pageStoreMaxKey); + (firstKey = orderedKeysIterator.peekNextKey()) <= pageStoreMaxKey); } /** - * This is a helper which is the same as a call to {@link #fillChunkAppend}, except that some of - * the initial work has already been done for the first call to - * {@link Page#fillChunkAppend(FillContext, WritableChunk, OrderedKeys.Iterator)} which we don't - * want to repeat. + * This is a helper which is the same as a call to {@link #fillChunkAppend}, except that some of the initial work + * has already been done for the first call to + * {@link Page#fillChunkAppend(FillContext, WritableChunk, OrderedKeys.Iterator)} which we don't want to repeat. */ // Should be private @FinalDefault default void doFillChunkAppend(@NotNull final FillContext context, - @NotNull final WritableChunk destination, - @NotNull final OrderedKeys orderedKeys, @NotNull final Page page) { + @NotNull final WritableChunk destination, + @NotNull final OrderedKeys orderedKeys, @NotNull final Page page) { destination.setSize(0); - try ( - final OrderedKeys.Iterator orderedKeysIterator = orderedKeys.getOrderedKeysIterator()) { + try (final OrderedKeys.Iterator orderedKeysIterator = orderedKeys.getOrderedKeysIterator()) { page.fillChunkAppend(context, destination, orderedKeysIterator); fillChunkAppend(context, destination, orderedKeysIterator); } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/page/PagingChunkSource.java b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/page/PagingChunkSource.java index 062b511d075..21efb28b71a 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/page/PagingChunkSource.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/page/PagingChunkSource.java @@ -8,18 +8,14 @@ /** * In order to be able to cache and reuse ChunkSources across multiple Tables (or other references), - * {@code PagingChunkSource} adds a mask to the ChunkSource, and supports some additional - * {@code fillChunk} methods. 
+ * {@code PagingChunkSource} adds a mask to the ChunkSource, and supports some additional {@code fillChunk} methods. * - * The mask is a bitmask of the lower order bits of the keys in an OrderKeys, which specifies the - * bits from the {@link OrderedKeys} which will be used to uniquely specify the offsets into the - * ChunkSource elements on calls to + * The mask is a bitmask of the lower order bits of the keys in an OrderKeys, which specifies the bits from the + * {@link OrderedKeys} which will be used to uniquely specify the offsets into the ChunkSource elements on calls to * {@link ChunkSource#fillChunk(FillContext, WritableChunk, OrderedKeys)}, - * {@link ChunkSource#getChunk(GetContext, OrderedKeys)}, - * {@link ChunkSource#getChunk(GetContext, long, long)}. + * {@link ChunkSource#getChunk(GetContext, OrderedKeys)}, {@link ChunkSource#getChunk(GetContext, long, long)}. * - * Also, a new method - * {@link PagingChunkSource#fillChunkAppend(FillContext, WritableChunk, OrderedKeys.Iterator)} is + * Also, a new method {@link PagingChunkSource#fillChunkAppend(FillContext, WritableChunk, OrderedKeys.Iterator)} is * added, which supports doing a fillChunk incrementally across a series of pages. */ public interface PagingChunkSource extends ChunkSource { @@ -27,32 +23,31 @@ public interface PagingChunkSource extends ChunkSou /** * This mask is applied to {@link OrderedKeys} which are passed into * {@link #getChunk(ChunkSource.GetContext, OrderedKeys)} and - * {@link #fillChunk(ChunkSource.FillContext, WritableChunk, OrderedKeys)}. This allows the - * {@link PagingChunkSource PagingChunkSources} to be cached, and reused even if they are - * properly relocated in key space. + * {@link #fillChunk(ChunkSource.FillContext, WritableChunk, OrderedKeys)}. This allows the {@link PagingChunkSource + * PagingChunkSources} to be cached, and reused even if they are properly relocated in key space. 
* - * @return the mask for this page, which must be a bitmask representing the some number of lower - * order bits of a long. + * @return the mask for this page, which must be a bitmask representing the some number of lower order bits of a + * long. */ long mask(); /** *

    - * The {@code maxRow} is the greatest possible row which may reference this ChunkSource. This - * method is used by {@link #fillChunkAppend(FillContext, WritableChunk, OrderedKeys.Iterator)} - * to determine which of its {@code OrderedKeys} are referencing this {@code PagingChunkSource}. + * The {@code maxRow} is the greatest possible row which may reference this ChunkSource. This method is used by + * {@link #fillChunkAppend(FillContext, WritableChunk, OrderedKeys.Iterator)} to determine which of its + * {@code OrderedKeys} are referencing this {@code PagingChunkSource}. *

    * *

    - * The default implementation assumes that only one {@code PagingChunkSource} exits for each - * page reference. That is, there is only one {@code PagingChunkSource} for {@code OrderedKey}s - * with the same bits outside of {@link #mask()}. + * The default implementation assumes that only one {@code PagingChunkSource} exits for each page reference. That + * is, there is only one {@code PagingChunkSource} for {@code OrderedKey}s with the same bits outside of + * {@link #mask()}. *

    * *

    - * It is also possible to pack multiple, non-overlapping {@code PagingChunkSources} into the - * same page reference. In this case, one typically will want to override {@code maxRow}. An - * example such implementation is {@link ChunkPage}. + * It is also possible to pack multiple, non-overlapping {@code PagingChunkSources} into the same page reference. In + * this case, one typically will want to override {@code maxRow}. An example such implementation is + * {@link ChunkPage}. * * @param row Any row contained on this page. * @return the maximum last row of this page, located in the same way as row. @@ -63,31 +58,27 @@ default long maxRow(final long row) { /** *

    - * Similar to {@link #fillChunk(FillContext, WritableChunk, OrderedKeys)}, except that the - * values from the ChunkSource are appended to {@code destination}, rather than placed at the - * beginning. + * Similar to {@link #fillChunk(FillContext, WritableChunk, OrderedKeys)}, except that the values from the + * ChunkSource are appended to {@code destination}, rather than placed at the beginning. *

    * *

    - * The values to fill into {@code destination} are specified by {@code orderedKeysIterator}, - * whose {@link OrderedKeys.Iterator#firstKey()} must exist, and must be represented by this - * {@code PagingChunkSource} (modulo {#link @mask}), otherwise results are undefined. + * The values to fill into {@code destination} are specified by {@code orderedKeysIterator}, whose + * {@link OrderedKeys.Iterator#firstKey()} must exist, and must be represented by this {@code PagingChunkSource} + * (modulo {#link @mask}), otherwise results are undefined. *

    * *

    * No more than the elements in {@code orderedKeysIterator}, which are on the same page as - * {@link OrderedKeys.Iterator#firstKey()}, have their values appended to {@code destination}, - * and consumed from {@code orderedKeysIterator}. Keys are on the same page when the bits - * outside of {@link #mask()} are identical. + * {@link OrderedKeys.Iterator#firstKey()}, have their values appended to {@code destination}, and consumed from + * {@code orderedKeysIterator}. Keys are on the same page when the bits outside of {@link #mask()} are identical. * - * @param context A context containing all mutable/state related data used in retrieving the - * Chunk. In particular, the Context may be used to provide a Chunk data pool + * @param context A context containing all mutable/state related data used in retrieving the Chunk. In particular, + * the Context may be used to provide a Chunk data pool * @param destination The chunk to append the results to. - * @param orderedKeysIterator The iterator to the ordered keys, which contain at least the keys - * to extract from this {@code ChunkSource}. The keys to extract will be at the beginning - * of iteration order. + * @param orderedKeysIterator The iterator to the ordered keys, which contain at least the keys to extract from this + * {@code ChunkSource}. The keys to extract will be at the beginning of iteration order. 
*/ - void fillChunkAppend(@NotNull FillContext context, - @NotNull WritableChunk destination, - @NotNull OrderedKeys.Iterator orderedKeysIterator); + void fillChunkAppend(@NotNull FillContext context, @NotNull WritableChunk destination, + @NotNull OrderedKeys.Iterator orderedKeysIterator); } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/sized/SizedChunk.java b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/sized/SizedChunk.java index 252cbd71e9b..da1c2a2641f 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/sized/SizedChunk.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/sized/SizedChunk.java @@ -56,8 +56,8 @@ public WritableChunk ensureCapacity(int capacity) { * * If the chunk has existing data, then it is copied to the new chunk. * - * If the underlying chunk already exists, then the size of the chunk is the original size. If - * the chunk did not exist, then the size of the returned chunk is zero. + * If the underlying chunk already exists, then the size of the chunk is the original size. If the chunk did not + * exist, then the size of the returned chunk is zero. * * @param capacity the minimum capacity for the chunk. 
* diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/util/SimpleTypeMap.java b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/util/SimpleTypeMap.java index 904bedcce0c..2ebe579ee95 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/util/SimpleTypeMap.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/util/SimpleTypeMap.java @@ -4,9 +4,8 @@ public final class SimpleTypeMap { - public static SimpleTypeMap create(V forBoolean, V forChar, V forByte, V forShort, - V forInt, V forLong, - V forFloat, V forDouble, V forObject) { + public static SimpleTypeMap create(V forBoolean, V forChar, V forByte, V forShort, V forInt, V forLong, + V forFloat, V forDouble, V forObject) { final HashMap, V> map = new HashMap<>(); map.put(boolean.class, forBoolean); map.put(char.class, forChar); diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/util/chunkfillers/ChunkFiller.java b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/util/chunkfillers/ChunkFiller.java index 4ecf65546b0..13e5d6dacde 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/util/chunkfillers/ChunkFiller.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/util/chunkfillers/ChunkFiller.java @@ -39,21 +39,19 @@ static ChunkFiller fromChunkType(final ChunkType chunkType) { void fillByIndices(ElementSource src, OrderedKeys keys, WritableChunk dest); - void fillByIndices(ElementSource src, LongChunk chunk, - WritableChunk dest); + void fillByIndices(ElementSource src, LongChunk chunk, WritableChunk dest); void fillPrevByRanges(ElementSource src, OrderedKeys keys, WritableChunk dest); void fillPrevByIndices(ElementSource src, OrderedKeys keys, WritableChunk dest); void fillPrevByIndices(ElementSource src, LongChunk chunk, - WritableChunk dest); + WritableChunk dest); /** - * This doesn't really belong here but we are putting it here for now for implementation - * convenience. 
In the long run we may want to generalize this functionality, or, at the very - * least, move it to some "ColumnSourceFiller" class. + * This doesn't really belong here but we are putting it here for now for implementation convenience. In the long + * run we may want to generalize this functionality, or, at the very least, move it to some "ColumnSourceFiller" + * class. */ - void fillFromSingleValue(ElementSource src, long srcKey, WritableSource dest, - OrderedKeys destKeys); + void fillFromSingleValue(ElementSource src, long srcKey, WritableSource dest, OrderedKeys destKeys); } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/util/factories/ChunkFactory.java b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/util/factories/ChunkFactory.java index 795029679bf..bf0edf555c4 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/util/factories/ChunkFactory.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/util/factories/ChunkFactory.java @@ -31,8 +31,7 @@ public interface ChunkFactory { ChunkChunk chunkChunkWrap(Chunk[] array); @NotNull - ChunkChunk chunkChunkWrap(Chunk[] array, int offset, - int capacity); + ChunkChunk chunkChunkWrap(Chunk[] array, int offset, int capacity); @NotNull DbArrayBase dbArrayWrap(Object array); @@ -44,8 +43,7 @@ ChunkChunk chunkChunkWrap(Chunk[] array, int offs ChunkPage pageWrap(long beginRow, Object array, long mask); @NotNull - ChunkPage pageWrap(long beginRow, Object array, int offset, - int capacity, long mask); + ChunkPage pageWrap(long beginRow, Object array, int offset, int capacity, long mask); @NotNull ResettableReadOnlyChunk makeResettableReadOnlyChunk(); @@ -60,12 +58,11 @@ ChunkPage pageWrap(long beginRow, Object array, int off WritableChunkChunk makeWritableChunkChunk(int capacity); @NotNull - WritableChunk writableChunkWrap(Object array, int offset, - int capacity); + WritableChunk writableChunkWrap(Object array, int offset, int capacity); @NotNull - WritableChunkChunk 
writableChunkChunkWrap(WritableChunk[] array, - int offset, int capacity); + WritableChunkChunk writableChunkChunkWrap(WritableChunk[] array, int offset, + int capacity); @NotNull ResettableWritableChunk makeResettableWritableChunk(); diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/util/pools/ChunkPool.java b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/util/pools/ChunkPool.java index 36359e379dc..b3d53ab898d 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/util/pools/ChunkPool.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/util/pools/ChunkPool.java @@ -13,12 +13,12 @@ public interface ChunkPool { /** - * Take a {@link WritableChunk} of at least the specified {@code capacity}. The result belongs - * to the caller until released. + * Take a {@link WritableChunk} of at least the specified {@code capacity}. The result belongs to the caller until + * released. * * @param capacity The minimum capacity for the result - * @return A {@link WritableChunk} of at least the specified {@code capacity} that belongs to - * the caller until released + * @return A {@link WritableChunk} of at least the specified {@code capacity} that belongs to the caller until + * released */ WritableChunk takeWritableChunk(int capacity); @@ -41,8 +41,7 @@ public interface ChunkPool { * * @param resettableChunk The chunk to give */ - void giveResettableChunk( - @NotNull ResettableReadOnlyChunk resettableChunk); + void giveResettableChunk(@NotNull ResettableReadOnlyChunk resettableChunk); /** * Take a {@link ResettableWritableChunk}. The result belongs to the caller until released. 
@@ -56,6 +55,5 @@ void giveResettableChunk( * * @param resettableWritableChunk The chunk to give */ - void giveResettableWritableChunk( - @NotNull ResettableWritableChunk resettableWritableChunk); + void giveResettableWritableChunk(@NotNull ResettableWritableChunk resettableWritableChunk); } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/util/pools/ChunkPoolConstants.java b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/util/pools/ChunkPoolConstants.java index 185bd8254f2..061b020f187 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/util/pools/ChunkPoolConstants.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/util/pools/ChunkPoolConstants.java @@ -15,7 +15,7 @@ final class ChunkPoolConstants { @VisibleForTesting static final int LARGEST_POOLED_CHUNK_LOG2_CAPACITY = 16; static final int NUM_POOLED_CHUNK_CAPACITIES = - LARGEST_POOLED_CHUNK_LOG2_CAPACITY - SMALLEST_POOLED_CHUNK_LOG2_CAPACITY + 1; + LARGEST_POOLED_CHUNK_LOG2_CAPACITY - SMALLEST_POOLED_CHUNK_LOG2_CAPACITY + 1; static int checkCapacityBounds(final int chunkCapacity) { return Require.geqZero(chunkCapacity, "chunkCapacity"); @@ -32,7 +32,7 @@ static int getPoolIndexForTake(final int minimumChunkCapacity) { return 0; } final int roundedChunkLog2Capacity = - Math.max(MathUtil.ceilLog2(minimumChunkCapacity), SMALLEST_POOLED_CHUNK_LOG2_CAPACITY); + Math.max(MathUtil.ceilLog2(minimumChunkCapacity), SMALLEST_POOLED_CHUNK_LOG2_CAPACITY); if (roundedChunkLog2Capacity > LARGEST_POOLED_CHUNK_LOG2_CAPACITY) { return -1; } @@ -51,7 +51,7 @@ static int getPoolIndexForGive(final int actualChunkCapacity) { } final int chunkLog2Capacity = MathUtil.ceilLog2(actualChunkCapacity); if (chunkLog2Capacity < SMALLEST_POOLED_CHUNK_LOG2_CAPACITY - || chunkLog2Capacity > LARGEST_POOLED_CHUNK_LOG2_CAPACITY) { + || chunkLog2Capacity > LARGEST_POOLED_CHUNK_LOG2_CAPACITY) { return -1; } return getChunkLog2CapacityOffset(chunkLog2Capacity); diff --git 
a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/util/pools/ChunkPoolReleaseTracking.java b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/util/pools/ChunkPoolReleaseTracking.java index c2a9dcf1113..935c2dc557b 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/util/pools/ChunkPoolReleaseTracking.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/util/pools/ChunkPoolReleaseTracking.java @@ -28,16 +28,13 @@ private static void enable(final ReleaseTracker.Factory factory, boolean preChec return; } if (!factory.isMyType(releaseTracker.getClass())) { - throw new IllegalStateException( - "Can't enable to a different tracking type (strict versus not)"); + throw new IllegalStateException("Can't enable to a different tracking type (strict versus not)"); } if (preCheck) { try { releaseTracker.check(); - } catch (ReleaseTracker.LeakedException - | ReleaseTracker.MissedReleaseException checkException) { - throw new IllegalStateException("Release tracker had errors on enable", - checkException); + } catch (ReleaseTracker.LeakedException | ReleaseTracker.MissedReleaseException checkException) { + throw new IllegalStateException("Release tracker had errors on enable", checkException); } } } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/util/pools/MultiChunkPool.java b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/util/pools/MultiChunkPool.java index 6787fdc6f17..d4d5153109d 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/util/pools/MultiChunkPool.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/util/pools/MultiChunkPool.java @@ -8,14 +8,13 @@ import java.util.Map; /** - * Provides a set of per-type {@link ChunkPool}s. Normally accessed via a {@link ThreadLocal}, to - * allow some threads to share a common pool and others to allocate their own. + * Provides a set of per-type {@link ChunkPool}s. 
Normally accessed via a {@link ThreadLocal}, to allow some threads to + * share a common pool and others to allocate their own. */ public final class MultiChunkPool { private static final MultiChunkPool SHARED_POOL = new MultiChunkPool(); - private static final ThreadLocal POOL_THREAD_LOCAL = - ThreadLocal.withInitial(() -> SHARED_POOL); + private static final ThreadLocal POOL_THREAD_LOCAL = ThreadLocal.withInitial(() -> SHARED_POOL); public static void enableDedicatedPoolForThisThread() { if (POOL_THREAD_LOCAL.get() == SHARED_POOL) { diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/util/pools/PoolableChunk.java b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/util/pools/PoolableChunk.java index 43ca608589b..91d2baa3bb8 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/chunk/util/pools/PoolableChunk.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/chunk/util/pools/PoolableChunk.java @@ -4,8 +4,8 @@ import io.deephaven.util.SafeCloseable; /** - * Marker interface for {@link Chunk} subclasses that can be kept with in a {@link ChunkPool}, and - * whose {@link #close()} method will return them to the appropriate pool. + * Marker interface for {@link Chunk} subclasses that can be kept with in a {@link ChunkPool}, and whose + * {@link #close()} method will return them to the appropriate pool. 
*/ public interface PoolableChunk extends SafeCloseable { } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/chunkcolumnsource/ChunkColumnSource.java b/DB/src/main/java/io/deephaven/db/v2/sources/chunkcolumnsource/ChunkColumnSource.java index e9215d7f8fa..24ffd6d7423 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/chunkcolumnsource/ChunkColumnSource.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/chunkcolumnsource/ChunkColumnSource.java @@ -31,12 +31,10 @@ static ChunkColumnSource make(ChunkType chunkType, Class dataType) { * * @param chunkType the type of chunk * @param dataType the datatype for the newly created column source - * @param componentType the component type for the newly created column source (only applies to - * Objects) + * @param componentType the component type for the newly created column source (only applies to Objects) * @return an empty ChunkColumnSource */ - static ChunkColumnSource make(ChunkType chunkType, Class dataType, - Class componentType) { + static ChunkColumnSource make(ChunkType chunkType, Class dataType, Class componentType) { switch (chunkType) { case Char: return new CharChunkColumnSource(); @@ -55,8 +53,7 @@ static ChunkColumnSource make(ChunkType chunkType, Class dataType, case Object: return new ObjectChunkColumnSource<>(dataType, componentType); default: - throw new IllegalArgumentException( - "Can not make ChunkColumnSource of type " + chunkType); + throw new IllegalArgumentException("Can not make ChunkColumnSource of type " + chunkType); } } @@ -65,12 +62,11 @@ static ChunkColumnSource make(ChunkType chunkType, Class dataType, * * @param chunkType the type of chunk * @param dataType the datatype for the newly created column source - * @param sharedOffsetForData an array list representing the shared offsets for data across - * several ChunkColumnSources + * @param sharedOffsetForData an array list representing the shared offsets for data across several + * ChunkColumnSources * @return an empty 
ChunkColumnSource */ - static ChunkColumnSource make(ChunkType chunkType, Class dataType, - TLongArrayList sharedOffsetForData) { + static ChunkColumnSource make(ChunkType chunkType, Class dataType, TLongArrayList sharedOffsetForData) { return make(chunkType, dataType, null, sharedOffsetForData); } @@ -79,14 +75,13 @@ static ChunkColumnSource make(ChunkType chunkType, Class dataType, * * @param chunkType the type of chunk * @param dataType the datatype for the newly created column source - * @param componentType the component type for the newly created column source (only applies to - * Objects) - * @param sharedOffsetForData an array list representing the shared offsets for data across - * several ChunkColumnSources + * @param componentType the component type for the newly created column source (only applies to Objects) + * @param sharedOffsetForData an array list representing the shared offsets for data across several + * ChunkColumnSources * @return an empty ChunkColumnSource */ static ChunkColumnSource make(ChunkType chunkType, Class dataType, Class componentType, - TLongArrayList sharedOffsetForData) { + TLongArrayList sharedOffsetForData) { switch (chunkType) { case Char: return new CharChunkColumnSource(sharedOffsetForData); @@ -105,8 +100,7 @@ static ChunkColumnSource make(ChunkType chunkType, Class dataType, Class(dataType, componentType, sharedOffsetForData); default: - throw new IllegalArgumentException( - "Can not make ChunkColumnSource of type " + chunkType); + throw new IllegalArgumentException("Can not make ChunkColumnSource of type " + chunkType); } } @@ -122,8 +116,8 @@ static ChunkColumnSource make(ChunkType chunkType, Class dataType, Class - * Clear will discard the currently held chunks. This should not be called if a table will - * continue to reference the column source; as it violates the immutability contract. + * Clear will discard the currently held chunks. 
This should not be called if a table will continue to reference the + * column source; as it violates the immutability contract. */ void clear(); diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/deltaaware/ChunkAdapter.java b/DB/src/main/java/io/deephaven/db/v2/sources/deltaaware/ChunkAdapter.java index 6a8aed10d72..cc0b8eb0328 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/deltaaware/ChunkAdapter.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/deltaaware/ChunkAdapter.java @@ -7,10 +7,10 @@ public class ChunkAdapter { public static ChunkAdapter create(Class type, final WritableChunkSink baseline, - final WritableChunkSink delta) { + final WritableChunkSink delta) { // noinspection unchecked return type == Boolean.class ? (ChunkAdapter) new BooleanChunkAdapter(baseline, delta) - : new ChunkAdapter<>(baseline, delta); + : new ChunkAdapter<>(baseline, delta); } /** @@ -18,8 +18,8 @@ public static ChunkAdapter create(Class type, final WritableChunkSink bas */ private final WritableChunkSink baseline; /** - * A copy of DeltaAwareColumnSource.delta, kept here for convenience, and updated when the - * corresponding delta changes. + * A copy of DeltaAwareColumnSource.delta, kept here for convenience, and updated when the corresponding delta + * changes. */ private WritableChunkSink delta; /** @@ -31,8 +31,7 @@ public static ChunkAdapter create(Class type, final WritableChunkSink bas */ private ChunkSource.FillContext deltaContext; /** - * A context suitable for getting filling the delta from a context. Updated when the - * corresponding delta changes. + * A context suitable for getting filling the delta from a context. Updated when the corresponding delta changes. 
*/ private WritableChunkSink.FillFromContext deltaFillFromContext; /** diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/deltaaware/ChunkMerger.java b/DB/src/main/java/io/deephaven/db/v2/sources/deltaaware/ChunkMerger.java index 311817b3634..b6b71954af2 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/deltaaware/ChunkMerger.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/deltaaware/ChunkMerger.java @@ -8,9 +8,8 @@ class ChunkMerger { // Copy the data back into the positions where it needs to go. - static void merge(Chunk bChunk, Chunk dChunk, - OrderedKeys bKeys, - OrderedKeys dKeys, WritableChunk dest) { + static void merge(Chunk bChunk, Chunk dChunk, OrderedKeys bKeys, + OrderedKeys dKeys, WritableChunk dest) { final ChunkMerger bMerger = new ChunkMerger<>(bChunk, bKeys); final ChunkMerger dMerger = new ChunkMerger<>(dChunk, dKeys); @@ -40,14 +39,13 @@ private ChunkMerger(Chunk src, OrderedKeys keys) { } /** - * @return New destOffset. If the new offset is the same as the input parameter, then I did no - * work. + * @return New destOffset. If the new offset is the same as the input parameter, then I did no work. */ private int copyIfYouCan(WritableChunk dest, int destOffset, ChunkMerger other) { int contiguousSize = 0; - final long otherFirst = other.keyOffset == other.keyRanges.size() ? Long.MAX_VALUE - : other.keyRanges.get(other.keyOffset); + final long otherFirst = + other.keyOffset == other.keyRanges.size() ? 
Long.MAX_VALUE : other.keyRanges.get(other.keyOffset); while (true) { if (keyOffset == keyRanges.size()) { @@ -57,8 +55,7 @@ private int copyIfYouCan(WritableChunk dest, int destOffset, Chunk final long rangeFirst = keyRanges.get(keyOffset); final long rangeLast = keyRanges.get(keyOffset + 1); if (rangeFirst > otherFirst) { - // Get out because both (myself and my other) have keys but next smallest key is not - // mine + // Get out because both (myself and my other) have keys but next smallest key is not mine break; } contiguousSize += rangeLast - rangeFirst + 1; diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/deltaaware/DeltaAwareColumnSource.java b/DB/src/main/java/io/deephaven/db/v2/sources/deltaaware/DeltaAwareColumnSource.java index 9e0201922e3..3ea45cd600a 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/deltaaware/DeltaAwareColumnSource.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/deltaaware/DeltaAwareColumnSource.java @@ -54,8 +54,7 @@ // We calculate orderedKeys - deltaRows, representing the baseline keys in the baseline space. // baselineKeysBs = {0, 4, 10, 14} // -// We also calculate intersection(orderedKeys, deltaRows), representing the delta keys in the -// _baseline_ space. +// We also calculate intersection(orderedKeys, deltaRows), representing the delta keys in the _baseline_ space. // deltaKeysBS = {5, 9, 15} // // We translate the above using invert, representing the delta keys in the _delta_ space. @@ -68,14 +67,10 @@ // baselineChunk = {b0, b4, b10, b14} // deltaChunk = {d5, d9, d15} // -// To get these values back in the right spot, we analyze baselineKeysBs and deltaKeysBS. We take -// advantage of the -// property that these two sets do not intersect; furthermore, that their union is the original -// index requested by -// the caller. 
To do this, we simply count the number of consecutive (not necessarily adjacent) -// baseline items not -// interrupted by delta; and likewise the number of consecutive (not necessarily adjacent) delta -// items not +// To get these values back in the right spot, we analyze baselineKeysBs and deltaKeysBS. We take advantage of the +// property that these two sets do not intersect; furthermore, that their union is the original index requested by +// the caller. To do this, we simply count the number of consecutive (not necessarily adjacent) baseline items not +// interrupted by delta; and likewise the number of consecutive (not necessarily adjacent) delta items not // interrupted by baseline. In our example: // // We need the first two items from the baseline chunk: b0, b4 @@ -84,7 +79,7 @@ // Then the final item from the delta chunk: d15 public final class DeltaAwareColumnSource extends AbstractColumnSource - implements WritableSource, WritableChunkSink { + implements WritableSource, WritableChunkSink { /** * The initial size of the delta column source. */ @@ -104,15 +99,13 @@ private interface CapacityEnsurer { } /** - * A lambda that ensures the capacity of the baseline data structure. (We have this because the - * WritableChunkSink does not have an 'ensureCapacity', but the underlying data structure we use - * does). + * A lambda that ensures the capacity of the baseline data structure. (We have this because the WritableChunkSink + * does not have an 'ensureCapacity', but the underlying data structure we use does). */ private final CapacityEnsurer baselineCapacityEnsurer; /** - * A lambda that ensures the capacity of the delta data structure. (We have this because the - * WritableChunkSink does not have an 'ensureCapacity', but the underlying data structure we use - * does). + * A lambda that ensures the capacity of the delta data structure. 
(We have this because the WritableChunkSink does + * not have an 'ensureCapacity', but the underlying data structure we use does). */ private CapacityEnsurer deltaCapacityEnsurer; /** @@ -124,20 +117,19 @@ private interface CapacityEnsurer { */ private int deltaCapacity; /** - * The used delta keys (in the 'baseline' coordinate space). Null until - * startTrackingPrevValues() is called. This field is volatile because we want concurrent - * lockfree getters to see correct values from "get()" even though we might be in the middle of - * commitValues(). + * The used delta keys (in the 'baseline' coordinate space). Null until startTrackingPrevValues() is called. This + * field is volatile because we want concurrent lockfree getters to see correct values from "get()" even though we + * might be in the middle of commitValues(). */ private volatile Index deltaRows; /** - * The maximum key inserted into deltaRows during this phase. We use this to make sure that keys - * are not inserted out of numerical order. + * The maximum key inserted into deltaRows during this phase. We use this to make sure that keys are not inserted + * out of numerical order. */ private long maxKey; /** - * Adapter (not especially efficient) for turning single-element accesses into Chunk accesses. - * ThreadLocal because multiple threads might be concurrently calling various get() methods. + * Adapter (not especially efficient) for turning single-element accesses into Chunk accesses. ThreadLocal because + * multiple threads might be concurrently calling various get() methods. 
*/ private ThreadLocal> chunkAdapter; /** @@ -148,7 +140,7 @@ private interface CapacityEnsurer { public DeltaAwareColumnSource(Class type) { super(type); final SparseArrayColumnSource sparseBaseline = - SparseArrayColumnSource.getSparseMemoryColumnSource(getType(), null); + SparseArrayColumnSource.getSparseMemoryColumnSource(getType(), null); baseline = sparseBaseline; delta = baseline; @@ -159,34 +151,27 @@ public DeltaAwareColumnSource(Class type) { deltaCapacity = 0; deltaRows = null; - chunkAdapter = - ThreadLocal.withInitial(() -> ChunkAdapter.create(getType(), baseline, delta)); + chunkAdapter = ThreadLocal.withInitial(() -> ChunkAdapter.create(getType(), baseline, delta)); updateCommitter = null; } // ================================================================================================================== // CONTEXT METHODS // - // We have lots of different ways of fetching elements, and therefore lots of different 'fetch' - // methods. Furthermore, - // because each type of 'fetch' method needs a getContext method customized to it, we would in - // principle need one - // 'getContext' method for each kind of 'fetch' method. In practice, because certain 'fetch' - // methods share the same + // We have lots of different ways of fetching elements, and therefore lots of different 'fetch' methods. + // Furthermore, + // because each type of 'fetch' method needs a getContext method customized to it, we would in principle need one + // 'getContext' method for each kind of 'fetch' method. In practice, because certain 'fetch' methods share the same // Context, we can get away with fewer. // - // Breaking it town, there are twelve kinds of 'fetching' that one might want, represented as - // points in this + // Breaking it town, there are twelve kinds of 'fetching' that one might want, represented as points in this // three-dimensional space: // 1. Will you be doing get or fill? // 2. Will you be accessing baseline (aka prev), delta, or current? - // 3. 
FUTURE WORK: Will you be specifying all your keys up up front and slurping them - // sequentially (call this - // "sequential access") or will you be specifying OrderedKeys at every get call (call this - // "random access") + // 3. FUTURE WORK: Will you be specifying all your keys up up front and slurping them sequentially (call this + // "sequential access") or will you be specifying OrderedKeys at every get call (call this "random access") // - // Because #3 is future work we only have six types of "fetch" calls we care about, denoted - // compactly like this: + // Because #3 is future work we only have six types of "fetch" calls we care about, denoted compactly like this: // {get, fill} x {prev, delta, current}. // // These are their names @@ -197,8 +182,7 @@ public DeltaAwareColumnSource(Class type) { // {get, current}: getChunk // {fill, current}: fillChunk // - // To reduce the number of getContext methods, we group the above into triplets so we only need - // to provide two + // To reduce the number of getContext methods, we group the above into triplets so we only need to provide two // GetContext methods. The groupings and their names are: // {get} x {baseline, delta, current}: makeGetContext(int) // {fill} x {baseline, delta, current: makeFillContext(int) @@ -228,33 +212,29 @@ public FillContext makeFillContext(final int chunkSize, final SharedContext shar @Override public Chunk getChunk(@NotNull GetContext context, @NotNull OrderedKeys orderedKeys) { - // TODO: this can probably use the defaultChunkSource.defaultGetChunk and avoid this cast - // with a refactoring. + // TODO: this can probably use the defaultChunkSource.defaultGetChunk and avoid this cast with a refactoring. 
// noinspection unchecked return (Chunk) getOrFillChunk((DAContext) context, null, orderedKeys); } @Override public void fillChunk(@NotNull FillContext context, @NotNull WritableChunk dest, - @NotNull OrderedKeys orderedKeys) { + @NotNull OrderedKeys orderedKeys) { // Ignore return type. getOrFillChunk((DAContext) context, dest, orderedKeys); } /** - * This method encapsulates some shared logic for the 'get' and 'fill' paths. If you pass in - * {@code dest} = null, we assume you are doing a 'get'. Otherwise (if {@code dest} is not - * null), we assume you are doing a 'fill'. + * This method encapsulates some shared logic for the 'get' and 'fill' paths. If you pass in {@code dest} = null, we + * assume you are doing a 'get'. Otherwise (if {@code dest} is not null), we assume you are doing a 'fill'. * * @param context The context. - * @param optionalDest Null if you are doing a get, or destination chunk if you are doing a - * fill. + * @param optionalDest Null if you are doing a get, or destination chunk if you are doing a fill. * @param orderedKeys Keys to get. * @return The chunk if you are doing a get, or {@code dest} if you are doing a fill. */ - private Chunk getOrFillChunk(@NotNull DAContext context, - WritableChunk optionalDest, - @NotNull OrderedKeys orderedKeys) { + private Chunk getOrFillChunk(@NotNull DAContext context, WritableChunk optionalDest, + @NotNull OrderedKeys orderedKeys) { // Do the volatile read once final Index dRows = deltaRows; // Optimization if we're not tracking prev or if there are no deltas. 
@@ -263,8 +243,7 @@ private Chunk getOrFillChunk(@NotNull DAContext context, } // baselineKeysBS: (orderedKeys - deltaRows): baseline keys in the baseline coordinate space - // deltaKeysBS: (orderedKeys intersect deltaRows) delta keys, also in the baseline - // coordinate space + // deltaKeysBS: (orderedKeys intersect deltaRows) delta keys, also in the baseline coordinate space // deltaKeysDS: the above, translated to the delta coordinate space final Index[] splitResult = new Index[2]; splitKeys(orderedKeys, dRows, splitResult); @@ -285,22 +264,18 @@ private Chunk getOrFillChunk(@NotNull DAContext context, // Always use "get" to pull in the baseline and delta pieces final Chunk bChunk = baseline.getChunk(context.baseline.getContext, baselineKeysBS); final Chunk dChunk = delta.getChunk(context.delta.getContext, deltaKeysDS); - // Merge them into either the user-provided chunk, or our own preallocated chunk. Note that - // 'destToUse' will - // always be non-null. This is because if we arrived here from fillChunk(), then - // optionalDest will be non-null. - // Otherwise (if we arrived here from getChunk()), then optionalDest will be null, but - // context.optionalChunk + // Merge them into either the user-provided chunk, or our own preallocated chunk. Note that 'destToUse' will + // always be non-null. This is because if we arrived here from fillChunk(), then optionalDest will be non-null. + // Otherwise (if we arrived here from getChunk()), then optionalDest will be null, but context.optionalChunk // will be non-null (having been created through makeGetContext()). - final WritableChunk destToUse = - optionalDest != null ? optionalDest : context.optionalChunk; + final WritableChunk destToUse = optionalDest != null ? 
optionalDest : context.optionalChunk; ChunkMerger.merge(bChunk, dChunk, baselineKeysBS, deltaKeysBS, destToUse); return destToUse; } private static Chunk getOrFillSimple(ChunkSource src, GetAndFillContexts ctx, - WritableChunk optionalDest, - OrderedKeys orderedKeys) { + WritableChunk optionalDest, + OrderedKeys orderedKeys) { if (optionalDest == null) { return src.getChunk(ctx.getContext, orderedKeys); } @@ -315,15 +290,14 @@ private static Chunk getOrFillSimple(ChunkSource src, GetAndFill // ================================================================================================================== @Override - public Chunk getPrevChunk(@NotNull GetContext context, - @NotNull OrderedKeys orderedKeys) { + public Chunk getPrevChunk(@NotNull GetContext context, @NotNull OrderedKeys orderedKeys) { final DAContext dactx = (DAContext) context; return baseline.getChunk(dactx.baseline.getContext, orderedKeys); } @Override - public void fillPrevChunk(@NotNull FillContext context, - @NotNull WritableChunk dest, @NotNull OrderedKeys orderedKeys) { + public void fillPrevChunk(@NotNull FillContext context, @NotNull WritableChunk dest, + @NotNull OrderedKeys orderedKeys) { final DAContext dactx = (DAContext) context; baseline.fillChunk(dactx.baseline.optionalFillContext, dest, orderedKeys); } @@ -341,14 +315,14 @@ public void fillPrevChunk(@NotNull FillContext context, // ================================================================================================================== @Override - public void fillFromChunk(@NotNull FillFromContext context, - @NotNull Chunk src, @NotNull OrderedKeys orderedKeys) { + public void fillFromChunk(@NotNull FillFromContext context, @NotNull Chunk src, + @NotNull OrderedKeys orderedKeys) { throw new UnsupportedOperationException("TODO(kosak)"); } @Override - public void fillFromChunkUnordered(@NotNull FillFromContext context, - @NotNull Chunk src, @NotNull LongChunk keys) { + public void fillFromChunkUnordered(@NotNull 
FillFromContext context, @NotNull Chunk src, + @NotNull LongChunk keys) { throw new UnsupportedOperationException("TODO"); } @@ -509,10 +483,9 @@ public void set(final long key, final short value) { /** * @param index The key to look up. - * @return The index, translated into delta space, that the caller should use, or -1 if the - * caller should use the original index in baseline space. Will return -1 if either - * startTrackingPrevValues() has not been called yet, or if the index does not exist in - * the deltaRows. + * @return The index, translated into delta space, that the caller should use, or -1 if the caller should use the + * original index in baseline space. Will return -1 if either startTrackingPrevValues() has not been called + * yet, or if the index does not exist in the deltaRows. */ private long lookupIndexInDeltaSpace(final long index) { assertIndexValid(index); @@ -524,14 +497,13 @@ private long lookupIndexInDeltaSpace(final long index) { /** * @param index The key to look up. - * @return If we're not tracking previous values yet, simply return the key (note 1). Otherwise, - * if the key already exists in the 'deltaRows' set, return its index. Otherwise - * allocate a new element of the deltaRows set and return that index. + * @return If we're not tracking previous values yet, simply return the key (note 1). Otherwise, if the key already + * exists in the 'deltaRows' set, return its index. Otherwise allocate a new element of the deltaRows set + * and return that index. */ private long lookupOrCreateIndexInDeltaSpace(final long index) { assertIndexValid(index); - // We're in that special initial state where we're not tracking previous values, so we can - // just write directly + // We're in that special initial state where we're not tracking previous values, so we can just write directly // to the column source. 
if (baseline == delta) { return index; @@ -547,8 +519,7 @@ private long lookupOrCreateIndexInDeltaSpace(final long index) { } if (index < maxKey) { - throw new UnsupportedOperationException( - "New keys need to be inserted in ascending order, but " + index + + throw new UnsupportedOperationException("New keys need to be inserted in ascending order, but " + index + "came after" + maxKey); } maxKey = index; @@ -568,17 +539,16 @@ private long lookupOrCreateIndexInDeltaSpace(final long index) { private static void assertIndexValid(final long index) { if (index < 0) { throw new UnsupportedOperationException( - "DeltaAwareColumnSource does not accept negative indices: " + index); + "DeltaAwareColumnSource does not accept negative indices: " + index); } } private void commitValues() { try ( - final FillFromContext baselineCtx = baseline.makeFillFromContext(preferredChunkSize); - final WritableLongChunk orderedKeyRanges = - WritableLongChunk.makeWritableChunk(2); - final GetContext deltaCtx = delta.makeGetContext(preferredChunkSize); - final OrderedKeys.Iterator it = deltaRows.getOrderedKeysIterator()) { + final FillFromContext baselineCtx = baseline.makeFillFromContext(preferredChunkSize); + final WritableLongChunk orderedKeyRanges = WritableLongChunk.makeWritableChunk(2); + final GetContext deltaCtx = delta.makeGetContext(preferredChunkSize); + final OrderedKeys.Iterator it = deltaRows.getOrderedKeysIterator()) { long startKey = 0; while (it.hasMore()) { final OrderedKeys baselineOk = it.getNextOrderedKeysWithLength(preferredChunkSize); @@ -587,8 +557,7 @@ private void commitValues() { orderedKeyRanges.set(1, startKey + baselineOkSize - 1); orderedKeyRanges.setSize(2); startKey += baselineOkSize; - final OrderedKeys deltaOk = - OrderedKeys.wrapKeyRangesChunkAsOrderedKeys(orderedKeyRanges); + final OrderedKeys deltaOk = OrderedKeys.wrapKeyRangesChunkAsOrderedKeys(orderedKeyRanges); final Chunk data = delta.getChunk(deltaCtx, deltaOk); baseline.fillFromChunk(baselineCtx, 
data, baselineOk); } @@ -604,24 +573,22 @@ public void startTrackingPrevValues() { } deltaCapacity = INITIAL_DELTA_CAPACITY; final ArrayBackedColumnSource delta = - ArrayBackedColumnSource.getMemoryColumnSource(deltaCapacity, getType(), null); + ArrayBackedColumnSource.getMemoryColumnSource(deltaCapacity, getType(), null); this.delta = delta; deltaCapacityEnsurer = delta::ensureCapacity; deltaRows = Index.FACTORY.getEmptyIndex(); maxKey = Long.MIN_VALUE; /* - * When 'delta' changes, we need a way to notify all the ChunkAdapters about its new value. - * We say "all the ChunkAdapters" because chunkAdapter is a ThreadLocal, so there is one - * lying around for each thread that has happened to have called get* or set*. The reason - * the ChunkAdapters need to know about this change is because they have Contexts that need - * to be updated. The simplest way to update them is to just throw them all away and start - * with a fresh ThreadLocal. This is not that big of a deal because this method is called at - * most twice during the lifetime of a given DeltaAwareColumnSource: once at construction - * and once at the time of startTrackingPrevValues(). + * When 'delta' changes, we need a way to notify all the ChunkAdapters about its new value. We say "all the + * ChunkAdapters" because chunkAdapter is a ThreadLocal, so there is one lying around for each thread that has + * happened to have called get* or set*. The reason the ChunkAdapters need to know about this change is because + * they have Contexts that need to be updated. The simplest way to update them is to just throw them all away + * and start with a fresh ThreadLocal. This is not that big of a deal because this method is called at most + * twice during the lifetime of a given DeltaAwareColumnSource: once at construction and once at the time of + * startTrackingPrevValues(). 
*/ - chunkAdapter = - ThreadLocal.withInitial(() -> ChunkAdapter.create(getType(), baseline, delta)); + chunkAdapter = ThreadLocal.withInitial(() -> ChunkAdapter.create(getType(), baseline, delta)); updateCommitter = new UpdateCommitter<>(this, DeltaAwareColumnSource::commitValues); } @@ -645,8 +612,8 @@ public boolean isImmutable() { * * @param lhs The {@link OrderedKeys} to partition * @param rhs The keys which control the partition operation - * @param results Allocated by the caller. {@code results[0]} will be set to (lhs intersect - * rhs). {@code results[1]} will be set to (lhs minus rhs). + * @param results Allocated by the caller. {@code results[0]} will be set to (lhs intersect rhs). {@code results[1]} + * will be set to (lhs minus rhs). */ private static void splitKeys(OrderedKeys lhs, Index rhs, Index[] results) { final Index lhsIndex = lhs.asIndex(); @@ -655,8 +622,7 @@ private static void splitKeys(OrderedKeys lhs, Index rhs, Index[] results) { } private static class DAContext implements ChunkSource.GetContext, ChunkSource.FillContext { - static DAContext createForGet(ChunkType chunkType, ChunkSource baseline, ChunkSource delta, - int chunkCapacity) { + static DAContext createForGet(ChunkType chunkType, ChunkSource baseline, ChunkSource delta, int chunkCapacity) { final GetAndFillContexts b = GetAndFillContexts.createForGet(baseline, chunkCapacity); final GetAndFillContexts d = GetAndFillContexts.createForGet(delta, chunkCapacity); return new DAContext(b, d, chunkType.makeWritableChunk(chunkCapacity)); @@ -675,8 +641,7 @@ static DAContext createForFill(ChunkSource baseline, ChunkSource delta, int chun */ final WritableChunk optionalChunk; - private DAContext(GetAndFillContexts baseline, GetAndFillContexts delta, - WritableChunk optionalChunk) { + private DAContext(GetAndFillContexts baseline, GetAndFillContexts delta, WritableChunk optionalChunk) { this.baseline = baseline; this.delta = delta; this.optionalChunk = optionalChunk; @@ -690,7 +655,7 @@ 
static GetAndFillContexts createForGet(ChunkSource chunkSource, int chunkCapacit static GetAndFillContexts createForFill(ChunkSource chunkSource, int chunkCapacity) { return new GetAndFillContexts(chunkSource.makeGetContext(chunkCapacity), - chunkSource.makeFillContext(chunkCapacity)); + chunkSource.makeFillContext(chunkCapacity)); } /** @@ -702,8 +667,7 @@ static GetAndFillContexts createForFill(ChunkSource chunkSource, int chunkCapaci */ final ChunkSource.FillContext optionalFillContext; - private GetAndFillContexts(ChunkSource.GetContext getContext, - ChunkSource.FillContext optionalFillContext) { + private GetAndFillContexts(ChunkSource.GetContext getContext, ChunkSource.FillContext optionalFillContext) { this.getContext = getContext; this.optionalFillContext = optionalFillContext; } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/deltaaware/SoleKey.java b/DB/src/main/java/io/deephaven/db/v2/sources/deltaaware/SoleKey.java index f60f682bff2..a217ea5649f 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/deltaaware/SoleKey.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/deltaaware/SoleKey.java @@ -63,8 +63,7 @@ public LongChunk asKeyRangesChunk() { } @Override - public void fillKeyIndicesChunk( - WritableLongChunk chunkToFill) { + public void fillKeyIndicesChunk(WritableLongChunk chunkToFill) { chunkToFill.set(0, key); chunkToFill.setSize(1); } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/immutable/ImmutableDateTimeArraySource.java b/DB/src/main/java/io/deephaven/db/v2/sources/immutable/ImmutableDateTimeArraySource.java index ba5a99a4a35..4bb710817a9 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/immutable/ImmutableDateTimeArraySource.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/immutable/ImmutableDateTimeArraySource.java @@ -6,7 +6,7 @@ import io.deephaven.db.v2.sources.ImmutableColumnSourceGetDefaults; public class ImmutableDateTimeArraySource extends AbstractColumnSource - implements 
ImmutableColumnSourceGetDefaults.ForObject { + implements ImmutableColumnSourceGetDefaults.ForObject { private final long[] data; public ImmutableDateTimeArraySource(long[] source) { diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/regioned/ColumnRegion.java b/DB/src/main/java/io/deephaven/db/v2/sources/regioned/ColumnRegion.java index 8d6fc011ff0..548d7190b84 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/regioned/ColumnRegion.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/regioned/ColumnRegion.java @@ -16,8 +16,8 @@ default long firstRowOffset() { } abstract class Null - extends GenericColumnRegionBase - implements ColumnRegion, WithDefaultsForRepeatingValues { + extends GenericColumnRegionBase + implements ColumnRegion, WithDefaultsForRepeatingValues { Null(final long pageMask) { super(pageMask); @@ -25,7 +25,7 @@ abstract class Null @Override public void fillChunkAppend(@NotNull final FillContext context, - @NotNull final WritableChunk destination, final int length) { + @NotNull final WritableChunk destination, final int length) { final int offset = destination.size(); destination.fillWithNullValue(offset, length); diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/regioned/ColumnRegionChunkDictionary.java b/DB/src/main/java/io/deephaven/db/v2/sources/regioned/ColumnRegionChunkDictionary.java index 279be8f4745..4df4c804445 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/regioned/ColumnRegionChunkDictionary.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/regioned/ColumnRegionChunkDictionary.java @@ -13,38 +13,35 @@ import java.util.function.Function; /** - * {@link ColumnRegionObject} implementation for regions that support fetching symbols from a - * dictionary represented as an {@link ObjectChunk}. + * {@link ColumnRegionObject} implementation for regions that support fetching symbols from a dictionary represented as + * an {@link ObjectChunk}. 
*/ public class ColumnRegionChunkDictionary - extends GenericColumnRegionBase - implements ColumnRegionObject, Page.WithDefaults, - DefaultChunkSource.SupportsContiguousGet { + extends GenericColumnRegionBase + implements ColumnRegionObject, Page.WithDefaults, + DefaultChunkSource.SupportsContiguousGet { private final ObjectChunk dictionary; private final Function conversion; public static ColumnRegionObject create( - final long pageMask, - @NotNull final Class dataType, - @NotNull final Chunk dictionary) { + final long pageMask, + @NotNull final Class dataType, + @NotNull final Chunk dictionary) { if (CharSequence.class.isAssignableFrom(dataType)) { // noinspection unchecked - final StringCache stringCache = - StringUtils.getStringCache((Class) dataType); + final StringCache stringCache = StringUtils.getStringCache((Class) dataType); // noinspection unchecked final Function conversion = - (final String dictValue) -> (DATA_TYPE) stringCache.getCachedString(dictValue); - return new ColumnRegionChunkDictionary<>(pageMask, dictionary.asObjectChunk(), - conversion); + (final String dictValue) -> (DATA_TYPE) stringCache.getCachedString(dictValue); + return new ColumnRegionChunkDictionary<>(pageMask, dictionary.asObjectChunk(), conversion); } - return new ColumnRegionChunkDictionary<>(pageMask, dictionary.asObjectChunk(), - Function.identity()); + return new ColumnRegionChunkDictionary<>(pageMask, dictionary.asObjectChunk(), Function.identity()); } private ColumnRegionChunkDictionary(final long pageMask, - @NotNull final ObjectChunk dictionary, - @NotNull final Function conversion) { + @NotNull final ObjectChunk dictionary, + @NotNull final Function conversion) { super(pageMask); this.dictionary = dictionary; this.conversion = conversion; @@ -56,26 +53,21 @@ public DATA_TYPE getObject(final long elementIndex) { } @Override - public Chunk getChunk(@NotNull final GetContext context, final long firstKey, - final long lastKey) { - return 
dictionary.slice(Math.toIntExact(getRowOffset(firstKey)), - Math.toIntExact(lastKey - firstKey + 1)); + public Chunk getChunk(@NotNull final GetContext context, final long firstKey, final long lastKey) { + return dictionary.slice(Math.toIntExact(getRowOffset(firstKey)), Math.toIntExact(lastKey - firstKey + 1)); } @Override public void fillChunkAppend(@NotNull final FillContext context, - @NotNull final WritableChunk destination, - @NotNull final OrderedKeys orderedKeys) { - final WritableObjectChunk objectDestination = - destination.asWritableObjectChunk(); + @NotNull final WritableChunk destination, @NotNull final OrderedKeys orderedKeys) { + final WritableObjectChunk objectDestination = destination.asWritableObjectChunk(); orderedKeys.forAllLongs((final long key) -> objectDestination.add(getObject(key))); } @Override - public boolean gatherDictionaryValuesIndex( - @NotNull final ReadOnlyIndex.SearchIterator keysToVisit, - @NotNull final OrderedKeys.Iterator knownKeys, - @NotNull final Index.SequentialBuilder sequentialBuilder) { + public boolean gatherDictionaryValuesIndex(@NotNull final ReadOnlyIndex.SearchIterator keysToVisit, + @NotNull final OrderedKeys.Iterator knownKeys, + @NotNull final Index.SequentialBuilder sequentialBuilder) { final long pageFirstKey = firstRow(keysToVisit.currentValue()); final long pageLastKey = pageFirstKey + dictionary.size() - 1; if (knownKeys.peekNextKey() != pageFirstKey) { diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/regioned/ColumnRegionReferencing.java b/DB/src/main/java/io/deephaven/db/v2/sources/regioned/ColumnRegionReferencing.java index c208afc640d..55424add54e 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/regioned/ColumnRegionReferencing.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/regioned/ColumnRegionReferencing.java @@ -9,7 +9,7 @@ import org.jetbrains.annotations.NotNull; public interface ColumnRegionReferencing> - extends ColumnRegion { + extends ColumnRegion { @NotNull 
REFERENCED_COLUMN_REGION getReferencedRegion(); @@ -23,16 +23,16 @@ default ChunkType getChunkType() { interface Converter { /** - * Converts all the native source values represented by {@code orderedKeys} from a - * single region into the {@code destination} chunk by appending. + * Converts all the native source values represented by {@code orderedKeys} from a single region into + * the {@code destination} chunk by appending. */ void convertRegion(WritableChunk destination, Chunk source, - OrderedKeys orderedKeys); + OrderedKeys orderedKeys); } class Null> - extends ColumnRegion.Null - implements ColumnRegionReferencing { + extends ColumnRegion.Null + implements ColumnRegionReferencing { private final REFERENCED_COLUMN_REGION nullReferencedColumnRegion; diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/regioned/ColumnRegionReferencingImpl.java b/DB/src/main/java/io/deephaven/db/v2/sources/regioned/ColumnRegionReferencingImpl.java index fe8605448c9..0db0c6db9f8 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/regioned/ColumnRegionReferencingImpl.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/regioned/ColumnRegionReferencingImpl.java @@ -12,12 +12,11 @@ import javax.annotation.OverridingMethodsMustInvokeSuper; public class ColumnRegionReferencingImpl> - implements ColumnRegionReferencing, Page.WithDefaults { + implements ColumnRegionReferencing, Page.WithDefaults { private final REFERENCED_COLUMN_REGION referencedColumnRegion; - public ColumnRegionReferencingImpl( - @NotNull final REFERENCED_COLUMN_REGION referencedColumnRegion) { + public ColumnRegionReferencingImpl(@NotNull final REFERENCED_COLUMN_REGION referencedColumnRegion) { this.referencedColumnRegion = referencedColumnRegion; } @@ -34,10 +33,9 @@ public long mask() { @Override public void fillChunkAppend(@NotNull ChunkSource.FillContext context, - @NotNull WritableChunk destination, @NotNull OrderedKeys orderedKeys) { + @NotNull WritableChunk destination, @NotNull OrderedKeys 
orderedKeys) { FillContext.converter(context).convertRegion(destination, - referencedColumnRegion.getChunk(FillContext.nativeGetContext(context), orderedKeys), - orderedKeys); + referencedColumnRegion.getChunk(FillContext.nativeGetContext(context), orderedKeys), orderedKeys); } @Override @@ -52,7 +50,7 @@ static class FillContext implements ChunkSource.FillContext { private final Converter converter; FillContext(GetContextMaker getContextMaker, Converter converter, int chunkCapacity, - SharedContext sharedContext) { + SharedContext sharedContext) { this.converter = converter; this.nativeGetContext = getContextMaker.makeGetContext(chunkCapacity, sharedContext); } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/regioned/DeferredColumnRegion.java b/DB/src/main/java/io/deephaven/db/v2/sources/regioned/DeferredColumnRegion.java index 7ab2fce6f97..1638d3363c3 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/regioned/DeferredColumnRegion.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/regioned/DeferredColumnRegion.java @@ -3,12 +3,11 @@ import io.deephaven.db.v2.sources.chunk.Attributes; /** - * Adds region factory for deferred column regions implementations. Deferred regions serve as - * placeholders, constructing (and usually swapping themselves for) the "real" region on first - * access. + * Adds region factory for deferred column regions implementations. Deferred regions serve as placeholders, constructing + * (and usually swapping themselves for) the "real" region on first access. */ interface DeferredColumnRegion> - extends ColumnRegion { + extends ColumnRegion { /** * Get (and possibly construct) the "real" region whose construction was deferred. @@ -18,10 +17,10 @@ interface DeferredColumnRegion, INNER_REGION_TYPE extends REGION_TYPE> REGION_TYPE materialize( - INNER_REGION_TYPE region) { + INNER_REGION_TYPE region) { // noinspection unchecked return region instanceof DeferredColumnRegion - ? 
((DeferredColumnRegion) region).getResultRegion() - : region; + ? ((DeferredColumnRegion) region).getResultRegion() + : region; } } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/regioned/DeferredColumnRegionBase.java b/DB/src/main/java/io/deephaven/db/v2/sources/regioned/DeferredColumnRegionBase.java index 1661c3d3259..b1fcfdd3c75 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/regioned/DeferredColumnRegionBase.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/regioned/DeferredColumnRegionBase.java @@ -12,15 +12,14 @@ * Base deferred region implementation. */ public abstract class DeferredColumnRegionBase> - extends GenericColumnRegionBase - implements DeferredColumnRegion { + extends GenericColumnRegionBase + implements DeferredColumnRegion { private Supplier resultRegionFactory; private volatile REGION_TYPE resultRegion; - DeferredColumnRegionBase(final long pageMask, - @NotNull final Supplier resultRegionFactory) { + DeferredColumnRegionBase(final long pageMask, @NotNull final Supplier resultRegionFactory) { super(pageMask); this.resultRegionFactory = Require.neqNull(resultRegionFactory, "resultRegionFactory"); } @@ -30,8 +29,7 @@ public final REGION_TYPE getResultRegion() { if (resultRegion == null) { synchronized (this) { if (resultRegion == null) { - resultRegion = - Require.neqNull(resultRegionFactory.get(), "resultRegionFactory.get()"); + resultRegion = Require.neqNull(resultRegionFactory.get(), "resultRegionFactory.get()"); resultRegionFactory = null; } } @@ -40,8 +38,7 @@ public final REGION_TYPE getResultRegion() { } /** - * Get the result region if it has already been supplied (because of a call to - * {@link #getResultRegion()}). + * Get the result region if it has already been supplied (because of a call to {@link #getResultRegion()}). 
* * @return The result region */ @@ -65,27 +62,24 @@ public ChunkType getChunkType() { } @Override - public void fillChunk(@NotNull FillContext context, - @NotNull WritableChunk destination, @NotNull OrderedKeys orderedKeys) { + public void fillChunk(@NotNull FillContext context, @NotNull WritableChunk destination, + @NotNull OrderedKeys orderedKeys) { getResultRegion().fillChunk(context, destination, orderedKeys); } @Override - public void fillChunkAppend(@NotNull FillContext context, - @NotNull WritableChunk destination, - @NotNull OrderedKeys.Iterator orderedKeysIterator) { + public void fillChunkAppend(@NotNull FillContext context, @NotNull WritableChunk destination, + @NotNull OrderedKeys.Iterator orderedKeysIterator) { getResultRegion().fillChunkAppend(context, destination, orderedKeysIterator); } @Override - public Chunk getChunk(@NotNull GetContext context, - @NotNull OrderedKeys orderedKeys) { + public Chunk getChunk(@NotNull GetContext context, @NotNull OrderedKeys orderedKeys) { return getResultRegion().getChunk(context, orderedKeys); } @Override - public Chunk getChunk(@NotNull GetContext context, long firstKey, - long lastKey) { + public Chunk getChunk(@NotNull GetContext context, long firstKey, long lastKey) { return getResultRegion().getChunk(context, firstKey, lastKey); } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/regioned/DeferredColumnRegionReferencing.java b/DB/src/main/java/io/deephaven/db/v2/sources/regioned/DeferredColumnRegionReferencing.java index cfaca4d0f81..183135b961d 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/regioned/DeferredColumnRegionReferencing.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/regioned/DeferredColumnRegionReferencing.java @@ -6,15 +6,15 @@ import java.util.function.Supplier; /** - * {@link ColumnRegionReferencing} implementation for deferred regions, i.e. regions that will be - * properly constructed on first access. 
+ * {@link ColumnRegionReferencing} implementation for deferred regions, i.e. regions that will be properly constructed + * on first access. */ public class DeferredColumnRegionReferencing> - extends DeferredColumnRegionBase> - implements ColumnRegionReferencing { + extends DeferredColumnRegionBase> + implements ColumnRegionReferencing { DeferredColumnRegionReferencing(final long pageMask, - @NotNull Supplier> resultRegionFactory) { + @NotNull Supplier> resultRegionFactory) { super(pageMask, resultRegionFactory); } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/regioned/MakeRegion.java b/DB/src/main/java/io/deephaven/db/v2/sources/regioned/MakeRegion.java index 161b1bf5799..b41040b8a56 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/regioned/MakeRegion.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/regioned/MakeRegion.java @@ -16,13 +16,12 @@ public interface MakeRegion columnDefinition, - @NotNull ColumnLocation columnLocation, - int regionIndex); + @NotNull ColumnLocation columnLocation, + int regionIndex); } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/regioned/ParquetColumnRegionBase.java b/DB/src/main/java/io/deephaven/db/v2/sources/regioned/ParquetColumnRegionBase.java index b2345546388..44d86401460 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/regioned/ParquetColumnRegionBase.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/regioned/ParquetColumnRegionBase.java @@ -13,47 +13,43 @@ import javax.annotation.OverridingMethodsMustInvokeSuper; public abstract class ParquetColumnRegionBase - extends GenericColumnRegionBase - implements ParquetColumnRegion { + extends GenericColumnRegionBase + implements ParquetColumnRegion { final ColumnChunkPageStore columnChunkPageStore; - ParquetColumnRegionBase(final long pageMask, - @NotNull final ColumnChunkPageStore columnChunkPageStore) { + ParquetColumnRegionBase(final long pageMask, @NotNull final ColumnChunkPageStore columnChunkPageStore) { super(pageMask); 
this.columnChunkPageStore = Require.neqNull(columnChunkPageStore, "columnChunkPageStore"); - // We are making the following assumptions, so these basic functions are inlined rather than - // virtual calls. - Require.eq(columnChunkPageStore.mask(), "columnChunkPageStore.mask()", mask(), - "ColumnRegion.mask()"); - Require.eq(columnChunkPageStore.firstRowOffset(), "columnChunkPageStore.firstRowOffset()", - firstRowOffset(), "ColumnRegion.firstrRowOffset()"); + // We are making the following assumptions, so these basic functions are inlined rather than virtual calls. + Require.eq(columnChunkPageStore.mask(), "columnChunkPageStore.mask()", mask(), "ColumnRegion.mask()"); + Require.eq(columnChunkPageStore.firstRowOffset(), "columnChunkPageStore.firstRowOffset()", firstRowOffset(), + "ColumnRegion.firstrRowOffset()"); } @Override public final Chunk getChunk(@NotNull final GetContext context, - @NotNull final OrderedKeys orderedKeys) { + @NotNull final OrderedKeys orderedKeys) { return columnChunkPageStore.getChunk(context, orderedKeys); } @Override - public final Chunk getChunk(@NotNull final GetContext context, - final long firstKey, final long lastKey) { + public final Chunk getChunk(@NotNull final GetContext context, final long firstKey, + final long lastKey) { return columnChunkPageStore.getChunk(context, firstKey, lastKey); } @Override public final void fillChunk(@NotNull final FillContext context, - @NotNull final WritableChunk destination, - @NotNull final OrderedKeys orderedKeys) { + @NotNull final WritableChunk destination, @NotNull final OrderedKeys orderedKeys) { columnChunkPageStore.fillChunk(context, destination, orderedKeys); } @Override public final void fillChunkAppend(@NotNull final FillContext context, - @NotNull final WritableChunk destination, - @NotNull final OrderedKeys.Iterator orderedKeysIterator) { + @NotNull final WritableChunk destination, + @NotNull final OrderedKeys.Iterator orderedKeysIterator) { columnChunkPageStore.fillChunkAppend(context, 
destination, orderedKeysIterator); } @@ -70,14 +66,12 @@ public void releaseCachedResources() { } @Override - public final FillContext makeFillContext(final int chunkCapacity, - final SharedContext sharedContext) { + public final FillContext makeFillContext(final int chunkCapacity, final SharedContext sharedContext) { return columnChunkPageStore.makeFillContext(chunkCapacity, sharedContext); } @Override - public final GetContext makeGetContext(final int chunkCapacity, - final SharedContext sharedContext) { + public final GetContext makeGetContext(final int chunkCapacity, final SharedContext sharedContext) { return columnChunkPageStore.makeGetContext(chunkCapacity, sharedContext); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/regioned/PartitioningSourceFactory.java b/DB/src/main/java/io/deephaven/db/v2/sources/regioned/PartitioningSourceFactory.java index b453c72437f..f785c293a7b 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/regioned/PartitioningSourceFactory.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/regioned/PartitioningSourceFactory.java @@ -11,7 +11,7 @@ class PartitioningSourceFactory { * @return A new partitioning {@link RegionedColumnSource} */ static RegionedColumnSource makePartitioningSource( - @NotNull final Class dataType) { + @NotNull final Class dataType) { final RegionedColumnSource result; if (dataType == boolean.class || dataType == Boolean.class) { result = new RegionedColumnSourceObject.Partitioning<>(dataType); diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/regioned/RegionContextHolder.java b/DB/src/main/java/io/deephaven/db/v2/sources/regioned/RegionContextHolder.java index bfd6d5d4949..8323f68770f 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/regioned/RegionContextHolder.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/regioned/RegionContextHolder.java @@ -4,6 +4,5 @@ public class RegionContextHolder implements ChunkSource.FillContext { // Currently mo column regions use a 
non-default context. - // If that changes, we'll need to add indirection and/or caching here, switching out contexts on - // region boundaries. + // If that changes, we'll need to add indirection and/or caching here, switching out contexts on region boundaries. } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/regioned/RegionedColumnSource.java b/DB/src/main/java/io/deephaven/db/v2/sources/regioned/RegionedColumnSource.java index 457d6e69603..454b7959d28 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/regioned/RegionedColumnSource.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/regioned/RegionedColumnSource.java @@ -17,31 +17,28 @@ * *

    * {@link io.deephaven.db.v2.SourceTable source tables} can be thought of a tree of partitions with - * {@link io.deephaven.db.v2.locations.TableLocation table locations} at the leaf nodes. When - * building the {@link io.deephaven.db.v2.utils.Index Index} for such a - * {@link io.deephaven.db.tables.Table table}, we statically partition the available element address - * space from [0, {@value Long#MAX_VALUE} (2^63-1)]. + * {@link io.deephaven.db.v2.locations.TableLocation table locations} at the leaf nodes. When building the + * {@link io.deephaven.db.v2.utils.Index Index} for such a {@link io.deephaven.db.tables.Table table}, we statically + * partition the available element address space from [0, {@value Long#MAX_VALUE} (2^63-1)]. * *

    - * We constrain the size at these leaf nodes in order to support a partitioning of the element - * address space into region index and sub-region element index. In order to make the calculations - * as inexpensive as possible, this is done by assigning {@link #REGION_INDEX_ADDRESS_BITS some - * bits} of each index key (element address) to the region index, and the - * {@link #SUB_REGION_ELEMENT_INDEX_ADDRESS_BITS remaining bits} to the sub-region element index. + * We constrain the size at these leaf nodes in order to support a partitioning of the element address space into region + * index and sub-region element index. In order to make the calculations as inexpensive as possible, this is done by + * assigning {@link #REGION_INDEX_ADDRESS_BITS some bits} of each index key (element address) to the region index, and + * the {@link #SUB_REGION_ELEMENT_INDEX_ADDRESS_BITS remaining bits} to the sub-region element index. * *

    - * This type of address space allocation allows very cheap O(1) element access. Denser alternatives - * tend to introduce more complication and/or O(log n) lookups. + * This type of address space allocation allows very cheap O(1) element access. Denser alternatives tend to introduce + * more complication and/or O(log n) lookups. * *

    * Currently, region indices use {@value REGION_INDEX_ADDRESS_BITS} and region offsets use - * {@value SUB_REGION_ELEMENT_INDEX_ADDRESS_BITS}, allowing tables to consist of - * {@value MAXIMUM_REGION_COUNT} locations with {@value REGION_CAPACITY_IN_ELEMENTS} each. + * {@value SUB_REGION_ELEMENT_INDEX_ADDRESS_BITS}, allowing tables to consist of {@value MAXIMUM_REGION_COUNT} locations + * with {@value REGION_CAPACITY_IN_ELEMENTS} each. */ -@VisibleForTesting // This could be package-private, but for mock-based unit testing purposes it - // must be public +@VisibleForTesting // This could be package-private, but for mock-based unit testing purposes it must be public public interface RegionedColumnSource - extends DeferredGroupingColumnSource, ImmutableColumnSource { + extends DeferredGroupingColumnSource, ImmutableColumnSource { /** * Address bits allocated to the region index. @@ -51,8 +48,8 @@ public interface RegionedColumnSource /** * Address bits allocated to the sub-region element index. *

    - * Note that we do not use the sign bit, as negative index keys are not permitted (or used to - * signify the {@link io.deephaven.db.v2.utils.ReadOnlyIndex#NULL_KEY null key}). + * Note that we do not use the sign bit, as negative index keys are not permitted (or used to signify the + * {@link io.deephaven.db.v2.utils.ReadOnlyIndex#NULL_KEY null key}). */ int SUB_REGION_ELEMENT_INDEX_ADDRESS_BITS = Long.SIZE - 1 - REGION_INDEX_ADDRESS_BITS; @@ -87,7 +84,7 @@ static long getFirstElementIndex(final int regionIndex) { */ static long getLastElementIndex(final int regionIndex) { return (long) regionIndex << SUB_REGION_ELEMENT_INDEX_ADDRESS_BITS - | ELEMENT_INDEX_TO_SUB_REGION_ELEMENT_INDEX_MASK; + | ELEMENT_INDEX_TO_SUB_REGION_ELEMENT_INDEX_MASK; } /** @@ -106,11 +103,10 @@ static long getElementIndex(final int regionIndex, final long regionOffset) { *

    * Elements in this region are ordered after elements in other regions added previously. * - * @param columnDefinition The column definition for this column source (potentially varies by - * region) + * @param columnDefinition The column definition for this column source (potentially varies by region) * @param columnLocation The column location for the region being added * @return The index assigned to the added region */ int addRegion(@NotNull final ColumnDefinition columnDefinition, - @NotNull final ColumnLocation columnLocation); + @NotNull final ColumnLocation columnLocation); } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/regioned/RegionedColumnSourceArray.java b/DB/src/main/java/io/deephaven/db/v2/sources/regioned/RegionedColumnSourceArray.java index 6d317e90613..b6975d804ed 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/regioned/RegionedColumnSourceArray.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/regioned/RegionedColumnSourceArray.java @@ -13,12 +13,11 @@ import java.util.function.Supplier; /** - * Base class for all {@link RegionedColumnSource} implementations with column regions stored in an - * array. + * Base class for all {@link RegionedColumnSource} implementations with column regions stored in an array. */ abstract class RegionedColumnSourceArray> - extends RegionedColumnSourceBase - implements MakeRegion { + extends RegionedColumnSourceBase + implements MakeRegion { @FunctionalInterface interface MakeDeferred> { @@ -34,26 +33,25 @@ interface MakeDeferred> REGION_TYPE[] allocateRegionArray( - int length) { + int length) { // noinspection unchecked return (REGION_TYPE[]) (length == 0 ? EMPTY : new ColumnRegion[length]); } /** - * Construct a {@code RegionedColumnSource} which is an array of references to - * {@code ColumnRegion}s. + * Construct a {@code RegionedColumnSource} which is an array of references to {@code ColumnRegion}s. 
* - * @param nullRegion A ColumnRegion to be used when the actual region doesn't exist, which - * returns the correct null values for that region. + * @param nullRegion A ColumnRegion to be used when the actual region doesn't exist, which returns the correct null + * values for that region. * @param type The type of the column. * @param componentType The component type in case the main type is a DbArray - * @param makeDeferred A function which creates the correct deferred region for this - * ColumnSource. If you don't want any deferred regions then use Supplier::get. + * @param makeDeferred A function which creates the correct deferred region for this ColumnSource. If you don't want + * any deferred regions then use Supplier::get. */ RegionedColumnSourceArray(@NotNull final REGION_TYPE nullRegion, - @NotNull final Class type, - @Nullable final Class componentType, - @NotNull final MakeDeferred makeDeferred) { + @NotNull final Class type, + @Nullable final Class componentType, + @NotNull final MakeDeferred makeDeferred) { super(type, componentType); this.nullRegion = nullRegion; this.makeDeferred = makeDeferred; @@ -61,30 +59,29 @@ private static type, - @NotNull final MakeDeferred makeDeferred) { + @NotNull final Class type, + @NotNull final MakeDeferred makeDeferred) { this(nullRegion, type, null, makeDeferred); } @Override @OverridingMethodsMustInvokeSuper public synchronized int addRegion(@NotNull final ColumnDefinition columnDefinition, - @NotNull final ColumnLocation columnLocation) { + @NotNull final ColumnLocation columnLocation) { maybeExtendRegions(); final int regionIndex = regionCount; - regions[regionIndex] = - makeDeferred.make(PARAMETERS.regionMask, () -> updateRegion(regionIndex, - makeRegion(columnDefinition, columnLocation, regionIndex))); + regions[regionIndex] = makeDeferred.make(PARAMETERS.regionMask, + () -> updateRegion(regionIndex, makeRegion(columnDefinition, columnLocation, regionIndex))); return regionCount++; } @@ -116,19 +113,18 @@ 
private void maybeExtendRegions() { return; } if (regionCount == MAXIMUM_REGION_COUNT) { - throw new IllegalStateException("Cannot add another region to " + this - + ", maximum region count " + MAXIMUM_REGION_COUNT + " reached"); + throw new IllegalStateException("Cannot add another region to " + this + ", maximum region count " + + MAXIMUM_REGION_COUNT + " reached"); } - final int newLength = - Math.min(Math.max(regions.length * 2, regionCount + 1), MAXIMUM_REGION_COUNT); + final int newLength = Math.min(Math.max(regions.length * 2, regionCount + 1), MAXIMUM_REGION_COUNT); final REGION_TYPE[] newRegions = allocateRegionArray(newLength); System.arraycopy(regions, 0, newRegions, 0, regionCount); regions = newRegions; } /** - * Update the region at a given index in this regioned column source. This is intended to be - * used by the region suppliers in DeferredColumnRegion implementations. + * Update the region at a given index in this regioned column source. This is intended to be used by the region + * suppliers in DeferredColumnRegion implementations. * * @param regionIndex The region index * @param region The new column region diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/regioned/RegionedColumnSourceBase.java b/DB/src/main/java/io/deephaven/db/v2/sources/regioned/RegionedColumnSourceBase.java index 3c7d6474ef6..34af9be9dbf 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/regioned/RegionedColumnSourceBase.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/regioned/RegionedColumnSourceBase.java @@ -10,27 +10,23 @@ import org.jetbrains.annotations.Nullable; /** - * Partial implementation of {@link RegionedColumnSource} for array-backed and delegating - * implementations to extend. + * Partial implementation of {@link RegionedColumnSource} for array-backed and delegating implementations to extend. 
*/ abstract class RegionedColumnSourceBase> - extends AbstractDeferredGroupingColumnSource - implements RegionedPageStore, - RegionedColumnSource { + extends AbstractDeferredGroupingColumnSource + implements RegionedPageStore, RegionedColumnSource { static final Parameters PARAMETERS; static { - PARAMETERS = new RegionedPageStore.Parameters(Long.MAX_VALUE, MAXIMUM_REGION_COUNT, - REGION_CAPACITY_IN_ELEMENTS); - Assert.eq(PARAMETERS.regionMask, "parameters.regionMask", - ELEMENT_INDEX_TO_SUB_REGION_ELEMENT_INDEX_MASK, - "ELEMENT_INDEX_TO_SUB_REGION_ELEMENT_INDEX_MASK"); - Assert.eq(PARAMETERS.regionMaskNumBits, "parameters.regionMaskNumBits", - SUB_REGION_ELEMENT_INDEX_ADDRESS_BITS, "SUB_REGION_ELEMENT_INDEX_ADDRESS_BITS"); + PARAMETERS = + new RegionedPageStore.Parameters(Long.MAX_VALUE, MAXIMUM_REGION_COUNT, REGION_CAPACITY_IN_ELEMENTS); + Assert.eq(PARAMETERS.regionMask, "parameters.regionMask", ELEMENT_INDEX_TO_SUB_REGION_ELEMENT_INDEX_MASK, + "ELEMENT_INDEX_TO_SUB_REGION_ELEMENT_INDEX_MASK"); + Assert.eq(PARAMETERS.regionMaskNumBits, "parameters.regionMaskNumBits", SUB_REGION_ELEMENT_INDEX_ADDRESS_BITS, + "SUB_REGION_ELEMENT_INDEX_ADDRESS_BITS"); } - RegionedColumnSourceBase(@NotNull final Class type, - @Nullable final Class componentType) { + RegionedColumnSourceBase(@NotNull final Class type, @Nullable final Class componentType) { super(type, componentType); } @@ -47,9 +43,8 @@ public final Parameters parameters() { * Use the more efficient fill chunk implementation, rather than the default which uses get(). 
*/ @Override - public void fillChunk(@NotNull FillContext context, - @NotNull WritableChunk destination, - @NotNull OrderedKeys orderedKeys) { + public void fillChunk(@NotNull FillContext context, @NotNull WritableChunk destination, + @NotNull OrderedKeys orderedKeys) { RegionedPageStore.super.fillChunk(context, destination, orderedKeys); } @@ -58,8 +53,7 @@ public void fillChunk(@NotNull FillContext context, */ @Override public void fillPrevChunk(@NotNull FillContext context, - @NotNull WritableChunk destination, - @NotNull OrderedKeys orderedKeys) { + @NotNull WritableChunk destination, @NotNull OrderedKeys orderedKeys) { fillChunk(context, destination, orderedKeys); } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/regioned/RegionedColumnSourceDBDateTime.java b/DB/src/main/java/io/deephaven/db/v2/sources/regioned/RegionedColumnSourceDBDateTime.java index 2d6afbb1447..46598a3712f 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/regioned/RegionedColumnSourceDBDateTime.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/regioned/RegionedColumnSourceDBDateTime.java @@ -12,20 +12,19 @@ * Regioned column source implementation for columns of {@link DBDateTime}s. 
*/ final class RegionedColumnSourceDBDateTime - extends - RegionedColumnSourceReferencing> - implements ColumnSourceGetDefaults.ForObject { + extends + RegionedColumnSourceReferencing> + implements ColumnSourceGetDefaults.ForObject { public RegionedColumnSourceDBDateTime() { super(ColumnRegionLong.createNull(PARAMETERS.regionMask), DBDateTime.class, - RegionedColumnSourceLong.NativeType.AsValues::new); + RegionedColumnSourceLong.NativeType.AsValues::new); } @Override public void convertRegion(WritableChunk destination, - Chunk source, OrderedKeys orderedKeys) { - WritableObjectChunk objectChunk = - destination.asWritableObjectChunk(); + Chunk source, OrderedKeys orderedKeys) { + WritableObjectChunk objectChunk = destination.asWritableObjectChunk(); LongChunk longChunk = source.asLongChunk(); final int size = objectChunk.size(); @@ -40,7 +39,6 @@ public void convertRegion(WritableChunk destination, @Override public DBDateTime get(long elementIndex) { return elementIndex == NULL_KEY ? null - : DBTimeUtils.nanosToTime( - lookupRegion(elementIndex).getReferencedRegion().getLong(elementIndex)); + : DBTimeUtils.nanosToTime(lookupRegion(elementIndex).getReferencedRegion().getLong(elementIndex)); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/regioned/RegionedColumnSourceInner.java b/DB/src/main/java/io/deephaven/db/v2/sources/regioned/RegionedColumnSourceInner.java index b06aa1660ca..9f85ea3fbb7 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/regioned/RegionedColumnSourceInner.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/regioned/RegionedColumnSourceInner.java @@ -9,23 +9,22 @@ /** *

    - * Base class for column source which reaches its regions by reaching into the regions of an outer - * column source. These derive directly from {@link RegionedColumnSourceBase}, and thus don't - * maintain their own array of regions. + * Base class for column source which reaches its regions by reaching into the regions of an outer column source. These + * derive directly from {@link RegionedColumnSourceBase}, and thus don't maintain their own array of regions. *

    * *

    - * Extending classes will typically override {@link RegionedPageStore#getRegion(int)} to reach into - * the outer column source. + * Extending classes will typically override {@link RegionedPageStore#getRegion(int)} to reach into the outer column + * source. *

    */ abstract class RegionedColumnSourceInner, OUTER_DATA_TYPE, OUTER_REGION_TYPE extends ColumnRegion> - extends RegionedColumnSourceBase { + extends RegionedColumnSourceBase { private final RegionedColumnSourceBase outerColumnSource; RegionedColumnSourceInner(@NotNull Class type, - RegionedColumnSourceBase outerColumnSource) { + RegionedColumnSourceBase outerColumnSource) { super(type); this.outerColumnSource = outerColumnSource; } @@ -36,8 +35,7 @@ final int addRegionForUnitTests(OTHER_REGION_TYPE region) { } @Override - public final int addRegion(@NotNull ColumnDefinition columnDefinition, - @NotNull ColumnLocation columnLocation) { + public final int addRegion(@NotNull ColumnDefinition columnDefinition, @NotNull ColumnLocation columnLocation) { return outerColumnSource.addRegion(columnDefinition, columnLocation); } @@ -49,8 +47,7 @@ public final int getRegionCount() { @Override @OverridingMethodsMustInvokeSuper public void releaseCachedResources() { - // We are a reinterpreted column of the outer column source, so if we're asked to release - // our resources, release + // We are a reinterpreted column of the outer column source, so if we're asked to release our resources, release // the real resources in the underlying column. super.releaseCachedResources(); getOuterColumnSource().releaseCachedResources(); diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/regioned/RegionedColumnSourceManager.java b/DB/src/main/java/io/deephaven/db/v2/sources/regioned/RegionedColumnSourceManager.java index c199735eae0..5bf3a55bb8f 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/regioned/RegionedColumnSourceManager.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/regioned/RegionedColumnSourceManager.java @@ -49,26 +49,24 @@ public class RegionedColumnSourceManager implements ColumnSourceManager { * An unmodifiable view of columnSources. 
*/ private final Map> sharedColumnSources = - Collections.unmodifiableMap(columnSources); + Collections.unmodifiableMap(columnSources); /** - * State for table locations that have been added, but have never been found to exist with - * non-zero size. + * State for table locations that have been added, but have never been found to exist with non-zero size. */ private final KeyedObjectHashMap emptyTableLocations = - new KeyedObjectHashMap<>(EMPTY_TABLE_LOCATION_ENTRY_KEY); + new KeyedObjectHashMap<>(EMPTY_TABLE_LOCATION_ENTRY_KEY); /** * State for table locations that provide the regions backing our column sources. */ private final KeyedObjectHashMap includedTableLocations = - new KeyedObjectHashMap<>(INCLUDED_TABLE_LOCATION_ENTRY_KEY); + new KeyedObjectHashMap<>(INCLUDED_TABLE_LOCATION_ENTRY_KEY); /** * Table locations that provide the regions backing our column sources, in insertion order. */ - private final List orderedIncludedTableLocations = - new ArrayList<>(); + private final List orderedIncludedTableLocations = new ArrayList<>(); /** * Whether grouping is enabled. @@ -84,24 +82,22 @@ public class RegionedColumnSourceManager implements ColumnSourceManager { * @param columnDefinitions The column definitions */ RegionedColumnSourceManager(final boolean isRefreshing, - @NotNull final RegionedTableComponentFactory componentFactory, - @NotNull final ColumnToCodecMappings codecMappings, - @NotNull final ColumnDefinition... columnDefinitions) { + @NotNull final RegionedTableComponentFactory componentFactory, + @NotNull final ColumnToCodecMappings codecMappings, + @NotNull final ColumnDefinition... 
columnDefinitions) { this.isRefreshing = isRefreshing; this.columnDefinitions = columnDefinitions; for (final ColumnDefinition columnDefinition : columnDefinitions) { columnSources.put( - columnDefinition.getName(), - componentFactory.createRegionedColumnSource(columnDefinition, codecMappings)); + columnDefinition.getName(), + componentFactory.createRegionedColumnSource(columnDefinition, codecMappings)); } } @Override public synchronized void addLocation(@NotNull final TableLocation tableLocation) { - final IncludedTableLocationEntry includedLocation = - includedTableLocations.get(tableLocation.getKey()); - final EmptyTableLocationEntry emptyLocation = - emptyTableLocations.get(tableLocation.getKey()); + final IncludedTableLocationEntry includedLocation = includedTableLocations.get(tableLocation.getKey()); + final EmptyTableLocationEntry emptyLocation = emptyTableLocations.get(tableLocation.getKey()); if (includedLocation == null && emptyLocation == null) { if (log.isDebugEnabled()) { @@ -111,21 +107,17 @@ public synchronized void addLocation(@NotNull final TableLocation tableLocation) } else { // Duplicate location - not allowed final TableLocation duplicateLocation = - includedLocation != null ? includedLocation.location : emptyLocation.location; + includedLocation != null ? 
includedLocation.location : emptyLocation.location; if (tableLocation != duplicateLocation) { - // If it ever transpires that we need to compare the locations and not just detect a - // second add, then + // If it ever transpires that we need to compare the locations and not just detect a second add, then // we need to add plumbing to include access to the location provider throw new TableDataException( - "Data Routing Configuration error: TableDataService elements overlap at locations " - + - tableLocation.toStringDetailed() + " and " - + duplicateLocation.toStringDetailed()); + "Data Routing Configuration error: TableDataService elements overlap at locations " + + tableLocation.toStringDetailed() + " and " + duplicateLocation.toStringDetailed()); } else { // This is unexpected - we got the identical table location object twice // If we ever get this, some thought needs to go into why. - throw new TableDataException( - "Unexpected: TableDataService returned the same location twice: " + + throw new TableDataException("Unexpected: TableDataService returned the same location twice: " + tableLocation.toStringDetailed()); } } @@ -134,18 +126,12 @@ public synchronized void addLocation(@NotNull final TableLocation tableLocation) @Override public synchronized Index refresh() { final Index.SequentialBuilder addedIndexBuilder = Index.FACTORY.getSequentialBuilder(); - for (final IncludedTableLocationEntry entry : orderedIncludedTableLocations) { // Ordering - // matters, - // since - // we're - // using a - // sequential - // builder. + for (final IncludedTableLocationEntry entry : orderedIncludedTableLocations) { // Ordering matters, since we're + // using a sequential builder. 
entry.pollUpdates(addedIndexBuilder); } Collection entriesToInclude = null; - for (final Iterator iterator = - emptyTableLocations.iterator(); iterator.hasNext();) { + for (final Iterator iterator = emptyTableLocations.iterator(); iterator.hasNext();) { final EmptyTableLocationEntry nonexistentEntry = iterator.next(); nonexistentEntry.refresh(); final ReadOnlyIndex locationIndex = nonexistentEntry.location.getIndex(); @@ -154,16 +140,15 @@ public synchronized Index refresh() { locationIndex.close(); } else { nonexistentEntry.initialIndex = locationIndex; - (entriesToInclude == null ? entriesToInclude = new TreeSet<>() - : entriesToInclude).add(nonexistentEntry); + (entriesToInclude == null ? entriesToInclude = new TreeSet<>() : entriesToInclude) + .add(nonexistentEntry); iterator.remove(); } } } if (entriesToInclude != null) { for (final EmptyTableLocationEntry entryToInclude : entriesToInclude) { - final IncludedTableLocationEntry entry = - new IncludedTableLocationEntry(entryToInclude); + final IncludedTableLocationEntry entry = new IncludedTableLocationEntry(entryToInclude); includedTableLocations.add(entry); orderedIncludedTableLocations.add(entry); entry.processInitial(addedIndexBuilder, entryToInclude.initialIndex); @@ -178,15 +163,15 @@ public synchronized Index refresh() { @Override public final synchronized Collection allLocations() { return Stream.concat( - orderedIncludedTableLocations.stream().map(e -> e.location), - emptyTableLocations.values().stream().sorted().map(e -> e.location)) - .collect(Collectors.toCollection(ArrayList::new)); + orderedIncludedTableLocations.stream().map(e -> e.location), + emptyTableLocations.values().stream().sorted().map(e -> e.location)) + .collect(Collectors.toCollection(ArrayList::new)); } @Override public final synchronized Collection includedLocations() { return orderedIncludedTableLocations.stream().map(e -> e.location) - .collect(Collectors.toCollection(ArrayList::new)); + 
.collect(Collectors.toCollection(ArrayList::new)); } @Override @@ -207,8 +192,7 @@ public final synchronized void disableGrouping() { isGroupingEnabled = false; for (ColumnDefinition columnDefinition : columnDefinitions) { if (columnDefinition.isGrouping()) { - DeferredGroupingColumnSource columnSource = - getColumnSources().get(columnDefinition.getName()); + DeferredGroupingColumnSource columnSource = getColumnSources().get(columnDefinition.getName()); columnSource.setGroupingProvider(null); columnSource.setGroupToRange(null); } @@ -216,8 +200,8 @@ public final synchronized void disableGrouping() { } /** - * State keeper for a table location and its subscription buffer if it hasn't been found to have - * a non-null, non-zero size yet. + * State keeper for a table location and its subscription buffer if it hasn't been found to have a non-null, + * non-zero size yet. */ private class EmptyTableLocationEntry implements Comparable { @@ -239,10 +223,8 @@ private void refresh() { if (subscriptionBuffer != null) { subscriptionBuffer.processPending(); } else { - // NB: This should be hit only once per entry - subscription buffers handle all - // "isRefreshing" - // (i.e. "live") tables, regardless of whether the underlying locations support - // subscriptions. + // NB: This should be hit only once per entry - subscription buffers handle all "isRefreshing" + // (i.e. "live") tables, regardless of whether the underlying locations support subscriptions. 
location.refresh(); } } @@ -257,18 +239,17 @@ public int compareTo(@NotNull final EmptyTableLocationEntry other) { } private static final KeyedObjectKey EMPTY_TABLE_LOCATION_ENTRY_KEY = - new KeyedObjectKey.Basic() { + new KeyedObjectKey.Basic() { - @Override - public ImmutableTableLocationKey getKey( - @NotNull final EmptyTableLocationEntry emptyTableLocationEntry) { - return emptyTableLocationEntry.location.getKey(); - } - }; + @Override + public ImmutableTableLocationKey getKey( + @NotNull final EmptyTableLocationEntry emptyTableLocationEntry) { + return emptyTableLocationEntry.location.getKey(); + } + }; /** - * State-keeper for a table location and its column locations, once it's been found to have a - * positive size. + * State-keeper for a table location and its column locations, once it's been found to have a positive size. */ private class IncludedTableLocationEntry implements Comparable { @@ -288,37 +269,34 @@ private IncludedTableLocationEntry(final EmptyTableLocationEntry nonexistentEntr this.subscriptionBuffer = nonexistentEntry.subscriptionBuffer; } - private void processInitial(final Index.SequentialBuilder addedIndexBuilder, - final ReadOnlyIndex initialIndex) { + private void processInitial(final Index.SequentialBuilder addedIndexBuilder, final ReadOnlyIndex initialIndex) { Assert.neqNull(initialIndex, "initialIndex"); Assert.eqTrue(initialIndex.nonempty(), "initialIndex.nonempty()"); Assert.eqNull(indexAtLastUpdate, "indexAtLastUpdate"); - if (initialIndex - .lastKey() > RegionedColumnSource.ELEMENT_INDEX_TO_SUB_REGION_ELEMENT_INDEX_MASK) { + if (initialIndex.lastKey() > RegionedColumnSource.ELEMENT_INDEX_TO_SUB_REGION_ELEMENT_INDEX_MASK) { throw new TableDataException(String.format( - "Location %s has initial last key %#016X, larger than maximum supported key %#016X", - location, initialIndex.lastKey(), - RegionedColumnSource.ELEMENT_INDEX_TO_SUB_REGION_ELEMENT_INDEX_MASK)); + "Location %s has initial last key %#016X, larger than maximum 
supported key %#016X", + location, initialIndex.lastKey(), + RegionedColumnSource.ELEMENT_INDEX_TO_SUB_REGION_ELEMENT_INDEX_MASK)); } final long regionFirstKey = RegionedColumnSource.getFirstElementIndex(regionIndex); - initialIndex.forAllLongRanges( - (subRegionFirstKey, subRegionLastKey) -> addedIndexBuilder.appendRange( - regionFirstKey + subRegionFirstKey, regionFirstKey + subRegionLastKey)); + initialIndex.forAllLongRanges((subRegionFirstKey, subRegionLastKey) -> addedIndexBuilder + .appendRange(regionFirstKey + subRegionFirstKey, regionFirstKey + subRegionLastKey)); ReadOnlyIndex addIndexInTable = null; try { for (final ColumnDefinition columnDefinition : columnDefinitions) { // noinspection unchecked final ColumnLocationState state = new ColumnLocationState( - columnDefinition, - columnSources.get(columnDefinition.getName()), - location.getColumnLocation(columnDefinition.getName())); + columnDefinition, + columnSources.get(columnDefinition.getName()), + location.getColumnLocation(columnDefinition.getName())); columnLocationStates.add(state); state.regionAllocated(regionIndex); if (state.needToUpdateGrouping()) { - state.updateGrouping(addIndexInTable == null - ? addIndexInTable = initialIndex.shift(regionFirstKey) - : addIndexInTable); + state.updateGrouping( + addIndexInTable == null ? addIndexInTable = initialIndex.shift(regionFirstKey) + : addIndexInTable); } } } finally { @@ -330,54 +308,50 @@ private void processInitial(final Index.SequentialBuilder addedIndexBuilder, } private void pollUpdates(final Index.SequentialBuilder addedIndexBuilder) { - Assert.neqNull(subscriptionBuffer, "subscriptionBuffer"); // Effectively, this is - // asserting "isRefreshing". + Assert.neqNull(subscriptionBuffer, "subscriptionBuffer"); // Effectively, this is asserting "isRefreshing". 
if (!subscriptionBuffer.processPending()) { return; } final ReadOnlyIndex updateIndex = location.getIndex(); try { if (updateIndex == null) { - // This should be impossible - the subscription buffer transforms a transition - // to null into a pending exception + // This should be impossible - the subscription buffer transforms a transition to null into a + // pending exception throw new TableDataException( - "Location " + location + " is no longer available, data has been removed"); + "Location " + location + " is no longer available, data has been removed"); } if (!indexAtLastUpdate.subsetOf(updateIndex)) { // Bad change // noinspection ThrowableNotThrown - Assert.statementNeverExecuted("Index keys removed at location " + location - + ": " + indexAtLastUpdate.minus(updateIndex)); + Assert.statementNeverExecuted( + "Index keys removed at location " + location + ": " + indexAtLastUpdate.minus(updateIndex)); } if (indexAtLastUpdate.size() == updateIndex.size()) { // Nothing to do return; } - if (updateIndex - .lastKey() > RegionedColumnSource.ELEMENT_INDEX_TO_SUB_REGION_ELEMENT_INDEX_MASK) { + if (updateIndex.lastKey() > RegionedColumnSource.ELEMENT_INDEX_TO_SUB_REGION_ELEMENT_INDEX_MASK) { throw new TableDataException(String.format( - "Location %s has updated last key %#016X, larger than maximum supported key %#016X", - location, updateIndex.lastKey(), - RegionedColumnSource.ELEMENT_INDEX_TO_SUB_REGION_ELEMENT_INDEX_MASK)); + "Location %s has updated last key %#016X, larger than maximum supported key %#016X", + location, updateIndex.lastKey(), + RegionedColumnSource.ELEMENT_INDEX_TO_SUB_REGION_ELEMENT_INDEX_MASK)); } if (log.isDebugEnabled()) { log.debug().append("LOCATION_SIZE_CHANGE:").append(location.toString()) - .append(",FROM:").append(indexAtLastUpdate.size()) - .append(",TO:").append(updateIndex.size()).endl(); + .append(",FROM:").append(indexAtLastUpdate.size()) + .append(",TO:").append(updateIndex.size()).endl(); } try (final ReadOnlyIndex addedIndex = 
updateIndex.minus(indexAtLastUpdate)) { - final long regionFirstKey = - RegionedColumnSource.getFirstElementIndex(regionIndex); - addedIndex.forAllLongRanges( - (subRegionFirstKey, subRegionLastKey) -> addedIndexBuilder.appendRange( - regionFirstKey + subRegionFirstKey, regionFirstKey + subRegionLastKey)); + final long regionFirstKey = RegionedColumnSource.getFirstElementIndex(regionIndex); + addedIndex.forAllLongRanges((subRegionFirstKey, subRegionLastKey) -> addedIndexBuilder + .appendRange(regionFirstKey + subRegionFirstKey, regionFirstKey + subRegionLastKey)); ReadOnlyIndex addIndexInTable = null; try { for (final ColumnLocationState state : columnLocationStates) { if (state.needToUpdateGrouping()) { - state.updateGrouping(addIndexInTable == null - ? addIndexInTable = updateIndex.shift(regionFirstKey) - : addIndexInTable); + state.updateGrouping( + addIndexInTable == null ? addIndexInTable = updateIndex.shift(regionFirstKey) + : addIndexInTable); } } } finally { @@ -404,18 +378,17 @@ public int compareTo(@NotNull final IncludedTableLocationEntry other) { } private static final KeyedObjectKey INCLUDED_TABLE_LOCATION_ENTRY_KEY = - new KeyedObjectKey.Basic() { + new KeyedObjectKey.Basic() { - @Override - public ImmutableTableLocationKey getKey( - @NotNull final IncludedTableLocationEntry includedTableLocationEntry) { - return includedTableLocationEntry.location.getKey(); - } - }; + @Override + public ImmutableTableLocationKey getKey( + @NotNull final IncludedTableLocationEntry includedTableLocationEntry) { + return includedTableLocationEntry.location.getKey(); + } + }; /** - * Batches up a definition, source, and location for ease of use. Implements grouping - * maintenance. + * Batches up a definition, source, and location for ease of use. Implements grouping maintenance. 
*/ private class ColumnLocationState { @@ -424,8 +397,8 @@ private class ColumnLocationState { protected final ColumnLocation location; private ColumnLocationState(ColumnDefinition definition, - RegionedColumnSource source, - ColumnLocation location) { + RegionedColumnSource source, + ColumnLocation location) { this.definition = definition; this.source = source; this.location = location; @@ -433,7 +406,7 @@ private ColumnLocationState(ColumnDefinition definition, private void regionAllocated(final int regionIndex) { Assert.eq(regionIndex, "regionIndex", source.addRegion(definition, location), - "source.addRegion((definition, location)"); + "source.addRegion((definition, location)"); } private boolean needToUpdateGrouping() { @@ -455,8 +428,7 @@ private void updateGrouping(@NotNull final ReadOnlyIndex locationAddedIndexInTab source.setGroupingProvider(groupingProvider); } if (groupingProvider instanceof KeyRangeGroupingProvider) { - ((KeyRangeGroupingProvider) groupingProvider).addSource(location, - locationAddedIndexInTable); + ((KeyRangeGroupingProvider) groupingProvider).addSource(location, locationAddedIndexInTable); } } else if (definition.isPartitioning()) { final DeferredGroupingColumnSource partitioningColumnSource = source; @@ -466,11 +438,10 @@ private void updateGrouping(@NotNull final ReadOnlyIndex locationAddedIndexInTab partitioningColumnSource.setGroupToRange(columnPartitionToIndex); } final T columnPartitionValue = - location.getTableLocation().getKey().getPartitionValue(definition.getName()); + location.getTableLocation().getKey().getPartitionValue(definition.getName()); final Index current = columnPartitionToIndex.get(columnPartitionValue); if (current == null) { - columnPartitionToIndex.put(columnPartitionValue, - locationAddedIndexInTable.clone()); + columnPartitionToIndex.put(columnPartitionValue, locationAddedIndexInTable.clone()); } else { current.insert(locationAddedIndexInTable); } diff --git 
a/DB/src/main/java/io/deephaven/db/v2/sources/regioned/RegionedColumnSourceReferencing.java b/DB/src/main/java/io/deephaven/db/v2/sources/regioned/RegionedColumnSourceReferencing.java index dd921e83e03..bff5561bea7 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/regioned/RegionedColumnSourceReferencing.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/regioned/RegionedColumnSourceReferencing.java @@ -12,64 +12,56 @@ import javax.annotation.OverridingMethodsMustInvokeSuper; /** - * Base class for a column of {@code DATA_TYPE} which is a wrapped {@code NATIVE_DATA_TYPE}. The - * column owns the underlying native column and its resources. + * Base class for a column of {@code DATA_TYPE} which is a wrapped {@code NATIVE_DATA_TYPE}. The column owns the + * underlying native column and its resources. */ abstract class RegionedColumnSourceReferencing> - extends - RegionedColumnSourceArray> - implements ColumnRegionReferencingImpl.Converter { + extends RegionedColumnSourceArray> + implements ColumnRegionReferencingImpl.Converter { @FunctionalInterface interface NativeSourceCreator> { NativeColumnSource create( - RegionedColumnSourceBase> outerSource); + RegionedColumnSourceBase> outerSource); } @NotNull private final NativeColumnSource nativeSource; RegionedColumnSourceReferencing(@NotNull final NATIVE_REGION_TYPE nullRegion, - @NotNull Class type, - @NotNull NativeSourceCreator nativeSourceCreator) { - super(new ColumnRegionReferencing.Null<>(nullRegion), type, - DeferredColumnRegionReferencing::new); + @NotNull Class type, + @NotNull NativeSourceCreator nativeSourceCreator) { + super(new ColumnRegionReferencing.Null<>(nullRegion), type, DeferredColumnRegionReferencing::new); nativeSource = nativeSourceCreator.create(this); } @Override @OverridingMethodsMustInvokeSuper public boolean allowsReinterpret( - @NotNull final Class alternateDataType) { - return nativeSource.getType() == alternateDataType - || nativeSource.allowsReinterpret(alternateDataType); + 
@NotNull final Class alternateDataType) { + return nativeSource.getType() == alternateDataType || nativeSource.allowsReinterpret(alternateDataType); } @Override @OverridingMethodsMustInvokeSuper protected ColumnSource doReinterpret( - @NotNull final Class alternateDataType) { + @NotNull final Class alternateDataType) { // noinspection unchecked - return nativeSource.getType() == alternateDataType - ? (ColumnSource) nativeSource - : nativeSource.reinterpret(alternateDataType); + return nativeSource.getType() == alternateDataType ? (ColumnSource) nativeSource + : nativeSource.reinterpret(alternateDataType); } @Override @Nullable - public ColumnRegionReferencing makeRegion( - @NotNull ColumnDefinition columnDefinition, @NotNull ColumnLocation columnLocation, - int regionIndex) { - NATIVE_REGION_TYPE nativeRegionType = - nativeSource.makeRegion(columnDefinition, columnLocation, regionIndex); - return nativeRegionType == null ? null - : new ColumnRegionReferencingImpl<>(nativeRegionType); + public ColumnRegionReferencing makeRegion(@NotNull ColumnDefinition columnDefinition, + @NotNull ColumnLocation columnLocation, int regionIndex) { + NATIVE_REGION_TYPE nativeRegionType = nativeSource.makeRegion(columnDefinition, columnLocation, regionIndex); + return nativeRegionType == null ? 
null : new ColumnRegionReferencingImpl<>(nativeRegionType); } - final ChunkSource.FillContext makeFillContext(ColumnRegionReferencing.Converter converter, - int chunkCapacity, SharedContext sharedContext) { - return new ColumnRegionReferencingImpl.FillContext<>(nativeSource, converter, chunkCapacity, - sharedContext); + final ChunkSource.FillContext makeFillContext(ColumnRegionReferencing.Converter converter, int chunkCapacity, + SharedContext sharedContext) { + return new ColumnRegionReferencingImpl.FillContext<>(nativeSource, converter, chunkCapacity, sharedContext); } @Override @@ -78,12 +70,12 @@ public FillContext makeFillContext(int chunkCapacity, SharedContext sharedContex } abstract static class NativeColumnSource> - extends - RegionedColumnSourceInner> - implements MakeRegion { + extends + RegionedColumnSourceInner> + implements MakeRegion { NativeColumnSource(@NotNull Class type, - RegionedColumnSourceBase> outerColumnSource) { + RegionedColumnSourceBase> outerColumnSource) { super(type, outerColumnSource); } @@ -100,13 +92,13 @@ public NATIVE_REGION_TYPE getRegion(int regionIndex) { @Override public final boolean allowsReinterpret( - @NotNull final Class alternateDataType) { + @NotNull final Class alternateDataType) { return getOuterColumnSource().getType() == alternateDataType; } @Override protected final ColumnSource doReinterpret( - @NotNull final Class alternateDataType) { + @NotNull final Class alternateDataType) { // noinspection unchecked return (ColumnSource) getOuterColumnSource(); } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/regioned/RegionedColumnSourceWithDictionary.java b/DB/src/main/java/io/deephaven/db/v2/sources/regioned/RegionedColumnSourceWithDictionary.java index 25750f9afbf..dfc165457fe 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/regioned/RegionedColumnSourceWithDictionary.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/regioned/RegionedColumnSourceWithDictionary.java @@ -29,31 +29,30 @@ import static 
io.deephaven.db.v2.utils.ReadOnlyIndex.NULL_KEY; /** - * {@link RegionedColumnSourceObject} with support for dictionary access via - * {@link SymbolTableSource} methods. Note that it may not be the case that all values are stored as - * dictionary offsets. See {@link #hasSymbolTable(ReadOnlyIndex)}. + * {@link RegionedColumnSourceObject} with support for dictionary access via {@link SymbolTableSource} methods. Note + * that it may not be the case that all values are stored as dictionary offsets. See + * {@link #hasSymbolTable(ReadOnlyIndex)}. */ class RegionedColumnSourceWithDictionary - extends RegionedColumnSourceObject.AsValues - implements SymbolTableSource { + extends RegionedColumnSourceObject.AsValues + implements SymbolTableSource { RegionedColumnSourceWithDictionary(@NotNull final Class dataType, - @Nullable final Class componentType) { + @Nullable final Class componentType) { super(dataType, componentType); } @Override - public boolean allowsReinterpret( - @NotNull Class alternateDataType) { + public boolean allowsReinterpret(@NotNull Class alternateDataType) { return alternateDataType == long.class || super.allowsReinterpret(alternateDataType); } @Override protected ColumnSource doReinterpret( - @NotNull Class alternateDataType) { + @NotNull Class alternateDataType) { // noinspection unchecked return alternateDataType == long.class ? (ColumnSource) new AsLong() - : super.doReinterpret(alternateDataType); + : super.doReinterpret(alternateDataType); } @Override @@ -62,8 +61,8 @@ public void releaseCachedResources() { } private final class AsLong - extends RegionedColumnSourceBase> - implements ColumnSourceGetDefaults.ForLong { + extends RegionedColumnSourceBase> + implements ColumnSourceGetDefaults.ForLong { private final ColumnRegionLong nullRegion; private volatile ColumnRegionLong[] wrapperRegions; @@ -77,15 +76,13 @@ private AsLong() { @Override public long getLong(final long elementIndex) { - return (elementIndex == NULL_KEY ? 
getNullRegion() : lookupRegion(elementIndex)) - .getLong(elementIndex); + return (elementIndex == NULL_KEY ? getNullRegion() : lookupRegion(elementIndex)).getLong(elementIndex); } @Override public int addRegion(@NotNull final ColumnDefinition columnDefinition, - @NotNull final ColumnLocation columnLocation) { - return RegionedColumnSourceWithDictionary.this.addRegion(columnDefinition, - columnLocation); + @NotNull final ColumnLocation columnLocation) { + return RegionedColumnSourceWithDictionary.this.addRegion(columnDefinition, columnLocation); } @Override @@ -107,39 +104,38 @@ public int getRegionCount() { @Override public ColumnRegionLong getRegion(final int regionIndex) { final ColumnRegionObject sourceRegion = - RegionedColumnSourceWithDictionary.this.getRegion(regionIndex); + RegionedColumnSourceWithDictionary.this.getRegion(regionIndex); if (sourceRegion instanceof ColumnRegion.Null) { return nullRegion; } ColumnRegionLong[] localWrappers; ColumnRegionLong wrapper; if ((localWrappers = wrapperRegions).length > regionIndex - && (wrapper = localWrappers[regionIndex]) != null) { + && (wrapper = localWrappers[regionIndex]) != null) { return wrapper; } synchronized (this) { if ((localWrappers = wrapperRegions).length > regionIndex - && (wrapper = localWrappers[regionIndex]) != null) { + && (wrapper = localWrappers[regionIndex]) != null) { return wrapper; } if (localWrappers.length <= regionIndex) { - wrapperRegions = localWrappers = Arrays.copyOf(localWrappers, - Math.min(regionIndex + 1 << 1, getRegionCount())); + wrapperRegions = localWrappers = + Arrays.copyOf(localWrappers, Math.min(regionIndex + 1 << 1, getRegionCount())); } - return localWrappers[regionIndex] = ColumnRegionObject.DictionaryKeysWrapper - .create(parameters(), regionIndex, sourceRegion); + return localWrappers[regionIndex] = + ColumnRegionObject.DictionaryKeysWrapper.create(parameters(), regionIndex, sourceRegion); } } @Override - public boolean allowsReinterpret( - @NotNull Class 
alternateDataType) { + public boolean allowsReinterpret(@NotNull Class alternateDataType) { return alternateDataType == RegionedColumnSourceWithDictionary.this.getType(); } @Override protected ColumnSource doReinterpret( - @NotNull Class alternateDataType) { + @NotNull Class alternateDataType) { // noinspection unchecked return (ColumnSource) RegionedColumnSourceWithDictionary.this; } @@ -148,38 +144,34 @@ protected ColumnSource doReinterpret( @OverridingMethodsMustInvokeSuper public void releaseCachedResources() { super.releaseCachedResources(); - // We are a reinterpreted column of RegionedColumnSourceObjectReferencing.this, so if - // we're asked to release + // We are a reinterpreted column of RegionedColumnSourceObjectReferencing.this, so if we're asked to release // our resources, release the real resources in the underlying column. RegionedColumnSourceWithDictionary.this.releaseCachedResources(); final ColumnRegionLong[] localWrappers = wrapperRegions; // noinspection unchecked wrapperRegions = new ColumnRegionLong[0]; - Arrays.stream(localWrappers).filter(Objects::nonNull) - .forEach(Releasable::releaseCachedResources); + Arrays.stream(localWrappers).filter(Objects::nonNull).forEach(Releasable::releaseCachedResources); } } private final class AsDictionary - extends RegionedColumnSourceBase> - implements ColumnSourceGetDefaults.ForObject { + extends RegionedColumnSourceBase> + implements ColumnSourceGetDefaults.ForObject { private AsDictionary() { super(RegionedColumnSourceWithDictionary.this.getType(), - RegionedColumnSourceWithDictionary.this.getComponentType()); + RegionedColumnSourceWithDictionary.this.getComponentType()); } @Override public DATA_TYPE get(final long elementIndex) { - return (elementIndex == NULL_KEY ? getNullRegion() : lookupRegion(elementIndex)) - .getObject(elementIndex); + return (elementIndex == NULL_KEY ? 
getNullRegion() : lookupRegion(elementIndex)).getObject(elementIndex); } @Override public int addRegion(@NotNull final ColumnDefinition columnDefinition, - @NotNull final ColumnLocation columnLocation) { - return RegionedColumnSourceWithDictionary.this.addRegion(columnDefinition, - columnLocation); + @NotNull final ColumnLocation columnLocation) { + return RegionedColumnSourceWithDictionary.this.addRegion(columnDefinition, columnLocation); } @Override @@ -200,12 +192,9 @@ public int getRegionCount() { @Override public ColumnRegionObject getRegion(final int regionIndex) { - // ColumnRegionObject implementations are expected to cache the result of - // getDictionaryValuesRegion(), - // so it's fine to call more than once and avoid extra backing storage in the column - // source. - return RegionedColumnSourceWithDictionary.this.getRegion(regionIndex) - .getDictionaryValuesRegion(); + // ColumnRegionObject implementations are expected to cache the result of getDictionaryValuesRegion(), + // so it's fine to call more than once and avoid extra backing storage in the column source. 
+ return RegionedColumnSourceWithDictionary.this.getRegion(regionIndex).getDictionaryValuesRegion(); } } @@ -219,32 +208,28 @@ public boolean hasSymbolTable(@NotNull final ReadOnlyIndex sourceIndex) { try (final ReadOnlyIndex.SearchIterator keysToVisit = sourceIndex.searchIterator()) { keysToVisit.nextLong(); // Safe, since sourceIndex must be non-empty do { - result = - lookupRegion(keysToVisit.currentValue()).supportsDictionaryFormat(keysToVisit); + result = lookupRegion(keysToVisit.currentValue()).supportsDictionaryFormat(keysToVisit); } while (result == RegionVisitResult.CONTINUE); } return result != RegionVisitResult.FAILED; } @Override - public QueryTable getStaticSymbolTable(@NotNull ReadOnlyIndex sourceIndex, - boolean useLookupCaching) { + public QueryTable getStaticSymbolTable(@NotNull ReadOnlyIndex sourceIndex, boolean useLookupCaching) { // NB: We assume that hasSymbolTable has been tested by the caller final RegionedColumnSourceBase> dictionaryColumn = - new AsDictionary(); + new AsDictionary(); final Index symbolTableIndex; if (sourceIndex.empty()) { symbolTableIndex = Index.FACTORY.getEmptyIndex(); } else { - final Index.SequentialBuilder symbolTableIndexBuilder = - Index.FACTORY.getSequentialBuilder(); + final Index.SequentialBuilder symbolTableIndexBuilder = Index.FACTORY.getSequentialBuilder(); try (final Index.SearchIterator keysToVisit = sourceIndex.searchIterator()) { keysToVisit.nextLong(); // Safe, since sourceIndex must be non-empty do { - dictionaryColumn.lookupRegion(keysToVisit.currentValue()) - .gatherDictionaryValuesIndex(keysToVisit, OrderedKeys.Iterator.EMPTY, - symbolTableIndexBuilder); + dictionaryColumn.lookupRegion(keysToVisit.currentValue()).gatherDictionaryValuesIndex(keysToVisit, + OrderedKeys.Iterator.EMPTY, symbolTableIndexBuilder); } while (keysToVisit.hasNext()); } symbolTableIndex = symbolTableIndexBuilder.getIndex(); @@ -258,40 +243,35 @@ public QueryTable getStaticSymbolTable(@NotNull ReadOnlyIndex sourceIndex, } 
@Override - public final Table getSymbolTable(@NotNull final QueryTable sourceTable, - final boolean useLookupCaching) { - // NB: We assume that hasSymbolTable has been tested by the caller, and that for refreshing - // tables it will + public final Table getSymbolTable(@NotNull final QueryTable sourceTable, final boolean useLookupCaching) { + // NB: We assume that hasSymbolTable has been tested by the caller, and that for refreshing tables it will // remain true. - return sourceTable.memoizeResult(MemoizedOperationKey.symbolTable(this, useLookupCaching), - () -> { - final String description = "getSymbolTable(" + sourceTable.getDescription() + ", " - + useLookupCaching + ')'; - return QueryPerformanceRecorder.withNugget(description, sourceTable.size(), () -> { - final ShiftAwareSwapListener swapListener = + return sourceTable.memoizeResult(MemoizedOperationKey.symbolTable(this, useLookupCaching), () -> { + final String description = "getSymbolTable(" + sourceTable.getDescription() + ", " + useLookupCaching + ')'; + return QueryPerformanceRecorder.withNugget(description, sourceTable.size(), () -> { + final ShiftAwareSwapListener swapListener = sourceTable.createSwapListenerIfRefreshing(ShiftAwareSwapListener::new); - final Mutable
    result = new MutableObject<>(); - sourceTable.initializeWithSnapshot(description, swapListener, + final Mutable
    result = new MutableObject<>(); + sourceTable.initializeWithSnapshot(description, swapListener, (final boolean usePrev, final long beforeClockValue) -> { final QueryTable symbolTable; if (swapListener == null) { - symbolTable = - getStaticSymbolTable(sourceTable.getIndex(), useLookupCaching); + symbolTable = getStaticSymbolTable(sourceTable.getIndex(), useLookupCaching); } else { symbolTable = getStaticSymbolTable( - usePrev ? sourceTable.getIndex().getPrevIndex() - : sourceTable.getIndex(), - useLookupCaching); - swapListener.setListenerAndResult(new SymbolTableUpdateListener( - description, sourceTable, symbolTable), symbolTable); + usePrev ? sourceTable.getIndex().getPrevIndex() : sourceTable.getIndex(), + useLookupCaching); + swapListener.setListenerAndResult( + new SymbolTableUpdateListener(description, sourceTable, symbolTable), + symbolTable); symbolTable.addParentReference(swapListener); } result.setValue(symbolTable); return true; }); - return result.getValue(); - }); + return result.getValue(); }); + }); } private final class SymbolTableUpdateListener extends BaseTable.ShiftAwareListenerImpl { @@ -299,8 +279,8 @@ private final class SymbolTableUpdateListener extends BaseTable.ShiftAwareListen private final BaseTable symbolTable; private final ModifiedColumnSet emptyModifiedColumns; - private SymbolTableUpdateListener(@NotNull final String description, - @NotNull final DynamicTable sourceTable, @NotNull final BaseTable symbolTable) { + private SymbolTableUpdateListener(@NotNull final String description, @NotNull final DynamicTable sourceTable, + @NotNull final BaseTable symbolTable) { super(description, sourceTable, symbolTable); this.symbolTable = symbolTable; this.emptyModifiedColumns = symbolTable.newModifiedColumnSet(); @@ -308,43 +288,36 @@ private SymbolTableUpdateListener(@NotNull final String description, @Override public void onUpdate(@NotNull final Update upstream) { - // TODO-RWC: Update and use - // 
io.deephaven.db.tables.verify.TableAssertions.assertAppendOnly(java.lang.String, + // TODO-RWC: Update and use io.deephaven.db.tables.verify.TableAssertions.assertAppendOnly(java.lang.String, // io.deephaven.db.tables.Table) ? - if (upstream.removed.nonempty() || upstream.modified.nonempty() - || upstream.shifted.nonempty()) { - throw new IllegalStateException( - "Source table for a regioned symbol table should be add-only, instead " - + "removed=" + upstream.removed + ", modified=" + upstream.modified - + ", shifted=" + upstream.shifted); + if (upstream.removed.nonempty() || upstream.modified.nonempty() || upstream.shifted.nonempty()) { + throw new IllegalStateException("Source table for a regioned symbol table should be add-only, instead " + + "removed=" + upstream.removed + ", modified=" + upstream.modified + ", shifted=" + + upstream.shifted); } if (upstream.added.empty()) { return; } - final Index.SequentialBuilder symbolTableAddedBuilder = - Index.FACTORY.getSequentialBuilder(); + final Index.SequentialBuilder symbolTableAddedBuilder = Index.FACTORY.getSequentialBuilder(); // noinspection unchecked final RegionedColumnSourceBase> dictionaryColumn = - (RegionedColumnSourceBase>) symbolTable - .getColumnSource(SymbolTableSource.SYMBOL_COLUMN_NAME); + (RegionedColumnSourceBase>) symbolTable + .getColumnSource(SymbolTableSource.SYMBOL_COLUMN_NAME); try (final Index.SearchIterator keysToVisit = upstream.added.searchIterator(); - final OrderedKeys.Iterator knownKeys = - symbolTable.getIndex().getOrderedKeysIterator()) { + final OrderedKeys.Iterator knownKeys = symbolTable.getIndex().getOrderedKeysIterator()) { keysToVisit.nextLong(); // Safe, since sourceIndex must be non-empty do { - dictionaryColumn.lookupRegion(keysToVisit.currentValue()) - .gatherDictionaryValuesIndex(keysToVisit, knownKeys, - symbolTableAddedBuilder); + dictionaryColumn.lookupRegion(keysToVisit.currentValue()).gatherDictionaryValuesIndex(keysToVisit, + knownKeys, symbolTableAddedBuilder); } 
while (keysToVisit.hasNext()); } final Index symbolTableAdded = symbolTableAddedBuilder.getIndex(); if (symbolTableAdded.nonempty()) { symbolTable.getIndex().insert(symbolTableAdded); - symbolTable - .notifyListeners(new Update(symbolTableAdded, Index.FACTORY.getEmptyIndex(), + symbolTable.notifyListeners(new Update(symbolTableAdded, Index.FACTORY.getEmptyIndex(), Index.FACTORY.getEmptyIndex(), IndexShiftData.EMPTY, emptyModifiedColumns)); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/regioned/RegionedPageStore.java b/DB/src/main/java/io/deephaven/db/v2/sources/regioned/RegionedPageStore.java index 0389267f29f..dfc6343f016 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/regioned/RegionedPageStore.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/regioned/RegionedPageStore.java @@ -10,7 +10,7 @@ import org.jetbrains.annotations.NotNull; public interface RegionedPageStore> - extends PageStore { + extends PageStore { /** * @return The parameters object that describes this regioned page store @@ -27,8 +27,8 @@ default long mask() { } /** - * @return The mask that should be applied to {@link io.deephaven.db.v2.utils.OrderedKeys} - * indices when calculating their address within a region + * @return The mask that should be applied to {@link io.deephaven.db.v2.utils.OrderedKeys} indices when calculating + * their address within a region */ @FinalDefault default long regionMask() { @@ -87,8 +87,7 @@ default REGION_TYPE getPageContaining(final FillContext fillContext, final long } @Override - default FillContext makeFillContext(final int chunkCapacity, - final SharedContext sharedContext) { + default FillContext makeFillContext(final int chunkCapacity, final SharedContext sharedContext) { return new RegionContextHolder(); } @@ -103,29 +102,25 @@ final class Parameters { public final long regionMask; public final int regionMaskNumBits; - public Parameters(final long pageMask, final int maximumRegionCount, - final long maximumRegionSize) { + 
public Parameters(final long pageMask, final int maximumRegionCount, final long maximumRegionSize) { this.pageMask = validateMask(pageMask, "page"); this.maximumRegionCount = Require.geqZero(maximumRegionCount, "maximum region count"); this.maximumRegionSize = Require.geqZero(maximumRegionSize, "maximum region size"); - final int regionNumBits = - maximumRegionCount == 0 ? 0 : MathUtil.ceilLog2(maximumRegionCount); + final int regionNumBits = maximumRegionCount == 0 ? 0 : MathUtil.ceilLog2(maximumRegionCount); regionMask = validateMask(pageMask >>> regionNumBits, "region"); regionMaskNumBits = MathUtil.ceilLog2(regionMask); - final long maxRegionSizeNumBits = - maximumRegionSize == 0 ? 0 : MathUtil.ceilLog2(maximumRegionSize); + final long maxRegionSizeNumBits = maximumRegionSize == 0 ? 0 : MathUtil.ceilLog2(maximumRegionSize); if (maxRegionSizeNumBits > regionMaskNumBits) { throw new IllegalArgumentException(String.format( - "Maximum region size %,d is too large to access with page mask %#016X and maximum region count %,d", - maximumRegionSize, pageMask, maximumRegionCount)); + "Maximum region size %,d is too large to access with page mask %#016X and maximum region count %,d", + maximumRegionSize, pageMask, maximumRegionCount)); } } private static long validateMask(final long mask, final String name) { if (mask < 0 || (Long.SIZE - Long.numberOfLeadingZeros(mask)) != Long.bitCount(mask)) { - throw new IllegalArgumentException( - String.format("Invalid %s mask %#016X", name, mask)); + throw new IllegalArgumentException(String.format("Invalid %s mask %#016X", name, mask)); } return mask; } @@ -135,25 +130,23 @@ private static long validateMask(final long mask, final String name) { * A regioned page store for use when the full set of regions and their sizes are known. 
*/ abstract class Static> - implements RegionedPageStore { + implements RegionedPageStore { private final Parameters parameters; private final REGION_TYPE[] regions; /** * @param parameters Mask and shift parameters - * @param regions Array of all regions in this page store. Array becomes property of the - * page store. + * @param regions Array of all regions in this page store. Array becomes property of the page store. */ public Static(@NotNull final Parameters parameters, - @NotNull final REGION_TYPE[] regions) { + @NotNull final REGION_TYPE[] regions) { this.parameters = parameters; this.regions = Require.elementsNeqNull(regions, "regions"); Require.leq(regions.length, "regions.length", parameters.maximumRegionCount, - "parameters.maximumRegionCount"); + "parameters.maximumRegionCount"); for (final REGION_TYPE region : regions) { - Require.eq(region.mask(), "region.mask()", parameters.regionMask, - "parameters.regionMask"); + Require.eq(region.mask(), "region.mask()", parameters.regionMask, "parameters.regionMask"); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/regioned/RegionedTableComponentFactory.java b/DB/src/main/java/io/deephaven/db/v2/sources/regioned/RegionedTableComponentFactory.java index 10488cf17ea..161cdb96689 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/regioned/RegionedTableComponentFactory.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/regioned/RegionedTableComponentFactory.java @@ -17,6 +17,6 @@ public interface RegionedTableComponentFactory extends SourceTableComponentFactory { RegionedColumnSource createRegionedColumnSource( - ColumnDefinition columnDefinition, - ColumnToCodecMappings codecMappings); + ColumnDefinition columnDefinition, + ColumnToCodecMappings codecMappings); } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/regioned/RegionedTableComponentFactoryImpl.java b/DB/src/main/java/io/deephaven/db/v2/sources/regioned/RegionedTableComponentFactoryImpl.java index afa3d4544ff..e9283760f8f 
100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/regioned/RegionedTableComponentFactoryImpl.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/regioned/RegionedTableComponentFactoryImpl.java @@ -34,27 +34,23 @@ public class RegionedTableComponentFactoryImpl implements RegionedTableComponent typeToSupplier.put(Short.class, RegionedColumnSourceShort.AsValues::new); typeToSupplier.put(Boolean.class, RegionedColumnSourceBoolean::new); typeToSupplier.put(DBDateTime.class, RegionedColumnSourceDBDateTime::new); - SIMPLE_DATA_TYPE_TO_REGIONED_COLUMN_SOURCE_SUPPLIER = - Collections.unmodifiableMap(typeToSupplier); + SIMPLE_DATA_TYPE_TO_REGIONED_COLUMN_SOURCE_SUPPLIER = Collections.unmodifiableMap(typeToSupplier); } - public static final RegionedTableComponentFactory INSTANCE = - new RegionedTableComponentFactoryImpl(); + public static final RegionedTableComponentFactory INSTANCE = new RegionedTableComponentFactoryImpl(); private RegionedTableComponentFactoryImpl() {} @Override public ColumnSourceManager createColumnSourceManager( - final boolean isRefreshing, - @NotNull final ColumnToCodecMappings codecMappings, - @NotNull final ColumnDefinition... columnDefinitions) { - return new RegionedColumnSourceManager(isRefreshing, this, codecMappings, - columnDefinitions); + final boolean isRefreshing, + @NotNull final ColumnToCodecMappings codecMappings, + @NotNull final ColumnDefinition... columnDefinitions) { + return new RegionedColumnSourceManager(isRefreshing, this, codecMappings, columnDefinitions); } /** - * Create a new {@link RegionedColumnSource} appropriate to implement the supplied - * {@link ColumnDefinition}. + * Create a new {@link RegionedColumnSource} appropriate to implement the supplied {@link ColumnDefinition}. 
* * @param columnDefinition The column definition * @param The data type of the column @@ -63,8 +59,8 @@ public ColumnSourceManager createColumnSourceManager( @SuppressWarnings("unchecked") @Override public RegionedColumnSource createRegionedColumnSource( - @NotNull final ColumnDefinition columnDefinition, - @NotNull final ColumnToCodecMappings codecMappings) { + @NotNull final ColumnDefinition columnDefinition, + @NotNull final ColumnToCodecMappings codecMappings) { Class dataType = TypeUtils.getBoxedType(columnDefinition.getDataType()); if (columnDefinition.isPartitioning()) { @@ -72,7 +68,7 @@ public RegionedColumnSource createRegionedColumnSource( } final Supplier> simpleImplementationSupplier = - SIMPLE_DATA_TYPE_TO_REGIONED_COLUMN_SOURCE_SUPPLIER.get(dataType); + SIMPLE_DATA_TYPE_TO_REGIONED_COLUMN_SOURCE_SUPPLIER.get(dataType); if (simpleImplementationSupplier != null) { return (RegionedColumnSource) simpleImplementationSupplier.get(); } @@ -81,13 +77,11 @@ public RegionedColumnSource createRegionedColumnSource( if (CharSequence.class.isAssignableFrom(dataType)) { return new RegionedColumnSourceWithDictionary<>(dataType, null); } else { - return new RegionedColumnSourceObject.AsValues<>(dataType, - columnDefinition.getComponentType()); + return new RegionedColumnSourceObject.AsValues<>(dataType, columnDefinition.getComponentType()); } } catch (IllegalArgumentException except) { throw new UnsupportedOperationException( - "Can't create column for " + dataType + " in column definition " + columnDefinition, - except); + "Can't create column for " + dataType + " in column definition " + columnDefinition, except); } } } diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/regioned/ReplicateRegionsAndRegionedSources.java b/DB/src/main/java/io/deephaven/db/v2/sources/regioned/ReplicateRegionsAndRegionedSources.java index b5205866e4f..fa46f6c8e4c 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/regioned/ReplicateRegionsAndRegionedSources.java +++ 
b/DB/src/main/java/io/deephaven/db/v2/sources/regioned/ReplicateRegionsAndRegionedSources.java @@ -5,8 +5,8 @@ import java.io.IOException; /** - * Code generation for basic {@link RegionedColumnSource} implementations as well as well as the - * primary region interfaces for some primitive types. + * Code generation for basic {@link RegionedColumnSource} implementations as well as well as the primary region + * interfaces for some primitive types. */ public class ReplicateRegionsAndRegionedSources extends ReplicatePrimitiveCode { diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/regioned/SymbolTableSource.java b/DB/src/main/java/io/deephaven/db/v2/sources/regioned/SymbolTableSource.java index 202178a10dc..d308a88b574 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/regioned/SymbolTableSource.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/regioned/SymbolTableSource.java @@ -9,13 +9,11 @@ /** *

    - * Interface for {@link io.deephaven.db.v2.sources.ColumnSource}s that can provide a {@link Table} - * view of their symbol tables, providing a many:1 or 1:1 mapping of unique {@code long} identifiers - * to the symbol values in this source. + * Interface for {@link io.deephaven.db.v2.sources.ColumnSource}s that can provide a {@link Table} view of their symbol + * tables, providing a many:1 or 1:1 mapping of unique {@code long} identifiers to the symbol values in this source. *

    - * Such sources are also expected to be reinterpretable - * ({@link ColumnSource#allowsReinterpret(Class)}) as {@code long} {@link ColumnSource}s of the same - * identifiers. + * Such sources are also expected to be reinterpretable ({@link ColumnSource#allowsReinterpret(Class)}) as {@code long} + * {@link ColumnSource}s of the same identifiers. */ public interface SymbolTableSource extends ColumnSource { @@ -25,44 +23,41 @@ public interface SymbolTableSource extends ColumnSource - * Get a static {@link Table} view of this SymbolTableSource's symbol table, providing a many:1 - * or 1:1 mapping of unique {@code long} identifiers to the symbol values in this source. + * Get a static {@link Table} view of this SymbolTableSource's symbol table, providing a many:1 or 1:1 mapping of + * unique {@code long} identifiers to the symbol values in this source. * - * @param sourceIndex The {@link ReadOnlyIndex} whose keys must be mappable via the result - * {@link Table}'s identifier column - * @param useLookupCaching Hint whether symbol lookups performed to generate the symbol table - * should apply caching. Implementations may ignore this hint. + * @param sourceIndex The {@link ReadOnlyIndex} whose keys must be mappable via the result {@link Table}'s + * identifier column + * @param useLookupCaching Hint whether symbol lookups performed to generate the symbol table should apply caching. + * Implementations may ignore this hint. * @return The symbol table */ Table getStaticSymbolTable(@NotNull ReadOnlyIndex sourceIndex, boolean useLookupCaching); /** *

    - * Get a {@link Table} view of this SymbolTableSource's symbol table, providing a many:1 or 1:1 - * mapping of unique {@code long} identifiers to the symbol values in this source. + * Get a {@link Table} view of this SymbolTableSource's symbol table, providing a many:1 or 1:1 mapping of unique + * {@code long} identifiers to the symbol values in this source. * *

    - * The result will be refreshing if {@code table} is a refreshing - * {@link io.deephaven.db.v2.DynamicTable}. + * The result will be refreshing if {@code table} is a refreshing {@link io.deephaven.db.v2.DynamicTable}. * - * @param sourceTable The {@link QueryTable} whose {@link Index} keys must be mappable via the - * result {@link Table}'s identifier column - * @param useLookupCaching Hint whether symbol lookups performed to generate the symbol table - * should apply caching. Implementations may ignore this hint. + * @param sourceTable The {@link QueryTable} whose {@link Index} keys must be mappable via the result + * {@link Table}'s identifier column + * @param useLookupCaching Hint whether symbol lookups performed to generate the symbol table should apply caching. + * Implementations may ignore this hint. * @return The symbol table */ Table getSymbolTable(@NotNull QueryTable sourceTable, boolean useLookupCaching); diff --git a/DB/src/main/java/io/deephaven/db/v2/sources/regioned/decoder/EncodedStringDecoder.java b/DB/src/main/java/io/deephaven/db/v2/sources/regioned/decoder/EncodedStringDecoder.java index d89ae4e7232..7c5332987a6 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/regioned/decoder/EncodedStringDecoder.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/regioned/decoder/EncodedStringDecoder.java @@ -6,8 +6,7 @@ import io.deephaven.util.codec.ObjectDecoder; import org.jetbrains.annotations.NotNull; -public class EncodedStringDecoder - implements ObjectDecoder { +public class EncodedStringDecoder implements ObjectDecoder { private final StringCache cache; private final EncodingInfo encodingInfo; @@ -27,8 +26,7 @@ public final int expectedObjectWidth() { } @Override - public final STRING_LIKE_TYPE decode(@NotNull final byte[] data, final int offset, - final int length) { + public final STRING_LIKE_TYPE decode(@NotNull final byte[] data, final int offset, final int length) { if (length == 0) { return null; } diff --git 
a/DB/src/main/java/io/deephaven/db/v2/sources/regioned/decoder/SimpleStringDecoder.java b/DB/src/main/java/io/deephaven/db/v2/sources/regioned/decoder/SimpleStringDecoder.java index 9dab73da89c..7eae3b03c56 100644 --- a/DB/src/main/java/io/deephaven/db/v2/sources/regioned/decoder/SimpleStringDecoder.java +++ b/DB/src/main/java/io/deephaven/db/v2/sources/regioned/decoder/SimpleStringDecoder.java @@ -6,10 +6,9 @@ import io.deephaven.util.codec.ObjectDecoder; import org.jetbrains.annotations.NotNull; -public class SimpleStringDecoder - implements ObjectDecoder { +public class SimpleStringDecoder implements ObjectDecoder { private static final ThreadLocal DECODER_ADAPTER = - ThreadLocal.withInitial(ByteArrayCharSequenceAdapterImpl::new); + ThreadLocal.withInitial(ByteArrayCharSequenceAdapterImpl::new); private final StringCache cache; @@ -27,19 +26,16 @@ public final int expectedObjectWidth() { } @Override - public final STRING_LIKE_TYPE decode(@NotNull final byte[] data, final int offset, - final int length) { + public final STRING_LIKE_TYPE decode(@NotNull final byte[] data, final int offset, final int length) { if (length == 0) { return null; } if (length == 1 && data[offset] == 0) { return cache.getEmptyString(); } - // NB: Because the StringCache implementations in use convert bytes to chars 1:1 (with a - // 0xFF mask), we're + // NB: Because the StringCache implementations in use convert bytes to chars 1:1 (with a 0xFF mask), we're // effectively using an ISO-8859-1 decoder. - // We could probably move towards StringCaches with configurable Charsets for - // encoding/decoding directly + // We could probably move towards StringCaches with configurable Charsets for encoding/decoding directly // to/from ByteBuffers, but that's a step for later. 
final ByteArrayCharSequenceAdapterImpl adapter = DECODER_ADAPTER.get(); final STRING_LIKE_TYPE result = cache.getCachedString(adapter.set(data, offset, length)); diff --git a/DB/src/main/java/io/deephaven/db/v2/ssa/ChunkSsaStamp.java b/DB/src/main/java/io/deephaven/db/v2/ssa/ChunkSsaStamp.java index bcfab1823d7..36bd795825d 100644 --- a/DB/src/main/java/io/deephaven/db/v2/ssa/ChunkSsaStamp.java +++ b/DB/src/main/java/io/deephaven/db/v2/ssa/ChunkSsaStamp.java @@ -55,26 +55,24 @@ static ChunkSsaStamp make(ChunkType type, boolean reverse) { } } - void processEntry(Chunk leftStampValues, Chunk leftStampKeys, - SegmentedSortedArray ssa, WritableLongChunk rightKeysForLeft, - boolean disallowExactMatch); + void processEntry(Chunk leftStampValues, Chunk leftStampKeys, SegmentedSortedArray ssa, + WritableLongChunk rightKeysForLeft, boolean disallowExactMatch); void processRemovals(Chunk leftStampValues, LongChunk leftStampKeys, - Chunk rightStampChunk, LongChunk rightKeys, - WritableLongChunk priorRedirections, RedirectionIndex redirectionIndex, - Index.RandomBuilder modifiedBuilder, boolean disallowExactMatch); + Chunk rightStampChunk, LongChunk rightKeys, + WritableLongChunk priorRedirections, RedirectionIndex redirectionIndex, + Index.RandomBuilder modifiedBuilder, boolean disallowExactMatch); void processInsertion(Chunk leftStampValues, LongChunk leftStampKeys, - Chunk rightStampChunk, LongChunk rightKeys, - Chunk nextRightValue, RedirectionIndex redirectionIndex, - Index.RandomBuilder modifiedBuilder, boolean endsWithLastValue, boolean disallowExactMatch); + Chunk rightStampChunk, LongChunk rightKeys, Chunk nextRightValue, + RedirectionIndex redirectionIndex, Index.RandomBuilder modifiedBuilder, boolean endsWithLastValue, + boolean disallowExactMatch); int findModified(int first, Chunk leftStampValues, LongChunk leftStampKeys, - RedirectionIndex redirectionIndex, Chunk rightStampChunk, - LongChunk rightStampIndices, Index.RandomBuilder modifiedBuilder, - boolean 
disallowExactMatch); + RedirectionIndex redirectionIndex, Chunk rightStampChunk, + LongChunk rightStampIndices, Index.RandomBuilder modifiedBuilder, boolean disallowExactMatch); void applyShift(Chunk leftStampValues, LongChunk leftStampKeys, - Chunk rightStampChunk, LongChunk rightStampKeys, - long shiftDelta, RedirectionIndex redirectionIndex, boolean disallowExactMatch); + Chunk rightStampChunk, LongChunk rightStampKeys, long shiftDelta, + RedirectionIndex redirectionIndex, boolean disallowExactMatch); } diff --git a/DB/src/main/java/io/deephaven/db/v2/ssa/ReplicateSegmentedSortedArray.java b/DB/src/main/java/io/deephaven/db/v2/ssa/ReplicateSegmentedSortedArray.java index 3c94dd54d31..7aba231f527 100644 --- a/DB/src/main/java/io/deephaven/db/v2/ssa/ReplicateSegmentedSortedArray.java +++ b/DB/src/main/java/io/deephaven/db/v2/ssa/ReplicateSegmentedSortedArray.java @@ -17,20 +17,20 @@ public class ReplicateSegmentedSortedArray { public static void main(String[] args) throws IOException { - final List ssas = ReplicatePrimitiveCode - .charToAllButBoolean(CharSegmentedSortedArray.class, ReplicatePrimitiveCode.MAIN_SRC); + final List ssas = ReplicatePrimitiveCode.charToAllButBoolean(CharSegmentedSortedArray.class, + ReplicatePrimitiveCode.MAIN_SRC); - final String charSsaPath = ReplicatePrimitiveCode - .pathForClass(CharSegmentedSortedArray.class, ReplicatePrimitiveCode.MAIN_SRC); + final String charSsaPath = + ReplicatePrimitiveCode.pathForClass(CharSegmentedSortedArray.class, ReplicatePrimitiveCode.MAIN_SRC); ssas.add(charSsaPath); invertSense(charSsaPath, descendingPath(charSsaPath)); - final String charNullSsaPath = ReplicateDupCompactKernel - .fixupCharNullComparisons(CharSegmentedSortedArray.class, charSsaPath); + final String charNullSsaPath = + ReplicateDupCompactKernel.fixupCharNullComparisons(CharSegmentedSortedArray.class, charSsaPath); invertSense(charNullSsaPath, descendingPath(charNullSsaPath)); - final String objectSsa = 
ReplicatePrimitiveCode.charToObject(CharSegmentedSortedArray.class, - ReplicatePrimitiveCode.MAIN_SRC); + final String objectSsa = + ReplicatePrimitiveCode.charToObject(CharSegmentedSortedArray.class, ReplicatePrimitiveCode.MAIN_SRC); fixupObjectSsa(objectSsa, true); ssas.add(objectSsa); @@ -47,25 +47,23 @@ public static void main(String[] args) throws IOException { } } - final List chunkSsaStamps = ReplicatePrimitiveCode - .charToAllButBoolean(CharChunkSsaStamp.class, ReplicatePrimitiveCode.MAIN_SRC); - final String charChunkSsaStampPath = ReplicatePrimitiveCode - .pathForClass(CharChunkSsaStamp.class, ReplicatePrimitiveCode.MAIN_SRC); + final List chunkSsaStamps = + ReplicatePrimitiveCode.charToAllButBoolean(CharChunkSsaStamp.class, ReplicatePrimitiveCode.MAIN_SRC); + final String charChunkSsaStampPath = + ReplicatePrimitiveCode.pathForClass(CharChunkSsaStamp.class, ReplicatePrimitiveCode.MAIN_SRC); invertSense(charChunkSsaStampPath, descendingPath(charChunkSsaStampPath)); - final String charNullChunkSsaStampPath = ReplicateDupCompactKernel - .fixupCharNullComparisons(CharChunkSsaStamp.class, charChunkSsaStampPath); - final String descendingCharNullChunkSsaStampPath = - descendingPath(charNullChunkSsaStampPath); + final String charNullChunkSsaStampPath = + ReplicateDupCompactKernel.fixupCharNullComparisons(CharChunkSsaStamp.class, charChunkSsaStampPath); + final String descendingCharNullChunkSsaStampPath = descendingPath(charNullChunkSsaStampPath); invertSense(charNullChunkSsaStampPath, descendingCharNullChunkSsaStampPath); fixupSsaName(charNullChunkSsaStampPath, CharSegmentedSortedArray.class.getSimpleName(), - NullAwareCharSegmentedSortedArray.class.getSimpleName()); - fixupSsaName(descendingCharNullChunkSsaStampPath, - CharReverseSegmentedSortedArray.class.getSimpleName(), - NullAwareCharReverseSegmentedSortedArray.class.getSimpleName()); + NullAwareCharSegmentedSortedArray.class.getSimpleName()); + fixupSsaName(descendingCharNullChunkSsaStampPath, 
CharReverseSegmentedSortedArray.class.getSimpleName(), + NullAwareCharReverseSegmentedSortedArray.class.getSimpleName()); - final String objectSsaStamp = ReplicatePrimitiveCode.charToObject(CharChunkSsaStamp.class, - ReplicatePrimitiveCode.MAIN_SRC); + final String objectSsaStamp = + ReplicatePrimitiveCode.charToObject(CharChunkSsaStamp.class, ReplicatePrimitiveCode.MAIN_SRC); fixupObjectSsa(objectSsaStamp, true); chunkSsaStamps.add(objectSsaStamp); @@ -82,24 +80,23 @@ public static void main(String[] args) throws IOException { } } - final List ssaSsaStamps = ReplicatePrimitiveCode - .charToAllButBoolean(CharSsaSsaStamp.class, ReplicatePrimitiveCode.MAIN_SRC); - final String charSsaSsaStampPath = ReplicatePrimitiveCode - .pathForClass(CharSsaSsaStamp.class, ReplicatePrimitiveCode.MAIN_SRC); + final List ssaSsaStamps = + ReplicatePrimitiveCode.charToAllButBoolean(CharSsaSsaStamp.class, ReplicatePrimitiveCode.MAIN_SRC); + final String charSsaSsaStampPath = + ReplicatePrimitiveCode.pathForClass(CharSsaSsaStamp.class, ReplicatePrimitiveCode.MAIN_SRC); invertSense(charSsaSsaStampPath, descendingPath(charSsaSsaStampPath)); - final String charNullSsaSsaStampPath = ReplicateDupCompactKernel - .fixupCharNullComparisons(CharSsaSsaStamp.class, charSsaSsaStampPath); + final String charNullSsaSsaStampPath = + ReplicateDupCompactKernel.fixupCharNullComparisons(CharSsaSsaStamp.class, charSsaSsaStampPath); final String descendingCharNullSsaSsaStampPath = descendingPath(charNullSsaSsaStampPath); invertSense(charNullSsaSsaStampPath, descendingCharNullSsaSsaStampPath); fixupSsaName(charNullSsaSsaStampPath, CharSegmentedSortedArray.class.getSimpleName(), - NullAwareCharSegmentedSortedArray.class.getSimpleName()); - fixupSsaName(descendingCharNullSsaSsaStampPath, - CharReverseSegmentedSortedArray.class.getSimpleName(), - NullAwareCharReverseSegmentedSortedArray.class.getSimpleName()); + NullAwareCharSegmentedSortedArray.class.getSimpleName()); + 
fixupSsaName(descendingCharNullSsaSsaStampPath, CharReverseSegmentedSortedArray.class.getSimpleName(), + NullAwareCharReverseSegmentedSortedArray.class.getSimpleName()); - final String objectSsaSsaStamp = ReplicatePrimitiveCode.charToObject(CharSsaSsaStamp.class, - ReplicatePrimitiveCode.MAIN_SRC); + final String objectSsaSsaStamp = + ReplicatePrimitiveCode.charToObject(CharSsaSsaStamp.class, ReplicatePrimitiveCode.MAIN_SRC); fixupObjectSsa(objectSsaSsaStamp, true); ssaSsaStamps.add(objectSsaSsaStamp); @@ -116,14 +113,14 @@ public static void main(String[] args) throws IOException { } } - final List ssaCheckers = ReplicatePrimitiveCode - .charToAllButBoolean(CharSsaChecker.class, ReplicatePrimitiveCode.MAIN_SRC); - final String charSsaCheckerPath = ReplicatePrimitiveCode.pathForClass(CharSsaChecker.class, - ReplicatePrimitiveCode.MAIN_SRC); + final List ssaCheckers = + ReplicatePrimitiveCode.charToAllButBoolean(CharSsaChecker.class, ReplicatePrimitiveCode.MAIN_SRC); + final String charSsaCheckerPath = + ReplicatePrimitiveCode.pathForClass(CharSsaChecker.class, ReplicatePrimitiveCode.MAIN_SRC); invertSense(charSsaCheckerPath, descendingPath(charSsaCheckerPath)); - final String objectSsaChecker = ReplicatePrimitiveCode.charToObject(CharSsaChecker.class, - ReplicatePrimitiveCode.MAIN_SRC); + final String objectSsaChecker = + ReplicatePrimitiveCode.charToObject(CharSsaChecker.class, ReplicatePrimitiveCode.MAIN_SRC); fixupObjectSsa(objectSsaChecker, true); ssaCheckers.add(objectSsaChecker); @@ -144,13 +141,10 @@ public static void main(String[] args) throws IOException { private static void invertSense(String path, String descendingPath) throws IOException { final File file = new File(path); - List lines = ascendingNameToDescendingName(path, - FileUtils.readLines(file, Charset.defaultCharset())); + List lines = ascendingNameToDescendingName(path, FileUtils.readLines(file, Charset.defaultCharset())); - if (path.contains("ChunkSsaStamp") || path.contains("SsaSsaStamp") - 
|| path.contains("SsaChecker")) { - lines = globalReplacements(3, lines, "\\BSegmentedSortedArray", - "ReverseSegmentedSortedArray"); + if (path.contains("ChunkSsaStamp") || path.contains("SsaSsaStamp") || path.contains("SsaChecker")) { + lines = globalReplacements(3, lines, "\\BSegmentedSortedArray", "ReverseSegmentedSortedArray"); } if (path.contains("SegmentedSortedArray")) { @@ -170,8 +164,7 @@ private static void invertSense(String path, String descendingPath) throws IOExc FileUtils.writeLines(new File(descendingPath), lines); } - private static void fixupSsaName(String path, String oldName, String newName) - throws IOException { + private static void fixupSsaName(String path, String oldName, String newName) throws IOException { final File file = new File(path); List lines = FileUtils.readLines(file, Charset.defaultCharset()); lines = globalReplacements(3, lines, oldName, newName); @@ -189,19 +182,17 @@ private static List ascendingNameToDescendingName(String path, List lines = FileUtils.readLines(objectFile, Charset.defaultCharset()); - FileUtils.writeLines(objectFile, - ReplicateUtilities.simpleFixup( - ReplicateSortKernel.fixupObjectComparisons( - ReplicateUtilities.fixupChunkAttributes(lines), ascending), + FileUtils.writeLines(objectFile, ReplicateUtilities.simpleFixup( + ReplicateSortKernel.fixupObjectComparisons(ReplicateUtilities.fixupChunkAttributes(lines), ascending), "fillValue", "Object.MIN_VALUE", "null")); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/ssa/SegmentedSortedArray.java b/DB/src/main/java/io/deephaven/db/v2/ssa/SegmentedSortedArray.java index 5911f177fd8..ce7e81073d9 100644 --- a/DB/src/main/java/io/deephaven/db/v2/ssa/SegmentedSortedArray.java +++ b/DB/src/main/java/io/deephaven/db/v2/ssa/SegmentedSortedArray.java @@ -11,39 +11,38 @@ public interface SegmentedSortedArray extends LongSizedDataStructure { boolean SEGMENTED_SORTED_ARRAY_VALIDATION = - 
Configuration.getInstance().getBooleanWithDefault("SegmentedSortedArray.validation", false); + Configuration.getInstance().getBooleanWithDefault("SegmentedSortedArray.validation", false); static SegmentedSortedArray make(ChunkType chunkType, boolean reverse, int nodeSize) { return makeFactory(chunkType, reverse, nodeSize).get(); } - static Supplier makeFactory(ChunkType chunkType, boolean reverse, - int nodeSize) { + static Supplier makeFactory(ChunkType chunkType, boolean reverse, int nodeSize) { switch (chunkType) { case Char: return reverse ? () -> new NullAwareCharReverseSegmentedSortedArray(nodeSize) - : () -> new NullAwareCharSegmentedSortedArray(nodeSize); + : () -> new NullAwareCharSegmentedSortedArray(nodeSize); case Byte: return reverse ? () -> new ByteReverseSegmentedSortedArray(nodeSize) - : () -> new ByteSegmentedSortedArray(nodeSize); + : () -> new ByteSegmentedSortedArray(nodeSize); case Short: return reverse ? () -> new ShortReverseSegmentedSortedArray(nodeSize) - : () -> new ShortSegmentedSortedArray(nodeSize); + : () -> new ShortSegmentedSortedArray(nodeSize); case Int: return reverse ? () -> new IntReverseSegmentedSortedArray(nodeSize) - : () -> new IntSegmentedSortedArray(nodeSize); + : () -> new IntSegmentedSortedArray(nodeSize); case Long: return reverse ? () -> new LongReverseSegmentedSortedArray(nodeSize) - : () -> new LongSegmentedSortedArray(nodeSize); + : () -> new LongSegmentedSortedArray(nodeSize); case Float: return reverse ? () -> new FloatReverseSegmentedSortedArray(nodeSize) - : () -> new FloatSegmentedSortedArray(nodeSize); + : () -> new FloatSegmentedSortedArray(nodeSize); case Double: return reverse ? () -> new DoubleReverseSegmentedSortedArray(nodeSize) - : () -> new DoubleSegmentedSortedArray(nodeSize); + : () -> new DoubleSegmentedSortedArray(nodeSize); case Object: return reverse ? 
() -> new ObjectReverseSegmentedSortedArray(nodeSize) - : () -> new ObjectSegmentedSortedArray(nodeSize); + : () -> new ObjectSegmentedSortedArray(nodeSize); default: case Boolean: throw new UnsupportedOperationException(); @@ -56,8 +55,7 @@ static Supplier makeFactory(ChunkType chunkType, boolean r * @param valuesToInsert the valuesToInsert to insert * @param indicesToInsert the corresponding indicesToInsert */ - void insert(Chunk valuesToInsert, - LongChunk indicesToInsert); + void insert(Chunk valuesToInsert, LongChunk indicesToInsert); /** * Remove valuesToRemove from this SSA. The valuesToRemove to remove must be sorted. @@ -65,29 +63,25 @@ void insert(Chunk valuesToInsert, * @param valuesToRemove the valuesToRemove to remove * @param indicesToRemove the corresponding indices */ - void remove(Chunk valuesToRemove, - LongChunk indicesToRemove); + void remove(Chunk valuesToRemove, LongChunk indicesToRemove); /** - * Remove the values and indices referenced in stampChunk and indicesToRemove. Fill - * priorRedirections with the redirection value immediately preceding the removed value. + * Remove the values and indices referenced in stampChunk and indicesToRemove. Fill priorRedirections with the + * redirection value immediately preceding the removed value. 
* * @param stampChunk the values to remove * @param indicesToRemove the indices (parallel to the values) * @param priorRedirections the output prior redirections (parallel to valeus/indices) */ - void removeAndGetPrior(Chunk stampChunk, - LongChunk indicesToRemove, - WritableLongChunk priorRedirections); + void removeAndGetPrior(Chunk stampChunk, LongChunk indicesToRemove, + WritableLongChunk priorRedirections); - int insertAndGetNextValue(Chunk valuesToInsert, - LongChunk indicesToInsert, WritableChunk nextValue); + int insertAndGetNextValue(Chunk valuesToInsert, LongChunk indicesToInsert, + WritableChunk nextValue); - void applyShift(Chunk stampChunk, LongChunk keyChunk, - long shiftDelta); + void applyShift(Chunk stampChunk, LongChunk keyChunk, long shiftDelta); - void applyShiftReverse(Chunk stampChunk, - LongChunk keyChunk, long shiftDelta); + void applyShiftReverse(Chunk stampChunk, LongChunk keyChunk, long shiftDelta); int getNodeSize(); diff --git a/DB/src/main/java/io/deephaven/db/v2/ssa/SsaChecker.java b/DB/src/main/java/io/deephaven/db/v2/ssa/SsaChecker.java index a7a7933c8f6..0d343353da4 100644 --- a/DB/src/main/java/io/deephaven/db/v2/ssa/SsaChecker.java +++ b/DB/src/main/java/io/deephaven/db/v2/ssa/SsaChecker.java @@ -12,17 +12,13 @@ import io.deephaven.db.v2.utils.OrderedKeys; public interface SsaChecker { - default void checkSsa(SegmentedSortedArray ssa, ColumnSource columnSource, - OrderedKeys orderedKeys) { + default void checkSsa(SegmentedSortedArray ssa, ColumnSource columnSource, OrderedKeys orderedKeys) { final int size = orderedKeys.intSize(); try (final ColumnSource.FillContext fillContext = columnSource.makeFillContext(size); - final WritableChunk valuesChunk = - columnSource.getChunkType().makeWritableChunk(size); - final WritableLongChunk keyChunk = - WritableLongChunk.makeWritableChunk(size); - final LongSortKernel sortKernel = LongSortKernel.makeContext( - columnSource.getChunkType(), - ssa.isReversed() ? 
SortingOrder.Descending : SortingOrder.Ascending, size, true)) { + final WritableChunk valuesChunk = columnSource.getChunkType().makeWritableChunk(size); + final WritableLongChunk keyChunk = WritableLongChunk.makeWritableChunk(size); + final LongSortKernel sortKernel = LongSortKernel.makeContext(columnSource.getChunkType(), + ssa.isReversed() ? SortingOrder.Descending : SortingOrder.Ascending, size, true)) { columnSource.fillChunk(fillContext, valuesChunk, orderedKeys); orderedKeys.fillKeyIndicesChunk(WritableLongChunk.downcast(keyChunk)); sortKernel.sort(keyChunk, valuesChunk); @@ -31,7 +27,7 @@ default void checkSsa(SegmentedSortedArray ssa, ColumnSource columnSource, } void checkSsa(SegmentedSortedArray ssa, Chunk valueChunk, - LongChunk tableIndexChunk); + LongChunk tableIndexChunk); class SsaCheckException extends RuntimeException { SsaCheckException(String message) { diff --git a/DB/src/main/java/io/deephaven/db/v2/ssa/SsaSsaStamp.java b/DB/src/main/java/io/deephaven/db/v2/ssa/SsaSsaStamp.java index 0e36acaf450..b0c9952db6f 100644 --- a/DB/src/main/java/io/deephaven/db/v2/ssa/SsaSsaStamp.java +++ b/DB/src/main/java/io/deephaven/db/v2/ssa/SsaSsaStamp.java @@ -58,24 +58,22 @@ static SsaSsaStamp make(ChunkType type, boolean reverse) { } } - void processEntry(SegmentedSortedArray leftSsa, SegmentedSortedArray ssa, - RedirectionIndex redirectionIndex, boolean disallowExactMatch); + void processEntry(SegmentedSortedArray leftSsa, SegmentedSortedArray ssa, RedirectionIndex redirectionIndex, + boolean disallowExactMatch); void processRemovals(SegmentedSortedArray leftSsa, Chunk rightStampChunk, - LongChunk rightKeys, WritableLongChunk priorRedirections, - RedirectionIndex redirectionIndex, Index.RandomBuilder modifiedBuilder, - boolean disallowExactMatch); + LongChunk rightKeys, WritableLongChunk priorRedirections, + RedirectionIndex redirectionIndex, Index.RandomBuilder modifiedBuilder, boolean disallowExactMatch); void processInsertion(SegmentedSortedArray leftSsa, 
Chunk rightStampChunk, - LongChunk rightKeys, Chunk nextRightValue, - RedirectionIndex redirectionIndex, Index.RandomBuilder modifiedBuilder, - boolean endsWithLastValue, boolean disallowExactMatch); + LongChunk rightKeys, Chunk nextRightValue, RedirectionIndex redirectionIndex, + Index.RandomBuilder modifiedBuilder, boolean endsWithLastValue, boolean disallowExactMatch); void findModified(SegmentedSortedArray leftSsa, RedirectionIndex redirectionIndex, - Chunk rightStampChunk, LongChunk rightStampIndices, - Index.RandomBuilder modifiedBuilder, boolean disallowExactMatch); + Chunk rightStampChunk, LongChunk rightStampIndices, + Index.RandomBuilder modifiedBuilder, boolean disallowExactMatch); void applyShift(SegmentedSortedArray leftSsa, Chunk rightStampChunk, - LongChunk rightStampKeys, long shiftDelta, RedirectionIndex redirectionIndex, - boolean disallowExactMatch); + LongChunk rightStampKeys, long shiftDelta, RedirectionIndex redirectionIndex, + boolean disallowExactMatch); } diff --git a/DB/src/main/java/io/deephaven/db/v2/ssms/ReplicateSegmentedSortedMultiset.java b/DB/src/main/java/io/deephaven/db/v2/ssms/ReplicateSegmentedSortedMultiset.java index 531692ddf73..b5e1f6c6778 100644 --- a/DB/src/main/java/io/deephaven/db/v2/ssms/ReplicateSegmentedSortedMultiset.java +++ b/DB/src/main/java/io/deephaven/db/v2/ssms/ReplicateSegmentedSortedMultiset.java @@ -35,136 +35,125 @@ public class ReplicateSegmentedSortedMultiset { public static void main(String[] args) throws IOException { ReplicatePrimitiveCode.charToAllButBooleanAndLong(CharSegmentedSortedMultiset.class, - ReplicatePrimitiveCode.MAIN_SRC); - insertDbDateTimeExtensions(ReplicatePrimitiveCode - .charToLong(CharSegmentedSortedMultiset.class, ReplicatePrimitiveCode.MAIN_SRC)); + ReplicatePrimitiveCode.MAIN_SRC); + insertDbDateTimeExtensions( + ReplicatePrimitiveCode.charToLong(CharSegmentedSortedMultiset.class, ReplicatePrimitiveCode.MAIN_SRC)); - String objectSsm = 
ReplicatePrimitiveCode.charToObject(CharSegmentedSortedMultiset.class, - ReplicatePrimitiveCode.MAIN_SRC); + String objectSsm = + ReplicatePrimitiveCode.charToObject(CharSegmentedSortedMultiset.class, ReplicatePrimitiveCode.MAIN_SRC); fixupObjectSsm(objectSsm, ReplicateSegmentedSortedMultiset::fixupNulls, - ReplicateSegmentedSortedMultiset::fixupDbArrays, - ReplicateSegmentedSortedMultiset::fixupTHashes, - ReplicateSegmentedSortedMultiset::fixupSsmConstructor, - ReplicateSegmentedSortedMultiset::fixupObjectCompare); + ReplicateSegmentedSortedMultiset::fixupDbArrays, ReplicateSegmentedSortedMultiset::fixupTHashes, + ReplicateSegmentedSortedMultiset::fixupSsmConstructor, + ReplicateSegmentedSortedMultiset::fixupObjectCompare); - ReplicatePrimitiveCode.charToAllButBoolean( - io.deephaven.db.v2.by.ssmminmax.CharSetResult.class, ReplicatePrimitiveCode.MAIN_SRC); - fixupObjectSsm( - ReplicatePrimitiveCode.charToObject(io.deephaven.db.v2.by.ssmminmax.CharSetResult.class, - ReplicatePrimitiveCode.MAIN_SRC), - ReplicateSegmentedSortedMultiset::fixupNulls); + ReplicatePrimitiveCode.charToAllButBoolean(io.deephaven.db.v2.by.ssmminmax.CharSetResult.class, + ReplicatePrimitiveCode.MAIN_SRC); + fixupObjectSsm(ReplicatePrimitiveCode.charToObject(io.deephaven.db.v2.by.ssmminmax.CharSetResult.class, + ReplicatePrimitiveCode.MAIN_SRC), ReplicateSegmentedSortedMultiset::fixupNulls); - ReplicatePrimitiveCode.charToAllButBoolean(CharPercentileTypeHelper.class, - ReplicatePrimitiveCode.MAIN_SRC); - fixupObjectSsm(ReplicatePrimitiveCode.charToObject(CharPercentileTypeHelper.class, - ReplicatePrimitiveCode.MAIN_SRC), ReplicateSegmentedSortedMultiset::fixupNulls); + ReplicatePrimitiveCode.charToAllButBoolean(CharPercentileTypeHelper.class, ReplicatePrimitiveCode.MAIN_SRC); + fixupObjectSsm( + ReplicatePrimitiveCode.charToObject(CharPercentileTypeHelper.class, ReplicatePrimitiveCode.MAIN_SRC), + ReplicateSegmentedSortedMultiset::fixupNulls); - 
ReplicatePrimitiveCode.charToIntegers(CharPercentileTypeMedianHelper.class, - ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.charToIntegers(CharPercentileTypeMedianHelper.class, ReplicatePrimitiveCode.MAIN_SRC); ReplicatePrimitiveCode.floatToAllFloatingPoints(FloatPercentileTypeMedianHelper.class, - ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.MAIN_SRC); - ReplicatePrimitiveCode.charToAllButBoolean(CharSsmBackedSource.class, - ReplicatePrimitiveCode.MAIN_SRC); - objectSsm = ReplicatePrimitiveCode.charToObject(CharSsmBackedSource.class, - ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.charToAllButBoolean(CharSsmBackedSource.class, ReplicatePrimitiveCode.MAIN_SRC); + objectSsm = ReplicatePrimitiveCode.charToObject(CharSsmBackedSource.class, ReplicatePrimitiveCode.MAIN_SRC); fixupObjectSsm(objectSsm, ReplicateSegmentedSortedMultiset::fixupDbArrays, - ReplicateSegmentedSortedMultiset::fixupSourceConstructor, - (l) -> replaceRegion(l, "CreateNew", Collections.singletonList( - " underlying.set(key, ssm = new ObjectSegmentedSortedMultiset(DistinctOperatorFactory.NODE_SIZE, Object.class));"))); + ReplicateSegmentedSortedMultiset::fixupSourceConstructor, + (l) -> replaceRegion(l, "CreateNew", Collections.singletonList( + " underlying.set(key, ssm = new ObjectSegmentedSortedMultiset(DistinctOperatorFactory.NODE_SIZE, Object.class));"))); ReplicatePrimitiveCode.charToAllButBoolean(CharChunkedCountDistinctOperator.class, - ReplicatePrimitiveCode.MAIN_SRC); - fixupObjectKernelOperator(ReplicatePrimitiveCode.charToObject( - CharChunkedCountDistinctOperator.class, ReplicatePrimitiveCode.MAIN_SRC), "ssms"); + ReplicatePrimitiveCode.MAIN_SRC); + fixupObjectKernelOperator(ReplicatePrimitiveCode.charToObject(CharChunkedCountDistinctOperator.class, + ReplicatePrimitiveCode.MAIN_SRC), "ssms"); ReplicatePrimitiveCode.charToAllButBooleanAndLong(CharChunkedDistinctOperator.class, - ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.MAIN_SRC); 
fixupLongKernelOperator( - ReplicatePrimitiveCode.charToLong(CharChunkedDistinctOperator.class, - ReplicatePrimitiveCode.MAIN_SRC), - " externalResult = new DbDateTimeSsmSourceWrapper(internalResult);"); - fixupObjectKernelOperator(ReplicatePrimitiveCode.charToObject( - CharChunkedDistinctOperator.class, ReplicatePrimitiveCode.MAIN_SRC), "internalResult"); + ReplicatePrimitiveCode.charToLong(CharChunkedDistinctOperator.class, ReplicatePrimitiveCode.MAIN_SRC), + " externalResult = new DbDateTimeSsmSourceWrapper(internalResult);"); + fixupObjectKernelOperator( + ReplicatePrimitiveCode.charToObject(CharChunkedDistinctOperator.class, ReplicatePrimitiveCode.MAIN_SRC), + "internalResult"); ReplicatePrimitiveCode.charToAllButBooleanAndLong(CharChunkedUniqueOperator.class, - ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.MAIN_SRC); fixupLongKernelOperator( - ReplicatePrimitiveCode.charToLong(CharChunkedUniqueOperator.class, - ReplicatePrimitiveCode.MAIN_SRC), - " externalResult = new BoxedColumnSource.OfDateTime(internalResult);"); - fixupObjectKernelOperator(ReplicatePrimitiveCode.charToObject( - CharChunkedUniqueOperator.class, ReplicatePrimitiveCode.MAIN_SRC), "ssms"); + ReplicatePrimitiveCode.charToLong(CharChunkedUniqueOperator.class, ReplicatePrimitiveCode.MAIN_SRC), + " externalResult = new BoxedColumnSource.OfDateTime(internalResult);"); + fixupObjectKernelOperator( + ReplicatePrimitiveCode.charToObject(CharChunkedUniqueOperator.class, ReplicatePrimitiveCode.MAIN_SRC), + "ssms"); ReplicatePrimitiveCode.charToAllButBoolean(CharRollupCountDistinctOperator.class, - ReplicatePrimitiveCode.MAIN_SRC); - fixupObjectKernelOperator(ReplicatePrimitiveCode.charToObject( - CharRollupCountDistinctOperator.class, ReplicatePrimitiveCode.MAIN_SRC), "ssms"); + ReplicatePrimitiveCode.MAIN_SRC); + fixupObjectKernelOperator(ReplicatePrimitiveCode.charToObject(CharRollupCountDistinctOperator.class, + ReplicatePrimitiveCode.MAIN_SRC), "ssms"); 
ReplicatePrimitiveCode.charToAllButBooleanAndLong(CharRollupDistinctOperator.class, - ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.MAIN_SRC); fixupLongKernelOperator( - ReplicatePrimitiveCode.charToLong(CharRollupDistinctOperator.class, - ReplicatePrimitiveCode.MAIN_SRC), - " externalResult = new DbDateTimeSsmSourceWrapper(internalResult);"); - fixupObjectKernelOperator(ReplicatePrimitiveCode.charToObject( - CharRollupDistinctOperator.class, ReplicatePrimitiveCode.MAIN_SRC), "internalResult"); + ReplicatePrimitiveCode.charToLong(CharRollupDistinctOperator.class, ReplicatePrimitiveCode.MAIN_SRC), + " externalResult = new DbDateTimeSsmSourceWrapper(internalResult);"); + fixupObjectKernelOperator( + ReplicatePrimitiveCode.charToObject(CharRollupDistinctOperator.class, ReplicatePrimitiveCode.MAIN_SRC), + "internalResult"); ReplicatePrimitiveCode.charToAllButBooleanAndLong(CharRollupUniqueOperator.class, - ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.MAIN_SRC); fixupLongKernelOperator( - ReplicatePrimitiveCode.charToLong(CharRollupUniqueOperator.class, - ReplicatePrimitiveCode.MAIN_SRC), - " externalResult = new BoxedColumnSource.OfDateTime(internalResult);"); - fixupObjectKernelOperator(ReplicatePrimitiveCode - .charToObject(CharRollupUniqueOperator.class, ReplicatePrimitiveCode.MAIN_SRC), "ssms"); + ReplicatePrimitiveCode.charToLong(CharRollupUniqueOperator.class, ReplicatePrimitiveCode.MAIN_SRC), + " externalResult = new BoxedColumnSource.OfDateTime(internalResult);"); + fixupObjectKernelOperator( + ReplicatePrimitiveCode.charToObject(CharRollupUniqueOperator.class, ReplicatePrimitiveCode.MAIN_SRC), + "ssms"); } - private static void fixupLongKernelOperator(String longPath, String externalResultSetter) - throws IOException { + private static void fixupLongKernelOperator(String longPath, String externalResultSetter) throws IOException { final File longFile = new File(longPath); List lines = FileUtils.readLines(longFile, 
Charset.defaultCharset()); - lines = addImport(lines, BoxedColumnSource.class, DBDateTime.class, - DbDateTimeSsmSourceWrapper.class); + lines = addImport(lines, BoxedColumnSource.class, DBDateTime.class, DbDateTimeSsmSourceWrapper.class); lines = replaceRegion(lines, "Constructor", - indent(Collections.singletonList("Class type,"), 12)); + indent(Collections.singletonList("Class type,"), 12)); lines = replaceRegion(lines, "ResultAssignment", - indent(Arrays.asList( - "if(type == DBDateTime.class) {", - externalResultSetter, - "} else {", - " externalResult = internalResult;", - "}"), 8)); + indent(Arrays.asList( + "if(type == DBDateTime.class) {", + externalResultSetter, + "} else {", + " externalResult = internalResult;", + "}"), 8)); FileUtils.writeLines(longFile, lines); } - private static void fixupObjectKernelOperator(String objectPath, String ssmVarName) - throws IOException { + private static void fixupObjectKernelOperator(String objectPath, String ssmVarName) throws IOException { final File objectFile = new File(objectPath); List lines = FileUtils.readLines(objectFile, Charset.defaultCharset()); lines = replaceRegion(lines, "Constructor", - indent(Collections.singletonList("Class type,"), 12)); + indent(Collections.singletonList("Class type,"), 12)); lines = replaceRegion(lines, "SsmCreation", - indent(Collections - .singletonList("this." + ssmVarName + " = new ObjectSsmBackedSource(type);"), 8)); + indent(Collections.singletonList("this." + ssmVarName + " = new ObjectSsmBackedSource(type);"), 8)); lines = replaceRegion(lines, "ResultCreation", - indent(Collections.singletonList("this.internalResult = new ObjectArraySource(type);"), - 8)); + indent(Collections.singletonList("this.internalResult = new ObjectArraySource(type);"), 8)); lines = globalReplacements(lines, "\\(WritableObjectChunk<\\? 
extends Values>\\)", - "(WritableObjectChunk)"); + "(WritableObjectChunk)"); FileUtils.writeLines(objectFile, lines); } - private static void fixupObjectSsm(String objectPath, - Function, List>... mutators) throws IOException { + private static void fixupObjectSsm(String objectPath, Function, List>... mutators) + throws IOException { final File objectFile = new File(objectPath); List lines = FileUtils.readLines(objectFile, Charset.defaultCharset()); lines = fixupChunkAttributes(lines); lines = ReplicateSortKernel.fixupObjectComparisons(lines); lines = replaceRegion(lines, "averageMedian", - indent(Collections.singletonList("throw new UnsupportedOperationException();"), 16)); + indent(Collections.singletonList("throw new UnsupportedOperationException();"), 16)); if (mutators != null) { for (int i = 0; i < mutators.length; i++) { @@ -182,16 +171,16 @@ private static List fixupNulls(List lines) { private static List fixupDbArrays(List lines) { lines = removeAnyImports(lines, "\\s*import .*DbArray.+Wrapper;", - "\\s*import .*Db.+Array;", - "\\s*import .*Db.+Direct;"); + "\\s*import .*Db.+Array;", + "\\s*import .*Db.+Direct;"); lines = addImport(lines, DbArray.class, DbArrayDirect.class); return globalReplacements(lines, "DbObjectArray>", "DbArray>", - "DbObjectArray\\s+", "DbArray ", - "DbObjectArray\\.", "DbArray.", - "DbObjectArrayDirect", "DbArrayDirect<>", - "new DbArrayObjectWrapper\\(this\\)", "this"); + "DbObjectArray\\s+", "DbArray ", + "DbObjectArray\\.", "DbArray.", + "DbObjectArrayDirect", "DbArrayDirect<>", + "new DbArrayObjectWrapper\\(this\\)", "this"); } private static List fixupTHashes(List lines) { @@ -202,55 +191,52 @@ private static List fixupTHashes(List lines) { private static List fixupSsmConstructor(List lines) { return replaceRegion(lines, "Constructor", - Collections.singletonList(" private final Class componentType;\n" + - "\n" + - " /**\n" + - " * Create a ObjectSegmentedSortedArray with the given leafSize.\n" + - " *\n" + - " * @param 
leafSize the maximumSize for any leaf\n" + - " * @param componentType the type of the underlying Object\n" + - " */\n" + - " public ObjectSegmentedSortedMultiset(int leafSize, Class componentType) {\n" - + - " this.leafSize = leafSize;\n" + - " this.componentType = componentType;\n" + - " leafCount = 0;\n" + - " size = 0;\n" + - " }\n" + - "\n" + - " @Override\n" + - " public Class getComponentType() {\n" + - " return componentType;\n" + - " }")); + Collections.singletonList(" private final Class componentType;\n" + + "\n" + + " /**\n" + + " * Create a ObjectSegmentedSortedArray with the given leafSize.\n" + + " *\n" + + " * @param leafSize the maximumSize for any leaf\n" + + " * @param componentType the type of the underlying Object\n" + + " */\n" + + " public ObjectSegmentedSortedMultiset(int leafSize, Class componentType) {\n" + + " this.leafSize = leafSize;\n" + + " this.componentType = componentType;\n" + + " leafCount = 0;\n" + + " size = 0;\n" + + " }\n" + + "\n" + + " @Override\n" + + " public Class getComponentType() {\n" + + " return componentType;\n" + + " }")); } private static List fixupSourceConstructor(List lines) { return replaceRegion(lines, "Constructor", - Collections.singletonList(" public ObjectSsmBackedSource(Class type) {\n" + - " super(DbArray.class, type);\n" + - " underlying = new ObjectArraySource<>(ObjectSegmentedSortedMultiset.class, type);\n" - + - " }")); + Collections.singletonList(" public ObjectSsmBackedSource(Class type) {\n" + + " super(DbArray.class, type);\n" + + " underlying = new ObjectArraySource<>(ObjectSegmentedSortedMultiset.class, type);\n" + + " }")); } private static List fixupObjectCompare(List lines) { lines = removeRegion(lines, "DbArrayEquals"); lines = replaceRegion(lines, "EqualsArrayTypeCheck", Collections.singletonList( - " if(o.getComponentType() != o.getComponentType()) {\n" + - " return false;\n" + - " }")); + " if(o.getComponentType() != o.getComponentType()) {\n" + + " return false;\n" + + " }")); lines = 
replaceRegion(lines, "DirObjectEquals", - Collections.singletonList( - " if(!Objects.equals(directoryValues[ii], that.directoryValues[ii])) {\n" - + - " return false;\n" + - " }")); + Collections.singletonList( + " if(!Objects.equals(directoryValues[ii], that.directoryValues[ii])) {\n" + + " return false;\n" + + " }")); return replaceRegion(lines, "LeafObjectEquals", - Collections.singletonList( - " if(!Objects.equals(leafValues[li][ai], that.leafValues[otherLeaf][otherLeafIdx++])) {\n" - + - " return false;\n" + - " }")); + Collections.singletonList( + " if(!Objects.equals(leafValues[li][ai], that.leafValues[otherLeaf][otherLeafIdx++])) {\n" + + + " return false;\n" + + " }")); } private static void insertDbDateTimeExtensions(String longPath) throws IOException { @@ -259,140 +245,140 @@ private static void insertDbDateTimeExtensions(String longPath) throws IOExcepti lines = addImport(lines, DBDateTime.class, DbArrayDirect.class, DBTimeUtils.class); lines = insertRegion(lines, "Extensions", - Arrays.asList( - " public DBDateTime getAsDate(long i) {", - " return DBTimeUtils.nanosToTime(get(i));", - " }", - "", - " public DbArray subArrayAsDate(long fromIndexInclusive, long toIndexExclusive) {", - " return new DbArrayDirect<>(keyArrayAsDate(fromIndexInclusive, toIndexExclusive));", - " }", - "", - " public DbArray subArrayByPositionsAsDates(long[] positions) {", - " final DBDateTime[] keyArray = new DBDateTime[positions.length];", - " int writePos = 0;", - " for (long position : positions) {", - " keyArray[writePos++] = getAsDate(position);", - " }", - "", - " return new DbArrayDirect<>(keyArray);", - " }", - "", - "", - " public DBDateTime[] toDateArray() {", - " return keyArrayAsDate();", - " }", - "", - " public DBDateTime getPrevAsDate(long offset) {", - " return DBTimeUtils.nanosToTime(getPrev(offset));", - " }", - "", - "", - " public Chunk toDateChunk() {", - " return ObjectChunk.chunkWrap(toDateArray());", - " }", - "", - " public void 
fillDateChunk(WritableChunk destChunk) {", - " if(isEmpty()) {", - " return ;", - " }", - "", - " //noinspection unchecked", - " WritableObjectChunk writable = destChunk.asWritableObjectChunk();", - " if (leafCount == 1) {", - " for(int ii = 0; ii < size(); ii++) {", - " writable.set(ii, DBTimeUtils.nanosToTime(directoryValues[ii]));", - " }", - " } else if (leafCount > 0) {", - " int offset = 0;", - " for (int li = 0; li < leafCount; ++li) {", - " for(int jj = 0; jj < leafSizes[li]; jj++) {", - " writable.set(jj + offset, DBTimeUtils.nanosToTime(leafValues[li][jj]));", - " }", - " offset += leafSizes[li];", - " }", - " }", - " }", - "", - "", - " public DbArray getDirectAsDate() {", - " return new DbArrayDirect<>(keyArrayAsDate());", - " }", - "", - " private DBDateTime[] keyArrayAsDate() {", - " return keyArrayAsDate(0, size()-1);", - " }", - "", - " /**", - " * Create an array of the current keys beginning with the first (inclusive) and ending with the last (inclusive)", - " * @param first", - " * @param last", - " * @return", - " */", - " private DBDateTime[] keyArrayAsDate(long first, long last) {", - " if(isEmpty()) {", - " return ArrayUtils.EMPTY_DATETIME_ARRAY;", - " }", - "", - " final int totalSize = (int)(last - first + 1);", - " final DBDateTime[] keyArray = new DBDateTime[intSize()];", - " if (leafCount == 1) {", - " for(int ii = 0; ii < totalSize; ii++) {", - " keyArray[ii] = DBTimeUtils.nanosToTime(directoryValues[ii + (int)first]);", - " }", - " } else if (leafCount > 0) {", - " int offset = 0;", - " int copied = 0;", - " int skipped = 0;", - " for (int li = 0; li < leafCount; ++li) {", - " if(skipped < first) {", - " final int toSkip = (int)first - skipped;", - " if(toSkip < leafSizes[li]) {", - " final int nToCopy = Math.min(leafSizes[li] - toSkip, totalSize);", - " for(int jj = 0; jj < nToCopy; jj++) {", - " keyArray[jj] = DBTimeUtils.nanosToTime(leafValues[li][jj + toSkip]);", - " }", - " copied = nToCopy;", - " offset = copied;", - " skipped = 
(int)first;", - " } else {", - " skipped += leafSizes[li];", - " }", - " } else {", - " int nToCopy = Math.min(leafSizes[li], totalSize - copied);", - " for(int jj = 0; jj < nToCopy; jj++) {", - " keyArray[jj + offset] = DBTimeUtils.nanosToTime(leafValues[li][jj]);", - " }", - " offset += leafSizes[li];", - " copied += nToCopy;", - " }", - " }", - " }", - " return keyArray;", - " }", - "", - " public String toDateString() {", - " final StringBuilder arrAsString = new StringBuilder(\"[\");", - " if (leafCount == 1) {", - " for(int ii = 0; ii < intSize(); ii++) {", - " arrAsString.append(DBTimeUtils.nanosToTime(directoryValues[ii])).append(\", \");", - " }", - " ", - " arrAsString.replace(arrAsString.length() - 2, arrAsString.length(), \"]\");", - " return arrAsString.toString();", - " } else if (leafCount > 0) {", - " for (int li = 0; li < leafCount; ++li) {", - " for(int ai = 0; ai < leafSizes[li]; ai++) {", - " arrAsString.append(DBTimeUtils.nanosToTime(leafValues[li][ai])).append(\", \");", - " }", - " }", - "", - " arrAsString.replace(arrAsString.length() - 2, arrAsString.length(), \"]\");", - " return arrAsString.toString();", - " }", - "", - " return \"[]\";", - " }")); + Arrays.asList( + " public DBDateTime getAsDate(long i) {", + " return DBTimeUtils.nanosToTime(get(i));", + " }", + "", + " public DbArray subArrayAsDate(long fromIndexInclusive, long toIndexExclusive) {", + " return new DbArrayDirect<>(keyArrayAsDate(fromIndexInclusive, toIndexExclusive));", + " }", + "", + " public DbArray subArrayByPositionsAsDates(long[] positions) {", + " final DBDateTime[] keyArray = new DBDateTime[positions.length];", + " int writePos = 0;", + " for (long position : positions) {", + " keyArray[writePos++] = getAsDate(position);", + " }", + "", + " return new DbArrayDirect<>(keyArray);", + " }", + "", + "", + " public DBDateTime[] toDateArray() {", + " return keyArrayAsDate();", + " }", + "", + " public DBDateTime getPrevAsDate(long offset) {", + " return 
DBTimeUtils.nanosToTime(getPrev(offset));", + " }", + "", + "", + " public Chunk toDateChunk() {", + " return ObjectChunk.chunkWrap(toDateArray());", + " }", + "", + " public void fillDateChunk(WritableChunk destChunk) {", + " if(isEmpty()) {", + " return ;", + " }", + "", + " //noinspection unchecked", + " WritableObjectChunk writable = destChunk.asWritableObjectChunk();", + " if (leafCount == 1) {", + " for(int ii = 0; ii < size(); ii++) {", + " writable.set(ii, DBTimeUtils.nanosToTime(directoryValues[ii]));", + " }", + " } else if (leafCount > 0) {", + " int offset = 0;", + " for (int li = 0; li < leafCount; ++li) {", + " for(int jj = 0; jj < leafSizes[li]; jj++) {", + " writable.set(jj + offset, DBTimeUtils.nanosToTime(leafValues[li][jj]));", + " }", + " offset += leafSizes[li];", + " }", + " }", + " }", + "", + "", + " public DbArray getDirectAsDate() {", + " return new DbArrayDirect<>(keyArrayAsDate());", + " }", + "", + " private DBDateTime[] keyArrayAsDate() {", + " return keyArrayAsDate(0, size()-1);", + " }", + "", + " /**", + " * Create an array of the current keys beginning with the first (inclusive) and ending with the last (inclusive)", + " * @param first", + " * @param last", + " * @return", + " */", + " private DBDateTime[] keyArrayAsDate(long first, long last) {", + " if(isEmpty()) {", + " return ArrayUtils.EMPTY_DATETIME_ARRAY;", + " }", + "", + " final int totalSize = (int)(last - first + 1);", + " final DBDateTime[] keyArray = new DBDateTime[intSize()];", + " if (leafCount == 1) {", + " for(int ii = 0; ii < totalSize; ii++) {", + " keyArray[ii] = DBTimeUtils.nanosToTime(directoryValues[ii + (int)first]);", + " }", + " } else if (leafCount > 0) {", + " int offset = 0;", + " int copied = 0;", + " int skipped = 0;", + " for (int li = 0; li < leafCount; ++li) {", + " if(skipped < first) {", + " final int toSkip = (int)first - skipped;", + " if(toSkip < leafSizes[li]) {", + " final int nToCopy = Math.min(leafSizes[li] - toSkip, totalSize);", + " 
for(int jj = 0; jj < nToCopy; jj++) {", + " keyArray[jj] = DBTimeUtils.nanosToTime(leafValues[li][jj + toSkip]);", + " }", + " copied = nToCopy;", + " offset = copied;", + " skipped = (int)first;", + " } else {", + " skipped += leafSizes[li];", + " }", + " } else {", + " int nToCopy = Math.min(leafSizes[li], totalSize - copied);", + " for(int jj = 0; jj < nToCopy; jj++) {", + " keyArray[jj + offset] = DBTimeUtils.nanosToTime(leafValues[li][jj]);", + " }", + " offset += leafSizes[li];", + " copied += nToCopy;", + " }", + " }", + " }", + " return keyArray;", + " }", + "", + " public String toDateString() {", + " final StringBuilder arrAsString = new StringBuilder(\"[\");", + " if (leafCount == 1) {", + " for(int ii = 0; ii < intSize(); ii++) {", + " arrAsString.append(DBTimeUtils.nanosToTime(directoryValues[ii])).append(\", \");", + " }", + " ", + " arrAsString.replace(arrAsString.length() - 2, arrAsString.length(), \"]\");", + " return arrAsString.toString();", + " } else if (leafCount > 0) {", + " for (int li = 0; li < leafCount; ++li) {", + " for(int ai = 0; ai < leafSizes[li]; ai++) {", + " arrAsString.append(DBTimeUtils.nanosToTime(leafValues[li][ai])).append(\", \");", + " }", + " }", + "", + " arrAsString.replace(arrAsString.length() - 2, arrAsString.length(), \"]\");", + " return arrAsString.toString();", + " }", + "", + " return \"[]\";", + " }")); FileUtils.writeLines(longFile, lines); } diff --git a/DB/src/main/java/io/deephaven/db/v2/ssms/SegmentedSortedMultiSet.java b/DB/src/main/java/io/deephaven/db/v2/ssms/SegmentedSortedMultiSet.java index 84b5671b323..b91c63c8a00 100644 --- a/DB/src/main/java/io/deephaven/db/v2/ssms/SegmentedSortedMultiSet.java +++ b/DB/src/main/java/io/deephaven/db/v2/ssms/SegmentedSortedMultiSet.java @@ -11,21 +11,19 @@ import java.util.function.Supplier; /** - * MultiSet of primitive or object values stored as parallel arrays of counts and values. Nulls - * disallowed. 
+ * MultiSet of primitive or object values stored as parallel arrays of counts and values. Nulls disallowed. * * @param */ public interface SegmentedSortedMultiSet extends LongSizedDataStructure { - boolean SEGMENTED_SORTED_MULTISET_VALIDATION = Configuration.getInstance() - .getBooleanWithDefault("SegmentedSortedMultiSet.validation", false); + boolean SEGMENTED_SORTED_MULTISET_VALIDATION = + Configuration.getInstance().getBooleanWithDefault("SegmentedSortedMultiSet.validation", false); static SegmentedSortedMultiSet make(ChunkType chunkType, int nodeSize, Class objectType) { return makeFactory(chunkType, nodeSize, objectType).get(); } - static Supplier makeFactory(ChunkType chunkType, int nodeSize, - Class objectType) { + static Supplier makeFactory(ChunkType chunkType, int nodeSize, Class objectType) { switch (chunkType) { case Char: return () -> new CharSegmentedSortedMultiset(nodeSize); @@ -58,18 +56,15 @@ static RemoveContext makeRemoveContext(int nodeSize) { } /** - * Insert new valuesToInsert into this SSMS. The valuesToInsert to insert must be sorted, - * without duplicates. + * Insert new valuesToInsert into this SSMS. The valuesToInsert to insert must be sorted, without duplicates. * - * The valuesToInsert and counts chunks will be modified during this call, and the resulting - * chunks are undefined. + * The valuesToInsert and counts chunks will be modified during this call, and the resulting chunks are undefined. * * @param valuesToInsert the valuesToInsert to insert * @param counts the number of times each value occurs * @return true if any new values were inserted */ - boolean insert(WritableChunk valuesToInsert, - WritableIntChunk counts); + boolean insert(WritableChunk valuesToInsert, WritableIntChunk counts); /** * Remove valuesToRemove from this SSMS. The valuesToRemove to remove must be sorted. @@ -79,7 +74,7 @@ boolean insert(WritableChunk valuesToInsert, * @return true if any values were removed. 
*/ boolean remove(RemoveContext removeContext, WritableChunk valuesToRemove, - WritableIntChunk lengths); + WritableIntChunk lengths); @NotNull default Chunk keyChunk() { @@ -130,8 +125,7 @@ void ensureLeafCount(int leafCount) { long totalSize(); /** - * Remove count elements from the front of this SSM and add them to the back of the destination - * SSM. + * Remove count elements from the front of this SSM and add them to the back of the destination SSM. *

    * The minimum element of this SSM must be greater than or equal to the maximum of destination. * @@ -141,8 +135,7 @@ void ensureLeafCount(int leafCount) { void moveFrontToBack(SegmentedSortedMultiSet destination, long count); /** - * Remove count elements from the back of this SSM and add them to the front of the destination - * SSM. + * Remove count elements from the back of this SSM and add them to the front of the destination SSM. *

    * The minimum element of this SSM must be less than or equal to the maximum of destination. * diff --git a/DB/src/main/java/io/deephaven/db/v2/tuples/AbstractTupleSource.java b/DB/src/main/java/io/deephaven/db/v2/tuples/AbstractTupleSource.java index daef6fe3616..dd53d05a6fc 100644 --- a/DB/src/main/java/io/deephaven/db/v2/tuples/AbstractTupleSource.java +++ b/DB/src/main/java/io/deephaven/db/v2/tuples/AbstractTupleSource.java @@ -11,7 +11,7 @@ import java.util.stream.Stream; public abstract class AbstractTupleSource - implements TupleSource, DefaultChunkSource.WithPrev { + implements TupleSource, DefaultChunkSource.WithPrev { private final ColumnSource[] columnSources; private final List listColumnSources; @@ -38,8 +38,7 @@ public final FillContext makeFillContext(int chunkCapacity, SharedContext shared @Override public final void fillChunk(@NotNull FillContext context, - @NotNull WritableChunk destination, - @NotNull OrderedKeys orderedKeys) { + @NotNull WritableChunk destination, @NotNull OrderedKeys orderedKeys) { // noinspection unchecked TupleFillContext tupleFillContext = (TupleFillContext) context; GetContext[] getContexts = tupleFillContext.getContexts; @@ -55,8 +54,7 @@ public final void fillChunk(@NotNull FillContext context, @Override public final void fillPrevChunk(@NotNull FillContext context, - @NotNull WritableChunk destination, - @NotNull OrderedKeys orderedKeys) { + @NotNull WritableChunk destination, @NotNull OrderedKeys orderedKeys) { // noinspection unchecked TupleFillContext tupleFillContext = (TupleFillContext) context; GetContext[] getContexts = tupleFillContext.getContexts; @@ -70,9 +68,8 @@ public final void fillPrevChunk(@NotNull FillContext context, convertChunks(destination, orderedKeys.intSize(), chunks); } - protected abstract void convertChunks( - @NotNull WritableChunk destination, int chunkSize, - Chunk[] chunks); + protected abstract void convertChunks(@NotNull WritableChunk destination, int chunkSize, + Chunk[] chunks); class 
TupleFillContext implements FillContext { @@ -81,8 +78,7 @@ class TupleFillContext implements FillContext { TupleFillContext(int chunkCapacity, SharedContext sharedContext) { - this.getContexts = - Stream.of(columnSources).map(cs -> cs.makeGetContext(chunkCapacity, sharedContext)) + this.getContexts = Stream.of(columnSources).map(cs -> cs.makeGetContext(chunkCapacity, sharedContext)) .toArray(GetContext[]::new); // noinspection unchecked this.chunks = new Chunk[columnSources.length]; diff --git a/DB/src/main/java/io/deephaven/db/v2/tuples/EmptyTupleSource.java b/DB/src/main/java/io/deephaven/db/v2/tuples/EmptyTupleSource.java index 63a8afd5422..8bc41c14c3f 100644 --- a/DB/src/main/java/io/deephaven/db/v2/tuples/EmptyTupleSource.java +++ b/DB/src/main/java/io/deephaven/db/v2/tuples/EmptyTupleSource.java @@ -15,8 +15,7 @@ *

    * {@link TupleSource} that produces only the {@link EmptyTuple}. */ -enum EmptyTupleSource - implements TupleSource, DefaultChunkSource.WithPrev { +enum EmptyTupleSource implements TupleSource, DefaultChunkSource.WithPrev { INSTANCE; @@ -42,15 +41,13 @@ public EmptyTuple createTupleFromValues(@NotNull final Object... values) { @Override public void exportElement(@NotNull final EmptyTuple tuple, final int elementIndex, - @NotNull final WritableSource writableSource, final long destinationIndexKey) { - throw new UnsupportedOperationException( - "EmptyTuple does not contain any elements to export"); + @NotNull final WritableSource writableSource, final long destinationIndexKey) { + throw new UnsupportedOperationException("EmptyTuple does not contain any elements to export"); } @Override public Object exportElement(EmptyTuple tuple, int elementIndex) { - throw new UnsupportedOperationException( - "EmptyTuple does not contain any elements to export"); + throw new UnsupportedOperationException("EmptyTuple does not contain any elements to export"); } @Override @@ -64,18 +61,15 @@ public ChunkType getChunkType() { } @Override - public void fillChunk(@NotNull FillContext context, - @NotNull WritableChunk destination, - @NotNull OrderedKeys orderedKeys) { - destination.asWritableObjectChunk().fillWithValue(0, orderedKeys.intSize(), - EmptyTuple.INSTANCE); + public void fillChunk(@NotNull FillContext context, @NotNull WritableChunk destination, + @NotNull OrderedKeys orderedKeys) { + destination.asWritableObjectChunk().fillWithValue(0, orderedKeys.intSize(), EmptyTuple.INSTANCE); destination.setSize(orderedKeys.intSize()); } @Override public void fillPrevChunk(@NotNull FillContext context, - @NotNull WritableChunk destination, - @NotNull OrderedKeys orderedKeys) { + @NotNull WritableChunk destination, @NotNull OrderedKeys orderedKeys) { fillChunk(context, destination, orderedKeys); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/tuples/MultiColumnTupleSource.java 
b/DB/src/main/java/io/deephaven/db/v2/tuples/MultiColumnTupleSource.java index a8f3709357a..af0022e16fa 100644 --- a/DB/src/main/java/io/deephaven/db/v2/tuples/MultiColumnTupleSource.java +++ b/DB/src/main/java/io/deephaven/db/v2/tuples/MultiColumnTupleSource.java @@ -18,19 +18,17 @@ /** *

    - * {@link TupleSource} that produces key column values as {@link ArrayTuple}s from multiple - * {@link ColumnSource}s. + * {@link TupleSource} that produces key column values as {@link ArrayTuple}s from multiple {@link ColumnSource}s. */ -final class MultiColumnTupleSource - implements TupleSource, DefaultChunkSource.WithPrev { +final class MultiColumnTupleSource implements TupleSource, DefaultChunkSource.WithPrev { private final ColumnSource[] columnSources; private final List columnSourceList; /** - * Construct a new tuple source backed by the supplied column sources. The column sources array - * should not be changed after this call. + * Construct a new tuple source backed by the supplied column sources. The column sources array should not be + * changed after this call. * * @param columnSources The column sources to produce tuples from */ @@ -73,9 +71,8 @@ public final ArrayTuple createTupleFromValues(@NotNull final Object... values) { } @Override - public final void exportElement(@NotNull final ArrayTuple tuple, - final int elementIndex, @NotNull final WritableSource writableSource, - final long destinationIndexKey) { + public final void exportElement(@NotNull final ArrayTuple tuple, final int elementIndex, + @NotNull final WritableSource writableSource, final long destinationIndexKey) { writableSource.set(destinationIndexKey, tuple.getElement(elementIndex)); } @@ -95,28 +92,25 @@ public ChunkType getChunkType() { } @Override - public Chunk getChunk(@NotNull ChunkSource.GetContext context, - @NotNull OrderedKeys orderedKeys) { + public Chunk getChunk(@NotNull ChunkSource.GetContext context, @NotNull OrderedKeys orderedKeys) { return getChunk(context, orderedKeys, false); } - public Chunk getPrevChunk(@NotNull ChunkSource.GetContext context, - @NotNull OrderedKeys orderedKeys) { + public Chunk getPrevChunk(@NotNull ChunkSource.GetContext context, @NotNull OrderedKeys orderedKeys) { return getChunk(context, orderedKeys, true); } - private Chunk 
getChunk(@NotNull ChunkSource.GetContext context, - @NotNull OrderedKeys orderedKeys, boolean usePrev) { + private Chunk getChunk(@NotNull ChunkSource.GetContext context, @NotNull OrderedKeys orderedKeys, + boolean usePrev) { final GetContext gc = (GetContext) context; - final ObjectChunk[] underlyingValues = - getUnderlyingChunks(orderedKeys, usePrev, gc); + final ObjectChunk[] underlyingValues = getUnderlyingChunks(orderedKeys, usePrev, gc); fillFromUnderlying(orderedKeys, underlyingValues, gc.values); return gc.values; } private void fillFromUnderlying(@NotNull OrderedKeys orderedKeys, - ObjectChunk[] underlyingValues, - WritableObjectChunk destination) { + ObjectChunk[] underlyingValues, + WritableObjectChunk destination) { final int length = columnSources.length; final int size = orderedKeys.intSize(); destination.setSize(size); @@ -130,8 +124,8 @@ private void fillFromUnderlying(@NotNull OrderedKeys orderedKeys, } @NotNull - private ObjectChunk[] getUnderlyingChunks(@NotNull OrderedKeys orderedKeys, - boolean usePrev, FillContext fc) { + private ObjectChunk[] getUnderlyingChunks(@NotNull OrderedKeys orderedKeys, boolean usePrev, + FillContext fc) { final int length = columnSources.length; // noinspection unchecked @@ -140,11 +134,9 @@ private void fillFromUnderlying(@NotNull OrderedKeys orderedKeys, final Chunk underlyingChunk; if (usePrev) { // noinspection unchecked - underlyingChunk = - columnSources[csi].getPrevChunk(fc.underlyingContexts[csi], orderedKeys); + underlyingChunk = columnSources[csi].getPrevChunk(fc.underlyingContexts[csi], orderedKeys); } else { - underlyingChunk = - columnSources[csi].getChunk(fc.underlyingContexts[csi], orderedKeys); + underlyingChunk = columnSources[csi].getChunk(fc.underlyingContexts[csi], orderedKeys); } underlyingValues[csi] = fc.boxers[csi].box(underlyingChunk); } @@ -152,19 +144,17 @@ private void fillFromUnderlying(@NotNull OrderedKeys orderedKeys, } @Override - public void fillChunk(@NotNull ChunkSource.FillContext 
context, - @NotNull WritableChunk destination, @NotNull OrderedKeys orderedKeys) { + public void fillChunk(@NotNull ChunkSource.FillContext context, @NotNull WritableChunk destination, + @NotNull OrderedKeys orderedKeys) { final FillContext fc = (FillContext) context; - final ObjectChunk[] underlyingValues = - getUnderlyingChunks(orderedKeys, false, fc); + final ObjectChunk[] underlyingValues = getUnderlyingChunks(orderedKeys, false, fc); fillFromUnderlying(orderedKeys, underlyingValues, destination.asWritableObjectChunk()); } public void fillPrevChunk(@NotNull ChunkSource.FillContext context, - @NotNull WritableChunk destination, @NotNull OrderedKeys orderedKeys) { + @NotNull WritableChunk destination, @NotNull OrderedKeys orderedKeys) { final FillContext fc = (FillContext) context; - final ObjectChunk[] underlyingValues = - getUnderlyingChunks(orderedKeys, true, fc); + final ObjectChunk[] underlyingValues = getUnderlyingChunks(orderedKeys, true, fc); fillFromUnderlying(orderedKeys, underlyingValues, destination.asWritableObjectChunk()); } @@ -173,11 +163,10 @@ private static class FillContext implements ChunkSource.FillContext { final ChunkBoxer.BoxerKernel[] boxers; private FillContext(int chunkCapacity, ColumnSource[] columnSources) { - underlyingContexts = Arrays.stream(columnSources) - .map(cs -> cs.makeGetContext(chunkCapacity)).toArray(ChunkSource.GetContext[]::new); - boxers = Arrays.stream(columnSources) - .map(cs -> ChunkBoxer.getBoxer(cs.getChunkType(), chunkCapacity)) - .toArray(ChunkBoxer.BoxerKernel[]::new); + underlyingContexts = Arrays.stream(columnSources).map(cs -> cs.makeGetContext(chunkCapacity)) + .toArray(ChunkSource.GetContext[]::new); + boxers = Arrays.stream(columnSources).map(cs -> ChunkBoxer.getBoxer(cs.getChunkType(), chunkCapacity)) + .toArray(ChunkBoxer.BoxerKernel[]::new); } @Override diff --git a/DB/src/main/java/io/deephaven/db/v2/tuples/SingleColumnTupleSource.java 
b/DB/src/main/java/io/deephaven/db/v2/tuples/SingleColumnTupleSource.java index 412652e5dd3..1525510cce4 100644 --- a/DB/src/main/java/io/deephaven/db/v2/tuples/SingleColumnTupleSource.java +++ b/DB/src/main/java/io/deephaven/db/v2/tuples/SingleColumnTupleSource.java @@ -16,7 +16,7 @@ */ @SuppressWarnings("unused") class SingleColumnTupleSource - implements TupleSource, DefaultChunkSource.WithPrev { + implements TupleSource, DefaultChunkSource.WithPrev { private final ColumnSource columnSource; @@ -49,9 +49,8 @@ public final TUPLE_TYPE createTupleFromValues(@NotNull final Object... values) { } @Override - public void exportElement(@NotNull final TUPLE_TYPE tuple, - final int elementIndex, @NotNull final WritableSource writableSource, - final long destinationIndexKey) { + public void exportElement(@NotNull final TUPLE_TYPE tuple, final int elementIndex, + @NotNull final WritableSource writableSource, final long destinationIndexKey) { // noinspection unchecked writableSource.set(destinationIndexKey, (ELEMENT_TYPE) tuple); } @@ -73,22 +72,19 @@ public ChunkType getChunkType() { } @Override - public Chunk getChunk(@NotNull GetContext context, - @NotNull OrderedKeys orderedKeys) { + public Chunk getChunk(@NotNull GetContext context, @NotNull OrderedKeys orderedKeys) { return columnSource.getChunk(context, orderedKeys); } @Override - public void fillChunk(@NotNull FillContext context, - @NotNull WritableChunk destination, - @NotNull OrderedKeys orderedKeys) { + public void fillChunk(@NotNull FillContext context, @NotNull WritableChunk destination, + @NotNull OrderedKeys orderedKeys) { columnSource.fillChunk(context, destination, orderedKeys); } @Override public void fillPrevChunk(@NotNull FillContext context, - @NotNull WritableChunk destination, - @NotNull OrderedKeys orderedKeys) { + @NotNull WritableChunk destination, @NotNull OrderedKeys orderedKeys) { columnSource.fillPrevChunk(context, destination, orderedKeys); } diff --git 
a/DB/src/main/java/io/deephaven/db/v2/tuples/SmartKeySource.java b/DB/src/main/java/io/deephaven/db/v2/tuples/SmartKeySource.java index dc908478e5a..2ce576c2b5d 100644 --- a/DB/src/main/java/io/deephaven/db/v2/tuples/SmartKeySource.java +++ b/DB/src/main/java/io/deephaven/db/v2/tuples/SmartKeySource.java @@ -16,18 +16,17 @@ /** *

    - * {@link ColumnSource} that produces key column values as {@link SmartKey}s from multiple - * {@link ColumnSource}s. + * {@link ColumnSource} that produces key column values as {@link SmartKey}s from multiple {@link ColumnSource}s. */ public final class SmartKeySource extends AbstractColumnSource - implements TupleSource, MutableColumnSourceGetDefaults.ForObject { + implements TupleSource, MutableColumnSourceGetDefaults.ForObject { private final ColumnSource[] columnSources; private final List columnSourceList; /** - * Construct a new tuple source backed by the supplied column sources. The column sources array - * should not be changed after this call. + * Construct a new tuple source backed by the supplied column sources. The column sources array should not be + * changed after this call. * * @param columnSources The column sources to produce tuples from */ @@ -90,9 +89,8 @@ public final SmartKey createTupleFromValues(@NotNull final Object... values) { } @Override - public final void exportElement(@NotNull final SmartKey smartKey, - final int elementIndex, @NotNull final WritableSource writableSource, - final long destinationIndexKey) { + public final void exportElement(@NotNull final SmartKey smartKey, final int elementIndex, + @NotNull final WritableSource writableSource, final long destinationIndexKey) { // noinspection unchecked writableSource.set(destinationIndexKey, (ELEMENT_TYPE) smartKey.get(elementIndex)); } @@ -109,27 +107,26 @@ public final SmartKey exportToExternalKey(@NotNull final SmartKey smartKey) { @Override public final Chunk getChunk(@NotNull final ChunkSource.GetContext context, - @NotNull final OrderedKeys orderedKeys) { + @NotNull final OrderedKeys orderedKeys) { return getChunk(context, orderedKeys, false); } public final Chunk getPrevChunk(@NotNull final ChunkSource.GetContext context, - @NotNull final OrderedKeys orderedKeys) { + @NotNull final OrderedKeys orderedKeys) { return getChunk(context, orderedKeys, true); } private Chunk 
getChunk(@NotNull final ChunkSource.GetContext context, - @NotNull final OrderedKeys orderedKeys, final boolean usePrev) { + @NotNull final OrderedKeys orderedKeys, final boolean usePrev) { final GetContext gc = (GetContext) context; - final ObjectChunk[] underlyingValues = - getUnderlyingChunks(orderedKeys, usePrev, gc); + final ObjectChunk[] underlyingValues = getUnderlyingChunks(orderedKeys, usePrev, gc); fillFromUnderlying(orderedKeys, underlyingValues, gc.values); return gc.values; } private void fillFromUnderlying(@NotNull final OrderedKeys orderedKeys, - @NotNull final ObjectChunk[] underlyingValues, - @NotNull final WritableObjectChunk destination) { + @NotNull final ObjectChunk[] underlyingValues, + @NotNull final WritableObjectChunk destination) { final int length = columnSources.length; final int size = orderedKeys.intSize(); destination.setSize(size); @@ -143,9 +140,8 @@ private void fillFromUnderlying(@NotNull final OrderedKeys orderedKeys, } @NotNull - private ObjectChunk[] getUnderlyingChunks( - @NotNull final OrderedKeys orderedKeys, final boolean usePrev, - @NotNull final FillContext fillContext) { + private ObjectChunk[] getUnderlyingChunks(@NotNull final OrderedKeys orderedKeys, + final boolean usePrev, @NotNull final FillContext fillContext) { final int length = columnSources.length; // noinspection unchecked @@ -154,12 +150,10 @@ private void fillFromUnderlying(@NotNull final OrderedKeys orderedKeys, final Chunk underlyingChunk; if (usePrev) { // noinspection unchecked - underlyingChunk = columnSources[csi] - .getPrevChunk(fillContext.underlyingContexts[csi], orderedKeys); + underlyingChunk = columnSources[csi].getPrevChunk(fillContext.underlyingContexts[csi], orderedKeys); } else { // noinspection unchecked - underlyingChunk = - columnSources[csi].getChunk(fillContext.underlyingContexts[csi], orderedKeys); + underlyingChunk = columnSources[csi].getChunk(fillContext.underlyingContexts[csi], orderedKeys); } underlyingValues[csi] = 
fillContext.boxers[csi].box(underlyingChunk); } @@ -168,20 +162,16 @@ private void fillFromUnderlying(@NotNull final OrderedKeys orderedKeys, @Override public final void fillChunk(@NotNull final ChunkSource.FillContext context, - @NotNull final WritableChunk destination, - @NotNull final OrderedKeys orderedKeys) { + @NotNull final WritableChunk destination, @NotNull final OrderedKeys orderedKeys) { final FillContext fc = (FillContext) context; - final ObjectChunk[] underlyingValues = - getUnderlyingChunks(orderedKeys, false, fc); + final ObjectChunk[] underlyingValues = getUnderlyingChunks(orderedKeys, false, fc); fillFromUnderlying(orderedKeys, underlyingValues, destination.asWritableObjectChunk()); } public final void fillPrevChunk(@NotNull final ChunkSource.FillContext context, - @NotNull final WritableChunk destination, - @NotNull final OrderedKeys orderedKeys) { + @NotNull final WritableChunk destination, @NotNull final OrderedKeys orderedKeys) { final FillContext fc = (FillContext) context; - final ObjectChunk[] underlyingValues = - getUnderlyingChunks(orderedKeys, true, fc); + final ObjectChunk[] underlyingValues = getUnderlyingChunks(orderedKeys, true, fc); fillFromUnderlying(orderedKeys, underlyingValues, destination.asWritableObjectChunk()); } @@ -191,11 +181,10 @@ private static class FillContext implements ChunkSource.FillContext { private final ChunkBoxer.BoxerKernel[] boxers; private FillContext(final int chunkCapacity, @NotNull final ColumnSource[] columnSources) { - underlyingContexts = Arrays.stream(columnSources) - .map(cs -> cs.makeGetContext(chunkCapacity)).toArray(ChunkSource.GetContext[]::new); - boxers = Arrays.stream(columnSources) - .map(cs -> ChunkBoxer.getBoxer(cs.getChunkType(), chunkCapacity)) - .toArray(ChunkBoxer.BoxerKernel[]::new); + underlyingContexts = Arrays.stream(columnSources).map(cs -> cs.makeGetContext(chunkCapacity)) + .toArray(ChunkSource.GetContext[]::new); + boxers = Arrays.stream(columnSources).map(cs -> 
ChunkBoxer.getBoxer(cs.getChunkType(), chunkCapacity)) + .toArray(ChunkBoxer.BoxerKernel[]::new); } @Override @@ -222,14 +211,12 @@ public void close() { } @Override - public final GetContext makeGetContext(final int chunkCapacity, - final SharedContext sharedContext) { + public final GetContext makeGetContext(final int chunkCapacity, final SharedContext sharedContext) { return new GetContext(chunkCapacity, columnSources); } @Override - public final FillContext makeFillContext(final int chunkCapacity, - final SharedContext sharedContext) { + public final FillContext makeFillContext(final int chunkCapacity, final SharedContext sharedContext) { return new FillContext(chunkCapacity, columnSources); } diff --git a/DB/src/main/java/io/deephaven/db/v2/tuples/ThreeColumnTupleSourceFactory.java b/DB/src/main/java/io/deephaven/db/v2/tuples/ThreeColumnTupleSourceFactory.java index ef1a2c78cb0..4ae72f8231a 100644 --- a/DB/src/main/java/io/deephaven/db/v2/tuples/ThreeColumnTupleSourceFactory.java +++ b/DB/src/main/java/io/deephaven/db/v2/tuples/ThreeColumnTupleSourceFactory.java @@ -17,6 +17,6 @@ public interface ThreeColumnTupleSourceFactory create(@NotNull ColumnSource columnSource1, - @NotNull ColumnSource columnSource2, - @NotNull ColumnSource columnSource3); + @NotNull ColumnSource columnSource2, + @NotNull ColumnSource columnSource3); } diff --git a/DB/src/main/java/io/deephaven/db/v2/tuples/TupleExporter.java b/DB/src/main/java/io/deephaven/db/v2/tuples/TupleExporter.java index 968d93af2e3..0937704cd45 100644 --- a/DB/src/main/java/io/deephaven/db/v2/tuples/TupleExporter.java +++ b/DB/src/main/java/io/deephaven/db/v2/tuples/TupleExporter.java @@ -5,15 +5,15 @@ import org.jetbrains.annotations.NotNull; /** - * Interface for classes that know how to export the elements of a given tuple type. Currently - * supports element-wise export to a {@link WritableSource} without unnecessary boxing, or full - * export to a "{@link SmartKey}" with the necessary boxing. 
+ * Interface for classes that know how to export the elements of a given tuple type. Currently supports element-wise + * export to a {@link WritableSource} without unnecessary boxing, or full export to a "{@link SmartKey}" with the + * necessary boxing. */ public interface TupleExporter { /** - * Export a single element from the tuple, identified by its element index, to the destination - * index key of the supplied writable source. + * Export a single element from the tuple, identified by its element index, to the destination index key of the + * supplied writable source. *

    * For the empty tuple, this is unsupported. *

    @@ -27,7 +27,7 @@ public interface TupleExporter { * @param destinationIndexKey The destination index key */ void exportElement(TUPLE_TYPE tuple, int elementIndex, - @NotNull WritableSource writableSource, long destinationIndexKey); + @NotNull WritableSource writableSource, long destinationIndexKey); /** * Export a single element from the tuple, identified by its element index, to an Object @@ -45,8 +45,8 @@ void exportElement(TUPLE_TYPE tuple, int elementIndex, Object exportElement(TUPLE_TYPE tuple, int elementIndex); /** - * Export a single element from the tuple, identified by its element index, to an Object. If the - * tuple has been internally reinterpreted, return the reinterpreted value. + * Export a single element from the tuple, identified by its element index, to an Object. If the tuple has been + * internally reinterpreted, return the reinterpreted value. * *

    * For the empty tuple, this is unsupported. @@ -64,8 +64,7 @@ default Object exportElementReinterpreted(TUPLE_TYPE tuple, int elementIndex) { /** *

    - * Export this tuple's element list as a key suitable for the - * {@link io.deephaven.db.v2.TableMap}s resulting from + * Export this tuple's element list as a key suitable for the {@link io.deephaven.db.v2.TableMap}s resulting from * {@link io.deephaven.db.tables.Table#byExternal}. *

    * For the empty tuple this is a unsupported. diff --git a/DB/src/main/java/io/deephaven/db/v2/tuples/TupleSource.java b/DB/src/main/java/io/deephaven/db/v2/tuples/TupleSource.java index eb4605530e7..5668b40d7a3 100644 --- a/DB/src/main/java/io/deephaven/db/v2/tuples/TupleSource.java +++ b/DB/src/main/java/io/deephaven/db/v2/tuples/TupleSource.java @@ -8,11 +8,9 @@ import java.util.List; /** - * Factory to produce immutable tuples from a long key in {@link io.deephaven.db.v2.utils.Index} - * space. + * Factory to produce immutable tuples from a long key in {@link io.deephaven.db.v2.utils.Index} space. */ -public interface TupleSource - extends TupleExporter, ChunkSource.WithPrev { +public interface TupleSource extends TupleExporter, ChunkSource.WithPrev { /** * Get the {@link ColumnSource}s backing this tuple source. @@ -46,8 +44,8 @@ public interface TupleSource TUPLE_TYPE createTupleFromValues(@NotNull final Object... values); /** - * Create a tuple for the supplied reinterpreted values (e.g., those that come from the - * getColumnSources after a reinterpretation by {@link TupleSourceFactory}). + * Create a tuple for the supplied reinterpreted values (e.g., those that come from the getColumnSources after a + * reinterpretation by {@link TupleSourceFactory}). 
* * @param values The values * @return The resulting tuple diff --git a/DB/src/main/java/io/deephaven/db/v2/tuples/TupleSourceCodeGenerator.java b/DB/src/main/java/io/deephaven/db/v2/tuples/TupleSourceCodeGenerator.java index 86d46c6df74..5554d7e6dde 100644 --- a/DB/src/main/java/io/deephaven/db/v2/tuples/TupleSourceCodeGenerator.java +++ b/DB/src/main/java/io/deephaven/db/v2/tuples/TupleSourceCodeGenerator.java @@ -28,10 +28,9 @@ */ public class TupleSourceCodeGenerator { - private static final String OUTPUT_PACKAGE = - TupleSourceCodeGenerator.class.getPackage().getName() + ".generated"; + private static final String OUTPUT_PACKAGE = TupleSourceCodeGenerator.class.getPackage().getName() + ".generated"; private static final File OUTPUT_RELATIVE_PATH = - new File(new File("DB", MAIN_SRC), OUTPUT_PACKAGE.replace('.', File.separatorChar)); + new File(new File("DB", MAIN_SRC), OUTPUT_PACKAGE.replace('.', File.separatorChar)); private static final String CS = "$cs$"; private static final String VAL = "$val$"; @@ -39,17 +38,17 @@ public class TupleSourceCodeGenerator { private static final String NEW_LINE = System.getProperty("line.separator"); private static final String[] DEFAULT_IMPORTS = Stream.of( - ColumnSource.class, - NotNull.class, - SmartKey.class, - TupleSource.class, - WritableSource.class, - AbstractTupleSource.class, - Chunk.class, - Attributes.class, - WritableChunk.class, - ObjectChunk.class, - WritableObjectChunk.class).map(Class::getName).toArray(String[]::new); + ColumnSource.class, + NotNull.class, + SmartKey.class, + TupleSource.class, + WritableSource.class, + AbstractTupleSource.class, + Chunk.class, + Attributes.class, + WritableChunk.class, + ObjectChunk.class, + WritableObjectChunk.class).map(Class::getName).toArray(String[]::new); private static final String CLASS_NAME_SUFFIX = "ColumnTupleSource"; private static final String CS1 = "columnSource1"; @@ -90,20 +89,19 @@ enum ColumnSourceType { private final ChunkType chunkType; 
ColumnSourceType(@NotNull final String nameText, - @NotNull final Class elementClass, - @Nullable final Class overrideInternalClass, - @Nullable final ColumnSourceType reinterpretAsType, - final boolean isReinterpreted, - @NotNull final String elementGetter, - @NotNull final String elementPrevGetter, - @NotNull final String boxing, - @NotNull final String unboxing, - @NotNull final String cast, - @NotNull final Class... importClasses) { + @NotNull final Class elementClass, + @Nullable final Class overrideInternalClass, + @Nullable final ColumnSourceType reinterpretAsType, + final boolean isReinterpreted, + @NotNull final String elementGetter, + @NotNull final String elementPrevGetter, + @NotNull final String boxing, + @NotNull final String unboxing, + @NotNull final String cast, + @NotNull final Class... importClasses) { this.nameText = nameText; this.elementClass = elementClass; - this.internalClass = - overrideInternalClass == null ? elementClass : overrideInternalClass; + this.internalClass = overrideInternalClass == null ? elementClass : overrideInternalClass; this.reinterpretAsType = reinterpretAsType; this.isReinterpreted = isReinterpreted; this.elementGetter = elementGetter; @@ -113,8 +111,8 @@ enum ColumnSourceType { this.cast = cast; this.chunkClass = ChunkType.fromElementType(elementClass).getEmptyChunk().getClass(); this.chunkType = ChunkType.fromElementType(elementClass); - this.imports = Stream.concat(Stream.of(chunkClass), Arrays.stream(importClasses)) - .map(Class::getName).toArray(String[]::new); + this.imports = Stream.concat(Stream.of(chunkClass), Arrays.stream(importClasses)).map(Class::getName) + .toArray(String[]::new); } String getNameText() { @@ -171,8 +169,8 @@ private String getUnboxingText(@NotNull final String valueName) { private String getExportElementText(@NotNull final String valueName) { return elementClass != internalClass || isReinterpreted || this == OBJECT - ? "(ELEMENT_TYPE) " + getBoxingText(valueName) - : valueName; + ? 
"(ELEMENT_TYPE) " + getBoxingText(valueName) + : valueName; } private String[] getImports() { @@ -181,136 +179,119 @@ private String[] getImports() { } private static ColumnSourceType forPrimitive(final Class clazz) { - return Arrays.stream(ColumnSourceType.values()) - .filter(cst -> cst.getElementClass() == clazz).findFirst() - .orElseThrow(() -> new RuntimeException("Could not find type to: " + clazz)); + return Arrays.stream(ColumnSourceType.values()).filter(cst -> cst.getElementClass() == clazz).findFirst() + .orElseThrow(() -> new RuntimeException("Could not find type to: " + clazz)); } private static String generateSimpleClassName(@NotNull final ColumnSourceType... types) { return Arrays.stream(types).map(ColumnSourceType::getNameText).collect(Collectors.joining()) - + CLASS_NAME_SUFFIX; + + CLASS_NAME_SUFFIX; } static String generateClassName(@NotNull final ColumnSourceType... types) { return OUTPUT_PACKAGE + '.' + generateSimpleClassName(types); } - private static void addReinterpretedUnboxing(@NotNull ColumnSourceType type1, Indenter indenter, - StringBuilder code, String valueName, boolean comma) { + private static void addReinterpretedUnboxing(@NotNull ColumnSourceType type1, Indenter indenter, StringBuilder code, + String valueName, boolean comma) { if (type1.isReinterpreted()) { - code.append(indenter) - .append(forPrimitive(type1.elementClass).getUnboxingText(valueName)) - .append(comma ? "," : "").append(NEW_LINE); + code.append(indenter).append(forPrimitive(type1.elementClass).getUnboxingText(valueName)) + .append(comma ? "," : "").append(NEW_LINE); } else { - code.append(indenter).append(type1.getUnboxingText(valueName)).append(comma ? "," : "") - .append(NEW_LINE); + code.append(indenter).append(type1.getUnboxingText(valueName)).append(comma ? 
"," : "").append(NEW_LINE); } } - private static void addReinterpretedExport(@NotNull ColumnSourceType type1, Indenter indenter, - StringBuilder code, String valueName) { + private static void addReinterpretedExport(@NotNull ColumnSourceType type1, Indenter indenter, StringBuilder code, + String valueName) { if (type1.isReinterpreted()) { code.append(indenter.increaseLevel()).append("return ") - .append(forPrimitive(type1.elementClass).getBoxingText(valueName)).append(";") - .append(NEW_LINE); + .append(forPrimitive(type1.elementClass).getBoxingText(valueName)).append(";").append(NEW_LINE); } else { - code.append(indenter.increaseLevel()).append("return ") - .append(type1.getBoxingText(valueName)).append(";").append(NEW_LINE); + code.append(indenter.increaseLevel()).append("return ").append(type1.getBoxingText(valueName)).append(";") + .append(NEW_LINE); } } - private String generateTwoColumnTupleSource(@NotNull final String className, - @NotNull final ColumnSourceType type1, @NotNull final ColumnSourceType type2) { + private String generateTwoColumnTupleSource(@NotNull final String className, @NotNull final ColumnSourceType type1, + @NotNull final ColumnSourceType type2) { final Indenter indenter = new Indenter(); final StringBuilder code = new StringBuilder(1024); - final String sourceClass1Name = - TypeUtils.getBoxedType(type1.getElementClass()).getSimpleName(); - final String sourceClass2Name = - TypeUtils.getBoxedType(type2.getElementClass()).getSimpleName(); - final String tupleClassName = TupleCodeGenerator.getTupleClassName(type1.getInternalClass(), - type2.getInternalClass()); - final String[] extraImports = - new String[] {TupleCodeGenerator.getTupleImport(tupleClassName), - TwoColumnTupleSourceFactory.class.getName()}; + final String sourceClass1Name = TypeUtils.getBoxedType(type1.getElementClass()).getSimpleName(); + final String sourceClass2Name = TypeUtils.getBoxedType(type2.getElementClass()).getSimpleName(); + final String tupleClassName = + 
TupleCodeGenerator.getTupleClassName(type1.getInternalClass(), type2.getInternalClass()); + final String[] extraImports = new String[] {TupleCodeGenerator.getTupleImport(tupleClassName), + TwoColumnTupleSourceFactory.class.getName()}; code.append("package ").append(OUTPUT_PACKAGE).append(';').append(NEW_LINE); code.append(NEW_LINE); - Stream.of(DEFAULT_IMPORTS, type1.getImports(), type2.getImports(), extraImports) - .flatMap(Arrays::stream).filter(i -> !i.startsWith("java.")).sorted().distinct() - .forEachOrdered( - i -> code.append("import ").append(i).append(';').append(NEW_LINE)); + Stream.of(DEFAULT_IMPORTS, type1.getImports(), type2.getImports(), extraImports).flatMap(Arrays::stream) + .filter(i -> !i.startsWith("java.")).sorted().distinct().forEachOrdered( + i -> code.append("import ").append(i).append(';').append(NEW_LINE)); code.append(NEW_LINE); - Stream.of(DEFAULT_IMPORTS, type1.getImports(), type2.getImports(), extraImports) - .flatMap(Arrays::stream).filter(i -> i.startsWith("java.")).sorted().distinct() - .forEachOrdered( - i -> code.append("import ").append(i).append(';').append(NEW_LINE)); + Stream.of(DEFAULT_IMPORTS, type1.getImports(), type2.getImports(), extraImports).flatMap(Arrays::stream) + .filter(i -> i.startsWith("java.")).sorted().distinct().forEachOrdered( + i -> code.append("import ").append(i).append(';').append(NEW_LINE)); code.append(NEW_LINE); code.append("/**").append(NEW_LINE); - code.append( - " *

    {@link TupleSource} that produces key column values from {@link ColumnSource} types ") - .append(sourceClass1Name).append(" and ").append(sourceClass2Name).append('.') - .append(NEW_LINE); - code.append(" *

    Generated by {@link ").append(TupleSourceCodeGenerator.class.getName()) - .append("}.").append(NEW_LINE); + code.append(" *

    {@link TupleSource} that produces key column values from {@link ColumnSource} types ") + .append(sourceClass1Name).append(" and ").append(sourceClass2Name).append('.').append(NEW_LINE); + code.append(" *

    Generated by {@link ").append(TupleSourceCodeGenerator.class.getName()).append("}.") + .append(NEW_LINE); code.append(" */").append(NEW_LINE); code.append("@SuppressWarnings({\"unused\", \"WeakerAccess\"})").append(NEW_LINE); - code.append("public class ").append(className).append(" extends AbstractTupleSource<") - .append(tupleClassName).append("> {").append(NEW_LINE); + code.append("public class ").append(className).append(" extends AbstractTupleSource<").append(tupleClassName) + .append("> {").append(NEW_LINE); code.append(NEW_LINE); - code.append(indenter).append("/** {@link ") - .append(TwoColumnTupleSourceFactory.class.getSimpleName()) - .append("} instance to create instances of {@link ").append(className).append("}. **/") - .append(NEW_LINE); - code.append(indenter).append("public static final ") - .append(TwoColumnTupleSourceFactory.class.getSimpleName()).append('<') - .append(tupleClassName).append(", ").append(sourceClass1Name).append(", ") - .append(sourceClass2Name) - .append("> FACTORY = new Factory();").append(NEW_LINE); + code.append(indenter).append("/** {@link ").append(TwoColumnTupleSourceFactory.class.getSimpleName()) + .append("} instance to create instances of {@link ").append(className).append("}. 
**/") + .append(NEW_LINE); + code.append(indenter).append("public static final ").append(TwoColumnTupleSourceFactory.class.getSimpleName()) + .append('<') + .append(tupleClassName).append(", ").append(sourceClass1Name).append(", ").append(sourceClass2Name) + .append("> FACTORY = new Factory();").append(NEW_LINE); code.append(NEW_LINE); - code.append(indenter).append("private final ColumnSource<").append(sourceClass1Name) - .append("> ").append(CS1).append(';').append(NEW_LINE); - code.append(indenter).append("private final ColumnSource<").append(sourceClass2Name) - .append("> ").append(CS2).append(';').append(NEW_LINE); + code.append(indenter).append("private final ColumnSource<").append(sourceClass1Name).append("> ").append(CS1) + .append(';').append(NEW_LINE); + code.append(indenter).append("private final ColumnSource<").append(sourceClass2Name).append("> ").append(CS2) + .append(';').append(NEW_LINE); code.append(NEW_LINE); code.append(indenter).append("public ").append(className).append('(').append(NEW_LINE); indenter.increaseLevel(2); - code.append(indenter).append("@NotNull final ColumnSource<").append(sourceClass1Name) - .append("> ").append(CS1).append(',').append(NEW_LINE); - code.append(indenter).append("@NotNull final ColumnSource<").append(sourceClass2Name) - .append("> ").append(CS2).append(NEW_LINE); + code.append(indenter).append("@NotNull final ColumnSource<").append(sourceClass1Name).append("> ").append(CS1) + .append(',').append(NEW_LINE); + code.append(indenter).append("@NotNull final ColumnSource<").append(sourceClass2Name).append("> ").append(CS2) + .append(NEW_LINE); indenter.decreaseLevel(2); code.append(indenter).append(") {").append(NEW_LINE); indenter.increaseLevel(); - code.append(indenter).append("super(").append(CS1).append(", ").append(CS2).append(");") - .append(NEW_LINE); - code.append(indenter).append("this.").append(CS1).append(" = ").append(CS1).append(';') - .append(NEW_LINE); - 
code.append(indenter).append("this.").append(CS2).append(" = ").append(CS2).append(';') - .append(NEW_LINE); + code.append(indenter).append("super(").append(CS1).append(", ").append(CS2).append(");").append(NEW_LINE); + code.append(indenter).append("this.").append(CS1).append(" = ").append(CS1).append(';').append(NEW_LINE); + code.append(indenter).append("this.").append(CS2).append(" = ").append(CS2).append(';').append(NEW_LINE); indenter.decreaseLevel(); code.append(indenter).append('}').append(NEW_LINE); code.append(NEW_LINE); code.append(indenter).append("@Override").append(NEW_LINE); - code.append(indenter).append("public final ").append(tupleClassName) - .append(" createTuple(final long ").append(IK).append(") {").append(NEW_LINE); + code.append(indenter).append("public final ").append(tupleClassName).append(" createTuple(final long ") + .append(IK).append(") {").append(NEW_LINE); indenter.increaseLevel(); - code.append(indenter).append("return new ").append(tupleClassName).append('(') - .append(NEW_LINE); + code.append(indenter).append("return new ").append(tupleClassName).append('(').append(NEW_LINE); indenter.increaseLevel(2); code.append(indenter).append(type1.getElementGetterText(CS1)).append(',').append(NEW_LINE); code.append(indenter).append(type2.getElementGetterText(CS2)).append(NEW_LINE); @@ -322,14 +303,12 @@ private String generateTwoColumnTupleSource(@NotNull final String className, code.append(NEW_LINE); code.append(indenter).append("@Override").append(NEW_LINE); - code.append(indenter).append("public final ").append(tupleClassName) - .append(" createPreviousTuple(final long ").append(IK).append(") {").append(NEW_LINE); + code.append(indenter).append("public final ").append(tupleClassName).append(" createPreviousTuple(final long ") + .append(IK).append(") {").append(NEW_LINE); indenter.increaseLevel(); - code.append(indenter).append("return new ").append(tupleClassName).append('(') - .append(NEW_LINE); + code.append(indenter).append("return new 
").append(tupleClassName).append('(').append(NEW_LINE); indenter.increaseLevel(2); - code.append(indenter).append(type1.getElementPrevGetterText(CS1)).append(',') - .append(NEW_LINE); + code.append(indenter).append(type1.getElementPrevGetterText(CS1)).append(',').append(NEW_LINE); code.append(indenter).append(type2.getElementPrevGetterText(CS2)).append(NEW_LINE); indenter.decreaseLevel(2); code.append(indenter).append(");").append(NEW_LINE); @@ -340,13 +319,11 @@ private String generateTwoColumnTupleSource(@NotNull final String className, code.append(indenter).append("@Override").append(NEW_LINE); code.append(indenter).append("public final ").append(tupleClassName) - .append(" createTupleFromValues(@NotNull final Object... values) {").append(NEW_LINE); + .append(" createTupleFromValues(@NotNull final Object... values) {").append(NEW_LINE); indenter.increaseLevel(); - code.append(indenter).append("return new ").append(tupleClassName).append('(') - .append(NEW_LINE); + code.append(indenter).append("return new ").append(tupleClassName).append('(').append(NEW_LINE); indenter.increaseLevel(2); - code.append(indenter).append(type1.getUnboxingText("values[0]")).append(',') - .append(NEW_LINE); + code.append(indenter).append(type1.getUnboxingText("values[0]")).append(',').append(NEW_LINE); code.append(indenter).append(type2.getUnboxingText("values[1]")).append(NEW_LINE); indenter.decreaseLevel(2); code.append(indenter).append(");").append(NEW_LINE); @@ -357,11 +334,9 @@ private String generateTwoColumnTupleSource(@NotNull final String className, code.append(indenter).append("@Override").append(NEW_LINE); code.append(indenter).append("public final ").append(tupleClassName) - .append(" createTupleFromReinterpretedValues(@NotNull final Object... values) {") - .append(NEW_LINE); + .append(" createTupleFromReinterpretedValues(@NotNull final Object... 
values) {").append(NEW_LINE); indenter.increaseLevel(); - code.append(indenter).append("return new ").append(tupleClassName).append('(') - .append(NEW_LINE); + code.append(indenter).append("return new ").append(tupleClassName).append('(').append(NEW_LINE); indenter.increaseLevel(2); addReinterpretedUnboxing(type1, indenter, code, "values[0]", true); addReinterpretedUnboxing(type2, indenter, code, "values[1]", false); @@ -374,47 +349,41 @@ private String generateTwoColumnTupleSource(@NotNull final String className, code.append(indenter).append("@SuppressWarnings(\"unchecked\")").append(NEW_LINE); code.append(indenter).append("@Override").append(NEW_LINE); - code.append(indenter) - .append("public final void exportElement(@NotNull final ") - .append(tupleClassName) - .append( - " tuple, final int elementIndex, @NotNull final WritableSource writableSource, final long destinationIndexKey) {") - .append(NEW_LINE); + code.append(indenter).append("public final void exportElement(@NotNull final ") + .append(tupleClassName) + .append(" tuple, final int elementIndex, @NotNull final WritableSource writableSource, final long destinationIndexKey) {") + .append(NEW_LINE); indenter.increaseLevel(); code.append(indenter).append("if (elementIndex == 0) {").append(NEW_LINE); indenter.increaseLevel(); code.append(indenter).append("writableSource.set(destinationIndexKey, ") - .append(type1.getExportElementText("tuple.getFirstElement()")).append(");") - .append(NEW_LINE); + .append(type1.getExportElementText("tuple.getFirstElement()")).append(");").append(NEW_LINE); code.append(indenter).append("return;").append(NEW_LINE); indenter.decreaseLevel(); code.append(indenter).append('}').append(NEW_LINE); code.append(indenter).append("if (elementIndex == 1) {").append(NEW_LINE); indenter.increaseLevel(); code.append(indenter).append("writableSource.set(destinationIndexKey, ") - .append(type2.getExportElementText("tuple.getSecondElement()")).append(");") - .append(NEW_LINE); + 
.append(type2.getExportElementText("tuple.getSecondElement()")).append(");").append(NEW_LINE); code.append(indenter).append("return;").append(NEW_LINE); indenter.decreaseLevel(); code.append(indenter).append('}').append(NEW_LINE); code.append(indenter).append( - "throw new IndexOutOfBoundsException(\"Invalid element index \" + elementIndex + \" for export\");") - .append(NEW_LINE); + "throw new IndexOutOfBoundsException(\"Invalid element index \" + elementIndex + \" for export\");") + .append(NEW_LINE); indenter.decreaseLevel(); code.append(indenter).append('}').append(NEW_LINE); code.append(NEW_LINE); code.append(indenter).append("@Override").append(NEW_LINE); - code.append(indenter).append("public final Object exportToExternalKey(@NotNull final ") - .append(tupleClassName).append(" tuple) {").append(NEW_LINE); + code.append(indenter).append("public final Object exportToExternalKey(@NotNull final ").append(tupleClassName) + .append(" tuple) {").append(NEW_LINE); indenter.increaseLevel(); code.append(indenter).append("return new SmartKey(").append(NEW_LINE); indenter.increaseLevel(2); - code.append(indenter).append(type1.getBoxingText("tuple.getFirstElement()")).append(',') - .append(NEW_LINE); - code.append(indenter).append(type2.getBoxingText("tuple.getSecondElement()")) - .append(NEW_LINE); + code.append(indenter).append(type1.getBoxingText("tuple.getFirstElement()")).append(',').append(NEW_LINE); + code.append(indenter).append(type2.getBoxingText("tuple.getSecondElement()")).append(NEW_LINE); indenter.decreaseLevel(2); code.append(indenter).append(");").append(NEW_LINE); indenter.decreaseLevel(); @@ -423,29 +392,28 @@ private String generateTwoColumnTupleSource(@NotNull final String className, code.append(NEW_LINE); code.append(indenter).append("@Override").append(NEW_LINE); - code.append(indenter).append("public final Object exportElement(@NotNull final ") - .append(tupleClassName).append(" tuple, int elementIndex) {").append(NEW_LINE); + 
code.append(indenter).append("public final Object exportElement(@NotNull final ").append(tupleClassName) + .append(" tuple, int elementIndex) {").append(NEW_LINE); indenter.increaseLevel(); code.append(indenter).append("if (elementIndex == 0) {").append(NEW_LINE); - code.append(indenter.increaseLevel()).append("return ") - .append(type1.getBoxingText("tuple.getFirstElement()")).append(";").append(NEW_LINE); + code.append(indenter.increaseLevel()).append("return ").append(type1.getBoxingText("tuple.getFirstElement()")) + .append(";").append(NEW_LINE); code.append(indenter.decreaseLevel()).append("}").append(NEW_LINE); code.append(indenter).append("if (elementIndex == 1) {").append(NEW_LINE); - code.append(indenter.increaseLevel()).append("return ") - .append(type2.getBoxingText("tuple.getSecondElement()")).append(";").append(NEW_LINE); + code.append(indenter.increaseLevel()).append("return ").append(type2.getBoxingText("tuple.getSecondElement()")) + .append(";").append(NEW_LINE); code.append(indenter.decreaseLevel()).append("}").append(NEW_LINE); code.append(indenter).append( - "throw new IllegalArgumentException(\"Bad elementIndex for 2 element tuple: \" + elementIndex);") - .append(NEW_LINE); + "throw new IllegalArgumentException(\"Bad elementIndex for 2 element tuple: \" + elementIndex);") + .append(NEW_LINE); indenter.decreaseLevel(); code.append(indenter).append('}').append(NEW_LINE); code.append(NEW_LINE); code.append(indenter).append("@Override").append(NEW_LINE); - code.append(indenter) - .append("public final Object exportElementReinterpreted(@NotNull final ") - .append(tupleClassName).append(" tuple, int elementIndex) {").append(NEW_LINE); + code.append(indenter).append("public final Object exportElementReinterpreted(@NotNull final ") + .append(tupleClassName).append(" tuple, int elementIndex) {").append(NEW_LINE); indenter.increaseLevel(); code.append(indenter).append("if (elementIndex == 0) {").append(NEW_LINE); addReinterpretedExport(type1, indenter, 
code, "tuple.getFirstElement()"); @@ -454,43 +422,37 @@ private String generateTwoColumnTupleSource(@NotNull final String className, addReinterpretedExport(type2, indenter, code, "tuple.getSecondElement()"); code.append(indenter.decreaseLevel()).append("}").append(NEW_LINE); code.append(indenter).append( - "throw new IllegalArgumentException(\"Bad elementIndex for 2 element tuple: \" + elementIndex);") - .append(NEW_LINE); + "throw new IllegalArgumentException(\"Bad elementIndex for 2 element tuple: \" + elementIndex);") + .append(NEW_LINE); indenter.decreaseLevel(); code.append(indenter).append('}').append(NEW_LINE); code.append(NEW_LINE); code.append(indenter).append( - "protected void convertChunks(@NotNull WritableChunk destination, int chunkSize, Chunk [] chunks) {") - .append(NEW_LINE); + "protected void convertChunks(@NotNull WritableChunk destination, int chunkSize, Chunk [] chunks) {") + .append(NEW_LINE); code.append(indenter.increaseLevel()).append("WritableObjectChunk<").append(tupleClassName) - .append( - ", ? super Attributes.Values> destinationObjectChunk = destination.asWritableObjectChunk();") - .append(NEW_LINE); - code.append(indenter).append(type1.getValuesChunkTypeString()) - .append(" chunk1 = chunks[0].as").append(type1.chunkType).append("Chunk();") - .append(NEW_LINE); - code.append(indenter).append(type2.getValuesChunkTypeString()) - .append(" chunk2 = chunks[1].as").append(type2.chunkType).append("Chunk();") - .append(NEW_LINE); + .append(", ? 
super Attributes.Values> destinationObjectChunk = destination.asWritableObjectChunk();") + .append(NEW_LINE); + code.append(indenter).append(type1.getValuesChunkTypeString()).append(" chunk1 = chunks[0].as") + .append(type1.chunkType).append("Chunk();").append(NEW_LINE); + code.append(indenter).append(type2.getValuesChunkTypeString()).append(" chunk2 = chunks[1].as") + .append(type2.chunkType).append("Chunk();").append(NEW_LINE); code.append(indenter).append("for (int ii = 0; ii < chunkSize; ++ii) {").append(NEW_LINE); - code.append(indenter.increaseLevel()).append("destinationObjectChunk.set(ii, new ") - .append(tupleClassName).append("(").append(type1.getFromChunkText("chunk1.get(ii)")) - .append(", ").append(type2.getFromChunkText("chunk2.get(ii)")).append("));") - .append(NEW_LINE); + code.append(indenter.increaseLevel()).append("destinationObjectChunk.set(ii, new ").append(tupleClassName) + .append("(").append(type1.getFromChunkText("chunk1.get(ii)")).append(", ") + .append(type2.getFromChunkText("chunk2.get(ii)")).append("));").append(NEW_LINE); code.append(indenter.decreaseLevel()).append("}").append(NEW_LINE); code.append(indenter).append("destination.setSize(chunkSize);").append(NEW_LINE); code.append(indenter.decreaseLevel()).append("}").append(NEW_LINE); code.append(NEW_LINE); - code.append(indenter).append("/** {@link ") - .append(TwoColumnTupleSourceFactory.class.getSimpleName()) - .append("} for instances of {@link ").append(className).append("}. **/") - .append(NEW_LINE); + code.append(indenter).append("/** {@link ").append(TwoColumnTupleSourceFactory.class.getSimpleName()) + .append("} for instances of {@link ").append(className).append("}. 
**/").append(NEW_LINE); code.append(indenter).append("private static final class Factory implements ") - .append(TwoColumnTupleSourceFactory.class.getSimpleName()) - .append('<').append(tupleClassName).append(", ").append(sourceClass1Name).append(", ") - .append(sourceClass2Name).append("> {").append(NEW_LINE); + .append(TwoColumnTupleSourceFactory.class.getSimpleName()) + .append('<').append(tupleClassName).append(", ").append(sourceClass1Name).append(", ") + .append(sourceClass2Name).append("> {").append(NEW_LINE); indenter.increaseLevel(); code.append(NEW_LINE); @@ -501,13 +463,12 @@ private String generateTwoColumnTupleSource(@NotNull final String className, code.append(NEW_LINE); code.append(indenter).append("@Override").append(NEW_LINE); - code.append(indenter).append("public TupleSource<").append(tupleClassName) - .append("> create(").append(NEW_LINE); + code.append(indenter).append("public TupleSource<").append(tupleClassName).append("> create(").append(NEW_LINE); indenter.increaseLevel(2); - code.append(indenter).append("@NotNull final ColumnSource<").append(sourceClass1Name) - .append("> ").append(CS1).append(',').append(NEW_LINE); - code.append(indenter).append("@NotNull final ColumnSource<").append(sourceClass2Name) - .append("> ").append(CS2).append(NEW_LINE); + code.append(indenter).append("@NotNull final ColumnSource<").append(sourceClass1Name).append("> ").append(CS1) + .append(',').append(NEW_LINE); + code.append(indenter).append("@NotNull final ColumnSource<").append(sourceClass2Name).append("> ").append(CS2) + .append(NEW_LINE); indenter.decreaseLevel(2); code.append(indenter).append(") {").append(NEW_LINE); indenter.increaseLevel(); @@ -529,109 +490,93 @@ private String generateTwoColumnTupleSource(@NotNull final String className, } private String generateThreeColumnTupleSource(@NotNull final String className, - @NotNull final ColumnSourceType type1, @NotNull final ColumnSourceType type2, - @NotNull final ColumnSourceType type3) { + @NotNull 
final ColumnSourceType type1, @NotNull final ColumnSourceType type2, + @NotNull final ColumnSourceType type3) { final Indenter indenter = new Indenter(); final StringBuilder code = new StringBuilder(1024); - final String sourceClass1Name = - TypeUtils.getBoxedType(type1.getElementClass()).getSimpleName(); - final String sourceClass2Name = - TypeUtils.getBoxedType(type2.getElementClass()).getSimpleName(); - final String sourceClass3Name = - TypeUtils.getBoxedType(type3.getElementClass()).getSimpleName(); + final String sourceClass1Name = TypeUtils.getBoxedType(type1.getElementClass()).getSimpleName(); + final String sourceClass2Name = TypeUtils.getBoxedType(type2.getElementClass()).getSimpleName(); + final String sourceClass3Name = TypeUtils.getBoxedType(type3.getElementClass()).getSimpleName(); final String tupleClassName = TupleCodeGenerator.getTupleClassName(type1.getInternalClass(), - type2.getInternalClass(), type3.getInternalClass()); - final String[] extraImports = - new String[] {TupleCodeGenerator.getTupleImport(tupleClassName), - ThreeColumnTupleSourceFactory.class.getName()}; + type2.getInternalClass(), type3.getInternalClass()); + final String[] extraImports = new String[] {TupleCodeGenerator.getTupleImport(tupleClassName), + ThreeColumnTupleSourceFactory.class.getName()}; code.append("package ").append(OUTPUT_PACKAGE).append(';').append(NEW_LINE); code.append(NEW_LINE); - Stream - .of(DEFAULT_IMPORTS, type1.getImports(), type2.getImports(), type3.getImports(), - extraImports) - .flatMap(Arrays::stream).filter(i -> !i.startsWith("java.")).sorted().distinct() - .forEachOrdered( - i -> code.append("import ").append(i).append(';').append(NEW_LINE)); + Stream.of(DEFAULT_IMPORTS, type1.getImports(), type2.getImports(), type3.getImports(), extraImports) + .flatMap(Arrays::stream).filter(i -> !i.startsWith("java.")).sorted().distinct().forEachOrdered( + i -> code.append("import ").append(i).append(';').append(NEW_LINE)); code.append(NEW_LINE); - Stream - 
.of(DEFAULT_IMPORTS, type1.getImports(), type2.getImports(), type3.getImports(), - extraImports) - .flatMap(Arrays::stream).filter(i -> i.startsWith("java.")).sorted().distinct() - .forEachOrdered( - i -> code.append("import ").append(i).append(';').append(NEW_LINE)); + Stream.of(DEFAULT_IMPORTS, type1.getImports(), type2.getImports(), type3.getImports(), extraImports) + .flatMap(Arrays::stream).filter(i -> i.startsWith("java.")).sorted().distinct().forEachOrdered( + i -> code.append("import ").append(i).append(';').append(NEW_LINE)); code.append(NEW_LINE); code.append("/**").append(NEW_LINE); - code.append( - " *

    {@link TupleSource} that produces key column values from {@link ColumnSource} types ") - .append(sourceClass1Name).append(", ").append(sourceClass2Name).append(", and ") - .append(sourceClass3Name).append('.').append(NEW_LINE); - code.append(" *

    Generated by {@link ").append(TupleSourceCodeGenerator.class.getName()) - .append("}.").append(NEW_LINE); + code.append(" *

    {@link TupleSource} that produces key column values from {@link ColumnSource} types ") + .append(sourceClass1Name).append(", ").append(sourceClass2Name).append(", and ") + .append(sourceClass3Name).append('.').append(NEW_LINE); + code.append(" *

    Generated by {@link ").append(TupleSourceCodeGenerator.class.getName()).append("}.") + .append(NEW_LINE); code.append(" */").append(NEW_LINE); code.append("@SuppressWarnings({\"unused\", \"WeakerAccess\"})").append(NEW_LINE); - code.append("public class ").append(className).append(" extends AbstractTupleSource<") - .append(tupleClassName).append("> {").append(NEW_LINE); + code.append("public class ").append(className).append(" extends AbstractTupleSource<").append(tupleClassName) + .append("> {").append(NEW_LINE); code.append(NEW_LINE); - code.append(indenter).append("/** {@link ") - .append(ThreeColumnTupleSourceFactory.class.getSimpleName()) - .append("} instance to create instances of {@link ").append(className).append("}. **/") - .append(NEW_LINE); - code.append(indenter).append("public static final ") - .append(ThreeColumnTupleSourceFactory.class.getSimpleName()).append('<') - .append(tupleClassName).append(", ").append(sourceClass1Name).append(", ") - .append(sourceClass2Name).append(", ").append(sourceClass3Name) - .append("> FACTORY = new Factory();").append(NEW_LINE); + code.append(indenter).append("/** {@link ").append(ThreeColumnTupleSourceFactory.class.getSimpleName()) + .append("} instance to create instances of {@link ").append(className).append("}. 
**/") + .append(NEW_LINE); + code.append(indenter).append("public static final ").append(ThreeColumnTupleSourceFactory.class.getSimpleName()) + .append('<') + .append(tupleClassName).append(", ").append(sourceClass1Name).append(", ").append(sourceClass2Name) + .append(", ").append(sourceClass3Name) + .append("> FACTORY = new Factory();").append(NEW_LINE); code.append(NEW_LINE); - code.append(indenter).append("private final ColumnSource<").append(sourceClass1Name) - .append("> ").append(CS1).append(';').append(NEW_LINE); - code.append(indenter).append("private final ColumnSource<").append(sourceClass2Name) - .append("> ").append(CS2).append(';').append(NEW_LINE); - code.append(indenter).append("private final ColumnSource<").append(sourceClass3Name) - .append("> ").append(CS3).append(';').append(NEW_LINE); + code.append(indenter).append("private final ColumnSource<").append(sourceClass1Name).append("> ").append(CS1) + .append(';').append(NEW_LINE); + code.append(indenter).append("private final ColumnSource<").append(sourceClass2Name).append("> ").append(CS2) + .append(';').append(NEW_LINE); + code.append(indenter).append("private final ColumnSource<").append(sourceClass3Name).append("> ").append(CS3) + .append(';').append(NEW_LINE); code.append(NEW_LINE); code.append(indenter).append("public ").append(className).append('(').append(NEW_LINE); indenter.increaseLevel(2); - code.append(indenter).append("@NotNull final ColumnSource<").append(sourceClass1Name) - .append("> ").append(CS1).append(',').append(NEW_LINE); - code.append(indenter).append("@NotNull final ColumnSource<").append(sourceClass2Name) - .append("> ").append(CS2).append(',').append(NEW_LINE); - code.append(indenter).append("@NotNull final ColumnSource<").append(sourceClass3Name) - .append("> ").append(CS3).append(NEW_LINE); + code.append(indenter).append("@NotNull final ColumnSource<").append(sourceClass1Name).append("> ").append(CS1) + .append(',').append(NEW_LINE); + 
code.append(indenter).append("@NotNull final ColumnSource<").append(sourceClass2Name).append("> ").append(CS2) + .append(',').append(NEW_LINE); + code.append(indenter).append("@NotNull final ColumnSource<").append(sourceClass3Name).append("> ").append(CS3) + .append(NEW_LINE); indenter.decreaseLevel(2); code.append(indenter).append(") {").append(NEW_LINE); indenter.increaseLevel(); - code.append(indenter).append("super(").append(CS1).append(", ").append(CS2).append(", ") - .append(CS3).append(");").append(NEW_LINE); - code.append(indenter).append("this.").append(CS1).append(" = ").append(CS1).append(';') - .append(NEW_LINE); - code.append(indenter).append("this.").append(CS2).append(" = ").append(CS2).append(';') - .append(NEW_LINE); - code.append(indenter).append("this.").append(CS3).append(" = ").append(CS3).append(';') - .append(NEW_LINE); + code.append(indenter).append("super(").append(CS1).append(", ").append(CS2).append(", ").append(CS3) + .append(");").append(NEW_LINE); + code.append(indenter).append("this.").append(CS1).append(" = ").append(CS1).append(';').append(NEW_LINE); + code.append(indenter).append("this.").append(CS2).append(" = ").append(CS2).append(';').append(NEW_LINE); + code.append(indenter).append("this.").append(CS3).append(" = ").append(CS3).append(';').append(NEW_LINE); indenter.decreaseLevel(); code.append(indenter).append('}').append(NEW_LINE); code.append(NEW_LINE); code.append(indenter).append("@Override").append(NEW_LINE); - code.append(indenter).append("public final ").append(tupleClassName) - .append(" createTuple(final long ").append(IK).append(") {").append(NEW_LINE); + code.append(indenter).append("public final ").append(tupleClassName).append(" createTuple(final long ") + .append(IK).append(") {").append(NEW_LINE); indenter.increaseLevel(); - code.append(indenter).append("return new ").append(tupleClassName).append('(') - .append(NEW_LINE); + code.append(indenter).append("return new 
").append(tupleClassName).append('(').append(NEW_LINE); indenter.increaseLevel(2); code.append(indenter).append(type1.getElementGetterText(CS1)).append(',').append(NEW_LINE); code.append(indenter).append(type2.getElementGetterText(CS2)).append(',').append(NEW_LINE); @@ -644,16 +589,13 @@ private String generateThreeColumnTupleSource(@NotNull final String className, code.append(NEW_LINE); code.append(indenter).append("@Override").append(NEW_LINE); - code.append(indenter).append("public final ").append(tupleClassName) - .append(" createPreviousTuple(final long ").append(IK).append(") {").append(NEW_LINE); + code.append(indenter).append("public final ").append(tupleClassName).append(" createPreviousTuple(final long ") + .append(IK).append(") {").append(NEW_LINE); indenter.increaseLevel(); - code.append(indenter).append("return new ").append(tupleClassName).append('(') - .append(NEW_LINE); + code.append(indenter).append("return new ").append(tupleClassName).append('(').append(NEW_LINE); indenter.increaseLevel(2); - code.append(indenter).append(type1.getElementPrevGetterText(CS1)).append(',') - .append(NEW_LINE); - code.append(indenter).append(type2.getElementPrevGetterText(CS2)).append(',') - .append(NEW_LINE); + code.append(indenter).append(type1.getElementPrevGetterText(CS1)).append(',').append(NEW_LINE); + code.append(indenter).append(type2.getElementPrevGetterText(CS2)).append(',').append(NEW_LINE); code.append(indenter).append(type3.getElementPrevGetterText(CS3)).append(NEW_LINE); indenter.decreaseLevel(2); code.append(indenter).append(");").append(NEW_LINE); @@ -664,15 +606,12 @@ private String generateThreeColumnTupleSource(@NotNull final String className, code.append(indenter).append("@Override").append(NEW_LINE); code.append(indenter).append("public final ").append(tupleClassName) - .append(" createTupleFromValues(@NotNull final Object... values) {").append(NEW_LINE); + .append(" createTupleFromValues(@NotNull final Object... 
values) {").append(NEW_LINE); indenter.increaseLevel(); - code.append(indenter).append("return new ").append(tupleClassName).append('(') - .append(NEW_LINE); + code.append(indenter).append("return new ").append(tupleClassName).append('(').append(NEW_LINE); indenter.increaseLevel(2); - code.append(indenter).append(type1.getUnboxingText("values[0]")).append(',') - .append(NEW_LINE); - code.append(indenter).append(type2.getUnboxingText("values[1]")).append(',') - .append(NEW_LINE); + code.append(indenter).append(type1.getUnboxingText("values[0]")).append(',').append(NEW_LINE); + code.append(indenter).append(type2.getUnboxingText("values[1]")).append(',').append(NEW_LINE); code.append(indenter).append(type3.getUnboxingText("values[2]")).append(NEW_LINE); indenter.decreaseLevel(2); code.append(indenter).append(");").append(NEW_LINE); @@ -683,11 +622,9 @@ private String generateThreeColumnTupleSource(@NotNull final String className, code.append(indenter).append("@Override").append(NEW_LINE); code.append(indenter).append("public final ").append(tupleClassName) - .append(" createTupleFromReinterpretedValues(@NotNull final Object... values) {") - .append(NEW_LINE); + .append(" createTupleFromReinterpretedValues(@NotNull final Object... 
values) {").append(NEW_LINE); indenter.increaseLevel(); - code.append(indenter).append("return new ").append(tupleClassName).append('(') - .append(NEW_LINE); + code.append(indenter).append("return new ").append(tupleClassName).append('(').append(NEW_LINE); indenter.increaseLevel(2); addReinterpretedUnboxing(type1, indenter, code, "values[0]", true); addReinterpretedUnboxing(type2, indenter, code, "values[1]", true); @@ -701,57 +638,49 @@ private String generateThreeColumnTupleSource(@NotNull final String className, code.append(indenter).append("@SuppressWarnings(\"unchecked\")").append(NEW_LINE); code.append(indenter).append("@Override").append(NEW_LINE); - code.append(indenter) - .append("public final void exportElement(@NotNull final ") - .append(tupleClassName) - .append( - " tuple, final int elementIndex, @NotNull final WritableSource writableSource, final long destinationIndexKey) {") - .append(NEW_LINE); + code.append(indenter).append("public final void exportElement(@NotNull final ") + .append(tupleClassName) + .append(" tuple, final int elementIndex, @NotNull final WritableSource writableSource, final long destinationIndexKey) {") + .append(NEW_LINE); indenter.increaseLevel(); code.append(indenter).append("if (elementIndex == 0) {").append(NEW_LINE); indenter.increaseLevel(); code.append(indenter).append("writableSource.set(destinationIndexKey, ") - .append(type1.getExportElementText("tuple.getFirstElement()")).append(");") - .append(NEW_LINE); + .append(type1.getExportElementText("tuple.getFirstElement()")).append(");").append(NEW_LINE); code.append(indenter).append("return;").append(NEW_LINE); indenter.decreaseLevel(); code.append(indenter).append('}').append(NEW_LINE); code.append(indenter).append("if (elementIndex == 1) {").append(NEW_LINE); indenter.increaseLevel(); code.append(indenter).append("writableSource.set(destinationIndexKey, ") - .append(type2.getExportElementText("tuple.getSecondElement()")).append(");") - .append(NEW_LINE); + 
.append(type2.getExportElementText("tuple.getSecondElement()")).append(");").append(NEW_LINE); code.append(indenter).append("return;").append(NEW_LINE); indenter.decreaseLevel(); code.append(indenter).append('}').append(NEW_LINE); code.append(indenter).append("if (elementIndex == 2) {").append(NEW_LINE); indenter.increaseLevel(); code.append(indenter).append("writableSource.set(destinationIndexKey, ") - .append(type3.getExportElementText("tuple.getThirdElement()")).append(");") - .append(NEW_LINE); + .append(type3.getExportElementText("tuple.getThirdElement()")).append(");").append(NEW_LINE); code.append(indenter).append("return;").append(NEW_LINE); indenter.decreaseLevel(); code.append(indenter).append('}').append(NEW_LINE); code.append(indenter).append( - "throw new IndexOutOfBoundsException(\"Invalid element index \" + elementIndex + \" for export\");") - .append(NEW_LINE); + "throw new IndexOutOfBoundsException(\"Invalid element index \" + elementIndex + \" for export\");") + .append(NEW_LINE); indenter.decreaseLevel(); code.append(indenter).append('}').append(NEW_LINE); code.append(NEW_LINE); code.append(indenter).append("@Override").append(NEW_LINE); - code.append(indenter).append("public final Object exportToExternalKey(@NotNull final ") - .append(tupleClassName).append(" tuple) {").append(NEW_LINE); + code.append(indenter).append("public final Object exportToExternalKey(@NotNull final ").append(tupleClassName) + .append(" tuple) {").append(NEW_LINE); indenter.increaseLevel(); code.append(indenter).append("return new SmartKey(").append(NEW_LINE); indenter.increaseLevel(2); - code.append(indenter).append(type1.getBoxingText("tuple.getFirstElement()")).append(',') - .append(NEW_LINE); - code.append(indenter).append(type2.getBoxingText("tuple.getSecondElement()")).append(',') - .append(NEW_LINE); - code.append(indenter).append(type3.getBoxingText("tuple.getThirdElement()")) - .append(NEW_LINE); + 
code.append(indenter).append(type1.getBoxingText("tuple.getFirstElement()")).append(',').append(NEW_LINE); + code.append(indenter).append(type2.getBoxingText("tuple.getSecondElement()")).append(',').append(NEW_LINE); + code.append(indenter).append(type3.getBoxingText("tuple.getThirdElement()")).append(NEW_LINE); indenter.decreaseLevel(2); code.append(indenter).append(");").append(NEW_LINE); indenter.decreaseLevel(); @@ -760,33 +689,32 @@ private String generateThreeColumnTupleSource(@NotNull final String className, code.append(NEW_LINE); code.append(indenter).append("@Override").append(NEW_LINE); - code.append(indenter).append("public final Object exportElement(@NotNull final ") - .append(tupleClassName).append(" tuple, int elementIndex) {").append(NEW_LINE); + code.append(indenter).append("public final Object exportElement(@NotNull final ").append(tupleClassName) + .append(" tuple, int elementIndex) {").append(NEW_LINE); indenter.increaseLevel(); code.append(indenter).append("if (elementIndex == 0) {").append(NEW_LINE); - code.append(indenter.increaseLevel()).append("return ") - .append(type1.getBoxingText("tuple.getFirstElement()")).append(";").append(NEW_LINE); + code.append(indenter.increaseLevel()).append("return ").append(type1.getBoxingText("tuple.getFirstElement()")) + .append(";").append(NEW_LINE); code.append(indenter.decreaseLevel()).append("}").append(NEW_LINE); code.append(indenter).append("if (elementIndex == 1) {").append(NEW_LINE); - code.append(indenter.increaseLevel()).append("return ") - .append(type2.getBoxingText("tuple.getSecondElement()")).append(";").append(NEW_LINE); + code.append(indenter.increaseLevel()).append("return ").append(type2.getBoxingText("tuple.getSecondElement()")) + .append(";").append(NEW_LINE); code.append(indenter.decreaseLevel()).append("}").append(NEW_LINE); code.append(indenter).append("if (elementIndex == 2) {").append(NEW_LINE); - code.append(indenter.increaseLevel()).append("return ") - 
.append(type3.getBoxingText("tuple.getThirdElement()")).append(";").append(NEW_LINE); + code.append(indenter.increaseLevel()).append("return ").append(type3.getBoxingText("tuple.getThirdElement()")) + .append(";").append(NEW_LINE); code.append(indenter.decreaseLevel()).append("}").append(NEW_LINE); code.append(indenter).append( - "throw new IllegalArgumentException(\"Bad elementIndex for 3 element tuple: \" + elementIndex);") - .append(NEW_LINE); + "throw new IllegalArgumentException(\"Bad elementIndex for 3 element tuple: \" + elementIndex);") + .append(NEW_LINE); indenter.decreaseLevel(); code.append(indenter).append('}').append(NEW_LINE); code.append(NEW_LINE); code.append(indenter).append("@Override").append(NEW_LINE); - code.append(indenter) - .append("public final Object exportElementReinterpreted(@NotNull final ") - .append(tupleClassName).append(" tuple, int elementIndex) {").append(NEW_LINE); + code.append(indenter).append("public final Object exportElementReinterpreted(@NotNull final ") + .append(tupleClassName).append(" tuple, int elementIndex) {").append(NEW_LINE); indenter.increaseLevel(); code.append(indenter).append("if (elementIndex == 0) {").append(NEW_LINE); addReinterpretedExport(type1, indenter, code, "tuple.getFirstElement()"); @@ -798,8 +726,8 @@ private String generateThreeColumnTupleSource(@NotNull final String className, addReinterpretedExport(type3, indenter, code, "tuple.getThirdElement()"); code.append(indenter.decreaseLevel()).append("}").append(NEW_LINE); code.append(indenter).append( - "throw new IllegalArgumentException(\"Bad elementIndex for 3 element tuple: \" + elementIndex);") - .append(NEW_LINE); + "throw new IllegalArgumentException(\"Bad elementIndex for 3 element tuple: \" + elementIndex);") + .append(NEW_LINE); indenter.decreaseLevel(); code.append(indenter).append('}').append(NEW_LINE); @@ -807,40 +735,33 @@ private String generateThreeColumnTupleSource(@NotNull final String className, 
code.append(indenter).append("@Override").append((NEW_LINE)); code.append(indenter).append( - "protected void convertChunks(@NotNull WritableChunk destination, int chunkSize, Chunk [] chunks) {") - .append(NEW_LINE); + "protected void convertChunks(@NotNull WritableChunk destination, int chunkSize, Chunk [] chunks) {") + .append(NEW_LINE); code.append(indenter.increaseLevel()).append("WritableObjectChunk<").append(tupleClassName) - .append( - ", ? super Attributes.Values> destinationObjectChunk = destination.asWritableObjectChunk();") - .append(NEW_LINE); - code.append(indenter).append(type1.getValuesChunkTypeString()) - .append(" chunk1 = chunks[0].as").append(type1.chunkType).append("Chunk();") - .append(NEW_LINE); - code.append(indenter).append(type2.getValuesChunkTypeString()) - .append(" chunk2 = chunks[1].as").append(type2.chunkType).append("Chunk();") - .append(NEW_LINE); - code.append(indenter).append(type3.getValuesChunkTypeString()) - .append(" chunk3 = chunks[2].as").append(type3.chunkType).append("Chunk();") - .append(NEW_LINE); + .append(", ? 
super Attributes.Values> destinationObjectChunk = destination.asWritableObjectChunk();") + .append(NEW_LINE); + code.append(indenter).append(type1.getValuesChunkTypeString()).append(" chunk1 = chunks[0].as") + .append(type1.chunkType).append("Chunk();").append(NEW_LINE); + code.append(indenter).append(type2.getValuesChunkTypeString()).append(" chunk2 = chunks[1].as") + .append(type2.chunkType).append("Chunk();").append(NEW_LINE); + code.append(indenter).append(type3.getValuesChunkTypeString()).append(" chunk3 = chunks[2].as") + .append(type3.chunkType).append("Chunk();").append(NEW_LINE); code.append(indenter).append("for (int ii = 0; ii < chunkSize; ++ii) {").append(NEW_LINE); - code.append(indenter.increaseLevel()).append("destinationObjectChunk.set(ii, new ") - .append(tupleClassName).append("(").append(type1.getFromChunkText("chunk1.get(ii)")) - .append(", ").append(type2.getFromChunkText("chunk2.get(ii)")).append(", ") - .append(type3.getFromChunkText("chunk3.get(ii)")).append("));").append(NEW_LINE); + code.append(indenter.increaseLevel()).append("destinationObjectChunk.set(ii, new ").append(tupleClassName) + .append("(").append(type1.getFromChunkText("chunk1.get(ii)")).append(", ") + .append(type2.getFromChunkText("chunk2.get(ii)")).append(", ") + .append(type3.getFromChunkText("chunk3.get(ii)")).append("));").append(NEW_LINE); code.append(indenter.decreaseLevel()).append("}").append(NEW_LINE); code.append(indenter).append("destinationObjectChunk.setSize(chunkSize);").append(NEW_LINE); code.append(indenter.decreaseLevel()).append("}").append(NEW_LINE); code.append(NEW_LINE); - code.append(indenter).append("/** {@link ") - .append(ThreeColumnTupleSourceFactory.class.getSimpleName()) - .append("} for instances of {@link ").append(className).append("}. **/") - .append(NEW_LINE); + code.append(indenter).append("/** {@link ").append(ThreeColumnTupleSourceFactory.class.getSimpleName()) + .append("} for instances of {@link ").append(className).append("}. 
**/").append(NEW_LINE); code.append(indenter).append("private static final class Factory implements ") - .append(ThreeColumnTupleSourceFactory.class.getSimpleName()) - .append('<').append(tupleClassName).append(", ").append(sourceClass1Name).append(", ") - .append(sourceClass2Name).append(", ").append(sourceClass3Name).append("> {") - .append(NEW_LINE); + .append(ThreeColumnTupleSourceFactory.class.getSimpleName()) + .append('<').append(tupleClassName).append(", ").append(sourceClass1Name).append(", ") + .append(sourceClass2Name).append(", ").append(sourceClass3Name).append("> {").append(NEW_LINE); indenter.increaseLevel(); code.append(NEW_LINE); @@ -851,15 +772,14 @@ private String generateThreeColumnTupleSource(@NotNull final String className, code.append(NEW_LINE); code.append(indenter).append("@Override").append(NEW_LINE); - code.append(indenter).append("public TupleSource<").append(tupleClassName) - .append("> create(").append(NEW_LINE); + code.append(indenter).append("public TupleSource<").append(tupleClassName).append("> create(").append(NEW_LINE); indenter.increaseLevel(2); - code.append(indenter).append("@NotNull final ColumnSource<").append(sourceClass1Name) - .append("> ").append(CS1).append(',').append(NEW_LINE); - code.append(indenter).append("@NotNull final ColumnSource<").append(sourceClass2Name) - .append("> ").append(CS2).append(',').append(NEW_LINE); - code.append(indenter).append("@NotNull final ColumnSource<").append(sourceClass3Name) - .append("> ").append(CS3).append(NEW_LINE); + code.append(indenter).append("@NotNull final ColumnSource<").append(sourceClass1Name).append("> ").append(CS1) + .append(',').append(NEW_LINE); + code.append(indenter).append("@NotNull final ColumnSource<").append(sourceClass2Name).append("> ").append(CS2) + .append(',').append(NEW_LINE); + code.append(indenter).append("@NotNull final ColumnSource<").append(sourceClass3Name).append("> ").append(CS3) + .append(NEW_LINE); indenter.decreaseLevel(2); 
code.append(indenter).append(") {").append(NEW_LINE); indenter.increaseLevel(); @@ -882,8 +802,8 @@ private String generateThreeColumnTupleSource(@NotNull final String className, } private void writeClass(@NotNull final String className, @NotNull final String classBody) { - try (final PrintStream destination = new PrintStream( - new FileOutputStream(new File(OUTPUT_RELATIVE_PATH, className + ".java")))) { + try (final PrintStream destination = + new PrintStream(new FileOutputStream(new File(OUTPUT_RELATIVE_PATH, className + ".java")))) { destination.print(classBody); destination.flush(); } catch (FileNotFoundException e) { @@ -893,18 +813,17 @@ private void writeClass(@NotNull final String className, @NotNull final String c public static void main(@NotNull final String... args) { final TupleSourceCodeGenerator generator = new TupleSourceCodeGenerator(); - Arrays.stream(ColumnSourceType.values()) - .forEach(t1 -> Arrays.stream(ColumnSourceType.values()).forEach(t2 -> { - final String twoColumnTupleSourceName = generateSimpleClassName(t1, t2); - final String twoColumnTupleSourceBody = + Arrays.stream(ColumnSourceType.values()).forEach(t1 -> Arrays.stream(ColumnSourceType.values()).forEach(t2 -> { + final String twoColumnTupleSourceName = generateSimpleClassName(t1, t2); + final String twoColumnTupleSourceBody = generator.generateTwoColumnTupleSource(twoColumnTupleSourceName, t1, t2); - generator.writeClass(twoColumnTupleSourceName, twoColumnTupleSourceBody); - Arrays.stream(ColumnSourceType.values()).forEach(t3 -> { - final String threeColumnTupleSourceName = generateSimpleClassName(t1, t2, t3); - final String threeColumnTupleSourceBody = generator - .generateThreeColumnTupleSource(threeColumnTupleSourceName, t1, t2, t3); - generator.writeClass(threeColumnTupleSourceName, threeColumnTupleSourceBody); - }); - })); + generator.writeClass(twoColumnTupleSourceName, twoColumnTupleSourceBody); + Arrays.stream(ColumnSourceType.values()).forEach(t3 -> { + final String 
threeColumnTupleSourceName = generateSimpleClassName(t1, t2, t3); + final String threeColumnTupleSourceBody = + generator.generateThreeColumnTupleSource(threeColumnTupleSourceName, t1, t2, t3); + generator.writeClass(threeColumnTupleSourceName, threeColumnTupleSourceBody); + }); + })); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/tuples/TupleSourceFactory.java b/DB/src/main/java/io/deephaven/db/v2/tuples/TupleSourceFactory.java index 7c0d9bb1af9..c337bd5d5ed 100644 --- a/DB/src/main/java/io/deephaven/db/v2/tuples/TupleSourceFactory.java +++ b/DB/src/main/java/io/deephaven/db/v2/tuples/TupleSourceFactory.java @@ -18,9 +18,9 @@ public class TupleSourceFactory { private static final Map EXPLICIT_CLASS_TO_COLUMN_SOURCE_TYPE = - Collections.unmodifiableMap(Arrays.stream(ColumnSourceType.values()) - .filter(et -> et != ColumnSourceType.OBJECT && !et.isReinterpreted()) - .collect(Collectors.toMap(ColumnSourceType::getElementClass, Function.identity()))); + Collections.unmodifiableMap(Arrays.stream(ColumnSourceType.values()) + .filter(et -> et != ColumnSourceType.OBJECT && !et.isReinterpreted()) + .collect(Collectors.toMap(ColumnSourceType::getElementClass, Function.identity()))); /** * Create a {@link TupleSource} for the supplied array of {@link ColumnSource}s. @@ -34,13 +34,12 @@ public static TupleSource makeTupleSource(@NotNull final ColumnSource... columnS return EmptyTupleSource.INSTANCE; } if (length == 1) { - // NB: Don't reinterpret here, or you may have a bad time with join states when the LHS - // and RHS columns are differently reinterpretable. + // NB: Don't reinterpret here, or you may have a bad time with join states when the LHS and RHS columns are + // differently reinterpretable. return columnSources[0]; } if (length < 4) { - // NB: The array copy that looks like a side effect here is in fact deliberate and - // desirable. + // NB: The array copy that looks like a side effect here is in fact deliberate and desirable. 
final ColumnSourceType types[] = new ColumnSourceType[length]; final ColumnSource internalSources[] = new ColumnSource[length]; for (int csi = 0; csi < length; ++csi) { @@ -53,45 +52,39 @@ public static TupleSource makeTupleSource(@NotNull final ColumnSource... columnS // noinspection unchecked factoryClass = (Class) Class.forName(factoryClassName); } catch (ClassNotFoundException e) { - throw new IllegalStateException( - "Could not find tuple factory class for name " + factoryClassName, e); + throw new IllegalStateException("Could not find tuple factory class for name " + factoryClassName, e); } final Constructor factoryConstructor; try { factoryConstructor = length == 2 - ? factoryClass.getConstructor(ColumnSource.class, ColumnSource.class) - : factoryClass.getConstructor(ColumnSource.class, ColumnSource.class, - ColumnSource.class); + ? factoryClass.getConstructor(ColumnSource.class, ColumnSource.class) + : factoryClass.getConstructor(ColumnSource.class, ColumnSource.class, ColumnSource.class); } catch (NoSuchMethodException e) { - throw new IllegalStateException( - "Could not find tuple factory constructor for name " + factoryClassName, e); + throw new IllegalStateException("Could not find tuple factory constructor for name " + factoryClassName, + e); } try { return factoryConstructor.newInstance((Object[]) internalSources); - } catch (InstantiationException | IllegalAccessException - | InvocationTargetException e) { + } catch (InstantiationException | IllegalAccessException | InvocationTargetException e) { throw new IllegalStateException("Could not construct " + factoryClassName, e); } } - // NB: Don't reinterpret here, or you may have a bad time with join states when the LHS and - // RHS columns are differently reinterpretable. 
- return new MultiColumnTupleSource( - Arrays.stream(columnSources).toArray(ColumnSource[]::new)); + // NB: Don't reinterpret here, or you may have a bad time with join states when the LHS and RHS columns are + // differently reinterpretable. + return new MultiColumnTupleSource(Arrays.stream(columnSources).toArray(ColumnSource[]::new)); } private static ColumnSourceType getColumnSourceType(@NotNull final ColumnSource columnSource) { - final ColumnSourceType candidate = EXPLICIT_CLASS_TO_COLUMN_SOURCE_TYPE - .getOrDefault(columnSource.getType(), ColumnSourceType.OBJECT); - if (candidate.getReinterpretAsType() != null - && columnSource.allowsReinterpret(candidate.getInternalClass())) { + final ColumnSourceType candidate = + EXPLICIT_CLASS_TO_COLUMN_SOURCE_TYPE.getOrDefault(columnSource.getType(), ColumnSourceType.OBJECT); + if (candidate.getReinterpretAsType() != null && columnSource.allowsReinterpret(candidate.getInternalClass())) { return candidate.getReinterpretAsType(); } return candidate; } private static ColumnSource maybeReinterpret(@NotNull final ColumnSourceType type, - @NotNull final ColumnSource columnSource) { - return type.isReinterpreted() ? columnSource.reinterpret(type.getElementClass()) - : columnSource; + @NotNull final ColumnSource columnSource) { + return type.isReinterpreted() ? 
columnSource.reinterpret(type.getElementClass()) : columnSource; } } diff --git a/DB/src/main/java/io/deephaven/db/v2/tuples/TwoColumnTupleSourceFactory.java b/DB/src/main/java/io/deephaven/db/v2/tuples/TwoColumnTupleSourceFactory.java index 3dbf54f852c..945fcc6ff8a 100644 --- a/DB/src/main/java/io/deephaven/db/v2/tuples/TwoColumnTupleSourceFactory.java +++ b/DB/src/main/java/io/deephaven/db/v2/tuples/TwoColumnTupleSourceFactory.java @@ -16,5 +16,5 @@ public interface TwoColumnTupleSourceFactory create(@NotNull ColumnSource columnSource1, - @NotNull ColumnSource columnSource2); + @NotNull ColumnSource columnSource2); } diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/AbstractIndexUpdateNotification.java b/DB/src/main/java/io/deephaven/db/v2/utils/AbstractIndexUpdateNotification.java index a98b69c502d..739d7de5bb2 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/AbstractIndexUpdateNotification.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/AbstractIndexUpdateNotification.java @@ -10,7 +10,7 @@ * Common base class for index update notifications. 
*/ public abstract class AbstractIndexUpdateNotification extends AbstractNotification - implements NotificationQueue.IndexUpdateNotification { + implements NotificationQueue.IndexUpdateNotification { protected AbstractIndexUpdateNotification(final boolean isTerminal) { super(isTerminal); diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/AbstractTreeIndexImplRandomBuilder.java b/DB/src/main/java/io/deephaven/db/v2/utils/AbstractTreeIndexImplRandomBuilder.java index 0c1724ad6ff..79e3efa682a 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/AbstractTreeIndexImplRandomBuilder.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/AbstractTreeIndexImplRandomBuilder.java @@ -86,8 +86,7 @@ private void newKey(final long key) { private void newRange(final long firstKey, final long lastKey) { if (firstKey > lastKey) { if (Index.BAD_RANGES_AS_ERROR) { - throw new IllegalArgumentException( - "Illegal range start=" + firstKey + " > end=" + lastKey + "."); + throw new IllegalArgumentException("Illegal range start=" + firstKey + " > end=" + lastKey + "."); } // Ignore. return; diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/AppendOnlyArrayBackedMutableTable.java b/DB/src/main/java/io/deephaven/db/v2/utils/AppendOnlyArrayBackedMutableTable.java index 086c409d600..128c4997f86 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/AppendOnlyArrayBackedMutableTable.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/AppendOnlyArrayBackedMutableTable.java @@ -18,11 +18,9 @@ import java.util.function.Consumer; /** - * An in-memory table that allows you to add rows as if it were an InputTable, which can be updated - * on the LTM. + * An in-memory table that allows you to add rows as if it were an InputTable, which can be updated on the LTM. * - * The table is not keyed, all rows are added to the end of the table. Deletions and edits are not - * permitted. + * The table is not keyed, all rows are added to the end of the table. Deletions and edits are not permitted. 
*/ public class AppendOnlyArrayBackedMutableTable extends BaseArrayBackedMutableTable { static final String DEFAULT_DESCRIPTION = "Append Only In-Memory Input Table"; @@ -47,9 +45,9 @@ public static AppendOnlyArrayBackedMutableTable make(@NotNull TableDefinition de * @return an empty AppendOnlyArrayBackedMutableTable with the given definition */ public static AppendOnlyArrayBackedMutableTable make(@NotNull TableDefinition definition, - final Map enumValues) { + final Map enumValues) { return make(new QueryTable(definition, Index.FACTORY.getEmptyIndex(), - NullValueColumnSource.createColumnSourceMap(definition)), enumValues); + NullValueColumnSource.createColumnSourceMap(definition)), enumValues); } /** @@ -72,9 +70,9 @@ public static AppendOnlyArrayBackedMutableTable make(final Table initialTable) { * @return an empty AppendOnlyArrayBackedMutableTable with the given definition */ public static AppendOnlyArrayBackedMutableTable make(final Table initialTable, - final Map enumValues) { + final Map enumValues) { final AppendOnlyArrayBackedMutableTable result = new AppendOnlyArrayBackedMutableTable( - initialTable.getDefinition(), enumValues, new ProcessPendingUpdater()); + initialTable.getDefinition(), enumValues, new ProcessPendingUpdater()); result.setAttribute(Table.ADD_ONLY_TABLE_ATTRIBUTE, Boolean.TRUE); result.setFlat(); processInitial(initialTable, result); @@ -82,16 +80,15 @@ public static AppendOnlyArrayBackedMutableTable make(final Table initialTable, } private AppendOnlyArrayBackedMutableTable(@NotNull TableDefinition definition, - final Map enumValues, final ProcessPendingUpdater processPendingUpdater) { - super(Index.FACTORY.getEmptyIndex(), makeColumnSourceMap(definition), enumValues, - processPendingUpdater); + final Map enumValues, final ProcessPendingUpdater processPendingUpdater) { + super(Index.FACTORY.getEmptyIndex(), makeColumnSourceMap(definition), enumValues, processPendingUpdater); 
inputTableDefinition.setKeys(CollectionUtil.ZERO_LENGTH_STRING_ARRAY); inputTableDefinition.setValues(definition.getColumnNamesArray()); } @Override - protected void processPendingTable(Table table, boolean allowEdits, - IndexChangeRecorder indexChangeRecorder, Consumer errorNotifier) { + protected void processPendingTable(Table table, boolean allowEdits, IndexChangeRecorder indexChangeRecorder, + Consumer errorNotifier) { final Index addIndex = table.getIndex(); final long firstRow = nextRow; final long lastRow = firstRow + addIndex.intSize() - 1; @@ -103,17 +100,15 @@ protected void processPendingTable(Table table, boolean allowEdits, final int chunkCapacity = table.intSize(); getColumnSourceMap().forEach((name, cs) -> { - final ArrayBackedColumnSource arrayBackedColumnSource = - (ArrayBackedColumnSource) cs; + final ArrayBackedColumnSource arrayBackedColumnSource = (ArrayBackedColumnSource) cs; arrayBackedColumnSource.ensureCapacity(nextRow); final ColumnSource sourceColumnSource = table.getColumnSource(name); - try ( - final WritableChunkSink.FillFromContext ffc = + try (final WritableChunkSink.FillFromContext ffc = arrayBackedColumnSource.makeFillFromContext(chunkCapacity); - final ChunkSource.GetContext getContext = - sourceColumnSource.makeGetContext(chunkCapacity, sharedContext)) { + final ChunkSource.GetContext getContext = + sourceColumnSource.makeGetContext(chunkCapacity, sharedContext)) { final Chunk valuesChunk = - sourceColumnSource.getChunk(getContext, addIndex); + sourceColumnSource.getChunk(getContext, addIndex); arrayBackedColumnSource.fillFromChunk(ffc, valuesChunk, destinations); } }); @@ -146,14 +141,13 @@ public void delete(Table table, Index index) { } @Override - public void setRows(@NotNull Table defaultValues, int[] rowArray, - Map[] valueArray, InputTableStatusListener listener) { + public void setRows(@NotNull Table defaultValues, int[] rowArray, Map[] valueArray, + InputTableStatusListener listener) { throw new 
UnsupportedOperationException(); } @Override - public void addRows(Map[] valueArray, boolean allowEdits, - InputTableStatusListener listener) { + public void addRows(Map[] valueArray, boolean allowEdits, InputTableStatusListener listener) { if (allowEdits) { throw new UnsupportedOperationException(); } diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/AsyncClientErrorNotifier.java b/DB/src/main/java/io/deephaven/db/v2/utils/AsyncClientErrorNotifier.java index 7ed91f26520..9ba4c839c30 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/AsyncClientErrorNotifier.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/AsyncClientErrorNotifier.java @@ -9,8 +9,8 @@ import java.io.IOException; /** - * When we get an error from a table in the listener tree, we want to send an appropriate command to - * the clients indicating that something has gone wrong with the table. + * When we get an error from a table in the listener tree, we want to send an appropriate command to the clients + * indicating that something has gone wrong with the table. 
*/ public class AsyncClientErrorNotifier { diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/AsyncErrorLogger.java b/DB/src/main/java/io/deephaven/db/v2/utils/AsyncErrorLogger.java index 8e223622c78..ad2dace6ad6 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/AsyncErrorLogger.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/AsyncErrorLogger.java @@ -14,23 +14,19 @@ public class AsyncErrorLogger { private static final DynamicTableWriter tableWriter = new DynamicTableWriter( - new String[] {"Time", "EvaluationNumber", "OperationNumber", "Description", - "SourceQueryEvaluationNumber", "SourceQueryOperationNumber", - "SourceQueryDescription", "Cause", "WorkerName", "HostName"}, - new Class[] {DBDateTime.class, int.class, int.class, String.class, int.class, int.class, - String.class, Exception.class, String.class, String.class}); + new String[] {"Time", "EvaluationNumber", "OperationNumber", "Description", "SourceQueryEvaluationNumber", + "SourceQueryOperationNumber", "SourceQueryDescription", "Cause", "WorkerName", "HostName"}, + new Class[] {DBDateTime.class, int.class, int.class, String.class, int.class, int.class, String.class, + Exception.class, String.class, String.class}); private static final RowSetter timeSetter = tableWriter.getSetter("Time"); - private static final RowSetter evaluationNumberSetter = - tableWriter.getSetter("EvaluationNumber"); - private static final RowSetter operationNumberSetter = - tableWriter.getSetter("OperationNumber"); + private static final RowSetter evaluationNumberSetter = tableWriter.getSetter("EvaluationNumber"); + private static final RowSetter operationNumberSetter = tableWriter.getSetter("OperationNumber"); private static final RowSetter descriptionSetter = tableWriter.getSetter("Description"); private static final RowSetter failingEvaluationNumberSetter = - tableWriter.getSetter("SourceQueryEvaluationNumber"); + tableWriter.getSetter("SourceQueryEvaluationNumber"); private static final RowSetter 
failingOperationNumberSetter = - tableWriter.getSetter("SourceQueryOperationNumber"); - private static final RowSetter failingDescriptionSetter = - tableWriter.getSetter("SourceQueryDescription"); + tableWriter.getSetter("SourceQueryOperationNumber"); + private static final RowSetter failingDescriptionSetter = tableWriter.getSetter("SourceQueryDescription"); private static final RowSetter causeSetter = tableWriter.getSetter("Cause"); private static final RowSetter workerNameSetter = tableWriter.getSetter("WorkerName"); private static final RowSetter hostNameSetter = tableWriter.getSetter("HostName"); @@ -40,8 +36,7 @@ public static DynamicTable getErrorLog() { } public static void log(DBDateTime time, UpdatePerformanceTracker.Entry entry, - UpdatePerformanceTracker.Entry sourceEntry, Throwable originalException) - throws IOException { + UpdatePerformanceTracker.Entry sourceEntry, Throwable originalException) throws IOException { timeSetter.set(time); if (entry != null) { evaluationNumberSetter.set(entry.getEvaluationNumber()); @@ -53,8 +48,8 @@ public static void log(DBDateTime time, UpdatePerformanceTracker.Entry entry, failingOperationNumberSetter.setInt(sourceEntry.getOperationNumber()); failingDescriptionSetter.set(sourceEntry.getDescription()); } - // TODO (deephaven/deephaven-core/issues/159): Do we continue supporting this? If so, we - // should consider fixing host name and worker name. + // TODO (deephaven/deephaven-core/issues/159): Do we continue supporting this? If so, we should consider fixing + // host name and worker name. 
workerNameSetter.set(null); hostNameSetter.set(null); causeSetter.set(originalException); diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/BarrageMessage.java b/DB/src/main/java/io/deephaven/db/v2/utils/BarrageMessage.java index e01f823b1de..f72b3476703 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/BarrageMessage.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/BarrageMessage.java @@ -54,10 +54,10 @@ public static class AddColumnData { // Ensure that we clean up only after all copies of the update are released. private volatile int refCount = 1; - // Field updater for refCount, so we can avoid creating an {@link - // java.util.concurrent.atomic.AtomicInteger} for each instance. + // Field updater for refCount, so we can avoid creating an {@link java.util.concurrent.atomic.AtomicInteger} for + // each instance. private static final AtomicIntegerFieldUpdater REFERENCE_COUNT_UPDATER = - AtomicIntegerFieldUpdater.newUpdater(BarrageMessage.class, "refCount"); + AtomicIntegerFieldUpdater.newUpdater(BarrageMessage.class, "refCount"); public BarrageMessage clone() { REFERENCE_COUNT_UPDATER.incrementAndGet(this); diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/BaseArrayBackedMutableTable.java b/DB/src/main/java/io/deephaven/db/v2/utils/BaseArrayBackedMutableTable.java index f7fd50ff805..8cc78943e3c 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/BaseArrayBackedMutableTable.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/BaseArrayBackedMutableTable.java @@ -28,8 +28,7 @@ abstract class BaseArrayBackedMutableTable extends UpdatableTable { private static final Object[] BOOLEAN_ENUM_ARRAY = new Object[] {true, false, null}; protected final InputTableDefinition inputTableDefinition; - private final List pendingChanges = - Collections.synchronizedList(new ArrayList<>()); + private final List pendingChanges = Collections.synchronizedList(new ArrayList<>()); private final AtomicLong nextSequence = new AtomicLong(0); private final AtomicLong 
processedSequence = new AtomicLong(0); private final Map enumValues; @@ -40,9 +39,8 @@ abstract class BaseArrayBackedMutableTable extends UpdatableTable { long nextRow = 0; private long pendingProcessed = NULL_NOTIFICATION_STEP; - public BaseArrayBackedMutableTable(Index index, - Map nameToColumnSource, Map enumValues, - ProcessPendingUpdater processPendingUpdater) { + public BaseArrayBackedMutableTable(Index index, Map nameToColumnSource, + Map enumValues, ProcessPendingUpdater processPendingUpdater) { super(index, nameToColumnSource, processPendingUpdater); this.enumValues = enumValues; this.inputTableDefinition = new InputTableDefinition(); @@ -52,12 +50,11 @@ public BaseArrayBackedMutableTable(Index index, processPendingUpdater.setThis(this); } - protected static Map> makeColumnSourceMap( - TableDefinition definition) { + protected static Map> makeColumnSourceMap(TableDefinition definition) { final Map> resultMap = new LinkedHashMap<>(); for (final ColumnDefinition columnDefinition : definition.getColumns()) { resultMap.put(columnDefinition.getName(), - ArrayBackedColumnSource.getMemoryColumnSource(0, columnDefinition.getDataType())); + ArrayBackedColumnSource.getMemoryColumnSource(0, columnDefinition.getDataType())); } return resultMap; } @@ -94,14 +91,13 @@ public BaseArrayBackedMutableTable setDescription(String newDescription) { /** * For unit test use only. Specify the function to invoke after enqueuing a pending change. * - * @param onPendingChange The function to invoke after enqueuing a pending change, or null to - * restore the default behavior + * @param onPendingChange The function to invoke after enqueuing a pending change, or null to restore the default + * behavior */ @TestUseOnly void setOnPendingChange(final Runnable onPendingChange) { this.onPendingChange = - onPendingChange == null ? () -> LiveTableMonitor.DEFAULT.requestRefresh(this) - : onPendingChange; + onPendingChange == null ? 
() -> LiveTableMonitor.DEFAULT.requestRefresh(this) : onPendingChange; } private void processPending(IndexChangeRecorder indexChangeRecorder) { @@ -110,8 +106,8 @@ private void processPending(IndexChangeRecorder indexChangeRecorder) { if (pendingChange.delete) { processPendingDelete(pendingChange.table, indexChangeRecorder); } else { - processPendingTable(pendingChange.table, pendingChange.allowEdits, - indexChangeRecorder, (e) -> pendingChange.error = e); + processPendingTable(pendingChange.table, pendingChange.allowEdits, indexChangeRecorder, + (e) -> pendingChange.error = e); } pendingProcessed = pendingChange.sequence; } @@ -130,10 +126,9 @@ public void refresh() { } protected abstract void processPendingTable(Table table, boolean allowEdits, - IndexChangeRecorder indexChangeRecorder, Consumer errorNotifier); + IndexChangeRecorder indexChangeRecorder, Consumer errorNotifier); - protected abstract void processPendingDelete(Table table, - IndexChangeRecorder indexChangeRecorder); + protected abstract void processPendingDelete(Table table, IndexChangeRecorder indexChangeRecorder); protected abstract String getDefaultDescription(); @@ -196,26 +191,23 @@ public void add(Table newData) { private void add(Table newData, boolean allowEdits, InputTableStatusListener listener) { final PendingChange pendingChange = enqueueAddition(newData, allowEdits); - CompletableFuture.runAsync(() -> waitForSequence(pendingChange.sequence)) - .thenAccept((v) -> { - if (pendingChange.error == null) { - listener.onSuccess(); - } else { - listener.onError(new IllegalArgumentException(pendingChange.error)); - } - }).exceptionally(ex -> { - listener.onError(ex); - return null; - }); + CompletableFuture.runAsync(() -> waitForSequence(pendingChange.sequence)).thenAccept((v) -> { + if (pendingChange.error == null) { + listener.onSuccess(); + } else { + listener.onError(new IllegalArgumentException(pendingChange.error)); + } + }).exceptionally(ex -> { + listener.onError(ex); + return null; + 
}); } private PendingChange enqueueAddition(Table newData, boolean allowEdits) { validateDefinition(newData.getDefinition()); - // we want to get a clean copy of the table; that can not change out from under us or - // result in long reads + // we want to get a clean copy of the table; that can not change out from under us or result in long reads // during our LTM refresh - final PendingChange pendingChange = - new PendingChange(doSnap(newData), false, allowEdits); + final PendingChange pendingChange = new PendingChange(doSnap(newData), false, allowEdits); pendingChanges.add(pendingChange); onPendingChange.run(); return pendingChange; @@ -238,8 +230,7 @@ private Table doSnap(Table newData) { @Override public void delete(Table table, Index index) { validateDelete(table.getDefinition()); - final PendingChange pendingChange = - new PendingChange(doSnap(table, index), true, false); + final PendingChange pendingChange = new PendingChange(doSnap(table, index), true, false); pendingChanges.add(pendingChange); onPendingChange.run(); waitForSequence(pendingChange.sequence); @@ -252,8 +243,7 @@ public String getDescription() { void waitForSequence(long sequence) { if (LiveTableMonitor.DEFAULT.exclusiveLock().isHeldByCurrentThread()) { - // We're holding the lock. currentTable had better be a DynamicTable. Wait on its - // LTM condition + // We're holding the lock. currentTable had better be a DynamicTable. Wait on its LTM condition // in order to allow updates. 
while (processedSequence.longValue() < sequence) { try { @@ -275,33 +265,31 @@ void waitForSequence(long sequence) { } @Override - public void setRows(@NotNull Table defaultValues, int[] rowArray, - Map[] valueArray, InputTableStatusListener listener) { + public void setRows(@NotNull Table defaultValues, int[] rowArray, Map[] valueArray, + InputTableStatusListener listener) { Assert.neqNull(defaultValues, "defaultValues"); if (defaultValues.isLive()) { LiveTableMonitor.DEFAULT.checkInitiateTableOperation(); } final List columnDefinitions = getTableDefinition().getColumnList(); - final Map sources = - buildSourcesMap(valueArray.length, columnDefinitions); - final String[] kabmtColumns = getTableDefinition().getColumnNames() - .toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY); - final ArrayBackedColumnSource[] sourcesByPosition = Arrays.stream(kabmtColumns) - .map(sources::get).toArray(ArrayBackedColumnSource[]::new); + final Map sources = buildSourcesMap(valueArray.length, columnDefinitions); + final String[] kabmtColumns = + getTableDefinition().getColumnNames().toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + final ArrayBackedColumnSource[] sourcesByPosition = + Arrays.stream(kabmtColumns).map(sources::get).toArray(ArrayBackedColumnSource[]::new); final Set missingColumns = new HashSet<>(getTableDefinition().getColumnNames()); - for (final Map.Entry entry : defaultValues - .getColumnSourceMap().entrySet()) { + for (final Map.Entry entry : defaultValues.getColumnSourceMap() + .entrySet()) { final String colName = entry.getKey(); if (!sources.containsKey(colName)) { continue; } - final ColumnSource cs = - Require.neqNull(entry.getValue(), "defaultValue column source: " + colName); + final ColumnSource cs = Require.neqNull(entry.getValue(), "defaultValue column source: " + colName); final ArrayBackedColumnSource dest = - Require.neqNull(sources.get(colName), "destination column source: " + colName); + Require.neqNull(sources.get(colName), "destination column 
source: " + colName); final Index defaultValuesIndex = defaultValues.getIndex(); for (int rr = 0; rr < rowArray.length; ++rr) { @@ -320,47 +308,43 @@ public void setRows(@NotNull Table defaultValues, int[] rowArray, if (passedInValues.containsKey(colName)) { sourcesByPosition[cc].set(ii, passedInValues.get(colName)); } else if (missingColumns.contains(colName)) { - throw new IllegalArgumentException( - "No value specified for " + colName + " row " + ii); + throw new IllegalArgumentException("No value specified for " + colName + " row " + ii); } } } - final QueryTable newData = new QueryTable(getTableDefinition(), - Index.FACTORY.getFlatIndex(valueArray.length), sources); + final QueryTable newData = + new QueryTable(getTableDefinition(), Index.FACTORY.getFlatIndex(valueArray.length), sources); add(newData, true, listener); } @Override - public void addRows(Map[] valueArray, boolean allowEdits, - InputTableStatusListener listener) { + public void addRows(Map[] valueArray, boolean allowEdits, InputTableStatusListener listener) { final List columnDefinitions = getTableDefinition().getColumnList(); - final Map sources = - buildSourcesMap(valueArray.length, columnDefinitions); + final Map sources = buildSourcesMap(valueArray.length, columnDefinitions); for (int rowNumber = 0; rowNumber < valueArray.length; rowNumber++) { final Map values = valueArray[rowNumber]; for (final ColumnDefinition columnDefinition : columnDefinitions) { // noinspection unchecked - sources.get(columnDefinition.getName()).set(rowNumber, - values.get(columnDefinition.getName())); + sources.get(columnDefinition.getName()).set(rowNumber, values.get(columnDefinition.getName())); } } - final QueryTable newData = new QueryTable(getTableDefinition(), - Index.FACTORY.getFlatIndex(valueArray.length), sources); + final QueryTable newData = + new QueryTable(getTableDefinition(), Index.FACTORY.getFlatIndex(valueArray.length), sources); add(newData, allowEdits, listener); } @NotNull private Map 
buildSourcesMap(int capacity, - List columnDefinitions) { + List columnDefinitions) { final Map sources = new LinkedHashMap<>(); for (final ColumnDefinition columnDefinition : columnDefinitions) { - final ArrayBackedColumnSource memoryColumnSource = ArrayBackedColumnSource - .getMemoryColumnSource(capacity, columnDefinition.getDataType()); + final ArrayBackedColumnSource memoryColumnSource = + ArrayBackedColumnSource.getMemoryColumnSource(capacity, columnDefinition.getDataType()); memoryColumnSource.ensureCapacity(capacity); sources.put(columnDefinition.getName(), memoryColumnSource); } @@ -382,8 +366,7 @@ public Table getTable() { @Override public boolean canEdit() { - // TODO: Should we be more restrictive, or provide a mechanism for determining which - // users can edit this + // TODO: Should we be more restrictive, or provide a mechanism for determining which users can edit this // table beyond "they have a handle to it"? return true; } diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/ChunkUtils.java b/DB/src/main/java/io/deephaven/db/v2/utils/ChunkUtils.java index 7e4dae6cca2..9afbfa0cc76 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/ChunkUtils.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/ChunkUtils.java @@ -23,27 +23,24 @@ public class ChunkUtils { private static final int COPY_DATA_CHUNK_SIZE = 16384; /** - * Generates a {@link LongChunk} from {@link LongChunk} - * chunk. + * Generates a {@link LongChunk} from {@link LongChunk} chunk. 
* * @param chunk the chunk to convert * @return the generated chunk */ public static WritableLongChunk convertToOrderedKeyRanges( - final LongChunk chunk) { + final LongChunk chunk) { return convertToOrderedKeyRanges(chunk, Chunk.MAXIMUM_SIZE); } @VisibleForTesting - static WritableLongChunk convertToOrderedKeyRanges( - final LongChunk chunk, - final long maxChunkSize) { + static WritableLongChunk convertToOrderedKeyRanges(final LongChunk chunk, + final long maxChunkSize) { if (chunk.size() == 0) { return WritableLongChunk.makeWritableChunk(0); } - // First we'll count the number of ranges so that we can allocate the exact amount of space - // needed. + // First we'll count the number of ranges so that we can allocate the exact amount of space needed. long numRanges = 1; for (int idx = 1; idx < chunk.size(); ++idx) { if (chunk.get(idx - 1) + 1 != chunk.get(idx)) { @@ -53,12 +50,10 @@ static WritableLongChunk convertToOrderedKeyRanges( final long newSize = numRanges * 2L; if (newSize > maxChunkSize) { - throw new SizeException("Cannot expand KeyIndices Chunk into KeyRanges Chunk.", newSize, - maxChunkSize); + throw new SizeException("Cannot expand KeyIndices Chunk into KeyRanges Chunk.", newSize, maxChunkSize); } - final WritableLongChunk newChunk = - WritableLongChunk.makeWritableChunk((int) newSize); + final WritableLongChunk newChunk = WritableLongChunk.makeWritableChunk((int) newSize); convertToOrderedKeyRanges(chunk, newChunk); @@ -66,14 +61,13 @@ static WritableLongChunk convertToOrderedKeyRanges( } /** - * Fills {@code OrderedKeyRanges} into {@code dest} from the provided {@code chunk} and - * specified source range. + * Fills {@code OrderedKeyRanges} into {@code dest} from the provided {@code chunk} and specified source range. 
* * @param chunk the chunk to convert * @param dest the chunk to fill with ranges */ public static void convertToOrderedKeyRanges(final LongChunk chunk, - final WritableLongChunk dest) { + final WritableLongChunk dest) { int destOffset = 0; if (chunk.size() == 0) { dest.setSize(destOffset); @@ -95,27 +89,24 @@ public static void convertToOrderedKeyRanges(final LongChunk } /** - * Generates a {@link LongChunk} from {@link LongChunk} - * chunk. + * Generates a {@link LongChunk} from {@link LongChunk} chunk. * * @param chunk the chunk to convert * @return the generated chunk */ - public static LongChunk convertToOrderedKeyIndices( - final LongChunk chunk) { + public static LongChunk convertToOrderedKeyIndices(final LongChunk chunk) { return convertToOrderedKeyIndices(0, chunk); } /** - * Generates a {@link LongChunk} from {@link LongChunk} - * chunk. + * Generates a {@link LongChunk} from {@link LongChunk} chunk. * * @param srcOffset the offset into {@code chunk} to begin including in the generated chunk * @param chunk the chunk to convert * @return the generated chunk */ public static LongChunk convertToOrderedKeyIndices(int srcOffset, - final LongChunk chunk) { + final LongChunk chunk) { srcOffset += srcOffset % 2; // ensure that we are using the correct range edges long numElements = 0; @@ -123,38 +114,32 @@ public static LongChunk convertToOrderedKeyIndices(int srcOff numElements += chunk.get(idx + 1) - chunk.get(idx) + 1; } - // Note that maximum range is [0, Long.MAX_VALUE] and all ranges are non-overlapping. - // Therefore we will never + // Note that maximum range is [0, Long.MAX_VALUE] and all ranges are non-overlapping. Therefore we will never // overflow past Long.MIN_VALUE. 
if (numElements < 0 || numElements > Chunk.MAXIMUM_SIZE) { - throw new SizeException( - "Cannot expand OrderedKeyRanges Chunk into OrderedKeyIndices Chunk.", numElements, - Chunk.MAXIMUM_SIZE); + throw new SizeException("Cannot expand OrderedKeyRanges Chunk into OrderedKeyIndices Chunk.", numElements, + Chunk.MAXIMUM_SIZE); } - final WritableLongChunk newChunk = - WritableLongChunk.makeWritableChunk((int) numElements); + final WritableLongChunk newChunk = WritableLongChunk.makeWritableChunk((int) numElements); convertToOrderedKeyIndices(srcOffset, chunk, newChunk, 0); return newChunk; } /** - * Generates a {@link LongChunk} from {@link LongChunk} - * chunk. + * Generates a {@link LongChunk} from {@link LongChunk} chunk. * * @param srcOffset the offset into {@code chunk} to begin including in the generated chunk * @param chunk the chunk to convert * @param dest the chunk to fill with indices */ - public static void convertToOrderedKeyIndices(int srcOffset, - final LongChunk chunk, - final WritableLongChunk dest, int destOffset) { + public static void convertToOrderedKeyIndices(int srcOffset, final LongChunk chunk, + final WritableLongChunk dest, int destOffset) { srcOffset += srcOffset & 1; // ensure that we are using the correct range edges for (int idx = srcOffset; idx + 1 < chunk.size() && destOffset < dest.size(); idx += 2) { final long start = chunk.get(idx); - final long range = chunk.get(idx + 1) - start + 1; // note that due to checks above, - // range cannot overflow + final long range = chunk.get(idx + 1) - start + 1; // note that due to checks above, range cannot overflow for (long jdx = 0; jdx < range && destOffset < dest.size(); ++jdx) { dest.set(destOffset++, start + jdx); } @@ -166,8 +151,8 @@ public static void convertToOrderedKeyIndices(int srcOffset, /** * Produce a pretty key for error messages from an element within parallel chunks. 
*/ - public static String extractKeyStringFromChunks(ChunkType[] keyChunkTypes, - Chunk[] chunks, int chunkPosition) { + public static String extractKeyStringFromChunks(ChunkType[] keyChunkTypes, Chunk[] chunks, + int chunkPosition) { final StringBuilder builder = new StringBuilder(); if (chunks.length != 1) { builder.append("["); @@ -188,8 +173,8 @@ public static String extractKeyStringFromChunks(ChunkType[] keyChunkTypes, /** * Produce a pretty key for error messages from an element within parallel chunks. */ - public static String extractKeyStringFromChunk(ChunkType keyChunkType, - Chunk chunk, int chunkPosition) { + public static String extractKeyStringFromChunk(ChunkType keyChunkType, Chunk chunk, + int chunkPosition) { final StringBuilder builder = new StringBuilder(); extractStringOne(chunkPosition, builder, keyChunkType, chunk); return builder.toString(); @@ -198,13 +183,12 @@ public static String extractKeyStringFromChunk(ChunkType keyChunkType, /** * Produce a pretty key for error messages from an element within parallel chunks. 
*/ - public static String extractKeyStringFromChunk(Chunk chunk, - int chunkPosition) { + public static String extractKeyStringFromChunk(Chunk chunk, int chunkPosition) { return extractKeyStringFromChunk(chunk.getChunkType(), chunk, chunkPosition); } - private static void extractStringOne(int chunkPosition, StringBuilder builder, - ChunkType keyChunkType, Chunk chunk) { + private static void extractStringOne(int chunkPosition, StringBuilder builder, ChunkType keyChunkType, + Chunk chunk) { switch (keyChunkType) { case Boolean: builder.append(chunk.asBooleanChunk().get(chunkPosition)); @@ -275,26 +259,25 @@ private static void extractStringOne(int chunkPosition, StringBuilder builder, public static void checkSliceArgs(int size, int offset, int capacity) { if (offset < 0 || offset > size || capacity < 0 || capacity > size - offset) { throw new IllegalArgumentException( - String.format("New slice offset %d, capacity %d is incompatible with size %d", - offset, capacity, size)); + String.format("New slice offset %d, capacity %d is incompatible with size %d", + offset, capacity, size)); } } public static void checkArrayArgs(int arrayLength, int offset, int capacity) { if (offset < 0 || capacity < 0 || offset + capacity > arrayLength) { throw new IllegalArgumentException( - String.format("offset %d, capacity %d is incompatible with array of length %d", - offset, capacity, arrayLength)); + String.format("offset %d, capacity %d is incompatible with array of length %d", + offset, capacity, arrayLength)); } } /** - * Determines, when copying data from the source array to the dest array, whether the copying - * should proceed in the forward or reverse direction in order to be safe. The issue is that - * (like memmove), care needs to be taken when the arrays are the same and the ranges overlap. - * In some cases (like when the arrays are different), either direction will do; in those cases - * we will recommend copying in the forward direction for performance reasons. 
When srcArray and - * destArray refer to the array, one of these five cases applies: + * Determines, when copying data from the source array to the dest array, whether the copying should proceed in the + * forward or reverse direction in order to be safe. The issue is that (like memmove), care needs to be taken when + * the arrays are the same and the ranges overlap. In some cases (like when the arrays are different), either + * direction will do; in those cases we will recommend copying in the forward direction for performance reasons. + * When srcArray and destArray refer to the array, one of these five cases applies: * *

    * @@ -327,14 +310,14 @@ public static void checkArrayArgs(int arrayLength, int offset, int capacity) { * @param destArray The destination array * @param destOffset The starting offset in the destination array * @param length The number of elements that will be copied - * @return true if the copy should proceed in the forward direction; false if it should proceed - * in the reverse direction + * @return true if the copy should proceed in the forward direction; false if it should proceed in the reverse + * direction */ public static boolean canCopyForward(TARRAY srcArray, int srcOffset, TARRAY destArray, - int destOffset, int length) { + int destOffset, int length) { return srcArray != destArray || // arrays different - srcOffset + length <= destOffset || // case 1 - srcOffset >= destOffset; // cases 3, 4, 5 + srcOffset + length <= destOffset || // case 1 + srcOffset >= destOffset; // cases 3, 4, 5 } public static String dumpChunk(Chunk chunk) { @@ -375,7 +358,7 @@ public static String dumpChunk(CharChunk chunk) { final char charValue = chunk.get(ii); // noinspection UnnecessaryBoxing builder.append(" '").append(charValue).append("' ") - .append(String.format("%6d", Integer.valueOf(charValue))); + .append(String.format("%6d", Integer.valueOf(charValue))); } return builder.append("\n").toString(); } @@ -577,15 +560,14 @@ public static boolean contains(ObjectChunk chunk, Object value * @param destAllKeys The destination keys. It is ok for srcAllKeys == destAllKeys. 
* @param usePrev Should we read previous values from src */ - public static void copyData(ChunkSource.WithPrev src, - OrderedKeys srcAllKeys, WritableSource dest, - OrderedKeys destAllKeys, boolean usePrev) { + public static void copyData(ChunkSource.WithPrev src, OrderedKeys srcAllKeys, + WritableSource dest, + OrderedKeys destAllKeys, boolean usePrev) { if (src == dest) { throw new UnsupportedOperationException("This method isn't safe when src == dest"); } if (srcAllKeys.size() != destAllKeys.size()) { - final String msg = - String.format("Expected srcAllKeys.size() == destAllKeys.size(), but got %d and %d", + final String msg = String.format("Expected srcAllKeys.size() == destAllKeys.size(), but got %d and %d", srcAllKeys.size(), destAllKeys.size()); throw new IllegalArgumentException(msg); } @@ -595,19 +577,17 @@ public static void copyData(ChunkSource.WithPrev sr } dest.ensureCapacity(destAllKeys.lastKey() + 1); try (final ChunkSource.GetContext srcContext = src.makeGetContext(minSize); - final WritableChunkSink.FillFromContext destContext = dest.makeFillFromContext(minSize); - final OrderedKeys.Iterator srcIter = srcAllKeys.getOrderedKeysIterator(); - final OrderedKeys.Iterator destIter = destAllKeys.getOrderedKeysIterator()) { + final WritableChunkSink.FillFromContext destContext = dest.makeFillFromContext(minSize); + final OrderedKeys.Iterator srcIter = srcAllKeys.getOrderedKeysIterator(); + final OrderedKeys.Iterator destIter = destAllKeys.getOrderedKeysIterator()) { while (srcIter.hasMore()) { Assert.assertion(destIter.hasMore(), "destIter.hasMore()"); final OrderedKeys srcNextKeys = srcIter.getNextOrderedKeysWithLength(minSize); final OrderedKeys destNextKeys = destIter.getNextOrderedKeysWithLength(minSize); - Assert.eq(srcNextKeys.size(), "srcNextKeys.size()", destNextKeys.size(), - "destNextKeys.size()"); + Assert.eq(srcNextKeys.size(), "srcNextKeys.size()", destNextKeys.size(), "destNextKeys.size()"); final Chunk chunk = - usePrev ? 
src.getPrevChunk(srcContext, srcNextKeys) - : src.getChunk(srcContext, srcNextKeys); + usePrev ? src.getPrevChunk(srcContext, srcNextKeys) : src.getChunk(srcContext, srcNextKeys); dest.fillFromChunk(destContext, chunk, destNextKeys); } } @@ -624,12 +604,11 @@ public static void copyData(ChunkSource.WithPrev sr * @param destAllKeys The destination keys. It is ok for srcAllKeys == destAllKeys. * @param usePrev Should we read previous values from src */ - public static void copyData(ChunkSource.WithPrev[] sources, - OrderedKeys srcAllKeys, WritableSource[] destinations, - OrderedKeys destAllKeys, boolean usePrev) { + public static void copyData(ChunkSource.WithPrev[] sources, OrderedKeys srcAllKeys, + WritableSource[] destinations, + OrderedKeys destAllKeys, boolean usePrev) { if (srcAllKeys.size() != destAllKeys.size()) { - final String msg = - String.format("Expected srcAllKeys.size() == destAllKeys.size(), but got %d and %d", + final String msg = String.format("Expected srcAllKeys.size() == destAllKeys.size(), but got %d and %d", srcAllKeys.size(), destAllKeys.size()); throw new IllegalArgumentException(msg); } @@ -639,21 +618,19 @@ public static void copyData(ChunkSource.WithPrev[] } if (sources.length != destinations.length) { throw new IllegalArgumentException( - "Expected sources and destinations to be parallel arrays: sources length=" - + sources.length + ", destinations length=" + destinations.length); + "Expected sources and destinations to be parallel arrays: sources length=" + sources.length + + ", destinations length=" + destinations.length); } final ChunkSource.GetContext[] sourceContexts = new ChunkSource.GetContext[sources.length]; - final WritableChunkSink.FillFromContext[] destContexts = - new WritableChunkSink.FillFromContext[sources.length]; + final WritableChunkSink.FillFromContext[] destContexts = new WritableChunkSink.FillFromContext[sources.length]; try (final SharedContext sharedContext = SharedContext.makeSharedContext(); - final 
OrderedKeys.Iterator srcIter = srcAllKeys.getOrderedKeysIterator(); - final OrderedKeys.Iterator destIter = destAllKeys.getOrderedKeysIterator(); - final SafeCloseableArray ignored = - new SafeCloseableArray<>(sourceContexts); - final SafeCloseableArray ignored2 = - new SafeCloseableArray<>(destContexts)) { + final OrderedKeys.Iterator srcIter = srcAllKeys.getOrderedKeysIterator(); + final OrderedKeys.Iterator destIter = destAllKeys.getOrderedKeysIterator(); + final SafeCloseableArray ignored = new SafeCloseableArray<>(sourceContexts); + final SafeCloseableArray ignored2 = + new SafeCloseableArray<>(destContexts)) { for (int ss = 0; ss < sources.length; ++ss) { for (int dd = 0; dd < destinations.length; ++dd) { @@ -670,34 +647,30 @@ public static void copyData(ChunkSource.WithPrev[] Assert.assertion(destIter.hasMore(), "destIter.hasMore()"); final OrderedKeys srcNextKeys = srcIter.getNextOrderedKeysWithLength(minSize); final OrderedKeys destNextKeys = destIter.getNextOrderedKeysWithLength(minSize); - Assert.eq(srcNextKeys.size(), "srcNextKeys.size()", destNextKeys.size(), - "destNextKeys.size()"); + Assert.eq(srcNextKeys.size(), "srcNextKeys.size()", destNextKeys.size(), "destNextKeys.size()"); sharedContext.reset(); for (int cc = 0; cc < sources.length; ++cc) { final Chunk chunk = - usePrev ? sources[cc].getPrevChunk(sourceContexts[cc], srcNextKeys) - : sources[cc].getChunk(sourceContexts[cc], srcNextKeys); + usePrev ? 
sources[cc].getPrevChunk(sourceContexts[cc], srcNextKeys) + : sources[cc].getChunk(sourceContexts[cc], srcNextKeys); destinations[cc].fillFromChunk(destContexts[cc], chunk, destNextKeys); } } } } - public static void fillWithNullValue(WritableChunkSink dest, - OrderedKeys allKeys) { + public static void fillWithNullValue(WritableChunkSink dest, OrderedKeys allKeys) { final int minSize = Math.min(allKeys.intSize(), COPY_DATA_CHUNK_SIZE); if (minSize == 0) { return; } - try ( - final WritableChunkSink.FillFromContext destContext = dest.makeFillFromContext(minSize); - final WritableChunk chunk = dest.getChunkType().makeWritableChunk(minSize); - final OrderedKeys.Iterator iter = allKeys.getOrderedKeysIterator()) { + try (final WritableChunkSink.FillFromContext destContext = dest.makeFillFromContext(minSize); + final WritableChunk chunk = dest.getChunkType().makeWritableChunk(minSize); + final OrderedKeys.Iterator iter = allKeys.getOrderedKeysIterator()) { chunk.fillWithNullValue(0, minSize); while (iter.hasMore()) { - try (final OrderedKeys nextKeys = - iter.getNextOrderedKeysWithLength(COPY_DATA_CHUNK_SIZE)) { + try (final OrderedKeys nextKeys = iter.getNextOrderedKeysWithLength(COPY_DATA_CHUNK_SIZE)) { dest.fillFromChunk(destContext, chunk, nextKeys); } } @@ -711,8 +684,7 @@ public static void fillWithNullValue(WritableChunk * * @return a chunk of integers from 0 to chunkSize - 1 */ - public static WritableIntChunk makeInOrderIntChunk( - int chunkSize) { + public static WritableIntChunk makeInOrderIntChunk(int chunkSize) { final WritableIntChunk inOrderChunk = WritableIntChunk.makeWritableChunk(chunkSize); fillInOrder(inOrderChunk); return inOrderChunk; diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/ColumnHolder.java b/DB/src/main/java/io/deephaven/db/v2/utils/ColumnHolder.java index 1a2a44cffaf..b21723f78e7 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/ColumnHolder.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/ColumnHolder.java @@ -26,13 
+26,12 @@ public class ColumnHolder { * * @param name column name * @param dataType column data type - * @param componentType column component type (for array or - * {@link io.deephaven.db.tables.dbarrays.DbArray>} data types) + * @param componentType column component type (for array or {@link io.deephaven.db.tables.dbarrays.DbArray>} data + * types) * @param data column data */ @SuppressWarnings("unchecked") - public ColumnHolder(String name, Class dataType, Class componentType, boolean grouped, - T... data) { + public ColumnHolder(String name, Class dataType, Class componentType, boolean grouped, T... data) { this(name, grouped, dataType, componentType, data); } @@ -114,26 +113,25 @@ public ColumnHolder(String name, boolean grouped, float... data) { } /** - * Construct a new set of column data with a specified type. This overload allows the creation - * of a ColumnHolder where the official data type type does not match the data. + * Construct a new set of column data with a specified type. This overload allows the creation of a ColumnHolder + * where the official data type type does not match the data. 
* * @param name column name * @param grouped true if the column is grouped; false otherwise * @param dataType column data type - * @param componentType column component type (for array or - * {@link io.deephaven.db.tables.dbarrays.DbArray>} data types) + * @param componentType column component type (for array or {@link io.deephaven.db.tables.dbarrays.DbArray>} data + * types) * @param data column data */ - private ColumnHolder(String name, boolean grouped, Class dataType, Class componentType, - Object data) { + private ColumnHolder(String name, boolean grouped, Class dataType, Class componentType, Object data) { if (!data.getClass().isArray()) { throw new IllegalArgumentException("Data must be provided as an array"); } if (!data.getClass().getComponentType().isAssignableFrom(dataType) - && !(dataType == DBDateTime.class && data.getClass().getComponentType() == long.class) - && !(dataType == Boolean.class && data.getClass().getComponentType() == byte.class)) { - throw new IllegalArgumentException("Incompatible data type: " + dataType - + " can not be stored in array of type " + data.getClass()); + && !(dataType == DBDateTime.class && data.getClass().getComponentType() == long.class) + && !(dataType == Boolean.class && data.getClass().getComponentType() == byte.class)) { + throw new IllegalArgumentException( + "Incompatible data type: " + dataType + " can not be stored in array of type " + data.getClass()); } this.name = NameValidator.validateColumnName(name); // noinspection unchecked @@ -144,33 +142,29 @@ private ColumnHolder(String name, boolean grouped, Class dataType, Class c } /** - * Create a column holder for a DateTime column where the values are represented as longs. - * Whatever process produces a table from this column holder should respect this and create the - * appropriate type of ColumnSource. Under normal conditions, this will be a DateTimeArraySource - * (see {@link #getColumnSource()}). 
+ * Create a column holder for a DateTime column where the values are represented as longs. Whatever process produces + * a table from this column holder should respect this and create the appropriate type of ColumnSource. Under normal + * conditions, this will be a DateTimeArraySource (see {@link #getColumnSource()}). * * @param name column name * @param grouped true if the column is grouped; false otherwise * @param data column data (long integers representing nanos since the epoch) * @return a DBDateTime column holder implemented with longs for storage */ - public static ColumnHolder getDateTimeColumnHolder(String name, boolean grouped, - long... data) { + public static ColumnHolder getDateTimeColumnHolder(String name, boolean grouped, long... data) { return new ColumnHolder<>(name, grouped, DBDateTime.class, null, data); } /** - * Create a column holder for a Boolean column where the calues are represented as bytes. The - * given byte array will be converted to a Boolean array. + * Create a column holder for a Boolean column where the calues are represented as bytes. The given byte array will + * be converted to a Boolean array. * * @param name column name * @param grouped true if the column is grouped; false otherwise - * @param data column data (byte values where 1 represents true, 0 represents false, and null - * otherwise) + * @param data column data (byte values where 1 represents true, 0 represents false, and null otherwise) * @return a Boolean column holder */ - public static ColumnHolder getBooleanColumnHolder(String name, boolean grouped, - byte... data) { + public static ColumnHolder getBooleanColumnHolder(String name, boolean grouped, byte... 
data) { final Boolean[] dbData = new Boolean[data.length]; for (int i = 0; i < data.length; i++) { if (data[i] == (byte) 0) { @@ -185,8 +179,7 @@ public static ColumnHolder getBooleanColumnHolder(String name, boolean } /** - * Create a column holder from an array object, inferring the data type from the given array - * object. + * Create a column holder from an array object, inferring the data type from the given array object. * * @param name The column name * @param grouped true if the column is grouped; false otherwise @@ -195,7 +188,7 @@ public static ColumnHolder getBooleanColumnHolder(String name, boolean */ public static ColumnHolder createColumnHolder(String name, boolean grouped, T... data) { return new ColumnHolder(name, data.getClass().getComponentType(), - data.getClass().getComponentType().getComponentType(), grouped, data); + data.getClass().getComponentType().getComponentType(), grouped, data); } public String getName() { @@ -203,21 +196,18 @@ public String getName() { } /** - * Gets a column source for the data. Other than the special case of DBDateTime columns, this - * requires that the type specified match the component type of the actual data. + * Gets a column source for the data. Other than the special case of DBDateTime columns, this requires that the type + * specified match the component type of the actual data. 
* * @return column source constructed with data from this column holder */ public ColumnSource getColumnSource() { if (data.getClass().getComponentType().equals(dataType)) { - return ArrayBackedColumnSource.getMemoryColumnSourceUntyped(data, dataType, - componentType); - } else if (dataType.equals(DBDateTime.class) - && data.getClass().getComponentType().equals(long.class)) { + return ArrayBackedColumnSource.getMemoryColumnSourceUntyped(data, dataType, componentType); + } else if (dataType.equals(DBDateTime.class) && data.getClass().getComponentType().equals(long.class)) { return ArrayBackedColumnSource.getDateTimeMemoryColumnSource((long[]) data); } else { - throw new IllegalStateException( - "Unsupported column holder data & type: " + dataType.getName() + ", " + throw new IllegalStateException("Unsupported column holder data & type: " + dataType.getName() + ", " + data.getClass().getComponentType().getName()); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/ColumnsToRowsTransform.java b/DB/src/main/java/io/deephaven/db/v2/utils/ColumnsToRowsTransform.java index c17ea416535..91bb88a4d67 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/ColumnsToRowsTransform.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/ColumnsToRowsTransform.java @@ -21,33 +21,32 @@ * Convert value columns into labeled rows. * *

    - * There are times when you have a wide table, that is better displayed to the user as a narrow - * table with additional rows. For example, you might have a table with columns for "Bid", "Ask" and - * "Last", which you may prefer to have three rows, one each for Bid, Ask, and Last with a label for - * each symbol. + * There are times when you have a wide table, that is better displayed to the user as a narrow table with additional + * rows. For example, you might have a table with columns for "Bid", "Ask" and "Last", which you may prefer to have + * three rows, one each for Bid, Ask, and Last with a label for each symbol. *

    * *

    * The same can be accomplished by calling - * .update("Label=new String[]{`Bid`, `Ask`, `Last`}", "Value=new double[]{Bid, Ask, Last}").ungroup(), - * but the creation of arrays in the update statement introduces additional overhead and garbage - * creation to the query execution. + * .update("Label=new String[]{`Bid`, `Ask`, `Last`}", "Value=new double[]{Bid, Ask, Last}").ungroup(), but + * the creation of arrays in the update statement introduces additional overhead and garbage creation to the query + * execution. *

    * *

    - * You may have only a single label column, but you may define multiple output value columns, all of - * which must have the same number of source columns. + * You may have only a single label column, but you may define multiple output value columns, all of which must have the + * same number of source columns. *

    * *

    - * For each output value column, all of the constituent input columns columns must have the same - * type. If the types are different, then an IllegalArgumentException is thrown. + * For each output value column, all of the constituent input columns must have the same type. If the types are + * different, then an IllegalArgumentException is thrown. *

    * *

    - * For example, when calling @{code ColumnsToRowsTransform.columnsToRows(inTable, "Name", new - * String[]{"IV", "DV"}, new String[]{"Apple", "Banana", "Canteloupe"}, new String[][]{new - * String[]{"Val1", "Val2", "Val3"}, new String[]{"D1", "D2", "D3"}});}, on this table: + * For example, when calling {@code ColumnsToRowsTransform.columnsToRows(inTable, "Name", new String[]{"IV", "DV"}, new + * String[]{"Apple", "Banana", "Canteloupe"}, new String[][]{new String[]{"Val1", "Val2", "Val3"}, new String[]{"D1", + * "D2", "D3"}});}, on this table: * *

      *        Sym|      Val1|                  D1|                  D2|      Val2|      Val3|                  D3
    @@ -77,12 +76,11 @@ public class ColumnsToRowsTransform {
          * @param source the table with multiple value columns
          * @param labelColumn the output column name for the label column
          * @param valueColumn the output column name for the value column
    -     * @param transposeColumns the names of the columns to transpose, the label value is the name of
    -     *        the column
    +     * @param transposeColumns the names of the columns to transpose, the label value is the name of the column
          * @return the transformed table
          */
    -    public static Table columnsToRows(final Table source, final String labelColumn,
    -        final String valueColumn, final String... transposeColumns) {
    +    public static Table columnsToRows(final Table source, final String labelColumn, final String valueColumn,
    +            final String... transposeColumns) {
             return columnsToRows(source, labelColumn, valueColumn, transposeColumns, transposeColumns);
         }
     
    @@ -96,10 +94,10 @@ public static Table columnsToRows(final Table source, final String labelColumn,
          * @param transposeColumns the input column names to transpose, must be parallel to labels
          * @return the transformed table
          */
    -    public static Table columnsToRows(final Table source, final String labelColumn,
    -        final String valueColumn, final String[] labels, final String[] transposeColumns) {
    +    public static Table columnsToRows(final Table source, final String labelColumn, final String valueColumn,
    +            final String[] labels, final String[] transposeColumns) {
             return columnsToRows(source, labelColumn, new String[] {valueColumn}, labels,
    -            new String[][] {transposeColumns});
    +                new String[][] {transposeColumns});
         }
     
         /**
    @@ -108,27 +106,24 @@ public static Table columnsToRows(final Table source, final String labelColumn,
          * @param source the table with multiple value columns
          * @param labelColumn the output column name for the label column
          * @param valueColumns the output column names for the value columns
    -     * @param labels the labels for the transposed columns, must be parallel to each element of
    -     *        transposeColumns
    -     * @param transposeColumns an array parallel to valueColumns; each element is in turn an array
    -     *        of input column names that are constituents for the output column. The input columns
    -     *        within each element must be the same type, and the cardinality much match labels.
    +     * @param labels the labels for the transposed columns, must be parallel to each element of transposeColumns
    +     * @param transposeColumns an array parallel to valueColumns; each element is in turn an array of input column names
    +     *        that are constituents for the output column. The input columns within each element must be the same type,
    +     *        and the cardinality must match labels.
          * @return the transformed table
          */
    -    public static Table columnsToRows(final Table source, final String labelColumn,
    -        final String[] valueColumns, final String[] labels, final String[][] transposeColumns) {
    +    public static Table columnsToRows(final Table source, final String labelColumn, final String[] valueColumns,
    +            final String[] labels, final String[][] transposeColumns) {
             if (valueColumns.length == 0) {
                 throw new IllegalArgumentException("No columns to transpose defined!");
             }
             if (valueColumns.length != transposeColumns.length) {
    -            throw new IllegalArgumentException(
    -                "Inconsistent transpose column definition, " + valueColumns.length
    +            throw new IllegalArgumentException("Inconsistent transpose column definition, " + valueColumns.length
                         + " names defined, " + transposeColumns.length + " columns defined.");
             }
             for (int cc = 0; cc < transposeColumns.length; ++cc) {
                 if (labels.length != transposeColumns[cc].length) {
    -                throw new IllegalArgumentException(
    -                    labels.length + " labels defined, but " + transposeColumns[cc].length
    +                throw new IllegalArgumentException(labels.length + " labels defined, but " + transposeColumns[cc].length
                             + " transpose columns defined for " + valueColumns[cc] + ".");
                 }
             }
    @@ -148,12 +143,10 @@ public static Table columnsToRows(final Table source, final String labelColumn,
             final List expandSet = new ArrayList<>();
     
             final int bits = 64 - Long.numberOfLeadingZeros(fanout - 1);
    -        final CrossJoinShiftState crossJoinShiftState =
    -            bits > 0 ? new CrossJoinShiftState(bits) : null;
    +        final CrossJoinShiftState crossJoinShiftState = bits > 0 ? new CrossJoinShiftState(bits) : null;
             final Class[] valueTypes = new Class[transposeColumns.length];
             final String[] typeSourceName = new String[transposeColumns.length];
    -        final ColumnSource[][] sourcesToTranspose =
    -            new ColumnSource[transposeColumns.length][labels.length];
    +        final ColumnSource[][] sourcesToTranspose = new ColumnSource[transposeColumns.length][labels.length];
             for (int cc = 0; cc < transposeColumns.length; ++cc) {
                 for (int dd = 0; dd < transposeColumns[cc].length; ++dd) {
                     sourcesToTranspose[cc][dd] = source.getColumnSource(transposeColumns[cc][dd]);
    @@ -169,9 +162,8 @@ public static Table columnsToRows(final Table source, final String labelColumn,
                                 typeSourceName[cc] = name;
                             } else {
                                 if (valueTypes[cc] != cs.getType()) {
    -                                throw new IllegalArgumentException(
    -                                    "Incompatible transpose types " + typeSourceName[cc] + " is "
    -                                        + valueTypes[cc] + ", " + name + " is " + cs.getType());
    +                                throw new IllegalArgumentException("Incompatible transpose types " + typeSourceName[cc]
    +                                        + " is " + valueTypes[cc] + ", " + name + " is " + cs.getType());
                                 }
                             }
                             return;
    @@ -196,7 +188,7 @@ public static Table columnsToRows(final Table source, final String labelColumn,
                 for (int cc = 0; cc < valueColumns.length; cc++) {
                     // noinspection unchecked
                     resultMap.put(valueColumns[cc],
    -                    new TransposedColumnSource(valueTypes[cc], bits, sourcesToTranspose[cc]));
    +                        new TransposedColumnSource(valueTypes[cc], bits, sourcesToTranspose[cc]));
                 }
             }
     
    @@ -211,7 +203,7 @@ public static Table columnsToRows(final Table source, final String labelColumn,
                 final String[] sourceColumns = new String[sourceColumnCount];
                 final MutableInt columnIndex = new MutableInt();
                 final ModifiedColumnSet modifyAll = ((DynamicTable) source)
    -                .newModifiedColumnSet(expandSet.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY));
    +                    .newModifiedColumnSet(expandSet.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY));
                 final ModifiedColumnSet[] modifyOneRow = new ModifiedColumnSet[labels.length];
                 // noinspection unchecked
                 final List[] sourcesForRow = new ArrayList[labels.length];
    @@ -225,8 +217,7 @@ public static Table columnsToRows(final Table source, final String labelColumn,
                     if (allTransposeSet.contains(name)) {
                         for (int cc = 0; cc < transposeSet.size(); ++cc) {
                             if (transposeSet.get(cc).contains(name)) {
    -                            resultColumnSets[columnIndex.intValue()] =
    -                                result.newModifiedColumnSet(valueColumns[cc]);
    +                            resultColumnSets[columnIndex.intValue()] = result.newModifiedColumnSet(valueColumns[cc]);
                                 sourcesForRow[transposeIndex[cc]++].add(name);
                             }
                         }
    @@ -237,16 +228,15 @@ public static Table columnsToRows(final Table source, final String labelColumn,
                 });
     
                 for (int cc = 0; cc < labels.length; ++cc) {
    -                modifyOneRow[cc] = ((DynamicTable) source).newModifiedColumnSet(
    -                    sourcesForRow[cc].toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY));
    +                modifyOneRow[cc] = ((DynamicTable) source)
    +                        .newModifiedColumnSet(sourcesForRow[cc].toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY));
                 }
     
                 final ModifiedColumnSet.Transformer transformer =
    -                dynamicSource.newModifiedColumnSetTransformer(sourceColumns, resultColumnSets);
    -            dynamicSource.listenForUpdates(new BaseTable.ShiftAwareListenerImpl(
    -                "columnsToRows(" + labelColumn + ", " + Arrays.toString(valueColumns) + ", "
    -                    + Arrays.deepToString(transposeColumns) + ")",
    -                dynamicSource, result) {
    +                    dynamicSource.newModifiedColumnSetTransformer(sourceColumns, resultColumnSets);
    +            dynamicSource.listenForUpdates(new BaseTable.ShiftAwareListenerImpl("columnsToRows(" + labelColumn + ", "
    +                    + Arrays.toString(valueColumns) + ", " + Arrays.deepToString(transposeColumns) + ")", dynamicSource,
    +                    result) {
                     @Override
                     public void onUpdate(final Update upstream) {
                         final Update downstream = new Update();
    @@ -255,20 +245,17 @@ public void onUpdate(final Update upstream) {
                         downstream.removed = transformIndex(upstream.removed, fanout, fanoutPow2);
     
                         if (upstream.modified.nonempty()) {
    -                        final boolean expandModified =
    -                            upstream.modifiedColumnSet.containsAny(modifyAll);
    +                        final boolean expandModified = upstream.modifiedColumnSet.containsAny(modifyAll);
                             if (expandModified) {
                                 // all rows are modified, because there is an expanded column modified
    -                            downstream.modified =
    -                                transformIndex(upstream.modified, fanout, fanoutPow2);
    +                            downstream.modified = transformIndex(upstream.modified, fanout, fanoutPow2);
                             } else {
                                 // we should determine modifications based on the value changes
                                 final boolean[] rowModified = new boolean[modifyOneRow.length];
                                 boolean allTrue = true;
                                 int maxModified = 0;
                                 for (int ii = 0; ii < rowModified.length; ++ii) {
    -                                final boolean modified =
    -                                    upstream.modifiedColumnSet.containsAny(modifyOneRow[ii]);
    +                                final boolean modified = upstream.modifiedColumnSet.containsAny(modifyOneRow[ii]);
                                     rowModified[ii] = modified;
                                     if (modified) {
                                         maxModified = ii;
    @@ -277,11 +264,10 @@ public void onUpdate(final Update upstream) {
                                     }
                                 }
                                 if (allTrue) {
    -                                downstream.modified =
    -                                    transformIndex(upstream.modified, fanout, fanoutPow2);
    +                                downstream.modified = transformIndex(upstream.modified, fanout, fanoutPow2);
                                 } else {
    -                                downstream.modified = transformIndex(upstream.modified, fanoutPow2,
    -                                    rowModified, maxModified);
    +                                downstream.modified =
    +                                        transformIndex(upstream.modified, fanoutPow2, rowModified, maxModified);
                                 }
                             }
                         } else {
    @@ -297,8 +283,7 @@ public void onUpdate(final Update upstream) {
                             final int shiftCount = upstream.shifted.size();
                             for (int ii = 0; ii < shiftCount; ++ii) {
                                 final long beginRange = upstream.shifted.getBeginRange(ii) * fanoutPow2;
    -                            final long endRange =
    -                                upstream.shifted.getEndRange(ii) * fanoutPow2 + fanoutPow2 - 1;
    +                            final long endRange = upstream.shifted.getEndRange(ii) * fanoutPow2 + fanoutPow2 - 1;
                                 final long delta = upstream.shifted.getShiftDelta(ii) * fanoutPow2;
     
                                 shiftBuilder.shiftRange(beginRange, endRange, delta);
    @@ -311,8 +296,7 @@ public void onUpdate(final Update upstream) {
     
                         resultIndex.insert(downstream.added);
     
    -                    transformer.clearAndTransform(upstream.modifiedColumnSet,
    -                        downstream.modifiedColumnSet);
    +                    transformer.clearAndTransform(upstream.modifiedColumnSet, downstream.modifiedColumnSet);
                         result.notifyListeners(downstream);
                     }
                 });
    @@ -323,13 +307,12 @@ public void onUpdate(final Update upstream) {
     
         private static Index transformIndex(final Index index, final int fanout, final int fanoutPow2) {
             final Index.SequentialBuilder sequentialBuilder = Index.FACTORY.getSequentialBuilder();
    -        index.forAllLongs(
    -            idx -> sequentialBuilder.appendRange(idx * fanoutPow2, idx * fanoutPow2 + fanout - 1));
    +        index.forAllLongs(idx -> sequentialBuilder.appendRange(idx * fanoutPow2, idx * fanoutPow2 + fanout - 1));
             return sequentialBuilder.getIndex();
         }
     
    -    private static Index transformIndex(final Index index, final int fanoutPow2,
    -        final boolean[] rowModified, final int maxModified) {
    +    private static Index transformIndex(final Index index, final int fanoutPow2, final boolean[] rowModified,
    +            final int maxModified) {
             final Index.SequentialBuilder sequentialBuilder = Index.FACTORY.getSequentialBuilder();
             index.forAllLongs(idx -> {
                 for (int ii = 0; ii <= maxModified; ++ii) {
    @@ -367,8 +350,8 @@ private String getLabel(final long index) {
     
             @Override
             public void fillChunk(@NotNull final FillContext context,
    -            @NotNull final WritableChunk destination,
    -            @NotNull final OrderedKeys orderedKeys) {
    +                @NotNull final WritableChunk destination,
    +                @NotNull final OrderedKeys orderedKeys) {
                 final MutableInt outputPosition = new MutableInt();
                 final WritableObjectChunk objectChunk = destination.asWritableObjectChunk();
                 destination.setSize(orderedKeys.intSize());
    @@ -380,8 +363,8 @@ public void fillChunk(@NotNull final FillContext context,
     
             @Override
             public void fillPrevChunk(@NotNull final FillContext context,
    -            @NotNull final WritableChunk destination,
    -            @NotNull final OrderedKeys orderedKeys) {
    +                @NotNull final WritableChunk destination,
    +                @NotNull final OrderedKeys orderedKeys) {
                 fillChunk(context, destination, orderedKeys);
             }
         }
    @@ -393,7 +376,7 @@ private static class TransposedColumnSource extends AbstractColumnSource {
             private final ColumnSource[] transposeColumns;
     
             private TransposedColumnSource(final Class valueType, final int bits,
    -            final ColumnSource[] transposeColumns) {
    +                final ColumnSource[] transposeColumns) {
                 super(valueType);
                 this.bits = bits;
                 this.mask = (1L << bits) - 1;
    @@ -544,8 +527,8 @@ private class TransposeFillContext implements FillContext {
                 private TransposeFillContext(final int chunkCapacity) {
                     tempValues = getChunkType().makeWritableChunk(chunkCapacity);
                     permuteKernel = PermuteKernel.makePermuteKernel(getChunkType());
    -                innerContexts = Arrays.stream(transposeColumns)
    -                    .map(tc -> tc.makeFillContext(chunkCapacity)).toArray(FillContext[]::new);
    +                innerContexts = Arrays.stream(transposeColumns).map(tc -> tc.makeFillContext(chunkCapacity))
    +                        .toArray(FillContext[]::new);
                     // noinspection unchecked
                     innerKeys = new WritableLongChunk[transposeColumns.length];
                     // noinspection unchecked
    @@ -558,15 +541,14 @@ private TransposeFillContext(final int chunkCapacity) {
             }
     
             @Override
    -        public FillContext makeFillContext(final int chunkCapacity,
    -            final SharedContext sharedContext) {
    +        public FillContext makeFillContext(final int chunkCapacity, final SharedContext sharedContext) {
                 return new TransposeFillContext(chunkCapacity);
             }
     
             @Override
             public void fillChunk(@NotNull final FillContext context,
    -            @NotNull final WritableChunk destination,
    -            @NotNull final OrderedKeys orderedKeys) {
    +                @NotNull final WritableChunk destination,
    +                @NotNull final OrderedKeys orderedKeys) {
                 // noinspection unchecked
                 final TransposeFillContext transposeFillContext = (TransposeFillContext) context;
                 updateContext(transposeFillContext, orderedKeys);
    @@ -575,8 +557,8 @@ public void fillChunk(@NotNull final FillContext context,
     
             @Override
             public void fillPrevChunk(@NotNull final FillContext context,
    -            @NotNull final WritableChunk destination,
    -            @NotNull final OrderedKeys orderedKeys) {
    +                @NotNull final WritableChunk destination,
    +                @NotNull final OrderedKeys orderedKeys) {
                 // noinspection unchecked
                 final TransposeFillContext transposeFillContext = (TransposeFillContext) context;
                 updateContext(transposeFillContext, orderedKeys);
    @@ -584,7 +566,7 @@ public void fillPrevChunk(@NotNull final FillContext context,
             }
     
             private void updateContext(@NotNull final TransposeFillContext context,
    -            @NotNull final OrderedKeys orderedKeys) {
    +                @NotNull final OrderedKeys orderedKeys) {
                 for (int ii = 0; ii < transposeColumns.length; ++ii) {
                     context.innerKeys[ii].setSize(0);
                     context.outputPositions[ii].setSize(0);
    @@ -599,38 +581,32 @@ private void updateContext(@NotNull final TransposeFillContext context,
                 });
             }
     
    -        private void doFillAndPermute(
    -            @NotNull final WritableChunk destination,
    -            final TransposeFillContext transposeFillContext, final boolean usePrev,
    -            final long originalSize) {
    +        private void doFillAndPermute(@NotNull final WritableChunk destination,
    +                final TransposeFillContext transposeFillContext, final boolean usePrev, final long originalSize) {
                 for (int ii = 0; ii < transposeColumns.length; ++ii) {
                     if (transposeFillContext.innerKeys[ii].size() == 0) {
                         continue;
                     }
    -                final boolean isComplete =
    -                    transposeFillContext.innerKeys[ii].size() == originalSize;
    +                final boolean isComplete = transposeFillContext.innerKeys[ii].size() == originalSize;
                     // noinspection unchecked
                     final WritableChunk tempDest =
    -                    isComplete ? (WritableChunk) destination : transposeFillContext.tempValues;
    -                try (final OrderedKeys innerOk = OrderedKeys
    -                    .wrapKeyIndicesChunkAsOrderedKeys(transposeFillContext.innerKeys[ii])) {
    +                        isComplete ? (WritableChunk) destination : transposeFillContext.tempValues;
    +                try (final OrderedKeys innerOk =
    +                        OrderedKeys.wrapKeyIndicesChunkAsOrderedKeys(transposeFillContext.innerKeys[ii])) {
                         if (usePrev) {
                             // noinspection unchecked
    -                        transposeColumns[ii].fillPrevChunk(transposeFillContext.innerContexts[ii],
    -                            tempDest, innerOk);
    +                        transposeColumns[ii].fillPrevChunk(transposeFillContext.innerContexts[ii], tempDest, innerOk);
                         } else {
                             // noinspection unchecked
    -                        transposeColumns[ii].fillChunk(transposeFillContext.innerContexts[ii],
    -                            tempDest, innerOk);
    +                        transposeColumns[ii].fillChunk(transposeFillContext.innerContexts[ii], tempDest, innerOk);
                         }
                     }
                     if (isComplete) {
                         return;
                     }
                     // noinspection unchecked
    -                transposeFillContext.permuteKernel.permute(
    -                    (WritableChunk) transposeFillContext.tempValues,
    -                    transposeFillContext.outputPositions[ii], destination);
    +                transposeFillContext.permuteKernel.permute((WritableChunk) transposeFillContext.tempValues,
    +                        transposeFillContext.outputPositions[ii], destination);
                 }
             }
         }
    diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/ContiguousRedirectionIndexImpl.java b/DB/src/main/java/io/deephaven/db/v2/utils/ContiguousRedirectionIndexImpl.java
    index 23ede407c9d..720fceaed7f 100644
    --- a/DB/src/main/java/io/deephaven/db/v2/utils/ContiguousRedirectionIndexImpl.java
    +++ b/DB/src/main/java/io/deephaven/db/v2/utils/ContiguousRedirectionIndexImpl.java
    @@ -43,14 +43,11 @@ public ContiguousRedirectionIndexImpl(long[] redirections) {
     
         @Override
         public long put(long key, long index) {
    -        Require.requirement(key <= Integer.MAX_VALUE && key >= 0,
    -            "key <= Integer.MAX_VALUE && key >= 0", key, "key");
    +        Require.requirement(key <= Integer.MAX_VALUE && key >= 0, "key <= Integer.MAX_VALUE && key >= 0", key, "key");
             if (key >= redirections.length) {
    -            final long[] newRedirections =
    -                new long[Math.max((int) key + 100, redirections.length * 2)];
    +            final long[] newRedirections = new long[Math.max((int) key + 100, redirections.length * 2)];
                 System.arraycopy(redirections, 0, newRedirections, 0, redirections.length);
    -            Arrays.fill(newRedirections, redirections.length, newRedirections.length,
    -                Index.NULL_KEY);
    +            Arrays.fill(newRedirections, redirections.length, newRedirections.length, Index.NULL_KEY);
                 redirections = newRedirections;
             }
             final long previous = redirections[(int) key];
    @@ -83,9 +80,9 @@ public long get(long key) {
     
         @Override
         public void fillChunk(
    -        @NotNull final FillContext fillContext,
    -        @NotNull final WritableLongChunk mappedKeysOut,
    -        @NotNull final OrderedKeys keysToMap) {
    +            @NotNull final FillContext fillContext,
    +            @NotNull final WritableLongChunk mappedKeysOut,
    +            @NotNull final OrderedKeys keysToMap) {
             mappedKeysOut.setSize(0);
             keysToMap.forAllLongRanges((final long start, final long end) -> {
                 for (long v = start; v <= end; ++v) {
    @@ -109,9 +106,9 @@ public long getPrev(long key) {
     
         @Override
         public void fillPrevChunk(
    -        @NotNull final FillContext fillContext,
    -        @NotNull final WritableLongChunk mappedKeysOut,
    -        @NotNull final OrderedKeys keysToMap) {
    +            @NotNull final FillContext fillContext,
    +            @NotNull final WritableLongChunk mappedKeysOut,
    +            @NotNull final OrderedKeys keysToMap) {
             if (checkpoint == null) {
                 fillChunk(fillContext, mappedKeysOut, keysToMap);
                 return;
    @@ -144,10 +141,9 @@ public long remove(long leftIndex) {
     
         public synchronized void startTrackingPrevValues() {
             Assert.eqNull(updateCommitter, "updateCommitter");
    -        checkpoint = new TLongLongHashMap(Math.min(size, 1024 * 1024), 0.75f, UPDATES_KEY_NOT_FOUND,
    -            UPDATES_KEY_NOT_FOUND);
    -        updateCommitter =
    -            new UpdateCommitter<>(this, ContiguousRedirectionIndexImpl::commitUpdates);
    +        checkpoint =
    +                new TLongLongHashMap(Math.min(size, 1024 * 1024), 0.75f, UPDATES_KEY_NOT_FOUND, UPDATES_KEY_NOT_FOUND);
    +        updateCommitter = new UpdateCommitter<>(this, ContiguousRedirectionIndexImpl::commitUpdates);
         }
     
         private synchronized void commitUpdates() {
    diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/CumulativeUtil.java b/DB/src/main/java/io/deephaven/db/v2/utils/CumulativeUtil.java
    index e79db3f8799..c5a5c9214bd 100644
    --- a/DB/src/main/java/io/deephaven/db/v2/utils/CumulativeUtil.java
    +++ b/DB/src/main/java/io/deephaven/db/v2/utils/CumulativeUtil.java
    @@ -19,8 +19,8 @@
     public class CumulativeUtil {
     
         /**
    -     * Provides a helper class with a map to cache previous row values when performing rolling or
    -     * cumulative aggregations.
    +     * Provides a helper class with a map to cache previous row values when performing rolling or cumulative
    +     * aggregations.
          */
         @SuppressWarnings("unchecked")
         public static class CumulativeHelper implements Serializable {
    @@ -41,8 +41,8 @@ public double put(Object key, double val) {
         }
     
         /**
    -     * Provides a helper class with a map to cache previous row values when performing filtering of
    -     * data whose values change from row to row.
    +     * Provides a helper class with a map to cache previous row values when performing filtering of data whose values
    +     * change from row to row.
          */
         @SuppressWarnings("unchecked")
         public static class CumulativeFilterHelper implements Serializable {
    @@ -63,28 +63,26 @@ public boolean shouldKeep(Object key, double val) {
         }
     
         /**
    -     * General purpose core method for executing running and cumulative aggregations. Used by helper
    -     * methods which provide template formulae for specific types of aggregations.
    +     * General purpose core method for executing running and cumulative aggregations. Used by helper methods which
    +     * provide template formulae for specific types of aggregations.
          * 
          * @param t The {@link Table} to use as input to the aggregation.
    -     * @param key Nullable string key to use to access the cached previous row value within the
    -     *        {@link CumulativeHelper} map. Note that a non-null literal key must be enclosed in
    -     *        backticks since it will be used in a formula. Typically a column expression, rather
    -     *        than a literal, is used to allow operations based on the grouping of the expression.
    +     * @param key Nullable string key to use to access the cached previous row value within the {@link CumulativeHelper}
    +     *        map. Note that a non-null literal key must be enclosed in backticks since it will be used in a formula.
    +     *        Typically a column expression, rather than a literal, is used to allow operations based on the grouping of
    +     *        the expression.
          * @param startValue Initial value from which to start aggregating. Normally 0.
          * @param newCol The name of the aggregation column to add to the table.
    -     * @param formula A formula indicating how to calculate the aggregation. This is normally
    -     *        provides by a helper method. An example would be something like: "_prev+A" to
    -     *        cumulatively sum A.
    -     * @return A {@link Table} with the new aggregation column added. Note that this column will be
    -     *         a double column, regardless of the numeric type(s) of the formula input(s).
    +     * @param formula A formula indicating how to calculate the aggregation. This is normally provided by a helper
    +     *        method. An example would be something like: "_prev+A" to cumulatively sum A.
    +     * @return A {@link Table} with the new aggregation column added. Note that this column will be a double column,
    +     *         regardless of the numeric type(s) of the formula input(s).
          */
    -    public static Table accumulate(Table t, String key, double startValue, String newCol,
    -        String formula) {
    +    public static Table accumulate(Table t, String key, double startValue, String newCol, String formula) {
             QueryScope.addParam("__CumulativeUtil_map", new CumulativeHelper(startValue));
     
             formula = newCol + "=__CumulativeUtil_map.put(" + key + ","
    -            + formula.replaceAll("_prev", "__CumulativeUtil_map.get(" + key + ")") + ")";
    +                + formula.replaceAll("_prev", "__CumulativeUtil_map.get(" + key + ")") + ")";
     
             return t.update(formula);
         }
    @@ -94,10 +92,10 @@ public static Table accumulate(Table t, String key, double startValue, String ne
          * 
          * @param t The {@link Table} to use as input to the aggregation.
          * @param newCol The name of the aggregation column to add to the table.
    -     * @param formula A formula for the source value on which to calculate a running minimum. This
    -     *        can be as simple as the column name or a more complex expression.
    -     * @return A {@link Table} with the new aggregation column added. Note that this column will be
    -     *         a double column, regardless of the numeric type(s) of the formula input(s).
    +     * @param formula A formula for the source value on which to calculate a running minimum. This can be as simple as
    +     *        the column name or a more complex expression.
    +     * @return A {@link Table} with the new aggregation column added. Note that this column will be a double column,
    +     *         regardless of the numeric type(s) of the formula input(s).
          */
         public static Table cumMin(Table t, String newCol, String formula) {
             return accumulate(t, null, Double.MAX_VALUE, newCol, "min(_prev, (double)" + formula + ")");
    @@ -107,15 +105,15 @@ public static Table cumMin(Table t, String newCol, String formula) {
          * Executes a cumulative sum aggregation far for a formula.
          * 
          * @param t The {@link Table} to use as input to the aggregation.
    -     * @param key Nullable string key to use to access the cached previous row value within the
    -     *        {@link CumulativeHelper} map. Note that a non-null literal key must be enclosed in
    -     *        backticks since it will be used in a formula. Typically a column expression, rather
    -     *        than a literal, is used to allow operations based on the grouping of the expression.
    +     * @param key Nullable string key to use to access the cached previous row value within the {@link CumulativeHelper}
    +     *        map. Note that a non-null literal key must be enclosed in backticks since it will be used in a formula.
    +     *        Typically a column expression, rather than a literal, is used to allow operations based on the grouping of
    +     *        the expression.
          * @param newCol The name of the aggregation column to add to the table.
    -     * @param formula A formula for the source value on which to calculate a running sum. This can
    -     *        be as simple as the column name or a more complex expression.
    -     * @return A {@link Table} with the new aggregation column added. Note that this column will be
    -     *         a double column, regardless of the numeric type(s) of the formula input(s).
    +     * @param formula A formula for the source value on which to calculate a running sum. This can be as simple as the
    +     *        column name or a more complex expression.
    +     * @return A {@link Table} with the new aggregation column added. Note that this column will be a double column,
    +     *         regardless of the numeric type(s) of the formula input(s).
          */
         public static Table cumSum(Table t, String key, String newCol, String formula) {
             return accumulate(t, key, 0, newCol, "_prev+(" + formula + ")");
    @@ -126,10 +124,10 @@ public static Table cumSum(Table t, String key, String newCol, String formula) {
          * 
          * @param t The {@link Table} to use as input to the aggregation.
          * @param newCol The name of the aggregation column to add to the table.
    -     * @param formula A formula for the source value on which to calculate a running sum. This can
    -     *        be as simple as the column name or a more complex expression.
    -     * @return A {@link Table} with the new aggregation column added. Note that this column will be
    -     *         a double column, regardless of the numeric type(s) of the formula input(s).
    +     * @param formula A formula for the source value on which to calculate a running sum. This can be as simple as the
    +     *        column name or a more complex expression.
    +     * @return A {@link Table} with the new aggregation column added. Note that this column will be a double column,
    +     *         regardless of the numeric type(s) of the formula input(s).
          */
         public static Table cumSum(Table t, String newCol, String formula) {
             return accumulate(t, null, 0, newCol, "_prev+(" + formula + ")");
    @@ -141,28 +139,26 @@ public static Table cumSum(Table t, String newCol, String formula) {
          * @param t The {@link Table} to use as input to the aggregation.
          * @param windowSize The number of rows to include in the rolling sum window.
          * @param newCol The name of the aggregation column to add to the table.
    -     * @param formula A formula for the source value on which to calculate a running sum. This can
    -     *        be as simple as the column name or a more complex expression.
    -     * @return A {@link Table} with the new aggregation column added. Note that this column will be
    -     *         a double column, regardless of the numeric type(s) of the formula input(s).
    +     * @param formula A formula for the source value on which to calculate a running sum. This can be as simple as the
    +     *        column name or a more complex expression.
    +     * @return A {@link Table} with the new aggregation column added. Note that this column will be a double column,
    +     *         regardless of the numeric type(s) of the formula input(s).
          */
         public static Table rollingSum(Table t, int windowSize, String newCol, String formula) {
             return accumulate(t, null, 0, newCol, "_prev+(" + formula + ")")
    -            .update(newCol + "=" + newCol + "-" + newCol + "_[i-" + windowSize + "]");
    +                .update(newCol + "=" + newCol + "-" + newCol + "_[i-" + windowSize + "]");
         }
     
         /**
    -     * Returns only rows for which the selected column value is different from the value in the
    -     * previous row.
    +     * Returns only rows for which the selected column value is different from the value in the previous row.
          * 
          * @param t The {@link Table} to use as input to the method.
    -     * @param key Nullable string key to use to access the cached previous row value within the
    -     *        {@link CumulativeHelper} map. Note that a non-null literal key must be enclosed in
    -     *        backticks since it will be used in a formula. Typically a column expression, rather
    -     *        than a literal, is used to allow operations based on the grouping of the expression.
    +     * @param key Nullable string key to use to access the cached previous row value within the {@link CumulativeHelper}
    +     *        map. Note that a non-null literal key must be enclosed in backticks since it will be used in a formula.
    +     *        Typically a column expression, rather than a literal, is used to allow operations based on the grouping of
    +     *        the expression.
          * @param col The column to check for changing values.
    -     * @return A {@link Table} of only rows where the selected value has changed from the value in
    -     *         the previous row.
    +     * @return A {@link Table} of only rows where the selected value has changed from the value in the previous row.
          */
         public static Table filterChanged(Table t, String key, String col) {
             QueryScope.addParam("__CumulativeUtil_map", new CumulativeFilterHelper());
    @@ -177,8 +173,7 @@ public static Table filterChanged(Table t, String key, String col) {
          * @throws IOException
          */
         public static void main(String... args) throws IOException {
    -        Table t = TableTools.emptyTable(20).update("USym=Math.random()>.5 ? `SPY` : `AAPL`",
    -            "Num=Math.random()");
    +        Table t = TableTools.emptyTable(20).update("USym=Math.random()>.5 ? `SPY` : `AAPL`", "Num=Math.random()");
     
             Table test = accumulate(t, "USym", Double.MAX_VALUE, "Min", "min(_prev, Num)");
             TableTools.show(test);
    diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/CurrentOnlyIndex.java b/DB/src/main/java/io/deephaven/db/v2/utils/CurrentOnlyIndex.java
    index 349711af9a0..b9273744ddc 100644
    --- a/DB/src/main/java/io/deephaven/db/v2/utils/CurrentOnlyIndex.java
    +++ b/DB/src/main/java/io/deephaven/db/v2/utils/CurrentOnlyIndex.java
    @@ -22,7 +22,7 @@
     import java.util.function.LongConsumer;
     
     public class CurrentOnlyIndex extends OrderedKeysAsChunkImpl
    -    implements ImplementedByTreeIndexImpl, Index, Externalizable {
    +        implements ImplementedByTreeIndexImpl, Index, Externalizable {
     
         private static final long serialVersionUID = 1L;
     
    @@ -64,8 +64,7 @@ public void insertRange(final long startKey, final long endKey) {
         }
     
         @Override
    -    public void insert(final LongChunk keys, final int offset,
    -        final int length) {
    +    public void insert(final LongChunk keys, final int offset, final int length) {
             Assert.leq(offset + length, "offset + length", keys.size(), "keys.size()");
             assign(impl.ixInsert(keys, offset, length));
         }
    @@ -86,8 +85,7 @@ public void removeRange(final long start, final long end) {
         }
     
         @Override
    -    public void remove(final LongChunk keys, final int offset,
    -        final int length) {
    +    public void remove(final LongChunk keys, final int offset, final int length) {
             Assert.leq(offset + length, "offset + length", keys.size(), "keys.size()");
             assign(impl.ixRemove(keys, offset, length));
         }
    @@ -134,14 +132,12 @@ public OrderedKeys.Iterator getOrderedKeysIterator() {
         }
     
         @Override
    -    public OrderedKeys getOrderedKeysByPosition(final long startPositionInclusive,
    -        final long length) {
    +    public OrderedKeys getOrderedKeysByPosition(final long startPositionInclusive, final long length) {
             return impl.ixGetOrderedKeysByPosition(startPositionInclusive, length);
         }
     
         @Override
    -    public OrderedKeys getOrderedKeysByKeyRange(final long startKeyInclusive,
    -        final long endKeyInclusive) {
    +    public OrderedKeys getOrderedKeysByKeyRange(final long startKeyInclusive, final long endKeyInclusive) {
             return impl.ixGetOrderedKeysByKeyRange(startKeyInclusive, endKeyInclusive);
         }
     
    @@ -237,8 +233,7 @@ public Map getPrevGrouping(final TupleSource tupleSource) {
         public void copyImmutableGroupings(TupleSource source, TupleSource dest) {}
     
         @Override
    -    public Map getGroupingForKeySet(final Set keys,
    -        final TupleSource tupleSource) {
    +    public Map getGroupingForKeySet(final Set keys, final TupleSource tupleSource) {
             throw new UnsupportedOperationException();
         }
     
    @@ -401,8 +396,7 @@ private void assign(final TreeIndexImpl maybeNewImpl) {
         }
     
         @Override
    -    public void fillKeyIndicesChunk(
    -        final WritableLongChunk chunkToFill) {
    +    public void fillKeyIndicesChunk(final WritableLongChunk chunkToFill) {
             IndexUtilities.fillKeyIndicesChunk(this, chunkToFill);
         }
     
    diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/DynamicTableWriter.java b/DB/src/main/java/io/deephaven/db/v2/utils/DynamicTableWriter.java
    index adbe9a6533a..cf6376e821f 100644
    --- a/DB/src/main/java/io/deephaven/db/v2/utils/DynamicTableWriter.java
    +++ b/DB/src/main/java/io/deephaven/db/v2/utils/DynamicTableWriter.java
    @@ -27,8 +27,8 @@
     import java.util.stream.Collectors;
     
     /**
    - * The DynamicTableWriter creates an in-memory table using ArrayBackedColumnSources of the type
    - * specified in the constructor. You can retrieve the table using the {@code getTable} function.
    + * The DynamicTableWriter creates an in-memory table using ArrayBackedColumnSources of the type specified in the
    + * constructor. You can retrieve the table using the {@code getTable} function.
      * 

    * This class is not thread safe, you must synchronize externally. */ @@ -45,30 +45,27 @@ public class DynamicTableWriter implements TableWriter { private int lastSetterRow; /** - * Creates a TableWriter that produces an in-memory table using the provided column names and - * types. + * Creates a TableWriter that produces an in-memory table using the provided column names and types. * * @param columnNames the names of the columns in the output table (and our input) - * @param columnTypes the types of the columns in the output table (must be compatible with the - * input) + * @param columnTypes the types of the columns in the output table (must be compatible with the input) * @param constantValues a Map of columns with constant values */ @SuppressWarnings("WeakerAccess") public DynamicTableWriter(final String[] columnNames, final Class[] columnTypes, - final Map constantValues) { + final Map constantValues) { final Map sources = new LinkedHashMap<>(); arrayColumnSources = new ArrayBackedColumnSource[columnTypes.length]; allocatedSize = 256; for (int i = 0; i < columnTypes.length; i++) { if (constantValues.containsKey(columnNames[i])) { final SingleValueColumnSource singleValueColumnSource = - SingleValueColumnSource.getSingleValueColumnSource(columnTypes[i]); + SingleValueColumnSource.getSingleValueColumnSource(columnTypes[i]); // noinspection unchecked singleValueColumnSource.set(constantValues.get(columnNames[i])); sources.put(columnNames[i], singleValueColumnSource); } else { - arrayColumnSources[i] = - ArrayBackedColumnSource.getMemoryColumnSource(allocatedSize, columnTypes[i]); + arrayColumnSources[i] = ArrayBackedColumnSource.getMemoryColumnSource(allocatedSize, columnTypes[i]); sources.put(columnNames[i], arrayColumnSources[i]); } } @@ -84,18 +81,15 @@ public DynamicTableWriter(final String[] columnNames, final Class[] columnTyp } final int index = ii; factoryMap.put(columns[index].getName(), - (currentRow) -> createRowSetter(columns[index].getType(), - 
arrayColumnSources[index])); + (currentRow) -> createRowSetter(columns[index].getType(), arrayColumnSources[index])); } } /** - * Creates a TableWriter that produces an in-memory table using the provided column names and - * types. + * Creates a TableWriter that produces an in-memory table using the provided column names and types. * * @param columnNames the names of the columns in the output table (and our input) - * @param columnTypes the types of the columns in the output table (must be compatible with the - * input) + * @param columnTypes the types of the columns in the output table (must be compatible with the input) */ public DynamicTableWriter(final String[] columnNames, final Class[] columnTypes) { this(columnNames, columnTypes, Collections.emptyMap()); @@ -122,8 +116,7 @@ public DynamicTableWriter(TableDefinition definition, Map consta /** * Gets the table created by this DynamicTableWriter. *

    - * The returned table is registered with the LiveTableMonitor, and new rows become visible - * within the refresh loop. + * The returned table is registered with the LiveTableMonitor, and new rows become visible within the refresh loop. * * @return a live table with the output of this log */ @@ -132,12 +125,12 @@ public LiveQueryTable getTable() { } /** - * Returns a row writer, which allocates the row. You may get setters for the row, and then call - * addRowToTableIndex when you are finished. Because the row is allocated when you call this - * function, it is possible to get several Row objects before calling addRowToTableIndex. + * Returns a row writer, which allocates the row. You may get setters for the row, and then call addRowToTableIndex + * when you are finished. Because the row is allocated when you call this function, it is possible to get several + * Row objects before calling addRowToTableIndex. *

    - * This contrasts with {@code DynamicTableWriter.getSetter}, which allocates a single row; and - * you must call {@code DynamicTableWriter.addRowToTableIndex} before advancing to the next row. + * This contrasts with {@code DynamicTableWriter.getSetter}, which allocates a single row; and you must call + * {@code DynamicTableWriter.addRowToTableIndex} before advancing to the next row. * * @return a Row from which you can retrieve setters and call write row. */ @@ -147,9 +140,9 @@ public Row getRowWriter() { } /** - * Returns a RowSetter for the given column. If required, a Row object is allocated. You can not - * mix calls with {@code getSetter} and {@code getRowWriter}. After setting each column, you - * must call {@code addRowToTableIndex}, before beginning to write the next row. + * Returns a RowSetter for the given column. If required, a Row object is allocated. You can not mix calls with + * {@code getSetter} and {@code getRowWriter}. After setting each column, you must call {@code addRowToTableIndex}, + * before beginning to write the next row. * * @param name column name. * @return a RowSetter for the given column @@ -171,8 +164,7 @@ public void setFlags(Row.Flags flags) { } /** - * Writes the current row created with the {@code getSetter} call, and advances the current row - * by one. + * Writes the current row created with the {@code getSetter} call, and advances the current row by one. *

    * The row will be made visible in the table after the LiveTableMonitor refresh cycle completes. */ @@ -206,17 +198,16 @@ private void ensureCapacity(int row) { } /** - * This is a convenience function so that you can log an entire row at a time using a Map. You - * must specify all values in the setters map (and can't have any extras). The type of the value - * must be castable to the type of the setter. + * This is a convenience function so that you can log an entire row at a time using a Map. You must specify all + * values in the setters map (and can't have any extras). The type of the value must be castable to the type of the + * setter. * * @param values a map from column name to value for the row to be logged */ @SuppressWarnings("unused") public void logRow(Map values) { if (values.size() != factoryMap.size()) { - throw new RuntimeException( - "Incompatible logRow call: " + values.keySet() + " != " + factoryMap.keySet()); + throw new RuntimeException("Incompatible logRow call: " + values.keySet() + " != " + factoryMap.keySet()); } for (final Map.Entry value : values.entrySet()) { // noinspection unchecked @@ -229,14 +220,13 @@ public void logRow(Map values) { /** * This is a convenience function so that you can log an entire row at a time. * - * @param values an array containing values to be logged, in order of the fields specified by - * the constructor + * @param values an array containing values to be logged, in order of the fields specified by the constructor */ @SuppressWarnings("unused") public void logRow(Object... 
values) { if (values.length != factoryMap.size()) { - throw new RuntimeException("Incompatible logRow call, values length=" + values.length - + " != setters=" + factoryMap.size()); + throw new RuntimeException( + "Incompatible logRow call, values length=" + values.length + " != setters=" + factoryMap.size()); } for (int ii = 0; ii < values.length; ++ii) { // noinspection unchecked @@ -581,7 +571,7 @@ public void set(final Object value) { private class DynamicTableRow implements Row { private int row = lastSetterRow; private final Map setterMap = factoryMap.entrySet().stream() - .collect(Collectors.toMap(Map.Entry::getKey, (e) -> (e.getValue().apply(row)))); + .collect(Collectors.toMap(Map.Entry::getKey, (e) -> (e.getValue().apply(row)))); private Row.Flags flags = Flags.SingleRow; @Override @@ -589,8 +579,7 @@ public RowSetter getSetter(final String name) { final RowSetter rowSetter = setterMap.get(name); if (rowSetter == null) { if (table.getColumnSourceMap().containsKey(name)) { - throw new RuntimeException( - "Column has a constant value, can not get setter " + name); + throw new RuntimeException("Column has a constant value, can not get setter " + name); } else { throw new RuntimeException("Unknown column name " + name); } @@ -617,14 +606,12 @@ public void writeRow() { } row = lastSetterRow++; - // Before this row can be returned to a pool, it needs to ensure that the underlying - // sources + // Before this row can be returned to a pool, it needs to ensure that the underlying sources // are appropriately sized to avoid race conditions. 
ensureCapacity(row); setterMap.values().forEach((x) -> x.setRow(row)); - // The row has been committed during set, we just need to insert the index into the - // table + // The row has been committed during set, we just need to insert the index into the table if (doFlush) { DynamicTableWriter.this.addRangeToTableIndex(lastCommittedRow + 1, row); lastCommittedRow = row; diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/ExternalizableIndexUtils.java b/DB/src/main/java/io/deephaven/db/v2/utils/ExternalizableIndexUtils.java index b509507f243..85bce8313cf 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/ExternalizableIndexUtils.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/ExternalizableIndexUtils.java @@ -34,8 +34,8 @@ public class ExternalizableIndexUtils { * @param out The destination * @param index The index */ - public static void writeExternalCompressedDeltas(@NotNull final DataOutput out, - @NotNull final Index index) throws IOException { + public static void writeExternalCompressedDeltas(@NotNull final DataOutput out, @NotNull final Index index) + throws IOException { long offset = 0; final TShortArrayList shorts = new TShortArrayList(); @@ -54,9 +54,8 @@ public static void writeExternalCompressedDeltas(@NotNull final DataOutput out, out.writeByte(END); } - private static long appendWithOffsetDelta(@NotNull final DataOutput out, - @NotNull final TShortArrayList shorts, - final long offset, final long value, final boolean negate) throws IOException { + private static long appendWithOffsetDelta(@NotNull final DataOutput out, @NotNull final TShortArrayList shorts, + final long offset, final long value, final boolean negate) throws IOException { if (value >= offset + Short.MAX_VALUE) { flushShorts(out, shorts); @@ -73,12 +72,11 @@ private static long appendWithOffsetDelta(@NotNull final DataOutput out, } - private static void flushShorts(@NotNull final DataOutput out, - @NotNull final TShortArrayList shorts) throws IOException { + private static 
void flushShorts(@NotNull final DataOutput out, @NotNull final TShortArrayList shorts) + throws IOException { for (int offset = 0; offset < shorts.size();) { int byteCount = 0; - while (offset + byteCount < shorts.size() - && (shorts.getQuick(offset + byteCount) < Byte.MAX_VALUE + while (offset + byteCount < shorts.size() && (shorts.getQuick(offset + byteCount) < Byte.MAX_VALUE && shorts.getQuick(offset + byteCount) > Byte.MIN_VALUE)) { byteCount++; } @@ -97,10 +95,8 @@ private static void flushShorts(@NotNull final DataOutput out, int shortCount = byteCount; int consecutiveBytes = 0; while (shortCount + consecutiveBytes + offset < shorts.size()) { - final short shortValue = - shorts.getQuick(offset + shortCount + consecutiveBytes); - final boolean requiresShort = - (shortValue >= Byte.MAX_VALUE || shortValue <= Byte.MIN_VALUE); + final short shortValue = shorts.getQuick(offset + shortCount + consecutiveBytes); + final boolean requiresShort = (shortValue >= Byte.MAX_VALUE || shortValue <= Byte.MIN_VALUE); if (!requiresShort) { consecutiveBytes++; } else { @@ -115,7 +111,7 @@ private static void flushShorts(@NotNull final DataOutput out, } // if we have a small number of trailing bytes, tack them onto the end if (consecutiveBytes > 0 && consecutiveBytes <= 3 - && (offset + shortCount + consecutiveBytes == shorts.size())) { + && (offset + shortCount + consecutiveBytes == shorts.size())) { shortCount += consecutiveBytes; } if (shortCount >= 2) { @@ -132,8 +128,8 @@ private static void flushShorts(@NotNull final DataOutput out, shorts.resetQuick(); } - private static void writeValue(@NotNull final DataOutput out, final byte command, - final long value) throws IOException { + private static void writeValue(@NotNull final DataOutput out, final byte command, final long value) + throws IOException { if (value > Integer.MAX_VALUE || value < Integer.MIN_VALUE) { out.writeByte(command | LONG_VALUE); out.writeLong(value); @@ -149,8 +145,7 @@ private static void 
writeValue(@NotNull final DataOutput out, final byte command } } - public static Index readExternalCompressedDelta(@NotNull final DataInput in) - throws IOException { + public static Index readExternalCompressedDelta(@NotNull final DataInput in) throws IOException { long offset = 0; final Index.SequentialBuilder builder = Index.FACTORY.getSequentialBuilder(); @@ -210,8 +205,7 @@ public static Index readExternalCompressedDelta(@NotNull final DataInput in) return builder.getIndex(); } - private static long readValue(@NotNull final DataInput in, final int command) - throws IOException { + private static long readValue(@NotNull final DataInput in, final int command) throws IOException { final long value; switch (command & VALUE_MASK) { case LONG_VALUE: diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/FilteredTableMap.java b/DB/src/main/java/io/deephaven/db/v2/utils/FilteredTableMap.java index 00e377dde6e..bd493a7b4dd 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/FilteredTableMap.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/FilteredTableMap.java @@ -37,11 +37,8 @@ public FilteredTableMap(TableMap source, Predicate filter) { * @param filter the predicate for filtering the keys * @param keyTransformer a function that transforms the source's keys to our output's keys */ - public FilteredTableMap(TableMap source, Predicate filter, - Function keyTransformer) { - super(null, - (source instanceof LocalTableMap) - ? ((LocalTableMap) source).getConstituentDefinition().orElse(null) + public FilteredTableMap(TableMap source, Predicate filter, Function keyTransformer) { + super(null, (source instanceof LocalTableMap) ? 
((LocalTableMap) source).getConstituentDefinition().orElse(null) : null); addParentReference(source); setDependency((NotificationQueue.Dependency) source); @@ -55,8 +52,7 @@ public FilteredTableMap(TableMap source, Predicate filter, final Object newKey = keyTransformer.apply(key); final Table oldTable = put(newKey, source.get(key)); if (oldTable != null) { - throw new IllegalStateException( - "Can not replace a table in a FilteredTableMap, new key=" + newKey + throw new IllegalStateException("Can not replace a table in a FilteredTableMap, new key=" + newKey + ", original key=" + key); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/FreezeBy.java b/DB/src/main/java/io/deephaven/db/v2/utils/FreezeBy.java index d4004d97910..9f2831d4cbd 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/FreezeBy.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/FreezeBy.java @@ -9,18 +9,16 @@ import java.util.Collection; /** - * FreezeBy records the first value for a given key in the output table and ignores subsequent - * changes. + * FreezeBy records the first value for a given key in the output table and ignores subsequent changes. * - * When keys are removed, the corresponding row is removed from the output table. When keys are - * added back on another cycle, the newly added value is frozen in the output. + * When keys are removed, the corresponding row is removed from the output table. When keys are added back on another + * cycle, the newly added value is frozen in the output. * - * Only one row per key is allowed in the input. This is because the operation can not determine - * which is the correct row to freeze in cases where there are multiple rows per key. The freeze - * operation is not sensitive to index changes (e.g., adds, removes, modifies, shifts); whether a - * row is updated is based solely on (1) a key did not exist in the input table at the start of the - * cycle and (2) it now exists in the input table. 
If the key did not exist, a frozen copy is taken. - * If the key did exist, then no modifications occur. + * Only one row per key is allowed in the input. This is because the operation can not determine which is the correct + * row to freeze in cases where there are multiple rows per key. The freeze operation is not sensitive to index changes + * (e.g., adds, removes, modifies, shifts); whether a row is updated is based solely on (1) a key did not exist in the + * input table at the start of the cycle and (2) it now exists in the input table. If the key did not exist, a frozen + * copy is taken. If the key did exist, then no modifications occur. */ public class FreezeBy { private FreezeBy() {} // static use only @@ -29,8 +27,7 @@ private FreezeBy() {} // static use only * Freeze the input table. * *

    - * The input table may only have zero or one rows. The first added row will be frozen until the - * table becomes empty. + * The input table may only have zero or one rows. The first added row will be frozen until the table becomes empty. *

    * * @param input the table to freeze @@ -44,8 +41,8 @@ public static Table freezeBy(Table input) { * Freeze the input table. * *

    - * When a key is added to the table, a copy is added to the output. When a key is removed, the - * row is removed from the output. The input may have only one row per key. + * When a key is added to the table, a copy is added to the output. When a key is removed, the row is removed from + * the output. The input may have only one row per key. *

    * * @param input the table to freeze @@ -61,8 +58,8 @@ public static Table freezeBy(Table input, String... groupByColumns) { * Freeze the input table. * *

    - * When a key is added to the table, a copy is added to the output. When a key is removed, the - * row is removed from the output. The input may have only one row per key. + * When a key is added to the table, a copy is added to the output. When a key is removed, the row is removed from + * the output. The input may have only one row per key. *

    * * @param input the table to freeze @@ -78,8 +75,8 @@ public static Table freezeBy(Table input, Collection groupByColumns) { * Freeze the input table. * *

    - * When a key is added to the table, a copy is added to the output. When a key is removed, the - * row is removed from the output. The input may have only one row per key. + * When a key is added to the table, a copy is added to the output. When a key is removed, the row is removed from + * the output. The input may have only one row per key. *

    * * @param input the table to freeze @@ -89,6 +86,6 @@ public static Table freezeBy(Table input, Collection groupByColumns) { */ public static Table freezeBy(Table input, SelectColumn... groupByColumns) { return ChunkedOperatorAggregationHelper.aggregation(new FreezeByAggregationFactory(), - (QueryTable) input.coalesce(), groupByColumns); + (QueryTable) input.coalesce(), groupByColumns); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/FunctionGeneratedTableFactory.java b/DB/src/main/java/io/deephaven/db/v2/utils/FunctionGeneratedTableFactory.java index bcf319ac560..d96992a6572 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/FunctionGeneratedTableFactory.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/FunctionGeneratedTableFactory.java @@ -19,19 +19,17 @@ /** * An abstract table that represents the result of a function. * - * The table will refresh by regenerating the full values (using the tableGenerator Function passed - * in). The resultant table's values are copied into the result table and appropriate listener - * notifications are fired. + * The table will refresh by regenerating the full values (using the tableGenerator Function passed in). The resultant + * table's values are copied into the result table and appropriate listener notifications are fired. * - * All of the rows in the output table are modified on every tick, even if no actual changes - * occurred. The output table also has a contiguous index. + * All of the rows in the output table are modified on every tick, even if no actual changes occurred. The output table + * also has a contiguous index. * - * The generator function must produce a V2 table, and the table definition must not change between - * invocations. + * The generator function must produce a V2 table, and the table definition must not change between invocations. 
* - * If you are transforming a table, you should generally prefer to use the regular table operations - * as opposed to this factory, because they are capable of performing some operations incrementally. - * However, for small tables this might prove to require less development effort. + * If you are transforming a table, you should generally prefer to use the regular table operations as opposed to this + * factory, because they are capable of performing some operations incrementally. However, for small tables this might + * prove to require less development effort. */ public class FunctionGeneratedTableFactory { private final Function.Nullary

    tableGenerator; @@ -43,60 +41,52 @@ public class FunctionGeneratedTableFactory { Index index; /** - * Create a table that refreshes based on the value of your function, automatically called every - * refreshIntervalMs. + * Create a table that refreshes based on the value of your function, automatically called every refreshIntervalMs. * * @param tableGenerator a function returning a table to copy into the output table - * @return a ticking table (assuming sourceTables have been specified) generated by - * tableGenerator + * @return a ticking table (assuming sourceTables have been specified) generated by tableGenerator */ public static Table create(Function.Nullary
    tableGenerator, int refreshIntervalMs) { return new FunctionGeneratedTableFactory(tableGenerator, refreshIntervalMs).getTable(); } /** - * Create a table that refreshes based on the value of your function, automatically called when - * any of the sourceTables tick. + * Create a table that refreshes based on the value of your function, automatically called when any of the + * sourceTables tick. * * @param tableGenerator a function returning a table to copy into the output table - * @param sourceTables The query engine does not know the details of your function inputs. If - * you are dependent on a ticking table tables in your tableGenerator function, you can - * add it to this list so that the function will be recomputed on each tick. - * @return a ticking table (assuming sourceTables have been specified) generated by - * tableGenerator + * @param sourceTables The query engine does not know the details of your function inputs. If you are dependent on a + * ticking table tables in your tableGenerator function, you can add it to this list so that the function + * will be recomputed on each tick. + * @return a ticking table (assuming sourceTables have been specified) generated by tableGenerator */ - public static Table create(Function.Nullary
    tableGenerator, - DynamicTable... sourceTables) { - final FunctionGeneratedTableFactory factory = - new FunctionGeneratedTableFactory(tableGenerator, 0); + public static Table create(Function.Nullary
    tableGenerator, DynamicTable... sourceTables) { + final FunctionGeneratedTableFactory factory = new FunctionGeneratedTableFactory(tableGenerator, 0); final FunctionBackedTable result = factory.getTable(); for (DynamicTable source : sourceTables) { - source.listenForUpdates( - new BaseTable.ShiftAwareListenerImpl("FunctionGeneratedTable", source, result) { - @Override - public void onUpdate(final Update upstream) { - result.doRefresh(); - } - }); + source.listenForUpdates(new BaseTable.ShiftAwareListenerImpl("FunctionGeneratedTable", source, result) { + @Override + public void onUpdate(final Update upstream) { + result.doRefresh(); + } + }); } return result; } - private FunctionGeneratedTableFactory(final Function.Nullary
    tableGenerator, - final int refreshIntervalMs) { + private FunctionGeneratedTableFactory(final Function.Nullary
    tableGenerator, final int refreshIntervalMs) { this.tableGenerator = tableGenerator; this.refreshIntervalMs = refreshIntervalMs; nextRefresh = System.currentTimeMillis() + this.refreshIntervalMs; Table initialTable = tableGenerator.call(); - for (Map.Entry entry : initialTable.getColumnSourceMap() - .entrySet()) { + for (Map.Entry entry : initialTable.getColumnSourceMap().entrySet()) { ColumnSource columnSource = entry.getValue(); - final ArrayBackedColumnSource memoryColumnSource = ArrayBackedColumnSource - .getMemoryColumnSource(0, columnSource.getType(), columnSource.getComponentType()); + final ArrayBackedColumnSource memoryColumnSource = ArrayBackedColumnSource.getMemoryColumnSource(0, + columnSource.getType(), columnSource.getComponentType()); columns.put(entry.getKey(), memoryColumnSource); writableSources.put(entry.getKey(), memoryColumnSource); } @@ -183,8 +173,7 @@ protected void doRefresh() { if (size > 0) { // no size change, just modified final Index modified = index.clone(); - notifyListeners(Index.FACTORY.getEmptyIndex(), Index.FACTORY.getEmptyIndex(), - modified); + notifyListeners(Index.FACTORY.getEmptyIndex(), Index.FACTORY.getEmptyIndex(), modified); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/GroupedRedirectionIndex.java b/DB/src/main/java/io/deephaven/db/v2/utils/GroupedRedirectionIndex.java index b0b46001902..9e7406f3f91 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/GroupedRedirectionIndex.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/GroupedRedirectionIndex.java @@ -9,13 +9,12 @@ import java.util.Arrays; /** - * The GroupedRedirectionIndex is intended for situations where you have several Indices that - * represent contiguous rows of your output table and a flat output index. + * The GroupedRedirectionIndex is intended for situations where you have several Indices that represent contiguous rows + * of your output table and a flat output index. 
* - * When sorting a table by it's grouping column, instead of using a large contiguous - * RedirectionIndex, we simply store the indices for each group and the accumulated cardinality. We - * then binary search in the accumulated cardinality for a given key; and fetch the corresponding - * offset from that group's Index. + * When sorting a table by it's grouping column, instead of using a large contiguous RedirectionIndex, we simply store + * the indices for each group and the accumulated cardinality. We then binary search in the accumulated cardinality for + * a given key; and fetch the corresponding offset from that group's Index. * * This RedirectionIndex does not support mutation. */ @@ -25,8 +24,8 @@ public class GroupedRedirectionIndex implements RedirectionIndex { */ private final long size; /** - * The accumulated size of each group. Element 0 is the size of the first group. Element 1 is - * the size of the first and second group. The final element will equal size. + * The accumulated size of each group. Element 0 is the size of the first group. Element 1 is the size of the first + * and second group. The final element will equal size. */ private final long[] groupSizes; /** @@ -35,12 +34,11 @@ public class GroupedRedirectionIndex implements RedirectionIndex { private final Index[] groups; /** - * If you are doing repeated get calls, then we must redo the binary search from scratch each - * time. To avoid this behavior, we cache the last slot that you found, so that repeated calls - * to get() skip the binary search if the key is within the same group as your last call. + * If you are doing repeated get calls, then we must redo the binary search from scratch each time. To avoid this + * behavior, we cache the last slot that you found, so that repeated calls to get() skip the binary search if the + * key is within the same group as your last call. 
*/ - private final ThreadLocal threadContext = - ThreadLocal.withInitial(SavedContext::new); + private final ThreadLocal threadContext = ThreadLocal.withInitial(SavedContext::new); public GroupedRedirectionIndex(long size, long[] groupSizes, Index[] groups) { this.size = size; @@ -101,18 +99,16 @@ private static class SavedContext { @Override public void fillChunk(@NotNull FillContext fillContext, - @NotNull WritableLongChunk mappedKeysOut, - @NotNull OrderedKeys keysToMap) { + @NotNull WritableLongChunk mappedKeysOut, @NotNull OrderedKeys keysToMap) { final MutableInt outputPosition = new MutableInt(0); final MutableInt lastSlot = new MutableInt(0); mappedKeysOut.setSize(keysToMap.intSize()); try (final ResettableWritableLongChunk resettableKeys = - ResettableWritableLongChunk.makeResettableChunk()) { + ResettableWritableLongChunk.makeResettableChunk()) { keysToMap.forAllLongRanges((begin, end) -> { while (begin <= end) { // figure out which group we belong to, based on the first key in the range - int slot = Arrays.binarySearch(groupSizes, lastSlot.intValue(), - groupSizes.length, begin); + int slot = Arrays.binarySearch(groupSizes, lastSlot.intValue(), groupSizes.length, begin); if (slot < 0) { slot = ~slot; } else { @@ -122,21 +118,17 @@ public void fillChunk(@NotNull FillContext fillContext, // for the next one we should not search the beginning of the array lastSlot.setValue(slot); - // for the first key, we have an offset of 0; for other keys we need to offset - // the key - final long beginKeyWithOffset = - slot == 0 ? begin : begin - groupSizes[slot - 1]; + // for the first key, we have an offset of 0; for other keys we need to offset the key + final long beginKeyWithOffset = slot == 0 ? 
begin : begin - groupSizes[slot - 1]; final long size = end - begin + 1; final int groupSize; - final WritableLongChunk chunkToFill = - resettableKeys.resetFromTypedChunk(mappedKeysOut, outputPosition.intValue(), - mappedKeysOut.size() - outputPosition.intValue()); - if (beginKeyWithOffset > 0 - || (beginKeyWithOffset + size < groups[slot].size())) { + final WritableLongChunk chunkToFill = resettableKeys.resetFromTypedChunk( + mappedKeysOut, outputPosition.intValue(), mappedKeysOut.size() - outputPosition.intValue()); + if (beginKeyWithOffset > 0 || (beginKeyWithOffset + size < groups[slot].size())) { try (OrderedKeys orderedKeysByPosition = - groups[slot].getOrderedKeysByPosition(beginKeyWithOffset, size)) { + groups[slot].getOrderedKeysByPosition(beginKeyWithOffset, size)) { orderedKeysByPosition.fillKeyIndicesChunk(chunkToFill); groupSize = orderedKeysByPosition.intSize(); } @@ -154,8 +146,7 @@ public void fillChunk(@NotNull FillContext fillContext, @Override public void fillPrevChunk(@NotNull FillContext fillContext, - @NotNull WritableLongChunk mappedKeysOut, - @NotNull OrderedKeys keysToMap) { + @NotNull WritableLongChunk mappedKeysOut, @NotNull OrderedKeys keysToMap) { fillChunk(fillContext, mappedKeysOut, keysToMap); } diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/HashSetBackedTableFactory.java b/DB/src/main/java/io/deephaven/db/v2/utils/HashSetBackedTableFactory.java index 9f8113c331f..c1f35cf63b5 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/HashSetBackedTableFactory.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/HashSetBackedTableFactory.java @@ -29,12 +29,11 @@ import java.util.Map; /** - * An abstract table that represents a hashset of smart keys. Since we are representing a set, there - * we are not defining an order to our output. Whatever order the table happens to end up in, is - * fine. + * An abstract table that represents a hashset of smart keys. 
Since we are representing a set, there we are not defining + * an order to our output. Whatever order the table happens to end up in, is fine. * - * The table will refresh by regenerating the full hashset (using the setGenerator Function passed - * in); and then comparing that to the existing hash set. + * The table will refresh by regenerating the full hashset (using the setGenerator Function passed in); and then + * comparing that to the existing hash set. */ public class HashSetBackedTableFactory { private final Function.Nullary> setGenerator; @@ -50,8 +49,8 @@ public class HashSetBackedTableFactory { private final TLongArrayList freeSet = new TLongArrayList(); Index index; - private HashSetBackedTableFactory(Function.Nullary> setGenerator, - int refreshIntervalMs, String... colNames) { + private HashSetBackedTableFactory(Function.Nullary> setGenerator, int refreshIntervalMs, + String... colNames) { this.setGenerator = setGenerator; this.refreshIntervalMs = refreshIntervalMs; nextRefresh = System.currentTimeMillis() + this.refreshIntervalMs; @@ -66,18 +65,14 @@ private HashSetBackedTableFactory(Function.Nullary> setGenerat /** * Create a ticking table based on a setGenerator. * - * @param setGenerator a function that returns a HashSet of SmartKeys, each SmartKey is a row in - * the output. - * @param refreshIntervalMs how often to refresh the table, if less than or equal to 0 the table - * does not tick. - * @param colNames the column names for the output table, must match the number of elements in - * each SmartKey. + * @param setGenerator a function that returns a HashSet of SmartKeys, each SmartKey is a row in the output. + * @param refreshIntervalMs how often to refresh the table, if less than or equal to 0 the table does not tick. + * @param colNames the column names for the output table, must match the number of elements in each SmartKey. 
* @return a table representing the Set returned by the setGenerator */ - public static Table create(Function.Nullary> setGenerator, - int refreshIntervalMs, String... colNames) { - HashSetBackedTableFactory factory = - new HashSetBackedTableFactory(setGenerator, refreshIntervalMs, colNames); + public static Table create(Function.Nullary> setGenerator, int refreshIntervalMs, + String... colNames) { + HashSetBackedTableFactory factory = new HashSetBackedTableFactory(setGenerator, refreshIntervalMs, colNames); IndexBuilder addedBuilder = Index.FACTORY.getRandomBuilder(); IndexBuilder removedBuilder = Index.FACTORY.getRandomBuilder(); @@ -193,7 +188,7 @@ public void destroy() { } private class SmartKeyWrapperColumnSource extends AbstractColumnSource - implements MutableColumnSourceGetDefaults.ForObject { + implements MutableColumnSourceGetDefaults.ForObject { private final int columnIndex; diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/ImplementedByTreeIndexImpl.java b/DB/src/main/java/io/deephaven/db/v2/utils/ImplementedByTreeIndexImpl.java index 9750b054611..de374d16efc 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/ImplementedByTreeIndexImpl.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/ImplementedByTreeIndexImpl.java @@ -4,8 +4,8 @@ interface ImplementedByTreeIndexImpl { /** * DO NOT USE! * - * This method exists for use by internal index implementations when it is known that the Index - * type must own a {@link TreeIndexImpl}. + * This method exists for use by internal index implementations when it is known that the Index type must own a + * {@link TreeIndexImpl}. 
* * @return the backing TreeIndexImpl */ diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/Index.java b/DB/src/main/java/io/deephaven/db/v2/utils/Index.java index afebb8588e2..51e11cc97d4 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/Index.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/Index.java @@ -29,16 +29,15 @@ /** * A set of sorted long keys between 0 and Long.MAX_VALUE */ -public interface Index - extends ReadOnlyIndex, LogOutputAppendable, Iterable, LongSizedDataStructure { - boolean USE_PRIORITY_QUEUE_RANDOM_BUILDER = Configuration.getInstance() - .getBooleanWithDefault("Index.usePriorityQueueRandomBuilder", true); +public interface Index extends ReadOnlyIndex, LogOutputAppendable, Iterable, LongSizedDataStructure { + boolean USE_PRIORITY_QUEUE_RANDOM_BUILDER = + Configuration.getInstance().getBooleanWithDefault("Index.usePriorityQueueRandomBuilder", true); boolean VALIDATE_COALESCED_UPDATES = - Configuration.getInstance().getBooleanWithDefault("Index.validateCoalescedUpdates", true); + Configuration.getInstance().getBooleanWithDefault("Index.validateCoalescedUpdates", true); - boolean BAD_RANGES_AS_ERROR = Configuration.getInstance() - .getBooleanForClassWithDefault(Index.class, "badRangeAsError", true); + boolean BAD_RANGES_AS_ERROR = + Configuration.getInstance().getBooleanForClassWithDefault(Index.class, "badRangeAsError", true); /** * Add a single key to this index if it's not already present. @@ -56,8 +55,7 @@ public interface Index void insertRange(long startKey, long endKey); /** - * Add all of the (ordered) keys in a slice of {@code keys} to this index if they are not - * already present. + * Add all of the (ordered) keys in a slice of {@code keys} to this index if they are not already present. 
* * @param keys The {@link LongChunk} of {@link OrderedKeyIndices} to insert * @param offset The offset in {@code keys} to begin inserting keys from @@ -88,8 +86,7 @@ public interface Index void removeRange(long startKey, long endKey); /** - * Remove all of the (ordered) keys in a slice of {@code keys} from this index if they are - * present. + * Remove all of the (ordered) keys in a slice of {@code keys} from this index if they are present. * * @param keys The {@link LongChunk} of {@link OrderedKeyIndices} to remove * @param offset The offset in {@code keys} to begin removing keys from @@ -105,8 +102,8 @@ public interface Index void remove(ReadOnlyIndex removed); /** - * Simultaneously adds the keys from the first index and removes the keys from the second one. - * API assumption: the intersection of added and removed is empty. + * Simultaneously adds the keys from the first index and removes the keys from the second one. API assumption: the + * intersection of added and removed is empty. */ void update(ReadOnlyIndex added, ReadOnlyIndex removed); @@ -125,8 +122,7 @@ default Index extract(@NotNull Index other) { /** * Modifies the index by removing any keys not in the indexToIntersect argument. * - * @param indexToIntersect an index with the keys to retain; any other keys not in - * indexToIntersect will be removed. + * @param indexToIntersect an index with the keys to retain; any other keys not in indexToIntersect will be removed. */ void retain(ReadOnlyIndex indexToIntersect); @@ -143,8 +139,7 @@ default Index extract(@NotNull Index other) { void shiftInPlace(long shiftAmount); /** - * For each key in the provided index, shift it by shiftAmount and insert it in the current - * index. + * For each key in the provided index, shift it by shiftAmount and insert it in the current index. * * @param shiftAmount the amount to add to each key in the index argument before insertion. * @param other the index with the keys to shift and insert. 
@@ -159,8 +154,8 @@ default Index extract(@NotNull Index other) { /** * Initializes our previous value from the current value. * - * This call is used by operations that manipulate an Index while constructing it, but need to - * set the state at the end of the initial operation to the current state. + * This call is used by operations that manipulate an Index while constructing it, but need to set the state at the + * end of the initial operation to the current state. * * Calling this in other circumstances will yield undefined results. */ @@ -171,10 +166,10 @@ interface RandomBuilder extends IndexBuilder { interface SequentialBuilder extends TLongProcedure, LongRangeConsumer { /** - * No obligation to call, but if called, (a) should be called before providing any values, - * and (b) no value should be provided outside of the domain. Implementations may be able to - * use this information to improve memory utilization. Either of the arguments may be given - * as Index.NULL_KEY, indicating that the respective value is not known. + * No obligation to call, but if called, (a) should be called before providing any values, and (b) no value + * should be provided outside of the domain. Implementations may be able to use this information to improve + * memory utilization. Either of the arguments may be given as Index.NULL_KEY, indicating that the respective + * value is not known. * * @param minKey the minimum key to be provided, or Index.NULL_KEY if not known. * @param maxKey the maximum key to be provided, or Index.NULL_KEY if not known. @@ -273,8 +268,8 @@ interface Factory { Index getIndexByRange(long firstKey, long lastKey); /** - * Get a flat {@link Index} containing the range [0, size), or an {@link #getEmptyIndex() - * empty index} if the specified size is <= 0. + * Get a flat {@link Index} containing the range [0, size), or an {@link #getEmptyIndex() empty index} if the + * specified size is <= 0. 
* * @param size The size of the index to create * @return A flat index containing the keys [0, size) or an empty index if the size is <= 0 @@ -391,8 +386,8 @@ public LegacyIndexUpdateCoalescer() { } /** - * The class assumes ownership of one reference to the indices passed; the caller should - * ensure to Index.clone() them before passing them if they are shared. + * The class assumes ownership of one reference to the indices passed; the caller should ensure to Index.clone() + * them before passing them if they are shared. */ public LegacyIndexUpdateCoalescer(Index added, Index removed, Index modified) { this.added = added; @@ -400,43 +395,40 @@ public LegacyIndexUpdateCoalescer(Index added, Index removed, Index modified) { this.modified = modified; } - public void update(final Index addedOnUpdate, final Index removedOnUpdate, - final Index modifiedOnUpdate) { + public void update(final Index addedOnUpdate, final Index removedOnUpdate, final Index modifiedOnUpdate) { // Note: extract removes matching ranges from the source index try (final Index addedBack = this.removed.extract(addedOnUpdate); - final Index actuallyAdded = addedOnUpdate.minus(addedBack)) { + final Index actuallyAdded = addedOnUpdate.minus(addedBack)) { this.added.insert(actuallyAdded); this.modified.insert(addedBack); } - // Things we've added, but are now removing. Do not aggregate these as removed since - // client never saw them. + // Things we've added, but are now removing. Do not aggregate these as removed since client never saw them. try (final Index additionsRemoved = this.added.extract(removedOnUpdate); - final Index actuallyRemoved = removedOnUpdate.minus(additionsRemoved)) { + final Index actuallyRemoved = removedOnUpdate.minus(additionsRemoved)) { this.removed.insert(actuallyRemoved); } // If we've removed it, it should no longer be modified. this.modified.remove(removedOnUpdate); - // And anything modified, should be added to the modified set; unless we've previously - // added it. 
+ // And anything modified, should be added to the modified set; unless we've previously added it. try (final Index actuallyModified = modifiedOnUpdate.minus(this.added)) { this.modified.insert(actuallyModified); } - if (VALIDATE_COALESCED_UPDATES && (this.added.overlaps(this.modified) - || this.added.overlaps(this.removed) || this.removed.overlaps(modified))) { + if (VALIDATE_COALESCED_UPDATES && (this.added.overlaps(this.modified) || this.added.overlaps(this.removed) + || this.removed.overlaps(modified))) { final String assertionMessage = "Coalesced overlaps detected: " + - "added=" + added.toString() + - ", removed=" + removed.toString() + - ", modified=" + modified.toString() + - ", addedOnUpdate=" + addedOnUpdate.toString() + - ", removedOnUpdate=" + removedOnUpdate.toString() + - ", modifiedOnUpdate=" + modifiedOnUpdate.toString() + - "addedIntersectRemoved=" + added.intersect(removed).toString() + - "addedIntersectModified=" + added.intersect(modified).toString() + - "removedIntersectModified=" + removed.intersect(modified).toString(); + "added=" + added.toString() + + ", removed=" + removed.toString() + + ", modified=" + modified.toString() + + ", addedOnUpdate=" + addedOnUpdate.toString() + + ", removedOnUpdate=" + removedOnUpdate.toString() + + ", modifiedOnUpdate=" + modifiedOnUpdate.toString() + + "addedIntersectRemoved=" + added.intersect(removed).toString() + + "addedIntersectModified=" + added.intersect(modified).toString() + + "removedIntersectModified=" + removed.intersect(modified).toString(); Assert.assertion(false, assertionMessage); } } @@ -473,8 +465,7 @@ class IndexUpdateCoalescer { public IndexShiftData shifted; public ModifiedColumnSet modifiedColumnSet; - // This is an index that represents which keys still exist in prevSpace for the agg update. - // It is necessary to + // This is an index that represents which keys still exist in prevSpace for the agg update. 
It is necessary to // keep to ensure we make the correct selections when shift destinations overlap. private final Index index; @@ -496,8 +487,7 @@ public IndexUpdateCoalescer(final Index index, final ShiftAwareListener.Update u } public ShiftAwareListener.Update coalesce() { - return new ShiftAwareListener.Update(added, removed, modified, shifted, - modifiedColumnSet); + return new ShiftAwareListener.Update(added, removed, modified, shifted, modifiedColumnSet); } public IndexUpdateCoalescer update(final ShiftAwareListener.Update update) { @@ -558,8 +548,7 @@ private void updateShifts(final IndexShiftData myShifts) { final Index.SearchIterator indexIter = index.searchIterator(); final IndexShiftData.Builder newShifts = new IndexShiftData.Builder(); - // Appends shifts to our builder from watermarkKey to supplied key adding extra delta if - // needed. + // Appends shifts to our builder from watermarkKey to supplied key adding extra delta if needed. final MutableInt outerIdx = new MutableInt(0); final MutableLong watermarkKey = new MutableLong(0); final BiConsumer fixShiftIfOverlap = (end, ttlDelta) -> { @@ -567,7 +556,7 @@ private void updateShifts(final IndexShiftData myShifts) { if (ttlDelta < 0) { final Index.SearchIterator revIter = index.reverseIterator(); if (revIter.advance(watermarkKey.longValue() - 1) - && revIter.currentValue() > newShifts.lastShiftEnd()) { + && revIter.currentValue() > newShifts.lastShiftEnd()) { minBegin = Math.max(minBegin, revIter.currentValue() + 1 - ttlDelta); } } @@ -579,8 +568,7 @@ private void updateShifts(final IndexShiftData myShifts) { // this means the previous shift overlaps this shift; let's figure out who wins final long contestBegin = watermarkKey.longValue(); final boolean currentValid = indexIter.advance(contestBegin); - if (currentValid && indexIter.currentValue() < minBegin - && indexIter.currentValue() <= end) { + if (currentValid && indexIter.currentValue() < minBegin && indexIter.currentValue() <= end) { 
newShifts.limitPreviousShiftFor(indexIter.currentValue(), ttlDelta); watermarkKey.setValue(indexIter.currentValue()); } else { @@ -589,26 +577,23 @@ private void updateShifts(final IndexShiftData myShifts) { }; final BiConsumer consumeUntilWithExtraDelta = (endRange, extraDelta) -> { - while (outerIdx.intValue() < shifted.size() - && watermarkKey.longValue() <= endRange) { - final long outerBegin = Math.max(watermarkKey.longValue(), - shifted.getBeginRange(outerIdx.intValue())); + while (outerIdx.intValue() < shifted.size() && watermarkKey.longValue() <= endRange) { + final long outerBegin = + Math.max(watermarkKey.longValue(), shifted.getBeginRange(outerIdx.intValue())); final long outerEnd = shifted.getEndRange(outerIdx.intValue()); final long outerDelta = shifted.getShiftDelta(outerIdx.intValue()); // Shift before the outer shift. - final long headerEnd = - Math.min(endRange, outerBegin - 1 + (outerDelta < 0 ? outerDelta : 0)); + final long headerEnd = Math.min(endRange, outerBegin - 1 + (outerDelta < 0 ? outerDelta : 0)); if (watermarkKey.longValue() <= headerEnd && extraDelta != 0) { fixShiftIfOverlap.accept(headerEnd, extraDelta); newShifts.shiftRange(watermarkKey.longValue(), headerEnd, extraDelta); } - final long maxWatermark = endRange == Long.MAX_VALUE ? outerBegin - : Math.min(endRange + 1, outerBegin); + final long maxWatermark = + endRange == Long.MAX_VALUE ? outerBegin : Math.min(endRange + 1, outerBegin); watermarkKey.setValue(Math.max(watermarkKey.longValue(), maxWatermark)); - // Does endRange occur before this outerIdx shift? If so pop-out we need to - // change extraDelta. + // Does endRange occur before this outerIdx shift? If so pop-out we need to change extraDelta. 
if (watermarkKey.longValue() > endRange) { return; } @@ -626,8 +611,7 @@ private void updateShifts(final IndexShiftData myShifts) { } } - if (outerIdx.intValue() == shifted.size() && watermarkKey.longValue() <= endRange - && extraDelta != 0) { + if (outerIdx.intValue() == shifted.size() && watermarkKey.longValue() <= endRange && extraDelta != 0) { fixShiftIfOverlap.accept(endRange, extraDelta); newShifts.shiftRange(watermarkKey.longValue(), endRange, extraDelta); } @@ -663,30 +647,25 @@ public ShiftInversionHelper(final IndexShiftData shifted) { private void advanceDestShiftIdx(long destKey) { Assert.geq(destKey, "destKey", 0); destShiftIdx = (int) binarySearch(destShiftIdx, shifted.size(), innerShiftIdx -> { - long destEnd = shifted.getEndRange((int) innerShiftIdx) - + shifted.getShiftDelta((int) innerShiftIdx); + long destEnd = shifted.getEndRange((int) innerShiftIdx) + shifted.getShiftDelta((int) innerShiftIdx); // due to destKey's expected range, we know this subtraction will not overflow return destEnd - destKey; }); } - // Converts post-keyspace key to pre-keyspace key. It expects to be invoked in ascending key - // order. + // Converts post-keyspace key to pre-keyspace key. It expects to be invoked in ascending key order. 
public long mapToPrevKeyspace(long key, boolean isEnd) { advanceDestShiftIdx(key); final long retval; final int idx = destShiftIdx; - if (idx < shifted.size() - && shifted.getBeginRange(idx) + shifted.getShiftDelta(idx) <= key) { + if (idx < shifted.size() && shifted.getBeginRange(idx) + shifted.getShiftDelta(idx) <= key) { // inside of a destination shift; this is easy to map to prev retval = key - shifted.getShiftDelta(idx); - } else if (idx < shifted.size() && shifted.getShiftDelta(idx) > 0 - && shifted.getBeginRange(idx) <= key) { + } else if (idx < shifted.size() && shifted.getShiftDelta(idx) > 0 && shifted.getBeginRange(idx) <= key) { // our key is left of the destination but to right of the shift start retval = shifted.getBeginRange(idx) - (isEnd ? 1 : 0); - } else if (idx > 0 && shifted.getShiftDelta(idx - 1) < 0 - && key <= shifted.getEndRange(idx - 1)) { + } else if (idx > 0 && shifted.getShiftDelta(idx - 1) < 0 && key <= shifted.getEndRange(idx - 1)) { // our key is right of the destination but left of the shift start retval = shifted.getEndRange(idx - 1) + (isEnd ? 
0 : 1); } else { diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/IndexCounts.java b/DB/src/main/java/io/deephaven/db/v2/utils/IndexCounts.java index 927d30a2d28..e6756609079 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/IndexCounts.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/IndexCounts.java @@ -53,74 +53,74 @@ public final class IndexCounts { public IndexCounts(final String prefix) { emptyCount = - new IntCounterMetric(prefix + "EmptyCount"); + new IntCounterMetric(prefix + "EmptyCount"); shortShortSingleRangeCount = - new IntCounterMetric(prefix + "ShortShortSingleRangeCount"); + new IntCounterMetric(prefix + "ShortShortSingleRangeCount"); longLongSingleRangeCount = - new IntCounterMetric(prefix + "LongLongSingleRangeCount"); + new IntCounterMetric(prefix + "LongLongSingleRangeCount"); intLongSingleRangeCount = - new IntCounterMetric(prefix + "IntLongSingleRangeCount"); + new IntCounterMetric(prefix + "IntLongSingleRangeCount"); longIntSingleRangeCount = - new IntCounterMetric(prefix + "LongIntSingleRangeCount"); + new IntCounterMetric(prefix + "LongIntSingleRangeCount"); sortedRangesCount = - new IntCounterMetric(prefix + "SortedRangesCount"); + new IntCounterMetric(prefix + "SortedRangesCount"); sortedRangesIndexCardinality = - new LongCounterLog2HistogramMetric(prefix + "SortedRangesIndexCardinality"); + new LongCounterLog2HistogramMetric(prefix + "SortedRangesIndexCardinality"); sortedRangesIndexBytesUnused = - new LongCounterLog2HistogramMetric(prefix + "SortedRangesIndexBytesUnused"); + new LongCounterLog2HistogramMetric(prefix + "SortedRangesIndexBytesUnused"); sortedRangesIndexBytesAllocated = - new LongCounterLog2HistogramMetric(prefix + "SortedRangesIndexBytesAllocated"); + new LongCounterLog2HistogramMetric(prefix + "SortedRangesIndexBytesAllocated"); rspCount = - new IntCounterMetric(prefix + "RspCount"); + new IntCounterMetric(prefix + "RspCount"); rspIndexCardinality = - new LongCounterLog2HistogramMetric(prefix + 
"RspIndexCardinality"); + new LongCounterLog2HistogramMetric(prefix + "RspIndexCardinality"); rspParallelArraysSizeUsed = - new LongCounterLog2HistogramMetric(prefix + "RspParallelArraysSizeUsed"); + new LongCounterLog2HistogramMetric(prefix + "RspParallelArraysSizeUsed"); rspParallelArraysSizeUnused = - new LongCounterLog2HistogramMetric(prefix + "RspParallelArraysSizeUnused"); + new LongCounterLog2HistogramMetric(prefix + "RspParallelArraysSizeUnused"); rspArrayContainersBytesUnused = - new LongCounterLog2HistogramMetric(prefix + "RspArrayContainersBytesUnused"); + new LongCounterLog2HistogramMetric(prefix + "RspArrayContainersBytesUnused"); rspArrayContainersBytesAllocated = - new LongCounterLog2HistogramMetric(prefix + "RspArrayContainersBytesAllocated"); + new LongCounterLog2HistogramMetric(prefix + "RspArrayContainersBytesAllocated"); rspArrayContainersCardinality = - new LongCounterLog2HistogramMetric(prefix + "RspArrayContainersCardinality"); + new LongCounterLog2HistogramMetric(prefix + "RspArrayContainersCardinality"); rspArrayContainersCount = - new LongCounterMetric(prefix + "RspArrayContainersCount"); + new LongCounterMetric(prefix + "RspArrayContainersCount"); rspBitmapContainersBytesUnused = - new LongCounterLog2HistogramMetric(prefix + "RspBitmapContainersBytesUnused"); + new LongCounterLog2HistogramMetric(prefix + "RspBitmapContainersBytesUnused"); rspBitmapContainersBytesAllocated = - new LongCounterLog2HistogramMetric(prefix + "RspBitmapContainersBytesAllocated"); + new LongCounterLog2HistogramMetric(prefix + "RspBitmapContainersBytesAllocated"); rspBitmapContainersCardinality = - new LongCounterLog2HistogramMetric(prefix + "RspBitmapContainersCardinality"); + new LongCounterLog2HistogramMetric(prefix + "RspBitmapContainersCardinality"); rspBitmapContainersCount = - new LongCounterMetric(prefix + "RspBitmapContainersCount"); + new LongCounterMetric(prefix + "RspBitmapContainersCount"); rspRunContainersBytesUnused = - new 
LongCounterLog2HistogramMetric(prefix + "RspRunContainersBytesUnused"); + new LongCounterLog2HistogramMetric(prefix + "RspRunContainersBytesUnused"); rspRunContainersBytesAllocated = - new LongCounterLog2HistogramMetric(prefix + "RspRunContainersBytesAllocated"); + new LongCounterLog2HistogramMetric(prefix + "RspRunContainersBytesAllocated"); rspRunContainersCardinality = - new LongCounterLog2HistogramMetric(prefix + "RspRunContainersCardinality"); + new LongCounterLog2HistogramMetric(prefix + "RspRunContainersCardinality"); rspRunContainersCount = - new LongCounterMetric(prefix + "RspRunContainersCount"); + new LongCounterMetric(prefix + "RspRunContainersCount"); rspRunContainersRunsCount = - new LongCounterLog2HistogramMetric(prefix + "RspRunContainersRunCount"); + new LongCounterLog2HistogramMetric(prefix + "RspRunContainersRunCount"); rspSingleRangeContainersCount = - new LongCounterMetric(prefix + "RspSingleRangeContainersCount"); + new LongCounterMetric(prefix + "RspSingleRangeContainersCount"); rspSingleRangeContainerCardinality = - new LongCounterLog2HistogramMetric(prefix + "RspSingleRangeContainerCardinality"); + new LongCounterLog2HistogramMetric(prefix + "RspSingleRangeContainerCardinality"); rspTwoValuesContainerCount = - new LongCounterMetric(prefix + "RspTwoValuesContainerCount"); + new LongCounterMetric(prefix + "RspTwoValuesContainerCount"); rspSingletonContainersCount = - new LongCounterMetric(prefix + "RspSingletonContainersCount"); + new LongCounterMetric(prefix + "RspSingletonContainersCount"); } public void sampleRsp(final RspBitmap rb) { @@ -130,25 +130,25 @@ public void sampleRsp(final RspBitmap rb) { rspIndexCardinality.sample(rb.getCardinality()); rspCount.sample(1); rb.sampleMetrics( - rspParallelArraysSizeUsed, - rspParallelArraysSizeUnused, - rspArrayContainersBytesAllocated, - rspArrayContainersBytesUnused, - rspArrayContainersCardinality, - rspArrayContainersCount, - rspBitmapContainersBytesAllocated, - rspBitmapContainersBytesUnused, 
- rspBitmapContainersCardinality, - rspBitmapContainersCount, - rspRunContainersBytesAllocated, - rspRunContainersBytesUnused, - rspRunContainersCardinality, - rspRunContainersCount, - rspRunContainersRunsCount, - rspSingleRangeContainersCount, - rspSingleRangeContainerCardinality, - rspSingletonContainersCount, - rspTwoValuesContainerCount); + rspParallelArraysSizeUsed, + rspParallelArraysSizeUnused, + rspArrayContainersBytesAllocated, + rspArrayContainersBytesUnused, + rspArrayContainersCardinality, + rspArrayContainersCount, + rspBitmapContainersBytesAllocated, + rspBitmapContainersBytesUnused, + rspBitmapContainersCardinality, + rspBitmapContainersCount, + rspRunContainersBytesAllocated, + rspRunContainersBytesUnused, + rspRunContainersCardinality, + rspRunContainersCount, + rspRunContainersRunsCount, + rspSingleRangeContainersCount, + rspSingleRangeContainerCardinality, + rspSingletonContainersCount, + rspTwoValuesContainerCount); } public void sampleSingleRange(final SingleRange sr) { diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/IndexPeformanceTest.java b/DB/src/main/java/io/deephaven/db/v2/utils/IndexPeformanceTest.java index d41a7712d73..fcfc2df9b8c 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/IndexPeformanceTest.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/IndexPeformanceTest.java @@ -17,8 +17,8 @@ public static void main(String[] args) { for (Index.Iterator it = index.iterator(); it.hasNext();) { sum += it.nextLong(); } - System.out.println("Range iteration per item time = " - + (System.currentTimeMillis() - start) / (SIZE / 1000000) + "ns " + sum); + System.out.println("Range iteration per item time = " + (System.currentTimeMillis() - start) / (SIZE / 1000000) + + "ns " + sum); Random random = new Random(0); Index.RandomBuilder indexBuilder = Index.FACTORY.getRandomBuilder(); long runningValue = 0;// Math.abs(random.nextLong()); @@ -40,16 +40,16 @@ public static void main(String[] args) { indexBuilder.addKey(runningValue); } } 
- System.out.println("Random construction per item time = " - + (System.currentTimeMillis() - start) / (SIZE / 1000000) + - "ns " + sum + " " + runningValue); + System.out.println( + "Random construction per item time = " + (System.currentTimeMillis() - start) / (SIZE / 1000000) + + "ns " + sum + " " + runningValue); index = indexBuilder.getIndex(); sum = 0; start = System.currentTimeMillis(); for (Index.Iterator it = index.iterator(); it.hasNext();) { sum += it.nextLong(); } - System.out.println("Random iteration per item time = " - + (System.currentTimeMillis() - start) / (SIZE / 1000000) + "ns " + sum); + System.out.println("Random iteration per item time = " + (System.currentTimeMillis() - start) / (SIZE / 1000000) + + "ns " + sum); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/IndexShiftData.java b/DB/src/main/java/io/deephaven/db/v2/utils/IndexShiftData.java index dcfaf5db54b..bea310bfa29 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/IndexShiftData.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/IndexShiftData.java @@ -19,11 +19,10 @@ import java.io.Serializable; /** - * A set of sorted shifts. To apply shifts without losing data, use - * {@link IndexShiftData#apply(Callback)}. The callback will be invoked with shifts in an order that - * will preserve data when applied immediately using memmove semantics. Internally the shifts are - * ordered by rangeStart. The {@link IndexShiftData.Builder} will verify that no two ranges overlap - * before or after shifting and assert that the constructed {@code IndexShiftData} will be valid. + * A set of sorted shifts. To apply shifts without losing data, use {@link IndexShiftData#apply(Callback)}. The callback + * will be invoked with shifts in an order that will preserve data when applied immediately using memmove semantics. + * Internally the shifts are ordered by rangeStart. 
The {@link IndexShiftData.Builder} will verify that no two ranges + * overlap before or after shifting and assert that the constructed {@code IndexShiftData} will be valid. */ public final class IndexShiftData implements Serializable, LogOutputAppendable { @@ -33,14 +32,14 @@ public final class IndexShiftData implements Serializable, LogOutputAppendable { private static final int NUM_ATTR = 3; /** - * {@code payload} is organized into triplets of (rangeStart, rangeEnd, shiftDelta). Triplets - * are ordered by rangeStart. This is not the order that will apply shifts without losing data. + * {@code payload} is organized into triplets of (rangeStart, rangeEnd, shiftDelta). Triplets are ordered by + * rangeStart. This is not the order that will apply shifts without losing data. */ private final TLongList payload; /** - * {@code polaritySwapIndices} are indices into {@code payload} where the previous and current - * range's {@code shiftDelta} swap between positive and negative shifts. + * {@code polaritySwapIndices} are indices into {@code payload} where the previous and current range's + * {@code shiftDelta} swap between positive and negative shifts. */ private final TIntList polaritySwapIndices; @@ -132,8 +131,7 @@ public final long getShiftDelta(int idx) { public final void validate() { int polarOffset = 0; for (int idx = 0; idx < size(); ++idx) { - Assert.leq(getBeginRange(idx), "getBeginRange(idx)", getEndRange(idx), - "getEndRange(idx)"); + Assert.leq(getBeginRange(idx), "getBeginRange(idx)", getEndRange(idx), "getEndRange(idx)"); Assert.neqZero(getShiftDelta(idx), "getShiftDelta(idx)"); if (idx == 0) { @@ -142,7 +140,7 @@ public final void validate() { // Check no overlap in original key space. Assert.lt(getEndRange(idx - 1), "getEndRange(idx - 1)", - getBeginRange(idx), "getBeginRange(idx)"); + getBeginRange(idx), "getBeginRange(idx)"); // Check no overlap in new key space. 
final long newPrevEnd = getEndRange(idx - 1) + getShiftDelta(idx - 1); @@ -153,10 +151,8 @@ public final void validate() { final int prevShiftSign = getShiftDelta(idx - 1) < 0 ? -1 : 1; final int currShiftSign = getShiftDelta(idx) < 0 ? -1 : 1; if (prevShiftSign != currShiftSign) { - Assert.gt(polaritySwapIndices.size(), "polaritySwapIndices.size()", polarOffset, - "polarOffset"); - Assert.eq(polaritySwapIndices.get(polarOffset), - "polaritySwapIndices.get(polarOffset)", idx, "idx"); + Assert.gt(polaritySwapIndices.size(), "polaritySwapIndices.size()", polarOffset, "polarOffset"); + Assert.eq(polaritySwapIndices.get(polarOffset), "polaritySwapIndices.get(polarOffset)", idx, "idx"); ++polarOffset; } } @@ -197,9 +193,9 @@ public LogOutput append(final LogOutput logOutput, final int maxShifts) { for (int idx = 0; idx < size(); ++idx) { final long shift = getShiftDelta(idx); logOutput.append(isFirst ? "" : ",") - .append("[").append(getBeginRange(idx)) - .append(",").append(getEndRange(idx)) - .append(shift < 0 ? "]" : "]+").append(shift); + .append("[").append(getBeginRange(idx)) + .append(",").append(getEndRange(idx)) + .append(shift < 0 ? "]" : "]+").append(shift); isFirst = false; if (++count >= maxShifts) { logOutput.append(",..."); @@ -216,8 +212,7 @@ public boolean equals(final Object obj) { return false; } final IndexShiftData shiftData = (IndexShiftData) obj; - // Note that comparing payload is sufficient. The polarity indices are precomputed from the - // payload. + // Note that comparing payload is sufficient. The polarity indices are precomputed from the payload. return shiftData.payload.equals(payload); } @@ -239,8 +234,7 @@ public interface Callback { } /** - * Apply all shifts in a memmove-semantics-safe ordering through the provided - * {@code shiftCallback}. + * Apply all shifts in a memmove-semantics-safe ordering through the provided {@code shiftCallback}. * * Use this to move from pre-shift keyspace to post-shift keyspace. 
* @@ -263,8 +257,7 @@ public void apply(final Callback shiftCallback) { } /** - * Apply all shifts in reverse in a memmove-semantics-safe ordering through the provided - * {@code shiftCallback}. + * Apply all shifts in reverse in a memmove-semantics-safe ordering through the provided {@code shiftCallback}. * * Use this to move from post-shift keyspace to pre-shift keyspace. * @@ -288,8 +281,7 @@ public void unapply(final Callback shiftCallback) { } /** - * Apply all shifts to the provided index. Moves index from pre-shift keyspace to post-shift - * keyspace. + * Apply all shifts to the provided index. Moves index from pre-shift keyspace to post-shift keyspace. * * @param index the index to shift */ @@ -314,15 +306,14 @@ public void apply(final Index index) { } try (final Index remove = toRemove.getIndex(); - final Index insert = toInsert.getIndex()) { + final Index insert = toInsert.getIndex()) { index.remove(remove); index.insert(insert); } } /** - * Apply a shift to the provided index. Moves index from pre-shift keyspace to post-shift - * keyspace. + * Apply a shift to the provided index. Moves index from pre-shift keyspace to post-shift keyspace. * * @param index The index to apply the shift to * @param beginRange start of range (inclusive) @@ -330,8 +321,8 @@ public void apply(final Index index) { * @param shiftDelta amount range has moved by * @return Whether there was any overlap found to shift */ - public static boolean applyShift(@NotNull final Index index, final long beginRange, - final long endRange, final long shiftDelta) { + public static boolean applyShift(@NotNull final Index index, final long beginRange, final long endRange, + final long shiftDelta) { try (final Index toShift = index.subindexByKey(beginRange, endRange)) { if (toShift.empty()) { return false; @@ -344,8 +335,7 @@ public static boolean applyShift(@NotNull final Index index, final long beginRan } /** - * Unapply all shifts to the provided index. 
Moves index from post-shift keyspace to pre-shift - * keyspace. + * Unapply all shifts to the provided index. Moves index from post-shift keyspace to pre-shift keyspace. * * @param index the index to shift */ @@ -370,31 +360,28 @@ public void unapply(final Index index) { } try (final Index remove = toRemove.getIndex(); - final Index insert = toInsert.getIndex()) { + final Index insert = toInsert.getIndex()) { index.remove(remove); index.insert(insert); } } /** - * Unapply all shifts to the provided index. Moves index from post-shift keyspace to pre-shift - * keyspace. + * Unapply all shifts to the provided index. Moves index from post-shift keyspace to pre-shift keyspace. * * @param index the index to shift - * @param offset an additional offset to apply to all shifts (such as when applying to a wrapped - * table) + * @param offset an additional offset to apply to all shifts (such as when applying to a wrapped table) */ public void unapply(final Index index, final long offset) { - // NB: This is an unapply callback, and beginRange, endRange, and shiftDelta have been - // adjusted so that this is a reversed shift, + // NB: This is an unapply callback, and beginRange, endRange, and shiftDelta have been adjusted so that this is + // a reversed shift, // hence we use the applyShift helper. - unapply((beginRange, endRange, shiftDelta) -> applyShift(index, beginRange + offset, - endRange + offset, shiftDelta)); + unapply((beginRange, endRange, shiftDelta) -> applyShift(index, beginRange + offset, endRange + offset, + shiftDelta)); } /** - * Unapply a shift to the provided index. Moves index from post-shift keyspace to pre-shift - * keyspace. + * Unapply a shift to the provided index. Moves index from post-shift keyspace to pre-shift keyspace. 
* * @param index The index to apply the shift to * @param beginRange start of range (inclusive) @@ -402,10 +389,9 @@ public void unapply(final Index index, final long offset) { * @param shiftDelta amount range has moved by * @return Whether there was any overlap found to shift */ - public static boolean unapplyShift(@NotNull final Index index, final long beginRange, - final long endRange, final long shiftDelta) { - try (final Index toShift = - index.subindexByKey(beginRange + shiftDelta, endRange + shiftDelta)) { + public static boolean unapplyShift(@NotNull final Index index, final long beginRange, final long endRange, + final long shiftDelta) { + try (final Index toShift = index.subindexByKey(beginRange + shiftDelta, endRange + shiftDelta)) { if (toShift.empty()) { return false; } @@ -427,8 +413,7 @@ public interface SingleElementShiftCallback { void shift(long key, long shiftDelta); } - public void forAllInIndex(final ReadOnlyIndex filterIndex, - final SingleElementShiftCallback callback) { + public void forAllInIndex(final ReadOnlyIndex filterIndex, final SingleElementShiftCallback callback) { boolean hasReverseShift = false; ReadOnlyIndex.SearchIterator it = filterIndex.reverseIterator(); FORWARD_SHIFT: for (int ii = size() - 1; ii >= 0; --ii) { @@ -605,8 +590,8 @@ public IndexShiftData intersect(final Index index) { } /** - * Helper utility to build instances of {@link IndexShiftData} with internally consistent data. - * No other ranges should be added to this builder after {@link Builder#build} is invoked. + * Helper utility to build instances of {@link IndexShiftData} with internally consistent data. No other ranges + * should be added to this builder after {@link Builder#build} is invoked. */ public static class Builder { private IndexShiftData shiftData; @@ -647,7 +632,7 @@ public void shiftRange(final long beginRange, final long endRange, final long sh // Coalesce when possible. 
if (prevIdx >= 0 && shiftData.getShiftDelta(prevIdx) == shiftDelta - && shiftData.getEndRange(prevIdx) + 1 == beginRange) { + && shiftData.getEndRange(prevIdx) + 1 == beginRange) { shiftData.payload.set(prevIdx * NUM_ATTR + END_RANGE_ATTR, endRange); return; } @@ -660,26 +645,20 @@ public void shiftRange(final long beginRange, final long endRange, final long sh return; } - // If previous shift has different sign than shiftDelta, we must add current index to - // split run into chunks + // If previous shift has different sign than shiftDelta, we must add current index to split run into chunks if ((shiftData.getShiftDelta(prevIdx) < 0 ? -1 : 1) * shiftDelta < 0) { - shiftData.polaritySwapIndices.add(shiftData.size() - 1); // note the -1 excludes the - // new range + shiftData.polaritySwapIndices.add(shiftData.size() - 1); // note the -1 excludes the new range } if (beginRange <= shiftData.getEndRange(prevIdx)) { throw new IllegalArgumentException("new range [" + beginRange + "," + endRange - + "]->" + shiftDelta + " overlaps previous [" + shiftData.getBeginRange(prevIdx) - + "," - + shiftData.getEndRange(prevIdx) + "]->" + shiftData.getShiftDelta(prevIdx)); + + "]->" + shiftDelta + " overlaps previous [" + shiftData.getBeginRange(prevIdx) + "," + + shiftData.getEndRange(prevIdx) + "]->" + shiftData.getShiftDelta(prevIdx)); } - if (beginRange + shiftDelta <= shiftData.getEndRange(prevIdx) - + shiftData.getShiftDelta(prevIdx)) { - throw new IllegalArgumentException("new resulting range [" + beginRange + "," - + endRange - + "]->" + shiftDelta + " overlaps previous [" + shiftData.getBeginRange(prevIdx) - + "," - + shiftData.getEndRange(prevIdx) + "]->" + shiftData.getShiftDelta(prevIdx)); + if (beginRange + shiftDelta <= shiftData.getEndRange(prevIdx) + shiftData.getShiftDelta(prevIdx)) { + throw new IllegalArgumentException("new resulting range [" + beginRange + "," + endRange + + "]->" + shiftDelta + " overlaps previous [" + shiftData.getBeginRange(prevIdx) + "," + + 
shiftData.getEndRange(prevIdx) + "]->" + shiftData.getShiftDelta(prevIdx)); } } @@ -689,7 +668,7 @@ public long getMinimumValidBeginForNextDelta(long nextShiftDelta) { } final int idx = shiftData.size() - 1; return Math.max(shiftData.getEndRange(idx) + 1, - shiftData.getEndRange(idx) + shiftData.getShiftDelta(idx) - nextShiftDelta + 1); + shiftData.getEndRange(idx) + shiftData.getShiftDelta(idx) - nextShiftDelta + 1); } /** @@ -712,19 +691,17 @@ public IndexShiftData build() { } /** - * Use this method to append shifts that propagate from a parent table to a subset of a - * dependent table. The canonical use-case is merge, where tables are shifted in key-space - * so that they do not overlap each other. If one of these merged tables has a shift, then - * it must propagate these shifts to the merged table in the appropriately shifted key - * space. + * Use this method to append shifts that propagate from a parent table to a subset of a dependent table. The + * canonical use-case is merge, where tables are shifted in key-space so that they do not overlap each other. If + * one of these merged tables has a shift, then it must propagate these shifts to the merged table in the + * appropriately shifted key space. * - * This method also supports shifting the entire range in addition to propagating upstream - * shifts. For example, if a table needs more keyspace, then any tables slotted to the right - * (in a greater keyspace) will need to shift out of the way to free up the keyspace for the - * table. + * This method also supports shifting the entire range in addition to propagating upstream shifts. For example, + * if a table needs more keyspace, then any tables slotted to the right (in a greater keyspace) will need to + * shift out of the way to free up the keyspace for the table. * - * This method assumes that 1) the upstream shift data is valid and 2) shifts can be - * truncated when they extend beyond the table's known range. 
+ * This method assumes that 1) the upstream shift data is valid and 2) shifts can be truncated when they extend + * beyond the table's known range. * * @param innerShiftData the upstream shifts oriented in upstream keyspace [0, innerRange) * @param prevOffset the previous offset where this sub-table began @@ -733,42 +710,35 @@ public IndexShiftData build() { * @param currCardinality the cardinality of the keyspace currently allocated to this table */ public void appendShiftData(final IndexShiftData innerShiftData, final long prevOffset, - final long prevCardinality, final long currOffset, final long currCardinality) { + final long prevCardinality, final long currOffset, final long currCardinality) { long watermarkKey = 0; // id space of source table - // These bounds seem weird. We are going to insert a shift for the keyspace prior to the - // shift with - // index sidx. Thus, the first and last sidx are to cover shifting via - // `indexSpaceInserted` on the - // outside of shifts. Note that we use the knowledge/contract that shift data is ordered - // by key. + // These bounds seem weird. We are going to insert a shift for the keyspace prior to the shift with + // index sidx. Thus, the first and last sidx are to cover shifting via `indexSpaceInserted` on the + // outside of shifts. Note that we use the knowledge/contract that shift data is ordered by key. for (int sidx = 0; sidx < innerShiftData.size() + 1; ++sidx) { final long nextShiftEnd; final long nextShiftStart; final long nextShiftDelta; if (sidx < innerShiftData.size()) { nextShiftDelta = innerShiftData.getShiftDelta(sidx); - // Shifts to indices less than zero are meaningless and might cause our builder - // to complain. - nextShiftStart = Math.max(innerShiftData.getBeginRange(sidx), - nextShiftDelta < 0 ? -nextShiftDelta : 0); - // Shifts beyond the cardinality are meaningless (assumptions) but might destroy - // neighboring table data. 
- nextShiftEnd = Math.min( - Math.min(prevCardinality - 1, currCardinality - 1 - nextShiftDelta), - innerShiftData.getEndRange(sidx)); + // Shifts to indices less than zero are meaningless and might cause our builder to complain. + nextShiftStart = + Math.max(innerShiftData.getBeginRange(sidx), nextShiftDelta < 0 ? -nextShiftDelta : 0); + // Shifts beyond the cardinality are meaningless (assumptions) but might destroy neighboring table + // data. + nextShiftEnd = Math.min(Math.min(prevCardinality - 1, currCardinality - 1 - nextShiftDelta), + innerShiftData.getEndRange(sidx)); } else { nextShiftEnd = nextShiftStart = prevCardinality; nextShiftDelta = 0; } - // insert range prior to here; note shift ends are inclusive so we need the -1 for - // endRange + // insert range prior to here; note shift ends are inclusive so we need the -1 for endRange final long innerEnd = Math.min(prevCardinality - 1, nextShiftStart - 1) - + (nextShiftDelta < 0 ? nextShiftDelta : 0); + + (nextShiftDelta < 0 ? nextShiftDelta : 0); - shiftRange(watermarkKey + prevOffset, innerEnd + prevOffset, - currOffset - prevOffset); + shiftRange(watermarkKey + prevOffset, innerEnd + prevOffset, currOffset - prevOffset); if (sidx >= innerShiftData.size() || nextShiftStart > prevCardinality) { break; @@ -776,15 +746,14 @@ public void appendShiftData(final IndexShiftData innerShiftData, final long prev // insert this range shiftRange(nextShiftStart + prevOffset, nextShiftEnd + prevOffset, - currOffset - prevOffset + nextShiftDelta); + currOffset - prevOffset + nextShiftDelta); watermarkKey = nextShiftEnd + 1 + (nextShiftDelta > 0 ? nextShiftDelta : 0); } } /** - * This method adjusts the previous shift so that the upcoming shift will not be considered - * overlapping. This is useful if the previous shift included empty space for efficiency, - * but would intersect with our new shift. + * This method adjusts the previous shift so that the upcoming shift will not be considered overlapping. 
This is + * useful if the previous shift included empty space for efficiency, but would intersect with our new shift. * * @param nextShiftBegin The first real-key that needs to shift in the upcoming shift. * @param nextShiftDelta The delta that applies to the upcoming shift. @@ -796,17 +765,16 @@ public void limitPreviousShiftFor(long nextShiftBegin, long nextShiftDelta) { shiftData.payload.set(prevIdx * NUM_ATTR + END_RANGE_ATTR, nextShiftBegin - 1); } if (nextShiftBegin + nextShiftDelta <= shiftData.getEndRange(prevIdx) - + shiftData.getShiftDelta(prevIdx)) { + + shiftData.getShiftDelta(prevIdx)) { shiftData.payload.set(prevIdx * NUM_ATTR + END_RANGE_ATTR, - nextShiftBegin + nextShiftDelta - shiftData.getShiftDelta(prevIdx) - 1); + nextShiftBegin + nextShiftDelta - shiftData.getShiftDelta(prevIdx) - 1); } if (shiftData.getEndRange(prevIdx) < shiftData.getBeginRange(prevIdx)) { // remove shift completely: shiftData.payload.remove(shiftData.payload.size() - 3, 3); final int numSwaps = shiftData.polaritySwapIndices.size(); - if (numSwaps > 0 - && shiftData.polaritySwapIndices.get(numSwaps - 1) >= shiftData.size()) { + if (numSwaps > 0 && shiftData.polaritySwapIndices.get(numSwaps - 1) >= shiftData.size()) { shiftData.polaritySwapIndices.removeAt(numSwaps - 1); } } else { @@ -817,19 +785,17 @@ public void limitPreviousShiftFor(long nextShiftBegin, long nextShiftDelta) { } /** - * Helper utility to build instances of {@link IndexShiftData} with internally consistent data. - * No other ranges should be added to this builder after {@link Builder#build} is invoked. + * Helper utility to build instances of {@link IndexShiftData} with internally consistent data. No other ranges + * should be added to this builder after {@link Builder#build} is invoked. *

    - * Differs from {@link Builder} in that it coalesces ranges with the same delta if they have no - * intervening keys in the pre-shift keys of the input, e.g. a - * {@link io.deephaven.db.tables.Table}. + * Differs from {@link Builder} in that it coalesces ranges with the same delta if they have no intervening keys in + * the pre-shift keys of the input, e.g. a {@link io.deephaven.db.tables.Table}. *

    *

    - * The data should be presented to the builder in shift iterator order, meaning the first - * contiguous run with a given polarity is presented to the builder, then the next run is - * presented with the opposite polarity. When the polarity is reversed (i.e., the delta is - * positive); the ranges must be presented in reverse (descending) order within the run. When - * the polarity is not reversed (i.e., the delta is negative); the ranges must be presented in + * The data should be presented to the builder in shift iterator order, meaning the first contiguous run with a + * given polarity is presented to the builder, then the next run is presented with the opposite polarity. When the + * polarity is reversed (i.e., the delta is positive); the ranges must be presented in reverse (descending) order + * within the run. When the polarity is not reversed (i.e., the delta is negative); the ranges must be presented in * ascending order. *

    */ @@ -839,14 +805,13 @@ public static final class SmartCoalescingBuilder implements SafeCloseable { */ private ReadOnlyIndex preShiftKeys; /** - * A forward iterator, which is used for all shifts that do not have reversed polarity (i.e. - * negative delta). We create this on the first negative delta shift and reuse it until we - * are closed. + * A forward iterator, which is used for all shifts that do not have reversed polarity (i.e. negative delta). We + * create this on the first negative delta shift and reuse it until we are closed. */ private ReadOnlyIndex.SearchIterator preShiftKeysIteratorForward; /** - * For each run of shifts that have reversed polarity (positive delta), we create a new - * reverse iterator. We reuse this until we find a negative delta shift and then close it. + * For each run of shifts that have reversed polarity (positive delta), we create a new reverse iterator. We + * reuse this until we find a negative delta shift and then close it. */ private ReadOnlyIndex.SearchIterator preShiftKeysIteratorReverse; /** @@ -855,8 +820,7 @@ public static final class SmartCoalescingBuilder implements SafeCloseable { private IndexShiftData shiftData; /** - * The index of the first range that needs to be reversed. -1 if there is no range to - * reverse at the moment. + * The index of the first range that needs to be reversed. -1 if there is no range to reverse at the moment. */ private int rangeToReverseStart = -1; @@ -874,10 +838,9 @@ public static final class SmartCoalescingBuilder implements SafeCloseable { */ private long nextReverseKey; /** - * The next key after our last shift range. We record this value so that if two subsequent - * shifts have the same delta, but do not include the intervening key we do not permit - * coalescing. If there is no intervening key, we permit coalescing. ReadOnlyIndex.NULL_KEY - * indicates there is no intervening key of interest. + * The next key after our last shift range. 
We record this value so that if two subsequent shifts have the same + * delta, but do not include the intervening key we do not permit coalescing. If there is no intervening key, we + * permit coalescing. ReadOnlyIndex.NULL_KEY indicates there is no intervening key of interest. */ private long interveningKey = ReadOnlyIndex.NULL_KEY; @@ -887,8 +850,8 @@ public static final class SmartCoalescingBuilder implements SafeCloseable { private long lastReverseIteratorStart = ReadOnlyIndex.NULL_KEY; /** - * Make a builder that tries to coalesce non-adjacent ranges with the same delta if there - * are no intervening keys in the pre-shift ordered keys. + * Make a builder that tries to coalesce non-adjacent ranges with the same delta if there are no intervening + * keys in the pre-shift ordered keys. * * @param preShiftKeys The pre-shift ordered keys for the space being shifted. */ @@ -919,7 +882,7 @@ public void shiftRange(final long beginRange, final long endRange, final long sh final boolean polarityReversed = shiftDelta > 0; final boolean polarityChanged = lastPolarityReversed != polarityReversed; final boolean reinitializeReverseIterator = - polarityReversed && (polarityChanged || beginRange > lastReverseIteratorStart); + polarityReversed && (polarityChanged || beginRange > lastReverseIteratorStart); if (polarityChanged || reinitializeReverseIterator) { interveningKey = ReadOnlyIndex.NULL_KEY; if (lastPolarityReversed) { @@ -928,8 +891,7 @@ public void shiftRange(final long beginRange, final long endRange, final long sh preShiftKeysIteratorReverse.close(); preShiftKeysIteratorReverse = null; } - // we take care of creating the iterator below for the case where the polarity - // is not reversed + // we take care of creating the iterator below for the case where the polarity is not reversed // (but only once, as the iterator is usable for this entire builder) } } @@ -965,45 +927,36 @@ public void shiftRange(final long beginRange, final long endRange, final long sh if 
(beginRange == 0 || !preShiftKeysIteratorReverse.advance(beginRange - 1)) { nextInterveningKey = nextReverseKey = ReadOnlyIndex.NULL_KEY; } else { - nextInterveningKey = - nextReverseKey = preShiftKeysIteratorReverse.currentValue(); + nextInterveningKey = nextReverseKey = preShiftKeysIteratorReverse.currentValue(); } } else { if (nextForwardKey == ReadOnlyIndex.NULL_KEY || nextForwardKey > endRange) { return; } - if (endRange == Long.MAX_VALUE - || !preShiftKeysIteratorForward.advance(endRange + 1)) { + if (endRange == Long.MAX_VALUE || !preShiftKeysIteratorForward.advance(endRange + 1)) { nextInterveningKey = nextForwardKey = ReadOnlyIndex.NULL_KEY; } else { - nextInterveningKey = - nextForwardKey = preShiftKeysIteratorForward.currentValue(); + nextInterveningKey = nextForwardKey = preShiftKeysIteratorForward.currentValue(); } } final int currentRangeIndex = shiftData.size() - 1; // Coalesce when possible. - if (currentRangeIndex >= 0 - && shiftData.getShiftDelta(currentRangeIndex) == shiftDelta) { - // if we had an intervening key between the last end (or begin) and the current - // begin (or end); then + if (currentRangeIndex >= 0 && shiftData.getShiftDelta(currentRangeIndex) == shiftDelta) { + // if we had an intervening key between the last end (or begin) and the current begin (or end); then // these two ranges can not be coalesced if (polarityReversed) { if (interveningKey == ReadOnlyIndex.NULL_KEY || interveningKey <= endRange) { - // we must merge these ranges; this is not as simple as the forward case, - // because if we had the - // same reverse iterator as last time (i.e. the polarity was applied - // "correctly"), we should - // simply be able to update the beginning of the range. 
However, if the - // existing range is - // before this range; it means we are in a new segment of shifts; and must - // merge ourselves + // we must merge these ranges; this is not as simple as the forward case, because if we had the + // same reverse iterator as last time (i.e. the polarity was applied "correctly"), we should + // simply be able to update the beginning of the range. However, if the existing range is + // before this range; it means we are in a new segment of shifts; and must merge ourselves // to the existing shift by extending the end final long existingBegin = shiftData.getBeginRange(currentRangeIndex); final long existingEnd = shiftData.getEndRange(currentRangeIndex); if (existingBegin < beginRange) { - // if there was an intervening key between our beginRange and the - // existing end, we can not merge + // if there was an intervening key between our beginRange and the existing end, we can not + // merge if (nextInterveningKey <= existingEnd) { shiftData.payload.set(currentRangeIndex * 3 + 1, endRange); interveningKey = nextInterveningKey; @@ -1034,48 +987,43 @@ public void shiftRange(final long beginRange, final long endRange, final long sh return; } - // If previous shift has different sign than shiftDelta, we must add current index to - // split run into chunks. - final boolean polaritySwap = - (shiftData.getShiftDelta(currentRangeIndex) < 0 ? -1 : 1) * shiftDelta < 0; + // If previous shift has different sign than shiftDelta, we must add current index to split run into chunks. + final boolean polaritySwap = (shiftData.getShiftDelta(currentRangeIndex) < 0 ? -1 : 1) * shiftDelta < 0; if (polaritySwap) { - shiftData.polaritySwapIndices.add(shiftData.size() - 1); // NB: The -1 excludes the - // new range. + shiftData.polaritySwapIndices.add(shiftData.size() - 1); // NB: The -1 excludes the new range. 
} if (!polarityReversed) { if (beginRange <= shiftData.getEndRange(currentRangeIndex)) { throw new IllegalArgumentException("new range [" + beginRange + "," + endRange - + "]->" + shiftDelta + " overlaps previous [" - + shiftData.getBeginRange(currentRangeIndex) + "," - + shiftData.getEndRange(currentRangeIndex) + "]->" - + shiftData.getShiftDelta(currentRangeIndex)); + + "]->" + shiftDelta + " overlaps previous [" + shiftData.getBeginRange(currentRangeIndex) + + "," + + shiftData.getEndRange(currentRangeIndex) + "]->" + + shiftData.getShiftDelta(currentRangeIndex)); } if (beginRange + shiftDelta <= shiftData.getEndRange(currentRangeIndex) - + shiftData.getShiftDelta(currentRangeIndex)) { - throw new IllegalArgumentException( - "new resulting range [" + beginRange + "," + endRange - + "]->" + shiftDelta + " overlaps previous [" - + shiftData.getBeginRange(currentRangeIndex) + "," + + shiftData.getShiftDelta(currentRangeIndex)) { + throw new IllegalArgumentException("new resulting range [" + beginRange + "," + endRange + + "]->" + shiftDelta + " overlaps previous [" + shiftData.getBeginRange(currentRangeIndex) + + "," + shiftData.getEndRange(currentRangeIndex) + "]->" + shiftData.getShiftDelta(currentRangeIndex)); } } else if (!reinitializeReverseIterator) { - // we are in the midst of a sequence of reversed polarity things, so we should be - // less than the previous shift + // we are in the midst of a sequence of reversed polarity things, so we should be less than the previous + // shift if (beginRange >= shiftData.getEndRange(currentRangeIndex)) { throw new IllegalArgumentException("new range [" + beginRange + "," + endRange - + "]->" + shiftDelta + " overlaps previous [" - + shiftData.getBeginRange(currentRangeIndex) + "," - + shiftData.getEndRange(currentRangeIndex) + "]->" - + shiftData.getShiftDelta(currentRangeIndex)); + + "]->" + shiftDelta + " overlaps previous [" + shiftData.getBeginRange(currentRangeIndex) + + "," + + 
shiftData.getEndRange(currentRangeIndex) + "]->" + + shiftData.getShiftDelta(currentRangeIndex)); } if (beginRange + shiftDelta >= shiftData.getEndRange(currentRangeIndex) - + shiftData.getShiftDelta(currentRangeIndex)) { - throw new IllegalArgumentException( - "new resulting range [" + beginRange + "," + endRange - + "]->" + shiftDelta + " overlaps previous [" - + shiftData.getBeginRange(currentRangeIndex) + "," + + shiftData.getShiftDelta(currentRangeIndex)) { + throw new IllegalArgumentException("new resulting range [" + beginRange + "," + endRange + + "]->" + shiftDelta + " overlaps previous [" + shiftData.getBeginRange(currentRangeIndex) + + "," + shiftData.getEndRange(currentRangeIndex) + "]->" + shiftData.getShiftDelta(currentRangeIndex)); } @@ -1083,8 +1031,8 @@ public void shiftRange(final long beginRange, final long endRange, final long sh } /** - * When the polarity is reversed, we build the run backwards; and we flip it around when - * transitioning to the next run (or when the final build is called). + * When the polarity is reversed, we build the run backwards; and we flip it around when transitioning to the + * next run (or when the final build is called). */ private void maybeReverseLastRun() { if (rangeToReverseStart >= 0) { @@ -1111,8 +1059,8 @@ private void maybeReverseLastRun() { } /** - * Make final modifications to the {@link IndexShiftData} and return it. Invoke - * {@link #close()} to minimize the lifetime of the pre-shift {@link OrderedKeys.Iterator}. + * Make final modifications to the {@link IndexShiftData} and return it. Invoke {@link #close()} to minimize the + * lifetime of the pre-shift {@link OrderedKeys.Iterator}. * * @return The built IndexShiftData */ @@ -1149,20 +1097,19 @@ public void close() { } /** - * This method creates two parallel Index structures that contain postShiftIndex keys affected - * by shifts. The two Indexes have the same size. 
An element at position k in the first index is - * the pre-shift key for the same row whose post-shift key is at position k in the second index. + * This method creates two parallel Index structures that contain postShiftIndex keys affected by shifts. The two + * Indexes have the same size. An element at position k in the first index is the pre-shift key for the same row + * whose post-shift key is at position k in the second index. * - * @param postShiftIndex The index of keys that were shifted in post-shift keyspace. It should - * not contain rows that did not exist prior to the shift. - * @return A SafeCloseablePair of preShiftedKeys and postShiftedKeys that intersect this - * IndexShiftData with postShiftIndex. + * @param postShiftIndex The index of keys that were shifted in post-shift keyspace. It should not contain rows that + * did not exist prior to the shift. + * @return A SafeCloseablePair of preShiftedKeys and postShiftedKeys that intersect this IndexShiftData with + * postShiftIndex. 
*/ public SafeCloseablePair extractParallelShiftedRowsFromPostShiftIndex( - final ReadOnlyIndex postShiftIndex) { + final ReadOnlyIndex postShiftIndex) { if (empty()) { - return SafeCloseablePair.of(Index.FACTORY.getEmptyIndex(), - Index.FACTORY.getEmptyIndex()); + return SafeCloseablePair.of(Index.FACTORY.getEmptyIndex(), Index.FACTORY.getEmptyIndex()); } final Index.SequentialBuilder preShiftBuilder = Index.FACTORY.getSequentialBuilder(); @@ -1186,9 +1133,8 @@ public SafeCloseablePair extractParallelShiftedRowsFromPostShiftIn } final SafeCloseablePair retVal = - SafeCloseablePair.of(preShiftBuilder.getIndex(), postShiftBuilder.getIndex()); - Assert.eq(retVal.first.size(), "retVal.first.size()", retVal.second.size(), - "retVal.second.size()"); + SafeCloseablePair.of(preShiftBuilder.getIndex(), postShiftBuilder.getIndex()); + Assert.eq(retVal.first.size(), "retVal.first.size()", retVal.second.size(), "retVal.second.size()"); return retVal; } } diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/IndexShiftDataExpander.java b/DB/src/main/java/io/deephaven/db/v2/utils/IndexShiftDataExpander.java index 9f325fd6475..88b0607d207 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/IndexShiftDataExpander.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/IndexShiftDataExpander.java @@ -16,8 +16,8 @@ import java.util.function.BiConsumer; /** - * Expands {@link ShiftAwareListener#onUpdate(ShiftAwareListener.Update)}'s Update into a backward - * compatible ARM (added, removed, modified) by expanding keyspace shifts. + * Expands {@link ShiftAwareListener#onUpdate(ShiftAwareListener.Update)}'s Update into a backward compatible ARM + * (added, removed, modified) by expanding keyspace shifts. * * Using this is almost always less efficient than using the Update directly. 
*/ @@ -46,18 +46,13 @@ public IndexShiftDataExpander(final ShiftAwareListener.Update update, final Inde this.update.removed = prevIndex.minus(sourceIndex); } - // Conceptually we can group modifies into two: a) modifies that were not part of any - // shift, and b) modifies - // that are now at a shift destination. Group A is in upstream's modified set already. - // Group B indices - // either existed last cycle or it did not. If it existed last cycle, then it should - // remain in the modified set. - // If it did not exist last cycle then it is accounted for in `this.update.added`. The - // is one more group of - // modified rows. These are rows that existed in both previous and current indexes but - // were shifted. - // Thus we need to add mods for shifted rows and remove any rows that are added (by old - // definition). + // Conceptually we can group modifies into two: a) modifies that were not part of any shift, and b) modifies + // that are now at a shift destination. Group A is in upstream's modified set already. Group B indices + // either existed last cycle or it did not. If it existed last cycle, then it should remain in the modified + // set. + // If it did not exist last cycle then it is accounted for in `this.update.added`. The is one more group of + // modified rows. These are rows that existed in both previous and current indexes but were shifted. + // Thus we need to add mods for shifted rows and remove any rows that are added (by old definition). this.update.modified = update.modified.clone(); // Expand shift destinations to paint rows that might need to be considered modified. 
@@ -74,14 +69,13 @@ public IndexShiftDataExpander(final ShiftAwareListener.Update update, final Inde // consider all rows that are in a shift region as modified (if they still exist) try (final Index addedByShift = addedByShiftB.getIndex(); - final Index rmByShift = removedByShiftB.getIndex()) { + final Index rmByShift = removedByShiftB.getIndex()) { addedByShift.insert(rmByShift); addedByShift.retain(sourceIndex); this.update.modified.insert(addedByShift); } - // remove all rows we define as added (i.e. modified rows that were actually shifted - // into a new index) + // remove all rows we define as added (i.e. modified rows that were actually shifted into a new index) try (final Index absoluteModified = update.removed.intersect(update.added)) { this.update.modified.insert(absoluteModified); } @@ -128,10 +122,9 @@ public void close() { /** * Immutable, re-usable {@link IndexShiftDataExpander} for an empty set of changes. */ - public static IndexShiftDataExpander EMPTY = - new IndexShiftDataExpander(new ShiftAwareListener.Update( - Index.FACTORY.getEmptyIndex(), Index.FACTORY.getEmptyIndex(), - Index.FACTORY.getEmptyIndex(), IndexShiftData.EMPTY, + public static IndexShiftDataExpander EMPTY = new IndexShiftDataExpander(new ShiftAwareListener.Update( + Index.FACTORY.getEmptyIndex(), Index.FACTORY.getEmptyIndex(), Index.FACTORY.getEmptyIndex(), + IndexShiftData.EMPTY, ModifiedColumnSet.ALL), Index.FACTORY.getEmptyIndex()); /** @@ -153,32 +146,28 @@ public void validate(final Index sourceIndex) { final boolean currentMissingModifications = !update.modified.subsetOf(sourceIndex); if (!previousContainsAdds && !previousMissingRemovals && !previousMissingModifications && - !currentMissingAdds && !currentContainsRemovals && !currentMissingModifications) { + !currentMissingAdds && !currentContainsRemovals && !currentMissingModifications) { return; } - // Excuse the sloppiness in Index closing after this point, we're planning to crash the - // process anyway... 
+ // Excuse the sloppiness in Index closing after this point, we're planning to crash the process anyway... String serializedIndices = null; if (BaseTable.PRINT_SERIALIZED_UPDATE_OVERLAPS) { - // The indices are really rather complicated, if we fail this check let's generate a - // serialized representation - // of them that can later be loaded into a debugger. If this fails, we'll ignore it and - // continue with our + // The indices are really rather complicated, if we fail this check let's generate a serialized + // representation + // of them that can later be loaded into a debugger. If this fails, we'll ignore it and continue with our // regularly scheduled exception. try { final StringBuilder outputBuffer = new StringBuilder(); final ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(); - final ObjectOutputStream objectOutputStream = - new ObjectOutputStream(byteArrayOutputStream); + final ObjectOutputStream objectOutputStream = new ObjectOutputStream(byteArrayOutputStream); final BiConsumer append = (name, obj) -> { try { objectOutputStream.writeObject(obj); outputBuffer.append(name); - outputBuffer - .append(Base64.byteArrayToBase64(byteArrayOutputStream.toByteArray())); + outputBuffer.append(Base64.byteArrayToBase64(byteArrayOutputStream.toByteArray())); byteArrayOutputStream.reset(); objectOutputStream.reset(); } catch (final Exception ignored) { @@ -196,8 +185,7 @@ public void validate(final Index sourceIndex) { } } - // If we're still here, we know that things are off the rails, and we want to fire the - // assertion + // If we're still here, we know that things are off the rails, and we want to fire the assertion final Index addedIntersectPrevious = update.added.intersect(sourceIndex.getPrevIndex()); final Index removalsMinusPrevious = update.removed.minus(sourceIndex.getPrevIndex()); final Index modifiedMinusPrevious = update.modified.minus(sourceIndex.getPrevIndex()); @@ -205,41 +193,31 @@ public void validate(final Index 
sourceIndex) { final Index removedIntersectCurrent = update.removed.intersect(sourceIndex); final Index modifiedMinusCurrent = update.modified.minus(sourceIndex); - // Everything is messed up for this table, print out the indices in an easy to understand - // way - final String indexUpdateErrorMessage = new LogOutputStringImpl() - .append("Index update error detected: ") - .append(LogOutput::nl).append("\t previousIndex=") - .append(sourceIndex.getPrevIndex()) - .append(LogOutput::nl).append("\t currentIndex=").append(sourceIndex) - .append(LogOutput::nl).append("\t updateToExpand=").append(update) - .append(LogOutput::nl).append("\t added=").append(update.added) - .append(LogOutput::nl).append("\t removed=").append(update.removed) - .append(LogOutput::nl).append("\t modified=").append(update.modified) - .append(LogOutput::nl).append("\t shifted.size()=") - .append(update.shifted.size()) - .append(LogOutput::nl).append("\t addedIntersectPrevious=") - .append(addedIntersectPrevious) - .append(LogOutput::nl).append("\t removalsMinusPrevious=") - .append(removalsMinusPrevious) - .append(LogOutput::nl).append("\t modifiedMinusPrevious=") - .append(modifiedMinusPrevious) - .append(LogOutput::nl).append("\t addedMinusCurrent=").append(addedMinusCurrent) - .append(LogOutput::nl).append("\tremovedIntersectCurrent=") - .append(removedIntersectCurrent) - .append(LogOutput::nl).append("\t modifiedMinusCurrent=").append(modifiedMinusCurrent) - .toString(); + // Everything is messed up for this table, print out the indices in an easy to understand way + final String indexUpdateErrorMessage = new LogOutputStringImpl().append("Index update error detected: ") + .append(LogOutput::nl).append("\t previousIndex=").append(sourceIndex.getPrevIndex()) + .append(LogOutput::nl).append("\t currentIndex=").append(sourceIndex) + .append(LogOutput::nl).append("\t updateToExpand=").append(update) + .append(LogOutput::nl).append("\t added=").append(update.added) + 
.append(LogOutput::nl).append("\t removed=").append(update.removed) + .append(LogOutput::nl).append("\t modified=").append(update.modified) + .append(LogOutput::nl).append("\t shifted.size()=").append(update.shifted.size()) + .append(LogOutput::nl).append("\t addedIntersectPrevious=").append(addedIntersectPrevious) + .append(LogOutput::nl).append("\t removalsMinusPrevious=").append(removalsMinusPrevious) + .append(LogOutput::nl).append("\t modifiedMinusPrevious=").append(modifiedMinusPrevious) + .append(LogOutput::nl).append("\t addedMinusCurrent=").append(addedMinusCurrent) + .append(LogOutput::nl).append("\tremovedIntersectCurrent=").append(removedIntersectCurrent) + .append(LogOutput::nl).append("\t modifiedMinusCurrent=").append(modifiedMinusCurrent).toString(); final Logger log = ProcessEnvironment.getDefaultLog(); log.error().append(indexUpdateErrorMessage).endl(); if (serializedIndices != null) { - log.error().append("Index update error detected: serialized data=") - .append(serializedIndices).endl(); + log.error().append("Index update error detected: serialized data=").append(serializedIndices).endl(); } Assert.assertion(false, "!(previousContainsAdds || previousMissingRemovals || " + - "previousMissingModifications || currentMissingAdds || currentContainsRemovals || " + - "currentMissingModifications)", indexUpdateErrorMessage); + "previousMissingModifications || currentMissingAdds || currentContainsRemovals || " + + "currentMissingModifications)", indexUpdateErrorMessage); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/IndexUtilities.java b/DB/src/main/java/io/deephaven/db/v2/utils/IndexUtilities.java index 2933bed590f..ab0160b0b96 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/IndexUtilities.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/IndexUtilities.java @@ -16,9 +16,8 @@ static String toString(Index index, int maxNodes) { try (final Index.RangeIterator it = index.rangeIterator()) { while (it.hasNext()) { it.next(); - 
result.append(isFirst ? "" : ",").append(it.currentRangeStart()).append( - it.currentRangeEnd() != it.currentRangeStart() ? "-" + it.currentRangeEnd() - : ""); + result.append(isFirst ? "" : ",").append(it.currentRangeStart()) + .append(it.currentRangeEnd() != it.currentRangeStart() ? "-" + it.currentRangeEnd() : ""); isFirst = false; if (count++ > maxNodes) { result.append("..."); @@ -31,7 +30,7 @@ static String toString(Index index, int maxNodes) { } public static void fillKeyIndicesChunk(final ReadOnlyIndex index, - final WritableLongChunk chunkToFill) { + final WritableLongChunk chunkToFill) { chunkToFill.setSize(0); // so that we can actually add from the beginning. index.forEachLong((final long v) -> { chunkToFill.add(v); @@ -40,7 +39,7 @@ public static void fillKeyIndicesChunk(final ReadOnlyIndex index, } public static void fillKeyRangesChunk(final ReadOnlyIndex index, - final WritableLongChunk chunkToFill) { + final WritableLongChunk chunkToFill) { chunkToFill.setSize(0); index.forAllLongRanges((final long start, final long end) -> { chunkToFill.add(start); @@ -109,8 +108,7 @@ static boolean equalsDeepImpl(final ReadOnlyIndex index, final ReadOnlyIndex oth while (it1.hasNext() && it2.hasNext()) { it1.next(); it2.next(); - if (it1.currentRangeStart() != it2.currentRangeStart() - || it1.currentRangeEnd() != it2.currentRangeEnd()) { + if (it1.currentRangeStart() != it2.currentRangeStart() || it1.currentRangeEnd() != it2.currentRangeEnd()) { return false; } } @@ -122,14 +120,13 @@ static boolean equals(final ReadOnlyIndex index, final Object other) { return false; } final Index otherIndex = (Index) other; - return index.size() == otherIndex.size() - && IndexUtilities.equalsDeepImpl(index, otherIndex); + return index.size() == otherIndex.size() && IndexUtilities.equalsDeepImpl(index, otherIndex); } public interface Comparator { /** - * Compare the underlying target to the provided value. 
Return -1, 0, or 1 if target is less - * than, equal, or greater than the provided value, respectively. + * Compare the underlying target to the provided value. Return -1, 0, or 1 if target is less than, equal, or + * greater than the provided value, respectively. * * @param value * @return -1 if target < value; 0 if value == target; +1 if value < target. @@ -138,16 +135,14 @@ public interface Comparator { } /** - * Look for the biggest value of i that satisfies begin <= i <= end and - * comp.directionToTargetFrom(i) > 0, or some value that satisfies comp.directionToTargetFrom(i) - * == 0. + * Look for the biggest value of i that satisfies begin <= i <= end and comp.directionToTargetFrom(i) > 0, or some + * value that satisfies comp.directionToTargetFrom(i) == 0. * * @param begin The beginning of the range (inclusive) * @param end The end of the range (inclusive) * @param comp a Comparator. - * @return the last position i inside the provided range that satisfies - * comp.directionToTargetFrom(i) > 0, or some position that satisfies - * comp.directionToTargetFrom(i) == 0. + * @return the last position i inside the provided range that satisfies comp.directionToTargetFrom(i) > 0, or some + * position that satisfies comp.directionToTargetFrom(i) == 0. */ public static long rangeSearch(final long begin, final long end, final Comparator comp) { long i = begin; @@ -181,25 +176,22 @@ public static long rangeSearch(final long begin, final long end, final Comparato } /** - * This is equivalent to `sourceIndex.invert(destIndex).forAllLongRanges(lrc)`, but requires - * O(1) space. Note that coalescing adjacent position-space runs enables callers to make minimal - * System.arraycopy calls. + * This is equivalent to `sourceIndex.invert(destIndex).forAllLongRanges(lrc)`, but requires O(1) space. Note that + * coalescing adjacent position-space runs enables callers to make minimal System.arraycopy calls. 
* - * @param sourceIndex index to find the destIndex keys in - ranges in the callback will be on - * this index + * @param sourceIndex index to find the destIndex keys in - ranges in the callback will be on this index * @param destIndex index values to look for within sourceIndex * @param lrc consumer to handle each inverted range that is encountered */ public static void forAllInvertedLongRanges(final Index sourceIndex, final Index destIndex, - final LongRangeConsumer lrc) { + final LongRangeConsumer lrc) { final MutableBoolean hasPending = new MutableBoolean(); final MutableLong pendingStart = new MutableLong(Index.NULL_KEY); final MutableLong pendingEnd = new MutableLong(Index.NULL_KEY); final OrderedKeys.Iterator sourceProbe = sourceIndex.getOrderedKeysIterator(); final MutableLong sourceOffset = new MutableLong(); destIndex.forAllLongRanges((start, end) -> { - final long sourceStart = - sourceOffset.getValue() + sourceProbe.advanceAndGetPositionDistance(start); + final long sourceStart = sourceOffset.getValue() + sourceProbe.advanceAndGetPositionDistance(start); final long sourceEnd = sourceStart + sourceProbe.advanceAndGetPositionDistance(end); if (!hasPending.booleanValue()) { pendingStart.setValue(sourceStart); diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/KeyedArrayBackedMutableTable.java b/DB/src/main/java/io/deephaven/db/v2/utils/KeyedArrayBackedMutableTable.java index 0573178bd58..120f1306ce0 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/KeyedArrayBackedMutableTable.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/KeyedArrayBackedMutableTable.java @@ -31,8 +31,8 @@ public class KeyedArrayBackedMutableTable extends BaseArrayBackedMutableTable { private final Set keyColumnSet; protected final ObjectArraySource[] arrayValueSources; - private final TObjectLongMap keyToRowMap = new TObjectLongHashMap<>( - Constants.DEFAULT_CAPACITY, Constants.DEFAULT_LOAD_FACTOR, Long.MIN_VALUE); + private final TObjectLongMap keyToRowMap = + new 
TObjectLongHashMap<>(Constants.DEFAULT_CAPACITY, Constants.DEFAULT_LOAD_FACTOR, Long.MIN_VALUE); /** * Create an empty KeyedArrayBackedMutableTable. @@ -43,9 +43,9 @@ public class KeyedArrayBackedMutableTable extends BaseArrayBackedMutableTable { * @return an empty KeyedArrayBackedMutableTable with the given definition and key columns */ public static KeyedArrayBackedMutableTable make(@NotNull TableDefinition definition, - final String... keyColumnNames) { + final String... keyColumnNames) { return make(new QueryTable(definition, Index.FACTORY.getEmptyIndex(), - NullValueColumnSource.createColumnSourceMap(definition)), keyColumnNames); + NullValueColumnSource.createColumnSourceMap(definition)), keyColumnNames); } /** @@ -58,34 +58,31 @@ public static KeyedArrayBackedMutableTable make(@NotNull TableDefinition definit * @return an empty KeyedArrayBackedMutableTable with the given definition and key columns */ public static KeyedArrayBackedMutableTable make(@NotNull TableDefinition definition, - final Map enumValues, final String... keyColumnNames) { - return make( - new QueryTable(definition, Index.FACTORY.getEmptyIndex(), - NullValueColumnSource.createColumnSourceMap(definition)), - enumValues, keyColumnNames); + final Map enumValues, final String... keyColumnNames) { + return make(new QueryTable(definition, Index.FACTORY.getEmptyIndex(), + NullValueColumnSource.createColumnSourceMap(definition)), enumValues, keyColumnNames); } /** * Create an empty KeyedArrayBackedMutableTable. * - * The initialTable is processed in order, so if there are duplicate keys only the last row is - * reflected in the output. + * The initialTable is processed in order, so if there are duplicate keys only the last row is reflected in the + * output. 
* * @param initialTable the initial values to copy into the KeyedArrayBackedMutableTable * @param keyColumnNames the name of the key columns * * @return an empty KeyedArrayBackedMutableTable with the given definition and key columns */ - public static KeyedArrayBackedMutableTable make(final Table initialTable, - final String... keyColumnNames) { + public static KeyedArrayBackedMutableTable make(final Table initialTable, final String... keyColumnNames) { return make(initialTable, Collections.emptyMap(), keyColumnNames); } /** * Create an empty KeyedArrayBackedMutableTable. * - * The initialTable is processed in order, so if there are duplicate keys only the last row is - * reflected in the output. + * The initialTable is processed in order, so if there are duplicate keys only the last row is reflected in the + * output. * * @param initialTable the initial values to copy into the KeyedArrayBackedMutableTable * @param enumValues a map of column names to enumeration values @@ -93,35 +90,33 @@ public static KeyedArrayBackedMutableTable make(final Table initialTable, * * @return an empty KeyedArrayBackedMutableTable with the given definition and key columns */ - public static KeyedArrayBackedMutableTable make(final Table initialTable, - final Map enumValues, final String... keyColumnNames) { - final KeyedArrayBackedMutableTable result = new KeyedArrayBackedMutableTable( - initialTable.getDefinition(), keyColumnNames, enumValues, new ProcessPendingUpdater()); + public static KeyedArrayBackedMutableTable make(final Table initialTable, final Map enumValues, + final String... 
keyColumnNames) { + final KeyedArrayBackedMutableTable result = new KeyedArrayBackedMutableTable(initialTable.getDefinition(), + keyColumnNames, enumValues, new ProcessPendingUpdater()); processInitial(initialTable, result); result.startTrackingPrev(); return result; } - private KeyedArrayBackedMutableTable(@NotNull TableDefinition definition, - final String[] keyColumnNames, final Map enumValues, - final ProcessPendingUpdater processPendingUpdater) { - super(Index.FACTORY.getEmptyIndex(), makeColumnSourceMap(definition), enumValues, - processPendingUpdater); + private KeyedArrayBackedMutableTable(@NotNull TableDefinition definition, final String[] keyColumnNames, + final Map enumValues, final ProcessPendingUpdater processPendingUpdater) { + super(Index.FACTORY.getEmptyIndex(), makeColumnSourceMap(definition), enumValues, processPendingUpdater); final List missingKeyColumns = new ArrayList<>(Arrays.asList(keyColumnNames)); missingKeyColumns.removeAll(definition.getColumnNames()); if (!missingKeyColumns.isEmpty()) { throw new ArgumentException("Missing key columns in definition: " + missingKeyColumns - + ", available columns: " + definition.getColumnNames()); + + ", available columns: " + definition.getColumnNames()); } this.keyColumnNames = keyColumnNames; this.keyColumnSet = new HashSet<>(Arrays.asList(keyColumnNames)); inputTableDefinition.setKeys(keyColumnNames); - inputTableDefinition.setValues(definition.getColumnNames().stream() - .filter(n -> !keyColumnSet.contains(n)).toArray(String[]::new)); + inputTableDefinition.setValues( + definition.getColumnNames().stream().filter(n -> !keyColumnSet.contains(n)).toArray(String[]::new)); final Stream> objectArraySourceStream = - Arrays.stream(inputTableDefinition.getValues()).map(this::getColumnSource) - .filter(cs -> cs instanceof ObjectArraySource).map(cs -> (ObjectArraySource) cs); + Arrays.stream(inputTableDefinition.getValues()).map(this::getColumnSource) + .filter(cs -> cs instanceof ObjectArraySource).map(cs -> 
(ObjectArraySource) cs); arrayValueSources = objectArraySourceStream.toArray(ObjectArraySource[]::new); } @@ -130,8 +125,8 @@ private void startTrackingPrev() { } @Override - protected void processPendingTable(Table table, boolean allowEdits, - IndexChangeRecorder indexChangeRecorder, Consumer errorNotifier) { + protected void processPendingTable(Table table, boolean allowEdits, IndexChangeRecorder indexChangeRecorder, + Consumer errorNotifier) { final ChunkSource keySource = makeKeySource(table); final int chunkCapacity = table.intSize(); @@ -143,14 +138,10 @@ protected void processPendingTable(Table table, boolean allowEdits, final StringBuilder errorBuilder = new StringBuilder(); try (final WritableLongChunk destinations = - WritableLongChunk.makeWritableChunk(chunkCapacity)) { - try ( - final ChunkSource.GetContext getContext = - keySource.makeGetContext(chunkCapacity, sharedContext); - final ChunkBoxer.BoxerKernel boxer = - ChunkBoxer.getBoxer(keySource.getChunkType(), chunkCapacity)) { - final Chunk keys = - keySource.getChunk(getContext, addIndex); + WritableLongChunk.makeWritableChunk(chunkCapacity)) { + try (final ChunkSource.GetContext getContext = keySource.makeGetContext(chunkCapacity, sharedContext); + final ChunkBoxer.BoxerKernel boxer = ChunkBoxer.getBoxer(keySource.getChunkType(), chunkCapacity)) { + final Chunk keys = keySource.getChunk(getContext, addIndex); final ObjectChunk boxed = boxer.box(keys); for (int ii = 0; ii < boxed.size(); ++ii) { final Object key = boxed.get(ii); @@ -190,17 +181,15 @@ protected void processPendingTable(Table table, boolean allowEdits, sharedContext.reset(); getColumnSourceMap().forEach((name, cs) -> { - final ArrayBackedColumnSource arrayBackedColumnSource = - (ArrayBackedColumnSource) cs; + final ArrayBackedColumnSource arrayBackedColumnSource = (ArrayBackedColumnSource) cs; arrayBackedColumnSource.ensureCapacity(nextRow); final ColumnSource sourceColumnSource = table.getColumnSource(name); - try ( - final 
WritableChunkSink.FillFromContext ffc = + try (final WritableChunkSink.FillFromContext ffc = arrayBackedColumnSource.makeFillFromContext(chunkCapacity); - final ChunkSource.GetContext getContext = - sourceColumnSource.makeGetContext(chunkCapacity, sharedContext)) { + final ChunkSource.GetContext getContext = + sourceColumnSource.makeGetContext(chunkCapacity, sharedContext)) { final Chunk valuesChunk = - sourceColumnSource.getChunk(getContext, addIndex); + sourceColumnSource.getChunk(getContext, addIndex); arrayBackedColumnSource.fillFromChunkUnordered(ffc, valuesChunk, destinations); } }); @@ -215,21 +204,16 @@ protected void processPendingDelete(Table table, IndexChangeRecorder indexChange final SharedContext sharedContext = SharedContext.makeSharedContext(); try (final WritableLongChunk destinations = - WritableLongChunk.makeWritableChunk(chunkCapacity)) { - try ( - final ChunkSource.GetContext getContext = - keySource.makeGetContext(chunkCapacity, sharedContext); - final ChunkBoxer.BoxerKernel boxer = - ChunkBoxer.getBoxer(keySource.getChunkType(), chunkCapacity)) { - final Chunk keys = - keySource.getChunk(getContext, table.getIndex()); + WritableLongChunk.makeWritableChunk(chunkCapacity)) { + try (final ChunkSource.GetContext getContext = keySource.makeGetContext(chunkCapacity, sharedContext); + final ChunkBoxer.BoxerKernel boxer = ChunkBoxer.getBoxer(keySource.getChunkType(), chunkCapacity)) { + final Chunk keys = keySource.getChunk(getContext, table.getIndex()); final ObjectChunk boxed = boxer.box(keys); destinations.setSize(0); for (int ii = 0; ii < boxed.size(); ++ii) { final Object key = boxed.get(ii); long rowNumber = keyToRowMap.get(key); - if (rowNumber != keyToRowMap.getNoEntryValue() - && !isDeletedRowNumber(rowNumber)) { + if (rowNumber != keyToRowMap.getNoEntryValue() && !isDeletedRowNumber(rowNumber)) { indexChangeRecorder.removeIndex(rowNumber); destinations.add(rowNumber); keyToRowMap.put(key, rowNumberToDeletedRowNumber(rowNumber)); @@ -240,9 
+224,9 @@ protected void processPendingDelete(Table table, IndexChangeRecorder indexChange // null out the values, so that we do not hold onto garbage forever, we keep the keys for (ObjectArraySource objectArraySource : arrayValueSources) { try (final WritableChunkSink.FillFromContext ffc = - objectArraySource.makeFillFromContext(chunkCapacity)) { + objectArraySource.makeFillFromContext(chunkCapacity)) { final WritableObjectChunk nullChunk = - WritableObjectChunk.makeWritableChunk(chunkCapacity); + WritableObjectChunk.makeWritableChunk(chunkCapacity); nullChunk.fillWithNullValue(0, chunkCapacity); objectArraySource.fillFromChunkUnordered(ffc, nullChunk, destinations); } @@ -253,7 +237,7 @@ protected void processPendingDelete(Table table, IndexChangeRecorder indexChange private ChunkSource makeKeySource(Table table) { // noinspection unchecked return TupleSourceFactory.makeTupleSource( - Arrays.stream(keyColumnNames).map(table::getColumnSource).toArray(ColumnSource[]::new)); + Arrays.stream(keyColumnNames).map(table::getColumnSource).toArray(ColumnSource[]::new)); } @Override @@ -274,8 +258,8 @@ void validateDelete(final TableDefinition keyDefinition) { error.append("Key Column \"").append(keyColumn).append("\" is not compatible.\n"); } } - final List extraKeys = keyDefinition.getColumnNames().stream() - .filter(kd -> !keyColumnSet.contains(kd)).collect(Collectors.toList()); + final List extraKeys = keyDefinition.getColumnNames().stream().filter(kd -> !keyColumnSet.contains(kd)) + .collect(Collectors.toList()); if (!extraKeys.isEmpty()) { error.append("Unknown key columns: ").append(extraKeys); } diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/MemoryTableLogger.java b/DB/src/main/java/io/deephaven/db/v2/utils/MemoryTableLogger.java index 4684ddf80bb..f01c2ab921f 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/MemoryTableLogger.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/MemoryTableLogger.java @@ -15,35 +15,34 @@ public class MemoryTableLogger 
{ private final T tableLogger; private final TableDefinition tableDefinition; - public MemoryTableLogger(@NotNull Logger logger, @NotNull T tableLogger, - @NotNull TableDefinition tableDefinition, final int initialSizeArg) { + public MemoryTableLogger(@NotNull Logger logger, @NotNull T tableLogger, @NotNull TableDefinition tableDefinition, + final int initialSizeArg) { this.tableLogger = tableLogger; this.tableDefinition = tableDefinition; final Class loggerClass = tableLogger.getClass(); final int initialSize = (initialSizeArg == -1) - ? Configuration.getInstance().getIntegerForClassWithDefault( - MemoryTableLogger.class, - loggerClass.getSimpleName() + ".logQueueSize", - 10000) - : initialSizeArg; + ? Configuration.getInstance().getIntegerForClassWithDefault( + MemoryTableLogger.class, + loggerClass.getSimpleName() + ".logQueueSize", + 10000) + : initialSizeArg; try { tableWriter = new DynamicTableWriter(tableDefinition); tableLogger.init(tableWriter, initialSize); } catch (IOException e) { // If we can't get the table definition there's a real problem logger.error() - .append("Error creating in-memory performance logger for ") - .append(loggerClass.getSimpleName()) - .append(":") - .append(e.toString()) - .endl(); + .append("Error creating in-memory performance logger for ") + .append(loggerClass.getSimpleName()) + .append(":") + .append(e.toString()) + .endl(); throw new UncheckedIOException(e); } } - public MemoryTableLogger(@NotNull Logger logger, @NotNull T tableLogger, - @NotNull TableDefinition tableDefinition) { + public MemoryTableLogger(@NotNull Logger logger, @NotNull T tableLogger, @NotNull TableDefinition tableDefinition) { this(logger, tableLogger, tableDefinition, -1); } diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/MemoryTableLoggers.java b/DB/src/main/java/io/deephaven/db/v2/utils/MemoryTableLoggers.java index 495c2d84c56..d4ca461152e 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/MemoryTableLoggers.java +++ 
b/DB/src/main/java/io/deephaven/db/v2/utils/MemoryTableLoggers.java @@ -19,12 +19,10 @@ import java.io.IOException; public class MemoryTableLoggers { - private static final boolean STATS_LOGGING_ENABLED = - Configuration.getInstance().getBooleanWithDefault( + private static final boolean STATS_LOGGING_ENABLED = Configuration.getInstance().getBooleanWithDefault( "statsLoggingEnabled", true); - private static final int DEFAULT_PROCESSS_INFO_LOG_SIZE = - Configuration.getInstance().getIntegerWithDefault( + private static final int DEFAULT_PROCESSS_INFO_LOG_SIZE = Configuration.getInstance().getIntegerWithDefault( "defaultProcessInfoLogSize", 400); private volatile static MemoryTableLoggers INSTANCE; @@ -55,8 +53,8 @@ private MemoryTableLoggers() { try { pInfo = ProcessInfoConfig.createForCurrentProcess(configuration); pInfoLogger = new MemoryTableLogger<>( - log, new ProcessInfoLogLogger(), ProcessInfoLogLogger.getTableDefinition(), - DEFAULT_PROCESSS_INFO_LOG_SIZE); + log, new ProcessInfoLogLogger(), ProcessInfoLogLogger.getTableDefinition(), + DEFAULT_PROCESSS_INFO_LOG_SIZE); new ProcessInfoStoreDBImpl(pInfoLogger.getTableLogger()).put(pInfo); } catch (IOException e) { log.fatal().append("Failed to configure process info: ").append(e.toString()).endl(); @@ -65,16 +63,14 @@ log, new ProcessInfoLogLogger(), ProcessInfoLogLogger.getTableDefinition(), processInfoLogger = pInfoLogger; final String pInfoId = pInfo.getId().value(); qplLogger = new MemoryTableLogger<>( - log, new QueryPerformanceLogLogger(pInfoId), - QueryPerformanceLogLogger.getTableDefinition()); + log, new QueryPerformanceLogLogger(pInfoId), QueryPerformanceLogLogger.getTableDefinition()); qoplLogger = new MemoryTableLogger<>( - log, new QueryOperationPerformanceLogLogger(pInfoId), - QueryOperationPerformanceLogLogger.getTableDefinition()); + log, new QueryOperationPerformanceLogLogger(pInfoId), + QueryOperationPerformanceLogLogger.getTableDefinition()); if (STATS_LOGGING_ENABLED) { processMetricsLogger 
= new MemoryTableLogger<>( - log, new ProcessMetricsLogLogger(), ProcessMetricsLogLogger.getTableDefinition()); - statsLogger = - new StatsIntradayLoggerDBImpl(pInfo.getId(), processMetricsLogger.getTableLogger()); + log, new ProcessMetricsLogLogger(), ProcessMetricsLogLogger.getTableDefinition()); + statsLogger = new StatsIntradayLoggerDBImpl(pInfo.getId(), processMetricsLogger.getTableLogger()); } else { processMetricsLogger = null; statsLogger = null; @@ -121,8 +117,7 @@ public static void maybeStartStatsCollection() { return; } final boolean fdStatsLoggingEnabled = Configuration.getInstance().getBooleanWithDefault( - "fdStatsLoggingEnabled", false); - Driver.start(new RealTimeClock(), MemoryTableLoggers.getInstance().getStatsLogger(), - fdStatsLoggingEnabled); + "fdStatsLoggingEnabled", false); + Driver.start(new RealTimeClock(), MemoryTableLoggers.getInstance().getStatsLogger(), fdStatsLoggingEnabled); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/MergeSortedHelper.java b/DB/src/main/java/io/deephaven/db/v2/utils/MergeSortedHelper.java index 6c443048cc8..b65abd7d4ac 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/MergeSortedHelper.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/MergeSortedHelper.java @@ -17,13 +17,13 @@ import java.util.*; /** - * Utility to take a set of tables, each of which is sorted; and merge them together into a single - * table, which will also be sorted. For now we do not support refreshing tables, just zipping - * together tables that are already sorted and will not tick. + * Utility to take a set of tables, each of which is sorted; and merge them together into a single table, which will + * also be sorted. For now we do not support refreshing tables, just zipping together tables that are already sorted and + * will not tick. * - * To handle ticking tables; the data structures would need to be changed, we're storing everything - * in parallel arrays and to tick we would need to shift those around. 
Handling append only could - * work; but there would be a lot of shifting if the data arrives particularly out of order. + * To handle ticking tables; the data structures would need to be changed, we're storing everything in parallel arrays + * and to tick we would need to shift those around. Handling append only could work; but there would be a lot of + * shifting if the data arrives particularly out of order. */ public class MergeSortedHelper { private static class TableCursor implements Comparable { @@ -94,29 +94,25 @@ public static Table mergeSortedHelper(String keyColumn, Collection
    tables int tableIndex = 0; for (Table table : tables) { if (!(table instanceof BaseTable)) { - throw new UnsupportedOperationException( - "Can not perform mergeSorted unless you pass in a BaseTable!"); + throw new UnsupportedOperationException("Can not perform mergeSorted unless you pass in a BaseTable!"); } if (((BaseTable) table).isRefreshing()) { - throw new UnsupportedOperationException( - "mergeSorted does not yet support refreshing tables!"); + throw new UnsupportedOperationException("mergeSorted does not yet support refreshing tables!"); } if (tableIndex == 0) { - for (Map.Entry entry : table.getColumnSourceMap() - .entrySet()) { + for (Map.Entry entry : table.getColumnSourceMap().entrySet()) { // noinspection unchecked columnSources.put(entry.getKey(), - new SortedMergeColumnSource(tableList, indexList, entry.getValue())); + new SortedMergeColumnSource(tableList, indexList, entry.getValue())); } } else { if (!table.getColumnSourceMap().keySet().equals(columnSources.keySet())) { - throw new RuntimeException("Incompatible column sources: " - + Arrays.toString(columnSources.keySet().toArray()) + " and " - + Arrays.toString(table.getColumnSourceMap().keySet().toArray())); + throw new RuntimeException( + "Incompatible column sources: " + Arrays.toString(columnSources.keySet().toArray()) + + " and " + Arrays.toString(table.getColumnSourceMap().keySet().toArray())); } - for (Map.Entry entry : table.getColumnSourceMap() - .entrySet()) { + for (Map.Entry entry : table.getColumnSourceMap().entrySet()) { // noinspection unchecked columnSources.get(entry.getKey()).addSource(entry.getValue()); } @@ -155,7 +151,7 @@ public Class getComponentType() { } public SortedMergeColumnSource(TIntArrayList tableIndex, TLongArrayList columnIndex, - ColumnSource firstSource) { + ColumnSource firstSource) { super(firstSource.getType()); this.tableIndex = tableIndex; this.columnIndex = columnIndex; @@ -166,7 +162,7 @@ public SortedMergeColumnSource(TIntArrayList tableIndex, 
TLongArrayList columnIn void addSource(ColumnSource source) { innerSources.add(source); Require.eq(source.getType(), "source.getType()", innerSources.get(0).getType(), - "innerSources.get(0).getType()"); + "innerSources.get(0).getType()"); } @Override diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/MixedBuilder.java b/DB/src/main/java/io/deephaven/db/v2/utils/MixedBuilder.java index 3929a0d4a9b..80c4dac08a8 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/MixedBuilder.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/MixedBuilder.java @@ -8,12 +8,10 @@ public class MixedBuilder implements TreeIndexImpl.RandomBuilder { protected RangePriorityQueueBuilder pqb; private TreeIndexImpl accumIndex; - private static final int pqSizeThreshold = - Configuration.getInstance().getIntegerForClassWithDefault( + private static final int pqSizeThreshold = Configuration.getInstance().getIntegerForClassWithDefault( MixedBuilder.class, "pqSizeThreshold", 2 * 1024 * 1024); - private static final int addAsIndexThreshold = - Configuration.getInstance().getIntegerForClassWithDefault( + private static final int addAsIndexThreshold = Configuration.getInstance().getIntegerForClassWithDefault( MixedBuilder.class, "addAsIndexThreshold", 64 * 1024); public MixedBuilder(final int pqInitialCapacity) { diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/OrderedKeys.java b/DB/src/main/java/io/deephaven/db/v2/utils/OrderedKeys.java index 3f554e32ba6..09e577207d6 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/OrderedKeys.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/OrderedKeys.java @@ -24,8 +24,7 @@ public interface OrderedKeys extends SafeCloseable, LongSizedDataStructure { * @param longChunk A chunk to wrap as a new OrderedKeys object. * @return A new OrderedKeys object, who does not own the passed chunk. 
*/ - static OrderedKeys wrapKeyIndicesChunkAsOrderedKeys( - final LongChunk longChunk) { + static OrderedKeys wrapKeyIndicesChunkAsOrderedKeys(final LongChunk longChunk) { return OrderedKeysKeyIndicesChunkImpl.makeByWrapping(longChunk); } @@ -35,8 +34,7 @@ static OrderedKeys wrapKeyIndicesChunkAsOrderedKeys( * @param longChunk A chunk to wrap as a new OrderedKeys object. * @return A new OrderedKeys object, who does not own the passed chunk. */ - static OrderedKeys wrapKeyRangesChunkAsOrderedKeys( - final LongChunk longChunk) { + static OrderedKeys wrapKeyRangesChunkAsOrderedKeys(final LongChunk longChunk) { return OrderedKeysKeyRangesChunkImpl.makeByWrapping(longChunk); } @@ -46,8 +44,7 @@ static OrderedKeys wrapKeyRangesChunkAsOrderedKeys( * @param longChunk The input chunk. The returned object will take ownership of this chunk. * @return A new OrderedKeys object, who owns the passed chunk. */ - static OrderedKeys takeKeyIndicesChunkAndMakeOrderedKeys( - final WritableLongChunk longChunk) { + static OrderedKeys takeKeyIndicesChunkAndMakeOrderedKeys(final WritableLongChunk longChunk) { return OrderedKeysKeyIndicesChunkImpl.makeByTaking(longChunk); } @@ -57,8 +54,7 @@ static OrderedKeys takeKeyIndicesChunkAndMakeOrderedKeys( * @param longChunk The input chunk. The returned object will take ownership of this chunk. * @return A new OrderedKeys object, who owns the passed chunk. */ - static OrderedKeys takeKeyRangesChunkAndMakeOrderedKeys( - final WritableLongChunk longChunk) { + static OrderedKeys takeKeyRangesChunkAndMakeOrderedKeys(final WritableLongChunk longChunk) { return OrderedKeysKeyRangesChunkImpl.makeByTaking(longChunk); } @@ -71,12 +67,11 @@ static OrderedKeys takeKeyRangesChunkAndMakeOrderedKeys( /** *

    - * Get an ordered subset of the keys in this {@code OrderedKeys} for a position range. The - * result will contain the set of keys in {@code this} that lie at positions in the half-open - * range [{@code startPositionInclusive}, {@code startPositionInclusive + length}). + * Get an ordered subset of the keys in this {@code OrderedKeys} for a position range. The result will contain the + * set of keys in {@code this} that lie at positions in the half-open range [{@code startPositionInclusive}, + * {@code startPositionInclusive + length}). * - * The returned reference is owned by the caller, who should call {@code close()} when it is - * done with it. + * The returned reference is owned by the caller, who should call {@code close()} when it is done with it. * * @param startPositionInclusive The position of the first key to include * @param length The number of keys to include @@ -86,12 +81,11 @@ static OrderedKeys takeKeyRangesChunkAndMakeOrderedKeys( /** *

    - * Get an ordered subset of the keys in this {@code OrderedKeys} for a key range. The returned - * set will be the intersection of the keys in {@code this} with the keys in the closed interval - * [{@code startKeyInclusive}, {@code endKeyInclusive}]. + * Get an ordered subset of the keys in this {@code OrderedKeys} for a key range. The returned set will be the + * intersection of the keys in {@code this} with the keys in the closed interval [{@code startKeyInclusive}, + * {@code endKeyInclusive}]. * - * The returned reference is owned by the caller, who should call {@code close()} when it is - * done with it. + * The returned reference is owned by the caller, who should call {@code close()} when it is done with it. * * @param startKeyInclusive The minimum key to include * @param endKeyInclusive The maximum key to include @@ -112,8 +106,8 @@ static OrderedKeys takeKeyRangesChunkAndMakeOrderedKeys( * Get a {@link LongChunk} representation of the individual keys in this {@code OrderedKeys}. * * @return A {@link LongChunk} containing the keys in this {@code OrderedKeys} - * @apiNote This {@code OrderedKeys} owns the result, which is valid only as long as this - * {@code OrderedKeys} remains valid. + * @apiNote This {@code OrderedKeys} owns the result, which is valid only as long as this {@code OrderedKeys} + * remains valid. * @apiNote You must not mutate the result. */ LongChunk asKeyIndicesChunk(); @@ -122,16 +116,15 @@ static OrderedKeys takeKeyRangesChunkAndMakeOrderedKeys( * Get a {@link LongChunk} representation of key ranges in this {@code OrderedKeys}. * * @return A {@link LongChunk} containing the key ranges in this {@code OrderedKeys} - * @apiNote This {@code OrderedKeys} owns the result, which is valid only as long as this - * {@code OrderedKeys} remains valid. + * @apiNote This {@code OrderedKeys} owns the result, which is valid only as long as this {@code OrderedKeys} + * remains valid. * @apiNote You must not mutate the result. 
*/ LongChunk asKeyRangesChunk(); /** *

    - * Fill the supplied {@link WritableLongChunk} with individual keys from this - * {@code OrderedKeys}. + * Fill the supplied {@link WritableLongChunk} with individual keys from this {@code OrderedKeys}. *

    * The chunk's capacity is assumed to be big enough. * @@ -186,22 +179,19 @@ default boolean isContiguous() { /** *

    - * Get an estimate of the average (mean) length of runs of adjacent keys in this - * {@code OrderedKeys}. + * Get an estimate of the average (mean) length of runs of adjacent keys in this {@code OrderedKeys}. *

    - * Implementations should strive to keep this method efficient (O(1) preferred) at the - * expense of accuracy. + * Implementations should strive to keep this method efficient (O(1) preferred) at the expense of accuracy. *

    * Empty {@code OrderedKeys} should return an arbitrary valid value, usually 1. * - * @return An estimate of the average run length in this {@code OrderedKeys}, in [1, - * {@code size()}] + * @return An estimate of the average run length in this {@code OrderedKeys}, in [1, {@code size()}] */ long getAverageRunLengthEstimate(); /** - * For as long as the consumer wants more keys, call accept on the consumer with the individual - * key instances in this OrderedKeys, in increasing order. + * For as long as the consumer wants more keys, call accept on the consumer with the individual key instances in + * this OrderedKeys, in increasing order. * * @param lac a consumer to feed the individual key values to. * @return false if the consumer provided ever returned false, true otherwise. @@ -209,8 +199,8 @@ default boolean isContiguous() { boolean forEachLong(LongAbortableConsumer lac); /** - * For as long as the consumer wants more ranges, call accept on the consumer with the - * individual key ranges in this OrderedKeys, in increasing order. + * For as long as the consumer wants more ranges, call accept on the consumer with the individual key ranges in this + * OrderedKeys, in increasing order. * * @param larc a consumer to feed the individual key values to. * @return false if the consumer provided ever returned false, true otherwise. @@ -235,8 +225,8 @@ default void forAllLongRanges(LongRangeConsumer lrc) { *

    * Free any resources associated with this object. *

    - * Using any {@code OrderedKeys} methods after {@code close()} is an error and may produce - * exceptions or undefined results. + * Using any {@code OrderedKeys} methods after {@code close()} is an error and may produce exceptions or undefined + * results. */ default void close() {} @@ -259,64 +249,57 @@ interface Iterator extends SafeCloseable { boolean hasMore(); /** - * Peek at the next key that would be returned by {@link #getNextOrderedKeysThrough(long)} - * or {@link #getNextOrderedKeysWithLength(long)}. Does not advance the position. + * Peek at the next key that would be returned by {@link #getNextOrderedKeysThrough(long)} or + * {@link #getNextOrderedKeysWithLength(long)}. Does not advance the position. * - * @return The next key that would be returned, or {@link Index#NULL_KEY} if this iterator - * is exhausted + * @return The next key that would be returned, or {@link Index#NULL_KEY} if this iterator is exhausted */ long peekNextKey(); /** - * Get an {@code OrderedKeys} from the key at the position of this iterator up to the - * maximum key (inclusive). Advances the position of this iterator by the size of the - * result. If the maximum key provided is smaller than the next key (as would be returned by - * {@link #peekNextKey()}), the empty OrderedKeys is returned. + * Get an {@code OrderedKeys} from the key at the position of this iterator up to the maximum key (inclusive). + * Advances the position of this iterator by the size of the result. If the maximum key provided is smaller than + * the next key (as would be returned by {@link #peekNextKey()}), the empty OrderedKeys is returned. * - * The returned OrderedKeys object is only borrowed by the caller from the {@link Iterator}, - * who owns it. It is guaranteed to be valid and not change only until a later call to - * another {@code getNext*} method. As the returned reference is owned by the - * {@link Iterator}, the caller should not call {@code close()} on it. 
+ * The returned OrderedKeys object is only borrowed by the caller from the {@link Iterator}, who owns it. It is + * guaranteed to be valid and not change only until a later call to another {@code getNext*} method. As the + * returned reference is owned by the {@link Iterator}, the caller should not call {@code close()} on it. * * @param maxKeyInclusive The maximum key to include. - * @return An {@code OrderedKeys} from the key at the initial position up to the maximum key - * (inclusive). + * @return An {@code OrderedKeys} from the key at the initial position up to the maximum key (inclusive). */ OrderedKeys getNextOrderedKeysThrough(long maxKeyInclusive); /** - * Get an {@code OrderedKeys} from the key at the position of this iterator up to the - * desired number of keys. Advances the position of this iterator by the size of the result. + * Get an {@code OrderedKeys} from the key at the position of this iterator up to the desired number of keys. + * Advances the position of this iterator by the size of the result. * - * The returned OrderedKeys object is only borrowed by the caller from the {@link Iterator}, - * who owns it. It is guaranteed to be valid and not change only until the next call to - * another {@code getNext*} method. As the returned reference is owned by the - * {@link Iterator}, the caller should not call {@code close()} on it. + * The returned OrderedKeys object is only borrowed by the caller from the {@link Iterator}, who owns it. It is + * guaranteed to be valid and not change only until the next call to another {@code getNext*} method. As the + * returned reference is owned by the {@link Iterator}, the caller should not call {@code close()} on it. 
* * @param numberOfKeys The desired number of keys - * @return An {@code OrderedKeys} from the key at the initial position up to the desired - * number of keys + * @return An {@code OrderedKeys} from the key at the initial position up to the desired number of keys */ OrderedKeys getNextOrderedKeysWithLength(long numberOfKeys); /** *

    - * Advance this iterator's position to {@code nextKey}, or to the first present key greater - * than {@code nextKey} if {@code nextKey} is not found. If {@code nextKey} is less than or - * equal to the key at this iterator's current position, this method is a no-op. + * Advance this iterator's position to {@code nextKey}, or to the first present key greater than {@code nextKey} + * if {@code nextKey} is not found. If {@code nextKey} is less than or equal to the key at this iterator's + * current position, this method is a no-op. *

    * Subsequent calls to {@link #peekNextKey()}, {@link #getNextOrderedKeysThrough(long)}, or * {@link #getNextOrderedKeysWithLength(long)} will begin with the key advanced to. * * @param nextKey The key to advance to - * @return true If there are any keys remaining to be iterated after the advance, false if - * this {@link Iterator} is exhausted + * @return true If there are any keys remaining to be iterated after the advance, false if this {@link Iterator} + * is exhausted */ boolean advance(long nextKey); /** - * Advance this iterator's position as in {@link #advance(long)}, returning the number of - * keys thus consumed. + * Advance this iterator's position as in {@link #advance(long)}, returning the number of keys thus consumed. * * @param nextKey The key to advance to * @return The number of keys consumed from the iterator @@ -331,22 +314,20 @@ default long advanceAndGetPositionDistance(final long nextKey) { *

    * Free any resources associated with this iterator. *

    - * Callers of {@link OrderedKeys#getOrderedKeysIterator()} are responsible for ensuring that - * {@code close()} is called when they are done with resulting {@link Iterator}. + * Callers of {@link OrderedKeys#getOrderedKeysIterator()} are responsible for ensuring that {@code close()} is + * called when they are done with resulting {@link Iterator}. *

    - * Using any {@link Iterator} methods after {@code close()} is an error and may produce - * exceptions or undefined results. + * Using any {@link Iterator} methods after {@code close()} is an error and may produce exceptions or undefined + * results. */ default void close() {} /** - * Taking the difference between values returned by this method at different positions in - * the iterator gives you the cardinality of the set of keys between them, exclusive. Note a - * single value itself is not meaningful; like measuring elapsed time, it only makes sense - * to take the difference from absolute points. + * Taking the difference between values returned by this method at different positions in the iterator gives you + * the cardinality of the set of keys between them, exclusive. Note a single value itself is not meaningful; + * like measuring elapsed time, it only makes sense to take the difference from absolute points. * - * @return A relative position offset from some arbitrary initial point in the underlying - * ordered keys. + * @return A relative position offset from some arbitrary initial point in the underlying ordered keys. 
*/ long getRelativePosition(); diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/OrderedKeysAsChunkImpl.java b/DB/src/main/java/io/deephaven/db/v2/utils/OrderedKeysAsChunkImpl.java index 6d859c0043b..dd7c4a1ec2e 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/OrderedKeysAsChunkImpl.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/OrderedKeysAsChunkImpl.java @@ -35,7 +35,7 @@ private int sizeForRangesChunk() { private void makeKeyRangesChunk(final int size) { final WritableLongChunk chunk = - WritableLongChunk.makeWritableChunk(size); + WritableLongChunk.makeWritableChunk(size); keyRangesChunk = chunk; } diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/OrderedKeysKeyIndicesChunkImpl.java b/DB/src/main/java/io/deephaven/db/v2/utils/OrderedKeysKeyIndicesChunkImpl.java index 4a0d4bd5d71..81a8c9b3130 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/OrderedKeysKeyIndicesChunkImpl.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/OrderedKeysKeyIndicesChunkImpl.java @@ -23,18 +23,15 @@ private OrderedKeysKeyIndicesChunkImpl(final LongChunk backin this.toReleaseChunk = null; } - static OrderedKeysKeyIndicesChunkImpl makeByWrapping( - final LongChunk backingChunk) { + static OrderedKeysKeyIndicesChunkImpl makeByWrapping(final LongChunk backingChunk) { return new OrderedKeysKeyIndicesChunkImpl(backingChunk); } - private OrderedKeysKeyIndicesChunkImpl( - final WritableLongChunk backingChunk) { + private OrderedKeysKeyIndicesChunkImpl(final WritableLongChunk backingChunk) { this.backingChunk = this.toReleaseChunk = backingChunk; } - static OrderedKeysKeyIndicesChunkImpl makeByTaking( - final WritableLongChunk backingChunkToOwn) { + static OrderedKeysKeyIndicesChunkImpl makeByTaking(final WritableLongChunk backingChunkToOwn) { return new OrderedKeysKeyIndicesChunkImpl(backingChunkToOwn); } @@ -74,7 +71,7 @@ public final OrderedKeys getNextOrderedKeysThrough(final long maxKey) { return OrderedKeys.EMPTY; } pendingClose = - new 
OrderedKeysKeyIndicesChunkImpl(backingChunk.slice(iteratorOffset, newLen)); + new OrderedKeysKeyIndicesChunkImpl(backingChunk.slice(iteratorOffset, newLen)); iteratorOffset = newEndOffset; return pendingClose; } @@ -82,13 +79,12 @@ public final OrderedKeys getNextOrderedKeysThrough(final long maxKey) { @Override public final OrderedKeys getNextOrderedKeysWithLength(final long numberOfKeys) { tryClosePendingClose(); - final int newLen = - Math.toIntExact(Math.min(numberOfKeys, backingChunk.size() - iteratorOffset)); + final int newLen = Math.toIntExact(Math.min(numberOfKeys, backingChunk.size() - iteratorOffset)); if (newLen == 0) { return OrderedKeys.EMPTY; } pendingClose = - new OrderedKeysKeyIndicesChunkImpl(backingChunk.slice(iteratorOffset, newLen)); + new OrderedKeysKeyIndicesChunkImpl(backingChunk.slice(iteratorOffset, newLen)); iteratorOffset += newLen; return pendingClose; } @@ -111,10 +107,8 @@ public final OrderedKeys.Iterator getOrderedKeysIterator() { } @Override - public final OrderedKeys getOrderedKeysByPosition(final long startPositionInclusive, - final long length) { - final int newStartOffset = - Math.toIntExact(Math.min(backingChunk.size(), startPositionInclusive)); + public final OrderedKeys getOrderedKeysByPosition(final long startPositionInclusive, final long length) { + final int newStartOffset = Math.toIntExact(Math.min(backingChunk.size(), startPositionInclusive)); final int newLen = Math.toIntExact(Math.min(backingChunk.size() - newStartOffset, length)); if (newLen == 0) { return OrderedKeys.EMPTY; @@ -123,8 +117,7 @@ public final OrderedKeys getOrderedKeysByPosition(final long startPositionInclus } @Override - public final OrderedKeys getOrderedKeysByKeyRange(final long startKeyInclusive, - final long endKeyInclusive) { + public final OrderedKeys getOrderedKeysByKeyRange(final long startKeyInclusive, final long endKeyInclusive) { final int newStartOffset = findLowerBoundOfKey(startKeyInclusive, 0); final int newLen = 
findFirstIndexAfterKey(endKeyInclusive, newStartOffset) - newStartOffset; if (newLen == 0) { @@ -162,8 +155,7 @@ public final LongChunk asKeyRangesChunk() { } @Override - public final void fillKeyIndicesChunk( - final WritableLongChunk chunkToFill) { + public final void fillKeyIndicesChunk(final WritableLongChunk chunkToFill) { final int newSize = Math.toIntExact(size()); // noinspection unchecked backingChunk.copyToChunk(0, (WritableLongChunk) chunkToFill, 0, newSize); diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/OrderedKeysKeyRangesChunkImpl.java b/DB/src/main/java/io/deephaven/db/v2/utils/OrderedKeysKeyRangesChunkImpl.java index 1f011d78e1b..1f6f9d80f8d 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/OrderedKeysKeyRangesChunkImpl.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/OrderedKeysKeyRangesChunkImpl.java @@ -25,7 +25,7 @@ public class OrderedKeysKeyRangesChunkImpl implements OrderedKeys { private long cachedSize = 0; private OrderedKeysKeyRangesChunkImpl(final LongChunk backingChunk, - final WritableLongChunk toReleaseChunk) { + final WritableLongChunk toReleaseChunk) { this.minKeyValue = 0; this.maxKeyValue = Long.MAX_VALUE; this.backingChunk = backingChunk; @@ -33,17 +33,15 @@ private OrderedKeysKeyRangesChunkImpl(final LongChunk backingC if (backingChunk.size() % 2 != 0) { throw new IllegalArgumentException("the backingChunk.size() must be a multiple of two (" - + backingChunk.size() + " % 2 != 0)"); + + backingChunk.size() + " % 2 != 0)"); } } - private OrderedKeysKeyRangesChunkImpl( - final WritableLongChunk backingChunkToOwn) { + private OrderedKeysKeyRangesChunkImpl(final WritableLongChunk backingChunkToOwn) { this(backingChunkToOwn, backingChunkToOwn); } - static OrderedKeysKeyRangesChunkImpl makeByTaking( - final WritableLongChunk backingChunkToOwn) { + static OrderedKeysKeyRangesChunkImpl makeByTaking(final WritableLongChunk backingChunkToOwn) { return new OrderedKeysKeyRangesChunkImpl(backingChunkToOwn); } @@ -51,15 +49,14 
@@ private OrderedKeysKeyRangesChunkImpl(final LongChunk backingC this(backingChunk, null); } - static OrderedKeysKeyRangesChunkImpl makeByWrapping( - final LongChunk backingChunk) { + static OrderedKeysKeyRangesChunkImpl makeByWrapping(final LongChunk backingChunk) { return new OrderedKeysKeyRangesChunkImpl(backingChunk); } private OrderedKeysKeyRangesChunkImpl(final LongChunk backingChunk, - final WritableLongChunk toReleaseChunk, - final long minKeyValue, - final long maxKeyValue) { + final WritableLongChunk toReleaseChunk, + final long minKeyValue, + final long maxKeyValue) { this.minKeyValue = minKeyValue; this.maxKeyValue = maxKeyValue; this.backingChunk = backingChunk; @@ -67,35 +64,31 @@ private OrderedKeysKeyRangesChunkImpl(final LongChunk backingC if (backingChunk.size() % 2 != 0) { throw new IllegalArgumentException("the backingChunk.size() must be a multiple of two (" - + backingChunk.size() + " % 2 != 0)"); + + backingChunk.size() + " % 2 != 0)"); } if (backingChunk.size() > 0) { if (this.minKeyValue > backingChunk.get(1)) { - throw new IllegalArgumentException( - "minKeyValue is only allowed to apply to first range in chunk (" + throw new IllegalArgumentException("minKeyValue is only allowed to apply to first range in chunk (" + this.minKeyValue + " is > " + backingChunk.get(1) + ")"); } if (this.maxKeyValue < backingChunk.get(backingChunk.size() - 2)) { - throw new IllegalArgumentException( - "maxKeyValue is only allowed to apply to last range in chunk (" - + this.maxKeyValue + " is < " + backingChunk.get(backingChunk.size() - 2) - + ")"); + throw new IllegalArgumentException("maxKeyValue is only allowed to apply to last range in chunk (" + + this.maxKeyValue + " is < " + backingChunk.get(backingChunk.size() - 2) + ")"); } } } private OrderedKeysKeyRangesChunkImpl(final LongChunk backingChunk, - final long minKeyValue, - final long maxKeyValue) { + final long minKeyValue, + final long maxKeyValue) { this(backingChunk, null, minKeyValue, maxKeyValue); 
} - private OrderedKeysKeyRangesChunkImpl( - final WritableLongChunk backingChunkToOwn, - final long minKeyValue, - final long maxKeyValue) { + private OrderedKeysKeyRangesChunkImpl(final WritableLongChunk backingChunkToOwn, + final long minKeyValue, + final long maxKeyValue) { this(backingChunkToOwn, backingChunkToOwn, minKeyValue, maxKeyValue); } @@ -105,8 +98,7 @@ private class OffsetHelper { public long currKeyValue = Math.max(backingChunk.get(offset), minKeyValue); /** - * Advances {@code offset} and {@code currKeyValue} to the new values after skipping - * {@code numberOfKeys} items. + * Advances {@code offset} and {@code currKeyValue} to the new values after skipping {@code numberOfKeys} items. * * @param numberOfKeys the number of items to skip * @return true iff we haven't fallen off the end of the container @@ -175,8 +167,7 @@ public OrderedKeys getNextOrderedKeysThrough(long maxKeyInclusive) { } // include this range if our maxKey is in it int newEndOffset = helper.offset; - if (newEndOffset < backingChunk.size() - && backingChunk.get(newEndOffset) <= maxKeyInclusive) { + if (newEndOffset < backingChunk.size() && backingChunk.get(newEndOffset) <= maxKeyInclusive) { newEndOffset += 2; } final int newLen = newEndOffset - newStartOffset; @@ -186,7 +177,7 @@ public OrderedKeys getNextOrderedKeysThrough(long maxKeyInclusive) { } pendingClose = new OrderedKeysKeyRangesChunkImpl( - backingChunk.slice(newStartOffset, newLen), newMinKeyValue, maxKeyInclusive); + backingChunk.slice(newStartOffset, newLen), newMinKeyValue, maxKeyInclusive); return pendingClose; } @@ -209,15 +200,14 @@ public OrderedKeys getNextOrderedKeysWithLength(long numberOfKeys) { } pendingClose = new OrderedKeysKeyRangesChunkImpl( - backingChunk.slice(newStartOffset, newLen), newMinKeyValue, newMaxKeyValue); + backingChunk.slice(newStartOffset, newLen), newMinKeyValue, newMaxKeyValue); return pendingClose; } @Override public boolean advance(long nextKey) { nextKey = 
Math.max(helper.currKeyValue, nextKey); - final int newEndOffset = - OrderedChunkUtils.findInChunk(backingChunk, nextKey, helper.offset, + final int newEndOffset = OrderedChunkUtils.findInChunk(backingChunk, nextKey, helper.offset, backingChunk.size()); helper.offset = newEndOffset - (newEndOffset % 2); boolean hasMore = helper.offset < backingChunk.size(); @@ -268,8 +258,7 @@ public OrderedKeys.Iterator getOrderedKeysIterator() { } @Override - public OrderedKeys getOrderedKeysByPosition(final long startPositionInclusive, - final long length) { + public OrderedKeys getOrderedKeysByPosition(final long startPositionInclusive, final long length) { if (length <= 0) { return OrderedKeys.EMPTY; } @@ -290,9 +279,8 @@ public OrderedKeys getOrderedKeysByPosition(final long startPositionInclusive, return OrderedKeys.EMPTY; } - return new OrderedKeysKeyRangesChunkImpl(backingChunk.slice(newStartOffset, newLen), - newMinKeyValue, - newMaxKeyValue); + return new OrderedKeysKeyRangesChunkImpl(backingChunk.slice(newStartOffset, newLen), newMinKeyValue, + newMaxKeyValue); } @Override @@ -306,8 +294,7 @@ public OrderedKeys getOrderedKeysByKeyRange(long startKeyInclusive, long endKeyI int newEndOffset = OrderedChunkUtils.findInChunk(backingChunk, endKeyInclusive); newEndOffset += newEndOffset % 2; // include range if point at end // check if range begins with our inclusive key - if (newEndOffset < backingChunk.size() - && backingChunk.get(newEndOffset) == endKeyInclusive) { + if (newEndOffset < backingChunk.size() && backingChunk.get(newEndOffset) == endKeyInclusive) { newEndOffset += 2; } @@ -316,9 +303,8 @@ public OrderedKeys getOrderedKeysByKeyRange(long startKeyInclusive, long endKeyI return OrderedKeys.EMPTY; } - return new OrderedKeysKeyRangesChunkImpl(backingChunk.slice(newStartOffset, newLen), - startKeyInclusive, - endKeyInclusive); + return new OrderedKeysKeyRangesChunkImpl(backingChunk.slice(newStartOffset, newLen), startKeyInclusive, + endKeyInclusive); } @Override @@ 
-332,23 +318,20 @@ public Index asIndex() { final long chunkLast = backingChunk.get(backingChunk.size() - 1); final boolean specialStart = minKeyValue > chunkFirst; final boolean specialEnd = maxKeyValue < chunkLast; - builder.setDomain(specialStart ? minKeyValue : chunkFirst, - specialEnd ? maxKeyValue : chunkLast); + builder.setDomain(specialStart ? minKeyValue : chunkFirst, specialEnd ? maxKeyValue : chunkLast); if (specialStart || (specialEnd && backingChunk.size() == 2)) { - builder.appendRange(Math.max(minKeyValue, backingChunk.get(0)), - Math.min(maxKeyValue, backingChunk.get(1))); + builder.appendRange(Math.max(minKeyValue, backingChunk.get(0)), Math.min(maxKeyValue, backingChunk.get(1))); } final int startOffset = specialStart ? 2 : 0; - // note it me be true that innerLength < 0 if there is a single range and both min and max - // keys restrict + // note it me be true that innerLength < 0 if there is a single range and both min and max keys restrict final int innerLength = backingChunk.size() - startOffset - (specialEnd ? 
2 : 0); if (innerLength > 0) { builder.appendOrderedKeyRangesChunk(backingChunk.slice(startOffset, innerLength)); } if (specialEnd && backingChunk.size() > 2) { builder.appendRange(backingChunk.get(backingChunk.size() - 2), - Math.min(maxKeyValue, backingChunk.get(backingChunk.size() - 1))); + Math.min(maxKeyValue, backingChunk.get(backingChunk.size() - 1))); } return builder.getIndex(); } @@ -361,9 +344,8 @@ public LongChunk asKeyIndicesChunk() { if (asKeyIndicesChunk == null) { final long chunkSize = size(); if (chunkSize > LongChunk.MAXIMUM_SIZE) { - throw new SizeException( - "Cannot create LongChunk; too many values.", size(), - LongChunk.MAXIMUM_SIZE); + throw new SizeException("Cannot create LongChunk; too many values.", size(), + LongChunk.MAXIMUM_SIZE); } asKeyIndicesChunk = WritableLongChunk.makeWritableChunk(Math.toIntExact(chunkSize)); fillKeyIndicesChunk(asKeyIndicesChunk); @@ -381,7 +363,7 @@ public LongChunk asKeyRangesChunk() { backingChunk.copyToChunk(0, asKeyRangesChunk, 0, backingChunk.size()); asKeyRangesChunk.set(0, Math.max(minKeyValue, asKeyRangesChunk.get(0))); asKeyRangesChunk.set(backingChunk.size() - 1, - Math.min(maxKeyValue, asKeyRangesChunk.get(backingChunk.size() - 1))); + Math.min(maxKeyValue, asKeyRangesChunk.get(backingChunk.size() - 1))); } return asKeyRangesChunk; } @@ -412,8 +394,7 @@ public boolean isEmpty() { @Override public long firstKey() { - return backingChunk.size() == 0 ? Index.NULL_KEY - : Math.max(minKeyValue, backingChunk.get(0)); + return backingChunk.size() == 0 ? Index.NULL_KEY : Math.max(minKeyValue, backingChunk.get(0)); } @Override @@ -444,8 +425,7 @@ public long getAverageRunLengthEstimate() { return numRanges == 0 ? 
1 : Math.max(1, size() / numRanges); } - private boolean forEachInRange(final long start, final long endInclusive, - final LongAbortableConsumer lc) { + private boolean forEachInRange(final long start, final long endInclusive, final LongAbortableConsumer lc) { for (long v = start; v <= endInclusive; ++v) { if (!lc.accept(v)) { return false; @@ -463,14 +443,14 @@ public boolean forEachLong(final LongAbortableConsumer lc) { final long e0 = backingChunk.get(1); if (backingChunk.size() == 2) { return forEachInRange( - Math.max(minKeyValue, s0), - Math.min(maxKeyValue, e0), - lc); + Math.max(minKeyValue, s0), + Math.min(maxKeyValue, e0), + lc); } if (!forEachInRange( - Math.max(minKeyValue, s0), - e0, - lc)) { + Math.max(minKeyValue, s0), + e0, + lc)) { return false; } int i = 2; @@ -484,9 +464,9 @@ public boolean forEachLong(final LongAbortableConsumer lc) { final long s = backingChunk.get(i); final long e = backingChunk.get(i + 1); return forEachInRange( - s, - Math.min(maxKeyValue, e), - lc); + s, + Math.min(maxKeyValue, e), + lc); } @Override @@ -498,12 +478,12 @@ public boolean forEachLongRange(final LongRangeAbortableConsumer lrac) { final long e0 = backingChunk.get(1); if (backingChunk.size() == 2) { return lrac.accept( - Math.max(minKeyValue, s0), - Math.min(maxKeyValue, e0)); + Math.max(minKeyValue, s0), + Math.min(maxKeyValue, e0)); } if (!lrac.accept( - Math.max(minKeyValue, s0), - e0)) { + Math.max(minKeyValue, s0), + e0)) { return false; } int i = 2; @@ -517,8 +497,8 @@ public boolean forEachLongRange(final LongRangeAbortableConsumer lrac) { final long s = backingChunk.get(i); final long e = backingChunk.get(i + 1); return lrac.accept( - s, - Math.min(maxKeyValue, e)); + s, + Math.min(maxKeyValue, e)); } @Override diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/OutOfKeySpaceException.java b/DB/src/main/java/io/deephaven/db/v2/utils/OutOfKeySpaceException.java index 4c30de274b3..087f7ae82bf 100644 --- 
a/DB/src/main/java/io/deephaven/db/v2/utils/OutOfKeySpaceException.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/OutOfKeySpaceException.java @@ -3,9 +3,9 @@ import io.deephaven.UncheckedDeephavenException; /** - * Some operations can pre-compute the necessary key-space needed to correctly maintain the intended - * result. If the key-space exceeds Long.MAX_VALUE then it may throw this exception with additional - * details and suggestions to work-around this limit. + * Some operations can pre-compute the necessary key-space needed to correctly maintain the intended result. If the + * key-space exceeds Long.MAX_VALUE then it may throw this exception with additional details and suggestions to + * work-around this limit. */ public class OutOfKeySpaceException extends UncheckedDeephavenException { public OutOfKeySpaceException(String reason) { diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/PerformanceQueries.java b/DB/src/main/java/io/deephaven/db/v2/utils/PerformanceQueries.java index 6ac14d22178..863247d4e1f 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/PerformanceQueries.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/PerformanceQueries.java @@ -19,8 +19,8 @@ public class PerformanceQueries { *

    * *

    - * The query performance log contains data on how long each query takes to run. Examples of what - * constitutes one individual query, for performance logging purposes, include: + * The query performance log contains data on how long each query takes to run. Examples of what constitutes one + * individual query, for performance logging purposes, include: *

      *
    • A new command in the console (i.e. type something, then press the return key)
    • *
    • A sort, filter, or custom column generated by a UI
    • @@ -36,24 +36,23 @@ public static Table queryPerformance(final long evaluationNumber) { final long workerHeapSizeBytes = getWorkerHeapSizeBytes(); Table queryPerformanceLog = TableLoggers.queryPerformanceLog() - .where(whereConditionForEvaluationNumber(evaluationNumber)) - .updateView( - "WorkerHeapSize = " + workerHeapSizeBytes + "L", - "TimeSecs = nanosToMillis(EndTime - StartTime) / 1000d", // How long this query ran - // for, in seconds - "NetMemoryChange = FreeMemoryChange - TotalMemoryChange", - "QueryMemUsed = TotalMemoryUsed - TotalMemoryFree", // Memory in use by the query. - // (Only includes active heap - // memory.) - "QueryMemUsedPct = QueryMemUsed / WorkerHeapSize", // Memory usage as a percenage of - // max heap size (-Xmx) - "QueryMemFree = WorkerHeapSize - QueryMemUsed" // Remaining memory until the query - // runs into the max heap size - ) - .moveUpColumns( - "ProcessUniqueId", "EvaluationNumber", - "QueryMemUsed", "QueryMemFree", "QueryMemUsedPct", - "EndTime", "TimeSecs", "NetMemoryChange"); + .where(whereConditionForEvaluationNumber(evaluationNumber)) + .updateView( + "WorkerHeapSize = " + workerHeapSizeBytes + "L", + "TimeSecs = nanosToMillis(EndTime - StartTime) / 1000d", // How long this query ran for, in + // seconds + "NetMemoryChange = FreeMemoryChange - TotalMemoryChange", + "QueryMemUsed = TotalMemoryUsed - TotalMemoryFree", // Memory in use by the query. (Only + // includes active heap memory.) 
+ "QueryMemUsedPct = QueryMemUsed / WorkerHeapSize", // Memory usage as a percenage of max heap + // size (-Xmx) + "QueryMemFree = WorkerHeapSize - QueryMemUsed" // Remaining memory until the query runs into the + // max heap size + ) + .moveUpColumns( + "ProcessUniqueId", "EvaluationNumber", + "QueryMemUsed", "QueryMemFree", "QueryMemUsedPct", + "EndTime", "TimeSecs", "NetMemoryChange"); if (formatPctColumns) { queryPerformanceLog = formatColumnsAsPct(queryPerformanceLog, "QueryMemUsedPct"); } @@ -62,14 +61,13 @@ public static Table queryPerformance(final long evaluationNumber) { /** *

      - * Takes in a query id and returns a view for that query's individual operations's performance - * data. + * Takes in a query id and returns a view for that query's individual operations's performance data. *

      * *

      - * The query operation performance log contains data on how long each individual operation of a - * query (where(), update(), naturalJoin(), etc., as well as internal functions) takes to - * execute, and the change in resource consumption while each was executing. + * The query operation performance log contains data on how long each individual operation of a query (where(), + * update(), naturalJoin(), etc., as well as internal functions) takes to execute, and the change in resource + * consumption while each was executing. *

      * * @param evaluationNumber evaluation number @@ -78,16 +76,15 @@ public static Table queryPerformance(final long evaluationNumber) { @ScriptApi public static Table queryOperationPerformance(final long evaluationNumber) { final Table queryOps = TableLoggers.queryOperationPerformanceLog() - .where(whereConditionForEvaluationNumber(evaluationNumber)) - .updateView( - "TimeSecs = nanosToMillis(EndTime - StartTime) / 1000d", - "NetMemoryChange = FreeMemoryChange - TotalMemoryChange" // Change in memory usage - // delta while this query - // was executing - ) - .moveUpColumns( - "ProcessUniqueId", "EvaluationNumber", "OperationNumber", - "EndTime", "TimeSecs", "NetMemoryChange"); + .where(whereConditionForEvaluationNumber(evaluationNumber)) + .updateView( + "TimeSecs = nanosToMillis(EndTime - StartTime) / 1000d", + "NetMemoryChange = FreeMemoryChange - TotalMemoryChange" // Change in memory usage delta while + // this query was executing + ) + .moveUpColumns( + "ProcessUniqueId", "EvaluationNumber", "OperationNumber", + "EndTime", "TimeSecs", "NetMemoryChange"); return queryOps; } @@ -100,11 +97,10 @@ public static Table queryOperationPerformance(final long evaluationNumber) { * @param key key * @return process information */ - public static String processInfo(final String processInfoId, final String type, - final String key) { + public static String processInfo(final String processInfoId, final String type, final String key) { final Table processInfo = TableLoggers.processInfoLog() - .where("Id = `" + processInfoId + "`", "Type = `" + type + "`", "Key = `" + key + "`") - .select("Value"); + .where("Id = `" + processInfoId + "`", "Type = `" + type + "`", "Key = `" + key + "`") + .select("Value"); try { return (String) processInfo.getColumn(0).get(0); } catch (Exception e) { @@ -123,37 +119,29 @@ public static Table queryUpdatePerformance(final long evaluationNumber) { final String whereCondition = whereConditionForEvaluationNumber(evaluationNumber); final long 
workerHeapSizeBytes = getWorkerHeapSizeBytes(); Table queryUpdatePerformance = TableLoggers.updatePerformanceLog() - .where(whereCondition) - .updateView( - "WorkerHeapSize = " + workerHeapSizeBytes + "L", - "Ratio = EntryIntervalUsage / IntervalDurationNanos", // % of time during this - // interval that the operation - // was using CPU - "QueryMemUsed = TotalMemoryUsed - TotalMemoryFree", // Memory in use by the query. - // (Only includes active heap - // memory.) - "QueryMemUsedPct = QueryMemUsed / WorkerHeapSize", // Memory usage as a percenage of - // the max heap size (-Xmx) - "QueryMemFree = WorkerHeapSize - QueryMemUsed", // Remaining memory until the query - // runs into the max heap size - "NRows = EntryIntervalAdded + EntryIntervalRemoved + EntryIntervalModified", // Total - // number - // of - // changed - // rows - "RowsPerSec = round(NRows / IntervalDurationNanos * 1.0e9)", // Average rate data is - // ticking at - "RowsPerCPUSec = round(NRows / EntryIntervalUsage * 1.0e9)" // Approximation of how - // fast CPU handles row - // changes - ) - .moveUpColumns( - "ProcessUniqueId", "EvaluationNumber", "OperationNumber", - "Ratio", "QueryMemUsed", "QueryMemUsedPct", "IntervalEndTime", - "RowsPerSec", "RowsPerCPUSec", "EntryDescription"); + .where(whereCondition) + .updateView( + "WorkerHeapSize = " + workerHeapSizeBytes + "L", + "Ratio = EntryIntervalUsage / IntervalDurationNanos", // % of time during this interval that the + // operation was using CPU + "QueryMemUsed = TotalMemoryUsed - TotalMemoryFree", // Memory in use by the query. (Only + // includes active heap memory.) 
+ "QueryMemUsedPct = QueryMemUsed / WorkerHeapSize", // Memory usage as a percenage of the max + // heap size (-Xmx) + "QueryMemFree = WorkerHeapSize - QueryMemUsed", // Remaining memory until the query runs into + // the max heap size + "NRows = EntryIntervalAdded + EntryIntervalRemoved + EntryIntervalModified", // Total number of + // changed rows + "RowsPerSec = round(NRows / IntervalDurationNanos * 1.0e9)", // Average rate data is ticking at + "RowsPerCPUSec = round(NRows / EntryIntervalUsage * 1.0e9)" // Approximation of how fast CPU + // handles row changes + ) + .moveUpColumns( + "ProcessUniqueId", "EvaluationNumber", "OperationNumber", + "Ratio", "QueryMemUsed", "QueryMemUsedPct", "IntervalEndTime", + "RowsPerSec", "RowsPerCPUSec", "EntryDescription"); if (formatPctColumns) { - queryUpdatePerformance = - formatColumnsAsPct(queryUpdatePerformance, "Ratio", "QueryMemUsedPct"); + queryUpdatePerformance = formatColumnsAsPct(queryUpdatePerformance, "Ratio", "QueryMemUsedPct"); } return queryUpdatePerformance; } @@ -170,21 +158,21 @@ public static Map queryUpdatePerformanceMap(final long evaluation resultMap.put("QueryUpdatePerformance", qup); final Table worstInterval = qup - .by("IntervalStartTime", "IntervalDurationNanos") - .sort("IntervalDurationNanos") - .tail(1) - .ungroup() - .view("IntervalStartTime", - "IntervalEndTime", - "EntryId", - "EntryDescription", - "IntervalDurationNanos", - "Ratio", - "EntryIntervalUsage", - "EntryIntervalAdded", - "EntryIntervalRemoved", - "EntryIntervalModified", - "NRows"); + .by("IntervalStartTime", "IntervalDurationNanos") + .sort("IntervalDurationNanos") + .tail(1) + .ungroup() + .view("IntervalStartTime", + "IntervalEndTime", + "EntryId", + "EntryDescription", + "IntervalDurationNanos", + "Ratio", + "EntryIntervalUsage", + "EntryIntervalAdded", + "EntryIntervalRemoved", + "EntryIntervalModified", + "NRows"); resultMap.put("WorstInterval", worstInterval); @@ -192,24 +180,21 @@ public static Map 
queryUpdatePerformanceMap(final long evaluation final Table updateWorst = qup.sortDescending("Ratio"); resultMap.put("UpdateWorst", updateWorst); - // Create a table with updates from the most recent performance recording. interval at the - // top. (Within each + // Create a table with updates from the most recent performance recording. interval at the top. (Within each // interval, operations are still sorted with the greatest Ratio at the top.) - final Table updateMostRecent = - updateWorst.sortDescending("IntervalEndTime").moveUpColumns("IntervalEndTime"); + final Table updateMostRecent = updateWorst.sortDescending("IntervalEndTime").moveUpColumns("IntervalEndTime"); resultMap.put("UpdateMostRecent", updateMostRecent); // Create a table that summarizes the update performance data within each interval Table updateAggregate = qup.by( - AggCombo( - AggSum("NRows", "EntryIntervalUsage"), - AggFirst("QueryMemUsed", "WorkerHeapSize", "QueryMemUsedPct", - "IntervalDurationNanos")), - "IntervalStartTime", "IntervalEndTime", "ProcessUniqueId") - .updateView("Ratio = EntryIntervalUsage / IntervalDurationNanos") - .moveUpColumns("IntervalStartTime", "IntervalEndTime", "Ratio"); + AggCombo( + AggSum("NRows", "EntryIntervalUsage"), + AggFirst("QueryMemUsed", "WorkerHeapSize", "QueryMemUsedPct", "IntervalDurationNanos")), + "IntervalStartTime", "IntervalEndTime", "ProcessUniqueId") + .updateView("Ratio = EntryIntervalUsage / IntervalDurationNanos") + .moveUpColumns("IntervalStartTime", "IntervalEndTime", "Ratio"); if (formatPctColumns) { updateAggregate = formatColumnsAsPct(updateAggregate, "Ratio", "QueryMemUsedPct"); } @@ -217,16 +202,12 @@ public static Map queryUpdatePerformanceMap(final long evaluation final Table updateSummaryStats = updateAggregate.by( - AggCombo( - AggPct(0.99, "Ratio_99_Percentile = Ratio", - "QueryMemUsedPct_99_Percentile = QueryMemUsedPct"), - AggPct(0.90, "Ratio_90_Percentile = Ratio", - "QueryMemUsedPct_90_Percentile = QueryMemUsedPct"), - 
AggPct(0.75, "Ratio_75_Percentile = Ratio", - "QueryMemUsedPct_75_Percentile = QueryMemUsedPct"), - AggPct(0.50, "Ratio_50_Percentile = Ratio", - "QueryMemUsedPct_50_Percentile = QueryMemUsedPct"), - AggMax("Ratio_Max = Ratio", "QueryMemUsedPct_Max = QueryMemUsedPct"))); + AggCombo( + AggPct(0.99, "Ratio_99_Percentile = Ratio", "QueryMemUsedPct_99_Percentile = QueryMemUsedPct"), + AggPct(0.90, "Ratio_90_Percentile = Ratio", "QueryMemUsedPct_90_Percentile = QueryMemUsedPct"), + AggPct(0.75, "Ratio_75_Percentile = Ratio", "QueryMemUsedPct_75_Percentile = QueryMemUsedPct"), + AggPct(0.50, "Ratio_50_Percentile = Ratio", "QueryMemUsedPct_50_Percentile = QueryMemUsedPct"), + AggMax("Ratio_Max = Ratio", "QueryMemUsedPct_Max = QueryMemUsedPct"))); resultMap.put("UpdateSummaryStats", updateSummaryStats); return resultMap; @@ -241,8 +222,7 @@ private static Table formatColumnsAsPct(final Table t, final String... cols) { } private static long getWorkerHeapSizeBytes() { - final OptionalLong opt = - MemoryTableLoggers.getInstance().getProcessInfo().getMemoryInfo().heap().max(); + final OptionalLong opt = MemoryTableLoggers.getInstance().getProcessInfo().getMemoryInfo().heap().max(); return opt.orElse(0); } diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/RangePriorityQueueBuilder.java b/DB/src/main/java/io/deephaven/db/v2/utils/RangePriorityQueueBuilder.java index 82fd10400d3..531e06f3887 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/RangePriorityQueueBuilder.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/RangePriorityQueueBuilder.java @@ -9,22 +9,17 @@ /** * A RandomBuilder type that uses a priority queue of ranges. * - * Each range entered into the Index is stored in a priority queue, backed by two long arrays. One - * array contains the start elements, the second array contains the end elements. The priority - * function is the start element. + * Each range entered into the Index is stored in a priority queue, backed by two long arrays. 
One array contains the + * start elements, the second array contains the end elements. The priority function is the start element. * - * We may have many overlapping ranges in the priority queue; as an optimization, if two adjacent - * ranges are entered into the queue consecutively, the range is not stored in the queue more than - * once. + * We may have many overlapping ranges in the priority queue; as an optimization, if two adjacent ranges are entered + * into the queue consecutively, the range is not stored in the queue more than once. */ public class RangePriorityQueueBuilder { - private static final int doublingAllocThreshold = - Configuration.getInstance().getIntegerForClassWithDefault( + private static final int doublingAllocThreshold = Configuration.getInstance().getIntegerForClassWithDefault( MixedBuilder.class, "doublingAllocThreshold", 128 * 1024); - // Things are nicer (integer division will be bit shift) if this is a power of 2, but it is not - // mandatory. - private static final int linearAllocStep = - Configuration.getInstance().getIntegerForClassWithDefault( + // Things are nicer (integer division will be bit shift) if this is a power of 2, but it is not mandatory. + private static final int linearAllocStep = Configuration.getInstance().getIntegerForClassWithDefault( MixedBuilder.class, "linearAllocStep", 128 * 1024); /** The range start keys, slot 0 is unused. */ @@ -36,10 +31,9 @@ public class RangePriorityQueueBuilder { private int lastEntered = -1; /** - * The size of the queue (invariant: size < start.length - 1). Note since we don't use element 0 - * in start and end arrays, this size does not match the normal invariant in array access where - * the last element used is an array a[] is a[size - 1]; in our case the last element used is - * a[size]. + * The size of the queue (invariant: size < start.length - 1). 
Note since we don't use element 0 in start and end + * arrays, this size does not match the normal invariant in array access where the last element used is an array a[] + * is a[size - 1]; in our case the last element used is a[size]. */ private int size = 0; @@ -100,10 +94,9 @@ private void ensureCapacityFor(final int lastIndex) { */ private void enter(final long startKey, final long endKey) { if (lastEntered >= 1 && - endKey >= start[lastEntered] - 1 && - startKey <= end[lastEntered] + 1) { - // the endPosition is after the start position, and the start position is before the end - // position, + endKey >= start[lastEntered] - 1 && + startKey <= end[lastEntered] + 1) { + // the endPosition is after the start position, and the start position is before the end position, // so we overlap this range if (endKey > end[lastEntered]) { end[lastEntered] = endKey; @@ -233,8 +226,7 @@ private void fixDown(@SuppressWarnings("SameParameterValue") int itemIndex) { } } - private void populateSequentialBuilder( - final TreeIndexImpl.SequentialBuilder sequentialBuilder) { + private void populateSequentialBuilder(final TreeIndexImpl.SequentialBuilder sequentialBuilder) { long lastEnd = -1; while (!isEmpty()) { long firstKey = topStart(); @@ -255,8 +247,7 @@ private void populateSequentialBuilder( } private TreeIndexImpl getTreeIndexImplInternal() { - final TreeIndexImpl.SequentialBuilder sequentialBuilder = - new TreeIndexImplSequentialBuilder(); + final TreeIndexImpl.SequentialBuilder sequentialBuilder = new TreeIndexImplSequentialBuilder(); populateSequentialBuilder(sequentialBuilder); return sequentialBuilder.getTreeIndexImpl(); } diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/ReadOnlyIndex.java b/DB/src/main/java/io/deephaven/db/v2/utils/ReadOnlyIndex.java index 82a35ea0f25..a07e7288035 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/ReadOnlyIndex.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/ReadOnlyIndex.java @@ -44,9 +44,8 @@ public interface 
ReadOnlyIndex extends OrderedKeys, SafeCloseable { /** * Returns an Index with the positions of keys in this Index. * - * This can be thought of as an iterative find() over the values in keys, but all keys - * must exist within this index, because an Index result can not represent negative - * values. + * This can be thought of as an iterative find() over the values in keys, but all keys must exist + * within this index, because an Index result can not represent negative values. * * @param keys the keys to find positions for * @return a new Index containing the positions of the keys in this index @@ -54,12 +53,10 @@ public interface ReadOnlyIndex extends OrderedKeys, SafeCloseable { Index invert(ReadOnlyIndex keys); /** - * Returns the positions of keys in the current set as an Index, stopping at - * maximumPosition. + * Returns the positions of keys in the current set as an Index, stopping at maximumPosition. * - * This can be thought of as an iterative find() over the values in keys, but all keys - * must exist within this index, because an Index result can not represent negative - * values. + * This can be thought of as an iterative find() over the values in keys, but all keys must exist + * within this index, because an Index result can not represent negative values. 
* * @param keys the keys to find positions for * @param maximumPosition the largest position for which we will find a key @@ -68,9 +65,8 @@ public interface ReadOnlyIndex extends OrderedKeys, SafeCloseable { Index invert(ReadOnlyIndex keys, long maximumPosition); /** - * For the given keys Index, under the assertion that none of them are present in the current - * index, return the tentative insertion points in the current index with the count for each of - * them + * For the given keys Index, under the assertion that none of them are present in the current index, return the + * tentative insertion points in the current index with the count for each of them * * @param keys the keys to identify insertion locations * @return two TLongArrayLists; [0] contains the positions, [1] contains the counts. @@ -107,8 +103,8 @@ default boolean overlaps(@NotNull ReadOnlyIndex index) { boolean subsetOf(@NotNull ReadOnlyIndex other); /** - * Returns a new index representing the keys of the current set not present inside indexToRemove - * This operation is equivalent to set difference. This index is not modified. + * Returns a new index representing the keys of the current set not present inside indexToRemove This operation is + * equivalent to set difference. This index is not modified. */ Index minus(ReadOnlyIndex indexToRemove); @@ -132,9 +128,8 @@ default boolean overlaps(@NotNull ReadOnlyIndex index) { /** * Return a grouping that contains keys that match the values in keySet. * - * @param keys a set of values that keyColumns should match. For a single keyColumns, the values - * within the set are the values that we would like to find. For multiple keyColumns, the - * values are SmartKeys. + * @param keys a set of values that keyColumns should match. For a single keyColumns, the values within the set are + * the values that we would like to find. For multiple keyColumns, the values are SmartKeys. 
* @param tupleSource the tuple factory for the keyColumns * @return an Map from keys to Indices, for each of the keys in keySet and this Index. */ @@ -143,9 +138,8 @@ default boolean overlaps(@NotNull ReadOnlyIndex index) { /** * Return a subIndex that contains indices that match the values in keySet. * - * @param keySet a set of values that keyColumns should match. For a single keyColumns, the - * values within the set are the values that we would like to find. For multiple - * keyColumns, the values are SmartKeys. + * @param keySet a set of values that keyColumns should match. For a single keyColumns, the values within the set + * are the values that we would like to find. For multiple keyColumns, the values are SmartKeys. * @param tupleSource the tuple factory for the keyColumn * @return an Index containing only keys that match keySet. */ @@ -160,21 +154,19 @@ interface RangeIterator extends SafeCloseable { /** *

      - * Advance the current iterator position until {@code currentRangeStart()} and - * {@code currentRangeEnd()} are both greater than or equal to ‘v’. This may or may not move - * the iterator to the next range: if ‘v’ is inside the current range (but to the right of - * {@code currentRangeStart()}, this will simply advance {@code currentRangeStart()}. - * Returns true if the operation was successful. Otherwise, returns false. In this case the - * iteration is over and the iterator is exhausted (calls to {@code hasNext()} will return - * false, any other operation is undefined). + * Advance the current iterator position until {@code currentRangeStart()} and {@code currentRangeEnd()} are + * both greater than or equal to ‘v’. This may or may not move the iterator to the next range: if ‘v’ is inside + * the current range (but to the right of {@code currentRangeStart()}, this will simply advance + * {@code currentRangeStart()}. Returns true if the operation was successful. Otherwise, returns false. In this + * case the iteration is over and the iterator is exhausted (calls to {@code hasNext()} will return false, any + * other operation is undefined). *

      * *

      - * Although calls to {@code advance()} may be interleaved with calls to - * {@code hasNext()}/{@code next()} if necessary, this is not the common case, as they are - * separate protocols having little to do with each other. In particular, when iterating - * with {@code advance()}, you do not use next() to bring the next range into view, even at - * the start of the iteration. Many common usages only involve calls to advance(). + * Although calls to {@code advance()} may be interleaved with calls to {@code hasNext()}/{@code next()} if + * necessary, this is not the common case, as they are separate protocols having little to do with each other. + * In particular, when iterating with {@code advance()}, you do not use next() to bring the next range into + * view, even at the start of the iteration. Many common usages only involve calls to advance(). *

      * *

      @@ -205,11 +197,10 @@ interface RangeIterator extends SafeCloseable { boolean advance(long v); /** - * Given an iterator state with a current range of [start, end], and a value v such that - * start <= v <= end, postpone(v) makes the iterator current range [v, end]. This call is - * useful to code that may need to process parts of ranges from different call sites from - * the site iterator. The results of this call are undefined if the value provided is not - * contained in the current range. + * Given an iterator state with a current range of [start, end], and a value v such that start <= v <= end, + * postpone(v) makes the iterator current range [v, end]. This call is useful to code that may need to process + * parts of ranges from different call sites from the site iterator. The results of this call are undefined if + * the value provided is not contained in the current range. * * @param v A value contained in the current iterator range * @@ -268,10 +259,9 @@ interface TargetComparator { interface Iterator extends PrimitiveIterator.OfLong, SafeCloseable { /** - * Starting from the current next iterator position, provide each value to the consumer, - * until either the iterator is exhausted or a call to lc.accept returns false; ie, if the - * consumer returns false for a value, stops after that value (does not provide any values - * after that). + * Starting from the current next iterator position, provide each value to the consumer, until either the + * iterator is exhausted or a call to lc.accept returns false; ie, if the consumer returns false for a value, + * stops after that value (does not provide any values after that). * * @param lc the consumer. * @return false if the consumer ever returned false, true otherwise. @@ -291,13 +281,12 @@ default boolean forEachLong(final LongAbortableConsumer lc) { } /** - * Provide each value contained in this index, in increased sorted order to the consumer. 
If the - * consumer returns false for a key, stops after that key (does not provide any keys after that - * key). + * Provide each value contained in this index, in increased sorted order to the consumer. If the consumer returns + * false for a key, stops after that key (does not provide any keys after that key). * * @param lc the consumer. - * @return false if the consumer returned false at some point, true if the consumer always - * returned true and all values in the index were consumed. + * @return false if the consumer returned false at some point, true if the consumer always returned true and all + * values in the index were consumed. */ boolean forEachLong(LongAbortableConsumer lc); @@ -334,20 +323,17 @@ interface SearchIterator extends Iterator { /** *

      - * Advance the current iterator position until {@code currentValue()} is greater than or - * equal to ‘v’. The operation is a no-op (and returns true) if currentValue() is already >= - * 'v'. Returns true if the operation was successful. Otherwise, returns false. In this case - * the iteration is over and the iterator is exhausted; calls to {@code hasNext()} will - * return false, any other operation is undefined. + * Advance the current iterator position until {@code currentValue()} is greater than or equal to ‘v’. The + * operation is a no-op (and returns true) if currentValue() is already >= 'v'. Returns true if the operation + * was successful. Otherwise, returns false. In this case the iteration is over and the iterator is exhausted; + * calls to {@code hasNext()} will return false, any other operation is undefined. *

      * *

      - * Although calls to {@code advance()} may be interleaved with calls to - * {@code hasNext()}/{@code next()} if necessary, this is not the common case, as they are - * separate protocols having little to do with each other. In particular, when iterating - * with {@code advance()}, you do not use next() to bring the value you advanced to into - * view, even at the start of the iteration. Many common usages only involve calls to - * advance(). + * Although calls to {@code advance()} may be interleaved with calls to {@code hasNext()}/{@code next()} if + * necessary, this is not the common case, as they are separate protocols having little to do with each other. + * In particular, when iterating with {@code advance()}, you do not use next() to bring the value you advanced + * to into view, even at the start of the iteration. Many common usages only involve calls to advance(). *

      * * @param v a value to search forward from the current iterator position @@ -358,30 +344,27 @@ interface SearchIterator extends Iterator { /** *

      - * Advance the current iterator (start) position while the current value maintains - * comp.compareTargetTo(v, dir) > 0. If next to the last such value there is a value for - * which comp.compareTargetTo(v, dir) < 0, or no further values exist, then that last value - * satisfying comp,.compareTargetTo(v, dir) > 0 is left as the current position and - * returned. If there are any elements for which comp.compareTargetTo(v, dir) == 0, one of - * such elements, no guarantee which one, is left as the current position and returned. If - * at call entry the iterator was exhausted, -1 is returned. If at call entry the iterator - * was just constructed and had never been advanced, it is moved to the first element (which - * becomes the current value). If the current value v is such that comp.compareTargetTo(v, - * dir) < 0, -1 is returned and the current position is not moved. + * Advance the current iterator (start) position while the current value maintains comp.compareTargetTo(v, dir) + * > 0. If next to the last such value there is a value for which comp.compareTargetTo(v, dir) < 0, or no + * further values exist, then that last value satisfying comp,.compareTargetTo(v, dir) > 0 is left as the + * current position and returned. If there are any elements for which comp.compareTargetTo(v, dir) == 0, one of + * such elements, no guarantee which one, is left as the current position and returned. If at call entry the + * iterator was exhausted, -1 is returned. If at call entry the iterator was just constructed and had never been + * advanced, it is moved to the first element (which becomes the current value). If the current value v is such + * that comp.compareTargetTo(v, dir) < 0, -1 is returned and the current position is not moved. *

      * *

      - * Part of the contract of this method is that comp.compareTargetTo will only be called with - * values that are in the underlying container. + * Part of the contract of this method is that comp.compareTargetTo will only be called with values that are in + * the underlying container. *

      * * @param comp a comparator used to search forward from the current iterator position * @param dir a direction to search for comp, either +1 for forward or -1 for backward. - * @return -1 if the iterator was exhausted at entry or the target was to the left of the - * initial position at the time of the call, in which case the iterator is not - * changed; the resulting current position otherwise. In this later case the current - * position is guaranteed to satisfy comp.compareTargetTo(v, dir) >= 0 and if also - * comp.compareTargetTo(v, dir) > 0, then v is the biggest such value for which + * @return -1 if the iterator was exhausted at entry or the target was to the left of the initial position at + * the time of the call, in which case the iterator is not changed; the resulting current position + * otherwise. In this later case the current position is guaranteed to satisfy comp.compareTargetTo(v, + * dir) >= 0 and if also comp.compareTargetTo(v, dir) > 0, then v is the biggest such value for which * comp.compareTargetTo(v, dir) > 0. */ long binarySearchValue(TargetComparator comp, int dir); @@ -457,7 +440,7 @@ default Index subindexByPos(Index posIndex) { } iter.getNextOrderedKeysWithLength(end + 1 - currentOffset.longValue()) - .forAllLongRanges(builder::appendRange); + .forAllLongRanges(builder::appendRange); currentOffset.setValue(end + 1); return iter.hasMore(); }); @@ -491,22 +474,22 @@ default Index subindexByPos(Index posIndex) { long lastKeyPrev(); /** - * Returns the position in [0..(size-1)] where the key is found. If not found, then return - * (-(position it would be) - 1), a la Array.binarySearch. + * Returns the position in [0..(size-1)] where the key is found. If not found, then return (-(position it would be) + * - 1), a la Array.binarySearch. * * @param key the key to search for - * @return a position from [0..(size-1)] if the key was found. If the key was not found, then - * (-position - 1) as in Array.binarySearch. 
+ * @return a position from [0..(size-1)] if the key was found. If the key was not found, then (-position - 1) as in + * Array.binarySearch. */ long find(long key); /** - * Returns the position in [0..(size-1)] where the key is found in the previous index. If not - * found, then return (-(position it would be) - 1), as in Array.binarySearch. + * Returns the position in [0..(size-1)] where the key is found in the previous index. If not found, then return + * (-(position it would be) - 1), as in Array.binarySearch. * * @param key the key to search for - * @return a position from [0..(size-1)] if the key was found. If the key was not found, then - * (-position - 1) as in Array.binarySearch. + * @return a position from [0..(size-1)] if the key was found. If the key was not found, then (-position - 1) as in + * Array.binarySearch. */ long findPrev(long key); @@ -530,8 +513,8 @@ default Index subindexByPos(Index posIndex) { long size(); /** - * Returns whether or not this index is flat. Unlike a table, this is a mutable property; which - * may change from step to step. + * Returns whether or not this index is flat. Unlike a table, this is a mutable property; which may change from step + * to step. * * @return true if the index keys are continguous and start at zero. */ diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/RedirectionIndex.java b/DB/src/main/java/io/deephaven/db/v2/utils/RedirectionIndex.java index 5a7f58e3712..5828a8eb8cf 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/RedirectionIndex.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/RedirectionIndex.java @@ -18,22 +18,19 @@ import org.jetbrains.annotations.NotNull; /** - * A RedirectionIndex can be in one of two states: tracking prev values or not. The typical - * lifecycle looks like this: + * A RedirectionIndex can be in one of two states: tracking prev values or not. The typical lifecycle looks like this: *
        - *
      1. A RedirectionIndex is created with an initial map, but not tracking prev values. In this - * state, get() and getPrev() behave identically; put() and remove() affect current values but do no - * "prev value" tracking. - *
      2. Prev value tracking begins when the caller calls startTrackingPrevValues(). Immediately after - * this call, the data is logically "forked": getPrev() will still refer to the same set of entries - * as before; this set will be frozen until the end of the generation. - *
      3. Additionally, a terminal listener will be registered so that the prev map will be updated at + *
      4. A RedirectionIndex is created with an initial map, but not tracking prev values. In this state, get() and + * getPrev() behave identically; put() and remove() affect current values but do no "prev value" tracking. + *
      5. Prev value tracking begins when the caller calls startTrackingPrevValues(). Immediately after this call, the data + * is logically "forked": getPrev() will still refer to the same set of entries as before; this set will be frozen until * the end of the generation. - *
      6. Meanwhile, get(), put(), and remove() will logically refer to a fork of that map: it will - * initially have the same entries as prev, but it will diverge over time as the caller does put() - * and remove() operations. - *
      7. At the end of the generation (when the TerminalListener runs), the prev set is (logically) - * discarded, prev gets current, and current becomes the new fork of the map. + *
      8. Additionally, a terminal listener will be registered so that the prev map will be updated at the end of the + * generation. + *
      9. Meanwhile, get(), put(), and remove() will logically refer to a fork of that map: it will initially have the same + * entries as prev, but it will diverge over time as the caller does put() and remove() operations. + *
      10. At the end of the generation (when the TerminalListener runs), the prev set is (logically) discarded, prev gets + * current, and current becomes the new fork of the map. *
      */ public interface RedirectionIndex { @@ -48,8 +45,7 @@ interface FillContext extends Context { FillContext DEFAULT_FILL_INSTANCE = new FillContext() {}; - default FillContext makeFillContext(final int chunkCapacity, - final SharedContext sharedContext) { + default FillContext makeFillContext(final int chunkCapacity, final SharedContext sharedContext) { return DEFAULT_FILL_INSTANCE; } @@ -61,9 +57,9 @@ default FillContext makeFillContext(final int chunkCapacity, * @param keysToMap the keys to lookup in this redirection index */ default void fillChunk( - @NotNull final FillContext fillContext, - @NotNull final WritableLongChunk mappedKeysOut, - @NotNull final OrderedKeys keysToMap) { + @NotNull final FillContext fillContext, + @NotNull final WritableLongChunk mappedKeysOut, + @NotNull final OrderedKeys keysToMap) { // Assume that caller provided a chunk large enough to use. mappedKeysOut.setSize(0); keysToMap.forEachLong((final long k) -> { @@ -73,9 +69,9 @@ default void fillChunk( } default void fillChunkUnordered( - @NotNull final FillContext fillContext, - @NotNull final WritableLongChunk mappedKeysOut, - @NotNull final LongChunk keysToMap) { + @NotNull final FillContext fillContext, + @NotNull final WritableLongChunk mappedKeysOut, + @NotNull final LongChunk keysToMap) { // Assume that caller provided a chunk large enough to use. mappedKeysOut.setSize(0); for (int ii = 0; ii < keysToMap.size(); ++ii) { @@ -84,9 +80,9 @@ default void fillChunkUnordered( } default void fillPrevChunk( - @NotNull final FillContext fillContext, - @NotNull final WritableLongChunk mappedKeysOut, - @NotNull final OrderedKeys keysToMap) { + @NotNull final FillContext fillContext, + @NotNull final WritableLongChunk mappedKeysOut, + @NotNull final OrderedKeys keysToMap) { // Assume that caller provided a chunk large enough to use. 
mappedKeysOut.setSize(0); keysToMap.forEachLong((final long k) -> { @@ -136,12 +132,10 @@ default WritableChunkSink.FillFromContext makeFillFromContext(int chunkCapacity) WritableChunkSink.FillFromContext EMPTY_CONTEXT = new WritableChunkSink.FillFromContext() {}; /** - * Our default, inefficient, implementation. Inheritors who care should provide a better - * implementation. + * Our default, inefficient, implementation. Inheritors who care should provide a better implementation. */ - default void fillFromChunk(@NotNull WritableChunkSink.FillFromContext context, - @NotNull Chunk src, - @NotNull OrderedKeys orderedKeys) { + default void fillFromChunk(@NotNull WritableChunkSink.FillFromContext context, @NotNull Chunk src, + @NotNull OrderedKeys orderedKeys) { final MutableInt offset = new MutableInt(); final LongChunk valuesLongChunk = src.asLongChunk(); orderedKeys.forAllLongs(key -> { @@ -177,8 +171,7 @@ interface Factory { RedirectionIndex createRedirectionIndex(TLongLongMap map); } - String USE_LOCK_FREE_IMPL_PROPERTY_NAME = - RedirectionIndex.class.getSimpleName() + "." + "useLockFreeImpl"; + String USE_LOCK_FREE_IMPL_PROPERTY_NAME = RedirectionIndex.class.getSimpleName() + "." + "useLockFreeImpl"; Factory FACTORY = new RedirectionIndexLockFreeFactory(); } diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/RedirectionIndexLockFreeImpl.java b/DB/src/main/java/io/deephaven/db/v2/utils/RedirectionIndexLockFreeImpl.java index be2cfc84214..bfb7c59600d 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/RedirectionIndexLockFreeImpl.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/RedirectionIndexLockFreeImpl.java @@ -19,117 +19,102 @@ import org.jetbrains.annotations.NotNull; /** - * This is a lock-free implementation of a RedirectionIndex. The rules for using this class are as - * follows. + * This is a lock-free implementation of a RedirectionIndex. The rules for using this class are as follows. * - * Users of this class fall into two roles: 1. 
Readers (snapshotters), of which there can be many. - * 2. Writers, of which there can be only one. + * Users of this class fall into two roles: 1. Readers (snapshotters), of which there can be many. 2. Writers, of which + * there can be only one. * - * Responsibilities of Readers: 1. Before you start, read the LogicalClock, note its value, and also - * note whether it is in the Idle or Update phase. 2. If you are in the "Idle" phase, then all your - * calls should be to get(). 3. If you are in the "Update" phase, then all your calls should be to - * getPrev(). 4. You must never call a mutating operation like put() or remove(). 5. When you are - * done reading all the data you wanted, read the LogicalClock again. If it has the same value (both - * generation and phase) as when you started, then you can rely on the data you have read. - * Otherwise, the data you have is garbage, and you need to throw it all away and try again at step - * 1. + * Responsibilities of Readers: 1. Before you start, read the LogicalClock, note its value, and also note whether it is + * in the Idle or Update phase. 2. If you are in the "Idle" phase, then all your calls should be to get(). 3. If you are + * in the "Update" phase, then all your calls should be to getPrev(). 4. You must never call a mutating operation like + * put() or remove(). 5. When you are done reading all the data you wanted, read the LogicalClock again. If it has the + * same value (both generation and phase) as when you started, then you can rely on the data you have read. Otherwise, + * the data you have is garbage, and you need to throw it all away and try again at step 1. * - * Responsibilities of the Writer: 1. There must be only one Writer. 2. The Writer controls the - * LogicalClock. Put another way, unlike the Reader, the Writer does not have to worry about a - * LogicalClock transition that would invalidate its work. 3. The Writer may call read operations - * (get() or getPrev()) at any time, i.e. 
regardless of phase (Idle or Update), and they will always - * provide valid data. 4. The Writer must only call mutating operations (like put() or remove()) - * during an Update phase. The Writer MUST NOT call these mutating operations during an Idle phase. - * 5. The Writer has no special responsibility when transitioning the LogicalClock from Idle to - * Update. 6. However, upon the transition from Update to Idle, the Writer does have an additional - * responsibility. Namely, the writer must first transition the LogicalClock (from Update generation - * N to Idle generation N+1), then call our method commitUpdates(). A logical place to do this might - * be the Terminal Listener. + * Responsibilities of the Writer: 1. There must be only one Writer. 2. The Writer controls the LogicalClock. Put + * another way, unlike the Reader, the Writer does not have to worry about a LogicalClock transition that would + * invalidate its work. 3. The Writer may call read operations (get() or getPrev()) at any time, i.e. regardless of + * phase (Idle or Update), and they will always provide valid data. 4. The Writer must only call mutating operations + * (like put() or remove()) during an Update phase. The Writer MUST NOT call these mutating operations during an Idle + * phase. 5. The Writer has no special responsibility when transitioning the LogicalClock from Idle to Update. 6. + * However, upon the transition from Update to Idle, the Writer does have an additional responsibility. Namely, the + * writer must first transition the LogicalClock (from Update generation N to Idle generation N+1), then call our method + * commitUpdates(). A logical place to do this might be the Terminal Listener. * * Rationale that this code implements correct lock-free behavior: * * Section I: The perspective of the Reader. * - * There are three cases: 1. When the LogicalClock has been in the Idle phase for the entirety of - * "Reader Responsibilities" steps 1-5 above. 2. 
When it has been in the Update phase for the - * entirety. 3. When it started in one phase and ended in the other. + * There are three cases: 1. When the LogicalClock has been in the Idle phase for the entirety of "Reader + * Responsibilities" steps 1-5 above. 2. When it has been in the Update phase for the entirety. 3. When it started in + * one phase and ended in the other. * * We discuss these in reverse order. * - * For #3, our only responsibility is to not crash, corrupt memory, or go into an infinite loop. The - * get() / getPrev() methods don't do any memory writes (so they can't corrupt memory). The argument - * that they can't crash or get into an infinite loop can only be made by looking at the code. I - * won't try to justify it here, but hopefully we can convince ourselves that this is so. + * For #3, our only responsibility is to not crash, corrupt memory, or go into an infinite loop. The get() / getPrev() + * methods don't do any memory writes (so they can't corrupt memory). The argument that they can't crash or get into an + * infinite loop can only be made by looking at the code. I won't try to justify it here, but hopefully we can convince + * ourselves that this is so. * - * For #2, the Reader is only calling getPrev(), which only accesses 'baseline'. The Writer's last - * write to 'baseline' happened before it set the logical clock to Update. LogicalClock being - * volatile means that these writes have been 'released'. Meanwhile, the Reader's first read to - * 'baseline' happens after it read the Update state from the LogicalClock (and therefore has - * 'acquire' semantics). + * For #2, the Reader is only calling getPrev(), which only accesses 'baseline'. The Writer's last write to 'baseline' + * happened before it set the logical clock to Update. LogicalClock being volatile means that these writes have been + * 'released'. 
Meanwhile, the Reader's first read to 'baseline' happens after it read the Update state from the + * LogicalClock (and therefore has 'acquire' semantics). * - * #1 is the most complicated. For #1, the Reader is only calling get(), which accesses both - * 'baseline' and 'updates'. Importantly, it consults 'updates' first, and only accesses 'baseline' - * for keys that are not in 'updates'. The Writer's last write to 'updates' happened before it set - * the logical clock to Idle (thus these writes have been released). Meanwhile, the Reader's first - * read from 'updates' happens after it read the Idle state from the LogicalClock (and therefore has - * 'acquire' semantics). Calls to get(key) where key exists in the 'updates' map are thus correct. - * We now concern ourselves with calls to get(key) where key does not exist in 'updates'. + * #1 is the most complicated. For #1, the Reader is only calling get(), which accesses both 'baseline' and 'updates'. + * Importantly, it consults 'updates' first, and only accesses 'baseline' for keys that are not in 'updates'. The + * Writer's last write to 'updates' happened before it set the logical clock to Idle (thus these writes have been + * released). Meanwhile, the Reader's first read from 'updates' happens after it read the Idle state from the + * LogicalClock (and therefore has 'acquire' semantics). Calls to get(key) where key exists in the 'updates' map are + * thus correct. We now concern ourselves with calls to get(key) where key does not exist in 'updates'. * - * The Writer spends the first part of the Idle cycle busily copying data from 'updates' back into - * 'baseline'. These updates have these properties: 1. It does not disturb entries whose keys are - * are not in 'updates'. 2. When it writes to the buckets in 'baseline' it changes key slots from - * either 'deletedSlot' or 'emptySlot' to some new key NEWKEY. 
We can argue that these writes do not - * affect the operation of the Reader, so it doesn't matter whether the Reader sees them or not: a) - * Importantly, the Reader is never looking for NEWKEY, because the Reader would have satisfied any - * search for NEWKEY from the 'updates' map, which it consulted first. b) If NEWKEY replaces a - * 'deletedSlot', then the Reader will skip over it as it does its probe. Because NEWKEY != key, the - * Reader's logic is the same whether it seems NEWKEY or deletedSlot. c) If NEWKEY replaces an - * 'emptySlot', then the Reader will probe further than it otherwise would have, but this search - * will be ultimately futile, because it will eventually reach an emptySlot. 3. When it is finally - * done copying values over, the Writer will write null to the keysAndValues array inside 'updates' - * (this is a volatile write). Writer's last write to 'baseline' happened before it wrote that null. - * When Reader consults 'updates' and finds a null there, it will also see all the writes made to - * 'baseline' (acquire semantics). 4. Writer may need to do a rehash. If it does, it will prepare - * the hashed array off to the side and then write it to 'baseline.keysAndValues' with a volatile - * write. When reader reads 'baseline.keysAndValues' (volatile read) it will either see the old - * array or the fully-populated new one. + * The Writer spends the first part of the Idle cycle busily copying data from 'updates' back into 'baseline'. These + * updates have these properties: 1. It does not disturb entries whose keys are are not in 'updates'. 2. When it writes + * to the buckets in 'baseline' it changes key slots from either 'deletedSlot' or 'emptySlot' to some new key NEWKEY. 
We + * can argue that these writes do not affect the operation of the Reader, so it doesn't matter whether the Reader sees + * them or not: a) Importantly, the Reader is never looking for NEWKEY, because the Reader would have satisfied any + * search for NEWKEY from the 'updates' map, which it consulted first. b) If NEWKEY replaces a 'deletedSlot', then the + * Reader will skip over it as it does its probe. Because NEWKEY != key, the Reader's logic is the same whether it seems + * NEWKEY or deletedSlot. c) If NEWKEY replaces an 'emptySlot', then the Reader will probe further than it otherwise + * would have, but this search will be ultimately futile, because it will eventually reach an emptySlot. 3. When it is + * finally done copying values over, the Writer will write null to the keysAndValues array inside 'updates' (this is a + * volatile write). Writer's last write to 'baseline' happened before it wrote that null. When Reader consults 'updates' + * and finds a null there, it will also see all the writes made to 'baseline' (acquire semantics). 4. Writer may need to + * do a rehash. If it does, it will prepare the hashed array off to the side and then write it to + * 'baseline.keysAndValues' with a volatile write. When reader reads 'baseline.keysAndValues' (volatile read) it will + * either see the old array or the fully-populated new one. * * Section II: The perspective of the Writer: * - * From the Writer's own perspective, the data structure is always coherent, because the Writer is - * the only one writing to it. The only considerations are the Writer's responsibilities to the - * Reader. They are outlined in the above section "Responsibilities of the Writer". The one thing - * that needs to be explained further is what happens in commitUpdates(). + * From the Writer's own perspective, the data structure is always coherent, because the Writer is the only one writing + * to it. The only considerations are the Writer's responsibilities to the Reader. 
They are outlined in the above + * section "Responsibilities of the Writer". The one thing that needs to be explained further is what happens in + * commitUpdates(). * - * In commitUpdates() we iterate over the 'updates' hashtable and insert entries into the 'baseline' - * hashtable. If we need to rehash, we do so in a way that the Reader sees either the old buckets - * array or the new buckets array--we need to avoid the case where it sees some intermediate array - * which is only partially populated. To make this simple, we populate the new array off to the side - * and do a volatile write to store its reference. The Reader's next read of it is a volatile read, - * and it will pick it up then. + * In commitUpdates() we iterate over the 'updates' hashtable and insert entries into the 'baseline' hashtable. If we + * need to rehash, we do so in a way that the Reader sees either the old buckets array or the new buckets array--we need + * to avoid the case where it sees some intermediate array which is only partially populated. To make this simple, we + * populate the new array off to the side and do a volatile write to store its reference. The Reader's next read of it + * is a volatile read, and it will pick it up then. * - * When commitUpdates() is done, it writes a null to the 'updates.keysAndValues'. At this point all - * writes to the 'baseline' hashtable are finished as of the time of the write of the null. Next - * time the Reader reads this reference and finds it null, this will be an acquire and all the - * values in 'baseline' will be visible. + * When commitUpdates() is done, it writes a null to the 'updates.keysAndValues'. At this point all writes to the + * 'baseline' hashtable are finished as of the time of the write of the null. Next time the Reader reads this reference + * and finds it null, this will be an acquire and all the values in 'baseline' will be visible. * - * That takes care of the transition from Update to Idle. 
Regarding the transition from Idle to - * Update, the caller does not have any special responsibility, but the first call to put() inside - * an Update generation causes a new 'keysAndValues' array to be generated, which the Reader will - * start to see next time it looks. + * That takes care of the transition from Update to Idle. Regarding the transition from Idle to Update, the caller does + * not have any special responsibility, but the first call to put() inside an Update generation causes a new + * 'keysAndValues' array to be generated, which the Reader will start to see next time it looks. */ public class RedirectionIndexLockFreeImpl implements RedirectionIndex { - private static final float LOAD_FACTOR = (float) Configuration.getInstance() - .getDoubleWithDefault("RedirectionIndexK4V4Impl.loadFactor", 0.5); + private static final float LOAD_FACTOR = + (float) Configuration.getInstance().getDoubleWithDefault("RedirectionIndexK4V4Impl.loadFactor", 0.5); /** - * The special "key not found" value used in the 'baseline' map is -1. However, for the - * 'updates' map, the "key not found" value is -2. This allows us to use the updates map to - * remember removals and account for them properly. + * The special "key not found" value used in the 'baseline' map is -1. However, for the 'updates' map, the "key not + * found" value is -2. This allows us to use the updates map to remember removals and account for them properly. */ private static final long BASELINE_KEY_NOT_FOUND = -1L; static { - Assert.eq(BASELINE_KEY_NOT_FOUND, "BASELINE_KEY_NOT_FOUND", Index.NULL_KEY, - "Index.NULL_KEY"); + Assert.eq(BASELINE_KEY_NOT_FOUND, "BASELINE_KEY_NOT_FOUND", Index.NULL_KEY, "Index.NULL_KEY"); } private static final long UPDATES_KEY_NOT_FOUND = -2L; @@ -146,24 +131,20 @@ public class RedirectionIndexLockFreeImpl implements RedirectionIndex { RedirectionIndexLockFreeImpl(TNullableLongLongMap map) { this.baseline = map; - // Initially, baseline == updates (i.e. 
they point to the same object). They will continue - // to point to the same - // object until the first terminal listener notification after prev tracking is turned on - // (via + // Initially, baseline == updates (i.e. they point to the same object). They will continue to point to the same + // object until the first terminal listener notification after prev tracking is turned on (via // startTrackingPrevValues()). this.updates = map; this.updateCommitter = null; } /** - * Commits the 'updates' map into the 'baseline' map, then resets the 'updates' map to empty. - * The only caller should be Writer@Idle, via the TerminalNotification. + * Commits the 'updates' map into the 'baseline' map, then resets the 'updates' map to empty. The only caller should + * be Writer@Idle, via the TerminalNotification. */ private static void commitUpdates(@NotNull final RedirectionIndexLockFreeImpl instance) { - // This only gets called by the UpdateCommitter, and only as a result of a terminal listener - // notification (which - // in turn can only happen once prev tracking has been turned on). We copy updates to - // baseline and reset the + // This only gets called by the UpdateCommitter, and only as a result of a terminal listener notification (which + // in turn can only happen once prev tracking has been turned on). We copy updates to baseline and reset the // updates map. final TNullableLongLongMap updates = instance.updates; final TNullableLongLongMap baseline = instance.baseline; @@ -180,11 +161,10 @@ private static void commitUpdates(@NotNull final RedirectionIndexLockFreeImpl in } /** - * Gets the current value. Works correctly for Readers@Idle and Writer@Update. Readers@Update - * should be calling getPrev(); meanwhile Writer@Idle shouldn't be calling anything. The only - * Reader@Update who calls this should be one where the clock transitioned from Idle to Update - * while they were already running. 
Such a Reader should, upon eventually noticing that this - * transition happened, throw away their work and start again. + * Gets the current value. Works correctly for Readers@Idle and Writer@Update. Readers@Update should be calling + * getPrev(); meanwhile Writer@Idle shouldn't be calling anything. The only Reader@Update who calls this should be + * one where the clock transitioned from Idle to Update while they were already running. Such a Reader should, upon + * eventually noticing that this transition happened, throw away their work and start again. */ @Override public final long get(long key) { @@ -193,8 +173,7 @@ public final long get(long key) { } final long result = updates.get(key); if (result != UPDATES_KEY_NOT_FOUND) { - // The prior value from updates is either some ordinary previous value, or - // BASELINE_KEY_NOT_FOUND. + // The prior value from updates is either some ordinary previous value, or BASELINE_KEY_NOT_FOUND. // In either case, return it to the caller. return result; } @@ -203,11 +182,10 @@ public final long get(long key) { } /** - * Gets the previous value. Works correctly for Readers@Update and Writer@Update. Readers@Idle - * should be calling get(); meanwhile Writer@Idle shouldn't be calling anything. The only - * Reader@Idle who calls this should be one who encountered a transition from Update to Idle - * while they were already running. Such a Reader should, upon eventually noticing that this - * transition happened, throw away their work and start again. + * Gets the previous value. Works correctly for Readers@Update and Writer@Update. Readers@Idle should be calling + * get(); meanwhile Writer@Idle shouldn't be calling anything. The only Reader@Idle who calls this should be one who + * encountered a transition from Update to Idle while they were already running. Such a Reader should, upon + * eventually noticing that this transition happened, throw away their work and start again. 
*/ @Override public final long getPrev(long key) { @@ -218,8 +196,8 @@ public final long getPrev(long key) { } /** - * Puts a value. Works correctly for Writer@Update. Readers should never call this; Writers@Idle - * shouldn't be calling anything. + * Puts a value. Works correctly for Writer@Update. Readers should never call this; Writers@Idle shouldn't be + * calling anything. */ @Override public final long put(long key, long value) { @@ -230,13 +208,12 @@ public final long put(long key, long value) { } /** - * Removes a key. Works correctly for Writer@Update. Readers should never call this; - * Writers@Idle shouldn't be calling anything. + * Removes a key. Works correctly for Writer@Update. Readers should never call this; Writers@Idle shouldn't be + * calling anything. */ @Override public long remove(long key) { - // A removal is modeled simply as a put, into the updates table, of the baseline - // "key_not_found" value. + // A removal is modeled simply as a put, into the updates table, of the baseline "key_not_found" value. return putImpl(key, BASELINE_KEY_NOT_FOUND); } @@ -265,8 +242,7 @@ private long putImpl(long key, long value) { updateCommitter.maybeActivate(); } final long result = updates.put(key, value); - // The prior value from updates is either some legit previous value, or - // BASELINE_KEY_NOT_FOUND. + // The prior value from updates is either some legit previous value, or BASELINE_KEY_NOT_FOUND. // In either case, return it to the caller. 
if (result != UPDATES_KEY_NOT_FOUND) { return result; @@ -275,9 +251,8 @@ private long putImpl(long key, long value) { } @Override - public void fillFromChunk(@NotNull WritableChunkSink.FillFromContext context, - @NotNull Chunk src, - @NotNull OrderedKeys orderedKeys) { + public void fillFromChunk(@NotNull WritableChunkSink.FillFromContext context, @NotNull Chunk src, + @NotNull OrderedKeys orderedKeys) { if (updateCommitter != null) { updateCommitter.maybeActivate(); } @@ -291,7 +266,7 @@ public void fillFromChunk(@NotNull WritableChunkSink.FillFromContext context, } private static final int hashBucketWidth = Configuration.getInstance() - .getIntegerForClassWithDefault(RedirectionIndexLockFreeImpl.class, "hashBucketWidth", 1); + .getIntegerForClassWithDefault(RedirectionIndexLockFreeImpl.class, "hashBucketWidth", 1); @NotNull private static TNullableLongLongMap createUpdateMap() { @@ -305,7 +280,7 @@ static TNullableLongLongMap createMapWithCapacity(int initialCapacity) { @NotNull private static TNullableLongLongMap createMapWithCapacity(int initialCapacity, float loadFactor, - long noEntryValue) { + long noEntryValue) { switch (hashBucketWidth) { case 1: return new HashMapLockFreeK1V1(initialCapacity, loadFactor, noEntryValue); @@ -314,8 +289,7 @@ private static TNullableLongLongMap createMapWithCapacity(int initialCapacity, f case 4: return new HashMapLockFreeK4V4(initialCapacity, loadFactor, noEntryValue); default: - throw new UnsupportedOperationException( - "Unsupported hashBucketWidth setting: " + hashBucketWidth); + throw new UnsupportedOperationException("Unsupported hashBucketWidth setting: " + hashBucketWidth); } } } diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/RedirectionIndexUtilities.java b/DB/src/main/java/io/deephaven/db/v2/utils/RedirectionIndexUtilities.java index 38c0142e46b..4dc36724e83 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/RedirectionIndexUtilities.java +++ 
b/DB/src/main/java/io/deephaven/db/v2/utils/RedirectionIndexUtilities.java @@ -3,9 +3,8 @@ public class RedirectionIndexUtilities { private static final long SEARCH_ITERATOR_THRESHOLD = 512; - static void applyRedirectionShift(final RedirectionIndex redirectionIndex, - final ReadOnlyIndex filterIndex, - final IndexShiftData shiftData) { + static void applyRedirectionShift(final RedirectionIndex redirectionIndex, final ReadOnlyIndex filterIndex, + final IndexShiftData shiftData) { final IndexShiftData.SingleElementShiftCallback applyOneShift = (key, delta) -> { final long oldKey = redirectionIndex.remove(key); diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/RefCountedCow.java b/DB/src/main/java/io/deephaven/db/v2/utils/RefCountedCow.java index 6aad5e6daed..d8716fc6ba3 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/RefCountedCow.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/RefCountedCow.java @@ -9,15 +9,14 @@ /** *

      - * This class enables a pattern of use where objects can be shared from multiple - * references/identities while they are used read-only. + * This class enables a pattern of use where objects can be shared from multiple references/identities while they are + * used read-only. * * Note this is not thread safe. * - * A class derives from this class and users of it call getWriteRef() to obtain a reference that can - * be modified. That may return the same object (with an increased ref count), or may return a deep - * copy of it if other readers of the object exist. Effectively this creates a copy-on-write sharing - * strategy. + * A class derives from this class and users of it call getWriteRef() to obtain a reference that can be modified. That + * may return the same object (with an increased ref count), or may return a deep copy of it if other readers of the + * object exist. Effectively this creates a copy-on-write sharing strategy. *

      * *

      @@ -62,25 +61,24 @@ *

      * *

      - * Note this implementation does minimal concurrency protection, since it assumes it will run under - * the protection mechanisms of live update table and its clock, ie, reads can concurrently access - * objects being mutated, but will realize near the end their operation was invalidated by a clock - * change and will toss their results. + * Note this implementation does minimal concurrency protection, since it assumes it will run under the protection + * mechanisms of live update table and its clock, ie, reads can concurrently access objects being mutated, but will + * realize near the end their operation was invalidated by a clock change and will toss their results. *

      * * @param A class that will extend us, to get RefCounted functionality. */ public abstract class RefCountedCow { private static final boolean debug = RspArray.debug || - Configuration.getInstance().getBooleanForClassWithDefault( - RefCountedCow.class, "debug", false); + Configuration.getInstance().getBooleanForClassWithDefault( + RefCountedCow.class, "debug", false); /** - * Field updater for refCount, so we can avoid creating an - * {@link java.util.concurrent.atomic.AtomicInteger} for each instance. + * Field updater for refCount, so we can avoid creating an {@link java.util.concurrent.atomic.AtomicInteger} for + * each instance. */ private static final AtomicIntegerFieldUpdater REFCOUNT_UPDATER = - AtomicIntegerFieldUpdater.newUpdater(RefCountedCow.class, "refCount"); + AtomicIntegerFieldUpdater.newUpdater(RefCountedCow.class, "refCount"); /** * The actual value of our reference count. @@ -122,13 +120,13 @@ public final void acquire() { } /** - * Obtain a new reference to this object; the reference count will increase. This operation is - * cheap and does not do a deep copy of the object's payload; if a mutator is called through - * this reference, the reference will make a copy of its payload first, before applying the - * mutation, to keep other read only accessor of the previously shared payload unnaffected. + * Obtain a new reference to this object; the reference count will increase. This operation is cheap and does not do + * a deep copy of the object's payload; if a mutator is called through this reference, the reference will make a + * copy of its payload first, before applying the mutation, to keep other read only accessor of the previously + * shared payload unnaffected. * - * Note this assumes a pattern of use for derived classes where mutators return a reference, - * which may or may not point to the same object on which the mutation was called. 
+ * Note this assumes a pattern of use for derived classes where mutators return a reference, which may or may not + * point to the same object on which the mutation was called. * * Also note this is not thread safe. * @@ -168,9 +166,8 @@ public final int refCount() { public abstract T deepCopy(); /** - * Derived classes should implement self() by simply "return this" of the right type. This - * method exists only as an implementation artifact for a type safe implementation of the - * curiously recurring generic pattern. + * Derived classes should implement self() by simply "return this" of the right type. This method exists only as an + * implementation artifact for a type safe implementation of the curiously recurring generic pattern. * * @return this object, with the right, most derived type. */ @@ -187,8 +184,8 @@ protected void notifyBeforeAcquire() {} protected void notifyAfterRelease() {} /** - * Obtain a reference to this object that can be modified without affecting other references. - * Note this is not thread safe. + * Obtain a reference to this object that can be modified without affecting other references. Note this is not + * thread safe. * * @return If this object is shared, a deep copy of this object, otherwise the object itself. */ @@ -198,13 +195,12 @@ public T getWriteRef() { Assert.gtZero(count, "count"); } return (count > 1) - ? deepCopy() - : self(); + ? deepCopy() + : self(); } /** - * Query whether this object will copy itself first before mutations. Note this is not thread - * safe. + * Query whether this object will copy itself first before mutations. Note this is not thread safe. 
* * @return true if this object is not shared and can be mutated directly */ diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/RspBitmapSequentialBuilder.java b/DB/src/main/java/io/deephaven/db/v2/utils/RspBitmapSequentialBuilder.java index f681eaf09c1..d19ca089e85 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/RspBitmapSequentialBuilder.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/RspBitmapSequentialBuilder.java @@ -54,7 +54,7 @@ public void appendKey(final long v) { if (pendingStart != -1) { if (check && v <= pendingEnd) { throw new IllegalArgumentException(outOfOrderKeyErrorMsg + - "last=" + pendingEnd + " while appending value=" + v); + "last=" + pendingEnd + " while appending value=" + v); } if (pendingEnd + 1 == v) { pendingEnd = v; @@ -75,8 +75,7 @@ public void appendRange(final long start, final long end) { if (pendingStart != -1) { if (check && start <= pendingEnd) { throw new IllegalArgumentException(outOfOrderKeyErrorMsg + - "last=" + pendingEnd + " while appending range start=" + start + ", end=" - + end); + "last=" + pendingEnd + " while appending range start=" + start + ", end=" + end); } if (pendingEnd + 1 == start) { pendingEnd = end; @@ -89,8 +88,7 @@ public void appendRange(final long start, final long end) { } @Override - public void appendTreeIndexImpl(final long shiftAmount, final TreeIndexImpl ix, - final boolean acquire) { + public void appendTreeIndexImpl(final long shiftAmount, final TreeIndexImpl ix, final boolean acquire) { if (ix.ixIsEmpty()) { return; } @@ -130,8 +128,8 @@ protected void flushRangeToPendingContainer(final long start, final long end) { final long pendingContainerBlockKey = highBits(pendingContainerKey); if (pendingContainerKey != -1 && pendingContainerBlockKey == highStart) { // short path. 
if (pendingContainer == null) { - pendingContainer = containerForLowValueAndRange( - lowBitsAsInt(pendingContainerKey), lowStart, lowEnd); + pendingContainer = + containerForLowValueAndRange(lowBitsAsInt(pendingContainerKey), lowStart, lowEnd); pendingContainerKey = highBits(pendingContainerKey); } else { pendingContainer = pendingContainer.iappend(lowStart, lowEnd + 1); @@ -141,7 +139,7 @@ protected void flushRangeToPendingContainer(final long start, final long end) { if (pendingContainerKey != -1) { if (check && pendingContainerKey > highStart) { throw new IllegalStateException(outOfOrderKeyErrorMsg + - "last=" + end + " while appending value=" + pendingContainer.last()); + "last=" + end + " while appending value=" + pendingContainer.last()); } flushPendingContainer(); } @@ -165,8 +163,7 @@ protected void flushRangeToPendingContainer(final long start, final long end) { // * a block for an initial container. // * a full block span // * a block for a final container. - // Note we must have at least two of these, given code above already handled the case for a - // single block range. + // Note we must have at least two of these, given code above already handled the case for a single block range. // If we don't have a particular one, we set its key to -1. 
final long initialContainerKey; final int initialContainerStart; @@ -223,12 +220,10 @@ protected void flushRangeToPendingContainer(final long start, final long end) { if (pendingContainerKey != -1 && highBits(pendingContainerKey) == initialContainerKey) { if (pendingContainer == null) { pendingContainer = containerForLowValueAndRange( - lowBitsAsInt(pendingContainerKey), initialContainerStart, - initialContainerEnd); + lowBitsAsInt(pendingContainerKey), initialContainerStart, initialContainerEnd); pendingContainerKey = highBits(pendingContainerKey); } else { - pendingContainer = - pendingContainer.iappend(initialContainerStart, initialContainerEnd + 1); + pendingContainer = pendingContainer.iappend(initialContainerStart, initialContainerEnd + 1); } flushPendingContainer(); } else { @@ -236,7 +231,7 @@ protected void flushRangeToPendingContainer(final long start, final long end) { flushPendingContainer(); } final Container initialContainer = - Container.rangeOfOnes(initialContainerStart, initialContainerEnd + 1); + Container.rangeOfOnes(initialContainerStart, initialContainerEnd + 1); ensureRb(); rb.appendContainerUnsafeNoWriteCheck(initialContainerKey, initialContainer); } diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/RuntimeMemory.java b/DB/src/main/java/io/deephaven/db/v2/utils/RuntimeMemory.java index f517028654c..3c8ec2f2d7d 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/RuntimeMemory.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/RuntimeMemory.java @@ -10,15 +10,14 @@ * Cache memory utilization. * *

      - * >Calling Runtime.getRuntime().getFreeMemory() is expensive; and we may do it a lot when we have - * automatically computed tables, such as in a byExternal. Instead of calling the runtime directly - * from the performance instrumentation framework, we call this class's methods; which cache the - * result for a configurable number of milliseconds to avoid repeated calls that are not likely any - * different./p> + * >Calling Runtime.getRuntime().getFreeMemory() is expensive; and we may do it a lot when we have automatically + * computed tables, such as in a byExternal. Instead of calling the runtime directly from the performance + * instrumentation framework, we call this class's methods; which cache the result for a configurable number of + * milliseconds to avoid repeated calls that are not likely any different./p> * *

      - * A dditionally, we log our JVM heap usage on a regular basis; to enable users to quickly examine - * their worker logs and understand memory issues. + * A dditionally, we log our JVM heap usage on a regular basis; to enable users to quickly examine their worker logs and + * understand memory issues. *

      */ public class RuntimeMemory { @@ -51,11 +50,9 @@ public class RuntimeMemory { private RuntimeMemory(Logger log) { this.log = log; this.runtime = Runtime.getRuntime(); - logInterval = Configuration.getInstance() - .getIntegerWithDefault("RuntimeMemory.logIntervalMillis", 60 * 1000); + logInterval = Configuration.getInstance().getIntegerWithDefault("RuntimeMemory.logIntervalMillis", 60 * 1000); this.nextLog = System.currentTimeMillis() + logInterval; - cacheInterval = Configuration.getInstance() - .getIntegerWithDefault("RuntimeMemory.cacheIntervalMillis", 1); + cacheInterval = Configuration.getInstance().getIntegerWithDefault("RuntimeMemory.cacheIntervalMillis", 1); maxMemory = runtime.maxMemory(); commaFormat = new DecimalFormat(); @@ -111,10 +108,9 @@ private void maybeUpdateValues() { nextCheck = now + cacheInterval; } if (logInterval > 0 && now >= nextLog) { - log.info().append("Jvm Heap: ").append(commaFormat.format(lastFreeMemory)) - .append(" Free / ") - .append(commaFormat.format(lastTotalMemory)).append(" Total (") - .append(commaFormat.format(maxMemory)).append(" Max)").endl(); + log.info().append("Jvm Heap: ").append(commaFormat.format(lastFreeMemory)).append(" Free / ") + .append(commaFormat.format(lastTotalMemory)).append(" Total (") + .append(commaFormat.format(maxMemory)).append(" Max)").endl(); nextLog = now + logInterval; } } diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/ShiftData.java b/DB/src/main/java/io/deephaven/db/v2/utils/ShiftData.java index 6b9dc7099d5..fa7b20eb076 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/ShiftData.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/ShiftData.java @@ -35,14 +35,12 @@ public ShiftData(Index index, Index removed, Index added) { addedIt.next(); int startOffset = (int) addedIt.currentRangeStart(); int endOffset = (int) addedIt.currentRangeEnd(); - while (removedIndex < removedPositions.size() - && removedPositions.get(removedIndex) < startOffset) { + while (removedIndex < 
removedPositions.size() && removedPositions.get(removedIndex) < startOffset) { removeRange(removedPositions.get(removedIndex), removedCount.get(removedIndex)); removedIndex++; } int deleteCount = 0; - while (removedIndex < removedPositions.size() - && removedPositions.get(removedIndex) <= endOffset) { + while (removedIndex < removedPositions.size() && removedPositions.get(removedIndex) <= endOffset) { deleteCount += removedCount.get(removedIndex); removedIndex++; } @@ -53,10 +51,8 @@ public ShiftData(Index index, Index removed, Index added) { removedIndex++; } if (runningSize > 0) { - if (startIndex - .get(runningSize - 1) <= (index.size() - added.size() + removed.size() - 1)) { - endIndex.set(runningSize - 1, - (int) (index.size() - added.size() + removed.size() - 1)); + if (startIndex.get(runningSize - 1) <= (index.size() - added.size() + removed.size() - 1)) { + endIndex.set(runningSize - 1, (int) (index.size() - added.size() + removed.size() - 1)); } else { runningSize--; } @@ -77,7 +73,7 @@ void addRange(long firstIndex, long lastIndex, long deletionCount) { runningOffset = lastIndex + runningOffset + 1 - (deletionCount + firstIndex); if (runningSize > 0 && ((newStartIndex + runningOffset) == (startIndex.get(runningSize - 1) - + offsets.get(runningSize - 1)))) { + + offsets.get(runningSize - 1)))) { startIndex.set(runningSize - 1, newStartIndex); offsets.set(runningSize - 1, runningOffset); } else { @@ -96,8 +92,8 @@ void removeRange(long firstIndex, long count) { long newStartIndex = firstIndex - runningOffset + count; runningOffset = runningOffset - count; - if (runningSize > 0 && (newStartIndex + runningOffset == startIndex.get(runningSize - 1) - + offsets.get(runningSize - 1))) { + if (runningSize > 0 + && (newStartIndex + runningOffset == startIndex.get(runningSize - 1) + offsets.get(runningSize - 1))) { startIndex.set(runningSize - 1, newStartIndex); offsets.set(runningSize - 1, runningOffset); } else { diff --git 
a/DB/src/main/java/io/deephaven/db/v2/utils/ShiftedOrderedKeys.java b/DB/src/main/java/io/deephaven/db/v2/utils/ShiftedOrderedKeys.java index e2ace052105..588a9dc1485 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/ShiftedOrderedKeys.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/ShiftedOrderedKeys.java @@ -21,7 +21,7 @@ public static OrderedKeys wrap(OrderedKeys toWrap, long shiftAmount) { private ShiftedOrderedKeys(final OrderedKeys wrappedOK, final long shiftAmount) { Assert.assertion(!(wrappedOK instanceof ShiftedOrderedKeys), - "Wrapped Ordered Keys must not be a ShiftedOrderedKeys"); + "Wrapped Ordered Keys must not be a ShiftedOrderedKeys"); this.shiftAmount = shiftAmount; this.wrappedOK = wrappedOK; } @@ -75,8 +75,7 @@ public long peekNextKey() { @Override public OrderedKeys getNextOrderedKeysThrough(long maxKeyInclusive) { - reusableOK.reset(wrappedIt.getNextOrderedKeysThrough(maxKeyInclusive - shiftAmount), - shiftAmount); + reusableOK.reset(wrappedIt.getNextOrderedKeysThrough(maxKeyInclusive - shiftAmount), shiftAmount); return reusableOK; } @@ -104,14 +103,13 @@ public Iterator getOrderedKeysIterator() { @Override public OrderedKeys getOrderedKeysByPosition(long startPositionInclusive, long length) { - return wrap(wrappedOK.getOrderedKeysByPosition(startPositionInclusive, length), - shiftAmount); + return wrap(wrappedOK.getOrderedKeysByPosition(startPositionInclusive, length), shiftAmount); } @Override public OrderedKeys getOrderedKeysByKeyRange(long startKeyInclusive, long endKeyInclusive) { - return wrap(wrappedOK.getOrderedKeysByKeyRange(startKeyInclusive - shiftAmount, - endKeyInclusive - shiftAmount), shiftAmount); + return wrap(wrappedOK.getOrderedKeysByKeyRange(startKeyInclusive - shiftAmount, endKeyInclusive - shiftAmount), + shiftAmount); } @Override @@ -163,8 +161,7 @@ public boolean forEachLong(LongAbortableConsumer consumer) { @Override public boolean forEachLongRange(LongRangeAbortableConsumer consumer) { - return wrappedOK - 
.forEachLongRange((s, e) -> consumer.accept(s + shiftAmount, e + shiftAmount)); + return wrappedOK.forEachLongRange((s, e) -> consumer.accept(s + shiftAmount, e + shiftAmount)); } @Override diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/SortedIndex.java b/DB/src/main/java/io/deephaven/db/v2/utils/SortedIndex.java index 20787ab70c3..f2db10d3730 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/SortedIndex.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/SortedIndex.java @@ -39,8 +39,7 @@ private static class MappingInfo { private final Map mapping; private final long creationTick; - private MappingInfo(Map mapping, long creationTick, - TupleSource tupleSource) { + private MappingInfo(Map mapping, long creationTick, TupleSource tupleSource) { this.mapping = mapping; this.creationTick = creationTick; this.tupleSource = tupleSource; @@ -107,11 +106,10 @@ public Index invert(ReadOnlyIndex keys) { } /** - * The only used implementation of invert is in the TreeIndex, really the guts of it are in - * BspNodeIndex. + * The only used implementation of invert is in the TreeIndex, really the guts of it are in BspNodeIndex. * - * This version is inefficient as it simply performs O(keys) find operations; which is O(keys * - * lg size), because there is no memory about what you've already found. + * This version is inefficient as it simply performs O(keys) find operations; which is O(keys * lg size), because + * there is no memory about what you've already found. * * It serves as a reasonable reference for what the invert operation is "meant" to do. 
* @@ -142,22 +140,20 @@ private Map lookupMapping(List columnSourceKey) { } private static Map lookupMapping( - WeakHashMap, MappingInfo> mappings, - WeakHashMap, MappingInfo> ephemeralMappings, - List columnSourceKey) { - final Map immutableMapping = - lookupImmutableMapping(mappings, columnSourceKey); + WeakHashMap, MappingInfo> mappings, + WeakHashMap, MappingInfo> ephemeralMappings, + List columnSourceKey) { + final Map immutableMapping = lookupImmutableMapping(mappings, columnSourceKey); if (immutableMapping != null) return immutableMapping; return lookupEphemeralMapping(columnSourceKey, ephemeralMappings); } private static Map lookupPrevMapping( - WeakHashMap, MappingInfo> mappings, - WeakHashMap, MappingInfo> ephemeralPrevMappings, - List columnSourceKey) { - final Map immutableMapping = - lookupImmutableMapping(mappings, columnSourceKey); + WeakHashMap, MappingInfo> mappings, + WeakHashMap, MappingInfo> ephemeralPrevMappings, + List columnSourceKey) { + final Map immutableMapping = lookupImmutableMapping(mappings, columnSourceKey); if (immutableMapping != null) return immutableMapping; return lookupEphemeralMapping(columnSourceKey, ephemeralPrevMappings); @@ -168,8 +164,8 @@ private Map lookupPrevMapping(List columnSourceKey) } private static Map lookupImmutableMapping( - WeakHashMap, MappingInfo> mappings, - List columnSourceKey) { + WeakHashMap, MappingInfo> mappings, + List columnSourceKey) { if (mappings == null) { return null; } @@ -181,7 +177,7 @@ private static Map lookupImmutableMapping( } private static Map lookupEphemeralMapping(List columnSourceKey, - WeakHashMap, MappingInfo> groupingMap) { + WeakHashMap, MappingInfo> groupingMap) { if (groupingMap == null) { return null; } @@ -227,8 +223,7 @@ protected void updateGroupingOnInsert(long key) { /** * On an insertion operation, we clear all of our mappings so that they are not out-of-date. 
*/ - protected void updateGroupingOnInsert(final LongChunk keys, final int offset, - final int length) { + protected void updateGroupingOnInsert(final LongChunk keys, final int offset, final int length) { clearMappings(); } @@ -256,8 +251,7 @@ protected void updateGroupingOnRemoveRange(final long start, final long end) { /** * On an insertion operation, we clear all of our mappings so that they are not out-of-date. */ - protected void updateGroupingOnRemove(final LongChunk keys, final int offset, - final int length) { + protected void updateGroupingOnRemove(final LongChunk keys, final int offset, final int length) { clearMappings(); } @@ -270,15 +264,14 @@ protected void updateGroupingOnRetain(final Index intersected) { } public static Map getGrouping( - final Index thisIndex, - UnaryOperator indexOp, - WeakHashMap, MappingInfo> mappings, - WeakHashMap, MappingInfo> ephemeralMappings, - TupleSource tupleSource) { + final Index thisIndex, + UnaryOperator indexOp, + WeakHashMap, MappingInfo> mappings, + WeakHashMap, MappingInfo> ephemeralMappings, + TupleSource tupleSource) { // noinspection unchecked final List sourcesKey = tupleSource.getColumnSources(); - final Map lookupResult = - lookupMapping(mappings, ephemeralMappings, sourcesKey); + final Map lookupResult = lookupMapping(mappings, ephemeralMappings, sourcesKey); if (lookupResult != null) { return lookupResult; } @@ -287,8 +280,7 @@ public static Map getGrouping( BiConsumer resultCollector = result::put; - collectGrouping(thisIndex, indexOp, mappings, ephemeralMappings, resultCollector, - tupleSource, sourcesKey); + collectGrouping(thisIndex, indexOp, mappings, ephemeralMappings, resultCollector, tupleSource, sourcesKey); return result; } @@ -306,12 +298,10 @@ public Map getGrouping(TupleSource tupleSource) { final BiConsumer resultCollector = result::put; - collectGrouping(this, this::intersect, mappings, ephemeralMappings, resultCollector, - tupleSource, sourcesKey); + collectGrouping(this, this::intersect, 
mappings, ephemeralMappings, resultCollector, tupleSource, sourcesKey); - // TODO: We need to do something better than weakly-reachable Lists here. Keys - // are probably cleaned up well before we want right now. Values only cleaned up on access. - // Both are sub-par. + // TODO: We need to do something better than weakly-reachable Lists here. Keys are probably + // cleaned up well before we want right now. Values only cleaned up on access. Both are sub-par. if (areColumnsImmutable(sourcesKey)) { if (mappings == null) { mappings = new WeakHashMap<>(); @@ -321,8 +311,7 @@ public Map getGrouping(TupleSource tupleSource) { if (ephemeralMappings == null) { ephemeralMappings = new WeakHashMap<>(); } - ephemeralMappings.put(sourcesKey, - new MappingInfo(result, LogicalClock.DEFAULT.currentStep(), tupleSource)); + ephemeralMappings.put(sourcesKey, new MappingInfo(result, LogicalClock.DEFAULT.currentStep(), tupleSource)); } return result; @@ -345,13 +334,13 @@ public void copyImmutableGroupings(TupleSource source, TupleSource dest) { } private static void collectGrouping( - final Index thisIndex, - final UnaryOperator indexOp, - WeakHashMap, MappingInfo> mappings, - WeakHashMap, MappingInfo> ephemeralMappings, - final BiConsumer resultCollector, - final TupleSource tupleSource, - final List keyColumns) { + final Index thisIndex, + final UnaryOperator indexOp, + WeakHashMap, MappingInfo> mappings, + WeakHashMap, MappingInfo> ephemeralMappings, + final BiConsumer resultCollector, + final TupleSource tupleSource, + final List keyColumns) { if (keyColumns.isEmpty()) { resultCollector.accept(EmptyTuple.INSTANCE, thisIndex.clone()); } else if (keyColumns.size() == 1 && keyColumns.get(0).getGroupToRange() != null) { @@ -364,25 +353,23 @@ private static void collectGrouping( } } } else { - final long columnsWithGrouping = - keyColumns.stream().filter(cs -> cs.getGroupToRange() != null).count(); + final long columnsWithGrouping = keyColumns.stream().filter(cs -> 
cs.getGroupToRange() != null).count(); final boolean canUseAllConstituents = columnsWithGrouping == keyColumns.size(); final boolean canUseAnyConstituents = columnsWithGrouping > 0; if (canUseAllConstituents) { // we can generate a grouping using just the pre-existing groupings - generateGrouping(indexOp, resultCollector, tupleSource, keyColumns, 0, - new Object[keyColumns.size()], null); + generateGrouping(indexOp, resultCollector, tupleSource, keyColumns, 0, new Object[keyColumns.size()], + null); } else if (canUseAnyConstituents) { - generatePartialGrouping(thisIndex, indexOp, mappings, ephemeralMappings, - resultCollector, tupleSource, keyColumns); + generatePartialGrouping(thisIndex, indexOp, mappings, ephemeralMappings, resultCollector, tupleSource, + keyColumns); } else { final Map resultBuilder = new LinkedHashMap<>(); for (final Index.Iterator iterator = thisIndex.iterator(); iterator.hasNext();) { final long next = iterator.nextLong(); final Object key = tupleSource.createTuple(next); - resultBuilder.computeIfAbsent(key, k -> Index.FACTORY.getSequentialBuilder()) - .appendKey(next); + resultBuilder.computeIfAbsent(key, k -> Index.FACTORY.getSequentialBuilder()).appendKey(next); } resultBuilder.forEach((k, v) -> resultCollector.accept(k, v.getIndex())); } @@ -390,48 +377,45 @@ private static void collectGrouping( } private static void generatePartialGrouping( - final Index thisIndex, - final UnaryOperator indexOp, - final WeakHashMap, MappingInfo> mappings, - final WeakHashMap, MappingInfo> ephemeralMappings, - final BiConsumer resultCollector, TupleSource tupleSource, - List keyColumns) { + final Index thisIndex, + final UnaryOperator indexOp, + final WeakHashMap, MappingInfo> mappings, + final WeakHashMap, MappingInfo> ephemeralMappings, + final BiConsumer resultCollector, TupleSource tupleSource, List keyColumns) { // we can generate the grouping partially from our constituents - final ColumnSource[] groupedKeyColumns = keyColumns.stream() - .filter(cs 
-> cs.getGroupToRange() != null).toArray(ColumnSource[]::new); - final ColumnSource[] notGroupedKeyColumns = keyColumns.stream() - .filter(cs -> cs.getGroupToRange() == null).toArray(ColumnSource[]::new); + final ColumnSource[] groupedKeyColumns = + keyColumns.stream().filter(cs -> cs.getGroupToRange() != null).toArray(ColumnSource[]::new); + final ColumnSource[] notGroupedKeyColumns = + keyColumns.stream().filter(cs -> cs.getGroupToRange() == null).toArray(ColumnSource[]::new); - final TupleSource groupedTupleSource = - TupleSourceFactory.makeTupleSource(groupedKeyColumns); + final TupleSource groupedTupleSource = TupleSourceFactory.makeTupleSource(groupedKeyColumns); final Map groupedColumnsGrouping = - getGrouping(thisIndex, indexOp, mappings, ephemeralMappings, groupedTupleSource); - generatePartialGroupingSecondHalf(groupedKeyColumns, notGroupedKeyColumns, - groupedTupleSource, groupedColumnsGrouping, - resultCollector, tupleSource, keyColumns); + getGrouping(thisIndex, indexOp, mappings, ephemeralMappings, groupedTupleSource); + generatePartialGroupingSecondHalf(groupedKeyColumns, notGroupedKeyColumns, groupedTupleSource, + groupedColumnsGrouping, + resultCollector, tupleSource, keyColumns); } - private void generatePartialGrouping(BiConsumer resultCollector, - TupleSource tupleSource, List keyColumns) { + private void generatePartialGrouping(BiConsumer resultCollector, TupleSource tupleSource, + List keyColumns) { // we can generate the grouping partially from our constituents - final ColumnSource[] groupedKeyColumns = keyColumns.stream() - .filter(cs -> cs.getGroupToRange() != null).toArray(ColumnSource[]::new); - final ColumnSource[] notGroupedKeyColumns = keyColumns.stream() - .filter(cs -> cs.getGroupToRange() == null).toArray(ColumnSource[]::new); + final ColumnSource[] groupedKeyColumns = + keyColumns.stream().filter(cs -> cs.getGroupToRange() != null).toArray(ColumnSource[]::new); + final ColumnSource[] notGroupedKeyColumns = + 
keyColumns.stream().filter(cs -> cs.getGroupToRange() == null).toArray(ColumnSource[]::new); - final TupleSource groupedTupleSource = - TupleSourceFactory.makeTupleSource(groupedKeyColumns); + final TupleSource groupedTupleSource = TupleSourceFactory.makeTupleSource(groupedKeyColumns); final Map groupedColumnsGrouping = getGrouping(groupedTupleSource); - generatePartialGroupingSecondHalf(groupedKeyColumns, notGroupedKeyColumns, - groupedTupleSource, groupedColumnsGrouping, - resultCollector, tupleSource, keyColumns); + generatePartialGroupingSecondHalf(groupedKeyColumns, notGroupedKeyColumns, groupedTupleSource, + groupedColumnsGrouping, + resultCollector, tupleSource, keyColumns); } private static void generatePartialGroupingSecondHalf( - final ColumnSource[] groupedKeyColumns, final ColumnSource[] notGroupedKeyColumns, - final TupleSource groupedTupleSource, final Map groupedColumnsGrouping, - final BiConsumer resultCollector, final TupleSource tupleSource, - final List keyColumns) { + final ColumnSource[] groupedKeyColumns, final ColumnSource[] notGroupedKeyColumns, + final TupleSource groupedTupleSource, final Map groupedColumnsGrouping, + final BiConsumer resultCollector, final TupleSource tupleSource, + final List keyColumns) { final Map resultBuilder = new LinkedHashMap<>(); final int[] groupedKeysIndices = new int[groupedKeyColumns.length]; @@ -454,7 +438,7 @@ private static void generatePartialGroupingSecondHalf( for (int ii = 0; ii < groupedKeysIndices.length; ++ii) { // noinspection unchecked partialKeyValues[groupedKeysIndices[ii]] = - groupedTupleSource.exportElementReinterpreted(groupedTuple, ii); + groupedTupleSource.exportElementReinterpreted(groupedTuple, ii); } } @@ -463,32 +447,29 @@ private static void generatePartialGroupingSecondHalf( final long next = iterator.nextLong(); for (int ii = 0; ii < notGroupedKeysIndices.length; ++ii) { - partialKeyValues[notGroupedKeysIndices[ii]] = - notGroupedKeyColumns[ii].get(next); + 
partialKeyValues[notGroupedKeysIndices[ii]] = notGroupedKeyColumns[ii].get(next); } - resultBuilder.computeIfAbsent( - tupleSource.createTupleFromReinterpretedValues(partialKeyValues), - k -> Index.FACTORY.getSequentialBuilder()).appendKey(next); + resultBuilder.computeIfAbsent(tupleSource.createTupleFromReinterpretedValues(partialKeyValues), + k -> Index.FACTORY.getSequentialBuilder()).appendKey(next); } } resultBuilder.forEach((k, v) -> resultCollector.accept(k, v.getIndex())); } - private void generatePartialGroupingForKeySet(BiConsumer resultCollector, - TupleSource tupleSource, List keyColumns, Set keys) { + private void generatePartialGroupingForKeySet(BiConsumer resultCollector, TupleSource tupleSource, + List keyColumns, Set keys) { // we can generate the grouping partially from our constituents - final ColumnSource[] groupedKeyColumns = keyColumns.stream() - .filter(cs -> cs.getGroupToRange() != null).toArray(ColumnSource[]::new); - final ColumnSource[] notGroupedKeyColumns = keyColumns.stream() - .filter(cs -> cs.getGroupToRange() == null).toArray(ColumnSource[]::new); + final ColumnSource[] groupedKeyColumns = + keyColumns.stream().filter(cs -> cs.getGroupToRange() != null).toArray(ColumnSource[]::new); + final ColumnSource[] notGroupedKeyColumns = + keyColumns.stream().filter(cs -> cs.getGroupToRange() == null).toArray(ColumnSource[]::new); Require.gtZero(groupedKeyColumns.length, "groupedKeyColumns.length"); Require.gtZero(notGroupedKeyColumns.length, "notGroupedKeyColumns.length"); - final TupleSource groupedTupleSource = - TupleSourceFactory.makeTupleSource(groupedKeyColumns); + final TupleSource groupedTupleSource = TupleSourceFactory.makeTupleSource(groupedKeyColumns); final Map resultBuilder = new LinkedHashMap<>(); @@ -506,23 +487,19 @@ private void generatePartialGroupingForKeySet(BiConsumer resultCo final Set groupPruningSet = new HashSet<>(); if (groupedKeysIndices.length == 1) { // noinspection unchecked - keys.forEach(x -> groupPruningSet - 
.add(tupleSource.exportElementReinterpreted(x, groupedKeysIndices[0]))); + keys.forEach(x -> groupPruningSet.add(tupleSource.exportElementReinterpreted(x, groupedKeysIndices[0]))); } else { final Object[] groupingKeyValues = new Object[groupedKeysIndices.length]; keys.forEach(x -> { for (int ii = 0; ii < groupingKeyValues.length; ++ii) { // noinspection unchecked - groupingKeyValues[ii] = - tupleSource.exportElementReinterpreted(x, groupedKeysIndices[ii]); + groupingKeyValues[ii] = tupleSource.exportElementReinterpreted(x, groupedKeysIndices[ii]); } - groupPruningSet - .add(groupedTupleSource.createTupleFromReinterpretedValues(groupingKeyValues)); + groupPruningSet.add(groupedTupleSource.createTupleFromReinterpretedValues(groupingKeyValues)); }); } - final Map groupedColumnsGrouping = - getGroupingForKeySet(groupPruningSet, groupedTupleSource); + final Map groupedColumnsGrouping = getGroupingForKeySet(groupPruningSet, groupedTupleSource); final Object[] lookupKeyValues = new Object[keyColumns.size()]; @@ -538,7 +515,7 @@ private void generatePartialGroupingForKeySet(BiConsumer resultCo for (int ii = 0; ii < groupedKeysIndices.length; ++ii) { // noinspection unchecked lookupKeyValues[groupedKeysIndices[ii]] = - groupedTupleSource.exportElementReinterpreted(entry.getKey(), ii); + groupedTupleSource.exportElementReinterpreted(entry.getKey(), ii); } } @@ -555,7 +532,7 @@ private void generatePartialGroupingForKeySet(BiConsumer resultCo } final SequentialBuilder indexForKey = - resultBuilder.computeIfAbsent(key, k -> Index.FACTORY.getSequentialBuilder()); + resultBuilder.computeIfAbsent(key, k -> Index.FACTORY.getSequentialBuilder()); indexForKey.appendKey(next); } } @@ -575,8 +552,7 @@ public Map getGroupingForKeySet(Set keys, TupleSource tup @Override public Index getSubIndexForKeySet(Set keys, TupleSource tupleSource) { final IndexBuilder indexBuilder = Index.FACTORY.getBuilder(); - final BiConsumer resultCollector = - (key, index) -> indexBuilder.addIndex(index); + 
final BiConsumer resultCollector = (key, index) -> indexBuilder.addIndex(index); collectGroupingForKeySet(keys, tupleSource, resultCollector); @@ -584,7 +560,7 @@ public Index getSubIndexForKeySet(Set keys, TupleSource tupleSource) { } private void collectGroupingForKeySet(Set keys, TupleSource tupleSource, - BiConsumer resultCollector) { + BiConsumer resultCollector) { // noinspection unchecked final List keyColumns = tupleSource.getColumnSources(); if (keyColumns.isEmpty()) { @@ -592,23 +568,21 @@ private void collectGroupingForKeySet(Set keys, TupleSource tupleSource, } else if (keyColumns.size() == 1 && keyColumns.get(0).getGroupToRange() != null) { @SuppressWarnings("unchecked") final Map sourceGrouping = keyColumns.get(0).getGroupToRange(); - sourceGrouping.entrySet().stream() - .filter(objectIndexEntry -> keys.contains(objectIndexEntry.getKey())) - .forEach(objectIndexEntry -> { - final Index resultIndex = objectIndexEntry.getValue().intersect(this); - if (resultIndex.size() > 0) { - resultCollector.accept(objectIndexEntry.getKey(), resultIndex); - } - }); + sourceGrouping.entrySet().stream().filter(objectIndexEntry -> keys.contains(objectIndexEntry.getKey())) + .forEach(objectIndexEntry -> { + final Index resultIndex = objectIndexEntry.getValue().intersect(this); + if (resultIndex.size() > 0) { + resultCollector.accept(objectIndexEntry.getKey(), resultIndex); + } + }); } else { - final long columnsWithGrouping = - keyColumns.stream().filter(cs -> cs.getGroupToRange() != null).count(); + final long columnsWithGrouping = keyColumns.stream().filter(cs -> cs.getGroupToRange() != null).count(); final boolean canUseAllConstituents = columnsWithGrouping == keyColumns.size(); final boolean canUseAnyConstituents = columnsWithGrouping > 0; if (canUseAllConstituents) { - generateGrouping(resultCollector, tupleSource, keyColumns, 0, - new Object[keyColumns.size()], null, keys); + generateGrouping(resultCollector, tupleSource, keyColumns, 0, new 
Object[keyColumns.size()], null, + keys); } else if (canUseAnyConstituents) { generatePartialGroupingForKeySet(resultCollector, tupleSource, keyColumns, keys); } else { @@ -617,28 +591,25 @@ private void collectGroupingForKeySet(Set keys, TupleSource tupleSource, final long next = iterator.nextLong(); final Object key = tupleSource.createTuple(next); if (keys.contains(key)) { - resultBuilder - .computeIfAbsent(key, k -> Index.FACTORY.getSequentialBuilder()) - .appendKey(next); + resultBuilder.computeIfAbsent(key, k -> Index.FACTORY.getSequentialBuilder()).appendKey(next); } } - for (Map.Entry objectIndexBuilderEntry : resultBuilder - .entrySet()) { + for (Map.Entry objectIndexBuilderEntry : resultBuilder.entrySet()) { resultCollector.accept(objectIndexBuilderEntry.getKey(), - objectIndexBuilderEntry.getValue().getIndex()); + objectIndexBuilderEntry.getValue().getIndex()); } } } } private static void generateGrouping( - UnaryOperator indexOp, - BiConsumer resultCollector, - TupleSource tupleSource, - List keyColumns, - int position, - Object[] partialValues, - Index partiallyIntersectedIndex) { + UnaryOperator indexOp, + BiConsumer resultCollector, + TupleSource tupleSource, + List keyColumns, + int position, + Object[] partialValues, + Index partiallyIntersectedIndex) { for (Object objectEntry : keyColumns.get(position).getGroupToRange().entrySet()) { // noinspection unchecked final Map.Entry entry = (Map.Entry) objectEntry; @@ -651,34 +622,31 @@ private static void generateGrouping( } if (subIndex.nonempty()) { if (position == keyColumns.size() - 1) { - // we're at the very last bit, so we should start shoving our tuples into the - // result map - resultCollector.accept( - tupleSource.createTupleFromReinterpretedValues(partialValues), subIndex); + // we're at the very last bit, so we should start shoving our tuples into the result map + resultCollector.accept(tupleSource.createTupleFromReinterpretedValues(partialValues), subIndex); } else { - 
generateGrouping(indexOp, resultCollector, tupleSource, keyColumns, - position + 1, partialValues, subIndex); + generateGrouping(indexOp, resultCollector, tupleSource, keyColumns, position + 1, partialValues, + subIndex); } } } } - private void generateGrouping(BiConsumer resultCollector, - TupleSource tupleSource, List keyColumns, int position, - Object[] partialValues, Index partiallyIntersectedIndex, Set keyRestriction) { + private void generateGrouping(BiConsumer resultCollector, TupleSource tupleSource, + List keyColumns, int position, Object[] partialValues, Index partiallyIntersectedIndex, + Set keyRestriction) { final boolean finalPosition = position == keyColumns.size() - 1; final List subSources = keyColumns.subList(0, position + 1); - final TupleSource subTupleSource = TupleSourceFactory - .makeTupleSource(subSources.toArray(ColumnSource.ZERO_LENGTH_COLUMN_SOURCE_ARRAY)); + final TupleSource subTupleSource = + TupleSourceFactory.makeTupleSource(subSources.toArray(ColumnSource.ZERO_LENGTH_COLUMN_SOURCE_ARRAY)); final Set pruningSet; if (finalPosition) { pruningSet = keyRestriction; } else if (position == 0) { // noinspection unchecked - pruningSet = - keyRestriction.stream().map(x -> tupleSource.exportElementReinterpreted(x, 0)) + pruningSet = keyRestriction.stream().map(x -> tupleSource.exportElementReinterpreted(x, 0)) .collect(Collectors.toCollection(HashSet::new)); } else { pruningSet = new HashSet<>(); @@ -693,8 +661,7 @@ private void generateGrouping(BiConsumer resultCollector, } // noinspection unchecked - final Map groupToRange = - (Map) keyColumns.get(position).getGroupToRange(); + final Map groupToRange = (Map) keyColumns.get(position).getGroupToRange(); final Object[] pruningKey = Arrays.copyOf(partialValues, position + 1); for (Map.Entry entry : groupToRange.entrySet()) { pruningKey[position] = partialValues[position] = entry.getKey(); @@ -712,8 +679,7 @@ private void generateGrouping(BiConsumer resultCollector, tuple = null; } else { - if 
(!pruningSet - .contains(subTupleSource.createTupleFromReinterpretedValues(pruningKey))) { + if (!pruningSet.contains(subTupleSource.createTupleFromReinterpretedValues(pruningKey))) { continue; } @@ -729,12 +695,11 @@ private void generateGrouping(BiConsumer resultCollector, if (subIndex.nonempty()) { if (finalPosition) { - // we're at the very last bit, so we should start shoving our smart keys into - // the result map + // we're at the very last bit, so we should start shoving our smart keys into the result map resultCollector.accept(tuple, subIndex); } else { - generateGrouping(resultCollector, tupleSource, keyColumns, position + 1, - partialValues, subIndex, keyRestriction); + generateGrouping(resultCollector, tupleSource, keyColumns, position + 1, partialValues, subIndex, + keyRestriction); } } } @@ -756,14 +721,11 @@ public Map getPrevGrouping(TupleSource tupleSource) { for (final Index.Iterator iterator = this.iterator(); iterator.hasNext();) { final long next = iterator.nextLong(); final Object key = tupleSource.createPreviousTuple(next); - resultBuilder.computeIfAbsent(key, k -> Index.FACTORY.getSequentialBuilder()) - .appendKey(next); + resultBuilder.computeIfAbsent(key, k -> Index.FACTORY.getSequentialBuilder()).appendKey(next); } result = new LinkedHashMap<>(); - for (Map.Entry objectIndexBuilderEntry : resultBuilder - .entrySet()) { - result.put(objectIndexBuilderEntry.getKey(), - objectIndexBuilderEntry.getValue().getIndex()); + for (Map.Entry objectIndexBuilderEntry : resultBuilder.entrySet()) { + result.put(objectIndexBuilderEntry.getKey(), objectIndexBuilderEntry.getValue().getIndex()); } } if (areColumnsImmutable(sourcesKey)) { @@ -776,7 +738,7 @@ public Map getPrevGrouping(TupleSource tupleSource) { ephemeralPrevMappings = new WeakHashMap<>(); } ephemeralPrevMappings.put(sourcesKey, - new MappingInfo(result, LogicalClock.DEFAULT.currentStep(), tupleSource)); + new MappingInfo(result, LogicalClock.DEFAULT.currentStep(), tupleSource)); } return 
result; } @@ -788,8 +750,7 @@ public boolean hasGrouping(ColumnSource... keyColumns) { } final List sourcesKey = Arrays.asList(keyColumns); final Map groupingCandidate = lookupMapping(sourcesKey); - return groupingCandidate != null - || keyColumns.length == 1 && keyColumns[0].getGroupToRange() != null; + return groupingCandidate != null || keyColumns.length == 1 && keyColumns[0].getGroupToRange() != null; } private boolean areColumnsImmutable(List sourcesKey) { diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/StaticSingleValueRedirectionIndexImpl.java b/DB/src/main/java/io/deephaven/db/v2/utils/StaticSingleValueRedirectionIndexImpl.java index 6ea45a2937c..c368da7cbfa 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/StaticSingleValueRedirectionIndexImpl.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/StaticSingleValueRedirectionIndexImpl.java @@ -47,9 +47,9 @@ public String toString() { @Override public void fillChunk( - @NotNull final FillContext fillContext, - @NotNull final WritableLongChunk mappedKeysOut, - @NotNull final OrderedKeys keysToMap) { + @NotNull final FillContext fillContext, + @NotNull final WritableLongChunk mappedKeysOut, + @NotNull final OrderedKeys keysToMap) { final int sz = keysToMap.intSize(); mappedKeysOut.setSize(sz); mappedKeysOut.fillWithValue(0, sz, value); @@ -57,9 +57,9 @@ public void fillChunk( @Override public void fillPrevChunk( - @NotNull final FillContext fillContext, - @NotNull final WritableLongChunk mappedKeysOut, - @NotNull final OrderedKeys keysToMap) { + @NotNull final FillContext fillContext, + @NotNull final WritableLongChunk mappedKeysOut, + @NotNull final OrderedKeys keysToMap) { // no prev fillChunk(fillContext, mappedKeysOut, keysToMap); } diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/SyncTableFilter.java b/DB/src/main/java/io/deephaven/db/v2/utils/SyncTableFilter.java index a9aa01833bf..4d0b23dde18 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/SyncTableFilter.java +++ 
b/DB/src/main/java/io/deephaven/db/v2/utils/SyncTableFilter.java @@ -32,34 +32,31 @@ /** * Return the rows with the highest commonly available ID from multiple tables. * - * The Deephaven system does not provide cross table (or partition) transactions. However, you may - * have a producer that generates more than one table, which you would like to then use in a - * coordinated fashion. The Deephaven system preserves order within one partition, but the relative - * order of tables can not be guaranteed. The tailers, DIS, and other infrastructure process each - * partition independently to provide maximum throughput; and thus a row that logged later may - * appear in your query before another row that was logged earlier within a different partition. + * The Deephaven system does not provide cross table (or partition) transactions. However, you may have a producer that + * generates more than one table, which you would like to then use in a coordinated fashion. The Deephaven system + * preserves order within one partition, but the relative order of tables can not be guaranteed. The tailers, DIS, and + * other infrastructure process each partition independently to provide maximum throughput; and thus a row that logged + * later may appear in your query before another row that was logged earlier within a different partition. * - * If you tag each row with a long column that can be used to correlate the two tables, the - * SyncTableFilter can release only rows with matching values in all of the input tables. For - * example, if you have input tables "a" "b" and "c", with a key of "USym", if "a" has rows for SPY - * with IDs of 1, 2, and 3, but "b" and "c" only have rows for "2", the filter will pass through the - * rows with ID 2. When both "b" and "c" have SPY rows for 3, then the rows for ID 2 are removed and - * the rows for ID 3 are added to the result tables. 
+ * If you tag each row with a long column that can be used to correlate the two tables, the SyncTableFilter can release + * only rows with matching values in all of the input tables. For example, if you have input tables "a" "b" and "c", + * with a key of "USym", if "a" has rows for SPY with IDs of 1, 2, and 3, but "b" and "c" only have rows for "2", the + * filter will pass through the rows with ID 2. When both "b" and "c" have SPY rows for 3, then the rows for ID 2 are + * removed and the rows for ID 3 are added to the result tables. * - * The SyncTableFilter is configured through the Builder inner class. Tables are added to the - * builder, providing a name for each table. The return value is a TableMap of the results, each - * filtered input table is accessible according to the name provided to the builder. + * The SyncTableFilter is configured through the Builder inner class. Tables are added to the builder, providing a name + * for each table. The return value is a TableMap of the results, each filtered input table is accessible according to + * the name provided to the builder. * - * For each key, only pass through the rows that have the minimum value of an ID across all tables. - * The IDs must be monotonically increasing for each key. Your underlying tables must make use of - * transactions such that all rows for a given ID appear in a input table at once. Please consult - * your Deephaven representative before deploying a query that includes this filter to ensure that - * the assumptions are not violated. + * For each key, only pass through the rows that have the minimum value of an ID across all tables. The IDs must be + * monotonically increasing for each key. Your underlying tables must make use of transactions such that all rows for a + * given ID appear in a input table at once. Please consult your Deephaven representative before deploying a query that + * includes this filter to ensure that the assumptions are not violated. 
*/ public class SyncTableFilter { private static final int CHUNK_SIZE = - Configuration.getInstance().getIntegerWithDefault("SyncTableFilter.chunkSize", 1 << 16); + Configuration.getInstance().getIntegerWithDefault("SyncTableFilter.chunkSize", 1 << 16); private final List tables; private final QueryTable[] results; private final Index[] resultIndex; @@ -95,17 +92,16 @@ private static abstract class SyncDescription { private static class SyncTableDescription extends SyncDescription { final Table table; - SyncTableDescription(final String name, final Table table, final String idColumn, - final String[] keyColumns) { + SyncTableDescription(final String name, final Table table, final String idColumn, final String[] keyColumns) { super(name, idColumn, keyColumns); this.table = table; if (!table.hasColumns(idColumn)) { throw new IllegalArgumentException( - "Table \"" + name + "\" does not have ID column \"" + idColumn + "\""); + "Table \"" + name + "\" does not have ID column \"" + idColumn + "\""); } if (!table.hasColumns(keyColumns)) { - throw new IllegalArgumentException("Table \"" + name - + "\" does not have key columns \"" + Arrays.toString(keyColumns) + "\""); + throw new IllegalArgumentException( + "Table \"" + name + "\" does not have key columns \"" + Arrays.toString(keyColumns) + "\""); } } } @@ -114,7 +110,7 @@ private static class SyncTableMapDescription extends SyncDescription { final TableMap tableMap; SyncTableMapDescription(final String name, final TableMap tableMap, final String idColumn, - final String[] keyColumns) { + final String[] keyColumns) { super(name, idColumn, keyColumns); this.tableMap = tableMap; } @@ -149,21 +145,21 @@ private SyncTableFilter(final List tables) { for (int ii = 0; ii < tableCount; ++ii) { final SyncTableDescription std = tables.get(ii); - final ColumnSource[] sources = Arrays.stream(std.keyColumns) - .map(std.table::getColumnSource).toArray(ColumnSource[]::new); + final ColumnSource[] sources = + 
Arrays.stream(std.keyColumns).map(std.table::getColumnSource).toArray(ColumnSource[]::new); if (ii == 0) { keySourcePrototype = sources; } else { if (sources.length != keySourcePrototype.length) { - throw new IllegalArgumentException("Key sources are not compatible for " - + std.name + " (" + (typeString(sources)) + ") and " + tables.get(0).name - + "(" + (typeString(keySourcePrototype)) + ")"); + throw new IllegalArgumentException( + "Key sources are not compatible for " + std.name + " (" + (typeString(sources)) + ") and " + + tables.get(0).name + "(" + (typeString(keySourcePrototype)) + ")"); } for (int cc = 0; cc < sources.length; cc++) { if (keySourcePrototype[cc].getChunkType() != sources[cc].getChunkType()) { - throw new IllegalArgumentException("Key sources are not compatible for " - + std.name + " (" + (typeString(sources)) + ") and " - + tables.get(0).name + "(" + (typeString(keySourcePrototype)) + ")"); + throw new IllegalArgumentException( + "Key sources are not compatible for " + std.name + " (" + (typeString(sources)) + + ") and " + tables.get(0).name + "(" + (typeString(keySourcePrototype)) + ")"); } } } @@ -171,11 +167,10 @@ private SyncTableFilter(final List tables) { keySources[ii] = TupleSourceFactory.makeTupleSource(sources); idSources.add(std.table.getColumnSource(std.idColumn, long.class)); resultIndex[ii] = Index.FACTORY.getEmptyIndex(); - results[ii] = (QueryTable) ((QueryTable) std.table).getSubTable(resultIndex[ii], null, - mergedListener); + results[ii] = (QueryTable) ((QueryTable) std.table).getSubTable(resultIndex[ii], null, mergedListener); - final ListenerRecorder listenerRecorder = new ListenerRecorder( - "SyncTableFilter(" + std.name + ")", (DynamicTable) std.table, results[ii]); + final ListenerRecorder listenerRecorder = + new ListenerRecorder("SyncTableFilter(" + std.name + ")", (DynamicTable) std.table, results[ii]); ((DynamicTable) std.table).listenForUpdates(listenerRecorder); recorders.add(listenerRecorder); @@ -213,15 +208,12 
@@ protected void process() { if (recorder.getNotificationStep() == currentStep) { // we are valid if (recorder.getRemoved().nonempty()) { - throw new IllegalStateException( - "Can not process removed rows in SyncTableFilter!"); + throw new IllegalStateException("Can not process removed rows in SyncTableFilter!"); } if (recorder.getShifted().nonempty()) { - throw new IllegalStateException( - "Can not process shifted rows in SyncTableFilter!"); + throw new IllegalStateException("Can not process shifted rows in SyncTableFilter!"); } - final Index addedAndModified = - recorder.getAdded().union(recorder.getModified()); + final Index addedAndModified = recorder.getAdded().union(recorder.getModified()); consumeRows(rr, addedAndModified); } } @@ -245,8 +237,7 @@ protected void process() { continue; } if (!keysToRefilter.contains(key)) { - // if we did not refilter this key; then we should add the currently matched - // values, + // if we did not refilter this key; then we should add the currently matched values, // otherwise we ignore them because they have already been superseded final Index newlyMatchedRows = state.currentIdBuilder.getIndex(); state.matchedRows.insert(newlyMatchedRows); @@ -296,16 +287,15 @@ private void doMatch(final int tableIndex, final KeyState state, final long matc final Index.SequentialBuilder matchedBuilder = Index.FACTORY.getSequentialBuilder(); final Index.SequentialBuilder pendingBuilder = Index.FACTORY.getSequentialBuilder(); final WritableLongChunk keyIndices = - WritableLongChunk.makeWritableChunk(CHUNK_SIZE); + WritableLongChunk.makeWritableChunk(CHUNK_SIZE); try (final OrderedKeys.Iterator okit = state.pendingRows.getOrderedKeysIterator(); - final ColumnSource.GetContext getContext = - idSources.get(tableIndex).makeGetContext(CHUNK_SIZE)) { + final ColumnSource.GetContext getContext = idSources.get(tableIndex).makeGetContext(CHUNK_SIZE)) { while (okit.hasMore()) { final OrderedKeys chunkOk = okit.getNextOrderedKeysWithLength(CHUNK_SIZE); 
chunkOk.fillKeyIndicesChunk(keyIndices); final LongChunk idChunk = - idSources.get(tableIndex).getChunk(getContext, chunkOk).asLongChunk(); + idSources.get(tableIndex).getChunk(getContext, chunkOk).asLongChunk(); for (int ii = 0; ii < idChunk.size(); ++ii) { final long id = idChunk.get(ii); if (id > matchValue) { @@ -371,12 +361,12 @@ private Pair, HashSet> processPendingKeys() { final long maxUnprocessed = keyStates[0].unprocessedIds.get(rp); boolean foundInAllTables = true; for (int tt = 1; tt < tableCount; ++tt) { - while (checkRps[tt - 1] >= 0 && keyStates[tt].unprocessedIds - .get(checkRps[tt - 1]) > maxUnprocessed) { + while (checkRps[tt - 1] >= 0 + && keyStates[tt].unprocessedIds.get(checkRps[tt - 1]) > maxUnprocessed) { checkRps[tt - 1]--; } - if (checkRps[tt - 1] < 0 || keyStates[tt].unprocessedIds - .get(checkRps[tt - 1]) != maxUnprocessed) { + if (checkRps[tt - 1] < 0 + || keyStates[tt].unprocessedIds.get(checkRps[tt - 1]) != maxUnprocessed) { foundInAllTables = false; break; } @@ -384,8 +374,7 @@ private Pair, HashSet> processPendingKeys() { if (!foundInAllTables) { rp--; } else { - // this is the ID, yay, we'll need to refilter the pending rows in each - // table + // this is the ID, yay, we'll need to refilter the pending rows in each table minimumid.put(pendingKey, maxUnprocessed); keysToRefilter.add(pendingKey); // we need to clear out unprocessed IDS <= maxUnprocessed @@ -405,13 +394,12 @@ private Pair, HashSet> processPendingKeys() { private void consumeRows(final int tableIndex, final Index index) { // in Treasure the TupleSource will handle chunks better final WritableObjectChunk valuesChunk = WritableObjectChunk.makeWritableChunk(CHUNK_SIZE); - final WritableLongChunk idChunk = - WritableLongChunk.makeWritableChunk(CHUNK_SIZE); + final WritableLongChunk idChunk = WritableLongChunk.makeWritableChunk(CHUNK_SIZE); final WritableLongChunk keyIndicesChunk = - WritableLongChunk.makeWritableChunk(CHUNK_SIZE); + 
WritableLongChunk.makeWritableChunk(CHUNK_SIZE); final ColumnSource idSource = idSources.get(tableIndex); try (final OrderedKeys.Iterator okIt = index.getOrderedKeysIterator(); - final ColumnSource.FillContext fillContext = idSource.makeFillContext(CHUNK_SIZE)) { + final ColumnSource.FillContext fillContext = idSource.makeFillContext(CHUNK_SIZE)) { while (okIt.hasMore()) { final OrderedKeys chunkOk = okIt.getNextOrderedKeysWithLength(CHUNK_SIZE); chunkOk.fillKeyIndicesChunk(keyIndicesChunk); @@ -424,14 +412,13 @@ private void consumeRows(final int tableIndex, final Index index) { }); idSource.fillChunk(fillContext, idChunk, chunkOk); - // TODO: when we are in Treasure, we should sort this so we do not need to - // repeatedly look things up + // TODO: when we are in Treasure, we should sort this so we do not need to repeatedly look things up // TODO: In Treasure we can also use current factories for our index for (int ii = 0; ii < idChunk.size(); ++ii) { final Object key = valuesChunk.get(ii); pendingKeys.add(key); final KeyState currentState = - objectToState.get(tableIndex).computeIfAbsent(key, (k) -> new KeyState()); + objectToState.get(tableIndex).computeIfAbsent(key, (k) -> new KeyState()); if (currentState.unprocessedBuilder == null) { currentState.unprocessedBuilder = Index.FACTORY.getSequentialBuilder(); } @@ -453,8 +440,7 @@ private void consumeRows(final int tableIndex, final Index index) { if (unprocessedCount == 0) { currentState.unprocessedIds.add(id); } else { - final long lastUnprocessed = - currentState.unprocessedIds.get(unprocessedCount - 1); + final long lastUnprocessed = currentState.unprocessedIds.get(unprocessedCount - 1); if (lastUnprocessed != id) { if (lastUnprocessed < id) { currentState.unprocessedIds.add(id); @@ -470,8 +456,7 @@ private void consumeRows(final int tableIndex, final Index index) { } private static String typeString(final ColumnSource[] sources) { - return Arrays.stream(sources).map(cs -> cs.getType().getSimpleName()) - 
.collect(Collectors.joining(", ")); + return Arrays.stream(sources).map(cs -> cs.getType().getSimpleName()).collect(Collectors.joining(", ")); } private TableMap getTableMap() { @@ -482,8 +467,7 @@ private TableMap getTableMap() { return map; } - private static class InsertKeySetNotification extends AbstractNotification - implements NotificationQueue.Dependency { + private static class InsertKeySetNotification extends AbstractNotification implements NotificationQueue.Dependency { private final Map> pendingPartitions; private final LocalTableMap result; @NotNull @@ -495,24 +479,22 @@ private static class InsertKeySetNotification extends AbstractNotification long notificationCompletedClock = -1; long queuedNotificationClock = -1; - private InsertKeySetNotification(Map> pendingPartitions, - LocalTableMap result, List descriptions) { + private InsertKeySetNotification(Map> pendingPartitions, LocalTableMap result, + List descriptions) { super(false); this.pendingPartitions = pendingPartitions; this.result = result; this.descriptions = descriptions; - dependencies = - descriptions.stream().map(stmd -> ((NotificationQueue.Dependency) stmd.tableMap)) + dependencies = descriptions.stream().map(stmd -> ((NotificationQueue.Dependency) stmd.tableMap)) .collect(Collectors.toList()); allKeys = descriptions.stream().map(desc -> desc.name).collect(Collectors.toSet()); } @Override public boolean canExecute(final long step) { - return dependencies.stream() - .allMatch((final NotificationQueue.Dependency dep) -> dep.satisfied(step)); + return dependencies.stream().allMatch((final NotificationQueue.Dependency dep) -> dep.satisfied(step)); } @Override @@ -532,21 +514,21 @@ private void notifyChanges() { synchronized (this) { if (notificationClock == currentStep) { throw new IllegalStateException( - "MergedListener was fired before both all listener records completed: listener=" - + System.identityHashCode(this) + ", currentStep=" + currentStep); + "MergedListener was fired before both 
all listener records completed: listener=" + + System.identityHashCode(this) + ", currentStep=" + currentStep); } - // we've already got something in the notification queue that has not yet been - // executed for the current step. + // we've already got something in the notification queue that has not yet been executed for the current + // step. if (queuedNotificationClock == currentStep) { return; } // Otherwise we should have already flushed that notification. Assert.assertion(queuedNotificationClock == notificationClock, - "queuedNotificationClock == notificationClock", queuedNotificationClock, - "queuedNotificationClock", notificationClock, "notificationClock", currentStep, - "currentStep", this, "MergedListener"); + "queuedNotificationClock == notificationClock", queuedNotificationClock, + "queuedNotificationClock", notificationClock, "notificationClock", currentStep, "currentStep", + this, "MergedListener"); queuedNotificationClock = currentStep; LiveTableMonitor.DEFAULT.addNotification(this); @@ -554,8 +536,7 @@ private void notifyChanges() { } private void createFullyPopulatedKeys() { - for (Iterator>> it = - pendingPartitions.entrySet().iterator(); it.hasNext();) { + for (Iterator>> it = pendingPartitions.entrySet().iterator(); it.hasNext();) { final Map.Entry> partitionKeyAndPopulatedNames = it.next(); if (!partitionKeyAndPopulatedNames.getValue().equals(allKeys)) { continue; @@ -563,17 +544,15 @@ private void createFullyPopulatedKeys() { final Object partitionKey = partitionKeyAndPopulatedNames.getKey(); - final List syncTableDescriptions = descriptions.stream() - .map(stmd -> stmd.forPartition(partitionKey)).collect(Collectors.toList()); - final TableMap syncFiltered = - new SyncTableFilter(syncTableDescriptions).getTableMap(); + final List syncTableDescriptions = + descriptions.stream().map(stmd -> stmd.forPartition(partitionKey)).collect(Collectors.toList()); + final TableMap syncFiltered = new SyncTableFilter(syncTableDescriptions).getTableMap(); 
result.addParentReference(syncFiltered); for (Object tableName : syncFiltered.getKeySet()) { final SmartKey transformedKey; if (partitionKey instanceof SmartKey) { final Object[] partitionKeyArray = ((SmartKey) partitionKey).values_; - final Object[] newKey = - Arrays.copyOf(partitionKeyArray, partitionKeyArray.length + 1); + final Object[] newKey = Arrays.copyOf(partitionKeyArray, partitionKeyArray.length + 1); newKey[newKey.length - 1] = tableName; transformedKey = new SmartKey(newKey); } else { @@ -601,8 +580,8 @@ public boolean satisfied(final long step) { @Override public LogOutput append(LogOutput output) { - return output.append("SyncTableFilter.InsertKeyNotification{") - .append(System.identityHashCode(this)).append("}"); + return output.append("SyncTableFilter.InsertKeyNotification{").append(System.identityHashCode(this)) + .append("}"); } } @@ -618,7 +597,7 @@ private static TableMap createTableMapAdapter(List desc final InsertKeySetNotification notification = - new InsertKeySetNotification(pendingPartitions, result, descriptions); + new InsertKeySetNotification(pendingPartitions, result, descriptions); final TableMap.KeyListener[] keyListeners = new TableMap.KeyListener[mapCount]; for (int ii = 0; ii < descriptions.size(); ++ii) { @@ -643,10 +622,9 @@ private static TableMap createTableMapAdapter(List desc return result; } - private static void markTablePopulated(Map> pendingPartitions, String name, - Object key) { - final Set presentTables = pendingPartitions.computeIfAbsent(key, - (k) -> Collections.newSetFromMap(new ConcurrentHashMap<>())); + private static void markTablePopulated(Map> pendingPartitions, String name, Object key) { + final Set presentTables = + pendingPartitions.computeIfAbsent(key, (k) -> Collections.newSetFromMap(new ConcurrentHashMap<>())); presentTables.add(name); } @@ -654,11 +632,10 @@ private static void markTablePopulated(Map> pendingPartition * Produce a TableMap of synchronized tables. * *

      - * You may include either Tables or TableMaps, but not both. When Tables are included, the - * result of the build call is a TableMap with a String key that corresponds to the name of the - * input table. When TableMaps are added, the result is a TableMap with composite keys - * (SmartKeys) that are prefixed with the keys from the input TableMap, with a last element that - * is the name of the source passed to the builder. + * You may include either Tables or TableMaps, but not both. When Tables are included, the result of the build call + * is a TableMap with a String key that corresponds to the name of the input table. When TableMaps are added, the + * result is a TableMap with composite keys (SmartKeys) that are prefixed with the keys from the input TableMap, + * with a last element that is the name of the source passed to the builder. *

      */ public static class Builder { @@ -692,15 +669,13 @@ public Builder(final String defaultId, final String... defaultKeys) { * @param name the key of the Table in our output TableMap. * @param table the Table to add * @param idColumn the name of the ID column in the table, must be a long - * @param keyColumns the key columns, each key is coordinated independently of the other - * keys + * @param keyColumns the key columns, each key is coordinated independently of the other keys * @return this builder */ public Builder addTable(final String name, final Table table, final String idColumn, - final String... keyColumns) { + final String... keyColumns) { if (!tableMaps.isEmpty()) { - throw new IllegalArgumentException( - "Can not mix Tables and TableMaps in a SyncTableFilter"); + throw new IllegalArgumentException("Can not mix Tables and TableMaps in a SyncTableFilter"); } tables.add(new SyncTableDescription(name, table, idColumn, keyColumns)); return this; @@ -709,17 +684,16 @@ public Builder addTable(final String name, final Table table, final String idCol private void checkDefaultsInitialized(final String type) { if (defaultId == null) { throw new IllegalArgumentException("Can not specify " + type - + " without an ID column unless the default ID has been set on the builder!"); + + " without an ID column unless the default ID has been set on the builder!"); } if (defaultKeys == null) { throw new IllegalArgumentException("Can not specify " + type - + " without a key columns unless the default keys have been set on the builder!"); + + " without a key columns unless the default keys have been set on the builder!"); } } /** - * Add a table to the set of tables to be synchronized, using this builder's default ID and - * key column names. + * Add a table to the set of tables to be synchronized, using this builder's default ID and key column names. * * @param name the key of the Table in our output TableMap. 
* @param table the Table to add @@ -750,16 +724,13 @@ public Builder addTableMap(String name, TableMap tableMap) { * @param name the key of the Table in our output TableMap. * @param tableMap the TableMap to add * @param idColumn the name of the ID column in the table, must be a long - * @param keyColumns the key columns, each key is coordinated independently of the other - * keys + * @param keyColumns the key columns, each key is coordinated independently of the other keys * * @return this builder */ - public Builder addTableMap(String name, TableMap tableMap, final String idColumn, - final String... keyColumns) { + public Builder addTableMap(String name, TableMap tableMap, final String idColumn, final String... keyColumns) { if (!tables.isEmpty()) { - throw new IllegalArgumentException( - "Can not mix Tables and TableMaps in a SyncTableFilter"); + throw new IllegalArgumentException("Can not mix Tables and TableMaps in a SyncTableFilter"); } tableMaps.add(new SyncTableMapDescription(name, tableMap, idColumn, keyColumns)); return this; @@ -803,7 +774,7 @@ public TableMap build() { return createTableMapAdapter(tableMaps); } else { throw new IllegalArgumentException( - "You must specify tables or TableMaps as parameters to the SyncTableFilter.Builder"); + "You must specify tables or TableMaps as parameters to the SyncTableFilter.Builder"); } } } diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/TableBuilder.java b/DB/src/main/java/io/deephaven/db/v2/utils/TableBuilder.java index 229fbb109fa..5a603722159 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/TableBuilder.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/TableBuilder.java @@ -44,11 +44,9 @@ public int rowCount() { } /** - * Adds a row to the table. Items will be inserted into the row the order they are put into this - * method + * Adds a row to the table. Items will be inserted into the row the order they are put into this method * - * @param items The items that will appear in the row. 
Must be the same amount of items as - * columns + * @param items The items that will appear in the row. Must be the same amount of items as columns */ public void addRow(Object... items) { checkRow(items); @@ -64,15 +62,14 @@ private void checkRow(Object[] items) { List colTypes = def.getColumnTypes(); if (items.length != colTypes.size()) { throw new IllegalArgumentException( - "Incorrect column count: expected " + colTypes.size() + " got " + items.length); + "Incorrect column count: expected " + colTypes.size() + " got " + items.length); } for (int i = 0; i < colTypes.size(); i++) { // noinspection unchecked if (items[i] != null && !TypeUtils.getUnboxedTypeIfBoxed(colTypes.get(i)) - .isAssignableFrom(TypeUtils.getUnboxedTypeIfBoxed(items[i].getClass()))) { - throw new IllegalArgumentException( - "Incorrect type for column " + def.getColumnNames().get(i) + .isAssignableFrom(TypeUtils.getUnboxedTypeIfBoxed(items[i].getClass()))) { + throw new IllegalArgumentException("Incorrect type for column " + def.getColumnNames().get(i) + ": expected " + colTypes.get(i).getName() + " got " + items[i].getClass().getName()); } @@ -89,8 +86,8 @@ public Table build() { Map> map = new LinkedHashMap<>(); for (ColumnDefinition columnDefinition : def.getColumns()) { // noinspection unchecked - map.put(columnDefinition.getName(), ArrayBackedColumnSource - .getMemoryColumnSource(rows.size(), columnDefinition.getDataType())); + map.put(columnDefinition.getName(), + ArrayBackedColumnSource.getMemoryColumnSource(rows.size(), columnDefinition.getDataType())); } // Re-write column oriented diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/TableLoggers.java b/DB/src/main/java/io/deephaven/db/v2/utils/TableLoggers.java index ca244fc85b7..79ce1830e2a 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/TableLoggers.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/TableLoggers.java @@ -4,8 +4,7 @@ import io.deephaven.util.annotations.ScriptApi; /** - * Tools to obtain internal, 
Deephaven logs as tables. These tables include query logs and - * performance logs. + * Tools to obtain internal, Deephaven logs as tables. These tables include query logs and performance logs. */ public class TableLoggers { /** @@ -21,8 +20,8 @@ public static QueryTable updatePerformanceLog() { } /** - * Return a table with query performance data. Individual sub-operations in the query are - * referenced in QueryOperationPerformanceLog. + * Return a table with query performance data. Individual sub-operations in the query are referenced in + * QueryOperationPerformanceLog. * * @return A table with query performance data. */ diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/TailInitializationFilter.java b/DB/src/main/java/io/deephaven/db/v2/utils/TailInitializationFilter.java index b0d83089b9f..c70f135350e 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/TailInitializationFilter.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/TailInitializationFilter.java @@ -15,24 +15,20 @@ import java.util.function.LongUnaryOperator; /** - * For an Intraday restart, we often know that all data of interest must take place within a fixed - * period of time. Rather than processing all of the data, we can binary search in each partition to - * find the relevant rows based on a Timestamp. + * For an Intraday restart, we often know that all data of interest must take place within a fixed period of time. + * Rather than processing all of the data, we can binary search in each partition to find the relevant rows based on a + * Timestamp. * - * This is only designed to operate against a source table, if any rows are modified or removed from - * the table, then the Listener throws an IllegalStateException. Each contiguous range of indices is - * assumed to be a partition. If you filter or otherwise alter the source table before calling - * TailInitializationFilter, this assumption will be violated and the resulting table will not be - * filtered as desired. 
+ * This is only designed to operate against a source table, if any rows are modified or removed from the table, then the + * Listener throws an IllegalStateException. Each contiguous range of indices is assumed to be a partition. If you + * filter or otherwise alter the source table before calling TailInitializationFilter, this assumption will be violated + * and the resulting table will not be filtered as desired. * - * Once initialized, the filter returns all new rows, rows that have already been passed are not - * removed or modified. + * Once initialized, the filter returns all new rows, rows that have already been passed are not removed or modified. * - * The input must be sorted by Timestamp, or the resulting table is undefined. Null timestamps are - * not permitted. + * The input must be sorted by Timestamp, or the resulting table is undefined. Null timestamps are not permitted. * - * For consistency, the last value of each partition is used to determine the threshold for that - * partition. + * For consistency, the last value of each partition is used to determine the threshold for that partition. 
*/ public class TailInitializationFilter { /** @@ -40,12 +36,10 @@ public class TailInitializationFilter { * * @param table the source table to filter * @param timestampName the name of the timestamp column - * @param period interval between the last row in a partition (as converted by - * DBTimeUtils.expressionToNanos) + * @param period interval between the last row in a partition (as converted by DBTimeUtils.expressionToNanos) * @return a table with only the most recent values in each partition */ - public static Table mostRecent(final Table table, final String timestampName, - final String period) { + public static Table mostRecent(final Table table, final String timestampName, final String period) { return mostRecent(table, timestampName, DBTimeUtils.expressionToNanos(period)); } @@ -57,37 +51,30 @@ public static Table mostRecent(final Table table, final String timestampName, * @param nanos interval between the last row in a partition, in nanoseconds * @return a table with only the most recent values in each partition */ - public static Table mostRecent(final Table table, final String timestampName, - final long nanos) { - return QueryPerformanceRecorder.withNugget("TailInitializationFilter(" + nanos + ")", - () -> { - final ColumnSource timestampSource = - table.getColumnSource(timestampName, DBDateTime.class); - if (timestampSource.allowsReinterpret(long.class)) { - // noinspection unchecked - return mostRecentLong(table, timestampSource.reinterpret(long.class), nanos); - } else { - // noinspection unchecked - return mostRecentDateTime(table, timestampSource, nanos); - } - }); + public static Table mostRecent(final Table table, final String timestampName, final long nanos) { + return QueryPerformanceRecorder.withNugget("TailInitializationFilter(" + nanos + ")", () -> { + final ColumnSource timestampSource = table.getColumnSource(timestampName, DBDateTime.class); + if (timestampSource.allowsReinterpret(long.class)) { + // noinspection unchecked + return 
mostRecentLong(table, timestampSource.reinterpret(long.class), nanos); + } else { + // noinspection unchecked + return mostRecentDateTime(table, timestampSource, nanos); + } + }); } - private static Table mostRecentLong(final Table table, final ColumnSource reinterpret, - final long nanos) { + private static Table mostRecentLong(final Table table, final ColumnSource reinterpret, final long nanos) { return mostRecentLong(table, reinterpret::getLong, nanos); } - private static Table mostRecentDateTime(final Table table, final ColumnSource cs, - final long nanos) { + private static Table mostRecentDateTime(final Table table, final ColumnSource cs, final long nanos) { return mostRecentLong(table, (idx) -> cs.get(idx).getNanos(), nanos); } - private static Table mostRecentLong(final Table table, final LongUnaryOperator getValue, - final long nanos) { + private static Table mostRecentLong(final Table table, final LongUnaryOperator getValue, final long nanos) { final Index.SequentialBuilder builder = Index.FACTORY.getSequentialBuilder(); - // we are going to binary search each partition of this table, because the different - // partitions have + // we are going to binary search each partition of this table, because the different partitions have // non-contiguous indices, but values within a partition are contiguous indices. 
table.getIndex().forEachLongRange((s, e) -> { final long lastValue = getValue.applyAsLong(e); @@ -115,20 +102,19 @@ private static Table mostRecentLong(final Table table, final LongUnaryOperator g return true; }); final Index resultIndex = builder.getIndex(); - final QueryTable result = - new QueryTable(table.getDefinition(), resultIndex, table.getColumnSourceMap()); + final QueryTable result = new QueryTable(table.getDefinition(), resultIndex, table.getColumnSourceMap()); if (table.isLive()) { // TODO: Assert AddOnly in T+, propagate AddOnly in Treasure - final InstrumentedListener listener = new BaseTable.ListenerImpl( - "TailInitializationFilter", (DynamicTable) table, result) { - @Override - public void onUpdate(Index added, Index removed, Index modified) { - Assert.assertion(removed.empty(), "removed.empty()"); - Assert.assertion(modified.empty(), "modified.empty()"); - resultIndex.insert(added); - result.notifyListeners(added.clone(), removed, modified); - } - }; + final InstrumentedListener listener = + new BaseTable.ListenerImpl("TailInitializationFilter", (DynamicTable) table, result) { + @Override + public void onUpdate(Index added, Index removed, Index modified) { + Assert.assertion(removed.empty(), "removed.empty()"); + Assert.assertion(modified.empty(), "modified.empty()"); + resultIndex.insert(added); + result.notifyListeners(added.clone(), removed, modified); + } + }; ((DynamicTable) table).listenForUpdates(listener, false); } return result; diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/TerminalNotification.java b/DB/src/main/java/io/deephaven/db/v2/utils/TerminalNotification.java index 619eb4447ae..7983404fd2f 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/TerminalNotification.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/TerminalNotification.java @@ -7,7 +7,6 @@ protected TerminalNotification() { @Override public boolean canExecute(final long step) { - throw new UnsupportedOperationException( - "Terminal notifications do not 
have dependency information."); + throw new UnsupportedOperationException("Terminal notifications do not have dependency information."); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/TickingSingleValueRedirectionIndexImpl.java b/DB/src/main/java/io/deephaven/db/v2/utils/TickingSingleValueRedirectionIndexImpl.java index 02adfd23891..ab62f615af7 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/TickingSingleValueRedirectionIndexImpl.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/TickingSingleValueRedirectionIndexImpl.java @@ -69,9 +69,9 @@ public String toString() { @Override public void fillChunk( - @NotNull final FillContext fillContext, - @NotNull final WritableLongChunk mappedKeysOut, - @NotNull final OrderedKeys keysToMap) { + @NotNull final FillContext fillContext, + @NotNull final WritableLongChunk mappedKeysOut, + @NotNull final OrderedKeys keysToMap) { final int sz = keysToMap.intSize(); mappedKeysOut.setSize(sz); mappedKeysOut.fillWithValue(0, sz, value); @@ -79,13 +79,11 @@ public void fillChunk( @Override public void fillPrevChunk( - @NotNull FillContext fillContext, - @NotNull WritableLongChunk mappedKeysOut, - @NotNull OrderedKeys keysToMap) { + @NotNull FillContext fillContext, + @NotNull WritableLongChunk mappedKeysOut, + @NotNull OrderedKeys keysToMap) { final long fillValue = - (updatedClockTick > 0 && updatedClockTick == LogicalClock.DEFAULT.currentStep()) - ? prevValue - : value; + (updatedClockTick > 0 && updatedClockTick == LogicalClock.DEFAULT.currentStep()) ? 
prevValue : value; final int sz = keysToMap.intSize(); mappedKeysOut.setSize(sz); mappedKeysOut.fillWithValue(0, sz, fillValue); diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/TreeIndex.java b/DB/src/main/java/io/deephaven/db/v2/utils/TreeIndex.java index 2c42088401b..faa4a4770e0 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/TreeIndex.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/TreeIndex.java @@ -31,8 +31,8 @@ public class TreeIndex extends SortedIndex implements ImplementedByTreeIndexImpl private TreeIndexImpl impl; private transient TreeIndexImpl prevImpl; /** - * Protects prevImpl. Only updated in checkPrev() and initializePreviousValue() (this later - * supposed to be used only right after the constructor, in special cases). + * Protects prevImpl. Only updated in checkPrev() and initializePreviousValue() (this later supposed to be used only + * right after the constructor, in special cases). */ private transient volatile long changeTimeStep; @@ -147,8 +147,7 @@ public void insertRange(final long startKey, final long endKey) { } @Override - public void insert(final LongChunk keys, final int offset, - final int length) { + public void insert(final LongChunk keys, final int offset, final int length) { Assert.leq(offset + length, "offset + length", keys.size(), "keys.size()"); if (trace) pre("insert(chunk)"); @@ -162,8 +161,7 @@ public void insert(final LongChunk keys, final int offset, @Override public void insert(final ReadOnlyIndex added) { if (trace) - pre("insert(added_" - + (added == null ? "id=-1" : ((ImplementedByTreeIndexImpl) added).strid()) + ")"); + pre("insert(added_" + (added == null ? 
"id=-1" : ((ImplementedByTreeIndexImpl) added).strid()) + ")"); if (added != null) { checkPrevForWrite(); assign(impl.ixInsert(getImpl(added))); @@ -196,8 +194,7 @@ public void removeRange(final long startKey, final long endKey) { } @Override - public void remove(final LongChunk keys, final int offset, - final int length) { + public void remove(final LongChunk keys, final int offset, final int length) { Assert.leq(offset + length, "offset + length", keys.size(), "keys.size()"); if (trace) pre("remove(chunk)"); @@ -497,7 +494,7 @@ public void compact() { public void update(final ReadOnlyIndex added, final ReadOnlyIndex removed) { if (trace) pre("update(added_" + ((ImplementedByTreeIndexImpl) added).strid() + ", removed_" - + ((ImplementedByTreeIndexImpl) removed).strid() + ")"); + + ((ImplementedByTreeIndexImpl) removed).strid() + ")"); checkPrevForWrite(); assign(impl.ixUpdate(getImpl(added), getImpl(removed))); super.onUpdate(added, removed); @@ -594,8 +591,7 @@ public Index union(final ReadOnlyIndex set) { return ans; } - private static class IndexRandomBuilder extends TreeIndexImplRandomBuilder - implements Index.RandomBuilder { + private static class IndexRandomBuilder extends TreeIndexImplRandomBuilder implements Index.RandomBuilder { @Override public Index getIndex() { return new TreeIndex(getTreeIndexImpl()); @@ -607,7 +603,7 @@ public static Index.RandomBuilder makeRandomBuilder() { } private abstract static class IndexSequentialBuilderBase extends TreeIndexImplSequentialBuilder - implements Index.SequentialBuilder { + implements Index.SequentialBuilder { @Override public void appendIndex(final ReadOnlyIndex ix) { appendIndexWithOffset(ix, 0); @@ -616,8 +612,7 @@ public void appendIndex(final ReadOnlyIndex ix) { @Override public void appendIndexWithOffset(final ReadOnlyIndex ix, final long shiftAmount) { if (ix instanceof ImplementedByTreeIndexImpl) { - appendTreeIndexImpl(shiftAmount, ((ImplementedByTreeIndexImpl) ix).getImpl(), - false); + 
appendTreeIndexImpl(shiftAmount, ((ImplementedByTreeIndexImpl) ix).getImpl(), false); return; } ix.forAllLongRanges((start, end) -> { @@ -638,7 +633,7 @@ public static Index.SequentialBuilder makeSequentialBuilder() { } private static class CurrentOnlyIndexRandomBuilder extends TreeIndexImplRandomBuilder - implements Index.RandomBuilder { + implements Index.RandomBuilder { @Override public Index getIndex() { return new CurrentOnlyIndex(getTreeIndexImpl()); @@ -713,12 +708,10 @@ public void validate(final String failMsg) { final long start = it.currentRangeStart(); final long end = it.currentRangeEnd(); Assert.assertion(start >= 0, m + "start >= 0", start, "start", this, "index"); - Assert.assertion(end >= start, m + "end >= start", start, "start", end, "end", this, - "index"); - Assert.assertion(start > lastEnd, m + "start > lastEnd", start, "start", lastEnd, - "lastEnd", this, "index"); - Assert.assertion(start > lastEnd + 1, m + "start > lastEnd + 1", start, "start", - lastEnd, "lastEnd", this, "index"); + Assert.assertion(end >= start, m + "end >= start", start, "start", end, "end", this, "index"); + Assert.assertion(start > lastEnd, m + "start > lastEnd", start, "start", lastEnd, "lastEnd", this, "index"); + Assert.assertion(start > lastEnd + 1, m + "start > lastEnd + 1", start, "start", lastEnd, "lastEnd", this, + "index"); lastEnd = end; totalSize += ((end - start) + 1); @@ -761,20 +754,16 @@ public void writeExternal(@NotNull final ObjectOutput out) throws IOException { ExternalizableIndexUtils.writeExternalCompressedDeltas(out, this); } - // If we've got a nasty bug, it can be useful to write the serialized version of indices when we - // detect the bug; - // because the creation of these things is so darn path dependent. 
We can't actually serialize - // the Index; because - // the representation that we'll write will be completely different (and likely saner) than what - // we have in-memory + // If we've got a nasty bug, it can be useful to write the serialized version of indices when we detect the bug; + // because the creation of these things is so darn path dependent. We can't actually serialize the Index; because + // the representation that we'll write will be completely different (and likely saner) than what we have in-memory // at any given point in time. public void writeImpl(ObjectOutput out) throws IOException { out.writeObject(impl); } @Override - public void readExternal(@NotNull final ObjectInput in) - throws IOException, ClassNotFoundException { + public void readExternal(@NotNull final ObjectInput in) throws IOException, ClassNotFoundException { try (final Index readIndex = ExternalizableIndexUtils.readExternalCompressedDelta(in)) { insert(readIndex); } @@ -795,8 +784,7 @@ public OrderedKeys.Iterator getOrderedKeysIterator() { } @Override - public OrderedKeys getOrderedKeysByKeyRange(final long startKeyInclusive, - final long endKeyInclusive) { + public OrderedKeys getOrderedKeysByKeyRange(final long startKeyInclusive, final long endKeyInclusive) { return impl.ixGetOrderedKeysByKeyRange(startKeyInclusive, endKeyInclusive); } diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/TreeIndexImpl.java b/DB/src/main/java/io/deephaven/db/v2/utils/TreeIndexImpl.java index abb058d137f..066ca62963e 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/TreeIndexImpl.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/TreeIndexImpl.java @@ -29,8 +29,7 @@ public interface TreeIndexImpl { TreeIndexImpl ixInsertRange(long startKey, long endKey); @FinalDefault - default TreeIndexImpl ixInsert(final LongChunk keys, final int offset, - final int length) { + default TreeIndexImpl ixInsert(final LongChunk keys, final int offset, final int length) { if (length <= 1) { if (length == 
0) { return this; @@ -59,8 +58,7 @@ default TreeIndexImpl ixInsert(final LongChunk keys, final in TreeIndexImpl ixRemoveRange(long startKey, long endKey); @FinalDefault - default TreeIndexImpl ixRemove(final LongChunk keys, final int offset, - final int length) { + default TreeIndexImpl ixRemove(final LongChunk keys, final int offset, final int length) { if (ixIsEmpty()) { return this; } @@ -173,16 +171,15 @@ default void ixValidate() { } /** - * Produce a {@link TreeIndexImpl} from a slice of a {@link LongChunk} of - * {@link OrderedKeyIndices}. + * Produce a {@link TreeIndexImpl} from a slice of a {@link LongChunk} of {@link OrderedKeyIndices}. * * @param keys The {@link LongChunk} of {@link OrderedKeyIndices} to build from * @param offset The offset in {@code keys} to begin building from * @param length The number of keys to include * @return A new {@link TreeIndexImpl} containing the specified slice of {@code keys} */ - static TreeIndexImpl fromChunk(final LongChunk keys, final int offset, - final int length, final boolean disposable) { + static TreeIndexImpl fromChunk(final LongChunk keys, final int offset, final int length, + final boolean disposable) { if (length == 0) { return EMPTY; } @@ -194,8 +191,7 @@ static TreeIndexImpl fromChunk(final LongChunk keys, final in return SingleRange.make(first, last); } - final TreeIndexImplSequentialBuilder builder = - new TreeIndexImplSequentialBuilder(disposable); + final TreeIndexImplSequentialBuilder builder = new TreeIndexImplSequentialBuilder(disposable); builder.appendKey(first); for (int ki = offset + 1; ki < lastOffsetInclusive; ++ki) { builder.appendKey(keys.get(ki)); @@ -229,14 +225,14 @@ public TreeIndexImpl ixInsertRange(final long startKey, final long endKey) { } @Override - public TreeIndexImpl ixInsertSecondHalf(final LongChunk keys, - final int offset, final int length) { + public TreeIndexImpl ixInsertSecondHalf(final LongChunk keys, final int offset, + final int length) { return fromChunk(keys, offset, 
length, false); } @Override - public TreeIndexImpl ixRemoveSecondHalf(final LongChunk keys, - final int offset, final int length) { + public TreeIndexImpl ixRemoveSecondHalf(final LongChunk keys, final int offset, + final int length) { throw new IllegalStateException(); } @@ -291,8 +287,7 @@ public long ixFind(long key) { } @Override - public void ixGetKeysForPositions(PrimitiveIterator.OfLong inputPositions, - LongConsumer outputKeys) { + public void ixGetKeysForPositions(PrimitiveIterator.OfLong inputPositions, LongConsumer outputKeys) { while (inputPositions.hasNext()) { inputPositions.nextLong(); outputKeys.accept(Index.NULL_KEY); @@ -418,8 +413,7 @@ public OrderedKeys ixGetOrderedKeysByPosition(long startPositionInclusive, long } @Override - public OrderedKeys ixGetOrderedKeysByKeyRange(long startKeyInclusive, - long endKeyInclusive) { + public OrderedKeys ixGetOrderedKeysByKeyRange(long startKeyInclusive, long endKeyInclusive) { return OrderedKeys.EMPTY; } @@ -464,8 +458,8 @@ public String toString() { interface SequentialBuilder extends LongRangeConsumer { boolean check = - Configuration.getInstance().getBooleanForClassWithDefault( - TreeIndexImpl.class, "sequentialBuilderCheck", true); + Configuration.getInstance().getBooleanForClassWithDefault( + TreeIndexImpl.class, "sequentialBuilderCheck", true); String outOfOrderKeyErrorMsg = "Out of order key(s) in sequential builder: "; @@ -477,8 +471,7 @@ default void setDomain(long minKey, long maxKey) {} void appendRange(long firstKey, long lastKey); - default void appendTreeIndexImpl(final long shiftAmount, final TreeIndexImpl ix, - final boolean acquire) { + default void appendTreeIndexImpl(final long shiftAmount, final TreeIndexImpl ix, final boolean acquire) { ix.ixForEachLongRange((final long start, final long last) -> { appendRange(start + shiftAmount, last + shiftAmount); return true; @@ -522,8 +515,8 @@ static TreeIndexImpl twoRanges(final long s1, final long e1, final long s2, fina SortedRanges sr = 
SortedRanges.tryMakeForKnownRangeKnownCount(4, s1, e2); if (sr != null) { sr = sr.appendRangeUnsafe(s1, e1) - .appendRangeUnsafe(s2, e2) - .tryCompactUnsafe(4); + .appendRangeUnsafe(s2, e2) + .tryCompactUnsafe(4); return sr; } final RspBitmap ans = new RspBitmap(s1, e1); @@ -538,7 +531,7 @@ static TreeIndexImpl twoRanges(final long s1, final long e1, final long s2, fina // are of type RspBitmap already. static RspBitmap asRspBitmap(final TreeIndexImpl t) { return (t instanceof RspBitmap) - ? (RspBitmap) t - : t.ixToRspOnNew(); + ? (RspBitmap) t + : t.ixToRspOnNew(); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/TreeIndexImplSequentialBuilder.java b/DB/src/main/java/io/deephaven/db/v2/utils/TreeIndexImplSequentialBuilder.java index 7ac04f4684c..75520b8ad47 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/TreeIndexImplSequentialBuilder.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/TreeIndexImplSequentialBuilder.java @@ -70,8 +70,7 @@ public RspBitmap getRspBitmap() { } @Override - public void appendTreeIndexImpl(final long shiftAmount, final TreeIndexImpl ix, - final boolean acquire) { + public void appendTreeIndexImpl(final long shiftAmount, final TreeIndexImpl ix, final boolean acquire) { if (ix.ixIsEmpty()) { return; } diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/UpdatePerformanceTracker.java b/DB/src/main/java/io/deephaven/db/v2/utils/UpdatePerformanceTracker.java index 09a74e808f2..7d9d9db0193 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/UpdatePerformanceTracker.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/UpdatePerformanceTracker.java @@ -33,40 +33,36 @@ import static io.deephaven.db.tables.lang.DBLanguageFunctionUtil.plus; /** - * This tool is meant to track periodic update events that take place in a LiveTableMonitor. 
This - * generally includes (1) LiveTable.refresh() invocations (2) DynamicTable Listener notifications - * (see InstrumentedListener) + * This tool is meant to track periodic update events that take place in a LiveTableMonitor. This generally includes (1) + * LiveTable.refresh() invocations (2) DynamicTable Listener notifications (see InstrumentedListener) * - * Note: Regarding thread safety, this class interacts with a singleton LiveTableMonitor and expects - * all calls to getEntry(), Entry.onUpdateStart(), and Entry.onUpdateEnd() to be performed while - * protected by the LTM's live jobs synchronizer. + * Note: Regarding thread safety, this class interacts with a singleton LiveTableMonitor and expects all calls to + * getEntry(), Entry.onUpdateStart(), and Entry.onUpdateEnd() to be performed while protected by the LTM's live jobs + * synchronizer. */ public class UpdatePerformanceTracker { - private static final long REPORT_INTERVAL_MILLIS = - Configuration.getInstance().getLongForClassWithDefault( + private static final long REPORT_INTERVAL_MILLIS = Configuration.getInstance().getLongForClassWithDefault( UpdatePerformanceTracker.class, "reportIntervalMillis", 60 * 1000L); // aggregate update performance entries less than 500us by default private static final QueryPerformanceLogThreshold LOG_THRESHOLD = - new QueryPerformanceLogThreshold("Update", 500_000L); + new QueryPerformanceLogThreshold("Update", 500_000L); private static volatile UpdatePerformanceTracker INSTANCE; private static boolean started = false; private boolean unitTestMode = false; private final Entry aggregatedSmallUpdatesEntry = - new Entry(QueryConstants.NULL_INT, QueryConstants.NULL_INT, QueryConstants.NULL_INT, - "Aggregated Small Updates", null); + new Entry(QueryConstants.NULL_INT, QueryConstants.NULL_INT, QueryConstants.NULL_INT, + "Aggregated Small Updates", null); public static UpdatePerformanceTracker getInstance() { if (INSTANCE == null) { synchronized 
(UpdatePerformanceTracker.class) { if (INSTANCE == null) { - final TableDefinition tableDefinition = - UpdatePerformanceLogLogger.getTableDefinition(); - final String processInfoId = - MemoryTableLoggers.getInstance().getProcessInfo().getId().value(); + final TableDefinition tableDefinition = UpdatePerformanceLogLogger.getTableDefinition(); + final String processInfoId = MemoryTableLoggers.getInstance().getProcessInfo().getId().value(); INSTANCE = new UpdatePerformanceTracker(processInfoId, - LoggerFactory.getLogger(UpdatePerformanceTracker.class), tableDefinition); + LoggerFactory.getLogger(UpdatePerformanceTracker.class), tableDefinition); } } } @@ -80,12 +76,12 @@ public static UpdatePerformanceTracker getInstance() { private final Queue> entries = new LinkedBlockingDeque<>(); private UpdatePerformanceTracker( - @NotNull final String processInfoId, - @NotNull final Logger logger, - @NotNull final TableDefinition logTableDefinition) { + @NotNull final String processInfoId, + @NotNull final Logger logger, + @NotNull final TableDefinition logTableDefinition) { this.logger = logger; tableLogger = new MemoryTableLogger<>( - logger, new UpdatePerformanceLogLogger(processInfoId), logTableDefinition); + logger, new UpdatePerformanceLogLogger(processInfoId), logTableDefinition); } private void startThread() { @@ -118,9 +114,9 @@ public void run() { // ignore } LiveTableMonitor.DEFAULT.exclusiveLock().doLocked( - () -> finishInterval(intervalStartTimeMillis, - System.currentTimeMillis(), - System.nanoTime() - intervalStartTimeNanos)); + () -> finishInterval(intervalStartTimeMillis, + System.currentTimeMillis(), + System.nanoTime() - intervalStartTimeNanos)); } } } @@ -147,11 +143,11 @@ public final Entry getEntry(final String description) { effectiveDescription = description; } entryMu.setValue(new Entry( - entryIdCounter.getAndIncrement(), - evaluationNumber, - operationNumber, - effectiveDescription, - QueryPerformanceRecorder.getCallerLine())); + 
entryIdCounter.getAndIncrement(), + evaluationNumber, + operationNumber, + effectiveDescription, + QueryPerformanceRecorder.getCallerLine())); }); final Entry entry = entryMu.getValue(); if (!unitTestMode) { @@ -162,25 +158,23 @@ public final Entry getEntry(final String description) { } /** - * Do entry maintenance, generate an interval performance report table for all active entries, - * and reset for the next interval. Note: This method is only called under the - * LiveTableMonitor instance's lock. This ensures exclusive access to the entries, and also - * prevents any other thread from removing from entries. + * Do entry maintenance, generate an interval performance report table for all active entries, and reset for the + * next interval. Note: This method is only called under the LiveTableMonitor instance's lock. This ensures + * exclusive access to the entries, and also prevents any other thread from removing from entries. * * @param intervalStartTimeMillis interval start time in millis * @param intervalEndTimeMillis interval end time in millis * @param intervalDurationNanos interval duration in nanos */ - private void finishInterval(final long intervalStartTimeMillis, - final long intervalEndTimeMillis, final long intervalDurationNanos) { + private void finishInterval(final long intervalStartTimeMillis, final long intervalEndTimeMillis, + final long intervalDurationNanos) { /* - * Visit all entry references. For entries that no longer exist: Remove by index from the - * entry list. For entries that still exist: If the entry had non-zero usage in this - * interval, add it to the report. Reset the entry for the next interval. + * Visit all entry references. For entries that no longer exist: Remove by index from the entry list. For + * entries that still exist: If the entry had non-zero usage in this interval, add it to the report. Reset the + * entry for the next interval. 
*/ final IntervalLevelDetails intervalLevelDetails = - new IntervalLevelDetails(intervalStartTimeMillis, intervalEndTimeMillis, - intervalDurationNanos); + new IntervalLevelDetails(intervalStartTimeMillis, intervalEndTimeMillis, intervalDurationNanos); boolean encounteredErrorLoggingToMemory = false; @@ -194,25 +188,23 @@ private void finishInterval(final long intervalStartTimeMillis, if (entry.shouldLogEntryInterval()) { encounteredErrorLoggingToMemory = - logToMemory(intervalLevelDetails, entry, encounteredErrorLoggingToMemory); + logToMemory(intervalLevelDetails, entry, encounteredErrorLoggingToMemory); } else if (entry.intervalInvocationCount > 0) { if (entry.totalUsedMemory > aggregatedSmallUpdatesEntry.totalUsedMemory) { aggregatedSmallUpdatesEntry.totalUsedMemory = entry.totalUsedMemory; } if (aggregatedSmallUpdatesEntry.intervalInvocationCount == 0 - || aggregatedSmallUpdatesEntry.totalFreeMemory > entry.totalFreeMemory) { + || aggregatedSmallUpdatesEntry.totalFreeMemory > entry.totalFreeMemory) { aggregatedSmallUpdatesEntry.totalFreeMemory = entry.totalFreeMemory; } aggregatedSmallUpdatesEntry.intervalUsageNanos += entry.intervalUsageNanos; - aggregatedSmallUpdatesEntry.intervalInvocationCount += - entry.intervalInvocationCount; + aggregatedSmallUpdatesEntry.intervalInvocationCount += entry.intervalInvocationCount; aggregatedSmallUpdatesEntry.intervalCpuNanos = - plus(aggregatedSmallUpdatesEntry.intervalCpuNanos, entry.intervalCpuNanos); + plus(aggregatedSmallUpdatesEntry.intervalCpuNanos, entry.intervalCpuNanos); aggregatedSmallUpdatesEntry.intervalUserCpuNanos = - plus(aggregatedSmallUpdatesEntry.intervalUserCpuNanos, - entry.intervalUserCpuNanos); + plus(aggregatedSmallUpdatesEntry.intervalUserCpuNanos, entry.intervalUserCpuNanos); aggregatedSmallUpdatesEntry.intervalAdded += entry.intervalAdded; aggregatedSmallUpdatesEntry.intervalRemoved += entry.intervalRemoved; @@ -220,32 +212,28 @@ private void finishInterval(final long intervalStartTimeMillis, 
aggregatedSmallUpdatesEntry.intervalShifted += entry.intervalShifted; aggregatedSmallUpdatesEntry.intervalAllocatedBytes = - plus(aggregatedSmallUpdatesEntry.intervalAllocatedBytes, - entry.intervalAllocatedBytes); + plus(aggregatedSmallUpdatesEntry.intervalAllocatedBytes, entry.intervalAllocatedBytes); aggregatedSmallUpdatesEntry.intervalPoolAllocatedBytes = - plus(aggregatedSmallUpdatesEntry.intervalPoolAllocatedBytes, - entry.intervalPoolAllocatedBytes); + plus(aggregatedSmallUpdatesEntry.intervalPoolAllocatedBytes, entry.intervalPoolAllocatedBytes); } entry.reset(); } if (aggregatedSmallUpdatesEntry.intervalInvocationCount > 0) { - logToMemory(intervalLevelDetails, aggregatedSmallUpdatesEntry, - encounteredErrorLoggingToMemory); + logToMemory(intervalLevelDetails, aggregatedSmallUpdatesEntry, encounteredErrorLoggingToMemory); aggregatedSmallUpdatesEntry.reset(); } } private boolean logToMemory(final IntervalLevelDetails intervalLevelDetails, - final Entry entry, - final boolean encounteredErrorLoggingToMemory) { + final Entry entry, + final boolean encounteredErrorLoggingToMemory) { if (!encounteredErrorLoggingToMemory) { try { tableLogger.getTableLogger().log(intervalLevelDetails, entry); } catch (IOException e) { // Don't want to log this more than once in a report - logger.error().append("Error sending UpdatePerformanceLog data to memory").append(e) - .endl(); + logger.error().append("Error sending UpdatePerformanceLog data to memory").append(e).endl(); return true; } } @@ -261,7 +249,7 @@ public static class IntervalLevelDetails { private final long intervalDurationNanos; IntervalLevelDetails(final long intervalStartTimeMillis, final long intervalEndTimeMillis, - final long intervalDurationNanos) { + final long intervalDurationNanos) { this.intervalStartTimeMillis = intervalStartTimeMillis; this.intervalEndTimeMillis = intervalEndTimeMillis; this.intervalDurationNanos = intervalDurationNanos; @@ -316,7 +304,7 @@ public static class Entry implements 
LogOutputAppendable { private long totalUsedMemory; private Entry(final int id, final int evaluationNumber, final int operationNumber, - final String description, final String callerLine) { + final String description, final String callerLine) { this.id = id; this.evaluationNumber = evaluationNumber; this.operationNumber = operationNumber; @@ -328,17 +316,15 @@ public final void onUpdateStart() { ++intervalInvocationCount; startAllocatedBytes = ThreadProfiler.DEFAULT.getCurrentThreadAllocatedBytes(); - startPoolAllocatedBytes = - QueryPerformanceRecorder.getPoolAllocatedBytesForCurrentThread(); + startPoolAllocatedBytes = QueryPerformanceRecorder.getPoolAllocatedBytesForCurrentThread(); startUserCpuNanos = ThreadProfiler.DEFAULT.getCurrentThreadUserTime(); startCpuNanos = ThreadProfiler.DEFAULT.getCurrentThreadCpuTime(); startTimeNanos = System.nanoTime(); } - public final void onUpdateStart(final Index added, final Index removed, - final Index modified, - final IndexShiftData shifted) { + public final void onUpdateStart(final Index added, final Index removed, final Index modified, + final IndexShiftData shifted) { intervalAdded += added.size(); intervalRemoved += removed.size(); intervalModified += modified.size(); @@ -358,18 +344,16 @@ public final void onUpdateStart(long added, long removed, long modified, long sh public final void onUpdateEnd() { intervalUserCpuNanos = plus(intervalUserCpuNanos, - minus(ThreadProfiler.DEFAULT.getCurrentThreadUserTime(), startUserCpuNanos)); - intervalCpuNanos = plus(intervalCpuNanos, - minus(ThreadProfiler.DEFAULT.getCurrentThreadCpuTime(), startCpuNanos)); + minus(ThreadProfiler.DEFAULT.getCurrentThreadUserTime(), startUserCpuNanos)); + intervalCpuNanos = + plus(intervalCpuNanos, minus(ThreadProfiler.DEFAULT.getCurrentThreadCpuTime(), startCpuNanos)); intervalUsageNanos += System.nanoTime() - startTimeNanos; intervalPoolAllocatedBytes = plus(intervalPoolAllocatedBytes, - 
minus(QueryPerformanceRecorder.getPoolAllocatedBytesForCurrentThread(), - startPoolAllocatedBytes)); + minus(QueryPerformanceRecorder.getPoolAllocatedBytesForCurrentThread(), startPoolAllocatedBytes)); intervalAllocatedBytes = plus(intervalAllocatedBytes, - minus(ThreadProfiler.DEFAULT.getCurrentThreadAllocatedBytes(), - startAllocatedBytes)); + minus(ThreadProfiler.DEFAULT.getCurrentThreadAllocatedBytes(), startAllocatedBytes)); startAllocatedBytes = 0; startPoolAllocatedBytes = 0; @@ -411,25 +395,21 @@ public String toString() { @Override public LogOutput append(final LogOutput logOutput) { - return logOutput.append("Entry{").append(", id=").append(id) - .append(", evaluationNumber=").append(evaluationNumber).append(", operationNumber=") - .append(operationNumber).append(", description='").append(description).append('\'') - .append(", callerLine='").append(callerLine).append('\'') - .append(", intervalUsageNanos=").append(intervalUsageNanos) - .append(", intervalInvocationCount=").append(intervalInvocationCount) - .append(", intervalCpuNanos=").append(intervalCpuNanos) - .append(", intervalUserCpuNanos=").append(intervalUserCpuNanos) - .append(", intervalAdded=").append(intervalAdded).append(", intervalRemoved=") - .append(intervalRemoved).append(", intervalModified=").append(intervalModified) - .append(", intervalShifted=").append(intervalShifted) - .append(", intervalAllocatedBytes=").append(intervalAllocatedBytes) - .append(", intervalPoolAllocatedBytes=").append(intervalPoolAllocatedBytes) - .append(", startCpuNanos=").append(startCpuNanos).append(", startUserCpuNanos=") - .append(startUserCpuNanos).append(", startTimeNanos=").append(startTimeNanos) - .append(", startAllocatedBytes=").append(startAllocatedBytes) - .append(", startPoolAllocatedBytes=").append(startPoolAllocatedBytes) - .append(", totalUsedMemory=").append(totalUsedMemory).append(", totalFreeMemory=") - .append(totalFreeMemory).append('}'); + return logOutput.append("Entry{").append(", 
id=").append(id).append(", evaluationNumber=") + .append(evaluationNumber).append(", operationNumber=").append(operationNumber) + .append(", description='").append(description).append('\'').append(", callerLine='") + .append(callerLine).append('\'').append(", intervalUsageNanos=").append(intervalUsageNanos) + .append(", intervalInvocationCount=").append(intervalInvocationCount).append(", intervalCpuNanos=") + .append(intervalCpuNanos).append(", intervalUserCpuNanos=").append(intervalUserCpuNanos) + .append(", intervalAdded=").append(intervalAdded).append(", intervalRemoved=") + .append(intervalRemoved).append(", intervalModified=").append(intervalModified) + .append(", intervalShifted=").append(intervalShifted).append(", intervalAllocatedBytes=") + .append(intervalAllocatedBytes).append(", intervalPoolAllocatedBytes=") + .append(intervalPoolAllocatedBytes).append(", startCpuNanos=").append(startCpuNanos) + .append(", startUserCpuNanos=").append(startUserCpuNanos).append(", startTimeNanos=") + .append(startTimeNanos).append(", startAllocatedBytes=").append(startAllocatedBytes) + .append(", startPoolAllocatedBytes=").append(startPoolAllocatedBytes).append(", totalUsedMemory=") + .append(totalUsedMemory).append(", totalFreeMemory=").append(totalFreeMemory).append('}'); } public int getId() { @@ -501,15 +481,14 @@ public long getIntervalInvocationCount() { } /** - * Suppress de minimus update entry intervals using the properties defined in the - * QueryPerformanceNugget class. + * Suppress de minimus update entry intervals using the properties defined in the QueryPerformanceNugget class. 
* - * @return if this nugget is significant enough to be logged, otherwise it is aggregated - * into the small update entry + * @return if this nugget is significant enough to be logged, otherwise it is aggregated into the small update + * entry */ boolean shouldLogEntryInterval() { return intervalInvocationCount > 0 && - LOG_THRESHOLD.shouldLog(getIntervalUsageNanos()); + LOG_THRESHOLD.shouldLog(getIntervalUsageNanos()); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/UpdateSizeCalculator.java b/DB/src/main/java/io/deephaven/db/v2/utils/UpdateSizeCalculator.java index 127ccb3ef26..13d2f66b85e 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/UpdateSizeCalculator.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/UpdateSizeCalculator.java @@ -5,14 +5,14 @@ /** * Calculate the size of the chunks needed to process an update. * - * The assumption is that the operation processes removed, modified, and added values sequentially - * (not concurrently); so the largest add/modified/removed index is all that is needed at one time. - * The effective shifts size is also included in the required update size. + * The assumption is that the operation processes removed, modified, and added values sequentially (not concurrently); + * so the largest add/modified/removed index is all that is needed at one time. The effective shifts size is also + * included in the required update size. */ public class UpdateSizeCalculator { /** - * Return the size of chunk needed to process this update (removed, modified, then added - * sequentially not concurrently). + * Return the size of chunk needed to process this update (removed, modified, then added sequentially not + * concurrently). 
* * @param upstream the update to process * @param chunkSize the maximum chunk size (a maximum for our size) @@ -20,8 +20,8 @@ public class UpdateSizeCalculator { * @return an appropriate maximum chunk size for this update */ public static int chunkSize(ShiftAwareListener.Update upstream, int chunkSize) { - final long updateSize = Math.max(Math.max(upstream.added.size(), upstream.removed.size()), - upstream.modified.size()); + final long updateSize = + Math.max(Math.max(upstream.added.size(), upstream.removed.size()), upstream.modified.size()); return chunkSize(updateSize, upstream.shifted, chunkSize); } @@ -38,7 +38,6 @@ public static int chunkSize(long updateSize, IndexShiftData shifted, int chunkSi if (updateSize >= chunkSize) { return chunkSize; } - return (int) Math.min(chunkSize, - Math.max(updateSize, shifted.getEffectiveSizeClamped(chunkSize))); + return (int) Math.min(chunkSize, Math.max(updateSize, shifted.getEffectiveSizeClamped(chunkSize))); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/WrappedIndexRedirectionIndexImpl.java b/DB/src/main/java/io/deephaven/db/v2/utils/WrappedIndexRedirectionIndexImpl.java index 9e8264c3f96..87aa0b6238e 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/WrappedIndexRedirectionIndexImpl.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/WrappedIndexRedirectionIndexImpl.java @@ -51,92 +51,81 @@ public void close() { } /* - * TODO: Switch to this version if we ever uncomment the override for fillChunkUnordered. - * private static final class FillContext implements RedirectionIndex.FillContext { + * TODO: Switch to this version if we ever uncomment the override for fillChunkUnordered. 
private static final class + * FillContext implements RedirectionIndex.FillContext { * * private final int chunkCapacity; private final WritableLongChunk indexPositions; * - * private LongIntTimsortKernel.LongIntSortKernelContext - * sortKernelContext; private WritableIntChunk unorderedFillChunkPositions; - * private WritableLongChunk unorderedFillMappedKeys; + * private LongIntTimsortKernel.LongIntSortKernelContext sortKernelContext; private + * WritableIntChunk unorderedFillChunkPositions; private WritableLongChunk + * unorderedFillMappedKeys; * - * private FillContext(final int chunkCapacity) { this.chunkCapacity = chunkCapacity; - * indexPositions = WritableLongChunk.makeWritableChunk(chunkCapacity); } + * private FillContext(final int chunkCapacity) { this.chunkCapacity = chunkCapacity; indexPositions = + * WritableLongChunk.makeWritableChunk(chunkCapacity); } * - * private void ensureUnorderedFillFieldsInitialized() { if (sortKernelContext == null) { - * sortKernelContext = LongIntTimsortKernel.createContext(chunkCapacity); } if - * (unorderedFillChunkPositions == null) { unorderedFillChunkPositions = - * WritableIntChunk.makeWritableChunk(chunkCapacity); } if (unorderedFillMappedKeys == null) { - * unorderedFillMappedKeys = WritableLongChunk.makeWritableChunk(chunkCapacity); } } + * private void ensureUnorderedFillFieldsInitialized() { if (sortKernelContext == null) { sortKernelContext = + * LongIntTimsortKernel.createContext(chunkCapacity); } if (unorderedFillChunkPositions == null) { + * unorderedFillChunkPositions = WritableIntChunk.makeWritableChunk(chunkCapacity); } if (unorderedFillMappedKeys == + * null) { unorderedFillMappedKeys = WritableLongChunk.makeWritableChunk(chunkCapacity); } } * * @Override public void close() { indexPositions.close(); if (sortKernelContext != null) { - * sortKernelContext.close(); } if (unorderedFillChunkPositions != null) { - * unorderedFillChunkPositions.close(); } if (unorderedFillMappedKeys != null) { - * 
unorderedFillMappedKeys.close(); } } } + * sortKernelContext.close(); } if (unorderedFillChunkPositions != null) { unorderedFillChunkPositions.close(); } if + * (unorderedFillMappedKeys != null) { unorderedFillMappedKeys.close(); } } } */ @Override public FillContext makeFillContext(final int chunkCapacity, final SharedContext sharedContext) { - // NB: No need to implement sharing at this level. ReadOnlyRedirectedColumnSource uses a - // SharedContext to share + // NB: No need to implement sharing at this level. ReadOnlyRedirectedColumnSource uses a SharedContext to share // RedirectionIndex lookup results. return new FillContext(chunkCapacity); } @Override public void fillChunk(@NotNull final RedirectionIndex.FillContext fillContext, - @NotNull final WritableLongChunk mappedKeysOut, - @NotNull final OrderedKeys keysToMap) { - final WritableLongChunk indexPositions = - ((FillContext) fillContext).indexPositions; + @NotNull final WritableLongChunk mappedKeysOut, + @NotNull final OrderedKeys keysToMap) { + final WritableLongChunk indexPositions = ((FillContext) fillContext).indexPositions; keysToMap.fillKeyIndicesChunk(indexPositions); - wrappedIndex.getKeysForPositions(new LongChunkIterator(indexPositions), - new LongChunkAppender(mappedKeysOut)); + wrappedIndex.getKeysForPositions(new LongChunkIterator(indexPositions), new LongChunkAppender(mappedKeysOut)); mappedKeysOut.setSize(keysToMap.intSize()); } @Override public void fillPrevChunk(@NotNull final RedirectionIndex.FillContext fillContext, - @NotNull final WritableLongChunk mappedKeysOut, - @NotNull final OrderedKeys keysToMap) { - final WritableLongChunk indexPositions = - ((FillContext) fillContext).indexPositions; + @NotNull final WritableLongChunk mappedKeysOut, + @NotNull final OrderedKeys keysToMap) { + final WritableLongChunk indexPositions = ((FillContext) fillContext).indexPositions; keysToMap.fillKeyIndicesChunk(indexPositions); try (final ReadOnlyIndex prevWrappedIndex = wrappedIndex.getPrevIndex()) 
{ prevWrappedIndex.getKeysForPositions(new LongChunkIterator(indexPositions), - new LongChunkAppender(mappedKeysOut)); + new LongChunkAppender(mappedKeysOut)); } mappedKeysOut.setSize(keysToMap.intSize()); } /* - * TODO: Uncomment and test this if we ever start using WrappedIndexRedirectionIndexImpl for - * unordered reads. + * TODO: Uncomment and test this if we ever start using WrappedIndexRedirectionIndexImpl for unordered reads. * - * @Override public void fillChunkUnordered(@NotNull final RedirectionIndex.FillContext - * fillContext, + * @Override public void fillChunkUnordered(@NotNull final RedirectionIndex.FillContext fillContext, * * @NotNull final WritableLongChunk mappedKeysOut, * - * @NotNull final LongChunk keysToMap) { final FillContext typedFillContext = - * (FillContext) fillContext; typedFillContext.ensureUnorderedFillFieldsInitialized(); final - * WritableLongChunk indexPositions = typedFillContext.indexPositions; final - * LongIntTimsortKernel.LongIntSortKernelContext sortKernelContext = - * typedFillContext.sortKernelContext; final WritableIntChunk - * outputChunkPositions = typedFillContext.unorderedFillChunkPositions; final - * WritableLongChunk orderedMappedKeys = typedFillContext.unorderedFillMappedKeys; - * final int chunkSize = keysToMap.size(); + * @NotNull final LongChunk keysToMap) { final FillContext typedFillContext = (FillContext) fillContext; + * typedFillContext.ensureUnorderedFillFieldsInitialized(); final WritableLongChunk indexPositions = + * typedFillContext.indexPositions; final LongIntTimsortKernel.LongIntSortKernelContext + * sortKernelContext = typedFillContext.sortKernelContext; final WritableIntChunk + * outputChunkPositions = typedFillContext.unorderedFillChunkPositions; final WritableLongChunk + * orderedMappedKeys = typedFillContext.unorderedFillMappedKeys; final int chunkSize = keysToMap.size(); * - * indexPositions.copyFromTypedChunk(keysToMap, 0, 0, chunkSize); - * indexPositions.setSize(chunkSize); 
outputChunkPositions.setSize(chunkSize); - * ChunkUtils.fillInOrder(outputChunkPositions); LongIntTimsortKernel.sort(sortKernelContext, - * outputChunkPositions, indexPositions); + * indexPositions.copyFromTypedChunk(keysToMap, 0, 0, chunkSize); indexPositions.setSize(chunkSize); + * outputChunkPositions.setSize(chunkSize); ChunkUtils.fillInOrder(outputChunkPositions); + * LongIntTimsortKernel.sort(sortKernelContext, outputChunkPositions, indexPositions); * * wrappedIndex.getKeysForPositions(new LongChunkIterator(indexPositions), new * LongChunkAppender(orderedMappedKeys)); orderedMappedKeys.setSize(chunkSize); * - * mappedKeysOut.setSize(chunkSize); LongPermuteKernel.permute(orderedMappedKeys, - * outputChunkPositions, mappedKeysOut); } + * mappedKeysOut.setSize(chunkSize); LongPermuteKernel.permute(orderedMappedKeys, outputChunkPositions, + * mappedKeysOut); } */ @Override @@ -156,17 +145,15 @@ public String toString() { long positionStart = 0; - for (final Index.RangeIterator rangeIterator = wrappedIndex.rangeIterator(); rangeIterator - .hasNext();) { + for (final Index.RangeIterator rangeIterator = wrappedIndex.rangeIterator(); rangeIterator.hasNext();) { if (positionStart > 0) { builder.append(", "); } final long rangeStart = rangeIterator.currentRangeStart(); final long length = rangeIterator.currentRangeEnd() - rangeStart + 1; if (length > 1) { - builder.append(rangeIterator.currentRangeStart()).append("-") - .append(positionStart + length - 1).append(" -> ").append(rangeStart) - .append("-").append(rangeIterator.currentRangeEnd()); + builder.append(rangeIterator.currentRangeStart()).append("-").append(positionStart + length - 1) + .append(" -> ").append(rangeStart).append("-").append(rangeIterator.currentRangeEnd()); } else { builder.append(positionStart).append(" -> ").append(rangeStart); } diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/codegen/CodeGenerator.java b/DB/src/main/java/io/deephaven/db/v2/utils/codegen/CodeGenerator.java index 
f84dbd581da..b0a5b048d59 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/codegen/CodeGenerator.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/codegen/CodeGenerator.java @@ -27,13 +27,11 @@ public static CodeGenerator indent(Object... args) { /** * The tail wagging the dog: the proper method signature for this method is - * {@code CodeGenerator samelineBlock(String prefix, Object... args)} But when I do that, - * IntelliJ by default litters up the code with parameter hints, which (if the programmer - * doesn't turn them off), makes the templated code much more unreadable. So instead we just - * pull out the parameter from here. + * {@code CodeGenerator samelineBlock(String prefix, Object... args)} But when I do that, IntelliJ by default + * litters up the code with parameter hints, which (if the programmer doesn't turn them off), makes the templated + * code much more unreadable. So instead we just pull out the parameter from here. * - * @param args A prefix (of type String) like "else", followed by an arbitrary number of - * template lines. + * @param args A prefix (of type String) like "else", followed by an arbitrary number of template lines. * @return The new component. */ public static CodeGenerator samelineBlock(Object... 
args) { @@ -71,7 +69,7 @@ private static CodeGenerator createSlice(final Object[] args, final int start) { continue; } throw new UnsupportedOperationException( - "Value " + o + " is of unsupported type " + o.getClass().getSimpleName()); + "Value " + o + " is of unsupported type " + o.getClass().getSimpleName()); } return new Container(items); } @@ -80,8 +78,7 @@ public final void replace(String metaVariable, String replacement) { final String bracketed = "[[" + metaVariable + "]]"; final int count = replaceBracketed(bracketed, replacement); if (count == 0) { - throw new UnsupportedOperationException( - "Couldn't find any instances of metavariable " + metaVariable); + throw new UnsupportedOperationException("Couldn't find any instances of metavariable " + metaVariable); } } @@ -92,8 +89,7 @@ public final CodeGenerator activateOptional(String tag) { throw new UnsupportedOperationException("Can't find optional tag: " + tag); } if (allOptionals.size() > 1) { - throw new UnsupportedOperationException( - "There are multiple instances of optional tag: " + tag); + throw new UnsupportedOperationException("There are multiple instances of optional tag: " + tag); } return allOptionals.get(0).activate(); } @@ -111,8 +107,7 @@ public final CodeGenerator instantiateNewRepeated(String tag) { throw new UnsupportedOperationException("Can't find repeated tag: " + tag); } if (allRepeateds.size() > 1) { - throw new UnsupportedOperationException( - "There are multiple instances of repeated tag: " + tag); + throw new UnsupportedOperationException("There are multiple instances of repeated tag: " + tag); } return allRepeateds.get(0).instantiateNew(); } @@ -135,8 +130,7 @@ public void assertNoUnresolvedVariables() { final Pattern p = Pattern.compile("\\[\\[.+?]]"); findUnresolved(p, unresolvedVariables); if (unresolvedVariables.size() > 0) { - throw new UnsupportedOperationException( - "The following variables are still unresolved: " + + throw new UnsupportedOperationException("The 
following variables are still unresolved: " + makeCommaSeparatedList(unresolvedVariables)); } } @@ -168,8 +162,7 @@ class Container extends CodeGenerator { @Override Container cloneMe() { - return new Container( - Arrays.stream(items).map(CodeGenerator::cloneMe).toArray(CodeGenerator[]::new)); + return new Container(Arrays.stream(items).map(CodeGenerator::cloneMe).toArray(CodeGenerator[]::new)); } @Override @@ -203,7 +196,7 @@ public void appendToBuilder(StringBuilder sb, String indent, String[] separatorH @Override public CodeGenerator freezeHelper() { final CodeGenerator[] newItems = - Arrays.stream(items).map(CodeGenerator::freezeHelper).toArray(CodeGenerator[]::new); + Arrays.stream(items).map(CodeGenerator::freezeHelper).toArray(CodeGenerator[]::new); return new Container(newItems); } } @@ -280,8 +273,8 @@ public CodeGenerator freezeHelper() { final class Block extends CodeGenerator { /** - * The {@code prefix} is used for special indentation, e.g. in the "else" part of an if-else - * statement, or the "catch" part of a try-catch block. + * The {@code prefix} is used for special indentation, e.g. in the "else" part of an if-else statement, or the + * "catch" part of a try-catch block. 
*/ private final String prefix; private final CodeGenerator inner; @@ -522,7 +515,7 @@ public void appendToBuilder(StringBuilder sb, String indent, String[] separatorH @Override public CodeGenerator freezeHelper() { final CodeGenerator[] frozen = - instances.stream().map(CodeGenerator::freezeHelper).toArray(CodeGenerator[]::new); + instances.stream().map(CodeGenerator::freezeHelper).toArray(CodeGenerator[]::new); return new Container(frozen); } diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/codegen/TypeAnalyzer.java b/DB/src/main/java/io/deephaven/db/v2/utils/codegen/TypeAnalyzer.java index ea74d5cd0e1..e25e89d856d 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/codegen/TypeAnalyzer.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/codegen/TypeAnalyzer.java @@ -4,10 +4,9 @@ import io.deephaven.util.type.TypeUtils; /** - * When {@code type} is either a primitive type or a boxed type, {@code dbPrimitiveType} is the type - * used by the DB to represent it -- that is, Boolean for either boolean or Boolean results, but the - * primitive version of all other primitive/boxed types. For all other types (e.g. misc Objects), - * {@code dbPrimitiveType} is null. + * When {@code type} is either a primitive type or a boxed type, {@code dbPrimitiveType} is the type used by the DB to + * represent it -- that is, Boolean for either boolean or Boolean results, but the primitive version of all other + * primitive/boxed types. For all other types (e.g. misc Objects), {@code dbPrimitiveType} is null. **/ public class TypeAnalyzer { public static TypeAnalyzer create(Class type) { @@ -29,15 +28,12 @@ public static TypeAnalyzer create(Class type) { dbPrimitiveType = TypeUtils.getUnboxedType(type); if (dbPrimitiveType == null) { chunkVariableBase = "ObjectChunk"; - chunkExtendsGenericArgs = - String.format("<%s, ? extends Attributes.Values>", type.getCanonicalName()); - chunkSuperGenericArgs = - String.format("<%s, ? 
super Attributes.Values>", type.getCanonicalName()); + chunkExtendsGenericArgs = String.format("<%s, ? extends Attributes.Values>", type.getCanonicalName()); + chunkSuperGenericArgs = String.format("<%s, ? super Attributes.Values>", type.getCanonicalName()); chunkTypeString = "Object"; } else { final String simpleName = dbPrimitiveType.getSimpleName(); - final String camelCasedName = - Character.toUpperCase(simpleName.charAt(0)) + simpleName.substring(1); + final String camelCasedName = Character.toUpperCase(simpleName.charAt(0)) + simpleName.substring(1); chunkVariableBase = camelCasedName + "Chunk"; chunkExtendsGenericArgs = ""; chunkSuperGenericArgs = ""; @@ -46,15 +42,13 @@ public static TypeAnalyzer create(Class type) { } final String returnTypeName = - dbPrimitiveType == null ? type.getCanonicalName() : dbPrimitiveType.getName(); + dbPrimitiveType == null ? type.getCanonicalName() : dbPrimitiveType.getName(); final String readChunkVariableType = chunkVariableBase + chunkExtendsGenericArgs; - final String writeChunkVariableType = - "Writable" + chunkVariableBase + chunkSuperGenericArgs; + final String writeChunkVariableType = "Writable" + chunkVariableBase + chunkSuperGenericArgs; final String asReadMethod = "as" + chunkVariableBase; final String asWritableMethod = "asWritable" + chunkVariableBase; - return new TypeAnalyzer(type, dbPrimitiveType, returnTypeName, chunkTypeString, - readChunkVariableType, - writeChunkVariableType, asReadMethod, asWritableMethod); + return new TypeAnalyzer(type, dbPrimitiveType, returnTypeName, chunkTypeString, readChunkVariableType, + writeChunkVariableType, asReadMethod, asWritableMethod); } public final Class type; @@ -67,9 +61,9 @@ public static TypeAnalyzer create(Class type) { public final String asWritableChunkMethodName; private TypeAnalyzer(final Class type, final Class dbPrimitiveType, final String typeString, - final String chunkTypeString, - final String readChunkVariableType, final String writableChunkVariableType, - 
final String asReadChunkMethodName, final String asWritableChunkMethodName) { + final String chunkTypeString, + final String readChunkVariableType, final String writableChunkVariableType, + final String asReadChunkMethodName, final String asWritableChunkMethodName) { this.type = type; this.dbPrimitiveType = dbPrimitiveType; this.typeString = typeString; @@ -81,21 +75,21 @@ private TypeAnalyzer(final Class type, final Class dbPrimitiveType, final String } /** - * A DhFormulaColumn will unbox the result of any formula that returns a boxed type (except - * Boolean). If the formula returns null, this could trigger a NullPointerException. + * A DhFormulaColumn will unbox the result of any formula that returns a boxed type (except Boolean). If the formula + * returns null, this could trigger a NullPointerException. * * @param formulaString The formula to potentially wrap with a cast function */ public String wrapWithCastIfNecessary(String formulaString) { if (type.isPrimitive() // Implies dbPrimitiveType.equals(type); no risk of NPE - || dbPrimitiveType == Boolean.class // No risk of NPE - || dbPrimitiveType == null) // Return type is not a primitive or boxed type + || dbPrimitiveType == Boolean.class // No risk of NPE + || dbPrimitiveType == null) // Return type is not a primitive or boxed type { return formulaString; // No need to cast } // Otherwise, perform perform a null-safe unboxing cast - return DBLanguageFunctionUtil.class.getCanonicalName() + '.' + dbPrimitiveType.getName() - + "Cast(" + formulaString + ')'; + return DBLanguageFunctionUtil.class.getCanonicalName() + '.' 
+ dbPrimitiveType.getName() + "Cast(" + + formulaString + ')'; } } diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/compact/CompactKernel.java b/DB/src/main/java/io/deephaven/db/v2/utils/compact/CompactKernel.java index 41347fe32ed..67d6941c6a9 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/compact/CompactKernel.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/compact/CompactKernel.java @@ -4,72 +4,65 @@ public interface CompactKernel { /** - * Compacts values into the front of the chunk, retaining only values where the parallel - * retainValues chunk has a true value. + * Compacts values into the front of the chunk, retaining only values where the parallel retainValues chunk has a + * true value. * * @param values a chunk of values, input and output * @param retainValues the values to retain */ - void compact(WritableChunk values, - BooleanChunk retainValues); + void compact(WritableChunk values, BooleanChunk retainValues); /** - * Sort valuesChunk, eliminate duplicates, and write the number of times a value occurred into - * the parallel slot within counts. null values are removed from the chunk. + * Sort valuesChunk, eliminate duplicates, and write the number of times a value occurred into the parallel slot + * within counts. null values are removed from the chunk. * * @param valueChunk a chunk of values, input and output - * @param counts an output chunk parallel to valueChunk with the number of times a value - * occurred + * @param counts an output chunk parallel to valueChunk with the number of times a value occurred */ default void compactAndCount(WritableChunk valueChunk, - WritableIntChunk counts) { + WritableIntChunk counts) { compactAndCount(valueChunk, counts, false); } /** - * Sort valuesChunk, eliminate duplicates, and write the number of times a value occurred into - * the parallel slot within counts. + * Sort valuesChunk, eliminate duplicates, and write the number of times a value occurred into the parallel slot + * within counts. 
* * @param valueChunk a chunk of values, input and output - * @param counts an output chunk parallel to valueChunk with the number of times a value - * occurred + * @param counts an output chunk parallel to valueChunk with the number of times a value occurred * @param countNull if the compaction should count nulls or not */ void compactAndCount(WritableChunk valueChunk, - WritableIntChunk counts, boolean countNull); + WritableIntChunk counts, boolean countNull); /** - * For each run in valuesChunk, sort it, eliminate duplicates, and write the number of times a - * value occurred into the parallel slot within counts. null values are removed from the chunk. + * For each run in valuesChunk, sort it, eliminate duplicates, and write the number of times a value occurred into + * the parallel slot within counts. null values are removed from the chunk. * * @param valueChunk a chunk of values, input and output - * @param counts an output chunk parallel to valueChunk with the number of times a value - * occurred + * @param counts an output chunk parallel to valueChunk with the number of times a value occurred * @param startPositions the start of each run * @param lengths the length of each run, input and output */ default void compactAndCount(WritableChunk valueChunk, - WritableIntChunk counts, - IntChunk startPositions, - WritableIntChunk lengths) { + WritableIntChunk counts, IntChunk startPositions, + WritableIntChunk lengths) { compactAndCount(valueChunk, counts, startPositions, lengths, false); } /** - * For each run in valuesChunk, sort it, eliminate duplicates, and write the number of times a - * value occurred into the parallel slot within counts. + * For each run in valuesChunk, sort it, eliminate duplicates, and write the number of times a value occurred into + * the parallel slot within counts. 
* * @param valueChunk a chunk of values, input and output - * @param counts an output chunk parallel to valueChunk with the number of times a value - * occurred + * @param counts an output chunk parallel to valueChunk with the number of times a value occurred * @param startPositions the start of each run * @param lengths the length of each run, input and output * @param countNull if the compaction should count nulls or not */ void compactAndCount(WritableChunk valueChunk, - WritableIntChunk counts, - IntChunk startPositions, - WritableIntChunk lengths, boolean countNull); + WritableIntChunk counts, IntChunk startPositions, + WritableIntChunk lengths, boolean countNull); static CompactKernel makeCompact(ChunkType chunkType) { switch (chunkType) { diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/copy/CopyKernel.java b/DB/src/main/java/io/deephaven/db/v2/utils/copy/CopyKernel.java index bad2c81e3b4..d5364aa62f5 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/copy/CopyKernel.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/copy/CopyKernel.java @@ -39,20 +39,19 @@ static CopyKernel makeCopyKernel(ChunkType chunkType) { * @param output the output chunk * @param baseInput the input array to use when bit in useOverInput is zero (array) * @param overInput the input array to use when bit in useOverInput is one (array) - * @param useOverInput the bitset array to indicate whether to use baseInput or overInput for - * each element + * @param useOverInput the bitset array to indicate whether to use baseInput or overInput for each element * @param srcOffset the offset in baseInput/overInput * @param dstOffset the offset in output * @param length the number of elements to copy */ - void conditionalCopy(WritableChunk output, Object baseInput, - Object overInput, long[] useOverInput, - int srcOffset, int dstOffset, int length); + void conditionalCopy(WritableChunk output, Object baseInput, Object overInput, + long[] useOverInput, + int srcOffset, int dstOffset, int 
length); class Utils { /** - * Returns the index of the first bit that is set to {@code true} that occurs on or after - * the specified starting index and up to but not including the specified word index. + * Returns the index of the first bit that is set to {@code true} that occurs on or after the specified starting + * index and up to but not including the specified word index. *

      * If no such bit exists then {@code endIndex} is returned. * @@ -60,8 +59,7 @@ class Utils { * @param fromIndex the index to start checking from (inclusive) * @param endIndex the index to stop checking from (exclusive) * @param flipWords if true return first false bit set instead of the first true bit set - * @return the index of the next set bit, any value {@code >= endIndex} is returned if no - * such bit exists + * @return the index of the next set bit, any value {@code >= endIndex} is returned if no such bit exists */ static int nextSetBit(long[] words, int fromIndex, int endIndex, boolean flipWords) { if (fromIndex >= endIndex) { diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/freezeby/FreezeByCountOperator.java b/DB/src/main/java/io/deephaven/db/v2/utils/freezeby/FreezeByCountOperator.java index eedf8d554ef..6da78b4e0fe 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/freezeby/FreezeByCountOperator.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/freezeby/FreezeByCountOperator.java @@ -24,11 +24,9 @@ public FreezeByCountOperator() { @Override public void addChunk(BucketedContext context, Chunk values, - LongChunk inputIndices, - IntChunk destinations, - IntChunk startPositions, - IntChunk length, - WritableBooleanChunk stateModified) { + LongChunk inputIndices, IntChunk destinations, + IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { for (int ii = 0; ii < startPositions.size(); ++ii) { final int position = startPositions.get(ii); final int destination = destinations.get(position); @@ -41,11 +39,9 @@ public void addChunk(BucketedContext context, Chunk @Override public void removeChunk(BucketedContext context, Chunk values, - LongChunk inputIndices, - IntChunk destinations, - IntChunk startPositions, - IntChunk length, - WritableBooleanChunk stateModified) { + LongChunk inputIndices, IntChunk destinations, + IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { for (int ii = 0; 
ii < startPositions.size(); ++ii) { final int position = startPositions.get(ii); final int destination = destinations.get(position); @@ -54,9 +50,8 @@ public void removeChunk(BucketedContext context, Chunk values, - LongChunk inputIndices, long destination) { + public boolean addChunk(SingletonContext context, int chunkSize, Chunk values, + LongChunk inputIndices, long destination) { if (chunkSize != 1) { throw new IllegalStateException("FreezeBy only allows one row per state!"); } @@ -65,9 +60,8 @@ public boolean addChunk(SingletonContext context, int chunkSize, } @Override - public boolean removeChunk(SingletonContext context, int chunkSize, - Chunk values, - LongChunk inputIndices, long destination) { + public boolean removeChunk(SingletonContext context, int chunkSize, Chunk values, + LongChunk inputIndices, long destination) { setEmpty(destination); return false; } @@ -98,8 +92,7 @@ private void setFilled(long destination) { private void setEmpty(long destination) { final byte count = rowCount.getAndSetUnsafe(destination, (byte) 0); if (count != 1) { - throw new IllegalStateException( - "FreezeBy only allows one row per state, old count: " + count); + throw new IllegalStateException("FreezeBy only allows one row per state, old count: " + count); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/freezeby/FreezeByOperator.java b/DB/src/main/java/io/deephaven/db/v2/utils/freezeby/FreezeByOperator.java index 876b457ec48..c50e167ad76 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/freezeby/FreezeByOperator.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/freezeby/FreezeByOperator.java @@ -21,8 +21,7 @@ public class FreezeByOperator implements IterativeChunkedAggregationOperator { private final String name; private final FreezeByHelper helper; - public FreezeByOperator(Class type, String resultName, - FreezeByCountOperator freezeByCountOperator) { + public FreezeByOperator(Class type, String resultName, FreezeByCountOperator 
freezeByCountOperator) { resultSource = ArrayBackedColumnSource.getMemoryColumnSource(0, type); name = resultName; helper = makeHelper(resultSource, freezeByCountOperator); @@ -30,53 +29,42 @@ public FreezeByOperator(Class type, String resultName, @Override public void addChunk(BucketedContext context, Chunk values, - LongChunk inputIndices, - IntChunk destinations, - IntChunk startPositions, - IntChunk length, - WritableBooleanChunk stateModified) { + LongChunk inputIndices, IntChunk destinations, + IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) { helper.addChunk(values, startPositions, destinations, length); } @Override public void removeChunk(BucketedContext context, Chunk values, - LongChunk inputIndices, - IntChunk destinations, - IntChunk startPositions, - IntChunk length, - WritableBooleanChunk stateModified) {} + LongChunk inputIndices, IntChunk destinations, + IntChunk startPositions, IntChunk length, + WritableBooleanChunk stateModified) {} @Override - public void modifyChunk(BucketedContext context, - Chunk previousValues, - Chunk newValues, - LongChunk postShiftIndices, - IntChunk destinations, - IntChunk startPositions, - IntChunk length, - WritableBooleanChunk stateModified) {} + public void modifyChunk(BucketedContext context, Chunk previousValues, + Chunk newValues, LongChunk postShiftIndices, + IntChunk destinations, IntChunk startPositions, + IntChunk length, WritableBooleanChunk stateModified) {} @Override - public boolean addChunk(SingletonContext context, int chunkSize, - Chunk values, - LongChunk inputIndices, long destination) { + public boolean addChunk(SingletonContext context, int chunkSize, Chunk values, + LongChunk inputIndices, long destination) { helper.addChunk(values, destination); return false; } @Override - public boolean removeChunk(SingletonContext context, int chunkSize, - Chunk values, - LongChunk inputIndices, long destination) { + public boolean removeChunk(SingletonContext context, int 
chunkSize, Chunk values, + LongChunk inputIndices, long destination) { return false; } @Override public boolean modifyChunk(SingletonContext context, int chunkSize, - Chunk previousValues, - Chunk newValues, - LongChunk postShiftIndices, long destination) { + Chunk previousValues, Chunk newValues, + LongChunk postShiftIndices, long destination) { return false; } @@ -97,14 +85,13 @@ public void startTrackingPrevValues() { @Override public void propagateUpdates(@NotNull ShiftAwareListener.Update downstream, - @NotNull ReadOnlyIndex newDestinations) { + @NotNull ReadOnlyIndex newDestinations) { if (downstream.removed.nonempty()) { helper.clearIndex(downstream.removed); } } - private static FreezeByHelper makeHelper(WritableSource source, - FreezeByCountOperator rowCount) { + private static FreezeByHelper makeHelper(WritableSource source, FreezeByCountOperator rowCount) { switch (source.getChunkType()) { default: case Boolean: @@ -135,9 +122,8 @@ private static FreezeByHelper makeHelper(WritableSource source, } interface FreezeByHelper { - void addChunk(Chunk values, - IntChunk startPositions, - IntChunk destinations, IntChunk length); + void addChunk(Chunk values, IntChunk startPositions, + IntChunk destinations, IntChunk length); void addChunk(Chunk values, long destination); diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/freezeby/ReplicateFreezeBy.java b/DB/src/main/java/io/deephaven/db/v2/utils/freezeby/ReplicateFreezeBy.java index e5c8753f1e3..a5245ea90a0 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/freezeby/ReplicateFreezeBy.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/freezeby/ReplicateFreezeBy.java @@ -13,45 +13,43 @@ public class ReplicateFreezeBy { public static void main(String[] args) throws IOException { - final List results = ReplicatePrimitiveCode - .charToAllButBoolean(CharFreezeByHelper.class, ReplicatePrimitiveCode.MAIN_SRC); + final List results = + ReplicatePrimitiveCode.charToAllButBoolean(CharFreezeByHelper.class, 
ReplicatePrimitiveCode.MAIN_SRC); - final Optional longResult = - results.stream().filter(s -> s.contains("Long")).findFirst(); + final Optional longResult = results.stream().filter(s -> s.contains("Long")).findFirst(); // noinspection OptionalGetWithoutIsPresent fixupLong(longResult.get()); - final String objectResult = ReplicatePrimitiveCode.charToObject(CharFreezeByHelper.class, - ReplicatePrimitiveCode.MAIN_SRC); + final String objectResult = + ReplicatePrimitiveCode.charToObject(CharFreezeByHelper.class, ReplicatePrimitiveCode.MAIN_SRC); fixupObject(objectResult); - final String booleanResult = ReplicatePrimitiveCode.charToBoolean(CharFreezeByHelper.class, - ReplicatePrimitiveCode.MAIN_SRC); + final String booleanResult = + ReplicatePrimitiveCode.charToBoolean(CharFreezeByHelper.class, ReplicatePrimitiveCode.MAIN_SRC); fixupBoolean(booleanResult); } private static void fixupObject(String objectResult) throws IOException { final File objectFile = new File(objectResult); final List lines = FileUtils.readLines(objectFile, Charset.defaultCharset()); - final List newLines = - ReplicateUtilities.replaceRegion(lines, "clearIndex", Collections - .singletonList(" removed.forAllLongs(idx -> resultSource.set(idx, null));")); + final List newLines = ReplicateUtilities.replaceRegion(lines, "clearIndex", + Collections.singletonList(" removed.forAllLongs(idx -> resultSource.set(idx, null));")); FileUtils.writeLines(objectFile, newLines); } private static void fixupBoolean(String booleanResult) throws IOException { final File booleanFile = new File(booleanResult); final List lines = FileUtils.readLines(booleanFile, Charset.defaultCharset()); - final List newLines = ReplicateUtilities.globalReplacements(lines, - "final BooleanChunk asBoolean = values.asBooleanChunk", - "final ObjectChunk asBoolean = values.asObjectChunk"); + final List newLines = + ReplicateUtilities.globalReplacements(lines, "final BooleanChunk asBoolean = values.asBooleanChunk", + "final ObjectChunk 
asBoolean = values.asObjectChunk"); FileUtils.writeLines(booleanFile, newLines); } private static void fixupLong(String longResult) throws IOException { final File longFile = new File(longResult); final List lines = FileUtils.readLines(longFile, Charset.defaultCharset()); - final List newLines = ReplicateUtilities.globalReplacements(0, lines, - "LongArraySource", "AbstractLongArraySource"); + final List newLines = + ReplicateUtilities.globalReplacements(0, lines, "LongArraySource", "AbstractLongArraySource"); FileUtils.writeLines(longFile, newLines); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/metrics/MetricsManager.java b/DB/src/main/java/io/deephaven/db/v2/utils/metrics/MetricsManager.java index 325fe134137..8bbc3256ffc 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/metrics/MetricsManager.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/metrics/MetricsManager.java @@ -14,19 +14,17 @@ public class MetricsManager { private static final Logger log = LoggerFactory.getLogger(MetricsManager.class); public static final boolean enabled = Configuration.getInstance().getBooleanForClassWithDefault( - MetricsManager.class, "enabled", false); - private static final boolean toStdout = - Configuration.getInstance().getBooleanForClassWithDefault( + MetricsManager.class, "enabled", false); + private static final boolean toStdout = Configuration.getInstance().getBooleanForClassWithDefault( MetricsManager.class, "toStdout", false); private static final long logPeriodNanos = - 1_000_000_000L * Configuration.getInstance().getIntegerForClassWithDefault( - MetricsManager.class, "logPeriodSeconds", 120); - private static final boolean periodicUpdates = - Configuration.getInstance().getBooleanForClassWithDefault( + 1_000_000_000L * Configuration.getInstance().getIntegerForClassWithDefault( + MetricsManager.class, "logPeriodSeconds", 120); + private static final boolean periodicUpdates = Configuration.getInstance().getBooleanForClassWithDefault( 
MetricsManager.class, "periodicUpdates", false); - private static final int maxMetricsPerType = Configuration.getInstance() - .getIntegerForClassWithDefault(MetricsManager.class, "maxMetricsPerType", 256); + private static final int maxMetricsPerType = + Configuration.getInstance().getIntegerForClassWithDefault(MetricsManager.class, "maxMetricsPerType", 256); // This class is a singleton. public static final MetricsManager instance = new MetricsManager(); @@ -56,22 +54,16 @@ private static abstract class MetricsFamily { // They are updated atomically together when metrics are added. protected int count; protected final String[] names = new String[maxMetricsPerType]; - protected final TObjectIntMap nameToMetricId = - new TObjectIntHashMap<>(maxMetricsPerType); + protected final TObjectIntMap nameToMetricId = new TObjectIntHashMap<>(maxMetricsPerType); // Threads have their own version of the counters in a ThreadLocal. // We need to know all of them to do updates and other operations, // so we keep them in a collection. - // Reading and writing to this collection is done synchronizing on the collection object - // itself. - // TODO: We should consider using a container of weak references to avoid holding to - // counters for threads that - // won't update them anymore; that means a fair bit of additional book keeping tho since - // those counts would have - // to live somewhere (a separate deceased count?) plus we would need to reclaim weak - // references gone null etc. - private final ArrayList pendingPerThreadCounters = - new ArrayList<>(maxMetricsPerType); + // Reading and writing to this collection is done synchronizing on the collection object itself. + // TODO: We should consider using a container of weak references to avoid holding to counters for threads that + // won't update them anymore; that means a fair bit of additional book keeping tho since those counts would have + // to live somewhere (a separate deceased count?) 
plus we would need to reclaim weak references gone null etc. + private final ArrayList pendingPerThreadCounters = new ArrayList<>(maxMetricsPerType); private ThreadLocal counters = ThreadLocal.withInitial(() -> { if (!enabled) { return null; @@ -89,12 +81,11 @@ int registerMetric(final String name) { } synchronized (this) { if (count == maxMetricsPerType) { - throw new IllegalStateException("Max number of " + familyName() + " metrics (=" - + maxMetricsPerType + ") already reached!"); + throw new IllegalStateException( + "Max number of " + familyName() + " metrics (=" + maxMetricsPerType + ") already reached!"); } if (nameToMetricId.containsKey(name)) { - throw new IllegalArgumentException( - familyName() + " name=" + name + " already exists!"); + throw new IllegalArgumentException(familyName() + " name=" + name + " already exists!"); } final int id = count++; nameToMetricId.put(name, id); @@ -106,10 +97,8 @@ int registerMetric(final String name) { // The member variables in the block below are only accessed from the timer thread. protected final ArrayType countersSnapshot = makeMetricsArray(); protected int snapshotCount; - protected final String[] namesSortedSnapshot = new String[maxMetricsPerType]; // we log in - // alphabetical - // metric name - // order. + protected final String[] namesSortedSnapshot = new String[maxMetricsPerType]; // we log in alphabetical metric + // name order. protected final ArrayList perThreadCounters = new ArrayList<>(); void snapshotCounters() { @@ -118,9 +107,9 @@ void snapshotCounters() { if (count > snapshotCount) { // get what we are missing from the previous go around. 
System.arraycopy( - names, snapshotCount, - namesSortedSnapshot, snapshotCount, - count - snapshotCount); + names, snapshotCount, + namesSortedSnapshot, snapshotCount, + count - snapshotCount); snapshotCount = count; needsToSort = true; } @@ -143,8 +132,7 @@ void snapshotCounters() { // Note we don't have any protection against races here: // we don't use synchronized blocks neither we access volatile variables. // Strictly speaking, in the Java Memory Model we are not guaranteed to see any updates. - // We do know however that this code will see updates in an Intel x64 + HotSpot - // platform, + // We do know however that this code will see updates in an Intel x64 + HotSpot platform, // due to how that particular implementation is known to work. for (final ArrayType threadCounters : perThreadCounters) { accumulateSnapshot(threadCounters, countersSnapshot, snapshotCount); @@ -152,16 +140,13 @@ void snapshotCounters() { } // Note this is very crude and not intended to implement rate-type counters; - // it is intended only for restart-of-test-iteration type scenarios (eg, JMH benchmark - // iteration teardown). + // it is intended only for restart-of-test-iteration type scenarios (eg, JMH benchmark iteration teardown). // The correct implementation of rate-type counters needs to avoid losing updates; // the looping below is prone to lose updates: a value not included in the previous update // might be cleared thus preventing it from being included in the next update. - // Proper implementation of rate-type counters can be done with a double-buffering approach, - // atomic-swapping + // Proper implementation of rate-type counters can be done with a double-buffering approach, atomic-swapping // of a second, zeroed counters array buffer during update. 
- // We avoid the additional complexity and performance costs of that since we don't have a - // need for rate + // We avoid the additional complexity and performance costs of that since we don't have a need for rate // counters at the moment. void bluntResetCounters() { final int size; @@ -266,86 +251,84 @@ protected void accumulateSnapshot(final long[] src, final long[] dst, final int } }; - private final MetricsFamily longCounterLog2HistogramMetrics = - new MetricsFamily() { - @Override - protected int[][] makeMetricsArray() { - return new int[maxMetricsPerType][65]; - } + private final MetricsFamily longCounterLog2HistogramMetrics = new MetricsFamily() { + @Override + protected int[][] makeMetricsArray() { + return new int[maxMetricsPerType][65]; + } - @Override - protected String familyName() { - return "LongCounterLog2Histogram"; - } + @Override + protected String familyName() { + return "LongCounterLog2Histogram"; + } - @Override - protected void clear(final int[][] counters, final int size) { - for (int i = 0; i < size; ++i) { - for (int j = 0; j < 65; ++j) { - counters[i][j] = 0; - } + @Override + protected void clear(final int[][] counters, final int size) { + for (int i = 0; i < size; ++i) { + for (int j = 0; j < 65; ++j) { + counters[i][j] = 0; } } + } - @Override - protected void accumulateSnapshot(final int[][] src, final int[][] dst, - final int size) { - for (int i = 0; i < size; ++i) { - for (int j = 0; j < 65; ++j) { - dst[i][j] += src[i][j]; - } + @Override + protected void accumulateSnapshot(final int[][] src, final int[][] dst, final int size) { + for (int i = 0; i < size; ++i) { + for (int j = 0; j < 65; ++j) { + dst[i][j] += src[i][j]; } } + } - @Override - protected void log(final String updateTag, final Consumer logger) { - final String prefix = "Metrics " + familyName() + " " + updateTag + ": "; - if (snapshotCount == 0) { - final String s = prefix + "No counters defined."; - logger.accept(s); - return; - } - final TObjectIntHashMap 
nameToMetricIdCopy; - synchronized (this) { - nameToMetricIdCopy = new TObjectIntHashMap<>(nameToMetricId); - } - for (int i = 0; i < snapshotCount; ++i) { - long nsamples = 0; - final StringBuilder sb = new StringBuilder(prefix); - final String name = namesSortedSnapshot[i]; - // We will log our histogram as a sequence of strings "msb:count" - // where count is the total number of times samples with - // msb as its most significant bit were sampled. - // For instance: in the output "[ ..., 3:7, ...]" the element - // "3:7" means values x such that 2^3 <= x < 2^4 were sampled 7 times. - // In histogram terms, 7 values in the interval [ 2^3, 2^4 - 1 ] were sampled. - final String key = "|key: i:n => 2^i <= x < 2^(i+1), z:n => x = 0.| "; - sb.append(key).append(name).append("={ "); - final int metricId = nameToMetricIdCopy.get(name); - boolean haveBefore = false; - for (int j = 64; j >= 0; --j) { - final int v = countersSnapshot[metricId][j]; - if (v == 0) { - continue; - } - if (haveBefore) { - sb.append(", "); - } - if (j == 64) { - sb.append("z"); - } else { - final int msb = 63 - j; - sb.append(msb); - } - sb.append(":").append(v); - haveBefore = true; - nsamples += v; + @Override + protected void log(final String updateTag, final Consumer logger) { + final String prefix = "Metrics " + familyName() + " " + updateTag + ": "; + if (snapshotCount == 0) { + final String s = prefix + "No counters defined."; + logger.accept(s); + return; + } + final TObjectIntHashMap nameToMetricIdCopy; + synchronized (this) { + nameToMetricIdCopy = new TObjectIntHashMap<>(nameToMetricId); + } + for (int i = 0; i < snapshotCount; ++i) { + long nsamples = 0; + final StringBuilder sb = new StringBuilder(prefix); + final String name = namesSortedSnapshot[i]; + // We will log our histogram as a sequence of strings "msb:count" + // where count is the total number of times samples with + // msb as its most significant bit were sampled. 
+ // For instance: in the output "[ ..., 3:7, ...]" the element + // "3:7" means values x such that 2^3 <= x < 2^4 were sampled 7 times. + // In histogram terms, 7 values in the interval [ 2^3, 2^4 - 1 ] were sampled. + final String key = "|key: i:n => 2^i <= x < 2^(i+1), z:n => x = 0.| "; + sb.append(key).append(name).append("={ "); + final int metricId = nameToMetricIdCopy.get(name); + boolean haveBefore = false; + for (int j = 64; j >= 0; --j) { + final int v = countersSnapshot[metricId][j]; + if (v == 0) { + continue; } - sb.append(" }, nsamples=").append(nsamples); - logger.accept(sb.toString()); + if (haveBefore) { + sb.append(", "); + } + if (j == 64) { + sb.append("z"); + } else { + final int msb = 63 - j; + sb.append(msb); + } + sb.append(":").append(v); + haveBefore = true; + nsamples += v; } + sb.append(" }, nsamples=").append(nsamples); + logger.accept(sb.toString()); } - }; + } + }; int registerIntCounterMetric(final String name) { if (!enabled) { @@ -371,8 +354,7 @@ int registerLongCounterLog2HistogramMetric(final String name) { return longCounterLog2HistogramMetrics.registerMetric(name); } - // This is part of the fast path. We should avoid as much as possible holding up the calling - // thread. + // This is part of the fast path. We should avoid as much as possible holding up the calling thread. void sampleIntCounter(final int id, final long n) { if (!enabled) { return; @@ -381,8 +363,7 @@ void sampleIntCounter(final int id, final long n) { threadMetrics[id] += n; } - // This is part of the fast path. We should avoid as much as possible holding up the calling - // thread. + // This is part of the fast path. We should avoid as much as possible holding up the calling thread. void sampleLongCounter(final int id, final long n) { if (!enabled) { return; @@ -391,8 +372,7 @@ void sampleLongCounter(final int id, final long n) { threadMetrics[id] += n; } - // This is part of the fast path. 
We should avoid as much as possible holding up the calling - // thread. + // This is part of the fast path. We should avoid as much as possible holding up the calling thread. void sampleLongCounterLog2HistogramCount(final int id, final long v) { if (!enabled) { return; @@ -415,8 +395,7 @@ public void update(final String updateTag, final Consumer logger) { } // Note this is very crude and not intended to implement rate-type counters; - // it is intended only for restart-of-test-iteration type scenarios (eg, JMH benchmark iteration - // teardown) + // it is intended only for restart-of-test-iteration type scenarios (eg, JMH benchmark iteration teardown) // See comment in the implementation of the methods called below. public void bluntResetAllCounters() { intCounterMetrics.bluntResetCounters(); diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/rsp/DisposableRspBitmap.java b/DB/src/main/java/io/deephaven/db/v2/utils/rsp/DisposableRspBitmap.java index 395387e3a73..a42758d766b 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/rsp/DisposableRspBitmap.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/rsp/DisposableRspBitmap.java @@ -3,8 +3,7 @@ import io.deephaven.db.v2.utils.rsp.container.Container; /** - * "Disposable" version of {@link RspBitmap}, which allows other instances of {@link RspBitmap} to - * steal its containers. + * "Disposable" version of {@link RspBitmap}, which allows other instances of {@link RspBitmap} to steal its containers. 
*/ public final class DisposableRspBitmap extends RspBitmap { diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/rsp/IndexRangeIteratorView.java b/DB/src/main/java/io/deephaven/db/v2/utils/rsp/IndexRangeIteratorView.java index 92d81e559c8..91377f2f008 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/rsp/IndexRangeIteratorView.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/rsp/IndexRangeIteratorView.java @@ -18,8 +18,7 @@ public class IndexRangeIteratorView implements SearchRangeIterator { private boolean noMore; private boolean itFinished; - public IndexRangeIteratorView(final Index.RangeIterator it, final long offset, - final long rangesEnd) { + public IndexRangeIteratorView(final Index.RangeIterator it, final long offset, final long rangesEnd) { this.it = it; this.offset = offset; this.rangesEnd = rangesEnd; diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/rsp/RspArray.java b/DB/src/main/java/io/deephaven/db/v2/utils/rsp/RspArray.java index d918c359695..2130f1e1805 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/rsp/RspArray.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/rsp/RspArray.java @@ -27,19 +27,19 @@ /** * *

      - * A set representation for long values using Regular Space Partitioning (RSP) of the long space in - * "blocks" of (2^16) elements. + * A set representation for long values using Regular Space Partitioning (RSP) of the long space in "blocks" of (2^16) + * elements. *

      * *

      - * Modeled heavily after roaringbitmap.RoaringArray (keeping API method names and semantics as much - * as possible), with modifications for: + * Modeled heavily after roaringbitmap.RoaringArray (keeping API method names and semantics as much as possible), with + * modifications for: *

      * *
        *
      1. Full "unsigned long" 64 bit range (as opposed to 32 bit in RoaringArray).
      2. - *
      3. Spans of all bits set ("AllSet") that can be arbitrarily big (ie, not constrained to 2^16 = - * RB Container size).
      4. + *
      5. Spans of all bits set ("AllSet") that can be arbitrarily big (ie, not constrained to 2^16 = RB Container + * size).
      6. *
      * *

      @@ -52,15 +52,14 @@ * *

        *
      • A "block" is a particular interval [n*2^16, (n+1)*2^16 - 1] of the long domain.
      • - *
      • A "span" is a partition of the domain consisting of one or more consecutive blocks;
      • a - * span is a subset of the domain represented by an interval [n*2^16, (n+m)*2^16 - 1], m >= 1. - *
      • Full blocks are blocks whose domain are fully contained in the set, ie, the set contains - * every possible value in the block's interval (as a bitmap, it would be "all ones").
      • - *
      • Spans of full blocks are represented by a single "full blocks span" object (just a Long) - * which knows how many 2^16 ranges it has (it's "full blocks span len" ("flen") is the number of - * full blocks in the span).
      • - *
      • Individual blocks that are not completely full are stored in an RB Container; their "full - * blocks span len" is zero.
      • + *
      • A "span" is a partition of the domain consisting of one or more consecutive blocks;
      • a span is a subset of + * the domain represented by an interval [n*2^16, (n+m)*2^16 - 1], m >= 1. + *
      • Full blocks are blocks whose domain are fully contained in the set, ie, the set contains every possible value in + * the block's interval (as a bitmap, it would be "all ones").
      • + *
      • Spans of full blocks are represented by a single "full blocks span" object (just a Long) which knows how many + * 2^16 ranges it has (it's "full blocks span len" ("flen") is the number of full blocks in the span).
      • + *
      • Individual blocks that are not completely full are stored in an RB Container; their "full blocks span len" is + * zero.
      • *
      * *

      @@ -68,12 +67,12 @@ *

      * *
        - *
      • a long[] spanInfos array that contains the information for the offset to the - * values in the span, which we call the span's "key". For instance, a full block span that - * represents all the long values in [65536, 196607] has as its key the value 65536.
      • - *
      • an Object[] spans array that contains the actual spans. At the most basic level, - * a span can be either a full block span or a container of values (but there is nuance in exactly - * how to represent them, see below).
      • + *
      • a long[] spanInfos array that contains the information for the offset to the values in the span, + * which we call the span's "key". For instance, a full block span that represents all the long values in [65536, + * 196607] has as its key the value 65536.
      • + *
      • an Object[] spans array that contains the actual spans. At the most basic level, a span can be + * either a full block span or a container of values (but there is nuance in exactly how to represent them, see + * below).
      • *
      * *

      @@ -81,61 +80,56 @@ *

      * *

      - * The long[] spanInfos and Object[] spans data members of this class are - * used, combined, to represent the offset (key) and span values in the set, against that offset. - * The two arrays are used, together, as parallel arrays and the information for a given conceptual - * span is contained in both of them for the same corresponding index i. + * The long[] spanInfos and Object[] spans data members of this class are used, combined, to + * represent the offset (key) and span values in the set, against that offset. The two arrays are used, together, as + * parallel arrays and the information for a given conceptual span is contained in both of them for the same + * corresponding index i. *

      * *

      - * There are two basic cases for a span: it is either a full blocks span, containing a >=1 number of - * full blocks, or it is a container, containing individual values in the particular 2^16 block - * corresponding to the span's key. + * There are two basic cases for a span: it is either a full blocks span, containing a >=1 number of full blocks, or it + * is a container, containing individual values in the particular 2^16 block corresponding to the span's key. *

      * *

      - * There are four ways total that these two cases can be represented between the long in the - * `spanInfos` array and the Object in the `spans` array. Given a span at position `i`: + * There are four ways total that these two cases can be represented between the long in the `spanInfos` array and the + * Object in the `spans` array. Given a span at position `i`: *

      * *
        - *
      1. If the corresponding Object spans[i] is of type Long, then the - * long spanInfos[i] value is the key for the span (with its lower 16 bits as zero), - * and the Long value represents how many full blocks are present. Example, the set [ 0, 2^50 - 1 ] - * is represented as spanInfo==0 and span==Long(2^34).
      2. + *
      3. If the corresponding Object spans[i] is of type Long, then the long spanInfos[i] value + * is the key for the span (with its lower 16 bits as zero), and the Long value represents how many full blocks are + * present. Example, the set [ 0, 2^50 - 1 ] is represented as spanInfo==0 and span==Long(2^34).
      4. * - *
      5. As an optimization to conserve memory, if the Object spans[i] is the Object reference with - * value FULL_BLOCK_SPAN_MARKER (a singleton and final marker Object defined statically - * in this file). then the upper 48 bits of the long spanInfo[i] value represent the - * key for the span, and the lower 16 bits of the long spanInfo[i] value represent the - * full block span length. Example, the set [ 65536, 196607 ] is represented by - * spanInfo==65538 and span==FULL_BLOCK_SPAN_MARKER (note - * 196607 == 65536*3 - 1, so the set is 2 full blocks, and - * 65538 == 65536 | 2.
      6. + *
      7. As an optimization to conserve memory, if the Object spans[i] is the Object reference with value + * FULL_BLOCK_SPAN_MARKER (a singleton and final marker Object defined statically in this file). then the + * upper 48 bits of the long spanInfo[i] value represent the key for the span, and the lower 16 bits of the + * long spanInfo[i] value represent the full block span length. Example, the set [ 65536, 196607 ] is + * represented by spanInfo==65538 and span==FULL_BLOCK_SPAN_MARKER (note + * 196607 == 65536*3 - 1, so the set is 2 full blocks, and 65538 == 65536 | 2.
      8. * - *
      9. If the corresponding Object spans[i] is null, then the - * long spanInfos[i] represents the single value present in the span (note in this - * case, its upper 16 bits still corresponds to its key). Example, the set { 65537 } is represented - * by spanInfo==65537 and span==null.
      10. + *
      11. If the corresponding Object spans[i] is null, then the long spanInfos[i] represents the + * single value present in the span (note in this case, its upper 16 bits still corresponds to its key). Example, the + * set { 65537 } is represented by spanInfo==65537 and span==null.
      12. * *
      13. If the corresponding Object spans[i] is of type short[] or of type - * Container, then it represents a container of multiple values in a single block (but - * not all of the possible values in the block, since in that case it would be a full block span as - * above). In this case the higher 48 bits of its corresponding spanInfo represent the key for the - * span. Depending on the actual type of span there are two subcases: + * Container, then it represents a container of multiple values in a single block (but not all of the + * possible values in the block, since in that case it would be a full block span as above). In this case the higher 48 + * bits of its corresponding spanInfo represent the key for the span. Depending on the actual type of span there are two + * subcases: * *
          - *
        1. If spans[i] is of type Container, then the values in the - * roaringbitmaps container object are part of the set, considered against its key offset. The key - * is represented in the higher 48 bits of its corresponding spaninfo. The lower 16 bits of spanInfo - * are zero in this case. Example, the set [ 100,000-100,010, 100,020-100,030 ] is represented by - * spaInfo==65536, span==RunContainer({34464-34474, 34484-34494})
        2. + *
        3. If spans[i] is of type Container, then the values in the roaringbitmaps container + * object are part of the set, considered against its key offset. The key is represented in the higher 48 bits of its + * corresponding spaninfo. The lower 16 bits of spanInfo are zero in this case. Example, the set [ 100,000-100,010, + * 100,020-100,030 ] is represented by spaInfo==65536, + * span==RunContainer({34464-34474, 34484-34494})
        4. * - *
        5. If spans[i] is of type short[], then an ArrayContainer - * with the short[] contents needs to be reconstructed. The lower 16 bits of the - * spanInfo value are used to represent the other data members of ArrayContainer. This case exists - * as an optimization to reduce memory utilization for sparse blocks. For details of this - * reconstruction please see the code for the definition of the SpanView class below.
        6. + *
        7. If spans[i] is of type short[], then an ArrayContainer with the + * short[] contents needs to be reconstructed. The lower 16 bits of the spanInfo value are used to + * represent the other data members of ArrayContainer. This case exists as an optimization to reduce memory utilization + * for sparse blocks. For details of this reconstruction please see the code for the definition of the SpanView class + * below.
        8. *
        *
      14. *
      @@ -145,56 +139,49 @@ *

      * *
        - *
      • Our version of RB Container supports a "shared" boolean flag that is used to implement - * copy-on-write (COW) semantics and allow operation results to share containers in COW - * fashion.
      • - *
      • We extended the Container class hierarchy to include specializations for empty, single value, - * single range, and two values containers. These are immutable; empty is used only as a way to - * return empty results, and are never actual stored in the spans array. For details, please see the - * Container class definition and derived class hierarchy.
      • + *
      • Our version of RB Container supports a "shared" boolean flag that is used to implement copy-on-write (COW) + * semantics and allow operation results to share containers in COW fashion.
      • + *
      • We extended the Container class hierarchy to include specializations for empty, single value, single range, and + * two values containers. These are immutable; empty is used only as a way to return empty results, and are never actual + * stored in the spans array. For details, please see the Container class definition and derived class hierarchy.
      • *
      */ public abstract class RspArray extends RefCountedCow { public static final boolean debug = - Configuration.getInstance().getBooleanForClassWithDefault(RspArray.class, "debug", false); + Configuration.getInstance().getBooleanForClassWithDefault(RspArray.class, "debug", false); - private static final int doublingAllocThreshold = - Configuration.getInstance().getIntegerForClassWithDefault( + private static final int doublingAllocThreshold = Configuration.getInstance().getIntegerForClassWithDefault( RspArray.class, "doublingAllocThreshold", 1024); // minimum growth size after passing doubling alloc threshold - private static final int linearAllocStep = - Configuration.getInstance().getIntegerForClassWithDefault( + private static final int linearAllocStep = Configuration.getInstance().getIntegerForClassWithDefault( RspArray.class, "linearAllocStep", 1024); - // after doublingAllocThreshold, growth rate is (1 + 2^-n) with minimum step size of - // linearAllocStep (all rounded to nearest multiple of 1024) - private static final int logarithmicAllocGrowthRate = - Configuration.getInstance().getIntegerForClassWithDefault( + // after doublingAllocThreshold, growth rate is (1 + 2^-n) with minimum step size of linearAllocStep (all rounded to + // nearest multiple of 1024) + private static final int logarithmicAllocGrowthRate = Configuration.getInstance().getIntegerForClassWithDefault( RspArray.class, "logarithmicAllocGrowthRate", 4); - // when size > accNullThreshold, the cardinality cache array is populated, otherwise is kept - // null. + // when size > accNullThreshold, the cardinality cache array is populated, otherwise is kept null. 
static final int accNullThreshold = Configuration.getInstance().getIntegerForClassWithDefault( - RspArray.class, "accNullThreshold", 8); + RspArray.class, "accNullThreshold", 8); static { Assert.assertion(0 <= logarithmicAllocGrowthRate && logarithmicAllocGrowthRate < 32, - "RspArray.logarithmicAllocGrowthRate must be >= 0 and < 32"); + "RspArray.logarithmicAllocGrowthRate must be >= 0 and < 32"); } // BLOCK_SIZE should be a power of 2. public static final int BLOCK_SIZE = Container.MAX_RANGE; // BLOCK_LAST is used both as the last valid position in a block, and as a bitmask to get // the block offset for a long value. - // Note that since BLOCK_SIZE is a power of 2, BLOCK_SIZE - 1 is an all-bits-one mask for the - // bits + // Note that since BLOCK_SIZE is a power of 2, BLOCK_SIZE - 1 is an all-bits-one mask for the bits // below the single most significant bit in BLOCK_SIZE. public static final int BLOCK_LAST = (BLOCK_SIZE - 1); public static final int BITS_PER_BLOCK = Integer.numberOfTrailingZeros(BLOCK_SIZE); static { Assert.assertion(Integer.bitCount(BLOCK_SIZE) == 1, - "RspArray.BITS_PER_BLOCK should be a power of 2."); + "RspArray.BITS_PER_BLOCK should be a power of 2."); Assert.assertion((BLOCK_LAST & (BLOCK_LAST + 1)) == 0, - "BLOCK_LAST is not a bitmask."); + "BLOCK_LAST is not a bitmask."); } protected boolean shareContainers() { @@ -202,8 +189,8 @@ protected boolean shareContainers() { } protected abstract T make(final RspArray src, - final int startIdx, final long startOffset, - final int endIdx, final long endOffset); + final int startIdx, final long startOffset, + final int endIdx, final long endOffset); protected abstract T make(); @@ -224,27 +211,25 @@ public static long divBlockSize(final long v) { } public static int modBlockSize(final long v) { - // modulo by a constant power of two can't be optimized if the compiler can't tell if v is - // negative; + // modulo by a constant power of two can't be optimized if the compiler can't tell if v is 
negative; // we know it isn't. return (int) (v & (long) BLOCK_LAST); } /** - * Array of keys (in the long's higher 48 bits) and other span data (in the long's lower 16 - * bits) parallel to the spans array, mapping the long value in a given array position to the - * corresponding span in the same position. Please see the documentation for this class for - * details of the different cases for the lower 16 bits, depending on the type of span. + * Array of keys (in the long's higher 48 bits) and other span data (in the long's lower 16 bits) parallel to the + * spans array, mapping the long value in a given array position to the corresponding span in the same position. + * Please see the documentation for this class for details of the different cases for the lower 16 bits, depending + * on the type of span. * - * Values are kept in unsigned sorted order according to higher 16 bits to enable binary search - * of keys. + * Values are kept in unsigned sorted order according to higher 16 bits to enable binary search of keys. */ protected long[] spanInfos; /** - * Array of Spans parallel to the spanInfos array, mapping the same index to the corresponding - * span for the spanInfo. Please see the documentation for this class for the different possible - * types allowed and their meanings. + * Array of Spans parallel to the spanInfos array, mapping the same index to the corresponding span for the + * spanInfo. Please see the documentation for this class for the different possible types allowed and their + * meanings. 
*/ protected Object[] spans; @@ -321,8 +306,8 @@ protected void setFullBlockSpanRaw(final int i, final long key, final long flen) } protected static void setFullBlockSpanRaw( - final int i, final long[] spanInfos, final Object[] spans, - final long key, final long flen) { + final int i, final long[] spanInfos, final Object[] spans, + final long key, final long flen) { if (key < 0 || flen <= 0) { throw new IllegalArgumentException("i=" + i + ", key=" + key + ", flen=" + flen); } @@ -343,14 +328,12 @@ protected void setFullBlockSpan(final int i, final long key, final long flen) { public static long getPackedInfoLowBits(final ArrayContainer ac) { final long sharedBit = ac.isShared() ? SPANINFO_ARRAYCONTAINER_SHARED_BITMASK : 0L; - final long cardinalityBits = - SPANINFO_ARRAYCONTAINER_CARDINALITY_BITMASK & (long) ac.getCardinality(); + final long cardinalityBits = SPANINFO_ARRAYCONTAINER_CARDINALITY_BITMASK & (long) ac.getCardinality(); return sharedBit | cardinalityBits; } protected static void setContainerSpanRaw( - final long[] spanInfos, final Object[] spans, final int i, final long key, - final Container container) { + final long[] spanInfos, final Object[] spans, final int i, final long key, final Container container) { if (container instanceof ArrayContainer) { final ArrayContainer ac = (ArrayContainer) container; spanInfos[i] = key | getPackedInfoLowBits(ac); @@ -367,7 +350,7 @@ protected void setContainerSpanRaw(final int i, final long key, final Container } protected void appendSharedContainer( - final RspArray other, final long otherSpanInfo, final Container container) { + final RspArray other, final long otherSpanInfo, final Container container) { if (size > 0) { tryOptimizeContainer(size - 1); } @@ -377,8 +360,7 @@ protected void appendSharedContainer( } protected void appendSharedContainerMaybePacked( - final RspArray other, final int otherIdx, final long otherSpanInfo, - final Object otherContainer) { + final RspArray other, final int otherIdx, final 
long otherSpanInfo, final Object otherContainer) { if (size > 0) { tryOptimizeContainer(size - 1); } @@ -388,8 +370,7 @@ protected void appendSharedContainerMaybePacked( } protected void setSharedContainerMaybePackedRaw( - final int i, final RspArray src, final int srcIdx, final long srcSpanInfo, - final Object srcContainer) { + final int i, final RspArray src, final int srcIdx, final long srcSpanInfo, final Object srcContainer) { if (srcContainer instanceof short[]) { spanInfos[i] = (src.spanInfos[srcIdx] |= SPANINFO_ARRAYCONTAINER_SHARED_BITMASK); spans[i] = srcContainer; @@ -399,35 +380,34 @@ protected void setSharedContainerMaybePackedRaw( } protected void insertSharedContainer( - final int i, final RspArray other, final long otherSpanInfo, - final Container otherContainer) { + final int i, final RspArray other, final long otherSpanInfo, final Container otherContainer) { open(i); setSharedContainerRaw(i, other, otherSpanInfo, otherContainer); modifiedSpan(i); } protected void setSharedContainerRaw( - final int i, final RspArray other, final long key, final Container container) { + final int i, final RspArray other, final long key, final Container container) { setContainerSpanRaw(i, key, other.shareContainer(container)); } protected void copyKeyAndSpanStealingContainers( - final int srcIdx, final long[] srcSpanInfos, final Object[] srcSpans, - final int dstIdx, final long[] dstSpanInfos, final Object[] dstSpans) { + final int srcIdx, final long[] srcSpanInfos, final Object[] srcSpans, + final int dstIdx, final long[] dstSpanInfos, final Object[] dstSpans) { dstSpanInfos[dstIdx] = srcSpanInfos[srcIdx]; dstSpans[dstIdx] = srcSpans[srcIdx]; } private static final long SPANINFO_ARRAYCONTAINER_SHARED_BITMASK = (1L << 15); private static final long SPANINFO_ARRAYCONTAINER_CARDINALITY_BITMASK = - ~SPANINFO_ARRAYCONTAINER_SHARED_BITMASK & (long) BLOCK_LAST; + ~SPANINFO_ARRAYCONTAINER_SHARED_BITMASK & (long) BLOCK_LAST; // shiftAmount is a multiple of BLOCK_SIZE. 
protected void copyKeyAndSpanMaybeSharing( - final long shiftAmount, - final RspArray src, final int srcIdx, - final long[] dstSpanInfos, final Object[] dstSpans, final int dstIdx, - final boolean tryShare) { + final long shiftAmount, + final RspArray src, final int srcIdx, + final long[] dstSpanInfos, final Object[] dstSpans, final int dstIdx, + final boolean tryShare) { Object span = src.spans[srcIdx]; if (tryShare && src.shareContainers()) { if (span instanceof short[]) { @@ -442,8 +422,8 @@ protected void copyKeyAndSpanMaybeSharing( } protected void copyKeyAndSpanMaybeSharing( - final RspArray src, final int srcIdx, - final long[] dstSpanInfos, final Object[] dstSpans, final int dstIdx) { + final RspArray src, final int srcIdx, + final long[] dstSpanInfos, final Object[] dstSpans, final int dstIdx) { copyKeyAndSpanMaybeSharing(0, src, srcIdx, dstSpanInfos, dstSpans, dstIdx, true); } @@ -453,7 +433,7 @@ protected void setContainerSpan(final int i, final long key, final Container c) } protected void setContainerSpan(final Container oldContainer, final int i, final long key, - final Container newContainer) { + final Container newContainer) { if (oldContainer != newContainer || oldContainer instanceof ArrayContainer) { setContainerSpanRaw(i, key, newContainer); } @@ -468,11 +448,10 @@ protected void setContainerSpan(final Container oldContainer, final int i, final // that needs to do some computations on spans; to avoid keeping any live references // to objects it is an AutoCloseable. protected static final class SpanView extends ArrayContainer - implements AutoCloseable { + implements AutoCloseable { private final SpanViewRecycler recycler; // The original array and index for which we loaded; we need to keep the reference - // for the cases where we need to update it (eg, setting a copy on write shared flag for an - // ArrayContainer + // for the cases where we need to update it (eg, setting a copy on write shared flag for an ArrayContainer // stored as short[]). 
private RspArray arr; private int arrIdx; @@ -526,8 +505,7 @@ public void init(final RspArray arr, final int arrIdx) { init(arr, arrIdx, arr.spanInfos[arrIdx], arr.spans[arrIdx]); } - public void init(final RspArray arr, final int arrIdx, final long spanInfo, - final Object span) { + public void init(final RspArray arr, final int arrIdx, final long spanInfo, final Object span) { this.arr = arr; this.arrIdx = arrIdx; this.spanInfo = spanInfo; @@ -559,21 +537,19 @@ protected static boolean isSingletonSpan(final Object o) { } /** - * Cache of accumulated cardinalities. Parallel array to keys and spans. acc[i] == total - * cardinality for { span[0], span[1], ..., span[i] }. Should be updated by clients after - * mutating operations by calling ensureCardinalityCache, so that public methods on entry can - * assume it is up to date, ie maxAccIdx == size - 1 is a class invariant. Note this class own - * mutators do not update the cache themselves as clients can perform a series of update - * operations and only call ensureCardinalityCache at the end. + * Cache of accumulated cardinalities. Parallel array to keys and spans. acc[i] == total cardinality for { span[0], + * span[1], ..., span[i] }. Should be updated by clients after mutating operations by calling + * ensureCardinalityCache, so that public methods on entry can assume it is up to date, ie maxAccIdx == size - 1 is + * a class invariant. Note this class own mutators do not update the cache themselves as clients can perform a + * series of update operations and only call ensureCardinalityCache at the end. * * For a small number of keys, this is not created an kept null. */ long[] acc; /** - * If acc != null, highest index in acc that is valid, -1 if none. if acc == null: * if - * cardinality fits in an int, the actual cardinality. * if cardinality does not fit in an int, - * -1. + * If acc != null, highest index in acc that is valid, -1 if none. 
if acc == null: * if cardinality fits in an int, + * the actual cardinality. * if cardinality does not fit in an int, -1. */ int cardData; @@ -710,9 +686,9 @@ protected RspArray(final RspArray other) { // shiftAmount is a multiple of BLOCK_SIZE. private void copySharingSpansFrom(final RspArray other, final long shiftAmount) { final int newSize = - (other.size >= 2 * INITIAL_CAPACITY && other.size < other.spanInfos.length / 2) - ? other.spanInfos.length / 2 - : other.spanInfos.length; + (other.size >= 2 * INITIAL_CAPACITY && other.size < other.spanInfos.length / 2) + ? other.spanInfos.length / 2 + : other.spanInfos.length; spanInfos = new long[newSize]; spans = new Object[newSize]; for (int i = 0; i < other.size; ++i) { @@ -736,24 +712,21 @@ private void maybeSetAcc(final int i, final long accumCard) { } public RspArray( - final RspArray src, - final int startIdx, final long startOffset, - final int endIdx, final long endOffset) { - // an initial full block span that needs to be split may result in a sequence of spans as - // follows, + final RspArray src, + final int startIdx, final long startOffset, + final int endIdx, final long endOffset) { + // an initial full block span that needs to be split may result in a sequence of spans as follows, // any of which may or may not be present: // (a) an initial container. // (b) an intermediate full block span. // (c) an ending container (this will only exist if endIdx == startIdx). - // the following variables represent the computed cardinality (or full span len) of each, - // zero if not present. + // the following variables represent the computed cardinality (or full span len) of each, zero if not present. 
int startSplitInitialContainerCard = 0; long startSplitIntermediateFullBlockSpanLen = 0; long startSplitIntermediateFullBlockSpanCard = 0; int startSplitEndingContainerCard = 0; final Object firstSpan = src.spans[startIdx]; - // If either of the spans at startIndex or endIndex are a full block span of more than one - // block + // If either of the spans at startIndex or endIndex are a full block span of more than one block // that needs to be broken into an RB container and the remaining full block span, // that would affect our resulting size. int sz = endIdx - startIdx + 1; @@ -785,8 +758,7 @@ public RspArray( if (containerStart == containerEndInclusive) { setSingletonSpanRaw(0, startKey); } else { - final Container c = - Container.rangeOfOnes(containerStart, containerEndInclusive + 1); + final Container c = Container.rangeOfOnes(containerStart, containerEndInclusive + 1); setContainerSpanRaw(0, keyForFirstBlock, c); } } @@ -798,8 +770,7 @@ public RspArray( } else { resultingCardFromFirstSpan = flenFirstSpan * BLOCK_SIZE - startOffset; } - int n = 0; // how many containers we end up with after (potentially) splitting the - // first. + int n = 0; // how many containers we end up with after (potentially) splitting the first. final long startOffsetModBlockSize = RspArray.modBlockSize(startOffset); if (startOffsetModBlockSize == 0) { startSplitInitialContainerCard = 0; @@ -820,8 +791,7 @@ public RspArray( sz += n - 1; } boolean lastSpanIsFull = false; // will set below to true if we find out otherwise. - long deltaLast = 0; // cardinality of the span(s) resulting from the split of a last full - // block span. + long deltaLast = 0; // cardinality of the span(s) resulting from the split of a last full block span. 
int copyLastIdx = endIdx; if (endIdx > startIdx && endOffset < src.getSpanCardinalityAtIndexMaybeAcc(endIdx) - 1) { copyLastIdx = endIdx - 1; @@ -855,8 +825,7 @@ public RspArray( if (startSplitInitialContainerCard == 1) { setSingletonSpanRaw(0, nextKey | BLOCK_LAST); } else { - final Container c = Container - .rangeOfOnes(BLOCK_SIZE - startSplitInitialContainerCard, BLOCK_SIZE); + final Container c = Container.rangeOfOnes(BLOCK_SIZE - startSplitInitialContainerCard, BLOCK_SIZE); setContainerSpanRaw(0, nextKey, c); } nextKey = nextKey(nextKey); @@ -894,8 +863,7 @@ public RspArray( if (isSingletonSpan(spanSrc)) { if (startOffset != 0) { throw new IllegalArgumentException( - "startOffset=" + startOffset - + " and span at startIdx has a single element."); + "startOffset=" + startOffset + " and span at startIdx has a single element."); } setSingletonSpanRaw(0, src.getSingletonSpanValue(startIdx)); accSum = 1; @@ -907,11 +875,10 @@ public RspArray( final int card = (int) src.getSpanCardinalityAtIndexMaybeAcc(startIdx); if (endOffset + 1 < card) { if (startOffset == endOffset) { - setSingletonSpanRaw(0, startIdxKey - | unsignedShortToInt(csrc.select((int) startOffset))); + setSingletonSpanRaw(0, + startIdxKey | unsignedShortToInt(csrc.select((int) startOffset))); } else { - final Container c = - csrc.select((int) startOffset, (int) (endOffset + 1)); + final Container c = csrc.select((int) startOffset, (int) (endOffset + 1)); setContainerSpanRaw(0, startIdxKey, c); } accSum = endOffset - startOffset + 1; @@ -927,8 +894,7 @@ public RspArray( final int card = (int) src.getSpanCardinalityAtIndexMaybeAcc(startIdx); final int startOffsetInt = (int) startOffset; if (startOffsetInt + 1 == card) { - setSingletonSpanRaw(0, - startIdxKey | unsignedShortToInt(csrc.select(startOffsetInt))); + setSingletonSpanRaw(0, startIdxKey | unsignedShortToInt(csrc.select(startOffsetInt))); } else { final Container c = csrc.select(startOffsetInt, card); setContainerSpanRaw(0, startIdxKey, c); @@ 
-1014,10 +980,8 @@ public RspArray( maybeSetAcc(i, accSum); } } else { - // Can't happen; a single element span should have been copied over in its entirety - // earlier. - throw new IllegalStateException( - "endIdx=" + endIdx + ", endOffset=" + endOffset + ", key=" + srcKey); + // Can't happen; a single element span should have been copied over in its entirety earlier. + throw new IllegalStateException("endIdx=" + endIdx + ", endOffset=" + endOffset + ", key=" + srcKey); } } if (acc == null) { @@ -1099,37 +1063,36 @@ default long spanKey() { Object span(); /** - * Advances the pointer to the next span in the linear sequence. If the span before the call - * was the last one, a subsequent call to hasNext will return false. + * Advances the pointer to the next span in the linear sequence. If the span before the call was the last one, a + * subsequent call to hasNext will return false. */ void next(); /** - * This method should be called: * After the pointer is created and before calling any other - * methods; if it returns false, calling any other methods results in undefined behavior. * - * Right after a call to any advance method, similar to above. + * This method should be called: * After the pointer is created and before calling any other methods; if it + * returns false, calling any other methods results in undefined behavior. * Right after a call to any advance + * method, similar to above. * * @return true if the pointer currently points to a valid span. */ boolean hasNext(); /** - * Advances the pointer forward to the next span in the sequence whose interval could have - * it include the key argument. + * Advances the pointer forward to the next span in the sequence whose interval could have it include the key + * argument. * - * More specifically, the current span position is effectively advanced forward as long as - * the provided key is bigger than the right endpoint for the current span. 
+ * More specifically, the current span position is effectively advanced forward as long as the provided key is + * bigger than the right endpoint for the current span. * * This operation is O(log(cardinality)). * - * Note this may not move the pointer if the current span already satisfies the constraint, - * or it may invalidate the pointer if the key is to the right of the last valid span. Note - * also advance should only be called on a non-empty cursor, after having called hasNext() - * and next() at least once. + * Note this may not move the pointer if the current span already satisfies the constraint, or it may invalidate + * the pointer if the key is to the right of the last valid span. Note also advance should only be called on a + * non-empty cursor, after having called hasNext() and next() at least once. * * @param key key to search forward from the current span position. - * @return false if the cursor is exhausted and there was no span satisfying the restriction - * found, true otherwise. + * @return false if the cursor is exhausted and there was no span satisfying the restriction found, true + * otherwise. */ boolean advance(long key); @@ -1150,16 +1113,16 @@ public interface SpanCursorForward extends SpanCursor { void prev(); /** - * Advances the pointer forward to the last span in the sequence whose interval range has a - * value v such that comp.directionToTargetFrom(v) >= 0. + * Advances the pointer forward to the last span in the sequence whose interval range has a value v such that + * comp.directionToTargetFrom(v) >= 0. * * This operation is O(log(cardinality)). * - * This operation never invalidates a valid cursor, it may only move it forward from its - * current position but never exhaust it. + * This operation never invalidates a valid cursor, it may only move it forward from its current position but + * never exhaust it. 
* - * Note also search should only be called on a non-empty cursor, after having called - * hasNext() and next() at least once. + * Note also search should only be called on a non-empty cursor, after having called hasNext() and next() at + * least once. * * @param comp a Comparator used to search forward from the current span position. */ @@ -1261,8 +1224,7 @@ public RspRangeIterator getRangeIterator() { return new RspRangeIterator(new SpanCursorForwardImpl(this)); } - public RspRangeBatchIterator getRangeBatchIterator(final long initialSeek, - final long maxCount) { + public RspRangeBatchIterator getRangeBatchIterator(final long initialSeek, final long maxCount) { return new RspRangeBatchIterator(new SpanCursorForwardImpl(this), initialSeek, maxCount); } @@ -1372,8 +1334,7 @@ protected void ensureSizeCanGrowBy(final int n) { newCapacity = 2 * newCapacity; } while (newCapacity < minCapacity) { - final int rawStep = - Math.max(linearAllocStep, newCapacity >> logarithmicAllocGrowthRate); + final int rawStep = Math.max(linearAllocStep, newCapacity >> logarithmicAllocGrowthRate); newCapacity += (rawStep + 1023) & (~1023); } realloc(newCapacity); @@ -1395,8 +1356,8 @@ private void realloc(final int newCapacity) { } /** - * @param compactFactor if k == 0, compact if count < capacity. k > 0, compact if (capacity - - * count > (capacity >> k). + * @param compactFactor if k == 0, compact if count < capacity. k > 0, compact if (capacity - count > (capacity >> + * k). */ public void tryCompactUnsafe(final int compactFactor) { if (compactFactor == 0) { @@ -1454,13 +1415,11 @@ public static long getFullBlockSpanLen(final long spanInfo, final Object span) { } /** - * @return if the key is included in some existing span, returns the index of that span. if the - * key is not included in any existing span, returns -(p - 1) where p is the position a - * span for the key would be inserted. + * @return if the key is included in some existing span, returns the index of that span. 
if the key is not included + * in any existing span, returns -(p - 1) where p is the position a span for the key would be inserted. * - * Note that, since a span's covered interval may include multiple blocks, a key - * contained by a span may be different than its first key (if the span includes more - * than one block). + * Note that, since a span's covered interval may include multiple blocks, a key contained by a span may be + * different than its first key (if the span includes more than one block). */ public int getSpanIndex(final long key) { return getSpanIndex(0, key); @@ -1613,7 +1572,7 @@ private static boolean shouldOptimize(final Container c) { return true; } if (!(c instanceof ArrayContainer) && - !(c instanceof BitmapContainer)) { + !(c instanceof BitmapContainer)) { return false; } final int card = c.getCardinality(); @@ -1713,15 +1672,13 @@ private void checkCompact() { } /** - * Collapse an inner range of spans, by overwriting it with a range of spans from a certain - * later position till the end, and reducing size accordingly to the number of spans removed - * ({@code size -= isrc - idst}). The resulting array will remove all spans between the original - * values at {@code idst .. (isrc - 1)} inclusive. + * Collapse an inner range of spans, by overwriting it with a range of spans from a certain later position till the + * end, and reducing size accordingly to the number of spans removed ({@code size -= isrc - idst}). The resulting + * array will remove all spans between the original values at {@code idst .. (isrc - 1)} inclusive. * * @param idst specifies the beginning position where the source range will move. - * @param isrc specifies the source range to copy over as [isrc, size) (eg, from isrc inclusive - * till the end). If isrc == size no actual spans are copied, resulting in a size - * reduction only. + * @param isrc specifies the source range to copy over as [isrc, size) (eg, from isrc inclusive till the end). 
If + * isrc == size no actual spans are copied, resulting in a size reduction only. */ private void collapseRange(final int idst, final int isrc) { int newSize = size - (isrc - idst); @@ -1758,16 +1715,15 @@ private void collapseRange(final int idst, final int isrc) { /** * - * @param newSpanIdx an index, as returned by getSpanAtIndex(k). Note this can be negative, in - * which case this is an insertion (existing elements pushed to the right as necessary). + * @param newSpanIdx an index, as returned by getSpanAtIndex(k). Note this can be negative, in which case this is an + * insertion (existing elements pushed to the right as necessary). * @param newSpanKey the key. * @param newSpanFlen the number of 2^16 intervals. * * @return the (positive) index where the span was actually inserted. */ - public int setOrInsertFullBlockSpanAtIndex(final int newSpanIdx, final long newSpanKey, - final long newSpanFlen, - final MutableObject madeNullSpansMu) { + public int setOrInsertFullBlockSpanAtIndex(final int newSpanIdx, final long newSpanKey, final long newSpanFlen, + final MutableObject madeNullSpansMu) { final int ii; // set or insert position. long newflen = newSpanFlen; // may grow if we merge to our right. final int idxForFirstKeyBigger; // first index for a key bigger than newSpanKey. @@ -1786,15 +1742,13 @@ public int setOrInsertFullBlockSpanAtIndex(final int newSpanIdx, final long newS if (idxForFirstKeyBigger >= size) { lastIdx = ii; } else { - final long newSpanLastKey = newSpanKey + (newSpanFlen - 1) * BLOCK_SIZE; // New span's - // last key. + final long newSpanLastKey = newSpanKey + (newSpanFlen - 1) * BLOCK_SIZE; // New span's last key. final int j = getSpanIndex(idxForFirstKeyBigger, newSpanLastKey); final int idxForLastKeyInsideNewSpan; if (j >= 0) { idxForLastKeyInsideNewSpan = j; } else { - // One before (-j-1), which is the first position whose key is > newSpanLastKey. - // Note this may be -1. 
+ // One before (-j-1), which is the first position whose key is > newSpanLastKey. Note this may be -1. idxForLastKeyInsideNewSpan = -j - 2; } // We may need to merge with a full block span extending to the right. @@ -1804,11 +1758,9 @@ public int setOrInsertFullBlockSpanAtIndex(final int newSpanIdx, final long newS final long rightSpanInfo = spanInfos[idxForFirstKeyOutsideNewSpan]; final long rightKey = spanInfoToKey(rightSpanInfo); if (rightKey - newSpanLastKey <= BLOCK_SIZE) { - final long rightLen = - getFullBlockSpanLen(rightSpanInfo, spans[idxForFirstKeyOutsideNewSpan]); + final long rightLen = getFullBlockSpanLen(rightSpanInfo, spans[idxForFirstKeyOutsideNewSpan]); if (rightLen > 0) { - final long rightSpanLastKey = - getKeyForLastBlockInFullSpan(rightKey, rightLen); + final long rightSpanLastKey = getKeyForLastBlockInFullSpan(rightKey, rightLen); if (rightSpanLastKey > newSpanLastKey) { newflen += distanceInBlocks(newSpanLastKey, rightSpanLastKey); rightDone = true; @@ -1819,11 +1771,9 @@ public int setOrInsertFullBlockSpanAtIndex(final int newSpanIdx, final long newS } if (!rightDone) { if (idxForLastKeyInsideNewSpan >= 0) { - // we did not merge with a full block span to the right; we may need to absorb - // some len. + // we did not merge with a full block span to the right; we may need to absorb some len. 
final long spanInfo = spanInfos[idxForLastKeyInsideNewSpan]; - final long len = - getFullBlockSpanLen(spanInfo, spans[idxForLastKeyInsideNewSpan]); + final long len = getFullBlockSpanLen(spanInfo, spans[idxForLastKeyInsideNewSpan]); if (len > 0) { final long spanKey = spanInfoToKey(spanInfo); final long spanLastKey = getKeyForLastBlockInFullSpan(spanKey, len); @@ -1927,8 +1877,7 @@ private void tryOptimizeContainer(final int i) { final short[] contents = (short[]) o; if (contents.length < 3 || contents.length > 12) { final long spanInfo = spanInfos[i]; - try (SpanView res = - workDataPerThread.get().borrowSpanView(this, i, spanInfo, contents)) { + try (SpanView res = workDataPerThread.get().borrowSpanView(this, i, spanInfo, contents)) { final Container c = res.getContainer(); final Container prevContainer = c.runOptimize(); if (prevContainer != c) { @@ -1995,8 +1944,8 @@ private void open(final int i) { } /** - * Insert a full block span at position i with key k, pushing the existing elements to the - * right. The caller should ensure that the key order is preserved by this operation. + * Insert a full block span at position i with key k, pushing the existing elements to the right. The caller should + * ensure that the key order is preserved by this operation. * * @param i position in which to insert * @param key key for the span to be inserted @@ -2008,8 +1957,8 @@ public void insertFullBlockSpanAtIndex(final int i, final long key, final long f } /** - * Insert a new singleton span at position i with key k, pushing the existing elements to the - * right. The caller should ensure that the key order is preserved by this operation. + * Insert a new singleton span at position i with key k, pushing the existing elements to the right. The caller + * should ensure that the key order is preserved by this operation. 
* * @param i position in which to insert * @param value the singleton value for the span to be inserted @@ -2020,8 +1969,8 @@ public void insertSingletonAtIndex(final int i, final long value) { } /** - * Insert a container at position i with key k, pushing the existing elements to the right. The - * caller should ensure that the key order is preserved by this operation. + * Insert a container at position i with key k, pushing the existing elements to the right. The caller should ensure + * that the key order is preserved by this operation. * * @param i position in which to insert * @param key key for the span to be inserted @@ -2054,11 +2003,9 @@ public void replaceSpanAtIndex(final int i, final ArraysBuf buf) { modifiedSpan(i); } - // Modeled after the version in RB Util class, which in turn is modeled on the - // Arrays.binarySearch API. + // Modeled after the version in RB Util class, which in turn is modeled on the Arrays.binarySearch API. // Like in them, toIndex is exclusive. - static int unsignedBinarySearch(final IntToLongFunction fun, final int fromIndex, - final int toIndex, final long k) { + static int unsignedBinarySearch(final IntToLongFunction fun, final int fromIndex, final int toIndex, final long k) { // next line accelerates the possibly common case where the value would // be inserted at the end if (toIndex > 0 && Long.compareUnsigned(fun.applyAsLong(toIndex - 1), k) < 0) { @@ -2132,8 +2079,7 @@ private int getIndexForRankWithAcc(final int fromIndex, final long pos) { return (i < 0) ? -i - 1 : i; } - private int getIndexForRankNoAcc(final int fromIndex, final long pos, - final MutableLong prevCardMu) { + private int getIndexForRankNoAcc(final int fromIndex, final long pos, final MutableLong prevCardMu) { int i = fromIndex; final long posp1 = pos + 1; long card = (prevCardMu == null) ? 
0 : prevCardMu.longValue(); @@ -2189,8 +2135,7 @@ public long get(final long pos) { return get(rankIndex, pos - prevCard); } - public void getKeysForPositions(final PrimitiveIterator.OfLong inputPositions, - final LongConsumer outputKeys) { + public void getKeysForPositions(final PrimitiveIterator.OfLong inputPositions, final LongConsumer outputKeys) { int fromIndex = 0; final long cardinality = isCardinalityCached() ? getCardinality() : -1; final MutableLong prevCardMu = (acc == null) ? new MutableLong(0) : null; @@ -2229,8 +2174,7 @@ long get(final int idx, final long offset) { try (SpanView view = workDataPerThread.get().borrowSpanView(this, idx)) { if (view.isSingletonSpan()) { if (offset != 0) { - throw new IllegalArgumentException( - "Invalid offset=" + offset + " for index=" + idx); + throw new IllegalArgumentException("Invalid offset=" + offset + " for index=" + idx); } return view.getSingletonSpanValue(); } @@ -2242,8 +2186,7 @@ long get(final int idx, final long offset) { // flen == 0 final int sv = (int) offset; if (sv != offset) { - throw new IllegalArgumentException( - "Invalid offset=" + offset + " for index=" + idx); + throw new IllegalArgumentException("Invalid offset=" + offset + " for index=" + idx); } final short lowBits = view.getContainer().select(sv); return paste(highBits, lowBits); @@ -2258,8 +2201,7 @@ final long cardinalityBeforeMaybeAcc(final int idx) { return cardinalityBeforeMaybeAcc(idx, 0, 0); } - final long cardinalityBeforeNoAcc(final int idx, final int knownIdx, - final long knownBeforeCard) { + final long cardinalityBeforeNoAcc(final int idx, final int knownIdx, final long knownBeforeCard) { int i = knownIdx; long card = knownBeforeCard; while (i < idx) { @@ -2269,8 +2211,7 @@ final long cardinalityBeforeNoAcc(final int idx, final int knownIdx, return card; } - final long cardinalityBeforeMaybeAcc(final int idx, final int knownIdx, - final long knownBeforeCard) { + final long cardinalityBeforeMaybeAcc(final int idx, final int 
knownIdx, final long knownBeforeCard) { if (acc != null) { return cardinalityBeforeWithAcc(idx); } @@ -2297,10 +2238,8 @@ interface FindOutput { } // returns false if val is to the left of the first value in the span at startIdx; - // otherwise calls setResult on out with the appropriate position for val or the first position - // after it. - boolean findOrNext(final int startIdx, final int endIdxExclusive, final long val, - final FindOutput out) { + // otherwise calls setResult on out with the appropriate position for val or the first position after it. + boolean findOrNext(final int startIdx, final int endIdxExclusive, final long val, final FindOutput out) { final int ki = getSpanIndex(startIdx, endIdxExclusive, highBits(val)); if (ki < 0) { final int i = ~ki; @@ -2354,10 +2293,8 @@ boolean findOrNext(final int startIdx, final int endIdxExclusive, final long val } // returns false if val is to the left of the first value in the span at startIdx; - // otherwise calls setResult on out with the appropriate position for val or the last position - // before it. - boolean findOrPrev(final int startIdx, final int endIdxExclusive, final long val, - final FindOutput out) { + // otherwise calls setResult on out with the appropriate position for val or the last position before it. + boolean findOrPrev(final int startIdx, final int endIdxExclusive, final long val, final FindOutput out) { final int ki = getSpanIndex(startIdx, endIdxExclusive, highBits(val)); if (ki < 0) { final int i = ~ki; @@ -2469,7 +2406,7 @@ public boolean subsetOf(final RspArray other) { } // other is not empty either. 
if (isCardinalityCached() && other.isCardinalityCached() && - getCardinality() > other.getCardinality()) { + getCardinality() > other.getCardinality()) { return false; } return subsetOf(this, other); @@ -2494,8 +2431,7 @@ private static boolean subsetOf(final RspArray r1, final RspArray r2) { } final long kend1 = getKeyForLastBlockInSpan(k1, flen1); final long k2 = view2.getKey(); - // Note getKeyForLastBlockInSpan works both for full block spans and rb - // containers + // Note getKeyForLastBlockInSpan works both for full block spans and rb containers // (in that later case it just returns the single block key). final long kend2 = getKeyForLastBlockInSpan(k2, flen2); if (uLess(kend2, kend1)) { @@ -2620,8 +2556,7 @@ private static long getKeyForLastBlockInFullSpan(final long spanKey, final long } /** - * Returns true if any value in this RspArray is contained inside the range [first, last], false - * otherwise. + * Returns true if any value in this RspArray is contained inside the range [first, last], false otherwise. * * @param first First value in the range to check * @param last Last value in the range to check @@ -2684,7 +2619,7 @@ public int overlapsRange(final int iStart, final long start, final long end) { private static boolean overlaps(final RspArray r1, final RspArray r2) { if (r1.keyForLastBlock() < r2.keyForFirstBlock() - || r2.keyForLastBlock() < r1.keyForFirstBlock()) { + || r2.keyForLastBlock() < r1.keyForFirstBlock()) { return false; } int p2 = 0; @@ -2770,16 +2705,16 @@ private static boolean overlaps(final RspArray r1, final RspArray r2) { /** * OrEquals a single span into this container. * - * @param shiftAmount an amount to shift the keys in the other container; shiftAmount should be - * a multiple of BLOCK_SIZE. + * @param shiftAmount an amount to shift the keys in the other container; shiftAmount should be a multiple of + * BLOCK_SIZE. 
* @param other the other RspArray to ask for container sharing * @param otherIdx the index into other for the span to apply or to. * @param startPos the first position to start looking for orKey in this container. * @return the index in this container to continue searches for keys after (orKey, orSpan). */ private int orEqualsSpan(final long shiftAmount, final RspArray other, final int otherIdx, - final int startPos, final MutableObject sortedRangesMu, - final WorkData wd) { + final int startPos, final MutableObject sortedRangesMu, + final WorkData wd) { final Object otherSpan = other.spans[otherIdx]; final long otherSpanInfo = other.getSpanInfo(otherIdx) + shiftAmount; try (SpanView otherView = wd.borrowSpanView(other, otherIdx, otherSpanInfo, otherSpan)) { @@ -2787,10 +2722,8 @@ private int orEqualsSpan(final long shiftAmount, final RspArray other, final int final long otherflen = otherView.getFullBlockSpanLen(); final int orIdx = getSpanIndex(startPos, otherKey); if (otherflen > 0) { - final int j = - setOrInsertFullBlockSpanAtIndex(orIdx, otherKey, otherflen, sortedRangesMu); - // can't increment for return since it may have been absorbed by a longer full block - // span. + final int j = setOrInsertFullBlockSpanAtIndex(orIdx, otherKey, otherflen, sortedRangesMu); + // can't increment for return since it may have been absorbed by a longer full block span. return j; } if (orIdx < 0) { @@ -2855,8 +2788,7 @@ private int orEqualsSpan(final long shiftAmount, final RspArray other, final int } } if (orResultContainer.isAllOnes()) { - final int j = - setOrInsertFullBlockSpanAtIndex(orIdx, otherKey, 1, sortedRangesMu); + final int j = setOrInsertFullBlockSpanAtIndex(orIdx, otherKey, 1, sortedRangesMu); // can't increment since it may have been merged with another span. 
return j; } @@ -2896,8 +2828,7 @@ void setIntArray(final int[] arr) { SortedRangesInt getSortedRanges() { if (sortedRangesInt == null) { sortedRangesInt = new SortedRangesInt( - Math.max(16 * 4 * 1024 / Integer.BYTES, SortedRanges.INT_DENSE_MAX_CAPACITY), - 0); + Math.max(16 * 4 * 1024 / Integer.BYTES, SortedRanges.INT_DENSE_MAX_CAPACITY), 0); } sortedRangesInt.clear(); return sortedRangesInt; @@ -2906,8 +2837,7 @@ SortedRangesInt getSortedRanges() { SortedRangesInt getMadeNullSortedRanges() { if (madeNullSortedRanges == null) { madeNullSortedRanges = new SortedRangesInt( - Math.max(16 * 4 * 1024 / Integer.BYTES, SortedRanges.INT_DENSE_MAX_CAPACITY), - 0); + Math.max(16 * 4 * 1024 / Integer.BYTES, SortedRanges.INT_DENSE_MAX_CAPACITY), 0); } madeNullSortedRanges.clear(); return madeNullSortedRanges; @@ -2922,8 +2852,7 @@ ArraysBuf getArraysBuf(final int minCapacity) { return rspArraysBuf; } - public SpanView borrowSpanView(final RspArray arr, final int arrIdx, final long spanInfo, - final Object span) { + public SpanView borrowSpanView(final RspArray arr, final int arrIdx, final long spanInfo, final Object span) { final SpanView sv = borrowSpanView(); sv.init(arr, arrIdx, spanInfo, span); return sv; @@ -2953,8 +2882,7 @@ public void returnSpanView(final SpanView sv) { } } - protected static final ThreadLocal workDataPerThread = - ThreadLocal.withInitial(WorkData::new); + protected static final ThreadLocal workDataPerThread = ThreadLocal.withInitial(WorkData::new); private static final class WorkDataHolder { private WorkData wd = null; @@ -2968,8 +2896,8 @@ public WorkData get() { } /** - * For every element in other, add element to this RspArray. The argument won't be modified - * (with the possible exclusion of sharing some of its containers Copy On Write). + * For every element in other, add element to this RspArray. The argument won't be modified (with the possible + * exclusion of sharing some of its containers Copy On Write). 
* * @param other the RspArray to add to this. */ @@ -2978,9 +2906,9 @@ public void orEqualsUnsafeNoWriteCheck(final RspArray other) { } /** - * For every element in other, add (element + shiftAmount) to this RspArray. Note shiftAmount is - * assumed to be a multiple of BLOCK_SIZE. The argument won't be modified (with the possible - * exclusion of sharing some of its containers Copy On Write). + * For every element in other, add (element + shiftAmount) to this RspArray. Note shiftAmount is assumed to be a + * multiple of BLOCK_SIZE. The argument won't be modified (with the possible exclusion of sharing some of its + * containers Copy On Write). * * @param shiftAmount the amount to add to each key in other before insertion * @param other the base keys to add in the (key + shiftAmount) formula for insertion. @@ -3010,8 +2938,7 @@ public void orEqualsShiftedUnsafeNoWriteCheck(final long shiftAmount, final RspA // Total number of elements stored in idxPairs array; should always be even. int idxPairsCount = 0; - // As we check containers in others, the indices of the ones that were taken care of by the - // first + // As we check containers in others, the indices of the ones that were taken care of by the first // pass, and therefore can be skipped by the second pass, are stored here. 
SortedRanges secondPassSkips = wd.getSortedRanges(); boolean tryAddToSecondPassSkips = true; @@ -3094,19 +3021,16 @@ public void orEqualsShiftedUnsafeNoWriteCheck(final long shiftAmount, final RspA final int thisIdx = idxPairs[--idxPairsCount]; final int otherIdx = idxPairs[--idxPairsCount]; for (int i = lastMoveRangeIdx; i >= thisIdx; --i) { - copyKeyAndSpanStealingContainers(i, spanInfos, spans, dstIdx, newSpanInfos, - newSpans); + copyKeyAndSpanStealingContainers(i, spanInfos, spans, dstIdx, newSpanInfos, newSpans); --dstIdx; } - copyKeyAndSpanMaybeSharing(shiftAmount, other, otherIdx, newSpanInfos, newSpans, - dstIdx, true); + copyKeyAndSpanMaybeSharing(shiftAmount, other, otherIdx, newSpanInfos, newSpans, dstIdx, true); --dstIdx; lastMoveRangeIdx = thisIdx - 1; } if (!inPlace) { for (int i = lastMoveRangeIdx; i >= 0; --i) { - copyKeyAndSpanStealingContainers(i, spanInfos, spans, dstIdx, newSpanInfos, - newSpans); + copyKeyAndSpanStealingContainers(i, spanInfos, spans, dstIdx, newSpanInfos, newSpans); --dstIdx; } spanInfos = newSpanInfos; @@ -3144,8 +3068,7 @@ public void orEqualsShiftedUnsafeNoWriteCheck(final long shiftAmount, final RspA collectRemovedIndicesIfAny(sortedRangesMu); } - protected void markIndexAsRemoved(final MutableObject madeNullSpansMu, - final int index) { + protected void markIndexAsRemoved(final MutableObject madeNullSpansMu, final int index) { spanInfos[index] = -1; modifiedSpan(index); SortedRanges madeNullSpans = madeNullSpansMu.getValue(); @@ -3156,7 +3079,7 @@ protected void markIndexAsRemoved(final MutableObject madeNullSpan } protected void markIndexRangeAsRemoved( - final MutableObject madeNullSpansMu, final int iFirst, final int iLast) { + final MutableObject madeNullSpansMu, final int iFirst, final int iLast) { for (int i = iFirst; i <= iLast; ++i) { spanInfos[i] = -1; } @@ -3186,12 +3109,11 @@ protected MutableObject getWorkSortedRangesMutableObject(final Wor * @param startPos the first position to start looking for orKey in 
this container. * @param other RspArray for the span to remove. * @param otherIdx the index of the span to remove in the other RspArray. - * @return the index in our parallel arrays to continue searches for keys after (removeFirstKey, - * removeSpan). + * @return the index in our parallel arrays to continue searches for keys after (removeFirstKey, removeSpan). */ private int andNotEqualsSpan(final int startPos, final RspArray other, final int otherIdx, - final MutableObject madeNullSpansMu, - final WorkData wd) { + final MutableObject madeNullSpansMu, + final WorkData wd) { try (SpanView otherView = wd.borrowSpanView(other, otherIdx)) { final long removeKey = otherView.getKey(); final long removeflen = otherView.getFullBlockSpanLen(); @@ -3232,16 +3154,13 @@ private int andNotEqualsSpan(final int startPos, final RspArray other, final int if (uLess(firstKey, removeKey)) { if (uLess(removeKey, endKey)) { final ArraysBuf buf = wd.getArraysBuf(3); - buf.pushFullBlockSpan(firstKey, - distanceInBlocks(firstKey, removeKey)); + buf.pushFullBlockSpan(firstKey, distanceInBlocks(firstKey, removeKey)); buf.pushContainer(keyNotContainer, notContainer); - buf.pushFullBlockSpan(removeKey + BLOCK_SIZE, - distanceInBlocks(removeKey, endKey)); + buf.pushFullBlockSpan(removeKey + BLOCK_SIZE, distanceInBlocks(removeKey, endKey)); replaceSpanAtIndex(i, buf); } else { final ArraysBuf buf = wd.getArraysBuf(2); - buf.pushFullBlockSpan(firstKey, - distanceInBlocks(firstKey, removeKey)); + buf.pushFullBlockSpan(firstKey, distanceInBlocks(firstKey, removeKey)); buf.pushContainer(keyNotContainer, notContainer); replaceSpanAtIndex(i, buf); } @@ -3250,8 +3169,7 @@ private int andNotEqualsSpan(final int startPos, final RspArray other, final int if (uLess(removeKey, endKey)) { final ArraysBuf buf = wd.getArraysBuf(2); buf.pushContainer(keyNotContainer, notContainer); - buf.pushFullBlockSpan(removeKey + BLOCK_SIZE, - distanceInBlocks(removeKey, endKey)); + buf.pushFullBlockSpan(removeKey + 
BLOCK_SIZE, distanceInBlocks(removeKey, endKey)); replaceSpanAtIndex(i, buf); } else if (notContainer == null) { setSingletonSpan(i, keyNotContainer); @@ -3370,8 +3288,7 @@ public void andNotEqualsUnsafeNoWriteCheck(final RspArray other) { return; } final long keyForFirstBlock = keyForFirstBlock(); - if (other.keyForLastBlock() < keyForFirstBlock - || keyForLastBlock() < other.keyForFirstBlock()) { + if (other.keyForLastBlock() < keyForFirstBlock || keyForLastBlock() < other.keyForFirstBlock()) { return; } int startPos = 0; @@ -3462,8 +3379,8 @@ private void compactRemovedUnsafeNoWriteCheck() { } private int andEqualsSpan(final RspArray other, final int otherIdx, - final int startPos, final MutableObject madeNullSpansMu, - final WorkData wd) { + final int startPos, final MutableObject madeNullSpansMu, + final WorkData wd) { try (SpanView otherView = wd.borrowSpanView(other, otherIdx)) { final long andflen = otherView.getFullBlockSpanLen(); final long andKey = otherView.getKey(); @@ -3486,8 +3403,7 @@ private int andEqualsSpan(final RspArray other, final int otherIdx, final long lastKey = getKeyForLastBlockInSpan(ourKey, flen); // andIdx > 0, therefore andKey is contained in this span. if (uLess(ourKey, andKey)) { - // when this method is called from andEquals, given the previous pruning - // this + // when this method is called from andEquals, given the previous pruning this // case can't be hit. 
if (uLess(andKey, lastKey)) { final ArraysBuf buf = wd.getArraysBuf(3); @@ -3497,8 +3413,7 @@ private int andEqualsSpan(final RspArray other, final int otherIdx, } else { buf.pushSharedContainer(other, andKey, otherView.getContainer()); } - buf.pushFullBlockSpan(nextKey(andKey), - distanceInBlocks(andKey, lastKey)); + buf.pushFullBlockSpan(nextKey(andKey), distanceInBlocks(andKey, lastKey)); replaceSpanAtIndex(andIdx, buf); } else { final ArraysBuf buf = wd.getArraysBuf(2); @@ -3512,8 +3427,7 @@ private int andEqualsSpan(final RspArray other, final int otherIdx, } } else if (uLess(andKey, lastKey)) { final ArraysBuf buf = wd.getArraysBuf(2); - // when this method is called from andEquals, given the previous pruning - // this + // when this method is called from andEquals, given the previous pruning this // case can't be hit. if (otherView.isSingletonSpan()) { buf.pushSingletonSpan(otherView.getSingletonSpanValue()); @@ -3529,12 +3443,10 @@ private int andEqualsSpan(final RspArray other, final int otherIdx, } return andIdx + 1; } - Container result = null; // if result stays null, the result is empty and we should - // remove this span. + Container result = null; // if result stays null, the result is empty and we should remove this span. Container ourContainer = null; // Container operations may return copy on write copies in either direction; - // when a ContainerResource is used it can't be freed until we are certain there is - // no + // when a ContainerResource is used it can't be freed until we are certain there is no // outstanding reference to a container it may be holding. if (ourView.isSingletonSpan()) { final long ourValue = ourView.getSingletonSpanValue(); @@ -3642,8 +3554,7 @@ void pushFullBlockSpan(final long key, final long flen) { } /** - * Intersects this RspArray with the argument, leaving the result on this RspArray. The argument - * won't be modified. + * Intersects this RspArray with the argument, leaving the result on this RspArray. 
The argument won't be modified. * * @param other an RspArray. */ @@ -3762,8 +3673,7 @@ public void applyKeyOffset(final long offset) { } // end is inclusive - private void appendSpanIntersectionByKeyRange(final RspArray r, final int i, final long start, - final long end) { + private void appendSpanIntersectionByKeyRange(final RspArray r, final int i, final long start, final long end) { final Object span = spans[i]; final long spanInfo = spanInfos[i]; final long flen = getFullBlockSpanLen(spanInfo, span); @@ -3786,9 +3696,7 @@ private void appendSpanIntersectionByKeyRange(final RspArray r, final int i, fin if (cs == ce) { r.appendSingletonSpan(resultStart); } else { - r.appendContainer(resultStartHiBits, Container.rangeOfOnes(cs, ce + 1 /* - * exclusive - */)); + r.appendContainer(resultStartHiBits, Container.rangeOfOnes(cs, ce + 1 /* exclusive */)); } return; } @@ -3817,9 +3725,7 @@ private void appendSpanIntersectionByKeyRange(final RspArray r, final int i, fin if (e == 0) { r.appendSingletonSpan(keyAfterMid); } else { - r.appendContainer(keyAfterMid, Container.rangeOfOnes(0, e + 1 /* - * exclusive - */)); + r.appendContainer(keyAfterMid, Container.rangeOfOnes(0, e + 1 /* exclusive */)); } } } @@ -3866,8 +3772,8 @@ private void appendSpanIntersectionByKeyRange(final RspArray r, final int i, fin } } - boolean forEachLongInSpanWithOffsetAndMaxCount(final int i, final long offset, - LongAbortableConsumer lc, final long maxCount) { + boolean forEachLongInSpanWithOffsetAndMaxCount(final int i, final long offset, LongAbortableConsumer lc, + final long maxCount) { final MutableLong n = new MutableLong(0); forEachLongInSpanWithOffset(i, offset, (final long v) -> { if (!lc.accept(v)) { @@ -3876,8 +3782,7 @@ boolean forEachLongInSpanWithOffsetAndMaxCount(final int i, final long offset, n.increment(); return n.longValue() < maxCount; }); - return n.longValue() >= maxCount; // The only way we get to maxCount is if lc never returns - // false above. 
+ return n.longValue() >= maxCount; // The only way we get to maxCount is if lc never returns false above. } boolean forEachLongInSpanWithOffset(final int i, final long offset, LongAbortableConsumer lc) { @@ -3900,13 +3805,12 @@ boolean forEachLongInSpanWithOffset(final int i, final long offset, LongAbortabl } final Container c = view.getContainer(); final boolean wantMore = c.forEach( - (int) offset, (short v) -> lc.accept(key | unsignedShortToLong(v))); + (int) offset, (short v) -> lc.accept(key | unsignedShortToLong(v))); return wantMore; } } - boolean forEachLongInSpanWithMaxCount(final int i, LongAbortableConsumer lc, - final long maxCount) { + boolean forEachLongInSpanWithMaxCount(final int i, LongAbortableConsumer lc, final long maxCount) { final MutableLong n = new MutableLong(0); forEachLongInSpan(i, (final long v) -> { if (!lc.accept(v)) { @@ -3915,8 +3819,7 @@ boolean forEachLongInSpanWithMaxCount(final int i, LongAbortableConsumer lc, n.increment(); return n.longValue() < maxCount; }); - return n.longValue() >= maxCount; // The only way we get to maxCount is if lc never returns - // false above. + return n.longValue() >= maxCount; // The only way we get to maxCount is if lc never returns false above. 
} boolean forEachLongInSpan(final int i, LongAbortableConsumer lc) { @@ -3939,7 +3842,7 @@ boolean forEachLongInSpan(final int i, LongAbortableConsumer lc) { } final Container c = view.getContainer(); final boolean wantMore = c.forEach( - (short v) -> lc.accept(key | unsignedShortToLong(v))); + (short v) -> lc.accept(key | unsignedShortToLong(v))); return wantMore; } } @@ -3954,16 +3857,15 @@ public boolean forEachLong(final LongAbortableConsumer lc) { } public boolean forEachLongRangeInSpanWithOffsetAndMaxCardinality( - final int i, final long offset, final long maxCardinality, - final LongRangeAbortableConsumer larc) { + final int i, final long offset, final long maxCardinality, + final LongRangeAbortableConsumer larc) { if (maxCardinality <= 0) { return true; } try (SpanView view = workDataPerThread.get().borrowSpanView(this, i)) { if (view.isSingletonSpan()) { if (offset != 0) { - throw new IllegalArgumentException( - "offset=" + offset + " and single key span."); + throw new IllegalArgumentException("offset=" + offset + " and single key span."); } final long v = view.getSingletonSpanValue(); return larc.accept(v, v); @@ -4008,12 +3910,11 @@ public boolean forEachLongRangeInSpanWithOffsetAndMaxCardinality( } boolean forEachLongRangeInSpanWithOffset(final int i, final long offset, - final LongRangeAbortableConsumer larc) { + final LongRangeAbortableConsumer larc) { try (SpanView view = workDataPerThread.get().borrowSpanView(this, i)) { if (view.isSingletonSpan()) { if (offset != 0) { - throw new IllegalArgumentException( - "offset=" + offset + " and single key span."); + throw new IllegalArgumentException("offset=" + offset + " and single key span."); } final long v = view.getSingletonSpanValue(); return larc.accept(v, v); @@ -4043,7 +3944,7 @@ boolean forEachLongRangeInSpanWithOffset(final int i, final long offset, } static LongRangeAbortableConsumer makeAdjacentRangesCollapsingWrapper(final long[] pendingRange, - final LongRangeAbortableConsumer lrac) { + 
final LongRangeAbortableConsumer lrac) { pendingRange[0] = -2; pendingRange[1] = -2; final LongRangeAbortableConsumer wrapper = (final long start, final long end) -> { @@ -4072,8 +3973,7 @@ public boolean forEachLongRange(final LongRangeAbortableConsumer lrac) { return forEachLongRangeInSpanWithOffset(0, 0, lrac); } final long[] pendingRange = new long[2]; - final LongRangeAbortableConsumer wrapper = - makeAdjacentRangesCollapsingWrapper(pendingRange, lrac); + final LongRangeAbortableConsumer wrapper = makeAdjacentRangesCollapsingWrapper(pendingRange, lrac); for (int i = 0; i < size; ++i) { if (!forEachLongRangeInSpanWithOffset(i, 0, wrapper)) { return false; @@ -4099,8 +3999,7 @@ protected T subrangeByKeyInternal(final long start, final long end) { final long endHighBits = highBits(end); int ikend = getSpanIndex(endHighBits); if (ikend < 0) { - // If end is not an exact match, the range cannot span beyond the previous position - // returned. + // If end is not an exact match, the range cannot span beyond the previous position returned. ikend = -ikend - 2; if (ikend < 0) { return r; @@ -4170,9 +4069,9 @@ protected T subrangeByPosInternal(final long firstPos, final long lastPos) { // rsEnd is inclusive. static private void setToRangeOfOnesMinusRangeForKey( - ArraysBuf buf, - final long kHigh, - final long rsStart, final long rsEnd) { + ArraysBuf buf, + final long kHigh, + final long rsStart, final long rsEnd) { final int crsStart = (int) (rsStart - kHigh); final int crsEnd = (int) (rsEnd - kHigh); if (crsStart > 0) { @@ -4205,16 +4104,14 @@ static private void setToRangeOfOnesMinusRangeForKey( * @param key block key for span i * @param start start of range to remove (may be outside of span) * @param end end of range to remove (may be outside of span) - * @param madeNullSpansMu where to store the indices of spans that were made null because they - * ended up empty; these should be collected later by the caller. 
- * @return if >= 0, the index of the last span where the removal effectively happened. if < 0, - * ~index for the span where to continue the removals, after a span was effectively - * eliminated. + * @param madeNullSpansMu where to store the indices of spans that were made null because they ended up empty; these + * should be collected later by the caller. + * @return if >= 0, the index of the last span where the removal effectively happened. if < 0, ~index for the span + * where to continue the removals, after a span was effectively eliminated. */ - private int removeRangeInSpan(final int i, final long spanInfo, final long key, - final long start, final long end, - final MutableObject madeNullSpansMu, - final WorkData wd) { + private int removeRangeInSpan(final int i, final long spanInfo, final long key, final long start, final long end, + final MutableObject madeNullSpansMu, + final WorkData wd) { final Object span = spans[i]; try (SpanView view = wd.borrowSpanView(this, i, spanInfo, span)) { final long flen = view.getFullBlockSpanLen(); @@ -4303,8 +4200,8 @@ public void removeRangeUnsafeNoWriteCheck(final long start, final long end) { } private int removeRange(final int fromIdx, final long start, final long end, - final MutableObject madeNullSpansMu, - final WorkData wd) { + final MutableObject madeNullSpansMu, + final WorkData wd) { final long startHiBits = highBits(start); int i = getSpanIndex(fromIdx, startHiBits); if (i < 0) { @@ -4335,8 +4232,7 @@ private int removeRange(final int fromIdx, final long start, final long end, public void removeRangesUnsafeNoWriteCheck(final Index.RangeIterator rit) { try { final WorkData wd = workDataPerThread.get(); - final MutableObject madeNullSpansMu = - getWorkSortedRangesMutableObject(wd); + final MutableObject madeNullSpansMu = getWorkSortedRangesMutableObject(wd); int i = 0; while (rit.hasNext()) { rit.next(); @@ -4355,13 +4251,11 @@ public void removeRangesUnsafeNoWriteCheck(final Index.RangeIterator rit) { // 
Neither this nor other can be empty. // shiftAmount should be a multiple of BLOCK_SIZE. - boolean tryAppendShiftedUnsafeNoWriteCheck(final long shiftAmount, final RspArray other, - final boolean acquire) { + boolean tryAppendShiftedUnsafeNoWriteCheck(final long shiftAmount, final RspArray other, final boolean acquire) { if (RspArray.debug) { if (size == 0 || other.size == 0) { throw new IllegalArgumentException( - "Append called for empty argument: size=" + size + ", other.size=" - + other.size); + "Append called for empty argument: size=" + size + ", other.size=" + other.size); } } final long otherFirstSpanInfo = other.spanInfos[0]; @@ -4505,11 +4399,9 @@ boolean validate(final String strArg, final boolean doAssert, final boolean isUn return false; } final Object s = spans[i]; - if (s != null && s != FULL_BLOCK_SPAN_MARKER && !(s instanceof short[]) - && (sInfo & BLOCK_LAST) != 0) { + if (s != null && s != FULL_BLOCK_SPAN_MARKER && !(s instanceof short[]) && (sInfo & BLOCK_LAST) != 0) { if (doAssert) { - final String m = str + ": lower 16 bits of spanInfo non-zero i=" + i - + ", sInfo=" + sInfo; + final String m = str + ": lower 16 bits of spanInfo non-zero i=" + i + ", sInfo=" + sInfo; Assert.assertion(false, m); } return false; @@ -4517,9 +4409,8 @@ boolean validate(final String strArg, final boolean doAssert, final boolean isUn if (!firstTime) { if (!uLess(lastSpanLastBlockKey, k)) { if (doAssert) { - final String m = str + ": non-increasing key found i=" + i + ", k=" + k - + - ", lastSpanLastBlockKey=" + lastSpanLastBlockKey + ", size=" + size; + final String m = str + ": non-increasing key found i=" + i + ", k=" + k + + ", lastSpanLastBlockKey=" + lastSpanLastBlockKey + ", size=" + size; Assert.assertion(false, m); } return false; @@ -4529,8 +4420,7 @@ boolean validate(final String strArg, final boolean doAssert, final boolean isUn if (flen > 0) { if (lastSpanWasFullBlock && k - lastSpanLastBlockKey <= BLOCK_SIZE) { if (doAssert) { - final String m = str + ": 
consecutive full block spans found i=" + i - + ", size=" + size; + final String m = str + ": consecutive full block spans found i=" + i + ", size=" + size; Assert.assertion(false, m); } return false; @@ -4540,9 +4430,9 @@ boolean validate(final String strArg, final boolean doAssert, final boolean isUn if (s != null) { if (!(s instanceof Container || s instanceof short[])) { if (doAssert) { - final String m = str + ": can't cast s=" + s + " of class " - + s.getClass().getSimpleName() + - " to Container or short[] when !(flen > 0)."; + final String m = + str + ": can't cast s=" + s + " of class " + s.getClass().getSimpleName() + + " to Container or short[] when !(flen > 0)."; Assert.assertion(false, m); } return false; @@ -4550,24 +4440,22 @@ boolean validate(final String strArg, final boolean doAssert, final boolean isUn final Container c = view.getContainer(); if (c.isEmpty()) { if (doAssert) { - final String m = - str + ": empty RB container found i=" + i + ", size=" + size; + final String m = str + ": empty RB container found i=" + i + ", size=" + size; Assert.assertion(false, m); } return false; } if (c.isAllOnes()) { if (doAssert) { - final String m = - str + ": full RB container found i=" + i + ", size=" + size; + final String m = str + ": full RB container found i=" + i + ", size=" + size; Assert.assertion(false, m); } return false; } if (c.isSingleElement()) { if (doAssert) { - final String m = str + ": singleton container found i=" + i - + ", type=" + c.getClass().getSimpleName(); + final String m = str + ": singleton container found i=" + i + ", type=" + + c.getClass().getSimpleName(); Assert.assertion(false, m); } return false; @@ -4582,10 +4470,8 @@ boolean validate(final String strArg, final boolean doAssert, final boolean isUn final long dCard = acc[i] - prevCard; final long c = getSpanCardinalityAtIndex(i); if (dCard != c) { - final String m = str + ": acc cardinality mismatch, isUnsafe=" - + isUnsafe + " at i=" + i - + ", prevCard=" + prevCard + ", 
dCard=" + dCard + ", c=" + c - + ", size=" + size; + final String m = str + ": acc cardinality mismatch, isUnsafe=" + isUnsafe + " at i=" + i + + ", prevCard=" + prevCard + ", dCard=" + dCard + ", c=" + c + ", size=" + size; Assert.assertion(false, m); } } @@ -4598,7 +4484,7 @@ boolean validate(final String strArg, final boolean doAssert, final boolean isUn final long cardinality = calculateCardinality(); if (cardinality != (long) cardData) { final String m = str + ": acc == null && cardData (=" + cardData + - ") != cardinality (=" + cardinality + ")"; + ") != cardinality (=" + cardinality + ")"; Assert.assertion(false, m); } @@ -4606,11 +4492,10 @@ boolean validate(final String strArg, final boolean doAssert, final boolean isUn return true; } - public OrderedKeys getOrderedKeysByPosition(final long startPositionInclusive, - final long length) { + public OrderedKeys getOrderedKeysByPosition(final long startPositionInclusive, final long length) { if (startPositionInclusive < 0) { throw new IllegalArgumentException( - ("startPositionInclusive=" + startPositionInclusive + " should be >=0.")); + ("startPositionInclusive=" + startPositionInclusive + " should be >=0.")); } final long endPositionInclusive; if (isCardinalityCached()) { @@ -4656,19 +4541,16 @@ public OrderedKeys getOrderedKeysByPosition(final long startPositionInclusive, } } final long startOffset = startPositionInclusive - cardBeforeStart; - return new RspOrderedKeys(this, startIdx, startOffset, cardBeforeStart, endIdx, endOffset, - cardBeforeEnd); + return new RspOrderedKeys(this, startIdx, startOffset, cardBeforeStart, endIdx, endOffset, cardBeforeEnd); } - public OrderedKeys getOrderedKeysByKeyRange(final long startValueInclusive, - final long endValueInclusive) { + public OrderedKeys getOrderedKeysByKeyRange(final long startValueInclusive, final long endValueInclusive) { if (isEmpty() || endValueInclusive < startValueInclusive) { return OrderedKeys.EMPTY; } final long lastSpanCardinality = 
getSpanCardinalityAtIndexMaybeAcc(size - 1); - return getOrderedKeysByKeyRangeConstrainedToIndexAndOffsetRange(startValueInclusive, - endValueInclusive, - 0, 0, 0, size - 1, lastSpanCardinality - 1); + return getOrderedKeysByKeyRangeConstrainedToIndexAndOffsetRange(startValueInclusive, endValueInclusive, + 0, 0, 0, size - 1, lastSpanCardinality - 1); } public RspOrderedKeys asOrderedKeys() { @@ -4677,15 +4559,15 @@ public RspOrderedKeys asOrderedKeys() { } final long lastSpanCard = getSpanCardinalityAtIndexMaybeAcc(size - 1); return new RspOrderedKeys(this, - 0, 0, 0, - size - 1, lastSpanCard - 1, getCardinality() - lastSpanCard); + 0, 0, 0, + size - 1, lastSpanCard - 1, getCardinality() - lastSpanCard); } // endIdx and endOffsetIn are inclusive. OrderedKeys getOrderedKeysByKeyRangeConstrainedToIndexAndOffsetRange( - final long startValue, final long endValue, - final int startIdx, final long startOffsetIn, final long cardBeforeStartIdx, - final int endIdx, final long endOffsetIn) { + final long startValue, final long endValue, + final int startIdx, final long startOffsetIn, final long cardBeforeStartIdx, + final int endIdx, final long endOffsetIn) { final long startKey = highBits(startValue); int startKeyIdx = getSpanIndex(startIdx, startKey); if (startKeyIdx < 0) { @@ -4702,13 +4584,12 @@ OrderedKeys getOrderedKeysByKeyRangeConstrainedToIndexAndOffsetRange( endKeyIdx = -endKeyIdx - 2; } final BeforeCardContext beforeCardCtx = (acc == null) - ? new BeforeCardContext(startIdx, cardBeforeStartIdx) - : null; + ? new BeforeCardContext(startIdx, cardBeforeStartIdx) + : null; long cardBeforeStartKeyIdx = cardinalityBeforeMaybeAcc(startKeyIdx, beforeCardCtx); long absoluteStartPos = findInSpan(startKeyIdx, startValue, cardBeforeStartKeyIdx); if (absoluteStartPos < 0) { - // the following result can't be outside of valid pos space or we would have returned - // above. + // the following result can't be outside of valid pos space or we would have returned above. 
absoluteStartPos = -absoluteStartPos - 1; if (absoluteStartPos == getCardinality()) { return OrderedKeys.EMPTY; @@ -4725,8 +4606,7 @@ OrderedKeys getOrderedKeysByKeyRangeConstrainedToIndexAndOffsetRange( if (absoluteEndPos < 0) { return OrderedKeys.EMPTY; } - final long totalCardAtEndKeyIdx = - cardBeforeEndKeyIdx + getSpanCardinalityAtIndexMaybeAcc(endKeyIdx); + final long totalCardAtEndKeyIdx = cardBeforeEndKeyIdx + getSpanCardinalityAtIndexMaybeAcc(endKeyIdx); final long lastValidPos = totalCardAtEndKeyIdx - 1; if (absoluteEndPos > lastValidPos) { absoluteEndPos = lastValidPos; @@ -4756,8 +4636,8 @@ OrderedKeys getOrderedKeysByKeyRangeConstrainedToIndexAndOffsetRange( endOffsetOut = endOffsetIn; } return new RspOrderedKeys(this, - startKeyIdx, startOffsetOut, cardBeforeStartKeyIdx, - endKeyIdx, endOffsetOut, cardBeforeEndKeyIdx); + startKeyIdx, startOffsetOut, cardBeforeStartKeyIdx, + endKeyIdx, endOffsetOut, cardBeforeEndKeyIdx); } public OrderedKeys.Iterator getOrderedKeysIterator() { @@ -4852,8 +4732,7 @@ public long rangesCountUpperBound(final int startIdx, final int endIdx) { if (isSingletonSpan(s)) { ++nRanges; } else if (s instanceof io.deephaven.db.v2.utils.rsp.container.RunContainer) { - nRanges += - ((io.deephaven.db.v2.utils.rsp.container.RunContainer) s).numberOfRanges(); + nRanges += ((io.deephaven.db.v2.utils.rsp.container.RunContainer) s).numberOfRanges(); } else if (s instanceof io.deephaven.db.v2.utils.rsp.container.SingleRangeContainer) { nRanges += 1; } else if (s instanceof io.deephaven.db.v2.utils.rsp.container.TwoValuesContainer) { @@ -4927,31 +4806,29 @@ public double containerOverhead() { } public void sampleMetrics( - final LongConsumer rspParallelArraysSizeUsed, - final LongConsumer rspParallelArraysSizeUnused, - final LongConsumer arrayContainersBytesAllocated, - final LongConsumer arrayContainersBytesUnused, - final LongConsumer arrayContainersCardinality, - final LongConsumer arrayContainersCount, - final LongConsumer 
bitmapContainersBytesAllocated, - final LongConsumer bitmapContainersBytesUnused, - final LongConsumer bitmapContainersCardinality, - final LongConsumer bitmapContainersCount, - final LongConsumer runContainersBytesAllocated, - final LongConsumer runContainersBytesUnused, - final LongConsumer runContainersCardinality, - final LongConsumer runContainersCount, - final LongConsumer runContainersRunsCount, - final LongConsumer singleRangeContainersCount, - final LongConsumer singleRangeContainerCardinality, - final LongConsumer singletonContainersCount, - final LongConsumer twoValuesContainerCount) { + final LongConsumer rspParallelArraysSizeUsed, + final LongConsumer rspParallelArraysSizeUnused, + final LongConsumer arrayContainersBytesAllocated, + final LongConsumer arrayContainersBytesUnused, + final LongConsumer arrayContainersCardinality, + final LongConsumer arrayContainersCount, + final LongConsumer bitmapContainersBytesAllocated, + final LongConsumer bitmapContainersBytesUnused, + final LongConsumer bitmapContainersCardinality, + final LongConsumer bitmapContainersCount, + final LongConsumer runContainersBytesAllocated, + final LongConsumer runContainersBytesUnused, + final LongConsumer runContainersCardinality, + final LongConsumer runContainersCount, + final LongConsumer runContainersRunsCount, + final LongConsumer singleRangeContainersCount, + final LongConsumer singleRangeContainerCardinality, + final LongConsumer singletonContainersCount, + final LongConsumer twoValuesContainerCount) { rspParallelArraysSizeUsed.accept(size); rspParallelArraysSizeUnused.accept(spanInfos.length - size); - // TODO: It would be much more efficient to accumulate multiple samples (perhaps one array - // of them per Metric), - // and then provide them to the metric in one call, to prevent multiple volatile - // assignments. 
+ // TODO: It would be much more efficient to accumulate multiple samples (perhaps one array of them per Metric), + // and then provide them to the metric in one call, to prevent multiple volatile assignments. for (int i = 0; i < size; ++i) { final Object o = spans[i]; if (isSingletonSpan(o)) { @@ -5019,8 +4896,8 @@ protected final TreeIndexImpl tryCompact() { } else if (card > SortedRanges.LONG_DENSE_MAX_CAPACITY) { return null; } - SortedRanges sr = SortedRanges.tryMakeForKnownRangeUnknownMaxCapacity( - SortedRanges.LONG_DENSE_MAX_CAPACITY, first, last, true); + SortedRanges sr = SortedRanges.tryMakeForKnownRangeUnknownMaxCapacity(SortedRanges.LONG_DENSE_MAX_CAPACITY, + first, last, true); try (RspRangeIterator it = getRangeIterator()) { while (it.hasNext()) { it.next(); diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/rsp/RspBitmap.java b/DB/src/main/java/io/deephaven/db/v2/utils/rsp/RspBitmap.java index 580f79a8172..0ded999a324 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/rsp/RspBitmap.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/rsp/RspBitmap.java @@ -32,9 +32,9 @@ private RspBitmap(final RspBitmap other) { } public RspBitmap( - final RspArray src, - final int startIdx, final long startOffset, - final int endIdx, final long endOffset) { + final RspArray src, + final int startIdx, final long startOffset, + final int endIdx, final long endOffset) { super(src, startIdx, startOffset, endIdx, endOffset); } @@ -52,8 +52,8 @@ public static RspBitmap makeSingle(final long v) { @Override protected final RspBitmap make(final RspArray src, - final int startIdx, final long startOffset, - final int endIdx, final long endOffset) { + final int startIdx, final long startOffset, + final int endIdx, final long endOffset) { return new RspBitmap(src, startIdx, startOffset, endIdx, endOffset); } @@ -106,8 +106,7 @@ public long last() { private final static class AddCtx { long key; int index; - Container c; // The RB Container, or null if key corresponds to 
a full block span or single - // key. + Container c; // The RB Container, or null if key corresponds to a full block span or single key. } static Container containerForTwoValues(final long v1, final long v2) { @@ -120,15 +119,14 @@ static Container containerForTwoValues(final long v1, final long v2) { return Container.twoValues(lowBitsAsShort(v2), lowBitsAsShort(v1)); } - public RspBitmap addValuesUnsafe(final LongChunk values, final int offset, - final int length) { + public RspBitmap addValuesUnsafe(final LongChunk values, final int offset, final int length) { final RspBitmap rb = writeCheck(); rb.addValuesUnsafeNoWriteCheck(values, offset, length); return rb; } - public void addValuesUnsafeNoWriteCheck(final LongChunk values, - final int offset, final int length) { + public void addValuesUnsafeNoWriteCheck(final LongChunk values, final int offset, + final int length) { int lengthFromThisSpan; final WorkData wd = workDataPerThread.get(); final MutableObject sortedRangesMu = getWorkSortedRangesMutableObject(wd); @@ -138,7 +136,7 @@ public void addValuesUnsafeNoWriteCheck(final LongChunk value final long value = values.get(vi + offset); final long highBits = highBits(value); lengthFromThisSpan = countContiguousHighBitsMatches( - values, vi + offset + 1, length - vi - 1, highBits) + 1; + values, vi + offset + 1, length - vi - 1, highBits) + 1; final int spanIndexRaw = getSpanIndex(spanIndex, highBits); Container container = null; boolean existing = false; @@ -156,10 +154,9 @@ public void addValuesUnsafeNoWriteCheck(final LongChunk value existing = true; } final Container result = createOrUpdateContainerForValues( - values, vi + offset, lengthFromThisSpan, existing, spanIndex, container); + values, vi + offset, lengthFromThisSpan, existing, spanIndex, container); if (result != null && result.isAllOnes()) { - spanIndex = - setOrInsertFullBlockSpanAtIndex(spanIndexRaw, highBits, 1, sortedRangesMu); + spanIndex = setOrInsertFullBlockSpanAtIndex(spanIndexRaw, highBits, 
1, sortedRangesMu); } else if (!existing) { if (result == null) { insertSingletonAtIndex(spanIndex, value); @@ -175,8 +172,8 @@ public void addValuesUnsafeNoWriteCheck(final LongChunk value } private static int countContiguousHighBitsMatches(final LongChunk values, - final int offset, final int length, - final long highBits) { + final int offset, final int length, + final long highBits) { for (int vi = 0; vi < length; ++vi) { if (highBits(values.get(vi + offset)) != highBits) { return vi; @@ -185,12 +182,11 @@ private static int countContiguousHighBitsMatches(final LongChunk values, - final int offset, final int length, - final boolean existing, - final int keyIdx, - Container container) { + private Container createOrUpdateContainerForValues(@NotNull final LongChunk values, + final int offset, final int length, + final boolean existing, + final int keyIdx, + Container container) { final long firstValue = values.get(offset); if (length == 1) { // We're adding only one value @@ -230,7 +226,7 @@ private Container createOrUpdateContainerForValues( } if (container == null) { return new RunContainer(lowBitsAsInt(firstValue), lowBitsAsInt(lastValue) + 1) - .iset(lowBitsAsShort(getSingletonSpanValue(keyIdx))); + .iset(lowBitsAsShort(getSingletonSpanValue(keyIdx))); } return container.iadd(lowBitsAsInt(firstValue), lowBitsAsInt(lastValue) + 1); } @@ -241,9 +237,9 @@ private Container createOrUpdateContainerForValues( } if (container == null) { return new ArrayContainer(3) - .iset(lowBitsAsShort(firstValue)) - .iset(lowBitsAsShort(lastValue)) - .iset(lowBitsAsShort(spanInfos[keyIdx])); + .iset(lowBitsAsShort(firstValue)) + .iset(lowBitsAsShort(lastValue)) + .iset(lowBitsAsShort(spanInfos[keyIdx])); } return container.iset(lowBitsAsShort(firstValue)).iset(lowBitsAsShort(lastValue)); } @@ -258,7 +254,7 @@ private Container createOrUpdateContainerForValues( } private static Container makeValuesContainer(final LongChunk values, - final int offset, final int length) { + final int 
offset, final int length) { if (length <= ArrayContainer.SWITCH_CONTAINER_CARDINALITY_THRESHOLD) { final short[] valuesArray = new short[length]; for (int vi = 0; vi < length; ++vi) { @@ -274,8 +270,8 @@ private static Container makeValuesContainer(final LongChunk } private static Container addValuesToContainer(final LongChunk values, - final int offset, final int length, - Container container) { + final int offset, final int length, + Container container) { if (container.getCardinality() <= length / 2) { return makeValuesContainer(values, offset, length).ior(container); } @@ -291,8 +287,7 @@ public RspBitmap add(final long val) { return rb; } - // Does not update cardinality cache. Caller must ensure finishMutations() is called before - // calling + // Does not update cardinality cache. Caller must ensure finishMutations() is called before calling // any operation depending on the cardinality cache being up to date. public RspBitmap addUnsafe(final long val) { final RspBitmap rb = writeCheck(); @@ -339,8 +334,7 @@ public void appendRangeUnsafeNoWriteCheck(final long sHigh, final long start, fi appendRangeUnsafeNoWriteCheck(sHigh, start, highBits(end), end); } - private void appendRangeUnsafeNoWriteCheck(final long sHigh, final long start, final long eHigh, - final long end) { + private void appendRangeUnsafeNoWriteCheck(final long sHigh, final long start, final long eHigh, final long end) { final int sLow = lowBitsAsInt(start); final int eLow = lowBitsAsInt(end); if (sHigh == eHigh) { @@ -381,14 +375,12 @@ public RspBitmap appendRange(final long start, final long end) { } // end is inclusive. - // Does not update cardinality cache. Caller must ensure finishMutations() is called before - // calling + // Does not update cardinality cache. Caller must ensure finishMutations() is called before calling // any operation depending on the cardinality cache being up to date. 
public RspBitmap appendRangeUnsafe(final long start, final long end) { if (start > end) { if (Index.BAD_RANGES_AS_ERROR) { - throw new IllegalArgumentException( - "bad range start=" + start + " > end=" + end + "."); + throw new IllegalArgumentException("bad range start=" + start + " > end=" + end + "."); } return this; } @@ -444,8 +436,7 @@ public void appendUnsafeNoWriteCheck(final long v) { return; } if (keyForLastBlock != sHigh) { - throw new IllegalArgumentException( - "Can't append v=" + v + " when keyForLastBlock=" + keyForLastBlock); + throw new IllegalArgumentException("Can't append v=" + v + " when keyForLastBlock=" + keyForLastBlock); } final int lastIndex = size - 1; @@ -485,12 +476,11 @@ public void appendUnsafeNoWriteCheck(final long v) { * @param startHighBits the high bits of the start position for the range provided. * @param start the start position for the range provided. * @param startLowBits the low bits of the start of the range to add. 0 <= start < BLOCK_SIZE - * @param endLowBits the low bits of the end (inclusive) of the range to add. 0 <= end < - * BLOCK_SIZE + * @param endLowBits the low bits of the end (inclusive) of the range to add. 0 <= end < BLOCK_SIZE * @return the index of the span where the interval was added. 
*/ private int singleBlockAddRange(final int startPos, final long startHighBits, final long start, - final int startLowBits, final int endLowBits) { + final int startLowBits, final int endLowBits) { final int endExclusive = endLowBits + 1; final int i = getSpanIndex(startPos, start); if (endExclusive - startLowBits == BLOCK_SIZE) { @@ -501,8 +491,7 @@ private int singleBlockAddRange(final int startPos, final long startHighBits, fi if (startLowBits == endLowBits) { insertSingletonAtIndex(j, start); } else { - insertContainerAtIndex(j, startHighBits, - Container.rangeOfOnes(startLowBits, endExclusive)); + insertContainerAtIndex(j, startHighBits, Container.rangeOfOnes(startLowBits, endExclusive)); } return j; } @@ -553,16 +542,15 @@ private int singleBlockAddRange(final int startPos, final long startHighBits, fi /** - * Appends the provided (start, end) range, relative to the given key, to this array. - * Prerequisite: keyForLastBlock() <= k + * Appends the provided (start, end) range, relative to the given key, to this array. Prerequisite: + * keyForLastBlock() <= k * * @param k the key to use for the range provided. * @param start the start of the range to add. 0 <= start < BLOCK_SIZE * @param end the end (inclusive) of the range to add. 0 <= end < BLOCK_SIZE * @return the index of the span where the interval was added. */ - private int singleBlockAppendRange(final long kHigh, final long k, final int start, - final int end) { + private int singleBlockAppendRange(final long kHigh, final long k, final int start, final int end) { final int endExclusive = end + 1; long keyForLastBlock = 0; if (isEmpty() || (keyForLastBlock = keyForLastBlock()) < kHigh) { @@ -581,12 +569,10 @@ private int singleBlockAppendRange(final long kHigh, final long k, final int sta if (keyForLastBlock == kHigh) { final int pos = size() - 1; final Object span = spans[pos]; - if (!RspArray.isFullBlockSpan(span)) { // if it is a full block span, we already have - // the range. 
+ if (!RspArray.isFullBlockSpan(span)) { // if it is a full block span, we already have the range. final Container result; Container container = null; - try (SpanView view = - workDataPerThread.get().borrowSpanView(this, pos, spanInfos[pos], span)) { + try (SpanView view = workDataPerThread.get().borrowSpanView(this, pos, spanInfos[pos], span)) { if (view.isSingletonSpan()) { final long single = view.getSingletonSpanValue(); result = containerForLowValueAndRange(lowBitsAsInt(single), start, end); @@ -602,13 +588,11 @@ private int singleBlockAppendRange(final long kHigh, final long k, final int sta } return pos; } - throw new IllegalArgumentException( - "Can't append range (k=" + k + ", start=" + start + ", end=" + end + + throw new IllegalArgumentException("Can't append range (k=" + k + ", start=" + start + ", end=" + end + ") when keyForLastBlock=" + keyForLastBlock); } - public static Container containerForLowValueAndRange(final int val, final int start, - final int end) { + public static Container containerForLowValueAndRange(final int val, final int start, final int end) { if (end == start) { return containerForTwoValues(val, start); } @@ -660,8 +644,7 @@ private int getSetOrInsertIdx(final int startIdx, final long keyToInsert) { public RspBitmap addRangeUnsafe(final long start, final long end) { if (start > end) { if (Index.BAD_RANGES_AS_ERROR) { - throw new IllegalArgumentException( - "bad range start=" + start + " > end=" + end + "."); + throw new IllegalArgumentException("bad range start=" + start + " > end=" + end + "."); } return this; } @@ -677,8 +660,7 @@ public void addRangeUnsafeNoWriteCheck(final long first, final long last) { public int addRangeUnsafeNoWriteCheck(final int fromIdx, final long start, final long end) { if (start > end) { if (Index.BAD_RANGES_AS_ERROR) { - throw new IllegalArgumentException( - "bad range start=" + start + " > end=" + end + "."); + throw new IllegalArgumentException("bad range start=" + start + " > end=" + end + "."); } 
return -1; } @@ -708,11 +690,11 @@ public int addRangeUnsafeNoWriteCheck(final int fromIdx, final long start, final } if (eLow < BLOCK_LAST) { final int j = setOrInsertFullBlockSpanAtIndex( - idxForFull, sHighNext, RspArray.distanceInBlocks(sHighNext, eHigh), null); + idxForFull, sHighNext, RspArray.distanceInBlocks(sHighNext, eHigh), null); return singleBlockAddRange(j, eHigh, eHigh, 0, eLow); } return setOrInsertFullBlockSpanAtIndex( - idxForFull, sHighNext, RspArray.distanceInBlocks(sHighNext, eHigh) + 1, null); + idxForFull, sHighNext, RspArray.distanceInBlocks(sHighNext, eHigh) + 1, null); } @@ -911,8 +893,8 @@ private static RspBitmap orImpl(final RspBitmap r1, final RspBitmap r2) { } /** - * Return the logical or of two bitmaps as a new bitmap. This is equivalent to the union of the - * two bitmaps as sets. The arguments won't be modified. + * Return the logical or of two bitmaps as a new bitmap. This is equivalent to the union of the two bitmaps as sets. + * The arguments won't be modified. * * @param b1 a bitmap * @param b2 a bitmap @@ -943,19 +925,17 @@ public RspBitmap orEqualsShifted(final long shiftAmount, final RspBitmap other) } /** - * Add every element on other to this bitmap. Does not update cardinality cache. Caller must - * ensure finishMutations() is called before any operation depending on the cardinality cache - * being up to date are called. + * Add every element on other to this bitmap. Does not update cardinality cache. Caller must ensure + * finishMutations() is called before any operation depending on the cardinality cache being up to date are called. */ public RspBitmap orEqualsUnsafe(final RspBitmap other) { return orEqualsShiftedUnsafe(0, other); } /** - * For every key on other, add (key + shiftAmount) to this bitmap. Note shiftAmount is assumed - * to be a multiple of BLOCK_SIZE. Does not update cardinality cache. 
Caller must ensure - * finishMutations() is called before any operation depending on the cardinality cache being up - * to date are called. + * For every key on other, add (key + shiftAmount) to this bitmap. Note shiftAmount is assumed to be a multiple of + * BLOCK_SIZE. Does not update cardinality cache. Caller must ensure finishMutations() is called before any + * operation depending on the cardinality cache being up to date are called. */ public RspBitmap orEqualsShiftedUnsafe(final long shiftAmount, final RspBitmap other) { if (other.isEmpty()) { @@ -966,8 +946,7 @@ public RspBitmap orEqualsShiftedUnsafe(final long shiftAmount, final RspBitmap o return rb; } - public void appendShiftedUnsafeNoWriteCheck(final long shiftAmount, final RspArray other, - final boolean acquire) { + public void appendShiftedUnsafeNoWriteCheck(final long shiftAmount, final RspArray other, final boolean acquire) { if ((shiftAmount & BLOCK_LAST) == 0) { if (tryAppendShiftedUnsafeNoWriteCheck(shiftAmount, other, acquire)) { return; @@ -980,9 +959,8 @@ public void appendShiftedUnsafeNoWriteCheck(final long shiftAmount, final RspArr return; } throw new IllegalArgumentException( - "Cannot append index with shiftAmount=" + shiftAmount + ", firstKey=" - + other.firstValue() + - " when our lastValue=" + lastValue()); + "Cannot append index with shiftAmount=" + shiftAmount + ", firstKey=" + other.firstValue() + + " when our lastValue=" + lastValue()); } @@ -1008,8 +986,8 @@ private static RspBitmap andImpl(final RspBitmap r1, final RspBitmap r2) { } /** - * Return the logical and of two bitmaps as a new bitmap. This is equivalent to the intersection - * of the two bitmaps as sets. + * Return the logical and of two bitmaps as a new bitmap. This is equivalent to the intersection of the two bitmaps + * as sets. 
* * @param b1 a bitmap * @param b2 a bitmap @@ -1037,8 +1015,7 @@ public RspBitmap andEqualsUnsafe(final RspBitmap other) { } /** - * Return the logical result of r1 and not r2 as a new RspArray. The arguments won't be - * modified. + * Return the logical result of r1 and not r2 as a new RspArray. The arguments won't be modified. * * @param r1 an RspArray * @param r2 an RspArray @@ -1051,8 +1028,8 @@ public static RspBitmap andNotImpl(final RspBitmap r1, final RspBitmap r2) { } /** - * Return the logical result of r1 and not r2 as a new bitmap. This is equivalent to removing - * every element in b2 from b1. The arguments won't be modified. + * Return the logical result of r1 and not r2 as a new bitmap. This is equivalent to removing every element in b2 + * from b1. The arguments won't be modified. * * @param b1 a bitmap * @param b2 a bitmap @@ -1125,8 +1102,7 @@ public RspBitmap applyOffsetNoWriteCheck(final long offset) { } /** - * Apply an offset to every value in this bitmap, returning a new bitmap (original is not - * changed). + * Apply an offset to every value in this bitmap, returning a new bitmap (original is not changed). * * @param offset The offset to apply. 
*/ @@ -1135,8 +1111,7 @@ public RspBitmap applyOffsetOnNew(final long offset) { } public RspBitmap applyOffsetImpl( - final long offset, final Supplier onZeroOffset, - final Supplier onAlignedOffset) { + final long offset, final Supplier onZeroOffset, final Supplier onAlignedOffset) { if (offset == 0) { return onZeroOffset.get(); } @@ -1159,8 +1134,7 @@ public RspBitmap applyOffsetImpl( return rb; } - public RspBitmap subrangeByPos(final long firstPos, final long lastPos, - final boolean returnNullIfEmptyResult) { + public RspBitmap subrangeByPos(final long firstPos, final long lastPos, final boolean returnNullIfEmptyResult) { final RspBitmap rb = subrangeByPosInternal(firstPos, lastPos); if (rb == null || rb.isEmpty()) { if (returnNullIfEmptyResult) { @@ -1176,8 +1150,7 @@ public RspBitmap subrangeByPos(final long firstPos, final long lastPos) { return subrangeByPos(firstPos, lastPos, false); } - public RspBitmap subrangeByValue(final long start, final long end, - final boolean returnNullIfEmptyResult) { + public RspBitmap subrangeByValue(final long start, final long end, final boolean returnNullIfEmptyResult) { if (isEmpty()) { if (returnNullIfEmptyResult) { return null; @@ -1201,8 +1174,7 @@ public RspBitmap subrangeByValue(final long start, final long end) { return subrangeByValue(start, end, false); } - public void invert(final LongRangeConsumer builder, final Index.RangeIterator it, - final long maxPos) { + public void invert(final LongRangeConsumer builder, final Index.RangeIterator it, final long maxPos) { if (!it.hasNext()) { return; } @@ -1215,8 +1187,7 @@ public void invert(final LongRangeConsumer builder, final Index.RangeIterator it final long startHiBits = highBits(it.currentRangeStart()); final int i = getSpanIndex(startIndex, startHiBits); if (i < 0) { - throw new IllegalArgumentException( - "invert for non-existing key:" + it.currentRangeStart()); + throw new IllegalArgumentException("invert for non-existing key:" + it.currentRangeStart()); } final 
long prevCap; if (acc == null) { @@ -1285,8 +1256,7 @@ public void invert(final LongRangeConsumer builder, final Index.RangeIterator it builder.accept(start, end - 1); }; final int rMaxPos = (int) uMin(maxPos - prevCap, BLOCK_SIZE); - final IndexRangeIteratorView rv = - new IndexRangeIteratorView(it, startHiBits, startHiBits + BLOCK_SIZE); + final IndexRangeIteratorView rv = new IndexRangeIteratorView(it, startHiBits, startHiBits + BLOCK_SIZE); final boolean maxReached = c.findRanges(rc, rv, rMaxPos); if (maxReached || rv.underlyingIterFinished()) { return; @@ -1388,7 +1358,7 @@ public RspBitmap ixInsertRange(final long startKey, final long endKey) { @Override public final TreeIndexImpl ixInsertSecondHalf(final LongChunk values, - final int offset, final int length) { + final int offset, final int length) { final RspBitmap ans = addValuesUnsafe(values, offset, length); ans.finishMutations(); return ans; @@ -1396,7 +1366,7 @@ public final TreeIndexImpl ixInsertSecondHalf(final LongChunk @Override public final TreeIndexImpl ixRemoveSecondHalf(final LongChunk values, - final int offset, final int length) { + final int offset, final int length) { return ixRemove(TreeIndexImpl.fromChunk(values, offset, length, true)); } @@ -1429,8 +1399,7 @@ public long ixGet(final long pos) { } @Override - public void ixGetKeysForPositions(final PrimitiveIterator.OfLong inputPositions, - final LongConsumer outputKeys) { + public void ixGetKeysForPositions(final PrimitiveIterator.OfLong inputPositions, final LongConsumer outputKeys) { getKeysForPositions(inputPositions, outputKeys); } @@ -1457,8 +1426,7 @@ public TreeIndexImpl ixInvertOnNew(final TreeIndexImpl keys, final long maximumP if (keys instanceof SingleRange) { final long pos = ixFind(keys.ixFirstKey()); if (pos < 0) { - throw new IllegalArgumentException( - "invert for non-existing key:" + keys.ixFirstKey()); + throw new IllegalArgumentException("invert for non-existing key:" + keys.ixFirstKey()); } if (pos > maximumPosition) 
{ return TreeIndexImpl.EMPTY; @@ -1494,8 +1462,7 @@ public TreeIndexImpl ixSubindexByPosOnNew(final long startPos, final long endPos if (result == null) { return TreeIndexImpl.EMPTY; } - // subindexByPos tends to create small indices, it pays off to check for compacting the - // result. + // subindexByPos tends to create small indices, it pays off to check for compacting the result. return result.ixCompact(); } @@ -1509,8 +1476,7 @@ public TreeIndexImpl ixSubindexByKeyOnNew(long startKey, final long endKey) { if (result == null) { return TreeIndexImpl.EMPTY; } - // subindexByKey tends to create small indices, it pays off to check for compacting the - // result. + // subindexByKey tends to create small indices, it pays off to check for compacting the result. return result.ixCompact(); } @@ -1529,8 +1495,7 @@ public TreeIndexImpl ixUpdate(final TreeIndexImpl added, final TreeIndexImpl rem return getWriteRef().ixUpdateNoWriteCheck(added, removed); } - public TreeIndexImpl ixUpdateNoWriteCheck(final TreeIndexImpl added, - final TreeIndexImpl removed) { + public TreeIndexImpl ixUpdateNoWriteCheck(final TreeIndexImpl added, final TreeIndexImpl removed) { if (added instanceof SingleRange) { addRangeUnsafeNoWriteCheck(added.ixFirstKey(), added.ixLastKey()); if (removed instanceof SingleRange) { @@ -1629,8 +1594,7 @@ public TreeIndexImpl ixRetainNoWriteCheck(final TreeIndexImpl other) { } private TreeIndexImpl retainImpl(final TreeIndexImpl other, Supplier refSupplier) { - if (isEmpty() || other.ixIsEmpty() || last() < other.ixFirstKey() - || other.ixLastKey() < first()) { + if (isEmpty() || other.ixIsEmpty() || last() < other.ixFirstKey() || other.ixLastKey() < first()) { return TreeIndexImpl.EMPTY; } if (other instanceof SingleRange) { @@ -1645,8 +1609,7 @@ private TreeIndexImpl retainImpl(final TreeIndexImpl other, Supplier return retainImpl(o, refSupplier); } - private static TreeIndexImpl retainImpl(final RspBitmap other, - Supplier refSupplier) { + private static 
TreeIndexImpl retainImpl(final RspBitmap other, Supplier refSupplier) { final RspBitmap ans = refSupplier.get(); ans.andEqualsUnsafeNoWriteCheck(other); if (ans.isEmpty()) { @@ -1772,8 +1735,7 @@ public boolean subsetOf(final SortedRanges sr) { if (sr.isEmpty()) { return false; } - // Take the complement sr, and see if we have any elements in it, which would make the - // return false. + // Take the complement sr, and see if we have any elements in it, which would make the return false. // If no element of us is in the complement of sr, return true. if (first() < sr.first() || sr.last() < last()) { return false; @@ -2079,8 +2041,7 @@ public boolean advance(long v) { @Override public long binarySearchValue(Index.TargetComparator targetComparator, int direction) { - throw new UnsupportedOperationException( - "Reverse iterator does not support binary search."); + throw new UnsupportedOperationException("Reverse iterator does not support binary search."); } }; } @@ -2143,14 +2104,12 @@ public void ixValidate(final String failMsg) { } @Override - public OrderedKeys ixGetOrderedKeysByPosition(final long startPositionInclusive, - final long length) { + public OrderedKeys ixGetOrderedKeysByPosition(final long startPositionInclusive, final long length) { return getOrderedKeysByPosition(startPositionInclusive, length); } @Override - public OrderedKeys ixGetOrderedKeysByKeyRange(final long startKeyInclusive, - final long endKeyInclusive) { + public OrderedKeys ixGetOrderedKeysByKeyRange(final long startKeyInclusive, final long endKeyInclusive) { return getOrderedKeysByKeyRange(startKeyInclusive, endKeyInclusive); } diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/rsp/RspIterator.java b/DB/src/main/java/io/deephaven/db/v2/utils/rsp/RspIterator.java index f86d3cb7e5e..80e7dd54677 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/rsp/RspIterator.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/rsp/RspIterator.java @@ -31,8 +31,7 @@ private interface 
SingleSpanIterator { // Resource to hold the container sit may be pointing to. private SpanView sitView; private static final int BUFSZ = - Configuration.getInstance().getIntegerForClassWithDefault(RspIterator.class, "bufferSize", - 122); + Configuration.getInstance().getIntegerForClassWithDefault(RspIterator.class, "bufferSize", 122); private boolean hasNext; RspIterator(final RspArray.SpanCursorForward p, final long firstSpanSkipCount) { @@ -97,8 +96,7 @@ public int copyTo(final long[] vs, final int offset, final int max) { return c; } - public int copyTo(final WritableLongChunk chunk, final int offset, - final int max) { + public int copyTo(final WritableLongChunk chunk, final int offset, final int max) { int c = 0; while (hasNext) { if (!sit.hasNext()) { @@ -120,8 +118,7 @@ private void nextSingleSpanIterator(final long skipCount) { final Object s = p.span(); if (RspArray.isSingletonSpan(s)) { if (skipCount != 0) { - throw new IllegalArgumentException( - "skipCount=" + skipCount + " and next span is single element"); + throw new IllegalArgumentException("skipCount=" + skipCount + " and next span is single element"); } final long singletonValue = RspArray.spanInfoToSingletonSpanValue(spanInfo); sitView.reset(); @@ -161,8 +158,8 @@ public int copyTo(final long[] vs, final int offset, final int max) { } @Override - public int copyTo(final WritableLongChunk chunk, - final int offset, final int max) { + public int copyTo(final WritableLongChunk chunk, final int offset, + final int max) { if (max <= 0 || v == -1) { return 0; } @@ -213,8 +210,8 @@ public int copyTo(final long vs[], final int offset, final int max) { } @Override - public int copyTo(final WritableLongChunk chunk, - final int offset, final int max) { + public int copyTo(final WritableLongChunk chunk, final int offset, + final int max) { int c = 0; final long last = Math.min(curr + max - 1, end); while (curr <= last) { @@ -242,8 +239,7 @@ private long longValue(final short v) { public long nextLong() { if 
(bi >= count) { if (buf == null) { - // Lazy initialize to avoid the allocation in the cases it might never be - // used + // Lazy initialize to avoid the allocation in the cases it might never be used // (eg, pure forEachLong consumption). buf = new short[BUFSZ]; } @@ -287,8 +283,7 @@ public int copyTo(final long[] vs, final int offset, final int max) { } @Override - public int copyTo(final WritableLongChunk chunk, final int offset, - final int max) { + public int copyTo(final WritableLongChunk chunk, final int offset, final int max) { int c = 0; if (buf == null) { // Lazy initialize to avoid the allocation in the cases it might never be used diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/rsp/RspOrderedKeys.java b/DB/src/main/java/io/deephaven/db/v2/utils/rsp/RspOrderedKeys.java index 02950b118ad..f37bd49d790 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/rsp/RspOrderedKeys.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/rsp/RspOrderedKeys.java @@ -25,14 +25,13 @@ private static RspArray wrapRspArray(final RspArray arr) { } RspOrderedKeys( - final RspArray arr, - final int startIdx, final long startOffset, final long cardBeforeStartIdx, - final int endIdx, final long endOffset, final long cardBeforeEndIdx) { + final RspArray arr, + final int startIdx, final long startOffset, final long cardBeforeStartIdx, + final int endIdx, final long endOffset, final long cardBeforeEndIdx) { if (RspBitmap.debug) { if (endIdx < startIdx || - (endIdx == startIdx && endOffset < startOffset)) { - throw new IllegalArgumentException( - "Empty " + RspOrderedKeys.class.getSimpleName() + " :" + + (endIdx == startIdx && endOffset < startOffset)) { + throw new IllegalArgumentException("Empty " + RspOrderedKeys.class.getSimpleName() + " :" + "startIdx=" + startIdx + ", startOffset=" + startOffset + ", endIdx=" + endIdx + ", endOffset=" + endOffset); } @@ -110,15 +109,15 @@ public long rangesCountUpperBound() { public RspOrderedKeys copy(final RspOrderedKeys other) { 
return new RspOrderedKeys( - other.arr, - other.startIdx, other.startOffset, other.cardBeforeStartIdx, - other.endIdx, other.endOffset, other.cardBeforeEndIdx); + other.arr, + other.startIdx, other.startOffset, other.cardBeforeStartIdx, + other.endIdx, other.endOffset, other.cardBeforeEndIdx); } // For object reuse in order to avoid allocations. private void reset(final int startIdx, final long startOffset, final long cardBeforeStartIdx, - final int endIdx, final long endOffset, final long cardBeforeEndIdx, - final long firstKey) { + final int endIdx, final long endOffset, final long cardBeforeEndIdx, + final long firstKey) { this.startIdx = startIdx; this.endIdx = endIdx; this.startOffset = startOffset; @@ -147,9 +146,8 @@ public OrderedKeys getOrderedKeysByPosition(long startPositionInclusive, long le @Override public OrderedKeys getOrderedKeysByKeyRange(long startKeyInclusive, long endKeyInclusive) { - return arr.getOrderedKeysByKeyRangeConstrainedToIndexAndOffsetRange(startKeyInclusive, - endKeyInclusive, - startIdx, startOffset, cardBeforeStartIdx, endIdx, endOffset); + return arr.getOrderedKeysByKeyRangeConstrainedToIndexAndOffsetRange(startKeyInclusive, endKeyInclusive, + startIdx, startOffset, cardBeforeStartIdx, endIdx, endOffset); } @Override @@ -160,8 +158,7 @@ public Index asIndex() { @Override public void fillKeyIndicesChunk(final WritableLongChunk chunkToFill) { - final RspIterator it = - new RspIterator(new RspArray.SpanCursorForwardImpl(arr, startIdx), startOffset); + final RspIterator it = new RspIterator(new RspArray.SpanCursorForwardImpl(arr, startIdx), startOffset); int n = it.copyTo(chunkToFill, 0, intSize()); chunkToFill.setSize(n); } @@ -170,8 +167,7 @@ public void fillKeyIndicesChunk(final WritableLongChunk ch public void fillKeyRangesChunk(final WritableLongChunk chunkToFill) { chunkToFill.setSize(0); final RspRangeBatchIterator it = - new RspRangeBatchIterator(new RspArray.SpanCursorForwardImpl(arr, startIdx), - startOffset, size()); + new 
RspRangeBatchIterator(new RspArray.SpanCursorForwardImpl(arr, startIdx), startOffset, size()); int nRanges = 0; while (it.hasNext()) { final int n = it.fillRangeChunk(chunkToFill, 2 * nRanges); @@ -221,13 +217,11 @@ public boolean forEachLong(final LongAbortableConsumer lac) { public boolean forEachLongRange(final LongRangeAbortableConsumer lrac) { if (startIdx == endIdx) { final long remaining = endOffset - startOffset + 1; - return arr.forEachLongRangeInSpanWithOffsetAndMaxCardinality(startIdx, startOffset, - remaining, lrac); + return arr.forEachLongRangeInSpanWithOffsetAndMaxCardinality(startIdx, startOffset, remaining, lrac); } final long[] pendingRange = new long[2]; - final LongRangeAbortableConsumer wrapper = - RspArray.makeAdjacentRangesCollapsingWrapper(pendingRange, lrac); + final LongRangeAbortableConsumer wrapper = RspArray.makeAdjacentRangesCollapsingWrapper(pendingRange, lrac); if (!arr.forEachLongRangeInSpanWithOffset(startIdx, startOffset, wrapper)) { return false; } @@ -246,8 +240,7 @@ public boolean forEachLongRange(final LongRangeAbortableConsumer lrac) { return true; } - // Note unlike Index.Iterator, this Iterator will /not/ automatically release its underlying - // Index representation + // Note unlike Index.Iterator, this Iterator will /not/ automatically release its underlying Index representation // when iteration is exhausted. The API for OK.Iterator makes that impossible. static class Iterator implements OrderedKeys.Iterator { private static class OKWrapper extends RspOrderedKeys { @@ -261,8 +254,8 @@ public void close() { throw new IllegalStateException(); } // We purposely /do not/ close the RspOrderedKeys part as it will get reused. - // The API doc for Iterator states that clients should /never/ call close. So that - // we eneded up here means + // The API doc for Iterator states that clients should /never/ call close. So that we eneded up here + // means // there is some kind of bug. 
closeOrderedKeysAsChunkImpl(); } @@ -290,8 +283,7 @@ public void close() { private final int oksEndIdx; private final long oksEndOffset; - // cached value for the first key on the call to any getNext* method, or -1 if cache has not - // been populated yet. + // cached value for the first key on the call to any getNext* method, or -1 if cache has not been populated yet. private long nextKey; @@ -338,8 +330,7 @@ public long peekNextKey() { nextStartIdx = currStartIdx; nextStartOffset = currStartOffset; } else { - final long spanCardinalityAtCurrEndIdx = - arr.getSpanCardinalityAtIndexMaybeAcc(currEndIdx); + final long spanCardinalityAtCurrEndIdx = arr.getSpanCardinalityAtIndexMaybeAcc(currEndIdx); if (currEndOffset + 1 < spanCardinalityAtCurrEndIdx) { nextStartIdx = currEndIdx; nextStartOffset = currEndOffset + 1; @@ -363,19 +354,18 @@ public OrderedKeys getNextOrderedKeysThrough(final long maxKey) { return OrderedKeys.EMPTY; } currBuf.reset( - currStartIdx, currStartOffset, currCardBeforeStartIdx, - currEndIdx, currEndOffset, currCardBeforeEndIdx, - firstKey); + currStartIdx, currStartOffset, currCardBeforeStartIdx, + currEndIdx, currEndOffset, currCardBeforeEndIdx, + firstKey); return currBuf; } private int endIndex( - final int fromIndex, final long fromOffset, final long cardBeforeIndex, - final long deltaNumberOfKeys, final MutableLong prevCardMu) { + final int fromIndex, final long fromOffset, final long cardBeforeIndex, + final long deltaNumberOfKeys, final MutableLong prevCardMu) { final long cardTarget = cardBeforeIndex + fromOffset + deltaNumberOfKeys; if (prevCardMu == null) { - int j = RspArray.unsignedBinarySearch(idx -> arr.acc[idx], fromIndex, arr.size, - cardTarget); + int j = RspArray.unsignedBinarySearch(idx -> arr.acc[idx], fromIndex, arr.size, cardTarget); if (j < 0) { j = -j - 1; if (j == arr.size) { @@ -412,9 +402,9 @@ public OrderedKeys getNextOrderedKeysWithLength(final long desiredNumberOfKeys) } sizeLeft -= actualNumberOfKeys; currBuf.reset( 
- currStartIdx, currStartOffset, currCardBeforeStartIdx, - currEndIdx, currEndOffset, currCardBeforeEndIdx, - firstKey); + currStartIdx, currStartOffset, currCardBeforeStartIdx, + currEndIdx, currEndOffset, currCardBeforeEndIdx, + firstKey); nextKey = -1; return currBuf; } @@ -426,21 +416,19 @@ private long nextOrderedKeysWithLength(final long desiredNumberOfKeys) { } final MutableLong prevCardMu = (arr.acc == null) ? new MutableLong() : null; if (currEndIdx == -1) { - currEndIdx = endIndex(currStartIdx, currStartOffset, currCardBeforeStartIdx, - boundedNumberOfKeys, prevCardMu); + currEndIdx = endIndex(currStartIdx, currStartOffset, currCardBeforeStartIdx, boundedNumberOfKeys, + prevCardMu); if (currEndIdx == currStartIdx) { currCardBeforeEndIdx = currCardBeforeStartIdx; currEndOffset = currStartOffset + boundedNumberOfKeys - 1; } else { - currCardBeforeEndIdx = (prevCardMu != null) ? prevCardMu.longValue() - : arr.cardinalityBeforeWithAcc(currEndIdx); - final long spanCardAtStartIdx = - arr.getSpanCardinalityAtIndexMaybeAcc(currStartIdx); + currCardBeforeEndIdx = + (prevCardMu != null) ? 
prevCardMu.longValue() : arr.cardinalityBeforeWithAcc(currEndIdx); + final long spanCardAtStartIdx = arr.getSpanCardinalityAtIndexMaybeAcc(currStartIdx); final long cardAtStartIdx = currCardBeforeStartIdx + spanCardAtStartIdx; final long firstSpanCount = spanCardAtStartIdx - currStartOffset; final long deltaCount = currCardBeforeEndIdx - cardAtStartIdx; - final long remainingForEndSpan = - boundedNumberOfKeys - firstSpanCount - deltaCount; + final long remainingForEndSpan = boundedNumberOfKeys - firstSpanCount - deltaCount; currEndOffset = remainingForEndSpan - 1; } return boundedNumberOfKeys; @@ -455,17 +443,16 @@ private long nextOrderedKeysWithLength(final long desiredNumberOfKeys) { currStartIdxSpanCardinality = spanCardinality; keysAvailableInStartSpan = spanCardinality - currStartOffset; } else { - // currEndIdx + 1 < arr.size, otherwise we would have returned on the - // bounderNumberOfKeys <= 0 check. + // currEndIdx + 1 < arr.size, otherwise we would have returned on the bounderNumberOfKeys <= 0 check. if (RspArray.debug) { Assert.lt(currEndIdx + 1, "currEndIdx + 1", - arr.size, "arr.size"); + arr.size, "arr.size"); } currStartIdx = currEndIdx + 1; currStartOffset = 0; currCardBeforeStartIdx = currCardBeforeEndIdx + spanCardinality; - keysAvailableInStartSpan = currStartIdxSpanCardinality = - arr.getSpanCardinalityAtIndexMaybeAcc(currStartIdx); + keysAvailableInStartSpan = + currStartIdxSpanCardinality = arr.getSpanCardinalityAtIndexMaybeAcc(currStartIdx); } if (keysAvailableInStartSpan >= boundedNumberOfKeys) { currEndIdx = currStartIdx; @@ -473,13 +460,13 @@ private long nextOrderedKeysWithLength(final long desiredNumberOfKeys) { currCardBeforeEndIdx = currCardBeforeStartIdx; return boundedNumberOfKeys; } - currEndIdx = endIndex(currStartIdx, currStartOffset, currCardBeforeStartIdx, - boundedNumberOfKeys, prevCardMu); - currCardBeforeEndIdx = (prevCardMu != null) ? 
prevCardMu.longValue() - : arr.cardinalityBeforeWithAcc(currEndIdx); + currEndIdx = + endIndex(currStartIdx, currStartOffset, currCardBeforeStartIdx, boundedNumberOfKeys, prevCardMu); + currCardBeforeEndIdx = + (prevCardMu != null) ? prevCardMu.longValue() : arr.cardinalityBeforeWithAcc(currEndIdx); final long keysBeforeLastSpan = - keysAvailableInStartSpan + currCardBeforeEndIdx - currCardBeforeStartIdx - - currStartIdxSpanCardinality; + keysAvailableInStartSpan + currCardBeforeEndIdx - currCardBeforeStartIdx + - currStartIdxSpanCardinality; final long keysLeftInLastSpan = boundedNumberOfKeys - keysBeforeLastSpan; if (keysLeftInLastSpan <= 0) { throw new IllegalStateException("Internal error"); @@ -496,10 +483,10 @@ public boolean advance(final long toKey) { final int savedStartIdx = currStartIdx; final long savedStartOffset = currStartOffset; final boolean found = arr.findOrNext(currStartIdx, oksEndIdx + 1, toKey, - (final int index, final long offset) -> { - currStartIdx = index; - currStartOffset = offset; - }); + (final int index, final long offset) -> { + currStartIdx = index; + currStartOffset = offset; + }); final boolean revert; if (!found) { revert = true; @@ -519,8 +506,7 @@ public boolean advance(final long toKey) { } else { cardinalityUpAndIncludingPreviousEnd = currCardBeforeEndIdx + currEndOffset + 1; } - currCardBeforeStartIdx = - arr.cardinalityBeforeMaybeAcc(currStartIdx, savedStartIdx, currCardBeforeStartIdx); + currCardBeforeStartIdx = arr.cardinalityBeforeMaybeAcc(currStartIdx, savedStartIdx, currCardBeforeStartIdx); currEndIdx = -1; currEndOffset = -1; currCardBeforeEndIdx = -1; @@ -535,8 +521,7 @@ public boolean advance(final long toKey) { return true; } - // Updates curr{Start,End}{Idx,Offset} to a range starting on the position right after the - // end + // Updates curr{Start,End}{Idx,Offset} to a range starting on the position right after the end // at the time of the call, to the last position not greater than toKey. 
private boolean updateCurrThrough(final long toKey) { if (sizeLeft <= 0) { @@ -552,21 +537,20 @@ private boolean updateCurrThrough(final long toKey) { currStartIdx = currEndIdx; currStartOffset = currEndOffset + 1; } else { - // currEndIdx + 1 < arr.size, otherwise we would have returned on the sizeLeft - // <= 0 check. + // currEndIdx + 1 < arr.size, otherwise we would have returned on the sizeLeft <= 0 check. if (RspArray.debug) { Assert.lt(currEndIdx + 1, "currEndIdx + 1", - arr.size, "arr.size"); + arr.size, "arr.size"); } currStartIdx = currEndIdx + 1; currStartOffset = 0; } } final boolean found = arr.findOrPrev(currStartIdx, oksEndIdx + 1, toKey, - (final int index, final long offset) -> { - currEndIdx = index; - currEndOffset = offset; - }); + (final int index, final long offset) -> { + currEndIdx = index; + currEndOffset = offset; + }); if (!found || (currEndIdx == currStartIdx && currEndOffset < currStartOffset)) { currStartIdx = savedStartIdx; currStartOffset = savedStartOffset; @@ -578,11 +562,9 @@ private boolean updateCurrThrough(final long toKey) { currEndOffset = oksEndOffset; } if (savedEndIdx != -1) { - currCardBeforeStartIdx = - arr.cardinalityBeforeMaybeAcc(currStartIdx, savedEndIdx, currCardBeforeEndIdx); + currCardBeforeStartIdx = arr.cardinalityBeforeMaybeAcc(currStartIdx, savedEndIdx, currCardBeforeEndIdx); } - currCardBeforeEndIdx = - arr.cardinalityBeforeMaybeAcc(currEndIdx, currStartIdx, currCardBeforeStartIdx); + currCardBeforeEndIdx = arr.cardinalityBeforeMaybeAcc(currEndIdx, currStartIdx, currCardBeforeStartIdx); final long cardinalityBeforeStart = currCardBeforeStartIdx + currStartOffset; final long cardinalityBeforeEnd = currCardBeforeEndIdx + currEndOffset; sizeLeft -= cardinalityBeforeEnd - cardinalityBeforeStart + 1; diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/rsp/RspRangeBatchIterator.java b/DB/src/main/java/io/deephaven/db/v2/utils/rsp/RspRangeBatchIterator.java index 91de1d48a06..c9b61f08c45 100644 --- 
a/DB/src/main/java/io/deephaven/db/v2/utils/rsp/RspRangeBatchIterator.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/rsp/RspRangeBatchIterator.java @@ -13,16 +13,14 @@ public class RspRangeBatchIterator implements SafeCloseable { private RspArray.SpanCursorForward p; - // Iterator pointing to the next value to deliver in the current RB Container if there is one, - // null otherwise. + // Iterator pointing to the next value to deliver in the current RB Container if there is one, null otherwise. private SearchRangeIterator ri; // To hold the container to which ri refers. private SpanView riView; private boolean moreSpans; // if true, p has not been exhausted yet. private static final int BUF_SZ = Configuration.getInstance().getIntegerForClassWithDefault( - RspRangeBatchIterator.class, "shortBufSize", 2 * 32); // 32 shorts is an IA64 cache line - // size. + RspRangeBatchIterator.class, "shortBufSize", 2 * 32); // 32 shorts is an IA64 cache line size. private short[] buf = new short[BUF_SZ]; private int bufOffset = 0; private int bufCount = 0; @@ -30,8 +28,7 @@ public class RspRangeBatchIterator implements SafeCloseable { private long pendingStartOffset = 0; private long remaining; - public RspRangeBatchIterator(final RspArray.SpanCursorForward p, final long startOffset, - final long maxCount) { + public RspRangeBatchIterator(final RspArray.SpanCursorForward p, final long startOffset, final long maxCount) { if (!p.hasNext() || maxCount <= 0) { p.release(); this.p = null; @@ -60,8 +57,7 @@ public RspRangeBatchIterator(final RspArray.SpanCursorForward p, final long star return; } riView.init(p.arr(), p.arrIdx(), spanInfo, s); - ri = riView.getContainer() - .getShortRangeIterator((int) ((long) (Integer.MAX_VALUE) & startOffset)); + ri = riView.getContainer().getShortRangeIterator((int) ((long) (Integer.MAX_VALUE) & startOffset)); bufKey = spanInfoToKey(spanInfo); if (!ri.hasNext()) { throw new IllegalStateException("Illegal offset"); @@ -80,8 +76,8 @@ public boolean 
hasNext() { } private int flushBufToChunk( - final WritableLongChunk chunk, final int chunkOffset, - final int chunkDelta, final int chunkMaxCount) { + final WritableLongChunk chunk, final int chunkOffset, final int chunkDelta, + final int chunkMaxCount) { final int bufDelta = Math.min(chunkMaxCount - chunkDelta, bufCount); int i = 0; while (remaining > 0 && i < bufDelta) { @@ -121,15 +117,14 @@ private void loadBuffer() { private static final short BLOCK_LAST_AS_SHORT = (short) -1; /** - * Fill a writable long chunk with pairs of range boundaries (start, endInclusive) starting from - * the next iterator position forward. + * Fill a writable long chunk with pairs of range boundaries (start, endInclusive) starting from the next iterator + * position forward. * * @param chunk A writable chunk to fill * @param chunkOffset An offset inside the chunk to the position to start writing * @return The count of ranges written (which matches 2 times the number of elements written). */ - public int fillRangeChunk(final WritableLongChunk chunk, - final int chunkOffset) { + public int fillRangeChunk(final WritableLongChunk chunk, final int chunkOffset) { final int chunkMaxCount = chunk.capacity(); int chunkDelta = 0; // first, flush any leftovers in buf from previous calls. @@ -144,15 +139,15 @@ public int fillRangeChunk(final WritableLongChunk chunk, return chunkDelta / 2; } keyForPrevRangeEndAtSpanBoundary = - (buf[bufOffset - 1] == BLOCK_LAST_AS_SHORT) ? bufKey + BLOCK_SIZE : -1; + (buf[bufOffset - 1] == BLOCK_LAST_AS_SHORT) ? 
bufKey + BLOCK_SIZE : -1; } while (true) { while (ri != null) { if (bufCount == 0) { loadBuffer(); if (keyForPrevRangeEndAtSpanBoundary != -1 && - keyForPrevRangeEndAtSpanBoundary == bufKey && - buf[0] == (short) 0) { + keyForPrevRangeEndAtSpanBoundary == bufKey && + buf[0] == (short) 0) { long v = unsignedShortToLong(buf[1]); long delta = v + 1; if (delta >= remaining) { @@ -180,16 +175,13 @@ public int fillRangeChunk(final WritableLongChunk chunk, return chunkDelta / 2; } if (ri == null) { - // we can't return even if max had been reached: we may need to merge a later - // range. + // we can't return even if max had been reached: we may need to merge a later range. if (bufCount > 0) { // the only way we have leftover buf is if not all of it fit into chunk. return chunkDelta / 2; } keyForPrevRangeEndAtSpanBoundary = - (bufOffset > 0 && buf[bufOffset - 1] == BLOCK_LAST_AS_SHORT) - ? bufKey + BLOCK_SIZE - : -1; + (bufOffset > 0 && buf[bufOffset - 1] == BLOCK_LAST_AS_SHORT) ? bufKey + BLOCK_SIZE : -1; break; } if (bufCount > 0) { @@ -204,8 +196,7 @@ public int fillRangeChunk(final WritableLongChunk chunk, final long sk = spanInfoToKey(spanInfo); final long d; // do we need to merge a previously stored span last range? - if (keyForPrevRangeEndAtSpanBoundary != -1 - && keyForPrevRangeEndAtSpanBoundary == sk) { + if (keyForPrevRangeEndAtSpanBoundary != -1 && keyForPrevRangeEndAtSpanBoundary == sk) { d = Math.min(remaining, slen * BLOCK_SIZE); chunk.set(chunkOffset + chunkDelta - 1, sk + d - 1); } else { @@ -231,8 +222,7 @@ public int fillRangeChunk(final WritableLongChunk chunk, return chunkDelta / 2; } p.next(); - // This span can't be a full block span: it would have been merged with the previous - // one. + // This span can't be a full block span: it would have been merged with the previous one. // Therefore at this point we know p.span() is an RB Container. 
s = p.span(); } diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/rsp/RspRangeIterator.java b/DB/src/main/java/io/deephaven/db/v2/utils/rsp/RspRangeIterator.java index c199ea3fdd7..68a0be8a234 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/rsp/RspRangeIterator.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/rsp/RspRangeIterator.java @@ -14,8 +14,7 @@ public class RspRangeIterator implements LongRangeIterator, SafeCloseable { private RspArray.SpanCursorForward p; - // Iterator pointing to the next value to deliver in the current RB Container if there is one, - // null otherwise. + // Iterator pointing to the next value to deliver in the current RB Container if there is one, null otherwise. private SearchRangeIterator ri; // To hold the container on which ri is based. private SpanView riView; @@ -48,14 +47,12 @@ private void setFinished() { } /** - * There is a lot of complexity here because we may need to merge adjacent ranges belonging to - * different, consecutive blocks. + * There is a lot of complexity here because we may need to merge adjacent ranges belonging to different, + * consecutive blocks. */ private void nextInterval() { - // if hasPrev is true, we have accumulated in [start, end] a range that we can't deliver - // yet, - // as end corresponds exactly with the last element in a block interval, which may need to - // be merged + // if hasPrev is true, we have accumulated in [start, end] a range that we can't deliver yet, + // as end corresponds exactly with the last element in a block interval, which may need to be merged // with the next range. boolean hasPrev = false; long spanInfo = p.spanInfo(); @@ -117,8 +114,7 @@ private void nextInterval() { nextValid = true; return; } - // This span can't be a full block span: it would have been merged with the previous - // one. + // This span can't be a full block span: it would have been merged with the previous one. // Therefore at this point we know p.span() is an RB Container. 
hasPrev = true; s = p.span(); @@ -131,8 +127,7 @@ private void nextInterval() { riView.init(p.arr(), p.arrIdx(), spanInfo, s); ri = riView.getContainer().getShortRangeIterator(0); } - // ri.hasNext() has to be true by construction; this container can't be empty or it - // wouldn't be present. + // ri.hasNext() has to be true by construction; this container can't be empty or it wouldn't be present. ri.hasNext(); // we call it for its potential side effects. ri.next(); } @@ -148,8 +143,7 @@ private long peekNextStart() { if (isSingletonSpan(s) || RspArray.getFullBlockSpanLen(spanInfo, s) > 0) { return spanKey; } - try (SpanView res = - workDataPerThread.get().borrowSpanView(p.arr(), p.arrIdx(), spanInfo, s)) { + try (SpanView res = workDataPerThread.get().borrowSpanView(p.arr(), p.arrIdx(), spanInfo, s)) { return spanKey | (long) res.getContainer().first(); } } @@ -180,9 +174,8 @@ public void postpone(final long v) { } /** - * This method should be called: * After the iterator is created and before calling any other - * methods; it returns false, calling any other methods results in undefined behavior. * Right - * after a call to next, similar to above. + * This method should be called: * After the iterator is created and before calling any other methods; it returns + * false, calling any other methods results in undefined behavior. * Right after a call to next, similar to above. * * @return true if a call to next leads to a valid range to be read from start() and end(). */ @@ -211,17 +204,17 @@ public boolean forEachLongRange(final LongRangeAbortableConsumer lrc) { } /** - * Advance the current iterator position while the current range end is less than key. This - * results in either (a) true, leaving a current range whose start value is greater or equal - * than key, or (b) false, leaving an exhausted, invalid iterator. + * Advance the current iterator position while the current range end is less than key. 
This results in either (a) + * true, leaving a current range whose start value is greater or equal than key, or (b) false, leaving an exhausted, + * invalid iterator. * - * Note if the iterator is not exhausted, true is returned and the satisfying range is left as - * the iterator's current range: no need to call next to get to it. Also note the iterator may - * not move if at call entry the current range already satisfies (a). + * Note if the iterator is not exhausted, true is returned and the satisfying range is left as the iterator's + * current range: no need to call next to get to it. Also note the iterator may not move if at call entry the + * current range already satisfies (a). * - * If this method returns false, it implies the iterator has been exhausted, the current range - * is invalid, and subsequent calls to hasNext will return false; there is no guarantee as to - * where the start and end positions are left in this case. + * If this method returns false, it implies the iterator has been exhausted, the current range is invalid, and + * subsequent calls to hasNext will return false; there is no guarantee as to where the start and end positions are + * left in this case. * * @param key a key to search forward from the current iterator position * @return true if case (a), false if case (b). @@ -286,11 +279,11 @@ public boolean advance(final long key) { /** * Advance the current iterator (start) position to the rightmost (last) value v that maintains - * comp.directionToTargetFrom(v) >= 0. I.e, either hasNext() returns false after this call, or - * the next value in the iterator nv would be such that comp.directionToTargetFrom(nv) < 0. + * comp.directionToTargetFrom(v) >= 0. I.e, either hasNext() returns false after this call, or the next value in the + * iterator nv would be such that comp.directionToTargetFrom(nv) < 0. 
* - * Note this method should be called only after calling hasNext() and next() at least once, eg, - * from a valid current position in a non-empty and also non-exhausted iterator. + * Note this method should be called only after calling hasNext() and next() at least once, eg, from a valid current + * position in a non-empty and also non-exhausted iterator. * * @param comp a comparator used to search forward from the current iterator position * @@ -341,8 +334,7 @@ public void search(final Comparator comp) { } } final long spanKey = p.spanKey(); - final ContainerUtil.TargetComparator rcomp = - (int v) -> comp.directionToTargetFrom(spanKey | v); + final ContainerUtil.TargetComparator rcomp = (int v) -> comp.directionToTargetFrom(spanKey | v); final boolean found = ri.search(rcomp); if (found) { nextInterval(); @@ -364,17 +356,15 @@ public void close() { } /** - * Create a RangeIterator that is a view into this iterator; the returned rangeIterator has - * current start() - startOffset as it initial start value (note the iterator needs to have a - * valid current position at the time of the call). The returned RangeIterator includes all the - * ranges until the end parameter (exclusive), and as it advances it will make the underlying - * iterator advance. Once the RangeIterator is exhausted, the underlying iterator will have a - * current value that is one after the last range returned by the range iterator (not this may + * Create a RangeIterator that is a view into this iterator; the returned rangeIterator has current start() - + * startOffset as it initial start value (note the iterator needs to have a valid current position at the time of + * the call). The returned RangeIterator includes all the ranges until the end parameter (exclusive), and as it + * advances it will make the underlying iterator advance. 
Once the RangeIterator is exhausted, the underlying + * iterator will have a current value that is one after the last range returned by the range iterator (not this may * have been truncated to a partial, still valid, range). * * @param startOffset The resulting range iterator returns ranges offset with this value. - * @param rangesEnd boundary (exclusive) on the underlying iterator ranges for the ranges - * returned. + * @param rangesEnd boundary (exclusive) on the underlying iterator ranges for the ranges returned. * @return */ public RangeIteratorView rangeIteratorView(final long startOffset, final long rangesEnd) { @@ -393,8 +383,7 @@ public static class RangeIteratorView implements SearchRangeIterator { private boolean noMore; private boolean itFinished; - public RangeIteratorView(final RspRangeIterator it, final long offset, - final long rangesEnd) { + public RangeIteratorView(final RspRangeIterator it, final long offset, final long rangesEnd) { this.it = it; this.offset = offset; this.rangesEnd = rangesEnd; @@ -459,8 +448,7 @@ public void next() { @Override public boolean advance(int v) { - throw new UnsupportedOperationException( - "advance is not supported on RangeIteratorView"); + throw new UnsupportedOperationException("advance is not supported on RangeIteratorView"); } @Override diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/rsp/RspReverseIterator.java b/DB/src/main/java/io/deephaven/db/v2/utils/rsp/RspReverseIterator.java index ab425026af9..cc78369d663 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/rsp/RspReverseIterator.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/rsp/RspReverseIterator.java @@ -9,8 +9,7 @@ public class RspReverseIterator implements SafeCloseable { private RspArray.SpanCursor rp; - // Iterator pointing to the next value to deliver in the current RB Container if there is one, - // null otherwise. + // Iterator pointing to the next value to deliver in the current RB Container if there is one, null otherwise. 
private ShortAdvanceIterator ri; // Resource to hold the container that ri points to. private SpanView riView; @@ -99,17 +98,16 @@ private void computeNextFromFullSpan() { } /** - * @return current iterator value, without advancing it. A valid call to next() should have - * happened before calling this method. + * @return current iterator value, without advancing it. A valid call to next() should have happened before calling + * this method. */ public long current() { return current; } /** - * This method should be called: * After the iterator is created and before calling any other - * methods; if it returns false, calling any other methods results in undefined behavior. * - * Right after a call to next, similar to above. + * This method should be called: * After the iterator is created and before calling any other methods; if it returns + * false, calling any other methods results in undefined behavior. * Right after a call to next, similar to above. * * @return true if a call to next leads to a valid next iterator value. 
*/ @@ -137,8 +135,7 @@ private void setAdvanceOverranState() { if (flen > 0) { current = key; } else { - try (SpanView res = - workDataPerThread.get().borrowSpanView(rp.arr(), rp.arrIdx(), spanInfo, span)) { + try (SpanView res = workDataPerThread.get().borrowSpanView(rp.arr(), rp.arrIdx(), spanInfo, span)) { current = key | res.getContainer().first(); } } diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/singlerange/SingleRange.java b/DB/src/main/java/io/deephaven/db/v2/utils/singlerange/SingleRange.java index 7b8dfaeb29c..92b7e4cd1c7 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/singlerange/SingleRange.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/singlerange/SingleRange.java @@ -148,8 +148,8 @@ public final TreeIndexImpl ixInsertRange(final long startKey, final long endKey) } if (overlapsOrAdjacentToRange(startKey, endKey)) { return make( - Math.min(startKey, rangeStart()), - Math.max(endKey, rangeEnd())); + Math.min(startKey, rangeStart()), + Math.max(endKey, rangeEnd())); } if (startKey < rangeStart()) { return TreeIndexImpl.twoRanges(startKey, endKey, rangeStart(), rangeEnd()); @@ -158,15 +158,14 @@ public final TreeIndexImpl ixInsertRange(final long startKey, final long endKey) } @Override - public final TreeIndexImpl ixInsertSecondHalf( - final LongChunk keys, final int offset, final int length) { - return TreeIndexImpl.fromChunk(keys, offset, length, false).ixInsertRange(rangeStart(), - rangeEnd()); + public final TreeIndexImpl ixInsertSecondHalf(final LongChunk keys, final int offset, + final int length) { + return TreeIndexImpl.fromChunk(keys, offset, length, false).ixInsertRange(rangeStart(), rangeEnd()); } @Override - public final TreeIndexImpl ixRemoveSecondHalf( - final LongChunk keys, final int offset, final int length) { + public final TreeIndexImpl ixRemoveSecondHalf(final LongChunk keys, final int offset, + final int length) { return ixRemove(TreeIndexImpl.fromChunk(keys, offset, length, true)); } @@ -178,8 +177,7 @@ public 
final TreeIndexImpl ixAppendRange(final long startKey, final long endKey) if (rangeEnd() + 1 == startKey) { return make(rangeStart(), endKey); } - throw new IllegalStateException( - "startKey(=" + startKey + ") < rangeEnd(=" + rangeEnd() + ")"); + throw new IllegalStateException("startKey(=" + startKey + ") < rangeEnd(=" + rangeEnd() + ")"); } @Override @@ -200,8 +198,7 @@ public final TreeIndexImpl ixRemove(final long key) { } @Override - public final TreeIndexImpl ixSubindexByPosOnNew(final long startPos, - final long endPosExclusive) { + public final TreeIndexImpl ixSubindexByPosOnNew(final long startPos, final long endPosExclusive) { final long endPos = endPosExclusive - 1; // make inclusive. if (endPos < startPos || endPos < 0) { return TreeIndexImpl.EMPTY; @@ -215,8 +212,8 @@ public final TreeIndexImpl ixSubindexByPosOnNew(final long startPos, return ixCowRef(); } return make( - Math.max(rangeStart() + startPos, rangeStart()), - Math.min(rangeStart() + endPos, rangeEnd())); + Math.max(rangeStart() + startPos, rangeStart()), + Math.min(rangeStart() + endPos, rangeEnd())); } @Override @@ -228,8 +225,8 @@ public final TreeIndexImpl ixSubindexByKeyOnNew(final long startKey, final long return ixCowRef(); } return make( - Math.max(startKey, rangeStart()), - Math.min(endKey, rangeEnd())); + Math.max(startKey, rangeStart()), + Math.min(endKey, rangeEnd())); } @Override @@ -242,7 +239,7 @@ public final long ixGet(final long pos) { @Override public final void ixGetKeysForPositions(final PrimitiveIterator.OfLong inputPositions, - final LongConsumer outputKeys) { + final LongConsumer outputKeys) { final long sz = ixCardinality(); while (inputPositions.hasNext()) { final long pos = inputPositions.nextLong(); @@ -332,7 +329,7 @@ public long binarySearchValue(Index.TargetComparator tc, final int dir) { return -1; } return curr = IndexUtilities.rangeSearch(curr, last, - (long k) -> tc.compareTargetTo(k, dir)); + (long k) -> tc.compareTargetTo(k, dir)); } } @@ -382,8 +379,7 
@@ public boolean advance(long v) { @Override public long binarySearchValue(Index.TargetComparator targetComparator, int direction) { - throw new UnsupportedOperationException( - "Reverse iterator does not support binary search."); + throw new UnsupportedOperationException("Reverse iterator does not support binary search."); } } @@ -458,8 +454,7 @@ public final boolean ixIsEmpty() { @Override public final TreeIndexImpl ixUpdate(final TreeIndexImpl added, final TreeIndexImpl removed) { - if (removed.ixIsEmpty() || removed.ixLastKey() < rangeStart() - || removed.ixFirstKey() > rangeEnd()) { + if (removed.ixIsEmpty() || removed.ixLastKey() < rangeStart() || removed.ixFirstKey() > rangeEnd()) { if (added.ixIsEmpty()) { return this; } @@ -484,8 +479,8 @@ public final TreeIndexImpl ixUpdate(final TreeIndexImpl added, final TreeIndexIm } final RspBitmap ans = toRsp(); ans.updateUnsafeNoWriteCheck( - TreeIndexImpl.asRspBitmap(added), - TreeIndexImpl.asRspBitmap(removed)); + TreeIndexImpl.asRspBitmap(added), + TreeIndexImpl.asRspBitmap(removed)); if (ans.isEmpty()) { return TreeIndexImpl.EMPTY; } @@ -499,8 +494,7 @@ public final TreeIndexImpl ixRemove(final TreeIndexImpl removed) { } private TreeIndexImpl minus(final TreeIndexImpl removed) { - if (removed.ixIsEmpty() || removed.ixLastKey() < rangeStart() - || removed.ixFirstKey() > rangeEnd()) { + if (removed.ixIsEmpty() || removed.ixLastKey() < rangeStart() || removed.ixFirstKey() > rangeEnd()) { return this; } if (ixSubsetOf(removed)) { @@ -666,7 +660,7 @@ public final TreeIndexImpl ixShiftInPlace(final long shiftAmount) { @Override public final TreeIndexImpl ixInsert(final TreeIndexImpl added) { if (added.ixIsEmpty() || - (rangeStart() <= added.ixFirstKey() && added.ixLastKey() <= rangeEnd())) { + (rangeStart() <= added.ixFirstKey() && added.ixLastKey() <= rangeEnd())) { return this; } if (added instanceof SingleRange) { @@ -677,8 +671,7 @@ public final TreeIndexImpl ixInsert(final TreeIndexImpl added) { } @Override - 
public final TreeIndexImpl ixInsertWithShift(final long shiftAmount, - final TreeIndexImpl other) { + public final TreeIndexImpl ixInsertWithShift(final long shiftAmount, final TreeIndexImpl other) { if (other.ixIsEmpty()) { return this; } @@ -694,8 +687,7 @@ public final TreeIndexImpl ixInsertWithShift(final long shiftAmount, } @Override - public final OrderedKeys ixGetOrderedKeysByPosition(final long startPositionInclusive, - final long length) { + public final OrderedKeys ixGetOrderedKeysByPosition(final long startPositionInclusive, final long length) { if (startPositionInclusive >= ixCardinality() || length == 0) { return OrderedKeys.EMPTY; } @@ -705,16 +697,15 @@ public final OrderedKeys ixGetOrderedKeysByPosition(final long startPositionIncl } @Override - public final OrderedKeys ixGetOrderedKeysByKeyRange(final long startKeyInclusive, - final long endKeyInclusive) { + public final OrderedKeys ixGetOrderedKeysByKeyRange(final long startKeyInclusive, final long endKeyInclusive) { if (startKeyInclusive > rangeEnd() || - endKeyInclusive < rangeStart() || - endKeyInclusive < startKeyInclusive) { + endKeyInclusive < rangeStart() || + endKeyInclusive < startKeyInclusive) { return OrderedKeys.EMPTY; } return new SingleRangeOrderedKeys( - Math.max(startKeyInclusive, rangeStart()), - Math.min(endKeyInclusive, rangeEnd())); + Math.max(startKeyInclusive, rangeStart()), + Math.min(endKeyInclusive, rangeEnd())); } @Override diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/singlerange/SingleRangeMixin.java b/DB/src/main/java/io/deephaven/db/v2/utils/singlerange/SingleRangeMixin.java index 3678613e7bc..630bb4ba5a9 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/singlerange/SingleRangeMixin.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/singlerange/SingleRangeMixin.java @@ -21,8 +21,7 @@ default boolean forEachLongRange(final LongRangeAbortableConsumer larc) { return larc.accept(rangeStart(), rangeEnd()); } - default OrderedKeys 
getOrderedKeysByPosition(final long startPositionInclusive, - final long length) { + default OrderedKeys getOrderedKeysByPosition(final long startPositionInclusive, final long length) { if (startPositionInclusive >= size() || length == 0) { return OrderedKeys.EMPTY; } @@ -31,16 +30,15 @@ default OrderedKeys getOrderedKeysByPosition(final long startPositionInclusive, return new SingleRangeOrderedKeys(s, e); } - default OrderedKeys getOrderedKeysByKeyRange(final long startKeyInclusive, - final long endKeyInclusive) { + default OrderedKeys getOrderedKeysByKeyRange(final long startKeyInclusive, final long endKeyInclusive) { if (startKeyInclusive > rangeEnd() || - endKeyInclusive < rangeStart() || - endKeyInclusive < startKeyInclusive) { + endKeyInclusive < rangeStart() || + endKeyInclusive < startKeyInclusive) { return OrderedKeys.EMPTY; } return new SingleRangeOrderedKeys( - Math.max(startKeyInclusive, rangeStart()), - Math.min(endKeyInclusive, rangeEnd())); + Math.max(startKeyInclusive, rangeStart()), + Math.min(endKeyInclusive, rangeEnd())); } default OrderedKeys.Iterator getOrderedKeysIterator() { diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/sortedranges/SortedRanges.java b/DB/src/main/java/io/deephaven/db/v2/utils/sortedranges/SortedRanges.java index 59e7385b221..d09f0fc33a1 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/sortedranges/SortedRanges.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/sortedranges/SortedRanges.java @@ -17,7 +17,7 @@ public abstract class SortedRanges extends RefCountedCow implements TreeIndexImpl { private static final IntCounterMetric sortedRangesToRspConversions = - new IntCounterMetric("sortedRangesToRspConversions"); + new IntCounterMetric("sortedRangesToRspConversions"); public abstract SortedRanges deepCopy(); @@ -25,73 +25,59 @@ public final SortedRanges self() { return this; } - protected static final int INITIAL_SIZE = - Configuration.getInstance().getIntegerForClassWithDefault( + protected static final int 
INITIAL_SIZE = Configuration.getInstance().getIntegerForClassWithDefault( SortedRanges.class, "initialSize", 2); public static final boolean DEBUG = Configuration.getInstance().getBooleanForClassWithDefault( - SortedRanges.class, "debug", false); + SortedRanges.class, "debug", false); - public static final int LONG_DENSE_MAX_CAPACITY = - Configuration.getInstance().getIntegerForClassWithDefault( + public static final int LONG_DENSE_MAX_CAPACITY = Configuration.getInstance().getIntegerForClassWithDefault( SortedRanges.class, "longDenseMaxCapacity", 256); - public static final int LONG_SPARSE_MAX_CAPACITY = - Configuration.getInstance().getIntegerForClassWithDefault( + public static final int LONG_SPARSE_MAX_CAPACITY = Configuration.getInstance().getIntegerForClassWithDefault( SortedRanges.class, "longSparseCapacity", 4096); - public static final int INT_DENSE_MAX_CAPACITY = - Configuration.getInstance().getIntegerForClassWithDefault( - SortedRanges.class, "intDenseMaxCapacity", - arraySizeRoundingInt(2 * LONG_DENSE_MAX_CAPACITY)); + public static final int INT_DENSE_MAX_CAPACITY = Configuration.getInstance().getIntegerForClassWithDefault( + SortedRanges.class, "intDenseMaxCapacity", arraySizeRoundingInt(2 * LONG_DENSE_MAX_CAPACITY)); - public static final int INT_SPARSE_MAX_CAPACITY = - Configuration.getInstance().getIntegerForClassWithDefault( - SortedRanges.class, "intSparseMaxCapacity", - arraySizeRoundingInt(2 * LONG_SPARSE_MAX_CAPACITY)); + public static final int INT_SPARSE_MAX_CAPACITY = Configuration.getInstance().getIntegerForClassWithDefault( + SortedRanges.class, "intSparseMaxCapacity", arraySizeRoundingInt(2 * LONG_SPARSE_MAX_CAPACITY)); - public static final int SHORT_MAX_CAPACITY = - Configuration.getInstance().getIntegerForClassWithDefault( - SortedRanges.class, "shortMaxCapacity", 4096 - 6); // 12 bytes of array object overhead - // = 6 shorts + public static final int SHORT_MAX_CAPACITY = Configuration.getInstance().getIntegerForClassWithDefault( + 
SortedRanges.class, "shortMaxCapacity", 4096 - 6); // 12 bytes of array object overhead = 6 shorts public static final int ELEMENTS_PER_BLOCK_DENSE_THRESHOLD = - Configuration.getInstance().getIntegerForClassWithDefault( - SortedRanges.class, "elementsPerBlockDenseThreshold", 16); + Configuration.getInstance().getIntegerForClassWithDefault( + SortedRanges.class, "elementsPerBlockDenseThreshold", 16); public static final int MAX_CAPACITY = Math.max(INT_SPARSE_MAX_CAPACITY, SHORT_MAX_CAPACITY); static { Assert.assertion(ELEMENTS_PER_BLOCK_DENSE_THRESHOLD >= 2, - "ELEMENTS_PER_BLOCK_DENSE_THRESHOLD >= 2"); + "ELEMENTS_PER_BLOCK_DENSE_THRESHOLD >= 2"); Assert.assertion(LONG_DENSE_MAX_CAPACITY <= LONG_SPARSE_MAX_CAPACITY, - "LONG_DENSE_MAX_CAPACITY <= LONG_SPARSE_MAX_CAPACITY"); + "LONG_DENSE_MAX_CAPACITY <= LONG_SPARSE_MAX_CAPACITY"); Assert.assertion(INT_DENSE_MAX_CAPACITY <= INT_SPARSE_MAX_CAPACITY, - "INT_DENSE_MAX_CAPACITY <= INT_SPARSE_MAX_CAPACITY"); + "INT_DENSE_MAX_CAPACITY <= INT_SPARSE_MAX_CAPACITY"); Assert.assertion(LONG_SPARSE_MAX_CAPACITY <= INT_SPARSE_MAX_CAPACITY, - "LONG_SPARSE_MAX_CAPACITY <= INT_SPARSE_MAX_CAPACITY"); + "LONG_SPARSE_MAX_CAPACITY <= INT_SPARSE_MAX_CAPACITY"); Assert.assertion(LONG_DENSE_MAX_CAPACITY <= INT_DENSE_MAX_CAPACITY, - "LONG_DENSE_MAX_CAPACITY <= INT_DENSE_MAX_CAPACITY"); + "LONG_DENSE_MAX_CAPACITY <= INT_DENSE_MAX_CAPACITY"); } // *_EXTENT properties must be a power of two. 
- protected static final int LONG_EXTENT = - Configuration.getInstance().getIntegerForClassWithDefault( + protected static final int LONG_EXTENT = Configuration.getInstance().getIntegerForClassWithDefault( SortedRanges.class, "longExtent", 64); - protected static final int INT_EXTENT = - Configuration.getInstance().getIntegerForClassWithDefault( + protected static final int INT_EXTENT = Configuration.getInstance().getIntegerForClassWithDefault( SortedRanges.class, "intExtent", 2 * LONG_EXTENT); - protected static final int SHORT_EXTENT = - Configuration.getInstance().getIntegerForClassWithDefault( + protected static final int SHORT_EXTENT = Configuration.getInstance().getIntegerForClassWithDefault( SortedRanges.class, "shortExtent", 2 * INT_EXTENT); - protected static final boolean POOL_ARRAYS = - Configuration.getInstance().getBooleanForClassWithDefault( + protected static final boolean POOL_ARRAYS = Configuration.getInstance().getBooleanForClassWithDefault( SortedRanges.class, "poolArrays", false); - public static final boolean USE_RANGES_ARRAY = - Configuration.getInstance().getBooleanForClassWithDefault( + public static final boolean USE_RANGES_ARRAY = Configuration.getInstance().getBooleanForClassWithDefault( SortedRanges.class, "useRangesArray", true); // Example: @@ -131,8 +117,8 @@ public static SortedRanges makeEmpty() { } public static SortedRanges tryMakeForKnownRangeFinalCapacityLowerBound( - final int initialCapacity, final int finalCapacityLowerBound, final long first, - final long last, final boolean isDense) { + final int initialCapacity, final int finalCapacityLowerBound, final long first, final long last, + final boolean isDense) { final long range = last - first; final long offset = first; if (range <= Short.MAX_VALUE) { @@ -156,19 +142,16 @@ public static SortedRanges tryMakeForKnownRangeFinalCapacityLowerBound( } // Implicitly this is unknown max capacity. 
- public static SortedRanges makeForKnownRange(final long first, final long last, - final boolean isDense) { + public static SortedRanges makeForKnownRange(final long first, final long last, final boolean isDense) { return tryMakeForKnownRangeUnknownMaxCapacity(INITIAL_SIZE, first, last, isDense); } - public static SortedRanges tryMakeForKnownRangeUnknownMaxCapacity(final int initialCapacity, - final long first, final long last, final boolean isDense) { - return tryMakeForKnownRangeFinalCapacityLowerBound(initialCapacity, initialCapacity, first, - last, isDense); + public static SortedRanges tryMakeForKnownRangeUnknownMaxCapacity(final int initialCapacity, final long first, + final long last, final boolean isDense) { + return tryMakeForKnownRangeFinalCapacityLowerBound(initialCapacity, initialCapacity, first, last, isDense); } - public static SortedRanges tryMakeForKnownRangeKnownCount(final int count, final long first, - final long last) { + public static SortedRanges tryMakeForKnownRangeKnownCount(final int count, final long first, final long last) { final boolean isDense = isDenseLongSample(first, last, count); return tryMakeForKnownRangeFinalCapacityLowerBound(count, count, first, last, isDense); } @@ -618,8 +601,7 @@ public final void closeImpl() { } } - public static final class RangeIterator extends RangeIteratorBase - implements Index.RangeIterator { + public static final class RangeIterator extends RangeIteratorBase implements Index.RangeIterator { private RangeIterator(final SortedRanges sar) { super(sar); } @@ -665,8 +647,7 @@ public Index.RangeIterator getRangeIterator() { return new RangeIterator(this); } - private static final class SearchIterator extends RangeIteratorBase - implements Index.SearchIterator { + private static final class SearchIterator extends RangeIteratorBase implements Index.SearchIterator { private boolean pendingNext = false; private SearchIterator(final SortedRanges sar) { @@ -715,10 +696,10 @@ public boolean advance(long v) { } 
private static int searchPos( - final MutableLong outData, - final SortedRanges sar, - final IndexUtilities.Comparator comp, final int startPos) { // startPos points to the - // beginning of a range. + final MutableLong outData, + final SortedRanges sar, + final IndexUtilities.Comparator comp, final int startPos) { // startPos points to the beginning of a + // range. final long endPosUnpackedData = sar.unpackedGet(sar.count - 1); final long endPosUnpackedValue = Math.abs(endPosUnpackedData); int c = comp.directionToTargetFrom(endPosUnpackedValue); @@ -784,7 +765,7 @@ public long binarySearchValue(final ReadOnlyIndex.TargetComparator comp, final i return -1; } currRangeStart = IndexUtilities.rangeSearch(currRangeStart, currRangeEnd, - (final long v) -> comp.compareTargetTo(v, dir)); + (final long v) -> comp.compareTargetTo(v, dir)); return currRangeStart; } if (sar == null || nextRangeIdx == sar.count) { @@ -817,7 +798,7 @@ public long binarySearchValue(final ReadOnlyIndex.TargetComparator comp, final i if (nextNeg) { final long searchEndValue = -nextData - 1; currRangeStart = IndexUtilities.rangeSearch(data, searchEndValue, - (final long v) -> comp.compareTargetTo(v, dir)); + (final long v) -> comp.compareTargetTo(v, dir)); currRangeEnd = -nextData; nextRangeIdx = next + 1; return currRangeStart; @@ -960,8 +941,7 @@ public boolean advance(final long v) { @Override public long binarySearchValue(ReadOnlyIndex.TargetComparator comp, int dir) { - throw new UnsupportedOperationException( - "Reverse iterator does not support binary search."); + throw new UnsupportedOperationException("Reverse iterator does not support binary search."); } } @@ -974,7 +954,7 @@ public final long getCardinality() { } public final void getKeysForPositions(final PrimitiveIterator.OfLong inputPositions, - final LongConsumer outputKeys) { + final LongConsumer outputKeys) { if (!inputPositions.hasNext()) { return; } @@ -1141,11 +1121,9 @@ public final boolean overlapsRange(final long start, 
final long end) { } // startIdx is the array position index where to begin the search for packedStart. - // returns -1 if this array overlaps the provided range, or if it doesn't, returns the array - // position index + // returns -1 if this array overlaps the provided range, or if it doesn't, returns the array position index // where to begin a subsequent call for a later range that might overlap. - private int overlapsRangeInternal(final int startIdx, final long packedStart, - final long packedEnd) { + private int overlapsRangeInternal(final int startIdx, final long packedStart, final long packedEnd) { final int iStart = absRawBinarySearch(packedStart, startIdx, count - 1); // is < count since we know start < end < first(). final long iStartData = packedGet(iStart); @@ -1205,10 +1183,8 @@ public final SortedRanges retainRange(long start, long end) { return ans; } - // Guarantee for the caller: if this method returns null, no state change has been made on the - // this object. - private SortedRanges packedAppend(final long packedData, final long unpackedData, - final boolean writeCheck) { + // Guarantee for the caller: if this method returns null, no state change has been made on the this object. + private SortedRanges packedAppend(final long packedData, final long unpackedData, final boolean writeCheck) { SortedRanges ans = ensureCanAppend(count, unpackedData, writeCheck); if (ans == null) { return null; @@ -1221,8 +1197,7 @@ private SortedRanges packedAppend(final long packedData, final long unpackedData return ans; } - // Guarantee for the caller: if this method returns null, no state change has been made on the - // this object. + // Guarantee for the caller: if this method returns null, no state change has been made on the this object. 
private SortedRanges unpackedAppend(final long unpackedData, final boolean writeCheck) { SortedRanges ans = ensureCanAppend(count, unpackedData, writeCheck); if (ans == null) { @@ -1232,11 +1207,10 @@ private SortedRanges unpackedAppend(final long unpackedData, final boolean write return ans; } - // Guarantee for the caller: if this method returns null, no state change has been made on the - // this object. + // Guarantee for the caller: if this method returns null, no state change has been made on the this object. private SortedRanges packedAppend2( - final long packedData1, final long packedData2, final long unpackedData1, - final long unpackedData2, final boolean writeCheck) { + final long packedData1, final long packedData2, final long unpackedData1, final long unpackedData2, + final boolean writeCheck) { SortedRanges ans = ensureCanAppend(count + 1, unpackedData2, writeCheck); if (ans == null) { return null; @@ -1254,15 +1228,13 @@ private SortedRanges packedAppend2( // required on entry: out.canWrite(). // sar.first() <= start && end <= sar.last() // returns null if we exceed maxCapacity in the process of building the answer - // (which can happen if you have, say, a big single range and retain a gazillion individual - // elements). - // Writes to iStartOut an array position index into sar where to continue the intersection for - // ranges after + // (which can happen if you have, say, a big single range and retain a gazillion individual elements). + // Writes to iStartOut an array position index into sar where to continue the intersection for ranges after // the one provided. 
private static SortedRanges intersectRangeImplStep( - SortedRanges out, - final SortedRanges sar, - final int iStart, final long start, final long end, final MutableInt iStartOut) { + SortedRanges out, + final SortedRanges sar, + final int iStart, final long start, final long end, final MutableInt iStartOut) { if (!out.fits(start, end)) { return null; } @@ -1356,10 +1328,10 @@ private static SortedRanges intersectRangeImplStep( } private static ThreadLocal workSortedRangesLongPerThread = - ThreadLocal.withInitial(() -> new SortedRangesLong(new long[MAX_CAPACITY], 0, 0)); + ThreadLocal.withInitial(() -> new SortedRangesLong(new long[MAX_CAPACITY], 0, 0)); private static boolean forEachLongRangeFromLongRangesArray( - final long[] arr, final int count, final LongRangeAbortableConsumer lrac) { + final long[] arr, final int count, final LongRangeAbortableConsumer lrac) { long pendingStart = -1; for (int i = 0; i < count; ++i) { final long data = arr[i]; @@ -1381,8 +1353,7 @@ private static boolean forEachLongRangeFromLongRangesArray( return true; } - private static TreeIndexImpl makeRspBitmapFromLongRangesArray(final long[] ranges, - final int count) { + private static TreeIndexImpl makeRspBitmapFromLongRangesArray(final long[] ranges, final int count) { final RspBitmapSequentialBuilder builder = new RspBitmapSequentialBuilder(); forEachLongRangeFromLongRangesArray(ranges, count, (final long start, final long end) -> { builder.appendRange(start, end); @@ -1428,7 +1399,7 @@ public final boolean isSparse() { } private static TreeIndexImpl makeTreeIndexImplFromLongRangesArray( - final long[] ranges, final int count, final long card, final SortedRanges out) { + final long[] ranges, final int count, final long card, final SortedRanges out) { if (count == 0) { return TreeIndexImpl.EMPTY; } @@ -1496,22 +1467,22 @@ private static TreeIndexImpl makeTreeIndexImplFromLongRangesArray( // Neither argument can be empty. 
private static SortedRangesLong intersect( - final SortedRanges sr, - final TreeIndexImpl tix) { + final SortedRanges sr, + final TreeIndexImpl tix) { return intersect(sr, tix, false); } // Neither argument can be empty. private static SortedRangesLong intersect( - final SortedRanges sr, - final TreeIndexImpl tix, - final boolean takeComplement) { + final SortedRanges sr, + final TreeIndexImpl tix, + final boolean takeComplement) { final SortedRangesLong res = workSortedRangesLongPerThread.get(); res.reset(); try (ReadOnlyIndex.RangeIterator it1 = sr.getRangeIterator(); - ReadOnlyIndex.RangeIterator it2 = takeComplement - ? new ComplementRangeIterator(tix.ixRangeIterator()) - : tix.ixRangeIterator()) { + ReadOnlyIndex.RangeIterator it2 = takeComplement + ? new ComplementRangeIterator(tix.ixRangeIterator()) + : tix.ixRangeIterator()) { it1.next(); it2.next(); long s1 = it1.currentRangeStart(); @@ -1625,7 +1596,7 @@ private static SortedRangesLong union(final SortedRanges sr1, final SortedRanges final SortedRangesLong res = workSortedRangesLongPerThread.get(); res.reset(); try (ReadOnlyIndex.RangeIterator it1 = sr1.getRangeIterator(); - ReadOnlyIndex.RangeIterator it2 = sr2.getRangeIterator()) { + ReadOnlyIndex.RangeIterator it2 = sr2.getRangeIterator()) { it1.next(); it2.next(); long s1 = it1.currentRangeStart(); @@ -1758,8 +1729,7 @@ final TreeIndexImpl retain(final TreeIndexImpl tix) { return makeTreeIndexImplFromLongRangesArray(sr.data, sr.count, sr.cardinality, this); } - private static boolean retainLegacy(final MutableObject sarOut, - final TreeIndexImpl tix) { + private static boolean retainLegacy(final MutableObject sarOut, final TreeIndexImpl tix) { try (ReadOnlyIndex.RangeIterator rangeIter = tix.ixRangeIterator()) { SortedRanges sar = sarOut.getValue(); final long first = sar.first(); @@ -1984,21 +1954,20 @@ public static TreeIndexImpl unionOnNew(final SortedRanges sar, final SortedRange return makeTreeIndexImplFromLongRangesArray(sr.data, sr.count, 
sr.cardinality, null); } - public static SortedRanges unionOnNewLegacy(final SortedRanges sar, - final SortedRanges otherSar) { + public static SortedRanges unionOnNewLegacy(final SortedRanges sar, final SortedRanges otherSar) { final long unionFirst = Math.min(sar.first(), otherSar.first()); final long unionLast = Math.max(sar.last(), otherSar.last()); final int count = sar.count(); final int otherCount = otherSar.count(); final SortedRanges out = SortedRanges.tryMakeForKnownRangeFinalCapacityLowerBound( - Math.max(count, otherCount), - count + otherCount, - unionFirst, - unionLast, - sar.isDense() && otherSar.isDense()); + Math.max(count, otherCount), + count + otherCount, + unionFirst, + unionLast, + sar.isDense() && otherSar.isDense()); if (out != null) { try (final Index.RangeIterator sarIter = sar.getRangeIterator(); - final Index.RangeIterator otherIter = otherSar.getRangeIterator()) { + final Index.RangeIterator otherIter = otherSar.getRangeIterator()) { SortedRanges.unionOnNewHelper(out, sarIter, otherIter); } } @@ -2007,7 +1976,7 @@ public static SortedRanges unionOnNewLegacy(final SortedRanges sar, // {riter1, riter2}.hasNext() true on entry. private static void unionOnNewHelper(SortedRanges out, final Index.RangeIterator riter1, - final Index.RangeIterator riter2) { + final Index.RangeIterator riter2) { riter1.next(); long start1 = riter1.currentRangeStart(); long end1 = riter1.currentRangeEnd(); @@ -2099,7 +2068,7 @@ public final TreeIndexImpl insertImpl(final SortedRanges other, final boolean wr final SortedRangesLong sr = union(this, other); if (sr != null) { return makeTreeIndexImplFromLongRangesArray(sr.data, sr.count, sr.cardinality, - (!writeCheck || canWrite()) ? this : null); + (!writeCheck || canWrite()) ? this : null); } } final RspBitmap rb = ixToRspOnNew(); @@ -2111,11 +2080,10 @@ public final TreeIndexImpl insertImpl(final SortedRanges other, final boolean wr // Assumption: none of the provided SortedRanges are empty. 
// We can't offer a guarantee of returning false means we didn't modify out; // we /can/ offer the guarantee that, under a false return, the partial result - // left in sarHolder can be used to repeat the operation (presumably on a different - // TreeIndexImpl type) + // left in sarHolder can be used to repeat the operation (presumably on a different TreeIndexImpl type) // to produce the correct result. - private static boolean insertInternal(final MutableObject sarHolder, - final SortedRanges other, final boolean writeCheckArg) { + private static boolean insertInternal(final MutableObject sarHolder, final SortedRanges other, + final boolean writeCheckArg) { int iOther = 0; long pendingStart = -1; SortedRanges sar = sarHolder.getValue(); @@ -2134,8 +2102,8 @@ private static boolean insertInternal(final MutableObject sarHolde final long deltaCard = endPacked - startPacked + 1; iAdd.setValue(sar.absRawBinarySearch(startPacked, iAdd.intValue(), sar.count - 1)); final SortedRanges ans = addRangePackedWithStart( - sar, iAdd.intValue(), startPacked, endPacked, pendingStart, -iData, deltaCard, - iAdd, writeCheck); + sar, iAdd.intValue(), startPacked, endPacked, pendingStart, -iData, deltaCard, iAdd, + writeCheck); if (ans == null) { sarHolder.setValue(sar); return false; @@ -2152,10 +2120,9 @@ private static boolean insertInternal(final MutableObject sarHolde } else { if (pendingStart != -1) { final long pendingStartPacked = sar.pack(pendingStart); - iAdd.setValue( - sar.absRawBinarySearch(pendingStartPacked, iAdd.intValue(), sar.count - 1)); + iAdd.setValue(sar.absRawBinarySearch(pendingStartPacked, iAdd.intValue(), sar.count - 1)); final SortedRanges ans = addPackedWithStart( - sar, iAdd.intValue(), pendingStartPacked, pendingStart, iAdd, writeCheck); + sar, iAdd.intValue(), pendingStartPacked, pendingStart, iAdd, writeCheck); if (ans == null) { sarHolder.setValue(sar); return false; @@ -2175,10 +2142,9 @@ private static boolean insertInternal(final MutableObject 
sarHolde } if (pendingStart != -1) { final long pendingStartPacked = sar.pack(pendingStart); - final int iStart = - sar.absRawBinarySearch(pendingStartPacked, iAdd.intValue(), sar.count - 1); + final int iStart = sar.absRawBinarySearch(pendingStartPacked, iAdd.intValue(), sar.count - 1); final SortedRanges ans = addPackedWithStart( - sar, iStart, pendingStartPacked, pendingStart, null, writeCheck); + sar, iStart, pendingStartPacked, pendingStart, null, writeCheck); if (ans == null) { sarHolder.setValue(sar); return false; @@ -2193,12 +2159,10 @@ private static boolean insertInternal(final MutableObject sarHolde // We can't offer a guarantee of returning null means we didn't modify sar; // we /can/ offer the guarantee that, under a false return, the partial result - // left in sarOut can be used to repeat the operation (presumably on a different TreeIndexImpl - // type) + // left in sarOut can be used to repeat the operation (presumably on a different TreeIndexImpl type) // to produce the correct result. // !isEmpty() && rit.hasNext() true on entry. - static boolean removeLegacy(final MutableObject sarOut, - final Index.RangeIterator rit) { + static boolean removeLegacy(final MutableObject sarOut, final Index.RangeIterator rit) { try { final MutableInt iRm = new MutableInt(0); SortedRanges sar = sarOut.getValue(); @@ -2223,7 +2187,7 @@ static boolean removeLegacy(final MutableObject sarOut, int i = iRm.intValue(); i = sar.absRawBinarySearch(packedStart, i, sar.count - 1); final SortedRanges ans = removeRangePackedWithStart( - sar, i, packedStart, packedEnd, start, end, iRm, writeCheck); + sar, i, packedStart, packedEnd, start, end, iRm, writeCheck); if (ans == null) { sarOut.setValue(sar); return false; @@ -2245,8 +2209,7 @@ static boolean removeLegacy(final MutableObject sarOut, } // !isEmpty() on entry. 
- public final TreeIndexImpl invertRangeOnNew(final long start, final long end, - final long maxPosition) { + public final TreeIndexImpl invertRangeOnNew(final long start, final long end, final long maxPosition) { final long packedStart = pack(start); int i = 0; long pos = 0; @@ -2266,8 +2229,7 @@ public final TreeIndexImpl invertRangeOnNew(final long start, final long end, if (resultStart > maxPosition) { return TreeIndexImpl.EMPTY; } - final long resultEnd = - Math.min(rangeOffsetPos + packedEnd - pendingStart, maxPosition); + final long resultEnd = Math.min(rangeOffsetPos + packedEnd - pendingStart, maxPosition); return SingleRange.make(resultStart, resultEnd); } pos += rangeEnd - pendingStart; @@ -2292,7 +2254,7 @@ public final TreeIndexImpl invertRangeOnNew(final long start, final long end, return null; } return SingleRange.make( - pos, Math.min(maxPosition, pos + packedEnd - data)); + pos, Math.min(maxPosition, pos + packedEnd - data)); } ++pos; pendingStart = data; @@ -2311,9 +2273,9 @@ public final TreeIndexImpl invertRangeOnNew(final long start, final long end, // !isEmpty() && rit.hasNext() true on entry. 
public final boolean invertOnNew( - final Index.RangeIterator rit, - final TreeIndexImplSequentialBuilder builder, - final long maxPosition) { + final Index.RangeIterator rit, + final TreeIndexImplSequentialBuilder builder, + final long maxPosition) { rit.next(); long start = rit.currentRangeStart(); long end = rit.currentRangeEnd(); @@ -2336,8 +2298,7 @@ public final boolean invertOnNew( if (resultStart > maxPosition) { return true; } - final long resultEnd = - Math.min(rangeOffsetPos + packedEnd - pendingStart, maxPosition); + final long resultEnd = Math.min(rangeOffsetPos + packedEnd - pendingStart, maxPosition); builder.appendRange(resultStart, resultEnd); if (resultEnd == maxPosition || !rit.hasNext()) { return true; @@ -2418,7 +2379,7 @@ public final OrderedKeys getOrderedKeysByPosition(final long pos, long length) { } public final OrderedKeys getOrderedKeysByPositionWithStart( - final long iStartPos, final int istart, final long startPosForOK, final long lengthForOK) { + final long iStartPos, final int istart, final long startPosForOK, final long lengthForOK) { int i = istart; long iPos = iStartPos; long iData = packedGet(i); @@ -2444,8 +2405,8 @@ public final OrderedKeys getOrderedKeysByPositionWithStart( startOffset = 0; endIdx = i; endOffset = 0; - return new SortedRangesOrderedKeys(this, startPosForOK, startIdx, - startOffset, endIdx, endOffset, 1L); + return new SortedRangesOrderedKeys(this, startPosForOK, startIdx, startOffset, endIdx, + endOffset, 1L); } final long nextData = packedGet(i + 1); if (nextData < 0) { @@ -2469,8 +2430,8 @@ public final OrderedKeys getOrderedKeysByPositionWithStart( if (iPos >= endPositionInclusive) { endIdx = startIdx; endOffset = startOffset + lengthForOK - 1; - return new SortedRangesOrderedKeys(this, startPosForOK, startIdx, startOffset, endIdx, - endOffset, lengthForOK); + return new SortedRangesOrderedKeys(this, startPosForOK, startIdx, startOffset, endIdx, endOffset, + lengthForOK); } i = startIdx + 1; ++iPos; @@ 
-2514,8 +2475,7 @@ public final OrderedKeys getOrderedKeysByPositionWithStart( iData = packedGet(i); iNeg = iData < 0; } - return new SortedRangesOrderedKeys(this, startPosForOK, startIdx, startOffset, endIdx, - endOffset, lengthForOK); + return new SortedRangesOrderedKeys(this, startPosForOK, startIdx, startOffset, endIdx, endOffset, lengthForOK); } public final OrderedKeys getOrderedKeysByKeyRange(final long start, final long end) { @@ -2536,7 +2496,7 @@ public final OrderedKeys getOrderedKeysByKeyRange(final long start, final long e } final OrderedKeys getOrderedKeysByKeyRangePackedWithStart( - final long iStartPos, final int iStart, final long packedStart, final long packedEnd) { + final long iStartPos, final int iStart, final long packedStart, final long packedEnd) { int i = iStart; long iPos = iStartPos; long iData = packedGet(i); @@ -2573,10 +2533,10 @@ final OrderedKeys getOrderedKeysByKeyRangePackedWithStart( final long iNextValue = -iNextData; if (iNextValue >= packedEnd) { return new SortedRangesOrderedKeys( - this, iPos, - iNext, iData - iNextValue, - iNext, packedEnd - iNextValue, - packedEnd - iData + 1); + this, iPos, + iNext, iData - iNextValue, + iNext, packedEnd - iNextValue, + packedEnd - iData + 1); } startIdx = iNext; startOffset = iData - iNextValue; @@ -2612,25 +2572,25 @@ final OrderedKeys getOrderedKeysByKeyRangePackedWithStart( if (iValue >= packedEnd) { final long endOffset = packedEnd - iValue; return new SortedRangesOrderedKeys( - this, startPos, startIdx, startOffset, i, endOffset, - packedEnd - pendingStart + iPos - startPos + 1); + this, startPos, startIdx, startOffset, i, endOffset, + packedEnd - pendingStart + iPos - startPos + 1); } iPos += iValue - pendingStart; pendingStart = -1; } else { if (iData > packedEnd) { return new SortedRangesOrderedKeys( - this, startPos, - startIdx, startOffset, - i - 1, 0, - iPos - startPos + 1); + this, startPos, + startIdx, startOffset, + i - 1, 0, + iPos - startPos + 1); } ++iPos; pendingStart = 
iData; } if (i + 1 >= count) { return new SortedRangesOrderedKeys( - this, startPos, startIdx, startOffset, i, 0, iPos - startPos + 1); + this, startPos, startIdx, startOffset, i, 0, iPos - startPos + 1); } ++i; iData = packedGet(i); @@ -2643,7 +2603,7 @@ public final OrderedKeys.Iterator getOrderedKeysIterator() { return OrderedKeys.Iterator.EMPTY; } return new SortedRangesOrderedKeys.Iterator( - new SortedRangesOrderedKeys(this)); + new SortedRangesOrderedKeys(this)); } public final long getAverageRunLengthEstimate() { @@ -2664,7 +2624,7 @@ public final long getAverageRunLengthEstimate() { } private static SortedRanges intersectLegacy( - final SortedRanges sar, final long last, final Index.RangeIterator rangeIter) { + final SortedRanges sar, final long last, final Index.RangeIterator rangeIter) { try { // We could do better wrt offset... SortedRanges out = sar.makeMyTypeAndOffset(sar.count); @@ -2827,8 +2787,8 @@ protected static int shortArrayCapacityForLastIndex(final int lastIndex) { } protected static int intArrayCapacityForLastIndex(final int lastIndex, final boolean isDense) { - final int c = capacityForLastIndex(lastIndex, INT_EXTENT, - isDense ? INT_DENSE_MAX_CAPACITY : INT_SPARSE_MAX_CAPACITY); + final int c = + capacityForLastIndex(lastIndex, INT_EXTENT, isDense ? INT_DENSE_MAX_CAPACITY : INT_SPARSE_MAX_CAPACITY); if (c == 0) { return 0; } @@ -2837,7 +2797,7 @@ protected static int intArrayCapacityForLastIndex(final int lastIndex, final boo protected static int longArrayCapacityForLastIndex(final int lastIndex, final boolean isDense) { return capacityForLastIndex(lastIndex, LONG_EXTENT, - isDense ? LONG_DENSE_MAX_CAPACITY : LONG_SPARSE_MAX_CAPACITY); + isDense ? 
LONG_DENSE_MAX_CAPACITY : LONG_SPARSE_MAX_CAPACITY); } protected long cardinality; @@ -2876,7 +2836,7 @@ protected final long absPackedGet(final int i) { protected abstract int dataLength(); protected abstract SortedRanges ensureCanAppend(int newLastPosition, long unpackedNewLastKey, - final boolean writeCheck); + final boolean writeCheck); protected abstract void moveData(int srcPos, int dstPos, int len); @@ -2896,8 +2856,7 @@ protected abstract SortedRanges ensureCanAppend(int newLastPosition, long unpack protected abstract SortedRanges tryPackFor(long first, long last, int maxPos, boolean isDense); - protected final SortedRanges tryPackWithNewLast(final long newLastKey, int maxPos, - final boolean isDense) { + protected final SortedRanges tryPackWithNewLast(final long newLastKey, int maxPos, final boolean isDense) { return tryPackFor(first(), newLastKey, maxPos, isDense); } @@ -2908,8 +2867,7 @@ protected final SortedRanges tryPackWithNewLast(final long newLastKey, int maxPo public abstract int bytesUsed(); /** - * @param k if k == 0, compact if count < capacity. k > 0, compact if (capacity - count > - * (capacity >> k). + * @param k if k == 0, compact if count < capacity. k > 0, compact if (capacity - count > (capacity >> k). */ public abstract SortedRanges tryCompactUnsafe(int k); @@ -2921,8 +2879,7 @@ public final SortedRanges tryCompact(final int k) { } // Return a capacity that can contain lastIndex. - private static int capacityForLastIndex(final int lastIndex, final int extent, - final int maxCapacity) { + private static int capacityForLastIndex(final int lastIndex, final int extent, final int maxCapacity) { if (lastIndex >= maxCapacity) { return 0; } @@ -2958,15 +2915,14 @@ protected static boolean isLongAllocationSize(final int length) { } protected static boolean isIntAllocationSize(final int length) { - final int beforeRounding = length - 1; // space for 1 int after a 12 byte object header to - // an 8-byte boundary. 
+ final int beforeRounding = length - 1; // space for 1 int after a 12 byte object header to an 8-byte boundary. return isAllocationSize(beforeRounding, INT_EXTENT); } protected static boolean isShortAllocationSize(final int length) { - final int beforeRounding = length - 2; // space for 2 shorts after a 12 byte object header - // to an 8-byte boundary. + final int beforeRounding = length - 2; // space for 2 shorts after a 12 byte object header to an 8-byte + // boundary. return isAllocationSize(beforeRounding, SHORT_EXTENT); } @@ -2978,19 +2934,17 @@ private static boolean isAllocationSize(final int beforeRounding, final int exte } /** - * Run a binary search over the ranges in [pos, count). Assumes pos points to a position of a - * range start (eg, value at pos can't be negative). + * Run a binary search over the ranges in [pos, count). Assumes pos points to a position of a range start (eg, value + * at pos can't be negative). * * Assumes count > startPos on entry. * * @param unpackedTarget The (unpacked) target value to search for. - * @param startPos A position in our array pointing to the start of a range from where to start - * the search. - * @return r >= 0 if the target value is present. r is the position of the start of a range - * containing the target value. r < 0 if the target value is not present. pos = -r - 1 - * (== ~r) is the position where the target value would be inserted; this could be the - * start of a range which would be expanded in the target value where added, or the - * position where it would have to go as a single value range pushing the ranges from + * @param startPos A position in our array pointing to the start of a range from where to start the search. + * @return r >= 0 if the target value is present. r is the position of the start of a range containing the target + * value. r < 0 if the target value is not present. 
pos = -r - 1 (== ~r) is the position where the target + * value would be inserted; this could be the start of a range which would be expanded in the target value + * where added, or the position where it would have to go as a single value range pushing the ranges from * there to the right. */ final int unpackedBinarySearch(final long unpackedTarget, final int startPos) { @@ -3058,9 +3012,8 @@ private int packedBinarySearch(final long packedTarget, final int startPos) { * @param packedTarget The (packed) target value to search for. * @param startIdx A position in our array pointing to where to start the search. * @param endIdx last position (inclusive) for the search. - * @return A position where either a range containing the target already exists, or where it - * would be extended, or a new range inserted if not. Note this may never be negative - * but it might be endIdx + 1. + * @return A position where either a range containing the target already exists, or where it would be extended, or a + * new range inserted if not. Note this may never be negative but it might be endIdx + 1. */ final int absRawBinarySearch(final long packedTarget, final int startIdx, final int endIdx) { final long absEndPosPackedValue = absPackedGet(endIdx); @@ -3076,8 +3029,7 @@ final int absRawBinarySearch(final long packedTarget, final int startIdx, final return startIdx; } int minPos = startIdx; - // at this point, we know absPackedGet(minPos) < packedTarget && packedTarget < - // absPackedGet(maxPos). + // at this point, we know absPackedGet(minPos) < packedTarget && packedTarget < absPackedGet(maxPos). 
while (maxPos - minPos > packedValuesPerCacheLine()) { int midPos = (minPos + maxPos) / 2; final long absMidPosPackedValue = absPackedGet(midPos); @@ -3103,7 +3055,7 @@ final int absRawBinarySearch(final long packedTarget, final int startIdx, final } protected abstract SortedRanges checkSizeAndMoveData( - final int srcPos, final int dstPos, final int len, final long first, boolean writeCheck); + final int srcPos, final int dstPos, final int len, final long first, boolean writeCheck); // Note the returned SortedRangesTreeIndexImpl might have a different offset. // packedData >= 0 on entry. @@ -3129,8 +3081,7 @@ private SortedRanges open(final int pos, final long packedData, final boolean wr // packedData < 0 on entry. private SortedRanges openNeg(final int pos, final long packedData, final boolean writeCheck) { final long offset = unpack(0); - final SortedRanges ans = - checkSizeAndMoveData(pos, pos + 1, count - pos, first(), writeCheck); + final SortedRanges ans = checkSizeAndMoveData(pos, pos + 1, count - pos, first(), writeCheck); if (ans == null) { return null; } else if (ans == this) { @@ -3147,8 +3098,7 @@ private SortedRanges openNeg(final int pos, final long packedData, final boolean // Note the returned SortedRangesTreeIndexImpl might have a different offset. // packedData1 > 0 && packedData2 < 0 on entry. - private SortedRanges open(final int pos, final long packedData1, final long packedData2, - final boolean writeCheck) { + private SortedRanges open(final int pos, final long packedData1, final long packedData2, final boolean writeCheck) { final long first = (pos == 0) ? unpack(packedData1) : first(); final long offset = unpack(0); final SortedRanges ans = checkSizeAndMoveData(pos, pos + 1, count - pos, first, writeCheck); @@ -3171,10 +3121,9 @@ private SortedRanges open(final int pos, final long packedData1, final long pack // Note the returned SortedRangesTreeIndexImpl might have a different offset. // packedData1 < 0 && packedData2 > 0 on entry. 
private SortedRanges openNeg(final int pos, final long packedData1, final long packedData2, - final boolean writeCheck) { + final boolean writeCheck) { final long offset = unpack(0); - final SortedRanges ans = - checkSizeAndMoveData(pos, pos + 1, count - pos, first(), writeCheck); + final SortedRanges ans = checkSizeAndMoveData(pos, pos + 1, count - pos, first(), writeCheck); if (ans == null) { return null; } else if (ans == this) { @@ -3194,7 +3143,7 @@ private SortedRanges openNeg(final int pos, final long packedData1, final long p // Note the returned SortedRangesTreeIndexImpl might have a different offset. // packedData1 >= 0 && packedData2 < 0 on entry. private SortedRanges open2(final int pos, final long packedData1, final long packedData2, - final boolean writeCheck) { + final boolean writeCheck) { final long first = (pos == 0) ? unpack(packedData1) : first(); final long offset = unpack(0); final SortedRanges ans = checkSizeAndMoveData(pos, pos + 2, count - pos, first, writeCheck); @@ -3217,10 +3166,9 @@ private SortedRanges open2(final int pos, final long packedData1, final long pac // Note the returned SortedRangesTreeIndexImpl might have a different offset. // packedData1 < 0 && packedData2 > 0 on entry. 
private SortedRanges open2Neg(final int pos, final long packedData1, final long packedData2, - final boolean writeCheck) { + final boolean writeCheck) { final long offset = unpack(0); - final SortedRanges ans = - checkSizeAndMoveData(pos, pos + 2, count - pos, first(), writeCheck); + final SortedRanges ans = checkSizeAndMoveData(pos, pos + 2, count - pos, first(), writeCheck); if (ans == null) { return null; } else if (ans == this) { @@ -3247,8 +3195,8 @@ protected final void close2(final int pos) { count -= 2; } - protected static SortedRanges addPacked(SortedRanges sar, final long packedValue, - final long value, final boolean writeCheck) { + protected static SortedRanges addPacked(SortedRanges sar, final long packedValue, final long value, + final boolean writeCheck) { if (sar.count == 0) { if (writeCheck) { sar = sar.getWriteRef(); @@ -3263,11 +3211,10 @@ protected static SortedRanges addPacked(SortedRanges sar, final long packedValue } // sar.count > 0 assumed on entry. - // if iStartOut != null, this method stores in iStartOut the position from where to continue - // adding later values. + // if iStartOut != null, this method stores in iStartOut the position from where to continue adding later values. 
protected static SortedRanges addPackedWithStart( - SortedRanges sar, final int iStart, final long packedValue, final long value, - final MutableInt iStartOut, final boolean writeCheck) { + SortedRanges sar, final int iStart, final long packedValue, final long value, + final MutableInt iStartOut, final boolean writeCheck) { int i = iStart; if (i == sar.count) { int j = sar.count - 1; @@ -3453,8 +3400,8 @@ final void collapse(final int iDst, final int iSrc) { } protected static SortedRanges addRangePacked( - SortedRanges sar, long packedStart, long packedEnd, final long start, final long end, - final boolean writeCheck) { + SortedRanges sar, long packedStart, long packedEnd, final long start, final long end, + final boolean writeCheck) { final long deltaCard = packedEnd - packedStart + 1; if (deltaCard == 1) { return addPacked(sar, packedStart, start, writeCheck); @@ -3474,18 +3421,16 @@ protected static SortedRanges addRangePacked( } final int iStart = sar.absRawBinarySearch(packedStart, 0, sar.count - 1); - return addRangePackedWithStart(sar, iStart, packedStart, packedEnd, start, end, deltaCard, - null, writeCheck); + return addRangePackedWithStart(sar, iStart, packedStart, packedEnd, start, end, deltaCard, null, writeCheck); } // Assumption: sar is not empty. // packedStart != packedEnd assumed on entry, - // if iStartOut != null, this method stores in iStartOut the position from where to continue - // adding later ranges. + // if iStartOut != null, this method stores in iStartOut the position from where to continue adding later ranges. 
protected static SortedRanges addRangePackedWithStart( - SortedRanges sar, int iStart, - long packedStart, long packedEnd, final long start, final long end, - long deltaCard, final MutableInt iStartOut, final boolean writeCheck) { + SortedRanges sar, int iStart, + long packedStart, long packedEnd, final long start, final long end, + long deltaCard, final MutableInt iStartOut, final boolean writeCheck) { if (iStart == sar.count) { int j = sar.count - 1; long jData = sar.packedGet(j); @@ -3542,8 +3487,7 @@ protected static SortedRanges addRangePackedWithStart( return sar; } - // we will find a beginning of range (or single) with no intersection to the range to be - // added, + // we will find a beginning of range (or single) with no intersection to the range to be added, // which might result in adjusting iStart, packedStart and deltaCard. boolean mergeToLeftRange = false; boolean mergeToLeftSingle = false; @@ -3590,8 +3534,7 @@ protected static SortedRanges addRangePackedWithStart( mergeToLeftSingle = true; } else { if (packedEnd <= -iStartData) { - // the whole [packedStart, packedEnd] range was contained in an existing - // range. + // the whole [packedStart, packedEnd] range was contained in an existing range. if (iStartOut != null) { iStartOut.setValue(iStart - 1); } @@ -3664,8 +3607,7 @@ protected static SortedRanges addRangePackedWithStart( // iStart now points to the beginning of a range (or single). - // We will find the last beginning of range (or single) with no intersection to the range to - // be added + // We will find the last beginning of range (or single) with no intersection to the range to be added // and store it in iEnd; we may need to adjust packedEnd and deltaCard. 
int iEnd; long pendingStart = -1; @@ -3857,16 +3799,15 @@ protected static SortedRanges addRangePackedWithStart( return sar; } - private static SortedRanges appendUnpacked(final SortedRanges sar, final long value, - final boolean writeCheck) { + private static SortedRanges appendUnpacked(final SortedRanges sar, final long value, final boolean writeCheck) { if (!sar.fits(value)) { return null; } return appendPacked(sar, sar.pack(value), value, writeCheck); } - protected static SortedRanges appendPacked(SortedRanges sar, final long packedValue, - final long value, final boolean writeCheck) { + protected static SortedRanges appendPacked(SortedRanges sar, final long packedValue, final long value, + final boolean writeCheck) { if (sar.count == 0) { if (writeCheck) { sar = sar.getWriteRef(); @@ -3883,8 +3824,7 @@ protected static SortedRanges appendPacked(SortedRanges sar, final long packedVa final long lastValue = lastNeg ? -lastData : lastData; if (packedValue <= lastValue + 1) { if (packedValue <= lastValue) { - throw new IllegalArgumentException( - "Trying to append v=" + packedValue + " when last=" + lastValue); + throw new IllegalArgumentException("Trying to append v=" + packedValue + " when last=" + lastValue); } // packedValue == lastValue + 1. 
if (lastNeg) { @@ -3917,7 +3857,7 @@ protected static SortedRanges appendPacked(SortedRanges sar, final long packedVa } protected static SortedRanges appendRangeUnpacked( - final SortedRanges sar, final long start, final long end, final boolean writeCheck) { + final SortedRanges sar, final long start, final long end, final boolean writeCheck) { if (!sar.fits(start, end)) { return null; } @@ -3925,9 +3865,9 @@ protected static SortedRanges appendRangeUnpacked( } protected static SortedRanges appendRangePacked( - SortedRanges sar, - final long packedStart, final long packedEnd, final long start, final long end, - final boolean writeCheck) { + SortedRanges sar, + final long packedStart, final long packedEnd, final long start, final long end, + final boolean writeCheck) { final long deltaCard = packedEnd - packedStart + 1; if (deltaCard == 1) { return appendPacked(sar, packedStart, start, writeCheck); @@ -3950,8 +3890,7 @@ protected static SortedRanges appendRangePacked( if (packedStart <= lastValue + 1) { if (packedStart <= lastValue) { throw new IllegalArgumentException( - "Trying to append start=" + packedStart + " end=" + packedEnd + " when last=" - + lastValue); + "Trying to append start=" + packedStart + " end=" + packedEnd + " when last=" + lastValue); } // packedValue == lastValue + 1. 
if (lastNeg) { @@ -3983,8 +3922,7 @@ protected static SortedRanges appendRangePacked( return sar; } - protected static SortedRanges removePacked(SortedRanges sar, final long packedValue, - final long value) { + protected static SortedRanges removePacked(SortedRanges sar, final long packedValue, final long value) { if (sar.count == 0) { return sar; } @@ -4149,22 +4087,18 @@ protected static SortedRanges removePacked(SortedRanges sar, final long packedVa } protected static SortedRanges removeRangePacked( - SortedRanges sar, final long packedStart, final long packedEnd, final long start, - final long end) { + SortedRanges sar, final long packedStart, final long packedEnd, final long start, final long end) { final int iStart = sar.absRawBinarySearch(packedStart, 0, sar.count - 1); - return removeRangePackedWithStart(sar, iStart, packedStart, packedEnd, start, end, null, - true); + return removeRangePackedWithStart(sar, iStart, packedStart, packedEnd, start, end, null, true); } - // if iStartOut != null, this method stores in iStartOut the position from where to continue - // removing later ranges. + // if iStartOut != null, this method stores in iStartOut the position from where to continue removing later ranges. protected static SortedRanges removeRangePackedWithStart( - SortedRanges sar, int iStart, - final long packedStart, final long packedEnd, - final long start, final long end, - final MutableInt iStartOut, final boolean writeCheck) { - // iStart will be adjusted to be the start index of the positions to be eliminated from the - // array. + SortedRanges sar, int iStart, + final long packedStart, final long packedEnd, + final long start, final long end, + final MutableInt iStartOut, final boolean writeCheck) { + // iStart will be adjusted to be the start index of the positions to be eliminated from the array. 
if (iStart >= sar.count) { if (iStartOut != null) { iStartOut.setValue(sar.count); @@ -4238,8 +4172,7 @@ protected static SortedRanges removeRangePackedWithStart( int i = iStart; boolean iNeg = iStartNeg; long iValue = iStartValue; - // iEndExclusive will be set to mark the end index (exclusive) of the positions to be - // eliminated from the array. + // iEndExclusive will be set to mark the end index (exclusive) of the positions to be eliminated from the array. int iEndExclusive = -1; while (true) { if (packedEnd <= iValue) { @@ -4318,8 +4251,7 @@ protected static SortedRanges removeRangePackedWithStart( if (iStartOut != null) { iStartOut.setValue(iStart + 1); } - } else { // len == 0; it can't be the case that len==1 if we are truncating at both - // sides. + } else { // len == 0; it can't be the case that len==1 if we are truncating at both sides. sar = sar.open2Neg(iStart, -(packedStart - 1), packedEnd + 1, writeCheck); if (sar == null) { return null; @@ -4426,13 +4358,11 @@ protected final void validate(String strArg, final long iv1, final long iv2) { } else { if (ei) { if (eprev) { - throw new IllegalStateException( - cmsg + ": two consecutive negatives i=" + i); + throw new IllegalStateException(cmsg + ": two consecutive negatives i=" + i); } long delta = vi - vprev; if (delta < 1) { - throw new IllegalStateException( - cmsg + ": range delta=" + delta + " at i=" + i); + throw new IllegalStateException(cmsg + ": range delta=" + delta + " at i=" + i); } sz += delta; } else { @@ -4449,8 +4379,7 @@ protected final void validate(String strArg, final long iv1, final long iv2) { vprev = vi; } if (sz != cardinality) { - throw new IllegalStateException( - msg + " wrong cardinality=" + cardinality + " should be " + sz); + throw new IllegalStateException(msg + " wrong cardinality=" + cardinality + " should be " + sz); } } @@ -4462,8 +4391,7 @@ static void checkEquals(final TreeIndexImpl expected, final TreeIndexImpl ans) { checkEquals(expected, ans, null); } - static 
void checkEquals(final TreeIndexImpl expected, final TreeIndexImpl ans, - final TreeIndexImpl orig) { + static void checkEquals(final TreeIndexImpl expected, final TreeIndexImpl ans, final TreeIndexImpl orig) { ans.ixValidate(); final long expCard = expected.ixCardinality(); final long ansCard = ans.ixCardinality(); @@ -4472,11 +4400,11 @@ static void checkEquals(final TreeIndexImpl expected, final TreeIndexImpl ans, final boolean ansSubset = ans.ixSubsetOf(expected); if (failedCard || !expSubset || !ansSubset) { throw new IllegalStateException( - (failedCard ? "cardinality" : "subset") + - " check failed for for " + - "expected(" + expCard + ")=" + expected + - ", ans(" + ansCard + ")=" + ans + - ((orig == null) ? "" : ", orig(" + orig.ixCardinality() + ")=" + orig)); + (failedCard ? "cardinality" : "subset") + + " check failed for for " + + "expected(" + expCard + ")=" + expected + + ", ans(" + ansCard + ")=" + ans + + ((orig == null) ? "" : ", orig(" + orig.ixCardinality() + ")=" + orig)); } } @@ -4520,14 +4448,14 @@ public final TreeIndexImpl ixInsertRange(final long startKey, final long endKey) } @Override - public final TreeIndexImpl ixInsertSecondHalf( - final LongChunk keys, final int offset, final int length) { + public final TreeIndexImpl ixInsertSecondHalf(final LongChunk keys, final int offset, + final int length) { return ixInsert(TreeIndexImpl.fromChunk(keys, offset, length, true)); } @Override - public final TreeIndexImpl ixRemoveSecondHalf( - final LongChunk keys, final int offset, final int length) { + public final TreeIndexImpl ixRemoveSecondHalf(final LongChunk keys, final int offset, + final int length) { return ixRemove(TreeIndexImpl.fromChunk(keys, offset, length, true)); } @@ -4582,8 +4510,7 @@ public final boolean ixForEachLongRange(LongRangeAbortableConsumer lrac) { } @Override - public final TreeIndexImpl ixSubindexByPosOnNew(final long startPos, - final long endPosExclusive) { + public final TreeIndexImpl ixSubindexByPosOnNew(final long 
startPos, final long endPosExclusive) { if (endPosExclusive <= startPos || endPosExclusive <= 0 || startPos >= getCardinality()) { return TreeIndexImpl.EMPTY; } @@ -4610,7 +4537,7 @@ public final long ixGet(final long pos) { @Override public final void ixGetKeysForPositions(final PrimitiveIterator.OfLong inputPositions, - final LongConsumer outputKeys) { + final LongConsumer outputKeys) { getKeysForPositions(inputPositions, outputKeys); } @@ -4656,8 +4583,7 @@ public final TreeIndexImpl ixUpdate(final TreeIndexImpl added, final TreeIndexIm } if (!removed.ixIsEmpty()) { if (removed instanceof SingleRange) { - final TreeIndexImpl removeResult = - ixRemoveRange(removed.ixFirstKey(), removed.ixLastKey()); + final TreeIndexImpl removeResult = ixRemoveRange(removed.ixFirstKey(), removed.ixLastKey()); return removeResult.ixInsert(added); } final TreeIndexImpl ans = remove(removed); @@ -4717,8 +4643,7 @@ public final TreeIndexImpl remove(final TreeIndexImpl removed) { if (sr == null) { return null; } - return makeTreeIndexImplFromLongRangesArray(sr.data, sr.count, sr.cardinality, - canWrite() ? this : null); + return makeTreeIndexImplFromLongRangesArray(sr.data, sr.count, sr.cardinality, canWrite() ? 
this : null); } @Override @@ -4742,9 +4667,9 @@ public final TreeIndexImpl ixRemoveRange(final long startKey, final long endKey) @Override public final TreeIndexImpl ixRetain(final TreeIndexImpl toIntersect) { if (toIntersect.ixIsEmpty() || - isEmpty() || - toIntersect.ixLastKey() < first() || - last() < toIntersect.ixFirstKey()) { + isEmpty() || + toIntersect.ixLastKey() < first() || + last() < toIntersect.ixFirstKey()) { return TreeIndexImpl.EMPTY; } if (!canWrite()) { @@ -4793,8 +4718,8 @@ public final TreeIndexImpl ixIntersectOnNew(final TreeIndexImpl toIntersect) { public final TreeIndexImpl intersectOnNew(final TreeIndexImpl toIntersect) { if (isEmpty() || toIntersect.ixIsEmpty() || - last() < toIntersect.ixFirstKey() || - toIntersect.ixLastKey() < first()) { + last() < toIntersect.ixFirstKey() || + toIntersect.ixLastKey() < first()) { return TreeIndexImpl.EMPTY; } if (!USE_RANGES_ARRAY) { @@ -4873,8 +4798,8 @@ public final TreeIndexImpl ixMinusOnNew(final TreeIndexImpl other) { return TreeIndexImpl.EMPTY; } if (other.ixIsEmpty() || - last() < other.ixFirstKey() || - other.ixLastKey() < first()) { + last() < other.ixFirstKey() || + other.ixLastKey() < first()) { return cowRef(); } if (other instanceof SingleRange) { @@ -4945,8 +4870,7 @@ public final TreeIndexImpl ixShiftInPlace(final long shiftAmount) { } @Override - public final TreeIndexImpl ixInsertWithShift(final long shiftAmount, - final TreeIndexImpl other) { + public final TreeIndexImpl ixInsertWithShift(final long shiftAmount, final TreeIndexImpl other) { if (other.ixIsEmpty()) { return this; } @@ -5013,14 +4937,12 @@ public final TreeIndexImpl ixInsert(final TreeIndexImpl added) { } @Override - public final OrderedKeys ixGetOrderedKeysByPosition(final long startPositionInclusive, - final long length) { + public final OrderedKeys ixGetOrderedKeysByPosition(final long startPositionInclusive, final long length) { return getOrderedKeysByPosition(startPositionInclusive, length); } @Override - public 
final OrderedKeys ixGetOrderedKeysByKeyRange(final long startKeyInclusive, - final long endKeyInclusive) { + public final OrderedKeys ixGetOrderedKeysByKeyRange(final long startKeyInclusive, final long endKeyInclusive) { return getOrderedKeysByKeyRange(startKeyInclusive, endKeyInclusive); } @@ -5061,8 +4983,7 @@ public final TreeIndexImpl ixInvertOnNew(final TreeIndexImpl keys, final long ma return TreeIndexImpl.EMPTY; } if (keys instanceof SingleRange) { - final TreeIndexImpl r = - invertRangeOnNew(keys.ixFirstKey(), keys.ixLastKey(), maxPosition); + final TreeIndexImpl r = invertRangeOnNew(keys.ixFirstKey(), keys.ixLastKey(), maxPosition); if (r != null) { return r; } diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/sortedranges/SortedRangesOrderedKeys.java b/DB/src/main/java/io/deephaven/db/v2/utils/sortedranges/SortedRangesOrderedKeys.java index 4de669caaf8..a80d471c2de 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/sortedranges/SortedRangesOrderedKeys.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/sortedranges/SortedRangesOrderedKeys.java @@ -39,11 +39,11 @@ public class SortedRangesOrderedKeys extends OrderedKeysAsChunkImpl { } SortedRangesOrderedKeys( - final SortedRanges sar, - final long startPos, - final int startIdx, final long startOffset, - final int endIdx, final long endOffset, - final long size) { + final SortedRanges sar, + final long startPos, + final int startIdx, final long startOffset, + final int endIdx, final long endOffset, + final long size) { sar.acquire(); this.startPos = startPos; this.sar = sar; @@ -171,8 +171,7 @@ public Index asIndex() { } @Override - public void fillKeyIndicesChunk( - final WritableLongChunk chunkToFill) { + public void fillKeyIndicesChunk(final WritableLongChunk chunkToFill) { chunkToFill.setSize(0); forEachLong((final long key) -> { chunkToFill.add(key); @@ -181,8 +180,7 @@ public void fillKeyIndicesChunk( } @Override - public void fillKeyRangesChunk( - final WritableLongChunk chunkToFill) { + 
public void fillKeyRangesChunk(final WritableLongChunk chunkToFill) { chunkToFill.setSize(0); forEachLongRange((final long start, final long end) -> { chunkToFill.add(start); @@ -349,7 +347,7 @@ public long rangesCountUpperBound() { } private void reset(final long startPos, final int startIdx, final long startOffset, - final int endIdx, final long endOffset, final long size) { + final int endIdx, final long endOffset, final long size) { if (sar != null) { closeOrderedKeysAsChunkImpl(); } @@ -374,8 +372,8 @@ public void close() { throw new IllegalStateException(); } // We purposely /do not/ close the RspOrderedKeys part as it will get reused. - // The API doc for Iterator states that clients should /never/ call close. So that - // we eneded up here means + // The API doc for Iterator states that clients should /never/ call close. So that we eneded up here + // means // there is some kind of bug. closeOrderedKeysAsChunkImpl(); } @@ -399,8 +397,7 @@ public void close() { private final int oksEndIdx; private final long oksEndOffset; - // cached value for the first key on the call to any getNext* method, or -1 if cache has not - // been populated yet. + // cached value for the first key on the call to any getNext* method, or -1 if cache has not been populated yet. private long nextKey; private long pendingAdvanceSize = 0; @@ -456,8 +453,7 @@ public long peekNextKey() { return nextKey; } - // Updates curr{Start,End}{Idx,Offset} to a range starting on the position right after the - // end + // Updates curr{Start,End}{Idx,Offset} to a range starting on the position right after the end // at the time of the call, to the last position not greater than toKey. // Returns the size of the resulting OK. 
private long updateCurrThrough(final long toKey) { @@ -472,11 +468,10 @@ private long updateCurrThrough(final long toKey) { currStartOffset = currEndOffset + 1; } else { currStartIdx = currEndIdx + 1; - // currEndIdx + 1 < sar.count, otherwise we would have returned on the hasMore() - // check above. + // currEndIdx + 1 < sar.count, otherwise we would have returned on the hasMore() check above. if (SortedRanges.DEBUG) { Assert.lt(currEndIdx + 1, "currEndIdx + 1", - sar.count, "sar.count"); + sar.count, "sar.count"); } if (currStartIdx + 1 < sar.count) { final long nextData = sar.packedGet(currStartIdx + 1); @@ -593,8 +588,8 @@ public OrderedKeys getNextOrderedKeysThrough(final long maxKey) { return OrderedKeys.EMPTY; } nextKey = -1; - currBuf.reset(currBuf.startPos + currBuf.size + pendingAdvanceSize, currStartIdx, - currStartOffset, currEndIdx, currEndOffset, sz); + currBuf.reset(currBuf.startPos + currBuf.size + pendingAdvanceSize, currStartIdx, currStartOffset, + currEndIdx, currEndOffset, sz); pendingAdvanceSize = 0; sizeLeft -= sz; return currBuf; @@ -726,8 +721,8 @@ public OrderedKeys getNextOrderedKeysWithLength(final long desiredLen) { } nextOrderedKeysWithLength(actualLen); nextKey = -1; - currBuf.reset(currBuf.startPos + currBuf.size + pendingAdvanceSize, currStartIdx, - currStartOffset, currEndIdx, currEndOffset, actualLen); + currBuf.reset(currBuf.startPos + currBuf.size + pendingAdvanceSize, currStartIdx, currStartOffset, + currEndIdx, currEndOffset, actualLen); pendingAdvanceSize = 0; sizeLeft -= actualLen; return currBuf; @@ -736,8 +731,8 @@ public OrderedKeys getNextOrderedKeysWithLength(final long desiredLen) { @Override public boolean advance(final long toKey) { if (sizeLeft == 0 || - toKey <= 0 || - (currEndIdx != -1 && toKey <= currBuf.lastKey())) { + toKey <= 0 || + (currEndIdx != -1 && toKey <= currBuf.lastKey())) { return sizeLeft > 0; } final long sz = updateCurrThrough(toKey - 1); @@ -759,14 +754,13 @@ private void ifDebugValidate() { } } - 
private static void validateOffset(final String m, final int idx, final long offset, - final SortedRanges sr) { + private static void validateOffset(final String m, final int idx, final long offset, final SortedRanges sr) { final long v = sr.unpackedGet(idx); final BiConsumer fail = (final String prefix, final String suffix) -> { throw new IllegalStateException(m + - ((prefix != null && prefix.length() > 0) ? (" " + prefix) : "") + - ": idx=" + idx + ", v=" + v + ", offset=" + offset + - ((suffix != null && suffix.length() > 0) ? (", " + suffix) : "")); + ((prefix != null && prefix.length() > 0) ? (" " + prefix) : "") + + ": idx=" + idx + ", v=" + v + ", offset=" + offset + + ((suffix != null && suffix.length() > 0) ? (", " + suffix) : "")); }; if (v >= 0) { if (offset != 0) { diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/sortedranges/SortedRangesPacked.java b/DB/src/main/java/io/deephaven/db/v2/utils/sortedranges/SortedRangesPacked.java index 8b0a5aceedc..368a1f7c96f 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/sortedranges/SortedRangesPacked.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/sortedranges/SortedRangesPacked.java @@ -46,14 +46,12 @@ protected SortedRangesPacked(final ArrayType data, final long offset) { this.offset = offset; } - protected SortedRangesPacked(final ArrayType data, final long offset, final int count, - final long cardinality) { + protected SortedRangesPacked(final ArrayType data, final long offset, final int count, final long cardinality) { super(data, count, cardinality); this.offset = offset; } - protected abstract SortedRanges makeMyType(ArrayType data, long offset, int count, - long cardinality); + protected abstract SortedRanges makeMyType(ArrayType data, long offset, int count, long cardinality); protected final long offset() { return offset; @@ -71,8 +69,7 @@ public final SortedRanges addInternal(final long v, final boolean writeCheck) { return addPacked(this, packedValue, v, writeCheck); } - public final 
SortedRanges addRangeInternal(final long start, final long end, - final boolean writeCheck) { + public final SortedRanges addRangeInternal(final long start, final long end, final boolean writeCheck) { final long packedStart = pack(start); if (packedStart >= 0) { final long packedEnd = pack(end); @@ -99,8 +96,7 @@ public final SortedRanges appendInternal(final long v, final boolean writeCheck) return appendPacked(this, packedValue, v, writeCheck); } - public final SortedRanges appendRangeInternal(final long start, final long end, - final boolean writeCheck) { + public final SortedRanges appendRangeInternal(final long start, final long end, final boolean writeCheck) { final long packedStart = pack(start); if (packedStart >= 0) { final long packedEnd = pack(end); @@ -139,8 +135,8 @@ public final SortedRanges removeRangeInternal(final long start, final long end) } protected abstract void rebaseAndShift( - final ArrayType dataOut, final long newOffset, final long shiftOffset, - final SortedRangesTyped sar, final long first); + final ArrayType dataOut, final long newOffset, final long shiftOffset, + final SortedRangesTyped sar, final long first); @Override public SortedRanges applyShift(final long shiftOffset) { @@ -154,8 +150,7 @@ public SortedRanges applyShift(final long shiftOffset) { return applyShiftImpl(shiftOffset, v, !canWrite()); } - private SortedRanges applyShiftImpl(final long shiftOffset, final long first, - final boolean isNew) { + private SortedRanges applyShiftImpl(final long shiftOffset, final long first, final boolean isNew) { long newOffset = offset + shiftOffset; if (newOffset >= first) { if (isNew) { @@ -193,8 +188,7 @@ public SortedRanges applyShiftOnNew(final long shiftOffset) { return applyShiftImpl(shiftOffset, v, true); } - // try to convert this to a SortedArrayLong with enough space to accomodate deltaCapacity - // elements + // try to convert this to a SortedArrayLong with enough space to accomodate deltaCapacity elements // in its data 
array. private SortedRangesLong tryConvertToSrLong(final int deltaCapacity) { if (count == 0) { @@ -205,7 +199,7 @@ private SortedRangesLong tryConvertToSrLong(final int deltaCapacity) { } final int desiredCap = count + deltaCapacity; final int capacity = longArrayCapacityForLastIndex( - desiredCap - 1, isDenseLongSample(first(), last(), count)); + desiredCap - 1, isDenseLongSample(first(), last(), count)); if (capacity == 0) { return null; } diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/sortedranges/SortedRangesTyped.java b/DB/src/main/java/io/deephaven/db/v2/utils/sortedranges/SortedRangesTyped.java index 8b24e89bca9..78838c1302f 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/sortedranges/SortedRangesTyped.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/sortedranges/SortedRangesTyped.java @@ -7,8 +7,7 @@ public abstract class SortedRangesTyped extends SortedRanges { protected abstract int capacityForLastIndex(int i, boolean isDense); - protected abstract SortedRanges tryMakePackedType(final int maxPos, final long first, - boolean isDense); + protected abstract SortedRanges tryMakePackedType(final int maxPos, final long first, boolean isDense); protected abstract SortedRangesTyped makeMyTypeAndOffset(int initialCapacity); @@ -30,16 +29,15 @@ protected SortedRangesTyped(final ArrayType data, final int count, final long ca this.cardinality = cardinality; } - protected final void copyDataForMoveToNew(final SortedRanges srOut, final int srcPos, - final int dstPos, final int len) { + protected final void copyDataForMoveToNew(final SortedRanges srOut, final int srcPos, final int dstPos, + final int len) { for (int i = 0; i < srcPos; ++i) { srOut.unpackedSet(i, unpackedGet(i)); } copyData(srOut, srcPos, dstPos, len); } - protected final void copyData(final SortedRanges srOut, final int srcPos, final int dstPos, - final int len) { + protected final void copyData(final SortedRanges srOut, final int srcPos, final int dstPos, final int len) { for (int i 
= 0; i < len; ++i) { srOut.unpackedSet(dstPos + i, unpackedGet(srcPos + i)); } @@ -74,8 +72,7 @@ public final SortedRanges tryCompactUnsafe(final int k) { @Override protected final SortedRanges checkSizeAndMoveData( - final int srcPos, final int dstPos, final int len, final long first, - final boolean writeCheck) { + final int srcPos, final int dstPos, final int len, final long first, final boolean writeCheck) { final int maxPos = dstPos + len - 1; if (maxPos < dataLength()) { if (!writeCheck || canWrite()) { @@ -118,8 +115,8 @@ protected final SortedRanges checkSizeAndMoveData( } @Override - protected final SortedRanges ensureCanAppend(final int newLastPos, - final long unpackedNewLastKey, final boolean writeCheck) { + protected final SortedRanges ensureCanAppend(final int newLastPos, final long unpackedNewLastKey, + final boolean writeCheck) { if (newLastPos < dataLength()) { return writeCheck ? getWriteRef() : this; } diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/unboxer/ChunkUnboxer.java b/DB/src/main/java/io/deephaven/db/v2/utils/unboxer/ChunkUnboxer.java index d6acbc6b882..a2210ff1072 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/unboxer/ChunkUnboxer.java +++ b/DB/src/main/java/io/deephaven/db/v2/utils/unboxer/ChunkUnboxer.java @@ -28,8 +28,8 @@ public interface UnboxerKernel extends Context { */ Chunk unbox(ObjectChunk boxedPrimitives); - void unboxTo(ObjectChunk boxedPrimitives, - WritableChunk primitives, int sourceOffset, int destOffset); + void unboxTo(ObjectChunk boxedPrimitives, WritableChunk primitives, + int sourceOffset, int destOffset); } public static UnboxerKernel getUnboxer(ChunkType type, int capacity) { diff --git a/DB/src/main/java/io/deephaven/db/v2/utils/unboxer/ReplicateUnboxerKernel.java b/DB/src/main/java/io/deephaven/db/v2/utils/unboxer/ReplicateUnboxerKernel.java index 3e7026325e6..4722ffa7d8f 100644 --- a/DB/src/main/java/io/deephaven/db/v2/utils/unboxer/ReplicateUnboxerKernel.java +++ 
b/DB/src/main/java/io/deephaven/db/v2/utils/unboxer/ReplicateUnboxerKernel.java @@ -6,7 +6,6 @@ public class ReplicateUnboxerKernel { public static void main(String[] args) throws IOException { - ReplicatePrimitiveCode.charToAllButBoolean(CharUnboxer.class, - ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.charToAllButBoolean(CharUnboxer.class, ReplicatePrimitiveCode.MAIN_SRC); } } diff --git a/DB/src/main/java/io/deephaven/gui/color/Color.java b/DB/src/main/java/io/deephaven/gui/color/Color.java index 5979cbb7994..cefbc60b8c9 100644 --- a/DB/src/main/java/io/deephaven/gui/color/Color.java +++ b/DB/src/main/java/io/deephaven/gui/color/Color.java @@ -27,9 +27,8 @@ public class Color implements Paint, Serializable { * * Colors are specified by name or hex value. * - * Hex values are parsed as follows: first two digits set the Red component of the color; second - * two digits set the Green component; third two the Blue. Hex values must have a "#" in front, - * e.g. "#001122" + * Hex values are parsed as follows: first two digits set the Red component of the color; second two digits set the + * Green component; third two the Blue. Hex values must have a "#" in front, e.g. "#001122" * * For available names, see {@link Color} and {@link #colorNames}. * @@ -54,11 +53,10 @@ public Color(@SuppressWarnings("ConstantConditions") final String color) { } /** - * Creates a Color with the specified red, green, and blue values in the range (0 - 255). Alpha - * is defaulted to 255. + * Creates a Color with the specified red, green, and blue values in the range (0 - 255). Alpha is defaulted to 255. 
* - * @throws IllegalArgumentException if {@code r}, {@code g} or {@code b} values are outside of - * the range 0 to 255, inclusive + * @throws IllegalArgumentException if {@code r}, {@code g} or {@code b} values are outside of the range 0 to 255, + * inclusive * @param r the red component * @param g the green component * @param b the blue component @@ -68,11 +66,11 @@ public Color(final int r, final int g, final int b) { } /** - * Creates a Color with the specified red, green, blue, and alpha values in the range (0 - 255). - * The lower the alpha, the more transparent the color. + * Creates a Color with the specified red, green, blue, and alpha values in the range (0 - 255). The lower the + * alpha, the more transparent the color. * - * @throws IllegalArgumentException if {@code r}, {@code g} {@code b}, or {@code a} values are - * outside of the range 0 to 255, inclusive + * @throws IllegalArgumentException if {@code r}, {@code g} {@code b}, or {@code a} values are outside of the range + * 0 to 255, inclusive * @param r the red component * @param g the green component * @param b the blue component @@ -83,9 +81,8 @@ public Color(final int r, final int g, final int b, final int a) { } /** - * Creates a Color with the specified combined {@code rgb} value consisting of the red component - * in bits 16-23, the green component in bits 8-15, and the blue component in bits 0-7. Alpha is - * defaulted to 255. + * Creates a Color with the specified combined {@code rgb} value consisting of the red component in bits 16-23, the + * green component in bits 8-15, and the blue component in bits 0-7. Alpha is defaulted to 255. * * @param rgb the combined RGB components */ @@ -94,24 +91,23 @@ public Color(final int rgb) { } /** - * Creates a Color with the specified combined {@code rgba} value consisting of the alpha - * component in bits 24-31, the red component in bits 16-23, the green component in bits 8-15, - * and the blue component in bits 0-7. 
If {@code hasAlpha} is false, alpha is defaulted to 255. + * Creates a Color with the specified combined {@code rgba} value consisting of the alpha component in bits 24-31, + * the red component in bits 16-23, the green component in bits 8-15, and the blue component in bits 0-7. If + * {@code hasAlpha} is false, alpha is defaulted to 255. * * @param rgba the combined rbga components - * @param hasAlpha if true, {@code rgba} is parsed with an alpha component. Otherwise, alpha - * defaults to 255 + * @param hasAlpha if true, {@code rgba} is parsed with an alpha component. Otherwise, alpha defaults to 255 */ public Color(final int rgba, final boolean hasAlpha) { color = new java.awt.Color(rgba, hasAlpha); } /** - * Creates a Color with the specified red, green, and blue values in the range (0.0 - 1.0). - * Alpha is defaulted to 1.0. The lower the alpha, the more transparent the color. + * Creates a Color with the specified red, green, and blue values in the range (0.0 - 1.0). Alpha is defaulted to + * 1.0. The lower the alpha, the more transparent the color. * - * @throws IllegalArgumentException if {@code r}, {@code g} or {@code b} values are outside of - * the range 0.0 to 1.0, inclusive + * @throws IllegalArgumentException if {@code r}, {@code g} or {@code b} values are outside of the range 0.0 to 1.0, + * inclusive * @param r the red component * @param g the green component * @param b the blue component @@ -121,11 +117,11 @@ public Color(final float r, final float g, final float b) { } /** - * Creates a Color with the specified red, green, blue, and alpha values in the range (0.0 - - * 1.0). The lower the alpha, the more transparent the color. + * Creates a Color with the specified red, green, blue, and alpha values in the range (0.0 - 1.0). The lower the + * alpha, the more transparent the color. 
* - * @throws IllegalArgumentException if {@code r}, {@code g}, {@code b}, {@code a} values are - * outside of the range 0.0 to 1.0, inclusive + * @throws IllegalArgumentException if {@code r}, {@code g}, {@code b}, {@code a} values are outside of the range + * 0.0 to 1.0, inclusive * @param r the red component * @param g the green component * @param b the blue component @@ -141,8 +137,8 @@ public Color(final float r, final float g, final float b, final float a) { @Override public String toString() { - return "Color{" + color.getRed() + "," + color.getGreen() + "," + color.getBlue() + "," - + color.getAlpha() + "}"; + return "Color{" + color.getRed() + "," + color.getGreen() + "," + color.getBlue() + "," + color.getAlpha() + + "}"; } @@ -153,9 +149,8 @@ public String toString() { * * Colors are specified by name or hex value. * - * Hex values are parsed as follows: first two digits set the Red component of the color; second - * two digits set the Green component; third two the Blue. Hex values must have a "#" in front, - * e.g. "#001122" + * Hex values are parsed as follows: first two digits set the Red component of the color; second two digits set the + * Green component; third two the Blue. Hex values must have a "#" in front, e.g. "#001122" * * For available names, see {@link Color} and {@link #colorNames} * @@ -170,8 +165,8 @@ public static Color color(final String color) { /** * Creates a Color with the specified red, green, blue, and alpha values. * - * @throws IllegalArgumentException if {@code r}, {@code g} or {@code b} values are outside of - * the range 0 to 255, inclusive + * @throws IllegalArgumentException if {@code r}, {@code g} or {@code b} values are outside of the range 0 to 255, + * inclusive * @param r the red component in the range (0 - 255). * @param g the green component in the range (0 - 255). * @param b the blue component in the range (0 - 255). 
@@ -184,8 +179,8 @@ public static Color colorRGB(final int r, final int g, final int b) { /** * Creates a Color with the specified red, green, blue, and alpha values. * - * @throws IllegalArgumentException if {@code r}, {@code g} or {@code b} values are outside of - * the range 0 to 255, inclusive + * @throws IllegalArgumentException if {@code r}, {@code g} or {@code b} values are outside of the range 0 to 255, + * inclusive * @param r the red component in the range (0 - 255). * @param g the green component in the range (0 - 255). * @param b the blue component in the range (0 - 255). @@ -199,9 +194,9 @@ public static Color colorRGB(final int r, final int g, final int b, final int a) /** * Creates a Color with the specified red, green, blue, and alpha values. * - * @param rgb the combined rbga components consisting of the alpha component in bits 24-31, the - * red component in bits 16-23, the green component in bits 8-15, and the blue component - * in bits 0-7. Alpha is defaulted to 255. + * @param rgb the combined rbga components consisting of the alpha component in bits 24-31, the red component in + * bits 16-23, the green component in bits 8-15, and the blue component in bits 0-7. Alpha is defaulted to + * 255. * @return Color with the specified RGB value */ public static Color colorRGB(final int rgb) { @@ -211,11 +206,10 @@ public static Color colorRGB(final int rgb) { /** * Creates a Color with the specified red, green, blue, and alpha values. * - * @param rgba the combined rbga components consisting of the alpha component in bits 24-31, the - * red component in bits 16-23, the green component in bits 8-15, and the blue component - * in bits 0-7. If {@code hasAlpha} is false, alpha is set to 255. - * @param hasAlpha if true, {@code rgba} is parsed with an alpha component. 
Otherwise, alpha - * defaults to 255 + * @param rgba the combined rbga components consisting of the alpha component in bits 24-31, the red component in + * bits 16-23, the green component in bits 8-15, and the blue component in bits 0-7. If {@code hasAlpha} is + * false, alpha is set to 255. + * @param hasAlpha if true, {@code rgba} is parsed with an alpha component. Otherwise, alpha defaults to 255 * @return Color with the specified RGBA value */ public static Color colorRGB(final int rgba, final boolean hasAlpha) { @@ -225,8 +219,8 @@ public static Color colorRGB(final int rgba, final boolean hasAlpha) { /** * Creates a Color with the specified red, green, blue, and alpha values. * - * @throws IllegalArgumentException if {@code r}, {@code g} or {@code b} values are outside of - * the range 0.0 to 1.0, inclusive + * @throws IllegalArgumentException if {@code r}, {@code g} or {@code b} values are outside of the range 0.0 to 1.0, + * inclusive * @param r the red component in the range (0.0 - 1.0). * @param g the green component in the range (0.0 - 1.0). * @param b the blue component in the range (0.0 - 1.0). @@ -239,13 +233,12 @@ public static Color colorRGB(final float r, final float g, final float b) { /** * Creates a Color with the specified red, green, blue, and alpha values. * - * @throws IllegalArgumentException if {@code r}, {@code g}, {@code b}, {@code a} values are - * outside of the range 0.0 to 1.0, inclusive + * @throws IllegalArgumentException if {@code r}, {@code g}, {@code b}, {@code a} values are outside of the range + * 0.0 to 1.0, inclusive * @param r the red component in the range (0.0 - 1.0). * @param g the green component in the range (0.0 - 1.0). * @param b the blue component in the range (0.0 - 1.0). - * @param a the alpha component in the range (0.0-1.0). The lower the alpha, the more - * transparent the color. + * @param a the alpha component in the range (0.0-1.0). The lower the alpha, the more transparent the color. 
* @return Color with the specified RGBA values */ public static Color colorRGB(final float r, final float g, final float b, final float a) { @@ -253,11 +246,11 @@ public static Color colorRGB(final float r, final float g, final float b, final } /** - * Creates a Color with the specified hue, saturation, lightness, and alpha. The lower the - * alpha, the more transparent the color. + * Creates a Color with the specified hue, saturation, lightness, and alpha. The lower the alpha, the more + * transparent the color. * - * @throws IllegalArgumentException if {@code s} or {@code l} values are outside of the range - * 0.0 to 100.0, inclusive + * @throws IllegalArgumentException if {@code s} or {@code l} values are outside of the range 0.0 to 100.0, + * inclusive * @param h the hue component, as a degree on the color wheel * @param s the saturation component, as a percentage * @param l the lightness component, as a percentage @@ -268,11 +261,11 @@ public static Color colorHSL(final float h, final float s, final float l) { } /** - * Creates a Color with the specified hue, saturation, lightness, and alpha. The lower the - * alpha, the more transparent the color. + * Creates a Color with the specified hue, saturation, lightness, and alpha. The lower the alpha, the more + * transparent the color. 
* - * @throws IllegalArgumentException if {@code s} or {@code l} values are outside of the range - * 0.0 to 100.0, inclusive or if {@code a} is outside of the range 0.0 to 1.0, inclusive + * @throws IllegalArgumentException if {@code s} or {@code l} values are outside of the range 0.0 to 100.0, + * inclusive or if {@code a} is outside of the range 0.0 to 1.0, inclusive * @param h the hue component, as a degree on the color wheel * @param s the saturation component, as a percentage * @param l the lightness component, as a percentage diff --git a/DB/src/main/java/io/deephaven/gui/color/ColorPaletteAlgorithms.java b/DB/src/main/java/io/deephaven/gui/color/ColorPaletteAlgorithms.java index 5703af0ffb3..b3fb5b925d6 100644 --- a/DB/src/main/java/io/deephaven/gui/color/ColorPaletteAlgorithms.java +++ b/DB/src/main/java/io/deephaven/gui/color/ColorPaletteAlgorithms.java @@ -12,26 +12,22 @@ * Pre-made {@link ColorPaletteAlgorithm}s. */ public enum ColorPaletteAlgorithms implements ColorPaletteAlgorithm { - // GOLDEN algorithm taken from - // http://martin.ankerl.com/2009/12/09/how-to-create-random-colors-programmatically/ + // GOLDEN algorithm taken from http://martin.ankerl.com/2009/12/09/how-to-create-random-colors-programmatically/ /** * Golden ratio algorithm. Rotates hue values by the inverse golden ratio. 
*/ GOLDEN(c -> { final double goldenRatioConjugate = 0.618033988749895; java.awt.Color color = c.javaColor(); - final float[] hsbValues = - java.awt.Color.RGBtoHSB(color.getRed(), color.getGreen(), color.getBlue(), null); + final float[] hsbValues = java.awt.Color.RGBtoHSB(color.getRed(), color.getGreen(), color.getBlue(), null); hsbValues[0] += goldenRatioConjugate; hsbValues[0] %= 1; - color = - new java.awt.Color(java.awt.Color.HSBtoRGB(hsbValues[0], hsbValues[1], hsbValues[2])); + color = new java.awt.Color(java.awt.Color.HSBtoRGB(hsbValues[0], hsbValues[1], hsbValues[2])); return new Color(color.getRed(), color.getGreen(), color.getBlue()); }, new Color(0, 46, 200)), - // TRIAD_MIXING taken from - // http://devmag.org.za/2012/07/29/how-to-choose-colours-procedurally-algorithms/ + // TRIAD_MIXING taken from http://devmag.org.za/2012/07/29/how-to-choose-colours-procedurally-algorithms/ /** * Triad mixing algorithm. Randomly generates and mixes 3 colors and their RGB components. */ @@ -58,12 +54,12 @@ public enum ColorPaletteAlgorithms implements ColorPaletteAlgorithm { } return new Color( - (mixRatio1 * c1.javaColor().getRed() + mixRatio2 * c2.javaColor().getRed() - + mixRatio3 * c3.javaColor().getRed()) % 1, - (mixRatio1 * c1.javaColor().getGreen() + mixRatio2 * c2.javaColor().getGreen() - + mixRatio3 * c3.javaColor().getGreen()) % 1, - (mixRatio1 * c1.javaColor().getBlue() + mixRatio2 * c2.javaColor().getBlue() - + mixRatio3 * c3.javaColor().getBlue()) % 1); + (mixRatio1 * c1.javaColor().getRed() + mixRatio2 * c2.javaColor().getRed() + + mixRatio3 * c3.javaColor().getRed()) % 1, + (mixRatio1 * c1.javaColor().getGreen() + mixRatio2 * c2.javaColor().getGreen() + + mixRatio3 * c3.javaColor().getGreen()) % 1, + (mixRatio1 * c1.javaColor().getBlue() + mixRatio2 * c2.javaColor().getBlue() + + mixRatio3 * c3.javaColor().getBlue()) % 1); }, null); private static final Random randy = new Random(134235434); @@ -97,7 +93,7 @@ private static Random randy() { * @return 
algorithm corresponding to the given {@code name} */ public static ColorPaletteAlgorithms colorPaletteAlgorithm( - @SuppressWarnings("ConstantConditions") final String name) { + @SuppressWarnings("ConstantConditions") final String name) { if (name == null) { throw new IllegalArgumentException("Color palette algorithm can not be null"); } @@ -105,8 +101,7 @@ public static ColorPaletteAlgorithms colorPaletteAlgorithm( try { return valueOf(name.trim().toUpperCase()); } catch (IllegalArgumentException e) { - throw new UnsupportedOperationException( - "Color palette algorithm " + name + " not found"); + throw new UnsupportedOperationException("Color palette algorithm " + name + " not found"); } } diff --git a/DB/src/main/java/io/deephaven/gui/color/Colors.java b/DB/src/main/java/io/deephaven/gui/color/Colors.java index 8c60116f1e4..ffc653e5ce9 100644 --- a/DB/src/main/java/io/deephaven/gui/color/Colors.java +++ b/DB/src/main/java/io/deephaven/gui/color/Colors.java @@ -11,450 +11,466 @@ * * The colors are the named colors commonly supported by HTML browsers. * - * Methods and colors moved from DBColorUtil to here. They are left in DBColorUtil for backward - * compatibility. TODO (deephaven/deephaven-core/issues/175): Move this to a new module and package + * Methods and colors moved from DBColorUtil to here. They are left in DBColorUtil for backward compatibility. 
TODO + * (deephaven/deephaven-core/issues/175): Move this to a new module and package */ public enum Colors { // color constants from https://www.w3schools.com/colors/colorsnames.asp - ALICEBLUE("#F0F8FF"), ANTIQUEWHITE("#FAEBD7"), AQUA("#00FFFF"), AQUAMARINE("#7FFFD4"), AZURE( - "#F0FFFF"), BEIGE("#F5F5DC"), BISQUE("#FFE4C4"), BLACK("#000000"), BLANCHEDALMOND( - "#FFEBCD"), BLUE("#0000FF"), BLUEVIOLET("#8A2BE2"), BROWN("#A52A2A"), BURLYWOOD( - "#DEB887"), CADETBLUE("#5F9EA0"), CHARTREUSE("#7FFF00"), CHOCOLATE( - "#D2691E"), CORAL("#FF7F50"), CORNFLOWERBLUE("#6495ED"), CORNSILK( - "#FFF8DC"), CRIMSON("#DC143C"), CYAN("#00FFFF"), DARKBLUE( - "#00008B"), DARKCYAN("#008B8B"), DARKGOLDENROD("#B8860B"), DARKGRAY( - "#A9A9A9"), DARKGREY("#A9A9A9"), DARKGREEN("#006400"), DARKKHAKI( - "#BDB76B"), DARKMAGENTA("#8B008B"), DARKOLIVEGREEN( - "#556B2F"), DARKORANGE("#FF8C00"), DARKORCHID( - "#9932CC"), DARKRED("#8B0000"), DARKSALMON( - "#E9967A"), DARKSEAGREEN("#8FBC8F"), DARKSLATEBLUE( - "#483D8B"), DARKSLATEGRAY( - "#2F4F4F"), DARKSLATEGREY( - "#2F4F4F"), DARKTURQUOISE( - "#00CED1"), DARKVIOLET( - "#9400D3"), DEEPPINK( - "#FF1493"), DEEPSKYBLUE( - "#00BFFF"), DIMGRAY( - "#696969"), DIMGREY( - "#696969"), DODGERBLUE( - "#1E90FF"), FIREBRICK( - "#B22222"), FLORALWHITE( - "#FFFAF0"), FORESTGREEN( - "#228B22"), FUCHSIA( - "#FF00FF"), GAINSBORO( - "#DCDCDC"), GHOSTWHITE( - "#F8F8FF"), GOLD( - "#FFD700"), GOLDENROD( - "#DAA520"), GRAY( - "#808080"), GREY( - "#808080"), GREEN( - "#008000"), GREENYELLOW( - "#ADFF2F"), HONEYDEW( - "#F0FFF0"), HOTPINK( - "#FF69B4"), INDIANRED( - "#CD5C5C"), INDIGO( - "#4B0082"), IVORY( - "#FFFFF0"), KHAKI( - "#F0E68C"), LAVENDER( - "#E6E6FA"), LAVENDERBLUSH( - "#FFF0F5"), LAWNGREEN( - "#7CFC00"), LEMONCHIFFON( - "#FFFACD"), LIGHTBLUE( - "#ADD8E6"), LIGHTCORAL( - "#F08080"), LIGHTCYAN( - "#E0FFFF"), LIGHTGOLDENRODYELLOW( - "#FAFAD2"), LIGHTGRAY( - "#D3D3D3"), LIGHTGREY( - "#D3D3D3"), LIGHTGREEN( - "#90EE90"), LIGHTPINK( - "#FFB6C1"), LIGHTSALMON( - 
"#FFA07A"), LIGHTSEAGREEN( - "#20B2AA"), LIGHTSKYBLUE( - "#87CEFA"), LIGHTSLATEGRAY( - "#778899"), LIGHTSLATEGREY( - "#778899"), LIGHTSTEELBLUE( - "#B0C4DE"), LIGHTYELLOW( - "#FFFFE0"), LIME( - "#00FF00"), LIMEGREEN( - "#32CD32"), LINEN( - "#FAF0E6"), MAGENTA( - "#FF00FF"), MAROON( - "#800000"), MEDIUMAQUAMARINE( - "#66CDAA"), MEDIUMBLUE( - "#0000CD"), MEDIUMORCHID( - "#BA55D3"), MEDIUMPURPLE( - "#9370DB"), MEDIUMSEAGREEN( - "#3CB371"), MEDIUMSLATEBLUE( - "#7B68EE"), MEDIUMSPRINGGREEN( - "#00FA9A"), MEDIUMTURQUOISE( - "#48D1CC"), MEDIUMVIOLETRED( - "#C71585"), MIDNIGHTBLUE( - "#191970"), MINTCREAM( - "#F5FFFA"), MISTYROSE( - "#FFE4E1"), MOCCASIN( - "#FFE4B5"), NAVAJOWHITE( - "#FFDEAD"), NAVY( - "#000080"), OLDLACE( - "#FDF5E6"), OLIVE( - "#808000"), OLIVEDRAB( - "#6B8E23"), ORANGE( - "#FFA500"), ORANGERED( - "#FF4500"), ORCHID( - "#DA70D6"), PALEGOLDENROD( - "#EEE8AA"), PALEGREEN( - "#98FB98"), PALETURQUOISE( - "#AFEEEE"), PALEVIOLETRED( - "#DB7093"), PAPAYAWHIP( - "#FFEFD5"), PEACHPUFF( - "#FFDAB9"), PERU( - "#CD853F"), PINK( - "#FFC0CB"), PLUM( - "#DDA0DD"), POWDERBLUE( - "#B0E0E6"), PURPLE( - "#800080"), REBECCAPURPLE( - "#663399"), RED( - "#FF0000"), ROSYBROWN( - "#BC8F8F"), ROYALBLUE( - "#4169E1"), SADDLEBROWN( - "#8B4513"), SALMON( - "#FA8072"), SANDYBROWN( - "#F4A460"), SEAGREEN( - "#2E8B57"), SEASHELL( - "#FFF5EE"), SIENNA( - "#A0522D"), SILVER( - "#C0C0C0"), SKYBLUE( - "#87CEEB"), SLATEBLUE( - "#6A5ACD"), SLATEGRAY( - "#708090"), SLATEGREY( - "#708090"), SNOW( - "#FFFAFA"), SPRINGGREEN( - "#00FF7F"), STEELBLUE( - "#4682B4"), TAN( - "#D2B48C"), TEAL( - "#008080"), THISTLE( - "#D8BFD8"), TOMATO( - "#FF6347"), TURQUOISE( - "#40E0D0"), VIOLET( - "#EE82EE"), WHEAT( - "#F5DEB3"), WHITE( - "#FFFFFF"), WHITESMOKE( - "#F5F5F5"), YELLOW( - "#FFFF00"), YELLOWGREEN( - "#9ACD32"), + ALICEBLUE("#F0F8FF"), ANTIQUEWHITE("#FAEBD7"), AQUA("#00FFFF"), AQUAMARINE("#7FFFD4"), AZURE("#F0FFFF"), BEIGE( + "#F5F5DC"), BISQUE("#FFE4C4"), BLACK("#000000"), BLANCHEDALMOND("#FFEBCD"), 
BLUE( + "#0000FF"), BLUEVIOLET("#8A2BE2"), BROWN("#A52A2A"), BURLYWOOD("#DEB887"), CADETBLUE( + "#5F9EA0"), CHARTREUSE("#7FFF00"), CHOCOLATE("#D2691E"), CORAL("#FF7F50"), CORNFLOWERBLUE( + "#6495ED"), CORNSILK("#FFF8DC"), CRIMSON("#DC143C"), CYAN("#00FFFF"), DARKBLUE( + "#00008B"), DARKCYAN("#008B8B"), DARKGOLDENROD("#B8860B"), DARKGRAY( + "#A9A9A9"), DARKGREY("#A9A9A9"), DARKGREEN("#006400"), DARKKHAKI( + "#BDB76B"), DARKMAGENTA("#8B008B"), DARKOLIVEGREEN( + "#556B2F"), DARKORANGE("#FF8C00"), DARKORCHID( + "#9932CC"), DARKRED("#8B0000"), DARKSALMON( + "#E9967A"), DARKSEAGREEN( + "#8FBC8F"), DARKSLATEBLUE( + "#483D8B"), DARKSLATEGRAY( + "#2F4F4F"), DARKSLATEGREY( + "#2F4F4F"), DARKTURQUOISE( + "#00CED1"), DARKVIOLET( + "#9400D3"), DEEPPINK( + "#FF1493"), DEEPSKYBLUE( + "#00BFFF"), DIMGRAY( + "#696969"), DIMGREY( + "#696969"), DODGERBLUE( + "#1E90FF"), FIREBRICK( + "#B22222"), FLORALWHITE( + "#FFFAF0"), FORESTGREEN( + "#228B22"), FUCHSIA( + "#FF00FF"), GAINSBORO( + "#DCDCDC"), GHOSTWHITE( + "#F8F8FF"), GOLD( + "#FFD700"), GOLDENROD( + "#DAA520"), GRAY( + "#808080"), GREY( + "#808080"), GREEN( + "#008000"), GREENYELLOW( + "#ADFF2F"), HONEYDEW( + "#F0FFF0"), HOTPINK( + "#FF69B4"), INDIANRED( + "#CD5C5C"), INDIGO( + "#4B0082"), IVORY( + "#FFFFF0"), KHAKI( + "#F0E68C"), LAVENDER( + "#E6E6FA"), LAVENDERBLUSH( + "#FFF0F5"), LAWNGREEN( + "#7CFC00"), LEMONCHIFFON( + "#FFFACD"), LIGHTBLUE( + "#ADD8E6"), LIGHTCORAL( + "#F08080"), LIGHTCYAN( + "#E0FFFF"), LIGHTGOLDENRODYELLOW( + "#FAFAD2"), LIGHTGRAY( + "#D3D3D3"), LIGHTGREY( + "#D3D3D3"), LIGHTGREEN( + "#90EE90"), LIGHTPINK( + "#FFB6C1"), LIGHTSALMON( + "#FFA07A"), LIGHTSEAGREEN( + "#20B2AA"), LIGHTSKYBLUE( + "#87CEFA"), LIGHTSLATEGRAY( + "#778899"), LIGHTSLATEGREY( + "#778899"), LIGHTSTEELBLUE( + "#B0C4DE"), LIGHTYELLOW( + "#FFFFE0"), LIME( + "#00FF00"), LIMEGREEN( + "#32CD32"), LINEN( + "#FAF0E6"), MAGENTA( + "#FF00FF"), MAROON( + "#800000"), MEDIUMAQUAMARINE( + "#66CDAA"), MEDIUMBLUE( + "#0000CD"), MEDIUMORCHID( + 
"#BA55D3"), MEDIUMPURPLE( + "#9370DB"), MEDIUMSEAGREEN( + "#3CB371"), MEDIUMSLATEBLUE( + "#7B68EE"), MEDIUMSPRINGGREEN( + "#00FA9A"), MEDIUMTURQUOISE( + "#48D1CC"), MEDIUMVIOLETRED( + "#C71585"), MIDNIGHTBLUE( + "#191970"), MINTCREAM( + "#F5FFFA"), MISTYROSE( + "#FFE4E1"), MOCCASIN( + "#FFE4B5"), NAVAJOWHITE( + "#FFDEAD"), NAVY( + "#000080"), OLDLACE( + "#FDF5E6"), OLIVE( + "#808000"), OLIVEDRAB( + "#6B8E23"), ORANGE( + "#FFA500"), ORANGERED( + "#FF4500"), ORCHID( + "#DA70D6"), PALEGOLDENROD( + "#EEE8AA"), PALEGREEN( + "#98FB98"), PALETURQUOISE( + "#AFEEEE"), PALEVIOLETRED( + "#DB7093"), PAPAYAWHIP( + "#FFEFD5"), PEACHPUFF( + "#FFDAB9"), PERU( + "#CD853F"), PINK( + "#FFC0CB"), PLUM( + "#DDA0DD"), POWDERBLUE( + "#B0E0E6"), PURPLE( + "#800080"), REBECCAPURPLE( + "#663399"), RED( + "#FF0000"), ROSYBROWN( + "#BC8F8F"), ROYALBLUE( + "#4169E1"), SADDLEBROWN( + "#8B4513"), SALMON( + "#FA8072"), SANDYBROWN( + "#F4A460"), SEAGREEN( + "#2E8B57"), SEASHELL( + "#FFF5EE"), SIENNA( + "#A0522D"), SILVER( + "#C0C0C0"), SKYBLUE( + "#87CEEB"), SLATEBLUE( + "#6A5ACD"), SLATEGRAY( + "#708090"), SLATEGREY( + "#708090"), SNOW( + "#FFFAFA"), SPRINGGREEN( + "#00FF7F"), STEELBLUE( + "#4682B4"), TAN( + "#D2B48C"), TEAL( + "#008080"), THISTLE( + "#D8BFD8"), TOMATO( + "#FF6347"), TURQUOISE( + "#40E0D0"), VIOLET( + "#EE82EE"), WHEAT( + "#F5DEB3"), WHITE( + "#FFFFFF"), WHITESMOKE( + "#F5F5F5"), YELLOW( + "#FFFF00"), YELLOWGREEN( + "#9ACD32"), // Legacy non-html-standard colors. Duplicates removed. Conflicts resolved with a DB_ prefix. 
- VIVID_RED(231, 47, 39), VIVID_YELLOWRED(238, 113, 25), VIVID_YELLOW(255, 200, - 8), VIVID_GREENYELLOW(170, 198, 27), VIVID_GREEN(19, 166, 50), VIVID_BLUEGREEN(4, 148, - 87), VIVID_BLUE(1, 134, 141), VIVID_PURPLEBLUE(3, 86, 155), VIVID_PURPLE(46, 20, - 141), VIVID_REDPURPLE(204, 63, 92), STRONG_RED(207, 46, 49), STRONG_YELLOWRED(226, - 132, 45), STRONG_YELLOW(227, 189, 28), STRONG_GREENYELLOW(162, 179, - 36), STRONG_GREEN(18, 154, 47), STRONG_BLUEGREEN(6, 134, 84), STRONG_BLUE(3, - 130, 122), STRONG_PURPLEBLUE(6, 113, 148), STRONG_PURPLE(92, 104, - 163), STRONG_REDPURPLE(175, 92, 87), BRIGHT_RED(231, 108, - 86), BRIGHT_YELLOWRED(241, 176, 102), BRIGHT_YELLOW(255, 228, - 15), BRIGHT_GREENYELLOW(169, 199, 35), BRIGHT_GREEN(88, 171, - 45), BRIGHT_BLUEGREEN(43, 151, 89), BRIGHT_BLUE(0, 147, - 159), BRIGHT_PURPLEBLUE(59, 130, - 157), BRIGHT_PURPLE(178, 137, - 166), BRIGHT_REDPURPLE(209, 100, - 109), PALE_RED(233, 163, - 144), PALE_YELLOWRED(242, 178, - 103), PALE_YELLOW(255, 236, - 79), PALE_GREENYELLOW(219, - 220, 93), PALE_GREEN( - 155, 196, - 113), PALE_BLUEGREEN( - 146, 198, - 131), PALE_BLUE( - 126, 188, - 209), PALE_PURPLEBLUE( - 147, - 184, - 213), PALE_PURPLE( - 197, - 188, - 213), PALE_REDPURPLE( - 218, - 176, - 176), VERYPALE_RED( - 236, - 217, - 202), VERYPALE_YELLOWRED( - 245, - 223, - 181), VERYPALE_YELLOW( - 249, - 239, - 189), VERYPALE_GREENYELLOW( - 228, - 235, - 191), VERYPALE_GREEN( - 221, - 232, - 207), VERYPALE_BLUEGREEN( - 209, - 234, - 211), VERYPALE_BLUE( - 194, - 222, - 242), VERYPALE_PURPLEBLUE( - 203, - 215, - 232), VERYPALE_PURPLE( - 224, - 218, - 230), VERYPALE_REDPURPLE( - 235, - 219, - 224), LIGHTGRAYISH_RED( - 213, - 182, - 166), LIGHTGRAYISH_YELLOWRED( - 218, - 196, - 148), LIGHTGRAYISH_YELLOW( - 233, - 227, - 143), LIGHTGRAYISH_GREENYELLOW( - 209, - 213, - 165), LIGHTGRAYISH_GREEN( - 179, - 202, - 157), LIGHTGRAYISH_BLUEGREEN( - 166, - 201, - 163), LIGHTGRAYISH_BLUE( - 127, - 175, - 166), LIGHTGRAYISH_PURPLEBLUE( - 165, - 184, - 199), 
LIGHTGRAYISH_PURPLE( - 184, - 190, - 189), LIGHTGRAYISH_REDPURPLE( - 206, - 185, - 179), LIGHT_RED( - 211, - 142, - 110), LIGHT_YELLOWRED( - 215, - 145, - 96), LIGHT_YELLOW( - 255, - 203, - 88), LIGHT_GREENYELLOW( - 195, - 202, - 101), LIGHT_GREEN( - 141, - 188, - 90), LIGHT_BLUEGREEN( - 140, - 195, - 110), LIGHT_BLUE( - 117, - 173, - 169), LIGHT_PURPLEBLUE( - 138, - 166, - 187), LIGHT_PURPLE( - 170, - 165, - 199), LIGHT_REDPURPLE( - 205, - 154, - 149), GRAYISH_RED( - 171, - 131, - 115), GRAYISH_YELLOWRED( - 158, - 128, - 110), GRAYISH_YELLOW( - 148, - 133, - 105), GRAYISH_GREENYELLOW( - 144, - 135, - 96), GRAYISH_GREEN( - 143, - 162, - 121), GRAYISH_BLUEGREEN( - 122, - 165, - 123), GRAYISH_BLUE( - 130, - 154, - 145), GRAYISH_PURPLEBLUE( - 133, - 154, - 153), GRAYISH_PURPLE( - 151, - 150, - 139), GRAYISH_REDPURPLE( - 160, - 147, - 131), DULL_RED( - 162, - 88, - 61), DULL_YELLOWRED( - 167, - 100, - 67), DULL_YELLOW( - 139, - 117, - 65), DULL_GREENYELLOW( - 109, - 116, - 73), DULL_GREEN( - 88, - 126, - 61), DULL_BLUEGREEN( - 39, - 122, - 62), DULL_BLUE( - 24, - 89, - 63), DULL_PURPLEBLUE( - 53, - 109, - 98), DULL_PURPLE( - 44, - 77, - 143), DULL_REDPURPLE( - 115, - 71, - 79), DEEP_RED( - 172, - 36, - 48), DEEP_YELLOWRED( - 169, - 87, - 49), DEEP_YELLOW( - 156, - 137, - 37), DEEP_GREENYELLOW( - 91, - 132, - 47), DEEP_GREEN( - 20, - 114, - 48), DEEP_BLUEGREEN( - 23, - 106, - 43), DEEP_BLUE( - 20, - 88, - 60), DEEP_PURPLEBLUE( - 8, - 87, - 107), DEEP_PURPLE( - 58, - 55, - 119), DEEP_REDPURPLE( - 111, - 61, - 56), DARK_RED( - 116, - 47, - 50), DARK_YELLOWRED( - 115, - 63, - 44), DARK_YELLOW( - 103, - 91, - 44), DARK_GREENYELLOW( - 54, - 88, - 48), DARK_GREEN( - 30, - 98, - 50), DARK_BLUEGREEN( - 27, - 86, - 49), DARK_BLUE( - 18, - 83, - 65), DARK_PURPLEBLUE( - 16, - 76, - 84), DARK_PURPLE( - 40, - 57, - 103), DARK_REDPURPLE( - 88, - 60, - 50), DARKGRAYISH_RED( - 79, - 46, - 43), DARKGRAYISH_YELLOWRED( - 85, - 55, - 43), DARKGRAYISH_YELLOW( - 75, - 63, - 45), 
DARKGRAYISH_GREENYELLOW( - 44, - 60, - 49), DARKGRAYISH_GREEN( - 34, - 62, - 51), DARKGRAYISH_BLUEGREEN( - 31, - 56, - 45), DARKGRAYISH_BLUE( - 29, - 60, - 47), DARKGRAYISH_PURPLEBLUE( - 25, - 62, - 63), DARKGRAYISH_PURPLE( - 34, - 54, - 68), DARKGRAYISH_REDPURPLE( - 53, - 52, - 48), GRAY1( - 28, - 28, - 28), GRAY2( - 56, - 56, - 56), GRAY3( - 84, - 84, - 84), GRAY4( - 112, - 112, - 112), GRAY5( - 140, - 140, - 140), GRAY6( - 168, - 168, - 168), GRAY7( - 196, - 196, - 196), GRAY8( - 224, - 224, - 224), DB_PINK( - 255, - 175, - 175), DB_ORANGE( - 255, - 200, - 0), DB_GREEN( - 0, - 255, - 0), NO_FORMATTING( - 0, - 0, - 0); + VIVID_RED(231, 47, 39), VIVID_YELLOWRED(238, 113, 25), VIVID_YELLOW(255, 200, 8), VIVID_GREENYELLOW(170, 198, + 27), VIVID_GREEN(19, 166, 50), VIVID_BLUEGREEN(4, 148, 87), VIVID_BLUE(1, 134, 141), VIVID_PURPLEBLUE(3, 86, + 155), VIVID_PURPLE(46, 20, 141), VIVID_REDPURPLE(204, 63, 92), STRONG_RED(207, 46, + 49), STRONG_YELLOWRED(226, 132, 45), STRONG_YELLOW(227, 189, 28), STRONG_GREENYELLOW(162, + 179, 36), STRONG_GREEN(18, 154, 47), STRONG_BLUEGREEN(6, 134, 84), STRONG_BLUE(3, + 130, 122), STRONG_PURPLEBLUE(6, 113, 148), STRONG_PURPLE(92, 104, + 163), STRONG_REDPURPLE(175, 92, 87), BRIGHT_RED(231, 108, + 86), BRIGHT_YELLOWRED(241, 176, 102), BRIGHT_YELLOW(255, + 228, + 15), BRIGHT_GREENYELLOW(169, 199, 35), BRIGHT_GREEN( + 88, 171, 45), BRIGHT_BLUEGREEN(43, 151, + 89), BRIGHT_BLUE(0, 147, + 159), BRIGHT_PURPLEBLUE(59, + 130, + 157), BRIGHT_PURPLE( + 178, 137, + 166), BRIGHT_REDPURPLE( + 209, + 100, + 109), PALE_RED( + 233, + 163, + 144), PALE_YELLOWRED( + 242, + 178, + 103), PALE_YELLOW( + 255, + 236, + 79), PALE_GREENYELLOW( + 219, + 220, + 93), PALE_GREEN( + 155, + 196, + 113), PALE_BLUEGREEN( + 146, + 198, + 131), PALE_BLUE( + 126, + 188, + 209), PALE_PURPLEBLUE( + 147, + 184, + 213), PALE_PURPLE( + 197, + 188, + 213), PALE_REDPURPLE( + 218, + 176, + 176), VERYPALE_RED( + 236, + 217, + 202), VERYPALE_YELLOWRED( + 245, + 223, + 181), 
VERYPALE_YELLOW( + 249, + 239, + 189), VERYPALE_GREENYELLOW( + 228, + 235, + 191), VERYPALE_GREEN( + 221, + 232, + 207), VERYPALE_BLUEGREEN( + 209, + 234, + 211), VERYPALE_BLUE( + 194, + 222, + 242), VERYPALE_PURPLEBLUE( + 203, + 215, + 232), VERYPALE_PURPLE( + 224, + 218, + 230), VERYPALE_REDPURPLE( + 235, + 219, + 224), LIGHTGRAYISH_RED( + 213, + 182, + 166), LIGHTGRAYISH_YELLOWRED( + 218, + 196, + 148), LIGHTGRAYISH_YELLOW( + 233, + 227, + 143), LIGHTGRAYISH_GREENYELLOW( + 209, + 213, + 165), LIGHTGRAYISH_GREEN( + 179, + 202, + 157), LIGHTGRAYISH_BLUEGREEN( + 166, + 201, + 163), LIGHTGRAYISH_BLUE( + 127, + 175, + 166), LIGHTGRAYISH_PURPLEBLUE( + 165, + 184, + 199), LIGHTGRAYISH_PURPLE( + 184, + 190, + 189), LIGHTGRAYISH_REDPURPLE( + 206, + 185, + 179), LIGHT_RED( + 211, + 142, + 110), LIGHT_YELLOWRED( + 215, + 145, + 96), LIGHT_YELLOW( + 255, + 203, + 88), LIGHT_GREENYELLOW( + 195, + 202, + 101), LIGHT_GREEN( + 141, + 188, + 90), LIGHT_BLUEGREEN( + 140, + 195, + 110), LIGHT_BLUE( + 117, + 173, + 169), LIGHT_PURPLEBLUE( + 138, + 166, + 187), LIGHT_PURPLE( + 170, + 165, + 199), LIGHT_REDPURPLE( + 205, + 154, + 149), GRAYISH_RED( + 171, + 131, + 115), GRAYISH_YELLOWRED( + 158, + 128, + 110), GRAYISH_YELLOW( + 148, + 133, + 105), GRAYISH_GREENYELLOW( + 144, + 135, + 96), GRAYISH_GREEN( + 143, + 162, + 121), GRAYISH_BLUEGREEN( + 122, + 165, + 123), GRAYISH_BLUE( + 130, + 154, + 145), GRAYISH_PURPLEBLUE( + 133, + 154, + 153), GRAYISH_PURPLE( + 151, + 150, + 139), GRAYISH_REDPURPLE( + 160, + 147, + 131), DULL_RED( + 162, + 88, + 61), DULL_YELLOWRED( + 167, + 100, + 67), DULL_YELLOW( + 139, + 117, + 65), DULL_GREENYELLOW( + 109, + 116, + 73), DULL_GREEN( + 88, + 126, + 61), DULL_BLUEGREEN( + 39, + 122, + 62), DULL_BLUE( + 24, + 89, + 63), DULL_PURPLEBLUE( + 53, + 109, + 98), DULL_PURPLE( + 44, + 77, + 143), DULL_REDPURPLE( + 115, + 71, + 79), DEEP_RED( + 172, + 36, + 48), DEEP_YELLOWRED( + 169, + 87, + 49), DEEP_YELLOW( + 156, + 137, + 37), DEEP_GREENYELLOW( + 91, + 
132, + 47), DEEP_GREEN( + 20, + 114, + 48), DEEP_BLUEGREEN( + 23, + 106, + 43), DEEP_BLUE( + 20, + 88, + 60), DEEP_PURPLEBLUE( + 8, + 87, + 107), DEEP_PURPLE( + 58, + 55, + 119), DEEP_REDPURPLE( + 111, + 61, + 56), DARK_RED( + 116, + 47, + 50), DARK_YELLOWRED( + 115, + 63, + 44), DARK_YELLOW( + 103, + 91, + 44), DARK_GREENYELLOW( + 54, + 88, + 48), DARK_GREEN( + 30, + 98, + 50), DARK_BLUEGREEN( + 27, + 86, + 49), DARK_BLUE( + 18, + 83, + 65), DARK_PURPLEBLUE( + 16, + 76, + 84), DARK_PURPLE( + 40, + 57, + 103), DARK_REDPURPLE( + 88, + 60, + 50), DARKGRAYISH_RED( + 79, + 46, + 43), DARKGRAYISH_YELLOWRED( + 85, + 55, + 43), DARKGRAYISH_YELLOW( + 75, + 63, + 45), DARKGRAYISH_GREENYELLOW( + 44, + 60, + 49), DARKGRAYISH_GREEN( + 34, + 62, + 51), DARKGRAYISH_BLUEGREEN( + 31, + 56, + 45), DARKGRAYISH_BLUE( + 29, + 60, + 47), DARKGRAYISH_PURPLEBLUE( + 25, + 62, + 63), DARKGRAYISH_PURPLE( + 34, + 54, + 68), DARKGRAYISH_REDPURPLE( + 53, + 52, + 48), GRAY1( + 28, + 28, + 28), GRAY2( + 56, + 56, + 56), GRAY3( + 84, + 84, + 84), GRAY4( + 112, + 112, + 112), GRAY5( + 140, + 140, + 140), GRAY6( + 168, + 168, + 168), GRAY7( + 196, + 196, + 196), GRAY8( + 224, + 224, + 224), DB_PINK( + 255, + 175, + 175), DB_ORANGE( + 255, + 200, + 0), DB_GREEN( + 0, + 255, + 0), NO_FORMATTING( + 0, + 0, + 0); private final Color color; private final long columnFormat; @@ -518,8 +534,8 @@ public static long toLong(final long color) { */ public static long toLong(final Color color) { return color == null ? 0 - : backgroundForegroundAuto(color.javaColor().getRed(), color.javaColor().getGreen(), - color.javaColor().getBlue()); + : backgroundForegroundAuto(color.javaColor().getRed(), color.javaColor().getGreen(), + color.javaColor().getBlue()); } /** @@ -534,8 +550,7 @@ public static long toLong(final String color) { } /** - * Creates a table format encoding with background color equal to the input RGB and unformatted - * foreground. 
+ * Creates a table format encoding with background color equal to the input RGB and unformatted foreground. * * @param r red component * @param g green component @@ -544,9 +559,9 @@ public static long toLong(final String color) { */ public static long background(long r, long g, long b) { return (0x01L << 56) | - (r << 48) | - (g << 40) | - (b << 32); + (r << 48) | + (g << 40) | + (b << 32); } /** @@ -557,8 +572,7 @@ public static long bg(long r, long g, long b) { } /** - * Creates a table format encoding with foreground color equal to the input RGB and unformatted - * background. + * Creates a table format encoding with foreground color equal to the input RGB and unformatted background. * * @param r red component * @param g green component @@ -567,9 +581,9 @@ public static long bg(long r, long g, long b) { */ public static long foreground(long r, long g, long b) { return (0x01L << 24) | - (r << 16) | - (g << 8) | - (b); + (r << 16) | + (g << 8) | + (b); } /** @@ -590,8 +604,7 @@ public static long fg(long r, long g, long b) { * @param fgb blue component of the foreground color * @return table format encoding with specified foreground and background colors */ - public static long backgroundForeground(long bgr, long bgg, long bgb, long fgr, long fgg, - long fgb) { + public static long backgroundForeground(long bgr, long bgg, long bgb, long fgr, long fgg, long fgb) { return bg(bgr, bgg, bgb) | fg(fgr, fgg, fgb); } @@ -603,8 +616,8 @@ public static long bgfg(long bgr, long bgg, long bgb, long fgr, long fgg, long f } /** - * Creates a table format encoding with specified background color and automatically chosen - * contrasting foreground color. + * Creates a table format encoding with specified background color and automatically chosen contrasting foreground + * color. 
* * @param bgr red component of the background color * @param bgg green component of the background color @@ -625,16 +638,15 @@ public static long bgfga(long bgr, long bgg, long bgb) { } /** - * Creates a table format encoding for the heat map at {@code value}. A contrasting foreground - * color is automatically chosen. + * Creates a table format encoding for the heat map at {@code value}. A contrasting foreground color is + * automatically chosen. * * @param value determines the color used by its location in the heat map's range * @param min minimum value of the heat map range * @param max maximum value of the heat map range * @param bg1 background color at or below the minimum value of the heat map * @param bg2 background color at or above the maximum value of the heat map - * @return table format encoding with background color and auto-generated foreground color - * determined by a heat map + * @return table format encoding with background color and auto-generated foreground color determined by a heat map */ public static long heatmap(double value, double min, double max, long bg1, long bg2) { if (value <= min) { @@ -653,7 +665,7 @@ public static long heatmap(double value, double min, double max, long bg1, long long b2 = (bg2 >> 32) & 0xFF; return bgfga((long) (r1 + pert * (r2 - r1)), (long) (g1 + pert * (g2 - g1)), - (long) (b1 + pert * (b2 - b1))); + (long) (b1 + pert * (b2 - b1))); } } @@ -683,8 +695,7 @@ public static long heatmapForeground(double value, double min, double max, long long g2 = (fg2 >> 8) & 0xFF; long b2 = (fg2) & 0xFF; - return fg((long) (r1 + pert * (r2 - r1)), (long) (g1 + pert * (g2 - g1)), - (long) (b1 + pert * (b2 - b1))); + return fg((long) (r1 + pert * (r2 - r1)), (long) (g1 + pert * (g2 - g1)), (long) (b1 + pert * (b2 - b1))); } } @@ -698,8 +709,7 @@ public static long heatmapFg(double value, double min, double max, long fg1, lon // 
------------------------------------------------------------------------------------------------------------------------------------------------------------------ /** - * Creates a table format encoding with specified background color and an unformatted - * foreground. + * Creates a table format encoding with specified background color and an unformatted foreground. * * @param color color encoding * @return table format encoding with specified background color and unformatted foreground diff --git a/DB/src/main/java/io/deephaven/gui/table/QuickFilterMode.java b/DB/src/main/java/io/deephaven/gui/table/QuickFilterMode.java index 7aec0271191..f6146e34fc1 100644 --- a/DB/src/main/java/io/deephaven/gui/table/QuickFilterMode.java +++ b/DB/src/main/java/io/deephaven/gui/table/QuickFilterMode.java @@ -1,8 +1,8 @@ package io.deephaven.gui.table; public enum QuickFilterMode { - NORMAL("Normal"), NUMERIC("Numeric Only"), MULTI("Multi-column"), AND("AND (String Only)"), OR( - "OR"), WILDCARD("Wildcard"), REGEX("Regex"); + NORMAL("Normal"), NUMERIC("Numeric Only"), MULTI("Multi-column"), AND("AND (String Only)"), OR("OR"), WILDCARD( + "Wildcard"), REGEX("Regex"); private final String displayName; diff --git a/DB/src/main/java/io/deephaven/gui/table/filters/Condition.java b/DB/src/main/java/io/deephaven/gui/table/filters/Condition.java index dd5cd905f37..5dc2e9ba34d 100644 --- a/DB/src/main/java/io/deephaven/gui/table/filters/Condition.java +++ b/DB/src/main/java/io/deephaven/gui/table/filters/Condition.java @@ -5,30 +5,28 @@ package io.deephaven.gui.table.filters; /** - * The set of Conditions that can be used with various {@link FilterData} types. Note that not all - * {@link FilterData} classes support all types. + * The set of Conditions that can be used with various {@link FilterData} types. Note that not all {@link FilterData} + * classes support all types. 
*/ public enum Condition { EQUALS("equals", true), NOT_EQUALS("not equals", false), // Strings - INCLUDES("includes", true), NOT_INCLUDES("not includes", false), EQUALS_MATCH_CASE( - "equals (casesen)", - true), NOT_EQUALS_MATCH_CASE("not equals (casesen)", false), INCLUDES_MATCH_CASE( - "includes (casesen)", true), NOT_INCLUDES_MATCH_CASE("not includes (casesen)", false), + INCLUDES("includes", true), NOT_INCLUDES("not includes", false), EQUALS_MATCH_CASE("equals (casesen)", + true), NOT_EQUALS_MATCH_CASE("not equals (casesen)", false), INCLUDES_MATCH_CASE("includes (casesen)", + true), NOT_INCLUDES_MATCH_CASE("not includes (casesen)", false), // Numbers and Dates - LESS_THAN("less than", false), GREATER_THAN("greater than", false), LESS_THAN_OR_EQUAL( - "less than or equal to", false), GREATER_THAN_OR_EQUAL("greater than or equal to", false), + LESS_THAN("less than", false), GREATER_THAN("greater than", false), LESS_THAN_OR_EQUAL("less than or equal to", + false), GREATER_THAN_OR_EQUAL("greater than or equal to", false), // Numbers - EQUALS_ABS("equals (abs)", true), NOT_EQUALS_ABS("not equals (abs)", false), LESS_THAN_ABS( - "less than (abs)", false), GREATER_THAN_ABS("greater than (abs)", false), + EQUALS_ABS("equals (abs)", true), NOT_EQUALS_ABS("not equals (abs)", false), LESS_THAN_ABS("less than (abs)", + false), GREATER_THAN_ABS("greater than (abs)", false), // Lists - INCLUDED_IN("included in list", true), INCLUDED_IN_MATCH_CASE("included in list (casesen)", - true), NOT_INCLUDED_IN("not included in list", - false), NOT_INCLUDED_IN_MATCH_CASE("not included in list (casesen)", false); + INCLUDED_IN("included in list", true), INCLUDED_IN_MATCH_CASE("included in list (casesen)", true), NOT_INCLUDED_IN( + "not included in list", false), NOT_INCLUDED_IN_MATCH_CASE("not included in list (casesen)", false); public final String description; public final boolean defaultOr; diff --git a/DB/src/main/java/io/deephaven/libs/primitives/Comparators.java 
b/DB/src/main/java/io/deephaven/libs/primitives/Comparators.java index 86c4c5ec0b8..87b0e3eb3bd 100644 --- a/DB/src/main/java/io/deephaven/libs/primitives/Comparators.java +++ b/DB/src/main/java/io/deephaven/libs/primitives/Comparators.java @@ -20,8 +20,7 @@ class Comparators { public static class AscendingOrderComparator implements Comparator { private static AscendingOrderComparator comparator = new AscendingOrderComparator<>(); - private static Comparator reverseComparator = - new AscendingOrderComparator<>().reversed(); + private static Comparator reverseComparator = new AscendingOrderComparator<>().reversed(); private AscendingOrderComparator() {} @@ -41,10 +40,8 @@ public int compare(final T o1, final T o2) { public static class AscendingOrderComparatorNumber implements Comparator { - private static AscendingOrderComparatorNumber comparatorNumber = - new AscendingOrderComparatorNumber<>(); - private static Comparator reverseComparatorNumber = - new AscendingOrderComparatorNumber<>().reversed(); + private static AscendingOrderComparatorNumber comparatorNumber = new AscendingOrderComparatorNumber<>(); + private static Comparator reverseComparatorNumber = new AscendingOrderComparatorNumber<>().reversed(); private AscendingOrderComparatorNumber() {} @@ -71,19 +68,16 @@ static int compare(final T o1, final T o2) { return -1; } else if (dhNull2) { return 1; - } else if (o1.getClass() == o2.getClass() - && Comparable.class.isAssignableFrom(o1.getClass())) { + } else if (o1.getClass() == o2.getClass() && Comparable.class.isAssignableFrom(o1.getClass())) { return ((Comparable) o1).compareTo(o2); } else if (o1.getClass() == Double.class && o2.getClass() == Long.class) { return compareDoubleAndLong((Double) o1, (Long) o2); } else if (o1.getClass() == Long.class && o2.getClass() == Double.class) { return -compareDoubleAndLong((Double) o2, (Long) o1); - } else if (Number.class.isAssignableFrom(o1.getClass()) - && Number.class.isAssignableFrom(o2.getClass())) { + } else if 
(Number.class.isAssignableFrom(o1.getClass()) && Number.class.isAssignableFrom(o2.getClass())) { return Double.compare(((Number) o1).doubleValue(), ((Number) o2).doubleValue()); } else { - throw new IllegalArgumentException( - "Can not compare classes : " + o1.getClass() + " and " + o2.getClass()); + throw new IllegalArgumentException("Can not compare classes : " + o1.getClass() + " and " + o2.getClass()); } } @@ -95,8 +89,7 @@ private static int compareDoubleAndLong(final Double o1, final Long o2) { return 0; } else if (o1Prim == NULL_DOUBLE) { return -1; - } else if (o2Prim == NULL_LONG || Double.isNaN(o1Prim)) {// As Double.NaN is considered the - // greatest + } else if (o2Prim == NULL_LONG || Double.isNaN(o1Prim)) {// As Double.NaN is considered the greatest return 1; } else { if (o1PrimLong < o2Prim) { diff --git a/DB/src/main/java/io/deephaven/libs/primitives/ComparePrimitives.java b/DB/src/main/java/io/deephaven/libs/primitives/ComparePrimitives.java index d3c05e8766e..5df3437272b 100644 --- a/DB/src/main/java/io/deephaven/libs/primitives/ComparePrimitives.java +++ b/DB/src/main/java/io/deephaven/libs/primitives/ComparePrimitives.java @@ -170,8 +170,7 @@ public static short max(final short v1, final byte v2) { * @return maximum of the valid input values. If both inputs are invalid, null is returned. */ public static short max(final short v1, final short v2) { - return ShortPrimitives.isNull(v1) ? v2 - : ShortPrimitives.isNull(v2) ? v1 : v1 < v2 ? v2 : v1; + return ShortPrimitives.isNull(v1) ? v2 : ShortPrimitives.isNull(v2) ? v1 : v1 < v2 ? v2 : v1; } /** @@ -237,8 +236,7 @@ public static short min(final short v1, final byte v2) { * @return minimum of the valid input values. If both inputs are invalid, null is returned. */ public static short min(final short v1, final short v2) { - return ShortPrimitives.isNull(v1) ? v2 - : ShortPrimitives.isNull(v2) ? v1 : v1 > v2 ? v2 : v1; + return ShortPrimitives.isNull(v1) ? v2 : ShortPrimitives.isNull(v2) ? 
v1 : v1 > v2 ? v2 : v1; } /** @@ -296,8 +294,7 @@ public static double min(final short v1, final double v2) { * @return maximum of the valid input values. If both inputs are invalid, null is returned. */ public static int max(final int v1, final byte v2) { - return BytePrimitives.isNull(v2) ? v1 - : IntegerPrimitives.isNull(v1) ? v2 : v1 < v2 ? v2 : v1; + return BytePrimitives.isNull(v2) ? v1 : IntegerPrimitives.isNull(v1) ? v2 : v1 < v2 ? v2 : v1; } /** @@ -308,8 +305,7 @@ public static int max(final int v1, final byte v2) { * @return maximum of the valid input values. If both inputs are invalid, null is returned. */ public static int max(final int v1, final short v2) { - return ShortPrimitives.isNull(v2) ? v1 - : IntegerPrimitives.isNull(v1) ? v2 : v1 < v2 ? v2 : v1; + return ShortPrimitives.isNull(v2) ? v1 : IntegerPrimitives.isNull(v1) ? v2 : v1 < v2 ? v2 : v1; } /** @@ -320,8 +316,7 @@ public static int max(final int v1, final short v2) { * @return maximum of the valid input values. If both inputs are invalid, null is returned. */ public static int max(final int v1, final int v2) { - return IntegerPrimitives.isNull(v1) ? v2 - : IntegerPrimitives.isNull(v2) ? v1 : v1 < v2 ? v2 : v1; + return IntegerPrimitives.isNull(v1) ? v2 : IntegerPrimitives.isNull(v2) ? v1 : v1 < v2 ? v2 : v1; } /** @@ -366,8 +361,7 @@ public static double max(final int v1, final double v2) { * @return minimum of the valid input values. If both inputs are invalid, null is returned. */ public static int min(final int v1, final byte v2) { - return BytePrimitives.isNull(v2) ? v1 - : IntegerPrimitives.isNull(v1) ? v2 : v1 > v2 ? v2 : v1; + return BytePrimitives.isNull(v2) ? v1 : IntegerPrimitives.isNull(v1) ? v2 : v1 > v2 ? v2 : v1; } /** @@ -378,8 +372,7 @@ public static int min(final int v1, final byte v2) { * @return minimum of the valid input values. If both inputs are invalid, null is returned. */ public static int min(final int v1, final short v2) { - return ShortPrimitives.isNull(v2) ? 
v1 - : IntegerPrimitives.isNull(v1) ? v2 : v1 > v2 ? v2 : v1; + return ShortPrimitives.isNull(v2) ? v1 : IntegerPrimitives.isNull(v1) ? v2 : v1 > v2 ? v2 : v1; } /** @@ -390,8 +383,7 @@ public static int min(final int v1, final short v2) { * @return minimum of the valid input values. If both inputs are invalid, null is returned. */ public static int min(final int v1, final int v2) { - return IntegerPrimitives.isNull(v1) ? v2 - : IntegerPrimitives.isNull(v2) ? v1 : v1 > v2 ? v2 : v1; + return IntegerPrimitives.isNull(v1) ? v2 : IntegerPrimitives.isNull(v2) ? v1 : v1 > v2 ? v2 : v1; } /** @@ -460,8 +452,7 @@ public static long max(final long v1, final long v2) { * @return maximum of the valid input values. If both inputs are invalid, null is returned. */ public static long max(final long v1, final int v2) { - return IntegerPrimitives.isNull(v2) ? v1 - : LongPrimitives.isNull(v1) ? v2 : v1 < v2 ? v2 : v1; + return IntegerPrimitives.isNull(v2) ? v1 : LongPrimitives.isNull(v1) ? v2 : v1 < v2 ? v2 : v1; } /** @@ -528,8 +519,7 @@ public static long min(final long v1, final long v2) { * @return minimum of the valid input values. If both inputs are invalid, null is returned. */ public static long min(final long v1, final int v2) { - return IntegerPrimitives.isNull(v2) ? v1 - : LongPrimitives.isNull(v1) ? v2 : v1 > v2 ? v2 : v1; + return IntegerPrimitives.isNull(v2) ? v1 : LongPrimitives.isNull(v1) ? v2 : v1 > v2 ? v2 : v1; } /** @@ -577,7 +567,7 @@ public static double min(final long v1, final double v2) { */ public static float max(final float v1, final byte v2) { return BytePrimitives.isNull(v2) ? (Float.isNaN(v1) ? NULL_FLOAT : v1) - : (Float.isNaN(v1) || FloatPrimitives.isNull(v1)) ? v2 : v1 < v2 ? v2 : v1; + : (Float.isNaN(v1) || FloatPrimitives.isNull(v1)) ? v2 : v1 < v2 ? v2 : v1; } /** @@ -589,7 +579,7 @@ public static float max(final float v1, final byte v2) { */ public static float max(final float v1, final short v2) { return ShortPrimitives.isNull(v2) ? 
(Float.isNaN(v1) ? NULL_FLOAT : v1) - : (Float.isNaN(v1) || FloatPrimitives.isNull(v1)) ? v2 : v1 < v2 ? v2 : v1; + : (Float.isNaN(v1) || FloatPrimitives.isNull(v1)) ? v2 : v1 < v2 ? v2 : v1; } /** @@ -629,8 +619,7 @@ public static float max(final float v1, final float v2) { final boolean isV2Null = FloatPrimitives.isNull(v2); final boolean isV2Invalid = isV2NaN || isV2Null; - return isV1Invalid ? (isV2Invalid ? NULL_FLOAT : v2) - : (isV2Invalid ? v1 : v1 < v2 ? v2 : v1); + return isV1Invalid ? (isV2Invalid ? NULL_FLOAT : v2) : (isV2Invalid ? v1 : v1 < v2 ? v2 : v1); } /** @@ -654,7 +643,7 @@ public static double max(final float v1, final double v2) { */ public static float min(final float v1, final byte v2) { return BytePrimitives.isNull(v2) ? (Float.isNaN(v1) ? NULL_FLOAT : v1) - : (Float.isNaN(v1) || FloatPrimitives.isNull(v1)) ? v2 : v1 > v2 ? v2 : v1; + : (Float.isNaN(v1) || FloatPrimitives.isNull(v1)) ? v2 : v1 > v2 ? v2 : v1; } /** @@ -666,7 +655,7 @@ public static float min(final float v1, final byte v2) { */ public static float min(final float v1, final short v2) { return ShortPrimitives.isNull(v2) ? (Float.isNaN(v1) ? NULL_FLOAT : v1) - : (Float.isNaN(v1) || FloatPrimitives.isNull(v1)) ? v2 : v1 > v2 ? v2 : v1; + : (Float.isNaN(v1) || FloatPrimitives.isNull(v1)) ? v2 : v1 > v2 ? v2 : v1; } /** @@ -706,8 +695,7 @@ public static float min(final float v1, final float v2) { final boolean isV2Null = FloatPrimitives.isNull(v2); final boolean isV2Invalid = isV2NaN || isV2Null; - return isV1Invalid ? (isV2Invalid ? NULL_FLOAT : v2) - : (isV2Invalid ? v1 : v1 > v2 ? v2 : v1); + return isV1Invalid ? (isV2Invalid ? NULL_FLOAT : v2) : (isV2Invalid ? v1 : v1 > v2 ? v2 : v1); } /** @@ -733,7 +721,7 @@ public static double min(final float v1, final double v2) { */ public static double max(final double v1, final byte v2) { return BytePrimitives.isNull(v2) ? (Double.isNaN(v1) ? NULL_DOUBLE : v1) - : (Double.isNaN(v1) || DoublePrimitives.isNull(v1)) ? v2 : v1 < v2 ? 
v2 : v1; + : (Double.isNaN(v1) || DoublePrimitives.isNull(v1)) ? v2 : v1 < v2 ? v2 : v1; } /** @@ -745,7 +733,7 @@ public static double max(final double v1, final byte v2) { */ public static double max(final double v1, final short v2) { return ShortPrimitives.isNull(v2) ? (Double.isNaN(v1) ? NULL_DOUBLE : v1) - : (Double.isNaN(v1) || DoublePrimitives.isNull(v1)) ? v2 : v1 < v2 ? v2 : v1; + : (Double.isNaN(v1) || DoublePrimitives.isNull(v1)) ? v2 : v1 < v2 ? v2 : v1; } /** @@ -757,7 +745,7 @@ public static double max(final double v1, final short v2) { */ public static double max(final double v1, final int v2) { return IntegerPrimitives.isNull(v2) ? (Double.isNaN(v1) ? NULL_DOUBLE : v1) - : (Double.isNaN(v1) || DoublePrimitives.isNull(v1)) ? v2 : v1 < v2 ? v2 : v1; + : (Double.isNaN(v1) || DoublePrimitives.isNull(v1)) ? v2 : v1 < v2 ? v2 : v1; } /** @@ -778,9 +766,8 @@ public static double max(final double v1, final long v2) { try { return returnValue(v2); } catch (final Casting.LosingPrecisionWhileCastingException uoe) { - throw new Casting.LosingPrecisionWhileCastingException("Not supported: max(" - + v1 + ", " + v2 - + "), because the result loses the precision while being cast to double."); + throw new Casting.LosingPrecisionWhileCastingException("Not supported: max(" + v1 + ", " + v2 + + "), because the result loses the precision while being cast to double."); } } else { return v1; @@ -803,8 +790,7 @@ public static double max(final double v1, final float v2) { final boolean isV2Null = FloatPrimitives.isNull(v2); final boolean isV2Invalid = isV2NaN || isV2Null; - return isV1Invalid ? (isV2Invalid ? NULL_DOUBLE : v2) - : (isV2Invalid ? v1 : v1 < v2 ? v2 : v1); + return isV1Invalid ? (isV2Invalid ? NULL_DOUBLE : v2) : (isV2Invalid ? v1 : v1 < v2 ? 
v2 : v1); } /** @@ -822,8 +808,7 @@ public static double max(final double v1, final double v2) { final boolean isV2Null = DoublePrimitives.isNull(v2); final boolean isV2Invalid = isV2NaN || isV2Null; - return isV1Invalid ? (isV2Invalid ? NULL_DOUBLE : v2) - : (isV2Invalid ? v1 : v1 < v2 ? v2 : v1); + return isV1Invalid ? (isV2Invalid ? NULL_DOUBLE : v2) : (isV2Invalid ? v1 : v1 < v2 ? v2 : v1); } /** @@ -835,7 +820,7 @@ public static double max(final double v1, final double v2) { */ public static double min(final double v1, final byte v2) { return BytePrimitives.isNull(v2) ? (Double.isNaN(v1) ? NULL_DOUBLE : v1) - : (Double.isNaN(v1) || DoublePrimitives.isNull(v1)) ? v2 : v1 > v2 ? v2 : v1; + : (Double.isNaN(v1) || DoublePrimitives.isNull(v1)) ? v2 : v1 > v2 ? v2 : v1; } /** @@ -847,7 +832,7 @@ public static double min(final double v1, final byte v2) { */ public static double min(final double v1, final short v2) { return ShortPrimitives.isNull(v2) ? (Double.isNaN(v1) ? NULL_DOUBLE : v1) - : (Double.isNaN(v1) || DoublePrimitives.isNull(v1)) ? v2 : v1 > v2 ? v2 : v1; + : (Double.isNaN(v1) || DoublePrimitives.isNull(v1)) ? v2 : v1 > v2 ? v2 : v1; } /** @@ -859,7 +844,7 @@ public static double min(final double v1, final short v2) { */ public static double min(final double v1, final int v2) { return IntegerPrimitives.isNull(v2) ? (Double.isNaN(v1) ? NULL_DOUBLE : v1) - : (Double.isNaN(v1) || DoublePrimitives.isNull(v1)) ? v2 : v1 > v2 ? v2 : v1; + : (Double.isNaN(v1) || DoublePrimitives.isNull(v1)) ? v2 : v1 > v2 ? 
v2 : v1; } /** @@ -880,9 +865,8 @@ public static double min(final double v1, final long v2) { try { return returnValue(v2); } catch (final Casting.LosingPrecisionWhileCastingException uoe) { - throw new Casting.LosingPrecisionWhileCastingException("Not supported: min(" - + v1 + ", " + v2 - + "), because the result loses the precision while being cast to double."); + throw new Casting.LosingPrecisionWhileCastingException("Not supported: min(" + v1 + ", " + v2 + + "), because the result loses the precision while being cast to double."); } } else { @@ -899,8 +883,7 @@ private static double returnValue(final long v) { if (isLosingPrecision(v)) { // throw error throw new Casting.LosingPrecisionWhileCastingException( - "Not supported because the value, " + v - + ", loses the precision while being cast to double."); + "Not supported because the value, " + v + ", loses the precision while being cast to double."); } else { return v; } @@ -921,8 +904,7 @@ public static double min(final double v1, final double v2) { final boolean isV2Null = DoublePrimitives.isNull(v2); final boolean isV2Invalid = isV2NaN || isV2Null; - return isV1Invalid ? (isV2Invalid ? NULL_DOUBLE : v2) - : (isV2Invalid ? v1 : v1 > v2 ? v2 : v1); + return isV1Invalid ? (isV2Invalid ? NULL_DOUBLE : v2) : (isV2Invalid ? v1 : v1 > v2 ? v2 : v1); } /** @@ -940,7 +922,6 @@ public static double min(final double v1, final float v2) { final boolean isV2Null = FloatPrimitives.isNull(v2); final boolean isV2Invalid = isV2NaN || isV2Null; - return isV1Invalid ? (isV2Invalid ? NULL_DOUBLE : v2) - : (isV2Invalid ? v1 : v1 > v2 ? v2 : v1); + return isV1Invalid ? (isV2Invalid ? NULL_DOUBLE : v2) : (isV2Invalid ? v1 : v1 > v2 ? 
v2 : v1); } } diff --git a/DB/src/main/java/io/deephaven/libs/primitives/Replicate.java b/DB/src/main/java/io/deephaven/libs/primitives/Replicate.java index 8455299cf5c..2d1ab2b8115 100644 --- a/DB/src/main/java/io/deephaven/libs/primitives/Replicate.java +++ b/DB/src/main/java/io/deephaven/libs/primitives/Replicate.java @@ -14,21 +14,18 @@ import java.util.List; /** - * Autogenerates primitives from template java files (e.g. CharacterPrimitives, - * ShortNumericPrimitives, FloatNumericPrimitives, and FlotFpPrimitives). + * Autogenerates primitives from template java files (e.g. CharacterPrimitives, ShortNumericPrimitives, + * FloatNumericPrimitives, and FlotFpPrimitives). */ public class Replicate { public static void main(String[] args) throws IOException { - List files = ReplicatePrimitiveCode.charToAllButBoolean(CharacterPrimitives.class, - ReplicatePrimitiveCode.MAIN_SRC); + List files = + ReplicatePrimitiveCode.charToAllButBoolean(CharacterPrimitives.class, ReplicatePrimitiveCode.MAIN_SRC); fixup(files); - ReplicatePrimitiveCode.shortToAllIntegralTypes(ShortNumericPrimitives.class, - ReplicatePrimitiveCode.MAIN_SRC); - ReplicatePrimitiveCode.floatToAllFloatingPoints(FloatNumericPrimitives.class, - ReplicatePrimitiveCode.MAIN_SRC); - ReplicatePrimitiveCode.floatToAllFloatingPoints(FloatFpPrimitives.class, - ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.shortToAllIntegralTypes(ShortNumericPrimitives.class, ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.floatToAllFloatingPoints(FloatNumericPrimitives.class, ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.floatToAllFloatingPoints(FloatFpPrimitives.class, ReplicatePrimitiveCode.MAIN_SRC); } private static void fixup(List files) throws IOException { diff --git a/DB/src/main/java/io/deephaven/process/ProcessInfoConfig.java b/DB/src/main/java/io/deephaven/process/ProcessInfoConfig.java index 021621bfeeb..02f709ce5d6 100644 --- 
a/DB/src/main/java/io/deephaven/process/ProcessInfoConfig.java +++ b/DB/src/main/java/io/deephaven/process/ProcessInfoConfig.java @@ -21,12 +21,11 @@ public class ProcessInfoConfig { public static final String PROCESS_INFO_ID_KEY = "process.info.id"; /** - * The lookup key to see if {@link ProcessInfo#getSystemInfo()} is enabled. If not present, will - * default to {@link #SYSTEM_INFO_ENABLED_DEFAULT}. + * The lookup key to see if {@link ProcessInfo#getSystemInfo()} is enabled. If not present, will default to + * {@link #SYSTEM_INFO_ENABLED_DEFAULT}. */ @SuppressWarnings("WeakerAccess") - public static final String PROCESS_INFO_SYSTEM_INFO_ENABLED_KEY = - "process.info.system-info.enabled"; + public static final String PROCESS_INFO_SYSTEM_INFO_ENABLED_KEY = "process.info.system-info.enabled"; /** * The default value to see if {@link ProcessInfo#getSystemInfo()} is enabled. @@ -64,27 +63,26 @@ public static String getThisProcessIdValue() { return localThisProcessId == null ? null : localThisProcessId.value(); } - public static synchronized ProcessInfo createForCurrentProcess(Configuration config) - throws IOException { + public static synchronized ProcessInfo createForCurrentProcess(Configuration config) throws IOException { if (thisProcessId != null) { throw new IllegalStateException("ProcessInfo already created with ID " + thisProcessId); } final Path path = Paths - .get(config.getStringWithDefault(HOST_PATH_INFO_DIR_KEY, HOST_PATH_INFO_DIR_DEFAULT)); + .get(config.getStringWithDefault(HOST_PATH_INFO_DIR_KEY, HOST_PATH_INFO_DIR_DEFAULT)); final SplayedPath hostPathSplayed = new SplayedPath(path, TRIM, IS_VALUE_BASED); final Builder builder = ImmutableProcessInfo.builder() - .id(thisProcessId = ProcessUniqueId - .of(config.getStringWithDefault(PROCESS_INFO_ID_KEY, STATIC_UUID.toString()))) - .runtimeInfo(RuntimeMxBeanInfo.of(ManagementFactory.getRuntimeMXBean())) - .environmentVariables(EnvironmentVariables.of()) - 
.threadInfo(ThreadMxBeanInfo.of(ManagementFactory.getThreadMXBean())) - .memoryInfo(MemoryMxBeanInfo.of(ManagementFactory.getMemoryMXBean())) - .memoryPoolsInfo(MemoryPoolsMxBeanInfo.of(ManagementFactory.getMemoryPoolMXBeans())) - .applicationArguments(ApplicationArguments.of(Collections.emptyList())) // blerg, todo - .applicationConfig(ApplicationConfig.of(Collections.emptyMap())) // todo - .hostPathInfo(_HostPathInfo.of(hostPathSplayed)); + .id(thisProcessId = ProcessUniqueId + .of(config.getStringWithDefault(PROCESS_INFO_ID_KEY, STATIC_UUID.toString()))) + .runtimeInfo(RuntimeMxBeanInfo.of(ManagementFactory.getRuntimeMXBean())) + .environmentVariables(EnvironmentVariables.of()) + .threadInfo(ThreadMxBeanInfo.of(ManagementFactory.getThreadMXBean())) + .memoryInfo(MemoryMxBeanInfo.of(ManagementFactory.getMemoryMXBean())) + .memoryPoolsInfo(MemoryPoolsMxBeanInfo.of(ManagementFactory.getMemoryPoolMXBeans())) + .applicationArguments(ApplicationArguments.of(Collections.emptyList())) // blerg, todo + .applicationConfig(ApplicationConfig.of(Collections.emptyMap())) // todo + .hostPathInfo(_HostPathInfo.of(hostPathSplayed)); if (config.getBooleanWithDefault(PROCESS_INFO_SYSTEM_INFO_ENABLED_KEY, - SYSTEM_INFO_ENABLED_DEFAULT)) { + SYSTEM_INFO_ENABLED_DEFAULT)) { builder.systemInfo(SystemInfoOshi.forCurrentProcess()); } return builder.build(); diff --git a/DB/src/main/java/io/deephaven/process/ProcessInfoStoreDBImpl.java b/DB/src/main/java/io/deephaven/process/ProcessInfoStoreDBImpl.java index 46f5c9883b9..85f6d32ca87 100644 --- a/DB/src/main/java/io/deephaven/process/ProcessInfoStoreDBImpl.java +++ b/DB/src/main/java/io/deephaven/process/ProcessInfoStoreDBImpl.java @@ -52,8 +52,7 @@ public void visit(final String key, String value) { } } - private void log(final String type, final String key, final String value) - throws IOException { + private void log(final String type, final String key, final String value) throws IOException { logger.log(id.value(), type, key, value); 
} } diff --git a/DB/src/main/java/io/deephaven/python/PyModuleFromResource.java b/DB/src/main/java/io/deephaven/python/PyModuleFromResource.java index 20548e8ac77..2bcfa81fd5a 100644 --- a/DB/src/main/java/io/deephaven/python/PyModuleFromResource.java +++ b/DB/src/main/java/io/deephaven/python/PyModuleFromResource.java @@ -10,19 +10,19 @@ /** * Allows us to create a {@link PyObject} from a resource. * - * This sort of functionality is needed because python is unable to natively find java resource - * modules, since java resources aren't exposed to the filesystem. + * This sort of functionality is needed because python is unable to natively find java resource modules, since java + * resources aren't exposed to the filesystem. * - * Note: we could implement a custom module loader, via https://www.python.org/dev/peps/pep-0302/, - * if we wanted to resolve java resources more natively. + * Note: we could implement a custom module loader, via https://www.python.org/dev/peps/pep-0302/, if we wanted to + * resolve java resources more natively. 
*/ public class PyModuleFromResource { public static PyObject load(Class clazz, String moduleName, String resource) { final String contents; try { contents = Resources.toString( - Resources.getResource(clazz, resource), - StandardCharsets.UTF_8); + Resources.getResource(clazz, resource), + StandardCharsets.UTF_8); } catch (IOException e) { throw new UncheckedIOException(e); } diff --git a/DB/src/main/java/io/deephaven/shadow/javaparser/com/github/javaparser/ExpressionParser.java b/DB/src/main/java/io/deephaven/shadow/javaparser/com/github/javaparser/ExpressionParser.java index fb511cc589a..332f6ec984a 100644 --- a/DB/src/main/java/io/deephaven/shadow/javaparser/com/github/javaparser/ExpressionParser.java +++ b/DB/src/main/java/io/deephaven/shadow/javaparser/com/github/javaparser/ExpressionParser.java @@ -6,13 +6,11 @@ import java.io.StringReader; /** - * Helpful class which parses expressions and performs extra "this is exactly one expression" - * validation + * Helpful class which parses expressions and performs extra "this is exactly one expression" validation */ public class ExpressionParser { public static Expression parseExpression(String expression) throws ParseException, IOException { - // nothing static here, so no synchronization needed (javaparser used to create a - // static-only parser instance) + // nothing static here, so no synchronization needed (javaparser used to create a static-only parser instance) StringReader sr = new StringReader(expression); final ASTParser parser = new ASTParser(sr); final Expression expr = parser.Expression(); @@ -26,20 +24,19 @@ public static Expression parseExpression(String expression) throws ParseExceptio token = token.next; continue; } - // there was a leftover token; javacc recognizes the token, but there's no valid ast - // that it can construct. + // there was a leftover token; javacc recognizes the token, but there's no valid ast that it can construct. // we don't notice that, however, since we throw it way. 
throw new IllegalArgumentException( - "Invalid expression " + expression + " was already terminated after " + expr); + "Invalid expression " + expression + " was already terminated after " + expr); } - // unlikely for there to be tokens left w/out parser.token.next already being non-null (we - // would already have thrown...) + // unlikely for there to be tokens left w/out parser.token.next already being non-null (we would already have + // thrown...) int test; try { while ((test = sr.read()) != -1) { if (!Character.isWhitespace(test)) { - throw new IllegalArgumentException("Invalid expression " + expression - + " was already terminated after " + expr); + throw new IllegalArgumentException( + "Invalid expression " + expression + " was already terminated after " + expr); } } sr.close(); diff --git a/DB/src/main/java/io/deephaven/stream/StreamConsumer.java b/DB/src/main/java/io/deephaven/stream/StreamConsumer.java index d99461ae25f..2f0687845ea 100644 --- a/DB/src/main/java/io/deephaven/stream/StreamConsumer.java +++ b/DB/src/main/java/io/deephaven/stream/StreamConsumer.java @@ -10,20 +10,19 @@ public interface StreamConsumer extends StreamFailureConsumer { /** *

      - * Accept a batch of rows splayed across per-column {@link WritableChunk chunks} of - * {@link Values values}. + * Accept a batch of rows splayed across per-column {@link WritableChunk chunks} of {@link Values values}. * *

      * Ownership of {@code data} passes to the consumer, which must be sure to - * {@link io.deephaven.db.v2.sources.chunk.util.pools.PoolableChunk#close close} each chunk when - * it's no longer needed. + * {@link io.deephaven.db.v2.sources.chunk.util.pools.PoolableChunk#close close} each chunk when it's no longer + * needed. * *

      - * Implementations will generally have a mechanism for determining the expected number and type - * of input chunks, but this is not dictated at the interface level. + * Implementations will generally have a mechanism for determining the expected number and type of input chunks, but + * this is not dictated at the interface level. * - * @param data Per-column {@link WritableChunk chunks} of {@link Values values}. Must all have - * the same {@link WritableChunk#size() size}. + * @param data Per-column {@link WritableChunk chunks} of {@link Values values}. Must all have the same + * {@link WritableChunk#size() size}. */ @SuppressWarnings("unchecked") // There's no actual possibility of heap-pollution, here. void accept(@NotNull WritableChunk... data); diff --git a/DB/src/main/java/io/deephaven/stream/StreamFailureConsumer.java b/DB/src/main/java/io/deephaven/stream/StreamFailureConsumer.java index f2eb8a5331e..ea2de88bc5b 100644 --- a/DB/src/main/java/io/deephaven/stream/StreamFailureConsumer.java +++ b/DB/src/main/java/io/deephaven/stream/StreamFailureConsumer.java @@ -3,8 +3,7 @@ import org.jetbrains.annotations.NotNull; /** - * An interface for accepting failures from an incoming stream in order to propagate them to - * downstream tables. + * An interface for accepting failures from an incoming stream in order to propagate them to downstream tables. */ public interface StreamFailureConsumer { /** diff --git a/DB/src/main/java/io/deephaven/stream/StreamPublisher.java b/DB/src/main/java/io/deephaven/stream/StreamPublisher.java index 385a159b0bb..5dac583ee41 100644 --- a/DB/src/main/java/io/deephaven/stream/StreamPublisher.java +++ b/DB/src/main/java/io/deephaven/stream/StreamPublisher.java @@ -10,14 +10,12 @@ public interface StreamPublisher { /** *

      - * Register a {@link StreamConsumer consumer} whose - * {@link StreamConsumer#accept(WritableChunk[]) accept} method will be used when sufficient - * data is accumulated or on {@link #flush()}. + * Register a {@link StreamConsumer consumer} whose {@link StreamConsumer#accept(WritableChunk[]) accept} method + * will be used when sufficient data is accumulated or on {@link #flush()}. * *

      - * {@code consumer} must typically be primed to expect the same - * {@link io.deephaven.db.v2.sources.chunk.ChunkType chunk types} that this produces, in the - * same order. + * {@code consumer} must typically be primed to expect the same {@link io.deephaven.db.v2.sources.chunk.ChunkType + * chunk types} that this produces, in the same order. * * @param consumer The consumer * @throws IllegalStateException If a consumer has already been registered for this producer @@ -25,8 +23,8 @@ public interface StreamPublisher { void register(@NotNull StreamConsumer consumer); /** - * Flush any accumulated data in this publisher to the {@link StreamConsumer consumer}, by - * invoking its {@link StreamConsumer#accept(WritableChunk[]) accept} method. + * Flush any accumulated data in this publisher to the {@link StreamConsumer consumer}, by invoking its + * {@link StreamConsumer#accept(WritableChunk[]) accept} method. */ void flush(); } diff --git a/DB/src/main/java/io/deephaven/stream/StreamToTableAdapter.java b/DB/src/main/java/io/deephaven/stream/StreamToTableAdapter.java index 42882d30b9a..3442fcba1b0 100644 --- a/DB/src/main/java/io/deephaven/stream/StreamToTableAdapter.java +++ b/DB/src/main/java/io/deephaven/stream/StreamToTableAdapter.java @@ -44,16 +44,11 @@ public class StreamToTableAdapter implements SafeCloseable, LiveTable, StreamCon private final Index index; private final SwitchColumnSource[] switchSources; - /** - * To start out when we have no data, we use null value column sources which are cheap and - * singletons. - */ + /** To start out when we have no data, we use null value column sources which are cheap and singletons. 
*/ private final NullValueColumnSource[] nullColumnSources; - // we accumulate data into buffer from the ingester thread; capture it into current on the LTM - // thread; move it into - // prev after one cycle, and then then the cycle after that we clear out the chunks and reuse - // them for the buffers + // we accumulate data into buffer from the ingester thread; capture it into current on the LTM thread; move it into + // prev after one cycle, and then then the cycle after that we clear out the chunks and reuse them for the buffers // they all start out null in the constructor private ChunkColumnSource[] bufferChunkSources; private ChunkColumnSource[] currentChunkSources; @@ -63,8 +58,8 @@ public class StreamToTableAdapter implements SafeCloseable, LiveTable, StreamCon private List enqueuedFailure; public StreamToTableAdapter(@NotNull final TableDefinition tableDefinition, - @NotNull final StreamPublisher streamPublisher, - @NotNull final LiveTableRegistrar liveTableRegistrar) { + @NotNull final StreamPublisher streamPublisher, + @NotNull final LiveTableRegistrar liveTableRegistrar) { this.tableDefinition = tableDefinition; this.streamPublisher = streamPublisher; this.liveTableRegistrar = liveTableRegistrar; @@ -115,27 +110,24 @@ public ChunkType chunkTypeForIndex(int idx) { * @return an array of writable chunks */ public static WritableChunk[] makeChunksForDefinition(TableDefinition definition, int size) { - return definition.getColumnStream().map(cd -> makeChunk(cd, size)) - .toArray(WritableChunk[]::new); + return definition.getColumnStream().map(cd -> makeChunk(cd, size)).toArray(WritableChunk[]::new); } @NotNull private static ChunkColumnSource[] makeChunkSources(TableDefinition tableDefinition) { final TLongArrayList offsets = new TLongArrayList(); return tableDefinition.getColumnStream().map(cd -> makeChunkSourceForColumn(offsets, cd)) - .toArray(ChunkColumnSource[]::new); + .toArray(ChunkColumnSource[]::new); } @NotNull - private static ChunkColumnSource 
makeChunkSourceForColumn(TLongArrayList offsets, - ColumnDefinition cd) { + private static ChunkColumnSource makeChunkSourceForColumn(TLongArrayList offsets, ColumnDefinition cd) { final Class replacementType = replacementType(cd.getDataType()); if (replacementType != null) { - return ChunkColumnSource.make(ChunkType.fromElementType(replacementType), - replacementType, null, offsets); + return ChunkColumnSource.make(ChunkType.fromElementType(replacementType), replacementType, null, offsets); } else { - return ChunkColumnSource.make(ChunkType.fromElementType(cd.getDataType()), - cd.getDataType(), cd.getComponentType(), offsets); + return ChunkColumnSource.make(ChunkType.fromElementType(cd.getDataType()), cd.getDataType(), + cd.getComponentType(), offsets); } } @@ -155,15 +147,12 @@ private static ChunkType chunkTypeForColumn(ColumnDefinition cd) { } @NotNull - private static NullValueColumnSource[] makeNullColumnSources( - TableDefinition tableDefinition) { - return tableDefinition.getColumnStream() - .map(StreamToTableAdapter::makeNullValueColumnSourceFromDefinition) - .toArray(NullValueColumnSource[]::new); + private static NullValueColumnSource[] makeNullColumnSources(TableDefinition tableDefinition) { + return tableDefinition.getColumnStream().map(StreamToTableAdapter::makeNullValueColumnSourceFromDefinition) + .toArray(NullValueColumnSource[]::new); } - private static NullValueColumnSource makeNullValueColumnSourceFromDefinition( - ColumnDefinition cd) { + private static NullValueColumnSource makeNullValueColumnSourceFromDefinition(ColumnDefinition cd) { final Class replacementType = replacementType(cd.getDataType()); if (replacementType != null) { return NullValueColumnSource.getInstance(replacementType, null); @@ -175,12 +164,12 @@ private static NullValueColumnSource makeNullValueColumnSourceFromDefinition( @NotNull private static SwitchColumnSource[] makeSwitchSources(TableDefinition definition, - NullValueColumnSource[] wrapped, Map> visibleSourcesMap) { + 
NullValueColumnSource[] wrapped, Map> visibleSourcesMap) { final SwitchColumnSource[] switchSources = new SwitchColumnSource[wrapped.length]; final ColumnDefinition[] columns = definition.getColumns(); for (int ii = 0; ii < wrapped.length; ++ii) { - final SwitchColumnSource switchSource = new SwitchColumnSource<>(wrapped[ii], - StreamToTableAdapter::maybeClearChunkColumnSource); + final SwitchColumnSource switchSource = + new SwitchColumnSource<>(wrapped[ii], StreamToTableAdapter::maybeClearChunkColumnSource); final ColumnSource visibleSource; if (columns[ii].getDataType() == DBDateTime.class) { @@ -205,8 +194,8 @@ private static void maybeClearChunkColumnSource(ColumnSource cs) { } /** - * We change the inner columns to long and byte for DBDateTime and Boolean, respectively. We - * expect our ingesters to pass us these primitive chunks for those types. + * We change the inner columns to long and byte for DBDateTime and Boolean, respectively. We expect our ingesters to + * pass us these primitive chunks for those types. 
* * @param columnType the type of the outer column * @@ -248,13 +237,13 @@ public void refresh() { private void doRefresh() { synchronized (this) { - // if we have an enqueued failure we want to process it first, before we allow the - // streamPublisher to flush itself + // if we have an enqueued failure we want to process it first, before we allow the streamPublisher to flush + // itself if (enqueuedFailure != null) { throw new UncheckedDeephavenException( - MultiException.maybeWrapInMultiException( - "Multiple errors encountered while ingesting stream", - enqueuedFailure.toArray(new Exception[0]))); + MultiException.maybeWrapInMultiException( + "Multiple errors encountered while ingesting stream", + enqueuedFailure.toArray(new Exception[0]))); } } @@ -296,8 +285,7 @@ private void doRefresh() { index.removeRange(newSize, oldSize - 1); } - table.notifyListeners( - new ShiftAwareListener.Update(Index.CURRENT_FACTORY.getFlatIndex(newSize), + table.notifyListeners(new ShiftAwareListener.Update(Index.CURRENT_FACTORY.getFlatIndex(newSize), Index.CURRENT_FACTORY.getFlatIndex(oldSize), Index.CURRENT_FACTORY.getEmptyIndex(), IndexShiftData.EMPTY, ModifiedColumnSet.EMPTY)); } @@ -311,12 +299,11 @@ public final void accept(@NotNull final WritableChunk... 
data bufferChunkSources = makeChunkSources(tableDefinition); } if (data.length != bufferChunkSources.length) { - // TODO: Our error handling should be better when in the ingester thread; since it - // seems proper to kill + // TODO: Our error handling should be better when in the ingester thread; since it seems proper to kill // the ingester, and also notify downstream tables // https://github.com/deephaven/deephaven-core/issues/934 - throw new IllegalStateException("StreamConsumer data length = " + data.length - + " chunks, expected " + bufferChunkSources.length); + throw new IllegalStateException("StreamConsumer data length = " + data.length + " chunks, expected " + + bufferChunkSources.length); } for (int ii = 0; ii < data.length; ++ii) { Assert.eq(data[0].size(), "data[0].size()", data[ii].size(), "data[ii].size()"); diff --git a/DB/src/main/java/io/deephaven/util/calendar/AbstractBusinessCalendar.java b/DB/src/main/java/io/deephaven/util/calendar/AbstractBusinessCalendar.java index 59e9224d16b..056ef9edf0a 100644 --- a/DB/src/main/java/io/deephaven/util/calendar/AbstractBusinessCalendar.java +++ b/DB/src/main/java/io/deephaven/util/calendar/AbstractBusinessCalendar.java @@ -12,8 +12,7 @@ import java.util.ArrayList; import java.util.List; -public abstract class AbstractBusinessCalendar extends AbstractCalendar - implements BusinessCalendar { +public abstract class AbstractBusinessCalendar extends AbstractCalendar implements BusinessCalendar { public boolean isBusinessDay(final DBDateTime time) { return fractionOfStandardBusinessDay(time) > 0.0; @@ -471,8 +470,7 @@ public double diffNonBusinessDay(final DBDateTime start, final DBDateTime end) { return QueryConstants.NULL_DOUBLE; } - return (double) diffNonBusinessNanos(start, end) - / (double) standardBusinessDayLengthNanos(); + return (double) diffNonBusinessNanos(start, end) / (double) standardBusinessDayLengthNanos(); } public int numberOfBusinessDays(DBDateTime start, DBDateTime end) { @@ -481,7 +479,7 @@ 
public int numberOfBusinessDays(DBDateTime start, DBDateTime end) { public int numberOfBusinessDays(DBDateTime start, DBDateTime end, final boolean endInclusive) { return numberOfBusinessDays(start == null ? null : start.toDateString(timeZone()), - end == null ? null : end.toDateString(timeZone()), endInclusive); + end == null ? null : end.toDateString(timeZone()), endInclusive); } public int numberOfBusinessDays(String start, String end) { @@ -515,36 +513,31 @@ public int numberOfNonBusinessDays(DBDateTime start, DBDateTime end) { return numberOfNonBusinessDays(start, end, false); } - public int numberOfNonBusinessDays(DBDateTime start, DBDateTime end, - final boolean endInclusive) { + public int numberOfNonBusinessDays(DBDateTime start, DBDateTime end, final boolean endInclusive) { return numberOfNonBusinessDays(start == null ? null : start.toDateString(timeZone()), - end == null ? null : end.toDateString(timeZone()), endInclusive); + end == null ? null : end.toDateString(timeZone()), endInclusive); } public int numberOfNonBusinessDays(final String start, final String end) { return numberOfNonBusinessDays(start, end, false); } - public int numberOfNonBusinessDays(final String start, final String end, - final boolean endInclusive) { + public int numberOfNonBusinessDays(final String start, final String end, final boolean endInclusive) { if (start == null || end == null) { return QueryConstants.NULL_INT; } - return numberOfDays(start, end, endInclusive) - - numberOfBusinessDays(start, end, endInclusive); + return numberOfDays(start, end, endInclusive) - numberOfBusinessDays(start, end, endInclusive); } public double fractionOfStandardBusinessDay(final DBDateTime time) { final BusinessSchedule businessDate = getBusinessSchedule(time); - return businessDate == null ? 0.0 - : (double) businessDate.getLOBD() / (double) standardBusinessDayLengthNanos(); + return businessDate == null ? 
0.0 : (double) businessDate.getLOBD() / (double) standardBusinessDayLengthNanos(); } public double fractionOfStandardBusinessDay(final String date) { final BusinessSchedule businessDate = getBusinessSchedule(date); - return businessDate == null ? 0.0 - : (double) businessDate.getLOBD() / (double) standardBusinessDayLengthNanos(); + return businessDate == null ? 0.0 : (double) businessDate.getLOBD() / (double) standardBusinessDayLengthNanos(); } public double fractionOfBusinessDayRemaining(final DBDateTime time) { @@ -558,8 +551,7 @@ public double fractionOfBusinessDayRemaining(final DBDateTime time) { } long businessDaySoFar = businessDate.businessTimeElapsed(time); - return (double) (businessDate.getLOBD() - businessDaySoFar) - / (double) businessDate.getLOBD(); + return (double) (businessDate.getLOBD() - businessDaySoFar) / (double) businessDate.getLOBD(); } public double fractionOfBusinessDayComplete(final DBDateTime time) { @@ -582,8 +574,7 @@ public boolean isLastBusinessDayOfMonth(final String date) { String nextBusAfterDate = nextBusinessDay(date); // covers case December to January - return (DateStringUtils.monthOfYear(date) - - DateStringUtils.monthOfYear(nextBusAfterDate)) != 0; + return (DateStringUtils.monthOfYear(date) - DateStringUtils.monthOfYear(nextBusAfterDate)) != 0; } public boolean isLastBusinessDayOfWeek(final DBDateTime time) { @@ -596,8 +587,7 @@ public boolean isLastBusinessDayOfWeek(final String date) { } String nextBusinessDay = nextBusinessDay(date); - return dayOfWeek(date).compareTo(dayOfWeek(nextBusinessDay)) > 0 - || numberOfDays(date, nextBusinessDay) > 6; + return dayOfWeek(date).compareTo(dayOfWeek(nextBusinessDay)) > 0 || numberOfDays(date, nextBusinessDay) > 6; } } diff --git a/DB/src/main/java/io/deephaven/util/calendar/AbstractCalendar.java b/DB/src/main/java/io/deephaven/util/calendar/AbstractCalendar.java index 1dfd5c21e6a..2c2bfe666d4 100644 --- a/DB/src/main/java/io/deephaven/util/calendar/AbstractCalendar.java +++ 
b/DB/src/main/java/io/deephaven/util/calendar/AbstractCalendar.java @@ -25,8 +25,7 @@ public String previousDay(final DBDateTime time, final int days) { return null; } - final LocalDate t = - DBTimeUtils.getZonedDateTime(time, timeZone()).toLocalDate().minusDays(days); + final LocalDate t = DBTimeUtils.getZonedDateTime(time, timeZone()).toLocalDate().minusDays(days); return DateStringUtils.format(t); } @@ -53,8 +52,7 @@ public String nextDay(final DBDateTime time, final int days) { return null; } - final LocalDate t = - DBTimeUtils.getZonedDateTime(time, timeZone()).toLocalDate().plusDays(days); + final LocalDate t = DBTimeUtils.getZonedDateTime(time, timeZone()).toLocalDate().plusDays(days); return DateStringUtils.format(t); } @@ -110,10 +108,9 @@ public int numberOfDays(final DBDateTime start, final DBDateTime end) { return numberOfDays(start, end, false); } - public int numberOfDays(final DBDateTime start, final DBDateTime end, - final boolean endInclusive) { + public int numberOfDays(final DBDateTime start, final DBDateTime end, final boolean endInclusive) { return numberOfDays(start == null ? null : start.toDateString(timeZone()), - end == null ? null : end.toDateString(timeZone()), endInclusive); + end == null ? null : end.toDateString(timeZone()), endInclusive); } public int numberOfDays(final String start, final String end) { diff --git a/DB/src/main/java/io/deephaven/util/calendar/BusinessCalendar.java b/DB/src/main/java/io/deephaven/util/calendar/BusinessCalendar.java index 3a5213dc245..4d62bcf620d 100644 --- a/DB/src/main/java/io/deephaven/util/calendar/BusinessCalendar.java +++ b/DB/src/main/java/io/deephaven/util/calendar/BusinessCalendar.java @@ -26,8 +26,8 @@ public interface BusinessCalendar extends Calendar { List getDefaultBusinessPeriods(); /** - * Gets business schedules for dates that are different from the defaults. This returns all - * dates that are defined as a holiday for the calendar. 
+ * Gets business schedules for dates that are different from the defaults. This returns all dates that are defined + * as a holiday for the calendar. * * @return a map of dates and to their business periods */ @@ -52,8 +52,8 @@ default boolean isBusinessDay() { } /** - * Is the day of the week a business day? A business day is a day that has a business schedule - * with one or more business periods defined. + * Is the day of the week a business day? A business day is a day that has a business schedule with one or more + * business periods defined. * * @param day a day of the week * @return true if the day is a business day; false otherwise. @@ -85,8 +85,8 @@ default boolean isBusinessDay() { boolean isBusinessDay(final LocalDate date); /** - * Determines if the specified time is a business time. If the time falls between business - * periods, false will be returned. + * Determines if the specified time is a business time. If the time falls between business periods, false will be + * returned. * * @param time time * @return true if the specified time is a business time; otherwise, false. @@ -103,8 +103,8 @@ default String previousBusinessDay() { } /** - * Gets the business date {@code days} business days before the current day. If {@code days} is - * zero and today is not a business day, null is returned. + * Gets the business date {@code days} business days before the current day. If {@code days} is zero and today is + * not a business day, null is returned. * * @param days number of days * @return the business date {@code days} business days before the current day @@ -122,8 +122,8 @@ default String previousBusinessDay(int days) { String previousBusinessDay(final DBDateTime time); /** - * Gets the business date {@code days} business days before input {@code time}. If {@code days} - * is zero and the day is not a business day, null is returned. + * Gets the business date {@code days} business days before input {@code time}. 
If {@code days} is zero and the day + * is not a business day, null is returned. * * @param time time; if null, return null * @param days number of days @@ -140,8 +140,8 @@ default String previousBusinessDay(int days) { String previousBusinessDay(String date); /** - * Gets the business date {@code days} business days before input {@code date}. If {@code days} - * is zero and the day is not a business day, null is returned. + * Gets the business date {@code days} business days before input {@code date}. If {@code days} is zero and the day + * is not a business day, null is returned. * * @param date date; if null, return null * @param days number of days @@ -222,8 +222,8 @@ default String previousNonBusinessDay() { } /** - * Gets the non-business date {@code days} non-business days before the current day. If - * {@code days} is zero and the day is a business day, null is returned. + * Gets the non-business date {@code days} non-business days before the current day. If {@code days} is zero and the + * day is a business day, null is returned. * * @param days number of days * @return the non-business date {@code days} non-business days before the current day @@ -241,8 +241,8 @@ default String previousNonBusinessDay(int days) { String previousNonBusinessDay(final DBDateTime time); /** - * Gets the non-business date {@code days} non-business days before input {@code time}. If - * {@code days} is zero and the day is a business day, null is returned. + * Gets the non-business date {@code days} non-business days before input {@code time}. If {@code days} is zero and + * the day is a business day, null is returned. * * @param time time; if null, return null * @param days number of days @@ -259,8 +259,8 @@ default String previousNonBusinessDay(int days) { String previousNonBusinessDay(String date); /** - * Gets the non-business date {@code days} non-business days before input {@code date}. If - * {@code days} is zero and the day is a business day, null is returned. 
+ * Gets the non-business date {@code days} non-business days before input {@code date}. If {@code days} is zero and + * the day is a business day, null is returned. * * @param date date; if null, return null * @param days number of days @@ -278,8 +278,8 @@ default String nextBusinessDay() { } /** - * Gets the business date {@code days} business days after the current day. If {@code days} is - * zero and today is not a business day, null is returned. + * Gets the business date {@code days} business days after the current day. If {@code days} is zero and today is not + * a business day, null is returned. * * @param days number of days * @return the business date {@code days} business days after the current day @@ -297,8 +297,8 @@ default String nextBusinessDay(int days) { String nextBusinessDay(final DBDateTime time); /** - * Gets the business date {@code days} business days after input {@code time}. If {@code days} - * is zero and the day is not a business day, null is returned. + * Gets the business date {@code days} business days after input {@code time}. If {@code days} is zero and the day + * is not a business day, null is returned. * * @param time time; if null, return null * @param days number of days @@ -315,8 +315,8 @@ default String nextBusinessDay(int days) { String nextBusinessDay(String date); /** - * Gets the business date {@code days} business days after input {@code date}. If {@code days} - * is zero and the day is not a business day, null is returned. + * Gets the business date {@code days} business days after input {@code date}. If {@code days} is zero and the day + * is not a business day, null is returned. * * @param date date; if null, return null * @param days number of days @@ -336,8 +336,7 @@ default BusinessSchedule nextBusinessSchedule() { /** * Gets the business schedule {@code days} days after the current day. * - * If the current day is null, assumes the implementation of getBusinessSchedule(null) returns - * null. 
+ * If the current day is null, assumes the implementation of getBusinessSchedule(null) returns null. * * @param days number of days * @return the next closest business schedule after the current day @@ -357,8 +356,7 @@ default BusinessSchedule nextBusinessSchedule(int days) { /** * Gets the business schedule {@code days} days after input {@code time}. * - * If {@code date} is null, assumes the implementation of getBusinessSchedule(null) returns - * null. + * If {@code date} is null, assumes the implementation of getBusinessSchedule(null) returns null. * * @param time time; if null, return null * @param days number of days @@ -379,8 +377,7 @@ default BusinessSchedule nextBusinessSchedule(int days) { /** * Gets the business schedule {@code days} days after input {@code date}. * - * If {@code date} is null, assumes the implementation of getBusinessSchedule(null) returns - * null. + * If {@code date} is null, assumes the implementation of getBusinessSchedule(null) returns null. * * @param date date; if null, return null * @param days number of days @@ -398,8 +395,8 @@ default String nextNonBusinessDay() { } /** - * Gets the non-business date {@code days} non-business days after the current day. If - * {@code days} is zero and the day is a business day, null is returned. + * Gets the non-business date {@code days} non-business days after the current day. If {@code days} is zero and the + * day is a business day, null is returned. * * @param days number of days * @return the non-business date {@code days} non-business days after the current day @@ -417,8 +414,8 @@ default String nextNonBusinessDay(int days) { String nextNonBusinessDay(final DBDateTime time); /** - * Gets the non-business date {@code days} non-business days after input {@code time}. If - * {@code days} is zero and the day is a business day, null is returned. + * Gets the non-business date {@code days} non-business days after input {@code time}. 
If {@code days} is zero and + * the day is a business day, null is returned. * * @param time time; if null, return null * @param days number of days @@ -435,8 +432,8 @@ default String nextNonBusinessDay(int days) { String nextNonBusinessDay(String date); /** - * Gets the non-business date {@code days} non-business days after input {@code date}. If - * {@code days} is zero and the day is a business day, null is returned. + * Gets the non-business date {@code days} non-business days after input {@code date}. If {@code days} is zero and + * the day is a business day, null is returned. * * @param date date; if null, return null * @param days number of days @@ -447,8 +444,8 @@ default String nextNonBusinessDay(int days) { /** * Returns the business days between {@code start} and {@code end}, inclusive. * - * Because no time information (e.g., hours, minutes, seconds) is returned, the corresponding - * days for {@code start} and {@code end} will be included if they are business days. + * Because no time information (e.g., hours, minutes, seconds) is returned, the corresponding days for {@code start} + * and {@code end} will be included if they are business days. * * @param start start time; if null, return empty array * @param end end time; if null, return empty array @@ -459,8 +456,8 @@ default String nextNonBusinessDay(int days) { /** * Returns the business days between {@code start} and {@code end}, inclusive. * - * Because no time information (e.g., hours, minutes, seconds) is returned, the corresponding - * days for {@code start} and {@code end} will be included if they are business days. + * Because no time information (e.g., hours, minutes, seconds) is returned, the corresponding days for {@code start} + * and {@code end} will be included if they are business days. 
* * @param start start time; if null, return empty array * @param end end time; if null, return empty array @@ -471,8 +468,8 @@ default String nextNonBusinessDay(int days) { /** * Returns the non-business days between {@code start} and {@code end}, inclusive. * - * Because no time information (e.g., hours, minutes, seconds) is returned, the corresponding - * days for {@code start} and {@code end} will be included if they are non-business days. + * Because no time information (e.g., hours, minutes, seconds) is returned, the corresponding days for {@code start} + * and {@code end} will be included if they are non-business days. * * @param start start time; if null, return empty array * @param end end time; if null, return empty array @@ -483,8 +480,8 @@ default String nextNonBusinessDay(int days) { /** * Returns the non-business days between {@code start} and {@code end}, inclusive. * - * Because no time information (e.g., hours, minutes, seconds) is returned, the corresponding - * days for {@code start} and {@code end} will be included if they are non-business days. + * Because no time information (e.g., hours, minutes, seconds) is returned, the corresponding days for {@code start} + * and {@code end} will be included if they are non-business days. * * @param start start time; if null, return empty array * @param end end time; if null, return empty array @@ -513,30 +510,25 @@ default String nextNonBusinessDay(int days) { * * @param start start time; if null, return NULL_LONG * @param end end time; if null, return NULL_LONG - * @return the amount of non-business time in nanoseconds between the {@code start} and - * {@code end} + * @return the amount of non-business time in nanoseconds between the {@code start} and {@code end} */ long diffNonBusinessNanos(final DBDateTime start, final DBDateTime end); /** - * Returns the amount of business time in standard business days between {@code start} and - * {@code end}. 
+ * Returns the amount of business time in standard business days between {@code start} and {@code end}. * * @param start start time; if null, return NULL_LONG * @param end end time; if null, return NULL_LONG - * @return the amount of business time in standard business days between the {@code start} and - * {@code end} + * @return the amount of business time in standard business days between the {@code start} and {@code end} */ double diffBusinessDay(final DBDateTime start, final DBDateTime end); /** - * Returns the amount of non-business time in standard business days between {@code start} and - * {@code end}. + * Returns the amount of non-business time in standard business days between {@code start} and {@code end}. * * @param start start time; if null, return NULL_LONG * @param end end time; if null, return NULL_LONG - * @return the amount of non-business time in standard business days between the {@code start} - * and {@code end} + * @return the amount of non-business time in standard business days between the {@code start} and {@code end} */ double diffNonBusinessDay(final DBDateTime start, final DBDateTime end); @@ -545,8 +537,7 @@ default String nextNonBusinessDay(int days) { * * @param start start; if null, return null * @param end end; if null, return null - * @return the amount of business time in business years between the {@code start} and - * {@code end} + * @return the amount of business time in business years between the {@code start} and {@code end} */ double diffBusinessYear(DBDateTime start, DBDateTime end); @@ -555,8 +546,7 @@ default String nextNonBusinessDay(int days) { * * @param start start time; if null, return NULL_INT * @param end end time; if null, return NULL_INT - * @return number of business days between the {@code start} and {@code end}, inclusive and - * exclusive respectively. + * @return number of business days between the {@code start} and {@code end}, inclusive and exclusive respectively. 
*/ int numberOfBusinessDays(DBDateTime start, DBDateTime end); @@ -566,8 +556,8 @@ default String nextNonBusinessDay(int days) { * @param start start time; if null, return NULL_LONG * @param end end time; if null, return NULL_LONG * @param endInclusive whether to treat the {@code end} inclusive or exclusively - * @return number of business days between the {@code start} and {@code end}, inclusive and - * {@code endInclusive} respectively. + * @return number of business days between the {@code start} and {@code end}, inclusive and {@code endInclusive} + * respectively. */ int numberOfBusinessDays(DBDateTime start, DBDateTime end, final boolean endInclusive); @@ -576,8 +566,7 @@ default String nextNonBusinessDay(int days) { * * @param start start time; if null, return NULL_INT * @param end end time; if null, return NULL_INT - * @return number of business days between the {@code start} and {@code end}, inclusive and - * exclusive respectively. + * @return number of business days between the {@code start} and {@code end}, inclusive and exclusive respectively. */ int numberOfBusinessDays(String start, String end); @@ -587,8 +576,8 @@ default String nextNonBusinessDay(int days) { * @param start start time; if null, return NULL_INT * @param end end time; if null, return NULL_INT * @param endInclusive whether to treat the {@code end} inclusive or exclusively - * @return number of business days between the {@code start} and {@code end}, inclusive and - * {@code endInclusive} respectively. + * @return number of business days between the {@code start} and {@code end}, inclusive and {@code endInclusive} + * respectively. 
*/ int numberOfBusinessDays(String start, String end, final boolean endInclusive); @@ -597,8 +586,7 @@ default String nextNonBusinessDay(int days) { * * @param start start time; if null, return NULL_INT * @param end end time; if null, return NULL_INT - * @return number of business days between the {@code start} and {@code end}, inclusive and - * exclusive respectively. + * @return number of business days between the {@code start} and {@code end}, inclusive and exclusive respectively. */ int numberOfNonBusinessDays(DBDateTime start, DBDateTime end); @@ -608,8 +596,8 @@ default String nextNonBusinessDay(int days) { * @param start start time; if null, return NULL_LONG * @param end end time; if null, return NULL_LONG * @param endInclusive whether to treat the {@code end} inclusive or exclusively - * @return number of business days between the {@code start} and {@code end}, inclusive and - * {@code endInclusive} respectively. + * @return number of business days between the {@code start} and {@code end}, inclusive and {@code endInclusive} + * respectively. */ int numberOfNonBusinessDays(DBDateTime start, DBDateTime end, final boolean endInclusive); @@ -628,30 +616,27 @@ default String nextNonBusinessDay(int days) { * @param start start time; if null, return NULL_INT * @param end end time; if null, return NULL_INT * @param endInclusive whether to treat the {@code end} inclusive or exclusively - * @return number of non-business days between the {@code start} and {@code end}, inclusive and - * {@code endInclusive} respectively. + * @return number of non-business days between the {@code start} and {@code end}, inclusive and {@code endInclusive} + * respectively. */ int numberOfNonBusinessDays(final String start, final String end, final boolean endInclusive); /** - * Returns the ratio of the current day's business day length and the standard business day - * length. For example, a holiday has zero business time and will therefore return 0.0. 
A normal - * business day will be of the standard length and will therefore return 1.0. A half day holiday - * will return 0.5. + * Returns the ratio of the current day's business day length and the standard business day length. For example, a + * holiday has zero business time and will therefore return 0.0. A normal business day will be of the standard + * length and will therefore return 1.0. A half day holiday will return 0.5. * * @see BusinessCalendar#fractionOfBusinessDayRemaining(DBDateTime) - * @return ratio of the business day length and the standard business day length for the current - * day + * @return ratio of the business day length and the standard business day length for the current day */ default double fractionOfStandardBusinessDay() { return fractionOfStandardBusinessDay(currentDay()); } /** - * For the given date, returns the ratio of the business day length and the standard business - * day length. For example, a holiday has zero business time and will therefore return 0.0. A - * normal business day will be of the standard length and will therefore return 1.0. A half day - * holiday will return 0.5. + * For the given date, returns the ratio of the business day length and the standard business day length. For + * example, a holiday has zero business time and will therefore return 0.0. A normal business day will be of the + * standard length and will therefore return 1.0. A half day holiday will return 0.5. * * @see BusinessCalendar#fractionOfBusinessDayRemaining(DBDateTime) * @param time time; if null, return 0 @@ -660,10 +645,9 @@ default double fractionOfStandardBusinessDay() { double fractionOfStandardBusinessDay(final DBDateTime time); /** - * For the given date, returns the ratio of the business day length and the standard business - * day length. For example, a holiday has zero business time and will therefore return 0.0. A - * normal business day will be of the standard length and will therefore return 1.0. 
A half day - * holiday will return 0.5. + * For the given date, returns the ratio of the business day length and the standard business day length. For + * example, a holiday has zero business time and will therefore return 0.0. A normal business day will be of the + * standard length and will therefore return 1.0. A half day holiday will return 0.5. * * @see BusinessCalendar#fractionOfBusinessDayRemaining(DBDateTime) * @param date date; if null, return 0 @@ -691,8 +675,8 @@ default double fractionOfStandardBusinessDay() { * Is the time on the last business day of the month with business time remaining? * * @param time time - * @return true if {@code time} is on the last business day of the month with business time - * remaining; false otherwise. + * @return true if {@code time} is on the last business day of the month with business time remaining; false + * otherwise. */ boolean isLastBusinessDayOfMonth(final DBDateTime time); @@ -726,8 +710,8 @@ default boolean isLastBusinessDayOfWeek() { * Is the time on the last business day of the week with business time remaining? * * @param time time - * @return true if {@code time} is on the last business day of the week with business time - * remaining; false otherwise. + * @return true if {@code time} is on the last business day of the week with business time remaining; false + * otherwise. */ boolean isLastBusinessDayOfWeek(final DBDateTime time); @@ -767,8 +751,7 @@ default boolean isLastBusinessDayOfWeek() { BusinessSchedule getBusinessDay(final LocalDate date); /** - * Gets the indicated business day's schedule. {@code getBusinessSchedule(null)} returns - * {@code null}. + * Gets the indicated business day's schedule. {@code getBusinessSchedule(null)} returns {@code null}. 
* * @param time time * @return the corresponding BusinessSchedule of {@code time}; null if time is null @@ -776,8 +759,7 @@ default boolean isLastBusinessDayOfWeek() { BusinessSchedule getBusinessSchedule(final DBDateTime time); /** - * Gets the indicated business day's schedule. {@code getBusinessSchedule(null)} returns - * {@code null}. + * Gets the indicated business day's schedule. {@code getBusinessSchedule(null)} returns {@code null}. * * @param date date * @return the corresponding BusinessSchedule of {@code date} @@ -785,8 +767,7 @@ default boolean isLastBusinessDayOfWeek() { BusinessSchedule getBusinessSchedule(String date); /** - * Gets the indicated business day's schedule. {@code getBusinessSchedule(null)} returns - * {@code null}. + * Gets the indicated business day's schedule. {@code getBusinessSchedule(null)} returns {@code null}. * * @param date date * @return the corresponding BusinessSchedule of {@code date} diff --git a/DB/src/main/java/io/deephaven/util/calendar/BusinessPeriod.java b/DB/src/main/java/io/deephaven/util/calendar/BusinessPeriod.java index fa02f208527..83a1b45d751 100644 --- a/DB/src/main/java/io/deephaven/util/calendar/BusinessPeriod.java +++ b/DB/src/main/java/io/deephaven/util/calendar/BusinessPeriod.java @@ -23,13 +23,11 @@ public class BusinessPeriod implements Serializable { this.endTime = endTime; if (startTime == null || endTime == null) { - throw new IllegalArgumentException( - "Null argument: startTime=" + startTime + " endTime=" + endTime); + throw new IllegalArgumentException("Null argument: startTime=" + startTime + " endTime=" + endTime); } if (startTime.getNanos() > endTime.getNanos()) { - throw new IllegalArgumentException( - "Start is after end: startTime=" + startTime + " endTime=" + endTime); + throw new IllegalArgumentException("Start is after end: startTime=" + startTime + " endTime=" + endTime); } } @@ -67,7 +65,6 @@ public long getLength() { * @return true if the time is in this period; otherwise, false. 
*/ public boolean contains(final DBDateTime time) { - return time != null - && (startTime.getNanos() <= time.getNanos() && time.getNanos() <= endTime.getNanos()); + return time != null && (startTime.getNanos() <= time.getNanos() && time.getNanos() <= endTime.getNanos()); } } diff --git a/DB/src/main/java/io/deephaven/util/calendar/BusinessSchedule.java b/DB/src/main/java/io/deephaven/util/calendar/BusinessSchedule.java index cfdf6e4a06a..2d4b4c73371 100644 --- a/DB/src/main/java/io/deephaven/util/calendar/BusinessSchedule.java +++ b/DB/src/main/java/io/deephaven/util/calendar/BusinessSchedule.java @@ -121,8 +121,8 @@ public DBDateTime getEndOfBusinessDay() { } /** - * Gets the length of the business day in nanoseconds. If the business day has multiple periods, - * only the time during the periods is counted. + * Gets the length of the business day in nanoseconds. If the business day has multiple periods, only the time + * during the periods is counted. * * @return length of the day in nanoseconds */ @@ -131,8 +131,8 @@ public long getLOBD() { } /** - * Gets the length of the business day in nanoseconds. If the business day has multiple periods, - * only the time during the periods is counted. + * Gets the length of the business day in nanoseconds. If the business day has multiple periods, only the time + * during the periods is counted. * * @return length of the day in nanoseconds */ @@ -166,8 +166,7 @@ public boolean isBusinessTime(final DBDateTime time) { } /** - * Returns the amount of business time in nanoseconds that has elapsed on the given day by the - * specified time. + * Returns the amount of business time in nanoseconds that has elapsed on the given day by the specified time. 
* * @param time time * @return business time in nanoseconds that has elapsed on the given day by the specified time diff --git a/DB/src/main/java/io/deephaven/util/calendar/Calendar.java b/DB/src/main/java/io/deephaven/util/calendar/Calendar.java index 7828f4c27fd..c4000d74b7e 100644 --- a/DB/src/main/java/io/deephaven/util/calendar/Calendar.java +++ b/DB/src/main/java/io/deephaven/util/calendar/Calendar.java @@ -17,18 +17,17 @@ * To comply with the ISO-8601 standard for Dates, Strings should be of the form "yyyy-MM-dd", * * - * Methods on DBDateTime may not be precisely defined enough to return a DBDateTime, e.g nextDay(). - * In these cases, the method will return a String as discussed above. + * Methods on DBDateTime may not be precisely defined enough to return a DBDateTime, e.g nextDay(). In these cases, the + * method will return a String as discussed above. * * - * To maintain consistency, each calendar has two fields: a name, and a time zone. A calendar with - * the same schedule but a different time zone is considered a different calendar. + * To maintain consistency, each calendar has two fields: a name, and a time zone. A calendar with the same schedule but + * a different time zone is considered a different calendar. * * - * Frequently, the default implementation for methods on DBDateTimes is to call the corresponding - * method on a String with {@code DBDateTime.toDateString}. This can be slower than methods written - * explicitly for DBDateTimes. If performance is an issue, consider overriding these methods with - * other behavior. + * Frequently, the default implementation for methods on DBDateTimes is to call the corresponding method on a String + * with {@code DBDateTime.toDateString}. This can be slower than methods written explicitly for DBDateTimes. If + * performance is an issue, consider overriding these methods with other behavior. 
*/ public interface Calendar { @@ -177,8 +176,7 @@ default String nextDay(final int days) { * * @param start start of a time range; if null, return {@code NULL_INT} * @param end end of a time range; if null, return {@code NULL_INT} - * @return the number days between {@code start} and {@code end}, inclusive and exclusive - * respectively. + * @return the number days between {@code start} and {@code end}, inclusive and exclusive respectively. */ int numberOfDays(final DBDateTime start, final DBDateTime end); @@ -188,8 +186,8 @@ default String nextDay(final int days) { * @param start start of a time range; if null, return {@code NULL_INT} * @param end end of a time range; if null, return {@code NULL_INT} * @param endInclusive whether to treat the {@code end} inclusive or exclusively - * @return the number of days between {@code start} and {@code end}, inclusive and - * {@code endInclusive} respectively. + * @return the number of days between {@code start} and {@code end}, inclusive and {@code endInclusive} + * respectively. */ int numberOfDays(final DBDateTime start, final DBDateTime end, final boolean endInclusive); @@ -198,8 +196,7 @@ default String nextDay(final int days) { * * @param start start of a time range; if null, return {@code NULL_INT} * @param end end of a time range; if null, return {@code NULL_INT} - * @return the number of days between {@code start} and {@code end}, inclusive and exclusive - * respectively. + * @return the number of days between {@code start} and {@code end}, inclusive and exclusive respectively. 
*/ int numberOfDays(final String start, final String end); @@ -209,8 +206,8 @@ default String nextDay(final int days) { * @param start start of a time range; if null, return {@code NULL_INT} * @param end end of a time range; if null, return {@code NULL_INT} * @param endInclusive whether to treat the {@code end} inclusive or exclusively - * @return the number of days between {@code start} and {@code end}, inclusive and - * {@code endInclusive} respectively. + * @return the number of days between {@code start} and {@code end}, inclusive and {@code endInclusive} + * respectively. */ int numberOfDays(final String start, final String end, final boolean endInclusive); diff --git a/DB/src/main/java/io/deephaven/util/calendar/Calendars.java b/DB/src/main/java/io/deephaven/util/calendar/Calendars.java index 12870f65cde..3c4a5a99097 100644 --- a/DB/src/main/java/io/deephaven/util/calendar/Calendars.java +++ b/DB/src/main/java/io/deephaven/util/calendar/Calendars.java @@ -34,8 +34,7 @@ public class Calendars implements Map { private static final String BUSINESS_CALENDAR_PROP_INTERNAL = "Calendar.internalPath"; private static final String BUSINESS_CALENDAR_PROP_USER = "Calendar.resourcePath"; private static final Calendars instance = new Calendars(); - private static final String defaultName = - Configuration.getInstance().getProperty("Calendar.default"); + private static final String defaultName = Configuration.getInstance().getProperty("Calendar.default"); /** * Gets the singleton map of business calendars. @@ -68,8 +67,7 @@ public static BusinessCalendar calendar(final String name) { /** * Returns a business calendar. * - * @return default business calendar. The deault is specified by the {@code Calendar.default} - * property. + * @return default business calendar. The deault is specified by the {@code Calendar.default} property. 
*/ public static BusinessCalendar calendar() { return calendar(defaultName); @@ -114,24 +112,21 @@ private void loadProperty(final Configuration configuration, final String proper try { load(configuration, locations); } catch (NoSuchFileException e) { - logger.warn().append("Problem loading calendars. locations=").append(locations) - .append(e).endl(); + logger.warn().append("Problem loading calendars. locations=").append(locations).append(e).endl(); } } private void load(final Configuration configuration, final String businessCalendarLocations) - throws NoSuchFileException { + throws NoSuchFileException { final ResourceResolution resourceResolution = - new io.deephaven.util.files.ResourceResolution(configuration, ";", - businessCalendarLocations); + new io.deephaven.util.files.ResourceResolution(configuration, ";", businessCalendarLocations); final BiConsumer consumer = (URL, filePath) -> { try { final InputStream inputStream = URL.openStream(); if (inputStream != null) { final File calendarFile = inputStreamToFile(inputStream); - final BusinessCalendar businessCalendar = - DefaultBusinessCalendar.getInstance(calendarFile); + final BusinessCalendar businessCalendar = DefaultBusinessCalendar.getInstance(calendarFile); addCalendar(businessCalendar); calendarFile.deleteOnExit(); } else { @@ -140,8 +135,7 @@ private void load(final Configuration configuration, final String businessCalend } } catch (IOException e) { logger.warn("Problem loading calendar: locations=" + businessCalendarLocations, e); - throw new RuntimeException( - "Problem loading calendar: locations=" + businessCalendarLocations, e); + throw new RuntimeException("Problem loading calendar: locations=" + businessCalendarLocations, e); } }; @@ -153,8 +147,7 @@ private void load(final Configuration configuration, final String businessCalend throw e; } catch (IOException e) { logger.warn("Problem loading calendar: locations=" + businessCalendarLocations, e); - throw new RuntimeException( - "Problem loading 
calendar: locations=" + businessCalendarLocations, e); + throw new RuntimeException("Problem loading calendar: locations=" + businessCalendarLocations, e); } } @@ -171,8 +164,7 @@ private void addCalendar(final BusinessCalendar cal) { if (oldCalendar.equals(cal)) { return; } - throw new IllegalArgumentException( - "Multiple calendars have the same name: name='" + name + "'"); + throw new IllegalArgumentException("Multiple calendars have the same name: name='" + name + "'"); } put(name, cal); @@ -214,7 +206,7 @@ public boolean isEmpty() { @Override public boolean containsKey(Object key) { return !(key == null || !key.getClass().isAssignableFrom(String.class)) - && calendars.containsKey(((String) key).toUpperCase()); + && calendars.containsKey(((String) key).toUpperCase()); } @@ -271,7 +263,7 @@ public Set> entrySet() { private static File inputStreamToFile(@NotNull InputStream inputStream) throws IOException { File calendarFile = File.createTempFile("temp-file-name", ".calendar"); FileOutputStream outputStream = - new FileOutputStream(calendarFile); + new FileOutputStream(calendarFile); int read; byte[] bytes = new byte[1024]; diff --git a/DB/src/main/java/io/deephaven/util/calendar/DateStringUtils.java b/DB/src/main/java/io/deephaven/util/calendar/DateStringUtils.java index 66d773c3559..bb7921c297d 100644 --- a/DB/src/main/java/io/deephaven/util/calendar/DateStringUtils.java +++ b/DB/src/main/java/io/deephaven/util/calendar/DateStringUtils.java @@ -18,16 +18,15 @@ * * To comply with the ISO-8601 standard for dates, Strings should be of the form "yyyy-MM-dd" * - * Quiet methods are functionally equivalent to their counterparts, but assume Date String validity - * to remove the overhead of checking. + * Quiet methods are functionally equivalent to their counterparts, but assume Date String validity to remove the + * overhead of checking. 
*/ @SuppressWarnings("WeakerAccess") public class DateStringUtils { private DateStringUtils() {} private static final Locale DATE_STRING_LOCALE = new Locale("en", "US"); - private static final DateTimeFormatter DATE_STRING_FORMATTER = - DateTimeFormatter.ofPattern("uuuu-MM-dd") + private static final DateTimeFormatter DATE_STRING_FORMATTER = DateTimeFormatter.ofPattern("uuuu-MM-dd") .withLocale(DATE_STRING_LOCALE) .withChronology(IsoChronology.INSTANCE) .withResolverStyle(ResolverStyle.STRICT); @@ -101,7 +100,7 @@ public static String plusDaysQuiet(final String date, final int days) { */ static boolean isLeapYear(final int year) { return ((year % 4 == 0) && - (!(year % 100 == 0) || (year % 400) == 0)); + (!(year % 100 == 0) || (year % 400) == 0)); } /** @@ -125,8 +124,8 @@ public static boolean isBefore(final String date1, final String date2) { /** * Is one date before another? * - * This does not check that dates are formatted correctly. Could be disastrous if {@code date1} - * and {@code date2} are not ISO-8601 compliant! + * This does not check that dates are formatted correctly. Could be disastrous if {@code date1} and {@code date2} + * are not ISO-8601 compliant! * * @param date1 if {@code null} return false * @param date2 if {@code null} return false @@ -158,8 +157,8 @@ public static boolean isAfter(final String date1, final String date2) { /** * Is one date after another? * - * This does not check that dates are formatted correctly. Could be disastrous if {@code date1} - * and {@code date2} are not ISO-8601 compliant! + * This does not check that dates are formatted correctly. Could be disastrous if {@code date1} and {@code date2} + * are not ISO-8601 compliant! * * @param date1 if {@code null} return false * @param date2 if {@code null} return false @@ -185,8 +184,7 @@ public static int monthOfYear(final String date) { } /** - * Parses a string as a local date. If the string is not a valid ISO-8601 Date String, throws an - * exception. 
+ * Parses a string as a local date. If the string is not a valid ISO-8601 Date String, throws an exception. * * This method can beused to verify that a date string is properly formed. * @@ -201,14 +199,13 @@ static LocalDate parseLocalDate(final String date) { final boolean matchesPattern = DATE_STRING_PATTERN.matcher(date).matches(); if (!matchesPattern) { throw new IllegalArgumentException( - "Text '" + date + "' could not be parsed as a date: format must be yyyy-MM-dd"); + "Text '" + date + "' could not be parsed as a date: format must be yyyy-MM-dd"); } try { return LocalDate.parse(date, DATE_STRING_FORMATTER); } catch (Exception e) { - throw new IllegalArgumentException( - "Text '" + date + "' could not be parsed as a date: " + e.getMessage()); + throw new IllegalArgumentException("Text '" + date + "' could not be parsed as a date: " + e.getMessage()); } } diff --git a/DB/src/main/java/io/deephaven/util/calendar/DefaultBusinessCalendar.java b/DB/src/main/java/io/deephaven/util/calendar/DefaultBusinessCalendar.java index d008fa8e7d5..62a1f2024cf 100644 --- a/DB/src/main/java/io/deephaven/util/calendar/DefaultBusinessCalendar.java +++ b/DB/src/main/java/io/deephaven/util/calendar/DefaultBusinessCalendar.java @@ -30,8 +30,8 @@ /** * Default implementation for a {@link BusinessCalendar}. This implementation is thread safe. * - * Overrides many default {@link Calendar} and BusinessCalendar methods for improved performance. - * See the documentation of Calendar for details. + * Overrides many default {@link Calendar} and BusinessCalendar methods for improved performance. See the documentation + * of Calendar for details. 
*/ public class DefaultBusinessCalendar extends AbstractBusinessCalendar implements Serializable { @@ -40,7 +40,7 @@ public class DefaultBusinessCalendar extends AbstractBusinessCalendar implements // our "null" holder for holidays private static final BusinessSchedule HOLIDAY = new Holiday(); private static final DateTimeFormatter HOLIDAY_PARSER = - DateTimeFormatter.ofPattern("yyyyMMdd").withLocale(new Locale("en", "US")); + DateTimeFormatter.ofPattern("yyyyMMdd").withLocale(new Locale("en", "US")); // each calendar has a name, timezone, and date string format private final String calendarName; @@ -59,15 +59,15 @@ public class DefaultBusinessCalendar extends AbstractBusinessCalendar implements @VisibleForTesting DefaultBusinessCalendar(final CalendarElements calendarElements) { - this(calendarElements.calendarName, calendarElements.timeZone, - calendarElements.lengthOfDefaultDayNanos, calendarElements.defaultBusinessPeriodStrings, - calendarElements.weekendDays, calendarElements.dates, calendarElements.holidays); + this(calendarElements.calendarName, calendarElements.timeZone, calendarElements.lengthOfDefaultDayNanos, + calendarElements.defaultBusinessPeriodStrings, calendarElements.weekendDays, calendarElements.dates, + calendarElements.holidays); } private DefaultBusinessCalendar(final String calendarName, final DBTimeZone timeZone, - final long lengthOfDefaultDayNanos, final List defaultBusinessPeriodStrings, - final Set weekendDays, final Map dates, - final Map holidays) { + final long lengthOfDefaultDayNanos, final List defaultBusinessPeriodStrings, + final Set weekendDays, final Map dates, + final Map holidays) { this.calendarName = calendarName; this.timeZone = timeZone; this.lengthOfDefaultDayNanos = lengthOfDefaultDayNanos; @@ -102,24 +102,23 @@ static CalendarElements constructCalendarElements(@NotNull final File calendarFi // Set the default values final Element defaultElement = getRequiredChild(root, "default", filePath); - 
calendarElements.defaultBusinessPeriodStrings = - getDefaultBusinessPeriodStrings(defaultElement, filePath); + calendarElements.defaultBusinessPeriodStrings = getDefaultBusinessPeriodStrings(defaultElement, filePath); calendarElements.weekendDays = getWeekendDays(defaultElement); // get the holidays/half days final Pair, Map> datePair = - getDates(root, calendarElements); + getDates(root, calendarElements); calendarElements.dates = datePair.getFirst(); calendarElements.holidays = datePair.getSecond(); calendarElements.lengthOfDefaultDayNanos = - parseStandardBusinessDayLengthNanos(calendarElements.defaultBusinessPeriodStrings); + parseStandardBusinessDayLengthNanos(calendarElements.defaultBusinessPeriodStrings); return calendarElements; } - private static Pair, Map> getDates( - final Element root, final CalendarElements calendarElements) { + private static Pair, Map> getDates(final Element root, + final CalendarElements calendarElements) { final Map holidays = new ConcurrentHashMap<>(); // initialize the calendar to the years placed in the calendar file int minYear = Integer.MAX_VALUE; @@ -142,8 +141,7 @@ private static Pair, Map businessPeriodsList = - holidayElement.getChildren("businessPeriod"); + final List businessPeriodsList = holidayElement.getChildren("businessPeriod"); final List businessPeriodStrings = new ArrayList<>(); for (Element busPeriod : businessPeriodsList) { String businessPeriod = getText(busPeriod); @@ -153,9 +151,8 @@ private static Pair, Map, Map newBusinessDay(date, calendarElements.weekendDays, - calendarElements.timeZone, - calendarElements.defaultBusinessPeriodStrings)); + date -> newBusinessDay(date, calendarElements.weekendDays, calendarElements.timeZone, + calendarElements.defaultBusinessPeriodStrings)); } } } @@ -192,8 +188,8 @@ private static Set getWeekendDays(@NotNull final Element defaultEleme return weekendDays; } - private static List getDefaultBusinessPeriodStrings( - @NotNull final Element defaultElement, final String 
filePath) { + private static List getDefaultBusinessPeriodStrings(@NotNull final Element defaultElement, + final String filePath) { final List defaultBusinessPeriodStrings = new ArrayList<>(); final List businessPeriods = defaultElement.getChildren("businessPeriod"); if (businessPeriods != null) { @@ -203,8 +199,7 @@ private static List getDefaultBusinessPeriodStrings( } } else { throw new IllegalArgumentException( - "Missing the 'businessPeriod' tag in the 'default' section in calendar file " - + filePath); + "Missing the 'businessPeriod' tag in the 'default' section in calendar file " + filePath); } return defaultBusinessPeriodStrings; @@ -217,10 +212,10 @@ private static Element getRootElement(File calendarFile) { doc = builder.build(calendarFile); } catch (JDOMException e) { throw new IllegalArgumentException( - "Could not initialize business calendar: Error parsing " + calendarFile.getName()); + "Could not initialize business calendar: Error parsing " + calendarFile.getName()); } catch (IOException e) { - throw new RuntimeException("Could not initialize business calendar: " - + calendarFile.getName() + " could not be loaded"); + throw new RuntimeException( + "Could not initialize business calendar: " + calendarFile.getName() + " could not be loaded"); } return doc.getRootElement(); @@ -237,14 +232,12 @@ private static DBTimeZone getTimeZone(@NotNull final Element root, final String } // throws an error if the child is missing - private static Element getRequiredChild(@NotNull final Element root, final String child, - final String filePath) { + private static Element getRequiredChild(@NotNull final Element root, final String child, final String filePath) { Element element = root.getChild(child); if (element != null) { return element; } else { - throw new IllegalArgumentException( - "Missing the " + child + " tag in calendar file " + filePath); + throw new IllegalArgumentException("Missing the " + child + " tag in calendar file " + filePath); } } @@ -256,14 
+249,12 @@ private static LocalDate parseLocalDate(final String date) { return LocalDate.parse(date, HOLIDAY_PARSER); } catch (Exception ee) { throw new IllegalArgumentException( - "Malformed date string. Acceptable formats are yyyy-MM-dd and yyyyMMdd. s=" - + date); + "Malformed date string. Acceptable formats are yyyy-MM-dd and yyyyMMdd. s=" + date); } } } - private static long parseStandardBusinessDayLengthNanos( - final List defaultBusinessPeriodStrings) { + private static long parseStandardBusinessDayLengthNanos(final List defaultBusinessPeriodStrings) { long lengthOfDefaultDayNanos = 0; Pattern hhmm = Pattern.compile("\\d{2}:\\d{2}"); for (String businessPeriodString : defaultBusinessPeriodStrings) { @@ -275,27 +266,24 @@ private static long parseStandardBusinessDayLengthNanos( if (hhmm.matcher(open).matches() && hhmm.matcher(close).matches()) { String[] openingTimeHHMM = open.split(":"); String[] closingTimeHHMM = close.split(":"); - long defOpenTimeNanos = - (Integer.parseInt(openingTimeHHMM[0]) * DBTimeUtils.HOUR) + long defOpenTimeNanos = (Integer.parseInt(openingTimeHHMM[0]) * DBTimeUtils.HOUR) + (Integer.parseInt(openingTimeHHMM[1]) * DBTimeUtils.MINUTE); - long defClosingTimeNanos = - (Integer.parseInt(closingTimeHHMM[0]) * DBTimeUtils.HOUR) + long defClosingTimeNanos = (Integer.parseInt(closingTimeHHMM[0]) * DBTimeUtils.HOUR) + (Integer.parseInt(closingTimeHHMM[1]) * DBTimeUtils.MINUTE); lengthOfDefaultDayNanos += defClosingTimeNanos - defOpenTimeNanos; wellFormed = true; } } if (!wellFormed) { - throw new UnsupportedOperationException( - "Could not parse business period " + businessPeriodString); + throw new UnsupportedOperationException("Could not parse business period " + businessPeriodString); } } return lengthOfDefaultDayNanos; } - private static BusinessPeriod[] parseBusinessPeriods(final DBTimeZone timeZone, - final LocalDate date, final List businessPeriodStrings) { + private static BusinessPeriod[] parseBusinessPeriods(final DBTimeZone 
timeZone, final LocalDate date, + final List businessPeriodStrings) { final BusinessPeriod[] businessPeriods = new BusinessPeriod[businessPeriodStrings.size()]; final Pattern hhmm = Pattern.compile("\\d{2}[:]\\d{2}"); int i = 0; @@ -305,17 +293,16 @@ private static BusinessPeriod[] parseBusinessPeriods(final DBTimeZone timeZone, final String open = openClose[0]; String close = openClose[1]; if (hhmm.matcher(open).matches() && hhmm.matcher(close).matches()) { - final String tz = - timeZone.name().substring(timeZone.name().indexOf("_")).replace("_", " "); + final String tz = timeZone.name().substring(timeZone.name().indexOf("_")).replace("_", " "); final LocalDate closeDate; if (close.equals("24:00")) { // midnight closing time closeDate = date.plusDays(1); close = "00:00"; } else if (Integer.parseInt(open.replaceAll(":", "")) > Integer - .parseInt(close.replaceAll(":", ""))) { - throw new IllegalArgumentException("Can not parse business periods; open = " - + open + " is greater than close = " + close); + .parseInt(close.replaceAll(":", ""))) { + throw new IllegalArgumentException( + "Can not parse business periods; open = " + open + " is greater than close = " + close); } else { closeDate = date; } @@ -323,8 +310,7 @@ private static BusinessPeriod[] parseBusinessPeriods(final DBTimeZone timeZone, final String openDateStr = date.toString() + "T" + open + tz; final String closeDateStr = closeDate.toString() + "T" + close + tz; - businessPeriods[i++] = - new BusinessPeriod(DBTimeUtils.convertDateTime(openDateStr), + businessPeriods[i++] = new BusinessPeriod(DBTimeUtils.convertDateTime(openDateStr), DBTimeUtils.convertDateTime(closeDateStr)); } } @@ -371,7 +357,7 @@ public BusinessSchedule getBusinessDay(final DBDateTime time) { } final LocalDate localDate = LocalDate.ofYearDay(DBTimeUtils.year(time, timeZone()), - DBTimeUtils.dayOfYear(time, timeZone())); + DBTimeUtils.dayOfYear(time, timeZone())); return getBusinessSchedule(localDate); } @@ -399,7 +385,7 @@ public 
BusinessSchedule getBusinessSchedule(final DBDateTime time) { } final LocalDate localDate = LocalDate.ofYearDay(DBTimeUtils.year(time, timeZone()), - DBTimeUtils.dayOfYear(time, timeZone())); + DBTimeUtils.dayOfYear(time, timeZone())); return getBusinessSchedule(localDate); } @@ -426,9 +412,8 @@ private BusinessSchedule newBusinessDay(final LocalDate date) { return newBusinessDay(date, weekendDays, timeZone(), defaultBusinessPeriodStrings); } - private static BusinessSchedule newBusinessDay(final LocalDate date, - final Set weekendDays, final DBTimeZone timeZone, - final List businessPeriodStrings) { + private static BusinessSchedule newBusinessDay(final LocalDate date, final Set weekendDays, + final DBTimeZone timeZone, final List businessPeriodStrings) { if (date == null) { return null; } @@ -462,8 +447,7 @@ public long diffBusinessNanos(final DBDateTime start, final DBDateTime end) { DBDateTime startOfPeriod = businessPeriod.getStartTime(); // noinspection StatementWithEmptyBody - if (DBTimeUtils.isAfter(day, endOfPeriod) - || DBTimeUtils.isBefore(end, startOfPeriod)) { + if (DBTimeUtils.isAfter(day, endOfPeriod) || DBTimeUtils.isBefore(end, startOfPeriod)) { // continue } else if (!DBTimeUtils.isAfter(day, startOfPeriod)) { if (DBTimeUtils.isBefore(end, endOfPeriod)) { @@ -497,8 +481,7 @@ public double diffBusinessYear(final DBDateTime startTime, final DBDateTime endT while (!DBTimeUtils.isAfter(time, endTime)) { // get length of the business year final int startYear = DBTimeUtils.year(startTime, timeZone()); - final long businessYearLength = - cachedYearLengths.computeIfAbsent(startYear, this::getBusinessYearLength); + final long businessYearLength = cachedYearLengths.computeIfAbsent(startYear, this::getBusinessYearLength); final DBDateTime endOfYear = getFirstBusinessDateTimeOfYear(startYear + 1); final long yearDiff; @@ -521,8 +504,7 @@ private long getBusinessYearLength(final int year) { for (int j = 0; j < numDays; j++) { final int day = j + 1; - final 
BusinessSchedule businessDate = - getBusinessSchedule(LocalDate.ofYearDay(year, day)); + final BusinessSchedule businessDate = getBusinessSchedule(LocalDate.ofYearDay(year, day)); yearLength += businessDate.getLOBD(); } @@ -533,8 +515,7 @@ private DBDateTime getFirstBusinessDateTimeOfYear(final int year) { boolean isLeap = DateStringUtils.isLeapYear(year); int numDays = 365 + (isLeap ? 1 : 0); for (int j = 0; j < numDays; j++) { - final BusinessSchedule businessDate = - getBusinessSchedule(LocalDate.ofYearDay(year, j + 1)); + final BusinessSchedule businessDate = getBusinessSchedule(LocalDate.ofYearDay(year, j + 1)); if (!(businessDate instanceof Holiday)) { return businessDate.getSOBD(); } @@ -546,9 +527,9 @@ private DBDateTime getFirstBusinessDateTimeOfYear(final int year) { @Override public String toString() { return "DefaultBusinessCalendar{" + - "name='" + calendarName + '\'' + - ", timeZone=" + timeZone + - '}'; + "name='" + calendarName + '\'' + + ", timeZone=" + timeZone + + '}'; } @Override @@ -559,17 +540,17 @@ public boolean equals(Object o) { return false; DefaultBusinessCalendar that = (DefaultBusinessCalendar) o; return lengthOfDefaultDayNanos == that.lengthOfDefaultDayNanos && - Objects.equals(calendarName, that.calendarName) && - timeZone == that.timeZone && - Objects.equals(defaultBusinessPeriodStrings, that.defaultBusinessPeriodStrings) && - Objects.equals(weekendDays, that.weekendDays) && - Objects.equals(holidays, that.holidays); + Objects.equals(calendarName, that.calendarName) && + timeZone == that.timeZone && + Objects.equals(defaultBusinessPeriodStrings, that.defaultBusinessPeriodStrings) && + Objects.equals(weekendDays, that.weekendDays) && + Objects.equals(holidays, that.holidays); } @Override public int hashCode() { - return Objects.hash(calendarName, timeZone, lengthOfDefaultDayNanos, - defaultBusinessPeriodStrings, weekendDays, holidays); + return Objects.hash(calendarName, timeZone, lengthOfDefaultDayNanos, 
defaultBusinessPeriodStrings, weekendDays, + holidays); } static class CalendarElements { diff --git a/DB/src/main/java/io/deephaven/util/calendar/DefaultNoHolidayBusinessCalendar.java b/DB/src/main/java/io/deephaven/util/calendar/DefaultNoHolidayBusinessCalendar.java index d8d95eda085..ee306b82a5b 100644 --- a/DB/src/main/java/io/deephaven/util/calendar/DefaultNoHolidayBusinessCalendar.java +++ b/DB/src/main/java/io/deephaven/util/calendar/DefaultNoHolidayBusinessCalendar.java @@ -16,8 +16,8 @@ public class DefaultNoHolidayBusinessCalendar extends AbstractBusinessCalendar { private final BusinessCalendar calendar; /** - * Creates a new Default24HourBusinessCalendar instance. Assumes that {@code calendar} is a - * {@link BusinessCalendar} with no holidays and a 24 hour business day. + * Creates a new Default24HourBusinessCalendar instance. Assumes that {@code calendar} is a {@link BusinessCalendar} + * with no holidays and a 24 hour business day. * * @param calendar {@link BusinessCalendar} with no holidays and a 24 hour business day */ diff --git a/DB/src/main/java/io/deephaven/util/calendar/StaticCalendarMethods.java b/DB/src/main/java/io/deephaven/util/calendar/StaticCalendarMethods.java index c68cd9f0aaa..1c29f0c9ef7 100644 --- a/DB/src/main/java/io/deephaven/util/calendar/StaticCalendarMethods.java +++ b/DB/src/main/java/io/deephaven/util/calendar/StaticCalendarMethods.java @@ -90,8 +90,7 @@ public static int numberOfDays(final DBDateTime start, final DBDateTime end) { return Calendars.calendar().numberOfDays(start, end); } - public static int numberOfDays(final DBDateTime start, final DBDateTime end, - final boolean endInclusive) { + public static int numberOfDays(final DBDateTime start, final DBDateTime end, final boolean endInclusive) { return Calendars.calendar().numberOfDays(start, end, endInclusive); } @@ -99,8 +98,7 @@ public static int numberOfDays(final String start, final String end) { return Calendars.calendar().numberOfDays(start, end); } - public 
static int numberOfDays(final String start, final String end, - final boolean endInclusive) { + public static int numberOfDays(final String start, final String end, final boolean endInclusive) { return Calendars.calendar().numberOfDays(start, end, endInclusive); } @@ -344,8 +342,7 @@ public static int numberOfNonBusinessDays(DBDateTime start, DBDateTime end) { return Calendars.calendar().numberOfNonBusinessDays(start, end); } - public static int numberOfNonBusinessDays(DBDateTime start, DBDateTime end, - boolean endInclusive) { + public static int numberOfNonBusinessDays(DBDateTime start, DBDateTime end, boolean endInclusive) { return Calendars.calendar().numberOfNonBusinessDays(start, end, endInclusive); } diff --git a/DB/src/main/java/io/deephaven/utils/ArrayParser.java b/DB/src/main/java/io/deephaven/utils/ArrayParser.java index 812f04c56f1..d2e70b324ff 100644 --- a/DB/src/main/java/io/deephaven/utils/ArrayParser.java +++ b/DB/src/main/java/io/deephaven/utils/ArrayParser.java @@ -10,8 +10,8 @@ import java.util.stream.Stream; /** - * A simple wrapper for string-to-array parsing. Parsers are kept in a cache per-delimiter so that - * we avoid recompiling the pattern regex. + * A simple wrapper for string-to-array parsing. Parsers are kept in a cache per-delimiter so that we avoid recompiling + * the pattern regex. 
*/ public class ArrayParser { @@ -35,8 +35,7 @@ private void checkFormat(String value) { } final char start = value.charAt(0); if (start != '[' && start != '{' && start != '(') { - throw new InputMismatchException( - "Value submitted for Array parsing doesn't match needed format, " + + throw new InputMismatchException("Value submitted for Array parsing doesn't match needed format, " + "unexpected opening character: " + start); } final char end = value.charAt(value.length() - 1); @@ -82,24 +81,20 @@ public long[] getLongArray(String value, boolean strict) { } /** - * Create a properly typed array from the input string based upon the delimiter, given a - * supplier. + * Create a properly typed array from the input string based upon the delimiter, given a supplier. * * @param value The array string value * @param strict if strict processing should be used - * @param elementSupplier a supplier to convert a stream of element strings to items of the - * correct types + * @param elementSupplier a supplier to convert a stream of element strings to items of the correct types * @param the type * @return an array of values of the specified type */ - public T getArray(String value, boolean strict, - Function, T> elementSupplier) { + public T getArray(String value, boolean strict, Function, T> elementSupplier) { return elementSupplier.apply(toStringStream(value, strict)); } /** - * Convert the input string value to a stream of strings for each element based upon the - * delimiter. + * Convert the input string value to a stream of strings for each element based upon the delimiter. 
* * @param value the array as a string * @param strict if strict processing should be used @@ -116,11 +111,10 @@ private Stream toStringStream(String value, boolean strict) { } try { - return Arrays.stream(pattern.split(value.trim().substring(1, value.length() - 1))) - .map(String::trim); + return Arrays.stream(pattern.split(value.trim().substring(1, value.length() - 1))).map(String::trim); } catch (Exception e) { throw new IllegalArgumentException( - "Value submitted for Array parsing doesn't match needed format: " + value, e); + "Value submitted for Array parsing doesn't match needed format: " + value, e); } } @@ -143,9 +137,8 @@ public String[] toStringArray(String value, boolean strict) { */ public String encodeArray(double[] array) { return array == null - ? null - : "[" + Arrays.stream(array).mapToObj(Double::toString) - .collect(Collectors.joining(delimiter)) + "]"; + ? null + : "[" + Arrays.stream(array).mapToObj(Double::toString).collect(Collectors.joining(delimiter)) + "]"; } /** @@ -156,8 +149,7 @@ public String encodeArray(double[] array) { */ public String encodeArray(long[] array) { return array == null - ? null - : "[" + Arrays.stream(array).mapToObj(Long::toString) - .collect(Collectors.joining(delimiter)) + "]"; + ? null + : "[" + Arrays.stream(array).mapToObj(Long::toString).collect(Collectors.joining(delimiter)) + "]"; } } diff --git a/DB/src/test/java/io/deephaven/db/tables/QueryTableHugeSortTest.java b/DB/src/test/java/io/deephaven/db/tables/QueryTableHugeSortTest.java index 3a64bec6737..ad3aac7bcea 100644 --- a/DB/src/test/java/io/deephaven/db/tables/QueryTableHugeSortTest.java +++ b/DB/src/test/java/io/deephaven/db/tables/QueryTableHugeSortTest.java @@ -25,16 +25,14 @@ public void testHugeSort() { final int megaSortSize = SortHelpers.megaSortSize; final int sortChunkSize = SortHelpers.sortChunkSize; try { - // ideally we would sort something that is bigger than Integer.MAX_VALUE, but the test - // VMs can not handle that. 
- // So instead we adjust the mega sort parameters so that we'll exercise the code path - // anyway. + // ideally we would sort something that is bigger than Integer.MAX_VALUE, but the test VMs can not handle + // that. + // So instead we adjust the mega sort parameters so that we'll exercise the code path anyway. SortHelpers.megaSortSize = 1 << 24; // 16 Million SortHelpers.sortChunkSize = 1 << 21; // 2 Million final long tableSize = (long) SortHelpers.megaSortSize * 2L; - final Table bigTable = - TableTools.emptyTable(tableSize).updateView("SortCol=(byte)(ii%100)", "Sentinel=k"); + final Table bigTable = TableTools.emptyTable(tableSize).updateView("SortCol=(byte)(ii%100)", "Sentinel=k"); TableTools.show(bigTable); final long runSize1 = (tableSize + 99) / 100; @@ -42,7 +40,7 @@ public void testHugeSort() { final long firstSmallRun = tableSize % 100; final long runSizePivot = (firstSmallRun) * runSize1; System.out.println("RunSize: " + runSize1 + ", " + runSize2 + ", pivot: " + runSizePivot - + ", firstSmallRun: " + firstSmallRun); + + ", firstSmallRun: " + firstSmallRun); final long startTime = System.currentTimeMillis(); final Table sorted = bigTable.sort("SortCol"); @@ -58,8 +56,8 @@ public void testHugeSort() { QueryScope.addParam("firstSmallRun", firstSmallRun); final Table expected = TableTools.emptyTable(tableSize).updateView( - "SortCol=(byte)(ii < runSizePivot ? ii/runSize1 : ((ii - runSizePivot) / runSize2) + firstSmallRun)", - "Sentinel=(ii < runSizePivot) ? ((100 * (ii % runSize1)) + SortCol) : 100 * ((ii - runSizePivot) % runSize2) + SortCol"); + "SortCol=(byte)(ii < runSizePivot ? ii/runSize1 : ((ii - runSizePivot) / runSize2) + firstSmallRun)", + "Sentinel=(ii < runSizePivot) ? 
((100 * (ii % runSize1)) + SortCol) : 100 * ((ii - runSizePivot) % runSize2) + SortCol"); TableTools.showWithIndex(expected); TstUtils.assertTableEquals(expected, sorted); @@ -78,12 +76,11 @@ public void testHugeGroupedSort() { QueryScope.addParam("captains", captains); QueryScope.addParam("segSize", segSize); - final Table grouped = TableTools.emptyTable(tableSize) - .updateView("Captain=captains[(int)(ii / segSize)]", "Sentinel=ii"); + final Table grouped = + TableTools.emptyTable(tableSize).updateView("Captain=captains[(int)(ii / segSize)]", "Sentinel=ii"); final Map gtr = new LinkedHashMap<>(); for (int ii = 0; ii < captains.length; ++ii) { - gtr.put(captains[ii], - Index.FACTORY.getIndexByRange(ii * segSize, (ii + 1) * segSize - 1)); + gtr.put(captains[ii], Index.FACTORY.getIndexByRange(ii * segSize, (ii + 1) * segSize - 1)); } System.out.println(gtr); ((AbstractColumnSource) (grouped.getColumnSource("Captain"))).setGroupToRange(gtr); @@ -99,7 +96,7 @@ public void testHugeGroupedSort() { Arrays.sort(sortedCaptains, Comparator.reverseOrder()); QueryScope.addParam("sortedCaptains", sortedCaptains); final Table sortedValues = TableTools.emptyTable(tableSize) - .updateView("Captain=sortedCaptains[(int)(ii / segSize)]", "Sentinel=ii"); + .updateView("Captain=sortedCaptains[(int)(ii / segSize)]", "Sentinel=ii"); System.out.println("Comparing tables:"); final long compareStart = System.currentTimeMillis(); diff --git a/DB/src/test/java/io/deephaven/db/tables/TestAppendableColumn.java b/DB/src/test/java/io/deephaven/db/tables/TestAppendableColumn.java index 3cc95cdd98d..d701d8487a5 100644 --- a/DB/src/test/java/io/deephaven/db/tables/TestAppendableColumn.java +++ b/DB/src/test/java/io/deephaven/db/tables/TestAppendableColumn.java @@ -27,8 +27,7 @@ public void testOverflow() throws IOException { data[i] = i / 4; } - final TableDefinition tableDefinition = - TableDefinition.of(ColumnDefinition.ofInt("v").withGrouping()); + final TableDefinition tableDefinition = 
TableDefinition.of(ColumnDefinition.ofInt("v").withGrouping()); Table table = TableTools.newTable(tableDefinition, TableTools.col("v", data)); File dest = new File(directory, "testOverflow.parquet"); ParquetTools.writeTable(table, dest, tableDefinition); @@ -38,10 +37,9 @@ public void testOverflow() throws IOException { assertNotNull(tableR.getColumnSource("v").getGroupToRange()); assertEquals(320000, tableR.getIndex().size()); assertEquals(80000, tableR.getColumnSource("v").getGroupToRange().size()); - assertEquals(80000, - tableR.getColumnSource("v").getValuesMapping(tableR.getIndex()).size()); + assertEquals(80000, tableR.getColumnSource("v").getValuesMapping(tableR.getIndex()).size()); assertEquals(80000, tableR.getColumnSource("v") - .getValuesMapping(tableR.getIndex().subindexByPos(0, tableR.size())).size()); + .getValuesMapping(tableR.getIndex().subindexByPos(0, tableR.size())).size()); final Map mapper = tableR.getColumnSource("v").getGroupToRange(); for (int i = 0; i < data.length / 4; i++) { assertEquals(mapper.get(i), Index.FACTORY.getIndexByRange(i * 4, i * 4 + 3)); diff --git a/DB/src/test/java/io/deephaven/db/tables/TestColumnDefinition.java b/DB/src/test/java/io/deephaven/db/tables/TestColumnDefinition.java index def7a9784c6..6014d40962a 100644 --- a/DB/src/test/java/io/deephaven/db/tables/TestColumnDefinition.java +++ b/DB/src/test/java/io/deephaven/db/tables/TestColumnDefinition.java @@ -19,8 +19,7 @@ public void testUtfAssumptions1() throws IOException { out.writeUTF("\0"); out.flush(); - final ObjectInputStream in = - new ObjectInputStream(new ByteArrayInputStream(outBytes.toByteArray())); + final ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(outBytes.toByteArray())); TestCase.assertEquals(4, in.available()); final String bad = in.readUTF(); TestCase.assertEquals(1, bad.length()); @@ -40,16 +39,14 @@ public void testUtfAssumptions2() throws IOException { out.writeByte(1); out.flush(); - final ObjectInputStream in1 = - new 
ObjectInputStream(new ByteArrayInputStream(outBytes.toByteArray())); + final ObjectInputStream in1 = new ObjectInputStream(new ByteArrayInputStream(outBytes.toByteArray())); TestCase.assertEquals(4, in1.available()); TestCase.assertEquals(2, in1.readUnsignedShort()); TestCase.assertEquals(MAGIC_NUMBER, in1.readByte()); TestCase.assertEquals(1, in1.readByte()); TestCase.assertEquals(0, in1.available()); - final ObjectInputStream in2 = - new ObjectInputStream(new ByteArrayInputStream(outBytes.toByteArray())); + final ObjectInputStream in2 = new ObjectInputStream(new ByteArrayInputStream(outBytes.toByteArray())); TestCase.assertEquals(4, in2.available()); try { in2.readUTF(); diff --git a/DB/src/test/java/io/deephaven/db/tables/TestMoveColumns.java b/DB/src/test/java/io/deephaven/db/tables/TestMoveColumns.java index a73e6b5341e..0a40ff0bab7 100644 --- a/DB/src/test/java/io/deephaven/db/tables/TestMoveColumns.java +++ b/DB/src/test/java/io/deephaven/db/tables/TestMoveColumns.java @@ -6,8 +6,7 @@ import java.util.stream.Collectors; public class TestMoveColumns extends TestCase { - private static final Table table = - TableTools.emptyTable(1).update("a=1", "b=2", "c=3", "d=4", "e=5"); + private static final Table table = TableTools.emptyTable(1).update("a=1", "b=2", "c=3", "d=4", "e=5"); private static final int numCols = table.getColumns().length; public void testMoveColumns() { @@ -138,14 +137,12 @@ public void testMoveDownColumns() { } private void checkColumnOrder(Table t, String expectedOrder) { - final String order = - t.getColumnSourceMap().keySet().stream().collect(Collectors.joining("")); + final String order = t.getColumnSourceMap().keySet().stream().collect(Collectors.joining("")); assertEquals(expectedOrder, order); } private void checkColumnValueOrder(Table t, String expectedOrder) { - final String order = - t.getColumnSourceMap().values().stream().mapToInt((col) -> col.getInt(0)) + final String order = t.getColumnSourceMap().values().stream().mapToInt((col) 
-> col.getInt(0)) .mapToObj(String::valueOf).collect(Collectors.joining("")); assertEquals(expectedOrder, order); } diff --git a/DB/src/test/java/io/deephaven/db/tables/dbarrays/DbArrayTest.java b/DB/src/test/java/io/deephaven/db/tables/dbarrays/DbArrayTest.java index 811498a3223..a6b06787221 100644 --- a/DB/src/test/java/io/deephaven/db/tables/dbarrays/DbArrayTest.java +++ b/DB/src/test/java/io/deephaven/db/tables/dbarrays/DbArrayTest.java @@ -21,8 +21,8 @@ public class DbArrayTest extends TestCase { public void testDbArrayColumnWrapper() { // noinspection unchecked DbArray dbArray = new DbArrayColumnWrapper( - ArrayBackedColumnSource.getMemoryColumnSourceUntyped(new String[] {"a", "b", "c"}), - Index.FACTORY.getIndexByRange(0, 2)); + ArrayBackedColumnSource.getMemoryColumnSourceUntyped(new String[] {"a", "b", "c"}), + Index.FACTORY.getIndexByRange(0, 2)); assertEquals(3, dbArray.size()); assertEquals("a", dbArray.get(0)); assertEquals("b", dbArray.get(1)); @@ -77,8 +77,7 @@ public void testDbArrayDirect() { assertEquals(null, dbArrayDirect.subArray(0, 1).get(-1)); assertEquals(2, dbArrayDirect.subArray(1, 3).size()); - assertEquals(Arrays.asList("b", "c"), - Arrays.asList(dbArrayDirect.subArray(1, 3).toArray())); + assertEquals(Arrays.asList("b", "c"), Arrays.asList(dbArrayDirect.subArray(1, 3).toArray())); assertEquals(null, dbArrayDirect.subArray(1, 3).get(2)); assertEquals(null, dbArrayDirect.subArray(0, 1).get(-1)); } @@ -86,8 +85,8 @@ public void testDbArrayDirect() { public void testSubArray() { // noinspection unchecked DbArray dbArray = new DbArrayColumnWrapper( - ArrayBackedColumnSource.getMemoryColumnSourceUntyped(new Object[] {10, 20, 30}), - Index.FACTORY.getIndexByRange(0, 2)); + ArrayBackedColumnSource.getMemoryColumnSourceUntyped(new Object[] {10, 20, 30}), + Index.FACTORY.getIndexByRange(0, 2)); for (int start = -4; start <= 4; start++) { for (int end = -1; end <= 7; end++) { @@ -116,8 +115,7 @@ public void testSubArray() { Object result[] = new 
Object[end - start]; for (int i = start; i < end; i++) { - result[i - start] = - (i < 0 || i >= dbArray.size()) ? null : dbArray.get(i); + result[i - start] = (i < 0 || i >= dbArray.size()) ? null : dbArray.get(i); } Object result2[] = new Object[end2 - start2]; @@ -145,8 +143,7 @@ private void checkSubArray(DbArray dbArray, int start, int end, Object result[]) } } - private void checkDoubleSubArray(DbArray dbArray, int start, int end, int start2, int end2, - Object result[]) { + private void checkDoubleSubArray(DbArray dbArray, int start, int end, int start2, int end2, Object result[]) { DbArray subArray = dbArray.subArray(start, end); subArray = subArray.subArray(start2, end2); Object array[] = subArray.toArray(); @@ -166,7 +163,7 @@ public void testSubArrayByPositions() { integerArraySource.set(ii, (ii + 1) * 10); } DbArray dbColumnArray = - new DbArrayColumnWrapper<>(integerArraySource, Index.FACTORY.getIndexByRange(0, 5)); + new DbArrayColumnWrapper<>(integerArraySource, Index.FACTORY.getIndexByRange(0, 5)); DbIntArray dbDirectArray = new DbIntArrayDirect(10, 20, 30, 40, 50, 60); Random random = new Random(42); @@ -182,9 +179,8 @@ public void testSubArrayByPositions() { } DbArray columnResult = - dbColumnArray.subArrayByPositions(positions.toArray(new long[positions.size()])); - DbIntArray directResult = - dbDirectArray.subArrayByPositions(positions.toArray(new long[positions.size()])); + dbColumnArray.subArrayByPositions(positions.toArray(new long[positions.size()])); + DbIntArray directResult = dbDirectArray.subArrayByPositions(positions.toArray(new long[positions.size()])); assertEquals(expected.size(), columnResult.size()); assertEquals(expected.size(), directResult.size()); @@ -197,16 +193,15 @@ public void testSubArrayByPositions() { } /** - * Verify that a DbArrayColumnWrapper can correctly invoke the 'getDirect' operation even when - * one of the column sources is null. 
+ * Verify that a DbArrayColumnWrapper can correctly invoke the 'getDirect' operation even when one of the column + * sources is null. */ public void testGetDirect() { DbArrayDirect dbArrayDirect = new DbArrayDirect<>("a", "b", "c"); // noinspection unchecked DbArrayColumnWrapper dbArray = new DbArrayColumnWrapper( - ArrayBackedColumnSource - .getMemoryColumnSourceUntyped(new DbArrayBase[] {dbArrayDirect, null}), - Index.FACTORY.getIndexByRange(0, 1)); + ArrayBackedColumnSource.getMemoryColumnSourceUntyped(new DbArrayBase[] {dbArrayDirect, null}), + Index.FACTORY.getIndexByRange(0, 1)); DbArrayBase base = dbArray.getDirect(); assertEquals(2, base.intSize()); assertTrue(DbArrayDirect.class.isAssignableFrom(base.getClass())); diff --git a/DB/src/test/java/io/deephaven/db/tables/dbarrays/ReplicateTst.java b/DB/src/test/java/io/deephaven/db/tables/dbarrays/ReplicateTst.java index 6cb9b3268f9..4718aaa0bf1 100644 --- a/DB/src/test/java/io/deephaven/db/tables/dbarrays/ReplicateTst.java +++ b/DB/src/test/java/io/deephaven/db/tables/dbarrays/ReplicateTst.java @@ -10,7 +10,6 @@ public class ReplicateTst { public static void main(String[] args) throws IOException { - ReplicatePrimitiveCode.charToAllButBoolean(DbCharArrayTest.class, - ReplicatePrimitiveCode.TEST_SRC); + ReplicatePrimitiveCode.charToAllButBoolean(DbCharArrayTest.class, ReplicatePrimitiveCode.TEST_SRC); } } diff --git a/DB/src/test/java/io/deephaven/db/tables/lang/DBLanguageParserDummyClass.java b/DB/src/test/java/io/deephaven/db/tables/lang/DBLanguageParserDummyClass.java index 2ce01f8b702..ef55ce4a9a5 100644 --- a/DB/src/test/java/io/deephaven/db/tables/lang/DBLanguageParserDummyClass.java +++ b/DB/src/test/java/io/deephaven/db/tables/lang/DBLanguageParserDummyClass.java @@ -79,8 +79,7 @@ public static int overloadedMethod(String arg) { public final InnerClass innerClassInstance = new InnerClass(); public final InnerClass2 innerClass2Instance = new InnerClass2(); - public static double[] interpolate(double[] 
x, double[] y, double[] xi, NestedEnum anEnumArg, - boolean extrapolate) { + public static double[] interpolate(double[] x, double[] y, double[] xi, NestedEnum anEnumArg, boolean extrapolate) { return new double[] {0}; } @@ -114,7 +113,7 @@ public class InnerInnerClass { public class InnerClass2 implements SubclassOfDBLanguageParserDummyClass { public final InnerClass innerClassAsInstanceOfAnotherInnerClass = - new InnerClass("InnerClass2.innerClassAsInstanceOfAnotherInnerClass.instancevar"); + new InnerClass("InnerClass2.innerClassAsInstanceOfAnotherInnerClass.instancevar"); } public static class StaticNestedClass implements SubclassOfDBLanguageParserDummyClass { diff --git a/DB/src/test/java/io/deephaven/db/tables/lang/TestDBLanguageFunctionUtil.java b/DB/src/test/java/io/deephaven/db/tables/lang/TestDBLanguageFunctionUtil.java index be959bb5519..f10d890f99f 100644 --- a/DB/src/test/java/io/deephaven/db/tables/lang/TestDBLanguageFunctionUtil.java +++ b/DB/src/test/java/io/deephaven/db/tables/lang/TestDBLanguageFunctionUtil.java @@ -1,6 +1,6 @@ /* - * Copyright (c) 2016-2021 Deephaven Data Labs and Patent Pending GENERATED CODE - DO NOT MODIFY - * DIRECTLY This class generated by io.deephaven.db.tables.lang.DBLanguageFunctionGenerator + * Copyright (c) 2016-2021 Deephaven Data Labs and Patent Pending GENERATED CODE - DO NOT MODIFY DIRECTLY This class + * generated by io.deephaven.db.tables.lang.DBLanguageFunctionGenerator */ package io.deephaven.db.tables.lang; @@ -26,42 +26,38 @@ public static void test_plus_int_int() { dbResult = DBLanguageFunctionUtil.plus(value1, value2); expectedResult = value1 + value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(value1, 
QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(zero1, value2); expectedResult = zero1 + value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_INT, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_INT, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_INT, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.plus(QueryConstants.NULL_INT, QueryConstants.NULL_INT); + dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_INT, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_INT, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_INT, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + 
expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -80,42 +76,38 @@ public static void test_plus_int_double() { dbResult = DBLanguageFunctionUtil.plus(value1, value2); expectedResult = value1 + value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; + description = "Double.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(zero1, value2); expectedResult = zero1 + value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; + description = "Double.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_INT, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_INT, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_INT, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.plus(QueryConstants.NULL_INT, QueryConstants.NULL_DOUBLE); + 
dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_INT, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_INT, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_INT, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -134,15 +126,14 @@ public static void test_plus_int_long() { dbResult = DBLanguageFunctionUtil.plus(value1, value2); expectedResult = value1 + value2; compareResult = Long.compare(dbResult, expectedResult); - description = - "Long.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; + description = "Long.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(zero1, value2); @@ -155,20 +146,18 @@ public static void test_plus_int_long() { expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_INT, value2), 
QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_INT, value2), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.plus(QueryConstants.NULL_INT, QueryConstants.NULL_LONG); + dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_INT, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_INT, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_INT, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -187,15 +176,14 @@ public static void test_plus_int_float() { dbResult = DBLanguageFunctionUtil.plus(value1, value2); expectedResult = value1 + value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; + description = "Float.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(zero1, 
value2); @@ -208,20 +196,18 @@ public static void test_plus_int_float() { expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_INT, value2), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_INT, value2), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.plus(QueryConstants.NULL_INT, QueryConstants.NULL_FLOAT); + dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_INT, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_INT, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_INT, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -240,42 +226,38 @@ public static void test_plus_int_char() { dbResult = DBLanguageFunctionUtil.plus(value1, value2); expectedResult = value1 + value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - 
"Integer.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(zero1, value2); expectedResult = zero1 + value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_INT, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_INT, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_INT, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.plus(QueryConstants.NULL_INT, QueryConstants.NULL_CHAR); + dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_INT, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_INT, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_INT, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -294,42 +276,38 @@ public 
static void test_plus_int_byte() { dbResult = DBLanguageFunctionUtil.plus(value1, value2); expectedResult = value1 + value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(zero1, value2); expectedResult = zero1 + value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_INT, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_INT, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_INT, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.plus(QueryConstants.NULL_INT, QueryConstants.NULL_BYTE); + dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_INT, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_INT; compareResult = 
Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_INT, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_INT, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -348,42 +326,38 @@ public static void test_plus_int_short() { dbResult = DBLanguageFunctionUtil.plus(value1, value2); expectedResult = value1 + value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(zero1, value2); expectedResult = zero1 + value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_INT, value2); 
expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_INT, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_INT, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.plus(QueryConstants.NULL_INT, QueryConstants.NULL_SHORT); + dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_INT, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_INT, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_INT, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -402,42 +376,38 @@ public static void test_plus_double_int() { dbResult = DBLanguageFunctionUtil.plus(value1, value2); expectedResult = value1 + value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; + description = "Double.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_INT), QueryConstants.NULL_DOUBLE)"; + 
"Double.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_INT), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(zero1, value2); expectedResult = zero1 + value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; + description = "Double.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_DOUBLE, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.plus(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_INT); + dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_INT), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_INT), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -456,42 +426,38 @@ public static void test_plus_double_double() { dbResult = DBLanguageFunctionUtil.plus(value1, 
value2); expectedResult = value1 + value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; + description = "Double.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(zero1, value2); expectedResult = zero1 + value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; + description = "Double.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_DOUBLE, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.plus(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_DOUBLE); + dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - 
"Double.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -510,42 +476,38 @@ public static void test_plus_double_long() { dbResult = DBLanguageFunctionUtil.plus(value1, value2); expectedResult = value1 + value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; + description = "Double.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(zero1, value2); expectedResult = zero1 + value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; + description = "Double.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_DOUBLE, value2); expectedResult = QueryConstants.NULL_DOUBLE; 
compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.plus(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_LONG); + dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_LONG), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_LONG), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -564,42 +526,38 @@ public static void test_plus_double_float() { dbResult = DBLanguageFunctionUtil.plus(value1, value2); expectedResult = value1 + value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; + description = "Double.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_DOUBLE)"; + 
"Double.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(zero1, value2); expectedResult = zero1 + value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; + description = "Double.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_DOUBLE, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.plus(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_FLOAT); + dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_FLOAT), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_FLOAT), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -618,42 +576,38 @@ public static void test_plus_double_char() { dbResult = 
DBLanguageFunctionUtil.plus(value1, value2); expectedResult = value1 + value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; + description = "Double.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(zero1, value2); expectedResult = zero1 + value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; + description = "Double.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_DOUBLE, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.plus(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_CHAR); + dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, 
expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_CHAR), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_CHAR), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -672,42 +626,38 @@ public static void test_plus_double_byte() { dbResult = DBLanguageFunctionUtil.plus(value1, value2); expectedResult = value1 + value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; + description = "Double.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(zero1, value2); expectedResult = zero1 + value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; + description = "Double.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_DOUBLE, value2); expectedResult = 
QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.plus(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_BYTE); + dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_BYTE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_BYTE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -726,42 +676,38 @@ public static void test_plus_double_short() { dbResult = DBLanguageFunctionUtil.plus(value1, value2); expectedResult = value1 + value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; + description = "Double.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_SHORT), 
QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(zero1, value2); expectedResult = zero1 + value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; + description = "Double.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_DOUBLE, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.plus(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_SHORT); + dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_SHORT), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_SHORT), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -780,15 +726,14 @@ public static void test_plus_long_int() { dbResult = 
DBLanguageFunctionUtil.plus(value1, value2); expectedResult = value1 + value2; compareResult = Long.compare(dbResult, expectedResult); - description = - "Long.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; + description = "Long.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_INT), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_INT), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(zero1, value2); @@ -801,20 +746,18 @@ public static void test_plus_long_int() { expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.plus(QueryConstants.NULL_LONG, QueryConstants.NULL_INT); + dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_LONG, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_LONG, QueryConstants.NULL_INT), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_LONG, QueryConstants.NULL_INT), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison 
failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -833,42 +776,38 @@ public static void test_plus_long_double() { dbResult = DBLanguageFunctionUtil.plus(value1, value2); expectedResult = value1 + value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; + description = "Double.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(zero1, value2); expectedResult = zero1 + value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; + description = "Double.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_LONG, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - 
DBLanguageFunctionUtil.plus(QueryConstants.NULL_LONG, QueryConstants.NULL_DOUBLE); + dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_LONG, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_LONG, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_LONG, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -887,15 +826,14 @@ public static void test_plus_long_long() { dbResult = DBLanguageFunctionUtil.plus(value1, value2); expectedResult = value1 + value2; compareResult = Long.compare(dbResult, expectedResult); - description = - "Long.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; + description = "Long.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(zero1, value2); @@ -908,20 +846,18 @@ public static void test_plus_long_long() { expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - 
"Long.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.plus(QueryConstants.NULL_LONG, QueryConstants.NULL_LONG); + dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_LONG, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_LONG, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_LONG, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -940,15 +876,14 @@ public static void test_plus_long_float() { dbResult = DBLanguageFunctionUtil.plus(value1, value2); expectedResult = value1 + value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; + description = "Float.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; 
TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(zero1, value2); @@ -961,20 +896,18 @@ public static void test_plus_long_float() { expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.plus(QueryConstants.NULL_LONG, QueryConstants.NULL_FLOAT); + dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_LONG, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_LONG, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_LONG, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -993,15 +926,14 @@ public static void test_plus_long_char() { dbResult = DBLanguageFunctionUtil.plus(value1, value2); expectedResult = value1 + value2; compareResult = Long.compare(dbResult, expectedResult); - description = - "Long.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; + description = "Long.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_LONG; compareResult 
= Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(zero1, value2); @@ -1014,20 +946,18 @@ public static void test_plus_long_char() { expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.plus(QueryConstants.NULL_LONG, QueryConstants.NULL_CHAR); + dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_LONG, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_LONG, QueryConstants.NULL_CHAR), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_LONG, QueryConstants.NULL_CHAR), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -1046,15 +976,14 @@ public static void test_plus_long_byte() { dbResult = DBLanguageFunctionUtil.plus(value1, value2); expectedResult = value1 + value2; compareResult = Long.compare(dbResult, expectedResult); - description = - "Long.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; + description = 
"Long.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(zero1, value2); @@ -1067,20 +996,18 @@ public static void test_plus_long_byte() { expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.plus(QueryConstants.NULL_LONG, QueryConstants.NULL_BYTE); + dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_LONG, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_LONG, QueryConstants.NULL_BYTE), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_LONG, QueryConstants.NULL_BYTE), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -1099,15 +1026,14 @@ public static void test_plus_long_short() 
{ dbResult = DBLanguageFunctionUtil.plus(value1, value2); expectedResult = value1 + value2; compareResult = Long.compare(dbResult, expectedResult); - description = - "Long.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; + description = "Long.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(zero1, value2); @@ -1120,20 +1046,18 @@ public static void test_plus_long_short() { expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.plus(QueryConstants.NULL_LONG, QueryConstants.NULL_SHORT); + dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_LONG, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_LONG, QueryConstants.NULL_SHORT), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_LONG, QueryConstants.NULL_SHORT), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new 
RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -1152,15 +1076,14 @@ public static void test_plus_float_int() { dbResult = DBLanguageFunctionUtil.plus(value1, value2); expectedResult = value1 + value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; + description = "Float.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_INT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_INT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(zero1, value2); @@ -1173,20 +1096,18 @@ public static void test_plus_float_int() { expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.plus(QueryConstants.NULL_FLOAT, QueryConstants.NULL_INT); + dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_FLOAT, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - 
"Float.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_FLOAT, QueryConstants.NULL_INT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_FLOAT, QueryConstants.NULL_INT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -1205,42 +1126,38 @@ public static void test_plus_float_double() { dbResult = DBLanguageFunctionUtil.plus(value1, value2); expectedResult = value1 + value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; + description = "Double.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(zero1, value2); expectedResult = zero1 + value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; + description = "Double.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_FLOAT, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult 
= Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.plus(QueryConstants.NULL_FLOAT, QueryConstants.NULL_DOUBLE); + dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_FLOAT, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_FLOAT, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_FLOAT, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -1259,15 +1176,14 @@ public static void test_plus_float_long() { dbResult = DBLanguageFunctionUtil.plus(value1, value2); expectedResult = value1 + value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; + description = "Float.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_FLOAT)"; + 
"Float.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(zero1, value2); @@ -1280,20 +1196,18 @@ public static void test_plus_float_long() { expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.plus(QueryConstants.NULL_FLOAT, QueryConstants.NULL_LONG); + dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_FLOAT, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_FLOAT, QueryConstants.NULL_LONG), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_FLOAT, QueryConstants.NULL_LONG), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -1312,15 +1226,14 @@ public static void test_plus_float_float() { dbResult = DBLanguageFunctionUtil.plus(value1, value2); expectedResult = value1 + value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; + description = "Float.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = 
DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(zero1, value2); @@ -1333,20 +1246,18 @@ public static void test_plus_float_float() { expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.plus(QueryConstants.NULL_FLOAT, QueryConstants.NULL_FLOAT); + dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_FLOAT, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_FLOAT, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_FLOAT, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -1365,15 +1276,14 @@ public static void test_plus_float_char() { dbResult = DBLanguageFunctionUtil.plus(value1, value2); expectedResult = value1 + value2; compareResult = 
Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; + description = "Float.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(zero1, value2); @@ -1386,20 +1296,18 @@ public static void test_plus_float_char() { expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.plus(QueryConstants.NULL_FLOAT, QueryConstants.NULL_CHAR); + dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_FLOAT, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_FLOAT, QueryConstants.NULL_CHAR), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_FLOAT, QueryConstants.NULL_CHAR), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + 
expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -1418,15 +1326,14 @@ public static void test_plus_float_byte() { dbResult = DBLanguageFunctionUtil.plus(value1, value2); expectedResult = value1 + value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; + description = "Float.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(zero1, value2); @@ -1439,20 +1346,18 @@ public static void test_plus_float_byte() { expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.plus(QueryConstants.NULL_FLOAT, QueryConstants.NULL_BYTE); + dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_FLOAT, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_FLOAT, QueryConstants.NULL_BYTE), QueryConstants.NULL_FLOAT)"; + 
"Float.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_FLOAT, QueryConstants.NULL_BYTE), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -1471,15 +1376,14 @@ public static void test_plus_float_short() { dbResult = DBLanguageFunctionUtil.plus(value1, value2); expectedResult = value1 + value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; + description = "Float.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(zero1, value2); @@ -1492,20 +1396,18 @@ public static void test_plus_float_short() { expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.plus(QueryConstants.NULL_FLOAT, QueryConstants.NULL_SHORT); + dbResult = 
DBLanguageFunctionUtil.plus(QueryConstants.NULL_FLOAT, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_FLOAT, QueryConstants.NULL_SHORT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_FLOAT, QueryConstants.NULL_SHORT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -1524,42 +1426,38 @@ public static void test_plus_char_int() { dbResult = DBLanguageFunctionUtil.plus(value1, value2); expectedResult = value1 + value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(zero1, value2); expectedResult = zero1 + value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.plus(zero1, value2), 
zero1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_CHAR, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.plus(QueryConstants.NULL_CHAR, QueryConstants.NULL_INT); + dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_CHAR, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_CHAR, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_CHAR, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -1578,42 +1476,38 @@ public static void test_plus_char_double() { dbResult = DBLanguageFunctionUtil.plus(value1, value2); expectedResult = value1 + value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; + description = "Double.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = 
Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(zero1, value2); expectedResult = zero1 + value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; + description = "Double.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_CHAR, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.plus(QueryConstants.NULL_CHAR, QueryConstants.NULL_DOUBLE); + dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_CHAR, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_CHAR, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_CHAR, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + 
dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -1632,15 +1526,14 @@ public static void test_plus_char_long() { dbResult = DBLanguageFunctionUtil.plus(value1, value2); expectedResult = value1 + value2; compareResult = Long.compare(dbResult, expectedResult); - description = - "Long.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; + description = "Long.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(zero1, value2); @@ -1653,20 +1546,18 @@ public static void test_plus_char_long() { expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.plus(QueryConstants.NULL_CHAR, QueryConstants.NULL_LONG); + dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_CHAR, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_CHAR, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_CHAR, QueryConstants.NULL_LONG), 
QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -1685,15 +1576,14 @@ public static void test_plus_char_float() { dbResult = DBLanguageFunctionUtil.plus(value1, value2); expectedResult = value1 + value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; + description = "Float.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(zero1, value2); @@ -1706,20 +1596,18 @@ public static void test_plus_char_float() { expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.plus(QueryConstants.NULL_CHAR, QueryConstants.NULL_FLOAT); + dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_CHAR, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; 
compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_CHAR, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_CHAR, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -1738,42 +1626,38 @@ public static void test_plus_char_char() { dbResult = DBLanguageFunctionUtil.plus(value1, value2); expectedResult = value1 + value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(zero1, value2); expectedResult = zero1 + value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_CHAR, 
value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.plus(QueryConstants.NULL_CHAR, QueryConstants.NULL_CHAR); + dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_CHAR, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_CHAR, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_CHAR, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -1792,42 +1676,38 @@ public static void test_plus_char_byte() { dbResult = DBLanguageFunctionUtil.plus(value1, value2); expectedResult = value1 + value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_BYTE), 
QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(zero1, value2); expectedResult = zero1 + value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_CHAR, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.plus(QueryConstants.NULL_CHAR, QueryConstants.NULL_BYTE); + dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_CHAR, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_CHAR, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_CHAR, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -1846,42 +1726,38 @@ public static void test_plus_char_short() { dbResult = 
DBLanguageFunctionUtil.plus(value1, value2); expectedResult = value1 + value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(zero1, value2); expectedResult = zero1 + value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_CHAR, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.plus(QueryConstants.NULL_CHAR, QueryConstants.NULL_SHORT); + dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_CHAR, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); 
description = - "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_CHAR, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_CHAR, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -1900,42 +1776,38 @@ public static void test_plus_byte_int() { dbResult = DBLanguageFunctionUtil.plus(value1, value2); expectedResult = value1 + value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(zero1, value2); expectedResult = zero1 + value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_BYTE, value2); expectedResult = QueryConstants.NULL_INT; 
compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.plus(QueryConstants.NULL_BYTE, QueryConstants.NULL_INT); + dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_BYTE, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_BYTE, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_BYTE, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -1954,42 +1826,38 @@ public static void test_plus_byte_double() { dbResult = DBLanguageFunctionUtil.plus(value1, value2); expectedResult = value1 + value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; + description = "Double.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + 
"Double.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(zero1, value2); expectedResult = zero1 + value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; + description = "Double.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_BYTE, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.plus(QueryConstants.NULL_BYTE, QueryConstants.NULL_DOUBLE); + dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_BYTE, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_BYTE, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_BYTE, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -2008,15 +1876,14 @@ public static void test_plus_byte_long() { dbResult = DBLanguageFunctionUtil.plus(value1, 
value2); expectedResult = value1 + value2; compareResult = Long.compare(dbResult, expectedResult); - description = - "Long.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; + description = "Long.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(zero1, value2); @@ -2029,20 +1896,18 @@ public static void test_plus_byte_long() { expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.plus(QueryConstants.NULL_BYTE, QueryConstants.NULL_LONG); + dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_BYTE, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_BYTE, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_BYTE, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + 
dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -2061,15 +1926,14 @@ public static void test_plus_byte_float() { dbResult = DBLanguageFunctionUtil.plus(value1, value2); expectedResult = value1 + value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; + description = "Float.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(zero1, value2); @@ -2082,20 +1946,18 @@ public static void test_plus_byte_float() { expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.plus(QueryConstants.NULL_BYTE, QueryConstants.NULL_FLOAT); + dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_BYTE, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_BYTE, QueryConstants.NULL_FLOAT), 
QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_BYTE, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -2114,42 +1976,38 @@ public static void test_plus_byte_char() { dbResult = DBLanguageFunctionUtil.plus(value1, value2); expectedResult = value1 + value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(zero1, value2); expectedResult = zero1 + value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_BYTE, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - 
"Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.plus(QueryConstants.NULL_BYTE, QueryConstants.NULL_CHAR); + dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_BYTE, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_BYTE, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_BYTE, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -2168,42 +2026,38 @@ public static void test_plus_byte_byte() { dbResult = DBLanguageFunctionUtil.plus(value1, value2); expectedResult = value1 + value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; 
TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(zero1, value2); expectedResult = zero1 + value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_BYTE, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.plus(QueryConstants.NULL_BYTE, QueryConstants.NULL_BYTE); + dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_BYTE, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_BYTE, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_BYTE, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -2222,42 +2076,38 @@ public static void test_plus_byte_short() { dbResult = DBLanguageFunctionUtil.plus(value1, value2); expectedResult = value1 + value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - 
"Integer.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(zero1, value2); expectedResult = zero1 + value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_BYTE, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.plus(QueryConstants.NULL_BYTE, QueryConstants.NULL_SHORT); + dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_BYTE, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_BYTE, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; + 
"Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_BYTE, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -2276,42 +2126,38 @@ public static void test_plus_short_int() { dbResult = DBLanguageFunctionUtil.plus(value1, value2); expectedResult = value1 + value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(zero1, value2); expectedResult = zero1 + value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_SHORT, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_SHORT, 
value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.plus(QueryConstants.NULL_SHORT, QueryConstants.NULL_INT); + dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_SHORT, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_SHORT, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_SHORT, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -2330,42 +2176,38 @@ public static void test_plus_short_double() { dbResult = DBLanguageFunctionUtil.plus(value1, value2); expectedResult = value1 + value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; + description = "Double.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = 
DBLanguageFunctionUtil.plus(zero1, value2); expectedResult = zero1 + value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; + description = "Double.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_SHORT, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.plus(QueryConstants.NULL_SHORT, QueryConstants.NULL_DOUBLE); + dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_SHORT, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_SHORT, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_SHORT, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -2384,15 +2226,14 @@ public static void test_plus_short_long() { dbResult = DBLanguageFunctionUtil.plus(value1, value2); expectedResult = value1 + value2; compareResult = Long.compare(dbResult, expectedResult); - description = - "Long.compare(DBLanguageFunctionUtil.plus(value1, 
value2), value1+value2)"; + description = "Long.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(zero1, value2); @@ -2405,20 +2246,18 @@ public static void test_plus_short_long() { expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.plus(QueryConstants.NULL_SHORT, QueryConstants.NULL_LONG); + dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_SHORT, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_SHORT, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_SHORT, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -2437,15 
+2276,14 @@ public static void test_plus_short_float() { dbResult = DBLanguageFunctionUtil.plus(value1, value2); expectedResult = value1 + value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; + description = "Float.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(zero1, value2); @@ -2458,20 +2296,18 @@ public static void test_plus_short_float() { expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.plus(QueryConstants.NULL_SHORT, QueryConstants.NULL_FLOAT); + dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_SHORT, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_SHORT, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_SHORT, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; 
TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -2490,42 +2326,38 @@ public static void test_plus_short_char() { dbResult = DBLanguageFunctionUtil.plus(value1, value2); expectedResult = value1 + value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(zero1, value2); expectedResult = zero1 + value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_SHORT, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_SHORT, value2), 
QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.plus(QueryConstants.NULL_SHORT, QueryConstants.NULL_CHAR); + dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_SHORT, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_SHORT, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_SHORT, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -2544,42 +2376,38 @@ public static void test_plus_short_byte() { dbResult = DBLanguageFunctionUtil.plus(value1, value2); expectedResult = value1 + value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(zero1, value2); expectedResult = zero1 + value2; compareResult = Integer.compare(dbResult, 
expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_SHORT, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.plus(QueryConstants.NULL_SHORT, QueryConstants.NULL_BYTE); + dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_SHORT, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_SHORT, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_SHORT, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -2598,42 +2426,38 @@ public static void test_plus_short_short() { dbResult = DBLanguageFunctionUtil.plus(value1, value2); expectedResult = value1 + value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.plus(value1, value2), value1+value2)"; 
TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.plus(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(zero1, value2); expectedResult = zero1 + value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.plus(zero1, value2), zero1+value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_SHORT, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.plus(QueryConstants.NULL_SHORT, QueryConstants.NULL_SHORT); + dbResult = DBLanguageFunctionUtil.plus(QueryConstants.NULL_SHORT, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_SHORT, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.plus(QueryConstants.NULL_SHORT, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch 
(Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -2652,42 +2476,38 @@ public static void test_minus_int_int() { dbResult = DBLanguageFunctionUtil.minus(value1, value2); expectedResult = value1 - value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(zero1, value2); expectedResult = zero1 - value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_INT, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_INT, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_INT, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, 
compareResult); - dbResult = - DBLanguageFunctionUtil.minus(QueryConstants.NULL_INT, QueryConstants.NULL_INT); + dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_INT, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_INT, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_INT, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -2706,42 +2526,38 @@ public static void test_minus_int_double() { dbResult = DBLanguageFunctionUtil.minus(value1, value2); expectedResult = value1 - value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; + description = "Double.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(zero1, value2); expectedResult = zero1 - value2; compareResult = Double.compare(dbResult, expectedResult); - description = - 
"Double.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; + description = "Double.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_INT, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_INT, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_INT, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.minus(QueryConstants.NULL_INT, QueryConstants.NULL_DOUBLE); + dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_INT, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_INT, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_INT, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -2760,15 +2576,14 @@ public static void test_minus_int_long() { dbResult = DBLanguageFunctionUtil.minus(value1, value2); expectedResult = value1 - value2; compareResult = Long.compare(dbResult, expectedResult); - description = - "Long.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; + description = "Long.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; TestCase.assertEquals(description, 0, 
compareResult); dbResult = DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(zero1, value2); @@ -2781,20 +2596,18 @@ public static void test_minus_int_long() { expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_INT, value2), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_INT, value2), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.minus(QueryConstants.NULL_INT, QueryConstants.NULL_LONG); + dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_INT, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_INT, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_INT, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -2813,42 +2626,38 @@ public static void test_minus_int_float() { dbResult = DBLanguageFunctionUtil.minus(value1, value2); expectedResult = value1 - value2; compareResult = 
Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; + description = "Float.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(zero1, value2); expectedResult = zero1 - value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; + description = "Float.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_INT, value2); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_INT, value2), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_INT, value2), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.minus(QueryConstants.NULL_INT, QueryConstants.NULL_FLOAT); + dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_INT, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_INT, 
QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_INT, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -2867,42 +2676,38 @@ public static void test_minus_int_char() { dbResult = DBLanguageFunctionUtil.minus(value1, value2); expectedResult = value1 - value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(zero1, value2); expectedResult = zero1 - value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_INT, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - 
"Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_INT, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_INT, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.minus(QueryConstants.NULL_INT, QueryConstants.NULL_CHAR); + dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_INT, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_INT, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_INT, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -2921,42 +2726,38 @@ public static void test_minus_int_byte() { dbResult = DBLanguageFunctionUtil.minus(value1, value2); expectedResult = value1 - value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; 
TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(zero1, value2); expectedResult = zero1 - value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_INT, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_INT, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_INT, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.minus(QueryConstants.NULL_INT, QueryConstants.NULL_BYTE); + dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_INT, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_INT, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_INT, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -2975,42 +2776,38 @@ public static void test_minus_int_short() { dbResult = DBLanguageFunctionUtil.minus(value1, value2); expectedResult = value1 - value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - 
"Integer.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(zero1, value2); expectedResult = zero1 - value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_INT, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_INT, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_INT, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.minus(QueryConstants.NULL_INT, QueryConstants.NULL_SHORT); + dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_INT, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_INT, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; + 
"Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_INT, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -3029,42 +2826,38 @@ public static void test_minus_double_int() { dbResult = DBLanguageFunctionUtil.minus(value1, value2); expectedResult = value1 - value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; + description = "Double.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_INT), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_INT), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(zero1, value2); expectedResult = zero1 - value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; + description = "Double.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_DOUBLE, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - 
"Double.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.minus(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_INT); + dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_INT), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_INT), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -3083,42 +2876,38 @@ public static void test_minus_double_double() { dbResult = DBLanguageFunctionUtil.minus(value1, value2); expectedResult = value1 - value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; + description = "Double.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.minus(value1, 
QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(zero1, value2); expectedResult = zero1 - value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; + description = "Double.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_DOUBLE, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_DOUBLE, - QueryConstants.NULL_DOUBLE); + dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -3137,42 +2926,38 @@ public static void test_minus_double_long() { dbResult = DBLanguageFunctionUtil.minus(value1, value2); expectedResult = 
value1 - value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; + description = "Double.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(zero1, value2); expectedResult = zero1 - value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; + description = "Double.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_DOUBLE, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.minus(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_LONG); + dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - 
"Double.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_LONG), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_LONG), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -3191,42 +2976,38 @@ public static void test_minus_double_float() { dbResult = DBLanguageFunctionUtil.minus(value1, value2); expectedResult = value1 - value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; + description = "Double.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(zero1, value2); expectedResult = zero1 - value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; + description = "Double.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_DOUBLE, value2); expectedResult = 
QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.minus(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_FLOAT); + dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_FLOAT), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_FLOAT), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -3245,42 +3026,38 @@ public static void test_minus_double_char() { dbResult = DBLanguageFunctionUtil.minus(value1, value2); expectedResult = value1 - value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; + description = "Double.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_CHAR), 
QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(zero1, value2); expectedResult = zero1 - value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; + description = "Double.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_DOUBLE, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.minus(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_CHAR); + dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_CHAR), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_CHAR), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -3299,42 +3076,38 @@ public static void test_minus_double_byte() 
{ dbResult = DBLanguageFunctionUtil.minus(value1, value2); expectedResult = value1 - value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; + description = "Double.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(zero1, value2); expectedResult = zero1 - value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; + description = "Double.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_DOUBLE, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.minus(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_BYTE); + dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = 
Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_BYTE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_BYTE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -3353,42 +3126,38 @@ public static void test_minus_double_short() { dbResult = DBLanguageFunctionUtil.minus(value1, value2); expectedResult = value1 - value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; + description = "Double.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(zero1, value2); expectedResult = zero1 - value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; + description = "Double.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = 
DBLanguageFunctionUtil.minus(QueryConstants.NULL_DOUBLE, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.minus(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_SHORT); + dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_SHORT), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_SHORT), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -3407,15 +3176,14 @@ public static void test_minus_long_int() { dbResult = DBLanguageFunctionUtil.minus(value1, value2); expectedResult = value1 - value2; compareResult = Long.compare(dbResult, expectedResult); - description = - "Long.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; + description = "Long.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - 
"Long.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_INT), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_INT), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(zero1, value2); @@ -3428,20 +3196,18 @@ public static void test_minus_long_int() { expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.minus(QueryConstants.NULL_LONG, QueryConstants.NULL_INT); + dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_LONG, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_LONG, QueryConstants.NULL_INT), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_LONG, QueryConstants.NULL_INT), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -3460,42 +3226,38 @@ public static void test_minus_long_double() { dbResult = DBLanguageFunctionUtil.minus(value1, value2); expectedResult = value1 - value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; + description = 
"Double.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(zero1, value2); expectedResult = zero1 - value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; + description = "Double.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_LONG, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.minus(QueryConstants.NULL_LONG, QueryConstants.NULL_DOUBLE); + dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_LONG, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_LONG, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_LONG, 
QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -3514,15 +3276,14 @@ public static void test_minus_long_long() { dbResult = DBLanguageFunctionUtil.minus(value1, value2); expectedResult = value1 - value2; compareResult = Long.compare(dbResult, expectedResult); - description = - "Long.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; + description = "Long.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(zero1, value2); @@ -3535,20 +3296,18 @@ public static void test_minus_long_long() { expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.minus(QueryConstants.NULL_LONG, QueryConstants.NULL_LONG); + dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_LONG, QueryConstants.NULL_LONG); expectedResult = 
QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_LONG, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_LONG, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -3567,42 +3326,38 @@ public static void test_minus_long_float() { dbResult = DBLanguageFunctionUtil.minus(value1, value2); expectedResult = value1 - value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; + description = "Float.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(zero1, value2); expectedResult = zero1 - value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; + description = "Float.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = 
DBLanguageFunctionUtil.minus(QueryConstants.NULL_LONG, value2); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.minus(QueryConstants.NULL_LONG, QueryConstants.NULL_FLOAT); + dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_LONG, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_LONG, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_LONG, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -3621,15 +3376,14 @@ public static void test_minus_long_char() { dbResult = DBLanguageFunctionUtil.minus(value1, value2); expectedResult = value1 - value2; compareResult = Long.compare(dbResult, expectedResult); - description = - "Long.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; + description = "Long.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - 
"Long.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(zero1, value2); @@ -3642,20 +3396,18 @@ public static void test_minus_long_char() { expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.minus(QueryConstants.NULL_LONG, QueryConstants.NULL_CHAR); + dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_LONG, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_LONG, QueryConstants.NULL_CHAR), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_LONG, QueryConstants.NULL_CHAR), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -3674,15 +3426,14 @@ public static void test_minus_long_byte() { dbResult = DBLanguageFunctionUtil.minus(value1, value2); expectedResult = value1 - value2; compareResult = Long.compare(dbResult, expectedResult); - description = - "Long.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; + description = 
"Long.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(zero1, value2); @@ -3695,20 +3446,18 @@ public static void test_minus_long_byte() { expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.minus(QueryConstants.NULL_LONG, QueryConstants.NULL_BYTE); + dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_LONG, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_LONG, QueryConstants.NULL_BYTE), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_LONG, QueryConstants.NULL_BYTE), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -3727,15 +3476,14 @@ public static void 
test_minus_long_short() { dbResult = DBLanguageFunctionUtil.minus(value1, value2); expectedResult = value1 - value2; compareResult = Long.compare(dbResult, expectedResult); - description = - "Long.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; + description = "Long.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(zero1, value2); @@ -3748,20 +3496,18 @@ public static void test_minus_long_short() { expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.minus(QueryConstants.NULL_LONG, QueryConstants.NULL_SHORT); + dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_LONG, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_LONG, QueryConstants.NULL_SHORT), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_LONG, QueryConstants.NULL_SHORT), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); 
} catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -3780,42 +3526,38 @@ public static void test_minus_float_int() { dbResult = DBLanguageFunctionUtil.minus(value1, value2); expectedResult = value1 - value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; + description = "Float.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_INT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_INT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(zero1, value2); expectedResult = zero1 - value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; + description = "Float.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_FLOAT, value2); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, 
compareResult); - dbResult = - DBLanguageFunctionUtil.minus(QueryConstants.NULL_FLOAT, QueryConstants.NULL_INT); + dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_FLOAT, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_FLOAT, QueryConstants.NULL_INT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_FLOAT, QueryConstants.NULL_INT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -3834,42 +3576,38 @@ public static void test_minus_float_double() { dbResult = DBLanguageFunctionUtil.minus(value1, value2); expectedResult = value1 - value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; + description = "Double.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(zero1, value2); expectedResult = zero1 - value2; compareResult = Double.compare(dbResult, expectedResult); - description = - 
"Double.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; + description = "Double.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_FLOAT, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.minus(QueryConstants.NULL_FLOAT, QueryConstants.NULL_DOUBLE); + dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_FLOAT, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_FLOAT, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_FLOAT, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -3888,42 +3626,38 @@ public static void test_minus_float_long() { dbResult = DBLanguageFunctionUtil.minus(value1, value2); expectedResult = value1 - value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; + description = "Float.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; 
TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(zero1, value2); expectedResult = zero1 - value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; + description = "Float.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_FLOAT, value2); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.minus(QueryConstants.NULL_FLOAT, QueryConstants.NULL_LONG); + dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_FLOAT, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_FLOAT, QueryConstants.NULL_LONG), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_FLOAT, QueryConstants.NULL_LONG), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); } catch 
(Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -3942,42 +3676,38 @@ public static void test_minus_float_float() { dbResult = DBLanguageFunctionUtil.minus(value1, value2); expectedResult = value1 - value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; + description = "Float.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(zero1, value2); expectedResult = zero1 - value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; + description = "Float.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_FLOAT, value2); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, 
compareResult); - dbResult = - DBLanguageFunctionUtil.minus(QueryConstants.NULL_FLOAT, QueryConstants.NULL_FLOAT); + dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_FLOAT, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_FLOAT, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_FLOAT, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -3996,42 +3726,38 @@ public static void test_minus_float_char() { dbResult = DBLanguageFunctionUtil.minus(value1, value2); expectedResult = value1 - value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; + description = "Float.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(zero1, value2); expectedResult = zero1 - value2; compareResult = Float.compare(dbResult, expectedResult); - description = - 
"Float.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; + description = "Float.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_FLOAT, value2); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.minus(QueryConstants.NULL_FLOAT, QueryConstants.NULL_CHAR); + dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_FLOAT, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_FLOAT, QueryConstants.NULL_CHAR), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_FLOAT, QueryConstants.NULL_CHAR), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -4050,42 +3776,38 @@ public static void test_minus_float_byte() { dbResult = DBLanguageFunctionUtil.minus(value1, value2); expectedResult = value1 - value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; + description = "Float.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; TestCase.assertEquals(description, 0, 
compareResult); dbResult = DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(zero1, value2); expectedResult = zero1 - value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; + description = "Float.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_FLOAT, value2); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.minus(QueryConstants.NULL_FLOAT, QueryConstants.NULL_BYTE); + dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_FLOAT, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_FLOAT, QueryConstants.NULL_BYTE), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_FLOAT, QueryConstants.NULL_BYTE), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new 
RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -4104,42 +3826,38 @@ public static void test_minus_float_short() { dbResult = DBLanguageFunctionUtil.minus(value1, value2); expectedResult = value1 - value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; + description = "Float.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(zero1, value2); expectedResult = zero1 - value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; + description = "Float.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_FLOAT, value2); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = 
- DBLanguageFunctionUtil.minus(QueryConstants.NULL_FLOAT, QueryConstants.NULL_SHORT); + dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_FLOAT, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_FLOAT, QueryConstants.NULL_SHORT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_FLOAT, QueryConstants.NULL_SHORT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -4158,42 +3876,38 @@ public static void test_minus_char_int() { dbResult = DBLanguageFunctionUtil.minus(value1, value2); expectedResult = value1 - value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(zero1, value2); expectedResult = zero1 - value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.minus(zero1, 
value2), zero1-value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_CHAR, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.minus(QueryConstants.NULL_CHAR, QueryConstants.NULL_INT); + dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_CHAR, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_CHAR, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_CHAR, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -4212,42 +3926,38 @@ public static void test_minus_char_double() { dbResult = DBLanguageFunctionUtil.minus(value1, value2); expectedResult = value1 - value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; + description = "Double.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = 
DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(zero1, value2); expectedResult = zero1 - value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; + description = "Double.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_CHAR, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.minus(QueryConstants.NULL_CHAR, QueryConstants.NULL_DOUBLE); + dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_CHAR, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_CHAR, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_CHAR, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new 
RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -4266,15 +3976,14 @@ public static void test_minus_char_long() { dbResult = DBLanguageFunctionUtil.minus(value1, value2); expectedResult = value1 - value2; compareResult = Long.compare(dbResult, expectedResult); - description = - "Long.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; + description = "Long.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(zero1, value2); @@ -4287,20 +3996,18 @@ public static void test_minus_char_long() { expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.minus(QueryConstants.NULL_CHAR, QueryConstants.NULL_LONG); + dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_CHAR, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - 
"Long.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_CHAR, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_CHAR, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -4319,42 +4026,38 @@ public static void test_minus_char_float() { dbResult = DBLanguageFunctionUtil.minus(value1, value2); expectedResult = value1 - value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; + description = "Float.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(zero1, value2); expectedResult = zero1 - value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; + description = "Float.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_CHAR, value2); expectedResult = QueryConstants.NULL_FLOAT; compareResult = 
Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.minus(QueryConstants.NULL_CHAR, QueryConstants.NULL_FLOAT); + dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_CHAR, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_CHAR, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_CHAR, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -4373,42 +4076,38 @@ public static void test_minus_char_char() { dbResult = DBLanguageFunctionUtil.minus(value1, value2); expectedResult = value1 - value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; + 
"Integer.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(zero1, value2); expectedResult = zero1 - value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_CHAR, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.minus(QueryConstants.NULL_CHAR, QueryConstants.NULL_CHAR); + dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_CHAR, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_CHAR, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_CHAR, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -4427,42 +4126,38 @@ public static void test_minus_char_byte() { dbResult = DBLanguageFunctionUtil.minus(value1, value2); 
expectedResult = value1 - value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(zero1, value2); expectedResult = zero1 - value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_CHAR, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.minus(QueryConstants.NULL_CHAR, QueryConstants.NULL_BYTE); + dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_CHAR, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - 
"Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_CHAR, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_CHAR, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -4481,42 +4176,38 @@ public static void test_minus_char_short() { dbResult = DBLanguageFunctionUtil.minus(value1, value2); expectedResult = value1 - value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(zero1, value2); expectedResult = zero1 - value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_CHAR, value2); expectedResult = QueryConstants.NULL_INT; 
compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.minus(QueryConstants.NULL_CHAR, QueryConstants.NULL_SHORT); + dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_CHAR, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_CHAR, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_CHAR, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -4535,42 +4226,38 @@ public static void test_minus_byte_int() { dbResult = DBLanguageFunctionUtil.minus(value1, value2); expectedResult = value1 - value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; + 
"Integer.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(zero1, value2); expectedResult = zero1 - value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_BYTE, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.minus(QueryConstants.NULL_BYTE, QueryConstants.NULL_INT); + dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_BYTE, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_BYTE, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_BYTE, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -4589,42 +4276,38 @@ public static void test_minus_byte_double() { dbResult = DBLanguageFunctionUtil.minus(value1, value2); 
expectedResult = value1 - value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; + description = "Double.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(zero1, value2); expectedResult = zero1 - value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; + description = "Double.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_BYTE, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.minus(QueryConstants.NULL_BYTE, QueryConstants.NULL_DOUBLE); + dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_BYTE, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - 
"Double.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_BYTE, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_BYTE, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -4643,15 +4326,14 @@ public static void test_minus_byte_long() { dbResult = DBLanguageFunctionUtil.minus(value1, value2); expectedResult = value1 - value2; compareResult = Long.compare(dbResult, expectedResult); - description = - "Long.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; + description = "Long.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(zero1, value2); @@ -4664,20 +4346,18 @@ public static void test_minus_byte_long() { expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - 
DBLanguageFunctionUtil.minus(QueryConstants.NULL_BYTE, QueryConstants.NULL_LONG); + dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_BYTE, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_BYTE, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_BYTE, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -4696,42 +4376,38 @@ public static void test_minus_byte_float() { dbResult = DBLanguageFunctionUtil.minus(value1, value2); expectedResult = value1 - value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; + description = "Float.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(zero1, value2); expectedResult = zero1 - value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.minus(zero1, value2), 
zero1-value2)"; + description = "Float.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_BYTE, value2); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.minus(QueryConstants.NULL_BYTE, QueryConstants.NULL_FLOAT); + dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_BYTE, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_BYTE, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_BYTE, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -4750,42 +4426,38 @@ public static void test_minus_byte_char() { dbResult = DBLanguageFunctionUtil.minus(value1, value2); expectedResult = value1 - value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(value1, 
QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(zero1, value2); expectedResult = zero1 - value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_BYTE, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.minus(QueryConstants.NULL_BYTE, QueryConstants.NULL_CHAR); + dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_BYTE, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_BYTE, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_BYTE, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", 
expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -4804,42 +4476,38 @@ public static void test_minus_byte_byte() { dbResult = DBLanguageFunctionUtil.minus(value1, value2); expectedResult = value1 - value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(zero1, value2); expectedResult = zero1 - value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_BYTE, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.minus(QueryConstants.NULL_BYTE, 
QueryConstants.NULL_BYTE); + dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_BYTE, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_BYTE, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_BYTE, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -4858,42 +4526,38 @@ public static void test_minus_byte_short() { dbResult = DBLanguageFunctionUtil.minus(value1, value2); expectedResult = value1 - value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(zero1, value2); expectedResult = zero1 - value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; + description = 
"Integer.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_BYTE, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.minus(QueryConstants.NULL_BYTE, QueryConstants.NULL_SHORT); + dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_BYTE, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_BYTE, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_BYTE, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -4912,42 +4576,38 @@ public static void test_minus_short_int() { dbResult = DBLanguageFunctionUtil.minus(value1, value2); expectedResult = value1 - value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_INT); 
expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(zero1, value2); expectedResult = zero1 - value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_SHORT, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.minus(QueryConstants.NULL_SHORT, QueryConstants.NULL_INT); + dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_SHORT, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_SHORT, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_SHORT, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + 
expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -4966,42 +4626,38 @@ public static void test_minus_short_double() { dbResult = DBLanguageFunctionUtil.minus(value1, value2); expectedResult = value1 - value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; + description = "Double.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(zero1, value2); expectedResult = zero1 - value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; + description = "Double.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_SHORT, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.minus(QueryConstants.NULL_SHORT, 
QueryConstants.NULL_DOUBLE); + dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_SHORT, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_SHORT, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_SHORT, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -5020,15 +4676,14 @@ public static void test_minus_short_long() { dbResult = DBLanguageFunctionUtil.minus(value1, value2); expectedResult = value1 - value2; compareResult = Long.compare(dbResult, expectedResult); - description = - "Long.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; + description = "Long.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(zero1, value2); @@ -5041,20 +4696,18 @@ public static void test_minus_short_long() { expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - 
"Long.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.minus(QueryConstants.NULL_SHORT, QueryConstants.NULL_LONG); + dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_SHORT, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_SHORT, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_SHORT, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -5073,42 +4726,38 @@ public static void test_minus_short_float() { dbResult = DBLanguageFunctionUtil.minus(value1, value2); expectedResult = value1 - value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; + description = "Float.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; 
TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(zero1, value2); expectedResult = zero1 - value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; + description = "Float.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_SHORT, value2); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.minus(QueryConstants.NULL_SHORT, QueryConstants.NULL_FLOAT); + dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_SHORT, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_SHORT, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_SHORT, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -5127,42 +4776,38 @@ public static void test_minus_short_char() { dbResult = DBLanguageFunctionUtil.minus(value1, value2); expectedResult = value1 - value2; compareResult = Integer.compare(dbResult, expectedResult); - 
description = - "Integer.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(zero1, value2); expectedResult = zero1 - value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_SHORT, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.minus(QueryConstants.NULL_SHORT, QueryConstants.NULL_CHAR); + dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_SHORT, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_SHORT, QueryConstants.NULL_CHAR), 
QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_SHORT, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -5181,42 +4826,38 @@ public static void test_minus_short_byte() { dbResult = DBLanguageFunctionUtil.minus(value1, value2); expectedResult = value1 - value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(zero1, value2); expectedResult = zero1 - value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_SHORT, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - 
"Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.minus(QueryConstants.NULL_SHORT, QueryConstants.NULL_BYTE); + dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_SHORT, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_SHORT, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_SHORT, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -5235,42 +4876,38 @@ public static void test_minus_short_short() { dbResult = DBLanguageFunctionUtil.minus(value1, value2); expectedResult = value1 - value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.minus(value1, value2), value1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.minus(value1, QueryConstants.NULL_SHORT), 
QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(zero1, value2); expectedResult = zero1 - value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.minus(zero1, value2), zero1-value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_SHORT, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.minus(QueryConstants.NULL_SHORT, QueryConstants.NULL_SHORT); + dbResult = DBLanguageFunctionUtil.minus(QueryConstants.NULL_SHORT, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_SHORT, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.minus(QueryConstants.NULL_SHORT, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -5289,42 +4926,38 @@ public static void test_times_int_int() { dbResult = DBLanguageFunctionUtil.times(value1, value2); expectedResult = value1 * value2; compareResult = 
Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(zero1, value2); expectedResult = zero1 * value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_INT, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_INT, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_INT, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.times(QueryConstants.NULL_INT, QueryConstants.NULL_INT); + dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_INT, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_INT, 
QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_INT, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -5343,42 +4976,38 @@ public static void test_times_int_double() { dbResult = DBLanguageFunctionUtil.times(value1, value2); expectedResult = value1 * value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; + description = "Double.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(zero1, value2); expectedResult = zero1 * value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; + description = "Double.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_INT, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - 
"Double.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_INT, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_INT, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.times(QueryConstants.NULL_INT, QueryConstants.NULL_DOUBLE); + dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_INT, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_INT, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_INT, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -5397,15 +5026,14 @@ public static void test_times_int_long() { dbResult = DBLanguageFunctionUtil.times(value1, value2); expectedResult = value1 * value2; compareResult = Long.compare(dbResult, expectedResult); - description = - "Long.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; + description = "Long.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; 
TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(zero1, value2); @@ -5418,20 +5046,18 @@ public static void test_times_int_long() { expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_INT, value2), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_INT, value2), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.times(QueryConstants.NULL_INT, QueryConstants.NULL_LONG); + dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_INT, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_INT, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_INT, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -5450,42 +5076,38 @@ public static void test_times_int_float() { dbResult = DBLanguageFunctionUtil.times(value1, value2); expectedResult = value1 * value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; + description = "Float.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = 
Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(zero1, value2); expectedResult = zero1 * value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; + description = "Float.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_INT, value2); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_INT, value2), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_INT, value2), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.times(QueryConstants.NULL_INT, QueryConstants.NULL_FLOAT); + dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_INT, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_INT, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_INT, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", 
expectedResult=" + expectedResult, ex); } } @@ -5504,42 +5126,38 @@ public static void test_times_int_char() { dbResult = DBLanguageFunctionUtil.times(value1, value2); expectedResult = value1 * value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(zero1, value2); expectedResult = zero1 * value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_INT, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_INT, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_INT, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.times(QueryConstants.NULL_INT, QueryConstants.NULL_CHAR); + dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_INT, 
QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_INT, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_INT, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -5558,42 +5176,38 @@ public static void test_times_int_byte() { dbResult = DBLanguageFunctionUtil.times(value1, value2); expectedResult = value1 * value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(zero1, value2); expectedResult = zero1 * value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; TestCase.assertEquals(description, 0, 
compareResult); dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_INT, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_INT, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_INT, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.times(QueryConstants.NULL_INT, QueryConstants.NULL_BYTE); + dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_INT, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_INT, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_INT, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -5612,42 +5226,38 @@ public static void test_times_int_short() { dbResult = DBLanguageFunctionUtil.times(value1, value2); expectedResult = value1 * value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - 
"Integer.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(zero1, value2); expectedResult = zero1 * value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_INT, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_INT, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_INT, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.times(QueryConstants.NULL_INT, QueryConstants.NULL_SHORT); + dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_INT, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_INT, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_INT, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -5666,42 
+5276,38 @@ public static void test_times_double_int() { dbResult = DBLanguageFunctionUtil.times(value1, value2); expectedResult = value1 * value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; + description = "Double.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_INT), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_INT), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(zero1, value2); expectedResult = zero1 * value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; + description = "Double.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_DOUBLE, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.times(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_INT); + dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_INT); expectedResult = 
QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_INT), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_INT), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -5720,42 +5326,38 @@ public static void test_times_double_double() { dbResult = DBLanguageFunctionUtil.times(value1, value2); expectedResult = value1 * value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; + description = "Double.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(zero1, value2); expectedResult = zero1 * value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; + description = "Double.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = 
DBLanguageFunctionUtil.times(QueryConstants.NULL_DOUBLE, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_DOUBLE, - QueryConstants.NULL_DOUBLE); + dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -5774,42 +5376,38 @@ public static void test_times_double_long() { dbResult = DBLanguageFunctionUtil.times(value1, value2); expectedResult = value1 * value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; + description = "Double.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = 
- "Double.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(zero1, value2); expectedResult = zero1 * value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; + description = "Double.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_DOUBLE, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.times(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_LONG); + dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_LONG), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_LONG), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + 
expectedResult, ex); } } @@ -5828,42 +5426,38 @@ public static void test_times_double_float() { dbResult = DBLanguageFunctionUtil.times(value1, value2); expectedResult = value1 * value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; + description = "Double.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(zero1, value2); expectedResult = zero1 * value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; + description = "Double.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_DOUBLE, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.times(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_FLOAT); + dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_DOUBLE, 
QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_FLOAT), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_FLOAT), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -5882,42 +5476,38 @@ public static void test_times_double_char() { dbResult = DBLanguageFunctionUtil.times(value1, value2); expectedResult = value1 * value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; + description = "Double.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(zero1, value2); expectedResult = zero1 * value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; + description = "Double.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; 
TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_DOUBLE, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.times(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_CHAR); + dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_CHAR), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_CHAR), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -5936,42 +5526,38 @@ public static void test_times_double_byte() { dbResult = DBLanguageFunctionUtil.times(value1, value2); expectedResult = value1 * value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; + description = "Double.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult 
= Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(zero1, value2); expectedResult = zero1 * value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; + description = "Double.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_DOUBLE, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.times(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_BYTE); + dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_BYTE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_BYTE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: 
dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -5990,42 +5576,38 @@ public static void test_times_double_short() { dbResult = DBLanguageFunctionUtil.times(value1, value2); expectedResult = value1 * value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; + description = "Double.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(zero1, value2); expectedResult = zero1 * value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; + description = "Double.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_DOUBLE, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.times(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_SHORT); + dbResult = 
DBLanguageFunctionUtil.times(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_SHORT), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_SHORT), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -6044,15 +5626,14 @@ public static void test_times_long_int() { dbResult = DBLanguageFunctionUtil.times(value1, value2); expectedResult = value1 * value2; compareResult = Long.compare(dbResult, expectedResult); - description = - "Long.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; + description = "Long.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_INT), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_INT), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(zero1, value2); @@ -6065,20 +5646,18 @@ public static void test_times_long_int() { expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_LONG, value2), 
QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.times(QueryConstants.NULL_LONG, QueryConstants.NULL_INT); + dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_LONG, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_LONG, QueryConstants.NULL_INT), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_LONG, QueryConstants.NULL_INT), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -6097,42 +5676,38 @@ public static void test_times_long_double() { dbResult = DBLanguageFunctionUtil.times(value1, value2); expectedResult = value1 * value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; + description = "Double.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = 
DBLanguageFunctionUtil.times(zero1, value2); expectedResult = zero1 * value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; + description = "Double.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_LONG, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.times(QueryConstants.NULL_LONG, QueryConstants.NULL_DOUBLE); + dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_LONG, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_LONG, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_LONG, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -6151,15 +5726,14 @@ public static void test_times_long_long() { dbResult = DBLanguageFunctionUtil.times(value1, value2); expectedResult = value1 * value2; compareResult = Long.compare(dbResult, expectedResult); - description = - 
"Long.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; + description = "Long.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(zero1, value2); @@ -6172,20 +5746,18 @@ public static void test_times_long_long() { expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.times(QueryConstants.NULL_LONG, QueryConstants.NULL_LONG); + dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_LONG, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_LONG, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_LONG, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", 
expectedResult=" + expectedResult, ex); } } @@ -6204,42 +5776,38 @@ public static void test_times_long_float() { dbResult = DBLanguageFunctionUtil.times(value1, value2); expectedResult = value1 * value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; + description = "Float.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(zero1, value2); expectedResult = zero1 * value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; + description = "Float.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_LONG, value2); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.times(QueryConstants.NULL_LONG, QueryConstants.NULL_FLOAT); + dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_LONG, 
QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_LONG, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_LONG, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -6258,15 +5826,14 @@ public static void test_times_long_char() { dbResult = DBLanguageFunctionUtil.times(value1, value2); expectedResult = value1 * value2; compareResult = Long.compare(dbResult, expectedResult); - description = - "Long.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; + description = "Long.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(zero1, value2); @@ -6279,20 +5846,18 @@ public static void test_times_long_char() { expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_LONG)"; + 
"Long.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.times(QueryConstants.NULL_LONG, QueryConstants.NULL_CHAR); + dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_LONG, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_LONG, QueryConstants.NULL_CHAR), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_LONG, QueryConstants.NULL_CHAR), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -6311,15 +5876,14 @@ public static void test_times_long_byte() { dbResult = DBLanguageFunctionUtil.times(value1, value2); expectedResult = value1 * value2; compareResult = Long.compare(dbResult, expectedResult); - description = - "Long.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; + description = "Long.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(zero1, value2); @@ -6332,20 
+5896,18 @@ public static void test_times_long_byte() { expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.times(QueryConstants.NULL_LONG, QueryConstants.NULL_BYTE); + dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_LONG, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_LONG, QueryConstants.NULL_BYTE), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_LONG, QueryConstants.NULL_BYTE), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -6364,15 +5926,14 @@ public static void test_times_long_short() { dbResult = DBLanguageFunctionUtil.times(value1, value2); expectedResult = value1 * value2; compareResult = Long.compare(dbResult, expectedResult); - description = - "Long.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; + description = "Long.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.times(value1, 
QueryConstants.NULL_SHORT), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(zero1, value2); @@ -6385,20 +5946,18 @@ public static void test_times_long_short() { expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.times(QueryConstants.NULL_LONG, QueryConstants.NULL_SHORT); + dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_LONG, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_LONG, QueryConstants.NULL_SHORT), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_LONG, QueryConstants.NULL_SHORT), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -6417,42 +5976,38 @@ public static void test_times_float_int() { dbResult = DBLanguageFunctionUtil.times(value1, value2); expectedResult = value1 * value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; + description = "Float.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; 
TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_INT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_INT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(zero1, value2); expectedResult = zero1 * value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; + description = "Float.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_FLOAT, value2); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.times(QueryConstants.NULL_FLOAT, QueryConstants.NULL_INT); + dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_FLOAT, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_FLOAT, QueryConstants.NULL_INT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_FLOAT, QueryConstants.NULL_INT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); } catch 
(Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -6471,42 +6026,38 @@ public static void test_times_float_double() { dbResult = DBLanguageFunctionUtil.times(value1, value2); expectedResult = value1 * value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; + description = "Double.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(zero1, value2); expectedResult = zero1 * value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; + description = "Double.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_FLOAT, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_DOUBLE)"; 
TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.times(QueryConstants.NULL_FLOAT, QueryConstants.NULL_DOUBLE); + dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_FLOAT, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_FLOAT, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_FLOAT, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -6525,42 +6076,38 @@ public static void test_times_float_long() { dbResult = DBLanguageFunctionUtil.times(value1, value2); expectedResult = value1 * value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; + description = "Float.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(zero1, value2); expectedResult = zero1 * value2; compareResult = Float.compare(dbResult, expectedResult); 
- description = - "Float.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; + description = "Float.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_FLOAT, value2); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.times(QueryConstants.NULL_FLOAT, QueryConstants.NULL_LONG); + dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_FLOAT, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_FLOAT, QueryConstants.NULL_LONG), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_FLOAT, QueryConstants.NULL_LONG), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -6579,42 +6126,38 @@ public static void test_times_float_float() { dbResult = DBLanguageFunctionUtil.times(value1, value2); expectedResult = value1 * value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; + description = "Float.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; 
TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(zero1, value2); expectedResult = zero1 * value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; + description = "Float.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_FLOAT, value2); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.times(QueryConstants.NULL_FLOAT, QueryConstants.NULL_FLOAT); + dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_FLOAT, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_FLOAT, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_FLOAT, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); 
} catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -6633,42 +6176,38 @@ public static void test_times_float_char() { dbResult = DBLanguageFunctionUtil.times(value1, value2); expectedResult = value1 * value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; + description = "Float.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(zero1, value2); expectedResult = zero1 * value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; + description = "Float.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_FLOAT, value2); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 
0, compareResult); - dbResult = - DBLanguageFunctionUtil.times(QueryConstants.NULL_FLOAT, QueryConstants.NULL_CHAR); + dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_FLOAT, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_FLOAT, QueryConstants.NULL_CHAR), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_FLOAT, QueryConstants.NULL_CHAR), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -6687,42 +6226,38 @@ public static void test_times_float_byte() { dbResult = DBLanguageFunctionUtil.times(value1, value2); expectedResult = value1 * value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; + description = "Float.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(zero1, value2); expectedResult = zero1 * value2; compareResult = Float.compare(dbResult, expectedResult); - description = - 
"Float.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; + description = "Float.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_FLOAT, value2); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.times(QueryConstants.NULL_FLOAT, QueryConstants.NULL_BYTE); + dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_FLOAT, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_FLOAT, QueryConstants.NULL_BYTE), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_FLOAT, QueryConstants.NULL_BYTE), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -6741,42 +6276,38 @@ public static void test_times_float_short() { dbResult = DBLanguageFunctionUtil.times(value1, value2); expectedResult = value1 * value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; + description = "Float.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; TestCase.assertEquals(description, 0, 
compareResult); dbResult = DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(zero1, value2); expectedResult = zero1 * value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; + description = "Float.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_FLOAT, value2); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.times(QueryConstants.NULL_FLOAT, QueryConstants.NULL_SHORT); + dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_FLOAT, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_FLOAT, QueryConstants.NULL_SHORT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_FLOAT, QueryConstants.NULL_SHORT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new 
RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -6795,42 +6326,38 @@ public static void test_times_char_int() { dbResult = DBLanguageFunctionUtil.times(value1, value2); expectedResult = value1 * value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(zero1, value2); expectedResult = zero1 * value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_CHAR, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = 
- DBLanguageFunctionUtil.times(QueryConstants.NULL_CHAR, QueryConstants.NULL_INT); + dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_CHAR, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_CHAR, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_CHAR, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -6849,42 +6376,38 @@ public static void test_times_char_double() { dbResult = DBLanguageFunctionUtil.times(value1, value2); expectedResult = value1 * value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; + description = "Double.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(zero1, value2); expectedResult = zero1 * value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.times(zero1, 
value2), zero1*value2)"; + description = "Double.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_CHAR, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.times(QueryConstants.NULL_CHAR, QueryConstants.NULL_DOUBLE); + dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_CHAR, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_CHAR, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_CHAR, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -6903,15 +6426,14 @@ public static void test_times_char_long() { dbResult = DBLanguageFunctionUtil.times(value1, value2); expectedResult = value1 * value2; compareResult = Long.compare(dbResult, expectedResult); - description = - "Long.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; + description = "Long.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = 
DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(zero1, value2); @@ -6924,20 +6446,18 @@ public static void test_times_char_long() { expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.times(QueryConstants.NULL_CHAR, QueryConstants.NULL_LONG); + dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_CHAR, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_CHAR, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_CHAR, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -6956,42 +6476,38 @@ public static void test_times_char_float() { dbResult = DBLanguageFunctionUtil.times(value1, value2); expectedResult = value1 * value2; compareResult = 
Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; + description = "Float.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(zero1, value2); expectedResult = zero1 * value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; + description = "Float.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_CHAR, value2); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.times(QueryConstants.NULL_CHAR, QueryConstants.NULL_FLOAT); + dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_CHAR, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_CHAR, 
QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_CHAR, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -7010,42 +6526,38 @@ public static void test_times_char_char() { dbResult = DBLanguageFunctionUtil.times(value1, value2); expectedResult = value1 * value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(zero1, value2); expectedResult = zero1 * value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_CHAR, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - 
"Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.times(QueryConstants.NULL_CHAR, QueryConstants.NULL_CHAR); + dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_CHAR, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_CHAR, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_CHAR, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -7064,42 +6576,38 @@ public static void test_times_char_byte() { dbResult = DBLanguageFunctionUtil.times(value1, value2); expectedResult = value1 * value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_BYTE), 
QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(zero1, value2); expectedResult = zero1 * value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_CHAR, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.times(QueryConstants.NULL_CHAR, QueryConstants.NULL_BYTE); + dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_CHAR, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_CHAR, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_CHAR, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -7118,42 +6626,38 @@ public static void test_times_char_short() { dbResult = DBLanguageFunctionUtil.times(value1, value2); expectedResult = value1 * value2; compareResult = Integer.compare(dbResult, 
expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(zero1, value2); expectedResult = zero1 * value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_CHAR, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.times(QueryConstants.NULL_CHAR, QueryConstants.NULL_SHORT); + dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_CHAR, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_CHAR, QueryConstants.NULL_SHORT), 
QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_CHAR, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -7172,42 +6676,38 @@ public static void test_times_byte_int() { dbResult = DBLanguageFunctionUtil.times(value1, value2); expectedResult = value1 * value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(zero1, value2); expectedResult = zero1 * value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_BYTE, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - 
"Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.times(QueryConstants.NULL_BYTE, QueryConstants.NULL_INT); + dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_BYTE, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_BYTE, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_BYTE, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -7226,42 +6726,38 @@ public static void test_times_byte_double() { dbResult = DBLanguageFunctionUtil.times(value1, value2); expectedResult = value1 * value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; + description = "Double.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_DOUBLE), 
QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(zero1, value2); expectedResult = zero1 * value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; + description = "Double.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_BYTE, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.times(QueryConstants.NULL_BYTE, QueryConstants.NULL_DOUBLE); + dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_BYTE, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_BYTE, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_BYTE, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -7280,15 +6776,14 @@ public static void test_times_byte_long() { dbResult = DBLanguageFunctionUtil.times(value1, value2); expectedResult = value1 * value2; compareResult = 
Long.compare(dbResult, expectedResult); - description = - "Long.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; + description = "Long.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(zero1, value2); @@ -7301,20 +6796,18 @@ public static void test_times_byte_long() { expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.times(QueryConstants.NULL_BYTE, QueryConstants.NULL_LONG); + dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_BYTE, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_BYTE, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_BYTE, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - 
ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -7333,42 +6826,38 @@ public static void test_times_byte_float() { dbResult = DBLanguageFunctionUtil.times(value1, value2); expectedResult = value1 * value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; + description = "Float.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(zero1, value2); expectedResult = zero1 * value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; + description = "Float.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_BYTE, value2); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.times(QueryConstants.NULL_BYTE, QueryConstants.NULL_FLOAT); + dbResult = 
DBLanguageFunctionUtil.times(QueryConstants.NULL_BYTE, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_BYTE, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_BYTE, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -7387,42 +6876,38 @@ public static void test_times_byte_char() { dbResult = DBLanguageFunctionUtil.times(value1, value2); expectedResult = value1 * value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(zero1, value2); expectedResult = zero1 * value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.times(zero1, 
value2), zero1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_BYTE, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.times(QueryConstants.NULL_BYTE, QueryConstants.NULL_CHAR); + dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_BYTE, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_BYTE, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_BYTE, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -7441,42 +6926,38 @@ public static void test_times_byte_byte() { dbResult = DBLanguageFunctionUtil.times(value1, value2); expectedResult = value1 * value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_INT; compareResult = 
Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(zero1, value2); expectedResult = zero1 * value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_BYTE, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.times(QueryConstants.NULL_BYTE, QueryConstants.NULL_BYTE); + dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_BYTE, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_BYTE, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_BYTE, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", 
expectedResult=" + expectedResult, ex); } } @@ -7495,42 +6976,38 @@ public static void test_times_byte_short() { dbResult = DBLanguageFunctionUtil.times(value1, value2); expectedResult = value1 * value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(zero1, value2); expectedResult = zero1 * value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_BYTE, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.times(QueryConstants.NULL_BYTE, QueryConstants.NULL_SHORT); + dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_BYTE, 
QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_BYTE, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_BYTE, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -7549,42 +7026,38 @@ public static void test_times_short_int() { dbResult = DBLanguageFunctionUtil.times(value1, value2); expectedResult = value1 * value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(zero1, value2); expectedResult = zero1 * value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; TestCase.assertEquals(description, 0, 
compareResult); dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_SHORT, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.times(QueryConstants.NULL_SHORT, QueryConstants.NULL_INT); + dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_SHORT, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_SHORT, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_SHORT, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -7603,42 +7076,38 @@ public static void test_times_short_double() { dbResult = DBLanguageFunctionUtil.times(value1, value2); expectedResult = value1 * value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; + description = "Double.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = 
- "Double.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(zero1, value2); expectedResult = zero1 * value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; + description = "Double.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_SHORT, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.times(QueryConstants.NULL_SHORT, QueryConstants.NULL_DOUBLE); + dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_SHORT, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_SHORT, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_SHORT, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + 
expectedResult, ex); } } @@ -7657,15 +7126,14 @@ public static void test_times_short_long() { dbResult = DBLanguageFunctionUtil.times(value1, value2); expectedResult = value1 * value2; compareResult = Long.compare(dbResult, expectedResult); - description = - "Long.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; + description = "Long.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(zero1, value2); @@ -7678,20 +7146,18 @@ public static void test_times_short_long() { expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.times(QueryConstants.NULL_SHORT, QueryConstants.NULL_LONG); + dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_SHORT, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_SHORT, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_SHORT, QueryConstants.NULL_LONG), 
QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -7710,42 +7176,38 @@ public static void test_times_short_float() { dbResult = DBLanguageFunctionUtil.times(value1, value2); expectedResult = value1 * value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; + description = "Float.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(zero1, value2); expectedResult = zero1 * value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; + description = "Float.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_SHORT, value2); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_FLOAT)"; + 
"Float.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.times(QueryConstants.NULL_SHORT, QueryConstants.NULL_FLOAT); + dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_SHORT, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_SHORT, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_SHORT, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -7764,42 +7226,38 @@ public static void test_times_short_char() { dbResult = DBLanguageFunctionUtil.times(value1, value2); expectedResult = value1 * value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = 
DBLanguageFunctionUtil.times(zero1, value2); expectedResult = zero1 * value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_SHORT, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.times(QueryConstants.NULL_SHORT, QueryConstants.NULL_CHAR); + dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_SHORT, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_SHORT, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_SHORT, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -7818,42 +7276,38 @@ public static void test_times_short_byte() { dbResult = DBLanguageFunctionUtil.times(value1, value2); expectedResult = value1 * value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.times(value1, 
value2), value1*value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(zero1, value2); expectedResult = zero1 * value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_SHORT, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.times(QueryConstants.NULL_SHORT, QueryConstants.NULL_BYTE); + dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_SHORT, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_SHORT, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; + 
"Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_SHORT, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -7872,42 +7326,38 @@ public static void test_times_short_short() { dbResult = DBLanguageFunctionUtil.times(value1, value2); expectedResult = value1 * value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.times(value1, value2), value1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.times(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(zero1, value2); expectedResult = zero1 * value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.times(zero1, value2), zero1*value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_SHORT, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - 
"Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.times(QueryConstants.NULL_SHORT, QueryConstants.NULL_SHORT); + dbResult = DBLanguageFunctionUtil.times(QueryConstants.NULL_SHORT, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_SHORT, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.times(QueryConstants.NULL_SHORT, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -7926,42 +7376,38 @@ public static void test_divide_int_int() { dbResult = DBLanguageFunctionUtil.divide(value1, value2); expectedResult = value1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_INT), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(value1, 
QueryConstants.NULL_INT), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(zero1, value2); expectedResult = zero1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_INT, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_INT, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_INT, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.divide(QueryConstants.NULL_INT, QueryConstants.NULL_INT); + dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_INT, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_INT, QueryConstants.NULL_INT), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_INT, QueryConstants.NULL_INT), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -7980,42 +7426,38 @@ public static void test_divide_int_double() { dbResult = DBLanguageFunctionUtil.divide(value1, value2); expectedResult = 
value1 / value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(zero1, value2); expectedResult = zero1 / value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_INT, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_INT, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_INT, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.divide(QueryConstants.NULL_INT, QueryConstants.NULL_DOUBLE); + dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_INT, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - 
"Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_INT, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_INT, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -8034,42 +7476,38 @@ public static void test_divide_int_long() { dbResult = DBLanguageFunctionUtil.divide(value1, value2); expectedResult = value1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(zero1, value2); expectedResult = zero1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = 
DBLanguageFunctionUtil.divide(QueryConstants.NULL_INT, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_INT, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_INT, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.divide(QueryConstants.NULL_INT, QueryConstants.NULL_LONG); + dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_INT, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_INT, QueryConstants.NULL_LONG), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_INT, QueryConstants.NULL_LONG), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -8088,42 +7526,38 @@ public static void test_divide_int_float() { dbResult = DBLanguageFunctionUtil.divide(value1, value2); expectedResult = value1 / value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/value2)"; + description = "Float.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - 
"Float.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(zero1, value2); expectedResult = zero1 / value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/value2)"; + description = "Float.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_INT, value2); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_INT, value2), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_INT, value2), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.divide(QueryConstants.NULL_INT, QueryConstants.NULL_FLOAT); + dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_INT, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_INT, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_INT, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ 
-8142,42 +7576,38 @@ public static void test_divide_int_char() { dbResult = DBLanguageFunctionUtil.divide(value1, value2); expectedResult = value1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(zero1, value2); expectedResult = zero1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_INT, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_INT, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_INT, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.divide(QueryConstants.NULL_INT, QueryConstants.NULL_CHAR); + dbResult = 
DBLanguageFunctionUtil.divide(QueryConstants.NULL_INT, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_INT, QueryConstants.NULL_CHAR), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_INT, QueryConstants.NULL_CHAR), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -8196,42 +7626,38 @@ public static void test_divide_int_byte() { dbResult = DBLanguageFunctionUtil.divide(value1, value2); expectedResult = value1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(zero1, value2); expectedResult = zero1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; + description = 
"Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_INT, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_INT, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_INT, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.divide(QueryConstants.NULL_INT, QueryConstants.NULL_BYTE); + dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_INT, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_INT, QueryConstants.NULL_BYTE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_INT, QueryConstants.NULL_BYTE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -8250,42 +7676,38 @@ public static void test_divide_int_short() { dbResult = DBLanguageFunctionUtil.divide(value1, value2); expectedResult = value1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = 
DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(zero1, value2); expectedResult = zero1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_INT, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_INT, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_INT, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.divide(QueryConstants.NULL_INT, QueryConstants.NULL_SHORT); + dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_INT, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_INT, QueryConstants.NULL_SHORT), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_INT, QueryConstants.NULL_SHORT), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception 
ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -8304,42 +7726,38 @@ public static void test_divide_double_int() { dbResult = DBLanguageFunctionUtil.divide(value1, value2); expectedResult = value1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_INT), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_INT), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(zero1, value2); expectedResult = zero1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_DOUBLE, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_DOUBLE, value2), 
QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.divide(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_INT); + dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_INT), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_INT), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -8358,42 +7776,38 @@ public static void test_divide_double_double() { dbResult = DBLanguageFunctionUtil.divide(value1, value2); expectedResult = value1 / value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(zero1, value2); expectedResult = zero1 / value2; 
compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_DOUBLE, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_DOUBLE, - QueryConstants.NULL_DOUBLE); + dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -8412,42 +7826,38 @@ public static void test_divide_double_long() { dbResult = DBLanguageFunctionUtil.divide(value1, value2); expectedResult = value1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; 
+ description = "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(zero1, value2); expectedResult = zero1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_DOUBLE, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.divide(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_LONG); + dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_LONG), QueryConstants.NULL_DOUBLE)"; + 
"Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_LONG), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -8466,42 +7876,38 @@ public static void test_divide_double_float() { dbResult = DBLanguageFunctionUtil.divide(value1, value2); expectedResult = value1 / value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(zero1, value2); expectedResult = zero1 / value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_DOUBLE, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - 
"Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_DOUBLE, - QueryConstants.NULL_FLOAT); + dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_FLOAT), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_FLOAT), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -8520,42 +7926,38 @@ public static void test_divide_double_char() { dbResult = DBLanguageFunctionUtil.divide(value1, value2); expectedResult = value1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_DOUBLE)"; + 
"Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(zero1, value2); expectedResult = zero1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_DOUBLE, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.divide(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_CHAR); + dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_CHAR), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_CHAR), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -8574,42 +7976,38 @@ public static void 
test_divide_double_byte() { dbResult = DBLanguageFunctionUtil.divide(value1, value2); expectedResult = value1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(zero1, value2); expectedResult = zero1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_DOUBLE, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.divide(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_BYTE); + dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_DOUBLE, 
QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_BYTE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_BYTE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -8628,42 +8026,38 @@ public static void test_divide_double_short() { dbResult = DBLanguageFunctionUtil.divide(value1, value2); expectedResult = value1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(zero1, value2); expectedResult = zero1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; + description = 
"Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_DOUBLE, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_DOUBLE, - QueryConstants.NULL_SHORT); + dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_SHORT), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_SHORT), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -8682,42 +8076,38 @@ public static void test_divide_long_int() { dbResult = DBLanguageFunctionUtil.divide(value1, value2); expectedResult = value1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = 
DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_INT), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_INT), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(zero1, value2); expectedResult = zero1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_LONG, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.divide(QueryConstants.NULL_LONG, QueryConstants.NULL_INT); + dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_LONG, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_LONG, QueryConstants.NULL_INT), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_LONG, QueryConstants.NULL_INT), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { 
throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -8736,42 +8126,38 @@ public static void test_divide_long_double() { dbResult = DBLanguageFunctionUtil.divide(value1, value2); expectedResult = value1 / value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(zero1, value2); expectedResult = zero1 / value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_LONG, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_DOUBLE)"; 
TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.divide(QueryConstants.NULL_LONG, QueryConstants.NULL_DOUBLE); + dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_LONG, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_LONG, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_LONG, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -8790,42 +8176,38 @@ public static void test_divide_long_long() { dbResult = DBLanguageFunctionUtil.divide(value1, value2); expectedResult = value1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(zero1, value2); expectedResult = zero1 / (double) value2; 
compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_LONG, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.divide(QueryConstants.NULL_LONG, QueryConstants.NULL_LONG); + dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_LONG, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_LONG, QueryConstants.NULL_LONG), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_LONG, QueryConstants.NULL_LONG), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -8844,42 +8226,38 @@ public static void test_divide_long_float() { dbResult = DBLanguageFunctionUtil.divide(value1, value2); expectedResult = value1 / value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/value2)"; + description = 
"Float.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(zero1, value2); expectedResult = zero1 / value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/value2)"; + description = "Float.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_LONG, value2); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.divide(QueryConstants.NULL_LONG, QueryConstants.NULL_FLOAT); + dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_LONG, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_LONG, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_LONG, 
QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -8898,42 +8276,38 @@ public static void test_divide_long_char() { dbResult = DBLanguageFunctionUtil.divide(value1, value2); expectedResult = value1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(zero1, value2); expectedResult = zero1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_LONG, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_LONG, 
value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.divide(QueryConstants.NULL_LONG, QueryConstants.NULL_CHAR); + dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_LONG, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_LONG, QueryConstants.NULL_CHAR), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_LONG, QueryConstants.NULL_CHAR), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -8952,42 +8326,38 @@ public static void test_divide_long_byte() { dbResult = DBLanguageFunctionUtil.divide(value1, value2); expectedResult = value1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_DOUBLE)"; 
TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(zero1, value2); expectedResult = zero1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_LONG, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.divide(QueryConstants.NULL_LONG, QueryConstants.NULL_BYTE); + dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_LONG, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_LONG, QueryConstants.NULL_BYTE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_LONG, QueryConstants.NULL_BYTE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -9006,42 +8376,38 @@ public static void test_divide_long_short() { dbResult = DBLanguageFunctionUtil.divide(value1, value2); expectedResult = value1 / (double) value2; compareResult = 
Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(zero1, value2); expectedResult = zero1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_LONG, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.divide(QueryConstants.NULL_LONG, QueryConstants.NULL_SHORT); + dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_LONG, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - 
"Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_LONG, QueryConstants.NULL_SHORT), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_LONG, QueryConstants.NULL_SHORT), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -9060,42 +8426,38 @@ public static void test_divide_float_int() { dbResult = DBLanguageFunctionUtil.divide(value1, value2); expectedResult = value1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_INT), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_INT), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(zero1, value2); expectedResult = zero1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = 
DBLanguageFunctionUtil.divide(QueryConstants.NULL_FLOAT, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.divide(QueryConstants.NULL_FLOAT, QueryConstants.NULL_INT); + dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_FLOAT, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_FLOAT, QueryConstants.NULL_INT), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_FLOAT, QueryConstants.NULL_INT), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -9114,42 +8476,38 @@ public static void test_divide_float_double() { dbResult = DBLanguageFunctionUtil.divide(value1, value2); expectedResult = value1 / value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - 
"Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(zero1, value2); expectedResult = zero1 / value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_FLOAT, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_FLOAT, - QueryConstants.NULL_DOUBLE); + dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_FLOAT, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_FLOAT, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_FLOAT, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", 
expectedResult=" + expectedResult, ex); } } @@ -9168,42 +8526,38 @@ public static void test_divide_float_long() { dbResult = DBLanguageFunctionUtil.divide(value1, value2); expectedResult = value1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(zero1, value2); expectedResult = zero1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_FLOAT, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.divide(QueryConstants.NULL_FLOAT, 
QueryConstants.NULL_LONG); + dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_FLOAT, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_FLOAT, QueryConstants.NULL_LONG), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_FLOAT, QueryConstants.NULL_LONG), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -9222,42 +8576,38 @@ public static void test_divide_float_float() { dbResult = DBLanguageFunctionUtil.divide(value1, value2); expectedResult = value1 / value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/value2)"; + description = "Float.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(zero1, value2); expectedResult = zero1 / value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/value2)"; + description = 
"Float.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_FLOAT, value2); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.divide(QueryConstants.NULL_FLOAT, QueryConstants.NULL_FLOAT); + dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_FLOAT, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_FLOAT, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_FLOAT, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -9276,42 +8626,38 @@ public static void test_divide_float_char() { dbResult = DBLanguageFunctionUtil.divide(value1, value2); expectedResult = value1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = 
DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(zero1, value2); expectedResult = zero1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_FLOAT, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.divide(QueryConstants.NULL_FLOAT, QueryConstants.NULL_CHAR); + dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_FLOAT, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_FLOAT, QueryConstants.NULL_CHAR), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_FLOAT, QueryConstants.NULL_CHAR), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch 
(Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -9330,42 +8676,38 @@ public static void test_divide_float_byte() { dbResult = DBLanguageFunctionUtil.divide(value1, value2); expectedResult = value1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(zero1, value2); expectedResult = zero1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_FLOAT, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_FLOAT, 
value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.divide(QueryConstants.NULL_FLOAT, QueryConstants.NULL_BYTE); + dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_FLOAT, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_FLOAT, QueryConstants.NULL_BYTE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_FLOAT, QueryConstants.NULL_BYTE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -9384,42 +8726,38 @@ public static void test_divide_float_short() { dbResult = DBLanguageFunctionUtil.divide(value1, value2); expectedResult = value1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(zero1, value2); 
expectedResult = zero1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_FLOAT, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.divide(QueryConstants.NULL_FLOAT, QueryConstants.NULL_SHORT); + dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_FLOAT, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_FLOAT, QueryConstants.NULL_SHORT), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_FLOAT, QueryConstants.NULL_SHORT), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -9438,42 +8776,38 @@ public static void test_divide_char_int() { dbResult = DBLanguageFunctionUtil.divide(value1, value2); expectedResult = value1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - 
"Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_INT), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_INT), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(zero1, value2); expectedResult = zero1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_CHAR, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.divide(QueryConstants.NULL_CHAR, QueryConstants.NULL_INT); + dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_CHAR, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_CHAR, 
QueryConstants.NULL_INT), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_CHAR, QueryConstants.NULL_INT), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -9492,42 +8826,38 @@ public static void test_divide_char_double() { dbResult = DBLanguageFunctionUtil.divide(value1, value2); expectedResult = value1 / value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(zero1, value2); expectedResult = zero1 / value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_CHAR, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); 
description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.divide(QueryConstants.NULL_CHAR, QueryConstants.NULL_DOUBLE); + dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_CHAR, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_CHAR, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_CHAR, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -9546,42 +8876,38 @@ public static void test_divide_char_long() { dbResult = DBLanguageFunctionUtil.divide(value1, value2); expectedResult = value1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_DOUBLE)"; + 
"Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(zero1, value2); expectedResult = zero1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_CHAR, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.divide(QueryConstants.NULL_CHAR, QueryConstants.NULL_LONG); + dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_CHAR, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_CHAR, QueryConstants.NULL_LONG), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_CHAR, QueryConstants.NULL_LONG), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -9600,42 +8926,38 @@ public static void test_divide_char_float() { dbResult 
= DBLanguageFunctionUtil.divide(value1, value2); expectedResult = value1 / value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/value2)"; + description = "Float.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(zero1, value2); expectedResult = zero1 / value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/value2)"; + description = "Float.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_CHAR, value2); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.divide(QueryConstants.NULL_CHAR, QueryConstants.NULL_FLOAT); + dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_CHAR, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, 
expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_CHAR, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_CHAR, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -9654,42 +8976,38 @@ public static void test_divide_char_char() { dbResult = DBLanguageFunctionUtil.divide(value1, value2); expectedResult = value1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(zero1, value2); expectedResult = zero1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = 
DBLanguageFunctionUtil.divide(QueryConstants.NULL_CHAR, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.divide(QueryConstants.NULL_CHAR, QueryConstants.NULL_CHAR); + dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_CHAR, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_CHAR, QueryConstants.NULL_CHAR), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_CHAR, QueryConstants.NULL_CHAR), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -9708,42 +9026,38 @@ public static void test_divide_char_byte() { dbResult = DBLanguageFunctionUtil.divide(value1, value2); expectedResult = value1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); 
description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(zero1, value2); expectedResult = zero1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_CHAR, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.divide(QueryConstants.NULL_CHAR, QueryConstants.NULL_BYTE); + dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_CHAR, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_CHAR, QueryConstants.NULL_BYTE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_CHAR, QueryConstants.NULL_BYTE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + 
dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -9762,42 +9076,38 @@ public static void test_divide_char_short() { dbResult = DBLanguageFunctionUtil.divide(value1, value2); expectedResult = value1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(zero1, value2); expectedResult = zero1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_CHAR, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.divide(QueryConstants.NULL_CHAR, 
QueryConstants.NULL_SHORT); + dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_CHAR, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_CHAR, QueryConstants.NULL_SHORT), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_CHAR, QueryConstants.NULL_SHORT), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -9816,42 +9126,38 @@ public static void test_divide_byte_int() { dbResult = DBLanguageFunctionUtil.divide(value1, value2); expectedResult = value1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_INT), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_INT), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(zero1, value2); expectedResult = zero1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), 
zero1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_BYTE, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.divide(QueryConstants.NULL_BYTE, QueryConstants.NULL_INT); + dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_BYTE, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_BYTE, QueryConstants.NULL_INT), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_BYTE, QueryConstants.NULL_INT), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -9870,42 +9176,38 @@ public static void test_divide_byte_double() { dbResult = DBLanguageFunctionUtil.divide(value1, value2); expectedResult = value1 / value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = 
DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(zero1, value2); expectedResult = zero1 / value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_BYTE, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.divide(QueryConstants.NULL_BYTE, QueryConstants.NULL_DOUBLE); + dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_BYTE, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_BYTE, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_BYTE, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw 
new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -9924,42 +9226,38 @@ public static void test_divide_byte_long() { dbResult = DBLanguageFunctionUtil.divide(value1, value2); expectedResult = value1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(zero1, value2); expectedResult = zero1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_BYTE, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_BYTE, value2), 
QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.divide(QueryConstants.NULL_BYTE, QueryConstants.NULL_LONG); + dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_BYTE, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_BYTE, QueryConstants.NULL_LONG), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_BYTE, QueryConstants.NULL_LONG), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -9978,42 +9276,38 @@ public static void test_divide_byte_float() { dbResult = DBLanguageFunctionUtil.divide(value1, value2); expectedResult = value1 / value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/value2)"; + description = "Float.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(zero1, value2); expectedResult = zero1 / value2; compareResult = 
Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/value2)"; + description = "Float.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_BYTE, value2); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.divide(QueryConstants.NULL_BYTE, QueryConstants.NULL_FLOAT); + dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_BYTE, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_BYTE, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_BYTE, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -10032,42 +9326,38 @@ public static void test_divide_byte_char() { dbResult = DBLanguageFunctionUtil.divide(value1, value2); expectedResult = value1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; + description = 
"Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(zero1, value2); expectedResult = zero1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_BYTE, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.divide(QueryConstants.NULL_BYTE, QueryConstants.NULL_CHAR); + dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_BYTE, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_BYTE, QueryConstants.NULL_CHAR), QueryConstants.NULL_DOUBLE)"; + 
"Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_BYTE, QueryConstants.NULL_CHAR), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -10086,42 +9376,38 @@ public static void test_divide_byte_byte() { dbResult = DBLanguageFunctionUtil.divide(value1, value2); expectedResult = value1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(zero1, value2); expectedResult = zero1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_BYTE, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - 
"Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.divide(QueryConstants.NULL_BYTE, QueryConstants.NULL_BYTE); + dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_BYTE, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_BYTE, QueryConstants.NULL_BYTE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_BYTE, QueryConstants.NULL_BYTE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -10140,42 +9426,38 @@ public static void test_divide_byte_short() { dbResult = DBLanguageFunctionUtil.divide(value1, value2); expectedResult = value1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_DOUBLE)"; + 
"Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(zero1, value2); expectedResult = zero1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_BYTE, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.divide(QueryConstants.NULL_BYTE, QueryConstants.NULL_SHORT); + dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_BYTE, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_BYTE, QueryConstants.NULL_SHORT), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_BYTE, QueryConstants.NULL_SHORT), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -10194,42 +9476,38 @@ public static void test_divide_short_int() { 
dbResult = DBLanguageFunctionUtil.divide(value1, value2); expectedResult = value1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_INT), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_INT), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(zero1, value2); expectedResult = zero1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_SHORT, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.divide(QueryConstants.NULL_SHORT, QueryConstants.NULL_INT); + dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_SHORT, QueryConstants.NULL_INT); expectedResult = 
QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_SHORT, QueryConstants.NULL_INT), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_SHORT, QueryConstants.NULL_INT), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -10248,42 +9526,38 @@ public static void test_divide_short_double() { dbResult = DBLanguageFunctionUtil.divide(value1, value2); expectedResult = value1 / value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(zero1, value2); expectedResult = zero1 / value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/value2)"; TestCase.assertEquals(description, 0, compareResult); 
dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_SHORT, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_SHORT, - QueryConstants.NULL_DOUBLE); + dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_SHORT, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_SHORT, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_SHORT, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -10302,42 +9576,38 @@ public static void test_divide_short_long() { dbResult = DBLanguageFunctionUtil.divide(value1, value2); expectedResult = value1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = 
Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(zero1, value2); expectedResult = zero1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_SHORT, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.divide(QueryConstants.NULL_SHORT, QueryConstants.NULL_LONG); + dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_SHORT, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_SHORT, QueryConstants.NULL_LONG), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_SHORT, QueryConstants.NULL_LONG), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - 
ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -10356,42 +9626,38 @@ public static void test_divide_short_float() { dbResult = DBLanguageFunctionUtil.divide(value1, value2); expectedResult = value1 / value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/value2)"; + description = "Float.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(zero1, value2); expectedResult = zero1 / value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/value2)"; + description = "Float.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_SHORT, value2); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.divide(QueryConstants.NULL_SHORT, QueryConstants.NULL_FLOAT); + 
dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_SHORT, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_SHORT, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_SHORT, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -10410,42 +9676,38 @@ public static void test_divide_short_char() { dbResult = DBLanguageFunctionUtil.divide(value1, value2); expectedResult = value1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(zero1, value2); expectedResult = zero1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; + 
description = "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_SHORT, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.divide(QueryConstants.NULL_SHORT, QueryConstants.NULL_CHAR); + dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_SHORT, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_SHORT, QueryConstants.NULL_CHAR), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_SHORT, QueryConstants.NULL_CHAR), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -10464,42 +9726,38 @@ public static void test_divide_short_byte() { dbResult = DBLanguageFunctionUtil.divide(value1, value2); expectedResult = value1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); 
dbResult = DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(zero1, value2); expectedResult = zero1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_SHORT, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.divide(QueryConstants.NULL_SHORT, QueryConstants.NULL_BYTE); + dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_SHORT, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_SHORT, QueryConstants.NULL_BYTE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_SHORT, QueryConstants.NULL_BYTE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } 
catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -10518,42 +9776,38 @@ public static void test_divide_short_short() { dbResult = DBLanguageFunctionUtil.divide(value1, value2); expectedResult = value1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(value1, value2), value1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(zero1, value2); expectedResult = zero1 / (double) value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; + description = "Double.compare(DBLanguageFunctionUtil.divide(zero1, value2), zero1/(double)value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_SHORT, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_DOUBLE)"; + 
"Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.divide(QueryConstants.NULL_SHORT, QueryConstants.NULL_SHORT); + dbResult = DBLanguageFunctionUtil.divide(QueryConstants.NULL_SHORT, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_SHORT, QueryConstants.NULL_SHORT), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.divide(QueryConstants.NULL_SHORT, QueryConstants.NULL_SHORT), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -10572,42 +9826,38 @@ public static void test_remainder_int_int() { dbResult = DBLanguageFunctionUtil.remainder(value1, value2); expectedResult = value1 % value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); 
dbResult = DBLanguageFunctionUtil.remainder(zero1, value2); expectedResult = zero1 % value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_INT, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_INT, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_INT, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.remainder(QueryConstants.NULL_INT, QueryConstants.NULL_INT); + dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_INT, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_INT, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_INT, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -10626,42 +9876,38 @@ public static void test_remainder_int_double() { dbResult = DBLanguageFunctionUtil.remainder(value1, value2); expectedResult = value1 % value2; compareResult = Double.compare(dbResult, expectedResult); - description = - 
"Double.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; + description = "Double.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(zero1, value2); expectedResult = zero1 % value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; + description = "Double.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_INT, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_INT, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_INT, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_INT, - QueryConstants.NULL_DOUBLE); + dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_INT, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_INT, 
QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_INT, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -10680,42 +9926,38 @@ public static void test_remainder_int_long() { dbResult = DBLanguageFunctionUtil.remainder(value1, value2); expectedResult = value1 % value2; compareResult = Long.compare(dbResult, expectedResult); - description = - "Long.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; + description = "Long.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(zero1, value2); expectedResult = zero1 % value2; compareResult = Long.compare(dbResult, expectedResult); - description = - "Long.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; + description = "Long.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_INT, value2); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, 
expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_INT, value2), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_INT, value2), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.remainder(QueryConstants.NULL_INT, QueryConstants.NULL_LONG); + dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_INT, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_INT, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_INT, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -10734,42 +9976,38 @@ public static void test_remainder_int_float() { dbResult = DBLanguageFunctionUtil.remainder(value1, value2); expectedResult = value1 % value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; + description = "Float.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + 
"Float.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(zero1, value2); expectedResult = zero1 % value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; + description = "Float.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_INT, value2); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_INT, value2), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_INT, value2), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_INT, - QueryConstants.NULL_FLOAT); + dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_INT, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_INT, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_INT, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -10788,42 +10026,38 @@ public static void test_remainder_int_char() { dbResult = 
DBLanguageFunctionUtil.remainder(value1, value2); expectedResult = value1 % value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(zero1, value2); expectedResult = zero1 % value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_INT, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_INT, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_INT, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.remainder(QueryConstants.NULL_INT, QueryConstants.NULL_CHAR); + dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_INT, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_INT; 
compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_INT, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_INT, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -10842,42 +10076,38 @@ public static void test_remainder_int_byte() { dbResult = DBLanguageFunctionUtil.remainder(value1, value2); expectedResult = value1 % value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(zero1, value2); expectedResult = zero1 % value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult 
= DBLanguageFunctionUtil.remainder(QueryConstants.NULL_INT, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_INT, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_INT, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.remainder(QueryConstants.NULL_INT, QueryConstants.NULL_BYTE); + dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_INT, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_INT, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_INT, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -10896,42 +10126,38 @@ public static void test_remainder_int_short() { dbResult = DBLanguageFunctionUtil.remainder(value1, value2); expectedResult = value1 % value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); 
description = - "Integer.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(zero1, value2); expectedResult = zero1 % value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_INT, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_INT, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_INT, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_INT, - QueryConstants.NULL_SHORT); + dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_INT, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_INT, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_INT, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + 
", expectedResult=" + expectedResult, ex); } } @@ -10950,42 +10176,38 @@ public static void test_remainder_double_int() { dbResult = DBLanguageFunctionUtil.remainder(value1, value2); expectedResult = value1 % value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; + description = "Double.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_INT), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_INT), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(zero1, value2); expectedResult = zero1 % value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; + description = "Double.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_DOUBLE, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_DOUBLE, - QueryConstants.NULL_INT); 
+ dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_INT), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_INT), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -11004,42 +10226,38 @@ public static void test_remainder_double_double() { dbResult = DBLanguageFunctionUtil.remainder(value1, value2); expectedResult = value1 % value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; + description = "Double.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(zero1, value2); expectedResult = zero1 % value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.remainder(zero1, value2), 
zero1%value2)"; + description = "Double.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_DOUBLE, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_DOUBLE, - QueryConstants.NULL_DOUBLE); + dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -11058,42 +10276,38 @@ public static void test_remainder_double_long() { dbResult = DBLanguageFunctionUtil.remainder(value1, value2); expectedResult = value1 % value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; + description = "Double.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; 
TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(zero1, value2); expectedResult = zero1 % value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; + description = "Double.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_DOUBLE, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_DOUBLE, - QueryConstants.NULL_LONG); + dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_LONG), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_LONG), 
QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -11112,42 +10326,38 @@ public static void test_remainder_double_float() { dbResult = DBLanguageFunctionUtil.remainder(value1, value2); expectedResult = value1 % value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; + description = "Double.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(zero1, value2); expectedResult = zero1 % value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; + description = "Double.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_DOUBLE, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_DOUBLE, value2), 
QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_DOUBLE, - QueryConstants.NULL_FLOAT); + dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_FLOAT), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_FLOAT), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -11166,42 +10376,38 @@ public static void test_remainder_double_char() { dbResult = DBLanguageFunctionUtil.remainder(value1, value2); expectedResult = value1 % value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; + description = "Double.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_CHAR), 
QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(zero1, value2); expectedResult = zero1 % value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; + description = "Double.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_DOUBLE, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_DOUBLE, - QueryConstants.NULL_CHAR); + dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_CHAR), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_CHAR), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -11220,42 +10426,38 @@ public static void test_remainder_double_byte() { dbResult = DBLanguageFunctionUtil.remainder(value1, value2); 
expectedResult = value1 % value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; + description = "Double.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(zero1, value2); expectedResult = zero1 % value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; + description = "Double.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_DOUBLE, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_DOUBLE, - QueryConstants.NULL_BYTE); + dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = 
Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_BYTE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_BYTE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -11274,42 +10476,38 @@ public static void test_remainder_double_short() { dbResult = DBLanguageFunctionUtil.remainder(value1, value2); expectedResult = value1 % value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; + description = "Double.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(zero1, value2); expectedResult = zero1 % value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; + description = "Double.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult 
= DBLanguageFunctionUtil.remainder(QueryConstants.NULL_DOUBLE, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_DOUBLE, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_DOUBLE, - QueryConstants.NULL_SHORT); + dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_SHORT), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_SHORT), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -11328,42 +10526,38 @@ public static void test_remainder_long_int() { dbResult = DBLanguageFunctionUtil.remainder(value1, value2); expectedResult = value1 % value2; compareResult = Long.compare(dbResult, expectedResult); - description = - "Long.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; + description = "Long.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_LONG; compareResult = 
Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_INT), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_INT), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(zero1, value2); expectedResult = zero1 % value2; compareResult = Long.compare(dbResult, expectedResult); - description = - "Long.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; + description = "Long.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_LONG, value2); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.remainder(QueryConstants.NULL_LONG, QueryConstants.NULL_INT); + dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_LONG, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_LONG, QueryConstants.NULL_INT), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_LONG, QueryConstants.NULL_INT), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + 
dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -11382,42 +10576,38 @@ public static void test_remainder_long_double() { dbResult = DBLanguageFunctionUtil.remainder(value1, value2); expectedResult = value1 % value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; + description = "Double.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(zero1, value2); expectedResult = zero1 % value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; + description = "Double.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_LONG, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_LONG, - 
QueryConstants.NULL_DOUBLE); + dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_LONG, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_LONG, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_LONG, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -11436,42 +10626,38 @@ public static void test_remainder_long_long() { dbResult = DBLanguageFunctionUtil.remainder(value1, value2); expectedResult = value1 % value2; compareResult = Long.compare(dbResult, expectedResult); - description = - "Long.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; + description = "Long.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(zero1, value2); expectedResult = zero1 % value2; compareResult = Long.compare(dbResult, expectedResult); - description = - "Long.compare(DBLanguageFunctionUtil.remainder(zero1, value2), 
zero1%value2)"; + description = "Long.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_LONG, value2); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_LONG, - QueryConstants.NULL_LONG); + dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_LONG, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_LONG, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_LONG, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -11490,42 +10676,38 @@ public static void test_remainder_long_float() { dbResult = DBLanguageFunctionUtil.remainder(value1, value2); expectedResult = value1 % value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; + description = "Float.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = 
DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(zero1, value2); expectedResult = zero1 % value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; + description = "Float.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_LONG, value2); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_LONG, - QueryConstants.NULL_FLOAT); + dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_LONG, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_LONG, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_LONG, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception 
ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -11544,42 +10726,38 @@ public static void test_remainder_long_char() { dbResult = DBLanguageFunctionUtil.remainder(value1, value2); expectedResult = value1 % value2; compareResult = Long.compare(dbResult, expectedResult); - description = - "Long.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; + description = "Long.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(zero1, value2); expectedResult = zero1 % value2; compareResult = Long.compare(dbResult, expectedResult); - description = - "Long.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; + description = "Long.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_LONG, value2); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_LONG)"; 
TestCase.assertEquals(description, 0, compareResult); - dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_LONG, - QueryConstants.NULL_CHAR); + dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_LONG, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_LONG, QueryConstants.NULL_CHAR), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_LONG, QueryConstants.NULL_CHAR), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -11598,42 +10776,38 @@ public static void test_remainder_long_byte() { dbResult = DBLanguageFunctionUtil.remainder(value1, value2); expectedResult = value1 % value2; compareResult = Long.compare(dbResult, expectedResult); - description = - "Long.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; + description = "Long.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(zero1, value2); expectedResult = zero1 % value2; compareResult = Long.compare(dbResult, 
expectedResult); - description = - "Long.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; + description = "Long.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_LONG, value2); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_LONG, - QueryConstants.NULL_BYTE); + dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_LONG, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_LONG, QueryConstants.NULL_BYTE), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_LONG, QueryConstants.NULL_BYTE), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -11652,42 +10826,38 @@ public static void test_remainder_long_short() { dbResult = DBLanguageFunctionUtil.remainder(value1, value2); expectedResult = value1 % value2; compareResult = Long.compare(dbResult, expectedResult); - description = - "Long.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; + description = "Long.compare(DBLanguageFunctionUtil.remainder(value1, 
value2), value1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(zero1, value2); expectedResult = zero1 % value2; compareResult = Long.compare(dbResult, expectedResult); - description = - "Long.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; + description = "Long.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_LONG, value2); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_LONG, value2), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_LONG, - QueryConstants.NULL_SHORT); + dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_LONG, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_LONG, QueryConstants.NULL_SHORT), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_LONG, QueryConstants.NULL_SHORT), QueryConstants.NULL_LONG)"; 
TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -11706,42 +10876,38 @@ public static void test_remainder_float_int() { dbResult = DBLanguageFunctionUtil.remainder(value1, value2); expectedResult = value1 % value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; + description = "Float.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_INT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_INT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(zero1, value2); expectedResult = zero1 % value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; + description = "Float.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_FLOAT, value2); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_FLOAT)"; + 
"Float.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_FLOAT, - QueryConstants.NULL_INT); + dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_FLOAT, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_FLOAT, QueryConstants.NULL_INT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_FLOAT, QueryConstants.NULL_INT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -11760,42 +10926,38 @@ public static void test_remainder_float_double() { dbResult = DBLanguageFunctionUtil.remainder(value1, value2); expectedResult = value1 % value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; + description = "Double.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, 
compareResult); dbResult = DBLanguageFunctionUtil.remainder(zero1, value2); expectedResult = zero1 % value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; + description = "Double.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_FLOAT, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_FLOAT, - QueryConstants.NULL_DOUBLE); + dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_FLOAT, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_FLOAT, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_FLOAT, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -11814,42 +10976,38 @@ public static void test_remainder_float_long() { dbResult = DBLanguageFunctionUtil.remainder(value1, value2); expectedResult = value1 % value2; compareResult = 
Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; + description = "Float.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(zero1, value2); expectedResult = zero1 % value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; + description = "Float.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_FLOAT, value2); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_FLOAT, - QueryConstants.NULL_LONG); + dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_FLOAT, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - 
"Float.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_FLOAT, QueryConstants.NULL_LONG), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_FLOAT, QueryConstants.NULL_LONG), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -11868,42 +11026,38 @@ public static void test_remainder_float_float() { dbResult = DBLanguageFunctionUtil.remainder(value1, value2); expectedResult = value1 % value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; + description = "Float.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(zero1, value2); expectedResult = zero1 % value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; + description = "Float.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_FLOAT, value2); 
expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_FLOAT, - QueryConstants.NULL_FLOAT); + dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_FLOAT, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_FLOAT, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_FLOAT, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -11922,42 +11076,38 @@ public static void test_remainder_float_char() { dbResult = DBLanguageFunctionUtil.remainder(value1, value2); expectedResult = value1 % value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; + description = "Float.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - 
"Float.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(zero1, value2); expectedResult = zero1 % value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; + description = "Float.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_FLOAT, value2); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_FLOAT, - QueryConstants.NULL_CHAR); + dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_FLOAT, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_FLOAT, QueryConstants.NULL_CHAR), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_FLOAT, QueryConstants.NULL_CHAR), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", 
expectedResult=" + expectedResult, ex); } } @@ -11976,42 +11126,38 @@ public static void test_remainder_float_byte() { dbResult = DBLanguageFunctionUtil.remainder(value1, value2); expectedResult = value1 % value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; + description = "Float.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(zero1, value2); expectedResult = zero1 % value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; + description = "Float.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_FLOAT, value2); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_FLOAT, - QueryConstants.NULL_BYTE); + dbResult = 
DBLanguageFunctionUtil.remainder(QueryConstants.NULL_FLOAT, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_FLOAT, QueryConstants.NULL_BYTE), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_FLOAT, QueryConstants.NULL_BYTE), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -12030,42 +11176,38 @@ public static void test_remainder_float_short() { dbResult = DBLanguageFunctionUtil.remainder(value1, value2); expectedResult = value1 % value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; + description = "Float.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(zero1, value2); expectedResult = zero1 % value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; + description = 
"Float.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_FLOAT, value2); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_FLOAT, value2), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_FLOAT, - QueryConstants.NULL_SHORT); + dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_FLOAT, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_FLOAT, QueryConstants.NULL_SHORT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_FLOAT, QueryConstants.NULL_SHORT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -12084,42 +11226,38 @@ public static void test_remainder_char_int() { dbResult = DBLanguageFunctionUtil.remainder(value1, value2); expectedResult = value1 % value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = 
DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(zero1, value2); expectedResult = zero1 % value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_CHAR, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.remainder(QueryConstants.NULL_CHAR, QueryConstants.NULL_INT); + dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_CHAR, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_CHAR, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_CHAR, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { 
throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -12138,42 +11276,38 @@ public static void test_remainder_char_double() { dbResult = DBLanguageFunctionUtil.remainder(value1, value2); expectedResult = value1 % value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; + description = "Double.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(zero1, value2); expectedResult = zero1 % value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; + description = "Double.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_CHAR, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_CHAR, value2), 
QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_CHAR, - QueryConstants.NULL_DOUBLE); + dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_CHAR, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_CHAR, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_CHAR, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -12192,42 +11326,38 @@ public static void test_remainder_char_long() { dbResult = DBLanguageFunctionUtil.remainder(value1, value2); expectedResult = value1 % value2; compareResult = Long.compare(dbResult, expectedResult); - description = - "Long.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; + description = "Long.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(zero1, value2); expectedResult = zero1 % 
value2; compareResult = Long.compare(dbResult, expectedResult); - description = - "Long.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; + description = "Long.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_CHAR, value2); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_CHAR, - QueryConstants.NULL_LONG); + dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_CHAR, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_CHAR, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_CHAR, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -12246,42 +11376,38 @@ public static void test_remainder_char_float() { dbResult = DBLanguageFunctionUtil.remainder(value1, value2); expectedResult = value1 % value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; + description = 
"Float.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(zero1, value2); expectedResult = zero1 % value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; + description = "Float.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_CHAR, value2); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_CHAR, - QueryConstants.NULL_FLOAT); + dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_CHAR, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_CHAR, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + 
"Float.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_CHAR, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -12300,42 +11426,38 @@ public static void test_remainder_char_char() { dbResult = DBLanguageFunctionUtil.remainder(value1, value2); expectedResult = value1 % value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(zero1, value2); expectedResult = zero1 % value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_CHAR, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - 
"Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_CHAR, - QueryConstants.NULL_CHAR); + dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_CHAR, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_CHAR, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_CHAR, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -12354,42 +11476,38 @@ public static void test_remainder_char_byte() { dbResult = DBLanguageFunctionUtil.remainder(value1, value2); expectedResult = value1 % value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; + 
"Integer.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(zero1, value2); expectedResult = zero1 % value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_CHAR, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_CHAR, - QueryConstants.NULL_BYTE); + dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_CHAR, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_CHAR, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_CHAR, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -12408,42 +11526,38 @@ public static void test_remainder_char_short() { 
dbResult = DBLanguageFunctionUtil.remainder(value1, value2); expectedResult = value1 % value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(zero1, value2); expectedResult = zero1 % value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_CHAR, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_CHAR, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_CHAR, - QueryConstants.NULL_SHORT); + dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_CHAR, QueryConstants.NULL_SHORT); expectedResult = 
QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_CHAR, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_CHAR, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -12462,42 +11576,38 @@ public static void test_remainder_byte_int() { dbResult = DBLanguageFunctionUtil.remainder(value1, value2); expectedResult = value1 % value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(zero1, value2); expectedResult = zero1 % value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; TestCase.assertEquals(description, 
0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_BYTE, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = - DBLanguageFunctionUtil.remainder(QueryConstants.NULL_BYTE, QueryConstants.NULL_INT); + dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_BYTE, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_BYTE, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_BYTE, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -12516,42 +11626,38 @@ public static void test_remainder_byte_double() { dbResult = DBLanguageFunctionUtil.remainder(value1, value2); expectedResult = value1 % value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; + description = "Double.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = 
Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(zero1, value2); expectedResult = zero1 % value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; + description = "Double.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_BYTE, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_BYTE, - QueryConstants.NULL_DOUBLE); + dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_BYTE, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_BYTE, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_BYTE, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + 
expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -12570,42 +11676,38 @@ public static void test_remainder_byte_long() { dbResult = DBLanguageFunctionUtil.remainder(value1, value2); expectedResult = value1 % value2; compareResult = Long.compare(dbResult, expectedResult); - description = - "Long.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; + description = "Long.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(zero1, value2); expectedResult = zero1 % value2; compareResult = Long.compare(dbResult, expectedResult); - description = - "Long.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; + description = "Long.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_BYTE, value2); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_BYTE, 
- QueryConstants.NULL_LONG); + dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_BYTE, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_BYTE, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_BYTE, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -12624,42 +11726,38 @@ public static void test_remainder_byte_float() { dbResult = DBLanguageFunctionUtil.remainder(value1, value2); expectedResult = value1 % value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; + description = "Float.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(zero1, value2); expectedResult = zero1 % value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; 
+ description = "Float.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_BYTE, value2); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_BYTE, - QueryConstants.NULL_FLOAT); + dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_BYTE, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_BYTE, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_BYTE, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -12678,42 +11776,38 @@ public static void test_remainder_byte_char() { dbResult = DBLanguageFunctionUtil.remainder(value1, value2); expectedResult = value1 % value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = 
DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(zero1, value2); expectedResult = zero1 % value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_BYTE, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_BYTE, - QueryConstants.NULL_CHAR); + dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_BYTE, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_BYTE, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_BYTE, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception 
ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -12732,42 +11826,38 @@ public static void test_remainder_byte_byte() { dbResult = DBLanguageFunctionUtil.remainder(value1, value2); expectedResult = value1 % value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(zero1, value2); expectedResult = zero1 % value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_BYTE, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_BYTE, value2), 
QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_BYTE, - QueryConstants.NULL_BYTE); + dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_BYTE, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_BYTE, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_BYTE, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -12786,42 +11876,38 @@ public static void test_remainder_byte_short() { dbResult = DBLanguageFunctionUtil.remainder(value1, value2); expectedResult = value1 % value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(zero1, value2); expectedResult = zero1 
% value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_BYTE, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_BYTE, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_BYTE, - QueryConstants.NULL_SHORT); + dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_BYTE, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_BYTE, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_BYTE, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -12840,42 +11926,38 @@ public static void test_remainder_short_int() { dbResult = DBLanguageFunctionUtil.remainder(value1, value2); expectedResult = value1 % value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.remainder(value1, value2), 
value1%value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(zero1, value2); expectedResult = zero1 % value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_SHORT, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_SHORT, - QueryConstants.NULL_INT); + dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_SHORT, QueryConstants.NULL_INT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_SHORT, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; + 
"Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_SHORT, QueryConstants.NULL_INT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -12894,42 +11976,38 @@ public static void test_remainder_short_double() { dbResult = DBLanguageFunctionUtil.remainder(value1, value2); expectedResult = value1 % value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; + description = "Double.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(zero1, value2); expectedResult = zero1 % value2; compareResult = Double.compare(dbResult, expectedResult); - description = - "Double.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; + description = "Double.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_SHORT, value2); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - 
"Double.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_SHORT, - QueryConstants.NULL_DOUBLE); + dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_SHORT, QueryConstants.NULL_DOUBLE); expectedResult = QueryConstants.NULL_DOUBLE; compareResult = Double.compare(dbResult, expectedResult); description = - "Double.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_SHORT, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; + "Double.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_SHORT, QueryConstants.NULL_DOUBLE), QueryConstants.NULL_DOUBLE)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -12948,42 +12026,38 @@ public static void test_remainder_short_long() { dbResult = DBLanguageFunctionUtil.remainder(value1, value2); expectedResult = value1 % value2; compareResult = Long.compare(dbResult, expectedResult); - description = - "Long.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; + description = "Long.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; + 
"Long.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(zero1, value2); expectedResult = zero1 % value2; compareResult = Long.compare(dbResult, expectedResult); - description = - "Long.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; + description = "Long.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_SHORT, value2); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_SHORT, - QueryConstants.NULL_LONG); + dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_SHORT, QueryConstants.NULL_LONG); expectedResult = QueryConstants.NULL_LONG; compareResult = Long.compare(dbResult, expectedResult); description = - "Long.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_SHORT, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; + "Long.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_SHORT, QueryConstants.NULL_LONG), QueryConstants.NULL_LONG)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -13002,42 +12076,38 @@ public static void test_remainder_short_float() { dbResult = 
DBLanguageFunctionUtil.remainder(value1, value2); expectedResult = value1 % value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; + description = "Float.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(zero1, value2); expectedResult = zero1 % value2; compareResult = Float.compare(dbResult, expectedResult); - description = - "Float.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; + description = "Float.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_SHORT, value2); expectedResult = QueryConstants.NULL_FLOAT; compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_SHORT, - QueryConstants.NULL_FLOAT); + dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_SHORT, QueryConstants.NULL_FLOAT); expectedResult = QueryConstants.NULL_FLOAT; 
compareResult = Float.compare(dbResult, expectedResult); description = - "Float.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_SHORT, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; + "Float.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_SHORT, QueryConstants.NULL_FLOAT), QueryConstants.NULL_FLOAT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -13056,42 +12126,38 @@ public static void test_remainder_short_char() { dbResult = DBLanguageFunctionUtil.remainder(value1, value2); expectedResult = value1 % value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(zero1, value2); expectedResult = zero1 % value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; TestCase.assertEquals(description, 0, compareResult); 
dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_SHORT, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_SHORT, - QueryConstants.NULL_CHAR); + dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_SHORT, QueryConstants.NULL_CHAR); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_SHORT, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_SHORT, QueryConstants.NULL_CHAR), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -13110,42 +12176,38 @@ public static void test_remainder_short_byte() { dbResult = DBLanguageFunctionUtil.remainder(value1, value2); expectedResult = value1 % value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_INT; compareResult = 
Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(zero1, value2); expectedResult = zero1 % value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_SHORT, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_SHORT, - QueryConstants.NULL_BYTE); + dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_SHORT, QueryConstants.NULL_BYTE); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_SHORT, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_SHORT, QueryConstants.NULL_BYTE), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - 
ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -13164,42 +12226,38 @@ public static void test_remainder_short_short() { dbResult = DBLanguageFunctionUtil.remainder(value1, value2); expectedResult = value1 % value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.remainder(value1, value2), value1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.remainder(value1, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(zero1, value2); expectedResult = zero1 % value2; compareResult = Integer.compare(dbResult, expectedResult); - description = - "Integer.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; + description = "Integer.compare(DBLanguageFunctionUtil.remainder(zero1, value2), zero1%value2)"; TestCase.assertEquals(description, 0, compareResult); dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_SHORT, value2); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_SHORT, value2), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); - dbResult = 
DBLanguageFunctionUtil.remainder(QueryConstants.NULL_SHORT, - QueryConstants.NULL_SHORT); + dbResult = DBLanguageFunctionUtil.remainder(QueryConstants.NULL_SHORT, QueryConstants.NULL_SHORT); expectedResult = QueryConstants.NULL_INT; compareResult = Integer.compare(dbResult, expectedResult); description = - "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_SHORT, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; + "Integer.compare(DBLanguageFunctionUtil.remainder(QueryConstants.NULL_SHORT, QueryConstants.NULL_SHORT), QueryConstants.NULL_INT)"; TestCase.assertEquals(description, 0, compareResult); } catch (Exception ex) { throw new RuntimeException( - "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, - ex); + "Comparison failure: dbResult=" + dbResult + ", expectedResult=" + expectedResult, ex); } } @@ -13216,8 +12274,7 @@ public static void test_compare_int_int_compare() { TestCase.assertEquals(1, DBLanguageFunctionUtil.compareTo(Double.NaN, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero1, zero2)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero2, zero1)); - TestCase.assertEquals(0, - DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_INT, QueryConstants.NULL_INT)); + TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_INT, QueryConstants.NULL_INT)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value1, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value2, value2)); TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(value1, value2)); @@ -13239,8 +12296,7 @@ public static void test_compare_int_double_compare() { TestCase.assertEquals(1, DBLanguageFunctionUtil.compareTo(Double.NaN, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero1, zero2)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero2, zero1)); - TestCase.assertEquals(0, - 
DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_INT, QueryConstants.NULL_DOUBLE)); + TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_INT, QueryConstants.NULL_DOUBLE)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value1, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value2, value2)); TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(value1, value2)); @@ -13262,8 +12318,7 @@ public static void test_compare_int_long_compare() { TestCase.assertEquals(1, DBLanguageFunctionUtil.compareTo(Double.NaN, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero1, zero2)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero2, zero1)); - TestCase.assertEquals(0, - DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_INT, QueryConstants.NULL_LONG)); + TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_INT, QueryConstants.NULL_LONG)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value1, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value2, value2)); TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(value1, value2)); @@ -13285,8 +12340,7 @@ public static void test_compare_int_float_compare() { TestCase.assertEquals(1, DBLanguageFunctionUtil.compareTo(Double.NaN, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero1, zero2)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero2, zero1)); - TestCase.assertEquals(0, - DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_INT, QueryConstants.NULL_FLOAT)); + TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_INT, QueryConstants.NULL_FLOAT)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value1, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value2, value2)); TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(value1, value2)); @@ -13308,8 +12362,7 @@ public static 
void test_compare_int_char_compare() { TestCase.assertEquals(1, DBLanguageFunctionUtil.compareTo(Double.NaN, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero1, zero2)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero2, zero1)); - TestCase.assertEquals(0, - DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_INT, QueryConstants.NULL_CHAR)); + TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_INT, QueryConstants.NULL_CHAR)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value1, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value2, value2)); TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(value1, value2)); @@ -13331,8 +12384,7 @@ public static void test_compare_int_byte_compare() { TestCase.assertEquals(1, DBLanguageFunctionUtil.compareTo(Double.NaN, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero1, zero2)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero2, zero1)); - TestCase.assertEquals(0, - DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_INT, QueryConstants.NULL_BYTE)); + TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_INT, QueryConstants.NULL_BYTE)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value1, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value2, value2)); TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(value1, value2)); @@ -13354,8 +12406,7 @@ public static void test_compare_int_short_compare() { TestCase.assertEquals(1, DBLanguageFunctionUtil.compareTo(Double.NaN, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero1, zero2)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero2, zero1)); - TestCase.assertEquals(0, - DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_INT, QueryConstants.NULL_SHORT)); + TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_INT, 
QueryConstants.NULL_SHORT)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value1, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value2, value2)); TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(value1, value2)); @@ -13377,8 +12428,7 @@ public static void test_compare_double_int_compare() { TestCase.assertEquals(1, DBLanguageFunctionUtil.compareTo(Double.NaN, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero1, zero2)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero2, zero1)); - TestCase.assertEquals(0, - DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_INT)); + TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_INT)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value1, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value2, value2)); TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(value1, value2)); @@ -13400,8 +12450,8 @@ public static void test_compare_double_double_compare() { TestCase.assertEquals(1, DBLanguageFunctionUtil.compareTo(Double.NaN, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero1, zero2)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero2, zero1)); - TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_DOUBLE, - QueryConstants.NULL_DOUBLE)); + TestCase.assertEquals(0, + DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_DOUBLE)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value1, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value2, value2)); TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(value1, value2)); @@ -13424,7 +12474,7 @@ public static void test_compare_double_long_compare() { TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero1, zero2)); TestCase.assertEquals(0, 
DBLanguageFunctionUtil.compareTo(zero2, zero1)); TestCase.assertEquals(0, - DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_LONG)); + DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_LONG)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value1, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value2, value2)); TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(value1, value2)); @@ -13446,8 +12496,8 @@ public static void test_compare_double_float_compare() { TestCase.assertEquals(1, DBLanguageFunctionUtil.compareTo(Double.NaN, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero1, zero2)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero2, zero1)); - TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_DOUBLE, - QueryConstants.NULL_FLOAT)); + TestCase.assertEquals(0, + DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_FLOAT)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value1, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value2, value2)); TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(value1, value2)); @@ -13470,7 +12520,7 @@ public static void test_compare_double_char_compare() { TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero1, zero2)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero2, zero1)); TestCase.assertEquals(0, - DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_CHAR)); + DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_CHAR)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value1, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value2, value2)); TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(value1, value2)); @@ -13493,7 +12543,7 @@ public static void 
test_compare_double_byte_compare() { TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero1, zero2)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero2, zero1)); TestCase.assertEquals(0, - DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_BYTE)); + DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_BYTE)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value1, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value2, value2)); TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(value1, value2)); @@ -13515,8 +12565,8 @@ public static void test_compare_double_short_compare() { TestCase.assertEquals(1, DBLanguageFunctionUtil.compareTo(Double.NaN, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero1, zero2)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero2, zero1)); - TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_DOUBLE, - QueryConstants.NULL_SHORT)); + TestCase.assertEquals(0, + DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_SHORT)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value1, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value2, value2)); TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(value1, value2)); @@ -13538,8 +12588,7 @@ public static void test_compare_long_int_compare() { TestCase.assertEquals(1, DBLanguageFunctionUtil.compareTo(Double.NaN, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero1, zero2)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero2, zero1)); - TestCase.assertEquals(0, - DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_LONG, QueryConstants.NULL_INT)); + TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_LONG, QueryConstants.NULL_INT)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value1, 
value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value2, value2)); TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(value1, value2)); @@ -13562,7 +12611,7 @@ public static void test_compare_long_double_compare() { TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero1, zero2)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero2, zero1)); TestCase.assertEquals(0, - DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_LONG, QueryConstants.NULL_DOUBLE)); + DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_LONG, QueryConstants.NULL_DOUBLE)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value1, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value2, value2)); TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(value1, value2)); @@ -13584,8 +12633,7 @@ public static void test_compare_long_long_compare() { TestCase.assertEquals(1, DBLanguageFunctionUtil.compareTo(Double.NaN, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero1, zero2)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero2, zero1)); - TestCase.assertEquals(0, - DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_LONG, QueryConstants.NULL_LONG)); + TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_LONG, QueryConstants.NULL_LONG)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value1, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value2, value2)); TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(value1, value2)); @@ -13607,8 +12655,7 @@ public static void test_compare_long_float_compare() { TestCase.assertEquals(1, DBLanguageFunctionUtil.compareTo(Double.NaN, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero1, zero2)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero2, zero1)); - TestCase.assertEquals(0, - DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_LONG, 
QueryConstants.NULL_FLOAT)); + TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_LONG, QueryConstants.NULL_FLOAT)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value1, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value2, value2)); TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(value1, value2)); @@ -13630,8 +12677,7 @@ public static void test_compare_long_char_compare() { TestCase.assertEquals(1, DBLanguageFunctionUtil.compareTo(Double.NaN, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero1, zero2)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero2, zero1)); - TestCase.assertEquals(0, - DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_LONG, QueryConstants.NULL_CHAR)); + TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_LONG, QueryConstants.NULL_CHAR)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value1, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value2, value2)); TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(value1, value2)); @@ -13653,8 +12699,7 @@ public static void test_compare_long_byte_compare() { TestCase.assertEquals(1, DBLanguageFunctionUtil.compareTo(Double.NaN, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero1, zero2)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero2, zero1)); - TestCase.assertEquals(0, - DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_LONG, QueryConstants.NULL_BYTE)); + TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_LONG, QueryConstants.NULL_BYTE)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value1, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value2, value2)); TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(value1, value2)); @@ -13676,8 +12721,7 @@ public static void test_compare_long_short_compare() { 
TestCase.assertEquals(1, DBLanguageFunctionUtil.compareTo(Double.NaN, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero1, zero2)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero2, zero1)); - TestCase.assertEquals(0, - DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_LONG, QueryConstants.NULL_SHORT)); + TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_LONG, QueryConstants.NULL_SHORT)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value1, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value2, value2)); TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(value1, value2)); @@ -13699,8 +12743,7 @@ public static void test_compare_float_int_compare() { TestCase.assertEquals(1, DBLanguageFunctionUtil.compareTo(Double.NaN, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero1, zero2)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero2, zero1)); - TestCase.assertEquals(0, - DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_FLOAT, QueryConstants.NULL_INT)); + TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_FLOAT, QueryConstants.NULL_INT)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value1, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value2, value2)); TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(value1, value2)); @@ -13722,8 +12765,8 @@ public static void test_compare_float_double_compare() { TestCase.assertEquals(1, DBLanguageFunctionUtil.compareTo(Double.NaN, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero1, zero2)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero2, zero1)); - TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_FLOAT, - QueryConstants.NULL_DOUBLE)); + TestCase.assertEquals(0, + DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_FLOAT, 
QueryConstants.NULL_DOUBLE)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value1, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value2, value2)); TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(value1, value2)); @@ -13745,8 +12788,7 @@ public static void test_compare_float_long_compare() { TestCase.assertEquals(1, DBLanguageFunctionUtil.compareTo(Double.NaN, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero1, zero2)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero2, zero1)); - TestCase.assertEquals(0, - DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_FLOAT, QueryConstants.NULL_LONG)); + TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_FLOAT, QueryConstants.NULL_LONG)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value1, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value2, value2)); TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(value1, value2)); @@ -13769,7 +12811,7 @@ public static void test_compare_float_float_compare() { TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero1, zero2)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero2, zero1)); TestCase.assertEquals(0, - DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_FLOAT, QueryConstants.NULL_FLOAT)); + DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_FLOAT, QueryConstants.NULL_FLOAT)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value1, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value2, value2)); TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(value1, value2)); @@ -13791,8 +12833,7 @@ public static void test_compare_float_char_compare() { TestCase.assertEquals(1, DBLanguageFunctionUtil.compareTo(Double.NaN, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero1, zero2)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero2, zero1)); - 
TestCase.assertEquals(0, - DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_FLOAT, QueryConstants.NULL_CHAR)); + TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_FLOAT, QueryConstants.NULL_CHAR)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value1, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value2, value2)); TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(value1, value2)); @@ -13814,8 +12855,7 @@ public static void test_compare_float_byte_compare() { TestCase.assertEquals(1, DBLanguageFunctionUtil.compareTo(Double.NaN, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero1, zero2)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero2, zero1)); - TestCase.assertEquals(0, - DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_FLOAT, QueryConstants.NULL_BYTE)); + TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_FLOAT, QueryConstants.NULL_BYTE)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value1, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value2, value2)); TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(value1, value2)); @@ -13838,7 +12878,7 @@ public static void test_compare_float_short_compare() { TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero1, zero2)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero2, zero1)); TestCase.assertEquals(0, - DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_FLOAT, QueryConstants.NULL_SHORT)); + DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_FLOAT, QueryConstants.NULL_SHORT)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value1, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value2, value2)); TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(value1, value2)); @@ -13860,8 +12900,7 @@ public static void test_compare_char_int_compare() { TestCase.assertEquals(1, 
DBLanguageFunctionUtil.compareTo(Double.NaN, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero1, zero2)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero2, zero1)); - TestCase.assertEquals(0, - DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_CHAR, QueryConstants.NULL_INT)); + TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_CHAR, QueryConstants.NULL_INT)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value1, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value2, value2)); TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(value1, value2)); @@ -13884,7 +12923,7 @@ public static void test_compare_char_double_compare() { TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero1, zero2)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero2, zero1)); TestCase.assertEquals(0, - DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_CHAR, QueryConstants.NULL_DOUBLE)); + DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_CHAR, QueryConstants.NULL_DOUBLE)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value1, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value2, value2)); TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(value1, value2)); @@ -13906,8 +12945,7 @@ public static void test_compare_char_long_compare() { TestCase.assertEquals(1, DBLanguageFunctionUtil.compareTo(Double.NaN, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero1, zero2)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero2, zero1)); - TestCase.assertEquals(0, - DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_CHAR, QueryConstants.NULL_LONG)); + TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_CHAR, QueryConstants.NULL_LONG)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value1, value1)); TestCase.assertEquals(0, 
DBLanguageFunctionUtil.compareTo(value2, value2)); TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(value1, value2)); @@ -13929,8 +12967,7 @@ public static void test_compare_char_float_compare() { TestCase.assertEquals(1, DBLanguageFunctionUtil.compareTo(Double.NaN, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero1, zero2)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero2, zero1)); - TestCase.assertEquals(0, - DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_CHAR, QueryConstants.NULL_FLOAT)); + TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_CHAR, QueryConstants.NULL_FLOAT)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value1, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value2, value2)); TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(value1, value2)); @@ -13952,8 +12989,7 @@ public static void test_compare_char_char_compare() { TestCase.assertEquals(1, DBLanguageFunctionUtil.compareTo(Double.NaN, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero1, zero2)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero2, zero1)); - TestCase.assertEquals(0, - DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_CHAR, QueryConstants.NULL_CHAR)); + TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_CHAR, QueryConstants.NULL_CHAR)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value1, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value2, value2)); TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(value1, value2)); @@ -13975,8 +13011,7 @@ public static void test_compare_char_byte_compare() { TestCase.assertEquals(1, DBLanguageFunctionUtil.compareTo(Double.NaN, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero1, zero2)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero2, zero1)); - TestCase.assertEquals(0, - 
DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_CHAR, QueryConstants.NULL_BYTE)); + TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_CHAR, QueryConstants.NULL_BYTE)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value1, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value2, value2)); TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(value1, value2)); @@ -13998,8 +13033,7 @@ public static void test_compare_char_short_compare() { TestCase.assertEquals(1, DBLanguageFunctionUtil.compareTo(Double.NaN, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero1, zero2)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero2, zero1)); - TestCase.assertEquals(0, - DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_CHAR, QueryConstants.NULL_SHORT)); + TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_CHAR, QueryConstants.NULL_SHORT)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value1, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value2, value2)); TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(value1, value2)); @@ -14021,8 +13055,7 @@ public static void test_compare_byte_int_compare() { TestCase.assertEquals(1, DBLanguageFunctionUtil.compareTo(Double.NaN, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero1, zero2)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero2, zero1)); - TestCase.assertEquals(0, - DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_BYTE, QueryConstants.NULL_INT)); + TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_BYTE, QueryConstants.NULL_INT)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value1, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value2, value2)); TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(value1, value2)); @@ -14045,7 +13078,7 @@ public static 
void test_compare_byte_double_compare() { TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero1, zero2)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero2, zero1)); TestCase.assertEquals(0, - DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_BYTE, QueryConstants.NULL_DOUBLE)); + DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_BYTE, QueryConstants.NULL_DOUBLE)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value1, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value2, value2)); TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(value1, value2)); @@ -14067,8 +13100,7 @@ public static void test_compare_byte_long_compare() { TestCase.assertEquals(1, DBLanguageFunctionUtil.compareTo(Double.NaN, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero1, zero2)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero2, zero1)); - TestCase.assertEquals(0, - DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_BYTE, QueryConstants.NULL_LONG)); + TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_BYTE, QueryConstants.NULL_LONG)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value1, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value2, value2)); TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(value1, value2)); @@ -14090,8 +13122,7 @@ public static void test_compare_byte_float_compare() { TestCase.assertEquals(1, DBLanguageFunctionUtil.compareTo(Double.NaN, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero1, zero2)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero2, zero1)); - TestCase.assertEquals(0, - DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_BYTE, QueryConstants.NULL_FLOAT)); + TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_BYTE, QueryConstants.NULL_FLOAT)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value1, 
value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value2, value2)); TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(value1, value2)); @@ -14113,8 +13144,7 @@ public static void test_compare_byte_char_compare() { TestCase.assertEquals(1, DBLanguageFunctionUtil.compareTo(Double.NaN, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero1, zero2)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero2, zero1)); - TestCase.assertEquals(0, - DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_BYTE, QueryConstants.NULL_CHAR)); + TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_BYTE, QueryConstants.NULL_CHAR)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value1, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value2, value2)); TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(value1, value2)); @@ -14136,8 +13166,7 @@ public static void test_compare_byte_byte_compare() { TestCase.assertEquals(1, DBLanguageFunctionUtil.compareTo(Double.NaN, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero1, zero2)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero2, zero1)); - TestCase.assertEquals(0, - DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_BYTE, QueryConstants.NULL_BYTE)); + TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_BYTE, QueryConstants.NULL_BYTE)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value1, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value2, value2)); TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(value1, value2)); @@ -14159,8 +13188,7 @@ public static void test_compare_byte_short_compare() { TestCase.assertEquals(1, DBLanguageFunctionUtil.compareTo(Double.NaN, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero1, zero2)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero2, zero1)); 
- TestCase.assertEquals(0, - DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_BYTE, QueryConstants.NULL_SHORT)); + TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_BYTE, QueryConstants.NULL_SHORT)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value1, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value2, value2)); TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(value1, value2)); @@ -14182,8 +13210,7 @@ public static void test_compare_short_int_compare() { TestCase.assertEquals(1, DBLanguageFunctionUtil.compareTo(Double.NaN, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero1, zero2)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero2, zero1)); - TestCase.assertEquals(0, - DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_SHORT, QueryConstants.NULL_INT)); + TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_SHORT, QueryConstants.NULL_INT)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value1, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value2, value2)); TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(value1, value2)); @@ -14205,8 +13232,8 @@ public static void test_compare_short_double_compare() { TestCase.assertEquals(1, DBLanguageFunctionUtil.compareTo(Double.NaN, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero1, zero2)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero2, zero1)); - TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_SHORT, - QueryConstants.NULL_DOUBLE)); + TestCase.assertEquals(0, + DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_SHORT, QueryConstants.NULL_DOUBLE)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value1, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value2, value2)); TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(value1, 
value2)); @@ -14228,8 +13255,7 @@ public static void test_compare_short_long_compare() { TestCase.assertEquals(1, DBLanguageFunctionUtil.compareTo(Double.NaN, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero1, zero2)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero2, zero1)); - TestCase.assertEquals(0, - DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_SHORT, QueryConstants.NULL_LONG)); + TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_SHORT, QueryConstants.NULL_LONG)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value1, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value2, value2)); TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(value1, value2)); @@ -14252,7 +13278,7 @@ public static void test_compare_short_float_compare() { TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero1, zero2)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero2, zero1)); TestCase.assertEquals(0, - DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_SHORT, QueryConstants.NULL_FLOAT)); + DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_SHORT, QueryConstants.NULL_FLOAT)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value1, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value2, value2)); TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(value1, value2)); @@ -14274,8 +13300,7 @@ public static void test_compare_short_char_compare() { TestCase.assertEquals(1, DBLanguageFunctionUtil.compareTo(Double.NaN, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero1, zero2)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero2, zero1)); - TestCase.assertEquals(0, - DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_SHORT, QueryConstants.NULL_CHAR)); + TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_SHORT, QueryConstants.NULL_CHAR)); 
TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value1, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value2, value2)); TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(value1, value2)); @@ -14297,8 +13322,7 @@ public static void test_compare_short_byte_compare() { TestCase.assertEquals(1, DBLanguageFunctionUtil.compareTo(Double.NaN, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero1, zero2)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero2, zero1)); - TestCase.assertEquals(0, - DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_SHORT, QueryConstants.NULL_BYTE)); + TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_SHORT, QueryConstants.NULL_BYTE)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value1, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value2, value2)); TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(value1, value2)); @@ -14321,7 +13345,7 @@ public static void test_compare_short_short_compare() { TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero1, zero2)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(zero2, zero1)); TestCase.assertEquals(0, - DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_SHORT, QueryConstants.NULL_SHORT)); + DBLanguageFunctionUtil.compareTo(QueryConstants.NULL_SHORT, QueryConstants.NULL_SHORT)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value1, value1)); TestCase.assertEquals(0, DBLanguageFunctionUtil.compareTo(value2, value2)); TestCase.assertEquals(-1, DBLanguageFunctionUtil.compareTo(value1, value2)); diff --git a/DB/src/test/java/io/deephaven/db/tables/lang/TestDBLanguageParser.java b/DB/src/test/java/io/deephaven/db/tables/lang/TestDBLanguageParser.java index 7dce89dcb42..22501f0eb95 100644 --- a/DB/src/test/java/io/deephaven/db/tables/lang/TestDBLanguageParser.java +++ 
b/DB/src/test/java/io/deephaven/db/tables/lang/TestDBLanguageParser.java @@ -42,8 +42,7 @@ public void setUp() throws Exception { packageImports = new HashSet<>(); packageImports.add(Package.getPackage("java.lang")); - // Package.getPackage returns null if the class loader has yet to see a class from that - // package; force a load + // Package.getPackage returns null if the class loader has yet to see a class from that package; force a load Package tablePackage = Table.class.getPackage(); Assert.equals(tablePackage.getName(), "tablePackage.getName()", "io.deephaven.db.tables"); packageImports.add(tablePackage); @@ -314,9 +313,8 @@ public void testBinaryLiterals() throws Exception { } /** - * The query language will automatically convert an integer literal to a long literal if the - * value is too big to store as an int. (Normal Java would not do this; it would just result in - * a compilation error.) + * The query language will automatically convert an integer literal to a long literal if the value is too big to + * store as an int. (Normal Java would not do this; it would just result in a compilation error.) 
*/ public void testAutoPromotedLiterals() throws Exception { String expression, resultExpression; @@ -378,128 +376,126 @@ public void testCharLiterals() throws Exception { public void testConvertBackticks() throws Exception { Require.equals( - DBLanguageParser.convertBackticks("`hello`"), - "convertBackticks(\"`hello`\")", - "\"hello\""); + DBLanguageParser.convertBackticks("`hello`"), + "convertBackticks(\"`hello`\")", + "\"hello\""); Require.equals( - DBLanguageParser.convertBackticks("`'`"), - "convertBackticks(\"`'`\")", - "\"'\""); + DBLanguageParser.convertBackticks("`'`"), + "convertBackticks(\"`'`\")", + "\"'\""); Require.equals( - DBLanguageParser.convertBackticks("`\"`"), - "convertBackticks(\"`\\\"`\")", - "\"\\\"\""); + DBLanguageParser.convertBackticks("`\"`"), + "convertBackticks(\"`\\\"`\")", + "\"\\\"\""); Require.equals( - DBLanguageParser.convertBackticks("\"`\""), - "convertBackticks(\"\\\"`\\\"\")", - "\"`\""); + DBLanguageParser.convertBackticks("\"`\""), + "convertBackticks(\"\\\"`\\\"\")", + "\"`\""); Require.equals( - DBLanguageParser.convertBackticks("`'\\\"'`"), - "convertBackticks(\"`'\\\\\\\"'`\")", - "\"'\\\"'\""); + DBLanguageParser.convertBackticks("`'\\\"'`"), + "convertBackticks(\"`'\\\\\\\"'`\")", + "\"'\\\"'\""); Require.equals( - DBLanguageParser.convertBackticks("\"`abc`\""), - "convertBackticks(\"\\\"`abc`\\\"\")", - "\"`abc`\""); + DBLanguageParser.convertBackticks("\"`abc`\""), + "convertBackticks(\"\\\"`abc`\\\"\")", + "\"`abc`\""); Require.equals( - DBLanguageParser.convertBackticks("\"`'abc`'\""), - "convertBackticks(\"\\\"`'abc`'\\\"\")", - "\"`'abc`'\""); + DBLanguageParser.convertBackticks("\"`'abc`'\""), + "convertBackticks(\"\\\"`'abc`'\\\"\")", + "\"`'abc`'\""); Require.equals( - DBLanguageParser.convertBackticks("\"'`\""), - "convertBackticks(\"\\\"'`\\\"\")", - "\"'`\""); + DBLanguageParser.convertBackticks("\"'`\""), + "convertBackticks(\"\\\"'`\\\"\")", + "\"'`\""); Require.equals( - 
DBLanguageParser.convertBackticks("`abc ` + \"def\" + \"`hij`\" + '`' + `'`"), - "convertBackticks(\"`abc ` + \\\"def\\\" + \\\"`hij`\\\" + '`' + `'`\")", - "\"abc \" + \"def\" + \"`hij`\" + '`' + \"'\""); + DBLanguageParser.convertBackticks("`abc ` + \"def\" + \"`hij`\" + '`' + `'`"), + "convertBackticks(\"`abc ` + \\\"def\\\" + \\\"`hij`\\\" + '`' + `'`\")", + "\"abc \" + \"def\" + \"`hij`\" + '`' + \"'\""); // test each type of quote, escaped and contained within itself Require.equals( - DBLanguageParser.convertBackticks("\"\\\"\""), - "convertBackticks(\"\\\"\\\\\\\"\\\"\")", - "\"\\\"\""); + DBLanguageParser.convertBackticks("\"\\\"\""), + "convertBackticks(\"\\\"\\\\\\\"\\\"\")", + "\"\\\"\""); Require.equals( - DBLanguageParser.convertBackticks("`\\``"), - "convertBackticks(\"`\\\\``\")", - "\"\\`\""); + DBLanguageParser.convertBackticks("`\\``"), + "convertBackticks(\"`\\\\``\")", + "\"\\`\""); Require.equals( - DBLanguageParser.convertBackticks("'\\''"), - "convertBackticks(\"'\\\\''\")", - "'\\''"); + DBLanguageParser.convertBackticks("'\\''"), + "convertBackticks(\"'\\\\''\")", + "'\\''"); // test tick and double quote both escaped within a string Require.equals( - DBLanguageParser.convertBackticks("`\"\\``"), - "convertBackticks(\"`\\\"\\\\``\")", - "\"\\\"\\`\""); + DBLanguageParser.convertBackticks("`\"\\``"), + "convertBackticks(\"`\\\"\\\\``\")", + "\"\\\"\\`\""); // here ` is unescaped, since it is within "s Require.equals( - DBLanguageParser.convertBackticks("\"\\\"`\""), - "convertBackticks(\"\\\"\\\\\\\"`\\\"\")", - "\"\\\"`\""); + DBLanguageParser.convertBackticks("\"\\\"`\""), + "convertBackticks(\"\\\"\\\\\\\"`\\\"\")", + "\"\\\"`\""); - // confirm that standard java escaping tools are sufficient to correctly escape strings for - // the DBLangParser + // confirm that standard java escaping tools are sufficient to correctly escape strings for the DBLangParser Require.equals( - DBLanguageParser.convertBackticks("\"" + 
StringEscapeUtils.escapeJava("`\"'\\") + "\""), - "convertBackticks(escapeJava(\"`\\\"'\\\\\"))", - "\"`\\\"'\\\\\""); + DBLanguageParser.convertBackticks("\"" + StringEscapeUtils.escapeJava("`\"'\\") + "\""), + "convertBackticks(escapeJava(\"`\\\"'\\\\\"))", + "\"`\\\"'\\\\\""); } public void testConvertSingleEquals() throws Exception { Require.equals( - DBLanguageParser.convertSingleEquals("a=b"), - "convertSingleEquals(\"a=b\")", - "a==b"); + DBLanguageParser.convertSingleEquals("a=b"), + "convertSingleEquals(\"a=b\")", + "a==b"); Require.equals( - DBLanguageParser.convertSingleEquals("a=b=c==d=e==f"), - "convertSingleEquals(\"a=b=c==d=e==f\")", - "a==b==c==d==e==f"); + DBLanguageParser.convertSingleEquals("a=b=c==d=e==f"), + "convertSingleEquals(\"a=b=c==d=e==f\")", + "a==b==c==d==e==f"); Require.equals( - DBLanguageParser.convertSingleEquals("'='"), - "convertSingleEquals(\"'='\")", - "'='"); + DBLanguageParser.convertSingleEquals("'='"), + "convertSingleEquals(\"'='\")", + "'='"); Require.equals( - DBLanguageParser.convertSingleEquals("'='='='"), - "convertSingleEquals(\"'='='='\")", - "'='=='='"); + DBLanguageParser.convertSingleEquals("'='='='"), + "convertSingleEquals(\"'='='='\")", + "'='=='='"); Require.equals( - DBLanguageParser.convertSingleEquals("'='='='=='='"), - "convertSingleEquals(\"'='='='=='='\")", - "'='=='='=='='"); + DBLanguageParser.convertSingleEquals("'='='='=='='"), + "convertSingleEquals(\"'='='='=='='\")", + "'='=='='=='='"); Require.equals( - DBLanguageParser.convertSingleEquals("a='='=b"), - "convertSingleEquals(\"a='='=b\")", - "a=='='==b"); + DBLanguageParser.convertSingleEquals("a='='=b"), + "convertSingleEquals(\"a='='=b\")", + "a=='='==b"); Require.equals( - DBLanguageParser.convertSingleEquals("\"a=b\""), - "convertSingleEquals(\"a=b\")", - "\"a=b\""); + DBLanguageParser.convertSingleEquals("\"a=b\""), + "convertSingleEquals(\"a=b\")", + "\"a=b\""); Require.equals( - DBLanguageParser.convertSingleEquals("\"a=b'\"='='"), - 
"convertSingleEquals(\"\\\"a=b'\\\"='='\")", - "\"a=b'\"=='='"); + DBLanguageParser.convertSingleEquals("\"a=b'\"='='"), + "convertSingleEquals(\"\\\"a=b'\\\"='='\")", + "\"a=b'\"=='='"); } /** - * Test casts. See - * Chapter 5, + * Test casts. See Chapter 5, * Conversions and Contexts, in the java language specification for more info. * * @see #testPrimitiveLiteralCasts() @@ -530,8 +526,8 @@ public void testMiscellaneousCasts() throws Exception { resultExpression = "doubleCast(intCast(myIntObj))"; check(expression, resultExpression, double.class, new String[] {"myIntObj"}); - expression = "(double)myIntObj"; // requires separate casts for unboxing & widening (see - // notes at testBoxedToPrimitiveCasts, or JLS) + expression = "(double)myIntObj"; // requires separate casts for unboxing & widening (see notes at + // testBoxedToPrimitiveCasts, or JLS) resultExpression = "doubleCast(intCast(myIntObj))"; check(expression, resultExpression, double.class, new String[] {"myIntObj"}); @@ -552,36 +548,34 @@ public void testMiscellaneousCasts() throws Exception { } /** - * This is an older version of {@link #testPrimitiveVariableCasts()}, operating with literals - * instead of variables. + * This is an older version of {@link #testPrimitiveVariableCasts()}, operating with literals instead of variables. 
*/ public void testPrimitiveLiteralCasts() throws Exception { String expression, resultExpression; Collection> literals = Arrays.asList( - new Pair<>("42", int.class), - new Pair<>("42L", long.class), - new Pair<>("42f", float.class), - new Pair<>("42d", double.class), - new Pair<>("'c'", char.class)); + new Pair<>("42", int.class), + new Pair<>("42L", long.class), + new Pair<>("42f", float.class), + new Pair<>("42d", double.class), + new Pair<>("'c'", char.class)); Collection> targetTypes = Arrays.asList( - new Pair<>("char", char.class), - new Pair<>("byte", byte.class), - new Pair<>("short", short.class), - new Pair<>("int", int.class), - new Pair<>("float", float.class), - new Pair<>("double", double.class), - new Pair<>("long", long.class)); + new Pair<>("char", char.class), + new Pair<>("byte", byte.class), + new Pair<>("short", short.class), + new Pair<>("int", int.class), + new Pair<>("float", float.class), + new Pair<>("double", double.class), + new Pair<>("long", long.class)); /* - * Test casting from each possible numeric literal type (and char) to each of the other - * numeric types (and char). + * Test casting from each possible numeric literal type (and char) to each of the other numeric types (and + * char). * - * When casting to a primitive type, we replace the cast with a function call (e.g - * "(int)foo" to "intCast(foo)") + * When casting to a primitive type, we replace the cast with a function call (e.g "(int)foo" to "intCast(foo)") * - * The exception is the identity conversion, e.g. "(int)42" or "(double)42d". Since - * mid-2017, there is are no intermediate functions for these redundant conversions. + * The exception is the identity conversion, e.g. "(int)42" or "(double)42d". Since mid-2017, there is are no + * intermediate functions for these redundant conversions. 
*/ for (Pair literal : literals) { for (Pair targetType : targetTypes) { @@ -589,8 +583,7 @@ public void testPrimitiveLiteralCasts() throws Exception { if (targetType.second == literal.second) { resultExpression = expression; } else { - resultExpression = targetType.first + "Cast(" + literal.first + ')'; // e.g. - // "intCast(42)" + resultExpression = targetType.first + "Cast(" + literal.first + ')'; // e.g. "intCast(42)" } check(expression, resultExpression, targetType.second, new String[] {}); } @@ -607,8 +600,7 @@ public void testPrimitiveLiteralCasts() throws Exception { } catch (DBLanguageParser.QueryLanguageParseException ignored) { } } catch (Throwable ex) { - throw new RuntimeException( - "Failed testing cast of boolean to " + targetType.second.getName(), ex); + throw new RuntimeException("Failed testing cast of boolean to " + targetType.second.getName(), ex); } } @@ -616,15 +608,13 @@ public void testPrimitiveLiteralCasts() throws Exception { for (Pair literal : literals) { try { try { - resultExpression = expression = "(boolean)" + literal.first; // e.g. - // "(boolean)42" + resultExpression = expression = "(boolean)" + literal.first; // e.g. "(boolean)42" check(expression, resultExpression, boolean.class, new String[] {}); fail("Should have thrown a DBLanguageParser.QueryLanguageParseException"); } catch (DBLanguageParser.QueryLanguageParseException ignored) { } } catch (Throwable ex) { - throw new RuntimeException( - "Failed testing cast of " + literal.second.getName() + " to boolean", ex); + throw new RuntimeException("Failed testing cast of " + literal.second.getName() + " to boolean", ex); } } @@ -634,11 +624,10 @@ public void testPrimitiveLiteralCasts() throws Exception { } /** - * Test conversions between various primitive types. This is a newer and more complete version - * of {@link #testPrimitiveLiteralCasts()}. (This one handles {@code byte} and {@code short}.) + * Test conversions between various primitive types. 
This is a newer and more complete version of + * {@link #testPrimitiveLiteralCasts()}. (This one handles {@code byte} and {@code short}.) *

      - * See table 5.5-A - * here. + * See table 5.5-A here. * * @see #testBoxedToPrimitiveCasts() */ @@ -646,31 +635,30 @@ public void testPrimitiveVariableCasts() throws Exception { String expression, resultExpression; Collection> numericAndCharVars = Arrays.asList( - new Pair<>("myByte", byte.class), - new Pair<>("myShort", short.class), - new Pair<>("myChar", char.class), - new Pair<>("myInt", int.class), - new Pair<>("myLong", long.class), - new Pair<>("myFloat", float.class), - new Pair<>("myDouble", double.class)); + new Pair<>("myByte", byte.class), + new Pair<>("myShort", short.class), + new Pair<>("myChar", char.class), + new Pair<>("myInt", int.class), + new Pair<>("myLong", long.class), + new Pair<>("myFloat", float.class), + new Pair<>("myDouble", double.class)); Collection> numericAndCharTypes = Arrays.asList( - new Pair<>("byte", byte.class), - new Pair<>("short", short.class), - new Pair<>("char", char.class), - new Pair<>("int", int.class), - new Pair<>("long", long.class), - new Pair<>("float", float.class), - new Pair<>("double", double.class)); + new Pair<>("byte", byte.class), + new Pair<>("short", short.class), + new Pair<>("char", char.class), + new Pair<>("int", int.class), + new Pair<>("long", long.class), + new Pair<>("float", float.class), + new Pair<>("double", double.class)); /* - * Test casting from each possible numeric literal type (and char) to each of the other - * numeric types (and char). + * Test casting from each possible numeric literal type (and char) to each of the other numeric types (and + * char). * - * When casting to a primitive type, we replace the cast with a function call (e.g - * "(int)foo" to "intCast(foo)") + * When casting to a primitive type, we replace the cast with a function call (e.g "(int)foo" to "intCast(foo)") * - * The exception is the identity conversion, e.g. "(int)myInt" or "(double)myDouble". Since - * mid-2017, there is are no intermediate functions for these redundant conversions. 
+ * The exception is the identity conversion, e.g. "(int)myInt" or "(double)myDouble". Since mid-2017, there is + * are no intermediate functions for these redundant conversions. */ for (Pair var : numericAndCharVars) { for (Pair targetType : numericAndCharTypes) { @@ -678,8 +666,7 @@ public void testPrimitiveVariableCasts() throws Exception { if (targetType.second == var.second) { resultExpression = expression; } else { - resultExpression = targetType.first + "Cast(" + var.first + ')'; // e.g. - // "intCast(myDouble)" + resultExpression = targetType.first + "Cast(" + var.first + ')'; // e.g. "intCast(myDouble)" } check(expression, resultExpression, targetType.second, new String[] {var.first}); } @@ -690,16 +677,13 @@ public void testPrimitiveVariableCasts() throws Exception { try { try { expression = '(' + targetType.first + ")myBoolean"; // e.g. "(int)myBoolean" - resultExpression = targetType.first + "Cast(myBoolean)"; // e.g. - // "intCast(myBoolean)" - check(expression, resultExpression, targetType.second, - new String[] {"myBoolean"}); + resultExpression = targetType.first + "Cast(myBoolean)"; // e.g. "intCast(myBoolean)" + check(expression, resultExpression, targetType.second, new String[] {"myBoolean"}); fail("Should have thrown a DBLanguageParser.QueryLanguageParseException"); } catch (DBLanguageParser.QueryLanguageParseException ignored) { } } catch (Throwable ex) { - throw new RuntimeException( - "Failed testing cast of boolean to " + targetType.second.getName(), ex); + throw new RuntimeException("Failed testing cast of boolean to " + targetType.second.getName(), ex); } } @@ -707,15 +691,13 @@ public void testPrimitiveVariableCasts() throws Exception { for (Pair var : numericAndCharVars) { try { try { - resultExpression = expression = "(boolean)" + var.first; // e.g. - // "(boolean)myInt" + resultExpression = expression = "(boolean)" + var.first; // e.g. 
"(boolean)myInt" check(expression, resultExpression, boolean.class, new String[] {}); fail("Should have thrown a DBLanguageParser.QueryLanguageParseException"); } catch (DBLanguageParser.QueryLanguageParseException ignored) { } } catch (Throwable ex) { - throw new RuntimeException( - "Failed testing cast of " + var.second.getName() + " to boolean", ex); + throw new RuntimeException("Failed testing cast of " + var.second.getName() + " to boolean", ex); } } @@ -728,29 +710,25 @@ public void testPrimitiveVariableCasts() throws Exception { /** * Test conversions from boxed types to primitive types. *

      - * When casting from boxed types to primitive types, the target type must be at least as wide as - * the primitive type that the source (boxed) type represents. Thus there are two kinds of - * conversions from boxed types to primitive types: + * When casting from boxed types to primitive types, the target type must be at least as wide as the primitive type + * that the source (boxed) type represents. Thus there are two kinds of conversions from boxed types to primitive + * types: *

      - * 1. Unboxing-only conversions (e.g. Integer to int) 2. Unboxing and widening conversions (e.g. - * Integer to double) + * 1. Unboxing-only conversions (e.g. Integer to int) 2. Unboxing and widening conversions (e.g. Integer to double) *

      - * In the latter case, the language must explicitly cast an Integer to an int *before* casting - * it to a double. This follow's the language specification: when permitted, a non-identity - * conversion from a boxed type to a primitive type consists of an unboxing conversion followed - * by a widening conversion. + * In the latter case, the language must explicitly cast an Integer to an int *before* casting it to a double. This + * follow's the language specification: when permitted, a non-identity conversion from a boxed type to a primitive + * type consists of an unboxing conversion followed by a widening conversion. *

      * For example, this code: {@code (double) new Integer(42) } Should be parsed into: * {@code doubleCast(intCast(new Integer(42))) } *

      - * Otherwise, the compiler would see {@code doubleCast()} with an {@code Integer} argument, and - * rightly decide that {@code doubleCast(Object)} is a better choice than - * {@code doubleCast(int)}. But then we wind up running: {@code (double) anObject} when - * 'anObject' is actually an Integer. Java then tries a narrowing conversion from {@code Object} - * to {@code Double}, which fails. + * Otherwise, the compiler would see {@code doubleCast()} with an {@code Integer} argument, and rightly decide that + * {@code doubleCast(Object)} is a better choice than {@code doubleCast(int)}. But then we wind up running: + * {@code (double) anObject} when 'anObject' is actually an Integer. Java then tries a narrowing conversion from + * {@code Object} to {@code Double}, which fails. *

      - * See table 5.5-A - * here. + * See table 5.5-A here. * * @see #testPrimitiveLiteralCasts() * @see #testBoxedToPrimitiveCasts() @@ -758,24 +736,24 @@ public void testPrimitiveVariableCasts() throws Exception { public void testBoxedToPrimitiveCasts() throws Exception { String expression, resultExpression; - final List boxedTypes = - new ArrayList<>(io.deephaven.util.type.TypeUtils.BOXED_TYPES); - final List primitiveTypes = - new ArrayList<>(io.deephaven.util.type.TypeUtils.PRIMITIVE_TYPES); + final List boxedTypes = new ArrayList<>(io.deephaven.util.type.TypeUtils.BOXED_TYPES); + final List primitiveTypes = new ArrayList<>(io.deephaven.util.type.TypeUtils.PRIMITIVE_TYPES); for (int i = 0; i < io.deephaven.util.type.TypeUtils.BOXED_TYPES.size(); i++) { final Class boxedType = boxedTypes.get(i); - final String unboxedTypeName = - io.deephaven.util.type.TypeUtils.getUnboxedType(boxedType).getName(); // the name of - // the - // primitive - // type that - // this boxed - // type - // represents + final String unboxedTypeName = io.deephaven.util.type.TypeUtils.getUnboxedType(boxedType).getName(); // the + // name + // of + // the + // primitive + // type + // that + // this + // boxed + // type + // represents final String boxedTypeTestVarName = - "my" + Character.toUpperCase(unboxedTypeName.charAt(0)) - + unboxedTypeName.substring(1) + "Obj"; + "my" + Character.toUpperCase(unboxedTypeName.charAt(0)) + unboxedTypeName.substring(1) + "Obj"; for (int j = 0; j < io.deephaven.util.type.TypeUtils.PRIMITIVE_TYPES.size(); j++) { final Class primitiveType = primitiveTypes.get(j); @@ -787,39 +765,34 @@ public void testBoxedToPrimitiveCasts() throws Exception { } else if (i == j) { // Unboxing conversion only resultExpression = primitiveTypeName + "Cast(" + boxedTypeTestVarName + ')'; } else { // i != j; Unboxing and widening conversion - resultExpression = primitiveTypeName + "Cast(" + unboxedTypeName + "Cast(" - + boxedTypeTestVarName + "))"; + resultExpression = + 
primitiveTypeName + "Cast(" + unboxedTypeName + "Cast(" + boxedTypeTestVarName + "))"; } try { if (j < i - || (primitiveType == Character.TYPE && boxedType != Character.class) - || (boxedType == Boolean.class ^ primitiveType == Boolean.TYPE)) { + || (primitiveType == Character.TYPE && boxedType != Character.class) + || (boxedType == Boolean.class ^ primitiveType == Boolean.TYPE)) { /* - * Ensure we fail on conversions that the JLS disallows. Such as: 1) Trying - * to convert to a primitive type that is not sufficient to store the boxed - * type's data (i.e. j < i). The JLS does not permit such a conversion. 2) - * Trying to unbox anything other than a Character to a char. (However, char - * can be cast to wider (int, long, float, double). 3) Casting a Boolean to - * any primitive besides bool, or casting anything besides a Boolean to - * bool. + * Ensure we fail on conversions that the JLS disallows. Such as: 1) Trying to convert to a + * primitive type that is not sufficient to store the boxed type's data (i.e. j < i). The JLS + * does not permit such a conversion. 2) Trying to unbox anything other than a Character to a + * char. (However, char can be cast to wider (int, long, float, double). 3) Casting a Boolean to + * any primitive besides bool, or casting anything besides a Boolean to bool. * * Note that casting from less-specific types to any primitive is supported. 
*/ try { - check(expression, resultExpression, primitiveType, - new String[] {boxedTypeTestVarName}); - fail( - "Should have thrown a DBLanguageParser.QueryLanguageParseException"); + check(expression, resultExpression, primitiveType, new String[] {boxedTypeTestVarName}); + fail("Should have thrown a DBLanguageParser.QueryLanguageParseException"); } catch (DBLanguageParser.QueryLanguageParseException ignored) { } } else { - check(expression, resultExpression, primitiveType, - new String[] {boxedTypeTestVarName}); + check(expression, resultExpression, primitiveType, new String[] {boxedTypeTestVarName}); } } catch (Throwable ex) { - throw new RuntimeException("Failed testing cast of " + boxedType.getName() - + " to " + primitiveType.getName() + " (i=" + i + ", j=" + j + ')', ex); + throw new RuntimeException("Failed testing cast of " + boxedType.getName() + " to " + + primitiveType.getName() + " (i=" + i + ", j=" + j + ')', ex); } } } @@ -840,8 +813,7 @@ public void testObjectToPrimitiveOrBoxedCasts() throws Exception { for (Class type : boxedAndPrimitiveTypes) { expression = '(' + type.getSimpleName() + ")myObject"; if (type.isPrimitive() && type != boolean.class) { - resultExpression = - io.deephaven.util.type.TypeUtils.getUnboxedType(type) + "Cast(myObject)"; + resultExpression = io.deephaven.util.type.TypeUtils.getUnboxedType(type) + "Cast(myObject)"; } else { resultExpression = expression; } @@ -969,8 +941,7 @@ public void testOperatorOverloading() throws Exception { public void testArrayOperatorOverloading() throws Exception { String expression = "myIntArray+myDoubleArray"; String resultExpression = "plusArray(myIntArray, myDoubleArray)"; - check(expression, resultExpression, new double[0].getClass(), - new String[] {"myDoubleArray", "myIntArray"}); + check(expression, resultExpression, new double[0].getClass(), new String[] {"myDoubleArray", "myIntArray"}); expression = "myIntArray+1"; resultExpression = "plusArray(myIntArray, 1)"; @@ -990,8 +961,7 @@ public 
void testArrayOperatorOverloading() throws Exception { expression = "myIntArray==myDoubleArray"; resultExpression = "eqArray(myIntArray, myDoubleArray)"; - check(expression, resultExpression, new boolean[0].getClass(), - new String[] {"myDoubleArray", "myIntArray"}); + check(expression, resultExpression, new boolean[0].getClass(), new String[] {"myDoubleArray", "myIntArray"}); expression = "myIntArray==1"; resultExpression = "eqArray(myIntArray, 1)"; @@ -1003,8 +973,7 @@ public void testArrayOperatorOverloading() throws Exception { expression = "myIntArray>myDoubleArray"; resultExpression = "greaterArray(myIntArray, myDoubleArray)"; - check(expression, resultExpression, new boolean[0].getClass(), - new String[] {"myDoubleArray", "myIntArray"}); + check(expression, resultExpression, new boolean[0].getClass(), new String[] {"myDoubleArray", "myIntArray"}); expression = "myIntArray>1"; resultExpression = "greaterArray(myIntArray, 1)"; @@ -1016,33 +985,31 @@ public void testArrayOperatorOverloading() throws Exception { expression = "myTestClassArray==myTestClassArray"; resultExpression = "eqArray(myTestClassArray, myTestClassArray)"; - check(expression, resultExpression, new boolean[0].getClass(), - new String[] {"myTestClassArray"}); + check(expression, resultExpression, new boolean[0].getClass(), new String[] {"myTestClassArray"}); expression = "myTestClassArray==myTestClass"; resultExpression = "eqArray(myTestClassArray, myTestClass)"; check(expression, resultExpression, new boolean[0].getClass(), - new String[] {"myTestClass", "myTestClassArray"}); + new String[] {"myTestClass", "myTestClassArray"}); expression = "myTestClass==myTestClassArray"; resultExpression = "eqArray(myTestClass, myTestClassArray)"; check(expression, resultExpression, new boolean[0].getClass(), - new String[] {"myTestClass", "myTestClassArray"}); + new String[] {"myTestClass", "myTestClassArray"}); expression = "myTestClassArray>myTestClassArray"; resultExpression = 
"greaterArray(myTestClassArray, myTestClassArray)"; - check(expression, resultExpression, new boolean[0].getClass(), - new String[] {"myTestClassArray"}); + check(expression, resultExpression, new boolean[0].getClass(), new String[] {"myTestClassArray"}); expression = "myTestClassArray>myTestClass"; resultExpression = "greaterArray(myTestClassArray, myTestClass)"; check(expression, resultExpression, new boolean[0].getClass(), - new String[] {"myTestClass", "myTestClassArray"}); + new String[] {"myTestClass", "myTestClassArray"}); expression = "myTestClass>myTestClassArray"; resultExpression = "greaterArray(myTestClass, myTestClassArray)"; check(expression, resultExpression, new boolean[0].getClass(), - new String[] {"myTestClass", "myTestClassArray"}); + new String[] {"myTestClass", "myTestClassArray"}); } public void testResolution() throws Exception { @@ -1091,8 +1058,7 @@ public void testResolution() throws Exception { check(expression, resultExpression, double.class, new String[] {"myInt"}); expression = "DBLanguageParserDummyClass.functionWithInterfacesAsArgTypes(`test`, 0)"; - resultExpression = - "DBLanguageParserDummyClass.functionWithInterfacesAsArgTypes(\"test\", 0)"; + resultExpression = "DBLanguageParserDummyClass.functionWithInterfacesAsArgTypes(\"test\", 0)"; check(expression, resultExpression, int.class, new String[] {}); } @@ -1137,32 +1103,25 @@ public void testResolution() throws Exception { // } /** - * Test implicit argument type conversions (e.g. primitive casts and converting DbArrays to Java - * arrays) + * Test implicit argument type conversions (e.g. 
primitive casts and converting DbArrays to Java arrays) */ public void testImplicitConversion() throws Exception { - String expression = - "testImplicitConversion1(myInt, myDouble, myLong, myInt, myDouble, myLong)"; + String expression = "testImplicitConversion1(myInt, myDouble, myLong, myInt, myDouble, myLong)"; String resultExpression = - "testImplicitConversion1(new double[] { doubleCast(myInt), myDouble, doubleCast(myLong), doubleCast(myInt), myDouble, doubleCast(myLong) })"; - check(expression, resultExpression, new double[0].getClass(), - new String[] {"myDouble", "myInt", "myLong"}); + "testImplicitConversion1(new double[] { doubleCast(myInt), myDouble, doubleCast(myLong), doubleCast(myInt), myDouble, doubleCast(myLong) })"; + check(expression, resultExpression, new double[0].getClass(), new String[] {"myDouble", "myInt", "myLong"}); expression = "testVarArgs(myInt, 'a', myDouble, 1.0, 5.0, myDouble)"; resultExpression = "testVarArgs(myInt, 'a', new double[] { myDouble, 1.0, 5.0, myDouble })"; - check(expression, resultExpression, new double[0].getClass(), - new String[] {"myDouble", "myInt"}); + check(expression, resultExpression, new double[0].getClass(), new String[] {"myDouble", "myInt"}); expression = "testVarArgs(myInt, 'a', myDoubleArray)"; resultExpression = "testVarArgs(myInt, 'a', myDoubleArray)"; - check(expression, resultExpression, new double[0].getClass(), - new String[] {"myDoubleArray", "myInt"}); + check(expression, resultExpression, new double[0].getClass(), new String[] {"myDoubleArray", "myInt"}); expression = "testImplicitConversion1(myDoubleDBArray)"; - resultExpression = - "testImplicitConversion1(ArrayUtils.nullSafeDbArrayToArray(myDoubleDBArray))"; - check(expression, resultExpression, new double[0].getClass(), - new String[] {"myDoubleDBArray"}); + resultExpression = "testImplicitConversion1(ArrayUtils.nullSafeDbArrayToArray(myDoubleDBArray))"; + check(expression, resultExpression, new double[0].getClass(), new String[] 
{"myDoubleDBArray"}); expression = "testImplicitConversion2(myInt, myInt)"; resultExpression = "testImplicitConversion2(new int[] { myInt, myInt })"; @@ -1173,61 +1132,47 @@ public void testImplicitConversion() throws Exception { check(expression, resultExpression, new Object[0].getClass(), new String[] {"myDBArray"}); // expression="testImplicitConversion3(myDoubleArray)"; // TODO: This test fails. - // resultExpression="testImplicitConversion3(myDoubleArray)"; // we should *not* convert - // from DbDoubleArray to Object[]! - // check(expression, resultExpression, new Object[0].getClass(), new - // String[]{"myDoubleArray"}); + // resultExpression="testImplicitConversion3(myDoubleArray)"; // we should *not* convert from DbDoubleArray to + // Object[]! + // check(expression, resultExpression, new Object[0].getClass(), new String[]{"myDoubleArray"}); expression = "testImplicitConversion3((Object) myDoubleArray)"; - resultExpression = "testImplicitConversion3((Object)myDoubleArray)"; // test a workaround - // for the above - check(expression, resultExpression, new Object[0].getClass(), - new String[] {"myDoubleArray"}); + resultExpression = "testImplicitConversion3((Object)myDoubleArray)"; // test a workaround for the above + check(expression, resultExpression, new Object[0].getClass(), new String[] {"myDoubleArray"}); expression = "testImplicitConversion3(myDoubleArray, myInt)"; resultExpression = "testImplicitConversion3(myDoubleArray, myInt)"; - check(expression, resultExpression, new Object[0].getClass(), - new String[] {"myDoubleArray", "myInt"}); + check(expression, resultExpression, new Object[0].getClass(), new String[] {"myDoubleArray", "myInt"}); expression = "testImplicitConversion4(myInt, myDBArray)"; - resultExpression = - "testImplicitConversion4(doubleCast(myInt), ArrayUtils.nullSafeDbArrayToArray(myDBArray))"; - check(expression, resultExpression, new Object[0].getClass(), - new String[] {"myDBArray", "myInt"}); + resultExpression = 
"testImplicitConversion4(doubleCast(myInt), ArrayUtils.nullSafeDbArrayToArray(myDBArray))"; + check(expression, resultExpression, new Object[0].getClass(), new String[] {"myDBArray", "myInt"}); expression = "testImplicitConversion4(myInt, myDBArray, myDouble)"; resultExpression = "testImplicitConversion4(doubleCast(myInt), myDBArray, myDouble)"; - check(expression, resultExpression, new Object[0].getClass(), - new String[] {"myDBArray", "myDouble", "myInt"}); + check(expression, resultExpression, new Object[0].getClass(), new String[] {"myDBArray", "myDouble", "myInt"}); expression = "testImplicitConversion4(myInt, myDouble, myDBArray)"; resultExpression = "testImplicitConversion4(doubleCast(myInt), myDouble, myDBArray)"; - check(expression, resultExpression, new Object[0].getClass(), - new String[] {"myDBArray", "myDouble", "myInt"}); + check(expression, resultExpression, new Object[0].getClass(), new String[] {"myDBArray", "myDouble", "myInt"}); - // expression="testImplicitConversion5(myDBArray)"; // TODO: This test fails (declared arg - // type is "DbArrayBase...") - // resultExpression="testImplicitConversion5(myDBArray)"; // vararg of DbArrayBase -- don't - // convert! + // expression="testImplicitConversion5(myDBArray)"; // TODO: This test fails (declared arg type is + // "DbArrayBase...") + // resultExpression="testImplicitConversion5(myDBArray)"; // vararg of DbArrayBase -- don't convert! // check(expression, resultExpression, new Object[0].getClass(), new String[]{"myDBArray"}); - expression = "testImplicitConversion5((DbArrayBase) myDBArray)"; // Workaround for the - // above. - resultExpression = "testImplicitConversion5((DbArrayBase)myDBArray)"; // vararg of - // DbArrayBase -- - // don't convert! + expression = "testImplicitConversion5((DbArrayBase) myDBArray)"; // Workaround for the above. + resultExpression = "testImplicitConversion5((DbArrayBase)myDBArray)"; // vararg of DbArrayBase -- don't convert! 
check(expression, resultExpression, new Object[0].getClass(), new String[] {"myDBArray"}); expression = "testImplicitConversion5(myDBArray, myDBArray)"; - resultExpression = "testImplicitConversion5(myDBArray, myDBArray)"; // vararg of DbArrayBase - // -- don't convert! + resultExpression = "testImplicitConversion5(myDBArray, myDBArray)"; // vararg of DbArrayBase -- don't convert! check(expression, resultExpression, new Object[0].getClass(), new String[] {"myDBArray"}); } /** - * Test calling the default methods from {@link Object}. (In the past, these were not recognized - * on interfaces.) + * Test calling the default methods from {@link Object}. (In the past, these were not recognized on interfaces.) */ public void testObjectMethods() throws Exception { // Call hashCode() on an Object @@ -1274,10 +1219,8 @@ public void testFieldAccess() throws Exception { resultExpression = "DBLanguageParserDummyClass.StaticNestedClass.staticVar"; check(expression, resultExpression, String.class, new String[] {}); - expression = - "DBLanguageParserDummyClass.StaticNestedClass.staticInstanceOfStaticClass.instanceVar"; - resultExpression = - "DBLanguageParserDummyClass.StaticNestedClass.staticInstanceOfStaticClass.instanceVar"; + expression = "DBLanguageParserDummyClass.StaticNestedClass.staticInstanceOfStaticClass.instanceVar"; + resultExpression = "DBLanguageParserDummyClass.StaticNestedClass.staticInstanceOfStaticClass.instanceVar"; check(expression, resultExpression, String.class, new String[] {}); expression = "new DBLanguageParserDummyClass.StaticNestedClass().instanceVar"; @@ -1286,8 +1229,7 @@ public void testFieldAccess() throws Exception { expression = "myDummyClass.InnerClass"; resultExpression = "myDummyClass.InnerClass"; - check(expression, resultExpression, DBLanguageParserDummyClass.InnerClass.class, - new String[] {"myDummyClass"}); + check(expression, resultExpression, DBLanguageParserDummyClass.InnerClass.class, new String[] {"myDummyClass"}); expression = 
"myDummyClass.innerClassInstance.staticVar"; resultExpression = "myDummyClass.innerClassInstance.staticVar"; @@ -1307,26 +1249,19 @@ public void testFieldAccess() throws Exception { expression = "myDummyClass.innerClassInstance.innerInnerClassInstance"; resultExpression = "myDummyClass.innerClassInstance.innerInnerClassInstance"; - check(expression, resultExpression, - DBLanguageParserDummyClass.InnerClass.InnerInnerClass.class, - new String[] {"myDummyClass"}); + check(expression, resultExpression, DBLanguageParserDummyClass.InnerClass.InnerInnerClass.class, + new String[] {"myDummyClass"}); - expression = - "myDummyClass.innerClassInstance.innerInnerClassInstance.innerInnerInstanceVar"; - resultExpression = - "myDummyClass.innerClassInstance.innerInnerClassInstance.innerInnerInstanceVar"; + expression = "myDummyClass.innerClassInstance.innerInnerClassInstance.innerInnerInstanceVar"; + resultExpression = "myDummyClass.innerClassInstance.innerInnerClassInstance.innerInnerInstanceVar"; check(expression, resultExpression, String.class, new String[] {"myDummyClass"}); expression = "myDummyClass.innerClass2Instance.innerClassAsInstanceOfAnotherInnerClass"; - resultExpression = - "myDummyClass.innerClass2Instance.innerClassAsInstanceOfAnotherInnerClass"; - check(expression, resultExpression, DBLanguageParserDummyClass.InnerClass.class, - new String[] {"myDummyClass"}); + resultExpression = "myDummyClass.innerClass2Instance.innerClassAsInstanceOfAnotherInnerClass"; + check(expression, resultExpression, DBLanguageParserDummyClass.InnerClass.class, new String[] {"myDummyClass"}); - expression = - "myDummyClass.innerClass2Instance.innerClassAsInstanceOfAnotherInnerClass.instanceVar"; - resultExpression = - "myDummyClass.innerClass2Instance.innerClassAsInstanceOfAnotherInnerClass.instanceVar"; + expression = "myDummyClass.innerClass2Instance.innerClassAsInstanceOfAnotherInnerClass.instanceVar"; + resultExpression = 
"myDummyClass.innerClass2Instance.innerClassAsInstanceOfAnotherInnerClass.instanceVar"; check(expression, resultExpression, String.class, new String[] {"myDummyClass"}); expression = "myDoubleArray.length"; @@ -1353,38 +1288,31 @@ public void testBadFieldAccess() throws Exception { String expression, resultExpression; PropertySaver p = new PropertySaver(); - p.setProperty("DBLanguageParser.verboseExceptionMessages", "false"); // Better to test with - // non-verbose messages + p.setProperty("DBLanguageParser.verboseExceptionMessages", "false"); // Better to test with non-verbose messages try { // First, test just bad field name try { expression = "myDummyInnerClass.staticVarThatDoesNotExist"; resultExpression = "myDummyInnerClass.staticVarThatDoesNotExist"; - check(expression, resultExpression, String.class, - new String[] {"myDummyInnerClass"}); + check(expression, resultExpression, String.class, new String[] {"myDummyInnerClass"}); fail("Should have thrown a DBLanguageParser.QueryLanguageParseException"); } catch (DBLanguageParser.QueryLanguageParseException ex) { if (!ex.getMessage().contains("Scope : myDummyInnerClass") || - !ex.getMessage().contains("Field Name : staticVarThatDoesNotExist")) { - fail("Useless exception message!\nOriginal exception:\n" - + ExceptionUtils.getStackTrace(ex)); + !ex.getMessage().contains("Field Name : staticVarThatDoesNotExist")) { + fail("Useless exception message!\nOriginal exception:\n" + ExceptionUtils.getStackTrace(ex)); } } // Then do the same thing on a class name (not a variable) try { - expression = - "DBLanguageParserDummyClass.StaticNestedClass.staticVarThatDoesNotExist"; - resultExpression = - "DBLanguageParserDummyClass.StaticNestedClass.staticVarThatDoesNotExist"; + expression = "DBLanguageParserDummyClass.StaticNestedClass.staticVarThatDoesNotExist"; + resultExpression = "DBLanguageParserDummyClass.StaticNestedClass.staticVarThatDoesNotExist"; check(expression, resultExpression, String.class, new String[] {}); 
fail("Should have thrown a DBLanguageParser.QueryLanguageParseException"); } catch (DBLanguageParser.QueryLanguageParseException ex) { - if (!ex.getMessage() - .contains("Scope : DBLanguageParserDummyClass.StaticNestedClass") || - !ex.getMessage().contains("Field Name : staticVarThatDoesNotExist")) { - fail("Useless exception message!\nOriginal exception:\n" - + ExceptionUtils.getStackTrace(ex)); + if (!ex.getMessage().contains("Scope : DBLanguageParserDummyClass.StaticNestedClass") || + !ex.getMessage().contains("Field Name : staticVarThatDoesNotExist")) { + fail("Useless exception message!\nOriginal exception:\n" + ExceptionUtils.getStackTrace(ex)); } } @@ -1392,61 +1320,52 @@ public void testBadFieldAccess() throws Exception { try { expression = "myDummyNonExistentInnerClass.staticVar"; resultExpression = "myDummyNonExistentInnerClass.staticVar"; - check(expression, resultExpression, String.class, - new String[] {"myDummyInnerClass"}); + check(expression, resultExpression, String.class, new String[] {"myDummyInnerClass"}); fail("Should have thrown a DBLanguageParser.QueryLanguageParseException"); } catch (DBLanguageParser.QueryLanguageParseException ex) { if (!ex.getMessage().contains("Scope : myDummyNonExistentInnerClass") || - !ex.getMessage().contains("Field Name : staticVar")) { - fail("Useless exception message!\nOriginal exception:\n" - + ExceptionUtils.getStackTrace(ex)); + !ex.getMessage().contains("Field Name : staticVar")) { + fail("Useless exception message!\nOriginal exception:\n" + ExceptionUtils.getStackTrace(ex)); } } try { expression = "DBLanguageParserNonExistentDummyClass.StaticNestedClass.staticVar"; - resultExpression = - "DBLanguageParserNonExistentDummyClass.StaticNestedClass.staticVar"; + resultExpression = "DBLanguageParserNonExistentDummyClass.StaticNestedClass.staticVar"; check(expression, resultExpression, String.class, new String[] {}); fail("Should have thrown a DBLanguageParser.QueryLanguageParseException"); } catch 
(DBLanguageParser.QueryLanguageParseException ex) { - if (!ex.getMessage().contains( - "Scope : DBLanguageParserNonExistentDummyClass.StaticNestedClass") || - !ex.getMessage().contains("Field Name : staticVar")) { - fail("Useless exception message!\nOriginal exception:\n" - + ExceptionUtils.getStackTrace(ex)); + if (!ex.getMessage().contains("Scope : DBLanguageParserNonExistentDummyClass.StaticNestedClass") || + !ex.getMessage().contains("Field Name : staticVar")) { + fail("Useless exception message!\nOriginal exception:\n" + ExceptionUtils.getStackTrace(ex)); } } /* - * Also test within a method call. This is essentially the case that prompted the fix. - * The actual issue with the expression is that the enclosing class name is omitted when - * trying to access the nested enum (NestedEnum.ONE), but the user experience was poor - * because the exception was very unclear. + * Also test within a method call. This is essentially the case that prompted the fix. The actual issue with + * the expression is that the enclosing class name is omitted when trying to access the nested enum + * (NestedEnum.ONE), but the user experience was poor because the exception was very unclear. * * There is a test of the proper expression in testComplexExpressions(). 
*/ try { expression = - "io.deephaven.db.tables.lang.DBLanguageParserDummyClass.interpolate(myDoubleDBArray.toArray(),myDoubleDBArray.toArray(),new double[]{myDouble},io.deephaven.db.tables.lang.NestedEnum.ONE,false)[0]"; + "io.deephaven.db.tables.lang.DBLanguageParserDummyClass.interpolate(myDoubleDBArray.toArray(),myDoubleDBArray.toArray(),new double[]{myDouble},io.deephaven.db.tables.lang.NestedEnum.ONE,false)[0]"; resultExpression = - "io.deephaven.db.tables.lang.DBLanguageParserDummyClass.interpolate(myDoubleDBArray.toArray(),myDoubleDBArray.toArray(),new double[]{myDouble},io.deephaven.db.tables.lang.NestedEnum.ONE,false)[0]"; + "io.deephaven.db.tables.lang.DBLanguageParserDummyClass.interpolate(myDoubleDBArray.toArray(),myDoubleDBArray.toArray(),new double[]{myDouble},io.deephaven.db.tables.lang.NestedEnum.ONE,false)[0]"; check(expression, resultExpression, String.class, new String[] {}); fail("Should have thrown a DBLanguageParser.QueryLanguageParseException"); } catch (DBLanguageParser.QueryLanguageParseException ex) { - if (!ex.getMessage().contains("Scope : io.deephaven.db.tables.lang.NestedEnum") - || - !ex.getMessage().contains("Field Name : ONE")) { - fail("Useless exception message!\n\nOriginal exception:\n" - + ExceptionUtils.getStackTrace(ex)); + if (!ex.getMessage().contains("Scope : io.deephaven.db.tables.lang.NestedEnum") || + !ex.getMessage().contains("Field Name : ONE")) { + fail("Useless exception message!\n\nOriginal exception:\n" + ExceptionUtils.getStackTrace(ex)); } } - // Ensure that when we can't resolve a field, we explicitly state the scope type. (We've - // struggled + // Ensure that when we can't resolve a field, we explicitly state the scope type. (We've struggled // supporting customers in the past when the scope type was unclear.) 
try { expression = "myTable[myTable.length-1]"; @@ -1455,10 +1374,9 @@ public void testBadFieldAccess() throws Exception { fail("Should have thrown a DBLanguageParser.QueryLanguageParseException"); } catch (DBLanguageParser.QueryLanguageParseException ex) { if (!ex.getMessage().contains("Scope : myTable") || - !ex.getMessage().contains("Scope Type : " + Table.class.getCanonicalName()) || - !ex.getMessage().contains("Field Name : length")) { - fail("Useless exception message!\n\nOriginal exception:\n" - + ExceptionUtils.getStackTrace(ex)); + !ex.getMessage().contains("Scope Type : " + Table.class.getCanonicalName()) || + !ex.getMessage().contains("Field Name : length")) { + fail("Useless exception message!\n\nOriginal exception:\n" + ExceptionUtils.getStackTrace(ex)); } } @@ -1471,8 +1389,7 @@ public void testBadFieldAccess() throws Exception { public void testEnums() throws Exception { String expression = "myEnumValue"; String resultExpression = "myEnumValue"; - check(expression, resultExpression, DBLanguageParserDummyEnum.class, - new String[] {"myEnumValue"}); + check(expression, resultExpression, DBLanguageParserDummyEnum.class, new String[] {"myEnumValue"}); expression = "myEnumValue.getAttribute()"; resultExpression = "myEnumValue.getAttribute()"; @@ -1488,39 +1405,30 @@ public void testEnums() throws Exception { expression = "DBLanguageParserDummyInterface.AnEnum.THING_ONE"; resultExpression = "DBLanguageParserDummyInterface.AnEnum.THING_ONE"; - check(expression, resultExpression, DBLanguageParserDummyInterface.AnEnum.class, - new String[] {}); + check(expression, resultExpression, DBLanguageParserDummyInterface.AnEnum.class, new String[] {}); expression = "io.deephaven.db.tables.lang.DBLanguageParserDummyInterface.AnEnum.THING_ONE"; - resultExpression = - "io.deephaven.db.tables.lang.DBLanguageParserDummyInterface.AnEnum.THING_ONE"; - check(expression, resultExpression, DBLanguageParserDummyInterface.AnEnum.class, - new String[] {}); + resultExpression = 
"io.deephaven.db.tables.lang.DBLanguageParserDummyInterface.AnEnum.THING_ONE"; + check(expression, resultExpression, DBLanguageParserDummyInterface.AnEnum.class, new String[] {}); - expression = - "DBLanguageParserDummyClass.SubclassOfDBLanguageParserDummyClass.EnumInInterface.THING_ONE"; - resultExpression = - "DBLanguageParserDummyClass.SubclassOfDBLanguageParserDummyClass.EnumInInterface.THING_ONE"; + expression = "DBLanguageParserDummyClass.SubclassOfDBLanguageParserDummyClass.EnumInInterface.THING_ONE"; + resultExpression = "DBLanguageParserDummyClass.SubclassOfDBLanguageParserDummyClass.EnumInInterface.THING_ONE"; check(expression, resultExpression, - DBLanguageParserDummyClass.SubclassOfDBLanguageParserDummyClass.EnumInInterface.class, - new String[] {}); + DBLanguageParserDummyClass.SubclassOfDBLanguageParserDummyClass.EnumInInterface.class, new String[] {}); - expression = - "DBLanguageParserDummyClass.functionWithEnumAsArgs(DBLanguageParserDummyEnum.ONE)"; - resultExpression = - "DBLanguageParserDummyClass.functionWithEnumAsArgs(DBLanguageParserDummyEnum.ONE)"; + expression = "DBLanguageParserDummyClass.functionWithEnumAsArgs(DBLanguageParserDummyEnum.ONE)"; + resultExpression = "DBLanguageParserDummyClass.functionWithEnumAsArgs(DBLanguageParserDummyEnum.ONE)"; check(expression, resultExpression, int.class, new String[] {}); expression = - "DBLanguageParserDummyClass.functionWithEnumAsArgs(DBLanguageParserDummyInterface.AnEnum.THING_ONE)"; + "DBLanguageParserDummyClass.functionWithEnumAsArgs(DBLanguageParserDummyInterface.AnEnum.THING_ONE)"; resultExpression = - "DBLanguageParserDummyClass.functionWithEnumAsArgs(DBLanguageParserDummyInterface.AnEnum.THING_ONE)"; + "DBLanguageParserDummyClass.functionWithEnumAsArgs(DBLanguageParserDummyInterface.AnEnum.THING_ONE)"; check(expression, resultExpression, int.class, new String[] {}); - expression = - "DBLanguageParserDummyClass.functionWithEnumVarArgs(myEnumValue, DBLanguageParserDummyEnum.ONE)"; + expression = 
"DBLanguageParserDummyClass.functionWithEnumVarArgs(myEnumValue, DBLanguageParserDummyEnum.ONE)"; resultExpression = - "DBLanguageParserDummyClass.functionWithEnumVarArgs(myEnumValue, DBLanguageParserDummyEnum.ONE)"; + "DBLanguageParserDummyClass.functionWithEnumVarArgs(myEnumValue, DBLanguageParserDummyEnum.ONE)"; check(expression, resultExpression, int.class, new String[] {"myEnumValue"}); } @@ -1542,20 +1450,16 @@ public void testBoxing() throws Exception { check(expression, resultExpression, double.class, new String[] {"myIntObj", "myLong"}); expression = "myDoubleObj+myLongObj+3*4"; - resultExpression = - "plus(plus(myDoubleObj.doubleValue(), myLongObj.longValue()), times(3, 4))"; - check(expression, resultExpression, double.class, - new String[] {"myDoubleObj", "myLongObj"}); + resultExpression = "plus(plus(myDoubleObj.doubleValue(), myLongObj.longValue()), times(3, 4))"; + check(expression, resultExpression, double.class, new String[] {"myDoubleObj", "myLongObj"}); expression = "1==myIntObj"; resultExpression = "eq(1, myIntObj.intValue())"; check(expression, resultExpression, boolean.class, new String[] {"myIntObj"}); expression = "myInt>1+2*myIntObj/4 || myBooleanObj"; - resultExpression = - "greater(myInt, plus(1, divide(times(2, myIntObj.intValue()), 4)))||myBooleanObj"; - check(expression, resultExpression, boolean.class, - new String[] {"myBooleanObj", "myInt", "myIntObj"}); + resultExpression = "greater(myInt, plus(1, divide(times(2, myIntObj.intValue()), 4)))||myBooleanObj"; + check(expression, resultExpression, boolean.class, new String[] {"myBooleanObj", "myInt", "myIntObj"}); expression = "myIntObj+myString"; resultExpression = "myIntObj+myString"; @@ -1568,18 +1472,16 @@ public void testBoxing() throws Exception { public void testEqualsConversion() throws Exception { DBLanguageParser.Result result = - new DBLanguageParser("1==1", null, null, staticImports, null, null).getResult(); + new DBLanguageParser("1==1", null, null, staticImports, null, 
null).getResult(); assertEquals("eq(1, 1)", result.getConvertedExpression()); result = new DBLanguageParser("1=1", null, null, staticImports, null, null).getResult(); assertEquals("eq(1, 1)", result.getConvertedExpression()); - result = - new DBLanguageParser("`me`=`you`", null, null, staticImports, null, null).getResult(); + result = new DBLanguageParser("`me`=`you`", null, null, staticImports, null, null).getResult(); assertEquals("eq(\"me\", \"you\")", result.getConvertedExpression()); - result = new DBLanguageParser("1=1 || 2=2 && (3=3 && 4==4)", null, null, staticImports, - null, null).getResult(); + result = new DBLanguageParser("1=1 || 2=2 && (3=3 && 4==4)", null, null, staticImports, null, null).getResult(); assertEquals("eq(1, 1)||eq(2, 2)&&(eq(3, 3)&&eq(4, 4))", result.getConvertedExpression()); result = new DBLanguageParser("1<=1", null, null, staticImports, null, null).getResult(); @@ -1593,29 +1495,25 @@ public void testEqualsConversion() throws Exception { } /** - * In order to support the null values defined in {@link QueryConstants}, language parser - * converts the equality and relational operators into method calls. + * In order to support the null values defined in {@link QueryConstants}, language parser converts the equality and + * relational operators into method calls. 
*/ public void testComparisonConversion() throws Exception { String expression = "myTestClass>myIntObj"; String resultExpression = "greater(myTestClass, myIntObj.intValue())"; - check(expression, resultExpression, boolean.class, - new String[] {"myIntObj", "myTestClass"}); + check(expression, resultExpression, boolean.class, new String[] {"myIntObj", "myTestClass"}); expression = "myTestClass>=myIntObj"; resultExpression = "greaterEquals(myTestClass, myIntObj.intValue())"; - check(expression, resultExpression, boolean.class, - new String[] {"myIntObj", "myTestClass"}); + check(expression, resultExpression, boolean.class, new String[] {"myIntObj", "myTestClass"}); expression = "myTestClass 0 ? myString=Double.toString(myDouble) ? `1` : `2` : new DBLanguageParserDummyClass().toString() }).count()"; + "java.util.stream.Stream.of(new String[] { `a`, `b`, `c`, myInt > 0 ? myString=Double.toString(myDouble) ? `1` : `2` : new DBLanguageParserDummyClass().toString() }).count()"; String resultExpression = - "java.util.stream.Stream.of(new String[] { \"a\", \"b\", \"c\", greater(myInt, 0) ? eq(myString, Double.toString(myDouble)) ? \"1\" : \"2\" : new DBLanguageParserDummyClass().toString() }).count()"; - check(expression, resultExpression, long.class, - new String[] {"myDouble", "myInt", "myString"}); + "java.util.stream.Stream.of(new String[] { \"a\", \"b\", \"c\", greater(myInt, 0) ? eq(myString, Double.toString(myDouble)) ? \"1\" : \"2\" : new DBLanguageParserDummyClass().toString() }).count()"; + check(expression, resultExpression, long.class, new String[] {"myDouble", "myInt", "myString"}); expression = "myDummyClass.innerClassInstance.staticVar == 1_000_000L" + - "? new int[] { java.util.stream.Stream.of(new String[] { `a`, `b`, `c`, myInt > 0 ? myString=Double.toString(myDouble) ? `1` : `2` : new DBLanguageParserDummyClass().toString() }).count() }" - + - ": myIntArray"; + "? new int[] { java.util.stream.Stream.of(new String[] { `a`, `b`, `c`, myInt > 0 ? 
myString=Double.toString(myDouble) ? `1` : `2` : new DBLanguageParserDummyClass().toString() }).count() }" + + + ": myIntArray"; resultExpression = "eq(myDummyClass.innerClassInstance.staticVar, 1_000_000L)" + - " ? new int[] { java.util.stream.Stream.of(new String[] { \"a\", \"b\", \"c\", greater(myInt, 0) ? eq(myString, Double.toString(myDouble)) ? \"1\" : \"2\" : new DBLanguageParserDummyClass().toString() }).count() }" - + - " : myIntArray"; + " ? new int[] { java.util.stream.Stream.of(new String[] { \"a\", \"b\", \"c\", greater(myInt, 0) ? eq(myString, Double.toString(myDouble)) ? \"1\" : \"2\" : new DBLanguageParserDummyClass().toString() }).count() }" + + + " : myIntArray"; check(expression, resultExpression, int[].class, - new String[] {"myDouble", "myDummyClass", "myInt", "myIntArray", "myString"}); + new String[] {"myDouble", "myDummyClass", "myInt", "myIntArray", "myString"}); // This comes from a strategy query: expression = - "min( abs(ExampleQuantity), (int)round(min(ExampleQuantity2, (`String1`.equals(ExampleStr) ? max(-ExampleQuantity3,0d) : max(ExampleQuantity3,0d))/ExampleQuantity4)))"; + "min( abs(ExampleQuantity), (int)round(min(ExampleQuantity2, (`String1`.equals(ExampleStr) ? max(-ExampleQuantity3,0d) : max(ExampleQuantity3,0d))/ExampleQuantity4)))"; resultExpression = - "min(abs(ExampleQuantity), intCast(round(min(ExampleQuantity2, divide((\"String1\".equals(ExampleStr) ? max(negate(ExampleQuantity3), 0d) : max(ExampleQuantity3, 0d)), ExampleQuantity4)))))"; - check(expression, resultExpression, int.class, new String[] {"ExampleQuantity", - "ExampleQuantity2", "ExampleQuantity3", "ExampleQuantity4", "ExampleStr"}); + "min(abs(ExampleQuantity), intCast(round(min(ExampleQuantity2, divide((\"String1\".equals(ExampleStr) ? 
max(negate(ExampleQuantity3), 0d) : max(ExampleQuantity3, 0d)), ExampleQuantity4)))))"; + check(expression, resultExpression, int.class, new String[] {"ExampleQuantity", "ExampleQuantity2", + "ExampleQuantity3", "ExampleQuantity4", "ExampleStr"}); // There is a test for an erroneous version of this expression in testBadFieldAccess(): expression = - "io.deephaven.db.tables.lang.DBLanguageParserDummyClass.interpolate(myDoubleDBArray.toArray(),myDoubleDBArray.toArray(),new double[]{myDouble},io.deephaven.db.tables.lang.DBLanguageParserDummyClass.NestedEnum.ONE,false)[0]"; + "io.deephaven.db.tables.lang.DBLanguageParserDummyClass.interpolate(myDoubleDBArray.toArray(),myDoubleDBArray.toArray(),new double[]{myDouble},io.deephaven.db.tables.lang.DBLanguageParserDummyClass.NestedEnum.ONE,false)[0]"; resultExpression = - "io.deephaven.db.tables.lang.DBLanguageParserDummyClass.interpolate(myDoubleDBArray.toArray(), myDoubleDBArray.toArray(), new double[] { myDouble }, io.deephaven.db.tables.lang.DBLanguageParserDummyClass.NestedEnum.ONE, false)[0]"; - check(expression, resultExpression, double.class, - new String[] {"myDouble", "myDoubleDBArray"}); + "io.deephaven.db.tables.lang.DBLanguageParserDummyClass.interpolate(myDoubleDBArray.toArray(), myDoubleDBArray.toArray(), new double[] { myDouble }, io.deephaven.db.tables.lang.DBLanguageParserDummyClass.NestedEnum.ONE, false)[0]"; + check(expression, resultExpression, double.class, new String[] {"myDouble", "myDoubleDBArray"}); // For good measure, same test as above w/ implicit array conversions: expression = - "DBLanguageParserDummyClass.interpolate(myDoubleDBArray,myDoubleDBArray,new double[]{myDouble},DBLanguageParserDummyClass.NestedEnum.ONE,false)[0]"; + "DBLanguageParserDummyClass.interpolate(myDoubleDBArray,myDoubleDBArray,new double[]{myDouble},DBLanguageParserDummyClass.NestedEnum.ONE,false)[0]"; resultExpression = - "DBLanguageParserDummyClass.interpolate(ArrayUtils.nullSafeDbArrayToArray(myDoubleDBArray), 
ArrayUtils.nullSafeDbArrayToArray(myDoubleDBArray), new double[] { myDouble }, DBLanguageParserDummyClass.NestedEnum.ONE, false)[0]"; - check(expression, resultExpression, double.class, - new String[] {"myDouble", "myDoubleDBArray"}); + "DBLanguageParserDummyClass.interpolate(ArrayUtils.nullSafeDbArrayToArray(myDoubleDBArray), ArrayUtils.nullSafeDbArrayToArray(myDoubleDBArray), new double[] { myDouble }, DBLanguageParserDummyClass.NestedEnum.ONE, false)[0]"; + check(expression, resultExpression, double.class, new String[] {"myDouble", "myDoubleDBArray"}); } public void testUnsupportedOperators() throws Exception { @@ -1991,147 +1874,147 @@ public void testUnsupportedOperators() throws Exception { public void testIsWideningPrimitiveConversion() { { Require.eqFalse(isWideningPrimitiveConversion(byte.class, byte.class), - "isWideningPrimitiveConversion(byte.class, byte.class)"); + "isWideningPrimitiveConversion(byte.class, byte.class)"); Require.eqFalse(isWideningPrimitiveConversion(byte.class, char.class), - "isWideningPrimitiveConversion(byte.class, char.class)"); + "isWideningPrimitiveConversion(byte.class, char.class)"); Require.eqTrue(isWideningPrimitiveConversion(byte.class, short.class), - "isWideningPrimitiveConversion(byte.class, short.class)"); + "isWideningPrimitiveConversion(byte.class, short.class)"); Require.eqTrue(isWideningPrimitiveConversion(byte.class, int.class), - "isWideningPrimitiveConversion(byte.class, int.class)"); + "isWideningPrimitiveConversion(byte.class, int.class)"); Require.eqTrue(isWideningPrimitiveConversion(byte.class, long.class), - "isWideningPrimitiveConversion(byte.class, long.class)"); + "isWideningPrimitiveConversion(byte.class, long.class)"); Require.eqTrue(isWideningPrimitiveConversion(byte.class, float.class), - "isWideningPrimitiveConversion(byte.class, float.class)"); + "isWideningPrimitiveConversion(byte.class, float.class)"); Require.eqTrue(isWideningPrimitiveConversion(byte.class, double.class), - 
"isWideningPrimitiveConversion(byte.class, double.class)"); + "isWideningPrimitiveConversion(byte.class, double.class)"); } { Require.eqFalse(isWideningPrimitiveConversion(short.class, byte.class), - "isWideningPrimitiveConversion(short.class, byte.class)"); + "isWideningPrimitiveConversion(short.class, byte.class)"); Require.eqFalse(isWideningPrimitiveConversion(short.class, short.class), - "isWideningPrimitiveConversion(short.class, short.class)"); + "isWideningPrimitiveConversion(short.class, short.class)"); Require.eqFalse(isWideningPrimitiveConversion(short.class, char.class), - "isWideningPrimitiveConversion(short.class, char.class)"); + "isWideningPrimitiveConversion(short.class, char.class)"); Require.eqTrue(isWideningPrimitiveConversion(short.class, int.class), - "isWideningPrimitiveConversion(short.class, int.class)"); + "isWideningPrimitiveConversion(short.class, int.class)"); Require.eqTrue(isWideningPrimitiveConversion(short.class, long.class), - "isWideningPrimitiveConversion(short.class, long.class)"); + "isWideningPrimitiveConversion(short.class, long.class)"); Require.eqTrue(isWideningPrimitiveConversion(short.class, float.class), - "isWideningPrimitiveConversion(short.class, float.class)"); + "isWideningPrimitiveConversion(short.class, float.class)"); Require.eqTrue(isWideningPrimitiveConversion(short.class, double.class), - "isWideningPrimitiveConversion(short.class, double.class)"); + "isWideningPrimitiveConversion(short.class, double.class)"); } { Require.eqFalse(isWideningPrimitiveConversion(char.class, byte.class), - "isWideningPrimitiveConversion(char.class, byte.class)"); + "isWideningPrimitiveConversion(char.class, byte.class)"); Require.eqFalse(isWideningPrimitiveConversion(char.class, short.class), - "isWideningPrimitiveConversion(char.class, short.class)"); + "isWideningPrimitiveConversion(char.class, short.class)"); Require.eqFalse(isWideningPrimitiveConversion(char.class, char.class), - "isWideningPrimitiveConversion(char.class, 
char.class)"); + "isWideningPrimitiveConversion(char.class, char.class)"); Require.eqTrue(isWideningPrimitiveConversion(char.class, int.class), - "isWideningPrimitiveConversion(char.class, int.class)"); + "isWideningPrimitiveConversion(char.class, int.class)"); Require.eqTrue(isWideningPrimitiveConversion(char.class, long.class), - "isWideningPrimitiveConversion(char.class, long.class)"); + "isWideningPrimitiveConversion(char.class, long.class)"); Require.eqTrue(isWideningPrimitiveConversion(char.class, float.class), - "isWideningPrimitiveConversion(char.class, float.class)"); + "isWideningPrimitiveConversion(char.class, float.class)"); Require.eqTrue(isWideningPrimitiveConversion(char.class, double.class), - "isWideningPrimitiveConversion(char.class, double.class)"); + "isWideningPrimitiveConversion(char.class, double.class)"); } { Require.eqFalse(isWideningPrimitiveConversion(int.class, byte.class), - "isWideningPrimitiveConversion(int.class, byte.class)"); + "isWideningPrimitiveConversion(int.class, byte.class)"); Require.eqFalse(isWideningPrimitiveConversion(int.class, short.class), - "isWideningPrimitiveConversion(int.class, short.class)"); + "isWideningPrimitiveConversion(int.class, short.class)"); Require.eqFalse(isWideningPrimitiveConversion(int.class, char.class), - "isWideningPrimitiveConversion(int.class, char.class)"); + "isWideningPrimitiveConversion(int.class, char.class)"); Require.eqFalse(isWideningPrimitiveConversion(int.class, int.class), - "isWideningPrimitiveConversion(int.class, int.class)"); + "isWideningPrimitiveConversion(int.class, int.class)"); Require.eqTrue(isWideningPrimitiveConversion(int.class, long.class), - "isWideningPrimitiveConversion(int.class, long.class)"); + "isWideningPrimitiveConversion(int.class, long.class)"); Require.eqTrue(isWideningPrimitiveConversion(int.class, float.class), - "isWideningPrimitiveConversion(int.class, float.class)"); + "isWideningPrimitiveConversion(int.class, float.class)"); 
Require.eqTrue(isWideningPrimitiveConversion(int.class, double.class), - "isWideningPrimitiveConversion(int.class, double.class)"); + "isWideningPrimitiveConversion(int.class, double.class)"); } { Require.eqFalse(isWideningPrimitiveConversion(long.class, byte.class), - "isWideningPrimitiveConversion(long.class, byte.class)"); + "isWideningPrimitiveConversion(long.class, byte.class)"); Require.eqFalse(isWideningPrimitiveConversion(long.class, short.class), - "isWideningPrimitiveConversion(long.class, short.class)"); + "isWideningPrimitiveConversion(long.class, short.class)"); Require.eqFalse(isWideningPrimitiveConversion(long.class, char.class), - "isWideningPrimitiveConversion(long.class, char.class)"); + "isWideningPrimitiveConversion(long.class, char.class)"); Require.eqFalse(isWideningPrimitiveConversion(long.class, int.class), - "isWideningPrimitiveConversion(long.class, int.class)"); + "isWideningPrimitiveConversion(long.class, int.class)"); Require.eqFalse(isWideningPrimitiveConversion(long.class, long.class), - "isWideningPrimitiveConversion(long.class, long.class)"); + "isWideningPrimitiveConversion(long.class, long.class)"); Require.eqTrue(isWideningPrimitiveConversion(long.class, float.class), - "isWideningPrimitiveConversion(long.class, float.class)"); + "isWideningPrimitiveConversion(long.class, float.class)"); Require.eqTrue(isWideningPrimitiveConversion(long.class, double.class), - "isWideningPrimitiveConversion(long.class, double.class)"); + "isWideningPrimitiveConversion(long.class, double.class)"); } { Require.eqFalse(isWideningPrimitiveConversion(float.class, byte.class), - "isWideningPrimitiveConversion(float.class, byte.class)"); + "isWideningPrimitiveConversion(float.class, byte.class)"); Require.eqFalse(isWideningPrimitiveConversion(float.class, short.class), - "isWideningPrimitiveConversion(float.class, short.class)"); + "isWideningPrimitiveConversion(float.class, short.class)"); Require.eqFalse(isWideningPrimitiveConversion(float.class, 
char.class), - "isWideningPrimitiveConversion(float.class, char.class)"); + "isWideningPrimitiveConversion(float.class, char.class)"); Require.eqFalse(isWideningPrimitiveConversion(float.class, int.class), - "isWideningPrimitiveConversion(float.class, int.class)"); + "isWideningPrimitiveConversion(float.class, int.class)"); Require.eqFalse(isWideningPrimitiveConversion(float.class, long.class), - "isWideningPrimitiveConversion(float.class, long.class)"); + "isWideningPrimitiveConversion(float.class, long.class)"); Require.eqTrue(isWideningPrimitiveConversion(float.class, double.class), - "isWideningPrimitiveConversion(float.class, double.class)"); + "isWideningPrimitiveConversion(float.class, double.class)"); } { Require.eqFalse(isWideningPrimitiveConversion(double.class, byte.class), - "isWideningPrimitiveConversion(double.class, byte.class)"); + "isWideningPrimitiveConversion(double.class, byte.class)"); Require.eqFalse(isWideningPrimitiveConversion(double.class, short.class), - "isWideningPrimitiveConversion(double.class, short.class)"); + "isWideningPrimitiveConversion(double.class, short.class)"); Require.eqFalse(isWideningPrimitiveConversion(double.class, char.class), - "isWideningPrimitiveConversion(double.class, char.class)"); + "isWideningPrimitiveConversion(double.class, char.class)"); Require.eqFalse(isWideningPrimitiveConversion(double.class, int.class), - "isWideningPrimitiveConversion(double.class, int.class)"); + "isWideningPrimitiveConversion(double.class, int.class)"); Require.eqFalse(isWideningPrimitiveConversion(double.class, long.class), - "isWideningPrimitiveConversion(double.class, long.class)"); + "isWideningPrimitiveConversion(double.class, long.class)"); Require.eqFalse(isWideningPrimitiveConversion(double.class, float.class), - "isWideningPrimitiveConversion(double.class, float.class)"); + "isWideningPrimitiveConversion(double.class, float.class)"); Require.eqFalse(isWideningPrimitiveConversion(double.class, double.class), - 
"isWideningPrimitiveConversion(double.class, long.class)"); + "isWideningPrimitiveConversion(double.class, long.class)"); } { Require.eqFalse(isWideningPrimitiveConversion(boolean.class, byte.class), - "isWideningPrimitiveConversion(boolean.class, byte.class)"); + "isWideningPrimitiveConversion(boolean.class, byte.class)"); Require.eqFalse(isWideningPrimitiveConversion(boolean.class, short.class), - "isWideningPrimitiveConversion(boolean.class, short.class)"); + "isWideningPrimitiveConversion(boolean.class, short.class)"); Require.eqFalse(isWideningPrimitiveConversion(boolean.class, char.class), - "isWideningPrimitiveConversion(boolean.class, char.class)"); + "isWideningPrimitiveConversion(boolean.class, char.class)"); Require.eqFalse(isWideningPrimitiveConversion(boolean.class, int.class), - "isWideningPrimitiveConversion(boolean.class, int.class)"); + "isWideningPrimitiveConversion(boolean.class, int.class)"); Require.eqFalse(isWideningPrimitiveConversion(boolean.class, long.class), - "isWideningPrimitiveConversion(boolean.class, long.class)"); + "isWideningPrimitiveConversion(boolean.class, long.class)"); Require.eqFalse(isWideningPrimitiveConversion(boolean.class, float.class), - "isWideningPrimitiveConversion(boolean.class, float.class)"); + "isWideningPrimitiveConversion(boolean.class, float.class)"); Require.eqFalse(isWideningPrimitiveConversion(boolean.class, double.class), - "isWideningPrimitiveConversion(boolean.class, double.class)"); + "isWideningPrimitiveConversion(boolean.class, double.class)"); Require.eqFalse(isWideningPrimitiveConversion(boolean.class, boolean.class), - "isWideningPrimitiveConversion(boolean.class, boolean.class)"); + "isWideningPrimitiveConversion(boolean.class, boolean.class)"); } } @@ -2146,18 +2029,15 @@ public void testClassExpr() throws Exception { expression = "DBLanguageParserDummyClass.class"; resultExpression = "DBLanguageParserDummyClass.class"; - check(expression, resultExpression, 
DBLanguageParserDummyClass.class.getClass(), - new String[] {}); + check(expression, resultExpression, DBLanguageParserDummyClass.class.getClass(), new String[] {}); expression = "DBLanguageParserDummyClass.InnerClass.class"; resultExpression = "DBLanguageParserDummyClass.InnerClass.class"; - check(expression, resultExpression, DBLanguageParserDummyClass.InnerClass.class.getClass(), - new String[] {}); + check(expression, resultExpression, DBLanguageParserDummyClass.InnerClass.class.getClass(), new String[] {}); expression = "DBLanguageParserDummyInterface.class"; resultExpression = "DBLanguageParserDummyInterface.class"; - check(expression, resultExpression, DBLanguageParserDummyInterface.class.getClass(), - new String[] {}); + check(expression, resultExpression, DBLanguageParserDummyInterface.class.getClass(), new String[] {}); } public void testInvalidExpr() throws Exception { @@ -2187,8 +2067,7 @@ public void testInvalidExpr() throws Exception { // this was getting picked up as invalid, so we're ensuring here that it is not an error. 
expression = "23 >= plus(System.currentTimeMillis(), 12)"; - check(expression, "greaterEquals(23, plus(System.currentTimeMillis(), 12))", boolean.class, - new String[0]); + check(expression, "greaterEquals(23, plus(System.currentTimeMillis(), 12))", boolean.class, new String[0]); } private void expectFailure(String expression, Class resultType) throws Exception { @@ -2199,10 +2078,10 @@ private void expectFailure(String expression, Class resultType) throws Exception } } - private void check(String expression, String resultExpression, Class resultType, - String resultVarsUsed[]) throws Exception { - DBLanguageParser.Result result = new DBLanguageParser(expression, packageImports, - classImports, staticImports, variables, variableParameterizedTypes).getResult(); + private void check(String expression, String resultExpression, Class resultType, String resultVarsUsed[]) + throws Exception { + DBLanguageParser.Result result = new DBLanguageParser(expression, packageImports, classImports, staticImports, + variables, variableParameterizedTypes).getResult(); assertEquals(resultType, result.getType()); assertEquals(resultExpression, result.getConvertedExpression()); diff --git a/DB/src/test/java/io/deephaven/db/tables/libs/QueryLibraryTest.java b/DB/src/test/java/io/deephaven/db/tables/libs/QueryLibraryTest.java index b842ff8f0a5..5e38bd2497e 100644 --- a/DB/src/test/java/io/deephaven/db/tables/libs/QueryLibraryTest.java +++ b/DB/src/test/java/io/deephaven/db/tables/libs/QueryLibraryTest.java @@ -24,25 +24,23 @@ protected void tearDown() throws Exception { public void testImportClass() { assertFalse(QueryLibrary.getImportStatement().build() - .contains("import java.util.concurrent.ConcurrentLinkedDeque;")); + .contains("import java.util.concurrent.ConcurrentLinkedDeque;")); QueryLibrary.importClass(ConcurrentLinkedDeque.class); assertTrue(QueryLibrary.getImportStatement().build() - .contains("import java.util.concurrent.ConcurrentLinkedDeque;")); + .contains("import 
java.util.concurrent.ConcurrentLinkedDeque;")); } public void testPackageClass() { - assertFalse( - QueryLibrary.getImportStatement().build().contains("import java.util.concurrent.*;")); + assertFalse(QueryLibrary.getImportStatement().build().contains("import java.util.concurrent.*;")); QueryLibrary.importPackage(Package.getPackage("java.util.concurrent")); - assertTrue( - QueryLibrary.getImportStatement().build().contains("import java.util.concurrent.*;")); + assertTrue(QueryLibrary.getImportStatement().build().contains("import java.util.concurrent.*;")); } public void testImportStatic() { assertFalse(QueryLibrary.getImportStatement().build() - .contains("import static java.util.concurrent.ConcurrentHashMap.*;")); + .contains("import static java.util.concurrent.ConcurrentHashMap.*;")); QueryLibrary.importStatic(ConcurrentHashMap.class); assertTrue(QueryLibrary.getImportStatement().build() - .contains("import static java.util.concurrent.ConcurrentHashMap.*;")); + .contains("import static java.util.concurrent.ConcurrentHashMap.*;")); } } diff --git a/DB/src/test/java/io/deephaven/db/tables/live/TestConstructSnapshot.java b/DB/src/test/java/io/deephaven/db/tables/live/TestConstructSnapshot.java index b4cd4a17cbf..278a3eed269 100644 --- a/DB/src/test/java/io/deephaven/db/tables/live/TestConstructSnapshot.java +++ b/DB/src/test/java/io/deephaven/db/tables/live/TestConstructSnapshot.java @@ -17,19 +17,18 @@ public Boolean usePreviousValues(long beforeClockValue) { } @Override - public boolean snapshotConsistent(final long currentClockValue, - final boolean usingPreviousValues) { + public boolean snapshotConsistent(final long currentClockValue, final boolean usingPreviousValues) { return true; } }; - Runnable snapshot_test = () -> ConstructSnapshot.callDataSnapshotFunction("snapshot test", - control, (usePrev, beforeClock) -> { - SleepUtil.sleep(1000); - if (ConstructSnapshot.concurrentAttemptInconsistent()) { - changed.increment(); - } - return true; - }); + Runnable 
snapshot_test = + () -> ConstructSnapshot.callDataSnapshotFunction("snapshot test", control, (usePrev, beforeClock) -> { + SleepUtil.sleep(1000); + if (ConstructSnapshot.concurrentAttemptInconsistent()) { + changed.increment(); + } + return true; + }); changed.setValue(0); final Thread t = new Thread(snapshot_test); diff --git a/DB/src/test/java/io/deephaven/db/tables/live/TestLiveTableMonitorLock.java b/DB/src/test/java/io/deephaven/db/tables/live/TestLiveTableMonitorLock.java index 11496ffdbb7..c39b0a455d3 100644 --- a/DB/src/test/java/io/deephaven/db/tables/live/TestLiveTableMonitorLock.java +++ b/DB/src/test/java/io/deephaven/db/tables/live/TestLiveTableMonitorLock.java @@ -19,16 +19,14 @@ public void testUpgradeFailures() throws InterruptedException { lock.sharedLock().doLocked(() -> { try { - lock.exclusiveLock() - .doLocked(() -> TestCase.fail("Unexpectedly upgraded successfully")); + lock.exclusiveLock().doLocked(() -> TestCase.fail("Unexpectedly upgraded successfully")); } catch (UnsupportedOperationException expected) { } }); lock.sharedLock().doLockedInterruptibly(() -> { try { - lock.exclusiveLock().doLockedInterruptibly( - () -> TestCase.fail("Unexpectedly upgraded successfully")); + lock.exclusiveLock().doLockedInterruptibly(() -> TestCase.fail("Unexpectedly upgraded successfully")); } catch (UnsupportedOperationException expected) { } }); @@ -114,8 +112,8 @@ public void testSharedLockHeld() { }; final MutableBoolean success = new MutableBoolean(false); TestCase.assertFalse(lock.sharedLock().isHeldByCurrentThread()); - lock.sharedLock().doLocked(() -> checkHeld.accept(() -> checkHeld.accept(() -> checkHeld - .accept(() -> checkHeld.accept(() -> checkHeld.accept(success::setTrue)))))); + lock.sharedLock().doLocked(() -> checkHeld.accept(() -> checkHeld + .accept(() -> checkHeld.accept(() -> checkHeld.accept(() -> checkHeld.accept(success::setTrue)))))); TestCase.assertFalse(lock.sharedLock().isHeldByCurrentThread()); 
TestCase.assertTrue(success.getValue()); } @@ -129,8 +127,8 @@ public void testExclusiveLockHeld() { }; final MutableBoolean success = new MutableBoolean(false); TestCase.assertFalse(lock.exclusiveLock().isHeldByCurrentThread()); - lock.exclusiveLock().doLocked(() -> checkHeld.accept(() -> checkHeld.accept(() -> checkHeld - .accept(() -> checkHeld.accept(() -> checkHeld.accept(success::setTrue)))))); + lock.exclusiveLock().doLocked(() -> checkHeld.accept(() -> checkHeld + .accept(() -> checkHeld.accept(() -> checkHeld.accept(() -> checkHeld.accept(success::setTrue)))))); TestCase.assertFalse(lock.exclusiveLock().isHeldByCurrentThread()); } diff --git a/DB/src/test/java/io/deephaven/db/tables/select/SelectFilterFactoryTest.java b/DB/src/test/java/io/deephaven/db/tables/select/SelectFilterFactoryTest.java index 375c4bae2af..45646c992ce 100644 --- a/DB/src/test/java/io/deephaven/db/tables/select/SelectFilterFactoryTest.java +++ b/DB/src/test/java/io/deephaven/db/tables/select/SelectFilterFactoryTest.java @@ -25,8 +25,7 @@ public class SelectFilterFactoryTest extends TestCase { public void testIn() { - assertEquals(MatchFilter.class, - SelectFilterFactory.getExpression("Opra in oprasOfInterest").getClass()); + assertEquals(MatchFilter.class, SelectFilterFactory.getExpression("Opra in oprasOfInterest").getClass()); } public void testSourceColumn() { @@ -36,20 +35,17 @@ public void testSourceColumn() { public void testInComplex() { - assertEquals(MatchFilter.class, - SelectFilterFactory.getExpression("Opra in opra1, opra2, opra3").getClass()); + assertEquals(MatchFilter.class, SelectFilterFactory.getExpression("Opra in opra1, opra2, opra3").getClass()); QueryScope.addParam("pmExpiry", "World"); - assertEquals(MatchFilter.class, - SelectFilterFactory.getExpression("amExpiry = pmExpiry").getClass()); + assertEquals(MatchFilter.class, SelectFilterFactory.getExpression("amExpiry = pmExpiry").getClass()); QueryScope.addParam("amExpiry", "Hello"); 
QueryScope.addParam("pmExpiry", "World"); - SelectFilter filter = - SelectFilterFactory.getExpression("Maturity in amExpiry,pmExpiry , \"AGAIN\" "); + SelectFilter filter = SelectFilterFactory.getExpression("Maturity in amExpiry,pmExpiry , \"AGAIN\" "); assertEquals(MatchFilter.class, filter.getClass()); - TableDefinition tableDef = new TableDefinition( - Collections.singletonList((Class) String.class), Collections.singletonList("Maturity")); + TableDefinition tableDef = new TableDefinition(Collections.singletonList((Class) String.class), + Collections.singletonList("Maturity")); filter.init(tableDef); Object[] values = ((MatchFilter) filter).getValues(); @@ -61,24 +57,23 @@ public void testInComplex() { filter = SelectFilterFactory.getExpression("Maturity in amExpiry,1 , \"AGAIN\" "); assertEquals(MatchFilter.class, filter.getClass()); tableDef = new TableDefinition(Collections.singletonList((Class) String.class), - Collections.singletonList("Maturity")); + Collections.singletonList("Maturity")); try { filter.init(tableDef); } catch (IllegalArgumentException e) { assertEquals(e.getMessage(), - "Failed to convert literal value <1> for column \"Maturity\" of type java.lang.String"); + "Failed to convert literal value <1> for column \"Maturity\" of type java.lang.String"); } } public void testIcase() { - Table t = TableTools.newTable(TableTools.col("Opra", "opra1", "opra2", "opra3", "Opra1", - "Opra2", "Opra3", "Opra4", null)); + Table t = TableTools + .newTable(TableTools.col("Opra", "opra1", "opra2", "opra3", "Opra1", "Opra2", "Opra3", "Opra4", null)); - SelectFilter f = - SelectFilterFactory.getExpression("Opra in `opra1`, `opra2`, `opra3`,`opra4`"); + SelectFilter f = SelectFilterFactory.getExpression("Opra in `opra1`, `opra2`, `opra3`,`opra4`"); f.init(t.getDefinition()); assertEquals(MatchFilter.class, f.getClass()); Index idx = f.filter(t.getIndex().clone(), t.getIndex(), t, false); @@ -121,10 +116,8 @@ public void testIcase() { assertEquals(4, idx.size()); t 
= TstUtils.testRefreshingTable( - TstUtils.c("Opra", "opra1", "opra2", "opra3", "Opra1", "Opra2", "Opra3", "Opra4", null, - "OpRa5"), - TstUtils.cG("Food", "Apple", "Orange", "bacon", "laffa", "pOtato", "carroT", "WafflE", - null, "Apple")); + TstUtils.c("Opra", "opra1", "opra2", "opra3", "Opra1", "Opra2", "Opra3", "Opra4", null, "OpRa5"), + TstUtils.cG("Food", "Apple", "Orange", "bacon", "laffa", "pOtato", "carroT", "WafflE", null, "Apple")); f = SelectFilterFactory.getExpression("Food icase in `apple`, `orange`, `bacon`,`LAFFA`"); f.init(t.getDefinition()); @@ -144,8 +137,7 @@ public void testIcase() { idx = f.filter(t.getIndex().clone(), t.getIndex(), t, false); assertEquals(3, idx.size()); - f = SelectFilterFactory - .getExpression("Food icase not in `apple`, `orange`, `bacon`,`LAFFA`"); + f = SelectFilterFactory.getExpression("Food icase not in `apple`, `orange`, `bacon`,`LAFFA`"); f.init(t.getDefinition()); assertEquals(MatchFilter.class, f.getClass()); idx = f.filter(t.getIndex().clone(), t.getIndex(), t, false); @@ -154,7 +146,7 @@ public void testIcase() { public void testBigIn() { final SelectFilter f = SelectFilterFactory.getExpression( - "USym in 
`A`,`AA`,`AABA`,`AAL`,`AAL1`,`AAN`,`AAOI`,`AAP`,`AAPL`,`AAT`,`AAWW`,`AAXN`,`AB`,`ABB`,`ABBV`,`ABC`,`ABEO`,`ABEV`,`ABG`,`ABMD`,`ABT`,`ABT1`,`ABX`,`ACAD`,`ACC`,`ACGL`,`ACH`,`ACHC`,`ACIA`,`ACIW`,`ACLS`,`ACM`,`ACN`,`ACOR`,`ACWI`,`ADBE`,`ADI`,`ADM`,`ADMP`,`ADNT`,`ADP`,`ADS`,`ADSK`,`ADTN`,`AEE`,`AEIS`,`AEM`,`AEO`,`AEP`,`AER`,`AERI`,`AES`,`AET`,`AFG`,`AFL`,`AFSI`,`AG`,`AGCO`,`AGIO`,`AGN`,`AGNC`,`AGO`,`AGQ`,`AGX`,`AHL`,`AHT`,`AI`,`AIG`,`AIMC`,`AIMT`,`AINV`,`AIR`,`AIV`,`AIZ`,`AJG`,`AJRD`,`AKAM`,`AKAO`,`AKCA`,`AKR`,`AKRX`,`AKS`,`AL`,`ALB`,`ALE`,`ALGN`,`ALGT`,`ALK`,`ALKS`,`ALL`,`ALLE`,`ALLT`,`ALLY`,`ALNY`,`ALRM`,`ALSN`,`ALV`,`ALV1`,`ALXN`,`AM`,`AMAG`,`AMAT`,`AMBA`,`AMBC`,`AMC`,`AMCX`,`AMD`,`AME`,`AMED`,`AMG`,`AMGN`,`AMJ`,`AMKR`,`AMLP`,`AMN`,`AMP`,`AMRN`,`AMRX`,`AMT`,`AMTD`,`AMWD`,`AMX`,`AMZN`,`AN`,`ANAB`,`ANDE`,`ANDX`,`ANET`,`ANF`,`ANGI`,`ANIK`,`ANSS`,`ANTM`,`AOBC`,`AON`,`AOS`,`AOSL`,`APA`,`APAM`,`APC`,`APD`,`APH`,`APO`,`APOG`,`APPN`,`APRN`,`APTI`,`APTV`,`APTV1`,`APU`,`AR`,`ARAY`,`ARCB`,`ARCC`,`ARCH`,`ARCO`,`ARE`,`ARII`,`ARLP`,`ARLP1`,`ARMK`,`ARNC`,`ARNC1`,`ARNC2`,`AROC`,`AROC1`,`ARRS`,`ARW`,`ARWR`,`ASB`,`ASGN`,`ASH`,`ASHR`,`ASML`,`ASNA`,`ASPS`,`ASRT`,`ASTE`,`ATEN`,`ATGE`,`ATH`,`ATHM`,`ATHN`,`ATI`,`ATO`,`ATR`,`ATSG`,`ATU`,`ATUS`,`ATVI`,`AU`,`AUY`,`AVA`,`AVAV`,`AVB`,`AVD`,`AVEO`,`AVGO`,`AVLR`,`AVNS`,`AVP`,`AVT`,`AVX`,`AVY`,`AVYA`,`AWI`,`AWK`,`AX`,`AXE`,`AXL`,`AXP`,`AXS`,`AXTA`,`AY`,`AYI`,`AYR`,`AYX`,`AZN`,`AZO`,`AZPN`,`AZUL`,`AZZ`,`BA`,`BABA`,`BAC`,`BAH`,`BAM`,`BANC`,`BAP`,`BAX`,`BB`,`BBBY`,`BBD`,`BBD1`,`BBL`,`BBT`,`BBW`,`BBY`,`BC`,`BCC`,`BCE`,`BCEI`,`BCO`,`BCOR`,`BCOV`,`BCS`,`BDC`,`BDN`,`BDX`,`BEAT`,`BECN`,`BEL`,`BEN`,`BERY`,`BG`,`BGCP`,`BGFV`,`BGG`,`BGNE`,`BGS`,`BHC`,`BHE`,`BHF`,`BHGE`,`BHGE1`,`BHP`,`BHR`,`BIB`,`BID`,`BIDU`,`BIG`,`BIIB`,`BIIB1`,`BILI`,`BITA`,`BJRI`,`BK`,`BKD`,`BKE`,`BKH`,`BKI`,`BKNG`,`BKS`,`BKU`,`BL`,`BLCM`,`BLD`,`BLDR`,`BLK`,`BLKB`,`BLL`,`BLMN`,`BLUE`,`BMA`,`BMI`,`BMO`,`BMRN`,`BMS`,`BMY`,`BNFT`,`BNS`,`BOH`,`BOJA`,`BOKF`,`BOKF1`,`BOOT`,`BOX`,`BP`,`BPI`,`BPL`,
`BPMC`,`BPOP`,`BPT`,`BPY`,`BPY1`,`BR`,`BREW`,`BRFS`,`BRKB`,`BRKL`,`BRKR`,`BRKS`,`BRS`,`BRX`,`BSX`,`BTI`,`BTI1`,`BUD`,`BURL`,`BVN`,`BWA`,`BWXT`,`BX`,`BXP`,`BXS`,`BYD`,`BZH`,`BZUN`,`C`,`CA`,`CACC`,`CACI`,`CAG`,`CAH`,`CAI`,`CAKE`,`CAL`,`CALL`,`CALM`,`CALX`,`CAMP`,`CAR`,`CARA`,`CARB`,`CARG`,`CARS`,`CASA`,`CASY`,`CAT`,`CATM`,`CATO`,`CATY`,`CB`,`CBIO`,`CBL`,`CBOE`,`CBPO`,`CBRE`,`CBRL`,`CBS`,`CC`,`CCE`,`CCI`,`CCJ`,`CCK`,`CCL`,`CCMP`,`CCOI`,`CDAY`,`CDE`,`CDEV`,`CDK`,`CDNS`,`CDW`,`CE`,`CECO`,`CELG`,`CENT`,`CENX`,`CEO`,`CEQP`,`CERN`,`CERS`,`CEVA`,`CF`,`CFG`,`CFR`,`CFX`,`CG`,`CGNX`,`CHD`,`CHDN`,`CHE`,`CHEF`,`CHFC`,`CHGG`,`CHH`,`CHK`,`CHKP`,`CHL`,`CHRW`,`CHS`,`CHSP`,`CHTR`,`CHU`,`CHUY`,`CI`,`CIEN`,`CIM`,`CINF`,`CISN`,`CIT`,`CL`,`CLB`,`CLDR`,`CLF`,`CLFD`,`CLGX`,`CLH`,`CLI`,`CLMT`,`CLNE`,`CLNY`,`CLR`,`CLS`,`CLVS`,`CLX`,`CM`,`CMA`,`CMC`,`CMCM`,`CMCSA`,`CME`,`CMG`,`CMI`,`CMO`,`CMP`,`CMPR`,`CMS`,`CMTL`,`CNC`,`CNDT`,`CNHI`,`CNI`,`CNK`,`CNO`,`CNP`,`CNQ`,`CNX`,`CNX1`,`COF`,`COG`,`COHR`,`COHU`,`COHU1`,`COL`,`COLM`,`COMM`,`CONE`,`CONN`,`COOP1`,`COP`,`COR`,`CORE`,`CORT`,`COST`,`COT`,`COTY`,`COUP`,`CP`,`CPA`,`CPB`,`CPE`,`CPLG1`,`CPRT`,`CPT`,`CQP`,`CR`,`CRAY`,`CRC`,`CRCM`,`CREE`,`CRI`,`CRL`,`CRM`,`CRM1`,`CROX`,`CRR`,`CRS`,`CRSP`,`CRTO`,`CRUS`,`CRY`,`CRZO`,`CS`,`CSCO`,`CSFL`,`CSFL1`,`CSGS`,`CSII`,`CSIQ`,`CSOD`,`CSTE`,`CSTM`,`CSV`,`CSX`,`CTAS`,`CTB`,`CTL`,`CTL1`,`CTRL`,`CTRN`,`CTRP`,`CTSH`,`CTXS`,`CTXS1`,`CUBE`,`CVA`,`CVE`,`CVG`,`CVGW`,`CVI`,`CVIA1`,`CVLT`,`CVNA`,`CVRR`,`CVS`,`CVX`,`CWEN`,`CWH`,`CX`,`CX2`,`CXO`,`CXO1`,`CXP`,`CXW`,`CY`,`CYBR`,`CYD`,`CYH`,`CYOU`,`CZR`,`CZZ`,`D`,`DAL`,`DAN`,`DAR`,`DATA`,`DB`,`DBA`,`DBC`,`DBD`,`DBX`,`DCI`,`DCP`,`DDD`,`DDM`,`DDR1`,`DDR2`,`DDS`,`DE`,`DECK`,`DEI`,`DENN`,`DEO`,`DERM`,`DF`,`DFRG`,`DFS`,`DG`,`DGX`,`DHI`,`DHR`,`DHT`,`DIA`,`DIG`,`DIN`,`DIOD`,`DIS`,`DISCA`,`DISCK`,`DISH`,`DK`,`DKL`,`DKS`,`DLB`,`DLPH`,`DLR`,`DLTH`,`DLTR`,`DLX`,`DMRC`,`DNB`,`DNKN`,`DNOW`,`DNR`,`DO`,`DOCU`,`DOMO`,`DOOR`,`DORM`,`DOV`,`DOV1`,`DOX`,`DPLO`,`DPZ`,`DQ`,`DRE`,`DRH`,`DRI`,`DRQ`,`DSW
`,`DSX`,`DTE`,`DUG`,`DUK`,`DUST`,`DVA`,`DVAX`,`DVMT`,`DVN`,`DVY`,`DWDP`,`DWDP1`,`DXC`,`DXC1`,`DXCM`,`DXD`,`DXD1`,`DXJ`,`DXPE`,`DY`,`E`,`EA`,`EAT`,`EBAY`,`EBIX`,`EBS`,`EBSB`,`ECA`,`ECHO`,`ECL`,`ECOL`,`ECOM`,`ECPG`,`ECYT`,`ED`,`EDC`,`EDIT`,`EDR`,`EDU`,`EDZ`,`EDZ1`,`EE`,`EEB`,`EEFT`,`EEM`,`EEP`,`EEV`,`EEV1`,`EFA`,`EFC`,`EFII`,`EFX`,`EGBN`,`EGHT`,`EGL`,`EGN`,`EGO`,`EGOV`,`EGP`,`EGRX`,`EHC`,`EIGI`,`EIX`,`EL`,`ELF`,`ELLI`,`ELY`,`EME`,`EMES`,`EMN`,`EMR`,`ENB`,`ENB1`,`ENDP`,`ENLC`,`ENLK`,`ENR`,`ENS`,`ENTG`,`ENV`,`ENVA`,`EOG`,`EPAM`,`EPAY`,`EPC`,`EPD`,`EPI`,`EPR`,`EQC`,`EQIX`,`EQM`,`EQM1`,`EQNR`,`EQR`,`EQT`,`EQT1`,`ERF`,`ERI`,`ERIC`,`ERJ`,`EROS`,`ERX`,`ERY`,`ERY1`,`ES`,`ESIO`,`ESL`,`ESND`,`ESNT`,`ESPR`,`ESRT`,`ESRX`,`ESS`,`ESV`,`ET`,`ET1`,`ET2`,`ETFC`,`ETH`,`ETM`,`ETN`,`ETP`,`ETP1`,`ETR`,`ETSY`,`EUO`,`EV`,`EVH`,`EVR`,`EVRG`,`EVRG1`,`EVTC`,`EW`,`EWA`,`EWBC`,`EWC`,`EWD`,`EWG`,`EWH`,`EWI`,`EWJ`,`EWM`,`EWQ`,`EWS`,`EWT`,`EWU`,`EWU1`,`EWW`,`EWY`,`EWZ`,`EXAS`,`EXC`,`EXEL`,`EXP`,`EXPD`,`EXPE`,`EXPR`,`EXR`,`EXTN`,`EXTR`,`EYE`,`EZA`,`EZPW`,`EZU`,`F`,`FANG`,`FARO`,`FAS`,`FAST`,`FAZ`,`FB`,`FBC`,`FBHS`,`FC`,`FCAU`,`FCAU3`,`FCEA`,`FCFS`,`FCN`,`FCX`,`FDC`,`FDP`,`FDS`,`FDX`,`FE`,`FELE`,`FET`,`FEYE`,`FEZ`,`FFBC`,`FFIV`,`FGEN`,`FGP`,`FHN`,`FI`,`FICO`,`FII`,`FIS`,`FISV`,`FIT`,`FITB`,`FIVE`,`FIVN`,`FIX`,`FIZZ`,`FL`,`FLDM`,`FLEX`,`FLIR`,`FLO`,`FLOW`,`FLR`,`FLS`,`FLT`,`FLXN`,`FLY`,`FMC`,`FMX`,`FN`,`FND`,`FNF`,`FNKO`,`FNSR`,`FNV`,`FOE`,`FOLD`,`FOMX`,`FOR`,`FORM`,`FOSL`,`FOX`,`FOXA`,`FPRX`,`FR`,`FRAN`,`FRC`,`FRED`,`FRFHF`,`FRGI`,`FRO`,`FRPT`,`FRSH`,`FRT`,`FSCT`,`FSLR`,`FSS`,`FTI`,`FTK`,`FTNT`,`FTR`,`FTR1`,`FTV`,`FUL`,`FULT`,`FUN`,`FWONA`,`FWRD`,`FXA`,`FXB`,`FXC`,`FXE`,`FXI`,`FXP`,`FXP1`,`FXY`,`G`,`GATX`,`GBT`,`GBX`,`GCAP`,`GCI`,`GCO`,`GD`,`GDDY`,`GDOT`,`GDS`,`GDX`,`GDXJ`,`GE`,`GEF`,`GEL`,`GEO`,`GEO1`,`GERN`,`GES`,`GG`,`GGAL`,`GGB`,`GGG`,`GHL`,`GIII`,`GIL`,`GILD`,`GILT`,`GIS`,`GLD`,`GLIB1`,`GLIBA`,`GLL`,`GLNG`,`GLOB`,`GLOG`,`GLPI`,`GLUU`,`GLW`,`GM`,`GME`,`GMED`,`GMLP`,`GNC`,`GNRC`,`GNTX`,`GNW`,`GOGL
`,`GOGO`,`GOLD`,`GOOG`,`GOOGL`,`GOOS`,`GOV`,`GPC`,`GPI`,`GPK`,`GPN`,`GPOR`,`GPRE`,`GPRO`,`GPS`,`GRA`,`GREK`,`GRMN`,`GRPN`,`GRUB`,`GS`,`GSK`,`GSKY`,`GSM`,`GSVC`,`GT`,`GTLS`,`GTN`,`GTS`,`GTT`,`GTXI`,`GVA`,`GVA1`,`GWB`,`GWPH`,`GWR`,`GWRE`,`GWW`,`H`,`HA`,`HABT`,`HACK`,`HAIN`,`HAL`,`HALO`,`HAS`,`HBAN`,`HBI`,`HCA`,`HCI`,`HCLP`,`HCP`,`HCP1`,`HD`,`HDB`,`HDP`,`HDS`,`HE`,`HEAR`,`HEES`,`HELE`,`HEP`,`HES`,`HFC`,`HGV`,`HHC`,`HI`,`HIBB`,`HIFR`,`HIG`,`HII`,`HIIQ`,`HIMX`,`HIW`,`HLF`,`HLI`,`HLT`,`HLT1`,`HLX`,`HMC`,`HMHC`,`HMNY1`,`HMSY`,`HOG`,`HOLI`,`HOLX`,`HOMB`,`HOME`,`HON`,`HON1`,`HOPE`,`HOS`,`HOV`,`HP`,`HPE`,`HPE1`,`HPE2`,`HPP`,`HPQ`,`HPR`,`HPT`,`HQY`,`HR`,`HRB`,`HRI`,`HRL`,`HRS`,`HSBC`,`HSC`,`HSIC`,`HST`,`HSY`,`HT`,`HTA`,`HTH`,`HTHT`,`HTZ`,`HUBG`,`HUBS`,`HUM`,`HUN`,`HUYA`,`HXL`,`HYG`,`HZNP`,`HZO`,`I`,`IAC`,`IAU`,`IBB`,`IBKR`,`IBM`,`IBN`,`IBN1`,`ICE`,`ICHR`,`ICLR`,`ICON`,`ICPT`,`ICUI`,`IDA`,`IDCC`,`IDT`,`IDTI`,`IEO`,`IEP`,`IEX`,`IFF`,`IGT`,`IIVI`,`ILF`,`ILMN`,`ILPT`,`IMAX`,`IMGN`,`IMKTA`,`IMMR`,`IMMU`,`IMO`,`IMOS`,`IMPV`,`INAP`,`INCY`,`INFN`,`INFO`,`INFY`,`INFY1`,`ING`,`INGN`,`INGR`,`INN`,`INOV`,`INSM`,`INSY`,`INT`,`INTC`,`INTU`,`INVA`,`INXN`,`IONS`,`IP`,`IPG`,`IPGP`,`IPHI`,`IPHS`,`IQ`,`IQV`,`IR`,`IRBT`,`IRDM`,`IRM`,`ISRG`,`IT`,`ITB`,`ITCI`,`ITG`,`ITRI`,`ITT`,`ITUB`,`ITW`,`IVAC`,`IVR`,`IVV`,`IVZ`,`IWB`,`IWD`,`IWM`,`IWN`,`IWO`,`IYE`,`IYF`,`IYM`,`IYR`,`IYT`,`JACK`,`JAZZ`,`JBHT`,`JBL`,`JBLU`,`JBSS`,`JBT`,`JCI`,`JCI3`,`JCOM`,`JCP`,`JD`,`JEC`,`JEF`,`JJOFF`,`JKHY`,`JKS`,`JLL`,`JMEI`,`JNJ`,`JNK`,`JNPR`,`JNUG`,`JOE`,`JPM`,`JWN`,`K`,`KALU`,`KANG`,`KAR`,`KB`,`KBE`,`KBH`,`KBR`,`KDP`,`KDP1`,`KEM`,`KEX`,`KEY`,`KEYS`,`KEYW`,`KFRC`,`KGC`,`KHC`,`KIE`,`KIM`,`KIRK`,`KKR`,`KL`,`KLAC`,`KLIC`,`KLXI1`,`KMB`,`KMI`,`KMT`,`KMX`,`KN`,`KNDI`,`KNL`,`KNX`,`KNX1`,`KO`,`KODK`,`KOL`,`KOP`,`KORS`,`KR`,`KRA`,`KRC`,`KRE`,`KRO`,`KS`,`KSS`,`KSU`,`KW`,`KWR`,`L`,`LABL`,`LABU`,`LAD`,`LADR`,`LAMR`,`LAZ`,`LB`,`LBRDK`,`LBTYA`,`LBTYK`,`LC`,`LCII`,`LDOS`,`LE`,`LEA`,`LECO`,`LEG`,`LEN`,`LEN1`,`LFC`,`LFIN`,`LFUS`,`LGFA`,`LGFA1
`,`LGIH`,`LGND`,`LH`,`LHCG`,`LHCG1`,`LHO`,`LII`,`LITE`,`LIVN`,`LKQ`,`LL`,`LLL`,`LLY`,`LM`,`LMNR`,`LMNX`,`LMT`,`LN`,`LNC`,`LNG`,`LNG1`,`LNN`,`LNT`,`LOCO`,`LOGI`,`LOGM`,`LOPE`,`LORL`,`LOW`,`LOXO`,`LPI`,`LPL`,`LPLA`,`LPNT`,`LPSN`,`LPT`,`LPX`,`LQD`,`LQDT`,`LRCX`,`LRN`,`LSCC`,`LSI`,`LSTR`,`LSXMA`,`LTC`,`LULU`,`LUV`,`LVS`,`LW`,`LXP`,`LYB`,`LYG`,`LYV`,`LZB`,`M`,`MA`,`MAA`,`MAC`,`MAIN`,`MAN`,`MANH`,`MANT`,`MANU`,`MAR`,`MAS`,`MAT`,`MATX`,`MB`,`MBFI`,`MBI`,`MBT`,`MBUU`,`MCD`,`MCF`,`MCHP`,`MCK`,`MCO`,`MCRI`,`MCS`,`MCY`,`MD`,`MDB`,`MDC`,`MDCO`,`MDGL`,`MDLZ`,`MDP`,`MDR`,`MDR1`,`MDR2`,`MDRX`,`MDSO`,`MDT`,`MDU`,`MDXG`,`MDY`,`MED`,`MEI`,`MELI`,`MEOH`,`MET`,`MET1`,`MFA`,`MFC`,`MFGP`,`MGA`,`MGI`,`MGLN`,`MGM`,`MGPI`,`MHK`,`MHLD`,`MHO`,`MIC`,`MIDD`,`MIK`,`MIME`,`MINI`,`MITL`,`MKC`,`MKSI`,`MKTX`,`MLCO`,`MLHR`,`MLM`,`MLNT`,`MLNX`,`MMC`,`MMLP`,`MMM`,`MMP`,`MMS`,`MMSI`,`MMYT`,`MNK`,`MNRO`,`MNST`,`MNTA`,`MO`,`MODN`,`MOH`,`MOMO`,`MOO`,`MOS`,`MOV`,`MPC`,`MPC1`,`MPC2`,`MPLX`,`MPW`,`MPWR`,`MRC`,`MRCY`,`MRK`,`MRO`,`MRTX`,`MRVL`,`MRVL1`,`MS`,`MSCI`,`MSFT`,`MSGN`,`MSI`,`MSM`,`MSTR`,`MT`,`MT1`,`MTB`,`MTCH`,`MTD`,`MTDR`,`MTG`,`MTH`,`MTN`,`MTOR`,`MTSI`,`MTW`,`MTZ`,`MU`,`MUR`,`MUSA`,`MX`,`MXIM`,`MXL`,`MXWL`,`MYGN`,`MYL`,`MZOR`,`NANO`,`NAT`,`NATI`,`NAV`,`NAVG`,`NAVI`,`NBIX`,`NBL`,`NBR`,`NCLH`,`NCMI`,`NCR`,`NCS`,`NDAQ`,`NDLS`,`NDSN`,`NDX`,`NE`,`NEE`,`NEM`,`NEU`,`NEWM`,`NEWR`,`NFBK`,`NFG`,`NFLX`,`NFX`,`NGD`,`NGG`,`NGL`,`NGVC`,`NHI`,`NHTC`,`NI`,`NJR`,`NKE`,`NKTR`,`NLS`,`NLSN`,`NLY`,`NLY1`,`NNN`,`NOC`,`NOK`,`NOV`,`NOVT`,`NOW`,`NPO`,`NPTN`,`NRG`,`NRP`,`NRZ`,`NS`,`NS1`,`NSC`,`NSIT`,`NSP`,`NTAP`,`NTCT`,`NTES`,`NTGR`,`NTLA`,`NTNX`,`NTR`,`NTR1`,`NTR2`,`NTRI`,`NTRS`,`NUAN`,`NUE`,`NUGT`,`NUS`,`NVCR`,`NVDA`,`NVGS`,`NVMI`,`NVO`,`NVRO`,`NVS`,`NVT`,`NWE`,`NWL`,`NWN`,`NWPX`,`NWS`,`NWSA`,`NXGN`,`NXPI`,`NXST`,`NXTM`,`NYCB`,`NYRT`,`NYRT3`,`NYT`,`O`,`OAK`,`OAS`,`OC`,`OCLR`,`OCN`,`ODFL`,`ODP`,`OEC`,`OEF`,`OFC`,`OFG`,`OGE`,`OGS`,`OHI`,`OI`,`OIH`,`OII`,`OILNF`,`OIS`,`OKE`,`OKTA`,`OLED`,`OLLI`,`OLN`,`OMC`,`OMCL`,`OMER`,`OMF`,
`OMI`,`ON`,`ONCE`,`ONDK`,`OPK`,`ORA`,`ORAN`,`ORBK`,`ORCL`,`ORI`,`ORIG`,`ORLY`,`OSIS`,`OSK`,`OSPN`,`OSTK`,`OTEX`,`OUT`,`OXM`,`OXY`,`OZK`,`OZM`,`P`,`PAA`,`PAAS`,`PACW`,`PAG`,`PAGP`,`PAGP1`,`PAGS`,`PAH`,`PANW`,`PAYC`,`PAYX`,`PB`,`PBCT`,`PBCT1`,`PBF`,`PBFX`,`PBI`,`PBPB`,`PBR`,`PBYI`,`PCAR`,`PCG`,`PCH`,`PCH2`,`PCRX`,`PCTY`,`PCYG`,`PDCE`,`PDCO`,`PDD`,`PDFS`,`PDM`,`PE`,`PEB`,`PEG`,`PEGA`,`PEGI`,`PEI`,`PENN`,`PENN1`,`PEP`,`PERY`,`PETS`,`PF`,`PFE`,`PFG`,`PFPT`,`PG`,`PGJ`,`PGNX`,`PGR`,`PGRE`,`PGTI`,`PH`,`PHM`,`PI`,`PICO`,`PII`,`PINC`,`PIR`,`PKG`,`PKI`,`PKX`,`PLAB`,`PLAY`,`PLCE`,`PLD`,`PLD1`,`PLNT`,`PLT`,`PLXS`,`PM`,`PMT`,`PNC`,`PNFP`,`PNK`,`PNM`,`PNR`,`PNR1`,`PNW`,`PODD`,`POL`,`POOL`,`POR`,`POST`,`POWI`,`PPC`,`PPG`,`PPH`,`PPL`,`PRAA`,`PRAH`,`PRFT`,`PRGO`,`PRGS`,`PRI`,`PRLB`,`PRMW`,`PRO`,`PRSC`,`PRTA`,`PRTK`,`PRTY`,`PRU`,`PSA`,`PSB`,`PSEC`,`PSMT`,`PSTG`,`PSX`,`PSXP`,`PTC`,`PTEN`,`PTLA`,`PTR`,`PTR1`,`PTR2`,`PUK`,`PVG`,`PVH`,`PVTL`,`PWR`,`PX`,`PXD`,`PYPL`,`PZZA`,`QADA`,`QCOM`,`QD`,`QDEL`,`QEP`,`QID`,`QID1`,`QIWI`,`QLD`,`QLYS`,`QNST`,`QQQ`,`QRTEA`,`QRVO`,`QSR`,`QTWO`,`QUAD`,`QUOT`,`QURE`,`R`,`RACE`,`RAD`,`RAIL`,`RAMP`,`RAVN`,`RBA`,`RBBN`,`RBC`,`RBS`,`RCI`,`RCII`,`RCL`,`RDC`,`RDFN`,`RDN`,`RDSA`,`RDSB`,`RDUS`,`RDWR`,`RE`,`REG`,`REGI`,`REGN`,`REN`,`REPH`,`RES`,`RETA`,`REV`,`RF`,`RGA`,`RGEN`,`RGLD`,`RGNX`,`RGR`,`RH`,`RHI`,`RHT`,`RIG`,`RIO`,`RIOT`,`RJF`,`RL`,`RLGY`,`RLI`,`RLJ`,`RMBS`,`RMD`,`RMTI`,`RNG`,`RNR`,`ROCK`,`ROG`,`ROIC`,`ROK`,`ROKU`,`ROP`,`ROST`,`RP`,`RPAI`,`RPD`,`RPM`,`RRC`,`RRD`,`RRGB`,`RRR`,`RS`,`RSG`,`RST`,`RSX`,`RTEC`,`RTN`,`RTRX`,`RUBI`,`RUN`,`RUSHA`,`RUSL`,`RUTH`,`RWR`,`RXN`,`RY`,`RYAAY`,`RYAM`,`RYN`,`S`,`SA`,`SABR`,`SAFM`,`SAGE`,`SAH`,`SAIA`,`SAIC`,`SAIL`,`SAM`,`SAN`,`SAN1`,`SANM`,`SAP`,`SASR`,`SATS`,`SAVE`,`SBAC`,`SBGI`,`SBGL`,`SBGL2`,`SBH`,`SBNY`,`SBRA`,`SBUX`,`SC`,`SCCO`,`SCG`,`SCHD`,`SCHL`,`SCHN`,`SCHW`,`SCI`,`SCL`,`SCO`,`SCS`,`SCSC`,`SD`,`SDRL1`,`SDS`,`SDS1`,`SE`,`SEAS`,`SEDG`,`SEE`,`SEIC`,`SEM`,`SEMG`,`SEND`,`SEP`,`SERV`,`SERV1`,`SF`,`SFIX`,`SFL`,`SFLY`,`SFM`,`SF
UN`,`SGEN`,`SGH`,`SGMO`,`SGMS`,`SH`,`SHAK`,`SHEN`,`SHLD`,`SHLX`,`SHO`,`SHOO`,`SHOO1`,`SHOP`,`SHPG`,`SHW`,`SIG`,`SIGM`,`SIGM1`,`SIL`,`SIMO`,`SINA`,`SINA2`,`SIR`,`SIRI`,`SITC`,`SITC1`,`SITC2`,`SIVB`,`SIX`,`SJI`,`SJM`,`SKF`,`SKT`,`SKX`,`SKYW`,`SLAB`,`SLB`,`SLCA`,`SLG`,`SLGN`,`SLM`,`SLV`,`SLX`,`SM`,`SMCI`,`SMG`,`SMH`,`SMLP`,`SMN`,`SMN1`,`SMTC`,`SN`,`SNA`,`SNAP`,`SNBR`,`SNCR`,`SNE`,`SNH`,`SNHY`,`SNN`,`SNP`,`SNPS`,`SNV`,`SNX`,`SNX1`,`SNY`,`SO`,`SODA`,`SOGO`,`SOHU`,`SON`,`SONC`,`SONO`,`SPA`,`SPB`,`SPB1`,`SPG`,`SPGI`,`SPH`,`SPLK`,`SPN`,`SPOT`,`SPPI`,`SPR`,`SPTN`,`SPWR`,`SPX`,`SPXC`,`SPXL`,`SPXS`,`SPXU`,`SPXU1`,`SPY`,`SQ`,`SQM`,`SQM1`,`SQQQ`,`SQQQ1`,`SRC`,`SRC1`,`SRCI`,`SRCL`,`SRE`,`SRG`,`SRI`,`SRPT`,`SRS`,`SSB`,`SSC`,`SSD`,`SSL`,`SSNC`,`SSO`,`SSP`,`SSRM`,`SSTK`,`SSW`,`SSYS`,`ST`,`STAY`,`STI`,`STKL`,`STLD`,`STM`,`STMP`,`STNG`,`STON`,`STOR`,`STRA`,`STRA1`,`STT`,`STWD`,`STX`,`STZ`,`SU`,`SUI`,`SUM`,`SUN`,`SUP`,`SVU`,`SVU1`,`SVXY`,`SVXY1`,`SWCH`,`SWIR`,`SWK`,`SWKS`,`SWM`,`SWN`,`SWX`,`SXC`,`SYF`,`SYK`,`SYKE`,`SYMC`,`SYNA`,`SYNH`,`SYX`,`SYY`,`T`,`T1`,`TACO`,`TAHO`,`TAL`,`TAP`,`TBI`,`TBT`,`TCBI`,`TCF`,`TCO`,`TCP`,`TCS`,`TCX`,`TD`,`TDC`,`TDG`,`TDOC`,`TDS`,`TEAM`,`TECD`,`TECK`,`TECL`,`TEF`,`TEL`,`TEN`,`TEO`,`TER`,`TERP`,`TEVA`,`TEX`,`TFSL`,`TGE`,`TGE1`,`TGI`,`TGNA`,`TGP`,`TGT`,`TGTX`,`THC`,`THO`,`THRM`,`THS`,`TIF`,`TISI`,`TITN`,`TIVO`,`TJX`,`TK`,`TKR`,`TLRD`,`TLRY`,`TLT`,`TLYS`,`TM`,`TMF`,`TMHC`,`TMHC1`,`TMK`,`TMO`,`TMST`,`TMUS`,`TNA`,`TNC`,`TNDM`,`TNET`,`TOL`,`TOO`,`TOT`,`TOWN`,`TPC`,`TPH`,`TPR`,`TPX`,`TQQQ`,`TRCO`,`TREE`,`TREX`,`TRGP`,`TRI`,`TRIP`,`TRMB`,`TRMK`,`TRN`,`TROW`,`TROX`,`TRP`,`TRQ`,`TRS`,`TRTN`,`TRU`,`TRUE`,`TRV`,`TRVG`,`TRVN`,`TS`,`TSCO`,`TSE`,`TSEM`,`TSG`,`TSLA`,`TSLA1`,`TSM`,`TSN`,`TSRO`,`TSS`,`TTC`,`TTD`,`TTGT`,`TTM`,`TTMI`,`TTS`,`TTWO`,`TU`,`TUP`,`TUR`,`TUSK`,`TV`,`TVPT`,`TVTY`,`TWI`,`TWLO`,`TWM`,`TWNK`,`TWO`,`TWO1`,`TWOU`,`TWTR`,`TX`,`TXMD`,`TXN`,`TXRH`,`TXT`,`TYL`,`TYPE`,`TZOO`,`UA`,`UAA`,`UAL`,`UBNT`,`UCO`,`UCO2`,`UCTT`,`UDR`,`UEPS`,`UFS`,`UGI`,`UHS`,`UIHC`,`UIS`
,`UL`,`ULTA`,`ULTI`,`UN`,`UNFI`,`UNG`,`UNG1`,`UNH`,`UNIT`,`UNM`,`UNP`,`UNT`,`UNVR`,`UPLD`,`UPRO`,`UPS`,`URBN`,`URE`,`URI`,`USAC`,`USB`,`USCR`,`USFD`,`USG`,`USM`,`USO`,`USPH`,`UTHR`,`UTX`,`UUP`,`UVE`,`UVXY`,`UVXY1`,`UVXY2`,`UVXY3`,`UWM`,`UYG`,`UYM`,`V`,`VAC`,`VAC1`,`VALE`,`VAR`,`VC`,`VCRA`,`VDE`,`VECO`,`VEEV`,`VEON`,`VER`,`VFC`,`VG`,`VGK`,`VGR`,`VGR1`,`VIAB`,`VIAV`,`VICR`,`VIG`,`VIIX`,`VIPS`,`VIRT`,`VIXY`,`VIXY2`,`VJET`,`VKTX`,`VLO`,`VLP`,`VLRS`,`VLY`,`VMC`,`VMI`,`VMW`,`VNCE`,`VNDA`,`VNET`,`VNO`,`VNOM`,`VNQ`,`VOD`,`VOO`,`VOYA`,`VRA`,`VRNS`,`VRNT`,`VRSK`,`VRSN`,`VRTX`,`VSAT`,`VSH`,`VSI`,`VSLR`,`VSM`,`VST`,`VST1`,`VSTO`,`VTI`,`VTL`,`VTR`,`VVC`,`VVV`,`VWO`,`VXX`,`VXX2`,`VXXB`,`VXZ`,`VXZB`,`VYM`,`VZ`,`VZ1`,`W`,`WAB`,`WABC`,`WAGE`,`WAL`,`WAT`,`WATT`,`WB`,`WBA`,`WBAI`,`WBC`,`WBS`,`WCC`,`WCG`,`WCN`,`WDAY`,`WDC`,`WDFC`,`WDR`,`WEC`,`WELL`,`WEN`,`WERN`,`WES`,`WETF`,`WEX`,`WFC`,`WFT`,`WGO`,`WH`,`WHR`,`WIFI`,`WIN`,`WIN1`,`WING`,`WIRE`,`WIT`,`WIX`,`WK`,`WLH`,`WLK`,`WLKP`,`WLL`,`WLL1`,`WLTW`,`WM`,`WMB`,`WMB1`,`WMS`,`WMT`,`WNC`,`WOR`,`WP`,`WPC`,`WPG`,`WPM`,`WPP`,`WPX`,`WRB`,`WRD`,`WRE`,`WRI`,`WRK`,`WRLD`,`WSM`,`WSO`,`WTFC`,`WTR`,`WTW`,`WU`,`WUBA`,`WWD`,`WWE`,`WWW`,`WY`,`WYND`,`WYND1`,`WYNN`,`X`,`XBI`,`XEC`,`XEL`,`XENT`,`XES`,`XHB`,`XLB`,`XLE`,`XLF`,`XLI`,`XLK`,`XLNX`,`XLP`,`XLRN`,`XLU`,`XLV`,`XLY`,`XME`,`XNET`,`XOM`,`XON`,`XON2`,`XONE`,`XOP`,`XOXO`,`XPER`,`XPO`,`XRAY`,`XRT`,`XRX`,`XRX1`,`XRX2`,`XYL`,`YCS`,`YELP`,`YEXT`,`YNDX`,`YPF`,`YRCW`,`YRD`,`YUM`,`YUM1`,`YUMC`,`YY`,`Z`,`ZAGG`,`ZAYO`,`ZBH`,`ZBRA`,`ZEN`,`ZG`,`ZGNX`,`ZION`,`ZNGA`,`ZNH`,`ZOES`,`ZS`,`ZSL`,`ZTO`,`ZTS`,`ZUMZ`,`ZUO`"); + "USym in 
`A`,`AA`,`AABA`,`AAL`,`AAL1`,`AAN`,`AAOI`,`AAP`,`AAPL`,`AAT`,`AAWW`,`AAXN`,`AB`,`ABB`,`ABBV`,`ABC`,`ABEO`,`ABEV`,`ABG`,`ABMD`,`ABT`,`ABT1`,`ABX`,`ACAD`,`ACC`,`ACGL`,`ACH`,`ACHC`,`ACIA`,`ACIW`,`ACLS`,`ACM`,`ACN`,`ACOR`,`ACWI`,`ADBE`,`ADI`,`ADM`,`ADMP`,`ADNT`,`ADP`,`ADS`,`ADSK`,`ADTN`,`AEE`,`AEIS`,`AEM`,`AEO`,`AEP`,`AER`,`AERI`,`AES`,`AET`,`AFG`,`AFL`,`AFSI`,`AG`,`AGCO`,`AGIO`,`AGN`,`AGNC`,`AGO`,`AGQ`,`AGX`,`AHL`,`AHT`,`AI`,`AIG`,`AIMC`,`AIMT`,`AINV`,`AIR`,`AIV`,`AIZ`,`AJG`,`AJRD`,`AKAM`,`AKAO`,`AKCA`,`AKR`,`AKRX`,`AKS`,`AL`,`ALB`,`ALE`,`ALGN`,`ALGT`,`ALK`,`ALKS`,`ALL`,`ALLE`,`ALLT`,`ALLY`,`ALNY`,`ALRM`,`ALSN`,`ALV`,`ALV1`,`ALXN`,`AM`,`AMAG`,`AMAT`,`AMBA`,`AMBC`,`AMC`,`AMCX`,`AMD`,`AME`,`AMED`,`AMG`,`AMGN`,`AMJ`,`AMKR`,`AMLP`,`AMN`,`AMP`,`AMRN`,`AMRX`,`AMT`,`AMTD`,`AMWD`,`AMX`,`AMZN`,`AN`,`ANAB`,`ANDE`,`ANDX`,`ANET`,`ANF`,`ANGI`,`ANIK`,`ANSS`,`ANTM`,`AOBC`,`AON`,`AOS`,`AOSL`,`APA`,`APAM`,`APC`,`APD`,`APH`,`APO`,`APOG`,`APPN`,`APRN`,`APTI`,`APTV`,`APTV1`,`APU`,`AR`,`ARAY`,`ARCB`,`ARCC`,`ARCH`,`ARCO`,`ARE`,`ARII`,`ARLP`,`ARLP1`,`ARMK`,`ARNC`,`ARNC1`,`ARNC2`,`AROC`,`AROC1`,`ARRS`,`ARW`,`ARWR`,`ASB`,`ASGN`,`ASH`,`ASHR`,`ASML`,`ASNA`,`ASPS`,`ASRT`,`ASTE`,`ATEN`,`ATGE`,`ATH`,`ATHM`,`ATHN`,`ATI`,`ATO`,`ATR`,`ATSG`,`ATU`,`ATUS`,`ATVI`,`AU`,`AUY`,`AVA`,`AVAV`,`AVB`,`AVD`,`AVEO`,`AVGO`,`AVLR`,`AVNS`,`AVP`,`AVT`,`AVX`,`AVY`,`AVYA`,`AWI`,`AWK`,`AX`,`AXE`,`AXL`,`AXP`,`AXS`,`AXTA`,`AY`,`AYI`,`AYR`,`AYX`,`AZN`,`AZO`,`AZPN`,`AZUL`,`AZZ`,`BA`,`BABA`,`BAC`,`BAH`,`BAM`,`BANC`,`BAP`,`BAX`,`BB`,`BBBY`,`BBD`,`BBD1`,`BBL`,`BBT`,`BBW`,`BBY`,`BC`,`BCC`,`BCE`,`BCEI`,`BCO`,`BCOR`,`BCOV`,`BCS`,`BDC`,`BDN`,`BDX`,`BEAT`,`BECN`,`BEL`,`BEN`,`BERY`,`BG`,`BGCP`,`BGFV`,`BGG`,`BGNE`,`BGS`,`BHC`,`BHE`,`BHF`,`BHGE`,`BHGE1`,`BHP`,`BHR`,`BIB`,`BID`,`BIDU`,`BIG`,`BIIB`,`BIIB1`,`BILI`,`BITA`,`BJRI`,`BK`,`BKD`,`BKE`,`BKH`,`BKI`,`BKNG`,`BKS`,`BKU`,`BL`,`BLCM`,`BLD`,`BLDR`,`BLK`,`BLKB`,`BLL`,`BLMN`,`BLUE`,`BMA`,`BMI`,`BMO`,`BMRN`,`BMS`,`BMY`,`BNFT`,`BNS`,`BOH`,`BOJA`,`BOKF`,`BOKF1`,`BOOT`,`BOX`,`BP`,`BPI`,`BPL`,
`BPMC`,`BPOP`,`BPT`,`BPY`,`BPY1`,`BR`,`BREW`,`BRFS`,`BRKB`,`BRKL`,`BRKR`,`BRKS`,`BRS`,`BRX`,`BSX`,`BTI`,`BTI1`,`BUD`,`BURL`,`BVN`,`BWA`,`BWXT`,`BX`,`BXP`,`BXS`,`BYD`,`BZH`,`BZUN`,`C`,`CA`,`CACC`,`CACI`,`CAG`,`CAH`,`CAI`,`CAKE`,`CAL`,`CALL`,`CALM`,`CALX`,`CAMP`,`CAR`,`CARA`,`CARB`,`CARG`,`CARS`,`CASA`,`CASY`,`CAT`,`CATM`,`CATO`,`CATY`,`CB`,`CBIO`,`CBL`,`CBOE`,`CBPO`,`CBRE`,`CBRL`,`CBS`,`CC`,`CCE`,`CCI`,`CCJ`,`CCK`,`CCL`,`CCMP`,`CCOI`,`CDAY`,`CDE`,`CDEV`,`CDK`,`CDNS`,`CDW`,`CE`,`CECO`,`CELG`,`CENT`,`CENX`,`CEO`,`CEQP`,`CERN`,`CERS`,`CEVA`,`CF`,`CFG`,`CFR`,`CFX`,`CG`,`CGNX`,`CHD`,`CHDN`,`CHE`,`CHEF`,`CHFC`,`CHGG`,`CHH`,`CHK`,`CHKP`,`CHL`,`CHRW`,`CHS`,`CHSP`,`CHTR`,`CHU`,`CHUY`,`CI`,`CIEN`,`CIM`,`CINF`,`CISN`,`CIT`,`CL`,`CLB`,`CLDR`,`CLF`,`CLFD`,`CLGX`,`CLH`,`CLI`,`CLMT`,`CLNE`,`CLNY`,`CLR`,`CLS`,`CLVS`,`CLX`,`CM`,`CMA`,`CMC`,`CMCM`,`CMCSA`,`CME`,`CMG`,`CMI`,`CMO`,`CMP`,`CMPR`,`CMS`,`CMTL`,`CNC`,`CNDT`,`CNHI`,`CNI`,`CNK`,`CNO`,`CNP`,`CNQ`,`CNX`,`CNX1`,`COF`,`COG`,`COHR`,`COHU`,`COHU1`,`COL`,`COLM`,`COMM`,`CONE`,`CONN`,`COOP1`,`COP`,`COR`,`CORE`,`CORT`,`COST`,`COT`,`COTY`,`COUP`,`CP`,`CPA`,`CPB`,`CPE`,`CPLG1`,`CPRT`,`CPT`,`CQP`,`CR`,`CRAY`,`CRC`,`CRCM`,`CREE`,`CRI`,`CRL`,`CRM`,`CRM1`,`CROX`,`CRR`,`CRS`,`CRSP`,`CRTO`,`CRUS`,`CRY`,`CRZO`,`CS`,`CSCO`,`CSFL`,`CSFL1`,`CSGS`,`CSII`,`CSIQ`,`CSOD`,`CSTE`,`CSTM`,`CSV`,`CSX`,`CTAS`,`CTB`,`CTL`,`CTL1`,`CTRL`,`CTRN`,`CTRP`,`CTSH`,`CTXS`,`CTXS1`,`CUBE`,`CVA`,`CVE`,`CVG`,`CVGW`,`CVI`,`CVIA1`,`CVLT`,`CVNA`,`CVRR`,`CVS`,`CVX`,`CWEN`,`CWH`,`CX`,`CX2`,`CXO`,`CXO1`,`CXP`,`CXW`,`CY`,`CYBR`,`CYD`,`CYH`,`CYOU`,`CZR`,`CZZ`,`D`,`DAL`,`DAN`,`DAR`,`DATA`,`DB`,`DBA`,`DBC`,`DBD`,`DBX`,`DCI`,`DCP`,`DDD`,`DDM`,`DDR1`,`DDR2`,`DDS`,`DE`,`DECK`,`DEI`,`DENN`,`DEO`,`DERM`,`DF`,`DFRG`,`DFS`,`DG`,`DGX`,`DHI`,`DHR`,`DHT`,`DIA`,`DIG`,`DIN`,`DIOD`,`DIS`,`DISCA`,`DISCK`,`DISH`,`DK`,`DKL`,`DKS`,`DLB`,`DLPH`,`DLR`,`DLTH`,`DLTR`,`DLX`,`DMRC`,`DNB`,`DNKN`,`DNOW`,`DNR`,`DO`,`DOCU`,`DOMO`,`DOOR`,`DORM`,`DOV`,`DOV1`,`DOX`,`DPLO`,`DPZ`,`DQ`,`DRE`,`DRH`,`DRI`,`DRQ`,`DSW
`,`DSX`,`DTE`,`DUG`,`DUK`,`DUST`,`DVA`,`DVAX`,`DVMT`,`DVN`,`DVY`,`DWDP`,`DWDP1`,`DXC`,`DXC1`,`DXCM`,`DXD`,`DXD1`,`DXJ`,`DXPE`,`DY`,`E`,`EA`,`EAT`,`EBAY`,`EBIX`,`EBS`,`EBSB`,`ECA`,`ECHO`,`ECL`,`ECOL`,`ECOM`,`ECPG`,`ECYT`,`ED`,`EDC`,`EDIT`,`EDR`,`EDU`,`EDZ`,`EDZ1`,`EE`,`EEB`,`EEFT`,`EEM`,`EEP`,`EEV`,`EEV1`,`EFA`,`EFC`,`EFII`,`EFX`,`EGBN`,`EGHT`,`EGL`,`EGN`,`EGO`,`EGOV`,`EGP`,`EGRX`,`EHC`,`EIGI`,`EIX`,`EL`,`ELF`,`ELLI`,`ELY`,`EME`,`EMES`,`EMN`,`EMR`,`ENB`,`ENB1`,`ENDP`,`ENLC`,`ENLK`,`ENR`,`ENS`,`ENTG`,`ENV`,`ENVA`,`EOG`,`EPAM`,`EPAY`,`EPC`,`EPD`,`EPI`,`EPR`,`EQC`,`EQIX`,`EQM`,`EQM1`,`EQNR`,`EQR`,`EQT`,`EQT1`,`ERF`,`ERI`,`ERIC`,`ERJ`,`EROS`,`ERX`,`ERY`,`ERY1`,`ES`,`ESIO`,`ESL`,`ESND`,`ESNT`,`ESPR`,`ESRT`,`ESRX`,`ESS`,`ESV`,`ET`,`ET1`,`ET2`,`ETFC`,`ETH`,`ETM`,`ETN`,`ETP`,`ETP1`,`ETR`,`ETSY`,`EUO`,`EV`,`EVH`,`EVR`,`EVRG`,`EVRG1`,`EVTC`,`EW`,`EWA`,`EWBC`,`EWC`,`EWD`,`EWG`,`EWH`,`EWI`,`EWJ`,`EWM`,`EWQ`,`EWS`,`EWT`,`EWU`,`EWU1`,`EWW`,`EWY`,`EWZ`,`EXAS`,`EXC`,`EXEL`,`EXP`,`EXPD`,`EXPE`,`EXPR`,`EXR`,`EXTN`,`EXTR`,`EYE`,`EZA`,`EZPW`,`EZU`,`F`,`FANG`,`FARO`,`FAS`,`FAST`,`FAZ`,`FB`,`FBC`,`FBHS`,`FC`,`FCAU`,`FCAU3`,`FCEA`,`FCFS`,`FCN`,`FCX`,`FDC`,`FDP`,`FDS`,`FDX`,`FE`,`FELE`,`FET`,`FEYE`,`FEZ`,`FFBC`,`FFIV`,`FGEN`,`FGP`,`FHN`,`FI`,`FICO`,`FII`,`FIS`,`FISV`,`FIT`,`FITB`,`FIVE`,`FIVN`,`FIX`,`FIZZ`,`FL`,`FLDM`,`FLEX`,`FLIR`,`FLO`,`FLOW`,`FLR`,`FLS`,`FLT`,`FLXN`,`FLY`,`FMC`,`FMX`,`FN`,`FND`,`FNF`,`FNKO`,`FNSR`,`FNV`,`FOE`,`FOLD`,`FOMX`,`FOR`,`FORM`,`FOSL`,`FOX`,`FOXA`,`FPRX`,`FR`,`FRAN`,`FRC`,`FRED`,`FRFHF`,`FRGI`,`FRO`,`FRPT`,`FRSH`,`FRT`,`FSCT`,`FSLR`,`FSS`,`FTI`,`FTK`,`FTNT`,`FTR`,`FTR1`,`FTV`,`FUL`,`FULT`,`FUN`,`FWONA`,`FWRD`,`FXA`,`FXB`,`FXC`,`FXE`,`FXI`,`FXP`,`FXP1`,`FXY`,`G`,`GATX`,`GBT`,`GBX`,`GCAP`,`GCI`,`GCO`,`GD`,`GDDY`,`GDOT`,`GDS`,`GDX`,`GDXJ`,`GE`,`GEF`,`GEL`,`GEO`,`GEO1`,`GERN`,`GES`,`GG`,`GGAL`,`GGB`,`GGG`,`GHL`,`GIII`,`GIL`,`GILD`,`GILT`,`GIS`,`GLD`,`GLIB1`,`GLIBA`,`GLL`,`GLNG`,`GLOB`,`GLOG`,`GLPI`,`GLUU`,`GLW`,`GM`,`GME`,`GMED`,`GMLP`,`GNC`,`GNRC`,`GNTX`,`GNW`,`GOGL
`,`GOGO`,`GOLD`,`GOOG`,`GOOGL`,`GOOS`,`GOV`,`GPC`,`GPI`,`GPK`,`GPN`,`GPOR`,`GPRE`,`GPRO`,`GPS`,`GRA`,`GREK`,`GRMN`,`GRPN`,`GRUB`,`GS`,`GSK`,`GSKY`,`GSM`,`GSVC`,`GT`,`GTLS`,`GTN`,`GTS`,`GTT`,`GTXI`,`GVA`,`GVA1`,`GWB`,`GWPH`,`GWR`,`GWRE`,`GWW`,`H`,`HA`,`HABT`,`HACK`,`HAIN`,`HAL`,`HALO`,`HAS`,`HBAN`,`HBI`,`HCA`,`HCI`,`HCLP`,`HCP`,`HCP1`,`HD`,`HDB`,`HDP`,`HDS`,`HE`,`HEAR`,`HEES`,`HELE`,`HEP`,`HES`,`HFC`,`HGV`,`HHC`,`HI`,`HIBB`,`HIFR`,`HIG`,`HII`,`HIIQ`,`HIMX`,`HIW`,`HLF`,`HLI`,`HLT`,`HLT1`,`HLX`,`HMC`,`HMHC`,`HMNY1`,`HMSY`,`HOG`,`HOLI`,`HOLX`,`HOMB`,`HOME`,`HON`,`HON1`,`HOPE`,`HOS`,`HOV`,`HP`,`HPE`,`HPE1`,`HPE2`,`HPP`,`HPQ`,`HPR`,`HPT`,`HQY`,`HR`,`HRB`,`HRI`,`HRL`,`HRS`,`HSBC`,`HSC`,`HSIC`,`HST`,`HSY`,`HT`,`HTA`,`HTH`,`HTHT`,`HTZ`,`HUBG`,`HUBS`,`HUM`,`HUN`,`HUYA`,`HXL`,`HYG`,`HZNP`,`HZO`,`I`,`IAC`,`IAU`,`IBB`,`IBKR`,`IBM`,`IBN`,`IBN1`,`ICE`,`ICHR`,`ICLR`,`ICON`,`ICPT`,`ICUI`,`IDA`,`IDCC`,`IDT`,`IDTI`,`IEO`,`IEP`,`IEX`,`IFF`,`IGT`,`IIVI`,`ILF`,`ILMN`,`ILPT`,`IMAX`,`IMGN`,`IMKTA`,`IMMR`,`IMMU`,`IMO`,`IMOS`,`IMPV`,`INAP`,`INCY`,`INFN`,`INFO`,`INFY`,`INFY1`,`ING`,`INGN`,`INGR`,`INN`,`INOV`,`INSM`,`INSY`,`INT`,`INTC`,`INTU`,`INVA`,`INXN`,`IONS`,`IP`,`IPG`,`IPGP`,`IPHI`,`IPHS`,`IQ`,`IQV`,`IR`,`IRBT`,`IRDM`,`IRM`,`ISRG`,`IT`,`ITB`,`ITCI`,`ITG`,`ITRI`,`ITT`,`ITUB`,`ITW`,`IVAC`,`IVR`,`IVV`,`IVZ`,`IWB`,`IWD`,`IWM`,`IWN`,`IWO`,`IYE`,`IYF`,`IYM`,`IYR`,`IYT`,`JACK`,`JAZZ`,`JBHT`,`JBL`,`JBLU`,`JBSS`,`JBT`,`JCI`,`JCI3`,`JCOM`,`JCP`,`JD`,`JEC`,`JEF`,`JJOFF`,`JKHY`,`JKS`,`JLL`,`JMEI`,`JNJ`,`JNK`,`JNPR`,`JNUG`,`JOE`,`JPM`,`JWN`,`K`,`KALU`,`KANG`,`KAR`,`KB`,`KBE`,`KBH`,`KBR`,`KDP`,`KDP1`,`KEM`,`KEX`,`KEY`,`KEYS`,`KEYW`,`KFRC`,`KGC`,`KHC`,`KIE`,`KIM`,`KIRK`,`KKR`,`KL`,`KLAC`,`KLIC`,`KLXI1`,`KMB`,`KMI`,`KMT`,`KMX`,`KN`,`KNDI`,`KNL`,`KNX`,`KNX1`,`KO`,`KODK`,`KOL`,`KOP`,`KORS`,`KR`,`KRA`,`KRC`,`KRE`,`KRO`,`KS`,`KSS`,`KSU`,`KW`,`KWR`,`L`,`LABL`,`LABU`,`LAD`,`LADR`,`LAMR`,`LAZ`,`LB`,`LBRDK`,`LBTYA`,`LBTYK`,`LC`,`LCII`,`LDOS`,`LE`,`LEA`,`LECO`,`LEG`,`LEN`,`LEN1`,`LFC`,`LFIN`,`LFUS`,`LGFA`,`LGFA1
`,`LGIH`,`LGND`,`LH`,`LHCG`,`LHCG1`,`LHO`,`LII`,`LITE`,`LIVN`,`LKQ`,`LL`,`LLL`,`LLY`,`LM`,`LMNR`,`LMNX`,`LMT`,`LN`,`LNC`,`LNG`,`LNG1`,`LNN`,`LNT`,`LOCO`,`LOGI`,`LOGM`,`LOPE`,`LORL`,`LOW`,`LOXO`,`LPI`,`LPL`,`LPLA`,`LPNT`,`LPSN`,`LPT`,`LPX`,`LQD`,`LQDT`,`LRCX`,`LRN`,`LSCC`,`LSI`,`LSTR`,`LSXMA`,`LTC`,`LULU`,`LUV`,`LVS`,`LW`,`LXP`,`LYB`,`LYG`,`LYV`,`LZB`,`M`,`MA`,`MAA`,`MAC`,`MAIN`,`MAN`,`MANH`,`MANT`,`MANU`,`MAR`,`MAS`,`MAT`,`MATX`,`MB`,`MBFI`,`MBI`,`MBT`,`MBUU`,`MCD`,`MCF`,`MCHP`,`MCK`,`MCO`,`MCRI`,`MCS`,`MCY`,`MD`,`MDB`,`MDC`,`MDCO`,`MDGL`,`MDLZ`,`MDP`,`MDR`,`MDR1`,`MDR2`,`MDRX`,`MDSO`,`MDT`,`MDU`,`MDXG`,`MDY`,`MED`,`MEI`,`MELI`,`MEOH`,`MET`,`MET1`,`MFA`,`MFC`,`MFGP`,`MGA`,`MGI`,`MGLN`,`MGM`,`MGPI`,`MHK`,`MHLD`,`MHO`,`MIC`,`MIDD`,`MIK`,`MIME`,`MINI`,`MITL`,`MKC`,`MKSI`,`MKTX`,`MLCO`,`MLHR`,`MLM`,`MLNT`,`MLNX`,`MMC`,`MMLP`,`MMM`,`MMP`,`MMS`,`MMSI`,`MMYT`,`MNK`,`MNRO`,`MNST`,`MNTA`,`MO`,`MODN`,`MOH`,`MOMO`,`MOO`,`MOS`,`MOV`,`MPC`,`MPC1`,`MPC2`,`MPLX`,`MPW`,`MPWR`,`MRC`,`MRCY`,`MRK`,`MRO`,`MRTX`,`MRVL`,`MRVL1`,`MS`,`MSCI`,`MSFT`,`MSGN`,`MSI`,`MSM`,`MSTR`,`MT`,`MT1`,`MTB`,`MTCH`,`MTD`,`MTDR`,`MTG`,`MTH`,`MTN`,`MTOR`,`MTSI`,`MTW`,`MTZ`,`MU`,`MUR`,`MUSA`,`MX`,`MXIM`,`MXL`,`MXWL`,`MYGN`,`MYL`,`MZOR`,`NANO`,`NAT`,`NATI`,`NAV`,`NAVG`,`NAVI`,`NBIX`,`NBL`,`NBR`,`NCLH`,`NCMI`,`NCR`,`NCS`,`NDAQ`,`NDLS`,`NDSN`,`NDX`,`NE`,`NEE`,`NEM`,`NEU`,`NEWM`,`NEWR`,`NFBK`,`NFG`,`NFLX`,`NFX`,`NGD`,`NGG`,`NGL`,`NGVC`,`NHI`,`NHTC`,`NI`,`NJR`,`NKE`,`NKTR`,`NLS`,`NLSN`,`NLY`,`NLY1`,`NNN`,`NOC`,`NOK`,`NOV`,`NOVT`,`NOW`,`NPO`,`NPTN`,`NRG`,`NRP`,`NRZ`,`NS`,`NS1`,`NSC`,`NSIT`,`NSP`,`NTAP`,`NTCT`,`NTES`,`NTGR`,`NTLA`,`NTNX`,`NTR`,`NTR1`,`NTR2`,`NTRI`,`NTRS`,`NUAN`,`NUE`,`NUGT`,`NUS`,`NVCR`,`NVDA`,`NVGS`,`NVMI`,`NVO`,`NVRO`,`NVS`,`NVT`,`NWE`,`NWL`,`NWN`,`NWPX`,`NWS`,`NWSA`,`NXGN`,`NXPI`,`NXST`,`NXTM`,`NYCB`,`NYRT`,`NYRT3`,`NYT`,`O`,`OAK`,`OAS`,`OC`,`OCLR`,`OCN`,`ODFL`,`ODP`,`OEC`,`OEF`,`OFC`,`OFG`,`OGE`,`OGS`,`OHI`,`OI`,`OIH`,`OII`,`OILNF`,`OIS`,`OKE`,`OKTA`,`OLED`,`OLLI`,`OLN`,`OMC`,`OMCL`,`OMER`,`OMF`,
`OMI`,`ON`,`ONCE`,`ONDK`,`OPK`,`ORA`,`ORAN`,`ORBK`,`ORCL`,`ORI`,`ORIG`,`ORLY`,`OSIS`,`OSK`,`OSPN`,`OSTK`,`OTEX`,`OUT`,`OXM`,`OXY`,`OZK`,`OZM`,`P`,`PAA`,`PAAS`,`PACW`,`PAG`,`PAGP`,`PAGP1`,`PAGS`,`PAH`,`PANW`,`PAYC`,`PAYX`,`PB`,`PBCT`,`PBCT1`,`PBF`,`PBFX`,`PBI`,`PBPB`,`PBR`,`PBYI`,`PCAR`,`PCG`,`PCH`,`PCH2`,`PCRX`,`PCTY`,`PCYG`,`PDCE`,`PDCO`,`PDD`,`PDFS`,`PDM`,`PE`,`PEB`,`PEG`,`PEGA`,`PEGI`,`PEI`,`PENN`,`PENN1`,`PEP`,`PERY`,`PETS`,`PF`,`PFE`,`PFG`,`PFPT`,`PG`,`PGJ`,`PGNX`,`PGR`,`PGRE`,`PGTI`,`PH`,`PHM`,`PI`,`PICO`,`PII`,`PINC`,`PIR`,`PKG`,`PKI`,`PKX`,`PLAB`,`PLAY`,`PLCE`,`PLD`,`PLD1`,`PLNT`,`PLT`,`PLXS`,`PM`,`PMT`,`PNC`,`PNFP`,`PNK`,`PNM`,`PNR`,`PNR1`,`PNW`,`PODD`,`POL`,`POOL`,`POR`,`POST`,`POWI`,`PPC`,`PPG`,`PPH`,`PPL`,`PRAA`,`PRAH`,`PRFT`,`PRGO`,`PRGS`,`PRI`,`PRLB`,`PRMW`,`PRO`,`PRSC`,`PRTA`,`PRTK`,`PRTY`,`PRU`,`PSA`,`PSB`,`PSEC`,`PSMT`,`PSTG`,`PSX`,`PSXP`,`PTC`,`PTEN`,`PTLA`,`PTR`,`PTR1`,`PTR2`,`PUK`,`PVG`,`PVH`,`PVTL`,`PWR`,`PX`,`PXD`,`PYPL`,`PZZA`,`QADA`,`QCOM`,`QD`,`QDEL`,`QEP`,`QID`,`QID1`,`QIWI`,`QLD`,`QLYS`,`QNST`,`QQQ`,`QRTEA`,`QRVO`,`QSR`,`QTWO`,`QUAD`,`QUOT`,`QURE`,`R`,`RACE`,`RAD`,`RAIL`,`RAMP`,`RAVN`,`RBA`,`RBBN`,`RBC`,`RBS`,`RCI`,`RCII`,`RCL`,`RDC`,`RDFN`,`RDN`,`RDSA`,`RDSB`,`RDUS`,`RDWR`,`RE`,`REG`,`REGI`,`REGN`,`REN`,`REPH`,`RES`,`RETA`,`REV`,`RF`,`RGA`,`RGEN`,`RGLD`,`RGNX`,`RGR`,`RH`,`RHI`,`RHT`,`RIG`,`RIO`,`RIOT`,`RJF`,`RL`,`RLGY`,`RLI`,`RLJ`,`RMBS`,`RMD`,`RMTI`,`RNG`,`RNR`,`ROCK`,`ROG`,`ROIC`,`ROK`,`ROKU`,`ROP`,`ROST`,`RP`,`RPAI`,`RPD`,`RPM`,`RRC`,`RRD`,`RRGB`,`RRR`,`RS`,`RSG`,`RST`,`RSX`,`RTEC`,`RTN`,`RTRX`,`RUBI`,`RUN`,`RUSHA`,`RUSL`,`RUTH`,`RWR`,`RXN`,`RY`,`RYAAY`,`RYAM`,`RYN`,`S`,`SA`,`SABR`,`SAFM`,`SAGE`,`SAH`,`SAIA`,`SAIC`,`SAIL`,`SAM`,`SAN`,`SAN1`,`SANM`,`SAP`,`SASR`,`SATS`,`SAVE`,`SBAC`,`SBGI`,`SBGL`,`SBGL2`,`SBH`,`SBNY`,`SBRA`,`SBUX`,`SC`,`SCCO`,`SCG`,`SCHD`,`SCHL`,`SCHN`,`SCHW`,`SCI`,`SCL`,`SCO`,`SCS`,`SCSC`,`SD`,`SDRL1`,`SDS`,`SDS1`,`SE`,`SEAS`,`SEDG`,`SEE`,`SEIC`,`SEM`,`SEMG`,`SEND`,`SEP`,`SERV`,`SERV1`,`SF`,`SFIX`,`SFL`,`SFLY`,`SFM`,`SF
UN`,`SGEN`,`SGH`,`SGMO`,`SGMS`,`SH`,`SHAK`,`SHEN`,`SHLD`,`SHLX`,`SHO`,`SHOO`,`SHOO1`,`SHOP`,`SHPG`,`SHW`,`SIG`,`SIGM`,`SIGM1`,`SIL`,`SIMO`,`SINA`,`SINA2`,`SIR`,`SIRI`,`SITC`,`SITC1`,`SITC2`,`SIVB`,`SIX`,`SJI`,`SJM`,`SKF`,`SKT`,`SKX`,`SKYW`,`SLAB`,`SLB`,`SLCA`,`SLG`,`SLGN`,`SLM`,`SLV`,`SLX`,`SM`,`SMCI`,`SMG`,`SMH`,`SMLP`,`SMN`,`SMN1`,`SMTC`,`SN`,`SNA`,`SNAP`,`SNBR`,`SNCR`,`SNE`,`SNH`,`SNHY`,`SNN`,`SNP`,`SNPS`,`SNV`,`SNX`,`SNX1`,`SNY`,`SO`,`SODA`,`SOGO`,`SOHU`,`SON`,`SONC`,`SONO`,`SPA`,`SPB`,`SPB1`,`SPG`,`SPGI`,`SPH`,`SPLK`,`SPN`,`SPOT`,`SPPI`,`SPR`,`SPTN`,`SPWR`,`SPX`,`SPXC`,`SPXL`,`SPXS`,`SPXU`,`SPXU1`,`SPY`,`SQ`,`SQM`,`SQM1`,`SQQQ`,`SQQQ1`,`SRC`,`SRC1`,`SRCI`,`SRCL`,`SRE`,`SRG`,`SRI`,`SRPT`,`SRS`,`SSB`,`SSC`,`SSD`,`SSL`,`SSNC`,`SSO`,`SSP`,`SSRM`,`SSTK`,`SSW`,`SSYS`,`ST`,`STAY`,`STI`,`STKL`,`STLD`,`STM`,`STMP`,`STNG`,`STON`,`STOR`,`STRA`,`STRA1`,`STT`,`STWD`,`STX`,`STZ`,`SU`,`SUI`,`SUM`,`SUN`,`SUP`,`SVU`,`SVU1`,`SVXY`,`SVXY1`,`SWCH`,`SWIR`,`SWK`,`SWKS`,`SWM`,`SWN`,`SWX`,`SXC`,`SYF`,`SYK`,`SYKE`,`SYMC`,`SYNA`,`SYNH`,`SYX`,`SYY`,`T`,`T1`,`TACO`,`TAHO`,`TAL`,`TAP`,`TBI`,`TBT`,`TCBI`,`TCF`,`TCO`,`TCP`,`TCS`,`TCX`,`TD`,`TDC`,`TDG`,`TDOC`,`TDS`,`TEAM`,`TECD`,`TECK`,`TECL`,`TEF`,`TEL`,`TEN`,`TEO`,`TER`,`TERP`,`TEVA`,`TEX`,`TFSL`,`TGE`,`TGE1`,`TGI`,`TGNA`,`TGP`,`TGT`,`TGTX`,`THC`,`THO`,`THRM`,`THS`,`TIF`,`TISI`,`TITN`,`TIVO`,`TJX`,`TK`,`TKR`,`TLRD`,`TLRY`,`TLT`,`TLYS`,`TM`,`TMF`,`TMHC`,`TMHC1`,`TMK`,`TMO`,`TMST`,`TMUS`,`TNA`,`TNC`,`TNDM`,`TNET`,`TOL`,`TOO`,`TOT`,`TOWN`,`TPC`,`TPH`,`TPR`,`TPX`,`TQQQ`,`TRCO`,`TREE`,`TREX`,`TRGP`,`TRI`,`TRIP`,`TRMB`,`TRMK`,`TRN`,`TROW`,`TROX`,`TRP`,`TRQ`,`TRS`,`TRTN`,`TRU`,`TRUE`,`TRV`,`TRVG`,`TRVN`,`TS`,`TSCO`,`TSE`,`TSEM`,`TSG`,`TSLA`,`TSLA1`,`TSM`,`TSN`,`TSRO`,`TSS`,`TTC`,`TTD`,`TTGT`,`TTM`,`TTMI`,`TTS`,`TTWO`,`TU`,`TUP`,`TUR`,`TUSK`,`TV`,`TVPT`,`TVTY`,`TWI`,`TWLO`,`TWM`,`TWNK`,`TWO`,`TWO1`,`TWOU`,`TWTR`,`TX`,`TXMD`,`TXN`,`TXRH`,`TXT`,`TYL`,`TYPE`,`TZOO`,`UA`,`UAA`,`UAL`,`UBNT`,`UCO`,`UCO2`,`UCTT`,`UDR`,`UEPS`,`UFS`,`UGI`,`UHS`,`UIHC`,`UIS`
,`UL`,`ULTA`,`ULTI`,`UN`,`UNFI`,`UNG`,`UNG1`,`UNH`,`UNIT`,`UNM`,`UNP`,`UNT`,`UNVR`,`UPLD`,`UPRO`,`UPS`,`URBN`,`URE`,`URI`,`USAC`,`USB`,`USCR`,`USFD`,`USG`,`USM`,`USO`,`USPH`,`UTHR`,`UTX`,`UUP`,`UVE`,`UVXY`,`UVXY1`,`UVXY2`,`UVXY3`,`UWM`,`UYG`,`UYM`,`V`,`VAC`,`VAC1`,`VALE`,`VAR`,`VC`,`VCRA`,`VDE`,`VECO`,`VEEV`,`VEON`,`VER`,`VFC`,`VG`,`VGK`,`VGR`,`VGR1`,`VIAB`,`VIAV`,`VICR`,`VIG`,`VIIX`,`VIPS`,`VIRT`,`VIXY`,`VIXY2`,`VJET`,`VKTX`,`VLO`,`VLP`,`VLRS`,`VLY`,`VMC`,`VMI`,`VMW`,`VNCE`,`VNDA`,`VNET`,`VNO`,`VNOM`,`VNQ`,`VOD`,`VOO`,`VOYA`,`VRA`,`VRNS`,`VRNT`,`VRSK`,`VRSN`,`VRTX`,`VSAT`,`VSH`,`VSI`,`VSLR`,`VSM`,`VST`,`VST1`,`VSTO`,`VTI`,`VTL`,`VTR`,`VVC`,`VVV`,`VWO`,`VXX`,`VXX2`,`VXXB`,`VXZ`,`VXZB`,`VYM`,`VZ`,`VZ1`,`W`,`WAB`,`WABC`,`WAGE`,`WAL`,`WAT`,`WATT`,`WB`,`WBA`,`WBAI`,`WBC`,`WBS`,`WCC`,`WCG`,`WCN`,`WDAY`,`WDC`,`WDFC`,`WDR`,`WEC`,`WELL`,`WEN`,`WERN`,`WES`,`WETF`,`WEX`,`WFC`,`WFT`,`WGO`,`WH`,`WHR`,`WIFI`,`WIN`,`WIN1`,`WING`,`WIRE`,`WIT`,`WIX`,`WK`,`WLH`,`WLK`,`WLKP`,`WLL`,`WLL1`,`WLTW`,`WM`,`WMB`,`WMB1`,`WMS`,`WMT`,`WNC`,`WOR`,`WP`,`WPC`,`WPG`,`WPM`,`WPP`,`WPX`,`WRB`,`WRD`,`WRE`,`WRI`,`WRK`,`WRLD`,`WSM`,`WSO`,`WTFC`,`WTR`,`WTW`,`WU`,`WUBA`,`WWD`,`WWE`,`WWW`,`WY`,`WYND`,`WYND1`,`WYNN`,`X`,`XBI`,`XEC`,`XEL`,`XENT`,`XES`,`XHB`,`XLB`,`XLE`,`XLF`,`XLI`,`XLK`,`XLNX`,`XLP`,`XLRN`,`XLU`,`XLV`,`XLY`,`XME`,`XNET`,`XOM`,`XON`,`XON2`,`XONE`,`XOP`,`XOXO`,`XPER`,`XPO`,`XRAY`,`XRT`,`XRX`,`XRX1`,`XRX2`,`XYL`,`YCS`,`YELP`,`YEXT`,`YNDX`,`YPF`,`YRCW`,`YRD`,`YUM`,`YUM1`,`YUMC`,`YY`,`Z`,`ZAGG`,`ZAYO`,`ZBH`,`ZBRA`,`ZEN`,`ZG`,`ZGNX`,`ZION`,`ZNGA`,`ZNH`,`ZOES`,`ZS`,`ZSL`,`ZTO`,`ZTS`,`ZUMZ`,`ZUO`"); assertEquals(MatchFilter.class, f.getClass()); } @@ -166,7 +158,7 @@ public void testInDateTimes() { DBDateTime tues = DBTimeUtils.convertDateTime("2018-05-01T10:00:00 NY"); DBDateTime thurs = DBTimeUtils.convertDateTime("2018-05-03T10:00:00 NY"); Table t = TableTools.newTable(TableTools.col("Timestamp", new DBDateTime(mon.getNanos()), - new DBDateTime(tues.getNanos()), new 
DBDateTime(thurs.getNanos()))); + new DBDateTime(tues.getNanos()), new DBDateTime(thurs.getNanos()))); // match one item SelectFilter f = SelectFilterFactory.getExpression("Timestamp in '" + mon + "'"); f.init(t.getDefinition()); @@ -200,70 +192,67 @@ public void testInDateTimes() { } public void testTypeInference() { - checkResult("1", true, true, true, true, true, true, false, true, true, (byte) 1, (short) 1, - 1, 1, new BigInteger("1"), 1.0, new BigDecimal("1"), '1'); - checkResult("-11", true, true, true, true, true, true, false, true, false, (byte) -11, - (short) -11, -11, -11, new BigInteger("-11"), -11.0, new BigDecimal("-11"), '0'); - checkResult("1.", false, false, false, false, false, true, false, true, false, (byte) 1, - (short) 1, 1, 1, null, 1.0, new BigDecimal("1.0"), '0'); - checkResult("-11.", false, false, false, false, false, true, false, true, false, (byte) 1, - (short) 1, 1, 1, null, -11.0, new BigDecimal("-11.0"), '0'); - checkResult("1.1", false, false, false, false, false, true, false, true, false, (byte) 1, - (short) 1, 1, 1, null, 1.1, new BigDecimal("1.1"), '0'); - checkResult("1.01", false, false, false, false, false, true, false, true, false, (byte) 1, - (short) 1, 1, 1, null, 1.01, new BigDecimal("1.01"), '0'); - checkResult("128", false, true, true, true, true, true, false, true, false, (byte) 1, - (short) 128, 128, 128, new BigInteger("128"), 128, new BigDecimal("128"), '0'); - checkResult("-128", true, true, true, true, true, true, false, true, false, (byte) -128, - (short) -128, -128, -128, new BigInteger("-128"), -128, new BigDecimal("-128"), '0'); - checkResult("32768", false, false, true, true, true, true, false, true, false, (byte) 0, - (short) 0, 32768, 32768, new BigInteger("32768"), 32768, new BigDecimal("32768"), '0'); - checkResult("-32768", false, true, true, true, true, true, false, true, false, (byte) 0, - (short) -32768, -32768, -32768, new BigInteger("-32768"), -32768, - new BigDecimal("-32768"), '0'); - 
checkResult("2147483648", false, false, false, true, true, true, false, true, false, - (byte) 0, (short) 0, 0, 2147483648l, new BigInteger("2147483648"), 2147483648l, - new BigDecimal("2147483648"), '0'); - checkResult("-2147483648", false, false, true, true, true, true, false, true, false, - (byte) 0, (short) 0, -2147483648, -2147483648, new BigInteger("-2147483648"), - -2147483648, new BigDecimal("-2147483648"), '0'); - - checkResult("true", false, false, false, false, false, false, true, false, false, (byte) 0, - (short) 0, 0, 0, null, 0, null, '0'); - checkResult("True", false, false, false, false, false, false, true, false, false, (byte) 0, - (short) 0, 0, 0, null, 0, null, '0'); - checkResult("TrUe", false, false, false, false, false, false, true, false, false, (byte) 0, - (short) 0, 0, 0, null, 0, null, '0'); - checkResult("TRUE", false, false, false, false, false, false, true, false, false, (byte) 0, - (short) 0, 0, 0, null, 0, null, '0'); - checkResult("tru3", false, false, false, false, false, false, false, false, false, (byte) 0, - (short) 0, 0, 0, null, 0, null, '0'); - checkResult("false", false, false, false, false, false, false, true, false, false, (byte) 0, - (short) 0, 0, 0, null, 0, null, '0'); - checkResult("False", false, false, false, false, false, false, true, false, false, (byte) 0, - (short) 0, 0, 0, null, 0, null, '0'); - checkResult("FaLsE", false, false, false, false, false, false, true, false, false, (byte) 0, - (short) 0, 0, 0, null, 0, null, '0'); - checkResult("FALSE", false, false, false, false, false, false, true, false, false, (byte) 0, - (short) 0, 0, 0, null, 0, null, '0'); - checkResult("FALS3", false, false, false, false, false, false, false, false, false, - (byte) 0, (short) 0, 0, 0, null, 0, null, '0'); + checkResult("1", true, true, true, true, true, true, false, true, true, (byte) 1, (short) 1, 1, 1, + new BigInteger("1"), 1.0, new BigDecimal("1"), '1'); + checkResult("-11", true, true, true, true, true, true, false, true, 
false, (byte) -11, (short) -11, -11, -11, + new BigInteger("-11"), -11.0, new BigDecimal("-11"), '0'); + checkResult("1.", false, false, false, false, false, true, false, true, false, (byte) 1, (short) 1, 1, 1, null, + 1.0, new BigDecimal("1.0"), '0'); + checkResult("-11.", false, false, false, false, false, true, false, true, false, (byte) 1, (short) 1, 1, 1, + null, -11.0, new BigDecimal("-11.0"), '0'); + checkResult("1.1", false, false, false, false, false, true, false, true, false, (byte) 1, (short) 1, 1, 1, null, + 1.1, new BigDecimal("1.1"), '0'); + checkResult("1.01", false, false, false, false, false, true, false, true, false, (byte) 1, (short) 1, 1, 1, + null, 1.01, new BigDecimal("1.01"), '0'); + checkResult("128", false, true, true, true, true, true, false, true, false, (byte) 1, (short) 128, 128, 128, + new BigInteger("128"), 128, new BigDecimal("128"), '0'); + checkResult("-128", true, true, true, true, true, true, false, true, false, (byte) -128, (short) -128, -128, + -128, new BigInteger("-128"), -128, new BigDecimal("-128"), '0'); + checkResult("32768", false, false, true, true, true, true, false, true, false, (byte) 0, (short) 0, 32768, + 32768, new BigInteger("32768"), 32768, new BigDecimal("32768"), '0'); + checkResult("-32768", false, true, true, true, true, true, false, true, false, (byte) 0, (short) -32768, -32768, + -32768, new BigInteger("-32768"), -32768, new BigDecimal("-32768"), '0'); + checkResult("2147483648", false, false, false, true, true, true, false, true, false, (byte) 0, (short) 0, 0, + 2147483648l, new BigInteger("2147483648"), 2147483648l, new BigDecimal("2147483648"), '0'); + checkResult("-2147483648", false, false, true, true, true, true, false, true, false, (byte) 0, (short) 0, + -2147483648, -2147483648, new BigInteger("-2147483648"), -2147483648, new BigDecimal("-2147483648"), + '0'); + + checkResult("true", false, false, false, false, false, false, true, false, false, (byte) 0, (short) 0, 0, 0, + null, 0, null, '0'); + 
checkResult("True", false, false, false, false, false, false, true, false, false, (byte) 0, (short) 0, 0, 0, + null, 0, null, '0'); + checkResult("TrUe", false, false, false, false, false, false, true, false, false, (byte) 0, (short) 0, 0, 0, + null, 0, null, '0'); + checkResult("TRUE", false, false, false, false, false, false, true, false, false, (byte) 0, (short) 0, 0, 0, + null, 0, null, '0'); + checkResult("tru3", false, false, false, false, false, false, false, false, false, (byte) 0, (short) 0, 0, 0, + null, 0, null, '0'); + checkResult("false", false, false, false, false, false, false, true, false, false, (byte) 0, (short) 0, 0, 0, + null, 0, null, '0'); + checkResult("False", false, false, false, false, false, false, true, false, false, (byte) 0, (short) 0, 0, 0, + null, 0, null, '0'); + checkResult("FaLsE", false, false, false, false, false, false, true, false, false, (byte) 0, (short) 0, 0, 0, + null, 0, null, '0'); + checkResult("FALSE", false, false, false, false, false, false, true, false, false, (byte) 0, (short) 0, 0, 0, + null, 0, null, '0'); + checkResult("FALS3", false, false, false, false, false, false, false, false, false, (byte) 0, (short) 0, 0, 0, + null, 0, null, '0'); checkDateRange("18:43", makeDateTime("18:43"), makeDateTime("18:44")); checkDateRange("18:43:40", makeDateTime("18:43:40"), makeDateTime("18:43:41")); checkDateRange("18:43:40.100", makeDateTime("18:43:40.100"), makeDateTime("18:43:40.101")); checkDateRange("2018-03-25 NY", DBTimeUtils.convertDateTime("2018-03-25 NY"), - DBTimeUtils.convertDateTime("2018-03-26 NY")); + DBTimeUtils.convertDateTime("2018-03-26 NY")); checkDateRange("2018-03-25T18:00 NY", DBTimeUtils.convertDateTime("2018-03-25T18:00 NY"), - DBTimeUtils.convertDateTime("2018-03-25T18:01 NY")); - checkDateRange("2018-03-25T18:00:00 NY", - DBTimeUtils.convertDateTime("2018-03-25T18:00:00 NY"), - DBTimeUtils.convertDateTime("2018-03-25T18:00:01 NY")); + DBTimeUtils.convertDateTime("2018-03-25T18:01 NY")); + 
checkDateRange("2018-03-25T18:00:00 NY", DBTimeUtils.convertDateTime("2018-03-25T18:00:00 NY"), + DBTimeUtils.convertDateTime("2018-03-25T18:00:01 NY")); } private DBDateTime makeDateTime(String timeStr) { - ZonedDateTime zdt = ZonedDateTime.now(ZoneId.of("America/New_York")) - .truncatedTo(ChronoUnit.DAYS).plus(DBTimeUtils.convertTime(timeStr), ChronoUnit.NANOS); + ZonedDateTime zdt = ZonedDateTime.now(ZoneId.of("America/New_York")).truncatedTo(ChronoUnit.DAYS) + .plus(DBTimeUtils.convertTime(timeStr), ChronoUnit.NANOS); return DBTimeUtils.millisToTime(zdt.toInstant().toEpochMilli()); } @@ -282,11 +271,11 @@ private void checkDateRange(String input, DBDateTime lowerDate, DBDateTime upper assertEquals(upperDate.getNanos(), inf.dateUpper.getNanos()); } - private void checkResult(String input, boolean isByte, boolean isShort, boolean isInt, - boolean isLong, boolean isBigInt, - boolean isBigDecimal, boolean isBool, boolean isDouble, boolean isChar, byte byteVal, - short shortVal, int intVal, long longVal, BigInteger biVal, - double doubleVal, BigDecimal decVal, char charVal) { + private void checkResult(String input, boolean isByte, boolean isShort, boolean isInt, boolean isLong, + boolean isBigInt, + boolean isBigDecimal, boolean isBool, boolean isDouble, boolean isChar, byte byteVal, short shortVal, + int intVal, long longVal, BigInteger biVal, + double doubleVal, BigDecimal decVal, char charVal) { SelectFilterFactory.InferenceResult inf = new SelectFilterFactory.InferenceResult(input); assertEquals(isByte, inf.isByte); assertEquals(isShort, inf.isShort); @@ -333,24 +322,23 @@ private void checkResult(String input, boolean isByte, boolean isShort, boolean public void testIncludesMatcher() { Table t = TableTools.newTable(TableTools.col("Phrase", - /* 0 */ "T1", - /* 1 */ "T2", - /* 2 */ "T3", - /* 3 */ "ABCt1DEF", - /* 4 */ "t2", - /* 5 */ "John is Hungry at time T-1", - /* 6 */ "Sometimes T1 is better than T2, but T-3 is always awful", - /* 7 */ "T1 is Not t2, and 
T3 is awesome", - /* 8 */ "John is Hungry at time t2", - /* 9 */ "Sometimes T1 is better than T2, but T3 is always awful", - /* 10 */ "ABCT1T2T3DEF", - /* 11 */ "John is Hungry at time T1 and Also t2 and T3!", - /* 12 */ " All of these T's are getting pretty tiring", - /* 13 */ "I amNot t2, and neither are you!", - /* 14 */ null)); - - SelectFilter f = - SelectFilterFactory.getExpression("Phrase icase includes any `T1`, `T2`, `T3`"); + /* 0 */ "T1", + /* 1 */ "T2", + /* 2 */ "T3", + /* 3 */ "ABCt1DEF", + /* 4 */ "t2", + /* 5 */ "John is Hungry at time T-1", + /* 6 */ "Sometimes T1 is better than T2, but T-3 is always awful", + /* 7 */ "T1 is Not t2, and T3 is awesome", + /* 8 */ "John is Hungry at time t2", + /* 9 */ "Sometimes T1 is better than T2, but T3 is always awful", + /* 10 */ "ABCT1T2T3DEF", + /* 11 */ "John is Hungry at time T1 and Also t2 and T3!", + /* 12 */ " All of these T's are getting pretty tiring", + /* 13 */ "I amNot t2, and neither are you!", + /* 14 */ null)); + + SelectFilter f = SelectFilterFactory.getExpression("Phrase icase includes any `T1`, `T2`, `T3`"); assertTrue("f instanceof StringContainsFilter", f instanceof StringContainsFilter); f.init(t.getDefinition()); Index result = f.filter(t.getIndex().clone(), t.getIndex(), t, false); diff --git a/DB/src/test/java/io/deephaven/db/tables/select/SelectFilterTest.java b/DB/src/test/java/io/deephaven/db/tables/select/SelectFilterTest.java index 1ce0315f39b..c6b6226bb60 100644 --- a/DB/src/test/java/io/deephaven/db/tables/select/SelectFilterTest.java +++ b/DB/src/test/java/io/deephaven/db/tables/select/SelectFilterTest.java @@ -44,39 +44,36 @@ public void testNeq() { } public void testGt() { - expect(FilterCondition.gt(FOO, V42), LongRangeFilter.class, - "LongRangeFilter(Foo in (42,9223372036854775807])"); + expect(FilterCondition.gt(FOO, V42), LongRangeFilter.class, "LongRangeFilter(Foo in (42,9223372036854775807])"); expect(FilterCondition.gt(V42, FOO), LongRangeFilter.class, - 
"LongRangeFilter(Foo in [-9223372036854775808,42))"); + "LongRangeFilter(Foo in [-9223372036854775808,42))"); expect(FilterCondition.gt(FOO, BAR), ConditionFilter.class, "Foo > Bar"); } public void testGte() { expect(FilterCondition.gte(FOO, V42), LongRangeFilter.class, - "LongRangeFilter(Foo in [42,9223372036854775807])"); + "LongRangeFilter(Foo in [42,9223372036854775807])"); expect(FilterCondition.gte(V42, FOO), LongRangeFilter.class, - "LongRangeFilter(Foo in [-9223372036854775808,42])"); + "LongRangeFilter(Foo in [-9223372036854775808,42])"); expect(FilterCondition.gte(FOO, BAR), ConditionFilter.class, "Foo >= Bar"); } public void testLt() { expect(FilterCondition.lt(FOO, V42), LongRangeFilter.class, - "LongRangeFilter(Foo in [-9223372036854775808,42))"); - expect(FilterCondition.lt(V42, FOO), LongRangeFilter.class, - "LongRangeFilter(Foo in (42,9223372036854775807])"); + "LongRangeFilter(Foo in [-9223372036854775808,42))"); + expect(FilterCondition.lt(V42, FOO), LongRangeFilter.class, "LongRangeFilter(Foo in (42,9223372036854775807])"); expect(FilterCondition.lt(FOO, BAR), ConditionFilter.class, "Foo < Bar"); } public void testLte() { expect(FilterCondition.lte(FOO, V42), LongRangeFilter.class, - "LongRangeFilter(Foo in [-9223372036854775808,42])"); + "LongRangeFilter(Foo in [-9223372036854775808,42])"); expect(FilterCondition.lte(V42, FOO), LongRangeFilter.class, - "LongRangeFilter(Foo in [42,9223372036854775807])"); + "LongRangeFilter(Foo in [42,9223372036854775807])"); expect(FilterCondition.lte(FOO, BAR), ConditionFilter.class, "Foo <= Bar"); } - private static void expect(Filter filter, Class clazz, - String expected) { + private static void expect(Filter filter, Class clazz, String expected) { SelectFilter impl = SelectFilter.of(filter); assertThat(impl).isInstanceOf(clazz); // SelectFilter doesn't necessary implement equals, so we need to use the string repr diff --git a/DB/src/test/java/io/deephaven/db/tables/utils/TestDBDateTime.java 
b/DB/src/test/java/io/deephaven/db/tables/utils/TestDBDateTime.java index 86b3392a500..e269c74b0b8 100644 --- a/DB/src/test/java/io/deephaven/db/tables/utils/TestDBDateTime.java +++ b/DB/src/test/java/io/deephaven/db/tables/utils/TestDBDateTime.java @@ -37,8 +37,7 @@ public void testAll() throws Exception { assertEquals(jodaDateTime, dateTime.getJodaDateTime()); - assertEquals(DBTimeZone.TZ_NY.getTimeZone(), - dateTime.getJodaDateTime(DBTimeZone.TZ_NY).getZone()); + assertEquals(DBTimeZone.TZ_NY.getTimeZone(), dateTime.getJodaDateTime(DBTimeZone.TZ_NY).getZone()); assertTrue(new DBDateTime(123456).equals(new DBDateTime(123456))); @@ -56,8 +55,7 @@ public void testInstant() { java.time.Instant target1 = java.time.Instant.ofEpochMilli(jodaDateTime.getMillis()); assertEquals(target1, dateTime1.getInstant()); - java.time.Instant target2 = - java.time.Instant.ofEpochSecond(jodaDateTime.getMillis() / 1000, 999123456); + java.time.Instant target2 = java.time.Instant.ofEpochSecond(jodaDateTime.getMillis() / 1000, 999123456); assertEquals(target2, dateTime2.getInstant()); } @@ -72,55 +70,46 @@ private long getMillisFromDateStr(SimpleDateFormat format, String dateStr) { public void testLastBusinessDateNy() { SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd"); - format.setTimeZone(TimeZones.TZ_NEWYORK); // stick with one timezone to ensure the test - // works properly + format.setTimeZone(TimeZones.TZ_NEWYORK); // stick with one timezone to ensure the test works properly String today; String dayBefore; - // Test that the overloaded methods match (this will break if we manage to straddle midnight - // while it's run!) - assertEquals(DBTimeUtils.lastBusinessDateNy(), - DBTimeUtils.lastBusinessDateNy(System.currentTimeMillis())); + // Test that the overloaded methods match (this will break if we manage to straddle midnight while it's run!) 
+ assertEquals(DBTimeUtils.lastBusinessDateNy(), DBTimeUtils.lastBusinessDateNy(System.currentTimeMillis())); DBTimeUtils.endOfCurrentDateNyLastBusinessDay = 0; // Test Monday-Friday today = "2013-11-18"; dayBefore = "2013-11-15"; - assertEquals(dayBefore, - DBTimeUtils.lastBusinessDateNy(getMillisFromDateStr(format, today))); + assertEquals(dayBefore, DBTimeUtils.lastBusinessDateNy(getMillisFromDateStr(format, today))); DBTimeUtils.endOfCurrentDateNyLastBusinessDay = 0; // Test end of month today = "2013-11-01"; dayBefore = "2013-10-31"; - assertEquals(dayBefore, - DBTimeUtils.lastBusinessDateNy(getMillisFromDateStr(format, today))); + assertEquals(dayBefore, DBTimeUtils.lastBusinessDateNy(getMillisFromDateStr(format, today))); DBTimeUtils.endOfCurrentDateNyLastBusinessDay = 0; // Test end of year today = "2012-01-01"; dayBefore = "2011-12-30"; - assertEquals(dayBefore, - DBTimeUtils.lastBusinessDateNy(getMillisFromDateStr(format, today))); + assertEquals(dayBefore, DBTimeUtils.lastBusinessDateNy(getMillisFromDateStr(format, today))); DBTimeUtils.endOfCurrentDateNyLastBusinessDay = 0; // Test a holiday (2013 thanksgivig) today = "2013-11-28"; dayBefore = "2013-11-27"; - assertEquals(dayBefore, - DBTimeUtils.lastBusinessDateNy(getMillisFromDateStr(format, today))); + assertEquals(dayBefore, DBTimeUtils.lastBusinessDateNy(getMillisFromDateStr(format, today))); DBTimeUtils.endOfCurrentDateNyLastBusinessDay = 0; today = "2013-11-29"; dayBefore = "2013-11-27"; - assertEquals(dayBefore, - DBTimeUtils.lastBusinessDateNy(getMillisFromDateStr(format, today))); + assertEquals(dayBefore, DBTimeUtils.lastBusinessDateNy(getMillisFromDateStr(format, today))); DBTimeUtils.endOfCurrentDateNyLastBusinessDay = 0; // Now test the current time // (Straight business calendar and the DBUtils codepath) - String fromCal = Calendars.calendar() - .previousBusinessDay(DBTimeUtils.millisToTime(System.currentTimeMillis())); + String fromCal = 
Calendars.calendar().previousBusinessDay(DBTimeUtils.millisToTime(System.currentTimeMillis())); assertEquals(DBTimeUtils.lastBusinessDateNy(), fromCal); // Test it a second time, since its cached assertEquals(DBTimeUtils.lastBusinessDateNy(), fromCal); @@ -129,15 +118,13 @@ public void testLastBusinessDateNy() { // Test cache rollover given times that advance a day today = "2013-11-26"; dayBefore = "2013-11-25"; - assertEquals(dayBefore, - DBTimeUtils.lastBusinessDateNy(getMillisFromDateStr(format, today))); + assertEquals(dayBefore, DBTimeUtils.lastBusinessDateNy(getMillisFromDateStr(format, today))); today = "2013-11-27"; dayBefore = "2013-11-26"; - assertEquals(dayBefore, - DBTimeUtils.lastBusinessDateNy(getMillisFromDateStr(format, today) + 1)); // make sure - // it advances - // just past - // midnight + assertEquals(dayBefore, DBTimeUtils.lastBusinessDateNy(getMillisFromDateStr(format, today) + 1)); // make sure + // it advances + // just past + // midnight // Rolling back should not work -- we have cached a later day today = "2013-11-26"; @@ -163,8 +150,7 @@ public void testLastBusinessDateNy() { } public void testToDateString() { - DBDateTime dateTime = convertDateTime("2016-11-06T04:00 UTC"); // 11/6 is the last day of - // DST + DBDateTime dateTime = convertDateTime("2016-11-06T04:00 UTC"); // 11/6 is the last day of DST { // America/New_York String zoneId = "America/New_York"; diff --git a/DB/src/test/java/io/deephaven/db/tables/utils/TestDBDateTimeFormatter.java b/DB/src/test/java/io/deephaven/db/tables/utils/TestDBDateTimeFormatter.java index 5ea7d657211..b2c6921dfad 100644 --- a/DB/src/test/java/io/deephaven/db/tables/utils/TestDBDateTimeFormatter.java +++ b/DB/src/test/java/io/deephaven/db/tables/utils/TestDBDateTimeFormatter.java @@ -22,8 +22,7 @@ public void test1() { final boolean hasTime = true; final int subsecondDigits = 9; final boolean hasTZ = true; - DBDateTimeFormatter dtf = - new DBDateTimeFormatter(isISO, hasDate, hasTime, 
subsecondDigits, hasTZ); + DBDateTimeFormatter dtf = new DBDateTimeFormatter(isISO, hasDate, hasTime, subsecondDigits, hasTZ); assertEquals("yyyy-MM-dd'T'HH:mm:ss.SSSSSSSSS %t", dtf.getPattern()); assertEquals("2015-06-13T14:12:11.123456789 MN", dtf.format(t, DBTimeZone.TZ_MN)); @@ -35,8 +34,7 @@ public void test2() { final boolean hasTime = true; final int subsecondDigits = 9; final boolean hasTZ = true; - DBDateTimeFormatter dtf = - new DBDateTimeFormatter(isISO, hasDate, hasTime, subsecondDigits, hasTZ); + DBDateTimeFormatter dtf = new DBDateTimeFormatter(isISO, hasDate, hasTime, subsecondDigits, hasTZ); assertEquals("yyyy-MM-dd HH:mm:ss.SSSSSSSSS %t", dtf.getPattern()); assertEquals("2015-06-13 14:12:11.123456789 MN", dtf.format(t, DBTimeZone.TZ_MN)); @@ -48,8 +46,7 @@ public void test3() { final boolean hasTime = true; final int subsecondDigits = 9; final boolean hasTZ = true; - DBDateTimeFormatter dtf = - new DBDateTimeFormatter(isISO, hasDate, hasTime, subsecondDigits, hasTZ); + DBDateTimeFormatter dtf = new DBDateTimeFormatter(isISO, hasDate, hasTime, subsecondDigits, hasTZ); assertEquals("HH:mm:ss.SSSSSSSSS %t", dtf.getPattern()); assertEquals("14:12:11.123456789 MN", dtf.format(t, DBTimeZone.TZ_MN)); @@ -61,8 +58,7 @@ public void test4() { final boolean hasTime = false; final int subsecondDigits = 9; final boolean hasTZ = true; - DBDateTimeFormatter dtf = - new DBDateTimeFormatter(isISO, hasDate, hasTime, subsecondDigits, hasTZ); + DBDateTimeFormatter dtf = new DBDateTimeFormatter(isISO, hasDate, hasTime, subsecondDigits, hasTZ); assertEquals("yyyy-MM-dd %t", dtf.getPattern()); assertEquals("2015-06-13 MN", dtf.format(t, DBTimeZone.TZ_MN)); @@ -74,8 +70,7 @@ public void test5() { final boolean hasTime = true; final int subsecondDigits = 4; final boolean hasTZ = true; - DBDateTimeFormatter dtf = - new DBDateTimeFormatter(isISO, hasDate, hasTime, subsecondDigits, hasTZ); + DBDateTimeFormatter dtf = new DBDateTimeFormatter(isISO, hasDate, hasTime, 
subsecondDigits, hasTZ); assertEquals("yyyy-MM-dd'T'HH:mm:ss.SSSS %t", dtf.getPattern()); assertEquals("2015-06-13T14:12:11.1234 MN", dtf.format(t, DBTimeZone.TZ_MN)); @@ -87,8 +82,7 @@ public void test6() { final boolean hasTime = true; final int subsecondDigits = 2; final boolean hasTZ = true; - DBDateTimeFormatter dtf = - new DBDateTimeFormatter(isISO, hasDate, hasTime, subsecondDigits, hasTZ); + DBDateTimeFormatter dtf = new DBDateTimeFormatter(isISO, hasDate, hasTime, subsecondDigits, hasTZ); assertEquals("yyyy-MM-dd'T'HH:mm:ss.SS %t", dtf.getPattern()); assertEquals("2015-06-13T14:12:11.12 MN", dtf.format(t, DBTimeZone.TZ_MN)); @@ -100,8 +94,7 @@ public void test7() { final boolean hasTime = true; final int subsecondDigits = 9; final boolean hasTZ = false; - DBDateTimeFormatter dtf = - new DBDateTimeFormatter(isISO, hasDate, hasTime, subsecondDigits, hasTZ); + DBDateTimeFormatter dtf = new DBDateTimeFormatter(isISO, hasDate, hasTime, subsecondDigits, hasTZ); assertEquals("yyyy-MM-dd'T'HH:mm:ss.SSSSSSSSS", dtf.getPattern()); assertEquals("2015-06-13T14:12:11.123456789", dtf.format(t, DBTimeZone.TZ_MN)); diff --git a/DB/src/test/java/io/deephaven/db/tables/utils/TestDBTimeUtils.java b/DB/src/test/java/io/deephaven/db/tables/utils/TestDBTimeUtils.java index 20cfb584a9e..26014a8cc79 100644 --- a/DB/src/test/java/io/deephaven/db/tables/utils/TestDBTimeUtils.java +++ b/DB/src/test/java/io/deephaven/db/tables/utils/TestDBTimeUtils.java @@ -36,8 +36,7 @@ public void testNanos() throws Exception { assertEquals(jodaDateTime.getMillis() * 1000000 + 123456, DBTimeUtils.nanos(dateTime)); - assertEquals(io.deephaven.util.QueryConstants.NULL_LONG, - DBTimeUtils.nanos((DBDateTime) null)); + assertEquals(io.deephaven.util.QueryConstants.NULL_LONG, DBTimeUtils.nanos((DBDateTime) null)); } public void testMidnightConversion() throws Exception { @@ -48,11 +47,9 @@ public void testMidnightConversion() throws Exception { DBDateTime midnight = DBTimeUtils.dateAtMidnight(dateTime, 
DBTimeZone.TZ_NY); assertEquals(jodaMidnight.getMillis(), DBTimeUtils.millis(midnight)); - assertEquals(jodaMidnight.getMillis(), - DBTimeUtils.millisToDateAtMidnightNy(dateTime.getMillis()).getMillis()); + assertEquals(jodaMidnight.getMillis(), DBTimeUtils.millisToDateAtMidnightNy(dateTime.getMillis()).getMillis()); - assertNull( - DBTimeUtils.millisToDateAtMidnightNy(io.deephaven.util.QueryConstants.NULL_LONG)); + assertNull(DBTimeUtils.millisToDateAtMidnightNy(io.deephaven.util.QueryConstants.NULL_LONG)); } public void testIsBefore() throws Exception { @@ -90,13 +87,11 @@ public void testPlus() throws Exception { DBPeriod period = new DBPeriod("T1h"); - assertEquals(dateTime.getNanos() + 3600000000000L, - DBTimeUtils.plus(dateTime, period).getNanos()); + assertEquals(dateTime.getNanos() + 3600000000000L, DBTimeUtils.plus(dateTime, period).getNanos()); period = new DBPeriod("-T1h"); - assertEquals(dateTime.getNanos() - 3600000000000L, - DBTimeUtils.plus(dateTime, period).getNanos()); + assertEquals(dateTime.getNanos() - 3600000000000L, DBTimeUtils.plus(dateTime, period).getNanos()); // overflow plus @@ -129,13 +124,11 @@ public void testMinus() throws Exception { DBPeriod period = new DBPeriod("T1h"); - assertEquals(dateTime1.getNanos() - 3600000000000L, - DBTimeUtils.minus(dateTime1, period).getNanos()); + assertEquals(dateTime1.getNanos() - 3600000000000L, DBTimeUtils.minus(dateTime1, period).getNanos()); period = new DBPeriod("-T1h"); - assertEquals(dateTime1.getNanos() + 3600000000000L, - DBTimeUtils.minus(dateTime1, period).getNanos()); + assertEquals(dateTime1.getNanos() + 3600000000000L, DBTimeUtils.minus(dateTime1, period).getNanos()); @@ -212,12 +205,12 @@ public void testMillisToNanos() throws Exception { } /* - * public void testMillisToNanosWithHack() throws Exception { // For this to pass, - * ENABLE_MICROTIME_HACK in DBTimeUtils must be true (i.e. 
you have // to run the tests with - * -DDBTimeUtils.enableMicrotimeHack=true) assertEquals(1_000_000, - * DBTimeUtils.millisToNanos(1)); assertEquals(1_000_000_000, DBTimeUtils.millisToNanos(1_000)); - * assertEquals(1531315655_000_000_000L, DBTimeUtils.millisToNanos(1531315655_000L)); - * assertEquals(1531315655_000_000_000L, DBTimeUtils.millisToNanos(1531315655_000_000L)); } + * public void testMillisToNanosWithHack() throws Exception { // For this to pass, ENABLE_MICROTIME_HACK in + * DBTimeUtils must be true (i.e. you have // to run the tests with -DDBTimeUtils.enableMicrotimeHack=true) + * assertEquals(1_000_000, DBTimeUtils.millisToNanos(1)); assertEquals(1_000_000_000, + * DBTimeUtils.millisToNanos(1_000)); assertEquals(1531315655_000_000_000L, + * DBTimeUtils.millisToNanos(1531315655_000L)); assertEquals(1531315655_000_000_000L, + * DBTimeUtils.millisToNanos(1531315655_000_000L)); } */ public void testNanosToMillis() throws Exception { @@ -253,86 +246,60 @@ public void testConvertDateQuiet() throws Exception { assertEquals(LocalDate.of(2018, 1, 1), DBTimeUtils.convertDateQuiet("20180101")); assertEquals(LocalDate.of(2018, 12, 31), DBTimeUtils.convertDateQuiet("20181231")); - // extremities of the format (LocalDate can store a much larger range than this but we - // aren't that interested) + // extremities of the format (LocalDate can store a much larger range than this but we aren't that interested) assertEquals(LocalDate.of(0, 1, 1), DBTimeUtils.convertDateQuiet("0000-01-01")); assertEquals(LocalDate.of(9999, 12, 31), DBTimeUtils.convertDateQuiet("9999-12-31")); // other variants - assertEquals(LocalDate.of(2018, 1, 1), - DBTimeUtils.convertDateQuiet("01/01/2018", DBTimeUtils.DateStyle.MDY)); - assertEquals(LocalDate.of(2018, 12, 31), - DBTimeUtils.convertDateQuiet("12/31/2018", DBTimeUtils.DateStyle.MDY)); - assertEquals(LocalDate.of(2018, 12, 31), - DBTimeUtils.convertDateQuiet("12/31/18", DBTimeUtils.DateStyle.MDY)); - assertEquals(LocalDate.of(2024, 6, 
25), - DBTimeUtils.convertDateQuiet("6/25/24", DBTimeUtils.DateStyle.MDY)); - assertEquals(LocalDate.of(2024, 6, 2), - DBTimeUtils.convertDateQuiet("6/2/24", DBTimeUtils.DateStyle.MDY)); - assertEquals(LocalDate.of(2024, 6, 2), - DBTimeUtils.convertDateQuiet("6/2/2024", DBTimeUtils.DateStyle.MDY)); - - assertEquals(LocalDate.of(2018, 1, 1), - DBTimeUtils.convertDateQuiet("01/01/2018", DBTimeUtils.DateStyle.DMY)); - assertEquals(LocalDate.of(2018, 12, 31), - DBTimeUtils.convertDateQuiet("31/12/2018", DBTimeUtils.DateStyle.DMY)); - assertEquals(LocalDate.of(2018, 12, 31), - DBTimeUtils.convertDateQuiet("31/12/18", DBTimeUtils.DateStyle.DMY)); - assertEquals(LocalDate.of(2024, 6, 25), - DBTimeUtils.convertDateQuiet("25/6/24", DBTimeUtils.DateStyle.DMY)); - assertEquals(LocalDate.of(2024, 6, 2), - DBTimeUtils.convertDateQuiet("2/6/24", DBTimeUtils.DateStyle.DMY)); - assertEquals(LocalDate.of(2024, 6, 2), - DBTimeUtils.convertDateQuiet("2/6/2024", DBTimeUtils.DateStyle.DMY)); - - - assertEquals(LocalDate.of(2018, 1, 1), - DBTimeUtils.convertDateQuiet("2018/01/01", DBTimeUtils.DateStyle.YMD)); - assertEquals(LocalDate.of(2018, 12, 31), - DBTimeUtils.convertDateQuiet("2018/12/31", DBTimeUtils.DateStyle.YMD)); - assertEquals(LocalDate.of(2018, 12, 31), - DBTimeUtils.convertDateQuiet("18/12/31", DBTimeUtils.DateStyle.YMD)); - assertEquals(LocalDate.of(2024, 6, 25), - DBTimeUtils.convertDateQuiet("24/6/25", DBTimeUtils.DateStyle.YMD)); - assertEquals(LocalDate.of(2024, 6, 2), - DBTimeUtils.convertDateQuiet("24/6/2", DBTimeUtils.DateStyle.YMD)); - assertEquals(LocalDate.of(2024, 6, 2), - DBTimeUtils.convertDateQuiet("2024/6/2", DBTimeUtils.DateStyle.YMD)); + assertEquals(LocalDate.of(2018, 1, 1), DBTimeUtils.convertDateQuiet("01/01/2018", DBTimeUtils.DateStyle.MDY)); + assertEquals(LocalDate.of(2018, 12, 31), DBTimeUtils.convertDateQuiet("12/31/2018", DBTimeUtils.DateStyle.MDY)); + assertEquals(LocalDate.of(2018, 12, 31), DBTimeUtils.convertDateQuiet("12/31/18", 
DBTimeUtils.DateStyle.MDY)); + assertEquals(LocalDate.of(2024, 6, 25), DBTimeUtils.convertDateQuiet("6/25/24", DBTimeUtils.DateStyle.MDY)); + assertEquals(LocalDate.of(2024, 6, 2), DBTimeUtils.convertDateQuiet("6/2/24", DBTimeUtils.DateStyle.MDY)); + assertEquals(LocalDate.of(2024, 6, 2), DBTimeUtils.convertDateQuiet("6/2/2024", DBTimeUtils.DateStyle.MDY)); + + assertEquals(LocalDate.of(2018, 1, 1), DBTimeUtils.convertDateQuiet("01/01/2018", DBTimeUtils.DateStyle.DMY)); + assertEquals(LocalDate.of(2018, 12, 31), DBTimeUtils.convertDateQuiet("31/12/2018", DBTimeUtils.DateStyle.DMY)); + assertEquals(LocalDate.of(2018, 12, 31), DBTimeUtils.convertDateQuiet("31/12/18", DBTimeUtils.DateStyle.DMY)); + assertEquals(LocalDate.of(2024, 6, 25), DBTimeUtils.convertDateQuiet("25/6/24", DBTimeUtils.DateStyle.DMY)); + assertEquals(LocalDate.of(2024, 6, 2), DBTimeUtils.convertDateQuiet("2/6/24", DBTimeUtils.DateStyle.DMY)); + assertEquals(LocalDate.of(2024, 6, 2), DBTimeUtils.convertDateQuiet("2/6/2024", DBTimeUtils.DateStyle.DMY)); + + + assertEquals(LocalDate.of(2018, 1, 1), DBTimeUtils.convertDateQuiet("2018/01/01", DBTimeUtils.DateStyle.YMD)); + assertEquals(LocalDate.of(2018, 12, 31), DBTimeUtils.convertDateQuiet("2018/12/31", DBTimeUtils.DateStyle.YMD)); + assertEquals(LocalDate.of(2018, 12, 31), DBTimeUtils.convertDateQuiet("18/12/31", DBTimeUtils.DateStyle.YMD)); + assertEquals(LocalDate.of(2024, 6, 25), DBTimeUtils.convertDateQuiet("24/6/25", DBTimeUtils.DateStyle.YMD)); + assertEquals(LocalDate.of(2024, 6, 2), DBTimeUtils.convertDateQuiet("24/6/2", DBTimeUtils.DateStyle.YMD)); + assertEquals(LocalDate.of(2024, 6, 2), DBTimeUtils.convertDateQuiet("2024/6/2", DBTimeUtils.DateStyle.YMD)); } public void testConvertLocalTimeQuiet() throws Exception { - assertEquals(java.time.LocalTime.of(12, 59, 59), - DBTimeUtils.convertLocalTimeQuiet("L12:59:59")); - assertEquals(java.time.LocalTime.of(0, 0, 0), - DBTimeUtils.convertLocalTimeQuiet("L00:00:00")); - 
assertEquals(java.time.LocalTime.of(23, 59, 59), - DBTimeUtils.convertLocalTimeQuiet("L23:59:59")); + assertEquals(java.time.LocalTime.of(12, 59, 59), DBTimeUtils.convertLocalTimeQuiet("L12:59:59")); + assertEquals(java.time.LocalTime.of(0, 0, 0), DBTimeUtils.convertLocalTimeQuiet("L00:00:00")); + assertEquals(java.time.LocalTime.of(23, 59, 59), DBTimeUtils.convertLocalTimeQuiet("L23:59:59")); - assertEquals(java.time.LocalTime.of(12, 59, 59), - DBTimeUtils.convertLocalTimeQuiet("L125959")); + assertEquals(java.time.LocalTime.of(12, 59, 59), DBTimeUtils.convertLocalTimeQuiet("L125959")); assertEquals(java.time.LocalTime.of(0, 0, 0), DBTimeUtils.convertLocalTimeQuiet("L000000")); - assertEquals(java.time.LocalTime.of(23, 59, 59), - DBTimeUtils.convertLocalTimeQuiet("L235959")); + assertEquals(java.time.LocalTime.of(23, 59, 59), DBTimeUtils.convertLocalTimeQuiet("L235959")); assertEquals(java.time.LocalTime.of(12, 0, 0), DBTimeUtils.convertLocalTimeQuiet("L12")); - assertEquals(java.time.LocalTime.of(12, 59, 0), - DBTimeUtils.convertLocalTimeQuiet("L12:59")); + assertEquals(java.time.LocalTime.of(12, 59, 0), DBTimeUtils.convertLocalTimeQuiet("L12:59")); assertEquals(java.time.LocalTime.of(12, 59, 59, 123_000_000), - DBTimeUtils.convertLocalTimeQuiet("L12:59:59.123")); + DBTimeUtils.convertLocalTimeQuiet("L12:59:59.123")); assertEquals(java.time.LocalTime.of(12, 59, 59, 123_456_000), - DBTimeUtils.convertLocalTimeQuiet("L12:59:59.123456")); + DBTimeUtils.convertLocalTimeQuiet("L12:59:59.123456")); assertEquals(java.time.LocalTime.of(12, 59, 59, 123_456_789), - DBTimeUtils.convertLocalTimeQuiet("L12:59:59.123456789")); + DBTimeUtils.convertLocalTimeQuiet("L12:59:59.123456789")); assertEquals(java.time.LocalTime.of(12, 0, 0), DBTimeUtils.convertLocalTimeQuiet("L12")); assertEquals(java.time.LocalTime.of(12, 59, 0), DBTimeUtils.convertLocalTimeQuiet("L1259")); - assertEquals(java.time.LocalTime.of(12, 59, 59, 123_000_000), - 
DBTimeUtils.convertLocalTimeQuiet("L125959.123")); + assertEquals(java.time.LocalTime.of(12, 59, 59, 123_000_000), DBTimeUtils.convertLocalTimeQuiet("L125959.123")); assertEquals(java.time.LocalTime.of(12, 59, 59, 123_456_000), - DBTimeUtils.convertLocalTimeQuiet("L125959.123456")); + DBTimeUtils.convertLocalTimeQuiet("L125959.123456")); assertEquals(java.time.LocalTime.of(12, 59, 59, 123_456_789), - DBTimeUtils.convertLocalTimeQuiet("L125959.123456789")); + DBTimeUtils.convertLocalTimeQuiet("L125959.123456789")); } public void testConvertDate() throws Exception { @@ -345,76 +312,60 @@ public void testConvertDate() throws Exception { } assertEquals("DBTimeUtils.convertDate(\"9999-12-31\")", - LocalDate.of(9999, 12, 31), - DBTimeUtils.convertDate("9999-12-31")); + LocalDate.of(9999, 12, 31), + DBTimeUtils.convertDate("9999-12-31")); } public void testConvertDateTimeQuiet() throws Exception { - assertEquals(new DBDateTime( - new DateTime("2010-01-01", DateTimeZone.forID("America/New_York")).getMillis() - * 1000000), - DBTimeUtils.convertDateTimeQuiet("2010-01-01 NY")); - assertEquals( - new DBDateTime( - new DateTime("2010-01-01T12:00:00", DateTimeZone.forID("America/New_York")) - .getMillis() * 1000000), - DBTimeUtils.convertDateTimeQuiet("2010-01-01T12:00:00 NY")); - assertEquals( - new DBDateTime( - new DateTime("2010-01-01T12:00:00.1", DateTimeZone.forID("America/New_York")) - .getMillis() * 1000000), - DBTimeUtils.convertDateTimeQuiet("2010-01-01T12:00:00.1 NY")); assertEquals( - new DBDateTime( - new DateTime("2010-01-01T12:00:00.123", DateTimeZone.forID("America/New_York")) - .getMillis() * 1000000), - DBTimeUtils.convertDateTimeQuiet("2010-01-01T12:00:00.123 NY")); - assertEquals( - new DBDateTime( - new DateTime("2010-01-01T12:00:00.123", DateTimeZone.forID("America/New_York")) - .getMillis() * 1000000 + 400000), - DBTimeUtils.convertDateTimeQuiet("2010-01-01T12:00:00.1234 NY")); - assertEquals( - new DBDateTime( - new DateTime("2010-01-01T12:00:00.123", 
DateTimeZone.forID("America/New_York")) - .getMillis() * 1000000 + 456789), - DBTimeUtils.convertDateTimeQuiet("2010-01-01T12:00:00.123456789 NY")); - + new DBDateTime( + new DateTime("2010-01-01", DateTimeZone.forID("America/New_York")).getMillis() * 1000000), + DBTimeUtils.convertDateTimeQuiet("2010-01-01 NY")); assertEquals(new DBDateTime( - new DateTime("2010-01-01", DateTimeZone.forID("America/Chicago")).getMillis() - * 1000000), - DBTimeUtils.convertDateTimeQuiet("2010-01-01 MN")); - assertEquals( - new DBDateTime( - new DateTime("2010-01-01T12:00:00", DateTimeZone.forID("America/Chicago")) - .getMillis() * 1000000), - DBTimeUtils.convertDateTimeQuiet("2010-01-01T12:00:00 MN")); - assertEquals( - new DBDateTime( - new DateTime("2010-01-01T12:00:00.1", DateTimeZone.forID("America/Chicago")) - .getMillis() * 1000000), - DBTimeUtils.convertDateTimeQuiet("2010-01-01T12:00:00.1 MN")); + new DateTime("2010-01-01T12:00:00", DateTimeZone.forID("America/New_York")).getMillis() * 1000000), + DBTimeUtils.convertDateTimeQuiet("2010-01-01T12:00:00 NY")); + assertEquals(new DBDateTime( + new DateTime("2010-01-01T12:00:00.1", DateTimeZone.forID("America/New_York")).getMillis() * 1000000), + DBTimeUtils.convertDateTimeQuiet("2010-01-01T12:00:00.1 NY")); + assertEquals(new DBDateTime( + new DateTime("2010-01-01T12:00:00.123", DateTimeZone.forID("America/New_York")).getMillis() * 1000000), + DBTimeUtils.convertDateTimeQuiet("2010-01-01T12:00:00.123 NY")); + assertEquals(new DBDateTime( + new DateTime("2010-01-01T12:00:00.123", DateTimeZone.forID("America/New_York")).getMillis() * 1000000 + + 400000), + DBTimeUtils.convertDateTimeQuiet("2010-01-01T12:00:00.1234 NY")); assertEquals( - new DBDateTime( - new DateTime("2010-01-01T12:00:00.123", DateTimeZone.forID("America/Chicago")) - .getMillis() * 1000000), - DBTimeUtils.convertDateTimeQuiet("2010-01-01T12:00:00.123 MN")); + new DBDateTime( + new DateTime("2010-01-01T12:00:00.123", 
DateTimeZone.forID("America/New_York")).getMillis() + * 1000000 + 456789), + DBTimeUtils.convertDateTimeQuiet("2010-01-01T12:00:00.123456789 NY")); + assertEquals( - new DBDateTime( - new DateTime("2010-01-01T12:00:00.123", DateTimeZone.forID("America/Chicago")) - .getMillis() * 1000000 + 400000), - DBTimeUtils.convertDateTimeQuiet("2010-01-01T12:00:00.1234 MN")); + new DBDateTime(new DateTime("2010-01-01", DateTimeZone.forID("America/Chicago")).getMillis() * 1000000), + DBTimeUtils.convertDateTimeQuiet("2010-01-01 MN")); + assertEquals(new DBDateTime( + new DateTime("2010-01-01T12:00:00", DateTimeZone.forID("America/Chicago")).getMillis() * 1000000), + DBTimeUtils.convertDateTimeQuiet("2010-01-01T12:00:00 MN")); + assertEquals(new DBDateTime( + new DateTime("2010-01-01T12:00:00.1", DateTimeZone.forID("America/Chicago")).getMillis() * 1000000), + DBTimeUtils.convertDateTimeQuiet("2010-01-01T12:00:00.1 MN")); + assertEquals(new DBDateTime( + new DateTime("2010-01-01T12:00:00.123", DateTimeZone.forID("America/Chicago")).getMillis() * 1000000), + DBTimeUtils.convertDateTimeQuiet("2010-01-01T12:00:00.123 MN")); + assertEquals(new DBDateTime( + new DateTime("2010-01-01T12:00:00.123", DateTimeZone.forID("America/Chicago")).getMillis() * 1000000 + + 400000), + DBTimeUtils.convertDateTimeQuiet("2010-01-01T12:00:00.1234 MN")); assertEquals( - new DBDateTime( - new DateTime("2010-01-01T12:00:00.123", DateTimeZone.forID("America/Chicago")) - .getMillis() * 1000000 + 456789), - DBTimeUtils.convertDateTimeQuiet("2010-01-01T12:00:00.123456789 MN")); + new DBDateTime( + new DateTime("2010-01-01T12:00:00.123", DateTimeZone.forID("America/Chicago")).getMillis() + * 1000000 + 456789), + DBTimeUtils.convertDateTimeQuiet("2010-01-01T12:00:00.123456789 MN")); assertEquals(new DBDateTime(1503343549064106107L), - DBTimeUtils.convertDateTimeQuiet("2017-08-21T15:25:49.064106107 NY")); + DBTimeUtils.convertDateTimeQuiet("2017-08-21T15:25:49.064106107 NY")); - // assertEquals(new 
DBDateTime(new DateTime("2010-01-01T12:00:00.123", - // DateTimeZone.UTC).getMillis()*1000000), + // assertEquals(new DBDateTime(new DateTime("2010-01-01T12:00:00.123", DateTimeZone.UTC).getMillis()*1000000), // DBTimeUtils.convertDateTimeQuiet("2010-01-01T12:00:00.123+0000")); // assertEquals(new DBDateTime(new DateTime("2010-01-01T12:00:00.123", // DateTimeZone.forID("America/New_York")).getMillis()*1000000), @@ -434,51 +385,39 @@ public void testConvertDateTime() throws Exception { } assertEquals("DBTimeUtils.convertDateTime(\"2262-04-11T19:47:16.854775807 NY\").getNanos()", - Long.MAX_VALUE, - DBTimeUtils.convertDateTime("2262-04-11T19:47:16.854775807 NY").getNanos()); + Long.MAX_VALUE, + DBTimeUtils.convertDateTime("2262-04-11T19:47:16.854775807 NY").getNanos()); } public void testConvertTimeQuiet() throws Exception { - assertEquals(new LocalTime("12:00").getMillisOfDay() * 1000000L, - DBTimeUtils.convertTimeQuiet("12:00")); - assertEquals(new LocalTime("12:00:00").getMillisOfDay() * 1000000L, - DBTimeUtils.convertTimeQuiet("12:00:00")); + assertEquals(new LocalTime("12:00").getMillisOfDay() * 1000000L, DBTimeUtils.convertTimeQuiet("12:00")); + assertEquals(new LocalTime("12:00:00").getMillisOfDay() * 1000000L, DBTimeUtils.convertTimeQuiet("12:00:00")); assertEquals(new LocalTime("12:00:00.123").getMillisOfDay() * 1000000L, - DBTimeUtils.convertTimeQuiet("12:00:00.123")); + DBTimeUtils.convertTimeQuiet("12:00:00.123")); assertEquals(new LocalTime("12:00:00.123").getMillisOfDay() * 1000000L + 400000, - DBTimeUtils.convertTimeQuiet("12:00:00.1234")); + DBTimeUtils.convertTimeQuiet("12:00:00.1234")); assertEquals(new LocalTime("12:00:00.123").getMillisOfDay() * 1000000L + 456789, - DBTimeUtils.convertTimeQuiet("12:00:00.123456789")); + DBTimeUtils.convertTimeQuiet("12:00:00.123456789")); - assertEquals(new LocalTime("2:00").getMillisOfDay() * 1000000L, - DBTimeUtils.convertTimeQuiet("2:00")); - assertEquals(new LocalTime("2:00:00").getMillisOfDay() * 1000000L, - 
DBTimeUtils.convertTimeQuiet("2:00:00")); + assertEquals(new LocalTime("2:00").getMillisOfDay() * 1000000L, DBTimeUtils.convertTimeQuiet("2:00")); + assertEquals(new LocalTime("2:00:00").getMillisOfDay() * 1000000L, DBTimeUtils.convertTimeQuiet("2:00:00")); assertEquals(new LocalTime("2:00:00.123").getMillisOfDay() * 1000000L, - DBTimeUtils.convertTimeQuiet("2:00:00.123")); + DBTimeUtils.convertTimeQuiet("2:00:00.123")); assertEquals(new LocalTime("2:00:00.123").getMillisOfDay() * 1000000L + 400000, - DBTimeUtils.convertTimeQuiet("2:00:00.1234")); + DBTimeUtils.convertTimeQuiet("2:00:00.1234")); assertEquals(new LocalTime("2:00:00.123").getMillisOfDay() * 1000000L + 456789, - DBTimeUtils.convertTimeQuiet("2:00:00.123456789")); - - assertEquals(new LocalTime("2:00").getMillisOfDay() * 1000000L - + 3L * 1000000 * DateUtil.MILLIS_PER_DAY, DBTimeUtils.convertTimeQuiet("3T2:00")); - assertEquals( - new LocalTime("2:00:00").getMillisOfDay() * 1000000L - + 3L * 1000000 * DateUtil.MILLIS_PER_DAY, - DBTimeUtils.convertTimeQuiet("3T2:00:00")); - assertEquals( - new LocalTime("2:00:00.123").getMillisOfDay() * 1000000L - + 3L * 1000000 * DateUtil.MILLIS_PER_DAY, - DBTimeUtils.convertTimeQuiet("3T2:00:00.123")); - assertEquals( - new LocalTime("2:00:00.123").getMillisOfDay() * 1000000L + 400000 - + 3L * 1000000 * DateUtil.MILLIS_PER_DAY, - DBTimeUtils.convertTimeQuiet("3T2:00:00.1234")); - assertEquals( - new LocalTime("2:00:00.123").getMillisOfDay() * 1000000L + 456789 - + 3L * 1000000 * DateUtil.MILLIS_PER_DAY, - DBTimeUtils.convertTimeQuiet("3T2:00:00.123456789")); + DBTimeUtils.convertTimeQuiet("2:00:00.123456789")); + + assertEquals(new LocalTime("2:00").getMillisOfDay() * 1000000L + 3L * 1000000 * DateUtil.MILLIS_PER_DAY, + DBTimeUtils.convertTimeQuiet("3T2:00")); + assertEquals(new LocalTime("2:00:00").getMillisOfDay() * 1000000L + 3L * 1000000 * DateUtil.MILLIS_PER_DAY, + DBTimeUtils.convertTimeQuiet("3T2:00:00")); + assertEquals(new 
LocalTime("2:00:00.123").getMillisOfDay() * 1000000L + 3L * 1000000 * DateUtil.MILLIS_PER_DAY, + DBTimeUtils.convertTimeQuiet("3T2:00:00.123")); + assertEquals(new LocalTime("2:00:00.123").getMillisOfDay() * 1000000L + 400000 + + 3L * 1000000 * DateUtil.MILLIS_PER_DAY, DBTimeUtils.convertTimeQuiet("3T2:00:00.1234")); + assertEquals(new LocalTime("2:00:00.123").getMillisOfDay() * 1000000L + 456789 + + 3L * 1000000 * DateUtil.MILLIS_PER_DAY, DBTimeUtils.convertTimeQuiet("3T2:00:00.123456789")); assertEquals(55549064106107L, DBTimeUtils.convertTimeQuiet("15:25:49.064106107")); } @@ -514,35 +453,26 @@ public void testConvertPeriod() throws Exception { public void testTimeFormat() throws Exception { assertEquals("12:00:00", DBTimeUtils.format(DBTimeUtils.convertTimeQuiet("12:00"))); assertEquals("12:00:00", DBTimeUtils.format(DBTimeUtils.convertTimeQuiet("12:00:00"))); - assertEquals("12:00:00.123000000", - DBTimeUtils.format(DBTimeUtils.convertTimeQuiet("12:00:00.123"))); - assertEquals("12:00:00.123400000", - DBTimeUtils.format(DBTimeUtils.convertTimeQuiet("12:00:00.1234"))); - assertEquals("12:00:00.123456789", - DBTimeUtils.format(DBTimeUtils.convertTimeQuiet("12:00:00.123456789"))); + assertEquals("12:00:00.123000000", DBTimeUtils.format(DBTimeUtils.convertTimeQuiet("12:00:00.123"))); + assertEquals("12:00:00.123400000", DBTimeUtils.format(DBTimeUtils.convertTimeQuiet("12:00:00.1234"))); + assertEquals("12:00:00.123456789", DBTimeUtils.format(DBTimeUtils.convertTimeQuiet("12:00:00.123456789"))); assertEquals("2:00:00", DBTimeUtils.format(DBTimeUtils.convertTimeQuiet("2:00"))); assertEquals("2:00:00", DBTimeUtils.format(DBTimeUtils.convertTimeQuiet("2:00:00"))); - assertEquals("2:00:00.123000000", - DBTimeUtils.format(DBTimeUtils.convertTimeQuiet("2:00:00.123"))); - assertEquals("2:00:00.123400000", - DBTimeUtils.format(DBTimeUtils.convertTimeQuiet("2:00:00.1234"))); - assertEquals("2:00:00.123456789", - 
DBTimeUtils.format(DBTimeUtils.convertTimeQuiet("2:00:00.123456789"))); + assertEquals("2:00:00.123000000", DBTimeUtils.format(DBTimeUtils.convertTimeQuiet("2:00:00.123"))); + assertEquals("2:00:00.123400000", DBTimeUtils.format(DBTimeUtils.convertTimeQuiet("2:00:00.1234"))); + assertEquals("2:00:00.123456789", DBTimeUtils.format(DBTimeUtils.convertTimeQuiet("2:00:00.123456789"))); assertEquals("3T2:00:00", DBTimeUtils.format(DBTimeUtils.convertTimeQuiet("3T2:00"))); assertEquals("3T2:00:00", DBTimeUtils.format(DBTimeUtils.convertTimeQuiet("3T2:00:00"))); - assertEquals("3T2:00:00.123000000", - DBTimeUtils.format(DBTimeUtils.convertTimeQuiet("3T2:00:00.123"))); - assertEquals("3T2:00:00.123400000", - DBTimeUtils.format(DBTimeUtils.convertTimeQuiet("3T2:00:00.1234"))); - assertEquals("3T2:00:00.123456789", - DBTimeUtils.format(DBTimeUtils.convertTimeQuiet("3T2:00:00.123456789"))); + assertEquals("3T2:00:00.123000000", DBTimeUtils.format(DBTimeUtils.convertTimeQuiet("3T2:00:00.123"))); + assertEquals("3T2:00:00.123400000", DBTimeUtils.format(DBTimeUtils.convertTimeQuiet("3T2:00:00.1234"))); + assertEquals("3T2:00:00.123456789", DBTimeUtils.format(DBTimeUtils.convertTimeQuiet("3T2:00:00.123456789"))); } public void testFormatDate() throws Exception { - assertEquals("2010-01-01", DBTimeUtils - .formatDate(DBTimeUtils.convertDateTimeQuiet("2010-01-01 NY"), DBTimeZone.TZ_NY)); + assertEquals("2010-01-01", + DBTimeUtils.formatDate(DBTimeUtils.convertDateTimeQuiet("2010-01-01 NY"), DBTimeZone.TZ_NY)); } public void testLowerBin() { @@ -551,17 +481,14 @@ public void testLowerBin() { final long hour = 60 * minute; DBDateTime time = DBTimeUtils.convertDateTime("2010-06-15T06:14:01.2345 NY"); - assertEquals(DBTimeUtils.convertDateTime("2010-06-15T06:14:01 NY"), - DBTimeUtils.lowerBin(time, second)); - assertEquals(DBTimeUtils.convertDateTime("2010-06-15T06:10:00 NY"), - DBTimeUtils.lowerBin(time, 5 * minute)); - assertEquals(DBTimeUtils.convertDateTime("2010-06-15T06:00:00 
NY"), - DBTimeUtils.lowerBin(time, hour)); + assertEquals(DBTimeUtils.convertDateTime("2010-06-15T06:14:01 NY"), DBTimeUtils.lowerBin(time, second)); + assertEquals(DBTimeUtils.convertDateTime("2010-06-15T06:10:00 NY"), DBTimeUtils.lowerBin(time, 5 * minute)); + assertEquals(DBTimeUtils.convertDateTime("2010-06-15T06:00:00 NY"), DBTimeUtils.lowerBin(time, hour)); assertEquals(null, DBTimeUtils.lowerBin(null, 5 * minute)); assertEquals(null, DBTimeUtils.lowerBin(time, io.deephaven.util.QueryConstants.NULL_LONG)); assertEquals(DBTimeUtils.lowerBin(time, second), - DBTimeUtils.lowerBin(DBTimeUtils.lowerBin(time, second), second)); + DBTimeUtils.lowerBin(DBTimeUtils.lowerBin(time, second), second)); } public void testLowerBinWithOffset() { @@ -571,13 +498,13 @@ public void testLowerBinWithOffset() { DBDateTime time = DBTimeUtils.convertDateTime("2010-06-15T06:14:01.2345 NY"); assertEquals(DBTimeUtils.convertDateTime("2010-06-15T06:11:00 NY"), - DBTimeUtils.lowerBin(time, 5 * minute, minute)); + DBTimeUtils.lowerBin(time, 5 * minute, minute)); assertEquals(null, DBTimeUtils.lowerBin(null, 5 * minute, minute)); assertEquals(null, DBTimeUtils.lowerBin(time, QueryConstants.NULL_LONG, minute)); assertEquals(null, DBTimeUtils.lowerBin(time, 5 * minute, QueryConstants.NULL_LONG)); assertEquals(DBTimeUtils.lowerBin(time, second, second), - DBTimeUtils.lowerBin(DBTimeUtils.lowerBin(time, second, second), second, second)); + DBTimeUtils.lowerBin(DBTimeUtils.lowerBin(time, second, second), second, second)); } public void testUpperBin() { @@ -586,17 +513,14 @@ public void testUpperBin() { final long hour = 60 * minute; DBDateTime time = DBTimeUtils.convertDateTime("2010-06-15T06:14:01.2345 NY"); - assertEquals(DBTimeUtils.convertDateTime("2010-06-15T06:14:02 NY"), - DBTimeUtils.upperBin(time, second)); - assertEquals(DBTimeUtils.convertDateTime("2010-06-15T06:15:00 NY"), - DBTimeUtils.upperBin(time, 5 * minute)); - assertEquals(DBTimeUtils.convertDateTime("2010-06-15T07:00:00 
NY"), - DBTimeUtils.upperBin(time, hour)); + assertEquals(DBTimeUtils.convertDateTime("2010-06-15T06:14:02 NY"), DBTimeUtils.upperBin(time, second)); + assertEquals(DBTimeUtils.convertDateTime("2010-06-15T06:15:00 NY"), DBTimeUtils.upperBin(time, 5 * minute)); + assertEquals(DBTimeUtils.convertDateTime("2010-06-15T07:00:00 NY"), DBTimeUtils.upperBin(time, hour)); assertEquals(null, DBTimeUtils.upperBin(null, 5 * minute)); assertEquals(null, DBTimeUtils.upperBin(time, io.deephaven.util.QueryConstants.NULL_LONG)); assertEquals(DBTimeUtils.upperBin(time, second), - DBTimeUtils.upperBin(DBTimeUtils.upperBin(time, second), second)); + DBTimeUtils.upperBin(DBTimeUtils.upperBin(time, second), second)); } public void testUpperBinWithOffset() { @@ -606,14 +530,13 @@ public void testUpperBinWithOffset() { DBDateTime time = DBTimeUtils.convertDateTime("2010-06-15T06:14:01.2345 NY"); assertEquals(DBTimeUtils.convertDateTime("2010-06-15T06:16:00 NY"), - DBTimeUtils.upperBin(time, 5 * minute, minute)); + DBTimeUtils.upperBin(time, 5 * minute, minute)); assertEquals(null, DBTimeUtils.upperBin(null, 5 * minute, minute)); - assertEquals(null, - DBTimeUtils.upperBin(time, io.deephaven.util.QueryConstants.NULL_LONG, minute)); + assertEquals(null, DBTimeUtils.upperBin(time, io.deephaven.util.QueryConstants.NULL_LONG, minute)); assertEquals(null, DBTimeUtils.upperBin(time, 5 * minute, QueryConstants.NULL_LONG)); assertEquals(DBTimeUtils.upperBin(time, second, second), - DBTimeUtils.upperBin(DBTimeUtils.upperBin(time, second, second), second, second)); + DBTimeUtils.upperBin(DBTimeUtils.upperBin(time, second, second), second, second)); } public void testConvertJimDateTimeQuiet() { @@ -625,12 +548,9 @@ public void testConvertJimDateTimeQuiet() { public void testGetExcelDate() { DBDateTime time = DBTimeUtils.convertDateTime("2010-06-15T16:00:00 NY"); - assertTrue( - CompareUtils.doubleEquals(40344.666666666664, DBTimeUtils.getExcelDateTime(time))); - 
assertTrue(CompareUtils.doubleEquals(40344.625, - DBTimeUtils.getExcelDateTime(time, TimeZones.TZ_CHICAGO))); - assertTrue(CompareUtils.doubleEquals(40344.625, - DBTimeUtils.getExcelDateTime(time, DBTimeZone.TZ_MN))); + assertTrue(CompareUtils.doubleEquals(40344.666666666664, DBTimeUtils.getExcelDateTime(time))); + assertTrue(CompareUtils.doubleEquals(40344.625, DBTimeUtils.getExcelDateTime(time, TimeZones.TZ_CHICAGO))); + assertTrue(CompareUtils.doubleEquals(40344.625, DBTimeUtils.getExcelDateTime(time, DBTimeZone.TZ_MN))); } /** @@ -664,73 +584,57 @@ public int doTestAutoEpochToTime(long epoch) { public void testAutoEpochToTime() { long inTheYear2035 = 2057338800; - assertEquals("doTestAutoEpochToTime(inTheYear2035)", 2035, - doTestAutoEpochToTime(inTheYear2035)); + assertEquals("doTestAutoEpochToTime(inTheYear2035)", 2035, doTestAutoEpochToTime(inTheYear2035)); long inTheYear1993 = 731966400; - assertEquals("doTestAutoEpochToTime(inTheYear1993)", 1993, - doTestAutoEpochToTime(inTheYear1993)); + assertEquals("doTestAutoEpochToTime(inTheYear1993)", 1993, doTestAutoEpochToTime(inTheYear1993)); long inTheYear2013 = 1363114800; - assertEquals("doTestAutoEpochToTime(inTheYear2013)", 2013, - doTestAutoEpochToTime(inTheYear2013)); + assertEquals("doTestAutoEpochToTime(inTheYear2013)", 2013, doTestAutoEpochToTime(inTheYear2013)); long inTheYear1904 = -2057338800; - assertEquals("doTestAutoEpochToTime(inTheYear1904)", 1904, - doTestAutoEpochToTime(inTheYear1904)); + assertEquals("doTestAutoEpochToTime(inTheYear1904)", 1904, doTestAutoEpochToTime(inTheYear1904)); long inTheYear1946 = -731966400; - assertEquals("doTestAutoEpochToTime(inTheYear1946)", 1946, - doTestAutoEpochToTime(inTheYear1946)); + assertEquals("doTestAutoEpochToTime(inTheYear1946)", 1946, doTestAutoEpochToTime(inTheYear1946)); long inTheYear1926 = -1363114800; - assertEquals("doTestAutoEpochToTime(inTheYear1926)", 1926, - doTestAutoEpochToTime(inTheYear1926)); + 
assertEquals("doTestAutoEpochToTime(inTheYear1926)", 1926, doTestAutoEpochToTime(inTheYear1926)); } public void testConvertExpression() throws Exception { - assertEquals("_date0", - DBTimeUtils.convertExpression("'2010-01-01 NY'").getConvertedFormula()); + assertEquals("_date0", DBTimeUtils.convertExpression("'2010-01-01 NY'").getConvertedFormula()); assertEquals("_time0", DBTimeUtils.convertExpression("'12:00'").getConvertedFormula()); assertEquals("_period0", DBTimeUtils.convertExpression("'T1S'").getConvertedFormula()); assertEquals("'g'", DBTimeUtils.convertExpression("'g'").getConvertedFormula()); } public void testMicrosOfMilli() { - assertEquals(0, - DBTimeUtils.microsOfMilliNy(DBTimeUtils.convertDateTime("2015-07-31T20:40 NY"))); - assertEquals(0, - DBTimeUtils.microsOfMilliNy(DBTimeUtils.convertDateTime("2015-07-31T20:40:00 NY"))); - assertEquals(0, - DBTimeUtils.microsOfMilliNy(DBTimeUtils.convertDateTime("2015-07-31T20:40:00.123 NY"))); - assertEquals(400, DBTimeUtils - .microsOfMilliNy(DBTimeUtils.convertDateTime("2015-07-31T20:40:00.1234 NY"))); - assertEquals(456, DBTimeUtils - .microsOfMilliNy(DBTimeUtils.convertDateTime("2015-07-31T20:40:00.123456 NY"))); - assertEquals(457, DBTimeUtils - .microsOfMilliNy(DBTimeUtils.convertDateTime("2015-07-31T20:40:00.1234567 NY"))); // this - // one - // should - // round - // up - assertEquals(457, DBTimeUtils - .microsOfMilliNy(DBTimeUtils.convertDateTime("2015-07-31T20:40:00.123456789 NY"))); // this - // one - // should - // round - // up + assertEquals(0, DBTimeUtils.microsOfMilliNy(DBTimeUtils.convertDateTime("2015-07-31T20:40 NY"))); + assertEquals(0, DBTimeUtils.microsOfMilliNy(DBTimeUtils.convertDateTime("2015-07-31T20:40:00 NY"))); + assertEquals(0, DBTimeUtils.microsOfMilliNy(DBTimeUtils.convertDateTime("2015-07-31T20:40:00.123 NY"))); + assertEquals(400, DBTimeUtils.microsOfMilliNy(DBTimeUtils.convertDateTime("2015-07-31T20:40:00.1234 NY"))); + assertEquals(456, 
DBTimeUtils.microsOfMilliNy(DBTimeUtils.convertDateTime("2015-07-31T20:40:00.123456 NY"))); + assertEquals(457, DBTimeUtils.microsOfMilliNy(DBTimeUtils.convertDateTime("2015-07-31T20:40:00.1234567 NY"))); // this + // one + // should + // round + // up + assertEquals(457, DBTimeUtils.microsOfMilliNy(DBTimeUtils.convertDateTime("2015-07-31T20:40:00.123456789 NY"))); // this + // one + // should + // round + // up } public void testZonedDateTime() { final DBDateTime dateTime1 = DBTimeUtils.convertDateTime("2015-07-31T20:40 NY"); - final ZonedDateTime zonedDateTime1 = ZonedDateTime.of(2015, 7, 31, 20, 40, 0, 0, - DBTimeZone.TZ_NY.getTimeZone().toTimeZone().toZoneId()); + final ZonedDateTime zonedDateTime1 = + ZonedDateTime.of(2015, 7, 31, 20, 40, 0, 0, DBTimeZone.TZ_NY.getTimeZone().toTimeZone().toZoneId()); assertEquals(zonedDateTime1, DBTimeUtils.getZonedDateTime(dateTime1, DBTimeZone.TZ_NY)); assertEquals(dateTime1, DBTimeUtils.toDateTime(zonedDateTime1)); final DBDateTime dateTime2 = DBTimeUtils.convertDateTime("2020-07-31T20:40 NY"); - assertEquals(dateTime2, - DBTimeUtils.toDateTime(DBTimeUtils.getZonedDateTime(dateTime2, DBTimeZone.TZ_NY))); + assertEquals(dateTime2, DBTimeUtils.toDateTime(DBTimeUtils.getZonedDateTime(dateTime2, DBTimeZone.TZ_NY))); final DBDateTime dateTime3 = DBTimeUtils.convertDateTime("2050-07-31T20:40 NY"); - assertEquals(dateTime3, - DBTimeUtils.toDateTime(DBTimeUtils.getZonedDateTime(dateTime3, DBTimeZone.TZ_NY))); + assertEquals(dateTime3, DBTimeUtils.toDateTime(DBTimeUtils.getZonedDateTime(dateTime3, DBTimeZone.TZ_NY))); } } diff --git a/DB/src/test/java/io/deephaven/db/tables/utils/TestParquetTools.java b/DB/src/test/java/io/deephaven/db/tables/utils/TestParquetTools.java index 558257c5c3c..0f4a8ca7cd4 100644 --- a/DB/src/test/java/io/deephaven/db/tables/utils/TestParquetTools.java +++ b/DB/src/test/java/io/deephaven/db/tables/utils/TestParquetTools.java @@ -44,7 +44,7 @@ */ public class TestParquetTools { private final static String 
testRoot = - Configuration.getInstance().getWorkspacePath() + File.separator + "TestParquetTools"; + Configuration.getInstance().getWorkspacePath() + File.separator + "TestParquetTools"; private final static File testRootFile = new File(testRoot); private static Table table1; @@ -54,25 +54,24 @@ public class TestParquetTools { @BeforeClass public static void setUpFirst() { table1 = new InMemoryTable( - new String[] {"StringKeys", "GroupedInts"}, - new Object[] { - new String[] {"key1", "key1", "key1", "key1", "key2", "key2", "key2", "key2", - "key2"}, - new int[] {1, 1, 2, 2, 2, 3, 3, 3, 3} - }); + new String[] {"StringKeys", "GroupedInts"}, + new Object[] { + new String[] {"key1", "key1", "key1", "key1", "key2", "key2", "key2", "key2", "key2"}, + new int[] {1, 1, 2, 2, 2, 3, 3, 3, 3} + }); emptyTable = new InMemoryTable( - new String[] {"Column1", "Column2"}, - new Object[] { - new String[] {}, - new byte[] {} - }); - brokenTable = (Table) Proxy.newProxyInstance(Table.class.getClassLoader(), - new Class[] {Table.class}, new InvocationHandler() { - @Override - public Object invoke(Object proxy, Method method, Object[] args) throws Throwable { - throw new UnsupportedOperationException("This table is broken!"); - } - }); + new String[] {"Column1", "Column2"}, + new Object[] { + new String[] {}, + new byte[] {} + }); + brokenTable = (Table) Proxy.newProxyInstance(Table.class.getClassLoader(), new Class[] {Table.class}, + new InvocationHandler() { + @Override + public Object invoke(Object proxy, Method method, Object[] args) throws Throwable { + throw new UnsupportedOperationException("This table is broken!"); + } + }); } @Before @@ -142,8 +141,7 @@ public void testWriteTable() { QueryLibrary.importClass(TestEnum.class); QueryLibrary.importClass(StringSetWrapper.class); QueryLibrary.importStatic(this.getClass()); - Table test = TableTools.emptyTable(10).select("enumC=TestEnum.values()[i]", - "enumSet=newSet(" + + Table test = 
TableTools.emptyTable(10).select("enumC=TestEnum.values()[i]", "enumSet=newSet(" + "toS(enumC_[(i + 9) % 10])," + "toS(enumC_[i])," + "toS(enumC_[(i+1)% 10]))"); @@ -153,33 +151,33 @@ public void testWriteTable() { assertEquals(10, test2.size()); assertEquals(2, test2.getColumns().length); assertEquals(Arrays.asList(toString((Enum[]) test.getColumn("enumC").get(0, 10))), - Arrays.asList(toString((Enum[]) test2.getColumn("enumC").get(0, 10)))); + Arrays.asList(toString((Enum[]) test2.getColumn("enumC").get(0, 10)))); StringSet[] objects = (StringSet[]) test.getColumn("enumSet").get(0, 10); StringSet[] objects1 = (StringSet[]) test2.getColumn("enumSet").get(0, 10); for (int i = 0; i < objects1.length; i++) { assertEquals(new HashSet(Arrays.asList(objects[i].values())), - new HashSet(Arrays.asList(objects1[i].values()))); + new HashSet(Arrays.asList(objects1[i].values()))); } test2.close(); test = TableTools.emptyTable(10).select("enumC=TestEnum.values()[i]", - "enumSet=EnumSet.of((TestEnum)enumC_[(i + 9) % 10],(TestEnum)enumC_[i],(TestEnum)enumC_[(i+1)% 10])"); + "enumSet=EnumSet.of((TestEnum)enumC_[(i + 9) % 10],(TestEnum)enumC_[i],(TestEnum)enumC_[(i+1)% 10])"); path = testRoot + File.separator + "Table3.parquet"; ParquetTools.writeTable(test, path); test2 = ParquetTools.readTable(path); assertEquals(10, test2.size()); assertEquals(2, test2.getColumns().length); assertEquals(Arrays.asList(test.getColumn("enumC").get(0, 10)), - Arrays.asList(test2.getColumn("enumC").get(0, 10))); + Arrays.asList(test2.getColumn("enumC").get(0, 10))); assertEquals(Arrays.asList(test.getColumn("enumSet").get(0, 10)), - Arrays.asList(test2.getColumn("enumSet").get(0, 10))); + Arrays.asList(test2.getColumn("enumSet").get(0, 10))); test2.close(); test = TableTools.newTable(TableDefinition.of( - ColumnDefinition.ofInt("anInt"), - ColumnDefinition.ofString("aString").withGrouping()), - col("anInt", 1, 2, 3), - col("aString", "ab", "ab", "bc")); + ColumnDefinition.ofInt("anInt"), + 
ColumnDefinition.ofString("aString").withGrouping()), + col("anInt", 1, 2, 3), + col("aString", "ab", "ab", "bc")); path = testRoot + File.separator + "Table4.parquet"; ParquetTools.writeTable(test, path); test2 = ParquetTools.readTable(new File(path)); @@ -206,8 +204,7 @@ public void testWriteTableNoColumns() { } catch (TableDataException expected) { } try { - ParquetTools.writeTables(new Table[] {source}, source.getDefinition(), - new File[] {dest}); + ParquetTools.writeTables(new Table[] {source}, source.getDefinition(), new File[] {dest}); TestCase.fail("Expected exception"); } catch (TableDataException expected) { } @@ -215,22 +212,21 @@ public void testWriteTableNoColumns() { @Test public void testWriteTableMissingColumns() { - // TODO (deephaven/deephaven-core/issues/321): Fix the apparent bug in the parquet table - // writer. + // TODO (deephaven/deephaven-core/issues/321): Fix the apparent bug in the parquet table writer. final Table nullTable = TableTools.emptyTable(10_000L).updateView( - "B = NULL_BYTE", - "C = NULL_CHAR", - "S = NULL_SHORT", - "I = NULL_INT", - "L = NULL_LONG", - "F = NULL_FLOAT", - "D = NULL_DOUBLE", - "Bl = (Boolean) null", - "Str = (String) null", - "DT = (DBDateTime) null"); + "B = NULL_BYTE", + "C = NULL_CHAR", + "S = NULL_SHORT", + "I = NULL_INT", + "L = NULL_LONG", + "F = NULL_FLOAT", + "D = NULL_DOUBLE", + "Bl = (Boolean) null", + "Str = (String) null", + "DT = (DBDateTime) null"); final File dest = new File(testRoot + File.separator + "Null.parquet"); - ParquetTools.writeTables(new Table[] {TableTools.emptyTable(10_000L)}, - nullTable.getDefinition(), new File[] {dest}); + ParquetTools.writeTables(new Table[] {TableTools.emptyTable(10_000L)}, nullTable.getDefinition(), + new File[] {dest}); final Table result = ParquetTools.readTable(dest); TstUtils.assertTableEquals(nullTable, result); result.close(); @@ -241,15 +237,14 @@ public void testWriteTableExceptions() throws IOException { new File(testRoot + File.separator + 
"unexpectedFile").createNewFile(); try { ParquetTools.writeTable(table1, - new File(testRoot + File.separator + "unexpectedFile" + File.separator + "Table1")); + new File(testRoot + File.separator + "unexpectedFile" + File.separator + "Table1")); TestCase.fail("Expected exception"); } catch (UncheckedDeephavenException e) { // Expected } new File(testRoot + File.separator + "Table1").mkdirs(); - new File(testRoot + File.separator + "Table1" + File.separator + "extraFile") - .createNewFile(); + new File(testRoot + File.separator + "Table1" + File.separator + "extraFile").createNewFile(); try { ParquetTools.writeTable(table1, new File(testRoot + File.separator + "Table1")); TestCase.fail("Expected exception"); @@ -259,19 +254,18 @@ public void testWriteTableExceptions() throws IOException { new File(testRoot + File.separator + "Nested").mkdirs(); try { ParquetTools.writeTable(brokenTable, - new File(testRoot + File.separator + "Nested" + File.separator + "Broken")); + new File(testRoot + File.separator + "Nested" + File.separator + "Broken")); TestCase.fail("Expected exception"); } catch (UnsupportedOperationException e) { // Expected exception } - TestCase.assertFalse( - new File(testRoot + File.separator + "Nested" + File.separator + "Broken").exists()); + TestCase.assertFalse(new File(testRoot + File.separator + "Nested" + File.separator + "Broken").exists()); TestCase.assertTrue(new File(testRoot + File.separator + "Nested").isDirectory()); new File(testRoot + File.separator + "Nested").setReadOnly(); try { ParquetTools.writeTable(brokenTable, - new File(testRoot + File.separator + "Nested" + File.separator + "Broken")); + new File(testRoot + File.separator + "Nested" + File.separator + "Broken")); TestCase.fail("Expected exception"); } catch (RuntimeException e) { // Expected exception @@ -304,8 +298,8 @@ private Table getAggregatedResultTable() { bid[ii] = (ii < 15) ? 
98 : 99; bidSize[ii] = ii; } - final Table baseTable = newTable(stringCol("USym", symbol), doubleCol("Bid", bid), - doubleCol("BidSize", bidSize)); + final Table baseTable = + newTable(stringCol("USym", symbol), doubleCol("Bid", bid), doubleCol("BidSize", bidSize)); return baseTable.by("USym", "Bid").by("USym"); } @@ -326,27 +320,27 @@ public void testWriteAggregatedTable() { @Test public void testPartitionedRead() { ParquetTools.writeTable(table1, new File(testRootFile, - "Date=2021-07-20" + File.separator + "Num=200" + File.separator + "file1.parquet")); + "Date=2021-07-20" + File.separator + "Num=200" + File.separator + "file1.parquet")); ParquetTools.writeTable(table1, new File(testRootFile, - "Date=2021-07-20" + File.separator + "Num=100" + File.separator + "file2.parquet")); + "Date=2021-07-20" + File.separator + "Num=100" + File.separator + "file2.parquet")); ParquetTools.writeTable(table1, new File(testRootFile, - "Date=2021-07-21" + File.separator + "Num=300" + File.separator + "file3.parquet")); + "Date=2021-07-21" + File.separator + "Num=300" + File.separator + "file3.parquet")); final List allColumns = new ArrayList<>(); - allColumns.add(ColumnDefinition.fromGenericType("Date", String.class, - ColumnDefinition.COLUMNTYPE_PARTITIONING, null)); - allColumns.add(ColumnDefinition.fromGenericType("Num", int.class, - ColumnDefinition.COLUMNTYPE_PARTITIONING, null)); + allColumns.add( + ColumnDefinition.fromGenericType("Date", String.class, ColumnDefinition.COLUMNTYPE_PARTITIONING, null)); + allColumns.add( + ColumnDefinition.fromGenericType("Num", int.class, ColumnDefinition.COLUMNTYPE_PARTITIONING, null)); allColumns.addAll(table1.getDefinition().getColumnList()); final TableDefinition partitionedDefinition = new TableDefinition(allColumns); final Table result = ParquetTools.readPartitionedTableInferSchema( - KeyValuePartitionLayout.forParquet(testRootFile, 2), ParquetInstructions.EMPTY); + KeyValuePartitionLayout.forParquet(testRootFile, 2), 
ParquetInstructions.EMPTY); TestCase.assertEquals(partitionedDefinition, result.getDefinition()); final Table expected = TableTools.merge( - table1.updateView("Date=`2021-07-20`", "Num=100"), - table1.updateView("Date=`2021-07-20`", "Num=200"), - table1.updateView("Date=`2021-07-21`", "Num=300")).moveUpColumns("Date", "Num"); + table1.updateView("Date=`2021-07-20`", "Num=100"), + table1.updateView("Date=`2021-07-20`", "Num=200"), + table1.updateView("Date=`2021-07-21`", "Num=300")).moveUpColumns("Date", "Num"); TstUtils.assertTableEquals(expected, result); } } diff --git a/DB/src/test/java/io/deephaven/db/tables/utils/TestTableTools.java b/DB/src/test/java/io/deephaven/db/tables/utils/TestTableTools.java index 41d4809cdd4..1a2a0a9aef7 100644 --- a/DB/src/test/java/io/deephaven/db/tables/utils/TestTableTools.java +++ b/DB/src/test/java/io/deephaven/db/tables/utils/TestTableTools.java @@ -51,10 +51,9 @@ public class TestTableTools extends TestCase implements UpdateErrorReporter { private static final boolean ENABLE_COMPILER_TOOLS_LOGGING = Configuration.getInstance() - .getBooleanForClassWithDefault(TestTableTools.class, "CompilerTools.logEnabled", false); + .getBooleanForClassWithDefault(TestTableTools.class, "CompilerTools.logEnabled", false); - private final static String TEST_ROOT = - Configuration.getInstance().getWorkspacePath() + "TestTableTools"; + private final static String TEST_ROOT = Configuration.getInstance().getWorkspacePath() + "TestTableTools"; private final static File TEST_ROOT_FILE = new File(TEST_ROOT); private UpdateErrorReporter oldReporter; @@ -88,25 +87,22 @@ public void setUp() throws Exception { TEST_ROOT_FILE.mkdirs(); table1 = testRefreshingTable(TstUtils.i(2, 3, 6, 7, 8, 10, 12, 15, 16), - TstUtils.c("StringKeys", "key1", "key1", "key1", "key1", "key2", "key2", "key2", "key2", - "key2"), - TstUtils.c("GroupedInts", 1, 1, 2, 2, 2, 3, 3, 3, 3)); + TstUtils.c("StringKeys", "key1", "key1", "key1", "key1", "key2", "key2", "key2", "key2", 
"key2"), + TstUtils.c("GroupedInts", 1, 1, 2, 2, 2, 3, 3, 3, 3)); table2 = testRefreshingTable(TstUtils.i(1, 3, 5, 10, 20, 30, 31, 32, 33), - TstUtils.c("StringKeys1", "key1", "key1", "key1", "key1", "key2", "key2", "key2", - "key2", "key2"), - TstUtils.c("GroupedInts1", 1, 1, 2, 2, 2, 3, 3, 3, 3)); + TstUtils.c("StringKeys1", "key1", "key1", "key1", "key1", "key2", "key2", "key2", "key2", "key2"), + TstUtils.c("GroupedInts1", 1, 1, 2, 2, 2, 3, 3, 3, 3)); table3 = new InMemoryTable( - new String[] {"StringKeys", "GroupedInts", "Doubles", "DBDateTime"}, - new Object[] { - new String[] {"key11", "key11", "key21", "key21", "key22"}, - new int[] {1, 2, 2, NULL_INT, 3}, - new double[] {2.342, 0.0932, Double.NaN, NULL_DOUBLE, 3}, - new DBDateTime[] {new DBDateTime(100), new DBDateTime(10000), null, - new DBDateTime(100000), new DBDateTime(1000000)} - }); - emptyTable = testRefreshingTable( - TstUtils.c("StringKeys", (Object) CollectionUtil.ZERO_LENGTH_STRING_ARRAY), - TstUtils.c("GroupedInts", (Object) CollectionUtil.ZERO_LENGTH_BYTE_ARRAY)); + new String[] {"StringKeys", "GroupedInts", "Doubles", "DBDateTime"}, + new Object[] { + new String[] {"key11", "key11", "key21", "key21", "key22"}, + new int[] {1, 2, 2, NULL_INT, 3}, + new double[] {2.342, 0.0932, Double.NaN, NULL_DOUBLE, 3}, + new DBDateTime[] {new DBDateTime(100), new DBDateTime(10000), null, + new DBDateTime(100000), new DBDateTime(1000000)} + }); + emptyTable = testRefreshingTable(TstUtils.c("StringKeys", (Object) CollectionUtil.ZERO_LENGTH_STRING_ARRAY), + TstUtils.c("GroupedInts", (Object) CollectionUtil.ZERO_LENGTH_BYTE_ARRAY)); } @@ -142,8 +138,7 @@ public void tearDown() throws Exception { @Override public void reportUpdateError(Throwable t) throws IOException { - System.err - .println("Received error notification: " + new ExceptionDetails(t).getFullStackTrace()); + System.err.println("Received error notification: " + new ExceptionDetails(t).getFullStackTrace()); TestCase.fail(t.getMessage()); } @@ 
-151,12 +146,11 @@ public void reportUpdateError(Throwable t) throws IOException { public void testTableDividendsCSV() { final String fileDividends = "Sym,Type,Price,SecurityId\n" + - "GOOG, Dividend, 0.25, 200\n" + - "T, Dividend, 0.15, 300\n" + - " Z, Dividend, 0.18, 500"; + "GOOG, Dividend, 0.25, 200\n" + + "T, Dividend, 0.15, 300\n" + + " Z, Dividend, 0.18, 500"; try { - DynamicTable tableDividends = - readCsv(new ByteArrayInputStream(fileDividends.getBytes())); + DynamicTable tableDividends = readCsv(new ByteArrayInputStream(fileDividends.getBytes())); assertEquals(3, tableDividends.size()); assertEquals(4, tableDividends.getMeta().size()); assertTrue(0.15 == tableDividends.getColumn(2).getDouble(1)); @@ -170,12 +164,12 @@ public void testTableDividendsCSV() { @Test public void testTableDividendsCSVNoTrim() { final String fileDividends = "Sym,Type,Price,SecurityId\n" + - "GOOG, Dividend, 0.25, 200\n" + - "T, Dividend, 0.15, 300\n" + - " Z, Dividend, 0.18, 500"; + "GOOG, Dividend, 0.25, 200\n" + + "T, Dividend, 0.15, 300\n" + + " Z, Dividend, 0.18, 500"; try { Table tableDividends = io.deephaven.db.tables.utils.CsvHelpers - .readCsv(new ByteArrayInputStream(fileDividends.getBytes()), "DEFAULT"); + .readCsv(new ByteArrayInputStream(fileDividends.getBytes()), "DEFAULT"); assertEquals(3, tableDividends.size()); assertEquals(4, tableDividends.getMeta().size()); assertTrue(0.15 == tableDividends.getColumn(2).getDouble(1)); @@ -190,13 +184,13 @@ public void testTableDividendsCSVNoTrim() { @Test public void testCompressedCSV() throws IOException { final String contents = "A,B,C,D\n" - + "\"Hello World\",3.0,5,700\n" - + "\"Goodbye Cruel World\",3.1,1000000,800\n" - + "\"Hello World Again!\",4.0,20000000000,900\n"; + + "\"Hello World\",3.0,5,700\n" + + "\"Goodbye Cruel World\",3.1,1000000,800\n" + + "\"Hello World Again!\",4.0,20000000000,900\n"; final byte[] contentBytes = contents.getBytes(StandardCharsets.UTF_8); final byte[] contentTarBytes; try (final 
ByteArrayOutputStream bytesOut = new ByteArrayOutputStream(); - final TarArchiveOutputStream tarOut = new TarArchiveOutputStream(bytesOut)) { + final TarArchiveOutputStream tarOut = new TarArchiveOutputStream(bytesOut)) { final TarArchiveEntry tarEntry = new TarArchiveEntry("test.csv"); tarEntry.setSize(contentBytes.length); tarOut.putArchiveEntry(tarEntry); @@ -217,12 +211,11 @@ public void testCompressedCSV() throws IOException { @Test public void testUncompressedCSVFromPath() throws IOException { String contents = "A,B,C,D\n" - + "\"Hello World\",3.0,5,700\n" - + "\"Goodbye Cruel World\",3.1,1000000,800\n" - + "\"Hello World Again!\",4.0,20000000000,900\n"; + + "\"Hello World\",3.0,5,700\n" + + "\"Goodbye Cruel World\",3.1,1000000,800\n" + + "\"Hello World Again!\",4.0,20000000000,900\n"; - // Although this seems arbitrary, we want to make sure our file is large enough to possibly - // be a tar + // Although this seems arbitrary, we want to make sure our file is large enough to possibly be a tar while (contents.length() < TarConstants.DEFAULT_RCDSIZE) { contents += contents; } @@ -288,22 +281,22 @@ public void testMergeOfMismatchedTables() { TableTools.merge(table2, emptyTable); } catch (UnsupportedOperationException mismatchException) { TestCase.assertEquals( - "Column mismatch for table 1, missing columns: [GroupedInts1, StringKeys1], additional columns: [GroupedInts, StringKeys]", - mismatchException.getMessage()); + "Column mismatch for table 1, missing columns: [GroupedInts1, StringKeys1], additional columns: [GroupedInts, StringKeys]", + mismatchException.getMessage()); } try { TableTools.merge(table2, table2.updateView("S2=StringKeys1")); } catch (UnsupportedOperationException mismatchException) { TestCase.assertEquals("Column mismatch for table 1, additional columns: [S2]", - mismatchException.getMessage()); + mismatchException.getMessage()); } try { TableTools.merge(table2, table2.dropColumns("StringKeys1")); } catch (UnsupportedOperationException 
mismatchException) { TestCase.assertEquals("Column mismatch for table 1, missing columns: [StringKeys1]", - mismatchException.getMessage()); + mismatchException.getMessage()); } } @@ -323,8 +316,8 @@ public void testMergeWithWhere() { // Note that now we still have isUnionedTable(t_3_4_filtered) == true... Table t_all = TableTools.merge( // This will still include Col=`C`!!! - t_1_2_filtered, - t_3_4_filtered); + t_1_2_filtered, + t_3_4_filtered); TableTools.show(t_1_2); TableTools.show(t_3_4); @@ -333,8 +326,7 @@ public void testMergeWithWhere() { TableTools.show(t_all); assertEquals(t_all.size(), 3); - assertTrue(Arrays.equals((Object[]) t_all.getColumn("Col").getDirect(), - new String[] {"A", "B", "D"})); + assertTrue(Arrays.equals((Object[]) t_all.getColumn("Col").getDirect(), new String[] {"A", "B", "D"})); } @Test @@ -347,18 +339,17 @@ public void testLoadCsv() throws Exception { PrintWriter out = new PrintWriter(ba); - out.printf("colA%scolB%scolC%scolD%scolE%scolF%scolG%n", separator, separator, - separator, separator, separator, separator); - out.printf("\"mark1%smark2\"%s1%s1%s1%s%s(null)%strue%n", separator, separator, - separator, separator, separator, separator, separator); - out.printf("etti%s3%s6%s2%s%s(null)%sFALSE%n", separator, separator, separator, - separator, separator, separator); - out.printf("(null)%s(null)%s(null)%s(null)%s%s(null)%s(null)%n", separator, separator, - separator, separator, separator, separator); - out.printf("%s%s%s%s%s(null)%s%n", separator, separator, separator, separator, - separator, separator); - out.printf("test%s3%s7.0%stest%s%s(null)%sTRUE%n", separator, separator, separator, - separator, separator, separator); + out.printf("colA%scolB%scolC%scolD%scolE%scolF%scolG%n", separator, separator, separator, separator, + separator, separator); + out.printf("\"mark1%smark2\"%s1%s1%s1%s%s(null)%strue%n", separator, separator, separator, separator, + separator, separator, separator); + 
out.printf("etti%s3%s6%s2%s%s(null)%sFALSE%n", separator, separator, separator, separator, separator, + separator); + out.printf("(null)%s(null)%s(null)%s(null)%s%s(null)%s(null)%n", separator, separator, separator, separator, + separator, separator); + out.printf("%s%s%s%s%s(null)%s%n", separator, separator, separator, separator, separator, separator); + out.printf("test%s3%s7.0%stest%s%s(null)%sTRUE%n", separator, separator, separator, separator, separator, + separator); out.flush(); out.close(); @@ -401,8 +392,7 @@ public void testLoadCsv() throws Exception { assertEquals(null, table.getColumn("colA").get(2)); assertEquals(QueryConstants.NULL_INT, table.getColumn("colB").getInt(2)); - assertEquals(QueryConstants.NULL_DOUBLE, table.getColumn("colC").getDouble(2), - 0.0000001); + assertEquals(QueryConstants.NULL_DOUBLE, table.getColumn("colC").getDouble(2), 0.0000001); assertEquals(null, table.getColumn("colD").get(2)); assertEquals(null, table.getColumn("colE").get(2)); assertEquals(null, table.getColumn("colF").get(2)); @@ -425,8 +415,7 @@ public void testWriteCsv() throws Exception { // Ignore separators in double quotes using this regex String splitterPattern = Pattern.quote(separatorStr) + "(?=([^\"]*\"[^\"]*\")*[^\"]*$)"; - TableTools.writeCsv(tableToTest, filePath, false, DBTimeZone.TZ_DEFAULT, false, - separator, colNames); + TableTools.writeCsv(tableToTest, filePath, false, DBTimeZone.TZ_DEFAULT, false, separator, colNames); File csvFile = new File(filePath); Scanner csvReader = new Scanner(csvFile); @@ -445,9 +434,9 @@ public void testWriteCsv() throws Exception { // Use separatorCsvEscape and compare the values for (int j = 0; j < numCols; j++) { String valFromTable = tableToTest.getColumn(colNames[j]).get(i) == null - ? TableTools.nullToNullString(tableToTest.getColumn(colNames[j]).get(i)) - : CsvHelpers.separatorCsvEscape( - tableToTest.getColumn(colNames[j]).get(i).toString(), separatorStr); + ? 
TableTools.nullToNullString(tableToTest.getColumn(colNames[j]).get(i)) + : CsvHelpers.separatorCsvEscape(tableToTest.getColumn(colNames[j]).get(i).toString(), + separatorStr); assertEquals(valFromTable, csvLine[j]); } @@ -462,127 +451,114 @@ public void testWriteCsv() throws Exception { @Test public void testDiff() { assertEquals( - "Column x different from the expected set, first difference at row 1 encountered 2 expected null\n", - TableTools.diff(TableTools.newTable(intCol("x", 1, 2, 3)), - TableTools.newTable(intCol("x", 1, NULL_INT, NULL_INT)), 10)); + "Column x different from the expected set, first difference at row 1 encountered 2 expected null\n", + TableTools.diff(TableTools.newTable(intCol("x", 1, 2, 3)), + TableTools.newTable(intCol("x", 1, NULL_INT, NULL_INT)), 10)); assertEquals( - "Column x different from the expected set, first difference at row 1 encountered null expected 2\n", - TableTools.diff(TableTools.newTable(intCol("x", 1, NULL_INT, NULL_INT)), - TableTools.newTable(intCol("x", 1, 2, 3)), 10)); + "Column x different from the expected set, first difference at row 1 encountered null expected 2\n", + TableTools.diff(TableTools.newTable(intCol("x", 1, NULL_INT, NULL_INT)), + TableTools.newTable(intCol("x", 1, 2, 3)), 10)); - assertEquals("", TableTools.diff(TableTools.newTable(col("x", 1, 2, 3)), - TableTools.newTable(col("x", 1, 2, 3)), 10)); + assertEquals("", + TableTools.diff(TableTools.newTable(col("x", 1, 2, 3)), TableTools.newTable(col("x", 1, 2, 3)), 10)); assertEquals( - "Column x different from the expected set, first difference at row 1 encountered 2.0 expected null\n", - TableTools.diff(TableTools.newTable(col("x", 1.0, 2.0, 3.0)), - TableTools.newTable(col("x", 1.0, null, null)), 10)); + "Column x different from the expected set, first difference at row 1 encountered 2.0 expected null\n", + TableTools.diff(TableTools.newTable(col("x", 1.0, 2.0, 3.0)), + TableTools.newTable(col("x", 1.0, null, null)), 10)); assertEquals( - "Column x 
different from the expected set, first difference at row 1 encountered null expected 2.0\n", - TableTools.diff(TableTools.newTable(col("x", 1.0, null, null)), - TableTools.newTable(col("x", 1.0, 2.0, 3.0)), 10)); + "Column x different from the expected set, first difference at row 1 encountered null expected 2.0\n", + TableTools.diff(TableTools.newTable(col("x", 1.0, null, null)), + TableTools.newTable(col("x", 1.0, 2.0, 3.0)), 10)); assertEquals( - "Column x different from the expected set, first difference at row 1 encountered 2.0E-12 expected null\n", - TableTools.diff( - TableTools.newTable(col("x", 0.000000000001, 0.000000000002, 0.000000000003)), - TableTools.newTable(col("x", 0.000000000001, null, null)), 10)); + "Column x different from the expected set, first difference at row 1 encountered 2.0E-12 expected null\n", + TableTools.diff(TableTools.newTable(col("x", 0.000000000001, 0.000000000002, 0.000000000003)), + TableTools.newTable(col("x", 0.000000000001, null, null)), 10)); assertEquals( - "Column x different from the expected set, first difference at row 1 encountered 2.0E-12 expected null\n", - TableTools.diff( - TableTools.newTable(col("x", 0.000000000001, 0.000000000002, 0.000000000003)), - TableTools.newTable(col("x", 0.000000000002, null, null)), 10, - EnumSet.of(TableDiff.DiffItems.DoublesExact))); + "Column x different from the expected set, first difference at row 1 encountered 2.0E-12 expected null\n", + TableTools.diff(TableTools.newTable(col("x", 0.000000000001, 0.000000000002, 0.000000000003)), + TableTools.newTable(col("x", 0.000000000002, null, null)), 10, + EnumSet.of(TableDiff.DiffItems.DoublesExact))); assertEquals( - "Column x different from the expected set, first difference at row 0 encountered 1.0E-12 expected 2.0E-12 (difference = 1.0E-12)\n", - TableTools.diff( - TableTools.newTable(col("x", 0.000000000001, 0.000000000002, 0.000000000003)), - TableTools.newTable(col("x", 0.000000000002, null, null)), 10)); + "Column x different 
from the expected set, first difference at row 0 encountered 1.0E-12 expected 2.0E-12 (difference = 1.0E-12)\n", + TableTools.diff(TableTools.newTable(col("x", 0.000000000001, 0.000000000002, 0.000000000003)), + TableTools.newTable(col("x", 0.000000000002, null, null)), 10)); assertEquals( - "Column x different from the expected set, first difference at row 1 encountered null expected 2.0\n", - TableTools.diff(TableTools.newTable(floatCol("x", 1.0f, NULL_FLOAT, NULL_FLOAT)), - TableTools.newTable(floatCol("x", 1.0f, 2.0f, 3.0f)), 10)); + "Column x different from the expected set, first difference at row 1 encountered null expected 2.0\n", + TableTools.diff(TableTools.newTable(floatCol("x", 1.0f, NULL_FLOAT, NULL_FLOAT)), + TableTools.newTable(floatCol("x", 1.0f, 2.0f, 3.0f)), 10)); assertEquals("", TableTools.diff(TableTools.newTable(floatCol("x", 1, 2, 3)), - TableTools.newTable(floatCol("x", 1.0f, 2.0f, 3.0f)), 10)); + TableTools.newTable(floatCol("x", 1.0f, 2.0f, 3.0f)), 10)); assertEquals( - "Column x different from the expected set, first difference at row 1 encountered 2.0 expected null\n", - TableTools.diff(TableTools.newTable(floatCol("x", 1.0f, 2.0f, 3.0f)), - TableTools.newTable(floatCol("x", 1.0f, NULL_FLOAT, NULL_FLOAT)), 10)); + "Column x different from the expected set, first difference at row 1 encountered 2.0 expected null\n", + TableTools.diff(TableTools.newTable(floatCol("x", 1.0f, 2.0f, 3.0f)), + TableTools.newTable(floatCol("x", 1.0f, NULL_FLOAT, NULL_FLOAT)), 10)); assertEquals( - "Column x different from the expected set, first difference at row 1 encountered null expected 2.0\n", - TableTools.diff(TableTools.newTable(floatCol("x", 1.0f, NULL_FLOAT, NULL_FLOAT)), - TableTools.newTable(floatCol("x", 1.0f, 2.0f, 3.0f)), 10)); + "Column x different from the expected set, first difference at row 1 encountered null expected 2.0\n", + TableTools.diff(TableTools.newTable(floatCol("x", 1.0f, NULL_FLOAT, NULL_FLOAT)), + 
TableTools.newTable(floatCol("x", 1.0f, 2.0f, 3.0f)), 10)); assertEquals( - "Column x different from the expected set, first difference at row 1 encountered 2.0E-12 expected null\n", - TableTools.diff( - TableTools - .newTable(floatCol("x", 0.000000000001f, 0.000000000002f, 0.000000000003f)), - TableTools.newTable(floatCol("x", 0.000000000001f, NULL_FLOAT, NULL_FLOAT)), 10)); + "Column x different from the expected set, first difference at row 1 encountered 2.0E-12 expected null\n", + TableTools.diff(TableTools.newTable(floatCol("x", 0.000000000001f, 0.000000000002f, 0.000000000003f)), + TableTools.newTable(floatCol("x", 0.000000000001f, NULL_FLOAT, NULL_FLOAT)), 10)); assertEquals( - "Column x different from the expected set, first difference at row 1 encountered 2.0E-12 expected null\n", - TableTools.diff( - TableTools - .newTable(floatCol("x", 0.000000000001f, 0.000000000002f, 0.000000000003f)), - TableTools.newTable(floatCol("x", 0.000000000002f, NULL_FLOAT, NULL_FLOAT)), 10, - EnumSet.of(TableDiff.DiffItems.DoublesExact))); + "Column x different from the expected set, first difference at row 1 encountered 2.0E-12 expected null\n", + TableTools.diff(TableTools.newTable(floatCol("x", 0.000000000001f, 0.000000000002f, 0.000000000003f)), + TableTools.newTable(floatCol("x", 0.000000000002f, NULL_FLOAT, NULL_FLOAT)), 10, + EnumSet.of(TableDiff.DiffItems.DoublesExact))); assertEquals( - "Column x different from the expected set, first difference at row 0 encountered 1.0E-12 expected 2.0E-12 (difference = 1.0E-12)\n", - TableTools.diff( - TableTools - .newTable(floatCol("x", 0.000000000001f, 0.000000000002f, 0.000000000003f)), - TableTools.newTable(floatCol("x", 0.000000000002f, NULL_FLOAT, NULL_FLOAT)), 10)); + "Column x different from the expected set, first difference at row 0 encountered 1.0E-12 expected 2.0E-12 (difference = 1.0E-12)\n", + TableTools.diff(TableTools.newTable(floatCol("x", 0.000000000001f, 0.000000000002f, 0.000000000003f)), + 
TableTools.newTable(floatCol("x", 0.000000000002f, NULL_FLOAT, NULL_FLOAT)), 10)); } @Test public void testRoundDecimalColumns() { Table table = newTable( - col("String", "c", "e", "g"), - col("Int", 2, 4, 6), - col("Double", 1.2, 2.6, Double.NaN), - col("Float", 1.2f, 2.6f, Float.NaN)); + col("String", "c", "e", "g"), + col("Int", 2, 4, 6), + col("Double", 1.2, 2.6, Double.NaN), + col("Float", 1.2f, 2.6f, Float.NaN)); // Test whether we're rounding all columns properly Table roundedColumns = TableTools.roundDecimalColumns(table); assertTrue(Arrays.equals((String[]) roundedColumns.getColumn("String").getDirect(), - (String[]) table.getColumn("String").getDirect())); + (String[]) table.getColumn("String").getDirect())); assertTrue(Arrays.equals((int[]) roundedColumns.getColumn("Int").getDirect(), - (int[]) table.getColumn("Int").getDirect())); - assertEquals(Math.round((double) table.getColumn("Double").get(0)), - roundedColumns.getColumn("Double").get(0)); - assertEquals(Math.round((double) table.getColumn("Double").get(1)), - roundedColumns.getColumn("Double").get(1)); - assertEquals(Math.round((double) table.getColumn("Double").get(2)), - roundedColumns.getColumn("Double").get(2)); + (int[]) table.getColumn("Int").getDirect())); + assertEquals(Math.round((double) table.getColumn("Double").get(0)), roundedColumns.getColumn("Double").get(0)); + assertEquals(Math.round((double) table.getColumn("Double").get(1)), roundedColumns.getColumn("Double").get(1)); + assertEquals(Math.round((double) table.getColumn("Double").get(2)), roundedColumns.getColumn("Double").get(2)); // Cast these cause the DB rounds floats to longs assertEquals((long) Math.round((float) table.getColumn("Float").get(0)), - roundedColumns.getColumn("Float").get(0)); + roundedColumns.getColumn("Float").get(0)); assertEquals((long) Math.round((float) table.getColumn("Float").get(1)), - roundedColumns.getColumn("Float").get(1)); + roundedColumns.getColumn("Float").get(1)); assertEquals((long) 
Math.round((float) table.getColumn("Float").get(2)), - roundedColumns.getColumn("Float").get(2)); + roundedColumns.getColumn("Float").get(2)); - // Test whether it works when we specify the columns, by comparing to the validated results - // from before + // Test whether it works when we specify the columns, by comparing to the validated results from before Table specificRoundedColums = TableTools.roundDecimalColumns(table, "Double", "Float"); assertTrue(Arrays.equals((String[]) roundedColumns.getColumn("String").getDirect(), - (String[]) specificRoundedColums.getColumn("String").getDirect())); + (String[]) specificRoundedColums.getColumn("String").getDirect())); assertTrue(Arrays.equals((int[]) roundedColumns.getColumn("Int").getDirect(), - (int[]) specificRoundedColums.getColumn("Int").getDirect())); + (int[]) specificRoundedColums.getColumn("Int").getDirect())); assertTrue(Arrays.equals((long[]) roundedColumns.getColumn("Double").getDirect(), - (long[]) specificRoundedColums.getColumn("Double").getDirect())); + (long[]) specificRoundedColums.getColumn("Double").getDirect())); assertTrue(Arrays.equals((long[]) roundedColumns.getColumn("Float").getDirect(), - (long[]) specificRoundedColums.getColumn("Float").getDirect())); + (long[]) specificRoundedColums.getColumn("Float").getDirect())); // Test whether it works properly when we specify what NOT to round Table onlyOneRoundedColumn = TableTools.roundDecimalColumnsExcept(table, "Float"); assertTrue(Arrays.equals((String[]) roundedColumns.getColumn("String").getDirect(), - (String[]) onlyOneRoundedColumn.getColumn("String").getDirect())); + (String[]) onlyOneRoundedColumn.getColumn("String").getDirect())); assertTrue(Arrays.equals((int[]) table.getColumn("Int").getDirect(), - (int[]) onlyOneRoundedColumn.getColumn("Int").getDirect())); + (int[]) onlyOneRoundedColumn.getColumn("Int").getDirect())); assertTrue(Arrays.equals((long[]) roundedColumns.getColumn("Double").getDirect(), - (long[]) 
onlyOneRoundedColumn.getColumn("Double").getDirect())); + (long[]) onlyOneRoundedColumn.getColumn("Double").getDirect())); assertTrue(Arrays.equals((float[]) table.getColumn("Float").getDirect(), - (float[]) onlyOneRoundedColumn.getColumn("Float").getDirect())); + (float[]) onlyOneRoundedColumn.getColumn("Float").getDirect())); try { // Make sure we complain if you try to round the unroundable @@ -597,14 +573,13 @@ public void testDateTimeColumnHolder() throws Exception { // create two columns with the same data final DBDateTime[] data = new DBDateTime[] {new DBDateTime(100), new DBDateTime(100), null}; - final long[] longData = new long[] { - data[0] == null ? io.deephaven.util.QueryConstants.NULL_LONG : data[0].getNanos(), - data[1] == null ? io.deephaven.util.QueryConstants.NULL_LONG : data[1].getNanos(), - data[2] == null ? QueryConstants.NULL_LONG : data[2].getNanos()}; + final long[] longData = + new long[] {data[0] == null ? io.deephaven.util.QueryConstants.NULL_LONG : data[0].getNanos(), + data[1] == null ? io.deephaven.util.QueryConstants.NULL_LONG : data[1].getNanos(), + data[2] == null ? 
QueryConstants.NULL_LONG : data[2].getNanos()}; final ColumnHolder dateTimeCol = c("DateTimeColumn", data); - final ColumnHolder dateTimeCol2 = - ColumnHolder.getDateTimeColumnHolder("DateTimeColumn2", false, longData); + final ColumnHolder dateTimeCol2 = ColumnHolder.getDateTimeColumnHolder("DateTimeColumn2", false, longData); final Table table = TableTools.newTable(dateTimeCol, dateTimeCol2); @@ -644,20 +619,17 @@ public void testSimpleDiffRegression() { public void testMerge2() { Random random = new Random(0); int size = random.nextInt(10); - final QueryTable table1 = - TstUtils.testRefreshingTable(TstUtils.getRandomIndex(0, size, random), + final QueryTable table1 = TstUtils.testRefreshingTable(TstUtils.getRandomIndex(0, size, random), TstUtils.getRandomStringCol("Sym", size, random), TstUtils.getRandomIntCol("intCol", size, random), TstUtils.getRandomDoubleCol("doubleCol", size, random)); size = random.nextInt(10); - final QueryTable table2 = - TstUtils.testRefreshingTable(TstUtils.getRandomIndex(0, size, random), + final QueryTable table2 = TstUtils.testRefreshingTable(TstUtils.getRandomIndex(0, size, random), TstUtils.getRandomStringCol("Sym", size, random), TstUtils.getRandomIntCol("intCol", size, random), TstUtils.getRandomDoubleCol("doubleCol", size, random)); size = random.nextInt(10); - final QueryTable table3 = - TstUtils.testRefreshingTable(TstUtils.getRandomIndex(0, size, random), + final QueryTable table3 = TstUtils.testRefreshingTable(TstUtils.getRandomIndex(0, size, random), TstUtils.getRandomStringCol("Sym", size, random), TstUtils.getRandomIntCol("intCol", size, random), TstUtils.getRandomDoubleCol("doubleCol", size, random)); @@ -673,28 +645,25 @@ public void testMerge2() { public void testMergeIterative() { Random random = new Random(0); int size = 3; - final QueryTable table1 = - TstUtils.testRefreshingTable(TstUtils.getRandomIndex(0, size, random), + final QueryTable table1 = TstUtils.testRefreshingTable(TstUtils.getRandomIndex(0, size, 
random), TstUtils.getRandomStringCol("Sym", size, random), TstUtils.getRandomIntCol("intCol", size, random), TstUtils.getRandomDoubleCol("doubleCol", size, random)); size = 3; - final QueryTable table2 = - TstUtils.testRefreshingTable(TstUtils.getRandomIndex(0, size, random), + final QueryTable table2 = TstUtils.testRefreshingTable(TstUtils.getRandomIndex(0, size, random), TstUtils.getRandomStringCol("Sym", size, random), TstUtils.getRandomIntCol("intCol", size, random), TstUtils.getRandomDoubleCol("doubleCol", size, random)); size = 3; - final QueryTable table3 = - TstUtils.testRefreshingTable(TstUtils.getRandomIndex(0, size, random), + final QueryTable table3 = TstUtils.testRefreshingTable(TstUtils.getRandomIndex(0, size, random), TstUtils.getRandomStringCol("Sym", size, random), TstUtils.getRandomIntCol("intCol", size, random), TstUtils.getRandomDoubleCol("doubleCol", size, random)); size = 50; final QueryTable staticTable = TstUtils.testTable(TstUtils.getRandomIndex(0, size, random), - TstUtils.getRandomStringCol("Sym", size, random), - TstUtils.getRandomIntCol("intCol", size, random), - TstUtils.getRandomDoubleCol("doubleCol", size, random)); + TstUtils.getRandomStringCol("Sym", size, random), + TstUtils.getRandomIntCol("intCol", size, random), + TstUtils.getRandomDoubleCol("doubleCol", size, random)); EvalNugget en[] = new EvalNugget[] { new EvalNugget("Single Table Merge") { @@ -705,42 +674,37 @@ protected Table e() { new EvalNuggetSet("Merge No Sort") { protected Table e() { return TableTools.merge( - table1.updateView("lk=k"), staticTable.updateView("lk=k+100000000L"), - table2.updateView("lk=k+200000000L"), - table3.updateView("lk=k+300000000L")); + table1.updateView("lk=k"), staticTable.updateView("lk=k+100000000L"), + table2.updateView("lk=k+200000000L"), table3.updateView("lk=k+300000000L")); } }, new EvalNuggetSet("Merge Plus Sort") { protected Table e() { return TableTools.merge( - table1.updateView("lk=k"), staticTable.updateView("lk=k+100000000L"), 
- table2.updateView("lk=k+200000000L"), - table3.updateView("lk=k+300000000L")).sort("lk"); + table1.updateView("lk=k"), staticTable.updateView("lk=k+100000000L"), + table2.updateView("lk=k+200000000L"), table3.updateView("lk=k+300000000L")).sort("lk"); } }, new EvalNuggetSet("Double Merge Plus Sort") { protected Table e() { return TableTools.merge( - table1.updateView("lk=k"), staticTable.updateView("lk=k+100000000L"), - table2.updateView("lk=k+200000000L"), - table3.updateView("lk=k+300000000L"), - table3.updateView("lk=k+400000000L")).sort("lk"); + table1.updateView("lk=k"), staticTable.updateView("lk=k+100000000L"), + table2.updateView("lk=k+200000000L"), table3.updateView("lk=k+300000000L"), + table3.updateView("lk=k+400000000L")).sort("lk"); } }, new EvalNuggetSet("Triple Double Merge Plus Sort") { protected Table e() { return TableTools.merge( - table1.updateView("lk=k"), table1.updateView("lk=k+100000000L"), - staticTable.updateView("lk=k+200000000L"), - staticTable.updateView("lk=k+300000000L"), - table2.updateView("lk=k+400000000L"), - table2.updateView("lk=k+500000000L"), - table3.updateView("lk=k+600000000L"), - table3.updateView("lk=k+700000000L")).sort("lk"); + table1.updateView("lk=k"), table1.updateView("lk=k+100000000L"), + staticTable.updateView("lk=k+200000000L"), staticTable.updateView("lk=k+300000000L"), + table2.updateView("lk=k+400000000L"), table2.updateView("lk=k+500000000L"), + table3.updateView("lk=k+600000000L"), table3.updateView("lk=k+700000000L")).sort("lk"); } }, - EvalNugget.from(() -> TableTools.merge(TableTools.emptyTable(10), - table1.dropColumns("Sym", "intCol", "doubleCol")).update("A=1")) + EvalNugget.from(() -> TableTools + .merge(TableTools.emptyTable(10), table1.dropColumns("Sym", "intCol", "doubleCol")) + .update("A=1")) }; for (int i = 0; i < 20; i++) { @@ -771,25 +735,25 @@ public void testMergeIterative2() { TstUtils.ColumnInfo[] info1; final QueryTable table1 = getTable(random.nextInt(20), random, - info1 = 
initColumnInfos(new String[] {"Sym", "intCol", "doubleCol"}, - new StringGenerator(), - new IntGenerator(10, 100), - new DoubleGenerator(0, 100))); + info1 = initColumnInfos(new String[] {"Sym", "intCol", "doubleCol"}, + new StringGenerator(), + new IntGenerator(10, 100), + new DoubleGenerator(0, 100))); ColumnInfo[] info2; final QueryTable table2 = getTable(random.nextInt(10), random, - info2 = initColumnInfos(new String[] {"Sym", "intCol", "doubleCol"}, - new StringGenerator(), - new IntGenerator(10, 100), - new DoubleGenerator(0, 100))); + info2 = initColumnInfos(new String[] {"Sym", "intCol", "doubleCol"}, + new StringGenerator(), + new IntGenerator(10, 100), + new DoubleGenerator(0, 100))); ColumnInfo[] info3; final int size = random.nextInt(40); final QueryTable table3 = getTable(size, random, - info3 = initColumnInfos(new String[] {"Sym", "intCol", "doubleCol"}, - new StringGenerator(), - new IntGenerator(10, 100), - new DoubleGenerator(0, 100))); + info3 = initColumnInfos(new String[] {"Sym", "intCol", "doubleCol"}, + new StringGenerator(), + new IntGenerator(10, 100), + new DoubleGenerator(0, 100))); EvalNugget en[] = new EvalNugget[] { new EvalNugget("Single table merge") { @@ -800,36 +764,30 @@ protected Table e() { new EvalNuggetSet("Merge 3") { protected Table e() { return TableTools.merge(table1.updateView("lk=k"), - table2.updateView("lk=k+100000000L"), - table3.updateView("lk=k+200000000L")); + table2.updateView("lk=k+100000000L"), table3.updateView("lk=k+200000000L")); } }, new EvalNuggetSet("Merge Plus Sort") { protected Table e() { return TableTools.merge(table1.updateView("lk=k"), - table2.updateView("lk=k+100000000L"), - table3.updateView("lk=k+200000000L")).sort("lk"); + table2.updateView("lk=k+100000000L"), table3.updateView("lk=k+200000000L")).sort("lk"); } }, new EvalNuggetSet("Double Merge 3") { protected Table e() { return TableTools.merge(table1.updateView("lk=k"), - table2.updateView("lk=k+100000000L"), - 
table3.updateView("lk=k+200000000L"), - table1.updateView("lk=k+300000000L"), - table2.updateView("lk=k+400000000L"), - table3.updateView("lk=k+500000000L")); + table2.updateView("lk=k+100000000L"), table3.updateView("lk=k+200000000L"), + table1.updateView("lk=k+300000000L"), table2.updateView("lk=k+400000000L"), + table3.updateView("lk=k+500000000L")); } }, new EvalNuggetSet("Merge With Views") { protected Table e() { // noinspection ConstantConditions return TableTools.merge( - TableTools - .merge(table1.updateView("lk=k"), - table2.updateView("lk=k+100000000L")) - .view("Sym", "intCol", "lk"), - table3.updateView("lk=k+200000000L").view("Sym", "intCol", "lk")); + TableTools.merge(table1.updateView("lk=k"), table2.updateView("lk=k+100000000L")) + .view("Sym", "intCol", "lk"), + table3.updateView("lk=k+200000000L").view("Sym", "intCol", "lk")); } }, }; // TODO add a new comparison tool that matches rows by key and allows for random order @@ -844,24 +802,24 @@ protected Table e() { boolean mod3 = random.nextBoolean(); if (mod1) { - LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> GenerateTableUpdates - .generateTableUpdates(size, random, table1, info1)); + LiveTableMonitor.DEFAULT.runWithinUnitTestCycle( + () -> GenerateTableUpdates.generateTableUpdates(size, random, table1, info1)); } else { clock.startUpdateCycle(); clock.completeUpdateCycle(); } if (mod2) { - LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> GenerateTableUpdates - .generateTableUpdates(size, random, table2, info2)); + LiveTableMonitor.DEFAULT.runWithinUnitTestCycle( + () -> GenerateTableUpdates.generateTableUpdates(size, random, table2, info2)); } else { clock.startUpdateCycle(); clock.completeUpdateCycle(); } if (mod3) { - LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> GenerateTableUpdates - .generateTableUpdates(size, random, table3, info3)); + LiveTableMonitor.DEFAULT.runWithinUnitTestCycle( + () -> GenerateTableUpdates.generateTableUpdates(size, random, table3, info3)); } 
else { clock.startUpdateCycle(); clock.completeUpdateCycle(); @@ -898,10 +856,9 @@ public void testMergeRecursive() { // This test does a merge, followed by a view, then another merge. @Test public void testMergeRecursive2() { - Table merge1 = TableTools - .merge(table1, - table2.renameColumns("GroupedInts=GroupedInts1", "StringKeys=StringKeys1")) - .view("StringKeys"); + Table merge1 = + TableTools.merge(table1, table2.renameColumns("GroupedInts=GroupedInts1", "StringKeys=StringKeys1")) + .view("StringKeys"); Table merge2 = TableTools.merge(merge1, table1.view("StringKeys")); Assert.assertEquals(table1.size() * 2 + table2.size(), merge2.size()); @@ -909,13 +866,10 @@ public void testMergeRecursive2() { tableRangesAreEqual(table1.view("StringKeys"), merge1, 0, 0, table1.size()); tableRangesAreEqual(table1.view("StringKeys"), merge2, 0, 0, table1.size()); - tableRangesAreEqual(table2.view("StringKeys=StringKeys1"), merge1, 0, table1.size(), - table2.size()); - tableRangesAreEqual(table2.view("StringKeys=StringKeys1"), merge2, 0, table1.size(), - table2.size()); + tableRangesAreEqual(table2.view("StringKeys=StringKeys1"), merge1, 0, table1.size(), table2.size()); + tableRangesAreEqual(table2.view("StringKeys=StringKeys1"), merge2, 0, table1.size(), table2.size()); - tableRangesAreEqual(table1.view("StringKeys"), merge2, 0, table1.size() + table2.size(), - table1.size()); + tableRangesAreEqual(table1.view("StringKeys"), merge2, 0, table1.size() + table2.size(), table1.size()); } @Test @@ -930,7 +884,7 @@ public void testUncollapsableMerge() { result = table1; else result = TableTools.merge(result, table1).updateView("GroupedInts=GroupedInts+1") - .updateView("GroupedInts=GroupedInts-1"); + .updateView("GroupedInts=GroupedInts-1"); Assert.assertEquals(table1.size() * (ii + 1), result.size()); } @@ -942,8 +896,7 @@ public void testUncollapsableMerge() { @Test public void testMergeWithNestedShift() { - // Test that an outer shift properly shifts index when inner shifts 
are also propagated to - // the index. + // Test that an outer shift properly shifts index when inner shifts are also propagated to the index. final QueryTable table = testRefreshingTable(i(1), c("Sentinel", 1)); // must be uncollapsable s.t. inner table shifts at the same time as outer table final Table m2 = TableTools.merge(table, table).updateView("Sentinel=Sentinel+1"); @@ -966,8 +919,7 @@ public void testMergeWithNestedShift() { @Test public void testMergeWithShiftBoundary() { - // Test that an outer shift properly shifts index when inner shifts are also propagated to - // the index. + // Test that an outer shift properly shifts index when inner shifts are also propagated to the index. final int ONE_MILLION = 1024 * 1024; final QueryTable table = testRefreshingTable(i(ONE_MILLION - 1), c("Sentinel", 1)); final QueryTable table2 = testRefreshingTable(i(0), c("Sentinel", 2)); @@ -994,12 +946,10 @@ public void testMergeWithShiftBoundary() { @Test public void testMergeShiftsEmptyTable() { - // Test that an outer shift properly shifts index when inner shifts are also propagated to - // the index. + // Test that an outer shift properly shifts index when inner shifts are also propagated to the index. 
final QueryTable table = testRefreshingTable(i(1), c("Sentinel", 1)); final QueryTable emptyTable = testRefreshingTable(i(), TstUtils.c("Sentinel")); - final Table m2 = - TableTools.merge(table, emptyTable, emptyTable).updateView("Sentinel=Sentinel+1"); + final Table m2 = TableTools.merge(table, emptyTable, emptyTable).updateView("Sentinel=Sentinel+1"); final EvalNugget[] ev = new EvalNugget[] { EvalNugget.from(() -> table), @@ -1034,12 +984,9 @@ public void testMergeShiftsEmptyTable() { @Test public void testMergeShiftBoundary() { // DH-11032 - // Test that when our inner table has a shift that is begins beyond the last key for our - // subtable (because - // it has been filtered and the reserved address space is less than the address space of the - // full unfiltered - // table) we do not remove elements that should not be removed. This is distilled from a - // broken fuzzer test. + // Test that when our inner table has a shift that is begins beyond the last key for our subtable (because + // it has been filtered and the reserved address space is less than the address space of the full unfiltered + // table) we do not remove elements that should not be removed. This is distilled from a broken fuzzer test. final QueryTable table1 = testRefreshingTable(i(10000, 65538), c("Sentinel", 1, 2)); final QueryTable table2 = testRefreshingTable(i(2), c("Sentinel", 3)); final Table table1Filtered = table1.where("Sentinel == 1"); @@ -1072,20 +1019,18 @@ public void testMergeShiftBoundary() { @Test public void testMergeDeepShifts() { - // Test that an outer shift properly shifts index when inner shifts are also propagated to - // the index. + // Test that an outer shift properly shifts index when inner shifts are also propagated to the index. 
final QueryTable table = testRefreshingTable(i(1), c("Sentinel", 1)); final QueryTable emptyTable = testRefreshingTable(i(), TstUtils.c("Sentinel")); - final Table m2 = - TableTools.merge(table, emptyTable, emptyTable, emptyTable, emptyTable, emptyTable) + final Table m2 = TableTools.merge(table, emptyTable, emptyTable, emptyTable, emptyTable, emptyTable) .updateView("Sentinel=Sentinel+1"); final EvalNugget[] ev = new EvalNugget[] { EvalNugget.from(() -> table), EvalNugget.from(() -> TableTools.merge(table, emptyTable, table, emptyTable)), - EvalNugget.from(() -> TableTools - .merge(table, emptyTable, emptyTable, emptyTable, emptyTable, emptyTable) - .updateView("Sentinel=Sentinel+1")), + EvalNugget + .from(() -> TableTools.merge(table, emptyTable, emptyTable, emptyTable, emptyTable, emptyTable) + .updateView("Sentinel=Sentinel+1")), EvalNugget.from(() -> TableTools.merge(m2, m2)), }; @@ -1120,16 +1065,15 @@ private void addRows(Random random, QueryTable table1) { size = random.nextInt(10); final Index newIndex = TstUtils.getRandomIndex(table1.getIndex().lastKey(), size, random); TstUtils.addToTable(table1, newIndex, TstUtils.getRandomStringCol("Sym", size, random), - TstUtils.getRandomIntCol("intCol", size, random), - TstUtils.getRandomDoubleCol("doubleCol", size, random)); + TstUtils.getRandomIntCol("intCol", size, random), + TstUtils.getRandomDoubleCol("doubleCol", size, random)); table1.notifyListeners(newIndex, TstUtils.i(), TstUtils.i()); } static void tableRangesAreEqual(Table table1, Table table2, long from1, long from2, long size) { Assert.assertEquals("", - io.deephaven.db.tables.utils.TableTools.diff( - table1.tail(table1.size() - from1).head(size), - table2.tail(table2.size() - from2).head(size), 10)); + io.deephaven.db.tables.utils.TableTools.diff(table1.tail(table1.size() - from1).head(size), + table2.tail(table2.size() - from2).head(size), 10)); } @Test @@ -1139,17 +1083,14 @@ public void testMergeWithEmptyTables() { tableRangesAreEqual(table1, 
result, 0, 0, table1.size()); result = TableTools.merge(TableTools.newTable(table1.getDefinition()), table1); tableRangesAreEqual(table1, result, 0, 0, table1.size()); - result = TableTools.merge(TableTools.newTable(table1.getDefinition()), emptyLikeTable1, - emptyLikeTable1); + result = TableTools.merge(TableTools.newTable(table1.getDefinition()), emptyLikeTable1, emptyLikeTable1); TestCase.assertEquals(0, result.size()); } @Test public void testMergeSorted() throws IOException { - Table table1 = - testTable(i(1, 3, 5, 6, 7), c("Key", "a", "c", "d", "e", "f")).updateView("Sentinel=k"); - Table table2 = - testTable(i(2, 4, 8, 9), c("Key", "b", "c", "g", "h")).updateView("Sentinel=k"); + Table table1 = testTable(i(1, 3, 5, 6, 7), c("Key", "a", "c", "d", "e", "f")).updateView("Sentinel=k"); + Table table2 = testTable(i(2, 4, 8, 9), c("Key", "b", "c", "g", "h")).updateView("Sentinel=k"); Table merged = TableTools.mergeSorted("Key", table1, table2); io.deephaven.db.tables.utils.TableTools.showWithIndex(merged); @@ -1168,8 +1109,7 @@ public void testMergeSorted2() throws IOException { int size = 50; for (int ii = 0; ii < 10; ++ii) { - final QueryTable table = getTable(false, size, random, - initColumnInfos(new String[] {"Key", "doubleCol"}, + final QueryTable table = getTable(false, size, random, initColumnInfos(new String[] {"Key", "doubleCol"}, new SortedIntGenerator(0, 100), new DoubleGenerator(0, 100))); tables.add(table.update("TableI=" + ii)); @@ -1203,15 +1143,15 @@ public void testMergeGetChunk() { final ColumnSource origCol = table.getColumnSource("Sentinel"); final ColumnSource.GetContext origContext = origCol.makeGetContext(numElements); final IntChunk origContent = usePrev - ? origCol.getPrevChunk(origContext, origIndex).asIntChunk() - : origCol.getChunk(origContext, origIndex).asIntChunk(); + ? 
origCol.getPrevChunk(origContext, origIndex).asIntChunk() + : origCol.getChunk(origContext, origIndex).asIntChunk(); // noinspection unchecked final ColumnSource resCol = result.getColumnSource("Sentinel"); final ColumnSource.GetContext resContext = resCol.makeGetContext(numElements * 3); final IntChunk resContent = usePrev - ? resCol.getPrevChunk(resContext, resIndex).asIntChunk() - : resCol.getChunk(resContext, resIndex).asIntChunk(); + ? resCol.getPrevChunk(resContext, resIndex).asIntChunk() + : resCol.getChunk(resContext, resIndex).asIntChunk(); Assert.assertEquals(numElements, origContent.size()); Assert.assertEquals(3 * numElements, resContent.size()); @@ -1226,15 +1166,13 @@ public void testMergeGetChunk() { result.listenForUpdates(new InstrumentedShiftAwareListener("") { @Override public void onUpdate(final Update upstream) { - Assert.assertTrue( - table.getIndex().intSize() > table.getIndex().getPrevIndex().intSize()); + Assert.assertTrue(table.getIndex().intSize() > table.getIndex().getPrevIndex().intSize()); validate.accept(false); validate.accept(true); } @Override - protected void onFailureInternal(Throwable originalException, - UpdatePerformanceTracker.Entry sourceEntry) {} + protected void onFailureInternal(Throwable originalException, UpdatePerformanceTracker.Entry sourceEntry) {} }); for (int ii = 1; ii < 100; ++ii) { @@ -1260,15 +1198,15 @@ public void testMergeGetChunkEmpty() { final ColumnSource origCol = table.getColumnSource("Sentinel"); final ColumnSource.GetContext origContext = origCol.makeGetContext(numElements); final IntChunk origContent = usePrev - ? origCol.getPrevChunk(origContext, index).asIntChunk() - : origCol.getChunk(origContext, index).asIntChunk(); + ? 
origCol.getPrevChunk(origContext, index).asIntChunk() + : origCol.getChunk(origContext, index).asIntChunk(); // noinspection unchecked final ColumnSource resCol = result.getColumnSource("Sentinel"); final ColumnSource.GetContext resContext = resCol.makeGetContext(numElements * 3); final IntChunk resContent = usePrev - ? resCol.getPrevChunk(resContext, index).asIntChunk() - : resCol.getChunk(resContext, index).asIntChunk(); + ? resCol.getPrevChunk(resContext, index).asIntChunk() + : resCol.getChunk(resContext, index).asIntChunk(); Assert.assertEquals(0, origContent.size()); Assert.assertEquals(0, resContent.size()); @@ -1305,10 +1243,9 @@ public void testEmptyTable() throws IOException { @Test public void testMergeIndexShiftingPerformance() { final QueryTable testRefreshingTable = - TstUtils.testRefreshingTable(i(0), intCol("IntCol", 0), charCol("CharCol", 'a')); + TstUtils.testRefreshingTable(i(0), intCol("IntCol", 0), charCol("CharCol", 'a')); - final Table joined = - testRefreshingTable.view("CharCol").join(testRefreshingTable, "CharCol", "IntCol"); + final Table joined = testRefreshingTable.view("CharCol").join(testRefreshingTable, "CharCol", "IntCol"); final TableMap map = joined.byExternal("IntCol"); final Table merged = map.merge(); @@ -1330,8 +1267,7 @@ public void testMergeIndexShiftingPerformance() { addChars[ii] = (char) ('a' + ((firstNextIdx + ii) % 26)); } - addToTable(testRefreshingTable, addIndex, intCol("IntCol", addInts), - charCol("CharCol", addChars)); + addToTable(testRefreshingTable, addIndex, intCol("IntCol", addInts), charCol("CharCol", addChars)); testRefreshingTable.notifyListeners(addIndex, i(), i()); }); @@ -1339,11 +1275,10 @@ public void testMergeIndexShiftingPerformance() { final long stepDuration = end - stepStart; final long duration = end - start; stepStart = end; - System.out.println("Step=" + step + ", duration=" + duration + "ms, stepDuration=" - + stepDuration + "ms"); + System.out.println("Step=" + step + ", duration=" + 
duration + "ms, stepDuration=" + stepDuration + "ms"); if (duration > 30_000) { TestCase.fail( - "This test is expected to take around 5 seconds on a Mac with the new shift behavior, something is not right."); + "This test is expected to take around 5 seconds on a Mac with the new shift behavior, something is not right."); } } diff --git a/DB/src/test/java/io/deephaven/db/tables/utils/TestTypeUtils.java b/DB/src/test/java/io/deephaven/db/tables/utils/TestTypeUtils.java index 707a933db2d..974e0339be4 100644 --- a/DB/src/test/java/io/deephaven/db/tables/utils/TestTypeUtils.java +++ b/DB/src/test/java/io/deephaven/db/tables/utils/TestTypeUtils.java @@ -13,83 +13,50 @@ public class TestTypeUtils extends TestCase { public void testGetBoxedType() { - Require.equals(getBoxedType(boolean.class), "getBoxedType(boolean.class)", Boolean.class, - "Boolean.class"); - Require.equals(getBoxedType(byte.class), "getBoxedType(byte.class)", Byte.class, - "Byte.class"); - Require.equals(getBoxedType(short.class), "getBoxedType(short.class)", Short.class, - "Short.class"); - Require.equals(getBoxedType(char.class), "getBoxedType(char.class)", Character.class, - "Character.class"); - Require.equals(getBoxedType(int.class), "getBoxedType(int.class)", Integer.class, - "Integer.class"); - Require.equals(getBoxedType(long.class), "getBoxedType(long.class)", Long.class, - "Long.class"); - Require.equals(getBoxedType(float.class), "getBoxedType(float.class)", Float.class, - "Float.class"); - Require.equals(getBoxedType(double.class), "getBoxedType(double.class)", Double.class, - "Double.class"); - - Require.equals(getBoxedType(Boolean.class), "getBoxedType(Boolean.class)", Boolean.class, - "Boolean.class"); - Require.equals(getBoxedType(Byte.class), "getBoxedType(Byte.class)", Byte.class, - "Byte.class"); - Require.equals(getBoxedType(Short.class), "getBoxedType(Short.class)", Short.class, - "Short.class"); - Require.equals(getBoxedType(Character.class), "getBoxedType(Character.class)", - 
Character.class, "Character.class"); - Require.equals(getBoxedType(Integer.class), "getBoxedType(Integer.class)", Integer.class, - "Integer.class"); - Require.equals(getBoxedType(Long.class), "getBoxedType(Long.class)", Long.class, - "Long.class"); - Require.equals(getBoxedType(Float.class), "getBoxedType(Float.class)", Float.class, - "Float.class"); - Require.equals(getBoxedType(Double.class), "getBoxedType(Double.class)", Double.class, - "Double.class"); - - - Require.equals(getBoxedType(Object.class), "getBoxedType(Object.class)", Object.class, - "Object.class"); - Require.equals(getBoxedType(CharSequence.class), "getBoxedType(CharSequence.class)", - CharSequence.class, "CharSequence.class"); - Require.equals(getBoxedType(String.class), "getBoxedType(String.class)", String.class, - "String.class"); + Require.equals(getBoxedType(boolean.class), "getBoxedType(boolean.class)", Boolean.class, "Boolean.class"); + Require.equals(getBoxedType(byte.class), "getBoxedType(byte.class)", Byte.class, "Byte.class"); + Require.equals(getBoxedType(short.class), "getBoxedType(short.class)", Short.class, "Short.class"); + Require.equals(getBoxedType(char.class), "getBoxedType(char.class)", Character.class, "Character.class"); + Require.equals(getBoxedType(int.class), "getBoxedType(int.class)", Integer.class, "Integer.class"); + Require.equals(getBoxedType(long.class), "getBoxedType(long.class)", Long.class, "Long.class"); + Require.equals(getBoxedType(float.class), "getBoxedType(float.class)", Float.class, "Float.class"); + Require.equals(getBoxedType(double.class), "getBoxedType(double.class)", Double.class, "Double.class"); + + Require.equals(getBoxedType(Boolean.class), "getBoxedType(Boolean.class)", Boolean.class, "Boolean.class"); + Require.equals(getBoxedType(Byte.class), "getBoxedType(Byte.class)", Byte.class, "Byte.class"); + Require.equals(getBoxedType(Short.class), "getBoxedType(Short.class)", Short.class, "Short.class"); + Require.equals(getBoxedType(Character.class), 
"getBoxedType(Character.class)", Character.class, + "Character.class"); + Require.equals(getBoxedType(Integer.class), "getBoxedType(Integer.class)", Integer.class, "Integer.class"); + Require.equals(getBoxedType(Long.class), "getBoxedType(Long.class)", Long.class, "Long.class"); + Require.equals(getBoxedType(Float.class), "getBoxedType(Float.class)", Float.class, "Float.class"); + Require.equals(getBoxedType(Double.class), "getBoxedType(Double.class)", Double.class, "Double.class"); + + + Require.equals(getBoxedType(Object.class), "getBoxedType(Object.class)", Object.class, "Object.class"); + Require.equals(getBoxedType(CharSequence.class), "getBoxedType(CharSequence.class)", CharSequence.class, + "CharSequence.class"); + Require.equals(getBoxedType(String.class), "getBoxedType(String.class)", String.class, "String.class"); } public void testGetUnboxedType() { - Require.equals(getUnboxedType(Boolean.class), "getUnboxedType(Boolean.class)", - boolean.class, "boolean.class"); - Require.equals(getUnboxedType(Byte.class), "getUnboxedType(Byte.class)", byte.class, - "byte.class"); - Require.equals(getUnboxedType(Short.class), "getUnboxedType(Short.class)", short.class, - "short.class"); - Require.equals(getUnboxedType(Character.class), "getUnboxedType(Character.class)", - char.class, "char.class"); - Require.equals(getUnboxedType(Integer.class), "getUnboxedType(Integer.class)", int.class, - "int.class"); - Require.equals(getUnboxedType(Long.class), "getUnboxedType(Long.class)", long.class, - "long.class"); - Require.equals(getUnboxedType(Float.class), "getUnboxedType(Float.class)", float.class, - "float.class"); - Require.equals(getUnboxedType(Double.class), "getUnboxedType(Double.class)", double.class, - "double.class"); - - Require.equals(getUnboxedType(boolean.class), "getUnboxedType(boolean.class)", - boolean.class, "boolean.class"); - Require.equals(getUnboxedType(byte.class), "getUnboxedType(byte.class)", byte.class, - "byte.class"); - 
Require.equals(getUnboxedType(short.class), "getUnboxedType(short.class)", short.class, - "short.class"); - Require.equals(getUnboxedType(char.class), "getUnboxedType(char.class)", char.class, - "char.class"); - Require.equals(getUnboxedType(int.class), "getUnboxedType(int.class)", int.class, - "int.class"); - Require.equals(getUnboxedType(long.class), "getUnboxedType(long.class)", long.class, - "long.class"); - Require.equals(getUnboxedType(float.class), "getUnboxedType(float.class)", float.class, - "float.class"); - Require.equals(getUnboxedType(double.class), "getUnboxedType(double.class)", double.class, - "double.class"); + Require.equals(getUnboxedType(Boolean.class), "getUnboxedType(Boolean.class)", boolean.class, "boolean.class"); + Require.equals(getUnboxedType(Byte.class), "getUnboxedType(Byte.class)", byte.class, "byte.class"); + Require.equals(getUnboxedType(Short.class), "getUnboxedType(Short.class)", short.class, "short.class"); + Require.equals(getUnboxedType(Character.class), "getUnboxedType(Character.class)", char.class, "char.class"); + Require.equals(getUnboxedType(Integer.class), "getUnboxedType(Integer.class)", int.class, "int.class"); + Require.equals(getUnboxedType(Long.class), "getUnboxedType(Long.class)", long.class, "long.class"); + Require.equals(getUnboxedType(Float.class), "getUnboxedType(Float.class)", float.class, "float.class"); + Require.equals(getUnboxedType(Double.class), "getUnboxedType(Double.class)", double.class, "double.class"); + + Require.equals(getUnboxedType(boolean.class), "getUnboxedType(boolean.class)", boolean.class, "boolean.class"); + Require.equals(getUnboxedType(byte.class), "getUnboxedType(byte.class)", byte.class, "byte.class"); + Require.equals(getUnboxedType(short.class), "getUnboxedType(short.class)", short.class, "short.class"); + Require.equals(getUnboxedType(char.class), "getUnboxedType(char.class)", char.class, "char.class"); + Require.equals(getUnboxedType(int.class), "getUnboxedType(int.class)", 
int.class, "int.class"); + Require.equals(getUnboxedType(long.class), "getUnboxedType(long.class)", long.class, "long.class"); + Require.equals(getUnboxedType(float.class), "getUnboxedType(float.class)", float.class, "float.class"); + Require.equals(getUnboxedType(double.class), "getUnboxedType(double.class)", double.class, "double.class"); Require.eqNull(getUnboxedType(Object.class), "getUnboxedType(Object.class)"); @@ -152,10 +119,9 @@ public void testTypesSetOrdering() { // Ensure primitive types and boxed types have the same ordering Require.requirement( - Arrays.equals(PRIMITIVE_TYPES.toArray(), - BOXED_TYPES.stream().map(io.deephaven.util.type.TypeUtils::getUnboxedType) - .toArray()), - "Arrays.equals(PRIMITIVE_TYPES.toArray(), BOXED_TYPES.stream().map(TypeUtils::getUnboxedType).toArray())"); + Arrays.equals(PRIMITIVE_TYPES.toArray(), + BOXED_TYPES.stream().map(io.deephaven.util.type.TypeUtils::getUnboxedType).toArray()), + "Arrays.equals(PRIMITIVE_TYPES.toArray(), BOXED_TYPES.stream().map(TypeUtils::getUnboxedType).toArray())"); } public void testIsType() { @@ -195,8 +161,6 @@ public void testIsType() { public void testObjectToString() throws IOException { assertNull(io.deephaven.util.type.TypeUtils.objectToString(null)); // null input - assertEquals("STRING", io.deephaven.util.type.TypeUtils.objectToString("STRING")); // non - // null - // input + assertEquals("STRING", io.deephaven.util.type.TypeUtils.objectToString("STRING")); // non null input } } diff --git a/DB/src/test/java/io/deephaven/db/tables/utils/TestWindowCheck.java b/DB/src/test/java/io/deephaven/db/tables/utils/TestWindowCheck.java index 8175d1b8a07..be691194c83 100644 --- a/DB/src/test/java/io/deephaven/db/tables/utils/TestWindowCheck.java +++ b/DB/src/test/java/io/deephaven/db/tables/utils/TestWindowCheck.java @@ -44,9 +44,8 @@ public void tearDown() throws Exception { * * Time advances by one second per step, which randomly modifies the source table. 
* - * The WindowEvalNugget verifies the original columns are unchanged and that the value of the - * InWindow column is correct. A prev checker is added to ensure that getPrev works on the new - * table. + * The WindowEvalNugget verifies the original columns are unchanged and that the value of the InWindow column is + * correct. A prev checker is added to ensure that getPrev works on the new table. */ @Test public void testWindowCheckIterative() { @@ -57,12 +56,11 @@ public void testWindowCheckIterative() { final int size = 100; final DBDateTime startTime = DBTimeUtils.convertDateTime("2018-02-23T09:30:00 NY"); final DBDateTime endTime = DBTimeUtils.convertDateTime("2018-02-23T16:00:00 NY"); - final QueryTable table = getTable(size, random, - columnInfo = initColumnInfos(new String[] {"Timestamp", "C1"}, + final QueryTable table = getTable(size, random, columnInfo = initColumnInfos(new String[] {"Timestamp", "C1"}, new TstUtils.UnsortedDateTimeGenerator(startTime, endTime, 0.01), new TstUtils.IntGenerator(1, 100))); - // Use a smaller step size so that the random walk on tableSize doesn't become unwieldy - // given the large number of steps. + // Use a smaller step size so that the random walk on tableSize doesn't become unwieldy given the large number + // of steps. 
final int stepSize = (int) Math.ceil(Math.sqrt(size)); final TestTimeProvider timeProvider = new TestTimeProvider(); @@ -89,24 +87,21 @@ public void testWindowCheckIterative() { if (combined) { LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { advanceTime(timeProvider, en); - GenerateTableUpdates.generateShiftAwareTableUpdates( - GenerateTableUpdates.DEFAULT_PROFILE, size, random, table, columnInfo); + GenerateTableUpdates.generateShiftAwareTableUpdates(GenerateTableUpdates.DEFAULT_PROFILE, size, + random, table, columnInfo); }); TstUtils.validate("Step " + step, en); } else { - LiveTableMonitor.DEFAULT - .runWithinUnitTestCycle(() -> advanceTime(timeProvider, en)); + LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> advanceTime(timeProvider, en)); if (LiveTableTestCase.printTableUpdates) { - TstUtils.validate( - "Step = " + step + " time = " + new DBDateTime(timeProvider.now), en); + TstUtils.validate("Step = " + step + " time = " + new DBDateTime(timeProvider.now), en); } for (int ii = 0; ii < stepsPerTick; ++ii) { if (LiveTableTestCase.printTableUpdates) { System.out.println("Step " + step + "-" + ii); } - LiveTableTestCase.simulateShiftAwareStep(step + "-" + ii, stepSize, random, - table, columnInfo, en); + LiveTableTestCase.simulateShiftAwareStep(step + "-" + ii, stepSize, random, table, columnInfo, en); } } } @@ -131,12 +126,11 @@ public void testWindowCheckEmptyInitial() { timeProvider.now = startTime.getNanos(); final DBDateTime[] emptyDateTimeArray = new DBDateTime[0]; - final Table tableToCheck = - testRefreshingTable(i(), c("Timestamp", emptyDateTimeArray), intCol("Sentinel")); + final Table tableToCheck = testRefreshingTable(i(), c("Timestamp", emptyDateTimeArray), intCol("Sentinel")); - final Pair windowed = LiveTableMonitor.DEFAULT - .sharedLock().computeLocked(() -> WindowCheck.addTimeWindowInternal(timeProvider, - tableToCheck, "Timestamp", DBTimeUtils.SECOND * 60, "InWindow", false)); + final Pair windowed = 
LiveTableMonitor.DEFAULT.sharedLock() + .computeLocked(() -> WindowCheck.addTimeWindowInternal(timeProvider, tableToCheck, "Timestamp", + DBTimeUtils.SECOND * 60, "InWindow", false)); TableTools.showWithIndex(windowed.first); @@ -170,8 +164,7 @@ class FailureListener extends InstrumentedShiftAwareListener { public void onUpdate(Update upstream) {} @Override - public void onFailureInternal(Throwable originalException, - UpdatePerformanceTracker.Entry sourceEntry) { + public void onFailureInternal(Throwable originalException, UpdatePerformanceTracker.Entry sourceEntry) { exception = originalException; final StringWriter errors = new StringWriter(); originalException.printStackTrace(new PrintWriter(errors)); @@ -186,8 +179,8 @@ public void onFailureInternal(Throwable originalException, this.table = table; this.timeProvider = timeProvider; windowNanos = 300 * DBTimeUtils.SECOND; - windowed = WindowCheck.addTimeWindowInternal(timeProvider, table, "Timestamp", - windowNanos, "InWindow", false); + windowed = + WindowCheck.addTimeWindowInternal(timeProvider, table, "Timestamp", windowNanos, "InWindow", false); validator = TableUpdateValidator.make((QueryTable) windowed.first); ((QueryTable) windowed.first).listenForUpdates(windowedFailureListener); diff --git a/DB/src/test/java/io/deephaven/db/tables/verify/TestTableAssertions.java b/DB/src/test/java/io/deephaven/db/tables/verify/TestTableAssertions.java index 4e87fa41848..82358a4316a 100644 --- a/DB/src/test/java/io/deephaven/db/tables/verify/TestTableAssertions.java +++ b/DB/src/test/java/io/deephaven/db/tables/verify/TestTableAssertions.java @@ -33,40 +33,35 @@ public void after() throws Exception { @Test public void testStatic() { - final Table test = TableTools.newTable( - stringCol("Plant", "Apple", "Banana", "Carrot", "Daffodil"), - intCol("Int", 9, 3, 2, 1), - doubleCol("D1", QueryConstants.NULL_DOUBLE, Math.E, Math.PI, Double.NEGATIVE_INFINITY)); - - TestCase.assertSame(test, - TableAssertions.assertSorted("test", 
test, "Plant", SortingOrder.Ascending)); - TestCase.assertSame(test, - TableAssertions.assertSorted(test, "Int", SortingOrder.Descending)); + final Table test = TableTools.newTable(stringCol("Plant", "Apple", "Banana", "Carrot", "Daffodil"), + intCol("Int", 9, 3, 2, 1), + doubleCol("D1", QueryConstants.NULL_DOUBLE, Math.E, Math.PI, Double.NEGATIVE_INFINITY)); + + TestCase.assertSame(test, TableAssertions.assertSorted("test", test, "Plant", SortingOrder.Ascending)); + TestCase.assertSame(test, TableAssertions.assertSorted(test, "Int", SortingOrder.Descending)); try { TableAssertions.assertSorted("test", test, "D1", SortingOrder.Ascending); TestCase.fail("Table is not actually sorted by D1"); } catch (SortedAssertionFailure saf) { TestCase.assertEquals( - "Table violates sorted assertion, table description=test, column=D1, Ascending, 3.141592653589793 is out of order with respect to -Infinity!", - saf.getMessage()); + "Table violates sorted assertion, table description=test, column=D1, Ascending, 3.141592653589793 is out of order with respect to -Infinity!", + saf.getMessage()); } TestCase.assertEquals(SortingOrder.Ascending, - SortedColumnsAttribute.getOrderForColumn(test, "Plant").orElse(null)); + SortedColumnsAttribute.getOrderForColumn(test, "Plant").orElse(null)); TestCase.assertEquals(SortingOrder.Descending, - SortedColumnsAttribute.getOrderForColumn(test, "Int").orElse(null)); - TestCase.assertEquals(Optional.empty(), - SortedColumnsAttribute.getOrderForColumn(test, "D1")); + SortedColumnsAttribute.getOrderForColumn(test, "Int").orElse(null)); + TestCase.assertEquals(Optional.empty(), SortedColumnsAttribute.getOrderForColumn(test, "D1")); } @Test public void testRefreshing() { final QueryTable test = TstUtils.testRefreshingTable(i(10, 11, 12, 17), - stringCol("Plant", "Apple", "Banana", "Carrot", "Daffodil"), - intCol("Int", 9, 7, 5, 3)); + stringCol("Plant", "Apple", "Banana", "Carrot", "Daffodil"), + intCol("Int", 9, 7, 5, 3)); - final Table testPlant = - 
TableAssertions.assertSorted("test", test, "Plant", SortingOrder.Ascending); + final Table testPlant = TableAssertions.assertSorted("test", test, "Plant", SortingOrder.Ascending); final Table testInt = TableAssertions.assertSorted(test, "Int", SortingOrder.Descending); assertTableEquals(test, testPlant); @@ -83,7 +78,7 @@ public void testRefreshing() { LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { addToTable(test, i(9, 13, 18), stringCol("Plant", "Aaple", "DAFODIL", "Forsythia"), - intCol("Int", 10, 4, 0)); + intCol("Int", 10, 4, 0)); TableTools.showWithIndex(test); test.notifyListeners(i(9, 13, 18), i(), i()); }); @@ -103,13 +98,12 @@ public void testIncrementalRandom(int seed, int size) { // noinspection rawtypes final ColumnInfo[] columnInfo; final QueryTable table = getTable(true, size, random, - columnInfo = initColumnInfos(new String[] {"SortValue", "Sentinel"}, - new TstUtils.SortedLongGenerator(0, 1_000_000_000L), - new TstUtils.IntGenerator(0, 100000))); + columnInfo = initColumnInfos(new String[] {"SortValue", "Sentinel"}, + new TstUtils.SortedLongGenerator(0, 1_000_000_000L), + new TstUtils.IntGenerator(0, 100000))); - // This code could give you some level of confidence that we actually do work as intended; - // but is hard to test + // This code could give you some level of confidence that we actually do work as intended; but is hard to test // try { // final Random random1 = new Random(0); // QueryScope.addParam("random1", random); @@ -121,29 +115,26 @@ public void testIncrementalRandom(int seed, int size) { new EvalNugget() { @Override protected Table e() { - return TableAssertions.assertSorted("table", table, "SortValue", - SortingOrder.Ascending); + return TableAssertions.assertSorted("table", table, "SortValue", SortingOrder.Ascending); } }, // new EvalNugget() { // @Override // protected Table e() { - // return TableAssertions.assertSorted("badTable", badTable, "RV", - // SortingOrder.Ascending); + // return 
TableAssertions.assertSorted("badTable", badTable, "RV", SortingOrder.Ascending); // } // }, new EvalNugget() { @Override protected Table e() { return TableAssertions.assertSorted("table sorted by sentinel", - table.sortDescending("Sentinel"), "Sentinel", SortingOrder.Descending); + table.sortDescending("Sentinel"), "Sentinel", SortingOrder.Descending); } }, }; for (int step = 0; step < maxSteps; step++) { - LiveTableMonitor.DEFAULT - .runWithinUnitTestCycle(() -> GenerateTableUpdates.generateShiftAwareTableUpdates( + LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> GenerateTableUpdates.generateShiftAwareTableUpdates( GenerateTableUpdates.DEFAULT_PROFILE, size, random, table, columnInfo)); validate(en); } diff --git a/DB/src/test/java/io/deephaven/db/util/JpyPlaypen.java b/DB/src/test/java/io/deephaven/db/util/JpyPlaypen.java index 007c1eeb6e8..d947c419ea1 100644 --- a/DB/src/test/java/io/deephaven/db/util/JpyPlaypen.java +++ b/DB/src/test/java/io/deephaven/db/util/JpyPlaypen.java @@ -16,7 +16,7 @@ public class JpyPlaypen { public static void main(String[] args) throws InterruptedException { System.setProperty("jpy.jdlLib", "/Library/Python/2.7/site-packages/jdl.so"); System.setProperty("jpy.pythonLib", - "/System/Library/Frameworks/Python.framework/Versions/2.7/lib/libpython2.7.dylib"); + "/System/Library/Frameworks/Python.framework/Versions/2.7/lib/libpython2.7.dylib"); System.setProperty("jpy.jpyLib", "/Library/Python/2.7/site-packages/jpy.so"); PyLib.startPython(); @@ -27,16 +27,14 @@ public static void main(String[] args) throws InterruptedException { Thread.sleep(1000); - PyObject s = - main.executeCode("s = 'Hello World'\nprint s\nii = 7\njj = 8.0\n", PyInputMode.SCRIPT); + PyObject s = main.executeCode("s = 'Hello World'\nprint s\nii = 7\njj = 8.0\n", PyInputMode.SCRIPT); if (s != null) { System.out.println("S = \"" + s + "\" (" + s.getClass().getCanonicalName() + ")"); Object objectValue = s.getObjectValue(); System.out.println("OV = \"" + objectValue 
+ "\" (" - + (objectValue != null ? objectValue.getClass().getCanonicalName() : "(null)") - + ")"); + + (objectValue != null ? objectValue.getClass().getCanonicalName() : "(null)") + ")"); PyObject type = s.getAttribute("__class__"); System.out.println("Type = \"" + type); } else { @@ -48,8 +46,7 @@ public static void main(String[] args) throws InterruptedException { System.out.println("S = \"" + s + "\" (" + s.getClass().getCanonicalName() + ")"); Object objectValue = s.getObjectValue(); System.out.println("OV = \"" + objectValue + "\" (" - + (objectValue != null ? objectValue.getClass().getCanonicalName() : "(null)") - + ")"); + + (objectValue != null ? objectValue.getClass().getCanonicalName() : "(null)") + ")"); PyObject type = s.getAttribute("__class__"); System.out.println("Type = \"" + type); } else { @@ -61,8 +58,7 @@ public static void main(String[] args) throws InterruptedException { System.out.println("II = \"" + num + "\" (" + num.getClass().getCanonicalName() + ")"); Object objectValue = num.getObjectValue(); System.out.println("OV = \"" + objectValue + "\" (" - + (objectValue != null ? objectValue.getClass().getCanonicalName() : "(null)") - + ")"); + + (objectValue != null ? objectValue.getClass().getCanonicalName() : "(null)") + ")"); } else { System.out.println("S = null"); } @@ -72,20 +68,17 @@ public static void main(String[] args) throws InterruptedException { System.out.println("JJ = \"" + num + "\" (" + num.getClass().getCanonicalName() + ")"); Object objectValue = num.getObjectValue(); System.out.println("OV = \"" + objectValue + "\" (" - + (objectValue != null ? objectValue.getClass().getCanonicalName() : "(null)") - + ")"); + + (objectValue != null ? 
objectValue.getClass().getCanonicalName() : "(null)") + ")"); } else { System.out.println("S = null"); } s = main.executeCode("globals().keys()", PyInputMode.EXPRESSION); if (s != null) { - System.out.println( - "Globals().keys() = \"" + s + "\" (" + num.getClass().getCanonicalName() + ")"); + System.out.println("Globals().keys() = \"" + s + "\" (" + num.getClass().getCanonicalName() + ")"); Object objectValue = s.getType(); System.out.println("OV = \"" + objectValue + "\" (" - + (objectValue != null ? objectValue.getClass().getCanonicalName() : "(null)") - + ")"); + + (objectValue != null ? objectValue.getClass().getCanonicalName() : "(null)") + ")"); if (s.isList()) { System.out.println("isList()"); List asList = s.asList(); @@ -101,11 +94,10 @@ public static void main(String[] args) throws InterruptedException { s = main.executeCode("globals()", PyInputMode.EXPRESSION); if (s != null) { - System.out - .println("Globals() = \"" + s + "\" (" + num.getClass().getCanonicalName() + ")"); + System.out.println("Globals() = \"" + s + "\" (" + num.getClass().getCanonicalName() + ")"); PyObject type = s.getType(); - System.out.println("Type = \"" + type + "\" (" - + (type != null ? type.getClass().getCanonicalName() : "(null)") + ")"); + System.out.println( + "Type = \"" + type + "\" (" + (type != null ? 
type.getClass().getCanonicalName() : "(null)") + ")"); System.out.println("Dict: " + s.isDict()); diff --git a/DB/src/test/java/io/deephaven/db/util/ScriptEnginePlaypen.java b/DB/src/test/java/io/deephaven/db/util/ScriptEnginePlaypen.java index 5b5727e1f84..9ab049ddb95 100644 --- a/DB/src/test/java/io/deephaven/db/util/ScriptEnginePlaypen.java +++ b/DB/src/test/java/io/deephaven/db/util/ScriptEnginePlaypen.java @@ -13,15 +13,14 @@ public static void main(String[] args) throws ScriptException { ScriptEngineManager factory = new ScriptEngineManager(); for (ScriptEngineFactory sef : factory.getEngineFactories()) { - System.out.println( - sef.getEngineName() + ", " + sef.getLanguageName() + ", " + sef.getNames()); + System.out.println(sef.getEngineName() + ", " + sef.getLanguageName() + ", " + sef.getNames()); } ScriptEngine engine = factory.getEngineByName("scala"); if (engine instanceof IMain) { scala.collection.immutable.List emptyList = scala.collection.JavaConverters - .collectionAsScalaIterable((List) Collections.EMPTY_LIST).toList(); + .collectionAsScalaIterable((List) Collections.EMPTY_LIST).toList(); ((IMain) engine).bind("z", "Int", 5, emptyList); ((IMain) engine).bind("y", "Int", 6, emptyList); } else { diff --git a/DB/src/test/java/io/deephaven/db/util/TestComparisons.java b/DB/src/test/java/io/deephaven/db/util/TestComparisons.java index fa75c7a0c7d..d560d7e2c39 100644 --- a/DB/src/test/java/io/deephaven/db/util/TestComparisons.java +++ b/DB/src/test/java/io/deephaven/db/util/TestComparisons.java @@ -9,12 +9,10 @@ public class TestComparisons { public void testCharCharComparisons() { TestCase.assertTrue(DhCharComparisons.lt(QueryConstants.NULL_CHAR, 'A')); TestCase.assertTrue(DhCharComparisons.gt('A', QueryConstants.NULL_CHAR)); - TestCase - .assertTrue(DhCharComparisons.eq(QueryConstants.NULL_CHAR, QueryConstants.NULL_CHAR)); + TestCase.assertTrue(DhCharComparisons.eq(QueryConstants.NULL_CHAR, QueryConstants.NULL_CHAR)); 
TestCase.assertTrue(DhCharComparisons.lt('A', 'B')); TestCase.assertFalse(DhCharComparisons.gt('A', 'B')); - TestCase - .assertTrue(DhCharComparisons.eq(QueryConstants.NULL_CHAR, QueryConstants.NULL_CHAR)); + TestCase.assertTrue(DhCharComparisons.eq(QueryConstants.NULL_CHAR, QueryConstants.NULL_CHAR)); TestCase.assertFalse(DhCharComparisons.eq('A', QueryConstants.NULL_CHAR)); } @@ -22,12 +20,10 @@ public void testCharCharComparisons() { public void testByteByteComparisons() { TestCase.assertTrue(DhByteComparisons.lt(QueryConstants.NULL_BYTE, (byte) 2)); TestCase.assertTrue(DhByteComparisons.gt((byte) 2, QueryConstants.NULL_BYTE)); - TestCase - .assertTrue(DhByteComparisons.eq(QueryConstants.NULL_BYTE, QueryConstants.NULL_BYTE)); + TestCase.assertTrue(DhByteComparisons.eq(QueryConstants.NULL_BYTE, QueryConstants.NULL_BYTE)); TestCase.assertTrue(DhByteComparisons.lt((byte) 2, (byte) 3)); TestCase.assertFalse(DhByteComparisons.gt((byte) 2, (byte) 3)); - TestCase - .assertTrue(DhByteComparisons.eq(QueryConstants.NULL_BYTE, QueryConstants.NULL_BYTE)); + TestCase.assertTrue(DhByteComparisons.eq(QueryConstants.NULL_BYTE, QueryConstants.NULL_BYTE)); TestCase.assertFalse(DhByteComparisons.eq((byte) 2, QueryConstants.NULL_BYTE)); } @@ -35,12 +31,10 @@ public void testByteByteComparisons() { public void testShortShortComparisons() { TestCase.assertTrue(DhShortComparisons.lt(QueryConstants.NULL_SHORT, (short) 2)); TestCase.assertTrue(DhShortComparisons.gt((short) 2, QueryConstants.NULL_SHORT)); - TestCase.assertTrue( - DhShortComparisons.eq(QueryConstants.NULL_SHORT, QueryConstants.NULL_SHORT)); + TestCase.assertTrue(DhShortComparisons.eq(QueryConstants.NULL_SHORT, QueryConstants.NULL_SHORT)); TestCase.assertTrue(DhShortComparisons.lt((short) 2, (short) 3)); TestCase.assertFalse(DhShortComparisons.gt((short) 2, (short) 3)); - TestCase.assertTrue( - DhShortComparisons.eq(QueryConstants.NULL_SHORT, QueryConstants.NULL_SHORT)); + 
TestCase.assertTrue(DhShortComparisons.eq(QueryConstants.NULL_SHORT, QueryConstants.NULL_SHORT)); TestCase.assertFalse(DhShortComparisons.eq((short) 2, QueryConstants.NULL_SHORT)); } @@ -59,12 +53,10 @@ public void testIntIntComparisons() { public void testLongLongComparisons() { TestCase.assertTrue(DhLongComparisons.lt(QueryConstants.NULL_LONG, 2)); TestCase.assertTrue(DhLongComparisons.gt(2, QueryConstants.NULL_LONG)); - TestCase - .assertTrue(DhLongComparisons.eq(QueryConstants.NULL_LONG, QueryConstants.NULL_LONG)); + TestCase.assertTrue(DhLongComparisons.eq(QueryConstants.NULL_LONG, QueryConstants.NULL_LONG)); TestCase.assertTrue(DhLongComparisons.lt(2, 3)); TestCase.assertFalse(DhLongComparisons.gt(2, 3)); - TestCase - .assertTrue(DhLongComparisons.eq(QueryConstants.NULL_LONG, QueryConstants.NULL_LONG)); + TestCase.assertTrue(DhLongComparisons.eq(QueryConstants.NULL_LONG, QueryConstants.NULL_LONG)); TestCase.assertFalse(DhLongComparisons.eq(2, QueryConstants.NULL_LONG)); } @@ -72,33 +64,25 @@ public void testLongLongComparisons() { public void testFloatFloatComparisons() { TestCase.assertTrue(DhFloatComparisons.lt(QueryConstants.NULL_FLOAT, 1.0f)); TestCase.assertTrue(DhFloatComparisons.gt(2.0f, QueryConstants.NULL_FLOAT)); - TestCase.assertTrue( - DhFloatComparisons.eq(QueryConstants.NULL_FLOAT, QueryConstants.NULL_FLOAT)); + TestCase.assertTrue(DhFloatComparisons.eq(QueryConstants.NULL_FLOAT, QueryConstants.NULL_FLOAT)); TestCase.assertTrue(DhFloatComparisons.lt(2.0f, 100f)); TestCase.assertFalse(DhFloatComparisons.gt(2.0f, 100f)); - TestCase.assertTrue( - DhFloatComparisons.eq(QueryConstants.NULL_FLOAT, QueryConstants.NULL_FLOAT)); + TestCase.assertTrue(DhFloatComparisons.eq(QueryConstants.NULL_FLOAT, QueryConstants.NULL_FLOAT)); TestCase.assertTrue(DhFloatComparisons.eq(Float.NaN, Float.NaN)); TestCase.assertTrue(DhFloatComparisons.lt(QueryConstants.NULL_FLOAT, Float.NaN)); TestCase.assertTrue(DhFloatComparisons.lt(1, Float.NaN)); - TestCase - 
.assertTrue(DhFloatComparisons.eq(Float.NEGATIVE_INFINITY, Float.NEGATIVE_INFINITY)); - TestCase - .assertTrue(DhFloatComparisons.eq(Float.POSITIVE_INFINITY, Float.POSITIVE_INFINITY)); + TestCase.assertTrue(DhFloatComparisons.eq(Float.NEGATIVE_INFINITY, Float.NEGATIVE_INFINITY)); + TestCase.assertTrue(DhFloatComparisons.eq(Float.POSITIVE_INFINITY, Float.POSITIVE_INFINITY)); TestCase.assertTrue(DhFloatComparisons.gt(Float.POSITIVE_INFINITY, 7f)); TestCase.assertTrue(DhFloatComparisons.lt(Float.NEGATIVE_INFINITY, 7f)); TestCase.assertTrue(DhFloatComparisons.lt(7f, Float.POSITIVE_INFINITY)); TestCase.assertTrue(DhFloatComparisons.gt(7f, Float.NEGATIVE_INFINITY)); TestCase.assertEquals(0, DhFloatComparisons.compare(0f, -0.0f)); TestCase.assertEquals(0, DhFloatComparisons.compare(-0.0f, 0.0f)); - TestCase - .assertTrue(DhFloatComparisons.lt(Float.NEGATIVE_INFINITY, Float.POSITIVE_INFINITY)); - TestCase - .assertTrue(DhFloatComparisons.gt(Float.POSITIVE_INFINITY, Float.NEGATIVE_INFINITY)); - TestCase.assertEquals(0, - DhFloatComparisons.compare(Float.POSITIVE_INFINITY, Float.POSITIVE_INFINITY)); - TestCase.assertEquals(0, - DhFloatComparisons.compare(Float.NEGATIVE_INFINITY, Float.NEGATIVE_INFINITY)); + TestCase.assertTrue(DhFloatComparisons.lt(Float.NEGATIVE_INFINITY, Float.POSITIVE_INFINITY)); + TestCase.assertTrue(DhFloatComparisons.gt(Float.POSITIVE_INFINITY, Float.NEGATIVE_INFINITY)); + TestCase.assertEquals(0, DhFloatComparisons.compare(Float.POSITIVE_INFINITY, Float.POSITIVE_INFINITY)); + TestCase.assertEquals(0, DhFloatComparisons.compare(Float.NEGATIVE_INFINITY, Float.NEGATIVE_INFINITY)); TestCase.assertTrue(DhFloatComparisons.gt(Float.NaN, Float.POSITIVE_INFINITY)); TestCase.assertTrue(DhFloatComparisons.lt(Float.POSITIVE_INFINITY, Float.NaN)); } diff --git a/DB/src/test/java/io/deephaven/db/util/TestCompileSimpleFunction.java b/DB/src/test/java/io/deephaven/db/util/TestCompileSimpleFunction.java index 09d8fd6654c..daaa99b91cd 100644 --- 
a/DB/src/test/java/io/deephaven/db/util/TestCompileSimpleFunction.java +++ b/DB/src/test/java/io/deephaven/db/util/TestCompileSimpleFunction.java @@ -11,14 +11,12 @@ public class TestCompileSimpleFunction extends TestCase { public void testString() { - String res = DynamicCompileUtils - .compileSimpleFunction(String.class, "return \"Hello, world\"").get(); + String res = DynamicCompileUtils.compileSimpleFunction(String.class, "return \"Hello, world\"").get(); TestCase.assertEquals("Hello, world", res); } public void testImport() { - String res = - DynamicCompileUtils.compileSimpleFunction(String.class, "return currentDateNy()", + String res = DynamicCompileUtils.compileSimpleFunction(String.class, "return currentDateNy()", Collections.emptyList(), Collections.singleton(DBTimeUtils.class)).get(); TestCase.assertEquals(DBTimeUtils.currentDateNy(), res); } diff --git a/DB/src/test/java/io/deephaven/db/util/TestDBColorUtil.java b/DB/src/test/java/io/deephaven/db/util/TestDBColorUtil.java index c9aedc9e199..0d705b3953e 100644 --- a/DB/src/test/java/io/deephaven/db/util/TestDBColorUtil.java +++ b/DB/src/test/java/io/deephaven/db/util/TestDBColorUtil.java @@ -21,39 +21,33 @@ public void testRowFormatWhereNew() { testRowFormatWhere(t1.formatRowWhere("X > 5", "ALICEBLUE"), ALICEBLUE); testRowFormatWhere(t1.formatRowWhere("X > 5", "`#F0F8FF`"), ALICEBLUE); testRowFormatWhere(t1.formatRowWhere("X > 5", "`aliceblue`"), ALICEBLUE); - testRowFormatWhere( - t1.formatRowWhere("X > 5", "io.deephaven.gui.color.Color.color(`aliceblue`)"), - ALICEBLUE); + testRowFormatWhere(t1.formatRowWhere("X > 5", "io.deephaven.gui.color.Color.color(`aliceblue`)"), ALICEBLUE); } public void testRowFormatWhereOld() { testRowFormatWhere(t1.formatRowWhere("X > 5", "VIVID_RED"), VIVID_RED); testRowFormatWhere(t1.formatRowWhere("X > 5", "`VIVID_RED`"), VIVID_RED); - testRowFormatWhere( - t1.formatRowWhere("X > 5", "io.deephaven.gui.color.Color.color(`VIVID_RED`)"), - VIVID_RED); + 
testRowFormatWhere(t1.formatRowWhere("X > 5", "io.deephaven.gui.color.Color.color(`VIVID_RED`)"), VIVID_RED); } public void testFormatColumnsNew() { testFormatColumns(t1.formatColumns("X = i > 200 ? NO_FORMATTING : ALICEBLUE"), ALICEBLUE); testFormatColumns(t1.formatColumns("X = `#F0F8FF`"), ALICEBLUE); testFormatColumns(t1.formatColumns("X = `aliceblue`"), ALICEBLUE); - testFormatColumns(t1.formatColumns("X = io.deephaven.gui.color.Color.color(`aliceblue`)"), - ALICEBLUE); + testFormatColumns(t1.formatColumns("X = io.deephaven.gui.color.Color.color(`aliceblue`)"), ALICEBLUE); } public void testFormatColumnsOld() { testFormatColumns(t1.formatColumns("X = i > 200 ? NO_FORMATTING : VIVID_RED"), VIVID_RED); testFormatColumns(t1.formatColumns("X = `VIVID_RED`"), VIVID_RED); - testFormatColumns(t1.formatColumns("X = io.deephaven.gui.color.Color.color(`VIVID_RED`)"), - VIVID_RED); + testFormatColumns(t1.formatColumns("X = io.deephaven.gui.color.Color.color(`VIVID_RED`)"), VIVID_RED); } public void testBackground() { assertEquals("111111111111111111111111100000000000000000000000000000000", - Long.toBinaryString(DBColorUtil.background(Color.colorRGB(255, 255, 255)))); + Long.toBinaryString(DBColorUtil.background(Color.colorRGB(255, 255, 255)))); assertEquals("100000000000000000000000000000000000000000000000000000000", - Long.toBinaryString(DBColorUtil.background(Color.colorRGB(0, 0, 0)))); + Long.toBinaryString(DBColorUtil.background(Color.colorRGB(0, 0, 0)))); assertEquals(DBColorUtil.bg(ALICEBLUE), DBColorUtil.background(ALICEBLUE)); assertEquals(DBColorUtil.bg(ALICEBLUE), DBColorUtil.background("ALICEBLUE")); @@ -70,12 +64,11 @@ public void testBackground() { public void testForeground() { assertEquals("101111111111111111111111111", - Long.toBinaryString(DBColorUtil.foreground(Color.colorRGB(255, 255, 255)))); + Long.toBinaryString(DBColorUtil.foreground(Color.colorRGB(255, 255, 255)))); assertEquals("101000000000000000000000000", - 
Long.toBinaryString(DBColorUtil.foreground(Color.colorRGB(0, 0, 0)))); + Long.toBinaryString(DBColorUtil.foreground(Color.colorRGB(0, 0, 0)))); - assertEquals(DBColorUtil.fg(ALICEBLUE), - DBColorUtil.foreground(Color.colorRGB(240, 248, 255))); + assertEquals(DBColorUtil.fg(ALICEBLUE), DBColorUtil.foreground(Color.colorRGB(240, 248, 255))); assertEquals(DBColorUtil.fg(ALICEBLUE), DBColorUtil.foreground(ALICEBLUE)); assertEquals(DBColorUtil.fg(ALICEBLUE), DBColorUtil.foreground("ALICEBLUE")); assertEquals(DBColorUtil.fg(ALICEBLUE), DBColorUtil.foreground("#F0F8FF")); @@ -93,12 +86,11 @@ public void testForeground() { public void testForegroundOverride() { assertEquals("111111111111111111111111111", - Long.toBinaryString(DBColorUtil.foregroundOverride(Color.colorRGB(255, 255, 255)))); + Long.toBinaryString(DBColorUtil.foregroundOverride(Color.colorRGB(255, 255, 255)))); assertEquals("111000000000000000000000000", - Long.toBinaryString(DBColorUtil.foregroundOverride(Color.colorRGB(0, 0, 0)))); + Long.toBinaryString(DBColorUtil.foregroundOverride(Color.colorRGB(0, 0, 0)))); - assertEquals(DBColorUtil.fgo(ALICEBLUE), - DBColorUtil.foregroundOverride(Color.colorRGB(240, 248, 255))); + assertEquals(DBColorUtil.fgo(ALICEBLUE), DBColorUtil.foregroundOverride(Color.colorRGB(240, 248, 255))); assertEquals(DBColorUtil.fgo(ALICEBLUE), DBColorUtil.foregroundOverride(ALICEBLUE)); assertEquals(DBColorUtil.fgo(ALICEBLUE), DBColorUtil.foregroundOverride("ALICEBLUE")); assertEquals(DBColorUtil.fgo(ALICEBLUE), DBColorUtil.foregroundOverride("#F0F8FF")); @@ -120,12 +112,11 @@ public void testForegroundOverride() { public void testBackgroundOverride() { assertEquals("1111111111111111111111111100000000000000000000000000000000", - Long.toBinaryString(DBColorUtil.backgroundOverride(Color.colorRGB(255, 255, 255)))); + Long.toBinaryString(DBColorUtil.backgroundOverride(Color.colorRGB(255, 255, 255)))); assertEquals("1100000000000000000000000000000000000000000000000000000000", - 
Long.toBinaryString(DBColorUtil.backgroundOverride(Color.colorRGB(0, 0, 0)))); + Long.toBinaryString(DBColorUtil.backgroundOverride(Color.colorRGB(0, 0, 0)))); - assertEquals(DBColorUtil.bgo(ALICEBLUE), - DBColorUtil.backgroundOverride(Color.colorRGB(240, 248, 255))); + assertEquals(DBColorUtil.bgo(ALICEBLUE), DBColorUtil.backgroundOverride(Color.colorRGB(240, 248, 255))); assertEquals(DBColorUtil.bgo(ALICEBLUE), DBColorUtil.backgroundOverride(ALICEBLUE)); assertEquals(DBColorUtil.bgo(ALICEBLUE), DBColorUtil.backgroundOverride("ALICEBLUE")); assertEquals(DBColorUtil.bgo(ALICEBLUE), DBColorUtil.backgroundOverride("#F0F8FF")); @@ -146,23 +137,18 @@ public void testBackgroundOverride() { } public void testBackgroundForeground() { - assertEquals("111111111111111111111111100000101111111111111111111111111", - Long.toBinaryString(DBColorUtil.backgroundForeground(Color.colorRGB(255, 255, 255), - Color.colorRGB(255, 255, 255)))); - assertEquals("100000000000000000000000000000101000000000000000000000000", - Long.toBinaryString(DBColorUtil.backgroundForeground(Color.colorRGB(0, 0, 0), - Color.colorRGB(0, 0, 0)))); - - assertEquals(DBColorUtil.bgfg(ALICEBLUE, ANTIQUEWHITE), DBColorUtil - .backgroundForeground(Color.colorRGB(240, 248, 255), Color.colorRGB(250, 235, 215))); - assertEquals(DBColorUtil.bgfg(ALICEBLUE, ANTIQUEWHITE), - DBColorUtil.backgroundForeground(ALICEBLUE, ANTIQUEWHITE)); - assertEquals(DBColorUtil.bgfg(ALICEBLUE, ANTIQUEWHITE), - DBColorUtil.backgroundForeground("#F0F8FF", "#FAEBD7")); + assertEquals("111111111111111111111111100000101111111111111111111111111", Long.toBinaryString( + DBColorUtil.backgroundForeground(Color.colorRGB(255, 255, 255), Color.colorRGB(255, 255, 255)))); + assertEquals("100000000000000000000000000000101000000000000000000000000", Long + .toBinaryString(DBColorUtil.backgroundForeground(Color.colorRGB(0, 0, 0), Color.colorRGB(0, 0, 0)))); + assertEquals(DBColorUtil.bgfg(ALICEBLUE, ANTIQUEWHITE), - DBColorUtil.bgfg("#F0F8FF", 
"#FAEBD7")); + DBColorUtil.backgroundForeground(Color.colorRGB(240, 248, 255), Color.colorRGB(250, 235, 215))); assertEquals(DBColorUtil.bgfg(ALICEBLUE, ANTIQUEWHITE), - DBColorUtil.bgfg(240, 248, 255, 250, 235, 215)); + DBColorUtil.backgroundForeground(ALICEBLUE, ANTIQUEWHITE)); + assertEquals(DBColorUtil.bgfg(ALICEBLUE, ANTIQUEWHITE), DBColorUtil.backgroundForeground("#F0F8FF", "#FAEBD7")); + assertEquals(DBColorUtil.bgfg(ALICEBLUE, ANTIQUEWHITE), DBColorUtil.bgfg("#F0F8FF", "#FAEBD7")); + assertEquals(DBColorUtil.bgfg(ALICEBLUE, ANTIQUEWHITE), DBColorUtil.bgfg(240, 248, 255, 250, 235, 215)); t1.formatColumns("X = bgfg(ALICEBLUE,ANTIQUEWHITE)"); @@ -171,39 +157,35 @@ public void testBackgroundForeground() { t1.formatColumns("X = bgfg(240, 248, 255,250,235,215)"); t1.formatColumns("X = backgroundForeground(ALICEBLUE,ANTIQUEWHITE)"); - assertFalse( - DBColorUtil.isBackgroundSelectionOverride(DBColorUtil.bgfg(ALICEBLUE, ANTIQUEWHITE))); - assertFalse( - DBColorUtil.isForegroundSelectionOverride(DBColorUtil.bgfg(ALICEBLUE, ANTIQUEWHITE))); + assertFalse(DBColorUtil.isBackgroundSelectionOverride(DBColorUtil.bgfg(ALICEBLUE, ANTIQUEWHITE))); + assertFalse(DBColorUtil.isForegroundSelectionOverride(DBColorUtil.bgfg(ALICEBLUE, ANTIQUEWHITE))); assertFalse(DBColorUtil.isBackgroundSelectionOverride( - DBColorUtil.bgfg(DBColorUtil.bg(ALICEBLUE), DBColorUtil.fg(ANTIQUEWHITE)))); + DBColorUtil.bgfg(DBColorUtil.bg(ALICEBLUE), DBColorUtil.fg(ANTIQUEWHITE)))); assertFalse(DBColorUtil.isForegroundSelectionOverride( - DBColorUtil.bgfg(DBColorUtil.bg(ALICEBLUE), DBColorUtil.fg(ANTIQUEWHITE)))); + DBColorUtil.bgfg(DBColorUtil.bg(ALICEBLUE), DBColorUtil.fg(ANTIQUEWHITE)))); assertTrue(DBColorUtil.isBackgroundSelectionOverride( - DBColorUtil.bgfg(DBColorUtil.bgo(ALICEBLUE), DBColorUtil.fg(ANTIQUEWHITE)))); + DBColorUtil.bgfg(DBColorUtil.bgo(ALICEBLUE), DBColorUtil.fg(ANTIQUEWHITE)))); assertFalse(DBColorUtil.isForegroundSelectionOverride( - DBColorUtil.bgfg(DBColorUtil.bgo(ALICEBLUE), 
DBColorUtil.fg(ANTIQUEWHITE)))); + DBColorUtil.bgfg(DBColorUtil.bgo(ALICEBLUE), DBColorUtil.fg(ANTIQUEWHITE)))); assertFalse(DBColorUtil.isBackgroundSelectionOverride( - DBColorUtil.bgfg(DBColorUtil.bg(ALICEBLUE), DBColorUtil.fgo(ANTIQUEWHITE)))); + DBColorUtil.bgfg(DBColorUtil.bg(ALICEBLUE), DBColorUtil.fgo(ANTIQUEWHITE)))); assertTrue(DBColorUtil.isForegroundSelectionOverride( - DBColorUtil.bgfg(DBColorUtil.bg(ALICEBLUE), DBColorUtil.fgo(ANTIQUEWHITE)))); + DBColorUtil.bgfg(DBColorUtil.bg(ALICEBLUE), DBColorUtil.fgo(ANTIQUEWHITE)))); assertTrue(DBColorUtil.isBackgroundSelectionOverride( - DBColorUtil.bgfg(DBColorUtil.bgo(ALICEBLUE), DBColorUtil.fgo(ANTIQUEWHITE)))); + DBColorUtil.bgfg(DBColorUtil.bgo(ALICEBLUE), DBColorUtil.fgo(ANTIQUEWHITE)))); assertTrue(DBColorUtil.isForegroundSelectionOverride( - DBColorUtil.bgfg(DBColorUtil.bgo(ALICEBLUE), DBColorUtil.fgo(ANTIQUEWHITE)))); + DBColorUtil.bgfg(DBColorUtil.bgo(ALICEBLUE), DBColorUtil.fgo(ANTIQUEWHITE)))); } public void testBackgroundForegroundAuto() { assertEquals("111111111111111111111111100000001000000000000000000000000", - Long.toBinaryString(DBColorUtil.bgfga(255, 255, 255))); + Long.toBinaryString(DBColorUtil.bgfga(255, 255, 255))); assertEquals("100000000000000000000000000000001111000001110000011100000", - Long.toBinaryString(DBColorUtil.bgfga(0, 0, 0))); + Long.toBinaryString(DBColorUtil.bgfga(0, 0, 0))); - assertEquals(DBColorUtil.bgfga(ALICEBLUE), - DBColorUtil.backgroundForegroundAuto(Color.colorRGB(240, 248, 255))); + assertEquals(DBColorUtil.bgfga(ALICEBLUE), DBColorUtil.backgroundForegroundAuto(Color.colorRGB(240, 248, 255))); assertEquals(DBColorUtil.bgfga(ALICEBLUE), DBColorUtil.backgroundForegroundAuto(ALICEBLUE)); - assertEquals(DBColorUtil.bgfga(ALICEBLUE), - DBColorUtil.backgroundForegroundAuto("ALICEBLUE")); + assertEquals(DBColorUtil.bgfga(ALICEBLUE), DBColorUtil.backgroundForegroundAuto("ALICEBLUE")); assertEquals(DBColorUtil.bgfga(ALICEBLUE), 
DBColorUtil.backgroundForegroundAuto("#F0F8FF")); assertEquals(DBColorUtil.bgfga(ALICEBLUE), DBColorUtil.bgfga("#F0F8FF")); assertEquals(DBColorUtil.bgfga(ALICEBLUE), DBColorUtil.bgfga(240, 248, 255)); @@ -220,7 +202,7 @@ public void testBackgroundForegroundAuto() { public void testHeatmap() { assertEquals(DBColorUtil.bgfga(RED), DBColorUtil.heatmap(0, 0, 100, RED, BLUE)); assertEquals(DBColorUtil.bgfga(RED), - DBColorUtil.heatmap(0, 0, 100, Color.colorRGB(255, 0, 0), Color.colorRGB(0, 0, 255))); + DBColorUtil.heatmap(0, 0, 100, Color.colorRGB(255, 0, 0), Color.colorRGB(0, 0, 255))); assertEquals(DBColorUtil.bgfga(191, 0, 63), DBColorUtil.heatmap(25, 0, 100, RED, BLUE)); assertEquals(DBColorUtil.bgfga(127, 0, 127), DBColorUtil.heatmap(50, 0, 100, RED, BLUE)); assertEquals(DBColorUtil.bgfga(63, 0, 191), DBColorUtil.heatmap(75, 0, 100, RED, BLUE)); @@ -236,21 +218,19 @@ public void testHeatmap() { public void testHeatmapForeground() { assertEquals(DBColorUtil.fg(RED), DBColorUtil.heatmapForeground(0, 0, 100, RED, BLUE)); assertEquals(DBColorUtil.fg(BLUE), DBColorUtil.heatmapForeground(100, 0, 100, RED, BLUE)); - assertEquals(DBColorUtil.fg(127, 0, 127), - DBColorUtil.heatmapForeground(50, 0, 100, RED, BLUE)); - assertEquals(DBColorUtil.fg(127, 0, 127), - DBColorUtil.heatmapForeground(50, 0, 100, "RED", "BLUE")); + assertEquals(DBColorUtil.fg(127, 0, 127), DBColorUtil.heatmapForeground(50, 0, 100, RED, BLUE)); + assertEquals(DBColorUtil.fg(127, 0, 127), DBColorUtil.heatmapForeground(50, 0, 100, "RED", "BLUE")); assertEquals(DBColorUtil.heatmapFg(0, 0, 100, DBColorUtil.fg(RED), DBColorUtil.fg(BLUE)), - DBColorUtil.heatmapForeground(0, 0, 100, RED, BLUE)); + DBColorUtil.heatmapForeground(0, 0, 100, RED, BLUE)); assertEquals(DBColorUtil.heatmapFg(50, 0, 100, DBColorUtil.fg(RED), DBColorUtil.fg(BLUE)), - DBColorUtil.heatmapForeground(50, 0, 100, RED, BLUE)); + DBColorUtil.heatmapForeground(50, 0, 100, RED, BLUE)); assertEquals(DBColorUtil.heatmapFg(50, 0, 100, 
DBColorUtil.fg(RED), DBColorUtil.fg(BLUE)), - DBColorUtil.heatmapFg(50, 0, 100, "RED", "BLUE")); + DBColorUtil.heatmapFg(50, 0, 100, "RED", "BLUE")); assertEquals(DBColorUtil.heatmapFg(50, 0, 100, DBColorUtil.fg(RED), DBColorUtil.fg(BLUE)), - DBColorUtil.heatmapFg(50, 0, 100, RED, BLUE)); + DBColorUtil.heatmapFg(50, 0, 100, RED, BLUE)); assertEquals(DBColorUtil.heatmapFg(50, 0, 100, DBColorUtil.fg(RED), DBColorUtil.fg(BLUE)), - DBColorUtil.heatmapFg(50, 0, 100, "#FF0000", "BLUE")); + DBColorUtil.heatmapFg(50, 0, 100, "#FF0000", "BLUE")); t1.formatColumns("X = heatmapFg(100, 0, 100, RED, BLUE)"); t1.formatColumns("X = heatmapFg(100, 0, 100, `RED`, `BLUE`)"); @@ -273,8 +253,7 @@ public void testToLong() { assertEquals(DBColorUtil.bgfga(ALICEBLUE), DBColorUtil.toLong(Color.color("ALICEBLUE"))); assertEquals(0L, DBColorUtil.toLong((Color) null)); - assertEquals(DBColorUtil.bgfga(ALICEBLUE), - DBColorUtil.toLong(Color.colorRGB(240, 248, 255))); + assertEquals(DBColorUtil.bgfga(ALICEBLUE), DBColorUtil.toLong(Color.colorRGB(240, 248, 255))); assertEquals(0L, DBColorUtil.toLong((Color) null)); assertEquals(DBColorUtil.bgfga(0, 0, 0), DBColorUtil.toLong(Color.colorRGB(0, 0, 0))); @@ -296,10 +275,9 @@ public void testIsForegroundSet() { } private void testRowFormatWhere(final Table colorTable, final Color color) { - final long[] colorTableCol = colorTable - .getColumn( - ColumnFormattingValues.ROW_FORMAT_NAME + ColumnFormattingValues.TABLE_FORMAT_NAME) - .getLongs(0, size); + final long[] colorTableCol = + colorTable.getColumn(ColumnFormattingValues.ROW_FORMAT_NAME + ColumnFormattingValues.TABLE_FORMAT_NAME) + .getLongs(0, size); for (int i = 0; i < 6; i++) { // assertEquals(0L, colorTableCol[i]); @@ -311,7 +289,7 @@ private void testRowFormatWhere(final Table colorTable, final Color color) { private void testFormatColumns(final Table colorTable, final Color color) { final long[] colorTableCol = - colorTable.getColumn("X" + 
ColumnFormattingValues.TABLE_FORMAT_NAME).getLongs(0, size); + colorTable.getColumn("X" + ColumnFormattingValues.TABLE_FORMAT_NAME).getLongs(0, size); for (long aColorTableCol : colorTableCol) { assertEquals(DBColorUtil.toLong(color), aColorTableCol); } diff --git a/DB/src/test/java/io/deephaven/db/util/TestToMapListener.java b/DB/src/test/java/io/deephaven/db/util/TestToMapListener.java index 41e0f580fc0..3bea020b59d 100644 --- a/DB/src/test/java/io/deephaven/db/util/TestToMapListener.java +++ b/DB/src/test/java/io/deephaven/db/util/TestToMapListener.java @@ -10,16 +10,16 @@ public class TestToMapListener extends LiveTableTestCase { public void testToMap() { final QueryTable source = TstUtils.testRefreshingTable( - i(2, 4, 6, 8), - TstUtils.c("Sentinel", "A", "B", "C", "D"), - TstUtils.c("Sentinel2", "H", "I", "J", "K")); + i(2, 4, 6, 8), + TstUtils.c("Sentinel", "A", "B", "C", "D"), + TstUtils.c("Sentinel2", "H", "I", "J", "K")); io.deephaven.db.tables.utils.TableTools.show(source); final ColumnSource sentinelSource = source.getColumnSource("Sentinel"); final ColumnSource sentinel2Source = source.getColumnSource("Sentinel2"); final ToMapListener tml = ToMapListener.make(source, sentinelSource::get, - sentinelSource::getPrev, sentinel2Source::get, sentinel2Source::getPrev); + sentinelSource::getPrev, sentinel2Source::get, sentinel2Source::getPrev); source.listenForUpdates(tml); assertEquals("H", tml.get("A")); diff --git a/DB/src/test/java/io/deephaven/db/util/file/TestFileHandle.java b/DB/src/test/java/io/deephaven/db/util/file/TestFileHandle.java index ffd6eb91131..35a1e8312af 100644 --- a/DB/src/test/java/io/deephaven/db/util/file/TestFileHandle.java +++ b/DB/src/test/java/io/deephaven/db/util/file/TestFileHandle.java @@ -21,21 +21,19 @@ */ public class TestFileHandle { - private static final byte[] DATA = - new byte[] {(byte) -1, (byte) 1, Byte.MIN_VALUE, Byte.MAX_VALUE, (byte) 0}; + private static final byte[] DATA = new byte[] {(byte) -1, (byte) 1, 
Byte.MIN_VALUE, Byte.MAX_VALUE, (byte) 0}; private File file; private FileHandle FHUT; @Before public void setup() throws IOException { - file = File.createTempFile("TestFileHandle-", ".dat", - new File(Configuration.getInstance().getWorkspacePath())); + file = File.createTempFile("TestFileHandle-", ".dat", new File(Configuration.getInstance().getWorkspacePath())); FHUT = new FileHandle(FileChannel.open(file.toPath(), - StandardOpenOption.READ, StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING, - StandardOpenOption.CREATE), - () -> { - }); + StandardOpenOption.READ, StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING, + StandardOpenOption.CREATE), + () -> { + }); } @After @@ -91,8 +89,8 @@ public void testFileHandle() throws IOException { } /** - * Utility for file deletion in unit tests. Note: Each attempt after the first failure is - * preceded by an invocation of the garbage collector. + * Utility for file deletion in unit tests. Note: Each attempt after the first failure is preceded by an + * invocation of the garbage collector. 
* * @param file The file to delete * @param maxRetries The number of retries diff --git a/DB/src/test/java/io/deephaven/db/util/file/TestTrackedFileHandleFactory.java b/DB/src/test/java/io/deephaven/db/util/file/TestTrackedFileHandleFactory.java index 602ae0bfbc8..2b0291e41d3 100644 --- a/DB/src/test/java/io/deephaven/db/util/file/TestTrackedFileHandleFactory.java +++ b/DB/src/test/java/io/deephaven/db/util/file/TestTrackedFileHandleFactory.java @@ -19,8 +19,8 @@ public class TestTrackedFileHandleFactory extends BaseCachedJMockTestCase { - private static final File FILE = new File(Configuration.getInstance().getWorkspacePath(), - "TestTrackedFileHandleFactory.dat"); + private static final File FILE = + new File(Configuration.getInstance().getWorkspacePath(), "TestTrackedFileHandleFactory.dat"); private static final int CAPACITY = 100; private static final double TARGET_USAGE_RATIO = 0.9; private static final int TARGET_USAGE_THRESHOLD = 90; @@ -103,8 +103,8 @@ public void testFull() throws IOException { handles[fhi] = FHCUT.readOnlyHandleCreator.invoke(FILE); assertIsSatisfied(); } - // Synchronous cleanup brings us down to threshold, but the handle that triggered the - // cleanup is recorded afterwards. + // Synchronous cleanup brings us down to threshold, but the handle that triggered the cleanup is recorded + // afterwards. 
TestCase.assertEquals(TARGET_USAGE_THRESHOLD + 1, FHCUT.getSize()); for (int fhi = 0; fhi < handles.length; ++fhi) { diff --git a/DB/src/test/java/io/deephaven/db/util/jpy/JpyConfigFlagTest.java b/DB/src/test/java/io/deephaven/db/util/jpy/JpyConfigFlagTest.java index 71790200ee6..793b2d76619 100644 --- a/DB/src/test/java/io/deephaven/db/util/jpy/JpyConfigFlagTest.java +++ b/DB/src/test/java/io/deephaven/db/util/jpy/JpyConfigFlagTest.java @@ -40,9 +40,9 @@ public void flags() { public void flagCoverage() { for (Field field : Diag.class.getDeclaredFields()) { if (Modifier.isPublic(field.getModifiers()) - && Modifier.isStatic(field.getModifiers()) - && Modifier.isFinal(field.getModifiers()) - && field.getType().equals(int.class)) { + && Modifier.isStatic(field.getModifiers()) + && Modifier.isFinal(field.getModifiers()) + && field.getType().equals(int.class)) { Assert.assertTrue(KNOWN.contains(field.getName())); } } diff --git a/DB/src/test/java/io/deephaven/db/util/jpy/JpyConfigLoaderTest.java b/DB/src/test/java/io/deephaven/db/util/jpy/JpyConfigLoaderTest.java index 83dce1ad3d3..076dac21df0 100644 --- a/DB/src/test/java/io/deephaven/db/util/jpy/JpyConfigLoaderTest.java +++ b/DB/src/test/java/io/deephaven/db/util/jpy/JpyConfigLoaderTest.java @@ -27,13 +27,13 @@ public void configDefaults() { JpyConfig config = load("jpy-config-defaults.prop"); JpyConfig jpyConfig = new JpyConfig( - DEFAULT_PROGRAM_NAME, - DEFAULT_PYTHON_HOME, - DEFAULT_PYTHON_LIB, - DEFAULT_JPY_LIB, - DEFAULT_JDL_LIB, - DEFAULT_EXTRA_PATHS, - DEFAULT_FLAGS); + DEFAULT_PROGRAM_NAME, + DEFAULT_PYTHON_HOME, + DEFAULT_PYTHON_LIB, + DEFAULT_JPY_LIB, + DEFAULT_JDL_LIB, + DEFAULT_EXTRA_PATHS, + DEFAULT_FLAGS); Assert.assertEquals(jpyConfig, config); } @@ -53,13 +53,13 @@ public void configNullJdlLib() { JpyConfig config = load("jpy-config-null-jdlLib.prop"); JpyConfig jpyConfig = new JpyConfig( - DEFAULT_PROGRAM_NAME, - DEFAULT_PYTHON_HOME, - DEFAULT_PYTHON_LIB, - DEFAULT_JPY_LIB, - null, - 
DEFAULT_EXTRA_PATHS, - DEFAULT_FLAGS); + DEFAULT_PROGRAM_NAME, + DEFAULT_PYTHON_HOME, + DEFAULT_PYTHON_LIB, + DEFAULT_JPY_LIB, + null, + DEFAULT_EXTRA_PATHS, + DEFAULT_FLAGS); Assert.assertEquals(jpyConfig, config); } @@ -69,13 +69,13 @@ public void configNullJpyLib() { JpyConfig config = load("jpy-config-null-jpyLib.prop"); JpyConfig jpyConfig = new JpyConfig( - DEFAULT_PROGRAM_NAME, - DEFAULT_PYTHON_HOME, - DEFAULT_PYTHON_LIB, - null, - DEFAULT_JDL_LIB, - DEFAULT_EXTRA_PATHS, - DEFAULT_FLAGS); + DEFAULT_PROGRAM_NAME, + DEFAULT_PYTHON_HOME, + DEFAULT_PYTHON_LIB, + null, + DEFAULT_JDL_LIB, + DEFAULT_EXTRA_PATHS, + DEFAULT_FLAGS); Assert.assertEquals(jpyConfig, config); } @@ -85,13 +85,13 @@ public void configNullPythonLib() { JpyConfig config = load("jpy-config-null-pythonLib.prop"); JpyConfig jpyConfig = new JpyConfig( - DEFAULT_PROGRAM_NAME, - DEFAULT_PYTHON_HOME, - null, - DEFAULT_JPY_LIB, - DEFAULT_JDL_LIB, - DEFAULT_EXTRA_PATHS, - DEFAULT_FLAGS); + DEFAULT_PROGRAM_NAME, + DEFAULT_PYTHON_HOME, + null, + DEFAULT_JPY_LIB, + DEFAULT_JDL_LIB, + DEFAULT_EXTRA_PATHS, + DEFAULT_FLAGS); Assert.assertEquals(jpyConfig, config); } @@ -101,13 +101,13 @@ public void configNullPythonHome() { JpyConfig config = load("jpy-config-null-pythonHome.prop"); JpyConfig jpyConfig = new JpyConfig( - DEFAULT_PROGRAM_NAME, - null, - DEFAULT_PYTHON_LIB, - DEFAULT_JPY_LIB, - DEFAULT_JDL_LIB, - DEFAULT_EXTRA_PATHS, - DEFAULT_FLAGS); + DEFAULT_PROGRAM_NAME, + null, + DEFAULT_PYTHON_LIB, + DEFAULT_JPY_LIB, + DEFAULT_JDL_LIB, + DEFAULT_EXTRA_PATHS, + DEFAULT_FLAGS); Assert.assertEquals(jpyConfig, config); } @@ -117,13 +117,13 @@ public void configNullProgramName() { JpyConfig config = load("jpy-config-null-programName.prop"); JpyConfig jpyConfig = new JpyConfig( - null, - DEFAULT_PYTHON_HOME, - DEFAULT_PYTHON_LIB, - DEFAULT_JPY_LIB, - DEFAULT_JDL_LIB, - DEFAULT_EXTRA_PATHS, - DEFAULT_FLAGS); + null, + DEFAULT_PYTHON_HOME, + DEFAULT_PYTHON_LIB, + DEFAULT_JPY_LIB, + DEFAULT_JDL_LIB, + 
DEFAULT_EXTRA_PATHS, + DEFAULT_FLAGS); Assert.assertEquals(jpyConfig, config); } @@ -133,13 +133,13 @@ public void configExtraPaths() { JpyConfig config = load("jpy-config-extra-paths.prop"); JpyConfig jpyConfig = new JpyConfig( - DEFAULT_PROGRAM_NAME, - DEFAULT_PYTHON_HOME, - DEFAULT_PYTHON_LIB, - DEFAULT_JPY_LIB, - DEFAULT_JDL_LIB, - Arrays.asList(Paths.get("/e1"), Paths.get("/e2"), Paths.get("/e3")), - DEFAULT_FLAGS); + DEFAULT_PROGRAM_NAME, + DEFAULT_PYTHON_HOME, + DEFAULT_PYTHON_LIB, + DEFAULT_JPY_LIB, + DEFAULT_JDL_LIB, + Arrays.asList(Paths.get("/e1"), Paths.get("/e2"), Paths.get("/e3")), + DEFAULT_FLAGS); Assert.assertEquals(jpyConfig, config); } @@ -149,13 +149,13 @@ public void configFlags() { JpyConfig config = load("jpy-config-flags.prop"); JpyConfig jpyConfig = new JpyConfig( - DEFAULT_PROGRAM_NAME, - DEFAULT_PYTHON_HOME, - DEFAULT_PYTHON_LIB, - DEFAULT_JPY_LIB, - DEFAULT_JDL_LIB, - DEFAULT_EXTRA_PATHS, - EnumSet.of(Flag.MEM, Flag.EXEC, Flag.JVM)); + DEFAULT_PROGRAM_NAME, + DEFAULT_PYTHON_HOME, + DEFAULT_PYTHON_LIB, + DEFAULT_JPY_LIB, + DEFAULT_JDL_LIB, + DEFAULT_EXTRA_PATHS, + EnumSet.of(Flag.MEM, Flag.EXEC, Flag.JVM)); Assert.assertEquals(jpyConfig, config); } @@ -211,17 +211,15 @@ public void configRelativeProgramName() { } private static JpyConfig load(String resource) { - // a bit hacky that we can't just pass resource as is, but we aren't allowed to pass our own - // class + // a bit hacky that we can't just pass resource as is, but we aren't allowed to pass our own class // context for configuration loading... 
Configuration configuration = loadConfig( - String.format("io/deephaven/db/util/jpy/%s", resource)); + String.format("io/deephaven/db/util/jpy/%s", resource)); return new JpyConfigLoader(configuration).asJpyConfig(); } private static Configuration loadConfig(String configFile) { - // todo: there should be a MUCH easier way to do this - very ugly b/c dependent on system - // props, + // todo: there should be a MUCH easier way to do this - very ugly b/c dependent on system props, // and Configuration not an interface... String existingValue = System.getProperty("Configuration.rootFile"); System.setProperty("Configuration.rootFile", configFile); diff --git a/DB/src/test/java/io/deephaven/db/util/liveness/TestLiveness.java b/DB/src/test/java/io/deephaven/db/util/liveness/TestLiveness.java index 537a22bc916..c32ab5922ca 100644 --- a/DB/src/test/java/io/deephaven/db/util/liveness/TestLiveness.java +++ b/DB/src/test/java/io/deephaven/db/util/liveness/TestLiveness.java @@ -43,15 +43,15 @@ public void tearDown() throws Exception { public void testRecursion() { // noinspection AutoBoxing final Table input = TstUtils.testRefreshingTable( - TstUtils.i(2, 3, 6, 7, 8, 10, 12, 15, 16), - TstUtils.c("GroupedInts", 1, 1, 2, 2, 2, 3, 3, 3, 3)); + TstUtils.i(2, 3, 6, 7, 8, 10, 12, 15, 16), + TstUtils.c("GroupedInts", 1, 1, 2, 2, 2, 3, 3, 3, 3)); Table result = null; for (int ii = 0; ii < 4096; ++ii) { if (result == null) { result = input; } else { result = TableTools.merge(result, input).updateView("GroupedInts=GroupedInts+1") - .updateView("GroupedInts=GroupedInts-1"); + .updateView("GroupedInts=GroupedInts-1"); } } } diff --git a/DB/src/test/java/io/deephaven/db/util/scripts/TestScriptRepository.java b/DB/src/test/java/io/deephaven/db/util/scripts/TestScriptRepository.java index 69fff8a6dfc..da24733cad0 100644 --- a/DB/src/test/java/io/deephaven/db/util/scripts/TestScriptRepository.java +++ b/DB/src/test/java/io/deephaven/db/util/scripts/TestScriptRepository.java @@ -54,8 +54,7 @@ 
public void setUp() throws Exception { git.commit().setMessage("Initial commit.").call(); - Git.cloneRepository().setDirectory(repo.toFile()) - .setURI(dirToImport.toAbsolutePath().toString()).call(); + Git.cloneRepository().setDirectory(repo.toFile()).setURI(dirToImport.toAbsolutePath().toString()).call(); StoredConfig config = git.getRepository().getConfig(); config.setString("remote", "origin", "url", repo.toAbsolutePath().toString()); @@ -76,24 +75,21 @@ public void testGetAvailableDisplayPathsWithPrefix() throws IOException, GitAPIE testGetAvailableScriptPaths(true, false); } - public void testGetAvailableDisplayPathsWithPrefixAndReset() - throws IOException, GitAPIException { + public void testGetAvailableDisplayPathsWithPrefixAndReset() throws IOException, GitAPIException { testGetAvailableScriptPaths(true, true); } - private void testGetAvailableScriptPaths(boolean prefixDisplayPathsWithRepoName, - boolean resetGitLockFiles) throws IOException, GitAPIException { + private void testGetAvailableScriptPaths(boolean prefixDisplayPathsWithRepoName, boolean resetGitLockFiles) + throws IOException, GitAPIException { StreamLoggerImpl logger = new StreamLoggerImpl(System.out, LogLevel.DEBUG); Path path = new File(tempDir + "/checkout").toPath(); - ScriptRepository scriptRepository = new ScriptRepository(logger, "Dummy", - Collections.singleton("*"), repo.toAbsolutePath().toString(), true, false, "origin", - "master", prefixDisplayPathsWithRepoName, path.toAbsolutePath(), resetGitLockFiles, - Paths.get(path.toAbsolutePath().toString(), "path1"), - Paths.get(path.toAbsolutePath().toString(), "path2")); - - Set result = - scriptRepository.getAvailableScriptDisplayPaths(ScriptPathLoaderState.NONE); + ScriptRepository scriptRepository = new ScriptRepository(logger, "Dummy", Collections.singleton("*"), + repo.toAbsolutePath().toString(), true, false, "origin", "master", prefixDisplayPathsWithRepoName, + path.toAbsolutePath(), resetGitLockFiles, 
Paths.get(path.toAbsolutePath().toString(), "path1"), + Paths.get(path.toAbsolutePath().toString(), "path2")); + + Set result = scriptRepository.getAvailableScriptDisplayPaths(ScriptPathLoaderState.NONE); ScriptPathLoaderState state = scriptRepository.getState(); String prefix = prefixDisplayPathsWithRepoName ? "Dummy/" : ""; @@ -134,16 +130,15 @@ public void testGetAvailableScriptPathsWithLockfileReset() throws IOException { testGetAvailableScriptPathsWithLockfile(true); } - private void testGetAvailableScriptPathsWithLockfile(final boolean resetLockFile) - throws IOException { + private void testGetAvailableScriptPathsWithLockfile(final boolean resetLockFile) throws IOException { final StreamLoggerImpl logger = new StreamLoggerImpl(System.out, LogLevel.DEBUG); // Need to create repo so there's a valid git in which to create a lockfile final Path path = new File(tempDir + "/checkout").toPath(); - new ScriptRepository(logger, "Dummy", Collections.singleton("*"), - repo.toAbsolutePath().toString(), true, false, "origin", "master", true, - path.toAbsolutePath(), false, Paths.get(path.toAbsolutePath().toString(), "path1"), - Paths.get(path.toAbsolutePath().toString(), "path2")); + new ScriptRepository(logger, "Dummy", Collections.singleton("*"), repo.toAbsolutePath().toString(), true, false, + "origin", "master", true, path.toAbsolutePath(), false, + Paths.get(path.toAbsolutePath().toString(), "path1"), + Paths.get(path.toAbsolutePath().toString(), "path2")); final Path gitPath = path.resolve(".git"); if (!gitPath.toFile().exists()) { @@ -156,11 +151,10 @@ private void testGetAvailableScriptPathsWithLockfile(final boolean resetLockFile } try { - new ScriptRepository(logger, "Dummy", Collections.singleton("*"), - repo.toAbsolutePath().toString(), true, false, "origin", "master", true, - path.toAbsolutePath(), resetLockFile, - Paths.get(path.toAbsolutePath().toString(), "path1"), - Paths.get(path.toAbsolutePath().toString(), "path2")); + new ScriptRepository(logger, 
"Dummy", Collections.singleton("*"), repo.toAbsolutePath().toString(), true, + false, "origin", "master", true, path.toAbsolutePath(), resetLockFile, + Paths.get(path.toAbsolutePath().toString(), "path1"), + Paths.get(path.toAbsolutePath().toString(), "path2")); if (!resetLockFile) { fail("Expected exception from script repo setup"); } diff --git a/DB/src/test/java/io/deephaven/db/util/serialization/TestSerializationUtils.java b/DB/src/test/java/io/deephaven/db/util/serialization/TestSerializationUtils.java index a40564bcbff..9fad59ea3dc 100644 --- a/DB/src/test/java/io/deephaven/db/util/serialization/TestSerializationUtils.java +++ b/DB/src/test/java/io/deephaven/db/util/serialization/TestSerializationUtils.java @@ -93,14 +93,13 @@ private void readObject(@NotNull final ObjectInputStream in) throws IOException @Test public void testAllTypes() throws Exception { // noinspection AutoBoxing - final ArrayTuple fullInput = - new ArrayTuple((byte) 1, (short) 2, 3, 4L, 5.0F, 6.0D, true, '7', "08", + final ArrayTuple fullInput = new ArrayTuple((byte) 1, (short) 2, 3, 4L, 5.0F, 6.0D, true, '7', "08", new DBDateTime(9), new Date(10), new ObjectObjectTuple("11-A", "11-B"), new ObjectObjectObjectTuple("12-X", "12-Y", "12-Z"), new EE(13), new SE(14)); - final ArrayTuple nullInput = new ArrayTuple(null, null, null, null, null, null, null, null, - null, null, null, null, null, null); + final ArrayTuple nullInput = + new ArrayTuple(null, null, null, null, null, null, null, null, null, null, null, null, null, null); final ByteArrayOutputStream bytesOut = new ByteArrayOutputStream(); final DataOutputStream dataOut = new DataOutputStream(bytesOut); @@ -122,13 +121,9 @@ public void testAllTypes() throws Exception { final ObjectInputStream objectIn = new ObjectInputStream(dataIn); final TIntObjectMap cachedReaders = new TIntObjectHashMap<>(); - TestCase.assertEquals(nullInput, - new ArrayTuple().initializeExternalStreaming(objectIn, cachedReaders)); - TestCase.assertEquals(fullInput, 
- new ArrayTuple().initializeExternalStreaming(objectIn, cachedReaders)); - TestCase.assertEquals(nullInput, - new ArrayTuple().initializeExternalStreaming(objectIn, cachedReaders)); - TestCase.assertEquals(fullInput, - new ArrayTuple().initializeExternalStreaming(objectIn, cachedReaders)); + TestCase.assertEquals(nullInput, new ArrayTuple().initializeExternalStreaming(objectIn, cachedReaders)); + TestCase.assertEquals(fullInput, new ArrayTuple().initializeExternalStreaming(objectIn, cachedReaders)); + TestCase.assertEquals(nullInput, new ArrayTuple().initializeExternalStreaming(objectIn, cachedReaders)); + TestCase.assertEquals(fullInput, new ArrayTuple().initializeExternalStreaming(objectIn, cachedReaders)); } } diff --git a/DB/src/test/java/io/deephaven/db/v2/CountingTable.java b/DB/src/test/java/io/deephaven/db/v2/CountingTable.java index 56ccc9c6749..0cc8454e7d1 100644 --- a/DB/src/test/java/io/deephaven/db/v2/CountingTable.java +++ b/DB/src/test/java/io/deephaven/db/v2/CountingTable.java @@ -16,24 +16,23 @@ import java.util.Map; /** - * Utility for generating a table that counts the operations performed on its ColumnSource's. Used - * by the grouping tests to verify that we are not doing unnecessary work. + * Utility for generating a table that counts the operations performed on its ColumnSource's. Used by the grouping tests + * to verify that we are not doing unnecessary work. 
*/ class CountingTable { @NotNull static QueryTable getCountingTable(QueryTable nonCountingTable) { Map countingSources = new LinkedHashMap<>(); nonCountingTable.getColumnSourceMap().entrySet().stream() - .forEach(x -> countingSources.put(x.getKey(), getCountingColumnSource(x.getValue()))); + .forEach(x -> countingSources.put(x.getKey(), getCountingColumnSource(x.getValue()))); return new QueryTable(nonCountingTable.getIndex(), countingSources); } - private static ColumnSource getCountingColumnSource( - final ColumnSource inputColumnSource) { + private static ColumnSource getCountingColumnSource(final ColumnSource inputColumnSource) { // noinspection unchecked return (ColumnSource) Proxy.newProxyInstance(IndexGroupingTest.class.getClassLoader(), - new Class[] {MethodCounter.class, ColumnSource.class}, - new CountingColumnSourceInvocationHandler(inputColumnSource)); + new Class[] {MethodCounter.class, ColumnSource.class}, + new CountingColumnSourceInvocationHandler(inputColumnSource)); } interface MethodCounter { @@ -75,17 +74,15 @@ public Object invoke(Object proxy, Method method, Object[] args) throws Throwabl return null; case "getMethodCount": Require.eq(args.length, "args.length", 1, "1"); - Require.eq(method.getParameterCount(), "method.getParameterCount()", 1, - "1"); + Require.eq(method.getParameterCount(), "method.getParameterCount()", 1, "1"); if (method.getParameterTypes()[0].equals(Method.class)) { // noinspection SuspiciousMethodCalls return methodCounts.get(args[0]); } else if (method.getParameterTypes()[0].equals(String.class)) { - return methodCounts.entrySet().stream() - .filter(x -> x.getKey().getName().equals(args[0])) - .mapToInt(Map.Entry::getValue).sum(); + return methodCounts.entrySet().stream().filter(x -> x.getKey().getName().equals(args[0])) + .mapToInt(Map.Entry::getValue).sum(); } else { throw new UnsupportedOperationException(); } @@ -95,9 +92,8 @@ public Object invoke(Object proxy, Method method, Object[] args) throws Throwabl } // 
noinspection ConfusingArgumentToVarargsMethod - return wrappedColumnSource.getClass() - .getMethod(method.getName(), method.getParameterTypes()) - .invoke(wrappedColumnSource, args); + return wrappedColumnSource.getClass().getMethod(method.getName(), method.getParameterTypes()) + .invoke(wrappedColumnSource, args); } } } diff --git a/DB/src/test/java/io/deephaven/db/v2/ErrorListener.java b/DB/src/test/java/io/deephaven/db/v2/ErrorListener.java index ce5c8f3b9f3..e2796ab6127 100644 --- a/DB/src/test/java/io/deephaven/db/v2/ErrorListener.java +++ b/DB/src/test/java/io/deephaven/db/v2/ErrorListener.java @@ -16,8 +16,7 @@ public void onUpdate(final Update upstream) { } @Override - public void onFailureInternal(Throwable originalException, - UpdatePerformanceTracker.Entry sourceEntry) { + public void onFailureInternal(Throwable originalException, UpdatePerformanceTracker.Entry sourceEntry) { this.originalException = originalException; } } diff --git a/DB/src/test/java/io/deephaven/db/v2/EvalNugget.java b/DB/src/test/java/io/deephaven/db/v2/EvalNugget.java index f27d32c6660..5dae34b2e28 100644 --- a/DB/src/test/java/io/deephaven/db/v2/EvalNugget.java +++ b/DB/src/test/java/io/deephaven/db/v2/EvalNugget.java @@ -60,8 +60,7 @@ public void onUpdate(final Update upstream) { } @Override - public void onFailureInternal(Throwable originalException, - UpdatePerformanceTracker.Entry sourceEntry) { + public void onFailureInternal(Throwable originalException, UpdatePerformanceTracker.Entry sourceEntry) { exception = originalException; final StringWriter errors = new StringWriter(); if (description != null) { @@ -120,8 +119,7 @@ void showResult(String label, Table e) { } void checkDifferences(String msg, Table recomputed) { - TstUtils.assertTableEquals(msg, forComparison(recomputed), forComparison(originalValue), - diffItems()); + TstUtils.assertTableEquals(msg, forComparison(recomputed), forComparison(originalValue), diffItems()); } @NotNull @@ -139,20 +137,20 @@ public void show() 
{ final Table originalForComparison = forComparison(originalValue); final int maxLines = 100; - final Pair diffPair = TableTools.diffPair(originalForComparison, - recomputedForComparison, maxLines, diffItems()); + final Pair diffPair = + TableTools.diffPair(originalForComparison, recomputedForComparison, maxLines, diffItems()); if (diffPair.getFirst().equals("")) { showResult("Recomputed Table:", recomputedTable); } else if (!diffPair.getFirst().equals("")) { - final long numTableRows = Math.min(maxLines, - Math.max(originalForComparison.size(), recomputedForComparison.size())); + final long numTableRows = + Math.min(maxLines, Math.max(originalForComparison.size(), recomputedForComparison.size())); final long firstRow = Math.max(0, diffPair.getSecond() - 5); - final long lastRow = Math.min(firstRow + numTableRows, - Math.min(firstRow + maxLines, diffPair.getSecond() + 5)); + final long lastRow = + Math.min(firstRow + numTableRows, Math.min(firstRow + maxLines, diffPair.getSecond() + 5)); - System.out.println("Recomputed Table Differs:\n" + diffPair.getFirst() - + "\nRecomputed Table Rows [" + firstRow + ", " + lastRow + "]:"); + System.out.println("Recomputed Table Differs:\n" + diffPair.getFirst() + "\nRecomputed Table Rows [" + + firstRow + ", " + lastRow + "]:"); TableTools.showWithIndex(recomputedForComparison, firstRow, lastRow + 1); System.out.println("Incremental Table Rows [" + firstRow + ", " + lastRow + "]:"); TableTools.showWithIndex(originalForComparison, firstRow, lastRow + 1); diff --git a/DB/src/test/java/io/deephaven/db/v2/FailureListener.java b/DB/src/test/java/io/deephaven/db/v2/FailureListener.java index 0588730bbb1..227f6d81445 100644 --- a/DB/src/test/java/io/deephaven/db/v2/FailureListener.java +++ b/DB/src/test/java/io/deephaven/db/v2/FailureListener.java @@ -12,7 +12,7 @@ public void onUpdate(final io.deephaven.db.v2.ShiftAwareListener.Update upstream @Override public void onFailureInternal(Throwable originalException, - 
io.deephaven.db.v2.utils.UpdatePerformanceTracker.Entry sourceEntry) { + io.deephaven.db.v2.utils.UpdatePerformanceTracker.Entry sourceEntry) { originalException.printStackTrace(); TestCase.fail(originalException.getMessage()); } diff --git a/DB/src/test/java/io/deephaven/db/v2/FuzzerPrintListener.java b/DB/src/test/java/io/deephaven/db/v2/FuzzerPrintListener.java index 50a6ccaf686..b5c486e2402 100644 --- a/DB/src/test/java/io/deephaven/db/v2/FuzzerPrintListener.java +++ b/DB/src/test/java/io/deephaven/db/v2/FuzzerPrintListener.java @@ -34,7 +34,7 @@ public void onUpdate(final Update upstream) { @Override public void onFailureInternal(Throwable originalException, - io.deephaven.db.v2.utils.UpdatePerformanceTracker.Entry sourceEntry) { + io.deephaven.db.v2.utils.UpdatePerformanceTracker.Entry sourceEntry) { System.out.println("Error for: " + description); originalException.printStackTrace(); } diff --git a/DB/src/test/java/io/deephaven/db/v2/FuzzerTest.java b/DB/src/test/java/io/deephaven/db/v2/FuzzerTest.java index f643facfa54..20a17f767bc 100644 --- a/DB/src/test/java/io/deephaven/db/v2/FuzzerTest.java +++ b/DB/src/test/java/io/deephaven/db/v2/FuzzerTest.java @@ -34,10 +34,9 @@ @Category(SerialTest.class) public class FuzzerTest { private static final String TEST_ROOT = System.getProperty("devroot", "."); - private static final String DB_ROOT = - TEST_ROOT + "/tmp/" + FuzzerTest.class.getSimpleName() + "_DBRoot"; + private static final String DB_ROOT = TEST_ROOT + "/tmp/" + FuzzerTest.class.getSimpleName() + "_DBRoot"; private static final boolean REALTIME_FUZZER_ENABLED = - Configuration.getInstance().getBooleanWithDefault("FuzzerTest.realTime", false); + Configuration.getInstance().getBooleanWithDefault("FuzzerTest.realTime", false); JUnit4LiveTableTestCase framework = new JUnit4LiveTableTestCase(); @@ -105,10 +104,8 @@ private GroovyDeephavenSession getGroovySession() throws IOException { return getGroovySession(null); } - private GroovyDeephavenSession 
getGroovySession(@Nullable TimeProvider timeProvider) - throws IOException { - final GroovyDeephavenSession session = - new GroovyDeephavenSession(RunScripts.serviceLoader()); + private GroovyDeephavenSession getGroovySession(@Nullable TimeProvider timeProvider) throws IOException { + final GroovyDeephavenSession session = new GroovyDeephavenSession(RunScripts.serviceLoader()); QueryScope.setScope(session.newQueryScope()); return session; } @@ -119,10 +116,9 @@ public void testFuzzer() throws IOException, InterruptedException { } private void testFuzzerScriptFile(final long timeSeed, String s, boolean realtime) - throws IOException, InterruptedException { + throws IOException, InterruptedException { final Random timeRandom = new Random(timeSeed); - final String groovyString = - FileUtils.readTextFile(new File(Configuration.getInstance().getDevRootPath() + s)); + final String groovyString = FileUtils.readTextFile(new File(Configuration.getInstance().getDevRootPath() + s)); final DBDateTime fakeStart = DBTimeUtils.convertDateTime("2020-03-17T13:53:25.123456 NY"); final MutableLong now = new MutableLong(fakeStart.getNanos()); @@ -171,8 +167,8 @@ public void testInterestingFuzzerSeeds() throws IOException, InterruptedExceptio query.append(qf.getTablePreamble(fuzzDescriptor.tableSeed)); query.append(qf.generateQuery(fuzzDescriptor.tableSeed)); - System.out.println("Running test=======================\n TableSeed: " - + fuzzDescriptor.tableSeed + " QuerySeed: " + fuzzDescriptor.tableSeed); + System.out.println("Running test=======================\n TableSeed: " + fuzzDescriptor.tableSeed + + " QuerySeed: " + fuzzDescriptor.tableSeed); System.out.println(query.toString()); session.evaluateScript(query.toString()); @@ -208,8 +204,7 @@ public void testInterestingFuzzerSeeds() throws IOException, InterruptedExceptio @Test public void testLargeSetOfFuzzerQueriesRealtime() throws IOException, InterruptedException { - Assume.assumeTrue("Realtime Fuzzer can have a positive 
feedback loop.", - REALTIME_FUZZER_ENABLED); + Assume.assumeTrue("Realtime Fuzzer can have a positive feedback loop.", REALTIME_FUZZER_ENABLED); runLargeFuzzerSetWithSeed(DBDateTime.now().getNanos(), 0, 99, true, 120, 1000); } @@ -222,15 +217,14 @@ public void testLargeSetOfFuzzerQueriesSimTime() throws IOException, Interrupted try (final SafeCloseable ignored = LivenessScopeStack.open()) { System.out.println("// Segment: " + segment); final int firstRun = segment * 10; - runLargeFuzzerSetWithSeed(seed1 + iteration, firstRun, firstRun + 10, false, - 180, 0); + runLargeFuzzerSetWithSeed(seed1 + iteration, firstRun, firstRun + 10, false, 180, 0); } } } } - private void runLargeFuzzerSetWithSeed(long mainTestSeed, int firstRun, int lastRun, - boolean realtime, int stepsToRun, int sleepTime) throws IOException, InterruptedException { + private void runLargeFuzzerSetWithSeed(long mainTestSeed, int firstRun, int lastRun, boolean realtime, + int stepsToRun, int sleepTime) throws IOException, InterruptedException { final QueryFactory qf = new QueryFactory(); System.out.println("// TestSeed: " + mainTestSeed + "L"); @@ -258,8 +252,7 @@ private void runLargeFuzzerSetWithSeed(long mainTestSeed, int firstRun, int last final String query = qf.generateQuery(currentSeed); if (runNum >= firstRun) { - final StringBuilder sb = - new StringBuilder("//========================================\n"); + final StringBuilder sb = new StringBuilder("//========================================\n"); sb.append("// Seed: ").append(currentSeed).append("L\n\n"); sb.append(query).append("\n"); System.out.println(sb.toString()); @@ -289,13 +282,11 @@ private void runLargeFuzzerSetWithSeed(long mainTestSeed, int firstRun, int last // noinspection unchecked,OptionalGetWithoutIsPresent final long maxTableSize = session.getBinding().getVariables().values().stream() - .filter(x -> x instanceof Table).mapToLong(x -> ((Table) x).size()).max() - .getAsLong(); - System.out.println( - 
(System.currentTimeMillis() - startTime) + "ms: After Step = " + fstep + ", Used = " + .filter(x -> x instanceof Table).mapToLong(x -> ((Table) x).size()).max().getAsLong(); + System.out.println((System.currentTimeMillis() - startTime) + "ms: After Step = " + fstep + ", Used = " + commaFormat.format(usedMemory) + ", Free = " + commaFormat.format(freeMemory) - + " / Total Memory: " + commaFormat.format(totalMemory) + ", TimeTable Size = " - + timeTable.size() + ", Largest Table: " + maxTableSize); + + " / Total Memory: " + commaFormat.format(totalMemory) + ", TimeTable Size = " + timeTable.size() + + ", Largest Table: " + maxTableSize); if (realtime) { Thread.sleep(sleepTime); @@ -309,11 +300,8 @@ private void runLargeFuzzerSetWithSeed(long mainTestSeed, int firstRun, int last } final long loopEnd = System.currentTimeMillis(); - System.out.println( - "Elapsed time: " + (loopEnd - start) + "ms, loop: " + (loopEnd - loopStart) + "ms" - + (realtime ? "" - : (", sim: " - + (double) (now.longValue() - fakeStart.getNanos()) / DBTimeUtils.SECOND)) + System.out.println("Elapsed time: " + (loopEnd - start) + "ms, loop: " + (loopEnd - loopStart) + "ms" + + (realtime ? 
"" : (", sim: " + (double) (now.longValue() - fakeStart.getNanos()) / DBTimeUtils.SECOND)) + ", ttSize: " + timeTable.size()); } @@ -326,8 +314,7 @@ private void annotateBinding(GroovyDeephavenSession session) { }); } - private void addPrintListener(GroovyDeephavenSession session, final String variable, - List hardReferences) { + private void addPrintListener(GroovyDeephavenSession session, final String variable, List hardReferences) { final Table table = (Table) session.getVariable(variable); System.out.println(variable); TableTools.showWithIndex(table); @@ -339,8 +326,7 @@ private void addPrintListener(GroovyDeephavenSession session, final String varia } } - private void validateBindingTables(GroovyDeephavenSession session, - List hardReferences) { + private void validateBindingTables(GroovyDeephavenSession session, List hardReferences) { // noinspection unchecked session.getBinding().getVariables().forEach((k, v) -> { if (v instanceof QueryTable && ((QueryTable) v).isRefreshing()) { @@ -349,14 +335,13 @@ private void validateBindingTables(GroovyDeephavenSession session, }); } - private void validateBindingTableMapConstituents(GroovyDeephavenSession session, - List hardReferences) { + private void validateBindingTableMapConstituents(GroovyDeephavenSession session, List hardReferences) { // noinspection unchecked session.getBinding().getVariables().forEach((k, v) -> { if (v instanceof LocalTableMap && ((LocalTableMap) v).isRefreshing()) { for (final Object tablemapKey : ((LocalTableMap) v).getKeySet()) { addValidator(hardReferences, k.toString() + "_" + tablemapKey, - (QueryTable) ((LocalTableMap) v).get(tablemapKey)); + (QueryTable) ((LocalTableMap) v).get(tablemapKey)); } final TableMap.Listener listener = (key, table) -> { addValidator(hardReferences, k.toString() + "_" + key, (QueryTable) table); diff --git a/DB/src/test/java/io/deephaven/db/v2/GenerateTableUpdates.java b/DB/src/test/java/io/deephaven/db/v2/GenerateTableUpdates.java index 
9a0270fd95f..0696c010e61 100644 --- a/DB/src/test/java/io/deephaven/db/v2/GenerateTableUpdates.java +++ b/DB/src/test/java/io/deephaven/db/v2/GenerateTableUpdates.java @@ -24,13 +24,13 @@ public class GenerateTableUpdates { static public void generateTableUpdates(int size, Random random, QueryTable table, - TstUtils.ColumnInfo[] columnInfo) { + TstUtils.ColumnInfo[] columnInfo) { final Index[] result = computeTableUpdates(size, random, table, columnInfo); table.notifyListeners(result[0], result[1], result[2]); } public static void generateAppends(final int size, Random random, QueryTable table, - TstUtils.ColumnInfo[] columnInfos) { + TstUtils.ColumnInfo[] columnInfos) { final long firstKey = table.getIndex().lastKey() + 1; final int randomSize = 1 + random.nextInt(size); final Index keysToAdd = Index.FACTORY.getIndexByRange(firstKey, firstKey + randomSize - 1); @@ -51,28 +51,26 @@ public static void generateAppends(final int size, Random random, QueryTable tab throw new RuntimeException(e); } } - table.notifyListeners(keysToAdd, Index.FACTORY.getEmptyIndex(), - Index.FACTORY.getEmptyIndex()); + table.notifyListeners(keysToAdd, Index.FACTORY.getEmptyIndex(), Index.FACTORY.getEmptyIndex()); } static public Index[] computeTableUpdates(int size, Random random, QueryTable table, - TstUtils.ColumnInfo[] columnInfo) { + TstUtils.ColumnInfo[] columnInfo) { return computeTableUpdates(size, random, table, columnInfo, true, true, true); } static public Index[] computeTableUpdates(int size, Random random, QueryTable table, - TstUtils.ColumnInfo[] columnInfo, boolean add, boolean remove, boolean modify) { + TstUtils.ColumnInfo[] columnInfo, boolean add, boolean remove, boolean modify) { final Index keysToRemove; if (remove && table.getIndex().size() > 0) { - keysToRemove = TstUtils.selectSubIndexSet( - random.nextInt(table.getIndex().intSize() + 1), table.getIndex(), random); + keysToRemove = TstUtils.selectSubIndexSet(random.nextInt(table.getIndex().intSize() + 1), 
table.getIndex(), + random); } else { keysToRemove = TstUtils.i(); } final Index keysToAdd = - add ? TstUtils.newIndex(random.nextInt(size / 2 + 1), table.getIndex(), random) - : TstUtils.i(); + add ? TstUtils.newIndex(random.nextInt(size / 2 + 1), table.getIndex(), random) : TstUtils.i(); TstUtils.removeRows(table, keysToRemove); for (final Index.Iterator iterator = keysToRemove.iterator(); iterator.hasNext();) { final long next = iterator.nextLong(); @@ -83,8 +81,8 @@ static public Index[] computeTableUpdates(int size, Random random, QueryTable ta final Index keysToModify; if (modify && table.getIndex().size() > 0) { - keysToModify = TstUtils.selectSubIndexSet(random.nextInt((int) table.getIndex().size()), - table.getIndex(), random); + keysToModify = + TstUtils.selectSubIndexSet(random.nextInt((int) table.getIndex().size()), table.getIndex(), random); } else { keysToModify = TstUtils.i(); } @@ -126,8 +124,7 @@ public static class SimulationProfile { int MOD_ADDITIONAL_COLUMN = 50; // probability of modifying each column void validate() { - validateGroup(SHIFT_10_PERCENT_KEY_SPACE, SHIFT_10_PERCENT_POS_SPACE, - SHIFT_AGGRESSIVELY); + validateGroup(SHIFT_10_PERCENT_KEY_SPACE, SHIFT_10_PERCENT_POS_SPACE, SHIFT_AGGRESSIVELY); validateGroup(SHIFT_LIMIT_50_PERCENT); validateGroup(MOD_ADDITIONAL_COLUMN); } @@ -144,23 +141,22 @@ private void validateGroup(int... 
opts) { static public final SimulationProfile DEFAULT_PROFILE = new SimulationProfile(); - static public void generateShiftAwareTableUpdates(final SimulationProfile profile, - final int targetUpdateSize, - final Random random, final QueryTable table, - final TstUtils.ColumnInfo[] columnInfo) { + static public void generateShiftAwareTableUpdates(final SimulationProfile profile, final int targetUpdateSize, + final Random random, final QueryTable table, + final TstUtils.ColumnInfo[] columnInfo) { profile.validate(); try (final Index index = table.getIndex().clone()) { - final TstUtils.ColumnInfo[] mutableColumns = Arrays.stream(columnInfo) - .filter(ci -> !ci.immutable).toArray(TstUtils.ColumnInfo[]::new); + final TstUtils.ColumnInfo[] mutableColumns = + Arrays.stream(columnInfo).filter(ci -> !ci.immutable).toArray(TstUtils.ColumnInfo[]::new); final boolean hasImmutableColumns = columnInfo.length > mutableColumns.length; final ShiftAwareListener.Update update = new ShiftAwareListener.Update(); // Removes in pre-shift keyspace. if (index.size() > 0) { - update.removed = TstUtils.selectSubIndexSet( - Math.min(index.intSize(), random.nextInt(targetUpdateSize)), index, random); + update.removed = TstUtils.selectSubIndexSet(Math.min(index.intSize(), random.nextInt(targetUpdateSize)), + index, random); index.remove(update.removed); // remove blatted and explicit removals } else { update.removed = TstUtils.i(); @@ -182,19 +178,15 @@ static public void generateShiftAwareTableUpdates(final SimulationProfile profil if (shiftBuilder.nonempty()) { minShift = lastDest.longValue() + 1 - first; maxShift = Math.max(minShift, - random.nextInt(100) < profile.SHIFT_LIMIT_50_PERCENT ? (len + 1) / 2 - : 2 * len); + random.nextInt(100) < profile.SHIFT_LIMIT_50_PERCENT ? (len + 1) / 2 : 2 * len); } else { - maxShift = - random.nextInt(100) < profile.SHIFT_LIMIT_50_PERCENT ? (len + 1) / 2 - : 2 * len; + maxShift = random.nextInt(100) < profile.SHIFT_LIMIT_50_PERCENT ? 
(len + 1) / 2 : 2 * len; minShift = -maxShift; } long shiftDelta = 0; while (shiftDelta == 0) { - shiftDelta = - Math.max(-first, minShift + nextLong(random, maxShift - minShift + 1)); + shiftDelta = Math.max(-first, minShift + nextLong(random, maxShift - minShift + 1)); } lastDest.setValue(last + shiftDelta); @@ -205,14 +197,12 @@ static public void generateShiftAwareTableUpdates(final SimulationProfile profil if (shiftStrategy < profile.SHIFT_10_PERCENT_KEY_SPACE && index.nonempty()) { // 10% of keyspace final long startKey = nextLong(random, index.lastKey() + 1); - final long lastKey = - Math.min(startKey + (long) (index.lastKey() * 0.1), index.lastKey()); + final long lastKey = Math.min(startKey + (long) (index.lastKey() * 0.1), index.lastKey()); shiftConsumer.accept(startKey, lastKey); } shiftStrategy -= profile.SHIFT_10_PERCENT_KEY_SPACE; - if (shiftStrategy >= 0 && shiftStrategy < profile.SHIFT_10_PERCENT_POS_SPACE - && index.nonempty()) { + if (shiftStrategy >= 0 && shiftStrategy < profile.SHIFT_10_PERCENT_POS_SPACE && index.nonempty()) { // 10% of keys final long startIdx = nextLong(random, index.size()); final long lastIdx = Math.min(index.size() - 1, startIdx + (index.size() / 10)); @@ -220,25 +210,21 @@ static public void generateShiftAwareTableUpdates(final SimulationProfile profil } shiftStrategy -= profile.SHIFT_10_PERCENT_POS_SPACE; - if (shiftStrategy >= 0 && shiftStrategy < profile.SHIFT_AGGRESSIVELY - && index.nonempty()) { + if (shiftStrategy >= 0 && shiftStrategy < profile.SHIFT_AGGRESSIVELY && index.nonempty()) { // aggressive shifting long currIdx = 0; while (currIdx < index.size()) { final long startIdx = currIdx + (nextLong(random, index.size() - currIdx)); - final long lastIdx = startIdx - + (long) (Math.sqrt(nextLong(random, index.size() - startIdx))); + final long lastIdx = startIdx + (long) (Math.sqrt(nextLong(random, index.size() - startIdx))); shiftConsumer.accept(index.get(startIdx), index.get(lastIdx)); - currIdx = 1 + lastIdx - + 
(long) (Math.sqrt(nextLong(random, index.size() - lastIdx))); + currIdx = 1 + lastIdx + (long) (Math.sqrt(nextLong(random, index.size() - lastIdx))); } } shiftStrategy -= profile.SHIFT_AGGRESSIVELY; } update.shifted = shiftBuilder.build(); - // Compute what data needs to be removed otherwise the shift generated would be invalid. - // We must also update + // Compute what data needs to be removed otherwise the shift generated would be invalid. We must also update // our cloned index so we can pick appropriate added and modified sets. final int preShiftIndexSize = index.intSize(); update.shifted.apply((start, end, delta) -> { @@ -246,7 +232,7 @@ static public void generateShiftAwareTableUpdates(final SimulationProfile profil final long blatStart = delta < 0 ? start + delta : end; final long blatEnd = delta < 0 ? start - 1 : end + delta; try (final Index blattedRows = - index.extract(Index.CURRENT_FACTORY.getIndexByRange(blatStart, blatEnd))) { + index.extract(Index.CURRENT_FACTORY.getIndexByRange(blatStart, blatEnd))) { update.removed.insert(blattedRows); } }); @@ -257,7 +243,7 @@ static public void generateShiftAwareTableUpdates(final SimulationProfile profil // Modifies and Adds in post-shift keyspace. if (index.nonempty()) { update.modified = TstUtils.selectSubIndexSet( - Math.min(index.intSize(), random.nextInt(targetUpdateSize * 2)), index, random); + Math.min(index.intSize(), random.nextInt(targetUpdateSize * 2)), index, random); } else { update.modified = TstUtils.i(); } @@ -270,27 +256,25 @@ static public void generateShiftAwareTableUpdates(final SimulationProfile profil update.modifiedColumnSet.clear(); final String mustModifyColumn = (mutableColumns.length == 0) ? 
null - : mutableColumns[random.nextInt(mutableColumns.length)].name; + : mutableColumns[random.nextInt(mutableColumns.length)].name; for (final TstUtils.ColumnInfo ci : columnInfo) { if (ci.name.equals(mustModifyColumn) - || (!ci.immutable && random.nextInt(100) < profile.MOD_ADDITIONAL_COLUMN)) { + || (!ci.immutable && random.nextInt(100) < profile.MOD_ADDITIONAL_COLUMN)) { modifiedColumns.add(ci.name); } } - update.modifiedColumnSet - .setAll(modifiedColumns.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)); + update.modifiedColumnSet.setAll(modifiedColumns.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)); } - update.added = TstUtils - .newIndex(numRowsBlattedByShift + random.nextInt(targetUpdateSize), index, random); + update.added = TstUtils.newIndex(numRowsBlattedByShift + random.nextInt(targetUpdateSize), index, random); generateTableUpdates(update, random, table, columnInfo); } } static public void generateTableUpdates(final ShiftAwareListener.Update update, - final Random random, final QueryTable table, - final TstUtils.ColumnInfo[] columnInfo) { + final Random random, final QueryTable table, + final TstUtils.ColumnInfo[] columnInfo) { final Index index = table.getIndex(); if (LiveTableTestCase.printTableUpdates) { @@ -311,8 +295,7 @@ static public void generateTableUpdates(final ShiftAwareListener.Update update, // Shift data: update.shifted.apply((start, end, delta) -> { // Move data! - final Index.SearchIterator iter = - (delta < 0) ? index.searchIterator() : index.reverseIterator(); + final Index.SearchIterator iter = (delta < 0) ? index.searchIterator() : index.reverseIterator(); if (iter.advance((delta < 0) ? 
start : end)) { long idx = iter.currentValue(); do { diff --git a/DB/src/test/java/io/deephaven/db/v2/GroupingValidator.java b/DB/src/test/java/io/deephaven/db/v2/GroupingValidator.java index f2883539e37..1164bd811a5 100644 --- a/DB/src/test/java/io/deephaven/db/v2/GroupingValidator.java +++ b/DB/src/test/java/io/deephaven/db/v2/GroupingValidator.java @@ -15,9 +15,9 @@ import java.util.*; /** - * This class listens to a table and on each update verifies that the groupings returned by the - * table's index for a set of columns are still valid. It is meant to be used as part of a unit test - * for incremental updates, to ensure that stale groupings are not left between table updates. + * This class listens to a table and on each update verifies that the groupings returned by the table's index for a set + * of columns are still valid. It is meant to be used as part of a unit test for incremental updates, to ensure that + * stale groupings are not left between table updates. */ public class GroupingValidator extends InstrumentedShiftAwareListenerAdapter { private DynamicTable source; @@ -25,13 +25,11 @@ public class GroupingValidator extends InstrumentedShiftAwareListenerAdapter { private String context; private int validationCount = 0; - public GroupingValidator(String context, DynamicTable source, - ArrayList> groupingColumns) { + public GroupingValidator(String context, DynamicTable source, ArrayList> groupingColumns) { this(context, source, convertListToArray(groupingColumns)); } - static private Collection convertListToArray( - ArrayList> groupingColumns) { + static private Collection convertListToArray(ArrayList> groupingColumns) { Collection collectionOfArrays = new ArrayList<>(); for (ArrayList columnSet : groupingColumns) { collectionOfArrays.add(columnSet.toArray(new String[columnSet.size()])); @@ -39,8 +37,7 @@ static private Collection convertListToArray( return collectionOfArrays; } - private GroupingValidator(String context, DynamicTable source, - 
Collection groupingColumns) { + private GroupingValidator(String context, DynamicTable source, Collection groupingColumns) { super("grouping validator " + context, source, false); this.context = context; this.source = source; @@ -64,15 +61,14 @@ private void validatePrevGroupings(Collection groupingColumns, Index i } } - public static void validateGrouping(String[] groupingToCheck, Index index, DynamicTable source, - String context) { + public static void validateGrouping(String[] groupingToCheck, Index index, DynamicTable source, String context) { final ColumnSource[] groupColumns = getColumnSources(groupingToCheck, source); final TupleSource tupleSource = TupleSourceFactory.makeTupleSource(groupColumns); validateGrouping(groupingToCheck, index, source, context, index.getGrouping(tupleSource)); } - public static void validateGrouping(String[] groupingToCheck, Index index, DynamicTable source, - String context, Map grouping) { + public static void validateGrouping(String[] groupingToCheck, Index index, DynamicTable source, String context, + Map grouping) { final ColumnSource[] groupColumns = getColumnSources(groupingToCheck, source); for (Map.Entry objectIndexEntry : grouping.entrySet()) { for (Index.Iterator it = objectIndexEntry.getValue().iterator(); it.hasNext();) { @@ -85,23 +81,21 @@ public static void validateGrouping(String[] groupingToCheck, Index index, Dynam long next = it.nextLong(); Object key = getValue(groupColumns, next); Index keyIndex = grouping.get(key); - Assert.assertion(keyIndex != null, "keyIndex != null", next, "next", key, "key", - context, "context"); + Assert.assertion(keyIndex != null, "keyIndex != null", next, "next", key, "key", context, "context"); if (keyIndex != null) { - Assert.assertion(keyIndex.find(next) >= 0, "keyIndex.find(next) >= 0", next, "next", - key, "key", keyIndex, "keyIndex", context, "context"); + Assert.assertion(keyIndex.find(next) >= 0, "keyIndex.find(next) >= 0", next, "next", key, "key", + keyIndex, "keyIndex", 
context, "context"); } } } - public static void validateRestrictedGrouping(String[] groupingToCheck, Index index, - DynamicTable source, String context, Map grouping, Set validKeys) { + public static void validateRestrictedGrouping(String[] groupingToCheck, Index index, DynamicTable source, + String context, Map grouping, Set validKeys) { ColumnSource[] groupColumns = getColumnSources(groupingToCheck, source); for (Map.Entry objectIndexEntry : grouping.entrySet()) { final Object groupKey = objectIndexEntry.getKey(); - Assert.assertion(validKeys.contains(groupKey), - "validKeys.contains(objectIndexEntry.getKey())", groupKey, "groupKey", validKeys, - "validKeys"); + Assert.assertion(validKeys.contains(groupKey), "validKeys.contains(objectIndexEntry.getKey())", groupKey, + "groupKey", validKeys, "validKeys"); for (Index.Iterator it = objectIndexEntry.getValue().iterator(); it.hasNext();) { long next = it.nextLong(); checkGroupKey(groupColumns, next, groupKey, context); @@ -114,15 +108,13 @@ public static void validateRestrictedGrouping(String[] groupingToCheck, Index in Index keyIndex = grouping.get(key); if (validKeys.contains(key)) { - Assert.assertion(keyIndex != null, "keyIndex != null", next, "next", key, "key", - context, "context"); + Assert.assertion(keyIndex != null, "keyIndex != null", next, "next", key, "key", context, "context"); if (keyIndex != null) { - Assert.assertion(keyIndex.find(next) >= 0, "keyIndex.find(next) >= 0", next, - "next", key, "key", keyIndex, "keyIndex", context, "context"); + Assert.assertion(keyIndex.find(next) >= 0, "keyIndex.find(next) >= 0", next, "next", key, "key", + keyIndex, "keyIndex", context, "context"); } } else { - Assert.assertion(keyIndex == null, "keyIndex == null", next, "next", key, "key", - context, "context"); + Assert.assertion(keyIndex == null, "keyIndex == null", next, "next", key, "key", context, "context"); } } } @@ -142,32 +134,30 @@ private void validatePrevGrouping(String[] groupingToCheck, Index index) { 
long next = it.nextLong(); Object key = getPrevValue(groupColumns, next); Index keyIndex = grouping.get(key); - Assert.assertion(keyIndex != null, "keyIndex != null", next, "next", key, "key", - context, "context"); + Assert.assertion(keyIndex != null, "keyIndex != null", next, "next", key, "key", context, "context"); if (keyIndex != null) { - Assert.assertion(keyIndex.find(next) >= 0, "keyIndex.find(next) >= 0", next, "next", - key, "key", keyIndex, "keyIndex", context, "context"); + Assert.assertion(keyIndex.find(next) >= 0, "keyIndex.find(next) >= 0", next, "next", key, "key", + keyIndex, "keyIndex", context, "context"); } } } private static ColumnSource[] getColumnSources(String[] groupingToCheck, DynamicTable source) { - return Arrays.stream(groupingToCheck).map(source::getColumnSource) - .toArray(ColumnSource[]::new); + return Arrays.stream(groupingToCheck).map(source::getColumnSource).toArray(ColumnSource[]::new); } - static private void checkGroupKey(final ColumnSource[] groupColumns, final long next, - final Object key, final String context) { + static private void checkGroupKey(final ColumnSource[] groupColumns, final long next, final Object key, + final String context) { final Object value = getValue(groupColumns, next); - Assert.assertion(Objects.equals(value, key), "value.equals(key)", value, "value", key, - "key", context, "context"); + Assert.assertion(Objects.equals(value, key), "value.equals(key)", value, "value", key, "key", context, + "context"); } - static private void checkGroupPrevKey(final ColumnSource[] groupColumns, final long next, - final Object key, final String context) { + static private void checkGroupPrevKey(final ColumnSource[] groupColumns, final long next, final Object key, + final String context) { Object value = getPrevValue(groupColumns, next); - Assert.assertion(value == key || value.equals(key), "value.equals(key)", value, "value", - key, "key", context, "context"); + Assert.assertion(value == key || value.equals(key), 
"value.equals(key)", value, "value", key, "key", context, + "context"); } static private Object getValue(ColumnSource[] groupColumns, long next) { @@ -188,8 +178,7 @@ public void onUpdate(final Update upstream) { } @Override - public void onFailureInternal(Throwable originalException, - UpdatePerformanceTracker.Entry sourceEntry) { + public void onFailureInternal(Throwable originalException, UpdatePerformanceTracker.Entry sourceEntry) { originalException.printStackTrace(); TestCase.fail("Failure for context " + context + ": " + originalException.getMessage()); } diff --git a/DB/src/test/java/io/deephaven/db/v2/IndexGroupingTest.java b/DB/src/test/java/io/deephaven/db/v2/IndexGroupingTest.java index c37014cfb94..f1708ae1dc2 100644 --- a/DB/src/test/java/io/deephaven/db/v2/IndexGroupingTest.java +++ b/DB/src/test/java/io/deephaven/db/v2/IndexGroupingTest.java @@ -34,7 +34,7 @@ public class IndexGroupingTest extends LiveTableTestCase { private static final boolean ENABLE_COMPILER_TOOLS_LOGGING = Configuration.getInstance() - .getBooleanForClassWithDefault(IndexGroupingTest.class, "CompilerTools.logEnabled", false); + .getBooleanForClassWithDefault(IndexGroupingTest.class, "CompilerTools.logEnabled", false); private boolean oldCompilerToolsLogEnabled; @@ -84,29 +84,25 @@ public void testGroupingWithImmutableColumns() { testGrouping(true, new Random(0), new MutableInt(50)); } - public void testGrouping(final boolean immutableColumns, final Random random, - final MutableInt numSteps) { + public void testGrouping(final boolean immutableColumns, final Random random, final MutableInt numSteps) { int size = 100; TstUtils.ColumnInfo[] columnInfo = new TstUtils.ColumnInfo[3]; if (immutableColumns) { // noinspection unchecked - columnInfo[0] = - new TstUtils.ColumnInfo(new TstUtils.SetGenerator<>("a", "b", "c", "d", "e", "f"), - "Sym", TstUtils.ColumnInfo.ColAttributes.Immutable); + columnInfo[0] = new TstUtils.ColumnInfo(new TstUtils.SetGenerator<>("a", "b", "c", "d", "e", 
"f"), "Sym", + TstUtils.ColumnInfo.ColAttributes.Immutable); // noinspection unchecked columnInfo[1] = new TstUtils.ColumnInfo(new TstUtils.IntGenerator(10, 100), "intCol", - TstUtils.ColumnInfo.ColAttributes.Immutable); + TstUtils.ColumnInfo.ColAttributes.Immutable); } else { // noinspection unchecked - columnInfo[0] = new TstUtils.ColumnInfo( - new TstUtils.SetGenerator<>("a", "b", "c", "d", "e", "f"), "Sym"); + columnInfo[0] = new TstUtils.ColumnInfo(new TstUtils.SetGenerator<>("a", "b", "c", "d", "e", "f"), "Sym"); // noinspection unchecked columnInfo[1] = new TstUtils.ColumnInfo(new TstUtils.IntGenerator(10, 100), "intCol"); } // noinspection unchecked - columnInfo[2] = - new TstUtils.ColumnInfo(new TstUtils.SetGenerator<>(10.1, 20.1, 30.1), "doubleCol"); + columnInfo[2] = new TstUtils.ColumnInfo(new TstUtils.SetGenerator<>(10.1, 20.1, 30.1), "doubleCol"); final QueryTable queryTable = getTable(size, random, columnInfo); addGroupingValidator(queryTable, "queryTable"); @@ -114,52 +110,48 @@ public void testGrouping(final boolean immutableColumns, final Random random, final EvalNugget[] en = new EvalNugget[] { new EvalNugget() { public Table e() { - return LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLocked(() -> queryTable.head(0)); + return LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked(() -> queryTable.head(0)); } }, new EvalNugget() { public Table e() { - return LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLocked(() -> queryTable.head(1)); + return LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked(() -> queryTable.head(1)); } }, new EvalNugget() { public Table e() { return LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLocked(() -> queryTable.update("intCol2 = intCol + 1")); + .computeLocked(() -> queryTable.update("intCol2 = intCol + 1")); } }, new EvalNugget() { public Table e() { - return LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked( - () -> queryTable.update("intCol2 = intCol + 1").select()); + return 
LiveTableMonitor.DEFAULT.exclusiveLock() + .computeLocked(() -> queryTable.update("intCol2 = intCol + 1").select()); } }, new EvalNugget() { public Table e() { return LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLocked(() -> queryTable.view("Sym", "intCol2 = intCol + 1")); + .computeLocked(() -> queryTable.view("Sym", "intCol2 = intCol + 1")); } }, new EvalNugget() { public Table e() { return LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLocked(() -> queryTable.avgBy("Sym").sort("Sym")); + .computeLocked(() -> queryTable.avgBy("Sym").sort("Sym")); } }, new EvalNugget() { public Table e() { - return LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLocked(() -> queryTable.by("Sym", "intCol") - .sort("Sym", "intCol").view("doubleCol=max(doubleCol)")); + return LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked(() -> queryTable + .by("Sym", "intCol").sort("Sym", "intCol").view("doubleCol=max(doubleCol)")); } }, new EvalNugget() { public Table e() { - return LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLocked(() -> queryTable.avgBy("Sym", "doubleCol") - .sort("Sym", "doubleCol").view("intCol=min(intCol)")); + return LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked(() -> queryTable + .avgBy("Sym", "doubleCol").sort("Sym", "doubleCol").view("intCol=min(intCol)")); } }, }; @@ -168,21 +160,19 @@ public Table e() { addGroupingValidator((DynamicTable) en[ii].originalValue, "en[" + ii + "]"); } - Table by = - LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked(() -> queryTable.avgBy("Sym")); + Table by = LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked(() -> queryTable.avgBy("Sym")); addGroupingValidator((DynamicTable) by, "by"); - Table avgBy = - LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked(() -> queryTable.avgBy("Sym")); + Table avgBy = LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked(() -> queryTable.avgBy("Sym")); addGroupingValidator((DynamicTable) avgBy, "avgBy"); - Table avgBy1 = 
LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLocked(() -> queryTable.avgBy("Sym", "intCol")); + Table avgBy1 = LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked(() -> queryTable.avgBy("Sym", "intCol")); addGroupingValidator((DynamicTable) avgBy1, "avgBy1"); - Table merged = Require.neqNull(LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLocked(() -> TableTools.merge(queryTable)), "TableTools.merge(queryTable)"); + Table merged = Require.neqNull( + LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked(() -> TableTools.merge(queryTable)), + "TableTools.merge(queryTable)"); addGroupingValidator((DynamicTable) merged, "merged"); Table updated = LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLocked(() -> merged.update("HiLo = intCol > 50 ? `Hi` : `Lo`")); + .computeLocked(() -> merged.update("HiLo = intCol > 50 ? `Hi` : `Lo`")); addGroupingValidator((DynamicTable) updated, "updated"); final int maxSteps = numSteps.intValue(); // 8; @@ -192,8 +182,7 @@ public Table e() { TableTools.showWithIndex(queryTable); } for (numSteps.setValue(0); numSteps.intValue() < maxSteps; numSteps.increment()) { - simulateShiftAwareStep("step == " + numSteps.intValue(), size, random, queryTable, - columnInfo, en); + simulateShiftAwareStep("step == " + numSteps.intValue(), size, random, queryTable, columnInfo, en); } // we don't need them after this test is done @@ -205,8 +194,7 @@ public Table e() { private ArrayList groupingValidators = new ArrayList<>(); private void addGroupingValidator(DynamicTable originalValue, String context) { - ArrayList> columnSets2 = - powerSet(originalValue.getColumnSourceMap().keySet()); + ArrayList> columnSets2 = powerSet(originalValue.getColumnSourceMap().keySet()); ArrayList columnNames = new ArrayList<>(); columnNames.addAll(originalValue.getColumnSourceMap().keySet()); columnSets2.add(columnNames); @@ -219,19 +207,16 @@ public void testCombinedGrouping() throws IOException { TstUtils.ColumnInfo[] columnInfo = new TstUtils.ColumnInfo[4]; 
// noinspection unchecked - columnInfo[0] = new TstUtils.ColumnInfo( - new TstUtils.SetGenerator<>("a", "b", "c", "d", "e", "f"), "Sym", - TstUtils.ColumnInfo.ColAttributes.Immutable, TstUtils.ColumnInfo.ColAttributes.Grouped); + columnInfo[0] = new TstUtils.ColumnInfo(new TstUtils.SetGenerator<>("a", "b", "c", "d", "e", "f"), "Sym", + TstUtils.ColumnInfo.ColAttributes.Immutable, TstUtils.ColumnInfo.ColAttributes.Grouped); // noinspection unchecked - columnInfo[1] = new TstUtils.ColumnInfo(new TstUtils.SetGenerator<>("q", "r", "s", "t"), - "Sym2", TstUtils.ColumnInfo.ColAttributes.Immutable, - TstUtils.ColumnInfo.ColAttributes.Grouped); + columnInfo[1] = new TstUtils.ColumnInfo(new TstUtils.SetGenerator<>("q", "r", "s", "t"), "Sym2", + TstUtils.ColumnInfo.ColAttributes.Immutable, TstUtils.ColumnInfo.ColAttributes.Grouped); // noinspection unchecked columnInfo[2] = new TstUtils.ColumnInfo(new TstUtils.IntGenerator(10, 100), "intCol", - TstUtils.ColumnInfo.ColAttributes.Immutable, TstUtils.ColumnInfo.ColAttributes.Grouped); + TstUtils.ColumnInfo.ColAttributes.Immutable, TstUtils.ColumnInfo.ColAttributes.Grouped); // noinspection unchecked - columnInfo[3] = - new TstUtils.ColumnInfo(new TstUtils.SetGenerator<>(10.1, 20.1, 30.1), "doubleCol"); + columnInfo[3] = new TstUtils.ColumnInfo(new TstUtils.SetGenerator<>(10.1, 20.1, 30.1), "doubleCol"); final QueryTable nonCountingTable = getTable(size, random, columnInfo); @@ -248,78 +233,67 @@ public void testCombinedGrouping() throws IOException { assertTrue(countingTable.getIndex().hasGrouping(sym2ColumnSource)); assertTrue(countingTable.getIndex().hasGrouping(intColumnSource)); assertFalse(countingTable.getIndex().hasGrouping(intColumnSource, symColumnSource)); - assertFalse(countingTable.getIndex().hasGrouping(intColumnSource, symColumnSource, - sym2ColumnSource)); - assertFalse(countingTable.getIndex().hasGrouping(intColumnSource, symColumnSource, - doubleColumnSource)); + 
assertFalse(countingTable.getIndex().hasGrouping(intColumnSource, symColumnSource, sym2ColumnSource)); + assertFalse(countingTable.getIndex().hasGrouping(intColumnSource, symColumnSource, doubleColumnSource)); Map symGrouping = countingTable.getIndex().getGrouping(symColumnSource); assertEquals(0, ((CountingTable.MethodCounter) symColumnSource).getMethodCount("get")); - GroupingValidator.validateGrouping(new String[] {"Sym"}, countingTable.getIndex(), - countingTable, "sym", symGrouping); + GroupingValidator.validateGrouping(new String[] {"Sym"}, countingTable.getIndex(), countingTable, "sym", + symGrouping); countingTable.getColumnSources().forEach(x -> ((CountingTable.MethodCounter) x).clear()); Map intGrouping = countingTable.getIndex().getGrouping(intColumnSource); assertEquals(0, ((CountingTable.MethodCounter) intColumnSource).getMethodCount("get")); assertEquals(0, ((CountingTable.MethodCounter) intColumnSource).getMethodCount("getInt")); - GroupingValidator.validateGrouping(new String[] {"intCol"}, countingTable.getIndex(), - countingTable, "intCol", intGrouping); + GroupingValidator.validateGrouping(new String[] {"intCol"}, countingTable.getIndex(), countingTable, "intCol", + intGrouping); countingTable.getColumnSources().forEach(x -> ((CountingTable.MethodCounter) x).clear()); - final TupleSource intSymTupleSource = - TupleSourceFactory.makeTupleSource(intColumnSource, symColumnSource); + final TupleSource intSymTupleSource = TupleSourceFactory.makeTupleSource(intColumnSource, symColumnSource); Map intSymGrouping = countingTable.getIndex().getGrouping(intSymTupleSource); assertEquals(0, ((CountingTable.MethodCounter) symColumnSource).getMethodCount("get")); assertEquals(0, ((CountingTable.MethodCounter) intColumnSource).getMethodCount("get")); assertEquals(0, ((CountingTable.MethodCounter) intColumnSource).getMethodCount("getInt")); - GroupingValidator.validateGrouping(new String[] {"intCol", "Sym"}, countingTable.getIndex(), - countingTable, "intCol+sym", 
intSymGrouping); + GroupingValidator.validateGrouping(new String[] {"intCol", "Sym"}, countingTable.getIndex(), countingTable, + "intCol+sym", intSymGrouping); countingTable.getColumnSources().forEach(x -> ((CountingTable.MethodCounter) x).clear()); final TupleSource intSymSym2TupleSource = - TupleSourceFactory.makeTupleSource(intColumnSource, symColumnSource, sym2ColumnSource); - Map intSymSym2Grouping = - countingTable.getIndex().getGrouping(intSymSym2TupleSource); + TupleSourceFactory.makeTupleSource(intColumnSource, symColumnSource, sym2ColumnSource); + Map intSymSym2Grouping = countingTable.getIndex().getGrouping(intSymSym2TupleSource); assertEquals(0, ((CountingTable.MethodCounter) symColumnSource).getMethodCount("get")); assertEquals(0, ((CountingTable.MethodCounter) sym2ColumnSource).getMethodCount("get")); assertEquals(0, ((CountingTable.MethodCounter) intColumnSource).getMethodCount("get")); assertEquals(0, ((CountingTable.MethodCounter) intColumnSource).getMethodCount("getInt")); - GroupingValidator.validateGrouping(new String[] {"intCol", "Sym", "Sym2"}, - countingTable.getIndex(), countingTable, "intCol+sym+sym2", intSymSym2Grouping); + GroupingValidator.validateGrouping(new String[] {"intCol", "Sym", "Sym2"}, countingTable.getIndex(), + countingTable, "intCol+sym+sym2", intSymSym2Grouping); countingTable.getColumnSources().forEach(x -> ((CountingTable.MethodCounter) x).clear()); - final TupleSource intSymDoubleTupleSource = TupleSourceFactory - .makeTupleSource(intColumnSource, symColumnSource, doubleColumnSource); - Map intSymDoubleGrouping = - countingTable.getIndex().getGrouping(intSymDoubleTupleSource); + final TupleSource intSymDoubleTupleSource = + TupleSourceFactory.makeTupleSource(intColumnSource, symColumnSource, doubleColumnSource); + Map intSymDoubleGrouping = countingTable.getIndex().getGrouping(intSymDoubleTupleSource); assertEquals(0, ((CountingTable.MethodCounter) symColumnSource).getMethodCount("get")); assertEquals(0, 
((CountingTable.MethodCounter) intColumnSource).getMethodCount("get")); assertEquals(0, ((CountingTable.MethodCounter) intColumnSource).getMethodCount("getInt")); - assertEquals(countingTable.size(), - ((CountingTable.MethodCounter) doubleColumnSource).getMethodCount("get")); - assertEquals(0, - ((CountingTable.MethodCounter) doubleColumnSource).getMethodCount("getDouble")); - GroupingValidator.validateGrouping(new String[] {"intCol", "Sym", "doubleCol"}, - countingTable.getIndex(), countingTable, "intCol+sym+doubleCol", intSymDoubleGrouping); + assertEquals(countingTable.size(), ((CountingTable.MethodCounter) doubleColumnSource).getMethodCount("get")); + assertEquals(0, ((CountingTable.MethodCounter) doubleColumnSource).getMethodCount("getDouble")); + GroupingValidator.validateGrouping(new String[] {"intCol", "Sym", "doubleCol"}, countingTable.getIndex(), + countingTable, "intCol+sym+doubleCol", intSymDoubleGrouping); countingTable.getColumnSources().forEach(x -> ((CountingTable.MethodCounter) x).clear()); - final TupleSource intSymSym2DoubleTupleSource = TupleSourceFactory.makeTupleSource( - intColumnSource, symColumnSource, sym2ColumnSource, doubleColumnSource); - Map intSymSym2DoubleGrouping = - countingTable.getIndex().getGrouping(intSymSym2DoubleTupleSource); + final TupleSource intSymSym2DoubleTupleSource = TupleSourceFactory.makeTupleSource(intColumnSource, + symColumnSource, sym2ColumnSource, doubleColumnSource); + Map intSymSym2DoubleGrouping = countingTable.getIndex().getGrouping(intSymSym2DoubleTupleSource); assertEquals(0, ((CountingTable.MethodCounter) symColumnSource).getMethodCount("get")); assertEquals(0, ((CountingTable.MethodCounter) sym2ColumnSource).getMethodCount("get")); assertEquals(0, ((CountingTable.MethodCounter) intColumnSource).getMethodCount("get")); assertEquals(0, ((CountingTable.MethodCounter) intColumnSource).getMethodCount("getInt")); - assertEquals(countingTable.size(), - ((CountingTable.MethodCounter) 
doubleColumnSource).getMethodCount("get")); - assertEquals(0, - ((CountingTable.MethodCounter) doubleColumnSource).getMethodCount("getDouble")); + assertEquals(countingTable.size(), ((CountingTable.MethodCounter) doubleColumnSource).getMethodCount("get")); + assertEquals(0, ((CountingTable.MethodCounter) doubleColumnSource).getMethodCount("getDouble")); GroupingValidator.validateGrouping(new String[] {"intCol", "Sym", "Sym2", "doubleCol"}, - countingTable.getIndex(), countingTable, "intCol+sym+sym2+doubleCol", - intSymSym2DoubleGrouping); + countingTable.getIndex(), countingTable, "intCol+sym+sym2+doubleCol", intSymSym2DoubleGrouping); countingTable.getColumnSources().forEach(x -> ((CountingTable.MethodCounter) x).clear()); } @@ -329,19 +303,16 @@ public void testRestrictedGrouping() throws IOException { TstUtils.ColumnInfo[] columnInfo = new TstUtils.ColumnInfo[4]; // noinspection unchecked - columnInfo[0] = new TstUtils.ColumnInfo( - new TstUtils.SetGenerator<>("a", "b", "c", "d", "e", "f"), "Sym", - TstUtils.ColumnInfo.ColAttributes.Immutable, TstUtils.ColumnInfo.ColAttributes.Grouped); + columnInfo[0] = new TstUtils.ColumnInfo(new TstUtils.SetGenerator<>("a", "b", "c", "d", "e", "f"), "Sym", + TstUtils.ColumnInfo.ColAttributes.Immutable, TstUtils.ColumnInfo.ColAttributes.Grouped); // noinspection unchecked - columnInfo[1] = new TstUtils.ColumnInfo( - new TstUtils.SetGenerator<>("q", "r", "s", "t", "u", "v"), "Sym2", - TstUtils.ColumnInfo.ColAttributes.Immutable, TstUtils.ColumnInfo.ColAttributes.Grouped); + columnInfo[1] = new TstUtils.ColumnInfo(new TstUtils.SetGenerator<>("q", "r", "s", "t", "u", "v"), "Sym2", + TstUtils.ColumnInfo.ColAttributes.Immutable, TstUtils.ColumnInfo.ColAttributes.Grouped); // noinspection unchecked columnInfo[2] = new TstUtils.ColumnInfo(new TstUtils.IntGenerator(10, 100), "intCol", - TstUtils.ColumnInfo.ColAttributes.Immutable, TstUtils.ColumnInfo.ColAttributes.Grouped); + TstUtils.ColumnInfo.ColAttributes.Immutable, 
TstUtils.ColumnInfo.ColAttributes.Grouped); // noinspection unchecked - columnInfo[3] = - new TstUtils.ColumnInfo(new TstUtils.SetGenerator<>(10.1, 20.1, 30.1), "doubleCol"); + columnInfo[3] = new TstUtils.ColumnInfo(new TstUtils.SetGenerator<>(10.1, 20.1, 30.1), "doubleCol"); final QueryTable nonCountingTable = getTable(size, random, columnInfo); @@ -358,97 +329,85 @@ public void testRestrictedGrouping() throws IOException { assertTrue(countingTable.getIndex().hasGrouping(sym2ColumnSource)); assertTrue(countingTable.getIndex().hasGrouping(intColumnSource)); assertFalse(countingTable.getIndex().hasGrouping(intColumnSource, symColumnSource)); - assertFalse(countingTable.getIndex().hasGrouping(intColumnSource, symColumnSource, - sym2ColumnSource)); - assertFalse(countingTable.getIndex().hasGrouping(intColumnSource, symColumnSource, - sym2ColumnSource, doubleColumnSource)); - assertFalse(countingTable.getIndex().hasGrouping(intColumnSource, symColumnSource, - doubleColumnSource)); + assertFalse(countingTable.getIndex().hasGrouping(intColumnSource, symColumnSource, sym2ColumnSource)); + assertFalse(countingTable.getIndex().hasGrouping(intColumnSource, symColumnSource, sym2ColumnSource, + doubleColumnSource)); + assertFalse(countingTable.getIndex().hasGrouping(intColumnSource, symColumnSource, doubleColumnSource)); final TreeSet keySet = new TreeSet<>(Arrays.asList("a", "b")); - final Map symGrouping = - countingTable.getIndex().getGroupingForKeySet(keySet, symColumnSource); + final Map symGrouping = countingTable.getIndex().getGroupingForKeySet(keySet, symColumnSource); assertEquals(0, ((CountingTable.MethodCounter) symColumnSource).getMethodCount("get")); - GroupingValidator.validateRestrictedGrouping(new String[] {"Sym"}, countingTable.getIndex(), - countingTable, "sym", symGrouping, keySet); + GroupingValidator.validateRestrictedGrouping(new String[] {"Sym"}, countingTable.getIndex(), countingTable, + "sym", symGrouping, keySet); ((CountingTable.MethodCounter) 
symColumnSource).clear(); countingTable.getColumnSources().forEach(x -> ((CountingTable.MethodCounter) x).clear()); keySet.clear(); keySet.addAll(Arrays.asList(10, 20, 30, 40, 50, 60, 70, 80, 90)); - final Map intGrouping = - countingTable.getIndex().getGroupingForKeySet(keySet, intColumnSource); + final Map intGrouping = countingTable.getIndex().getGroupingForKeySet(keySet, intColumnSource); assertEquals(0, ((CountingTable.MethodCounter) intColumnSource).getMethodCount("get")); assertEquals(0, ((CountingTable.MethodCounter) intColumnSource).getMethodCount("getInt")); - GroupingValidator.validateRestrictedGrouping(new String[] {"intCol"}, - countingTable.getIndex(), countingTable, "intCol", intGrouping, keySet); + GroupingValidator.validateRestrictedGrouping(new String[] {"intCol"}, countingTable.getIndex(), countingTable, + "intCol", intGrouping, keySet); countingTable.getColumnSources().forEach(x -> ((CountingTable.MethodCounter) x).clear()); keySet.clear(); - final TupleSource intSymFactory = - TupleSourceFactory.makeTupleSource(intColumnSource, symColumnSource); + final TupleSource intSymFactory = TupleSourceFactory.makeTupleSource(intColumnSource, symColumnSource); TstUtils.selectSubIndexSet(5, countingTable.getIndex(), random) - .forEach(row -> keySet.add(intSymFactory.createTuple(row))); + .forEach(row -> keySet.add(intSymFactory.createTuple(row))); countingTable.getColumnSources().forEach(x -> ((CountingTable.MethodCounter) x).clear()); - final Map intSymGrouping = countingTable.getIndex().getGroupingForKeySet( - keySet, TupleSourceFactory.makeTupleSource(intColumnSource, symColumnSource)); + final Map intSymGrouping = countingTable.getIndex().getGroupingForKeySet(keySet, + TupleSourceFactory.makeTupleSource(intColumnSource, symColumnSource)); assertEquals(0, ((CountingTable.MethodCounter) symColumnSource).getMethodCount("get")); assertEquals(0, ((CountingTable.MethodCounter) intColumnSource).getMethodCount("get")); assertEquals(0, 
((CountingTable.MethodCounter) intColumnSource).getMethodCount("getInt")); - GroupingValidator.validateRestrictedGrouping(new String[] {"intCol", "Sym"}, - countingTable.getIndex(), countingTable, "intCol+sym", intSymGrouping, keySet); + GroupingValidator.validateRestrictedGrouping(new String[] {"intCol", "Sym"}, countingTable.getIndex(), + countingTable, "intCol+sym", intSymGrouping, keySet); countingTable.getColumnSources().forEach(x -> ((CountingTable.MethodCounter) x).clear()); keySet.clear(); - final TupleSource intSymDoubleFactory = TupleSourceFactory.makeTupleSource(intColumnSource, - symColumnSource, doubleColumnSource); + final TupleSource intSymDoubleFactory = + TupleSourceFactory.makeTupleSource(intColumnSource, symColumnSource, doubleColumnSource); TstUtils.selectSubIndexSet(5, countingTable.getIndex(), random) - .forEach(row -> keySet.add(intSymDoubleFactory.createTuple(row))); + .forEach(row -> keySet.add(intSymDoubleFactory.createTuple(row))); countingTable.getColumnSources().forEach(x -> ((CountingTable.MethodCounter) x).clear()); - final Map intSymDoubleGrouping = - countingTable.getIndex().getGroupingForKeySet(keySet, TupleSourceFactory - .makeTupleSource(intColumnSource, symColumnSource, doubleColumnSource)); + final Map intSymDoubleGrouping = countingTable.getIndex().getGroupingForKeySet(keySet, + TupleSourceFactory.makeTupleSource(intColumnSource, symColumnSource, doubleColumnSource)); assertEquals(0, ((CountingTable.MethodCounter) symColumnSource).getMethodCount("get")); assertEquals(0, ((CountingTable.MethodCounter) intColumnSource).getMethodCount("get")); assertEquals(0, ((CountingTable.MethodCounter) intColumnSource).getMethodCount("getInt")); long groupingSize = intSymDoubleGrouping.values().stream().mapToLong(Index::size).sum(); - assertEquals(groupingSize, - ((CountingTable.MethodCounter) doubleColumnSource).getMethodCount("get")); - assertEquals(0, - ((CountingTable.MethodCounter) doubleColumnSource).getMethodCount("getDouble")); + 
assertEquals(groupingSize, ((CountingTable.MethodCounter) doubleColumnSource).getMethodCount("get")); + assertEquals(0, ((CountingTable.MethodCounter) doubleColumnSource).getMethodCount("getDouble")); countingTable.getColumnSources().forEach(x -> ((CountingTable.MethodCounter) x).clear()); GroupingValidator.validateRestrictedGrouping(new String[] {"intCol", "Sym", "doubleCol"}, - countingTable.getIndex(), countingTable, "intCol+sym+doubleCol", intSymDoubleGrouping, - keySet); + countingTable.getIndex(), countingTable, "intCol+sym+doubleCol", intSymDoubleGrouping, keySet); keySet.clear(); - final TupleSource intSymSym2DoubleFactory = TupleSourceFactory.makeTupleSource( - intColumnSource, symColumnSource, sym2ColumnSource, doubleColumnSource); + final TupleSource intSymSym2DoubleFactory = TupleSourceFactory.makeTupleSource(intColumnSource, symColumnSource, + sym2ColumnSource, doubleColumnSource); TstUtils.selectSubIndexSet(5, countingTable.getIndex(), random) - .forEach(row -> keySet.add(intSymSym2DoubleFactory.createTuple(row))); + .forEach(row -> keySet.add(intSymSym2DoubleFactory.createTuple(row))); countingTable.getColumnSources().forEach(x -> ((CountingTable.MethodCounter) x).clear()); - final Map intSymSym2DoubleGrouping = countingTable.getIndex() - .getGroupingForKeySet(keySet, TupleSourceFactory.makeTupleSource(intColumnSource, - symColumnSource, sym2ColumnSource, doubleColumnSource)); + final Map intSymSym2DoubleGrouping = + countingTable.getIndex().getGroupingForKeySet(keySet, TupleSourceFactory + .makeTupleSource(intColumnSource, symColumnSource, sym2ColumnSource, doubleColumnSource)); assertEquals(0, ((CountingTable.MethodCounter) symColumnSource).getMethodCount("get")); assertEquals(0, ((CountingTable.MethodCounter) sym2ColumnSource).getMethodCount("get")); assertEquals(0, ((CountingTable.MethodCounter) intColumnSource).getMethodCount("get")); assertEquals(0, ((CountingTable.MethodCounter) intColumnSource).getMethodCount("getInt")); groupingSize = 
intSymSym2DoubleGrouping.values().stream().mapToLong(Index::size).sum(); - assertEquals(groupingSize, - ((CountingTable.MethodCounter) doubleColumnSource).getMethodCount("get")); - assertEquals(0, - ((CountingTable.MethodCounter) doubleColumnSource).getMethodCount("getDouble")); + assertEquals(groupingSize, ((CountingTable.MethodCounter) doubleColumnSource).getMethodCount("get")); + assertEquals(0, ((CountingTable.MethodCounter) doubleColumnSource).getMethodCount("getDouble")); countingTable.getColumnSources().forEach(x -> ((CountingTable.MethodCounter) x).clear()); - GroupingValidator.validateRestrictedGrouping( - new String[] {"intCol", "Sym", "Sym2", "doubleCol"}, countingTable.getIndex(), - countingTable, "intCol+sym+sym2+doubleCol", intSymSym2DoubleGrouping, keySet); + GroupingValidator.validateRestrictedGrouping(new String[] {"intCol", "Sym", "Sym2", "doubleCol"}, + countingTable.getIndex(), countingTable, "intCol+sym+sym2+doubleCol", intSymSym2DoubleGrouping, keySet); } } diff --git a/DB/src/test/java/io/deephaven/db/v2/JUnit4QueryTableTestBase.java b/DB/src/test/java/io/deephaven/db/v2/JUnit4QueryTableTestBase.java index fcdece4687c..8c7e744bd1f 100644 --- a/DB/src/test/java/io/deephaven/db/v2/JUnit4QueryTableTestBase.java +++ b/DB/src/test/java/io/deephaven/db/v2/JUnit4QueryTableTestBase.java @@ -1,10 +1,9 @@ package io.deephaven.db.v2; /** - * When you want to extend QueryTableTestBase, but you need to use JUnit 4 annotations, - * like @Category or @RunWith(Suite.class), then instead of extending QueryTableTestBase, you should - * instead create a `JUnit4QueryTableTestBase field;`, and call setUp/tearDown in @Before/@After - * annotated methods. + * When you want to extend QueryTableTestBase, but you need to use JUnit 4 annotations, like @Category + * or @RunWith(Suite.class), then instead of extending QueryTableTestBase, you should instead create a + * `JUnit4QueryTableTestBase field;`, and call setUp/tearDown in @Before/@After annotated methods. 
* * We could probably implement this as a TestRule instead, but this works fine as-is. */ @@ -19,8 +18,7 @@ public void tearDown() throws Exception { super.tearDown(); } - // We use this class as a field in JUnit 4 tests which should not extend TestCase. This method - // is a no-op test + // We use this class as a field in JUnit 4 tests which should not extend TestCase. This method is a no-op test // method so when we are detected as a JUnit3 test, we do not fail public void testMethodSoThisIsValidJUnit3() {} } diff --git a/DB/src/test/java/io/deephaven/db/v2/LiveTableTestCase.java b/DB/src/test/java/io/deephaven/db/v2/LiveTableTestCase.java index f1401cfe9c8..8820ec4789d 100644 --- a/DB/src/test/java/io/deephaven/db/v2/LiveTableTestCase.java +++ b/DB/src/test/java/io/deephaven/db/v2/LiveTableTestCase.java @@ -29,7 +29,7 @@ abstract public class LiveTableTestCase extends BaseArrayTestCase implements UpdateErrorReporter { static public boolean printTableUpdates = Configuration.getInstance() - .getBooleanForClassWithDefault(LiveTableTestCase.class, "printTableUpdates", false); + .getBooleanForClassWithDefault(LiveTableTestCase.class, "printTableUpdates", false); private boolean oldMemoize; private UpdateErrorReporter oldReporter; @@ -62,8 +62,7 @@ protected void tearDown() throws Exception { @Override public void reportUpdateError(Throwable t) throws IOException { if (!expectError) { - System.err.println( - "Received error notification: " + new ExceptionDetails(t).getFullStackTrace()); + System.err.println("Received error notification: " + new ExceptionDetails(t).getFullStackTrace()); TestCase.fail(t.getMessage()); } if (errors == null) { @@ -109,32 +108,26 @@ public void allowingError(Runnable function, Predicate> errorsAc }, errorsAcceptable); } - protected static void simulateShiftAwareStep(int targetUpdateSize, Random random, - QueryTable table, TstUtils.ColumnInfo[] columnInfo, EvalNuggetInterface[] en) { + protected static void simulateShiftAwareStep(int 
targetUpdateSize, Random random, QueryTable table, + TstUtils.ColumnInfo[] columnInfo, EvalNuggetInterface[] en) { simulateShiftAwareStep("", targetUpdateSize, random, table, columnInfo, en); } - public static void simulateShiftAwareStep(final String ctxt, int targetUpdateSize, - Random random, QueryTable table, TstUtils.ColumnInfo[] columnInfo, - EvalNuggetInterface[] en) { - simulateShiftAwareStep(GenerateTableUpdates.DEFAULT_PROFILE, ctxt, targetUpdateSize, random, - table, columnInfo, en); + public static void simulateShiftAwareStep(final String ctxt, int targetUpdateSize, Random random, QueryTable table, + TstUtils.ColumnInfo[] columnInfo, EvalNuggetInterface[] en) { + simulateShiftAwareStep(GenerateTableUpdates.DEFAULT_PROFILE, ctxt, targetUpdateSize, random, table, columnInfo, + en); } - protected static void simulateShiftAwareStep( - final GenerateTableUpdates.SimulationProfile simulationProfile, final String ctxt, - int targetUpdateSize, Random random, QueryTable table, TstUtils.ColumnInfo[] columnInfo, - EvalNuggetInterface[] en) { - LiveTableMonitor.DEFAULT.runWithinUnitTestCycle( - () -> GenerateTableUpdates.generateShiftAwareTableUpdates(simulationProfile, - targetUpdateSize, random, table, columnInfo)); + protected static void simulateShiftAwareStep(final GenerateTableUpdates.SimulationProfile simulationProfile, + final String ctxt, int targetUpdateSize, Random random, QueryTable table, TstUtils.ColumnInfo[] columnInfo, + EvalNuggetInterface[] en) { + LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> GenerateTableUpdates + .generateShiftAwareTableUpdates(simulationProfile, targetUpdateSize, random, table, columnInfo)); TstUtils.validate(ctxt, en); - // The EvalNugget test cases end up generating very big listener DAGs, for at each step we - // create a brand new - // live incarnation of the table. This can make debugging a bit awkward, so sometimes it is - // convenient to - // prune the tree after each validation. 
The reason not to do it, however, is that this will - // sometimes expose + // The EvalNugget test cases end up generating very big listener DAGs, for at each step we create a brand new + // live incarnation of the table. This can make debugging a bit awkward, so sometimes it is convenient to + // prune the tree after each validation. The reason not to do it, however, is that this will sometimes expose // bugs with shared indices getting updated. // System.gc(); } diff --git a/DB/src/test/java/io/deephaven/db/v2/MultiColumnSortTest.java b/DB/src/test/java/io/deephaven/db/v2/MultiColumnSortTest.java index b04ea103f9b..11ebbc1d426 100644 --- a/DB/src/test/java/io/deephaven/db/v2/MultiColumnSortTest.java +++ b/DB/src/test/java/io/deephaven/db/v2/MultiColumnSortTest.java @@ -48,49 +48,46 @@ private void testMultiColumnSort(int seed, int size) { final Random random = new Random(seed); final Table table = getTable(size, random, - initColumnInfos( - new String[] {"Sym", "intCol", "doubleCol", "floatCol", "longCol", "shortCol", - "byteCol", "charCol", "boolCol", "bigI", "bigD"}, - new TstUtils.SetGenerator<>("a", "b", "c", "d", "e", "f", "g"), - new TstUtils.IntGenerator(10, 100), - new TstUtils.SetGenerator<>(10.1, 20.1, 30.1), - new TstUtils.FloatGenerator(100.0f, 2000.f), - new TstUtils.LongGenerator(), - new TstUtils.ShortGenerator(), - new TstUtils.ByteGenerator(), - new TstUtils.CharGenerator('A', 'Z'), - new TstUtils.BooleanGenerator(), - new TstUtils.BigIntegerGenerator(BigInteger.valueOf(100000), - BigInteger.valueOf(100100)), - new TstUtils.BigDecimalGenerator(BigInteger.valueOf(100000), - BigInteger.valueOf(100100)))); + initColumnInfos( + new String[] {"Sym", "intCol", "doubleCol", "floatCol", "longCol", "shortCol", "byteCol", + "charCol", "boolCol", "bigI", "bigD"}, + new TstUtils.SetGenerator<>("a", "b", "c", "d", "e", "f", "g"), + new TstUtils.IntGenerator(10, 100), + new TstUtils.SetGenerator<>(10.1, 20.1, 30.1), + new TstUtils.FloatGenerator(100.0f, 
2000.f), + new TstUtils.LongGenerator(), + new TstUtils.ShortGenerator(), + new TstUtils.ByteGenerator(), + new TstUtils.CharGenerator('A', 'Z'), + new TstUtils.BooleanGenerator(), + new TstUtils.BigIntegerGenerator(BigInteger.valueOf(100000), BigInteger.valueOf(100100)), + new TstUtils.BigDecimalGenerator(BigInteger.valueOf(100000), BigInteger.valueOf(100100)))); final List columnNames = new ArrayList<>(table.getColumnSourceMap().keySet()); doMultiColumnTest(table, SortPair.ascending("boolCol"), SortPair.descending("Sym")); for (String outerColumn : columnNames) { - final SortPair outerPair = random.nextBoolean() ? SortPair.ascending(outerColumn) - : SortPair.descending(outerColumn); + final SortPair outerPair = + random.nextBoolean() ? SortPair.ascending(outerColumn) : SortPair.descending(outerColumn); for (String innerColumn : columnNames) { if (innerColumn.equals(outerColumn)) { continue; } - final SortPair innerPair = random.nextBoolean() ? SortPair.ascending(innerColumn) - : SortPair.descending(innerColumn); + final SortPair innerPair = + random.nextBoolean() ? SortPair.ascending(innerColumn) : SortPair.descending(innerColumn); doMultiColumnTest(table, outerPair, innerPair); } } - // now let each type have a chance at being in the middle, but pick something else as the - // outer type + // now let each type have a chance at being in the middle, but pick something else as the outer type for (String middleColumn : columnNames) { final String outerColumn = oneOf(columnNames, middleColumn); final String innerColumn = oneOf(columnNames, middleColumn, outerColumn); - final SortPair outerPair = random.nextBoolean() ? SortPair.ascending(outerColumn) - : SortPair.descending(outerColumn); - final SortPair innerPair = random.nextBoolean() ? SortPair.ascending(innerColumn) - : SortPair.descending(innerColumn); + final SortPair outerPair = + random.nextBoolean() ? 
SortPair.ascending(outerColumn) : SortPair.descending(outerColumn); + final SortPair innerPair = + random.nextBoolean() ? SortPair.ascending(innerColumn) : SortPair.descending(innerColumn); doMultiColumnTest(table, outerPair, SortPair.ascending(middleColumn), innerPair); doMultiColumnTest(table, outerPair, SortPair.descending(middleColumn), innerPair); @@ -115,8 +112,7 @@ private void doMultiColumnTest(Table table, SortPair... sortPairs) { } private void checkSort(Table sorted, SortPair[] sortPairs) { - final String[] columns = - Arrays.stream(sortPairs).map(SortPair::getColumn).toArray(String[]::new); + final String[] columns = Arrays.stream(sortPairs).map(SortPair::getColumn).toArray(String[]::new); Object[] lastRow = sorted.getRecord(0, columns); @@ -129,13 +125,13 @@ private void checkSort(Table sorted, SortPair[] sortPairs) { final Comparable current = (Comparable) rowData[jj]; if (sortPairs[jj].getOrder() == SortingOrder.Ascending) { if (!leq(last, current)) { - TestCase.fail("Out of order[" + (ii - 1) + "]: !" + Arrays.toString(lastRow) - + " <= [" + ii + "] " + Arrays.toString(rowData)); + TestCase.fail("Out of order[" + (ii - 1) + "]: !" + Arrays.toString(lastRow) + " <= [" + ii + + "] " + Arrays.toString(rowData)); } } else { if (!geq(last, current)) { - TestCase.fail("Out of order[" + (ii - 1) + "]: !" + Arrays.toString(lastRow) - + " >= [" + ii + "] " + Arrays.toString(rowData)); + TestCase.fail("Out of order[" + (ii - 1) + "]: !" 
+ Arrays.toString(lastRow) + " >= [" + ii + + "] " + Arrays.toString(rowData)); } } if (!Objects.equals(last, current)) { @@ -180,11 +176,9 @@ private boolean geq(Comparable last, Comparable current) { public void benchmarkTest() { { final EnumStringColumnGenerator enumStringCol1 = - (EnumStringColumnGenerator) BenchmarkTools.stringCol("Enum1", 10000, 6, 6, - 0xB00FB00F); + (EnumStringColumnGenerator) BenchmarkTools.stringCol("Enum1", 10000, 6, 6, 0xB00FB00F); final EnumStringColumnGenerator enumStringCol2 = - (EnumStringColumnGenerator) BenchmarkTools.stringCol("Enum2", 1000, 6, 6, - 0xF00DF00D); + (EnumStringColumnGenerator) BenchmarkTools.stringCol("Enum2", 1000, 6, 6, 0xF00DF00D); final BenchmarkTableBuilder builder; final int actualSize = BenchmarkTools.sizeWithSparsity(25000000, 90); @@ -194,22 +188,21 @@ public void benchmarkTest() { builder = BenchmarkTools.persistentTableBuilder("Carlos", actualSize); final BenchmarkTable bmTable = builder - .setSeed(0xDEADBEEF) - .addColumn(BenchmarkTools.stringCol("PartCol", 4, 5, 7, 0xFEEDBEEF)) - .addColumn(BenchmarkTools.numberCol("I1", int.class)) - .addColumn(BenchmarkTools.numberCol("D1", double.class, -10e6, 10e6)) - .addColumn(BenchmarkTools.numberCol("L1", long.class)) - .addColumn(enumStringCol1) - .addColumn(enumStringCol2) - .build(); + .setSeed(0xDEADBEEF) + .addColumn(BenchmarkTools.stringCol("PartCol", 4, 5, 7, 0xFEEDBEEF)) + .addColumn(BenchmarkTools.numberCol("I1", int.class)) + .addColumn(BenchmarkTools.numberCol("D1", double.class, -10e6, 10e6)) + .addColumn(BenchmarkTools.numberCol("L1", long.class)) + .addColumn(enumStringCol1) + .addColumn(enumStringCol2) + .build(); final long startGen = System.currentTimeMillis(); System.out.println(new Date(startGen) + " Generating Table."); final Table table = bmTable.getTable(); final long endGen = System.currentTimeMillis(); - System.out - .println(new Date(endGen) + " Completed generate in " + (endGen - startGen) + "ms"); + System.out.println(new Date(endGen) 
+ " Completed generate in " + (endGen - startGen) + "ms"); final long startSort = System.currentTimeMillis(); System.out.println(new Date(startSort) + " Starting sort."); diff --git a/DB/src/test/java/io/deephaven/db/v2/QueryFactory.java b/DB/src/test/java/io/deephaven/db/v2/QueryFactory.java index bada234c194..19d896a4595 100644 --- a/DB/src/test/java/io/deephaven/db/v2/QueryFactory.java +++ b/DB/src/test/java/io/deephaven/db/v2/QueryFactory.java @@ -7,12 +7,10 @@ /** - * Class to generate query strings. You can use the along with the getTablePreamble() function to - * create queries. + * Class to generate query strings. You can use the along with the getTablePreamble() function to create queries. *

      *

      - * To use on a remote server jar up this class and stick it in /etc/sysconfig/deephaven.d/java_lib/ - * on all nodes. + * To use on a remote server jar up this class and stick it in /etc/sysconfig/deephaven.d/java_lib/ on all nodes. *

      * Example: *

      @@ -26,8 +24,7 @@ *

      * import io.deephaven.db.v2.QueryFactory *

      - * qf = new QueryFactory() queryPart1 = qf.getTablePreamble(123L) queryPart2 = - * qf.generateQuery(456L) + * qf = new QueryFactory() queryPart1 = qf.getTablePreamble(123L) queryPart2 = qf.generateQuery(456L) *

      * __groovySession.evaluate(queryPart1) __groovySession.evaluate(queryPart2) */ @@ -54,39 +51,36 @@ public class QueryFactory { // default values private static final String DEFAULT_TABLE_ONE = "randomValues"; private static final String DEFAULT_TABLE_TWO = "tickingValues"; - private static final String[] DEFAULT_COLUMN_NAMES = - {"Timestamp", "MyString", "MyInt", "MyLong", "MyFloat", "MyDouble", "MyBoolean", "MyChar", - "MyShort", "MyByte", "MyBigDecimal", "MyBigInteger"}; - private static final Class[] DEFAULT_COLUMN_TYPES = {DBDateTime.class, String.class, - Integer.class, Long.class, Float.class, Double.class, Boolean.class, Character.class, - short.class, byte.class, java.math.BigDecimal.class, java.math.BigInteger.class}; + private static final String[] DEFAULT_COLUMN_NAMES = {"Timestamp", "MyString", "MyInt", "MyLong", "MyFloat", + "MyDouble", "MyBoolean", "MyChar", "MyShort", "MyByte", "MyBigDecimal", "MyBigInteger"}; + private static final Class[] DEFAULT_COLUMN_TYPES = + {DBDateTime.class, String.class, Integer.class, Long.class, Float.class, Double.class, Boolean.class, + Character.class, short.class, byte.class, java.math.BigDecimal.class, java.math.BigInteger.class}; // Copy and modify this block of code if you want to disable an operation. 
- private static final String[] IMPLEMENTED_OPS = {"where", "merge", "flatten", "slice", "head", - "tail", "headPct", "tailPct", "reverse", "sort", "byOpp", "aggCombo", "byExternal"}; - private static final String[] CHANGING_AGG = {"AggSum", "AggVar", "AggStd", "AggArray", - "AggCount", "AggWAvg", "AggDistinct", "AggCountDistinct"}; + private static final String[] IMPLEMENTED_OPS = {"where", "merge", "flatten", "slice", "head", "tail", "headPct", + "tailPct", "reverse", "sort", "byOpp", "aggCombo", "byExternal"}; + private static final String[] CHANGING_AGG = + {"AggSum", "AggVar", "AggStd", "AggArray", "AggCount", "AggWAvg", "AggDistinct", "AggCountDistinct"}; private static final String[] CHANGING_BY = - {"avgBy", "sumBy", "stdBy", "varBy", "countBy", "medianBy", "percentileBy"}; + {"avgBy", "sumBy", "stdBy", "varBy", "countBy", "medianBy", "percentileBy"}; private static final String[] ROLLUP_AGG = - {"AggSum", "AggVar", "AggStd", "AggCount", "AggMin", "AggMax", "AggFirst", "AggLast"}; + {"AggSum", "AggVar", "AggStd", "AggCount", "AggMin", "AggMax", "AggFirst", "AggLast"}; private static final String[] SAFE_AGG = {"AggMin", "AggMax", "AggFirst", "AggLast"}; - private static final String[] SAFE_BY = - {"maxBy", "minBy", "firstBy", "lastBy", "sortedFirstBy", "sortedLastBy"}; + private static final String[] SAFE_BY = {"maxBy", "minBy", "firstBy", "lastBy", "sortedFirstBy", "sortedLastBy"}; private static final String[] FINAL_OPS = - {"selectDistinct", "byOperation", "aggCombo", "treeTable", "rollup", "applyToAllBy"}; - private static final HashMap DEFAULT_SWITCH_CONTROL = - new HashMap() { - { - put("supportedOps", IMPLEMENTED_OPS); - put("changingAgg", CHANGING_AGG); - put("changingBy", CHANGING_BY); - put("safeAgg", SAFE_AGG); - put("safeBy", SAFE_BY); - put("rollupAgg", ROLLUP_AGG); - put("finalOps", FINAL_OPS); + {"selectDistinct", "byOperation", "aggCombo", "treeTable", "rollup", "applyToAllBy"}; + private static final HashMap DEFAULT_SWITCH_CONTROL = new 
HashMap() { + { + put("supportedOps", IMPLEMENTED_OPS); + put("changingAgg", CHANGING_AGG); + put("changingBy", CHANGING_BY); + put("safeAgg", SAFE_AGG); + put("safeBy", SAFE_BY); + put("rollupAgg", ROLLUP_AGG); + put("finalOps", FINAL_OPS); - } - }; + } + }; private static final Set NUMERIC_TYPES = new HashSet() { { add(Integer.class); @@ -107,10 +101,9 @@ public class QueryFactory { }; @SuppressWarnings("WeakerAccess") - public QueryFactory(int numberOfOperations, boolean finalOperationChangesTypes, - boolean doSelectHalfWay, boolean doJoinMostOfTheWayIn, String firstTableName, - String secondTableName, String[] columnNames, Class[] columnTypes, - Map switchControlValues) { + public QueryFactory(int numberOfOperations, boolean finalOperationChangesTypes, boolean doSelectHalfWay, + boolean doJoinMostOfTheWayIn, String firstTableName, String secondTableName, String[] columnNames, + Class[] columnTypes, Map switchControlValues) { this.numberOfOperations = numberOfOperations; this.finalOperationChangesTypes = finalOperationChangesTypes; this.doSelectHalfWay = doSelectHalfWay; @@ -134,11 +127,9 @@ public QueryFactory(int numberOfOperations, boolean finalOperationChangesTypes, } @SuppressWarnings("WeakerAccess") - public QueryFactory(int numberOfOperations, boolean finalOperationChangesTypes, - boolean doSelectHalfWay) { - this(numberOfOperations, finalOperationChangesTypes, doSelectHalfWay, true, - DEFAULT_TABLE_ONE, DEFAULT_TABLE_TWO, DEFAULT_COLUMN_NAMES, DEFAULT_COLUMN_TYPES, - DEFAULT_SWITCH_CONTROL); + public QueryFactory(int numberOfOperations, boolean finalOperationChangesTypes, boolean doSelectHalfWay) { + this(numberOfOperations, finalOperationChangesTypes, doSelectHalfWay, true, DEFAULT_TABLE_ONE, + DEFAULT_TABLE_TWO, DEFAULT_COLUMN_NAMES, DEFAULT_COLUMN_TYPES, DEFAULT_SWITCH_CONTROL); } @SuppressWarnings("WeakerAccess") @@ -174,13 +165,12 @@ private String stringArrayToSingleArgumentList(Collection values) { * Adds the string "table[seedName]_opNum = 
table[seedName]_opNum-1" to the StringBuilder */ private void addNormalTableSegment(StringBuilder opChain, String nameSeed, int opNum) { - opChain.append("table").append(nameSeed).append("_").append(opNum).append(" = table") - .append(nameSeed).append("_").append(opNum - 1); + opChain.append("table").append(nameSeed).append("_").append(opNum).append(" = table").append(nameSeed) + .append("_").append(opNum - 1); } /** - * Create a new query from a given long seed value. This query will always be the same for the - * given seed value. + * Create a new query from a given long seed value. This query will always be the same for the given seed value. * * @param seed Seed you want to use. * @return String query @@ -197,8 +187,7 @@ public String generateQuery(long seed) { opChain.append(tableNameTwo).append(".select();\n"); } - opChain.append("table").append(nameSeed).append("_0.setAttribute(\"TESTSEED\",") - .append(seed).append(");\n"); + opChain.append("table").append(nameSeed).append("_0.setAttribute(\"TESTSEED\",").append(seed).append(");\n"); for (int opNum = 1; opNum <= numberOfOperations; ++opNum) { @@ -214,7 +203,7 @@ public String generateQuery(long seed) { if (finalOperationChangesTypes && opNum == numberOfOperations) { // Pick by or combo style String operation = this.switchControlValues.get("finalOps")[queryRandom - .nextInt(switchControlValues.get("finalOps").length)]; + .nextInt(switchControlValues.get("finalOps").length)]; switch (operation) { case "selectDistinct": addNormalTableSegment(opChain, nameSeed, opNum); @@ -222,12 +211,11 @@ public String generateQuery(long seed) { opChain.append(".selectDistinct(\"").append(col).append("\");\n"); break; case "byOperation": - addByOperation(opNum, opChain, queryRandom, - switchControlValues.get("changingBy"), nameSeed); + addByOperation(opNum, opChain, queryRandom, switchControlValues.get("changingBy"), nameSeed); break; case "aggCombo": - addComboOperation(opNum, opChain, queryRandom, - 
switchControlValues.get("changingAgg"), nameSeed); + addComboOperation(opNum, opChain, queryRandom, switchControlValues.get("changingAgg"), + nameSeed); break; case "treeTable": addTreeTableOperation(opNum, opChain, queryRandom, nameSeed); @@ -274,42 +262,36 @@ private void addSelectOperation(int opNum, StringBuilder opChain, String nameSee final String nextTableName = "table" + nameSeed + "_" + opNum; final String lastTableName = "table" + nameSeed + "_" + (opNum - 1); opChain.append(nextTableName).append("prime = "); - opChain.append("io.deephaven.db.v2.SelectOverheadLimiter.clampSelectOverhead(") - .append(lastTableName).append(", 10.0d);\n"); - opChain.append(nextTableName).append(" = ").append(nextTableName).append("prime") - .append(".select();\n"); + opChain.append("io.deephaven.db.v2.SelectOverheadLimiter.clampSelectOverhead(").append(lastTableName) + .append(", 10.0d);\n"); + opChain.append(nextTableName).append(" = ").append(nextTableName).append("prime").append(".select();\n"); } - private void addMergeOperation(int opNum, StringBuilder opChain, Random random, - String nameSeed) { + private void addMergeOperation(int opNum, StringBuilder opChain, Random random, String nameSeed) { final int style = random.nextInt(10);// Make the old style rare if (style != 0) { // make sure we have at least some values - opChain.append("map = merge(table").append(nameSeed).append("_").append(opNum - 1) - .append(", "); + opChain.append("map = merge(table").append(nameSeed).append("_").append(opNum - 1).append(", "); opChain.append(tableNameOne).append(".head(1L)").append(", "); opChain.append(tableNameTwo).append(".head(1L)").append(")"); - opChain.append(".byExternal(\"").append(columnNames[random.nextInt(columnNames.length)]) - .append("\");\n"); + opChain.append(".byExternal(\"").append(columnNames[random.nextInt(columnNames.length)]).append("\");\n"); opChain.append("table").append(nameSeed).append("_").append(opNum).append(" = "); 
opChain.append("map.asTable().merge();\n"); } else { opChain.append("table").append(nameSeed).append("_").append(opNum).append(" = merge("); if (random.nextInt(2) == 0) { - opChain.append("table").append(nameSeed).append("_").append(opNum - 1).append(",") - .append(tableNameTwo); + opChain.append("table").append(nameSeed).append("_").append(opNum - 1).append(",").append(tableNameTwo); } else { - opChain.append("table").append(nameSeed).append("_").append(opNum - 1).append(",") - .append(tableNameOne); + opChain.append("table").append(nameSeed).append("_").append(opNum - 1).append(",").append(tableNameOne); } opChain.append(");\n"); } } - private void addByOperation(int opNum, StringBuilder opChain, Random random, - String[] possibleByOperations, String nameSeed) { + private void addByOperation(int opNum, StringBuilder opChain, Random random, String[] possibleByOperations, + String nameSeed) { final String operation = possibleByOperations[random.nextInt(possibleByOperations.length)]; final int numberOfColumns = random.nextInt(3) + 1; @@ -331,71 +313,57 @@ private void addByOperation(int opNum, StringBuilder opChain, Random random, switch (operation) { case "avgBy": - opChain.append(".dropColumns(") - .append(stringArrayToMultipleStringArgumentList(nonNumericColumns)) - .append(").avgBy(").append(stringArrayToMultipleStringArgumentList(numericCols)) - .append(");\n"); + opChain.append(".dropColumns(").append(stringArrayToMultipleStringArgumentList(nonNumericColumns)) + .append(").avgBy(").append(stringArrayToMultipleStringArgumentList(numericCols)).append(");\n"); break; case "sumBy": - opChain.append(".dropColumns(") - .append(stringArrayToMultipleStringArgumentList(nonNumericColumns)) - .append(").sumBy(").append(stringArrayToMultipleStringArgumentList(numericCols)) - .append(");\n"); + opChain.append(".dropColumns(").append(stringArrayToMultipleStringArgumentList(nonNumericColumns)) + 
.append(").sumBy(").append(stringArrayToMultipleStringArgumentList(numericCols)).append(");\n"); break; case "stdBy": - opChain.append(".dropColumns(") - .append(stringArrayToMultipleStringArgumentList(nonNumericColumns)) - .append(").stdBy(").append(stringArrayToMultipleStringArgumentList(numericCols)) - .append(");\n"); + opChain.append(".dropColumns(").append(stringArrayToMultipleStringArgumentList(nonNumericColumns)) + .append(").stdBy(").append(stringArrayToMultipleStringArgumentList(numericCols)).append(");\n"); break; case "varBy": - opChain.append(".dropColumns(") - .append(stringArrayToMultipleStringArgumentList(nonNumericColumns)) - .append(").varBy(").append(stringArrayToMultipleStringArgumentList(numericCols)) - .append(");\n"); + opChain.append(".dropColumns(").append(stringArrayToMultipleStringArgumentList(nonNumericColumns)) + .append(").varBy(").append(stringArrayToMultipleStringArgumentList(numericCols)).append(");\n"); break; case "medianBy": - opChain.append(".medianBy(") - .append(stringArrayToMultipleStringArgumentList(anyCols)).append(");\n"); + opChain.append(".medianBy(").append(stringArrayToMultipleStringArgumentList(anyCols)).append(");\n"); break; case "countBy": - opChain.append(".countBy(").append(stringArrayToMultipleStringArgumentList(anyCols)) - .append(");\n"); + opChain.append(".countBy(").append(stringArrayToMultipleStringArgumentList(anyCols)).append(");\n"); break; case "maxBy": - opChain.append(".maxBy(").append(stringArrayToMultipleStringArgumentList(anyCols)) - .append(");\n"); + opChain.append(".maxBy(").append(stringArrayToMultipleStringArgumentList(anyCols)).append(");\n"); break; case "minBy": - opChain.append(".minBy(").append(stringArrayToMultipleStringArgumentList(anyCols)) - .append(");\n"); + opChain.append(".minBy(").append(stringArrayToMultipleStringArgumentList(anyCols)).append(");\n"); break; case "firstBy": - opChain.append(".firstBy(").append(stringArrayToMultipleStringArgumentList(anyCols)) - .append(");\n"); 
+ opChain.append(".firstBy(").append(stringArrayToMultipleStringArgumentList(anyCols)).append(");\n"); break; case "lastBy": - opChain.append(".lastBy(").append(stringArrayToMultipleStringArgumentList(anyCols)) - .append(");\n"); + opChain.append(".lastBy(").append(stringArrayToMultipleStringArgumentList(anyCols)).append(");\n"); break; case "sortedFirstBy": - opChain.append(".by( new SortedFirstBy(") - .append(stringArrayToMultipleStringArgumentList(anyCols)).append("));\n"); + opChain.append(".by( new SortedFirstBy(").append(stringArrayToMultipleStringArgumentList(anyCols)) + .append("));\n"); break; case "sortedLastBy": - opChain.append(".by( new SortedLastBy(") - .append(stringArrayToMultipleStringArgumentList(anyCols)).append("));\n"); + opChain.append(".by( new SortedLastBy(").append(stringArrayToMultipleStringArgumentList(anyCols)) + .append("));\n"); break; case "percentileBy": @@ -404,15 +372,13 @@ private void addByOperation(int opNum, StringBuilder opChain, Random random, default: - throw new RuntimeException( - "By operation(" + operation + ") not found in switch statement"); + throw new RuntimeException("By operation(" + operation + ") not found in switch statement"); } } - private void addApplyToAllByOperation(int opNum, StringBuilder opChain, Random random, - String nameSeed) { + private void addApplyToAllByOperation(int opNum, StringBuilder opChain, Random random, String nameSeed) { addNormalTableSegment(opChain, nameSeed, opNum); // getDirect() is required for previous value to work. 
see IDS-6257 opChain.append(".applyToAllBy(\"each.getDirect().subArray(0L,1L)\","); @@ -430,11 +396,9 @@ private void addApplyToAllByOperation(int opNum, StringBuilder opChain, Random r } - private void addRollupOperation(int opNum, StringBuilder opChain, Random random, - String nameSeed) { + private void addRollupOperation(int opNum, StringBuilder opChain, Random random, String nameSeed) { addNormalTableSegment(opChain, nameSeed, opNum); - final ArrayList aggSet = - new ArrayList<>(Arrays.asList(switchControlValues.get("rollupAgg"))); + final ArrayList aggSet = new ArrayList<>(Arrays.asList(switchControlValues.get("rollupAgg"))); final ArrayList activeAggs = new ArrayList<>(); final ArrayList colSet = new ArrayList<>(numericColumns); final int numOfAggs = random.nextInt(colSet.size() - 1) + 1; @@ -464,55 +428,46 @@ private void addRollupOperation(int opNum, StringBuilder opChain, Random random, } - private void addTreeTableOperation(int opNum, StringBuilder opChain, Random random, - String nameSeed) { - StringBuilder previousTableName = - new StringBuilder("table").append(nameSeed).append("_").append(opNum - 1); + private void addTreeTableOperation(int opNum, StringBuilder opChain, Random random, String nameSeed) { + StringBuilder previousTableName = new StringBuilder("table").append(nameSeed).append("_").append(opNum - 1); String columnName = columnNames[random.nextInt(columnNames.length)]; - opChain.append("part2 = ").append(previousTableName).append(".selectDistinct(\"") - .append(columnName).append("\").update(\"Parent= (String) null\",\"ID= `T`+") - .append(columnName).append("\")"); + opChain.append("part2 = ").append(previousTableName).append(".selectDistinct(\"").append(columnName) + .append("\").update(\"Parent= (String) null\",\"ID= `T`+").append(columnName).append("\")"); opChain.append(".update("); for (int colNumber = 0; colNumber < columnNames.length; ++colNumber) { - opChain.append("\"").append(columnNames[colNumber]).append(" = (") - 
.append(columnTypes[colNumber].getName()).append(") null \","); + opChain.append("\"").append(columnNames[colNumber]).append(" = (").append(columnTypes[colNumber].getName()) + .append(") null \","); } opChain.deleteCharAt(opChain.length() - 1); opChain.append(");\n"); - opChain.append("atomicLong_").append(nameSeed) - .append(" = new java.util.concurrent.atomic.AtomicLong();\n"); - opChain.append("part1 = ").append(previousTableName).append(".update(\"ID=`a`+atomicLong_") - .append(nameSeed).append(".getAndIncrement()\","); + opChain.append("atomicLong_").append(nameSeed).append(" = new java.util.concurrent.atomic.AtomicLong();\n"); + opChain.append("part1 = ").append(previousTableName).append(".update(\"ID=`a`+atomicLong_").append(nameSeed) + .append(".getAndIncrement()\","); opChain.append(" \"Parent = `T`+").append(columnName).append("\");\n"); - opChain.append("tree").append(nameSeed) - .append(" = merge(part1,part2).treeTable(\"ID\",\"Parent\");\n"); + opChain.append("tree").append(nameSeed).append(" = merge(part1,part2).treeTable(\"ID\",\"Parent\");\n"); } - private void addByExternalOperation(int opNum, StringBuilder opChain, Random random, - String nameSeed) { - final StringBuilder mapName = - new StringBuilder("map").append(nameSeed).append("_").append(opNum); + private void addByExternalOperation(int opNum, StringBuilder opChain, Random random, String nameSeed) { + final StringBuilder mapName = new StringBuilder("map").append(nameSeed).append("_").append(opNum); final StringBuilder previousTableName = - new StringBuilder("table").append(nameSeed).append("_").append(opNum - 1); + new StringBuilder("table").append(nameSeed).append("_").append(opNum - 1); opChain.append(mapName).append(" = ").append(previousTableName).append(".byExternal(\"") - .append(columnNames[random.nextInt(columnNames.length)]).append("\");\n"); + .append(columnNames[random.nextInt(columnNames.length)]).append("\");\n"); 
opChain.append("table").append(nameSeed).append("_").append(opNum).append(" = "); - opChain.append(mapName).append(".getKeySet().length == 0 ? ").append(previousTableName) - .append(" : "); + opChain.append(mapName).append(".getKeySet().length == 0 ? ").append(previousTableName).append(" : "); opChain.append(mapName).append(".get(").append(mapName).append(".getKeySet()[0]);\n"); } - private void addJoinOperation(int opNum, StringBuilder opChain, Random random, - String nameSeed) { + private void addJoinOperation(int opNum, StringBuilder opChain, Random random, String nameSeed) { addNormalTableSegment(opChain, nameSeed, opNum); String joinCol = columnNames[random.nextInt(columnNames.length)]; ArrayList colsToJoin = new ArrayList<>(); @@ -532,7 +487,7 @@ private void addJoinOperation(int opNum, StringBuilder opChain, Random random, opChain.append(".dropColumns(").append(stringArrayToMultipleStringArgumentList(colsToJoin)) - .append(").flatten().join("); + .append(").flatten().join("); if (random.nextInt(2) == 0) { opChain.append(tableNameOne); @@ -540,13 +495,12 @@ private void addJoinOperation(int opNum, StringBuilder opChain, Random random, opChain.append(tableNameTwo); } - opChain.append(",\"").append(joinCol).append("\",") - .append(stringArrayToSingleArgumentList(colsToJoin)).append(");\n"); + opChain.append(",\"").append(joinCol).append("\",").append(stringArrayToSingleArgumentList(colsToJoin)) + .append(");\n"); } - private void addNaturalJoinOperation(int opNum, StringBuilder opChain, Random random, - String nameSeed) { + private void addNaturalJoinOperation(int opNum, StringBuilder opChain, Random random, String nameSeed) { final ArrayList columnsToDrop = new ArrayList<>(); final ArrayList columnsToMatch = new ArrayList<>(); @@ -564,16 +518,15 @@ private void addNaturalJoinOperation(int opNum, StringBuilder opChain, Random ra } addNormalTableSegment(opChain, nameSeed, opNum); - opChain.append(".dropColumns(") - 
.append(stringArrayToMultipleStringArgumentList(columnsToDrop)).append(")"); + opChain.append(".dropColumns(").append(stringArrayToMultipleStringArgumentList(columnsToDrop)).append(")"); opChain.append(".naturalJoin(table").append(nameSeed).append("_0.lastBy(") - .append(stringArrayToMultipleStringArgumentList(columnsToMatch)).append("),"); + .append(stringArrayToMultipleStringArgumentList(columnsToMatch)).append("),"); opChain.append(stringArrayToSingleArgumentList(columnsToMatch)).append(", ") - .append(stringArrayToSingleArgumentList(columnsToDrop)).append(");\n"); + .append(stringArrayToSingleArgumentList(columnsToDrop)).append(");\n"); } - private void addComboOperation(int opNum, StringBuilder opChain, Random random, - String[] possiblyComboOperation, String nameSeed) { + private void addComboOperation(int opNum, StringBuilder opChain, Random random, String[] possiblyComboOperation, + String nameSeed) { // combo style op // AggSum, AggVar, AggAvg, AggStd, AggArray, AggCount ArrayList aggSet = new ArrayList<>(Arrays.asList(possiblyComboOperation)); @@ -641,8 +594,7 @@ private void addComboOperation(int opNum, StringBuilder opChain, Random random, break; case "AggCount": - opChain.append("AggCount(\"").append(argLists.get(aggNum).get(0)) - .append("\"),"); + opChain.append("AggCount(\"").append(argLists.get(aggNum).get(0)).append("\"),"); safeOp = false; continue; @@ -677,20 +629,17 @@ private void addComboOperation(int opNum, StringBuilder opChain, Random random, break; case "AggWAvg": - // Can't use BigInteger or BigDecimal on here. Create a new set for this. Make - // sure the columns are renamed - String[] wightedAverageCols = - {"MyInt", "MyLong", "MyFloat", "MyDouble", "MyShort", "MyByte"}; - ArrayList otherColSet = - new ArrayList<>(Arrays.asList(wightedAverageCols)); + // Can't use BigInteger or BigDecimal on here. Create a new set for this. 
Make sure the columns are + // renamed + String[] wightedAverageCols = {"MyInt", "MyLong", "MyFloat", "MyDouble", "MyShort", "MyByte"}; + ArrayList otherColSet = new ArrayList<>(Arrays.asList(wightedAverageCols)); int otherColNum = 1; int numOfColumns = random.nextInt(otherColSet.size() - 1) + 1; opChain.append("AggWAvg(\"").append(otherColSet.get(0)).append("\","); Collections.shuffle(otherColSet, random); for (; otherColNum < numOfColumns; ++otherColNum) { String col = otherColSet.get(otherColNum); - opChain.append("\"other_").append(col).append(" = ").append(col) - .append("\","); + opChain.append("\"other_").append(col).append(" = ").append(col).append("\","); } opChain.deleteCharAt(opChain.length() - 1); opChain.append("),"); @@ -702,21 +651,18 @@ private void addComboOperation(int opNum, StringBuilder opChain, Random random, continue; default: - throw new RuntimeException( - "Have a bug in the aggCombo: " + activeAggs.get(aggNum) + " missing"); + throw new RuntimeException("Have a bug in the aggCombo: " + activeAggs.get(aggNum) + " missing"); } - opChain.append(stringArrayToMultipleStringArgumentList(argLists.get(aggNum))) - .append("),"); + opChain.append(stringArrayToMultipleStringArgumentList(argLists.get(aggNum))).append("),"); } opChain.deleteCharAt(opChain.length() - 1); opChain.append("))"); if (safeOp) - opChain.append(".join( table").append(nameSeed).append("_").append(opNum - 1) - .append(",\"").append(argLists.get(0).get(0)).append("\", ") - .append(stringArrayToSingleArgumentList(argLists.get(argLists.size() - 1))) - .append(");\n"); + opChain.append(".join( table").append(nameSeed).append("_").append(opNum - 1).append(",\"") + .append(argLists.get(0).get(0)).append("\", ") + .append(stringArrayToSingleArgumentList(argLists.get(argLists.size() - 1))).append(");\n"); } @@ -763,8 +709,7 @@ private String createWhereFilter(Random random) { case "char": case "Character": - filter.append("in(").append(colName).append(",'") - .append((char) 
(random.nextInt(27) + 97)).append("')"); + filter.append("in(").append(colName).append(",'").append((char) (random.nextInt(27) + 97)).append("')"); break; @@ -780,19 +725,17 @@ private String createWhereFilter(Random random) { default: - throw new RuntimeException( - "Column type not found:" + columnTypes[colNum].getSimpleName()); + throw new RuntimeException("Column type not found:" + columnTypes[colNum].getSimpleName()); } return filter.toString(); } - private void addNormalOperation(int opNum, StringBuilder opChain, Random random, - String nameSeed) { + private void addNormalOperation(int opNum, StringBuilder opChain, Random random, String nameSeed) { - String operation = switchControlValues.get("supportedOps")[random - .nextInt(switchControlValues.get("supportedOps").length)]; + String operation = + switchControlValues.get("supportedOps")[random.nextInt(switchControlValues.get("supportedOps").length)]; switch (operation) { @@ -857,13 +800,11 @@ private void addNormalOperation(int opNum, StringBuilder opChain, Random random, break; case "byOpp": - addByOperation(opNum, opChain, random, this.switchControlValues.get("safeBy"), - nameSeed); + addByOperation(opNum, opChain, random, this.switchControlValues.get("safeBy"), nameSeed); break; case "aggCombo": - addComboOperation(opNum, opChain, random, switchControlValues.get("safeAgg"), - nameSeed); + addComboOperation(opNum, opChain, random, switchControlValues.get("safeAgg"), nameSeed); break; case "join": @@ -896,69 +837,64 @@ public String getTablePreamble(Long tableSeed) { return "\n\nimport io.deephaven.db.v2.by.SortedFirstBy;\n" + - "import io.deephaven.db.v2.by.PercentileByStateFactoryImpl;\n" + - "import io.deephaven.db.v2.by.SortedLastBy;\n\n\n" + - "tableSeed = " + tableSeed + " as long;\n" + - "size = 100 as int;\n" + - "scale = 1000 as int;\n" + - "useRandomNullPoints = true as boolean;\n" + - "tableRandom = new Random(tableSeed) as Random;\n\n" + - "columnRandoms = new Random[11] as Random[];\n" + - 
"for(int colNum =0; colNum<11;++colNum) {\n" + - "\tseed = tableRandom.nextLong();\n" + - "\tSystem.out.println(\"column: \"+colNum+\"[Seed] \" + seed);\n" + - "\tcolumnRandoms[colNum] = new Random(seed);\n" + - "}\n\n" + - "tt = timeTable(\"00:00:00.1\");" + - "tickingValues = tt.update(\n" + - "\"MyString=new String(`a`+i)\",\n" + - "\"MyInt=new Integer(i)\",\n" + - "\"MyLong=new Long(i)\",\n" + - "\"MyDouble=new Double(i+i/10)\",\n" + - "\"MyFloat=new Float(i+i/10)\",\n" + - "\"MyBoolean=new Boolean(i%2==0)\",\n" + - "\"MyChar= new Character((char) ((i%26)+97))\",\n" + - "\"MyShort=new Short(Integer.toString(i%32767))\",\n" + - "\"MyByte= new java.lang.Byte(Integer.toString(i%127))\",\n" + - "\"MyBigDecimal= new java.math.BigDecimal(i+i/10)\",\n" + - "\"MyBigInteger= new java.math.BigInteger(Integer.toString(i))\"\n" + - ");\n" + - "\n" + - "nullPoints = new int[16] as int[];\n" + - "if (useRandomNullPoints) {\n" + - "\tfor (int k = 0; k < nullPoints.length; ++k) {\n" + - "\t\tnullPoints[k] = tableRandom.nextInt(60) + 4;\n" + - "\t}\n" + - "} else {\n" + - "\tnullPoints = [4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19] as int[];\n" - + - "}\n" + - "\n" + - "randomValues = emptyTable(size)\n" + - ".update(\"Timestamp= i%nullPoints[0] == 0 ? null : new DBDateTime(i*1_000_000_000L)\")\n" - + - ".update(\"MyString=(i%nullPoints[1] == 0 ? null : `a`+ (columnRandoms[0].nextInt(scale*2) - scale) )\",\n" - + - "\"MyInt=(i%nullPoints[2] == 0 ? null : columnRandoms[1].nextInt(scale*2) - scale )\",\n" - + - "\"MyLong=(i%nullPoints[3] ==0 ? null : (long)(columnRandoms[2].nextInt(scale*2) - scale))\",\n" - + - "\"MyFloat=(float)(i%nullPoints[4] == 0 ? null : i%nullPoints[5] == 0 ? 1.0F/0.0F: i%nullPoints[6] == 0 ? -1.0F/0.0F : (columnRandoms[3].nextFloat()-0.5)*scale)\",\n" - + - "\"MyDouble=(double)(i%nullPoints[7] == 0 ? null : i%nullPoints[8] == 0 ? 1.0D/0.0D: i%nullPoints[9] == 0 ? 
-1.0D/0.0D : (columnRandoms[4].nextDouble()-0.5)*scale)\",\n" - + - "\"MyBoolean = (i%nullPoints[10] == 0 ? null : columnRandoms[5].nextBoolean())\",\n" + - "\"MyChar = (i%nullPoints[11] == 0 ? null : new Character( (char) (columnRandoms[6].nextInt(27)+97) ) )\",\n" - + - "\"MyShort=(short)(i%nullPoints[12] == 0 ? null : columnRandoms[7].nextInt(scale*2) - scale )\",\n" - + - "\"MyByte=(Byte)(i%nullPoints[13] == 0 ? null : new Byte( Integer.toString( (int)( columnRandoms[8].nextInt(Byte.MAX_VALUE*2)-Byte.MAX_VALUE ) ) ) )\",\n" - + - "\"MyBigDecimal=(i%nullPoints[14] == 0 ? null : new java.math.BigDecimal( (columnRandoms[9].nextDouble()-0.5)*scale ))\",\n" - + - "\"MyBigInteger=(i%nullPoints[15] == 0 ? null : new java.math.BigInteger(Integer.toString(columnRandoms[10].nextInt(scale*2) - scale) ))\"\n" - + - ");\n\n"; + "import io.deephaven.db.v2.by.PercentileByStateFactoryImpl;\n" + + "import io.deephaven.db.v2.by.SortedLastBy;\n\n\n" + + "tableSeed = " + tableSeed + " as long;\n" + + "size = 100 as int;\n" + + "scale = 1000 as int;\n" + + "useRandomNullPoints = true as boolean;\n" + + "tableRandom = new Random(tableSeed) as Random;\n\n" + + "columnRandoms = new Random[11] as Random[];\n" + + "for(int colNum =0; colNum<11;++colNum) {\n" + + "\tseed = tableRandom.nextLong();\n" + + "\tSystem.out.println(\"column: \"+colNum+\"[Seed] \" + seed);\n" + + "\tcolumnRandoms[colNum] = new Random(seed);\n" + + "}\n\n" + + "tt = timeTable(\"00:00:00.1\");" + + "tickingValues = tt.update(\n" + + "\"MyString=new String(`a`+i)\",\n" + + "\"MyInt=new Integer(i)\",\n" + + "\"MyLong=new Long(i)\",\n" + + "\"MyDouble=new Double(i+i/10)\",\n" + + "\"MyFloat=new Float(i+i/10)\",\n" + + "\"MyBoolean=new Boolean(i%2==0)\",\n" + + "\"MyChar= new Character((char) ((i%26)+97))\",\n" + + "\"MyShort=new Short(Integer.toString(i%32767))\",\n" + + "\"MyByte= new java.lang.Byte(Integer.toString(i%127))\",\n" + + "\"MyBigDecimal= new java.math.BigDecimal(i+i/10)\",\n" + + "\"MyBigInteger= new 
java.math.BigInteger(Integer.toString(i))\"\n" + + ");\n" + + "\n" + + "nullPoints = new int[16] as int[];\n" + + "if (useRandomNullPoints) {\n" + + "\tfor (int k = 0; k < nullPoints.length; ++k) {\n" + + "\t\tnullPoints[k] = tableRandom.nextInt(60) + 4;\n" + + "\t}\n" + + "} else {\n" + + "\tnullPoints = [4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19] as int[];\n" + + "}\n" + + "\n" + + "randomValues = emptyTable(size)\n" + + ".update(\"Timestamp= i%nullPoints[0] == 0 ? null : new DBDateTime(i*1_000_000_000L)\")\n" + + ".update(\"MyString=(i%nullPoints[1] == 0 ? null : `a`+ (columnRandoms[0].nextInt(scale*2) - scale) )\",\n" + + + "\"MyInt=(i%nullPoints[2] == 0 ? null : columnRandoms[1].nextInt(scale*2) - scale )\",\n" + + "\"MyLong=(i%nullPoints[3] ==0 ? null : (long)(columnRandoms[2].nextInt(scale*2) - scale))\",\n" + + "\"MyFloat=(float)(i%nullPoints[4] == 0 ? null : i%nullPoints[5] == 0 ? 1.0F/0.0F: i%nullPoints[6] == 0 ? -1.0F/0.0F : (columnRandoms[3].nextFloat()-0.5)*scale)\",\n" + + + "\"MyDouble=(double)(i%nullPoints[7] == 0 ? null : i%nullPoints[8] == 0 ? 1.0D/0.0D: i%nullPoints[9] == 0 ? -1.0D/0.0D : (columnRandoms[4].nextDouble()-0.5)*scale)\",\n" + + + "\"MyBoolean = (i%nullPoints[10] == 0 ? null : columnRandoms[5].nextBoolean())\",\n" + + "\"MyChar = (i%nullPoints[11] == 0 ? null : new Character( (char) (columnRandoms[6].nextInt(27)+97) ) )\",\n" + + + "\"MyShort=(short)(i%nullPoints[12] == 0 ? null : columnRandoms[7].nextInt(scale*2) - scale )\",\n" + + "\"MyByte=(Byte)(i%nullPoints[13] == 0 ? null : new Byte( Integer.toString( (int)( columnRandoms[8].nextInt(Byte.MAX_VALUE*2)-Byte.MAX_VALUE ) ) ) )\",\n" + + + "\"MyBigDecimal=(i%nullPoints[14] == 0 ? null : new java.math.BigDecimal( (columnRandoms[9].nextDouble()-0.5)*scale ))\",\n" + + + "\"MyBigInteger=(i%nullPoints[15] == 0 ? 
null : new java.math.BigInteger(Integer.toString(columnRandoms[10].nextInt(scale*2) - scale) ))\"\n" + + + ");\n\n"; } diff --git a/DB/src/test/java/io/deephaven/db/v2/QueryTableAggregationTest.java b/DB/src/test/java/io/deephaven/db/v2/QueryTableAggregationTest.java index 4e34d37e06e..4e95ed2defb 100644 --- a/DB/src/test/java/io/deephaven/db/v2/QueryTableAggregationTest.java +++ b/DB/src/test/java/io/deephaven/db/v2/QueryTableAggregationTest.java @@ -75,30 +75,28 @@ public void tearDown() throws Exception { // region Static chunked by() tests private static Table individualStaticByTest(@NotNull final Table input, - @Nullable final AggregationControl aggregationControl, - @NotNull final String... keyColumns) { + @Nullable final AggregationControl aggregationControl, @NotNull final String... keyColumns) { final Table adjustedInput = input.update("__Pre_Agg_II__=ii"); final Table expectedKeys; final Table expected; { - // NB: We can't re-use SelectColumns across calls, hence the duplicate extraction for - // expressions and key names. + // NB: We can't re-use SelectColumns across calls, hence the duplicate extraction for expressions and key + // names. final SelectColumn[] keySelectColumns = SelectColumnFactory.getExpressions(keyColumns); - final String[] keyNames = Arrays.stream(keySelectColumns).map(SelectColumn::getName) - .distinct().toArray(String[]::new); + final String[] keyNames = + Arrays.stream(keySelectColumns).map(SelectColumn::getName).distinct().toArray(String[]::new); if (keyColumns.length == 0) { expectedKeys = TableTools.emptyTable(adjustedInput.size() > 0 ? 
1 : 0); expected = adjustedInput; } else { final Set retainedColumns = - new LinkedHashSet<>(adjustedInput.getDefinition().getColumnNameMap().keySet()); + new LinkedHashSet<>(adjustedInput.getDefinition().getColumnNameMap().keySet()); retainedColumns.removeAll(Arrays.stream(keyNames).collect(Collectors.toSet())); - final SelectColumn[] allSelectColumns = Stream - .concat(Arrays.stream(keySelectColumns), - retainedColumns.stream().map(SourceColumn::new)) - .toArray(SelectColumn[]::new); + final SelectColumn[] allSelectColumns = + Stream.concat(Arrays.stream(keySelectColumns), retainedColumns.stream().map(SourceColumn::new)) + .toArray(SelectColumn[]::new); final Table adjustedInputWithAllColumns = adjustedInput.view(allSelectColumns); expectedKeys = adjustedInputWithAllColumns.selectDistinct(keyNames); expected = adjustedInputWithAllColumns.sort(keyNames); @@ -109,14 +107,14 @@ private static Table individualStaticByTest(@NotNull final Table input, final Table actual; { final SelectColumn[] keySelectColumns = SelectColumnFactory.getExpressions(keyColumns); - final String[] keyNames = Arrays.stream(keySelectColumns).map(SelectColumn::getName) - .distinct().toArray(String[]::new); + final String[] keyNames = + Arrays.stream(keySelectColumns).map(SelectColumn::getName).distinct().toArray(String[]::new); final Table aggregatedInput = ByAggregationFactory.by( - aggregationControl == null ? AggregationControl.DEFAULT : aggregationControl, - (QueryTable) adjustedInput, keySelectColumns); + aggregationControl == null ? AggregationControl.DEFAULT : aggregationControl, + (QueryTable) adjustedInput, keySelectColumns); actualKeys = keyNames.length == 0 - ? aggregatedInput.dropColumns(aggregatedInput.getDefinition().getColumnNamesArray()) - : aggregatedInput.view(keyNames); + ? 
aggregatedInput.dropColumns(aggregatedInput.getDefinition().getColumnNamesArray()) + : aggregatedInput.view(keyNames); actual = aggregatedInput.sort(keyNames).ungroup(); } @@ -129,17 +127,15 @@ private static Table individualStaticByTest(@NotNull final Table input, @Test public void testStaticNoKeyByWithChunks() { individualStaticByTest(emptyTable(0).update("A=Integer.toString(i % 5)", "B=i / 5"), null); - individualStaticByTest(emptyTable(10000).update("A=Integer.toString(i % 5)", "B=i / 5"), - null); + individualStaticByTest(emptyTable(10000).update("A=Integer.toString(i % 5)", "B=i / 5"), null); } @Test public void testStaticReinterpretableKeyByWithChunks() { final String nowName = "__now_" + Thread.currentThread().hashCode() + "__"; QueryScope.addParam(nowName, DBDateTime.now()); - final Table input = emptyTable(10000).update( - "A=ii % 100 == 0 ? null : plus(" + nowName + ", (long) (ii / 5))", - "B=ii % 100 == 0 ? null : (ii & 1) == 0"); + final Table input = emptyTable(10000).update("A=ii % 100 == 0 ? null : plus(" + nowName + ", (long) (ii / 5))", + "B=ii % 100 == 0 ? null : (ii & 1) == 0"); individualStaticByTest(input, null, "A", "B"); individualStaticByTest(input, null, "B", "A"); @@ -158,16 +154,13 @@ public int initialHashTableSize(@NotNull final Table table) { final Table input2 = emptyTable(10000).update("A=i", "B=i%2", "C=i%3"); final Table input3 = emptyTable(10000).update("D=i % 2048"); final Table input4 = emptyTable(10000).update("D=i % 4096"); - final Table input5 = - emptyTable(10000).update("E=(ii & 1) == 0 ? ii : (ii - 1 + 0xFFFFFFFFL)"); + final Table input5 = emptyTable(10000).update("E=(ii & 1) == 0 ? 
ii : (ii - 1 + 0xFFFFFFFFL)"); final Table input6 = emptyTable(10000).update("A=i", "B=i%2", "C=i%3", "D=ii"); - individualStaticByTest( - individualStaticByTest(individualStaticByTest(input1, control, "C"), control, "B"), - control, "A"); - individualStaticByTest( - individualStaticByTest(individualStaticByTest(input2, control, "C"), control, "B"), - control, "A"); + individualStaticByTest(individualStaticByTest(individualStaticByTest(input1, control, "C"), control, "B"), + control, "A"); + individualStaticByTest(individualStaticByTest(individualStaticByTest(input2, control, "C"), control, "B"), + control, "A"); individualStaticByTest(input3, control, "D"); individualStaticByTest(input4, control, "D"); individualStaticByTest(input5, control, "E"); @@ -188,8 +181,8 @@ public void testStaticGroupedByWithChunks() { @Test public void testStaticNameReusingByWithChunks() { - individualStaticByTest(emptyTable(10000).update("A=i"), null, "A=Integer.toString(A % 5)", - "A=A.hashCode()", "A=A / 2"); + individualStaticByTest(emptyTable(10000).update("A=i"), null, "A=Integer.toString(A % 5)", "A=A.hashCode()", + "A=A / 2"); } // endregion Static chunked by() tests @@ -207,40 +200,36 @@ private static class IncrementalFirstStaticAfterByResultSupplier implements Supp private final AtomicBoolean firstTime = new AtomicBoolean(true); - private IncrementalFirstStaticAfterByResultSupplier( - @NotNull final AggregationControl control, @NotNull final QueryTable input, - @NotNull String... columns) { + private IncrementalFirstStaticAfterByResultSupplier(@NotNull final AggregationControl control, + @NotNull final QueryTable input, @NotNull String... columns) { this.control = control; this.input = input; this.columns = columns; } /** - * Return an incremental by() result on first invocation, in order to establish the - * enclosing {@link EvalNugget}'s baseline "original table". 
Return a static by() result on - * subsequent invocations, in order to use the static implementation to validate the - * incremental implementation. Note that the static implementation is well tested by its own - * unit tests that don't rely on by(). + * Return an incremental by() result on first invocation, in order to establish the enclosing + * {@link EvalNugget}'s baseline "original table". Return a static by() result on subsequent invocations, in + * order to use the static implementation to validate the incremental implementation. Note that the static + * implementation is well tested by its own unit tests that don't rely on by(). * * @return The appropriate {@link Table} */ @Override public final Table get() { final SelectColumn[] keySelectColumns = SelectColumnFactory.getExpressions(columns); - final String[] keyNames = Arrays.stream(keySelectColumns).map(SelectColumn::getName) - .distinct().toArray(String[]::new); + final String[] keyNames = + Arrays.stream(keySelectColumns).map(SelectColumn::getName).distinct().toArray(String[]::new); if (firstTime.compareAndSet(true, false)) { return ByAggregationFactory.by(control, input, keySelectColumns).sort(keyNames); } - return ByAggregationFactory.by(control, (QueryTable) input.silent(), keySelectColumns) - .sort(keyNames); + return ByAggregationFactory.by(control, (QueryTable) input.silent(), keySelectColumns).sort(keyNames); } } private static EvalNugget incrementalByEvalNugget(@NotNull final AggregationControl control, - @NotNull final QueryTable input, @NotNull String... columns) { - final Supplier

    tableSupplier = - new IncrementalFirstStaticAfterByResultSupplier(control, input, columns); + @NotNull final QueryTable input, @NotNull String... columns) { + final Supplier
    tableSupplier = new IncrementalFirstStaticAfterByResultSupplier(control, input, columns); return new EvalNugget() { @Override protected final Table e() { @@ -249,8 +238,7 @@ protected final Table e() { }; } - private static EvalNugget incrementalByEvalNugget(@NotNull final QueryTable input, - @NotNull String... columns) { + private static EvalNugget incrementalByEvalNugget(@NotNull final QueryTable input, @NotNull String... columns) { return incrementalByEvalNugget(AggregationControl.DEFAULT, input, columns); } @@ -273,9 +261,9 @@ public void testIncrementalByDownstreamFromMerge() { // noinspection AutoBoxing QueryScope.addParam(tableIndexName, tableIndex); final QueryTable result = (QueryTable) parents[tableIndex].update( - "StrCol = Long.toString((long) (ii / 5))", - "IntCol = " + tableIndexName + " * 1_000_000 + i", - "TimeCol = ii % 100 == 0 ? null : plus(" + nowName + ", ii * 100)"); + "StrCol = Long.toString((long) (ii / 5))", + "IntCol = " + tableIndexName + " * 1_000_000 + i", + "TimeCol = ii % 100 == 0 ? null : plus(" + nowName + ", ii * 100)"); // Hide part of the table's index from downstream, initially. result.getIndex().removeRange(mergeChunkMultiple, 2 * mergeChunkMultiple - 1); return result; @@ -308,9 +296,8 @@ public boolean shouldProbeShift(long shiftSize, int numStates) { incrementalByEvalNugget(controlSize8, merged, "TimeCol"), incrementalByEvalNugget(controlShiftByProbing, merged, "TimeCol"), incrementalByEvalNugget(controlSize8, merged, - "TimeCol=isNull(TimeCol) ? NULL_LONG : TimeCol.getNanos()"), - incrementalByEvalNugget(merged, - "TimeCol=isNull(TimeCol) ? NULL_LONG : TimeCol.getNanos()"), + "TimeCol=isNull(TimeCol) ? NULL_LONG : TimeCol.getNanos()"), + incrementalByEvalNugget(merged, "TimeCol=isNull(TimeCol) ? 
NULL_LONG : TimeCol.getNanos()"), incrementalByEvalNugget(controlSize8, merged, "StrCol", "IntCol"), incrementalByEvalNugget(merged, "StrCol", "IntCol"), @@ -318,8 +305,7 @@ public boolean shouldProbeShift(long shiftSize, int numStates) { new EvalNugget() { @Override protected final Table e() { - return ByAggregationFactory.by(merged, "StrCol") - .update("IntColSum=sum(IntCol)"); + return ByAggregationFactory.by(merged, "StrCol").update("IntColSum=sum(IntCol)"); } } }; @@ -332,8 +318,7 @@ protected final Table e() { LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { inputs[1].getIndex().removeRange(mergeChunkMultiple - 1_000, mergeChunkMultiple - 1); - inputs[1].notifyListeners(i(), ir(mergeChunkMultiple - 1_000, mergeChunkMultiple - 1), - i()); + inputs[1].notifyListeners(i(), ir(mergeChunkMultiple - 1_000, mergeChunkMultiple - 1), i()); }); TstUtils.validate(ens); @@ -358,8 +343,7 @@ protected final Table e() { LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { inputs[4].modifiedColumnSet.clear(); inputs[4].modifiedColumnSet.setAll("StrCol"); - inputs[4].notifyListeners( - new ShiftAwareListener.Update(i(), i(), ir(0, mergeChunkMultiple / 2), + inputs[4].notifyListeners(new ShiftAwareListener.Update(i(), i(), ir(0, mergeChunkMultiple / 2), IndexShiftData.EMPTY, inputs[4].modifiedColumnSet)); }); TstUtils.validate(ens); @@ -367,8 +351,7 @@ protected final Table e() { LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { inputs[4].modifiedColumnSet.clear(); inputs[4].modifiedColumnSet.setAll("IntCol"); - inputs[4].notifyListeners( - new ShiftAwareListener.Update(i(), i(), ir(0, mergeChunkMultiple / 2), + inputs[4].notifyListeners(new ShiftAwareListener.Update(i(), i(), ir(0, mergeChunkMultiple / 2), IndexShiftData.EMPTY, inputs[4].modifiedColumnSet)); }); TstUtils.validate(ens); @@ -376,8 +359,7 @@ protected final Table e() { LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { inputs[4].modifiedColumnSet.clear(); 
inputs[4].modifiedColumnSet.setAll("TimeCol"); - inputs[4].notifyListeners( - new ShiftAwareListener.Update(i(), i(), ir(0, mergeChunkMultiple / 2), + inputs[4].notifyListeners(new ShiftAwareListener.Update(i(), i(), ir(0, mergeChunkMultiple / 2), IndexShiftData.EMPTY, inputs[4].modifiedColumnSet)); }); TstUtils.validate(ens); @@ -386,10 +368,10 @@ protected final Table e() { @Test public void testIncrementalNoKeyBy() { final QueryTable input1 = - (QueryTable) TableTools.emptyTable(100).update("StrCol=Long.toString(ii)", "IntCol=i"); + (QueryTable) TableTools.emptyTable(100).update("StrCol=Long.toString(ii)", "IntCol=i"); input1.setRefreshing(true); final QueryTable input2 = - (QueryTable) TableTools.emptyTable(100).update("StrCol=Long.toString(ii)", "IntCol=i"); + (QueryTable) TableTools.emptyTable(100).update("StrCol=Long.toString(ii)", "IntCol=i"); input2.getIndex().remove(input2.getIndex()); input2.setRefreshing(true); @@ -429,20 +411,20 @@ protected Table e() { TstUtils.validate(ens); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - input2.notifyListeners(new ShiftAwareListener.Update(i(0, 1), i(0, 1), i(), - IndexShiftData.EMPTY, ModifiedColumnSet.EMPTY)); + input2.notifyListeners(new ShiftAwareListener.Update(i(0, 1), i(0, 1), i(), IndexShiftData.EMPTY, + ModifiedColumnSet.EMPTY)); }); TstUtils.validate(ens); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - input2.notifyListeners(new ShiftAwareListener.Update(i(), i(), i(2, 3), - IndexShiftData.EMPTY, ModifiedColumnSet.ALL)); + input2.notifyListeners( + new ShiftAwareListener.Update(i(), i(), i(2, 3), IndexShiftData.EMPTY, ModifiedColumnSet.ALL)); }); TstUtils.validate(ens); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - input2.notifyListeners(new ShiftAwareListener.Update(i(), i(), i(), - IndexShiftData.EMPTY, ModifiedColumnSet.EMPTY)); + input2.notifyListeners( + new ShiftAwareListener.Update(i(), i(), i(), IndexShiftData.EMPTY, ModifiedColumnSet.EMPTY)); }); 
TstUtils.validate(ens); } @@ -460,11 +442,9 @@ public void testStaticBy() { table.by("i"); TestCase.fail("Previous statement should have thrown an exception"); } catch (Exception e) { - TestCase.assertEquals("Failed to get expression for all matched patterns", - e.getMessage()); + TestCase.assertEquals("Failed to get expression for all matched patterns", e.getMessage()); TestCase.assertNotNull(e.getCause()); - TestCase.assertEquals("Invalid column name \"i\": \"i\" is a reserved keyword", - e.getCause().getMessage()); + TestCase.assertEquals("Invalid column name \"i\": \"i\" is a reserved keyword", e.getCause().getMessage()); } TestCase.assertEquals(0, table.by("j=i").size()); TestCase.assertEquals(1, table.by("j=i").getColumns().length); @@ -475,15 +455,14 @@ public void testStaticBy() { TestCase.assertEquals(1, table.by("j=i").getColumns().length); TestCase.assertEquals(int.class, table.by("j=i").getColumn("j").getType()); - table = TstUtils.testRefreshingTable(Index.FACTORY.getIndexByRange(0, 2), - c("S", "c", "e", "g"), c("I", 2, 4, 6)); + table = TstUtils.testRefreshingTable(Index.FACTORY.getIndexByRange(0, 2), c("S", "c", "e", "g"), + c("I", 2, 4, 6)); TestCase.assertEquals(3, table.by("S").size()); TestCase.assertEquals(2, table.by("S").getColumns().length); TestCase.assertEquals(String.class, table.by("S").getColumn("S").getType()); TestCase.assertEquals(DbIntArray.class, table.by("S").getColumn("I").getType()); - TestCase.assertEquals(Arrays.asList("c", "e", "g"), - Arrays.asList(table.by("S").getColumn("S").get(0, 3))); + TestCase.assertEquals(Arrays.asList("c", "e", "g"), Arrays.asList(table.by("S").getColumn("S").get(0, 3))); DbIntArray intGroups[] = (DbIntArray[]) table.by("S").getColumn("I").getDirect(); TestCase.assertEquals(3, intGroups.length); TestCase.assertEquals(1, intGroups[0].size()); @@ -494,15 +473,14 @@ public void testStaticBy() { TestCase.assertEquals(6, intGroups[2].get(0)); table = 
TstUtils.testRefreshingTable(Index.FACTORY.getIndexByRange(0, 2), - c("S", "e", "c", "g"), - c("I", 4, 2, 6)); + c("S", "e", "c", "g"), + c("I", 4, 2, 6)); TestCase.assertEquals(3, table.by("S").size()); TestCase.assertEquals(2, table.by("S").getColumns().length); TestCase.assertEquals(String.class, table.by("S").getColumn("S").getType()); TestCase.assertEquals(DbIntArray.class, table.by("S").getColumn("I").getType()); - TestCase.assertEquals(Arrays.asList("e", "c", "g"), - Arrays.asList(table.by("S").getColumn("S").get(0, 3))); + TestCase.assertEquals(Arrays.asList("e", "c", "g"), Arrays.asList(table.by("S").getColumn("S").get(0, 3))); intGroups = (DbIntArray[]) table.by("S").getColumn("I").getDirect(); TestCase.assertEquals(3, intGroups.length); TestCase.assertEquals(1, intGroups[0].size()); @@ -513,9 +491,9 @@ public void testStaticBy() { TestCase.assertEquals(6, intGroups[2].get(0)); table = TstUtils.testRefreshingTable(Index.FACTORY.getIndexByRange(0, 2), - c("S", "e", "c", "g"), - c("X", 4, 2, 6), - c("Y", 1, 2, 3)); + c("S", "e", "c", "g"), + c("X", 4, 2, 6), + c("Y", 1, 2, 3)); TestCase.assertEquals(3, table.by("Z=X+Y").size()); TestCase.assertEquals(4, table.by("Z=X+Y").getColumns().length); TestCase.assertEquals(DbArray.class, table.by("Z=X+Y").getColumn("S").getType()); @@ -530,13 +508,12 @@ public void testStaticBy() { TestCase.assertEquals("e", sValues[0].get(0)); TestCase.assertEquals("c", sValues[1].get(0)); TestCase.assertEquals("g", sValues[2].get(0)); - TestCase.assertEquals(Arrays.asList(5, 4, 9), - Arrays.asList(table.by("Z=X+Y").getColumn("Z").get(0, 3))); + TestCase.assertEquals(Arrays.asList(5, 4, 9), Arrays.asList(table.by("Z=X+Y").getColumn("Z").get(0, 3))); table = TstUtils.testRefreshingTable( - c("S", "e", "c", "g"), - c("X", 4, 2, 6), - c("Y", 4, 2, 2)); + c("S", "e", "c", "g"), + c("X", 4, 2, 6), + c("Y", 4, 2, 2)); TestCase.assertEquals(2, table.by("Z=X+Y").size()); TestCase.assertEquals(4, table.by("Z=X+Y").getColumns().length); 
TestCase.assertEquals(DbArray.class, table.by("Z=X+Y").getColumn("S").getType()); @@ -550,13 +527,12 @@ public void testStaticBy() { TestCase.assertEquals("e", sValues[0].get(0)); TestCase.assertEquals("c", sValues[1].get(0)); TestCase.assertEquals("g", sValues[0].get(1)); - TestCase.assertEquals(Arrays.asList(8, 4), - Arrays.asList(table.by("Z=X+Y").getColumn("Z").get(0, 2))); + TestCase.assertEquals(Arrays.asList(8, 4), Arrays.asList(table.by("Z=X+Y").getColumn("Z").get(0, 2))); table = TstUtils.testRefreshingTable( - c("S", "e", "c", "g"), - TstUtils.cG("X", 4, 2, 6), - c("Y", 4, 2, 2)); + c("S", "e", "c", "g"), + TstUtils.cG("X", 4, 2, 6), + c("Y", 4, 2, 2)); TestCase.assertEquals(2, table.by("Z=X+Y").size()); TestCase.assertEquals(4, table.by("Z=X+Y").getColumns().length); TestCase.assertEquals(DbArray.class, table.by("Z=X+Y").getColumn("S").getType()); @@ -570,13 +546,12 @@ public void testStaticBy() { TestCase.assertEquals("e", sValues[0].get(0)); TestCase.assertEquals("c", sValues[1].get(0)); TestCase.assertEquals("g", sValues[0].get(1)); - TestCase.assertEquals(Arrays.asList(8, 4), - Arrays.asList(table.by("Z=X+Y").getColumn("Z").get(0, 2))); + TestCase.assertEquals(Arrays.asList(8, 4), Arrays.asList(table.by("Z=X+Y").getColumn("Z").get(0, 2))); table = TstUtils.testRefreshingTable( - c("S", "e", "c", "g"), - c("X", 4, 2, 6), - TstUtils.cG("Y", 4, 2, 2)); + c("S", "e", "c", "g"), + c("X", 4, 2, 6), + TstUtils.cG("Y", 4, 2, 2)); TestCase.assertEquals(2, table.by("Z=X+Y").size()); TestCase.assertEquals(4, table.by("Z=X+Y").getColumns().length); TestCase.assertEquals(DbArray.class, table.by("Z=X+Y").getColumn("S").getType()); @@ -590,13 +565,12 @@ public void testStaticBy() { TestCase.assertEquals("e", sValues[0].get(0)); TestCase.assertEquals("c", sValues[1].get(0)); TestCase.assertEquals("g", sValues[0].get(1)); - TestCase.assertEquals(Arrays.asList(8, 4), - Arrays.asList(table.by("Z=X+Y").getColumn("Z").get(0, 2))); + TestCase.assertEquals(Arrays.asList(8, 
4), Arrays.asList(table.by("Z=X+Y").getColumn("Z").get(0, 2))); table = TstUtils.testRefreshingTable( - c("S", "e", "c", "g"), - TstUtils.cG("X", 4, 2, 6), - TstUtils.cG("Y", 4, 3, 2)); + c("S", "e", "c", "g"), + TstUtils.cG("X", 4, 2, 6), + TstUtils.cG("Y", 4, 3, 2)); TestCase.assertEquals(2, table.by("Z=X+Y").size()); TestCase.assertEquals(4, table.by("Z=X+Y").getColumns().length); TestCase.assertEquals(DbArray.class, table.by("Z=X+Y").getColumn("S").getType()); @@ -610,19 +584,17 @@ public void testStaticBy() { TestCase.assertEquals("e", sValues[0].get(0)); TestCase.assertEquals("c", sValues[1].get(0)); TestCase.assertEquals("g", sValues[0].get(1)); - TestCase.assertEquals(Arrays.asList(8, 5), - Arrays.asList(table.by("Z=X+Y").getColumn("Z").get(0, 2))); + TestCase.assertEquals(Arrays.asList(8, 5), Arrays.asList(table.by("Z=X+Y").getColumn("Z").get(0, 2))); table = TstUtils.testRefreshingTable( - c("S", "c", null, "g"), - c("I", 2, 4, 6)); + c("S", "c", null, "g"), + c("I", 2, 4, 6)); TestCase.assertEquals(3, table.by("S").size()); TestCase.assertEquals(2, table.by("S").getColumns().length); TestCase.assertEquals(String.class, table.by("S").getColumn("S").getType()); TestCase.assertEquals(DbIntArray.class, table.by("S").getColumn("I").getType()); - TestCase.assertEquals(Arrays.asList("c", null, "g"), - Arrays.asList(table.by("S").getColumn("S").get(0, 3))); + TestCase.assertEquals(Arrays.asList("c", null, "g"), Arrays.asList(table.by("S").getColumn("S").get(0, 3))); intGroups = (DbIntArray[]) table.by("S").getColumn("I").getDirect(); TestCase.assertEquals(3, intGroups.length); TestCase.assertEquals(1, intGroups[0].size()); @@ -638,13 +610,13 @@ public void testStaticBy() { @Test public void testLastByIterative() { final QueryTable queryTable = TstUtils.testRefreshingTable(i(1, 2, 4, 6), - c("Sym", "aa", "bc", "aa", "aa"), - c("intCol", 10, 20, 30, 50), - c("doubleCol", 0.1, 0.2, 0.3, 0.5)); + c("Sym", "aa", "bc", "aa", "aa"), + c("intCol", 10, 20, 30, 50), + 
c("doubleCol", 0.1, 0.2, 0.3, 0.5)); final QueryTable queryTableGrouped = TstUtils.testRefreshingTable(i(1, 2, 4, 6), - c("Sym", "aa", "bc", "aa", "aa"), - c("intCol", 10, 20, 30, 50), - c("doubleCol", 0.1, 0.2, 0.3, 0.5)); + c("Sym", "aa", "bc", "aa", "aa"), + c("intCol", 10, 20, 30, 50), + c("doubleCol", 0.1, 0.2, 0.3, 0.5)); final Table table = queryTable.select(); final Table tableGrouped = queryTableGrouped.select(); final EvalNugget[] en = new EvalNugget[] { @@ -699,8 +671,7 @@ public Table e() { } }}; LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - addToTable(queryTable, i(7, 9), c("Sym", "aa", "aa"), c("intCol", 20, 10), - c("doubleCol", 2.1, 2.2)); + addToTable(queryTable, i(7, 9), c("Sym", "aa", "aa"), c("intCol", 20, 10), c("doubleCol", 2.1, 2.2)); queryTable.notifyListeners(i(7, 9), i(), i()); }); @@ -715,14 +686,13 @@ public void testFirstByLastByIncremental() { final TstUtils.ColumnInfo[] columnInfo; final QueryTable table = getTable(size, random, - columnInfo = initColumnInfos( - new String[] {"Sym", "Sym2", "IntSet", "boolCol", "intCol", "doubleCol"}, - new SetGenerator<>("aa", "bb", "bc", "cc", "dd"), - new SetGenerator<>("ee", "ff", "gg", "hh", "ii"), - new SetGenerator<>(1, 2), - new BooleanGenerator(), - new IntGenerator(0, 100), - new DoubleGenerator(0, 100))); + columnInfo = initColumnInfos(new String[] {"Sym", "Sym2", "IntSet", "boolCol", "intCol", "doubleCol"}, + new SetGenerator<>("aa", "bb", "bc", "cc", "dd"), + new SetGenerator<>("ee", "ff", "gg", "hh", "ii"), + new SetGenerator<>(1, 2), + new BooleanGenerator(), + new IntGenerator(0, 100), + new DoubleGenerator(0, 100))); final EvalNuggetInterface en[] = new EvalNuggetInterface[] { EvalNugget.from(table::lastBy), @@ -732,18 +702,18 @@ public void testFirstByLastByIncremental() { EvalNugget.from(() -> table.sort("Sym", "intCol").lastBy("Sym").sort("Sym")), new UpdateValidatorNugget(table.sort("Sym", "intCol").firstBy("Sym")), new UpdateValidatorNugget(table.sort("Sym", 
"intCol").lastBy("Sym")), - EvalNugget.from(() -> table.sort("Sym", "intCol") - .by(new TrackingLastByStateFactoryImpl(), "Sym").sort("Sym")), - EvalNugget.from(() -> table.sort("Sym", "intCol") - .by(new TrackingFirstByStateFactoryImpl(), "Sym").sort("Sym")), + EvalNugget.from( + () -> table.sort("Sym", "intCol").by(new TrackingLastByStateFactoryImpl(), "Sym").sort("Sym")), + EvalNugget.from( + () -> table.sort("Sym", "intCol").by(new TrackingFirstByStateFactoryImpl(), "Sym").sort("Sym")), new io.deephaven.db.v2.QueryTableTestBase.TableComparator(table.lastBy("Sym"), - table.by(new TrackingLastByStateFactoryImpl(), "Sym")), + table.by(new TrackingLastByStateFactoryImpl(), "Sym")), new io.deephaven.db.v2.QueryTableTestBase.TableComparator(table.firstBy("Sym"), - table.by(new TrackingFirstByStateFactoryImpl(), "Sym")), + table.by(new TrackingFirstByStateFactoryImpl(), "Sym")), EvalNugget.from(() -> table.firstBy("boolCol").sort("boolCol")), EvalNugget.from(() -> table.firstBy("boolCol", "Sym").sort("boolCol", "Sym")), - EvalNugget.from(() -> table.firstBy("Sym", "Sym2", "IntSet", "boolCol").sort("Sym", - "Sym2", "IntSet", "boolCol")), + EvalNugget.from(() -> table.firstBy("Sym", "Sym2", "IntSet", "boolCol").sort("Sym", "Sym2", "IntSet", + "boolCol")), }; @@ -765,14 +735,13 @@ private void testFirstOrLastByStatic(int seed, int size) { final Random random = new Random(seed); final QueryTable table = getTable(false, size, random, - initColumnInfos( - new String[] {"Sym", "Sym2", "IntSet", "boolCol", "intCol", "doubleCol"}, - new SetGenerator<>("aa", "bb", "bc", "cc", "dd"), - new SetGenerator<>("ee", "ff", "gg", "hh", "ii"), - new SetGenerator<>(1, 2), - new BooleanGenerator(), - new IntGenerator(0, 100), - new DoubleGenerator(0, 100))); + initColumnInfos(new String[] {"Sym", "Sym2", "IntSet", "boolCol", "intCol", "doubleCol"}, + new SetGenerator<>("aa", "bb", "bc", "cc", "dd"), + new SetGenerator<>("ee", "ff", "gg", "hh", "ii"), + new SetGenerator<>(1, 2), + new 
BooleanGenerator(), + new IntGenerator(0, 100), + new DoubleGenerator(0, 100))); TableTools.showWithIndex(table); final Set firstSet = new HashSet<>(); @@ -784,29 +753,27 @@ private void testFirstOrLastByStatic(int seed, int size) { assertTableEquals(table.tail(1), table.lastBy()); assertTableEquals(table.head(1), table.firstBy()); - final Table expected = - table.update("First=firstSet.add(Sym)").where("First").dropColumns("First"); + final Table expected = table.update("First=firstSet.add(Sym)").where("First").dropColumns("First"); final Table firstBy = table.firstBy("Sym"); assertTableEquals(expected, firstBy); firstSet.clear(); final Table lastBy = table.lastBy("Sym").sort("Sym"); - final Table expectedLast = table.reverse().update("First=firstSet.add(Sym)").where("First") - .dropColumns("First").sort("Sym"); + final Table expectedLast = + table.reverse().update("First=firstSet.add(Sym)").where("First").dropColumns("First").sort("Sym"); assertTableEquals(expectedLast, lastBy); - final Table expectedFirstComposite = table - .update("First=skSet.add(new io.deephaven.datastructures.util.SmartKey(Sym, intCol))") - .where("First").dropColumns("First").moveUpColumns("Sym", "intCol"); + final Table expectedFirstComposite = + table.update("First=skSet.add(new io.deephaven.datastructures.util.SmartKey(Sym, intCol))") + .where("First").dropColumns("First").moveUpColumns("Sym", "intCol"); final Table firstByComposite = table.firstBy("Sym", "intCol"); assertTableEquals(expectedFirstComposite, firstByComposite); skSet.clear(); final Table lastByComposite = table.lastBy("Sym", "intCol").sort("Sym", "intCol"); - final Table expectedLastComposite = table.reverse() - .update("First=skSet.add(new io.deephaven.datastructures.util.SmartKey(Sym, intCol))") - .where("First").dropColumns("First").sort("Sym", "intCol") - .moveUpColumns("Sym", "intCol"); + final Table expectedLastComposite = + table.reverse().update("First=skSet.add(new io.deephaven.datastructures.util.SmartKey(Sym, 
intCol))") + .where("First").dropColumns("First").sort("Sym", "intCol").moveUpColumns("Sym", "intCol"); assertTableEquals(expectedLastComposite, lastByComposite); } @@ -815,13 +782,11 @@ private void powerSet(T[] elements, Consumer consumer) { powerSetInternal(0, included, elements, consumer); } - private void powerSetInternal(int depth, boolean[] included, T[] elements, - Consumer consumer) { + private void powerSetInternal(int depth, boolean[] included, T[] elements, Consumer consumer) { if (depth == included.length) { // noinspection unchecked - consumer.accept(IntStream.range(0, included.length).filter(i -> included[i]) - .mapToObj(i -> elements[i]) - .toArray(n -> (T[]) Array.newInstance(elements.getClass().getComponentType(), n))); + consumer.accept(IntStream.range(0, included.length).filter(i -> included[i]).mapToObj(i -> elements[i]) + .toArray(n -> (T[]) Array.newInstance(elements.getClass().getComponentType(), n))); return; } included[depth] = false; @@ -836,25 +801,25 @@ public void testKeyColumnTypes() { final int size = 10; - final QueryTable table = getTable(size, random, initColumnInfos( - new String[] {"Sym", "Date", "intCol", "doubleCol", "BooleanCol", "ByteCol", "CharCol", - "ShortCol", "FloatCol", "LongCol", "BigDecimalCol"}, - new SetGenerator<>("aa", "bb", "bc", "cc", "dd"), - new UnsortedDateTimeLongGenerator(DBTimeUtils.convertDateTime("2018-10-15T09:30:00 NY"), - DBTimeUtils.convertDateTime("2018-10-15T16:00:00 NY")), - new IntGenerator(0, 100), - new DoubleGenerator(0, 100), - new BooleanGenerator(), - new ByteGenerator((byte) 65, (byte) 95), - new CharGenerator('a', 'z'), - new ShortGenerator(), - new FloatGenerator(), - new LongGenerator(), - new BigDecimalGenerator())); - - - final String[] columns = - table.getColumnSourceMap().keySet().toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + final QueryTable table = getTable(size, random, + initColumnInfos( + new String[] {"Sym", "Date", "intCol", "doubleCol", "BooleanCol", "ByteCol", 
"CharCol", + "ShortCol", "FloatCol", "LongCol", "BigDecimalCol"}, + new SetGenerator<>("aa", "bb", "bc", "cc", "dd"), + new UnsortedDateTimeLongGenerator(DBTimeUtils.convertDateTime("2018-10-15T09:30:00 NY"), + DBTimeUtils.convertDateTime("2018-10-15T16:00:00 NY")), + new IntGenerator(0, 100), + new DoubleGenerator(0, 100), + new BooleanGenerator(), + new ByteGenerator((byte) 65, (byte) 95), + new CharGenerator('a', 'z'), + new ShortGenerator(), + new FloatGenerator(), + new LongGenerator(), + new BigDecimalGenerator())); + + + final String[] columns = table.getColumnSourceMap().keySet().toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY); table.lastBy("Date", "Sym"); @@ -866,9 +831,9 @@ public void testKeyColumnTypes() { @Test public void testLastBySumByIterative() { final QueryTable queryTable = TstUtils.testRefreshingTable(i(1, 2, 4, 6), - c("Sym", "aa", "bc", "ab", "bc"), - c("USym", "a", "b", "a", "b"), - c("intCol", 10, 20, 40, 60)); + c("Sym", "aa", "bc", "ab", "bc"), + c("USym", "a", "b", "a", "b"), + c("intCol", 10, 20, 40, 60)); final EvalNugget en[] = new EvalNugget[] { new EvalNugget() { public Table e() { @@ -889,9 +854,9 @@ public Table e() { TstUtils.validate(en); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { addToTable(queryTable, i(7, 9), - c("Sym", "aa", "bc"), - c("USym", "a", "b"), - c("intCol", 70, 90)); + c("Sym", "aa", "bc"), + c("USym", "a", "b"), + c("intCol", 70, 90)); queryTable.notifyListeners(i(7, 9), i(), i()); }); @@ -903,8 +868,8 @@ public Table e() { @Test public void testAddOnlyLastAttribute() { final QueryTable queryTable = TstUtils.testRefreshingTable(i(1, 2, 4, 6), - c("USym", "a", "b", "a", "b"), - c("intCol", 10, 20, 40, 60)); + c("USym", "a", "b", "a", "b"), + c("intCol", 10, 20, 40, 60)); queryTable.setAttribute(Table.ADD_ONLY_TABLE_ATTRIBUTE, true); @@ -916,8 +881,8 @@ public void testAddOnlyLastAttribute() { LiveTableMonitor.DEFAULT.startCycleForUnitTests(); addToTable(queryTable, i(7, 9), - c("USym", "a", "b"), - 
c("intCol", 70, 90)); + c("USym", "a", "b"), + c("intCol", 70, 90)); queryTable.notifyListeners(i(7, 9), i(), i()); LiveTableMonitor.DEFAULT.completeCycleForUnitTests(); @@ -932,9 +897,9 @@ public void testAddOnlyLastAttribute() { @Test public void testIncrementalBy() { final QueryTable queryTable = TstUtils.testRefreshingTable(i(1, 2, 4, 6), - c("Sym", "aa", "bc", "aa", "aa"), - c("intCol", 10, 20, 30, 50), - c("doubleCol", 0.1, 0.2, 0.3, 0.5)); + c("Sym", "aa", "bc", "aa", "aa"), + c("intCol", 10, 20, 30, 50), + c("doubleCol", 0.1, 0.2, 0.3, 0.5)); final Table table = queryTable.select(); final EvalNugget en[] = new EvalNugget[] { new EvalNugget() { @@ -1019,44 +984,40 @@ public Table e() { } }; LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - addToTable(queryTable, i(7, 9), c("Sym", "aa", "aa"), c("intCol", 20, 10), - c("doubleCol", 2.1, 2.2)); + addToTable(queryTable, i(7, 9), c("Sym", "aa", "aa"), c("intCol", 20, 10), c("doubleCol", 2.1, 2.2)); queryTable.notifyListeners(i(7, 9), i(), i()); }); TstUtils.validate(en); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - addToTable(queryTable, i(7, 9), c("Sym", "bc", "bc"), c("intCol", 21, 11), - c("doubleCol", 2.2, 2.3)); + addToTable(queryTable, i(7, 9), c("Sym", "bc", "bc"), c("intCol", 21, 11), c("doubleCol", 2.2, 2.3)); queryTable.notifyListeners(i(), i(), i(7, 9)); }); TstUtils.validate(en); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - addToTable(queryTable, i(7, 9), c("Sym", "aa", "bc"), c("intCol", 20, 15), - c("doubleCol", 2.1, 2.3)); + addToTable(queryTable, i(7, 9), c("Sym", "aa", "bc"), c("intCol", 20, 15), c("doubleCol", 2.1, 2.3)); queryTable.notifyListeners(i(), i(), i(7, 9)); }); TstUtils.validate(en); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { addToTable(queryTable, i(7, 9), c("Sym", "aa", "bc"), c("intCol", 20, 15), - c("doubleCol", Double.NEGATIVE_INFINITY, - Double.POSITIVE_INFINITY)); + c("doubleCol", Double.NEGATIVE_INFINITY, + 
Double.POSITIVE_INFINITY)); queryTable.notifyListeners(i(), i(), i(7, 9)); }); TstUtils.validate(en); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { addToTable(queryTable, i(7, 9), c("Sym", "aa", "bc"), c("intCol", 20, 15), - c("doubleCol", Double.POSITIVE_INFINITY, Double.NaN)); + c("doubleCol", Double.POSITIVE_INFINITY, Double.NaN)); queryTable.notifyListeners(i(), i(), i(7, 9)); }); TstUtils.validate(en); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - addToTable(queryTable, i(7, 9), c("Sym", "aa", "bc"), c("intCol", 20, 15), - c("doubleCol", 1.2, 2.2)); + addToTable(queryTable, i(7, 9), c("Sym", "aa", "bc"), c("intCol", 20, 15), c("doubleCol", 1.2, 2.2)); queryTable.notifyListeners(i(), i(), i(7, 9)); }); TstUtils.validate(en); @@ -1076,10 +1037,10 @@ private static void incrementalByTestSuite2() { final ColumnInfo[] columnInfo; final QueryTable table = getTable(size, random, - columnInfo = initColumnInfos(new String[] {"Sym", "intCol", "doubleCol"}, - new SetGenerator<>("aa", "bb", "bc", "cc", "dd"), - new IntGenerator(0, 100), - new DoubleGenerator(0, 100))); + columnInfo = initColumnInfos(new String[] {"Sym", "intCol", "doubleCol"}, + new SetGenerator<>("aa", "bb", "bc", "cc", "dd"), + new IntGenerator(0, 100), + new DoubleGenerator(0, 100))); final EvalNuggetInterface en[] = new EvalNuggetInterface[] { new EvalNugget() { @@ -1129,19 +1090,15 @@ public Table e() { }, new EvalNugget() { public Table e() { - return table.by("Sym").update("intColSum=sum(intCol)").ungroup() - .sort("Sym"); + return table.by("Sym").update("intColSum=sum(intCol)").ungroup().sort("Sym"); } }, new UpdateValidatorNugget(table.by().update("intColSum=sum(intCol)")), new UpdateValidatorNugget(table.by().update("intColSum=sum(intCol)").select()), new UpdateValidatorNugget(table.by().update("intColSum=sum(intCol)").ungroup()), - new UpdateValidatorNugget( - table.by("Sym").update("intColSum=sum(intCol)").ungroup()), - new UpdateValidatorNugget( - 
table.by("Sym").update("intColSum=cumsum(intCol)").ungroup()), - new UpdateValidatorNugget( - table.by("Sym").update("doubleColSum=cumsum(doubleCol)").ungroup()), + new UpdateValidatorNugget(table.by("Sym").update("intColSum=sum(intCol)").ungroup()), + new UpdateValidatorNugget(table.by("Sym").update("intColSum=cumsum(intCol)").ungroup()), + new UpdateValidatorNugget(table.by("Sym").update("doubleColSum=cumsum(doubleCol)").ungroup()), }; for (int step = 0; step < 100; ++step) { @@ -1165,21 +1122,18 @@ public Table e() { @Test public void testApplyToAllBy() { final Table table = TstUtils.testRefreshingTable(i(1, 5, 7, 8), - c("Sym", "aa", "bc", "aa", "aa"), - c("intCol", 10, 20, 30, 50), - c("doubleCol", 0.1, 0.2, 0.3, 0.5)); + c("Sym", "aa", "bc", "aa", "aa"), + c("intCol", 10, 20, 30, 50), + c("doubleCol", 0.1, 0.2, 0.3, 0.5)); Table result = table.avgBy("Sym"); TestCase.assertEquals(3, result.getColumns().length); TestCase.assertEquals(result.getColumns()[0].getName(), "Sym"); TestCase.assertEquals(result.getColumns()[1].getName(), "intCol"); TestCase.assertEquals(result.getColumns()[2].getName(), "doubleCol"); TestCase.assertEquals(result.size(), 2); - TestCase.assertEquals(Arrays.asList("aa", "bc"), - Arrays.asList(result.getColumn("Sym").get(0, 2))); - TestCase.assertEquals(Arrays.asList(30.0, 20.0), - Arrays.asList(result.getColumn("intCol").get(0, 2))); - TestCase.assertEquals(Arrays.asList(0.3, .2), - Arrays.asList(result.getColumn("doubleCol").get(0, 2))); + TestCase.assertEquals(Arrays.asList("aa", "bc"), Arrays.asList(result.getColumn("Sym").get(0, 2))); + TestCase.assertEquals(Arrays.asList(30.0, 20.0), Arrays.asList(result.getColumn("intCol").get(0, 2))); + TestCase.assertEquals(Arrays.asList(0.3, .2), Arrays.asList(result.getColumn("doubleCol").get(0, 2))); result = table.sumBy("Sym"); TestCase.assertEquals(3, result.getColumns().length); @@ -1187,12 +1141,9 @@ public void testApplyToAllBy() { TestCase.assertEquals(result.getColumns()[1].getName(), 
"intCol"); TestCase.assertEquals(result.getColumns()[2].getName(), "doubleCol"); TestCase.assertEquals(result.size(), 2); - TestCase.assertEquals(Arrays.asList("aa", "bc"), - Arrays.asList(result.getColumn("Sym").get(0, 2))); - TestCase.assertEquals(Arrays.asList(90L, 20L), - Arrays.asList(result.getColumn("intCol").get(0, 2))); - TestCase.assertEquals(Arrays.asList(0.9, 0.2), - Arrays.asList(result.getColumn("doubleCol").get(0, 2))); + TestCase.assertEquals(Arrays.asList("aa", "bc"), Arrays.asList(result.getColumn("Sym").get(0, 2))); + TestCase.assertEquals(Arrays.asList(90L, 20L), Arrays.asList(result.getColumn("intCol").get(0, 2))); + TestCase.assertEquals(Arrays.asList(0.9, 0.2), Arrays.asList(result.getColumn("doubleCol").get(0, 2))); result = table.stdBy("Sym"); TestCase.assertEquals(3, result.getColumns().length); @@ -1200,112 +1151,82 @@ public void testApplyToAllBy() { TestCase.assertEquals(result.getColumns()[1].getName(), "intCol"); TestCase.assertEquals(result.getColumns()[2].getName(), "doubleCol"); TestCase.assertEquals(result.size(), 2); - TestCase.assertEquals(Arrays.asList("aa", "bc"), - Arrays.asList(result.getColumn("Sym").get(0, 2))); - TestCase.assertEquals(Arrays.asList(20.0, Double.NaN), - Arrays.asList(result.getColumn("intCol").get(0, 2))); + TestCase.assertEquals(Arrays.asList("aa", "bc"), Arrays.asList(result.getColumn("Sym").get(0, 2))); + TestCase.assertEquals(Arrays.asList(20.0, Double.NaN), Arrays.asList(result.getColumn("intCol").get(0, 2))); TestCase.assertEquals(Arrays.asList(0.19999999999999996, Double.NaN), - Arrays.asList(result.getColumn("doubleCol").get(0, 2))); + Arrays.asList(result.getColumn("doubleCol").get(0, 2))); result = table.minBy("Sym"); TestCase.assertEquals(result.size(), 2); - TestCase.assertEquals(Arrays.asList(10, 20), - Arrays.asList(result.getColumn("intCol").get(0, 2))); - TestCase.assertEquals(Arrays.asList(0.1, .2), - Arrays.asList(result.getColumn("doubleCol").get(0, 2))); + 
TestCase.assertEquals(Arrays.asList(10, 20), Arrays.asList(result.getColumn("intCol").get(0, 2))); + TestCase.assertEquals(Arrays.asList(0.1, .2), Arrays.asList(result.getColumn("doubleCol").get(0, 2))); result = table.maxBy("Sym"); TestCase.assertEquals(result.size(), 2); - TestCase.assertEquals(Arrays.asList(50, 20), - Arrays.asList(result.getColumn("intCol").get(0, 2))); - TestCase.assertEquals(Arrays.asList(0.5, .2), - Arrays.asList(result.getColumn("doubleCol").get(0, 2))); + TestCase.assertEquals(Arrays.asList(50, 20), Arrays.asList(result.getColumn("intCol").get(0, 2))); + TestCase.assertEquals(Arrays.asList(0.5, .2), Arrays.asList(result.getColumn("doubleCol").get(0, 2))); result = table.varBy("Sym"); TestCase.assertEquals(result.size(), 2); - TestCase.assertEquals(Arrays.asList(400.0, Double.NaN), - Arrays.asList(result.getColumn("intCol").get(0, 2))); + TestCase.assertEquals(Arrays.asList(400.0, Double.NaN), Arrays.asList(result.getColumn("intCol").get(0, 2))); TestCase.assertEquals(Arrays.asList(0.03999999999999998, Double.NaN), - Arrays.asList(result.getColumn("doubleCol").get(0, 2))); + Arrays.asList(result.getColumn("doubleCol").get(0, 2))); result = table.lastBy("Sym"); TestCase.assertEquals(result.size(), 2); - TestCase.assertEquals(Arrays.asList(50, 20), - Arrays.asList(result.getColumn("intCol").get(0, 2))); - TestCase.assertEquals(Arrays.asList(.5, .2), - Arrays.asList(result.getColumn("doubleCol").get(0, 2))); - TestCase.assertEquals(Arrays.asList("aa", "bc"), - Arrays.asList(result.getColumn("Sym").get(0, 2))); + TestCase.assertEquals(Arrays.asList(50, 20), Arrays.asList(result.getColumn("intCol").get(0, 2))); + TestCase.assertEquals(Arrays.asList(.5, .2), Arrays.asList(result.getColumn("doubleCol").get(0, 2))); + TestCase.assertEquals(Arrays.asList("aa", "bc"), Arrays.asList(result.getColumn("Sym").get(0, 2))); result = table.lastBy("Sym", "Sym1=Sym"); TestCase.assertEquals(result.size(), 2); - TestCase.assertEquals(Arrays.asList(50, 20), - 
Arrays.asList(result.getColumn("intCol").get(0, 2))); - TestCase.assertEquals(Arrays.asList(.5, .2), - Arrays.asList(result.getColumn("doubleCol").get(0, 2))); - TestCase.assertEquals(Arrays.asList("aa", "bc"), - Arrays.asList(result.getColumn("Sym").get(0, 2))); + TestCase.assertEquals(Arrays.asList(50, 20), Arrays.asList(result.getColumn("intCol").get(0, 2))); + TestCase.assertEquals(Arrays.asList(.5, .2), Arrays.asList(result.getColumn("doubleCol").get(0, 2))); + TestCase.assertEquals(Arrays.asList("aa", "bc"), Arrays.asList(result.getColumn("Sym").get(0, 2))); result = table.lastBy("intCol", "Sym1=Sym"); TestCase.assertEquals(result.size(), 4); - TestCase.assertEquals(Arrays.asList(10, 20, 30, 50), - Arrays.asList(result.getColumn("intCol").get(0, 4))); + TestCase.assertEquals(Arrays.asList(10, 20, 30, 50), Arrays.asList(result.getColumn("intCol").get(0, 4))); TestCase.assertEquals(Arrays.asList(0.1, 0.2, 0.3, 0.5), - Arrays.asList(result.getColumn("doubleCol").get(0, 4))); - TestCase.assertEquals(Arrays.asList("aa", "bc", "aa", "aa"), - Arrays.asList(result.getColumn("Sym").get(0, 4))); - TestCase.assertEquals(Arrays.asList("aa", "bc", "aa", "aa"), - Arrays.asList(result.getColumn("Sym1").get(0, 4))); + Arrays.asList(result.getColumn("doubleCol").get(0, 4))); + TestCase.assertEquals(Arrays.asList("aa", "bc", "aa", "aa"), Arrays.asList(result.getColumn("Sym").get(0, 4))); + TestCase.assertEquals(Arrays.asList("aa", "bc", "aa", "aa"), Arrays.asList(result.getColumn("Sym1").get(0, 4))); result = table.firstBy("Sym"); TestCase.assertEquals(result.size(), 2); - TestCase.assertEquals(Arrays.asList(10, 20), - Arrays.asList(result.getColumn("intCol").get(0, 2))); - TestCase.assertEquals(Arrays.asList(0.1, .2), - Arrays.asList(result.getColumn("doubleCol").get(0, 2))); + TestCase.assertEquals(Arrays.asList(10, 20), Arrays.asList(result.getColumn("intCol").get(0, 2))); + TestCase.assertEquals(Arrays.asList(0.1, .2), Arrays.asList(result.getColumn("doubleCol").get(0, 
2))); result = table.firstBy("Sym", "Sym1=Sym"); TestCase.assertEquals(result.size(), 2); - TestCase.assertEquals(Arrays.asList(10, 20), - Arrays.asList(result.getColumn("intCol").get(0, 2))); - TestCase.assertEquals(Arrays.asList(0.1, .2), - Arrays.asList(result.getColumn("doubleCol").get(0, 2))); - TestCase.assertEquals(Arrays.asList("aa", "bc"), - Arrays.asList(result.getColumn("Sym").get(0, 2))); + TestCase.assertEquals(Arrays.asList(10, 20), Arrays.asList(result.getColumn("intCol").get(0, 2))); + TestCase.assertEquals(Arrays.asList(0.1, .2), Arrays.asList(result.getColumn("doubleCol").get(0, 2))); + TestCase.assertEquals(Arrays.asList("aa", "bc"), Arrays.asList(result.getColumn("Sym").get(0, 2))); result = table.firstBy("intCol", "Sym1=Sym"); TestCase.assertEquals(result.size(), 4); - TestCase.assertEquals(Arrays.asList(10, 20, 30, 50), - Arrays.asList(result.getColumn("intCol").get(0, 4))); + TestCase.assertEquals(Arrays.asList(10, 20, 30, 50), Arrays.asList(result.getColumn("intCol").get(0, 4))); TestCase.assertEquals(Arrays.asList(0.1, 0.2, 0.3, 0.5), - Arrays.asList(result.getColumn("doubleCol").get(0, 4))); - TestCase.assertEquals(Arrays.asList("aa", "bc", "aa", "aa"), - Arrays.asList(result.getColumn("Sym").get(0, 4))); - TestCase.assertEquals(Arrays.asList("aa", "bc", "aa", "aa"), - Arrays.asList(result.getColumn("Sym1").get(0, 4))); + Arrays.asList(result.getColumn("doubleCol").get(0, 4))); + TestCase.assertEquals(Arrays.asList("aa", "bc", "aa", "aa"), Arrays.asList(result.getColumn("Sym").get(0, 4))); + TestCase.assertEquals(Arrays.asList("aa", "bc", "aa", "aa"), Arrays.asList(result.getColumn("Sym1").get(0, 4))); result = table.view("intCol").avgBy(); TestCase.assertEquals(result.size(), 1); TestCase.assertEquals(1, result.getColumns().length); TestCase.assertEquals(result.getColumns()[0].getName(), "intCol"); - TestCase.assertEquals(Collections.singletonList(27.5), - Arrays.asList(result.getColumn("intCol").get(0, 1))); + 
TestCase.assertEquals(Collections.singletonList(27.5), Arrays.asList(result.getColumn("intCol").get(0, 1))); result = table.lastBy("Sym"); TestCase.assertEquals(result.size(), 2); - TestCase.assertEquals(Arrays.asList(50, 20), - Arrays.asList(result.getColumn("intCol").get(0, 2))); - TestCase.assertEquals(Arrays.asList(.5, .2), - Arrays.asList(result.getColumn("doubleCol").get(0, 2))); - TestCase.assertEquals(Arrays.asList("aa", "bc"), - Arrays.asList(result.getColumn("Sym").get(0, 2))); + TestCase.assertEquals(Arrays.asList(50, 20), Arrays.asList(result.getColumn("intCol").get(0, 2))); + TestCase.assertEquals(Arrays.asList(.5, .2), Arrays.asList(result.getColumn("doubleCol").get(0, 2))); + TestCase.assertEquals(Arrays.asList("aa", "bc"), Arrays.asList(result.getColumn("Sym").get(0, 2))); result = table.firstBy("Sym"); TestCase.assertEquals(result.size(), 2); - TestCase.assertEquals(Arrays.asList(10, 20), - Arrays.asList(result.getColumn("intCol").get(0, 2))); - TestCase.assertEquals(Arrays.asList(0.1, .2), - Arrays.asList(result.getColumn("doubleCol").get(0, 2))); + TestCase.assertEquals(Arrays.asList(10, 20), Arrays.asList(result.getColumn("intCol").get(0, 2))); + TestCase.assertEquals(Arrays.asList(0.1, .2), Arrays.asList(result.getColumn("doubleCol").get(0, 2))); } @@ -1323,70 +1244,64 @@ public void testSumByStatic() { private void testSumByStatic(int size, boolean lotsOfStrings, boolean grouped) { final Random random = new Random(0); final List ea = Collections.emptyList(); - final QueryTable queryTable = getTable(false, size, random, initColumnInfos( - new String[] {"Sym", - "charCol", "byteCol", - "shortCol", "intCol", "longCol", - "doubleCol", - "doubleNanCol", - "boolCol", - "bigI", - "bigD" - }, - Arrays.asList( - grouped ? Collections.singletonList(ColumnInfo.ColAttributes.Grouped) : ea, ea, ea, - ea, ea, ea, ea, ea, ea, ea, ea), - lotsOfStrings ? 
new StringGenerator(1000000) : new SetGenerator<>("a", "b", "c", "d"), - new CharGenerator('a', 'z'), - new ByteGenerator(), - new ShortGenerator((short) -20000, (short) 20000, 0.1), - new IntGenerator(Integer.MIN_VALUE / 2, Integer.MAX_VALUE / 2, 0.01), - new LongGenerator(-100_000_000, 100_000_000), - new SetGenerator<>(10.1, 20.1, 30.1, -40.1), - new DoubleGenerator(-100000.0, 100000.0, 0.01, 0.001), - new BooleanGenerator(0.5, 0.1), - new BigIntegerGenerator(0.1), - new BigDecimalGenerator(0.1))); + final QueryTable queryTable = getTable(false, size, random, initColumnInfos(new String[] {"Sym", + "charCol", "byteCol", + "shortCol", "intCol", "longCol", + "doubleCol", + "doubleNanCol", + "boolCol", + "bigI", + "bigD" + }, + Arrays.asList(grouped ? Collections.singletonList(ColumnInfo.ColAttributes.Grouped) : ea, ea, ea, ea, + ea, ea, ea, ea, ea, ea, ea), + lotsOfStrings ? new StringGenerator(1000000) : new SetGenerator<>("a", "b", "c", "d"), + new CharGenerator('a', 'z'), + new ByteGenerator(), + new ShortGenerator((short) -20000, (short) 20000, 0.1), + new IntGenerator(Integer.MIN_VALUE / 2, Integer.MAX_VALUE / 2, 0.01), + new LongGenerator(-100_000_000, 100_000_000), + new SetGenerator<>(10.1, 20.1, 30.1, -40.1), + new DoubleGenerator(-100000.0, 100000.0, 0.01, 0.001), + new BooleanGenerator(0.5, 0.1), + new BigIntegerGenerator(0.1), + new BigDecimalGenerator(0.1))); if (LiveTableTestCase.printTableUpdates) { TableTools.showWithIndex(queryTable); } final Table result = queryTable.dropColumns("Sym").sumBy(); - final List updates = queryTable.getDefinition().getColumnNames().stream() - .filter(c -> !c.equals("Sym")).map(c -> c + "=" - + QueryTableAggregationTestFormulaStaticMethods.sumFunction(c) + "(" + c + ")") - .collect(Collectors.toList()); - final Table updateResult = - queryTable.dropColumns("Sym").by().update(Selectable.from(updates)); + final List updates = queryTable.getDefinition().getColumnNames().stream().filter(c -> !c.equals("Sym")) + .map(c -> c 
+ "=" + QueryTableAggregationTestFormulaStaticMethods.sumFunction(c) + "(" + c + ")") + .collect(Collectors.toList()); + final Table updateResult = queryTable.dropColumns("Sym").by().update(Selectable.from(updates)); assertTableEquals(updateResult, result, TableDiff.DiffItems.DoublesExact); final Table resultKeyed = queryTable.sumBy("Sym"); final List updateKeyed = queryTable.getDefinition().getColumnNames().stream() - .filter(c -> !c.equals("Sym")).map(c -> c + "=" - + QueryTableAggregationTestFormulaStaticMethods.sumFunction(c) + "(" + c + ")") - .collect(Collectors.toList()); + .filter(c -> !c.equals("Sym")) + .map(c -> c + "=" + QueryTableAggregationTestFormulaStaticMethods.sumFunction(c) + "(" + c + ")") + .collect(Collectors.toList()); final Table updateKeyedResult = queryTable.by("Sym").update(Selectable.from(updateKeyed)); assertTableEquals(updateKeyedResult, resultKeyed, TableDiff.DiffItems.DoublesExact); final Table resultAbs = queryTable.dropColumns("Sym").absSumBy(); - final List updatesAbs = queryTable.getDefinition().getColumnNames().stream() - .filter(c -> !c.equals("Sym")) - .map(c -> c + "=" + QueryTableAggregationTestFormulaStaticMethods.absSumFunction(c, c)) - .collect(Collectors.toList()); - final Table updateResultAbs = - queryTable.dropColumns("Sym").by().update(Selectable.from(updatesAbs)); + final List updatesAbs = + queryTable.getDefinition().getColumnNames().stream().filter(c -> !c.equals("Sym")) + .map(c -> c + "=" + QueryTableAggregationTestFormulaStaticMethods.absSumFunction(c, c)) + .collect(Collectors.toList()); + final Table updateResultAbs = queryTable.dropColumns("Sym").by().update(Selectable.from(updatesAbs)); TableTools.show(resultAbs); TableTools.show(updateResultAbs); assertTableEquals(updateResultAbs, resultAbs, TableDiff.DiffItems.DoublesExact); final Table resultKeyedAbs = queryTable.absSumBy("Sym"); - final List updateKeyedAbs = queryTable.getDefinition().getColumnNames().stream() - .filter(c -> !c.equals("Sym")) - .map(c 
-> c + "=" + QueryTableAggregationTestFormulaStaticMethods.absSumFunction(c, c)) - .collect(Collectors.toList()); - final Table updateKeyedResultAbs = - queryTable.by("Sym").update(Selectable.from(updateKeyedAbs)); + final List updateKeyedAbs = + queryTable.getDefinition().getColumnNames().stream().filter(c -> !c.equals("Sym")) + .map(c -> c + "=" + QueryTableAggregationTestFormulaStaticMethods.absSumFunction(c, c)) + .collect(Collectors.toList()); + final Table updateKeyedResultAbs = queryTable.by("Sym").update(Selectable.from(updateKeyedAbs)); assertTableEquals(updateKeyedResultAbs, resultKeyedAbs, TableDiff.DiffItems.DoublesExact); } @@ -1401,32 +1316,31 @@ public void testMinMaxByStatic() { private void testMinMaxByStatic(int size, boolean lotsOfStrings) { final Random random = new Random(0); - final QueryTable queryTable = getTable(false, size, random, initColumnInfos( - new String[] {"Sym", - "charCol", "byteCol", - "shortCol", "intCol", "longCol", - "doubleCol", - "doubleNanCol", - "boolCol", - "bigI", - "bigD", - "dt", - "boolCol" - }, - lotsOfStrings ? 
new StringGenerator(1000000) : new SetGenerator<>("a", "b", "c", "d"), - new CharGenerator('a', 'z'), - new ByteGenerator(), - new ShortGenerator((short) -20000, (short) 20000, 0.1), - new IntGenerator(Integer.MIN_VALUE / 2, Integer.MAX_VALUE / 2, 0.01), - new LongGenerator(-100_000_000, 100_000_000), - new SetGenerator<>(10.1, 20.1, 30.1, -40.1), - new DoubleGenerator(-100000.0, 100000.0, 0.01, 0.001), - new BooleanGenerator(0.5, 0.1), - new BigIntegerGenerator(0.1), - new BigDecimalGenerator(0.1), - new UnsortedDateTimeGenerator(DBTimeUtils.convertDateTime("2019-12-17T00:00:00 NY"), - DBTimeUtils.convertDateTime("2019-12-17T23:59:59 NY"), 0.1), - new BooleanGenerator(0.4, 0.1))); + final QueryTable queryTable = getTable(false, size, random, initColumnInfos(new String[] {"Sym", + "charCol", "byteCol", + "shortCol", "intCol", "longCol", + "doubleCol", + "doubleNanCol", + "boolCol", + "bigI", + "bigD", + "dt", + "boolCol" + }, + lotsOfStrings ? new StringGenerator(1000000) : new SetGenerator<>("a", "b", "c", "d"), + new CharGenerator('a', 'z'), + new ByteGenerator(), + new ShortGenerator((short) -20000, (short) 20000, 0.1), + new IntGenerator(Integer.MIN_VALUE / 2, Integer.MAX_VALUE / 2, 0.01), + new LongGenerator(-100_000_000, 100_000_000), + new SetGenerator<>(10.1, 20.1, 30.1, -40.1), + new DoubleGenerator(-100000.0, 100000.0, 0.01, 0.001), + new BooleanGenerator(0.5, 0.1), + new BigIntegerGenerator(0.1), + new BigDecimalGenerator(0.1), + new UnsortedDateTimeGenerator(DBTimeUtils.convertDateTime("2019-12-17T00:00:00 NY"), + DBTimeUtils.convertDateTime("2019-12-17T23:59:59 NY"), 0.1), + new BooleanGenerator(0.4, 0.1))); if (LiveTableTestCase.printTableUpdates) { TableTools.showWithIndex(queryTable); @@ -1434,23 +1348,23 @@ private void testMinMaxByStatic(int size, boolean lotsOfStrings) { final Table result = queryTable.minBy(); final List updates = queryTable.getDefinition().getColumnNames().stream() - .map(c -> c + "=" + 
QueryTableAggregationTestFormulaStaticMethods.minFunction(c)) - .collect(Collectors.toList()); + .map(c -> c + "=" + QueryTableAggregationTestFormulaStaticMethods.minFunction(c)) + .collect(Collectors.toList()); final Table updateResult = queryTable.by().update(Selectable.from(updates)); assertTableEquals(updateResult, result); final Table resultKeyed = queryTable.minBy("Sym"); final List updateKeyed = - queryTable.getDefinition().getColumnNames().stream().filter(c -> !c.equals("Sym")) - .map(c -> c + "=" + QueryTableAggregationTestFormulaStaticMethods.minFunction(c)) - .collect(Collectors.toList()); + queryTable.getDefinition().getColumnNames().stream().filter(c -> !c.equals("Sym")) + .map(c -> c + "=" + QueryTableAggregationTestFormulaStaticMethods.minFunction(c)) + .collect(Collectors.toList()); final Table updateKeyedResult = queryTable.by("Sym").update(Selectable.from(updateKeyed)); assertTableEquals(updateKeyedResult, resultKeyed); final Table resultMax = queryTable.maxBy(); final List updatesMax = queryTable.getDefinition().getColumnNames().stream() - .map(c -> c + "=" + QueryTableAggregationTestFormulaStaticMethods.maxFunction(c)) - .collect(Collectors.toList()); + .map(c -> c + "=" + QueryTableAggregationTestFormulaStaticMethods.maxFunction(c)) + .collect(Collectors.toList()); final Table updateResultMax = queryTable.by().update(Selectable.from(updatesMax)); TableTools.show(resultMax); TableTools.show(updateResultMax); @@ -1458,11 +1372,10 @@ private void testMinMaxByStatic(int size, boolean lotsOfStrings) { final Table resultKeyedMax = queryTable.maxBy("Sym"); final List updateKeyedMax = - queryTable.getDefinition().getColumnNames().stream().filter(c -> !c.equals("Sym")) - .map(c -> c + "=" + QueryTableAggregationTestFormulaStaticMethods.maxFunction(c)) - .collect(Collectors.toList()); - final Table updateKeyedResultMax = - queryTable.by("Sym").update(Selectable.from(updateKeyedMax)); + queryTable.getDefinition().getColumnNames().stream().filter(c -> 
!c.equals("Sym")) + .map(c -> c + "=" + QueryTableAggregationTestFormulaStaticMethods.maxFunction(c)) + .collect(Collectors.toList()); + final Table updateKeyedResultMax = queryTable.by("Sym").update(Selectable.from(updateKeyedMax)); assertTableEquals(updateKeyedResultMax, resultKeyedMax); } @@ -1477,50 +1390,46 @@ public void testAvgByStatic() { private void testAvgByStatic(int size, boolean lotsOfStrings) { final Random random = new Random(0); - final QueryTable queryTable = getTable(false, size, random, initColumnInfos( - new String[] {"Sym", - "charCol", "byteCol", - "shortCol", "intCol", "longCol", - "doubleCol", - "doubleNanCol", - "bigI", - "bigD" - }, - lotsOfStrings ? new StringGenerator(1000000) : new SetGenerator<>("a", "b", "c", "d"), - new CharGenerator('a', 'z'), - new ByteGenerator(), - new ShortGenerator((short) -20000, (short) 20000, 0.1), - new IntGenerator(Integer.MIN_VALUE / 2, Integer.MAX_VALUE / 2, 0.01), - new LongGenerator(-100_000_000, 100_000_000), - new SetGenerator<>(10.1, 20.1, 30.1, -40.1), - new DoubleGenerator(-100000.0, 100000.0, 0.01, 0.001), - new BigIntegerGenerator(0.1), - new BigDecimalGenerator(0.1))); + final QueryTable queryTable = getTable(false, size, random, initColumnInfos(new String[] {"Sym", + "charCol", "byteCol", + "shortCol", "intCol", "longCol", + "doubleCol", + "doubleNanCol", + "bigI", + "bigD" + }, + lotsOfStrings ? 
new StringGenerator(1000000) : new SetGenerator<>("a", "b", "c", "d"), + new CharGenerator('a', 'z'), + new ByteGenerator(), + new ShortGenerator((short) -20000, (short) 20000, 0.1), + new IntGenerator(Integer.MIN_VALUE / 2, Integer.MAX_VALUE / 2, 0.01), + new LongGenerator(-100_000_000, 100_000_000), + new SetGenerator<>(10.1, 20.1, 30.1, -40.1), + new DoubleGenerator(-100000.0, 100000.0, 0.01, 0.001), + new BigIntegerGenerator(0.1), + new BigDecimalGenerator(0.1))); if (LiveTableTestCase.printTableUpdates) { TableTools.showWithIndex(queryTable); } final Table result = queryTable.dropColumns("Sym").avgBy(); - final List updates = - queryTable.getDefinition().getColumnNames().stream().filter(c -> !c.equals("Sym")) + final List updates = queryTable.getDefinition().getColumnNames().stream().filter(c -> !c.equals("Sym")) .flatMap(c -> Stream.of( - c + "_Sum=" + QueryTableAggregationTestFormulaStaticMethods.sumFunction(c) + "(" - + c + ")", - c + "_Count=" + QueryTableAggregationTestFormulaStaticMethods.countFunction(c) - + "(" + c + ")", - avgExpr(c))) + c + "_Sum=" + QueryTableAggregationTestFormulaStaticMethods.sumFunction(c) + "(" + c + ")", + c + "_Count=" + QueryTableAggregationTestFormulaStaticMethods.countFunction(c) + "(" + c + ")", + avgExpr(c))) .collect(Collectors.toList()); final List sumsAndCounts = - queryTable.getDefinition().getColumnNames().stream().filter(c -> !c.equals("Sym")) - .flatMap(c -> Stream.of(c + "_Sum", c + "_Count")).collect(Collectors.toList()); - final Table updateResult = queryTable.dropColumns("Sym").by() - .update(Selectable.from(updates)).dropColumns(sumsAndCounts); + queryTable.getDefinition().getColumnNames().stream().filter(c -> !c.equals("Sym")) + .flatMap(c -> Stream.of(c + "_Sum", c + "_Count")).collect(Collectors.toList()); + final Table updateResult = + queryTable.dropColumns("Sym").by().update(Selectable.from(updates)).dropColumns(sumsAndCounts); assertTableEquals(updateResult, result, TableDiff.DiffItems.DoublesExact); 
final Table resultKeyed = queryTable.avgBy("Sym"); final Table updateKeyedResult = - queryTable.by("Sym").update(Selectable.from(updates)).dropColumns(sumsAndCounts); + queryTable.by("Sym").update(Selectable.from(updates)).dropColumns(sumsAndCounts); assertTableEquals(updateKeyedResult, resultKeyed, TableDiff.DiffItems.DoublesExact); } @@ -1535,47 +1444,43 @@ public void testVarByStatic() { private void testVarByStatic(int size, boolean lotsOfStrings) { final Random random = new Random(0); - final QueryTable queryTable = getTable(false, size, random, initColumnInfos( - new String[] {"Sym", - "charCol", - "byteCol", - "shortCol", "intCol", "longCol", - "doubleCol", - "doubleNanCol", - "bigI", - "bigD" - }, - lotsOfStrings ? new StringGenerator(1000000) : new SetGenerator<>("a", "b", "c", "d"), - new CharGenerator('a', 'z'), - new ByteGenerator(), - new ShortGenerator((short) -20000, (short) 20000, 0.1), - new IntGenerator(Integer.MIN_VALUE / 2, Integer.MAX_VALUE / 2, 0.01), - new LongGenerator(-100_000_000, 100_000_000), - new SetGenerator<>(10.1, 20.1, 30.1, -40.1), - new DoubleGenerator(-100000.0, 100000.0, 0.01, 0.001), - new BigIntegerGenerator(0.1), - new BigDecimalGenerator(0.1))); + final QueryTable queryTable = getTable(false, size, random, initColumnInfos(new String[] {"Sym", + "charCol", + "byteCol", + "shortCol", "intCol", "longCol", + "doubleCol", + "doubleNanCol", + "bigI", + "bigD" + }, + lotsOfStrings ? 
new StringGenerator(1000000) : new SetGenerator<>("a", "b", "c", "d"), + new CharGenerator('a', 'z'), + new ByteGenerator(), + new ShortGenerator((short) -20000, (short) 20000, 0.1), + new IntGenerator(Integer.MIN_VALUE / 2, Integer.MAX_VALUE / 2, 0.01), + new LongGenerator(-100_000_000, 100_000_000), + new SetGenerator<>(10.1, 20.1, 30.1, -40.1), + new DoubleGenerator(-100000.0, 100000.0, 0.01, 0.001), + new BigIntegerGenerator(0.1), + new BigDecimalGenerator(0.1))); if (LiveTableTestCase.printTableUpdates) { TableTools.showWithIndex(queryTable); } final Table result = queryTable.dropColumns("Sym").varBy(); - final List updates = - queryTable.getDefinition().getColumnNames().stream().filter(c -> !c.equals("Sym")) + final List updates = queryTable.getDefinition().getColumnNames().stream().filter(c -> !c.equals("Sym")) .map(c -> c + "=" + QueryTableAggregationTestFormulaStaticMethods.varFunction(c)) .collect(Collectors.toList()); - final Table updateResult = - queryTable.dropColumns("Sym").by().update(Selectable.from(updates)); - assertTableEquals(updateResult, result, TableDiff.DiffItems.DoublesExact, - TableDiff.DiffItems.DoubleFraction); + final Table updateResult = queryTable.dropColumns("Sym").by().update(Selectable.from(updates)); + assertTableEquals(updateResult, result, TableDiff.DiffItems.DoublesExact, TableDiff.DiffItems.DoubleFraction); final Table resultKeyed = queryTable.varBy("Sym"); final Table updateKeyedResult = queryTable.by("Sym").update(Selectable.from(updates)); TableTools.showWithIndex(queryTable.where("Sym=`mjku`")); assertTableEquals(updateKeyedResult, resultKeyed, TableDiff.DiffItems.DoublesExact, - TableDiff.DiffItems.DoubleFraction); + TableDiff.DiffItems.DoubleFraction); } @Test @@ -1589,38 +1494,35 @@ public void testStdByStatic() { private void testStdByStatic(int size, boolean lotsOfStrings) { final Random random = new Random(0); - final QueryTable queryTable = getTable(false, size, random, initColumnInfos( - new String[] {"Sym", - 
"charCol", - "byteCol", - "shortCol", "intCol", "longCol", - "doubleCol", - "doubleNanCol", - "bigI", - "bigD" - }, - lotsOfStrings ? new StringGenerator(1000000) : new SetGenerator<>("a", "b", "c", "d"), - new CharGenerator('a', 'z'), - new ByteGenerator(), - new ShortGenerator((short) -20000, (short) 20000, 0.1), - new IntGenerator(Integer.MIN_VALUE / 2, Integer.MAX_VALUE / 2, 0.01), - new LongGenerator(-100_000_000, 100_000_000), - new SetGenerator<>(10.1, 20.1, 30.1, -40.1), - new DoubleGenerator(-100000.0, 100000.0, 0.01, 0.001), - new BigIntegerGenerator(0.1), - new BigDecimalGenerator(0.1))); + final QueryTable queryTable = getTable(false, size, random, initColumnInfos(new String[] {"Sym", + "charCol", + "byteCol", + "shortCol", "intCol", "longCol", + "doubleCol", + "doubleNanCol", + "bigI", + "bigD" + }, + lotsOfStrings ? new StringGenerator(1000000) : new SetGenerator<>("a", "b", "c", "d"), + new CharGenerator('a', 'z'), + new ByteGenerator(), + new ShortGenerator((short) -20000, (short) 20000, 0.1), + new IntGenerator(Integer.MIN_VALUE / 2, Integer.MAX_VALUE / 2, 0.01), + new LongGenerator(-100_000_000, 100_000_000), + new SetGenerator<>(10.1, 20.1, 30.1, -40.1), + new DoubleGenerator(-100000.0, 100000.0, 0.01, 0.001), + new BigIntegerGenerator(0.1), + new BigDecimalGenerator(0.1))); if (LiveTableTestCase.printTableUpdates) { TableTools.showWithIndex(queryTable); } final Table result = queryTable.dropColumns("Sym").stdBy(); - final List updates = - queryTable.getDefinition().getColumnNames().stream().filter(c -> !c.equals("Sym")) + final List updates = queryTable.getDefinition().getColumnNames().stream().filter(c -> !c.equals("Sym")) .map(c -> c + "=" + QueryTableAggregationTestFormulaStaticMethods.stdFunction(c)) .collect(Collectors.toList()); - final Table updateResult = - queryTable.dropColumns("Sym").by().update(Selectable.from(updates)); + final Table updateResult = queryTable.dropColumns("Sym").by().update(Selectable.from(updates)); 
assertTableEquals(updateResult, result, TableDiff.DiffItems.DoublesExact); final Table resultKeyed = queryTable.stdBy("Sym"); @@ -1632,19 +1534,15 @@ private void testStdByStatic(int size, boolean lotsOfStrings) { private String avgExpr(String c) { if ("bigI".equals(c)) { return c + "=" + c + "_Count == 0 ? null : new java.math.BigDecimal(" + c - + "_Sum).divide(java.math.BigDecimal.valueOf(" + c - + "_Count), java.math.BigDecimal.ROUND_HALF_UP)"; + + "_Sum).divide(java.math.BigDecimal.valueOf(" + c + "_Count), java.math.BigDecimal.ROUND_HALF_UP)"; } if ("bigD".equals(c)) { - return c + "=" + c + "_Count == 0 ? null : " + c - + "_Sum.divide(java.math.BigDecimal.valueOf(" + c - + "_Count), java.math.BigDecimal.ROUND_HALF_UP)"; + return c + "=" + c + "_Count == 0 ? null : " + c + "_Sum.divide(java.math.BigDecimal.valueOf(" + c + + "_Count), java.math.BigDecimal.ROUND_HALF_UP)"; } // I would expect us to return a null for an average of nothing, but we instead return a NaN - // return c + "=" + c + "_Count == 0 ? null : ((double)" + c + "_Sum / (double)" + c + - // "_Count)"; - return c + "=((double)(" + c + "_Count == 0 ? 0.0 : " + c + "_Sum) / (double)" + c - + "_Count)"; + // return c + "=" + c + "_Count == 0 ? null : ((double)" + c + "_Sum / (double)" + c + "_Count)"; + return c + "=((double)(" + c + "_Count == 0 ? 
0.0 : " + c + "_Sum) / (double)" + c + "_Count)"; } @Test @@ -1663,29 +1561,23 @@ public void testSumByIncremental() { } } - private void testSumByIncremental(final int size, final int seed, boolean grouped, - boolean lotsOfStrings) { + private void testSumByIncremental(final int size, final int seed, boolean grouped, boolean lotsOfStrings) { try (final SafeCloseable ignored = LivenessScopeStack.open(new LivenessScope(true), true)) { doTestSumByIncremental(size, seed, grouped, lotsOfStrings); } } - private void doTestSumByIncremental(final int size, final int seed, boolean grouped, - boolean lotsOfStrings) { + private void doTestSumByIncremental(final int size, final int seed, boolean grouped, boolean lotsOfStrings) { final Random random = new Random(seed); final ColumnInfo[] columnInfo; final List ea = Collections.emptyList(); - final List ga = - Collections.singletonList(ColumnInfo.ColAttributes.Grouped); - final QueryTable queryTable = getTable(size, random, - columnInfo = initColumnInfos(new String[] {"Sym", - "charCol", - "byteCol", "shortCol", "intCol", "longCol", "bigI", "bigD", "doubleCol", - "doubleNanCol", "boolCol" - }, + final List ga = Collections.singletonList(ColumnInfo.ColAttributes.Grouped); + final QueryTable queryTable = getTable(size, random, columnInfo = initColumnInfos(new String[] {"Sym", + "charCol", + "byteCol", "shortCol", "intCol", "longCol", "bigI", "bigD", "doubleCol", "doubleNanCol", "boolCol" + }, Arrays.asList(grouped ? ga : ea, ea, ea, ea, ea, ea, ea, ea, ea, ea, ea), - lotsOfStrings ? new StringGenerator(1000000) - : new SetGenerator<>("a", "b", "c", "d"), + lotsOfStrings ? 
new StringGenerator(1000000) : new SetGenerator<>("a", "b", "c", "d"), new CharGenerator('a', 'z'), new ByteGenerator(), new ShortGenerator((short) -20000, (short) 20000, 0.1), @@ -1705,35 +1597,27 @@ private void doTestSumByIncremental(final int size, final int seed, boolean grou EvalNugget.from(() -> queryTable.dropColumns("Sym").sumBy()), EvalNugget.Sorted.from(() -> queryTable.sumBy("Sym"), "Sym"), EvalNugget.Sorted.from(() -> queryTable.sort("Sym").sumBy("Sym"), "Sym"), + EvalNugget.Sorted.from(() -> queryTable.dropColumns("Sym").sort("intCol").sumBy("intCol"), "intCol"), + EvalNugget.Sorted.from(() -> queryTable.sort("Sym", "intCol").sumBy("Sym", "intCol"), "Sym", "intCol"), + EvalNugget.Sorted.from(() -> queryTable.sort("Sym").update("x=intCol+1").sumBy("Sym"), "Sym"), + EvalNugget.Sorted.from(() -> queryTable.sortDescending("intCol").update("x=intCol+1").dropColumns("Sym") + .sumBy("intCol"), "intCol"), EvalNugget.Sorted.from( - () -> queryTable.dropColumns("Sym").sort("intCol").sumBy("intCol"), "intCol"), - EvalNugget.Sorted.from( - () -> queryTable.sort("Sym", "intCol").sumBy("Sym", "intCol"), "Sym", "intCol"), - EvalNugget.Sorted - .from(() -> queryTable.sort("Sym").update("x=intCol+1").sumBy("Sym"), "Sym"), - EvalNugget.Sorted.from(() -> queryTable.sortDescending("intCol") - .update("x=intCol+1").dropColumns("Sym").sumBy("intCol"), "intCol"), - EvalNugget.Sorted.from(() -> queryTable.sort("Sym", "intCol").update("x=intCol+1") - .sumBy("Sym", "intCol"), "Sym", "intCol"), - EvalNugget.Sorted.from( - () -> queryTable.sort("Sym", "intCol").update("x=intCol+1").sumBy("Sym"), - "Sym"), + () -> queryTable.sort("Sym", "intCol").update("x=intCol+1").sumBy("Sym", "intCol"), "Sym", + "intCol"), + EvalNugget.Sorted.from(() -> queryTable.sort("Sym", "intCol").update("x=intCol+1").sumBy("Sym"), "Sym"), EvalNugget.Sorted.from(() -> queryTable.sort("Sym").absSumBy("Sym"), "Sym"), + EvalNugget.Sorted.from(() -> 
queryTable.dropColumns("Sym").sort("intCol").absSumBy("intCol"), "intCol"), + EvalNugget.Sorted.from(() -> queryTable.sort("Sym", "intCol").absSumBy("Sym", "intCol"), "Sym", + "intCol"), + EvalNugget.Sorted.from(() -> queryTable.sort("Sym").update("x=intCol+1").absSumBy("Sym"), "Sym"), + EvalNugget.Sorted.from(() -> queryTable.sortDescending("intCol").update("x=intCol+1").dropColumns("Sym") + .absSumBy("intCol"), "intCol"), EvalNugget.Sorted.from( - () -> queryTable.dropColumns("Sym").sort("intCol").absSumBy("intCol"), - "intCol"), - EvalNugget.Sorted.from( - () -> queryTable.sort("Sym", "intCol").absSumBy("Sym", "intCol"), "Sym", - "intCol"), - EvalNugget.Sorted - .from(() -> queryTable.sort("Sym").update("x=intCol+1").absSumBy("Sym"), "Sym"), - EvalNugget.Sorted.from(() -> queryTable.sortDescending("intCol") - .update("x=intCol+1").dropColumns("Sym").absSumBy("intCol"), "intCol"), - EvalNugget.Sorted.from(() -> queryTable.sort("Sym", "intCol").update("x=intCol+1") - .absSumBy("Sym", "intCol"), "Sym", "intCol"), - EvalNugget.Sorted.from( - () -> queryTable.sort("Sym", "intCol").update("x=intCol+1").absSumBy("Sym"), - "Sym"), + () -> queryTable.sort("Sym", "intCol").update("x=intCol+1").absSumBy("Sym", "intCol"), "Sym", + "intCol"), + EvalNugget.Sorted.from(() -> queryTable.sort("Sym", "intCol").update("x=intCol+1").absSumBy("Sym"), + "Sym"), }; for (int step = 0; step < 50; step++) { @@ -1747,8 +1631,8 @@ private void doTestSumByIncremental(final int size, final int seed, boolean grou @Test public void testAbsSumBySimple() { final QueryTable table = TstUtils.testRefreshingTable(i(2, 4, 6), - col("BigI", BigInteger.valueOf(-1), BigInteger.valueOf(2), BigInteger.valueOf(-3)), - col("DoubleCol", -1.0, 2.0, -3.0), col("BoolCol", new Boolean[] {null, null, null})); + col("BigI", BigInteger.valueOf(-1), BigInteger.valueOf(2), BigInteger.valueOf(-3)), + col("DoubleCol", -1.0, 2.0, -3.0), col("BoolCol", new Boolean[] {null, null, null})); final Table result = 
table.absSumBy(); TableTools.show(result); @@ -1761,8 +1645,8 @@ public void testAbsSumBySimple() { TestCase.assertEquals(QueryConstants.NULL_LONG, result.getColumn("BoolCol").getLong(0)); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - TstUtils.addToTable(table, i(8), col("BigI", BigInteger.valueOf(5)), - col("DoubleCol", 5.0), col("BoolCol", true)); + TstUtils.addToTable(table, i(8), col("BigI", BigInteger.valueOf(5)), col("DoubleCol", 5.0), + col("BoolCol", true)); table.notifyListeners(i(8), i(), i()); }); show(result); @@ -1787,8 +1671,8 @@ public void testAbsSumBySimple() { TestCase.assertEquals(expected.doubleValue(), absSumDouble); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - TstUtils.addToTable(table, i(8), col("BigI", BigInteger.valueOf(4)), - col("DoubleCol", 4.0), col("BoolCol", false)); + TstUtils.addToTable(table, i(8), col("BigI", BigInteger.valueOf(4)), col("DoubleCol", 4.0), + col("BoolCol", false)); table.notifyListeners(i(), i(), i(8)); }); show(result); @@ -1801,8 +1685,8 @@ public void testAbsSumBySimple() { TestCase.assertEquals(expected.doubleValue(), absSumDouble); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - TstUtils.addToTable(table, i(10), col("BigI", BigInteger.valueOf(0)), - col("DoubleCol", Double.NaN), col("BoolCol", true)); + TstUtils.addToTable(table, i(10), col("BigI", BigInteger.valueOf(0)), col("DoubleCol", Double.NaN), + col("BoolCol", true)); table.notifyListeners(i(10), i(), i()); }); show(result); @@ -1826,9 +1710,8 @@ public void testAbsSumBySimple() { TestCase.assertEquals(0L, result.getColumn("BoolCol").getLong(0)); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - TstUtils.addToTable(table, i(12, 14), - col("BigI", BigInteger.valueOf(0), BigInteger.valueOf(0)), - doubleCol("DoubleCol", 0.0, 0.0), col("BoolCol", true, true)); + TstUtils.addToTable(table, i(12, 14), col("BigI", BigInteger.valueOf(0), BigInteger.valueOf(0)), + doubleCol("DoubleCol", 0.0, 0.0), col("BoolCol", true, 
true)); table.notifyListeners(i(12, 14), i(), i()); }); show(result); @@ -1837,8 +1720,7 @@ public void testAbsSumBySimple() { @Test public void testAbsSumByNull() { - final QueryTable table = - TstUtils.testRefreshingTable(i(2), intCol("IntCol", QueryConstants.NULL_INT), + final QueryTable table = TstUtils.testRefreshingTable(i(2), intCol("IntCol", QueryConstants.NULL_INT), floatCol("FloatCol", QueryConstants.NULL_FLOAT)); final Table result = table.absSumBy(); @@ -1872,8 +1754,7 @@ public void testAbsSumByNull() { @Test public void testAvgInfinities() { - final QueryTable table = - TstUtils.testRefreshingTable(i(2), intCol("IntCol", QueryConstants.NULL_INT), + final QueryTable table = TstUtils.testRefreshingTable(i(2), intCol("IntCol", QueryConstants.NULL_INT), floatCol("FloatCol", QueryConstants.NULL_FLOAT)); final Table result = table.avgBy(); @@ -1896,8 +1777,7 @@ public void testAvgInfinities() { TestCase.assertEquals(5.0, avgF); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - TstUtils.addToTable(table, i(9), col("IntCol", 6), - floatCol("FloatCol", Float.POSITIVE_INFINITY)); + TstUtils.addToTable(table, i(9), col("IntCol", 6), floatCol("FloatCol", Float.POSITIVE_INFINITY)); table.notifyListeners(i(9), i(), i()); }); show(result); @@ -1907,8 +1787,7 @@ public void testAvgInfinities() { TestCase.assertEquals(Double.POSITIVE_INFINITY, avgF); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - TstUtils.addToTable(table, i(10), col("IntCol", 7), - floatCol("FloatCol", Float.NEGATIVE_INFINITY)); + TstUtils.addToTable(table, i(10), col("IntCol", 7), floatCol("FloatCol", Float.NEGATIVE_INFINITY)); table.notifyListeners(i(10), i(), i()); }); show(result); @@ -1952,8 +1831,7 @@ public void testAvgInfinities() { @Test public void testVarInfinities() { - final QueryTable table = - TstUtils.testRefreshingTable(i(2), intCol("IntCol", QueryConstants.NULL_INT), + final QueryTable table = TstUtils.testRefreshingTable(i(2), intCol("IntCol", 
QueryConstants.NULL_INT), floatCol("FloatCol", QueryConstants.NULL_FLOAT)); final Table result = table.varBy(); @@ -1976,8 +1854,7 @@ public void testVarInfinities() { TestCase.assertEquals(0.5, varF); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - TstUtils.addToTable(table, i(9), col("IntCol", 6), - floatCol("FloatCol", Float.POSITIVE_INFINITY)); + TstUtils.addToTable(table, i(9), col("IntCol", 6), floatCol("FloatCol", Float.POSITIVE_INFINITY)); table.notifyListeners(i(9), i(), i()); }); show(result); @@ -1987,8 +1864,7 @@ public void testVarInfinities() { TestCase.assertEquals(Double.NaN, varF); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - TstUtils.addToTable(table, i(10), col("IntCol", 7), - floatCol("FloatCol", Float.NEGATIVE_INFINITY)); + TstUtils.addToTable(table, i(10), col("IntCol", 7), floatCol("FloatCol", Float.NEGATIVE_INFINITY)); table.notifyListeners(i(10), i(), i()); }); show(result); @@ -2042,30 +1918,26 @@ private void testAvgByIncremental(int size) { final Random random = new Random(0); final ColumnInfo columnInfo[]; final QueryTable queryTable = getTable(size, random, - columnInfo = initColumnInfos( - new String[] {"Sym", "intCol", "doubleCol", "floatCol", "bigI", "bigD", "byteCol"}, - new SetGenerator<>("a", "b", "c", "d"), - new IntGenerator(10, 100), - new SetGenerator<>(10.1, 20.1, 30.1), - new FloatGenerator(0, 100), - new BigIntegerGenerator(), - new BigDecimalGenerator(), - new ByteGenerator())); + columnInfo = initColumnInfos( + new String[] {"Sym", "intCol", "doubleCol", "floatCol", "bigI", "bigD", "byteCol"}, + new SetGenerator<>("a", "b", "c", "d"), + new IntGenerator(10, 100), + new SetGenerator<>(10.1, 20.1, 30.1), + new FloatGenerator(0, 100), + new BigIntegerGenerator(), + new BigDecimalGenerator(), + new ByteGenerator())); final EvalNugget en[] = new EvalNugget[] { EvalNugget.from(() -> queryTable.dropColumns("Sym").avgBy()), EvalNugget.from(() -> queryTable.sort("Sym").avgBy("Sym")), - EvalNugget.from(() -> 
queryTable.dropColumns("Sym").sort("intCol").avgBy("intCol") - .sort("intCol")), - EvalNugget.from(() -> queryTable.sort("Sym", "intCol").avgBy("Sym", "intCol") - .sort("Sym", "intCol")), - EvalNugget.from( - () -> queryTable.sort("Sym").update("x=intCol+1").avgBy("Sym").sort("Sym")), - EvalNugget.from(() -> queryTable.sortDescending("intCol").update("x=intCol+1") - .dropColumns("Sym").avgBy("intCol").sort("intCol")), - EvalNugget.from(() -> queryTable.sort("Sym", "intCol").update("x=intCol+1") - .avgBy("Sym", "intCol").sort("Sym", "intCol")), - EvalNugget.from(() -> queryTable.sort("Sym", "intCol").update("x=intCol+1") - .avgBy("Sym").sort("Sym")), + EvalNugget.from(() -> queryTable.dropColumns("Sym").sort("intCol").avgBy("intCol").sort("intCol")), + EvalNugget.from(() -> queryTable.sort("Sym", "intCol").avgBy("Sym", "intCol").sort("Sym", "intCol")), + EvalNugget.from(() -> queryTable.sort("Sym").update("x=intCol+1").avgBy("Sym").sort("Sym")), + EvalNugget.from(() -> queryTable.sortDescending("intCol").update("x=intCol+1").dropColumns("Sym") + .avgBy("intCol").sort("intCol")), + EvalNugget.from(() -> queryTable.sort("Sym", "intCol").update("x=intCol+1").avgBy("Sym", "intCol") + .sort("Sym", "intCol")), + EvalNugget.from(() -> queryTable.sort("Sym", "intCol").update("x=intCol+1").avgBy("Sym").sort("Sym")), }; for (int i = 0; i < 50; i++) { LiveTableTestCase.simulateShiftAwareStep(size, random, queryTable, columnInfo, en); @@ -2085,34 +1957,33 @@ private void testStdVarByIncremental(int size) { final Random random = new Random(0); final ColumnInfo columnInfo[]; final QueryTable queryTable = getTable(size, random, - columnInfo = initColumnInfos( - new String[] {"Sym", "byteCol", "charCol", "shortCol", "intCol", "longCol", - "floatCol", "doubleCol", "bigI", "bigD"}, - new SetGenerator<>("a", "b", "c", "d"), - new ByteGenerator((byte) (Byte.MIN_VALUE + 1), Byte.MAX_VALUE, 0.1), - new CharGenerator('a', 'z', 0.1), - new ShortGenerator((short) (Short.MIN_VALUE + 1), 
Short.MAX_VALUE, 0.1), - new IntGenerator(10, 100, 0.1), - new LongGenerator(-100, 100000, 0.1), - new FloatGenerator(0, 100, 0.1), - new DoubleGenerator(0, 100, 0.1), - new TstUtils.BigIntegerGenerator(), - new TstUtils.BigDecimalGenerator())); + columnInfo = initColumnInfos( + new String[] {"Sym", "byteCol", "charCol", "shortCol", "intCol", "longCol", "floatCol", + "doubleCol", "bigI", "bigD"}, + new SetGenerator<>("a", "b", "c", "d"), + new ByteGenerator((byte) (Byte.MIN_VALUE + 1), Byte.MAX_VALUE, 0.1), + new CharGenerator('a', 'z', 0.1), + new ShortGenerator((short) (Short.MIN_VALUE + 1), Short.MAX_VALUE, 0.1), + new IntGenerator(10, 100, 0.1), + new LongGenerator(-100, 100000, 0.1), + new FloatGenerator(0, 100, 0.1), + new DoubleGenerator(0, 100, 0.1), + new TstUtils.BigIntegerGenerator(), + new TstUtils.BigDecimalGenerator())); if (LiveTableTestCase.printTableUpdates) { TableTools.showWithIndex(queryTable); } final String integerCmp = - "DiffI=((isNull(doubleI) || isNaN(doubleI)) && isNull(bigI)) || (!isNull(bigI) && (doubleI - bigI.doubleValue() < (0.01 * doubleI)))"; - final String decimalCmp = integerCmp.replaceAll("DiffI", "DiffD") - .replaceAll("doubleI", "doubleD").replaceAll("bigI", "bigD"); + "DiffI=((isNull(doubleI) || isNaN(doubleI)) && isNull(bigI)) || (!isNull(bigI) && (doubleI - bigI.doubleValue() < (0.01 * doubleI)))"; + final String decimalCmp = + integerCmp.replaceAll("DiffI", "DiffD").replaceAll("doubleI", "doubleD").replaceAll("bigI", "bigD"); final Table trueForSyms = - queryTable.countBy("DiffI", "Sym").view("Sym", "DiffI=true", "DiffD=true").sort("Sym"); + queryTable.countBy("DiffI", "Sym").view("Sym", "DiffI=true", "DiffD=true").sort("Sym"); final Table bigAsDouble = queryTable - .view("Sym", "bigI", "bigD", "doubleI=bigI.doubleValue()", "doubleD=bigD.doubleValue()") - .sort("Sym"); + .view("Sym", "bigI", "bigD", "doubleI=bigI.doubleValue()", "doubleD=bigD.doubleValue()").sort("Sym"); final Table bigVsDoubleVar = 
bigAsDouble.varBy("Sym"); final Table doubleComparisonVar = bigVsDoubleVar.view("Sym", integerCmp, decimalCmp); final Table bigVsDoubleStd = bigAsDouble.stdBy("Sym"); @@ -2166,8 +2037,8 @@ public void show() { @Test public void testWeightedAvgByLong() { - final QueryTable table = TstUtils.testRefreshingTable(i(2, 4, 6), col("Long1", 2L, 4L, 6L), - col("Long2", 1L, 2L, 3L)); + final QueryTable table = + TstUtils.testRefreshingTable(i(2, 4, 6), col("Long1", 2L, 4L, 6L), col("Long2", 1L, 2L, 3L)); final Table result = table.wavgBy("Long2"); TableTools.show(result); TestCase.assertEquals(1, result.size()); @@ -2178,8 +2049,7 @@ public void testWeightedAvgByLong() { TestCase.assertEquals(expected, wavg); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - TstUtils.addToTable(table, i(8), col("Long1", (long) Integer.MAX_VALUE), - col("Long2", 7L)); + TstUtils.addToTable(table, i(8), col("Long1", (long) Integer.MAX_VALUE), col("Long2", 7L)); table.notifyListeners(i(8), i(), i()); }); show(result); @@ -2205,19 +2075,19 @@ private void testWeightedAvgByIncremental(int size, int seed) { final Random random = new Random(seed); final ColumnInfo[] columnInfo; final QueryTable queryTable = getTable(size, random, - columnInfo = initColumnInfos( - new String[] {"Sym", "intCol", "intCol2", "doubleCol", "doubleNullCol", - "doubleCol2", "floatCol", "charCol", "byteCol", "shortCol"}, - new SetGenerator<>("a", "b", "c", "d"), - new IntGenerator(10, 100), - new IntGenerator(1, 1000), - new DoubleGenerator(0, 100), - new DoubleGenerator(0, 100, 0.1, 0.001), - new SetGenerator<>(10.1, 20.1, 30.1), - new FloatGenerator(0, 100, 0.1, 0.001), - new CharGenerator('a', 'z'), - new ByteGenerator(), - new ShortGenerator())); + columnInfo = initColumnInfos( + new String[] {"Sym", "intCol", "intCol2", "doubleCol", "doubleNullCol", "doubleCol2", + "floatCol", "charCol", "byteCol", "shortCol"}, + new SetGenerator<>("a", "b", "c", "d"), + new IntGenerator(10, 100), + new IntGenerator(1, 1000), + 
new DoubleGenerator(0, 100), + new DoubleGenerator(0, 100, 0.1, 0.001), + new SetGenerator<>(10.1, 20.1, 30.1), + new FloatGenerator(0, 100, 0.1, 0.001), + new CharGenerator('a', 'z'), + new ByteGenerator(), + new ShortGenerator())); if (LiveTableTestCase.printTableUpdates) { System.out.println("Original Source Table:"); @@ -2228,21 +2098,20 @@ private void testWeightedAvgByIncremental(int size, int seed) { // long columns result in overflows when doing randomized tests final EvalNuggetInterface[] en = new EvalNuggetInterface[] { EvalNugget.from(() -> queryTable.view("intCol", "doubleCol").wavgBy("intCol")), + EvalNugget.Sorted.from(() -> queryTable.view("intCol", "Sym", "doubleCol").wavgBy("intCol", "Sym"), + "Sym"), EvalNugget.Sorted.from( - () -> queryTable.view("intCol", "Sym", "doubleCol").wavgBy("intCol", "Sym"), - "Sym"), - EvalNugget.Sorted.from(() -> queryTable - .view("doubleCol", "intCol", "intCol2", "Sym").wavgBy("doubleCol", "Sym"), - "Sym"), + () -> queryTable.view("doubleCol", "intCol", "intCol2", "Sym").wavgBy("doubleCol", "Sym"), + "Sym"), EvalNugget.Sorted.from(() -> queryTable.wavgBy("doubleCol", "Sym"), "Sym"), EvalNugget.Sorted.from(() -> queryTable.wavgBy("floatCol", "Sym"), "Sym"), EvalNugget.Sorted.from(() -> queryTable.wavgBy("charCol", "Sym"), "Sym"), EvalNugget.Sorted.from(() -> queryTable.wavgBy("byteCol", "Sym"), "Sym"), EvalNugget.Sorted.from(() -> queryTable.wavgBy("shortCol", "Sym"), "Sym"), new TableComparator(queryTable.view("intCol2", "intCol").wavgBy("intCol2"), "wavg", - queryTable.updateView("W=intCol*intCol2").by() - .update("WSum=sum(W)", "C=sum(intCol2)", "intCol=WSum/C").view("intCol"), - "update"), + queryTable.updateView("W=intCol*intCol2").by() + .update("WSum=sum(W)", "C=sum(intCol2)", "intCol=WSum/C").view("intCol"), + "update"), }; for (int step = 0; step < 50; step++) { if (LiveTableTestCase.printTableUpdates) { @@ -2267,19 +2136,19 @@ private void testWeightedSumByIncremental(int size, int seed) { final Random 
random = new Random(seed); final ColumnInfo[] columnInfo; final QueryTable queryTable = getTable(size, random, - columnInfo = initColumnInfos( - new String[] {"Sym", "intCol", "intCol2", "doubleCol", "doubleNullCol", - "doubleCol2", "floatCol", "charCol", "byteCol", "shortCol"}, - new SetGenerator<>("a", "b", "c", "d"), - new IntGenerator(10, 100), - new IntGenerator(1, 1000), - new DoubleGenerator(0, 100), - new DoubleGenerator(0, 100, 0.1, 0.001), - new SetGenerator<>(10.1, 20.1, 30.1), - new FloatGenerator(0, 100, 0.1, 0.001), - new CharGenerator('a', 'z'), - new ByteGenerator(), - new ShortGenerator())); + columnInfo = initColumnInfos( + new String[] {"Sym", "intCol", "intCol2", "doubleCol", "doubleNullCol", "doubleCol2", + "floatCol", "charCol", "byteCol", "shortCol"}, + new SetGenerator<>("a", "b", "c", "d"), + new IntGenerator(10, 100), + new IntGenerator(1, 1000), + new DoubleGenerator(0, 100), + new DoubleGenerator(0, 100, 0.1, 0.001), + new SetGenerator<>(10.1, 20.1, 30.1), + new FloatGenerator(0, 100, 0.1, 0.001), + new CharGenerator('a', 'z'), + new ByteGenerator(), + new ShortGenerator())); if (LiveTableTestCase.printTableUpdates) { System.out.println("Original Source Table:"); @@ -2290,36 +2159,29 @@ private void testWeightedSumByIncremental(int size, int seed) { // long columns result in overflows when doing randomized tests final EvalNuggetInterface[] en = new EvalNuggetInterface[] { EvalNugget.from(() -> queryTable.view("intCol", "doubleCol").wsumBy("intCol")), + EvalNugget.Sorted.from(() -> queryTable.view("intCol", "Sym", "doubleCol").wsumBy("intCol", "Sym"), + "Sym"), EvalNugget.Sorted.from( - () -> queryTable.view("intCol", "Sym", "doubleCol").wsumBy("intCol", "Sym"), - "Sym"), - EvalNugget.Sorted.from(() -> queryTable - .view("doubleCol", "intCol", "intCol2", "Sym").wsumBy("doubleCol", "Sym"), - "Sym"), + () -> queryTable.view("doubleCol", "intCol", "intCol2", "Sym").wsumBy("doubleCol", "Sym"), + "Sym"), EvalNugget.Sorted.from(() -> 
queryTable.wsumBy("doubleCol", "Sym"), "Sym"), EvalNugget.Sorted.from(() -> queryTable.wsumBy("floatCol", "Sym"), "Sym"), EvalNugget.Sorted.from(() -> queryTable.wsumBy("charCol", "Sym"), "Sym"), EvalNugget.Sorted.from(() -> queryTable.wsumBy("byteCol", "Sym"), "Sym"), EvalNugget.Sorted.from(() -> queryTable.wsumBy("shortCol", "Sym"), "Sym"), new TableComparator(queryTable.view("intCol2", "intCol").wsumBy("intCol2"), "wsum", - queryTable.updateView("W=intCol*intCol2").by().update("intCol=(long)sum(W)") - .view("intCol"), - "update"), - new TableComparator(queryTable.view("intCol2", "doubleCol").wsumBy("intCol2"), - "wsum", - queryTable.updateView("W=doubleCol*intCol2").by().update("doubleCol=sum(W)") - .view("doubleCol"), - "update"), - new TableComparator(queryTable.view("doubleCol", "intCol").wsumBy("doubleCol"), - "wsum", - queryTable.updateView("W=doubleCol*intCol").by().update("intCol=sum(W)") - .view("intCol"), - "update"), - new TableComparator(queryTable.view("doubleCol", "doubleCol2").wsumBy("doubleCol2"), - "wsum", - queryTable.updateView("W=doubleCol*doubleCol2").by().update("doubleCol=sum(W)") - .view("doubleCol"), - "update"), + queryTable.updateView("W=intCol*intCol2").by().update("intCol=(long)sum(W)").view("intCol"), + "update"), + new TableComparator(queryTable.view("intCol2", "doubleCol").wsumBy("intCol2"), "wsum", + queryTable.updateView("W=doubleCol*intCol2").by().update("doubleCol=sum(W)").view("doubleCol"), + "update"), + new TableComparator(queryTable.view("doubleCol", "intCol").wsumBy("doubleCol"), "wsum", + queryTable.updateView("W=doubleCol*intCol").by().update("intCol=sum(W)").view("intCol"), + "update"), + new TableComparator(queryTable.view("doubleCol", "doubleCol2").wsumBy("doubleCol2"), "wsum", + queryTable.updateView("W=doubleCol*doubleCol2").by().update("doubleCol=sum(W)") + .view("doubleCol"), + "update"), }; for (int step = 0; step < 50; step++) { if (LiveTableTestCase.printTableUpdates) { @@ -2342,17 +2204,17 @@ private void 
testCountByIncremental(int size) { final Random random = new Random(0); final ColumnInfo[] columnInfo; final QueryTable queryTable = getTable(size, random, - columnInfo = initColumnInfos(new String[] {"Sym", "intCol", "doubleCol"}, - new SetGenerator<>("a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m", - "n", "o", "p"), - new IntGenerator(10, 100), - new SetGenerator<>(10.1, 20.1, 30.1))); + columnInfo = initColumnInfos(new String[] {"Sym", "intCol", "doubleCol"}, + new SetGenerator<>("a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m", "n", "o", + "p"), + new IntGenerator(10, 100), + new SetGenerator<>(10.1, 20.1, 30.1))); final EvalNuggetInterface[] en = new EvalNuggetInterface[] { EvalNugget.from(() -> queryTable.countBy("Count", "Sym").sort("Sym")), - new UpdateValidatorNugget(queryTable.sort("intCol").countBy("Count", "Sym") - .view("Count=Count * 2", "Sym")), - new UpdateValidatorNugget(queryTable.sort("doubleCol").avgBy("Sym") - .view("doubleCol=doubleCol*2", "intCol")), + new UpdateValidatorNugget( + queryTable.sort("intCol").countBy("Count", "Sym").view("Count=Count * 2", "Sym")), + new UpdateValidatorNugget( + queryTable.sort("doubleCol").avgBy("Sym").view("doubleCol=doubleCol*2", "intCol")), }; for (int i = 0; i < 100; i++) { @@ -2375,55 +2237,47 @@ private void testMinMaxByIncremental(int size, int seed) { final Random random = new Random(seed); final ColumnInfo[] columnInfo; final QueryTable queryTable = getTable(size, random, - columnInfo = initColumnInfos( - new String[] {"Sym", "intCol", "shortCol", "byteCol", "doubleCol", "Timestamp", - "boolCol", "betterDoubleCol", "floatCol"}, - new SetGenerator<>("a", "b", "c", "d"), - new IntGenerator(10, 100, 0.1), - new ShortGenerator((short) 10, (short) 100, 0.1), - new ByteGenerator((byte) 10, (byte) 100, 0.1), - new SetGenerator<>(10.1, 20.1, 30.1), - new UnsortedDateTimeGenerator(DBTimeUtils.convertDateTime("2020-01-01T00:00:00 NY"), - 
DBTimeUtils.convertDateTime("2020-01-25T00:00:00 NY")), - new BooleanGenerator(0.4, 0.2), - new DoubleGenerator(Double.MIN_NORMAL, Double.MIN_NORMAL, 0.05, 0.05), - new FloatGenerator(Float.MIN_NORMAL, Float.MIN_NORMAL, 0.05, 0.05))); + columnInfo = initColumnInfos( + new String[] {"Sym", "intCol", "shortCol", "byteCol", "doubleCol", "Timestamp", "boolCol", + "betterDoubleCol", "floatCol"}, + new SetGenerator<>("a", "b", "c", "d"), + new IntGenerator(10, 100, 0.1), + new ShortGenerator((short) 10, (short) 100, 0.1), + new ByteGenerator((byte) 10, (byte) 100, 0.1), + new SetGenerator<>(10.1, 20.1, 30.1), + new UnsortedDateTimeGenerator(DBTimeUtils.convertDateTime("2020-01-01T00:00:00 NY"), + DBTimeUtils.convertDateTime("2020-01-25T00:00:00 NY")), + new BooleanGenerator(0.4, 0.2), + new DoubleGenerator(Double.MIN_NORMAL, Double.MIN_NORMAL, 0.05, 0.05), + new FloatGenerator(Float.MIN_NORMAL, Float.MIN_NORMAL, 0.05, 0.05))); if (LiveTableTestCase.printTableUpdates) { showWithIndex(queryTable); } final EvalNuggetInterface[] en = new EvalNuggetInterface[] { EvalNugget.from(() -> queryTable.maxBy("Sym").sort("Sym")), EvalNugget.from(() -> queryTable.sort("Sym").maxBy("Sym")), - EvalNugget.from(() -> queryTable.dropColumns("Sym").sort("intCol").maxBy("intCol") - .sort("intCol")), - EvalNugget.from(() -> queryTable.sort("Sym", "intCol").maxBy("Sym", "intCol") - .sort("Sym", "intCol")), - EvalNugget.from( - () -> queryTable.sort("Sym").update("x=intCol+1").maxBy("Sym").sort("Sym")), - EvalNugget.from(() -> queryTable.sortDescending("intCol").update("x=intCol+1") - .dropColumns("Sym").maxBy("intCol").sort("intCol")), - EvalNugget.from(() -> queryTable.sort("Sym", "intCol").update("x=intCol+1") - .maxBy("Sym", "intCol").sort("Sym", "intCol")), - EvalNugget.from(() -> queryTable.sort("Sym", "intCol").update("x=intCol+1") - .maxBy("Sym").sort("Sym")), + EvalNugget.from(() -> queryTable.dropColumns("Sym").sort("intCol").maxBy("intCol").sort("intCol")), + EvalNugget.from(() -> 
queryTable.sort("Sym", "intCol").maxBy("Sym", "intCol").sort("Sym", "intCol")), + EvalNugget.from(() -> queryTable.sort("Sym").update("x=intCol+1").maxBy("Sym").sort("Sym")), + EvalNugget.from(() -> queryTable.sortDescending("intCol").update("x=intCol+1").dropColumns("Sym") + .maxBy("intCol").sort("intCol")), + EvalNugget.from(() -> queryTable.sort("Sym", "intCol").update("x=intCol+1").maxBy("Sym", "intCol") + .sort("Sym", "intCol")), + EvalNugget.from(() -> queryTable.sort("Sym", "intCol").update("x=intCol+1").maxBy("Sym").sort("Sym")), EvalNugget.from(() -> queryTable.minBy("Sym").sort("Sym")), EvalNugget.from(() -> queryTable.sort("Sym").minBy("Sym")), - EvalNugget.from(() -> queryTable.dropColumns("Sym").sort("intCol").minBy("intCol") - .sort("intCol")), - EvalNugget.from(() -> queryTable.sort("Sym", "intCol").minBy("Sym", "intCol") - .sort("Sym", "intCol")), - EvalNugget.from( - () -> queryTable.sort("Sym").update("x=intCol+1").minBy("Sym").sort("Sym")), - EvalNugget.from(() -> queryTable.sortDescending("intCol").update("x=intCol+1") - .dropColumns("Sym").minBy("intCol").sort("intCol")), - EvalNugget.from(() -> queryTable.sort("Sym", "intCol").update("x=intCol+1") - .minBy("Sym", "intCol").sort("Sym", "intCol")), - EvalNugget.from(() -> queryTable.sort("Sym", "intCol").update("x=intCol+1") - .minBy("Sym").sort("Sym")), + EvalNugget.from(() -> queryTable.dropColumns("Sym").sort("intCol").minBy("intCol").sort("intCol")), + EvalNugget.from(() -> queryTable.sort("Sym", "intCol").minBy("Sym", "intCol").sort("Sym", "intCol")), + EvalNugget.from(() -> queryTable.sort("Sym").update("x=intCol+1").minBy("Sym").sort("Sym")), + EvalNugget.from(() -> queryTable.sortDescending("intCol").update("x=intCol+1").dropColumns("Sym") + .minBy("intCol").sort("intCol")), + EvalNugget.from(() -> queryTable.sort("Sym", "intCol").update("x=intCol+1").minBy("Sym", "intCol") + .sort("Sym", "intCol")), + EvalNugget.from(() -> queryTable.sort("Sym", 
"intCol").update("x=intCol+1").minBy("Sym").sort("Sym")), new TableComparator(queryTable.maxBy("Sym").sort("Sym"), - queryTable.applyToAllBy("max(each)", "Sym").sort("Sym")), + queryTable.applyToAllBy("max(each)", "Sym").sort("Sym")), new TableComparator(queryTable.minBy("Sym").sort("Sym"), - queryTable.applyToAllBy("min(each)", "Sym").sort("Sym")), + queryTable.applyToAllBy("min(each)", "Sym").sort("Sym")), }; for (int step = 0; step < 50; step++) { if (LiveTableTestCase.printTableUpdates) { @@ -2445,115 +2299,104 @@ private void testMinMaxByAppend(int size) { final Random random = new Random(0); final ColumnInfo[] columnInfo; final QueryTable queryTable = getTable(size, random, - columnInfo = initColumnInfos(new String[] {"Sym", "intCol", "doubleCol"}, - new SetGenerator<>("a", "b", "c", "d"), - new IntGenerator(10, 100, 0.1), - new SetGenerator<>(10.1, 20.1, 30.1))); + columnInfo = initColumnInfos(new String[] {"Sym", "intCol", "doubleCol"}, + new SetGenerator<>("a", "b", "c", "d"), + new IntGenerator(10, 100, 0.1), + new SetGenerator<>(10.1, 20.1, 30.1))); if (LiveTableTestCase.printTableUpdates) { showWithIndex(queryTable); } final EvalNuggetInterface[] en = new EvalNuggetInterface[] { new EvalNugget() { public Table e() { - return queryTable.by(new AddOnlyMinMaxByStateFactoryImpl(false), "Sym") - .sort("Sym"); + return queryTable.by(new AddOnlyMinMaxByStateFactoryImpl(false), "Sym").sort("Sym"); } }, new EvalNugget() { public Table e() { return queryTable.dropColumns("Sym").update("x = k") - .by(new AddOnlyMinMaxByStateFactoryImpl(false), "intCol") - .sort("intCol"); + .by(new AddOnlyMinMaxByStateFactoryImpl(false), "intCol").sort("intCol"); } }, new EvalNugget() { public Table e() { return queryTable.updateView("x = k") - .by(new AddOnlyMinMaxByStateFactoryImpl(false), "Sym", "intCol") - .sort("Sym", "intCol"); + .by(new AddOnlyMinMaxByStateFactoryImpl(false), "Sym", "intCol").sort("Sym", "intCol"); } }, new EvalNugget() { public Table e() { - return 
queryTable.update("x=intCol+1") - .by(new AddOnlyMinMaxByStateFactoryImpl(false), "Sym").sort("Sym"); + return queryTable.update("x=intCol+1").by(new AddOnlyMinMaxByStateFactoryImpl(false), "Sym") + .sort("Sym"); } }, new EvalNugget() { public Table e() { return queryTable.update("x=intCol+1").dropColumns("Sym") - .by(new AddOnlyMinMaxByStateFactoryImpl(false), "intCol") - .sort("intCol"); + .by(new AddOnlyMinMaxByStateFactoryImpl(false), "intCol").sort("intCol"); } }, new EvalNugget() { public Table e() { return queryTable.update("x=intCol+1") - .by(new AddOnlyMinMaxByStateFactoryImpl(false), "Sym", "intCol") - .sort("Sym", "intCol"); + .by(new AddOnlyMinMaxByStateFactoryImpl(false), "Sym", "intCol").sort("Sym", "intCol"); } }, new EvalNugget() { public Table e() { - return queryTable.update("x=intCol+1") - .by(new AddOnlyMinMaxByStateFactoryImpl(false), "Sym").sort("Sym"); + return queryTable.update("x=intCol+1").by(new AddOnlyMinMaxByStateFactoryImpl(false), "Sym") + .sort("Sym"); } }, new EvalNugget() { public Table e() { - return queryTable.by(new AddOnlyMinMaxByStateFactoryImpl(true), "Sym") - .sort("Sym"); + return queryTable.by(new AddOnlyMinMaxByStateFactoryImpl(true), "Sym").sort("Sym"); } }, new EvalNugget() { public Table e() { return queryTable.dropColumns("Sym").update("x = k") - .by(new AddOnlyMinMaxByStateFactoryImpl(true), "intCol").sort("intCol"); + .by(new AddOnlyMinMaxByStateFactoryImpl(true), "intCol").sort("intCol"); } }, new EvalNugget() { public Table e() { return queryTable.updateView("x = k") - .by(new AddOnlyMinMaxByStateFactoryImpl(true), "Sym", "intCol") - .sort("Sym", "intCol"); + .by(new AddOnlyMinMaxByStateFactoryImpl(true), "Sym", "intCol").sort("Sym", "intCol"); } }, new EvalNugget() { public Table e() { - return queryTable.update("x=intCol+1") - .by(new AddOnlyMinMaxByStateFactoryImpl(true), "Sym").sort("Sym"); + return queryTable.update("x=intCol+1").by(new AddOnlyMinMaxByStateFactoryImpl(true), "Sym") + .sort("Sym"); } }, new 
EvalNugget() { public Table e() { return queryTable.update("x=intCol+1").dropColumns("Sym") - .by(new AddOnlyMinMaxByStateFactoryImpl(true), "intCol").sort("intCol"); + .by(new AddOnlyMinMaxByStateFactoryImpl(true), "intCol").sort("intCol"); } }, new EvalNugget() { public Table e() { return queryTable.update("x=intCol+1") - .by(new AddOnlyMinMaxByStateFactoryImpl(true), "Sym", "intCol") - .sort("Sym", "intCol"); + .by(new AddOnlyMinMaxByStateFactoryImpl(true), "Sym", "intCol").sort("Sym", "intCol"); } }, new EvalNugget() { public Table e() { - return queryTable.update("x=intCol+1") - .by(new AddOnlyMinMaxByStateFactoryImpl(true), "Sym").sort("Sym"); + return queryTable.update("x=intCol+1").by(new AddOnlyMinMaxByStateFactoryImpl(true), "Sym") + .sort("Sym"); } }, - new TableComparator( - queryTable.by(new AddOnlyMinMaxByStateFactoryImpl(false), "Sym").sort("Sym"), - queryTable.applyToAllBy("max(each)", "Sym").sort("Sym")), - new TableComparator( - queryTable.by(new AddOnlyMinMaxByStateFactoryImpl(true), "Sym").sort("Sym"), - queryTable.applyToAllBy("min(each)", "Sym").sort("Sym")), + new TableComparator(queryTable.by(new AddOnlyMinMaxByStateFactoryImpl(false), "Sym").sort("Sym"), + queryTable.applyToAllBy("max(each)", "Sym").sort("Sym")), + new TableComparator(queryTable.by(new AddOnlyMinMaxByStateFactoryImpl(true), "Sym").sort("Sym"), + queryTable.applyToAllBy("min(each)", "Sym").sort("Sym")), }; for (int step = 0; step < 50; step++) { LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - final Index keysToAdd = - TstUtils.newIndex(random.nextInt(size / 2 + 1), queryTable.getIndex(), random); + final Index keysToAdd = TstUtils.newIndex(random.nextInt(size / 2 + 1), queryTable.getIndex(), random); final ColumnHolder[] columnAdditions = new ColumnHolder[columnInfo.length]; for (int column = 0; column < columnAdditions.length; column++) { columnAdditions[column] = columnInfo[column].populateMapAndC(keysToAdd, random); @@ -2578,11 +2421,11 @@ private void 
testMedianByIncremental(int size) { final Random random = new Random(0); final ColumnInfo[] columnInfo; final QueryTable queryTable = getTable(size, random, - columnInfo = initColumnInfos(new String[] {"Sym", "intCol", "doubleCol", "floatCol"}, - new SetGenerator<>("a", "b", "c", "d"), - new IntGenerator(10, 100), - new SetGenerator<>(10.1, 20.1, 30.1), - new FloatGenerator(0, 100.0f))); + columnInfo = initColumnInfos(new String[] {"Sym", "intCol", "doubleCol", "floatCol"}, + new SetGenerator<>("a", "b", "c", "d"), + new IntGenerator(10, 100), + new SetGenerator<>(10.1, 20.1, 30.1), + new FloatGenerator(0, 100.0f))); final Table withoutFloats = queryTable.dropColumns("floatCol"); if (LiveTableTestCase.printTableUpdates) { showWithIndex(queryTable); @@ -2592,16 +2435,12 @@ private void testMedianByIncremental(int size) { EvalNugget.from(() -> queryTable.view("doubleCol").medianBy()), EvalNugget.Sorted.from(() -> queryTable.medianBy("Sym"), "Sym"), new UpdateValidatorNugget(queryTable.medianBy("Sym")), - EvalNugget.from(() -> withoutFloats - .by(new PercentileByStateFactoryImpl(0.25), "Sym").sort("Sym")), - EvalNugget.from(() -> withoutFloats - .by(new PercentileByStateFactoryImpl(0.75), "Sym").sort("Sym")), - EvalNugget.from(() -> withoutFloats.by(new PercentileByStateFactoryImpl(0.1), "Sym") - .sort("Sym")), - EvalNugget.from(() -> withoutFloats - .by(new PercentileByStateFactoryImpl(0.99), "Sym").sort("Sym")), - EvalNugget.from(() -> withoutFloats.where("Sym=`a`") - .by(new PercentileByStateFactoryImpl(0.99), "Sym").sort("Sym")) + EvalNugget.from(() -> withoutFloats.by(new PercentileByStateFactoryImpl(0.25), "Sym").sort("Sym")), + EvalNugget.from(() -> withoutFloats.by(new PercentileByStateFactoryImpl(0.75), "Sym").sort("Sym")), + EvalNugget.from(() -> withoutFloats.by(new PercentileByStateFactoryImpl(0.1), "Sym").sort("Sym")), + EvalNugget.from(() -> withoutFloats.by(new PercentileByStateFactoryImpl(0.99), "Sym").sort("Sym")), + EvalNugget.from(() -> 
withoutFloats.where("Sym=`a`").by(new PercentileByStateFactoryImpl(0.99), "Sym") + .sort("Sym")) }; for (int step = 0; step < 50; step++) { if (LiveTableTestCase.printTableUpdates) { @@ -2616,25 +2455,22 @@ public void testTDigest() { final int size = 10000; final Random random = new Random(0); final QueryTable queryTable = getTable(size, random, - initColumnInfos(new String[] {"Sym", "intCol", "doubleCol", "floatCol"}, - new SetGenerator<>("a", "b", "c", "d"), - new IntGenerator(10, 100), - new DoubleGenerator(-10000, 10000, 0.05, 0.05), - new FloatGenerator(0, 100.0f))); + initColumnInfos(new String[] {"Sym", "intCol", "doubleCol", "floatCol"}, + new SetGenerator<>("a", "b", "c", "d"), + new IntGenerator(10, 100), + new DoubleGenerator(-10000, 10000, 0.05, 0.05), + new FloatGenerator(0, 100.0f))); - final Table aggregated = - ApproximatePercentile.approximatePercentile(queryTable.dropColumns("Sym"), 0.99); + final Table aggregated = ApproximatePercentile.approximatePercentile(queryTable.dropColumns("Sym"), 0.99); TableTools.showWithIndex(aggregated); - final Table aggregatedBySym = - ApproximatePercentile.approximatePercentile(queryTable, 0.99, "Sym"); + final Table aggregatedBySym = ApproximatePercentile.approximatePercentile(queryTable, 0.99, "Sym"); TableTools.showWithIndex(aggregatedBySym); checkTableP99(queryTable, aggregated); for (final String sym : new String[] {"a", "b", "c", "d"}) { System.out.println("Checking: " + sym); - checkTableP99(queryTable.where("Sym=`" + sym + "`"), - aggregatedBySym.where("Sym=`" + sym + "`")); + checkTableP99(queryTable.where("Sym=`" + sym + "`"), aggregatedBySym.where("Sym=`" + sym + "`")); } } @@ -2643,28 +2479,27 @@ public void testTDigestMulti() { final int size = 10000; final Random random = new Random(0); final QueryTable queryTable = getTable(size, random, - initColumnInfos(new String[] {"Sym", "doubleCol", "floatCol"}, - new SetGenerator<>("a", "b", "c", "d"), - new DoubleGenerator(-10000, 10000, 0.05, 0.05), - new 
FloatGenerator(0, 100.0f))); + initColumnInfos(new String[] {"Sym", "doubleCol", "floatCol"}, + new SetGenerator<>("a", "b", "c", "d"), + new DoubleGenerator(-10000, 10000, 0.05, 0.05), + new FloatGenerator(0, 100.0f))); final ApproximatePercentile.PercentileDefinition definition = - new ApproximatePercentile.PercentileDefinition("doubleCol").add(0.75, "DP75") - .add(0.95, "DP95").add(0.99, "DP99").add(0.999, "DP999").nextColumn("floatCol") - .add(0.75, "FP75").add(0.99, "FP99"); + new ApproximatePercentile.PercentileDefinition("doubleCol").add(0.75, "DP75").add(0.95, "DP95") + .add(0.99, "DP99").add(0.999, "DP999").nextColumn("floatCol").add(0.75, "FP75") + .add(0.99, "FP99"); final Table aggregated = - ApproximatePercentile.approximatePercentiles(queryTable.dropColumns("Sym"), definition); + ApproximatePercentile.approximatePercentiles(queryTable.dropColumns("Sym"), definition); TableTools.showWithIndex(aggregated); - final Table aggregatedBySym = - ApproximatePercentile.approximatePercentiles(queryTable, definition, "Sym"); + final Table aggregatedBySym = ApproximatePercentile.approximatePercentiles(queryTable, definition, "Sym"); TableTools.showWithIndex(aggregatedBySym); checkTableComboPercentiles(queryTable, aggregated); for (final String sym : new String[] {"a", "b", "c", "d"}) { System.out.println("Checking: " + sym); checkTableComboPercentiles(queryTable.where("Sym=`" + sym + "`"), - aggregatedBySym.where("Sym=`" + sym + "`")); + aggregatedBySym.where("Sym=`" + sym + "`")); } } @@ -2673,25 +2508,25 @@ public void testTDigestAccumulation() { final int size = 10000; final Random random = new Random(0); final QueryTable queryTable = getTable(size, random, - initColumnInfos(new String[] {"Sym", "doubleCol", "floatCol"}, - new SetGenerator<>("a", "b", "c", "d"), - new DoubleGenerator(-10000, 10000, 0.05, 0.05), - new FloatGenerator(0, 100.0f))); + initColumnInfos(new String[] {"Sym", "doubleCol", "floatCol"}, + new SetGenerator<>("a", "b", "c", "d"), + new 
DoubleGenerator(-10000, 10000, 0.05, 0.05), + new FloatGenerator(0, 100.0f))); final ApproximatePercentile.PercentileDefinition definition = - new ApproximatePercentile.PercentileDefinition("doubleCol").exposeDigest("Digest") - .add(0.95, "P95").setCompression(33); + new ApproximatePercentile.PercentileDefinition("doubleCol").exposeDigest("Digest").add(0.95, "P95") + .setCompression(33); final Table aggregated = - ApproximatePercentile.approximatePercentiles(queryTable.dropColumns("Sym"), definition); + ApproximatePercentile.approximatePercentiles(queryTable.dropColumns("Sym"), definition); TableTools.showWithIndex(aggregated); - final Table aggregatedBySym = ApproximatePercentile.approximatePercentiles(queryTable, - definition.setCompression(100), "Sym"); + final Table aggregatedBySym = + ApproximatePercentile.approximatePercentiles(queryTable, definition.setCompression(100), "Sym"); TableTools.showWithIndex(aggregatedBySym); final Table accumulated = aggregatedBySym.dropColumns("Sym").by() - .update("Digest=io.deephaven.db.v2.by.ApproximatePercentile.accumulateDigests(Digest)") - .update("P95=Digest.quantile(0.95)"); + .update("Digest=io.deephaven.db.v2.by.ApproximatePercentile.accumulateDigests(Digest)") + .update("P95=Digest.quantile(0.95)"); TableTools.show(accumulated); final double singleValue = aggregated.getColumn("P95").getDouble(0); @@ -2705,8 +2540,7 @@ public void testTDigestAccumulation() { } private void checkTableP99(Table queryTable, Table aggregated) { - final double[] dValues = - (double[]) queryTable.where("!Double.isNaN(doubleCol) && !isNull(doubleCol)") + final double[] dValues = (double[]) queryTable.where("!Double.isNaN(doubleCol) && !isNull(doubleCol)") .getColumn("doubleCol").getDirect(); Arrays.sort(dValues); final double dValue = dValues[(dValues.length * 99) / 100]; @@ -2715,8 +2549,8 @@ private void checkTableP99(Table queryTable, Table aggregated) { System.out.println("Double: " + dValue + ", " + dtValue + ", Error: " + derror); 
TestCase.assertTrue(derror < 0.005); // if we are within 1/2% we'll pass it - final float[] fValues = (float[]) queryTable - .where("!Float.isNaN(floatCol) && !isNull(floatCol)").getColumn("floatCol").getDirect(); + final float[] fValues = (float[]) queryTable.where("!Float.isNaN(floatCol) && !isNull(floatCol)") + .getColumn("floatCol").getDirect(); Arrays.sort(fValues); final float fValue = fValues[(fValues.length * 99) / 100]; final double ftValue = aggregated.getColumn("floatCol").getDouble(0); @@ -2724,8 +2558,7 @@ private void checkTableP99(Table queryTable, Table aggregated) { System.out.println("Float: " + fValue + ", " + ftValue + ", Error: " + ferror); TestCase.assertTrue(ferror < 0.005); // if we are within 1/2% we'll pass it - final int[] iValues = - (int[]) queryTable.where("!isNull(intCol)").getColumn("intCol").getDirect(); + final int[] iValues = (int[]) queryTable.where("!isNull(intCol)").getColumn("intCol").getDirect(); Arrays.sort(iValues); final float iValue = iValues[(iValues.length * 99) / 100]; final double itValue = aggregated.getColumn("intCol").getDouble(0); @@ -2735,8 +2568,7 @@ private void checkTableP99(Table queryTable, Table aggregated) { } private void checkTableComboPercentiles(Table queryTable, Table aggregated) { - final double[] dValues = - (double[]) queryTable.where("!Double.isNaN(doubleCol) && !isNull(doubleCol)") + final double[] dValues = (double[]) queryTable.where("!Double.isNaN(doubleCol) && !isNull(doubleCol)") .getColumn("doubleCol").getDirect(); Arrays.sort(dValues); final double dValue75 = dValues[(dValues.length * 75) / 100]; @@ -2754,12 +2586,11 @@ private void checkTableComboPercentiles(Table queryTable, Table aggregated) { final double dValue999 = dValues[(dValues.length * 999) / 1000]; final double dtValue999 = aggregated.getColumn("DP999").getDouble(0); final double derror999 = Math.abs((dValue999 - dtValue999) / dValue999); - System.out - .println("Double 99.9: " + dValue999 + ", " + dtValue999 + ", Error: " + 
derror999); + System.out.println("Double 99.9: " + dValue999 + ", " + dtValue999 + ", Error: " + derror999); TestCase.assertTrue(derror999 < 0.005); // if we are within 1/2% we'll pass it - final float[] fValues = (float[]) queryTable - .where("!Float.isNaN(floatCol) && !isNull(floatCol)").getColumn("floatCol").getDirect(); + final float[] fValues = (float[]) queryTable.where("!Float.isNaN(floatCol) && !isNull(floatCol)") + .getColumn("floatCol").getDirect(); Arrays.sort(fValues); final float fValue75 = fValues[(fValues.length * 75) / 100]; final double ftValue75 = aggregated.getColumn("FP75").getDouble(0); @@ -2780,15 +2611,15 @@ public void testTDigestIncremental() { final Random random = new Random(0); final ColumnInfo[] columnInfos; final QueryTable queryTable = getTable(size, random, - columnInfos = initColumnInfos(new String[] {"Sym", "doubleCol", "longCol"}, - new SetGenerator<>("a", "b", "c", "d"), - new DoubleGenerator(-10000, 10000, 0.05, 0.05), - new LongGenerator(0, 1_000_000_000L))); + columnInfos = initColumnInfos(new String[] {"Sym", "doubleCol", "longCol"}, + new SetGenerator<>("a", "b", "c", "d"), + new DoubleGenerator(-10000, 10000, 0.05, 0.05), + new LongGenerator(0, 1_000_000_000L))); final ApproximatePercentile.PercentileDefinition definition = - new ApproximatePercentile.PercentileDefinition("doubleCol").add(0.75, "DP75") - .add(0.95, "DP95").add(0.99, "DP99").add(0.999, "DP999").nextColumn("longCol") - .add(0.75, "LP75").add(0.95, "LP95").add(0.99, "FP99").add(0.999, "LP999"); + new ApproximatePercentile.PercentileDefinition("doubleCol").add(0.75, "DP75").add(0.95, "DP95") + .add(0.99, "DP99").add(0.999, "DP999").nextColumn("longCol").add(0.75, "LP75").add(0.95, "LP95") + .add(0.99, "FP99").add(0.999, "LP999"); final EvalNugget[] en = new EvalNugget[] { new EvalNugget() { @@ -2805,8 +2636,8 @@ void checkDifferences(String msg, Table recomputed) { TestCase.assertEquals(rc.getIndex(), i(0)); TestCase.assertEquals(ov.getIndex(), i(0)); - for 
(final Map.Entry columnSourceEntry : rc - .getColumnSourceMap().entrySet()) { + for (final Map.Entry columnSourceEntry : rc.getColumnSourceMap() + .entrySet()) { final String name = columnSourceEntry.getKey(); final ColumnSource rcs = columnSourceEntry.getValue(); final ColumnSource ocs = ov.getColumnSource(name); @@ -2814,12 +2645,11 @@ void checkDifferences(String msg, Table recomputed) { final double recomputedPercentile = rcs.getDouble(0); final double originalPercentile = ocs.getDouble(0); - final double error = Math.abs( - (recomputedPercentile - originalPercentile) / recomputedPercentile); + final double error = + Math.abs((recomputedPercentile - originalPercentile) / recomputedPercentile); if (error > .01) { throw new ComparisonFailure("Bad percentile for " + name, - Double.toString(recomputedPercentile), - Double.toString(originalPercentile)); + Double.toString(recomputedPercentile), Double.toString(originalPercentile)); } } } @@ -2827,8 +2657,7 @@ void checkDifferences(String msg, Table recomputed) { new EvalNugget.Sorted(new String[] {"Sym"}) { @Override protected Table e() { - return ApproximatePercentile.approximatePercentiles(queryTable, definition, - "Sym"); + return ApproximatePercentile.approximatePercentiles(queryTable, definition, "Sym"); } @Override @@ -2844,8 +2673,8 @@ void checkDifferences(String msg, Table recomputed) { TestCase.assertEquals(rc.getIndex(), i(0, 1, 2, 3)); TestCase.assertEquals(ov.getIndex(), i(0, 1, 2, 3)); - for (final Map.Entry columnSourceEntry : rc - .getColumnSourceMap().entrySet()) { + for (final Map.Entry columnSourceEntry : rc.getColumnSourceMap() + .entrySet()) { final String name = columnSourceEntry.getKey(); final ColumnSource rcs = columnSourceEntry.getValue(); final ColumnSource ocs = ov.getColumnSource(name); @@ -2859,14 +2688,12 @@ void checkDifferences(String msg, Table recomputed) { final double recomputedPercentile = rcs.getDouble(ii); final double originalPercentile = ocs.getDouble(ii); - final double 
error = - Math.abs((recomputedPercentile - originalPercentile) - / recomputedPercentile); + final double error = Math + .abs((recomputedPercentile - originalPercentile) / recomputedPercentile); if (error > .025) { - throw new ComparisonFailure( - "Bad percentile for " + name + ", error=" + error, - Double.toString(recomputedPercentile), - Double.toString(originalPercentile)); + throw new ComparisonFailure("Bad percentile for " + name + ", error=" + error, + Double.toString(recomputedPercentile), + Double.toString(originalPercentile)); } } } @@ -2881,8 +2708,7 @@ void checkDifferences(String msg, Table recomputed) { System.out.println("Step = " + step); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - final Index added = - Index.FACTORY.getIndexByRange(size * (fstep + 1), size * (fstep + 2) - 1); + final Index added = Index.FACTORY.getIndexByRange(size * (fstep + 1), size * (fstep + 2) - 1); queryTable.getIndex().insert(added); // Modifies and Adds in post-shift keyspace. @@ -2905,18 +2731,18 @@ public void testMedianTypes() { QueryScope.addParam("booleans", booleans); final Table table = emptyTable(10) - .update("Timestamp='2020-03-14T00:00:00 NY' + DBTimeUtils.MINUTE * i", - "MyString=Integer.toString(i)", - "MyInt=i", - "MyLong=ii", - "MyFloat=i * 1.0f", - "MyDouble=i * 1.0", - "MyBoolean = booleans[i % booleans.length]", - "MyChar = (char)('a' + i)", - "MyShort=(short)(10 + i)", - "MyByte=(byte)(20 + i)", - "MyBigDecimal=java.math.BigDecimal.TEN.add(java.math.BigDecimal.valueOf(i))", - "MyBigInteger=java.math.BigInteger.ZERO.add(java.math.BigInteger.valueOf(i))"); + .update("Timestamp='2020-03-14T00:00:00 NY' + DBTimeUtils.MINUTE * i", + "MyString=Integer.toString(i)", + "MyInt=i", + "MyLong=ii", + "MyFloat=i * 1.0f", + "MyDouble=i * 1.0", + "MyBoolean = booleans[i % booleans.length]", + "MyChar = (char)('a' + i)", + "MyShort=(short)(10 + i)", + "MyByte=(byte)(20 + i)", + "MyBigDecimal=java.math.BigDecimal.TEN.add(java.math.BigDecimal.valueOf(i))", + 
"MyBigInteger=java.math.BigInteger.ZERO.add(java.math.BigInteger.valueOf(i))"); TableTools.showWithIndex(table.getMeta()); TableTools.showWithIndex(table); @@ -2930,9 +2756,9 @@ public void testMedianTypes() { final Map expectedResults = new HashMap<>(); expectedResults.put("Timestamp", - new Object[] {DBTimeUtils.convertDateTime("2020-03-14T00:01:00 NY"), - DBTimeUtils.convertDateTime("2020-03-14T00:05:00 NY"), - DBTimeUtils.convertDateTime("2020-03-14T00:08:00 NY")}); + new Object[] {DBTimeUtils.convertDateTime("2020-03-14T00:01:00 NY"), + DBTimeUtils.convertDateTime("2020-03-14T00:05:00 NY"), + DBTimeUtils.convertDateTime("2020-03-14T00:08:00 NY")}); expectedResults.put("MyString", new Object[] {"1", "5", "8"}); expectedResults.put("MyInt", new Object[] {1, 4.5, 8}); expectedResults.put("MyLong", new Object[] {1L, 4.5, 8L}); @@ -2943,9 +2769,9 @@ public void testMedianTypes() { expectedResults.put("MyShort", new Object[] {(short) 11, (short) 15, (short) 18}); expectedResults.put("MyByte", new Object[] {(byte) 21, (byte) 25, (byte) 28}); expectedResults.put("MyBigDecimal", - new Object[] {BigDecimal.valueOf(11), BigDecimal.valueOf(15), BigDecimal.valueOf(18)}); + new Object[] {BigDecimal.valueOf(11), BigDecimal.valueOf(15), BigDecimal.valueOf(18)}); expectedResults.put("MyBigInteger", - new Object[] {BigInteger.valueOf(1), BigInteger.valueOf(5), BigInteger.valueOf(8)}); + new Object[] {BigInteger.valueOf(1), BigInteger.valueOf(5), BigInteger.valueOf(8)}); for (final Map.Entry check : expectedResults.entrySet()) { final String key = check.getKey(); @@ -2960,10 +2786,8 @@ public void testMedianTypes() { QueryScope.addParam("booleans", null); - final IncrementalReleaseFilter incrementalReleaseFilter = - new IncrementalReleaseFilter(5, 10); - final Table updated = - table.update("KeyCol=`KeyCol`").where(incrementalReleaseFilter).select(); + final IncrementalReleaseFilter incrementalReleaseFilter = new IncrementalReleaseFilter(5, 10); + final Table updated = 
table.update("KeyCol=`KeyCol`").where(incrementalReleaseFilter).select(); final Table refreshing = updated.medianBy(); final Table refreshingKeys = updated.medianBy("KeyCol"); @@ -2995,8 +2819,7 @@ public void testCountBy() { newTable().countBy("i"); TestCase.fail("should throw an exception"); } catch (RuntimeException e) { - TestCase.assertEquals("Invalid column name \"i\": \"i\" is a reserved keyword", - e.getMessage()); + TestCase.assertEquals("Invalid column name \"i\": \"i\" is a reserved keyword", e.getMessage()); } Table table = newTable(); @@ -3015,9 +2838,9 @@ public void testCountBy() { table = newTable(c("x", 1, 2, 3)); TestCase.assertEquals(3, table.countBy("count", "x").size()); TestCase.assertEquals(Arrays.asList(1, 2, 3), - Arrays.asList(table.countBy("count", "x").getColumn("x").get(0, 3))); + Arrays.asList(table.countBy("count", "x").getColumn("x").get(0, 3))); TestCase.assertEquals(Arrays.asList(1L, 1L, 1L), - Arrays.asList(table.countBy("count", "x").getColumn("count").get(0, 3))); + Arrays.asList(table.countBy("count", "x").getColumn("count").get(0, 3))); TestCase.assertEquals(2, table.countBy("count", "x").getColumns().length); try { show(table.countBy("count", "x")); @@ -3032,21 +2855,21 @@ public void testCountBy() { } TestCase.assertEquals(3, table.countBy("count", "x", "y").size()); TestCase.assertEquals(Arrays.asList(1, 2, 3), - Arrays.asList(table.countBy("count", "x", "y").getColumn("x").get(0, 3))); + Arrays.asList(table.countBy("count", "x", "y").getColumn("x").get(0, 3))); TestCase.assertEquals(Arrays.asList(1, 2, 3), - Arrays.asList(table.countBy("count", "x", "y").getColumn("y").get(0, 3))); + Arrays.asList(table.countBy("count", "x", "y").getColumn("y").get(0, 3))); TestCase.assertEquals(Arrays.asList(1L, 3L, 2L), - Arrays.asList(table.countBy("count", "x", "y").getColumn("count").get(0, 3))); + Arrays.asList(table.countBy("count", "x", "y").getColumn("count").get(0, 3))); TestCase.assertEquals(3, table.countBy("count", "x", 
"y").getColumns().length); table = newTable(c("x", 1, 2, 3), c("y", 1, 2, 3)); TestCase.assertEquals(3, table.countBy("count", "x", "y").size()); TestCase.assertEquals(Arrays.asList(1, 2, 3), - Arrays.asList(table.countBy("count", "x", "y").getColumn("x").get(0, 3))); + Arrays.asList(table.countBy("count", "x", "y").getColumn("x").get(0, 3))); TestCase.assertEquals(Arrays.asList(1, 2, 3), - Arrays.asList(table.countBy("count", "x", "y").getColumn("y").get(0, 3))); + Arrays.asList(table.countBy("count", "x", "y").getColumn("y").get(0, 3))); TestCase.assertEquals(Arrays.asList(1L, 1L, 1L), - Arrays.asList(table.countBy("count", "x", "y").getColumn("count").get(0, 3))); + Arrays.asList(table.countBy("count", "x", "y").getColumn("count").get(0, 3))); TestCase.assertEquals(3, table.countBy("count", "x", "y").getColumns().length); try { show(table.countBy("count", "x", "y")); @@ -3069,8 +2892,7 @@ public void testSelectDistinct() { TestCase.assertEquals(3, result.size()); TestCase.assertEquals(3, result.getColumn("x").size()); TestCase.assertEquals(1, result.getColumns().length); - TestCase.assertEquals(Arrays.asList(1, 2, 3), - Arrays.asList(result.getColumn("x").get(0, 3))); + TestCase.assertEquals(Arrays.asList(1, 2, 3), Arrays.asList(result.getColumn("x").get(0, 3))); table = newTable(c("x", 1, 2, 2, 2, 3, 3), c("y", 1, 2, 2, 3, 3, 3)); System.out.println("Table:"); @@ -3079,15 +2901,13 @@ public void testSelectDistinct() { TestCase.assertEquals(3, result.size()); TestCase.assertEquals(3, result.getColumn("x").size()); TestCase.assertEquals(1, result.getColumns().length); - TestCase.assertEquals(Arrays.asList(1, 2, 3), - Arrays.asList(result.getColumn("x").get(0, 3))); + TestCase.assertEquals(Arrays.asList(1, 2, 3), Arrays.asList(result.getColumn("x").get(0, 3))); result = table.selectDistinct("y"); TestCase.assertEquals(3, result.size()); TestCase.assertEquals(3, result.getColumn("y").size()); TestCase.assertEquals(1, result.getColumns().length); - 
TestCase.assertEquals(Arrays.asList(1, 2, 3), - Arrays.asList(result.getColumn("y").get(0, 3))); + TestCase.assertEquals(Arrays.asList(1, 2, 3), Arrays.asList(result.getColumn("y").get(0, 3))); result = table.selectDistinct("x", "y"); show(result); @@ -3095,10 +2915,8 @@ public void testSelectDistinct() { TestCase.assertEquals(4, result.getColumn("x").size()); TestCase.assertEquals(4, result.getColumn("y").size()); TestCase.assertEquals(2, result.getColumns().length); - TestCase.assertEquals(Arrays.asList(1, 2, 2, 3), - Arrays.asList(result.getColumn("x").get(0, 4))); - TestCase.assertEquals(Arrays.asList(1, 2, 3, 3), - Arrays.asList(result.getColumn("y").get(0, 4))); + TestCase.assertEquals(Arrays.asList(1, 2, 2, 3), Arrays.asList(result.getColumn("x").get(0, 4))); + TestCase.assertEquals(Arrays.asList(1, 2, 3, 3), Arrays.asList(result.getColumn("y").get(0, 4))); } private class SelectDistinctEvalNugget implements EvalNuggetInterface { @@ -3112,17 +2930,15 @@ private class SelectDistinctEvalNugget implements EvalNuggetInterface { this.columns = columns; this.originalValue = e(); - ((QueryTable) originalValue) - .listenForUpdates(new InstrumentedShiftAwareListener("Failure Listener") { - @Override - public void onUpdate(final Update update) {} + ((QueryTable) originalValue).listenForUpdates(new InstrumentedShiftAwareListener("Failure Listener") { + @Override + public void onUpdate(final Update update) {} - @Override - public void onFailureInternal(Throwable originalException, - UpdatePerformanceTracker.Entry sourceEntry) { - exception = originalException; - } - }); + @Override + public void onFailureInternal(Throwable originalException, UpdatePerformanceTracker.Entry sourceEntry) { + exception = originalException; + } + }); } public Table e() { @@ -3133,15 +2949,12 @@ public void validate(final String msg) { Assert.assertNull(exception); // verify that if we recalculate from scratch the answer is the same final Table check1 = e(); - final String diff1 = - 
diff(originalValue, check1, 10, EnumSet.of(TableDiff.DiffItems.DoublesExact)); + final String diff1 = diff(originalValue, check1, 10, EnumSet.of(TableDiff.DiffItems.DoublesExact)); Assert.assertEquals(msg, "", diff1); // we can also check the table's validity against a countBy - final Table check2 = - sourceTable.countBy("__TEMP__", columns).dropColumns("__TEMP__").sort(columns); - final String diff2 = - diff(originalValue, check2, 10, EnumSet.of(TableDiff.DiffItems.DoublesExact)); + final Table check2 = sourceTable.countBy("__TEMP__", columns).dropColumns("__TEMP__").sort(columns); + final String diff2 = diff(originalValue, check2, 10, EnumSet.of(TableDiff.DiffItems.DoublesExact)); Assert.assertEquals(msg, "", diff2); } @@ -3161,8 +2974,7 @@ public void testSelectDistinctIncremental() { final int size = 20; final ColumnInfo[] columnInfo; - final QueryTable table = getTable(size, random, - columnInfo = initColumnInfos(new String[] {"C1", "C2"}, + final QueryTable table = getTable(size, random, columnInfo = initColumnInfos(new String[] {"C1", "C2"}, new SetGenerator<>("a", "b", "c", "d"), new SetGenerator<>(10, 20, 30))); final EvalNuggetInterface[] en = new EvalNuggetInterface[] { @@ -3211,8 +3023,7 @@ public void testSelectDistinctUpdates() { TestCase.assertEquals(4, result.size()); TestCase.assertEquals(0, listener.getCount()); - // now let's remove one of our rows, but not the last one with a given value, also expecting - // no changes + // now let's remove one of our rows, but not the last one with a given value, also expecting no changes System.out.println("Removing original 1."); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { listener.reset(); @@ -3273,8 +3084,7 @@ public void testSelectDistinctUpdates() { TestCase.assertEquals(0, listener.getCount()); - // now modify it so that we generate a new key, but don't change the existing key's - // existence + // now modify it so that we generate a new key, but don't change the existing key's existence // 
and modify something, but keep the key the same System.out.println("Adding a 5, but not deleting what was at index."); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { @@ -3313,11 +3123,11 @@ public void testIds5942() { QueryScope.addParam("ids5942_scale", 1000); final Table randomValues = emptyTable(100) - .update("MyInt=(i%12==0 ? null : (int)(ids5942_scale*(Math.random()*2-1)))", - "MyBoolean=i%3==0 ? null : (i % 3 == 1)", - "MyDateTime=new DBDateTime(DBTimeUtils.convertDateTime(\"2020-01-28T00:00:00 NY\").getNanos() + 1000000000L * i)", - "MyBigDecimal=(i%21==0 ? null : new java.math.BigDecimal(ids5942_scale*(Math.random()*2-1)))", - "MyBigInteger=(i%22==0 ? null : new java.math.BigInteger(Integer.toString((int)(ids5942_scale*(Math.random()*2-1)))))"); + .update("MyInt=(i%12==0 ? null : (int)(ids5942_scale*(Math.random()*2-1)))", + "MyBoolean=i%3==0 ? null : (i % 3 == 1)", + "MyDateTime=new DBDateTime(DBTimeUtils.convertDateTime(\"2020-01-28T00:00:00 NY\").getNanos() + 1000000000L * i)", + "MyBigDecimal=(i%21==0 ? null : new java.math.BigDecimal(ids5942_scale*(Math.random()*2-1)))", + "MyBigInteger=(i%22==0 ? null : new java.math.BigInteger(Integer.toString((int)(ids5942_scale*(Math.random()*2-1)))))"); final Table result = randomValues.medianBy("MyInt"); @@ -3331,9 +3141,9 @@ public void testIds5944() { QueryScope.addParam("ids5944_scale", 1000); final Table randomValues = emptyTable(100) - .update("MyInt=(i%12==0 ? null : (int)(ids5944_scale*(Math.random()*2-1)))", - "MyBigDecimal=(i%21==0 ? null : new java.math.BigDecimal(ids5944_scale*(Math.random()*2-1)))", - "MyBigInteger=(i%22==0 ? null : new java.math.BigInteger(Integer.toString((int)(ids5944_scale*(Math.random()*2-1)))))"); + .update("MyInt=(i%12==0 ? null : (int)(ids5944_scale*(Math.random()*2-1)))", + "MyBigDecimal=(i%21==0 ? null : new java.math.BigDecimal(ids5944_scale*(Math.random()*2-1)))", + "MyBigInteger=(i%22==0 ? 
null : new java.math.BigInteger(Integer.toString((int)(ids5944_scale*(Math.random()*2-1)))))"); final Table result = randomValues.headBy(10, "MyInt"); @@ -3349,12 +3159,12 @@ public void testLastByNoKeyShift() { final Table last = reversedFlat.lastBy(); final InstrumentedShiftAwareListenerAdapter adapter = - new InstrumentedShiftAwareListenerAdapter((DynamicTable) reversedFlat, false) { - @Override - public void onUpdate(Update upstream) { - System.out.println(upstream); - } - }; + new InstrumentedShiftAwareListenerAdapter((DynamicTable) reversedFlat, false) { + @Override + public void onUpdate(Update upstream) { + System.out.println(upstream); + } + }; ((DynamicTable) reversedFlat).listenForUpdates(adapter); assertTableEquals(newTable(col("Sentinel", 0)), last); @@ -3375,8 +3185,8 @@ public void onUpdate(Update upstream) { @Test public void testFirstByShift() { - final QueryTable table = TstUtils.testRefreshingTable(i(1, 2, 4097), - intCol("Sentinel", 1, 2, 4097), col("Bucket", "A", "B", "A")); + final QueryTable table = TstUtils.testRefreshingTable(i(1, 2, 4097), intCol("Sentinel", 1, 2, 4097), + col("Bucket", "A", "B", "A")); final Table firstResult = table.firstBy("Bucket"); final Table lastResult = table.lastBy("Bucket"); @@ -3467,8 +3277,7 @@ public void testFirstLastByAttributes() { final Random random = new Random(0); final int size = 100; - final QueryTable table = getTable(size, random, - initColumnInfos(new String[] {"Sym", "intCol", "doubleCol"}, + final QueryTable table = getTable(size, random, initColumnInfos(new String[] {"Sym", "intCol", "doubleCol"}, new SetGenerator<>("aa", "bb", "bc", "cc", "dd"), new IntGenerator(0, 100), new DoubleGenerator(0, 100))); @@ -3482,9 +3291,10 @@ public void testFirstLastByAttributes() { Table result = table.lastBy("Sym"); if (SystemicObjectTracker.isSystemicObjectMarkingEnabled()) { TestCase.assertEquals(2, result.getAttributes().size()); - TestCase.assertEquals(new LinkedHashSet<>( - 
Arrays.asList(Table.SYSTEMIC_TABLE_ATTRIBUTE, Table.COLUMN_DESCRIPTIONS_ATTRIBUTE)), - result.getAttributes().keySet()); + TestCase.assertEquals( + new LinkedHashSet<>( + Arrays.asList(Table.SYSTEMIC_TABLE_ATTRIBUTE, Table.COLUMN_DESCRIPTIONS_ATTRIBUTE)), + result.getAttributes().keySet()); } else { TestCase.assertEquals(1, result.getAttributes().size()); } @@ -3493,9 +3303,10 @@ public void testFirstLastByAttributes() { result = table.firstBy("Sym"); if (SystemicObjectTracker.isSystemicObjectMarkingEnabled()) { TestCase.assertEquals(2, result.getAttributes().size()); - TestCase.assertEquals(new LinkedHashSet<>( - Arrays.asList(Table.SYSTEMIC_TABLE_ATTRIBUTE, Table.COLUMN_DESCRIPTIONS_ATTRIBUTE)), - result.getAttributes().keySet()); + TestCase.assertEquals( + new LinkedHashSet<>( + Arrays.asList(Table.SYSTEMIC_TABLE_ATTRIBUTE, Table.COLUMN_DESCRIPTIONS_ATTRIBUTE)), + result.getAttributes().keySet()); } else { TestCase.assertEquals(1, result.getAttributes().size()); } @@ -3505,7 +3316,7 @@ public void testFirstLastByAttributes() { @Test public void testIds6220() { final QueryTable table = TstUtils.testRefreshingTable(Index.FACTORY.getIndexByRange(0, 2), - cG("Key", "a", "b", "c"), c("I", 2, 4, 6)); + cG("Key", "a", "b", "c"), c("I", 2, 4, 6)); final IncrementalReleaseFilter filter = new IncrementalReleaseFilter(0, 10); final Table byTable = table.where(filter).by("Key"); TableTools.showWithIndex(byTable); @@ -3525,9 +3336,9 @@ public void testIds6203() { for (int ii = 0; ii < sentinels.length; ++ii) { sentinels[ii] = ii; } - final QueryTable table = TstUtils.testRefreshingTable( - Index.FACTORY.getIndexByRange(100, 100 + keyValues.length - 1), - stringCol("Key", keyValues), intCol("IntCol", sentinels)); + final QueryTable table = + TstUtils.testRefreshingTable(Index.FACTORY.getIndexByRange(100, 100 + keyValues.length - 1), + stringCol("Key", keyValues), intCol("IntCol", sentinels)); final Table flat = table.flatten(); final TableMap map = flat.byExternal("Key"); @@ 
-3538,8 +3349,7 @@ public void testIds6203() { ((DynamicTable) table).listenForUpdates(printListener); final FuzzerPrintListener flatPrintListener = new FuzzerPrintListener("flat", flat, 0); ((DynamicTable) flat).listenForUpdates(flatPrintListener); - final FuzzerPrintListener subPrintListener = - new FuzzerPrintListener("subTable", subTable, 0); + final FuzzerPrintListener subPrintListener = new FuzzerPrintListener("subTable", subTable, 0); ((DynamicTable) subTable).listenForUpdates(subPrintListener); final int newSize = 5; @@ -3585,8 +3395,7 @@ public void testIds6203() { // polarity reversal LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { final Index additions = Index.FACTORY.getIndexByRange(newSize * 2, newSize * 3 - 1); - final Index removals = - Index.FACTORY.getIndexByRange(6000 + newSize, 6000 + newSize * 3); + final Index removals = Index.FACTORY.getIndexByRange(6000 + newSize, 6000 + newSize * 3); TstUtils.addToTable(table, additions, col("Key", keys2), intCol("IntCol", sentinel2)); TstUtils.removeRows(table, removals); table.notifyListeners(additions, removals, i()); @@ -3611,8 +3420,7 @@ public void testIds6203() { LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { final Index additions1 = Index.FACTORY.getIndexByRange(newSize * 3, newSize * 4 - 1); final Index additions2 = Index.FACTORY.getIndexByRange(7000, 7000 + newSize - 1); - final Index removals = - Index.FACTORY.getIndexByRange(6000 + newSize * 4, 6000 + newSize * 5 - 1); + final Index removals = Index.FACTORY.getIndexByRange(6000 + newSize * 4, 6000 + newSize * 5 - 1); TstUtils.addToTable(table, additions1, col("Key", keys2), intCol("IntCol", sentinel2)); TstUtils.addToTable(table, additions2, col("Key", keys2), intCol("IntCol", sentinel2)); TstUtils.removeRows(table, removals); @@ -3631,8 +3439,7 @@ public void testIds6203() { final Index removals2 = Index.FACTORY.getIndexByRange(7000, 7000 + newSize - 1); final Index allRemovals = removals1.union(removals2); - final Index 
additions = - Index.FACTORY.getIndexByRange(6000 + newSize * 4, 6000 + newSize * 5 - 1); + final Index additions = Index.FACTORY.getIndexByRange(6000 + newSize * 4, 6000 + newSize * 5 - 1); TstUtils.addToTable(table, additions, col("Key", keys2), intCol("IntCol", sentinel2)); TstUtils.removeRows(table, allRemovals); table.notifyListeners(additions, allRemovals, i()); @@ -3644,19 +3451,18 @@ public void testIds6203() { @Test public void testIds6321() { final QueryTable source = - TstUtils.testRefreshingTable(i(9, 10), col("Key", "A", "A"), intCol("Sentinel", 9, 10)); + TstUtils.testRefreshingTable(i(9, 10), col("Key", "A", "A"), intCol("Sentinel", 9, 10)); final FuzzerPrintListener soucePrinter = new FuzzerPrintListener("source", source); source.listenForUpdates(soucePrinter); final QueryTable exposedLastBy = ChunkedOperatorAggregationHelper.aggregation( - new FirstOrLastByAggregationFactory(false, "ExposedRedirectionIndex"), source, - SelectColumnFactory.getExpressions("Key")); + new FirstOrLastByAggregationFactory(false, "ExposedRedirectionIndex"), source, + SelectColumnFactory.getExpressions("Key")); final TableUpdateValidator validator = TableUpdateValidator.make(exposedLastBy); final QueryTable validatorResult = validator.getResultTable(); final FailureListener validatorListener = new FailureListener(); validatorResult.listenForUpdates(validatorListener); - final FuzzerPrintListener printListener = - new FuzzerPrintListener("exposedLastBy", exposedLastBy); + final FuzzerPrintListener printListener = new FuzzerPrintListener("exposedLastBy", exposedLastBy); exposedLastBy.listenForUpdates(printListener); System.out.println("Starting:"); @@ -3693,10 +3499,9 @@ public void testIds6321() { @Test public void testIds6332() { final QueryTable source = TstUtils.testRefreshingTable(Index.FACTORY.getFlatIndex(10), - col("Value", BigInteger.valueOf(0), new BigInteger("100"), BigInteger.valueOf(100), - new BigInteger("100"), new BigInteger("100"), new BigInteger("100"), - 
new BigInteger("100"), new BigInteger("100"), new BigInteger("100"), - BigInteger.valueOf(200))); + col("Value", BigInteger.valueOf(0), new BigInteger("100"), BigInteger.valueOf(100), + new BigInteger("100"), new BigInteger("100"), new BigInteger("100"), new BigInteger("100"), + new BigInteger("100"), new BigInteger("100"), BigInteger.valueOf(200))); final Table percentile = source.by(new PercentileByStateFactoryImpl(0.25)); TableTools.show(percentile); TestCase.assertEquals(BigInteger.valueOf(100), percentile.getColumn("Value").get(0)); @@ -3714,12 +3519,10 @@ public void testIds6332() { @Test public void testIds6593() { final Table[][] resultSets = new Table[2][]; - final boolean substitutionWasEnabled = - ChunkedOperatorAggregationHelper.KEY_ONLY_SUBSTITUTION_ENABLED; + final boolean substitutionWasEnabled = ChunkedOperatorAggregationHelper.KEY_ONLY_SUBSTITUTION_ENABLED; try { for (final boolean substituteForThisIteration : new boolean[] {false, true}) { - ChunkedOperatorAggregationHelper.KEY_ONLY_SUBSTITUTION_ENABLED = - substituteForThisIteration; + ChunkedOperatorAggregationHelper.KEY_ONLY_SUBSTITUTION_ENABLED = substituteForThisIteration; final DynamicTable source = (DynamicTable) emptyTable(100).updateView("A=i%10"); source.getIndex().removeRange(50, 100); source.setRefreshing(true); @@ -3735,8 +3538,8 @@ public void testIds6593() { LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { source.getIndex().insertRange(50, 100); - source.notifyListeners(new ShiftAwareListener.Update(ir(50, 100), i(), i(), - IndexShiftData.EMPTY, ModifiedColumnSet.EMPTY)); + source.notifyListeners(new ShiftAwareListener.Update(ir(50, 100), i(), i(), IndexShiftData.EMPTY, + ModifiedColumnSet.EMPTY)); }); } } finally { diff --git a/DB/src/test/java/io/deephaven/db/v2/QueryTableAggregationTestFormulaStaticMethods.java b/DB/src/test/java/io/deephaven/db/v2/QueryTableAggregationTestFormulaStaticMethods.java index c09caa91f1c..70c70784aea 100644 --- 
a/DB/src/test/java/io/deephaven/db/v2/QueryTableAggregationTestFormulaStaticMethods.java +++ b/DB/src/test/java/io/deephaven/db/v2/QueryTableAggregationTestFormulaStaticMethods.java @@ -385,9 +385,9 @@ public static BigDecimal varBigInt(DbArray values) { return null; } final BigDecimal countMinus1 = BigDecimal.valueOf(count - 1); - return new BigDecimal(sum2).subtract( - new BigDecimal(sum.pow(2)).divide(BigDecimal.valueOf(count), BigDecimal.ROUND_HALF_UP)) - .divide(countMinus1, BigDecimal.ROUND_HALF_UP); + return new BigDecimal(sum2) + .subtract(new BigDecimal(sum.pow(2)).divide(BigDecimal.valueOf(count), BigDecimal.ROUND_HALF_UP)) + .divide(countMinus1, BigDecimal.ROUND_HALF_UP); } public static BigDecimal varBigDec(DbArray values) { @@ -408,8 +408,8 @@ public static BigDecimal varBigDec(DbArray values) { return null; } final BigDecimal countMinus1 = BigDecimal.valueOf(count - 1); - return sum2.subtract(sum.pow(2).divide(BigDecimal.valueOf(count), BigDecimal.ROUND_HALF_UP)) - .divide(countMinus1, BigDecimal.ROUND_HALF_UP); + return sum2.subtract(sum.pow(2).divide(BigDecimal.valueOf(count), BigDecimal.ROUND_HALF_UP)).divide(countMinus1, + BigDecimal.ROUND_HALF_UP); } public static char minChar(DbCharArray values) { @@ -496,8 +496,8 @@ public static double minDouble(DbDoubleArray values) { int count = 0; for (int ii = 0; ii < values.size(); ++ii) { final double v = values.get(ii); - if (v != QueryConstants.NULL_DOUBLE) { // TODO: the existing aggregator doesn't handle - // this && !Double.isNaN(v)) { + if (v != QueryConstants.NULL_DOUBLE) { // TODO: the existing aggregator doesn't handle this && + // !Double.isNaN(v)) { if (count++ == 0) { min = v; } else if (DhDoubleComparisons.lt(v, min)) { @@ -516,8 +516,8 @@ public static double maxDouble(DbDoubleArray values) { int count = 0; for (int ii = 0; ii < values.size(); ++ii) { final double v = values.get(ii); - if (v != QueryConstants.NULL_DOUBLE) { // TODO: the existing aggregator doesn't handle - // this && 
!Double.isNaN(v)) { + if (v != QueryConstants.NULL_DOUBLE) { // TODO: the existing aggregator doesn't handle this && + // !Double.isNaN(v)) { if (count++ == 0) { min = v; } else if (DhDoubleComparisons.gt(v, min)) { @@ -531,33 +531,24 @@ public static double maxDouble(DbDoubleArray values) { static String sumFunction(String col) { switch (col) { case "charCol": - return QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() - + ".sumChar"; + return QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() + ".sumChar"; case "boolCol": - return QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() - + ".sumBool"; + return QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() + ".sumBool"; case "byteCol": - return QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() - + ".sumByte"; + return QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() + ".sumByte"; case "shortCol": - return QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() - + ".sumShort"; + return QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() + ".sumShort"; case "intCol": - return QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() - + ".sumInt"; + return QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() + ".sumInt"; case "bigI": - return QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() - + ".sumBigInt"; + return QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() + ".sumBigInt"; case "bigD": - return QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() - + ".sumBigDec"; + return QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() + ".sumBigDec"; case "doubleCol": case "doubleNanCol": - return QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() - + ".sumDouble"; + return 
QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() + ".sumDouble"; case "floatCol": - return QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() - + ".sumFloat"; + return QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() + ".sumFloat"; default: return "(long)sum"; } @@ -566,15 +557,13 @@ static String sumFunction(String col) { static String minFunction(String col) { switch (col) { case "charCol": - return QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() - + ".minChar(" + col + ")"; + return QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() + ".minChar(" + col + ")"; case "doubleNanCol": - return QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() - + ".minDouble(" + col + ")"; + return QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() + ".minDouble(" + col + + ")"; case "Sym": - return "(String)" - + QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() - + ".minObj(" + col + ")"; + return "(String)" + QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() + ".minObj(" + + col + ")"; default: return GroovyStaticImports.class.getCanonicalName() + ".min(" + col + ")"; } @@ -583,15 +572,13 @@ static String minFunction(String col) { static String maxFunction(String col) { switch (col) { case "charCol": - return QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() - + ".maxChar(" + col + ")"; + return QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() + ".maxChar(" + col + ")"; case "doubleNanCol": - return QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() - + ".maxDouble(" + col + ")"; + return QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() + ".maxDouble(" + col + + ")"; case "Sym": - return "(String)" - + QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() - + ".maxObj(" + col + ")"; + return 
"(String)" + QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() + ".maxObj(" + + col + ")"; default: return GroovyStaticImports.class.getCanonicalName() + ".max(" + col + ")"; } @@ -600,14 +587,13 @@ static String maxFunction(String col) { static String varFunction(String col) { switch (col) { case "charCol": - return QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() - + ".varChar(" + col + ")"; + return QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() + ".varChar(" + col + ")"; case "bigI": - return QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() - + ".varBigInt(" + col + ")"; + return QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() + ".varBigInt(" + col + + ")"; case "bigD": - return QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() - + ".varBigDec(" + col + ")"; + return QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() + ".varBigDec(" + col + + ")"; default: return "var(" + col + ")"; } @@ -626,41 +612,31 @@ static String stdFunction(String col) { static String countFunction(String col) { switch (col) { case "charCol": - return QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() - + ".countChar"; + return QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() + ".countChar"; case "byteCol": - return QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() - + ".countByte"; + return QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() + ".countByte"; case "shortCol": - return QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() - + ".countShort"; + return QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() + ".countShort"; case "intCol": - return QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() - + ".countInt"; + return QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() 
+ ".countInt"; case "bigI": - return QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() - + ".countObject"; + return QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() + ".countObject"; case "bigD": - return QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() - + ".countObject"; + return QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() + ".countObject"; case "doubleCol": case "doubleNanCol": - return QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() - + ".countDouble"; + return QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() + ".countDouble"; case "floatCol": - return QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() - + ".countFloat"; + return QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() + ".countFloat"; case "longCol": - return QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() - + ".countLong"; + return QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() + ".countLong"; default: throw new IllegalArgumentException(); } } static String absSumFunction(String col, String expr) { - final String className = - QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName(); + final String className = QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName(); switch (col) { case "charCol": return className + ".sumChar(" + expr + ")"; @@ -678,11 +654,11 @@ static String absSumFunction(String col, String expr) { return className + ".absSumBigDec(" + expr + ")"; case "doubleCol": case "doubleNanCol": - return QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() - + ".sumDouble(" + className + ".abs(" + expr + "))"; + return QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() + ".sumDouble(" + + className + ".abs(" + expr + "))"; case "floatCol": - return 
QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() - + ".sumFloat(" + className + ".abs(" + expr + "))"; + return QueryTableAggregationTestFormulaStaticMethods.class.getCanonicalName() + ".sumFloat(" + className + + ".abs(" + expr + "))"; default: return "(long)sum(" + className + ".abs(" + expr + "))"; } diff --git a/DB/src/test/java/io/deephaven/db/v2/QueryTableAjTest.java b/DB/src/test/java/io/deephaven/db/v2/QueryTableAjTest.java index bf6ab7721af..7c2fc5754d9 100644 --- a/DB/src/test/java/io/deephaven/db/v2/QueryTableAjTest.java +++ b/DB/src/test/java/io/deephaven/db/v2/QueryTableAjTest.java @@ -63,12 +63,12 @@ public void tearDown() throws Exception { @Test public void testAjConflict() { final Table left = TableTools.newTable( - c("Bucket", "A", "B", "A", "C", "D", "A"), - longCol("LeftStamp", 1L, 10L, 50L, 3L, 4L, 60L)); + c("Bucket", "A", "B", "A", "C", "D", "A"), + longCol("LeftStamp", 1L, 10L, 50L, 3L, 4L, 60L)); final Table right = TableTools.newTable( - c("Bucket", "A", "B", "A", "B", "A", "D", "E"), - longCol("RightStamp", 1L, 5L, 10L, 25L, 50L, 5L, 3L), - intCol("Sentinel", 1, 2, 3, 4, 5, 6, 7)); + c("Bucket", "A", "B", "A", "B", "A", "D", "E"), + longCol("RightStamp", 1L, 5L, 10L, 25L, 50L, 5L, 3L), + intCol("Sentinel", 1, 2, 3, 4, 5, 6, 7)); try { left.aj(right, "LeftStamp=RightStamp"); @@ -81,8 +81,8 @@ public void testAjConflict() { @Test public void testAjNull() { final Table left = TableTools.newTable( - c("Bucket", "A", "B", "A", "C", "D", "A"), - longCol("LeftStamp", 1L, 10L, 50L, 3L, 4L, 60L)); + c("Bucket", "A", "B", "A", "C", "D", "A"), + longCol("LeftStamp", 1L, 10L, 50L, 3L, 4L, 60L)); try { left.aj(null, "LeftStamp=RightStamp"); @@ -111,12 +111,12 @@ public void testAjStaticGrouped() { public void testAjStatic(MakeColumn leftMaker, MakeColumn rightMaker) { final Table left = TstUtils.testTable( - leftMaker.make("Bucket", "A", "B", "A", "C", "D", "A"), - longCol("LeftStamp", 1L, 10L, 50L, 3L, 4L, 60L)); + 
leftMaker.make("Bucket", "A", "B", "A", "C", "D", "A"), + longCol("LeftStamp", 1L, 10L, 50L, 3L, 4L, 60L)); final Table right = TstUtils.testTable( - rightMaker.make("Bucket", "A", "B", "A", "B", "A", "D", "E"), - longCol("RightStamp", 1L, 5L, 10L, 25L, 50L, 5L, 3L), - intCol("Sentinel", 1, 2, 3, 4, 5, 6, 7)); + rightMaker.make("Bucket", "A", "B", "A", "B", "A", "D", "E"), + longCol("RightStamp", 1L, 5L, 10L, 25L, 50L, 5L, 3L), + intCol("Sentinel", 1, 2, 3, 4, 5, 6, 7)); System.out.println("Left"); TableTools.show(left); @@ -127,49 +127,48 @@ public void testAjStatic(MakeColumn leftMaker, MakeColumn rightMaker) { System.out.println("Result"); TableTools.showWithIndex(result); assertEquals(Arrays.asList("Bucket", "LeftStamp", "RightStamp", "Sentinel"), - result.getDefinition().getColumnNames()); + result.getDefinition().getColumnNames()); - BaseArrayTestCase.assertEquals(new int[] {1, 2, 5, NULL_INT, NULL_INT, 5}, - intColumn(result, "Sentinel")); + BaseArrayTestCase.assertEquals(new int[] {1, 2, 5, NULL_INT, NULL_INT, 5}, intColumn(result, "Sentinel")); final Table ltResult = left.aj(right, "Bucket,LeftStampRightStamp", "Sentinel"); System.out.println("Reverse Result GT"); TableTools.showWithIndex(reverseResultGt); assertEquals(Arrays.asList("Bucket", "LeftStamp", "RightStamp", "Sentinel"), - reverseResultGt.getDefinition().getColumnNames()); + reverseResultGt.getDefinition().getColumnNames()); BaseArrayTestCase.assertEquals(new int[] {3, 4, NULL_INT, NULL_INT, 6, NULL_INT}, - intColumn(reverseResultGt, "Sentinel")); + intColumn(reverseResultGt, "Sentinel")); } @Test public void testAjBoolean() { final Table left = TableTools.newTable( - c("Bucket", "A", "A", "B", "A", "B", "C", "C", "A"), - c("LeftStamp", true, false, true, false, false, true, false, null)); + c("Bucket", "A", "A", "B", "A", "B", "C", "C", "A"), + c("LeftStamp", true, false, true, false, false, true, false, null)); final Table right = TableTools.newTable( - c("Bucket", "A", "A", "A", "B", "C"), - 
c("RightStamp", null, false, true, true, false), - intCol("Sentinel", 1, 2, 3, 4, 5)); + c("Bucket", "A", "A", "A", "B", "C"), + c("RightStamp", null, false, true, true, false), + intCol("Sentinel", 1, 2, 3, 4, 5)); System.out.println("Left"); TableTools.show(left); @@ -180,39 +179,35 @@ public void testAjBoolean() { System.out.println("Result"); TableTools.showWithIndex(result); assertEquals(Arrays.asList("Bucket", "LeftStamp", "RightStamp", "Sentinel"), - result.getDefinition().getColumnNames()); + result.getDefinition().getColumnNames()); - BaseArrayTestCase.assertEquals(new int[] {3, 2, 4, 2, NULL_INT, 5, 5, 1}, - intColumn(result, "Sentinel")); + BaseArrayTestCase.assertEquals(new int[] {3, 2, 4, 2, NULL_INT, 5, 5, 1}, intColumn(result, "Sentinel")); final Table ltResult = left.aj(right, "Bucket,LeftStampRightStamp", "Sentinel"); System.out.println("Reverse Result GT"); TableTools.showWithIndex(reverseResultGt); assertEquals(Arrays.asList("Bucket", "LeftStamp", "RightStamp", "Sentinel"), - reverseResultGt.getDefinition().getColumnNames()); + reverseResultGt.getDefinition().getColumnNames()); - BaseArrayTestCase.assertEquals( - new int[] {NULL_INT, 3, NULL_INT, 3, 4, NULL_INT, NULL_INT, 2}, - intColumn(reverseResultGt, "Sentinel")); + BaseArrayTestCase.assertEquals(new int[] {NULL_INT, 3, NULL_INT, 3, 4, NULL_INT, NULL_INT, 2}, + intColumn(reverseResultGt, "Sentinel")); } @Test @@ -221,13 +216,13 @@ public void testAjDateTime() { final DBDateTime second = DBTimeUtils.convertDateTime("2019-06-14T19:30:00 NY"); final Table left = TableTools.newTable( - c("Bucket", "A", "A", "B", "A", "B", "C", "C", "A"), - c("LeftStamp", second, first, second, first, first, second, first, null)); + c("Bucket", "A", "A", "B", "A", "B", "C", "C", "A"), + c("LeftStamp", second, first, second, first, first, second, first, null)); final Table right = TableTools.newTable( - c("Bucket", "A", "A", "A", "B", "C"), - c("RightStamp", null, first, second, second, first), - intCol("Sentinel", 
1, 2, 3, 4, 5)); + c("Bucket", "A", "A", "A", "B", "C"), + c("RightStamp", null, first, second, second, first), + intCol("Sentinel", 1, 2, 3, 4, 5)); System.out.println("Left"); TableTools.show(left); @@ -238,92 +233,86 @@ public void testAjDateTime() { System.out.println("Result"); TableTools.showWithIndex(result); assertEquals(Arrays.asList("Bucket", "LeftStamp", "RightStamp", "Sentinel"), - result.getDefinition().getColumnNames()); + result.getDefinition().getColumnNames()); - BaseArrayTestCase.assertEquals(new int[] {3, 2, 4, 2, NULL_INT, 5, 5, 1}, - intColumn(result, "Sentinel")); + BaseArrayTestCase.assertEquals(new int[] {3, 2, 4, 2, NULL_INT, 5, 5, 1}, intColumn(result, "Sentinel")); final Table ltResult = left.aj(right, "Bucket,LeftStampRightStamp", "Sentinel"); System.out.println("Reverse Result GT"); TableTools.showWithIndex(reverseResultGt); assertEquals(Arrays.asList("Bucket", "LeftStamp", "RightStamp", "Sentinel"), - reverseResultGt.getDefinition().getColumnNames()); + reverseResultGt.getDefinition().getColumnNames()); - BaseArrayTestCase.assertEquals( - new int[] {NULL_INT, 3, NULL_INT, 3, 4, NULL_INT, NULL_INT, 2}, - intColumn(reverseResultGt, "Sentinel")); + BaseArrayTestCase.assertEquals(new int[] {NULL_INT, 3, NULL_INT, 3, 4, NULL_INT, NULL_INT, 2}, + intColumn(reverseResultGt, "Sentinel")); } @Test public void testAjEmpty() { final Table left = TableTools.newTable( - c("Bucket"), - intCol("LeftStamp")); + c("Bucket"), + intCol("LeftStamp")); final Table right = TableTools.newTable( - c("Bucket", "A", "A", "A", "B", "C"), - intCol("RightStamp", 1, 2, 3, 4, 5), - intCol("Sentinel", 1, 2, 3, 4, 5)); + c("Bucket", "A", "A", "A", "B", "C"), + intCol("RightStamp", 1, 2, 3, 4, 5), + intCol("Sentinel", 1, 2, 3, 4, 5)); final Table result = left.aj(right, "Bucket,LeftStamp=RightStamp", "Sentinel"); System.out.println("Result"); TableTools.showWithIndex(result); assertEquals(Arrays.asList("Bucket", "LeftStamp", "RightStamp", "Sentinel"), - 
result.getDefinition().getColumnNames()); + result.getDefinition().getColumnNames()); - BaseArrayTestCase.assertEquals(CollectionUtil.ZERO_LENGTH_INT_ARRAY, - intColumn(result, "Sentinel")); + BaseArrayTestCase.assertEquals(CollectionUtil.ZERO_LENGTH_INT_ARRAY, intColumn(result, "Sentinel")); } @Test public void testAjMissingState() { final Table left = TableTools.newTable( - c("Bucket", 1, 1, 2), - intCol("LeftStamp", 1, 1, 1)); + c("Bucket", 1, 1, 2), + intCol("LeftStamp", 1, 1, 1)); final Table right = TableTools.newTable( - c("Bucket", 2, 3), - intCol("RightStamp", 1, 1), - intCol("Sentinel", 1, 2)); + c("Bucket", 2, 3), + intCol("RightStamp", 1, 1), + intCol("Sentinel", 1, 2)); final Table result = left.aj(right, "Bucket,LeftStamp=RightStamp", "Sentinel"); assertEquals(Arrays.asList("Bucket", "LeftStamp", "RightStamp", "Sentinel"), - result.getDefinition().getColumnNames()); + result.getDefinition().getColumnNames()); - BaseArrayTestCase.assertEquals(new int[] {NULL_INT, NULL_INT, 1}, - intColumn(result, "Sentinel")); + BaseArrayTestCase.assertEquals(new int[] {NULL_INT, NULL_INT, 1}, intColumn(result, "Sentinel")); final Table left2 = TableTools.newTable( - c("Bucket", 1, 2), - intCol("LeftStamp", 1, 1)); + c("Bucket", 1, 2), + intCol("LeftStamp", 1, 1)); final Table right2 = TableTools.newTable( - c("Bucket", 2, 3, 3), - intCol("RightStamp", 1, 1, 1), - intCol("Sentinel", 1, 2, 3)); + c("Bucket", 2, 3, 3), + intCol("RightStamp", 1, 1, 1), + intCol("Sentinel", 1, 2, 3)); final Table result2 = left2.aj(right2, "Bucket,LeftStamp=RightStamp", "Sentinel"); assertEquals(Arrays.asList("Bucket", "LeftStamp", "RightStamp", "Sentinel"), - result.getDefinition().getColumnNames()); + result.getDefinition().getColumnNames()); BaseArrayTestCase.assertEquals(new int[] {NULL_INT, 1}, intColumn(result2, "Sentinel")); } @@ -331,13 +320,13 @@ public void testAjMissingState() { @Test public void testAjStrings() { final Table left = TableTools.newTable( - c("Bucket", "A", "A", 
"B", "A", "B", "C", "C", "A"), - c("LeftStamp", "t", "f", "t", "f", "f", "t", "f", null)); + c("Bucket", "A", "A", "B", "A", "B", "C", "C", "A"), + c("LeftStamp", "t", "f", "t", "f", "f", "t", "f", null)); final Table right = TableTools.newTable( - c("Bucket", "A", "A", "A", "B", "C"), - c("RightStamp", null, "f", "t", "t", "f"), - intCol("Sentinel", 1, 2, 3, 4, 5)); + c("Bucket", "A", "A", "A", "B", "C"), + c("RightStamp", null, "f", "t", "t", "f"), + intCol("Sentinel", 1, 2, 3, 4, 5)); System.out.println("Left"); TableTools.show(left); @@ -348,51 +337,47 @@ public void testAjStrings() { System.out.println("Result"); TableTools.showWithIndex(result); assertEquals(Arrays.asList("Bucket", "LeftStamp", "RightStamp", "Sentinel"), - result.getDefinition().getColumnNames()); + result.getDefinition().getColumnNames()); - BaseArrayTestCase.assertEquals(new int[] {3, 2, 4, 2, NULL_INT, 5, 5, 1}, - intColumn(result, "Sentinel")); + BaseArrayTestCase.assertEquals(new int[] {3, 2, 4, 2, NULL_INT, 5, 5, 1}, intColumn(result, "Sentinel")); final Table ltResult = left.aj(right, "Bucket,LeftStampRightStamp", "Sentinel"); System.out.println("Reverse Result GT"); TableTools.showWithIndex(reverseResultGt); assertEquals(Arrays.asList("Bucket", "LeftStamp", "RightStamp", "Sentinel"), - reverseResultGt.getDefinition().getColumnNames()); + reverseResultGt.getDefinition().getColumnNames()); - BaseArrayTestCase.assertEquals( - new int[] {NULL_INT, 3, NULL_INT, 3, 4, NULL_INT, NULL_INT, 2}, - intColumn(reverseResultGt, "Sentinel")); + BaseArrayTestCase.assertEquals(new int[] {NULL_INT, 3, NULL_INT, 3, 4, NULL_INT, NULL_INT, 2}, + intColumn(reverseResultGt, "Sentinel")); } @Test public void testAjFloats() { final Table left = TableTools.newTable( - doubleCol("LeftStampD", 1.0, Double.NaN, NULL_DOUBLE, 2.0, 3.0, Double.NaN), - floatCol("LeftStampF", 1.0f, Float.NaN, NULL_FLOAT, 2.0f, 3.0f, Float.NaN)); + doubleCol("LeftStampD", 1.0, Double.NaN, NULL_DOUBLE, 2.0, 3.0, Double.NaN), + 
floatCol("LeftStampF", 1.0f, Float.NaN, NULL_FLOAT, 2.0f, 3.0f, Float.NaN)); final Table right = TableTools.newTable( - doubleCol("RightStampD", NULL_DOUBLE, 1.0, 2.5, 3.0, Double.NaN, Double.NaN), - floatCol("RightStampF", NULL_FLOAT, 1.0f, 2.5f, 3.0f, Float.NaN, Float.NaN), - intCol("Sentinel", 0, 1, 2, 3, 4, 5)); + doubleCol("RightStampD", NULL_DOUBLE, 1.0, 2.5, 3.0, Double.NaN, Double.NaN), + floatCol("RightStampF", NULL_FLOAT, 1.0f, 2.5f, 3.0f, Float.NaN, Float.NaN), + intCol("Sentinel", 0, 1, 2, 3, 4, 5)); System.out.println("Left"); TableTools.show(left); @@ -403,13 +388,12 @@ public void testAjFloats() { doFloatTest(left, right, "LeftStampF", "RightStampF"); } - private void doFloatTest(Table left, Table right, final String leftStamp, - final String rightStamp) { + private void doFloatTest(Table left, Table right, final String leftStamp, final String rightStamp) { final Table result = left.aj(right, leftStamp + "=" + rightStamp, "Sentinel"); System.out.println("Result"); TableTools.showWithIndex(result); assertEquals(Arrays.asList("LeftStampD", "LeftStampF", rightStamp, "Sentinel"), - result.getDefinition().getColumnNames()); + result.getDefinition().getColumnNames()); BaseArrayTestCase.assertEquals(new int[] {1, 5, 0, 1, 3, 5}, intColumn(result, "Sentinel")); @@ -417,34 +401,31 @@ private void doFloatTest(Table left, Table right, final String leftStamp, System.out.println("LT Result"); TableTools.showWithIndex(ltResult); assertEquals(Arrays.asList("LeftStampD", "LeftStampF", rightStamp, "Sentinel"), - ltResult.getDefinition().getColumnNames()); + ltResult.getDefinition().getColumnNames()); - BaseArrayTestCase.assertEquals(new int[] {0, 3, NULL_INT, 1, 2, 3}, - intColumn(ltResult, "Sentinel")); + BaseArrayTestCase.assertEquals(new int[] {0, 3, NULL_INT, 1, 2, 3}, intColumn(ltResult, "Sentinel")); final Table reverseResult = left.raj(right, leftStamp + "=" + rightStamp, "Sentinel"); System.out.println("Reverse Result"); 
TableTools.showWithIndex(reverseResult); assertEquals(Arrays.asList("LeftStampD", "LeftStampF", rightStamp, "Sentinel"), - reverseResult.getDefinition().getColumnNames()); + reverseResult.getDefinition().getColumnNames()); - BaseArrayTestCase.assertEquals(new int[] {1, 4, 0, 2, 3, 4}, - intColumn(reverseResult, "Sentinel")); + BaseArrayTestCase.assertEquals(new int[] {1, 4, 0, 2, 3, 4}, intColumn(reverseResult, "Sentinel")); final Table reverseResultGt = left.raj(right, leftStamp + ">" + rightStamp, "Sentinel"); System.out.println("Reverse Result GT"); TableTools.showWithIndex(reverseResultGt); assertEquals(Arrays.asList("LeftStampD", "LeftStampF", rightStamp, "Sentinel"), - reverseResultGt.getDefinition().getColumnNames()); + reverseResultGt.getDefinition().getColumnNames()); BaseArrayTestCase.assertEquals(new int[] {2, NULL_INT, 1, 2, 4, NULL_INT}, - intColumn(reverseResultGt, "Sentinel")); + intColumn(reverseResultGt, "Sentinel")); } - private void tickCheck(Table left, boolean key, final String stampColumn, - final String firstUnsorted, final String secondUnsorted) { - final QueryTable right = - TstUtils.testRefreshingTable(stringCol("SingleKey", "Key", "Key", "Key"), + private void tickCheck(Table left, boolean key, final String stampColumn, final String firstUnsorted, + final String secondUnsorted) { + final QueryTable right = TstUtils.testRefreshingTable(stringCol("SingleKey", "Key", "Key", "Key"), byteCol("ByteCol", (byte) 1, (byte) 2, (byte) 3), longCol("LongCol", 1, 2, 3), doubleCol("DoubleCol", 1, 2.0, 3), @@ -452,28 +433,28 @@ private void tickCheck(Table left, boolean key, final String stampColumn, stringCol("StringCol", "A", "B", "C")); final QueryTable result1 = - (QueryTable) left.aj(right, (key ? "SingleKey," : "") + stampColumn, "Dummy=LongCol"); + (QueryTable) left.aj(right, (key ? 
"SingleKey," : "") + stampColumn, "Dummy=LongCol"); try { base.setExpectError(true); - final io.deephaven.db.v2.ErrorListener listener = - new io.deephaven.db.v2.ErrorListener(result1); + final io.deephaven.db.v2.ErrorListener listener = new io.deephaven.db.v2.ErrorListener(result1); result1.listenForUpdates(listener); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { addToTable(right, i(4, 5, 6), - stringCol("SingleKey", "Key", "Key", "Key"), - byteCol("ByteCol", (byte) 4, (byte) 6, (byte) 5), - longCol("LongCol", 4, 6, 5), - doubleCol("DoubleCol", 4, 6, 5), - stringCol("StringCol", "A", "D", "C"), - col("BoolCol", null, true, false)); + stringCol("SingleKey", "Key", "Key", "Key"), + byteCol("ByteCol", (byte) 4, (byte) 6, (byte) 5), + longCol("LongCol", 4, 6, 5), + doubleCol("DoubleCol", 4, 6, 5), + stringCol("StringCol", "A", "D", "C"), + col("BoolCol", null, true, false)); right.notifyListeners(i(4, 5, 6), i(), i()); }); assertNotNull(listener.originalException); - assertEquals("Right stamp columns must be sorted, but are not for " - + (key ? "Key " : "[] (zero key columns) ") + firstUnsorted + " came before " - + secondUnsorted, listener.originalException.getMessage()); + assertEquals( + "Right stamp columns must be sorted, but are not for " + (key ? 
"Key " : "[] (zero key columns) ") + + firstUnsorted + " came before " + secondUnsorted, + listener.originalException.getMessage()); } finally { base.setExpectError(false); } @@ -486,17 +467,16 @@ public void testAjRandomStatic() { for (int rightSize = 10; rightSize <= 10000; rightSize *= 10) { for (boolean reverse : new boolean[] {false, true}) { for (boolean noexact : new boolean[] {false, true}) { - System.out - .println("Seed=" + seed + ", leftSize=" + leftSize + ", rightSize=" - + rightSize + ", reverse=" + reverse + ", noexact=" + noexact); + System.out.println("Seed=" + seed + ", leftSize=" + leftSize + ", rightSize=" + rightSize + + ", reverse=" + reverse + ", noexact=" + noexact); testAjRandomStatic(seed, leftSize, rightSize, reverse, noexact, - ColumnInfo.ColAttributes.None, ColumnInfo.ColAttributes.None); + ColumnInfo.ColAttributes.None, ColumnInfo.ColAttributes.None); testAjRandomStatic(seed, leftSize, rightSize, reverse, noexact, - ColumnInfo.ColAttributes.Grouped, ColumnInfo.ColAttributes.None); + ColumnInfo.ColAttributes.Grouped, ColumnInfo.ColAttributes.None); testAjRandomStatic(seed, leftSize, rightSize, reverse, noexact, - ColumnInfo.ColAttributes.None, ColumnInfo.ColAttributes.Grouped); + ColumnInfo.ColAttributes.None, ColumnInfo.ColAttributes.Grouped); testAjRandomStatic(seed, leftSize, rightSize, reverse, noexact, - ColumnInfo.ColAttributes.Grouped, ColumnInfo.ColAttributes.Grouped); + ColumnInfo.ColAttributes.Grouped, ColumnInfo.ColAttributes.Grouped); } } } @@ -504,28 +484,26 @@ public void testAjRandomStatic() { } } - private void testAjRandomStatic(int seed, int leftSize, int rightSize, boolean reverse, - boolean noexact, ColumnInfo.ColAttributes leftAttributes, - ColumnInfo.ColAttributes rightAttributes) { + private void testAjRandomStatic(int seed, int leftSize, int rightSize, boolean reverse, boolean noexact, + ColumnInfo.ColAttributes leftAttributes, ColumnInfo.ColAttributes rightAttributes) { final Random random = new Random(seed); 
final QueryTable leftTable = getTable(false, leftSize, random, - initColumnInfos(new String[] {"Bucket", "LeftStamp", "LeftSentinel"}, - Arrays.asList(Collections.singletonList(leftAttributes), Collections.emptyList(), - Collections.emptyList()), - new TstUtils.SetGenerator<>("Alpha", "Bravo", "Charlie", "Delta"), - new TstUtils.IntGenerator(0, 10000), - new TstUtils.IntGenerator(10_000_000, 10_010_000))); + initColumnInfos(new String[] {"Bucket", "LeftStamp", "LeftSentinel"}, + Arrays.asList(Collections.singletonList(leftAttributes), Collections.emptyList(), + Collections.emptyList()), + new TstUtils.SetGenerator<>("Alpha", "Bravo", "Charlie", "Delta"), + new TstUtils.IntGenerator(0, 10000), + new TstUtils.IntGenerator(10_000_000, 10_010_000))); final QueryTable rightTable = getTable(false, rightSize, random, - initColumnInfos(new String[] {"Bucket", "RightStamp", "RightSentinel"}, - Arrays.asList(Collections.singletonList(rightAttributes), Collections.emptyList(), - Collections.emptyList()), - new TstUtils.SetGenerator<>("Alpha", "Bravo", "Charlie", "Echo"), - new TstUtils.SortedIntGenerator(0, 10000), - new TstUtils.IntGenerator(20_000_000, 20_010_000))); - - final String stampMatch = - "LeftStamp" + (noexact ? (reverse ? ">" : "<") : "=") + "RightStamp"; + initColumnInfos(new String[] {"Bucket", "RightStamp", "RightSentinel"}, + Arrays.asList(Collections.singletonList(rightAttributes), Collections.emptyList(), + Collections.emptyList()), + new TstUtils.SetGenerator<>("Alpha", "Bravo", "Charlie", "Echo"), + new TstUtils.SortedIntGenerator(0, 10000), + new TstUtils.IntGenerator(20_000_000, 20_010_000))); + + final String stampMatch = "LeftStamp" + (noexact ? (reverse ? 
">" : "<") : "=") + "RightStamp"; final Table result; if (reverse) { result = leftTable.raj(rightTable, stampMatch, "RightSentinel"); @@ -547,8 +525,7 @@ private void testAjRandomStatic(int seed, int leftSize, int rightSize, boolean r for (Object key : bucketResults.getKeySet()) { System.out.println("Bucket:" + key); - checkAjResult(leftBucket.get(key), rightBucket.get(key), bucketResults.get(key), - reverse, noexact); + checkAjResult(leftBucket.get(key), rightBucket.get(key), bucketResults.get(key), reverse, noexact); } } @@ -564,29 +541,26 @@ private void testAjRandomStaticOverflow(int seed, int leftSize, int rightSize) { final Random random = new Random(seed); final QueryTable leftTable = getTable(false, leftSize, random, - initColumnInfos(new String[] {"Bucket", "LeftStamp", "LeftSentinel"}, - new TstUtils.StringGenerator(leftSize / 10), - new TstUtils.IntGenerator(0, 100000), - new TstUtils.IntGenerator(10_000_000, 10_010_000))); + initColumnInfos(new String[] {"Bucket", "LeftStamp", "LeftSentinel"}, + new TstUtils.StringGenerator(leftSize / 10), + new TstUtils.IntGenerator(0, 100000), + new TstUtils.IntGenerator(10_000_000, 10_010_000))); final QueryTable rightTable = getTable(false, rightSize, random, - initColumnInfos(new String[] {"Bucket", "RightStamp", "RightSentinel"}, - new TstUtils.StringGenerator(rightSize / 10), - new TstUtils.SortedIntGenerator(0, 100000), - new TstUtils.IntGenerator(20_000_000, 20_010_000))); + initColumnInfos(new String[] {"Bucket", "RightStamp", "RightSentinel"}, + new TstUtils.StringGenerator(rightSize / 10), + new TstUtils.SortedIntGenerator(0, 100000), + new TstUtils.IntGenerator(20_000_000, 20_010_000))); - final Table result = AsOfJoinHelper.asOfJoin(QueryTableJoinTest.SMALL_LEFT_CONTROL, - leftTable, (QueryTable) rightTable.reverse(), - MatchPairFactory.getExpressions("Bucket", "LeftStamp=RightStamp"), - MatchPairFactory.getExpressions("RightStamp", "RightSentinel"), SortingOrder.Descending, - true); + final Table result = 
AsOfJoinHelper.asOfJoin(QueryTableJoinTest.SMALL_LEFT_CONTROL, leftTable, + (QueryTable) rightTable.reverse(), MatchPairFactory.getExpressions("Bucket", "LeftStamp=RightStamp"), + MatchPairFactory.getExpressions("RightStamp", "RightSentinel"), SortingOrder.Descending, true); final TableMap bucketResults = result.byExternal("Bucket"); final TableMap leftBucket = leftTable.byExternal("Bucket"); final TableMap rightBucket = rightTable.byExternal("Bucket"); for (Object key : bucketResults.getKeySet()) { - checkAjResult(leftBucket.get(key), rightBucket.get(key), bucketResults.get(key), true, - true); + checkAjResult(leftBucket.get(key), rightBucket.get(key), bucketResults.get(key), true, true); } } @@ -595,15 +569,12 @@ public void testAjRandomLeftIncrementalRightStatic() { for (int seed = 0; seed < 2; ++seed) { for (int leftSize = 10; leftSize <= 10000; leftSize *= 10) { for (int rightSize = 10; rightSize <= 10000; rightSize *= 10) { - System.out.println( - "Seed=" + seed + ", leftSize=" + leftSize + ", rightSize=" + rightSize); + System.out.println("Seed=" + seed + ", leftSize=" + leftSize + ", rightSize=" + rightSize); try (final SafeCloseable ignored = LivenessScopeStack.open()) { - testAjRandomIncremental(base.leftStep, seed, leftSize, rightSize, true, - false); + testAjRandomIncremental(base.leftStep, seed, leftSize, rightSize, true, false); } try (final SafeCloseable ignored = LivenessScopeStack.open()) { - testAjRandomIncremental(base.leftStepShift, seed, leftSize, rightSize, true, - false); + testAjRandomIncremental(base.leftStepShift, seed, leftSize, rightSize, true, false); } } } @@ -620,22 +591,18 @@ public void testAjRandomLeftStaticRightIncremental() { final int maximumNodeSize = 256; final int seedCount = 5; - for (int leftSize = initialTableSize; leftSize <= maximumTableSize; leftSize *= - tableMultiplier) { - for (int rightSize = initialTableSize; rightSize <= maximumTableSize; rightSize *= - tableMultiplier) { - for (int nodeSize = initialNodeSize; 
nodeSize <= maximumNodeSize; nodeSize *= - nodeMultiplier) { + for (int leftSize = initialTableSize; leftSize <= maximumTableSize; leftSize *= tableMultiplier) { + for (int rightSize = initialTableSize; rightSize <= maximumTableSize; rightSize *= tableMultiplier) { + for (int nodeSize = initialNodeSize; nodeSize <= maximumNodeSize; nodeSize *= nodeMultiplier) { for (int seed = 0; seed < seedCount; ++seed) { if (nodeSize / nodeMultiplier > rightSize) { continue; } - System.out.println("Seed=" + seed + ", nodeSize=" + nodeSize + ", leftSize=" - + leftSize + ", rightSize=" + rightSize); + System.out.println("Seed=" + seed + ", nodeSize=" + nodeSize + ", leftSize=" + leftSize + + ", rightSize=" + rightSize); try (final SafeCloseable ignored = LivenessScopeStack.open()) { - testAjRandomLeftStaticRightIncremental(seed, nodeSize, leftSize, - rightSize); + testAjRandomLeftStaticRightIncremental(seed, nodeSize, leftSize, rightSize); } } } @@ -656,35 +623,30 @@ public void testAjBothIncremental() { final long startTime = System.currentTimeMillis(); int configurations = 0; - for (int leftSize = initialTableSize; leftSize <= maximumTableSize; leftSize *= - tableMultiplier) { - for (int rightSize = initialTableSize; rightSize <= maximumTableSize; rightSize *= - tableMultiplier) { - for (int leftNodeSize = - initialNodeSize; leftNodeSize <= maximumNodeSize; leftNodeSize *= + for (int leftSize = initialTableSize; leftSize <= maximumTableSize; leftSize *= tableMultiplier) { + for (int rightSize = initialTableSize; rightSize <= maximumTableSize; rightSize *= tableMultiplier) { + for (int leftNodeSize = initialNodeSize; leftNodeSize <= maximumNodeSize; leftNodeSize *= nodeMultiplier) { if (leftNodeSize / nodeMultiplier > leftSize) { continue; } - for (int rightNodeSize = - initialNodeSize; rightNodeSize <= maximumNodeSize; rightNodeSize *= + for (int rightNodeSize = initialNodeSize; rightNodeSize <= maximumNodeSize; rightNodeSize *= nodeMultiplier) { if (rightNodeSize / 
nodeMultiplier > rightSize) { continue; } for (int seed = 0; seed < seedCount; ++seed) { - for (final JoinIncrement joinIncrement : new JoinIncrement[] { - base.leftRightStepShift, base.leftRightConcurrentStepShift}) { - System.out.println( - (System.currentTimeMillis() - startTime) + ": Seed=" + seed - + ", leftNodeSize=" + leftNodeSize + ", rightNodeSize=" - + rightNodeSize + ", leftSize=" + leftSize + ", rightSize=" - + rightSize + ", joinIncrement=" + joinIncrement); + for (final JoinIncrement joinIncrement : new JoinIncrement[] {base.leftRightStepShift, + base.leftRightConcurrentStepShift}) { + System.out.println((System.currentTimeMillis() - startTime) + ": Seed=" + seed + + ", leftNodeSize=" + leftNodeSize + ", rightNodeSize=" + rightNodeSize + + ", leftSize=" + leftSize + ", rightSize=" + rightSize + ", joinIncrement=" + + joinIncrement); try (final SafeCloseable ignored = LivenessScopeStack.open()) { - testAjRandomBothIncremental(seed, leftNodeSize, rightNodeSize, - leftSize, rightSize, joinIncrement, int.class); + testAjRandomBothIncremental(seed, leftNodeSize, rightNodeSize, leftSize, rightSize, + joinIncrement, int.class); } } configurations++; @@ -694,8 +656,8 @@ public void testAjBothIncremental() { } } - System.out.println("Executed " + configurations + " configurations in " - + (System.currentTimeMillis() - startTime) + "ms"); + System.out.println( + "Executed " + configurations + " configurations in " + (System.currentTimeMillis() - startTime) + "ms"); } @Test @@ -711,35 +673,30 @@ public void testAjCharIncremental() { final long startTime = System.currentTimeMillis(); int configurations = 0; - for (int leftSize = initialTableSize; leftSize <= maximumTableSize; leftSize *= - tableMultiplier) { - for (int rightSize = initialTableSize; rightSize <= maximumTableSize; rightSize *= - tableMultiplier) { - for (int leftNodeSize = - initialNodeSize; leftNodeSize <= maximumNodeSize; leftNodeSize *= + for (int leftSize = initialTableSize; leftSize <= 
maximumTableSize; leftSize *= tableMultiplier) { + for (int rightSize = initialTableSize; rightSize <= maximumTableSize; rightSize *= tableMultiplier) { + for (int leftNodeSize = initialNodeSize; leftNodeSize <= maximumNodeSize; leftNodeSize *= nodeMultiplier) { if (leftNodeSize / nodeMultiplier > leftSize) { continue; } - for (int rightNodeSize = - initialNodeSize; rightNodeSize <= maximumNodeSize; rightNodeSize *= + for (int rightNodeSize = initialNodeSize; rightNodeSize <= maximumNodeSize; rightNodeSize *= nodeMultiplier) { if (rightNodeSize / nodeMultiplier > rightSize) { continue; } for (int seed = 0; seed < seedCount; ++seed) { - for (JoinIncrement joinIncrement : new JoinIncrement[] { - base.leftRightStepShift, base.leftRightConcurrentStepShift}) { - System.out.println( - (System.currentTimeMillis() - startTime) + ": Seed=" + seed - + ", leftNodeSize=" + leftNodeSize + ", rightNodeSize=" - + rightNodeSize + ", leftSize=" + leftSize + ", rightSize=" - + rightSize + ", joinIncrement=" + joinIncrement); + for (JoinIncrement joinIncrement : new JoinIncrement[] {base.leftRightStepShift, + base.leftRightConcurrentStepShift}) { + System.out.println((System.currentTimeMillis() - startTime) + ": Seed=" + seed + + ", leftNodeSize=" + leftNodeSize + ", rightNodeSize=" + rightNodeSize + + ", leftSize=" + leftSize + ", rightSize=" + rightSize + ", joinIncrement=" + + joinIncrement); try (final SafeCloseable ignored = LivenessScopeStack.open()) { - testAjRandomBothIncremental(seed, leftNodeSize, rightNodeSize, - leftSize, rightSize, joinIncrement, char.class); + testAjRandomBothIncremental(seed, leftNodeSize, rightNodeSize, leftSize, rightSize, + joinIncrement, char.class); } } configurations++; @@ -749,8 +706,8 @@ public void testAjCharIncremental() { } } - System.out.println("Executed " + configurations + " configurations in " - + (System.currentTimeMillis() - startTime) + "ms"); + System.out.println( + "Executed " + configurations + " configurations in " + 
(System.currentTimeMillis() - startTime) + "ms"); } @Test @@ -766,68 +723,63 @@ public void testAjBothIncrementalOverflow() { final long startTime = System.currentTimeMillis(); int configurations = 0; - for (int leftSize = initialTableSize; leftSize <= maximumTableSize; leftSize *= - tableMultiplier) { - for (int rightSize = initialTableSize; rightSize <= maximumTableSize; rightSize *= - tableMultiplier) { - for (int leftNodeSize = - initialNodeSize; leftNodeSize <= maximumNodeSize; leftNodeSize *= + for (int leftSize = initialTableSize; leftSize <= maximumTableSize; leftSize *= tableMultiplier) { + for (int rightSize = initialTableSize; rightSize <= maximumTableSize; rightSize *= tableMultiplier) { + for (int leftNodeSize = initialNodeSize; leftNodeSize <= maximumNodeSize; leftNodeSize *= nodeMultiplier) { if (leftNodeSize / nodeMultiplier > leftSize) { continue; } - for (int rightNodeSize = - initialNodeSize; rightNodeSize <= maximumNodeSize; rightNodeSize *= + for (int rightNodeSize = initialNodeSize; rightNodeSize <= maximumNodeSize; rightNodeSize *= nodeMultiplier) { if (rightNodeSize / nodeMultiplier > rightSize) { continue; } for (int seed = 0; seed < seedCount; ++seed) { - for (JoinIncrement joinIncrement : new JoinIncrement[] { - base.leftRightStepShift, base.leftRightConcurrentStepShift}) { - System.out.println( - (System.currentTimeMillis() - startTime) + ": Seed=" + seed - + ", leftNodeSize=" + leftNodeSize + ", rightNodeSize=" - + rightNodeSize + ", leftSize=" + leftSize + ", rightSize=" - + rightSize + ", joinIncrement=" + joinIncrement); + for (JoinIncrement joinIncrement : new JoinIncrement[] {base.leftRightStepShift, + base.leftRightConcurrentStepShift}) { + System.out.println((System.currentTimeMillis() - startTime) + ": Seed=" + seed + + ", leftNodeSize=" + leftNodeSize + ", rightNodeSize=" + rightNodeSize + + ", leftSize=" + leftSize + ", rightSize=" + rightSize + ", joinIncrement=" + + joinIncrement); final int fRightNodeSize = rightNodeSize; 
final int fLeftNodeSize = leftNodeSize; try (final SafeCloseable ignored = LivenessScopeStack.open()) { - testAjRandomIncrementalWithInitial(seed, leftNodeSize, - rightNodeSize, leftSize, rightSize, joinIncrement, true, - true, false, false, true, false, new JoinControl() { - @Override - int tableSizeForRightBuild(Table rightTable) { - return 1 << 2; - } - - @Override - int tableSizeForLeftBuild(Table leftTable) { - return 1 << 2; - } - - @Override - double getMaximumLoadFactor() { - return 20.0; - } - - @Override - double getTargetLoadFactor() { - return 19.0; - } - - @Override - int rightSsaNodeSize() { - return fRightNodeSize; - } - - @Override - int leftSsaNodeSize() { - return fLeftNodeSize; - } - }, int.class); + testAjRandomIncrementalWithInitial(seed, leftNodeSize, rightNodeSize, leftSize, + rightSize, joinIncrement, true, true, false, false, true, false, + new JoinControl() { + @Override + int tableSizeForRightBuild(Table rightTable) { + return 1 << 2; + } + + @Override + int tableSizeForLeftBuild(Table leftTable) { + return 1 << 2; + } + + @Override + double getMaximumLoadFactor() { + return 20.0; + } + + @Override + double getTargetLoadFactor() { + return 19.0; + } + + @Override + int rightSsaNodeSize() { + return fRightNodeSize; + } + + @Override + int leftSsaNodeSize() { + return fLeftNodeSize; + } + }, int.class); } } configurations++; @@ -837,42 +789,40 @@ int leftSsaNodeSize() { } } - System.out.println("Executed " + configurations + " configurations in " - + (System.currentTimeMillis() - startTime) + "ms"); + System.out.println( + "Executed " + configurations + " configurations in " + (System.currentTimeMillis() - startTime) + "ms"); } - private void testAjRandomLeftStaticRightIncremental(int seed, int nodeSize, int leftSize, - int rightSize) { - testAjRandomIncrementalWithInitial(seed, -1, nodeSize, leftSize, rightSize, - base.rightStepShift, false, true, false, true, true, true, int.class); + private void 
testAjRandomLeftStaticRightIncremental(int seed, int nodeSize, int leftSize, int rightSize) { + testAjRandomIncrementalWithInitial(seed, -1, nodeSize, leftSize, rightSize, base.rightStepShift, false, true, + false, true, true, true, int.class); } - private void testAjRandomBothIncremental(int seed, int leftNodeSize, int rightNodeSize, - int leftSize, int rightSize, JoinIncrement joinIncrement, Class stampType) { + private void testAjRandomBothIncremental(int seed, int leftNodeSize, int rightNodeSize, int leftSize, int rightSize, + JoinIncrement joinIncrement, Class stampType) { // zero keys - testAjRandomIncrementalWithInitial(seed, leftNodeSize, rightNodeSize, leftSize, rightSize, - joinIncrement, true, true, false, true, false, false, stampType); + testAjRandomIncrementalWithInitial(seed, leftNodeSize, rightNodeSize, leftSize, rightSize, joinIncrement, true, + true, false, true, false, false, stampType); // buckets - testAjRandomIncrementalWithInitial(seed, leftNodeSize, rightNodeSize, leftSize, rightSize, - joinIncrement, true, true, false, false, true, false, stampType); + testAjRandomIncrementalWithInitial(seed, leftNodeSize, rightNodeSize, leftSize, rightSize, joinIncrement, true, + true, false, false, true, false, stampType); } @SuppressWarnings("SameParameterValue") - private void testAjRandomIncrementalWithInitial(int seed, int leftNodeSize, int rightNodeSize, - int leftSize, int rightSize, JoinIncrement joinIncrement, boolean leftRefreshing, - boolean rightRefreshing, boolean initialOnly, boolean withZeroKeys, boolean withBuckets, - boolean withReverse, Class stampType) { - testAjRandomIncrementalWithInitial(seed, leftNodeSize, rightNodeSize, leftSize, rightSize, - joinIncrement, leftRefreshing, rightRefreshing, initialOnly, withZeroKeys, withBuckets, - withReverse, getJoinControlWithNodeSize(leftNodeSize, rightNodeSize), stampType); + private void testAjRandomIncrementalWithInitial(int seed, int leftNodeSize, int rightNodeSize, int leftSize, + int 
rightSize, JoinIncrement joinIncrement, boolean leftRefreshing, boolean rightRefreshing, + boolean initialOnly, boolean withZeroKeys, boolean withBuckets, boolean withReverse, Class stampType) { + testAjRandomIncrementalWithInitial(seed, leftNodeSize, rightNodeSize, leftSize, rightSize, joinIncrement, + leftRefreshing, rightRefreshing, initialOnly, withZeroKeys, withBuckets, withReverse, + getJoinControlWithNodeSize(leftNodeSize, rightNodeSize), stampType); } @SuppressWarnings("SameParameterValue") - private void testAjRandomIncrementalWithInitial(int seed, int leftNodeSize, int rightNodeSize, - int leftSize, int rightSize, JoinIncrement joinIncrement, boolean leftRefreshing, - boolean rightRefreshing, boolean initialOnly, boolean withZeroKeys, boolean withBuckets, - boolean withReverse, JoinControl control, Class stampType) { + private void testAjRandomIncrementalWithInitial(int seed, int leftNodeSize, int rightNodeSize, int leftSize, + int rightSize, JoinIncrement joinIncrement, boolean leftRefreshing, boolean rightRefreshing, + boolean initialOnly, boolean withZeroKeys, boolean withBuckets, boolean withReverse, JoinControl control, + Class stampType) { final Logger log = new StreamLoggerImpl(); final Random random = new Random(seed); @@ -884,15 +834,13 @@ private void testAjRandomIncrementalWithInitial(int seed, int leftNodeSize, int final Set set2; final int smallestSize = Math.min(leftSize, rightSize); if (smallSet.length > smallestSize / 4) { - set1 = Arrays.stream(smallSet).filter(x -> random.nextDouble() < 0.75) - .collect(Collectors.toSet()); - set2 = Arrays.stream(smallSet).filter(x -> random.nextDouble() < 0.75) - .collect(Collectors.toSet()); + set1 = Arrays.stream(smallSet).filter(x -> random.nextDouble() < 0.75).collect(Collectors.toSet()); + set2 = Arrays.stream(smallSet).filter(x -> random.nextDouble() < 0.75).collect(Collectors.toSet()); } else { - set1 = IntStream.range(0, smallestSize * 2).filter(x -> random.nextDouble() < 0.75) - .mapToObj(x -> 
"B" + x).collect(Collectors.toSet()); - set2 = IntStream.range(0, smallestSize * 2).filter(x -> random.nextDouble() < 0.75) - .mapToObj(x -> "B" + x).collect(Collectors.toSet()); + set1 = IntStream.range(0, smallestSize * 2).filter(x -> random.nextDouble() < 0.75).mapToObj(x -> "B" + x) + .collect(Collectors.toSet()); + set2 = IntStream.range(0, smallestSize * 2).filter(x -> random.nextDouble() < 0.75).mapToObj(x -> "B" + x) + .collect(Collectors.toSet()); } final Generator leftStampGenerator; @@ -912,23 +860,20 @@ private void testAjRandomIncrementalWithInitial(int seed, int leftNodeSize, int } final QueryTable leftTable = getTable(leftRefreshing, leftSize, random, - leftColumnInfo = - initColumnInfos(new String[] {"Truthiness", "Bucket", "LeftStamp", "LeftSentinel"}, - new BooleanGenerator(), - new SetGenerator<>(String.class, set1), - leftStampGenerator, - new TstUtils.IntGenerator(10_000_000, 10_010_000))); + leftColumnInfo = initColumnInfos(new String[] {"Truthiness", "Bucket", "LeftStamp", "LeftSentinel"}, + new BooleanGenerator(), + new SetGenerator<>(String.class, set1), + leftStampGenerator, + new TstUtils.IntGenerator(10_000_000, 10_010_000))); final ColumnInfo[] rightColumnInfo; final QueryTable rightTable = getTable(rightRefreshing, rightSize, random, - rightColumnInfo = initColumnInfos( - new String[] {"Truthiness", "Bucket", "RightStamp", "RightSentinel"}, - new BooleanGenerator(), - new TstUtils.SetGenerator<>(String.class, set2), - rightStampGenerator, - new TstUtils.IntGenerator(20_000_000, 20_010_000))); + rightColumnInfo = initColumnInfos(new String[] {"Truthiness", "Bucket", "RightStamp", "RightSentinel"}, + new BooleanGenerator(), + new TstUtils.SetGenerator<>(String.class, set2), + rightStampGenerator, + new TstUtils.IntGenerator(20_000_000, 20_010_000))); - final QueryTable rightSorted = - sortRight ? (QueryTable) rightTable.sort("RightStamp") : rightTable; + final QueryTable rightSorted = sortRight ? 
(QueryTable) rightTable.sort("RightStamp") : rightTable; if (LiveTableTestCase.printTableUpdates) { System.out.println("Left: "); @@ -937,32 +882,22 @@ private void testAjRandomIncrementalWithInitial(int seed, int leftNodeSize, int TableTools.showWithIndex(rightTable, 20); } - // we compare our initial values to the static case; which we have a separate test for. This - // is meant to give - // us some confidence in our initial algorithm, whcih we then use to compare the incrmental - // results. + // we compare our initial values to the static case; which we have a separate test for. This is meant to give + // us some confidence in our initial algorithm, whcih we then use to compare the incrmental results. if (withZeroKeys) { - doInitialAjComparison(leftTable, rightSorted, "LeftStamp=RightStamp", false, false, - control); - doInitialAjComparison(leftTable, rightSorted, "LeftStampRightStamp", true, true, - control); + doInitialAjComparison(leftTable, rightSorted, "LeftStamp=RightStamp", true, false, control); + doInitialAjComparison(leftTable, rightSorted, "LeftStamp>RightStamp", true, true, control); } } if (withBuckets) { - doInitialAjComparison(leftTable, rightSorted, "Bucket,LeftStamp=RightStamp", false, - false, control); - doInitialAjComparison(leftTable, rightSorted, "Bucket,LeftStampRightStamp", true, - true, control); + doInitialAjComparison(leftTable, rightSorted, "Bucket,LeftStamp=RightStamp", true, false, control); + doInitialAjComparison(leftTable, rightSorted, "Bucket,LeftStamp>RightStamp", true, true, control); } } @@ -972,80 +907,66 @@ private void testAjRandomIncrementalWithInitial(int seed, int leftNodeSize, int final QueryTable rightReversed = (QueryTable) rightSorted.reverse(); - final EvalNuggetInterface[] en = Stream.concat( - Stream.concat(!withZeroKeys ? Stream.empty() + final EvalNuggetInterface[] en = Stream.concat(Stream.concat(!withZeroKeys ? 
Stream.empty() : Stream.concat( - Stream.of( - // aj - EvalNugget - .from(() -> AsOfJoinHelper.asOfJoin(control, leftTable, rightSorted, - MatchPairFactory.getExpressions("LeftStamp=RightStamp"), - MatchPairFactory.getExpressions("RightStamp", "RightSentinel"), - SortingOrder.Ascending, false)), - // < aj - EvalNugget.from(() -> AsOfJoinHelper.asOfJoin(control, leftTable, - rightSorted, - AjMatchPairFactory.getExpressions(false, "LeftStamp AsOfJoinHelper.asOfJoin(control, leftTable, rightReversed, + Stream.of( + // aj + EvalNugget.from(() -> AsOfJoinHelper.asOfJoin(control, leftTable, rightSorted, MatchPairFactory.getExpressions("LeftStamp=RightStamp"), - MatchPairFactory.getExpressions("RightStamp", - "RightSentinel"), - SortingOrder.Descending, false)), - // > raj - EvalNugget.from( - () -> AsOfJoinHelper.asOfJoin(control, leftTable, rightReversed, - AjMatchPairFactory.getExpressions(true, - "LeftStamp>RightStamp").first, - MatchPairFactory.getExpressions("RightStamp", "RightSentinel"), - SortingOrder.Descending, true)))), + MatchPairFactory.getExpressions("RightStamp", "RightSentinel"), + SortingOrder.Ascending, false)), + // < aj + EvalNugget.from(() -> AsOfJoinHelper.asOfJoin(control, leftTable, rightSorted, + AjMatchPairFactory.getExpressions(false, "LeftStamp AsOfJoinHelper.asOfJoin(control, leftTable, rightReversed, + MatchPairFactory.getExpressions("LeftStamp=RightStamp"), + MatchPairFactory.getExpressions("RightStamp", "RightSentinel"), + SortingOrder.Descending, false)), + // > raj + EvalNugget.from(() -> AsOfJoinHelper.asOfJoin(control, leftTable, rightReversed, + AjMatchPairFactory.getExpressions(true, "LeftStamp>RightStamp").first, + MatchPairFactory.getExpressions("RightStamp", "RightSentinel"), + SortingOrder.Descending, true)))), !withBuckets ? 
Stream.empty() - : Stream.of( - // aj, with a bucket - EvalNugget - .from(() -> AsOfJoinHelper.asOfJoin(control, leftTable, rightSorted, - MatchPairFactory.getExpressions("Truthiness", "Bucket", - "LeftStamp=RightStamp"), - MatchPairFactory.getExpressions("RightStamp", "RightSentinel"), - SortingOrder.Ascending, false)), - EvalNugget - .from(() -> AsOfJoinHelper.asOfJoin(control, leftTable, rightSorted, - MatchPairFactory.getExpressions("Bucket", "LeftStamp=RightStamp"), - MatchPairFactory.getExpressions("RightStamp", "RightSentinel"), - SortingOrder.Ascending, false)), - // < aj, with a bucket - EvalNugget - .from(() -> AsOfJoinHelper.asOfJoin(control, leftTable, rightSorted, - AjMatchPairFactory.getExpressions(false, "Bucket", - "LeftStamp AsOfJoinHelper.asOfJoin(control, leftTable, rightReversed, - MatchPairFactory.getExpressions("Bucket", "LeftStamp=RightStamp"), - MatchPairFactory.getExpressions("RightStamp", "RightSentinel"), - SortingOrder.Descending, false)), - // > raj, with a bucket - EvalNugget.from(() -> AsOfJoinHelper.asOfJoin(control, leftTable, rightReversed, - AjMatchPairFactory.getExpressions(true, "Bucket", - "LeftStamp>RightStamp").first, - MatchPairFactory.getExpressions("RightStamp", "RightSentinel"), - SortingOrder.Descending, true)))) - .toArray(EvalNuggetInterface[]::new); + : Stream.of( + // aj, with a bucket + EvalNugget.from(() -> AsOfJoinHelper.asOfJoin(control, leftTable, rightSorted, + MatchPairFactory.getExpressions("Truthiness", "Bucket", "LeftStamp=RightStamp"), + MatchPairFactory.getExpressions("RightStamp", "RightSentinel"), + SortingOrder.Ascending, false)), + EvalNugget.from(() -> AsOfJoinHelper.asOfJoin(control, leftTable, rightSorted, + MatchPairFactory.getExpressions("Bucket", "LeftStamp=RightStamp"), + MatchPairFactory.getExpressions("RightStamp", "RightSentinel"), + SortingOrder.Ascending, false)), + // < aj, with a bucket + EvalNugget.from(() -> AsOfJoinHelper.asOfJoin(control, leftTable, rightSorted, + 
AjMatchPairFactory.getExpressions(false, "Bucket", + "LeftStamp AsOfJoinHelper.asOfJoin(control, leftTable, rightReversed, + MatchPairFactory.getExpressions("Bucket", "LeftStamp=RightStamp"), + MatchPairFactory.getExpressions("RightStamp", "RightSentinel"), + SortingOrder.Descending, false)), + // > raj, with a bucket + EvalNugget.from(() -> AsOfJoinHelper.asOfJoin(control, leftTable, rightReversed, + AjMatchPairFactory.getExpressions(true, "Bucket", "LeftStamp>RightStamp").first, + MatchPairFactory.getExpressions("RightStamp", "RightSentinel"), + SortingOrder.Descending, true)))) + .toArray(EvalNuggetInterface[]::new); for (int step = 0; step < maxSteps; step++) { - System.out.println( - "Step = " + step + (leftNodeSize > 0 ? ", leftNodeSize=" + leftNodeSize : "") - + ", rightNodeSize=" + rightNodeSize + ", leftSize=" + leftSize + ", rightSize=" - + rightSize + ", seed = " + seed + ", joinIncrement=" + joinIncrement); + System.out.println("Step = " + step + (leftNodeSize > 0 ? ", leftNodeSize=" + leftNodeSize : "") + + ", rightNodeSize=" + rightNodeSize + ", leftSize=" + leftSize + ", rightSize=" + rightSize + + ", seed = " + seed + ", joinIncrement=" + joinIncrement); if (LiveTableTestCase.printTableUpdates) { System.out.println("Left Table:" + leftTable.size()); showWithIndex(leftTable, 100); @@ -1060,18 +981,17 @@ private void testAjRandomIncrementalWithInitial(int seed, int leftNodeSize, int showWithIndex(rightReversed, 100); } } - joinIncrement.step(leftSize, rightSize, leftTable, rightTable, leftColumnInfo, - rightColumnInfo, en, random); + joinIncrement.step(leftSize, rightSize, leftTable, rightTable, leftColumnInfo, rightColumnInfo, en, random); } } - private void doInitialAjComparison(QueryTable leftTable, QueryTable rightTable, - String columnsToMatch, boolean reverse, boolean disallowMatch, JoinControl control) { + private void doInitialAjComparison(QueryTable leftTable, QueryTable rightTable, String columnsToMatch, + boolean reverse, boolean 
disallowMatch, JoinControl control) { final Logger log = new StreamLoggerImpl(); final Table staticResult = - reverse ? leftTable.silent().raj(rightTable.silent(), columnsToMatch, "RightSentinel") - : leftTable.silent().aj(rightTable.silent(), columnsToMatch, "RightSentinel"); + reverse ? leftTable.silent().raj(rightTable.silent(), columnsToMatch, "RightSentinel") + : leftTable.silent().aj(rightTable.silent(), columnsToMatch, "RightSentinel"); if (LiveTableTestCase.printTableUpdates) { System.out.println("Static: "); TableTools.showWithIndex(staticResult); @@ -1079,10 +999,10 @@ private void doInitialAjComparison(QueryTable leftTable, QueryTable rightTable, try (final SafeCloseable ignored = LivenessScopeStack.open()) { final Table refreshingResult = AsOfJoinHelper.asOfJoin(control, leftTable, - reverse ? ((QueryTable) rightTable.reverse()) : rightTable, - AjMatchPairFactory.getExpressions(reverse, columnsToMatch.split(",")).first, - MatchPairFactory.getExpressions("RightStamp", "RightSentinel"), - reverse ? SortingOrder.Descending : SortingOrder.Ascending, disallowMatch); + reverse ? ((QueryTable) rightTable.reverse()) : rightTable, + AjMatchPairFactory.getExpressions(reverse, columnsToMatch.split(",")).first, + MatchPairFactory.getExpressions("RightStamp", "RightSentinel"), + reverse ? 
SortingOrder.Descending : SortingOrder.Ascending, disallowMatch); if (LiveTableTestCase.printTableUpdates) { System.out.println("Refreshing: "); @@ -1119,24 +1039,23 @@ public int leftChunkSize() { } - private void testAjRandomIncremental(JoinIncrement joinIncrement, int seed, int leftSize, - int rightSize, boolean leftRefreshing, boolean rightRefreshing) { + private void testAjRandomIncremental(JoinIncrement joinIncrement, int seed, int leftSize, int rightSize, + boolean leftRefreshing, boolean rightRefreshing) { final Random random = new Random(seed); final int maxSteps = 10; final ColumnInfo[] leftColumnInfo; final QueryTable leftTable = getTable(leftRefreshing, leftSize, random, - leftColumnInfo = initColumnInfos(new String[] {"Bucket", "LeftStamp", "LeftSentinel"}, - new TstUtils.SetGenerator<>("Alpha", "Bravo", "Charlie", "Delta"), - new TstUtils.IntGenerator(0, 10000), - new TstUtils.IntGenerator(10_000_000, 10_010_000))); + leftColumnInfo = initColumnInfos(new String[] {"Bucket", "LeftStamp", "LeftSentinel"}, + new TstUtils.SetGenerator<>("Alpha", "Bravo", "Charlie", "Delta"), + new TstUtils.IntGenerator(0, 10000), + new TstUtils.IntGenerator(10_000_000, 10_010_000))); final ColumnInfo[] rightColumnInfo; final QueryTable rightTable = getTable(rightRefreshing, rightSize, random, - rightColumnInfo = - initColumnInfos(new String[] {"Bucket", "RightStamp", "RightSentinel"}, - new TstUtils.SetGenerator<>("Alpha", "Bravo", "Charlie", "Echo"), - new TstUtils.SortedIntGenerator(0, 10000), - new TstUtils.IntGenerator(20_000_000, 20_010_000))); + rightColumnInfo = initColumnInfos(new String[] {"Bucket", "RightStamp", "RightSentinel"}, + new TstUtils.SetGenerator<>("Alpha", "Bravo", "Charlie", "Echo"), + new TstUtils.SortedIntGenerator(0, 10000), + new TstUtils.IntGenerator(20_000_000, 20_010_000))); final EvalNuggetInterface[] en = new EvalNuggetInterface[] { new EvalNugget() { @@ -1166,44 +1085,39 @@ protected Table e() { new EvalNugget() { @Override protected Table 
e() { - return leftTable.aj(rightTable, "Bucket,LeftStamp=RightStamp", - "RightSentinel"); + return leftTable.aj(rightTable, "Bucket,LeftStamp=RightStamp", "RightSentinel"); } }, new EvalNugget() { @Override protected Table e() { - return leftTable.aj(rightTable, "Bucket,LeftStampRightStamp", - "RightSentinel"); + return leftTable.raj(rightTable, "Bucket,LeftStamp>RightStamp", "RightSentinel"); } } }; for (int step = 0; step < maxSteps; step++) { - System.out.println("Step = " + step + ", leftSize=" + leftSize + ", rightSize=" - + rightSize + ", seed = " + seed + ", joinIncrement=" + joinIncrement); + System.out.println("Step = " + step + ", leftSize=" + leftSize + ", rightSize=" + rightSize + ", seed = " + + seed + ", joinIncrement=" + joinIncrement); if (LiveTableTestCase.printTableUpdates) { System.out.println("Left Table:" + leftTable.size()); showWithIndex(leftTable, 100); System.out.println("Right Table:" + rightTable.size()); showWithIndex(rightTable, 100); } - joinIncrement.step(leftSize, rightSize, leftTable, rightTable, leftColumnInfo, - rightColumnInfo, en, random); + joinIncrement.step(leftSize, rightSize, leftTable, rightTable, leftColumnInfo, rightColumnInfo, en, random); } } @@ -1220,55 +1134,50 @@ public void testAjRandomLeftIncrementalRightStaticOverflow() { final int leftSize = 32000; final int rightSize = 32000; final QueryTable leftTable = getTable(true, 100000, random, - leftColumnInfo = initColumnInfos(new String[] {"Bucket", "LeftStamp", "LeftSentinel"}, - new TstUtils.StringGenerator(leftSize), - new TstUtils.IntGenerator(0, 100000), - new TstUtils.IntGenerator(10_000_000, 10_010_000))); + leftColumnInfo = initColumnInfos(new String[] {"Bucket", "LeftStamp", "LeftSentinel"}, + new TstUtils.StringGenerator(leftSize), + new TstUtils.IntGenerator(0, 100000), + new TstUtils.IntGenerator(10_000_000, 10_010_000))); final ColumnInfo[] rightColumnInfo; final QueryTable rightTable = getTable(false, 100000, random, - rightColumnInfo = - 
initColumnInfos(new String[] {"Bucket", "RightStamp", "RightSentinel"}, - new TstUtils.StringGenerator(leftSize), - new TstUtils.SortedIntGenerator(0, 100000), - new TstUtils.IntGenerator(20_000_000, 20_010_000))); + rightColumnInfo = initColumnInfos(new String[] {"Bucket", "RightStamp", "RightSentinel"}, + new TstUtils.StringGenerator(leftSize), + new TstUtils.SortedIntGenerator(0, 100000), + new TstUtils.IntGenerator(20_000_000, 20_010_000))); final EvalNuggetInterface[] en = new EvalNuggetInterface[] { new EvalNugget() { @Override protected Table e() { - return AsOfJoinHelper.asOfJoin(QueryTableJoinTest.SMALL_RIGHT_CONTROL, - leftTable, rightTable, - MatchPairFactory.getExpressions("Bucket", "LeftStamp=RightStamp"), - MatchPairFactory.getExpressions("RightSentinel"), - SortingOrder.Ascending, false); + return AsOfJoinHelper.asOfJoin(QueryTableJoinTest.SMALL_RIGHT_CONTROL, leftTable, rightTable, + MatchPairFactory.getExpressions("Bucket", "LeftStamp=RightStamp"), + MatchPairFactory.getExpressions("RightSentinel"), SortingOrder.Ascending, false); } }, }; for (int step = 0; step < maxSteps; step++) { - System.out.println("Step = " + step + ", leftSize=" + leftSize + ", rightSize=" - + rightSize + ", seed = " + seed + ", joinIncrement=" + joinIncrement); + System.out.println("Step = " + step + ", leftSize=" + leftSize + ", rightSize=" + rightSize + ", seed = " + + seed + ", joinIncrement=" + joinIncrement); if (LiveTableTestCase.printTableUpdates) { System.out.println("Left Table:" + leftTable.size()); showWithIndex(leftTable, 100); System.out.println("Right Table:" + rightTable.size()); showWithIndex(rightTable, 100); } - joinIncrement.step(leftSize, rightSize, leftTable, rightTable, leftColumnInfo, - rightColumnInfo, en, random); + joinIncrement.step(leftSize, rightSize, leftTable, rightTable, leftColumnInfo, rightColumnInfo, en, random); } } - private void checkAjResult(Table leftTable, Table rightTable, Table result, boolean reverse, - boolean noexact) { + 
private void checkAjResult(Table leftTable, Table rightTable, Table result, boolean reverse, boolean noexact) { final TIntArrayList expectedStamp = new TIntArrayList(); final TIntArrayList expectedSentinel = new TIntArrayList(); final int[] leftStampArray = (int[]) leftTable.getColumn("LeftStamp").getDirect(); final int[] rightStampArray = rightTable == null ? CollectionUtil.ZERO_LENGTH_INT_ARRAY - : (int[]) rightTable.getColumn("RightStamp").getDirect(); + : (int[]) rightTable.getColumn("RightStamp").getDirect(); final int[] rightSentinelArray = rightTable == null ? CollectionUtil.ZERO_LENGTH_INT_ARRAY - : (int[]) rightTable.getColumn("RightSentinel").getDirect(); + : (int[]) rightTable.getColumn("RightSentinel").getDirect(); for (final int leftStamp : leftStampArray) { final int rightPosition = Arrays.binarySearch(rightStampArray, leftStamp); @@ -1285,7 +1194,7 @@ private void checkAjResult(Table leftTable, Table rightTable, Table result, bool positionToUse = rightPosition; if (reverse) { while (positionToUse < rightStampArray.length - && rightStampArray[positionToUse] == leftStamp) { + && rightStampArray[positionToUse] == leftStamp) { positionToUse++; } if (positionToUse == rightStampArray.length) { @@ -1294,8 +1203,7 @@ private void checkAjResult(Table leftTable, Table rightTable, Table result, bool continue; } } else { - while (positionToUse >= 0 - && rightStampArray[positionToUse] == leftStamp) { + while (positionToUse >= 0 && rightStampArray[positionToUse] == leftStamp) { positionToUse--; } if (positionToUse < 0) { @@ -1308,14 +1216,12 @@ private void checkAjResult(Table leftTable, Table rightTable, Table result, bool positionToUse = rightPosition; if (reverse) { while (positionToUse > 0 - && rightStampArray[positionToUse] == rightStampArray[positionToUse - - 1]) { + && rightStampArray[positionToUse] == rightStampArray[positionToUse - 1]) { positionToUse--; } } else { while (positionToUse < rightStampArray.length - 1 - && rightStampArray[positionToUse] == 
rightStampArray[positionToUse - + 1]) { + && rightStampArray[positionToUse] == rightStampArray[positionToUse + 1]) { positionToUse++; } } @@ -1335,7 +1241,7 @@ private void checkAjResult(Table leftTable, Table rightTable, Table result, bool QueryScope.addParam("__rightStampExpected", expectedStamp); QueryScope.addParam("__rightSentinelExpected", expectedSentinel); final Table expected = leftTable.update("RightStamp=__rightStampExpected.get(i)", - "RightSentinel=__rightSentinelExpected.get(i)"); + "RightSentinel=__rightSentinelExpected.get(i)"); if (LiveTableTestCase.printTableUpdates) { System.out.println("Left:"); @@ -1373,46 +1279,44 @@ public void testIds5293() { try { final Table staticOne = emptyTable(size) - .update( - "Timestamp= i%23 == 0 ? null : new DBDateTime(timeOffset + (long)(scale*(Math.random()*2-0.1))*100_000_000L)", - "OtherTimestamp= i%24 == 0 ? null : new DBDateTime(timeOffset + (long)(scale*(Math.random()*2-0.05))*100_000_000L)", - "MyString=(i%11==0? null : `a`+(int)(scale*(Math.random()*2-1)))", - "MyInt=(i%12==0 ? null : (int)(scale*(Math.random()*2-1)))", - "MyLong=(i%13==0 ? null : (long)(scale*(Math.random()*2-1)))", - "MyFloat=(float)(i%14==0 ? null : i%10==0 ? 1.0F/0.0F: i%5==0 ? -1.0F/0.0F : (float) scale*(Math.random()*2-1))", - "MyDouble=(double)(i%16==0 ? null : i%10==0 ? 1.0D/0.0D: i%5==0 ? -1.0D/0.0D : (double) scale*(Math.random()*2-1))", - "MyBoolean = (i%17==0 ? null : (int)(10*Math.random())%2==0)", - "MyChar = (i%18==0 ? null : new Character((char) (((26*Math.random())%26)+97)) )", - "MyShort=(short)(i%19==0 ? null : (int)(scale*(Math.random()*2-1)))", - "MyByte=(Byte)(i%19==0 ? null : new Byte( Integer.toString((int)(Byte.MAX_VALUE*(Math.random()*2-1)))))", - "MyBigDecimal=(i%21==0 ? null : new java.math.BigDecimal(scale*(Math.random()*2-1)))", - "MyBigInteger=(i%22==0 ? null : new java.math.BigInteger(Integer.toString((int)(scale*(Math.random()*2-1)))))"); + .update("Timestamp= i%23 == 0 ? 
null : new DBDateTime(timeOffset + (long)(scale*(Math.random()*2-0.1))*100_000_000L)", + "OtherTimestamp= i%24 == 0 ? null : new DBDateTime(timeOffset + (long)(scale*(Math.random()*2-0.05))*100_000_000L)", + "MyString=(i%11==0? null : `a`+(int)(scale*(Math.random()*2-1)))", + "MyInt=(i%12==0 ? null : (int)(scale*(Math.random()*2-1)))", + "MyLong=(i%13==0 ? null : (long)(scale*(Math.random()*2-1)))", + "MyFloat=(float)(i%14==0 ? null : i%10==0 ? 1.0F/0.0F: i%5==0 ? -1.0F/0.0F : (float) scale*(Math.random()*2-1))", + "MyDouble=(double)(i%16==0 ? null : i%10==0 ? 1.0D/0.0D: i%5==0 ? -1.0D/0.0D : (double) scale*(Math.random()*2-1))", + "MyBoolean = (i%17==0 ? null : (int)(10*Math.random())%2==0)", + "MyChar = (i%18==0 ? null : new Character((char) (((26*Math.random())%26)+97)) )", + "MyShort=(short)(i%19==0 ? null : (int)(scale*(Math.random()*2-1)))", + "MyByte=(Byte)(i%19==0 ? null : new Byte( Integer.toString((int)(Byte.MAX_VALUE*(Math.random()*2-1)))))", + "MyBigDecimal=(i%21==0 ? null : new java.math.BigDecimal(scale*(Math.random()*2-1)))", + "MyBigInteger=(i%22==0 ? null : new java.math.BigInteger(Integer.toString((int)(scale*(Math.random()*2-1)))))"); final Table staticTwo = emptyTable(size) - .update( - "Timestamp= i%23 == 0 ? null : new DBDateTime(timeOffset + (long)(scale*(Math.random()*2-0.1))*100_000_000L)", - "OtherTimestamp= i%24 == 0 ? null : new DBDateTime(timeOffset + (long)(scale*(Math.random()*2-0.05))*100_000_000L)", - "MyString=(i%11==0? null : `a`+(int)(scale*(Math.random()*2-1)))", - "MyInt=(i%12==0 ? null : (int)(scale*(Math.random()*2-1)))", - "MyLong=(i%13==0 ? null : (long)(scale*(Math.random()*2-1)))", - "MyFloat=(float)(i%14==0 ? null : i%10==0 ? 1.0F/0.0F: i%5==0 ? -1.0F/0.0F : (float) scale*(Math.random()*2-1))", - "MyDouble=(double)(i%16==0 ? null : i%10==0 ? 1.0D/0.0D: i%5==0 ? -1.0D/0.0D : (double) scale*(Math.random()*2-1))", - "MyBoolean = (i%17==0 ? null : (int)(10*Math.random())%2==0)", - "MyChar = (i%18==0 ? 
null : new Character((char) (((26*Math.random())%26)+97)) )", - "MyShort=(short)(i%19==0 ? null : (int)(scale*(Math.random()*2-1)))", - "MyByte=(Byte)(i%19==0 ? null : new Byte( Integer.toString((int)(Byte.MAX_VALUE*(Math.random()*2-1)))))", - "MyBigDecimal=(i%21==0 ? null : new java.math.BigDecimal(scale*(Math.random()*2-1)))", - "MyBigInteger=(i%22==0 ? null : new java.math.BigInteger(Integer.toString((int)(scale*(Math.random()*2-1)))))"); + .update("Timestamp= i%23 == 0 ? null : new DBDateTime(timeOffset + (long)(scale*(Math.random()*2-0.1))*100_000_000L)", + "OtherTimestamp= i%24 == 0 ? null : new DBDateTime(timeOffset + (long)(scale*(Math.random()*2-0.05))*100_000_000L)", + "MyString=(i%11==0? null : `a`+(int)(scale*(Math.random()*2-1)))", + "MyInt=(i%12==0 ? null : (int)(scale*(Math.random()*2-1)))", + "MyLong=(i%13==0 ? null : (long)(scale*(Math.random()*2-1)))", + "MyFloat=(float)(i%14==0 ? null : i%10==0 ? 1.0F/0.0F: i%5==0 ? -1.0F/0.0F : (float) scale*(Math.random()*2-1))", + "MyDouble=(double)(i%16==0 ? null : i%10==0 ? 1.0D/0.0D: i%5==0 ? -1.0D/0.0D : (double) scale*(Math.random()*2-1))", + "MyBoolean = (i%17==0 ? null : (int)(10*Math.random())%2==0)", + "MyChar = (i%18==0 ? null : new Character((char) (((26*Math.random())%26)+97)) )", + "MyShort=(short)(i%19==0 ? null : (int)(scale*(Math.random()*2-1)))", + "MyByte=(Byte)(i%19==0 ? null : new Byte( Integer.toString((int)(Byte.MAX_VALUE*(Math.random()*2-1)))))", + "MyBigDecimal=(i%21==0 ? null : new java.math.BigDecimal(scale*(Math.random()*2-1)))", + "MyBigInteger=(i%22==0 ? 
null : new java.math.BigInteger(Integer.toString((int)(scale*(Math.random()*2-1)))))"); final Table static2ts = staticTwo.sort("Timestamp"); for (final String column : columnNames) { TableTools.showWithIndex(static2ts); final Table resultZk = staticOne.aj(staticTwo.sort(column), column, - "Extra=OtherTimestamp,Extra2=MyLong,Check=" + column); + "Extra=OtherTimestamp,Extra2=MyLong,Check=" + column); TableTools.showWithIndex(resultZk); final Table resultTs = staticOne.aj(static2ts, column + ",Timestamp", - "Extra=OtherTimestamp,Extra2=MyLong,Check=" + column); + "Extra=OtherTimestamp,Extra2=MyLong,Check=" + column); TableTools.showWithIndex(resultTs); } } finally { @@ -1436,43 +1340,40 @@ public void testIds6898() { final int leftSize = 32000; final int rightSize = 32000; final QueryTable leftTable = getTable(true, 100000, random, - leftColumnInfo = - initColumnInfos(new String[] {"Bucket", "LeftStamp", "LeftSentinel"}, - new TstUtils.StringGenerator(leftSize), - new TstUtils.IntGenerator(0, 100000), - new TstUtils.IntGenerator(10_000_000, 10_010_000))); + leftColumnInfo = initColumnInfos(new String[] {"Bucket", "LeftStamp", "LeftSentinel"}, + new TstUtils.StringGenerator(leftSize), + new TstUtils.IntGenerator(0, 100000), + new TstUtils.IntGenerator(10_000_000, 10_010_000))); final ColumnInfo[] rightColumnInfo; final QueryTable rightTable = getTable(true, 100000, random, - rightColumnInfo = - initColumnInfos(new String[] {"Bucket", "RightStamp", "RightSentinel"}, - new TstUtils.StringGenerator(leftSize), - new TstUtils.SortedIntGenerator(0, 100000), - new TstUtils.IntGenerator(20_000_000, 20_010_000))); + rightColumnInfo = initColumnInfos(new String[] {"Bucket", "RightStamp", "RightSentinel"}, + new TstUtils.StringGenerator(leftSize), + new TstUtils.SortedIntGenerator(0, 100000), + new TstUtils.IntGenerator(20_000_000, 20_010_000))); final EvalNuggetInterface[] en = new EvalNuggetInterface[] { new EvalNugget() { @Override protected Table e() { return 
AsOfJoinHelper.asOfJoin(QueryTableJoinTest.SMALL_RIGHT_CONTROL, - (QueryTable) leftTable.sort("LeftStamp"), rightTable, - MatchPairFactory.getExpressions("Bucket", "LeftStamp=RightStamp"), - MatchPairFactory.getExpressions("RightSentinel"), - SortingOrder.Ascending, false); + (QueryTable) leftTable.sort("LeftStamp"), rightTable, + MatchPairFactory.getExpressions("Bucket", "LeftStamp=RightStamp"), + MatchPairFactory.getExpressions("RightSentinel"), SortingOrder.Ascending, false); } }, }; for (int step = 0; step < maxSteps; step++) { - System.out.println("Step = " + step + ", leftSize=" + leftSize + ", rightSize=" - + rightSize + ", seed = " + seed + ", joinIncrement=" + joinIncrement); + System.out.println("Step = " + step + ", leftSize=" + leftSize + ", rightSize=" + rightSize + + ", seed = " + seed + ", joinIncrement=" + joinIncrement); if (LiveTableTestCase.printTableUpdates) { System.out.println("Left Table:" + leftTable.size()); showWithIndex(leftTable, 100); System.out.println("Right Table:" + rightTable.size()); showWithIndex(rightTable, 100); } - joinIncrement.step(leftSize, rightSize, leftTable, rightTable, leftColumnInfo, - rightColumnInfo, en, random); + joinIncrement.step(leftSize, rightSize, leftTable, rightTable, leftColumnInfo, rightColumnInfo, en, + random); } } } diff --git a/DB/src/test/java/io/deephaven/db/v2/QueryTableCrossJoinSmallRightBitsTest.java b/DB/src/test/java/io/deephaven/db/v2/QueryTableCrossJoinSmallRightBitsTest.java index f53b1dc5d2e..5ea033187ac 100644 --- a/DB/src/test/java/io/deephaven/db/v2/QueryTableCrossJoinSmallRightBitsTest.java +++ b/DB/src/test/java/io/deephaven/db/v2/QueryTableCrossJoinSmallRightBitsTest.java @@ -53,8 +53,7 @@ public void testZeroKeyOutOfKeySpace() { // we can fit if we use min right bits left.join(right, 1); } else { - left.join(right); // static - static should be OK because it always uses min - // right bits + left.join(right); // static - static should be OK because it always uses min right bits } } } 
@@ -82,8 +81,7 @@ public void testKeyColumnOutOfKeySpace() { // we can fit if we use min right bits left.join(right, "A=B", 1); } else { - left.join(right, "A=B"); // static - static should be OK because it always uses - // min right bits + left.join(right, "A=B"); // static - static should be OK because it always uses min right bits } } } @@ -104,9 +102,8 @@ public void testLeftGroupChangesOnRightShift() { for (int grp = 0; grp < sizes.length; ++grp) { int[] data = new int[sizes[grp]]; Arrays.fill(data, grp); - TstUtils.addToTable(rTable, - Index.FACTORY.getIndexByRange(grp * 10, grp * 10 + data.length - 1), - intCol("A", data)); + TstUtils.addToTable(rTable, Index.FACTORY.getIndexByRange(grp * 10, grp * 10 + data.length - 1), + intCol("A", data)); } final EvalNugget[] en = new EvalNugget[] { @@ -143,8 +140,8 @@ public void testLeftGroupChangesOnRightShift() { } public void testLeftGroupChangesOnRightShiftWithAllInnerShifts() { - // This test is similar to the above, but has at least one inner shift on every group (which - // hits different logic). + // This test is similar to the above, but has at least one inner shift on every group (which hits different + // logic). 
// On the step with the shift: // - one row to not change groups, but group gets smaller (grp 0) @@ -160,9 +157,8 @@ public void testLeftGroupChangesOnRightShiftWithAllInnerShifts() { for (int grp = 0; grp < sizes.length; ++grp) { int[] data = new int[sizes[grp]]; Arrays.fill(data, grp); - TstUtils.addToTable(rTable, - Index.FACTORY.getIndexByRange(grp * 10, grp * 10 + data.length - 1), - intCol("A", data)); + TstUtils.addToTable(rTable, Index.FACTORY.getIndexByRange(grp * 10, grp * 10 + data.length - 1), + intCol("A", data)); } final EvalNugget[] en = new EvalNugget[] { @@ -213,9 +209,8 @@ public void testLeftGroupChangesOnBothShift() { for (int grp = 0; grp < sizes.length; ++grp) { int[] data = new int[sizes[grp]]; Arrays.fill(data, grp); - TstUtils.addToTable(rTable, - Index.FACTORY.getIndexByRange(grp * 10, grp * 10 + data.length - 1), - intCol("A", data)); + TstUtils.addToTable(rTable, Index.FACTORY.getIndexByRange(grp * 10, grp * 10 + data.length - 1), + intCol("A", data)); } final EvalNugget[] en = new EvalNugget[] { @@ -255,8 +250,8 @@ public void testLeftGroupChangesOnBothShift() { } public void testLeftGroupChangesOnBothShiftWithInnerShifts() { - // This test is similar to the above, but has at least one inner shift on every group (which - // hits different logic). + // This test is similar to the above, but has at least one inner shift on every group (which hits different + // logic). 
// On the step with the shift: // - one row to not change groups, but group gets smaller (grp 0) @@ -272,9 +267,8 @@ public void testLeftGroupChangesOnBothShiftWithInnerShifts() { for (int grp = 0; grp < sizes.length; ++grp) { int[] data = new int[sizes[grp]]; Arrays.fill(data, grp); - TstUtils.addToTable(rTable, - Index.FACTORY.getIndexByRange(grp * 10, grp * 10 + data.length - 1), - intCol("A", data)); + TstUtils.addToTable(rTable, Index.FACTORY.getIndexByRange(grp * 10, grp * 10 + data.length - 1), + intCol("A", data)); } final EvalNugget[] en = new EvalNugget[] { diff --git a/DB/src/test/java/io/deephaven/db/v2/QueryTableCrossJoinTestBase.java b/DB/src/test/java/io/deephaven/db/v2/QueryTableCrossJoinTestBase.java index 7a6f714f500..5e10d6f58f3 100644 --- a/DB/src/test/java/io/deephaven/db/v2/QueryTableCrossJoinTestBase.java +++ b/DB/src/test/java/io/deephaven/db/v2/QueryTableCrossJoinTestBase.java @@ -32,14 +32,12 @@ public QueryTableCrossJoinTestBase(int numRightBitsToReserve) { this.numRightBitsToReserve = numRightBitsToReserve; } - private TstUtils.ColumnInfo[] getIncrementalColumnInfo(final String prefix, - int numGroups) { + private TstUtils.ColumnInfo[] getIncrementalColumnInfo(final String prefix, int numGroups) { String[] names = new String[] {"Sym", "IntCol"}; - return initColumnInfos( - Arrays.stream(names).map(name -> prefix + name).toArray(String[]::new), - new TstUtils.IntGenerator(0, numGroups - 1), - new TstUtils.IntGenerator(10, 100000)); + return initColumnInfos(Arrays.stream(names).map(name -> prefix + name).toArray(String[]::new), + new TstUtils.IntGenerator(0, numGroups - 1), + new TstUtils.IntGenerator(10, 100000)); } public void testZeroKeyJoinBitExpansionOnAdd() { @@ -71,8 +69,7 @@ public void testZeroKeyJoinBitExpansionOnAdd() { TstUtils.validate(en); // One shift: the entire left row's sub-table - Assert.eq(listener.update.shifted.size(), "listener.update.shifted.size()", lTable.size(), - "lTable.size()"); + 
Assert.eq(listener.update.shifted.size(), "listener.update.shifted.size()", lTable.size(), "lTable.size()"); } public void testZeroKeyJoinBitExpansionOnBoundaryShift() { @@ -107,10 +104,10 @@ public void testZeroKeyJoinBitExpansionOnBoundaryShift() { }); TstUtils.validate(en); - // Two shifts: before upstream shift, upstream shift (note: post upstream shift not possible - // because it exceeds known keyspace range) - Assert.eq(listener.update.shifted.size(), "listener.update.shifted.size()", - 2 * lTable.size(), "2 * lTable.size()"); + // Two shifts: before upstream shift, upstream shift (note: post upstream shift not possible because it exceeds + // known keyspace range) + Assert.eq(listener.update.shifted.size(), "listener.update.shifted.size()", 2 * lTable.size(), + "2 * lTable.size()"); } public void testZeroKeyJoinBitExpansionWithInnerShift() { @@ -145,8 +142,8 @@ public void testZeroKeyJoinBitExpansionWithInnerShift() { TstUtils.validate(en); // Three shifts: before upstream shift, upstream shift, post upstream shift - Assert.eq(listener.update.shifted.size(), "listener.update.shifted.size()", - 3 * lTable.size(), "3 * lTable.size()"); + Assert.eq(listener.update.shifted.size(), "listener.update.shifted.size()", 3 * lTable.size(), + "3 * lTable.size()"); } public void testZeroKeyJoinCompoundShift() { @@ -198,7 +195,7 @@ public void testIncrementalZeroKeyJoin() { } private void testIncrementalZeroKeyJoin(final String ctxt, final int size, final int seed, - final MutableInt numSteps) { + final MutableInt numSteps) { final int leftSize = (int) Math.ceil(Math.sqrt(size)); final int maxSteps = numSteps.intValue(); @@ -211,10 +208,8 @@ private void testIncrementalZeroKeyJoin(final String ctxt, final int size, final final TstUtils.ColumnInfo[] rightColumns = getIncrementalColumnInfo("rt", numGroups); final QueryTable rightTicking = getTable(size, random, rightColumns); - final QueryTable leftStatic = - getTable(false, leftSize, random, getIncrementalColumnInfo("ls", 
numGroups)); - final QueryTable rightStatic = - getTable(false, size, random, getIncrementalColumnInfo("rs", numGroups)); + final QueryTable leftStatic = getTable(false, leftSize, random, getIncrementalColumnInfo("ls", numGroups)); + final QueryTable rightStatic = getTable(false, size, random, getIncrementalColumnInfo("rs", numGroups)); final EvalNugget[] en = new EvalNugget[] { // Zero-Key Joins @@ -227,16 +222,13 @@ private void testIncrementalZeroKeyJoin(final String ctxt, final int size, final LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { final int stepInstructions = random.nextInt(); if (stepInstructions % 4 != 1) { - GenerateTableUpdates.generateShiftAwareTableUpdates( - GenerateTableUpdates.DEFAULT_PROFILE, leftSize, random, leftTicking, - leftColumns); + GenerateTableUpdates.generateShiftAwareTableUpdates(GenerateTableUpdates.DEFAULT_PROFILE, leftSize, + random, leftTicking, leftColumns); } if (stepInstructions % 4 != 0) { - // left size is sqrt right table size; which is a good update size for the right - // table - GenerateTableUpdates.generateShiftAwareTableUpdates( - GenerateTableUpdates.DEFAULT_PROFILE, leftSize, random, rightTicking, - rightColumns); + // left size is sqrt right table size; which is a good update size for the right table + GenerateTableUpdates.generateShiftAwareTableUpdates(GenerateTableUpdates.DEFAULT_PROFILE, leftSize, + random, rightTicking, rightColumns); } }); TstUtils.validate(ctxt + " step == " + numSteps.getValue(), en); @@ -250,8 +242,7 @@ public void testSmallStaticJoin() { for (int rt = 0; rt < 2; ++rt) { boolean leftTicking = lt == 1; boolean rightTicking = rt == 1; - testStaticJoin(types, cardinality, types.length, types.length, leftTicking, - rightTicking); + testStaticJoin(types, cardinality, types.length, types.length, leftTicking, rightTicking); // force left build testStaticJoin(types, cardinality, 1, types.length, leftTicking, rightTicking); // force right build @@ -271,15 +262,14 @@ public void 
testLargeStaticJoin() { for (int rt = 0; rt < 2; ++rt) { boolean leftTicking = lt == 1; boolean rightTicking = rt == 1; - testStaticJoin(types, cardinality, types.length, types.length, leftTicking, - rightTicking); + testStaticJoin(types, cardinality, types.length, types.length, leftTicking, rightTicking); } } } // generate a table such that all pairs of types exist and are part of the cross-join - private void testStaticJoin(final String[] types, final int[] cardinality, int maxLeftType, - int maxRightType, boolean leftTicking, boolean rightTicking) { + private void testStaticJoin(final String[] types, final int[] cardinality, int maxLeftType, int maxRightType, + boolean leftTicking, boolean rightTicking) { Assert.eq(types.length, "types.length", cardinality.length, "cardinality.length"); long nextLeftRow = 0; @@ -313,8 +303,7 @@ private void testStaticJoin(final String[] types, final int[] cardinality, int m } expectedSize += leftSize * rightSize; - Assert.eqFalse(expectedByKey.containsKey(sharedKey), - "expectedByKey.containsKey(sharedKey)"); + Assert.eqFalse(expectedByKey.containsKey(sharedKey), "expectedByKey.containsKey(sharedKey)"); expectedByKey.put(sharedKey, new MutableLong((long) leftSize * rightSize)); } } @@ -322,22 +311,22 @@ private void testStaticJoin(final String[] types, final int[] cardinality, int m final QueryTable left; if (leftTicking) { left = TstUtils.testRefreshingTable(Index.FACTORY.getFlatIndex(nextLeftRow), - c("sharedKey", leftKeys.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)), - c("leftData", leftData.toArray(new Long[] {}))); + c("sharedKey", leftKeys.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)), + c("leftData", leftData.toArray(new Long[] {}))); } else { left = TstUtils.testTable(Index.FACTORY.getFlatIndex(nextLeftRow), - c("sharedKey", leftKeys.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)), - c("leftData", leftData.toArray(new Long[] {}))); + c("sharedKey", leftKeys.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)), + 
c("leftData", leftData.toArray(new Long[] {}))); } final QueryTable right; if (rightTicking) { right = TstUtils.testRefreshingTable(Index.FACTORY.getFlatIndex(nextRightRow), - c("sharedKey", rightKeys.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)), - c("rightData", rightData.toArray(new Long[] {}))); + c("sharedKey", rightKeys.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)), + c("rightData", rightData.toArray(new Long[] {}))); } else { right = TstUtils.testTable(Index.FACTORY.getFlatIndex(nextRightRow), - c("sharedKey", rightKeys.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)), - c("rightData", rightData.toArray(new Long[] {}))); + c("sharedKey", rightKeys.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)), + c("rightData", rightData.toArray(new Long[] {}))); } final Table chunkedCrossJoin = left.join(right, "sharedKey", numRightBitsToReserve); @@ -372,8 +361,7 @@ private void testStaticJoin(final String[] types, final int[] cardinality, int m if (lastSharedKey.getValue() != null && lastSharedKey.getValue().equals(sharedKey)) { Assert.leq(lastLeftId.longValue(), "lastLeftId.longValue()", leftId, "leftId"); if (lastLeftId.longValue() == leftId) { - Assert.lt(lastRightId.longValue(), "lastRightId.longValue()", rightId, - "rightId"); + Assert.lt(lastRightId.longValue(), "lastRightId.longValue()", rightId, "rightId"); } } else { lastSharedKey.setValue(sharedKey); @@ -406,11 +394,9 @@ public void testStaticVsNaturalJoin() { public void testStaticVsNaturalJoin2() { final int size = 10000; - final QueryTable xqt = - new QueryTable(Index.FACTORY.getFlatIndex(size), Collections.emptyMap()); + final QueryTable xqt = new QueryTable(Index.FACTORY.getFlatIndex(size), Collections.emptyMap()); xqt.setRefreshing(true); - final QueryTable yqt = - new QueryTable(Index.FACTORY.getFlatIndex(size), Collections.emptyMap()); + final QueryTable yqt = new QueryTable(Index.FACTORY.getFlatIndex(size), Collections.emptyMap()); yqt.setRefreshing(true); final Table x = 
xqt.update("Col1=i"); @@ -445,19 +431,17 @@ public void testIncrementalOverflow() { } private void testIncrementalOverflow(final String ctxt, final int numGroups, final int seed, - final MutableInt numSteps) { + final MutableInt numSteps) { final int maxSteps = numSteps.intValue(); final Random random = new Random(seed); // Note: make our join helper think this left table might tick - final QueryTable leftNotTicking = - getTable(1000, random, getIncrementalColumnInfo("lt", numGroups)); + final QueryTable leftNotTicking = getTable(1000, random, getIncrementalColumnInfo("lt", numGroups)); final TstUtils.ColumnInfo[] leftColumns = getIncrementalColumnInfo("lt", numGroups); final QueryTable leftTicking = getTable(0, random, leftColumns); - final TstUtils.ColumnInfo[] leftShiftingColumns = - getIncrementalColumnInfo("lt", numGroups); + final TstUtils.ColumnInfo[] leftShiftingColumns = getIncrementalColumnInfo("lt", numGroups); final QueryTable leftShifting = getTable(1000, random, leftShiftingColumns); final TstUtils.ColumnInfo[] rightColumns = getIncrementalColumnInfo("rt", numGroups); @@ -482,14 +466,14 @@ int initialBuildSize() { final EvalNugget[] en = new EvalNugget[] { EvalNugget.from(() -> CrossJoinHelper.join(leftNotTicking, rightTicking, - MatchPairFactory.getExpressions("ltSym=rtSym"), - MatchPair.ZERO_LENGTH_MATCH_PAIR_ARRAY, numRightBitsToReserve, control)), + MatchPairFactory.getExpressions("ltSym=rtSym"), MatchPair.ZERO_LENGTH_MATCH_PAIR_ARRAY, + numRightBitsToReserve, control)), EvalNugget.from(() -> CrossJoinHelper.join(leftTicking, rightTicking, - MatchPairFactory.getExpressions("ltSym=rtSym"), - MatchPair.ZERO_LENGTH_MATCH_PAIR_ARRAY, numRightBitsToReserve, control)), + MatchPairFactory.getExpressions("ltSym=rtSym"), MatchPair.ZERO_LENGTH_MATCH_PAIR_ARRAY, + numRightBitsToReserve, control)), EvalNugget.from(() -> CrossJoinHelper.join(leftShifting, rightTicking, - MatchPairFactory.getExpressions("ltSym=rtSym"), - MatchPair.ZERO_LENGTH_MATCH_PAIR_ARRAY, 
numRightBitsToReserve, control)), + MatchPairFactory.getExpressions("ltSym=rtSym"), MatchPair.ZERO_LENGTH_MATCH_PAIR_ARRAY, + numRightBitsToReserve, control)), }; final int updateSize = (int) Math.ceil(Math.sqrt(numGroups)); @@ -501,8 +485,7 @@ int initialBuildSize() { TableTools.showWithIndex(rightTicking); } - final GenerateTableUpdates.SimulationProfile shiftingProfile = - new GenerateTableUpdates.SimulationProfile(); + final GenerateTableUpdates.SimulationProfile shiftingProfile = new GenerateTableUpdates.SimulationProfile(); shiftingProfile.SHIFT_10_PERCENT_POS_SPACE = 5; shiftingProfile.SHIFT_10_PERCENT_KEY_SPACE = 5; shiftingProfile.SHIFT_AGGRESSIVELY = 85; @@ -511,16 +494,14 @@ int initialBuildSize() { LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { final int stepInstructions = random.nextInt(); if (stepInstructions % 4 != 1) { - GenerateTableUpdates.generateShiftAwareTableUpdates( - GenerateTableUpdates.DEFAULT_PROFILE, updateSize, random, leftTicking, - leftColumns); - GenerateTableUpdates.generateShiftAwareTableUpdates(shiftingProfile, updateSize, - random, leftShifting, leftShiftingColumns); + GenerateTableUpdates.generateShiftAwareTableUpdates(GenerateTableUpdates.DEFAULT_PROFILE, + updateSize, random, leftTicking, leftColumns); + GenerateTableUpdates.generateShiftAwareTableUpdates(shiftingProfile, updateSize, random, + leftShifting, leftShiftingColumns); } if (stepInstructions % 4 != 0) { - GenerateTableUpdates.generateShiftAwareTableUpdates( - GenerateTableUpdates.DEFAULT_PROFILE, updateSize, random, rightTicking, - rightColumns); + GenerateTableUpdates.generateShiftAwareTableUpdates(GenerateTableUpdates.DEFAULT_PROFILE, + updateSize, random, rightTicking, rightColumns); } }); @@ -536,8 +517,8 @@ public void testIncrementalWithKeyColumns() { } } - protected void testIncrementalWithKeyColumns(final String ctxt, final int initialSize, - final int seed, final MutableInt numSteps) { + protected void testIncrementalWithKeyColumns(final String 
ctxt, final int initialSize, final int seed, + final MutableInt numSteps) { final int maxSteps = numSteps.intValue(); final Random random = new Random(seed); @@ -548,18 +529,13 @@ protected void testIncrementalWithKeyColumns(final String ctxt, final int initia final TstUtils.ColumnInfo[] rightColumns = getIncrementalColumnInfo("rt", numGroups); final QueryTable rightTicking = getTable(initialSize, random, rightColumns); - final QueryTable leftStatic = - getTable(false, initialSize, random, getIncrementalColumnInfo("ls", numGroups)); - final QueryTable rightStatic = - getTable(false, initialSize, random, getIncrementalColumnInfo("rs", numGroups)); + final QueryTable leftStatic = getTable(false, initialSize, random, getIncrementalColumnInfo("ls", numGroups)); + final QueryTable rightStatic = getTable(false, initialSize, random, getIncrementalColumnInfo("rs", numGroups)); final EvalNugget[] en = new EvalNugget[] { - EvalNugget.from( - () -> leftTicking.join(rightTicking, "ltSym=rtSym", numRightBitsToReserve)), - EvalNugget.from( - () -> leftStatic.join(rightTicking, "lsSym=rtSym", numRightBitsToReserve)), - EvalNugget.from( - () -> leftTicking.join(rightStatic, "ltSym=rsSym", numRightBitsToReserve)), + EvalNugget.from(() -> leftTicking.join(rightTicking, "ltSym=rtSym", numRightBitsToReserve)), + EvalNugget.from(() -> leftStatic.join(rightTicking, "lsSym=rtSym", numRightBitsToReserve)), + EvalNugget.from(() -> leftTicking.join(rightStatic, "ltSym=rsSym", numRightBitsToReserve)), }; final int updateSize = (int) Math.ceil(Math.sqrt(initialSize)); @@ -579,14 +555,12 @@ protected void testIncrementalWithKeyColumns(final String ctxt, final int initia LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { final int stepInstructions = random.nextInt(); if (stepInstructions % 4 != 1) { - GenerateTableUpdates.generateShiftAwareTableUpdates( - GenerateTableUpdates.DEFAULT_PROFILE, updateSize, random, leftTicking, - leftColumns); + 
GenerateTableUpdates.generateShiftAwareTableUpdates(GenerateTableUpdates.DEFAULT_PROFILE, + updateSize, random, leftTicking, leftColumns); } if (stepInstructions % 4 != 0) { - GenerateTableUpdates.generateShiftAwareTableUpdates( - GenerateTableUpdates.DEFAULT_PROFILE, updateSize, random, rightTicking, - rightColumns); + GenerateTableUpdates.generateShiftAwareTableUpdates(GenerateTableUpdates.DEFAULT_PROFILE, + updateSize, random, rightTicking, rightColumns); } }); @@ -603,10 +577,9 @@ public void testColumnSourceCanReuseContextWithSmallerOrderedKeys() { final int CHUNK_SIZE = 4; final ColumnSource column = jt.getColumnSource("I", int.class); try (final ColumnSource.FillContext context = column.makeFillContext(CHUNK_SIZE); - final WritableIntChunk dest = - WritableIntChunk.makeWritableChunk(CHUNK_SIZE); - final ResettableWritableIntChunk rdest = - ResettableWritableIntChunk.makeResettableChunk()) { + final WritableIntChunk dest = WritableIntChunk.makeWritableChunk(CHUNK_SIZE); + final ResettableWritableIntChunk rdest = + ResettableWritableIntChunk.makeResettableChunk()) { rdest.resetFromChunk(dest, 0, 4); column.fillChunk(context, rdest, jt.getIndex().subindexByPos(0, 4)); @@ -640,9 +613,9 @@ int initialBuildSize() { }; final EvalNugget[] en = new EvalNugget[] { - EvalNugget.from(() -> CrossJoinHelper.join(leftTicking, rightTicking, - MatchPairFactory.getExpressions("intCol"), - MatchPair.ZERO_LENGTH_MATCH_PAIR_ARRAY, numRightBitsToReserve, control)), + EvalNugget.from( + () -> CrossJoinHelper.join(leftTicking, rightTicking, MatchPairFactory.getExpressions("intCol"), + MatchPair.ZERO_LENGTH_MATCH_PAIR_ARRAY, numRightBitsToReserve, control)), }; if (LiveTableTestCase.printTableUpdates) { @@ -656,8 +629,7 @@ int initialBuildSize() { final long rightOffset = numSteps.getValue(); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - addToTable(leftTicking, i(numSteps.getValue()), - longCol("intCol", numSteps.getValue())); + addToTable(leftTicking, 
i(numSteps.getValue()), longCol("intCol", numSteps.getValue())); ShiftAwareListener.Update up = new ShiftAwareListener.Update(); up.shifted = IndexShiftData.EMPTY; up.added = i(numSteps.getValue()); @@ -670,9 +642,8 @@ int initialBuildSize() { for (int i = 0; i <= numSteps.getValue(); ++i) { data[i] = i; } - addToTable(rightTicking, - Index.FACTORY.getIndexByRange(rightOffset, rightOffset + numSteps.getValue()), - longCol("intCol", data)); + addToTable(rightTicking, Index.FACTORY.getIndexByRange(rightOffset, rightOffset + numSteps.getValue()), + longCol("intCol", data)); TstUtils.removeRows(rightTicking, i(rightOffset - 1)); up = new ShiftAwareListener.Update(); @@ -684,8 +655,7 @@ int initialBuildSize() { if (numSteps.getValue() == 0) { up.modified = Index.FACTORY.getEmptyIndex(); } else { - up.modified = Index.FACTORY.getIndexByRange(rightOffset, - rightOffset + numSteps.getValue() - 1); + up.modified = Index.FACTORY.getIndexByRange(rightOffset, rightOffset + numSteps.getValue() - 1); } up.modifiedColumnSet = ModifiedColumnSet.ALL; rightTicking.notifyListeners(up); diff --git a/DB/src/test/java/io/deephaven/db/v2/QueryTableFlattenTest.java b/DB/src/test/java/io/deephaven/db/v2/QueryTableFlattenTest.java index 5dc6136e631..b936e78a73e 100644 --- a/DB/src/test/java/io/deephaven/db/v2/QueryTableFlattenTest.java +++ b/DB/src/test/java/io/deephaven/db/v2/QueryTableFlattenTest.java @@ -26,10 +26,10 @@ private void testFlatten(int size) { final Random random = new Random(0); final TstUtils.ColumnInfo columnInfo[]; final QueryTable queryTable = getTable(size, random, - columnInfo = initColumnInfos(new String[] {"Sym", "intCol", "doubleCol"}, - new TstUtils.SetGenerator<>("a", "b", "c", "d"), - new TstUtils.IntGenerator(10, 100, 0.1), - new TstUtils.SetGenerator<>(10.1, 20.1, 30.1))); + columnInfo = initColumnInfos(new String[] {"Sym", "intCol", "doubleCol"}, + new TstUtils.SetGenerator<>("a", "b", "c", "d"), + new TstUtils.IntGenerator(10, 100, 0.1), + new 
TstUtils.SetGenerator<>(10.1, 20.1, 30.1))); if (printTableUpdates) { showWithIndex(queryTable); } @@ -52,7 +52,7 @@ public Table e() { } }, new TableComparator(queryTable.updateView("i2=intCol*2").flatten(), - queryTable.flatten().updateView("i2=intCol*2")), + queryTable.flatten().updateView("i2=intCol*2")), }; for (int i = 0; i < 100; i++) { simulateShiftAwareStep(size, random, queryTable, columnInfo, en); @@ -80,8 +80,7 @@ public void testLegacyFlatten3() { data[7] = 101; data[8] = 102; data[9] = 104; - final QueryTable queryTable = - TstUtils.testRefreshingTable(i(data), longCol("intCol", data)); + final QueryTable queryTable = TstUtils.testRefreshingTable(i(data), longCol("intCol", data)); final TestHelper helper = new TestHelper<>(queryTable.flatten(), SimpleListener::new); @@ -102,11 +101,9 @@ public void testRemoveWithLowMod() { for (int ii = 0; ii < data.length; ++ii) { data[ii] = ii * 10; } - final QueryTable queryTable = - TstUtils.testRefreshingTable(indexByRange(0, 9), c("intCol", data)); + final QueryTable queryTable = TstUtils.testRefreshingTable(indexByRange(0, 9), c("intCol", data)); - final TestHelper helper = - new TestHelper<>(queryTable.flatten(), SimpleShiftAwareListener::new); + final TestHelper helper = new TestHelper<>(queryTable.flatten(), SimpleShiftAwareListener::new); helper.modAndValidate(() -> { addToTable(queryTable, i(0), c("intCol", 1)); @@ -121,8 +118,7 @@ public void testLegacyRemoveWithLowMod() { for (int ii = 0; ii < data.length; ++ii) { data[ii] = ii * 10; } - final QueryTable queryTable = - TstUtils.testRefreshingTable(indexByRange(0, 9), c("intCol", data)); + final QueryTable queryTable = TstUtils.testRefreshingTable(indexByRange(0, 9), c("intCol", data)); final TestHelper helper = new TestHelper<>(queryTable.flatten(), SimpleListener::new); @@ -139,11 +135,9 @@ public void testRemoveWithHighMod() { for (int ii = 0; ii < data.length; ++ii) { data[ii] = ii * 10; } - final QueryTable queryTable = - 
TstUtils.testRefreshingTable(indexByRange(0, 9), c("intCol", data)); + final QueryTable queryTable = TstUtils.testRefreshingTable(indexByRange(0, 9), c("intCol", data)); - final TestHelper helper = - new TestHelper<>(queryTable.flatten(), SimpleShiftAwareListener::new); + final TestHelper helper = new TestHelper<>(queryTable.flatten(), SimpleShiftAwareListener::new); helper.modAndValidate(() -> { addToTable(queryTable, i(8), c("intCol", 1)); @@ -159,8 +153,7 @@ public void testLegacyRemoveWithHighMod() { for (int ii = 0; ii < data.length; ++ii) { data[ii] = ii * 10; } - final QueryTable queryTable = - TstUtils.testRefreshingTable(indexByRange(0, 9), c("intCol", data)); + final QueryTable queryTable = TstUtils.testRefreshingTable(indexByRange(0, 9), c("intCol", data)); final TestHelper helper = new TestHelper<>(queryTable.flatten(), SimpleListener::new); @@ -184,11 +177,9 @@ public void testFlatten3() { data[7] = 101; data[8] = 102; data[9] = 104; - final QueryTable queryTable = - TstUtils.testRefreshingTable(i(data), longCol("intCol", data)); + final QueryTable queryTable = TstUtils.testRefreshingTable(i(data), longCol("intCol", data)); - final TestHelper helper = - new TestHelper<>(queryTable.flatten(), SimpleShiftAwareListener::new); + final TestHelper helper = new TestHelper<>(queryTable.flatten(), SimpleShiftAwareListener::new); helper.modAndValidate(() -> { addToTable(queryTable, i(70, 71, 78, 81), longCol("intCol", 70, 71, 78, 81)); @@ -200,10 +191,9 @@ public void testFlatten3() { @Test public void testFlattenModifications() { final QueryTable queryTable = TstUtils.testRefreshingTable(i(1, 2, 4, 6), - c("intCol", 10, 20, 40, 60)); + c("intCol", 10, 20, 40, 60)); - final TestHelper helper = - new TestHelper<>(queryTable.flatten(), SimpleShiftAwareListener::new); + final TestHelper helper = new TestHelper<>(queryTable.flatten(), SimpleShiftAwareListener::new); helper.modAndValidate(() -> { addToTable(queryTable, i(3), c("intCol", 30)); @@ -241,36 +231,33 @@ 
public interface ListenerFactory { } else if (listener instanceof ShiftAwareListener) { this.sourceTable.listenForUpdates((ShiftAwareListener) listener); } else { - throw new IllegalArgumentException( - "Listener type unsupported: " + listener.getClass().getName()); + throw new IllegalArgumentException("Listener type unsupported: " + listener.getClass().getName()); } validator = TableUpdateValidator.make(this.sourceTable); final QueryTable validatorTable = validator.getResultTable(); final ShiftAwareListener validatorTableListener = - new InstrumentedShiftAwareListenerAdapter(validatorTable, false) { - @Override - public void onUpdate(Update upstream) {} - - @Override - public void onFailureInternal(Throwable originalException, - UpdatePerformanceTracker.Entry sourceEntry) { - TestCase.fail(originalException.getMessage()); - } - }; + new InstrumentedShiftAwareListenerAdapter(validatorTable, false) { + @Override + public void onUpdate(Update upstream) {} + + @Override + public void onFailureInternal(Throwable originalException, + UpdatePerformanceTracker.Entry sourceEntry) { + TestCase.fail(originalException.getMessage()); + } + }; validatorTable.listenForUpdates(validatorTableListener); showWithIndex(sourceTable); } - void modAndValidate(final Runnable modTable, final Index added, final Index removed, - final Index modified) { + void modAndValidate(final Runnable modTable, final Index added, final Index removed, final Index modified) { modAndValidate(modTable, added, removed, modified, IndexShiftData.EMPTY); } - void modAndValidate(final Runnable modTable, final Index added, final Index removed, - final Index modified, - final IndexShiftData shifted) { + void modAndValidate(final Runnable modTable, final Index added, final Index removed, final Index modified, + final IndexShiftData shifted) { ++updateCount; LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(modTable::run); @@ -280,8 +267,7 @@ void modAndValidate(final Runnable modTable, final Index added, final Index 
remo Assert.assertEquals(0, shifted.size()); validate((SimpleListener) listener, updateCount, added, removed, modified); } else if (listener instanceof SimpleShiftAwareListener) { - validate((SimpleShiftAwareListener) listener, updateCount, added, removed, modified, - shifted); + validate((SimpleShiftAwareListener) listener, updateCount, added, removed, modified, shifted); } } } @@ -292,8 +278,7 @@ private static Index indexByRange(long firstKey, long lastKey) { private static IndexShiftData shiftDataByValues(long... values) { if (values.length % 3 != 0) { - throw new IllegalArgumentException( - "shift data is defined by triplets {start, end, shift}"); + throw new IllegalArgumentException("shift data is defined by triplets {start, end, shift}"); } IndexShiftData.Builder builder = new IndexShiftData.Builder(); for (int idx = 0; idx < values.length; idx += 3) { @@ -303,16 +288,15 @@ private static IndexShiftData shiftDataByValues(long... values) { } private static void validate(final SimpleListener listener, final long count, final Index added, - final Index removed, final Index modified) { + final Index removed, final Index modified) { Assert.assertEquals("simpleListener.getCount()", count, listener.getCount()); Assert.assertEquals("simpleListener.added", added, listener.added); Assert.assertEquals("simpleListener.removed", removed, listener.removed); Assert.assertEquals("simpleListener.modified", modified, listener.modified); } - private static void validate(final SimpleShiftAwareListener listener, final long count, - final Index added, - final Index removed, final Index modified, final IndexShiftData shifted) { + private static void validate(final SimpleShiftAwareListener listener, final long count, final Index added, + final Index removed, final Index modified, final IndexShiftData shifted) { Assert.assertEquals("simpleListener.getCount()", count, listener.getCount()); Assert.assertEquals("simpleListener.added", added, listener.update.added); 
Assert.assertEquals("simpleListener.removed", removed, listener.update.removed); @@ -344,8 +328,7 @@ public void show() throws IOException { } public void testFlattenFollowedBySumBy() { - // TODO: Write a test that just makes a RedirectedColumnSource with a wrapper, and - // fill/query it. + // TODO: Write a test that just makes a RedirectedColumnSource with a wrapper, and fill/query it. final QueryTable upstream = TstUtils.testRefreshingTable(ir(0, 100_000)); final Table input = upstream.updateView("A=ii", "B=ii % 1000", "C=ii % 2 == 0"); final Table odds = input.where("!C"); diff --git a/DB/src/test/java/io/deephaven/db/v2/QueryTableJoinTest.java b/DB/src/test/java/io/deephaven/db/v2/QueryTableJoinTest.java index f058be7b1dd..3e95e9e0b1a 100644 --- a/DB/src/test/java/io/deephaven/db/v2/QueryTableJoinTest.java +++ b/DB/src/test/java/io/deephaven/db/v2/QueryTableJoinTest.java @@ -35,30 +35,29 @@ public void testAjIncremental() throws ParseException { for (JoinIncrement joinIncrement : joinIncrementors) { testAjIncremental(leftSize, rightSize, joinIncrement, seed, maxSteps); testAjIncrementalSimple(leftSize, rightSize, joinIncrement, seed, maxSteps); - testAjIncrementalSimple2(leftSize, rightSize, joinIncrement, seed, - maxSteps); + testAjIncrementalSimple2(leftSize, rightSize, joinIncrement, seed, maxSteps); } } } } } - private void testAjIncrementalSimple(int leftSize, int rightSize, JoinIncrement joinIncrement, - long seed, @SuppressWarnings("SameParameterValue") long maxSteps) { + private void testAjIncrementalSimple(int leftSize, int rightSize, JoinIncrement joinIncrement, long seed, + @SuppressWarnings("SameParameterValue") long maxSteps) { final Random random = new Random(seed); final TstUtils.ColumnInfo[] leftColumnInfo; final QueryTable leftTable = getTable(leftSize, random, - leftColumnInfo = initColumnInfos(new String[] {"I1", "C1", "C2"}, - new TstUtils.SortedIntGenerator(1, 10000), - new TstUtils.SetGenerator<>("a", "b", "c", "d"), - new 
TstUtils.IntGenerator(10, 30))); + leftColumnInfo = initColumnInfos(new String[] {"I1", "C1", "C2"}, + new TstUtils.SortedIntGenerator(1, 10000), + new TstUtils.SetGenerator<>("a", "b", "c", "d"), + new TstUtils.IntGenerator(10, 30))); final TstUtils.ColumnInfo[] rightColumnInfo; final QueryTable rightTable = getTable(rightSize, random, - rightColumnInfo = initColumnInfos(new String[] {"I1", "C1", "C2"}, - new TstUtils.SortedIntGenerator(1, 10000), - new TstUtils.SetGenerator<>("a", "b", "c", "d"), - new TstUtils.IntGenerator(20, 40))); + rightColumnInfo = initColumnInfos(new String[] {"I1", "C1", "C2"}, + new TstUtils.SortedIntGenerator(1, 10000), + new TstUtils.SetGenerator<>("a", "b", "c", "d"), + new TstUtils.IntGenerator(20, 40))); final EvalNuggetInterface[] en = new EvalNuggetInterface[] { new EvalNugget() { @@ -68,15 +67,15 @@ public Table e() { }, new EvalNugget() { public Table e() { - return leftTable.aj(rightTable, "I1", "LI1=I1,LC1=C1,LC2=C2") - .update("I1=I1+0.25", "LI1 = (isNull(LI1) ? null : LI1+0.5)"); + return leftTable.aj(rightTable, "I1", "LI1=I1,LC1=C1,LC2=C2").update("I1=I1+0.25", + "LI1 = (isNull(LI1) ? null : LI1+0.5)"); } }, new TableComparator( - leftTable.aj(rightTable, "I1", "LI1=I1,LC1=C1,LC2=C2").update("I1=I1+0.25", - "LI1 = (isNull(LI1) ? null : LI1+0.5)"), - leftTable.aj(rightTable, "I1<=I1", "LI1=I1,LC1=C1,LC2=C2").update("I1=I1+0.25", - "LI1 = (isNull(LI1) ? null : LI1+0.5)")), + leftTable.aj(rightTable, "I1", "LI1=I1,LC1=C1,LC2=C2").update("I1=I1+0.25", + "LI1 = (isNull(LI1) ? null : LI1+0.5)"), + leftTable.aj(rightTable, "I1<=I1", "LI1=I1,LC1=C1,LC2=C2").update("I1=I1+0.25", + "LI1 = (isNull(LI1) ? 
null : LI1+0.5)")), new EvalNugget() { public Table e() { return leftTable.aj(rightTable, "C1,I1", "LI1=I1,LC1=C1,LC2=C2"); @@ -110,8 +109,8 @@ public Table e() { }, new EvalNugget() { public Table e() { - return leftTable.aj(rightTable, "I1("a", "b"), - new TstUtils.SetGenerator<>(10, 20, 30))); + leftColumnInfo = initColumnInfos(new String[] {"I1", "C1", "C2"}, + new TstUtils.SortedIntGenerator(1, 1000), + new TstUtils.SetGenerator<>("a", "b"), + new TstUtils.SetGenerator<>(10, 20, 30))); final TstUtils.ColumnInfo[] rightColumnInfo; final QueryTable rightTable = getTable(rightSize, random, - rightColumnInfo = initColumnInfos(new String[] {"I1", "C1", "C2"}, - new TstUtils.IntGenerator(1, 1000), - new TstUtils.SetGenerator<>("a", "b", "c"), - new TstUtils.SetGenerator<>(20, 30, 40))); + rightColumnInfo = initColumnInfos(new String[] {"I1", "C1", "C2"}, + new TstUtils.IntGenerator(1, 1000), + new TstUtils.SetGenerator<>("a", "b", "c"), + new TstUtils.SetGenerator<>(20, 30, 40))); final EvalNugget[] en = new EvalNugget[] { new EvalNugget() { @@ -183,8 +181,7 @@ public Table e() { }, new EvalNugget() { public Table e() { - return leftTable.aj(rightTable.sort("I1"), "C1,C2,I1", - "LI1=I1,LC1=C1,LC2=C2"); + return leftTable.aj(rightTable.sort("I1"), "C1,C2,I1", "LI1=I1,LC1=C1,LC2=C2"); } }, new EvalNugget() { @@ -194,48 +191,44 @@ public Table e() { }, new EvalNugget() { public Table e() { - return leftTable.raj(rightTable.sort("I1"), "C1,I1", - "LI1=I1,LC1=C1,LC2=C2"); + return leftTable.raj(rightTable.sort("I1"), "C1,I1", "LI1=I1,LC1=C1,LC2=C2"); } }, new EvalNugget() { public Table e() { - return leftTable.raj(rightTable.sort("I1"), "C1,C2,I1", - "LI1=I1,LC1=C1,LC2=C2"); + return leftTable.raj(rightTable.sort("I1"), "C1,C2,I1", "LI1=I1,LC1=C1,LC2=C2"); } }, }; for (int step = 0; step < maxSteps; step++) { if (printTableUpdates) { - System.out.println("Simple2 Step i = " + step + ", leftSize=" + leftSize - + ", rightSize=" + rightSize); + System.out.println("Simple2 
Step i = " + step + ", leftSize=" + leftSize + ", rightSize=" + rightSize); System.out.println("Left Table:"); showWithIndex(leftTable); System.out.println("Right Table:"); showWithIndex(rightTable); } - joinIncrement.step(leftSize, rightSize, leftTable, rightTable, leftColumnInfo, - rightColumnInfo, en, random); + joinIncrement.step(leftSize, rightSize, leftTable, rightTable, leftColumnInfo, rightColumnInfo, en, random); } } - private void testAjIncremental(int leftSize, int rightSize, JoinIncrement joinIncrement, - long seed, @SuppressWarnings("SameParameterValue") long maxSteps) throws ParseException { + private void testAjIncremental(int leftSize, int rightSize, JoinIncrement joinIncrement, long seed, + @SuppressWarnings("SameParameterValue") long maxSteps) throws ParseException { final Random random = new Random(seed); QueryScope.addParam("f", new SimpleDateFormat("dd HH:mm:ss")); final TstUtils.ColumnInfo[] leftColumnInfo; final QueryTable leftTable = getTable(leftSize, random, - leftColumnInfo = initColumnInfos(new String[] {"Date", "C1", "C2"}, - new TstUtils.DateGenerator(format.parse("2011-02-02"), format.parse("2011-02-03")), - new TstUtils.SetGenerator<>("a", "b"), - new TstUtils.SetGenerator<>(10, 20, 30))); + leftColumnInfo = initColumnInfos(new String[] {"Date", "C1", "C2"}, + new TstUtils.DateGenerator(format.parse("2011-02-02"), format.parse("2011-02-03")), + new TstUtils.SetGenerator<>("a", "b"), + new TstUtils.SetGenerator<>(10, 20, 30))); final TstUtils.ColumnInfo[] rightColumnInfo; final QueryTable rightTable = getTable(rightSize, random, - rightColumnInfo = initColumnInfos(new String[] {"Date", "C1", "C2"}, - new TstUtils.DateGenerator(format.parse("2011-02-02"), format.parse("2011-02-03")), - new TstUtils.SetGenerator<>("a", "b", "c"), - new TstUtils.SetGenerator<>(20, 30, 40))); + rightColumnInfo = initColumnInfos(new String[] {"Date", "C1", "C2"}, + new TstUtils.DateGenerator(format.parse("2011-02-02"), format.parse("2011-02-03")), + new 
TstUtils.SetGenerator<>("a", "b", "c"), + new TstUtils.SetGenerator<>(20, 30, 40))); final EvalNugget[] en = new EvalNugget[] { new EvalNugget() { @@ -245,124 +238,108 @@ public Table e() { }, new EvalNugget() { public Table e() { - return leftTable.aj(rightTable, "Date", "LDate=Date,LC1=C1,LC2=C2").update( - "Date=f.format(Date)", "LDate=isNull(LDate)?null:f.format(LDate)"); + return leftTable.aj(rightTable, "Date", "LDate=Date,LC1=C1,LC2=C2") + .update("Date=f.format(Date)", "LDate=isNull(LDate)?null:f.format(LDate)"); } }, new EvalNugget() { public Table e() { - return leftTable.aj(rightTable.updateView("RIdx=k"), "C1,Date", - "LDate=Date,LC1=C1,LC2=C2,RIdx").update("Date=f.format(Date)", - "LDate=isNull(LDate)?null:f.format(LDate)"); + return leftTable.aj(rightTable.updateView("RIdx=k"), "C1,Date", "LDate=Date,LC1=C1,LC2=C2,RIdx") + .update("Date=f.format(Date)", "LDate=isNull(LDate)?null:f.format(LDate)"); } }, new EvalNugget() { public Table e() { return leftTable.aj(rightTable.updateView("RIdx=k"), "C1,Date", - "LDate=Date,LC1=C1,LC2=C2,RIdx"); + "LDate=Date,LC1=C1,LC2=C2,RIdx"); } }, new EvalNugget() { public Table e() { - return leftTable.aj(rightTable.updateView("RIdx=k").sort("Date"), "C1,Date", - "LDate=Date,LC1=C1,LC2=C2,RIdx").update("Date=f.format(Date)", - "LDate=isNull(LDate)?null:f.format(LDate)"); + return leftTable + .aj(rightTable.updateView("RIdx=k").sort("Date"), "C1,Date", + "LDate=Date,LC1=C1,LC2=C2,RIdx") + .update("Date=f.format(Date)", "LDate=isNull(LDate)?null:f.format(LDate)"); } }, new EvalNugget() { public Table e() { - return leftTable - .aj(rightTable.sort("Date"), "C2,Date", "LDate=Date,LC1=C1,LC2=C2") - .update("Date=f.format(Date)", - "LDate=isNull(LDate)?null:f.format(LDate)"); + return leftTable.aj(rightTable.sort("Date"), "C2,Date", "LDate=Date,LC1=C1,LC2=C2") + .update("Date=f.format(Date)", "LDate=isNull(LDate)?null:f.format(LDate)"); } }, new EvalNugget() { public Table e() { - return leftTable - 
.aj(rightTable.sort("Date"), "C1,C2,Date", "LDate=Date,LC1=C1,LC2=C2") - .update("Date=f.format(Date)", - "LDate=isNull(LDate)?null:f.format(LDate)"); + return leftTable.aj(rightTable.sort("Date"), "C1,C2,Date", "LDate=Date,LC1=C1,LC2=C2") + .update("Date=f.format(Date)", "LDate=isNull(LDate)?null:f.format(LDate)"); } }, new EvalNugget() { public Table e() { - return leftTable.raj(rightTable.sort("Date"), "Date", - "LDate=Date,LC1=C1,LC2=C2"); + return leftTable.raj(rightTable.sort("Date"), "Date", "LDate=Date,LC1=C1,LC2=C2"); } }, new EvalNugget() { public Table e() { - return leftTable - .raj(rightTable.sort("Date"), "Date", "LDate=Date,LC1=C1,LC2=C2") - .update("Date=f.format(Date)", - "LDate=isNull(LDate)?null:f.format(LDate)"); + return leftTable.raj(rightTable.sort("Date"), "Date", "LDate=Date,LC1=C1,LC2=C2") + .update("Date=f.format(Date)", "LDate=isNull(LDate)?null:f.format(LDate)"); } }, new EvalNugget() { public Table e() { - return leftTable.raj(rightTable.sort("Date"), "C1,Date", - "LDate=Date,LC1=C1,LC2=C2"); + return leftTable.raj(rightTable.sort("Date"), "C1,Date", "LDate=Date,LC1=C1,LC2=C2"); } }, new EvalNugget() { public Table e() { - return leftTable - .raj(rightTable.sort("Date"), "C1,Date", "LDate=Date,LC1=C1,LC2=C2") - .update("Date=f.format(Date)", - "LDate=isNull(LDate)?null:f.format(LDate)"); + return leftTable.raj(rightTable.sort("Date"), "C1,Date", "LDate=Date,LC1=C1,LC2=C2") + .update("Date=f.format(Date)", "LDate=isNull(LDate)?null:f.format(LDate)"); } }, new EvalNugget() { public Table e() { - return leftTable - .raj(rightTable.sort("Date"), "C2,Date", "LDate=Date,LC1=C1,LC2=C2") - .update("Date=f.format(Date)", - "LDate=isNull(LDate)?null:f.format(LDate)"); + return leftTable.raj(rightTable.sort("Date"), "C2,Date", "LDate=Date,LC1=C1,LC2=C2") + .update("Date=f.format(Date)", "LDate=isNull(LDate)?null:f.format(LDate)"); } }, new EvalNugget() { public Table e() { - return leftTable - .raj(rightTable.sort("Date"), "C1,C2,Date", 
"LDate=Date,LC1=C1,LC2=C2") - .update("Date=f.format(Date)", - "LDate=isNull(LDate)?null:f.format(LDate)"); + return leftTable.raj(rightTable.sort("Date"), "C1,C2,Date", "LDate=Date,LC1=C1,LC2=C2") + .update("Date=f.format(Date)", "LDate=isNull(LDate)?null:f.format(LDate)"); } }, }; for (int step = 0; step < maxSteps; step++) { - // System.out.println("Date Step = " + step + ", leftSize=" + leftSize + ", rightSize=" - // + rightSize + ", seed = " + seed + ", step=" + joinIncrement); - joinIncrement.step(leftSize, rightSize, leftTable, rightTable, leftColumnInfo, - rightColumnInfo, en, random); + // System.out.println("Date Step = " + step + ", leftSize=" + leftSize + ", rightSize=" + rightSize + ", + // seed = " + seed + ", step=" + joinIncrement); + joinIncrement.step(leftSize, rightSize, leftTable, rightTable, leftColumnInfo, rightColumnInfo, en, random); } } public void testAj() { Table table = testRefreshingTable( - c("Ticker", "AAPL", "IBM", "AAPL"), - c("Timestamp", 1L, 10L, 50L)); + c("Ticker", "AAPL", "IBM", "AAPL"), + c("Timestamp", 1L, 10L, 50L)); Table lookUpValue1 = testRefreshingTable( - c("Timestamp", 1L, 5L, 10L, 25L, 50L), - c("Ticker", "AAPL", "IBM", "AAPL", "IBM", "AAPL"), - c("OptionBid", .1, .2, .3, .4, .5)); + c("Timestamp", 1L, 5L, 10L, 25L, 50L), + c("Ticker", "AAPL", "IBM", "AAPL", "IBM", "AAPL"), + c("OptionBid", .1, .2, .3, .4, .5)); Table result = table.aj(lookUpValue1, "Ticker,Timestamp", "OptionBid"); - assertEquals(Arrays.asList("Ticker", "Timestamp", "OptionBid"), - result.getDefinition().getColumnNames()); + assertEquals(Arrays.asList("Ticker", "Timestamp", "OptionBid"), result.getDefinition().getColumnNames()); table = testRefreshingTable( - c("Timestamp", 1L, 10L, 50L)); + c("Timestamp", 1L, 10L, 50L)); lookUpValue1 = testRefreshingTable( - c("OptionTimestamp", 1L, 5L, 10L, 25L, 50L), - c("OptionBid", .1, .2, .3, .4, .5)); + c("OptionTimestamp", 1L, 5L, 10L, 25L, 50L), + c("OptionBid", .1, .2, .3, .4, .5)); result = 
table.aj(lookUpValue1, "Timestamp=OptionTimestamp", "OptionBid"); assertEquals(long.class, result.getColumn("OptionTimestamp").getType()); table = testRefreshingTable( - c("String", "c", "e", "g"), - c("Int", 2, 4, 6)); + c("String", "c", "e", "g"), + c("Int", 2, 4, 6)); lookUpValue1 = testRefreshingTable(c("indx", "a", "b", "c")); result = lookUpValue1.aj(table, "indx=String", "String,Int"); @@ -372,10 +349,8 @@ public void testAj() { assertEquals("indx", result.getColumns()[0].getName()); assertEquals("String", result.getColumns()[1].getName()); assertEquals("Int", result.getColumns()[2].getName()); - assertEquals(asList(null, null, "c"), - asList((Object[]) result.getColumn("String").getDirect())); - assertEquals(asList("a", "b", "c"), - asList((Object[]) result.getColumn("indx").getDirect())); + assertEquals(asList(null, null, "c"), asList((Object[]) result.getColumn("String").getDirect())); + assertEquals(asList("a", "b", "c"), asList((Object[]) result.getColumn("indx").getDirect())); assertEquals(asList(null, null, 2), asList((Object[]) result.getColumn("Int").get(0, 3))); result = lookUpValue1.aj(table, "indx=String", "Int,String"); @@ -392,10 +367,8 @@ public void testAj() { assertEquals("String", result.getColumns()[1].getName()); assertEquals("Int", result.getColumns()[2].getName()); assertEquals(3, result.getColumns().length); - assertEquals(asList("c", "c", "e"), - asList((Object[]) result.getColumn("String").getDirect())); - assertEquals(asList("c", "d", "e"), - asList((Object[]) result.getColumn("indx").getDirect())); + assertEquals(asList("c", "c", "e"), asList((Object[]) result.getColumn("String").getDirect())); + assertEquals(asList("c", "d", "e"), asList((Object[]) result.getColumn("indx").getDirect())); assertEquals(asList(2, 2, 4), asList((Object[]) result.getColumn("Int").get(0, 3))); lookUpValue1 = testRefreshingTable(c("indx", "h", "e", "a")); @@ -405,10 +378,8 @@ public void testAj() { assertEquals("String", 
result.getColumns()[1].getName()); assertEquals("Int", result.getColumns()[2].getName()); assertEquals(3, result.getColumns().length); - assertEquals(asList("g", "e", null), - asList((Object[]) result.getColumn("String").getDirect())); - assertEquals(asList("h", "e", "a"), - asList((Object[]) result.getColumn("indx").getDirect())); + assertEquals(asList("g", "e", null), asList((Object[]) result.getColumn("String").getDirect())); + assertEquals(asList("h", "e", "a"), asList((Object[]) result.getColumn("indx").getDirect())); assertEquals(asList(6, 4, null), asList((Object[]) result.getColumn("Int").get(0, 3))); @@ -418,10 +389,8 @@ public void testAj() { assertEquals("indx", result.getColumns()[0].getName()); assertEquals("String", result.getColumns()[1].getName()); assertEquals(2, result.getColumns().length); - assertEquals(asList("g", "e", null), - asList((Object[]) result.getColumn("String").getDirect())); - assertEquals(asList("h", "e", "a"), - asList((Object[]) result.getColumn("indx").getDirect())); + assertEquals(asList("g", "e", null), asList((Object[]) result.getColumn("String").getDirect())); + assertEquals(asList("h", "e", "a"), asList((Object[]) result.getColumn("indx").getDirect())); lookUpValue1 = testRefreshingTable(c("String", "h", "e", "a")); result = lookUpValue1.aj(table, "String", "xString=String,Int"); @@ -430,49 +399,45 @@ public void testAj() { assertEquals("xString", result.getColumns()[1].getName()); assertEquals("Int", result.getColumns()[2].getName()); assertEquals(3, result.getColumns().length); - assertEquals(asList("g", "e", null), - asList((Object[]) result.getColumn("xString").getDirect())); - assertEquals(asList("h", "e", "a"), - asList((Object[]) result.getColumn("String").getDirect())); + assertEquals(asList("g", "e", null), asList((Object[]) result.getColumn("xString").getDirect())); + assertEquals(asList("h", "e", "a"), asList((Object[]) result.getColumn("String").getDirect())); assertEquals(asList(6, 4, null), asList((Object[]) 
result.getColumn("Int").get(0, 3))); } public void testAjLt() { Table table = testRefreshingTable( - c("Ticker", "AAPL", "IBM", "AAPL"), - c("Timestamp", 1L, 10L, 50L)); + c("Ticker", "AAPL", "IBM", "AAPL"), + c("Timestamp", 1L, 10L, 50L)); Table lookUpValue1 = testRefreshingTable( - c("Timestamp", 1L, 5L, 10L, 25L, 50L), - c("Ticker", "AAPL", "IBM", "AAPL", "IBM", "AAPL"), - c("OptionBid", .1, .2, .3, .4, .5)); - - Table result = table.aj(lookUpValue1.renameColumns("TS2=Timestamp"), "Ticker,Timestamp table.notifyListeners(i(), i(), i(4, 5))); + LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> table.notifyListeners(i(), i(), i(4, 5))); System.out.println("Finished notifying listeners of modification."); TstUtils.validate(en); @@ -584,12 +537,12 @@ public Table e() { public void testAjNull() { final QueryTable left = TstUtils.testRefreshingTable(i(1, 2, 3, 4), - c("LInt", 2, 4, 6, 8), - c("LSentinel", "a", "b", "c", "d")); + c("LInt", 2, 4, 6, 8), + c("LSentinel", "a", "b", "c", "d")); final QueryTable right = TstUtils.testRefreshingTable(i(1, 2, 3, 4, 5, 6, 7, 8), - c("RInt", null, null, 3, 4, 5, 6, 7, 8), - c("RSentinel", "C1", "E2", "A3", "D4", "F5", "G6", "I7", "H8")); + c("RInt", null, null, 3, 4, 5, 6, 7, 8), + c("RSentinel", "C1", "E2", "A3", "D4", "F5", "G6", "I7", "H8")); System.out.println("Left:"); TableTools.show(left); @@ -600,28 +553,27 @@ public void testAjNull() { System.out.println("AJ:"); TableTools.show(aj); - assertEquals(asList("E2", "D4", "G6", "H8"), - asList((Object[]) aj.getColumn("RSentinel").getDirect())); + assertEquals(asList("E2", "D4", "G6", "H8"), asList((Object[]) aj.getColumn("RSentinel").getDirect())); System.out.println("AJ2:"); // let's swap the left and right final Table aj2 = right.sort("RSentinel").aj(left, "RInt=LInt", "LInt,LSentinel"); TableTools.show(aj2); assertEquals(asList("a", null, "b", null, "b", "c", "d", "c"), - asList((Object[]) aj2.getColumn("LSentinel").getDirect())); + asList((Object[]) 
aj2.getColumn("LSentinel").getDirect())); } public void testAjEmptyRight() { final QueryTable left = TstUtils.testRefreshingTable(i(1, 2, 3, 4), - c("Group", "g", "g", "g", "g"), - c("LInt", 2, 4, 6, 8), - c("LSentinel", "a", "b", "c", "d")); + c("Group", "g", "g", "g", "g"), + c("LInt", 2, 4, 6, 8), + c("LSentinel", "a", "b", "c", "d")); final QueryTable right = TstUtils.testRefreshingTable(i(), - col("Group", CollectionUtil.ZERO_LENGTH_STRING_ARRAY), - intCol("RInt"), - c("RSentinel")); + col("Group", CollectionUtil.ZERO_LENGTH_STRING_ARRAY), + intCol("RInt"), + c("RSentinel")); System.out.println("Left:"); TableTools.show(left); @@ -632,8 +584,7 @@ public void testAjEmptyRight() { System.out.println("AJ:"); TableTools.show(aj); - assertEquals(asList(null, null, null, null), - asList((Object[]) aj.getColumn("RSentinel").getDirect())); + assertEquals(asList(null, null, null, null), asList((Object[]) aj.getColumn("RSentinel").getDirect())); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { addToTable(left, i(2), c("Group", "h"), c("LInt", 4), c("LSentinel", "b")); @@ -641,46 +592,41 @@ public void testAjEmptyRight() { }); TableTools.show(aj); - assertEquals(asList(null, null, null, null), - asList((Object[]) aj.getColumn("RSentinel").getDirect())); + assertEquals(asList(null, null, null, null), asList((Object[]) aj.getColumn("RSentinel").getDirect())); } public void testRaj() { Table table = testRefreshingTable( - c("Ticker", "AAPL", "IBM", "AAPL"), - c("Timestamp", 1L, 10L, 50L)); + c("Ticker", "AAPL", "IBM", "AAPL"), + c("Timestamp", 1L, 10L, 50L)); Table lookUpValue1 = testRefreshingTable( - c("Timestamp", 1L, 5L, 10L, 25L, 50L), - c("Ticker", "AAPL", "IBM", "AAPL", "IBM", "AAPL"), - c("OptionBid", .1, .2, .3, .4, .5)); + c("Timestamp", 1L, 5L, 10L, 25L, 50L), + c("Ticker", "AAPL", "IBM", "AAPL", "IBM", "AAPL"), + c("OptionBid", .1, .2, .3, .4, .5)); Table result = table.raj(lookUpValue1, "Ticker,Timestamp", "OptionBid"); show(result, 10); - 
assertEquals(Arrays.asList("Ticker", "Timestamp", "OptionBid"), - result.getDefinition().getColumnNames()); + assertEquals(Arrays.asList("Ticker", "Timestamp", "OptionBid"), result.getDefinition().getColumnNames()); assertEquals(3, result.size()); assertEquals("Ticker", result.getColumns()[0].getName()); assertEquals("Timestamp", result.getColumns()[1].getName()); assertEquals("OptionBid", result.getColumns()[2].getName()); - assertEquals(asList("AAPL", "IBM", "AAPL"), - asList((Object[]) result.getColumn("Ticker").getDirect())); - assertEquals(asList(1L, 10L, 50L), - asList((Object[]) result.getColumn("Timestamp").get(0, 3))); - assertEquals(asList(.1, .4, .5), - asList((Object[]) result.getColumn("OptionBid").get(0, 3))); + assertEquals(asList("AAPL", "IBM", "AAPL"), asList((Object[]) result.getColumn("Ticker").getDirect())); + assertEquals(asList(1L, 10L, 50L), asList((Object[]) result.getColumn("Timestamp").get(0, 3))); + assertEquals(asList(.1, .4, .5), asList((Object[]) result.getColumn("OptionBid").get(0, 3))); table = testRefreshingTable( - c("Timestamp", 1L, 10L, 50L)); + c("Timestamp", 1L, 10L, 50L)); lookUpValue1 = testRefreshingTable( - c("OptionTimestamp", 1L, 5L, 10L, 25L, 50L), - c("OptionBid", .1, .2, .3, .4, .5)); + c("OptionTimestamp", 1L, 5L, 10L, 25L, 50L), + c("OptionBid", .1, .2, .3, .4, .5)); result = table.raj(lookUpValue1, "Timestamp=OptionTimestamp", "OptionBid"); assertEquals(long.class, result.getColumn("OptionTimestamp").getType()); table = testRefreshingTable( - c("String", "c", "e", "g"), - c("Int", 2, 4, 6)); + c("String", "c", "e", "g"), + c("Int", 2, 4, 6)); lookUpValue1 = testRefreshingTable(c("indx", "a", "b", "c")); result = lookUpValue1.raj(table, "indx=String", "String,Int"); @@ -690,10 +636,8 @@ public void testRaj() { assertEquals("indx", result.getColumns()[0].getName()); assertEquals("String", result.getColumns()[1].getName()); assertEquals("Int", result.getColumns()[2].getName()); - assertEquals(asList("c", "c", "c"), - 
asList((Object[]) result.getColumn("String").getDirect())); - assertEquals(asList("a", "b", "c"), - asList((Object[]) result.getColumn("indx").getDirect())); + assertEquals(asList("c", "c", "c"), asList((Object[]) result.getColumn("String").getDirect())); + assertEquals(asList("a", "b", "c"), asList((Object[]) result.getColumn("indx").getDirect())); assertEquals(asList(2, 2, 2), asList((Object[]) result.getColumn("Int").get(0, 3))); lookUpValue1 = testRefreshingTable(c("indx", "f", "g", "h")); @@ -705,10 +649,8 @@ public void testRaj() { assertEquals("indx", result.getColumns()[0].getName()); assertEquals("String", result.getColumns()[1].getName()); assertEquals("Int", result.getColumns()[2].getName()); - assertEquals(asList("g", "g", null), - asList((Object[]) result.getColumn("String").getDirect())); - assertEquals(asList("f", "g", "h"), - asList((Object[]) result.getColumn("indx").getDirect())); + assertEquals(asList("g", "g", null), asList((Object[]) result.getColumn("String").getDirect())); + assertEquals(asList("f", "g", "h"), asList((Object[]) result.getColumn("indx").getDirect())); assertEquals(asList(6, 6, null), asList((Object[]) result.getColumn("Int").get(0, 3))); result = lookUpValue1.raj(table, "indx=String", "Int,String"); @@ -731,10 +673,8 @@ public void testRaj() { assertEquals("String", result.getColumns()[1].getName()); assertEquals("Int", result.getColumns()[2].getName()); assertEquals(3, result.getColumns().length); - assertEquals(asList("c", "e", "e"), - asList((Object[]) result.getColumn("String").getDirect())); - assertEquals(asList("c", "d", "e"), - asList((Object[]) result.getColumn("indx").getDirect())); + assertEquals(asList("c", "e", "e"), asList((Object[]) result.getColumn("String").getDirect())); + assertEquals(asList("c", "d", "e"), asList((Object[]) result.getColumn("indx").getDirect())); assertEquals(asList(2, 4, 4), asList((Object[]) result.getColumn("Int").get(0, 3))); lookUpValue1 = testRefreshingTable(c("indx", "j", "e", "a")); 
@@ -744,10 +684,8 @@ public void testRaj() { assertEquals("String", result.getColumns()[1].getName()); assertEquals("Int", result.getColumns()[2].getName()); assertEquals(3, result.getColumns().length); - assertEquals(asList(null, "e", "c"), - asList((Object[]) result.getColumn("String").getDirect())); - assertEquals(asList("j", "e", "a"), - asList((Object[]) result.getColumn("indx").getDirect())); + assertEquals(asList(null, "e", "c"), asList((Object[]) result.getColumn("String").getDirect())); + assertEquals(asList("j", "e", "a"), asList((Object[]) result.getColumn("indx").getDirect())); assertEquals(asList(null, 4, 2), asList((Object[]) result.getColumn("Int").get(0, 3))); @@ -757,10 +695,8 @@ public void testRaj() { assertEquals("indx", result.getColumns()[0].getName()); assertEquals("String", result.getColumns()[1].getName()); assertEquals(2, result.getColumns().length); - assertEquals(asList(null, "e", "c"), - asList((Object[]) result.getColumn("String").getDirect())); - assertEquals(asList("j", "e", "a"), - asList((Object[]) result.getColumn("indx").getDirect())); + assertEquals(asList(null, "e", "c"), asList((Object[]) result.getColumn("String").getDirect())); + assertEquals(asList("j", "e", "a"), asList((Object[]) result.getColumn("indx").getDirect())); lookUpValue1 = testRefreshingTable(c("String", "j", "e", "a")); result = lookUpValue1.raj(table, "String", "xString=String,Int"); @@ -769,10 +705,8 @@ public void testRaj() { assertEquals("xString", result.getColumns()[1].getName()); assertEquals("Int", result.getColumns()[2].getName()); assertEquals(3, result.getColumns().length); - assertEquals(asList(null, "e", "c"), - asList((Object[]) result.getColumn("xString").getDirect())); - assertEquals(asList("j", "e", "a"), - asList((Object[]) result.getColumn("String").getDirect())); + assertEquals(asList(null, "e", "c"), asList((Object[]) result.getColumn("xString").getDirect())); + assertEquals(asList("j", "e", "a"), asList((Object[]) 
result.getColumn("String").getDirect())); assertEquals(asList(null, 4, 2), asList((Object[]) result.getColumn("Int").get(0, 3))); } @@ -835,23 +769,22 @@ int tableSizeForLeftBuild(Table leftTable) { public void testAjRegression0() { - final QueryTable rightQueryTable = TstUtils.testRefreshingTable( - i(28, 36, 39, 42, 46, 49, 50, 51, 55, 56, 58, 64, 65, 66, 92, 96), - c("C1", "a", "a", "c", "a", "b", "a", "c", "b", "c", "a", "a", "c", "c", "a", "c", "c"), - c("I1", 168, 851, 255, 142, 884, 841, 877, 248, 191, 207, 163, 250, 982, 432, 466, 139), - c("Sentinel", 10, 20, 30, 40, 50, 60, 70, 80, 90, 100, 110, 120, 130, 140, 150, 160)); + final QueryTable rightQueryTable = + TstUtils.testRefreshingTable(i(28, 36, 39, 42, 46, 49, 50, 51, 55, 56, 58, 64, 65, 66, 92, 96), + c("C1", "a", "a", "c", "a", "b", "a", "c", "b", "c", "a", "a", "c", "c", "a", "c", "c"), + c("I1", 168, 851, 255, 142, 884, 841, 877, 248, 191, 207, 163, 250, 982, 432, 466, 139), + c("Sentinel", 10, 20, 30, 40, 50, 60, 70, 80, 90, 100, 110, 120, 130, 140, 150, 160)); final QueryTable leftQueryTable = TstUtils.testRefreshingTable(i(10, 11, 12, 14, 16, 22), - c("C1", "b", "a", "a", "a", "a", "b"), - c("I1", 78, 85, 96, 263, 474, 876)); + c("C1", "b", "a", "a", "a", "a", "b"), + c("I1", 78, 85, 96, 263, 474, 876)); TableTools.showWithIndex(leftQueryTable); TableTools.showWithIndex(rightQueryTable); final Table sortedRightQueryTable = rightQueryTable.sort("I1"); TableTools.showWithIndex(sortedRightQueryTable); - final Table result = - leftQueryTable.aj(sortedRightQueryTable, "C1,I1", "LI1=I1,LC1=C1,Sentinel"); + final Table result = leftQueryTable.aj(sortedRightQueryTable, "C1,I1", "LI1=I1,LC1=C1,Sentinel"); TableTools.showWithIndex(result); assertEquals(100, result.getColumn("Sentinel").get(3)); @@ -862,22 +795,21 @@ public void testAjRegression0() { public void testAjRegression1() { final QueryTable rightQueryTable = - TstUtils.testRefreshingTable(i(1, 27, 28, 35, 41, 46, 49, 50, 51, 55, 56, 65), - 
c("C1", "b", "c", "b", "b", "c", "b", "a", "b", "c", "c", "c", "b"), - c("I1", 591, 5, 952, 43, 102, 18, 475, 821, 676, 191, 657, 982), - c("Sentinel", 10, 20, 30, 40, 50, 60, 70, 80, 90, 100, 110, 120)); + TstUtils.testRefreshingTable(i(1, 27, 28, 35, 41, 46, 49, 50, 51, 55, 56, 65), + c("C1", "b", "c", "b", "b", "c", "b", "a", "b", "c", "c", "c", "b"), + c("I1", 591, 5, 952, 43, 102, 18, 475, 821, 676, 191, 657, 982), + c("Sentinel", 10, 20, 30, 40, 50, 60, 70, 80, 90, 100, 110, 120)); final QueryTable leftQueryTable = TstUtils.testRefreshingTable(i(10, 11, 12, 14, 16, 22), - c("C1", "b", "a", "a", "a", "a", "b"), - c("I1", 78, 85, 96, 263, 474, 876)); + c("C1", "b", "a", "a", "a", "a", "b"), + c("I1", 78, 85, 96, 263, 474, 876)); TableTools.showWithIndex(leftQueryTable); TableTools.showWithIndex(rightQueryTable); final Table sortedRightQueryTable = rightQueryTable.sort("I1"); TableTools.showWithIndex(sortedRightQueryTable); - final Table result = - leftQueryTable.aj(sortedRightQueryTable, "C1,I1", "LI1=I1,LC1=C1,Sentinel"); + final Table result = leftQueryTable.aj(sortedRightQueryTable, "C1,I1", "LI1=I1,LC1=C1,Sentinel"); TableTools.showWithIndex(result); assertEquals(80, result.getColumn("Sentinel").get(5)); @@ -894,10 +826,8 @@ public void testJoin() { assertEquals(2, result.getColumns().length); assertEquals("X", result.getColumns()[0].getName()); assertEquals("Y", result.getColumns()[1].getName()); - assertEquals(Arrays.asList("a", "a", "b", "b", "c", "c"), - Arrays.asList(result.getColumn("X").get(0, 6))); - assertEquals(Arrays.asList("x", "y", "x", "y", "x", "y"), - Arrays.asList(result.getColumn("Y").get(0, 6))); + assertEquals(Arrays.asList("a", "a", "b", "b", "c", "c"), Arrays.asList(result.getColumn("X").get(0, 6))); + assertEquals(Arrays.asList("x", "y", "x", "y", "x", "y"), Arrays.asList(result.getColumn("Y").get(0, 6))); lTable = testRefreshingTable(c("X", "a", "b", "c")); rTable = testRefreshingTable(c("Y", "a", "b", "b"), c("Z", 1, 2, 3)); @@ 
-934,7 +864,7 @@ public void testJoin() { public void testLeftJoin() { Table table1 = newTable( - c("String", "c", "e", "g")); + c("String", "c", "e", "g")); Table table2 = newTable(c("String", "c", "e"), c("v", 1, 2), c("u", 3.0d, 4.0d)); try { table1.leftJoin(table2); @@ -958,8 +888,7 @@ public void testLeftJoin() { assertEquals(String.class, pairMatch.getColumns()[0].getType()); assertEquals(DbIntArray.class, pairMatch.getColumns()[1].getType()); assertEquals(DbDoubleArray.class, pairMatch.getColumns()[2].getType()); - assertEquals(asList("c", "e", "g"), - asList((Object[]) pairMatch.getColumns()[0].getDirect())); + assertEquals(asList("c", "e", "g"), asList((Object[]) pairMatch.getColumns()[0].getDirect())); DbIntArray[] vValues = (DbIntArray[]) pairMatch.getColumn("v").getDirect(); assertEquals(1, vValues[0].get(0)); assertEquals(2, vValues[1].get(0)); @@ -980,8 +909,7 @@ public void testLeftJoin() { assertEquals("v", pairMatch.getColumns()[1].getName()); assertEquals(String.class, pairMatch.getColumns()[0].getType()); assertEquals(DbIntArray.class, pairMatch.getColumns()[1].getType()); - assertEquals(asList("c", "e", "g"), - asList((Object[]) pairMatch.getColumns()[0].getDirect())); + assertEquals(asList("c", "e", "g"), asList((Object[]) pairMatch.getColumns()[0].getDirect())); vValues = (DbIntArray[]) pairMatch.getColumn("v").getDirect(); assertEquals(1, vValues[0].get(0)); assertEquals(2, vValues[1].get(0)); @@ -998,8 +926,7 @@ public void testLeftJoin() { assertEquals(String.class, pairMatch.getColumns()[0].getType()); assertEquals(DbDoubleArray.class, pairMatch.getColumns()[1].getType()); assertEquals(DbIntArray.class, pairMatch.getColumns()[2].getType()); - assertEquals(asList("c", "e", "g"), - asList((Object[]) pairMatch.getColumns()[0].getDirect())); + assertEquals(asList("c", "e", "g"), asList((Object[]) pairMatch.getColumns()[0].getDirect())); vValues = (DbIntArray[]) pairMatch.getColumn("v").getDirect(); assertEquals(1, vValues[0].get(0)); 
assertEquals(2, vValues[1].get(0)); @@ -1033,8 +960,7 @@ public void testLeftJoin() { assertEquals("v", pairMatch.getColumns()[1].getName()); assertEquals(String.class, pairMatch.getColumns()[0].getType()); assertEquals(DbIntArray.class, pairMatch.getColumns()[1].getType()); - assertEquals(asList("c", "e", "g"), - asList((Object[]) pairMatch.getColumns()[0].getDirect())); + assertEquals(asList("c", "e", "g"), asList((Object[]) pairMatch.getColumns()[0].getDirect())); vValues = (DbIntArray[]) pairMatch.getColumn("v").getDirect(); assertEquals(1, vValues[0].get(0)); assertEquals(2, vValues[1].get(0)); @@ -1056,10 +982,10 @@ public void testLeftJoin() { table1 = io.deephaven.db.tables.utils.TableTools.newTable( - c("String1", "c", "e", "g")); + c("String1", "c", "e", "g")); table2 = io.deephaven.db.tables.utils.TableTools.newTable( - c("String2", "c", "e"), c("v", 1, 2)); + c("String2", "c", "e"), c("v", 1, 2)); final Table noPairMatch = table1.leftJoin(table2); assertEquals(3, noPairMatch.size()); @@ -1070,11 +996,9 @@ public void testLeftJoin() { assertEquals(String.class, noPairMatch.getColumns()[0].getType()); assertEquals(DbArray.class, noPairMatch.getColumns()[1].getType()); assertEquals(DbIntArray.class, noPairMatch.getColumns()[2].getType()); - assertEquals(asList("c", "e", "g"), - asList((Object[]) noPairMatch.getColumns()[0].getDirect())); + assertEquals(asList("c", "e", "g"), asList((Object[]) noPairMatch.getColumns()[0].getDirect())); // noinspection unchecked - final DbArray[] aggregateString = - (DbArray[]) noPairMatch.getColumn("String2").getDirect(); + final DbArray[] aggregateString = (DbArray[]) noPairMatch.getColumn("String2").getDirect(); assertEquals(asList("c", "e"), asList(aggregateString[0].toArray())); assertEquals(asList("c", "e"), asList(aggregateString[1].toArray())); assertEquals(asList("c", "e"), asList(aggregateString[2].toArray())); @@ -1092,8 +1016,7 @@ public void testLeftJoin() { assertEquals(String.class, 
pairMatch.getColumns()[0].getType()); assertEquals(String.class, pairMatch.getColumns()[1].getType()); assertEquals(DbIntArray.class, pairMatch.getColumns()[2].getType()); - assertEquals(asList("c", "e", "g"), - asList((Object[]) pairMatch.getColumns()[0].getDirect())); + assertEquals(asList("c", "e", "g"), asList((Object[]) pairMatch.getColumns()[0].getDirect())); final String[] stringColumn = (String[]) pairMatch.getColumn("String2").getDirect(); assertEquals("c", stringColumn[0]); @@ -1120,8 +1043,7 @@ public void testLeftJoin() { assertEquals(asList("c", "e"), asList((Object[]) pairMatch.getColumns()[0].getDirect())); assertEquals(1, pairMatch.getColumn("v").getInt(0)); assertEquals(2, pairMatch.getColumn("v").getInt(1)); - assertEquals(asList("c", "e"), - asList((String[]) pairMatch.getColumn("String1").getDirect())); + assertEquals(asList("c", "e"), asList((String[]) pairMatch.getColumn("String1").getDirect())); } } diff --git a/DB/src/test/java/io/deephaven/db/v2/QueryTableNaturalJoinTest.java b/DB/src/test/java/io/deephaven/db/v2/QueryTableNaturalJoinTest.java index b0607a9982a..7933f52c62e 100644 --- a/DB/src/test/java/io/deephaven/db/v2/QueryTableNaturalJoinTest.java +++ b/DB/src/test/java/io/deephaven/db/v2/QueryTableNaturalJoinTest.java @@ -49,16 +49,15 @@ public void testNaturalJoinRehash() { int offset = 0; fillRehashKeys(offset, leftJoinKey, leftSentinel, rightJoinKey, rightSentinel); - final QueryTable leftTable = TstUtils.testRefreshingTable(stringCol("JoinKey", leftJoinKey), - intCol("LeftSentinel", leftSentinel)); - final QueryTable rightTable = TstUtils.testRefreshingTable( - stringCol("JoinKey", rightJoinKey), intCol("RightSentinel", rightSentinel)); + final QueryTable leftTable = + TstUtils.testRefreshingTable(stringCol("JoinKey", leftJoinKey), intCol("LeftSentinel", leftSentinel)); + final QueryTable rightTable = TstUtils.testRefreshingTable(stringCol("JoinKey", rightJoinKey), + intCol("RightSentinel", rightSentinel)); final EvalNugget[] en = 
new EvalNugget[] { new EvalNugget() { public Table e() { - return leftTable.naturalJoin(rightTable, "JoinKey", - "RJK=JoinKey,RightSentinel"); + return leftTable.naturalJoin(rightTable, "JoinKey", "RJK=JoinKey,RightSentinel"); } }, }; @@ -71,23 +70,21 @@ public Table e() { for (int step = 0; step < 40; step++) { - System.out.println("Step = " + step + ", leftSize=" + leftTable.size() + ", rightSize=" - + rightTable.size()); + System.out + .println("Step = " + step + ", leftSize=" + leftTable.size() + ", rightSize=" + rightTable.size()); offset += leftJoinKey.length; fillRehashKeys(offset, leftJoinKey, leftSentinel, rightJoinKey, rightSentinel); final int foffset = offset; LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - final Index addIndex = - Index.FACTORY.getIndexByRange(foffset, foffset + leftJoinKey.length - 1); + final Index addIndex = Index.FACTORY.getIndexByRange(foffset, foffset + leftJoinKey.length - 1); addToTable(leftTable, addIndex, stringCol("JoinKey", leftJoinKey), - intCol("LeftSentinel", leftSentinel)); + intCol("LeftSentinel", leftSentinel)); leftTable.notifyListeners(addIndex, i(), i()); - final Index.SequentialBuilder modIndexBuilder = - Index.FACTORY.getSequentialBuilder(); + final Index.SequentialBuilder modIndexBuilder = Index.FACTORY.getSequentialBuilder(); int slot = random.nextInt(foffset / 100); for (int ii = 0; ii < 100; ++ii) { @@ -117,17 +114,17 @@ public Table e() { }); addToTable(rightTable, addIndex, stringCol("JoinKey", rightJoinKey), - intCol("RightSentinel", rightSentinel)); + intCol("RightSentinel", rightSentinel)); addToTable(rightTable, modIndex, stringCol("JoinKey", rightModifications), - intCol("RightSentinel", rightModifySentinel)); + intCol("RightSentinel", rightModifySentinel)); rightTable.notifyListeners(addIndex, i(), modIndex); }); TstUtils.validate(en); } } - private void fillRehashKeys(int offset, String[] leftJoinKey, int[] leftSentinel, - String[] rightJoinKey, int[] rightSentinel) { + private void 
fillRehashKeys(int offset, String[] leftJoinKey, int[] leftSentinel, String[] rightJoinKey, + int[] rightSentinel) { for (int ii = 0; ii < leftJoinKey.length; ii++) { final int iio = ii + offset; leftJoinKey[ii] = Integer.toString(iio * 10); @@ -157,8 +154,7 @@ public void testNaturalJoinIncremental() { for (int leftSize : leftSizes) { for (int rightSize : rightSizes) { for (JoinIncrement joinIncrement : joinIncrementorsShift) { - testNaturalJoinIncremental(false, false, leftSize, rightSize, joinIncrement, - seed, maxSteps); + testNaturalJoinIncremental(false, false, leftSize, rightSize, joinIncrement, seed, maxSteps); } } } @@ -174,10 +170,9 @@ public void testNaturalJoinIncrementalOverflow() { for (long seed = 0; seed < 5; seed++) { for (int leftSize : leftSizes) { for (int rightSize : rightSizes) { - for (JoinIncrement joinIncrement : new JoinIncrement[] { - leftRightConcurrentStepShift}) { - testNaturalJoinIncremental(false, false, leftSize, rightSize, joinIncrement, - seed, maxSteps, QueryTableJoinTest.HIGH_LOAD_FACTOR_CONTROL); + for (JoinIncrement joinIncrement : new JoinIncrement[] {leftRightConcurrentStepShift}) { + testNaturalJoinIncremental(false, false, leftSize, rightSize, joinIncrement, seed, maxSteps, + QueryTableJoinTest.HIGH_LOAD_FACTOR_CONTROL); } } } @@ -195,8 +190,7 @@ public void testNaturalJoinLeftIncrementalRightStatic() { for (long seed = 0; seed < 1; seed++) { for (int leftSize : leftSizes) { for (int rightSize : rightSizes) { - testNaturalJoinIncremental(false, true, leftSize, rightSize, joinIncrement, - seed, maxSteps); + testNaturalJoinIncremental(false, true, leftSize, rightSize, joinIncrement, seed, maxSteps); } } } @@ -214,88 +208,83 @@ public void testNaturalJoinLeftStaticRightIncremental() { for (long seed = 0; seed < 5; seed++) { for (int leftSize : leftSizes) { for (int rightSize : rightSizes) { - testNaturalJoinIncremental(true, false, leftSize, rightSize, joinIncrement, - seed, maxSteps); + testNaturalJoinIncremental(true, 
false, leftSize, rightSize, joinIncrement, seed, maxSteps); } } } } } - private void testNaturalJoinIncremental(boolean leftStatic, boolean rightStatic, int leftSize, - int rightSize, JoinIncrement joinIncrement, long seed, long maxSteps) { - testNaturalJoinIncremental(leftStatic, rightStatic, leftSize, rightSize, joinIncrement, - seed, new MutableInt((int) maxSteps)); + private void testNaturalJoinIncremental(boolean leftStatic, boolean rightStatic, int leftSize, int rightSize, + JoinIncrement joinIncrement, long seed, long maxSteps) { + testNaturalJoinIncremental(leftStatic, rightStatic, leftSize, rightSize, joinIncrement, seed, + new MutableInt((int) maxSteps)); } - private void testNaturalJoinIncremental(boolean leftStatic, boolean rightStatic, int leftSize, - int rightSize, JoinIncrement joinIncrement, long seed, MutableInt numSteps) { - testNaturalJoinIncremental(leftStatic, rightStatic, leftSize, rightSize, joinIncrement, - seed, numSteps, new JoinControl()); + private void testNaturalJoinIncremental(boolean leftStatic, boolean rightStatic, int leftSize, int rightSize, + JoinIncrement joinIncrement, long seed, MutableInt numSteps) { + testNaturalJoinIncremental(leftStatic, rightStatic, leftSize, rightSize, joinIncrement, seed, numSteps, + new JoinControl()); } - private static void testNaturalJoinIncremental(boolean leftStatic, boolean rightStatic, - int leftSize, int rightSize, JoinIncrement joinIncrement, long seed, long maxSteps, - JoinControl control) { - testNaturalJoinIncremental(leftStatic, rightStatic, leftSize, rightSize, joinIncrement, - seed, new MutableInt((int) maxSteps), control); + private static void testNaturalJoinIncremental(boolean leftStatic, boolean rightStatic, int leftSize, int rightSize, + JoinIncrement joinIncrement, long seed, long maxSteps, JoinControl control) { + testNaturalJoinIncremental(leftStatic, rightStatic, leftSize, rightSize, joinIncrement, seed, + new MutableInt((int) maxSteps), control); } - private static void 
testNaturalJoinIncremental(boolean leftStatic, boolean rightStatic, - int leftSize, int rightSize, JoinIncrement joinIncrement, long seed, MutableInt numSteps, - JoinControl control) { + private static void testNaturalJoinIncremental(boolean leftStatic, boolean rightStatic, int leftSize, int rightSize, + JoinIncrement joinIncrement, long seed, MutableInt numSteps, JoinControl control) { final Random random = new Random(seed); final int maxSteps = numSteps.intValue(); final Logger log = new StreamLoggerImpl(); final TstUtils.ColumnInfo[] rightColumnInfo; final TstUtils.UniqueIntGenerator rightIntGenerator = - new TstUtils.UniqueIntGenerator(1, rightSize * (rightStatic ? 2 : 4)); + new TstUtils.UniqueIntGenerator(1, rightSize * (rightStatic ? 2 : 4)); final TstUtils.UniqueIntGenerator rightInt2Generator = - new TstUtils.UniqueIntGenerator(1, rightSize * (rightStatic ? 2 : 4)); + new TstUtils.UniqueIntGenerator(1, rightSize * (rightStatic ? 2 : 4)); final TstUtils.IntGenerator duplicateGenerator = new TstUtils.IntGenerator(100000, 100010); final List> generatorList = - Arrays.asList(rightIntGenerator, duplicateGenerator); + Arrays.asList(rightIntGenerator, duplicateGenerator); final TstUtils.Generator compositeGenerator = - new TstUtils.CompositeGenerator<>(generatorList, 0.9); + new TstUtils.CompositeGenerator<>(generatorList, 0.9); final QueryTable rightTable = getTable(!rightStatic, rightSize, random, - rightColumnInfo = initColumnInfos(new String[] {"I1", "C1", "C2"}, - compositeGenerator, - new SetGenerator<>("a", "b"), - rightInt2Generator)); + rightColumnInfo = initColumnInfos(new String[] {"I1", "C1", "C2"}, + compositeGenerator, + new SetGenerator<>("a", "b"), + rightInt2Generator)); final ColumnInfo[] leftColumnInfo; final QueryTable leftTable = getTable(!leftStatic, leftSize, random, - leftColumnInfo = initColumnInfos(new String[] {"I1", "C1", "C2"}, - new FromUniqueIntGenerator(rightIntGenerator, new IntGenerator(20, 10000), 0.75), - new 
SetGenerator<>("a", "b", "c"), - new FromUniqueIntGenerator(rightInt2Generator, new IntGenerator(20, 10000), 0.75))); + leftColumnInfo = initColumnInfos(new String[] {"I1", "C1", "C2"}, + new FromUniqueIntGenerator(rightIntGenerator, new IntGenerator(20, 10000), 0.75), + new SetGenerator<>("a", "b", "c"), + new FromUniqueIntGenerator(rightInt2Generator, new IntGenerator(20, 10000), 0.75))); final EvalNugget[] en = new EvalNugget[] { new EvalNugget() { public Table e() { return NaturalJoinHelper.naturalJoin(leftTable, rightTable, - MatchPairFactory.getExpressions("I1"), - MatchPairFactory.getExpressions("LI1=I1", "LC1=C1", "LC2=C2"), false, - control); + MatchPairFactory.getExpressions("I1"), + MatchPairFactory.getExpressions("LI1=I1", "LC1=C1", "LC2=C2"), false, control); } }, new EvalNugget() { public Table e() { return NaturalJoinHelper.naturalJoin(leftTable, rightTable, - MatchPairFactory.getExpressions("C1", "I1"), - MatchPairFactory.getExpressions("LC2=C2"), false, control); + MatchPairFactory.getExpressions("C1", "I1"), MatchPairFactory.getExpressions("LC2=C2"), + false, control); } }, new EvalNugget() { public Table e() { - return NaturalJoinHelper.naturalJoin(leftTable, - (QueryTable) rightTable.update("Exists=true"), - MatchPairFactory.getExpressions("C1", "C2", "I1"), - MatchPairFactory.getExpressions("Exists"), false, control); + return NaturalJoinHelper.naturalJoin(leftTable, (QueryTable) rightTable.update("Exists=true"), + MatchPairFactory.getExpressions("C1", "C2", "I1"), + MatchPairFactory.getExpressions("Exists"), false, control); } }, }; @@ -311,16 +300,15 @@ public Table e() { for (numSteps.setValue(0); numSteps.intValue() < maxSteps; numSteps.increment()) { if (printTableUpdates) { - System.out.println( - "Step = " + numSteps.intValue() + ", leftSize=" + leftSize + ", rightSize=" + System.out.println("Step = " + numSteps.intValue() + ", leftSize=" + leftSize + ", rightSize=" + rightSize + ", seed = " + seed + ", joinIncrement=" + joinIncrement); 
System.out.println("Left Table:" + leftTable.size()); showWithIndex(leftTable, 100); System.out.println("Right Table:" + rightTable.size()); showWithIndex(rightTable, 100); } - joinIncrement.step(leftStepSize, rightStepSize, leftTable, rightTable, leftColumnInfo, - rightColumnInfo, en, random); + joinIncrement.step(leftStepSize, rightStepSize, leftTable, rightTable, leftColumnInfo, rightColumnInfo, en, + random); } } @@ -330,21 +318,19 @@ public void testNaturalJoinMixedGrouping() { testNaturalJoinMixedGroupingLeftStatic(10000, 10000, 1, 10); } - private void testNaturalJoinMixedGroupingLeftStatic(int leftSize, int rightSize, long seed, - int steps) { + private void testNaturalJoinMixedGroupingLeftStatic(int leftSize, int rightSize, long seed, int steps) { final Random random = new Random(seed); - final QueryTable leftTable = getTable(false, leftSize, random, - initColumnInfos(new String[] {"I1", "C1", "C2"}, + final QueryTable leftTable = getTable(false, leftSize, random, initColumnInfos(new String[] {"I1", "C1", "C2"}, new ColumnInfo.ColAttributes[] {ColumnInfo.ColAttributes.Grouped}, new IntGenerator(1, rightSize * 10), new SetGenerator<>("a", "b", "c", "d", "e", "f"), new IntGenerator(1, 10))); final ColumnInfo[] rightColumnInfos = initColumnInfos(new String[] {"I1", "C1", "C2"}, - new ColumnInfo.ColAttributes[] {}, - new UniqueIntGenerator(1, rightSize * 10), - new SetGenerator<>("a", "b", "c", "d", "e"), - new IntGenerator(1, 10)); + new ColumnInfo.ColAttributes[] {}, + new UniqueIntGenerator(1, rightSize * 10), + new SetGenerator<>("a", "b", "c", "d", "e"), + new IntGenerator(1, 10)); final QueryTable rightTable = getTable(true, rightSize, random, rightColumnInfos); System.out.println("Left:"); @@ -358,8 +344,7 @@ private void testNaturalJoinMixedGroupingLeftStatic(int leftSize, int rightSize, TableTools.showWithIndex(result); final Table ungroupedResult = leftTable.update("I1=I1*10") - .naturalJoin(rightTable.update("I1=I1*10"), "I1", "LC1=C1,LC2=C2") - 
.update("I1=(int)(I1/10)"); + .naturalJoin(rightTable.update("I1=I1*10"), "I1", "LC1=C1,LC2=C2").update("I1=(int)(I1/10)"); System.out.println("Ungrouped Result:"); TableTools.showWithIndex(ungroupedResult); @@ -382,9 +367,8 @@ private void testNaturalJoinMixedGroupingLeftStatic(int leftSize, int rightSize, } LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - GenerateTableUpdates.generateShiftAwareTableUpdates( - GenerateTableUpdates.DEFAULT_PROFILE, rightSize, random, rightTable, - rightColumnInfos); + GenerateTableUpdates.generateShiftAwareTableUpdates(GenerateTableUpdates.DEFAULT_PROFILE, rightSize, + random, rightTable, rightColumnInfos); }); if (LiveTableTestCase.printTableUpdates) { @@ -416,8 +400,7 @@ private interface MakeLeftColumn { private void testNaturalJoinSimpleStatic(MakeLeftColumn lC) { final Table left = testTable(lC.make("Symbol", "A", "B", "C"), c("LeftSentinel", 1, 2, 3)); - final Table right = - newTable(c("Symbol", "A", "B", "D", "E", "F"), c("RightSentinel", 10, 11, 12, 13, 14), + final Table right = newTable(c("Symbol", "A", "B", "D", "E", "F"), c("RightSentinel", 10, 11, 12, 13, 14), c("RightObjectSentinel", 10, 11L, "12", "13", "14")); final Table cj = left.naturalJoin(right, "Symbol"); @@ -425,13 +408,12 @@ private void testNaturalJoinSimpleStatic(MakeLeftColumn lC) { assertEquals(new int[] {10, 11, NULL_INT}, intColumn(cj, "RightSentinel")); // the two wheres check for filling null keys final Table cjw = cj.where("RightObjectSentinel = null"); - final Table cjw2 = left.naturalJoin(SparseSelect.sparseSelect(right), "Symbol") - .where("RightObjectSentinel = null"); + final Table cjw2 = + left.naturalJoin(SparseSelect.sparseSelect(right), "Symbol").where("RightObjectSentinel = null"); TableTools.showWithIndex(cjw); TableTools.showWithIndex(cjw2); - final Table left2 = - newTable(lC.make("Symbol", "A", "B", "C", "A"), c("LeftSentinel", 1, 2, 3, 4)); + final Table left2 = newTable(lC.make("Symbol", "A", "B", "C", "A"), 
c("LeftSentinel", 1, 2, 3, 4)); final Table right2 = newTable(c("Symbol", "A", "B", "D"), c("RightSentinel", 10, 11, 12)); final Table cj2 = left2.naturalJoin(right2, "Symbol"); @@ -439,11 +421,9 @@ private void testNaturalJoinSimpleStatic(MakeLeftColumn lC) { assertEquals(new int[] {10, 11, NULL_INT, 10}, intColumn(cj2, "RightSentinel")); final int collision = 16384; - final Table left3 = - newTable(lC.make("Int", 10, collision + 10, collision * 2 + 10, collision * 3 + 10), + final Table left3 = newTable(lC.make("Int", 10, collision + 10, collision * 2 + 10, collision * 3 + 10), c("LeftSentinel", 1, 3, 3, 4)); - final Table right3 = newTable(c("Int", 10, collision + 10, collision * 4 + 10), - c("RightSentinel", 10, 11, 13)); + final Table right3 = newTable(c("Int", 10, collision + 10, collision * 4 + 10), c("RightSentinel", 10, 11, 13)); TableTools.show(left3); TableTools.show(right3); @@ -453,8 +433,8 @@ private void testNaturalJoinSimpleStatic(MakeLeftColumn lC) { assertEquals(new int[] {10, 11, NULL_INT, NULL_INT}, intColumn(cj3, "RightSentinel")); final Table left4 = newTable( - lC.make("String", "c", "e", "g"), - c("LeftSentinel", 1, 2, 3)); + lC.make("String", "c", "e", "g"), + c("LeftSentinel", 1, 2, 3)); final Table right4 = newTable(c("String", "c", "e"), c("RightSentinel", 10, 11)); final Table cj4 = left4.naturalJoin(right4, "String"); TableTools.showWithIndex(cj4); @@ -462,24 +442,24 @@ private void testNaturalJoinSimpleStatic(MakeLeftColumn lC) { final Table left5 = newTable( - lC.make("String", "c", "e", "g"), - c("LeftSentinel", 1, 2, 3)); + lC.make("String", "c", "e", "g"), + c("LeftSentinel", 1, 2, 3)); final Table right5 = newTable(c("RightSentinel", 10)); final Table cj5 = left5.naturalJoin(right5, ""); TableTools.showWithIndex(cj5); assertEquals(new int[] {10, 10, 10}, intColumn(cj5, "RightSentinel")); final Table left6 = newTable( - lC.make("String", "c", "e", "g"), - c("LeftSentinel", 1, 2, 3)); + lC.make("String", "c", "e", "g"), + 
c("LeftSentinel", 1, 2, 3)); final Table right6 = newTable(intCol("RightSentinel")); final Table cj6 = left6.naturalJoin(right6, ""); TableTools.showWithIndex(cj6); assertEquals(new int[] {NULL_INT, NULL_INT, NULL_INT}, intColumn(cj6, "RightSentinel")); final Table left7 = newTable( - lC.make("String", CollectionUtil.ZERO_LENGTH_STRING_ARRAY), - intCol("LeftSentinel")); + lC.make("String", CollectionUtil.ZERO_LENGTH_STRING_ARRAY), + intCol("LeftSentinel")); final Table right7 = newTable(intCol("RightSentinel", 10, 11)); final Table cj7 = left7.naturalJoin(right7, ""); TableTools.showWithIndex(cj7); @@ -488,25 +468,23 @@ private void testNaturalJoinSimpleStatic(MakeLeftColumn lC) { // inactive right hand side state, build using the left final Table left8 = newTable(lC.make("Symbol", "A", "B", "C"), c("LeftSentinel", 1, 2, 3)); final Table right8 = newTable(c("Symbol", "A", "B", "D", "D", "E", "E", "D"), - c("RightSentinel", 10, 11, 12, 13, 14, 15, 16)); + c("RightSentinel", 10, 11, 12, 13, 14, 15, 16)); final Table cj8 = left8.naturalJoin(right8, "Symbol"); TableTools.showWithIndex(cj8); assertEquals(new int[] {10, 11, NULL_INT}, intColumn(cj8, "RightSentinel")); // inactive right hand side state, build using the right - final Table left9 = newTable(lC.make("Symbol", "A", "B", "C", "A", "B", "C"), - c("LeftSentinel", 1, 2, 3, 4, 5, 6)); + final Table left9 = + newTable(lC.make("Symbol", "A", "B", "C", "A", "B", "C"), c("LeftSentinel", 1, 2, 3, 4, 5, 6)); final Table right9 = newTable(c("Symbol", "A", "D", "D"), c("RightSentinel", 10, 11, 12)); final Table cj9 = left9.naturalJoin(right9, "Symbol"); TableTools.showWithIndex(cj9); - assertEquals(new int[] {10, NULL_INT, NULL_INT, 10, NULL_INT, NULL_INT}, - intColumn(cj9, "RightSentinel")); + assertEquals(new int[] {10, NULL_INT, NULL_INT, 10, NULL_INT, NULL_INT}, intColumn(cj9, "RightSentinel")); } public void testNaturalJoinDuplicateRights() { // build from right - final Table left = - testTable(c("Symbol", "A", "B", 
"C", "D"), c("LeftSentinel", 1, 2, 3, 4)); + final Table left = testTable(c("Symbol", "A", "B", "C", "D"), c("LeftSentinel", 1, 2, 3, 4)); final Table right = newTable(c("Symbol", "A", "A"), c("RightSentinel", 10, 11)); try { final Table cj = left.naturalJoin(right, "Symbol"); @@ -518,8 +496,7 @@ public void testNaturalJoinDuplicateRights() { // build from left final Table left2 = testTable(c("Symbol", "A", "B"), c("LeftSentinel", 1, 2)); - final Table right2 = - newTable(c("Symbol", "A", "A", "B", "C", "D"), c("RightSentinel", 10, 11, 12, 13, 14)); + final Table right2 = newTable(c("Symbol", "A", "A", "B", "C", "D"), c("RightSentinel", 10, 11, 12, 13, 14)); try { final Table cj2 = left2.naturalJoin(right2, "Symbol"); TableTools.showWithIndex(cj2); @@ -545,8 +522,9 @@ public void testNaturalJoinDuplicateRightsRefreshingRight() { // bad right key added final QueryTable right2 = testRefreshingTable(c("Symbol", "A"), c("RightSentinel", 10)); final Table cj2 = left.naturalJoin(right2, "Symbol"); - assertTableEquals(newTable(col("Symbol", "A", "B"), intCol("LeftSentinel", 1, 2), - intCol("RightSentinel", 10, NULL_INT)), cj2); + assertTableEquals( + newTable(col("Symbol", "A", "B"), intCol("LeftSentinel", 1, 2), intCol("RightSentinel", 10, NULL_INT)), + cj2); final ErrorListener listener = new ErrorListener((DynamicTable) cj2); ((DynamicTable) cj2).listenForUpdates(listener); @@ -578,8 +556,9 @@ public void testNaturalJoinDuplicateRightsRefreshingBoth() { // bad right key added final QueryTable right2 = testRefreshingTable(c("Symbol", "A"), c("RightSentinel", 10)); final Table cj2 = left.naturalJoin(right2, "Symbol"); - assertTableEquals(newTable(col("Symbol", "A", "B"), intCol("LeftSentinel", 1, 2), - intCol("RightSentinel", 10, NULL_INT)), cj2); + assertTableEquals( + newTable(col("Symbol", "A", "B"), intCol("LeftSentinel", 1, 2), intCol("RightSentinel", 10, NULL_INT)), + cj2); final ErrorListener listener = new ErrorListener((DynamicTable) cj2); ((DynamicTable) 
cj2).listenForUpdates(listener); @@ -597,8 +576,7 @@ public void testNaturalJoinDuplicateRightsRefreshingBoth() { public void testNaturalJoinReinterprets() { - final Table left = - testTable(c("JBool", true, false, null, true), c("LeftSentinel", 1, 2, 3, 4)); + final Table left = testTable(c("JBool", true, false, null, true), c("LeftSentinel", 1, 2, 3, 4)); final Table right = newTable(c("JBool", true, false, null), c("RightSentinel", 10, 11, 12)); final Table cj = left.naturalJoin(right, "JBool"); TableTools.showWithIndex(cj); @@ -607,29 +585,25 @@ public void testNaturalJoinReinterprets() { final DBDateTime time1 = DBTimeUtils.convertDateTime("2019-05-10T09:45:00 NY"); final DBDateTime time2 = DBTimeUtils.convertDateTime("2019-05-10T21:45:00 NY"); - final Table left2 = - testTable(c("JDate", time1, time2, null, time2), c("LeftSentinel", 1, 2, 3, 4)); - final Table right2 = - newTable(c("JDate", time2, time1, null), c("RightSentinel", 10, 11, 12)); + final Table left2 = testTable(c("JDate", time1, time2, null, time2), c("LeftSentinel", 1, 2, 3, 4)); + final Table right2 = newTable(c("JDate", time2, time1, null), c("RightSentinel", 10, 11, 12)); final Table cj2 = left2.naturalJoin(right2, "JDate"); TableTools.showWithIndex(cj2); assertEquals(new int[] {11, 10, 12, 10}, intColumn(cj2, "RightSentinel")); } public void testNaturalJoinFloats() { - final Table left = - testTable(floatCol("JF", 1.0f, 2.0f, Float.NaN, 3.0f), c("LeftSentinel", 1, 2, 3, 4)); - final Table right = - newTable(floatCol("JF", Float.NaN, 1.0f, 2.0f), c("RightSentinel", 10, 11, 12)); + final Table left = testTable(floatCol("JF", 1.0f, 2.0f, Float.NaN, 3.0f), c("LeftSentinel", 1, 2, 3, 4)); + final Table right = newTable(floatCol("JF", Float.NaN, 1.0f, 2.0f), c("RightSentinel", 10, 11, 12)); final Table cj = left.naturalJoin(right, "JF"); TableTools.showWithIndex(cj); assertEquals(new int[] {11, 12, 10, NULL_INT}, intColumn(cj, "RightSentinel")); - final Table left2 = testTable( - doubleCol("JD", 
10.0, 20.0, Double.NaN, io.deephaven.util.QueryConstants.NULL_DOUBLE), - c("LeftSentinel", 1, 2, 3, 4)); - final Table right2 = newTable(doubleCol("JD", QueryConstants.NULL_DOUBLE, Double.NaN, 10.0), - c("RightSentinel", 10, 11, 12)); + final Table left2 = + testTable(doubleCol("JD", 10.0, 20.0, Double.NaN, io.deephaven.util.QueryConstants.NULL_DOUBLE), + c("LeftSentinel", 1, 2, 3, 4)); + final Table right2 = + newTable(doubleCol("JD", QueryConstants.NULL_DOUBLE, Double.NaN, 10.0), c("RightSentinel", 10, 11, 12)); final Table cj2 = left2.naturalJoin(right2, "JD"); TableTools.showWithIndex(cj2); assertEquals(new int[] {12, NULL_INT, 11, 10}, intColumn(cj2, "RightSentinel")); @@ -645,7 +619,7 @@ public void testNaturalJoinZeroKeys() { final Table cj = c0.naturalJoin(c1, ""); final DynamicTable emptyRightResult = - newTable(intCol("Left", 1, 2, 3), intCol("Right", NULL_INT, NULL_INT, NULL_INT)); + newTable(intCol("Left", 1, 2, 3), intCol("Right", NULL_INT, NULL_INT, NULL_INT)); assertTableEquals(emptyRightResult, cj); TableTools.showWithIndex(cj); @@ -657,8 +631,7 @@ public void testNaturalJoinZeroKeys() { TableTools.showWithIndex(cj); - final DynamicTable fourRightResult = - newTable(intCol("Left", 1, 2, 3), intCol("Right", 4, 4, 4)); + final DynamicTable fourRightResult = newTable(intCol("Left", 1, 2, 3), intCol("Right", 4, 4, 4)); assertTableEquals(fourRightResult, cj); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { @@ -679,8 +652,7 @@ public void testNaturalJoinZeroKeys() { TableTools.showWithIndex(cj); - final DynamicTable fiveResult = - newTable(intCol("Left", 1, 2, 3, 6), intCol("Right", 5, 5, 5, 5)); + final DynamicTable fiveResult = newTable(intCol("Left", 1, 2, 3, 6), intCol("Right", 5, 5, 5, 5)); assertTableEquals(fiveResult, cj); } @@ -693,16 +665,15 @@ public void testNaturalJoinZeroKeysStaticRight() { final Table c2 = newTable(intCol("Right", 4)); final Table cj1 = c0.naturalJoin(c1, ""); - assertTableEquals( - newTable(intCol("Left", 1, 2, 3), 
intCol("Right", NULL_INT, NULL_INT, NULL_INT)), cj1); + assertTableEquals(newTable(intCol("Left", 1, 2, 3), intCol("Right", NULL_INT, NULL_INT, NULL_INT)), cj1); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { addToTable(c0, i(6), intCol("Left", 6)); c0.notifyListeners(i(6), i(), i()); }); TableTools.showWithIndex(cj1); - assertTableEquals(newTable(intCol("Left", 1, 2, 3, 6), - intCol("Right", NULL_INT, NULL_INT, NULL_INT, NULL_INT)), cj1); + assertTableEquals(newTable(intCol("Left", 1, 2, 3, 6), intCol("Right", NULL_INT, NULL_INT, NULL_INT, NULL_INT)), + cj1); final Table cj2 = c0.naturalJoin(c2, ""); assertTableEquals(newTable(intCol("Left", 1, 2, 3, 6), intCol("Right", 4, 4, 4, 4)), cj2); @@ -712,8 +683,7 @@ public void testNaturalJoinZeroKeysStaticRight() { }); TableTools.showWithIndex(cj1); - assertTableEquals(newTable(intCol("Left", 1, 2, 3, 6, 7), intCol("Right", 4, 4, 4, 4, 4)), - cj2); + assertTableEquals(newTable(intCol("Left", 1, 2, 3, 6, 7), intCol("Right", 4, 4, 4, 4, 4)), cj2); } @@ -726,7 +696,7 @@ public void testNaturalJoinZeroKeysStaticLeft() { final Table cj = c0.naturalJoin(c1, ""); final DynamicTable emptyRightResult = - newTable(intCol("Left", 1, 2, 3), intCol("Right", NULL_INT, NULL_INT, NULL_INT)); + newTable(intCol("Left", 1, 2, 3), intCol("Right", NULL_INT, NULL_INT, NULL_INT)); assertTableEquals(emptyRightResult, cj); TableTools.showWithIndex(cj); @@ -738,8 +708,7 @@ public void testNaturalJoinZeroKeysStaticLeft() { TableTools.showWithIndex(cj); - final DynamicTable fourRightResult = - newTable(intCol("Left", 1, 2, 3), intCol("Right", 4, 4, 4)); + final DynamicTable fourRightResult = newTable(intCol("Left", 1, 2, 3), intCol("Right", 4, 4, 4)); assertTableEquals(fourRightResult, cj); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { @@ -769,25 +738,24 @@ public void testNaturalJoin() { final Table lTable = TstUtils.testRefreshingTable( - c("String", "a", "b", "c"), - intCol("Int", 1, 2, 3)); + c("String", "a", "b", "c"), + 
intCol("Int", 1, 2, 3)); final Table rTable = TstUtils.testRefreshingTable( - c("String", "a", "b", "c"), - intCol("Int", 10, 20, 30)); + c("String", "a", "b", "c"), + intCol("Int", 10, 20, 30)); final Table result = lTable.naturalJoin(rTable, "String", "Int2=Int"); assertEquals(3, result.size()); assertEquals(3, result.getColumns().length); assertEquals("String", result.getColumns()[0].getName()); assertEquals("Int", result.getColumns()[1].getName()); assertEquals("Int2", result.getColumns()[2].getName()); - assertEquals(Arrays.asList("a", "b", "c"), - Arrays.asList(result.getColumn("String").get(0, 3))); + assertEquals(Arrays.asList("a", "b", "c"), Arrays.asList(result.getColumn("String").get(0, 3))); assertEquals(Arrays.asList(1, 2, 3), Arrays.asList(result.getColumn("Int").get(0, 3))); assertEquals(Arrays.asList(10, 20, 30), Arrays.asList(result.getColumn("Int2").get(0, 3))); Table table1 = TstUtils.testRefreshingTable( - c("String", "c", "e", "g")); + c("String", "c", "e", "g")); Table table2 = TstUtils.testRefreshingTable(c("String", "c", "e"), c("v", 1, 2)); Table pairMatch = table1.naturalJoin(table2, "String", "v"); @@ -797,13 +765,12 @@ public void testNaturalJoin() { assertEquals("v", pairMatch.getColumns()[1].getName()); assertEquals(String.class, pairMatch.getColumns()[0].getType()); assertEquals(int.class, pairMatch.getColumns()[1].getType()); - assertEquals(asList("c", "e", "g"), - asList((Object[]) pairMatch.getColumns()[0].getDirect())); + assertEquals(asList("c", "e", "g"), asList((Object[]) pairMatch.getColumns()[0].getDirect())); assertEquals(asList(1, 2, null), asList(pairMatch.getColumn("v").get(0, 3))); table2 = TstUtils.testRefreshingTable( - c("String", "c", "e", "g"), c("v", 1, 2, 3)); + c("String", "c", "e", "g"), c("v", 1, 2, 3)); pairMatch = table1.naturalJoin(table2, "String", "v"); assertEquals(3, pairMatch.size()); @@ -812,8 +779,7 @@ public void testNaturalJoin() { assertEquals("v", pairMatch.getColumns()[1].getName()); 
assertEquals(String.class, pairMatch.getColumns()[0].getType()); assertEquals(int.class, pairMatch.getColumns()[1].getType()); - assertEquals(asList("c", "e", "g"), - asList((Object[]) pairMatch.getColumns()[0].getDirect())); + assertEquals(asList("c", "e", "g"), asList((Object[]) pairMatch.getColumns()[0].getDirect())); assertEquals(asList(1, 2, 3), asList(pairMatch.getColumn("v").get(0, 3))); pairMatch = table2.naturalJoin(table1, "String", ""); @@ -823,8 +789,7 @@ public void testNaturalJoin() { assertEquals("v", pairMatch.getColumns()[1].getName()); assertEquals(String.class, pairMatch.getColumns()[0].getType()); assertEquals(int.class, pairMatch.getColumns()[1].getType()); - assertEquals(asList("c", "e", "g"), - asList((Object[]) pairMatch.getColumns()[0].getDirect())); + assertEquals(asList("c", "e", "g"), asList((Object[]) pairMatch.getColumns()[0].getDirect())); assertEquals(asList(1, 2, 3), asList(pairMatch.getColumn("v").get(0, 3))); pairMatch = table1.naturalJoin(table2, "String=String", "v"); @@ -834,8 +799,7 @@ public void testNaturalJoin() { assertEquals("v", pairMatch.getColumns()[1].getName()); assertEquals(String.class, pairMatch.getColumns()[0].getType()); assertEquals(int.class, pairMatch.getColumns()[1].getType()); - assertEquals(asList("c", "e", "g"), - asList((Object[]) pairMatch.getColumns()[0].getDirect())); + assertEquals(asList("c", "e", "g"), asList((Object[]) pairMatch.getColumns()[0].getDirect())); assertEquals(asList(1, 2, 3), asList(pairMatch.getColumn("v").get(0, 3))); pairMatch = table2.naturalJoin(table1, "String=String", ""); @@ -846,18 +810,17 @@ public void testNaturalJoin() { assertEquals("v", pairMatch.getColumns()[1].getName()); assertEquals(String.class, pairMatch.getColumns()[0].getType()); assertEquals(int.class, pairMatch.getColumns()[1].getType()); - assertEquals(asList("c", "e", "g"), - asList((Object[]) pairMatch.getColumns()[0].getDirect())); + assertEquals(asList("c", "e", "g"), asList((Object[]) 
pairMatch.getColumns()[0].getDirect())); assertEquals(1, pairMatch.getColumn("v").getInt(0)); assertEquals(2, pairMatch.getColumn("v").getInt(1)); assertEquals(3, pairMatch.getColumn("v").getInt(2)); table1 = TstUtils.testRefreshingTable( - c("String1", "c", "e", "g")); + c("String1", "c", "e", "g")); table2 = TstUtils.testRefreshingTable( - c("String2", "c", "e", "g"), c("v", 1, 2, 3)); + c("String2", "c", "e", "g"), c("v", 1, 2, 3)); pairMatch = table1.naturalJoin(table2, "String1=String2", "String2,v"); @@ -870,10 +833,8 @@ public void testNaturalJoin() { assertEquals(String.class, pairMatch.getColumns()[0].getType()); assertEquals(String.class, pairMatch.getColumns()[1].getType()); assertEquals(int.class, pairMatch.getColumns()[2].getType()); - assertEquals(asList("c", "e", "g"), - asList((Object[]) pairMatch.getColumns()[0].getDirect())); - assertEquals(asList("c", "e", "g"), - asList((Object[]) pairMatch.getColumns()[1].getDirect())); + assertEquals(asList("c", "e", "g"), asList((Object[]) pairMatch.getColumns()[0].getDirect())); + assertEquals(asList("c", "e", "g"), asList((Object[]) pairMatch.getColumns()[1].getDirect())); assertEquals(asList(1, 2, 3), asList(pairMatch.getColumns()[2].get(0, 3))); @@ -887,10 +848,8 @@ public void testNaturalJoin() { assertEquals(String.class, pairMatch.getColumn("String1").getType()); assertEquals(String.class, pairMatch.getColumn("String2").getType()); assertEquals(int.class, pairMatch.getColumn("v").getType()); - assertEquals(asList("c", "e", "g"), - asList((Object[]) pairMatch.getColumn("String1").getDirect())); - assertEquals(asList("c", "e", "g"), - asList((Object[]) pairMatch.getColumn("String2").getDirect())); + assertEquals(asList("c", "e", "g"), asList((Object[]) pairMatch.getColumn("String1").getDirect())); + assertEquals(asList("c", "e", "g"), asList((Object[]) pairMatch.getColumn("String2").getDirect())); assertEquals(asList(1, 2, 3), asList(pairMatch.getColumn("v").get(0, 3))); } @@ -912,8 +871,7 @@ public void 
testNaturalJoinInactive() { setExpectError(false); final QueryTable c0 = TstUtils.testRefreshingTable(c("USym0", "A", "C"), c("X", 1, 2)); - final QueryTable c1 = - TstUtils.testRefreshingTable(c("USym1", "A", "B", "B"), c("Y", 3, 4, 5)); + final QueryTable c1 = TstUtils.testRefreshingTable(c("USym1", "A", "B", "B"), c("Y", 3, 4, 5)); final Table cj = c0.naturalJoin(c1, "USym0=USym1", "Y"); @@ -958,50 +916,48 @@ public void testNaturalJoinInactive() { public void testNaturalJoinLeftIncrementalRightStaticSimple() { final QueryTable leftQueryTable = TstUtils.testRefreshingTable(i(1, 2, 4, 6), - c("Sym", "aa", "bc", "aa", "aa"), - c("ByteCol", (byte) 10, (byte) 20, (byte) 30, (byte) 50), - c("DoubleCol", 0.1, 0.2, 0.3, 0.5)); + c("Sym", "aa", "bc", "aa", "aa"), + c("ByteCol", (byte) 10, (byte) 20, (byte) 30, (byte) 50), + c("DoubleCol", 0.1, 0.2, 0.3, 0.5)); final QueryTable rightQueryTable = TstUtils.testTable(i(3, 6), - c("RSym", "aa", "bc"), - c("ByteCol", (byte) 10, (byte) 20), - c("RDoubleCol", 1.1, 2.2)); + c("RSym", "aa", "bc"), + c("ByteCol", (byte) 10, (byte) 20), + c("RDoubleCol", 1.1, 2.2)); final EvalNugget[] en = new EvalNugget[] { new EvalNugget() { public Table e() { - return leftQueryTable.naturalJoin(rightQueryTable, "ByteCol", - "RSym,RDoubleCol"); + return leftQueryTable.naturalJoin(rightQueryTable, "ByteCol", "RSym,RDoubleCol"); } } }; LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - addToTable(leftQueryTable, i(3, 9), c("Sym", "aa", "aa"), - c("ByteCol", (byte) 20, (byte) 10), c("DoubleCol", 2.1, 2.2)); + addToTable(leftQueryTable, i(3, 9), c("Sym", "aa", "aa"), c("ByteCol", (byte) 20, (byte) 10), + c("DoubleCol", 2.1, 2.2)); System.out.println("Left Table Updated:"); showWithIndex(leftQueryTable); leftQueryTable.notifyListeners(i(3, 9), i(), i()); }); TstUtils.validate(en); - LiveTableMonitor.DEFAULT - .runWithinUnitTestCycle(() -> leftQueryTable.notifyListeners(i(), i(), i(1, 2, 4, 6))); + 
LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> leftQueryTable.notifyListeners(i(), i(), i(1, 2, 4, 6))); TstUtils.validate(en); } public void testNaturalJoinIterative() { final QueryTable leftQueryTable = TstUtils.testRefreshingTable(i(1, 2, 4, 6), - c("Sym", "aa", "bc", "aa", "aa"), - c("intCol", 10, 20, 30, 50), - c("doubleCol", 0.1, 0.2, 0.3, 0.5)); + c("Sym", "aa", "bc", "aa", "aa"), + c("intCol", 10, 20, 30, 50), + c("doubleCol", 0.1, 0.2, 0.3, 0.5)); final QueryTable rightQueryTable1 = TstUtils.testRefreshingTable(i(3, 6), - c("Sym", "aa", "bc"), - c("xCol", 11, 22), - c("yCol", 1.1, 2.2)); + c("Sym", "aa", "bc"), + c("xCol", 11, 22), + c("yCol", 1.1, 2.2)); final QueryTable rightQueryTable2 = TstUtils.testRefreshingTable(i(10, 20, 30), - c("Sym", "aa", "bc", "aa"), - c("xCol", 11, 20, 20), - c("yCol", 1.1, 2.2, 5.5)); + c("Sym", "aa", "bc", "aa"), + c("xCol", 11, 20, 20), + c("yCol", 1.1, 2.2, 5.5)); final EvalNugget[] en = new EvalNugget[] { @@ -1012,70 +968,61 @@ public Table e() { }, new EvalNugget() { public Table e() { - return leftQueryTable.naturalJoin(rightQueryTable2, "Sym,intCol=xCol", - "xCol,yCol"); + return leftQueryTable.naturalJoin(rightQueryTable2, "Sym,intCol=xCol", "xCol,yCol"); } }, new EvalNugget() { public Table e() { - return leftQueryTable.naturalJoin(rightQueryTable1, "Sym", "xCol,yCol") - .select(); + return leftQueryTable.naturalJoin(rightQueryTable1, "Sym", "xCol,yCol").select(); } }, new EvalNugget() { public Table e() { - return leftQueryTable - .naturalJoin(rightQueryTable2, "Sym,intCol=xCol", "xCol,yCol").select(); + return leftQueryTable.naturalJoin(rightQueryTable2, "Sym,intCol=xCol", "xCol,yCol").select(); } }, new EvalNugget() { public Table e() { - return leftQueryTable.naturalJoin(rightQueryTable1, "Sym", "xCol,yCol") - .update("q=xCol+yCol"); + return leftQueryTable.naturalJoin(rightQueryTable1, "Sym", "xCol,yCol").update("q=xCol+yCol"); } }, new EvalNugget() { public Table e() { - return leftQueryTable - 
.naturalJoin(rightQueryTable2, "Sym,intCol=xCol", "xCol,yCol") - .update("q=xCol+yCol"); + return leftQueryTable.naturalJoin(rightQueryTable2, "Sym,intCol=xCol", "xCol,yCol") + .update("q=xCol+yCol"); } }, new EvalNugget() { public Table e() { - return leftQueryTable.select().naturalJoin(rightQueryTable1, "Sym", - "xCol,yCol"); + return leftQueryTable.select().naturalJoin(rightQueryTable1, "Sym", "xCol,yCol"); } }, new EvalNugget() { public Table e() { - return leftQueryTable.select().naturalJoin(rightQueryTable2, - "Sym,intCol=xCol", "xCol,yCol"); + return leftQueryTable.select().naturalJoin(rightQueryTable2, "Sym,intCol=xCol", "xCol,yCol"); } }, new EvalNugget() { public Table e() { - return leftQueryTable.select() - .naturalJoin(rightQueryTable1, "Sym", "xCol,yCol") - .update("q=xCol+yCol"); + return leftQueryTable.select().naturalJoin(rightQueryTable1, "Sym", "xCol,yCol") + .update("q=xCol+yCol"); } }, new EvalNugget() { public Table e() { - return leftQueryTable.select() - .naturalJoin(rightQueryTable2, "Sym,intCol=xCol", "xCol,yCol").select(); + return leftQueryTable.select().naturalJoin(rightQueryTable2, "Sym,intCol=xCol", "xCol,yCol") + .select(); } }, new EvalNugget() { public Table e() { - return leftQueryTable.select() - .naturalJoin(rightQueryTable1, "Sym", "xCol,yCol").select(); + return leftQueryTable.select().naturalJoin(rightQueryTable1, "Sym", "xCol,yCol").select(); } }, new EvalNugget() { public Table e() { - return leftQueryTable.select() - .naturalJoin(rightQueryTable2, "Sym,intCol=xCol", "xCol,yCol").select(); + return leftQueryTable.select().naturalJoin(rightQueryTable2, "Sym,intCol=xCol", "xCol,yCol") + .select(); } }, }; @@ -1086,8 +1033,7 @@ public Table e() { TableTools.showWithIndex(rightQueryTable1); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - addToTable(leftQueryTable, i(3, 9), c("Sym", "aa", "aa"), c("intCol", 20, 10), - c("doubleCol", 2.1, 2.2)); + addToTable(leftQueryTable, i(3, 9), c("Sym", "aa", "aa"), c("intCol", 20, 
10), c("doubleCol", 2.1, 2.2)); System.out.println("Left Table Updated:"); showWithIndex(leftQueryTable); leftQueryTable.notifyListeners(i(3, 9), i(), i()); @@ -1095,15 +1041,13 @@ public Table e() { TstUtils.validate(en); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - addToTable(leftQueryTable, i(1, 9), c("Sym", "bc", "aa"), c("intCol", 30, 11), - c("doubleCol", 2.1, 2.2)); + addToTable(leftQueryTable, i(1, 9), c("Sym", "bc", "aa"), c("intCol", 30, 11), c("doubleCol", 2.1, 2.2)); leftQueryTable.notifyListeners(i(), i(), i(1, 9)); }); TstUtils.validate(en); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - addToTable(rightQueryTable1, i(3, 4), c("Sym", "ab", "ac"), c("xCol", 55, 33), - c("yCol", 6.6, 7.7)); + addToTable(rightQueryTable1, i(3, 4), c("Sym", "ab", "ac"), c("xCol", 55, 33), c("yCol", 6.6, 7.7)); rightQueryTable1.notifyListeners(i(4), i(), i(3)); }); TstUtils.validate(en); @@ -1111,8 +1055,8 @@ public Table e() { LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { show(rightQueryTable2); addToTable(rightQueryTable2, i(20, 40), c("Sym", "aa", "bc"), - c("xCol", 30, 50), - c("yCol", 1.3, 1.5)); + c("xCol", 30, 50), + c("yCol", 1.3, 1.5)); show(rightQueryTable2); rightQueryTable2.notifyListeners(i(40), i(), i(20)); }); @@ -1120,30 +1064,27 @@ public Table e() { LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - addToTable(rightQueryTable1, i(4, 6), c("Sym", "bc", "aa"), c("xCol", 66, 44), - c("yCol", 7.6, 6.7)); + addToTable(rightQueryTable1, i(4, 6), c("Sym", "bc", "aa"), c("xCol", 66, 44), c("yCol", 7.6, 6.7)); rightQueryTable1.notifyListeners(i(), i(), i(4, 6)); }); TstUtils.validate(en); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - addToTable(rightQueryTable1, i(4, 6), c("Sym", "bc", "aa"), c("xCol", 66, 44), - c("yCol", 7.7, 6.8)); + addToTable(rightQueryTable1, i(4, 6), c("Sym", "bc", "aa"), c("xCol", 66, 44), c("yCol", 7.7, 6.8)); rightQueryTable1.notifyListeners(i(), i(), i(4, 6)); }); 
TstUtils.validate(en); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - addToTable(rightQueryTable1, i(4, 31), c("Sym", "aq", "bc"), c("xCol", 66, 44), - c("yCol", 7.5, 6.9)); + addToTable(rightQueryTable1, i(4, 31), c("Sym", "aq", "bc"), c("xCol", 66, 44), c("yCol", 7.5, 6.9)); rightQueryTable1.notifyListeners(i(31), i(), i(4)); }); TstUtils.validate(en); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { addToTable(rightQueryTable2, i(20, 30), c("Sym", "aa", "aa"), - c("xCol", 20, 30), - c("yCol", 3.1, 5.1)); + c("xCol", 20, 30), + c("yCol", 3.1, 5.1)); rightQueryTable2.notifyListeners(i(), i(), i(20, 30)); }); TstUtils.validate(en); @@ -1158,8 +1099,8 @@ public Table e() { LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { addToTable(rightQueryTable2, i(40), c("Sym", "bc"), - c("xCol", 20), - c("yCol", 3.2)); + c("xCol", 20), + c("yCol", 3.2)); TstUtils.removeRows(rightQueryTable2, i(20, 30)); rightQueryTable2.notifyListeners(i(), i(20, 30), i(40)); }); @@ -1212,41 +1153,36 @@ private void dumpComplete(QueryTable queryTable, String... 
columns) { public void testNaturalJoinIterative2() { final QueryTable leftQueryTable = TstUtils.testRefreshingTable(i(1, 2, 4, 6), - c("Sym", "aa", "bc", "aa", "aa"), - c("intCol", 10, 20, 30, 50), - c("doubleCol", 0.1, 0.2, 0.3, 0.5)); + c("Sym", "aa", "bc", "aa", "aa"), + c("intCol", 10, 20, 30, 50), + c("doubleCol", 0.1, 0.2, 0.3, 0.5)); final QueryTable rightQueryTable2 = TstUtils.testRefreshingTable(i(10, 20, 30), - c("Sym", "aa", "bc", "aa"), - c("xCol", 11, 20, 20), - c("yCol", 1.1, 2.2, 5.5)); + c("Sym", "aa", "bc", "aa"), + c("xCol", 11, 20, 20), + c("yCol", 1.1, 2.2, 5.5)); final EvalNugget[] en = new EvalNugget[] { new EvalNugget() { public Table e() { - return leftQueryTable.naturalJoin(rightQueryTable2.lastBy("Sym"), "Sym", - "xCol,yCol"); + return leftQueryTable.naturalJoin(rightQueryTable2.lastBy("Sym"), "Sym", "xCol,yCol"); } }, new EvalNugget() { public Table e() { - return leftQueryTable - .naturalJoin(rightQueryTable2.lastBy("Sym"), "Sym", "xCol,yCol") - .select(); + return leftQueryTable.naturalJoin(rightQueryTable2.lastBy("Sym"), "Sym", "xCol,yCol").select(); } } }; LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - addToTable(leftQueryTable, i(3, 9), c("Sym", "aa", "aa"), c("intCol", 20, 10), - c("doubleCol", 2.1, 2.2)); + addToTable(leftQueryTable, i(3, 9), c("Sym", "aa", "aa"), c("intCol", 20, 10), c("doubleCol", 2.1, 2.2)); leftQueryTable.notifyListeners(i(3, 9), i(), i()); }); TstUtils.validate(en); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - addToTable(leftQueryTable, i(1, 9), c("Sym", "bc", "aa"), c("intCol", 30, 11), - c("doubleCol", 2.1, 2.2)); + addToTable(leftQueryTable, i(1, 9), c("Sym", "bc", "aa"), c("intCol", 30, 11), c("doubleCol", 2.1, 2.2)); leftQueryTable.notifyListeners(i(), i(), i(1, 9)); }); TstUtils.validate(en); @@ -1254,8 +1190,8 @@ public Table e() { LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { show(rightQueryTable2); addToTable(rightQueryTable2, i(20, 40), c("Sym", "aa", "bc"), - 
c("xCol", 30, 50), - c("yCol", 1.3, 1.5)); + c("xCol", 30, 50), + c("yCol", 1.3, 1.5)); show(rightQueryTable2); rightQueryTable2.notifyListeners(i(40), i(), i(20)); }); @@ -1263,16 +1199,16 @@ public Table e() { LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { addToTable(rightQueryTable2, i(20, 30), c("Sym", "aa", "aa"), - c("xCol", 20, 30), - c("yCol", 3.1, 5.1)); + c("xCol", 20, 30), + c("yCol", 3.1, 5.1)); rightQueryTable2.notifyListeners(i(), i(), i(20, 30)); }); TstUtils.validate(en); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { addToTable(rightQueryTable2, i(40), c("Sym", "bc"), - c("xCol", 20), - c("yCol", 3.2)); + c("xCol", 20), + c("yCol", 3.2)); TstUtils.removeRows(rightQueryTable2, i(20)); rightQueryTable2.notifyListeners(i(), i(20), i(40)); }); @@ -1287,11 +1223,11 @@ public Table e() { public void testNaturalJoinSortedData() { final QueryTable leftTable = TstUtils.testRefreshingTable( - c("Sym", "a", "b", "c"), - c("Size", 1, 2, 3)); + c("Sym", "a", "b", "c"), + c("Size", 1, 2, 3)); final QueryTable rightTable = TstUtils.testRefreshingTable( - c("Sym", "a", "b", "c"), - c("Qty", 10, 20, 30)); + c("Sym", "a", "b", "c"), + c("Qty", 10, 20, 30)); final EvalNugget[] en = new EvalNugget[] { new EvalNugget() { @@ -1301,48 +1237,44 @@ public Table e() { }, new EvalNugget() { public Table e() { - return leftTable.sortDescending("Size").naturalJoin(rightTable, "Sym", - "Qty"); + return leftTable.sortDescending("Size").naturalJoin(rightTable, "Sym", "Qty"); } }, new EvalNugget() { public Table e() { - return leftTable.sortDescending("Size") - .naturalJoin(rightTable.sortDescending("Qty"), "Sym", "Qty"); + return leftTable.sortDescending("Size").naturalJoin(rightTable.sortDescending("Qty"), "Sym", + "Qty"); } }, new EvalNugget() { public Table e() { - return leftTable.naturalJoin(rightTable.sortDescending("Qty"), "Sym", - "Qty"); + return leftTable.naturalJoin(rightTable.sortDescending("Qty"), "Sym", "Qty"); } }, new EvalNugget() { public Table 
e() { return leftTable.sortDescending("Size") - .naturalJoin(rightTable.sortDescending("Qty"), "Sym", "Qty") - .update("x = Qty*Size"); + .naturalJoin(rightTable.sortDescending("Qty"), "Sym", "Qty").update("x = Qty*Size"); } }, new EvalNugget() { public Table e() { return leftTable.sortDescending("Size") - .naturalJoin(rightTable.sortDescending("Qty"), "Sym", "Qty") - .updateView("x = Qty*Size"); + .naturalJoin(rightTable.sortDescending("Qty"), "Sym", "Qty").updateView("x = Qty*Size"); } }, new EvalNugget() { public Table e() { return leftTable.sortDescending("Size") - .naturalJoin(rightTable.sortDescending("Qty"), "Sym", "Qty") - .view("Sym", "x = Qty*Size"); + .naturalJoin(rightTable.sortDescending("Qty"), "Sym", "Qty") + .view("Sym", "x = Qty*Size"); } }, new EvalNugget() { public Table e() { return leftTable.sortDescending("Size") - .naturalJoin(rightTable.sortDescending("Qty"), "Sym", "Qty") - .select("Sym", "x = Qty*Size"); + .naturalJoin(rightTable.sortDescending("Qty"), "Sym", "Qty") + .select("Sym", "x = Qty*Size"); } }, }; @@ -1351,42 +1283,42 @@ public Table e() { LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { addToTable(leftTable, i(0, 1, 2), - c("Sym", "c", "a", "b"), c("Size", 1, 2, 3)); + c("Sym", "c", "a", "b"), c("Size", 1, 2, 3)); leftTable.notifyListeners(i(), i(), i(0, 1, 2)); }); TstUtils.validate(en); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { addToTable(rightTable, i(0, 1, 2), - c("Sym", "b", "c", "a"), c("Qty", 10, 20, 30)); + c("Sym", "b", "c", "a"), c("Qty", 10, 20, 30)); rightTable.notifyListeners(i(), i(), i(0, 1, 2)); }); TstUtils.validate(en); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { addToTable(leftTable, i(0, 1, 2), - c("Sym", "a", "b", "c"), c("Size", 3, 1, 2)); + c("Sym", "a", "b", "c"), c("Size", 3, 1, 2)); leftTable.notifyListeners(i(), i(), i(0, 1, 2)); }); TstUtils.validate(en); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { addToTable(rightTable, i(0, 1, 2), - c("Sym", "a", "b", 
"c"), c("Qty", 30, 10, 20)); + c("Sym", "a", "b", "c"), c("Qty", 30, 10, 20)); rightTable.notifyListeners(i(), i(), i(0, 1, 2)); }); TstUtils.validate(en); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { addToTable(leftTable, i(3, 4), - c("Sym", "d", "e"), c("Size", -1, 100)); + c("Sym", "d", "e"), c("Size", -1, 100)); leftTable.notifyListeners(i(3, 4), i(), i()); }); TstUtils.validate(en); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { addToTable(rightTable, i(3, 4), - c("Sym", "e", "d"), c("Qty", -10, 50)); + c("Sym", "e", "d"), c("Qty", -10, 50)); rightTable.notifyListeners(i(3, 4), i(), i()); }); TstUtils.validate(en); @@ -1395,14 +1327,13 @@ public Table e() { public void testExactJoin() { Table table1 = testRefreshingTable( - c("String", "c", "e", "g")); + c("String", "c", "e", "g")); try { table1.exactJoin(testRefreshingTable(c("String", "c", "e"), c("v", 1, 2)), "String"); TestCase.fail("Previous statement should have thrown an exception"); } catch (Exception e) { - assertEquals("Tables don't have one-to-one mapping - no mappings for key g.", - e.getMessage()); + assertEquals("Tables don't have one-to-one mapping - no mappings for key g.", e.getMessage()); } @@ -1415,8 +1346,7 @@ public void testExactJoin() { assertEquals("v", pairMatch.getColumns()[1].getName()); assertEquals(String.class, pairMatch.getColumns()[0].getType()); assertEquals(int.class, pairMatch.getColumns()[1].getType()); - assertEquals(asList("c", "e", "g"), - asList((Object[]) pairMatch.getColumns()[0].getDirect())); + assertEquals(asList("c", "e", "g"), asList((Object[]) pairMatch.getColumns()[0].getDirect())); assertEquals(asList(1, 2, 3), asList(pairMatch.getColumn("v").get(0, 3))); pairMatch = table2.exactJoin(table1, "String"); @@ -1426,8 +1356,7 @@ public void testExactJoin() { assertEquals("v", pairMatch.getColumns()[1].getName()); assertEquals(String.class, pairMatch.getColumns()[0].getType()); assertEquals(int.class, pairMatch.getColumns()[1].getType()); - 
assertEquals(asList("c", "e", "g"), - asList((Object[]) pairMatch.getColumns()[0].getDirect())); + assertEquals(asList("c", "e", "g"), asList((Object[]) pairMatch.getColumns()[0].getDirect())); assertEquals(asList(1, 2, 3), asList(pairMatch.getColumn("v").get(0, 3))); pairMatch = table1.exactJoin(table2, "String=String"); @@ -1437,8 +1366,7 @@ public void testExactJoin() { assertEquals("v", pairMatch.getColumns()[1].getName()); assertEquals(String.class, pairMatch.getColumns()[0].getType()); assertEquals(int.class, pairMatch.getColumns()[1].getType()); - assertEquals(asList("c", "e", "g"), - asList((Object[]) pairMatch.getColumns()[0].getDirect())); + assertEquals(asList("c", "e", "g"), asList((Object[]) pairMatch.getColumns()[0].getDirect())); assertEquals(asList(1, 2, 3), asList(pairMatch.getColumn("v").get(0, 3))); pairMatch = table2.exactJoin(table1, "String=String"); @@ -1449,8 +1377,7 @@ public void testExactJoin() { assertEquals("v", pairMatch.getColumns()[1].getName()); assertEquals(String.class, pairMatch.getColumns()[0].getType()); assertEquals(int.class, pairMatch.getColumns()[1].getType()); - assertEquals(asList("c", "e", "g"), - asList((Object[]) pairMatch.getColumns()[0].getDirect())); + assertEquals(asList("c", "e", "g"), asList((Object[]) pairMatch.getColumns()[0].getDirect())); assertEquals(1, pairMatch.getColumn("v").getInt(0)); assertEquals(2, pairMatch.getColumn("v").getInt(1)); assertEquals(3, pairMatch.getColumn("v").getInt(2)); @@ -1469,10 +1396,8 @@ public void testExactJoin() { assertEquals(String.class, pairMatch.getColumns()[0].getType()); assertEquals(String.class, pairMatch.getColumns()[1].getType()); assertEquals(int.class, pairMatch.getColumns()[2].getType()); - assertEquals(asList("c", "e", "g"), - asList((Object[]) pairMatch.getColumns()[0].getDirect())); - assertEquals(asList("c", "e", "g"), - asList((Object[]) pairMatch.getColumns()[1].getDirect())); + assertEquals(asList("c", "e", "g"), asList((Object[]) 
pairMatch.getColumns()[0].getDirect())); + assertEquals(asList("c", "e", "g"), asList((Object[]) pairMatch.getColumns()[1].getDirect())); assertEquals(asList(1, 2, 3), asList(pairMatch.getColumns()[2].get(0, 3))); @@ -1486,10 +1411,8 @@ public void testExactJoin() { assertEquals(String.class, pairMatch.getColumn("String1").getType()); assertEquals(String.class, pairMatch.getColumn("String2").getType()); assertEquals(int.class, pairMatch.getColumn("v").getType()); - assertEquals(asList("c", "e", "g"), - asList((Object[]) pairMatch.getColumn("String1").getDirect())); - assertEquals(asList("c", "e", "g"), - asList((Object[]) pairMatch.getColumn("String2").getDirect())); + assertEquals(asList("c", "e", "g"), asList((Object[]) pairMatch.getColumn("String1").getDirect())); + assertEquals(asList("c", "e", "g"), asList((Object[]) pairMatch.getColumn("String2").getDirect())); assertEquals(asList(1, 2, 3), asList(pairMatch.getColumn("v").get(0, 3))); } @@ -1524,8 +1447,8 @@ private void diskBackedTestHarness(BiConsumer testFunction) throws @NotNull private Table makeLeftDiskTable(File leftLocation) throws IOException { final TableDefinition leftDefinition = TableDefinition.of( - ColumnDefinition.ofString("Symbol"), - ColumnDefinition.ofInt("LeftSentinel")); + ColumnDefinition.ofString("Symbol"), + ColumnDefinition.ofInt("LeftSentinel")); final String[] leftSyms = new String[] {"Apple", "Banana", "Cantaloupe", "DragonFruit", "Apple", "Cantaloupe", "Banana", "Banana", "Cantaloupe"}; final Table leftTable = newTable(stringCol("Symbol", leftSyms)).update("LeftSentinel=i"); @@ -1536,11 +1459,10 @@ private Table makeLeftDiskTable(File leftLocation) throws IOException { @NotNull private Table makeRightDiskTable(File rightLocation) throws IOException { final TableDefinition rightDefinition = TableDefinition.of( - ColumnDefinition.ofString("Symbol"), - ColumnDefinition.ofInt("RightSentinel")); + ColumnDefinition.ofString("Symbol"), + ColumnDefinition.ofInt("RightSentinel")); final 
String[] rightSyms = new String[] {"Elderberry", "Apple", "Banana", "Cantaloupe"}; - final Table rightTable = - newTable(stringCol("Symbol", rightSyms)).update("RightSentinel=100+i"); + final Table rightTable = newTable(stringCol("Symbol", rightSyms)).update("RightSentinel=100+i"); ParquetTools.writeTable(rightTable, rightLocation, rightDefinition); return ParquetTools.readTable(rightLocation); } diff --git a/DB/src/test/java/io/deephaven/db/v2/QueryTableSelectUpdateTest.java b/DB/src/test/java/io/deephaven/db/v2/QueryTableSelectUpdateTest.java index a40cdaf7bf6..dc59be83723 100644 --- a/DB/src/test/java/io/deephaven/db/v2/QueryTableSelectUpdateTest.java +++ b/DB/src/test/java/io/deephaven/db/v2/QueryTableSelectUpdateTest.java @@ -57,16 +57,13 @@ public void tearDown() throws Exception { @Test public void testSelectAndUpdate() { final QueryTable table1 = - (QueryTable) TstUtils.testRefreshingTable(i(2, 4, 6)).select("x = i*3", "y = \"\" + k"); + (QueryTable) TstUtils.testRefreshingTable(i(2, 4, 6)).select("x = i*3", "y = \"\" + k"); TestCase.assertEquals(3, table1.size()); TestCase - .assertEquals(Arrays.asList(0, 3, 6), - Arrays.asList(table1.getColumn("x").get(0, table1.size()))); - TestCase.assertEquals(Arrays.asList("2", "4", "6"), - Arrays.asList(table1.getColumn("y").get(0, table1.size()))); + .assertEquals(Arrays.asList(0, 3, 6), Arrays.asList(table1.getColumn("x").get(0, table1.size()))); + TestCase.assertEquals(Arrays.asList("2", "4", "6"), Arrays.asList(table1.getColumn("y").get(0, table1.size()))); - final QueryTable table = - TstUtils.testRefreshingTable(i(2, 4, 6), c("x", 1, 2, 3), c("y", 'a', 'b', 'c')); + final QueryTable table = TstUtils.testRefreshingTable(i(2, 4, 6), c("x", 1, 2, 3), c("y", 'a', 'b', 'c')); showWithIndex(table); QueryTable table2 = (QueryTable) table.select("x = x * 2", "z = y"); @@ -76,10 +73,8 @@ public void testSelectAndUpdate() { final Listener table2Listener = base.newListenerWithGlobals(table2); 
table2.listenForUpdates(table2Listener); TestCase - .assertEquals(Arrays.asList(2, 4, 6), - Arrays.asList(table2.getColumn("x").get(0, table2.size()))); - TestCase.assertEquals(Arrays.asList('a', 'b', 'c'), - Arrays.asList(table2.getColumn("z").get(0, table2.size()))); + .assertEquals(Arrays.asList(2, 4, 6), Arrays.asList(table2.getColumn("x").get(0, table2.size()))); + TestCase.assertEquals(Arrays.asList('a', 'b', 'c'), Arrays.asList(table2.getColumn("z").get(0, table2.size()))); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { show(table3); addToTable(table, i(7, 9), c("x", 4, 5), c("y", 'd', 'e')); @@ -90,14 +85,13 @@ public void testSelectAndUpdate() { TestCase.assertEquals(5, table.size()); TestCase.assertEquals(5, table2.size()); showWithIndex(table3); - TestCase.assertEquals(Arrays.asList(1, 2, 3, 4, 5), - Arrays.asList(table3.getColumn("q").get(0, table3.size()))); + TestCase.assertEquals(Arrays.asList(1, 2, 3, 4, 5), Arrays.asList(table3.getColumn("q").get(0, table3.size()))); TestCase.assertEquals(Arrays.asList(11, 12, 13, 14, 15), - Arrays.asList(table3.getColumn("p").get(0, table3.size()))); + Arrays.asList(table3.getColumn("p").get(0, table3.size()))); TestCase.assertEquals(Arrays.asList(2, 4, 6, 8, 10), - Arrays.asList(table2.getColumn("x").get(0, table2.size()))); + Arrays.asList(table2.getColumn("x").get(0, table2.size()))); TestCase.assertEquals(Arrays.asList('a', 'b', 'c', 'd', 'e'), - Arrays.asList(table2.getColumn("z").get(0, table2.size()))); + Arrays.asList(table2.getColumn("z").get(0, table2.size()))); TestCase.assertEquals(i(7, 9), base.added); TestCase.assertEquals(i(), base.removed); TestCase.assertEquals(i(), base.modified); @@ -110,9 +104,9 @@ public void testSelectAndUpdate() { TestCase.assertEquals(5, table2.size()); TestCase.assertEquals(Arrays.asList(2, 4, 6, 6, 20), - Arrays.asList(table2.getColumn("x").get(0, table2.size()))); + Arrays.asList(table2.getColumn("x").get(0, table2.size()))); 
TestCase.assertEquals(Arrays.asList('a', 'b', 'c', 'e', 'd'), - Arrays.asList(table2.getColumn("z").get(0, table2.size()))); + Arrays.asList(table2.getColumn("z").get(0, table2.size()))); TestCase.assertEquals(i(), base.added); TestCase.assertEquals(i(), base.removed); TestCase.assertEquals(i(7, 9), base.modified); @@ -125,11 +119,9 @@ public void testSelectAndUpdate() { TestCase.assertEquals(2, table2.size()); TestCase - .assertEquals(Arrays.asList(4, 20), - Arrays.asList(table2.getColumn("x").get(0, table2.size()))); + .assertEquals(Arrays.asList(4, 20), Arrays.asList(table2.getColumn("x").get(0, table2.size()))); TestCase - .assertEquals(Arrays.asList('b', 'd'), - Arrays.asList(table2.getColumn("z").get(0, table2.size()))); + .assertEquals(Arrays.asList('b', 'd'), Arrays.asList(table2.getColumn("z").get(0, table2.size()))); TestCase.assertEquals(i(), base.added); TestCase.assertEquals(i(2, 6, 7), base.removed); TestCase.assertEquals(i(), base.modified); @@ -142,35 +134,26 @@ public void testSelectAndUpdate() { TestCase.assertEquals(3, table.size()); TestCase.assertEquals(3, table2.size()); TestCase - .assertEquals(Arrays.asList(2, 44, 6), - Arrays.asList(table2.getColumn("x").get(0, table2.size()))); - TestCase.assertEquals(Arrays.asList('a', 'x', 'c'), - Arrays.asList(table2.getColumn("z").get(0, table2.size()))); + .assertEquals(Arrays.asList(2, 44, 6), Arrays.asList(table2.getColumn("x").get(0, table2.size()))); + TestCase.assertEquals(Arrays.asList('a', 'x', 'c'), Arrays.asList(table2.getColumn("z").get(0, table2.size()))); TestCase.assertEquals(i(2, 6), base.added); TestCase.assertEquals(i(9), base.removed); TestCase.assertEquals(i(4), base.modified); - final QueryTable table4 = - (QueryTable) TableTools.emptyTable(3).select("x = i*2", "y = \"\" + x"); + final QueryTable table4 = (QueryTable) TableTools.emptyTable(3).select("x = i*2", "y = \"\" + x"); TestCase - .assertEquals(Arrays.asList(0, 2, 4), - Arrays.asList(table4.getColumn("x").get(0, 
table4.size()))); - TestCase.assertEquals(Arrays.asList("0", "2", "4"), - Arrays.asList(table4.getColumn("y").get(0, table4.size()))); + .assertEquals(Arrays.asList(0, 2, 4), Arrays.asList(table4.getColumn("x").get(0, table4.size()))); + TestCase.assertEquals(Arrays.asList("0", "2", "4"), Arrays.asList(table4.getColumn("y").get(0, table4.size()))); final QueryTable table5 = (QueryTable) table4.update("z = x", "x = z + 1", "t = x - 3"); TestCase - .assertEquals(Arrays.asList(0, 2, 4), - Arrays.asList(table5.getColumn("z").get(0, table5.size()))); + .assertEquals(Arrays.asList(0, 2, 4), Arrays.asList(table5.getColumn("z").get(0, table5.size()))); TestCase - .assertEquals(Arrays.asList(1, 3, 5), - Arrays.asList(table5.getColumn("x").get(0, table5.size()))); + .assertEquals(Arrays.asList(1, 3, 5), Arrays.asList(table5.getColumn("x").get(0, table5.size()))); TestCase - .assertEquals(Arrays.asList(-2, 0, 2), - Arrays.asList(table5.getColumn("t").get(0, table5.size()))); + .assertEquals(Arrays.asList(-2, 0, 2), Arrays.asList(table5.getColumn("t").get(0, table5.size()))); - final QueryTable table6 = - TstUtils.testRefreshingTable(i(2, 4, 6), c("x", 1, 2, 3), c("y", 'a', 'b', 'c')); + final QueryTable table6 = TstUtils.testRefreshingTable(i(2, 4, 6), c("x", 1, 2, 3), c("y", 'a', 'b', 'c')); table2 = (QueryTable) table6.update("z = x", "x = z + 1", "t = x - 3"); final Listener table2Listener2 = base.newListenerWithGlobals(table2); table2.listenForUpdates(table2Listener2); @@ -238,43 +221,38 @@ public static int callCounter(int x) { @Test public void testLazyUpdate() { - // Skip this test if we are using kernel formulas, because FormulaKernel ignores lazy, and - // therefore all these + // Skip this test if we are using kernel formulas, because FormulaKernel ignores lazy, and therefore all these // callCounts are all going to be wrong. 
if (DhFormulaColumn.useKernelFormulasProperty) { - // We'd rather use Assume.assumeFalse() here, but we can't because we're using an old - // JUnit in this file. + // We'd rather use Assume.assumeFalse() here, but we can't because we're using an old JUnit in this file. return; } QueryLibrary.importStatic(QueryTableSelectUpdateTest.class); QueryTable table = TstUtils.testRefreshingTable(i(2, 4, 6), c("A", 1, 2, 3)); - table = (QueryTable) table.lazyUpdate( - "B=" + QueryTableSelectUpdateTest.class.getCanonicalName() + ".callCounter(A)"); + table = (QueryTable) table + .lazyUpdate("B=" + QueryTableSelectUpdateTest.class.getCanonicalName() + ".callCounter(A)"); TestCase.assertEquals(3, table.size()); TestCase.assertEquals(0, callCount); TestCase - .assertEquals(Arrays.asList(3, 6, 9), - Arrays.asList(table.getColumn("B").get(0, table.size()))); + .assertEquals(Arrays.asList(3, 6, 9), Arrays.asList(table.getColumn("B").get(0, table.size()))); TestCase.assertEquals(3, callCount); TestCase - .assertEquals(Arrays.asList(3, 6, 9), - Arrays.asList(table.getColumn("B").get(0, table.size()))); + .assertEquals(Arrays.asList(3, 6, 9), Arrays.asList(table.getColumn("B").get(0, table.size()))); TestCase.assertEquals(3, callCount); callCount = 0; - QueryTable table2 = - TstUtils.testRefreshingTable(i(2, 4, 6, 8, 10, 12), c("A", 1, 2, 3, 2, 3, 1)); - table2 = (QueryTable) table2.lazyUpdate( - "B=" + QueryTableSelectUpdateTest.class.getCanonicalName() + ".callCounter(A)"); + QueryTable table2 = TstUtils.testRefreshingTable(i(2, 4, 6, 8, 10, 12), c("A", 1, 2, 3, 2, 3, 1)); + table2 = (QueryTable) table2 + .lazyUpdate("B=" + QueryTableSelectUpdateTest.class.getCanonicalName() + ".callCounter(A)"); TestCase.assertEquals(6, table2.size()); TestCase.assertEquals(0, callCount); TestCase.assertEquals(Arrays.asList(3, 6, 9, 6, 9, 3), - Arrays.asList(table2.getColumn("B").get(0, table2.size()))); + Arrays.asList(table2.getColumn("B").get(0, table2.size()))); TestCase.assertEquals(3, 
callCount); TestCase.assertEquals(Arrays.asList(3, 6, 9, 6, 9, 3), - Arrays.asList(table2.getColumn("B").get(0, table2.size()))); + Arrays.asList(table2.getColumn("B").get(0, table2.size()))); TestCase.assertEquals(3, callCount); TestCase.assertEquals(3, table2.getColumnSource("B").getInt(2)); TestCase.assertEquals(3, table2.getColumnSource("B").get(2)); @@ -283,7 +261,7 @@ public void testLazyUpdate() { } private EvalNugget partialEvalNuggetFrom(Table sourceTable, boolean indexPositionChangesAllowed, - Supplier
    makeTable) { + Supplier
    makeTable) { return new PartialEvalNugget(sourceTable, indexPositionChangesAllowed) { @Override protected Table e() { @@ -310,8 +288,7 @@ public void onUpdate(Index added, Index removed, Index modified) { } @Override - public void onFailureInternal(Throwable originalException, - UpdatePerformanceTracker.Entry sourceEntry) { + public void onFailureInternal(Throwable originalException, UpdatePerformanceTracker.Entry sourceEntry) { exception = originalException; } @@ -370,41 +347,37 @@ public void validate(final String msg) { List issues = new ArrayList<>(); StringBuilder result = new StringBuilder(); if (originalValue.size() != recomputedValue.size()) { - issues.add("Result table has size " + originalValue.size() + " vs. expected " - + recomputedValue.size()); + issues.add("Result table has size " + originalValue.size() + " vs. expected " + recomputedValue.size()); } if (indexPositionChangesAllowed) { - // we should make sure that our added + removed is equal to the source added + - // removed size + // we should make sure that our added + removed is equal to the source added + removed size long sourceSizeChange = listener1.added.size() - listener1.removed.size(); long resultSizeChange = listener2.added.size() - listener2.removed.size(); if (sourceSizeChange != resultSizeChange) { - issues.add("Source changed size by " + sourceSizeChange - + ", but result changed size by " + resultSizeChange); + issues.add("Source changed size by " + sourceSizeChange + ", but result changed size by " + + resultSizeChange); } } else { Index sourceAddedPositions = sourceTable.getIndex().invert(listener1.added); - Index sourceRemovedPositions = - sourceTable.getIndex().getPrevIndex().invert(listener1.removed); + Index sourceRemovedPositions = sourceTable.getIndex().getPrevIndex().invert(listener1.removed); Index sourceModifiedPositions = sourceTable.getIndex().invert(listener1.modified); Index resultAddedPositions = originalValue.getIndex().invert(listener2.added); - Index 
resultRemovedPositions = - originalValue.getIndex().getPrevIndex().invert(listener2.removed); + Index resultRemovedPositions = originalValue.getIndex().getPrevIndex().invert(listener2.removed); Index resultModifiedPositions = originalValue.getIndex().invert(listener2.modified); if (!sourceAddedPositions.equals(resultAddedPositions)) { issues.add("Source Positions Added, " + sourceAddedPositions - + ", does not match result positions added, " + resultAddedPositions); + + ", does not match result positions added, " + resultAddedPositions); } if (!sourceRemovedPositions.equals(resultRemovedPositions)) { issues.add("Source Positions Removed, " + sourceRemovedPositions - + ", does not match result positions removed, " + resultRemovedPositions); + + ", does not match result positions removed, " + resultRemovedPositions); } if (!sourceModifiedPositions.equals(resultModifiedPositions)) { issues.add("Source Positions Modified, " + sourceModifiedPositions - + ", does not match result positions modified, " + resultModifiedPositions); + + ", does not match result positions modified, " + resultModifiedPositions); } } @@ -415,14 +388,10 @@ public void validate(final String msg) { if (LiveTableTestCase.printTableUpdates) { System.out.println("Positions to validate: " + checkInvert); - final Index.SequentialBuilder originalBuilder = - Index.FACTORY.getSequentialBuilder(); - final Index.SequentialBuilder recomputedBuilder = - Index.FACTORY.getSequentialBuilder(); - checkInvert - .forEach(x -> originalBuilder.appendKey(originalValue.getIndex().get(x))); - checkInvert - .forEach(x -> recomputedBuilder.appendKey(recomputedValue.getIndex().get(x))); + final Index.SequentialBuilder originalBuilder = Index.FACTORY.getSequentialBuilder(); + final Index.SequentialBuilder recomputedBuilder = Index.FACTORY.getSequentialBuilder(); + checkInvert.forEach(x -> originalBuilder.appendKey(originalValue.getIndex().get(x))); + checkInvert.forEach(x -> 
recomputedBuilder.appendKey(recomputedValue.getIndex().get(x))); System.out.println("Original Sub Table: " + checkInvert); TableTools.showWithIndex(originalValue.getSubTable(originalBuilder.getIndex())); @@ -430,13 +399,10 @@ public void validate(final String msg) { TableTools.showWithIndex(recomputedValue.getSubTable(recomputedBuilder.getIndex())); } - Map originalColumns = - originalValue.getColumnSourceMap(); - Map recomputedColumns = - recomputedValue.getColumnSourceMap(); + Map originalColumns = originalValue.getColumnSourceMap(); + Map recomputedColumns = recomputedValue.getColumnSourceMap(); - for (Map.Entry stringColumnSourceEntry : recomputedColumns - .entrySet()) { + for (Map.Entry stringColumnSourceEntry : recomputedColumns.entrySet()) { String columnName = stringColumnSourceEntry.getKey(); ColumnSource originalColumnSource = originalColumns.get(columnName); ColumnSource recomputedColumn = stringColumnSourceEntry.getValue(); @@ -448,10 +414,9 @@ public void validate(final String msg) { Object original = originalColumnSource.get(originalKey); Object recomputed = recomputedColumn.get(recomputedKey); - if (original != recomputed - && (original == null || !original.equals(recomputed))) { - issues.add("Mismatch at position " + position + "column " + columnName - + ": " + original + " != " + recomputed); + if (original != recomputed && (original == null || !original.equals(recomputed))) { + issues.add("Mismatch at position " + position + "column " + columnName + ": " + original + + " != " + recomputed); } } } @@ -474,13 +439,13 @@ public void validate(final String msg) { } private static final boolean RUN_SPARSE_REDIRECTION_UPDATE_TEST = - Configuration.getInstance().getBooleanWithDefault("runSparseRedirectionUpdateTest", false); + Configuration.getInstance().getBooleanWithDefault("runSparseRedirectionUpdateTest", false); @Test public void testSparseRedirectedUpdate() { // just skip this test, it is there to - // Assume.assumeTrue("We are purposefully skipping 
this very long running test that does not - // actually verify anything.", RUN_SPARSE_REDIRECTION_UPDATE_TEST); + // Assume.assumeTrue("We are purposefully skipping this very long running test that does not actually verify + // anything.", RUN_SPARSE_REDIRECTION_UPDATE_TEST); // the assumeTrue is not working, maybe because of the old junit version? if (RUN_SPARSE_REDIRECTION_UPDATE_TEST) { final boolean startUpdate = QueryTable.USE_REDIRECTED_COLUMNS_FOR_UPDATE; @@ -497,11 +462,9 @@ public void testSparseRedirectedUpdate() { private void doTestSparseRedirectedUpdate() { System.gc(); - final QueryTable leftTable = - new QueryTable(Index.FACTORY.getFlatIndex(99), Collections.emptyMap()); + final QueryTable leftTable = new QueryTable(Index.FACTORY.getFlatIndex(99), Collections.emptyMap()); leftTable.setRefreshing(true); - final QueryTable rightTable = - new QueryTable(Index.FACTORY.getFlatIndex(1), Collections.emptyMap()); + final QueryTable rightTable = new QueryTable(Index.FACTORY.getFlatIndex(1), Collections.emptyMap()); rightTable.setRefreshing(true); final Table leftWithKey = leftTable.updateView("Key=`a`", "LI=ii"); @@ -514,7 +477,7 @@ private void doTestSparseRedirectedUpdate() { System.gc(); System.gc(); final long startUsedMemory = - RuntimeMemory.getInstance().totalMemory() - RuntimeMemory.getInstance().freeMemory(); + RuntimeMemory.getInstance().totalMemory() - RuntimeMemory.getInstance().freeMemory(); for (int step = 0; step < 10000; ++step) { final int fstep = step; @@ -538,10 +501,9 @@ private void doTestSparseRedirectedUpdate() { final long freeMemory = RuntimeMemory.getInstance().freeMemory(); final long usedMemory = totalMemory - freeMemory; final long deltaUsed = usedMemory - startUsedMemory; - System.out - .println("Step = " + step + ", " + deltaUsed + "(" + usedMemory + "total) used, " - + freeMemory + " free / " + totalMemory + " total, " + " updated size=" - + updated.size() + ", memory/row=" + (deltaUsed / updated.size())); + 
System.out.println("Step = " + step + ", " + deltaUsed + "(" + usedMemory + "total) used, " + freeMemory + + " free / " + totalMemory + " total, " + " updated size=" + updated.size() + ", memory/row=" + + (deltaUsed / updated.size())); } } @@ -560,8 +522,7 @@ private void testUpdateIncremental(int seed, boolean useRedirection) { try { QueryTable.USE_REDIRECTED_COLUMNS_FOR_SELECT = useRedirection; QueryTable.USE_REDIRECTED_COLUMNS_FOR_UPDATE = useRedirection; - try (final SafeCloseable ignored = - LivenessScopeStack.open(new LivenessScope(true), true)) { + try (final SafeCloseable ignored = LivenessScopeStack.open(new LivenessScope(true), true)) { testUpdateIncremental(seed, new MutableInt(100)); } } finally { @@ -575,10 +536,10 @@ private void testUpdateIncremental(final int seed, MutableInt numSteps) { final TstUtils.ColumnInfo[] columnInfo; final int size = 25; final QueryTable queryTable = getTable(size, random, - columnInfo = initColumnInfos(new String[] {"Sym", "intCol", "doubleCol"}, - new SetGenerator<>("a", "b", "c", "d", "e"), - new IntGenerator(10, 100), - new SetGenerator<>(10.1, 20.1, 30.1))); + columnInfo = initColumnInfos(new String[] {"Sym", "intCol", "doubleCol"}, + new SetGenerator<>("a", "b", "c", "d", "e"), + new IntGenerator(10, 100), + new SetGenerator<>(10.1, 20.1, 30.1))); final Table sortedTable = queryTable.sort("intCol"); @@ -586,74 +547,63 @@ private void testUpdateIncremental(final int seed, MutableInt numSteps) { EvalNugget.from(() -> queryTable.update("intCol=intCol * 2")), EvalNugget.from(() -> queryTable.update("intCol=intCol + doubleCol")), EvalNugget.from(() -> queryTable.update("newCol=intCol / 2", "newCol2=newCol * 4")), - EvalNugget.from( - () -> queryTable.update("newCol=intCol / 2").update("newCol2=newCol * 4")), + EvalNugget.from(() -> queryTable.update("newCol=intCol / 2").update("newCol2=newCol * 4")), EvalNugget.from(() -> queryTable.select("intCol=intCol * 2")), EvalNugget.from(() -> queryTable.select("intCol=intCol + 
doubleCol")), - EvalNugget.from( - () -> queryTable.select("newCol=intCol / 2").update("newCol2=newCol * 4")), + EvalNugget.from(() -> queryTable.select("newCol=intCol / 2").update("newCol2=newCol * 4")), EvalNugget.from(() -> sortedTable.update("intCol=intCol * 2")), EvalNugget.from(() -> sortedTable.update("intCol=intCol + doubleCol")), - EvalNugget - .from(() -> sortedTable.update("newCol=intCol / 2", "newCol2=newCol * 4")), - EvalNugget.from( - () -> sortedTable.update("newCol=intCol / 2").update("newCol2=newCol * 4")), + EvalNugget.from(() -> sortedTable.update("newCol=intCol / 2", "newCol2=newCol * 4")), + EvalNugget.from(() -> sortedTable.update("newCol=intCol / 2").update("newCol2=newCol * 4")), EvalNugget.from(() -> sortedTable.select("intCol=intCol * 2")), EvalNugget.from(() -> sortedTable.select("intCol=intCol + doubleCol")), - EvalNugget.from( - () -> sortedTable.select("newCol=intCol / 2").update("newCol2=newCol * 4")), - partialEvalNuggetFrom(queryTable, false, - () -> queryTable.update("newCol=intCol / 2", "newCol2=newCol_[i] * 4")), + EvalNugget.from(() -> sortedTable.select("newCol=intCol / 2").update("newCol2=newCol * 4")), partialEvalNuggetFrom(queryTable, false, - () -> queryTable.update("newCol=intCol / 2", "newCol2=newCol_[i] * newCol")), + () -> queryTable.update("newCol=intCol / 2", "newCol2=newCol_[i] * 4")), partialEvalNuggetFrom(queryTable, false, - () -> queryTable.update("repeatedCol=doubleCol - 0.5", "newCol=intCol / 2", - "repeatedCol=newCol_[i] * repeatedCol")), + () -> queryTable.update("newCol=intCol / 2", "newCol2=newCol_[i] * newCol")), partialEvalNuggetFrom(queryTable, false, - () -> queryTable.update("newCol2=intCol / 2", "newCol=newCol2_[i] + 7")), + () -> queryTable.update("repeatedCol=doubleCol - 0.5", "newCol=intCol / 2", + "repeatedCol=newCol_[i] * repeatedCol")), partialEvalNuggetFrom(queryTable, false, - () -> queryTable.update("newCol=intCol_[i]")), + () -> queryTable.update("newCol2=intCol / 2", "newCol=newCol2_[i] + 
7")), + partialEvalNuggetFrom(queryTable, false, () -> queryTable.update("newCol=intCol_[i]")), partialEvalNuggetFrom(sortedTable, false, - () -> sortedTable.update("newCol=intCol / 2", "newCol2=newCol_[i] * 4")), + () -> sortedTable.update("newCol=intCol / 2", "newCol2=newCol_[i] * 4")), partialEvalNuggetFrom(sortedTable, false, - () -> sortedTable.update("newCol=intCol / 2", "newCol2=newCol_[i] * newCol")), + () -> sortedTable.update("newCol=intCol / 2", "newCol2=newCol_[i] * newCol")), partialEvalNuggetFrom(sortedTable, false, - () -> sortedTable.update("repeatedCol=doubleCol - 0.5", "newCol=intCol / 2", - "repeatedCol=newCol_[i] * repeatedCol")), + () -> sortedTable.update("repeatedCol=doubleCol - 0.5", "newCol=intCol / 2", + "repeatedCol=newCol_[i] * repeatedCol")), partialEvalNuggetFrom(sortedTable, false, - () -> sortedTable.update("newCol2=intCol / 2", "newCol=newCol2_[i] + 7")), - partialEvalNuggetFrom(sortedTable, false, - () -> sortedTable.update("newCol=intCol_[i]")), - partialEvalNuggetFrom(queryTable, true, - () -> queryTable.select("newCol=intCol_[i]")), + () -> sortedTable.update("newCol2=intCol / 2", "newCol=newCol2_[i] + 7")), + partialEvalNuggetFrom(sortedTable, false, () -> sortedTable.update("newCol=intCol_[i]")), + partialEvalNuggetFrom(queryTable, true, () -> queryTable.select("newCol=intCol_[i]")), partialEvalNuggetFrom(queryTable, true, - () -> queryTable.select("newCol=intCol / 2", "newCol2=newCol_[i] * 4")), + () -> queryTable.select("newCol=intCol / 2", "newCol2=newCol_[i] * 4")), partialEvalNuggetFrom(queryTable, true, - () -> queryTable.select("newCol=intCol / 2", "newCol2=newCol_[i] * newCol")), + () -> queryTable.select("newCol=intCol / 2", "newCol2=newCol_[i] * newCol")), partialEvalNuggetFrom(queryTable, true, - () -> queryTable.select("repeatedCol=doubleCol - 0.5", "newCol=intCol / 2", - "repeatedCol=newCol_[i] * repeatedCol")), + () -> queryTable.select("repeatedCol=doubleCol - 0.5", "newCol=intCol / 2", + "repeatedCol=newCol_[i] 
* repeatedCol")), partialEvalNuggetFrom(queryTable, true, - () -> queryTable.select("newCol2=intCol / 2", "newCol=newCol2_[i] + 7")), - partialEvalNuggetFrom(sortedTable, true, - () -> sortedTable.select("newCol=intCol_[i]")), + () -> queryTable.select("newCol2=intCol / 2", "newCol=newCol2_[i] + 7")), + partialEvalNuggetFrom(sortedTable, true, () -> sortedTable.select("newCol=intCol_[i]")), partialEvalNuggetFrom(sortedTable, true, - () -> sortedTable.select("newCol=intCol / 2", "newCol2=newCol_[i] * 4")), + () -> sortedTable.select("newCol=intCol / 2", "newCol2=newCol_[i] * 4")), partialEvalNuggetFrom(sortedTable, true, - () -> sortedTable.select("newCol=intCol / 2", "newCol2=newCol_[i] * newCol")), + () -> sortedTable.select("newCol=intCol / 2", "newCol2=newCol_[i] * newCol")), partialEvalNuggetFrom(sortedTable, true, - () -> sortedTable.select("repeatedCol=doubleCol - 0.5", "newCol=intCol / 2", - "repeatedCol=newCol_[i] * repeatedCol")), + () -> sortedTable.select("repeatedCol=doubleCol - 0.5", "newCol=intCol / 2", + "repeatedCol=newCol_[i] * repeatedCol")), partialEvalNuggetFrom(sortedTable, true, - () -> sortedTable.select("newCol2=intCol / 2", "newCol=newCol2_[i] + 7")), + () -> sortedTable.select("newCol2=intCol / 2", "newCol=newCol2_[i] + 7")), // This case is rather nasty, because we have an intermediate column to deal with. 
partialEvalNuggetFrom(queryTable, false, - () -> queryTable.update("newCol2=intCol / 2", "newCol=newCol2", - "newCol=newCol_[i] + 7")), + () -> queryTable.update("newCol2=intCol / 2", "newCol=newCol2", "newCol=newCol_[i] + 7")), partialEvalNuggetFrom(queryTable, false, - () -> queryTable.update("newCol=intCol / 2", "newCol=newCol_[i] + 7")), - new UpdateValidatorNugget( - queryTable.select("newCol=intCol / 2", "newCol=newCol_[i] + 7")), + () -> queryTable.update("newCol=intCol / 2", "newCol=newCol_[i] + 7")), + new UpdateValidatorNugget(queryTable.select("newCol=intCol / 2", "newCol=newCol_[i] + 7")), // Let's change the type of a column. EvalNugget.from(() -> queryTable.select("intCol = intCol/2")), EvalNugget.from(() -> queryTable.update("intCol = intCol/2")), @@ -661,11 +611,11 @@ private void testUpdateIncremental(final int seed, MutableInt numSteps) { EvalNugget.from(() -> queryTable.update("newCol = intCol > 50")), // Let's create a datetime and use it as an override partialEvalNuggetFrom(queryTable, false, - () -> queryTable.update("Time = new DBDateTime(0) + intCol * MINUTE") - .update("Diff = Time_[i]")), + () -> queryTable.update("Time = new DBDateTime(0) + intCol * MINUTE") + .update("Diff = Time_[i]")), partialEvalNuggetFrom(queryTable, true, - () -> queryTable.select("Time = new DBDateTime(0) + intCol * MINUTE") - .select("Time", "Diff = Time_[i]")), + () -> queryTable.select("Time = new DBDateTime(0) + intCol * MINUTE").select("Time", + "Diff = Time_[i]")), }; final int maxSteps = numSteps.intValue(); @@ -683,18 +633,17 @@ public void testUpdateIncrementalWithI() throws IOException { ColumnInfo columnInfo[]; int size = 50; final QueryTable queryTable = getTable(size, random, - columnInfo = initColumnInfos(new String[] {"Sym", "intCol", "doubleCol"}, - new SetGenerator<>("a", "b", "c", "d", "e"), - new IntGenerator(10, 100), - new SetGenerator<>(10.1, 20.1, 30.1))); + columnInfo = initColumnInfos(new String[] {"Sym", "intCol", "doubleCol"}, + new 
SetGenerator<>("a", "b", "c", "d", "e"), + new IntGenerator(10, 100), + new SetGenerator<>(10.1, 20.1, 30.1))); EvalNuggetInterface[] en = new TableComparator[] { new TableComparator(queryTable.update("CI=i"), queryTable.update("CI=i")), new TableComparator(queryTable.update("CK=k"), queryTable.update("CK=k")), - new TableComparator(queryTable.update("CI=i", "CK=k"), - queryTable.update("CI=i").update("CK=k")), + new TableComparator(queryTable.update("CI=i", "CK=k"), queryTable.update("CI=i").update("CK=k")), new TableComparator(queryTable.update("CI=i", "PI=CI_[i-1]"), - queryTable.update("CI=i").update("PI=CI_[i-1]")), + queryTable.update("CI=i").update("PI=CI_[i-1]")), }; @@ -724,11 +673,9 @@ public void testUpdateEmptyTable() throws IOException { TestCase.assertEquals(2, table.size()); TestCase.assertEquals(2, table2.size()); show(table2); - TestCase.assertEquals(Arrays.asList(0, 3), - Arrays.asList(table2.getColumn("x").get(0, table2.size()))); + TestCase.assertEquals(Arrays.asList(0, 3), Arrays.asList(table2.getColumn("x").get(0, table2.size()))); TestCase - .assertEquals(Arrays.asList("7", "9"), - Arrays.asList(table2.getColumn("y").get(0, table2.size()))); + .assertEquals(Arrays.asList("7", "9"), Arrays.asList(table2.getColumn("y").get(0, table2.size()))); TestCase.assertEquals(base.added, i(7, 9)); TestCase.assertEquals(base.removed, i()); TestCase.assertEquals(base.modified, i()); @@ -757,10 +704,8 @@ public void testUpdateIndex() throws IOException { TestCase.assertEquals(2, table.size()); TestCase.assertEquals(2, table2.size()); show(table2); - TestCase.assertEquals(Arrays.asList(0, 1), - Arrays.asList(table2.getColumn("Position").get(0, table2.size()))); - TestCase.assertEquals(Arrays.asList("7", "9"), - Arrays.asList(table2.getColumn("Key").get(0, table2.size()))); + TestCase.assertEquals(Arrays.asList(0, 1), Arrays.asList(table2.getColumn("Position").get(0, table2.size()))); + TestCase.assertEquals(Arrays.asList("7", "9"), 
Arrays.asList(table2.getColumn("Key").get(0, table2.size()))); TestCase.assertEquals(base.added, i(7, 9)); TestCase.assertEquals(base.removed, i()); TestCase.assertEquals(base.modified, i()); @@ -772,10 +717,8 @@ public void testUpdateIndex() throws IOException { TestCase.assertEquals(2, table.size()); TestCase.assertEquals(2, table2.size()); show(table2); - TestCase.assertEquals(Arrays.asList(0, 1), - Arrays.asList(table2.getColumn("Position").get(0, table2.size()))); - TestCase.assertEquals(Arrays.asList("7", "9"), - Arrays.asList(table2.getColumn("Key").get(0, table2.size()))); + TestCase.assertEquals(Arrays.asList(0, 1), Arrays.asList(table2.getColumn("Position").get(0, table2.size()))); + TestCase.assertEquals(Arrays.asList("7", "9"), Arrays.asList(table2.getColumn("Key").get(0, table2.size()))); TestCase.assertEquals(base.added, i()); TestCase.assertEquals(base.removed, i()); TestCase.assertEquals(base.modified, i(9)); @@ -785,8 +728,7 @@ public void testUpdateIndex() throws IOException { public void testUpdateArrayColumns() throws IOException { QueryTable table = TstUtils.testRefreshingTable(i()); QueryTable table2 = (QueryTable) table.update("Position=i", "PrevI=Position_[i-1]"); - // QueryTable table2 = (QueryTable) table.update("Position=i", "Key=\"\" + k", - // "PrevI=Position_[i-1]"); + // QueryTable table2 = (QueryTable) table.update("Position=i", "Key=\"\" + k", "PrevI=Position_[i-1]"); ListenerWithGlobals listener = base.newListenerWithGlobals(table2); table2.listenForUpdates(listener); @@ -803,12 +745,9 @@ public void testUpdateArrayColumns() throws IOException { TestCase.assertEquals(2, table.size()); TestCase.assertEquals(2, table2.size()); show(table2); - TestCase.assertEquals(Arrays.asList(0, 1), - Arrays.asList(table2.getColumn("Position").get(0, table2.size()))); - // assertEquals(Arrays.asList("7", "9"), Arrays.asList(table2.getColumn("Key").get(0, - // table2.size()))); - TestCase.assertEquals(Arrays.asList(null, 0), - 
Arrays.asList(table2.getColumn("PrevI").get(0, table2.size()))); + TestCase.assertEquals(Arrays.asList(0, 1), Arrays.asList(table2.getColumn("Position").get(0, table2.size()))); + // assertEquals(Arrays.asList("7", "9"), Arrays.asList(table2.getColumn("Key").get(0, table2.size()))); + TestCase.assertEquals(Arrays.asList(null, 0), Arrays.asList(table2.getColumn("PrevI").get(0, table2.size()))); TestCase.assertEquals(i(7, 9), base.added); TestCase.assertEquals(i(), base.removed); TestCase.assertEquals(i(), base.modified); @@ -820,12 +759,9 @@ public void testUpdateArrayColumns() throws IOException { TestCase.assertEquals(2, table.size()); TestCase.assertEquals(2, table2.size()); show(table2); - TestCase.assertEquals(Arrays.asList(0, 1), - Arrays.asList(table2.getColumn("Position").get(0, table2.size()))); - // assertEquals(Arrays.asList("7", "9"), Arrays.asList(table2.getColumn("Key").get(0, - // table2.size()))); - TestCase.assertEquals(Arrays.asList(null, 0), - Arrays.asList(table2.getColumn("PrevI").get(0, table2.size()))); + TestCase.assertEquals(Arrays.asList(0, 1), Arrays.asList(table2.getColumn("Position").get(0, table2.size()))); + // assertEquals(Arrays.asList("7", "9"), Arrays.asList(table2.getColumn("Key").get(0, table2.size()))); + TestCase.assertEquals(Arrays.asList(null, 0), Arrays.asList(table2.getColumn("PrevI").get(0, table2.size()))); TestCase.assertEquals(i(), base.added); TestCase.assertEquals(i(), base.removed); TestCase.assertEquals(i(9), base.modified); @@ -840,8 +776,8 @@ public void testLargeStringConstants() { a = new String(new char[40000]).replace("\0", "A"); b = new String(new char[40000]).replace("\0", "B"); - x = TableTools.emptyTable(1).update("C=String.join(\"" + a - + "\", Integer.toString(new Random().nextInt()), \"" + b + "\")"); + x = TableTools.emptyTable(1) + .update("C=String.join(\"" + a + "\", Integer.toString(new Random().nextInt()), \"" + b + "\")"); TestCase.assertEquals(1, x.getColumns().length); } @@ -879,8 +815,7 @@ 
public void testStaticAddressSpace() { @Test public void testSelectReuse() { - final QueryTable table = - TstUtils.testRefreshingTable(i(1, 1L << 20 + 1), longCol("Value", 1, 2)); + final QueryTable table = TstUtils.testRefreshingTable(i(1, 1L << 20 + 1), longCol("Value", 1, 2)); final Table selected = table.select(); assertTableEquals(table, selected); @@ -889,8 +824,7 @@ public void testSelectReuse() { assertTableEquals(table, selected2.dropColumns("Value2")); TestCase.assertSame(selected.getColumnSource("Value"), selected2.getColumnSource("Value")); - TestCase.assertNotSame(withUpdateView.getColumnSource("Value2"), - selected2.getColumnSource("Value2")); + TestCase.assertNotSame(withUpdateView.getColumnSource("Value2"), selected2.getColumnSource("Value2")); TestCase.assertTrue(selected2.getColumnSource("Value2") instanceof LongSparseArraySource); assertTableEquals(prevTable(table), prevTable(selected)); @@ -924,16 +858,14 @@ public void testSparseSelect() { int size = 1000; for (int seed = 0; seed < 10; ++seed) { System.out.println(DBDateTime.now() + ": Size = " + size + ", seed=" + seed); - try (final SafeCloseable ignored = - LivenessScopeStack.open(new LivenessScope(true), true)) { + try (final SafeCloseable ignored = LivenessScopeStack.open(new LivenessScope(true), true)) { testSparseSelect(size, seed); } } size = 10000; for (int seed = 0; seed < 1; ++seed) { System.out.println(DBDateTime.now() + ": Size = " + size + ", seed=" + seed); - try (final SafeCloseable ignored = - LivenessScopeStack.open(new LivenessScope(true), true)) { + try (final SafeCloseable ignored = LivenessScopeStack.open(new LivenessScope(true), true)) { testSparseSelect(size, seed); } } @@ -944,20 +876,20 @@ private void testSparseSelect(int size, int seed) { final TstUtils.ColumnInfo[] columnInfo; final QueryTable queryTable = getTable(size, random, - columnInfo = initColumnInfos( - new String[] {"Sym", "intCol", "doubleCol", "boolCol", "floatCol", "longCol", - "charCol", "byteCol", 
"shortCol", "dbDateTime"}, - new SetGenerator<>("a", "b", "c", "d", "e"), - new IntGenerator(10, 100), - new SetGenerator<>(10.1, 20.1, 30.1), - new BooleanGenerator(0.5, 0.1), - new FloatGenerator(-1000.0f, 1000.0f), - new LongGenerator(), - new CharGenerator('a', 'z'), - new ByteGenerator(), - new ShortGenerator(), - new UnsortedDateTimeGenerator(DBTimeUtils.convertDateTime("2019-01-10T00:00:00 NY"), - DBTimeUtils.convertDateTime("2019-01-20T00:00:00 NY")))); + columnInfo = initColumnInfos( + new String[] {"Sym", "intCol", "doubleCol", "boolCol", "floatCol", "longCol", "charCol", + "byteCol", "shortCol", "dbDateTime"}, + new SetGenerator<>("a", "b", "c", "d", "e"), + new IntGenerator(10, 100), + new SetGenerator<>(10.1, 20.1, 30.1), + new BooleanGenerator(0.5, 0.1), + new FloatGenerator(-1000.0f, 1000.0f), + new LongGenerator(), + new CharGenerator('a', 'z'), + new ByteGenerator(), + new ShortGenerator(), + new UnsortedDateTimeGenerator(DBTimeUtils.convertDateTime("2019-01-10T00:00:00 NY"), + DBTimeUtils.convertDateTime("2019-01-20T00:00:00 NY")))); final Table sortedTable = queryTable.sort("intCol"); @@ -994,33 +926,31 @@ public Table e() { }, new EvalNugget() { public Table e() { - return SparseSelect.sparseSelect( - TableTools.merge(sortedTable, sortedTable, sortedTable, sortedTable)); + return SparseSelect + .sparseSelect(TableTools.merge(sortedTable, sortedTable, sortedTable, sortedTable)); } }, new EvalNugget() { public Table e() { - return SparseSelect.sparseSelect( - TableTools.merge(queryTable, queryTable, queryTable, queryTable)); + return SparseSelect + .sparseSelect(TableTools.merge(queryTable, queryTable, queryTable, queryTable)); } }, new EvalNugget() { public Table e() { - return SparseSelect.sparseSelect( - TableTools.merge(sortedTable, sortedTable, sortedTable, sortedTable) - .by("Sym").sort("Sym").ungroup()); + return SparseSelect + .sparseSelect(TableTools.merge(sortedTable, sortedTable, sortedTable, sortedTable) + 
.by("Sym").sort("Sym").ungroup()); } }, new TableComparator( - TableTools.merge(sortedTable, sortedTable, sortedTable, sortedTable).by("Sym") - .sort("Sym").ungroup(), - SparseSelect.sparseSelect( - TableTools.merge(sortedTable, sortedTable, sortedTable, sortedTable) - .by("Sym").sort("Sym").ungroup())), + TableTools.merge(sortedTable, sortedTable, sortedTable, sortedTable).by("Sym").sort("Sym") + .ungroup(), + SparseSelect.sparseSelect(TableTools.merge(sortedTable, sortedTable, sortedTable, sortedTable) + .by("Sym").sort("Sym").ungroup())), new TableComparator(queryTable, SparseSelect.sparseSelect(queryTable)), new TableComparator(queryTable, - SparseSelect.partialSparseSelect(queryTable, - Arrays.asList("shortCol", "dbDateTime"))), + SparseSelect.partialSparseSelect(queryTable, Arrays.asList("shortCol", "dbDateTime"))), new TableComparator(sortedTable, SparseSelect.sparseSelect(sortedTable)) }; @@ -1040,8 +970,7 @@ public void testSparseSelectWideIndex() { builder.appendKey(1L << ii); intVals[ii] = ii; } - final QueryTable table = - TstUtils.testRefreshingTable(builder.getIndex(), intCol("Value", intVals)); + final QueryTable table = TstUtils.testRefreshingTable(builder.getIndex(), intCol("Value", intVals)); final Table selected = SparseSelect.sparseSelect(table); final String diff = TableTools.diff(selected, table, 10); TestCase.assertEquals("", diff); @@ -1050,8 +979,7 @@ public void testSparseSelectWideIndex() { @Test public void testSparseSelectSkipMemoryColumns() { final int[] intVals = {1, 2, 3, 4, 5}; - final Table table = - TstUtils.testRefreshingTable(Index.FACTORY.getFlatIndex(5), intCol("Value", intVals)) + final Table table = TstUtils.testRefreshingTable(Index.FACTORY.getFlatIndex(5), intCol("Value", intVals)) .update("V2=Value*2"); final Table selected = SparseSelect.sparseSelect(table); assertTableEquals(table, selected); @@ -1068,8 +996,7 @@ public void testSparseSelectSkipMemoryColumns() { @Test public void testSparseSelectReuse() { - final 
QueryTable table = - TstUtils.testRefreshingTable(i(1, 1L << 20 + 1), longCol("Value", 1, 2)); + final QueryTable table = TstUtils.testRefreshingTable(i(1, 1L << 20 + 1), longCol("Value", 1, 2)); final Table selected = SparseSelect.sparseSelect(table); @@ -1121,17 +1048,15 @@ public void testIds5212() { } /** - * In IDS-5614 it was observed that a dynamic table that starts out empty won't do its formula - * initialization (compilation, param grabbing etc) until later. This test confirms that this is - * fixed by setting a param to a valid value, calling QueryTable.update() and then setting it to - * a string value that won't compile. If the compilation/param grabbing is lazy, this test will - * fail. If you want to see the test fail under the old behavior, you would need to undo the - * bugfix that prevent this from happening: 1. In DHFormulaColumn.generateClassBody(), remove - * this 'if' guard and make the setting of params unconditional [adding this guard was the - * first, coarser bug fix] if (params == null) { // remove this if params = - * QueryScope.getDefaultInstance().getParams(userParams); // keep this line } 2. Conditionally - * invoke select.getDataView() (via SelectColumnLayer#getChunkSource()) only if it is needed in - * SelectColumnLayer#applyUpdate. + * In IDS-5614 it was observed that a dynamic table that starts out empty won't do its formula initialization + * (compilation, param grabbing etc) until later. This test confirms that this is fixed by setting a param to a + * valid value, calling QueryTable.update() and then setting it to a string value that won't compile. If the + * compilation/param grabbing is lazy, this test will fail. If you want to see the test fail under the old behavior, + * you would need to undo the bugfix that prevent this from happening: 1. 
In DHFormulaColumn.generateClassBody(), + * remove this 'if' guard and make the setting of params unconditional [adding this guard was the first, coarser bug + * fix] if (params == null) { // remove this if params = QueryScope.getDefaultInstance().getParams(userParams); // + * keep this line } 2. Conditionally invoke select.getDataView() (via SelectColumnLayer#getChunkSource()) only if it + * is needed in SelectColumnLayer#applyUpdate. */ @Test public void testEagerParamBinding() { diff --git a/DB/src/test/java/io/deephaven/db/v2/QueryTableSliceTest.java b/DB/src/test/java/io/deephaven/db/v2/QueryTableSliceTest.java index 187d7dcf24f..0bf59f3654c 100644 --- a/DB/src/test/java/io/deephaven/db/v2/QueryTableSliceTest.java +++ b/DB/src/test/java/io/deephaven/db/v2/QueryTableSliceTest.java @@ -31,11 +31,11 @@ private void testSliceIncremental(final String ctxt, final int size) throws IOEx final Random random = new Random(0); final TstUtils.ColumnInfo columnInfo[]; final QueryTable queryTable = getTable(size, random, - columnInfo = initColumnInfos(new String[] {"Sym", "intCol", "doubleCol", "Keys"}, - new TstUtils.SetGenerator<>("a", "b", "c", "d"), - new TstUtils.IntGenerator(10, 100), - new TstUtils.SetGenerator<>(10.1, 20.1, 30.1), - new TstUtils.SortedLongGenerator(0, Long.MAX_VALUE - 1))); + columnInfo = initColumnInfos(new String[] {"Sym", "intCol", "doubleCol", "Keys"}, + new TstUtils.SetGenerator<>("a", "b", "c", "d"), + new TstUtils.IntGenerator(10, 100), + new TstUtils.SetGenerator<>(10.1, 20.1, 30.1), + new TstUtils.SortedLongGenerator(0, Long.MAX_VALUE - 1))); final EvalNugget en[] = new EvalNugget[] { EvalNugget.from(() -> queryTable.head(0)), EvalNugget.from(() -> queryTable.update("x = Keys").head(0)), @@ -67,14 +67,12 @@ public Table e() { }, new EvalNugget() { public Table e() { - return queryTable.by("Sym").sort("Sym").head(0).update("x=sum(intCol)") - .head(0); + return queryTable.by("Sym").sort("Sym").head(0).update("x=sum(intCol)").head(0); } }, new 
EvalNugget() { public Table e() { - return queryTable.by("Sym", "intCol").sort("Sym", "intCol").head(0) - .update("x=intCol+1"); + return queryTable.by("Sym", "intCol").sort("Sym", "intCol").head(0).update("x=intCol+1"); } }, new EvalNugget() { @@ -115,14 +113,12 @@ public Table e() { }, new EvalNugget() { public Table e() { - return queryTable.by("Sym").sort("Sym").head(1).update("x=sum(intCol)") - .head(1); + return queryTable.by("Sym").sort("Sym").head(1).update("x=sum(intCol)").head(1); } }, new EvalNugget() { public Table e() { - return queryTable.by("Sym", "intCol").sort("Sym", "intCol").head(1) - .update("x=intCol+1"); + return queryTable.by("Sym", "intCol").sort("Sym", "intCol").head(1).update("x=intCol+1"); } }, new EvalNugget() { @@ -163,14 +159,12 @@ public Table e() { }, new EvalNugget() { public Table e() { - return queryTable.by("Sym").sort("Sym").head(10).update("x=sum(intCol)") - .head(10); + return queryTable.by("Sym").sort("Sym").head(10).update("x=sum(intCol)").head(10); } }, new EvalNugget() { public Table e() { - return queryTable.by("Sym", "intCol").sort("Sym", "intCol").head(10) - .update("x=intCol+1"); + return queryTable.by("Sym", "intCol").sort("Sym", "intCol").head(10).update("x=intCol+1"); } }, new EvalNugget() { @@ -211,14 +205,12 @@ public Table e() { }, new EvalNugget() { public Table e() { - return queryTable.by("Sym").sort("Sym").tail(0).update("x=sum(intCol)") - .tail(0); + return queryTable.by("Sym").sort("Sym").tail(0).update("x=sum(intCol)").tail(0); } }, new EvalNugget() { public Table e() { - return queryTable.by("Sym", "intCol").sort("Sym", "intCol").tail(0) - .update("x=intCol+1"); + return queryTable.by("Sym", "intCol").sort("Sym", "intCol").tail(0).update("x=intCol+1"); } }, new EvalNugget() { @@ -264,14 +256,12 @@ public Table e() { }, new EvalNugget() { public Table e() { - return queryTable.by("Sym").sort("Sym").tail(1).update("x=sum(intCol)") - .tail(1); + return 
queryTable.by("Sym").sort("Sym").tail(1).update("x=sum(intCol)").tail(1); } }, new EvalNugget() { public Table e() { - return queryTable.by("Sym", "intCol").sort("Sym", "intCol").tail(1) - .update("x=intCol+1"); + return queryTable.by("Sym", "intCol").sort("Sym", "intCol").tail(1).update("x=intCol+1"); } }, new EvalNugget() { @@ -312,14 +302,12 @@ public Table e() { }, new EvalNugget() { public Table e() { - return queryTable.by("Sym").sort("Sym").tail(10).update("x=sum(intCol)") - .tail(10); + return queryTable.by("Sym").sort("Sym").tail(10).update("x=sum(intCol)").tail(10); } }, new EvalNugget() { public Table e() { - return queryTable.by("Sym", "intCol").sort("Sym", "intCol").tail(10) - .update("x=intCol+1"); + return queryTable.by("Sym", "intCol").sort("Sym", "intCol").tail(10).update("x=intCol+1"); } }, new EvalNugget() { @@ -354,8 +342,7 @@ public Table e() { System.out.println("\n == Simple Step i = " + i); showWithIndex(queryTable); } - simulateShiftAwareStep(ctxt + " step == " + i, size, random, queryTable, columnInfo, - en); + simulateShiftAwareStep(ctxt + " step == " + i, size, random, queryTable, columnInfo, en); } } @@ -376,9 +363,8 @@ public void testTailWithGrowth() { Index added = Index.FACTORY.getIndexByRange(ii * jj, (ii + 1) * jj - 1); upTable.getIndex().insert(added); ShiftAwareListener.Update update = - new ShiftAwareListener.Update(added, Index.FACTORY.getEmptyIndex(), - Index.FACTORY.getEmptyIndex(), IndexShiftData.EMPTY, - ModifiedColumnSet.EMPTY); + new ShiftAwareListener.Update(added, Index.FACTORY.getEmptyIndex(), + Index.FACTORY.getEmptyIndex(), IndexShiftData.EMPTY, ModifiedColumnSet.EMPTY); upTable.notifyListeners(update); }); @@ -388,15 +374,14 @@ public void testTailWithGrowth() { } public void testLongTail() { - final Table bigTable = - emptyTable(2 * (long) (Integer.MAX_VALUE)).updateView("I=i", "II=ii"); + final Table bigTable = emptyTable(2 * (long) (Integer.MAX_VALUE)).updateView("I=i", "II=ii"); final Table tailed = 
bigTable.tail(1); assertEquals(2L * Integer.MAX_VALUE - 1, tailed.getColumn("II").get(0)); } public void testZeroHead() { final QueryTable table = TstUtils.testRefreshingTable(Index.FACTORY.getIndexByRange(10, 35), - TableTools.charCol("letter", "abcdefghijklmnopqrstuvwxyz".toCharArray())); + TableTools.charCol("letter", "abcdefghijklmnopqrstuvwxyz".toCharArray())); final Table noRows = table.head(0); assertEquals(0, noRows.size()); assertFalse(noRows.isLive()); @@ -407,7 +392,7 @@ public void testZeroHead() { public void testSlice() { final QueryTable table = TstUtils.testRefreshingTable(Index.FACTORY.getIndexByRange(10, 35), - TableTools.charCol("letter", "abcdefghijklmnopqrstuvwxyz".toCharArray())); + TableTools.charCol("letter", "abcdefghijklmnopqrstuvwxyz".toCharArray())); doSliceTest(table, "abcdefghij", 0, 10); doSliceTest(table, "cdefghij", 2, 10); @@ -425,27 +410,25 @@ public void testSlice() { doSliceTest(table, "c", 2, 3); } - private void doSliceTest(QueryTable table, String expected, int firstPositionInclusive, - int lastPositionExclusive) { + private void doSliceTest(QueryTable table, String expected, int firstPositionInclusive, int lastPositionExclusive) { final StringBuilder chars = new StringBuilder(); table.slice(firstPositionInclusive, lastPositionExclusive).characterColumnIterator("letter") - .forEachRemaining((Procedure.UnaryChar) chars::append); + .forEachRemaining((Procedure.UnaryChar) chars::append); final String result = chars.toString(); assertEquals(expected, result); } public void testHeadTailPct() { - final QueryTable table = - TstUtils.testRefreshingTable(i(2, 4, 6), c("x", 1, 2, 3), c("y", 'a', 'b', 'c')); + final QueryTable table = TstUtils.testRefreshingTable(i(2, 4, 6), c("x", 1, 2, 3), c("y", 'a', 'b', 'c')); assertEquals("", diff(table.headPct(0.5), - TstUtils.testRefreshingTable(i(2, 4), c("x", 1, 2), c("y", 'a', 'b')), 10)); + TstUtils.testRefreshingTable(i(2, 4), c("x", 1, 2), c("y", 'a', 'b')), 10)); assertEquals("", 
diff(table.tailPct(0.5), - TstUtils.testRefreshingTable(i(4, 6), c("x", 2, 3), c("y", 'b', 'c')), 10)); + TstUtils.testRefreshingTable(i(4, 6), c("x", 2, 3), c("y", 'b', 'c')), 10)); assertEquals("", diff(table.headPct(0.1), - TstUtils.testRefreshingTable(i(2), c("x", 1), c("y", 'a')), 10)); + TstUtils.testRefreshingTable(i(2), c("x", 1), c("y", 'a')), 10)); assertEquals("", diff(table.tailPct(0.1), - TstUtils.testRefreshingTable(i(6), c("x", 3), c("y", 'c')), 10)); + TstUtils.testRefreshingTable(i(6), c("x", 3), c("y", 'c')), 10)); } @@ -460,10 +443,10 @@ private void testHeadTailPctIncremental(final String ctxt, final int size) throw final Random random = new Random(0); final ColumnInfo[] columnInfo; final QueryTable queryTable = getTable(size, random, - columnInfo = initColumnInfos(new String[] {"Sym", "intCol", "doubleCol"}, - new SetGenerator<>("a", "b", "c", "d"), - new IntGenerator(10, 100), - new SetGenerator<>(10.1, 20.1, 30.1))); + columnInfo = initColumnInfos(new String[] {"Sym", "intCol", "doubleCol"}, + new SetGenerator<>("a", "b", "c", "d"), + new IntGenerator(10, 100), + new SetGenerator<>(10.1, 20.1, 30.1))); final EvalNugget[] en = new EvalNugget[] { new EvalNugget() { public Table e() { @@ -488,8 +471,7 @@ public Table e() { }; final int steps = 8; for (int i = 0; i < steps; i++) { - simulateShiftAwareStep(ctxt + " step == " + i, size, random, queryTable, columnInfo, - en); + simulateShiftAwareStep(ctxt + " step == " + i, size, random, queryTable, columnInfo, en); } } } diff --git a/DB/src/test/java/io/deephaven/db/v2/QueryTableSortTest.java b/DB/src/test/java/io/deephaven/db/v2/QueryTableSortTest.java index 5cdc104dc51..498e13b1cea 100644 --- a/DB/src/test/java/io/deephaven/db/v2/QueryTableSortTest.java +++ b/DB/src/test/java/io/deephaven/db/v2/QueryTableSortTest.java @@ -34,142 +34,112 @@ public class QueryTableSortTest extends QueryTableTestBase { public void testSort() { - final Table result0 = - newTable(c("Unsorted", 3.0, null, 2.0), 
c("DataToSort", "c", "a", "b")); + final Table result0 = newTable(c("Unsorted", 3.0, null, 2.0), c("DataToSort", "c", "a", "b")); show(result0.sort("Unsorted")); assertEquals(Arrays.asList(null, 2.0, 3.0), - Arrays.asList(result0.sort("Unsorted").getColumn("Unsorted").get(0, 3))); + Arrays.asList(result0.sort("Unsorted").getColumn("Unsorted").get(0, 3))); show(result0.sortDescending("Unsorted")); assertEquals(Arrays.asList(3.0, 2.0, null), - Arrays.asList(result0.sortDescending("Unsorted").getColumn("Unsorted").get(0, 3))); + Arrays.asList(result0.sortDescending("Unsorted").getColumn("Unsorted").get(0, 3))); Table result1 = newTable(c("Unsorted", 4.0, 3.0, 1.1, Double.NaN, 2.0, 1.0, 5.0), - c("DataToSort", "e", "d", "b", "g", "c", "a", "f")); + c("DataToSort", "e", "d", "b", "g", "c", "a", "f")); final Table nanSorted = result1.sort("Unsorted"); show(nanSorted); assertEquals(Arrays.asList(1.0, 1.1, 2.0, 3.0, 4.0, 5.0, Double.NaN), - Arrays.asList(nanSorted.getColumn("Unsorted").get(0, 7))); + Arrays.asList(nanSorted.getColumn("Unsorted").get(0, 7))); assertEquals(Arrays.asList("a", "b", "c", "d", "e", "f", "g"), - Arrays.asList(nanSorted.getColumn("DataToSort").get(0, 7))); + Arrays.asList(nanSorted.getColumn("DataToSort").get(0, 7))); result1 = newTable(c("Unsorted", 4.1f, 3.1f, 1.2f, Float.NaN, 2.1f, 1.1f, 5.1f), - c("DataToSort", "e", "d", "b", "g", "c", "a", "f")); + c("DataToSort", "e", "d", "b", "g", "c", "a", "f")); final Table nanFloatSorted = result1.sort("Unsorted"); System.out.println("result1"); show(result1); System.out.println("nanFloatedSorted"); show(nanFloatSorted); assertEquals(Arrays.asList(1.1f, 1.2f, 2.1f, 3.1f, 4.1f, 5.1f, Float.NaN), - Arrays.asList(nanFloatSorted.getColumn("Unsorted").get(0, 7))); + Arrays.asList(nanFloatSorted.getColumn("Unsorted").get(0, 7))); assertEquals(Arrays.asList("a", "b", "c", "d", "e", "f", "g"), - Arrays.asList(nanFloatSorted.getColumn("DataToSort").get(0, 7))); + 
Arrays.asList(nanFloatSorted.getColumn("DataToSort").get(0, 7))); - Table result = - newTable(c("Unsorted", 3, 1, 2), c("DataToSort", "c", "a", "b")).sort("DataToSort"); + Table result = newTable(c("Unsorted", 3, 1, 2), c("DataToSort", "c", "a", "b")).sort("DataToSort"); assertEquals(Arrays.asList(1, 2, 3), Arrays.asList(result.getColumn("Unsorted").get(0, 3))); - assertEquals(Arrays.asList("a", "b", "c"), - Arrays.asList(result.getColumn("DataToSort").get(0, 3))); - result = newTable(c("Unsorted", 3, 1, 2), c("DataToSort", "c", "a", "b")) - .sortDescending("DataToSort"); + assertEquals(Arrays.asList("a", "b", "c"), Arrays.asList(result.getColumn("DataToSort").get(0, 3))); + result = newTable(c("Unsorted", 3, 1, 2), c("DataToSort", "c", "a", "b")).sortDescending("DataToSort"); assertEquals(Arrays.asList(3, 2, 1), Arrays.asList(result.getColumn("Unsorted").get(0, 3))); - assertEquals(Arrays.asList("c", "b", "a"), - Arrays.asList(result.getColumn("DataToSort").get(0, 3))); - - result = - newTable(c("Unsorted", '3', '1', '2'), c("DataToSort", "c", "a", "b")).sort("Unsorted"); - assertEquals(Arrays.asList('1', '2', '3'), - Arrays.asList(result.getColumn("Unsorted").get(0, 3))); - assertEquals(Arrays.asList("a", "b", "c"), - Arrays.asList(result.getColumn("DataToSort").get(0, 3))); - result = newTable(c("Unsorted", '3', '1', '2'), c("DataToSort", "c", "a", "b")) - .sortDescending("Unsorted"); - assertEquals(Arrays.asList('3', '2', '1'), - Arrays.asList(result.getColumn("Unsorted").get(0, 3))); - assertEquals(Arrays.asList("c", "b", "a"), - Arrays.asList(result.getColumn("DataToSort").get(0, 3))); + assertEquals(Arrays.asList("c", "b", "a"), Arrays.asList(result.getColumn("DataToSort").get(0, 3))); + + result = newTable(c("Unsorted", '3', '1', '2'), c("DataToSort", "c", "a", "b")).sort("Unsorted"); + assertEquals(Arrays.asList('1', '2', '3'), Arrays.asList(result.getColumn("Unsorted").get(0, 3))); + assertEquals(Arrays.asList("a", "b", "c"), 
Arrays.asList(result.getColumn("DataToSort").get(0, 3))); + result = newTable(c("Unsorted", '3', '1', '2'), c("DataToSort", "c", "a", "b")).sortDescending("Unsorted"); + assertEquals(Arrays.asList('3', '2', '1'), Arrays.asList(result.getColumn("Unsorted").get(0, 3))); + assertEquals(Arrays.asList("c", "b", "a"), Arrays.asList(result.getColumn("DataToSort").get(0, 3))); final ColumnHolder c1 = TstUtils.cG("Unsorted", 3, 1, 2); final Table table = newTable(c1, c("DataToSort", "c", "a", "b")); result = table.sort("DataToSort"); assertEquals(Arrays.asList(1, 2, 3), Arrays.asList(result.getColumn("Unsorted").get(0, 3))); - assertEquals(Arrays.asList("a", "b", "c"), - Arrays.asList(result.getColumn("DataToSort").get(0, 3))); + assertEquals(Arrays.asList("a", "b", "c"), Arrays.asList(result.getColumn("DataToSort").get(0, 3))); final ColumnHolder c11 = TstUtils.cG("Unsorted", 3, 1, 2); result = newTable(c11, c("DataToSort", "c", "a", "b")).sortDescending("DataToSort"); assertEquals(Arrays.asList(3, 2, 1), Arrays.asList(result.getColumn("Unsorted").get(0, 3))); - assertEquals(Arrays.asList("c", "b", "a"), - Arrays.asList(result.getColumn("DataToSort").get(0, 3))); + assertEquals(Arrays.asList("c", "b", "a"), Arrays.asList(result.getColumn("DataToSort").get(0, 3))); final ColumnHolder c2 = TstUtils.cG("Unsorted", '3', '1', '2'); result = newTable(c2, c("DataToSort", "c", "a", "b")).sort("Unsorted"); - assertEquals(Arrays.asList('1', '2', '3'), - Arrays.asList(result.getColumn("Unsorted").get(0, 3))); - assertEquals(Arrays.asList("a", "b", "c"), - Arrays.asList(result.getColumn("DataToSort").get(0, 3))); + assertEquals(Arrays.asList('1', '2', '3'), Arrays.asList(result.getColumn("Unsorted").get(0, 3))); + assertEquals(Arrays.asList("a", "b", "c"), Arrays.asList(result.getColumn("DataToSort").get(0, 3))); final ColumnHolder c22 = TstUtils.cG("Unsorted", '3', '1', '2'); result = newTable(c22, c("DataToSort", "c", "a", "b")).sortDescending("Unsorted"); - 
assertEquals(Arrays.asList('3', '2', '1'), - Arrays.asList(result.getColumn("Unsorted").get(0, 3))); - assertEquals(Arrays.asList("c", "b", "a"), - Arrays.asList(result.getColumn("DataToSort").get(0, 3))); + assertEquals(Arrays.asList('3', '2', '1'), Arrays.asList(result.getColumn("Unsorted").get(0, 3))); + assertEquals(Arrays.asList("c", "b", "a"), Arrays.asList(result.getColumn("DataToSort").get(0, 3))); - final Table input = newTable(c("C1", 2, 4, 2, 4), c("C2", '1', '1', '2', '2'), - c("Witness", "a", "b", "c", "d")); + final Table input = + newTable(c("C1", 2, 4, 2, 4), c("C2", '1', '1', '2', '2'), c("Witness", "a", "b", "c", "d")); System.out.println("Input:"); TableTools.showWithIndex(input); result = input.sort("C1", "C2"); System.out.println("Result:"); TableTools.showWithIndex(result); assertEquals(Arrays.asList(2, 2, 4, 4), Arrays.asList(result.getColumn("C1").get(0, 4))); - assertEquals(Arrays.asList('1', '2', '1', '2'), - Arrays.asList(result.getColumn("C2").get(0, 4))); - assertEquals(Arrays.asList("a", "c", "b", "d"), - Arrays.asList(result.getColumn("Witness").get(0, 4))); + assertEquals(Arrays.asList('1', '2', '1', '2'), Arrays.asList(result.getColumn("C2").get(0, 4))); + assertEquals(Arrays.asList("a", "c", "b", "d"), Arrays.asList(result.getColumn("Witness").get(0, 4))); - result = newTable(c("C1", 2, 4, 2, 4), c("C2", '2', '2', '1', '1'), - c("Witness", "a", "b", "c", "d")).sort("C2", "C1"); + result = newTable(c("C1", 2, 4, 2, 4), c("C2", '2', '2', '1', '1'), c("Witness", "a", "b", "c", "d")).sort("C2", + "C1"); assertEquals(Arrays.asList(2, 4, 2, 4), Arrays.asList(result.getColumn("C1").get(0, 4))); - assertEquals(Arrays.asList('1', '1', '2', '2'), - Arrays.asList(result.getColumn("C2").get(0, 4))); - assertEquals(Arrays.asList("c", "d", "a", "b"), - Arrays.asList(result.getColumn("Witness").get(0, 4))); + assertEquals(Arrays.asList('1', '1', '2', '2'), Arrays.asList(result.getColumn("C2").get(0, 4))); + assertEquals(Arrays.asList("c", "d", 
"a", "b"), Arrays.asList(result.getColumn("Witness").get(0, 4))); - result = newTable(c("C1", 2, 4, 2, 4), c("C2", '1', '1', '2', '2'), - c("Witness", "a", "b", "c", "d")).sortDescending("C1", "C2"); + result = newTable(c("C1", 2, 4, 2, 4), c("C2", '1', '1', '2', '2'), c("Witness", "a", "b", "c", "d")) + .sortDescending("C1", "C2"); assertEquals(Arrays.asList(4, 4, 2, 2), Arrays.asList(result.getColumn("C1").get(0, 4))); - assertEquals(Arrays.asList('2', '1', '2', '1'), - Arrays.asList(result.getColumn("C2").get(0, 4))); - assertEquals(Arrays.asList("d", "b", "c", "a"), - Arrays.asList(result.getColumn("Witness").get(0, 4))); + assertEquals(Arrays.asList('2', '1', '2', '1'), Arrays.asList(result.getColumn("C2").get(0, 4))); + assertEquals(Arrays.asList("d", "b", "c", "a"), Arrays.asList(result.getColumn("Witness").get(0, 4))); - result = newTable(c("C1", 2, 4, 2, 4), c("C2", '2', '2', '1', '1'), - c("Witness", "a", "b", "c", "d")).sortDescending("C2", "C1"); + result = newTable(c("C1", 2, 4, 2, 4), c("C2", '2', '2', '1', '1'), c("Witness", "a", "b", "c", "d")) + .sortDescending("C2", "C1"); assertEquals(Arrays.asList(4, 2, 4, 2), Arrays.asList(result.getColumn("C1").get(0, 4))); - assertEquals(Arrays.asList('2', '2', '1', '1'), - Arrays.asList(result.getColumn("C2").get(0, 4))); - assertEquals(Arrays.asList("b", "a", "d", "c"), - Arrays.asList(result.getColumn("Witness").get(0, 4))); + assertEquals(Arrays.asList('2', '2', '1', '1'), Arrays.asList(result.getColumn("C2").get(0, 4))); + assertEquals(Arrays.asList("b", "a", "d", "c"), Arrays.asList(result.getColumn("Witness").get(0, 4))); final ColumnHolder c3 = TstUtils.cG("Unsorted", '3', '1', '2', null); result = newTable(c3, c("DataToSort", "c", "a", "b", "d")).sort("Unsorted"); show(result); - assertEquals(Arrays.asList(null, '1', '2', '3'), - Arrays.asList(result.getColumn("Unsorted").get(0, 4))); - assertEquals(Arrays.asList("d", "a", "b", "c"), - Arrays.asList(result.getColumn("DataToSort").get(0, 4))); + 
assertEquals(Arrays.asList(null, '1', '2', '3'), Arrays.asList(result.getColumn("Unsorted").get(0, 4))); + assertEquals(Arrays.asList("d", "a", "b", "c"), Arrays.asList(result.getColumn("DataToSort").get(0, 4))); final ColumnHolder c4 = TstUtils.cG("Unsorted", '3', '1', null, '2'); result = newTable(c4, c("DataToSort", "c", "a", "d", "b")).sortDescending("Unsorted"); - assertEquals(Arrays.asList('3', '2', '1', null), - Arrays.asList(result.getColumn("Unsorted").get(0, 4))); - assertEquals(Arrays.asList("c", "b", "a", "d"), - Arrays.asList(result.getColumn("DataToSort").get(0, 4))); + assertEquals(Arrays.asList('3', '2', '1', null), Arrays.asList(result.getColumn("Unsorted").get(0, 4))); + assertEquals(Arrays.asList("c", "b", "a", "d"), Arrays.asList(result.getColumn("DataToSort").get(0, 4))); } public void testSort2() { - final QueryTable table = - testRefreshingTable(i(10, 20, 30), c("A", 3, 1, 2), c("B", "c", "a", "b")); + final QueryTable table = testRefreshingTable(i(10, 20, 30), c("A", 3, 1, 2), c("B", "c", "a", "b")); final QueryTable sorted = (QueryTable) table.sort("A"); show(sorted); @@ -199,8 +169,7 @@ public void testSort2() { }); show(sorted); - assertTableEquals(testRefreshingTable(c("A", 1, 2, 3, 3), c("B", "A3", "b", "c", "C2")), - sorted); + assertTableEquals(testRefreshingTable(c("A", 1, 2, 3, 3), c("B", "A3", "b", "c", "C2")), sorted); final Table sortedTwice = sorted.sort("A"); assertSame(sorted, sortedTwice); @@ -222,25 +191,22 @@ public void testSort2() { public void testGroupedSortRefreshing() { final Table table = testRefreshingTable(Index.FACTORY.getFlatIndex(9), - cG("A", "Apple", "Apple", "Apple", "Banana", "Banana", "Banana", "Canteloupe", - "Canteloupe", "Canteloupe"), - c("Secondary", "C", "A", "B", "C", "A", "B", "C", "A", "B")).update("Sentinel=i"); + cG("A", "Apple", "Apple", "Apple", "Banana", "Banana", "Banana", "Canteloupe", "Canteloupe", + "Canteloupe"), + c("Secondary", "C", "A", "B", "C", "A", "B", "C", "A", 
"B")).update("Sentinel=i"); final QueryTable sorted = (QueryTable) table.dropColumns("Secondary").sortDescending("A"); show(sorted); - assertTableEquals( - newTable(col("A", "Canteloupe", "Canteloupe", "Canteloupe", "Banana", "Banana", - "Banana", "Apple", "Apple", "Apple"), col("Sentinel", 6, 7, 8, 3, 4, 5, 0, 1, 2)), - sorted); + assertTableEquals(newTable(col("A", "Canteloupe", "Canteloupe", "Canteloupe", "Banana", "Banana", "Banana", + "Apple", "Apple", "Apple"), col("Sentinel", 6, 7, 8, 3, 4, 5, 0, 1, 2)), sorted); - final QueryTable sorted2 = - (QueryTable) table.sort(SortPair.descending("A"), SortPair.ascending("Secondary")); + final QueryTable sorted2 = (QueryTable) table.sort(SortPair.descending("A"), SortPair.ascending("Secondary")); show(sorted2); assertTableEquals(newTable( - col("A", "Canteloupe", "Canteloupe", "Canteloupe", "Banana", "Banana", "Banana", - "Apple", "Apple", "Apple"), - col("Secondary", "A", "B", "C", "A", "B", "C", "A", "B", "C"), - col("Sentinel", 7, 8, 6, 4, 5, 3, 1, 2, 0)), sorted2); + col("A", "Canteloupe", "Canteloupe", "Canteloupe", "Banana", "Banana", "Banana", "Apple", "Apple", + "Apple"), + col("Secondary", "A", "B", "C", "A", "B", "C", "A", "B", "C"), + col("Sentinel", 7, 8, 6, 4, 5, 3, 1, 2, 0)), sorted2); } public void testGroupedSortHistorical() { @@ -259,11 +225,9 @@ private void testGroupedSortHistorical(int size) { } final Table grouped = - testTable(Index.FACTORY.getFlatIndex(values.length), cG("Captain", values)) - .update("Sentinel=i"); + testTable(Index.FACTORY.getFlatIndex(values.length), cG("Captain", values)).update("Sentinel=i"); final Table nogroups = - testTable(Index.FACTORY.getFlatIndex(values.length), c("Captain", values)) - .update("Sentinel=i"); + testTable(Index.FACTORY.getFlatIndex(values.length), c("Captain", values)).update("Sentinel=i"); final Table sortedGrouped = grouped.sortDescending("Captain"); final Table sortedNoGroups = nogroups.sortDescending("Captain"); @@ -273,17 +237,15 @@ private void 
testGroupedSortHistorical(int size) { } public void testSortBool() { - final QueryTable table = testRefreshingTable(i(10, 20, 30, 40, 50), - c("boolCol", false, true, null, true, false)); + final QueryTable table = + testRefreshingTable(i(10, 20, 30, 40, 50), c("boolCol", false, true, null, true, false)); final QueryTable sorted = (QueryTable) table.sort("boolCol"); show(sorted); - assertEquals("", - diff(sorted, testRefreshingTable(c("boolCol", null, false, false, true, true)), 10)); + assertEquals("", diff(sorted, testRefreshingTable(c("boolCol", null, false, false, true, true)), 10)); final QueryTable descending = (QueryTable) table.sort(SortPair.descending("boolCol")); show(descending); - assertEquals("", diff(descending, - testRefreshingTable(c("boolCol", true, true, false, false, null)), 10)); + assertEquals("", diff(descending, testRefreshingTable(c("boolCol", true, true, false, false, null)), 10)); } public void testSortIncremental2() { @@ -297,14 +259,13 @@ public void testMultiColumnRuns() { final Random random = new Random(0); final ColumnInfo columnInfo[]; final QueryTable queryTable = getTable(10000, random, - columnInfo = initColumnInfos( - new String[] {"bool1", "bool2", "bool3", "bool4", "bool5", "Sentinel"}, - new BooleanGenerator(0.25, 0.25), - new BooleanGenerator(0.50, 0.25), - new BooleanGenerator(), - new BooleanGenerator(), - new BooleanGenerator(), - new IntGenerator(0, 100000))); + columnInfo = initColumnInfos(new String[] {"bool1", "bool2", "bool3", "bool4", "bool5", "Sentinel"}, + new BooleanGenerator(0.25, 0.25), + new BooleanGenerator(0.50, 0.25), + new BooleanGenerator(), + new BooleanGenerator(), + new BooleanGenerator(), + new IntGenerator(0, 100000))); final EvalNugget en[] = new EvalNugget[] { EvalNugget.from(() -> queryTable.sort("bool1")), @@ -320,14 +281,12 @@ public Table e() { }, new EvalNugget() { public Table e() { - return queryTable.sortDescending("bool1", "bool2", "bool3", "bool4", - "bool5"); + return 
queryTable.sortDescending("bool1", "bool2", "bool3", "bool4", "bool5"); } }, new EvalNugget() { public Table e() { - return queryTable.sortDescending("bool5", "bool4", "bool3", "bool2", - "bool1"); + return queryTable.sortDescending("bool5", "bool4", "bool3", "bool2", "bool1"); } }, }; @@ -339,24 +298,23 @@ public Table e() { private ColumnInfo[] getIncrementalColumnInfo() { return initColumnInfos( - new String[] {"Sym", "intCol", "doubleCol", "floatCol", "longCol", "shortCol", - "byteCol", "charCol", "boolCol", "bigI", "bigD", "Keys"}, - new SetGenerator<>("a", "b", "c", "d"), - new IntGenerator(10, 100), - new SetGenerator<>(10.1, 20.1, 30.1), - new FloatGenerator(100.0f, 2000.f), - new LongGenerator(), - new ShortGenerator(), - new ByteGenerator(), - new CharGenerator('A', 'Z'), - new BooleanGenerator(), - new BigIntegerGenerator(BigInteger.valueOf(100000), BigInteger.valueOf(100100)), - new BigDecimalGenerator(BigInteger.valueOf(100000), BigInteger.valueOf(100100)), - new SortedLongGenerator(0, Long.MAX_VALUE - 1)); + new String[] {"Sym", "intCol", "doubleCol", "floatCol", "longCol", "shortCol", "byteCol", "charCol", + "boolCol", "bigI", "bigD", "Keys"}, + new SetGenerator<>("a", "b", "c", "d"), + new IntGenerator(10, 100), + new SetGenerator<>(10.1, 20.1, 30.1), + new FloatGenerator(100.0f, 2000.f), + new LongGenerator(), + new ShortGenerator(), + new ByteGenerator(), + new CharGenerator('A', 'Z'), + new BooleanGenerator(), + new BigIntegerGenerator(BigInteger.valueOf(100000), BigInteger.valueOf(100100)), + new BigDecimalGenerator(BigInteger.valueOf(100000), BigInteger.valueOf(100100)), + new SortedLongGenerator(0, Long.MAX_VALUE - 1)); } - private void testSortIncremental(final String ctxt, final int size, int seed, - MutableInt numSteps) { + private void testSortIncremental(final String ctxt, final int size, int seed, MutableInt numSteps) { final int maxSteps = numSteps.intValue(); final Random random = new Random(seed); final ColumnInfo[] columnInfo = 
getIncrementalColumnInfo(); @@ -366,19 +324,17 @@ private void testSortIncremental(final String ctxt, final int size, int seed, EvalNugget.from(() -> queryTable.sort("Sym")), EvalNugget.from(() -> queryTable.update("x = Keys").sortDescending("intCol")), EvalNugget.from(() -> queryTable.updateView("x = Keys").sort("Sym", "intCol")) - .hasUnstableColumns("x"), + .hasUnstableColumns("x"), EvalNugget.from(() -> queryTable.by("Sym").sort("Sym")), EvalNugget.from(() -> queryTable.by("Sym", "intCol").sort("Sym", "intCol")), EvalNugget.from(() -> queryTable.sort("Sym").update("x=intCol+1")), EvalNugget.from(() -> queryTable.sortDescending("intCol").update("x=intCol+1")), EvalNugget.from(() -> queryTable.sort("Sym", "intCol").update("x=intCol+1")), EvalNugget.from(() -> queryTable.by("Sym").sort("Sym").update("x=sum(intCol)")), - EvalNugget.from(() -> queryTable.by("Sym", "intCol").sort("Sym", "intCol") - .update("x=intCol+1")), - EvalNugget.from(() -> queryTable.sort(SortPair.ascending("Sym"), - SortPair.descending("intCol"))), - EvalNugget.from(() -> queryTable.sort(SortPair.ascending("Sym"), - SortPair.descending("intCol"), SortPair.ascending("doubleCol"))), + EvalNugget.from(() -> queryTable.by("Sym", "intCol").sort("Sym", "intCol").update("x=intCol+1")), + EvalNugget.from(() -> queryTable.sort(SortPair.ascending("Sym"), SortPair.descending("intCol"))), + EvalNugget.from(() -> queryTable.sort(SortPair.ascending("Sym"), SortPair.descending("intCol"), + SortPair.ascending("doubleCol"))), EvalNugget.from(() -> queryTable.sort(SortPair.ascending("floatCol"))), EvalNugget.from(() -> queryTable.sort(SortPair.ascending("doubleCol"))), EvalNugget.from(() -> queryTable.sort(SortPair.ascending("byteCol"))), @@ -402,8 +358,7 @@ private void testSortIncremental(final String ctxt, final int size, int seed, }; for (numSteps.setValue(0); numSteps.intValue() < maxSteps; numSteps.increment()) { - simulateShiftAwareStep(ctxt + " step == " + numSteps.getValue(), size, random, - 
queryTable, columnInfo, en); + simulateShiftAwareStep(ctxt + " step == " + numSteps.getValue(), size, random, queryTable, columnInfo, en); } } @@ -417,73 +372,62 @@ public void testSortPerformance() { // If decreasing sort, always add at beginning // Expected performance: very fast. Expect add ratio of 100%, others 0. performTests("simple increasing", 1, 0, 0, - ii -> ii * 10); + ii -> ii * 10); // sequence: 0, -10, -20, -30, -40 // increasing sort: always add at beginning // decreasing sort: always add at end // Expected performance: very fast. Expect add ratio of 100%, others 0. performTests("simple decreasing", 1, 0, 0, - ii -> -ii * 10); + ii -> -ii * 10); // sequence: 0, -10, 20, -30, 40, -50 - // (both increasing and decreasing sort) alternates between adding at end and adding at - // beginning + // (both increasing and decreasing sort) alternates between adding at end and adding at beginning // Expected performance: very fast. Expect add ratio of 100%, others 0. performTests("external alternating", 1, 0, 0, - ii -> (ii % 2) == 0 ? ii * 10 : -ii * 10); + ii -> (ii % 2) == 0 ? ii * 10 : -ii * 10); // (Just for the sake of the readability of this comment, assume "large" is 100) // sequence: 100, -99, 98, -97, 96, -95, 94, -93, ... // (both increasing and decreasing sort): always insert at the middle - // Expected performance: modest (sometimes getting lucky, sometimes being forced to move - // elements, sometimes + // Expected performance: modest (sometimes getting lucky, sometimes being forced to move elements, sometimes // being forced to re-spread). - // Expected performance: medium? Not really sure what numbers to use here. Just going to say - // 20, 20, 75. + // Expected performance: medium? Not really sure what numbers to use here. Just going to say 20, 20, 75. // The actual numbers will be a function of test size. performTests("internal always near median", 20, 20, 100, - ii -> (ii % 2) == 0 ? large - ii : -large + ii); + ii -> (ii % 2) == 0 ? 
large - ii : -large + ii); // sequence: large, large+1, large+2, ..., large+9, 0, 1, 2, 3, 4, ... // increasing sort: always insert at a position 10 before back // decreasing sort: insert at a position further and further away from the front // Expected performance: Should be very fast (just moving 10 elements each time). // Rationale: - // In the sort-ascending case, (once things get going), the new elements are to the right of - // the median, so the - // code will be operating in the forward direction and always simply push [large...large+9] - // to the right. + // In the sort-ascending case, (once things get going), the new elements are to the right of the median, so the + // code will be operating in the forward direction and always simply push [large...large+9] to the right. // - // In the sort-descending case, (once things get going), the new elements are to the left of - // the median, so - // the code will be operating in the reverse direction and always simply push - // [large+9...large] to the left. + // In the sort-descending case, (once things get going), the new elements are to the left of the median, so + // the code will be operating in the reverse direction and always simply push [large+9...large] to the left. // Ratios: 1 add, 0 remove, 10 modified. performTests("block of 10 at end", 1, 0, 10, - ii -> ii < 10 ? large + ii : ii - 10); + ii -> ii < 10 ? large + ii : ii - 10); // sequence: 0, 1, 2, ..., 9, large, large-1, large-2, ... // increasing sort: always insert at a position 10 after front // decreasing sort: insert at a position further and further away from the back - // Expected performance: Should be very fast (just moving 10 elements each time). Similar - // rationale to the above + // Expected performance: Should be very fast (just moving 10 elements each time). Similar rationale to the above // Ratios: 1 add, 0 remove, 10 modified. performTests("block of 10 at beginning", 1, 0, 10, - ii -> ii < 10 ? 
ii : large - ii + 10); + ii -> ii < 10 ? ii : large - ii + 10); } - private void performTests(String what, long addedRatioLimit, long removedRatioLimit, - long modifiedRatioLimit, - LongUnaryOperator generator) { - performTestsInDirection(what, addedRatioLimit, removedRatioLimit, modifiedRatioLimit, - generator, true); - performTestsInDirection(what, addedRatioLimit, removedRatioLimit, modifiedRatioLimit, - generator, false); + private void performTests(String what, long addedRatioLimit, long removedRatioLimit, long modifiedRatioLimit, + LongUnaryOperator generator) { + performTestsInDirection(what, addedRatioLimit, removedRatioLimit, modifiedRatioLimit, generator, true); + performTestsInDirection(what, addedRatioLimit, removedRatioLimit, modifiedRatioLimit, generator, false); } private void performTestsInDirection(String what, long addedRatioLimit, long removedRatioLimit, - long modifiedRatioLimit, LongUnaryOperator generator, boolean ascending) { + long modifiedRatioLimit, LongUnaryOperator generator, boolean ascending) { final int numValues = 10000; final long[] values = new long[numValues]; @@ -492,10 +436,10 @@ private void performTestsInDirection(String what, long addedRatioLimit, long rem } final QueryTable queryTable = TstUtils.testRefreshingTable(i(0), - c("intCol", values[0])); + c("intCol", values[0])); final QueryTable sorted = (QueryTable) (ascending ? queryTable.sort("intCol") - : queryTable.sortDescending("intCol")); + : queryTable.sortDescending("intCol")); final SimpleShiftAwareListener simpleListener = new SimpleShiftAwareListener(sorted); sorted.listenForUpdates(simpleListener); @@ -522,8 +466,7 @@ private void performTestsInDirection(String what, long addedRatioLimit, long rem final double removedRatio = removes / denominator; final double modifiedRatio = modifies / denominator; - final String description = - String.format("\"%s\": sort %s", what, ascending ? 
"ascending" : "descending"); + final String description = String.format("\"%s\": sort %s", what, ascending ? "ascending" : "descending"); System.out.println("Results for: " + description); System.out.println("Add Ratio: " + addedRatio); System.out.println("Removed Ratio: " + removedRatio); @@ -538,9 +481,9 @@ private void performTestsInDirection(String what, long addedRatioLimit, long rem public void testSortIncremental() { final QueryTable queryTable = TstUtils.testRefreshingTable(i(1, 2, 4, 6), - c("Sym", "aa", "bc", "aa", "aa"), - c("intCol", 10, 20, 30, 50), - c("doubleCol", 0.1, 0.2, 0.3, 0.5)); + c("Sym", "aa", "bc", "aa", "aa"), + c("intCol", 10, 20, 30, 50), + c("doubleCol", 0.1, 0.2, 0.3, 0.5)); final EvalNuggetInterface[] en = new EvalNuggetInterface[] { EvalNugget.from(() -> queryTable.sort("Sym").view("Sym")), @@ -552,26 +495,21 @@ public void testSortIncremental() { EvalNugget.from(() -> queryTable.sortDescending("intCol").update("x=intCol+1")), EvalNugget.from(() -> queryTable.sort("Sym", "intCol").update("x=intCol+1")), EvalNugget.from(() -> queryTable.by("Sym").sort("Sym").update("x=sum(intCol)")), - EvalNugget.from(() -> queryTable.by("Sym", "intCol").sort("Sym", "intCol") - .update("x=intCol+1")), + EvalNugget.from(() -> queryTable.by("Sym", "intCol").sort("Sym", "intCol").update("x=intCol+1")), new TableComparator( - queryTable.updateView("ok=k").sort(SortPair.ascending("Sym"), - SortPair.descending("intCol")), - "Single Sort", - queryTable.updateView("ok=k").sortDescending("intCol").sort("Sym"), - "Double Sort") + queryTable.updateView("ok=k").sort(SortPair.ascending("Sym"), SortPair.descending("intCol")), + "Single Sort", queryTable.updateView("ok=k").sortDescending("intCol").sort("Sym"), + "Double Sort") }; LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - addToTable(queryTable, i(3, 9), c("Sym", "aa", "aa"), c("intCol", 20, 10), - c("doubleCol", 2.1, 2.2)); + addToTable(queryTable, i(3, 9), c("Sym", "aa", "aa"), c("intCol", 20, 10), 
c("doubleCol", 2.1, 2.2)); queryTable.notifyListeners(i(3, 9), i(), i()); }); TstUtils.validate(en); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - addToTable(queryTable, i(1, 9), c("Sym", "bc", "aa"), c("intCol", 30, 11), - c("doubleCol", 2.1, 2.2)); + addToTable(queryTable, i(1, 9), c("Sym", "bc", "aa"), c("intCol", 30, 11), c("doubleCol", 2.1, 2.2)); queryTable.notifyListeners(i(), i(), i(1, 9)); }); TstUtils.validate(en); @@ -588,10 +526,10 @@ public void testSortFloatIncremental() { final Random random = new Random(0); final ColumnInfo columnInfo[]; final QueryTable queryTable = getTable(100, random, - columnInfo = initColumnInfos(new String[] {"intCol", "doubleCol", "floatCol"}, - new IntGenerator(10, 10000), - new DoubleGenerator(0, 1000, 0.1, 0.1, 0.05, 0.05), - new FloatGenerator(0, 1000, 0.1, 0.1, 0.05, 0.05))); + columnInfo = initColumnInfos(new String[] {"intCol", "doubleCol", "floatCol"}, + new IntGenerator(10, 10000), + new DoubleGenerator(0, 1000, 0.1, 0.1, 0.05, 0.05), + new FloatGenerator(0, 1000, 0.1, 0.1, 0.05, 0.05))); final EvalNugget en[] = new EvalNugget[] { new EvalNugget() { public Table e() { @@ -616,20 +554,17 @@ public Table e() { }; final int steps = 50; // 8; for (int i = 0; i < steps; i++) { - simulateShiftAwareStep("floatSort step == " + i, 100, random, queryTable, columnInfo, - en); + simulateShiftAwareStep("floatSort step == " + i, 100, random, queryTable, columnInfo, en); } } public void testGrowingMergeReinterpret() { final QueryTable table = testRefreshingTable(i(1), c("Sentinel", 1)); - final Table viewed = - table.update("Timestamp='2019-04-11T09:30 NY' + (ii * 60L * 1000000000L)"); + final Table viewed = table.update("Timestamp='2019-04-11T09:30 NY' + (ii * 60L * 1000000000L)"); final Table sorted = TableTools.merge(viewed, viewed).sortDescending("Timestamp"); for (int ii = 2; ii < 10000; ++ii) { - // Use large enough indices that we blow beyond merge's initially reserved 64k - // key-space. 
+ // Use large enough indices that we blow beyond merge's initially reserved 64k key-space. final int fii = 8059 * ii; LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { addToTable(table, i(fii), c("Sentinel", fii)); @@ -654,37 +589,37 @@ private void doReinterpretTest(Table table) { final Table mergeSorted = merged.sort("Timestamp"); final TIntList sentinels = new TIntArrayList(); - mergeSorted.columnIterator("Sentinel") - .forEachRemaining(sentinel -> sentinels.add((int) sentinel)); - assertEquals("sentinels", new TIntArrayList(new int[] {100, 0, 104, 4, 108, 8, 101, 1, 105, - 5, 109, 9, 102, 2, 106, 6, 103, 3, 107, 7}), - sentinels); + mergeSorted.columnIterator("Sentinel").forEachRemaining(sentinel -> sentinels.add((int) sentinel)); + assertEquals("sentinels", + new TIntArrayList( + new int[] {100, 0, 104, 4, 108, 8, 101, 1, 105, 5, 109, 9, 102, 2, 106, 6, 103, 3, 107, 7}), + sentinels); sentinels.clear(); final Table mergeSorted2 = merged.sort("Timestamp"); - mergeSorted2.columnIterator("Sentinel") - .forEachRemaining(sentinel -> sentinels.add((int) sentinel)); - assertEquals("sentinels", new TIntArrayList(new int[] {100, 0, 104, 4, 108, 8, 101, 1, 105, - 5, 109, 9, 102, 2, 106, 6, 103, 3, 107, 7}), - sentinels); + mergeSorted2.columnIterator("Sentinel").forEachRemaining(sentinel -> sentinels.add((int) sentinel)); + assertEquals("sentinels", + new TIntArrayList( + new int[] {100, 0, 104, 4, 108, 8, 101, 1, 105, 5, 109, 9, 102, 2, 106, 6, 103, 3, 107, 7}), + sentinels); sentinels.clear(); final Table boolSorted = merged.sort("Truthiness"); - boolSorted.columnIterator("Sentinel") - .forEachRemaining(sentinel -> sentinels.add((int) sentinel)); - assertEquals("sentinels", new TIntArrayList(new int[] {102, 105, 108, 2, 5, 8, 101, 104, - 107, 1, 4, 7, 100, 103, 106, 109, 0, 3, 6, 9}), - sentinels); + boolSorted.columnIterator("Sentinel").forEachRemaining(sentinel -> sentinels.add((int) sentinel)); + assertEquals("sentinels", + new TIntArrayList( + new int[] 
{102, 105, 108, 2, 5, 8, 101, 104, 107, 1, 4, 7, 100, 103, 106, 109, 0, 3, 6, 9}), + sentinels); sentinels.clear(); // we are redirecting the union now, which should also be reinterpreted final Table boolInverseSorted = boolSorted.sortDescending("Timestamp"); - boolInverseSorted.columnIterator("Sentinel") - .forEachRemaining(sentinel -> sentinels.add((int) sentinel)); - assertEquals("sentinels", new TIntArrayList(new int[] {107, 7, 103, 3, 106, 6, 102, 2, 109, - 9, 105, 5, 101, 1, 108, 8, 104, 4, 100, 0}), - sentinels); + boolInverseSorted.columnIterator("Sentinel").forEachRemaining(sentinel -> sentinels.add((int) sentinel)); + assertEquals("sentinels", + new TIntArrayList( + new int[] {107, 7, 103, 3, 106, 6, 102, 2, 109, 9, 105, 5, 101, 1, 108, 8, 104, 4, 100, 0}), + sentinels); sentinels.clear(); } @@ -702,33 +637,33 @@ private void doReinterpretTestIncremental(Table table) { } final TIntList sentinels = new TIntArrayList(); - mergeSorted.columnIterator("Sentinel") - .forEachRemaining(sentinel -> sentinels.add((int) sentinel)); - assertEquals("sentinels", new TIntArrayList(new int[] {100, 0, 104, 4, 108, 8, 101, 1, 105, - 5, 109, 9, 102, 2, 106, 6, 103, 3, 107, 7}), - sentinels); + mergeSorted.columnIterator("Sentinel").forEachRemaining(sentinel -> sentinels.add((int) sentinel)); + assertEquals("sentinels", + new TIntArrayList( + new int[] {100, 0, 104, 4, 108, 8, 101, 1, 105, 5, 109, 9, 102, 2, 106, 6, 103, 3, 107, 7}), + sentinels); sentinels.clear(); - mergeSorted2.columnIterator("Sentinel") - .forEachRemaining(sentinel -> sentinels.add((int) sentinel)); - assertEquals("sentinels", new TIntArrayList(new int[] {100, 0, 104, 4, 108, 8, 101, 1, 105, - 5, 109, 9, 102, 2, 106, 6, 103, 3, 107, 7}), - sentinels); + mergeSorted2.columnIterator("Sentinel").forEachRemaining(sentinel -> sentinels.add((int) sentinel)); + assertEquals("sentinels", + new TIntArrayList( + new int[] {100, 0, 104, 4, 108, 8, 101, 1, 105, 5, 109, 9, 102, 2, 106, 6, 103, 3, 107, 7}), + 
sentinels); sentinels.clear(); - boolSorted.columnIterator("Sentinel") - .forEachRemaining(sentinel -> sentinels.add((int) sentinel)); - assertEquals("sentinels", new TIntArrayList(new int[] {102, 105, 108, 2, 5, 8, 101, 104, - 107, 1, 4, 7, 100, 103, 106, 109, 0, 3, 6, 9}), - sentinels); + boolSorted.columnIterator("Sentinel").forEachRemaining(sentinel -> sentinels.add((int) sentinel)); + assertEquals("sentinels", + new TIntArrayList( + new int[] {102, 105, 108, 2, 5, 8, 101, 104, 107, 1, 4, 7, 100, 103, 106, 109, 0, 3, 6, 9}), + sentinels); sentinels.clear(); - boolInverseSorted.columnIterator("Sentinel") - .forEachRemaining(sentinel -> sentinels.add((int) sentinel)); - assertEquals("sentinels", new TIntArrayList(new int[] {107, 7, 103, 3, 106, 6, 102, 2, 109, - 9, 105, 5, 101, 1, 108, 8, 104, 4, 100, 0}), - sentinels); + boolInverseSorted.columnIterator("Sentinel").forEachRemaining(sentinel -> sentinels.add((int) sentinel)); + assertEquals("sentinels", + new TIntArrayList( + new int[] {107, 7, 103, 3, 106, 6, 102, 2, 109, 9, 105, 5, 101, 1, 108, 8, 104, 4, 100, 0}), + sentinels); sentinels.clear(); } @@ -743,26 +678,20 @@ private void doSymbolTableTest(Table table) { TableTools.showWithIndex(symbolSorted); final TIntList sentinels = new TIntArrayList(); - symbolSorted.columnIterator("Sentinel") - .forEachRemaining(sentinel -> sentinels.add((int) sentinel)); - assertEquals("sentinels", new TIntArrayList(new int[] {0, 3, 6, 9, 1, 4, 7, 2, 5, 8}), - sentinels); + symbolSorted.columnIterator("Sentinel").forEachRemaining(sentinel -> sentinels.add((int) sentinel)); + assertEquals("sentinels", new TIntArrayList(new int[] {0, 3, 6, 9, 1, 4, 7, 2, 5, 8}), sentinels); sentinels.clear(); final Table tsSorted = table.sort("Timestamp"); TableTools.showWithIndex(tsSorted); - tsSorted.columnIterator("Sentinel") - .forEachRemaining(sentinel -> sentinels.add((int) sentinel)); - assertEquals("sentinels", new TIntArrayList(new int[] {0, 4, 8, 1, 5, 9, 2, 6, 3, 7}), - sentinels); + 
tsSorted.columnIterator("Sentinel").forEachRemaining(sentinel -> sentinels.add((int) sentinel)); + assertEquals("sentinels", new TIntArrayList(new int[] {0, 4, 8, 1, 5, 9, 2, 6, 3, 7}), sentinels); sentinels.clear(); final Table boolSorted = table.sort("Truthiness"); TableTools.showWithIndex(boolSorted); - boolSorted.columnIterator("Sentinel") - .forEachRemaining(sentinel -> sentinels.add((int) sentinel)); - assertEquals("sentinels", new TIntArrayList(new int[] {2, 5, 8, 1, 4, 7, 0, 3, 6, 9}), - sentinels); + boolSorted.columnIterator("Sentinel").forEachRemaining(sentinel -> sentinels.add((int) sentinel)); + assertEquals("sentinels", new TIntArrayList(new int[] {2, 5, 8, 1, 4, 7, 0, 3, 6, 9}), sentinels); sentinels.clear(); } @@ -790,10 +719,8 @@ private void doSymbolTableIncrementalTest(Table table) { TableTools.showWithIndex(symbolSorted); final TIntList sentinels = new TIntArrayList(); - symbolSorted.columnIterator("Sentinel") - .forEachRemaining(sentinel -> sentinels.add((int) sentinel)); - assertEquals("sentinels", new TIntArrayList(new int[] {0, 3, 6, 9, 1, 4, 7, 2, 5, 8}), - sentinels); + symbolSorted.columnIterator("Sentinel").forEachRemaining(sentinel -> sentinels.add((int) sentinel)); + assertEquals("sentinels", new TIntArrayList(new int[] {0, 3, 6, 9, 1, 4, 7, 2, 5, 8}), sentinels); sentinels.clear(); } @@ -801,23 +728,22 @@ private void diskBackedTestHarness(Consumer
    testFunction) throws IOExcept final File testDirectory = Files.createTempDirectory("SymbolTableTest").toFile(); final TableDefinition definition = TableDefinition.of( - ColumnDefinition.ofInt("Sentinel"), - ColumnDefinition.ofString("Symbol"), - ColumnDefinition.ofTime("Timestamp"), - ColumnDefinition.ofBoolean("Truthiness")); + ColumnDefinition.ofInt("Sentinel"), + ColumnDefinition.ofString("Symbol"), + ColumnDefinition.ofTime("Timestamp"), + ColumnDefinition.ofBoolean("Truthiness")); final String[] syms = new String[] {"Apple", "Banana", "Cantaloupe"}; final DBDateTime baseTime = DBTimeUtils.convertDateTime("2019-04-11T09:30 NY"); final long dateOffset[] = new long[] {0, 5, 10, 15, 1, 6, 11, 16, 2, 7}; - final Boolean booleans[] = - new Boolean[] {true, false, null, true, false, null, true, false, null, true, false}; + final Boolean booleans[] = new Boolean[] {true, false, null, true, false, null, true, false, null, true, false}; QueryScope.addParam("syms", syms); QueryScope.addParam("baseTime", baseTime); QueryScope.addParam("dateOffset", dateOffset); QueryScope.addParam("booleans", booleans); final Table source = emptyTable(10).updateView("Sentinel=i", "Symbol=syms[i % syms.length]", - "Timestamp=baseTime+dateOffset[i]*3600L*1000000000L", "Truthiness=booleans[i]"); + "Timestamp=baseTime+dateOffset[i]*3600L*1000000000L", "Truthiness=booleans[i]"); testDirectory.mkdirs(); final File dest = new File(testDirectory, "Table.parquet"); try { diff --git a/DB/src/test/java/io/deephaven/db/v2/QueryTableStaticNaturalJoinRandomTest.java b/DB/src/test/java/io/deephaven/db/v2/QueryTableStaticNaturalJoinRandomTest.java index 0f1f9f02a16..0e45e295518 100644 --- a/DB/src/test/java/io/deephaven/db/v2/QueryTableStaticNaturalJoinRandomTest.java +++ b/DB/src/test/java/io/deephaven/db/v2/QueryTableStaticNaturalJoinRandomTest.java @@ -32,8 +32,8 @@ public class QueryTableStaticNaturalJoinRandomTest extends QueryTableTestBase { private final static boolean DO_STATIC_JOIN_PRINT = 
false; - private static void testNaturalJoinRandomStatic(int seed, int leftSize, int rightSize, - Class dataType, boolean grouped, boolean flattenLeft, @Nullable JoinControl control) { + private static void testNaturalJoinRandomStatic(int seed, int leftSize, int rightSize, Class dataType, + boolean grouped, boolean flattenLeft, @Nullable JoinControl control) { final Logger log = new StreamLoggerImpl(); final Random random = new Random(seed); @@ -53,45 +53,41 @@ private static void testNaturalJoinRandomStatic(int seed, int leftSize, int righ leftGenerator = new TstUtils.CharGenerator((char) 1, (char) rightSize); rightGenerator = new TstUtils.UniqueCharGenerator((char) 1, (char) rightSize); } else if (dataType == String.class) { - final TstUtils.UniqueStringGenerator uniqueStringGenerator = - new TstUtils.UniqueStringGenerator(); + final TstUtils.UniqueStringGenerator uniqueStringGenerator = new TstUtils.UniqueStringGenerator(); final Set duplicateRights = new HashSet<>(); while (duplicateRights.size() < ((rightSize * 0.1) / 10)) { duplicateRights.add("Dup-" + Long.toHexString(random.nextLong())); } - final List> generatorList = - Arrays.asList(uniqueStringGenerator, new TstUtils.SetGenerator<>( - duplicateRights.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY))); + final List> generatorList = Arrays.asList(uniqueStringGenerator, + new TstUtils.SetGenerator<>(duplicateRights.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY))); rightGenerator = new TstUtils.CompositeGenerator<>(generatorList, 0.9); leftGenerator = new TstUtils.FromUniqueStringGenerator(uniqueStringGenerator, 0.5); } else if (dataType == SmartKey.class) { final TstUtils.UniqueSmartKeyGenerator uniqueSmartKeyGenerator = - new TstUtils.UniqueSmartKeyGenerator( - new TstUtils.LongGenerator(0, 2 * (long) Math.sqrt(rightSize)), - new TstUtils.IntGenerator(0, 2 * (int) Math.sqrt(rightSize))); - final TstUtils.SmartKeyGenerator defaultGenerator = new TstUtils.SmartKeyGenerator( - new TstUtils.LongGenerator(0, 
(long) Math.sqrt(rightSize)), - new TstUtils.IntGenerator(0, (int) Math.sqrt(rightSize))); + new TstUtils.UniqueSmartKeyGenerator(new TstUtils.LongGenerator(0, 2 * (long) Math.sqrt(rightSize)), + new TstUtils.IntGenerator(0, 2 * (int) Math.sqrt(rightSize))); + final TstUtils.SmartKeyGenerator defaultGenerator = + new TstUtils.SmartKeyGenerator(new TstUtils.LongGenerator(0, (long) Math.sqrt(rightSize)), + new TstUtils.IntGenerator(0, (int) Math.sqrt(rightSize))); rightGenerator = uniqueSmartKeyGenerator; - leftGenerator = new TstUtils.FromUniqueSmartKeyGenerator(uniqueSmartKeyGenerator, - defaultGenerator, 0.75); + leftGenerator = new TstUtils.FromUniqueSmartKeyGenerator(uniqueSmartKeyGenerator, defaultGenerator, 0.75); } else { throw new UnsupportedOperationException("Invalid Data Type: " + dataType); } final QueryTable rightTable = getTable(false, rightSize, random, - initColumnInfos(new String[] {"JoinKey", "RightSentinel"}, - rightGenerator, - new TstUtils.IntGenerator())); + initColumnInfos(new String[] {"JoinKey", "RightSentinel"}, + rightGenerator, + new TstUtils.IntGenerator())); final List leftKeyAttributes = - grouped ? Collections.singletonList(TstUtils.ColumnInfo.ColAttributes.Grouped) - : Collections.emptyList(); + grouped ? 
Collections.singletonList(TstUtils.ColumnInfo.ColAttributes.Grouped) + : Collections.emptyList(); final QueryTable leftTable = getTable(false, leftSize, random, - initColumnInfos(new String[] {"JoinKey", "LeftSentinel"}, - Arrays.asList(leftKeyAttributes, Collections.emptyList()), leftGenerator, - new TstUtils.IntGenerator())); + initColumnInfos(new String[] {"JoinKey", "LeftSentinel"}, + Arrays.asList(leftKeyAttributes, Collections.emptyList()), leftGenerator, + new TstUtils.IntGenerator())); String matchKeys = "JoinKey"; Table rightJoinTable = rightTable; @@ -100,8 +96,7 @@ private static void testNaturalJoinRandomStatic(int seed, int leftSize, int righ final String updateString; if (dataType == int.class) { - final TIntIntMap rightMap = - new TIntIntHashMap(rightTable.intSize(), 0.5f, -1, NULL_INT); + final TIntIntMap rightMap = new TIntIntHashMap(rightTable.intSize(), 0.5f, -1, NULL_INT); // noinspection unchecked final ColumnSource rightKey = rightTable.getColumnSource("JoinKey"); @@ -114,8 +109,7 @@ private static void testNaturalJoinRandomStatic(int seed, int leftSize, int righ QueryScope.addParam("rightMap", rightMap); updateString = "RightSentinel=rightMap.get(JoinKey)"; } else if (dataType == short.class) { - final TShortIntMap rightMap = - new TShortIntHashMap(rightTable.intSize(), 0.5f, (short) -1, NULL_INT); + final TShortIntMap rightMap = new TShortIntHashMap(rightTable.intSize(), 0.5f, (short) -1, NULL_INT); // noinspection unchecked final ColumnSource rightKey = rightTable.getColumnSource("JoinKey"); @@ -128,8 +122,7 @@ private static void testNaturalJoinRandomStatic(int seed, int leftSize, int righ QueryScope.addParam("rightMap", rightMap); updateString = "RightSentinel=rightMap.get(JoinKey)"; } else if (dataType == byte.class) { - final TByteIntMap rightMap = - new TByteIntHashMap(rightTable.intSize(), 0.5f, (byte) -1, NULL_INT); + final TByteIntMap rightMap = new TByteIntHashMap(rightTable.intSize(), 0.5f, (byte) -1, NULL_INT); // noinspection 
unchecked final ColumnSource rightKey = rightTable.getColumnSource("JoinKey"); @@ -142,8 +135,7 @@ private static void testNaturalJoinRandomStatic(int seed, int leftSize, int righ QueryScope.addParam("rightMap", rightMap); updateString = "RightSentinel=rightMap.get(JoinKey)"; } else if (dataType == char.class) { - final TCharIntMap rightMap = - new TCharIntHashMap(rightTable.intSize(), 0.5f, (char) -1, NULL_INT); + final TCharIntMap rightMap = new TCharIntHashMap(rightTable.intSize(), 0.5f, (char) -1, NULL_INT); // noinspection unchecked final ColumnSource rightKey = rightTable.getColumnSource("JoinKey"); @@ -183,13 +175,13 @@ private static void testNaturalJoinRandomStatic(int seed, int leftSize, int righ QueryScope.addParam("rightMap", rightMap); updateString = "RightSentinel=(int)(rightMap.getOrDefault(JoinKey, null))"; - leftJoinTable = leftTable.update("JoinLong=JoinKey.get(0)", "JoinInt=JoinKey.get(1)") - .dropColumns("JoinKey"); - rightJoinTable = rightTable.update("JoinLong=JoinKey.get(0)", "JoinInt=JoinKey.get(1)") - .dropColumns("JoinKey"); + leftJoinTable = + leftTable.update("JoinLong=JoinKey.get(0)", "JoinInt=JoinKey.get(1)").dropColumns("JoinKey"); + rightJoinTable = + rightTable.update("JoinLong=JoinKey.get(0)", "JoinInt=JoinKey.get(1)").dropColumns("JoinKey"); matchKeys = "JoinLong,JoinInt"; - updateFixup = x -> x.update("JoinLong=JoinKey.get(0)", "JoinInt=JoinKey.get(1)") - .view("LeftSentinel", "JoinLong", "JoinInt", "RightSentinel"); + updateFixup = x -> x.update("JoinLong=JoinKey.get(0)", "JoinInt=JoinKey.get(1)").view("LeftSentinel", + "JoinLong", "JoinInt", "RightSentinel"); } else { throw new UnsupportedOperationException(); } @@ -209,10 +201,9 @@ private static void testNaturalJoinRandomStatic(int seed, int leftSize, int righ if (control == null) { joined = leftJoinTable.naturalJoin(rightJoinTable, matchKeys, "RightSentinel"); } else { - joined = NaturalJoinHelper.naturalJoin((QueryTable) leftJoinTable, - (QueryTable) rightJoinTable, - 
MatchPairFactory.getExpressions(StringUtils.splitToCollection(matchKeys)), - MatchPairFactory.getExpressions("RightSentinel"), false, control); + joined = NaturalJoinHelper.naturalJoin((QueryTable) leftJoinTable, (QueryTable) rightJoinTable, + MatchPairFactory.getExpressions(StringUtils.splitToCollection(matchKeys)), + MatchPairFactory.getExpressions("RightSentinel"), false, control); } final Table updated = updateFixup.apply(leftTable.update(updateString)); @@ -236,11 +227,9 @@ public void testNaturalJoinRandomStatic() { for (int seed = 0; seed < 2; ++seed) { for (Class dataType : Arrays.asList(String.class, int.class, SmartKey.class)) { for (boolean grouped : Arrays.asList(Boolean.TRUE, Boolean.FALSE)) { - System.out.println( - "Seed = " + seed + ", leftSize=" + leftSize + ", rightSize=" - + rightSize + ", type=" + dataType + ", grouped=" + grouped); - testNaturalJoinRandomStatic(seed, leftSize, rightSize, dataType, - grouped, false, null); + System.out.println("Seed = " + seed + ", leftSize=" + leftSize + ", rightSize=" + rightSize + + ", type=" + dataType + ", grouped=" + grouped); + testNaturalJoinRandomStatic(seed, leftSize, rightSize, dataType, grouped, false, null); } } } @@ -254,11 +243,9 @@ public void testNaturalJoinRandomSmallTypes() { for (int seed = 0; seed < 2; ++seed) { for (Class dataType : Arrays.asList(byte.class, char.class, short.class)) { for (boolean grouped : Arrays.asList(Boolean.TRUE, Boolean.FALSE)) { - System.out - .println("Seed = " + seed + ", leftSize=" + leftSize + ", rightSize=" - + rightSize + ", type=" + dataType + ", grouped=" + grouped); - testNaturalJoinRandomStatic(seed, leftSize, rightSize, dataType, grouped, - false, null); + System.out.println("Seed = " + seed + ", leftSize=" + leftSize + ", rightSize=" + rightSize + + ", type=" + dataType + ", grouped=" + grouped); + testNaturalJoinRandomStatic(seed, leftSize, rightSize, dataType, grouped, false, null); } } } @@ -271,12 +258,12 @@ public void 
testNaturalJoinRandomStaticOverflow() { for (int rightSize = 10_000; rightSize <= 100_000; rightSize *= 10) { for (int seed = 0; seed < 2; ++seed) { for (Class dataType : Arrays.asList(String.class, int.class, SmartKey.class)) { - System.out.println("Seed = " + seed + ", leftSize=" + leftSize - + ", rightSize=" + rightSize + ", type=" + dataType); - testNaturalJoinRandomStatic(seed, leftSize, rightSize, dataType, false, - false, QueryTableJoinTest.SMALL_LEFT_CONTROL); - testNaturalJoinRandomStatic(seed, leftSize, rightSize, dataType, false, - false, QueryTableJoinTest.SMALL_RIGHT_CONTROL); + System.out.println("Seed = " + seed + ", leftSize=" + leftSize + ", rightSize=" + rightSize + + ", type=" + dataType); + testNaturalJoinRandomStatic(seed, leftSize, rightSize, dataType, false, false, + QueryTableJoinTest.SMALL_LEFT_CONTROL); + testNaturalJoinRandomStatic(seed, leftSize, rightSize, dataType, false, false, + QueryTableJoinTest.SMALL_RIGHT_CONTROL); } } } @@ -290,21 +277,17 @@ public void testNaturalJoinRandomStaticRedirectionBuild() { for (int seed = 0; seed < 2; ++seed) { for (Class dataType : Collections.singletonList(int.class)) { for (boolean grouped : Arrays.asList(Boolean.TRUE, Boolean.FALSE)) { - for (JoinControl.RedirectionType redirectionType : JoinControl.RedirectionType - .values()) { - System.out.println( - "Seed = " + seed + ", leftSize=" + leftSize + ", rightSize=" + for (JoinControl.RedirectionType redirectionType : JoinControl.RedirectionType.values()) { + System.out.println("Seed = " + seed + ", leftSize=" + leftSize + ", rightSize=" + rightSize + ", type=" + dataType + ", grouped=" + grouped + ", redirectionType=" + redirectionType); - testNaturalJoinRandomStatic(seed, leftSize, rightSize, dataType, - grouped, - redirectionType == JoinControl.RedirectionType.Contiguous, - new JoinControl() { - @Override - RedirectionType getRedirectionType(Table leftTable) { - return redirectionType; - } - }); + testNaturalJoinRandomStatic(seed, leftSize, 
rightSize, dataType, grouped, + redirectionType == JoinControl.RedirectionType.Contiguous, new JoinControl() { + @Override + RedirectionType getRedirectionType(Table leftTable) { + return redirectionType; + } + }); } } } diff --git a/DB/src/test/java/io/deephaven/db/v2/QueryTableTest.java b/DB/src/test/java/io/deephaven/db/v2/QueryTableTest.java index 356a98addcb..6b6d1abd031 100644 --- a/DB/src/test/java/io/deephaven/db/v2/QueryTableTest.java +++ b/DB/src/test/java/io/deephaven/db/v2/QueryTableTest.java @@ -63,12 +63,11 @@ /** * Test of QueryTable functionality. * - * This test used to be a catch all, but at over 7,000 lines became unwieldy. It is still somewhat - * of a catch-all, but some specific classes of tests have been broken out. + * This test used to be a catch all, but at over 7,000 lines became unwieldy. It is still somewhat of a catch-all, but + * some specific classes of tests have been broken out. * - * See also {@link QueryTableAggregationTest}, {@link QueryTableJoinTest}, - * {@link QueryTableSelectUpdateTest}, {@link QueryTableFlattenTest}, and - * {@link QueryTableSortTest}. + * See also {@link QueryTableAggregationTest}, {@link QueryTableJoinTest}, {@link QueryTableSelectUpdateTest}, + * {@link QueryTableFlattenTest}, and {@link QueryTableSortTest}. */ @Category(OutOfBandTest.class) public class QueryTableTest extends QueryTableTestBase { @@ -109,13 +108,12 @@ public void testFormulaValidation() { } /** - * Test that the formula can see the internal variable that DBTimeUtils introduces here. (Prior - * to IDS-6532 this threw an exception). + * Test that the formula can see the internal variable that DBTimeUtils introduces here. (Prior to IDS-6532 this + * threw an exception). 
*/ public void testIds6532() { final Table empty = emptyTable(5); - final SelectColumn sc = - SelectColumnFactory.getExpression("Result = '2020-03-15T09:45:00.000000000 UTC'"); + final SelectColumn sc = SelectColumnFactory.getExpression("Result = '2020-03-15T09:45:00.000000000 UTC'"); // First time ok final Table t1 = empty.select(sc); // Second time throws exception @@ -123,36 +121,32 @@ public void testIds6532() { } /** - * Test that the formula behaves correctly when there are are two initDefs() without an - * intervening request to compile the formula. Prior to the second update to IDS-6532, this - * threw an exception, although typically only by OpenAPI code (because that code uses - * validateSelect() whereas other code tends not to). The issue is that this sequence of - * operations works: + * Test that the formula behaves correctly when there are are two initDefs() without an intervening request to + * compile the formula. Prior to the second update to IDS-6532, this threw an exception, although typically only by + * OpenAPI code (because that code uses validateSelect() whereas other code tends not to). The issue is that this + * sequence of operations works: * * initDef() get compiled formula initDef() get compiled formula * - * But (prior to this change) this sequence of operations would not work: initDef() initDef() - * get compiled formula + * But (prior to this change) this sequence of operations would not work: initDef() initDef() get compiled formula * - * The reason the second one breaks is that (prior to this change), when using certain Time - * literals, the second initDef() changes Formula state in such a way that a subsequent - * compilation would not succeed. 
The reason this break was not observed in practice is that - * most usages are like the first example: there is a compilation request interposed between - * initDefs() and, thanks to formula caching, the second compilation uses the cached Formula + * The reason the second one breaks is that (prior to this change), when using certain Time literals, the second + * initDef() changes Formula state in such a way that a subsequent compilation would not succeed. The reason this + * break was not observed in practice is that most usages are like the first example: there is a compilation request + * interposed between initDefs() and, thanks to formula caching, the second compilation uses the cached Formula * object from the first compilation and doesn't actually invoke the compiler again. */ public void testIds6532_part2() { final Table empty = emptyTable(5); - final SelectColumn sc = - SelectColumnFactory.getExpression("Result = '2020-03-15T09:45:00.000000000 UTC'"); + final SelectColumn sc = SelectColumnFactory.getExpression("Result = '2020-03-15T09:45:00.000000000 UTC'"); empty.validateSelect(sc); empty.select(sc); } /** - * The formula generation code used to create internal variables called "__chunk" + columnName; - * it also created an internal variable called "__chunkPos". Prior to the change that fixed - * this, a formula compilation error can happen if the customer names their column "Pos". + * The formula generation code used to create internal variables called "__chunk" + columnName; it also created an + * internal variable called "__chunkPos". Prior to the change that fixed this, a formula compilation error can + * happen if the customer names their column "Pos". */ public void testIds6614() { final Table empty = emptyTable(5); @@ -161,13 +155,11 @@ public void testIds6614() { } /** - * Confirm that the system behaves correctly with select validation and the new "flatten" code - * QueryTable#select(). 
Prior to the change that fixed this, "validateSelect" would cause the - * SelectColumn to get associated with one index, but then select() would want to flatten that - * index, so a later initDef would try to associate it with a different index, and then the - * assertion would fail at AbstractFormulaColumn.java:86. The simple fix is that - * validateSelect() should copy its select columns before using them and then throw away the - * copies. + * Confirm that the system behaves correctly with select validation and the new "flatten" code QueryTable#select(). + * Prior to the change that fixed this, "validateSelect" would cause the SelectColumn to get associated with one + * index, but then select() would want to flatten that index, so a later initDef would try to associate it with a + * different index, and then the assertion would fail at AbstractFormulaColumn.java:86. The simple fix is that + * validateSelect() should copy its select columns before using them and then throw away the copies. 
*/ public void testIds6760() { final Table t = emptyTable(10).select("II = ii").where("II > 5"); @@ -208,10 +200,10 @@ public void testViewIncremental() { final ColumnInfo[] columnInfo; final int size = 50; final QueryTable queryTable = getTable(size, random, - columnInfo = initColumnInfos(new String[] {"Sym", "intCol", "doubleCol"}, - new SetGenerator<>("a", "b", "c", "d", "e"), - new IntGenerator(10, 100), - new SetGenerator<>(10.1, 20.1, 30.1))); + columnInfo = initColumnInfos(new String[] {"Sym", "intCol", "doubleCol"}, + new SetGenerator<>("a", "b", "c", "d", "e"), + new IntGenerator(10, 100), + new SetGenerator<>(10.1, 20.1, 30.1))); final Table sortedTable = queryTable.sort("intCol"); @@ -233,8 +225,7 @@ public Table e() { }, new EvalNugget() { public Table e() { - return queryTable.updateView("newCol=intCol / 2") - .updateView("newCol2=newCol * 4"); + return queryTable.updateView("newCol=intCol / 2").updateView("newCol2=newCol * 4"); } }, new EvalNugget() { @@ -249,8 +240,7 @@ public Table e() { }, new EvalNugget() { public Table e() { - return queryTable.view("newCol=intCol / 2") - .updateView("newCol2=newCol * 4"); + return queryTable.view("newCol=intCol / 2").updateView("newCol2=newCol * 4"); } }, new EvalNugget() { @@ -270,8 +260,7 @@ public Table e() { }, new EvalNugget() { public Table e() { - return sortedTable.updateView("newCol=intCol / 2") - .updateView("newCol2=newCol * 4"); + return sortedTable.updateView("newCol=intCol / 2").updateView("newCol2=newCol * 4"); } }, new EvalNugget() { @@ -286,81 +275,62 @@ public Table e() { }, new EvalNugget() { public Table e() { - return sortedTable.view("newCol=intCol / 2") - .updateView("newCol2=newCol * 4"); + return sortedTable.view("newCol=intCol / 2").updateView("newCol2=newCol * 4"); } }, - EvalNugget.from( - () -> queryTable.updateView("newCol=intCol / 2", "newCol2=newCol_[i-1] * 4")) - .hasUnstableColumns("newCol2"), - EvalNugget - .from(() -> queryTable.updateView("newCol=intCol / 2", - 
"newCol2=newCol_[i-1] * newCol")) - .hasUnstableColumns("newCol2"), + EvalNugget.from(() -> queryTable.updateView("newCol=intCol / 2", "newCol2=newCol_[i-1] * 4")) + .hasUnstableColumns("newCol2"), + EvalNugget.from(() -> queryTable.updateView("newCol=intCol / 2", "newCol2=newCol_[i-1] * newCol")) + .hasUnstableColumns("newCol2"), EvalNugget - .from(() -> queryTable.updateView("repeatedCol=doubleCol - 0.5", - "newCol=intCol / 2", "repeatedCol=newCol_[i-1] * repeatedCol")) - .hasUnstableColumns("repeatedCol"), - EvalNugget.from( - () -> queryTable.updateView("newCol2=intCol / 2", "newCol=newCol2_[i-1] + 7")) - .hasUnstableColumns("newCol"), + .from(() -> queryTable.updateView("repeatedCol=doubleCol - 0.5", "newCol=intCol / 2", + "repeatedCol=newCol_[i-1] * repeatedCol")) + .hasUnstableColumns("repeatedCol"), + EvalNugget.from(() -> queryTable.updateView("newCol2=intCol / 2", "newCol=newCol2_[i-1] + 7")) + .hasUnstableColumns("newCol"), EvalNugget.from(() -> queryTable.updateView("newCol=intCol_[i-1]")) - .hasUnstableColumns("newCol"), - EvalNugget.from( - () -> sortedTable.updateView("newCol=intCol / 2", "newCol2=newCol_[i-1] * 4")) - .hasUnstableColumns("newCol2"), - EvalNugget - .from(() -> sortedTable.updateView("newCol=intCol / 2", - "newCol2=newCol_[i-1] * newCol")) - .hasUnstableColumns("newCol2"), + .hasUnstableColumns("newCol"), + EvalNugget.from(() -> sortedTable.updateView("newCol=intCol / 2", "newCol2=newCol_[i-1] * 4")) + .hasUnstableColumns("newCol2"), + EvalNugget.from(() -> sortedTable.updateView("newCol=intCol / 2", "newCol2=newCol_[i-1] * newCol")) + .hasUnstableColumns("newCol2"), EvalNugget - .from(() -> sortedTable.updateView("repeatedCol=doubleCol - 0.5", - "newCol=intCol / 2", "repeatedCol=newCol_[i-1] * repeatedCol")) - .hasUnstableColumns("repeatedCol"), - EvalNugget.from( - () -> sortedTable.updateView("newCol2=intCol / 2", "newCol=newCol2_[i-1] + 7")) - .hasUnstableColumns("newCol"), + .from(() -> 
sortedTable.updateView("repeatedCol=doubleCol - 0.5", "newCol=intCol / 2", + "repeatedCol=newCol_[i-1] * repeatedCol")) + .hasUnstableColumns("repeatedCol"), + EvalNugget.from(() -> sortedTable.updateView("newCol2=intCol / 2", "newCol=newCol2_[i-1] + 7")) + .hasUnstableColumns("newCol"), EvalNugget.from(() -> sortedTable.updateView("newCol=intCol_[i-1]")) - .hasUnstableColumns("newCol"), + .hasUnstableColumns("newCol"), EvalNugget.from(() -> queryTable.view("newCol=intCol_[i-1]")) - .hasUnstableColumns("newCol"), + .hasUnstableColumns("newCol"), + EvalNugget.from(() -> queryTable.view("newCol=intCol / 2", "newCol2=newCol_[i-1] * 4")) + .hasUnstableColumns("newCol2"), + EvalNugget.from(() -> queryTable.view("newCol=intCol / 2", "newCol2=newCol_[i-1] * newCol")) + .hasUnstableColumns("newCol2"), EvalNugget - .from(() -> queryTable.view("newCol=intCol / 2", "newCol2=newCol_[i-1] * 4")) - .hasUnstableColumns("newCol2"), - EvalNugget - .from( - () -> queryTable.view("newCol=intCol / 2", "newCol2=newCol_[i-1] * newCol")) - .hasUnstableColumns("newCol2"), - EvalNugget - .from(() -> queryTable.view("repeatedCol=doubleCol - 0.5", "newCol=intCol / 2", - "repeatedCol=newCol_[i-1] * repeatedCol")) - .hasUnstableColumns("repeatedCol"), - EvalNugget - .from(() -> queryTable.view("newCol2=intCol / 2", "newCol=newCol2_[i-1] + 7")) - .hasUnstableColumns("newCol"), + .from(() -> queryTable.view("repeatedCol=doubleCol - 0.5", "newCol=intCol / 2", + "repeatedCol=newCol_[i-1] * repeatedCol")) + .hasUnstableColumns("repeatedCol"), + EvalNugget.from(() -> queryTable.view("newCol2=intCol / 2", "newCol=newCol2_[i-1] + 7")) + .hasUnstableColumns("newCol"), EvalNugget.from(() -> sortedTable.view("newCol=intCol_[i-1]")) - .hasUnstableColumns("newCol"), + .hasUnstableColumns("newCol"), + EvalNugget.from(() -> sortedTable.view("newCol=intCol / 2", "newCol2=newCol_[i-1] * 4")) + .hasUnstableColumns("newCol2"), + EvalNugget.from(() -> sortedTable.view("newCol=intCol / 2", "newCol2=newCol_[i-1] * 
newCol")) + .hasUnstableColumns("newCol2"), EvalNugget - .from(() -> sortedTable.view("newCol=intCol / 2", "newCol2=newCol_[i-1] * 4")) - .hasUnstableColumns("newCol2"), + .from(() -> sortedTable.view("repeatedCol=doubleCol - 0.5", "newCol=intCol / 2", + "repeatedCol=newCol_[i-1] * repeatedCol")) + .hasUnstableColumns("repeatedCol"), + EvalNugget.from(() -> sortedTable.view("newCol2=intCol / 2", "newCol=newCol2_[i-1] + 7")) + .hasUnstableColumns("newCol"), EvalNugget.from( - () -> sortedTable.view("newCol=intCol / 2", "newCol2=newCol_[i-1] * newCol")) - .hasUnstableColumns("newCol2"), - EvalNugget - .from(() -> sortedTable.view("repeatedCol=doubleCol - 0.5", "newCol=intCol / 2", - "repeatedCol=newCol_[i-1] * repeatedCol")) - .hasUnstableColumns("repeatedCol"), - EvalNugget - .from(() -> sortedTable.view("newCol2=intCol / 2", "newCol=newCol2_[i-1] + 7")) - .hasUnstableColumns("newCol"), - EvalNugget - .from(() -> queryTable.updateView("newCol2=intCol / 2", "newCol=newCol2", - "newCol=newCol_[i-1] + 7")) - .hasUnstableColumns("newCol"), - EvalNugget - .from( - () -> queryTable.updateView("newCol=intCol / 2", "newCol=newCol_[i-1] + 7")) - .hasUnstableColumns("newCol"), + () -> queryTable.updateView("newCol2=intCol / 2", "newCol=newCol2", "newCol=newCol_[i-1] + 7")) + .hasUnstableColumns("newCol"), + EvalNugget.from(() -> queryTable.updateView("newCol=intCol / 2", "newCol=newCol_[i-1] + 7")) + .hasUnstableColumns("newCol"), }; for (int i = 0; i < 10; i++) { @@ -375,27 +345,18 @@ public void testView() { QueryScope.addParam("MEF", 1.0); QueryScope.addParam("LnRatioStd", 1.0); QueryScope.addParam("VegaPer", 1.0); - TableTools.emptyTable(3) - .updateView("MinEdge = (IsIndex ? 
indexMinEdge : MEF * LnRatioStd) * VegaPer"); - - final QueryTable table0 = - (QueryTable) TableTools.emptyTable(3).view("x = i*2", "y = \"\" + x"); - assertEquals(Arrays.asList(0, 2, 4), - Arrays.asList(table0.getColumn("x").get(0, table0.size()))); - assertEquals(Arrays.asList("0", "2", "4"), - Arrays.asList(table0.getColumn("y").get(0, table0.size()))); - - final QueryTable table = - (QueryTable) table0.updateView("z = x + 1", "x = z + 1", "t = x - 3"); - assertEquals(Arrays.asList(1, 3, 5), - Arrays.asList(table.getColumn("z").get(0, table.size()))); - assertEquals(Arrays.asList(2, 4, 6), - Arrays.asList(table.getColumn("x").get(0, table.size()))); - assertEquals(Arrays.asList(-1, 1, 3), - Arrays.asList(table.getColumn("t").get(0, table.size()))); - - final QueryTable table1 = - TstUtils.testRefreshingTable(i(2, 4, 6), c("x", 1, 2, 3), c("y", 'a', 'b', 'c')); + TableTools.emptyTable(3).updateView("MinEdge = (IsIndex ? indexMinEdge : MEF * LnRatioStd) * VegaPer"); + + final QueryTable table0 = (QueryTable) TableTools.emptyTable(3).view("x = i*2", "y = \"\" + x"); + assertEquals(Arrays.asList(0, 2, 4), Arrays.asList(table0.getColumn("x").get(0, table0.size()))); + assertEquals(Arrays.asList("0", "2", "4"), Arrays.asList(table0.getColumn("y").get(0, table0.size()))); + + final QueryTable table = (QueryTable) table0.updateView("z = x + 1", "x = z + 1", "t = x - 3"); + assertEquals(Arrays.asList(1, 3, 5), Arrays.asList(table.getColumn("z").get(0, table.size()))); + assertEquals(Arrays.asList(2, 4, 6), Arrays.asList(table.getColumn("x").get(0, table.size()))); + assertEquals(Arrays.asList(-1, 1, 3), Arrays.asList(table.getColumn("t").get(0, table.size()))); + + final QueryTable table1 = TstUtils.testRefreshingTable(i(2, 4, 6), c("x", 1, 2, 3), c("y", 'a', 'b', 'c')); final QueryTable table2 = (QueryTable) table1.updateView("z = x", "x = z + 1", "t = x - 3"); final Listener table2Listener = new ListenerWithGlobals(table2); table2.listenForUpdates(table2Listener); @@ 
-407,7 +368,7 @@ public void testView() { assertEquals(5, table1.size()); assertEquals(5, table2.size()); assertEquals("", io.deephaven.db.tables.utils.TableTools.diff(table2, - table1.update("z = x", "x = z + 1", "t = x - 3"), 10)); + table1.update("z = x", "x = z + 1", "t = x - 3"), 10)); assertEquals(added, i(7, 9)); assertEquals(modified, i()); assertEquals(removed, i()); @@ -419,7 +380,7 @@ public void testView() { assertEquals(5, table1.size()); assertEquals(5, table2.size()); assertEquals("", io.deephaven.db.tables.utils.TableTools.diff(table2, - table1.update("z = x", "x = z + 1", "t = x - 3"), 10)); + table1.update("z = x", "x = z + 1", "t = x - 3"), 10)); assertEquals(added, i()); assertEquals(modified, i(7, 9)); assertEquals(removed, i()); @@ -432,7 +393,7 @@ public void testView() { assertEquals(2, table1.size()); assertEquals(2, table2.size()); assertEquals("", io.deephaven.db.tables.utils.TableTools.diff(table2, - table1.update("z = x", "x = z + 1", "t = x - 3"), 10)); + table1.update("z = x", "x = z + 1", "t = x - 3"), 10)); assertEquals(added, i()); assertEquals(removed, i(2, 6, 7)); assertEquals(modified, i()); @@ -446,13 +407,12 @@ public void testView() { assertEquals(3, table1.size()); assertEquals(3, table2.size()); assertEquals("", io.deephaven.db.tables.utils.TableTools.diff(table2, - table1.update("z = x", "x = z + 1", "t = x - 3"), 10)); + table1.update("z = x", "x = z + 1", "t = x - 3"), 10)); assertEquals(added, i(2, 6)); assertEquals(removed, i(9)); assertEquals(modified, i(4)); - final QueryTable table3 = - TstUtils.testRefreshingTable(i(2, 4, 6), c("x", 1, 2, 3), c("y", 'a', 'b', 'c')); + final QueryTable table3 = TstUtils.testRefreshingTable(i(2, 4, 6), c("x", 1, 2, 3), c("y", 'a', 'b', 'c')); final QueryTable table4 = (QueryTable) table3.view("z = x", "x = z + 1", "t = x - 3"); final Listener table4Listener = new ListenerWithGlobals(table4); table4.listenForUpdates(table4Listener); @@ -465,7 +425,7 @@ public void testView() { 
assertEquals(5, table3.size()); assertEquals(5, table4.size()); assertEquals("", io.deephaven.db.tables.utils.TableTools.diff(table4, - table3.select("z = x", "x = z + 1", "t = x - 3"), 10)); + table3.select("z = x", "x = z + 1", "t = x - 3"), 10)); assertEquals(added, i(7, 9)); assertEquals(modified, i()); assertEquals(removed, i()); @@ -479,7 +439,7 @@ public void testView() { assertEquals(5, table3.size()); assertEquals(5, table4.size()); assertEquals("", io.deephaven.db.tables.utils.TableTools.diff(table4, - table3.select("z = x", "x = z + 1", "t = x - 3"), 10)); + table3.select("z = x", "x = z + 1", "t = x - 3"), 10)); assertEquals(added, i()); assertEquals(modified, i(7, 9)); assertEquals(removed, i()); @@ -492,7 +452,7 @@ public void testView() { assertEquals(2, table4.size()); assertEquals(2, table3.size()); assertEquals("", io.deephaven.db.tables.utils.TableTools.diff(table4, - table3.select("z = x", "x = z + 1", "t = x - 3"), 10)); + table3.select("z = x", "x = z + 1", "t = x - 3"), 10)); assertEquals(added, i()); assertEquals(removed, i(2, 6, 7)); assertEquals(modified, i()); @@ -507,7 +467,7 @@ public void testView() { assertEquals(3, table1.size()); assertEquals(3, table3.size()); assertEquals("", io.deephaven.db.tables.utils.TableTools.diff(table4, - table3.select("z = x", "x = z + 1", "t = x - 3"), 10)); + table3.select("z = x", "x = z + 1", "t = x - 3"), 10)); assertEquals(added, i(2, 6)); assertEquals(removed, i(9)); assertEquals(modified, i(4)); @@ -522,35 +482,31 @@ public void testView1() { assertNull(t.updateView("nullD = NULL_DOUBLE + 0").getColumn("nullD").get(0)); assertEquals( - Arrays.asList(emptyTable(4).updateView("b1 = (i%2 = 0)?null:true") - .updateView("x = b1 == null?1:2").select("x").getColumn("x").get(0, 4)), - Arrays.asList(1, 2, 1, 2)); + Arrays.asList(emptyTable(4).updateView("b1 = (i%2 = 0)?null:true").updateView("x = b1 == null?1:2") + .select("x").getColumn("x").get(0, 4)), + Arrays.asList(1, 2, 1, 2)); Table table = 
newTable(3, Arrays.asList("String", "Int"), - Arrays.asList(TableTools.objColSource("c", "e", "g"), TableTools.colSource(2, 4, 6))); + Arrays.asList(TableTools.objColSource("c", "e", "g"), TableTools.colSource(2, 4, 6))); assertEquals(2, table.view(CollectionUtil.ZERO_LENGTH_STRING_ARRAY).getColumns().length); assertEquals(table.getColumns()[0].getName(), - table.view(CollectionUtil.ZERO_LENGTH_STRING_ARRAY).getColumns()[0].getName()); + table.view(CollectionUtil.ZERO_LENGTH_STRING_ARRAY).getColumns()[0].getName()); assertEquals(table.getColumns()[1].getName(), - table.view(CollectionUtil.ZERO_LENGTH_STRING_ARRAY).getColumns()[1].getName()); + table.view(CollectionUtil.ZERO_LENGTH_STRING_ARRAY).getColumns()[1].getName()); assertEquals(2, table.view("String", "Int").getColumns().length); - assertEquals(table.getColumns()[0].getName(), - table.view("String", "Int").getColumns()[0].getName()); - assertEquals(table.getColumns()[1].getName(), - table.view("String", "Int").getColumns()[1].getName()); + assertEquals(table.getColumns()[0].getName(), table.view("String", "Int").getColumns()[0].getName()); + assertEquals(table.getColumns()[1].getName(), table.view("String", "Int").getColumns()[1].getName()); assertEquals(2, table.view("Int", "String").getColumns().length); - assertEquals(table.getColumns()[0].getName(), - table.view("Int", "String").getColumns()[1].getName()); - assertEquals(table.getColumns()[1].getName(), - table.view("Int", "String").getColumns()[0].getName()); + assertEquals(table.getColumns()[0].getName(), table.view("Int", "String").getColumns()[1].getName()); + assertEquals(table.getColumns()[1].getName(), table.view("Int", "String").getColumns()[0].getName()); assertEquals(2, table.view("Int1=Int", "String1=String").getColumns().length); assertSame(table.getColumns()[0].getClass(), - table.view("Int1=Int", "String1=String").getColumns()[1].getClass()); + table.view("Int1=Int", "String1=String").getColumns()[1].getClass()); 
assertSame(table.getColumns()[1].getClass(), - table.view("Int1=Int", "String1=String").getColumns()[0].getClass()); + table.view("Int1=Int", "String1=String").getColumns()[0].getClass()); assertEquals("Int1", table.view("Int1=Int", "String1=String").getColumns()[0].getName()); assertEquals("String1", table.view("Int1=Int", "String1=String").getColumns()[1].getName()); @@ -569,8 +525,7 @@ public void testReinterpret() { final Table source = emptyTable(5).select("dt = nanosToTime(ii)", "n = ii"); final Table result = source.dateTimeColumnAsNanos("dt"); assertEquals((long[]) result.getColumn(0).getDirect(), LongStream.range(0, 5).toArray()); - final Table reflexive = - result.view(new ReinterpretedColumn<>("dt", long.class, "dt", DBDateTime.class)); + final Table reflexive = result.view(new ReinterpretedColumn<>("dt", long.class, "dt", DBDateTime.class)); assertEquals("", TableTools.diff(reflexive, source.dropColumns("n"), source.size())); final Table sortedSource = source.sortDescending("dt").dropColumns("dt"); final Table sortedResult = result.sortDescending("dt").dropColumns("dt"); @@ -579,16 +534,14 @@ public void testReinterpret() { public void testDropColumns() { final List colNames = Arrays.asList("String", "Int", "Double"); - final List colSources = Arrays.asList(TableTools.objColSource("c", "e", "g"), - colSource(2, 4, 6), colSource(1.0, 2.0, 3.0)); + final List colSources = + Arrays.asList(TableTools.objColSource("c", "e", "g"), colSource(2, 4, 6), colSource(1.0, 2.0, 3.0)); final Table table = newTable(3, colNames, colSources); assertEquals(3, table.dropColumns().getColumnSources().size()); Collection columnSourcesAfterDrop = table.getColumnSources(); - ColumnSource[] columnsAfterDrop = - columnSourcesAfterDrop.toArray(ColumnSource.ZERO_LENGTH_COLUMN_SOURCE_ARRAY); + ColumnSource[] columnsAfterDrop = columnSourcesAfterDrop.toArray(ColumnSource.ZERO_LENGTH_COLUMN_SOURCE_ARRAY); Collection columnSources = table.dropColumns().getColumnSources(); - 
ColumnSource[] columns = - columnSources.toArray(ColumnSource.ZERO_LENGTH_COLUMN_SOURCE_ARRAY); + ColumnSource[] columns = columnSources.toArray(ColumnSource.ZERO_LENGTH_COLUMN_SOURCE_ARRAY); assertSame(columns[0], columnsAfterDrop[0]); assertSame(columns[1], columnsAfterDrop[1]); assertSame(columns[2], columnsAfterDrop[2]); @@ -598,16 +551,13 @@ public void testDropColumns() { assertEquals(2, table.dropColumns("Int").getColumnSources().size()); columnSourcesAfterDrop = table.dropColumns("Int").getColumnSources(); - columnsAfterDrop = - columnSourcesAfterDrop.toArray(ColumnSource.ZERO_LENGTH_COLUMN_SOURCE_ARRAY); + columnsAfterDrop = columnSourcesAfterDrop.toArray(ColumnSource.ZERO_LENGTH_COLUMN_SOURCE_ARRAY); columnSources = table.getColumnSources(); columns = columnSources.toArray(ColumnSource.ZERO_LENGTH_COLUMN_SOURCE_ARRAY); assertSame(columns[0], columnsAfterDrop[0]); assertSame(columns[2], columnsAfterDrop[1]); - assertSame(table.getColumnSource("String"), - table.dropColumns("Int").getColumnSource("String")); - assertSame(table.getColumnSource("Double"), - table.dropColumns("Int").getColumnSource("Double")); + assertSame(table.getColumnSource("String"), table.dropColumns("Int").getColumnSource("String")); + assertSame(table.getColumnSource("Double"), table.dropColumns("Int").getColumnSource("Double")); try { table.dropColumns("Int").getColumnSource("Int"); fail("Expected exception"); @@ -615,14 +565,12 @@ public void testDropColumns() { } columnSourcesAfterDrop = table.dropColumns("String", "Int").getColumnSources(); - columnsAfterDrop = - columnSourcesAfterDrop.toArray(ColumnSource.ZERO_LENGTH_COLUMN_SOURCE_ARRAY); + columnsAfterDrop = columnSourcesAfterDrop.toArray(ColumnSource.ZERO_LENGTH_COLUMN_SOURCE_ARRAY); columnSources = table.getColumnSources(); columns = columnSources.toArray(ColumnSource.ZERO_LENGTH_COLUMN_SOURCE_ARRAY); assertEquals(1, table.dropColumns("String", "Int").getColumnSources().size()); assertSame(columns[2], columnsAfterDrop[0]); - 
assertSame(table.getColumnSource("Double"), - table.dropColumns("String", "Int").getColumnSource("Double")); + assertSame(table.getColumnSource("Double"), table.dropColumns("String", "Int").getColumnSource("Double")); try { table.dropColumns("String", "Int").getColumnSource("String"); fail("Expected exception"); @@ -636,13 +584,12 @@ public void testDropColumns() { assertEquals(1, table.dropColumns("String").dropColumns("Int").getColumns().length); columnSourcesAfterDrop = table.dropColumns("String").dropColumns("Int").getColumnSources(); - columnsAfterDrop = - columnSourcesAfterDrop.toArray(ColumnSource.ZERO_LENGTH_COLUMN_SOURCE_ARRAY); + columnsAfterDrop = columnSourcesAfterDrop.toArray(ColumnSource.ZERO_LENGTH_COLUMN_SOURCE_ARRAY); columnSources = table.getColumnSources(); columns = columnSources.toArray(ColumnSource.ZERO_LENGTH_COLUMN_SOURCE_ARRAY); assertSame(columnsAfterDrop[0], columns[2]); assertSame(table.getColumnSource("Double"), - table.dropColumns("String").dropColumns("Int").getColumnSource("Double")); + table.dropColumns("String").dropColumns("Int").getColumnSource("Double")); try { table.dropColumns("String").dropColumns("Int").getColumnSource("String"); fail("Expected exception"); @@ -657,56 +604,47 @@ public void testDropColumns() { try { table.dropColumns(Collections.singletonList("DoesNotExist")); } catch (RuntimeException e) { - assertEquals( - "Unknown columns: [DoesNotExist], available columns = [String, Int, Double]", - e.getMessage()); + assertEquals("Unknown columns: [DoesNotExist], available columns = [String, Int, Double]", e.getMessage()); } try { table.dropColumns(Arrays.asList("Int", "DoesNotExist")); } catch (RuntimeException e) { - assertEquals( - "Unknown columns: [DoesNotExist], available columns = [String, Int, Double]", - e.getMessage()); + assertEquals("Unknown columns: [DoesNotExist], available columns = [String, Int, Double]", e.getMessage()); } } public void testRenameColumns() { final Table table = newTable(3, - 
Arrays.asList("String", "Int", "Double"), - Arrays.asList(TableTools.objColSource("c", "e", "g"), colSource(2, 4, 6), - colSource(1.0, 2.0, 3.0))); - assertEquals(3, - table.renameColumns(CollectionUtil.ZERO_LENGTH_STRING_ARRAY).getColumnSources().size()); + Arrays.asList("String", "Int", "Double"), + Arrays.asList(TableTools.objColSource("c", "e", "g"), colSource(2, 4, 6), colSource(1.0, 2.0, 3.0))); + assertEquals(3, table.renameColumns(CollectionUtil.ZERO_LENGTH_STRING_ARRAY).getColumnSources().size()); final Collection columnSources = table.getColumnSources(); - final ColumnSource[] columns = - columnSources.toArray(ColumnSource.ZERO_LENGTH_COLUMN_SOURCE_ARRAY); + final ColumnSource[] columns = columnSources.toArray(ColumnSource.ZERO_LENGTH_COLUMN_SOURCE_ARRAY); final Collection renamedColumnSources = - table.renameColumns(CollectionUtil.ZERO_LENGTH_STRING_ARRAY).getColumnSources(); + table.renameColumns(CollectionUtil.ZERO_LENGTH_STRING_ARRAY).getColumnSources(); final ColumnSource[] renamedColumns = - renamedColumnSources.toArray(ColumnSource.ZERO_LENGTH_COLUMN_SOURCE_ARRAY); + renamedColumnSources.toArray(ColumnSource.ZERO_LENGTH_COLUMN_SOURCE_ARRAY); assertSame(columns[0], renamedColumns[0]); assertSame(columns[1], renamedColumns[1]); assertSame(columns[2], renamedColumns[2]); assertSame(table.getColumnSource("String"), - table.renameColumns(CollectionUtil.ZERO_LENGTH_STRING_ARRAY).getColumnSource("String")); + table.renameColumns(CollectionUtil.ZERO_LENGTH_STRING_ARRAY).getColumnSource("String")); assertSame(table.getColumnSource("Int"), - table.renameColumns(CollectionUtil.ZERO_LENGTH_STRING_ARRAY).getColumnSource("Int")); + table.renameColumns(CollectionUtil.ZERO_LENGTH_STRING_ARRAY).getColumnSource("Int")); assertSame(table.getColumnSource("Double"), - table.renameColumns(CollectionUtil.ZERO_LENGTH_STRING_ARRAY).getColumnSource("Double")); + table.renameColumns(CollectionUtil.ZERO_LENGTH_STRING_ARRAY).getColumnSource("Double")); assertEquals(3, 
table.renameColumns("NewInt=Int").getColumns().length); assertEquals(table.getColumnSources().toArray()[0], - table.renameColumns("NewInt=Int").getColumnSources().toArray()[0]); + table.renameColumns("NewInt=Int").getColumnSources().toArray()[0]); assertTrue(Arrays.equals((int[]) table.getColumns()[1].getDirect(), - (int[]) table.renameColumns("NewInt=Int").getColumns()[1].getDirect())); + (int[]) table.renameColumns("NewInt=Int").getColumns()[1].getDirect())); assertEquals(table.getColumnSources().toArray()[2], - table.renameColumns("NewInt=Int").getColumnSources().toArray()[2]); - assertEquals(table.getColumnSource("String"), - table.renameColumns("NewInt=Int").getColumnSource("String")); + table.renameColumns("NewInt=Int").getColumnSources().toArray()[2]); + assertEquals(table.getColumnSource("String"), table.renameColumns("NewInt=Int").getColumnSource("String")); assertTrue(Arrays.equals((int[]) table.getColumn("Int").getDirect(), - (int[]) table.renameColumns("NewInt=Int").getColumn("NewInt").getDirect())); - assertEquals(table.getColumnSource("Double"), - table.renameColumns("NewInt=Int").getColumnSource("Double")); + (int[]) table.renameColumns("NewInt=Int").getColumn("NewInt").getDirect())); + assertEquals(table.getColumnSource("Double"), table.renameColumns("NewInt=Int").getColumnSource("Double")); try { table.renameColumns("NewInt=Int").getColumn("Int"); fail("Expected exception"); @@ -715,18 +653,17 @@ public void testRenameColumns() { assertEquals(3, table.renameColumns("NewInt=Int", "NewString=String").getColumns().length); assertTrue(Arrays.equals((String[]) table.getColumns()[0].getDirect(), - (String[]) table.renameColumns("NewInt=Int", "NewString=String").getColumns()[0] - .getDirect())); + (String[]) table.renameColumns("NewInt=Int", "NewString=String").getColumns()[0].getDirect())); assertTrue(Arrays.equals((int[]) table.getColumns()[1].getDirect(), - (int[]) table.renameColumns("NewInt=Int").getColumns()[1].getDirect())); + (int[]) 
table.renameColumns("NewInt=Int").getColumns()[1].getDirect())); assertEquals(table.getColumnSources().toArray()[2], - table.renameColumns("NewInt=Int", "NewString=String").getColumnSources().toArray()[2]); - assertTrue(Arrays.equals((String[]) table.getColumn("String").getDirect(), (String[]) table - .renameColumns("NewInt=Int", "NewString=String").getColumn("NewString").getDirect())); + table.renameColumns("NewInt=Int", "NewString=String").getColumnSources().toArray()[2]); + assertTrue(Arrays.equals((String[]) table.getColumn("String").getDirect(), + (String[]) table.renameColumns("NewInt=Int", "NewString=String").getColumn("NewString").getDirect())); assertTrue(Arrays.equals((int[]) table.getColumn("Int").getDirect(), - (int[]) table.renameColumns("NewInt=Int").getColumn("NewInt").getDirect())); + (int[]) table.renameColumns("NewInt=Int").getColumn("NewInt").getDirect())); assertEquals(table.getColumnSource("Double"), - table.renameColumns("NewInt=Int", "NewString=String").getColumnSource("Double")); + table.renameColumns("NewInt=Int", "NewString=String").getColumnSource("Double")); try { table.renameColumns("NewInt=Int", "NewString=String").getColumn("Int"); fail("Expected exception"); @@ -738,21 +675,19 @@ public void testRenameColumns() { } catch (RuntimeException ignored) { } - assertEquals(3, table.renameColumns("NewInt=Int").renameColumns("NewString=String") - .getColumns().length); + assertEquals(3, table.renameColumns("NewInt=Int").renameColumns("NewString=String").getColumns().length); assertTrue(Arrays.equals((String[]) table.getColumns()[0].getDirect(), - (String[]) table.renameColumns("NewInt=Int", "NewString=String").getColumns()[0] - .getDirect())); + (String[]) table.renameColumns("NewInt=Int", "NewString=String").getColumns()[0].getDirect())); assertTrue(Arrays.equals((int[]) table.getColumns()[1].getDirect(), - (int[]) table.renameColumns("NewInt=Int").getColumns()[1].getDirect())); - assertEquals(table.getColumnSources().toArray()[2], 
table.renameColumns("NewInt=Int") - .renameColumns("NewString=String").getColumnSources().toArray()[2]); - assertTrue(Arrays.equals((String[]) table.getColumn("String").getDirect(), (String[]) table - .renameColumns("NewInt=Int", "NewString=String").getColumn("NewString").getDirect())); + (int[]) table.renameColumns("NewInt=Int").getColumns()[1].getDirect())); + assertEquals(table.getColumnSources().toArray()[2], + table.renameColumns("NewInt=Int").renameColumns("NewString=String").getColumnSources().toArray()[2]); + assertTrue(Arrays.equals((String[]) table.getColumn("String").getDirect(), + (String[]) table.renameColumns("NewInt=Int", "NewString=String").getColumn("NewString").getDirect())); assertTrue(Arrays.equals((int[]) table.getColumn("Int").getDirect(), - (int[]) table.renameColumns("NewInt=Int").getColumn("NewInt").getDirect())); - assertEquals(table.getColumnSource("Double"), table.renameColumns("NewInt=Int") - .renameColumns("NewString=String").getColumnSource("Double")); + (int[]) table.renameColumns("NewInt=Int").getColumn("NewInt").getDirect())); + assertEquals(table.getColumnSource("Double"), + table.renameColumns("NewInt=Int").renameColumns("NewString=String").getColumnSource("Double")); try { table.renameColumns("NewInt=Int").renameColumns("NewString=String").getColumn("Int"); fail("Expected exception"); @@ -771,24 +706,21 @@ public void testRenameColumnsIncremental() { final int size = 100; final TstUtils.ColumnInfo[] columnInfo; final QueryTable queryTable = getTable(size, random, - columnInfo = initColumnInfos(new String[] {"Sym", "intCol", "doubleCol"}, - new TstUtils.SetGenerator<>("a", "b", "c", "d"), - new TstUtils.IntGenerator(10, 100), - new TstUtils.SetGenerator<>(10.1, 20.1, 30.1))); + columnInfo = initColumnInfos(new String[] {"Sym", "intCol", "doubleCol"}, + new TstUtils.SetGenerator<>("a", "b", "c", "d"), + new TstUtils.IntGenerator(10, 100), + new TstUtils.SetGenerator<>(10.1, 20.1, 30.1))); final EvalNugget en[] = new EvalNugget[] { 
EvalNugget.from(() -> queryTable.renameColumns(CollectionUtil.listFromArray())), EvalNugget.from(() -> queryTable.renameColumns("Symbol=Sym")), EvalNugget.from(() -> queryTable.renameColumns("Symbol=Sym", "Symbols=Sym")), - EvalNugget.from(() -> queryTable.renameColumns("Sym2=Sym", "intCol2=intCol", - "doubleCol2=doubleCol")), + EvalNugget.from(() -> queryTable.renameColumns("Sym2=Sym", "intCol2=intCol", "doubleCol2=doubleCol")), }; // Verify our assumption that columns can be renamed at most once. - Assert.assertTrue( - queryTable.renameColumns("Symbol=Sym", "Symbols=Sym").hasColumns("Symbols")); - Assert.assertFalse( - queryTable.renameColumns("Symbol=Sym", "Symbols=Sym").hasColumns("Symbol")); + Assert.assertTrue(queryTable.renameColumns("Symbol=Sym", "Symbols=Sym").hasColumns("Symbols")); + Assert.assertFalse(queryTable.renameColumns("Symbol=Sym", "Symbols=Sym").hasColumns("Symbol")); final int steps = 100; for (int i = 0; i < steps; i++) { @@ -806,24 +738,22 @@ public void testStringContainsFilter() { final int size = 500; final ColumnInfo[] columnInfo; - final QueryTable table = getTable(size, random, - columnInfo = initColumnInfos(new String[] {"S1", "S2"}, + final QueryTable table = getTable(size, random, columnInfo = initColumnInfos(new String[] {"S1", "S2"}, new StringGenerator(), new StringGenerator())); final EvalNuggetInterface en[] = new EvalNuggetInterface[] { new TableComparator(table.where(filter.apply("S1.contains(`aab`)")), - table.where(new StringContainsFilter("S1", "aab"))), + table.where(new StringContainsFilter("S1", "aab"))), new TableComparator(table.where(filter.apply("S2.contains(`m`)")), - table.where(new StringContainsFilter("S2", "m"))), + table.where(new StringContainsFilter("S2", "m"))), new TableComparator(table.where(filter.apply("!S2.contains(`ma`)")), - table.where( - new StringContainsFilter(MatchFilter.MatchType.Inverted, "S2", "ma"))), + table.where(new StringContainsFilter(MatchFilter.MatchType.Inverted, "S2", "ma"))), new 
TableComparator(table.where(filter.apply("S2.toLowerCase().contains(`ma`)")), - table.where(new StringContainsFilter(MatchFilter.CaseSensitivity.IgnoreCase, - MatchFilter.MatchType.Regular, "S2", "mA"))), + table.where(new StringContainsFilter(MatchFilter.CaseSensitivity.IgnoreCase, + MatchFilter.MatchType.Regular, "S2", "mA"))), new TableComparator(table.where(filter.apply("S2.contains(`mA`)")), - table.where(new StringContainsFilter("S2", "mA"))), + table.where(new StringContainsFilter("S2", "mA"))), }; for (int i = 0; i < 500; i++) { @@ -832,17 +762,16 @@ public void testStringContainsFilter() { } public void testDoubleRangeFilterSimple() { - final Table t = TableTools - .newTable(doubleCol("DV", 1.0, 2.0, -3.0, Double.NaN, QueryConstants.NULL_DOUBLE, 6.0, - Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY, 9.0)) - .update("IV=i+1"); + final Table t = TableTools.newTable(doubleCol("DV", 1.0, 2.0, -3.0, Double.NaN, QueryConstants.NULL_DOUBLE, 6.0, + Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY, 9.0)).update("IV=i+1"); Table leq1 = t.where("DV <= 1.0"); Table leq1b = t.where("DV <= 1.0 && true"); assertTableEquals(leq1b, leq1); - assertTableEquals(TableTools.newTable( - doubleCol("DV", 1.0, -3.0, QueryConstants.NULL_DOUBLE, Double.NEGATIVE_INFINITY), - intCol("IV", 1, 3, 5, 8)), leq1); + assertTableEquals( + TableTools.newTable(doubleCol("DV", 1.0, -3.0, QueryConstants.NULL_DOUBLE, Double.NEGATIVE_INFINITY), + intCol("IV", 1, 3, 5, 8)), + leq1); Table geq1 = t.where("DV >= 1.0"); Table geq1b = t.where("DV >= 1.0 && true"); @@ -850,21 +779,19 @@ public void testDoubleRangeFilterSimple() { TableTools.showWithIndex(geq1b); assertTableEquals(geq1b, geq1); - assertTableEquals(TableTools.newTable( - doubleCol("DV", 1.0, 2.0, Double.NaN, 6.0, Double.POSITIVE_INFINITY, 9.0), - intCol("IV", 1, 2, 4, 6, 7, 9)), geq1); + assertTableEquals(TableTools.newTable(doubleCol("DV", 1.0, 2.0, Double.NaN, 6.0, Double.POSITIVE_INFINITY, 9.0), + intCol("IV", 1, 2, 4, 6, 7, 9)), 
geq1); } public void testLongRangeFilterSimple() { - final Table t = TableTools - .newTable(longCol("LV", 1, 2, -3, Long.MAX_VALUE, QueryConstants.NULL_LONG, 6)) - .update("IV=i+1"); + final Table t = TableTools.newTable(longCol("LV", 1, 2, -3, Long.MAX_VALUE, QueryConstants.NULL_LONG, 6)) + .update("IV=i+1"); Table leq1 = t.where("LV <= 1"); Table leq1b = t.where("LV <= 1 && true"); assertTableEquals(leq1b, leq1); - assertTableEquals(TableTools.newTable(longCol("LV", 1, -3, QueryConstants.NULL_LONG), - intCol("IV", 1, 3, 5)), leq1); + assertTableEquals(TableTools.newTable(longCol("LV", 1, -3, QueryConstants.NULL_LONG), intCol("IV", 1, 3, 5)), + leq1); Table geq1 = t.where("LV >= 1"); Table geq1b = t.where("LV >= 1 && true"); @@ -872,18 +799,14 @@ public void testLongRangeFilterSimple() { TableTools.showWithIndex(geq1b); assertTableEquals(geq1b, geq1); - assertTableEquals( - TableTools.newTable(longCol("LV", 1, 2, Long.MAX_VALUE, 6), intCol("IV", 1, 2, 4, 6)), - geq1); + assertTableEquals(TableTools.newTable(longCol("LV", 1, 2, Long.MAX_VALUE, 6), intCol("IV", 1, 2, 4, 6)), geq1); } public void testComparableRangeFilterSimple() { - final Table t = TableTools - .newTable(longCol("LV", 1, 2, -3, Long.MAX_VALUE, QueryConstants.NULL_LONG, 6)) - .update("IV=i+1", "LV=LV==null ? null : java.math.BigInteger.valueOf(LV)"); + final Table t = TableTools.newTable(longCol("LV", 1, 2, -3, Long.MAX_VALUE, QueryConstants.NULL_LONG, 6)) + .update("IV=i+1", "LV=LV==null ? 
null : java.math.BigInteger.valueOf(LV)"); Table leq1 = t.where("LV <= 1"); - Table leq1b = - t.where("io.deephaven.db.util.DhObjectComparisons.leq(LV, java.math.BigInteger.ONE)"); + Table leq1b = t.where("io.deephaven.db.util.DhObjectComparisons.leq(LV, java.math.BigInteger.ONE)"); Table leq1c = t.where("LV <= java.math.BigInteger.ONE"); assertTableEquals(leq1b, leq1); @@ -891,8 +814,7 @@ public void testComparableRangeFilterSimple() { assertTableEquals(TableTools.newTable(intCol("IV", 1, 3, 5)), leq1.dropColumns("LV")); Table geq1 = t.where("LV >= 1"); - Table geq1b = - t.where("io.deephaven.db.util.DhObjectComparisons.geq(LV, java.math.BigInteger.ONE)"); + Table geq1b = t.where("io.deephaven.db.util.DhObjectComparisons.geq(LV, java.math.BigInteger.ONE)"); Table geq1c = t.where("LV >= java.math.BigInteger.ONE"); TableTools.showWithIndex(geq1); TableTools.showWithIndex(geq1b); @@ -910,34 +832,34 @@ public void testDoubleRangeFilter() { final ColumnInfo[] columnInfo; final QueryTable table = getTable(size, random, - columnInfo = initColumnInfos(new String[] {"D1", "D2", "F1", "F2"}, - new DoubleGenerator(), - new DoubleGenerator(0, 1000, 0.1, 0.1), - new FloatGenerator(), - new FloatGenerator(0, 1000, 0.1, 0.1))); + columnInfo = initColumnInfos(new String[] {"D1", "D2", "F1", "F2"}, + new DoubleGenerator(), + new DoubleGenerator(0, 1000, 0.1, 0.1), + new FloatGenerator(), + new FloatGenerator(0, 1000, 0.1, 0.1))); final EvalNuggetInterface en[] = new EvalNuggetInterface[] { new TableComparator(table.where(filter.apply("D1 > 500 && D1 <= 501")), - table.where(new DoubleRangeFilter("D1", 500, 501, false, true))), + table.where(new DoubleRangeFilter("D1", 500, 501, false, true))), new TableComparator(table.where(filter.apply("D1 > 500.7 && D1 <= 500.8")), - table.where(new DoubleRangeFilter("D1", 500.7, 500.8, false, true))), + table.where(new DoubleRangeFilter("D1", 500.7, 500.8, false, true))), new TableComparator(table.where(filter.apply("D2 >= 250.02 && D2 < 
250.03")), - table.where(DoubleRangeFilter.makeRange("D2", "250.02"))), + table.where(DoubleRangeFilter.makeRange("D2", "250.02"))), new TableComparator(table.where(filter.apply("F1 > 500 && F1 <= 501")), - table.where(new FloatRangeFilter("F1", 500, 501, false, true))), + table.where(new FloatRangeFilter("F1", 500, 501, false, true))), new TableComparator(table.where(filter.apply("F1 > 500.7 && F1 <= 500.8")), - table.where(new FloatRangeFilter("F1", 500.7f, 500.8f, false, true))), + table.where(new FloatRangeFilter("F1", 500.7f, 500.8f, false, true))), new TableComparator(table.where(filter.apply("F2 >= 250.02 && F2 < 250.03")), - table.where(FloatRangeFilter.makeRange("F2", "250.02"))), + table.where(FloatRangeFilter.makeRange("F2", "250.02"))), new TableComparator(table.where(filter.apply("F1 <= -250.02 && F1 > -250.03")), - table.where(FloatRangeFilter.makeRange("F1", "-250.02"))), + table.where(FloatRangeFilter.makeRange("F1", "-250.02"))), new TableComparator(table.where(filter.apply("F1 <= -37.0002 && F1 > -37.0003")), - table.where(FloatRangeFilter.makeRange("F1", "-37.0002"))), + table.where(FloatRangeFilter.makeRange("F1", "-37.0002"))), new TableComparator(table.where(filter.apply("D1 <= -250.02 && D1 > -250.03")), - table.where(DoubleRangeFilter.makeRange("D1", "-250.02"))), + table.where(DoubleRangeFilter.makeRange("D1", "-250.02"))), new TableComparator(table.where(filter.apply("D1 <= -37.0002 && D1 > -37.0003")), - table.where(DoubleRangeFilter.makeRange("D1", "-37.0002"))) + table.where(DoubleRangeFilter.makeRange("D1", "-37.0002"))) }; for (int i = 0; i < 500; i++) { @@ -956,51 +878,49 @@ public void testDateTimeRangeFilter() { final ColumnInfo[] columnInfo; final QueryTable table = getTable(size, random, - columnInfo = initColumnInfos(new String[] {"Timestamp", "Ts2", "Sentinel"}, - new UnsortedDateTimeGenerator(startTime, endTime), - new UnsortedDateTimeLongGenerator(startTime, endTime), - new IntGenerator(0, 1000))); + columnInfo = 
initColumnInfos(new String[] {"Timestamp", "Ts2", "Sentinel"}, + new UnsortedDateTimeGenerator(startTime, endTime), + new UnsortedDateTimeLongGenerator(startTime, endTime), + new IntGenerator(0, 1000))); final DBDateTime lower = DBTimeUtils.plus(startTime, DBTimeUtils.SECOND); final DBDateTime upper = DBTimeUtils.plus(startTime, DBTimeUtils.SECOND * 2); final EvalNuggetInterface en[] = new EvalNuggetInterface[] { new TableComparator( - table.where(filter.apply("Timestamp >= '" + lower.toString() - + "' && Timestamp <= '" + upper.toString() + "'")), - "Condition", - table.where(new DateTimeRangeFilter("Timestamp", lower, upper, true, true)), - "Range"), + table.where(filter.apply( + "Timestamp >= '" + lower.toString() + "' && Timestamp <= '" + upper.toString() + "'")), + "Condition", table.where(new DateTimeRangeFilter("Timestamp", lower, upper, true, true)), + "Range"), new TableComparator( - table.where(filter.apply("Timestamp >= '" + lower.toString() - + "' && Timestamp < '" + upper.toString() + "'")), - table.where(new DateTimeRangeFilter("Timestamp", lower, upper, true, false))), + table.where(filter.apply( + "Timestamp >= '" + lower.toString() + "' && Timestamp < '" + upper.toString() + "'")), + table.where(new DateTimeRangeFilter("Timestamp", lower, upper, true, false))), new TableComparator( - table.where(filter.apply("Timestamp > '" + lower.toString() - + "' && Timestamp <= '" + upper.toString() + "'")), - table.where(new DateTimeRangeFilter("Timestamp", lower, upper, true, true))), + table.where(filter.apply( + "Timestamp > '" + lower.toString() + "' && Timestamp <= '" + upper.toString() + "'")), + table.where(new DateTimeRangeFilter("Timestamp", lower, upper, true, true))), new TableComparator( - table.where(filter.apply("Timestamp > '" + lower.toString() - + "' && Timestamp < '" + upper.toString() + "'")), - table.where(new DateTimeRangeFilter("Timestamp", lower, upper, false, false))), + table.where(filter.apply( + "Timestamp > '" + lower.toString() + 
"' && Timestamp < '" + upper.toString() + "'")), + table.where(new DateTimeRangeFilter("Timestamp", lower, upper, false, false))), new TableComparator( - table.where(filter.apply( - "Ts2 >= '" + lower.toString() + "' && Ts2 <= '" + upper.toString() + "'")), - "Condition", - table.where(new DateTimeRangeFilter("Ts2", lower, upper, true, true)), "Range"), + table.where( + filter.apply("Ts2 >= '" + lower.toString() + "' && Ts2 <= '" + upper.toString() + "'")), + "Condition", table.where(new DateTimeRangeFilter("Ts2", lower, upper, true, true)), "Range"), new TableComparator( - table.where(filter.apply( - "Ts2 >= '" + lower.toString() + "' && Ts2 < '" + upper.toString() + "'")), - table.where(new DateTimeRangeFilter("Ts2", lower, upper, true, false))), + table.where( + filter.apply("Ts2 >= '" + lower.toString() + "' && Ts2 < '" + upper.toString() + "'")), + table.where(new DateTimeRangeFilter("Ts2", lower, upper, true, false))), new TableComparator( - table.where(filter.apply( - "Ts2 > '" + lower.toString() + "' && Ts2 <= '" + upper.toString() + "'")), - table.where(new DateTimeRangeFilter("Ts2", lower, upper, true, true))), + table.where( + filter.apply("Ts2 > '" + lower.toString() + "' && Ts2 <= '" + upper.toString() + "'")), + table.where(new DateTimeRangeFilter("Ts2", lower, upper, true, true))), new TableComparator( - table.where(filter.apply( - "Ts2 > '" + lower.toString() + "' && Ts2 < '" + upper.toString() + "'")), - table.where(new DateTimeRangeFilter("Ts2", lower, upper, false, false))) + table.where( + filter.apply("Ts2 > '" + lower.toString() + "' && Ts2 < '" + upper.toString() + "'")), + table.where(new DateTimeRangeFilter("Ts2", lower, upper, false, false))) }; for (int i = 0; i < 500; i++) { @@ -1019,32 +939,31 @@ public void testDateTimeRangeFilterNulls() { final ColumnInfo[] columnInfo; final QueryTable table = getTable(size, random, - columnInfo = initColumnInfos(new String[] {"Timestamp", "Sentinel"}, - new UnsortedDateTimeGenerator(startTime, 
endTime, 0.1), - new IntGenerator(0, 1000))); + columnInfo = initColumnInfos(new String[] {"Timestamp", "Sentinel"}, + new UnsortedDateTimeGenerator(startTime, endTime, 0.1), + new IntGenerator(0, 1000))); final DBDateTime lower = DBTimeUtils.plus(startTime, DBTimeUtils.SECOND); final DBDateTime upper = DBTimeUtils.plus(startTime, DBTimeUtils.SECOND * 2); final EvalNuggetInterface en[] = new EvalNuggetInterface[] { new TableComparator( - table.where(filter.apply("Timestamp >= '" + lower.toString() - + "' && Timestamp <= '" + upper.toString() + "'")), - "Condition", - table.where(new DateTimeRangeFilter("Timestamp", lower, upper, true, true)), - "Range"), + table.where(filter.apply( + "Timestamp >= '" + lower.toString() + "' && Timestamp <= '" + upper.toString() + "'")), + "Condition", table.where(new DateTimeRangeFilter("Timestamp", lower, upper, true, true)), + "Range"), new TableComparator( - table.where(filter.apply("Timestamp >= '" + lower.toString() - + "' && Timestamp < '" + upper.toString() + "'")), - table.where(new DateTimeRangeFilter("Timestamp", lower, upper, true, false))), + table.where(filter.apply( + "Timestamp >= '" + lower.toString() + "' && Timestamp < '" + upper.toString() + "'")), + table.where(new DateTimeRangeFilter("Timestamp", lower, upper, true, false))), new TableComparator( - table.where(filter.apply("Timestamp > '" + lower.toString() - + "' && Timestamp <= '" + upper.toString() + "'")), - table.where(new DateTimeRangeFilter("Timestamp", lower, upper, true, true))), + table.where(filter.apply( + "Timestamp > '" + lower.toString() + "' && Timestamp <= '" + upper.toString() + "'")), + table.where(new DateTimeRangeFilter("Timestamp", lower, upper, true, true))), new TableComparator( - table.where(filter.apply("Timestamp > '" + lower.toString() - + "' && Timestamp < '" + upper.toString() + "'")), - table.where(new DateTimeRangeFilter("Timestamp", lower, upper, false, false))), + table.where(filter.apply( + "Timestamp > '" + lower.toString() + 
"' && Timestamp < '" + upper.toString() + "'")), + table.where(new DateTimeRangeFilter("Timestamp", lower, upper, false, false))), }; for (int i = 0; i < 500; i++) { @@ -1054,8 +973,8 @@ public void testDateTimeRangeFilterNulls() { public void testReverse() { final QueryTable table = testRefreshingTable(i(1, 2, 3), - c("Ticker", "AAPL", "IBM", "TSLA"), - c("Timestamp", 1L, 10L, 50L)); + c("Ticker", "AAPL", "IBM", "TSLA"), + c("Timestamp", 1L, 10L, 50L)); final Table reversed = table.reverse(); show(reversed); @@ -1064,7 +983,7 @@ public void testReverse() { LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { final ColumnHolder[] columnAdditions = - new ColumnHolder[] {c("Ticker", "SPY", "VXX"), c("Timestamp", 60L, 70L)}; + new ColumnHolder[] {c("Ticker", "SPY", "VXX"), c("Timestamp", 60L, 70L)}; TstUtils.addToTable(table, i(2048, 2049), columnAdditions); table.notifyListeners(i(2048, 2049), i(), i()); }); @@ -1074,36 +993,33 @@ public void testReverse() { checkReverse(table, reversed, "Ticker"); // noinspection unchecked - assertEquals("TSLA", - reversed.getColumnSource("Ticker").getPrev(reversed.getIndex().getPrevIndex().get(0))); + assertEquals("TSLA", reversed.getColumnSource("Ticker").getPrev(reversed.getIndex().getPrevIndex().get(0))); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { }); // noinspection unchecked - assertEquals("VXX", - reversed.getColumnSource("Ticker").getPrev(reversed.getIndex().getPrevIndex().get(0))); + assertEquals("VXX", reversed.getColumnSource("Ticker").getPrev(reversed.getIndex().getPrevIndex().get(0))); final ColumnSource longIdentityColumnSource = - new AbstractColumnSource.DefaultedImmutable(long.class) { - @Override - public void startTrackingPrevValues() { /* nothing to do */ } + new AbstractColumnSource.DefaultedImmutable(long.class) { + @Override + public void startTrackingPrevValues() { /* nothing to do */ } - @Override - public Long get(long index) { - return getLong(index); - } + @Override + public Long get(long 
index) { + return getLong(index); + } - @Override - public long getLong(long index) { - return index; - } - }; + @Override + public long getLong(long index) { + return index; + } + }; - final QueryTable bigTable = - new QueryTable(i(0, (long) Integer.MAX_VALUE, (long) Integer.MAX_VALUE * 2L), + final QueryTable bigTable = new QueryTable(i(0, (long) Integer.MAX_VALUE, (long) Integer.MAX_VALUE * 2L), Collections.singletonMap("LICS", longIdentityColumnSource)); bigTable.setRefreshing(true); final Table bigReversed = bigTable.reverse(); @@ -1130,7 +1046,7 @@ public long getLong(long index) { public void testReverse2() { final QueryTable table = testRefreshingTable(i(1), - c("Timestamp", 1L)); + c("Timestamp", 1L)); final Table reversed = table.reverse(); show(reversed); @@ -1138,8 +1054,7 @@ public void testReverse2() { checkReverse(table, reversed, "Timestamp"); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - final ColumnHolder[] columnAdditions = - new ColumnHolder[] {c("Timestamp", 2048L, 2049L)}; + final ColumnHolder[] columnAdditions = new ColumnHolder[] {c("Timestamp", 2048L, 2049L)}; TstUtils.addToTable(table, i(2048, 2049), columnAdditions); table.notifyListeners(i(2048, 2049), i(), i()); }); @@ -1238,16 +1153,15 @@ public void testReverseIncremental() throws ParseException { final int bitSize = 1000; final ColumnInfo[] columnInfo; final QueryTable table = getTable(size, random, - columnInfo = initColumnInfos(new String[] {"Date", "C1", "C2", "KEY"}, - new DateGenerator(format.parse("2011-02-02"), format.parse("2011-02-03")), - new SetGenerator<>("a", "b"), - new SetGenerator<>(10, 20, 30), - new SortedBigIntegerGenerator(bitSize))); + columnInfo = initColumnInfos(new String[] {"Date", "C1", "C2", "KEY"}, + new DateGenerator(format.parse("2011-02-02"), format.parse("2011-02-03")), + new SetGenerator<>("a", "b"), + new SetGenerator<>(10, 20, 30), + new SortedBigIntegerGenerator(bitSize))); final EvalNuggetInterface[] en = new EvalNuggetInterface[] { 
EvalNugget.from(() -> table.update("K=KEY").reverse()), - new TableComparator(table.update("K=KEY").reverse(), - table.update("K=KEY").sortDescending("K")) + new TableComparator(table.update("K=KEY").reverse(), table.update("K=KEY").sortDescending("K")) }; final int updateSize = (int) Math.ceil(Math.sqrt(size)); @@ -1257,8 +1171,7 @@ public void testReverseIncremental() throws ParseException { } public void testSnapshot() { - final QueryTable right = - testRefreshingTable(i(10, 25, 30), c("A", 3, 1, 2), c("B", "c", "a", "b")); + final QueryTable right = testRefreshingTable(i(10, 25, 30), c("A", 3, 1, 2), c("B", "c", "a", "b")); final QueryTable left1 = testRefreshingTable(c("T", 1)); final Table expected = right.naturalJoin(left1, "", "T"); TableTools.showWithIndex(expected); @@ -1285,8 +1198,8 @@ public void testSnapshot() { left2.notifyListeners(i(3), i(), i()); }); show(snapshot, 50); - final Table expect2 = newTable(c("A", 3, 30, 1, 2, 50), c("B", "c", "aa", "a", "b", "bc"), - c("T", 5, 5, 5, 5, 5)); + final Table expect2 = + newTable(c("A", 3, 30, 1, 2, 50), c("B", "c", "aa", "a", "b", "bc"), c("T", 5, 5, 5, 5, 5)); assertTableEquals(expect2, snapshot); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { @@ -1308,26 +1221,25 @@ public void testSnapshot() { } public void testSnapshotHistorical() { - final QueryTable right = - testRefreshingTable(i(10, 25, 30), c("A", 3, 1, 2), c("B", "c", "a", "b")); + final QueryTable right = testRefreshingTable(i(10, 25, 30), c("A", 3, 1, 2), c("B", "c", "a", "b")); final QueryTable left1 = testRefreshingTable(c("T", 1)); show(left1.snapshotHistory(right)); assertEquals("", diff(left1.snapshotHistory(right), - testRefreshingTable(c("T", 1, 1, 1), c("A", 3, 1, 2), c("B", "c", "a", "b")), 10)); + testRefreshingTable(c("T", 1, 1, 1), c("A", 3, 1, 2), c("B", "c", "a", "b")), 10)); final QueryTable left2 = testRefreshingTable(c("T", 1, 2)); final Table snapshot = left2.snapshotHistory(right); show(snapshot); - assertEquals("", 
diff(snapshot, testRefreshingTable(c("T", 1, 1, 1, 2, 2, 2), - c("A", 3, 1, 2, 3, 1, 2), c("B", "c", "a", "b", "c", "a", "b")), 10)); + assertEquals("", diff(snapshot, testRefreshingTable(c("T", 1, 1, 1, 2, 2, 2), c("A", 3, 1, 2, 3, 1, 2), + c("B", "c", "a", "b", "c", "a", "b")), 10)); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { addToTable(right, i(20, 40), c("A", 30, 50), c("B", "aa", "bc")); right.notifyListeners(i(20, 40), i(), i()); }); show(snapshot, 50); - assertEquals("", diff(snapshot, testRefreshingTable(c("T", 1, 1, 1, 2, 2, 2), - c("A", 3, 1, 2, 3, 1, 2), c("B", "c", "a", "b", "c", "a", "b")), 10)); + assertEquals("", diff(snapshot, testRefreshingTable(c("T", 1, 1, 1, 2, 2, 2), c("A", 3, 1, 2, 3, 1, 2), + c("B", "c", "a", "b", "c", "a", "b")), 10)); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { addToTable(left2, i(3), c("T", 5)); @@ -1335,11 +1247,11 @@ public void testSnapshotHistorical() { }); show(snapshot, 50); assertEquals("", - diff(snapshot, - testRefreshingTable(c("T", 1, 1, 1, 2, 2, 2, 5, 5, 5, 5, 5), - c("A", 3, 1, 2, 3, 1, 2, 3, 30, 1, 2, 50), - c("B", "c", "a", "b", "c", "a", "b", "c", "aa", "a", "b", "bc")), - 10)); + diff(snapshot, + testRefreshingTable(c("T", 1, 1, 1, 2, 2, 2, 5, 5, 5, 5, 5), + c("A", 3, 1, 2, 3, 1, 2, 3, 30, 1, 2, 50), + c("B", "c", "a", "b", "c", "a", "b", "c", "aa", "a", "b", "bc")), + 10)); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { removeRows(right, i(10, 20, 30)); @@ -1348,19 +1260,17 @@ public void testSnapshotHistorical() { }); show(snapshot, 50); assertEquals("", diff(snapshot, testRefreshingTable(c("T", 1, 1, 1, 2, 2, 2, 5, 5, 5, 5, 5), - c("A", 3, 1, 2, 3, 1, 2, 3, 30, 1, 2, 50), - c("B", "c", "a", "b", "c", "a", "b", "c", "aa", "a", "b", "bc")), 10)); + c("A", 3, 1, 2, 3, 1, 2, 3, 30, 1, 2, 50), + c("B", "c", "a", "b", "c", "a", "b", "c", "aa", "a", "b", "bc")), 10)); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { addToTable(left2, i(4, 5), c("T", 7, 8)); 
left2.notifyListeners(i(4, 5), i(), i()); }); show(snapshot, 50); - assertEquals("", diff(snapshot, testRefreshingTable( - c("T", 1, 1, 1, 2, 2, 2, 5, 5, 5, 5, 5, 7, 7, 8, 8), - c("A", 3, 1, 2, 3, 1, 2, 3, 30, 1, 2, 50, 11, 50, 11, 50), - c("B", "c", "a", "b", "c", "a", "b", "c", "aa", "a", "b", "bc", "A", "bc", "A", "bc")), - 10)); + assertEquals("", diff(snapshot, testRefreshingTable(c("T", 1, 1, 1, 2, 2, 2, 5, 5, 5, 5, 5, 7, 7, 8, 8), + c("A", 3, 1, 2, 3, 1, 2, 3, 30, 1, 2, 50, 11, 50, 11, 50), + c("B", "c", "a", "b", "c", "a", "b", "c", "aa", "a", "b", "bc", "A", "bc", "A", "bc")), 10)); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { final Index rowsToRemove = right.getIndex().clone(); @@ -1368,22 +1278,18 @@ public void testSnapshotHistorical() { right.notifyListeners(i(), rowsToRemove, i()); }); show(snapshot, 50); - assertEquals("", diff(snapshot, testRefreshingTable( - c("T", 1, 1, 1, 2, 2, 2, 5, 5, 5, 5, 5, 7, 7, 8, 8), - c("A", 3, 1, 2, 3, 1, 2, 3, 30, 1, 2, 50, 11, 50, 11, 50), - c("B", "c", "a", "b", "c", "a", "b", "c", "aa", "a", "b", "bc", "A", "bc", "A", "bc")), - 10)); + assertEquals("", diff(snapshot, testRefreshingTable(c("T", 1, 1, 1, 2, 2, 2, 5, 5, 5, 5, 5, 7, 7, 8, 8), + c("A", 3, 1, 2, 3, 1, 2, 3, 30, 1, 2, 50, 11, 50, 11, 50), + c("B", "c", "a", "b", "c", "a", "b", "c", "aa", "a", "b", "bc", "A", "bc", "A", "bc")), 10)); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { addToTable(left2, i(6), c("T", 9)); left2.notifyListeners(i(6), i(), i()); }); show(snapshot, 50); - assertEquals("", diff(snapshot, testRefreshingTable( - c("T", 1, 1, 1, 2, 2, 2, 5, 5, 5, 5, 5, 7, 7, 8, 8), - c("A", 3, 1, 2, 3, 1, 2, 3, 30, 1, 2, 50, 11, 50, 11, 50), - c("B", "c", "a", "b", "c", "a", "b", "c", "aa", "a", "b", "bc", "A", "bc", "A", "bc")), - 10)); + assertEquals("", diff(snapshot, testRefreshingTable(c("T", 1, 1, 1, 2, 2, 2, 5, 5, 5, 5, 5, 7, 7, 8, 8), + c("A", 3, 1, 2, 3, 1, 2, 3, 30, 1, 2, 50, 11, 50, 11, 50), + c("B", "c", "a", "b", "c", 
"a", "b", "c", "aa", "a", "b", "bc", "A", "bc", "A", "bc")), 10)); } @@ -1394,59 +1300,43 @@ public void testSnapshotDependencies() { QueryScope.addParam("testSnapshotDependenciesCounter", new AtomicInteger()); final Table snappedFirst = left.snapshot(right); - final Table snappedDep = - snappedFirst.select("B=testSnapshotDependenciesCounter.incrementAndGet()"); + final Table snappedDep = snappedFirst.select("B=testSnapshotDependenciesCounter.incrementAndGet()"); final Table snappedOfSnap = left.snapshot(snappedDep); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - TestCase.assertTrue( - ((QueryTable) snappedFirst).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertTrue( - ((QueryTable) snappedDep).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertTrue( - ((QueryTable) snappedOfSnap).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(((QueryTable) snappedFirst).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(((QueryTable) snappedDep).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(((QueryTable) snappedOfSnap).satisfied(LogicalClock.DEFAULT.currentStep())); }); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { addToTable(left, i(2), c("T", 2)); left.notifyListeners(i(2), i(), i()); - TestCase.assertFalse( - ((QueryTable) snappedFirst).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse( - ((QueryTable) snappedDep).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse( - ((QueryTable) snappedOfSnap).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(((QueryTable) snappedFirst).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(((QueryTable) snappedDep).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(((QueryTable) snappedOfSnap).satisfied(LogicalClock.DEFAULT.currentStep())); // this will do the notification for left; at which point we can do the first snapshot 
boolean flushed = LiveTableMonitor.DEFAULT.flushOneNotificationForUnitTests(); TestCase.assertTrue(flushed); - TestCase.assertTrue( - ((QueryTable) snappedFirst).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse( - ((QueryTable) snappedDep).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse( - ((QueryTable) snappedOfSnap).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(((QueryTable) snappedFirst).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(((QueryTable) snappedDep).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(((QueryTable) snappedOfSnap).satisfied(LogicalClock.DEFAULT.currentStep())); // now we should flush the select flushed = LiveTableMonitor.DEFAULT.flushOneNotificationForUnitTests(); TestCase.assertTrue(flushed); - TestCase.assertTrue( - ((QueryTable) snappedFirst).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertTrue( - ((QueryTable) snappedDep).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse( - ((QueryTable) snappedOfSnap).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(((QueryTable) snappedFirst).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(((QueryTable) snappedDep).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(((QueryTable) snappedOfSnap).satisfied(LogicalClock.DEFAULT.currentStep())); // now we should flush the second snapshot flushed = LiveTableMonitor.DEFAULT.flushOneNotificationForUnitTests(); TestCase.assertTrue(flushed); - TestCase.assertTrue( - ((QueryTable) snappedFirst).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertTrue( - ((QueryTable) snappedDep).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertTrue( - ((QueryTable) snappedOfSnap).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(((QueryTable) snappedFirst).satisfied(LogicalClock.DEFAULT.currentStep())); + 
TestCase.assertTrue(((QueryTable) snappedDep).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(((QueryTable) snappedOfSnap).satisfied(LogicalClock.DEFAULT.currentStep())); flushed = LiveTableMonitor.DEFAULT.flushOneNotificationForUnitTests(); TestCase.assertFalse(flushed); @@ -1465,18 +1355,14 @@ public void testSnapshotIncrementalDependencies() { QueryScope.addParam("testSnapshotDependenciesCounter", new AtomicInteger()); final Table snappedFirst = left.snapshotIncremental(right); - final Table snappedDep = - snappedFirst.select("B=testSnapshotDependenciesCounter.incrementAndGet()"); + final Table snappedDep = snappedFirst.select("B=testSnapshotDependenciesCounter.incrementAndGet()"); final Table snappedOfSnap = left.snapshotIncremental(snappedDep); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { System.out.println("Checking everything is satisfied with no updates."); - TestCase.assertTrue( - ((QueryTable) snappedFirst).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertTrue( - ((QueryTable) snappedDep).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertTrue( - ((QueryTable) snappedOfSnap).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(((QueryTable) snappedFirst).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(((QueryTable) snappedDep).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(((QueryTable) snappedOfSnap).satisfied(LogicalClock.DEFAULT.currentStep())); System.out.println("Simple Update Cycle Complete."); }); @@ -1486,12 +1372,9 @@ public void testSnapshotIncrementalDependencies() { left.notifyListeners(i(2), i(), i()); System.out.println("Checking initial satisfaction."); - TestCase.assertFalse( - ((QueryTable) snappedFirst).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse( - ((QueryTable) snappedDep).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse( - ((QueryTable) 
snappedOfSnap).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(((QueryTable) snappedFirst).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(((QueryTable) snappedDep).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(((QueryTable) snappedOfSnap).satisfied(LogicalClock.DEFAULT.currentStep())); System.out.println("Flushing first notification."); // this will do the notification for left @@ -1499,25 +1382,18 @@ public void testSnapshotIncrementalDependencies() { System.out.println("Checking satisfaction after #1."); TestCase.assertTrue(flushed); - TestCase.assertFalse( - ((QueryTable) snappedFirst).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse( - ((QueryTable) snappedDep).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse( - ((QueryTable) snappedOfSnap).satisfied(LogicalClock.DEFAULT.currentStep())); - - System.out - .println("Flushing second notification, which should be our listener recorder"); + TestCase.assertFalse(((QueryTable) snappedFirst).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(((QueryTable) snappedDep).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(((QueryTable) snappedOfSnap).satisfied(LogicalClock.DEFAULT.currentStep())); + + System.out.println("Flushing second notification, which should be our listener recorder"); flushed = LiveTableMonitor.DEFAULT.flushOneNotificationForUnitTests(); System.out.println("Checking satisfaction after #2."); TestCase.assertTrue(flushed); - TestCase.assertFalse( - ((QueryTable) snappedFirst).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse( - ((QueryTable) snappedDep).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse( - ((QueryTable) snappedOfSnap).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(((QueryTable) snappedFirst).satisfied(LogicalClock.DEFAULT.currentStep())); + 
TestCase.assertFalse(((QueryTable) snappedDep).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(((QueryTable) snappedOfSnap).satisfied(LogicalClock.DEFAULT.currentStep())); System.out.println("Flushing third notification, which should be our merged listener"); @@ -1525,42 +1401,30 @@ public void testSnapshotIncrementalDependencies() { flushed = LiveTableMonitor.DEFAULT.flushOneNotificationForUnitTests(); // this will do the merged notification; which means the snaphsot is satisfied TestCase.assertTrue(flushed); - TestCase.assertTrue( - ((QueryTable) snappedFirst).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse( - ((QueryTable) snappedDep).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse( - ((QueryTable) snappedOfSnap).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(((QueryTable) snappedFirst).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(((QueryTable) snappedDep).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(((QueryTable) snappedOfSnap).satisfied(LogicalClock.DEFAULT.currentStep())); // now we should flush the select flushed = LiveTableMonitor.DEFAULT.flushOneNotificationForUnitTests(); TestCase.assertTrue(flushed); - TestCase.assertTrue( - ((QueryTable) snappedFirst).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertTrue( - ((QueryTable) snappedDep).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse( - ((QueryTable) snappedOfSnap).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(((QueryTable) snappedFirst).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(((QueryTable) snappedDep).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(((QueryTable) snappedOfSnap).satisfied(LogicalClock.DEFAULT.currentStep())); // now we should flush the second snapshot recorder flushed = LiveTableMonitor.DEFAULT.flushOneNotificationForUnitTests(); 
TestCase.assertTrue(flushed); - TestCase.assertTrue( - ((QueryTable) snappedFirst).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertTrue( - ((QueryTable) snappedDep).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse( - ((QueryTable) snappedOfSnap).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(((QueryTable) snappedFirst).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(((QueryTable) snappedDep).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(((QueryTable) snappedOfSnap).satisfied(LogicalClock.DEFAULT.currentStep())); // now we should flush the second snapshot merged listener flushed = LiveTableMonitor.DEFAULT.flushOneNotificationForUnitTests(); TestCase.assertTrue(flushed); - TestCase.assertTrue( - ((QueryTable) snappedFirst).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertTrue( - ((QueryTable) snappedDep).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertTrue( - ((QueryTable) snappedOfSnap).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(((QueryTable) snappedFirst).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(((QueryTable) snappedDep).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(((QueryTable) snappedOfSnap).satisfied(LogicalClock.DEFAULT.currentStep())); // nothing left flushed = LiveTableMonitor.DEFAULT.flushOneNotificationForUnitTests(); @@ -1577,38 +1441,28 @@ public void testSnapshotIncrementalDependencies() { right.notifyListeners(i(2), i(), i()); System.out.println("Checking initial satisfaction."); - TestCase.assertFalse( - ((QueryTable) snappedFirst).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse( - ((QueryTable) snappedDep).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse( - ((QueryTable) snappedOfSnap).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(((QueryTable) 
snappedFirst).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(((QueryTable) snappedDep).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(((QueryTable) snappedOfSnap).satisfied(LogicalClock.DEFAULT.currentStep())); System.out.println("Flushing first notification."); - // this will do the notification for right; at which point we can should get the update - // going through + // this will do the notification for right; at which point we can should get the update going through boolean flushed = LiveTableMonitor.DEFAULT.flushOneNotificationForUnitTests(); System.out.println("Checking satisfaction after #1."); TestCase.assertTrue(flushed); - TestCase.assertFalse( - ((QueryTable) snappedFirst).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse( - ((QueryTable) snappedDep).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse( - ((QueryTable) snappedOfSnap).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(((QueryTable) snappedFirst).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(((QueryTable) snappedDep).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(((QueryTable) snappedOfSnap).satisfied(LogicalClock.DEFAULT.currentStep())); System.out.println("Flushing second notification, which should be our merged listener"); flushed = LiveTableMonitor.DEFAULT.flushOneNotificationForUnitTests(); System.out.println("Checking satisfaction after #2."); TestCase.assertTrue(flushed); - TestCase.assertTrue( - ((QueryTable) snappedFirst).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertTrue( - ((QueryTable) snappedDep).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertTrue( - ((QueryTable) snappedOfSnap).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(((QueryTable) snappedFirst).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(((QueryTable) 
snappedDep).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(((QueryTable) snappedOfSnap).satisfied(LogicalClock.DEFAULT.currentStep())); // nothing left flushed = LiveTableMonitor.DEFAULT.flushOneNotificationForUnitTests(); @@ -1629,90 +1483,65 @@ public void testSnapshotIncrementalDependencies() { left.notifyListeners(i(3), i(), i()); System.out.println("Checking initial satisfaction."); - TestCase.assertFalse( - ((QueryTable) snappedFirst).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse( - ((QueryTable) snappedDep).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse( - ((QueryTable) snappedOfSnap).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(((QueryTable) snappedFirst).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(((QueryTable) snappedDep).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(((QueryTable) snappedOfSnap).satisfied(LogicalClock.DEFAULT.currentStep())); System.out.println("Flushing first notification."); boolean flushed = LiveTableMonitor.DEFAULT.flushOneNotificationForUnitTests(); System.out.println("Checking satisfaction after #1."); TestCase.assertTrue(flushed); - TestCase.assertFalse( - ((QueryTable) snappedFirst).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse( - ((QueryTable) snappedDep).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse( - ((QueryTable) snappedOfSnap).satisfied(LogicalClock.DEFAULT.currentStep())); - - System.out.println( - "Flushing second notification, which should be the recorder for our second snapshot"); + TestCase.assertFalse(((QueryTable) snappedFirst).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(((QueryTable) snappedDep).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(((QueryTable) snappedOfSnap).satisfied(LogicalClock.DEFAULT.currentStep())); + + System.out.println("Flushing second notification, 
which should be the recorder for our second snapshot"); flushed = LiveTableMonitor.DEFAULT.flushOneNotificationForUnitTests(); System.out.println("Checking satisfaction after #2."); TestCase.assertTrue(flushed); - TestCase.assertFalse( - ((QueryTable) snappedFirst).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse( - ((QueryTable) snappedDep).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse( - ((QueryTable) snappedOfSnap).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(((QueryTable) snappedFirst).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(((QueryTable) snappedDep).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(((QueryTable) snappedOfSnap).satisfied(LogicalClock.DEFAULT.currentStep())); System.out.println("Flushing third notification, which should be our right recorder"); flushed = LiveTableMonitor.DEFAULT.flushOneNotificationForUnitTests(); System.out.println("Checking satisfaction after #3."); TestCase.assertTrue(flushed); - TestCase.assertFalse( - ((QueryTable) snappedFirst).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse( - ((QueryTable) snappedDep).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse( - ((QueryTable) snappedOfSnap).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(((QueryTable) snappedFirst).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(((QueryTable) snappedDep).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(((QueryTable) snappedOfSnap).satisfied(LogicalClock.DEFAULT.currentStep())); System.out.println("Flushing fourth notification, which should be our MergedListener"); flushed = LiveTableMonitor.DEFAULT.flushOneNotificationForUnitTests(); System.out.println("Checking satisfaction after #4."); TestCase.assertTrue(flushed); - TestCase.assertTrue( - ((QueryTable) 
snappedFirst).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse( - ((QueryTable) snappedDep).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse( - ((QueryTable) snappedOfSnap).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(((QueryTable) snappedFirst).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(((QueryTable) snappedDep).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(((QueryTable) snappedOfSnap).satisfied(LogicalClock.DEFAULT.currentStep())); // now we should flush the select flushed = LiveTableMonitor.DEFAULT.flushOneNotificationForUnitTests(); TestCase.assertTrue(flushed); - TestCase.assertTrue( - ((QueryTable) snappedFirst).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertTrue( - ((QueryTable) snappedDep).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse( - ((QueryTable) snappedOfSnap).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(((QueryTable) snappedFirst).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(((QueryTable) snappedDep).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(((QueryTable) snappedOfSnap).satisfied(LogicalClock.DEFAULT.currentStep())); // now we should flush the second snapshot recorder flushed = LiveTableMonitor.DEFAULT.flushOneNotificationForUnitTests(); TestCase.assertTrue(flushed); - TestCase.assertTrue( - ((QueryTable) snappedFirst).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertTrue( - ((QueryTable) snappedDep).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse( - ((QueryTable) snappedOfSnap).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(((QueryTable) snappedFirst).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(((QueryTable) snappedDep).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(((QueryTable) 
snappedOfSnap).satisfied(LogicalClock.DEFAULT.currentStep())); // now we should flush the second snapshot merged listener flushed = LiveTableMonitor.DEFAULT.flushOneNotificationForUnitTests(); TestCase.assertTrue(flushed); - TestCase.assertTrue( - ((QueryTable) snappedFirst).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertTrue( - ((QueryTable) snappedDep).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertTrue( - ((QueryTable) snappedOfSnap).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(((QueryTable) snappedFirst).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(((QueryTable) snappedDep).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(((QueryTable) snappedOfSnap).satisfied(LogicalClock.DEFAULT.currentStep())); // nothing left flushed = LiveTableMonitor.DEFAULT.flushOneNotificationForUnitTests(); @@ -1725,116 +1554,91 @@ public void testSnapshotIncrementalDependencies() { } public void testWhereInDependency() { - final QueryTable tableToFilter = - testRefreshingTable(i(10, 11, 12, 13, 14, 15), c("A", 1, 2, 3, 4, 5, 6), + final QueryTable tableToFilter = testRefreshingTable(i(10, 11, 12, 13, 14, 15), c("A", 1, 2, 3, 4, 5, 6), c("B", 2, 4, 6, 8, 10, 12), c("C", 'a', 'b', 'c', 'd', 'e', 'f')); - final QueryTable setTable = - testRefreshingTable(i(100, 101, 102), c("A", 1, 2, 3), c("B", 2, 4, 6)); + final QueryTable setTable = testRefreshingTable(i(100, 101, 102), c("A", 1, 2, 3), c("B", 2, 4, 6)); final Table setTable1 = setTable.where("A > 2"); final Table setTable2 = setTable.where("B > 6"); final DynamicWhereFilter dynamicFilter1 = - new DynamicWhereFilter(setTable1, true, MatchPairFactory.getExpressions("A")); + new DynamicWhereFilter(setTable1, true, MatchPairFactory.getExpressions("A")); final DynamicWhereFilter dynamicFilter2 = - new DynamicWhereFilter(setTable2, true, MatchPairFactory.getExpressions("B")); + new DynamicWhereFilter(setTable2, true, 
MatchPairFactory.getExpressions("B")); - final SelectFilter composedFilter = - DisjunctiveFilter.makeDisjunctiveFilter(dynamicFilter1, dynamicFilter2); + final SelectFilter composedFilter = DisjunctiveFilter.makeDisjunctiveFilter(dynamicFilter1, dynamicFilter2); final Table composed = tableToFilter.where(composedFilter); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { TestCase.assertTrue(dynamicFilter1.satisfied(LogicalClock.DEFAULT.currentStep())); TestCase.assertTrue(dynamicFilter2.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase - .assertTrue(((QueryTable) composed).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(((QueryTable) composed).satisfied(LogicalClock.DEFAULT.currentStep())); }); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { addToTable(setTable, i(103), c("A", 5), c("B", 8)); setTable.notifyListeners(i(103), i(), i()); - TestCase.assertFalse( - ((QueryTable) setTable1).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse( - ((QueryTable) setTable2).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(((QueryTable) setTable1).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(((QueryTable) setTable2).satisfied(LogicalClock.DEFAULT.currentStep())); TestCase.assertFalse(dynamicFilter1.satisfied(LogicalClock.DEFAULT.currentStep())); TestCase.assertFalse(dynamicFilter2.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase - .assertFalse(((QueryTable) composed).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(((QueryTable) composed).satisfied(LogicalClock.DEFAULT.currentStep())); - // this will do the notification for table; which should first fire the recorder for - // setTable1 + // this will do the notification for table; which should first fire the recorder for setTable1 LiveTableMonitor.DEFAULT.flushOneNotificationForUnitTests(); - // this will do the notification for table; which should first fire the recorder for - // 
setTable2 + // this will do the notification for table; which should first fire the recorder for setTable2 LiveTableMonitor.DEFAULT.flushOneNotificationForUnitTests(); - // this will do the notification for table; which should first fire the merged listener - // for 1 + // this will do the notification for table; which should first fire the merged listener for 1 boolean flushed = LiveTableMonitor.DEFAULT.flushOneNotificationForUnitTests(); TestCase.assertTrue(flushed); - TestCase - .assertTrue(((QueryTable) setTable1).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse( - ((QueryTable) setTable2).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(((QueryTable) setTable1).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(((QueryTable) setTable2).satisfied(LogicalClock.DEFAULT.currentStep())); TestCase.assertFalse(dynamicFilter1.satisfied(LogicalClock.DEFAULT.currentStep())); TestCase.assertFalse(dynamicFilter2.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase - .assertFalse(((QueryTable) composed).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(((QueryTable) composed).satisfied(LogicalClock.DEFAULT.currentStep())); // the next notification should be the merged listener for setTable2 flushed = LiveTableMonitor.DEFAULT.flushOneNotificationForUnitTests(); TestCase.assertTrue(flushed); - TestCase - .assertTrue(((QueryTable) setTable1).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase - .assertTrue(((QueryTable) setTable2).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(((QueryTable) setTable1).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(((QueryTable) setTable2).satisfied(LogicalClock.DEFAULT.currentStep())); TestCase.assertFalse(dynamicFilter1.satisfied(LogicalClock.DEFAULT.currentStep())); TestCase.assertFalse(dynamicFilter2.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase - .assertFalse(((QueryTable) 
composed).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(((QueryTable) composed).satisfied(LogicalClock.DEFAULT.currentStep())); // the dynamicFilter1 updates flushed = LiveTableMonitor.DEFAULT.flushOneNotificationForUnitTests(); TestCase.assertTrue(flushed); - TestCase - .assertTrue(((QueryTable) setTable1).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase - .assertTrue(((QueryTable) setTable2).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(((QueryTable) setTable1).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(((QueryTable) setTable2).satisfied(LogicalClock.DEFAULT.currentStep())); TestCase.assertTrue(dynamicFilter1.satisfied(LogicalClock.DEFAULT.currentStep())); TestCase.assertFalse(dynamicFilter2.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase - .assertFalse(((QueryTable) composed).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(((QueryTable) composed).satisfied(LogicalClock.DEFAULT.currentStep())); // the dynamicFilter2 updates flushed = LiveTableMonitor.DEFAULT.flushOneNotificationForUnitTests(); TestCase.assertTrue(flushed); - TestCase - .assertTrue(((QueryTable) setTable1).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase - .assertTrue(((QueryTable) setTable2).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(((QueryTable) setTable1).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(((QueryTable) setTable2).satisfied(LogicalClock.DEFAULT.currentStep())); TestCase.assertTrue(dynamicFilter1.satisfied(LogicalClock.DEFAULT.currentStep())); TestCase.assertTrue(dynamicFilter2.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase - .assertFalse(((QueryTable) composed).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(((QueryTable) composed).satisfied(LogicalClock.DEFAULT.currentStep())); // now that both filters are complete, we can run the composed listener flushed = 
LiveTableMonitor.DEFAULT.flushOneNotificationForUnitTests(); TestCase.assertTrue(flushed); - TestCase - .assertTrue(((QueryTable) setTable1).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase - .assertTrue(((QueryTable) setTable2).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(((QueryTable) setTable1).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(((QueryTable) setTable2).satisfied(LogicalClock.DEFAULT.currentStep())); TestCase.assertTrue(dynamicFilter1.satisfied(LogicalClock.DEFAULT.currentStep())); TestCase.assertTrue(dynamicFilter2.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase - .assertTrue(((QueryTable) composed).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(((QueryTable) composed).satisfied(LogicalClock.DEFAULT.currentStep())); // and we are done flushed = LiveTableMonitor.DEFAULT.flushOneNotificationForUnitTests(); @@ -1843,24 +1647,23 @@ public void testWhereInDependency() { TableTools.show(composed); - final Table expected = TableTools.newTable(intCol("A", 3, 4, 5), intCol("B", 6, 8, 10), - charCol("C", 'c', 'd', 'e')); + final Table expected = + TableTools.newTable(intCol("A", 3, 4, 5), intCol("B", 6, 8, 10), charCol("C", 'c', 'd', 'e')); TestCase.assertEquals("", TableTools.diff(composed, expected, 10)); } public void testWhereInScope() { final DynamicTable toBeFiltered = TstUtils.testRefreshingTable( - TableTools.col("Key", "A", "B", "C", "D", "E"), - TableTools.intCol("Value", 1, 2, 3, 4, 5)); + TableTools.col("Key", "A", "B", "C", "D", "E"), + TableTools.intCol("Value", 1, 2, 3, 4, 5)); // The setScope will own the set table. 
rc == 1 final SafeCloseable setScope = LivenessScopeStack.open(); final DynamicTable setTable = TstUtils.testRefreshingTable(TableTools.stringCol("Key")); // Owned by setScope, rc == 1 - // It will also manage setTable whose rc == 3 after (1 SwapListener, one ListenerImpl from - // by) + // It will also manage setTable whose rc == 3 after (1 SwapListener, one ListenerImpl from by) final Table whereIn = toBeFiltered.whereIn(setTable, "Key"); // Manage it rcs == (2,3) @@ -1902,13 +1705,11 @@ public void testWhereInScope() { } public void testSnapshotIncremental() { - QueryTable right = - testRefreshingTable(i(10, 25, 30), c("A", 3, 1, 2), c("B", "c", "a", "b")); + QueryTable right = testRefreshingTable(i(10, 25, 30), c("A", 3, 1, 2), c("B", "c", "a", "b")); QueryTable left = testRefreshingTable(c("T", 1)); Table empty = left.snapshotIncremental(right); show(empty); - assertEquals("", - diff(empty, testRefreshingTable(intCol("A"), stringCol("B"), intCol("T")), 10)); + assertEquals("", diff(empty, testRefreshingTable(intCol("A"), stringCol("B"), intCol("T")), 10)); final QueryTable left2 = testRefreshingTable(c("T", 1, 2)); @@ -1917,8 +1718,7 @@ public void testSnapshotIncremental() { show(snapshot); System.out.println("Initial prev:"); show(prevTable(snapshot)); - assertEquals("", - diff(snapshot, testRefreshingTable(intCol("A"), stringCol("B"), intCol("T")), 10)); + assertEquals("", diff(snapshot, testRefreshingTable(intCol("A"), stringCol("B"), intCol("T")), 10)); final ListenerWithGlobals listener; snapshot.listenForUpdates(listener = new ListenerWithGlobals(snapshot)); @@ -1929,8 +1729,7 @@ public void testSnapshotIncremental() { right.notifyListeners(i(20, 40), i(), i()); }); show(snapshot, 50); - assertEquals("", - diff(snapshot, testRefreshingTable(intCol("A"), stringCol("B"), intCol("T")), 10)); + assertEquals("", diff(snapshot, testRefreshingTable(intCol("A"), stringCol("B"), intCol("T")), 10)); assertEquals(listener.getCount(), 0); 
LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { @@ -1938,8 +1737,9 @@ public void testSnapshotIncremental() { left2.notifyListeners(i(3), i(), i()); }); show(snapshot, 50); - assertEquals("", diff(snapshot, testRefreshingTable(c("A", 3, 30, 1, 2, 50), - c("B", "c", "aa", "a", "b", "bc"), c("T", 5, 5, 5, 5, 5)), 10)); + assertEquals("", diff(snapshot, + testRefreshingTable(c("A", 3, 30, 1, 2, 50), c("B", "c", "aa", "a", "b", "bc"), c("T", 5, 5, 5, 5, 5)), + 10)); assertEquals(listener.getCount(), 1); assertEquals(right.getIndex(), added); assertEquals(i(), modified); @@ -1953,9 +1753,9 @@ public void testSnapshotIncremental() { }); showWithIndex(snapshot, 50); assertEquals("", diff(snapshot, testRefreshingTable( - c("A", 3, 30, 1, 2, 50), - c("B", "c", "aa", "a", "b", "bc"), - c("T", 5, 5, 5, 5, 5)), 10)); + c("A", 3, 30, 1, 2, 50), + c("B", "c", "aa", "a", "b", "bc"), + c("T", 5, 5, 5, 5, 5)), 10)); assertEquals(listener.getCount(), 0); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { @@ -1967,8 +1767,8 @@ public void testSnapshotIncremental() { System.out.println("Snapshot Table:"); showWithIndex(snapshot, 50); - assertEquals("", diff(snapshot, - testRefreshingTable(c("A", 11, 50, 34), c("B", "A", "bc", "Q"), c("T", 8, 5, 8)), 10)); + assertEquals("", + diff(snapshot, testRefreshingTable(c("A", 11, 50, 34), c("B", "A", "bc", "Q"), c("T", 8, 5, 8)), 10)); assertEquals(listener.getCount(), 1); assertEquals(i(75), added); assertEquals(i(25), modified); @@ -1994,32 +1794,30 @@ public void testSnapshotIncrementalBigInitial() { } public void testSnapshotIncrementalPrev() { - final QueryTable right = - testRefreshingTable(i(10, 25, 30), c("A", 3, 1, 2), c("B", "c", "a", "b")); + final QueryTable right = testRefreshingTable(i(10, 25, 30), c("A", 3, 1, 2), c("B", "c", "a", "b")); final QueryTable left = testRefreshingTable(c("T", 1, 2)); final QueryTable snapshot = (QueryTable) left.snapshotIncremental(right, true); final TableUpdateValidator validator = 
TableUpdateValidator.make(snapshot); final QueryTable validatorTable = validator.getResultTable(); final ShiftAwareListener validatorTableListener = - new InstrumentedShiftAwareListenerAdapter(validatorTable, false) { - @Override - public void onUpdate(Update upstream) {} - - @Override - public void onFailureInternal(Throwable originalException, - UpdatePerformanceTracker.Entry sourceEntry) { - TestCase.fail(originalException.getMessage()); - } - }; + new InstrumentedShiftAwareListenerAdapter(validatorTable, false) { + @Override + public void onUpdate(Update upstream) {} + + @Override + public void onFailureInternal(Throwable originalException, + UpdatePerformanceTracker.Entry sourceEntry) { + TestCase.fail(originalException.getMessage()); + } + }; validatorTable.listenForUpdates(validatorTableListener); System.out.println("Initial table:"); show(snapshot); System.out.println("Initial prev:"); show(prevTable(snapshot)); - final QueryTable firstResult = - testRefreshingTable(c("A", 3, 1, 2), c("B", "c", "a", "b"), c("T", 2, 2, 2)); + final QueryTable firstResult = testRefreshingTable(c("A", 3, 1, 2), c("B", "c", "a", "b"), c("T", 2, 2, 2)); assertEquals("", diff(snapshot, firstResult, 10)); assertEquals("", diff(prevTable(snapshot), firstResult, 10)); @@ -2047,8 +1845,8 @@ public void onFailureInternal(Throwable originalException, }); show(snapshot, 50); - final QueryTable secondResult = testRefreshingTable(c("A", 3, 30, 1, 2, 50), - c("B", "c", "aa", "a", "b", "bc"), c("T", 2, 5, 2, 2, 5)); + final QueryTable secondResult = + testRefreshingTable(c("A", 3, 30, 1, 2, 50), c("B", "c", "aa", "a", "b", "bc"), c("T", 2, 5, 2, 2, 5)); assertEquals("", diff(snapshot, secondResult, 10)); assertEquals(listener.getCount(), 1); assertEquals(i(20, 40), listener.update.added); @@ -2071,8 +1869,7 @@ public void onFailureInternal(Throwable originalException, left.notifyListeners(i(4, 5), i(), i()); }); - final QueryTable thirdResult = - testRefreshingTable(c("A", 11, 50, 34), 
c("B", "A", "bc", "Q"), c("T", 8, 5, 8)); + final QueryTable thirdResult = testRefreshingTable(c("A", 11, 50, 34), c("B", "A", "bc", "Q"), c("T", 8, 5, 8)); assertEquals("", diff(snapshot, thirdResult, 10)); assertEquals(listener.getCount(), 1); assertEquals(i(75), listener.update.added); @@ -2092,7 +1889,7 @@ public void onFailureInternal(Throwable originalException, }); final QueryTable fourthResult = - testRefreshingTable(c("A", 12, 50, 34), c("B", "R", "bc", "Q"), c("T", 9, 5, 8)); + testRefreshingTable(c("A", 12, 50, 34), c("B", "R", "bc", "Q"), c("T", 9, 5, 8)); assertEquals("", diff(snapshot, fourthResult, 10)); assertEquals(listener.getCount(), 1); assertEquals(i(), listener.update.added); @@ -2113,14 +1910,13 @@ public void testSnapshotIncrementalRandom() { final int filteredSize = 500; final Random random = new Random(0); - final QueryTable stampTable = - getTable(stampSize, random, stampInfo = initColumnInfos(new String[] {"Stamp"}, + final QueryTable stampTable = getTable(stampSize, random, stampInfo = initColumnInfos(new String[] {"Stamp"}, new IntGenerator(0, 100))); final QueryTable rightTable = getTable(stampSize, random, - rightInfo = initColumnInfos(new String[] {"Sym", "intCol", "doubleCol"}, - new SetGenerator<>("aa", "bb", "bc", "cc", "dd", "ee", "ff", "gg", "hh", "ii"), - new IntGenerator(0, 100), - new DoubleGenerator(0, 100))); + rightInfo = initColumnInfos(new String[] {"Sym", "intCol", "doubleCol"}, + new SetGenerator<>("aa", "bb", "bc", "cc", "dd", "ee", "ff", "gg", "hh", "ii"), + new IntGenerator(0, 100), + new DoubleGenerator(0, 100))); final QueryTable snapshot = (QueryTable) stampTable.snapshotIncremental(rightTable); @@ -2144,32 +1940,29 @@ public void testSnapshotIncrementalRandom() { LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { if (printTableUpdates) { - System.out.println("Step = " + fstep + ", modStamp=" + modStamp - + ", modRight=" + modRight + ", modifyRightFirst=" + modifyRightFirst); + System.out.println("Step = " 
+ fstep + ", modStamp=" + modStamp + ", modRight=" + modRight + + ", modifyRightFirst=" + modifyRightFirst); } if (modifyRightFirst) { - GenerateTableUpdates.generateTableUpdates(filteredSize, random, rightTable, - rightInfo); + GenerateTableUpdates.generateTableUpdates(filteredSize, random, rightTable, rightInfo); } if (modStamp) { final long lastStamp = stampTable.getIndex().lastKey(); final int numAdditions = 1 + random.nextInt(stampSize); final Index stampsToAdd = - Index.FACTORY.getIndexByRange(lastStamp + 1, lastStamp + numAdditions); + Index.FACTORY.getIndexByRange(lastStamp + 1, lastStamp + numAdditions); final ColumnHolder[] columnAdditions = new ColumnHolder[stampInfo.length]; for (int ii = 0; ii < columnAdditions.length; ii++) { - columnAdditions[ii] = - stampInfo[ii].populateMapAndC(stampsToAdd, random); + columnAdditions[ii] = stampInfo[ii].populateMapAndC(stampsToAdd, random); } TstUtils.addToTable(stampTable, stampsToAdd, columnAdditions); stampTable.notifyListeners(stampsToAdd, Index.FACTORY.getEmptyIndex(), - Index.FACTORY.getEmptyIndex()); + Index.FACTORY.getEmptyIndex()); } if (!modifyRightFirst && modRight) { - GenerateTableUpdates.generateTableUpdates(filteredSize, random, rightTable, - rightInfo); + GenerateTableUpdates.generateTableUpdates(filteredSize, random, rightTable, rightInfo); } }); if (modStamp) { @@ -2184,11 +1977,9 @@ public void testSnapshotIncrementalRandom() { System.out.println("Snapshot Modified: " + simpleListener.modified); final Index coalAdded = coalescingListener.indexUpdateCoalescer.takeAdded(); System.out.println("Right Coalesced Added: " + coalAdded); - final Index coalRemoved = - coalescingListener.indexUpdateCoalescer.takeRemoved(); + final Index coalRemoved = coalescingListener.indexUpdateCoalescer.takeRemoved(); System.out.println("Right Coalesced Removed: " + coalRemoved); - final Index coalModified = - coalescingListener.indexUpdateCoalescer.takeModified(); + final Index coalModified = 
coalescingListener.indexUpdateCoalescer.takeModified(); System.out.println("Right Coalesced Modified: " + coalModified); final Index modified = simpleListener.added.union(simpleListener.modified); @@ -2197,8 +1988,8 @@ public void testSnapshotIncrementalRandom() { System.out.println("Unmodified: " + unmodified); // verify the modified stamps - final int lastStamp = stampTable.getColumnSource("Stamp") - .getInt(stampTable.getIndex().lastKey()); + final int lastStamp = + stampTable.getColumnSource("Stamp").getInt(stampTable.getIndex().lastKey()); @SuppressWarnings("unchecked") final ColumnSource stamps = snapshot.getColumnSource("Stamp"); for (final Index.Iterator it = modified.iterator(); it.hasNext();) { @@ -2229,8 +2020,7 @@ public void testSnapshotIncrementalRandom() { } // make sure everything from the right table matches the snapshot - lastSnapshot = - new QueryTable(snapshot.getIndex().clone(), snapshot.getColumnSourceMap()); + lastSnapshot = new QueryTable(snapshot.getIndex().clone(), snapshot.getColumnSourceMap()); lastIndex = rightTable.getIndex().clone(); // the coalescing listener can be reset coalescingListener.reset(); @@ -2252,18 +2042,16 @@ public void testSelectModifications() { testShiftingModifications(arg -> (QueryTable) arg.select()); } - static void testLegacyFlattenModifications( - io.deephaven.base.Function.Unary function) { + static void testLegacyFlattenModifications(io.deephaven.base.Function.Unary function) { final QueryTable queryTable = TstUtils.testRefreshingTable(i(1, 2, 4, 6), - c("intCol", 10, 20, 40, 60)); + c("intCol", 10, 20, 40, 60)); final QueryTable selected = function.call(queryTable); final SimpleShiftAwareListener simpleListener = new SimpleShiftAwareListener(selected); selected.listenForUpdates(simpleListener); final Supplier newUpdate = - () -> new ShiftAwareListener.Update(i(), i(), i(), IndexShiftData.EMPTY, - ModifiedColumnSet.EMPTY); + () -> new ShiftAwareListener.Update(i(), i(), i(), IndexShiftData.EMPTY, 
ModifiedColumnSet.EMPTY); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { addToTable(queryTable, i(3), c("intCol", 30)); @@ -2275,17 +2063,12 @@ static void testLegacyFlattenModifications( }); Assert.assertEquals("simpleListener.getCount() == 1", 1, simpleListener.getCount()); - Assert.assertEquals("simpleListener.update.shifted.size() = 0", 0, - simpleListener.update.shifted.size()); - Assert.assertEquals("simpleListener.update.added.size() = 1", 1, - simpleListener.update.added.size()); - Assert.assertEquals("simpleListener.update.removed.size() = 1", 1, - simpleListener.update.removed.size()); - Assert.assertEquals("simpleListener.update.modified.size() = 0", 0, - simpleListener.update.modified.size()); + Assert.assertEquals("simpleListener.update.shifted.size() = 0", 0, simpleListener.update.shifted.size()); + Assert.assertEquals("simpleListener.update.added.size() = 1", 1, simpleListener.update.added.size()); + Assert.assertEquals("simpleListener.update.removed.size() = 1", 1, simpleListener.update.removed.size()); + Assert.assertEquals("simpleListener.update.modified.size() = 0", 0, simpleListener.update.modified.size()); Assert.assertEquals("simpleListener.update.added = {1}", i(1), simpleListener.update.added); - Assert.assertEquals("simpleListener.update.removed = {1}", i(1), - simpleListener.update.removed); + Assert.assertEquals("simpleListener.update.removed = {1}", i(1), simpleListener.update.removed); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { addToTable(queryTable, i(3), c("intCol", 30)); @@ -2296,16 +2079,11 @@ static void testLegacyFlattenModifications( }); Assert.assertEquals("simpleListener.getCount() == 2", 2, simpleListener.getCount()); - Assert.assertEquals("simpleListener.update.added.size() = 0", 0, - simpleListener.update.added.size()); - Assert.assertEquals("simpleListener.update.removed.size() = 0", 0, - simpleListener.update.removed.size()); - Assert.assertEquals("simpleListener.update.modified.size() = 1", 1, - 
simpleListener.update.modified.size()); - Assert.assertEquals("simpleListener.update.modified = {1}", i(1), - simpleListener.update.modified); - Assert.assertEquals("simpleListener.update.shifted.size() = 0", 0, - simpleListener.update.shifted.size()); + Assert.assertEquals("simpleListener.update.added.size() = 0", 0, simpleListener.update.added.size()); + Assert.assertEquals("simpleListener.update.removed.size() = 0", 0, simpleListener.update.removed.size()); + Assert.assertEquals("simpleListener.update.modified.size() = 1", 1, simpleListener.update.modified.size()); + Assert.assertEquals("simpleListener.update.modified = {1}", i(1), simpleListener.update.modified); + Assert.assertEquals("simpleListener.update.shifted.size() = 0", 0, simpleListener.update.shifted.size()); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { addToTable(queryTable, i(3, 5), c("intCol", 30, 50)); @@ -2317,23 +2095,18 @@ static void testLegacyFlattenModifications( }); Assert.assertEquals("simpleListener.getCount() == 3", 3, simpleListener.getCount()); - Assert.assertEquals("simpleListener.update.removed.size() = 0", 0, - simpleListener.update.removed.size()); - Assert.assertEquals("simpleListener.update.added.size() = 1", 1, - simpleListener.update.added.size()); + Assert.assertEquals("simpleListener.update.removed.size() = 0", 0, simpleListener.update.removed.size()); + Assert.assertEquals("simpleListener.update.added.size() = 1", 1, simpleListener.update.added.size()); Assert.assertEquals("simpleListener.update.added = {3}", i(3), simpleListener.update.added); - Assert.assertEquals("simpleListener.update.modified.size() = 1", 1, - simpleListener.update.modified.size()); - Assert.assertEquals("simpleListener.update.modified = {1}", i(1), - simpleListener.update.modified); - Assert.assertEquals("simpleListener.update.shifted.size() = 1", 1, - simpleListener.update.shifted.size()); + Assert.assertEquals("simpleListener.update.modified.size() = 1", 1, 
simpleListener.update.modified.size()); + Assert.assertEquals("simpleListener.update.modified = {1}", i(1), simpleListener.update.modified); + Assert.assertEquals("simpleListener.update.shifted.size() = 1", 1, simpleListener.update.shifted.size()); Assert.assertEquals("simpleListener.update.shifted.getBeginRange(0) = 3", 3, - simpleListener.update.shifted.getBeginRange(0)); + simpleListener.update.shifted.getBeginRange(0)); Assert.assertEquals("simpleListener.update.shifted.getEndRange(0) = 3", 3, - simpleListener.update.shifted.getEndRange(0)); + simpleListener.update.shifted.getEndRange(0)); Assert.assertEquals("simpleListener.update.shifted.getShiftDelta(0) = 1", 1, - simpleListener.update.shifted.getShiftDelta(0)); + simpleListener.update.shifted.getShiftDelta(0)); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { removeRows(queryTable, i(4)); @@ -2343,38 +2116,31 @@ static void testLegacyFlattenModifications( }); Assert.assertEquals("simpleListener.getCount() == 4", 4, simpleListener.getCount()); - Assert.assertEquals("simpleListener.update.added.size() = 0", 0, - simpleListener.update.added.size()); - Assert.assertEquals("simpleListener.update.removed.size() = 1", 1, - simpleListener.update.removed.size()); - Assert.assertEquals("simpleListener.update.removed = {2}", i(2), - simpleListener.update.removed); - Assert.assertEquals("simpleListener.update.modified.size() = 0", 0, - simpleListener.update.modified.size()); - Assert.assertEquals("simpleListener.update.shifted.size() = 1", 1, - simpleListener.update.shifted.size()); + Assert.assertEquals("simpleListener.update.added.size() = 0", 0, simpleListener.update.added.size()); + Assert.assertEquals("simpleListener.update.removed.size() = 1", 1, simpleListener.update.removed.size()); + Assert.assertEquals("simpleListener.update.removed = {2}", i(2), simpleListener.update.removed); + Assert.assertEquals("simpleListener.update.modified.size() = 0", 0, simpleListener.update.modified.size()); + 
Assert.assertEquals("simpleListener.update.shifted.size() = 1", 1, simpleListener.update.shifted.size()); Assert.assertEquals("simpleListener.update.shifted.getBeginRange(0) = 3", 3, - simpleListener.update.shifted.getBeginRange(0)); + simpleListener.update.shifted.getBeginRange(0)); Assert.assertEquals("simpleListener.update.shifted.getEndRange(0) = 4", 4, - simpleListener.update.shifted.getEndRange(0)); + simpleListener.update.shifted.getEndRange(0)); Assert.assertEquals("simpleListener.update.shifted.getShiftDelta(0) = -1", -1, - simpleListener.update.shifted.getShiftDelta(0)); + simpleListener.update.shifted.getShiftDelta(0)); simpleListener.close(); } - static void testShiftingModifications( - io.deephaven.base.Function.Unary function) { + static void testShiftingModifications(io.deephaven.base.Function.Unary function) { final QueryTable queryTable = TstUtils.testRefreshingTable(i(1, 2, 4, 6), - c("intCol", 10, 20, 40, 60)); + c("intCol", 10, 20, 40, 60)); final QueryTable selected = function.call(queryTable); final SimpleShiftAwareListener simpleListener = new SimpleShiftAwareListener(selected); selected.listenForUpdates(simpleListener); final Supplier newUpdate = - () -> new ShiftAwareListener.Update(i(), i(), i(), IndexShiftData.EMPTY, - ModifiedColumnSet.EMPTY); + () -> new ShiftAwareListener.Update(i(), i(), i(), IndexShiftData.EMPTY, ModifiedColumnSet.EMPTY); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { addToTable(queryTable, i(3), c("intCol", 30)); @@ -2386,17 +2152,12 @@ static void testShiftingModifications( }); Assert.assertEquals("simpleListener.getCount() == 1", 1, simpleListener.getCount()); - Assert.assertEquals("simpleListener.update.shifted.size() = 0", 0, - simpleListener.update.shifted.size()); - Assert.assertEquals("simpleListener.update.added.size() = 1", 1, - simpleListener.update.added.size()); - Assert.assertEquals("simpleListener.update.removed.size() = 1", 1, - simpleListener.update.removed.size()); - 
Assert.assertEquals("simpleListener.update.modified.size() = 0", 0, - simpleListener.update.modified.size()); + Assert.assertEquals("simpleListener.update.shifted.size() = 0", 0, simpleListener.update.shifted.size()); + Assert.assertEquals("simpleListener.update.added.size() = 1", 1, simpleListener.update.added.size()); + Assert.assertEquals("simpleListener.update.removed.size() = 1", 1, simpleListener.update.removed.size()); + Assert.assertEquals("simpleListener.update.modified.size() = 0", 0, simpleListener.update.modified.size()); Assert.assertEquals("simpleListener.update.added = {3}", i(3), simpleListener.update.added); - Assert.assertEquals("simpleListener.update.removed = {2}", i(2), - simpleListener.update.removed); + Assert.assertEquals("simpleListener.update.removed = {2}", i(2), simpleListener.update.removed); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { addToTable(queryTable, i(3), c("intCol", 30)); @@ -2407,16 +2168,11 @@ static void testShiftingModifications( }); Assert.assertEquals("simpleListener.getCount() == 2", 2, simpleListener.getCount()); - Assert.assertEquals("simpleListener.update.added.size() = 0", 0, - simpleListener.update.added.size()); - Assert.assertEquals("simpleListener.update.removed.size() = 0", 0, - simpleListener.update.removed.size()); - Assert.assertEquals("simpleListener.update.modified.size() = 1", 1, - simpleListener.update.modified.size()); - Assert.assertEquals("simpleListener.update.modified = {3}", i(3), - simpleListener.update.modified); - Assert.assertEquals("simpleListener.update.shifted.size() = 0", 0, - simpleListener.update.shifted.size()); + Assert.assertEquals("simpleListener.update.added.size() = 0", 0, simpleListener.update.added.size()); + Assert.assertEquals("simpleListener.update.removed.size() = 0", 0, simpleListener.update.removed.size()); + Assert.assertEquals("simpleListener.update.modified.size() = 1", 1, simpleListener.update.modified.size()); + 
Assert.assertEquals("simpleListener.update.modified = {3}", i(3), simpleListener.update.modified); + Assert.assertEquals("simpleListener.update.shifted.size() = 0", 0, simpleListener.update.shifted.size()); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { addToTable(queryTable, i(3, 5), c("intCol", 30, 50)); @@ -2428,17 +2184,12 @@ static void testShiftingModifications( }); Assert.assertEquals("simpleListener.getCount() == 3", 3, simpleListener.getCount()); - Assert.assertEquals("simpleListener.update.removed.size() = 0", 0, - simpleListener.update.removed.size()); - Assert.assertEquals("simpleListener.update.added.size() = 1", 1, - simpleListener.update.added.size()); + Assert.assertEquals("simpleListener.update.removed.size() = 0", 0, simpleListener.update.removed.size()); + Assert.assertEquals("simpleListener.update.added.size() = 1", 1, simpleListener.update.added.size()); Assert.assertEquals("simpleListener.update.added = {5}", i(5), simpleListener.update.added); - Assert.assertEquals("simpleListener.update.modified.size() = 1", 1, - simpleListener.update.modified.size()); - Assert.assertEquals("simpleListener.update.modified = {1}", i(3), - simpleListener.update.modified); - Assert.assertEquals("simpleListener.update.shifted.size() = 0", 0, - simpleListener.update.shifted.size()); + Assert.assertEquals("simpleListener.update.modified.size() = 1", 1, simpleListener.update.modified.size()); + Assert.assertEquals("simpleListener.update.modified = {1}", i(3), simpleListener.update.modified); + Assert.assertEquals("simpleListener.update.shifted.size() = 0", 0, simpleListener.update.shifted.size()); // Assert.assertEquals("simpleListener.update.shifted.getBeginRange(0) = 3", 3, // simpleListener.update.shifted.getBeginRange(0)); // Assert.assertEquals("simpleListener.update.shifted.getEndRange(0) = 3", 3, @@ -2454,16 +2205,11 @@ static void testShiftingModifications( }); Assert.assertEquals("simpleListener.getCount() == 4", 4, simpleListener.getCount()); - 
Assert.assertEquals("simpleListener.update.added.size() = 0", 0, - simpleListener.update.added.size()); - Assert.assertEquals("simpleListener.update.removed.size() = 1", 1, - simpleListener.update.removed.size()); - Assert.assertEquals("simpleListener.update.removed = {4}", i(4), - simpleListener.update.removed); - Assert.assertEquals("simpleListener.update.modified.size() = 0", 0, - simpleListener.update.modified.size()); - Assert.assertEquals("simpleListener.update.shifted.size() = 0", 0, - simpleListener.update.shifted.size()); + Assert.assertEquals("simpleListener.update.added.size() = 0", 0, simpleListener.update.added.size()); + Assert.assertEquals("simpleListener.update.removed.size() = 1", 1, simpleListener.update.removed.size()); + Assert.assertEquals("simpleListener.update.removed = {4}", i(4), simpleListener.update.removed); + Assert.assertEquals("simpleListener.update.modified.size() = 0", 0, simpleListener.update.modified.size()); + Assert.assertEquals("simpleListener.update.shifted.size() = 0", 0, simpleListener.update.shifted.size()); // Assert.assertEquals("simpleListener.update.shifted.getBeginRange(0) = 3", 3, // simpleListener.update.shifted.getBeginRange(0)); // Assert.assertEquals("simpleListener.update.shifted.getEndRange(0) = 4", 4, @@ -2476,77 +2222,67 @@ static void testShiftingModifications( public void testDbDateTimeColumns() { final QueryTable queryTable = TstUtils.testRefreshingTable( - c("Sym", "aa", "bc", "aa", "aa"), - c("Timestamp", DBTimeUtils.currentTime(), DBTimeUtils.currentTime(), - DBTimeUtils.currentTime(), DBTimeUtils.currentTime())); - assertEquals(queryTable.by("Sym").getDefinition().getColumn("Timestamp").getComponentType(), - DBDateTime.class); + c("Sym", "aa", "bc", "aa", "aa"), + c("Timestamp", DBTimeUtils.currentTime(), DBTimeUtils.currentTime(), DBTimeUtils.currentTime(), + DBTimeUtils.currentTime())); + assertEquals(queryTable.by("Sym").getDefinition().getColumn("Timestamp").getComponentType(), DBDateTime.class); 
show(queryTable.update("x = Timestamp_[0]")); - show( - queryTable.update("TimeinSeconds=round((max(Timestamp_)-min(Timestamp_))/1000000000)")); + show(queryTable.update("TimeinSeconds=round((max(Timestamp_)-min(Timestamp_))/1000000000)")); show(queryTable.by("Sym").view("Sym", "x = Timestamp[0]")); - show(queryTable.by("Sym").view("Sym", - "TimeinSeconds=round((max(Timestamp)-min(Timestamp))/1000000000)")); + show(queryTable.by("Sym").view("Sym", "TimeinSeconds=round((max(Timestamp)-min(Timestamp))/1000000000)")); } public void testUngroupingAgnostic() { Table table = testRefreshingTable(c("X", 1, 2, 3), - c("Y", new String[] {"a", "b", "c"}, CollectionUtil.ZERO_LENGTH_STRING_ARRAY, - new String[] {"d", "e"}), - c("Z", new int[] {4, 5, 6}, new int[0], new int[] {7, 8})); + c("Y", new String[] {"a", "b", "c"}, CollectionUtil.ZERO_LENGTH_STRING_ARRAY, new String[] {"d", "e"}), + c("Z", new int[] {4, 5, 6}, new int[0], new int[] {7, 8})); Table t1 = table.ungroup("Y", "Z"); assertEquals(5, t1.size()); assertEquals(Arrays.asList("X", "Y", "Z"), t1.getDefinition().getColumnNames()); assertEquals(Arrays.asList(1, 1, 1, 3, 3), Arrays.asList(t1.getColumn("X").get(0, 5))); - assertEquals(Arrays.asList("a", "b", "c", "d", "e"), - Arrays.asList(t1.getColumn("Y").get(0, 5))); + assertEquals(Arrays.asList("a", "b", "c", "d", "e"), Arrays.asList(t1.getColumn("Y").get(0, 5))); assertEquals(Arrays.asList(4, 5, 6, 7, 8), Arrays.asList(t1.getColumn("Z").get(0, 5))); t1 = table.ungroup(); assertEquals(5, t1.size()); assertEquals(Arrays.asList("X", "Y", "Z"), t1.getDefinition().getColumnNames()); assertEquals(Arrays.asList(1, 1, 1, 3, 3), Arrays.asList(t1.getColumn("X").get(0, 5))); - assertEquals(Arrays.asList("a", "b", "c", "d", "e"), - Arrays.asList(t1.getColumn("Y").get(0, 5))); + assertEquals(Arrays.asList("a", "b", "c", "d", "e"), Arrays.asList(t1.getColumn("Y").get(0, 5))); assertEquals(Arrays.asList(4, 5, 6, 7, 8), Arrays.asList(t1.getColumn("Z").get(0, 5))); table = 
testRefreshingTable(c("X", 1, 2, 3), - c("Y", new String[] {"a", "b", "c"}, new String[] {"d", "e"}, - CollectionUtil.ZERO_LENGTH_STRING_ARRAY), - c("Z", new int[] {4, 5, 6}, new int[0], new int[] {7, 8})); + c("Y", new String[] {"a", "b", "c"}, new String[] {"d", "e"}, CollectionUtil.ZERO_LENGTH_STRING_ARRAY), + c("Z", new int[] {4, 5, 6}, new int[0], new int[] {7, 8})); try { table.ungroup(); } catch (Exception e) { assertEquals( - "Assertion failed: asserted sizes[i] == Array.getLength(arrayColumn.get(i)), instead referenceColumn == \"Y\", name == \"Z\", row == 1.", - e.getMessage()); + "Assertion failed: asserted sizes[i] == Array.getLength(arrayColumn.get(i)), instead referenceColumn == \"Y\", name == \"Z\", row == 1.", + e.getMessage()); } try { table.ungroup("Y", "Z"); } catch (Exception e) { assertEquals( - "Assertion failed: asserted sizes[i] == Array.getLength(arrayColumn.get(i)), instead referenceColumn == \"Y\", name == \"Z\", row == 1.", - e.getMessage()); + "Assertion failed: asserted sizes[i] == Array.getLength(arrayColumn.get(i)), instead referenceColumn == \"Y\", name == \"Z\", row == 1.", + e.getMessage()); } t1 = table.ungroup("Y"); assertEquals(5, t1.size()); assertEquals(Arrays.asList("X", "Y", "Z"), t1.getDefinition().getColumnNames()); assertEquals(Arrays.asList(1, 1, 1, 2, 2), Arrays.asList(t1.getColumn("X").get(0, 5))); - assertEquals(Arrays.asList("a", "b", "c", "d", "e"), - Arrays.asList(t1.getColumn("Y").get(0, 5))); + assertEquals(Arrays.asList("a", "b", "c", "d", "e"), Arrays.asList(t1.getColumn("Y").get(0, 5))); show(t1); show(t1.ungroup("Z")); t1 = t1.ungroup("Z"); assertEquals(9, t1.size()); assertEquals(Arrays.asList("X", "Y", "Z"), t1.getDefinition().getColumnNames()); - assertEquals(Arrays.asList(1, 1, 1, 1, 1, 1, 1, 1, 1), - Arrays.asList(t1.getColumn("X").get(0, 9))); + assertEquals(Arrays.asList(1, 1, 1, 1, 1, 1, 1, 1, 1), Arrays.asList(t1.getColumn("X").get(0, 9))); assertEquals(Arrays.asList("a", "a", "a", "b", "b", "b", 
"c", "c", "c"), - Arrays.asList(t1.getColumn("Y").get(0, 9))); - assertEquals(Arrays.asList(4, 5, 6, 4, 5, 6, 4, 5, 6), - Arrays.asList(t1.getColumn("Z").get(0, 9))); + Arrays.asList(t1.getColumn("Y").get(0, 9))); + assertEquals(Arrays.asList(4, 5, 6, 4, 5, 6, 4, 5, 6), Arrays.asList(t1.getColumn("Z").get(0, 9))); t1 = table.ungroup("Z"); @@ -2557,32 +2293,26 @@ public void testUngroupingAgnostic() { t1 = t1.ungroup("Y"); assertEquals(9, t1.size()); assertEquals(Arrays.asList("X", "Y", "Z"), t1.getDefinition().getColumnNames()); - assertEquals(Arrays.asList(1, 1, 1, 1, 1, 1, 1, 1, 1), - Arrays.asList(t1.getColumn("X").get(0, 9))); - assertEquals(Arrays.asList(4, 4, 4, 5, 5, 5, 6, 6, 6), - Arrays.asList(t1.getColumn("Z").get(0, 9))); + assertEquals(Arrays.asList(1, 1, 1, 1, 1, 1, 1, 1, 1), Arrays.asList(t1.getColumn("X").get(0, 9))); + assertEquals(Arrays.asList(4, 4, 4, 5, 5, 5, 6, 6, 6), Arrays.asList(t1.getColumn("Z").get(0, 9))); assertEquals(Arrays.asList("a", "b", "c", "a", "b", "c", "a", "b", "c"), - Arrays.asList(t1.getColumn("Y").get(0, 9))); + Arrays.asList(t1.getColumn("Y").get(0, 9))); } public void testUngroupConstructSnapshotOfBoxedNull() { - final Table t = testRefreshingTable(i(0)).update("X = new Integer[]{null, 2, 3}", - "Z = new Integer[]{4, 5, null}"); + final Table t = + testRefreshingTable(i(0)).update("X = new Integer[]{null, 2, 3}", "Z = new Integer[]{4, 5, null}"); final Table ungrouped = t.ungroup(); - try (final BarrageMessage snap = - ConstructSnapshot.constructBackplaneSnapshot(this, (BaseTable) ungrouped)) { + try (final BarrageMessage snap = ConstructSnapshot.constructBackplaneSnapshot(this, (BaseTable) ungrouped)) { assertEquals(snap.rowsAdded, i(0, 1, 2)); - assertEquals(snap.addColumnData[0].data.asIntChunk().get(0), - io.deephaven.util.QueryConstants.NULL_INT); - assertEquals(snap.addColumnData[1].data.asIntChunk().get(2), - io.deephaven.util.QueryConstants.NULL_INT); + assertEquals(snap.addColumnData[0].data.asIntChunk().get(0), 
io.deephaven.util.QueryConstants.NULL_INT); + assertEquals(snap.addColumnData[1].data.asIntChunk().get(2), io.deephaven.util.QueryConstants.NULL_INT); } } public void testUngroupableColumnSources() { - final Table table = - testRefreshingTable(c("X", 1, 1, 2, 2, 3, 3, 4, 4), c("Int", 1, 2, 3, 4, 5, 6, 7, null), + final Table table = testRefreshingTable(c("X", 1, 1, 2, 2, 3, 3, 4, 4), c("Int", 1, 2, 3, 4, 5, 6, 7, null), c("Double", 0.1, 0.15, 0.2, 0.25, 0.3, 0.35, null, 0.45), c("String", "a", "b", "c", "d", "e", "f", "g", null)); final Table t1 = table.by("X"); @@ -2611,8 +2341,7 @@ public void testUngroupableColumnSources() { final String diff4 = diff(t8, t7, 10); Assert.assertEquals("UngroupableColumnSources do not match!", "", diff4); - final Table t9 = - t1.update("Array=new io.deephaven.db.tables.dbarrays.DbIntArrayDirect(19, 40)"); + final Table t9 = t1.update("Array=new io.deephaven.db.tables.dbarrays.DbIntArrayDirect(19, 40)"); final Table t10 = t9.ungroup(); final String diff5 = diff(t10, t6, 10); Assert.assertEquals("UngroupableColumnSources do not match!", "", diff5); @@ -2624,8 +2353,7 @@ public void testUngroupableColumnSources() { final int[] intDirect = (int[]) t2.getColumn("Int").getDirect(); System.out.println(Arrays.toString(intDirect)); - final int[] expected = - new int[] {1, 2, 3, 4, 5, 6, 7, io.deephaven.util.QueryConstants.NULL_INT}; + final int[] expected = new int[] {1, 2, 3, 4, 5, 6, 7, io.deephaven.util.QueryConstants.NULL_INT}; if (!Arrays.equals(expected, intDirect)) { System.out.println("Expected: " + Arrays.toString(expected)); @@ -2633,8 +2361,8 @@ public void testUngroupableColumnSources() { fail("Expected does not match direct value!"); } - int[] intPrevDirect = (int[]) IndexedDataColumn - .makePreviousColumn(t2.getIndex(), t2.getColumnSource("Int")).getDirect(); + int[] intPrevDirect = + (int[]) IndexedDataColumn.makePreviousColumn(t2.getIndex(), t2.getColumnSource("Int")).getDirect(); if (!Arrays.equals(expected, intPrevDirect)) 
{ System.out.println("Expected: " + Arrays.toString(expected)); System.out.println("Prev: " + Arrays.toString(intPrevDirect)); @@ -2644,8 +2372,8 @@ public void testUngroupableColumnSources() { LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { }); - intPrevDirect = (int[]) IndexedDataColumn - .makePreviousColumn(t2.getIndex(), t2.getColumnSource("Int")).getDirect(); + intPrevDirect = + (int[]) IndexedDataColumn.makePreviousColumn(t2.getIndex(), t2.getColumnSource("Int")).getDirect(); if (!Arrays.equals(expected, intPrevDirect)) { System.out.println("Expected: " + Arrays.toString(expected)); System.out.println("Prev: " + Arrays.toString(intPrevDirect)); @@ -2656,13 +2384,12 @@ public void testUngroupableColumnSources() { public void testUngroupOverflow() { try (final ErrorExpectation errorExpectation = new ErrorExpectation()) { final QueryTable table = testRefreshingTable(i(5, 7), c("X", 1, 2), - c("Y", new String[] {"a", "b", "c"}, new String[] {"d", "e"})); + c("Y", new String[] {"a", "b", "c"}, new String[] {"d", "e"})); final QueryTable t1 = (QueryTable) table.ungroup("Y"); assertEquals(5, t1.size()); assertEquals(Arrays.asList("X", "Y"), t1.getDefinition().getColumnNames()); assertEquals(Arrays.asList(1, 1, 1, 2, 2), Arrays.asList(t1.getColumn("X").get(0, 5))); - assertEquals(Arrays.asList("a", "b", "c", "d", "e"), - Arrays.asList(t1.getColumn("Y").get(0, 5))); + assertEquals(Arrays.asList("a", "b", "c", "d", "e"), Arrays.asList(t1.getColumn("Y").get(0, 5))); final ErrorListener errorListener = new ErrorListener(table); t1.listenForUpdates(errorListener); @@ -2670,8 +2397,8 @@ public void testUngroupOverflow() { // This is too big, we should fail LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { final long bigIndex = 1L << 55; - addToTable(table, i(bigIndex), intCol("X", 3), new ColumnHolder<>("Y", - String[].class, String.class, false, new String[] {"f"})); + addToTable(table, i(bigIndex), intCol("X", 3), + new ColumnHolder<>("Y", String[].class, 
String.class, false, new String[] {"f"})); table.notifyListeners(i(bigIndex), i(), i()); }); showWithIndex(t1); @@ -2682,10 +2409,8 @@ public void testUngroupOverflow() { if (!(errorListener.originalException instanceof IllegalStateException)) { fail("!(errorListener.originalException instanceof IllegalStateException)"); } - if (!(errorListener.originalException.getMessage() - .startsWith("Key overflow detected"))) { - fail( - "!errorListener.originalException.getMessage().startsWith(\"Key overflow detected\")"); + if (!(errorListener.originalException.getMessage().startsWith("Key overflow detected"))) { + fail("!errorListener.originalException.getMessage().startsWith(\"Key overflow detected\")"); } } } @@ -2695,48 +2420,46 @@ public void testUngroupWithRebase() { final int minimumUngroupBase = QueryTable.setMinimumUngroupBase(2); try { final QueryTable table = testRefreshingTable(i(5, 7), c("X", 1, 2), - c("Y", new String[] {"a", "b", "c"}, new String[] {"d", "e"})); + c("Y", new String[] {"a", "b", "c"}, new String[] {"d", "e"})); final QueryTable t1 = (QueryTable) table.ungroup("Y"); assertEquals(5, t1.size()); assertEquals(Arrays.asList("X", "Y"), t1.getDefinition().getColumnNames()); - assertEquals(Arrays.asList(1, 1, 1, 2, 2), - Ints.asList((int[]) t1.getColumn("X").getDirect())); + assertEquals(Arrays.asList(1, 1, 1, 2, 2), Ints.asList((int[]) t1.getColumn("X").getDirect())); assertEquals(Arrays.asList("a", "b", "c", "d", "e"), - Arrays.asList((String[]) t1.getColumn("Y").getDirect())); + Arrays.asList((String[]) t1.getColumn("Y").getDirect())); final TableUpdateValidator validator = TableUpdateValidator.make(t1); final QueryTable validatorTable = validator.getResultTable(); final ShiftAwareListener validatorTableListener = - new InstrumentedShiftAwareListenerAdapter(validatorTable, false) { - @Override - public void onUpdate(Update upstream) {} - - @Override - public void onFailureInternal(Throwable originalException, - UpdatePerformanceTracker.Entry 
sourceEntry) { - TestCase.fail(originalException.getMessage()); - } - }; + new InstrumentedShiftAwareListenerAdapter(validatorTable, false) { + @Override + public void onUpdate(Update upstream) {} + + @Override + public void onFailureInternal(Throwable originalException, + UpdatePerformanceTracker.Entry sourceEntry) { + TestCase.fail(originalException.getMessage()); + } + }; validatorTable.listenForUpdates(validatorTableListener); // This is too big, we should fail LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - addToTable(table, i(9), c("X", 3), new ColumnHolder<>("Y", String[].class, - String.class, false, new String[] {"f", "g", "h", "i", "j", "k"})); + addToTable(table, i(9), c("X", 3), new ColumnHolder<>("Y", String[].class, String.class, false, + new String[] {"f", "g", "h", "i", "j", "k"})); table.notifyListeners(i(9), i(), i()); }); showWithIndex(t1); assertEquals(Arrays.asList("X", "Y"), t1.getDefinition().getColumnNames()); assertEquals(Arrays.asList(1, 1, 1, 2, 2, 3, 3, 3, 3, 3, 3), - Ints.asList((int[]) (t1.getColumn("X").getDirect()))); + Ints.asList((int[]) (t1.getColumn("X").getDirect()))); assertEquals(Arrays.asList("a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k"), - Arrays.asList((String[]) t1.getColumn("Y").getDirect())); + Arrays.asList((String[]) t1.getColumn("Y").getDirect())); - assertEquals(Arrays.asList(1, 1, 1, 2, 2), Ints.asList((int[]) IndexedDataColumn - .makePreviousColumn(t1.getIndex(), t1.getColumnSource("X")).getDirect())); - assertEquals(Arrays.asList("a", "b", "c", "d", "e"), - Arrays.asList((String[]) IndexedDataColumn + assertEquals(Arrays.asList(1, 1, 1, 2, 2), Ints.asList( + (int[]) IndexedDataColumn.makePreviousColumn(t1.getIndex(), t1.getColumnSource("X")).getDirect())); + assertEquals(Arrays.asList("a", "b", "c", "d", "e"), Arrays.asList((String[]) IndexedDataColumn .makePreviousColumn(t1.getIndex(), t1.getColumnSource("Y")).getDirect())); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { @@ -2744,15 
+2467,14 @@ public void onFailureInternal(Throwable originalException, assertEquals(Arrays.asList("X", "Y"), t1.getDefinition().getColumnNames()); assertEquals(Arrays.asList(1, 1, 1, 2, 2, 3, 3, 3, 3, 3, 3), - Ints.asList((int[]) t1.getColumn("X").getDirect())); + Ints.asList((int[]) t1.getColumn("X").getDirect())); assertEquals(Arrays.asList("a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k"), - Arrays.asList((String[]) t1.getColumn("Y").getDirect())); - assertEquals(Arrays.asList(1, 1, 1, 2, 2, 3, 3, 3, 3, 3, 3), - Ints.asList((int[]) IndexedDataColumn - .makePreviousColumn(t1.getIndex(), t1.getColumnSource("X")).getDirect())); + Arrays.asList((String[]) t1.getColumn("Y").getDirect())); + assertEquals(Arrays.asList(1, 1, 1, 2, 2, 3, 3, 3, 3, 3, 3), Ints.asList( + (int[]) IndexedDataColumn.makePreviousColumn(t1.getIndex(), t1.getColumnSource("X")).getDirect())); assertEquals(Arrays.asList("a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k"), - Arrays.asList((String[]) IndexedDataColumn - .makePreviousColumn(t1.getIndex(), t1.getColumnSource("Y")).getDirect())); + Arrays.asList((String[]) IndexedDataColumn + .makePreviousColumn(t1.getIndex(), t1.getColumnSource("Y")).getDirect())); } finally { QueryTable.setMinimumUngroupBase(minimumUngroupBase); } @@ -2769,12 +2491,12 @@ private void testUngroupIncremental(int tableSize, boolean nullFill) throws Pars final ColumnInfo[] columnInfo; final QueryTable table = getTable(tableSize, random, - columnInfo = initColumnInfos(new String[] {"Date", "C1", "C2", "C3"}, - new DateGenerator(format.parse("2011-02-02"), format.parse("2011-02-03")), - new SetGenerator<>("a", "b"), - new SetGenerator<>(10, 20, 30), - new SetGenerator<>(CollectionUtil.ZERO_LENGTH_STRING_ARRAY, new String[] {"a", "b"}, - new String[] {"a", "b", "c"}))); + columnInfo = initColumnInfos(new String[] {"Date", "C1", "C2", "C3"}, + new DateGenerator(format.parse("2011-02-02"), format.parse("2011-02-03")), + new SetGenerator<>("a", "b"), + new 
SetGenerator<>(10, 20, 30), + new SetGenerator<>(CollectionUtil.ZERO_LENGTH_STRING_ARRAY, new String[] {"a", "b"}, + new String[] {"a", "b", "c"}))); final EvalNuggetInterface[] en = new EvalNuggetInterface[] { EvalNugget.from(() -> table.by().ungroup(nullFill)), @@ -2782,32 +2504,25 @@ private void testUngroupIncremental(int tableSize, boolean nullFill) throws Pars new UpdateValidatorNugget(table.by("C1").ungroup(nullFill)), EvalNugget.from(() -> table.by().ungroup(nullFill, "C1")), EvalNugget.from(() -> table.by("C1").sort("C1").ungroup(nullFill, "C2")), - EvalNugget.from(() -> table.by("C1").sort("C1").ungroup(nullFill, "C2") - .ungroup(nullFill, "Date")), - EvalNugget.from( - () -> table.by("C1").sort("C1").ungroup(nullFill, "C2").ungroup(nullFill)), + EvalNugget.from(() -> table.by("C1").sort("C1").ungroup(nullFill, "C2").ungroup(nullFill, "Date")), + EvalNugget.from(() -> table.by("C1").sort("C1").ungroup(nullFill, "C2").ungroup(nullFill)), EvalNugget.from(() -> table.by("C1", "C2").sort("C1", "C2").ungroup(nullFill)), - EvalNugget.from(() -> table.by() - .update("Date=Date.toArray()", "C1=C1.toArray()", "C2=C2.toArray()") - .ungroup(nullFill)), - EvalNugget.from(() -> table.by("C1") - .update("Date=Date.toArray()", "C2=C2.toArray()").sort("C1").ungroup(nullFill)), - EvalNugget.from(() -> table.by() - .update("Date=Date.toArray()", "C1=C1.toArray()", "C2=C2.toArray()") - .ungroup(nullFill, "C1")), - EvalNugget - .from(() -> table.by("C1").update("Date=Date.toArray()", "C2=C2.toArray()") - .sort("C1").ungroup(nullFill, "C2")), - EvalNugget - .from(() -> table.by("C1").update("Date=Date.toArray()", "C2=C2.toArray()") - .sort("C1").ungroup(nullFill, "C2").ungroup(nullFill, "Date")), - EvalNugget - .from(() -> table.by("C1").update("Date=Date.toArray()", "C2=C2.toArray()") - .sort("C1").ungroup(nullFill, "C2").ungroup(nullFill)), - EvalNugget.from(() -> table.by("C1", "C2").update("Date=Date.toArray()") - .sort("C1", "C2").ungroup(nullFill)), - EvalNugget.from(() 
-> table.by("C1") - .update("Date=Date.toArray()", "C2=C2.toArray()").sort("C1").ungroup(nullFill)), + EvalNugget.from(() -> table.by().update("Date=Date.toArray()", "C1=C1.toArray()", "C2=C2.toArray()") + .ungroup(nullFill)), + EvalNugget.from(() -> table.by("C1").update("Date=Date.toArray()", "C2=C2.toArray()").sort("C1") + .ungroup(nullFill)), + EvalNugget.from(() -> table.by().update("Date=Date.toArray()", "C1=C1.toArray()", "C2=C2.toArray()") + .ungroup(nullFill, "C1")), + EvalNugget.from(() -> table.by("C1").update("Date=Date.toArray()", "C2=C2.toArray()").sort("C1") + .ungroup(nullFill, "C2")), + EvalNugget.from(() -> table.by("C1").update("Date=Date.toArray()", "C2=C2.toArray()").sort("C1") + .ungroup(nullFill, "C2").ungroup(nullFill, "Date")), + EvalNugget.from(() -> table.by("C1").update("Date=Date.toArray()", "C2=C2.toArray()").sort("C1") + .ungroup(nullFill, "C2").ungroup(nullFill)), + EvalNugget.from( + () -> table.by("C1", "C2").update("Date=Date.toArray()").sort("C1", "C2").ungroup(nullFill)), + EvalNugget.from(() -> table.by("C1").update("Date=Date.toArray()", "C2=C2.toArray()").sort("C1") + .ungroup(nullFill)), EvalNugget.from(() -> table.view("C3").ungroup(nullFill)) }; @@ -2838,12 +2553,11 @@ private void testUngroupMismatch(int size, boolean nullFill) { for (int q = 0; q < 10; q++) { final ColumnInfo[] columnInfo; final QueryTable table = getTable(size, random, - columnInfo = initColumnInfos(new String[] {"Sym", "intCol",}, - new SetGenerator<>("a", "b", "c", "d"), - new IntGenerator(10, 100))); + columnInfo = initColumnInfos(new String[] {"Sym", "intCol",}, + new SetGenerator<>("a", "b", "c", "d"), + new IntGenerator(10, 100))); - final Table mismatch = - table.by("Sym").sort("Sym").update("MyBoolean=boolArray", "MyDouble=doubleArray"); + final Table mismatch = table.by("Sym").sort("Sym").update("MyBoolean=boolArray", "MyDouble=doubleArray"); final EvalNugget en[] = new EvalNugget[] { new EvalNugget() { @@ -2862,132 +2576,118 @@ public Table 
e() { @SuppressWarnings("RedundantCast") public void testUngroupJoined_IDS6311() throws IOException, ClassNotFoundException { - final QueryTable left = TstUtils.testRefreshingTable(c("Letter", 'a', 'b', 'c', 'd'), - intCol("Value", 0, 1, 2, 3)); + final QueryTable left = + TstUtils.testRefreshingTable(c("Letter", 'a', 'b', 'c', 'd'), intCol("Value", 0, 1, 2, 3)); final QueryTable right = TstUtils.testRefreshingTable(c("Letter", '0', 'b'), - byteCol("BValue", (byte) 0, (byte) 1), - shortCol("SValue", (short) 0, (short) 1), - intCol("EulavI", 0, 1), - longCol("LValue", 0, 1), - floatCol("FValue", 0.0f, 1.1f), - doubleCol("DValue", 0.0d, 1.1d), - charCol("CCol", 'a', 'b'), - c("BoCol", true, false), - c("OCol", new Pair<>(0, 1), new Pair<>(2, 3))); + byteCol("BValue", (byte) 0, (byte) 1), + shortCol("SValue", (short) 0, (short) 1), + intCol("EulavI", 0, 1), + longCol("LValue", 0, 1), + floatCol("FValue", 0.0f, 1.1f), + doubleCol("DValue", 0.0d, 1.1d), + charCol("CCol", 'a', 'b'), + c("BoCol", true, false), + c("OCol", new Pair<>(0, 1), new Pair<>(2, 3))); final Table leftBy = left.by("Letter"); final Table rightBy = right.by("Letter"); final Table joined = leftBy.naturalJoin(rightBy, "Letter") - .updateView("BValue = ((i%2) == 0) ? null : BValue"); + .updateView("BValue = ((i%2) == 0) ? 
null : BValue"); QueryTable expected = TstUtils.testRefreshingTable(c("Letter", 'a', 'b', 'c', 'd'), - c("Value", (DbIntArray) new DbIntArrayDirect(0), (DbIntArray) new DbIntArrayDirect(1), - (DbIntArray) new DbIntArrayDirect(2), (DbIntArray) new DbIntArrayDirect(3)), - c("BValue", null, (DbByteArray) new DbByteArrayDirect((byte) 1), null, null), - c("SValue", null, (DbShortArray) new DbShortArrayDirect((short) 1), null, null), - c("EulavI", null, (DbIntArray) new DbIntArrayDirect(1), null, null), - c("LValue", null, (DbLongArray) new DbLongArrayDirect(1), null, null), - c("FValue", null, (DbFloatArray) new DbFloatArrayDirect(1.1f), null, null), - c("DValue", null, (DbDoubleArray) new DbDoubleArrayDirect(1.1d), null, null), - c("CCol", null, (DbCharArray) new DbCharArrayDirect('b'), null, null), - c("BoCol", null, (DbArray) new DbArrayDirect<>(false), null, null), - c("OCol", null, (DbArray>) new DbArrayDirect<>(new Pair<>(2, 3)), - null, null)); + c("Value", (DbIntArray) new DbIntArrayDirect(0), (DbIntArray) new DbIntArrayDirect(1), + (DbIntArray) new DbIntArrayDirect(2), (DbIntArray) new DbIntArrayDirect(3)), + c("BValue", null, (DbByteArray) new DbByteArrayDirect((byte) 1), null, null), + c("SValue", null, (DbShortArray) new DbShortArrayDirect((short) 1), null, null), + c("EulavI", null, (DbIntArray) new DbIntArrayDirect(1), null, null), + c("LValue", null, (DbLongArray) new DbLongArrayDirect(1), null, null), + c("FValue", null, (DbFloatArray) new DbFloatArrayDirect(1.1f), null, null), + c("DValue", null, (DbDoubleArray) new DbDoubleArrayDirect(1.1d), null, null), + c("CCol", null, (DbCharArray) new DbCharArrayDirect('b'), null, null), + c("BoCol", null, (DbArray) new DbArrayDirect<>(false), null, null), + c("OCol", null, (DbArray>) new DbArrayDirect<>(new Pair<>(2, 3)), null, null)); assertTableEquals(expected, joined); final Table ungrouped = joined.ungroup(true); expected = TstUtils.testRefreshingTable(c("Letter", 'a', 'b', 'c', 'd'), - c("Value", 0, 1, 2, 
3), - byteCol("BValue", io.deephaven.util.QueryConstants.NULL_BYTE, (byte) 1, - io.deephaven.util.QueryConstants.NULL_BYTE, - io.deephaven.util.QueryConstants.NULL_BYTE), - shortCol("SValue", io.deephaven.util.QueryConstants.NULL_SHORT, (short) 1, - io.deephaven.util.QueryConstants.NULL_SHORT, - io.deephaven.util.QueryConstants.NULL_SHORT), - intCol("EulavI", io.deephaven.util.QueryConstants.NULL_INT, 1, - io.deephaven.util.QueryConstants.NULL_INT, - io.deephaven.util.QueryConstants.NULL_INT), - longCol("LValue", io.deephaven.util.QueryConstants.NULL_LONG, (long) 1, - io.deephaven.util.QueryConstants.NULL_LONG, - io.deephaven.util.QueryConstants.NULL_LONG), - floatCol("FValue", io.deephaven.util.QueryConstants.NULL_FLOAT, 1.1f, - io.deephaven.util.QueryConstants.NULL_FLOAT, - io.deephaven.util.QueryConstants.NULL_FLOAT), - doubleCol("DValue", io.deephaven.util.QueryConstants.NULL_DOUBLE, 1.1d, - io.deephaven.util.QueryConstants.NULL_DOUBLE, - io.deephaven.util.QueryConstants.NULL_DOUBLE), - charCol("CCol", io.deephaven.util.QueryConstants.NULL_CHAR, 'b', - io.deephaven.util.QueryConstants.NULL_CHAR, - io.deephaven.util.QueryConstants.NULL_CHAR), - c("BoCol", null, false, null, null), - c("OCol", null, new Pair<>(2, 3), null, null)); + c("Value", 0, 1, 2, 3), + byteCol("BValue", io.deephaven.util.QueryConstants.NULL_BYTE, (byte) 1, + io.deephaven.util.QueryConstants.NULL_BYTE, io.deephaven.util.QueryConstants.NULL_BYTE), + shortCol("SValue", io.deephaven.util.QueryConstants.NULL_SHORT, (short) 1, + io.deephaven.util.QueryConstants.NULL_SHORT, io.deephaven.util.QueryConstants.NULL_SHORT), + intCol("EulavI", io.deephaven.util.QueryConstants.NULL_INT, 1, + io.deephaven.util.QueryConstants.NULL_INT, io.deephaven.util.QueryConstants.NULL_INT), + longCol("LValue", io.deephaven.util.QueryConstants.NULL_LONG, (long) 1, + io.deephaven.util.QueryConstants.NULL_LONG, io.deephaven.util.QueryConstants.NULL_LONG), + floatCol("FValue", io.deephaven.util.QueryConstants.NULL_FLOAT, 
1.1f, + io.deephaven.util.QueryConstants.NULL_FLOAT, io.deephaven.util.QueryConstants.NULL_FLOAT), + doubleCol("DValue", io.deephaven.util.QueryConstants.NULL_DOUBLE, 1.1d, + io.deephaven.util.QueryConstants.NULL_DOUBLE, io.deephaven.util.QueryConstants.NULL_DOUBLE), + charCol("CCol", io.deephaven.util.QueryConstants.NULL_CHAR, 'b', + io.deephaven.util.QueryConstants.NULL_CHAR, io.deephaven.util.QueryConstants.NULL_CHAR), + c("BoCol", null, false, null, null), + c("OCol", null, new Pair<>(2, 3), null, null)); assertTableEquals(expected, ungrouped); - // assertTableEquals only calls get(), we need to make sure the specialized get()s also work - // too. + // assertTableEquals only calls get(), we need to make sure the specialized get()s also work too. final long firstKey = ungrouped.getIndex().firstKey(); final long secondKey = ungrouped.getIndex().get(1); - assertEquals(io.deephaven.util.QueryConstants.NULL_BYTE, - ungrouped.getColumnSource("BValue").getByte(firstKey)); + assertEquals(io.deephaven.util.QueryConstants.NULL_BYTE, ungrouped.getColumnSource("BValue").getByte(firstKey)); assertEquals((byte) 1, ungrouped.getColumnSource("BValue").getByte(secondKey)); assertEquals(io.deephaven.util.QueryConstants.NULL_SHORT, - ungrouped.getColumnSource("SValue").getShort(firstKey)); + ungrouped.getColumnSource("SValue").getShort(firstKey)); assertEquals((short) 1, ungrouped.getColumnSource("SValue").getShort(secondKey)); - assertEquals(io.deephaven.util.QueryConstants.NULL_INT, - ungrouped.getColumnSource("EulavI").getInt(firstKey)); + assertEquals(io.deephaven.util.QueryConstants.NULL_INT, ungrouped.getColumnSource("EulavI").getInt(firstKey)); assertEquals(1, ungrouped.getColumnSource("EulavI").getInt(secondKey)); - assertEquals(io.deephaven.util.QueryConstants.NULL_LONG, - ungrouped.getColumnSource("LValue").getLong(firstKey)); + assertEquals(io.deephaven.util.QueryConstants.NULL_LONG, ungrouped.getColumnSource("LValue").getLong(firstKey)); assertEquals(1, 
ungrouped.getColumnSource("LValue").getLong(secondKey)); assertEquals(io.deephaven.util.QueryConstants.NULL_FLOAT, - ungrouped.getColumnSource("FValue").getFloat(firstKey)); + ungrouped.getColumnSource("FValue").getFloat(firstKey)); assertEquals(1.1f, ungrouped.getColumnSource("FValue").getFloat(secondKey)); assertEquals(io.deephaven.util.QueryConstants.NULL_DOUBLE, - ungrouped.getColumnSource("DValue").getDouble(firstKey)); + ungrouped.getColumnSource("DValue").getDouble(firstKey)); assertEquals(1.1d, ungrouped.getColumnSource("DValue").getDouble(secondKey)); - assertEquals(io.deephaven.util.QueryConstants.NULL_CHAR, - ungrouped.getColumnSource("CCol").getChar(firstKey)); + assertEquals(io.deephaven.util.QueryConstants.NULL_CHAR, ungrouped.getColumnSource("CCol").getChar(firstKey)); assertEquals('b', ungrouped.getColumnSource("CCol").getChar(secondKey)); // repeat with prev assertEquals(io.deephaven.util.QueryConstants.NULL_BYTE, - ungrouped.getColumnSource("BValue").getPrevByte(firstKey)); + ungrouped.getColumnSource("BValue").getPrevByte(firstKey)); assertEquals((byte) 1, ungrouped.getColumnSource("BValue").getPrevByte(secondKey)); assertEquals(io.deephaven.util.QueryConstants.NULL_SHORT, - ungrouped.getColumnSource("SValue").getPrevShort(firstKey)); + ungrouped.getColumnSource("SValue").getPrevShort(firstKey)); assertEquals((short) 1, ungrouped.getColumnSource("SValue").getPrevShort(secondKey)); assertEquals(io.deephaven.util.QueryConstants.NULL_INT, - ungrouped.getColumnSource("EulavI").getPrevInt(firstKey)); + ungrouped.getColumnSource("EulavI").getPrevInt(firstKey)); assertEquals(1, ungrouped.getColumnSource("EulavI").getPrevInt(secondKey)); assertEquals(io.deephaven.util.QueryConstants.NULL_LONG, - ungrouped.getColumnSource("LValue").getPrevLong(firstKey)); + ungrouped.getColumnSource("LValue").getPrevLong(firstKey)); assertEquals(1, ungrouped.getColumnSource("LValue").getPrevLong(secondKey)); assertEquals(io.deephaven.util.QueryConstants.NULL_FLOAT, - 
ungrouped.getColumnSource("FValue").getPrevFloat(firstKey)); + ungrouped.getColumnSource("FValue").getPrevFloat(firstKey)); assertEquals(1.1f, ungrouped.getColumnSource("FValue").getPrevFloat(secondKey)); assertEquals(io.deephaven.util.QueryConstants.NULL_DOUBLE, - ungrouped.getColumnSource("DValue").getPrevDouble(firstKey)); + ungrouped.getColumnSource("DValue").getPrevDouble(firstKey)); assertEquals(1.1d, ungrouped.getColumnSource("DValue").getPrevDouble(secondKey)); - assertEquals(QueryConstants.NULL_CHAR, - ungrouped.getColumnSource("CCol").getPrevChar(firstKey)); + assertEquals(QueryConstants.NULL_CHAR, ungrouped.getColumnSource("CCol").getPrevChar(firstKey)); assertEquals('b', ungrouped.getColumnSource("CCol").getPrevChar(secondKey)); assertEquals(null, ungrouped.getColumnSource("CCol").getPrev(firstKey)); @@ -2995,7 +2695,7 @@ public void testUngroupJoined_IDS6311() throws IOException, ClassNotFoundExcepti // This tests the NPE condition in the ungrouped column sources final Table snappy = InitialSnapshotTable.setupInitialSnapshotTable(ungrouped, - ConstructSnapshot.constructInitialSnapshot(this, (QueryTable) ungrouped)); + ConstructSnapshot.constructInitialSnapshot(this, (QueryTable) ungrouped)); assertTableEquals(expected, snappy); } @@ -3003,8 +2703,7 @@ private void testMemoize(QueryTable source, io.deephaven.base.Function.Unary op) { + private void testMemoize(QueryTable source, boolean withCopy, io.deephaven.base.Function.Unary op) { final Table result = op.call(source); final Table result2 = op.call(source); Assert.assertSame(result, result2); @@ -3020,21 +2719,20 @@ private void testMemoize(QueryTable source, boolean withCopy, } private void testNoMemoize(Table source, io.deephaven.base.Function.Unary op1, - io.deephaven.base.Function.Unary op2) { + io.deephaven.base.Function.Unary op2) { final Table result = op1.call(source); final Table result2 = op2.call(source); Assert.assertNotSame(result, result2); } private void testMemoize(Table source, 
io.deephaven.base.Function.Unary op1, - io.deephaven.base.Function.Unary op2) { + io.deephaven.base.Function.Unary op2) { final Table result = op1.call(source); final Table result2 = op2.call(source); Assert.assertSame(result, result2); } - private void testNoMemoize(QueryTable source, - io.deephaven.base.Function.Unary op) { + private void testNoMemoize(QueryTable source, io.deephaven.base.Function.Unary op) { final Table result = op.call(source); final Table result2 = op.call(source); Assert.assertNotSame(result, result2); @@ -3043,8 +2741,7 @@ private void testNoMemoize(QueryTable source, Assert.assertNotSame(result, result3); } - private void testNoMemoize(QueryTable source, QueryTable copy, - io.deephaven.base.Function.Unary op) { + private void testNoMemoize(QueryTable source, QueryTable copy, io.deephaven.base.Function.Unary op) { final Table result = op.call(source); final Table result2 = op.call(copy); Assert.assertNotSame(result, result2); @@ -3052,8 +2749,7 @@ private void testNoMemoize(QueryTable source, QueryTable copy, public void testMemoize() { final Random random = new Random(0); - final QueryTable source = getTable(1000, random, - initColumnInfos(new String[] {"Sym", "intCol", "doubleCol"}, + final QueryTable source = getTable(1000, random, initColumnInfos(new String[] {"Sym", "intCol", "doubleCol"}, new SetGenerator<>("aa", "bb", "bc", "cc", "dd", "ee", "ff", "gg", "hh", "ii"), new IntGenerator(0, 100), new DoubleGenerator(0, 100))); @@ -3067,10 +2763,8 @@ public void testMemoize() { testMemoize(source, t -> t.view("intCol", "doubleCol")); testMemoize(source, t -> t.view("doubleCol", "intCol")); testNoMemoize(source, t -> t.view("doubleCol", "intCol"), t -> t.view("doubleCol")); - testNoMemoize(source, t -> t.view("doubleCol", "intCol"), - t -> t.view("intCol", "doubleCol")); - // we are not smart enough to handle formulas, because they might have different query - // scopes + testNoMemoize(source, t -> t.view("doubleCol", "intCol"), t -> 
t.view("intCol", "doubleCol")); + // we are not smart enough to handle formulas, because they might have different query scopes testNoMemoize(source, t -> t.view("doubleCol=doubleCol/2.0")); // we don't handle renames, because the SwitchColumn is hard to get right testNoMemoize(source, t -> t.view("intCol2=intCol", "doubleCol")); @@ -3087,23 +2781,19 @@ public void testMemoize() { testMemoize(source, t -> t.where("Sym=`aa`")); testMemoize(source, t -> t.where("Sym in `aa`, `bb`")); testMemoize(source, - t -> t.whereOneOf( - Collections - .singletonList(SelectFilterFactory.getExpression("Sym in `aa`, `bb`")), - Collections.singletonList(SelectFilterFactory.getExpression("intCol=7")))); - testMemoize(source, t -> t.where(DisjunctiveFilter.makeDisjunctiveFilter( - SelectFilterFactory.getExpressions("Sym in `aa`, `bb`", "intCol=7")))); - testMemoize(source, t -> t.where(ConjunctiveFilter.makeConjunctiveFilter( - SelectFilterFactory.getExpressions("Sym in `aa`, `bb`", "intCol=7")))); + t -> t.whereOneOf(Collections.singletonList(SelectFilterFactory.getExpression("Sym in `aa`, `bb`")), + Collections.singletonList(SelectFilterFactory.getExpression("intCol=7")))); + testMemoize(source, t -> t.where(DisjunctiveFilter + .makeDisjunctiveFilter(SelectFilterFactory.getExpressions("Sym in `aa`, `bb`", "intCol=7")))); + testMemoize(source, t -> t.where(ConjunctiveFilter + .makeConjunctiveFilter(SelectFilterFactory.getExpressions("Sym in `aa`, `bb`", "intCol=7")))); testNoMemoize(source, - t -> t.where(ConjunctiveFilter.makeConjunctiveFilter( - SelectFilterFactory.getExpressions("Sym in `aa`, `bb`", "intCol=7"))), - t -> t.where(DisjunctiveFilter.makeDisjunctiveFilter( - SelectFilterFactory.getExpressions("Sym in `aa`, `bb`", "intCol=7")))); - testNoMemoize(source, t -> t.where("Sym in `aa`, `bb`"), - t -> t.where("Sym not in `aa`, `bb`")); - testNoMemoize(source, t -> t.where("Sym in `aa`, `bb`"), - t -> t.where("Sym in `aa`, `cc`")); + t -> 
t.where(ConjunctiveFilter.makeConjunctiveFilter( + SelectFilterFactory.getExpressions("Sym in `aa`, `bb`", "intCol=7"))), + t -> t.where(DisjunctiveFilter.makeDisjunctiveFilter( + SelectFilterFactory.getExpressions("Sym in `aa`, `bb`", "intCol=7")))); + testNoMemoize(source, t -> t.where("Sym in `aa`, `bb`"), t -> t.where("Sym not in `aa`, `bb`")); + testNoMemoize(source, t -> t.where("Sym in `aa`, `bb`"), t -> t.where("Sym in `aa`, `cc`")); testNoMemoize(source, t -> t.where("Sym.startsWith(`a`)")); testMemoize(source, t -> t.countBy("Count", "Sym")); @@ -3113,25 +2803,22 @@ public void testMemoize() { testMemoize(source, t -> t.minBy("Sym")); testMemoize(source, t -> t.dropColumns("Sym")); testMemoize(source, t -> t.dropColumns("Sym", "intCol")); - testMemoize(source, t -> t.dropColumns("Sym", "intCol"), - t -> t.dropColumns("intCol", "Sym")); + testMemoize(source, t -> t.dropColumns("Sym", "intCol"), t -> t.dropColumns("intCol", "Sym")); testMemoize(source, t -> t.dropColumns("intCol", "Sym")); testNoMemoize(source, t -> t.dropColumns("Sym"), t -> t.dropColumns("intCol")); testMemoize(source, t -> t.maxBy("Sym")); - testMemoize(source, - t -> t.by(AggCombo(AggSum("intCol"), AggAbsSum("absInt=intCol"), - AggMax("doubleCol"), AggFirst("Sym"), AggCountDistinct("UniqueCountSym=Sym"), - AggDistinct("UniqueSym=Sym")))); + testMemoize(source, t -> t.by(AggCombo(AggSum("intCol"), AggAbsSum("absInt=intCol"), AggMax("doubleCol"), + AggFirst("Sym"), AggCountDistinct("UniqueCountSym=Sym"), AggDistinct("UniqueSym=Sym")))); testMemoize(source, t -> t.by(AggCombo(AggCountDistinct("UniqueCountSym=Sym")))); testMemoize(source, t -> t.by(AggCombo(AggCountDistinct(true, "UniqueCountSym=Sym")))); testNoMemoize(source, t -> t.by(AggCombo(AggCountDistinct("UniqueCountSym=Sym"))), - t -> t.by(AggCombo(AggCountDistinct(true, "UniqueCountSym=Sym")))); + t -> t.by(AggCombo(AggCountDistinct(true, "UniqueCountSym=Sym")))); testMemoize(source, t -> 
t.by(AggCombo(AggDistinct("UniqueSym=Sym")))); testMemoize(source, t -> t.by(AggCombo(AggDistinct(true, "UniqueSym=Sym")))); testNoMemoize(source, t -> t.by(AggCombo(AggCountDistinct("UniqueCountSym=Sym"))), - t -> t.by(AggCombo(AggDistinct("UniqueCountSym=Sym")))); + t -> t.by(AggCombo(AggDistinct("UniqueCountSym=Sym")))); testNoMemoize(source, t -> t.by(AggCombo(AggDistinct("UniqueSym=Sym"))), - t -> t.by(AggCombo(AggDistinct(true, "UniqueSym=Sym")))); + t -> t.by(AggCombo(AggDistinct(true, "UniqueSym=Sym")))); testNoMemoize(source, t -> t.countBy("Sym"), t -> t.countBy("Count", "Sym")); testNoMemoize(source, t -> t.sumBy("Sym"), t -> t.countBy("Count", "Sym")); testNoMemoize(source, t -> t.sumBy("Sym"), t -> t.avgBy("Sym")); @@ -3148,8 +2835,7 @@ public void testMemoize() { }); final Table withRestrictions = source.copy().restrictSortTo("intCol", "doubleCol"); - testNoMemoize(source, (QueryTable) withRestrictions, - t -> t.sort("intCol", "doubleCol")); + testNoMemoize(source, (QueryTable) withRestrictions, t -> t.sort("intCol", "doubleCol")); } finally { QueryTable.setMemoizeResults(old); } @@ -3200,8 +2886,8 @@ public void testWhereInGrouped() throws IOException { // noinspection unchecked final Map gtr = (Map) t.getIndex().getGrouping(symbol); ((AbstractColumnSource) symbol).setGroupToRange(gtr); - final Table result = t.whereIn(Table.GroupStrategy.CREATE_GROUPS, - t.where("Truthiness=true"), "Symbol", "Timestamp"); + final Table result = + t.whereIn(Table.GroupStrategy.CREATE_GROUPS, t.where("Truthiness=true"), "Symbol", "Timestamp"); TableTools.showWithIndex(result); }); } @@ -3210,25 +2896,24 @@ private void diskBackedTestHarness(Consumer
    testFunction) throws IOExcept final File testDirectory = Files.createTempDirectory("SymbolTableTest").toFile(); final TableDefinition definition = TableDefinition.of( - ColumnDefinition.ofInt("Sentinel"), - ColumnDefinition.ofString("Symbol").withGrouping(), - ColumnDefinition.ofTime("Timestamp"), - ColumnDefinition.ofBoolean("Truthiness")); + ColumnDefinition.ofInt("Sentinel"), + ColumnDefinition.ofString("Symbol").withGrouping(), + ColumnDefinition.ofTime("Timestamp"), + ColumnDefinition.ofBoolean("Truthiness")); final String[] syms = new String[] {"Apple", "Banana", "Cantaloupe"}; final DBDateTime baseTime = DBTimeUtils.convertDateTime("2019-04-11T09:30 NY"); final long dateOffset[] = new long[] {0, 5, 10, 15, 1, 6, 11, 16, 2, 7}; - final Boolean booleans[] = - new Boolean[] {true, false, null, true, false, null, true, false, null, true, false}; + final Boolean booleans[] = new Boolean[] {true, false, null, true, false, null, true, false, null, true, false}; QueryScope.addParam("syms", syms); QueryScope.addParam("baseTime", baseTime); QueryScope.addParam("dateOffset", dateOffset); QueryScope.addParam("booleans", booleans); final Table source = emptyTable(10) - .updateView("Sentinel=i", "Symbol=syms[i % syms.length]", - "Timestamp=baseTime+dateOffset[i]*3600L*1000000000L", "Truthiness=booleans[i]") - .by("Symbol").ungroup(); + .updateView("Sentinel=i", "Symbol=syms[i % syms.length]", + "Timestamp=baseTime+dateOffset[i]*3600L*1000000000L", "Truthiness=booleans[i]") + .by("Symbol").ungroup(); testDirectory.mkdirs(); final File dest = new File(testDirectory, "Table.parquet"); try { @@ -3242,10 +2927,9 @@ private void diskBackedTestHarness(Consumer
    testFunction) throws IOExcept } public void testIds7153() { - final QueryTable lTable = testRefreshingTable( - Index.FACTORY.getIndexByValues(10, 12, 14, 16), c("X", "a", "b", "c", "d")); - final QueryTable rTable = - testRefreshingTable(c("X", "a", "b", "c", "d"), c("R", 0, 1, 2, 3)); + final QueryTable lTable = + testRefreshingTable(Index.FACTORY.getIndexByValues(10, 12, 14, 16), c("X", "a", "b", "c", "d")); + final QueryTable rTable = testRefreshingTable(c("X", "a", "b", "c", "d"), c("R", 0, 1, 2, 3)); final MutableObject nj = new MutableObject<>(); final MutableObject ft = new MutableObject<>(); @@ -3264,25 +2948,17 @@ public void testIds7153() { nj.setValue((QueryTable) lTable.naturalJoin(rTable, "X")); try { - // The real test happens here. Off of the ltm thread we do an operation, one that - // supports concurrent - // instantiation, such that we use prev values when applicable. Assume the parent - // table has not ticked - // this cycle: 1) if the parent table pre-existed then we want to use prev values - // (to handle when parent - // is mid-tick but unpublished) 2) if the parent table was created this cycle, then - // A) prev values are - // undefined, B) it must have been created AFTER any of its dependencies may have - // ticked this cycle and + // The real test happens here. Off of the ltm thread we do an operation, one that supports concurrent + // instantiation, such that we use prev values when applicable. Assume the parent table has not ticked + // this cycle: 1) if the parent table pre-existed then we want to use prev values (to handle when parent + // is mid-tick but unpublished) 2) if the parent table was created this cycle, then A) prev values are + // undefined, B) it must have been created AFTER any of its dependencies may have ticked this cycle and // C) the table is not allowed to tick this cycle. - // The specific scenario we are trying to catch is when the parent re-uses data - // structures (i.e. 
index) - // from its parent, which have valid prev values, but the prev values must not be - // used during the first + // The specific scenario we are trying to catch is when the parent re-uses data structures (i.e. index) + // from its parent, which have valid prev values, but the prev values must not be used during the first // cycle. - final Thread offltm = - new Thread(() -> ft.setValue((QueryTable) nj.getValue().flatten())); + final Thread offltm = new Thread(() -> ft.setValue((QueryTable) nj.getValue().flatten())); offltm.start(); offltm.join(); } catch (final Exception e) { @@ -3296,16 +2972,13 @@ public void testIds7153() { } public void testNoCoalesceOnNotification() { - // SourceTable is an uncoalesced table that also has an idempotent "start" despite whether - // or not the coalesced - // table continues to be live and managed. When the source table ticks and it is currently - // uncoalesced, there - // was a regression inside of BaseTable#notifyListeners that would invoke getIndex() and - // cause this table to be - // coalesced and for the new result table to receive and propagate that update. IDS-7153 - // made it explicitly illegal - // to publish an update on a table's very first cycle if it was initiated under the LTM - // lock. It's also not great + // SourceTable is an uncoalesced table that also has an idempotent "start" despite whether or not the coalesced + // table continues to be live and managed. When the source table ticks and it is currently uncoalesced, there + // was a regression inside of BaseTable#notifyListeners that would invoke getIndex() and cause this table to be + // coalesced and for the new result table to receive and propagate that update. IDS-7153 made it explicitly + // illegal + // to publish an update on a table's very first cycle if it was initiated under the LTM lock. It's also not + // great // to coalesce a table and immediately make it garbage. 
// This regression check verifies that we do not see a lastNotificationStep != @@ -3316,8 +2989,8 @@ public void testNoCoalesceOnNotification() { final Supplier supplier = () -> TstUtils.testRefreshingTable(parentIndex); final UncoalescedTable table = new UncoalescedTable( - supplier.get().getDefinition(), - "mock un-coalesced table") { + supplier.get().getDefinition(), + "mock un-coalesced table") { @Override protected DynamicTable doCoalesce() { @@ -3339,8 +3012,7 @@ protected DynamicTable doCoalesce() { parentIndex.insert(update.added); table.notifyListeners(update); - Assert.assertEquals(LogicalClock.DEFAULT.currentStep(), - table.getLastNotificationStep()); + Assert.assertEquals(LogicalClock.DEFAULT.currentStep(), table.getLastNotificationStep()); }); } @@ -3448,8 +3120,8 @@ public void testRegressionIssue544() { // .view("Q=Q*i") // .sumBy() // - // The exception we were getting was: java.lang.IllegalArgumentException: keys argument has - // elements not in the index + // The exception we were getting was: java.lang.IllegalArgumentException: keys argument has elements not in the + // index // final Table t0 = newTable(byteCol("Q", (byte) 0)); final QueryTable t1 = TstUtils.testRefreshingTable(i(), intCol("T")); diff --git a/DB/src/test/java/io/deephaven/db/v2/QueryTableTestBase.java b/DB/src/test/java/io/deephaven/db/v2/QueryTableTestBase.java index 1722cfb4922..37fedca9960 100644 --- a/DB/src/test/java/io/deephaven/db/v2/QueryTableTestBase.java +++ b/DB/src/test/java/io/deephaven/db/v2/QueryTableTestBase.java @@ -19,7 +19,7 @@ public abstract class QueryTableTestBase extends LiveTableTestCase { private static final boolean ENABLE_COMPILER_TOOLS_LOGGING = Configuration.getInstance() - .getBooleanForClassWithDefault(QueryTableTestBase.class, "CompilerTools.logEnabled", false); + .getBooleanForClassWithDefault(QueryTableTestBase.class, "CompilerTools.logEnabled", false); protected final SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd"); @@ -27,7 
+27,7 @@ public abstract class QueryTableTestBase extends LiveTableTestCase { private boolean oldCheckLtm; private static final GenerateTableUpdates.SimulationProfile NO_SHIFT_PROFILE = - new GenerateTableUpdates.SimulationProfile(); + new GenerateTableUpdates.SimulationProfile(); static { NO_SHIFT_PROFILE.SHIFT_10_PERCENT_KEY_SPACE = 0; NO_SHIFT_PROFILE.SHIFT_10_PERCENT_POS_SPACE = 0; @@ -57,10 +57,9 @@ protected void tearDown() throws Exception { final JoinIncrement leftStep = new JoinIncrement() { @Override public void step(int leftSize, int rightSize, QueryTable leftTable, QueryTable rightTable, - TstUtils.ColumnInfo[] leftColumnInfo, TstUtils.ColumnInfo[] rightColumnInfo, - EvalNuggetInterface[] en, Random random) { - simulateShiftAwareStep(NO_SHIFT_PROFILE, toString(), leftSize, random, leftTable, - leftColumnInfo, en); + TstUtils.ColumnInfo[] leftColumnInfo, TstUtils.ColumnInfo[] rightColumnInfo, EvalNuggetInterface[] en, + Random random) { + simulateShiftAwareStep(NO_SHIFT_PROFILE, toString(), leftSize, random, leftTable, leftColumnInfo, en); } @Override @@ -71,8 +70,8 @@ public String toString() { final JoinIncrement leftStepShift = new JoinIncrement() { @Override public void step(int leftSize, int rightSize, QueryTable leftTable, QueryTable rightTable, - TstUtils.ColumnInfo[] leftColumnInfo, TstUtils.ColumnInfo[] rightColumnInfo, - EvalNuggetInterface[] en, Random random) { + TstUtils.ColumnInfo[] leftColumnInfo, TstUtils.ColumnInfo[] rightColumnInfo, EvalNuggetInterface[] en, + Random random) { simulateShiftAwareStep(toString(), leftSize, random, leftTable, leftColumnInfo, en); } @@ -84,10 +83,9 @@ public String toString() { final JoinIncrement rightStep = new JoinIncrement() { @Override public void step(int leftSize, int rightSize, QueryTable leftTable, QueryTable rightTable, - TstUtils.ColumnInfo[] leftColumnInfo, TstUtils.ColumnInfo[] rightColumnInfo, - EvalNuggetInterface[] en, Random random) { - simulateShiftAwareStep(NO_SHIFT_PROFILE, toString(), 
rightSize, random, rightTable, - rightColumnInfo, en); + TstUtils.ColumnInfo[] leftColumnInfo, TstUtils.ColumnInfo[] rightColumnInfo, EvalNuggetInterface[] en, + Random random) { + simulateShiftAwareStep(NO_SHIFT_PROFILE, toString(), rightSize, random, rightTable, rightColumnInfo, en); } @Override @@ -98,8 +96,8 @@ public String toString() { final JoinIncrement rightStepShift = new JoinIncrement() { @Override public void step(int leftSize, int rightSize, QueryTable leftTable, QueryTable rightTable, - TstUtils.ColumnInfo[] leftColumnInfo, TstUtils.ColumnInfo[] rightColumnInfo, - EvalNuggetInterface[] en, Random random) { + TstUtils.ColumnInfo[] leftColumnInfo, TstUtils.ColumnInfo[] rightColumnInfo, EvalNuggetInterface[] en, + Random random) { simulateShiftAwareStep(toString(), rightSize, random, rightTable, rightColumnInfo, en); } @@ -111,12 +109,10 @@ public String toString() { final JoinIncrement leftRightStep = new JoinIncrement() { @Override public void step(int leftSize, int rightSize, QueryTable leftTable, QueryTable rightTable, - TstUtils.ColumnInfo[] leftColumnInfo, TstUtils.ColumnInfo[] rightColumnInfo, - EvalNuggetInterface[] en, Random random) { - simulateShiftAwareStep(NO_SHIFT_PROFILE, toString(), leftSize, random, leftTable, - leftColumnInfo, en); - simulateShiftAwareStep(NO_SHIFT_PROFILE, toString(), rightSize, random, rightTable, - rightColumnInfo, en); + TstUtils.ColumnInfo[] leftColumnInfo, TstUtils.ColumnInfo[] rightColumnInfo, EvalNuggetInterface[] en, + Random random) { + simulateShiftAwareStep(NO_SHIFT_PROFILE, toString(), leftSize, random, leftTable, leftColumnInfo, en); + simulateShiftAwareStep(NO_SHIFT_PROFILE, toString(), rightSize, random, rightTable, rightColumnInfo, en); } @Override @@ -127,8 +123,8 @@ public String toString() { final JoinIncrement leftRightStepShift = new JoinIncrement() { @Override public void step(int leftSize, int rightSize, QueryTable leftTable, QueryTable rightTable, - TstUtils.ColumnInfo[] leftColumnInfo, 
TstUtils.ColumnInfo[] rightColumnInfo, - EvalNuggetInterface[] en, Random random) { + TstUtils.ColumnInfo[] leftColumnInfo, TstUtils.ColumnInfo[] rightColumnInfo, EvalNuggetInterface[] en, + Random random) { simulateShiftAwareStep(toString(), leftSize, random, leftTable, leftColumnInfo, en); simulateShiftAwareStep(toString(), rightSize, random, rightTable, rightColumnInfo, en); } @@ -142,15 +138,13 @@ public String toString() { final JoinIncrement leftRightConcurrentStepShift = new JoinIncrement() { @Override public void step(int leftSize, int rightSize, QueryTable leftTable, QueryTable rightTable, - TstUtils.ColumnInfo[] leftColumnInfo, TstUtils.ColumnInfo[] rightColumnInfo, - EvalNuggetInterface[] en, Random random) { + TstUtils.ColumnInfo[] leftColumnInfo, TstUtils.ColumnInfo[] rightColumnInfo, EvalNuggetInterface[] en, + Random random) { LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - GenerateTableUpdates.generateShiftAwareTableUpdates( - GenerateTableUpdates.DEFAULT_PROFILE, leftSize, random, leftTable, - leftColumnInfo); - GenerateTableUpdates.generateShiftAwareTableUpdates( - GenerateTableUpdates.DEFAULT_PROFILE, rightSize, random, rightTable, - rightColumnInfo); + GenerateTableUpdates.generateShiftAwareTableUpdates(GenerateTableUpdates.DEFAULT_PROFILE, leftSize, + random, leftTable, leftColumnInfo); + GenerateTableUpdates.generateShiftAwareTableUpdates(GenerateTableUpdates.DEFAULT_PROFILE, rightSize, + random, rightTable, rightColumnInfo); }); } @@ -160,11 +154,9 @@ public String toString() { } }; - final JoinIncrement[] joinIncrementors = - new JoinIncrement[] {leftStep, rightStep, leftRightStep}; - final JoinIncrement[] joinIncrementorsShift = - new JoinIncrement[] {leftStep, rightStep, leftRightStep, leftStepShift, rightStepShift, - leftRightStepShift, leftRightConcurrentStepShift}; + final JoinIncrement[] joinIncrementors = new JoinIncrement[] {leftStep, rightStep, leftRightStep}; + final JoinIncrement[] joinIncrementorsShift = new 
JoinIncrement[] {leftStep, rightStep, leftRightStep, + leftStepShift, rightStepShift, leftRightStepShift, leftRightConcurrentStepShift}; protected Index added; protected Index removed; @@ -172,8 +164,8 @@ public String toString() { protected interface JoinIncrement { void step(int leftSize, int rightSize, QueryTable leftTable, QueryTable rightTable, - TstUtils.ColumnInfo[] leftColumnInfo, TstUtils.ColumnInfo[] rightColumnInfo, - EvalNuggetInterface[] en, Random random); + TstUtils.ColumnInfo[] leftColumnInfo, TstUtils.ColumnInfo[] rightColumnInfo, EvalNuggetInterface[] en, + Random random); } public static class TableComparator implements EvalNuggetInterface { @@ -231,8 +223,7 @@ void reset() { @Override public void onUpdate(Index added, Index removed, Index modified) { freeResources(); - // Need to clone to save IndexShiftDataExpander indices that are destroyed at the end of - // the LTM cycle. + // Need to clone to save IndexShiftDataExpander indices that are destroyed at the end of the LTM cycle. 
this.added = added.clone(); this.removed = removed.clone(); this.modified = modified.clone(); @@ -242,11 +233,11 @@ public void onUpdate(Index added, Index removed, Index modified) { @Override public String toString() { return "SimpleListener{" + - "count=" + count + - ", added=" + added + - ", removed=" + removed + - ", modified=" + modified + - '}'; + "count=" + count + + ", added=" + added + + ", removed=" + removed + + ", modified=" + modified + + '}'; } public void freeResources() { @@ -274,8 +265,7 @@ public static int[] intColumn(Table table, String column) { protected static class CoalescingListener extends InstrumentedListenerAdapter { Index lastAdded, lastModified, lastRemoved; - Index.LegacyIndexUpdateCoalescer indexUpdateCoalescer = - new Index.LegacyIndexUpdateCoalescer(); + Index.LegacyIndexUpdateCoalescer indexUpdateCoalescer = new Index.LegacyIndexUpdateCoalescer(); protected CoalescingListener(DynamicTable source) { super(source, false); diff --git a/DB/src/test/java/io/deephaven/db/v2/QueryTableTreeTest.java b/DB/src/test/java/io/deephaven/db/v2/QueryTableTreeTest.java index a3bf67530cb..472eacb23c7 100644 --- a/DB/src/test/java/io/deephaven/db/v2/QueryTableTreeTest.java +++ b/DB/src/test/java/io/deephaven/db/v2/QueryTableTreeTest.java @@ -58,24 +58,22 @@ public void testMemoize() { final Random random = new Random(0); final ParentChildGenerator parentChildGenerator = new ParentChildGenerator(0.25, 0); final QueryTable table = getTable(1000, random, - initColumnInfos(new String[] {"IDPair", "Sentinel", "Sentinel2", "Sym"}, - parentChildGenerator, - new IntGenerator(0, 1_000_000_000), - new IntGenerator(0, 1_000_000_000), - new SetGenerator<>("AAPL", "TSLA", "VXX", "SPY"))); + initColumnInfos(new String[] {"IDPair", "Sentinel", "Sentinel2", "Sym"}, + parentChildGenerator, + new IntGenerator(0, 1_000_000_000), + new IntGenerator(0, 1_000_000_000), + new SetGenerator<>("AAPL", "TSLA", "VXX", "SPY"))); - final Table prepared = - 
table.update("ID=IDPair.getId()", "Parent=IDPair.getParent()").dropColumns("IDPair"); + final Table prepared = table.update("ID=IDPair.getId()", "Parent=IDPair.getParent()").dropColumns("IDPair"); final Table tree = prepared.treeTable("ID", "Parent"); final boolean old = QueryTable.setMemoizeResults(true); try { testMemoize(tree, t -> TreeTableFilter.rawFilterTree(t, "Sym in `AAPL`, `TSLA`")); - testMemoize(tree, t -> TreeTableFilter.rawFilterTree(t, "Sym in `AAPL`, `TSLA`", - "Sentinel == 500000000")); + testMemoize(tree, t -> TreeTableFilter.rawFilterTree(t, "Sym in `AAPL`, `TSLA`", "Sentinel == 500000000")); testNoMemoize(tree, t -> TreeTableFilter.rawFilterTree(t, "Sentinel > Sentinel2/4")); testNoMemoize(tree, t -> TreeTableFilter.rawFilterTree(t, "Sym in `AAPL`, `TSLA`"), - t -> TreeTableFilter.rawFilterTree(t, "Sym in `AAPL`")); + t -> TreeTableFilter.rawFilterTree(t, "Sym in `AAPL`")); } finally { QueryTable.setMemoizeResults(old); } @@ -87,8 +85,7 @@ private void testMemoize(Table source, Function.Unary op) { Assert.assertSame(result, result2); } - private void testNoMemoize(Table source, Function.Unary op1, - Function.Unary op2) { + private void testNoMemoize(Table source, Function.Unary op1, Function.Unary op2) { final Table result = op1.call(source); final Table result2 = op2.call(source); Assert.assertNotSame(result, result2); @@ -102,17 +99,16 @@ private void testNoMemoize(Table source, Function.Unary op) { public void testTreeTableSimple() { final Table source = TableTools.newTable(col("Sentinel", 1, 2, 3, 4, 5, 6, 7, 8, 9, 10), - col("Parent", NULL_INT, NULL_INT, 1, 1, 2, 3, 5, 5, 3, 2)); + col("Parent", NULL_INT, NULL_INT, 1, 1, 2, 3, 5, 5, 3, 2)); - final Table treed = LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLocked(() -> source.treeTable("Sentinel", "Parent")); + final Table treed = + LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked(() -> source.treeTable("Sentinel", "Parent")); final String hierarchicalColumnName = 
getHierarchicalColumnName(treed); TableTools.showWithIndex(treed); assertEquals(2, treed.size()); - assertTrue(Arrays.equals(new int[] {NULL_INT, NULL_INT}, - (int[]) treed.getColumn("Parent").getDirect())); + assertTrue(Arrays.equals(new int[] {NULL_INT, NULL_INT}, (int[]) treed.getColumn("Parent").getDirect())); final Table child1 = getChildTable(treed, treed, hierarchicalColumnName, 0); assertNotNull(child1); @@ -141,29 +137,28 @@ public void testConcurrentInstantiation() throws ExecutionException, Interrupted try { final QueryTable source = TstUtils.testRefreshingTable(Index.FACTORY.getFlatIndex(10), - col("Sentinel", 1, 2, 3, 4, 5, 6, 7, 8, 9, 10), - col("Parent", NULL_INT, NULL_INT, 1, 1, 2, 3, 5, 5, 3, 2), - col("Extra", "a", "b", "c", "d", "e", "f", "g", "h", "i", "j")); + col("Sentinel", 1, 2, 3, 4, 5, 6, 7, 8, 9, 10), + col("Parent", NULL_INT, NULL_INT, 1, 1, 2, 3, 5, 5, 3, 2), + col("Extra", "a", "b", "c", "d", "e", "f", "g", "h", "i", "j")); final QueryTable source2 = TstUtils.testRefreshingTable(Index.FACTORY.getFlatIndex(11), - col("Sentinel", 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11), - col("Parent", NULL_INT, NULL_INT, 1, 1, 2, 3, 5, 5, 3, 2, NULL_INT), - col("Extra", "aa", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k")); + col("Sentinel", 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11), + col("Parent", NULL_INT, NULL_INT, 1, 1, 2, 3, 5, 5, 3, 2, NULL_INT), + col("Extra", "aa", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k")); final QueryTable source3 = TstUtils.testRefreshingTable(Index.FACTORY.getFlatIndex(12), - col("Sentinel", 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12), - col("Parent", NULL_INT, NULL_INT, 1, 1, 2, 3, 5, 5, 3, 2, NULL_INT, 11), - col("Extra", "aa", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l")); + col("Sentinel", 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12), + col("Parent", NULL_INT, NULL_INT, 1, 1, 2, 3, 5, 5, 3, 2, NULL_INT, 11), + col("Extra", "aa", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l")); final Table rootExpected1 = 
source.where("isNull(Parent)"); final Table rootExpected2 = source2.where("isNull(Parent)"); final Table rootExpected3 = source3.where("isNull(Parent)"); final Supplier
    doTree = () -> source.treeTable("Sentinel", "Parent"); - final Table expect = - LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked(doTree::get); + final Table expect = LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked(doTree::get); final Table expectOriginal = LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLocked(() -> makeStatic(source).treeTable("Sentinel", "Parent")); + .computeLocked(() -> makeStatic(source).treeTable("Sentinel", "Parent")); final Table expect2 = LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLocked(() -> source2.treeTable("Sentinel", "Parent")); + .computeLocked(() -> source2.treeTable("Sentinel", "Parent")); final String hierarchicalColumnName = getHierarchicalColumnName(expect); @@ -171,28 +166,24 @@ public void testConcurrentInstantiation() throws ExecutionException, Interrupted final Table treed1 = pool.submit(doTree::get).get(); - doCompareWithChildrenForTrees("testConcurrentInstantiation", treed1, expect, 0, 10, - hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + doCompareWithChildrenForTrees("testConcurrentInstantiation", treed1, expect, 0, 10, hierarchicalColumnName, + CollectionUtil.ZERO_LENGTH_STRING_ARRAY); System.out.println("ORIGINAL TREED1"); dumpRollup(treed1, hierarchicalColumnName); - TstUtils.addToTable(source, i(0, 11), c("Sentinel", 1, 11), - c("Parent", NULL_INT, NULL_INT), c("Extra", "aa", "k")); + TstUtils.addToTable(source, i(0, 11), c("Sentinel", 1, 11), c("Parent", NULL_INT, NULL_INT), + c("Extra", "aa", "k")); final Table treed2 = pool.submit(doTree::get).get(); - doCompareWithChildrenForTrees("testConcurrentInstantiation", treed1, expectOriginal, - true, false, 0, 10, hierarchicalColumnName, - CollectionUtil.ZERO_LENGTH_STRING_ARRAY); - doCompareWithChildrenForTrees("testConcurrentInstantiation", treed2, expectOriginal, - true, false, 0, 10, hierarchicalColumnName, - CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + doCompareWithChildrenForTrees("testConcurrentInstantiation", treed1, 
expectOriginal, true, false, 0, 10, + hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + doCompareWithChildrenForTrees("testConcurrentInstantiation", treed2, expectOriginal, true, false, 0, 10, + hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY); - final TableMap map1 = - (TableMap) treed1.getAttribute(Table.HIERARCHICAL_CHILDREN_TABLE_MAP_ATTRIBUTE); - final TableMap map2 = - (TableMap) treed2.getAttribute(Table.HIERARCHICAL_CHILDREN_TABLE_MAP_ATTRIBUTE); + final TableMap map1 = (TableMap) treed1.getAttribute(Table.HIERARCHICAL_CHILDREN_TABLE_MAP_ATTRIBUTE); + final TableMap map2 = (TableMap) treed2.getAttribute(Table.HIERARCHICAL_CHILDREN_TABLE_MAP_ATTRIBUTE); assertNotNull(map1); assertNotNull(map2); @@ -208,17 +199,14 @@ public void testConcurrentInstantiation() throws ExecutionException, Interrupted assertTableEquals(rootExpected1.sortDescending("Sentinel"), sortedRoot1); assertTableEquals(rootExpected1.sortDescending("Sentinel"), sortedRoot2); - assertTableEquals(rootExpected1.sortDescending("Sentinel").sort("Extra"), - sortedSortedRoot1); - assertTableEquals(rootExpected1.sortDescending("Sentinel").sort("Extra"), - sortedSortedRoot2); + assertTableEquals(rootExpected1.sortDescending("Sentinel").sort("Extra"), sortedSortedRoot1); + assertTableEquals(rootExpected1.sortDescending("Sentinel").sort("Extra"), sortedSortedRoot2); source.notifyListeners(i(11), i(), i()); final Table treed3 = pool.submit(doTree::get).get(); - final TableMap map3 = - (TableMap) treed3.getAttribute(Table.HIERARCHICAL_CHILDREN_TABLE_MAP_ATTRIBUTE); + final TableMap map3 = (TableMap) treed3.getAttribute(Table.HIERARCHICAL_CHILDREN_TABLE_MAP_ATTRIBUTE); final Table root3 = map3.get(null); final Table sortedRoot3 = pool.submit(() -> root3.sortDescending("Sentinel")).get(); final Table sortedSortedRoot3 = pool.submit(() -> sortedRoot3.sort("Extra")).get(); @@ -227,21 +215,21 @@ public void testConcurrentInstantiation() throws ExecutionException, Interrupted 
TableTools.show(treed3); - doCompareWithChildrenForTrees("testConcurrentInstantiation", expect, treed1, 0, 4, - hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY); - doCompareWithChildrenForTrees("testConcurrentInstantiation", expect, treed2, 0, 4, - hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY); - doCompareWithChildrenForTrees("testConcurrentInstantiation", expect2, treed3, 0, 4, - hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + doCompareWithChildrenForTrees("testConcurrentInstantiation", expect, treed1, 0, 4, hierarchicalColumnName, + CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + doCompareWithChildrenForTrees("testConcurrentInstantiation", expect, treed2, 0, 4, hierarchicalColumnName, + CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + doCompareWithChildrenForTrees("testConcurrentInstantiation", expect2, treed3, 0, 4, hierarchicalColumnName, + CollectionUtil.ZERO_LENGTH_STRING_ARRAY); LiveTableMonitor.DEFAULT.completeCycleForUnitTests(); - doCompareWithChildrenForTrees("testConcurrentInstantiation", expect2, treed1, 0, 4, - hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY); - doCompareWithChildrenForTrees("testConcurrentInstantiation", expect2, treed2, 0, 4, - hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY); - doCompareWithChildrenForTrees("testConcurrentInstantiation", expect2, treed3, 0, 4, - hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + doCompareWithChildrenForTrees("testConcurrentInstantiation", expect2, treed1, 0, 4, hierarchicalColumnName, + CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + doCompareWithChildrenForTrees("testConcurrentInstantiation", expect2, treed2, 0, 4, hierarchicalColumnName, + CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + doCompareWithChildrenForTrees("testConcurrentInstantiation", expect2, treed3, 0, 4, hierarchicalColumnName, + CollectionUtil.ZERO_LENGTH_STRING_ARRAY); final Table expectedFinalSort = rootExpected2.sortDescending("Sentinel"); 
assertTableEquals(expectedFinalSort, sortedRoot1); @@ -256,28 +244,27 @@ public void testConcurrentInstantiation() throws ExecutionException, Interrupted LiveTableMonitor.DEFAULT.startCycleForUnitTests(); - final Table backwards1 = pool.submit(() -> TreeTableFilter - .rawFilterTree(treed1, "!isNull(Extra)").sortDescending("Extra")).get(); - final Table backwardsTree1a = - pool.submit(() -> backwards1.treeTable("Sentinel", "Parent")).get(); + final Table backwards1 = + pool.submit(() -> TreeTableFilter.rawFilterTree(treed1, "!isNull(Extra)").sortDescending("Extra")) + .get(); + final Table backwardsTree1a = pool.submit(() -> backwards1.treeTable("Sentinel", "Parent")).get(); final Table treed4 = pool.submit(doTree::get).get(); TstUtils.addToTable(source, i(12), c("Sentinel", 12), c("Parent", 11), c("Extra", "l")); - final Table backwards2 = pool.submit(() -> TreeTableFilter - .rawFilterTree(treed1, "!isNull(Extra)").sortDescending("Extra")).get(); - final Table backwardsTree1b = - pool.submit(() -> backwards1.treeTable("Sentinel", "Parent")).get(); - final Table backwardsTree2a = - pool.submit(() -> backwards2.treeTable("Sentinel", "Parent")).get(); + final Table backwards2 = + pool.submit(() -> TreeTableFilter.rawFilterTree(treed1, "!isNull(Extra)").sortDescending("Extra")) + .get(); + final Table backwardsTree1b = pool.submit(() -> backwards1.treeTable("Sentinel", "Parent")).get(); + final Table backwardsTree2a = pool.submit(() -> backwards2.treeTable("Sentinel", "Parent")).get(); final Table treed5 = pool.submit(doTree::get).get(); int ii = 1; for (Table treed : Arrays.asList(treed1, treed2, treed3, treed4, treed5)) { - doCompareWithChildrenForTrees("testConcurrentInstantiation" + ii++, expect, treed, - 0, 4, hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + doCompareWithChildrenForTrees("testConcurrentInstantiation" + ii++, expect, treed, 0, 4, + hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY); } final Table eleven1b = 
map1.get(11); @@ -288,14 +275,12 @@ public void testConcurrentInstantiation() throws ExecutionException, Interrupted final Table treed6 = pool.submit(doTree::get).get(); LiveTableMonitor.DEFAULT.flushAllNormalNotificationsForUnitTests(); - final Table backwardsTree1c = - pool.submit(() -> backwards1.treeTable("Sentinel", "Parent")).get(); - final Table backwardsTree2b = - pool.submit(() -> backwards2.treeTable("Sentinel", "Parent")).get(); - final Table backwards3 = pool.submit(() -> TreeTableFilter - .rawFilterTree(treed1, "!isNull(Extra)").sortDescending("Extra")).get(); - final Table backwardsTree3 = - pool.submit(() -> backwards3.treeTable("Sentinel", "Parent")).get(); + final Table backwardsTree1c = pool.submit(() -> backwards1.treeTable("Sentinel", "Parent")).get(); + final Table backwardsTree2b = pool.submit(() -> backwards2.treeTable("Sentinel", "Parent")).get(); + final Table backwards3 = + pool.submit(() -> TreeTableFilter.rawFilterTree(treed1, "!isNull(Extra)").sortDescending("Extra")) + .get(); + final Table backwardsTree3 = pool.submit(() -> backwards3.treeTable("Sentinel", "Parent")).get(); final Table root1a = map1.get(null); final Table root2a = map2.get(null); @@ -309,12 +294,9 @@ public void testConcurrentInstantiation() throws ExecutionException, Interrupted final Table eleven1c = map1.get(11); assertNotNull(eleven1c); - final TableMap map4 = - (TableMap) treed4.getAttribute(Table.HIERARCHICAL_CHILDREN_TABLE_MAP_ATTRIBUTE); - final TableMap map5 = - (TableMap) treed5.getAttribute(Table.HIERARCHICAL_CHILDREN_TABLE_MAP_ATTRIBUTE); - final TableMap map6 = - (TableMap) treed6.getAttribute(Table.HIERARCHICAL_CHILDREN_TABLE_MAP_ATTRIBUTE); + final TableMap map4 = (TableMap) treed4.getAttribute(Table.HIERARCHICAL_CHILDREN_TABLE_MAP_ATTRIBUTE); + final TableMap map5 = (TableMap) treed5.getAttribute(Table.HIERARCHICAL_CHILDREN_TABLE_MAP_ATTRIBUTE); + final TableMap map6 = (TableMap) treed6.getAttribute(Table.HIERARCHICAL_CHILDREN_TABLE_MAP_ATTRIBUTE); 
assertNotNull(map4.get(11)); assertNotNull(map5.get(11)); assertNotNull(map6.get(11)); @@ -324,51 +306,48 @@ public void testConcurrentInstantiation() throws ExecutionException, Interrupted ii = 1; for (Table treed : Arrays.asList(treed1, treed2, treed3, treed4, treed5, treed6)) { - doCompareWithChildrenForTrees("testConcurrentInstantiation" + ii++, expect, treed, - 0, 4, hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + doCompareWithChildrenForTrees("testConcurrentInstantiation" + ii++, expect, treed, 0, 4, + hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY); } - final Table backwardsExpected = LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked( - () -> source.sortDescending("Extra").treeTable("Sentinel", "Parent")); + final Table backwardsExpected = LiveTableMonitor.DEFAULT.exclusiveLock() + .computeLocked(() -> source.sortDescending("Extra").treeTable("Sentinel", "Parent")); ii = 1; - for (Table treed : Arrays.asList(backwardsTree1a, backwardsTree1b, backwardsTree1c, - backwardsTree2a, backwardsTree2b, backwardsTree3)) { - doCompareWithChildrenForTrees("testConcurrentInstantiationBackward" + ii++, - backwardsExpected, treed, 0, 4, hierarchicalColumnName, - CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + for (Table treed : Arrays.asList(backwardsTree1a, backwardsTree1b, backwardsTree1c, backwardsTree2a, + backwardsTree2b, backwardsTree3)) { + doCompareWithChildrenForTrees("testConcurrentInstantiationBackward" + ii++, backwardsExpected, treed, 0, + 4, hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY); } } finally { QueryTable.setMemoizeResults(oldMemoize); } } - public void testConcurrentInstantiationOfSort() - throws ExecutionException, InterruptedException { + public void testConcurrentInstantiationOfSort() throws ExecutionException, InterruptedException { final Logger log = new StreamLoggerImpl(System.out, LogLevel.DEBUG); final boolean oldMemoize = QueryTable.setMemoizeResults(false); try { final QueryTable 
source = TstUtils.testRefreshingTable(Index.FACTORY.getFlatIndex(10), - col("Sentinel", 1, 2, 3, 4, 5, 6, 7, 8, 9, 10), - col("Parent", NULL_INT, NULL_INT, 1, 1, 2, 3, 5, 5, 3, 2), - col("Extra", "a", "b", "c", "d", "e", "f", "g", "h", "i", "j")); + col("Sentinel", 1, 2, 3, 4, 5, 6, 7, 8, 9, 10), + col("Parent", NULL_INT, NULL_INT, 1, 1, 2, 3, 5, 5, 3, 2), + col("Extra", "a", "b", "c", "d", "e", "f", "g", "h", "i", "j")); final QueryTable source2 = TstUtils.testRefreshingTable(Index.FACTORY.getFlatIndex(11), - col("Sentinel", 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12), - col("Parent", NULL_INT, 1, 1, 2, 3, 5, 5, 3, 2, NULL_INT, 11), - col("Extra", "bb", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l")); + col("Sentinel", 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12), + col("Parent", NULL_INT, 1, 1, 2, 3, 5, 5, 3, 2, NULL_INT, 11), + col("Extra", "bb", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l")); final java.util.function.Function doSort = t -> t.sortDescending("Extra"); - final java.util.function.Function doTree = - t -> t.treeTable("Sentinel", "Parent"); + final java.util.function.Function doTree = t -> t.treeTable("Sentinel", "Parent"); final java.util.function.Function doSortAndTree = doSort.andThen(doTree); - final Table expect = LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLocked(() -> doSortAndTree.apply(source)); + final Table expect = + LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked(() -> doSortAndTree.apply(source)); final Table expectOriginal = LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLocked(() -> doSortAndTree.apply(makeStatic(source))); + .computeLocked(() -> doSortAndTree.apply(makeStatic(source))); final Table expect2 = LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLocked(() -> doSortAndTree.apply(makeStatic(source2))); + .computeLocked(() -> doSortAndTree.apply(makeStatic(source2))); final String hierarchicalColumnName = getHierarchicalColumnName(expect); @@ -381,52 +360,48 @@ public void testConcurrentInstantiationOfSort() final 
Table treed1 = pool.submit(() -> doSortAndTree.apply(source)).get(); final Table sorted1 = pool.submit(() -> doSort.apply(source)).get(); - doCompareWithChildrenForTrees("testConcurrentInstantiation", treed1, expect, 0, 10, - hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + doCompareWithChildrenForTrees("testConcurrentInstantiation", treed1, expect, 0, 10, hierarchicalColumnName, + CollectionUtil.ZERO_LENGTH_STRING_ARRAY); TstUtils.removeRows(source, i(0)); - TstUtils.addToTable(source, i(1, 11, 12), c("Sentinel", 2, 11, 12), - c("Parent", NULL_INT, NULL_INT, 11), c("Extra", "bb", "k", "l")); + TstUtils.addToTable(source, i(1, 11, 12), c("Sentinel", 2, 11, 12), c("Parent", NULL_INT, NULL_INT, 11), + c("Extra", "bb", "k", "l")); final Table treed2a = pool.submit(() -> doSortAndTree.apply(source)).get(); final Table treed2b = pool.submit(() -> doTree.apply(sorted0)).get(); final Table treed2c = pool.submit(() -> doTree.apply(sorted1)).get(); - doCompareWithChildrenForTrees("testConcurrentInstantiation", treed1, expectOriginal, - true, false, 0, 10, hierarchicalColumnName, - CollectionUtil.ZERO_LENGTH_STRING_ARRAY); - doCompareWithChildrenForTrees("testConcurrentInstantiation", treed2a, expectOriginal, - true, false, 0, 10, hierarchicalColumnName, - CollectionUtil.ZERO_LENGTH_STRING_ARRAY); - doCompareWithChildrenForTrees("testConcurrentInstantiation", treed2b, expectOriginal, - true, false, 0, 10, hierarchicalColumnName, - CollectionUtil.ZERO_LENGTH_STRING_ARRAY); - doCompareWithChildrenForTrees("testConcurrentInstantiation", treed2c, expectOriginal, - true, false, 0, 10, hierarchicalColumnName, - CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + doCompareWithChildrenForTrees("testConcurrentInstantiation", treed1, expectOriginal, true, false, 0, 10, + hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + doCompareWithChildrenForTrees("testConcurrentInstantiation", treed2a, expectOriginal, true, false, 0, 10, + hierarchicalColumnName, 
CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + doCompareWithChildrenForTrees("testConcurrentInstantiation", treed2b, expectOriginal, true, false, 0, 10, + hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + doCompareWithChildrenForTrees("testConcurrentInstantiation", treed2c, expectOriginal, true, false, 0, 10, + hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY); source.notifyListeners(i(11, 12), i(0), i(1)); LiveTableMonitor.DEFAULT.flushAllNormalNotificationsForUnitTests(); // everything should have current values now - doCompareWithChildrenForTrees("testConcurrentInstantiation", treed1, expect2, false, - false, 0, 4, hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY); - doCompareWithChildrenForTrees("testConcurrentInstantiation", treed2a, expect2, false, - false, 0, 4, hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY); - doCompareWithChildrenForTrees("testConcurrentInstantiation", treed2b, expect2, false, - false, 0, 4, hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY); - doCompareWithChildrenForTrees("testConcurrentInstantiation", treed2c, expect2, false, - false, 0, 4, hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + doCompareWithChildrenForTrees("testConcurrentInstantiation", treed1, expect2, false, false, 0, 4, + hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + doCompareWithChildrenForTrees("testConcurrentInstantiation", treed2a, expect2, false, false, 0, 4, + hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + doCompareWithChildrenForTrees("testConcurrentInstantiation", treed2b, expect2, false, false, 0, 4, + hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + doCompareWithChildrenForTrees("testConcurrentInstantiation", treed2c, expect2, false, false, 0, 4, + hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY); // but still have a previous value for things that are old - 
doCompareWithChildrenForTrees("testConcurrentInstantiation", treed1, expectOriginal, - true, false, 0, 4, hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY); - doCompareWithChildrenForTrees("testConcurrentInstantiation", treed2a, expectOriginal, - true, false, 0, 4, hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY); - doCompareWithChildrenForTrees("testConcurrentInstantiation", treed2b, expectOriginal, - true, false, 0, 4, hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY); - doCompareWithChildrenForTrees("testConcurrentInstantiation", treed2c, expectOriginal, - true, false, 0, 4, hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + doCompareWithChildrenForTrees("testConcurrentInstantiation", treed1, expectOriginal, true, false, 0, 4, + hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + doCompareWithChildrenForTrees("testConcurrentInstantiation", treed2a, expectOriginal, true, false, 0, 4, + hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + doCompareWithChildrenForTrees("testConcurrentInstantiation", treed2b, expectOriginal, true, false, 0, 4, + hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + doCompareWithChildrenForTrees("testConcurrentInstantiation", treed2c, expectOriginal, true, false, 0, 4, + hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY); // we now initialize things after the notification is complete final Table treed3a = pool.submit(() -> doSortAndTree.apply(source)).get(); @@ -441,15 +416,14 @@ public void testConcurrentInstantiationOfSort() dumpRollup(treed3c, hierarchicalColumnName); // everything should have current values now - doCompareWithChildrenForTrees("testConcurrentInstantiation", treed3a, expect2, false, - false, 0, 4, hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY); - doCompareWithChildrenForTrees("testConcurrentInstantiation", treed3b, expect2, false, - false, 0, 4, hierarchicalColumnName, 
CollectionUtil.ZERO_LENGTH_STRING_ARRAY); - doCompareWithChildrenForTrees("testConcurrentInstantiation", treed3c, expect2, false, - false, 0, 4, hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY); - - // Note that previous is not defined to be the starting value, now that redirectToGet - // has been discontinued. + doCompareWithChildrenForTrees("testConcurrentInstantiation", treed3a, expect2, false, false, 0, 4, + hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + doCompareWithChildrenForTrees("testConcurrentInstantiation", treed3b, expect2, false, false, 0, 4, + hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + doCompareWithChildrenForTrees("testConcurrentInstantiation", treed3c, expect2, false, false, 0, 4, + hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + + // Note that previous is not defined to be the starting value, now that redirectToGet has been discontinued. assertTableEquals(sorted0Original, prevTable(sorted0)); assertTableEquals(sorted0Original, prevTable(sorted1)); assertTableEquals(sorted2, sorted0); @@ -457,20 +431,20 @@ public void testConcurrentInstantiationOfSort() LiveTableMonitor.DEFAULT.completeCycleForUnitTests(); - doCompareWithChildrenForTrees("testConcurrentInstantiation", expect2, treed1, 0, 4, - hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY); - doCompareWithChildrenForTrees("testConcurrentInstantiation", expect2, treed2a, 0, 4, - hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY); - doCompareWithChildrenForTrees("testConcurrentInstantiation", expect2, treed2b, 0, 4, - hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY); - doCompareWithChildrenForTrees("testConcurrentInstantiation", expect2, treed2c, 0, 4, - hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY); - doCompareWithChildrenForTrees("testConcurrentInstantiation", expect2, treed3a, 0, 4, - hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY); - 
doCompareWithChildrenForTrees("testConcurrentInstantiation", expect2, treed3b, 0, 4, - hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY); - doCompareWithChildrenForTrees("testConcurrentInstantiation", expect2, treed3c, 0, 4, - hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + doCompareWithChildrenForTrees("testConcurrentInstantiation", expect2, treed1, 0, 4, hierarchicalColumnName, + CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + doCompareWithChildrenForTrees("testConcurrentInstantiation", expect2, treed2a, 0, 4, hierarchicalColumnName, + CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + doCompareWithChildrenForTrees("testConcurrentInstantiation", expect2, treed2b, 0, 4, hierarchicalColumnName, + CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + doCompareWithChildrenForTrees("testConcurrentInstantiation", expect2, treed2c, 0, 4, hierarchicalColumnName, + CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + doCompareWithChildrenForTrees("testConcurrentInstantiation", expect2, treed3a, 0, 4, hierarchicalColumnName, + CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + doCompareWithChildrenForTrees("testConcurrentInstantiation", expect2, treed3b, 0, 4, hierarchicalColumnName, + CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + doCompareWithChildrenForTrees("testConcurrentInstantiation", expect2, treed3c, 0, 4, hierarchicalColumnName, + CollectionUtil.ZERO_LENGTH_STRING_ARRAY); } finally { QueryTable.setMemoizeResults(oldMemoize); @@ -483,10 +457,10 @@ private Table makeStatic(QueryTable source) { public void testTreeTableStaticFilter() { final Table source = TableTools.newTable(intCol("Sentinel", 1, 2, 3, 4, 5, 6, 7, 8, 9, 10), - col("Parent", NULL_INT, NULL_INT, 1, 1, 2, 3, 5, 5, 3, 6)); + col("Parent", NULL_INT, NULL_INT, 1, 1, 2, 3, 5, 5, 3, 6)); - final Table treed = LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLocked(() -> source.treeTable("Sentinel", "Parent")); + final Table treed = + LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked(() -> 
source.treeTable("Sentinel", "Parent")); TableTools.showWithIndex(treed); final String hierarchicalColumnName = getHierarchicalColumnName(treed); @@ -496,8 +470,7 @@ public void testTreeTableStaticFilter() { TableTools.showWithIndex(filtered); assertEquals(1, filtered.size()); - assertTrue(Arrays.equals(new int[] {NULL_INT, NULL_INT}, - (int[]) treed.getColumn("Parent").getDirect())); + assertTrue(Arrays.equals(new int[] {NULL_INT, NULL_INT}, (int[]) treed.getColumn("Parent").getDirect())); final Table child1 = getChildTable(filtered, filtered, hierarchicalColumnName, 0); assertNotNull(child1); @@ -507,11 +480,11 @@ public void testTreeTableStaticFilter() { public void testTreeTableSimpleFilter() { final QueryTable source = TstUtils.testRefreshingTable(Index.FACTORY.getFlatIndex(10), - col("Sentinel", 1, 2, 3, 4, 5, 6, 7, 8, 9, 10), - col("Parent", NULL_INT, NULL_INT, 1, 1, 2, 3, 5, 5, 3, 6)); + col("Sentinel", 1, 2, 3, 4, 5, 6, 7, 8, 9, 10), + col("Parent", NULL_INT, NULL_INT, 1, 1, 2, 3, 5, 5, 3, 6)); - final Table treed = LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLocked(() -> source.treeTable("Sentinel", "Parent")); + final Table treed = + LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked(() -> source.treeTable("Sentinel", "Parent")); TableTools.showWithIndex(treed); final String hierarchicalColumnName = getHierarchicalColumnName(treed); @@ -522,8 +495,7 @@ public void testTreeTableSimpleFilter() { TableTools.showWithIndex(filtered); assertEquals(1, filtered.size()); - assertTrue(Arrays.equals(new int[] {NULL_INT, NULL_INT}, - (int[]) treed.getColumn("Parent").getDirect())); + assertTrue(Arrays.equals(new int[] {NULL_INT, NULL_INT}, (int[]) treed.getColumn("Parent").getDirect())); final Table child1 = getChildTable(filtered, filtered, hierarchicalColumnName, 0); assertNotNull(child1); @@ -600,10 +572,9 @@ public void testTreeTableSimpleFilter() { public void testOrphanPromoterSimple() { final QueryTable source = 
TstUtils.testRefreshingTable(Index.FACTORY.getFlatIndex(4), - col("Sentinel", 1, 2, 3, 4), col("Parent", NULL_INT, NULL_INT, 1, 5)); + col("Sentinel", 1, 2, 3, 4), col("Parent", NULL_INT, NULL_INT, 1, 5)); - final Table treed = - LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked(() -> TreeTableOrphanPromoter + final Table treed = LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked(() -> TreeTableOrphanPromoter .promoteOrphans(source, "Sentinel", "Parent").treeTable("Sentinel", "Parent")); TableTools.showWithIndex(treed); assertEquals(3, treed.size()); @@ -635,16 +606,14 @@ public void testOrphanPromoterSimple() { private static String getHierarchicalColumnName(Table treed) { final HierarchicalTableInfo info = - (HierarchicalTableInfo) treed.getAttribute(Table.HIERARCHICAL_SOURCE_INFO_ATTRIBUTE); + (HierarchicalTableInfo) treed.getAttribute(Table.HIERARCHICAL_SOURCE_INFO_ATTRIBUTE); return info.getHierarchicalColumnName(); } - private Table getChildTable(Table root, Table treed, String hierarchicalColumnName, - long index) { + private Table getChildTable(Table root, Table treed, String hierarchicalColumnName, long index) { final Object childKey1 = treed.getColumn(hierarchicalColumnName).get(index); final Table table = - ((TableMap) root.getAttribute(Table.HIERARCHICAL_CHILDREN_TABLE_MAP_ATTRIBUTE)) - .get(childKey1); + ((TableMap) root.getAttribute(Table.HIERARCHICAL_CHILDREN_TABLE_MAP_ATTRIBUTE)).get(childKey1); if (table == null || table.isEmpty()) { return null; } @@ -653,9 +622,9 @@ private Table getChildTable(Table root, Table treed, String hierarchicalColumnNa public void testTreeTableEdgeCases() { final QueryTable source = TstUtils.testRefreshingTable(Index.FACTORY.getFlatIndex(4), - col("Sentinel", 0, 1, 2, 3), - col("Filter", 0, 0, 0, 0), - col("Parent", NULL_INT, NULL_INT, NULL_INT, NULL_INT)); + col("Sentinel", 0, 1, 2, 3), + col("Filter", 0, 0, 0, 0), + col("Parent", NULL_INT, NULL_INT, NULL_INT, NULL_INT)); final EvalNugget[] en = new 
EvalNugget[] { new EvalNugget() { @@ -735,8 +704,7 @@ public String toString() { } } - public static class ParentChildGenerator - implements TstUtils.Generator { + public static class ParentChildGenerator implements TstUtils.Generator { final double rootFraction; final double createAsOrphanFraction; @@ -750,8 +718,7 @@ public ParentChildGenerator(double rootFraction, double createAsOrphanFraction) } @Override - public TreeMap populateMap(TreeMap values, - Index toAdd, Random random) { + public TreeMap populateMap(TreeMap values, Index toAdd, Random random) { final TreeMap result = new TreeMap<>(); for (final Index.Iterator it = toAdd.iterator(); it.hasNext();) { @@ -763,11 +730,10 @@ public TreeMap populateMap(TreeMap value return result; } - private void add(Random random, TreeMap values, - TreeMap result, long key) { + private void add(Random random, TreeMap values, TreeMap result, + long key) { if (values.containsKey(key)) { - // this is a modification, for now let's keep it actually the same, because - // otherwise it is hard + // this is a modification, for now let's keep it actually the same, because otherwise it is hard final IdParentPair existing = values.get(key); // final boolean isOrphan = orphans.containsKey(existing.id); @@ -801,8 +767,7 @@ private void add(Random random, TreeMap values, if (asOrphan) { satisfied = !orphans.isEmpty() && orphans.keySet().contains(parent); } else { - satisfied = - !parentToChild.isEmpty() && parentToChild.keySet().contains(parent); + satisfied = !parentToChild.isEmpty() && parentToChild.keySet().contains(parent); } nextIdx = (nextIdx + 1) % usedIds.size(); } while (!satisfied && nextIdx != startIdx); @@ -825,8 +790,8 @@ public void onRemove(long key, IdParentPair remove) { } private void doOrphan(int parentToOrphan) { - final Set orphanKeys = Require.neqNull(parentToChild.remove(parentToOrphan), - Integer.toString(parentToOrphan)); + final Set orphanKeys = + Require.neqNull(parentToChild.remove(parentToOrphan), 
Integer.toString(parentToOrphan)); orphans.put(parentToOrphan, orphanKeys); orphanKeys.forEach(this::doOrphan); } @@ -863,12 +828,11 @@ static abstract class HierarchicalTableEvalNugget extends EvalNugget { @Override void checkDifferences(String msg, Table recomputed) { - compareWithChildren(msg, originalValue, recomputed, - getHierarchicalColumnName(recomputed)); + compareWithChildren(msg, originalValue, recomputed, getHierarchicalColumnName(recomputed)); } abstract void compareWithChildren(String msg, Table originalValue, Table recomputed, - String hierarchicalColumnName); + String hierarchicalColumnName); @Override void showResult(String label, Table e) { @@ -887,10 +851,9 @@ static abstract class TreeTableEvalNugget extends HierarchicalTableEvalNugget { super(maxLevels, sortColumns); } - void compareWithChildren(String msg, Table originalValue, Table recomputed, - String hierarchicalColumnName) { - doCompareWithChildrenForTrees(msg, originalValue, recomputed, 0, maxLevels.get(), - hierarchicalColumnName, sortColumns); + void compareWithChildren(String msg, Table originalValue, Table recomputed, String hierarchicalColumnName) { + doCompareWithChildrenForTrees(msg, originalValue, recomputed, 0, maxLevels.get(), hierarchicalColumnName, + sortColumns); } } @@ -904,56 +867,49 @@ static abstract class RollupEvalNugget extends HierarchicalTableEvalNugget { super(maxLevels, sortColumns); } - void compareWithChildren(String msg, Table originalValue, Table recomputed, - String hierarchicalColumnName) { - doCompareWithChildrenForRollups(msg, originalValue, recomputed, 0, maxLevels.get(), - hierarchicalColumnName, sortColumns); + void compareWithChildren(String msg, Table originalValue, Table recomputed, String hierarchicalColumnName) { + doCompareWithChildrenForRollups(msg, originalValue, recomputed, 0, maxLevels.get(), hierarchicalColumnName, + sortColumns); } } - static private void doCompareWithChildrenForTrees(String msg, Table actualValue, - Table expectedValue, int 
levels, int maxLevels, String hierarchicalColumnName, - String[] sortColumns) { - doCompareWithChildrenForTrees(msg, actualValue, expectedValue, false, false, levels, - maxLevels, hierarchicalColumnName, sortColumns); + static private void doCompareWithChildrenForTrees(String msg, Table actualValue, Table expectedValue, int levels, + int maxLevels, String hierarchicalColumnName, String[] sortColumns) { + doCompareWithChildrenForTrees(msg, actualValue, expectedValue, false, false, levels, maxLevels, + hierarchicalColumnName, sortColumns); } - static private void doCompareWithChildrenForTrees(String msg, Table actualValue, - Table expectedValue, boolean actualPrev, boolean expectedPrev, int levels, int maxLevels, - String hierarchicalColumnName, String[] sortColumns) { + static private void doCompareWithChildrenForTrees(String msg, Table actualValue, Table expectedValue, + boolean actualPrev, boolean expectedPrev, int levels, int maxLevels, String hierarchicalColumnName, + String[] sortColumns) { doCompareWithChildren( - t -> (TableMap) actualValue - .getAttribute(Table.HIERARCHICAL_CHILDREN_TABLE_MAP_ATTRIBUTE), - t -> (TableMap) expectedValue - .getAttribute(Table.HIERARCHICAL_CHILDREN_TABLE_MAP_ATTRIBUTE), - msg, actualValue, expectedValue, actualPrev, expectedPrev, levels, maxLevels, - hierarchicalColumnName, sortColumns); + t -> (TableMap) actualValue.getAttribute(Table.HIERARCHICAL_CHILDREN_TABLE_MAP_ATTRIBUTE), + t -> (TableMap) expectedValue.getAttribute(Table.HIERARCHICAL_CHILDREN_TABLE_MAP_ATTRIBUTE), + msg, actualValue, expectedValue, actualPrev, expectedPrev, levels, maxLevels, hierarchicalColumnName, + sortColumns); } - static private void doCompareWithChildrenForRollups(String msg, Table originalValue, - Table recomputed, int levels, int maxLevels, String hierarchicalColumnName, - String[] sortColumns) { + static private void doCompareWithChildrenForRollups(String msg, Table originalValue, Table recomputed, int levels, + int maxLevels, String 
hierarchicalColumnName, String[] sortColumns) { doCompareWithChildren( - t -> (TableMap) t.getAttribute(Table.HIERARCHICAL_CHILDREN_TABLE_MAP_ATTRIBUTE), - t -> (TableMap) t.getAttribute(Table.HIERARCHICAL_CHILDREN_TABLE_MAP_ATTRIBUTE), - msg, originalValue, recomputed, false, false, levels, maxLevels, hierarchicalColumnName, - sortColumns); + t -> (TableMap) t.getAttribute(Table.HIERARCHICAL_CHILDREN_TABLE_MAP_ATTRIBUTE), + t -> (TableMap) t.getAttribute(Table.HIERARCHICAL_CHILDREN_TABLE_MAP_ATTRIBUTE), + msg, originalValue, recomputed, false, false, levels, maxLevels, hierarchicalColumnName, sortColumns); } static private void doCompareWithChildren(Function.Unary actualMapSource, - Function.Unary expectedMapSource, - String msg, - Table actualValueIn, - Table expectedValueIn, - boolean actualPrev, - boolean expectedPrev, - int levels, - int maxLevels, - String hierarchicalColumnName, - String[] sortColumns) { + Function.Unary expectedMapSource, + String msg, + Table actualValueIn, + Table expectedValueIn, + boolean actualPrev, + boolean expectedPrev, + int levels, + int maxLevels, + String hierarchicalColumnName, + String[] sortColumns) { if (levels > maxLevels) { - throw new IllegalStateException( - "Refusing to validate levels " + levels + ", to prevent infinite looping!"); + throw new IllegalStateException("Refusing to validate levels " + levels + ", to prevent infinite looping!"); } Table actualValue = getDiffableTable(actualValueIn); @@ -964,16 +920,13 @@ static private void doCompareWithChildren(Function.Unary actual expectedValue = expectedValue.sort(sortColumns); } - final String diff = - diff(maybePrev(actualValue.dropColumns(hierarchicalColumnName), actualPrev), + final String diff = diff(maybePrev(actualValue.dropColumns(hierarchicalColumnName), actualPrev), maybePrev(expectedValue.dropColumns(hierarchicalColumnName), expectedPrev), 10, EnumSet.of(TableDiff.DiffItems.DoublesExact)); Assert.assertEquals(msg, "", diff); - final ColumnSource 
actualChildren = - columnOrPrev(actualValue, hierarchicalColumnName, actualPrev); - final ColumnSource expectedChildren = - columnOrPrev(expectedValue, hierarchicalColumnName, expectedPrev); + final ColumnSource actualChildren = columnOrPrev(actualValue, hierarchicalColumnName, actualPrev); + final ColumnSource expectedChildren = columnOrPrev(expectedValue, hierarchicalColumnName, expectedPrev); final TableMap actualMap = actualMapSource.call(actualValue); final TableMap expectedMap = expectedMapSource.call(expectedValue); @@ -1001,8 +954,8 @@ static private void doCompareWithChildren(Function.Unary actual assertEquals(ac == null || ac.size() == 0, ec == null || ec.size() == 0); if ((ac != null && ac.size() > 0) && (ec != null && ec.size() > 0)) { - doCompareWithChildren(actualMapSource, expectedMapSource, msg, ac, ec, actualPrev, - expectedPrev, levels + 1, maxLevels, hierarchicalColumnName, sortColumns); + doCompareWithChildren(actualMapSource, expectedMapSource, msg, ac, ec, actualPrev, expectedPrev, + levels + 1, maxLevels, hierarchicalColumnName, sortColumns); } } } @@ -1018,21 +971,18 @@ private static Table maybePrev(Table table, boolean usePrev) { private static ColumnSource columnOrPrev(Table table, String columnName, boolean usePrev) { // noinspection unchecked - return usePrev ? new PrevColumnSource(table.getColumnSource(columnName)) - : table.getColumnSource(columnName); + return usePrev ? new PrevColumnSource(table.getColumnSource(columnName)) : table.getColumnSource(columnName); } private static Index indexOrPrev(Table table, boolean usePrev) { return usePrev ? 
table.getIndex().getPrevIndex() : table.getIndex(); } - private void testTreeTableIncremental(final int size, final long seed, - final MutableInt numSteps) { + private void testTreeTableIncremental(final int size, final long seed, final MutableInt numSteps) { final Random random = new Random(seed); final ParentChildGenerator parentChildGenerator = new ParentChildGenerator(0.25, 0); - final TstUtils.ColumnInfo[] columnInfo = - initColumnInfos(new String[] {"IDPair", "Sentinel", "Sym"}, + final TstUtils.ColumnInfo[] columnInfo = initColumnInfos(new String[] {"IDPair", "Sentinel", "Sym"}, parentChildGenerator, new IntGenerator(0, 1_000_000_000), new SetGenerator<>("AAPL", "TSLA", "VXX", "SPY")); @@ -1044,8 +994,7 @@ private void testTreeTableIncremental(final int size, final long seed, TableTools.showWithIndex(table); } - final Table prepared = - table.update("ID=IDPair.getId()", "Parent=IDPair.getParent()").dropColumns("IDPair"); + final Table prepared = table.update("ID=IDPair.getId()", "Parent=IDPair.getParent()").dropColumns("IDPair"); if (LiveTableTestCase.printTableUpdates) { System.out.println("Original Prepared:"); @@ -1066,32 +1015,31 @@ public void show() { @Override protected Table e() { return LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLocked(() -> prepared.treeTable("ID", "Parent")); + .computeLocked(() -> prepared.treeTable("ID", "Parent")); } }, new TreeTableEvalNugget(prepared) { @Override protected Table e() { return LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLocked(() -> prepared.sort("Sym").treeTable("ID", "Parent")); + .computeLocked(() -> prepared.sort("Sym").treeTable("ID", "Parent")); } }, new TreeTableEvalNugget(prepared) { @Override protected Table e() { - return LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked( - () -> prepared.sort("Sentinel").treeTable("ID", "Parent")); + return LiveTableMonitor.DEFAULT.exclusiveLock() + .computeLocked(() -> prepared.sort("Sentinel").treeTable("ID", "Parent")); } }, new 
TreeTableEvalNugget(prepared) { @Override protected Table e() { - return TreeTableFilter.filterTree(prepared.treeTable("ID", "Parent"), - "Sentinel % 2 == 1"); + return TreeTableFilter.filterTree(prepared.treeTable("ID", "Parent"), "Sentinel % 2 == 1"); } }, - EvalNugget.from(() -> TreeTableFilter - .rawFilterTree(prepared.treeTable("ID", "Parent"), "Sentinel % 2 == 1")), + EvalNugget.from( + () -> TreeTableFilter.rawFilterTree(prepared.treeTable("ID", "Parent"), "Sentinel % 2 == 1")), }; final int maxSteps = numSteps.intValue(); @@ -1115,13 +1063,12 @@ private void testOrphanPromoter(final int size, int seed, MutableInt numSteps) { final ColumnInfo[] columnInfo; final ParentChildGenerator parentChildGenerator = new ParentChildGenerator(0.25, 0.25); final QueryTable table = getTable(size, random, - columnInfo = initColumnInfos(new String[] {"IDPair", "Sentinel", "Sym"}, - parentChildGenerator, - new TstUtils.IntGenerator(0, 1_000_000_000), - new TstUtils.SetGenerator<>("AAPL", "TSLA", "VXX", "SPY"))); + columnInfo = initColumnInfos(new String[] {"IDPair", "Sentinel", "Sym"}, + parentChildGenerator, + new TstUtils.IntGenerator(0, 1_000_000_000), + new TstUtils.SetGenerator<>("AAPL", "TSLA", "VXX", "SPY"))); - final Table prepared = - table.update("ID=IDPair.getId()", "Parent=IDPair.getParent()").dropColumns("IDPair"); + final Table prepared = table.update("ID=IDPair.getId()", "Parent=IDPair.getParent()").dropColumns("IDPair"); final EvalNuggetInterface en[] = new EvalNuggetInterface[] { new EvalNuggetInterface() { @@ -1136,25 +1083,24 @@ public void show() { new EvalNugget() { @Override protected Table e() { - return LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked( - () -> TreeTableOrphanPromoter.promoteOrphans(prepared, "ID", "Parent")); + return LiveTableMonitor.DEFAULT.exclusiveLock() + .computeLocked(() -> TreeTableOrphanPromoter.promoteOrphans(prepared, "ID", "Parent")); } }, new EvalNugget() { @Override protected Table e() { - return 
LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLocked(() -> TreeTableOrphanPromoter.promoteOrphans( - prepared.where("Sentinel % 2 == 0"), "ID", "Parent")); + return LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked(() -> TreeTableOrphanPromoter + .promoteOrphans(prepared.where("Sentinel % 2 == 0"), "ID", "Parent")); } }, new TreeTableEvalNugget(prepared) { @Override protected Table e() { return LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLocked(() -> TreeTableOrphanPromoter - .promoteOrphans(prepared.where("Sentinel % 2 == 0"), "ID", "Parent") - .treeTable("ID", "Parent")); + .computeLocked(() -> TreeTableOrphanPromoter + .promoteOrphans(prepared.where("Sentinel % 2 == 0"), "ID", "Parent") + .treeTable("ID", "Parent")); } }, }; @@ -1167,35 +1113,32 @@ protected Table e() { } } - private static void dumpRollup(Table root, String hierarchicalColumnName, - String... labelColumns) { + private static void dumpRollup(Table root, String hierarchicalColumnName, String... labelColumns) { dumpRollup(root, false, hierarchicalColumnName, labelColumns); } - private static void dumpRollup(Table root, boolean usePrev, String hierarchicalColumnName, - String... labelColumns) { + private static void dumpRollup(Table root, boolean usePrev, String hierarchicalColumnName, String... labelColumns) { final HierarchicalTableInfo info = - (HierarchicalTableInfo) root.getAttribute(Table.HIERARCHICAL_SOURCE_INFO_ATTRIBUTE); + (HierarchicalTableInfo) root.getAttribute(Table.HIERARCHICAL_SOURCE_INFO_ATTRIBUTE); final TableMap map = info instanceof TreeTableInfo - ? (TableMap) root.getAttribute(Table.HIERARCHICAL_CHILDREN_TABLE_MAP_ATTRIBUTE) - : null; + ? (TableMap) root.getAttribute(Table.HIERARCHICAL_CHILDREN_TABLE_MAP_ATTRIBUTE) + : null; dumpRollup(root, map, usePrev, hierarchicalColumnName, labelColumns); } - private static void dumpRollup(Table root, TableMap childMap, boolean usePrev, - String hierarchicalColumnName, String... 
labelColumns) { + private static void dumpRollup(Table root, TableMap childMap, boolean usePrev, String hierarchicalColumnName, + String... labelColumns) { TableTools.showWithIndex(usePrev ? prevTable(root) : root, 101); final List labelSource = - Arrays.stream(labelColumns).map(root::getColumnSource).collect(Collectors.toList()); + Arrays.stream(labelColumns).map(root::getColumnSource).collect(Collectors.toList()); final ColumnSource children = columnOrPrev(root, hierarchicalColumnName, usePrev); - final TableMap map = childMap == null - ? (TableMap) root.getAttribute(Table.HIERARCHICAL_CHILDREN_TABLE_MAP_ATTRIBUTE) - : childMap; - final ReverseLookup reverseLookup = - (ReverseLookup) root.getAttribute(Table.REVERSE_LOOKUP_ATTRIBUTE); + final TableMap map = + childMap == null ? (TableMap) root.getAttribute(Table.HIERARCHICAL_CHILDREN_TABLE_MAP_ATTRIBUTE) + : childMap; + final ReverseLookup reverseLookup = (ReverseLookup) root.getAttribute(Table.REVERSE_LOOKUP_ATTRIBUTE); if (reverseLookup != null) { System.out.println("Reverse Lookup is set."); } else { @@ -1219,8 +1162,7 @@ private static void dumpRollup(Table root, TableMap childMap, boolean usePrev, continue; } - System.out.println(labelSource.stream().map(x -> (String) x.get(key)) - .collect(Collectors.joining(", "))); + System.out.println(labelSource.stream().map(x -> (String) x.get(key)).collect(Collectors.joining(", "))); dumpRollup(childTable, childMap, usePrev, hierarchicalColumnName, labelColumns); } } @@ -1244,43 +1186,42 @@ public void testRollupReverseLookup() { final Random random = new Random(0); final int size = 100; - final QueryTable table = getTable(size, random, initColumnInfos( - new String[] {"USym", "DateTime", "IntCol", "DoubleCol", "BoolCol", "BigIntCol", - "BigDecCol"}, - new SetGenerator<>("AAPL", "TSLA", "VXX", "SPY"), - new SetGenerator<>(DBTimeUtils.convertDateTime("2020-01-01T00:00:00 NY"), null, - DBTimeUtils.convertDateTime("2020-02-28T14:30:00 NY")), - new IntGenerator(0, 
1_000_000), - new DoubleGenerator(-100, 100), - new BooleanGenerator(0.4, 0.1), - new BigIntegerGenerator(BigInteger.ZERO, BigInteger.valueOf(100), 0.1), - new BigDecimalGenerator(BigInteger.valueOf(-1000), BigInteger.valueOf(1000), 5, 0.1))); + final QueryTable table = getTable(size, random, + initColumnInfos( + new String[] {"USym", "DateTime", "IntCol", "DoubleCol", "BoolCol", "BigIntCol", "BigDecCol"}, + new SetGenerator<>("AAPL", "TSLA", "VXX", "SPY"), + new SetGenerator<>(DBTimeUtils.convertDateTime("2020-01-01T00:00:00 NY"), null, + DBTimeUtils.convertDateTime("2020-02-28T14:30:00 NY")), + new IntGenerator(0, 1_000_000), + new DoubleGenerator(-100, 100), + new BooleanGenerator(0.4, 0.1), + new BigIntegerGenerator(BigInteger.ZERO, BigInteger.valueOf(100), 0.1), + new BigDecimalGenerator(BigInteger.valueOf(-1000), BigInteger.valueOf(1000), 5, 0.1))); System.out.println("Source Data:"); TableTools.showWithIndex(table); - final Table rollup = LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked( - () -> table.rollup(comboAgg, "USym", "DateTime", "BoolCol", "BigIntCol", "BigDecCol")); + final Table rollup = LiveTableMonitor.DEFAULT.exclusiveLock() + .computeLocked(() -> table.rollup(comboAgg, "USym", "DateTime", "BoolCol", "BigIntCol", "BigDecCol")); verifyReverseLookup(rollup); - verifyReverseLookup(LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLocked(() -> table.rollup(comboAgg, "USym"))); - verifyReverseLookup(LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLocked(() -> table.rollup(comboAgg, "DateTime"))); - verifyReverseLookup(LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLocked(() -> table.rollup(comboAgg, "BoolCol"))); + verifyReverseLookup( + LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked(() -> table.rollup(comboAgg, "USym"))); + verifyReverseLookup( + LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked(() -> table.rollup(comboAgg, "DateTime"))); + verifyReverseLookup( + LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked(() 
-> table.rollup(comboAgg, "BoolCol"))); } private void verifyReverseLookup(Table rollup) { final String columnName = - ((HierarchicalTableInfo) rollup.getAttribute(Table.HIERARCHICAL_SOURCE_INFO_ATTRIBUTE)) - .getHierarchicalColumnName(); + ((HierarchicalTableInfo) rollup.getAttribute(Table.HIERARCHICAL_SOURCE_INFO_ATTRIBUTE)) + .getHierarchicalColumnName(); verifyReverseLookup(rollup, columnName); } private void verifyReverseLookup(Table rollup, String columnName) { - final ReverseLookup rl = - (ReverseLookup) rollup.getAttribute(Table.REVERSE_LOOKUP_ATTRIBUTE); + final ReverseLookup rl = (ReverseLookup) rollup.getAttribute(Table.REVERSE_LOOKUP_ATTRIBUTE); Assert.assertNotNull("rl", rl); final Set children = new LinkedHashSet<>(); final ColumnSource childSource = rollup.getColumnSource(columnName); @@ -1297,8 +1238,7 @@ private void verifyReverseLookup(Table rollup, String columnName) { TestCase.assertEquals(key, fromColumn); }); - final TableMap childMap = - (TableMap) rollup.getAttribute(Table.HIERARCHICAL_CHILDREN_TABLE_MAP_ATTRIBUTE); + final TableMap childMap = (TableMap) rollup.getAttribute(Table.HIERARCHICAL_CHILDREN_TABLE_MAP_ATTRIBUTE); for (final Object childKey : children) { verifyReverseLookup(childMap.get(childKey), columnName); } @@ -1309,8 +1249,8 @@ public void testRollupAbsSum() { } public void testRollupAverage() { - testSimpleRollup(AggCombo(AggAvg("IntCol", "DoubleCol", "BigIntCol", "BigDecCol", - "DoubleNanCol", "FloatNullCol"))); + testSimpleRollup( + AggCombo(AggAvg("IntCol", "DoubleCol", "BigIntCol", "BigDecCol", "DoubleNanCol", "FloatNullCol"))); } public void testRollupStd() { @@ -1338,24 +1278,19 @@ public void testRollupSortedFirst() { } public void testRollupCountDistinct() { - testSimpleRollup(AggCombo( - AggCountDistinct("IntCol", "DoubleCol", "FloatNullCol", "StringCol", "BoolCol"))); - testSimpleRollup(AggCombo( - AggCountDistinct(true, "IntCol", "DoubleCol", "FloatNullCol", "StringCol", "BoolCol"))); + 
testSimpleRollup(AggCombo(AggCountDistinct("IntCol", "DoubleCol", "FloatNullCol", "StringCol", "BoolCol"))); + testSimpleRollup( + AggCombo(AggCountDistinct(true, "IntCol", "DoubleCol", "FloatNullCol", "StringCol", "BoolCol"))); } public void testRollupDistinct() { - testSimpleRollup( - AggCombo(AggDistinct("IntCol", "DoubleCol", "FloatNullCol", "StringCol", "BoolCol"))); - testSimpleRollup(AggCombo( - AggDistinct(true, "IntCol", "DoubleCol", "FloatNullCol", "StringCol", "BoolCol"))); + testSimpleRollup(AggCombo(AggDistinct("IntCol", "DoubleCol", "FloatNullCol", "StringCol", "BoolCol"))); + testSimpleRollup(AggCombo(AggDistinct(true, "IntCol", "DoubleCol", "FloatNullCol", "StringCol", "BoolCol"))); } public void testRollupUnique() { - testSimpleRollup( - AggCombo(AggUnique("IntCol", "DoubleCol", "FloatNullCol", "StringCol", "BoolCol"))); - testSimpleRollup(AggCombo( - AggUnique(true, "IntCol", "DoubleCol", "FloatNullCol", "StringCol", "BoolCol"))); + testSimpleRollup(AggCombo(AggUnique("IntCol", "DoubleCol", "FloatNullCol", "StringCol", "BoolCol"))); + testSimpleRollup(AggCombo(AggUnique(true, "IntCol", "DoubleCol", "FloatNullCol", "StringCol", "BoolCol"))); } private void testSimpleRollup(ComboAggregateFactory comboAgg) { @@ -1363,29 +1298,27 @@ private void testSimpleRollup(ComboAggregateFactory comboAgg) { final int size = 10; final QueryTable table = getTable(size, random, - initColumnInfos( - new String[] {"USym", "Group", "IntCol", "DoubleCol", "DoubleNanCol", - "FloatNullCol", "StringCol", "BoolCol", "BigIntCol", "BigDecCol"}, - new TstUtils.SetGenerator<>("AAPL", "TSLA", "VXX", "SPY"), - new TstUtils.SetGenerator<>("Terran", "Vulcan", "Klingon", "Romulan"), - new TstUtils.IntGenerator(0, 1_000_000), - new TstUtils.DoubleGenerator(-100, 100), - new TstUtils.DoubleGenerator(-100, 100, 0.0, 0.1), - new TstUtils.FloatGenerator(-100, 100, 0.1, 0.0), - new TstUtils.SetGenerator<>("A", "B", "C", "D"), - new TstUtils.BooleanGenerator(.5, .1), - new 
TstUtils.BigIntegerGenerator(BigInteger.ZERO, BigInteger.valueOf(100), 0.1), - new TstUtils.BigDecimalGenerator(BigInteger.valueOf(-1000), - BigInteger.valueOf(1000), 5, 0.1))); + initColumnInfos( + new String[] {"USym", "Group", "IntCol", "DoubleCol", "DoubleNanCol", "FloatNullCol", + "StringCol", "BoolCol", "BigIntCol", "BigDecCol"}, + new TstUtils.SetGenerator<>("AAPL", "TSLA", "VXX", "SPY"), + new TstUtils.SetGenerator<>("Terran", "Vulcan", "Klingon", "Romulan"), + new TstUtils.IntGenerator(0, 1_000_000), + new TstUtils.DoubleGenerator(-100, 100), + new TstUtils.DoubleGenerator(-100, 100, 0.0, 0.1), + new TstUtils.FloatGenerator(-100, 100, 0.1, 0.0), + new TstUtils.SetGenerator<>("A", "B", "C", "D"), + new TstUtils.BooleanGenerator(.5, .1), + new TstUtils.BigIntegerGenerator(BigInteger.ZERO, BigInteger.valueOf(100), 0.1), + new TstUtils.BigDecimalGenerator(BigInteger.valueOf(-1000), BigInteger.valueOf(1000), 5, 0.1))); System.out.println("Source Data:"); TableTools.showWithIndex(table); - final Table rollup = LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLocked(() -> table.rollup(comboAgg, "USym", "Group")); + final Table rollup = + LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked(() -> table.rollup(comboAgg, "USym", "Group")); - final Table fullBy = - LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked(() -> table.by(comboAgg)); + final Table fullBy = LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked(() -> table.by(comboAgg)); System.out.println("Full By:"); TableTools.showWithIndex(fullBy); @@ -1396,8 +1329,8 @@ private void testSimpleRollup(ComboAggregateFactory comboAgg) { dumpRollup(rollup, getHierarchicalColumnName(rollup), "USym", "Group"); final List viewCols = new ArrayList<>(Arrays.asList("IntCol", "DoubleCol")); - for (final String maybeColumn : new String[] {"BigIntCol", "BigDecCol", "DoubleNanCol", - "FloatNullCol", "StringCol", "BoolCol"}) { + for (final String maybeColumn : new String[] {"BigIntCol", "BigDecCol", 
"DoubleNanCol", "FloatNullCol", + "StringCol", "BoolCol"}) { if (fullBy.hasColumns(maybeColumn)) { viewCols.add(maybeColumn); } @@ -1405,8 +1338,7 @@ private void testSimpleRollup(ComboAggregateFactory comboAgg) { final Table rollupClean = getDiffableTable(rollup).view(Selectable.from(viewCols)); - final String diff = - TableTools.diff(fullBy, rollupClean, 10, EnumSet.of(TableDiff.DiffItems.DoublesExact)); + final String diff = TableTools.diff(fullBy, rollupClean, 10, EnumSet.of(TableDiff.DiffItems.DoublesExact)); assertEquals("", diff); @@ -1417,18 +1349,17 @@ public void testRollupScope() { final int size = 10; final QueryTable table = getTable(size, random, - initColumnInfos(new String[] {"USym", "Group", "IntCol", "DoubleCol"}, - new TstUtils.SetGenerator<>("AAPL", "TSLA", "VXX", "SPY"), - new TstUtils.SetGenerator<>("Terran", "Vulcan", "Klingon", "Romulan"), - new TstUtils.IntGenerator(0, 1_000_000), - new TstUtils.DoubleGenerator(-100, 100))); + initColumnInfos(new String[] {"USym", "Group", "IntCol", "DoubleCol"}, + new TstUtils.SetGenerator<>("AAPL", "TSLA", "VXX", "SPY"), + new TstUtils.SetGenerator<>("Terran", "Vulcan", "Klingon", "Romulan"), + new TstUtils.IntGenerator(0, 1_000_000), + new TstUtils.DoubleGenerator(-100, 100))); final SafeCloseable scopeCloseable = LivenessScopeStack.open(); - final Table rollup = LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked( - () -> table.rollup(AggCombo(AggSum("IntCol", "DoubleCol")), "USym", "Group")); - final TableMap rootMap = - (TableMap) rollup.getAttribute(Table.HIERARCHICAL_CHILDREN_TABLE_MAP_ATTRIBUTE); + final Table rollup = LiveTableMonitor.DEFAULT.exclusiveLock() + .computeLocked(() -> table.rollup(AggCombo(AggSum("IntCol", "DoubleCol")), "USym", "Group")); + final TableMap rootMap = (TableMap) rollup.getAttribute(Table.HIERARCHICAL_CHILDREN_TABLE_MAP_ATTRIBUTE); final Table nextLevel = rootMap.get(SmartKey.EMPTY); assertNotNull(nextLevel); @@ -1449,12 +1380,12 @@ public void testRollupScope() { 
LiveTableMonitor.DEFAULT.exclusiveLock().doLocked(rollupManager::release); - // we should not be able to retainReference the rollup, because closing the scope should - // have decremented it to zero + // we should not be able to retainReference the rollup, because closing the scope should have decremented it to + // zero Assert.assertFalse(rollup.tryRetainReference()); - // we should not be able to retainReference the tablemap, because closing the scope should - // have decremented it to zero + // we should not be able to retainReference the tablemap, because closing the scope should have decremented it + // to zero Assert.assertFalse(rootMap.tryRetainReference()); Assert.assertFalse(nextLevel.tryRetainReference()); @@ -1465,12 +1396,12 @@ public void testTreeTableScope() { final int size = 10; final QueryTable table = getTable(size, random, - initColumnInfos(new String[] {"USym", "Group", "IntCol", "DoubleCol", "ParentCol"}, - new TstUtils.SetGenerator<>("AAPL", "TSLA", "VXX", "SPY"), - new TstUtils.SetGenerator<>("Terran", "Vulcan", "Klingon", "Romulan"), - new TstUtils.IntGenerator(1, 1_000_000), - new TstUtils.DoubleGenerator(-100, 100), - new TstUtils.IntGenerator(0, 0, 1.0))); + initColumnInfos(new String[] {"USym", "Group", "IntCol", "DoubleCol", "ParentCol"}, + new TstUtils.SetGenerator<>("AAPL", "TSLA", "VXX", "SPY"), + new TstUtils.SetGenerator<>("Terran", "Vulcan", "Klingon", "Romulan"), + new TstUtils.IntGenerator(1, 1_000_000), + new TstUtils.DoubleGenerator(-100, 100), + new TstUtils.IntGenerator(0, 0, 1.0))); final SafeCloseable scopeCloseable = LivenessScopeStack.open(); @@ -1501,24 +1432,21 @@ public void testTreeTableScope() { LiveTableMonitor.DEFAULT.exclusiveLock().doLocked(treeManager::release); - // we should not be able to retainReference the tree table, because closing the scope should - // have decremented it to zero + // we should not be able to retainReference the tree table, because closing the scope should have decremented it + // to 
zero Assert.assertFalse(treed.tryRetainReference()); Assert.assertFalse(promoted.tryRetainReference()); } public void testRollupScope2() { - final QueryTable table = - TstUtils.testRefreshingTable(i(), col("USym", CollectionUtil.ZERO_LENGTH_STRING_ARRAY), - col("Group", CollectionUtil.ZERO_LENGTH_STRING_ARRAY), intCol("IntCol"), - doubleCol("DoubleCol")); + final QueryTable table = TstUtils.testRefreshingTable(i(), col("USym", CollectionUtil.ZERO_LENGTH_STRING_ARRAY), + col("Group", CollectionUtil.ZERO_LENGTH_STRING_ARRAY), intCol("IntCol"), doubleCol("DoubleCol")); final SafeCloseable scopeCloseable = LivenessScopeStack.open(); - final Table rollup = LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked( - () -> table.rollup(AggCombo(AggSum("IntCol", "DoubleCol")), "USym", "Group")); - final TableMap rootMap = - (TableMap) rollup.getAttribute(Table.HIERARCHICAL_CHILDREN_TABLE_MAP_ATTRIBUTE); + final Table rollup = LiveTableMonitor.DEFAULT.exclusiveLock() + .computeLocked(() -> table.rollup(AggCombo(AggSum("IntCol", "DoubleCol")), "USym", "Group")); + final TableMap rootMap = (TableMap) rollup.getAttribute(Table.HIERARCHICAL_CHILDREN_TABLE_MAP_ATTRIBUTE); final SingletonLivenessManager rollupManager = new SingletonLivenessManager(rollup); @@ -1534,7 +1462,7 @@ public void testRollupScope2() { LiveTableMonitor.DEFAULT.startCycleForUnitTests(); addToTable(table, i(0, 1), col("USym", "AAPL", "TSLA"), col("Group", "Terran", "Vulcan"), - intCol("IntCol", 1, 2), doubleCol("DoubleCol", .1, .2)); + intCol("IntCol", 1, 2), doubleCol("DoubleCol", .1, .2)); table.notifyListeners(i(0, 1), i(), i()); LiveTableMonitor.DEFAULT.completeCycleForUnitTests(); @@ -1553,12 +1481,12 @@ public void testRollupScope2() { LiveTableMonitor.DEFAULT.exclusiveLock().doLocked(getScope::close); LiveTableMonitor.DEFAULT.exclusiveLock().doLocked(rollupManager::release); - // we should not be able to retainReference the rollup, because closing the scope should - // have decremented it to zero + // 
we should not be able to retainReference the rollup, because closing the scope should have decremented it to + // zero Assert.assertFalse(rollup.tryRetainReference()); - // we should not be able to retainReference the tablemap, because closing the scope should - // have decremented it to zero + // we should not be able to retainReference the tablemap, because closing the scope should have decremented it + // to zero Assert.assertFalse(rootMap.tryRetainReference()); Assert.assertFalse(nextLevel.tryRetainReference()); @@ -1569,16 +1497,15 @@ public void testNullTypes() { final int size = 10; final QueryTable table = getTable(size, random, - initColumnInfos(new String[] {"USym", "Group", "IntCol", "DoubleCol", "StringCol"}, - new TstUtils.SetGenerator<>("AAPL", "TSLA", "VXX", "SPY"), - new TstUtils.SetGenerator<>("Terran", "Vulcan", "Klingon", "Romulan"), - new TstUtils.IntGenerator(0, 1_000_000), - new TstUtils.DoubleGenerator(-100, 100), - new TstUtils.SetGenerator<>("A", "B", "C", "D"))); + initColumnInfos(new String[] {"USym", "Group", "IntCol", "DoubleCol", "StringCol"}, + new TstUtils.SetGenerator<>("AAPL", "TSLA", "VXX", "SPY"), + new TstUtils.SetGenerator<>("Terran", "Vulcan", "Klingon", "Romulan"), + new TstUtils.IntGenerator(0, 1_000_000), + new TstUtils.DoubleGenerator(-100, 100), + new TstUtils.SetGenerator<>("A", "B", "C", "D"))); - final Table rollup = LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLocked(() -> table.rollup(AggCombo(AggSum("DoubleCol"), AggFirst("StringCol")), - "USym", "Group", "IntCol")); + final Table rollup = LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked( + () -> table.rollup(AggCombo(AggSum("DoubleCol"), AggFirst("StringCol")), "USym", "Group", "IntCol")); TestCase.assertEquals(String.class, rollup.getColumnSource("USym").getType()); TestCase.assertEquals(String.class, rollup.getColumnSource("Group").getType()); TestCase.assertEquals(int.class, rollup.getColumnSource("IntCol").getType()); @@ -1618,22 +1545,20 @@ public 
void testRollupUniqueIncremental() { } private void testIncrementalSimple(ComboBy comboBy) { - final QueryTable table = TstUtils.testRefreshingTable(Index.FACTORY.getFlatIndex(6), - col("G1", "A", "A", "A", "B", "B", "B"), col("G2", "C", "C", "D", "D", "E", "E"), - col("IntCol", 1, 2, 3, 4, 5, 6)); + final QueryTable table = + TstUtils.testRefreshingTable(Index.FACTORY.getFlatIndex(6), col("G1", "A", "A", "A", "B", "B", "B"), + col("G2", "C", "C", "D", "D", "E", "E"), col("IntCol", 1, 2, 3, 4, 5, 6)); final Table rollup = LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLocked(() -> table.rollup(AggCombo(comboBy), "G1", "G2")); + .computeLocked(() -> table.rollup(AggCombo(comboBy), "G1", "G2")); dumpRollup(rollup, "G1", "G2"); - final Table fullBy = LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLocked(() -> table.by(AggCombo(comboBy))); + final Table fullBy = LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked(() -> table.by(AggCombo(comboBy))); final Table rollupClean = getDiffableTable(rollup).view("IntCol"); - final String diff = - TableTools.diff(fullBy, rollupClean, 10, EnumSet.of(TableDiff.DiffItems.DoublesExact)); + final String diff = TableTools.diff(fullBy, rollupClean, 10, EnumSet.of(TableDiff.DiffItems.DoublesExact)); assertEquals("", diff); @@ -1647,8 +1572,7 @@ private void testIncrementalSimple(ComboBy comboBy) { System.out.println("Expected:"); TableTools.showWithIndex(fullBy); - final String diff2 = - TableTools.diff(fullBy, rollupClean, 10, EnumSet.of(TableDiff.DiffItems.DoublesExact)); + final String diff2 = TableTools.diff(fullBy, rollupClean, 10, EnumSet.of(TableDiff.DiffItems.DoublesExact)); assertEquals("", diff2); LiveTableMonitor.DEFAULT.startCycleForUnitTests(); @@ -1658,22 +1582,21 @@ private void testIncrementalSimple(ComboBy comboBy) { dumpRollup(rollup, "G1", "G2"); - final String diff3 = - TableTools.diff(fullBy, rollupClean, 10, EnumSet.of(TableDiff.DiffItems.DoublesExact)); + final String diff3 = TableTools.diff(fullBy, 
rollupClean, 10, EnumSet.of(TableDiff.DiffItems.DoublesExact)); assertEquals("", diff3); } public void testDuplicateAgg() { final Table simpleTable = TableTools.emptyTable(10).update( - "MyString=new String(`a`+i)", - "MyInt=new Integer(i)", - "MyLong=new Long(i)", - "MyDouble=new Double(i+i/10)", - "MyFloat=new Float(i+i/10)", - "MyBoolean=new Boolean(i%2==0)", - "MyChar= new Character((char) ((i%26)+97))", - "MyShort=new Short(Integer.toString(i%32767))", - "MyByte= new java.lang.Byte(Integer.toString(i%127))"); + "MyString=new String(`a`+i)", + "MyInt=new Integer(i)", + "MyLong=new Long(i)", + "MyDouble=new Double(i+i/10)", + "MyFloat=new Float(i+i/10)", + "MyBoolean=new Boolean(i%2==0)", + "MyChar= new Character((char) ((i%26)+97))", + "MyShort=new Short(Integer.toString(i%32767))", + "MyByte= new java.lang.Byte(Integer.toString(i%127))"); try { @@ -1690,62 +1613,55 @@ public void testRollupIncremental() { final int size = 100; final QueryTable table = getTable(size, random, columnInfo = initColumnInfos(new String[] { - "USym", "Group", "IntCol", "DoubleCol", "StringCol", "StringNulls", "BoolCol", - "DateTime", + "USym", "Group", "IntCol", "DoubleCol", "StringCol", "StringNulls", "BoolCol", "DateTime", "IntSet", "LongSet", "DoubleSet", "FloatSet", "CharSet", "ShortSet", "ByteSet"}, - new TstUtils.SetGenerator<>("AAPL", "TSLA", "VXX", "SPY"), - new TstUtils.SetGenerator<>("Terran", "Vulcan", "Klingon", "Romulan"), - new TstUtils.IntGenerator(0, 1_000_000), - new TstUtils.DoubleGenerator(-100, 100), - new TstUtils.SetGenerator<>("A", "B", "C", "D"), - new TstUtils.SetGenerator<>("A", "B", "C", "D", null), - new TstUtils.BooleanGenerator(.5, .1), - new TstUtils.UnsortedDateTimeGenerator( - DBTimeUtils.convertDateTime("2020-03-17T09:30:00 NY"), - DBTimeUtils.convertDateTime("2020-03-17T16:00:00 NY")), - new TstUtils.SetGenerator<>(0, 1, 2, 3, 4, 5, NULL_INT), - new TstUtils.SetGenerator<>(0L, 1L, 2L, 3L, 4L, 5L, NULL_LONG), - new TstUtils.SetGenerator<>(0.0D, 1.1D, 
2.2D, 3.3D, 4.4D, 5.5D, NULL_DOUBLE), - new TstUtils.SetGenerator<>(0.0f, 1.1f, 2.2f, 3.3f, 4.4f, 5.5f, NULL_FLOAT), - new TstUtils.SetGenerator<>('a', 'b', 'c', 'd', 'e', NULL_CHAR), - new TstUtils.SetGenerator<>((short) 0, (short) 1, (short) 2, (short) 3, (short) 4, - (short) 5, NULL_SHORT), - new TstUtils.SetGenerator<>((byte) 0, (byte) 1, (byte) 2, (byte) 3, (byte) 4, (byte) 5, - NULL_BYTE))); + new TstUtils.SetGenerator<>("AAPL", "TSLA", "VXX", "SPY"), + new TstUtils.SetGenerator<>("Terran", "Vulcan", "Klingon", "Romulan"), + new TstUtils.IntGenerator(0, 1_000_000), + new TstUtils.DoubleGenerator(-100, 100), + new TstUtils.SetGenerator<>("A", "B", "C", "D"), + new TstUtils.SetGenerator<>("A", "B", "C", "D", null), + new TstUtils.BooleanGenerator(.5, .1), + new TstUtils.UnsortedDateTimeGenerator(DBTimeUtils.convertDateTime("2020-03-17T09:30:00 NY"), + DBTimeUtils.convertDateTime("2020-03-17T16:00:00 NY")), + new TstUtils.SetGenerator<>(0, 1, 2, 3, 4, 5, NULL_INT), + new TstUtils.SetGenerator<>(0L, 1L, 2L, 3L, 4L, 5L, NULL_LONG), + new TstUtils.SetGenerator<>(0.0D, 1.1D, 2.2D, 3.3D, 4.4D, 5.5D, NULL_DOUBLE), + new TstUtils.SetGenerator<>(0.0f, 1.1f, 2.2f, 3.3f, 4.4f, 5.5f, NULL_FLOAT), + new TstUtils.SetGenerator<>('a', 'b', 'c', 'd', 'e', NULL_CHAR), + new TstUtils.SetGenerator<>((short) 0, (short) 1, (short) 2, (short) 3, (short) 4, (short) 5, + NULL_SHORT), + new TstUtils.SetGenerator<>((byte) 0, (byte) 1, (byte) 2, (byte) 3, (byte) 4, (byte) 5, NULL_BYTE))); final ComboAggregateFactory rollupDefinition = AggCombo( - AggSum("IntCol", "DoubleCol"), - AggMin("MinInt=IntCol", "MinDT=DateTime"), - AggMax("MaxDouble=DoubleCol", "MaxDT=DateTime"), - AggAvg("IntAvg=IntCol", "DoubleAvg=DoubleCol"), - AggStd("IntStd=IntCol", "DoubleStd=DoubleCol"), - AggVar("IntVar=IntCol", "DoubleVar=DoubleCol"), - AggFirst("IntFirst=IntCol", "DoubleFirst=DoubleCol"), - AggLast("IntLast=IntCol", "DoubleLast=DoubleCol"), - AggCount("Count"), - AggCountDistinct("SCDistinct=StringCol", 
"CDBoolCol=BoolCol", "DTCDistinct=DateTime", - "CDIntCol=IntSet", "CDLongCol=LongSet", "CDDoubleCol=DoubleSet", - "CDFloatCol=FloatSet", "CDCharCol=CharSet", "CDShortCol=ShortSet", - "CDByteCol=ByteSet"), - AggDistinct("SDistinct=StringCol", "DistinctBoolCol=BoolCol", "DTDistinct=DateTime", - "DIntCol=IntSet", "DLongCol=LongSet", "DDoubleCol=DoubleSet", - "DFloatCol=FloatSet", "DCharCol=CharSet", "DShortCol=ShortSet", "DByteCol=ByteSet"), - AggUnique("SUnique=StringCol", "UniqueBoolCol=BoolCol", - "UIntCol=IntSet", "ULongCol=LongSet", "UDoubleCol=DoubleSet", - "UFloatCol=FloatSet", "UCharCol=CharSet", "UShortCol=ShortSet", "UByteCol=ByteSet"), - AggCountDistinct(true, "SCDistinctN=StringNulls", "CDBoolColN=BoolCol", - "CDNIntCol=IntSet", "CDNLongCol=LongSet", "CDNDoubleCol=DoubleSet", - "CDNFloatCol=FloatSet", "CDNCharCol=CharSet", "CDNShortCol=ShortSet", - "CDNByteCol=ByteSet"), - AggDistinct(true, "SDistinctN=StringNulls", "DistinctBoolColN=BoolCol", - "DNIntCol=IntSet", "DNLongCol=LongSet", "DNDoubleCol=DoubleSet", - "DNFloatCol=FloatSet", "DNCharCol=CharSet", "DNShortCol=ShortSet", - "DNByteCol=ByteSet"), - AggUnique(true, "SUniqueN=StringNulls", "UniqueBoolColN=BoolCol", - "UNIntCol=IntSet", "UNLongCol=LongSet", "UNDoubleCol=DoubleSet", - "UNFloatCol=FloatSet", "UNCharCol=CharSet", "UNShortCol=ShortSet", - "UNByteCol=ByteSet")); + AggSum("IntCol", "DoubleCol"), + AggMin("MinInt=IntCol", "MinDT=DateTime"), + AggMax("MaxDouble=DoubleCol", "MaxDT=DateTime"), + AggAvg("IntAvg=IntCol", "DoubleAvg=DoubleCol"), + AggStd("IntStd=IntCol", "DoubleStd=DoubleCol"), + AggVar("IntVar=IntCol", "DoubleVar=DoubleCol"), + AggFirst("IntFirst=IntCol", "DoubleFirst=DoubleCol"), + AggLast("IntLast=IntCol", "DoubleLast=DoubleCol"), + AggCount("Count"), + AggCountDistinct("SCDistinct=StringCol", "CDBoolCol=BoolCol", "DTCDistinct=DateTime", + "CDIntCol=IntSet", "CDLongCol=LongSet", "CDDoubleCol=DoubleSet", + "CDFloatCol=FloatSet", "CDCharCol=CharSet", "CDShortCol=ShortSet", 
"CDByteCol=ByteSet"), + AggDistinct("SDistinct=StringCol", "DistinctBoolCol=BoolCol", "DTDistinct=DateTime", + "DIntCol=IntSet", "DLongCol=LongSet", "DDoubleCol=DoubleSet", + "DFloatCol=FloatSet", "DCharCol=CharSet", "DShortCol=ShortSet", "DByteCol=ByteSet"), + AggUnique("SUnique=StringCol", "UniqueBoolCol=BoolCol", + "UIntCol=IntSet", "ULongCol=LongSet", "UDoubleCol=DoubleSet", + "UFloatCol=FloatSet", "UCharCol=CharSet", "UShortCol=ShortSet", "UByteCol=ByteSet"), + AggCountDistinct(true, "SCDistinctN=StringNulls", "CDBoolColN=BoolCol", + "CDNIntCol=IntSet", "CDNLongCol=LongSet", "CDNDoubleCol=DoubleSet", + "CDNFloatCol=FloatSet", "CDNCharCol=CharSet", "CDNShortCol=ShortSet", "CDNByteCol=ByteSet"), + AggDistinct(true, "SDistinctN=StringNulls", "DistinctBoolColN=BoolCol", + "DNIntCol=IntSet", "DNLongCol=LongSet", "DNDoubleCol=DoubleSet", + "DNFloatCol=FloatSet", "DNCharCol=CharSet", "DNShortCol=ShortSet", "DNByteCol=ByteSet"), + AggUnique(true, "SUniqueN=StringNulls", "UniqueBoolColN=BoolCol", + "UNIntCol=IntSet", "UNLongCol=LongSet", "UNDoubleCol=DoubleSet", + "UNFloatCol=FloatSet", "UNCharCol=CharSet", "UNShortCol=ShortSet", "UNByteCol=ByteSet")); final EvalNuggetInterface[] en = new EvalNuggetInterface[] { new EvalNuggetInterface() { @Override @@ -1760,7 +1676,7 @@ public void show() { @Override protected Table e() { return LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLocked(() -> table.rollup(rollupDefinition, "USym", "Group")); + .computeLocked(() -> table.rollup(rollupDefinition, "USym", "Group")); } @Override @@ -1769,8 +1685,9 @@ void showResult(String label, Table e) { dumpRollup(e, "USym", "Group"); } }, - new TableComparator(getDiffableTable(table.rollup(rollupDefinition)) - .dropColumns(RollupInfo.ROLLUP_COLUMN), table.by(rollupDefinition)) + new TableComparator( + getDiffableTable(table.rollup(rollupDefinition)).dropColumns(RollupInfo.ROLLUP_COLUMN), + table.by(rollupDefinition)) }; for (int step = 0; step < 100; step++) { @@ -1803,9 +1720,8 @@ 
public void testOrderTreeTable() { int nextHid = 11; long index = 2; - final QueryTable source = TstUtils.testRefreshingTable(Index.FACTORY.getFlatIndex(1), - longCol("Sentinel", 1), stringCol("hid", "a"), stringCol("hpos", "1"), - col("open", true), doubleCol("rand", 1.0)); + final QueryTable source = TstUtils.testRefreshingTable(Index.FACTORY.getFlatIndex(1), longCol("Sentinel", 1), + stringCol("hid", "a"), stringCol("hpos", "1"), col("open", true), doubleCol("rand", 1.0)); final List openHid = new ArrayList<>(); openHid.add("a"); @@ -1814,10 +1730,10 @@ public void testOrderTreeTable() { final Table orders = source - .lastBy("hpos", "hid") - .where("open") - .update("treeid=new io.deephaven.datastructures.util.SmartKey(hid, hpos)", - "parent=io.deephaven.db.v2.QueryTableTreeTest.getPrefix(hid, hpos)"); + .lastBy("hpos", "hid") + .where("open") + .update("treeid=new io.deephaven.datastructures.util.SmartKey(hid, hpos)", + "parent=io.deephaven.db.v2.QueryTableTreeTest.getPrefix(hid, hpos)"); final Table ordersTree = orders.treeTable("treeid", "parent"); final Table ordersFiltered = TreeTableFilter.filterTree(ordersTree, "rand > 0.8"); @@ -1846,8 +1762,7 @@ public void testOrderTreeTable() { hid = openHid.get(random.nextInt(openHid.size())); } - final List hpos = - hidToPos.computeIfAbsent(hid, (key) -> new ArrayList<>()); + final List hpos = hidToPos.computeIfAbsent(hid, (key) -> new ArrayList<>()); final String newHpos; if (hpos.isEmpty()) { // newHpos = random.nextBoolean() ? "1" : "1.1"; @@ -1855,8 +1770,8 @@ public void testOrderTreeTable() { } else { final String parentHpos = hpos.get(random.nextInt(hpos.size())); final int next = hpos.stream().filter(s -> s.startsWith(parentHpos + ".")) - .map(s -> s.substring(parentHpos.length() + 1).split("\\.")[0]) - .mapToInt(Integer::parseInt).max().orElse(0) + 1; + .map(s -> s.substring(parentHpos.length() + 1).split("\\.")[0]) + .mapToInt(Integer::parseInt).max().orElse(0) + 1; newHpos = parentHpos + "." 
+ next; maxLevel = Math.max(maxLevel, 1 + StringUtils.countMatches(newHpos, ".")); } @@ -1864,7 +1779,7 @@ public void testOrderTreeTable() { final long newIndex = ++index; addToTable(source, i(newIndex), longCol("Sentinel", newIndex), col("hid", hid), - col("hpos", newHpos), col("open", true), col("rand", random.nextDouble())); + col("hpos", newHpos), col("open", true), col("rand", random.nextDouble())); builder.appendKey(newIndex); } else if (which < 0.1) { // close an order @@ -1878,8 +1793,8 @@ public void testOrderTreeTable() { } final long newIndex = ++index; - addToTable(source, i(newIndex), longCol("Sentinel", newIndex), col("hid", hid), - col("hpos", hpos), col("open", false), col("rand", random.nextDouble())); + addToTable(source, i(newIndex), longCol("Sentinel", newIndex), col("hid", hid), col("hpos", hpos), + col("open", false), col("rand", random.nextDouble())); builder.appendKey(newIndex); } else { // modify an order @@ -1892,8 +1807,8 @@ public void testOrderTreeTable() { final String hpos = validHpos.get(random.nextInt(validHpos.size())); final long newIndex = ++index; - addToTable(source, i(newIndex), longCol("Sentinel", newIndex), col("hid", hid), - col("hpos", hpos), col("open", true), col("rand", random.nextDouble())); + addToTable(source, i(newIndex), longCol("Sentinel", newIndex), col("hid", hid), col("hpos", hpos), + col("open", true), col("rand", random.nextDouble())); builder.appendKey(newIndex); } @@ -1909,17 +1824,17 @@ public void testOrderTreeTable() { final String hierarchicalColumnName = getHierarchicalColumnName(ordersFiltered); doCompareWithChildrenForTrees("step = " + step, ordersFiltered, - TreeTableFilter.filterTree(ordersTree, "rand > 0.8"), 0, maxLevel, - hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + TreeTableFilter.filterTree(ordersTree, "rand > 0.8"), 0, maxLevel, hierarchicalColumnName, + CollectionUtil.ZERO_LENGTH_STRING_ARRAY); doCompareWithChildrenForTrees("step = " + step, ordersFiltered2, - 
TreeTableFilter.filterTree(ordersTree, "rand > 0.1"), 0, maxLevel, - hierarchicalColumnName, CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + TreeTableFilter.filterTree(ordersTree, "rand > 0.1"), 0, maxLevel, hierarchicalColumnName, + CollectionUtil.ZERO_LENGTH_STRING_ARRAY); } } public void testIds6262() { final QueryTable table = TstUtils.testRefreshingTable(i(1), col("Sym", "A"), - col("BigI", new BigInteger[] {null}), col("BigD", new BigDecimal[] {null})); + col("BigI", new BigInteger[] {null}), col("BigD", new BigDecimal[] {null})); final Table rollup = table.rollup(AggCombo(AggVar("BigI", "BigD")), "Sym"); @@ -1929,9 +1844,8 @@ public void testIds6262() { assertNull(rollup.getColumn("BigD").get(0)); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - addToTable(table, i(2, 3), col("Sym", "A", "A"), - col("BigI", BigInteger.ZERO, BigInteger.ZERO), - col("BigD", BigDecimal.ZERO, BigDecimal.ZERO)); + addToTable(table, i(2, 3), col("Sym", "A", "A"), col("BigI", BigInteger.ZERO, BigInteger.ZERO), + col("BigD", BigDecimal.ZERO, BigDecimal.ZERO)); table.notifyListeners(i(2, 3), i(), i()); }); @@ -1943,18 +1857,17 @@ public void testIds6262() { public void testIds7773() { final QueryTable dataTable = TstUtils.testRefreshingTable( - stringCol("USym", "A"), - doubleCol("Value", NULL_DOUBLE), - byteCol("BValue", NULL_BYTE), - shortCol("SValue", NULL_SHORT), - intCol("IValue", NULL_INT), - longCol("LValue", NULL_LONG), - floatCol("FValue", NULL_FLOAT)); - - final Table rolledUp = dataTable.rollup( - AggCombo(AggAvg("Value", "BValue", "SValue", "IValue", "LValue", "FValue")), "USym"); - final TableMap rollupMap = - (TableMap) rolledUp.getAttribute(Table.HIERARCHICAL_CHILDREN_TABLE_MAP_ATTRIBUTE); + stringCol("USym", "A"), + doubleCol("Value", NULL_DOUBLE), + byteCol("BValue", NULL_BYTE), + shortCol("SValue", NULL_SHORT), + intCol("IValue", NULL_INT), + longCol("LValue", NULL_LONG), + floatCol("FValue", NULL_FLOAT)); + + final Table rolledUp = + 
dataTable.rollup(AggCombo(AggAvg("Value", "BValue", "SValue", "IValue", "LValue", "FValue")), "USym"); + final TableMap rollupMap = (TableMap) rolledUp.getAttribute(Table.HIERARCHICAL_CHILDREN_TABLE_MAP_ATTRIBUTE); assertNotNull(rollupMap); final Table aTable = rollupMap.get(SmartKey.EMPTY); @@ -1963,13 +1876,13 @@ public void testIds7773() { // Start with Nulls and make sure we get NaN LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { addToTable(dataTable, i(1, 2), - stringCol("USym", "A", "A"), - doubleCol("Value", NULL_DOUBLE, NULL_DOUBLE), - byteCol("BValue", NULL_BYTE, NULL_BYTE), - shortCol("SValue", NULL_SHORT, NULL_SHORT), - intCol("IValue", NULL_INT, NULL_INT), - longCol("LValue", NULL_LONG, NULL_LONG), - floatCol("FValue", NULL_FLOAT, NULL_FLOAT)); + stringCol("USym", "A", "A"), + doubleCol("Value", NULL_DOUBLE, NULL_DOUBLE), + byteCol("BValue", NULL_BYTE, NULL_BYTE), + shortCol("SValue", NULL_SHORT, NULL_SHORT), + intCol("IValue", NULL_INT, NULL_INT), + longCol("LValue", NULL_LONG, NULL_LONG), + floatCol("FValue", NULL_FLOAT, NULL_FLOAT)); dataTable.notifyListeners(i(1, 2), i(), i()); }); @@ -1988,17 +1901,16 @@ public void testIds7773() { assertEquals(Double.NaN, aTable.getColumn("IValue").getDouble(0)); assertEquals(Double.NaN, aTable.getColumn("LValue").getDouble(0)); - // Add a real value 0, which used to be broken because the default value was 0 and resulted - // in a no change + // Add a real value 0, which used to be broken because the default value was 0 and resulted in a no change LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { addToTable(dataTable, i(3), - stringCol("USym", "A"), - doubleCol("Value", 0.0d), - byteCol("BValue", (byte) 0), - shortCol("SValue", (short) 0), - intCol("IValue", 0), - longCol("LValue", 0), - floatCol("FValue", 0.0f)); + stringCol("USym", "A"), + doubleCol("Value", 0.0d), + byteCol("BValue", (byte) 0), + shortCol("SValue", (short) 0), + intCol("IValue", 0), + longCol("LValue", 0), + floatCol("FValue", 
0.0f)); dataTable.notifyListeners(i(3), i(), i()); }); @@ -2041,13 +1953,13 @@ public void testIds7773() { // Add a couple of real 0's and make sure we get a 0 LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { addToTable(dataTable, i(3, 4, 5), - stringCol("USym", "A", "A", "A"), - doubleCol("Value", 0.0d, 0.0d, 0.0d), - byteCol("BValue", (byte) 0, (byte) 0, (byte) 0), - shortCol("SValue", (short) 0, (short) 0, (short) 0), - intCol("IValue", 0, 0, 0), - longCol("LValue", 0, 0, 0), - floatCol("FValue", 0.0f, 0.0f, 0.0f)); + stringCol("USym", "A", "A", "A"), + doubleCol("Value", 0.0d, 0.0d, 0.0d), + byteCol("BValue", (byte) 0, (byte) 0, (byte) 0), + shortCol("SValue", (short) 0, (short) 0, (short) 0), + intCol("IValue", 0, 0, 0), + longCol("LValue", 0, 0, 0), + floatCol("FValue", 0.0f, 0.0f, 0.0f)); dataTable.notifyListeners(i(3, 4, 5), i(), i()); }); @@ -2068,13 +1980,13 @@ public void testIds7773() { LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { addToTable(dataTable, i(6), - stringCol("USym", "A"), - doubleCol("Value", 1.0d), - byteCol("BValue", (byte) 1), - shortCol("SValue", (short) 1), - intCol("IValue", 1), - longCol("LValue", 1), - floatCol("FValue", 1.0f)); + stringCol("USym", "A"), + doubleCol("Value", 1.0d), + byteCol("BValue", (byte) 1), + shortCol("SValue", (short) 1), + intCol("IValue", 1), + longCol("LValue", 1), + floatCol("FValue", 1.0f)); dataTable.notifyListeners(i(6), i(), i()); }); diff --git a/DB/src/test/java/io/deephaven/db/v2/QueryTableWhereTest.java b/DB/src/test/java/io/deephaven/db/v2/QueryTableWhereTest.java index 33690dd2764..3dbd81e201c 100644 --- a/DB/src/test/java/io/deephaven/db/v2/QueryTableWhereTest.java +++ b/DB/src/test/java/io/deephaven/db/v2/QueryTableWhereTest.java @@ -52,21 +52,19 @@ public class QueryTableWhereTest extends QueryTableTestBase { @Test public void testWhere() { - java.util.function.Function filter = - ConditionFilter::createConditionFilter; - final QueryTable table = - testRefreshingTable(i(2, 4, 
6), c("x", 1, 2, 3), c("y", 'a', 'b', 'c')); + java.util.function.Function filter = ConditionFilter::createConditionFilter; + final QueryTable table = testRefreshingTable(i(2, 4, 6), c("x", 1, 2, 3), c("y", 'a', 'b', 'c')); assertEquals("", diff(table.where(filter.apply("k%2 == 0")), table, 10)); assertEquals("", diff(table.where(filter.apply("i%2 == 0")), - testRefreshingTable(i(2, 6), c("x", 1, 3), c("y", 'a', 'c')), 10)); + testRefreshingTable(i(2, 6), c("x", 1, 3), c("y", 'a', 'c')), 10)); assertEquals("", diff(table.where(filter.apply("(y-'a') = 2")), - testRefreshingTable(i(2), c("x", 3), c("y", 'c')), 10)); + testRefreshingTable(i(2), c("x", 3), c("y", 'c')), 10)); final QueryTable whereResult = (QueryTable) table.where(filter.apply("x%2 == 1")); final Listener whereResultListener = new ListenerWithGlobals(whereResult); whereResult.listenForUpdates(whereResultListener); assertEquals("", diff(whereResult, - testRefreshingTable(i(2, 6), c("x", 1, 3), c("y", 'a', 'c')), 10)); + testRefreshingTable(i(2, 6), c("x", 1, 3), c("y", 'a', 'c')), 10)); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { addToTable(table, i(7, 9), c("x", 4, 5), c("y", 'd', 'e')); @@ -74,7 +72,7 @@ public void testWhere() { }); assertEquals("", diff(whereResult, - testRefreshingTable(i(2, 6, 9), c("x", 1, 3, 5), c("y", 'a', 'c', 'e')), 10)); + testRefreshingTable(i(2, 6, 9), c("x", 1, 3, 5), c("y", 'a', 'c', 'e')), 10)); assertEquals(added, i(9)); assertEquals(removed, i()); assertEquals(modified, i()); @@ -85,7 +83,7 @@ public void testWhere() { }); assertEquals("", diff(whereResult, - testRefreshingTable(i(2, 6, 7), c("x", 1, 3, 3), c("y", 'a', 'c', 'e')), 10)); + testRefreshingTable(i(2, 6, 7), c("x", 1, 3, 3), c("y", 'a', 'c', 'e')), 10)); assertEquals(added, i(7)); assertEquals(removed, i(9)); @@ -109,7 +107,7 @@ public void testWhere() { }); assertEquals("", diff(whereResult, - testRefreshingTable(i(2, 4, 6), c("x", 1, 21, 3), c("y", 'a', 'x', 'c')), 10)); + 
testRefreshingTable(i(2, 4, 6), c("x", 1, 21, 3), c("y", 'a', 'x', 'c')), 10)); assertEquals(added, i(2, 4, 6)); assertEquals(removed, i()); @@ -118,8 +116,7 @@ public void testWhere() { } public void testWhereBiggerTable() { - final Table table = TableTools.emptyTable(100000) - .update("Sym=ii%2==0 ? `AAPL` : `BANANA`", "II=ii").select(); + final Table table = TableTools.emptyTable(100000).update("Sym=ii%2==0 ? `AAPL` : `BANANA`", "II=ii").select(); final Table filtered = table.where("Sym = (`AAPL`)"); assertTableEquals(TableTools.emptyTable(50000).update("Sym=`AAPL`", "II=ii*2"), filtered); TableTools.showWithIndex(filtered); @@ -134,24 +131,22 @@ public void testIandK() { } - // this has no changes from the original testWhere, it is just to make sure that we still work - // as a single clause + // this has no changes from the original testWhere, it is just to make sure that we still work as a single clause @Test public void testWhereOneOfSingle() { - final QueryTable table = - testRefreshingTable(i(2, 4, 6), c("x", 1, 2, 3), c("y", 'a', 'b', 'c')); + final QueryTable table = testRefreshingTable(i(2, 4, 6), c("x", 1, 2, 3), c("y", 'a', 'b', 'c')); assertEquals("", diff(table.whereOneOf(whereClause("k%2 == 0")), table, 10)); assertEquals("", diff(table.whereOneOf(whereClause("i%2 == 0")), - testRefreshingTable(i(2, 6), c("x", 1, 3), c("y", 'a', 'c')), 10)); + testRefreshingTable(i(2, 6), c("x", 1, 3), c("y", 'a', 'c')), 10)); assertEquals("", diff(table.whereOneOf(whereClause("(y-'a') = 2")), - testRefreshingTable(i(2), c("x", 3), c("y", 'c')), 10)); + testRefreshingTable(i(2), c("x", 3), c("y", 'c')), 10)); final QueryTable whereResult = (QueryTable) table.whereOneOf(whereClause("x%2 == 1")); final Listener whereResultListener = new ListenerWithGlobals(whereResult); whereResult.listenForUpdates(whereResultListener); assertEquals("", diff(whereResult, - testRefreshingTable(i(2, 6), c("x", 1, 3), c("y", 'a', 'c')), 10)); + testRefreshingTable(i(2, 6), c("x", 1, 3), 
c("y", 'a', 'c')), 10)); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { addToTable(table, i(7, 9), c("x", 4, 5), c("y", 'd', 'e')); @@ -159,7 +154,7 @@ public void testWhereOneOfSingle() { }); assertEquals("", diff(whereResult, - testRefreshingTable(i(2, 6, 9), c("x", 1, 3, 5), c("y", 'a', 'c', 'e')), 10)); + testRefreshingTable(i(2, 6, 9), c("x", 1, 3, 5), c("y", 'a', 'c', 'e')), 10)); assertEquals(added, i(9)); assertEquals(removed, i()); assertEquals(modified, i()); @@ -170,7 +165,7 @@ public void testWhereOneOfSingle() { }); assertEquals("", diff(whereResult, - testRefreshingTable(i(2, 6, 7), c("x", 1, 3, 3), c("y", 'a', 'c', 'e')), 10)); + testRefreshingTable(i(2, 6, 7), c("x", 1, 3, 3), c("y", 'a', 'c', 'e')), 10)); assertEquals(added, i(7)); assertEquals(removed, i(9)); @@ -194,7 +189,7 @@ public void testWhereOneOfSingle() { }); assertEquals("", diff(whereResult, - testRefreshingTable(i(2, 4, 6), c("x", 1, 21, 3), c("y", 'a', 'x', 'c')), 10)); + testRefreshingTable(i(2, 4, 6), c("x", 1, 21, 3), c("y", 'a', 'x', 'c')), 10)); assertEquals(added, i(2, 4, 6)); assertEquals(removed, i()); @@ -205,21 +200,19 @@ public void testWhereOneOfSingle() { // adds a second clause @Test public void testWhereOneOfTwo() { - final QueryTable table = - testRefreshingTable(i(2, 4, 6, 8), c("x", 1, 2, 3, 4), c("y", 'a', 'b', 'c', 'f')); + final QueryTable table = testRefreshingTable(i(2, 4, 6, 8), c("x", 1, 2, 3, 4), c("y", 'a', 'b', 'c', 'f')); assertEquals("", diff(table.whereOneOf(whereClause("k%2 == 0")), table, 10)); assertEquals("", diff(table.whereOneOf(whereClause("i%2 == 0")), - testRefreshingTable(i(2, 6), c("x", 1, 3), c("y", 'a', 'c')), 10)); + testRefreshingTable(i(2, 6), c("x", 1, 3), c("y", 'a', 'c')), 10)); assertEquals("", diff(table.whereOneOf(whereClause("(y-'a') = 2")), - testRefreshingTable(i(2), c("x", 3), c("y", 'c')), 10)); + testRefreshingTable(i(2), c("x", 3), c("y", 'c')), 10)); - final QueryTable whereResult = - (QueryTable) 
table.whereOneOf(whereClause("x%2 == 1"), whereClause("y=='f'")); + final QueryTable whereResult = (QueryTable) table.whereOneOf(whereClause("x%2 == 1"), whereClause("y=='f'")); final Listener whereResultListener = new ListenerWithGlobals(whereResult); whereResult.listenForUpdates(whereResultListener); assertEquals("", diff(whereResult, - testRefreshingTable(i(2, 6, 8), c("x", 1, 3, 4), c("y", 'a', 'c', 'f')), 10)); + testRefreshingTable(i(2, 6, 8), c("x", 1, 3, 4), c("y", 'a', 'c', 'f')), 10)); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { @@ -228,8 +221,7 @@ public void testWhereOneOfTwo() { }); assertEquals("", diff(whereResult, - testRefreshingTable(i(2, 6, 8, 9), c("x", 1, 3, 4, 5), c("y", 'a', 'c', 'f', 'e')), - 10)); + testRefreshingTable(i(2, 6, 8, 9), c("x", 1, 3, 4, 5), c("y", 'a', 'c', 'f', 'e')), 10)); assertEquals(added, i(9)); assertEquals(removed, i()); assertEquals(modified, i()); @@ -240,8 +232,7 @@ public void testWhereOneOfTwo() { }); assertEquals("", diff(whereResult, - testRefreshingTable(i(2, 6, 7, 8), c("x", 1, 3, 3, 4), c("y", 'a', 'c', 'e', 'f')), - 10)); + testRefreshingTable(i(2, 6, 7, 8), c("x", 1, 3, 3, 4), c("y", 'a', 'c', 'e', 'f')), 10)); assertEquals(added, i(7)); assertEquals(removed, i(9)); @@ -253,7 +244,7 @@ public void testWhereOneOfTwo() { }); assertEquals("", diff(whereResult, - testRefreshingTable(i(8), c("x", 4), c("y", 'f')), 10)); + testRefreshingTable(i(8), c("x", 4), c("y", 'f')), 10)); assertEquals(added, i()); assertEquals(removed, i(2, 6, 7)); @@ -266,8 +257,7 @@ public void testWhereOneOfTwo() { }); assertEquals("", diff(whereResult, - testRefreshingTable(i(2, 4, 6, 8), c("x", 1, 21, 3, 4), c("y", 'a', 'x', 'c', 'f')), - 10)); + testRefreshingTable(i(2, 4, 6, 8), c("x", 1, 21, 3, 4), c("y", 'a', 'x', 'c', 'f')), 10)); assertEquals(added, i(2, 4, 6)); assertEquals(removed, i()); @@ -276,121 +266,96 @@ public void testWhereOneOfTwo() { TableTools.showWithIndex(table); final Table usingStringArray = 
table.whereOneOf("x%3 == 0", "y=='f'"); assertEquals("", diff(usingStringArray, - testRefreshingTable(i(4, 6, 8), c("x", 21, 3, 4), c("y", 'x', 'c', 'f')), 10)); + testRefreshingTable(i(4, 6, 8), c("x", 21, 3, 4), c("y", 'x', 'c', 'f')), 10)); } @Test public void testWhereInDependency() { - final QueryTable tableToFilter = - testRefreshingTable(i(10, 11, 12, 13, 14, 15), c("A", 1, 2, 3, 4, 5, 6), + final QueryTable tableToFilter = testRefreshingTable(i(10, 11, 12, 13, 14, 15), c("A", 1, 2, 3, 4, 5, 6), c("B", 2, 4, 6, 8, 10, 12), c("C", 'a', 'b', 'c', 'd', 'e', 'f')); - final QueryTable setTable = - testRefreshingTable(i(100, 101, 102), c("A", 1, 2, 3), c("B", 2, 4, 6)); + final QueryTable setTable = testRefreshingTable(i(100, 101, 102), c("A", 1, 2, 3), c("B", 2, 4, 6)); final Table setTable1 = setTable.where("A > 2"); final Table setTable2 = setTable.where("B > 6"); final DynamicWhereFilter dynamicFilter1 = - new DynamicWhereFilter(setTable1, true, MatchPairFactory.getExpressions("A")); + new DynamicWhereFilter(setTable1, true, MatchPairFactory.getExpressions("A")); final DynamicWhereFilter dynamicFilter2 = - new DynamicWhereFilter(setTable2, true, MatchPairFactory.getExpressions("B")); + new DynamicWhereFilter(setTable2, true, MatchPairFactory.getExpressions("B")); - final SelectFilter composedFilter = - DisjunctiveFilter.makeDisjunctiveFilter(dynamicFilter1, dynamicFilter2); + final SelectFilter composedFilter = DisjunctiveFilter.makeDisjunctiveFilter(dynamicFilter1, dynamicFilter2); final Table composed = tableToFilter.where(composedFilter); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { TestCase.assertTrue(dynamicFilter1.satisfied(LogicalClock.DEFAULT.currentStep())); TestCase.assertTrue(dynamicFilter2.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase - .assertTrue(((QueryTable) composed).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(((QueryTable) composed).satisfied(LogicalClock.DEFAULT.currentStep())); }); 
LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { addToTable(setTable, i(103), c("A", 5), c("B", 8)); setTable.notifyListeners(i(103), i(), i()); - TestCase.assertFalse( - ((QueryTable) setTable1).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse( - ((QueryTable) setTable2).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(((QueryTable) setTable1).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(((QueryTable) setTable2).satisfied(LogicalClock.DEFAULT.currentStep())); TestCase.assertFalse(dynamicFilter1.satisfied(LogicalClock.DEFAULT.currentStep())); TestCase.assertFalse(dynamicFilter2.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase - .assertFalse(((QueryTable) composed).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(((QueryTable) composed).satisfied(LogicalClock.DEFAULT.currentStep())); - // this will do the notification for table; which should first fire the recorder for - // setTable1 + // this will do the notification for table; which should first fire the recorder for setTable1 LiveTableMonitor.DEFAULT.flushOneNotificationForUnitTests(); - // this will do the notification for table; which should first fire the recorder for - // setTable2 + // this will do the notification for table; which should first fire the recorder for setTable2 LiveTableMonitor.DEFAULT.flushOneNotificationForUnitTests(); - // this will do the notification for table; which should first fire the merged listener - // for 1 + // this will do the notification for table; which should first fire the merged listener for 1 boolean flushed = LiveTableMonitor.DEFAULT.flushOneNotificationForUnitTests(); TestCase.assertTrue(flushed); - TestCase - .assertTrue(((QueryTable) setTable1).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase.assertFalse( - ((QueryTable) setTable2).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(((QueryTable) 
setTable1).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(((QueryTable) setTable2).satisfied(LogicalClock.DEFAULT.currentStep())); TestCase.assertFalse(dynamicFilter1.satisfied(LogicalClock.DEFAULT.currentStep())); TestCase.assertFalse(dynamicFilter2.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase - .assertFalse(((QueryTable) composed).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(((QueryTable) composed).satisfied(LogicalClock.DEFAULT.currentStep())); // the next notification should be the merged listener for setTable2 flushed = LiveTableMonitor.DEFAULT.flushOneNotificationForUnitTests(); TestCase.assertTrue(flushed); - TestCase - .assertTrue(((QueryTable) setTable1).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase - .assertTrue(((QueryTable) setTable2).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(((QueryTable) setTable1).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(((QueryTable) setTable2).satisfied(LogicalClock.DEFAULT.currentStep())); TestCase.assertFalse(dynamicFilter1.satisfied(LogicalClock.DEFAULT.currentStep())); TestCase.assertFalse(dynamicFilter2.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase - .assertFalse(((QueryTable) composed).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(((QueryTable) composed).satisfied(LogicalClock.DEFAULT.currentStep())); // the dynamicFilter1 updates flushed = LiveTableMonitor.DEFAULT.flushOneNotificationForUnitTests(); TestCase.assertTrue(flushed); - TestCase - .assertTrue(((QueryTable) setTable1).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase - .assertTrue(((QueryTable) setTable2).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(((QueryTable) setTable1).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(((QueryTable) setTable2).satisfied(LogicalClock.DEFAULT.currentStep())); 
TestCase.assertTrue(dynamicFilter1.satisfied(LogicalClock.DEFAULT.currentStep())); TestCase.assertFalse(dynamicFilter2.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase - .assertFalse(((QueryTable) composed).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(((QueryTable) composed).satisfied(LogicalClock.DEFAULT.currentStep())); // the dynamicFilter2 updates flushed = LiveTableMonitor.DEFAULT.flushOneNotificationForUnitTests(); TestCase.assertTrue(flushed); - TestCase - .assertTrue(((QueryTable) setTable1).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase - .assertTrue(((QueryTable) setTable2).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(((QueryTable) setTable1).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(((QueryTable) setTable2).satisfied(LogicalClock.DEFAULT.currentStep())); TestCase.assertTrue(dynamicFilter1.satisfied(LogicalClock.DEFAULT.currentStep())); TestCase.assertTrue(dynamicFilter2.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase - .assertFalse(((QueryTable) composed).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertFalse(((QueryTable) composed).satisfied(LogicalClock.DEFAULT.currentStep())); // now that both filters are complete, we can run the composed listener flushed = LiveTableMonitor.DEFAULT.flushOneNotificationForUnitTests(); TestCase.assertTrue(flushed); - TestCase - .assertTrue(((QueryTable) setTable1).satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase - .assertTrue(((QueryTable) setTable2).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(((QueryTable) setTable1).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(((QueryTable) setTable2).satisfied(LogicalClock.DEFAULT.currentStep())); TestCase.assertTrue(dynamicFilter1.satisfied(LogicalClock.DEFAULT.currentStep())); TestCase.assertTrue(dynamicFilter2.satisfied(LogicalClock.DEFAULT.currentStep())); - TestCase - .assertTrue(((QueryTable) 
composed).satisfied(LogicalClock.DEFAULT.currentStep())); + TestCase.assertTrue(((QueryTable) composed).satisfied(LogicalClock.DEFAULT.currentStep())); // and we are done flushed = LiveTableMonitor.DEFAULT.flushOneNotificationForUnitTests(); @@ -399,8 +364,8 @@ public void testWhereInDependency() { TableTools.show(composed); - final Table expected = TableTools.newTable(intCol("A", 3, 4, 5), intCol("B", 6, 8, 10), - charCol("C", 'c', 'd', 'e')); + final Table expected = + TableTools.newTable(intCol("A", 3, 4, 5), intCol("B", 6, 8, 10), charCol("C", 'c', 'd', 'e')); TestCase.assertEquals("", TableTools.diff(composed, expected, 10)); } @@ -408,13 +373,12 @@ public void testWhereInDependency() { @Test public void testWhereDynamicIn() { final QueryTable setTable = testRefreshingTable(i(2, 4, 6, 8), c("X", "A", "B", "C", "B")); - final QueryTable filteredTable = - testRefreshingTable(i(1, 2, 3, 4, 5), c("X", "A", "B", "C", "D", "E")); + final QueryTable filteredTable = testRefreshingTable(i(1, 2, 3, 4, 5), c("X", "A", "B", "C", "D", "E")); - final Table result = LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLocked(() -> filteredTable.whereIn(setTable, "X")); - final Table resultInverse = LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLocked(() -> filteredTable.whereNotIn(setTable, "X")); + final Table result = + LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked(() -> filteredTable.whereIn(setTable, "X")); + final Table resultInverse = + LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked(() -> filteredTable.whereNotIn(setTable, "X")); show(result); assertEquals(3, result.size()); assertEquals(asList("A", "B", "C"), asList((String[]) result.getColumn("X").getDirect())); @@ -427,8 +391,7 @@ public void testWhereDynamicIn() { }); show(result); assertEquals(4, result.size()); - assertEquals(asList("A", "B", "C", "A"), - asList((String[]) result.getColumn("X").getDirect())); + assertEquals(asList("A", "B", "C", "A"), asList((String[]) 
result.getColumn("X").getDirect())); assertEquals(2, resultInverse.size()); assertEquals(asList("D", "E"), asList((String[]) resultInverse.getColumn("X").getDirect())); @@ -438,8 +401,7 @@ public void testWhereDynamicIn() { }); showWithIndex(result); assertEquals(5, result.size()); - assertEquals(asList("A", "B", "C", "D", "A"), - asList((String[]) result.getColumn("X").getDirect())); + assertEquals(asList("A", "B", "C", "D", "A"), asList((String[]) result.getColumn("X").getDirect())); assertEquals(1, resultInverse.size()); assertEquals(asList("E"), asList((String[]) resultInverse.getColumn("X").getDirect())); } @@ -453,10 +415,8 @@ public void testWhereDynamicInIncremental() { final int filteredSize = 500; final Random random = new Random(0); - final QueryTable setTable = getTable(setSize, random, - setInfo = initColumnInfos( - new String[] {"Sym", "intCol", "doubleCol", "charCol", "byteCol", "floatCol", - "longCol", "shortCol"}, + final QueryTable setTable = getTable(setSize, random, setInfo = initColumnInfos( + new String[] {"Sym", "intCol", "doubleCol", "charCol", "byteCol", "floatCol", "longCol", "shortCol"}, new SetGenerator<>("aa", "bb", "bc", "cc", "dd"), new IntGenerator(-100, 100), new DoubleGenerator(0, 100), @@ -465,10 +425,8 @@ public void testWhereDynamicInIncremental() { new SetGenerator<>(1.0f, 2.0f, 3.3f, null), new LongGenerator(0, 1000), new ShortGenerator((short) 500, (short) 600))); - final QueryTable filteredTable = getTable(filteredSize, random, - filteredInfo = initColumnInfos( - new String[] {"Sym", "intCol", "doubleCol", "charCol", "byteCol", "floatCol", - "longCol", "shortCol"}, + final QueryTable filteredTable = getTable(filteredSize, random, filteredInfo = initColumnInfos( + new String[] {"Sym", "intCol", "doubleCol", "charCol", "byteCol", "floatCol", "longCol", "shortCol"}, new SetGenerator<>("aa", "bb", "bc", "cc", "dd", "ee", "ff", "gg", "hh", "ii"), new IntGenerator(-100, 100), new DoubleGenerator(0, 100), @@ -505,18 +463,16 @@ 
public void testWhereDynamicInIncremental() { LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { if (modSet) { - GenerateTableUpdates.generateShiftAwareTableUpdates( - GenerateTableUpdates.DEFAULT_PROFILE, setSize, random, setTable, - setInfo); + GenerateTableUpdates.generateShiftAwareTableUpdates(GenerateTableUpdates.DEFAULT_PROFILE, + setSize, random, setTable, setInfo); } }); validate(en); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { if (modFiltered) { - GenerateTableUpdates.generateShiftAwareTableUpdates( - GenerateTableUpdates.DEFAULT_PROFILE, filteredSize, random, - filteredTable, filteredInfo); + GenerateTableUpdates.generateShiftAwareTableUpdates(GenerateTableUpdates.DEFAULT_PROFILE, + filteredSize, random, filteredTable, filteredInfo); } }); validate(en); @@ -530,26 +486,20 @@ public void testWhereDynamicInIncremental() { public void testWhereRefresh() { final Table t1 = TableTools.newTable(col("A", "b", "c", "d")); assertFalse(t1.isLive()); - final Table t2 = - LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked(() -> t1.where("A in `b`")); + final Table t2 = LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked(() -> t1.where("A in `b`")); assertFalse(t2.isLive()); - final Table t3 = - LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked(() -> t1.whereIn(t1, "A")); + final Table t3 = LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked(() -> t1.whereIn(t1, "A")); assertFalse(t3.isLive()); final Random random = new Random(0); - final QueryTable t4 = - getTable(10, random, initColumnInfos(new String[] {"B"}, new SetGenerator<>("a", "b"))); + final QueryTable t4 = getTable(10, random, initColumnInfos(new String[] {"B"}, new SetGenerator<>("a", "b"))); assertTrue(t4.isLive()); - final Table t5 = - LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked(() -> t4.where("B in `b`")); + final Table t5 = LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked(() -> t4.where("B in `b`")); assertTrue(t5.isLive()); - final Table t6 = - 
LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked(() -> t4.whereIn(t1, "B=A")); + final Table t6 = LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked(() -> t4.whereIn(t1, "B=A")); assertTrue(t6.isLive()); - final Table t7 = - LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked(() -> t1.whereIn(t4, "A=B")); + final Table t7 = LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked(() -> t1.whereIn(t4, "A=B")); assertTrue(t7.isLive()); } @@ -561,16 +511,16 @@ public void testWhereInDiamond() { final Random random = new Random(0); final QueryTable table = getTable(size, random, - filteredInfo = initColumnInfos(new String[] {"Sym", "intCol", "intCol2"}, - new SetGenerator<>("aa", "bb", "bc", "cc", "dd", "ee", "ff", "gg", "hh", "ii"), - new IntGenerator(0, 100), - new IntGenerator(0, 100))); + filteredInfo = initColumnInfos(new String[] {"Sym", "intCol", "intCol2"}, + new SetGenerator<>("aa", "bb", "bc", "cc", "dd", "ee", "ff", "gg", "hh", "ii"), + new IntGenerator(0, 100), + new IntGenerator(0, 100))); final EvalNugget en[] = new EvalNugget[] { new EvalNugget() { public Table e() { - return LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked( - () -> table.whereIn(table.where("intCol % 25 == 0"), "intCol2=intCol")); + return LiveTableMonitor.DEFAULT.exclusiveLock() + .computeLocked(() -> table.whereIn(table.where("intCol % 25 == 0"), "intCol2=intCol")); } }, }; @@ -578,8 +528,8 @@ public Table e() { try { for (int i = 0; i < 100; i++) { LiveTableMonitor.DEFAULT.runWithinUnitTestCycle( - () -> GenerateTableUpdates.generateShiftAwareTableUpdates( - GenerateTableUpdates.DEFAULT_PROFILE, size, random, table, filteredInfo)); + () -> GenerateTableUpdates.generateShiftAwareTableUpdates(GenerateTableUpdates.DEFAULT_PROFILE, + size, random, table, filteredInfo)); validate(en); } } catch (Exception e) { @@ -591,8 +541,8 @@ public Table e() { public void testWhereInDiamond2() { final QueryTable table = testRefreshingTable(i(1, 2, 3), c("x", 1, 2, 3), c("y", 2, 4, 
6)); final Table setTable = table.where("x % 2 == 0").dropColumns("y"); - final Table filteredTable = LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLocked(() -> table.whereIn(setTable, "y=x")); + final Table filteredTable = + LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked(() -> table.whereIn(setTable, "y=x")); TableTools.show(filteredTable); @@ -635,10 +585,10 @@ public void testWhereOneOfIncremental() { final Random random = new Random(0); final QueryTable filteredTable = getTable(setSize, random, - filteredInfo = initColumnInfos(new String[] {"Sym", "intCol", "doubleCol"}, - new SetGenerator<>("aa", "bb", "bc", "cc", "dd", "ee", "ff", "gg", "hh", "ii"), - new IntGenerator(0, 100), - new DoubleGenerator(0, 100))); + filteredInfo = initColumnInfos(new String[] {"Sym", "intCol", "doubleCol"}, + new SetGenerator<>("aa", "bb", "bc", "cc", "dd", "ee", "ff", "gg", "hh", "ii"), + new IntGenerator(0, 100), + new DoubleGenerator(0, 100))); final EvalNugget[] en = new EvalNugget[] { new EvalNugget() { @@ -649,27 +599,25 @@ public Table e() { new EvalNugget() { public Table e() { return filteredTable.whereOneOf(whereClause("Sym in `aa`, `ee`"), - whereClause("intCol % 2 == 0")); + whereClause("intCol % 2 == 0")); } }, new EvalNugget() { public Table e() { - return filteredTable.whereOneOf( - whereClause("intCol % 2 == 0", "intCol % 2 == 1"), - whereClause("Sym in `aa`, `ee`")); + return filteredTable.whereOneOf(whereClause("intCol % 2 == 0", "intCol % 2 == 1"), + whereClause("Sym in `aa`, `ee`")); } }, new EvalNugget() { public Table e() { - return filteredTable.whereOneOf( - whereClause("intCol % 2 == 0", "Sym in `aa`, `ii`"), - whereClause("Sym in `aa`, `ee`")); + return filteredTable.whereOneOf(whereClause("intCol % 2 == 0", "Sym in `aa`, `ii`"), + whereClause("Sym in `aa`, `ee`")); } }, new EvalNugget() { public Table e() { - return filteredTable.whereOneOf(whereClause("intCol % 2 == 0"), - whereClause("intCol % 2 == 1"), whereClause("Sym in `aa`, `ee`")); + 
return filteredTable.whereOneOf(whereClause("intCol % 2 == 0"), whereClause("intCol % 2 == 1"), + whereClause("Sym in `aa`, `ee`")); } }, }; @@ -678,9 +626,9 @@ public Table e() { for (int i = 0; i < 100; i++) { System.out.println("Step = " + i); - LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> GenerateTableUpdates - .generateShiftAwareTableUpdates(GenerateTableUpdates.DEFAULT_PROFILE, - filteredSize, random, filteredTable, filteredInfo)); + LiveTableMonitor.DEFAULT.runWithinUnitTestCycle( + () -> GenerateTableUpdates.generateShiftAwareTableUpdates(GenerateTableUpdates.DEFAULT_PROFILE, + filteredSize, random, filteredTable, filteredInfo)); validate(en); } } catch (Exception e) { @@ -701,8 +649,7 @@ public void testWhereWithExcessiveShifting() { final Random random = new Random(0); final QueryTable growingTable = testRefreshingTable(i(1), c("intCol", 1)); - final QueryTable randomTable = - getTable(setSize, random, filteredInfo = initColumnInfos(new String[] {"intCol"}, + final QueryTable randomTable = getTable(setSize, random, filteredInfo = initColumnInfos(new String[] {"intCol"}, new IntGenerator(0, 1 << 8))); final Table m2 = TableTools.merge(growingTable, randomTable).updateView("intCol=intCol*53"); @@ -716,9 +663,8 @@ public void testWhereWithExcessiveShifting() { LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { addToTable(growingTable, i(fii), c("intCol", fii)); growingTable.notifyListeners(i(fii), i(), i()); - GenerateTableUpdates.generateShiftAwareTableUpdates( - GenerateTableUpdates.DEFAULT_PROFILE, filteredSize, random, randomTable, - filteredInfo); + GenerateTableUpdates.generateShiftAwareTableUpdates(GenerateTableUpdates.DEFAULT_PROFILE, filteredSize, + random, randomTable, filteredInfo); }); validate(en); } @@ -727,8 +673,8 @@ public void testWhereWithExcessiveShifting() { @Test public void testEmptyWhere() { final QueryTable source = testRefreshingTable(i(2, 4, 6, 8), - c("X", "A", "B", "C", "B"), - c("I", 1, 2, 4, 8)); + c("X", "A", 
"B", "C", "B"), + c("I", 1, 2, 4, 8)); final Table filtered = source.where(); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { @@ -750,21 +696,19 @@ public void testWhereBoolean() { final ColumnInfo[] columnInfo; final QueryTable filteredTable = getTable(size, random, - columnInfo = initColumnInfos(new String[] {"Sentinel", "boolCol", "nullBoolCol"}, - new IntGenerator(0, 10000), - new BooleanGenerator(0.5), - new BooleanGenerator(0.5, 0.2))); + columnInfo = initColumnInfos(new String[] {"Sentinel", "boolCol", "nullBoolCol"}, + new IntGenerator(0, 10000), + new BooleanGenerator(0.5), + new BooleanGenerator(0.5, 0.2))); final EvalNugget[] en = new EvalNugget[] { EvalNugget.from(() -> filteredTable.where("boolCol")), EvalNugget.from(() -> filteredTable.where("!boolCol")), EvalNugget.from(() -> SparseSelect.sparseSelect(filteredTable).where("boolCol")), - EvalNugget.from(() -> SparseSelect.sparseSelect(filteredTable).sort("Sentinel") - .where("boolCol")), - EvalNugget.from(() -> SparseSelect.sparseSelect(filteredTable).sort("Sentinel") - .reverse().where("boolCol")), - EvalNugget - .from(() -> filteredTable.updateView("boolCol2=!!boolCol").where("boolCol2")), + EvalNugget.from(() -> SparseSelect.sparseSelect(filteredTable).sort("Sentinel").where("boolCol")), + EvalNugget.from( + () -> SparseSelect.sparseSelect(filteredTable).sort("Sentinel").reverse().where("boolCol")), + EvalNugget.from(() -> filteredTable.updateView("boolCol2=!!boolCol").where("boolCol2")), }; for (int step = 0; step < 100; ++step) { @@ -814,9 +758,8 @@ private TestChunkFilter(ChunkFilter actualFilter, long sleepDurationNanos) { } @Override - public void filter(Chunk values, - LongChunk keys, - WritableLongChunk results) { + public void filter(Chunk values, LongChunk keys, + WritableLongChunk results) { if (++invokes == 1) { latch.countDown(); } @@ -847,8 +790,8 @@ public void testInterFilterInterruption() { QueryScope.addParam("fastCounter", fastCounter); final long start = 
System.currentTimeMillis(); - final Table filtered = tableToFilter.where("slowCounter.applyAsInt(X) % 2 == 0", - "fastCounter.applyAsInt(X) % 3 == 0"); + final Table filtered = + tableToFilter.where("slowCounter.applyAsInt(X) % 2 == 0", "fastCounter.applyAsInt(X) % 3 == 0"); final long end = System.currentTimeMillis(); System.out.println("Duration: " + (end - start)); @@ -864,8 +807,7 @@ public void testInterFilterInterruption() { final Thread t = new Thread(() -> { final long start1 = System.currentTimeMillis(); try { - tableToFilter.where("slowCounter.applyAsInt(X) % 2 == 0", - "fastCounter.applyAsInt(X) % 3 == 0"); + tableToFilter.where("slowCounter.applyAsInt(X) % 2 == 0", "fastCounter.applyAsInt(X) % 3 == 0"); } catch (Exception e) { caught.setValue(e); } @@ -903,13 +845,13 @@ public void testChunkFilterInterruption() { final Table tableToFilter = TableTools.emptyTable(2_000_000).update("X=i"); final TestChunkFilter slowCounter = - new TestChunkFilter(IntRangeComparator.makeIntFilter(0, 1_000_000, true, false), 100); + new TestChunkFilter(IntRangeComparator.makeIntFilter(0, 1_000_000, true, false), 100); QueryScope.addParam("slowCounter", slowCounter); final long start = System.currentTimeMillis(); - final Index result = ChunkFilter.applyChunkFilter(tableToFilter.getIndex(), - tableToFilter.getColumnSource("X"), false, slowCounter); + final Index result = ChunkFilter.applyChunkFilter(tableToFilter.getIndex(), tableToFilter.getColumnSource("X"), + false, slowCounter); final long end = System.currentTimeMillis(); System.out.println("Duration: " + (end - start)); @@ -922,8 +864,8 @@ public void testChunkFilterInterruption() { final Thread t = new Thread(() -> { final long start1 = System.currentTimeMillis(); try { - ChunkFilter.applyChunkFilter(tableToFilter.getIndex(), - tableToFilter.getColumnSource("X"), false, slowCounter); + ChunkFilter.applyChunkFilter(tableToFilter.getIndex(), tableToFilter.getColumnSource("X"), false, + slowCounter); } catch (Exception e) 
{ caught.setValue(e); } @@ -977,31 +919,29 @@ public void testComparableBinarySearch() { final ColumnInfo[] columnInfo; final QueryTable table = getTable(size, random, - columnInfo = initColumnInfos(new String[] {"BD1", "D2", "L3", "CH", "DT"}, - new BigDecimalGenerator(BigInteger.ONE, BigInteger.TEN), - new DoubleGenerator(0.0, 100.0, 0, 0, 0, 0), - new LongGenerator(-100, 100, 0.01), - new CharGenerator('A', 'Z', 0.1), - new UnsortedDateTimeGenerator(DBTimeUtils.convertDateTime("2020-01-01T00:00:00 NY"), - DBTimeUtils.convertDateTime("2020-01-01T01:00:00 NY")))); - final String bigIntConversion = - "BI4=" + getClass().getCanonicalName() + ".convertToBigInteger(L3)"; - final Table augmentedInts = table.update(bigIntConversion, "D5=(double)L3", "I6=(int)L3", - "S7=(short)L3", "B8=(byte)L3"); + columnInfo = initColumnInfos(new String[] {"BD1", "D2", "L3", "CH", "DT"}, + new BigDecimalGenerator(BigInteger.ONE, BigInteger.TEN), + new DoubleGenerator(0.0, 100.0, 0, 0, 0, 0), + new LongGenerator(-100, 100, 0.01), + new CharGenerator('A', 'Z', 0.1), + new UnsortedDateTimeGenerator(DBTimeUtils.convertDateTime("2020-01-01T00:00:00 NY"), + DBTimeUtils.convertDateTime("2020-01-01T01:00:00 NY")))); + final String bigIntConversion = "BI4=" + getClass().getCanonicalName() + ".convertToBigInteger(L3)"; + final Table augmentedInts = + table.update(bigIntConversion, "D5=(double)L3", "I6=(int)L3", "S7=(short)L3", "B8=(byte)L3"); final Table augmentedFloats = table.update("F6=(float)D2"); final Table sortedBD1 = table.sort("BD1"); final Table sortedDT = table.sort("DT"); final Table sortedCH = table.sort("CH"); - final Table sortedD2 = - multiplyAssertSorted(augmentedFloats.sort("D2"), SortingOrder.Ascending, "F6"); - final Table sortedL3 = multiplyAssertSorted(augmentedInts.sort("L3"), - SortingOrder.Ascending, "BI4", "D5", "I6", "S7", "B8"); + final Table sortedD2 = multiplyAssertSorted(augmentedFloats.sort("D2"), SortingOrder.Ascending, "F6"); + final Table sortedL3 = + 
multiplyAssertSorted(augmentedInts.sort("L3"), SortingOrder.Ascending, "BI4", "D5", "I6", "S7", "B8"); final Table sortedBD1R = table.sortDescending("BD1"); - final Table sortedD2R = multiplyAssertSorted(augmentedFloats.sortDescending("D2"), - SortingOrder.Descending, "F6"); - final Table sortedL3R = multiplyAssertSorted(augmentedInts.sortDescending("L3"), - SortingOrder.Descending, "BI4", "D5", "I6", "S7", "B8"); + final Table sortedD2R = + multiplyAssertSorted(augmentedFloats.sortDescending("D2"), SortingOrder.Descending, "F6"); + final Table sortedL3R = multiplyAssertSorted(augmentedInts.sortDescending("L3"), SortingOrder.Descending, "BI4", + "D5", "I6", "S7", "B8"); final BigDecimal two = BigDecimal.valueOf(2); final BigDecimal nine = BigDecimal.valueOf(9); @@ -1012,84 +952,66 @@ public void testComparableBinarySearch() { QueryScope.addParam("nine", nine); final EvalNuggetInterface[] en = new EvalNuggetInterface[] { - new TableComparator( - sortedBD1.where("BD1.compareTo(two) > 0 && BD1.compareTo(nine) < 0"), - sortedBD1 - .where(ComparableRangeFilter.makeForTest("BD1", two, nine, false, false))), + new TableComparator(sortedBD1.where("BD1.compareTo(two) > 0 && BD1.compareTo(nine) < 0"), + sortedBD1.where(ComparableRangeFilter.makeForTest("BD1", two, nine, false, false))), new TableComparator(sortedD2.where("D2 > 50 && D2 < 75"), - sortedD2.where(new DoubleRangeFilter("D2", 50.0, 75.0, false, false))), + sortedD2.where(new DoubleRangeFilter("D2", 50.0, 75.0, false, false))), new TableComparator(sortedL3.where("L3 > -50 && L3 < 80"), - sortedL3.where(new LongRangeFilter("L3", -50, 80, false, false))), + sortedL3.where(new LongRangeFilter("L3", -50, 80, false, false))), new TableComparator(sortedL3.where("L3 > -50 && L3 <= 80"), - sortedL3.where(new LongRangeFilter("L3", -50, 80, false, true))), + sortedL3.where(new LongRangeFilter("L3", -50, 80, false, true))), new TableComparator(sortedL3.where("L3 >= -50 && L3 < 80"), - sortedL3.where(new LongRangeFilter("L3", 
-50, 80, true, false))), + sortedL3.where(new LongRangeFilter("L3", -50, 80, true, false))), new TableComparator(sortedL3.where("L3 >= -50 && L3 <= 80"), - sortedL3.where(new LongRangeFilter("L3", -50, 80, true, true))), + sortedL3.where(new LongRangeFilter("L3", -50, 80, true, true))), new TableComparator(sortedL3.where("L3 * 2 >= -100"), sortedL3.where("L3 >= -50")), new TableComparator(sortedL3.where("L3 * 2 > -100"), sortedL3.where("L3 > -50")), new TableComparator(sortedL3.where("L3 * 2 < -100"), sortedL3.where("L3 < -50")), new TableComparator(sortedL3.where("L3 * 2 <= -100"), sortedL3.where("L3 <= -50")), - new TableComparator( - sortedBD1R.where("BD1.compareTo(two) > 0 && BD1.compareTo(nine) < 0"), - sortedBD1R - .where(ComparableRangeFilter.makeForTest("BD1", two, nine, false, false))), + new TableComparator(sortedBD1R.where("BD1.compareTo(two) > 0 && BD1.compareTo(nine) < 0"), + sortedBD1R.where(ComparableRangeFilter.makeForTest("BD1", two, nine, false, false))), new TableComparator(sortedD2R.where("D2 > 50 && D2 < 75"), - sortedD2R.where(new DoubleRangeFilter("D2", 50.0, 75.0, false, false))), - new TableComparator(sortedD2R.where("D2 > 50 && D2 <= 75"), - sortedD2R.where("F6 > 50", "F6 <= 75")), + sortedD2R.where(new DoubleRangeFilter("D2", 50.0, 75.0, false, false))), + new TableComparator(sortedD2R.where("D2 > 50 && D2 <= 75"), sortedD2R.where("F6 > 50", "F6 <= 75")), new TableComparator(sortedL3R.where("L3 > -50 && L3 < 80"), - sortedL3R.where(new LongRangeFilter("L3", -50, 80, false, false))), + sortedL3R.where(new LongRangeFilter("L3", -50, 80, false, false))), new TableComparator(sortedL3R.where("L3 > -50 && L3 <= 80"), - sortedL3R.where(new LongRangeFilter("L3", -50, 80, false, true))), + sortedL3R.where(new LongRangeFilter("L3", -50, 80, false, true))), new TableComparator(sortedL3R.where("L3 >= -50 && L3 < 80"), - sortedL3R.where(new LongRangeFilter("L3", -50, 80, true, false))), + sortedL3R.where(new LongRangeFilter("L3", -50, 80, true, 
false))), new TableComparator(sortedL3R.where("L3 >= -50 && L3 <= 80"), - sortedL3R.where(new LongRangeFilter("L3", -50, 80, true, true))), - new TableComparator(sortedL3R.where("L3 * 2 >= -100"), - sortedL3R.where("L3 >= -50")), + sortedL3R.where(new LongRangeFilter("L3", -50, 80, true, true))), + new TableComparator(sortedL3R.where("L3 * 2 >= -100"), sortedL3R.where("L3 >= -50")), new TableComparator(sortedL3R.where("L3 * 2 > -100"), sortedL3R.where("L3 > -50")), new TableComparator(sortedL3R.where("L3 * 2 < -100"), sortedL3R.where("L3 < -50")), - new TableComparator(sortedL3R.where("L3 * 2 <= -100"), - sortedL3R.where("L3 <= -50")), + new TableComparator(sortedL3R.where("L3 * 2 <= -100"), sortedL3R.where("L3 <= -50")), new TableComparator(sortedL3.where("L3 >= -50"), sortedL3.where("D5 >= -50")), new TableComparator(sortedL3.where("L3 > -100"), sortedL3.where("D5 > -100")), new TableComparator(sortedL3.where("L3 < -50"), sortedL3.where("D5 < -50")), new TableComparator(sortedL3.where("L3 <= -50"), sortedL3.where("D5 <= -50")), new TableComparator(sortedL3.where("L3 > 10 && L3 < 20"), - sortedL3.where(ComparableRangeFilter.makeForTest("BI4", BigInteger.valueOf(10), - BigInteger.valueOf(20), false, false))), + sortedL3.where(ComparableRangeFilter.makeForTest("BI4", BigInteger.valueOf(10), + BigInteger.valueOf(20), false, false))), new TableComparator(sortedL3R.where("L3 > 10 && L3 < 20"), - sortedL3R.where(ComparableRangeFilter.makeForTest("BI4", BigInteger.valueOf(10), - BigInteger.valueOf(20), false, false))), - new TableComparator(sortedL3.where("L3 <= 20"), "L3", sortedL3.where("BI4 <= 20"), - "BI4"), - new TableComparator(sortedL3R.where("L3 > 20"), "L3", sortedL3R.where("BI4 > 20"), - "BI4"), - new TableComparator(sortedL3.where("L3 < 20"), "L3", sortedL3.where("BI4 < 20"), - "BI4"), - new TableComparator(sortedL3R.where("L3 >= 20"), "L3", sortedL3R.where("BI4 >= 20"), - "BI4"), - new TableComparator(sortedL3R.where("L3 >= 20 && true"), - sortedL3R.where("I6 
>= 20")), - new TableComparator(sortedL3R.where("L3 >= 20 && true"), - sortedL3R.where("B8 >= 20")), - new TableComparator(sortedL3R.where("L3 >= 20 && true"), - sortedL3R.where("S7 >= 20")), + sortedL3R.where(ComparableRangeFilter.makeForTest("BI4", BigInteger.valueOf(10), + BigInteger.valueOf(20), false, false))), + new TableComparator(sortedL3.where("L3 <= 20"), "L3", sortedL3.where("BI4 <= 20"), "BI4"), + new TableComparator(sortedL3R.where("L3 > 20"), "L3", sortedL3R.where("BI4 > 20"), "BI4"), + new TableComparator(sortedL3.where("L3 < 20"), "L3", sortedL3.where("BI4 < 20"), "BI4"), + new TableComparator(sortedL3R.where("L3 >= 20"), "L3", sortedL3R.where("BI4 >= 20"), "BI4"), + new TableComparator(sortedL3R.where("L3 >= 20 && true"), sortedL3R.where("I6 >= 20")), + new TableComparator(sortedL3R.where("L3 >= 20 && true"), sortedL3R.where("B8 >= 20")), + new TableComparator(sortedL3R.where("L3 >= 20 && true"), sortedL3R.where("S7 >= 20")), new TableComparator(sortedL3R.where("L3 < 20 && true"), sortedL3R.where("I6 < 20")), new TableComparator(sortedL3R.where("L3 < 20 && true"), sortedL3R.where("B8 < 20")), new TableComparator(sortedL3R.where("L3 < 20 && true"), sortedL3R.where("S7 < 20")), - new TableComparator( - sortedDT.where("DT == null || DT.getNanos() < " + filterTime.getNanos()), - sortedDT.where("DT < '" + filterTimeString + "'")), - new TableComparator( - sortedDT.where("DT != null && DT.getNanos() >= " + filterTime.getNanos()), - sortedDT.where("DT >= '" + filterTimeString + "'")), + new TableComparator(sortedDT.where("DT == null || DT.getNanos() < " + filterTime.getNanos()), + sortedDT.where("DT < '" + filterTimeString + "'")), + new TableComparator(sortedDT.where("DT != null && DT.getNanos() >= " + filterTime.getNanos()), + sortedDT.where("DT >= '" + filterTimeString + "'")), new TableComparator(sortedCH.where("true && CH > 'M'"), sortedCH.where("CH > 'M'")), - new TableComparator(sortedCH.where("CH==null || CH <= 'O'"), - sortedCH.where("CH <= 
'O'")), - new TableComparator(sortedCH.where("true && CH >= 'Q'"), - sortedCH.where("CH >= 'Q'")), + new TableComparator(sortedCH.where("CH==null || CH <= 'O'"), sortedCH.where("CH <= 'O'")), + new TableComparator(sortedCH.where("true && CH >= 'Q'"), sortedCH.where("CH >= 'Q'")), new TableComparator(sortedCH.where("true && CH < 'F'"), sortedCH.where("CH < 'F'")), }; @@ -1147,20 +1069,15 @@ public void testCharRangeFilter() { TableTools.showWithIndex(rangeFiltered); TableTools.showWithIndex(standardFiltered); assertTableEquals(rangeFiltered, standardFiltered); - assertTableEquals(backwards.where("CH < '" + array[5] + "'"), - backwards.where("'" + array[5] + "' > CH")); - assertTableEquals(backwards.where("CH <= '" + array[5] + "'"), - backwards.where("'" + array[5] + "' >= CH")); - assertTableEquals(backwards.where("CH > '" + array[5] + "'"), - backwards.where("'" + array[5] + "' < CH")); - assertTableEquals(backwards.where("CH >= '" + array[5] + "'"), - backwards.where("'" + array[5] + "' <= CH")); + assertTableEquals(backwards.where("CH < '" + array[5] + "'"), backwards.where("'" + array[5] + "' > CH")); + assertTableEquals(backwards.where("CH <= '" + array[5] + "'"), backwards.where("'" + array[5] + "' >= CH")); + assertTableEquals(backwards.where("CH > '" + array[5] + "'"), backwards.where("'" + array[5] + "' < CH")); + assertTableEquals(backwards.where("CH >= '" + array[5] + "'"), backwards.where("'" + array[5] + "' <= CH")); } public void testSingleSidedRangeFilterSimple() { final Table table = TableTools.emptyTable(10).update("L1=ii"); - final String bigIntConversion = - "BI2=" + getClass().getCanonicalName() + ".convertToBigInteger(L1)"; + final String bigIntConversion = "BI2=" + getClass().getCanonicalName() + ".convertToBigInteger(L1)"; final Table augmented = table.update(bigIntConversion).sort("BI2"); final Table augmentedBackwards = table.update(bigIntConversion).sortDescending("BI2"); @@ -1170,11 +1087,9 @@ public void 
testSingleSidedRangeFilterSimple() { assertTableEquals(augmented.where("L1 >= 5"), augmented.where("BI2 >= 5")); assertTableEquals(augmentedBackwards.where("L1 < 5"), augmentedBackwards.where("BI2 < 5")); - assertTableEquals(augmentedBackwards.where("L1 <= 5"), - augmentedBackwards.where("BI2 <= 5")); + assertTableEquals(augmentedBackwards.where("L1 <= 5"), augmentedBackwards.where("BI2 <= 5")); assertTableEquals(augmentedBackwards.where("L1 > 5"), augmentedBackwards.where("BI2 > 5")); - assertTableEquals(augmentedBackwards.where("L1 >= 5"), - augmentedBackwards.where("BI2 >= 5")); + assertTableEquals(augmentedBackwards.where("L1 >= 5"), augmentedBackwards.where("BI2 >= 5")); } public void testComparableRangeFilter() { @@ -1184,10 +1099,9 @@ public void testComparableRangeFilter() { final ColumnInfo[] columnInfo; final QueryTable table = getTable(size, random, - columnInfo = initColumnInfos(new String[] {"L1"}, new LongGenerator(90, 110, 0.1))); + columnInfo = initColumnInfos(new String[] {"L1"}, new LongGenerator(90, 110, 0.1))); - final String bigIntConversion = - "BI2=" + getClass().getCanonicalName() + ".convertToBigInteger(L1)"; + final String bigIntConversion = "BI2=" + getClass().getCanonicalName() + ".convertToBigInteger(L1)"; final Table augmented = table.update(bigIntConversion); final EvalNuggetInterface[] en = new EvalNuggetInterface[] { @@ -1196,17 +1110,17 @@ public void testComparableRangeFilter() { new TableComparator(augmented.where("L1 >= 100"), augmented.where("BI2 >= 100")), new TableComparator(augmented.where("L1 <= 100"), augmented.where("BI2 <= 100")), new TableComparator(augmented.where("L1 > 95 && L1 <= 100"), - augmented.where(ComparableRangeFilter.makeForTest("BI2", BigInteger.valueOf(95), - BigInteger.valueOf(100), false, true))), + augmented.where(ComparableRangeFilter.makeForTest("BI2", BigInteger.valueOf(95), + BigInteger.valueOf(100), false, true))), new TableComparator(augmented.where("L1 > 95 && L1 < 100"), - 
augmented.where(ComparableRangeFilter.makeForTest("BI2", BigInteger.valueOf(95), - BigInteger.valueOf(100), false, false))), + augmented.where(ComparableRangeFilter.makeForTest("BI2", BigInteger.valueOf(95), + BigInteger.valueOf(100), false, false))), new TableComparator(augmented.where("L1 >= 95 && L1 < 100"), - augmented.where(ComparableRangeFilter.makeForTest("BI2", BigInteger.valueOf(95), - BigInteger.valueOf(100), true, false))), + augmented.where(ComparableRangeFilter.makeForTest("BI2", BigInteger.valueOf(95), + BigInteger.valueOf(100), true, false))), new TableComparator(augmented.where("L1 >= 95 && L1 <= 100"), - augmented.where(ComparableRangeFilter.makeForTest("BI2", BigInteger.valueOf(95), - BigInteger.valueOf(100), true, true))), + augmented.where(ComparableRangeFilter.makeForTest("BI2", BigInteger.valueOf(95), + BigInteger.valueOf(100), true, true))), }; for (int i = 0; i < 500; i++) { diff --git a/DB/src/test/java/io/deephaven/db/v2/QueryTableWouldMatchTest.java b/DB/src/test/java/io/deephaven/db/v2/QueryTableWouldMatchTest.java index 17239407cab..cd24f39cf2a 100644 --- a/DB/src/test/java/io/deephaven/db/v2/QueryTableWouldMatchTest.java +++ b/DB/src/test/java/io/deephaven/db/v2/QueryTableWouldMatchTest.java @@ -20,28 +20,28 @@ public class QueryTableWouldMatchTest extends QueryTableTestBase { public void testMatch() { final QueryTable t1 = testRefreshingTable( - c("Text", "Hey", "Yo", "Lets go", "Dog", "Cat", "Cheese"), - c("Number", 0, 1, 2, 3, 4, 5), - c("Bool", true, false, true, true, false, false)); + c("Text", "Hey", "Yo", "Lets go", "Dog", "Cat", "Cheese"), + c("Number", 0, 1, 2, 3, 4, 5), + c("Bool", true, false, true, true, false, false)); - final QueryTable t1Matched = (QueryTable) t1.wouldMatch("HasAnE=Text.contains(`e`)", - "isGt3=Number > 3", "Compound=Bool || Text.length() < 5"); + final QueryTable t1Matched = (QueryTable) t1.wouldMatch("HasAnE=Text.contains(`e`)", "isGt3=Number > 3", + "Compound=Bool || Text.length() < 5"); final 
Listener t1MatchedListener = new ListenerWithGlobals(t1Matched); t1Matched.listenForUpdates(t1MatchedListener); show(t1Matched); assertEquals(Arrays.asList(true, false, true, false, false, true), - Arrays.asList(t1Matched.getColumn("HasAnE").get(0, 6))); + Arrays.asList(t1Matched.getColumn("HasAnE").get(0, 6))); assertEquals(Arrays.asList(false, false, false, false, true, true), - Arrays.asList(t1Matched.getColumn("isGt3").get(0, 6))); + Arrays.asList(t1Matched.getColumn("isGt3").get(0, 6))); assertEquals(Arrays.asList(true, true, true, true, true, false), - Arrays.asList(t1Matched.getColumn("Compound").get(0, 6))); + Arrays.asList(t1Matched.getColumn("Compound").get(0, 6))); // Add LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { addToTable(t1, i(7, 9), c("Text", "Cake", "Zips For Fun"), - c("Number", 6, 1), - c("Bool", false, false)); + c("Number", 6, 1), + c("Bool", false, false)); t1.notifyListeners(i(7, 9), i(), i()); }); @@ -49,11 +49,11 @@ public void testMatch() { assertEquals(modified, i()); assertEquals(removed, i()); assertEquals(Arrays.asList(true, false, true, false, false, true, true, false), - Arrays.asList(t1Matched.getColumn("HasAnE").get(0, 8))); + Arrays.asList(t1Matched.getColumn("HasAnE").get(0, 8))); assertEquals(Arrays.asList(false, false, false, false, true, true, true, false), - Arrays.asList(t1Matched.getColumn("isGt3").get(0, 8))); + Arrays.asList(t1Matched.getColumn("isGt3").get(0, 8))); assertEquals(Arrays.asList(true, true, true, true, true, false, true, false), - Arrays.asList(t1Matched.getColumn("Compound").get(0, 8))); + Arrays.asList(t1Matched.getColumn("Compound").get(0, 8))); // Remove LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { @@ -65,18 +65,18 @@ public void testMatch() { assertEquals(modified, i()); assertEquals(removed, i(1, 3)); assertEquals(Arrays.asList(true, true, false, true, true, false), - Arrays.asList(t1Matched.getColumn("HasAnE").get(0, 8))); + Arrays.asList(t1Matched.getColumn("HasAnE").get(0, 
8))); assertEquals(Arrays.asList(false, false, true, true, true, false), - Arrays.asList(t1Matched.getColumn("isGt3").get(0, 8))); + Arrays.asList(t1Matched.getColumn("isGt3").get(0, 8))); assertEquals(Arrays.asList(true, true, true, false, true, false), - Arrays.asList(t1Matched.getColumn("Compound").get(0, 8))); + Arrays.asList(t1Matched.getColumn("Compound").get(0, 8))); // Modify LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { addToTable(t1, i(4, 5), - c("Text", "Kittie", "Bacon"), - c("Number", 2, 1), - c("Bool", true, true)); + c("Text", "Kittie", "Bacon"), + c("Number", 2, 1), + c("Bool", true, true)); t1.notifyListeners(i(), i(), i(4, 5)); }); @@ -84,18 +84,18 @@ public void testMatch() { assertEquals(modified, i(4, 5)); assertEquals(removed, i()); assertEquals(Arrays.asList(true, true, true, false, true, false), - Arrays.asList(t1Matched.getColumn("HasAnE").get(0, 8))); + Arrays.asList(t1Matched.getColumn("HasAnE").get(0, 8))); assertEquals(Arrays.asList(false, false, false, false, true, false), - Arrays.asList(t1Matched.getColumn("isGt3").get(0, 8))); + Arrays.asList(t1Matched.getColumn("isGt3").get(0, 8))); assertEquals(Arrays.asList(true, true, true, true, true, false), - Arrays.asList(t1Matched.getColumn("Compound").get(0, 8))); + Arrays.asList(t1Matched.getColumn("Compound").get(0, 8))); // All 3 LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { addToTable(t1, i(0, 1, 4, 11), - c("Text", "Apple", "Bagel", "Boat", "YAY"), - c("Number", 100, -200, 300, 400), - c("Bool", true, false, false, true)); + c("Text", "Apple", "Bagel", "Boat", "YAY"), + c("Number", 100, -200, 300, 400), + c("Bool", true, false, false, true)); removeRows(t1, i(9, 5)); t1.notifyListeners(i(1, 11), i(9, 5), i(0, 4)); }); @@ -104,11 +104,11 @@ public void testMatch() { assertEquals(modified, i(0, 4)); assertEquals(removed, i(9, 5)); assertEquals(Arrays.asList(true, true, true, false, true, false), - Arrays.asList(t1Matched.getColumn("HasAnE").get(0, 11))); + 
Arrays.asList(t1Matched.getColumn("HasAnE").get(0, 11))); assertEquals(Arrays.asList(true, false, false, true, true, true), - Arrays.asList(t1Matched.getColumn("isGt3").get(0, 11))); + Arrays.asList(t1Matched.getColumn("isGt3").get(0, 11))); assertEquals(Arrays.asList(true, false, true, true, true, true), - Arrays.asList(t1Matched.getColumn("Compound").get(0, 11))); + Arrays.asList(t1Matched.getColumn("Compound").get(0, 11))); } public void testMatchRefilter() { @@ -118,26 +118,26 @@ public void testMatchRefilter() { private void doTestMatchRefilter(boolean isLive) { final QueryTable t1 = testRefreshingTable( - c("Text", "Hey", "Yo", "Lets go", "Dog", "Cat", "Cheese"), - c("Number", 0, 1, 2, 3, 4, 5), - c("Bool", true, false, true, true, false, false)); + c("Text", "Hey", "Yo", "Lets go", "Dog", "Cat", "Cheese"), + c("Number", 0, 1, 2, 3, 4, 5), + c("Bool", true, false, true, true, false, false)); t1.setRefreshing(isLive); final QueryTable textTable = testRefreshingTable(c("Text", "Dog", "Cat")); final QueryTable numberTable = testRefreshingTable(c("Number", 0, 5)); - final WouldMatchPair sp1 = new WouldMatchPair("InText", - new DynamicWhereFilter(textTable, true, new MatchPair("Text", "Text"))); + final WouldMatchPair sp1 = + new WouldMatchPair("InText", new DynamicWhereFilter(textTable, true, new MatchPair("Text", "Text"))); final WouldMatchPair sp2 = new WouldMatchPair("InNum", - new DynamicWhereFilter(numberTable, true, new MatchPair("Number", "Number"))); + new DynamicWhereFilter(numberTable, true, new MatchPair("Number", "Number"))); final QueryTable t1Matched = (QueryTable) t1.wouldMatch(sp1, sp2); show(t1Matched); assertEquals(Arrays.asList(false, false, false, true, true, false), - Arrays.asList(t1Matched.getColumn("InText").get(0, 6))); + Arrays.asList(t1Matched.getColumn("InText").get(0, 6))); assertEquals(Arrays.asList(true, false, false, false, false, true), - Arrays.asList(t1Matched.getColumn("InNum").get(0, 6))); + 
Arrays.asList(t1Matched.getColumn("InNum").get(0, 6))); // Tick one filter table LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { @@ -146,9 +146,9 @@ private void doTestMatchRefilter(boolean isLive) { }); assertEquals(Arrays.asList(false, true, false, false, true, true), - Arrays.asList(t1Matched.getColumn("InText").get(0, 6))); + Arrays.asList(t1Matched.getColumn("InText").get(0, 6))); assertEquals(Arrays.asList(true, false, false, false, false, true), - Arrays.asList(t1Matched.getColumn("InNum").get(0, 6))); + Arrays.asList(t1Matched.getColumn("InNum").get(0, 6))); // Tick both of them LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { @@ -161,9 +161,9 @@ private void doTestMatchRefilter(boolean isLive) { }); assertEquals(Arrays.asList(true, false, true, false, true, false), - Arrays.asList(t1Matched.getColumn("InText").get(0, 6))); + Arrays.asList(t1Matched.getColumn("InText").get(0, 6))); assertEquals(Arrays.asList(false, false, true, false, false, true), - Arrays.asList(t1Matched.getColumn("InNum").get(0, 6))); + Arrays.asList(t1Matched.getColumn("InNum").get(0, 6))); if (isLive) { // Tick both of them, and the table itself @@ -172,9 +172,9 @@ private void doTestMatchRefilter(boolean isLive) { textTable.notifyListeners(i(), i(), i(0, 2)); addToTable(t1, i(0, 1, 4, 11), - c("Text", "Yo", "Hey", "Boat", "Yo"), - c("Number", 100, 1, 300, 0), - c("Bool", true, false, false, true)); + c("Text", "Yo", "Hey", "Boat", "Yo"), + c("Number", 100, 1, 300, 0), + c("Bool", true, false, false, true)); removeRows(t1, i(3)); t1.notifyListeners(i(11), i(3), i(0, 1, 4)); @@ -187,28 +187,28 @@ private void doTestMatchRefilter(boolean isLive) { show(numberTable); assertEquals(Arrays.asList(true, false, false, false, false, true), - Arrays.asList(t1Matched.getColumn("InText").get(0, 11))); + Arrays.asList(t1Matched.getColumn("InText").get(0, 11))); assertEquals(Arrays.asList(false, true, true, false, true, true), - Arrays.asList(t1Matched.getColumn("InNum").get(0, 
11))); + Arrays.asList(t1Matched.getColumn("InNum").get(0, 11))); } } public void testMatchIterative() { final Random random = new Random(0xDEADDEAD); final ColumnInfo[] columnInfo = - initColumnInfos(new String[] {"Sym", "Stringy", "Inty", "Floaty", "Charry", "Booly"}, - new SetGenerator<>("AAPL", "GOOG", "GLD", "VXX"), - new StringGenerator(0xFEEDFEED), - new IntGenerator(10, 100), - new FloatGenerator(10.0f, 200.f), - new CharGenerator('A', 'Z'), - new BooleanGenerator()); + initColumnInfos(new String[] {"Sym", "Stringy", "Inty", "Floaty", "Charry", "Booly"}, + new SetGenerator<>("AAPL", "GOOG", "GLD", "VXX"), + new StringGenerator(0xFEEDFEED), + new IntGenerator(10, 100), + new FloatGenerator(10.0f, 200.f), + new CharGenerator('A', 'Z'), + new BooleanGenerator()); final QueryTable queryTable = getTable(500, random, columnInfo); final EvalNugget[] en = new EvalNugget[] { EvalNugget.from(() -> queryTable.wouldMatch("hasAG=Sym.contains(`G`)", - "BigHero6=Stringy.length()>=6 && Booly", "Mathy=(Inty+Floaty)/2 > 40")), + "BigHero6=Stringy.length()>=6 && Booly", "Mathy=(Inty+Floaty)/2 > 40")), }; for (int i = 0; i < 100; i++) { @@ -225,32 +225,30 @@ public void testMatchDynamicIterative() { final int filteredSize = 500; final Random random = new Random(0); - final QueryTable symSetTableBase = - getTable(setSize, random, symSetInfo = initColumnInfos(new String[] {"Sym"}, + final QueryTable symSetTableBase = getTable(setSize, random, symSetInfo = initColumnInfos(new String[] {"Sym"}, new SetGenerator<>("aa", "bb", "bc", "cc", "dd"))); final QueryTable numSetTableBase = - getTable(setSize, random, numSetInfo = initColumnInfos(new String[] {"intCol"}, - new IntGenerator(0, 100))); + getTable(setSize, random, numSetInfo = initColumnInfos(new String[] {"intCol"}, + new IntGenerator(0, 100))); final QueryTable symSetTable = (QueryTable) LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLocked(() -> symSetTableBase.selectDistinct("Sym")); + .computeLocked(() -> 
symSetTableBase.selectDistinct("Sym")); final QueryTable numSetTable = (QueryTable) LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLocked(() -> numSetTableBase.selectDistinct("intCol")); + .computeLocked(() -> numSetTableBase.selectDistinct("intCol")); final QueryTable matchTable = getTable(filteredSize, random, - filteredInfo = initColumnInfos(new String[] {"Sym", "intCol", "doubleCol"}, - new SetGenerator<>("aa", "bb", "bc", "cc", "dd", "ee", "ff", "gg", "hh", "ii"), - new IntGenerator(0, 100), - new DoubleGenerator(0, 100))); + filteredInfo = initColumnInfos(new String[] {"Sym", "intCol", "doubleCol"}, + new SetGenerator<>("aa", "bb", "bc", "cc", "dd", "ee", "ff", "gg", "hh", "ii"), + new IntGenerator(0, 100), + new DoubleGenerator(0, 100))); final EvalNugget[] en = new EvalNugget[] { EvalNugget.from(() -> { final WouldMatchPair sp1 = new WouldMatchPair("InSym", - new DynamicWhereFilter(symSetTable, true, new MatchPair("Sym", "Sym"))); - final WouldMatchPair sp2 = - new WouldMatchPair("InInt", new DynamicWhereFilter(numSetTable, true, - new MatchPair("intCol", "intCol"))); + new DynamicWhereFilter(symSetTable, true, new MatchPair("Sym", "Sym"))); + final WouldMatchPair sp2 = new WouldMatchPair("InInt", + new DynamicWhereFilter(numSetTable, true, new MatchPair("intCol", "intCol"))); return matchTable.wouldMatch(sp1, sp2); }) }; @@ -264,15 +262,13 @@ public void testMatchDynamicIterative() { LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { if (modSet) { if (doit == 0 || doit == 2) { - GenerateTableUpdates.generateShiftAwareTableUpdates( - GenerateTableUpdates.DEFAULT_PROFILE, setSize, random, - symSetTableBase, symSetInfo); + GenerateTableUpdates.generateShiftAwareTableUpdates(GenerateTableUpdates.DEFAULT_PROFILE, + setSize, random, symSetTableBase, symSetInfo); } if (doit == 1 || doit == 2) { - GenerateTableUpdates.generateShiftAwareTableUpdates( - GenerateTableUpdates.DEFAULT_PROFILE, setSize, random, - numSetTableBase, numSetInfo); + 
GenerateTableUpdates.generateShiftAwareTableUpdates(GenerateTableUpdates.DEFAULT_PROFILE, + setSize, random, numSetTableBase, numSetInfo); } } }); @@ -280,9 +276,8 @@ public void testMatchDynamicIterative() { LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { if (modFiltered) { - GenerateTableUpdates.generateShiftAwareTableUpdates( - GenerateTableUpdates.DEFAULT_PROFILE, filteredSize, random, matchTable, - filteredInfo); + GenerateTableUpdates.generateShiftAwareTableUpdates(GenerateTableUpdates.DEFAULT_PROFILE, + filteredSize, random, matchTable, filteredInfo); } }); validate(en); diff --git a/DB/src/test/java/io/deephaven/db/v2/SelectOverheadLimiter.java b/DB/src/test/java/io/deephaven/db/v2/SelectOverheadLimiter.java index b16de1d61c2..897a76d6621 100644 --- a/DB/src/test/java/io/deephaven/db/v2/SelectOverheadLimiter.java +++ b/DB/src/test/java/io/deephaven/db/v2/SelectOverheadLimiter.java @@ -16,14 +16,12 @@ import java.util.concurrent.atomic.AtomicInteger; /** - * The table {@link Table#select} or {@link Table#update} and operations produce sparse sources as - * of Treasure. If you have a sparse index, that means that you can have many blocks which only - * actually contain one or very few elements. The {@link #clampSelectOverhead(Table, double)} method - * is intended to precede a select or update operation, to limit the amount of memory overhead - * allowed. For tables that are relatively dense, the original indices are preserved. If the - * overhead exceeds the allowable factor, then the table is flattened before passing updates to - * select. Once a table is made flat, it will not revert to it's original address space but rather - * remain flat. + * The table {@link Table#select} or {@link Table#update} and operations produce sparse sources as of Treasure. If you + * have a sparse index, that means that you can have many blocks which only actually contain one or very few elements. 
+ * The {@link #clampSelectOverhead(Table, double)} method is intended to precede a select or update operation, to limit + * the amount of memory overhead allowed. For tables that are relatively dense, the original indices are preserved. If + * the overhead exceeds the allowable factor, then the table is flattened before passing updates to select. Once a table + * is made flat, it will not revert to it's original address space but rather remain flat. */ public class SelectOverheadLimiter { @VisibleForTesting @@ -64,8 +62,7 @@ private long size() { } double overhead() { - final long minimumBlocks = - (size() + SparseConstants.BLOCK_SIZE - 1) / SparseConstants.BLOCK_SIZE; + final long minimumBlocks = (size() + SparseConstants.BLOCK_SIZE - 1) / SparseConstants.BLOCK_SIZE; return (double) blockCount() / (double) minimumBlocks; } @@ -107,21 +104,19 @@ public static Table clampSelectOverhead(Table input, double permittedOverhead) { final Index index = input.getIndex().clone(); final Map resultColumns = new LinkedHashMap<>(); // noinspection unchecked - input.getColumnSourceMap() - .forEach((name, cs) -> resultColumns.put(name, new SwitchColumnSource(cs))); + input.getColumnSourceMap().forEach((name, cs) -> resultColumns.put(name, new SwitchColumnSource(cs))); final QueryTable result = new QueryTable(index, resultColumns); - final MutableObject inputRecorder = new MutableObject<>( - new ListenerRecorder("clampSelectOverhead.input()", (DynamicTable) input, result)); + final MutableObject inputRecorder = + new MutableObject<>(new ListenerRecorder("clampSelectOverhead.input()", (DynamicTable) input, result)); ((DynamicTable) input).listenForUpdates(inputRecorder.getValue()); final List recorders = Collections.synchronizedList(new ArrayList<>()); recorders.add(inputRecorder.getValue()); final MergedListener mergedListener = new MergedListener(recorders, - Collections.singletonList((NotificationQueue.Dependency) input), "clampSelectOverhead", - result) { + 
Collections.singletonList((NotificationQueue.Dependency) input), "clampSelectOverhead", result) { Table flatResult = null; ListenerRecorder flatRecorder; ModifiedColumnSet.Transformer flatTransformer; @@ -130,8 +125,7 @@ public static Table clampSelectOverhead(Table input, double permittedOverhead) { { inputRecorder.getValue().setMergedListener(this); inputTransformer = ((BaseTable) input).newModifiedColumnSetTransformer(result, - result.getColumnSourceMap().keySet() - .toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)); + result.getColumnSourceMap().keySet().toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)); } @Override @@ -143,8 +137,7 @@ protected void process() { index.insert(upstream.added); final ShiftAwareListener.Update copy = upstream.copy(); copy.modifiedColumnSet = result.getModifiedColumnSetForUpdates(); - flatTransformer.clearAndTransform(upstream.modifiedColumnSet, - copy.modifiedColumnSet); + flatTransformer.clearAndTransform(upstream.modifiedColumnSet, copy.modifiedColumnSet); result.notifyListeners(copy); return; } @@ -160,8 +153,7 @@ protected void process() { if (overheadTracker.overhead() <= permittedOverhead) { final ShiftAwareListener.Update copy = upstream.copy(); copy.modifiedColumnSet = result.getModifiedColumnSetForUpdates(); - inputTransformer.clearAndTransform(upstream.modifiedColumnSet, - copy.modifiedColumnSet); + inputTransformer.clearAndTransform(upstream.modifiedColumnSet, copy.modifiedColumnSet); result.notifyListeners(copy); return; } @@ -169,12 +161,11 @@ protected void process() { // we need to convert this to the flat table overheadTracker.clear(); flatResult = input.flatten(); - flatRecorder = new ListenerRecorder("clampSelectOverhead.flatResult()", - (DynamicTable) flatResult, result); + flatRecorder = + new ListenerRecorder("clampSelectOverhead.flatResult()", (DynamicTable) flatResult, result); flatRecorder.setMergedListener(this); flatTransformer = ((BaseTable) flatResult).newModifiedColumnSetTransformer(result, - 
result.getColumnSourceMap().keySet() - .toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)); + result.getColumnSourceMap().keySet().toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY)); ((DynamicTable) flatResult).listenForUpdates(flatRecorder); synchronized (recorders) { @@ -188,8 +179,7 @@ protected void process() { inputTransformer = null; // noinspection unchecked - resultColumns - .forEach((name, scs) -> scs.setNewCurrent(flatResult.getColumnSource(name))); + resultColumns.forEach((name, scs) -> scs.setNewCurrent(flatResult.getColumnSource(name))); index.clear(); index.insert(flatResult.getIndex()); diff --git a/DB/src/test/java/io/deephaven/db/v2/SimpleShiftAwareListener.java b/DB/src/test/java/io/deephaven/db/v2/SimpleShiftAwareListener.java index 9e384a7ec52..4c8e524ea7f 100644 --- a/DB/src/test/java/io/deephaven/db/v2/SimpleShiftAwareListener.java +++ b/DB/src/test/java/io/deephaven/db/v2/SimpleShiftAwareListener.java @@ -1,8 +1,7 @@ package io.deephaven.db.v2; /** - * A listener for use in unit tests that writes down the update it receives and counts how many it - * received. + * A listener for use in unit tests that writes down the update it receives and counts how many it received. */ public class SimpleShiftAwareListener extends InstrumentedShiftAwareListenerAdapter { public SimpleShiftAwareListener(DynamicTable source) { @@ -37,15 +36,15 @@ public void onUpdate(final Update upstream) { @Override public String toString() { return "SimpleShiftAwareListener{" + - "count=" + count + - (update == null ? "" - : (", added=" + update.added + - ", removed=" + update.removed + - ", modified=" + update.modified + - ", shifted=" + update.shifted + - ", modifiedColumnSet=" + update.modifiedColumnSet)) - + - '}'; + "count=" + count + + (update == null ? 
"" + : (", added=" + update.added + + ", removed=" + update.removed + + ", modified=" + update.modified + + ", shifted=" + update.shifted + + ", modifiedColumnSet=" + update.modifiedColumnSet)) + + + '}'; } public void close() { diff --git a/DB/src/test/java/io/deephaven/db/v2/StreamTableAggregationTest.java b/DB/src/test/java/io/deephaven/db/v2/StreamTableAggregationTest.java index cc658c97fbf..d5300d0adb7 100644 --- a/DB/src/test/java/io/deephaven/db/v2/StreamTableAggregationTest.java +++ b/DB/src/test/java/io/deephaven/db/v2/StreamTableAggregationTest.java @@ -39,9 +39,9 @@ public class StreamTableAggregationTest extends JUnit4QueryTableTestBase { private static final long MAX_RANDOM_ITERATION_SIZE = 10_000; private final Table source = Table.of(EmptyTable.of(INPUT_SIZE) - .update("Sym = Long.toString(ii % 1000) + `_Sym`") - .update("Price = ii / 100 - (ii % 100)") - .update("Size = (long) (ii / 50 - (ii % 50))")); + .update("Sym = Long.toString(ii % 1000) + `_Sym`") + .update("Price = ii / 100 - (ii % 100)") + .update("Size = (long) (ii / 50 - (ii % 50))")); @Before @@ -58,13 +58,11 @@ public void tearDown() throws Exception { * Execute a table operator ending in an aggregation. * * @param operator The operator to apply - * @param windowed Whether the stream table index should be a sliding window (if {@code true}) - * or zero-based (if {@code false}) + * @param windowed Whether the stream table index should be a sliding window (if {@code true}) or zero-based (if + * {@code false}) */ - private void doOperatorTest(@NotNull final UnaryOperator
    operator, - final boolean windowed) { - final QueryTable normal = - new QueryTable(Index.FACTORY.getEmptyIndex(), source.getColumnSourceMap()); + private void doOperatorTest(@NotNull final UnaryOperator
    operator, final boolean windowed) { + final QueryTable normal = new QueryTable(Index.FACTORY.getEmptyIndex(), source.getColumnSourceMap()); normal.setRefreshing(true); final QueryTable addOnly = (QueryTable) normal.copy(); @@ -78,14 +76,11 @@ private void doOperatorTest(@NotNull final UnaryOperator
    operator, } else { // Redirecting so we can present a zero-based Index from the stream table streamInternalIndex = Index.FACTORY.getEmptyIndex(); - final RedirectionIndex streamRedirections = - new WrappedIndexRedirectionIndexImpl(streamInternalIndex); + final RedirectionIndex streamRedirections = new WrappedIndexRedirectionIndexImpl(streamInternalIndex); // noinspection unchecked - streamSources = source.getColumnSourceMap().entrySet().stream() - .collect(Collectors.toMap( + streamSources = source.getColumnSourceMap().entrySet().stream().collect(Collectors.toMap( Map.Entry::getKey, - (entry -> new ReadOnlyRedirectedColumnSource(streamRedirections, - entry.getValue())), + (entry -> new ReadOnlyRedirectedColumnSource(streamRedirections, entry.getValue())), Assert::neverInvoked, LinkedHashMap::new)); } @@ -100,12 +95,11 @@ private void doOperatorTest(@NotNull final UnaryOperator
    operator, final Table streamExpected = operator.apply(stream); TstUtils.assertTableEquals(expected, addOnlyExpected); TstUtils.assertTableEquals(expected, streamExpected); - TestCase.assertFalse(((BaseTable) streamExpected).isStream()); // Aggregation results are - // never stream tables + TestCase.assertFalse(((BaseTable) streamExpected).isStream()); // Aggregation results are never stream tables final PrimitiveIterator.OfLong refreshSizes = LongStream.concat( - LongStream.of(100, 0, 1, 2, 50, 0, 1000, 1, 0), - new Random().longs(0, MAX_RANDOM_ITERATION_SIZE)).iterator(); + LongStream.of(100, 0, 1, 2, 50, 0, 1000, 1, 0), + new Random().longs(0, MAX_RANDOM_ITERATION_SIZE)).iterator(); int step = 0; long usedSize = 0; @@ -113,22 +107,20 @@ private void doOperatorTest(@NotNull final UnaryOperator
    operator, while (usedSize < INPUT_SIZE) { final long refreshSize = Math.min(INPUT_SIZE - usedSize, refreshSizes.nextLong()); final Index normalStepInserted = refreshSize == 0 - ? Index.CURRENT_FACTORY.getEmptyIndex() - : Index.CURRENT_FACTORY.getIndexByRange(usedSize, usedSize + refreshSize - 1); - final Index streamStepInserted = streamInternalIndex == null ? normalStepInserted - : refreshSize == 0 ? Index.CURRENT_FACTORY.getEmptyIndex() - : Index.CURRENT_FACTORY.getIndexByRange(0, refreshSize - 1); + : Index.CURRENT_FACTORY.getIndexByRange(usedSize, usedSize + refreshSize - 1); + final Index streamStepInserted = streamInternalIndex == null ? normalStepInserted + : refreshSize == 0 + ? Index.CURRENT_FACTORY.getEmptyIndex() + : Index.CURRENT_FACTORY.getIndexByRange(0, refreshSize - 1); LiveTableMonitor.DEFAULT.startCycleForUnitTests(); try { LiveTableMonitor.DEFAULT.refreshLiveTableForUnitTests(() -> { if (normalStepInserted.nonempty()) { normal.getIndex().insert(normalStepInserted); - normal.notifyListeners( - new Update(normalStepInserted, Index.CURRENT_FACTORY.getEmptyIndex(), - Index.CURRENT_FACTORY.getEmptyIndex(), IndexShiftData.EMPTY, - ModifiedColumnSet.EMPTY)); + normal.notifyListeners(new Update(normalStepInserted, Index.CURRENT_FACTORY.getEmptyIndex(), + Index.CURRENT_FACTORY.getEmptyIndex(), IndexShiftData.EMPTY, ModifiedColumnSet.EMPTY)); } }); final Index finalStreamLastInserted = streamLastInserted; @@ -140,9 +132,8 @@ private void doOperatorTest(@NotNull final UnaryOperator
    operator, } stream.getIndex().clear(); stream.getIndex().insert(streamStepInserted); - stream.notifyListeners(new Update(streamStepInserted, - finalStreamLastInserted, Index.CURRENT_FACTORY.getEmptyIndex(), - IndexShiftData.EMPTY, ModifiedColumnSet.EMPTY)); + stream.notifyListeners(new Update(streamStepInserted, finalStreamLastInserted, + Index.CURRENT_FACTORY.getEmptyIndex(), IndexShiftData.EMPTY, ModifiedColumnSet.EMPTY)); } }); } finally { @@ -152,8 +143,8 @@ private void doOperatorTest(@NotNull final UnaryOperator
    operator, TstUtils.assertTableEquals(expected, addOnlyExpected); TstUtils.assertTableEquals(expected, streamExpected); } catch (ComparisonFailure e) { - System.err.printf("FAILURE: step %d, previousUsedSize %d, refreshSize %d%n", step, - usedSize, refreshSize); + System.err.printf("FAILURE: step %d, previousUsedSize %d, refreshSize %d%n", step, usedSize, + refreshSize); throw e; } @@ -346,58 +337,56 @@ public void testSortedLastByNoKeysWindowedObject() { @Test public void testComboBy() { doOperatorTest(table -> table.by(AggCombo( - AggFirst("FirstPrice=Price", "FirstSize=Size"), - AggLast("LastPrice=Price", "LastSize=Size"), - AggMin("MinPrice=Price", "MinSize=Size"), - AggMax("MaxPrice=Price", "MaxSize=Size"), - AggMed("MedPrice=Price", "MedSize=Size"), - AggSortedFirst("Price", "PriceSortedFirstSym=Sym", "PriceSortedFirstSize=Size"), - AggSortedLast("Price", "PriceSortedLastSym=Sym", "PriceSortedLastSize=Size"), - AggSortedFirst("Sym", "SymSortedFirstPrice=Price", "SymSortedFirstSize=Size"), - AggSortedLast("Sym", "SymSortedLastPrice=Price", "SymSortedLastSize=Size")), "Sym"), - false); + AggFirst("FirstPrice=Price", "FirstSize=Size"), + AggLast("LastPrice=Price", "LastSize=Size"), + AggMin("MinPrice=Price", "MinSize=Size"), + AggMax("MaxPrice=Price", "MaxSize=Size"), + AggMed("MedPrice=Price", "MedSize=Size"), + AggSortedFirst("Price", "PriceSortedFirstSym=Sym", "PriceSortedFirstSize=Size"), + AggSortedLast("Price", "PriceSortedLastSym=Sym", "PriceSortedLastSize=Size"), + AggSortedFirst("Sym", "SymSortedFirstPrice=Price", "SymSortedFirstSize=Size"), + AggSortedLast("Sym", "SymSortedLastPrice=Price", "SymSortedLastSize=Size")), "Sym"), false); } @Test public void testComboByNoKeys() { doOperatorTest(table -> table.by(AggCombo( - AggFirst("FirstSym=Sym", "FirstPrice=Price", "FirstSize=Size"), - AggLast("LastSym=Sym", "LastPrice=Price", "LastSize=Size"), - AggMin("MinSym=Sym", "MinPrice=Price", "MinSize=Size"), - AggMax("MaxSym=Sym", "MaxPrice=Price", 
"MaxSize=Size"), - AggMed("MedSym=Sym", "MedPrice=Price", "MedSize=Size"), - AggSortedFirst("Price", "PriceSortedFirstSym=Sym", "PriceSortedFirstSize=Size"), - AggSortedLast("Price", "PriceSortedLastSym=Sym", "PriceSortedLastSize=Size"), - AggSortedFirst("Sym", "SymSortedFirstPrice=Price", "SymSortedFirstSize=Size"), - AggSortedLast("Sym", "SymSortedLastPrice=Price", "SymSortedLastSize=Size"))), false); + AggFirst("FirstSym=Sym", "FirstPrice=Price", "FirstSize=Size"), + AggLast("LastSym=Sym", "LastPrice=Price", "LastSize=Size"), + AggMin("MinSym=Sym", "MinPrice=Price", "MinSize=Size"), + AggMax("MaxSym=Sym", "MaxPrice=Price", "MaxSize=Size"), + AggMed("MedSym=Sym", "MedPrice=Price", "MedSize=Size"), + AggSortedFirst("Price", "PriceSortedFirstSym=Sym", "PriceSortedFirstSize=Size"), + AggSortedLast("Price", "PriceSortedLastSym=Sym", "PriceSortedLastSize=Size"), + AggSortedFirst("Sym", "SymSortedFirstPrice=Price", "SymSortedFirstSize=Size"), + AggSortedLast("Sym", "SymSortedLastPrice=Price", "SymSortedLastSize=Size"))), false); } @Test public void testComboByWindowed() { doOperatorTest(table -> table.by(AggCombo( - AggFirst("FirstPrice=Price", "FirstSize=Size"), - AggLast("LastPrice=Price", "LastSize=Size"), - AggMin("MinPrice=Price", "MinSize=Size"), - AggMax("MaxPrice=Price", "MaxSize=Size"), - AggMed("MedPrice=Price", "MedSize=Size"), - AggSortedFirst("Price", "PriceSortedFirstSym=Sym", "PriceSortedFirstSize=Size"), - AggSortedLast("Price", "PriceSortedLastSym=Sym", "PriceSortedLastSize=Size"), - AggSortedFirst("Sym", "SymSortedFirstPrice=Price", "SymSortedFirstSize=Size"), - AggSortedLast("Sym", "SymSortedLastPrice=Price", "SymSortedLastSize=Size")), "Sym"), - true); + AggFirst("FirstPrice=Price", "FirstSize=Size"), + AggLast("LastPrice=Price", "LastSize=Size"), + AggMin("MinPrice=Price", "MinSize=Size"), + AggMax("MaxPrice=Price", "MaxSize=Size"), + AggMed("MedPrice=Price", "MedSize=Size"), + AggSortedFirst("Price", "PriceSortedFirstSym=Sym", 
"PriceSortedFirstSize=Size"), + AggSortedLast("Price", "PriceSortedLastSym=Sym", "PriceSortedLastSize=Size"), + AggSortedFirst("Sym", "SymSortedFirstPrice=Price", "SymSortedFirstSize=Size"), + AggSortedLast("Sym", "SymSortedLastPrice=Price", "SymSortedLastSize=Size")), "Sym"), true); } @Test public void testComboByNoKeysWindowed() { doOperatorTest(table -> table.by(AggCombo( - AggFirst("FirstSym=Sym", "FirstPrice=Price", "FirstSize=Size"), - AggLast("LastSym=Sym", "LastPrice=Price", "LastSize=Size"), - AggMin("MinSym=Sym", "MinPrice=Price", "MinSize=Size"), - AggMax("MaxSym=Sym", "MaxPrice=Price", "MaxSize=Size"), - AggMed("MedSym=Sym", "MedPrice=Price", "MedSize=Size"), - AggSortedFirst("Price", "PriceSortedFirstSym=Sym", "PriceSortedFirstSize=Size"), - AggSortedLast("Price", "PriceSortedLastSym=Sym", "PriceSortedLastSize=Size"), - AggSortedFirst("Sym", "SymSortedFirstPrice=Price", "SymSortedFirstSize=Size"), - AggSortedLast("Sym", "SymSortedLastPrice=Price", "SymSortedLastSize=Size"))), true); + AggFirst("FirstSym=Sym", "FirstPrice=Price", "FirstSize=Size"), + AggLast("LastSym=Sym", "LastPrice=Price", "LastSize=Size"), + AggMin("MinSym=Sym", "MinPrice=Price", "MinSize=Size"), + AggMax("MaxSym=Sym", "MaxPrice=Price", "MaxSize=Size"), + AggMed("MedSym=Sym", "MedPrice=Price", "MedSize=Size"), + AggSortedFirst("Price", "PriceSortedFirstSym=Sym", "PriceSortedFirstSize=Size"), + AggSortedLast("Price", "PriceSortedLastSym=Sym", "PriceSortedLastSize=Size"), + AggSortedFirst("Sym", "SymSortedFirstPrice=Price", "SymSortedFirstSize=Size"), + AggSortedLast("Sym", "SymSortedLastPrice=Price", "SymSortedLastSize=Size"))), true); } } diff --git a/DB/src/test/java/io/deephaven/db/v2/StreamTableOperationsTest.java b/DB/src/test/java/io/deephaven/db/v2/StreamTableOperationsTest.java index 61a6a09e0f4..692c445e54b 100644 --- a/DB/src/test/java/io/deephaven/db/v2/StreamTableOperationsTest.java +++ b/DB/src/test/java/io/deephaven/db/v2/StreamTableOperationsTest.java @@ -36,9 +36,9 @@ 
public class StreamTableOperationsTest extends JUnit4QueryTableTestBase { private static final long MAX_RANDOM_ITERATION_SIZE = 10_000; private final Table source = Table.of(EmptyTable.of(INPUT_SIZE) - .update("Sym = Long.toString(ii % 1000) + `_Sym`") - .update("Price = ii / 100 - (ii % 100)") - .update("Size = (long) (ii / 50 - (ii % 50))")); + .update("Sym = Long.toString(ii % 1000) + `_Sym`") + .update("Price = ii / 100 - (ii % 100)") + .update("Size = (long) (ii / 50 - (ii % 50))")); @Before @@ -55,14 +55,13 @@ public void tearDown() throws Exception { * Execute a table operator. * * @param operator The operator to apply - * @param windowed Whether the stream table index should be a sliding window (if {@code true}) - * or zero-based (if {@code false}) + * @param windowed Whether the stream table index should be a sliding window (if {@code true}) or zero-based (if + * {@code false}) * @param expectStreamResult Whether the result is expected to be a stream table */ - private void doOperatorTest(@NotNull final UnaryOperator
    operator, - final boolean windowed, final boolean expectStreamResult) { - final QueryTable normal = - new QueryTable(Index.FACTORY.getEmptyIndex(), source.getColumnSourceMap()); + private void doOperatorTest(@NotNull final UnaryOperator
    operator, final boolean windowed, + final boolean expectStreamResult) { + final QueryTable normal = new QueryTable(Index.FACTORY.getEmptyIndex(), source.getColumnSourceMap()); normal.setRefreshing(true); final Index streamInternalIndex; @@ -73,14 +72,11 @@ private void doOperatorTest(@NotNull final UnaryOperator
    operator, } else { // Redirecting so we can present a zero-based Index from the stream table streamInternalIndex = Index.FACTORY.getEmptyIndex(); - final RedirectionIndex streamRedirections = - new WrappedIndexRedirectionIndexImpl(streamInternalIndex); + final RedirectionIndex streamRedirections = new WrappedIndexRedirectionIndexImpl(streamInternalIndex); // noinspection unchecked - streamSources = source.getColumnSourceMap().entrySet().stream() - .collect(Collectors.toMap( + streamSources = source.getColumnSourceMap().entrySet().stream().collect(Collectors.toMap( Map.Entry::getKey, - (entry -> new ReadOnlyRedirectedColumnSource(streamRedirections, - entry.getValue())), + (entry -> new ReadOnlyRedirectedColumnSource(streamRedirections, entry.getValue())), Assert::neverInvoked, LinkedHashMap::new)); } @@ -96,8 +92,8 @@ private void doOperatorTest(@NotNull final UnaryOperator
    operator, TestCase.assertEquals(expectStreamResult, ((BaseTable) streamExpected).isStream()); final PrimitiveIterator.OfLong refreshSizes = LongStream.concat( - LongStream.of(100, 0, 1, 2, 50, 0, 1000, 1, 0), - new Random().longs(0, MAX_RANDOM_ITERATION_SIZE)).iterator(); + LongStream.of(100, 0, 1, 2, 50, 0, 1000, 1, 0), + new Random().longs(0, MAX_RANDOM_ITERATION_SIZE)).iterator(); int step = 0; long usedSize = 0; @@ -106,12 +102,12 @@ private void doOperatorTest(@NotNull final UnaryOperator
    operator, while (usedSize < INPUT_SIZE) { final long refreshSize = Math.min(INPUT_SIZE - usedSize, refreshSizes.nextLong()); final Index normalStepInserted = refreshSize == 0 - ? Index.CURRENT_FACTORY.getEmptyIndex() - : Index.CURRENT_FACTORY.getIndexByRange(usedSize, usedSize + refreshSize - 1); - final Index streamStepInserted = streamInternalIndex == null ? normalStepInserted - : refreshSize == 0 ? Index.CURRENT_FACTORY.getEmptyIndex() - : Index.CURRENT_FACTORY.getIndexByRange(0, refreshSize - 1); + : Index.CURRENT_FACTORY.getIndexByRange(usedSize, usedSize + refreshSize - 1); + final Index streamStepInserted = streamInternalIndex == null ? normalStepInserted + : refreshSize == 0 + ? Index.CURRENT_FACTORY.getEmptyIndex() + : Index.CURRENT_FACTORY.getIndexByRange(0, refreshSize - 1); LiveTableMonitor.DEFAULT.startCycleForUnitTests(); try { @@ -119,9 +115,8 @@ private void doOperatorTest(@NotNull final UnaryOperator
    operator, LiveTableMonitor.DEFAULT.refreshLiveTableForUnitTests(() -> { if (normalStepInserted.nonempty() || finalNormalLastInserted.nonempty()) { normal.getIndex().update(normalStepInserted, finalNormalLastInserted); - normal.notifyListeners(new Update(normalStepInserted, - finalNormalLastInserted, Index.CURRENT_FACTORY.getEmptyIndex(), - IndexShiftData.EMPTY, ModifiedColumnSet.EMPTY)); + normal.notifyListeners(new Update(normalStepInserted, finalNormalLastInserted, + Index.CURRENT_FACTORY.getEmptyIndex(), IndexShiftData.EMPTY, ModifiedColumnSet.EMPTY)); } }); final Index finalStreamLastInserted = streamLastInserted; @@ -133,9 +128,8 @@ private void doOperatorTest(@NotNull final UnaryOperator
    operator, } stream.getIndex().clear(); stream.getIndex().insert(streamStepInserted); - stream.notifyListeners(new Update(streamStepInserted, - finalStreamLastInserted, Index.CURRENT_FACTORY.getEmptyIndex(), - IndexShiftData.EMPTY, ModifiedColumnSet.EMPTY)); + stream.notifyListeners(new Update(streamStepInserted, finalStreamLastInserted, + Index.CURRENT_FACTORY.getEmptyIndex(), IndexShiftData.EMPTY, ModifiedColumnSet.EMPTY)); } }); } finally { @@ -144,8 +138,8 @@ private void doOperatorTest(@NotNull final UnaryOperator
    operator, try { TstUtils.assertTableEquals(expected, streamExpected); } catch (ComparisonFailure e) { - System.err.printf("FAILURE: step %d, previousUsedSize %d, refreshSize %d%n", step, - usedSize, refreshSize); + System.err.printf("FAILURE: step %d, previousUsedSize %d, refreshSize %d%n", step, usedSize, + refreshSize); throw e; } diff --git a/DB/src/test/java/io/deephaven/db/v2/TableMapTest.java b/DB/src/test/java/io/deephaven/db/v2/TableMapTest.java index d83c562bca5..35de3faf3ff 100644 --- a/DB/src/test/java/io/deephaven/db/v2/TableMapTest.java +++ b/DB/src/test/java/io/deephaven/db/v2/TableMapTest.java @@ -38,7 +38,7 @@ public class TableMapTest extends LiveTableTestCase { protected void setUp() throws Exception { if (null == ProcessEnvironment.tryGet()) { ProcessEnvironment.basicServerInitialization(Configuration.getInstance(), - "TestTransformableTableMapThenMerge", new StreamLoggerImpl()); + "TestTransformableTableMapThenMerge", new StreamLoggerImpl()); } super.setUp(); setExpectError(false); @@ -56,9 +56,9 @@ protected void tearDown() throws Exception { public void testMergeSimple() { final QueryTable queryTable = TstUtils.testRefreshingTable(i(1, 2, 4, 6), - c("Sym", "aa", "bb", "aa", "bb"), - c("intCol", 10, 20, 40, 60), - c("doubleCol", 0.1, 0.2, 0.4, 0.6)); + c("Sym", "aa", "bb", "aa", "bb"), + c("intCol", 10, 20, 40, 60), + c("doubleCol", 0.1, 0.2, 0.4, 0.6)); final Table withK = queryTable.update("K=k"); @@ -74,8 +74,7 @@ public void testMergeSimple() { assertEquals("", TableTools.diff(mergedByK, withK, 10)); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - addToTable(queryTable, i(3, 9), c("Sym", "cc", "cc"), c("intCol", 30, 90), - c("doubleCol", 2.3, 2.9)); + addToTable(queryTable, i(3, 9), c("Sym", "cc", "cc"), c("intCol", 30, 90), c("doubleCol", 2.3, 2.9)); queryTable.notifyListeners(i(3, 9), i(), i()); }); @@ -90,9 +89,9 @@ public void testMergeSimple() { public void testMergePopulate() { final QueryTable queryTable = 
TstUtils.testRefreshingTable(i(1, 2, 4, 6), - c("Sym", "aa", "bb", "aa", "bb"), - c("intCol", 10, 20, 40, 60), - c("doubleCol", 0.1, 0.2, 0.4, 0.6)); + c("Sym", "aa", "bb", "aa", "bb"), + c("intCol", 10, 20, 40, 60), + c("doubleCol", 0.1, 0.2, 0.4, 0.6)); final Table withK = queryTable.update("K=k"); @@ -110,8 +109,7 @@ public void testMergePopulate() { assertEquals("", TableTools.diff(mergedByK, withK, 10)); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - addToTable(queryTable, i(3, 9), c("Sym", "cc", "cc"), c("intCol", 30, 90), - c("doubleCol", 2.3, 2.9)); + addToTable(queryTable, i(3, 9), c("Sym", "cc", "cc"), c("intCol", 30, 90), c("doubleCol", 2.3, 2.9)); queryTable.notifyListeners(i(3, 9), i(), i()); }); @@ -137,24 +135,22 @@ private void testMergeIncremental(int seed) { final TstUtils.ColumnInfo[] columnInfo; final String[] syms = {"aa", "bb", "cc", "dd"}; final QueryTable table = getTable(size, random, - columnInfo = initColumnInfos(new String[] {"Sym", "intCol", "doubleCol"}, - new SetGenerator<>(syms), - new TstUtils.IntGenerator(0, 20), - new TstUtils.DoubleGenerator(0, 100))); + columnInfo = initColumnInfos(new String[] {"Sym", "intCol", "doubleCol"}, + new SetGenerator<>(syms), + new TstUtils.IntGenerator(0, 20), + new TstUtils.DoubleGenerator(0, 100))); final EvalNugget en[] = new EvalNugget[] { new EvalNugget() { public Table e() { - return table.byExternal("Sym").populateKeys((Object[]) syms).merge() - .sort("Sym"); + return table.byExternal("Sym").populateKeys((Object[]) syms).merge().sort("Sym"); } }, new EvalNugget() { public Table e() { return table.byExternal("intCol") - .populateKeys( - IntStream.rangeClosed(0, 20).boxed().toArray(Object[]::new)) - .merge().sort("intCol"); + .populateKeys(IntStream.rangeClosed(0, 20).boxed().toArray(Object[]::new)).merge() + .sort("intCol"); } }, }; @@ -179,8 +175,7 @@ static class SizeNugget implements EvalNuggetInterface { @Override public void validate(String msg) { - 
Assert.equals(originalTable.size(), "originalTable.size()", computedTable.size(), - "computedTable.size()"); + Assert.equals(originalTable.size(), "originalTable.size()", computedTable.size(), "computedTable.size()"); } @Override @@ -196,16 +191,15 @@ public void testAsTable() { final TstUtils.ColumnInfo[] columnInfo; final QueryTable table = getTable(size, random, - columnInfo = initColumnInfos(new String[] {"Sym", "intCol", "doubleCol", "Keys"}, - new TstUtils.SetGenerator<>("aa", "bb", "cc", "dd"), - new TstUtils.IntGenerator(0, 20), - new TstUtils.DoubleGenerator(0, 100), - new TstUtils.SortedLongGenerator(0, Long.MAX_VALUE - 1))); + columnInfo = initColumnInfos(new String[] {"Sym", "intCol", "doubleCol", "Keys"}, + new TstUtils.SetGenerator<>("aa", "bb", "cc", "dd"), + new TstUtils.IntGenerator(0, 20), + new TstUtils.DoubleGenerator(0, 100), + new TstUtils.SortedLongGenerator(0, Long.MAX_VALUE - 1))); final Table withK = table.update("K=Keys"); - final QueryTable rightTable = getTable(size, random, - initColumnInfos(new String[] {"Sym", "RightCol"}, + final QueryTable rightTable = getTable(size, random, initColumnInfos(new String[] {"Sym", "RightCol"}, new TstUtils.SetGenerator<>("aa", "bb", "cc", "dd"), new TstUtils.IntGenerator(100, 200))); @@ -221,20 +215,16 @@ public void testAsTable() { new EvalNugget() { public Table e() { return ((TransformableTableMap) table.update("K=Keys").byExternal("Sym") - .populateKeys("aa", "bb", "cc", "dd").asTable(false, false, false) - .update("K2=Keys*2").select("K", "K2", "Half=doubleCol/2", - "Sq=doubleCol*doubleCol", "Weight=intCol*doubleCol", "Sym")).merge() - .sort("K", "Sym"); + .populateKeys("aa", "bb", "cc", "dd").asTable(false, false, false).update("K2=Keys*2") + .select("K", "K2", "Half=doubleCol/2", "Sq=doubleCol*doubleCol", + "Weight=intCol*doubleCol", "Sym")).merge().sort("K", "Sym"); } }, new SizeNugget(table, asTable), - new QueryTableTest.TableComparator( - withK.naturalJoin(rightTable.lastBy("Sym"), 
"Sym").sort("K", "Sym"), - asTable.naturalJoin(rightTable.lastBy("Sym"), "Sym").coalesce().sort("K", - "Sym")), - new QueryTableTest.TableComparator( - withK.naturalJoin(rightTable.lastBy("Sym"), "Sym").sort("K", "Sym"), - asTable.naturalJoin(rightAsTable.lastBy(), "Sym").coalesce().sort("K", "Sym")), + new QueryTableTest.TableComparator(withK.naturalJoin(rightTable.lastBy("Sym"), "Sym").sort("K", "Sym"), + asTable.naturalJoin(rightTable.lastBy("Sym"), "Sym").coalesce().sort("K", "Sym")), + new QueryTableTest.TableComparator(withK.naturalJoin(rightTable.lastBy("Sym"), "Sym").sort("K", "Sym"), + asTable.naturalJoin(rightAsTable.lastBy(), "Sym").coalesce().sort("K", "Sym")), }; for (int i = 0; i < 100; i++) { @@ -245,8 +235,8 @@ public Table e() { public void testTransformTableMapThenMerge() { LiveTableMonitor.DEFAULT.resetForUnitTests(false, true, 0, 4, 10, 5); - final QueryTable sourceTable = TstUtils.testRefreshingTable(i(1), intCol("Key", 1), - intCol("Sentinel", 1), col("Sym", "a"), doubleCol("DoubleCol", 1.1)); + final QueryTable sourceTable = TstUtils.testRefreshingTable(i(1), intCol("Key", 1), intCol("Sentinel", 1), + col("Sym", "a"), doubleCol("DoubleCol", 1.1)); final TableMap tableMap = sourceTable.byExternal("Key"); @@ -260,16 +250,19 @@ protected Table e() { new EvalNugget() { @Override protected Table e() { - return tableMap.transformTables(t -> t.update("K2=Key * 2") - .update("K3=Key + K2").update("K5 = K3 + K2")).merge().sort("Key"); + return tableMap + .transformTables( + t -> t.update("K2=Key * 2").update("K3=Key + K2").update("K5 = K3 + K2")) + .merge().sort("Key"); } }, new EvalNugget() { @Override protected Table e() { - return tableMap.transformTablesWithMap(tableMap, - (l, r) -> l.naturalJoin(r.lastBy("Key"), "Key", "Sentinel2=Sentinel")) - .merge().sort("Key"); + return tableMap + .transformTablesWithMap(tableMap, + (l, r) -> l.naturalJoin(r.lastBy("Key"), "Key", "Sentinel2=Sentinel")) + .merge().sort("Key"); } } }; @@ -278,18 +271,15 @@ 
protected Table e() { final int iteration = ii + 1; LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { final long baseLocation = iteration * 10; - final Index addIndex = - Index.FACTORY.getIndexByRange(baseLocation, baseLocation + 4); - final int[] sentinels = {iteration * 5, iteration * 5 + 1, iteration * 5 + 2, - iteration * 5 + 3, iteration * 5 + 4}; - addToTable(sourceTable, addIndex, - intCol("Key", 1, 3, iteration, iteration - 1, iteration * 2), - intCol("Sentinel", sentinels), col("Sym", "aa", "bb", "cc", "dd", "ee"), - doubleCol("DoubleCol", 2.2, 3.3, 4.4, 5.5, 6.6)); + final Index addIndex = Index.FACTORY.getIndexByRange(baseLocation, baseLocation + 4); + final int[] sentinels = + {iteration * 5, iteration * 5 + 1, iteration * 5 + 2, iteration * 5 + 3, iteration * 5 + 4}; + addToTable(sourceTable, addIndex, intCol("Key", 1, 3, iteration, iteration - 1, iteration * 2), + intCol("Sentinel", sentinels), col("Sym", "aa", "bb", "cc", "dd", "ee"), + doubleCol("DoubleCol", 2.2, 3.3, 4.4, 5.5, 6.6)); sourceTable.notifyListeners(addIndex, i(), i()); if (printTableUpdates) { - System.out - .println("Source Table, iteration=" + iteration + ", added=" + addIndex); + System.out.println("Source Table, iteration=" + iteration + ", added=" + addIndex); TableTools.showWithIndex(sourceTable); } }); @@ -299,9 +289,9 @@ protected Table e() { public void testAttributes() { final QueryTable queryTable = TstUtils.testRefreshingTable(i(1, 2, 4, 6), - c("Sym", "aa", "bb", "aa", "bb"), - c("intCol", 10, 20, 40, 60), - c("doubleCol", 0.1, 0.2, 0.4, 0.6)); + c("Sym", "aa", "bb", "aa", "bb"), + c("intCol", 10, 20, 40, 60), + c("doubleCol", 0.1, 0.2, 0.4, 0.6)); queryTable.setAttribute(Table.SORTABLE_COLUMNS_ATTRIBUTE, "bar"); @@ -313,24 +303,25 @@ public void testAttributes() { final Table asTable = tableMap.asTable(true, false, true); if (SystemicObjectTracker.isSystemicObjectMarkingEnabled()) { - TestCase.assertEquals(CollectionUtil.mapFromArray(String.class, Object.class, 
"quux", - "baz", Table.SORTABLE_COLUMNS_ATTRIBUTE, "bar", Table.SYSTEMIC_TABLE_ATTRIBUTE, - Boolean.TRUE), asTable.getAttributes()); + TestCase.assertEquals( + CollectionUtil.mapFromArray(String.class, Object.class, "quux", "baz", + Table.SORTABLE_COLUMNS_ATTRIBUTE, "bar", Table.SYSTEMIC_TABLE_ATTRIBUTE, Boolean.TRUE), + asTable.getAttributes()); } else { - TestCase.assertEquals(CollectionUtil.mapFromArray(String.class, Object.class, "quux", - "baz", Table.SORTABLE_COLUMNS_ATTRIBUTE, "bar"), asTable.getAttributes()); + TestCase.assertEquals(CollectionUtil.mapFromArray(String.class, Object.class, "quux", "baz", + Table.SORTABLE_COLUMNS_ATTRIBUTE, "bar"), asTable.getAttributes()); } Table merged = ((TransformableTableMap) asTable).merge(); if (SystemicObjectTracker.isSystemicObjectMarkingEnabled()) { - TestCase.assertEquals(CollectionUtil.mapFromArray(String.class, Object.class, "quux", - "baz", Table.SORTABLE_COLUMNS_ATTRIBUTE, "bar", Table.MERGED_TABLE_ATTRIBUTE, true, - Table.SYSTEMIC_TABLE_ATTRIBUTE, Boolean.TRUE), merged.getAttributes()); + TestCase.assertEquals(CollectionUtil.mapFromArray(String.class, Object.class, "quux", "baz", + Table.SORTABLE_COLUMNS_ATTRIBUTE, "bar", Table.MERGED_TABLE_ATTRIBUTE, true, + Table.SYSTEMIC_TABLE_ATTRIBUTE, Boolean.TRUE), merged.getAttributes()); } else { TestCase.assertEquals( - CollectionUtil.mapFromArray(String.class, Object.class, "quux", "baz", - Table.SORTABLE_COLUMNS_ATTRIBUTE, "bar", Table.MERGED_TABLE_ATTRIBUTE, true), - merged.getAttributes()); + CollectionUtil.mapFromArray(String.class, Object.class, "quux", "baz", + Table.SORTABLE_COLUMNS_ATTRIBUTE, "bar", Table.MERGED_TABLE_ATTRIBUTE, true), + merged.getAttributes()); } int tableCounter = 1; @@ -343,41 +334,38 @@ public void testAttributes() { asTable.getAttributes(); TestCase.fail("Get attributes is inconsistent!"); } catch (IllegalArgumentException e) { - TestCase.assertEquals("Underlying tables do not have consistent attributes.", - e.getMessage()); + 
TestCase.assertEquals("Underlying tables do not have consistent attributes.", e.getMessage()); } // the merged table just takes the set that is consistent merged = ((TransformableTableMap) asTable).merge(); if (SystemicObjectTracker.isSystemicObjectMarkingEnabled()) { - TestCase.assertEquals(CollectionUtil.mapFromArray(String.class, Object.class, "quux", - "baz", Table.SORTABLE_COLUMNS_ATTRIBUTE, "bar", Table.MERGED_TABLE_ATTRIBUTE, true, - Table.SYSTEMIC_TABLE_ATTRIBUTE, Boolean.TRUE), merged.getAttributes()); + TestCase.assertEquals(CollectionUtil.mapFromArray(String.class, Object.class, "quux", "baz", + Table.SORTABLE_COLUMNS_ATTRIBUTE, "bar", Table.MERGED_TABLE_ATTRIBUTE, true, + Table.SYSTEMIC_TABLE_ATTRIBUTE, Boolean.TRUE), merged.getAttributes()); } else { TestCase.assertEquals( - CollectionUtil.mapFromArray(String.class, Object.class, "quux", "baz", - Table.SORTABLE_COLUMNS_ATTRIBUTE, "bar", Table.MERGED_TABLE_ATTRIBUTE, true), - merged.getAttributes()); + CollectionUtil.mapFromArray(String.class, Object.class, "quux", "baz", + Table.SORTABLE_COLUMNS_ATTRIBUTE, "bar", Table.MERGED_TABLE_ATTRIBUTE, true), + merged.getAttributes()); } } public void testJoinSanity() { final QueryTable left = TstUtils.testRefreshingTable(i(1, 2, 4, 6), - c("USym", "aa", "bb", "aa", "bb"), - c("Sym", "aa_1", "bb_1", "aa_2", "bb_2"), - c("LeftSentinel", 10, 20, 40, 60)); + c("USym", "aa", "bb", "aa", "bb"), + c("Sym", "aa_1", "bb_1", "aa_2", "bb_2"), + c("LeftSentinel", 10, 20, 40, 60)); final QueryTable right = TstUtils.testRefreshingTable(i(3, 5, 7, 9), - c("USym", "aa", "bb", "aa", "bb"), - c("Sym", "aa_1", "bb_1", "aa_2", "bb_2"), - c("RightSentinel", 30, 50, 70, 90)); + c("USym", "aa", "bb", "aa", "bb"), + c("Sym", "aa_1", "bb_1", "aa_2", "bb_2"), + c("RightSentinel", 30, 50, 70, 90)); final TableMap leftMap = left.byExternal("USym"); final TableMap rightMap = right.byExternal("USym"); - final Table leftAsTable = - 
leftMap.asTableBuilder().sanityCheckJoin(true).allowCoalesce(false).build(); - final Table rightAsTable = - rightMap.asTableBuilder().sanityCheckJoin(true).allowCoalesce(false).build(); + final Table leftAsTable = leftMap.asTableBuilder().sanityCheckJoin(true).allowCoalesce(false).build(); + final Table rightAsTable = rightMap.asTableBuilder().sanityCheckJoin(true).allowCoalesce(false).build(); final Table result = leftAsTable.join(rightAsTable, "Sym", "RightSentinel"); @@ -395,8 +383,8 @@ public void testJoinSanity() { final Throwable throwable = throwables.get(0); TestCase.assertEquals(IllegalArgumentException.class, throwable.getClass()); TestCase.assertEquals( - "join([Sym]) Left join key \"aa_1\" exists in multiple TableMap keys, \"aa\" and \"bb\"", - throwable.getMessage()); + "join([Sym]) Left join key \"aa_1\" exists in multiple TableMap keys, \"aa\" and \"bb\"", + throwable.getMessage()); return true; }); @@ -404,23 +392,23 @@ public void testJoinSanity() { public void testDependencies() { final QueryTable sourceTable = TstUtils.testRefreshingTable(i(1, 2, 4, 6), - c("USym", "aa", "bb", "aa", "bb"), - c("Sentinel", 10, 20, 40, 60)); + c("USym", "aa", "bb", "aa", "bb"), + c("Sentinel", 10, 20, 40, 60)); final TableMap result = sourceTable.byExternal("USym"); final Table aa = result.get("aa"); final Table aa2 = aa.update("S2=Sentinel * 2"); TableTools.show(aa2); - LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> TestCase - .assertTrue(((QueryTable) aa2).satisfied(LogicalClock.DEFAULT.currentStep()))); + LiveTableMonitor.DEFAULT.runWithinUnitTestCycle( + () -> TestCase.assertTrue(((QueryTable) aa2).satisfied(LogicalClock.DEFAULT.currentStep()))); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { addToTable(sourceTable, i(8), c("USym", "bb"), c("Sentinel", 80)); sourceTable.notifyListeners(i(8), i(), i()); TestCase.assertFalse(((QueryTable) aa2).satisfied(LogicalClock.DEFAULT.currentStep())); - // We need to flush one notification: one for the 
source table because we do not require - // an intermediate view table in this case + // We need to flush one notification: one for the source table because we do not require an intermediate + // view table in this case final boolean flushed = LiveTableMonitor.DEFAULT.flushOneNotificationForUnitTests(); TestCase.assertTrue(flushed); TestCase.assertTrue(((QueryTable) aa2).satisfied(LogicalClock.DEFAULT.currentStep())); @@ -433,20 +421,18 @@ public static class PauseHelper { @SuppressWarnings("unused") public T pauseValue(T retVal) { - System.out - .println((System.currentTimeMillis() - start) / 1000.0 + ": Reading: " + retVal); + System.out.println((System.currentTimeMillis() - start) / 1000.0 + ": Reading: " + retVal); synchronized (this) { while (!released) { try { - System.out.println((System.currentTimeMillis() - start) / 1000.0 - + ": Waiting for release of: " + retVal); + System.out.println( + (System.currentTimeMillis() - start) / 1000.0 + ": Waiting for release of: " + retVal); wait(5000); if (!released) { TestCase.fail("Not released!"); } - System.out.println((System.currentTimeMillis() - start) / 1000.0 - + ": Release of: " + retVal); + System.out.println((System.currentTimeMillis() - start) / 1000.0 + ": Release of: " + retVal); } catch (InterruptedException e) { TestCase.fail("Interrupted!"); } @@ -471,12 +457,12 @@ public void testCrossDependencies() { LiveTableMonitor.DEFAULT.resetForUnitTests(false, true, 0, 2, 0, 0); final QueryTable sourceTable = TstUtils.testRefreshingTable(i(1, 2), - c("USym", "aa", "bb"), - c("Sentinel", 10, 20)); + c("USym", "aa", "bb"), + c("Sentinel", 10, 20)); final QueryTable sourceTable2 = TstUtils.testRefreshingTable(i(3, 5), - c("USym2", "aa", "bb"), - c("Sentinel2", 30, 50)); + c("USym2", "aa", "bb"), + c("Sentinel2", 30, 50)); final TableMap result = sourceTable.byExternal("USym"); @@ -488,8 +474,7 @@ public void testCrossDependencies() { pauseHelper.release(); pauseHelper2.release(); - final TableMap result2 = - 
sourceTable2.update("SlowItDown=pauseHelper.pauseValue(k)").byExternal("USym2") + final TableMap result2 = sourceTable2.update("SlowItDown=pauseHelper.pauseValue(k)").byExternal("USym2") .transformTables(t -> t.update("SlowItDown2=pauseHelper2.pauseValue(2 * k)")); // pauseHelper.pause(); @@ -558,12 +543,12 @@ public void testCrossDependencies2() { LiveTableMonitor.DEFAULT.resetForUnitTests(false, true, 0, 2, 0, 0); final QueryTable sourceTable = TstUtils.testRefreshingTable(i(1, 2), - c("USym", "aa", "bb"), - c("Sentinel", 10, 20)); + c("USym", "aa", "bb"), + c("Sentinel", 10, 20)); final QueryTable sourceTable2 = TstUtils.testRefreshingTable(i(3, 5, 9), - c("USym2", "aa", "bb", "dd"), - c("Sentinel2", 30, 50, 90)); + c("USym2", "aa", "bb", "dd"), + c("Sentinel2", 30, 50, 90)); final TableMap result = sourceTable.byExternal("USym"); @@ -573,7 +558,7 @@ public void testCrossDependencies2() { pauseHelper.release(); final TableMap result2 = sourceTable2.byExternal("USym2") - .transformTables(t -> t.update("SlowItDown2=pauseHelper.pauseValue(2 * k)")); + .transformTables(t -> t.update("SlowItDown2=pauseHelper.pauseValue(2 * k)")); final TableMap joined = result.transformTablesWithMap(result2, (l, r) -> { System.out.println("Doing naturalJoin"); @@ -613,8 +598,7 @@ public void testTableMapScope() { } private void testTableMapScope(boolean refreshing) { - final DynamicTable table = - TableTools.newTable(TableTools.col("Key", "A", "B"), intCol("Value", 1, 2)); + final DynamicTable table = TableTools.newTable(TableTools.col("Key", "A", "B"), intCol("Value", 1, 2)); if (refreshing) { table.setRefreshing(true); } @@ -652,15 +636,13 @@ private void testMemoize(Table source, Function.Unary op) { testMemoize(source, op, op); } - private void testMemoize(Table source, Function.Unary op, - Function.Unary op2) { + private void testMemoize(Table source, Function.Unary op, Function.Unary op2) { final TableMap result = op.call(source); final TableMap result2 = op2.call(source); 
org.junit.Assert.assertSame(result, result2); } - private void testNoMemoize(Table source, Function.Unary op, - Function.Unary op2) { + private void testNoMemoize(Table source, Function.Unary op, Function.Unary op2) { final TableMap result = op.call(source); final TableMap result2 = op2.call(source); org.junit.Assert.assertNotSame(result, result2); @@ -668,8 +650,8 @@ private void testNoMemoize(Table source, Function.Unary op, public void testMemoize() { final QueryTable sourceTable = TstUtils.testRefreshingTable(i(1, 2, 4, 6), - c("USym", "aa", "bb", "aa", "bb"), - c("Sentinel", 10, 20, 40, 60)); + c("USym", "aa", "bb", "aa", "bb"), + c("Sentinel", 10, 20, 40, 60)); final boolean old = QueryTable.setMemoizeResults(true); try { @@ -677,10 +659,8 @@ public void testMemoize() { testMemoize(sourceTable, t -> t.byExternal("Sentinel")); testMemoize(sourceTable, t -> t.byExternal(true, "USym")); testMemoize(sourceTable, t -> t.byExternal(true, "Sentinel")); - testMemoize(sourceTable, t -> t.byExternal(false, "Sentinel"), - t -> t.byExternal("Sentinel")); - testNoMemoize(sourceTable, t -> t.byExternal(true, "Sentinel"), - t -> t.byExternal("Sentinel")); + testMemoize(sourceTable, t -> t.byExternal(false, "Sentinel"), t -> t.byExternal("Sentinel")); + testNoMemoize(sourceTable, t -> t.byExternal(true, "Sentinel"), t -> t.byExternal("Sentinel")); testNoMemoize(sourceTable, t -> t.byExternal("USym"), t -> t.byExternal("Sentinel")); } finally { QueryTable.setMemoizeResults(old); @@ -689,13 +669,13 @@ public void testMemoize() { public void testTableMapSupplierListeners() { final QueryTable base = TstUtils.testRefreshingTable(i(0, 1, 2, 3, 4, 5), - stringCol("Key", "Zero", "Zero", "One", "One", "One", "One"), - stringCol("Color", "Red", "Blue", "Red", "Blue", "Red", "Blue"), - intCol("Value", -1, 0, 1, 2, 3, 4)); + stringCol("Key", "Zero", "Zero", "One", "One", "One", "One"), + stringCol("Color", "Red", "Blue", "Red", "Blue", "Red", "Blue"), + intCol("Value", -1, 0, 1, 2, 3, 
4)); final TableMap byKey = base.byExternal("Key"); final TableMapSupplier supplier = - new TableMapSupplier(byKey, Collections.singletonList(t -> t.where("Color=`Red`"))); + new TableMapSupplier(byKey, Collections.singletonList(t -> t.where("Color=`Red`"))); assertTableEquals(base.where("Color=`Red`"), supplier.merge()); @@ -706,36 +686,36 @@ public void testTableMapSupplierListeners() { LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { final Index idx = i(6, 7, 8, 9); addToTable(base, idx, - stringCol("Key", "Two", "Two", "Two", "Two"), - stringCol("Color", "Red", "Blue", "Red", "Blue"), - intCol("Value", 5, 6, 7, 8)); + stringCol("Key", "Two", "Two", "Two", "Two"), + stringCol("Color", "Red", "Blue", "Red", "Blue"), + intCol("Value", 5, 6, 7, 8)); base.notifyListeners(idx, i(), i()); }); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { final Index idx = i(10, 11, 12, 13); addToTable(base, idx, - stringCol("Key", "Three", "Three", "Three", "Three"), - stringCol("Color", "Red", "Red", "Red", "Blue"), - intCol("Value", 9, 10, 11, 12)); + stringCol("Key", "Three", "Three", "Three", "Three"), + stringCol("Color", "Red", "Red", "Red", "Blue"), + intCol("Value", 9, 10, 11, 12)); base.notifyListeners(idx, i(), i()); }); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { final Index idx = i(14, 15, 16, 17); addToTable(base, idx, - stringCol("Key", "Four", "Four", "Four", "Four"), - stringCol("Color", "Blue", "Blue", "Blue", "Blue"), - intCol("Value", 13, 14, 15, 16)); + stringCol("Key", "Four", "Four", "Four", "Four"), + stringCol("Color", "Blue", "Blue", "Blue", "Blue"), + intCol("Value", 13, 14, 15, 16)); base.notifyListeners(idx, i(), i()); }); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { final Index idx = i(18, 19, 20, 21); addToTable(base, idx, - stringCol("Key", "Four", "Four", "Four", "Four"), - stringCol("Color", "Blue", "Blue", "Blue", "Blue"), - intCol("Value", 9, 10, 11, 12)); + stringCol("Key", "Four", "Four", "Four", "Four"), + 
stringCol("Color", "Blue", "Blue", "Blue", "Blue"), + intCol("Value", 9, 10, 11, 12)); base.notifyListeners(idx, i(), i()); }); diff --git a/DB/src/test/java/io/deephaven/db/v2/TestAggregatedSelect.java b/DB/src/test/java/io/deephaven/db/v2/TestAggregatedSelect.java index 2f5719d7508..ad9aa3e1a02 100644 --- a/DB/src/test/java/io/deephaven/db/v2/TestAggregatedSelect.java +++ b/DB/src/test/java/io/deephaven/db/v2/TestAggregatedSelect.java @@ -49,9 +49,9 @@ public Table createTestTable() { FileUtils.deleteRecursively(tableDirectory); TableDefinition tableDefinition = TableDefinition.of( - ColumnDefinition.ofString("USym"), - ColumnDefinition.ofDouble("Bid"), - ColumnDefinition.ofDouble("BidSize")); + ColumnDefinition.ofString("USym"), + ColumnDefinition.ofDouble("Bid"), + ColumnDefinition.ofDouble("BidSize")); final int size = 40; @@ -68,10 +68,9 @@ public Table createTestTable() { tableDirectory.mkdirs(); final File dest = new File(tableDirectory, "Table.parquet"); ParquetTools.writeTable( - newTable(stringCol("USym", symbol), doubleCol("Bid", bid), - doubleCol("BidSize", bidSize)), - dest, - tableDefinition); + newTable(stringCol("USym", symbol), doubleCol("Bid", bid), doubleCol("BidSize", bidSize)), + dest, + tableDefinition); return ParquetTools.readTable(dest); } @@ -177,8 +176,7 @@ public void testSerializedAggregation() throws IOException, ClassNotFoundExcepti ObjectOutputStream objectOutputStream = new ObjectOutputStream(byteArrayOutputStream); objectOutputStream.writeObject(toBeSerialized); - ByteArrayInputStream byteArrayInputStream = - new ByteArrayInputStream(byteArrayOutputStream.toByteArray()); + ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(byteArrayOutputStream.toByteArray()); ObjectInputStream objectInputStream = new ObjectInputStream(byteArrayInputStream); Table result = (Table) objectInputStream.readObject(); @@ -210,20 +208,17 @@ public void testSerializedAggregation() throws IOException, ClassNotFoundExcepti 
TestCase.assertEquals(expectedSize[ii], bidSizeArray.size()); TestCase.assertTrue(double.class.isAssignableFrom(bidArray.getComponentType())); - TestCase - .assertTrue(DbDoubleArray.class.isAssignableFrom(bidSizeArray.getComponentType())); + TestCase.assertTrue(DbDoubleArray.class.isAssignableFrom(bidSizeArray.getComponentType())); for (int jj = 0; jj < bidSizeArray.size(); ++jj) { DbDoubleArray bidSizeInnerArray = bidSizeArray.get(jj); - TestCase.assertTrue( - double.class.isAssignableFrom(bidSizeInnerArray.getComponentType())); + TestCase.assertTrue(double.class.isAssignableFrom(bidSizeInnerArray.getComponentType())); } } TestCase.assertEquals(98.0, DoubleNumericPrimitives.avg(bidColumn.get(0))); TestCase.assertEquals(98.5, DoubleNumericPrimitives.avg(bidColumn.get(1))); - TestCase.assertEquals(avgConsecutive(0, 7), - DoubleNumericPrimitives.avg(bidSizeColumn.get(0).get(0))); + TestCase.assertEquals(avgConsecutive(0, 7), DoubleNumericPrimitives.avg(bidSizeColumn.get(0).get(0))); Table checkPrimitives = result.update("BidAvg=avg(Bid)"); TableTools.show(checkPrimitives); @@ -231,8 +226,8 @@ public void testSerializedAggregation() throws IOException, ClassNotFoundExcepti private void dumpColumn(DataColumn dc) { boolean isArray = DbArrayBase.class.isAssignableFrom(dc.getType()); - System.out.println("Column Type: " + dc.getType().toString() + (isArray ? " (Array)" : "") - + ", ComponentType: " + dc.getComponentType()); + System.out.println("Column Type: " + dc.getType().toString() + (isArray ? 
" (Array)" : "") + ", ComponentType: " + + dc.getComponentType()); for (int ii = 0; ii < dc.size(); ++ii) { String prefix = dc.getName() + "[" + ii + "]"; @@ -248,10 +243,8 @@ private void dumpColumn(DataColumn dc) { private void dumpArray(String prefix, DbArrayBase dbArrayBase) { System.out.println(prefix + ": Array of " + dbArrayBase.getComponentType().toString()); String prefixsp = new String(new char[prefix.length()]).replace('\0', ' '); - final boolean containsArrays = - DbArrayBase.class.isAssignableFrom(dbArrayBase.getComponentType()); - final ArrayUtils.ArrayAccessor arrayAccessor = - ArrayUtils.getArrayAccessor(dbArrayBase.toArray()); + final boolean containsArrays = DbArrayBase.class.isAssignableFrom(dbArrayBase.getComponentType()); + final ArrayUtils.ArrayAccessor arrayAccessor = ArrayUtils.getArrayAccessor(dbArrayBase.toArray()); for (int jj = 0; jj < dbArrayBase.size(); ++jj) { if (containsArrays) { dumpArray(prefix + "[" + jj + "] ", (DbArrayBase) arrayAccessor.get(jj)); diff --git a/DB/src/test/java/io/deephaven/db/v2/TestByExternal.java b/DB/src/test/java/io/deephaven/db/v2/TestByExternal.java index 8e7ee86b161..3b31276f911 100644 --- a/DB/src/test/java/io/deephaven/db/v2/TestByExternal.java +++ b/DB/src/test/java/io/deephaven/db/v2/TestByExternal.java @@ -87,8 +87,7 @@ public void validate(String msg) { } else { final MatchFilter[] filters = new MatchFilter[groupByColumnSources.length]; for (int ii = 0; ii < groupByColumns.length; ++ii) { - filters[ii] = - new MatchFilter(groupByColumns[ii], ((SmartKey) key).values_[ii]); + filters[ii] = new MatchFilter(groupByColumns[ii], ((SmartKey) key).values_[ii]); } whereTable = originalTable.where(filters); } @@ -96,11 +95,10 @@ public void validate(String msg) { if (tableFromMap == null) { System.out.println("Missing key: " + key); } else { - System.out.println("Checking key: " + key + ", size: " + tableFromMap.size() - + " vs. 
" + whereTable.size()); + System.out.println( + "Checking key: " + key + ", size: " + tableFromMap.size() + " vs. " + whereTable.size()); } - final String diff = diff(tableFromMap, whereTable, 10, - EnumSet.of(TableDiff.DiffItems.DoublesExact)); + final String diff = diff(tableFromMap, whereTable, 10, EnumSet.of(TableDiff.DiffItems.DoublesExact)); Assert.assertEquals(msg, "", diff); } } @@ -117,13 +115,11 @@ public void testByExternal() { final int size = 50; final TstUtils.ColumnInfo[] columnInfo = new TstUtils.ColumnInfo[3]; - columnInfo[0] = - new TstUtils.ColumnInfo<>(new TstUtils.SetGenerator<>("a", "b", "c", "d", "e"), "Sym", + columnInfo[0] = new TstUtils.ColumnInfo<>(new TstUtils.SetGenerator<>("a", "b", "c", "d", "e"), "Sym", TstUtils.ColumnInfo.ColAttributes.Immutable); columnInfo[1] = new TstUtils.ColumnInfo<>(new TstUtils.IntGenerator(10, 20), "intCol", - TstUtils.ColumnInfo.ColAttributes.Immutable); - columnInfo[2] = - new TstUtils.ColumnInfo<>(new TstUtils.SetGenerator<>(10.1, 20.1, 30.1), "doubleCol"); + TstUtils.ColumnInfo.ColAttributes.Immutable); + columnInfo[2] = new TstUtils.ColumnInfo<>(new TstUtils.SetGenerator<>(10.1, 20.1, 30.1), "doubleCol"); final QueryTable queryTable = getTable(size, random, columnInfo); final EvalNuggetInterface[] en = new EvalNuggetInterface[] { @@ -140,8 +136,8 @@ public void testByExternal() { public void testErrorPropagation() { try (final ErrorExpectation ee = new ErrorExpectation()) { - final QueryTable table = TstUtils.testRefreshingTable(i(2, 4, 6), - col("Key", "A", "B", "A"), intCol("Int", 2, 4, 6)); + final QueryTable table = + TstUtils.testRefreshingTable(i(2, 4, 6), col("Key", "A", "B", "A"), intCol("Int", 2, 4, 6)); final TableMap byKey = table.byExternal("Key"); @@ -182,8 +178,8 @@ public void testErrorPropagation() { } public void testNewKeysAfterResultReleased() { - final QueryTable table = TstUtils.testRefreshingTable(i(2, 4, 6), col("Key", "A", "B", "A"), - intCol("Int", 2, 4, 6)); + final QueryTable 
table = + TstUtils.testRefreshingTable(i(2, 4, 6), col("Key", "A", "B", "A"), intCol("Int", 2, 4, 6)); final LivenessScope subTablesScope = new LivenessScope(); @@ -195,8 +191,7 @@ public void testNewKeysAfterResultReleased() { try (final SafeCloseable ignored2 = LivenessScopeStack.open()) { byKey = table.byExternal("Key"); - try ( - final SafeCloseable ignored3 = LivenessScopeStack.open(subTablesScope, false)) { + try (final SafeCloseable ignored3 = LivenessScopeStack.open(subTablesScope, false)) { tableA = byKey.get("A"); tableB = byKey.get("B"); } @@ -214,9 +209,7 @@ public void testNewKeysAfterResultReleased() { assertEquals("", TableTools.diff(tableB, table.where("Key=`B`"), 10)); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - TstUtils.addToTable(table, i(9), col("Key", "C"), intCol("Int", 10)); // Added row, - // wants to - // make new + TstUtils.addToTable(table, i(9), col("Key", "C"), intCol("Int", 10)); // Added row, wants to make new // state table.notifyListeners(i(9), i(), i()); }); @@ -226,14 +219,9 @@ public void testNewKeysAfterResultReleased() { assertNull(byKey.get("C")); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - TstUtils.addToTable(table, i(8), col("Key", "C"), intCol("Int", 11)); // Modified - // row, wants - // to move - // from - // existent - // state to - // nonexistent - // state + TstUtils.addToTable(table, i(8), col("Key", "C"), intCol("Int", 11)); // Modified row, wants to move + // from existent state to + // nonexistent state table.notifyListeners(i(), i(), i(8)); }); @@ -242,11 +230,8 @@ public void testNewKeysAfterResultReleased() { assertNull(byKey.get("C")); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - TstUtils.addToTable(table, i(8), col("Key", "C"), intCol("Int", 12)); // Modified - // row, - // staying in - // nonexistent - // state + TstUtils.addToTable(table, i(8), col("Key", "C"), intCol("Int", 12)); // Modified row, staying in + // nonexistent state table.notifyListeners(i(), i(), 
i(8)); }); @@ -255,14 +240,9 @@ public void testNewKeysAfterResultReleased() { assertNull(byKey.get("C")); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - TstUtils.addToTable(table, i(8), col("Key", "B"), intCol("Int", 13)); // Modified - // row, wants - // to move - // from - // nonexistent - // state to - // existent - // state + TstUtils.addToTable(table, i(8), col("Key", "B"), intCol("Int", 13)); // Modified row, wants to move + // from nonexistent state to + // existent state table.notifyListeners(i(), i(), i(8)); }); @@ -271,11 +251,8 @@ public void testNewKeysAfterResultReleased() { assertNull(byKey.get("C")); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - TstUtils.addToTable(table, i(8), col("Key", "B"), intCol("Int", 14)); // Modified - // row, - // staying in - // existent - // state + TstUtils.addToTable(table, i(8), col("Key", "B"), intCol("Int", 14)); // Modified row, staying in + // existent state table.notifyListeners(i(), i(), i(8)); }); @@ -304,8 +281,8 @@ public void testNewKeysAfterResultReleased() { } public void testNewKeysBeforeResultReleased() { - final QueryTable table = TstUtils.testRefreshingTable(i(2, 4, 6), col("Key", "A", "B", "A"), - intCol("Int", 2, 4, 6)); + final QueryTable table = + TstUtils.testRefreshingTable(i(2, 4, 6), col("Key", "A", "B", "A"), intCol("Int", 2, 4, 6)); try (final SafeCloseable ignored1 = LivenessScopeStack.open()) { @@ -327,9 +304,7 @@ public void testNewKeysBeforeResultReleased() { assertNull(byKey.get("C")); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - TstUtils.addToTable(table, i(9), col("Key", "C"), intCol("Int", 10)); // Added row, - // makes new - // state + TstUtils.addToTable(table, i(9), col("Key", "C"), intCol("Int", 10)); // Added row, makes new state table.notifyListeners(i(9), i(), i()); }); @@ -339,13 +314,9 @@ public void testNewKeysBeforeResultReleased() { assertEquals("", TableTools.diff(tableC, table.where("Key=`C`"), 10)); 
LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - TstUtils.addToTable(table, i(8), col("Key", "C"), intCol("Int", 11)); // Modified - // row, wants - // to move - // from - // original - // state to - // new state + TstUtils.addToTable(table, i(8), col("Key", "C"), intCol("Int", 11)); // Modified row, wants to move + // from original state to new + // state table.notifyListeners(i(), i(), i(8)); }); @@ -354,10 +325,8 @@ public void testNewKeysBeforeResultReleased() { assertEquals("", TableTools.diff(tableC, table.where("Key=`C`"), 10)); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - TstUtils.addToTable(table, i(8), col("Key", "C"), intCol("Int", 12)); // Modified - // row, - // staying in - // new state + TstUtils.addToTable(table, i(8), col("Key", "C"), intCol("Int", 12)); // Modified row, staying in new + // state table.notifyListeners(i(), i(), i(8)); }); @@ -366,12 +335,8 @@ public void testNewKeysBeforeResultReleased() { assertEquals("", TableTools.diff(tableC, table.where("Key=`C`"), 10)); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - TstUtils.addToTable(table, i(8), col("Key", "B"), intCol("Int", 13)); // Modified - // row, wants - // to move - // from new - // state to - // original + TstUtils.addToTable(table, i(8), col("Key", "B"), intCol("Int", 13)); // Modified row, wants to move + // from new state to original // state table.notifyListeners(i(), i(), i(8)); }); @@ -381,11 +346,8 @@ public void testNewKeysBeforeResultReleased() { assertEquals("", TableTools.diff(tableC, table.where("Key=`C`"), 10)); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - TstUtils.addToTable(table, i(8), col("Key", "B"), intCol("Int", 14)); // Modified - // row, - // staying in - // original - // state + TstUtils.addToTable(table, i(8), col("Key", "B"), intCol("Int", 14)); // Modified row, staying in + // original state table.notifyListeners(i(), i(), i(8)); }); @@ -418,8 +380,7 @@ public static class SleepHelper { 
@SuppressWarnings("unused") public T sleepValue(long duration, T retVal) { - System.out - .println((System.currentTimeMillis() - start) / 1000.0 + ": Reading: " + retVal); + System.out.println((System.currentTimeMillis() - start) / 1000.0 + ": Reading: " + retVal); try { Thread.sleep(duration); } catch (InterruptedException ignored) { @@ -432,14 +393,14 @@ public void testReleaseRaceRollup() { setExpectError(false); final ExecutorService pool = Executors.newFixedThreadPool(1); - final QueryTable rawTable = TstUtils.testRefreshingTable(i(2, 4, 6), - col("Key", "A", "B", "A"), intCol("Int", 2, 4, 6), intCol("I2", 1, 2, 3)); + final QueryTable rawTable = TstUtils.testRefreshingTable(i(2, 4, 6), col("Key", "A", "B", "A"), + intCol("Int", 2, 4, 6), intCol("I2", 1, 2, 3)); QueryScope.addParam("sleepHelper", new SleepHelper()); // make it slow to read key final Table table = rawTable.updateView("Key = sleepHelper.sleepValue(0, Key)", "K2=1", - "Int=sleepHelper.sleepValue(250, Int)"); + "Int=sleepHelper.sleepValue(250, Int)"); final SingletonLivenessManager mapManager; @@ -461,8 +422,8 @@ public void testReleaseRaceRollup() { final MutableObject> mutableFuture = new MutableObject<>(); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - TstUtils.addToTable(rawTable, i(10, 11, 12), col("Key", "C", "D", "E"), - intCol("Int", 8, 9, 10), intCol("I2", 6, 7, 8)); + TstUtils.addToTable(rawTable, i(10, 11, 12), col("Key", "C", "D", "E"), intCol("Int", 8, 9, 10), + intCol("I2", 6, 7, 8)); rawTable.notifyListeners(i(10, 11, 12), i(), i()); mutableFuture.setValue(pool.submit(() -> { @@ -518,27 +479,22 @@ private void testByExternalWithShifts(int seed) { final int size = 10; final TstUtils.ColumnInfo[] columnInfo = new TstUtils.ColumnInfo[3]; - columnInfo[0] = - new TstUtils.ColumnInfo<>(new TstUtils.SetGenerator<>("a", "b", "c", "d", "e"), "Sym", + columnInfo[0] = new TstUtils.ColumnInfo<>(new TstUtils.SetGenerator<>("a", "b", "c", "d", "e"), "Sym", 
TstUtils.ColumnInfo.ColAttributes.Immutable); columnInfo[1] = new TstUtils.ColumnInfo<>(new TstUtils.IntGenerator(10, 20), "intCol", - TstUtils.ColumnInfo.ColAttributes.Immutable); - columnInfo[2] = - new TstUtils.ColumnInfo<>(new TstUtils.SetGenerator<>(10.1, 20.1, 30.1), "doubleCol"); + TstUtils.ColumnInfo.ColAttributes.Immutable); + columnInfo[2] = new TstUtils.ColumnInfo<>(new TstUtils.SetGenerator<>(10.1, 20.1, 30.1), "doubleCol"); final QueryTable queryTable = getTable(size, random, columnInfo); - final Table simpleTable = - TableTools.newTable(TableTools.col("Sym", "a"), TableTools.intCol("intCol", 30), + final Table simpleTable = TableTools.newTable(TableTools.col("Sym", "a"), TableTools.intCol("intCol", 30), TableTools.doubleCol("doubleCol", 40.1)).updateView("K=-2L"); final Table source = TableTools.merge(simpleTable, queryTable.updateView("K=k")).flatten(); final EvalNuggetInterface[] en = new EvalNuggetInterface[] { EvalNugget.Sorted.from(() -> LiveTableMonitor.DEFAULT.sharedLock() - .computeLocked(() -> source.byExternal("Sym").merge()), "Sym"), - EvalNugget.Sorted.from( - () -> LiveTableMonitor.DEFAULT.sharedLock() - .computeLocked(() -> source.where("Sym=`a`").byExternal("Sym").merge()), - "Sym"), + .computeLocked(() -> source.byExternal("Sym").merge()), "Sym"), + EvalNugget.Sorted.from(() -> LiveTableMonitor.DEFAULT.sharedLock() + .computeLocked(() -> source.where("Sym=`a`").byExternal("Sym").merge()), "Sym"), }; final int steps = 50; diff --git a/DB/src/test/java/io/deephaven/db/v2/TestCodecColumns.java b/DB/src/test/java/io/deephaven/db/v2/TestCodecColumns.java index 4e42637e5c2..82411bd9ee4 100644 --- a/DB/src/test/java/io/deephaven/db/v2/TestCodecColumns.java +++ b/DB/src/test/java/io/deephaven/db/v2/TestCodecColumns.java @@ -26,8 +26,7 @@ public class TestCodecColumns { // TODO: Figure out how to come up with a BigInteger of a specified width. 
// private static final ColumnDefinition FIXED_WIDTH_BIG_INTEGER_COLUMN_DEFINITION; // static { - // final ColumnDefinition definition = new ColumnDefinition<>("FWBI", - // BigInteger.class); + // final ColumnDefinition definition = new ColumnDefinition<>("FWBI", BigInteger.class); // definition.setObjectCodecClass(BigIntegerCodec.class.getName()); // definition.setObjectCodecArguments(null); // definition.setObjectWidth(11); @@ -44,45 +43,38 @@ public class TestCodecColumns { final ParquetInstructions.Builder readBuilder = new ParquetInstructions.Builder(); final ParquetInstructions.Builder writeBuilder = new ParquetInstructions.Builder(); VARIABLE_WIDTH_BYTE_ARRAY_COLUMN_DEFINITION = - ColumnDefinition.fromGenericType("VWBA", byte[].class, byte.class); + ColumnDefinition.fromGenericType("VWBA", byte[].class, byte.class); writeBuilder.addColumnCodec("VWBA", SimpleByteArrayCodec.class.getName()); readBuilder.addColumnCodec("VWBA", SimpleByteArrayCodec.class.getName()); - VARIABLE_WIDTH_COLUMN_DEFINITION_2 = - ColumnDefinition.fromGenericType("VWCD", ColumnDefinition.class); - readBuilder.addColumnCodec("VWCD", ExternalizableCodec.class.getName(), - ColumnDefinition.class.getName()); - FIXED_WIDTH_BYTE_ARRAY_COLUMN_DEFINITION = - ColumnDefinition.fromGenericType("FWBA", byte[].class, byte.class); + VARIABLE_WIDTH_COLUMN_DEFINITION_2 = ColumnDefinition.fromGenericType("VWCD", ColumnDefinition.class); + readBuilder.addColumnCodec("VWCD", ExternalizableCodec.class.getName(), ColumnDefinition.class.getName()); + FIXED_WIDTH_BYTE_ARRAY_COLUMN_DEFINITION = ColumnDefinition.fromGenericType("FWBA", byte[].class, byte.class); writeBuilder.addColumnCodec("FWBA", SimpleByteArrayCodec.class.getName(), "9"); readBuilder.addColumnCodec("FWBA", SimpleByteArrayCodec.class.getName(), "9"); - VARIABLE_WIDTH_BIG_INTEGER_COLUMN_DEFINITION = - ColumnDefinition.fromGenericType("VWBI", BigInteger.class); + VARIABLE_WIDTH_BIG_INTEGER_COLUMN_DEFINITION = 
ColumnDefinition.fromGenericType("VWBI", BigInteger.class); writeBuilder.addColumnCodec("VWBI", BigIntegerCodec.class.getName()); readBuilder.addColumnCodec("VWBI", BigIntegerCodec.class.getName()); - VARIABLE_WIDTH_BIG_INTEGER_COLUMN_DEFINITION_S = - ColumnDefinition.fromGenericType("VWBIS", BigInteger.class); + VARIABLE_WIDTH_BIG_INTEGER_COLUMN_DEFINITION_S = ColumnDefinition.fromGenericType("VWBIS", BigInteger.class); readBuilder.addColumnCodec("VWBIS", SerializableCodec.class.getName()); expectedReadInstructions = readBuilder.build(); writeInstructions = writeBuilder.build(); } private static final TableDefinition TABLE_DEFINITION = TableDefinition.of( - VARIABLE_WIDTH_BYTE_ARRAY_COLUMN_DEFINITION, - VARIABLE_WIDTH_COLUMN_DEFINITION_2, - FIXED_WIDTH_BYTE_ARRAY_COLUMN_DEFINITION, - VARIABLE_WIDTH_BIG_INTEGER_COLUMN_DEFINITION, - VARIABLE_WIDTH_BIG_INTEGER_COLUMN_DEFINITION_S); + VARIABLE_WIDTH_BYTE_ARRAY_COLUMN_DEFINITION, + VARIABLE_WIDTH_COLUMN_DEFINITION_2, + FIXED_WIDTH_BYTE_ARRAY_COLUMN_DEFINITION, + VARIABLE_WIDTH_BIG_INTEGER_COLUMN_DEFINITION, + VARIABLE_WIDTH_BIG_INTEGER_COLUMN_DEFINITION_S); private static final Table TABLE = TableTools.newTable(TABLE_DEFINITION, - TableTools.col("VWBA", new byte[] {0, 1, 2}, null, new byte[] {3, 4, 5, 6}), - TableTools.col("VWCD", null, VARIABLE_WIDTH_BIG_INTEGER_COLUMN_DEFINITION, - VARIABLE_WIDTH_BYTE_ARRAY_COLUMN_DEFINITION), - TableTools.col("FWBA", new byte[] {7, 8, 9, 10, 11, 12, 13, 14, 15}, - new byte[] {16, 17, 18, 19, 20, 21, 22, 23, 24}, - new byte[] {0, 0, 0, 0, 0, 0, 0, 0, 0}), - TableTools.col("VWBI", BigInteger.valueOf(91), BigInteger.valueOf(111111111111111L), null), - TableTools.col("VWBIS", BigInteger.valueOf(94), null, - BigInteger.valueOf(111111111111112L))); + TableTools.col("VWBA", new byte[] {0, 1, 2}, null, new byte[] {3, 4, 5, 6}), + TableTools.col("VWCD", null, VARIABLE_WIDTH_BIG_INTEGER_COLUMN_DEFINITION, + VARIABLE_WIDTH_BYTE_ARRAY_COLUMN_DEFINITION), + TableTools.col("FWBA", new byte[] {7, 
8, 9, 10, 11, 12, 13, 14, 15}, + new byte[] {16, 17, 18, 19, 20, 21, 22, 23, 24}, new byte[] {0, 0, 0, 0, 0, 0, 0, 0, 0}), + TableTools.col("VWBI", BigInteger.valueOf(91), BigInteger.valueOf(111111111111111L), null), + TableTools.col("VWBIS", BigInteger.valueOf(94), null, BigInteger.valueOf(111111111111112L))); @Test public void doColumnsTest() throws IOException { @@ -91,13 +83,13 @@ public void doColumnsTest() throws IOException { try { ParquetTools.writeTable(TABLE, dest, TABLE.getDefinition(), writeInstructions); final MutableObject instructionsOut = new MutableObject<>(); - final Table result = ParquetTools.readParquetSchemaAndTable(dest, - ParquetInstructions.EMPTY, instructionsOut); + final Table result = + ParquetTools.readParquetSchemaAndTable(dest, ParquetInstructions.EMPTY, instructionsOut); TableTools.show(result); TestCase.assertEquals(TABLE_DEFINITION, result.getDefinition()); final ParquetInstructions readInstructions = instructionsOut.getValue(); - TestCase.assertTrue(ParquetInstructions - .sameColumnNamesAndCodecMappings(expectedReadInstructions, readInstructions)); + TestCase.assertTrue( + ParquetInstructions.sameColumnNamesAndCodecMappings(expectedReadInstructions, readInstructions)); TstUtils.assertTableEquals(TABLE, result); } finally { FileUtils.deleteRecursively(dir); diff --git a/DB/src/test/java/io/deephaven/db/v2/TestColumnDescriptionInheritance.java b/DB/src/test/java/io/deephaven/db/v2/TestColumnDescriptionInheritance.java index 64849c58560..03e192649ff 100644 --- a/DB/src/test/java/io/deephaven/db/v2/TestColumnDescriptionInheritance.java +++ b/DB/src/test/java/io/deephaven/db/v2/TestColumnDescriptionInheritance.java @@ -12,23 +12,23 @@ public class TestColumnDescriptionInheritance extends QueryTableTestBase { private Table genTestTable() { return TstUtils.testRefreshingTable(i(1, 2, 4, 6), - c("Sym", "aa", "bb", "cc", "dd"), - c("intCol", 10, 20, 40, 60), - c("doubleCol", 0.1, 0.2, 0.4, 0.6)); + c("Sym", "aa", "bb", "cc", "dd"), + 
c("intCol", 10, 20, 40, 60), + c("doubleCol", 0.1, 0.2, 0.4, 0.6)); } public void testMaybeCopyColumnDescriptions() { final Table sourceTable = genTestTable(); final Table withDescriptions = sourceTable - .withColumnDescription("Sym", "Symbol Column") - .withColumnDescription("doubleCol", "Double Column"); + .withColumnDescription("Sym", "Symbol Column") + .withColumnDescription("doubleCol", "Double Column"); System.out.println("Running basic \"maybeCopyColumnDescriptions\" tests..."); - final Table destTable = new QueryTable(sourceTable.getDefinition(), sourceTable.getIndex(), - sourceTable.getColumnSourceMap()); - final Map descriptionMap = (Map) withDescriptions - .getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE); + final Table destTable = + new QueryTable(sourceTable.getDefinition(), sourceTable.getIndex(), sourceTable.getColumnSourceMap()); + final Map descriptionMap = + (Map) withDescriptions.getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE); assertNotNull(descriptionMap); assertEquals(2, descriptionMap.size()); @@ -45,64 +45,64 @@ public void testMaybeCopyColumnDescriptions() { assertEquals(1, droppedColumnMap.size()); assertEquals(descriptionMap, withDescriptions - .flatten() - .sort("doubleCol") - .where("Sym in `aa`, `bb`, `cc`") - .reverse() - .firstBy("intCol") - .lastBy("doubleCol") - .getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE)); + .flatten() + .sort("doubleCol") + .where("Sym in `aa`, `bb`, `cc`") + .reverse() + .firstBy("intCol") + .lastBy("doubleCol") + .getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE)); assertEquals(descriptionMap, withDescriptions - .select("Sym", "doubleCol") - .getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE)); + .select("Sym", "doubleCol") + .getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE)); assertEquals(droppedColumnMap, withDescriptions - .select("Sym", "intCol") - .getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE)); + .select("Sym", "intCol") + .getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE)); 
assertEquals(droppedColumnMap, withDescriptions - .view("Sym", "New=doubleCol") - .getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE)); + .view("Sym", "New=doubleCol") + .getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE)); assertEquals(droppedColumnMap, withDescriptions - .dropColumns("doubleCol") - .getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE)); + .dropColumns("doubleCol") + .getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE)); assertNull(withDescriptions - .select("intCol") - .withColumnDescription("intCol", "This will be dropped") - .updateView("Sym=`abc`", "doubleCol=0.3") - .dropColumns("intCol") - .getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE)); + .select("intCol") + .withColumnDescription("intCol", "This will be dropped") + .updateView("Sym=`abc`", "doubleCol=0.3") + .dropColumns("intCol") + .getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE)); System.out.println("Running update-operation level column-description tests..."); assertEquals(descriptionMap, withDescriptions - .update("New=Sym", "New2=intCol + ` @ ` + doubleCol") - .getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE)); + .update("New=Sym", "New2=intCol + ` @ ` + doubleCol") + .getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE)); assertEquals(droppedColumnMap, withDescriptions - .update("New=Sym", "New2=intCol + ` @ ` + doubleCol", "doubleCol=intCol") - .getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE)); + .update("New=Sym", "New2=intCol + ` @ ` + doubleCol", "doubleCol=intCol") + .getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE)); assertEquals(descriptionMap, withDescriptions - .updateView("New=Sym", "New2=intCol + ` @ ` + doubleCol") - .getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE)); + .updateView("New=Sym", "New2=intCol + ` @ ` + doubleCol") + .getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE)); assertEquals(droppedColumnMap, withDescriptions - .updateView("New=Sym", "New2=intCol + ` @ ` + doubleCol", "doubleCol=intCol") - .getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE)); + 
.updateView("New=Sym", "New2=intCol + ` @ ` + doubleCol", "doubleCol=intCol") + .getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE)); assertEquals(descriptionMap, withDescriptions - .lazyUpdate("New=Sym", "New2=intCol + ` @ ` + doubleCol") - .getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE)); + .lazyUpdate("New=Sym", "New2=intCol + ` @ ` + doubleCol") + .getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE)); assertEquals(droppedColumnMap, withDescriptions - .lazyUpdate("New=Sym", "New2=intCol + ` @ ` + doubleCol", "doubleCol=intCol") - .getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE)); + .lazyUpdate("New=Sym", "New2=intCol + ` @ ` + doubleCol", "doubleCol=intCol") + .getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE)); assertNull(sourceTable - .updateView("Temp=Sym", "Sym=intCol", "intCol=doubleCol", "doubleCol=Temp") - .dropColumns("Temp") - .getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE)); + .updateView("Temp=Sym", "Sym=intCol", "intCol=doubleCol", "doubleCol=Temp") + .dropColumns("Temp") + .getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE)); assertNull(withDescriptions - .updateView("Temp=Sym", "Sym=intCol", "intCol=doubleCol", "doubleCol=Temp") - .dropColumns("Temp") - .getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE)); + .updateView("Temp=Sym", "Sym=intCol", "intCol=doubleCol", "doubleCol=Temp") + .dropColumns("Temp") + .getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE)); System.out.println("Running rename-operation level column-description tests..."); @@ -111,29 +111,29 @@ public void testMaybeCopyColumnDescriptions() { assertEquals(2, renamedColumnMap.size()); assertEquals(renamedColumnMap, withDescriptions - .renameColumns("RenamedSym=Sym") - .getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE)); + .renameColumns("RenamedSym=Sym") + .getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE)); assertEquals(descriptionMap, withDescriptions - .renameColumns("RenamedSym=Sym") - .renameColumns("Sym=RenamedSym") - .getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE)); + 
.renameColumns("RenamedSym=Sym") + .renameColumns("Sym=RenamedSym") + .getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE)); assertEquals(droppedColumnMap, withDescriptions - .renameColumns("RenamedSym=Sym") - .renameColumns("Sym=RenamedSym") - .select("Sym", "intCol") - .getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE)); + .renameColumns("RenamedSym=Sym") + .renameColumns("Sym=RenamedSym") + .select("Sym", "intCol") + .getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE)); assertNull(sourceTable - .renameColumns("RenamedSym=Sym") - .getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE)); + .renameColumns("RenamedSym=Sym") + .getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE)); System.out.println("Running join-operation level column-description tests..."); final Table rightTable = withDescriptions - .renameColumns("rightInt=intCol", "rightDouble=doubleCol") - .withColumnDescription("Sym", "Ignored Sym"); + .renameColumns("rightInt=intCol", "rightDouble=doubleCol") + .withColumnDescription("Sym", "Ignored Sym"); final Map rightMap = - (Map) rightTable.getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE); + (Map) rightTable.getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE); assertNotNull(rightMap); assertEquals(2, rightMap.size()); @@ -142,19 +142,19 @@ public void testMaybeCopyColumnDescriptions() { assertEquals(3, joinedColumnMap.size()); assertEquals(joinedColumnMap, withDescriptions - .join(rightTable, "Sym", "rightInt,rightDouble") - .getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE)); + .join(rightTable, "Sym", "rightInt,rightDouble") + .getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE)); assertEquals(joinedColumnMap, withDescriptions - .naturalJoin(rightTable, "Sym", "rightInt,rightDouble") - .getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE)); + .naturalJoin(rightTable, "Sym", "rightInt,rightDouble") + .getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE)); assertEquals(joinedColumnMap, withDescriptions - .exactJoin(rightTable, "Sym", "rightInt,rightDouble") - 
.getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE)); + .exactJoin(rightTable, "Sym", "rightInt,rightDouble") + .getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE)); assertNull(sourceTable - .naturalJoin(sourceTable.renameColumns("rightInt=intCol", "rightDouble=doubleCol"), - "Sym", "rightInt,rightDouble") - .getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE)); + .naturalJoin(sourceTable.renameColumns("rightInt=intCol", "rightDouble=doubleCol"), "Sym", + "rightInt,rightDouble") + .getAttribute(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE)); System.out.println("Success"); diff --git a/DB/src/test/java/io/deephaven/db/v2/TestColumnRenderersBuilder.java b/DB/src/test/java/io/deephaven/db/v2/TestColumnRenderersBuilder.java index c98397b709b..e51e0aa2cc7 100644 --- a/DB/src/test/java/io/deephaven/db/v2/TestColumnRenderersBuilder.java +++ b/DB/src/test/java/io/deephaven/db/v2/TestColumnRenderersBuilder.java @@ -7,9 +7,9 @@ public class TestColumnRenderersBuilder extends TestCase { public static final String TEST_DIRECTIVE_ENUM = "A=DEFAULT,B=PROGRESS_BAR,C=PROGRESS_BAR,"; public static final String TEST_DIRECTIVE_CLASS = - "A=io.deephaven.gui.table.ColumnRenderer,B=io.deephaven.gui.table.TextAreaColumnRenderer,C=io.deephaven.console.events.ProgressRenderer,"; + "A=io.deephaven.gui.table.ColumnRenderer,B=io.deephaven.gui.table.TextAreaColumnRenderer,C=io.deephaven.console.events.ProgressRenderer,"; public static final String TEST_DIRECTIVE_MIXED = - "A=DEFAULT,B=io.deephaven.gui.table.TextAreaColumnRenderer,C=PROGRESS_BAR,"; + "A=DEFAULT,B=io.deephaven.gui.table.TextAreaColumnRenderer,C=PROGRESS_BAR,"; @Test public void testBuildDirectiveEnum() { @@ -30,23 +30,17 @@ public void testFromDirectiveEnum() { assertTrue(builder.isColumnRendererSet("C")); assertFalse(builder.isColumnRendererSet("D")); - assertEquals(ColumnRenderersBuilder.ColumnRendererType.DEFAULT, - builder.getRendererType("A")); - assertEquals(ColumnRenderersBuilder.ColumnRendererType.PROGRESS_BAR, - 
builder.getRendererType("B")); - assertEquals(ColumnRenderersBuilder.ColumnRendererType.PROGRESS_BAR, - builder.getRendererType("C")); + assertEquals(ColumnRenderersBuilder.ColumnRendererType.DEFAULT, builder.getRendererType("A")); + assertEquals(ColumnRenderersBuilder.ColumnRendererType.PROGRESS_BAR, builder.getRendererType("B")); + assertEquals(ColumnRenderersBuilder.ColumnRendererType.PROGRESS_BAR, builder.getRendererType("C")); assertNull(builder.getRendererType("D")); - assertEquals( - builder.getRenderClassForType(ColumnRenderersBuilder.ColumnRendererType.DEFAULT), - builder.getRenderClassName("A")); - assertEquals( - builder.getRenderClassForType(ColumnRenderersBuilder.ColumnRendererType.PROGRESS_BAR), - builder.getRenderClassName("B")); - assertEquals( - builder.getRenderClassForType(ColumnRenderersBuilder.ColumnRendererType.PROGRESS_BAR), - builder.getRenderClassName("C")); + assertEquals(builder.getRenderClassForType(ColumnRenderersBuilder.ColumnRendererType.DEFAULT), + builder.getRenderClassName("A")); + assertEquals(builder.getRenderClassForType(ColumnRenderersBuilder.ColumnRendererType.PROGRESS_BAR), + builder.getRenderClassName("B")); + assertEquals(builder.getRenderClassForType(ColumnRenderersBuilder.ColumnRendererType.PROGRESS_BAR), + builder.getRenderClassName("C")); assertNull(builder.getRenderClassName("D")); } @@ -75,10 +69,8 @@ public void testFromDirectiveClass() { assertNull(builder.getRendererType("D")); assertEquals("io.deephaven.gui.table.ColumnRenderer", builder.getRenderClassName("A")); - assertEquals("io.deephaven.gui.table.TextAreaColumnRenderer", - builder.getRenderClassName("B")); - assertEquals("io.deephaven.console.events.ProgressRenderer", - builder.getRenderClassName("C")); + assertEquals("io.deephaven.gui.table.TextAreaColumnRenderer", builder.getRenderClassName("B")); + assertEquals("io.deephaven.console.events.ProgressRenderer", builder.getRenderClassName("C")); assertNull(builder.getRenderClassName("D")); } @@ -101,21 +93,16 
@@ public void testFromDirectiveMixed() { assertTrue(builder.isColumnRendererSet("C")); assertFalse(builder.isColumnRendererSet("D")); - assertEquals(ColumnRenderersBuilder.ColumnRendererType.DEFAULT, - builder.getRendererType("A")); + assertEquals(ColumnRenderersBuilder.ColumnRendererType.DEFAULT, builder.getRendererType("A")); assertNull(builder.getRendererType("B")); - assertEquals(ColumnRenderersBuilder.ColumnRendererType.PROGRESS_BAR, - builder.getRendererType("C")); + assertEquals(ColumnRenderersBuilder.ColumnRendererType.PROGRESS_BAR, builder.getRendererType("C")); assertNull(builder.getRendererType("D")); - assertEquals( - builder.getRenderClassForType(ColumnRenderersBuilder.ColumnRendererType.DEFAULT), - builder.getRenderClassName("A")); - assertEquals("io.deephaven.gui.table.TextAreaColumnRenderer", - builder.getRenderClassName("B")); - assertEquals( - builder.getRenderClassForType(ColumnRenderersBuilder.ColumnRendererType.PROGRESS_BAR), - builder.getRenderClassName("C")); + assertEquals(builder.getRenderClassForType(ColumnRenderersBuilder.ColumnRendererType.DEFAULT), + builder.getRenderClassName("A")); + assertEquals("io.deephaven.gui.table.TextAreaColumnRenderer", builder.getRenderClassName("B")); + assertEquals(builder.getRenderClassForType(ColumnRenderersBuilder.ColumnRendererType.PROGRESS_BAR), + builder.getRenderClassName("C")); assertNull(builder.getRenderClassName("D")); } diff --git a/DB/src/test/java/io/deephaven/db/v2/TestComboBy.java b/DB/src/test/java/io/deephaven/db/v2/TestComboBy.java index c26d460b513..13072013e75 100644 --- a/DB/src/test/java/io/deephaven/db/v2/TestComboBy.java +++ b/DB/src/test/java/io/deephaven/db/v2/TestComboBy.java @@ -43,7 +43,7 @@ public class TestComboBy extends LiveTableTestCase { private static final boolean ENABLE_COMPILER_TOOLS_LOGGING = Configuration.getInstance() - .getBooleanForClassWithDefault(TestComboBy.class, "CompilerTools.logEnabled", false); + .getBooleanForClassWithDefault(TestComboBy.class, 
"CompilerTools.logEnabled", false); private boolean oldLogEnabled; private boolean oldCheckLtm; @@ -75,14 +75,12 @@ public void testBy() { assertEquals(10, table.size()); assertEquals(2, table.by("A").size()); - AggregationFormulaStateFactory minFactory = - new AggregationFormulaStateFactory("min(each)", "each"); - AggregationFormulaStateFactory maxFactory = - new AggregationFormulaStateFactory("max(each)", "each"); + AggregationFormulaStateFactory minFactory = new AggregationFormulaStateFactory("min(each)", "each"); + AggregationFormulaStateFactory maxFactory = new AggregationFormulaStateFactory("max(each)", "each"); ComboAggregateFactory minMaxFactory = - new ComboAggregateFactory(new ComboAggregateFactory.ComboByImpl(minFactory, "Min=B"), - new ComboAggregateFactory.ComboByImpl(maxFactory, "Max=B")); + new ComboAggregateFactory(new ComboAggregateFactory.ComboByImpl(minFactory, "Min=B"), + new ComboAggregateFactory.ComboByImpl(maxFactory, "Max=B")); Table minMax = table.by(minMaxFactory, "A"); show(minMax); @@ -94,9 +92,8 @@ public void testBy() { assertEquals(10, dc.get(0)); assertEquals(8, dc.get(1)); - ComboAggregateFactory doubleCountFactory = - new ComboAggregateFactory(new ComboAggregateFactory.CountComboBy("Count1"), - new ComboAggregateFactory.CountComboBy("Count2")); + ComboAggregateFactory doubleCountFactory = new ComboAggregateFactory( + new ComboAggregateFactory.CountComboBy("Count1"), new ComboAggregateFactory.CountComboBy("Count2")); Table doubleCounted = table.by(doubleCountFactory, "A"); show(doubleCounted); assertEquals(2, doubleCounted.size()); @@ -108,40 +105,37 @@ public void testBy() { assertEquals(6L, dc.get(0)); assertEquals(4L, dc.get(1)); - // Lets do some interesting incremental computations, as this is the use case that I'm - // really aiming at. For + // Lets do some interesting incremental computations, as this is the use case that I'm really aiming at. For // example, getting the count, and average on each update. 
// It would be nice to do a min and a max as well, - // which can often be efficient (but sometimes could also require linear work). That isn't - // related to this test + // which can often be efficient (but sometimes could also require linear work). That isn't related to this test // but more related to the underlying min and max. - // Interestingly, the factories appear to be single use. If you try to reuse a factory it - // fails with an NPE. + // Interestingly, the factories appear to be single use. If you try to reuse a factory it fails with an NPE. // minFactory = new AggregationFormulaStateFactory("min(each)", "each"); // maxFactory = new AggregationFormulaStateFactory("max(each)", "each"); ComboAggregateFactory summaryStatisticsFactory = AggCombo( - AggCount("Count"), - AggMin("MinB=B", "MinC=C"), - AggMed("MedB=B", "MedC=C"), - AggMax("MaxB=B", "MaxC=C"), - AggAvg("AvgB=B", "AvgC=C"), - AggStd("StdB=B", "StdC=C"), - AggSum("SumB=B", "SumC=C"), - AggCountDistinct("DistinctA=A"), - AggCountDistinct("DistinctB=B")); + AggCount("Count"), + AggMin("MinB=B", "MinC=C"), + AggMed("MedB=B", "MedC=C"), + AggMax("MaxB=B", "MaxC=C"), + AggAvg("AvgB=B", "AvgC=C"), + AggStd("StdB=B", "StdC=C"), + AggSum("SumB=B", "SumC=C"), + AggCountDistinct("DistinctA=A"), + AggCountDistinct("DistinctB=B")); ComboAggregateFactory percentilesFactory = AggCombo( - AggPct(0.25, "Pct01B=B", "Pct01C=C"), - AggPct(0.25, "Pct25B=B", "Pct25C=C"), - AggPct(0.75, "Pct75B=B", "Pct75C=C"), - AggPct(0.75, true, "Pct75T_B=B", "Pct75T_C=C"), - AggPct(0.75, false, "Pct75F_B=B", "Pct75F_C=C"), - AggPct(0.99, "Pct99B=B", "Pct99C=C"), - AggPct(0.50, "Pct50B=B", "Pct50C=C"), - AggPct(0.50, true, "Pct50T_B=B", "Pct50T_C=C"), - AggPct(0.50, false, "Pct50F_B=B", "Pct50F_C=C")); + AggPct(0.25, "Pct01B=B", "Pct01C=C"), + AggPct(0.25, "Pct25B=B", "Pct25C=C"), + AggPct(0.75, "Pct75B=B", "Pct75C=C"), + AggPct(0.75, true, "Pct75T_B=B", "Pct75T_C=C"), + AggPct(0.75, false, "Pct75F_B=B", "Pct75F_C=C"), + 
AggPct(0.99, "Pct99B=B", "Pct99C=C"), + AggPct(0.50, "Pct50B=B", "Pct50C=C"), + AggPct(0.50, true, "Pct50T_B=B", "Pct50T_C=C"), + AggPct(0.50, false, "Pct50F_B=B", "Pct50F_C=C")); Double[] doubles = new Double[10]; int bLength = Array.getLength(bHolder.data); @@ -168,46 +162,42 @@ public void testComboByMinMaxTypes() { final int size = 10; final ColumnInfo[] columnInfo; final QueryTable queryTable = getTable(size, random, - columnInfo = initColumnInfos( - new String[] {"Sym", "intCol", "shortCol", "byteCol", "longCol", "charCol", - "doubleCol", "floatCol", "DateTime", "BoolCol", "bigI", "bigD"}, - new TstUtils.SetGenerator<>("a", "b", "c", "d"), - new TstUtils.IntGenerator(10, 100), - new TstUtils.ShortGenerator(), - new TstUtils.ByteGenerator(), - new TstUtils.LongGenerator(), - new TstUtils.IntGenerator(10, 100), - new TstUtils.SetGenerator<>(10.1, 20.1, 30.1), - new TstUtils.FloatGenerator(0, 10.0f), - new TstUtils.UnsortedDateTimeGenerator(convertDateTime("2020-03-17T12:00:00 NY"), - convertDateTime("2020-03-18T12:00:00 NY")), - new TstUtils.BooleanGenerator(), - new TstUtils.BigIntegerGenerator(), - new TstUtils.BigDecimalGenerator())); + columnInfo = initColumnInfos( + new String[] {"Sym", "intCol", "shortCol", "byteCol", "longCol", "charCol", "doubleCol", + "floatCol", "DateTime", "BoolCol", "bigI", "bigD"}, + new TstUtils.SetGenerator<>("a", "b", "c", "d"), + new TstUtils.IntGenerator(10, 100), + new TstUtils.ShortGenerator(), + new TstUtils.ByteGenerator(), + new TstUtils.LongGenerator(), + new TstUtils.IntGenerator(10, 100), + new TstUtils.SetGenerator<>(10.1, 20.1, 30.1), + new TstUtils.FloatGenerator(0, 10.0f), + new TstUtils.UnsortedDateTimeGenerator(convertDateTime("2020-03-17T12:00:00 NY"), + convertDateTime("2020-03-18T12:00:00 NY")), + new TstUtils.BooleanGenerator(), + new TstUtils.BigIntegerGenerator(), + new TstUtils.BigDecimalGenerator())); final EvalNuggetInterface[] en = new EvalNuggetInterface[] { - EvalNugget.from(() -> queryTable - 
.by(AggCombo(AggMin(queryTable.getDefinition().getColumnNamesArray())))), - EvalNugget.from(() -> queryTable - .by(AggCombo(AggMax(queryTable.getDefinition().getColumnNamesArray())))), + EvalNugget + .from(() -> queryTable.by(AggCombo(AggMin(queryTable.getDefinition().getColumnNamesArray())))), + EvalNugget + .from(() -> queryTable.by(AggCombo(AggMax(queryTable.getDefinition().getColumnNamesArray())))), new QueryTableTest.TableComparator( - queryTable - .by(AggCombo(AggMin(queryTable.getDefinition().getColumnNamesArray()))), - "AggCombo", - queryTable.minBy(), - "MinBy"), + queryTable.by(AggCombo(AggMin(queryTable.getDefinition().getColumnNamesArray()))), + "AggCombo", + queryTable.minBy(), + "MinBy"), EvalNugget.Sorted.from( - () -> queryTable.by( - AggCombo(AggMin(queryTable.getDefinition().getColumnNamesArray())), "Sym"), - "Sym"), + () -> queryTable.by(AggCombo(AggMin(queryTable.getDefinition().getColumnNamesArray())), "Sym"), + "Sym"), new QueryTableTest.TableComparator( - queryTable - .by(AggCombo(AggMin(queryTable.getDefinition().getColumnNamesArray())), - "Sym") - .sort("Sym"), - "AggCombo", - queryTable.minBy("Sym").sort("Sym"), - "MinBy"), + queryTable.by(AggCombo(AggMin(queryTable.getDefinition().getColumnNamesArray())), "Sym") + .sort("Sym"), + "AggCombo", + queryTable.minBy("Sym").sort("Sym"), + "MinBy"), }; final int steps = 100; // 8; for (int step = 0; step < steps; step++) { @@ -228,13 +218,13 @@ private void testComboByIncremental(final String ctxt, final int size) { Random random = new Random(0); ColumnInfo columnInfo[]; final QueryTable queryTable = getTable(size, random, - columnInfo = initColumnInfos( - new String[] {"Sym", "intCol", "intColNulls", "doubleCol", "doubleColNulls"}, - new TstUtils.SetGenerator<>("a", "b", "c", "d"), - new TstUtils.IntGenerator(10, 100), - new TstUtils.IntGenerator(10, 100, .1), - new TstUtils.SetGenerator<>(10.1, 20.1, 30.1), - new TstUtils.SetGenerator<>(10.1, 20.1, 30.1, QueryConstants.NULL_DOUBLE))); + 
columnInfo = + initColumnInfos(new String[] {"Sym", "intCol", "intColNulls", "doubleCol", "doubleColNulls"}, + new TstUtils.SetGenerator<>("a", "b", "c", "d"), + new TstUtils.IntGenerator(10, 100), + new TstUtils.IntGenerator(10, 100, .1), + new TstUtils.SetGenerator<>(10.1, 20.1, 30.1), + new TstUtils.SetGenerator<>(10.1, 20.1, 30.1, QueryConstants.NULL_DOUBLE))); QueryLibrary.importClass(TestComboBy.class); @@ -242,185 +232,169 @@ private void testComboByIncremental(final String ctxt, final int size) { EvalNuggetInterface en[] = new EvalNuggetInterface[] { new EvalNugget() { public Table e() { - return queryTable - .by(AggCombo(Agg(AggType.Avg, "MeanI=intCol", "MeanD=doubleCol"), - Agg(AggType.Std, "StdI=intCol", "StdD=doubleCol")), "Sym") - .sort("Sym"); + return queryTable.by(AggCombo(Agg(AggType.Avg, "MeanI=intCol", "MeanD=doubleCol"), + Agg(AggType.Std, "StdI=intCol", "StdD=doubleCol")), "Sym").sort("Sym"); } }, new EvalNugget() { public Table e() { - return queryTable.by(AggCombo( - AggFormula("min(each)", "each", "MinI=intCol", "MinD=doubleCol"), - AggFormula("max(each)", "each", "MaxI=intCol")), "Sym").sort("Sym"); + return queryTable.by(AggCombo(AggFormula("min(each)", "each", "MinI=intCol", "MinD=doubleCol"), + AggFormula("max(each)", "each", "MaxI=intCol")), "Sym").sort("Sym"); } }, new QueryTableTest.TableComparator( - queryTable.by("Sym").view("Sym", "MinI=min(intCol)", "MinD=min(doubleCol)") - .sort("Sym"), - "UpdateView", - queryTable.by(new ComboAggregateFactory( - Agg(new MinMaxByStateFactoryImpl(true), "MinI=intCol", "MinD=doubleCol")), - "Sym").sort("Sym"), - "ComboBy"), + queryTable.by("Sym").view("Sym", "MinI=min(intCol)", "MinD=min(doubleCol)").sort("Sym"), + "UpdateView", + queryTable.by( + new ComboAggregateFactory( + Agg(new MinMaxByStateFactoryImpl(true), "MinI=intCol", "MinD=doubleCol")), + "Sym").sort("Sym"), + "ComboBy"), new QueryTableTest.TableComparator( - queryTable.by("Sym").view("Sym", "MaxI=max(intCol)", "MaxD=max(doubleCol)") - 
.sort("Sym"), - "UpdateView", - queryTable - .by(AggCombo(Agg(AggType.Max, "MaxI=intCol", "MaxD=doubleCol")), "Sym") - .sort("Sym"), - "ComboBy"), + queryTable.by("Sym").view("Sym", "MaxI=max(intCol)", "MaxD=max(doubleCol)").sort("Sym"), + "UpdateView", + queryTable.by(AggCombo(Agg(AggType.Max, "MaxI=intCol", "MaxD=doubleCol")), "Sym").sort("Sym"), + "ComboBy"), new QueryTableTest.TableComparator( - queryTable.by("Sym").view("Sym", "MinI=min(intCol)", "MaxI=max(intCol)") - .sort("Sym"), - "UpdateView", - queryTable - .by(new ComboAggregateFactory( - Agg(new MinMaxByStateFactoryImpl(true), "MinI=intCol"), - Agg(new MinMaxByStateFactoryImpl(false), "MaxI=intCol")), "Sym") - .sort("Sym"), - "ComboBy"), + queryTable.by("Sym").view("Sym", "MinI=min(intCol)", "MaxI=max(intCol)").sort("Sym"), + "UpdateView", + queryTable.by(new ComboAggregateFactory(Agg(new MinMaxByStateFactoryImpl(true), "MinI=intCol"), + Agg(new MinMaxByStateFactoryImpl(false), "MaxI=intCol")), "Sym").sort("Sym"), + "ComboBy"), new QueryTableTest.TableComparator( - queryTable.by("Sym").view("Sym", "MinD=min(doubleCol)", "MaxD=max(doubleCol)") - .sort("Sym"), - "UpdateView", - queryTable - .by(AggCombo(Agg(new MinMaxByStateFactoryImpl(true), "MinD=doubleCol"), - Agg(new MinMaxByStateFactoryImpl(false), "MaxD=doubleCol")), "Sym") - .sort("Sym"), - "ComboBy"), + queryTable.by("Sym").view("Sym", "MinD=min(doubleCol)", "MaxD=max(doubleCol)").sort("Sym"), + "UpdateView", + queryTable.by(AggCombo(Agg(new MinMaxByStateFactoryImpl(true), "MinD=doubleCol"), + Agg(new MinMaxByStateFactoryImpl(false), "MaxD=doubleCol")), "Sym").sort("Sym"), + "ComboBy"), new QueryTableTest.TableComparator( - queryTable.by("Sym") - .view("Sym", "MinD=min(doubleCol)", "MaxI=max(intCol)", - "FirstD=first(doubleCol)", "LastI=last(intCol)") - .sort("Sym"), - "UpdateView", - queryTable.by(AggCombo( - AggMin("MinD=doubleCol"), - AggMax("MaxI=intCol"), - AggFirst("FirstD=doubleCol"), - AggLast("LastI=intCol")), "Sym").sort("Sym"), - 
"ComboBy"), + queryTable.by("Sym") + .view("Sym", "MinD=min(doubleCol)", "MaxI=max(intCol)", "FirstD=first(doubleCol)", + "LastI=last(intCol)") + .sort("Sym"), + "UpdateView", + queryTable.by(AggCombo( + AggMin("MinD=doubleCol"), + AggMax("MaxI=intCol"), + AggFirst("FirstD=doubleCol"), + AggLast("LastI=intCol")), "Sym").sort("Sym"), + "ComboBy"), new QueryTableTest.TableComparator( - queryTable.by("Sym") - .view("Sym", "MinD=min(doubleCol)", "MaxD=max(doubleCol)", - "MinI=min(intCol)", "MaxI=max(intCol)", "LastD=last(doubleCol)", - "FirstD=first(doubleCol)", "FirstI=first(intCol)", "LastI=last(intCol)") - .sort("Sym"), - "UpdateView", - queryTable.by(AggCombo( - AggMin("MinD=doubleCol"), - AggMax("MaxD=doubleCol"), - AggMin("MinI=intCol"), - AggMax("MaxI=intCol"), - AggLast("LastD=doubleCol"), - AggFirst("FirstD=doubleCol"), - AggFirst("FirstI=intCol"), - AggLast("LastI=intCol")), "Sym").sort("Sym"), - "ComboBy"), + queryTable.by("Sym") + .view("Sym", "MinD=min(doubleCol)", "MaxD=max(doubleCol)", "MinI=min(intCol)", + "MaxI=max(intCol)", "LastD=last(doubleCol)", "FirstD=first(doubleCol)", + "FirstI=first(intCol)", "LastI=last(intCol)") + .sort("Sym"), + "UpdateView", + queryTable.by(AggCombo( + AggMin("MinD=doubleCol"), + AggMax("MaxD=doubleCol"), + AggMin("MinI=intCol"), + AggMax("MaxI=intCol"), + AggLast("LastD=doubleCol"), + AggFirst("FirstD=doubleCol"), + AggFirst("FirstI=intCol"), + AggLast("LastI=intCol")), "Sym").sort("Sym"), + "ComboBy"), new QueryTableTest.TableComparator( - queryTable.by().view("MinD=min(doubleCol)", "MaxI=max(intCol)", - "MaxD=max(doubleCol)", "MinI=min(intCol)", - "FirstD=first(doubleCol)", "LastI=last(intCol)", "LastD=last(doubleCol)", - "FirstI=first(intCol)"), - "UpdateView", - queryTable.by(AggCombo( - AggMin("MinD=doubleCol"), - AggMax("MaxI=intCol"), - AggMax("MaxD=doubleCol"), - AggMin("MinI=intCol"), - AggFirst("FirstD=doubleCol"), - AggLast("LastI=intCol"), - AggLast("LastD=doubleCol"), - AggFirst("FirstI=intCol"))), - 
"ComboBy"), + queryTable.by().view("MinD=min(doubleCol)", "MaxI=max(intCol)", "MaxD=max(doubleCol)", + "MinI=min(intCol)", + "FirstD=first(doubleCol)", "LastI=last(intCol)", "LastD=last(doubleCol)", + "FirstI=first(intCol)"), + "UpdateView", + queryTable.by(AggCombo( + AggMin("MinD=doubleCol"), + AggMax("MaxI=intCol"), + AggMax("MaxD=doubleCol"), + AggMin("MinI=intCol"), + AggFirst("FirstD=doubleCol"), + AggLast("LastI=intCol"), + AggLast("LastD=doubleCol"), + AggFirst("FirstI=intCol"))), + "ComboBy"), new QueryTableTest.TableComparator( - queryTable.by("Sym") - .view("Sym", "AvgD=avg(doubleCol)", "SumD=sum(doubleCol)", - "VarD=var(doubleCol)", "StdD=std(doubleCol)", "intCol") - .sort("Sym"), - "UpdateView", - queryTable.by(new ComboAggregateFactory( - AggAvg("AvgD=doubleCol"), - AggSum("SumD=doubleCol"), - AggVar("VarD=doubleCol"), - AggStd("StdD=doubleCol"), - AggArray("intCol")), "Sym").sort("Sym"), - "ComboBy"), + queryTable.by("Sym") + .view("Sym", "AvgD=avg(doubleCol)", "SumD=sum(doubleCol)", "VarD=var(doubleCol)", + "StdD=std(doubleCol)", "intCol") + .sort("Sym"), + "UpdateView", + queryTable.by(new ComboAggregateFactory( + AggAvg("AvgD=doubleCol"), + AggSum("SumD=doubleCol"), + AggVar("VarD=doubleCol"), + AggStd("StdD=doubleCol"), + AggArray("intCol")), "Sym").sort("Sym"), + "ComboBy"), new QueryTableTest.TableComparator( - queryTable.by("Sym").view("Sym", - "MedD=median(doubleCol)", - "Pct01D=percentile(doubleCol, 0.01)", - "Pct01I=(int)TestComboBy.percentile(intCol, 0.01)", - "Pct05D=percentile(doubleCol, 0.05)", - "Pct05I=(int)TestComboBy.percentile(intCol, 0.05)", - "Pct25D=percentile(doubleCol, 0.25)", - "Pct25I=(int)TestComboBy.percentile(intCol, 0.25)", - "Pct50D=percentile(doubleCol, 0.50)", - "Pct50I=(int)TestComboBy.percentile(intCol, 0.50)", - "Pct65D=percentile(doubleCol, 0.65)", - "Pct65I=(int)TestComboBy.percentile(intCol, 0.65)", - "Pct90D=percentile(doubleCol, 0.90)", - "Pct90I=(int)TestComboBy.percentile(intCol, 0.90)", - 
"Pct99D=percentile(doubleCol, 0.99)", - "Pct99I=(int)TestComboBy.percentile(intCol, 0.99)").sort("Sym"), - queryTable.by(AggCombo( - AggMed("MedD=doubleCol"), - AggPct(0.01, "Pct01D=doubleCol", "Pct01I=intCol"), - AggPct(0.05, "Pct05D=doubleCol", "Pct05I=intCol"), - AggPct(0.25, "Pct25D=doubleCol", "Pct25I=intCol"), - AggPct(0.50, "Pct50D=doubleCol", "Pct50I=intCol"), - AggPct(0.65, "Pct65D=doubleCol", "Pct65I=intCol"), - AggPct(0.90, "Pct90D=doubleCol", "Pct90I=intCol"), - AggPct(0.99, "Pct99D=doubleCol", "Pct99I=intCol")), "Sym").sort("Sym")), + queryTable.by("Sym").view("Sym", + "MedD=median(doubleCol)", + "Pct01D=percentile(doubleCol, 0.01)", + "Pct01I=(int)TestComboBy.percentile(intCol, 0.01)", + "Pct05D=percentile(doubleCol, 0.05)", + "Pct05I=(int)TestComboBy.percentile(intCol, 0.05)", + "Pct25D=percentile(doubleCol, 0.25)", + "Pct25I=(int)TestComboBy.percentile(intCol, 0.25)", + "Pct50D=percentile(doubleCol, 0.50)", + "Pct50I=(int)TestComboBy.percentile(intCol, 0.50)", + "Pct65D=percentile(doubleCol, 0.65)", + "Pct65I=(int)TestComboBy.percentile(intCol, 0.65)", + "Pct90D=percentile(doubleCol, 0.90)", + "Pct90I=(int)TestComboBy.percentile(intCol, 0.90)", + "Pct99D=percentile(doubleCol, 0.99)", + "Pct99I=(int)TestComboBy.percentile(intCol, 0.99)").sort("Sym"), + queryTable.by(AggCombo( + AggMed("MedD=doubleCol"), + AggPct(0.01, "Pct01D=doubleCol", "Pct01I=intCol"), + AggPct(0.05, "Pct05D=doubleCol", "Pct05I=intCol"), + AggPct(0.25, "Pct25D=doubleCol", "Pct25I=intCol"), + AggPct(0.50, "Pct50D=doubleCol", "Pct50I=intCol"), + AggPct(0.65, "Pct65D=doubleCol", "Pct65I=intCol"), + AggPct(0.90, "Pct90D=doubleCol", "Pct90I=intCol"), + AggPct(0.99, "Pct99D=doubleCol", "Pct99I=intCol")), "Sym").sort("Sym")), new QueryTableTest.TableComparator( - queryTable.view("Sym", "intCol", "doubleCol").wavgBy("doubleCol", "Sym") - .renameColumns("WAvg=intCol"), - "WAvgBy", - queryTable.by(AggCombo( - AggWAvg("doubleCol", "WAvg=intCol")), "Sym"), - "AggWAvg"), + 
queryTable.view("Sym", "intCol", "doubleCol").wavgBy("doubleCol", "Sym") + .renameColumns("WAvg=intCol"), + "WAvgBy", + queryTable.by(AggCombo( + AggWAvg("doubleCol", "WAvg=intCol")), "Sym"), + "AggWAvg"), new QueryTableTest.TableComparator( - queryTable.view("Sym", "intCol", "doubleCol").countBy("Count"), "Count", - queryTable.by(AggCombo(reusedCount), CollectionUtil.ZERO_LENGTH_STRING_ARRAY), - "AggCount"), + queryTable.view("Sym", "intCol", "doubleCol").countBy("Count"), "Count", + queryTable.by(AggCombo(reusedCount), CollectionUtil.ZERO_LENGTH_STRING_ARRAY), "AggCount"), new QueryTableTest.TableComparator( - queryTable.view("Sym", "intCol", "doubleCol").countBy("Count"), "Count", - queryTable.by(AggCombo(reusedCount), CollectionUtil.ZERO_LENGTH_STRING_ARRAY), - "AggCount"), + queryTable.view("Sym", "intCol", "doubleCol").countBy("Count"), "Count", + queryTable.by(AggCombo(reusedCount), CollectionUtil.ZERO_LENGTH_STRING_ARRAY), "AggCount"), new QueryTableTestBase.TableComparator( - queryTable.by("Sym").view("Sym", - "cdi=countDistinct(intCol)", - "ddi=countDistinct(doubleCol)", - "cdiN=countDistinct(intColNulls, true)", - "ddiN=countDistinct(doubleColNulls, true)", - "dic=distinct(intCol, false, true)", - "did=distinct(doubleCol, false, true)", - "dicN=distinct(intColNulls, true, true)", - "didN=distinct(doubleColNulls, true, true)", - "uic=uniqueValue(intCol, false)", - "uid=uniqueValue(doubleCol, false)", - "uicN=uniqueValue(intColNulls, true)", - "uidN=uniqueValue(doubleColNulls, true)") - .sort("Sym"), - "countDistinctView", - queryTable.by(AggCombo(AggCountDistinct("cdi=intCol", "ddi=doubleCol"), - AggCountDistinct(true, "cdiN=intColNulls", "ddiN=doubleColNulls"), - AggDistinct("dic=intCol", "did=doubleCol"), - AggDistinct(true, "dicN=intColNulls", "didN=doubleColNulls"), - AggUnique("uic=intCol", "uid=doubleCol"), - AggUnique(true, "uicN=intColNulls", "uidN=doubleColNulls")), "Sym") - .sort("Sym"), - "AggCountDistinct") + queryTable.by("Sym").view("Sym", + 
"cdi=countDistinct(intCol)", + "ddi=countDistinct(doubleCol)", + "cdiN=countDistinct(intColNulls, true)", + "ddiN=countDistinct(doubleColNulls, true)", + "dic=distinct(intCol, false, true)", + "did=distinct(doubleCol, false, true)", + "dicN=distinct(intColNulls, true, true)", + "didN=distinct(doubleColNulls, true, true)", + "uic=uniqueValue(intCol, false)", + "uid=uniqueValue(doubleCol, false)", + "uicN=uniqueValue(intColNulls, true)", + "uidN=uniqueValue(doubleColNulls, true)") + .sort("Sym"), + "countDistinctView", + queryTable.by(AggCombo(AggCountDistinct("cdi=intCol", "ddi=doubleCol"), + AggCountDistinct(true, "cdiN=intColNulls", "ddiN=doubleColNulls"), + AggDistinct("dic=intCol", "did=doubleCol"), + AggDistinct(true, "dicN=intColNulls", "didN=doubleColNulls"), + AggUnique("uic=intCol", "uid=doubleCol"), + AggUnique(true, "uicN=intColNulls", "uidN=doubleColNulls")), "Sym") + .sort("Sym"), + "AggCountDistinct") }; final int steps = 100; // 8; for (int step = 0; step < steps; step++) { if (LiveTableTestCase.printTableUpdates) { System.out.println("Step = " + step); } - simulateShiftAwareStep(ctxt + " step == " + step, size, random, queryTable, columnInfo, - en); + simulateShiftAwareStep(ctxt + " step == " + step, size, random, queryTable, columnInfo, en); } } @@ -429,35 +403,32 @@ public void testComboByDoubleClaim() throws IOException { final Random random = new Random(0); final ColumnInfo columnInfo[]; final QueryTable queryTable = getTable(size, random, - columnInfo = initColumnInfos(new String[] {"Sym", "intCol", "doubleCol"}, - new TstUtils.SetGenerator<>("a", "b", "c", "d"), - new TstUtils.IntGenerator(10, 100), - new TstUtils.SetGenerator<>(10.1, 20.1, 30.1))); + columnInfo = initColumnInfos(new String[] {"Sym", "intCol", "doubleCol"}, + new TstUtils.SetGenerator<>("a", "b", "c", "d"), + new TstUtils.IntGenerator(10, 100), + new TstUtils.SetGenerator<>(10.1, 20.1, 30.1))); final ComboBy reusedCount = AggCount("Count"); final EvalNuggetInterface en[] = new 
EvalNuggetInterface[] { new QueryTableTest.TableComparator( - queryTable.view("Sym", "intCol", "doubleCol").countBy("Count"), "Count", - queryTable.by(AggCombo(reusedCount), CollectionUtil.ZERO_LENGTH_STRING_ARRAY), - "AggCount"), + queryTable.view("Sym", "intCol", "doubleCol").countBy("Count"), "Count", + queryTable.by(AggCombo(reusedCount), CollectionUtil.ZERO_LENGTH_STRING_ARRAY), "AggCount"), new QueryTableTest.TableComparator( - queryTable.view("Sym", "intCol", "doubleCol").countBy("Count"), "Count", - queryTable.by(AggCombo(reusedCount), CollectionUtil.ZERO_LENGTH_STRING_ARRAY), - "AggCount") + queryTable.view("Sym", "intCol", "doubleCol").countBy("Count"), "Count", + queryTable.by(AggCombo(reusedCount), CollectionUtil.ZERO_LENGTH_STRING_ARRAY), "AggCount") }; final int steps = 100; // 8; for (int i = 0; i < steps; i++) { System.out.println("Abstract Table:"); show(queryTable); - simulateShiftAwareStep("double Claim" + " step == " + i, size, random, queryTable, - columnInfo, en); + simulateShiftAwareStep("double Claim" + " step == " + i, size, random, queryTable, columnInfo, en); } } public void testComboByDistinct() { QueryTable dataTable = TstUtils.testRefreshingTable( - intCol("Grp", 1, 2, 3, 4), - charCol("Let", 'a', 'b', 'c', 'd')); + intCol("Grp", 1, 2, 3, 4), + charCol("Let", 'a', 'b', 'c', 'd')); final Table tail = dataTable.tail(10); final Table result = tail.by(AggCombo(AggDistinct("Let")), "Grp"); @@ -473,8 +444,8 @@ public void testComboByDistinct() { LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { final Index toAdd = i(4, 5, 6, 7); addToTable(dataTable, toAdd, - intCol("Grp", 1, 2, 3, 4), - charCol("Let", 'e', 'f', 'g', 'h')); + intCol("Grp", 1, 2, 3, 4), + charCol("Let", 'e', 'f', 'g', 'h')); dataTable.notifyListeners(toAdd, i(), i()); }); assertEquals(4, result.size()); @@ -486,8 +457,8 @@ public void testComboByDistinct() { LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { final Index toAdd = i(8, 9, 10, 11); 
addToTable(dataTable, toAdd, - intCol("Grp", 1, 2, 3, 4), - charCol("Let", 'i', 'j', 'k', 'l')); + intCol("Grp", 1, 2, 3, 4), + charCol("Let", 'i', 'j', 'k', 'l')); dataTable.notifyListeners(toAdd, i(), i()); }); assertArrayEquals(new char[] {'e', 'i'}, cs.get(0).toArray()); @@ -498,8 +469,8 @@ public void testComboByDistinct() { LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { final Index toAdd = i(12, 13, 14, 15); addToTable(dataTable, toAdd, - intCol("Grp", 1, 2, 3, 4), - charCol("Let", 'm', 'n', 'o', 'p')); + intCol("Grp", 1, 2, 3, 4), + charCol("Let", 'm', 'n', 'o', 'p')); dataTable.notifyListeners(toAdd, i(), i()); }); assertArrayEquals(new char[] {'i', 'm'}, cs.get(0).toArray()); @@ -546,14 +517,12 @@ public void testComboByDistinct() { public void testComboByCountDistinct() { QueryTable dataTable = TstUtils.testRefreshingTable( - c("USym", "AAPL", "AAPL", "AAPL", "GOOG", "GOOG", "SPY", "SPY", "SPY", "SPY", "VXX"), - longCol("Account", 1, 1, 2, 1, 3, 2, 4, 2, 5, 5), - intCol("Qty", 100, 100, 200, 300, 50, 100, 150, 200, 50, 50)); - - Table result = - dataTable.by(AggCombo(AggCountDistinct("Account", "Qty")), "USym").sort("USym"); - Table countNulls = - dataTable.by(AggCombo(AggCountDistinct(true, "Account", "Qty")), "USym").sort("USym"); + c("USym", "AAPL", "AAPL", "AAPL", "GOOG", "GOOG", "SPY", "SPY", "SPY", "SPY", "VXX"), + longCol("Account", 1, 1, 2, 1, 3, 2, 4, 2, 5, 5), + intCol("Qty", 100, 100, 200, 300, 50, 100, 150, 200, 50, 50)); + + Table result = dataTable.by(AggCombo(AggCountDistinct("Account", "Qty")), "USym").sort("USym"); + Table countNulls = dataTable.by(AggCombo(AggCountDistinct(true, "Account", "Qty")), "USym").sort("USym"); assertEquals(4, result.size()); assertArrayEquals(new Object[] {"AAPL", 2L, 2L}, result.getRecord(0)); assertArrayEquals(new Object[] {"GOOG", 2L, 2L}, result.getRecord(1)); @@ -563,9 +532,9 @@ public void testComboByCountDistinct() { LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { addToTable(dataTable, 
i(1, 10), - c("USym", "AAPL", "VXX"), - longCol("Account", QueryConstants.NULL_LONG, 1), - intCol("Qty", 100, QueryConstants.NULL_INT)); + c("USym", "AAPL", "VXX"), + longCol("Account", QueryConstants.NULL_LONG, 1), + intCol("Qty", 100, QueryConstants.NULL_INT)); dataTable.notifyListeners(i(10), i(), i(1)); }); @@ -577,9 +546,9 @@ public void testComboByCountDistinct() { LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { addToTable(dataTable, i(2), - c("USym", "AAPL"), - longCol("Account", QueryConstants.NULL_LONG), - intCol("Qty", 200)); + c("USym", "AAPL"), + longCol("Account", QueryConstants.NULL_LONG), + intCol("Qty", 200)); dataTable.notifyListeners(i(), i(), i(2)); }); @@ -590,9 +559,9 @@ public void testComboByCountDistinct() { LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { addToTable(dataTable, i(1, 2, 11), - c("USym", "AAPL", "AAPL", "SPY"), - longCol("Account", 1, 2, QueryConstants.NULL_LONG), - intCol("Qty", 100, 200, 200)); + c("USym", "AAPL", "AAPL", "SPY"), + longCol("Account", 1, 2, QueryConstants.NULL_LONG), + intCol("Qty", 100, 200, 200)); removeRows(dataTable, i(6)); dataTable.notifyListeners(i(11), i(6), i(1, 2)); @@ -611,17 +580,16 @@ public void testComboByAggUnique() { final DBDateTime dt3 = convertDateTime("2021-01-01T00:00:03.000 NY"); QueryTable dataTable = TstUtils.testRefreshingTable( - c("USym", "AAPL", "AAPL", "AAPL", /**/ "GOOG", "GOOG", /**/ "SPY", "SPY", "SPY", "SPY", - /**/ "VXX"), - longCol("Account", 1, 1, 2, /**/ 1, 3, /**/ 2, 4, 2, 5, /**/ 5), - intCol("Qty", 100, 100, 100, /**/ 300, 50, /**/ 100, 150, 200, 50, /**/ 50), - c("Whee", dt1, dt1, dt1, /**/ dt1, dt2, /**/ dt2, dt2, dt2, dt2, /**/ null)); + c("USym", "AAPL", "AAPL", "AAPL", /**/ "GOOG", "GOOG", /**/ "SPY", "SPY", "SPY", "SPY", /**/ "VXX"), + longCol("Account", 1, 1, 2, /**/ 1, 3, /**/ 2, 4, 2, 5, /**/ 5), + intCol("Qty", 100, 100, 100, /**/ 300, 50, /**/ 100, 150, 200, 50, /**/ 50), + c("Whee", dt1, dt1, dt1, /**/ dt1, dt2, /**/ dt2, dt2, dt2, dt2, /**/ 
null)); Table result = dataTable.by(AggCombo(AggUnique(false, null, -1, "Account", "Qty"), - AggUnique(false, null, dtdefault, "Whee")), "USym").sort("USym"); + AggUnique(false, null, dtdefault, "Whee")), "USym").sort("USym"); Table countNulls = dataTable.by(AggCombo(AggUnique(true, null, -1, "Account", "Qty"), - AggUnique(true, null, dtdefault, "Whee")), "USym").sort("USym"); + AggUnique(true, null, dtdefault, "Whee")), "USym").sort("USym"); assertEquals(4, result.size()); assertArrayEquals(new Object[] {"AAPL", -1L, 100, dt1}, result.getRecord(0)); @@ -632,10 +600,10 @@ public void testComboByAggUnique() { LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { addToTable(dataTable, i(2, 10), - c("USym", "AAPL", "VXX"), - longCol("Account", 1, 5), - intCol("Qty", 100, QueryConstants.NULL_INT), - c("Whee", (DBDateTime) null, (DBDateTime) null)); + c("USym", "AAPL", "VXX"), + longCol("Account", 1, 5), + intCol("Qty", 100, QueryConstants.NULL_INT), + c("Whee", (DBDateTime) null, (DBDateTime) null)); dataTable.notifyListeners(i(10), i(), i(2)); }); @@ -649,10 +617,10 @@ public void testComboByAggUnique() { LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { addToTable(dataTable, i(11), - c("USym", "USO"), - longCol("Account", 2), - intCol("Qty", 200), - c("Whee", dt1)); + c("USym", "USO"), + longCol("Account", 2), + intCol("Qty", 200), + c("Whee", dt1)); removeRows(dataTable, i(9, 10)); dataTable.notifyListeners(i(11), i(9, 10), i()); }); @@ -667,10 +635,10 @@ public void testComboByAggUnique() { // LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { addToTable(dataTable, i(11), - c("USym", "USO"), - longCol("Account", QueryConstants.NULL_LONG), - intCol("Qty", QueryConstants.NULL_INT), - c("Whee", dt2)); + c("USym", "USO"), + longCol("Account", QueryConstants.NULL_LONG), + intCol("Qty", QueryConstants.NULL_INT), + c("Whee", dt2)); dataTable.notifyListeners(i(), i(), i(11)); }); assertArrayEquals(new Object[] {"USO", null, null, dt2}, result.getRecord(3)); @@ 
-678,10 +646,10 @@ public void testComboByAggUnique() { // LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { addToTable(dataTable, i(3, 4, 9, 10), - c("USym", "GOOG", "GOOG", "VXX", "VXX"), - longCol("Account", 2L, 2L, QueryConstants.NULL_LONG, 99), - intCol("Qty", 350, 350, 50, 50), - c("Whee", dt2, dt2, (DBDateTime) null, dt1)); + c("USym", "GOOG", "GOOG", "VXX", "VXX"), + longCol("Account", 2L, 2L, QueryConstants.NULL_LONG, 99), + intCol("Qty", 350, 350, 50, 50), + c("Whee", dt2, dt2, (DBDateTime) null, dt1)); dataTable.notifyListeners(i(9, 10), i(), i(3, 4)); }); @@ -695,84 +663,74 @@ public void testAggUniqueDefaultValues() { final DBDateTime dt2 = convertDateTime("2021-02-02T00:02:03.000 NY"); QueryTable dataTable = TstUtils.testRefreshingTable( - c("USym", "NoKey", "SingleVal", "NonUnique", "NonUnique"), - c("StringCol", null, "Apple", "Bacon", "Pancake"), - c("BoolCol", null, true, true, false), - c("DateTime", null, dt1, dt1, dt2), - charCol("CharCol", NULL_CHAR, 'a', 'b', 'c'), - byteCol("ByteCol", NULL_BYTE, (byte) 100, (byte) 110, (byte) 120), - shortCol("ShortCol", NULL_SHORT, (short) 1234, (short) 4321, (short) 1324), - intCol("IntCol", NULL_INT, 99999, 100000, 200000), - longCol("LongCol", NULL_LONG, 44444444L, 55555555L, 66666666L), - floatCol("FloatCol", NULL_FLOAT, 1.2345f, 2.3456f, 3.4567f), - doubleCol("DoubleCol", NULL_DOUBLE, 1.1E22d, 2.2E22d, 3.3E22d)); + c("USym", "NoKey", "SingleVal", "NonUnique", "NonUnique"), + c("StringCol", null, "Apple", "Bacon", "Pancake"), + c("BoolCol", null, true, true, false), + c("DateTime", null, dt1, dt1, dt2), + charCol("CharCol", NULL_CHAR, 'a', 'b', 'c'), + byteCol("ByteCol", NULL_BYTE, (byte) 100, (byte) 110, (byte) 120), + shortCol("ShortCol", NULL_SHORT, (short) 1234, (short) 4321, (short) 1324), + intCol("IntCol", NULL_INT, 99999, 100000, 200000), + longCol("LongCol", NULL_LONG, 44444444L, 55555555L, 66666666L), + floatCol("FloatCol", NULL_FLOAT, 1.2345f, 2.3456f, 3.4567f), + doubleCol("DoubleCol", 
NULL_DOUBLE, 1.1E22d, 2.2E22d, 3.3E22d)); // First try mixing column types and values Table result; expectException(IllegalArgumentException.class, - "Attempted to use no key/non unique values of incorrect types for aggregated columns!", - () -> dataTable - .by(AggCombo(AggUnique(false, -1, -2, "StringCol", "BoolCol", "DatTime", "CharCol", - "ByteCol", "ShortCol", "IntCol", "LongCol", "FloatCol", "DoubleCol")), "USym") - .sort("USym")); + "Attempted to use no key/non unique values of incorrect types for aggregated columns!", + () -> dataTable.by(AggCombo(AggUnique(false, -1, -2, "StringCol", "BoolCol", "DatTime", "CharCol", + "ByteCol", "ShortCol", "IntCol", "LongCol", "FloatCol", "DoubleCol")), "USym").sort("USym")); - result = dataTable.by(AggCombo(AggUnique(false, -1, -2, "ByteCol", "ShortCol", "IntCol", - "LongCol", "FloatCol", "DoubleCol")), "USym").sort("USym"); + result = dataTable.by( + AggCombo(AggUnique(false, -1, -2, "ByteCol", "ShortCol", "IntCol", "LongCol", "FloatCol", "DoubleCol")), + "USym").sort("USym"); // Byte out of range - testUniqueOutOfRangeParams(Byte.class, dataTable, ((short) Byte.MIN_VALUE - 1), - Byte.MIN_VALUE, ((short) Byte.MAX_VALUE + 1), Byte.MAX_VALUE, "ByteCol", "ShortCol", - "IntCol", "LongCol", "FloatCol", "DoubleCol"); - testUniqueOutOfRangeParams(Short.class, dataTable, ((int) Short.MIN_VALUE - 1), - Short.MIN_VALUE, ((int) Short.MAX_VALUE + 1), Short.MAX_VALUE, "ShortCol", "IntCol", - "LongCol", "FloatCol", "DoubleCol"); - testUniqueOutOfRangeParams(Integer.class, dataTable, ((long) Integer.MIN_VALUE - 1), - Integer.MIN_VALUE, ((long) Integer.MAX_VALUE + 1), Integer.MAX_VALUE, "IntCol", - "LongCol", "FloatCol", "DoubleCol"); - testUniqueOutOfRangeParams(Long.class, dataTable, - BigInteger.valueOf(Long.MIN_VALUE).subtract(BigInteger.ONE), Long.MIN_VALUE, - BigInteger.valueOf(Long.MAX_VALUE).add(BigInteger.ONE), Long.MAX_VALUE, "LongCol", - "FloatCol", "DoubleCol"); + testUniqueOutOfRangeParams(Byte.class, dataTable, ((short) 
Byte.MIN_VALUE - 1), Byte.MIN_VALUE, + ((short) Byte.MAX_VALUE + 1), Byte.MAX_VALUE, "ByteCol", "ShortCol", "IntCol", "LongCol", "FloatCol", + "DoubleCol"); + testUniqueOutOfRangeParams(Short.class, dataTable, ((int) Short.MIN_VALUE - 1), Short.MIN_VALUE, + ((int) Short.MAX_VALUE + 1), Short.MAX_VALUE, "ShortCol", "IntCol", "LongCol", "FloatCol", "DoubleCol"); + testUniqueOutOfRangeParams(Integer.class, dataTable, ((long) Integer.MIN_VALUE - 1), Integer.MIN_VALUE, + ((long) Integer.MAX_VALUE + 1), Integer.MAX_VALUE, "IntCol", "LongCol", "FloatCol", "DoubleCol"); + testUniqueOutOfRangeParams(Long.class, dataTable, BigInteger.valueOf(Long.MIN_VALUE).subtract(BigInteger.ONE), + Long.MIN_VALUE, BigInteger.valueOf(Long.MAX_VALUE).add(BigInteger.ONE), Long.MAX_VALUE, "LongCol", + "FloatCol", "DoubleCol"); } - private void testUniqueOutOfRangeParams(Class type, Table dataTable, Number invalidLow, - Number validLow, Number invalidHigh, Number validHigh, String... aggCols) { + private void testUniqueOutOfRangeParams(Class type, Table dataTable, Number invalidLow, Number validLow, + Number invalidHigh, Number validHigh, String... 
aggCols) { // Byte out of range expectException(IllegalArgumentException.class, - "Attempted to use no key values too small for " + type.getName() + "!", - () -> dataTable.by(AggCombo(AggUnique(false, invalidLow, -1, aggCols)), "USym") - .sort("USym")); + "Attempted to use no key values too small for " + type.getName() + "!", + () -> dataTable.by(AggCombo(AggUnique(false, invalidLow, -1, aggCols)), "USym").sort("USym")); expectException(IllegalArgumentException.class, - "Attempted to use no key values too large for " + type.getName() + "!", - () -> dataTable.by(AggCombo(AggUnique(false, invalidHigh, -1, aggCols)), "USym") - .sort("USym")); + "Attempted to use no key values too large for " + type.getName() + "!", + () -> dataTable.by(AggCombo(AggUnique(false, invalidHigh, -1, aggCols)), "USym").sort("USym")); expectException(IllegalArgumentException.class, - "Attempted to non unique values too small for " + type.getName() + "!", - () -> dataTable.by(AggCombo(AggUnique(false, -1, invalidLow, aggCols)), "USym") - .sort("USym")); + "Attempted to non unique values too small for " + type.getName() + "!", + () -> dataTable.by(AggCombo(AggUnique(false, -1, invalidLow, aggCols)), "USym").sort("USym")); expectException(IllegalArgumentException.class, - "Attempted to use non unique values too large for " + type.getName() + "!", - () -> dataTable.by(AggCombo(AggUnique(false, -1, invalidHigh, aggCols)), "USym") - .sort("USym")); + "Attempted to use non unique values too large for " + type.getName() + "!", + () -> dataTable.by(AggCombo(AggUnique(false, -1, invalidHigh, aggCols)), "USym").sort("USym")); dataTable.by(AggCombo(AggUnique(false, validLow, validLow, aggCols)), "USym").sort("USym"); - dataTable.by(AggCombo(AggUnique(false, validHigh, validHigh, aggCols)), "USym") - .sort("USym"); + dataTable.by(AggCombo(AggUnique(false, validHigh, validHigh, aggCols)), "USym").sort("USym"); } - private static void expectException(Class excType, String failMessage, - Runnable action) { 
+ private static void expectException(Class excType, String failMessage, Runnable action) { try { action.run(); fail(failMessage); } catch (Throwable error) { if (error.getClass() != excType) { - fail("Unexpected exception type `" + error.getClass().getName() + "' expected '" - + excType.getName() + "'"); + fail("Unexpected exception type `" + error.getClass().getName() + "' expected '" + excType.getName() + + "'"); } } } diff --git a/DB/src/test/java/io/deephaven/db/v2/TestConcurrentInstantiation.java b/DB/src/test/java/io/deephaven/db/v2/TestConcurrentInstantiation.java index 39a6c8215de..ce8ab507749 100644 --- a/DB/src/test/java/io/deephaven/db/v2/TestConcurrentInstantiation.java +++ b/DB/src/test/java/io/deephaven/db/v2/TestConcurrentInstantiation.java @@ -56,23 +56,22 @@ public class TestConcurrentInstantiation extends QueryTableTestBase { public void testTreeTableFilter() throws ExecutionException, InterruptedException { final QueryTable source = TstUtils.testRefreshingTable(Index.FACTORY.getFlatIndex(10), - col("Sentinel", 1, 2, 3, 4, 5, 6, 7, 8, 9, 10), - col("Parent", NULL_INT, NULL_INT, 1, 1, 2, 3, 5, 5, 3, 2)); - final Table treed = LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLocked(() -> source.treeTable("Sentinel", "Parent")); + col("Sentinel", 1, 2, 3, 4, 5, 6, 7, 8, 9, 10), + col("Parent", NULL_INT, NULL_INT, 1, 1, 2, 3, 5, 5, 3, 2)); + final Table treed = + LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked(() -> source.treeTable("Sentinel", "Parent")); final Callable
    callable = - () -> TreeTableFilter.rawFilterTree(treed, "Sentinel in 4, 6, 9, 11, 12, 13, 14, 15"); + () -> TreeTableFilter.rawFilterTree(treed, "Sentinel in 4, 6, 9, 11, 12, 13, 14, 15"); LiveTableMonitor.DEFAULT.startCycleForUnitTests(); final Table rawSorted = pool.submit(callable).get(); TableTools.show(rawSorted); - assertTrue(Arrays.equals(new int[] {1, 3, 4, 6, 9}, - (int[]) rawSorted.getColumn("Sentinel").getDirect())); + assertTrue(Arrays.equals(new int[] {1, 3, 4, 6, 9}, (int[]) rawSorted.getColumn("Sentinel").getDirect())); TstUtils.addToTable(source, i(10), c("Sentinel", 11), - c("Parent", 2)); + c("Parent", 2)); final Table table2 = pool.submit(callable).get(); assertEquals(TableTools.diff(rawSorted, table2, 20), ""); @@ -99,7 +98,7 @@ public void testTreeTableFilter() throws ExecutionException, InterruptedExceptio LiveTableMonitor.DEFAULT.completeCycleForUnitTests(); assertTrue(Arrays.equals(new int[] {1, 2, 3, 4, 6, 9, 10, 11, 12}, - (int[]) rawSorted.getColumn("Sentinel").getDirect())); + (int[]) rawSorted.getColumn("Sentinel").getDirect())); assertEquals(TableTools.diff(rawSorted, table2, 20), ""); assertEquals(TableTools.diff(table2, table3, 20), ""); assertEquals(TableTools.diff(table3, table4, 20), ""); @@ -108,10 +107,8 @@ public void testTreeTableFilter() throws ExecutionException, InterruptedExceptio public void testFlatten() throws ExecutionException, InterruptedException { - final QueryTable table = - TstUtils.testRefreshingTable(i(2, 4, 6), c("x", 1, 2, 3), c("y", "a", "b", "c")); - final Table tableStart = - TstUtils.testRefreshingTable(i(2, 4, 6), c("x", 1, 2, 3), c("y", "a", "b", "c")); + final QueryTable table = TstUtils.testRefreshingTable(i(2, 4, 6), c("x", 1, 2, 3), c("y", "a", "b", "c")); + final Table tableStart = TstUtils.testRefreshingTable(i(2, 4, 6), c("x", 1, 2, 3), c("y", "a", "b", "c")); LiveTableMonitor.DEFAULT.startCycleForUnitTests(); @@ -142,12 +139,11 @@ public void testFlatten() throws ExecutionException, 
InterruptedException { } public void testUpdateView() throws ExecutionException, InterruptedException { - final QueryTable table = - TstUtils.testRefreshingTable(i(2, 4, 6), c("x", 1, 2, 3), c("y", "a", "b", "c")); - final Table tableStart = TstUtils.testRefreshingTable(i(2, 4, 6), c("x", 1, 2, 3), - c("y", "a", "b", "c"), c("z", 4, 8, 12)); + final QueryTable table = TstUtils.testRefreshingTable(i(2, 4, 6), c("x", 1, 2, 3), c("y", "a", "b", "c")); + final Table tableStart = + TstUtils.testRefreshingTable(i(2, 4, 6), c("x", 1, 2, 3), c("y", "a", "b", "c"), c("z", 4, 8, 12)); final Table tableUpdate = TstUtils.testRefreshingTable(i(2, 3, 4, 6), c("x", 1, 4, 2, 3), - c("y", "a", "d", "b", "c"), c("z", 4, 16, 8, 12)); + c("y", "a", "d", "b", "c"), c("z", 4, 16, 8, 12)); final Callable
    callable = () -> table.updateView("z=x*4"); @@ -179,12 +175,10 @@ public void testUpdateView() throws ExecutionException, InterruptedException { } public void testView() throws ExecutionException, InterruptedException { - final QueryTable table = - TstUtils.testRefreshingTable(i(2, 4, 6), c("x", 1, 2, 3), c("y", "a", "b", "c")); - final Table tableStart = - TstUtils.testRefreshingTable(i(2, 4, 6), c("y", "a", "b", "c"), c("z", 4, 8, 12)); - final Table tableUpdate = TstUtils.testRefreshingTable(i(2, 3, 4, 6), - c("y", "a", "d", "b", "c"), c("z", 4, 16, 8, 12)); + final QueryTable table = TstUtils.testRefreshingTable(i(2, 4, 6), c("x", 1, 2, 3), c("y", "a", "b", "c")); + final Table tableStart = TstUtils.testRefreshingTable(i(2, 4, 6), c("y", "a", "b", "c"), c("z", 4, 8, 12)); + final Table tableUpdate = + TstUtils.testRefreshingTable(i(2, 3, 4, 6), c("y", "a", "d", "b", "c"), c("z", 4, 16, 8, 12)); final Callable
    callable = () -> table.view("y", "z=x*4"); @@ -216,12 +210,11 @@ public void testView() throws ExecutionException, InterruptedException { } public void testDropColumns() throws ExecutionException, InterruptedException { - final QueryTable table = TstUtils.testRefreshingTable(i(2, 4, 6), c("x", 1, 2, 3), - c("y", "a", "b", "c"), c("z", 4, 8, 12)); - final Table tableStart = - TstUtils.testRefreshingTable(i(2, 4, 6), c("x", 1, 2, 3), c("y", "a", "b", "c")); - final Table tableUpdate = TstUtils.testRefreshingTable(i(2, 3, 4, 6), c("x", 1, 4, 2, 3), - c("y", "a", "d", "b", "c")); + final QueryTable table = + TstUtils.testRefreshingTable(i(2, 4, 6), c("x", 1, 2, 3), c("y", "a", "b", "c"), c("z", 4, 8, 12)); + final Table tableStart = TstUtils.testRefreshingTable(i(2, 4, 6), c("x", 1, 2, 3), c("y", "a", "b", "c")); + final Table tableUpdate = + TstUtils.testRefreshingTable(i(2, 3, 4, 6), c("x", 1, 4, 2, 3), c("y", "a", "d", "b", "c")); final Callable
    callable = () -> table.dropColumns("z"); @@ -253,12 +246,12 @@ public void testDropColumns() throws ExecutionException, InterruptedException { } public void testWhere() throws ExecutionException, InterruptedException { - final QueryTable table = TstUtils.testRefreshingTable(i(2, 4, 6), c("x", 1, 2, 3), - c("y", "a", "b", "c"), c("z", true, false, true)); - final Table tableStart = TstUtils.testRefreshingTable(i(2, 6), c("x", 1, 3), - c("y", "a", "c"), c("z", true, true)); - final Table tableUpdate = TstUtils.testRefreshingTable(i(2, 3, 6), c("x", 1, 4, 3), - c("y", "a", "d", "c"), c("z", true, true, true)); + final QueryTable table = TstUtils.testRefreshingTable(i(2, 4, 6), c("x", 1, 2, 3), c("y", "a", "b", "c"), + c("z", true, false, true)); + final Table tableStart = + TstUtils.testRefreshingTable(i(2, 6), c("x", 1, 3), c("y", "a", "c"), c("z", true, true)); + final Table tableUpdate = TstUtils.testRefreshingTable(i(2, 3, 6), c("x", 1, 4, 3), c("y", "a", "d", "c"), + c("z", true, true, true)); LiveTableMonitor.DEFAULT.startCycleForUnitTests(); @@ -288,12 +281,12 @@ public void testWhere() throws ExecutionException, InterruptedException { } public void testWhere2() throws ExecutionException, InterruptedException { - final QueryTable table = TstUtils.testRefreshingTable(i(2, 4, 6), c("x", 1, 2, 3), - c("y", "a", "b", "c"), c("z", true, false, true)); - final Table tableStart = TstUtils.testRefreshingTable(i(2, 6), c("x", 1, 3), - c("y", "a", "c"), c("z", true, true)); - final Table testUpdate = TstUtils.testRefreshingTable(i(3, 6), c("x", 4, 3), - c("y", "d", "c"), c("z", true, true)); + final QueryTable table = TstUtils.testRefreshingTable(i(2, 4, 6), c("x", 1, 2, 3), c("y", "a", "b", "c"), + c("z", true, false, true)); + final Table tableStart = + TstUtils.testRefreshingTable(i(2, 6), c("x", 1, 3), c("y", "a", "c"), c("z", true, true)); + final Table testUpdate = + TstUtils.testRefreshingTable(i(3, 6), c("x", 4, 3), c("y", "d", "c"), c("z", true, true)); 
LiveTableMonitor.DEFAULT.startCycleForUnitTests(); @@ -329,16 +322,16 @@ public void testWhere2() throws ExecutionException, InterruptedException { public void testWhereDynamic() throws ExecutionException, InterruptedException { - final QueryTable table = TstUtils.testRefreshingTable(i(2, 4, 6), c("x", 1, 2, 3), - c("y", "a", "b", "c"), c("z", true, false, true)); - final Table tableStart = TstUtils.testRefreshingTable(i(2, 6), c("x", 1, 3), - c("y", "a", "c"), c("z", true, true)); - final Table testUpdate = TstUtils.testRefreshingTable(i(3, 6), c("x", 4, 3), - c("y", "d", "c"), c("z", true, true)); + final QueryTable table = TstUtils.testRefreshingTable(i(2, 4, 6), c("x", 1, 2, 3), c("y", "a", "b", "c"), + c("z", true, false, true)); + final Table tableStart = + TstUtils.testRefreshingTable(i(2, 6), c("x", 1, 3), c("y", "a", "c"), c("z", true, true)); + final Table testUpdate = + TstUtils.testRefreshingTable(i(3, 6), c("x", 4, 3), c("y", "d", "c"), c("z", true, true)); final Table whereTable = TstUtils.testRefreshingTable(i(0), c("z", true)); - final DynamicWhereFilter filter = LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked( - () -> new DynamicWhereFilter(whereTable, true, MatchPairFactory.getExpressions("z"))); + final DynamicWhereFilter filter = LiveTableMonitor.DEFAULT.exclusiveLock() + .computeLocked(() -> new DynamicWhereFilter(whereTable, true, MatchPairFactory.getExpressions("z"))); LiveTableMonitor.DEFAULT.startCycleForUnitTests(); @@ -363,12 +356,10 @@ public void testWhereDynamic() throws ExecutionException, InterruptedException { } public void testSort() throws ExecutionException, InterruptedException { - final QueryTable table = - TstUtils.testRefreshingTable(i(2, 4, 6), c("x", 1, 2, 3), c("y", "a", "b", "c")); - final Table tableStart = - TstUtils.testRefreshingTable(i(1, 2, 3), c("x", 3, 2, 1), c("y", "c", "b", "a")); - final Table tableUpdate = TstUtils.testRefreshingTable(i(1, 2, 3, 4), c("x", 4, 3, 2, 1), - c("y", "d", "c", "b", "a")); + 
final QueryTable table = TstUtils.testRefreshingTable(i(2, 4, 6), c("x", 1, 2, 3), c("y", "a", "b", "c")); + final Table tableStart = TstUtils.testRefreshingTable(i(1, 2, 3), c("x", 3, 2, 1), c("y", "c", "b", "a")); + final Table tableUpdate = + TstUtils.testRefreshingTable(i(1, 2, 3, 4), c("x", 4, 3, 2, 1), c("y", "d", "c", "b", "a")); LiveTableMonitor.DEFAULT.startCycleForUnitTests(); @@ -398,16 +389,14 @@ public void testSort() throws ExecutionException, InterruptedException { } public void testReverse() throws ExecutionException, InterruptedException { - final QueryTable table = - TstUtils.testRefreshingTable(i(2, 4, 6), c("x", 1, 2, 3), c("y", "a", "b", "c")); - final Table tableStart = - TstUtils.testRefreshingTable(i(1, 2, 3), c("x", 3, 2, 1), c("y", "c", "b", "a")); - final Table tableUpdate = TstUtils.testRefreshingTable(i(1, 2, 3, 4), c("x", 4, 3, 2, 1), - c("y", "d", "c", "b", "a")); - final Table tableUpdate2 = TstUtils.testRefreshingTable(i(1, 2, 3, 4, 5), - c("x", 5, 4, 3, 2, 1), c("y", "e", "d", "c", "b", "a")); - final Table tableUpdate3 = TstUtils.testRefreshingTable(i(1, 2, 3, 4, 5, 6), - c("x", 6, 5, 4, 3, 2, 1), c("y", "f", "e", "d", "c", "b", "a")); + final QueryTable table = TstUtils.testRefreshingTable(i(2, 4, 6), c("x", 1, 2, 3), c("y", "a", "b", "c")); + final Table tableStart = TstUtils.testRefreshingTable(i(1, 2, 3), c("x", 3, 2, 1), c("y", "c", "b", "a")); + final Table tableUpdate = + TstUtils.testRefreshingTable(i(1, 2, 3, 4), c("x", 4, 3, 2, 1), c("y", "d", "c", "b", "a")); + final Table tableUpdate2 = + TstUtils.testRefreshingTable(i(1, 2, 3, 4, 5), c("x", 5, 4, 3, 2, 1), c("y", "e", "d", "c", "b", "a")); + final Table tableUpdate3 = TstUtils.testRefreshingTable(i(1, 2, 3, 4, 5, 6), c("x", 6, 5, 4, 3, 2, 1), + c("y", "f", "e", "d", "c", "b", "a")); LiveTableMonitor.DEFAULT.startCycleForUnitTests(); @@ -460,8 +449,7 @@ public void testReverse() throws ExecutionException, InterruptedException { public void testSortOfByExternal() throws 
ExecutionException, InterruptedException { - final QueryTable table = - TstUtils.testRefreshingTable(i(2, 4, 6), c("x", 1, 2, 3), c("y", "a", "a", "a")); + final QueryTable table = TstUtils.testRefreshingTable(i(2, 4, 6), c("x", 1, 2, 3), c("y", "a", "a", "a")); final TableMap tm = table.byExternal("y"); LiveTableMonitor.DEFAULT.startCycleForUnitTests(); @@ -470,8 +458,8 @@ public void testSortOfByExternal() throws ExecutionException, InterruptedExcepti table.notifyListeners(i(3), i(), i()); - // We need to flush two notifications: one for the source table and one for the "withView" - // table in the aggregation helper. + // We need to flush two notifications: one for the source table and one for the "withView" table in the + // aggregation helper. LiveTableMonitor.DEFAULT.flushOneNotificationForUnitTests(); LiveTableMonitor.DEFAULT.flushOneNotificationForUnitTests(); @@ -494,16 +482,15 @@ public void testSortOfByExternal() throws ExecutionException, InterruptedExcepti public void testChain() throws ExecutionException, InterruptedException { - final QueryTable table = TstUtils.testRefreshingTable(i(2, 4, 6), c("x", 1, 2, 3), - c("y", "a", "b", "c"), c("z", true, false, true)); - final Table tableStart = TstUtils.testRefreshingTable(i(1, 3), c("x", 3, 1), - c("y", "c", "a"), c("z", true, true), c("u", 12, 4)); + final QueryTable table = TstUtils.testRefreshingTable(i(2, 4, 6), c("x", 1, 2, 3), c("y", "a", "b", "c"), + c("z", true, false, true)); + final Table tableStart = TstUtils.testRefreshingTable(i(1, 3), c("x", 3, 1), c("y", "c", "a"), + c("z", true, true), c("u", 12, 4)); - final Table tableUpdate = TstUtils.testRefreshingTable(i(1, 2, 4), c("x", 4, 3, 1), - c("y", "d", "c", "a"), c("z", true, true, true), c("u", 16, 12, 4)); + final Table tableUpdate = TstUtils.testRefreshingTable(i(1, 2, 4), c("x", 4, 3, 1), c("y", "d", "c", "a"), + c("z", true, true, true), c("u", 16, 12, 4)); - final Callable
    callable = - () -> table.updateView("u=x*4").where("z").sortDescending("x"); + final Callable
    callable = () -> table.updateView("u=x*4").where("z").sortDescending("x"); LiveTableMonitor.DEFAULT.startCycleForUnitTests(); @@ -535,14 +522,12 @@ public void testChain() throws ExecutionException, InterruptedException { } public void testReverseLookupListener() throws ExecutionException, InterruptedException { - final QueryTable table = - TstUtils.testRefreshingTable(i(2, 4, 6), c("x", 1, 2, 3), c("y", "a", "b", "c")); + final QueryTable table = TstUtils.testRefreshingTable(i(2, 4, 6), c("x", 1, 2, 3), c("y", "a", "b", "c")); LiveTableMonitor.DEFAULT.startCycleForUnitTests(); - final ReverseLookupListener rll = pool - .submit(() -> ReverseLookupListener.makeReverseLookupListenerWithSnapshot(table, "x")) - .get(); + final ReverseLookupListener rll = + pool.submit(() -> ReverseLookupListener.makeReverseLookupListenerWithSnapshot(table, "x")).get(); TestCase.assertEquals(rll.get(1), 2); TestCase.assertEquals(rll.get(2), 4); @@ -551,9 +536,8 @@ public void testReverseLookupListener() throws ExecutionException, InterruptedEx TstUtils.addToTable(table, i(4), c("x", 4), c("y", "d")); - final ReverseLookupListener rll2 = pool - .submit(() -> ReverseLookupListener.makeReverseLookupListenerWithSnapshot(table, "x")) - .get(); + final ReverseLookupListener rll2 = + pool.submit(() -> ReverseLookupListener.makeReverseLookupListenerWithSnapshot(table, "x")).get(); TestCase.assertEquals(rll2.get(1), 2); TestCase.assertEquals(rll2.get(2), 4); @@ -562,9 +546,8 @@ public void testReverseLookupListener() throws ExecutionException, InterruptedEx table.notifyListeners(i(), i(), i(4)); - final ReverseLookupListener rll3 = pool - .submit(() -> ReverseLookupListener.makeReverseLookupListenerWithSnapshot(table, "x")) - .get(); + final ReverseLookupListener rll3 = + pool.submit(() -> ReverseLookupListener.makeReverseLookupListenerWithSnapshot(table, "x")).get(); TestCase.assertEquals(rll3.get(1), 2); TestCase.assertEquals(rll3.get(2), rll.getNoEntryValue()); @@ -611,8 +594,8 @@ public 
void testIterative() { public void testIterativeQuickFilter() { final List> transformations = new ArrayList<>(); transformations.add(t -> t.where("boolCol2")); - transformations.add(t -> t.where(DisjunctiveFilter.makeDisjunctiveFilter( - SelectFilterFactory.expandQuickFilter(t, "10", QuickFilterMode.NORMAL)))); + transformations.add(t -> t.where(DisjunctiveFilter + .makeDisjunctiveFilter(SelectFilterFactory.expandQuickFilter(t, "10", QuickFilterMode.NORMAL)))); transformations.add(t -> t.sortDescending("doubleCol")); transformations.add(Table::flatten); testIterative(transformations); @@ -620,9 +603,9 @@ public void testIterativeQuickFilter() { public void testIterativeDisjunctiveCondition() { final List> transformations = new ArrayList<>(); - transformations.add(t -> t.where( - DisjunctiveFilter.makeDisjunctiveFilter(ConditionFilter.createConditionFilter("false"), - ConditionFilter.createConditionFilter("true")))); + transformations.add( + t -> t.where(DisjunctiveFilter.makeDisjunctiveFilter(ConditionFilter.createConditionFilter("false"), + ConditionFilter.createConditionFilter("true")))); testIterative(transformations); } @@ -630,8 +613,7 @@ private void testIterative(List> transformations) { testIterative(transformations, 0, new MutableInt(50)); } - private void testIterative(List> transformations, int seed, - MutableInt numSteps) { + private void testIterative(List> transformations, int seed, MutableInt numSteps) { final TstUtils.ColumnInfo[] columnInfos; final int size = 100; @@ -639,13 +621,12 @@ private void testIterative(List> transformations, int see final int maxSteps = numSteps.intValue(); final QueryTable table = getTable(size, random, - columnInfos = - initColumnInfos(new String[] {"Sym", "intCol", "boolCol", "boolCol2", "doubleCol"}, - new SetGenerator<>("aa", "bb", "bc", "cc", "dd", "ee", "ff", "gg", "hh", "ii"), - new IntGenerator(0, 100), - new BooleanGenerator(), - new BooleanGenerator(), - new DoubleGenerator(0, 100))); + columnInfos = 
initColumnInfos(new String[] {"Sym", "intCol", "boolCol", "boolCol2", "doubleCol"}, + new SetGenerator<>("aa", "bb", "bc", "cc", "dd", "ee", "ff", "gg", "hh", "ii"), + new IntGenerator(0, 100), + new BooleanGenerator(), + new BooleanGenerator(), + new DoubleGenerator(0, 100))); final Callable
    complete = () -> { @@ -656,8 +637,7 @@ private void testIterative(List> transformations, int see return t; }; - final List, Function>> splitCallables = - new ArrayList<>(); + final List, Function>> splitCallables = new ArrayList<>(); for (int ii = 1; ii <= transformations.size() - 1; ++ii) { final int fii = ii; final Callable
    firstHalf = () -> { @@ -736,12 +716,10 @@ private void testIterative(List> transformations, int see } if (beforeStartAndBeforeUpdate) { - final List
    beforeStartAndBeforeUpdateSplitResults = - new ArrayList<>(splitCallables.size()); + final List
    beforeStartAndBeforeUpdateSplitResults = new ArrayList<>(splitCallables.size()); for (int splitIndex = 0; splitIndex < splitCallables.size(); ++splitIndex) { final int fSplitIndex = splitIndex; - final Table splitResult = - pool.submit(() -> splitCallables.get(fSplitIndex).second + final Table splitResult = pool.submit(() -> splitCallables.get(fSplitIndex).second .apply(beforeStartFirstHalf.get(fSplitIndex))).get(); splitResult.setAttribute("Step", i); splitResult.setAttribute("Type", "beforeUpdateSplit"); @@ -755,8 +733,7 @@ private void testIterative(List> transformations, int see beforeUpdateFirstHalf.add(pool.submit(splitCallable.first).get()); } - final Index[] updates = - GenerateTableUpdates.computeTableUpdates(size, random, table, columnInfos); + final Index[] updates = GenerateTableUpdates.computeTableUpdates(size, random, table, columnInfos); if (beforeNotify) { // after we update the underlying data, but before we notify @@ -767,12 +744,10 @@ private void testIterative(List> transformations, int see } if (beforeAndAfterUpdate) { - final List
    beforeAndAfterUpdateSplitResults = - new ArrayList<>(splitCallables.size()); + final List
    beforeAndAfterUpdateSplitResults = new ArrayList<>(splitCallables.size()); for (int splitIndex = 0; splitIndex < splitCallables.size(); ++splitIndex) { final int fSplitIndex = splitIndex; - final Table splitResult = - pool.submit(() -> splitCallables.get(fSplitIndex).second + final Table splitResult = pool.submit(() -> splitCallables.get(fSplitIndex).second .apply(beforeUpdateFirstHalf.get(fSplitIndex))).get(); splitResult.setAttribute("Step", i); splitResult.setAttribute("Type", "beforeAndAfterUpdateSplit"); @@ -783,12 +758,10 @@ private void testIterative(List> transformations, int see } if (beforeStartAndAfterUpdate) { - final List
    beforeStartAndAfterUpdateSplitResults = - new ArrayList<>(splitCallables.size()); + final List
    beforeStartAndAfterUpdateSplitResults = new ArrayList<>(splitCallables.size()); for (int splitIndex = 0; splitIndex < splitCallables.size(); ++splitIndex) { final int fSplitIndex = splitIndex; - final Table splitResult = - pool.submit(() -> splitCallables.get(fSplitIndex).second + final Table splitResult = pool.submit(() -> splitCallables.get(fSplitIndex).second .apply(beforeStartFirstHalf.get(fSplitIndex))).get(); splitResult.setAttribute("Step", i); splitResult.setAttribute("Type", "beforeStartAndAfterUpdate"); @@ -806,12 +779,10 @@ private void testIterative(List> transformations, int see table.notifyListeners(updates[0], updates[1], updates[2]); if (beforeAndAfterNotify) { - final List
    beforeAndAfterNotifySplitResults = - new ArrayList<>(splitCallables.size()); + final List
    beforeAndAfterNotifySplitResults = new ArrayList<>(splitCallables.size()); for (int splitIndex = 0; splitIndex < splitCallables.size(); ++splitIndex) { final int fSplitIndex = splitIndex; - final Table splitResult = - pool.submit(() -> splitCallables.get(fSplitIndex).second + final Table splitResult = pool.submit(() -> splitCallables.get(fSplitIndex).second .apply(beforeNotifyFirstHalf.get(fSplitIndex))).get(); splitResult.setAttribute("Step", i); splitResult.setAttribute("Type", "beforeAndAfterNotify"); @@ -821,12 +792,10 @@ private void testIterative(List> transformations, int see } if (beforeStartAndAfterNotify) { - final List
    beforeStartAndAfterNotifySplitResults = - new ArrayList<>(splitCallables.size()); + final List
    beforeStartAndAfterNotifySplitResults = new ArrayList<>(splitCallables.size()); for (int splitIndex = 0; splitIndex < splitCallables.size(); ++splitIndex) { final int fSplitIndex = splitIndex; - final Table splitResult = - pool.submit(() -> splitCallables.get(fSplitIndex).second + final Table splitResult = pool.submit(() -> splitCallables.get(fSplitIndex).second .apply(beforeStartFirstHalf.get(fSplitIndex))).get(); splitResult.setAttribute("Step", i); splitResult.setAttribute("Type", "beforeStartAndAfterNotify"); @@ -837,12 +806,10 @@ private void testIterative(List> transformations, int see } if (beforeUpdateAndAfterNotify) { - final List
    beforeUpdateAndAfterNotifySplitResults = - new ArrayList<>(splitCallables.size()); + final List
    beforeUpdateAndAfterNotifySplitResults = new ArrayList<>(splitCallables.size()); for (int splitIndex = 0; splitIndex < splitCallables.size(); ++splitIndex) { final int fSplitIndex = splitIndex; - final Table splitResult = - pool.submit(() -> splitCallables.get(fSplitIndex).second + final Table splitResult = pool.submit(() -> splitCallables.get(fSplitIndex).second .apply(beforeUpdateFirstHalf.get(fSplitIndex))).get(); splitResult.setAttribute("Step", i); splitResult.setAttribute("Type", "beforeUpdateAndAfterNotify"); @@ -870,18 +837,17 @@ private void testIterative(List> transformations, int see for (int newResult = lastResultSize; newResult < results.size(); ++newResult) { final DynamicTable dynamicTable = (DynamicTable) results.get(newResult); final InstrumentedShiftAwareListenerAdapter listener = - new InstrumentedShiftAwareListenerAdapter("errorListener", dynamicTable, - false) { - @Override - public void onUpdate(final Update upstream) {} - - @Override - public void onFailureInternal(Throwable originalException, - UpdatePerformanceTracker.Entry sourceEntry) { - originalException.printStackTrace(System.err); - TestCase.fail(originalException.getMessage()); - } - }; + new InstrumentedShiftAwareListenerAdapter("errorListener", dynamicTable, false) { + @Override + public void onUpdate(final Update upstream) {} + + @Override + public void onFailureInternal(Throwable originalException, + UpdatePerformanceTracker.Entry sourceEntry) { + originalException.printStackTrace(System.err); + TestCase.fail(originalException.getMessage()); + } + }; listeners.add(listener); dynamicTable.listenForUpdates(listener); } @@ -889,12 +855,10 @@ public void onFailureInternal(Throwable originalException, LiveTableMonitor.DEFAULT.completeCycleForUnitTests(); if (beforeStartAndAfterCycle) { - final List
    beforeStartAndAfterCycleSplitResults = - new ArrayList<>(splitCallables.size()); + final List
    beforeStartAndAfterCycleSplitResults = new ArrayList<>(splitCallables.size()); for (int splitIndex = 0; splitIndex < splitCallables.size(); ++splitIndex) { final int fSplitIndex = splitIndex; - final Table splitResult = - pool.submit(() -> splitCallables.get(fSplitIndex).second + final Table splitResult = pool.submit(() -> splitCallables.get(fSplitIndex).second .apply(beforeStartFirstHalf.get(fSplitIndex))).get(); splitResult.setAttribute("Step", i); splitResult.setAttribute("Type", "beforeStartAndAfterCycle"); @@ -905,8 +869,8 @@ public void onFailureInternal(Throwable originalException, for (int splitIndex = 0; splitIndex < splitCallables.size(); ++splitIndex) { final int fSplitIndex = splitIndex; final Table splitResult = LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLocked(() -> splitCallables.get(fSplitIndex).second - .apply(beforeStartFirstHalf.get(fSplitIndex))); + .computeLocked(() -> splitCallables.get(fSplitIndex).second + .apply(beforeStartFirstHalf.get(fSplitIndex))); splitResult.setAttribute("Step", i); splitResult.setAttribute("Type", "beforeStartAndAfterCycleLocked"); splitResult.setAttribute("SplitIndex", splitIndex); @@ -916,12 +880,10 @@ public void onFailureInternal(Throwable originalException, } if (beforeUpdateAndAfterCycle) { - final List
    beforeUpdateAndAfterCycleSplitResults = - new ArrayList<>(splitCallables.size()); + final List
    beforeUpdateAndAfterCycleSplitResults = new ArrayList<>(splitCallables.size()); for (int splitIndex = 0; splitIndex < splitCallables.size(); ++splitIndex) { final int fSplitIndex = splitIndex; - final Table splitResult = - pool.submit(() -> splitCallables.get(fSplitIndex).second + final Table splitResult = pool.submit(() -> splitCallables.get(fSplitIndex).second .apply(beforeUpdateFirstHalf.get(fSplitIndex))).get(); splitResult.setAttribute("Step", i); splitResult.setAttribute("Type", "beforeUpdateAndAfterCycle"); @@ -932,8 +894,8 @@ public void onFailureInternal(Throwable originalException, for (int splitIndex = 0; splitIndex < splitCallables.size(); ++splitIndex) { final int fSplitIndex = splitIndex; final Table splitResult = LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLocked(() -> splitCallables.get(fSplitIndex).second - .apply(beforeUpdateFirstHalf.get(fSplitIndex))); + .computeLocked(() -> splitCallables.get(fSplitIndex).second + .apply(beforeUpdateFirstHalf.get(fSplitIndex))); splitResult.setAttribute("Step", i); splitResult.setAttribute("Type", "beforeUpdateAndAfterCycleLocked"); splitResult.setAttribute("SplitIndex", splitIndex); @@ -943,12 +905,10 @@ public void onFailureInternal(Throwable originalException, } if (beforeNotifyAndAfterCycle) { - final List
    beforeNotifyAndAfterCycleSplitResults = - new ArrayList<>(splitCallables.size()); + final List
    beforeNotifyAndAfterCycleSplitResults = new ArrayList<>(splitCallables.size()); for (int splitIndex = 0; splitIndex < splitCallables.size(); ++splitIndex) { final int fSplitIndex = splitIndex; - final Table splitResult = - pool.submit(() -> splitCallables.get(fSplitIndex).second + final Table splitResult = pool.submit(() -> splitCallables.get(fSplitIndex).second .apply(beforeNotifyFirstHalf.get(fSplitIndex))).get(); splitResult.setAttribute("Step", i); splitResult.setAttribute("Type", "beforeNotifyAndAfterCycle"); @@ -957,8 +917,8 @@ public void onFailureInternal(Throwable originalException, } for (int splitIndex = 0; splitIndex < splitCallables.size(); ++splitIndex) { - final Table splitResult = splitCallables.get(splitIndex).second - .apply(beforeNotifyFirstHalf.get(splitIndex)); + final Table splitResult = + splitCallables.get(splitIndex).second.apply(beforeNotifyFirstHalf.get(splitIndex)); splitResult.setAttribute("Step", i); splitResult.setAttribute("Type", "beforeNotifyAndAfterCycleLocked"); splitResult.setAttribute("SplitIndex", splitIndex); @@ -970,15 +930,12 @@ public void onFailureInternal(Throwable originalException, Assert.eqTrue(beforeAndAfterCycle, "beforeAndAfterCycle"); if (transformations.size() > 1) { - Assert.eqFalse(beforeCycleFirstHalf.isEmpty(), - "beforeCycleFirstHalf.isEmpty()"); - final List
    beforeAndAfterCycleSplitResults = - new ArrayList<>(splitCallables.size()); + Assert.eqFalse(beforeCycleFirstHalf.isEmpty(), "beforeCycleFirstHalf.isEmpty()"); + final List
    beforeAndAfterCycleSplitResults = new ArrayList<>(splitCallables.size()); for (int splitIndex = 0; splitIndex < splitCallables.size(); ++splitIndex) { final int fSplitIndex = splitIndex; - final Table splitResult = - pool.submit(() -> splitCallables.get(fSplitIndex).second + final Table splitResult = pool.submit(() -> splitCallables.get(fSplitIndex).second .apply(beforeCycleFirstHalf.get(fSplitIndex))).get(); splitResult.setAttribute("Step", i); splitResult.setAttribute("Type", "beforeAndAfterCycle"); @@ -989,8 +946,8 @@ public void onFailureInternal(Throwable originalException, for (int splitIndex = 0; splitIndex < splitCallables.size(); ++splitIndex) { final int fSplitIndex = splitIndex; final Table splitResult = LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLocked(() -> splitCallables.get(fSplitIndex).second - .apply(beforeCycleFirstHalf.get(fSplitIndex))); + .computeLocked(() -> splitCallables.get(fSplitIndex).second + .apply(beforeCycleFirstHalf.get(fSplitIndex))); splitResult.setAttribute("Step", i); splitResult.setAttribute("Type", "beforeAndAfterCycle"); splitResult.setAttribute("SplitIndex", splitIndex); @@ -1005,16 +962,14 @@ public void onFailureInternal(Throwable originalException, TableTools.showWithIndex(table); System.out.println("Standard Table: (" + Objects.hashCode(standard) + ")"); TableTools.showWithIndex(standard); - System.out.println( - "Verifying " + results.size() + " tables (size = " + standard.size() + ")"); + System.out.println("Verifying " + results.size() + " tables (size = " + standard.size() + ")"); } // now verify all the outstanding results for (Table checkTable : results) { String diff = diff(checkTable, standard, 10); if (!diff.isEmpty() && LiveTableTestCase.printTableUpdates) { - System.out - .println("Check Table: " + checkTable.getAttribute("Step") + ", " + + System.out.println("Check Table: " + checkTable.getAttribute("Step") + ", " + checkTable.getAttribute("Type") + ", splitIndex=" + 
checkTable.getAttribute("SplitIndex") + ", hash=" + Objects.hashCode(checkTable)); @@ -1031,8 +986,7 @@ public void onFailureInternal(Throwable originalException, } public void testSelectDistinct() throws ExecutionException, InterruptedException { - final QueryTable table = - TstUtils.testRefreshingTable(i(2, 4, 6, 8), c("y", "a", "b", "a", "c")); + final QueryTable table = TstUtils.testRefreshingTable(i(2, 4, 6, 8), c("y", "a", "b", "a", "c")); final Table expected1 = newTable(c("y", "a", "b", "c")); final Table expected2 = newTable(c("y", "a", "d", "b", "c")); final Table expected2outOfOrder = newTable(c("y", "a", "b", "c", "d")); @@ -1115,8 +1069,7 @@ public void testSelectDistinctReset() throws ExecutionException, InterruptedExce QueryScope.addParam("barrierFunction", barrierFunction); try { - final QueryTable table = - TstUtils.testRefreshingTable(i(2, 4, 6, 8), c("y", "a", "b", "a", "c")); + final QueryTable table = TstUtils.testRefreshingTable(i(2, 4, 6, 8), c("y", "a", "b", "a", "c")); final Table slowed = table.updateView("z=barrierFunction.apply(y)"); final Table expected1 = newTable(c("z", "a", "b")); @@ -1166,10 +1119,8 @@ public void testCountBy() throws Exception { public void testMinMaxBy() throws Exception { testByConcurrent(t -> t.maxBy("KeyColumn")); testByConcurrent(t -> t.minBy("KeyColumn")); - testByConcurrent(t -> t.by(new AddOnlyMinMaxByStateFactoryImpl(true), "KeyColumn"), true, - false, false, true); - testByConcurrent(t -> t.by(new AddOnlyMinMaxByStateFactoryImpl(false), "KeyColumn"), true, - false, false, true); + testByConcurrent(t -> t.by(new AddOnlyMinMaxByStateFactoryImpl(true), "KeyColumn"), true, false, false, true); + testByConcurrent(t -> t.by(new AddOnlyMinMaxByStateFactoryImpl(false), "KeyColumn"), true, false, false, true); } public void testFirstLastBy() throws Exception { @@ -1193,16 +1144,16 @@ public void testNoKeyBy() throws Exception { } public void testPercentileBy() throws Exception { - testByConcurrent(t -> 
t.dropColumns("KeyColumn").by(new PercentileByStateFactoryImpl(0.25)), - false, false, true, true); - testByConcurrent(t -> t.dropColumns("KeyColumn").by(new PercentileByStateFactoryImpl(0.75)), - false, false, true, true); + testByConcurrent(t -> t.dropColumns("KeyColumn").by(new PercentileByStateFactoryImpl(0.25)), false, false, true, + true); + testByConcurrent(t -> t.dropColumns("KeyColumn").by(new PercentileByStateFactoryImpl(0.75)), false, false, true, + true); testByConcurrent(t -> t.medianBy("KeyColumn")); } public void testAggCombo() throws Exception { - testByConcurrent(t -> t.by(AggCombo(AggAvg("AvgInt=IntCol"), AggCount("NumInts"), - AggSum("SumDouble=DoubleCol"), AggMax("MaxDouble=DoubleCol")), "KeyColumn")); + testByConcurrent(t -> t.by(AggCombo(AggAvg("AvgInt=IntCol"), AggCount("NumInts"), AggSum("SumDouble=DoubleCol"), + AggMax("MaxDouble=DoubleCol")), "KeyColumn")); } public void testWavgBy() throws Exception { @@ -1217,8 +1168,8 @@ private void testByConcurrent(Function function) throws Exception testByConcurrent(function, true, true, true, true); } - private void testByConcurrent(Function function, boolean hasKeys, - boolean withReset, boolean allowModifications, boolean haveBigNumerics) throws Exception { + private void testByConcurrent(Function function, boolean hasKeys, boolean withReset, + boolean allowModifications, boolean haveBigNumerics) throws Exception { setExpectError(false); final QueryTable table = makeByConcurrentBaseTable(haveBigNumerics); @@ -1240,8 +1191,7 @@ private void testByConcurrent(Function function, boolean hasKeys, try { return function.apply(slowed); } finally { - System.out - .println("Callable complete: " + (System.currentTimeMillis() - start)); + System.out.println("Callable complete: " + (System.currentTimeMillis() - start)); } }; } else { @@ -1249,13 +1199,13 @@ private void testByConcurrent(Function function, boolean hasKeys, callable = () -> function.apply(table); } - // We only care about the silent version of 
this table, as it's just a vessel to tick - // and ensure that the resultant table + // We only care about the silent version of this table, as it's just a vessel to tick and ensure that the + // resultant table // is computed using the appropriate version. final Table expected1 = LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLocked(() -> function.apply(table.silent()).select()); - final Table expected2 = LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLocked(() -> function.apply(table2)); + .computeLocked(() -> function.apply(table.silent()).select()); + final Table expected2 = + LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked(() -> function.apply(table2)); LiveTableMonitor.DEFAULT.startCycleForUnitTests(); @@ -1293,10 +1243,10 @@ private void testByConcurrent(Function function, boolean hasKeys, System.out.println("Expected 1"); TableTools.show(expected1); - // The column sources are redirected, and the underlying table has been updated without - // a notification _yet_, - // so the column sources have _already_ changed and we are inside an update cycle, so - // the value of get() is indeterminate + // The column sources are redirected, and the underlying table has been updated without a notification + // _yet_, + // so the column sources have _already_ changed and we are inside an update cycle, so the value of get() is + // indeterminate // therefore this assert is not really a valid thing to do. // TstUtils.assertTableEquals(expected1, result2); final Table prevResult2a = prevTable(result2); @@ -1309,8 +1259,7 @@ private void testByConcurrent(Function function, boolean hasKeys, final Future
    future3 = pool.submit(callable); if (withReset) { - while (((QueryTable) slowed).getLastNotificationStep() != LogicalClock.DEFAULT - .currentStep()) { + while (((QueryTable) slowed).getLastNotificationStep() != LogicalClock.DEFAULT.currentStep()) { LiveTableMonitor.DEFAULT.flushOneNotificationForUnitTests(); } } @@ -1336,9 +1285,9 @@ private void testByConcurrent(Function function, boolean hasKeys, if (hasKeys) { TstUtils.assertTableEquals(expected2.sort("KeyColumn"), result1.sort("KeyColumn"), - TableDiff.DiffItems.DoublesExact); + TableDiff.DiffItems.DoublesExact); TstUtils.assertTableEquals(expected2.sort("KeyColumn"), result2.sort("KeyColumn"), - TableDiff.DiffItems.DoublesExact); + TableDiff.DiffItems.DoublesExact); } else { TstUtils.assertTableEquals(expected2, result1, TableDiff.DiffItems.DoublesExact); TstUtils.assertTableEquals(expected2, result2, TableDiff.DiffItems.DoublesExact); @@ -1374,13 +1323,13 @@ private void testByExternalConcurrent(boolean withReset) throws Exception { callable = () -> table.byExternal("KeyColumn"); } - // We only care about the silent version of this table, as it's just a vessel to tick and - // ensure that the resultant table + // We only care about the silent version of this table, as it's just a vessel to tick and ensure that the + // resultant table // is computed using the appropriate version. 
final Table expected1 = LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLocked(() -> table.silent().byExternal("KeyColumn").merge().select()); + .computeLocked(() -> table.silent().byExternal("KeyColumn").merge().select()); final Table expected2 = LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLocked(() -> table2.silent().byExternal("KeyColumn").merge().select()); + .computeLocked(() -> table2.silent().byExternal("KeyColumn").merge().select()); LiveTableMonitor.DEFAULT.startCycleForUnitTests(); @@ -1427,8 +1376,7 @@ private void testByExternalConcurrent(boolean withReset) throws Exception { final Future future3 = pool.submit(callable); if (withReset) { - while (((QueryTable) slowed).getLastNotificationStep() != LogicalClock.DEFAULT - .currentStep()) { + while (((QueryTable) slowed).getLastNotificationStep() != LogicalClock.DEFAULT.currentStep()) { LiveTableMonitor.DEFAULT.flushOneNotificationForUnitTests(); } } @@ -1457,32 +1405,28 @@ private void testByExternalConcurrent(boolean withReset) throws Exception { private QueryTable makeByConcurrentBaseTable(boolean haveBigNumerics) { final List columnHolders = new ArrayList<>(Arrays.asList( - c("KeyColumn", "a", "b", "a", "c"), - intCol("IntCol", 1, 2, 3, 4), - doubleCol("DoubleCol", 100.1, 200.2, 300.3, 400.4), - floatCol("FloatCol", 10.1f, 20.2f, 30.3f, 40.4f), - shortCol("ShortCol", (short) 10, (short) 20, (short) 30, (short) 40), - byteCol("ByteCol", (byte) 11, (byte) 12, (byte) 13, (byte) 14), - charCol("CharCol", 'A', 'B', 'C', 'D'), - longCol("LongCol", 10_000_000_000L, 20_000_000_000L, 30_000_000_000L, - 40_000_000_000L))); + c("KeyColumn", "a", "b", "a", "c"), + intCol("IntCol", 1, 2, 3, 4), + doubleCol("DoubleCol", 100.1, 200.2, 300.3, 400.4), + floatCol("FloatCol", 10.1f, 20.2f, 30.3f, 40.4f), + shortCol("ShortCol", (short) 10, (short) 20, (short) 30, (short) 40), + byteCol("ByteCol", (byte) 11, (byte) 12, (byte) 13, (byte) 14), + charCol("CharCol", 'A', 'B', 'C', 'D'), + longCol("LongCol", 
10_000_000_000L, 20_000_000_000L, 30_000_000_000L, 40_000_000_000L))); if (haveBigNumerics) { - columnHolders - .add(col("BigDecCol", BigDecimal.valueOf(10000.1), BigDecimal.valueOf(20000.2), + columnHolders.add(col("BigDecCol", BigDecimal.valueOf(10000.1), BigDecimal.valueOf(20000.2), BigDecimal.valueOf(40000.3), BigDecimal.valueOf(40000.4))); - columnHolders - .add(col("BigIntCol", BigInteger.valueOf(100000), BigInteger.valueOf(200000), + columnHolders.add(col("BigIntCol", BigInteger.valueOf(100000), BigInteger.valueOf(200000), BigInteger.valueOf(300000), BigInteger.valueOf(400000))); } return TstUtils.testRefreshingTable(i(2, 4, 6, 8), - columnHolders.toArray(ColumnHolder.ZERO_LENGTH_COLUMN_HOLDER_ARRAY)); + columnHolders.toArray(ColumnHolder.ZERO_LENGTH_COLUMN_HOLDER_ARRAY)); } - private QueryTable makeByConcurrentStep2Table(boolean allowModifications, - boolean haveBigNumerics) { + private QueryTable makeByConcurrentStep2Table(boolean allowModifications, boolean haveBigNumerics) { final QueryTable table2 = makeByConcurrentBaseTable(haveBigNumerics); doByConcurrentAdditions(table2, haveBigNumerics); if (allowModifications) { @@ -1494,85 +1438,80 @@ private QueryTable makeByConcurrentStep2Table(boolean allowModifications, private void doByConcurrentModifications(QueryTable table, boolean haveBigNumerics) { final List columnHolders = new ArrayList<>(Arrays.asList( - c("KeyColumn", "b"), - intCol("IntCol", 7), - doubleCol("DoubleCol", 700.7), - floatCol("FloatCol", 70.7f), - shortCol("ShortCol", (short) 70), - byteCol("ByteCol", (byte) 17), - charCol("CharCol", 'E'), - longCol("LongCol", 70_000_000_000L))); + c("KeyColumn", "b"), + intCol("IntCol", 7), + doubleCol("DoubleCol", 700.7), + floatCol("FloatCol", 70.7f), + shortCol("ShortCol", (short) 70), + byteCol("ByteCol", (byte) 17), + charCol("CharCol", 'E'), + longCol("LongCol", 70_000_000_000L))); if (haveBigNumerics) { columnHolders.addAll(Arrays.asList( - col("BigDecCol", BigDecimal.valueOf(70000.7)), - 
col("BigIntCol", BigInteger.valueOf(700000)))); + col("BigDecCol", BigDecimal.valueOf(70000.7)), + col("BigIntCol", BigInteger.valueOf(700000)))); } - TstUtils.addToTable(table, i(8), - columnHolders.toArray(ColumnHolder.ZERO_LENGTH_COLUMN_HOLDER_ARRAY)); + TstUtils.addToTable(table, i(8), columnHolders.toArray(ColumnHolder.ZERO_LENGTH_COLUMN_HOLDER_ARRAY)); } private void doByConcurrentAdditions(QueryTable table, boolean haveBigNumerics) { final List columnHolders = new ArrayList<>(Arrays.asList( - c("KeyColumn", "d", "a"), - intCol("IntCol", 5, 6), - doubleCol("DoubleCol", 505.5, 600.6), - floatCol("FloatCol", 50.5f, 60.6f), - shortCol("ShortCol", (short) 50, (short) 60), - byteCol("ByteCol", (byte) 15, (byte) 16), - charCol("CharCol", 'E', 'F'), - longCol("LongCol", 50_000_000_000L, 60_000_000_000L))); + c("KeyColumn", "d", "a"), + intCol("IntCol", 5, 6), + doubleCol("DoubleCol", 505.5, 600.6), + floatCol("FloatCol", 50.5f, 60.6f), + shortCol("ShortCol", (short) 50, (short) 60), + byteCol("ByteCol", (byte) 15, (byte) 16), + charCol("CharCol", 'E', 'F'), + longCol("LongCol", 50_000_000_000L, 60_000_000_000L))); if (haveBigNumerics) { columnHolders.addAll(Arrays.asList( - col("BigDecCol", BigDecimal.valueOf(50000.5), BigDecimal.valueOf(60000.6)), - col("BigIntCol", BigInteger.valueOf(500000), BigInteger.valueOf(600000)))); + col("BigDecCol", BigDecimal.valueOf(50000.5), BigDecimal.valueOf(60000.6)), + col("BigIntCol", BigInteger.valueOf(500000), BigInteger.valueOf(600000)))); } - TstUtils.addToTable(table, i(5, 9), - columnHolders.toArray(ColumnHolder.ZERO_LENGTH_COLUMN_HOLDER_ARRAY)); + TstUtils.addToTable(table, i(5, 9), columnHolders.toArray(ColumnHolder.ZERO_LENGTH_COLUMN_HOLDER_ARRAY)); } public void testConstructSnapshotException() throws ExecutionException, InterruptedException { - final QueryTable table = - TstUtils.testRefreshingTable(i(2, 4, 6, 8), c("y", "a", "b", "c", "d")); + final QueryTable table = TstUtils.testRefreshingTable(i(2, 4, 6, 8), c("y", 
"a", "b", "c", "d")); final Future future = pool.submit(() -> { final MutableObject result = new MutableObject<>(); ConstructSnapshot.callDataSnapshotFunction("testConstructSnapshotException", - ConstructSnapshot.makeSnapshotControl(false, table), (usePrev, clock) -> { - Assert.eqFalse(usePrev, "usePrev"); - final int size = table.intSize(); - final String[] result1 = new String[size]; - result.setValue(result1); - // on the first pass, we want to have an AAIOBE for the result1, which will - // occur, because 100ms - // into this sleep; the index size will increase by 1 - SleepUtil.sleep(1000); - - // and make sure the terrible thing has happened - if (result1.length == 4) { - Assert.eq(table.getIndex().size(), "table.getIndex().size()", 5); - } - - // noinspection unchecked - final ColumnSource cs = table.getColumnSource("y"); - - int ii = 0; - for (final Index.Iterator it = table.getIndex().iterator(); it.hasNext();) { - final long key = it.nextLong(); - result1[ii++] = cs.get(key); - } - - return true; - }); + ConstructSnapshot.makeSnapshotControl(false, table), (usePrev, clock) -> { + Assert.eqFalse(usePrev, "usePrev"); + final int size = table.intSize(); + final String[] result1 = new String[size]; + result.setValue(result1); + // on the first pass, we want to have an AAIOBE for the result1, which will occur, because 100ms + // into this sleep; the index size will increase by 1 + SleepUtil.sleep(1000); + + // and make sure the terrible thing has happened + if (result1.length == 4) { + Assert.eq(table.getIndex().size(), "table.getIndex().size()", 5); + } + + // noinspection unchecked + final ColumnSource cs = table.getColumnSource("y"); + + int ii = 0; + for (final Index.Iterator it = table.getIndex().iterator(); it.hasNext();) { + final long key = it.nextLong(); + result1[ii++] = cs.get(key); + } + + return true; + }); return result.getValue(); }); - // wait until we've had the future start, but before it's actually gotten completed, so we - // know that it is 
+ // wait until we've had the future start, but before it's actually gotten completed, so we know that it is // going to be kicked off in the idle cycle SleepUtil.sleep(100); @@ -1591,12 +1530,11 @@ public void testConstructSnapshotException() throws ExecutionException, Interrup public void testStaticSnapshot() throws ExecutionException, InterruptedException { final Table emptyTable = TableTools.emptyTable(0); - final QueryTable table = TstUtils.testRefreshingTable(i(2, 4, 6), c("x", 1, 2, 3), - c("y", "a", "b", "c"), c("z", true, false, true)); - final Table tableStart = - TableTools.newTable(c("x", 1, 2, 3), c("y", "a", "b", "c"), c("z", true, false, true)); - final Table tableUpdate = TableTools.newTable(c("x", 1, 4, 2, 3), - c("y", "a", "d", "b", "c"), c("z", true, true, false, true)); + final QueryTable table = TstUtils.testRefreshingTable(i(2, 4, 6), c("x", 1, 2, 3), c("y", "a", "b", "c"), + c("z", true, false, true)); + final Table tableStart = TableTools.newTable(c("x", 1, 2, 3), c("y", "a", "b", "c"), c("z", true, false, true)); + final Table tableUpdate = + TableTools.newTable(c("x", 1, 4, 2, 3), c("y", "a", "d", "b", "c"), c("z", true, true, false, true)); LiveTableMonitor.DEFAULT.startCycleForUnitTests(); @@ -1643,8 +1581,7 @@ public void testSnapshotLiveness() { TstUtils.assertTableEquals(snap, right); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - final ShiftAwareListener.Update downstream = - new ShiftAwareListener.Update(i(1), i(), i(), + final ShiftAwareListener.Update downstream = new ShiftAwareListener.Update(i(1), i(), i(), IndexShiftData.EMPTY, ModifiedColumnSet.EMPTY); TstUtils.addToTable(right, downstream.added, c("x", 2)); right.notifyListeners(downstream); @@ -1652,8 +1589,7 @@ public void testSnapshotLiveness() { TstUtils.assertTableEquals(snap, prevTable(right)); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - final ShiftAwareListener.Update downstream = - new ShiftAwareListener.Update(i(1), i(), i(), + final 
ShiftAwareListener.Update downstream = new ShiftAwareListener.Update(i(1), i(), i(), IndexShiftData.EMPTY, ModifiedColumnSet.EMPTY); TstUtils.addToTable(left, downstream.added); left.notifyListeners(downstream); diff --git a/DB/src/test/java/io/deephaven/db/v2/TestDownsampledWhereFilter.java b/DB/src/test/java/io/deephaven/db/v2/TestDownsampledWhereFilter.java index db756343a0f..4a49cadc8f8 100644 --- a/DB/src/test/java/io/deephaven/db/v2/TestDownsampledWhereFilter.java +++ b/DB/src/test/java/io/deephaven/db/v2/TestDownsampledWhereFilter.java @@ -26,15 +26,14 @@ public void testDownsampledWhere() throws IOException { int size = 1000; - final QueryTable table = getTable(false, size, random, - initColumnInfos(new String[] {"Timestamp", "doubleCol"}, + final QueryTable table = getTable(false, size, random, initColumnInfos(new String[] {"Timestamp", "doubleCol"}, new SortedDateTimeGenerator(DBTimeUtils.convertDateTime("2015-09-11T09:30:00 NY"), - DBTimeUtils.convertDateTime("2015-09-11T10:00:00 NY")), + DBTimeUtils.convertDateTime("2015-09-11T10:00:00 NY")), new DoubleGenerator(0, 100))); Table downsampled = table.where(new DownsampledWhereFilter("Timestamp", 60_000_000_000L)); - Table standardWay = table.updateView("TimeBin=upperBin(Timestamp, 60000000000)") - .lastBy("TimeBin").dropColumns("TimeBin"); + Table standardWay = + table.updateView("TimeBin=upperBin(Timestamp, 60000000000)").lastBy("TimeBin").dropColumns("TimeBin"); TableTools.showWithIndex(downsampled); TableTools.showWithIndex(standardWay); @@ -50,16 +49,15 @@ public void testDownsampledWhereLowerFirst() throws IOException { int size = 1000; - final QueryTable table = getTable(false, size, random, - initColumnInfos(new String[] {"Timestamp", "doubleCol"}, + final QueryTable table = getTable(false, size, random, initColumnInfos(new String[] {"Timestamp", "doubleCol"}, new SortedDateTimeGenerator(DBTimeUtils.convertDateTime("2015-09-11T09:30:00 NY"), - DBTimeUtils.convertDateTime("2015-09-11T10:00:00 NY")), 
+ DBTimeUtils.convertDateTime("2015-09-11T10:00:00 NY")), new DoubleGenerator(0, 100))); Table downsampled = table.where(new DownsampledWhereFilter("Timestamp", 60_000_000_000L, - DownsampledWhereFilter.SampleOrder.LOWERFIRST)); - Table standardWay = table.updateView("TimeBin=lowerBin(Timestamp, 60000000000)") - .firstBy("TimeBin").dropColumns("TimeBin"); + DownsampledWhereFilter.SampleOrder.LOWERFIRST)); + Table standardWay = + table.updateView("TimeBin=lowerBin(Timestamp, 60000000000)").firstBy("TimeBin").dropColumns("TimeBin"); TableTools.showWithIndex(downsampled); TableTools.showWithIndex(standardWay); diff --git a/DB/src/test/java/io/deephaven/db/v2/TestEvenlyDividedTableMap.java b/DB/src/test/java/io/deephaven/db/v2/TestEvenlyDividedTableMap.java index 04ea2c9dd8b..c7496879072 100644 --- a/DB/src/test/java/io/deephaven/db/v2/TestEvenlyDividedTableMap.java +++ b/DB/src/test/java/io/deephaven/db/v2/TestEvenlyDividedTableMap.java @@ -18,14 +18,12 @@ public void testStatic() { public void testIncremental() { final QueryTable t = TstUtils.testRefreshingTable(Index.FACTORY.getFlatIndex(1000000)); - final Table tu = - LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> t.update("K=k*2")); - final Table tk2 = - LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> tu.update("K2=K*2")); + final Table tu = LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> t.update("K=k*2")); + final Table tk2 = LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> tu.update("K2=K*2")); final TableMap tm = EvenlyDividedTableMap.makeEvenlyDividedTableMap(tu, 16, 100000); assertEquals(10, tm.size()); final Table t2 = LiveTableMonitor.DEFAULT.sharedLock() - .computeLocked(() -> ((TransformableTableMap) tm.asTable().update("K2=K*2")).merge()); + .computeLocked(() -> ((TransformableTableMap) tm.asTable().update("K2=K*2")).merge()); TstUtils.assertTableEquals(tk2, t2); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { diff --git 
a/DB/src/test/java/io/deephaven/db/v2/TestKeyedTableListener.java b/DB/src/test/java/io/deephaven/db/v2/TestKeyedTableListener.java index 2e17a576c66..d371e16401b 100644 --- a/DB/src/test/java/io/deephaven/db/v2/TestKeyedTableListener.java +++ b/DB/src/test/java/io/deephaven/db/v2/TestKeyedTableListener.java @@ -29,9 +29,9 @@ public void setUp() { LiveTableMonitor.DEFAULT.resetForUnitTests(false); this.mockListener = mock(KeyedTableListener.KeyUpdateListener.class); this.table = TstUtils.testRefreshingTable(TstUtils.i(0, 1, 2), - TstUtils.c("Key1", "A", "B", "C"), - TstUtils.c("Key2", 1, 2, 3), - TstUtils.c("Data", 1.0, 2.0, 3.0)); + TstUtils.c("Key1", "A", "B", "C"), + TstUtils.c("Key2", 1, 2, 3), + TstUtils.c("Data", 1.0, 2.0, 3.0)); this.aKey = new SmartKey("A", 1); this.bKey = new SmartKey("B", 2); this.cKey = new SmartKey("C", 3); @@ -65,9 +65,8 @@ public void testGetRow() { public void testNoChanges() { checking(new Expectations() { { - never(mockListener).update(with(any(KeyedTableListener.class)), - with(any(SmartKey.class)), with(any(long.class)), - with(any(KeyedTableListener.KeyEvent.class))); + never(mockListener).update(with(any(KeyedTableListener.class)), with(any(SmartKey.class)), + with(any(long.class)), with(any(KeyedTableListener.KeyEvent.class))); } }); keyedTableListener.subscribe(aKey, mockListener); @@ -75,7 +74,7 @@ public void testNoChanges() { keyedTableListener.subscribe(cKey, mockListener); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle( - () -> table.notifyListeners(noAdded.clone(), noRemoved.clone(), noModified.clone())); + () -> table.notifyListeners(noAdded.clone(), noRemoved.clone(), noModified.clone())); keyedTableListener.unsubscribe(aKey, mockListener); keyedTableListener.unsubscribe(bKey, mockListener); @@ -86,16 +85,15 @@ public void testAdd() { final SmartKey newKey = new SmartKey("D", 4); checking(new Expectations() { { - oneOf(mockListener).update(with(any(KeyedTableListener.class)), with(newKey), - with(3L), 
with(KeyedTableListener.KeyEvent.ADDED)); + oneOf(mockListener).update(with(any(KeyedTableListener.class)), with(newKey), with(3L), + with(KeyedTableListener.KeyEvent.ADDED)); } }); keyedTableListener.subscribe(newKey, mockListener); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { final Index newAdd = TstUtils.i(3); - TstUtils.addToTable(table, newAdd, TstUtils.c("Key1", "D"), TstUtils.c("Key2", 4), - TstUtils.c("Data", 4.0)); + TstUtils.addToTable(table, newAdd, TstUtils.c("Key1", "D"), TstUtils.c("Key2", 4), TstUtils.c("Data", 4.0)); table.notifyListeners(newAdd, noRemoved.clone(), noModified.clone()); }); @@ -109,8 +107,8 @@ public void testAdd() { public void testRemoved() { checking(new Expectations() { { - oneOf(mockListener).update(with(any(KeyedTableListener.class)), with(cKey), - with(2L), with(KeyedTableListener.KeyEvent.REMOVED)); + oneOf(mockListener).update(with(any(KeyedTableListener.class)), with(cKey), with(2L), + with(KeyedTableListener.KeyEvent.REMOVED)); } }); keyedTableListener.subscribe(cKey, mockListener); @@ -131,8 +129,8 @@ public void testRemoved() { public void testModify() { checking(new Expectations() { { - oneOf(mockListener).update(with(any(KeyedTableListener.class)), with(cKey), - with(2L), with(KeyedTableListener.KeyEvent.MODIFIED)); + oneOf(mockListener).update(with(any(KeyedTableListener.class)), with(cKey), with(2L), + with(KeyedTableListener.KeyEvent.MODIFIED)); } }); @@ -144,7 +142,7 @@ public void testModify() { LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { final Index newModified = TstUtils.i(2); TstUtils.addToTable(table, newModified, TstUtils.c("Key1", "C"), TstUtils.c("Key2", 3), - TstUtils.c("Data", 6.0)); + TstUtils.c("Data", 6.0)); table.notifyListeners(noAdded.clone(), noRemoved.clone(), newModified); }); @@ -160,10 +158,10 @@ public void testModifyChangedKey() { checking(new Expectations() { { - oneOf(mockListener).update(with(any(KeyedTableListener.class)), with(cKey), - with(2L), 
with(KeyedTableListener.KeyEvent.REMOVED)); - oneOf(mockListener).update(with(any(KeyedTableListener.class)), with(newKey), - with(2L), with(KeyedTableListener.KeyEvent.ADDED)); + oneOf(mockListener).update(with(any(KeyedTableListener.class)), with(cKey), with(2L), + with(KeyedTableListener.KeyEvent.REMOVED)); + oneOf(mockListener).update(with(any(KeyedTableListener.class)), with(newKey), with(2L), + with(KeyedTableListener.KeyEvent.ADDED)); } }); @@ -174,7 +172,7 @@ public void testModifyChangedKey() { final Index newModified = TstUtils.i(2); // Add to table on an existing index is a modify TstUtils.addToTable(table, newModified, TstUtils.c("Key1", "C"), TstUtils.c("Key2", 4), - TstUtils.c("Data", 6.0)); + TstUtils.c("Data", 6.0)); table.notifyListeners(noAdded.clone(), noRemoved.clone(), newModified); }); @@ -196,14 +194,14 @@ public void testModifyKeyMoved() { checking(new Expectations() { { - oneOf(mockListener).update(with(any(KeyedTableListener.class)), with(bKey), - with(1L), with(KeyedTableListener.KeyEvent.REMOVED)); - oneOf(mockListener).update(with(any(KeyedTableListener.class)), with(cKey), - with(2L), with(KeyedTableListener.KeyEvent.REMOVED)); - oneOf(mockListener).update(with(any(KeyedTableListener.class)), with(cKey), - with(1L), with(KeyedTableListener.KeyEvent.ADDED)); - oneOf(mockListener).update(with(any(KeyedTableListener.class)), with(newKey), - with(2L), with(KeyedTableListener.KeyEvent.ADDED)); + oneOf(mockListener).update(with(any(KeyedTableListener.class)), with(bKey), with(1L), + with(KeyedTableListener.KeyEvent.REMOVED)); + oneOf(mockListener).update(with(any(KeyedTableListener.class)), with(cKey), with(2L), + with(KeyedTableListener.KeyEvent.REMOVED)); + oneOf(mockListener).update(with(any(KeyedTableListener.class)), with(cKey), with(1L), + with(KeyedTableListener.KeyEvent.ADDED)); + oneOf(mockListener).update(with(any(KeyedTableListener.class)), with(newKey), with(2L), + with(KeyedTableListener.KeyEvent.ADDED)); } }); @@ -214,8 +212,8 
@@ public void testModifyKeyMoved() { LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { final Index newModified = TstUtils.i(1, 2); // Add to table on an existing index is a modify - TstUtils.addToTable(table, newModified, TstUtils.c("Key1", "C", "D"), - TstUtils.c("Key2", 3, 4), TstUtils.c("Data", 3.0, 4.0)); + TstUtils.addToTable(table, newModified, TstUtils.c("Key1", "C", "D"), TstUtils.c("Key2", 3, 4), + TstUtils.c("Data", 3.0, 4.0)); table.notifyListeners(noAdded.clone(), noRemoved.clone(), newModified); }); @@ -239,14 +237,14 @@ public void testModifyKeyMoved() { public void testModifySwap() { checking(new Expectations() { { - oneOf(mockListener).update(with(any(KeyedTableListener.class)), with(bKey), - with(1L), with(KeyedTableListener.KeyEvent.REMOVED)); - oneOf(mockListener).update(with(any(KeyedTableListener.class)), with(cKey), - with(1L), with(KeyedTableListener.KeyEvent.ADDED)); - oneOf(mockListener).update(with(any(KeyedTableListener.class)), with(cKey), - with(2L), with(KeyedTableListener.KeyEvent.REMOVED)); - oneOf(mockListener).update(with(any(KeyedTableListener.class)), with(bKey), - with(2L), with(KeyedTableListener.KeyEvent.ADDED)); + oneOf(mockListener).update(with(any(KeyedTableListener.class)), with(bKey), with(1L), + with(KeyedTableListener.KeyEvent.REMOVED)); + oneOf(mockListener).update(with(any(KeyedTableListener.class)), with(cKey), with(1L), + with(KeyedTableListener.KeyEvent.ADDED)); + oneOf(mockListener).update(with(any(KeyedTableListener.class)), with(cKey), with(2L), + with(KeyedTableListener.KeyEvent.REMOVED)); + oneOf(mockListener).update(with(any(KeyedTableListener.class)), with(bKey), with(2L), + with(KeyedTableListener.KeyEvent.ADDED)); } }); keyedTableListener.subscribe(bKey, mockListener); @@ -254,8 +252,8 @@ public void testModifySwap() { LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { final Index newModified = TstUtils.i(1, 2); - TstUtils.addToTable(table, newModified, TstUtils.c("Key1", "C", "B"), - 
TstUtils.c("Key2", 3, 2), TstUtils.c("Data", 3.0, 2.0)); + TstUtils.addToTable(table, newModified, TstUtils.c("Key1", "C", "B"), TstUtils.c("Key2", 3, 2), + TstUtils.c("Data", 3.0, 2.0)); table.notifyListeners(noAdded.clone(), noRemoved.clone(), newModified); }); @@ -276,14 +274,14 @@ public void testAddRemoveModify() { checking(new Expectations() { { - oneOf(mockListener).update(with(any(KeyedTableListener.class)), with(aKey), - with(0L), with(KeyedTableListener.KeyEvent.MODIFIED)); - never(mockListener).update(with(any(KeyedTableListener.class)), with(bKey), - with(1L), with(any(KeyedTableListener.KeyEvent.class))); - oneOf(mockListener).update(with(any(KeyedTableListener.class)), with(cKey), - with(2L), with(KeyedTableListener.KeyEvent.REMOVED)); - oneOf(mockListener).update(with(any(KeyedTableListener.class)), with(newKey), - with(4L), with(KeyedTableListener.KeyEvent.ADDED)); + oneOf(mockListener).update(with(any(KeyedTableListener.class)), with(aKey), with(0L), + with(KeyedTableListener.KeyEvent.MODIFIED)); + never(mockListener).update(with(any(KeyedTableListener.class)), with(bKey), with(1L), + with(any(KeyedTableListener.KeyEvent.class))); + oneOf(mockListener).update(with(any(KeyedTableListener.class)), with(cKey), with(2L), + with(KeyedTableListener.KeyEvent.REMOVED)); + oneOf(mockListener).update(with(any(KeyedTableListener.class)), with(newKey), with(4L), + with(KeyedTableListener.KeyEvent.ADDED)); } }); @@ -298,11 +296,10 @@ public void testAddRemoveModify() { final Index newModified = TstUtils.i(0); TstUtils.addToTable(table, newModified, TstUtils.c("Key1", "A"), TstUtils.c("Key2", 1), - TstUtils.c("Data", 1.5)); + TstUtils.c("Data", 1.5)); final Index newAdd = TstUtils.i(4); - TstUtils.addToTable(table, newAdd, TstUtils.c("Key1", "D"), TstUtils.c("Key2", 4), - TstUtils.c("Data", 4.0)); + TstUtils.addToTable(table, newAdd, TstUtils.c("Key1", "D"), TstUtils.c("Key2", 4), TstUtils.c("Data", 4.0)); table.notifyListeners(newAdd, newRemoved, newModified); 
}); @@ -334,10 +331,10 @@ public void testRemoveAdd() { checking(new Expectations() { { - oneOf(mockListener).update(with(any(KeyedTableListener.class)), with(cKey), - with(2L), with(KeyedTableListener.KeyEvent.REMOVED)); - oneOf(mockListener).update(with(any(KeyedTableListener.class)), with(newKey), - with(2L), with(KeyedTableListener.KeyEvent.ADDED)); + oneOf(mockListener).update(with(any(KeyedTableListener.class)), with(cKey), with(2L), + with(KeyedTableListener.KeyEvent.REMOVED)); + oneOf(mockListener).update(with(any(KeyedTableListener.class)), with(newKey), with(2L), + with(KeyedTableListener.KeyEvent.ADDED)); } }); @@ -355,7 +352,7 @@ public void testRemoveAdd() { LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { final Index newAdded = TstUtils.i(2); TstUtils.addToTable(table, newAdded, TstUtils.c("Key1", "D"), TstUtils.c("Key2", 4), - TstUtils.c("Data", 4.0)); + TstUtils.c("Data", 4.0)); table.notifyListeners(newAdded, noRemoved, noModified); }); diff --git a/DB/src/test/java/io/deephaven/db/v2/TestListenerFailure.java b/DB/src/test/java/io/deephaven/db/v2/TestListenerFailure.java index a6cc95cf8ec..7a3bf473e98 100644 --- a/DB/src/test/java/io/deephaven/db/v2/TestListenerFailure.java +++ b/DB/src/test/java/io/deephaven/db/v2/TestListenerFailure.java @@ -15,8 +15,8 @@ public class TestListenerFailure extends LiveTableTestCase { public void testListenerFailure() { final QueryTable source = TstUtils.testRefreshingTable(TstUtils.c("Str", "A", "B")); - final Table updated = LiveTableMonitor.DEFAULT.sharedLock() - .computeLocked(() -> source.update("UC=Str.toUpperCase()")); + final Table updated = + LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> source.update("UC=Str.toUpperCase()")); TableTools.showWithIndex(updated); @@ -45,11 +45,11 @@ public void testListenerFailure() { } try { - ((DynamicTable) updated).listenForUpdates( - new InstrumentedListenerAdapter("Dummy", (QueryTable) updated, false) { - @Override - public void onUpdate(Index added, 
Index removed, Index modified) {} - }, false); + ((DynamicTable) updated) + .listenForUpdates(new InstrumentedListenerAdapter("Dummy", (QueryTable) updated, false) { + @Override + public void onUpdate(Index added, Index removed, Index modified) {} + }, false); TestCase.fail("Should not be allowed to listen to failed table"); } catch (IllegalStateException ise) { assertEquals("Can not listen to failed table QueryTable", ise.getMessage()); @@ -74,8 +74,7 @@ public void testMemoCheck() { final QueryTable source = TstUtils.testRefreshingTable(TstUtils.c("Str", "A", "B")); final QueryTable viewed = (QueryTable) source.updateView("UC=Str.toUpperCase()"); - final Table filtered = - LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> viewed.where("UC=`A`")); + final Table filtered = LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> viewed.where("UC=`A`")); TableTools.showWithIndex(filtered); @@ -86,8 +85,7 @@ public void testMemoCheck() { assertFalse(((DynamicTable) filtered).isFailed()); - final Table filteredAgain = - LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> viewed.where("UC=`A`")); + final Table filteredAgain = LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> viewed.where("UC=`A`")); assertSame(filtered, filteredAgain); allowingError(() -> { @@ -107,12 +105,10 @@ public void testMemoCheck() { }); final Table filteredYetAgain = - LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> viewed.where("UC=`A`")); + LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> viewed.where("UC=`A`")); assertNotSame(filtered, filteredYetAgain); assertFalse(((DynamicTable) filteredYetAgain).isFailed()); - assertTableEquals( - TableTools.newTable(TableTools.col("Str", "A"), TableTools.col("UC", "A")), - filteredYetAgain); + assertTableEquals(TableTools.newTable(TableTools.col("Str", "A"), TableTools.col("UC", "A")), filteredYetAgain); } private static boolean isFilterNpe(List throwables) { diff --git 
a/DB/src/test/java/io/deephaven/db/v2/TestMapCodecColumns.java b/DB/src/test/java/io/deephaven/db/v2/TestMapCodecColumns.java index cca841b32ad..758ff39cb9b 100644 --- a/DB/src/test/java/io/deephaven/db/v2/TestMapCodecColumns.java +++ b/DB/src/test/java/io/deephaven/db/v2/TestMapCodecColumns.java @@ -33,65 +33,57 @@ public class TestMapCodecColumns { static { final ParquetInstructions.Builder builder = new ParquetInstructions.Builder(); // noinspection unchecked - STRING_MAP_COLUMN_DEFINITION = - ColumnDefinition.fromGenericType("StrStrMap", (Class) Map.class); + STRING_MAP_COLUMN_DEFINITION = ColumnDefinition.fromGenericType("StrStrMap", (Class) Map.class); builder.addColumnCodec("StrStrMap", StringStringMapCodec.class.getName()); // noinspection unchecked - BOOLEAN_MAP_COLUMN_DEFINITION = - ColumnDefinition.fromGenericType("StrBoolMap", (Class) Map.class); + BOOLEAN_MAP_COLUMN_DEFINITION = ColumnDefinition.fromGenericType("StrBoolMap", (Class) Map.class); builder.addColumnCodec("StrBoolMap", StringBooleanMapCodec.class.getName()); // noinspection unchecked - INT_MAP_COLUMN_DEFINITION = - ColumnDefinition.fromGenericType("StrIntMap", (Class) Map.class); + INT_MAP_COLUMN_DEFINITION = ColumnDefinition.fromGenericType("StrIntMap", (Class) Map.class); builder.addColumnCodec("StrIntMap", StringIntMapCodec.class.getName()); // noinspection unchecked - LONG_MAP_COLUMN_DEFINITION = - ColumnDefinition.fromGenericType("StrLongMap", (Class) Map.class); + LONG_MAP_COLUMN_DEFINITION = ColumnDefinition.fromGenericType("StrLongMap", (Class) Map.class); builder.addColumnCodec("StrLongMap", StringLongMapCodec.class.getName()); // noinspection unchecked - FLOAT_MAP_COLUMN_DEFINITION = - ColumnDefinition.fromGenericType("StrFloatMap", (Class) Map.class); + FLOAT_MAP_COLUMN_DEFINITION = ColumnDefinition.fromGenericType("StrFloatMap", (Class) Map.class); builder.addColumnCodec("StrFloatMap", StringFloatMapCodec.class.getName()); // noinspection unchecked - DOUBLE_MAP_COLUMN_DEFINITION 
= - ColumnDefinition.fromGenericType("StrDoubleMap", (Class) Map.class); + DOUBLE_MAP_COLUMN_DEFINITION = ColumnDefinition.fromGenericType("StrDoubleMap", (Class) Map.class); builder.addColumnCodec("StrDoubleMap", StringDoubleMapCodec.class.getName()); writeInstructions = builder.build(); } private static final TableDefinition TABLE_DEFINITION = TableDefinition.of( - STRING_MAP_COLUMN_DEFINITION, - BOOLEAN_MAP_COLUMN_DEFINITION, - DOUBLE_MAP_COLUMN_DEFINITION, - FLOAT_MAP_COLUMN_DEFINITION, - INT_MAP_COLUMN_DEFINITION, - LONG_MAP_COLUMN_DEFINITION); + STRING_MAP_COLUMN_DEFINITION, + BOOLEAN_MAP_COLUMN_DEFINITION, + DOUBLE_MAP_COLUMN_DEFINITION, + FLOAT_MAP_COLUMN_DEFINITION, + INT_MAP_COLUMN_DEFINITION, + LONG_MAP_COLUMN_DEFINITION); @SuppressWarnings("unchecked") private static final Table TABLE = TableTools.newTable(TABLE_DEFINITION, - TableTools.col("StrStrMap", - CollectionUtil.mapFromArray(String.class, String.class, "AK", "AV", "BK", "BV"), null, - Collections.singletonMap("Key", "Value")), - TableTools.col("StrBoolMap", - CollectionUtil.mapFromArray(String.class, Boolean.class, "True", true, "False", false, - "Null", null), - null, Collections.singletonMap("Truthiness", true)), - TableTools.col("StrDoubleMap", - CollectionUtil.mapFromArray(String.class, Double.class, "One", 1.0, "Two", 2.0, "Null", - null), - null, Collections.singletonMap("Pi", Math.PI)), - TableTools.col("StrFloatMap", - CollectionUtil.mapFromArray(String.class, Float.class, "Ten", 10.0f, "Twenty", 20.0f, - "Null", null), - null, Collections.singletonMap("e", (float) Math.E)), - TableTools.col("StrIntMap", - CollectionUtil.mapFromArray(String.class, Integer.class, "Million", 1_000_000, - "Billion", 1_000_000_000, "Null", null), - null, Collections.singletonMap("Negative", -1)), - TableTools.col("StrLongMap", - CollectionUtil.mapFromArray(String.class, Long.class, "Trillion", 1_000_000_000_000L, - "Billion", 1_000_000_000L, "Null", null), - null, Collections.singletonMap("Negative", -1L))); 
+ TableTools.col("StrStrMap", CollectionUtil.mapFromArray(String.class, String.class, "AK", "AV", "BK", "BV"), + null, Collections.singletonMap("Key", "Value")), + TableTools.col("StrBoolMap", + CollectionUtil.mapFromArray(String.class, Boolean.class, "True", true, "False", false, "Null", + null), + null, Collections.singletonMap("Truthiness", true)), + TableTools.col("StrDoubleMap", + CollectionUtil.mapFromArray(String.class, Double.class, "One", 1.0, "Two", 2.0, "Null", null), null, + Collections.singletonMap("Pi", Math.PI)), + TableTools.col("StrFloatMap", + CollectionUtil.mapFromArray(String.class, Float.class, "Ten", 10.0f, "Twenty", 20.0f, "Null", null), + null, Collections.singletonMap("e", (float) Math.E)), + TableTools.col("StrIntMap", + CollectionUtil.mapFromArray(String.class, Integer.class, "Million", 1_000_000, "Billion", + 1_000_000_000, "Null", null), + null, Collections.singletonMap("Negative", -1)), + TableTools + .col("StrLongMap", + CollectionUtil.mapFromArray(String.class, Long.class, "Trillion", 1_000_000_000_000L, + "Billion", 1_000_000_000L, "Null", null), + null, Collections.singletonMap("Negative", -1L))); @Test public void doColumnsTest() throws IOException { diff --git a/DB/src/test/java/io/deephaven/db/v2/TestPartitionAwareSourceTable.java b/DB/src/test/java/io/deephaven/db/v2/TestPartitionAwareSourceTable.java index c3ae9e9fcd8..ff05aa34016 100644 --- a/DB/src/test/java/io/deephaven/db/v2/TestPartitionAwareSourceTable.java +++ b/DB/src/test/java/io/deephaven/db/v2/TestPartitionAwareSourceTable.java @@ -42,28 +42,24 @@ /** * Tests for {@link PartitionAwareSourceTable}. 
*/ -@SuppressWarnings({"AutoBoxing", "JUnit4AnnotatedMethodInJUnit3TestCase", - "AnonymousInnerClassMayBeStatic"}) +@SuppressWarnings({"AutoBoxing", "JUnit4AnnotatedMethodInJUnit3TestCase", "AnonymousInnerClassMayBeStatic"}) public class TestPartitionAwareSourceTable extends LiveTableTestCase { private static final int NUM_COLUMNS = 5; private static final ColumnDefinition PARTITIONING_COLUMN_DEFINITION = - ColumnDefinition.ofString("Date").withPartitioning(); - private static final ColumnDefinition BOOLEAN_COLUMN_DEFINITION = - ColumnDefinition.ofBoolean("Active"); + ColumnDefinition.ofString("Date").withPartitioning(); + private static final ColumnDefinition BOOLEAN_COLUMN_DEFINITION = ColumnDefinition.ofBoolean("Active"); private static final ColumnDefinition CHARACTER_COLUMN_DEFINITION = - ColumnDefinition.ofChar("Type").withGrouping(); - private static final ColumnDefinition INTEGER_COLUMN_DEFINITION = - ColumnDefinition.ofInt("Size"); - private static final ColumnDefinition DOUBLE_COLUMN_DEFINITION = - ColumnDefinition.ofDouble("Price"); + ColumnDefinition.ofChar("Type").withGrouping(); + private static final ColumnDefinition INTEGER_COLUMN_DEFINITION = ColumnDefinition.ofInt("Size"); + private static final ColumnDefinition DOUBLE_COLUMN_DEFINITION = ColumnDefinition.ofDouble("Price"); private static final TableDefinition TABLE_DEFINITION = TableDefinition.of( - PARTITIONING_COLUMN_DEFINITION, - BOOLEAN_COLUMN_DEFINITION, - CHARACTER_COLUMN_DEFINITION, - INTEGER_COLUMN_DEFINITION, - DOUBLE_COLUMN_DEFINITION); + PARTITIONING_COLUMN_DEFINITION, + BOOLEAN_COLUMN_DEFINITION, + CHARACTER_COLUMN_DEFINITION, + INTEGER_COLUMN_DEFINITION, + DOUBLE_COLUMN_DEFINITION); private static final String[] INTERNAL_PARTITIONS = {"0", "1", "2", "1", "0", "1"}; private static final String[] COLUMN_PARTITIONS = {"D0", "D1", "D0", "D3", "D2", "D0"}; @@ -96,8 +92,7 @@ public void setUp() throws Exception { componentFactory = mock(SourceTableComponentFactory.class); 
columnSourceManager = mock(ColumnSourceManager.class); columnSources = TABLE_DEFINITION.getColumnStream().map(cd -> { - final DeferredGroupingColumnSource mocked = - mock(DeferredGroupingColumnSource.class, cd.getName()); + final DeferredGroupingColumnSource mocked = mock(DeferredGroupingColumnSource.class, cd.getName()); checking(new Expectations() { { allowing(mocked).getType(); @@ -142,8 +137,8 @@ public void setUp() throws Exception { checking(new Expectations() { { - oneOf(componentFactory).createColumnSourceManager(with(true), - with(ColumnToCodecMappings.EMPTY), with(equal(TABLE_DEFINITION.getColumns()))); + oneOf(componentFactory).createColumnSourceManager(with(true), with(ColumnToCodecMappings.EMPTY), + with(equal(TABLE_DEFINITION.getColumns()))); will(returnValue(columnSourceManager)); oneOf(columnSourceManager).disableGrouping(); } @@ -151,8 +146,8 @@ public void setUp() throws Exception { expectedIndex = Index.FACTORY.getEmptyIndex(); - SUT = new PartitionAwareSourceTable(TABLE_DEFINITION, "", componentFactory, - locationProvider, LiveTableMonitor.DEFAULT); + SUT = new PartitionAwareSourceTable(TABLE_DEFINITION, "", componentFactory, locationProvider, + LiveTableMonitor.DEFAULT); assertIsSatisfied(); } @@ -169,13 +164,10 @@ public void tearDown() throws Exception { } } - private Map> getIncludedColumnsMap( - final int... indices) { + private Map> getIncludedColumnsMap(final int... indices) { return IntStream.of(indices) - .mapToObj( - ci -> new Pair<>(TABLE_DEFINITION.getColumns()[ci].getName(), columnSources[ci])) - .collect(Collectors.toMap(Pair::getFirst, Pair::getSecond, Assert::neverInvoked, - LinkedHashMap::new)); + .mapToObj(ci -> new Pair<>(TABLE_DEFINITION.getColumns()[ci].getName(), columnSources[ci])) + .collect(Collectors.toMap(Pair::getFirst, Pair::getSecond, Assert::neverInvoked, LinkedHashMap::new)); } private ImmutableTableLocationKey[] locationKeysSlice(final int... 
indexes) { @@ -188,7 +180,7 @@ private ImmutableTableLocationKey[] locationKeysSlice(final int... indexes) { private Set makePassingLocations(final int... indexes) { return Arrays.stream(indexes).mapToObj(li -> tableLocations[li]) - .collect(Collectors.toCollection(LinkedHashSet::new)); + .collect(Collectors.toCollection(LinkedHashSet::new)); } @Test @@ -227,20 +219,18 @@ private enum ConcurrentInstantiationType { } private void doInitializeCheck(final ImmutableTableLocationKey[] tableLocationKeys, - final Set expectPassFilters, final boolean throwException, - final boolean coalesceAndListen) { + final Set expectPassFilters, final boolean throwException, final boolean coalesceAndListen) { doInitializeCheck(tableLocationKeys, expectPassFilters, throwException, coalesceAndListen, - ConcurrentInstantiationType.Idle); + ConcurrentInstantiationType.Idle); } private void doInitializeCheck(final ImmutableTableLocationKey[] tableLocationKeys, - final Set expectPassFilters, final boolean throwException, - final boolean coalesceAndListen, - @NotNull final ConcurrentInstantiationType ciType) { + final Set expectPassFilters, final boolean throwException, final boolean coalesceAndListen, + @NotNull final ConcurrentInstantiationType ciType) { Assert.assertion(!(throwException && !coalesceAndListen), "!(throwException && !listen)"); final TableDataException exception = new TableDataException("test"); - final Index toAdd = Index.FACTORY.getIndexByRange(expectedIndex.lastKey() + 1, - expectedIndex.lastKey() + INDEX_INCREMENT); + final Index toAdd = + Index.FACTORY.getIndexByRange(expectedIndex.lastKey() + 1, expectedIndex.lastKey() + INDEX_INCREMENT); checking(new Expectations() { { @@ -248,10 +238,8 @@ private void doInitializeCheck(final ImmutableTableLocationKey[] tableLocationKe will(new CustomAction("Supply locations") { @Override public Object invoke(Invocation invocation) { - subscriptionBuffer = - (TableLocationSubscriptionBuffer) invocation.getParameter(0); - 
Arrays.stream(tableLocationKeys) - .forEach(subscriptionBuffer::handleTableLocationKey); + subscriptionBuffer = (TableLocationSubscriptionBuffer) invocation.getParameter(0); + Arrays.stream(tableLocationKeys).forEach(subscriptionBuffer::handleTableLocationKey); return null; } }); @@ -274,7 +262,7 @@ public Object invoke(Invocation invocation) { expectedIndex.insert(toAdd); if (coalesceAndListen) { if (ciType == ConcurrentInstantiationType.UpdatingClosed - || ciType == ConcurrentInstantiationType.UpdatingOpen) { + || ciType == ConcurrentInstantiationType.UpdatingOpen) { LiveTableMonitor.DEFAULT.startCycleForUnitTests(); } try { @@ -302,20 +290,20 @@ public Object invoke(Invocation invocation) { @Test public void testConcurrentInstantiationUpdating() { doInitializeCheck(locationKeysSlice(1, 3), makePassingLocations(1, 3), false, true, - ConcurrentInstantiationType.UpdatingClosed); + ConcurrentInstantiationType.UpdatingClosed); doRefreshChangedCheck(); } @Test public void testConcurrentInstantiationUpdatingWithInitialCycleRefresh() { doInitializeCheck(locationKeysSlice(1, 3), makePassingLocations(1, 3), false, true, - ConcurrentInstantiationType.UpdatingOpen); + ConcurrentInstantiationType.UpdatingOpen); doRefreshChangedCheck(); } private void doRefreshChangedCheck() { - final Index toAdd = Index.FACTORY.getIndexByRange(expectedIndex.lastKey() + 1, - expectedIndex.lastKey() + INDEX_INCREMENT); + final Index toAdd = + Index.FACTORY.getIndexByRange(expectedIndex.lastKey() + 1, expectedIndex.lastKey() + INDEX_INCREMENT); checking(new Expectations() { { oneOf(columnSourceManager).refresh(); @@ -327,7 +315,7 @@ private void doRefreshChangedCheck() { @Override public Object invoke(Invocation invocation) { final ShiftAwareListener.Update update = - (ShiftAwareListener.Update) invocation.getParameter(0); + (ShiftAwareListener.Update) invocation.getParameter(0); assertIndexEquals(toAdd, update.added); assertIndexEquals(Index.FACTORY.getEmptyIndex(), update.removed); 
assertIndexEquals(Index.FACTORY.getEmptyIndex(), update.modified); @@ -378,12 +366,11 @@ private void doRefreshExceptionCheck() { oneOf(columnSourceManager).refresh(); will(throwException(exception)); oneOf(listener).getErrorNotification(with(any(TableDataException.class)), - with(any(UpdatePerformanceTracker.Entry.class))); + with(any(UpdatePerformanceTracker.Entry.class))); will(new CustomAction("check exception") { @Override public Object invoke(Invocation invocation) { - assertEquals(exception, - ((Exception) invocation.getParameter(0)).getCause()); + assertEquals(exception, ((Exception) invocation.getParameter(0)).getCause()); return notification; } }); @@ -399,7 +386,7 @@ public Object invoke(Invocation invocation) { } private void doAddLocationsRefreshCheck(final ImmutableTableLocationKey[] tableLocationKeys, - final Set expectPassFilters) { + final Set expectPassFilters) { Arrays.stream(tableLocationKeys).forEach(subscriptionBuffer::handleTableLocationKey); expectPassFilters.forEach(tl -> checking(new Expectations() { @@ -417,8 +404,8 @@ public void testRedefinition() { } private void doTestRedefinition() { - // Note: We expect redefinition to make a new CSM, but no work until we force a coalesce by - // asking for column sources + // Note: We expect redefinition to make a new CSM, but no work until we force a coalesce by asking for column + // sources final ColumnDefinition[] includedColumns1 = new ColumnDefinition[] { PARTITIONING_COLUMN_DEFINITION, CHARACTER_COLUMN_DEFINITION, @@ -430,7 +417,7 @@ private void doTestRedefinition() { IntStream.range(0, includedColumns1.length).forEach(ci -> { final ColumnDefinition columnDefinition = includedColumns1[ci]; final ColumnSource columnSource = - mock(ColumnSource.class, "_CS_" + columnDefinition.getDataType().getSimpleName()); + mock(ColumnSource.class, "_CS_" + columnDefinition.getDataType().getSimpleName()); dataTypeToColumnSource.put(columnDefinition.getDataType(), columnSource); checking(new Expectations() 
{ { @@ -446,8 +433,8 @@ private void doTestRedefinition() { // Setup the table checking(new Expectations() { { - oneOf(componentFactory).createColumnSourceManager(with(true), - with(ColumnToCodecMappings.EMPTY), with(equal(includedColumns1))); + oneOf(componentFactory).createColumnSourceManager(with(true), with(ColumnToCodecMappings.EMPTY), + with(equal(includedColumns1))); will(returnValue(columnSourceManager)); oneOf(columnSourceManager).disableGrouping(); } @@ -469,10 +456,10 @@ public Object invoke(Invocation invocation) { will(returnValue(Index.FACTORY.getEmptyIndex())); oneOf(columnSourceManager).getColumnSources(); will(returnValue( - Arrays.stream(includedColumns1) - .collect(Collectors.toMap(ColumnDefinition::getName, - cd -> dataTypeToColumnSource.get(cd.getDataType()), - Assert::neverInvoked, LinkedHashMap::new)))); + Arrays.stream(includedColumns1) + .collect(Collectors.toMap(ColumnDefinition::getName, + cd -> dataTypeToColumnSource.get(cd.getDataType()), Assert::neverInvoked, + LinkedHashMap::new)))); } }); assertEquals(NUM_COLUMNS - 1, dropColumnsResult1.getColumnSources().size()); @@ -490,14 +477,13 @@ public Object invoke(Invocation invocation) { }; checking(new Expectations() { { - oneOf(componentFactory).createColumnSourceManager(with(true), - with(ColumnToCodecMappings.EMPTY), with(equal(includedColumns2))); + oneOf(componentFactory).createColumnSourceManager(with(true), with(ColumnToCodecMappings.EMPTY), + with(equal(includedColumns2))); will(returnValue(columnSourceManager)); oneOf(columnSourceManager).disableGrouping(); } }); - final Table dropColumnsResult2 = - dropColumnsResult1.dropColumns(CHARACTER_COLUMN_DEFINITION.getName()); + final Table dropColumnsResult2 = dropColumnsResult1.dropColumns(CHARACTER_COLUMN_DEFINITION.getName()); assertIsSatisfied(); assertTrue(dropColumnsResult2 instanceof PartitionAwareSourceTable); // Force a coalesce and make sure it has the right columns @@ -514,10 +500,10 @@ public Object invoke(Invocation 
invocation) { will(returnValue(Index.FACTORY.getEmptyIndex())); oneOf(columnSourceManager).getColumnSources(); will(returnValue( - Arrays.stream(includedColumns2) - .collect(Collectors.toMap(ColumnDefinition::getName, - cd -> dataTypeToColumnSource.get(cd.getDataType()), - Assert::neverInvoked, LinkedHashMap::new)))); + Arrays.stream(includedColumns2) + .collect(Collectors.toMap(ColumnDefinition::getName, + cd -> dataTypeToColumnSource.get(cd.getDataType()), Assert::neverInvoked, + LinkedHashMap::new)))); } }); assertEquals(NUM_COLUMNS - 2, dropColumnsResult2.getColumnSources().size()); @@ -527,8 +513,7 @@ public Object invoke(Invocation invocation) { // Test 3: Rename a column // Nothing to setup for the table - the rename is deferred - final Table renameColumnsResult1 = - dropColumnsResult2.renameColumns("A=" + INTEGER_COLUMN_DEFINITION.getName()); + final Table renameColumnsResult1 = dropColumnsResult2.renameColumns("A=" + INTEGER_COLUMN_DEFINITION.getName()); assertIsSatisfied(); assertTrue(renameColumnsResult1 instanceof DeferredViewTable); // This will not force a coalesce, as dropColumnsResult2 is already coalesced. 
@@ -545,8 +530,8 @@ public Object invoke(Invocation invocation) { }; checking(new Expectations() { { - oneOf(componentFactory).createColumnSourceManager(with(true), - with(ColumnToCodecMappings.EMPTY), with(equal(includedColumns3))); + oneOf(componentFactory).createColumnSourceManager(with(true), with(ColumnToCodecMappings.EMPTY), + with(equal(includedColumns3))); will(returnValue(columnSourceManager)); oneOf(columnSourceManager).disableGrouping(); } @@ -568,10 +553,10 @@ public Object invoke(Invocation invocation) { will(returnValue(Index.FACTORY.getEmptyIndex())); oneOf(columnSourceManager).getColumnSources(); will(returnValue( - Arrays.stream(includedColumns3) - .collect(Collectors.toMap(ColumnDefinition::getName, - cd -> dataTypeToColumnSource.get(cd.getDataType()), - Assert::neverInvoked, LinkedHashMap::new)))); + Arrays.stream(includedColumns3) + .collect(Collectors.toMap(ColumnDefinition::getName, + cd -> dataTypeToColumnSource.get(cd.getDataType()), Assert::neverInvoked, + LinkedHashMap::new)))); } }); assertEquals(NUM_COLUMNS - 4, viewResult1.getColumnSources().size()); @@ -580,8 +565,8 @@ public Object invoke(Invocation invocation) { // Test 5: Add a new derived column on // Setup the table - final Table viewResult2 = viewResult1.updateView("SizeSquared=" - + INTEGER_COLUMN_DEFINITION.getName() + '*' + INTEGER_COLUMN_DEFINITION.getName()); + final Table viewResult2 = viewResult1.updateView( + "SizeSquared=" + INTEGER_COLUMN_DEFINITION.getName() + '*' + INTEGER_COLUMN_DEFINITION.getName()); assertTrue(viewResult2 instanceof DeferredViewTable); assertEquals(NUM_COLUMNS - 3, viewResult2.getColumnSources().size()); assertNotNull(viewResult2.getColumnSource(INTEGER_COLUMN_DEFINITION.getName())); @@ -604,8 +589,8 @@ public Object invoke(Invocation invocation) { @Test public void testSelectDistinctDate() { final Set passedLocations = makePassingLocations(1, 3, 5); - final String[] expectedDistinctDates = IntStream.of(1, 3, 5) - .mapToObj(li -> 
COLUMN_PARTITIONS[li]).distinct().toArray(String[]::new); + final String[] expectedDistinctDates = + IntStream.of(1, 3, 5).mapToObj(li -> COLUMN_PARTITIONS[li]).distinct().toArray(String[]::new); doInitializeCheck(locationKeysSlice(1, 3, 5), passedLocations, false, true); passedLocations.forEach(tl -> checking(new Expectations() { { @@ -623,8 +608,7 @@ public void testSelectDistinctDate() { final Table result = SUT.selectDistinct(PARTITIONING_COLUMN_DEFINITION.getName()); assertIsSatisfied(); // noinspection unchecked - final DataColumn distinctDateColumn = - result.getColumn(PARTITIONING_COLUMN_DEFINITION.getName()); + final DataColumn distinctDateColumn = result.getColumn(PARTITIONING_COLUMN_DEFINITION.getName()); assertEquals(expectedDistinctDates.length, distinctDateColumn.size()); final String[] distinctDates = (String[]) distinctDateColumn.getDirect(); Arrays.sort(expectedDistinctDates); @@ -638,13 +622,12 @@ public void testWhereDate() { checking(new Expectations() { { oneOf(componentFactory).createColumnSourceManager(true, ColumnToCodecMappings.EMPTY, - TABLE_DEFINITION.getColumns()); + TABLE_DEFINITION.getColumns()); will(returnValue(columnSourceManager)); oneOf(columnSourceManager).disableGrouping(); } }); - assertIndexEquals(expectedIndex, - SUT.where(PARTITIONING_COLUMN_DEFINITION.getName() + "=`D0`").getIndex()); + assertIndexEquals(expectedIndex, SUT.where(PARTITIONING_COLUMN_DEFINITION.getName() + "=`D0`").getIndex()); assertIsSatisfied(); } @@ -671,14 +654,12 @@ public Object invoke(@NotNull final Invocation invocation) { return new DummyContext(int.class, (int) invocation.getParameter(0)); } }); - allowing(columnSources[3]).getChunk(with(any(DummyContext.class)), - with(any(OrderedKeys.class))); + allowing(columnSources[3]).getChunk(with(any(DummyContext.class)), with(any(OrderedKeys.class))); will(new CustomAction("Fill dummy chunk") { @Override public Object invoke(@NotNull final Invocation invocation) { final WritableIntChunk destination = - 
((DummyContext) invocation.getParameter(0)).sourceChunk - .asWritableIntChunk(); + ((DummyContext) invocation.getParameter(0)).sourceChunk.asWritableIntChunk(); final int length = ((OrderedKeys) invocation.getParameter(1)).intSize(); destination.fillWithValue(0, length, 1); destination.setSize(length); @@ -688,7 +669,7 @@ public Object invoke(@NotNull final Invocation invocation) { } }); assertIndexEquals(expectedIndex, SUT.where(INTEGER_COLUMN_DEFINITION.getName() + ">0") - .where(CollectionUtil.ZERO_LENGTH_STRING_ARRAY).getIndex()); + .where(CollectionUtil.ZERO_LENGTH_STRING_ARRAY).getIndex()); assertIsSatisfied(); } @@ -698,7 +679,7 @@ public void testWhereDateSize() { checking(new Expectations() { { oneOf(componentFactory).createColumnSourceManager(true, ColumnToCodecMappings.EMPTY, - TABLE_DEFINITION.getColumns()); + TABLE_DEFINITION.getColumns()); will(returnValue(columnSourceManager)); oneOf(columnSourceManager).disableGrouping(); allowing(columnSources[3]).getInt(with(any(long.class))); @@ -710,14 +691,12 @@ public Object invoke(@NotNull final Invocation invocation) { return new DummyContext(int.class, (int) invocation.getParameter(0)); } }); - allowing(columnSources[3]).getChunk(with(any(DummyContext.class)), - with(any(OrderedKeys.class))); + allowing(columnSources[3]).getChunk(with(any(DummyContext.class)), with(any(OrderedKeys.class))); will(new CustomAction("Fill dummy chunk") { @Override public Object invoke(@NotNull final Invocation invocation) { final WritableIntChunk destination = - ((DummyContext) invocation.getParameter(0)).sourceChunk - .asWritableIntChunk(); + ((DummyContext) invocation.getParameter(0)).sourceChunk.asWritableIntChunk(); final int length = ((OrderedKeys) invocation.getParameter(1)).intSize(); destination.fillWithValue(0, length, 1); destination.setSize(length); @@ -726,9 +705,9 @@ public Object invoke(@NotNull final Invocation invocation) { }); } }); - assertIndexEquals(expectedIndex, - 
SUT.where(PARTITIONING_COLUMN_DEFINITION.getName() + "=`D0`", - INTEGER_COLUMN_DEFINITION.getName() + ">0").getIndex()); + assertIndexEquals(expectedIndex, SUT + .where(PARTITIONING_COLUMN_DEFINITION.getName() + "=`D0`", INTEGER_COLUMN_DEFINITION.getName() + ">0") + .getIndex()); assertIsSatisfied(); } } diff --git a/DB/src/test/java/io/deephaven/db/v2/TestPartitioningColumns.java b/DB/src/test/java/io/deephaven/db/v2/TestPartitioningColumns.java index ffba8fe3ea5..dcb7e0bb2c2 100644 --- a/DB/src/test/java/io/deephaven/db/v2/TestPartitioningColumns.java +++ b/DB/src/test/java/io/deephaven/db/v2/TestPartitioningColumns.java @@ -34,25 +34,25 @@ public class TestPartitioningColumns { @Test public void testEverything() { final Table input = newTable( - charCol("Ch", 'C', 'A', 'B'), - byteCol("By", (byte) 1, (byte) 2, (byte) 3), - shortCol("Sh", (short) 1, (short) 2, (short) 3), - intCol("In", 1 << 20, 2 << 20, 3 << 20), - longCol("Lo", 1L << 36, 2L << 36, 3L << 36), - floatCol("Fl", 0.1f, 0.2f, 0.3f), - doubleCol("Do", 0.1, 0.2, 0.3), - dateTimeCol("DT", DBDateTime.now(), DBTimeUtils.plus(DBDateTime.now(), 1), - DBTimeUtils.plus(DBDateTime.now(), 2)), - stringCol("St", "ABC", "DEF", "GHI"), - col("Bo", Boolean.TRUE, Boolean.FALSE, Boolean.TRUE)); + charCol("Ch", 'C', 'A', 'B'), + byteCol("By", (byte) 1, (byte) 2, (byte) 3), + shortCol("Sh", (short) 1, (short) 2, (short) 3), + intCol("In", 1 << 20, 2 << 20, 3 << 20), + longCol("Lo", 1L << 36, 2L << 36, 3L << 36), + floatCol("Fl", 0.1f, 0.2f, 0.3f), + doubleCol("Do", 0.1, 0.2, 0.3), + dateTimeCol("DT", DBDateTime.now(), DBTimeUtils.plus(DBDateTime.now(), 1), + DBTimeUtils.plus(DBDateTime.now(), 2)), + stringCol("St", "ABC", "DEF", "GHI"), + col("Bo", Boolean.TRUE, Boolean.FALSE, Boolean.TRUE)); final RecordingLocationKeyFinder recordingLocationKeyFinder = - new RecordingLocationKeyFinder<>(); + new RecordingLocationKeyFinder<>(); final Map> partitions = new LinkedHashMap<>(); final String[] partitionKeys = 
input.getDefinition().getColumnNamesArray(); // noinspection unchecked final ColumnSource>[] partitionValueSources = - input.getColumnSources().toArray(ColumnSource.ZERO_LENGTH_COLUMN_SOURCE_ARRAY); + input.getColumnSources().toArray(ColumnSource.ZERO_LENGTH_COLUMN_SOURCE_ARRAY); final int numColumns = partitionValueSources.length; input.getIndex().forAllLongs((final long indexKey) -> { for (int ci = 0; ci < numColumns; ++ci) { @@ -61,21 +61,19 @@ public void testEverything() { recordingLocationKeyFinder.accept(new SimpleTableLocationKey(partitions)); }); - final TableDefinition resultDefinition = - new TableDefinition(input.getDefinition().getColumnStream() + final TableDefinition resultDefinition = new TableDefinition(input.getDefinition().getColumnStream() .map(ColumnDefinition::withPartitioning).collect(Collectors.toList())); - final Table result = - new PartitionAwareSourceTable(resultDefinition, "TestPartitioningColumns", + final Table result = new PartitionAwareSourceTable(resultDefinition, "TestPartitioningColumns", RegionedTableComponentFactoryImpl.INSTANCE, new PollingTableLocationProvider<>( - StandaloneTableKey.getInstance(), - recordingLocationKeyFinder, - (tk, tlk, rs) -> { - final DummyTableLocation tl = new DummyTableLocation(tk, tlk); - tl.handleUpdate(Index.CURRENT_FACTORY.getFlatIndex(1), 1L); - return tl; - }, - null), + StandaloneTableKey.getInstance(), + recordingLocationKeyFinder, + (tk, tlk, rs) -> { + final DummyTableLocation tl = new DummyTableLocation(tk, tlk); + tl.handleUpdate(Index.CURRENT_FACTORY.getFlatIndex(1), 1L); + return tl; + }, + null), null); final Table expected = input.sort(input.getDefinition().getColumnNamesArray()); @@ -83,7 +81,7 @@ public void testEverything() { TstUtils.assertTableEquals(expected, result); final SelectFilter[] filters = input.getDefinition().getColumnStream() - .map(cd -> new MatchFilter(cd.getName(), (Object) null)).toArray(SelectFilter[]::new); + .map(cd -> new MatchFilter(cd.getName(), (Object) 
null)).toArray(SelectFilter[]::new); TstUtils.assertTableEquals(expected.where(filters), result.where(filters)); TstUtils.assertTableEquals(expected.selectDistinct(), result.selectDistinct()); @@ -92,7 +90,7 @@ public void testEverything() { private static final class DummyTableLocation extends AbstractTableLocation { protected DummyTableLocation(@NotNull final TableKey tableKey, - @NotNull final TableLocationKey tableLocationKey) { + @NotNull final TableLocationKey tableLocationKey) { super(tableKey, tableLocationKey, false); } @@ -124,56 +122,55 @@ public boolean exists() { @Nullable @Override - public METADATA_TYPE getMetadata( - @NotNull ColumnDefinition columnDefinition) { + public METADATA_TYPE getMetadata(@NotNull ColumnDefinition columnDefinition) { throw new UnsupportedOperationException(); } @Override public ColumnRegionChar makeColumnRegionChar( - @NotNull ColumnDefinition columnDefinition) { + @NotNull ColumnDefinition columnDefinition) { throw new UnsupportedOperationException(); } @Override public ColumnRegionByte makeColumnRegionByte( - @NotNull ColumnDefinition columnDefinition) { + @NotNull ColumnDefinition columnDefinition) { throw new UnsupportedOperationException(); } @Override public ColumnRegionShort makeColumnRegionShort( - @NotNull ColumnDefinition columnDefinition) { + @NotNull ColumnDefinition columnDefinition) { throw new UnsupportedOperationException(); } @Override public ColumnRegionInt makeColumnRegionInt( - @NotNull ColumnDefinition columnDefinition) { + @NotNull ColumnDefinition columnDefinition) { throw new UnsupportedOperationException(); } @Override public ColumnRegionLong makeColumnRegionLong( - @NotNull ColumnDefinition columnDefinition) { + @NotNull ColumnDefinition columnDefinition) { throw new UnsupportedOperationException(); } @Override public ColumnRegionFloat makeColumnRegionFloat( - @NotNull ColumnDefinition columnDefinition) { + @NotNull ColumnDefinition columnDefinition) { throw new UnsupportedOperationException(); } 
@Override public ColumnRegionDouble makeColumnRegionDouble( - @NotNull ColumnDefinition columnDefinition) { + @NotNull ColumnDefinition columnDefinition) { throw new UnsupportedOperationException(); } @Override public ColumnRegionObject makeColumnRegionObject( - @NotNull ColumnDefinition columnDefinition) { + @NotNull ColumnDefinition columnDefinition) { throw new UnsupportedOperationException(); } diff --git a/DB/src/test/java/io/deephaven/db/v2/TestReverseLookupListener.java b/DB/src/test/java/io/deephaven/db/v2/TestReverseLookupListener.java index 1084b21e006..da6c88f8068 100644 --- a/DB/src/test/java/io/deephaven/db/v2/TestReverseLookupListener.java +++ b/DB/src/test/java/io/deephaven/db/v2/TestReverseLookupListener.java @@ -17,13 +17,13 @@ public class TestReverseLookupListener extends LiveTableTestCase { public void testSimple() { final BaseTable source = TstUtils.testRefreshingTable( - i(2, 4, 6, 8), - TstUtils.c("Sentinel", "A", "B", "C", "D"), - TstUtils.c("Sentinel2", "H", "I", "J", "K")); + i(2, 4, 6, 8), + TstUtils.c("Sentinel", "A", "B", "C", "D"), + TstUtils.c("Sentinel2", "H", "I", "J", "K")); io.deephaven.db.tables.utils.TableTools.show(source); final ReverseLookupListener reverseLookupListener = - ReverseLookupListener.makeReverseLookupListenerWithSnapshot(source, "Sentinel"); + ReverseLookupListener.makeReverseLookupListenerWithSnapshot(source, "Sentinel"); assertEquals(2, reverseLookupListener.get("A")); assertEquals(4, reverseLookupListener.get("B")); @@ -33,8 +33,7 @@ public void testSimple() { LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { final Index keysToModify = Index.FACTORY.getIndexByValues(4); - TstUtils.addToTable(source, keysToModify, TstUtils.c("Sentinel", "E"), - TstUtils.c("Sentinel2", "L")); + TstUtils.addToTable(source, keysToModify, TstUtils.c("Sentinel", "E"), TstUtils.c("Sentinel2", "L")); source.notifyListeners(i(), i(), keysToModify); }); @@ -47,7 +46,7 @@ public void testSimple() { 
LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { final Index keysToSwap = Index.FACTORY.getIndexByValues(4, 6); TstUtils.addToTable(source, keysToSwap, TstUtils.c("Sentinel", "C", "E"), - TstUtils.c("Sentinel2", "M", "N")); + TstUtils.c("Sentinel2", "M", "N")); source.notifyListeners(i(), i(), keysToSwap); }); @@ -67,8 +66,7 @@ private static class ReverseLookupEvalNugget implements EvalNuggetInterface { ReverseLookupEvalNugget(DynamicTable source, String... columns) { listener = ReverseLookupListener.makeReverseLookupListenerWithLock(source, columns); - this.columnSources = - Arrays.stream(columns).map(source::getColumnSource).toArray(ColumnSource[]::new); + this.columnSources = Arrays.stream(columns).map(source::getColumnSource).toArray(ColumnSource[]::new); this.source = source; } @@ -88,14 +86,12 @@ public void validate(String msg) { final Object expectedKey = TableTools.getKey(columnSources, row); final long checkRow = listener.get(expectedKey); if (row != checkRow) { - TestCase.fail("invalid row for " + expectedKey + " expected=" + row - + ", actual=" + checkRow); + TestCase.fail("invalid row for " + expectedKey + " expected=" + row + ", actual=" + checkRow); } currentMap.put(expectedKey, row); } - for (final Index.Iterator it = source.getIndex().getPrevIndex().iterator(); it - .hasNext();) { + for (final Index.Iterator it = source.getIndex().getPrevIndex().iterator(); it.hasNext();) { final long row = it.nextLong(); final Object expectedKey = TableTools.getPrevKey(columnSources, row); final long checkRow = listener.getPrev(expectedKey); @@ -147,17 +143,16 @@ public void testIncremental() { final TstUtils.ColumnInfo[] columnInfo; final int size = 100; - final QueryTable table = getTable(size, random, - columnInfo = initColumnInfos(new String[] {"C1", "C2"}, + final QueryTable table = getTable(size, random, columnInfo = initColumnInfos(new String[] {"C1", "C2"}, new TstUtils.UniqueStringGenerator(), new TstUtils.UniqueIntGenerator(1, 1000))); final 
EvalNuggetInterface en[] = LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLocked(() -> new EvalNuggetInterface[] { - new ReverseLookupEvalNugget(table, "C1"), - new ReverseLookupEvalNugget(table, "C2"), - new ReverseLookupEvalNugget(table, "C1", "C2") - }); + .computeLocked(() -> new EvalNuggetInterface[] { + new ReverseLookupEvalNugget(table, "C1"), + new ReverseLookupEvalNugget(table, "C2"), + new ReverseLookupEvalNugget(table, "C1", "C2") + }); final int updateSize = (int) Math.ceil(Math.sqrt(size)); for (int step = 0; step < 100; ++step) { diff --git a/DB/src/test/java/io/deephaven/db/v2/TestSelectOverheadLimiter.java b/DB/src/test/java/io/deephaven/db/v2/TestSelectOverheadLimiter.java index cc553a42c83..8ed92f888f7 100644 --- a/DB/src/test/java/io/deephaven/db/v2/TestSelectOverheadLimiter.java +++ b/DB/src/test/java/io/deephaven/db/v2/TestSelectOverheadLimiter.java @@ -19,11 +19,10 @@ @Category(OutOfBandTest.class) public class TestSelectOverheadLimiter extends LiveTableTestCase { public void testSelectOverheadLimiter() { - final QueryTable queryTable = - TstUtils.testRefreshingTable(Index.FACTORY.getIndexByRange(0, 100)); + final QueryTable queryTable = TstUtils.testRefreshingTable(Index.FACTORY.getIndexByRange(0, 100)); final Table sentinelTable = queryTable.updateView("Sentinel=k"); final Table densified = LiveTableMonitor.DEFAULT.sharedLock() - .computeLocked(() -> SelectOverheadLimiter.clampSelectOverhead(sentinelTable, 3.0)); + .computeLocked(() -> SelectOverheadLimiter.clampSelectOverhead(sentinelTable, 3.0)); assertEquals(densified.getIndex(), sentinelTable.getIndex()); assertTableEquals(sentinelTable, densified); @@ -65,11 +64,10 @@ public void testSelectOverheadLimiter() { } public void testShift() { - final QueryTable queryTable = - TstUtils.testRefreshingTable(Index.FACTORY.getIndexByRange(0, 100)); + final QueryTable queryTable = TstUtils.testRefreshingTable(Index.FACTORY.getIndexByRange(0, 100)); final Table sentinelTable = 
queryTable.updateView("Sentinel=ii"); final Table densified = LiveTableMonitor.DEFAULT.sharedLock() - .computeLocked(() -> SelectOverheadLimiter.clampSelectOverhead(sentinelTable, 3.0)); + .computeLocked(() -> SelectOverheadLimiter.clampSelectOverhead(sentinelTable, 3.0)); assertEquals(densified.getIndex(), sentinelTable.getIndex()); assertTableEquals(sentinelTable, densified); @@ -116,67 +114,51 @@ private void testByExternal(int seed) { final int size = 10; final TstUtils.ColumnInfo[] columnInfo = new TstUtils.ColumnInfo[3]; - columnInfo[0] = - new TstUtils.ColumnInfo<>(new TstUtils.SetGenerator<>("a", "b", "c", "d", "e"), "Sym", + columnInfo[0] = new TstUtils.ColumnInfo<>(new TstUtils.SetGenerator<>("a", "b", "c", "d", "e"), "Sym", TstUtils.ColumnInfo.ColAttributes.Immutable); columnInfo[1] = new TstUtils.ColumnInfo<>(new TstUtils.IntGenerator(10, 20), "intCol", - TstUtils.ColumnInfo.ColAttributes.Immutable); - columnInfo[2] = - new TstUtils.ColumnInfo<>(new TstUtils.SetGenerator<>(10.1, 20.1, 30.1), "doubleCol"); + TstUtils.ColumnInfo.ColAttributes.Immutable); + columnInfo[2] = new TstUtils.ColumnInfo<>(new TstUtils.SetGenerator<>(10.1, 20.1, 30.1), "doubleCol"); final QueryTable queryTable = getTable(size, random, columnInfo); - final Table simpleTable = - TableTools.newTable(TableTools.col("Sym", "a"), TableTools.intCol("intCol", 30), + final Table simpleTable = TableTools.newTable(TableTools.col("Sym", "a"), TableTools.intCol("intCol", 30), TableTools.doubleCol("doubleCol", 40.1)).updateView("K=-2L"); - final Table source = LiveTableMonitor.DEFAULT.sharedLock().computeLocked( - () -> TableTools.merge(simpleTable, queryTable.updateView("K=k")).flatten()); + final Table source = LiveTableMonitor.DEFAULT.sharedLock() + .computeLocked(() -> TableTools.merge(simpleTable, queryTable.updateView("K=k")).flatten()); final EvalNuggetInterface[] en = new EvalNuggetInterface[] { - EvalNugget.Sorted - .from( - () -> LiveTableMonitor.DEFAULT.sharedLock() - 
.computeLocked(() -> SelectOverheadLimiter - .clampSelectOverhead(source.byExternal("Sym").merge(), 2.0)), - "Sym"), EvalNugget.Sorted.from( - () -> LiveTableMonitor.DEFAULT.sharedLock() + () -> LiveTableMonitor.DEFAULT.sharedLock().computeLocked( + () -> SelectOverheadLimiter.clampSelectOverhead(source.byExternal("Sym").merge(), 2.0)), + "Sym"), + EvalNugget.Sorted.from(() -> LiveTableMonitor.DEFAULT.sharedLock() .computeLocked(() -> SelectOverheadLimiter - .clampSelectOverhead(source.byExternal("Sym").merge(), 2.0).select()), - "Sym"), - EvalNugget.Sorted - .from( - () -> LiveTableMonitor.DEFAULT.sharedLock() - .computeLocked(() -> SelectOverheadLimiter - .clampSelectOverhead(source.byExternal("Sym").merge(), 4.0)), + .clampSelectOverhead(source.byExternal("Sym").merge(), 2.0).select()), "Sym"), - EvalNugget.Sorted - .from( - () -> LiveTableMonitor.DEFAULT.sharedLock() - .computeLocked(() -> SelectOverheadLimiter - .clampSelectOverhead(source.byExternal("Sym").merge(), 4.5)), + EvalNugget.Sorted.from( + () -> LiveTableMonitor.DEFAULT.sharedLock().computeLocked( + () -> SelectOverheadLimiter.clampSelectOverhead(source.byExternal("Sym").merge(), 4.0)), "Sym"), EvalNugget.Sorted.from( - () -> LiveTableMonitor.DEFAULT.sharedLock() + () -> LiveTableMonitor.DEFAULT.sharedLock().computeLocked( + () -> SelectOverheadLimiter.clampSelectOverhead(source.byExternal("Sym").merge(), 4.5)), + "Sym"), + EvalNugget.Sorted.from(() -> LiveTableMonitor.DEFAULT.sharedLock() .computeLocked(() -> SelectOverheadLimiter - .clampSelectOverhead(source.byExternal("Sym").merge(), 4.5).select()), - "Sym"), - EvalNugget.Sorted - .from( - () -> LiveTableMonitor.DEFAULT.sharedLock() - .computeLocked(() -> SelectOverheadLimiter - .clampSelectOverhead(source.byExternal("Sym").merge(), 5.0)), + .clampSelectOverhead(source.byExternal("Sym").merge(), 4.5).select()), "Sym"), EvalNugget.Sorted.from( - () -> LiveTableMonitor.DEFAULT - .sharedLock() + () -> 
LiveTableMonitor.DEFAULT.sharedLock().computeLocked( + () -> SelectOverheadLimiter.clampSelectOverhead(source.byExternal("Sym").merge(), 5.0)), + "Sym"), + EvalNugget.Sorted.from(() -> LiveTableMonitor.DEFAULT.sharedLock() .computeLocked(() -> SelectOverheadLimiter - .clampSelectOverhead(source.byExternal("Sym").merge(), 10.0).select()), - "Sym"), - EvalNugget.Sorted.from( - () -> LiveTableMonitor.DEFAULT.sharedLock() + .clampSelectOverhead(source.byExternal("Sym").merge(), 10.0).select()), + "Sym"), + EvalNugget.Sorted.from(() -> LiveTableMonitor.DEFAULT.sharedLock() .computeLocked(() -> SelectOverheadLimiter - .clampSelectOverhead(source.byExternal("Sym").merge(), 10.0).select()), - "Sym"), + .clampSelectOverhead(source.byExternal("Sym").merge(), 10.0).select()), + "Sym"), }; final int steps = 10; @@ -189,14 +171,13 @@ private void testByExternal(int seed) { } public void testScope() { - final QueryTable queryTable = - TstUtils.testRefreshingTable(Index.FACTORY.getIndexByRange(0, 100)); + final QueryTable queryTable = TstUtils.testRefreshingTable(Index.FACTORY.getIndexByRange(0, 100)); final SafeCloseable scopeCloseable = LivenessScopeStack.open(); final Table sentinelTable = queryTable.updateView("Sentinel=k"); final Table densified = LiveTableMonitor.DEFAULT.sharedLock() - .computeLocked(() -> SelectOverheadLimiter.clampSelectOverhead(sentinelTable, 3.0)); + .computeLocked(() -> SelectOverheadLimiter.clampSelectOverhead(sentinelTable, 3.0)); assertEquals(densified.getIndex(), sentinelTable.getIndex()); assertTableEquals(sentinelTable, densified); diff --git a/DB/src/test/java/io/deephaven/db/v2/TestSelectPreserveGrouping.java b/DB/src/test/java/io/deephaven/db/v2/TestSelectPreserveGrouping.java index 5ce34ce7028..598460a445e 100644 --- a/DB/src/test/java/io/deephaven/db/v2/TestSelectPreserveGrouping.java +++ b/DB/src/test/java/io/deephaven/db/v2/TestSelectPreserveGrouping.java @@ -51,15 +51,13 @@ private static void cleanupPersistence(String root) { } public 
void testPreserveGrouping() { - final Table x = - TstUtils.testTable(TstUtils.cG("Sym", "AAPL", "AAPL", "BRK", "BRK", "TSLA", "TLSA"), + final Table x = TstUtils.testTable(TstUtils.cG("Sym", "AAPL", "AAPL", "BRK", "BRK", "TSLA", "TLSA"), intCol("Sentinel", 1, 2, 3, 4, 5, 6)); assertTrue(x.getIndex().hasGrouping(x.getColumnSource("Sym"))); assertFalse(x.getIndex().hasGrouping(x.getColumnSource("Sentinel"))); QueryScope.addParam("switchColumnValue", 1); - final Table xs = - x.select("Sym", "SentinelDoubled=Sentinel*2", "Foo=switchColumnValue", "Sentinel"); + final Table xs = x.select("Sym", "SentinelDoubled=Sentinel*2", "Foo=switchColumnValue", "Sentinel"); assertTableEquals(x, xs.view("Sym", "Sentinel")); assertTrue(xs.getIndex().hasGrouping(xs.getColumnSource("Sym"))); @@ -72,8 +70,7 @@ public void testPreserveDeferredGrouping() throws IOException { final File testDirectory = Files.createTempDirectory("DeferredGroupingTest").toFile(); final File dest = new File(testDirectory, "Table.parquet"); try { - final ColumnHolder symHolder = - TstUtils.cG("Sym", "AAPL", "AAPL", "BRK", "BRK", "TSLA", "TLSA"); + final ColumnHolder symHolder = TstUtils.cG("Sym", "AAPL", "AAPL", "BRK", "BRK", "TSLA", "TLSA"); final ColumnHolder sentinelHolder = intCol("Sentinel", 1, 2, 3, 4, 5, 6); final Map columns = new LinkedHashMap<>(); @@ -81,8 +78,8 @@ public void testPreserveDeferredGrouping() throws IOException { columns.put("Sym", TstUtils.getTreeMapColumnSource(index, symHolder)); columns.put("Sentinel", TstUtils.getTreeMapColumnSource(index, sentinelHolder)); final TableDefinition definition = TableDefinition.of( - ColumnDefinition.ofString("Sym").withGrouping(), - ColumnDefinition.ofInt("Sentinel")); + ColumnDefinition.ofString("Sym").withGrouping(), + ColumnDefinition.ofInt("Sentinel")); final Table x = new QueryTable(definition, index, columns); assertTrue(x.getDefinition().getColumn("Sym").isGrouping()); diff --git a/DB/src/test/java/io/deephaven/db/v2/TestSimpleSourceTable.java 
b/DB/src/test/java/io/deephaven/db/v2/TestSimpleSourceTable.java index e9dd8de3aad..1bd6821ff16 100644 --- a/DB/src/test/java/io/deephaven/db/v2/TestSimpleSourceTable.java +++ b/DB/src/test/java/io/deephaven/db/v2/TestSimpleSourceTable.java @@ -31,20 +31,17 @@ public class TestSimpleSourceTable extends LiveTableTestCase { private static final int NUM_COLUMNS = 4; - private static final ColumnDefinition BOOLEAN_COLUMN_DEFINITION = - ColumnDefinition.ofBoolean("Active"); + private static final ColumnDefinition BOOLEAN_COLUMN_DEFINITION = ColumnDefinition.ofBoolean("Active"); private static final ColumnDefinition CHARACTER_COLUMN_DEFINITION = - ColumnDefinition.ofChar("Type").withGrouping(); - private static final ColumnDefinition INTEGER_COLUMN_DEFINITION = - ColumnDefinition.ofInt("Size"); - private static final ColumnDefinition DOUBLE_COLUMN_DEFINITION = - ColumnDefinition.ofDouble("Price"); + ColumnDefinition.ofChar("Type").withGrouping(); + private static final ColumnDefinition INTEGER_COLUMN_DEFINITION = ColumnDefinition.ofInt("Size"); + private static final ColumnDefinition DOUBLE_COLUMN_DEFINITION = ColumnDefinition.ofDouble("Price"); private static final TableDefinition TABLE_DEFINITION = TableDefinition.of( - BOOLEAN_COLUMN_DEFINITION, - CHARACTER_COLUMN_DEFINITION, - INTEGER_COLUMN_DEFINITION, - DOUBLE_COLUMN_DEFINITION); + BOOLEAN_COLUMN_DEFINITION, + CHARACTER_COLUMN_DEFINITION, + INTEGER_COLUMN_DEFINITION, + DOUBLE_COLUMN_DEFINITION); private static final long INDEX_INCREMENT = 1000; @@ -68,8 +65,7 @@ public void setUp() throws Exception { componentFactory = mock(SourceTableComponentFactory.class); columnSourceManager = mock(ColumnSourceManager.class); columnSources = TABLE_DEFINITION.getColumnStream().map(cd -> { - final DeferredGroupingColumnSource mocked = - mock(DeferredGroupingColumnSource.class, cd.getName()); + final DeferredGroupingColumnSource mocked = mock(DeferredGroupingColumnSource.class, cd.getName()); checking(new Expectations() { { 
allowing(mocked).getType(); @@ -86,8 +82,7 @@ public void setUp() throws Exception { { allowing(locationProvider).getTableLocationKeys(); will(returnValue(Collections.singleton(StandaloneTableLocationKey.getInstance()))); - allowing(locationProvider) - .getTableLocation(with(StandaloneTableLocationKey.getInstance())); + allowing(locationProvider).getTableLocation(with(StandaloneTableLocationKey.getInstance())); will(returnValue(tableLocation)); allowing(tableLocation).supportsSubscriptions(); will(returnValue(true)); @@ -101,9 +96,9 @@ public void setUp() throws Exception { checking(new Expectations() { { oneOf(componentFactory).createColumnSourceManager( - with(false), - with(ColumnToCodecMappings.EMPTY), - with(equal(TABLE_DEFINITION.getColumns()))); + with(false), + with(ColumnToCodecMappings.EMPTY), + with(equal(TABLE_DEFINITION.getColumns()))); will(returnValue(columnSourceManager)); } }); @@ -122,31 +117,24 @@ public void tearDown() throws Exception { } private static ColumnDefinition[] getIncludedColumnDefs(final int... indices) { - return IntStream.of(indices).mapToObj(ci -> TABLE_DEFINITION.getColumns()[ci]) - .toArray(ColumnDefinition[]::new); + return IntStream.of(indices).mapToObj(ci -> TABLE_DEFINITION.getColumns()[ci]).toArray(ColumnDefinition[]::new); } private static String[] getIncludedColumnNames(final int... indices) { - return IntStream.of(indices).mapToObj(ci -> TABLE_DEFINITION.getColumns()[ci].getName()) - .toArray(String[]::new); + return IntStream.of(indices).mapToObj(ci -> TABLE_DEFINITION.getColumns()[ci].getName()).toArray(String[]::new); } - private static String[] getExcludedColumnNames(final TableDefinition currentDef, - final int... 
indices) { - final Set includedNames = - IntStream.of(indices).mapToObj(ci -> TABLE_DEFINITION.getColumns()[ci].getName()) - .collect(Collectors.toSet()); - return currentDef.getColumnStream().map(ColumnDefinition::getName) - .filter(n -> !includedNames.contains(n)).toArray(String[]::new); + private static String[] getExcludedColumnNames(final TableDefinition currentDef, final int... indices) { + final Set includedNames = IntStream.of(indices) + .mapToObj(ci -> TABLE_DEFINITION.getColumns()[ci].getName()).collect(Collectors.toSet()); + return currentDef.getColumnStream().map(ColumnDefinition::getName).filter(n -> !includedNames.contains(n)) + .toArray(String[]::new); } - private Map> getIncludedColumnsMap( - final int... indices) { + private Map> getIncludedColumnsMap(final int... indices) { return IntStream.of(indices) - .mapToObj( - ci -> new Pair<>(TABLE_DEFINITION.getColumns()[ci].getName(), columnSources[ci])) - .collect(Collectors.toMap(Pair::getFirst, Pair::getSecond, Assert::neverInvoked, - LinkedHashMap::new)); + .mapToObj(ci -> new Pair<>(TABLE_DEFINITION.getColumns()[ci].getName(), columnSources[ci])) + .collect(Collectors.toMap(Pair::getFirst, Pair::getSecond, Assert::neverInvoked, LinkedHashMap::new)); } @Test @@ -160,11 +148,11 @@ public void testInitializeException() { } private void doSingleLocationInitializeCheck(final boolean throwException, - @SuppressWarnings("SameParameterValue") final boolean coalesce) { + @SuppressWarnings("SameParameterValue") final boolean coalesce) { Assert.assertion(!(throwException && !coalesce), "!(throwException && !listen)"); final TableDataException exception = new TableDataException("test"); - final Index toAdd = Index.FACTORY.getIndexByRange(expectedIndex.lastKey() + 1, - expectedIndex.lastKey() + INDEX_INCREMENT); + final Index toAdd = + Index.FACTORY.getIndexByRange(expectedIndex.lastKey() + 1, expectedIndex.lastKey() + INDEX_INCREMENT); checking(new Expectations() { { @@ -206,8 +194,8 @@ public void 
testRedefinition() { } private void doTestRedefinition() { - // Note: We expect redefinition to make a new CSM, but no work until we force a coalesce by - // asking for column sources + // Note: We expect redefinition to make a new CSM, but no work until we force a coalesce by asking for column + // sources // Test 1: Drop a column // Setup the table @@ -215,14 +203,14 @@ private void doTestRedefinition() { checking(new Expectations() { { oneOf(componentFactory).createColumnSourceManager( - with(false), - with(ColumnToCodecMappings.EMPTY), - with(equal(getIncludedColumnDefs(includedColumnIndices1)))); + with(false), + with(ColumnToCodecMappings.EMPTY), + with(equal(getIncludedColumnDefs(includedColumnIndices1)))); will(returnValue(columnSourceManager)); } }); final Table dropColumnsResult1 = - SUT.dropColumns(getExcludedColumnNames(SUT.getDefinition(), includedColumnIndices1)); + SUT.dropColumns(getExcludedColumnNames(SUT.getDefinition(), includedColumnIndices1)); assertIsSatisfied(); assertTrue(dropColumnsResult1 instanceof SimpleSourceTable); // Force a coalesce and make sure it has the right columns @@ -248,14 +236,14 @@ private void doTestRedefinition() { checking(new Expectations() { { oneOf(componentFactory).createColumnSourceManager( - with(false), - with(ColumnToCodecMappings.EMPTY), - with(equal(getIncludedColumnDefs(includedColumnIndices2)))); + with(false), + with(ColumnToCodecMappings.EMPTY), + with(equal(getIncludedColumnDefs(includedColumnIndices2)))); will(returnValue(columnSourceManager)); } }); - final Table dropColumnsResult2 = dropColumnsResult1.dropColumns( - getExcludedColumnNames(dropColumnsResult1.getDefinition(), includedColumnIndices2)); + final Table dropColumnsResult2 = dropColumnsResult1 + .dropColumns(getExcludedColumnNames(dropColumnsResult1.getDefinition(), includedColumnIndices2)); assertIsSatisfied(); assertTrue(dropColumnsResult2 instanceof SimpleSourceTable); // Force a coalesce and make sure it has the right columns @@ -276,8 
+264,7 @@ private void doTestRedefinition() { // Test 3: Rename a column // Nothing to setup for the table - the rename is deferred - final Table renameColumnsResult1 = - dropColumnsResult2.renameColumns("A=" + INTEGER_COLUMN_DEFINITION.getName()); + final Table renameColumnsResult1 = dropColumnsResult2.renameColumns("A=" + INTEGER_COLUMN_DEFINITION.getName()); assertIsSatisfied(); assertTrue(renameColumnsResult1 instanceof DeferredViewTable); // This will not force a coalesce, as dropColumnsResult2 is already coalesced. @@ -292,14 +279,13 @@ private void doTestRedefinition() { checking(new Expectations() { { oneOf(componentFactory).createColumnSourceManager( - with(false), - with(ColumnToCodecMappings.EMPTY), - with(equal(getIncludedColumnDefs(includedColumnIndices3)))); + with(false), + with(ColumnToCodecMappings.EMPTY), + with(equal(getIncludedColumnDefs(includedColumnIndices3)))); will(returnValue(columnSourceManager)); } }); - final Table viewResult1 = - dropColumnsResult2.view(getIncludedColumnNames(includedColumnIndices3)); + final Table viewResult1 = dropColumnsResult2.view(getIncludedColumnNames(includedColumnIndices3)); assertIsSatisfied(); assertTrue(viewResult1 instanceof SimpleSourceTable); // Force a coalesce and make sure it has the right columns @@ -319,8 +305,8 @@ private void doTestRedefinition() { // Test 5: Add a new derived column on // Setup the table - final Table viewResult2 = viewResult1.updateView("SizeSquared=" - + INTEGER_COLUMN_DEFINITION.getName() + '*' + INTEGER_COLUMN_DEFINITION.getName()); + final Table viewResult2 = viewResult1.updateView( + "SizeSquared=" + INTEGER_COLUMN_DEFINITION.getName() + '*' + INTEGER_COLUMN_DEFINITION.getName()); assertTrue(viewResult2 instanceof DeferredViewTable); assertEquals(NUM_COLUMNS - 2, viewResult2.getColumnSources().size()); assertNotNull(viewResult2.getColumnSource(INTEGER_COLUMN_DEFINITION.getName())); diff --git a/DB/src/test/java/io/deephaven/db/v2/TestSort.java 
b/DB/src/test/java/io/deephaven/db/v2/TestSort.java index 5abef7e825a..73c484fe5bd 100644 --- a/DB/src/test/java/io/deephaven/db/v2/TestSort.java +++ b/DB/src/test/java/io/deephaven/db/v2/TestSort.java @@ -30,7 +30,7 @@ public class TestSort extends BaseArrayTestCase { private static final boolean ENABLE_COMPILER_TOOLS_LOGGING = Configuration.getInstance() - .getBooleanForClassWithDefault(TestSort.class, "CompilerTools.logEnabled", false); + .getBooleanForClassWithDefault(TestSort.class, "CompilerTools.logEnabled", false); private boolean lastMemoize = false; private boolean oldCompilerToolsLogEnabled; @@ -57,14 +57,13 @@ interface ThrowingConsumer { void consume(A val) throws T; } - private void assertException(QueryTable t, - ThrowingConsumer r, String failMessage, Class excType) { + private void assertException(QueryTable t, ThrowingConsumer r, + String failMessage, Class excType) { try { r.consume(t); fail(failMessage); } catch (Exception e) { - assertTrue(e.getClass().toString() + " is not a " + excType.toString(), - excType.isInstance(e)); + assertTrue(e.getClass().toString() + " is not a " + excType.toString(), excType.isInstance(e)); } } @@ -98,29 +97,28 @@ public void testRestrictedSortingwhere() { source.restrictSortTo("Column1", "Column3"); assertException(source, (t) -> t.assertSortable(t.getDefinition().getColumnNamesArray()), - "Columns 1 and 3 should not be sortable.", NotSortableException.class); + "Columns 1 and 3 should not be sortable.", NotSortableException.class); source.assertSortable("Column1", "Column3"); QueryTable temp = (QueryTable) source.sort("Column3"); - assertException(temp, (t) -> t.sort("Column2"), "Column2 should not be sortable", - NotSortableException.class); - assertException(source, (t) -> t.sortDescending("Column2"), - "Should not be able to sort by Column2", NotSortableException.class); + assertException(temp, (t) -> t.sort("Column2"), "Column2 should not be sortable", NotSortableException.class); + assertException(source, 
(t) -> t.sortDescending("Column2"), "Should not be able to sort by Column2", + NotSortableException.class); // Check where() temp = (QueryTable) source.where("Column2 > 24"); temp.sort("Column3"); temp.sort("Column1"); - assertException(temp, (t) -> t.sortDescending("Column2"), - "Should not be able to sort by Column2", NotSortableException.class); + assertException(temp, (t) -> t.sortDescending("Column2"), "Should not be able to sort by Column2", + NotSortableException.class); temp.clearSortingRestrictions(); temp.assertSortable(temp.getDefinition().getColumnNamesArray()); temp.sort(temp.getDefinition().getColumnNamesArray()); assertException(source, (t) -> t.assertSortable(t.getDefinition().getColumnNamesArray()), - "Columns 1 and 3 should not be sortable.", NotSortableException.class); + "Columns 1 and 3 should not be sortable.", NotSortableException.class); temp = (QueryTable) temp.clearSortingRestrictions(); temp.assertSortable(temp.getDefinition().getColumnNamesArray()); @@ -136,10 +134,9 @@ public void testRestrictedSortingSelect() { // Check Select QueryTable temp = (QueryTable) source.select(); assertException(temp, (t) -> t.sort(t.getDefinition().getColumnNamesArray()), - "Columns 0 and 2 should not be sortable.", NotSortableException.class); + "Columns 0 and 2 should not be sortable.", NotSortableException.class); - testRestrictedSortingViewSelect( - (t, a) -> (QueryTable) (a == null || a.length <= 0 ? t.select() : t.select(a))); + testRestrictedSortingViewSelect((t, a) -> (QueryTable) (a == null || a.length <= 0 ? 
t.select() : t.select(a))); } public void testRestrictSortingView() { @@ -153,46 +150,45 @@ private void testRestrictedSortingViewSelect(BiFunction t.sortDescending("Column3"), - "Should not be able to sort by Column3", NoSuchColumnException.class); - assertException(temp, (t) -> t.sortDescending("Column2"), - "Should not be able to sort by Column2", NotSortableException.class); + assertException(temp, (t) -> t.sortDescending("Column3"), "Should not be able to sort by Column3", + NoSuchColumnException.class); + assertException(temp, (t) -> t.sortDescending("Column2"), "Should not be able to sort by Column2", + NotSortableException.class); - temp = func.apply(source, new String[] {"Column1a=Column1", "Column3=Column3+Column1", - "Column2=Column2", "Column5=Column3", "Column5=Column0"}); + temp = func.apply(source, new String[] {"Column1a=Column1", "Column3=Column3+Column1", "Column2=Column2", + "Column5=Column3", "Column5=Column0"}); temp.sort("Column1a"); - assertException(temp, (t) -> t.sortDescending("Column3"), - "Should not be able to sort by Column3", NotSortableException.class); - assertException(temp, (t) -> t.sortDescending("Column5"), - "Should not be able to sort by Column5", NotSortableException.class); - assertException(temp, (t) -> t.sortDescending("Column2"), - "Should not be able to sort by Column2", NotSortableException.class); - - temp = func.apply(source, new String[] {"Column1a=Column1", "Column3=Column3+Column1", - "Column2=Column2", "Column5=Column3", "Column5=Column1"}); + assertException(temp, (t) -> t.sortDescending("Column3"), "Should not be able to sort by Column3", + NotSortableException.class); + assertException(temp, (t) -> t.sortDescending("Column5"), "Should not be able to sort by Column5", + NotSortableException.class); + assertException(temp, (t) -> t.sortDescending("Column2"), "Should not be able to sort by Column2", + NotSortableException.class); + + temp = func.apply(source, new String[] {"Column1a=Column1", 
"Column3=Column3+Column1", "Column2=Column2", + "Column5=Column3", "Column5=Column1"}); temp.sort("Column1a"); temp.sort("Column5"); - assertException(temp, (t) -> t.sortDescending("Column3"), - "Should not be able to sort by Column3", NotSortableException.class); - assertException(temp, (t) -> t.sortDescending("Column2"), - "Should not be able to sort by Column2", NotSortableException.class); - - temp = func.apply(source, - new String[] {"Column1a=Column1", "Column0=Column3", "Column2=Column2", - "Column5=Column3", "Column5=Column1", "Column1a=Column3+Column0"}); - assertException(temp, (t) -> t.sortDescending("Column1a"), - "Should not be able to sort by Column1a", NotSortableException.class); + assertException(temp, (t) -> t.sortDescending("Column3"), "Should not be able to sort by Column3", + NotSortableException.class); + assertException(temp, (t) -> t.sortDescending("Column2"), "Should not be able to sort by Column2", + NotSortableException.class); + + temp = func.apply(source, new String[] {"Column1a=Column1", "Column0=Column3", "Column2=Column2", + "Column5=Column3", "Column5=Column1", "Column1a=Column3+Column0"}); + assertException(temp, (t) -> t.sortDescending("Column1a"), "Should not be able to sort by Column1a", + NotSortableException.class); temp.sort("Column5"); temp.sort("Column0"); - assertException(temp, (t) -> t.sortDescending("Column3"), - "Should not be able to sort by Column3", NoSuchColumnException.class); - assertException(temp, (t) -> t.sortDescending("Column2"), - "Should not be able to sort by Column2", NotSortableException.class); + assertException(temp, (t) -> t.sortDescending("Column3"), "Should not be able to sort by Column3", + NoSuchColumnException.class); + assertException(temp, (t) -> t.sortDescending("Column2"), "Should not be able to sort by Column2", + NotSortableException.class); } public void testRestrictedSortingUpdate() { @@ -203,55 +199,53 @@ public void testRestrictedSortingUpdateView() { 
testRestrictedSortingUpdateUpdateView((t, a) -> (QueryTable) t.updateView(a)); } - private void testRestrictedSortingUpdateUpdateView( - BiFunction func) { + private void testRestrictedSortingUpdateUpdateView(BiFunction func) { final QueryTable source = generateSortTesterTable(4, 1024, new IntGenerator(1000)); // All columns should be sortable source.assertSortable(source.getDefinition().getColumnNamesArray()); source.restrictSortTo("Column1", "Column3"); - QueryTable temp = func.apply(source, new String[] {"Column1a=Column1", "Column0=Column3", - "Column2=Column2", "Column5=Column3", "Column5=Column1"}); + QueryTable temp = func.apply(source, new String[] {"Column1a=Column1", "Column0=Column3", "Column2=Column2", + "Column5=Column3", "Column5=Column1"}); temp.sort("Column1a"); temp.sort("Column5"); temp.sort("Column0"); temp.sortDescending("Column1"); temp.sortDescending("Column3"); - assertException(temp, (t) -> t.sortDescending("Column2"), - "Should not be able to sort by Column2", NotSortableException.class); + assertException(temp, (t) -> t.sortDescending("Column2"), "Should not be able to sort by Column2", + NotSortableException.class); - temp = func.apply(source, new String[] {"Column1a=Column1", "Column3=Column3+Column1", - "Column2=Column2", "Column5=Column3", "Column5=Column0"}); + temp = func.apply(source, new String[] {"Column1a=Column1", "Column3=Column3+Column1", "Column2=Column2", + "Column5=Column3", "Column5=Column0"}); temp.sort("Column1a"); - assertException(temp, (t) -> t.sortDescending("Column3"), - "Should not be able to sort by Column3", NotSortableException.class); - assertException(temp, (t) -> t.sortDescending("Column5"), - "Should not be able to sort by Column5", NotSortableException.class); - assertException(temp, (t) -> t.sortDescending("Column2"), - "Should not be able to sort by Column2", NotSortableException.class); - - temp = func.apply(source, new String[] {"Column1a=Column1", "Column3=Column3+Column1", - "Column2=Column2", 
"Column5=Column3", "Column5=Column1"}); + assertException(temp, (t) -> t.sortDescending("Column3"), "Should not be able to sort by Column3", + NotSortableException.class); + assertException(temp, (t) -> t.sortDescending("Column5"), "Should not be able to sort by Column5", + NotSortableException.class); + assertException(temp, (t) -> t.sortDescending("Column2"), "Should not be able to sort by Column2", + NotSortableException.class); + + temp = func.apply(source, new String[] {"Column1a=Column1", "Column3=Column3+Column1", "Column2=Column2", + "Column5=Column3", "Column5=Column1"}); temp.sort("Column1a"); temp.sort("Column5"); - assertException(temp, (t) -> t.sortDescending("Column3"), - "Should not be able to sort by Column3", NotSortableException.class); - assertException(temp, (t) -> t.sortDescending("Column2"), - "Should not be able to sort by Column2", NotSortableException.class); - - temp = func.apply(source, - new String[] {"Column1a=Column1", "Column0=Column3", "Column2=Column2", - "Column5=Column3", "Column5=Column1", "Column1a=Column3+Column0"}); - assertException(temp, (t) -> t.sortDescending("Column1a"), - "Should not be able to sort by Column1a", NotSortableException.class); + assertException(temp, (t) -> t.sortDescending("Column3"), "Should not be able to sort by Column3", + NotSortableException.class); + assertException(temp, (t) -> t.sortDescending("Column2"), "Should not be able to sort by Column2", + NotSortableException.class); + + temp = func.apply(source, new String[] {"Column1a=Column1", "Column0=Column3", "Column2=Column2", + "Column5=Column3", "Column5=Column1", "Column1a=Column3+Column0"}); + assertException(temp, (t) -> t.sortDescending("Column1a"), "Should not be able to sort by Column1a", + NotSortableException.class); temp.sort("Column5"); temp.sort("Column0"); temp.sortDescending("Column3"); - assertException(temp, (t) -> t.sortDescending("Column2"), - "Should not be able to sort by Column2", NotSortableException.class); - 
assertException(temp, (t) -> t.sortDescending("Column1a"), - "Should not be able to sort by Column1a", NotSortableException.class); + assertException(temp, (t) -> t.sortDescending("Column2"), "Should not be able to sort by Column2", + NotSortableException.class); + assertException(temp, (t) -> t.sortDescending("Column1a"), "Should not be able to sort by Column1a", + NotSortableException.class); } public void testRestrictedSortingDropColumns() { @@ -263,19 +257,19 @@ public void testRestrictedSortingDropColumns() { QueryTable temp = (QueryTable) source.dropColumns("Column3"); temp.sort("Column1"); - assertException(temp, (t) -> t.sortDescending("Column3"), - "Should not be able to sort by Column3", NoSuchColumnException.class); + assertException(temp, (t) -> t.sortDescending("Column3"), "Should not be able to sort by Column3", + NoSuchColumnException.class); temp = (QueryTable) temp.update("Column3=Column0"); temp.sort("Column1"); - assertException(temp, (t) -> t.sortDescending("Column3"), - "Should not be able to sort by Column3", NotSortableException.class); + assertException(temp, (t) -> t.sortDescending("Column3"), "Should not be able to sort by Column3", + NotSortableException.class); temp = (QueryTable) temp.restrictSortTo("Column3"); temp.sort("Column3"); - assertException(temp, (t) -> t.sortDescending("Column1"), - "Should not be able to sort by Column1", NotSortableException.class); - assertException(temp, (t) -> t.sortDescending("Column0"), - "Should not be able to sort by Column0", NotSortableException.class); + assertException(temp, (t) -> t.sortDescending("Column1"), "Should not be able to sort by Column1", + NotSortableException.class); + assertException(temp, (t) -> t.sortDescending("Column0"), "Should not be able to sort by Column0", + NotSortableException.class); } public void testRestrictedSortingRenameColumns() { @@ -286,28 +280,28 @@ public void testRestrictedSortingRenameColumns() { source.restrictSortTo("Column1", "Column3"); 
source.sort("Column1"); source.sort("Column3"); - assertException(source, (t) -> t.sortDescending("Column0"), - "Should not be able to sort by Column0", NotSortableException.class); - assertException(source, (t) -> t.sortDescending("Column2"), - "Should not be able to sort by Column2", NotSortableException.class); + assertException(source, (t) -> t.sortDescending("Column0"), "Should not be able to sort by Column0", + NotSortableException.class); + assertException(source, (t) -> t.sortDescending("Column2"), "Should not be able to sort by Column2", + NotSortableException.class); - QueryTable temp = (QueryTable) source.renameColumns("Column0a=Column0", "Column1a=Column1", - "Column2a=Column2", "Column3a=Column3"); + QueryTable temp = (QueryTable) source.renameColumns("Column0a=Column0", "Column1a=Column1", "Column2a=Column2", + "Column3a=Column3"); temp.sort("Column1a"); temp.sort("Column3a"); - assertException(temp, (t) -> t.sortDescending("Column0a"), - "Should not be able to sort by Column0a", NotSortableException.class); - assertException(temp, (t) -> t.sortDescending("Column2a"), - "Should not be able to sort by Column2a", NotSortableException.class); - - assertException(temp, (t) -> t.sortDescending("Column0"), - "Should not be able to sort by Column0", NoSuchColumnException.class); - assertException(temp, (t) -> t.sortDescending("Column1"), - "Should not be able to sort by Column1", NoSuchColumnException.class); - assertException(temp, (t) -> t.sortDescending("Column2"), - "Should not be able to sort by Column2", NoSuchColumnException.class); - assertException(temp, (t) -> t.sortDescending("Column3"), - "Should not be able to sort by Column3", NoSuchColumnException.class); + assertException(temp, (t) -> t.sortDescending("Column0a"), "Should not be able to sort by Column0a", + NotSortableException.class); + assertException(temp, (t) -> t.sortDescending("Column2a"), "Should not be able to sort by Column2a", + NotSortableException.class); + + assertException(temp, 
(t) -> t.sortDescending("Column0"), "Should not be able to sort by Column0", + NoSuchColumnException.class); + assertException(temp, (t) -> t.sortDescending("Column1"), "Should not be able to sort by Column1", + NoSuchColumnException.class); + assertException(temp, (t) -> t.sortDescending("Column2"), "Should not be able to sort by Column2", + NoSuchColumnException.class); + assertException(temp, (t) -> t.sortDescending("Column3"), "Should not be able to sort by Column3", + NoSuchColumnException.class); temp = (QueryTable) temp.clearSortingRestrictions(); temp.sort("Column0a"); @@ -315,14 +309,14 @@ public void testRestrictedSortingRenameColumns() { temp.sort("Column2a"); temp.sort("Column3a"); - temp = (QueryTable) source.renameColumns("Column0=Column0", "Column1=Column1", - "Column2=Column2", "Column3=Column3"); + temp = (QueryTable) source.renameColumns("Column0=Column0", "Column1=Column1", "Column2=Column2", + "Column3=Column3"); temp.sort("Column1"); temp.sort("Column3"); - assertException(temp, (t) -> t.sortDescending("Column0"), - "Should not be able to sort by Column0", NotSortableException.class); - assertException(temp, (t) -> t.sortDescending("Column2"), - "Should not be able to sort by Column2", NotSortableException.class); + assertException(temp, (t) -> t.sortDescending("Column0"), "Should not be able to sort by Column0", + NotSortableException.class); + assertException(temp, (t) -> t.sortDescending("Column2"), "Should not be able to sort by Column2", + NotSortableException.class); } private class MultiColumnSortHelper { @@ -648,8 +642,7 @@ private void sortTypeTester(int ncols, int size, DataGenerator dataGenerator) { sortTester(ncols, size, boxedData, source); } - private void sortMultiTester(int ncols, int size, boolean grouped, - StringGenerator dataGenerator) { + private void sortMultiTester(int ncols, int size, boolean grouped, StringGenerator dataGenerator) { System.out.println("Sorting table of size " + size + " with " + ncols + " columns."); 
ColumnHolder columnHolders[] = new ColumnHolder[ncols + 1]; @@ -660,8 +653,7 @@ private void sortMultiTester(int ncols, int size, boolean grouped, for (int jj = 0; jj < size; jj++) { data[jj] = dataGenerator.makeEntry(); } - columnHolders[ii] = - new ColumnHolder<>("Column" + ii, String.class, null, grouped, data); + columnHolders[ii] = new ColumnHolder<>("Column" + ii, String.class, null, grouped, data); boxedData[ii] = data; } @@ -681,8 +673,7 @@ private void sortTester(int ncols, int size, Comparable[][] columnData, Table so sortTester(ncols, size, columnData, source, true); } - private void sortTester(int ncols, int size, Comparable[][] columnData, Table source, - boolean isRefreshing) { + private void sortTester(int ncols, int size, Comparable[][] columnData, Table source, boolean isRefreshing) { ((QueryTable) source).setRefreshing(isRefreshing); // Now sort the table by the sentinel, which should just give us a simple ordering. @@ -727,8 +718,7 @@ private void sortTester(int ncols, int size, Comparable[][] columnData, Table so MultiColumnSortHelper multiColumnSortHelper = new MultiColumnSortHelper(columnData, ii); for (int jj = 0; jj < size; ++jj) { assertEquals(multiColumnSortHelper.getSentinel(jj), (int) colAscending.get(jj)); - assertEquals(multiColumnSortHelper.getReverseSentinel(jj), - (int) colDescending.get(jj)); + assertEquals(multiColumnSortHelper.getReverseSentinel(jj), (int) colDescending.get(jj)); } } } diff --git a/DB/src/test/java/io/deephaven/db/v2/TestStreamTableTools.java b/DB/src/test/java/io/deephaven/db/v2/TestStreamTableTools.java index 25a20355181..00059db64fe 100644 --- a/DB/src/test/java/io/deephaven/db/v2/TestStreamTableTools.java +++ b/DB/src/test/java/io/deephaven/db/v2/TestStreamTableTools.java @@ -33,8 +33,7 @@ public void testStreamToAppendOnlyTable() { final DBDateTime dt3 = DBTimeUtils.convertDateTime("2021-08-11T11:22:00 NY"); final QueryTable streamTable = TstUtils.testRefreshingTable(i(1), intCol("I", 7), - doubleCol("D", 
Double.NEGATIVE_INFINITY), dateTimeCol("DT", dt1), - col("B", Boolean.TRUE)); + doubleCol("D", Double.NEGATIVE_INFINITY), dateTimeCol("DT", dt1), col("B", Boolean.TRUE)); streamTable.setAttribute(Table.STREAM_TABLE_ATTRIBUTE, true); final Table appendOnly = StreamTableTools.streamToAppendOnlyTable(streamTable); @@ -44,23 +43,23 @@ public void testStreamToAppendOnlyTable() { TestCase.assertTrue(appendOnly.isFlat()); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - TstUtils.addToTable(streamTable, i(7), intCol("I", 1), doubleCol("D", Math.PI), - dateTimeCol("DT", dt2), col("B", true)); + TstUtils.addToTable(streamTable, i(7), intCol("I", 1), doubleCol("D", Math.PI), dateTimeCol("DT", dt2), + col("B", true)); streamTable.notifyListeners(i(7), i(), i()); }); - assertTableEquals(TableTools.newTable(intCol("I", 7, 1), - doubleCol("D", Double.NEGATIVE_INFINITY, Math.PI), dateTimeCol("DT", dt1, dt2), - col("B", true, true)), appendOnly); + assertTableEquals(TableTools.newTable(intCol("I", 7, 1), doubleCol("D", Double.NEGATIVE_INFINITY, Math.PI), + dateTimeCol("DT", dt1, dt2), col("B", true, true)), appendOnly); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - TstUtils.addToTable(streamTable, i(7), intCol("I", 2), doubleCol("D", Math.E), - dateTimeCol("DT", dt3), col("B", false)); + TstUtils.addToTable(streamTable, i(7), intCol("I", 2), doubleCol("D", Math.E), dateTimeCol("DT", dt3), + col("B", false)); streamTable.notifyListeners(i(7), i(), i()); }); - assertTableEquals(TableTools.newTable(intCol("I", 7, 1, 2), - doubleCol("D", Double.NEGATIVE_INFINITY, Math.PI, Math.E), - dateTimeCol("DT", dt1, dt2, dt3), col("B", true, true, false)), appendOnly); + assertTableEquals( + TableTools.newTable(intCol("I", 7, 1, 2), doubleCol("D", Double.NEGATIVE_INFINITY, Math.PI, Math.E), + dateTimeCol("DT", dt1, dt2, dt3), col("B", true, true, false)), + appendOnly); } diff --git a/DB/src/test/java/io/deephaven/db/v2/TestSymbolTableCombiner.java 
b/DB/src/test/java/io/deephaven/db/v2/TestSymbolTableCombiner.java index 5f73dfbf44c..6b4008c234a 100644 --- a/DB/src/test/java/io/deephaven/db/v2/TestSymbolTableCombiner.java +++ b/DB/src/test/java/io/deephaven/db/v2/TestSymbolTableCombiner.java @@ -29,19 +29,17 @@ private void testSymbolTableCombiner(int seed) { final Random random = new Random(seed); final TstUtils.ColumnInfo[] columnInfo; - final QueryTable symbolTable = getTable(size, random, columnInfo = initColumnInfos( - new String[] {SymbolTableSource.ID_COLUMN_NAME, SymbolTableSource.SYMBOL_COLUMN_NAME}, - new TstUtils.UniqueLongGenerator(1, 10000000), - new TstUtils.StringGenerator(34000))); + final QueryTable symbolTable = getTable(size, random, + columnInfo = initColumnInfos( + new String[] {SymbolTableSource.ID_COLUMN_NAME, SymbolTableSource.SYMBOL_COLUMN_NAME}, + new TstUtils.UniqueLongGenerator(1, 10000000), + new TstUtils.StringGenerator(34000))); // noinspection unchecked - final ColumnSource symbolSource = - symbolTable.getColumnSource(SymbolTableSource.SYMBOL_COLUMN_NAME); + final ColumnSource symbolSource = symbolTable.getColumnSource(SymbolTableSource.SYMBOL_COLUMN_NAME); // noinspection unchecked - final ColumnSource idSource = - symbolTable.getColumnSource(SymbolTableSource.ID_COLUMN_NAME); - final SymbolTableCombiner combiner = - new SymbolTableCombiner(new ColumnSource[] {symbolSource}, 128); + final ColumnSource idSource = symbolTable.getColumnSource(SymbolTableSource.ID_COLUMN_NAME); + final SymbolTableCombiner combiner = new SymbolTableCombiner(new ColumnSource[] {symbolSource}, 128); final IntegerSparseArraySource symbolMapper = new IntegerSparseArraySource(); combiner.addSymbols(symbolTable, symbolMapper); @@ -62,25 +60,25 @@ private void testSymbolTableCombiner(int seed) { assertEquals(expected, uniqueId); } - final ShiftAwareListener symbolTableListener = new InstrumentedShiftAwareListenerAdapter( - "SymbolTableCombiner Adapter", symbolTable, false) { - @Override - public void 
onUpdate(final Update upstream) { - assertIndexEquals(i(), upstream.removed); - assertIndexEquals(i(), upstream.modified); - assertTrue(upstream.shifted.empty()); - combiner.addSymbols(symbolTable, upstream.added, symbolMapper); - checkAdditions(symbolTable, symbolSource, idSource, symbolMapper, uniqueIdMap); - } - - @Override - public void onFailureInternal(Throwable originalException, - UpdatePerformanceTracker.Entry sourceEntry) { - originalException.printStackTrace(); - TestCase.fail(originalException.getMessage()); - super.onFailureInternal(originalException, sourceEntry); - } - }; + final ShiftAwareListener symbolTableListener = + new InstrumentedShiftAwareListenerAdapter("SymbolTableCombiner Adapter", symbolTable, false) { + @Override + public void onUpdate(final Update upstream) { + assertIndexEquals(i(), upstream.removed); + assertIndexEquals(i(), upstream.modified); + assertTrue(upstream.shifted.empty()); + combiner.addSymbols(symbolTable, upstream.added, symbolMapper); + checkAdditions(symbolTable, symbolSource, idSource, symbolMapper, uniqueIdMap); + } + + @Override + public void onFailureInternal(Throwable originalException, + UpdatePerformanceTracker.Entry sourceEntry) { + originalException.printStackTrace(); + TestCase.fail(originalException.getMessage()); + super.onFailureInternal(originalException, sourceEntry); + } + }; symbolTable.listenForUpdates(symbolTableListener); for (int step = 0; step < 750; step++) { @@ -88,16 +86,15 @@ public void onFailureInternal(Throwable originalException, System.out.println("Step = " + step + ", size=" + symbolTable.size()); } LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - final Index[] updates = GenerateTableUpdates.computeTableUpdates(size / 10, random, - symbolTable, columnInfo, true, false, false); + final Index[] updates = GenerateTableUpdates.computeTableUpdates(size / 10, random, symbolTable, + columnInfo, true, false, false); symbolTable.notifyListeners(updates[0], updates[1], updates[2]); }); } 
} private static void checkAdditions(QueryTable symbolTable, ColumnSource symbolSource, - ColumnSource idSource, IntegerSparseArraySource symbolMapper, - Map uniqueIdMap) { + ColumnSource idSource, IntegerSparseArraySource symbolMapper, Map uniqueIdMap) { for (final Index.Iterator it = symbolTable.getIndex().iterator(); it.hasNext();) { final long key = it.nextLong(); final String symbol = symbolSource.get(key); @@ -106,8 +103,8 @@ private static void checkAdditions(QueryTable symbolTable, ColumnSource final int uniqueId = symbolMapper.get(id); final Integer old = uniqueIdMap.put(symbol, uniqueId); if (old != null && old != uniqueId) { - throw new IllegalStateException("Inconsistent IDs for " + symbol + ", found " - + uniqueId + " previous value was " + old + ", row=" + key); + throw new IllegalStateException("Inconsistent IDs for " + symbol + ", found " + uniqueId + + " previous value was " + old + ", row=" + key); } } } diff --git a/DB/src/test/java/io/deephaven/db/v2/TestTableValidator.java b/DB/src/test/java/io/deephaven/db/v2/TestTableValidator.java index 67a96e589bf..b3b0231deb0 100644 --- a/DB/src/test/java/io/deephaven/db/v2/TestTableValidator.java +++ b/DB/src/test/java/io/deephaven/db/v2/TestTableValidator.java @@ -20,10 +20,10 @@ public void testValidator() { final TstUtils.ColumnInfo[] columnInfo; final int size = 50; final QueryTable queryTable = getTable(size, random, - columnInfo = initColumnInfos(new String[] {"Sym", "intCol", "doubleCol"}, - new TstUtils.SetGenerator<>("a", "b", "c", "d", "e"), - new TstUtils.IntGenerator(10, 100), - new TstUtils.SetGenerator<>(10.1, 20.1, 30.1))); + columnInfo = initColumnInfos(new String[] {"Sym", "intCol", "doubleCol"}, + new TstUtils.SetGenerator<>("a", "b", "c", "d", "e"), + new TstUtils.IntGenerator(10, 100), + new TstUtils.SetGenerator<>(10.1, 20.1, 30.1))); final EvalNugget[] en = new EvalNugget[] { new EvalNugget() { diff --git a/DB/src/test/java/io/deephaven/db/v2/TestTotalsTable.java 
b/DB/src/test/java/io/deephaven/db/v2/TestTotalsTable.java index 148dd9a71b4..02715cecd83 100644 --- a/DB/src/test/java/io/deephaven/db/v2/TestTotalsTable.java +++ b/DB/src/test/java/io/deephaven/db/v2/TestTotalsTable.java @@ -24,7 +24,7 @@ public class TestTotalsTable extends LiveTableTestCase { private static final boolean ENABLE_COMPILER_TOOLS_LOGGING = Configuration.getInstance() - .getBooleanForClassWithDefault(TestTotalsTable.class, "CompilerTools.logEnabled", false); + .getBooleanForClassWithDefault(TestTotalsTable.class, "CompilerTools.logEnabled", false); private boolean oldCompilerToolsLogEnabled; @@ -59,44 +59,38 @@ public void testTotalsTable() { final Random random = new Random(0); final QueryTable queryTable = getTable(size, random, - initColumnInfos( - new String[] {"Sym", "intCol", "intCol2", "doubleCol", "doubleNullCol", - "doubleCol2", "floatCol", "charCol", "byteCol", "shortCol"}, - new TstUtils.SetGenerator<>("a", "b", "c", "d"), - new TstUtils.IntGenerator(10, 100), - new TstUtils.IntGenerator(1, 1000), - new TstUtils.DoubleGenerator(0, 100), - new TstUtils.DoubleGenerator(0, 100, 0.1, 0.001), - new TstUtils.SetGenerator<>(10.1, 20.1, 30.1), - new TstUtils.FloatGenerator(0, 100, 0.1, 0.001), - new TstUtils.CharGenerator('a', 'z'), - new TstUtils.ByteGenerator(), - new TstUtils.ShortGenerator())); + initColumnInfos( + new String[] {"Sym", "intCol", "intCol2", "doubleCol", "doubleNullCol", "doubleCol2", + "floatCol", "charCol", "byteCol", "shortCol"}, + new TstUtils.SetGenerator<>("a", "b", "c", "d"), + new TstUtils.IntGenerator(10, 100), + new TstUtils.IntGenerator(1, 1000), + new TstUtils.DoubleGenerator(0, 100), + new TstUtils.DoubleGenerator(0, 100, 0.1, 0.001), + new TstUtils.SetGenerator<>(10.1, 20.1, 30.1), + new TstUtils.FloatGenerator(0, 100, 0.1, 0.001), + new TstUtils.CharGenerator('a', 'z'), + new TstUtils.ByteGenerator(), + new TstUtils.ShortGenerator())); final TotalsTableBuilder builder = new TotalsTableBuilder(); - final Table 
totals = LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked( - () -> TotalsTableBuilder.makeTotalsTable(queryTable.setTotalsTable(builder))); + final Table totals = LiveTableMonitor.DEFAULT.exclusiveLock() + .computeLocked(() -> TotalsTableBuilder.makeTotalsTable(queryTable.setTotalsTable(builder))); final Map resultColumns = totals.getColumnSourceMap(); assertEquals(1, totals.size()); - assertEquals(new LinkedHashSet<>(Arrays.asList("intCol", "intCol2", "doubleCol", - "doubleNullCol", "doubleCol2", "floatCol", "byteCol", "shortCol")), - resultColumns.keySet()); - - assertEquals( - (long) IntegerNumericPrimitives.sum((int[]) queryTable.getColumn("intCol").getDirect()), - totals.getColumn("intCol").get(0)); - assertEquals( - DoubleNumericPrimitives.sum((double[]) queryTable.getColumn("doubleCol").getDirect()), - totals.getColumn("doubleCol").get(0)); - assertEquals( - DoubleNumericPrimitives - .sum((double[]) queryTable.getColumn("doubleNullCol").getDirect()), - totals.getColumn("doubleNullCol").get(0)); - assertEquals("floatCol", - FloatNumericPrimitives.sum((float[]) queryTable.getColumn("floatCol").getDirect()), - (float) totals.getColumn("floatCol").get(0), 0.02); + assertEquals(new LinkedHashSet<>(Arrays.asList("intCol", "intCol2", "doubleCol", "doubleNullCol", "doubleCol2", + "floatCol", "byteCol", "shortCol")), resultColumns.keySet()); + + assertEquals((long) IntegerNumericPrimitives.sum((int[]) queryTable.getColumn("intCol").getDirect()), + totals.getColumn("intCol").get(0)); + assertEquals(DoubleNumericPrimitives.sum((double[]) queryTable.getColumn("doubleCol").getDirect()), + totals.getColumn("doubleCol").get(0)); + assertEquals(DoubleNumericPrimitives.sum((double[]) queryTable.getColumn("doubleNullCol").getDirect()), + totals.getColumn("doubleNullCol").get(0)); + assertEquals("floatCol", FloatNumericPrimitives.sum((float[]) queryTable.getColumn("floatCol").getDirect()), + (float) totals.getColumn("floatCol").get(0), 0.02); 
assertEquals(shortSum((short[]) queryTable.getColumn("shortCol").getDirect()), - totals.getColumn("shortCol").get(0)); + totals.getColumn("shortCol").get(0)); builder.setDefaultOperation("skip"); builder.setOperation("byteCol", "min"); @@ -104,15 +98,13 @@ public void testTotalsTable() { builder.setOperation("intCol2", "last"); final Table totals2 = LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLocked(() -> TotalsTableBuilder.makeTotalsTable(queryTable, builder)); + .computeLocked(() -> TotalsTableBuilder.makeTotalsTable(queryTable, builder)); assertEquals(new LinkedHashSet<>(Arrays.asList("Sym", "intCol2", "byteCol")), - totals2.getColumnSourceMap().keySet()); - assertEquals( - ByteNumericPrimitives.min((byte[]) queryTable.getColumn("byteCol").getDirect()), - totals2.getColumn("byteCol").get(0)); + totals2.getColumnSourceMap().keySet()); + assertEquals(ByteNumericPrimitives.min((byte[]) queryTable.getColumn("byteCol").getDirect()), + totals2.getColumn("byteCol").get(0)); assertEquals(queryTable.getColumn("Sym").get(0), totals2.getColumn("Sym").get(0)); - assertEquals(queryTable.getColumn("intCol2").get(queryTable.size() - 1), - totals2.getColumn("intCol2").get(0)); + assertEquals(queryTable.getColumn("intCol2").get(queryTable.size() - 1), totals2.getColumn("intCol2").get(0)); builder.setOperation("byteCol", "max"); builder.setOperation("doubleCol", "var"); @@ -124,32 +116,30 @@ public void testTotalsTable() { final boolean old = QueryTable.setMemoizeResults(true); try { final Table totals3 = LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLocked(() -> TotalsTableBuilder.makeTotalsTable(queryTable, builder)); + .computeLocked(() -> TotalsTableBuilder.makeTotalsTable(queryTable, builder)); assertEquals( - new LinkedHashSet<>( - Arrays.asList("Sym", "intCol2", "doubleCol", "doubleNullCol__Std", - "doubleNullCol__Count", "doubleCol2", "byteCol", "shortCol")), - totals3.getColumnSourceMap().keySet()); + new LinkedHashSet<>(Arrays.asList("Sym", "intCol2", 
"doubleCol", "doubleNullCol__Std", + "doubleNullCol__Count", "doubleCol2", "byteCol", "shortCol")), + totals3.getColumnSourceMap().keySet()); + assertEquals(ByteNumericPrimitives.max((byte[]) queryTable.getColumn("byteCol").getDirect()), + totals3.getColumn("byteCol").get(0)); assertEquals( - ByteNumericPrimitives.max((byte[]) queryTable.getColumn("byteCol").getDirect()), - totals3.getColumn("byteCol").get(0)); + DoubleNumericPrimitives + .var(new DbDoubleArrayDirect((double[]) queryTable.getColumn("doubleCol").getDirect())), + totals3.getColumn("doubleCol").get(0)); assertEquals( - DoubleNumericPrimitives.var(new DbDoubleArrayDirect( - (double[]) queryTable.getColumn("doubleCol").getDirect())), - totals3.getColumn("doubleCol").get(0)); - assertEquals( - DoubleNumericPrimitives.std(new DbDoubleArrayDirect( - (double[]) queryTable.getColumn("doubleNullCol").getDirect())), - totals3.getColumn("doubleNullCol__Std").get(0)); + DoubleNumericPrimitives + .std(new DbDoubleArrayDirect((double[]) queryTable.getColumn("doubleNullCol").getDirect())), + totals3.getColumn("doubleNullCol__Std").get(0)); assertEquals(queryTable.size(), totals3.getColumn("doubleNullCol__Count").get(0)); assertEquals( - DoubleNumericPrimitives.avg(new DbDoubleArrayDirect( - (double[]) queryTable.getColumn("doubleCol2").getDirect())), - totals3.getColumn("doubleCol2").get(0)); + DoubleNumericPrimitives + .avg(new DbDoubleArrayDirect((double[]) queryTable.getColumn("doubleCol2").getDirect())), + totals3.getColumn("doubleCol2").get(0)); assertEquals(queryTable.size(), (long) totals3.getColumn("shortCol").get(0)); final Table totals4 = LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLocked(() -> TotalsTableBuilder.makeTotalsTable(queryTable, builder)); + .computeLocked(() -> TotalsTableBuilder.makeTotalsTable(queryTable, builder)); assertTrue(totals3 == totals4); } finally { QueryTable.setMemoizeResults(old); @@ -161,10 +151,8 @@ public void testTotalsTableIncremental() throws IOException { final 
Random random = new Random(0); final TstUtils.ColumnInfo columnInfo[]; - final QueryTable queryTable = getTable(size, random, - columnInfo = initColumnInfos( - new String[] {"Sym", "intCol", "intCol2", "doubleCol", "doubleNullCol", - "doubleCol2", "shortCol"}, + final QueryTable queryTable = getTable(size, random, columnInfo = initColumnInfos( + new String[] {"Sym", "intCol", "intCol2", "doubleCol", "doubleNullCol", "doubleCol2", "shortCol"}, new TstUtils.SetGenerator<>("a", "b", "c", "d"), new TstUtils.IntGenerator(10, 100), new TstUtils.IntGenerator(1, 1000), @@ -178,22 +166,20 @@ public void testTotalsTableIncremental() throws IOException { public Table e() { final TotalsTableBuilder totalsTableBuilder = new TotalsTableBuilder(); return LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLocked(() -> queryTable.setTotalsTable(totalsTableBuilder)); + .computeLocked(() -> queryTable.setTotalsTable(totalsTableBuilder)); } }, new EvalNugget() { public Table e() { final TotalsTableBuilder totalsTableBuilder = new TotalsTableBuilder(); - return LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLocked(() -> TotalsTableBuilder + return LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked(() -> TotalsTableBuilder .makeTotalsTable(queryTable.setTotalsTable(totalsTableBuilder))); } }, new EvalNugget() { public Table e() { final TotalsTableBuilder totalsTableBuilder = new TotalsTableBuilder(); - return LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLocked(() -> TotalsTableBuilder + return LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked(() -> TotalsTableBuilder .makeTotalsTable(queryTable.setTotalsTable(totalsTableBuilder))); } }, diff --git a/DB/src/test/java/io/deephaven/db/v2/TestUngroupRebase.java b/DB/src/test/java/io/deephaven/db/v2/TestUngroupRebase.java index fe2e6a48bad..a187716c7ec 100644 --- a/DB/src/test/java/io/deephaven/db/v2/TestUngroupRebase.java +++ b/DB/src/test/java/io/deephaven/db/v2/TestUngroupRebase.java @@ -22,16 +22,13 @@ public void 
testUngroupAgnosticRebase() throws IOException { int size = 9; Random random = new Random(0); - ColumnHolder arrayColumnHolder = - TstUtils.c("Y", new int[] {10, 20}, new int[] {110, 120, 130}); - final QueryTable table = - TstUtils.testRefreshingTable(TstUtils.c("X", 1, 3), arrayColumnHolder); + ColumnHolder arrayColumnHolder = TstUtils.c("Y", new int[] {10, 20}, new int[] {110, 120, 130}); + final QueryTable table = TstUtils.testRefreshingTable(TstUtils.c("X", 1, 3), arrayColumnHolder); EvalNugget en[] = new EvalNugget[] { new EvalNugget() { public Table e() { - return LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLocked(table::ungroup); + return LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked(table::ungroup); } }, }; @@ -70,16 +67,14 @@ public Table e() { // Time to start fresh again, so we can do an addition operation, // without having such a high base for the table. - arrayColumnHolder = TstUtils.c("Y", new int[] {10, 20}, new int[] {200}, - new int[] {110, 120, 130}, new int[] {310}); - final QueryTable table2 = - TstUtils.testRefreshingTable(TstUtils.c("X", 1, 2, 3, 4), arrayColumnHolder); + arrayColumnHolder = + TstUtils.c("Y", new int[] {10, 20}, new int[] {200}, new int[] {110, 120, 130}, new int[] {310}); + final QueryTable table2 = TstUtils.testRefreshingTable(TstUtils.c("X", 1, 2, 3, 4), arrayColumnHolder); en = new EvalNugget[] { new EvalNugget() { public Table e() { - return LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLocked(table2::ungroup); + return LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked(table2::ungroup); } }, }; diff --git a/DB/src/test/java/io/deephaven/db/v2/TickSuppressorTest.java b/DB/src/test/java/io/deephaven/db/v2/TickSuppressorTest.java index ba14372452e..87071ff8508 100644 --- a/DB/src/test/java/io/deephaven/db/v2/TickSuppressorTest.java +++ b/DB/src/test/java/io/deephaven/db/v2/TickSuppressorTest.java @@ -24,23 +24,20 @@ public void testModifyToAddRemoves() { final ColumnInfo[] columnInfo; final int 
size = 50; final QueryTable queryTable = getTable(size, random, - columnInfo = initColumnInfos(new String[] {"Sym", "intCol", "doubleCol"}, - new SetGenerator<>("a", "b", "c", "d", "e"), - new IntGenerator(10, 100), - new SetGenerator<>(10.1, 20.1, 30.1))); + columnInfo = initColumnInfos(new String[] {"Sym", "intCol", "doubleCol"}, + new SetGenerator<>("a", "b", "c", "d", "e"), + new IntGenerator(10, 100), + new SetGenerator<>(10.1, 20.1, 30.1))); final Table sortedTable = queryTable.sort("intCol"); final EvalNugget[] en = new EvalNugget[] { - EvalNugget - .from(() -> TickSuppressor.convertModificationsToAddsAndRemoves(queryTable)), - EvalNugget - .from(() -> TickSuppressor.convertModificationsToAddsAndRemoves(sortedTable)), - EvalNugget.from(() -> TickSuppressor.convertModificationsToAddsAndRemoves( - TableTools.merge(queryTable, sortedTable))), + EvalNugget.from(() -> TickSuppressor.convertModificationsToAddsAndRemoves(queryTable)), + EvalNugget.from(() -> TickSuppressor.convertModificationsToAddsAndRemoves(sortedTable)), + EvalNugget.from(() -> TickSuppressor + .convertModificationsToAddsAndRemoves(TableTools.merge(queryTable, sortedTable))), EvalNugget.from(() -> TickSuppressor.convertModificationsToAddsAndRemoves( - queryTable.naturalJoin(queryTable.lastBy("Sym"), "Sym", - "intCol2=intCol,doubleCol2=doubleCol"))) + queryTable.naturalJoin(queryTable.lastBy("Sym"), "Sym", "intCol2=intCol,doubleCol2=doubleCol"))) }; for (int i = 0; i < 50; i++) { @@ -65,64 +62,49 @@ private void testRemoveSpuriousModificationsIterative(int seed, int size, int ma final ColumnInfo[] columnInfo; final QueryTable queryTable = getTable(size, random, - columnInfo = - initColumnInfos(new String[] {"Sym", "intCol", "doubleCol", "boolCol", "boolCol2"}, - new SetGenerator<>("a", "b"), - new IntGenerator(0, 5), - new SetGenerator<>(10.1, 20.1, 30.1), - new BooleanGenerator(0.5, 0.0), - new BooleanGenerator(0.95, 0.0))); + columnInfo = initColumnInfos(new String[] {"Sym", "intCol", 
"doubleCol", "boolCol", "boolCol2"}, + new SetGenerator<>("a", "b"), + new IntGenerator(0, 5), + new SetGenerator<>(10.1, 20.1, 30.1), + new BooleanGenerator(0.5, 0.0), + new BooleanGenerator(0.95, 0.0))); final Table sortedTable = queryTable.sort("intCol"); final EvalNugget[] en = new EvalNugget[] { EvalNugget.from(() -> TickSuppressor.removeSpuriousModifications(queryTable)), - EvalNugget.from( - () -> TickSuppressor.removeSpuriousModifications(queryTable.view("boolCol"))), - EvalNugget.from( - () -> TickSuppressor.removeSpuriousModifications(queryTable.view("boolCol2"))), - EvalNugget.from(() -> TickSuppressor - .removeSpuriousModifications(queryTable.view("boolCol", "boolCol2"))), + EvalNugget.from(() -> TickSuppressor.removeSpuriousModifications(queryTable.view("boolCol"))), + EvalNugget.from(() -> TickSuppressor.removeSpuriousModifications(queryTable.view("boolCol2"))), + EvalNugget + .from(() -> TickSuppressor.removeSpuriousModifications(queryTable.view("boolCol", "boolCol2"))), EvalNugget.from(() -> TickSuppressor.removeSpuriousModifications(sortedTable)), + EvalNugget.from(() -> TickSuppressor.removeSpuriousModifications(sortedTable.view("boolCol"))), + EvalNugget.from(() -> TickSuppressor.removeSpuriousModifications(sortedTable.view("boolCol2"))), EvalNugget.from( - () -> TickSuppressor.removeSpuriousModifications(sortedTable.view("boolCol"))), + () -> TickSuppressor.removeSpuriousModifications(sortedTable.view("boolCol", "boolCol2"))), EvalNugget.from( - () -> TickSuppressor.removeSpuriousModifications(sortedTable.view("boolCol2"))), - EvalNugget.from(() -> TickSuppressor - .removeSpuriousModifications(sortedTable.view("boolCol", "boolCol2"))), - EvalNugget.from(() -> TickSuppressor - .removeSpuriousModifications(TableTools.merge(queryTable, sortedTable))), - EvalNugget - .from(() -> TickSuppressor.removeSpuriousModifications(queryTable.naturalJoin( - queryTable.lastBy("Sym"), "Sym", "intCol2=intCol,doubleCol2=doubleCol"))), - EvalNugget - .from(() -> 
queryTable.naturalJoin( - TickSuppressor.removeSpuriousModifications( - queryTable.view("Sym", "boolCol").lastBy("Sym")), + () -> TickSuppressor.removeSpuriousModifications(TableTools.merge(queryTable, sortedTable))), + EvalNugget.from(() -> TickSuppressor.removeSpuriousModifications(queryTable + .naturalJoin(queryTable.lastBy("Sym"), "Sym", "intCol2=intCol,doubleCol2=doubleCol"))), + EvalNugget.from(() -> queryTable.naturalJoin( + TickSuppressor.removeSpuriousModifications(queryTable.view("Sym", "boolCol").lastBy("Sym")), "Sym", "jbc=boolCol")), - EvalNugget - .from(() -> queryTable.naturalJoin( - TickSuppressor.removeSpuriousModifications( - queryTable.view("Sym", "boolCol2").lastBy("Sym")), + EvalNugget.from(() -> queryTable.naturalJoin( + TickSuppressor.removeSpuriousModifications(queryTable.view("Sym", "boolCol2").lastBy("Sym")), "Sym", "jbc2=boolCol2")), EvalNugget.from(() -> queryTable.naturalJoin( - TickSuppressor.removeSpuriousModifications( - queryTable.view("Sym", "boolCol", "boolCol2").lastBy("Sym")), - "Sym", "jbc=boolCol,jbc2=boolCol2")), - EvalNugget - .from(() -> TickSuppressor.removeSpuriousModifications(queryTable.naturalJoin( TickSuppressor.removeSpuriousModifications( - queryTable.view("Sym", "boolCol").lastBy("Sym")), + queryTable.view("Sym", "boolCol", "boolCol2").lastBy("Sym")), + "Sym", "jbc=boolCol,jbc2=boolCol2")), + EvalNugget.from(() -> TickSuppressor.removeSpuriousModifications(queryTable.naturalJoin( + TickSuppressor.removeSpuriousModifications(queryTable.view("Sym", "boolCol").lastBy("Sym")), "Sym", "jbc=boolCol"))), - EvalNugget - .from(() -> TickSuppressor.removeSpuriousModifications(queryTable.naturalJoin( - TickSuppressor.removeSpuriousModifications( - queryTable.view("Sym", "boolCol2").lastBy("Sym")), + EvalNugget.from(() -> TickSuppressor.removeSpuriousModifications(queryTable.naturalJoin( + TickSuppressor.removeSpuriousModifications(queryTable.view("Sym", "boolCol2").lastBy("Sym")), "Sym", "jbc2=boolCol2"))), - EvalNugget - 
.from(() -> TickSuppressor.removeSpuriousModifications(queryTable.naturalJoin( + EvalNugget.from(() -> TickSuppressor.removeSpuriousModifications(queryTable.naturalJoin( TickSuppressor.removeSpuriousModifications( - queryTable.view("Sym", "boolCol", "boolCol2").lastBy("Sym")), + queryTable.view("Sym", "boolCol", "boolCol2").lastBy("Sym")), "Sym", "jbc=boolCol,jbc2=boolCol2"))) }; @@ -136,19 +118,17 @@ private void testRemoveSpuriousModificationsIterative(int seed, int size, int ma public void testRemoveSpuriousModifications() { final QueryTable input = TstUtils.testRefreshingTable(i(5, 10, 15), - intCol("SentinelA", 5, 10, 15), - intCol("SentinelB", 20, 30, 40)); + intCol("SentinelA", 5, 10, 15), + intCol("SentinelB", 20, 30, 40)); - final DynamicTable suppressed = - (DynamicTable) TickSuppressor.removeSpuriousModifications(input); + final DynamicTable suppressed = (DynamicTable) TickSuppressor.removeSpuriousModifications(input); final SimpleShiftAwareListener listener = new SimpleShiftAwareListener(suppressed); suppressed.listenForUpdates(listener); assertEquals(0, listener.getCount()); - LiveTableMonitor.DEFAULT - .runWithinUnitTestCycle(() -> input.notifyListeners(i(), i(), i(5))); + LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> input.notifyListeners(i(), i(), i(5))); assertEquals(0, listener.getCount()); @@ -162,10 +142,8 @@ public void testRemoveSpuriousModifications() { assertEquals(i(5), listener.update.modified); assertEquals(i(), listener.update.removed); assertEquals(IndexShiftData.EMPTY, listener.update.shifted); - assertFalse(listener.update.modifiedColumnSet - .containsAny(suppressed.newModifiedColumnSet("SentinelA"))); - assertTrue(listener.update.modifiedColumnSet - .containsAny(suppressed.newModifiedColumnSet("SentinelB"))); + assertFalse(listener.update.modifiedColumnSet.containsAny(suppressed.newModifiedColumnSet("SentinelA"))); + assertTrue(listener.update.modifiedColumnSet.containsAny(suppressed.newModifiedColumnSet("SentinelB"))); 
LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { addToTable(input, i(10, 15), intCol("SentinelA", 12, 15), intCol("SentinelB", 30, 40)); @@ -178,10 +156,8 @@ public void testRemoveSpuriousModifications() { assertEquals(i(10), listener.update.modified); assertEquals(i(5), listener.update.removed); assertEquals(IndexShiftData.EMPTY, listener.update.shifted); - assertTrue(listener.update.modifiedColumnSet - .containsAny(suppressed.newModifiedColumnSet("SentinelA"))); - assertFalse(listener.update.modifiedColumnSet - .containsAny(suppressed.newModifiedColumnSet("SentinelB"))); + assertTrue(listener.update.modifiedColumnSet.containsAny(suppressed.newModifiedColumnSet("SentinelA"))); + assertFalse(listener.update.modifiedColumnSet.containsAny(suppressed.newModifiedColumnSet("SentinelB"))); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { addToTable(input, i(20), intCol("SentinelA", 20), intCol("SentinelB", 50)); @@ -193,9 +169,7 @@ public void testRemoveSpuriousModifications() { assertEquals(i(), listener.update.modified); assertEquals(i(), listener.update.removed); assertEquals(IndexShiftData.EMPTY, listener.update.shifted); - assertFalse(listener.update.modifiedColumnSet - .containsAny(suppressed.newModifiedColumnSet("SentinelA"))); - assertFalse(listener.update.modifiedColumnSet - .containsAny(suppressed.newModifiedColumnSet("SentinelB"))); + assertFalse(listener.update.modifiedColumnSet.containsAny(suppressed.newModifiedColumnSet("SentinelA"))); + assertFalse(listener.update.modifiedColumnSet.containsAny(suppressed.newModifiedColumnSet("SentinelB"))); } } diff --git a/DB/src/test/java/io/deephaven/db/v2/TstUtils.java b/DB/src/test/java/io/deephaven/db/v2/TstUtils.java index 34743bb2170..82736f3192f 100644 --- a/DB/src/test/java/io/deephaven/db/v2/TstUtils.java +++ b/DB/src/test/java/io/deephaven/db/v2/TstUtils.java @@ -50,8 +50,7 @@ public static Index ir(final long firstKey, final long lastKey) { return Index.FACTORY.getIndexByRange(firstKey, lastKey); 
} - public static void addToTable(final Table table, final Index index, - final ColumnHolder... columnHolders) { + public static void addToTable(final Table table, final Index index, final ColumnHolder... columnHolders) { Require.requirement(table.isLive(), "table.isLive()"); if (table instanceof DynamicTable) { Require.requirement(((DynamicTable) table).isRefreshing(), "table.isRefreshing()"); @@ -65,22 +64,19 @@ public static void addToTable(final Table table, final Index index, final Object[] boxedArray = ArrayUtils.getBoxedArray(columnHolder.data); final Index colIndex = (boxedArray.length == 0) ? TstUtils.i() : index; if (colIndex.size() != boxedArray.length) { - throw new IllegalArgumentException( - columnHolder.name + ": Invalid data addition: index=" + colIndex.size() - + ", boxedArray=" + boxedArray.length); + throw new IllegalArgumentException(columnHolder.name + ": Invalid data addition: index=" + + colIndex.size() + ", boxedArray=" + boxedArray.length); } if (colIndex.size() == 0) { continue; } - if (columnSource instanceof DateTimeTreeMapSource - && columnHolder.dataType == long.class) { + if (columnSource instanceof DateTimeTreeMapSource && columnHolder.dataType == long.class) { final DateTimeTreeMapSource treeMapSource = (DateTimeTreeMapSource) columnSource; treeMapSource.add(colIndex, (Long[]) boxedArray); } else if (columnSource.getType() != columnHolder.dataType) { - throw new UnsupportedOperationException( - columnHolder.name + ": Adding invalid type: source.getType()=" + throw new UnsupportedOperationException(columnHolder.name + ": Adding invalid type: source.getType()=" + columnSource.getType() + ", columnHolder=" + columnHolder.dataType); } @@ -102,8 +98,8 @@ public static void addToTable(final Table table, final Index index, table.getIndex().insert(index); if (table.isFlat()) { - Assert.assertion(table.getIndex().isFlat(), "table.getIndex().isFlat()", - table.getIndex(), "table.getIndex()", index, "index"); + 
Assert.assertion(table.getIndex().isFlat(), "table.getIndex().isFlat()", table.getIndex(), + "table.getIndex()", index, "index"); } } @@ -114,8 +110,8 @@ public static void removeRows(Table table, Index index) { } table.getIndex().remove(index); if (table.isFlat()) { - Assert.assertion(table.getIndex().isFlat(), "table.getIndex().isFlat()", - table.getIndex(), "table.getIndex()", index, "index"); + Assert.assertion(table.getIndex().isFlat(), "table.getIndex().isFlat()", table.getIndex(), + "table.getIndex()", index, "index"); } for (ColumnSource columnSource : table.getColumnSources()) { if (columnSource instanceof TreeMapSource) { @@ -138,8 +134,7 @@ public static ColumnHolder getRandomStringCol(String colName, int size, Random r return c(colName, data); } - public static ColumnHolder getRandomStringArrayCol(String colName, int size, Random random, - int maxSz) { + public static ColumnHolder getRandomStringArrayCol(String colName, int size, Random random, int maxSz) { final String data[][] = new String[size][]; for (int i = 0; i < data.length; i++) { final String[] v = new String[random.nextInt(maxSz)]; @@ -151,8 +146,7 @@ public static ColumnHolder getRandomStringArrayCol(String colName, int size, Ran return c(colName, data); } - public static ColumnHolder getRandomStringSetCol(String colName, int size, Random random, - int maxSz) { + public static ColumnHolder getRandomStringSetCol(String colName, int size, Random random, int maxSz) { final StringSet data[] = new StringSet[size]; for (int i = 0; i < data.length; i++) { final String[] v = new String[random.nextInt(maxSz)]; @@ -238,8 +232,7 @@ public static ColumnHolder getRandomByteCol(String colName, int size, Random ran return new ColumnHolder(colName, false, data); } - public static ColumnHolder getRandomByteArrayCol(String colName, int size, Random random, - int maxSz) { + public static ColumnHolder getRandomByteArrayCol(String colName, int size, Random random, int maxSz) { final byte data[][] = new 
byte[size][]; for (int i = 0; i < size; i++) { final byte[] b = new byte[random.nextInt(maxSz)]; @@ -249,8 +242,7 @@ public static ColumnHolder getRandomByteArrayCol(String colName, int size, Rando return c(colName, data); } - public static ColumnHolder getRandomBooleanArrayCol(String colName, int size, Random random, - int maxSz) { + public static ColumnHolder getRandomBooleanArrayCol(String colName, int size, Random random, int maxSz) { final Boolean data[][] = new Boolean[size][]; for (int i = 0; i < size; i++) { final Boolean[] v = new Boolean[random.nextInt(maxSz)]; @@ -262,8 +254,7 @@ public static ColumnHolder getRandomBooleanArrayCol(String colName, int size, Ra return c(colName, data); } - public static ColumnHolder getRandomIntArrayCol(String colName, int size, Random random, - int maxSz) { + public static ColumnHolder getRandomIntArrayCol(String colName, int size, Random random, int maxSz) { final int data[][] = new int[size][]; for (int i = 0; i < size; i++) { final int[] v = new int[random.nextInt(maxSz)]; @@ -275,8 +266,7 @@ public static ColumnHolder getRandomIntArrayCol(String colName, int size, Random return c(colName, data); } - public static ColumnHolder getRandomLongArrayCol(String colName, int size, Random random, - int maxSz) { + public static ColumnHolder getRandomLongArrayCol(String colName, int size, Random random, int maxSz) { final long data[][] = new long[size][]; for (int i = 0; i < size; i++) { final long[] v = new long[random.nextInt(maxSz)]; @@ -288,8 +278,7 @@ public static ColumnHolder getRandomLongArrayCol(String colName, int size, Rando return c(colName, data); } - public static ColumnHolder getRandomShortArrayCol(String colName, int size, Random random, - int maxSz) { + public static ColumnHolder getRandomShortArrayCol(String colName, int size, Random random, int maxSz) { final short data[][] = new short[size][]; for (int i = 0; i < size; i++) { final short[] v = new short[random.nextInt(maxSz)]; @@ -301,8 +290,7 @@ public 
static ColumnHolder getRandomShortArrayCol(String colName, int size, Rand return c(colName, data); } - public static ColumnHolder getRandomDoubleArrayCol(String colName, int size, Random random, - int maxSz) { + public static ColumnHolder getRandomDoubleArrayCol(String colName, int size, Random random, int maxSz) { final double data[][] = new double[size][]; for (int i = 0; i < size; i++) { final double[] v = new double[random.nextInt(maxSz)]; @@ -314,8 +302,7 @@ public static ColumnHolder getRandomDoubleArrayCol(String colName, int size, Ran return c(colName, data); } - public static ColumnHolder getRandomFloatArrayCol(String colName, int size, Random random, - int maxSz) { + public static ColumnHolder getRandomFloatArrayCol(String colName, int size, Random random, int maxSz) { final float data[][] = new float[size][]; for (int i = 0; i < size; i++) { final float[] v = new float[random.nextInt(maxSz)]; @@ -327,8 +314,7 @@ public static ColumnHolder getRandomFloatArrayCol(String colName, int size, Rand return c(colName, data); } - public static ColumnHolder getRandomCharArrayCol(String colName, int size, Random random, - int maxSz) { + public static ColumnHolder getRandomCharArrayCol(String colName, int size, Random random, int maxSz) { final char data[][] = new char[size][]; for (int i = 0; i < size; i++) { final char[] v = new char[random.nextInt(maxSz)]; @@ -393,11 +379,10 @@ static Index getInitialIndex(int size, Random random) { } public static Index selectSubIndexSet(int size, Index sourceIndex, Random random) { - Assert.assertion(size <= sourceIndex.size(), "size <= sourceIndex.size()", size, "size", - sourceIndex, "sourceIndex.size()"); + Assert.assertion(size <= sourceIndex.size(), "size <= sourceIndex.size()", size, "size", sourceIndex, + "sourceIndex.size()"); - // generate an array that is the size of our index, then shuffle it, and those are the - // positions we'll pick + // generate an array that is the size of our index, then shuffle it, and those 
are the positions we'll pick final Integer[] positions = new Integer[(int) sourceIndex.size()]; for (int ii = 0; ii < positions.length; ++ii) { positions[ii] = ii; @@ -416,12 +401,12 @@ public static Index selectSubIndexSet(int size, Index sourceIndex, Random random public static Index newIndex(int targetSize, Index sourceIndex, Random random) { final long maxKey = (sourceIndex.size() == 0 ? 0 : sourceIndex.lastKey()); final long emptySlots = maxKey - sourceIndex.size(); - final int slotsToFill = Math - .min(Math.min((int) (Math.max(0.0, ((random.nextGaussian() / 0.1) + 0.9)) * emptySlots), - targetSize), (int) emptySlots); + final int slotsToFill = Math.min( + Math.min((int) (Math.max(0.0, ((random.nextGaussian() / 0.1) + 0.9)) * emptySlots), targetSize), + (int) emptySlots); - final Index fillIn = selectSubIndexSet(slotsToFill, - Index.FACTORY.getIndexByRange(0, maxKey).minus(sourceIndex), random); + final Index fillIn = + selectSubIndexSet(slotsToFill, Index.FACTORY.getIndexByRange(0, maxKey).minus(sourceIndex), random); final int endSlots = targetSize - (int) fillIn.size(); @@ -429,22 +414,21 @@ public static Index newIndex(int targetSize, Index sourceIndex, Random random) { density = density < 0.1 ? 0.1 : density; density = density > 1 ? 
1 : density; final long rangeSize = (long) ((1.0 / density) * endSlots); - final Index expansion = selectSubIndexSet(endSlots, - Index.FACTORY.getIndexByRange(maxKey + 1, maxKey + rangeSize + 1), random); + final Index expansion = + selectSubIndexSet(endSlots, Index.FACTORY.getIndexByRange(maxKey + 1, maxKey + rangeSize + 1), random); fillIn.insert(expansion); - Assert.assertion(fillIn.size() == targetSize, "fillIn.size() == targetSize", fillIn.size(), - "fillIn.size()", targetSize, "targetSize", endSlots, "endSlots", slotsToFill, - "slotsToFill"); + Assert.assertion(fillIn.size() == targetSize, "fillIn.size() == targetSize", fillIn.size(), "fillIn.size()", + targetSize, "targetSize", endSlots, "endSlots", slotsToFill, "slotsToFill"); return fillIn; } public static ColumnInfo[] initColumnInfos(String names[], Generator... generators) { if (names.length != generators.length) { - throw new IllegalArgumentException("names and generator lengths mismatch: " - + names.length + " != " + generators.length); + throw new IllegalArgumentException( + "names and generator lengths mismatch: " + names.length + " != " + generators.length); } final ColumnInfo[] result = new ColumnInfo[names.length]; @@ -455,11 +439,11 @@ public static ColumnInfo[] initColumnInfos(String names[], Generator... generato return result; } - public static ColumnInfo[] initColumnInfos(String names[], - ColumnInfo.ColAttributes attributes[], Generator... generators) { + public static ColumnInfo[] initColumnInfos(String names[], ColumnInfo.ColAttributes attributes[], + Generator... 
generators) { if (names.length != generators.length) { - throw new IllegalArgumentException("names and generator lengths mismatch: " - + names.length + " != " + generators.length); + throw new IllegalArgumentException( + "names and generator lengths mismatch: " + names.length + " != " + generators.length); } final ColumnInfo[] result = new ColumnInfo[names.length]; @@ -470,18 +454,18 @@ public static ColumnInfo[] initColumnInfos(String names[], return result; } - public static ColumnInfo[] initColumnInfos(String names[], - List> attributes, Generator... generators) { + public static ColumnInfo[] initColumnInfos(String names[], List> attributes, + Generator... generators) { if (names.length != generators.length) { - throw new IllegalArgumentException("names and generator lengths mismatch: " - + names.length + " != " + generators.length); + throw new IllegalArgumentException( + "names and generator lengths mismatch: " + names.length + " != " + generators.length); } final ColumnInfo[] result = new ColumnInfo[names.length]; for (int ii = 0; ii < result.length; ii++) { // noinspection unchecked result[ii] = new ColumnInfo(generators[ii], names[ii], - attributes.get(ii).toArray(ColumnInfo.ZERO_LENGTH_COLUMN_ATTRIBUTES_ARRAY)); + attributes.get(ii).toArray(ColumnInfo.ZERO_LENGTH_COLUMN_ATTRIBUTES_ARRAY)); } return result; } @@ -490,8 +474,7 @@ public static QueryTable getTable(int size, Random random, ColumnInfo columnInfo return getTable(true, size, random, columnInfos); } - public static QueryTable getTable(boolean refreshing, int size, Random random, - ColumnInfo columnInfos[]) { + public static QueryTable getTable(boolean refreshing, int size, Random random, ColumnInfo columnInfos[]) { final Index index = getInitialIndex(size, random); for (ColumnInfo columnInfo : columnInfos) { columnInfo.populateMap(index, random); @@ -538,7 +521,7 @@ public static QueryTable testFlatRefreshingTable(Index index, ColumnHolder... 
co public static QueryTable testRefreshingTable(ColumnHolder... columnHolders) { final Index index = columnHolders.length == 0 ? Index.FACTORY.getEmptyIndex() - : Index.FACTORY.getFlatIndex(Array.getLength(columnHolders[0].data)); + : Index.FACTORY.getFlatIndex(Array.getLength(columnHolders[0].data)); final Map columns = new LinkedHashMap<>(); for (ColumnHolder columnHolder : columnHolders) { columns.put(columnHolder.name, getTreeMapColumnSource(index, columnHolder)); @@ -555,8 +538,7 @@ public static ColumnSource getTreeMapColumnSource(Index index, ColumnHolder colu if (columnHolder instanceof ImmutableColumnHolder) { // noinspection unchecked result = new ImmutableTreeMapSource(columnHolder.dataType, index, boxedData); - } else if (columnHolder.dataType.equals(DBDateTime.class) - && columnHolder.data instanceof long[]) { + } else if (columnHolder.dataType.equals(DBDateTime.class) && columnHolder.data instanceof long[]) { result = new DateTimeTreeMapSource(index, (long[]) columnHolder.data); } else { // noinspection unchecked @@ -584,8 +566,7 @@ public static Table prevTable(Table table) { final Index index = table.getIndex().getPrevIndex(); final List> cols = new ArrayList<>(); - for (Map.Entry mapEntry : table.getColumnSourceMap() - .entrySet()) { + for (Map.Entry mapEntry : table.getColumnSourceMap().entrySet()) { final String name = mapEntry.getKey(); final ColumnSource columnSource = mapEntry.getValue(); final List data = new ArrayList<>(); @@ -597,28 +578,22 @@ public static Table prevTable(Table table) { } if (columnSource.getType() == int.class) { - cols.add(new ColumnHolder<>(name, false, - data.stream() - .mapToInt( - x -> x == null ? io.deephaven.util.QueryConstants.NULL_INT : (int) x) - .toArray())); + cols.add(new ColumnHolder<>(name, false, data.stream() + .mapToInt(x -> x == null ? 
io.deephaven.util.QueryConstants.NULL_INT : (int) x).toArray())); } else if (columnSource.getType() == long.class) { - cols.add(new ColumnHolder<>(name, false, - data.stream() - .mapToLong( - x -> x == null ? io.deephaven.util.QueryConstants.NULL_LONG : (long) x) - .toArray())); + cols.add(new ColumnHolder<>(name, false, data.stream() + .mapToLong(x -> x == null ? io.deephaven.util.QueryConstants.NULL_LONG : (long) x).toArray())); } else if (columnSource.getType() == boolean.class) { cols.add(ColumnHolder.createColumnHolder(name, false, - data.stream().map(x -> (Boolean) x).toArray(Boolean[]::new))); + data.stream().map(x -> (Boolean) x).toArray(Boolean[]::new))); } else if (columnSource.getType() == String.class) { cols.add(ColumnHolder.createColumnHolder(name, false, - data.stream().map(x -> (String) x).toArray(String[]::new))); + data.stream().map(x -> (String) x).toArray(String[]::new))); } else if (columnSource.getType() == double.class) { cols.add(new ColumnHolder<>(name, false, - data.stream().mapToDouble( - x -> x == null ? io.deephaven.util.QueryConstants.NULL_DOUBLE : (double) x) - .toArray())); + data.stream() + .mapToDouble(x -> x == null ? 
io.deephaven.util.QueryConstants.NULL_DOUBLE : (double) x) + .toArray())); } else if (columnSource.getType() == float.class) { final float[] floatArray = new float[data.size()]; for (int ii = 0; ii < data.size(); ++ii) { @@ -648,9 +623,8 @@ public static Table prevTable(Table table) { } cols.add(new ColumnHolder<>(name, false, shortArray)); } else { - cols.add(new ColumnHolder(name, columnSource.getType(), - columnSource.getComponentType(), false, data.toArray( - (Object[]) Array.newInstance(columnSource.getType(), data.size())))); + cols.add(new ColumnHolder(name, columnSource.getType(), columnSource.getComponentType(), false, + data.toArray((Object[]) Array.newInstance(columnSource.getType(), data.size())))); } } @@ -677,8 +651,7 @@ default void onMove(long oldKey, long newKey, U moved) {} public static abstract class AbstractReinterpretedGenerator implements Generator { @Override - public TreeMap populateMap(final TreeMap values, final Index toAdd, - final Random random) { + public TreeMap populateMap(final TreeMap values, final Index toAdd, final Random random) { final TreeMap result = new TreeMap<>(); toAdd.forAllLongs((final long nextKey) -> { final U value = nextValue(values, nextKey, random); @@ -821,9 +794,8 @@ public Class getType() { public static class BigIntegerGenerator extends AbstractGenerator { private static final BigInteger DEFAULT_FROM = - BigInteger.valueOf(Long.MIN_VALUE).multiply(BigInteger.valueOf(2)); - private static final BigInteger DEFAULT_TO = - BigInteger.valueOf(Long.MAX_VALUE).multiply(BigInteger.valueOf(2)); + BigInteger.valueOf(Long.MIN_VALUE).multiply(BigInteger.valueOf(2)); + private static final BigInteger DEFAULT_TO = BigInteger.valueOf(Long.MAX_VALUE).multiply(BigInteger.valueOf(2)); private final BigInteger to, from; private final double nullFraction; @@ -836,7 +808,7 @@ public BigIntegerGenerator() { public BigIntegerGenerator(double nullFraction) { this(BigInteger.valueOf(Long.MIN_VALUE).multiply(BigInteger.valueOf(2)), - 
BigInteger.valueOf(Long.MAX_VALUE).multiply(BigInteger.valueOf(2)), 0); + BigInteger.valueOf(Long.MAX_VALUE).multiply(BigInteger.valueOf(2)), 0); } public BigIntegerGenerator(BigInteger from, BigInteger to) { @@ -867,10 +839,9 @@ public BigInteger nextValue(TreeMap values, long key, Random r final BigInteger result = value.add(from); - Assert.assertion(result.compareTo(from) >= 0, "result.compareTo(from) >= 0", result, - "result", from, "from"); - Assert.assertion(result.compareTo(to) <= 0, "result.compareTo(to) <= 0", result, - "result", to, "to"); + Assert.assertion(result.compareTo(from) >= 0, "result.compareTo(from) >= 0", result, "result", from, + "from"); + Assert.assertion(result.compareTo(to) <= 0, "result.compareTo(to) <= 0", result, "result", to, "to"); return result; } @@ -884,10 +855,8 @@ public Class getType() { public static class BigDecimalGenerator extends AbstractGenerator { - static final BigInteger DEFAULT_FROM = - BigInteger.valueOf(Long.MIN_VALUE).multiply(BigInteger.valueOf(2)); - static final BigInteger DEFAULT_TO = - BigInteger.valueOf(Long.MAX_VALUE).multiply(BigInteger.valueOf(2)); + static final BigInteger DEFAULT_FROM = BigInteger.valueOf(Long.MIN_VALUE).multiply(BigInteger.valueOf(2)); + static final BigInteger DEFAULT_TO = BigInteger.valueOf(Long.MAX_VALUE).multiply(BigInteger.valueOf(2)); private final BigInteger to, from; private final BigDecimal toDecimal, fromDecimal; private final double nullFraction; @@ -907,8 +876,7 @@ public BigDecimalGenerator(BigInteger from, BigInteger to) { this(from, to, 10, 0); } - public BigDecimalGenerator(BigInteger from, BigInteger to, int decimalPlaces, - double nullFraction) { + public BigDecimalGenerator(BigInteger from, BigInteger to, int decimalPlaces, double nullFraction) { this.from = from; this.to = to; this.nullFraction = nullFraction; @@ -937,10 +905,9 @@ public BigDecimal nextValue(TreeMap values, long key, Random r final BigDecimal result = new BigDecimal(value, 
decimalPlaces).add(fromDecimal); - Assert.assertion(result.compareTo(fromDecimal) >= 0, "result.compareTo(from) >= 0", - result, "result", from, "from"); - Assert.assertion(result.compareTo(toDecimal) <= 0, "result.compareTo(to) <= 0", result, - "result", to, "to"); + Assert.assertion(result.compareTo(fromDecimal) >= 0, "result.compareTo(from) >= 0", result, "result", from, + "from"); + Assert.assertion(result.compareTo(toDecimal) <= 0, "result.compareTo(to) <= 0", result, "result", to, "to"); return result; } @@ -1124,8 +1091,8 @@ public DoubleGenerator(double from, double to, double nullFraction, double nanFr this(from, to, nullFraction, nanFraction, 0, 0); } - public DoubleGenerator(double from, double to, double nullFraction, double nanFraction, - double negInfFraction, double posInfFraction) { + public DoubleGenerator(double from, double to, double nullFraction, double nanFraction, double negInfFraction, + double posInfFraction) { this.from = from; this.to = to; this.nullFraction = nullFraction; @@ -1133,7 +1100,7 @@ public DoubleGenerator(double from, double to, double nullFraction, double nanFr this.negInfFraction = negInfFraction; this.posInfFraction = posInfFraction; Require.leq(nullFraction + nanFraction + negInfFraction + posInfFraction, - "nullFraction + nanFraction + negInfFraction + posInfFraction", 1.0, "1.0"); + "nullFraction + nanFraction + negInfFraction + posInfFraction", 1.0, "1.0"); } @Override @@ -1153,8 +1120,7 @@ public Double nextValue(TreeMap values, long key, Random random) { return Double.NEGATIVE_INFINITY; } - if (posInfFraction > 0 - && frac < (nullFraction + nanFraction + negInfFraction + posInfFraction)) { + if (posInfFraction > 0 && frac < (nullFraction + nanFraction + negInfFraction + posInfFraction)) { return Double.POSITIVE_INFINITY; } } @@ -1191,8 +1157,8 @@ public FloatGenerator(float from, float to, double nullFraction, double nanFract this(from, to, nullFraction, nanFraction, 0, 0); } - public FloatGenerator(float from, float 
to, double nullFraction, double nanFraction, - double negInfFraction, double posInfFraction) { + public FloatGenerator(float from, float to, double nullFraction, double nanFraction, double negInfFraction, + double posInfFraction) { this.from = from; this.to = to; this.nullFraction = nullFraction; @@ -1200,7 +1166,7 @@ public FloatGenerator(float from, float to, double nullFraction, double nanFract this.negInfFraction = negInfFraction; this.posInfFraction = posInfFraction; Require.leq(nullFraction + nanFraction + negInfFraction + posInfFraction, - "nullFraction + nanFraction + negInfFraction + posInfFraction", 1.0, "1.0"); + "nullFraction + nanFraction + negInfFraction + posInfFraction", 1.0, "1.0"); } @Override @@ -1220,8 +1186,7 @@ public Float nextValue(TreeMap values, long key, Random random) { return Float.NEGATIVE_INFINITY; } - if (posInfFraction > 0 - && frac < (nullFraction + nanFraction + negInfFraction + posInfFraction)) { + if (posInfFraction > 0 && frac < (nullFraction + nanFraction + negInfFraction + posInfFraction)) { return Float.POSITIVE_INFINITY; } } @@ -1356,8 +1321,7 @@ BigInteger makeValue(BigInteger floor, BigInteger ceiling, Random random) { candidate = null; } if (candidate == null) { - throw new RuntimeException( - String.format("Couldn't find a suitable BigInteger between %s and %s", + throw new RuntimeException(String.format("Couldn't find a suitable BigInteger between %s and %s", floor, ceiling)); } return floor.add(candidate); @@ -1469,8 +1433,7 @@ DBDateTime nextValue(TreeMap values, long key, Random random) } } - public static class UnsortedDateTimeLongGenerator - extends AbstractReinterpretedGenerator { + public static class UnsortedDateTimeLongGenerator extends AbstractReinterpretedGenerator { private final DBDateTime minTime; private final DBDateTime maxTime; private final double nullFrac; @@ -1479,8 +1442,7 @@ public UnsortedDateTimeLongGenerator(DBDateTime minTime, DBDateTime maxTime) { this(minTime, maxTime, 0); } - public 
UnsortedDateTimeLongGenerator(DBDateTime minTime, DBDateTime maxTime, - double nullFrac) { + public UnsortedDateTimeLongGenerator(DBDateTime minTime, DBDateTime maxTime, double nullFrac) { this.minTime = minTime; this.maxTime = maxTime; this.nullFrac = nullFrac; @@ -1537,8 +1499,7 @@ Date makeValue(Date floor, Date ceiling, Random random) { if (ceiling.getTime() < floor.getTime()) { throw new IllegalStateException("ceiling < floor: " + ceiling + " > " + floor); } - return new Date( - floor.getTime() + random.nextInt((int) (ceiling.getTime() - floor.getTime() + 1))); + return new Date(floor.getTime() + random.nextInt((int) (ceiling.getTime() - floor.getTime() + 1))); } @Override @@ -1547,8 +1508,7 @@ public Class getType() { } } - static abstract class AbstractSortedGenerator> - implements Generator { + static abstract class AbstractSortedGenerator> implements Generator { public TreeMap populateMap(TreeMap values, Index toAdd, Random random) { final TreeMap result = new TreeMap<>(); if (toAdd.size() == 0) @@ -1565,8 +1525,7 @@ public TreeMap populateMap(TreeMap values, Index toAdd, Random T currentFloor = firstFloorEntry == null ? minValue() : firstFloorEntry.getValue(); final Map.Entry firstCeilingEntry = values.ceilingEntry(firstKey); - T currentCeiling = - firstCeilingEntry == null ? maxValue() : firstCeilingEntry.getValue(); + T currentCeiling = firstCeilingEntry == null ? maxValue() : firstCeilingEntry.getValue(); while (iterator.hasNext()) { final long nextKey = iterator.nextLong(); @@ -1577,11 +1536,9 @@ public TreeMap populateMap(TreeMap values, Index toAdd, Random final T ceiling = ceilingEntry == null ? 
maxValue() : ceilingEntry.getValue(); if (!ceiling.equals(currentCeiling) || !floor.equals(currentFloor)) { - // we're past the end of the last run so we need to generate the values for the - // map - generateValues( - toAdd.intersect(Index.FACTORY.getIndexByRange(firstKey, lastKey)), - currentFloor, currentCeiling, result, random); + // we're past the end of the last run so we need to generate the values for the map + generateValues(toAdd.intersect(Index.FACTORY.getIndexByRange(firstKey, lastKey)), currentFloor, + currentCeiling, result, random); firstKey = nextKey; currentFloor = floor; currentCeiling = ceiling; @@ -1589,8 +1546,8 @@ public TreeMap populateMap(TreeMap values, Index toAdd, Random lastKey = nextKey; } - generateValues(toAdd.intersect(Index.FACTORY.getIndexByRange(firstKey, lastKey)), - currentFloor, currentCeiling, result, random); + generateValues(toAdd.intersect(Index.FACTORY.getIndexByRange(firstKey, lastKey)), currentFloor, + currentCeiling, result, random); values.putAll(result); @@ -1607,14 +1564,13 @@ private void checkSorted(TreeMap values) { T lastValue = minValue(); for (Map.Entry valueEntry : values.entrySet()) { final T value = valueEntry.getValue(); - Assert.assertion(value.compareTo(lastValue) >= 0, "value >= lastValue", value, - "value", lastValue, "lastValue", valueEntry.getKey(), "valueEntry.getKey"); + Assert.assertion(value.compareTo(lastValue) >= 0, "value >= lastValue", value, "value", lastValue, + "lastValue", valueEntry.getKey(), "valueEntry.getKey"); lastValue = value; } } - private void generateValues(Index toadd, T floor, T ceiling, TreeMap result, - Random random) { + private void generateValues(Index toadd, T floor, T ceiling, TreeMap result, Random random) { final int count = (int) toadd.size(); // noinspection unchecked final T[] values = (T[]) Array.newInstance(getType(), count); @@ -1660,8 +1616,7 @@ public TreeMap populateMap(TreeMap values, Index toAdd, Random return result; } - // TODO: update the callers so that 
as we remove rows, we also remove them from the - // usedValues set; + // TODO: update the callers so that as we remove rows, we also remove them from the usedValues set; // otherwise we can exhaust the set more easily than we should during an incremental update. T getNextUniqueValue(Set usedValues, TreeMap values, long key, Random random) { T candidate; @@ -1872,8 +1827,7 @@ static class UniqueSmartKeyGenerator extends AbstractUniqueGenerator { @Override public SmartKey nextValue(TreeMap values, long key, Random random) { // noinspection unchecked - return new SmartKey( - Arrays.stream(generators).map(g -> g.nextValue(null, key, random)).toArray()); + return new SmartKey(Arrays.stream(generators).map(g -> g.nextValue(null, key, random)).toArray()); } @Override @@ -1892,8 +1846,7 @@ static class SmartKeyGenerator extends AbstractGenerator { @Override public SmartKey nextValue(TreeMap values, long key, Random random) { // noinspection unchecked - return new SmartKey( - Arrays.stream(generators).map(g -> g.nextValue(null, key, random)).toArray()); + return new SmartKey(Arrays.stream(generators).map(g -> g.nextValue(null, key, random)).toArray()); } @Override @@ -1903,31 +1856,27 @@ public Class getType() { } static class FromUniqueStringGenerator extends AbstractFromUniqueGenerator { - FromUniqueStringGenerator(UniqueStringGenerator uniqueStringGenerator, - double existingFraction) { + FromUniqueStringGenerator(UniqueStringGenerator uniqueStringGenerator, double existingFraction) { this(uniqueStringGenerator, existingFraction, new StringGenerator()); } - FromUniqueStringGenerator(UniqueStringGenerator uniqueStringGenerator, - double existingFraction, AbstractGenerator defaultGenerator) { - super(String.class, uniqueStringGenerator, defaultGenerator, String[]::new, - existingFraction); + FromUniqueStringGenerator(UniqueStringGenerator uniqueStringGenerator, double existingFraction, + AbstractGenerator defaultGenerator) { + super(String.class, uniqueStringGenerator, 
defaultGenerator, String[]::new, existingFraction); } } static class FromUniqueSmartKeyGenerator extends AbstractFromUniqueGenerator { - FromUniqueSmartKeyGenerator(UniqueSmartKeyGenerator uniqueSmartKeyGenerator, - SmartKeyGenerator defaultGenerator, double existingFraction) { - super(SmartKey.class, uniqueSmartKeyGenerator, defaultGenerator, SmartKey[]::new, - existingFraction); + FromUniqueSmartKeyGenerator(UniqueSmartKeyGenerator uniqueSmartKeyGenerator, SmartKeyGenerator defaultGenerator, + double existingFraction) { + super(SmartKey.class, uniqueSmartKeyGenerator, defaultGenerator, SmartKey[]::new, existingFraction); } } static class FromUniqueIntGenerator extends AbstractFromUniqueGenerator { - FromUniqueIntGenerator(UniqueIntGenerator uniqueSmartKeyGenerator, - IntGenerator defaultGenerator, double existingFraction) { - super(Integer.class, uniqueSmartKeyGenerator, defaultGenerator, Integer[]::new, - existingFraction); + FromUniqueIntGenerator(UniqueIntGenerator uniqueSmartKeyGenerator, IntGenerator defaultGenerator, + double existingFraction) { + super(Integer.class, uniqueSmartKeyGenerator, defaultGenerator, Integer[]::new, existingFraction); } } @@ -1939,8 +1888,7 @@ static class CompositeGenerator implements Generator { CompositeGenerator(List> generators, double... fractions) { if (fractions.length != generators.size() - 1) { - throw new IllegalArgumentException( - "Generators must have one more element than fractions!"); + throw new IllegalArgumentException("Generators must have one more element than fractions!"); } final double sum = Arrays.stream(fractions).sum(); if (sum > 1.0) { @@ -1949,12 +1897,12 @@ static class CompositeGenerator implements Generator { final Generator firstGenerator = generators.get(0); for (Generator generator : generators) { if (!generator.getType().equals(firstGenerator.getType())) { - throw new IllegalArgumentException("Mismatched generator types: " - + generator.getType() + " vs. 
" + firstGenerator.getType()); + throw new IllegalArgumentException( + "Mismatched generator types: " + generator.getType() + " vs. " + firstGenerator.getType()); } if (!generator.getColumnType().equals(firstGenerator.getColumnType())) { - throw new IllegalArgumentException("Mismatched generator column types: " - + generator.getType() + " vs. " + firstGenerator.getType()); + throw new IllegalArgumentException("Mismatched generator column types: " + generator.getType() + + " vs. " + firstGenerator.getType()); } } this.generators = generators; @@ -1968,8 +1916,7 @@ public TreeMap populateMap(TreeMap values, Index toAdd, Random return result; } - final Index.SequentialBuilder[] builders = - new Index.SequentialBuilder[generators.size()]; + final Index.SequentialBuilder[] builders = new Index.SequentialBuilder[generators.size()]; for (int ii = 0; ii < builders.length; ++ii) { builders[ii] = Index.FACTORY.getSequentialBuilder(); } @@ -2022,8 +1969,7 @@ static class AbstractFromUniqueGenerator extends AbstractGenerator { T[] lastValues; AbstractFromUniqueGenerator(Class type, AbstractUniqueGenerator uniqueStringGenerator, - AbstractGenerator defaultGenerator, IntFunction arrayFactory, - double existingFraction) { + AbstractGenerator defaultGenerator, IntFunction arrayFactory, double existingFraction) { this.type = type; this.uniqueGenerator = uniqueStringGenerator; this.defaultGenerator = defaultGenerator; @@ -2036,8 +1982,7 @@ public T nextValue(TreeMap values, long key, Random random) { if (random.nextDouble() < existingFraction) { final int size = uniqueGenerator.getGeneratedValues().size(); if (size != lastSize) { - lastValues = - uniqueGenerator.getGeneratedValues().stream().toArray(arrayFactory); + lastValues = uniqueGenerator.getGeneratedValues().stream().toArray(arrayFactory); lastSize = lastValues.length; } if (size > 0) { @@ -2070,12 +2015,12 @@ enum ColAttributes { public ColumnInfo(Generator generator, String name, ColAttributes... 
colAttributes) { this(generator.getType(), generator.getColumnType(), generator, name, - Arrays.asList(colAttributes).contains(ColAttributes.Immutable), - Arrays.asList(colAttributes).contains(ColAttributes.Grouped), new TreeMap<>()); + Arrays.asList(colAttributes).contains(ColAttributes.Immutable), + Arrays.asList(colAttributes).contains(ColAttributes.Grouped), new TreeMap<>()); } - private ColumnInfo(Class dataType, Class type, Generator generator, String name, - boolean immutable, boolean grouped, TreeMap data) { + private ColumnInfo(Class dataType, Class type, Generator generator, String name, boolean immutable, + boolean grouped, TreeMap data) { this.dataType = dataType; this.type = type; this.generator = generator; @@ -2094,13 +2039,11 @@ public ColumnHolder c() { if (dataType == Long.class && type == DBDateTime.class) { Require.eqFalse(immutable, "immutable"); Require.eqFalse(grouped, "grouped"); - final long[] dataArray = - data.values().stream().map(x -> (Long) x).mapToLong(x -> x).toArray(); + final long[] dataArray = data.values().stream().map(x -> (Long) x).mapToLong(x -> x).toArray(); return ColumnHolder.getDateTimeColumnHolder(name, false, dataArray); } - final U[] dataArray = - data.values().toArray((U[]) Array.newInstance(dataType, data.size())); + final U[] dataArray = data.values().toArray((U[]) Array.newInstance(dataType, data.size())); if (immutable) { return new ImmutableColumnHolder<>(name, dataType, null, grouped, dataArray); } else if (grouped) { @@ -2123,8 +2066,7 @@ public void move(long from, long to) { ColumnHolder populateMapAndC(Index keysToModify, Random random) { final Collection newValues = populateMap(keysToModify, random).values(); // noinspection unchecked - final U[] valueArray = - newValues.toArray((U[]) Array.newInstance(dataType, newValues.size())); + final U[] valueArray = newValues.toArray((U[]) Array.newInstance(dataType, newValues.size())); if (grouped) { return TstUtils.cG(name, valueArray); } else { @@ -2197,26 +2139,25 
@@ public void assertNotInvoked() { } } - public static void assertIndexEquals(@NotNull final Index expected, - @NotNull final Index actual) { + public static void assertIndexEquals(@NotNull final Index expected, @NotNull final Index actual) { try { TestCase.assertEquals(expected, actual); } catch (AssertionFailedError error) { System.err.println("Index equality check failed:" - + "\n\texpected: " + expected.toString() - + "\n]tactual: " + actual.toString() - + "\n]terror: " + error); + + "\n\texpected: " + expected.toString() + + "\n]tactual: " + actual.toString() + + "\n]terror: " + error); throw error; } } public static void assertTableEquals(@NotNull final Table expected, @NotNull final Table actual, - final TableDiff.DiffItems... itemsToSkip) { + final TableDiff.DiffItems... itemsToSkip) { assertTableEquals("", expected, actual, itemsToSkip); } public static void assertTableEquals(final String context, @NotNull final Table expected, - @NotNull final Table actual, final TableDiff.DiffItems... itemsToSkip) { + @NotNull final Table actual, final TableDiff.DiffItems... 
itemsToSkip) { if (itemsToSkip.length > 0) { assertTableEquals(context, expected, actual, EnumSet.of(itemsToSkip[0], itemsToSkip)); } else { @@ -2225,7 +2166,7 @@ public static void assertTableEquals(final String context, @NotNull final Table } public static void assertTableEquals(final String context, @NotNull final Table expected, - @NotNull final Table actual, final EnumSet itemsToSkip) { + @NotNull final Table actual, final EnumSet itemsToSkip) { final Pair diffPair = TableTools.diffPair(actual, expected, 10, itemsToSkip); if (diffPair.getFirst().equals("")) { return; @@ -2249,29 +2190,25 @@ public static void assertTableEquals(final String context, @NotNull final Table } final String actualString = baos.toString(); - throw new ComparisonFailure(context + "\n" + diffPair.getFirst(), expectedString, - actualString); + throw new ComparisonFailure(context + "\n" + diffPair.getFirst(), expectedString, actualString); } catch (IOException e) { throw new RuntimeException(e); } } /** - * Reruns test cases trying new seeds while minimizing the number of steps to catch the failing - * test. The test should mutate the provided MutableInt so that when it fails it is equal to the - * current step iteration. This allows the test to minimize the total number of steps per seed - * as it discovers better candidate parameters. + * Reruns test cases trying new seeds while minimizing the number of steps to catch the failing test. The test + * should mutate the provided MutableInt so that when it fails it is equal to the current step iteration. This + * allows the test to minimize the total number of steps per seed as it discovers better candidate parameters. * * @param test A test instance to tearDown and setUp between each test. * @param initialSeed The seed to begin using. * @param maxSeed The highest seed to try. * @param initialSteps Number of steps to start with. 
- * @param runner A method whose first param is the random seed to use, and second parameter is - * the number of steps. + * @param runner A method whose first param is the random seed to use, and second parameter is the number of steps. */ - public static void findMinimalTestCase(final LiveTableTestCase test, final int initialSeed, - final int maxSeed, - final int initialSteps, final BiConsumer runner) { + public static void findMinimalTestCase(final LiveTableTestCase test, final int initialSeed, final int maxSeed, + final int initialSteps, final BiConsumer runner) { final boolean origPrintTableUpdates = LiveTableTestCase.printTableUpdates; LiveTableTestCase.printTableUpdates = false; @@ -2284,8 +2221,8 @@ public static void findMinimalTestCase(final LiveTableTestCase test, final int i System.out.println("Best Run: bestSeed=" + bestSeed + " bestSteps=" + bestSteps); return; } - System.out.println("Running: seed=" + seed + " numSteps=" + maxSteps.intValue() - + " bestSeed=" + bestSeed + " bestSteps=" + bestSteps); + System.out.println("Running: seed=" + seed + " numSteps=" + maxSteps.intValue() + " bestSeed=" + bestSeed + + " bestSteps=" + bestSteps); if (seed != initialSeed) { try { test.tearDown(); @@ -2307,8 +2244,7 @@ public static void findMinimalTestCase(final LiveTableTestCase test, final int i bestSeed = seed; bestSteps = maxSteps.intValue() + 1; e.printStackTrace(); - System.out - .println("Candidate: seed=" + seed + " numSteps=" + (maxSteps.intValue() + 1)); + System.out.println("Candidate: seed=" + seed + " numSteps=" + (maxSteps.intValue() + 1)); } } diff --git a/DB/src/test/java/io/deephaven/db/v2/UpdateValidatorNugget.java b/DB/src/test/java/io/deephaven/db/v2/UpdateValidatorNugget.java index fb54eae3f9a..e73e99b3113 100644 --- a/DB/src/test/java/io/deephaven/db/v2/UpdateValidatorNugget.java +++ b/DB/src/test/java/io/deephaven/db/v2/UpdateValidatorNugget.java @@ -33,20 +33,18 @@ public UpdateValidatorNugget(final QueryTable table) { private Throwable 
exception = null; // We should listen for failures on the table, and if we get any, the test case is no good. - private final ShiftAwareListener failureListener = - new InstrumentedShiftAwareListener("Failure Listener") { - @Override - public void onUpdate(Update update) {} - - @Override - public void onFailureInternal(Throwable originalException, - UpdatePerformanceTracker.Entry sourceEntry) { - exception = originalException; - final StringWriter errors = new StringWriter(); - originalException.printStackTrace(new PrintWriter(errors)); - TestCase.fail(errors.toString()); - } - }; + private final ShiftAwareListener failureListener = new InstrumentedShiftAwareListener("Failure Listener") { + @Override + public void onUpdate(Update update) {} + + @Override + public void onFailureInternal(Throwable originalException, UpdatePerformanceTracker.Entry sourceEntry) { + exception = originalException; + final StringWriter errors = new StringWriter(); + originalException.printStackTrace(new PrintWriter(errors)); + TestCase.fail(errors.toString()); + } + }; public void validate(final String msg) { Assert.assertNull(exception); diff --git a/DB/src/test/java/io/deephaven/db/v2/by/TestSortedFirstOrLastByFactory.java b/DB/src/test/java/io/deephaven/db/v2/by/TestSortedFirstOrLastByFactory.java index 060d37d3ea6..33e06584691 100644 --- a/DB/src/test/java/io/deephaven/db/v2/by/TestSortedFirstOrLastByFactory.java +++ b/DB/src/test/java/io/deephaven/db/v2/by/TestSortedFirstOrLastByFactory.java @@ -30,13 +30,12 @@ @Category(OutOfBandTest.class) public class TestSortedFirstOrLastByFactory extends LiveTableTestCase { - private static final boolean ENABLE_COMPILER_TOOLS_LOGGING = - Configuration.getInstance().getBooleanForClassWithDefault( - TestSortedFirstOrLastByFactory.class, "CompilerTools.logEnabled", false); + private static final boolean ENABLE_COMPILER_TOOLS_LOGGING = Configuration.getInstance() + .getBooleanForClassWithDefault(TestSortedFirstOrLastByFactory.class, 
"CompilerTools.logEnabled", false); private static final String[] colNames = new String[] {"Sym", "intCol", "doubleCol", "Keys"}; private static final boolean printTableUpdates = Configuration.getInstance() - .getBooleanForClassWithDefault(LiveTableTestCase.class, "printTableUpdates", false); + .getBooleanForClassWithDefault(LiveTableTestCase.class, "printTableUpdates", false); private boolean oldLogEnabled; private boolean oldCheckLtm; @@ -78,45 +77,39 @@ private void incrementalTest(int seed, int size, final String... sortColumns) { final Random random = new Random(seed); final TstUtils.ColumnInfo[] columnInfo; final QueryTable queryTable = getTable(size, random, columnInfo = initColumnInfos( - colNames, - new TstUtils.SetGenerator<>("a", "b", "c", "d"), - new TstUtils.IntGenerator(10, 100, 0.1), - new TstUtils.SetGenerator<>(10.1, 20.1, 30.1), - new TstUtils.SortedLongGenerator(0, Integer.MAX_VALUE))); + colNames, + new TstUtils.SetGenerator<>("a", "b", "c", "d"), + new TstUtils.IntGenerator(10, 100, 0.1), + new TstUtils.SetGenerator<>(10.1, 20.1, 30.1), + new TstUtils.SortedLongGenerator(0, Integer.MAX_VALUE))); if (printTableUpdates) { showWithIndex(queryTable); } final EvalNuggetInterface[] en = new EvalNuggetInterface[] { - EvalNugget - .from(() -> SortedBy.sortedFirstBy(queryTable.update("x=Keys"), sortColumns)), - EvalNugget - .from(() -> SortedBy.sortedLastBy(queryTable.update("x=Keys"), sortColumns)), + EvalNugget.from(() -> SortedBy.sortedFirstBy(queryTable.update("x=Keys"), sortColumns)), + EvalNugget.from(() -> SortedBy.sortedLastBy(queryTable.update("x=Keys"), sortColumns)), new QueryTableTest.TableComparator( - queryTable.sort(sortColumns).head(1), - SortedBy.sortedFirstBy(queryTable, sortColumns)), + queryTable.sort(sortColumns).head(1), + SortedBy.sortedFirstBy(queryTable, sortColumns)), new QueryTableTest.TableComparator( - SortedBy.sortedLastBy(queryTable, sortColumns), - queryTable.sort(sortColumns).tail(1)), - EvalNugget.Sorted.from( - () -> 
SortedBy.sortedFirstBy(queryTable.update("x=Keys"), sortColumns, "Sym"), - "Sym"), - EvalNugget.Sorted.from( - () -> SortedBy.sortedLastBy(queryTable.update("x=Keys"), sortColumns, "Sym"), - "Sym"), + SortedBy.sortedLastBy(queryTable, sortColumns), + queryTable.sort(sortColumns).tail(1)), + EvalNugget.Sorted.from(() -> SortedBy.sortedFirstBy(queryTable.update("x=Keys"), sortColumns, "Sym"), + "Sym"), + EvalNugget.Sorted.from(() -> SortedBy.sortedLastBy(queryTable.update("x=Keys"), sortColumns, "Sym"), + "Sym"), new QueryTableTest.TableComparator( - queryTable.sort(sortColumns).firstBy("Sym").sort("Sym"), - SortedBy.sortedFirstBy(queryTable, sortColumns, "Sym").sort("Sym")), + queryTable.sort(sortColumns).firstBy("Sym").sort("Sym"), + SortedBy.sortedFirstBy(queryTable, sortColumns, "Sym").sort("Sym")), new QueryTableTest.TableComparator( - queryTable.sort(sortColumns).lastBy("Sym").sort("Sym"), - queryTable - .by(AggCombo(AggSortedLast(sortColumns, "intCol", "doubleCol", "Keys")), - "Sym") - .sort("Sym")) + queryTable.sort(sortColumns).lastBy("Sym").sort("Sym"), + queryTable.by(AggCombo(AggSortedLast(sortColumns, "intCol", "doubleCol", "Keys")), "Sym") + .sort("Sym")) }; for (int step = 0; step < 100; step++) { if (LiveTableTestCase.printTableUpdates) { - System.out.println("Size = " + size + ", Seed = " + seed + ", Step = " + step - + ", sortColumns=" + Arrays.toString(sortColumns)); + System.out.println("Size = " + size + ", Seed = " + seed + ", Step = " + step + ", sortColumns=" + + Arrays.toString(sortColumns)); } simulateShiftAwareStep(size, random, queryTable, columnInfo, en); } @@ -124,9 +117,9 @@ private void incrementalTest(int seed, int size, final String... 
sortColumns) { public void testIds6445() { - final QueryTable source = TstUtils.testRefreshingTable(Index.FACTORY.getFlatIndex(5), - intCol("SFB", 2, 1, 2, 1, 2), intCol("Sentinel", 1, 2, 3, 4, 5), - col("DummyBucket", "A", "A", "A", "A", "A")); + final QueryTable source = + TstUtils.testRefreshingTable(Index.FACTORY.getFlatIndex(5), intCol("SFB", 2, 1, 2, 1, 2), + intCol("Sentinel", 1, 2, 3, 4, 5), col("DummyBucket", "A", "A", "A", "A", "A")); // final FuzzerPrintListener pl = new FuzzerPrintListener("source", source); // source.listenForUpdates(pl); @@ -150,8 +143,7 @@ public void testIds6445() { TestCase.assertEquals(2, sfb.getColumn("Sentinel").get(0)); TestCase.assertEquals(2, bucketed.getColumn("Sentinel").get(0)); - // this part is the original bug, if we didn't change the actual value of the redirection - // index; because the + // this part is the original bug, if we didn't change the actual value of the redirection index; because the // shift modify combination left it at the same index; we would not notice the mdoification LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { final ShiftAwareListener.Update update = new ShiftAwareListener.Update(); @@ -163,8 +155,7 @@ public void testIds6445() { update.modifiedColumnSet.setAll("SFB"); addToTable(source, Index.FACTORY.getFlatIndex(6), intCol("SFB", 3, 2, 3, 2, 3, 2), - intCol("Sentinel", 6, 1, 2, 3, 4, 5), - col("DummyBucket", "A", "A", "A", "A", "A", "A")); + intCol("Sentinel", 6, 1, 2, 3, 4, 5), col("DummyBucket", "A", "A", "A", "A", "A", "A")); final IndexShiftData.Builder sb = new IndexShiftData.Builder(); sb.shiftRange(0, 4, 1); @@ -177,8 +168,8 @@ public void testIds6445() { tuvsfb.deepValidation(); tuvbuck.deepValidation(); - // i'm concerned that if we really modify a row, but we don't detect it in the shift, so - // here we are just shifting without modifications + // i'm concerned that if we really modify a row, but we don't detect it in the shift, so here we are just + // shifting without 
modifications LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { final ShiftAwareListener.Update update = new ShiftAwareListener.Update(); update.added = Index.FACTORY.getIndexByValues(0); @@ -189,8 +180,7 @@ public void testIds6445() { update.modifiedColumnSet.setAll("SFB"); addToTable(source, Index.FACTORY.getFlatIndex(7), intCol("SFB", 4, 3, 2, 3, 2, 3, 2), - intCol("Sentinel", 7, 6, 1, 2, 3, 4, 5), - col("DummyBucket", "A", "A", "A", "A", "A", "A", "A")); + intCol("Sentinel", 7, 6, 1, 2, 3, 4, 5), col("DummyBucket", "A", "A", "A", "A", "A", "A", "A")); final IndexShiftData.Builder sb = new IndexShiftData.Builder(); sb.shiftRange(0, 5, 1); @@ -216,8 +206,8 @@ public void testIds6445() { update.modifiedColumnSet.setAll("Sentinel"); addToTable(source, Index.FACTORY.getFlatIndex(8), intCol("SFB", 4, 4, 3, 2, 3, 2, 3, 2), - intCol("Sentinel", 8, 7, 6, 9, 2, 3, 4, 5), - col("DummyBucket", "A", "A", "A", "A", "A", "A", "A", "A")); + intCol("Sentinel", 8, 7, 6, 9, 2, 3, 4, 5), + col("DummyBucket", "A", "A", "A", "A", "A", "A", "A", "A")); final IndexShiftData.Builder sb = new IndexShiftData.Builder(); sb.shiftRange(0, 6, 1); @@ -242,10 +232,9 @@ public void testIds6445() { update.modifiedColumnSet.clear(); update.modifiedColumnSet.setAll("SFB"); - addToTable(source, Index.FACTORY.getFlatIndex(9), - intCol("SFB", 4, 4, 4, 3, 2, 3, 2, 3, 2), - intCol("Sentinel", 10, 8, 7, 6, 9, 2, 3, 4, 5), - col("DummyBucket", "A", "A", "A", "A", "A", "A", "A", "A", "A")); + addToTable(source, Index.FACTORY.getFlatIndex(9), intCol("SFB", 4, 4, 4, 3, 2, 3, 2, 3, 2), + intCol("Sentinel", 10, 8, 7, 6, 9, 2, 3, 4, 5), + col("DummyBucket", "A", "A", "A", "A", "A", "A", "A", "A", "A")); final IndexShiftData.Builder sb = new IndexShiftData.Builder(); sb.shiftRange(0, 7, 1); @@ -270,10 +259,9 @@ public void testIds6445() { update.modifiedColumnSet.clear(); update.modifiedColumnSet.setAll("SFB"); - addToTable(source, Index.FACTORY.getFlatIndex(10), - intCol("SFB", 4, 4, 4, 4, 1, 2, 3, 2, 3, 
2), - intCol("Sentinel", 11, 10, 8, 7, 6, 9, 2, 3, 4, 5), - col("DummyBucket", "A", "A", "A", "A", "A", "A", "A", "A", "A", "A")); + addToTable(source, Index.FACTORY.getFlatIndex(10), intCol("SFB", 4, 4, 4, 4, 1, 2, 3, 2, 3, 2), + intCol("Sentinel", 11, 10, 8, 7, 6, 9, 2, 3, 4, 5), + col("DummyBucket", "A", "A", "A", "A", "A", "A", "A", "A", "A", "A")); final IndexShiftData.Builder sb = new IndexShiftData.Builder(); sb.shiftRange(0, 8, 1); @@ -298,10 +286,9 @@ public void testIds6445() { update.modifiedColumnSet.clear(); update.modifiedColumnSet.setAll("SFB", "Sentinel"); - addToTable(source, Index.FACTORY.getFlatIndex(11), - intCol("SFB", 4, 4, 4, 4, 4, 1, 2, 3, 2, 3, 2), - intCol("Sentinel", 12, 11, 10, 8, 7, 13, 9, 2, 3, 4, 5), - col("DummyBucket", "A", "A", "A", "A", "A", "A", "A", "A", "A", "A", "A")); + addToTable(source, Index.FACTORY.getFlatIndex(11), intCol("SFB", 4, 4, 4, 4, 4, 1, 2, 3, 2, 3, 2), + intCol("Sentinel", 12, 11, 10, 8, 7, 13, 9, 2, 3, 4, 5), + col("DummyBucket", "A", "A", "A", "A", "A", "A", "A", "A", "A", "A", "A")); final IndexShiftData.Builder sb = new IndexShiftData.Builder(); sb.shiftRange(0, 9, 1); diff --git a/DB/src/test/java/io/deephaven/db/v2/hashing/TestKnVn.java b/DB/src/test/java/io/deephaven/db/v2/hashing/TestKnVn.java index 0c443c1c951..5e3885362cc 100644 --- a/DB/src/test/java/io/deephaven/db/v2/hashing/TestKnVn.java +++ b/DB/src/test/java/io/deephaven/db/v2/hashing/TestKnVn.java @@ -7,10 +7,9 @@ public class TestKnVn { /** - * Rationale: at its maximum capacity, the hashtable will have an long[Integer.MAX_VALUE] array. - * When it rehashes, it will need another such array to rehash into. So, number of bytes needed - * = 2 * sizeof(long) * Integer.MAX_VALUE = 32G. Let's round up and say you need a 40G heap to - * run this test. + * Rationale: at its maximum capacity, the hashtable will have an long[Integer.MAX_VALUE] array. When it rehashes, + * it will need another such array to rehash into. 
So, number of bytes needed = 2 * sizeof(long) * Integer.MAX_VALUE + * = 32G. Let's round up and say you need a 40G heap to run this test. */ private static final long MINIMUM_HEAP_SIZE_NEEDED_FOR_TEST = 40L << 30; private static final int HASHTABLE_SIZE_LOWER_BOUND_1 = 900_000_000; @@ -19,8 +18,8 @@ public class TestKnVn { private static final int HASHTABLE_SIZE_UPPER_BOUND = 1_000_000_000; /** - * This is a very long-running test which also needs a big heap. We should figure out how to - * configure things so this runs off to the side without disrupting other developers. + * This is a very long-running test which also needs a big heap. We should figure out how to configure things so + * this runs off to the side without disrupting other developers. */ @Test public void fillK1V1ToTheMax() { @@ -28,8 +27,8 @@ public void fillK1V1ToTheMax() { } /** - * This is a very long-running test which also needs a big heap. We should figure out how to - * configure things so this runs off to the side without disrupting other developers. + * This is a very long-running test which also needs a big heap. We should figure out how to configure things so + * this runs off to the side without disrupting other developers. */ @Test public void fillK2V2ToTheMax() { @@ -37,8 +36,8 @@ public void fillK2V2ToTheMax() { } /** - * This is a very long-running test which also needs a big heap. We should figure out how to - * configure things so this runs off to the side without disrupting other developers. + * This is a very long-running test which also needs a big heap. We should figure out how to configure things so + * this runs off to the side without disrupting other developers. 
*/ @Test public void fillK4V4ToTheMax() { @@ -48,10 +47,8 @@ public void fillK4V4ToTheMax() { private static void fillToCapacity(TLongLongMap ht, final long lowerSizeBound) { final long maxMemory = Runtime.getRuntime().maxMemory(); if (maxMemory < MINIMUM_HEAP_SIZE_NEEDED_FOR_TEST) { - final String skipMessage = - String.format("Skipping test, because I want %fG of heap, but have only %fG%n", - (double) MINIMUM_HEAP_SIZE_NEEDED_FOR_TEST / (1 << 30), - (double) maxMemory / (1 << 30)); + final String skipMessage = String.format("Skipping test, because I want %fG of heap, but have only %fG%n", + (double) MINIMUM_HEAP_SIZE_NEEDED_FOR_TEST / (1 << 30), (double) maxMemory / (1 << 30)); Assume.assumeTrue(skipMessage, false); } long ii = 0; @@ -71,8 +68,7 @@ private static void fillToCapacity(TLongLongMap ht, final long lowerSizeBound) { for (; ii < HASHTABLE_SIZE_UPPER_BOUND; ++ii) { try { if ((ii % 10_000_000) == 0) { - System.out.printf("Made it to %d, and expecting it to hit max capacity soon%n", - ii); + System.out.printf("Made it to %d, and expecting it to hit max capacity soon%n", ii); } ht.put(ii * 11, ii * 17); } catch (UnsupportedOperationException uoe) { @@ -81,8 +77,7 @@ private static void fillToCapacity(TLongLongMap ht, final long lowerSizeBound) { } } TestCase.assertTrue(String.format( - "Expected hashtable to reject a 'put' as it got close to being full, but it accepted %d elements", - ii), - putFailed); + "Expected hashtable to reject a 'put' as it got close to being full, but it accepted %d elements", ii), + putFailed); } } diff --git a/DB/src/test/java/io/deephaven/db/v2/locations/impl/SimpleTableLocationKey.java b/DB/src/test/java/io/deephaven/db/v2/locations/impl/SimpleTableLocationKey.java index 823c4924ffd..ac03c8850a1 100644 --- a/DB/src/test/java/io/deephaven/db/v2/locations/impl/SimpleTableLocationKey.java +++ b/DB/src/test/java/io/deephaven/db/v2/locations/impl/SimpleTableLocationKey.java @@ -26,14 +26,13 @@ public String getImplementationName() 
{ @Override public LogOutput append(LogOutput logOutput) { return logOutput.append(getImplementationName()).append("[partitions=") - .append(PartitionsFormatter.INSTANCE, partitions).append(']'); + .append(PartitionsFormatter.INSTANCE, partitions).append(']'); } @Override public int compareTo(@NotNull final TableLocationKey other) { if (other instanceof SimpleTableLocationKey) { - return PartitionsComparator.INSTANCE.compare(partitions, - ((SimpleTableLocationKey) other).partitions); + return PartitionsComparator.INSTANCE.compare(partitions, ((SimpleTableLocationKey) other).partitions); } throw new ClassCastException("Cannot compare " + getClass() + " to " + other.getClass()); } @@ -46,6 +45,6 @@ public int hashCode() { @Override public boolean equals(final Object other) { return other == this || (other instanceof SimpleTableLocationKey - && partitions.equals(((SimpleTableLocationKey) other).partitions)); + && partitions.equals(((SimpleTableLocationKey) other).partitions)); } } diff --git a/DB/src/test/java/io/deephaven/db/v2/locations/impl/TestFunctionConsistencyMonitor.java b/DB/src/test/java/io/deephaven/db/v2/locations/impl/TestFunctionConsistencyMonitor.java index f68759ce342..023af3f4342 100644 --- a/DB/src/test/java/io/deephaven/db/v2/locations/impl/TestFunctionConsistencyMonitor.java +++ b/DB/src/test/java/io/deephaven/db/v2/locations/impl/TestFunctionConsistencyMonitor.java @@ -13,20 +13,15 @@ public class TestFunctionConsistencyMonitor { @Test public void testCurrentDateNy() { DBTimeUtils.currentDateNyOverride = "Aardvark"; - TestCase.assertEquals("Aardvark", - CompositeTableDataServiceConsistencyMonitor.currentDateNy()); + TestCase.assertEquals("Aardvark", CompositeTableDataServiceConsistencyMonitor.currentDateNy()); - try (final SafeCloseable ignored = - CompositeTableDataServiceConsistencyMonitor.INSTANCE.start()) { - TestCase.assertEquals("Aardvark", - CompositeTableDataServiceConsistencyMonitor.currentDateNy()); + try (final SafeCloseable ignored = 
CompositeTableDataServiceConsistencyMonitor.INSTANCE.start()) { + TestCase.assertEquals("Aardvark", CompositeTableDataServiceConsistencyMonitor.currentDateNy()); DBTimeUtils.currentDateNyOverride = "Armadillo"; - TestCase.assertEquals("Aardvark", - CompositeTableDataServiceConsistencyMonitor.currentDateNy()); + TestCase.assertEquals("Aardvark", CompositeTableDataServiceConsistencyMonitor.currentDateNy()); } - TestCase.assertEquals("Armadillo", - CompositeTableDataServiceConsistencyMonitor.currentDateNy()); + TestCase.assertEquals("Armadillo", CompositeTableDataServiceConsistencyMonitor.currentDateNy()); DBTimeUtils.currentDateNyOverride = null; } @@ -35,22 +30,17 @@ public void testCurrentDateNy() { @Test public void testMidStreamRegistration() { DBTimeUtils.currentDateNyOverride = "Aardvark"; - TestCase.assertEquals("Aardvark", - CompositeTableDataServiceConsistencyMonitor.currentDateNy()); + TestCase.assertEquals("Aardvark", CompositeTableDataServiceConsistencyMonitor.currentDateNy()); final AtomicInteger atomicInteger = new AtomicInteger(7); final FunctionConsistencyMonitor.ConsistentSupplier consistentInteger; - try (final SafeCloseable ignored = - CompositeTableDataServiceConsistencyMonitor.INSTANCE.start()) { - TestCase.assertEquals("Aardvark", - CompositeTableDataServiceConsistencyMonitor.currentDateNy()); + try (final SafeCloseable ignored = CompositeTableDataServiceConsistencyMonitor.INSTANCE.start()) { + TestCase.assertEquals("Aardvark", CompositeTableDataServiceConsistencyMonitor.currentDateNy()); DBTimeUtils.currentDateNyOverride = "Armadillo"; - TestCase.assertEquals("Aardvark", - CompositeTableDataServiceConsistencyMonitor.currentDateNy()); + TestCase.assertEquals("Aardvark", CompositeTableDataServiceConsistencyMonitor.currentDateNy()); - consistentInteger = - new CompositeTableDataServiceConsistencyMonitor.ConsistentSupplier<>( + consistentInteger = new CompositeTableDataServiceConsistencyMonitor.ConsistentSupplier<>( atomicInteger::getAndIncrement); 
TestCase.assertEquals((Integer) 7, consistentInteger.get()); TestCase.assertEquals((Integer) 7, consistentInteger.get()); @@ -59,8 +49,7 @@ public void testMidStreamRegistration() { TestCase.assertEquals((Integer) 8, consistentInteger.get()); - TestCase.assertEquals("Armadillo", - CompositeTableDataServiceConsistencyMonitor.currentDateNy()); + TestCase.assertEquals("Armadillo", CompositeTableDataServiceConsistencyMonitor.currentDateNy()); DBTimeUtils.currentDateNyOverride = null; } @@ -68,26 +57,21 @@ public void testMidStreamRegistration() { @Test public void testCurrentDateNyWithThreads() throws InterruptedException { DBTimeUtils.currentDateNyOverride = "Bobcat"; - TestCase.assertEquals("Bobcat", - CompositeTableDataServiceConsistencyMonitor.currentDateNy()); + TestCase.assertEquals("Bobcat", CompositeTableDataServiceConsistencyMonitor.currentDateNy()); final MutableObject mutableString = new MutableObject<>(); Thread t; - try (final SafeCloseable ignored = - CompositeTableDataServiceConsistencyMonitor.INSTANCE.start()) { - TestCase.assertEquals("Bobcat", - CompositeTableDataServiceConsistencyMonitor.currentDateNy()); + try (final SafeCloseable ignored = CompositeTableDataServiceConsistencyMonitor.INSTANCE.start()) { + TestCase.assertEquals("Bobcat", CompositeTableDataServiceConsistencyMonitor.currentDateNy()); t = new Thread(() -> { synchronized (mutableString) { // do nothing } - try (final SafeCloseable ignored2 = - CompositeTableDataServiceConsistencyMonitor.INSTANCE.start()) { - mutableString - .setValue(CompositeTableDataServiceConsistencyMonitor.currentDateNy()); + try (final SafeCloseable ignored2 = CompositeTableDataServiceConsistencyMonitor.INSTANCE.start()) { + mutableString.setValue(CompositeTableDataServiceConsistencyMonitor.currentDateNy()); } }); synchronized (mutableString) { @@ -95,8 +79,7 @@ public void testCurrentDateNyWithThreads() throws InterruptedException { DBTimeUtils.currentDateNyOverride = "Bear"; } - TestCase.assertEquals("Bobcat", - 
CompositeTableDataServiceConsistencyMonitor.currentDateNy()); + TestCase.assertEquals("Bobcat", CompositeTableDataServiceConsistencyMonitor.currentDateNy()); } t.join(1000); @@ -109,19 +92,15 @@ public void testCurrentDateNyWithThreads() throws InterruptedException { final MutableBoolean mutableBoolean = new MutableBoolean(false); final MutableBoolean gotValueOnce = new MutableBoolean(false); - try (final SafeCloseable ignored = - CompositeTableDataServiceConsistencyMonitor.INSTANCE.start()) { - TestCase.assertEquals("Bear", - CompositeTableDataServiceConsistencyMonitor.currentDateNy()); + try (final SafeCloseable ignored = CompositeTableDataServiceConsistencyMonitor.INSTANCE.start()) { + TestCase.assertEquals("Bear", CompositeTableDataServiceConsistencyMonitor.currentDateNy()); t = new Thread(() -> { - try (final SafeCloseable ignored2 = - CompositeTableDataServiceConsistencyMonitor.INSTANCE.start()) { + try (final SafeCloseable ignored2 = CompositeTableDataServiceConsistencyMonitor.INSTANCE.start()) { synchronized (mutableString) { // do nothing } - mutableString - .setValue(CompositeTableDataServiceConsistencyMonitor.currentDateNy()); + mutableString.setValue(CompositeTableDataServiceConsistencyMonitor.currentDateNy()); synchronized (gotValueOnce) { gotValueOnce.setTrue(); gotValueOnce.notifyAll(); @@ -137,8 +116,7 @@ public void testCurrentDateNyWithThreads() throws InterruptedException { } } mutableString3.setValue(DBTimeUtils.currentDateNy()); - mutableString2 - .setValue(CompositeTableDataServiceConsistencyMonitor.currentDateNy()); + mutableString2.setValue(CompositeTableDataServiceConsistencyMonitor.currentDateNy()); } }); synchronized (mutableString) { @@ -146,12 +124,10 @@ public void testCurrentDateNyWithThreads() throws InterruptedException { DBTimeUtils.currentDateNyOverride = "Butterfly"; } - TestCase.assertEquals("Bear", - CompositeTableDataServiceConsistencyMonitor.currentDateNy()); + TestCase.assertEquals("Bear", 
CompositeTableDataServiceConsistencyMonitor.currentDateNy()); } - try (final SafeCloseable ignored = - CompositeTableDataServiceConsistencyMonitor.INSTANCE.start()) { + try (final SafeCloseable ignored = CompositeTableDataServiceConsistencyMonitor.INSTANCE.start()) { while (true) { synchronized (gotValueOnce) { if (gotValueOnce.booleanValue()) { @@ -165,8 +141,7 @@ public void testCurrentDateNyWithThreads() throws InterruptedException { mutableBoolean.setTrue(); mutableBoolean.notifyAll(); } - TestCase.assertEquals("Buffalo", - CompositeTableDataServiceConsistencyMonitor.consistentDateNy()); + TestCase.assertEquals("Buffalo", CompositeTableDataServiceConsistencyMonitor.consistentDateNy()); } t.join(1000); diff --git a/DB/src/test/java/io/deephaven/db/v2/locations/impl/TestGroupingProviders.java b/DB/src/test/java/io/deephaven/db/v2/locations/impl/TestGroupingProviders.java index 02f9160b761..3f8e503d8ad 100644 --- a/DB/src/test/java/io/deephaven/db/v2/locations/impl/TestGroupingProviders.java +++ b/DB/src/test/java/io/deephaven/db/v2/locations/impl/TestGroupingProviders.java @@ -36,8 +36,7 @@ public class TestGroupingProviders { @Before public void setUp() throws Exception { - dataDirectory = - Files.createTempDirectory(Paths.get(""), "TestChunkedRegionedOperations-").toFile(); + dataDirectory = Files.createTempDirectory(Paths.get(""), "TestChunkedRegionedOperations-").toFile(); dataDirectory.deleteOnExit(); } @@ -73,10 +72,9 @@ public void testParallelMissingGroups() { } private void doTest(final boolean missingGroups) { - final Table raw = TableTools.emptyTable(26 * 10 * 1000).update( - "Part=String.format(`%04d`, (long)(ii/1000))", "Sym=(char)('A' + ii % 26)", "Other=ii"); - final Table[] partitions = - raw.byExternal("Part").transformTables(rp -> rp.by("Sym").ungroup()).values() + final Table raw = TableTools.emptyTable(26 * 10 * 1000).update("Part=String.format(`%04d`, (long)(ii/1000))", + "Sym=(char)('A' + ii % 26)", "Other=ii"); + final Table[] partitions = 
raw.byExternal("Part").transformTables(rp -> rp.by("Sym").ungroup()).values() .toArray(Table.ZERO_LENGTH_TABLE_ARRAY); if (!missingGroups) { @@ -86,91 +84,81 @@ private void doTest(final boolean missingGroups) { } final TableDefinition partitionedDataDefinition = TableDefinition.of( - ColumnDefinition.ofString("Part").withPartitioning(), - ColumnDefinition.ofChar("Sym").withGrouping(), - ColumnDefinition.ofLong("Other")); + ColumnDefinition.ofString("Part").withPartitioning(), + ColumnDefinition.ofChar("Sym").withGrouping(), + ColumnDefinition.ofLong("Other")); final TableDefinition partitionedMissingDataDefinition; if (missingGroups) { partitionedMissingDataDefinition = TableDefinition.of( - ColumnDefinition.ofString("Part").withPartitioning(), - ColumnDefinition.ofChar("Sym"), - ColumnDefinition.ofLong("Other")); + ColumnDefinition.ofString("Part").withPartitioning(), + ColumnDefinition.ofChar("Sym"), + ColumnDefinition.ofLong("Other")); } else { partitionedMissingDataDefinition = TableDefinition.of( - ColumnDefinition.ofString("Part").withPartitioning(), - ColumnDefinition.ofLong("Other")); + ColumnDefinition.ofString("Part").withPartitioning(), + ColumnDefinition.ofLong("Other")); } final String tableName = "TestTable"; ParquetTools.writeTable( - partitions[0], - new File(dataDirectory, - "IP" + File.separator + "0000" + File.separator + tableName + File.separator - + PARQUET_FILE_NAME), - partitionedDataDefinition); + partitions[0], + new File(dataDirectory, + "IP" + File.separator + "0000" + File.separator + tableName + File.separator + + PARQUET_FILE_NAME), + partitionedDataDefinition); ParquetTools.writeTable( - partitions[1], - new File(dataDirectory, - "IP" + File.separator + "0001" + File.separator + tableName + File.separator - + PARQUET_FILE_NAME), - partitionedDataDefinition); + partitions[1], + new File(dataDirectory, + "IP" + File.separator + "0001" + File.separator + tableName + File.separator + + PARQUET_FILE_NAME), + partitionedDataDefinition); 
ParquetTools.writeTable( - partitions[2], - new File(dataDirectory, - "IP" + File.separator + "0002" + File.separator + tableName + File.separator - + PARQUET_FILE_NAME), - partitionedMissingDataDefinition); + partitions[2], + new File(dataDirectory, + "IP" + File.separator + "0002" + File.separator + tableName + File.separator + + PARQUET_FILE_NAME), + partitionedMissingDataDefinition); ParquetTools.writeTable( - partitions[3], - new File(dataDirectory, - "IP" + File.separator + "0003" + File.separator + tableName + File.separator - + PARQUET_FILE_NAME), - partitionedMissingDataDefinition); + partitions[3], + new File(dataDirectory, + "IP" + File.separator + "0003" + File.separator + tableName + File.separator + + PARQUET_FILE_NAME), + partitionedMissingDataDefinition); ParquetTools.writeTables( - Arrays.copyOfRange(partitions, 4, partitions.length), - partitionedDataDefinition, - IntStream.range(4, 260) - .mapToObj(pcv -> new File(dataDirectory, - "IP" + File.separator + String.format("%04d", pcv) + File.separator + tableName - + File.separator + PARQUET_FILE_NAME)) - .toArray(File[]::new)); - // TODO (deephaven/deephaven-core/issues/321): Re-add this part of the test when the parquet - // bug is fixed + Arrays.copyOfRange(partitions, 4, partitions.length), + partitionedDataDefinition, + IntStream.range(4, 260) + .mapToObj(pcv -> new File(dataDirectory, + "IP" + File.separator + String.format("%04d", pcv) + File.separator + tableName + + File.separator + PARQUET_FILE_NAME)) + .toArray(File[]::new)); + // TODO (deephaven/deephaven-core/issues/321): Re-add this part of the test when the parquet bug is fixed ParquetTools.writeTable( - TableTools.emptyTable(0).updateView("Sym=NULL_CHAR", "Other=NULL_LONG"), - new File(dataDirectory, - "IP" + File.separator + "XXXX" + File.separator + tableName + File.separator - + PARQUET_FILE_NAME), - partitionedDataDefinition); + TableTools.emptyTable(0).updateView("Sym=NULL_CHAR", "Other=NULL_LONG"), + new File(dataDirectory, + 
"IP" + File.separator + "XXXX" + File.separator + tableName + File.separator + + PARQUET_FILE_NAME), + partitionedDataDefinition); if (!missingGroups) { // Put Sym back on for the partitions that dropped it. partitions[2] = partitions[2].updateView("Sym = NULL_CHAR"); partitions[3] = partitions[3].updateView("Sym = NULL_CHAR"); } - final Table expected = TableTools.merge(partitions).view("Part", "Sym", "Other"); // Column - // ordering - // was - // changed - // by - // by()/ungroup() - // above, - // restore - // it - // here. + final Table expected = TableTools.merge(partitions).view("Part", "Sym", "Other"); // Column ordering was changed + // by by()/ungroup() above, + // restore it here. final Table actual = ParquetTools.readPartitionedTable( - DeephavenNestedPartitionLayout.forParquet(dataDirectory, tableName, "Part", - ipn -> ipn.equals("IP")), - ParquetInstructions.EMPTY, - partitionedDataDefinition).coalesce(); + DeephavenNestedPartitionLayout.forParquet(dataDirectory, tableName, "Part", ipn -> ipn.equals("IP")), + ParquetInstructions.EMPTY, + partitionedDataDefinition).coalesce(); TstUtils.assertTableEquals(expected, actual); - TestCase.assertEquals(missingGroups, - actual.getColumnSource("Sym").getGroupToRange() == null); + TestCase.assertEquals(missingGroups, actual.getColumnSource("Sym").getGroupToRange() == null); TstUtils.assertTableEquals(expected.by("Sym").ungroup(), actual.by("Sym").ungroup()); } @@ -178,43 +166,39 @@ private void doTest(final boolean missingGroups) { @Test public void testParallelCollection() { final List observedOrder = Collections.synchronizedList(new ArrayList<>()); - final int[] intArray = - IntStream.range(0, 10000).parallel().peek(observedOrder::add).toArray(); + final int[] intArray = IntStream.range(0, 10000).parallel().peek(observedOrder::add).toArray(); for (int ii = 1; ii < intArray.length; ++ii) { TestCase.assertTrue(intArray[ii - 1] < intArray[ii]); } System.out.println("Out of order observed: " + IntStream.range(1, 
intArray.length) - .anyMatch(ii -> observedOrder.get(ii - 1) > observedOrder.get(ii))); + .anyMatch(ii -> observedOrder.get(ii - 1) > observedOrder.get(ii))); observedOrder.clear(); - final List integerList = - Arrays.stream(intArray).boxed().parallel().peek(observedOrder::add) + final List integerList = Arrays.stream(intArray).boxed().parallel().peek(observedOrder::add) .collect(Collectors.toList()); for (int ii = 0; ii < integerList.size(); ++ii) { TestCase.assertEquals(intArray[ii], integerList.get(ii).intValue()); } System.out.println("Out of order observed: " + IntStream.range(1, intArray.length) - .anyMatch(ii -> observedOrder.get(ii - 1) > observedOrder.get(ii))); + .anyMatch(ii -> observedOrder.get(ii - 1) > observedOrder.get(ii))); observedOrder.clear(); - final LinkedHashMap integerMap = - integerList.parallelStream().peek(observedOrder::add) - .collect(Collectors.toMap(Function.identity(), Function.identity(), - Assert::neverInvoked, LinkedHashMap::new)); + final LinkedHashMap integerMap = integerList.parallelStream().peek(observedOrder::add) + .collect(Collectors.toMap(Function.identity(), Function.identity(), Assert::neverInvoked, + LinkedHashMap::new)); System.out.println("Out of order observed: " + IntStream.range(1, intArray.length) - .anyMatch(ii -> observedOrder.get(ii - 1) > observedOrder.get(ii))); + .anyMatch(ii -> observedOrder.get(ii - 1) > observedOrder.get(ii))); observedOrder.clear(); final LinkedHashMap stringMap = - integerMap.entrySet().parallelStream().peek(e -> observedOrder.add(e.getKey())) - .collect(Collectors.toMap(e -> e.getKey().toString(), e -> e.getValue().toString(), - Assert::neverInvoked, LinkedHashMap::new)); + integerMap.entrySet().parallelStream().peek(e -> observedOrder.add(e.getKey())) + .collect(Collectors.toMap(e -> e.getKey().toString(), e -> e.getValue().toString(), + Assert::neverInvoked, LinkedHashMap::new)); System.out.println("Out of order observed: " + IntStream.range(1, intArray.length) - .anyMatch(ii -> 
observedOrder.get(ii - 1) > observedOrder.get(ii))); + .anyMatch(ii -> observedOrder.get(ii - 1) > observedOrder.get(ii))); observedOrder.clear(); - final int[] outputArray = - stringMap.values().parallelStream().mapToInt(Integer::parseInt).toArray(); + final int[] outputArray = stringMap.values().parallelStream().mapToInt(Integer::parseInt).toArray(); for (int ii = 0; ii < outputArray.length; ++ii) { TestCase.assertEquals(intArray[ii], outputArray[ii]); } diff --git a/DB/src/test/java/io/deephaven/db/v2/locations/impl/TestKeyValuePartitionLayout.java b/DB/src/test/java/io/deephaven/db/v2/locations/impl/TestKeyValuePartitionLayout.java index 90f8885716c..de521222173 100644 --- a/DB/src/test/java/io/deephaven/db/v2/locations/impl/TestKeyValuePartitionLayout.java +++ b/DB/src/test/java/io/deephaven/db/v2/locations/impl/TestKeyValuePartitionLayout.java @@ -26,8 +26,7 @@ public class TestKeyValuePartitionLayout { @Before public void setUp() throws IOException { - dataDirectory = - Files.createTempDirectory(Paths.get(""), "TestChunkedRegionedOperations-").toFile(); + dataDirectory = Files.createTempDirectory(Paths.get(""), "TestChunkedRegionedOperations-").toFile(); dataDirectory.deleteOnExit(); } @@ -43,13 +42,11 @@ public void testFlat() throws IOException { Files.write(file1.toPath(), "Hello world!".getBytes()); Files.write(file2.toPath(), "Goodbye cruel world!".getBytes()); - final RecordingLocationKeyFinder recorder = - new RecordingLocationKeyFinder<>(); + final RecordingLocationKeyFinder recorder = new RecordingLocationKeyFinder<>(); new KeyValuePartitionLayout<>(dataDirectory, path -> true, - (path, partitions) -> new FileTableLocationKey(path.toFile(), 0, partitions), 0) - .findKeys(recorder); + (path, partitions) -> new FileTableLocationKey(path.toFile(), 0, partitions), 0).findKeys(recorder); final List results = - recorder.getRecordedKeys().stream().sorted().collect(Collectors.toList()); + 
recorder.getRecordedKeys().stream().sorted().collect(Collectors.toList()); TestCase.assertEquals(2, results.size()); @@ -69,13 +66,11 @@ public void testOneLevel() throws IOException { Files.write(file1.toPath(), "Hello world!".getBytes()); Files.write(file2.toPath(), "Goodbye cruel world!".getBytes()); - final RecordingLocationKeyFinder recorder = - new RecordingLocationKeyFinder<>(); + final RecordingLocationKeyFinder recorder = new RecordingLocationKeyFinder<>(); new KeyValuePartitionLayout<>(dataDirectory, path -> true, - (path, partitions) -> new FileTableLocationKey(path.toFile(), 0, partitions), 1) - .findKeys(recorder); + (path, partitions) -> new FileTableLocationKey(path.toFile(), 0, partitions), 1).findKeys(recorder); final List results = - recorder.getRecordedKeys().stream().sorted().collect(Collectors.toList()); + recorder.getRecordedKeys().stream().sorted().collect(Collectors.toList()); TestCase.assertEquals(2, results.size()); @@ -91,12 +86,12 @@ public void testOneLevel() throws IOException { @Test public void testThreeLevels() throws IOException { - final File file1 = new File(dataDirectory, "Country=US" + File.separator + "State=New York" - + File.separator + "City=New York" + File.separator + "file1"); - final File file2 = new File(dataDirectory, "Country=France" + File.separator - + "State=Grand Est" + File.separator + "City=Reims" + File.separator + "file2"); - final File file3 = new File(dataDirectory, "Country=France" + File.separator - + "State=Grand Est" + File.separator + "City=Strasbourg" + File.separator + "file3"); + final File file1 = new File(dataDirectory, "Country=US" + File.separator + "State=New York" + File.separator + + "City=New York" + File.separator + "file1"); + final File file2 = new File(dataDirectory, "Country=France" + File.separator + "State=Grand Est" + + File.separator + "City=Reims" + File.separator + "file2"); + final File file3 = new File(dataDirectory, "Country=France" + File.separator + "State=Grand Est" + + 
File.separator + "City=Strasbourg" + File.separator + "file3"); file1.getParentFile().mkdirs(); file2.getParentFile().mkdirs(); file3.getParentFile().mkdirs(); @@ -104,13 +99,11 @@ public void testThreeLevels() throws IOException { Files.write(file2.toPath(), "Goodbye cruel world!".getBytes()); Files.write(file3.toPath(), "Oui!".getBytes()); - final RecordingLocationKeyFinder recorder = - new RecordingLocationKeyFinder<>(); + final RecordingLocationKeyFinder recorder = new RecordingLocationKeyFinder<>(); new KeyValuePartitionLayout<>(dataDirectory, path -> true, - (path, partitions) -> new FileTableLocationKey(path.toFile(), 0, partitions), 3) - .findKeys(recorder); + (path, partitions) -> new FileTableLocationKey(path.toFile(), 0, partitions), 3).findKeys(recorder); final List results = - recorder.getRecordedKeys().stream().sorted().collect(Collectors.toList()); + recorder.getRecordedKeys().stream().sorted().collect(Collectors.toList()); TestCase.assertEquals(3, results.size()); @@ -137,12 +130,12 @@ public void testThreeLevels() throws IOException { @Test public void testTypesAndNameLegalization() throws IOException { - final File file1 = new File(dataDirectory, "A=2" + File.separator + "B 1=3.14" - + File.separator + " C=true" + File.separator + "file1"); - final File file2 = new File(dataDirectory, "A=1" + File.separator + "B 1=7.0" - + File.separator + " C=false" + File.separator + "file2"); - final File file3 = new File(dataDirectory, "A=1" + File.separator + "B 1=100" - + File.separator + " C=false" + File.separator + "file3"); + final File file1 = new File(dataDirectory, + "A=2" + File.separator + "B 1=3.14" + File.separator + " C=true" + File.separator + "file1"); + final File file2 = new File(dataDirectory, + "A=1" + File.separator + "B 1=7.0" + File.separator + " C=false" + File.separator + "file2"); + final File file3 = new File(dataDirectory, + "A=1" + File.separator + "B 1=100" + File.separator + " C=false" + File.separator + "file3"); 
file1.getParentFile().mkdirs(); file2.getParentFile().mkdirs(); file3.getParentFile().mkdirs(); @@ -150,13 +143,11 @@ public void testTypesAndNameLegalization() throws IOException { Files.write(file2.toPath(), "Goodbye cruel world!".getBytes()); Files.write(file3.toPath(), "Oui!".getBytes()); - final RecordingLocationKeyFinder recorder = - new RecordingLocationKeyFinder<>(); + final RecordingLocationKeyFinder recorder = new RecordingLocationKeyFinder<>(); new KeyValuePartitionLayout<>(dataDirectory, path -> true, - (path, partitions) -> new FileTableLocationKey(path.toFile(), 0, partitions), 3) - .findKeys(recorder); + (path, partitions) -> new FileTableLocationKey(path.toFile(), 0, partitions), 3).findKeys(recorder); final List results = - recorder.getRecordedKeys().stream().sorted().collect(Collectors.toList()); + recorder.getRecordedKeys().stream().sorted().collect(Collectors.toList()); TestCase.assertEquals(3, results.size()); @@ -183,15 +174,12 @@ public void testTypesAndNameLegalization() throws IOException { @Test public void testMaxDepthEmpty() throws IOException { - final File file1 = new File(dataDirectory, - "Country=US" + File.separator + "State=New York" + File.separator + "City=New York" - + File.separator + "Dummy=Nowhere" + File.separator + "file1"); - final File file2 = new File(dataDirectory, - "Country=France" + File.separator + "State=Grand Est" + File.separator + "City=Reims" - + File.separator + "Dummy=Nowhere" + File.separator + "file2"); - final File file3 = new File(dataDirectory, - "Country=France" + File.separator + "State=Grand Est" + File.separator - + "City=Strasbourg" + File.separator + "Dummy=Nowhere" + File.separator + "file3"); + final File file1 = new File(dataDirectory, "Country=US" + File.separator + "State=New York" + File.separator + + "City=New York" + File.separator + "Dummy=Nowhere" + File.separator + "file1"); + final File file2 = new File(dataDirectory, "Country=France" + File.separator + "State=Grand Est" + + 
File.separator + "City=Reims" + File.separator + "Dummy=Nowhere" + File.separator + "file2"); + final File file3 = new File(dataDirectory, "Country=France" + File.separator + "State=Grand Est" + + File.separator + "City=Strasbourg" + File.separator + "Dummy=Nowhere" + File.separator + "file3"); file1.getParentFile().mkdirs(); file2.getParentFile().mkdirs(); file3.getParentFile().mkdirs(); @@ -199,28 +187,25 @@ public void testMaxDepthEmpty() throws IOException { Files.write(file2.toPath(), "Goodbye cruel world!".getBytes()); Files.write(file3.toPath(), "Oui!".getBytes()); - final RecordingLocationKeyFinder recorder = - new RecordingLocationKeyFinder<>(); + final RecordingLocationKeyFinder recorder = new RecordingLocationKeyFinder<>(); new KeyValuePartitionLayout<>(dataDirectory, path -> true, - (path, partitions) -> new FileTableLocationKey(path.toFile(), 0, partitions), 3) - .findKeys(recorder); + (path, partitions) -> new FileTableLocationKey(path.toFile(), 0, partitions), 3).findKeys(recorder); final List results = - recorder.getRecordedKeys().stream().sorted().collect(Collectors.toList()); + recorder.getRecordedKeys().stream().sorted().collect(Collectors.toList()); TestCase.assertTrue(results.isEmpty()); } @Test public void testMaxDepth() throws IOException { - final File file1 = new File(dataDirectory, "Country=US" + File.separator + "State=New York" - + File.separator + "City=New York" + File.separator + "file1"); - final File file2 = new File(dataDirectory, "Country=France" + File.separator - + "State=Grand Est" + File.separator + "City=Reims" + File.separator + "file2"); - final File file3 = new File(dataDirectory, "Country=France" + File.separator - + "State=Grand Est" + File.separator + "City=Strasbourg" + File.separator + "file3"); - final File file4 = new File(dataDirectory, - "Country=France" + File.separator + "State=Grand Est" + File.separator - + "City=Strasbourg" + File.separator + "Dummy=Nowhere" + File.separator + "file4"); + final File file1 = 
new File(dataDirectory, "Country=US" + File.separator + "State=New York" + File.separator + + "City=New York" + File.separator + "file1"); + final File file2 = new File(dataDirectory, "Country=France" + File.separator + "State=Grand Est" + + File.separator + "City=Reims" + File.separator + "file2"); + final File file3 = new File(dataDirectory, "Country=France" + File.separator + "State=Grand Est" + + File.separator + "City=Strasbourg" + File.separator + "file3"); + final File file4 = new File(dataDirectory, "Country=France" + File.separator + "State=Grand Est" + + File.separator + "City=Strasbourg" + File.separator + "Dummy=Nowhere" + File.separator + "file4"); file1.getParentFile().mkdirs(); file2.getParentFile().mkdirs(); file3.getParentFile().mkdirs(); @@ -230,13 +215,11 @@ public void testMaxDepth() throws IOException { Files.write(file3.toPath(), "Oui!".getBytes()); Files.write(file4.toPath(), "Non!".getBytes()); - final RecordingLocationKeyFinder recorder = - new RecordingLocationKeyFinder<>(); + final RecordingLocationKeyFinder recorder = new RecordingLocationKeyFinder<>(); new KeyValuePartitionLayout<>(dataDirectory, path -> true, - (path, partitions) -> new FileTableLocationKey(path.toFile(), 0, partitions), 3) - .findKeys(recorder); + (path, partitions) -> new FileTableLocationKey(path.toFile(), 0, partitions), 3).findKeys(recorder); final List results = - recorder.getRecordedKeys().stream().sorted().collect(Collectors.toList()); + recorder.getRecordedKeys().stream().sorted().collect(Collectors.toList()); TestCase.assertEquals(3, results.size()); @@ -247,12 +230,12 @@ public void testMaxDepth() throws IOException { @Test public void testMismatch() throws IOException { - final File file1 = new File(dataDirectory, "Country=US" + File.separator + "State=New York" - + File.separator + "City=New York" + File.separator + "file1"); - final File file2 = new File(dataDirectory, "Country=France" + File.separator - + "Region=Grand Est" + File.separator + 
"City=Reims" + File.separator + "file2"); - final File file3 = new File(dataDirectory, "Country=France" + File.separator - + "Region=Grand Est" + File.separator + "City=Strasbourg" + File.separator + "file3"); + final File file1 = new File(dataDirectory, "Country=US" + File.separator + "State=New York" + File.separator + + "City=New York" + File.separator + "file1"); + final File file2 = new File(dataDirectory, "Country=France" + File.separator + "Region=Grand Est" + + File.separator + "City=Reims" + File.separator + "file2"); + final File file3 = new File(dataDirectory, "Country=France" + File.separator + "Region=Grand Est" + + File.separator + "City=Strasbourg" + File.separator + "file3"); file1.getParentFile().mkdirs(); file2.getParentFile().mkdirs(); file3.getParentFile().mkdirs(); @@ -262,8 +245,7 @@ public void testMismatch() throws IOException { try { new KeyValuePartitionLayout<>(dataDirectory, path -> true, - (path, partitions) -> new FileTableLocationKey(path.toFile(), 0, partitions), 3) - .findKeys(ftlk -> { + (path, partitions) -> new FileTableLocationKey(path.toFile(), 0, partitions), 3).findKeys(ftlk -> { }); TestCase.fail("Expected exception"); } catch (TableDataException expected) { diff --git a/DB/src/test/java/io/deephaven/db/v2/parquet/ParquetTableReadWriteTest.java b/DB/src/test/java/io/deephaven/db/v2/parquet/ParquetTableReadWriteTest.java index b039c8aa696..be15f94554e 100644 --- a/DB/src/test/java/io/deephaven/db/v2/parquet/ParquetTableReadWriteTest.java +++ b/DB/src/test/java/io/deephaven/db/v2/parquet/ParquetTableReadWriteTest.java @@ -51,32 +51,32 @@ public void tearDown() { private static Table getTableFlat(int size, boolean includeSerializable) { QueryLibrary.importClass(ParquetTableWriter.SomeSillyTest.class); ArrayList columns = - new ArrayList<>(Arrays.asList("someStringColumn = i % 10 == 0?null:(`` + (i % 101))", - "nonNullString = `` + (i % 60)", - "nonNullPolyString = `` + (i % 600)", - "someIntColumn = i", - "someLongColumn = ii", 
- "someDoubleColumn = i*1.1", - "someFloatColumn = (float)(i*1.1)", - "someBoolColumn = i % 3 == 0?true:i%3 == 1?false:null", - "someShortColumn = (short)i", - "someByteColumn = (byte)i", - "someCharColumn = (char)i", - "someTime = DBDateTime.now() + i", - "someKey = `` + (int)(i /100)", - "nullKey = i < -1?`123`:null")); + new ArrayList<>(Arrays.asList("someStringColumn = i % 10 == 0?null:(`` + (i % 101))", + "nonNullString = `` + (i % 60)", + "nonNullPolyString = `` + (i % 600)", + "someIntColumn = i", + "someLongColumn = ii", + "someDoubleColumn = i*1.1", + "someFloatColumn = (float)(i*1.1)", + "someBoolColumn = i % 3 == 0?true:i%3 == 1?false:null", + "someShortColumn = (short)i", + "someByteColumn = (byte)i", + "someCharColumn = (char)i", + "someTime = DBDateTime.now() + i", + "someKey = `` + (int)(i /100)", + "nullKey = i < -1?`123`:null")); if (includeSerializable) { columns.add("someSerializable = new SomeSillyTest(i)"); } return TableTools.emptyTable(size).select( - Selectable.from(columns)); + Selectable.from(columns)); } private static Table getOneColumnTableFlat(int size) { QueryLibrary.importClass(ParquetTableWriter.SomeSillyTest.class); return TableTools.emptyTable(size).select( - // "someBoolColumn = i % 3 == 0?true:i%3 == 1?false:null" - "someIntColumn = i % 3 == 0 ? null:i"); + // "someBoolColumn = i % 3 == 0?true:i%3 == 1?false:null" + "someIntColumn = i % 3 == 0 ? null:i"); } private static Table getGroupedOneColumnTable(int size) { @@ -85,20 +85,19 @@ private static Table getGroupedOneColumnTable(int size) { QueryLibrary.importClass(StringSet.class); Table result = t.by("groupKey = i % 100 + (int)(i/10)"); result = result.select(result.getDefinition().getColumnNames().stream() - .map(name -> name.equals("groupKey") ? name - : (name + " = i % 5 == 0 ? null:(i%3 == 0?" + name + ".subArray(0,0):" + name - + ")")) - .toArray(String[]::new)); + .map(name -> name.equals("groupKey") ? name + : (name + " = i % 5 == 0 ? null:(i%3 == 0?" 
+ name + ".subArray(0,0):" + name + ")")) + .toArray(String[]::new)); return result; } private static Table getEmptyArray(int size) { QueryLibrary.importClass(ParquetTableWriter.SomeSillyTest.class); return TableTools.emptyTable(size).select( - "someEmptyString = new String[0]", - "someEmptyInt = new int[0]", - "someEmptyBool = new Boolean[0]", - "someEmptyObject = new SomeSillyTest[0]"); + "someEmptyString = new String[0]", + "someEmptyInt = new int[0]", + "someEmptyBool = new Boolean[0]", + "someEmptyObject = new SomeSillyTest[0]"); } private static Table getGroupedTable(int size, boolean includeSerializable) { @@ -107,20 +106,19 @@ private static Table getGroupedTable(int size, boolean includeSerializable) { QueryLibrary.importClass(StringSet.class); Table result = t.by("groupKey = i % 100 + (int)(i/10)"); result = result.select(result.getDefinition().getColumnNames().stream() - .map(name -> name.equals("groupKey") ? name - : (name + " = i % 5 == 0 ? null:(i%3 == 0?" + name + ".subArray(0,0):" + name - + ")")) - .toArray(String[]::new)); + .map(name -> name.equals("groupKey") ? name + : (name + " = i % 5 == 0 ? null:(i%3 == 0?" 
+ name + ".subArray(0,0):" + name + ")")) + .toArray(String[]::new)); result = result.update( - "someStringSet = (StringSet)new StringSetArrayWrapper( ((Object)nonNullString) == null?new String[0]:(String[])nonNullString.toArray())"); + "someStringSet = (StringSet)new StringSetArrayWrapper( ((Object)nonNullString) == null?new String[0]:(String[])nonNullString.toArray())"); result = result.update( - "largeStringSet = (StringSet)new StringSetArrayWrapper(((Object)nonNullPolyString) == null?new String[0]:(String[])nonNullPolyString.toArray())"); + "largeStringSet = (StringSet)new StringSetArrayWrapper(((Object)nonNullPolyString) == null?new String[0]:(String[])nonNullPolyString.toArray())"); result = result.update( - "someStringColumn = (String[])(((Object)someStringColumn) == null?null:someStringColumn.toArray())", - "nonNullString = (String[])(((Object)nonNullString) == null?null:nonNullString.toArray())", - "nonNullPolyString = (String[])(((Object)nonNullPolyString) == null?null:nonNullPolyString.toArray())", - "someBoolColumn = (Boolean[])(((Object)someBoolColumn) == null?null:someBoolColumn.toArray())", - "someTime = (DBDateTime[])(((Object)someTime) == null?null:someTime.toArray())"); + "someStringColumn = (String[])(((Object)someStringColumn) == null?null:someStringColumn.toArray())", + "nonNullString = (String[])(((Object)nonNullString) == null?null:nonNullString.toArray())", + "nonNullPolyString = (String[])(((Object)nonNullPolyString) == null?null:nonNullPolyString.toArray())", + "someBoolColumn = (Boolean[])(((Object)someBoolColumn) == null?null:someBoolColumn.toArray())", + "someTime = (DBDateTime[])(((Object)someTime) == null?null:someTime.toArray())"); return result; } @@ -188,10 +186,9 @@ public void vectorParquetFormat() { @Test public void groupingByLongKey() { final TableDefinition definition = TableDefinition.of( - ColumnDefinition.ofInt("someInt"), - ColumnDefinition.ofLong("someLong").withGrouping()); - final Table testTable = - ((QueryTable) 
TableTools.emptyTable(10).select("someInt = i", "someLong = ii % 3") + ColumnDefinition.ofInt("someInt"), + ColumnDefinition.ofLong("someLong").withGrouping()); + final Table testTable = ((QueryTable) TableTools.emptyTable(10).select("someInt = i", "someLong = ii % 3") .by("someLong").ungroup("someInt")).withDefinitionUnsafe(definition); final File dest = new File(rootFile, "ParquetTest_groupByLong_test.parquet"); ParquetTools.writeTable(testTable, dest); @@ -203,12 +200,10 @@ public void groupingByLongKey() { @Test public void groupingByStringKey() { final TableDefinition definition = TableDefinition.of( - ColumnDefinition.ofInt("someInt"), - ColumnDefinition.ofString("someString").withGrouping()); - final Table testTable = - ((QueryTable) TableTools.emptyTable(10).select("someInt = i", "someString = `foo`") - .where("i % 2 == 0").by("someString").ungroup("someInt")) - .withDefinitionUnsafe(definition); + ColumnDefinition.ofInt("someInt"), + ColumnDefinition.ofString("someString").withGrouping()); + final Table testTable = ((QueryTable) TableTools.emptyTable(10).select("someInt = i", "someString = `foo`") + .where("i % 2 == 0").by("someString").ungroup("someInt")).withDefinitionUnsafe(definition); final File dest = new File(rootFile, "ParquetTest_groupByString_test.parquet"); ParquetTools.writeTable(testTable, dest); final Table fromDisk = ParquetTools.readTable(dest); @@ -220,11 +215,11 @@ public void groupingByStringKey() { public void groupingByBigInt() { QueryLibrary.importClass(BigInteger.class); final TableDefinition definition = TableDefinition.of( - ColumnDefinition.ofInt("someInt"), - ColumnDefinition.fromGenericType("someBigInt", BigInteger.class).withGrouping()); + ColumnDefinition.ofInt("someInt"), + ColumnDefinition.fromGenericType("someBigInt", BigInteger.class).withGrouping()); final Table testTable = ((QueryTable) TableTools.emptyTable(10) - .select("someInt = i", "someBigInt = BigInteger.valueOf(i % 3)").where("i % 2 == 0") - 
.by("someBigInt").ungroup("someInt")).withDefinitionUnsafe(definition); + .select("someInt = i", "someBigInt = BigInteger.valueOf(i % 3)").where("i % 2 == 0").by("someBigInt") + .ungroup("someInt")).withDefinitionUnsafe(definition); final File dest = new File(rootFile, "ParquetTest_groupByBigInt_test.parquet"); ParquetTools.writeTable(testTable, dest); final Table fromDisk = ParquetTools.readTable(dest); diff --git a/DB/src/test/java/io/deephaven/db/v2/select/FormulaTestUtil.java b/DB/src/test/java/io/deephaven/db/v2/select/FormulaTestUtil.java index 16b40f0bcbe..43f5adf23e0 100644 --- a/DB/src/test/java/io/deephaven/db/v2/select/FormulaTestUtil.java +++ b/DB/src/test/java/io/deephaven/db/v2/select/FormulaTestUtil.java @@ -108,18 +108,16 @@ static void setUpQueryLibrary() { static void setUpQueryScope() { QueryScope.addParam("myBoolean", true); QueryScope.addParam("myString", Integer.toString(QUERYSCOPE_OBJ_BASE_VALUE)); - QueryScope.addParam("myCharSequence", - (CharSequence) Integer.toString(QUERYSCOPE_OBJ_BASE_VALUE)); + QueryScope.addParam("myCharSequence", (CharSequence) Integer.toString(QUERYSCOPE_OBJ_BASE_VALUE)); QueryScope.addParam("myObject", Object.class); QueryScope.addParam("myIntArray", BASE_VALUES.clone()); QueryScope.addParam("myDoubleArray", IntStream.of(BASE_VALUES).asDoubleStream().toArray()); QueryScope.addParam("myCharArray", ArrayUtils.getUnboxedArray( - IntStream.of(BASE_VALUES).mapToObj((anInt) -> (char) anInt).toArray(Character[]::new))); + IntStream.of(BASE_VALUES).mapToObj((anInt) -> (char) anInt).toArray(Character[]::new))); QueryScope.addParam("myDoubleObjArray", - IntStream.of(BASE_VALUES).asDoubleStream().boxed().toArray(Double[]::new)); - QueryScope.addParam("myIntegerObjArray", - IntStream.of(BASE_VALUES).boxed().toArray(Integer[]::new)); + IntStream.of(BASE_VALUES).asDoubleStream().boxed().toArray(Double[]::new)); + QueryScope.addParam("myIntegerObjArray", IntStream.of(BASE_VALUES).boxed().toArray(Integer[]::new)); 
QueryScope.addParam("myByteArray", IntStream.of(BASE_VALUES)); QueryScope.addParam("myBooleanObj", true); @@ -132,16 +130,15 @@ static void setUpQueryScope() { QueryScope.addParam("myDoubleObj", (double) QUERYSCOPE_OBJ_BASE_VALUE); QueryScope.addParam("myArrayList", - new ArrayList<>(IntStream.of(BASE_VALUES).boxed().collect(Collectors.toList()))); - QueryScope.addParam("myHashMap", new HashMap<>( - Collections.singletonMap(QUERYSCOPE_OBJ_BASE_VALUE, QUERYSCOPE_OBJ_BASE_VALUE))); - QueryScope.addParam("myDBArray", - new DbArrayDirect<>(IntStream.of(BASE_VALUES).boxed().toArray())); + new ArrayList<>(IntStream.of(BASE_VALUES).boxed().collect(Collectors.toList()))); + QueryScope.addParam("myHashMap", + new HashMap<>(Collections.singletonMap(QUERYSCOPE_OBJ_BASE_VALUE, QUERYSCOPE_OBJ_BASE_VALUE))); + QueryScope.addParam("myDBArray", new DbArrayDirect<>(IntStream.of(BASE_VALUES).boxed().toArray())); QueryScope.addParam("myEnumValue", TestFormulaColumnEnum.ONE); QueryScope.addParam("myObjectDBArray", DbArray.class); QueryScope.addParam("myIntDBArray", new DbIntArrayDirect(BASE_VALUES)); - QueryScope.addParam("myByteDBArray", new DbByteArrayDirect(ArrayUtils.getUnboxedArray( - IntStream.of(BASE_VALUES).boxed().map(Integer::byteValue).toArray(Byte[]::new)))); + QueryScope.addParam("myByteDBArray", new DbByteArrayDirect(ArrayUtils + .getUnboxedArray(IntStream.of(BASE_VALUES).boxed().map(Integer::byteValue).toArray(Byte[]::new)))); // QueryScope.addParam("myBooleanDBArray", DbBooleanArray.class); QueryScope.addParam("ExampleQuantity", 1); @@ -154,21 +151,17 @@ static void setUpQueryScope() { @NotNull static Table getTestDataTable() { return TableTools.newTable( - TableTools.col("BooleanCol", false, true, QueryConstants.NULL_BOOLEAN), - TableTools.charCol("CharCol", (char) BASE_VALUES[0], (char) BASE_VALUES[1], - QueryConstants.NULL_CHAR), - TableTools.byteCol("ByteCol", (byte) BASE_VALUES[0], (byte) BASE_VALUES[1], - QueryConstants.NULL_BYTE), - 
TableTools.shortCol("ShortCol", (short) BASE_VALUES[0], (short) BASE_VALUES[1], - QueryConstants.NULL_SHORT), - TableTools.intCol("IntCol", (int) BASE_VALUES[0], (int) BASE_VALUES[1], - QueryConstants.NULL_INT), - TableTools.longCol("LongCol", (long) BASE_VALUES[0], (long) BASE_VALUES[1], - QueryConstants.NULL_LONG), - TableTools.floatCol("FloatCol", (float) BASE_VALUES[0], (float) BASE_VALUES[1], - QueryConstants.NULL_FLOAT), - TableTools.doubleCol("DoubleCol", (double) BASE_VALUES[0], (double) BASE_VALUES[1], - QueryConstants.NULL_DOUBLE)); + TableTools.col("BooleanCol", false, true, QueryConstants.NULL_BOOLEAN), + TableTools.charCol("CharCol", (char) BASE_VALUES[0], (char) BASE_VALUES[1], QueryConstants.NULL_CHAR), + TableTools.byteCol("ByteCol", (byte) BASE_VALUES[0], (byte) BASE_VALUES[1], QueryConstants.NULL_BYTE), + TableTools.shortCol("ShortCol", (short) BASE_VALUES[0], (short) BASE_VALUES[1], + QueryConstants.NULL_SHORT), + TableTools.intCol("IntCol", (int) BASE_VALUES[0], (int) BASE_VALUES[1], QueryConstants.NULL_INT), + TableTools.longCol("LongCol", (long) BASE_VALUES[0], (long) BASE_VALUES[1], QueryConstants.NULL_LONG), + TableTools.floatCol("FloatCol", (float) BASE_VALUES[0], (float) BASE_VALUES[1], + QueryConstants.NULL_FLOAT), + TableTools.doubleCol("DoubleCol", (double) BASE_VALUES[0], (double) BASE_VALUES[1], + QueryConstants.NULL_DOUBLE)); } enum TestFormulaColumnEnum { diff --git a/DB/src/test/java/io/deephaven/db/v2/select/SelectColumnTest.java b/DB/src/test/java/io/deephaven/db/v2/select/SelectColumnTest.java index 5391e52198e..64090a6df21 100644 --- a/DB/src/test/java/io/deephaven/db/v2/select/SelectColumnTest.java +++ b/DB/src/test/java/io/deephaven/db/v2/select/SelectColumnTest.java @@ -29,8 +29,7 @@ public void testRawString() { expect(Selectable.of(FOO, RawString.of("foo(X-13)")), SwitchColumn.class, "Foo=foo(X-13)"); } - private static void expect(Selectable selectable, Class clazz, - String expected) { + private static void 
expect(Selectable selectable, Class clazz, String expected) { SelectColumn impl = SelectColumn.of(selectable); assertThat(impl).isInstanceOf(clazz); // SelectColumn doesn't necessary implement equals, so we need to use the string repr diff --git a/DB/src/test/java/io/deephaven/db/v2/select/TestClockFilters.java b/DB/src/test/java/io/deephaven/db/v2/select/TestClockFilters.java index 4de551d5d99..a517dabf20d 100644 --- a/DB/src/test/java/io/deephaven/db/v2/select/TestClockFilters.java +++ b/DB/src/test/java/io/deephaven/db/v2/select/TestClockFilters.java @@ -21,14 +21,14 @@ public class TestClockFilters extends LiveTableTestCase { private final Table testInput3; { final Table testInputRangeA = newTable( - col("Timestamp", nanosToTime(1000L), nanosToTime(2000L), nanosToTime(3000L), - nanosToTime(1000L), nanosToTime(2000L), nanosToTime(3000L)), - intCol("Int", 1, 2, 3, 1, 2, 3)); + col("Timestamp", nanosToTime(1000L), nanosToTime(2000L), nanosToTime(3000L), nanosToTime(1000L), + nanosToTime(2000L), nanosToTime(3000L)), + intCol("Int", 1, 2, 3, 1, 2, 3)); testInput1 = merge(testInputRangeA, testInputRangeA, testInputRangeA); final Table testInputRangeB = newTable( - col("Timestamp", nanosToTime(2000L), nanosToTime(2000L), nanosToTime(3000L), - nanosToTime(2000L), nanosToTime(2000L), nanosToTime(3000L)), - intCol("Int", 2, 2, 3, 2, 2, 3)); + col("Timestamp", nanosToTime(2000L), nanosToTime(2000L), nanosToTime(3000L), nanosToTime(2000L), + nanosToTime(2000L), nanosToTime(3000L)), + intCol("Int", 2, 2, 3, 2, 2, 3)); testInput2 = merge(testInputRangeA, testInputRangeB, testInputRangeA); testInput3 = merge(testInputRangeA, testInputRangeB, testInputRangeB); } @@ -46,15 +46,14 @@ public void testSorted1() { clock.refresh(); filter.refresh(); }); - assertEquals(new int[] {1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2}, - (int[]) result.getColumn("Int").getDirect()); + assertEquals(new int[] {1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2}, (int[]) result.getColumn("Int").getDirect()); 
LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { clock.refresh(); filter.refresh(); }); assertEquals(new int[] {1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3}, - (int[]) result.getColumn("Int").getDirect()); + (int[]) result.getColumn("Int").getDirect()); } public void testUnsorted1() { @@ -68,15 +67,14 @@ public void testUnsorted1() { clock.refresh(); filter.refresh(); }); - assertEquals(new int[] {1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2}, - (int[]) result.getColumn("Int").getDirect()); + assertEquals(new int[] {1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2}, (int[]) result.getColumn("Int").getDirect()); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { clock.refresh(); filter.refresh(); }); assertEquals(new int[] {1, 2, 3, 1, 2, 3, 1, 2, 3, 1, 2, 3, 1, 2, 3, 1, 2, 3}, - (int[]) result.getColumn("Int").getDirect()); + (int[]) result.getColumn("Int").getDirect()); } public void testSorted2() { @@ -90,15 +88,14 @@ public void testSorted2() { clock.refresh(); filter.refresh(); }); - assertEquals(new int[] {1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2}, - (int[]) result.getColumn("Int").getDirect()); + assertEquals(new int[] {1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2}, (int[]) result.getColumn("Int").getDirect()); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { clock.refresh(); filter.refresh(); }); assertEquals(new int[] {1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3}, - (int[]) result.getColumn("Int").getDirect()); + (int[]) result.getColumn("Int").getDirect()); } public void testUnsorted2() { @@ -112,15 +109,14 @@ public void testUnsorted2() { clock.refresh(); filter.refresh(); }); - assertEquals(new int[] {1, 2, 1, 2, 2, 2, 2, 2, 1, 2, 1, 2}, - (int[]) result.getColumn("Int").getDirect()); + assertEquals(new int[] {1, 2, 1, 2, 2, 2, 2, 2, 1, 2, 1, 2}, (int[]) result.getColumn("Int").getDirect()); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { clock.refresh(); filter.refresh(); }); assertEquals(new int[] {1, 2, 3, 1, 2, 3, 2, 2, 3, 2, 2, 3, 1, 2, 3, 1, 
2, 3}, - (int[]) result.getColumn("Int").getDirect()); + (int[]) result.getColumn("Int").getDirect()); } public void testSorted3() { @@ -134,15 +130,14 @@ public void testSorted3() { clock.refresh(); filter.refresh(); }); - assertEquals(new int[] {1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2}, - (int[]) result.getColumn("Int").getDirect()); + assertEquals(new int[] {1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2}, (int[]) result.getColumn("Int").getDirect()); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { clock.refresh(); filter.refresh(); }); assertEquals(new int[] {1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3}, - (int[]) result.getColumn("Int").getDirect()); + (int[]) result.getColumn("Int").getDirect()); } public void testUnsorted3() { @@ -156,14 +151,13 @@ public void testUnsorted3() { clock.refresh(); filter.refresh(); }); - assertEquals(new int[] {1, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2}, - (int[]) result.getColumn("Int").getDirect()); + assertEquals(new int[] {1, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2}, (int[]) result.getColumn("Int").getDirect()); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { clock.refresh(); filter.refresh(); }); assertEquals(new int[] {1, 2, 3, 1, 2, 3, 2, 2, 3, 2, 2, 3, 2, 2, 3, 2, 2, 3}, - (int[]) result.getColumn("Int").getDirect()); + (int[]) result.getColumn("Int").getDirect()); } } diff --git a/DB/src/test/java/io/deephaven/db/v2/select/TestConditionFilterGeneration.java b/DB/src/test/java/io/deephaven/db/v2/select/TestConditionFilterGeneration.java index b46d47b4e3f..fe485744077 100644 --- a/DB/src/test/java/io/deephaven/db/v2/select/TestConditionFilterGeneration.java +++ b/DB/src/test/java/io/deephaven/db/v2/select/TestConditionFilterGeneration.java @@ -31,8 +31,7 @@ public void generateFile() throws FileNotFoundException { // @Test public void generateArrayFile() throws FileNotFoundException { - new ModelFileGenerator(FilterKernelArraySample.class) - .generateFile(getArrayClassDefString()); + new 
ModelFileGenerator(FilterKernelArraySample.class).generateFile(getArrayClassDefString()); } @Test @@ -42,8 +41,7 @@ public void validateFile() throws IOException { @Test public void validateArrayFile() throws IOException { - new ModelFileGenerator(FilterKernelArraySample.class) - .validateFile(getArrayClassDefString()); + new ModelFileGenerator(FilterKernelArraySample.class).validateFile(getArrayClassDefString()); } @NotNull @@ -53,8 +51,8 @@ private static String getClassDefString() { QueryScope.getScope().putParam("p3", "10"); final Table t = TableTools.emptyTable(10).select("v1 = (short)1", "v2 = 1.1"); - final ConditionFilter conditionFilter = (ConditionFilter) ConditionFilter - .createConditionFilter("`foo`.equals((p1+p2+v1+v2) + p3)"); + final ConditionFilter conditionFilter = + (ConditionFilter) ConditionFilter.createConditionFilter("`foo`.equals((p1+p2+v1+v2) + p3)"); conditionFilter.init(t.getDefinition()); return conditionFilter.getClassBodyStr(); } @@ -64,7 +62,7 @@ private static String getArrayClassDefString() { final Table t = TableTools.emptyTable(10).select("v1 = (short)1", "v2 = 1.1"); final ConditionFilter conditionFilter = - (ConditionFilter) ConditionFilter.createConditionFilter("v1_.size() == v2_.size()"); + (ConditionFilter) ConditionFilter.createConditionFilter("v1_.size() == v2_.size()"); conditionFilter.init(t.getDefinition()); return conditionFilter.getClassBodyStr(); } diff --git a/DB/src/test/java/io/deephaven/db/v2/select/TestFormulaColumn.java b/DB/src/test/java/io/deephaven/db/v2/select/TestFormulaColumn.java index 368a5400d9d..dc522a85714 100644 --- a/DB/src/test/java/io/deephaven/db/v2/select/TestFormulaColumn.java +++ b/DB/src/test/java/io/deephaven/db/v2/select/TestFormulaColumn.java @@ -42,7 +42,7 @@ public class TestFormulaColumn { private static final boolean ENABLE_COMPILER_TOOLS_LOGGING = Configuration.getInstance() - .getBooleanForClassWithDefault(TestFormulaColumn.class, "CompilerTools.logEnabled", false); + 
.getBooleanForClassWithDefault(TestFormulaColumn.class, "CompilerTools.logEnabled", false); @Parameterized.Parameters(name = "useKernelFormulasProperty = {0}") public static Collection data() { @@ -136,26 +136,19 @@ public void testVariables() { } /** - * FormulaColumns never return boxed types (except Boolean). If the formula itself evaluates to - * a boxed type, it will be unboxed. + * FormulaColumns never return boxed types (except Boolean). If the formula itself evaluates to a boxed type, it + * will be unboxed. */ @Test public void testReturnUnboxedType() { for (int row = 0; row < testDataTable.size(); row++) { - checkPrimitive(row, "new Byte(ByteCol)", - DBLanguageFunctionUtil.byteCast(BASE_VALUES[row])); - checkPrimitive(row, "new Short(ShortCol)", - DBLanguageFunctionUtil.shortCast(BASE_VALUES[row])); - checkPrimitive(row, "new Character(CharCol)", - DBLanguageFunctionUtil.charCast(BASE_VALUES[row])); - checkPrimitive(row, "new Integer(IntCol)", - DBLanguageFunctionUtil.intCast(BASE_VALUES[row])); - checkPrimitive(row, "new Long(LongCol)", - DBLanguageFunctionUtil.longCast(BASE_VALUES[row])); - checkPrimitive(row, "new Float(FloatCol)", - DBLanguageFunctionUtil.floatCast(BASE_VALUES[row])); - checkPrimitive(row, "new Double(DoubleCol)", - DBLanguageFunctionUtil.doubleCast(BASE_VALUES[row])); + checkPrimitive(row, "new Byte(ByteCol)", DBLanguageFunctionUtil.byteCast(BASE_VALUES[row])); + checkPrimitive(row, "new Short(ShortCol)", DBLanguageFunctionUtil.shortCast(BASE_VALUES[row])); + checkPrimitive(row, "new Character(CharCol)", DBLanguageFunctionUtil.charCast(BASE_VALUES[row])); + checkPrimitive(row, "new Integer(IntCol)", DBLanguageFunctionUtil.intCast(BASE_VALUES[row])); + checkPrimitive(row, "new Long(LongCol)", DBLanguageFunctionUtil.longCast(BASE_VALUES[row])); + checkPrimitive(row, "new Float(FloatCol)", DBLanguageFunctionUtil.floatCast(BASE_VALUES[row])); + checkPrimitive(row, "new Double(DoubleCol)", 
DBLanguageFunctionUtil.doubleCast(BASE_VALUES[row])); } } @@ -204,12 +197,10 @@ public void testArrayEvaluation() { @Test public void testNoInput() { - final String oldValue = - Configuration.getInstance().getProperty("CompilerTools.logEnabledDefault"); + final String oldValue = Configuration.getInstance().getProperty("CompilerTools.logEnabledDefault"); Configuration.getInstance().setProperty("CompilerTools.logEnabledDefault", "true"); try { - FormulaColumn formulaColumn = - FormulaColumn.createFormulaColumn("Foo", "(String)\"1234\""); + FormulaColumn formulaColumn = FormulaColumn.createFormulaColumn("Foo", "(String)\"1234\""); formulaColumn.initDef(Collections.emptyMap()); final String result = (String) formulaColumn.getDataView().get(0); assertEquals(result, "1234"); @@ -280,8 +271,7 @@ public void testResolution() { // // TODO: Make this test pass. // public void testMethodNameUsedInFormulaClass1() { - // /* A Formula's local "get() method will take precedence over one made available in a static - // import; + // /* A Formula's local "get() method will take precedence over one made available in a static import; // the parser should ensure that a method from a static import will not be masked by methods in // a generated Formula class. // (JLS 15.12.1; https://docs.oracle.com/javase/specs/jls/se8/html/jls-15.html#jls-15.12.1) @@ -293,8 +283,7 @@ public void testResolution() { // // // TODO: Make this test pass. // public void testMethodNameUsedInFormulaClass2() { - // /* A Formula's local "get() method will take precedence over one made available in a static - // import; + // /* A Formula's local "get() method will take precedence over one made available in a static import; // the parser should ensure that a method from a static import will not be masked by methods in // a generated Formula class. 
// (JLS 15.12.1; https://docs.oracle.com/javase/specs/jls/se8/html/jls-15.html#jls-15.12.1) @@ -342,8 +331,7 @@ public void testObjectConstruction() { @Test public void testException() { // Parse exceptions: - checkExpectingParseException("this_variable_definitely_has_not_been_defined"); // non-existent - // variable + checkExpectingParseException("this_variable_definitely_has_not_been_defined"); // non-existent variable // Runtime exceptions: checkExpectingEvaluationException("((Object) null).hashCode()"); // NPE @@ -353,14 +341,12 @@ public void testException() { QueryScope.addParam("sdf", new SimpleDateFormat("MM/dd/yy")); - FormulaColumn formulaColumn = - FormulaColumn.createFormulaColumn("Foo", "sdf.format(sdf.parse(`11/21/16`))"); + FormulaColumn formulaColumn = FormulaColumn.createFormulaColumn("Foo", "sdf.format(sdf.parse(`11/21/16`))"); formulaColumn.initDef(Collections.emptyMap()); final String result = (String) formulaColumn.getDataView().get(0); assertEquals(result, "11/21/16"); - formulaColumn = - FormulaColumn.createFormulaColumn("Foo", "sdf.format(sdf.parse(`11-21-16`))"); + formulaColumn = FormulaColumn.createFormulaColumn("Foo", "sdf.format(sdf.parse(`11-21-16`))"); formulaColumn.initDef(Collections.emptyMap()); Exception caught = null; @@ -377,8 +363,7 @@ public void testCasts() { double result; { - FormulaColumn formulaColumn = - FormulaColumn.createFormulaColumn("Foo", "(double)IntCol"); + FormulaColumn formulaColumn = FormulaColumn.createFormulaColumn("Foo", "(double)IntCol"); formulaColumn.initDef(availableColumns); formulaColumn.initInputs(testDataTable); @@ -393,8 +378,7 @@ public void testCasts() { } { - FormulaColumn formulaColumn = - FormulaColumn.createFormulaColumn("Foo", "(double)myIntObj"); + FormulaColumn formulaColumn = FormulaColumn.createFormulaColumn("Foo", "(double)myIntObj"); formulaColumn.initDef(availableColumns); result = formulaColumn.getDataView().getDouble(0); assertEquals((double) QUERYSCOPE_OBJ_BASE_VALUE, result); @@ 
-413,23 +397,21 @@ public void testCasts() { */ @Test public void testPrimitiveCasts() { - final List primitiveTypes = - new ArrayList<>(io.deephaven.util.type.TypeUtils.PRIMITIVE_TYPES); + final List primitiveTypes = new ArrayList<>(io.deephaven.util.type.TypeUtils.PRIMITIVE_TYPES); for (int i = 0; i < io.deephaven.util.type.TypeUtils.PRIMITIVE_TYPES.size(); i++) { final Class sourceType = primitiveTypes.get(i); final String sourceTypeName = sourceType.getName(); - final String sourceColName = Character.toUpperCase(sourceTypeName.charAt(0)) - + sourceTypeName.substring(1) + "Col"; + final String sourceColName = + Character.toUpperCase(sourceTypeName.charAt(0)) + sourceTypeName.substring(1) + "Col"; for (int j = 0; j < io.deephaven.util.type.TypeUtils.PRIMITIVE_TYPES.size(); j++) { final Class destType = primitiveTypes.get(j); final String destTypeName = destType.getName(); - String expression = "(" + destTypeName + ")" + sourceColName; // e.g. - // "TestCast=(int)myShortObj" + String expression = "(" + destTypeName + ")" + sourceColName; // e.g. 
"TestCast=(int)myShortObj" final Object[] expectedResults = - new Object[testDataTable.intSize("TestFormulaColumn.testPrimitiveCasts")]; + new Object[testDataTable.intSize("TestFormulaColumn.testPrimitiveCasts")]; if (destType == boolean.class) { expectedResults[0] = false; expectedResults[1] = true; @@ -442,21 +424,19 @@ public void testPrimitiveCasts() { for (int row = 0; row < testDataTable.size(); row++) { try { - if ( // booleans can only be cast to/from booleans; should be a compile time - // exception otherwise + if ( // booleans can only be cast to/from booleans; should be a compile time exception otherwise sourceType == boolean.class ^ destType == boolean.class - // also, we should hit a runtime exception casting null to boolean (NPE - // while unboxing) - || (sourceType == boolean.class && row == NULL_ROW_INDEX)) { + // also, we should hit a runtime exception casting null to boolean (NPE while unboxing) + || (sourceType == boolean.class && row == NULL_ROW_INDEX)) { checkExpectingException(row, expression); } else { checkPrimitive(row, expression, expectedResults[row]); } } catch (Throwable ex) { - throw new RuntimeException("Failed testing cast of " + sourceType.getName() - + " to " + destType.getName() + - " (i=" + i + ", j=" + j + ", row=" + row + "). Formula:\n" + expression, - ex); + throw new RuntimeException( + "Failed testing cast of " + sourceType.getName() + " to " + destType.getName() + + " (i=" + i + ", j=" + j + ", row=" + row + "). 
Formula:\n" + expression, + ex); } } } @@ -468,30 +448,23 @@ public void testPrimitiveCasts() { */ @Test public void testUnboxingCasts() { - final List boxedTypes = - new ArrayList<>(io.deephaven.util.type.TypeUtils.BOXED_TYPES); - final List primitiveTypes = - new ArrayList<>(io.deephaven.util.type.TypeUtils.PRIMITIVE_TYPES); + final List boxedTypes = new ArrayList<>(io.deephaven.util.type.TypeUtils.BOXED_TYPES); + final List primitiveTypes = new ArrayList<>(io.deephaven.util.type.TypeUtils.PRIMITIVE_TYPES); for (int i = 0; i < boxedTypes.size(); i++) { final Class sourceType = boxedTypes.get(i); final String sourceTypeName = sourceType.getSimpleName(); - final String unboxedSourceTypeName = - io.deephaven.util.type.TypeUtils.getUnboxedType(sourceType).getName(); + final String unboxedSourceTypeName = io.deephaven.util.type.TypeUtils.getUnboxedType(sourceType).getName(); final String unboxedSourceTypeNameProperCase = - Character.toUpperCase(unboxedSourceTypeName.charAt(0)) - + unboxedSourceTypeName.substring(1); + Character.toUpperCase(unboxedSourceTypeName.charAt(0)) + unboxedSourceTypeName.substring(1); final String sourceColName = unboxedSourceTypeNameProperCase + "Col"; - final String boxedTypeConstructorCall = - "new " + sourceTypeName + '(' + sourceColName + ')'; + final String boxedTypeConstructorCall = "new " + sourceTypeName + '(' + sourceColName + ')'; for (int j = 0; j < primitiveTypes.size(); j++) { Class destType = primitiveTypes.get(j); final String destTypeName = destType.getName(); final String destTypeCast = '(' + destTypeName + ')'; - final String expression = destTypeCast + boxedTypeConstructorCall; // e.g. - // "(double)new - // Integer(IntCol)" + final String expression = destTypeCast + boxedTypeConstructorCall; // e.g. 
"(double)new Integer(IntCol)" final Object[] expectedResults = new Object[3]; final String boxedTypeNullPointer, boxedQueryConstantsNull; @@ -500,8 +473,7 @@ public void testUnboxingCasts() { expectedResults[1] = true; expectedResults[2] = null; - boxedTypeNullPointer = null; // would be "(Boolean)null", but tests don't use - // this + boxedTypeNullPointer = null; // would be "(Boolean)null", but tests don't use this boxedQueryConstantsNull = null; // does not apply to booleans in the db. } else { // i != j; Unboxing/unboxing and widening conversions @@ -510,33 +482,29 @@ public void testUnboxingCasts() { expectedResults[2] = getBoxedBaseVal(2, destType); boxedTypeNullPointer = '(' + sourceTypeName + ")null"; // e.g. "(Byte)null" - boxedQueryConstantsNull = "new " + sourceTypeName + "(NULL_" - + unboxedSourceTypeName.toUpperCase() + ')'; // e.g. new Byte(NULL_BYTE) + boxedQueryConstantsNull = + "new " + sourceTypeName + "(NULL_" + unboxedSourceTypeName.toUpperCase() + ')'; // e.g. new + // Byte(NULL_BYTE) } boolean compileTimeUnsupportedConversion = - // only unboxing or unboxing+widening is allowed; i > j means i wider than j - i > j - // Booleans, and only Booleans, can be unboxed to booleans. Should be a - // parse exception otherwise - || sourceType == Boolean.class ^ destType == boolean.class - // also, Byte/Short can't be cast to char - || destType == char.class - && (sourceType == Byte.class || sourceType == Short.class); + // only unboxing or unboxing+widening is allowed; i > j means i wider than j + i > j + // Booleans, and only Booleans, can be unboxed to booleans. 
Should be a parse exception + // otherwise + || sourceType == Boolean.class ^ destType == boolean.class + // also, Byte/Short can't be cast to char + || destType == char.class && (sourceType == Byte.class || sourceType == Short.class); try { for (int row = 0; row < BASE_VALUES.length; row++) { try { if (compileTimeUnsupportedConversion) { checkExpectingParseException(row, expression); - } else if (sourceType == Boolean.class && row == NULL_ROW_INDEX) { // unboxing - // null - // reference - // causes - // runtime - // NPE - checkExpectingException(row, NullPointerException.class, - expression); + } else if (sourceType == Boolean.class && row == NULL_ROW_INDEX) { // unboxing null + // reference causes + // runtime NPE + checkExpectingException(row, NullPointerException.class, expression); } else { checkPrimitive(row, expression, expectedResults[row]); } @@ -546,27 +514,24 @@ public void testUnboxingCasts() { } if (!compileTimeUnsupportedConversion && !sourceType.equals(Boolean.class)) { - // Test unboxing a boxed QueryConstants null value. This should unbox, then - // convert. - // Thus we should have: (short)new Integer(NULL_INT) --> - // shortCast(intCast(new Integer(NULL_INT))) --> NULL_SHORT - final String conversionOfBoxedQCNull = - destTypeCast + boxedQueryConstantsNull; + // Test unboxing a boxed QueryConstants null value. This should unbox, then convert. + // Thus we should have: (short)new Integer(NULL_INT) --> shortCast(intCast(new + // Integer(NULL_INT))) --> NULL_SHORT + final String conversionOfBoxedQCNull = destTypeCast + boxedQueryConstantsNull; checkPrimitive(conversionOfBoxedQCNull, expectedResults[NULL_ROW_INDEX]); - // Test unboxing a null pointer of a boxed type. This should be converted - // into a + // Test unboxing a null pointer of a boxed type. This should be converted into a // QueryConstants null value of the appropriate type. 
- final String conversionOfNullBoxedType = - destTypeCast + boxedTypeNullPointer; + final String conversionOfNullBoxedType = destTypeCast + boxedTypeNullPointer; checkPrimitive(conversionOfNullBoxedType, expectedResults[NULL_ROW_INDEX]); } } catch (Throwable ex) { - throw new RuntimeException("Failed testing cast of " + sourceType.getName() - + " to " + destType.getName() + - " (i=" + i + ", j=" + j + "). Formula:\n" + expression, ex); + throw new RuntimeException( + "Failed testing cast of " + sourceType.getName() + " to " + destType.getName() + + " (i=" + i + ", j=" + j + "). Formula:\n" + expression, + ex); } } } @@ -589,7 +554,7 @@ public void testWrapWithCastIfNecessary() { private void testWrapWithCastHelper(final Class type, final String cast) { final String theFormula = "theFormula"; final String expected = cast == null ? theFormula - : DBLanguageFunctionUtil.class.getCanonicalName() + '.' + cast + '(' + theFormula + ')'; + : DBLanguageFunctionUtil.class.getCanonicalName() + '.' + cast + '(' + theFormula + ')'; final TypeAnalyzer ta = TypeAnalyzer.create(type); final String possiblyWrappedExpression = ta.wrapWithCastIfNecessary(theFormula); Assert.equals(possiblyWrappedExpression, "possiblyWrappedExpression", expected); @@ -694,13 +659,11 @@ private void checkPrimitive(String formulaString, Object expectedResult) { * Invokes the appropriate {@code check()} function for the boxed expected result * * @param formulaString The formula to check - * @param expectedResult The expected result. Must be a boxed type (e.g. {@link Integer}, - * {@link Character}, etc. + * @param expectedResult The expected result. Must be a boxed type (e.g. {@link Integer}, {@link Character}, etc. * @param index The index to check */ private void checkPrimitive(int index, String formulaString, Object expectedResult) { - Class unboxedType = - expectedResult == null ? null : TypeUtils.getUnboxedType(expectedResult.getClass()); + Class unboxedType = expectedResult == null ? 
null : TypeUtils.getUnboxedType(expectedResult.getClass()); if (unboxedType == byte.class) { check(index, formulaString, ((Number) expectedResult).byteValue()); @@ -730,14 +693,13 @@ private void checkExpectingException(int index, String formulaString) { } /** - * @param index The index of {@link #testDataTable} at which {@code formulaString} should be - * evaluateds - * @param exceptionTypeToExpect Expect an exception of this type, and not of any other (i.e. - * runtime or compilation) exception. If null, any {@code Exception} is expected. + * @param index The index of {@link #testDataTable} at which {@code formulaString} should be evaluateds + * @param exceptionTypeToExpect Expect an exception of this type, and not of any other (i.e. runtime or compilation) + * exception. If null, any {@code Exception} is expected. * @param formulaString The formula to evaluate */ - private void checkExpectingException(int index, - Class exceptionTypeToExpect, String formulaString) { + private void checkExpectingException(int index, Class exceptionTypeToExpect, + String formulaString) { final boolean expectSpecificInspection = exceptionTypeToExpect != null; @@ -760,7 +722,7 @@ private void checkExpectingException(int index, if (expectSpecificInspection) { if (!expectedExceptionWasThrown) { throw new AssertionFailure("Expected exception " + exceptionTypeToExpect.getName() - + " was not thrown; another exception was", theException); + + " was not thrown; another exception was", theException); } } else if (!anyExceptionWasThrown) { fail("Should have thrown an exception"); @@ -789,8 +751,7 @@ private void checkExpectingEvaluationException(int index, String formulaString) * @param t The throwable to check. Cannot be null. 
* @param exceptionType The type to check against */ - private static boolean involvesExceptionType(Throwable t, - Class exceptionType) { + private static boolean involvesExceptionType(Throwable t, Class exceptionType) { Assert.neqNull(t, "t"); for (; t != null; t = t.getCause()) { if (exceptionType.isAssignableFrom(t.getClass())) { @@ -807,8 +768,7 @@ private enum TestFormulaColumnEnum { @SuppressWarnings("WeakerAccess") public static class TestFormulaColumnAuxiliaryClass1 { - // 2017-10-07: Statically importing and calling this method will lead to a Formula class - // body we cannot compile + // 2017-10-07: Statically importing and calling this method will lead to a Formula class body we cannot compile private static long get(long arg) { return arg; } diff --git a/DB/src/test/java/io/deephaven/db/v2/select/TestFormulaColumnGeneration.java b/DB/src/test/java/io/deephaven/db/v2/select/TestFormulaColumnGeneration.java index e3f168ef492..9ad955c23bc 100644 --- a/DB/src/test/java/io/deephaven/db/v2/select/TestFormulaColumnGeneration.java +++ b/DB/src/test/java/io/deephaven/db/v2/select/TestFormulaColumnGeneration.java @@ -20,8 +20,7 @@ public class TestFormulaColumnGeneration { // code only needs to be run when the formula generation code changes, and only after // some human believes that the code is correct. When that happens the process // is: - // 1. Once the formula generation code is believed to be correct, uncomment @Test for this test - // case below. + // 1. Once the formula generation code is believed to be correct, uncomment @Test for this test case below. // 2. Run this test case once to generate new "golden master" files. // 3. Comment this "test" back out // 4. Confirm that the modified files pass the "validateFiles" case. 
@@ -30,8 +29,7 @@ public class TestFormulaColumnGeneration { public void generateFiles() throws FileNotFoundException { final DhFormulaColumn fc = (DhFormulaColumn) getFormulaColumn(); new ModelFileGenerator(FormulaSample.class).generateFile(fc.generateClassBody()); - new ModelFileGenerator(FormulaKernelSample.class) - .generateFile(fc.generateKernelClassBody()); + new ModelFileGenerator(FormulaKernelSample.class).generateFile(fc.generateKernelClassBody()); } @Before @@ -48,8 +46,7 @@ public void tearDown() { public void validateFiles() throws IOException { final DhFormulaColumn fc = (DhFormulaColumn) getFormulaColumn(); new ModelFileGenerator(FormulaSample.class).validateFile(fc.generateClassBody()); - new ModelFileGenerator(FormulaKernelSample.class) - .validateFile(fc.generateKernelClassBody()); + new ModelFileGenerator(FormulaKernelSample.class).validateFile(fc.generateKernelClassBody()); } @NotNull @@ -60,8 +57,7 @@ private static FormulaColumn getFormulaColumn() { // final DhFormulaColumn fc = new DhFormulaColumn("Value", "12345"); // final DhFormulaColumn fc = new DhFormulaColumn("Value", "k * i * ii"); // final DhFormulaColumn fc = new DhFormulaColumn("Value", "'2019-04-11T09:30 NY'"); - final FormulaColumn fc = - FormulaColumn.createFormulaColumn("Value", "I * II + q * ii + II_[i - 1]"); + final FormulaColumn fc = FormulaColumn.createFormulaColumn("Value", "I * II + q * ii + II_[i - 1]"); fc.initInputs(table); return fc; } diff --git a/DB/src/test/java/io/deephaven/db/v2/select/TestSelectFilterFactory.java b/DB/src/test/java/io/deephaven/db/v2/select/TestSelectFilterFactory.java index 0cef6fad92c..d143e4c79f2 100644 --- a/DB/src/test/java/io/deephaven/db/v2/select/TestSelectFilterFactory.java +++ b/DB/src/test/java/io/deephaven/db/v2/select/TestSelectFilterFactory.java @@ -33,11 +33,11 @@ public class TestSelectFilterFactory extends TestCase { protected void setUp() throws Exception { super.setUp(); table = TableTools.newTable( - 
TableTools.col(STRING_COLUMN, NORMAL_STRING, NEEDS_ESCAPE, NO_COMMAS_A, NO_COMMAS_B, - WITH_COMMAS_A, WITH_COMMAS_B), - TableTools.col(INTEGER_COLUMN, 0, 1, 2, 3, 4, 5), - TableTools.col(FLOAT_COLUMN, 0.0, 1.0, 2.0, 3.0, 4.0, 5.0), - TableTools.col(BOOLEAN_COLUMN, true, false, true, false, true, false)); + TableTools.col(STRING_COLUMN, NORMAL_STRING, NEEDS_ESCAPE, NO_COMMAS_A, NO_COMMAS_B, WITH_COMMAS_A, + WITH_COMMAS_B), + TableTools.col(INTEGER_COLUMN, 0, 1, 2, 3, 4, 5), + TableTools.col(FLOAT_COLUMN, 0.0, 1.0, 2.0, 3.0, 4.0, 5.0), + TableTools.col(BOOLEAN_COLUMN, true, false, true, false, true, false)); } public void testColumnNameInValueNormal() { @@ -106,7 +106,7 @@ public void testBackTicksMixedCommas() { public void testQuotesAndBackTicksMixedCommas() { String values = String.join(", ", wrapQuotes(NO_COMMAS_A), wrapQuotes(WITH_COMMAS_A), - wrapBackTicks(NO_COMMAS_B), wrapBackTicks(WITH_COMMAS_B)); + wrapBackTicks(NO_COMMAS_B), wrapBackTicks(WITH_COMMAS_B)); List result = runDelimitedExpression(STRING_COLUMN, values); assertFalse(result.contains(NORMAL_STRING)); assertFalse(result.contains(NEEDS_ESCAPE)); @@ -143,8 +143,7 @@ public void testBooleans() { } public void testUnmatchedQuoteNoCommas() { - final String values = - String.join(", ", unmatchedQuote(NO_COMMAS_A), wrapQuotes(NO_COMMAS_B)); + final String values = String.join(", ", unmatchedQuote(NO_COMMAS_A), wrapQuotes(NO_COMMAS_B)); try { runDelimitedExpression(STRING_COLUMN, values); fail("Expected FormulaCompilationException"); @@ -154,8 +153,7 @@ public void testUnmatchedQuoteNoCommas() { } public void testUnmatchedQuoteCommas() { - final String values = - String.join(", ", unmatchedQuote(WITH_COMMAS_A), wrapQuotes(WITH_COMMAS_B)); + final String values = String.join(", ", unmatchedQuote(WITH_COMMAS_A), wrapQuotes(WITH_COMMAS_B)); try { runDelimitedExpression(STRING_COLUMN, values); fail("Expected FormulaCompilationException"); @@ -165,8 +163,7 @@ public void testUnmatchedQuoteCommas() { } public 
void testUnmatchedBackTicksNoCommas() { - final String values = - String.join(", ", unmatchedBackTick(NO_COMMAS_A), wrapBackTicks(NO_COMMAS_B)); + final String values = String.join(", ", unmatchedBackTick(NO_COMMAS_A), wrapBackTicks(NO_COMMAS_B)); try { runDelimitedExpression(STRING_COLUMN, values); fail("Expected FormulaCompilationException"); @@ -176,8 +173,7 @@ public void testUnmatchedBackTicksNoCommas() { } public void testUnmatchedBackTicksCommas() { - final String values = - String.join(", ", unmatchedBackTick(WITH_COMMAS_A), wrapBackTicks(WITH_COMMAS_B)); + final String values = String.join(", ", unmatchedBackTick(WITH_COMMAS_A), wrapBackTicks(WITH_COMMAS_B)); try { runDelimitedExpression(STRING_COLUMN, values); fail("Expected FormulaCompilationException"); diff --git a/DB/src/test/java/io/deephaven/db/v2/sort/megamerge/MegaMergeTestUtils.java b/DB/src/test/java/io/deephaven/db/v2/sort/megamerge/MegaMergeTestUtils.java index 816fe78eb40..121b78f1030 100644 --- a/DB/src/test/java/io/deephaven/db/v2/sort/megamerge/MegaMergeTestUtils.java +++ b/DB/src/test/java/io/deephaven/db/v2/sort/megamerge/MegaMergeTestUtils.java @@ -38,54 +38,45 @@ static String getRandomObject(Random random) { return random.nextDouble() < 0.1 ? 
null : Long.toString(random.nextInt()); } - static void doSort(boolean ascending, int chunkSize, - WritableCharChunk values, - WritableLongChunk keys) { + static void doSort(boolean ascending, int chunkSize, WritableCharChunk values, + WritableLongChunk keys) { if (ascending) { - try ( - final NullAwareCharLongTimsortKernel.CharLongSortKernelContext sortKernel = + try (final NullAwareCharLongTimsortKernel.CharLongSortKernelContext sortKernel = NullAwareCharLongTimsortKernel.createContext(chunkSize)) { NullAwareCharLongTimsortKernel.sort(sortKernel, keys, values); } } else { - try ( - final NullAwareCharLongTimsortDescendingKernel.CharLongSortKernelContext sortKernel = + try (final NullAwareCharLongTimsortDescendingKernel.CharLongSortKernelContext sortKernel = NullAwareCharLongTimsortDescendingKernel.createContext(chunkSize)) { NullAwareCharLongTimsortDescendingKernel.sort(sortKernel, keys, values); } } } - static void doSort(boolean ascending, int chunkSize, - WritableByteChunk values, - WritableLongChunk keys) { + static void doSort(boolean ascending, int chunkSize, WritableByteChunk values, + WritableLongChunk keys) { if (ascending) { - try ( - final ByteLongTimsortKernel.ByteLongSortKernelContext sortKernel = + try (final ByteLongTimsortKernel.ByteLongSortKernelContext sortKernel = ByteLongTimsortKernel.createContext(chunkSize)) { ByteLongTimsortKernel.sort(sortKernel, keys, values); } } else { - try ( - final ByteLongTimsortDescendingKernel.ByteLongSortKernelContext sortKernel = + try (final ByteLongTimsortDescendingKernel.ByteLongSortKernelContext sortKernel = ByteLongTimsortDescendingKernel.createContext(chunkSize)) { ByteLongTimsortDescendingKernel.sort(sortKernel, keys, values); } } } - static void doSort(boolean ascending, int chunkSize, - WritableShortChunk values, - WritableLongChunk keys) { + static void doSort(boolean ascending, int chunkSize, WritableShortChunk values, + WritableLongChunk keys) { if (ascending) { - try ( - final 
ShortLongTimsortKernel.ShortLongSortKernelContext sortKernel = + try (final ShortLongTimsortKernel.ShortLongSortKernelContext sortKernel = ShortLongTimsortKernel.createContext(chunkSize)) { ShortLongTimsortKernel.sort(sortKernel, keys, values); } } else { - try ( - final ShortLongTimsortDescendingKernel.ShortLongSortKernelContext sortKernel = + try (final ShortLongTimsortDescendingKernel.ShortLongSortKernelContext sortKernel = ShortLongTimsortDescendingKernel.createContext(chunkSize)) { ShortLongTimsortDescendingKernel.sort(sortKernel, keys, values); } @@ -93,52 +84,44 @@ static void doSort(boolean ascending, int chunkSize, } static void doSort(boolean ascending, int chunkSize, WritableIntChunk values, - WritableLongChunk keys) { + WritableLongChunk keys) { if (ascending) { - try ( - final IntLongTimsortKernel.IntLongSortKernelContext sortKernel = + try (final IntLongTimsortKernel.IntLongSortKernelContext sortKernel = IntLongTimsortKernel.createContext(chunkSize)) { IntLongTimsortKernel.sort(sortKernel, keys, values); } } else { - try ( - final IntLongTimsortDescendingKernel.IntLongSortKernelContext sortKernel = + try (final IntLongTimsortDescendingKernel.IntLongSortKernelContext sortKernel = IntLongTimsortDescendingKernel.createContext(chunkSize)) { IntLongTimsortDescendingKernel.sort(sortKernel, keys, values); } } } - static void doSort(boolean ascending, int chunkSize, - WritableLongChunk values, - WritableLongChunk keys) { + static void doSort(boolean ascending, int chunkSize, WritableLongChunk values, + WritableLongChunk keys) { if (ascending) { - try ( - final LongLongTimsortKernel.LongLongSortKernelContext sortKernel = + try (final LongLongTimsortKernel.LongLongSortKernelContext sortKernel = LongLongTimsortKernel.createContext(chunkSize)) { LongLongTimsortKernel.sort(sortKernel, keys, values); } } else { - try ( - final LongLongTimsortDescendingKernel.LongLongSortKernelContext sortKernel = + try (final 
LongLongTimsortDescendingKernel.LongLongSortKernelContext sortKernel = LongLongTimsortDescendingKernel.createContext(chunkSize)) { LongLongTimsortDescendingKernel.sort(sortKernel, keys, values); } } } - static void doSort(boolean ascending, int chunkSize, - WritableFloatChunk values, - WritableLongChunk keys) { + static void doSort(boolean ascending, int chunkSize, WritableFloatChunk values, + WritableLongChunk keys) { if (ascending) { - try ( - final FloatLongTimsortKernel.FloatLongSortKernelContext sortKernel = + try (final FloatLongTimsortKernel.FloatLongSortKernelContext sortKernel = FloatLongTimsortKernel.createContext(chunkSize)) { FloatLongTimsortKernel.sort(sortKernel, keys, values); } } else { - try ( - final FloatLongTimsortDescendingKernel.FloatLongSortKernelContext sortKernel = + try (final FloatLongTimsortDescendingKernel.FloatLongSortKernelContext sortKernel = FloatLongTimsortDescendingKernel.createContext(chunkSize)) { FloatLongTimsortDescendingKernel.sort(sortKernel, keys, values); } @@ -146,36 +129,30 @@ static void doSort(boolean ascending, int chunkSize, } - static void doSort(boolean ascending, int chunkSize, - WritableDoubleChunk values, - WritableLongChunk keys) { + static void doSort(boolean ascending, int chunkSize, WritableDoubleChunk values, + WritableLongChunk keys) { if (ascending) { - try ( - final DoubleLongTimsortKernel.DoubleLongSortKernelContext sortKernel = + try (final DoubleLongTimsortKernel.DoubleLongSortKernelContext sortKernel = DoubleLongTimsortKernel.createContext(chunkSize)) { DoubleLongTimsortKernel.sort(sortKernel, keys, values); } } else { - try ( - final DoubleLongTimsortDescendingKernel.DoubleLongSortKernelContext sortKernel = + try (final DoubleLongTimsortDescendingKernel.DoubleLongSortKernelContext sortKernel = DoubleLongTimsortDescendingKernel.createContext(chunkSize)) { DoubleLongTimsortDescendingKernel.sort(sortKernel, keys, values); } } } - static void doSort(boolean ascending, int chunkSize, - 
WritableObjectChunk values, - WritableLongChunk keys) { + static void doSort(boolean ascending, int chunkSize, WritableObjectChunk values, + WritableLongChunk keys) { if (ascending) { - try ( - final ObjectLongTimsortKernel.ObjectLongSortKernelContext sortKernel = + try (final ObjectLongTimsortKernel.ObjectLongSortKernelContext sortKernel = ObjectLongTimsortKernel.createContext(chunkSize)) { ObjectLongTimsortKernel.sort(sortKernel, keys, values); } } else { - try ( - final ObjectLongTimsortDescendingKernel.ObjectLongSortKernelContext sortKernel = + try (final ObjectLongTimsortDescendingKernel.ObjectLongSortKernelContext sortKernel = ObjectLongTimsortDescendingKernel.createContext(chunkSize)) { ObjectLongTimsortDescendingKernel.sort(sortKernel, keys, values); } diff --git a/DB/src/test/java/io/deephaven/db/v2/sort/timsort/MergeSort.java b/DB/src/test/java/io/deephaven/db/v2/sort/timsort/MergeSort.java index c4050e6f02d..d7685e3378d 100644 --- a/DB/src/test/java/io/deephaven/db/v2/sort/timsort/MergeSort.java +++ b/DB/src/test/java/io/deephaven/db/v2/sort/timsort/MergeSort.java @@ -10,9 +10,9 @@ private static void swap(long[] x, int a, int b) { } public static void mergeSort(long[] src, - long[] dest, - int low, int high, int off, - ColumnComparatorFactory.IComparator c) { + long[] dest, + int low, int high, int off, + ColumnComparatorFactory.IComparator c) { int length = high - low; // Insertion sort on smallest arrays diff --git a/DB/src/test/java/io/deephaven/db/v2/sort/timsort/ReplicateSortKernelTests.java b/DB/src/test/java/io/deephaven/db/v2/sort/timsort/ReplicateSortKernelTests.java index d78b6c304b9..c596842853e 100644 --- a/DB/src/test/java/io/deephaven/db/v2/sort/timsort/ReplicateSortKernelTests.java +++ b/DB/src/test/java/io/deephaven/db/v2/sort/timsort/ReplicateSortKernelTests.java @@ -22,31 +22,24 @@ public class ReplicateSortKernelTests { public static void main(String[] args) throws IOException { ReplicateSortKernel.main(args); - 
ReplicatePrimitiveCode.charToAllButBoolean(TestCharTimSortKernel.class, - ReplicatePrimitiveCode.TEST_SRC); - ReplicatePrimitiveCode.charToAllButBoolean(BaseTestCharTimSortKernel.class, - ReplicatePrimitiveCode.TEST_SRC); - ReplicatePrimitiveCode.charToAllButBoolean(CharSortKernelBenchmark.class, - ReplicatePrimitiveCode.BENCHMARK_SRC); + ReplicatePrimitiveCode.charToAllButBoolean(TestCharTimSortKernel.class, ReplicatePrimitiveCode.TEST_SRC); + ReplicatePrimitiveCode.charToAllButBoolean(BaseTestCharTimSortKernel.class, ReplicatePrimitiveCode.TEST_SRC); + ReplicatePrimitiveCode.charToAllButBoolean(CharSortKernelBenchmark.class, ReplicatePrimitiveCode.BENCHMARK_SRC); ReplicatePrimitiveCode.charToAllButBoolean(CharPartitionKernelBenchmark.class, - ReplicatePrimitiveCode.BENCHMARK_SRC); - ReplicatePrimitiveCode.charToAllButBoolean(TestCharPermuteKernel.class, - ReplicatePrimitiveCode.TEST_SRC); + ReplicatePrimitiveCode.BENCHMARK_SRC); + ReplicatePrimitiveCode.charToAllButBoolean(TestCharPermuteKernel.class, ReplicatePrimitiveCode.TEST_SRC); - ReplicatePrimitiveCode.charToAllButBoolean(TestCharLongMegaMerge.class, - ReplicatePrimitiveCode.TEST_SRC); + ReplicatePrimitiveCode.charToAllButBoolean(TestCharLongMegaMerge.class, ReplicatePrimitiveCode.TEST_SRC); - final String baseTestPath = ReplicatePrimitiveCode - .charToObject(BaseTestCharTimSortKernel.class, ReplicatePrimitiveCode.TEST_SRC); + final String baseTestPath = + ReplicatePrimitiveCode.charToObject(BaseTestCharTimSortKernel.class, ReplicatePrimitiveCode.TEST_SRC); fixupObject(baseTestPath); - ReplicatePrimitiveCode.charToObject(TestCharTimSortKernel.class, - ReplicatePrimitiveCode.TEST_SRC); - ReplicatePrimitiveCode.charToObject(CharSortKernelBenchmark.class, - ReplicatePrimitiveCode.BENCHMARK_SRC); + ReplicatePrimitiveCode.charToObject(TestCharTimSortKernel.class, ReplicatePrimitiveCode.TEST_SRC); + ReplicatePrimitiveCode.charToObject(CharSortKernelBenchmark.class, ReplicatePrimitiveCode.BENCHMARK_SRC); - final 
String objectMegaMergePath = ReplicatePrimitiveCode - .charToObject(TestCharLongMegaMerge.class, ReplicatePrimitiveCode.TEST_SRC); + final String objectMegaMergePath = + ReplicatePrimitiveCode.charToObject(TestCharLongMegaMerge.class, ReplicatePrimitiveCode.TEST_SRC); fixupObjectMegaMerge(objectMegaMergePath); } @@ -58,12 +51,11 @@ private static void fixupObject(String objectPath) throws IOException { lines.add(packageIndex + 2, "import java.util.Objects;"); - lines = - lines.stream().map(x -> x.replaceAll("ObjectChunk", "ObjectChunk")) + lines = lines.stream().map(x -> x.replaceAll("ObjectChunk", "ObjectChunk")) .collect(Collectors.toList()); - lines = fixupTupleColumnSource(ReplicateSortKernel.fixupObjectComparisons( - fixupMergesort(fixupGetJavaMultiComparator(fixupGetJavaComparator(lines))))); + lines = fixupTupleColumnSource(ReplicateSortKernel + .fixupObjectComparisons(fixupMergesort(fixupGetJavaMultiComparator(fixupGetJavaComparator(lines))))); FileUtils.writeLines(objectFile, lines); } @@ -73,8 +65,7 @@ private static void fixupObjectMegaMerge(String objectPath) throws IOException { List lines = FileUtils.readLines(objectFile, Charset.defaultCharset()); lines = ReplicateUtilities.globalReplacements(lines, "ObjectArraySource\\(\\)", - "ObjectArraySource(String.class)", "ObjectChunk", - "ObjectChunk"); + "ObjectArraySource(String.class)", "ObjectChunk", "ObjectChunk"); FileUtils.writeLines(objectFile, lines); } @@ -82,38 +73,36 @@ private static void fixupObjectMegaMerge(String objectPath) throws IOException { @NotNull private static List fixupGetJavaComparator(List lines) { return ReplicateUtilities.applyFixup(lines, "getJavaComparator", - "(.*)Comparator.comparing\\(ObjectLongTuple::getFirstElement\\)(.*)", - m -> Arrays.asList(" // noinspection unchecked", - m.group(1) + "Comparator.comparing(x -> (Comparable)x.getFirstElement())" - + m.group(2))); + "(.*)Comparator.comparing\\(ObjectLongTuple::getFirstElement\\)(.*)", + m -> Arrays.asList(" // 
noinspection unchecked", + m.group(1) + "Comparator.comparing(x -> (Comparable)x.getFirstElement())" + m.group(2))); } @NotNull private static List fixupGetJavaMultiComparator(List lines) { return ReplicateUtilities.applyFixup(lines, "getJavaMultiComparator", - "(.*)Comparator.comparing\\(ObjectLongLongTuple::getFirstElement\\).thenComparing\\(ObjectLongLongTuple::getSecondElement\\)(.*)", - m -> Arrays.asList(" // noinspection unchecked", - m.group(1) - + "Comparator.comparing(x -> (Comparable)((ObjectLongLongTuple)x).getFirstElement()).thenComparing(x -> ((ObjectLongLongTuple)x).getSecondElement())" - + m.group(2))); + "(.*)Comparator.comparing\\(ObjectLongLongTuple::getFirstElement\\).thenComparing\\(ObjectLongLongTuple::getSecondElement\\)(.*)", + m -> Arrays.asList(" // noinspection unchecked", + m.group(1) + + "Comparator.comparing(x -> (Comparable)((ObjectLongLongTuple)x).getFirstElement()).thenComparing(x -> ((ObjectLongLongTuple)x).getSecondElement())" + + m.group(2))); } @NotNull private static List fixupMergesort(List lines) { - return ReplicateUtilities.applyFixup(lines, "mergesort", - "(.*)Object.compare\\((.*), (.*)\\)\\)(.*)", - m -> Arrays.asList(" // noinspection unchecked", - m.group(1) + "Objects.compare((Comparable)" + m.group(2) + ", (Comparable)" - + m.group(3) + ", Comparator.naturalOrder()))" + m.group(4))); + return ReplicateUtilities.applyFixup(lines, "mergesort", "(.*)Object.compare\\((.*), (.*)\\)\\)(.*)", + m -> Arrays.asList(" // noinspection unchecked", + m.group(1) + "Objects.compare((Comparable)" + m.group(2) + ", (Comparable)" + m.group(3) + + ", Comparator.naturalOrder()))" + m.group(4))); } @NotNull private static List fixupTupleColumnSource(List lines) { return ReplicateUtilities.replaceRegion(lines, "tuple column source", Arrays.asList( - " @Override", - " public Object get(long index) {", - " return javaTuples.get(((int)index) / 10).getFirstElement();", - " }")); + " @Override", + " public Object get(long index) {", + " return 
javaTuples.get(((int)index) / 10).getFirstElement();", + " }")); } diff --git a/DB/src/test/java/io/deephaven/db/v2/sort/timsort/TestFpTimsortComparisons.java b/DB/src/test/java/io/deephaven/db/v2/sort/timsort/TestFpTimsortComparisons.java index 44c09017a4f..7dd7b2c317a 100644 --- a/DB/src/test/java/io/deephaven/db/v2/sort/timsort/TestFpTimsortComparisons.java +++ b/DB/src/test/java/io/deephaven/db/v2/sort/timsort/TestFpTimsortComparisons.java @@ -84,8 +84,7 @@ public void doubleDescGeq() { TestCase.assertFalse(DoubleLongTimsortDescendingKernel.geq(Double.NaN, 0.0)); TestCase.assertTrue(DoubleLongTimsortDescendingKernel.geq(Double.NaN, Double.NaN)); TestCase.assertFalse(DoubleLongTimsortDescendingKernel.geq(0.0, -1.0)); - TestCase - .assertFalse(DoubleLongTimsortDescendingKernel.geq(0.0, QueryConstants.NULL_DOUBLE)); + TestCase.assertFalse(DoubleLongTimsortDescendingKernel.geq(0.0, QueryConstants.NULL_DOUBLE)); TestCase.assertTrue(DoubleLongTimsortDescendingKernel.geq(0.0, Double.NaN)); TestCase.assertTrue(DoubleLongTimsortDescendingKernel.geq(-1.0, 0.0)); TestCase.assertTrue(DoubleLongTimsortDescendingKernel.geq(QueryConstants.NULL_DOUBLE, 0.0)); @@ -99,8 +98,7 @@ public void doubleDescLeq() { TestCase.assertTrue(DoubleLongTimsortDescendingKernel.leq(0.0, QueryConstants.NULL_DOUBLE)); TestCase.assertFalse(DoubleLongTimsortDescendingKernel.leq(0.0, Double.NaN)); TestCase.assertFalse(DoubleLongTimsortDescendingKernel.leq(-1.0, 0.0)); - TestCase - .assertFalse(DoubleLongTimsortDescendingKernel.leq(QueryConstants.NULL_DOUBLE, 0.0)); + TestCase.assertFalse(DoubleLongTimsortDescendingKernel.leq(QueryConstants.NULL_DOUBLE, 0.0)); } @Test diff --git a/DB/src/test/java/io/deephaven/db/v2/sort/timsort/TestTimSortKernel.java b/DB/src/test/java/io/deephaven/db/v2/sort/timsort/TestTimSortKernel.java index d883aba89e7..1a7907a5e66 100644 --- a/DB/src/test/java/io/deephaven/db/v2/sort/timsort/TestTimSortKernel.java +++ 
b/DB/src/test/java/io/deephaven/db/v2/sort/timsort/TestTimSortKernel.java @@ -91,13 +91,12 @@ abstract static class SortMultiKernelStuff extends SortKernelStuff { } void performanceTest(GenerateTupleList generateValues, - Function, K> createKernelStuff, - Consumer runKernel, - Comparator comparator, - Function, M> createMergeStuff, - Consumer runMerge) { - for (int chunkSize = - INITIAL_PERFORMANCE_CHUNK_SIZE; chunkSize <= MAX_CHUNK_SIZE; chunkSize *= 2) { + Function, K> createKernelStuff, + Consumer runKernel, + Comparator comparator, + Function, M> createMergeStuff, + Consumer runMerge) { + for (int chunkSize = INITIAL_PERFORMANCE_CHUNK_SIZE; chunkSize <= MAX_CHUNK_SIZE; chunkSize *= 2) { System.out.println("Size = " + chunkSize); final PerfStats timStats = new PerfStats(100); @@ -125,8 +124,7 @@ void performanceTest(GenerateTupleList generateValues, } private void performanceTest(int chunkSize, GenerateTupleList tupleListGenerator, - Function, R> prepareFunction, Consumer timedFunction, - @Nullable PerfStats stats) { + Function, R> prepareFunction, Consumer timedFunction, @Nullable PerfStats stats) { for (int seed = 0; seed < PERFORMANCE_SEEDS; ++seed) { final Random random = new Random(seed); @@ -144,8 +142,8 @@ private void performanceTest(int chunkSize, GenerateTupleList tupleLis } } - void correctnessTest(int size, GenerateTupleList tupleListGenerator, - Comparator comparator, Function, SortKernelStuff> prepareFunction) { + void correctnessTest(int size, GenerateTupleList tupleListGenerator, Comparator comparator, + Function, SortKernelStuff> prepareFunction) { for (int seed = 0; seed < CORRECTNESS_SEEDS; ++seed) { System.out.println("Size = " + size + ", seed=" + seed); final Random random = new Random(seed); @@ -163,16 +161,15 @@ void correctnessTest(int size, GenerateTupleList tupleListGenerator, @FunctionalInterface interface PartitionKernelStuffFactory { - PartitionKernelStuff apply(List javaTuples, Index index, int chunkSize, - int nPartitions, boolean 
preserveEquality); + PartitionKernelStuff apply(List javaTuples, Index index, int chunkSize, int nPartitions, + boolean preserveEquality); } void partitionCorrectnessTest(int dataSize, int chunkSize, int nPartitions, - GenerateTupleList tupleListGenerator, Comparator comparator, - PartitionKernelStuffFactory prepareFunction) { + GenerateTupleList tupleListGenerator, Comparator comparator, + PartitionKernelStuffFactory prepareFunction) { for (int seed = 0; seed < CORRECTNESS_SEEDS; ++seed) { - System.out - .println("Size = " + dataSize + ", seed=" + seed + ", nPartitions=" + nPartitions); + System.out.println("Size = " + dataSize + ", seed=" + seed + ", nPartitions=" + nPartitions); final Random random = new Random(seed); final List javaTuples = tupleListGenerator.generate(random, dataSize); @@ -184,7 +181,7 @@ void partitionCorrectnessTest(int dataSize, int chunkSize, int nPartitions, final Index index = builder.getIndex(); final PartitionKernelStuff partitionStuff = - prepareFunction.apply(javaTuples, index, chunkSize, nPartitions, false); + prepareFunction.apply(javaTuples, index, chunkSize, nPartitions, false); partitionStuff.run(); @@ -192,8 +189,8 @@ void partitionCorrectnessTest(int dataSize, int chunkSize, int nPartitions, } } - void multiCorrectnessTest(int size, GenerateTupleList tupleListGenerator, - Comparator comparator, Function, SortKernelStuff> prepareFunction) { + void multiCorrectnessTest(int size, GenerateTupleList tupleListGenerator, Comparator comparator, + Function, SortKernelStuff> prepareFunction) { for (int seed = 0; seed < CORRECTNESS_SEEDS; ++seed) { System.out.println("Size = " + size + ", seed=" + seed); final Random random = new Random(seed); @@ -312,7 +309,7 @@ static float decrementFloatValue(Random random, float value) { } void dumpKeys(LongChunk chunk) { - System.out.println("[" + IntStream.range(0, chunk.size()).mapToObj(chunk::get) - .map(Object::toString).collect(Collectors.joining(",")) + "]"); + System.out.println("[" + 
IntStream.range(0, chunk.size()).mapToObj(chunk::get).map(Object::toString) + .collect(Collectors.joining(",")) + "]"); } } diff --git a/DB/src/test/java/io/deephaven/db/v2/sources/ArrayBackedColumnSourceTest.java b/DB/src/test/java/io/deephaven/db/v2/sources/ArrayBackedColumnSourceTest.java index 788eb0bfb71..796b8116eea 100644 --- a/DB/src/test/java/io/deephaven/db/v2/sources/ArrayBackedColumnSourceTest.java +++ b/DB/src/test/java/io/deephaven/db/v2/sources/ArrayBackedColumnSourceTest.java @@ -66,8 +66,8 @@ public void fromStringArray() { @Test public void fromInstants() { - check(ArrayBackedColumnSourceTest::checkInstant, Type.instantType(), - Instant.ofEpochMilli(1), null, Instant.ofEpochMilli(3)); + check(ArrayBackedColumnSourceTest::checkInstant, Type.instantType(), Instant.ofEpochMilli(1), null, + Instant.ofEpochMilli(3)); } @Test @@ -93,14 +93,12 @@ private static void check(GenericType type, T... values) { check(Objects::equals, type, values); } - private static void check(BiPredicate comparison, GenericType type, - T... values) { + private static void check(BiPredicate comparison, GenericType type, T... 
values) { GenericArray array = GenericArray.of(type, values); ArrayBackedColumnSource columnSource = ArrayBackedColumnSource.from(array); int ix = 0; for (T left : values) { - assertThat(columnSource.get(ix++)) - .matches((Predicate) right -> comparison.test(left, right)); + assertThat(columnSource.get(ix++)).matches((Predicate) right -> comparison.test(left, right)); } check(comparison, (Type) type, values); } @@ -110,14 +108,13 @@ private static void check(BiPredicate comparison, Type type, T ArrayBackedColumnSource columnSource = ArrayBackedColumnSource.from(array); int ix = 0; for (T left : values) { - assertThat(columnSource.get(ix++)) - .matches((Predicate) right -> comparison.test(left, right)); + assertThat(columnSource.get(ix++)).matches((Predicate) right -> comparison.test(left, right)); } } private static boolean checkInstant(Instant instant, Object o) { return (instant == null && o == null) || - (instant != null && (o instanceof DBDateTime) - && instant.toEpochMilli() == ((DBDateTime) o).getMillis()); + (instant != null && (o instanceof DBDateTime) + && instant.toEpochMilli() == ((DBDateTime) o).getMillis()); } } diff --git a/DB/src/test/java/io/deephaven/db/v2/sources/ArrayGenerator.java b/DB/src/test/java/io/deephaven/db/v2/sources/ArrayGenerator.java index 6664aac3ef7..38d3eb8e6be 100644 --- a/DB/src/test/java/io/deephaven/db/v2/sources/ArrayGenerator.java +++ b/DB/src/test/java/io/deephaven/db/v2/sources/ArrayGenerator.java @@ -15,10 +15,8 @@ public class ArrayGenerator { private static Map generatorWithSize = new ConcurrentHashMap<>(); - private static Map> adapterBuilder = - new ConcurrentHashMap<>(); - private static Map> chunkAdapterBuilder = - new ConcurrentHashMap<>(); + private static Map> adapterBuilder = new ConcurrentHashMap<>(); + private static Map> chunkAdapterBuilder = new ConcurrentHashMap<>(); static { generatorWithSize.put(boolean.class, ArrayGenerator::randomBooleans); @@ -142,8 +140,8 @@ public static long[] randomLongs(Random 
random, int size) { * @param objectType The type of the elements buit into the array * @param random An instance of the random number generator to be used * @param size The number of elements in the result - * @param spaceSize The size of the random values domain (the number of distinct possible values - * that may exist in the result + * @param spaceSize The size of the random values domain (the number of distinct possible values that may exist in + * the result * @return An array with random values */ public static Object randomValues(Class objectType, Random random, int size, int spaceSize) { @@ -153,8 +151,8 @@ public static Object randomValues(Class objectType, Random random, int size, int /** * @param random An instance of the random number generator to be used * @param size The number of elements in the result - * @param spaceSize The size of the random values domain (the number of distinct possible values - * that may exist in the result + * @param spaceSize The size of the random values domain (the number of distinct possible values that may exist in + * the result * @return An array with random values */ public static boolean[] randomBooleans(Random random, int size, int spaceSize) { @@ -168,8 +166,8 @@ public static boolean[] randomBooleans(Random random, int size, int spaceSize) { /** * @param random An instance of the random number generator to be used * @param size The number of elements in the result - * @param spaceSize The size of the random values domain (the number of distinct possible values - * that may exist in the result + * @param spaceSize The size of the random values domain (the number of distinct possible values that may exist in + * the result * @return An array with random values */ public static byte[] randomBytes(Random random, int size, int spaceSize) { @@ -179,8 +177,8 @@ public static byte[] randomBytes(Random random, int size, int spaceSize) { /** * @param random An instance of the random number generator to be used * @param size 
The number of elements in the result - * @param spaceSize The size of the random values domain (the number of distinct possible values - * that may exist in the result + * @param spaceSize The size of the random values domain (the number of distinct possible values that may exist in + * the result * @return An array with random values */ public static char[] randomChars(Random random, int size, int spaceSize) { @@ -190,8 +188,8 @@ public static char[] randomChars(Random random, int size, int spaceSize) { /** * @param random An instance of the random number generator to be used * @param size The number of elements in the result - * @param spaceSize The size of the random values domain (the number of distinct possible values - * that may exist in the result + * @param spaceSize The size of the random values domain (the number of distinct possible values that may exist in + * the result * @return An array with random values */ public static int[] randomInts(Random random, int size, int spaceSize) { @@ -201,8 +199,8 @@ public static int[] randomInts(Random random, int size, int spaceSize) { /** * @param random An instance of the random number generator to be used * @param size The number of elements in the result - * @param spaceSize The size of the random values domain (the number of distinct possible values - * that may exist in the result + * @param spaceSize The size of the random values domain (the number of distinct possible values that may exist in + * the result * @return An array with random values */ public static long[] randomLongs(Random random, int size, int spaceSize) { @@ -212,8 +210,8 @@ public static long[] randomLongs(Random random, int size, int spaceSize) { /** * @param random An instance of the random number generator to be used * @param size The number of elements in the result - * @param spaceSize The size of the random values domain (the number of distinct possible values - * that may exist in the result + * @param spaceSize The size of the 
random values domain (the number of distinct possible values that may exist in + * the result * @return An array with random values */ public static short[] randomShorts(Random random, int size, int spaceSize) { @@ -223,8 +221,8 @@ public static short[] randomShorts(Random random, int size, int spaceSize) { /** * @param random An instance of the random number generator to be used * @param size The number of elements in the result - * @param spaceSize The size of the random values domain (the number of distinct possible values - * that may exist in the result + * @param spaceSize The size of the random values domain (the number of distinct possible values that may exist in + * the result * @return An array with random values */ public static double[] randomDoubles(Random random, int size, int spaceSize) { @@ -234,8 +232,8 @@ public static double[] randomDoubles(Random random, int size, int spaceSize) { /** * @param random An instance of the random number generator to be used * @param size The number of elements in the result - * @param spaceSize The size of the random values domain (the number of distinct possible values - * that may exist in the result + * @param spaceSize The size of the random values domain (the number of distinct possible values that may exist in + * the result * @return An array with random values */ public static float[] randomFloats(Random random, int size, int spaceSize) { @@ -250,17 +248,15 @@ public static float[] randomFloats(Random random, int size, int spaceSize) { /** * @param random An instance of the random number generator to be used * @param size The number of elements in the result - * @param spaceSize The size of the random values domain (the number of distinct possible values - * that may exist in the result + * @param spaceSize The size of the random values domain (the number of distinct possible values that may exist in + * the result * @return An array with random values */ public static String[] randomStrings(Random random, 
int size, int spaceSize) { - return Arrays.stream(randomInts(random, size, 0, spaceSize)).mapToObj(Integer::toString) - .toArray(String[]::new); + return Arrays.stream(randomInts(random, size, 0, spaceSize)).mapToObj(Integer::toString).toArray(String[]::new); } - public static boolean[] randomBooleans(Random random, int size, boolean minValue, - boolean maxValue) { + public static boolean[] randomBooleans(Random random, int size, boolean minValue, boolean maxValue) { boolean[] result = new boolean[size]; if (minValue == maxValue) { Arrays.fill(result, minValue); @@ -302,8 +298,7 @@ public static char[] randomChars(Random random, int size, char minValue, char ma return result; } - public static double[] randomDoubles(Random random, int size, double minValue, - double maxValue) { + public static double[] randomDoubles(Random random, int size, double minValue, double maxValue) { double[] result = new double[size]; if (maxValue <= 0) { maxValue = Double.MAX_VALUE; @@ -393,20 +388,17 @@ public static Object[] randomObjects(Random random, int size, int minValue, int return result; } - public static long[] indexDataGenerator(Random random, int size, double gapProbability, - int maxStep, int maxValue) { + public static long[] indexDataGenerator(Random random, int size, double gapProbability, int maxStep, int maxValue) { long result[] = new long[size]; if (size == 0) { return result; } - result[0] = - Math.max(0, Math.min(Math.abs(random.nextLong()), maxValue - (maxStep + 1) * size)); + result[0] = Math.max(0, Math.min(Math.abs(random.nextLong()), maxValue - (maxStep + 1) * size)); for (int i = 1; i < result.length; i++) { long l = result[i]; - if (random.nextDouble() < gapProbability - && result[i - 1] < maxValue - result.length + i - 2) { + if (random.nextDouble() < gapProbability && result[i - 1] < maxValue - result.length + i - 2) { result[i] = result[i - 1] + 2 + random.nextInt(maxStep); } else { result[i] = result[i - 1] + 1; @@ -416,9 +408,7 @@ public static long[] 
indexDataGenerator(Random random, int size, double gapProba } static public ArrayAdapter getAdapter(Object array) { - return adapterBuilder - .getOrDefault(array.getClass().getComponentType(), ObjectArrayAdapter::new) - .apply(array); + return adapterBuilder.getOrDefault(array.getClass().getComponentType(), ObjectArrayAdapter::new).apply(array); } @FunctionalInterface @@ -651,8 +641,7 @@ public void set(int i, T value) { } static public ChunkAdapter getChunkAdapter(Object chunk) { - return chunkAdapterBuilder.getOrDefault(chunk.getClass(), ObjectChunkAdapter::new) - .apply(chunk); + return chunkAdapterBuilder.getOrDefault(chunk.getClass(), ObjectChunkAdapter::new).apply(chunk); } public interface ChunkAdapter { diff --git a/DB/src/test/java/io/deephaven/db/v2/sources/DateTimeTreeMapSource.java b/DB/src/test/java/io/deephaven/db/v2/sources/DateTimeTreeMapSource.java index e811f4a33c7..069d4dc11b7 100644 --- a/DB/src/test/java/io/deephaven/db/v2/sources/DateTimeTreeMapSource.java +++ b/DB/src/test/java/io/deephaven/db/v2/sources/DateTimeTreeMapSource.java @@ -9,12 +9,11 @@ import java.util.Arrays; /** - * DateTime column source that wraps and delegates the storage to an {@code TreeMapSource}. - * This also provides an interface so this column can be interpreted as a long column (through - * UnboxedDateTimeTreeMapSource). + * DateTime column source that wraps and delegates the storage to an {@code TreeMapSource}. This also provides an + * interface so this column can be interpreted as a long column (through UnboxedDateTimeTreeMapSource). 
*/ public class DateTimeTreeMapSource extends AbstractColumnSource - implements MutableColumnSourceGetDefaults.ForObject { + implements MutableColumnSourceGetDefaults.ForObject { private final TreeMapSource treeMapSource; private final UnboxedDateTimeTreeMapSource alternateColumnSource; @@ -107,14 +106,13 @@ public long getPrevLong(long index) { @Override public boolean allowsReinterpret( - @NotNull final Class alternateDataType) { + @NotNull final Class alternateDataType) { return alternateDataType == long.class; } @Override public ColumnSource doReinterpret( - @NotNull final Class alternateDataType) - throws IllegalArgumentException { + @NotNull final Class alternateDataType) throws IllegalArgumentException { // noinspection unchecked return (ColumnSource) alternateColumnSource; } diff --git a/DB/src/test/java/io/deephaven/db/v2/sources/ImmutableTreeMapSource.java b/DB/src/test/java/io/deephaven/db/v2/sources/ImmutableTreeMapSource.java index e154276a786..7e72ac02e60 100644 --- a/DB/src/test/java/io/deephaven/db/v2/sources/ImmutableTreeMapSource.java +++ b/DB/src/test/java/io/deephaven/db/v2/sources/ImmutableTreeMapSource.java @@ -7,9 +7,8 @@ import io.deephaven.db.v2.utils.Index; /** - * A simple extension to the TreeMapSource that will not actually change any map values, and is thus - * immutable. We need to have an immutable source available for use with the IndexGroupingTest, and - * this fits the bill. + * A simple extension to the TreeMapSource that will not actually change any map values, and is thus immutable. We need + * to have an immutable source available for use with the IndexGroupingTest, and this fits the bill. 
*/ public class ImmutableTreeMapSource extends TreeMapSource { public ImmutableTreeMapSource(Class type, Index index, T[] data) { diff --git a/DB/src/test/java/io/deephaven/db/v2/sources/ReplicateSourcesAndChunkTest.java b/DB/src/test/java/io/deephaven/db/v2/sources/ReplicateSourcesAndChunkTest.java index 02714e1d9e0..57cea3391af 100644 --- a/DB/src/test/java/io/deephaven/db/v2/sources/ReplicateSourcesAndChunkTest.java +++ b/DB/src/test/java/io/deephaven/db/v2/sources/ReplicateSourcesAndChunkTest.java @@ -22,33 +22,28 @@ public class ReplicateSourcesAndChunkTest { public static void main(String[] args) throws IOException { ReplicateSourcesAndChunks.main(args); - ReplicatePrimitiveCode.charToAllButBoolean(TestCharacterArraySource.class, - ReplicatePrimitiveCode.TEST_SRC); - fixupBooleanColumnSourceTest( - ReplicatePrimitiveCode.charToBooleanAsByte(TestCharacterArraySource.class, + ReplicatePrimitiveCode.charToAllButBoolean(TestCharacterArraySource.class, ReplicatePrimitiveCode.TEST_SRC); + fixupBooleanColumnSourceTest(ReplicatePrimitiveCode.charToBooleanAsByte(TestCharacterArraySource.class, ReplicatePrimitiveCode.TEST_SRC, Collections.emptyMap())); - fixupObjectColumnSourceTest(ReplicatePrimitiveCode - .charToObject(TestCharacterArraySource.class, ReplicatePrimitiveCode.TEST_SRC)); + fixupObjectColumnSourceTest( + ReplicatePrimitiveCode.charToObject(TestCharacterArraySource.class, ReplicatePrimitiveCode.TEST_SRC)); ReplicatePrimitiveCode.charToAllButBoolean(TestCharacterSparseArraySource.class, - ReplicatePrimitiveCode.TEST_SRC); - fixupBooleanColumnSourceTest( - ReplicatePrimitiveCode.charToBooleanAsByte(TestCharacterSparseArraySource.class, + ReplicatePrimitiveCode.TEST_SRC); + fixupBooleanColumnSourceTest(ReplicatePrimitiveCode.charToBooleanAsByte(TestCharacterSparseArraySource.class, ReplicatePrimitiveCode.TEST_SRC, Collections.emptyMap())); - fixupObjectColumnSourceTest(ReplicatePrimitiveCode - .charToObject(TestCharacterSparseArraySource.class, 
ReplicatePrimitiveCode.TEST_SRC)); + fixupObjectColumnSourceTest(ReplicatePrimitiveCode.charToObject(TestCharacterSparseArraySource.class, + ReplicatePrimitiveCode.TEST_SRC)); ReplicatePrimitiveCode.charToAll(TestCharChunk.class, ReplicatePrimitiveCode.TEST_SRC); - fixupChunkTest(ReplicatePrimitiveCode.charToObject(TestCharChunk.class, - ReplicatePrimitiveCode.TEST_SRC)); + fixupChunkTest(ReplicatePrimitiveCode.charToObject(TestCharChunk.class, ReplicatePrimitiveCode.TEST_SRC)); ReplicatePrimitiveCode.charToAllButBoolean(TestCharacterDeltaAwareColumnSource.class, - ReplicatePrimitiveCode.TEST_SRC); - fixupBooleanDeltaAwareColumnSourceTest( - ReplicatePrimitiveCode.charToBooleanAsByte(TestCharacterDeltaAwareColumnSource.class, - ReplicatePrimitiveCode.TEST_SRC, Collections.emptyMap())); - fixupObjectDeltaAwareColumnSourceTest(ReplicatePrimitiveCode.charToObject( - TestCharacterDeltaAwareColumnSource.class, ReplicatePrimitiveCode.TEST_SRC)); + ReplicatePrimitiveCode.TEST_SRC); + fixupBooleanDeltaAwareColumnSourceTest(ReplicatePrimitiveCode.charToBooleanAsByte( + TestCharacterDeltaAwareColumnSource.class, ReplicatePrimitiveCode.TEST_SRC, Collections.emptyMap())); + fixupObjectDeltaAwareColumnSourceTest(ReplicatePrimitiveCode + .charToObject(TestCharacterDeltaAwareColumnSource.class, ReplicatePrimitiveCode.TEST_SRC)); } private static void fixupObjectColumnSourceTest(String objectPath) throws IOException { @@ -57,13 +52,13 @@ private static void fixupObjectColumnSourceTest(String objectPath) throws IOExce lines = FileUtils.readLines(objectFile, Charset.defaultCharset()); lines = removeImport(lines, "\\s*import static.*QueryConstants.*;"); lines = globalReplacements(lines, "NULL_OBJECT", "null", - "ObjectChunk", "ObjectChunk", - "ObjectChunk", "ObjectChunk", - "source.getObject", "source.get", - "dest.getObject", "dest.get", - "source.getPrevObject", "source.getPrev", - "new ObjectArraySource\\(\\)", "new ObjectArraySource<>\\(String.class\\)", - "new 
ObjectSparseArraySource\\(\\)", "new ObjectSparseArraySource<>\\(String.class\\)"); + "ObjectChunk", "ObjectChunk", + "ObjectChunk", "ObjectChunk", + "source.getObject", "source.get", + "dest.getObject", "dest.get", + "source.getPrevObject", "source.getPrev", + "new ObjectArraySource\\(\\)", "new ObjectArraySource<>\\(String.class\\)", + "new ObjectSparseArraySource\\(\\)", "new ObjectSparseArraySource<>\\(String.class\\)"); lines = removeRegion(lines, "boxing imports"); FileUtils.writeLines(objectFile, lines); } @@ -73,46 +68,41 @@ private static void fixupBooleanColumnSourceTest(String booleanPath) throws IOEx List lines = FileUtils.readLines(booleanFile, Charset.defaultCharset()); lines = globalReplacements(lines, - "BooleanChunk(\\s+)", "ObjectChunk$1", - "\\(BooleanChunk\\)", "\\(ObjectChunk\\)", - "\\(BooleanChunk\\)", "\\(ObjectChunk\\)", - "BooleanChunk(\\s+)", "ObjectChunk$1", - "BooleanChunk(\\s+)", "ObjectChunk$1", - "BooleanChunk<[?] extends Values>(\\s+)", "ObjectChunk$1", - "asBooleanChunk", "asObjectChunk", - "BooleanChunk.chunkWrap", "ObjectChunk.chunkWrap", - "BooleanChunkEquals", "ObjectChunkEquals", - "WritableBooleanChunk.makeWritableChunk", "WritableObjectChunk.makeWritableChunk"); - - lines = - simpleFixup(lines, "arrayFill", "NULL_BOOLEAN", "BooleanUtils.NULL_BOOLEAN_AS_BYTE"); + "BooleanChunk(\\s+)", "ObjectChunk$1", + "\\(BooleanChunk\\)", "\\(ObjectChunk\\)", + "\\(BooleanChunk\\)", "\\(ObjectChunk\\)", + "BooleanChunk(\\s+)", "ObjectChunk$1", + "BooleanChunk(\\s+)", "ObjectChunk$1", + "BooleanChunk<[?] 
extends Values>(\\s+)", "ObjectChunk$1", + "asBooleanChunk", "asObjectChunk", + "BooleanChunk.chunkWrap", "ObjectChunk.chunkWrap", + "BooleanChunkEquals", "ObjectChunkEquals", + "WritableBooleanChunk.makeWritableChunk", "WritableObjectChunk.makeWritableChunk"); + + lines = simpleFixup(lines, "arrayFill", "NULL_BOOLEAN", "BooleanUtils.NULL_BOOLEAN_AS_BYTE"); lines = simpleFixup(lines, "testsourcesink", "ChunkType.Boolean", "ChunkType.Object"); - lines = - applyFixup(lines, "fromsource", "(.*)checkFromSource\\((.*)byte fromSource(.*)\\) \\{", - m -> Collections.singletonList(m.group(1) + "checkFromSource(" + m.group(2) - + "Boolean fromSource" + m.group(3) + ") {")); - lines = - applyFixup(lines, "fromsource", "(.*)checkFromSource\\((.*)byte fromChunk(.*)\\) \\{", - m -> Collections.singletonList(m.group(1) + "checkFromSource(" + m.group(2) - + "Boolean fromChunk" + m.group(3) + ") {")); - lines = - applyFixup(lines, "fromvalues", "(.*)checkFromValues\\((.*)byte fromChunk(.*)\\) \\{", - m -> Collections.singletonList(m.group(1) + "checkFromValues(" + m.group(2) - + "Boolean fromChunk" + m.group(3) + ") {")); - lines = applyFixup(lines, "fromvalues", "(.*)fromValues, fromChunk\\);", - m -> Collections.singletonList(m.group(1) + lines = applyFixup(lines, "fromsource", "(.*)checkFromSource\\((.*)byte fromSource(.*)\\) \\{", + m -> Collections.singletonList( + m.group(1) + "checkFromSource(" + m.group(2) + "Boolean fromSource" + m.group(3) + ") {")); + lines = applyFixup(lines, "fromsource", "(.*)checkFromSource\\((.*)byte fromChunk(.*)\\) \\{", + m -> Collections.singletonList( + m.group(1) + "checkFromSource(" + m.group(2) + "Boolean fromChunk" + m.group(3) + ") {")); + lines = applyFixup(lines, "fromvalues", "(.*)checkFromValues\\((.*)byte fromChunk(.*)\\) \\{", + m -> Collections.singletonList( + m.group(1) + "checkFromValues(" + m.group(2) + "Boolean fromChunk" + m.group(3) + ") {")); + lines = applyFixup(lines, "fromvalues", "(.*)fromValues, fromChunk\\);", m -> 
Collections.singletonList(m + .group(1) + "fromValues == BooleanUtils.NULL_BOOLEAN_AS_BYTE ? null : fromValues == BooleanUtils.TRUE_BOOLEAN_AS_BYTE, fromChunk);")); lines = removeRegion(lines, "samecheck"); lines = addImport(lines, BooleanUtils.class); lines = addImport(lines, WritableObjectChunk.class); lines = addImport(lines, ObjectChunk.class); if (!booleanPath.contains("Sparse")) { - lines = removeImport(lines, - "import io.deephaven.db.v2.sources.chunk.Attributes.OrderedKeyRanges;"); + lines = removeImport(lines, "import io.deephaven.db.v2.sources.chunk.Attributes.OrderedKeyRanges;"); } lines = simpleFixup(lines, "elementGet", - "getBoolean", "getByte"); + "getBoolean", "getByte"); FileUtils.writeLines(booleanFile, lines); } @@ -121,33 +111,30 @@ private static void fixupChunkTest(String objectPath) throws IOException { final File objectFile = new File(objectPath); lines = FileUtils.readLines(objectFile, Charset.defaultCharset()); lines = globalReplacements(lines, - "ObjectChunk", "ObjectChunk", - "ObjectChunk", "ObjectChunk", - "ObjectChunkChunk", "ObjectChunkChunk", - "String\\[\\]", "byte[]"); + "ObjectChunk", "ObjectChunk", + "ObjectChunk", "ObjectChunk", + "ObjectChunkChunk", "ObjectChunkChunk", + "String\\[\\]", "byte[]"); lines = removeRegion(lines, "boxing imports"); FileUtils.writeLines(objectFile, lines); } - private static void fixupObjectDeltaAwareColumnSourceTest(String objectPath) - throws IOException { + private static void fixupObjectDeltaAwareColumnSourceTest(String objectPath) throws IOException { fixupObjectColumnSourceTest(objectPath); } - private static void fixupBooleanDeltaAwareColumnSourceTest(String booleanPath) - throws IOException { + private static void fixupBooleanDeltaAwareColumnSourceTest(String booleanPath) throws IOException { final File booleanFile = new File(booleanPath); List lines = FileUtils.readLines(booleanFile, Charset.defaultCharset()); lines = globalReplacements(lines, - "Map", "Map", // covers Map and HashMap - 
"source.getBoolean", "source.getByte", - "source.getPrevBoolean", "source.getPrevByte", - "NULL_BOOLEAN", "BooleanUtils.NULL_BOOLEAN_AS_BYTE", - "byte.class", "boolean.class", - "BooleanChunk", "ObjectChunk", - "asBooleanChunk", "asObjectChunk", - "values.get\\((.*)\\)", - "io.deephaven.db.util.BooleanUtils.booleanAsByte(values.get($1))"); + "Map", "Map", // covers Map and HashMap + "source.getBoolean", "source.getByte", + "source.getPrevBoolean", "source.getPrevByte", + "NULL_BOOLEAN", "BooleanUtils.NULL_BOOLEAN_AS_BYTE", + "byte.class", "boolean.class", + "BooleanChunk", "ObjectChunk", + "asBooleanChunk", "asObjectChunk", + "values.get\\((.*)\\)", "io.deephaven.db.util.BooleanUtils.booleanAsByte(values.get($1))"); lines = addImport(lines, BooleanUtils.class); lines = addImport(lines, ObjectChunk.class); FileUtils.writeLines(booleanFile, lines); diff --git a/DB/src/test/java/io/deephaven/db/v2/sources/TreeMapSource.java b/DB/src/test/java/io/deephaven/db/v2/sources/TreeMapSource.java index 3c7232881d8..f29e80973db 100644 --- a/DB/src/test/java/io/deephaven/db/v2/sources/TreeMapSource.java +++ b/DB/src/test/java/io/deephaven/db/v2/sources/TreeMapSource.java @@ -56,8 +56,7 @@ public synchronized void add(final Index index, T[] vs) { lastAdditionTime = currentStep; } if (index.size() != vs.length) { - throw new IllegalArgumentException( - "Index=" + index + ", data(" + vs.length + ")=" + Arrays.toString(vs)); + throw new IllegalArgumentException("Index=" + index + ", data(" + vs.length + ")=" + Arrays.toString(vs)); } index.forAllLongs(new LongConsumer() { @@ -94,20 +93,16 @@ public synchronized void shift(long startKeyInclusive, long endKeyInclusive, lon // Note: moving to the right, we need to start with rightmost data first. final long dir = shiftDelta > 0 ? -1 : 1; final long len = endKeyInclusive - startKeyInclusive + 1; - for (long offset = dir < 0 ? len - 1 : 0; dir < 0 ? 
offset >= 0 : offset < len; offset += - dir) { - data.put(startKeyInclusive + offset + shiftDelta, - data.remove(startKeyInclusive + offset)); + for (long offset = dir < 0 ? len - 1 : 0; dir < 0 ? offset >= 0 : offset < len; offset += dir) { + data.put(startKeyInclusive + offset + shiftDelta, data.remove(startKeyInclusive + offset)); } } @Override public synchronized T get(long index) { // If a test asks for a non-existent positive index something is wrong. - // We have to accept negative values, because e.g. a join may find no matching right key, in - // which case it - // has an empty redirection index entry that just gets passed through to the inner column - // source as -1. + // We have to accept negative values, because e.g. a join may find no matching right key, in which case it + // has an empty redirection index entry that just gets passed through to the inner column source as -1. if (index >= 0 && !data.containsKey(index)) throw new IllegalStateException("Asking for a non-existent key: " + index); return data.get(index); diff --git a/DB/src/test/java/io/deephaven/db/v2/sources/UnboxedDateTimeTreeMapSource.java b/DB/src/test/java/io/deephaven/db/v2/sources/UnboxedDateTimeTreeMapSource.java index 12b3894168e..a0ee0d65aa1 100644 --- a/DB/src/test/java/io/deephaven/db/v2/sources/UnboxedDateTimeTreeMapSource.java +++ b/DB/src/test/java/io/deephaven/db/v2/sources/UnboxedDateTimeTreeMapSource.java @@ -4,17 +4,15 @@ import io.deephaven.db.v2.utils.Index; /** - * Wrap a regular {@code TreeMapSource} to make it reinterpretable as a DBDateTime column - * source. + * Wrap a regular {@code TreeMapSource} to make it reinterpretable as a DBDateTime column source. 
*/ -public class UnboxedDateTimeTreeMapSource extends UnboxedDateTimeColumnSource - implements ColumnSource { +public class UnboxedDateTimeTreeMapSource extends UnboxedDateTimeColumnSource implements ColumnSource { // the actual data storage private final TreeMapSource treeMapSource; public UnboxedDateTimeTreeMapSource(ColumnSource alternateColumnSource, - TreeMapSource treeMapSource) { + TreeMapSource treeMapSource) { super(alternateColumnSource); this.treeMapSource = treeMapSource; } diff --git a/DB/src/test/java/io/deephaven/db/v2/sources/chunk/RandomResetter.java b/DB/src/test/java/io/deephaven/db/v2/sources/chunk/RandomResetter.java index dd8c0bb91ac..39f178cbe7d 100644 --- a/DB/src/test/java/io/deephaven/db/v2/sources/chunk/RandomResetter.java +++ b/DB/src/test/java/io/deephaven/db/v2/sources/chunk/RandomResetter.java @@ -26,8 +26,7 @@ static RandomResetter makeRandomResetter(ChunkType chunkType) { case Object: return new RandomResetter(ArrayGenerator::randomObjects); default: - throw new UnsupportedOperationException( - "Can't make RandomResetter for " + chunkType); + throw new UnsupportedOperationException("Can't make RandomResetter for " + chunkType); } } diff --git a/DB/src/test/java/io/deephaven/db/v2/sources/chunk/TestSharedContext.java b/DB/src/test/java/io/deephaven/db/v2/sources/chunk/TestSharedContext.java index 3d788a57bf3..9b487f48476 100644 --- a/DB/src/test/java/io/deephaven/db/v2/sources/chunk/TestSharedContext.java +++ b/DB/src/test/java/io/deephaven/db/v2/sources/chunk/TestSharedContext.java @@ -17,8 +17,7 @@ public class TestSharedContext { - private static final class TestSharedContextKey - implements SharedContext.Key { + private static final class TestSharedContextKey implements SharedContext.Key { } private static final class TestResettableContext implements ResettableContext { @@ -138,11 +137,9 @@ public void testConditionFilterWithMoreComplexRedirections() { final String sortCol = "TS"; final String formulaCol = "F"; 
LiveTableMonitor.DEFAULT.exclusiveLock().doLocked(() -> { - final Table t1 = - t0.update(sortCol + "=i", formulaCol + "=" + cols[0] + "+" + cols[1]).reverse(); + final Table t1 = t0.update(sortCol + "=i", formulaCol + "=" + cols[0] + "+" + cols[1]).reverse(); final Table t1Filtered = t1.where(condition); - final Table t2 = - t1.sort(sortCol).naturalJoin(t1, sortCol, Strings.join(joinColumnsToAdd, ",")); + final Table t2 = t1.sort(sortCol).naturalJoin(t1, sortCol, Strings.join(joinColumnsToAdd, ",")); final Table t2Filtered = t2.where(joinedCondition).reverse(); assertEquals(t2.size(), t1.size()); final Consumer columnChecker = (final String col) -> { diff --git a/DB/src/test/java/io/deephaven/db/v2/sources/chunk/TestSourceSink.java b/DB/src/test/java/io/deephaven/db/v2/sources/chunk/TestSourceSink.java index f784bbde0c1..8965b4a884a 100644 --- a/DB/src/test/java/io/deephaven/db/v2/sources/chunk/TestSourceSink.java +++ b/DB/src/test/java/io/deephaven/db/v2/sources/chunk/TestSourceSink.java @@ -11,12 +11,11 @@ public class TestSourceSink { /** - * A variety of tests are possible here. As a first pass, we make a chunk of size 1000, fill - * elements 0-249 and 500-749 with random values, and then see if they come back. + * A variety of tests are possible here. As a first pass, we make a chunk of size 1000, fill elements 0-249 and + * 500-749 with random values, and then see if they come back. */ public static void runTests(ChunkType chunkType, IntFunction makeSink) { - final ChunkType containerType = - chunkType == ChunkType.Boolean ? ChunkType.Object : chunkType; + final ChunkType containerType = chunkType == ChunkType.Boolean ? ChunkType.Object : chunkType; final int chunkSize = 1000; // deliberately not a power of two, for fun. 
final int totalSize = chunkSize * 4; @@ -29,18 +28,18 @@ public static void runTests(ChunkType chunkType, IntFunction randomResetter.resetWithRandomValues(rng, chunkA, chunkSize); randomResetter.resetWithRandomValues(rng, chunkB, chunkSize); - final OrderedKeys keysA = OrderedKeys - .wrapKeyRangesChunkAsOrderedKeys(LongChunk.chunkWrap(new long[] {0, chunkSize - 1})); - final OrderedKeys keysB = OrderedKeys.wrapKeyRangesChunkAsOrderedKeys( - LongChunk.chunkWrap(new long[] {2 * chunkSize, 3 * chunkSize - 1})); + final OrderedKeys keysA = + OrderedKeys.wrapKeyRangesChunkAsOrderedKeys(LongChunk.chunkWrap(new long[] {0, chunkSize - 1})); + final OrderedKeys keysB = OrderedKeys + .wrapKeyRangesChunkAsOrderedKeys(LongChunk.chunkWrap(new long[] {2 * chunkSize, 3 * chunkSize - 1})); final WritableChunkSink.FillFromContext fromContext = sink.makeFillFromContext(chunkSize); sink.fillFromChunk(fromContext, chunkA, keysA); sink.fillFromChunk(fromContext, chunkB, keysB); // Get the whole thing back as one big chunk - final OrderedKeys keysAll = OrderedKeys.wrapKeyRangesChunkAsOrderedKeys( - LongChunk.chunkWrap(new long[] {0, 4 * chunkSize - 1})); + final OrderedKeys keysAll = + OrderedKeys.wrapKeyRangesChunkAsOrderedKeys(LongChunk.chunkWrap(new long[] {0, 4 * chunkSize - 1})); final ChunkSource.GetContext getContext = sink.makeGetContext(totalSize); final Chunk valuesAll = sink.getChunk(getContext, keysAll); @@ -56,9 +55,8 @@ public static void runTests(ChunkType chunkType, IntFunction equalsHelper("fourth chunk", ce, chunkNull, valuesAll, 3 * chunkSize, 4 * chunkSize - 1); } - private static void equalsHelper(String what, final ChunkEquals ce, - final Chunk expected, - final Chunk actual, final int actualFirst, final int actualLast) { + private static void equalsHelper(String what, final ChunkEquals ce, final Chunk expected, + final Chunk actual, final int actualFirst, final int actualLast) { final Chunk actualSlice = actual.slice(actualFirst, actualLast - actualFirst + 1); 
final boolean equals = ce.equalReduce(expected, actualSlice); TestCase.assertTrue(what, equals); diff --git a/DB/src/test/java/io/deephaven/db/v2/sources/chunk/util/pools/TestChunkPooling.java b/DB/src/test/java/io/deephaven/db/v2/sources/chunk/util/pools/TestChunkPooling.java index 0da553fde01..d54d49c063c 100644 --- a/DB/src/test/java/io/deephaven/db/v2/sources/chunk/util/pools/TestChunkPooling.java +++ b/DB/src/test/java/io/deephaven/db/v2/sources/chunk/util/pools/TestChunkPooling.java @@ -18,8 +18,7 @@ public void testTakeAndGiveWithTracking() { final List chunksToGive = new ArrayList<>(); for (ChunkType chunkType : ChunkType.values()) { for (int ci = 0; ci < 100; ++ci) { - for (int log2Capacity = - 0; log2Capacity <= ChunkPoolConstants.LARGEST_POOLED_CHUNK_LOG2_CAPACITY + for (int log2Capacity = 0; log2Capacity <= ChunkPoolConstants.LARGEST_POOLED_CHUNK_LOG2_CAPACITY + 1; ++log2Capacity) { chunksToGive.add(chunkType.makeWritableChunk(1 << log2Capacity)); } diff --git a/DB/src/test/java/io/deephaven/db/v2/sources/chunkcolumnsource/TestChunkColumnSource.java b/DB/src/test/java/io/deephaven/db/v2/sources/chunkcolumnsource/TestChunkColumnSource.java index a1e78faf715..29e00c2eafb 100644 --- a/DB/src/test/java/io/deephaven/db/v2/sources/chunkcolumnsource/TestChunkColumnSource.java +++ b/DB/src/test/java/io/deephaven/db/v2/sources/chunkcolumnsource/TestChunkColumnSource.java @@ -30,10 +30,8 @@ public void tearDown() throws Exception { @Test public void testSimple() { - final WritableCharChunk charChunk1 = - WritableCharChunk.makeWritableChunk(1024); - final WritableCharChunk charChunk2 = - WritableCharChunk.makeWritableChunk(1024); + final WritableCharChunk charChunk1 = WritableCharChunk.makeWritableChunk(1024); + final WritableCharChunk charChunk2 = WritableCharChunk.makeWritableChunk(1024); for (int ii = 0; ii < 1024; ++ii) { charChunk1.set(ii, (char) (1024 + ii)); charChunk2.set(ii, (char) (2048 + ii)); @@ -51,8 +49,7 @@ public void testSimple() { 
TestCase.assertEquals(charChunk2.get(ii), columnSource.getChar(ii + 1024)); } - final WritableCharChunk destChunk = - WritableCharChunk.makeWritableChunk(2048); + final WritableCharChunk destChunk = WritableCharChunk.makeWritableChunk(2048); try (final ChunkSource.FillContext fillContext = columnSource.makeFillContext(2048)) { columnSource.fillChunk(fillContext, destChunk, OrderedKeys.forRange(0, 2047)); TestCase.assertEquals(2048, destChunk.size()); @@ -89,7 +86,7 @@ public void testSimple() { try (final ChunkSource.GetContext getContext = columnSource.makeGetContext(2048)) { final CharChunk values = - columnSource.getChunk(getContext, OrderedKeys.forRange(0, 2047)).asCharChunk(); + columnSource.getChunk(getContext, OrderedKeys.forRange(0, 2047)).asCharChunk(); TestCase.assertEquals(2048, values.size()); for (int ii = 0; ii < 1024; ++ii) { TestCase.assertEquals(charChunk1.get(ii), values.get(ii)); @@ -99,7 +96,7 @@ public void testSimple() { try (final ChunkSource.GetContext getContext = columnSource.makeGetContext(2048)) { final CharChunk values = - columnSource.getChunk(getContext, OrderedKeys.forRange(0, 1023)).asCharChunk(); + columnSource.getChunk(getContext, OrderedKeys.forRange(0, 1023)).asCharChunk(); TestCase.assertEquals(1024, values.size()); for (int ii = 0; ii < 1024; ++ii) { TestCase.assertEquals(charChunk1.get(ii), values.get(ii)); @@ -108,7 +105,7 @@ public void testSimple() { try (final ChunkSource.GetContext getContext = columnSource.makeGetContext(2048)) { final CharChunk values = - columnSource.getChunk(getContext, OrderedKeys.forRange(1024, 2047)).asCharChunk(); + columnSource.getChunk(getContext, OrderedKeys.forRange(1024, 2047)).asCharChunk(); TestCase.assertEquals(1024, values.size()); for (int ii = 0; ii < 1024; ++ii) { TestCase.assertEquals(charChunk2.get(ii), values.get(ii)); @@ -117,14 +114,14 @@ public void testSimple() { try (final ChunkSource.GetContext getContext = columnSource.makeGetContext(2048)) { final CharChunk values = - 
columnSource.getChunk(getContext, OrderedKeys.forRange(2047, 2047)).asCharChunk(); + columnSource.getChunk(getContext, OrderedKeys.forRange(2047, 2047)).asCharChunk(); TestCase.assertEquals(1, values.size()); TestCase.assertEquals(charChunk2.get(1023), values.get(0)); } try (final ChunkSource.GetContext getContext = columnSource.makeGetContext(2048)) { final CharChunk values = - columnSource.getChunk(getContext, OrderedKeys.forRange(10, 20)).asCharChunk(); + columnSource.getChunk(getContext, OrderedKeys.forRange(10, 20)).asCharChunk(); TestCase.assertEquals(11, values.size()); for (int ii = 0; ii <= 10; ++ii) { TestCase.assertEquals(charChunk1.get(ii + 10), values.get(ii)); @@ -133,7 +130,7 @@ public void testSimple() { try (final ChunkSource.GetContext getContext = columnSource.makeGetContext(2048)) { final CharChunk values = - columnSource.getChunk(getContext, OrderedKeys.forRange(1020, 1030)).asCharChunk(); + columnSource.getChunk(getContext, OrderedKeys.forRange(1020, 1030)).asCharChunk(); TestCase.assertEquals(11, values.size()); for (int ii = 0; ii <= 3; ++ii) { TestCase.assertEquals(charChunk1.get(ii + 1020), values.get(ii)); @@ -146,15 +143,11 @@ public void testSimple() { @Test public void testShared() { - final WritableLongChunk longChunk1 = - WritableLongChunk.makeWritableChunk(1024); - final WritableLongChunk longChunk2 = - WritableLongChunk.makeWritableChunk(1024); + final WritableLongChunk longChunk1 = WritableLongChunk.makeWritableChunk(1024); + final WritableLongChunk longChunk2 = WritableLongChunk.makeWritableChunk(1024); - final WritableDoubleChunk doubleChunk1 = - WritableDoubleChunk.makeWritableChunk(1024); - final WritableDoubleChunk doubleChunk2 = - WritableDoubleChunk.makeWritableChunk(1024); + final WritableDoubleChunk doubleChunk1 = WritableDoubleChunk.makeWritableChunk(1024); + final WritableDoubleChunk doubleChunk2 = WritableDoubleChunk.makeWritableChunk(1024); for (int ii = 0; ii < 1024; ++ii) { longChunk1.set(ii, 1024 + ii); @@ -165,13 
+158,11 @@ public void testShared() { final TLongArrayList offsets = new TLongArrayList(); - final ChunkColumnSource longColumnSource = - ChunkColumnSource.make(ChunkType.Long, long.class, offsets); + final ChunkColumnSource longColumnSource = ChunkColumnSource.make(ChunkType.Long, long.class, offsets); longColumnSource.addChunk(longChunk1); longColumnSource.addChunk(longChunk2); - final ChunkColumnSource doubleColumnSource = - ChunkColumnSource.make(ChunkType.Double, double.class, offsets); + final ChunkColumnSource doubleColumnSource = ChunkColumnSource.make(ChunkType.Double, double.class, offsets); doubleColumnSource.addChunk(doubleChunk1); doubleColumnSource.addChunk(doubleChunk2); @@ -192,14 +183,10 @@ public void testShared() { } private void checkDoubles(WritableDoubleChunk doubleChunk1, - WritableDoubleChunk doubleChunk2, - ChunkColumnSource doubleColumnSource) { - final WritableDoubleChunk destChunk = - WritableDoubleChunk.makeWritableChunk(2048); - try (final ChunkSource.FillContext doubleFillContext = - doubleColumnSource.makeFillContext(2048)) { - doubleColumnSource.fillChunk(doubleFillContext, destChunk, - OrderedKeys.forRange(0, 2047)); + WritableDoubleChunk doubleChunk2, ChunkColumnSource doubleColumnSource) { + final WritableDoubleChunk destChunk = WritableDoubleChunk.makeWritableChunk(2048); + try (final ChunkSource.FillContext doubleFillContext = doubleColumnSource.makeFillContext(2048)) { + doubleColumnSource.fillChunk(doubleFillContext, destChunk, OrderedKeys.forRange(0, 2047)); TestCase.assertEquals(2048, destChunk.size()); for (int ii = 0; ii < 1024; ++ii) { TestCase.assertEquals(doubleChunk1.get(ii), destChunk.get(ii)); @@ -233,8 +220,8 @@ private void checkDoubles(WritableDoubleChunk doubleChunk1, } try (final ChunkSource.GetContext getContext = doubleColumnSource.makeGetContext(2048)) { - final DoubleChunk values = doubleColumnSource - .getChunk(getContext, OrderedKeys.forRange(0, 2047)).asDoubleChunk(); + final DoubleChunk values = + 
doubleColumnSource.getChunk(getContext, OrderedKeys.forRange(0, 2047)).asDoubleChunk(); TestCase.assertEquals(2048, values.size()); for (int ii = 0; ii < 1024; ++ii) { TestCase.assertEquals(doubleChunk1.get(ii), values.get(ii)); @@ -243,8 +230,8 @@ private void checkDoubles(WritableDoubleChunk doubleChunk1, } try (final ChunkSource.GetContext getContext = doubleColumnSource.makeGetContext(2048)) { - final DoubleChunk values = doubleColumnSource - .getChunk(getContext, OrderedKeys.forRange(0, 1023)).asDoubleChunk(); + final DoubleChunk values = + doubleColumnSource.getChunk(getContext, OrderedKeys.forRange(0, 1023)).asDoubleChunk(); TestCase.assertEquals(1024, values.size()); for (int ii = 0; ii < 1024; ++ii) { TestCase.assertEquals(doubleChunk1.get(ii), values.get(ii)); @@ -252,8 +239,8 @@ private void checkDoubles(WritableDoubleChunk doubleChunk1, } try (final ChunkSource.GetContext getContext = doubleColumnSource.makeGetContext(2048)) { - final DoubleChunk values = doubleColumnSource - .getChunk(getContext, OrderedKeys.forRange(1024, 2047)).asDoubleChunk(); + final DoubleChunk values = + doubleColumnSource.getChunk(getContext, OrderedKeys.forRange(1024, 2047)).asDoubleChunk(); TestCase.assertEquals(1024, values.size()); for (int ii = 0; ii < 1024; ++ii) { TestCase.assertEquals(doubleChunk2.get(ii), values.get(ii)); @@ -261,15 +248,15 @@ private void checkDoubles(WritableDoubleChunk doubleChunk1, } try (final ChunkSource.GetContext getContext = doubleColumnSource.makeGetContext(2048)) { - final DoubleChunk values = doubleColumnSource - .getChunk(getContext, OrderedKeys.forRange(2047, 2047)).asDoubleChunk(); + final DoubleChunk values = + doubleColumnSource.getChunk(getContext, OrderedKeys.forRange(2047, 2047)).asDoubleChunk(); TestCase.assertEquals(1, values.size()); TestCase.assertEquals(doubleChunk2.get(1023), values.get(0)); } try (final ChunkSource.GetContext getContext = doubleColumnSource.makeGetContext(2048)) { - final DoubleChunk values = 
doubleColumnSource - .getChunk(getContext, OrderedKeys.forRange(10, 20)).asDoubleChunk(); + final DoubleChunk values = + doubleColumnSource.getChunk(getContext, OrderedKeys.forRange(10, 20)).asDoubleChunk(); TestCase.assertEquals(11, values.size()); for (int ii = 0; ii <= 10; ++ii) { TestCase.assertEquals(doubleChunk1.get(ii + 10), values.get(ii)); @@ -277,8 +264,8 @@ private void checkDoubles(WritableDoubleChunk doubleChunk1, } try (final ChunkSource.GetContext getContext = doubleColumnSource.makeGetContext(2048)) { - final DoubleChunk values = doubleColumnSource - .getChunk(getContext, OrderedKeys.forRange(1020, 1030)).asDoubleChunk(); + final DoubleChunk values = + doubleColumnSource.getChunk(getContext, OrderedKeys.forRange(1020, 1030)).asDoubleChunk(); TestCase.assertEquals(11, values.size()); for (int ii = 0; ii <= 3; ++ii) { TestCase.assertEquals(doubleChunk1.get(ii + 1020), values.get(ii)); @@ -290,11 +277,9 @@ private void checkDoubles(WritableDoubleChunk doubleChunk1, } private void checkLongs(WritableLongChunk longChunk1, - WritableLongChunk longChunk2, ChunkColumnSource longColumnSource) { - final WritableLongChunk destChunk = - WritableLongChunk.makeWritableChunk(2048); - try (final ChunkSource.FillContext longFillContext = - longColumnSource.makeFillContext(2048)) { + WritableLongChunk longChunk2, ChunkColumnSource longColumnSource) { + final WritableLongChunk destChunk = WritableLongChunk.makeWritableChunk(2048); + try (final ChunkSource.FillContext longFillContext = longColumnSource.makeFillContext(2048)) { longColumnSource.fillChunk(longFillContext, destChunk, OrderedKeys.forRange(0, 2047)); TestCase.assertEquals(2048, destChunk.size()); for (int ii = 0; ii < 1024; ++ii) { @@ -330,7 +315,7 @@ private void checkLongs(WritableLongChunk longChunk1, try (final ChunkSource.GetContext getContext = longColumnSource.makeGetContext(2048)) { final LongChunk values = - longColumnSource.getChunk(getContext, OrderedKeys.forRange(0, 2047)).asLongChunk(); + 
longColumnSource.getChunk(getContext, OrderedKeys.forRange(0, 2047)).asLongChunk(); TestCase.assertEquals(2048, values.size()); for (int ii = 0; ii < 1024; ++ii) { TestCase.assertEquals(longChunk1.get(ii), values.get(ii)); @@ -340,7 +325,7 @@ private void checkLongs(WritableLongChunk longChunk1, try (final ChunkSource.GetContext getContext = longColumnSource.makeGetContext(2048)) { final LongChunk values = - longColumnSource.getChunk(getContext, OrderedKeys.forRange(0, 1023)).asLongChunk(); + longColumnSource.getChunk(getContext, OrderedKeys.forRange(0, 1023)).asLongChunk(); TestCase.assertEquals(1024, values.size()); for (int ii = 0; ii < 1024; ++ii) { TestCase.assertEquals(longChunk1.get(ii), values.get(ii)); @@ -348,8 +333,8 @@ private void checkLongs(WritableLongChunk longChunk1, } try (final ChunkSource.GetContext getContext = longColumnSource.makeGetContext(2048)) { - final LongChunk values = longColumnSource - .getChunk(getContext, OrderedKeys.forRange(1024, 2047)).asLongChunk(); + final LongChunk values = + longColumnSource.getChunk(getContext, OrderedKeys.forRange(1024, 2047)).asLongChunk(); TestCase.assertEquals(1024, values.size()); for (int ii = 0; ii < 1024; ++ii) { TestCase.assertEquals(longChunk2.get(ii), values.get(ii)); @@ -357,15 +342,15 @@ private void checkLongs(WritableLongChunk longChunk1, } try (final ChunkSource.GetContext getContext = longColumnSource.makeGetContext(2048)) { - final LongChunk values = longColumnSource - .getChunk(getContext, OrderedKeys.forRange(2047, 2047)).asLongChunk(); + final LongChunk values = + longColumnSource.getChunk(getContext, OrderedKeys.forRange(2047, 2047)).asLongChunk(); TestCase.assertEquals(1, values.size()); TestCase.assertEquals(longChunk2.get(1023), values.get(0)); } try (final ChunkSource.GetContext getContext = longColumnSource.makeGetContext(2048)) { final LongChunk values = - longColumnSource.getChunk(getContext, OrderedKeys.forRange(10, 20)).asLongChunk(); + longColumnSource.getChunk(getContext, 
OrderedKeys.forRange(10, 20)).asLongChunk(); TestCase.assertEquals(11, values.size()); for (int ii = 0; ii <= 10; ++ii) { TestCase.assertEquals(longChunk1.get(ii + 10), values.get(ii)); @@ -373,8 +358,8 @@ private void checkLongs(WritableLongChunk longChunk1, } try (final ChunkSource.GetContext getContext = longColumnSource.makeGetContext(2048)) { - final LongChunk values = longColumnSource - .getChunk(getContext, OrderedKeys.forRange(1020, 1030)).asLongChunk(); + final LongChunk values = + longColumnSource.getChunk(getContext, OrderedKeys.forRange(1020, 1030)).asLongChunk(); TestCase.assertEquals(11, values.size()); for (int ii = 0; ii <= 3; ++ii) { TestCase.assertEquals(longChunk1.get(ii + 1020), values.get(ii)); @@ -399,8 +384,7 @@ private static Boolean makeExpectBoolean(int idx) { @Test public void testBooleanWrapper() { - final WritableByteChunk byteChunk = - WritableByteChunk.makeWritableChunk(32); + final WritableByteChunk byteChunk = WritableByteChunk.makeWritableChunk(32); for (int ii = 0; ii < byteChunk.size(); ++ii) { byteChunk.set(ii, BooleanUtils.booleanAsByte(makeExpectBoolean(ii))); } @@ -417,8 +401,7 @@ public void testBooleanWrapper() { TestCase.assertEquals(makeExpectBoolean(ii), wrapped.get(ii)); } - final WritableObjectChunk destChunk = - WritableObjectChunk.makeWritableChunk(2048); + final WritableObjectChunk destChunk = WritableObjectChunk.makeWritableChunk(2048); try (final ChunkSource.FillContext fillContext = wrapped.makeFillContext(32)) { wrapped.fillChunk(fillContext, destChunk, OrderedKeys.forRange(0, 31)); TestCase.assertEquals(32, destChunk.size()); @@ -429,7 +412,7 @@ public void testBooleanWrapper() { try (final ChunkSource.GetContext getContext = wrapped.makeGetContext(32)) { final ObjectChunk values = - wrapped.getChunk(getContext, OrderedKeys.forRange(1, 10)).asObjectChunk(); + wrapped.getChunk(getContext, OrderedKeys.forRange(1, 10)).asObjectChunk(); TestCase.assertEquals(10, values.size()); for (int ii = 1; ii <= 10; ++ii) { 
TestCase.assertEquals(makeExpectBoolean(ii), values.get(ii - 1)); @@ -438,14 +421,12 @@ public void testBooleanWrapper() { } private static DBDateTime makeExpectDateTime(int idx) { - return DBTimeUtils.plus(DBTimeUtils.convertDateTime("2021-07-27T09:00 NY"), - idx * 3600_000_000_000L); + return DBTimeUtils.plus(DBTimeUtils.convertDateTime("2021-07-27T09:00 NY"), idx * 3600_000_000_000L); } @Test public void testDateTimeWrapper() { - final WritableLongChunk longChunk = - WritableLongChunk.makeWritableChunk(32); + final WritableLongChunk longChunk = WritableLongChunk.makeWritableChunk(32); for (int ii = 0; ii < longChunk.size(); ++ii) { longChunk.set(ii, makeExpectDateTime(ii).getNanos()); } @@ -462,8 +443,7 @@ public void testDateTimeWrapper() { TestCase.assertEquals(makeExpectDateTime(ii), wrapped.get(ii)); } - final WritableObjectChunk destChunk = - WritableObjectChunk.makeWritableChunk(2048); + final WritableObjectChunk destChunk = WritableObjectChunk.makeWritableChunk(2048); try (final ChunkSource.FillContext fillContext = wrapped.makeFillContext(32)) { wrapped.fillChunk(fillContext, destChunk, OrderedKeys.forRange(0, 31)); TestCase.assertEquals(32, destChunk.size()); @@ -474,7 +454,7 @@ public void testDateTimeWrapper() { try (final ChunkSource.GetContext getContext = wrapped.makeGetContext(32)) { final ObjectChunk values = - wrapped.getChunk(getContext, OrderedKeys.forRange(1, 10)).asObjectChunk(); + wrapped.getChunk(getContext, OrderedKeys.forRange(1, 10)).asObjectChunk(); TestCase.assertEquals(10, values.size()); for (int ii = 1; ii <= 10; ++ii) { TestCase.assertEquals(makeExpectDateTime(ii), values.get(ii - 1)); @@ -484,18 +464,15 @@ public void testDateTimeWrapper() { @Test public void testClear() { - final WritableIntChunk intChunk1 = - WritableIntChunk.makeWritableChunk(64); - final WritableIntChunk intChunk2 = - WritableIntChunk.makeWritableChunk(64); + final WritableIntChunk intChunk1 = WritableIntChunk.makeWritableChunk(64); + final WritableIntChunk 
intChunk2 = WritableIntChunk.makeWritableChunk(64); for (int ii = 0; ii < 64; ++ii) { intChunk1.set(ii, 1024 + ii); intChunk2.set(ii, 2048 + ii); } - final ChunkColumnSource intColumnSource = - ChunkColumnSource.make(ChunkType.Int, int.class); + final ChunkColumnSource intColumnSource = ChunkColumnSource.make(ChunkType.Int, int.class); TestCase.assertEquals(QueryConstants.NULL_INT, intColumnSource.getInt(-1)); TestCase.assertEquals(QueryConstants.NULL_INT, intColumnSource.getInt(0)); @@ -505,8 +482,7 @@ public void testClear() { TestCase.assertEquals(QueryConstants.NULL_INT, intColumnSource.getInt(64)); try (final ChunkSource.GetContext context = intColumnSource.makeGetContext(64)) { - final IntChunk actual = - intColumnSource.getChunk(context, 0, 63).asIntChunk(); + final IntChunk actual = intColumnSource.getChunk(context, 0, 63).asIntChunk(); TestCase.assertTrue(IntChunkEquals.equalReduce(actual, intChunk1)); } @@ -520,8 +496,7 @@ public void testClear() { TestCase.assertEquals(QueryConstants.NULL_INT, intColumnSource.getInt(64)); try (final ChunkSource.GetContext context = intColumnSource.makeGetContext(64)) { - final IntChunk actual = - intColumnSource.getChunk(context, 0, 63).asIntChunk(); + final IntChunk actual = intColumnSource.getChunk(context, 0, 63).asIntChunk(); TestCase.assertTrue(IntChunkEquals.equalReduce(actual, intChunk2)); } } diff --git a/DB/src/test/java/io/deephaven/db/v2/sources/regioned/ReplicateRegionsAndRegionedSourcesTest.java b/DB/src/test/java/io/deephaven/db/v2/sources/regioned/ReplicateRegionsAndRegionedSourcesTest.java index fe4bc249642..0b85a03e25d 100644 --- a/DB/src/test/java/io/deephaven/db/v2/sources/regioned/ReplicateRegionsAndRegionedSourcesTest.java +++ b/DB/src/test/java/io/deephaven/db/v2/sources/regioned/ReplicateRegionsAndRegionedSourcesTest.java @@ -9,15 +9,13 @@ import java.io.IOException; /** - * Code generation for tests of {@link RegionedColumnSource} implementations as well as well as the - * primary region 
interfaces for some primitive types. + * Code generation for tests of {@link RegionedColumnSource} implementations as well as well as the primary region + * interfaces for some primitive types. */ public class ReplicateRegionsAndRegionedSourcesTest { public static void main(String... args) throws IOException { - ReplicatePrimitiveCode.charToAllButBoolean(TestRegionedColumnSourceChar.class, - ReplicatePrimitiveCode.TEST_SRC); - ReplicatePrimitiveCode.charToAllButBooleanAndByte(TstColumnRegionChar.class, - ReplicatePrimitiveCode.TEST_SRC); + ReplicatePrimitiveCode.charToAllButBoolean(TestRegionedColumnSourceChar.class, ReplicatePrimitiveCode.TEST_SRC); + ReplicatePrimitiveCode.charToAllButBooleanAndByte(TstColumnRegionChar.class, ReplicatePrimitiveCode.TEST_SRC); } } diff --git a/DB/src/test/java/io/deephaven/db/v2/sources/regioned/TestChunkedRegionedOperations.java b/DB/src/test/java/io/deephaven/db/v2/sources/regioned/TestChunkedRegionedOperations.java index 80f5efbd25c..a1ebedf504f 100644 --- a/DB/src/test/java/io/deephaven/db/v2/sources/regioned/TestChunkedRegionedOperations.java +++ b/DB/src/test/java/io/deephaven/db/v2/sources/regioned/TestChunkedRegionedOperations.java @@ -126,10 +126,10 @@ public void setUp() throws Exception { originalScope = QueryScope.getScope(); final QueryScope queryScope = new QueryScope.StandaloneImpl(); Arrays.stream(originalScope.getParams(originalScope.getParamNames())) - .forEach(p -> queryScope.putParam(p.getName(), p.getValue())); + .forEach(p -> queryScope.putParam(p.getName(), p.getValue())); queryScope.putParam("nowNanos", DBTimeUtils.currentTime().getNanos()); - queryScope.putParam("letters", IntStream.range('A', 'A' + 64) - .mapToObj(c -> new String(new char[] {(char) c})).toArray(String[]::new)); + queryScope.putParam("letters", + IntStream.range('A', 'A' + 64).mapToObj(c -> new String(new char[] {(char) c})).toArray(String[]::new)); queryScope.putParam("emptySymbolSet", new StringSetArrayWrapper()); 
queryScope.putParam("stripeSize", STRIPE_SIZE); QueryScope.setScope(queryScope); @@ -143,129 +143,126 @@ public void setUp() throws Exception { QueryLibrary.importStatic(BooleanUtils.class); final TableDefinition definition = TableDefinition.of( - ColumnDefinition.ofLong("II"), - ColumnDefinition.ofString("PC").withPartitioning(), - ColumnDefinition.ofByte("B"), - ColumnDefinition.ofChar("C"), - ColumnDefinition.ofShort("S"), - ColumnDefinition.ofInt("I"), - ColumnDefinition.ofLong("L"), - ColumnDefinition.ofFloat("F"), - ColumnDefinition.ofDouble("D"), - ColumnDefinition.ofBoolean("Bl"), - ColumnDefinition.ofString("Sym"), - ColumnDefinition.ofString("Str"), - ColumnDefinition.ofTime("DT"), - ColumnDefinition.fromGenericType("SymS", StringSet.class), - ColumnDefinition.fromGenericType("Ser", SimpleSerializable.class), - ColumnDefinition.fromGenericType("Ext", SimpleExternalizable.class), - ColumnDefinition.fromGenericType("Fix", BigInteger.class), - ColumnDefinition.fromGenericType("Var", BigInteger.class)); + ColumnDefinition.ofLong("II"), + ColumnDefinition.ofString("PC").withPartitioning(), + ColumnDefinition.ofByte("B"), + ColumnDefinition.ofChar("C"), + ColumnDefinition.ofShort("S"), + ColumnDefinition.ofInt("I"), + ColumnDefinition.ofLong("L"), + ColumnDefinition.ofFloat("F"), + ColumnDefinition.ofDouble("D"), + ColumnDefinition.ofBoolean("Bl"), + ColumnDefinition.ofString("Sym"), + ColumnDefinition.ofString("Str"), + ColumnDefinition.ofTime("DT"), + ColumnDefinition.fromGenericType("SymS", StringSet.class), + ColumnDefinition.fromGenericType("Ser", SimpleSerializable.class), + ColumnDefinition.fromGenericType("Ext", SimpleExternalizable.class), + ColumnDefinition.fromGenericType("Fix", BigInteger.class), + ColumnDefinition.fromGenericType("Var", BigInteger.class)); final ParquetInstructions parquetInstructions = new ParquetInstructions.Builder() - .addColumnCodec("Fix", BigIntegerCodec.class.getName(), "4") - .addColumnCodec("Var", 
BigIntegerCodec.class.getName()) - .useDictionary("Sym", true) - .setMaximumDictionaryKeys(100) // Force "Str" to use non-dictionary encoding - .build(); + .addColumnCodec("Fix", BigIntegerCodec.class.getName(), "4") + .addColumnCodec("Var", BigIntegerCodec.class.getName()) + .useDictionary("Sym", true) + .setMaximumDictionaryKeys(100) // Force "Str" to use non-dictionary encoding + .build(); final Table inputData = ((QueryTable) TableTools.emptyTable(TABLE_SIZE) - .update( - "II = ii") - .updateView( - "PC = Long.toString((long) (II / stripeSize))", - "B = II % 1000 == 0 ? NULL_BYTE : (byte) II", - "C = II % 27 == 26 ? NULL_CHAR : (char) ('A' + II % 27)", - "S = II % 30000 == 0 ? NULL_SHORT : (short) II", - "I = II % 512 == 0 ? NULL_INT : (int) II", - "L = II % 1024 == 0 ? NULL_LONG : II", - "F = II % 2048 == 0 ? NULL_FLOAT : (float) (II * 0.25)", - "D = II % 4096 == 0 ? NULL_DOUBLE : II * 1.25", - "Bl = II % 8192 == 0 ? null : II % 2 == 0", - "Sym = II % 64 == 0 ? null : Long.toString(II % 1000)", - "Str = II % 128 == 0 ? null : Long.toString(II)", - "DT = II % 256 == 0 ? null : new DBDateTime(nowNanos + II)", - "SymS = (StringSet) new StringSetArrayWrapper(letters[((int) II) % 64], letters[(((int) II) + 7) % 64])", - "Ser = II % 1024 == 0 ? null : new SimpleSerializable(II)", - "Ext = II % 1024 == 0 ? null : new SimpleExternalizable(II)", - "Fix = Sym == null ? null : new BigInteger(Sym, 10)", - "Var = Str == null ? null : new BigInteger(Str, 10)")) - .withDefinitionUnsafe(definition); + .update( + "II = ii") + .updateView( + "PC = Long.toString((long) (II / stripeSize))", + "B = II % 1000 == 0 ? NULL_BYTE : (byte) II", + "C = II % 27 == 26 ? NULL_CHAR : (char) ('A' + II % 27)", + "S = II % 30000 == 0 ? NULL_SHORT : (short) II", + "I = II % 512 == 0 ? NULL_INT : (int) II", + "L = II % 1024 == 0 ? NULL_LONG : II", + "F = II % 2048 == 0 ? NULL_FLOAT : (float) (II * 0.25)", + "D = II % 4096 == 0 ? NULL_DOUBLE : II * 1.25", + "Bl = II % 8192 == 0 ? 
null : II % 2 == 0", + "Sym = II % 64 == 0 ? null : Long.toString(II % 1000)", + "Str = II % 128 == 0 ? null : Long.toString(II)", + "DT = II % 256 == 0 ? null : new DBDateTime(nowNanos + II)", + "SymS = (StringSet) new StringSetArrayWrapper(letters[((int) II) % 64], letters[(((int) II) + 7) % 64])", + "Ser = II % 1024 == 0 ? null : new SimpleSerializable(II)", + "Ext = II % 1024 == 0 ? null : new SimpleExternalizable(II)", + "Fix = Sym == null ? null : new BigInteger(Sym, 10)", + "Var = Str == null ? null : new BigInteger(Str, 10)")) + .withDefinitionUnsafe(definition); // TODO: Add (Fixed|Variable)WidthObjectCodec columns final Table inputMissingData = ((QueryTable) TableTools.emptyTable(TABLE_SIZE) - .update( - "II = ii") - .updateView( - "PC = `N` + Long.toString((long) (II / stripeSize))", - "B = NULL_BYTE", - "C = NULL_CHAR", - "S = NULL_SHORT", - "I = NULL_INT", - "L = NULL_LONG", - "F = NULL_FLOAT", - "D = NULL_DOUBLE", - "Bl = (Boolean) null", - "Sym = (String) null", - "Str = (String) null", - "DT = (DBDateTime) null", - "SymS = (StringSet) null", - "Ser = (SimpleSerializable) null", - "Ext = (SimpleExternalizable) null", - "Fix = (BigInteger) null", - "Var = (BigInteger) null")).withDefinitionUnsafe(definition); - - dataDirectory = - Files.createTempDirectory(Paths.get(""), "TestChunkedRegionedOperations-").toFile(); + .update( + "II = ii") + .updateView( + "PC = `N` + Long.toString((long) (II / stripeSize))", + "B = NULL_BYTE", + "C = NULL_CHAR", + "S = NULL_SHORT", + "I = NULL_INT", + "L = NULL_LONG", + "F = NULL_FLOAT", + "D = NULL_DOUBLE", + "Bl = (Boolean) null", + "Sym = (String) null", + "Str = (String) null", + "DT = (DBDateTime) null", + "SymS = (StringSet) null", + "Ser = (SimpleSerializable) null", + "Ext = (SimpleExternalizable) null", + "Fix = (BigInteger) null", + "Var = (BigInteger) null")).withDefinitionUnsafe(definition); + + dataDirectory = Files.createTempDirectory(Paths.get(""), "TestChunkedRegionedOperations-").toFile(); 
dataDirectory.deleteOnExit(); - final TableDefinition partitionedDataDefinition = - new TableDefinition(inputData.getDefinition()); + final TableDefinition partitionedDataDefinition = new TableDefinition(inputData.getDefinition()); final TableDefinition partitionedMissingDataDefinition = - new TableDefinition(inputData.view("PC", "II").getDefinition()); + new TableDefinition(inputData.view("PC", "II").getDefinition()); final String tableName = "TestTable"; final TableMap partitionedInputData = inputData.byExternal("PC"); ParquetTools.writeParquetTables( - partitionedInputData.values().toArray(Table.ZERO_LENGTH_TABLE_ARRAY), - partitionedDataDefinition.getWritable(), - parquetInstructions, - Arrays.stream(partitionedInputData.getKeySet()) - .map(pcv -> new File(dataDirectory, - "IP" + File.separator + "P" + pcv + File.separator + tableName + File.separator - + PARQUET_FILE_NAME)) - .toArray(File[]::new), - CollectionUtil.ZERO_LENGTH_STRING_ARRAY); - - final TableMap partitionedInputMissingData = - inputMissingData.view("PC", "II").byExternal("PC"); + partitionedInputData.values().toArray(Table.ZERO_LENGTH_TABLE_ARRAY), + partitionedDataDefinition.getWritable(), + parquetInstructions, + Arrays.stream(partitionedInputData.getKeySet()) + .map(pcv -> new File(dataDirectory, + "IP" + File.separator + "P" + pcv + File.separator + tableName + File.separator + + PARQUET_FILE_NAME)) + .toArray(File[]::new), + CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + + final TableMap partitionedInputMissingData = inputMissingData.view("PC", "II").byExternal("PC"); ParquetTools.writeParquetTables( - partitionedInputMissingData.values().toArray(Table.ZERO_LENGTH_TABLE_ARRAY), - partitionedMissingDataDefinition.getWritable(), - parquetInstructions, - Arrays.stream(partitionedInputMissingData.getKeySet()) - .map(pcv -> new File(dataDirectory, - "IP" + File.separator + "P" + pcv + File.separator + tableName + File.separator - + PARQUET_FILE_NAME)) - .toArray(File[]::new), - 
CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + partitionedInputMissingData.values().toArray(Table.ZERO_LENGTH_TABLE_ARRAY), + partitionedMissingDataDefinition.getWritable(), + parquetInstructions, + Arrays.stream(partitionedInputMissingData.getKeySet()) + .map(pcv -> new File(dataDirectory, + "IP" + File.separator + "P" + pcv + File.separator + tableName + File.separator + + PARQUET_FILE_NAME)) + .toArray(File[]::new), + CollectionUtil.ZERO_LENGTH_STRING_ARRAY); expected = TableTools - .merge( - inputData.updateView("PC = `P` + PC"), - inputMissingData.updateView("PC = `P` + PC")) - .updateView( - "Bl_R = booleanAsByte(Bl)", - "DT_R = nanos(DT)"); + .merge( + inputData.updateView("PC = `P` + PC"), + inputMissingData.updateView("PC = `P` + PC")) + .updateView( + "Bl_R = booleanAsByte(Bl)", + "DT_R = nanos(DT)"); actual = ParquetTools.readPartitionedTable( - DeephavenNestedPartitionLayout.forParquet(dataDirectory, tableName, "PC", null), - ParquetInstructions.EMPTY, - partitionedDataDefinition).updateView( - new ReinterpretedColumn<>("Bl", Boolean.class, "Bl_R", byte.class), - new ReinterpretedColumn<>("DT", DBDateTime.class, "DT_R", long.class)) - .coalesce(); + DeephavenNestedPartitionLayout.forParquet(dataDirectory, tableName, "PC", null), + ParquetInstructions.EMPTY, + partitionedDataDefinition).updateView( + new ReinterpretedColumn<>("Bl", Boolean.class, "Bl_R", byte.class), + new ReinterpretedColumn<>("DT", DBDateTime.class, "DT_R", long.class)) + .coalesce(); } @After @@ -303,49 +300,43 @@ public void testEqual() { assertTableEquals(expected, actual); } - private static void assertChunkWiseEquals(@NotNull final Table expected, - @NotNull final Table actual, final int chunkCapacity) { + private static void assertChunkWiseEquals(@NotNull final Table expected, @NotNull final Table actual, + final int chunkCapacity) { boolean first = true; assertEquals(expected.size(), actual.size()); try (final SafeCloseableList closeables = new SafeCloseableList(); - final 
OrderedKeys.Iterator expectedIterator = - expected.getIndex().getOrderedKeysIterator(); - final OrderedKeys.Iterator actualIterator = - actual.getIndex().getOrderedKeysIterator()) { - final ChunkType[] chunkTypes = - expected.getDefinition().getColumnStream().map(ColumnDefinition::getDataType) + final OrderedKeys.Iterator expectedIterator = expected.getIndex().getOrderedKeysIterator(); + final OrderedKeys.Iterator actualIterator = actual.getIndex().getOrderedKeysIterator()) { + final ChunkType[] chunkTypes = expected.getDefinition().getColumnStream().map(ColumnDefinition::getDataType) .map(ChunkType::fromElementType).toArray(ChunkType[]::new); - final Equals[] equals = - Arrays.stream(chunkTypes).map(Equals::make).toArray(Equals[]::new); + final Equals[] equals = Arrays.stream(chunkTypes).map(Equals::make).toArray(Equals[]::new); // noinspection unchecked final WritableChunk[] expectedChunks = Arrays.stream(chunkTypes) - .map(ct -> ct.makeWritableChunk(chunkCapacity)).toArray(WritableChunk[]::new); + .map(ct -> ct.makeWritableChunk(chunkCapacity)).toArray(WritableChunk[]::new); // noinspection unchecked final WritableChunk[] actualChunks = Arrays.stream(chunkTypes) - .map(ct -> ct.makeWritableChunk(chunkCapacity)).toArray(WritableChunk[]::new); + .map(ct -> ct.makeWritableChunk(chunkCapacity)).toArray(WritableChunk[]::new); final ColumnSource[] expectedSources = - expected.getColumnSources().toArray(ColumnSource.ZERO_LENGTH_COLUMN_SOURCE_ARRAY); + expected.getColumnSources().toArray(ColumnSource.ZERO_LENGTH_COLUMN_SOURCE_ARRAY); final ColumnSource[] actualSources = - actual.getColumnSources().toArray(ColumnSource.ZERO_LENGTH_COLUMN_SOURCE_ARRAY); + actual.getColumnSources().toArray(ColumnSource.ZERO_LENGTH_COLUMN_SOURCE_ARRAY); - final ColumnSource.FillContext[] expectedContexts = Arrays.stream(expectedSources) - .map(cs -> closeables.add(cs.makeFillContext(chunkCapacity))) - .toArray(ColumnSource.FillContext[]::new); - final ColumnSource.FillContext[] 
actualContexts = Arrays.stream(actualSources) - .map(cs -> closeables.add(cs.makeFillContext(chunkCapacity))) - .toArray(ColumnSource.FillContext[]::new); + final ColumnSource.FillContext[] expectedContexts = + Arrays.stream(expectedSources).map(cs -> closeables.add(cs.makeFillContext(chunkCapacity))) + .toArray(ColumnSource.FillContext[]::new); + final ColumnSource.FillContext[] actualContexts = + Arrays.stream(actualSources).map(cs -> closeables.add(cs.makeFillContext(chunkCapacity))) + .toArray(ColumnSource.FillContext[]::new); assertEquals(expectedChunks.length, expectedContexts.length); assertEquals(actualChunks.length, actualContexts.length); while (expectedIterator.hasMore()) { assertTrue(actualIterator.hasMore()); - final OrderedKeys expectedKeys = - expectedIterator.getNextOrderedKeysWithLength(chunkCapacity); - final OrderedKeys actualKeys = - actualIterator.getNextOrderedKeysWithLength(chunkCapacity); + final OrderedKeys expectedKeys = expectedIterator.getNextOrderedKeysWithLength(chunkCapacity); + final OrderedKeys actualKeys = actualIterator.getNextOrderedKeysWithLength(chunkCapacity); for (int ci = 0; ci < expectedChunks.length; ++ci) { final Equals equal = equals[ci]; final WritableChunk expectedChunk = expectedChunks[ci]; @@ -354,10 +345,10 @@ private static void assertChunkWiseEquals(@NotNull final Table expected, if (first) { // Let's exercise the legacy get code, too, while we're in here - ((AbstractColumnSource) expectedSources[ci]) - .defaultFillChunk(expectedContexts[ci], expectedChunk, expectedKeys); - ((AbstractColumnSource) actualSources[ci]) - .defaultFillChunk(actualContexts[ci], actualChunk, actualKeys); + ((AbstractColumnSource) expectedSources[ci]).defaultFillChunk(expectedContexts[ci], + expectedChunk, expectedKeys); + ((AbstractColumnSource) actualSources[ci]).defaultFillChunk(actualContexts[ci], actualChunk, + actualKeys); assertEquals(expectedChunk.size(), actualChunk.size()); for (int ei = 0; ei < expectedChunk.size(); ++ei) { @@ 
-367,8 +358,7 @@ private static void assertChunkWiseEquals(@NotNull final Table expected, assertEquals(expectedKeys.size(), actualKeys.size()); - expectedSources[ci].fillChunk(expectedContexts[ci], expectedChunk, - expectedKeys); + expectedSources[ci].fillChunk(expectedContexts[ci], expectedChunk, expectedKeys); actualSources[ci].fillChunk(actualContexts[ci], actualChunk, actualKeys); assertEquals(expectedKeys.size(), expectedChunk.size()); @@ -408,8 +398,7 @@ static Equals make(@NotNull final ChunkType chunkType) { case Double: return (e, a, i) -> e.asDoubleChunk().get(i) == a.asDoubleChunk().get(i); case Object: - return (e, a, i) -> Objects.equals(e.asObjectChunk().get(i), - a.asObjectChunk().get(i)); + return (e, a, i) -> Objects.equals(e.asObjectChunk().get(i), a.asObjectChunk().get(i)); } throw new IllegalArgumentException("Unknown ChunkType " + chunkType); } @@ -432,26 +421,23 @@ public void testFullTableSmallChunks() { @Test public void testHalfDenseTableFullChunks() { - assertChunkWiseEquals(expected.where("(ii / 100) % 2 == 0"), - actual.where("(ii / 100) % 2 == 0"), expected.intSize()); + assertChunkWiseEquals(expected.where("(ii / 100) % 2 == 0"), actual.where("(ii / 100) % 2 == 0"), + expected.intSize()); } @Test public void testHalfDenseTableNormalChunks() { - assertChunkWiseEquals(expected.where("(ii / 100) % 2 == 0"), - actual.where("(ii / 100) % 2 == 0"), 4096); + assertChunkWiseEquals(expected.where("(ii / 100) % 2 == 0"), actual.where("(ii / 100) % 2 == 0"), 4096); } @Test public void testHalfDenseTableSmallChunks() { - assertChunkWiseEquals(expected.where("(ii / 100) % 2 == 0"), - actual.where("(ii / 100) % 2 == 0"), 8); + assertChunkWiseEquals(expected.where("(ii / 100) % 2 == 0"), actual.where("(ii / 100) % 2 == 0"), 8); } @Test public void testSparseTableFullChunks() { - assertChunkWiseEquals(expected.where("ii % 2 == 0"), actual.where("ii % 2 == 0"), - expected.intSize()); + assertChunkWiseEquals(expected.where("ii % 2 == 0"), 
actual.where("ii % 2 == 0"), expected.intSize()); } @Test @@ -466,18 +452,16 @@ public void testSparseTableSmallChunks() { @Test public void testEqualSymbols() { - // TODO (https://github.com/deephaven/deephaven-core/issues/949): Uncomment this once we - // write encoding stats + // TODO (https://github.com/deephaven/deephaven-core/issues/949): Uncomment this once we write encoding stats // //noinspection unchecked // final SymbolTableSource symbolTableSource = (SymbolTableSource) // actual.getColumnSource("Sym"); // // assertTrue(symbolTableSource.hasSymbolTable(actual.getIndex())); - // final Table symbolTable = symbolTableSource.getStaticSymbolTable(actual.getIndex(), - // false); + // final Table symbolTable = symbolTableSource.getStaticSymbolTable(actual.getIndex(), false); // - // assertTableEquals(expected.view("PC", "Sym").where("Sym != null").firstBy("PC", - // "Sym").dropColumns("PC"), symbolTable.view("Sym = Symbol").where("Sym != null")); + // assertTableEquals(expected.view("PC", "Sym").where("Sym != null").firstBy("PC", "Sym").dropColumns("PC"), + // symbolTable.view("Sym = Symbol").where("Sym != null")); // // final Table joined = actual // .updateView(new ReinterpretedColumn<>("Sym", String.class, "SymId", long.class)) diff --git a/DB/src/test/java/io/deephaven/db/v2/sources/regioned/TestDbArrayUngroup.java b/DB/src/test/java/io/deephaven/db/v2/sources/regioned/TestDbArrayUngroup.java index e0bc3599a07..f0fa81d1828 100644 --- a/DB/src/test/java/io/deephaven/db/v2/sources/regioned/TestDbArrayUngroup.java +++ b/DB/src/test/java/io/deephaven/db/v2/sources/regioned/TestDbArrayUngroup.java @@ -32,13 +32,11 @@ public void tearDown() { @Test public void testUngroup() { - final Table theTable = - TableTools.emptyTable(20).update("A=`a`+i%10", "B=`b`+i%5", "C=`i`+i"); + final Table theTable = TableTools.emptyTable(20).update("A=`a`+i%10", "B=`b`+i%5", "C=`i`+i"); assertEquals(String.class, theTable.getDefinition().getColumn("C").getDataType()); final Table 
groupedTable = theTable.by("A", "B"); - assertTrue(DbArray.class - .isAssignableFrom(groupedTable.getDefinition().getColumn("C").getDataType())); + assertTrue(DbArray.class.isAssignableFrom(groupedTable.getDefinition().getColumn("C").getDataType())); assertEquals(String.class, groupedTable.getDefinition().getColumn("C").getComponentType()); final Table ungroupedTable = groupedTable.ungroup(); @@ -48,13 +46,11 @@ public void testUngroup() { ParquetTools.writeTable(groupedTable, dest); final Table actual = ParquetTools.readTable(dest); - assertTrue( - DbArray.class.isAssignableFrom(actual.getDefinition().getColumn("C").getDataType())); + assertTrue(DbArray.class.isAssignableFrom(actual.getDefinition().getColumn("C").getDataType())); assertEquals(String.class, actual.getDefinition().getColumn("C").getComponentType()); Table ungroupedActual = actual.ungroup(); - assertFalse(DbArray.class - .isAssignableFrom(ungroupedActual.getDefinition().getColumn("C").getDataType())); + assertFalse(DbArray.class.isAssignableFrom(ungroupedActual.getDefinition().getColumn("C").getDataType())); assertEquals(String.class, ungroupedActual.getDefinition().getColumn("C").getDataType()); } } diff --git a/DB/src/test/java/io/deephaven/db/v2/sources/regioned/TestRegionedColumnSourceDBDateTime.java b/DB/src/test/java/io/deephaven/db/v2/sources/regioned/TestRegionedColumnSourceDBDateTime.java index cd3a68597ec..7f089d09057 100644 --- a/DB/src/test/java/io/deephaven/db/v2/sources/regioned/TestRegionedColumnSourceDBDateTime.java +++ b/DB/src/test/java/io/deephaven/db/v2/sources/regioned/TestRegionedColumnSourceDBDateTime.java @@ -13,8 +13,8 @@ import org.junit.Test; @SuppressWarnings("JUnit4AnnotatedMethodInJUnit3TestCase") -public class TestRegionedColumnSourceDBDateTime extends - TstRegionedColumnSourceReferencing> { +public class TestRegionedColumnSourceDBDateTime + extends TstRegionedColumnSourceReferencing> { public TestRegionedColumnSourceDBDateTime() { super(ColumnRegionLong.class); @@ 
-36,10 +36,10 @@ public TestRegionedColumnSourceDBDateTime() { private ColumnSource SUT_AS_LONG; private void assertLookup(final long elementIndex, - final int expectedRegionIndex, - final DBDateTime output, - final boolean prev, - final boolean reinterpreted) { + final int expectedRegionIndex, + final DBDateTime output, + final boolean prev, + final boolean reinterpreted) { checking(new Expectations() { { oneOf(cr[expectedRegionIndex]).getReferencedRegion(); @@ -50,7 +50,7 @@ private void assertLookup(final long elementIndex, }); if (reinterpreted) { assertEquals(output == null ? QueryConstants.NULL_LONG : output.getNanos(), - prev ? SUT_AS_LONG.getPrevLong(elementIndex) : SUT_AS_LONG.getLong(elementIndex)); + prev ? SUT_AS_LONG.getPrevLong(elementIndex) : SUT_AS_LONG.getLong(elementIndex)); } else { assertEquals(output, prev ? SUT.getPrev(elementIndex) : SUT.get(elementIndex)); } @@ -76,25 +76,17 @@ public void testGet() { assertLookup(0L, 0, TEST_DATES[0], false, false); assertLookup(RegionedColumnSource.getLastElementIndex(0), 0, TEST_DATES[1], false, false); - assertLookup(RegionedColumnSource.getFirstElementIndex(1) + 1, 1, TEST_DATES[2], false, - false); - assertLookup(RegionedColumnSource.getLastElementIndex(1) - 1, 1, TEST_DATES[3], false, - false); - - assertLookup(RegionedColumnSource.getFirstElementIndex(4) + 2, 4, TEST_DATES[4], false, - false); - assertLookup(RegionedColumnSource.getLastElementIndex(4) - 2, 4, TEST_DATES[5], false, - false); - - assertLookup(RegionedColumnSource.getFirstElementIndex(8) + 3, 8, TEST_DATES[6], false, - false); - assertLookup(RegionedColumnSource.getLastElementIndex(8) - 3, 8, TEST_DATES[7], false, - false); - - assertLookup(RegionedColumnSource.getFirstElementIndex(9) + 4, 9, TEST_DATES[8], false, - false); - assertLookup(RegionedColumnSource.getLastElementIndex(9) - 4, 9, TEST_DATES[9], false, - false); + assertLookup(RegionedColumnSource.getFirstElementIndex(1) + 1, 1, TEST_DATES[2], false, false); + 
assertLookup(RegionedColumnSource.getLastElementIndex(1) - 1, 1, TEST_DATES[3], false, false); + + assertLookup(RegionedColumnSource.getFirstElementIndex(4) + 2, 4, TEST_DATES[4], false, false); + assertLookup(RegionedColumnSource.getLastElementIndex(4) - 2, 4, TEST_DATES[5], false, false); + + assertLookup(RegionedColumnSource.getFirstElementIndex(8) + 3, 8, TEST_DATES[6], false, false); + assertLookup(RegionedColumnSource.getLastElementIndex(8) - 3, 8, TEST_DATES[7], false, false); + + assertLookup(RegionedColumnSource.getFirstElementIndex(9) + 4, 9, TEST_DATES[8], false, false); + assertLookup(RegionedColumnSource.getLastElementIndex(9) - 4, 9, TEST_DATES[9], false, false); } @Override @@ -105,25 +97,17 @@ public void testGetPrev() { assertLookup(0L, 0, TEST_DATES[0], true, false); assertLookup(RegionedColumnSource.getLastElementIndex(0), 0, TEST_DATES[1], true, false); - assertLookup(RegionedColumnSource.getFirstElementIndex(1) + 1, 1, TEST_DATES[2], true, - false); - assertLookup(RegionedColumnSource.getLastElementIndex(1) - 1, 1, TEST_DATES[3], true, - false); - - assertLookup(RegionedColumnSource.getFirstElementIndex(4) + 2, 4, TEST_DATES[4], true, - false); - assertLookup(RegionedColumnSource.getLastElementIndex(4) - 2, 4, TEST_DATES[5], true, - false); - - assertLookup(RegionedColumnSource.getFirstElementIndex(8) + 3, 8, TEST_DATES[6], true, - false); - assertLookup(RegionedColumnSource.getLastElementIndex(8) - 3, 8, TEST_DATES[7], true, - false); - - assertLookup(RegionedColumnSource.getFirstElementIndex(9) + 4, 9, TEST_DATES[8], true, - false); - assertLookup(RegionedColumnSource.getLastElementIndex(9) - 4, 9, TEST_DATES[9], true, - false); + assertLookup(RegionedColumnSource.getFirstElementIndex(1) + 1, 1, TEST_DATES[2], true, false); + assertLookup(RegionedColumnSource.getLastElementIndex(1) - 1, 1, TEST_DATES[3], true, false); + + assertLookup(RegionedColumnSource.getFirstElementIndex(4) + 2, 4, TEST_DATES[4], true, false); + 
assertLookup(RegionedColumnSource.getLastElementIndex(4) - 2, 4, TEST_DATES[5], true, false); + + assertLookup(RegionedColumnSource.getFirstElementIndex(8) + 3, 8, TEST_DATES[6], true, false); + assertLookup(RegionedColumnSource.getLastElementIndex(8) - 3, 8, TEST_DATES[7], true, false); + + assertLookup(RegionedColumnSource.getFirstElementIndex(9) + 4, 9, TEST_DATES[8], true, false); + assertLookup(RegionedColumnSource.getLastElementIndex(9) - 4, 9, TEST_DATES[9], true, false); } @Test @@ -134,25 +118,17 @@ public void testGetReinterpreted() { assertLookup(0L, 0, TEST_DATES[0], false, true); assertLookup(RegionedColumnSource.getLastElementIndex(0), 0, TEST_DATES[1], false, true); - assertLookup(RegionedColumnSource.getFirstElementIndex(1) + 1, 1, TEST_DATES[2], false, - true); - assertLookup(RegionedColumnSource.getLastElementIndex(1) - 1, 1, TEST_DATES[3], false, - true); - - assertLookup(RegionedColumnSource.getFirstElementIndex(4) + 2, 4, TEST_DATES[4], false, - true); - assertLookup(RegionedColumnSource.getLastElementIndex(4) - 2, 4, TEST_DATES[5], false, - true); - - assertLookup(RegionedColumnSource.getFirstElementIndex(8) + 3, 8, TEST_DATES[6], false, - true); - assertLookup(RegionedColumnSource.getLastElementIndex(8) - 3, 8, TEST_DATES[7], false, - true); - - assertLookup(RegionedColumnSource.getFirstElementIndex(9) + 4, 9, TEST_DATES[8], false, - true); - assertLookup(RegionedColumnSource.getLastElementIndex(9) - 4, 9, TEST_DATES[9], false, - true); + assertLookup(RegionedColumnSource.getFirstElementIndex(1) + 1, 1, TEST_DATES[2], false, true); + assertLookup(RegionedColumnSource.getLastElementIndex(1) - 1, 1, TEST_DATES[3], false, true); + + assertLookup(RegionedColumnSource.getFirstElementIndex(4) + 2, 4, TEST_DATES[4], false, true); + assertLookup(RegionedColumnSource.getLastElementIndex(4) - 2, 4, TEST_DATES[5], false, true); + + assertLookup(RegionedColumnSource.getFirstElementIndex(8) + 3, 8, TEST_DATES[6], false, true); + 
assertLookup(RegionedColumnSource.getLastElementIndex(8) - 3, 8, TEST_DATES[7], false, true); + + assertLookup(RegionedColumnSource.getFirstElementIndex(9) + 4, 9, TEST_DATES[8], false, true); + assertLookup(RegionedColumnSource.getLastElementIndex(9) - 4, 9, TEST_DATES[9], false, true); } @Test @@ -163,20 +139,16 @@ public void testGetPrevReinterpreted() { assertLookup(0L, 0, TEST_DATES[0], true, true); assertLookup(RegionedColumnSource.getLastElementIndex(0), 0, TEST_DATES[1], true, true); - assertLookup(RegionedColumnSource.getFirstElementIndex(1) + 1, 1, TEST_DATES[2], true, - true); + assertLookup(RegionedColumnSource.getFirstElementIndex(1) + 1, 1, TEST_DATES[2], true, true); assertLookup(RegionedColumnSource.getLastElementIndex(1) - 1, 1, TEST_DATES[3], true, true); - assertLookup(RegionedColumnSource.getFirstElementIndex(4) + 2, 4, TEST_DATES[4], true, - true); + assertLookup(RegionedColumnSource.getFirstElementIndex(4) + 2, 4, TEST_DATES[4], true, true); assertLookup(RegionedColumnSource.getLastElementIndex(4) - 2, 4, TEST_DATES[5], true, true); - assertLookup(RegionedColumnSource.getFirstElementIndex(8) + 3, 8, TEST_DATES[6], true, - true); + assertLookup(RegionedColumnSource.getFirstElementIndex(8) + 3, 8, TEST_DATES[6], true, true); assertLookup(RegionedColumnSource.getLastElementIndex(8) - 3, 8, TEST_DATES[7], true, true); - assertLookup(RegionedColumnSource.getFirstElementIndex(9) + 4, 9, TEST_DATES[8], true, - true); + assertLookup(RegionedColumnSource.getFirstElementIndex(9) + 4, 9, TEST_DATES[8], true, true); assertLookup(RegionedColumnSource.getLastElementIndex(9) - 4, 9, TEST_DATES[9], true, true); } } diff --git a/DB/src/test/java/io/deephaven/db/v2/sources/regioned/TestRegionedColumnSourceManager.java b/DB/src/test/java/io/deephaven/db/v2/sources/regioned/TestRegionedColumnSourceManager.java index 540c2059672..23db0366ac9 100644 --- a/DB/src/test/java/io/deephaven/db/v2/sources/regioned/TestRegionedColumnSourceManager.java +++ 
b/DB/src/test/java/io/deephaven/db/v2/sources/regioned/TestRegionedColumnSourceManager.java @@ -90,45 +90,43 @@ public void setUp() throws Exception { groupingColumnDefinition = ColumnDefinition.ofString("RCS_1").withGrouping(); normalColumnDefinition = ColumnDefinition.ofString("RCS_2"); - columnDefinitions = new ColumnDefinition[] {partitioningColumnDefinition, - groupingColumnDefinition, normalColumnDefinition}; + columnDefinitions = + new ColumnDefinition[] {partitioningColumnDefinition, groupingColumnDefinition, normalColumnDefinition}; columnSources = IntStream.range(0, NUM_COLUMNS) - .mapToObj(ci -> mock(RegionedColumnSource.class, columnDefinitions[ci].getName())) - .toArray(RegionedColumnSource[]::new); + .mapToObj(ci -> mock(RegionedColumnSource.class, columnDefinitions[ci].getName())) + .toArray(RegionedColumnSource[]::new); partitioningColumnSource = columnSources[PARTITIONING_INDEX]; groupingColumnSource = columnSources[GROUPING_INDEX]; normalColumnSource = columnSources[NORMAL_INDEX]; checking(new Expectations() { { - oneOf(componentFactory).createRegionedColumnSource( - with(same(partitioningColumnDefinition)), with(ColumnToCodecMappings.EMPTY)); + oneOf(componentFactory).createRegionedColumnSource(with(same(partitioningColumnDefinition)), + with(ColumnToCodecMappings.EMPTY)); will(returnValue(partitioningColumnSource)); - oneOf(componentFactory).createRegionedColumnSource( - with(same(groupingColumnDefinition)), with(ColumnToCodecMappings.EMPTY)); + oneOf(componentFactory).createRegionedColumnSource(with(same(groupingColumnDefinition)), + with(ColumnToCodecMappings.EMPTY)); will(returnValue(groupingColumnSource)); - oneOf(componentFactory).createRegionedColumnSource( - with(same(normalColumnDefinition)), with(ColumnToCodecMappings.EMPTY)); + oneOf(componentFactory).createRegionedColumnSource(with(same(normalColumnDefinition)), + with(ColumnToCodecMappings.EMPTY)); will(returnValue(normalColumnSource)); } }); columnLocations = new 
ColumnLocation[NUM_LOCATIONS][NUM_COLUMNS]; - IntStream.range(0, NUM_LOCATIONS) - .forEach(li -> IntStream.range(0, NUM_COLUMNS).forEach(ci -> { - final ColumnLocation cl = - columnLocations[li][ci] = mock(ColumnLocation.class, "CL_" + li + '_' + ci); - checking(new Expectations() { - { - allowing((cl)).getName(); - will(returnValue(columnDefinitions[ci].getName())); - } - }); - })); + IntStream.range(0, NUM_LOCATIONS).forEach(li -> IntStream.range(0, NUM_COLUMNS).forEach(ci -> { + final ColumnLocation cl = columnLocations[li][ci] = mock(ColumnLocation.class, "CL_" + li + '_' + ci); + checking(new Expectations() { + { + allowing((cl)).getName(); + will(returnValue(columnDefinitions[ci].getName())); + } + }); + })); - tableLocations = IntStream.range(0, NUM_LOCATIONS) - .mapToObj(li -> setUpTableLocation(li, "")).toArray(TableLocation[]::new); + tableLocations = IntStream.range(0, NUM_LOCATIONS).mapToObj(li -> setUpTableLocation(li, "")) + .toArray(TableLocation[]::new); tableLocation0A = tableLocations[0]; tableLocation1A = tableLocations[1]; tableLocation0B = tableLocations[2]; @@ -147,7 +145,7 @@ public void setUp() throws Exception { } private ImmutableTableLocationKey makeTableKey(@NotNull final String internalPartitionValue, - @NotNull final String columnPartitionValue) { + @NotNull final String columnPartitionValue) { final Map> partitions = new LinkedHashMap<>(); partitions.put(partitioningColumnDefinition.getName(), columnPartitionValue); partitions.put("__IP__", internalPartitionValue); @@ -198,7 +196,7 @@ public Object invoke(Invocation invocation) { private Map makeColumnSourceMap() { final Map result = new LinkedHashMap<>(); IntStream.range(0, columnDefinitions.length) - .forEachOrdered(ci -> result.put(columnDefinitions[ci].getName(), columnSources[ci])); + .forEachOrdered(ci -> result.put(columnDefinitions[ci].getName(), columnSources[ci])); return result; } @@ -240,8 +238,7 @@ public Object invoke(Invocation invocation) { will(new 
CustomAction("Capture grouping column grouping provider") { @Override public Object invoke(Invocation invocation) { - groupingColumnGroupingProvider = - (KeyRangeGroupingProvider) invocation.getParameter(0); + groupingColumnGroupingProvider = (KeyRangeGroupingProvider) invocation.getParameter(0); return null; } }); @@ -281,14 +278,12 @@ private void setSizeExpectations(final boolean refreshing, final long... sizes) { oneOf(tl).supportsSubscriptions(); will(returnValue(true)); - oneOf(tl).subscribe( - with(any(TableLocationUpdateSubscriptionBuffer.class))); + oneOf(tl).subscribe(with(any(TableLocationUpdateSubscriptionBuffer.class))); will(new CustomAction("Capture subscription buffer") { @Override public Object invoke(Invocation invocation) { subscriptionBuffers[li] = - (TableLocationUpdateSubscriptionBuffer) invocation - .getParameter(0); + (TableLocationUpdateSubscriptionBuffer) invocation.getParameter(0); subscriptionBuffers[li].handleUpdate(); return null; } @@ -320,18 +315,18 @@ public Object invoke(Invocation invocation) { IntStream.range(0, NUM_COLUMNS).forEach(ci -> checking(new Expectations() { { oneOf(columnSources[ci]).addRegion(with(columnDefinitions[ci]), - with(columnLocations[li][ci])); + with(columnLocations[li][ci])); will(returnValue(regionIndex)); } })); } newExpectedIndex.insertRange( - RegionedColumnSource.getFirstElementIndex(regionIndex), - RegionedColumnSource.getFirstElementIndex(regionIndex) + size - 1); - expectedPartitioningColumnGrouping - .computeIfAbsent(cp, cpk -> Index.FACTORY.getEmptyIndex()).insertRange( RegionedColumnSource.getFirstElementIndex(regionIndex), RegionedColumnSource.getFirstElementIndex(regionIndex) + size - 1); + expectedPartitioningColumnGrouping.computeIfAbsent(cp, cpk -> Index.FACTORY.getEmptyIndex()) + .insertRange( + RegionedColumnSource.getFirstElementIndex(regionIndex), + RegionedColumnSource.getFirstElementIndex(regionIndex) + size - 1); } }); expectedAddedIndex = newExpectedIndex.minus(expectedIndex); @@ 
-344,20 +339,17 @@ private void checkIndexes(@NotNull final Index addedIndex) { if (partitioningColumnGrouping == null) { assertTrue(expectedPartitioningColumnGrouping.isEmpty()); } else { - assertEquals(expectedPartitioningColumnGrouping.keySet(), - partitioningColumnGrouping.keySet()); - expectedPartitioningColumnGrouping - .forEach((final String columnPartition, final Index expectedGrouping) -> { - final Index grouping = partitioningColumnGrouping.get(columnPartition); - assertIndexEquals(expectedGrouping, grouping); - }); + assertEquals(expectedPartitioningColumnGrouping.keySet(), partitioningColumnGrouping.keySet()); + expectedPartitioningColumnGrouping.forEach((final String columnPartition, final Index expectedGrouping) -> { + final Index grouping = partitioningColumnGrouping.get(columnPartition); + assertIndexEquals(expectedGrouping, grouping); + }); } } @Test public void testStaticBasics() { - SUT = new RegionedColumnSourceManager(false, componentFactory, ColumnToCodecMappings.EMPTY, - columnDefinitions); + SUT = new RegionedColumnSourceManager(false, componentFactory, ColumnToCodecMappings.EMPTY, columnDefinitions); assertEquals(makeColumnSourceMap(), SUT.getColumnSources()); assertTrue(SUT.isEmpty()); @@ -366,8 +358,7 @@ public void testStaticBasics() { // Add a few locations Arrays.stream(tableLocations).limit(2).forEach(SUT::addLocation); - assertEquals(Arrays.stream(tableLocations).limit(2).collect(Collectors.toList()), - SUT.allLocations()); + assertEquals(Arrays.stream(tableLocations).limit(2).collect(Collectors.toList()), SUT.allLocations()); assertTrue(SUT.includedLocations().isEmpty()); // Try adding an identical duplicate @@ -377,8 +368,7 @@ public void testStaticBasics() { } catch (TableDataException expected) { maybePrintStackTrace(expected); } - assertEquals(Arrays.stream(tableLocations).limit(2).collect(Collectors.toList()), - SUT.allLocations()); + assertEquals(Arrays.stream(tableLocations).limit(2).collect(Collectors.toList()), 
SUT.allLocations()); assertTrue(SUT.includedLocations().isEmpty()); // Try adding an matching-but-not-identical duplicate @@ -388,14 +378,12 @@ public void testStaticBasics() { } catch (TableDataException expected) { maybePrintStackTrace(expected); } - assertEquals(Arrays.stream(tableLocations).limit(2).collect(Collectors.toList()), - SUT.allLocations()); + assertEquals(Arrays.stream(tableLocations).limit(2).collect(Collectors.toList()), SUT.allLocations()); assertTrue(SUT.includedLocations().isEmpty()); // Add the rest Arrays.stream(tableLocations).skip(2).forEach(SUT::addLocation); - assertEquals(Arrays.stream(tableLocations).collect(Collectors.toList()), - SUT.allLocations()); + assertEquals(Arrays.stream(tableLocations).collect(Collectors.toList()), SUT.allLocations()); assertTrue(SUT.includedLocations().isEmpty()); // Test refresh @@ -409,8 +397,7 @@ public void testStaticBasics() { @Test public void testStaticOverflow() { - SUT = new RegionedColumnSourceManager(false, componentFactory, ColumnToCodecMappings.EMPTY, - columnDefinitions); + SUT = new RegionedColumnSourceManager(false, componentFactory, ColumnToCodecMappings.EMPTY, columnDefinitions); // Add a location SUT.addLocation(tableLocation0A); @@ -434,8 +421,7 @@ public void testStaticOverflow() { @Test public void testRefreshing() { - SUT = new RegionedColumnSourceManager(true, componentFactory, ColumnToCodecMappings.EMPTY, - columnDefinitions); + SUT = new RegionedColumnSourceManager(true, componentFactory, ColumnToCodecMappings.EMPTY, columnDefinitions); assertEquals(makeColumnSourceMap(), SUT.getColumnSources()); assertTrue(SUT.isEmpty()); @@ -461,8 +447,7 @@ public void testRefreshing() { // Add a few locations Arrays.stream(tableLocations).limit(2).forEach(SUT::addLocation); - assertEquals(Arrays.stream(tableLocations).limit(2).collect(Collectors.toList()), - SUT.allLocations()); + assertEquals(Arrays.stream(tableLocations).limit(2).collect(Collectors.toList()), SUT.allLocations()); 
assertTrue(SUT.includedLocations().isEmpty()); // Refresh them @@ -503,35 +488,30 @@ public void testRefreshing() { // Add the rest Arrays.stream(tableLocations).skip(2).forEach(SUT::addLocation); - assertEquals(Arrays.stream(tableLocations).collect(Collectors.toList()), - SUT.allLocations()); + assertEquals(Arrays.stream(tableLocations).collect(Collectors.toList()), SUT.allLocations()); assertEquals(Arrays.asList(tableLocation0A, tableLocation1A), SUT.includedLocations()); // Test refresh with new locations included setSizeExpectations(true, 5, REGION_CAPACITY_IN_ELEMENTS, 5003, NULL_SIZE); checkIndexes(SUT.refresh()); - assertEquals(Arrays.asList(tableLocation0A, tableLocation1A, tableLocation0B), - SUT.includedLocations()); + assertEquals(Arrays.asList(tableLocation0A, tableLocation1A, tableLocation0B), SUT.includedLocations()); // Test no-op refresh setSizeExpectations(true, 5, REGION_CAPACITY_IN_ELEMENTS, 5003, NULL_SIZE); checkIndexes(SUT.refresh()); - assertEquals(Arrays.asList(tableLocation0A, tableLocation1A, tableLocation0B), - SUT.includedLocations()); + assertEquals(Arrays.asList(tableLocation0A, tableLocation1A, tableLocation0B), SUT.includedLocations()); // Test refresh with a location updated from null to not setSizeExpectations(true, 5, REGION_CAPACITY_IN_ELEMENTS, 5003, 2); checkIndexes(SUT.refresh()); - assertEquals( - Arrays.asList(tableLocation0A, tableLocation1A, tableLocation0B, tableLocation1B), - SUT.includedLocations()); + assertEquals(Arrays.asList(tableLocation0A, tableLocation1A, tableLocation0B, tableLocation1B), + SUT.includedLocations()); // Test refresh with a location updated setSizeExpectations(true, 5, REGION_CAPACITY_IN_ELEMENTS, 5003, 10000002); checkIndexes(SUT.refresh()); - assertEquals( - Arrays.asList(tableLocation0A, tableLocation1A, tableLocation0B, tableLocation1B), - SUT.includedLocations()); + assertEquals(Arrays.asList(tableLocation0A, tableLocation1A, tableLocation0B, tableLocation1B), + SUT.includedLocations()); // 
Test refresh with a size decrease setSizeExpectations(true, 5, REGION_CAPACITY_IN_ELEMENTS, 5003, 2); @@ -541,9 +521,8 @@ public void testRefreshing() { } catch (AssertionFailure expected) { maybePrintStackTrace(expected); } - assertEquals( - Arrays.asList(tableLocation0A, tableLocation1A, tableLocation0B, tableLocation1B), - SUT.includedLocations()); + assertEquals(Arrays.asList(tableLocation0A, tableLocation1A, tableLocation0B, tableLocation1B), + SUT.includedLocations()); // Test refresh with a location truncated setSizeExpectations(true, 5, REGION_CAPACITY_IN_ELEMENTS, 5003, NULL_SIZE); @@ -553,22 +532,19 @@ public void testRefreshing() { } catch (TableDataException expected) { maybePrintStackTrace(expected); } - assertEquals( - Arrays.asList(tableLocation0A, tableLocation1A, tableLocation0B, tableLocation1B), - SUT.includedLocations()); + assertEquals(Arrays.asList(tableLocation0A, tableLocation1A, tableLocation0B, tableLocation1B), + SUT.includedLocations()); // Test refresh with an overflow - setSizeExpectations(true, 5, REGION_CAPACITY_IN_ELEMENTS, 5003, - REGION_CAPACITY_IN_ELEMENTS + 1); + setSizeExpectations(true, 5, REGION_CAPACITY_IN_ELEMENTS, 5003, REGION_CAPACITY_IN_ELEMENTS + 1); try { checkIndexes(SUT.refresh()); fail("Expected exception"); } catch (TableDataException expected) { maybePrintStackTrace(expected); } - assertEquals( - Arrays.asList(tableLocation0A, tableLocation1A, tableLocation0B, tableLocation1B), - SUT.includedLocations()); + assertEquals(Arrays.asList(tableLocation0A, tableLocation1A, tableLocation0B, tableLocation1B), + SUT.includedLocations()); // Test refresh with an exception subscriptionBuffers[3].handleException(new TableDataException("TEST")); @@ -578,9 +554,8 @@ public void testRefreshing() { } catch (TableDataException expected) { assertEquals("TEST", expected.getCause().getMessage()); } - assertEquals( - Arrays.asList(tableLocation0A, tableLocation1A, tableLocation0B, tableLocation1B), - SUT.includedLocations()); + 
assertEquals(Arrays.asList(tableLocation0A, tableLocation1A, tableLocation0B, tableLocation1B), + SUT.includedLocations()); } private static void maybePrintStackTrace(@NotNull final Exception e) { diff --git a/DB/src/test/java/io/deephaven/db/v2/sources/regioned/TstColumnRegionPrimative.java b/DB/src/test/java/io/deephaven/db/v2/sources/regioned/TstColumnRegionPrimative.java index f53424307f5..ad6be1a230e 100644 --- a/DB/src/test/java/io/deephaven/db/v2/sources/regioned/TstColumnRegionPrimative.java +++ b/DB/src/test/java/io/deephaven/db/v2/sources/regioned/TstColumnRegionPrimative.java @@ -14,8 +14,7 @@ * Base class for testing {@link ColumnRegion} implementations. */ @SuppressWarnings("JUnit4AnnotatedMethodInJUnit3TestCase") -abstract class TstColumnRegionPrimative> - extends BaseArrayTestCase { +abstract class TstColumnRegionPrimative> extends BaseArrayTestCase { REGION_TYPE SUT; @@ -23,7 +22,7 @@ abstract class TstColumnRegionPrimative> - extends TstColumnRegionPrimative { + extends TstColumnRegionPrimative { Supplier regionSupplier; } diff --git a/DB/src/test/java/io/deephaven/db/v2/sources/regioned/TstRegionedColumnSourcePrimitive.java b/DB/src/test/java/io/deephaven/db/v2/sources/regioned/TstRegionedColumnSourcePrimitive.java index 9f1e9c2ad99..fc651e8061f 100644 --- a/DB/src/test/java/io/deephaven/db/v2/sources/regioned/TstRegionedColumnSourcePrimitive.java +++ b/DB/src/test/java/io/deephaven/db/v2/sources/regioned/TstRegionedColumnSourcePrimitive.java @@ -27,25 +27,24 @@ */ @SuppressWarnings({"AnonymousInnerClassMayBeStatic", "JUnit4AnnotatedMethodInJUnit3TestCase"}) public abstract class TstRegionedColumnSourcePrimitive> - extends BaseCachedJMockTestCase { + extends BaseCachedJMockTestCase { static final byte[] TEST_BYTES = - new byte[] {NULL_BYTE, 0, 1, 2, Byte.MIN_VALUE + 1, Byte.MAX_VALUE, 100, 126, -56, -1}; + new byte[] {NULL_BYTE, 0, 1, 2, Byte.MIN_VALUE + 1, Byte.MAX_VALUE, 100, 126, -56, -1}; @SuppressWarnings("AutoBoxing") - static final 
Boolean[] TEST_BOOLEANS = new Boolean[] {NULL_BOOLEAN, true, false, NULL_BOOLEAN, - false, true, true, false, false, true}; - static final char[] TEST_CHARS = - new char[] {NULL_CHAR, 'A', 'B', 'C', 'D', '1', '2', '3', '4', '5'}; - static final short[] TEST_SHORTS = new short[] {NULL_SHORT, 0, 1, 2, Short.MIN_VALUE + 1, - Short.MAX_VALUE, 10000, 126, -5600, -1}; - static final int[] TEST_INTS = new int[] {NULL_INT, 0, 1, 2, Integer.MIN_VALUE + 1, - Integer.MAX_VALUE, 1000000000, 126, -560000000, -1}; - static final long[] TEST_LONGS = new long[] {NULL_LONG, 0, 1, 2, Long.MIN_VALUE + 1, - Long.MAX_VALUE, 1000000000000000000L, 12659, -5600000000000000000L, -1L}; - static final float[] TEST_FLOATS = new float[] {NULL_FLOAT, 0.1f, 1.2f, 2.3f, - Float.MIN_VALUE + 1.4f, Float.MAX_VALUE, 100.123f, 126000f, -56869.2f, -1.0f}; - static final double[] TEST_DOUBLES = new double[] {NULL_DOUBLE, 0.1, 1.2, 2.3, - Double.MIN_VALUE + 1.4, Double.MAX_VALUE, 100.123, 126000, -56869.2, -1.0}; + static final Boolean[] TEST_BOOLEANS = + new Boolean[] {NULL_BOOLEAN, true, false, NULL_BOOLEAN, false, true, true, false, false, true}; + static final char[] TEST_CHARS = new char[] {NULL_CHAR, 'A', 'B', 'C', 'D', '1', '2', '3', '4', '5'}; + static final short[] TEST_SHORTS = + new short[] {NULL_SHORT, 0, 1, 2, Short.MIN_VALUE + 1, Short.MAX_VALUE, 10000, 126, -5600, -1}; + static final int[] TEST_INTS = + new int[] {NULL_INT, 0, 1, 2, Integer.MIN_VALUE + 1, Integer.MAX_VALUE, 1000000000, 126, -560000000, -1}; + static final long[] TEST_LONGS = new long[] {NULL_LONG, 0, 1, 2, Long.MIN_VALUE + 1, Long.MAX_VALUE, + 1000000000000000000L, 12659, -5600000000000000000L, -1L}; + static final float[] TEST_FLOATS = new float[] {NULL_FLOAT, 0.1f, 1.2f, 2.3f, Float.MIN_VALUE + 1.4f, + Float.MAX_VALUE, 100.123f, 126000f, -56869.2f, -1.0f}; + static final double[] TEST_DOUBLES = new double[] {NULL_DOUBLE, 0.1, 1.2, 2.3, Double.MIN_VALUE + 1.4, + Double.MAX_VALUE, 100.123, 126000, -56869.2, -1.0}; 
REGION_TYPE[] cr; RegionedColumnSourceBase SUT; diff --git a/DB/src/test/java/io/deephaven/db/v2/sources/regioned/TstRegionedColumnSourceReferencing.java b/DB/src/test/java/io/deephaven/db/v2/sources/regioned/TstRegionedColumnSourceReferencing.java index c2f3b7efffb..8d1b3201a99 100644 --- a/DB/src/test/java/io/deephaven/db/v2/sources/regioned/TstRegionedColumnSourceReferencing.java +++ b/DB/src/test/java/io/deephaven/db/v2/sources/regioned/TstRegionedColumnSourceReferencing.java @@ -14,8 +14,7 @@ */ @SuppressWarnings({"AnonymousInnerClassMayBeStatic"}) public abstract class TstRegionedColumnSourceReferencing> - extends - TstRegionedColumnSourcePrimitive> { + extends TstRegionedColumnSourcePrimitive> { NATIVE_REGION_TYPE[] cr_n; diff --git a/DB/src/test/java/io/deephaven/db/v2/ssa/ReplicateSegmentedSortedArrayTests.java b/DB/src/test/java/io/deephaven/db/v2/ssa/ReplicateSegmentedSortedArrayTests.java index 7e4becd784c..d28a4cebada 100644 --- a/DB/src/test/java/io/deephaven/db/v2/ssa/ReplicateSegmentedSortedArrayTests.java +++ b/DB/src/test/java/io/deephaven/db/v2/ssa/ReplicateSegmentedSortedArrayTests.java @@ -13,10 +13,9 @@ public class ReplicateSegmentedSortedArrayTests { public static void main(String[] args) throws IOException { ReplicateSegmentedSortedArray.main(args); - ReplicatePrimitiveCode.charToAllButBoolean(TestCharSegmentedSortedArray.class, - ReplicatePrimitiveCode.TEST_SRC); - final String objectSsaTest = ReplicatePrimitiveCode - .charToObject(TestCharSegmentedSortedArray.class, ReplicatePrimitiveCode.TEST_SRC); + ReplicatePrimitiveCode.charToAllButBoolean(TestCharSegmentedSortedArray.class, ReplicatePrimitiveCode.TEST_SRC); + final String objectSsaTest = ReplicatePrimitiveCode.charToObject(TestCharSegmentedSortedArray.class, + ReplicatePrimitiveCode.TEST_SRC); fixupObjectSsaTest(objectSsaTest); } diff --git a/DB/src/test/java/io/deephaven/db/v2/ssa/SsaTestHelpers.java b/DB/src/test/java/io/deephaven/db/v2/ssa/SsaTestHelpers.java index 
939f653a2fc..1a860fd556c 100644 --- a/DB/src/test/java/io/deephaven/db/v2/ssa/SsaTestHelpers.java +++ b/DB/src/test/java/io/deephaven/db/v2/ssa/SsaTestHelpers.java @@ -9,8 +9,7 @@ public class SsaTestHelpers { @NotNull public static TstUtils.SortedIntGenerator getGeneratorForChar() { - return new TstUtils.SortedIntGenerator((int) Character.MIN_VALUE + 1, - (int) Character.MAX_VALUE - 1); + return new TstUtils.SortedIntGenerator((int) Character.MIN_VALUE + 1, (int) Character.MAX_VALUE - 1); } public static Table prepareTestTableForChar(QueryTable table) { @@ -77,10 +76,9 @@ public static TstUtils.SortedIntGenerator getGeneratorForObject() { } public static Table prepareTestTableForObject(QueryTable table) { - // an update might be faster, but updateView ensures we break when object equality is not - // the same as == + // an update might be faster, but updateView ensures we break when object equality is not the same as == return LiveTableMonitor.DEFAULT.sharedLock() - .computeLocked(() -> table.updateView("Value=String.format(`%06d`, Value)")); + .computeLocked(() -> table.updateView("Value=String.format(`%06d`, Value)")); } public static final class TestDescriptor { @@ -108,8 +106,7 @@ public void advance() { @Override public String toString() { - return "seed = " + seed + ", tableSize=" + tableSize + ", nodeSize=" + nodeSize - + ", step = " + step; + return "seed = " + seed + ", tableSize=" + tableSize + ", nodeSize=" + nodeSize + ", step = " + step; } public int seed() { diff --git a/DB/src/test/java/io/deephaven/db/v2/ssms/ReplicateSegmentedSortedMultisetTests.java b/DB/src/test/java/io/deephaven/db/v2/ssms/ReplicateSegmentedSortedMultisetTests.java index a8198925c02..072a60ff4ba 100644 --- a/DB/src/test/java/io/deephaven/db/v2/ssms/ReplicateSegmentedSortedMultisetTests.java +++ b/DB/src/test/java/io/deephaven/db/v2/ssms/ReplicateSegmentedSortedMultisetTests.java @@ -16,13 +16,13 @@ public static void main(String[] args) throws IOException { 
ReplicateSegmentedSortedMultiset.main(args); ReplicatePrimitiveCode.charToAllButBooleanAndFloats(TestCharSegmentedSortedMultiset.class, - ReplicatePrimitiveCode.TEST_SRC); + ReplicatePrimitiveCode.TEST_SRC); fixupFloatTests(ReplicatePrimitiveCode.charToFloat(TestCharSegmentedSortedMultiset.class, - ReplicatePrimitiveCode.TEST_SRC, null)); + ReplicatePrimitiveCode.TEST_SRC, null)); fixupFloatTests(ReplicatePrimitiveCode.charToDouble(TestCharSegmentedSortedMultiset.class, - ReplicatePrimitiveCode.TEST_SRC, null)); - final String objectSsaTest = ReplicatePrimitiveCode - .charToObject(TestCharSegmentedSortedMultiset.class, ReplicatePrimitiveCode.TEST_SRC); + ReplicatePrimitiveCode.TEST_SRC, null)); + final String objectSsaTest = ReplicatePrimitiveCode.charToObject(TestCharSegmentedSortedMultiset.class, + ReplicatePrimitiveCode.TEST_SRC); fixupObjectSsaTest(objectSsaTest); } @@ -37,10 +37,10 @@ private static void fixupObjectSsaTest(String objectPath) throws IOException { final File objectFile = new File(objectPath); List lines = FileUtils.readLines(objectFile, Charset.defaultCharset()); lines = globalReplacements(lines, "NULL_OBJECT", "null", - "new ObjectSegmentedSortedMultiset\\(nodeSize\\)", - "new ObjectSegmentedSortedMultiset(nodeSize, Object.class)", - "new ObjectSegmentedSortedMultiset\\(desc.nodeSize\\(\\)\\)", - "new ObjectSegmentedSortedMultiset(desc.nodeSize(), Object.class)"); + "new ObjectSegmentedSortedMultiset\\(nodeSize\\)", + "new ObjectSegmentedSortedMultiset(nodeSize, Object.class)", + "new ObjectSegmentedSortedMultiset\\(desc.nodeSize\\(\\)\\)", + "new ObjectSegmentedSortedMultiset(desc.nodeSize(), Object.class)"); lines = removeImport(lines, "\\s*import static.*QueryConstants.*;"); lines = removeRegion(lines, "SortFixupSanityCheck"); FileUtils.writeLines(objectFile, ReplicateUtilities.fixupChunkAttributes(lines)); diff --git a/DB/src/test/java/io/deephaven/db/v2/utils/ChunkUtilsTest.java 
b/DB/src/test/java/io/deephaven/db/v2/utils/ChunkUtilsTest.java index 3af809d433b..52855fa9433 100644 --- a/DB/src/test/java/io/deephaven/db/v2/utils/ChunkUtilsTest.java +++ b/DB/src/test/java/io/deephaven/db/v2/utils/ChunkUtilsTest.java @@ -22,8 +22,7 @@ public class ChunkUtilsTest { @Test public void testChunkMaxSizeDoesntOverflowLongAccumulator() { - // Note we accumulate using longs and need to verify adding the next value to the - // accumulator will not overflow. + // Note we accumulate using longs and need to verify adding the next value to the accumulator will not overflow. assertTrue((long) (Chunk.MAXIMUM_SIZE) * 2 <= Long.MAX_VALUE); } @@ -47,8 +46,7 @@ public void testConvertToRangesMaxChunkSizeExactLimit() { private void tryMaxChunkSizeTest(final long limit) { final LongChunk chunk = createChunk(0, 2); // mind the gap - final LongChunk newChunk = - (limit == 0) ? ChunkUtils.convertToOrderedKeyRanges(chunk) + final LongChunk newChunk = (limit == 0) ? ChunkUtils.convertToOrderedKeyRanges(chunk) : ChunkUtils.convertToOrderedKeyRanges(chunk, limit); validateChunk(newChunk, 0, 0, 2, 2); } @@ -56,59 +54,59 @@ private void tryMaxChunkSizeTest(final long limit) { @Test public void testConvertToRangesSingleton() { validateChunk(ChunkUtils.convertToOrderedKeyRanges(createChunk( - 42)), - 42, 42); + 42)), + 42, 42); } @Test public void testConvertToRangesSingleRange() { validateChunk(ChunkUtils.convertToOrderedKeyRanges(createChunk( - 2, 3, 4, 5, 6, 7, 8, 9, 10, 11)), - 2, 11); + 2, 3, 4, 5, 6, 7, 8, 9, 10, 11)), + 2, 11); } @Test public void testConvertToRangesMultipleRanges() { validateChunk(ChunkUtils.convertToOrderedKeyRanges(createChunk( - 2, 3, 4, 8, 10, 11, 12, 13, 15)), - 2, 4, 8, 8, 10, 13, 15, 15); + 2, 3, 4, 8, 10, 11, 12, 13, 15)), + 2, 4, 8, 8, 10, 13, 15, 15); } @Test public void testConvertToRangesExtremes() { final int MI = Integer.MAX_VALUE; validateChunk(ChunkUtils.convertToOrderedKeyRanges(createChunk( - 0, 1, 2, 3, 4, MI - 4, MI - 3, MI - 2, MI 
- 1, MI)), - 0, 4, MI - 4, MI); + 0, 1, 2, 3, 4, MI - 4, MI - 3, MI - 2, MI - 1, MI)), + 0, 4, MI - 4, MI); } @Test public void testConvertToIndicesSingleton() { validateChunk(ChunkUtils.convertToOrderedKeyIndices(createChunk( - 1022, 1022)), - 1022); + 1022, 1022)), + 1022); } @Test public void testConvertToIndicesSingleRange() { validateChunk(ChunkUtils.convertToOrderedKeyIndices(createChunk( - 512, 519)), - 512, 513, 514, 515, 516, 517, 518, 519); + 512, 519)), + 512, 513, 514, 515, 516, 517, 518, 519); } @Test public void testConvertToIndicesMultipleRanges() { validateChunk(ChunkUtils.convertToOrderedKeyIndices(createChunk( - 1, 5, 7, 7, 9, 12)), - 1, 2, 3, 4, 5, 7, 9, 10, 11, 12); + 1, 5, 7, 7, 9, 12)), + 1, 2, 3, 4, 5, 7, 9, 10, 11, 12); } @Test public void testConvertToIndicesExtremes() { final int MI = Integer.MAX_VALUE; validateChunk(ChunkUtils.convertToOrderedKeyIndices(createChunk( - 0, 4, MI - 4, MI)), - 0, 1, 2, 3, 4, MI - 4, MI - 3, MI - 2, MI - 1, MI); + 0, 4, MI - 4, MI)), + 0, 1, 2, 3, 4, MI - 4, MI - 3, MI - 2, MI - 1, MI); } @Test(expected = SizeException.class) @@ -125,8 +123,7 @@ public void testConvertRoundTripEveryOther() { for (int i = 0; i < indices.length; ++i) { indices[i] = ranges[i * 2] = ranges[i * 2 + 1] = 2 * i; } - validateChunk(createChunk(indices), - ChunkUtils.convertToOrderedKeyIndices(createChunk(ranges))); + validateChunk(createChunk(indices), ChunkUtils.convertToOrderedKeyIndices(createChunk(ranges))); } @Test @@ -135,11 +132,9 @@ public void testConvertRoundTripRandom() { for (final int avgElementsPerRange : new int[] {1, 4, 64, 1_000_000}) { for (final int sparsityFactor : new int[] {1, 10, 1_000}) { final Pair, LongChunk> chunks = - generateChunks(indexCount, avgElementsPerRange, sparsityFactor); - validateChunk(chunks.second, - ChunkUtils.convertToOrderedKeyIndices(chunks.first)); - validateChunk(chunks.first, - ChunkUtils.convertToOrderedKeyRanges(chunks.second)); + generateChunks(indexCount, avgElementsPerRange, 
sparsityFactor); + validateChunk(chunks.second, ChunkUtils.convertToOrderedKeyIndices(chunks.first)); + validateChunk(chunks.first, ChunkUtils.convertToOrderedKeyRanges(chunks.second)); } } } @@ -153,40 +148,35 @@ private LongChunk createChunk(final long... values) { return chunk; } - private void validateChunk(final LongChunk chunk, - final long... values) { + private void validateChunk(final LongChunk chunk, final long... values) { assertEquals(values.length, chunk.size()); for (int idx = 0; idx < values.length; ++idx) { assertEquals(values[idx], chunk.get(idx)); } } - private void validateChunk(final LongChunk expected, - final LongChunk actual) { + private void validateChunk(final LongChunk expected, final LongChunk actual) { assertEquals(expected.size(), actual.size()); for (int idx = 0; idx < expected.size(); ++idx) { assertEquals(expected.get(idx), actual.get(idx)); } } - private Pair, LongChunk> generateChunks( - final int indexCount, - final int avgElementsPerRange, - final int sparsityFactor) { + private Pair, LongChunk> generateChunks(final int indexCount, + final int avgElementsPerRange, + final int sparsityFactor) { final Random random = new Random(0); final long[] indexPoints = new long[indexCount]; - final int rangeCount = - Math.max(1, (indexCount + avgElementsPerRange / 2) / avgElementsPerRange); + final int rangeCount = Math.max(1, (indexCount + avgElementsPerRange / 2) / avgElementsPerRange); final long[] indexRanges = new long[rangeCount * 2]; long lastPos = 0; int remainingCount = indexCount; int j = 0; for (int i = 0; i < rangeCount - 1; i++) { // This is +2 to ensure that there is a gap between two ranges. 
- indexRanges[2 * i] = - lastPos + 2 + random.nextInt(2 * avgElementsPerRange - 1) * sparsityFactor; + indexRanges[2 * i] = lastPos + 2 + random.nextInt(2 * avgElementsPerRange - 1) * sparsityFactor; final int step = 1 + Math.max(0, Math.min(random.nextInt(2 * avgElementsPerRange - 1), - remainingCount - rangeCount)); + remainingCount - rangeCount)); lastPos = indexRanges[2 * i + 1] = indexRanges[2 * i] + step - 1; remainingCount -= step; indexPoints[j++] = indexRanges[2 * i]; diff --git a/DB/src/test/java/io/deephaven/db/v2/utils/CumulativeUtilTest.java b/DB/src/test/java/io/deephaven/db/v2/utils/CumulativeUtilTest.java index 625b1f53a17..f7427bda13c 100644 --- a/DB/src/test/java/io/deephaven/db/v2/utils/CumulativeUtilTest.java +++ b/DB/src/test/java/io/deephaven/db/v2/utils/CumulativeUtilTest.java @@ -43,14 +43,12 @@ public void testRollingSum() { String rollingSum = "RollingSum"; Table t2 = CumulativeUtil.rollingSum(t, 5, rollingSum, "Row"); assertColumnEquals(t2, rollingSum, - new double[] {QueryConstants.NULL_DOUBLE, QueryConstants.NULL_DOUBLE, - QueryConstants.NULL_DOUBLE, QueryConstants.NULL_DOUBLE, - QueryConstants.NULL_DOUBLE, 15, 20, 25, 30, 35}); + new double[] {QueryConstants.NULL_DOUBLE, QueryConstants.NULL_DOUBLE, QueryConstants.NULL_DOUBLE, + QueryConstants.NULL_DOUBLE, QueryConstants.NULL_DOUBLE, 15, 20, 25, 30, 35}); t2 = CumulativeUtil.rollingSum(t, 3, rollingSum, "Row * 2"); - assertColumnEquals(t2, rollingSum, - new double[] {QueryConstants.NULL_DOUBLE, QueryConstants.NULL_DOUBLE, - QueryConstants.NULL_DOUBLE, 12, 18, 24, 30, 36, 42, 48}); + assertColumnEquals(t2, rollingSum, new double[] {QueryConstants.NULL_DOUBLE, QueryConstants.NULL_DOUBLE, + QueryConstants.NULL_DOUBLE, 12, 18, 24, 30, 36, 42, 48}); } private void assertColumnEquals(final Table t, final String column, final double[] values) { diff --git a/DB/src/test/java/io/deephaven/db/v2/utils/ImmutableColumnHolder.java 
b/DB/src/test/java/io/deephaven/db/v2/utils/ImmutableColumnHolder.java index 67d1132924d..ecece9a6b1f 100644 --- a/DB/src/test/java/io/deephaven/db/v2/utils/ImmutableColumnHolder.java +++ b/DB/src/test/java/io/deephaven/db/v2/utils/ImmutableColumnHolder.java @@ -11,7 +11,7 @@ public class ImmutableColumnHolder extends ColumnHolder { @SuppressWarnings("unchecked") public ImmutableColumnHolder(@NotNull final String name, @NotNull final Class dataType, - @Nullable final Class componentType, final boolean grouped, final T... data) { + @Nullable final Class componentType, final boolean grouped, final T... data) { super(name, dataType, componentType, grouped, data); } } diff --git a/DB/src/test/java/io/deephaven/db/v2/utils/IndexCoalescerTest.java b/DB/src/test/java/io/deephaven/db/v2/utils/IndexCoalescerTest.java index 6b2ce35ee0c..e526cd4dbf0 100644 --- a/DB/src/test/java/io/deephaven/db/v2/utils/IndexCoalescerTest.java +++ b/DB/src/test/java/io/deephaven/db/v2/utils/IndexCoalescerTest.java @@ -60,8 +60,7 @@ public void testMergeMCSFirstALL() { final Index origIndex = Index.CURRENT_FACTORY.getIndexByRange(10, 29); final ShiftAwareListener.Update agg = validateFinalIndex(origIndex, up); - Assert.equals(agg.modifiedColumnSet, "agg.modifiedColumnSet", ModifiedColumnSet.ALL, - "ModifiedColumnSet.ALL"); + Assert.equals(agg.modifiedColumnSet, "agg.modifiedColumnSet", ModifiedColumnSet.ALL, "ModifiedColumnSet.ALL"); Assert.equals(agg.modified, "agg.modified", i(4, 5, 6, 7)); } @@ -76,8 +75,7 @@ public void testMergeMCSSecondALL() { final Index origIndex = Index.CURRENT_FACTORY.getIndexByRange(10, 29); final ShiftAwareListener.Update agg = validateFinalIndex(origIndex, up); - Assert.equals(agg.modifiedColumnSet, "agg.modifiedColumnSet", ModifiedColumnSet.ALL, - "ModifiedColumnSet.ALL"); + Assert.equals(agg.modifiedColumnSet, "agg.modifiedColumnSet", ModifiedColumnSet.ALL, "ModifiedColumnSet.ALL"); Assert.equals(agg.modified, "agg.modified", i(4, 5, 6, 7)); } @@ -96,8 +94,7 @@ 
public void testMergeMCSUnion() { final ShiftAwareListener.Update agg = validateFinalIndex(origIndex, up); final ModifiedColumnSet expected = new ModifiedColumnSet(up[0].modifiedColumnSet); expected.setAll("B", "C"); - Assert.eqTrue(agg.modifiedColumnSet.containsAll(expected), - "agg.modifiedColumnSet.containsAll(expected)"); + Assert.eqTrue(agg.modifiedColumnSet.containsAll(expected), "agg.modifiedColumnSet.containsAll(expected)"); Assert.equals(agg.modified, "agg.modified", i(4, 5, 6, 7)); } @@ -110,9 +107,9 @@ public void testSuppressModifiedAdds() { final Index origIndex = i(); final ShiftAwareListener.Update agg = validateFinalIndex(origIndex, up); Assert.equals(agg.added, "agg.added", Index.CURRENT_FACTORY.getIndexByValues(10), - "Index.CURRENT_FACTORY.getIndexByValues(10)"); + "Index.CURRENT_FACTORY.getIndexByValues(10)"); Assert.equals(agg.modified, "agg.modified", Index.CURRENT_FACTORY.getEmptyIndex(), - "Index.CURRENT_FACTORY.getEmptyIndex()"); + "Index.CURRENT_FACTORY.getEmptyIndex()"); } @Test @@ -138,11 +135,11 @@ public void testRemovedThenAdded() { final Index origIndex = i(2); final ShiftAwareListener.Update agg = validateFinalIndex(origIndex, up); Assert.equals(agg.added, "agg.added", Index.CURRENT_FACTORY.getIndexByValues(2), - "Index.CURRENT_FACTORY.getIndexByValues(2)"); + "Index.CURRENT_FACTORY.getIndexByValues(2)"); Assert.equals(agg.removed, "agg.removed", Index.CURRENT_FACTORY.getIndexByValues(2), - "Index.CURRENT_FACTORY.getIndexByValues(2)"); + "Index.CURRENT_FACTORY.getIndexByValues(2)"); Assert.equals(agg.modified, "agg.modified", Index.CURRENT_FACTORY.getEmptyIndex(), - "Index.CURRENT_FACTORY.getEmptyIndex()"); + "Index.CURRENT_FACTORY.getEmptyIndex()"); } @Test @@ -156,11 +153,11 @@ public void testAddedThenRemoved() { final Index origIndex = i(); final ShiftAwareListener.Update agg = validateFinalIndex(origIndex, up); Assert.equals(agg.added, "agg.added", Index.CURRENT_FACTORY.getEmptyIndex(), - 
"Index.CURRENT_FACTORY.getEmptyIndex()"); + "Index.CURRENT_FACTORY.getEmptyIndex()"); Assert.equals(agg.removed, "agg.removed", Index.CURRENT_FACTORY.getEmptyIndex(), - "Index.CURRENT_FACTORY.getEmptyIndex()"); + "Index.CURRENT_FACTORY.getEmptyIndex()"); Assert.equals(agg.modified, "agg.modified", Index.CURRENT_FACTORY.getEmptyIndex(), - "Index.CURRENT_FACTORY.getEmptyIndex()"); + "Index.CURRENT_FACTORY.getEmptyIndex()"); } @Test @@ -233,7 +230,7 @@ public void testMergeWithDifferentMCS() { Assert.equals(agg.modified, "agg.modified", i(1, 2, 3, 4)); mcsTemplate.setAllDirty(); Assert.eqTrue(coalescer.modifiedColumnSet.containsAll(mcsTemplate), - "coalescer.modifiedColumnSet.containsAll(mcsTemplate)"); + "coalescer.modifiedColumnSet.containsAll(mcsTemplate)"); } @Test @@ -262,7 +259,7 @@ public void testRemoveAfterModify() { Assert.equals(agg.modified, "agg.modified", i(1, 4)); mcsTemplate.setAllDirty(); Assert.eqTrue(coalescer.modifiedColumnSet.containsAll(mcsTemplate), - "coalescer.modifiedColumnSet.containsAll(mcsTemplate)"); + "coalescer.modifiedColumnSet.containsAll(mcsTemplate)"); } @Test @@ -291,7 +288,7 @@ public void testShiftAfterModifyDirtyPerColumn() { Assert.equals(agg.modified, "agg.modified", i(4, 5, 6)); mcsTemplate.setAllDirty(); Assert.eqTrue(coalescer.modifiedColumnSet.containsAll(mcsTemplate), - "coalescer.modifiedColumnSet.containsAll(mcsTemplate)"); + "coalescer.modifiedColumnSet.containsAll(mcsTemplate)"); } @Test @@ -515,8 +512,7 @@ public void testFlattenRegress12() { agg.shifted.apply(index); index.insert(agg.added); Assert.eqTrue(index.isFlat(), "index.isFlat()"); - // The remainder of the first shift conflicts with a now non-shifting element; so there - // should be no shifts. + // The remainder of the first shift conflicts with a now non-shifting element; so there should be no shifts. 
Assert.eq(agg.shifted.size(), "agg.shifted.size()", 0); } @@ -545,34 +541,30 @@ public void testFlattenRegress13() { @Test public void testSortRegress1() { final ShiftAwareListener.Update[] up = newEmptyUpdates(4); - // {added={1073741825,1073741827-1073741828,1073741832-1073741833}, - // removed={1073741825-1073741827}, modified={1073741829,1073741831}, - // shifted={[1073741828,1073741828]-2}, modifiedColumnSet={Sym,doubleCol,Keys}} + // {added={1073741825,1073741827-1073741828,1073741832-1073741833}, removed={1073741825-1073741827}, + // modified={1073741829,1073741831}, shifted={[1073741828,1073741828]-2}, + // modifiedColumnSet={Sym,doubleCol,Keys}} up[0].added = i(1073741825, 1073741827, 1073741828, 1073741832, 1073741833); up[0].removed = i(1073741825, 1073741826, 1073741827); up[0].shifted = newShiftDataByTriplets(1073741828, 1073741828, -2); // {added={1073741827,1073741832}, removed={1073741825,1073741828,1073741832-1073741833}, - // modified={1073741825-1073741826,1073741829-1073741831}, - // shifted={[1073741826,1073741827]-1}, modifiedColumnSet={Sym}} + // modified={1073741825-1073741826,1073741829-1073741831}, shifted={[1073741826,1073741827]-1}, + // modifiedColumnSet={Sym}} up[1].added = i(1073741827, 1073741832); up[1].removed = i(1073741825, 1073741828, 1073741832, 1073741833); up[1].shifted = newShiftDataByTriplets(1073741826, 1073741827, -1); // {added={1073741823,1073741827,1073741832}, removed={1073741830}, // modified={1073741824-1073741826,1073741829,1073741831,1073741833}, - // shifted={[1073741825,1073741827]-1,[1073741832,1073741832]+1}, - // modifiedColumnSet={Sym,intCol,Keys}} + // shifted={[1073741825,1073741827]-1,[1073741832,1073741832]+1}, modifiedColumnSet={Sym,intCol,Keys}} up[2].added = i(1073741823, 1073741827, 1073741832); up[2].removed = i(1073741830); - up[2].shifted = - newShiftDataByTriplets(1073741825, 1073741827, -1, 1073741832, 1073741832, +1); + up[2].shifted = newShiftDataByTriplets(1073741825, 1073741827, -1, 
1073741832, 1073741832, +1); // {added={1073741820-1073741823,1073741826,1073741830}, removed={1073741823,1073741827}, // modified={1073741819,1073741824-1073741825,1073741829,1073741831-1073741833}, - // shifted={[1073741824,1073741824]-5,[1073741825,1073741826]-1}, - // modifiedColumnSet={intCol,Keys}} + // shifted={[1073741824,1073741824]-5,[1073741825,1073741826]-1}, modifiedColumnSet={intCol,Keys}} up[3].added = i(1073741820, 1073741821, 1073741822, 1073741823, 1073741826, 1073741830); up[3].removed = i(1073741823, 1073741827); - up[3].shifted = - newShiftDataByTriplets(1073741824, 1073741824, -5, 1073741825, 1073741826, -1); + up[3].shifted = newShiftDataByTriplets(1073741824, 1073741824, -5, 1073741825, 1073741826, -1); final Index index = Index.CURRENT_FACTORY.getIndexByRange(1073741825, 1073741831); validateFinalIndex(index, up); @@ -589,19 +581,17 @@ private IndexShiftData newShiftDataByTriplets(long... values) { return builder.build(); } - private ShiftAwareListener.Update validateFinalIndex(final Index index, - final ShiftAwareListener.Update[] updates) { - final Index.IndexUpdateCoalescer coalescer = - new Index.IndexUpdateCoalescer(index, updates[0]); + private ShiftAwareListener.Update validateFinalIndex(final Index index, final ShiftAwareListener.Update[] updates) { + final Index.IndexUpdateCoalescer coalescer = new Index.IndexUpdateCoalescer(index, updates[0]); for (int i = 1; i < updates.length; ++i) { coalescer.update(updates[i]); } final ShiftAwareListener.Update agg = coalescer.coalesce(); try (final Index perUpdate = index.clone(); - final Index aggUpdate = index.clone(); - final Index perModify = Index.CURRENT_FACTORY.getEmptyIndex(); - final Index perAdded = Index.CURRENT_FACTORY.getEmptyIndex()) { + final Index aggUpdate = index.clone(); + final Index perModify = Index.CURRENT_FACTORY.getEmptyIndex(); + final Index perAdded = Index.CURRENT_FACTORY.getEmptyIndex()) { for (ShiftAwareListener.Update up : updates) { perAdded.remove(up.removed); 
@@ -636,18 +626,16 @@ private ShiftAwareListener.Update validateFinalIndex(final Index index, if (!iter.advance(beginRange + shiftDelta)) { return; } - Assert.eqTrue(iter.currentValue() >= beginRange, - "iter.currentValue() >= beginRange"); + Assert.eqTrue(iter.currentValue() >= beginRange, "iter.currentValue() >= beginRange"); } else { final Index.SearchIterator iter = myindex.reverseIterator(); if (!iter.advance(endRange + shiftDelta)) { return; } - Assert.eqTrue(iter.currentValue() <= endRange, - "iter.currentValue() <= endRange"); + Assert.eqTrue(iter.currentValue() <= endRange, "iter.currentValue() <= endRange"); } try (final Index sub = Index.CURRENT_FACTORY.getIndexByRange(beginRange, endRange); - final Index moving = myindex.extract(sub)) { + final Index moving = myindex.extract(sub)) { moving.shiftInPlace(shiftDelta); myindex.insert(moving); } diff --git a/DB/src/test/java/io/deephaven/db/v2/utils/IndexCreationRandomPerfTest.java b/DB/src/test/java/io/deephaven/db/v2/utils/IndexCreationRandomPerfTest.java index 1258084074a..b98b6a564c7 100644 --- a/DB/src/test/java/io/deephaven/db/v2/utils/IndexCreationRandomPerfTest.java +++ b/DB/src/test/java/io/deephaven/db/v2/utils/IndexCreationRandomPerfTest.java @@ -40,9 +40,9 @@ public long lastKey() { private static final boolean doLeavesTypeStats = true; static long runAndGetSamples( - final IndexLike.Factory ilf, - final int sz, final int runs, final PerfStats stats, - final String pfx, final boolean print) { + final IndexLike.Factory ilf, + final int sz, final int runs, final PerfStats stats, + final String pfx, final boolean print) { final Runtime rt = Runtime.getRuntime(); long lasts = 0; // to prevent the optimizer from eliminating unused steps. 
long tsum = 0; @@ -67,8 +67,7 @@ static long runAndGetSamples( tsum += dt; } if (print) { - System.out - .println(String.format("%s done in %.3f seconds, min delta memory used %7.3f Mb", + System.out.println(String.format("%s done in %.3f seconds, min delta memory used %7.3f Mb", pfx, tsum / 1000.0, minMb)); } return lasts; @@ -76,8 +75,7 @@ static long runAndGetSamples( static final String me = IndexCreationRandomPerfTest.class.getSimpleName(); - private static final IndexLike.Factory ilfs[] = - {IndexLike.mixedf, IndexLike.pqf, IndexLike.rspf}; + private static final IndexLike.Factory ilfs[] = {IndexLike.mixedf, IndexLike.pqf, IndexLike.rspf}; static double codeWarmup() { final int steps = 500; @@ -104,7 +102,7 @@ static void runStep(final String stepName, final int sz, final int runs, final b } for (IndexLike.Factory ilf : ilfs) { final String header = String.format("%-" + maxNameLen + "s %s", - ilf.name(), stepName + " sz=" + sz + " runs=" + runs); + ilf.name(), stepName + " sz=" + sz + " runs=" + runs); System.out.println(me + ": Running " + " " + header); final PerfStats stats = new PerfStats(runs); final String b = pfx + header; @@ -129,8 +127,7 @@ static void runStep(final String stepName, final int sz, final int runs, final b } } - static void run(final int warmupSz, final int warmupRuns, final int fullSz, - final int fullRuns) { + static void run(final int warmupSz, final int warmupRuns, final int fullSz, final int fullRuns) { runStep("warmup", warmupSz, warmupRuns, false); runStep("full test", fullSz, fullRuns, true); } @@ -184,8 +181,7 @@ public int percentile(final double p) { return samples.get(i > 0 ? 
i - 1 : 0); } - public static final int defaultPrintPs[] = - {0, 5, 10, 25, 40, 45, 50, 55, 60, 75, 90, 95, 99}; + public static final int defaultPrintPs[] = {0, 5, 10, 25, 40, 45, 50, 55, 60, 75, 90, 95, 99}; public void print(final String pfx) { print(pfx, defaultPrintPs); @@ -203,9 +199,9 @@ public void print(final String pfx, final int[] ps) { } public static void comparePrint( - final PerfStats p1, final String n1, - final PerfStats p2, final String n2, - final String pfx) { + final PerfStats p1, final String n1, + final PerfStats p2, final String n2, + final String pfx) { final StringBuilder sb = new StringBuilder(pfx); sb.append(n1); sb.append("/"); diff --git a/DB/src/test/java/io/deephaven/db/v2/utils/IndexCreationSeqPerfTest.java b/DB/src/test/java/io/deephaven/db/v2/utils/IndexCreationSeqPerfTest.java index 28daeca71b8..a67bd165ef8 100644 --- a/DB/src/test/java/io/deephaven/db/v2/utils/IndexCreationSeqPerfTest.java +++ b/DB/src/test/java/io/deephaven/db/v2/utils/IndexCreationSeqPerfTest.java @@ -59,9 +59,8 @@ public long lastKey() { } static long runAndGetSamples( - final IndexLike.Factory f, final Config c, final int sz, final int runs, - final PerfStats stats, - final String pfx, final boolean print) { + final IndexLike.Factory f, final Config c, final int sz, final int runs, final PerfStats stats, + final String pfx, final boolean print) { final Runtime rt = Runtime.getRuntime(); long lasts = 0; // to prevent the optimizer from eliminating unused steps. 
long tsum = 0; @@ -86,8 +85,7 @@ static long runAndGetSamples( tsum += dt; } if (print) { - System.out - .println(String.format("%s done in %.3f seconds, min delta memory used %.3f Mb", + System.out.println(String.format("%s done in %.3f seconds, min delta memory used %.3f Mb", pfx, tsum / 1000.0, minMb)); } return lasts; @@ -109,11 +107,9 @@ static double codeWarmup() { return sum / steps / lasts; } - static void runStep(final Config c, final String stepName, final int sz, final int runs, - final boolean print) { + static void runStep(final Config c, final String stepName, final int sz, final int runs, final boolean print) { for (IndexLike.Factory f : ilfs) { - System.out.println( - me + ": Running " + f.name() + " " + c.name + " " + stepName + " sz=" + sz); + System.out.println(me + ": Running " + f.name() + " " + c.name + " " + stepName + " sz=" + sz); final PerfStats sStats = new PerfStats(runs); final String pfx = me + " "; final String b = pfx + f.name() + " " + c.name + " " + stepName + " index len=" + sz; @@ -139,8 +135,7 @@ static void run(final Config c, final int warmupSz, final int fullSz, final int private static final Config[] configs = {c01, /* c10, c11 */ }; - private static final IndexLike.Factory ilfs[] = - {IndexLike.mixedf, IndexLike.pqf, IndexLike.rspf}; + private static final IndexLike.Factory ilfs[] = {IndexLike.mixedf, IndexLike.pqf, IndexLike.rspf}; public static void main(String[] args) { System.out.println(me + ": Running code warmup..."); diff --git a/DB/src/test/java/io/deephaven/db/v2/utils/IndexSequentialBuilderPerfTest.java b/DB/src/test/java/io/deephaven/db/v2/utils/IndexSequentialBuilderPerfTest.java index af5cbd1274b..9329f141fea 100644 --- a/DB/src/test/java/io/deephaven/db/v2/utils/IndexSequentialBuilderPerfTest.java +++ b/DB/src/test/java/io/deephaven/db/v2/utils/IndexSequentialBuilderPerfTest.java @@ -42,8 +42,7 @@ private static long doRspRun() { return bh; } - private static long runAndGetSamples(final int runs, final 
PerfStats stats, - final LongSupplier runner) { + private static long runAndGetSamples(final int runs, final PerfStats stats, final LongSupplier runner) { long trick = 0; // to prevent the optimizer from eliminating unused steps. final PerfMeasure pm = new PerfMeasure(false); for (int i = 0; i < runs; ++i) { @@ -79,8 +78,7 @@ private static void run(final int warmupRuns, final int fullRuns) { for (PerfTest t : ts) { clear(); for (int r = 0; r < runs.length; ++r) { - final String tname = - t.name + " " + ((r == 0) ? "warmup" : "full") + " size=" + sizeFormat(sz); + final String tname = t.name + " " + ((r == 0) ? "warmup" : "full") + " size=" + sizeFormat(sz); final long ts0 = System.nanoTime(); System.out.println("Running " + tname + " ..."); final int count = runs[r]; @@ -90,8 +88,8 @@ private static void run(final int warmupRuns, final int fullRuns) { } final long res = runAndGetSamples(count, stats, t.runner); final long ts1 = System.nanoTime(); - System.out.println(tname + " ran in " + secondsFormat((ts1 - ts0) / s2ns) - + ", optimizer trick result = " + res); + System.out.println( + tname + " ran in " + secondsFormat((ts1 - ts0) / s2ns) + ", optimizer trick result = " + res); if (stats != null) { stats.compute(); final double factor = 1 / s2ns; diff --git a/DB/src/test/java/io/deephaven/db/v2/utils/IndexShiftDataExpanderTest.java b/DB/src/test/java/io/deephaven/db/v2/utils/IndexShiftDataExpanderTest.java index 54d352e56ad..17e92738761 100644 --- a/DB/src/test/java/io/deephaven/db/v2/utils/IndexShiftDataExpanderTest.java +++ b/DB/src/test/java/io/deephaven/db/v2/utils/IndexShiftDataExpanderTest.java @@ -12,18 +12,16 @@ public class IndexShiftDataExpanderTest { /** - * These tests names have a few qualities worth defining: - Major Shift: shift with no overlap - * in keyspace before and after (i.e. modified index from shift alone is empty) - Minor Shift: - * shift with overlap in keyspace before and after (i.e. 
modified index from shift alone is - * non-empty) - Inner Shift: shift occurs with valid elements below and above keyspace both - * before and after - NoAdd / NoRm: Other than the shift, either the added or removed - * (respectively) indexes will be empty. - WithGaps: There will be a gap on the leading edges of - * the shift. (need to carefully categorize items near gap) - Modified Inner/Outer: Rows - * inside/outside of a shift were modified. - Removed Inner/Overlapping: Rows inside/overlapping - * a shift were removed. + * These tests names have a few qualities worth defining: - Major Shift: shift with no overlap in keyspace before + * and after (i.e. modified index from shift alone is empty) - Minor Shift: shift with overlap in keyspace before + * and after (i.e. modified index from shift alone is non-empty) - Inner Shift: shift occurs with valid elements + * below and above keyspace both before and after - NoAdd / NoRm: Other than the shift, either the added or removed + * (respectively) indexes will be empty. - WithGaps: There will be a gap on the leading edges of the shift. (need to + * carefully categorize items near gap) - Modified Inner/Outer: Rows inside/outside of a shift were modified. - + * Removed Inner/Overlapping: Rows inside/overlapping a shift were removed. * - * The test case ideas were generated by drawing rectangles with various adds/removes on each - * side of a pre-shift and post-shift. + * The test case ideas were generated by drawing rectangles with various adds/removes on each side of a pre-shift + * and post-shift. */ // To reduce per-test overhead, we'll let JUnit take care of our context helper. 
@@ -43,8 +41,7 @@ public void after() { public void testMajorRInnerShift() { c.sourceIndex.insertRange(1, 1000); c.added.insertRange(200, 400); - c.removed.insertRange(401, 800); // not allowed to reorder -- so pretend things inbetween - // are gone + c.removed.insertRange(401, 800); // not allowed to reorder -- so pretend things inbetween are gone c.shifted.shiftRange(200, 400, 400); c.expectModified.insertRange(200, 400); c.expectModified.insertRange(600, 800); @@ -55,8 +52,7 @@ public void testMajorRInnerShift() { public void testMajorLInnerShift() { c.sourceIndex.insertRange(1, 1000); c.added.insertRange(600, 800); - c.removed.insertRange(200, 599); // not allowed to reorder -- so pretend things inbetween - // are gone + c.removed.insertRange(200, 599); // not allowed to reorder -- so pretend things inbetween are gone c.shifted.shiftRange(600, 800, -400); c.expectModified.insertRange(200, 400); c.expectModified.insertRange(600, 800); @@ -492,10 +488,9 @@ public void testInplaceAddRemove() { } /** - * 1. Add initial index state to {@code sourceIndex}. 2. Setup added/removed/modified/shifted as - * inputs to IndexShiftDataExpander. 3. Modify expected output ranges to expectAdded / - * expectRemoved / expectModified. 4. Profit by letting @Before / @After clear context and run - * validate automagically. + * 1. Add initial index state to {@code sourceIndex}. 2. Setup added/removed/modified/shifted as inputs to + * IndexShiftDataExpander. 3. Modify expected output ranges to expectAdded / expectRemoved / expectModified. 4. + * Profit by letting @Before / @After clear context and run validate automagically. 
*/ private static class Context { public final Index sourceIndex = Index.FACTORY.getEmptyIndex(); @@ -519,8 +514,7 @@ public void validate() { final IndexShiftData shiftData = shifted.build(); shiftData.validate(); - final IndexShiftDataExpander expander = - new IndexShiftDataExpander(new ShiftAwareListener.Update( + final IndexShiftDataExpander expander = new IndexShiftDataExpander(new ShiftAwareListener.Update( added, removed, modified, shiftData, ModifiedColumnSet.ALL), sourceIndex); expander.validate(sourceIndex); diff --git a/DB/src/test/java/io/deephaven/db/v2/utils/IndexShiftDataTest.java b/DB/src/test/java/io/deephaven/db/v2/utils/IndexShiftDataTest.java index f8bbefa4671..3a6ae8398f2 100644 --- a/DB/src/test/java/io/deephaven/db/v2/utils/IndexShiftDataTest.java +++ b/DB/src/test/java/io/deephaven/db/v2/utils/IndexShiftDataTest.java @@ -198,8 +198,7 @@ public void testSmartCoalescingBuilder1() { dumbBuilder.shiftRange(4150, 4250, 10); final IndexShiftData disd = dumbBuilder.build(); - final IndexShiftData.SmartCoalescingBuilder scb = - new IndexShiftData.SmartCoalescingBuilder(index); + final IndexShiftData.SmartCoalescingBuilder scb = new IndexShiftData.SmartCoalescingBuilder(index); scb.shiftRange(50, 150, -10); scb.shiftRange(250, 350, -10); scb.shiftRange(4150, 4250, 10); @@ -228,8 +227,7 @@ public void testSmartCoalescingBuilder2() { dumbBuilder.shiftRange(4150, 4250, 10); final IndexShiftData disd = dumbBuilder.build(); - final IndexShiftData.SmartCoalescingBuilder scb = - new IndexShiftData.SmartCoalescingBuilder(index); + final IndexShiftData.SmartCoalescingBuilder scb = new IndexShiftData.SmartCoalescingBuilder(index); scb.shiftRange(50, 150, -10); scb.shiftRange(190, 210, -10); scb.shiftRange(250, 350, -10); @@ -258,8 +256,7 @@ public void testSmartCoalescingBuilder3() { dumbBuilder.shiftRange(1000, 4000, 10); final IndexShiftData disd = dumbBuilder.build(); - final IndexShiftData.SmartCoalescingBuilder scb = - new 
IndexShiftData.SmartCoalescingBuilder(index); + final IndexShiftData.SmartCoalescingBuilder scb = new IndexShiftData.SmartCoalescingBuilder(index); scb.shiftRange(1000, 4000, 10); final IndexShiftData sisd = scb.build(); @@ -286,8 +283,7 @@ public void testSmartCoalescingBuilder4() { dumbBuilder.shiftRange(4150, 4250, 10); final IndexShiftData disd = dumbBuilder.build(); - final IndexShiftData.SmartCoalescingBuilder scb = - new IndexShiftData.SmartCoalescingBuilder(index); + final IndexShiftData.SmartCoalescingBuilder scb = new IndexShiftData.SmartCoalescingBuilder(index); scb.shiftRange(20, 30, 10); scb.shiftRange(45, 45, -1); scb.shiftRange(250, 350, 10); @@ -323,8 +319,7 @@ private void testSmartCoalescingBuilder5and6(Index index) { dumbBuilder.shiftRange(4300, 4301, -1); final IndexShiftData disd = dumbBuilder.build(); - final IndexShiftData.SmartCoalescingBuilder scb = - new IndexShiftData.SmartCoalescingBuilder(index); + final IndexShiftData.SmartCoalescingBuilder scb = new IndexShiftData.SmartCoalescingBuilder(index); scb.shiftRange(250, 350, 10); scb.shiftRange(50, 150, 10); scb.shiftRange(30, 40, 10); @@ -349,8 +344,7 @@ public void testSmartCoalescingBuilder7() { dumbBuilder.shiftRange(13, 13, 3); final IndexShiftData disd = dumbBuilder.build(); - final IndexShiftData.SmartCoalescingBuilder scb = - new IndexShiftData.SmartCoalescingBuilder(index); + final IndexShiftData.SmartCoalescingBuilder scb = new IndexShiftData.SmartCoalescingBuilder(index); scb.shiftRange(1, 2, 2); scb.shiftRange(13, 13, 3); scb.shiftRange(10, 12, 1); @@ -379,8 +373,7 @@ public void testSmartCoalescingBuilder8() { dumbBuilder.shiftRange(38, 40, -3); final IndexShiftData disd = dumbBuilder.build(); - final IndexShiftData.SmartCoalescingBuilder scb = - new IndexShiftData.SmartCoalescingBuilder(index); + final IndexShiftData.SmartCoalescingBuilder scb = new IndexShiftData.SmartCoalescingBuilder(index); scb.shiftRange(16, 18, 1); scb.shiftRange(11, 14, 2); scb.shiftRange(9, 10, 1); @@ 
-409,8 +402,7 @@ public void testSmartCoalescingBuilder9() { dumbBuilder.shiftRange(20, 27, -1); final IndexShiftData disd = dumbBuilder.build(); - final IndexShiftData.SmartCoalescingBuilder scb = - new IndexShiftData.SmartCoalescingBuilder(index); + final IndexShiftData.SmartCoalescingBuilder scb = new IndexShiftData.SmartCoalescingBuilder(index); scb.shiftRange(3, 7, 1); scb.shiftRange(13, 14, 1); scb.shiftRange(20, 27, -1); diff --git a/DB/src/test/java/io/deephaven/db/v2/utils/IterPerformanceTest.java b/DB/src/test/java/io/deephaven/db/v2/utils/IterPerformanceTest.java index f55f7d31a59..46eceea6bb7 100644 --- a/DB/src/test/java/io/deephaven/db/v2/utils/IterPerformanceTest.java +++ b/DB/src/test/java/io/deephaven/db/v2/utils/IterPerformanceTest.java @@ -39,8 +39,7 @@ interface ValuesBuilder { ValuesBuilder builder(); - // Since tests will run multiple times, and creation time is high (higher than individual - // operations), + // Since tests will run multiple times, and creation time is high (higher than individual operations), // we create the base index one and clone it before every run of update. 
long getBaseCrc32(long toBeOred); @@ -212,8 +211,7 @@ private static class ValuesBuilder { this.clusterMid = clusterMid; } - long populateFirstArgStep(final int jumpPropOneIn, final int d, final int halfClusterWidth, - final Random r) { + long populateFirstArgStep(final int jumpPropOneIn, final int d, final int halfClusterWidth, final Random r) { final long k; if (r.nextInt(jumpPropOneIn) == 0) { k = clusterMid = halfClusterWidth + r.nextInt(d); @@ -225,7 +223,7 @@ long populateFirstArgStep(final int jumpPropOneIn, final int d, final int halfCl } void populateSecondArgStep(final int sizePropOneIn, final int sharePropOneIn, final long k, - int cluster1Mid, final int halfClusterWidth, final Random r) { + int cluster1Mid, final int halfClusterWidth, final Random r) { if (sizePropOneIn != 1 && r.nextInt(sizePropOneIn) != 0) { return; } @@ -241,9 +239,8 @@ void populateSecondArgStep(final int sizePropOneIn, final int sharePropOneIn, fi } private static class Config { - Config(final String name, final int min, final int max, final int clusterWidth, - final int sizePropOneIn, - final int sharePropOneIn, final int jumpPropOneIn) { + Config(final String name, final int min, final int max, final int clusterWidth, final int sizePropOneIn, + final int sharePropOneIn, final int jumpPropOneIn) { this.name = name; this.clusterWidth = clusterWidth; this.sizePropOneIn = sizePropOneIn; @@ -263,14 +260,14 @@ private static class Config { }; private static final Config sparse = - new Config("sparse", 10, 300000000, 50, 1, 1000, 25); + new Config("sparse", 10, 300000000, 50, 1, 1000, 25); private static final Config dense = - new Config("dense", 20, 30000000, 20, 1, 3, 20); + new Config("dense", 20, 30000000, 20, 1, 3, 20); private static final Config asymmetric = - new Config("asymmetric", 10, 300000000, 30000000, 160000, 1000, 25); + new Config("asymmetric", 10, 300000000, 30000000, 160000, 1000, 25); - public static void setupStrategy(final IterStrategy s, final int sz, final Config 
c, - final String pref, final boolean print) { + public static void setupStrategy(final IterStrategy s, final int sz, final Config c, final String pref, + final boolean print) { final int halfClusterWidth = c.clusterWidth / 2; final IterStrategy.ValuesBuilder b = s.builder(); final ValuesBuilder vb = new ValuesBuilder(c.min + halfClusterWidth, b); @@ -278,8 +275,7 @@ public static void setupStrategy(final IterStrategy s, final int sz, final Confi final int d = c.max - c.min + 1 - c.clusterWidth; for (int i = 0; i < sz; ++i) { final long k = vb.populateFirstArgStep(c.jumpPropOneIn, d, halfClusterWidth, r); - vb.populateSecondArgStep(c.sizePropOneIn, c.sharePropOneIn, k, vb.clusterMid, - halfClusterWidth, r); + vb.populateSecondArgStep(c.sizePropOneIn, c.sharePropOneIn, k, vb.clusterMid, halfClusterWidth, r); } b.done(); if (!print) { @@ -328,8 +324,8 @@ static double codeWarmup(final IterStrategy.Factory f) { static final double s2ns = 1e9; static void runStep( - final Config c, final int sn, final IterStrategy[] ss, - final String stepName, final int sz, final int runs, final boolean print) { + final Config c, final int sn, final IterStrategy[] ss, + final String stepName, final int sz, final int runs, final boolean print) { final Runtime rt = Runtime.getRuntime(); System.out.println(me + ": Running " + c.name + " " + stepName + " sz=" + nf(sz)); final String pfx = me + " "; @@ -342,9 +338,9 @@ static void runStep( final double dMb = pm.dm() / (1024.0 * 1024.0); if (print) { System.out.println(pfx + String.format( - "Building values for " + ss[si].toString() + - " done in %.3f secs, delta memory used %s", - pm.dt() / s2ns, mf(dMb))); + "Building values for " + ss[si].toString() + + " done in %.3f secs, delta memory used %s", + pm.dt() / s2ns, mf(dMb))); } pm.reset(); } @@ -362,7 +358,7 @@ static void runStep( System.out.println(pfx + "trick optimizer value = " + nf(trick)); if (si != 0) { PerfStats.comparePrint( - pStats, ss[0].toString(), sStats, 
ss[si].toString(), pfx); + pStats, ss[0].toString(), sStats, ss[si].toString(), pfx); } } final long t1 = System.nanoTime(); @@ -372,18 +368,18 @@ static void runStep( // Having separate warmup and full methods helps separate them in JProfiler. static void runStepWarmup(final Config c, final int sn, final IterStrategy ss[], - final int sz, final int runs) { + final int sz, final int runs) { runStep(c, sn, ss, "warmup", sz, runs, false); } static void runStepFull(final Config c, final int sn, final IterStrategy ss[], - final int sz, final int runs) { + final int sz, final int runs) { runStep(c, sn, ss, "full test", sz, runs, true); } static void run( - final Config c, final int sn, final IterStrategy[] ss, - final int warmupSz, final int warmupRuns, final int fullSz, final int fullRuns) { + final Config c, final int sn, final IterStrategy[] ss, + final int warmupSz, final int warmupRuns, final int fullSz, final int fullRuns) { runStepWarmup(c, sn, ss, warmupSz, warmupRuns); runStepFull(c, sn, ss, fullSz, fullRuns); } @@ -416,7 +412,7 @@ public static void main(String[] args) { final long t1 = System.nanoTime(); final long dt = t1 - t0; System.out.println(me + ": " + ss[si].toString() + " Code warmup ran in " + - dt / s2ns + " seconds, output=" + wo); + dt / s2ns + " seconds, output=" + wo); } final int warmupSz = 1 * 1000 * 1000; final int warmupRuns = 20; diff --git a/DB/src/test/java/io/deephaven/db/v2/utils/ModelFileGenerator.java b/DB/src/test/java/io/deephaven/db/v2/utils/ModelFileGenerator.java index ee19bfdb456..a060948e2fb 100644 --- a/DB/src/test/java/io/deephaven/db/v2/utils/ModelFileGenerator.java +++ b/DB/src/test/java/io/deephaven/db/v2/utils/ModelFileGenerator.java @@ -20,7 +20,7 @@ public ModelFileGenerator(Class classType) { private String getPath() { return Configuration.getInstance().getProperty("devroot") + - "/DB/src/test/java/" + classType.getCanonicalName().replace('.', '/') + ".java"; + "/DB/src/test/java/" + 
classType.getCanonicalName().replace('.', '/') + ".java"; } public void generateFile(final String rawClassDef) throws FileNotFoundException { @@ -35,14 +35,14 @@ public void validateFile(final String rawClassDef) throws IOException { final byte[] encoded = Files.readAllBytes(Paths.get(getPath())); final String currentVersion = new String(encoded); TestCase.assertEquals( - "Code generation results have changed - if you are comfortable with the change, run generateFile above to update the reference implementation", - currentVersion, processedClassDef); + "Code generation results have changed - if you are comfortable with the change, run generateFile above to update the reference implementation", + currentVersion, processedClassDef); } private String processClassDef(final String rawClassDef) { return "package io.deephaven.db.v2.select;\n" + - rawClassDef.replace("$CLASSNAME$", classType.getSimpleName()) - .replace("import static io.deephaven.numerics.suanshu.SuanShuIntegration.*;", ""); + rawClassDef.replace("$CLASSNAME$", classType.getSimpleName()) + .replace("import static io.deephaven.numerics.suanshu.SuanShuIntegration.*;", ""); } } diff --git a/DB/src/test/java/io/deephaven/db/v2/utils/OrderedKeysKeyRangesChunkImplTest.java b/DB/src/test/java/io/deephaven/db/v2/utils/OrderedKeysKeyRangesChunkImplTest.java index 642c2356fad..f8dde154c3a 100644 --- a/DB/src/test/java/io/deephaven/db/v2/utils/OrderedKeysKeyRangesChunkImplTest.java +++ b/DB/src/test/java/io/deephaven/db/v2/utils/OrderedKeysKeyRangesChunkImplTest.java @@ -16,13 +16,12 @@ public class OrderedKeysKeyRangesChunkImplTest extends OrderedKeysTestBase { @Override protected OrderedKeys create(long... 
values) { return OrderedKeys.takeKeyRangesChunkAndMakeOrderedKeys( - ChunkUtils.convertToOrderedKeyRanges(LongChunk.chunkWrap(values))); + ChunkUtils.convertToOrderedKeyRanges(LongChunk.chunkWrap(values))); } @Test public void testGetRelativePositionForCoverage() { - try (final WritableLongChunk chunk = - WritableLongChunk.makeWritableChunk(6)) { + try (final WritableLongChunk chunk = WritableLongChunk.makeWritableChunk(6)) { chunk.setSize(0); chunk.add(0); chunk.add(3); diff --git a/DB/src/test/java/io/deephaven/db/v2/utils/OrderedKeysTestBase.java b/DB/src/test/java/io/deephaven/db/v2/utils/OrderedKeysTestBase.java index 8cf9f3963cc..e986da0d80d 100644 --- a/DB/src/test/java/io/deephaven/db/v2/utils/OrderedKeysTestBase.java +++ b/DB/src/test/java/io/deephaven/db/v2/utils/OrderedKeysTestBase.java @@ -24,8 +24,7 @@ import static org.junit.Assert.*; /** - * OrderedKeys implementation tests can extend this to verify that OrderedKeys behavior is as - * expected. + * OrderedKeys implementation tests can extend this to verify that OrderedKeys behavior is as expected. 
*/ public abstract class OrderedKeysTestBase { @@ -110,7 +109,7 @@ public static long[] shift(final long[] vs, final long offset) { public void testGetOrderedKeysByPositionEdgedAtBeginning() { final long[] indices = indicesFromRanges(shift(ranges0, offset0)); try (final OrderedKeys OK = create(indices); - final OrderedKeys byPos = OK.getOrderedKeysByPosition(0, 20)) { + final OrderedKeys byPos = OK.getOrderedKeysByPosition(0, 20)) { assertContentsByIndices(indicesFromRanges(shift(array(1, 10, 21, 30), offset0)), byPos); } } @@ -119,7 +118,7 @@ public void testGetOrderedKeysByPositionEdgedAtBeginning() { public void testGetOrderedKeysByPositionEdgeInMiddle() { final long[] indices = indicesFromRanges(shift(ranges0, offset0)); try (final OrderedKeys OK = create(indices); - final OrderedKeys byPos = OK.getOrderedKeysByPosition(10, 20)) { + final OrderedKeys byPos = OK.getOrderedKeysByPosition(10, 20)) { assertContentsByIndices(indicesFromRanges(shift(array(21, 40), offset0)), byPos); } } @@ -128,9 +127,8 @@ public void testGetOrderedKeysByPositionEdgeInMiddle() { public void testGetOrderedKeysByPositionMiddle() { final long[] indices = indicesFromRanges(shift(ranges0, offset0)); try (final OrderedKeys OK = create(indices); - final OrderedKeys byPos = OK.getOrderedKeysByPosition(8, 24)) { - assertContentsByIndices( - indicesFromRanges(shift(array(9, 10, 21, 40, 101, 102), offset0)), byPos); + final OrderedKeys byPos = OK.getOrderedKeysByPosition(8, 24)) { + assertContentsByIndices(indicesFromRanges(shift(array(9, 10, 21, 40, 101, 102), offset0)), byPos); } } @@ -138,9 +136,8 @@ public void testGetOrderedKeysByPositionMiddle() { public void testGetOrderedKeysByPositionEdgedAtEnd() { final long[] indices = indicesFromRanges(shift(ranges0, offset0)); try (final OrderedKeys OK = create(indices); - final OrderedKeys byPos = OK.getOrderedKeysByPosition(28, 44)) { - assertContentsByIndices(indicesFromRanges(shift(array(39, 40, 101, 142), offset0)), - byPos); + final 
OrderedKeys byPos = OK.getOrderedKeysByPosition(28, 44)) { + assertContentsByIndices(indicesFromRanges(shift(array(39, 40, 101, 142), offset0)), byPos); } } @@ -148,7 +145,7 @@ public void testGetOrderedKeysByPositionEdgedAtEnd() { public void testGetOrderedKeysByPositionBeginAtEnd() { final long[] indices = indicesFromRanges(shift(ranges0, offset0)); try (final OrderedKeys OK = create(indices); - final OrderedKeys byPos = OK.getOrderedKeysByPosition(72, 1024)) { + final OrderedKeys byPos = OK.getOrderedKeysByPosition(72, 1024)) { assertContentsByIndices(indicesFromRanges(), byPos); } } @@ -157,7 +154,7 @@ public void testGetOrderedKeysByPositionBeginAtEnd() { public void testGetOrderedKeysByPositionOverlapAtEnd() { final long[] indices = indicesFromRanges(shift(ranges0, offset0)); try (final OrderedKeys OK = create(indices); - final OrderedKeys byPos = OK.getOrderedKeysByPosition(60, 1024)) { + final OrderedKeys byPos = OK.getOrderedKeysByPosition(60, 1024)) { assertContentsByIndices(indicesFromRanges(shift(array(131, 142), offset0)), byPos); } } @@ -166,7 +163,7 @@ public void testGetOrderedKeysByPositionOverlapAtEnd() { public void testGetOrderedKeysByPositionBeyondEnd() { final long[] indices = indicesFromRanges(shift(ranges0, offset0)); try (final OrderedKeys OK = create(indices); - final OrderedKeys byPos = OK.getOrderedKeysByPosition(100, 1)) { + final OrderedKeys byPos = OK.getOrderedKeysByPosition(100, 1)) { assertContentsByIndices(indicesFromRanges(), byPos); } } @@ -175,7 +172,7 @@ public void testGetOrderedKeysByPositionBeyondEnd() { public void testGetOrderedKeysByPositionOverlapEntire() { final long[] indices = indicesFromRanges(shift(ranges0, offset0)); try (final OrderedKeys OK = create(indices); - final OrderedKeys byPos = OK.getOrderedKeysByPosition(0, 1024)) { + final OrderedKeys byPos = OK.getOrderedKeysByPosition(0, 1024)) { assertContentsByIndices(indices, byPos); } } @@ -184,7 +181,7 @@ public void testGetOrderedKeysByPositionOverlapEntire() 
{ public void testGetOrderedKeysByPositionExactRange() { final long[] indices = indicesFromRanges(shift(ranges0, offset0)); try (final OrderedKeys OK = create(indices); - final OrderedKeys byPos = OK.getOrderedKeysByPosition(0, indices.length)) { + final OrderedKeys byPos = OK.getOrderedKeysByPosition(0, indices.length)) { assertContentsByIndices(indices, byPos); } } @@ -193,7 +190,7 @@ public void testGetOrderedKeysByPositionExactRange() { public void testGetOrderedKeysByRangeBeforeBeginning() { final long[] indices = indicesFromRanges(shift(array(21, 40, 101, 142), offset0)); try (final OrderedKeys OK = create(indices); - final OrderedKeys byKey = OK.getOrderedKeysByKeyRange(offset0, 20 + offset0)) { + final OrderedKeys byKey = OK.getOrderedKeysByKeyRange(offset0, 20 + offset0)) { assertContentsByIndices(indicesFromRanges(), byKey); } } @@ -202,7 +199,7 @@ public void testGetOrderedKeysByRangeBeforeBeginning() { public void testGetOrderedKeysByRangeOverlapBeginning() { final long[] indices = indicesFromRanges(shift(ranges0, offset0)); try (final OrderedKeys OK = create(indices); - final OrderedKeys byKey = OK.getOrderedKeysByKeyRange(offset0, offset0 + 5)) { + final OrderedKeys byKey = OK.getOrderedKeysByKeyRange(offset0, offset0 + 5)) { assertContentsByIndices(indicesFromRanges(offset0 + 1, offset0 + 5), byKey); } } @@ -211,7 +208,7 @@ public void testGetOrderedKeysByRangeOverlapBeginning() { public void testGetOrderedKeysByRangeEdgeAtBeginning() { final long[] indices = indicesFromRanges(shift(ranges0, offset0)); try (final OrderedKeys OK = create(indices); - final OrderedKeys byRange = OK.getOrderedKeysByKeyRange(offset0 + 1, offset0 + 5)) { + final OrderedKeys byRange = OK.getOrderedKeysByKeyRange(offset0 + 1, offset0 + 5)) { assertContentsByIndices(indicesFromRanges(offset0 + 1, offset0 + 5), byRange); } } @@ -220,9 +217,8 @@ public void testGetOrderedKeysByRangeEdgeAtBeginning() { public void testGetOrderedKeysByRangeEdgeInMiddle() { final long[] indices = 
indicesFromRanges(shift(ranges0, offset0)); try (final OrderedKeys OK = create(indices); - final OrderedKeys byRange = OK.getOrderedKeysByKeyRange(offset0 + 10, offset0 + 30)) { - assertContentsByIndices(indicesFromRanges(shift(array(10, 10, 21, 30), offset0)), - byRange); + final OrderedKeys byRange = OK.getOrderedKeysByKeyRange(offset0 + 10, offset0 + 30)) { + assertContentsByIndices(indicesFromRanges(shift(array(10, 10, 21, 30), offset0)), byRange); } } @@ -230,7 +226,7 @@ public void testGetOrderedKeysByRangeEdgeInMiddle() { public void testGetOrderedKeysByRangeMiddle() { final long[] indices = indicesFromRanges(shift(ranges0, offset0)); try (final OrderedKeys OK = create(indices); - final OrderedKeys byRange = OK.getOrderedKeysByKeyRange(offset0 + 24, offset0 + 34)) { + final OrderedKeys byRange = OK.getOrderedKeysByKeyRange(offset0 + 24, offset0 + 34)) { assertContentsByIndices(indicesFromRanges(offset0 + 24, offset0 + 34), byRange); } } @@ -239,7 +235,7 @@ public void testGetOrderedKeysByRangeMiddle() { public void testGetOrderedKeysByRangeEdgedAtEnd() { final long[] indices = indicesFromRanges(shift(ranges0, offset0)); try (final OrderedKeys OK = create(indices); - final OrderedKeys byRange = OK.getOrderedKeysByKeyRange(offset0 + 120, offset0 + 142)) { + final OrderedKeys byRange = OK.getOrderedKeysByKeyRange(offset0 + 120, offset0 + 142)) { assertContentsByIndices(indicesFromRanges(offset0 + 120, offset0 + 142), byRange); } } @@ -248,8 +244,7 @@ public void testGetOrderedKeysByRangeEdgedAtEnd() { public void testGetOrderedKeysByRangeBeginAtEnd() { final long[] indices = indicesFromRanges(shift(ranges0, offset0)); try (final OrderedKeys OK = create(indices); - final OrderedKeys byRange = - OK.getOrderedKeysByKeyRange(offset0 + 143, offset0 + 1024)) { + final OrderedKeys byRange = OK.getOrderedKeysByKeyRange(offset0 + 143, offset0 + 1024)) { assertContentsByIndices(indicesFromRanges(), byRange); } } @@ -258,9 +253,8 @@ public void 
testGetOrderedKeysByRangeBeginAtEnd() { public void testGetOrderedKeysByRangeOverlapEnd() { final long[] indices = indicesFromRanges(shift(ranges0, offset0)); try (final OrderedKeys OK = create(indices); - final OrderedKeys byRange = OK.getOrderedKeysByKeyRange(offset0 + 25, offset0 + 1024)) { - assertContentsByIndices(indicesFromRanges(shift(array(25, 40, 101, 142), offset0)), - byRange); + final OrderedKeys byRange = OK.getOrderedKeysByKeyRange(offset0 + 25, offset0 + 1024)) { + assertContentsByIndices(indicesFromRanges(shift(array(25, 40, 101, 142), offset0)), byRange); } } @@ -268,8 +262,7 @@ public void testGetOrderedKeysByRangeOverlapEnd() { public void testGetOrderedKeysByRangeBeyondEnd() { final long[] indices = indicesFromRanges(shift(ranges0, offset0)); try (final OrderedKeys OK = create(indices); - final OrderedKeys byRange = - OK.getOrderedKeysByKeyRange(offset0 + 1024, offset0 + 2048)) { + final OrderedKeys byRange = OK.getOrderedKeysByKeyRange(offset0 + 1024, offset0 + 2048)) { assertContentsByIndices(indicesFromRanges(), byRange); } } @@ -279,12 +272,11 @@ public void testFillIndices() { for (long[] ranges : new long[][] {ranges0, ranges1, ranges2}) { final long[] indices = indicesFromRanges(shift(ranges, offset0)); try (final WritableLongChunk chunk = - WritableLongChunk.makeWritableChunk(indices.length)) { + WritableLongChunk.makeWritableChunk(indices.length)) { try (final OrderedKeys OK = create(indices)) { OK.fillKeyIndicesChunk(chunk); } - final LongChunk expectedChunk = - LongChunk.chunkWrap(indices, 0, indices.length); + final LongChunk expectedChunk = LongChunk.chunkWrap(indices, 0, indices.length); assertChunksEqual(expectedChunk, chunk); } } @@ -297,7 +289,7 @@ public void testFillRanges() { final long[] ranges = rangesessess[r]; final long[] indices = indicesFromRanges(shift(ranges, offset0)); try (final WritableLongChunk chunk = - WritableLongChunk.makeWritableChunk(indices.length)) { + WritableLongChunk.makeWritableChunk(indices.length)) 
{ try (final OrderedKeys OK = create(indices)) { OK.fillKeyRangesChunk(chunk); } @@ -345,8 +337,7 @@ public void testIteratorAdvanceAndPeekOnlyAllElements() { assertEquals(indices[offset], it.peekNextKey()); final boolean expectMore = offset + 1 < indices.length; final String m = "offset==" + offset; - assertEquals(m, expectMore, - it.advance(expectMore ? indices[offset + 1] : indices[offset] + 1)); + assertEquals(m, expectMore, it.advance(expectMore ? indices[offset + 1] : indices[offset] + 1)); assertEquals(m, expectMore, it.hasMore()); } } @@ -381,7 +372,7 @@ public void testIteratorGetNextByLenOnly() { final long pos1 = it.getRelativePosition(); assertEquals(subOK.size(), pos1 - pos0); assertContentsByIndices(sliceLongArray(indices, offset, - Math.min(indices.length, offset + stepSize)), subOK); + Math.min(indices.length, offset + stepSize)), subOK); } } } @@ -393,8 +384,7 @@ public void testIteratorGetNextByKeyOnly() { final long[] indices = indicesFromRanges(ranges1); try (final OrderedKeys OK = create(indices)) { try (final OrderedKeys.Iterator it = OK.getOrderedKeysIterator()) { - for (long key = - indices[0] - (indices[0] % stepSize); key < indices[indices.length - 1]; key += + for (long key = indices[0] - (indices[0] % stepSize); key < indices[indices.length - 1]; key += stepSize) { final long endKey = key + stepSize - 1; final long pos0 = it.getRelativePosition(); @@ -407,8 +397,7 @@ public void testIteratorGetNextByKeyOnly() { startOffset = ~startOffset; int endOffset = Arrays.binarySearch(indices, endKey); endOffset = (endOffset < 0) ? 
~endOffset : endOffset + 1; - assertContentsByIndices(m, sliceLongArray(indices, startOffset, endOffset), - subOK); + assertContentsByIndices(m, sliceLongArray(indices, startOffset, endOffset), subOK); } } } @@ -426,8 +415,7 @@ public void testIteratorAdvanceThenLen() { final long pos1 = it.getRelativePosition(); assertEquals(subOK.size(), pos1 - pos0); final int startOffset = Arrays.binarySearch(indices, 150); - assertContentsByIndices( - sliceLongArray(indices, startOffset, startOffset + stepSize), subOK); + assertContentsByIndices(sliceLongArray(indices, startOffset, startOffset + stepSize), subOK); } } } @@ -508,15 +496,15 @@ protected long[] sliceLongArray(final long[] src, final int startIndex, final in } protected void assertContentsByIndices(final long[] expected, - final OrderedKeys orderedKeys) { + final OrderedKeys orderedKeys) { assertContentsByIndices(null, expected, orderedKeys); } protected void assertContentsByIndices( - final String msg, final long[] expected, final OrderedKeys orderedKeys) { + final String msg, final long[] expected, final OrderedKeys orderedKeys) { final LongChunk expectedIndices = LongChunk.chunkWrap(expected); try (final WritableLongChunk expectedRanges = - ChunkUtils.convertToOrderedKeyRanges(expectedIndices)) { + ChunkUtils.convertToOrderedKeyRanges(expectedIndices)) { // size must be identical assertEquals(msg, expectedIndices.size(), orderedKeys.size()); @@ -529,21 +517,20 @@ protected void assertContentsByIndices( final MutableInt idx = new MutableInt(0); final Index ix = orderedKeys.asIndex(); assertTrue(msg, ix.forEachLong((value) -> { - assertEquals(msg + " && value==" + value, expectedIndices.get(idx.intValue()), - value); + assertEquals(msg + " && value==" + value, expectedIndices.get(idx.intValue()), value); idx.add(1); return true; })); // Check fillKeyIndices try (final WritableLongChunk writableOKIndices = - WritableLongChunk.makeWritableChunk(expectedIndices.size())) { + 
WritableLongChunk.makeWritableChunk(expectedIndices.size())) { orderedKeys.fillKeyIndicesChunk(writableOKIndices); assertChunksEqual(expectedIndices, writableOKIndices); } // Check fillKeyRanges try (final WritableLongChunk writableOKRanges = - WritableLongChunk.makeWritableChunk(expectedRanges.size())) { + WritableLongChunk.makeWritableChunk(expectedRanges.size())) { orderedKeys.fillKeyRangesChunk(writableOKRanges); assertChunksEqual(expectedRanges, writableOKRanges); } @@ -552,8 +539,7 @@ protected void assertContentsByIndices( if (expectedIndices.size() > 0) { // Check first and last key. assertEquals(msg, expectedIndices.get(0), orderedKeys.firstKey()); - assertEquals(msg, expectedIndices.get(expectedIndices.size() - 1), - orderedKeys.lastKey()); + assertEquals(msg, expectedIndices.get(expectedIndices.size() - 1), orderedKeys.lastKey()); // Check averageRunLength is reasonable (note: undefined if size is 0) final long runLen = orderedKeys.getAverageRunLengthEstimate(); @@ -562,9 +548,8 @@ protected void assertContentsByIndices( } } - protected void assertChunksEqual(final String msg, - final LongChunk expected, - final LongChunk actual) { + protected void assertChunksEqual(final String msg, final LongChunk expected, + final LongChunk actual) { assertEquals(expected.size(), actual.size()); for (int i = 0; i < expected.size(); ++i) { assertEquals(msg, expected.get(i), actual.get(i)); @@ -572,17 +557,16 @@ protected void assertChunksEqual(final String msg, } protected void assertChunksEqual(final LongChunk expected, - final LongChunk actual) { + final LongChunk actual) { assertChunksEqual(null, expected, actual); } public static final long k2 = 65536; - public static final long[] ranges2 = {k2 + 10, k2 + 105, 2 * k2, 5 * k2 + 4, 7 * k2 - 2, - 7 * k2 + 1, 8 * k2 - 1, 8 * k2 - 1, 10 * k2, 12 * k2 + 3}; + public static final long[] ranges2 = {k2 + 10, k2 + 105, 2 * k2, 5 * k2 + 4, 7 * k2 - 2, 7 * k2 + 1, 8 * k2 - 1, + 8 * k2 - 1, 10 * k2, 12 * k2 + 3}; private 
interface IndexBoundaryTest { - void run(final String ctxt, final OrderedKeys ok, final Index ix, final long s, - final long e); + void run(final String ctxt, final OrderedKeys ok, final Index ix, final long s, final long e); } private void testIndexBoundaries(final long[] ranges, final IndexBoundaryTest test) { @@ -607,72 +591,72 @@ private void testIndexBoundaries(final long[] ranges, final IndexBoundaryTest te @Test public void testForEachLong() { - testIndexBoundaries(ranges2, (final String ctxt, final OrderedKeys ok, final Index ix, - final long s, final long e) -> { - final Index expected = ix.subindexByKey(s, e); - try (final OrderedKeys ok1 = ok.getOrderedKeysByKeyRange(s, e)) { - final Index.SequentialBuilder b = Index.FACTORY.getSequentialBuilder(); - ok1.forEachLong((final long v) -> { - b.appendKey(v); - return true; + testIndexBoundaries(ranges2, + (final String ctxt, final OrderedKeys ok, final Index ix, final long s, final long e) -> { + final Index expected = ix.subindexByKey(s, e); + try (final OrderedKeys ok1 = ok.getOrderedKeysByKeyRange(s, e)) { + final Index.SequentialBuilder b = Index.FACTORY.getSequentialBuilder(); + ok1.forEachLong((final long v) -> { + b.appendKey(v); + return true; + }); + final Index result = b.getIndex(); + assertEquals(ctxt, expected.size(), result.size()); + final Index d = expected.minus(result); + assertEquals(ctxt, 0, d.size()); + } }); - final Index result = b.getIndex(); - assertEquals(ctxt, expected.size(), result.size()); - final Index d = expected.minus(result); - assertEquals(ctxt, 0, d.size()); - } - }); } @Test public void testForAllLongs() { - testIndexBoundaries(ranges2, (final String ctxt, final OrderedKeys ok, final Index ix, - final long s, final long e) -> { - final Index expected = ix.subindexByKey(s, e); - try (final OrderedKeys ok1 = ok.getOrderedKeysByKeyRange(s, e)) { - final Index.SequentialBuilder b = Index.FACTORY.getSequentialBuilder(); - ok1.forAllLongs(b::appendKey); - final Index result = 
b.getIndex(); - assertEquals(ctxt, expected.size(), result.size()); - final Index d = expected.minus(result); - assertEquals(ctxt, 0, d.size()); - } - }); + testIndexBoundaries(ranges2, + (final String ctxt, final OrderedKeys ok, final Index ix, final long s, final long e) -> { + final Index expected = ix.subindexByKey(s, e); + try (final OrderedKeys ok1 = ok.getOrderedKeysByKeyRange(s, e)) { + final Index.SequentialBuilder b = Index.FACTORY.getSequentialBuilder(); + ok1.forAllLongs(b::appendKey); + final Index result = b.getIndex(); + assertEquals(ctxt, expected.size(), result.size()); + final Index d = expected.minus(result); + assertEquals(ctxt, 0, d.size()); + } + }); } @Test public void testForEachLongRange() { - testIndexBoundaries(ranges2, (final String ctxt, final OrderedKeys ok, final Index ix, - final long s, final long e) -> { - final Index expected = ix.subindexByKey(s, e); - try (final OrderedKeys ok1 = ok.getOrderedKeysByKeyRange(s, e)) { - final Index.SequentialBuilder b = Index.FACTORY.getSequentialBuilder(); - ok1.forEachLongRange((final long start, final long end) -> { - b.appendRange(start, end); - return true; + testIndexBoundaries(ranges2, + (final String ctxt, final OrderedKeys ok, final Index ix, final long s, final long e) -> { + final Index expected = ix.subindexByKey(s, e); + try (final OrderedKeys ok1 = ok.getOrderedKeysByKeyRange(s, e)) { + final Index.SequentialBuilder b = Index.FACTORY.getSequentialBuilder(); + ok1.forEachLongRange((final long start, final long end) -> { + b.appendRange(start, end); + return true; + }); + final Index result = b.getIndex(); + assertEquals(ctxt, expected.size(), result.size()); + final Index d = expected.minus(result); + assertEquals(ctxt, 0, d.size()); + } }); - final Index result = b.getIndex(); - assertEquals(ctxt, expected.size(), result.size()); - final Index d = expected.minus(result); - assertEquals(ctxt, 0, d.size()); - } - }); } @Test public void testForAllLongRanges() { - 
testIndexBoundaries(ranges2, (final String ctxt, final OrderedKeys ok, final Index ix, - final long s, final long e) -> { - final Index expected = ix.subindexByKey(s, e); - try (final OrderedKeys ok1 = ok.getOrderedKeysByKeyRange(s, e)) { - final Index.SequentialBuilder b = Index.FACTORY.getSequentialBuilder(); - ok1.forAllLongRanges(b::appendRange); - final Index result = b.getIndex(); - assertEquals(ctxt, expected.size(), result.size()); - final Index d = expected.minus(result); - assertEquals(ctxt, 0, d.size()); - } - }); + testIndexBoundaries(ranges2, + (final String ctxt, final OrderedKeys ok, final Index ix, final long s, final long e) -> { + final Index expected = ix.subindexByKey(s, e); + try (final OrderedKeys ok1 = ok.getOrderedKeysByKeyRange(s, e)) { + final Index.SequentialBuilder b = Index.FACTORY.getSequentialBuilder(); + ok1.forAllLongRanges(b::appendRange); + final Index result = b.getIndex(); + assertEquals(ctxt, expected.size(), result.size()); + final Index d = expected.minus(result); + assertEquals(ctxt, 0, d.size()); + } + }); } @@ -682,8 +666,7 @@ protected void advanceBug(final Index index) { final long subRegionBitMask = regionSize - 1; final int chunkCapacity = 4096; - LongStream.range(0, 20) - .forEach(ri -> index.insertRange(ri * regionSize, ri * regionSize + 99_999)); + LongStream.range(0, 20).forEach(ri -> index.insertRange(ri * regionSize, ri * regionSize + 99_999)); try (final OrderedKeys.Iterator outerOKI = index.getOrderedKeysIterator()) { while (outerOKI.hasMore()) { final OrderedKeys next = outerOKI.getNextOrderedKeysWithLength(chunkCapacity); @@ -701,8 +684,8 @@ protected void advanceBug(final Index index) { final long consumed = innerOKI.advanceAndGetPositionDistance(target); if (consumed + totalConsumed > chunkCapacity) { throw new IllegalStateException( - "Consumed " + consumed + ", after already consuming " - + totalConsumed + ", exceeds capacity " + chunkCapacity); + "Consumed " + consumed + ", after already consuming " + 
totalConsumed + + ", exceeds capacity " + chunkCapacity); } totalConsumed += consumed; } @@ -716,7 +699,7 @@ public void testAdvancePastEnd() { final long[] r = ranges2; final long[] indices = indicesFromRanges(r); try (final OrderedKeys ok = create(indices); - final OrderedKeys.Iterator okIter = ok.getOrderedKeysIterator()) { + final OrderedKeys.Iterator okIter = ok.getOrderedKeysIterator()) { final long last = ok.lastKey(); final OrderedKeys ok2 = okIter.getNextOrderedKeysThrough(last); assertFalse(okIter.hasMore()); @@ -729,7 +712,7 @@ public void testNextOrderedKeysThroughPastEnd() { final long[] r = ranges2; final long[] indices = indicesFromRanges(r); try (final OrderedKeys ok = create(indices); - final OrderedKeys.Iterator okIter = ok.getOrderedKeysIterator()) { + final OrderedKeys.Iterator okIter = ok.getOrderedKeysIterator()) { final long last = ok.lastKey(); final OrderedKeys ok2 = okIter.getNextOrderedKeysThrough(last); assertFalse(okIter.hasMore()); diff --git a/DB/src/test/java/io/deephaven/db/v2/utils/PerfMeasure.java b/DB/src/test/java/io/deephaven/db/v2/utils/PerfMeasure.java index 3d028d5e0e7..0e64793c0db 100644 --- a/DB/src/test/java/io/deephaven/db/v2/utils/PerfMeasure.java +++ b/DB/src/test/java/io/deephaven/db/v2/utils/PerfMeasure.java @@ -72,8 +72,7 @@ public long getAllocatedBytes() { static { String s = "Used allocated memory readings using "; - java.lang.management.ThreadMXBean t = - java.lang.management.ManagementFactory.getThreadMXBean(); + java.lang.management.ThreadMXBean t = java.lang.management.ManagementFactory.getThreadMXBean(); if (t instanceof com.sun.management.ThreadMXBean) { amb = (com.sun.management.ThreadMXBean) t; amb.setThreadAllocatedMemoryEnabled(true); diff --git a/DB/src/test/java/io/deephaven/db/v2/utils/PerfStats.java b/DB/src/test/java/io/deephaven/db/v2/utils/PerfStats.java index 64763d524a0..9ccfc3ef32d 100644 --- a/DB/src/test/java/io/deephaven/db/v2/utils/PerfStats.java +++ 
b/DB/src/test/java/io/deephaven/db/v2/utils/PerfStats.java @@ -70,8 +70,7 @@ public void print(final String pfx, final double factor, final int[] ps) { final double stddev = stddev() * factor; final StringBuffer sb = new StringBuffer(pfx); final NumberFormat nf = NumberFormat.getInstance(); - sb.append(String.format(" n=%s, avg=" + dFmt + " stddev=" + dFmt, nf.format(nsamples()), - avg, stddev)); + sb.append(String.format(" n=%s, avg=" + dFmt + " stddev=" + dFmt, nf.format(nsamples()), avg, stddev)); for (int p : ps) { final double pct = percentile(p) * factor; sb.append(String.format(", p[%d]=" + dFmt, p, pct)); @@ -80,9 +79,9 @@ public void print(final String pfx, final double factor, final int[] ps) { } public static void comparePrint( - final PerfStats p1, final String n1, - final PerfStats p2, final String n2, - final String pfx) { + final PerfStats p1, final String n1, + final PerfStats p2, final String n2, + final String pfx) { final StringBuilder sb = new StringBuilder(pfx); sb.append(n1); sb.append("/"); diff --git a/DB/src/test/java/io/deephaven/db/v2/utils/RedirectionIndexLockFreeTest.java b/DB/src/test/java/io/deephaven/db/v2/utils/RedirectionIndexLockFreeTest.java index b64994189fb..23bc1dfa38a 100644 --- a/DB/src/test/java/io/deephaven/db/v2/utils/RedirectionIndexLockFreeTest.java +++ b/DB/src/test/java/io/deephaven/db/v2/utils/RedirectionIndexLockFreeTest.java @@ -23,8 +23,7 @@ public class RedirectionIndexLockFreeTest extends LiveTableTestCase { private static final int testDurationInSeconds = 15; public void testRedirectionIndex() throws InterruptedException { - final RedirectionIndexLockFreeImpl index = - new RedirectionIndexLockFreeFactory().createRedirectionIndex(10); + final RedirectionIndexLockFreeImpl index = new RedirectionIndexLockFreeFactory().createRedirectionIndex(10); index.startTrackingPrevValues(); final long initialStep = LogicalClock.DEFAULT.currentStep(); Writer writer = new Writer("writer", initialStep, index); @@ -116,8 +115,7 @@ 
protected final void doOneIteration() { final long logicalClockStartValue = LogicalClock.DEFAULT.currentValue(); final long stepFromCycle = LogicalClock.getStep(logicalClockStartValue); final LogicalClock.State state = LogicalClock.getState(logicalClockStartValue); - final long step = - state == LogicalClock.State.Updating ? stepFromCycle - 1 : stepFromCycle; + final long step = state == LogicalClock.State.Updating ? stepFromCycle - 1 : stepFromCycle; final int keysInThisGeneration = (int) ((step - initialStep) * 1000 + 1000); final Random rng = new Random(step); @@ -130,13 +128,11 @@ protected final void doOneIteration() { final TLongArrayList mmExpect = new TLongArrayList(); final TLongArrayList mmActual = new TLongArrayList(); - // Look at the map in the reverse order of the writer, just to avoid any unintended - // synchronization. + // Look at the map in the reverse order of the writer, just to avoid any unintended synchronization. // These keys are expected to not exist. for (int ii = keys.length - 1; ii >= numKeysToInsert; --ii) { final long key = keys[ii]; - final long actualValue = - state == LogicalClock.State.Updating ? ix.getPrev(key) : ix.get(key); + final long actualValue = state == LogicalClock.State.Updating ? ix.getPrev(key) : ix.get(key); if (actualValue != -1) { mmKeys.add(key); mmExpect.add(-1); @@ -148,8 +144,7 @@ protected final void doOneIteration() { for (int ii = numKeysToInsert - 1; ii >= 0; --ii) { final long key = keys[ii]; final long expectedValue = step * oneBillion + ii; - final long actualValue = - state == LogicalClock.State.Updating ? ix.getPrev(key) : ix.get(key); + final long actualValue = state == LogicalClock.State.Updating ? 
ix.getPrev(key) : ix.get(key); if (expectedValue != actualValue) { mmKeys.add(key); mmExpect.add(expectedValue); @@ -186,9 +181,9 @@ public boolean hasFailed() { @Override public String toString() { return String.format( - "--- %s: iterations: %d, good update: %d, good idle: %d, bad update: %d, bad idle: %d, incoherent (no judgment): %d ---", - name, numIterations, goodUpdateCycles, goodIdleCycles, badUpdateCycles, - badIdleCycles, incoherentCycles); + "--- %s: iterations: %d, good update: %d, good idle: %d, bad update: %d, bad idle: %d, incoherent (no judgment): %d ---", + name, numIterations, goodUpdateCycles, goodIdleCycles, badUpdateCycles, badIdleCycles, + incoherentCycles); } } @@ -205,8 +200,7 @@ protected final void doOneIteration() { keysInThisGeneration.setValue((int) ((step - initialStep) * 1000 + 1000)); final Random rng = new Random(step); final int numKeysToInsert = rng.nextInt(keysInThisGeneration.getValue()); - // A bit of a waste because we only look at the first 'numKeysToInsert' keys, but - // that's ok. + // A bit of a waste because we only look at the first 'numKeysToInsert' keys, but that's ok. 
long[] keys = fillAndShuffle(rng, keysInThisGeneration.getValue()); final RedirectionIndexLockFreeImpl ix = index; for (int ii = 0; ii < numKeysToInsert; ++ii) { @@ -222,7 +216,7 @@ protected final void doOneIteration() { // waste some time doing something else final RedirectionIndexLockFreeImpl privateIndex = - new RedirectionIndexLockFreeFactory().createRedirectionIndex(10); + new RedirectionIndexLockFreeFactory().createRedirectionIndex(10); for (long ii = 0; ii < keysInThisGeneration.getValue() * 4; ++ii) { privateIndex.put(ii, ii); } diff --git a/DB/src/test/java/io/deephaven/db/v2/utils/RedirectionIndexTest.java b/DB/src/test/java/io/deephaven/db/v2/utils/RedirectionIndexTest.java index 121422de9a9..66479c8318c 100644 --- a/DB/src/test/java/io/deephaven/db/v2/utils/RedirectionIndexTest.java +++ b/DB/src/test/java/io/deephaven/db/v2/utils/RedirectionIndexTest.java @@ -13,13 +13,11 @@ public class RedirectionIndexTest extends LiveTableTestCase { private final Logger log = LoggerFactory.getLogger(RedirectionIndexTest.class); public void testBasic() { - final RedirectionIndex redirectionIndex = - RedirectionIndex.FACTORY.createRedirectionIndex(8); + final RedirectionIndex redirectionIndex = RedirectionIndex.FACTORY.createRedirectionIndex(8); for (int i = 0; i < 3; i++) { redirectionIndex.put(i, i * 2); } - final RedirectionIndex redirectionIndex1 = - RedirectionIndex.FACTORY.createRedirectionIndex(8); + final RedirectionIndex redirectionIndex1 = RedirectionIndex.FACTORY.createRedirectionIndex(8); for (int i = 0; i < 3; i++) { redirectionIndex1.put(i * 2, i * 4); } @@ -81,8 +79,7 @@ public void testContiguous() { redirectionIndex.put(ii, 200 + ii * 3); } - // Confirm that get() returns 200 + ii * 3; meanwhile getPrev() still returns 100 + ii * - // 2 + // Confirm that get() returns 200 + ii * 3; meanwhile getPrev() still returns 100 + ii * 2 for (int ii = 0; ii < 100; ++ii) { assertEquals(200 + ii * 3, redirectionIndex.get(ii)); } diff --git 
a/DB/src/test/java/io/deephaven/db/v2/utils/ShiftDataTest.java b/DB/src/test/java/io/deephaven/db/v2/utils/ShiftDataTest.java index a2a06532c3f..c79210d3038 100644 --- a/DB/src/test/java/io/deephaven/db/v2/utils/ShiftDataTest.java +++ b/DB/src/test/java/io/deephaven/db/v2/utils/ShiftDataTest.java @@ -109,15 +109,13 @@ public void shift(long start, long end, long offset) { index = getSortedIndex(1, 2, 3, 4, 5, 6, 7, 8); added = getSortedIndex(1, 3, 5, 7); removed = getSortedIndex(); - checkExpectations(index, removed, added, - new long[][] {{3, 3, 4}, {2, 2, 3}, {1, 1, 2}, {0, 0, 1}}); + checkExpectations(index, removed, added, new long[][] {{3, 3, 4}, {2, 2, 3}, {1, 1, 2}, {0, 0, 1}}); // was 2,4,6,8,10,12,16 index = getSortedIndex(1, 2, 3, 4, 8, 16); added = getSortedIndex(1, 3); removed = getSortedIndex(6, 10, 12); - checkExpectations(index, removed, added, - new long[][] {{3, 3, 1}, {1, 1, 2}, {0, 0, 1}, {6, 6, -1}}); + checkExpectations(index, removed, added, new long[][] {{3, 3, 1}, {1, 1, 2}, {0, 0, 1}, {6, 6, -1}}); // was 100,200,300,400,500,600,700 index = getSortedIndex(100, 200, 230, 240, 250, 260, 270, 500, 550, 700); @@ -174,8 +172,7 @@ public void testRandom() { Index added = getRandomIndex(20, 1, 10); Index removed = getRandomRemoves(initialIndex, 2); Index finalIndex = getFinalIndex(initialIndex, added, removed); - final long resultKeys[] = - new long[(int) Math.max(initialIndex.size(), finalIndex.size())]; + final long resultKeys[] = new long[(int) Math.max(initialIndex.size(), finalIndex.size())]; int pos = 0; for (Index.Iterator it = initialIndex.iterator(); it.hasNext();) { resultKeys[pos++] = it.nextLong(); @@ -197,8 +194,7 @@ public void shift(long start, long end, long offset) { }); Index addedPos = shiftData.getAddedPos(); - for (Index.Iterator iterator = addedPos.iterator(), valueIt = added.iterator(); iterator - .hasNext();) { + for (Index.Iterator iterator = addedPos.iterator(), valueIt = added.iterator(); iterator.hasNext();) { 
resultKeys[((int) iterator.nextLong())] = valueIt.nextLong(); } diff --git a/DB/src/test/java/io/deephaven/db/v2/utils/ShiftedOrderedKeysTest.java b/DB/src/test/java/io/deephaven/db/v2/utils/ShiftedOrderedKeysTest.java index ba91746f40c..9f9beafaf7f 100644 --- a/DB/src/test/java/io/deephaven/db/v2/utils/ShiftedOrderedKeysTest.java +++ b/DB/src/test/java/io/deephaven/db/v2/utils/ShiftedOrderedKeysTest.java @@ -13,8 +13,7 @@ protected OrderedKeys create(long... values) { for (int i = 0; i < values.length; ++i) { shifted[i] = values[i] + SHIFT; } - final OrderedKeys other = - OrderedKeys.wrapKeyIndicesChunkAsOrderedKeys(LongChunk.chunkWrap(shifted)); + final OrderedKeys other = OrderedKeys.wrapKeyIndicesChunkAsOrderedKeys(LongChunk.chunkWrap(shifted)); return ShiftedOrderedKeys.wrap(closeOnTearDownCase(other), -SHIFT); } @@ -22,8 +21,7 @@ protected OrderedKeys create(long... values) { @Override // The original test uses some large keys that overflow when shifted. public void testCanConstructOrderedKeys() { - final long[] indices = - indicesFromRanges(0, 4, Long.MAX_VALUE - 4 - SHIFT, Long.MAX_VALUE - SHIFT); + final long[] indices = indicesFromRanges(0, 4, Long.MAX_VALUE - 4 - SHIFT, Long.MAX_VALUE - SHIFT); try (final OrderedKeys OK = create(indices)) { assertContentsByIndices(indices, OK); } diff --git a/DB/src/test/java/io/deephaven/db/v2/utils/SingleRangeTest.java b/DB/src/test/java/io/deephaven/db/v2/utils/SingleRangeTest.java index 8a5eb288914..753ae9b81b6 100644 --- a/DB/src/test/java/io/deephaven/db/v2/utils/SingleRangeTest.java +++ b/DB/src/test/java/io/deephaven/db/v2/utils/SingleRangeTest.java @@ -116,12 +116,12 @@ public void testRemoveKey() { } private static void insertResultsInSingle( - final long start1, - final long end1, - final long start2, - final long end2, - final long startExpected, - final long endExpected) { + final long start1, + final long end1, + final long start2, + final long end2, + final long startExpected, + final long endExpected) { 
final TreeIndex ix = new TreeIndex(SingleRange.make(start1, end1)); Runnable check = () -> { assertTrue(ix.getImpl() instanceof SingleRange); @@ -163,11 +163,11 @@ public void testInsertIndexAndInsertRange() { } private static void removeResultsCheck( - final long start1, - final long end1, - final long start2, - final long end2, - final Consumer check) { + final long start1, + final long end1, + final long start2, + final long end2, + final Consumer check) { final TreeIndex ix = new TreeIndex(SingleRange.make(start1, end1)); final TreeIndex ix2 = new TreeIndex(SingleRange.make(start2, end2)); ix.remove(ix2); @@ -183,10 +183,10 @@ private static void removeResultsCheck( } private static void removeResultsCheck( - final long start1, - final long end1, - final long start2, - final long end2) { + final long start1, + final long end1, + final long start2, + final long end2) { final Consumer check; if (start1 < start2 && end2 < end1) { // hole. @@ -416,15 +416,12 @@ private static void checkBinarySearch(final TreeIndex ix) { assertEquals(end - start + 1, ix.size()); final Index.SearchIterator it = ix.searchIterator(); assertTrue(it.hasNext()); - assertEquals(-1, - it.binarySearchValue((final long key, final int dir) -> (int) ((start - 1) - key), 1)); + assertEquals(-1, it.binarySearchValue((final long key, final int dir) -> (int) ((start - 1) - key), 1)); for (long v = start; v <= end; ++v) { final long compValue = v; - assertEquals(v, it.binarySearchValue( - (final long key, final int dir) -> (int) ((compValue) - key), 1)); + assertEquals(v, it.binarySearchValue((final long key, final int dir) -> (int) ((compValue) - key), 1)); } - assertEquals(end, - it.binarySearchValue((final long key, final int dir) -> (int) ((end + 1) - key), 1)); + assertEquals(end, it.binarySearchValue((final long key, final int dir) -> (int) ((end + 1) - key), 1)); } @@ -447,11 +444,9 @@ public void testGetKeysForPositions() { final long[] positions = new long[] {0, 1, card - 2, card - 1}; 
final long[] expected = new long[] {10, 11, 29, 30}; final long[] result = new long[4]; - final WritableLongChunk resultsChunk = - WritableLongChunk.writableChunkWrap(result); + final WritableLongChunk resultsChunk = WritableLongChunk.writableChunkWrap(result); final LongChunk positionsChunk = WritableLongChunk.chunkWrap(positions); - ix.getKeysForPositions(new LongChunkIterator(positionsChunk), - new LongChunkAppender(resultsChunk)); + ix.getKeysForPositions(new LongChunkIterator(positionsChunk), new LongChunkAppender(resultsChunk)); assertArrayEquals(expected, result); } diff --git a/DB/src/test/java/io/deephaven/db/v2/utils/SortedIndexTestBase.java b/DB/src/test/java/io/deephaven/db/v2/utils/SortedIndexTestBase.java index 1534ec01849..37700eb8b72 100644 --- a/DB/src/test/java/io/deephaven/db/v2/utils/SortedIndexTestBase.java +++ b/DB/src/test/java/io/deephaven/db/v2/utils/SortedIndexTestBase.java @@ -108,8 +108,7 @@ public void testInvert() { final long[] fullKeys = generateFullKeys(maxSize, generator); final Index fullIndex = getSortedIndex(fullKeys); - final Pair pp = - generateSubset(fullKeys, fullIndex, Integer.MAX_VALUE, generator); + final Pair pp = generateSubset(fullKeys, fullIndex, Integer.MAX_VALUE, generator); final Index subsetIndex = pp.first; final TLongList expected = pp.second; TestCase.assertEquals(subsetIndex.size(), expected.size()); @@ -143,8 +142,7 @@ public void testInvertWithMax() { final long[] fullKeys = generateFullKeys(maxSize, generator); final Index fullIndex = getSortedIndex(fullKeys); final int maxPosition = generator.nextInt(fullIndex.intSize()); - final Pair pp = - generateSubset(fullKeys, fullIndex, maxPosition, generator); + final Pair pp = generateSubset(fullKeys, fullIndex, maxPosition, generator); final Index subsetIndex = pp.first; final TLongList expected = pp.second; @@ -160,8 +158,7 @@ public void testInvertWithMax() { } } - private long[] generateFullKeys(@SuppressWarnings("SameParameterValue") int maxSize, - Random 
generator) { + private long[] generateFullKeys(@SuppressWarnings("SameParameterValue") int maxSize, Random generator) { final long[] fullKeys; switch (generator.nextInt(2)) { case 0: @@ -217,12 +214,10 @@ private long[] generateKeysMethod2(int maxSize, Random generator) { } /** - * Generate a subset of the keys in fullKeys up to maxPosition positions in using generator. - * Returns a pair containing the subset of fullKeys as an Index and the expected positions as a - * TLongList. + * Generate a subset of the keys in fullKeys up to maxPosition positions in using generator. Returns a pair + * containing the subset of fullKeys as an Index and the expected positions as a TLongList. */ - private Pair generateSubset(long[] fullKeys, Index fullIndex, int maxPosition, - Random generator) { + private Pair generateSubset(long[] fullKeys, Index fullIndex, int maxPosition, Random generator) { switch (generator.nextInt(2)) { case 0: return generateSubsetMethod1(fullKeys, fullIndex, maxPosition, generator); @@ -236,8 +231,8 @@ private Pair generateSubset(long[] fullKeys, Index fullIndex, /** * For each key, randomly flip a count as to whether it belongs in the output. */ - private Pair generateSubsetMethod1(long[] fullKeys, - @SuppressWarnings("unused") Index fullIndex, int maxPosition, Random generator) { + private Pair generateSubsetMethod1(long[] fullKeys, @SuppressWarnings("unused") Index fullIndex, + int maxPosition, Random generator) { final boolean subset[] = new boolean[(int) fullIndex.lastKey() + 1]; final double density = generator.nextDouble(); @@ -273,12 +268,11 @@ private Pair generateSubsetMethod1(long[] fullKeys, } /** - * For each run of the index, flip a coin to determine if it is included; then randomly select a - * start and end within each range. + * For each run of the index, flip a coin to determine if it is included; then randomly select a start and end + * within each range. 
*/ - private Pair generateSubsetMethod2( - @SuppressWarnings("unused") long[] fullKeys, Index fullIndex, int maxPosition, - Random generator) { + private Pair generateSubsetMethod2(@SuppressWarnings("unused") long[] fullKeys, Index fullIndex, + int maxPosition, Random generator) { final boolean subset[] = new boolean[(int) fullKeys[fullKeys.length - 1] + 1]; final TLongList expected = new TLongArrayList(); @@ -402,14 +396,12 @@ private void testRangeByPos(long... keys) { for (int i = 0; i < keys.length + 2; i++) { for (int j = i; j < keys.length + 3; j++) { final int start = min(i, keys.length); - final long[] range = - Arrays.copyOfRange(keys, start, max(start, min(j, keys.length))); + final long[] range = Arrays.copyOfRange(keys, start, max(start, min(j, keys.length))); final Index subIndex = index.subindexByPos(i, j); try { compareIndexAndKeyValues(subIndex, range); } catch (AssertionError assertionError) { - System.err.println( - "index=" + index + ", subIndex=" + subIndex + ", i=" + i + ", j=" + j); + System.err.println("index=" + index + ", subIndex=" + subIndex + ", i=" + i + ", j=" + j); throw assertionError; } } @@ -453,7 +445,7 @@ public void testMinusSimple() { compareIndexAndKeyValues(result, doMinusSimple(allKeys, subKeys)); subKeys = stringToKeys( - 
"0-12159,12162-12163,12166-12167,12172-12175,12178-12179,12182-12325,12368-33805,33918-33977,33980-34109,34168-34169,34192-34193,34309-34312,34314,34317-34323,34356-34491,34494-34495,34502-34503,34506-34509,34512-34515,34520-34521,34524-34525,34528-34529,34540-34541,34544-34545,34548-34549,34552-34553,34574-34589,34602-34675,34678-34679,34688-34689,34694-34695,34700-34705,34716-34717,34722-34723,34732-34733,34738-34739,34774,34785,34791-34794,34796-34799,34801-34803,34807-34808,34813,34816,34828-34829,34856-34857,34869,34875-34884,34892-34899,34902-34925,34930-34932,34934-34938,34958-34959,34966-34973,35038-35065,35068-35075,35212-35363,35496-35511,35542-44097,44104-54271,54291,54304,54308-54310,54373-54749,54751-54756,54758-55040,55112,55114-55115,55117,55120-55213,55321-55322,55325-55326,55627,55630-55631,55634-55635,55638,55640-55643,55646-55647,55650-55651,55654-55655,55658-55659,55661-55690,55692-55698,55702-55710,55712-55713,55716-55717,55719-55960,56059-56134,56185-56186,56255-56257,56259,56341-56628,56695-56866,56878-56880,56882-57082,57105-65108,64977-66622,66625-66658,66661-66662,66665-66668,66671-66834,66837-66840"); + 
"0-12159,12162-12163,12166-12167,12172-12175,12178-12179,12182-12325,12368-33805,33918-33977,33980-34109,34168-34169,34192-34193,34309-34312,34314,34317-34323,34356-34491,34494-34495,34502-34503,34506-34509,34512-34515,34520-34521,34524-34525,34528-34529,34540-34541,34544-34545,34548-34549,34552-34553,34574-34589,34602-34675,34678-34679,34688-34689,34694-34695,34700-34705,34716-34717,34722-34723,34732-34733,34738-34739,34774,34785,34791-34794,34796-34799,34801-34803,34807-34808,34813,34816,34828-34829,34856-34857,34869,34875-34884,34892-34899,34902-34925,34930-34932,34934-34938,34958-34959,34966-34973,35038-35065,35068-35075,35212-35363,35496-35511,35542-44097,44104-54271,54291,54304,54308-54310,54373-54749,54751-54756,54758-55040,55112,55114-55115,55117,55120-55213,55321-55322,55325-55326,55627,55630-55631,55634-55635,55638,55640-55643,55646-55647,55650-55651,55654-55655,55658-55659,55661-55690,55692-55698,55702-55710,55712-55713,55716-55717,55719-55960,56059-56134,56185-56186,56255-56257,56259,56341-56628,56695-56866,56878-56880,56882-57082,57105-65108,64977-66622,66625-66658,66661-66662,66665-66668,66671-66834,66837-66840"); subIndex = getSortedIndex(subKeys); result = index.minus(subIndex); compareIndexAndKeyValues(result, doMinusSimple(allKeys, subKeys)); @@ -471,8 +463,7 @@ public void testUnionIntoFullLeaf() { indexBuilder1.addRange(start + ii * 3, start + ii * 3 + 1); } - // This, actually forces the split. We'll have short nodes (rather than ints) with the - // packing, because this + // This, actually forces the split. We'll have short nodes (rather than ints) with the packing, because this // range is less than 2^15. 
indexBuilder1.addRange(32000, 32001); @@ -509,7 +500,7 @@ public void testUnionIntoFullLeaf() { public void testFunnyOverLap() { // doTestFunnyOverlap("0-12159,12162-12163,12166-12167,12172-12175,12178-12179,12182-12325,12368-33805,33918-33977,33980-34109,34168-34169,34192-34193,34309-34312,34314,34317-34323,34356-34491,34494-34495,34502-34503,34506-34509,34512-34515,34520-34521,34524-34525,34528-34529,34540-34541,34544-34545,34548-34549,34552-34553,34574-34589,34602-34675,34678-34679,34688-34689,34694-34695,34700-34705,34716-34717,34722-34723,34732-34733,34738-34739,34774,34785,34791-34794,34796-34799,34801-34803,34807-34808,34813,34816,34828-34829,34856-34857,34869,34875-34884,34892-34899,34902-34925,34930-34932,34934-34938,34958-34959,34966-34973,35038-35065,35068-35075,35212-35363,35496-35511,35542-44097,44104-54271,54291,54304,54308-54310,54373-54749,54751-54756,54758-55040,55112,55114-55115,55117,55120-55213,55321-55322,55325-55326,55627,55630-55631,55634-55635,55638,55640-55643,55646-55647,55650-55651,55654-55655,55658-55659,55661-55690,55692-55698,55702-55710,55712-55713,55716-55717,55719-55960,56059-56134,56185-56186,56255-56257,56259,56341-56628,56695-56866,56878-56880,56882-57082,57105-65108,64977-66622,66625-66658,66661-66662,66665-66668,66671-66834,66837-66840"); doTestFunnyOverlap( - 
"0-6509,6510-6619,6620-17383,17384-18031,18158-47065,47082-47099,47104-47593,47616-56079,56080-71737,71858-83613,83616-83701,83719,83721-83749,83752-83761,83764,83769-86307,86308-87746,87762-87770,87774-87841,87845-87847,87853-87878,87880,87882-87933,87936-87950,87954-87956,87958-87967,87972-87980,87982,87984-87988,87991-88137,88139-88140,88167-88198,88228,88231-88289,88293,88299-88362,88364,88378-88381,88388-88389,88394-88395,88398-88399,88402-88405,88408-88415,88420-88427,88430-88437,88440-88441,88519,88521-88588,88597-92547,92672-93207,93224-95745,95630-102119,102284-106111,106124-106125,106134-106135,106137-106141,106157-106173,106323-106326,106330-106377,106379-106380,106382-106384,106386,106390-106395,106454-106665,106788-106855,106932-108809,108830-113235,113420-113547,113580-113587,113596-113643,113646-113771"); + "0-6509,6510-6619,6620-17383,17384-18031,18158-47065,47082-47099,47104-47593,47616-56079,56080-71737,71858-83613,83616-83701,83719,83721-83749,83752-83761,83764,83769-86307,86308-87746,87762-87770,87774-87841,87845-87847,87853-87878,87880,87882-87933,87936-87950,87954-87956,87958-87967,87972-87980,87982,87984-87988,87991-88137,88139-88140,88167-88198,88228,88231-88289,88293,88299-88362,88364,88378-88381,88388-88389,88394-88395,88398-88399,88402-88405,88408-88415,88420-88427,88430-88437,88440-88441,88519,88521-88588,88597-92547,92672-93207,93224-95745,95630-102119,102284-106111,106124-106125,106134-106135,106137-106141,106157-106173,106323-106326,106330-106377,106379-106380,106382-106384,106386,106390-106395,106454-106665,106788-106855,106932-108809,108830-113235,113420-113547,113580-113587,113596-113643,113646-113771"); } private void doTestFunnyOverlap(@SuppressWarnings("SameParameterValue") String input) { @@ -735,8 +726,7 @@ public void testMinusIndexOps() { final Index result = fullIndex.minus(subIndex); - compareIndexAndKeyValues(m2, result, - doMinusSimple(booleanSetToKeys(fullSet), booleanSetToKeys(subSet))); + compareIndexAndKeyValues(m2, 
result, doMinusSimple(booleanSetToKeys(fullSet), booleanSetToKeys(subSet))); } } @@ -784,9 +774,7 @@ private long[] doMinusSimple(long[] allKeys, long[] subKeys) { private void testRangeByKey(final String m, long... keys) { final Index index = getSortedIndex(keys); - for (long i = - (keys.length > 0 ? keys[0] - 2 : 1); i < (keys.length > 0 ? keys[keys.length - 1] : 0) - + 3; i++) { + for (long i = (keys.length > 0 ? keys[0] - 2 : 1); i < (keys.length > 0 ? keys[keys.length - 1] : 0) + 3; i++) { for (long j = i; j < (keys.length > 0 ? keys[keys.length - 1] : 0) + 3; j++) { final TLongArrayList data = new TLongArrayList(); for (int k = 0; k < keys.length; k++) { @@ -800,8 +788,7 @@ private void testRangeByKey(final String m, long... keys) { try { compareIndexAndKeyValues(m, subIndex, range); } catch (AssertionError assertionError) { - System.err.println( - "index=" + index + ", subIndex=" + subIndex + ", i=" + i + ", j=" + j); + System.err.println("index=" + index + ", subIndex=" + subIndex + ", i=" + i + ", j=" + j); throw assertionError; } } @@ -838,8 +825,8 @@ private void compareIndexAndKeyValues(final Index index, final long[] keys) { } private void compareIndexAndKeyValues(final String pfx, final Index index, final long[] keys) { - final String m = ((pfx != null && pfx.length() > 0) ? pfx + " " : "") + "index=" + index - + ", keys=" + a2s(keys); + final String m = + ((pfx != null && pfx.length() > 0) ? 
pfx + " " : "") + "index=" + index + ", keys=" + a2s(keys); final SortedIndex.SearchIterator iterator = index.searchIterator(); for (int i = 0; i < keys.length; i++) { assertTrue(m + " iterator shouldbn't be empty", iterator.hasNext()); @@ -889,8 +876,7 @@ public void testRandomInsertMinus() { for (int ii = 0; ii < 500; ++ii) { if (ii % printInterval == printInterval - 1) { - System.out - .println(ii + ": " + (System.currentTimeMillis() - startTime) + "ms: " + check); + System.out.println(ii + ": " + (System.currentTimeMillis() - startTime) + "ms: " + check); } final IndexBuilder builder = getFactory().getRandomBuilder(); @@ -931,8 +917,7 @@ public void testRandomInsertMinus() { checkB.validate(m); checkA.insert(checkB); checkA.validate(m); - Assert.assertion(checkA.equals(check), "checkA.equals(check)", check, "check", checkA, - "checkA"); + Assert.assertion(checkA.equals(check), "checkA.equals(check)", check, "check", checkA, "checkA"); } } @@ -941,43 +926,33 @@ public void testChunkInsertAndRemove() { final Supplier[] suppliers = new Supplier[] { () -> TreeIndexImpl.EMPTY - , () -> SingleRange.make(0, 0), - () -> SingleRange.make(4_000_000_000L, 4_000_000_000L), + , () -> SingleRange.make(0, 0), () -> SingleRange.make(4_000_000_000L, 4_000_000_000L), () -> SingleRange.make(RspArray.BLOCK_SIZE, 2 * RspArray.BLOCK_SIZE - 1), () -> SingleRange.make(RspArray.BLOCK_SIZE, 4 * RspArray.BLOCK_SIZE - 1), - () -> SingleRange.make(0, 9_999), - () -> SingleRange.make(4_000_000_000L, 4_000_009_999L) + () -> SingleRange.make(0, 9_999), () -> SingleRange.make(4_000_000_000L, 4_000_009_999L) , SortedRanges::makeEmpty, () -> SortedRanges.makeSingleElement(0), () -> SortedRanges.makeSingleElement(4_000_000_000L), - () -> SortedRanges.makeSingleRange(RspArray.BLOCK_SIZE, - 2 * RspArray.BLOCK_SIZE - 1), - () -> SortedRanges.makeSingleRange(RspArray.BLOCK_SIZE, - 2 * RspArray.BLOCK_SIZE - 100), - () -> SortedRanges.makeSingleRange(2 * RspArray.BLOCK_SIZE - 100, - 2 * 
RspArray.BLOCK_SIZE), - () -> SortedRanges.makeSingleRange(RspArray.BLOCK_SIZE, - 4 * RspArray.BLOCK_SIZE - 1), + () -> SortedRanges.makeSingleRange(RspArray.BLOCK_SIZE, 2 * RspArray.BLOCK_SIZE - 1), + () -> SortedRanges.makeSingleRange(RspArray.BLOCK_SIZE, 2 * RspArray.BLOCK_SIZE - 100), + () -> SortedRanges.makeSingleRange(2 * RspArray.BLOCK_SIZE - 100, 2 * RspArray.BLOCK_SIZE), + () -> SortedRanges.makeSingleRange(RspArray.BLOCK_SIZE, 4 * RspArray.BLOCK_SIZE - 1), () -> SortedRanges.makeSingleRange(0, 9_999), () -> SortedRanges.makeSingleRange(4_000_000_000L, 4_000_009_999L), () -> { - final TreeIndexImpl r = - SortedRanges.tryMakeForKnownRangeKnownCount(100, 10, 10_010); + final TreeIndexImpl r = SortedRanges.tryMakeForKnownRangeKnownCount(100, 10, 10_010); r.ixInsertRange(0, 100); r.ixInsert(256); r.ixInsertRange(1024, 9000); return r; }, () -> TreeIndexImpl.fromChunk(LongChunk.chunkWrap(new long[] {}), 0, 0, true), - () -> TreeIndexImpl.fromChunk(LongChunk.chunkWrap(new long[] {0, 1, 2, 3}), 0, 4, - true), - () -> TreeIndexImpl.fromChunk(LongChunk.chunkWrap(new long[] {0, 1, 2, 3, 4, 5, 6}), - 2, 3, true) + () -> TreeIndexImpl.fromChunk(LongChunk.chunkWrap(new long[] {0, 1, 2, 3}), 0, 4, true), + () -> TreeIndexImpl.fromChunk(LongChunk.chunkWrap(new long[] {0, 1, 2, 3, 4, 5, 6}), 2, 3, true) , RspBitmap::makeEmpty, () -> RspBitmap.makeSingleRange(0, 0), () -> RspBitmap.makeSingleRange(4_000_000_000L, 4_000_000_000L), () -> RspBitmap.makeSingleRange(RspArray.BLOCK_SIZE, 2 * RspArray.BLOCK_SIZE - 1), () -> RspBitmap.makeSingleRange(RspArray.BLOCK_SIZE, 2 * RspArray.BLOCK_SIZE - 100), - () -> RspBitmap.makeSingleRange(2 * RspArray.BLOCK_SIZE - 100, - 2 * RspArray.BLOCK_SIZE), + () -> RspBitmap.makeSingleRange(2 * RspArray.BLOCK_SIZE - 100, 2 * RspArray.BLOCK_SIZE), () -> RspBitmap.makeSingleRange(RspArray.BLOCK_SIZE, 4 * RspArray.BLOCK_SIZE - 1), () -> RspBitmap.makeEmpty().ixInsert(4_000_000_000L).ixInsert(4_000_000_002L), () -> RspBitmap.makeSingleRange(0, 
9_999), @@ -996,10 +971,9 @@ public void testChunkInsertAndRemove() { }, () -> { final TreeIndexImpl r = RspBitmap.makeEmpty(); LongStream - .rangeClosed(RspArray.BLOCK_SIZE * 4, - RspArray.BLOCK_SIZE * 4 - + ArrayContainer.SWITCH_CONTAINER_CARDINALITY_THRESHOLD * 2) - .filter(l -> (l & 1) == 0).forEach(r::ixInsert); + .rangeClosed(RspArray.BLOCK_SIZE * 4, + RspArray.BLOCK_SIZE * 4 + ArrayContainer.SWITCH_CONTAINER_CARDINALITY_THRESHOLD * 2) + .filter(l -> (l & 1) == 0).forEach(r::ixInsert); return r; } }; @@ -1024,7 +998,7 @@ public void testChunkInsertAndRemove() { final Index actualAfterInsert2 = fromTreeIndexImpl(lhs.get()); try (final WritableLongChunk toBeSliced = - WritableLongChunk.makeWritableChunk(asKeyIndicesChunk.size() + 2048)) { + WritableLongChunk.makeWritableChunk(asKeyIndicesChunk.size() + 2048)) { toBeSliced.copyFromChunk(asKeyIndicesChunk, 0, 1024, asKeyIndicesChunk.size()); actualAfterInsert2.insert(toBeSliced, 1024, asKeyIndicesChunk.size()); } @@ -1042,7 +1016,7 @@ public void testChunkInsertAndRemove() { final Index actualAfterRemove2 = fromTreeIndexImpl(lhs.get()); try (final WritableLongChunk toBeSliced = - WritableLongChunk.makeWritableChunk(asKeyIndicesChunk.size() + 2048)) { + WritableLongChunk.makeWritableChunk(asKeyIndicesChunk.size() + 2048)) { toBeSliced.copyFromChunk(asKeyIndicesChunk, 0, 1024, asKeyIndicesChunk.size()); actualAfterRemove2.remove(toBeSliced, 1024, asKeyIndicesChunk.size()); } @@ -1058,8 +1032,7 @@ public void testChunkInsertAndRemove() { final Index rhsIndex = fromTreeIndexImpl(rhs.get()); lhsIndex.insert(rhsIndex); - lhsTreeIndexImpl.ixInsertSecondHalf(rhsIndex.asKeyIndicesChunk(), 0, - rhsIndex.intSize()); + lhsTreeIndexImpl.ixInsertSecondHalf(rhsIndex.asKeyIndicesChunk(), 0, rhsIndex.intSize()); assertEquals(lhsIndex, fromTreeIndexImpl(lhsTreeIndexImpl)); } diff --git a/DB/src/test/java/io/deephaven/db/v2/utils/TestColumnsToRowsTransform.java 
b/DB/src/test/java/io/deephaven/db/v2/utils/TestColumnsToRowsTransform.java index 5f0328c175c..8c203138297 100644 --- a/DB/src/test/java/io/deephaven/db/v2/utils/TestColumnsToRowsTransform.java +++ b/DB/src/test/java/io/deephaven/db/v2/utils/TestColumnsToRowsTransform.java @@ -18,33 +18,29 @@ public class TestColumnsToRowsTransform extends LiveTableTestCase { public void testStatic() { final Table in = TableTools.newTable(stringCol("Sym", "AAPL", "SPY"), intCol("Val1", 1, 2), - intCol("Val2", 3, 4), intCol("Val3", 5, 6)); + intCol("Val2", 3, 4), intCol("Val3", 5, 6)); final Table out = ColumnsToRowsTransform.columnsToRows(in, "Name", "Value", "Val1", "Val2"); TableTools.showWithIndex(out); - final Table ex1 = TableTools.newTable(stringCol("Sym", "AAPL", "AAPL", "SPY", "SPY"), - intCol("Val3", 5, 5, 6, 6), stringCol("Name", "Val1", "Val2", "Val1", "Val2"), - intCol("Value", 1, 3, 2, 4)); + final Table ex1 = + TableTools.newTable(stringCol("Sym", "AAPL", "AAPL", "SPY", "SPY"), intCol("Val3", 5, 5, 6, 6), + stringCol("Name", "Val1", "Val2", "Val1", "Val2"), intCol("Value", 1, 3, 2, 4)); assertTableEquals(ex1, out); - final Table out2 = - ColumnsToRowsTransform.columnsToRows(in, "Fribble", "Value", "Val1", "Val2", "Val3"); + final Table out2 = ColumnsToRowsTransform.columnsToRows(in, "Fribble", "Value", "Val1", "Val2", "Val3"); TableTools.showWithIndex(out2); - final Table ex2 = - TableTools.newTable(stringCol("Sym", "AAPL", "AAPL", "AAPL", "SPY", "SPY", "SPY"), + final Table ex2 = TableTools.newTable(stringCol("Sym", "AAPL", "AAPL", "AAPL", "SPY", "SPY", "SPY"), stringCol("Fribble", "Val1", "Val2", "Val3", "Val1", "Val2", "Val3"), intCol("Value", 1, 3, 5, 2, 4, 6)); assertTableEquals(ex2, out2); final Table out3 = ColumnsToRowsTransform.columnsToRows(in, "Label", "Value", - new String[] {"First", "Second", "Third"}, new String[] {"Val1", "Val2", "Val3"}); + new String[] {"First", "Second", "Third"}, new String[] {"Val1", "Val2", "Val3"}); 
TableTools.showWithIndex(out3); final int[] expected = {1, 3, 5, 2, 4, 6}; - final Table ex3 = - TableTools.newTable(stringCol("Sym", "AAPL", "AAPL", "AAPL", "SPY", "SPY", "SPY"), - stringCol("Label", "First", "Second", "Third", "First", "Second", "Third"), - intCol("Value", expected)); + final Table ex3 = TableTools.newTable(stringCol("Sym", "AAPL", "AAPL", "AAPL", "SPY", "SPY", "SPY"), + stringCol("Label", "First", "Second", "Third", "First", "Second", "Third"), intCol("Value", expected)); assertTableEquals(ex3, out3); final Iterator it = out3.columnIterator("Value"); final IntegerColumnIterator it2 = out3.integerColumnIterator("Value"); @@ -59,27 +55,25 @@ public void testStatic() { } assertEquals(expected.length, position); - final Table inMulti = TableTools.newTable(stringCol("Sym", "AAPL", "SPY"), - intCol("Val1", 1, 2), doubleCol("D1", 7.7, 8.8), doubleCol("D2", 9.9, 10.1), - intCol("Val2", 3, 4), intCol("Val3", 5, 6), doubleCol("D3", 11.11, 12.12)); + final Table inMulti = TableTools.newTable(stringCol("Sym", "AAPL", "SPY"), intCol("Val1", 1, 2), + doubleCol("D1", 7.7, 8.8), doubleCol("D2", 9.9, 10.1), intCol("Val2", 3, 4), intCol("Val3", 5, 6), + doubleCol("D3", 11.11, 12.12)); TableTools.show(inMulti); - final Table outMulti = - ColumnsToRowsTransform.columnsToRows(inMulti, "Name", new String[] {"IV", "DV"}, - new String[] {"Apple", "Banana", "Canteloupe"}, new String[][] { - new String[] {"Val1", "Val2", "Val3"}, new String[] {"D1", "D2", "D3"}}); + final Table outMulti = ColumnsToRowsTransform.columnsToRows(inMulti, "Name", new String[] {"IV", "DV"}, + new String[] {"Apple", "Banana", "Canteloupe"}, + new String[][] {new String[] {"Val1", "Val2", "Val3"}, new String[] {"D1", "D2", "D3"}}); TableTools.show(outMulti); - final Table expectMulti = TableTools.newTable( - col("Sym", "AAPL", "AAPL", "AAPL", "SPY", "SPY", "SPY"), - col("Name", "Apple", "Banana", "Canteloupe", "Apple", "Banana", "Canteloupe"), - intCol("IV", 1, 3, 5, 2, 4, 6), doubleCol("DV", 
7.7, 9.9, 11.11, 8.8, 10.10, 12.12)); + final Table expectMulti = TableTools.newTable(col("Sym", "AAPL", "AAPL", "AAPL", "SPY", "SPY", "SPY"), + col("Name", "Apple", "Banana", "Canteloupe", "Apple", "Banana", "Canteloupe"), + intCol("IV", 1, 3, 5, 2, 4, 6), doubleCol("DV", 7.7, 9.9, 11.11, 8.8, 10.10, 12.12)); assertTableEquals(expectMulti, outMulti); } public void testBadSharedContext() { final Table in = TableTools.newTable(stringCol("Sym", "AAPL", "SPY", "TSLA", "VXX"), - intCol("Sentinel", 100, 101, 102, 103)); - final Table in2 = TableTools.newTable(stringCol("Sym", "VXX", "TSLA", "AAPL", "SPY"), - intCol("V1", 1, 2, 3, 4), intCol("V2", 5, 6, 7, 8)); + intCol("Sentinel", 100, 101, 102, 103)); + final Table in2 = TableTools.newTable(stringCol("Sym", "VXX", "TSLA", "AAPL", "SPY"), intCol("V1", 1, 2, 3, 4), + intCol("V2", 5, 6, 7, 8)); final Table joined = in.naturalJoin(in2, "Sym"); @@ -93,11 +87,9 @@ public void testBadSharedContext() { // noinspection unchecked final ColumnSource valueSource = filtered.getColumnSource("Value"); - try ( - final WritableIntChunk destination = - WritableIntChunk.makeWritableChunk(2); - final SharedContext sharedContext = SharedContext.makeSharedContext(); - final ChunkSource.FillContext f1 = valueSource.makeFillContext(2, sharedContext)) { + try (final WritableIntChunk destination = WritableIntChunk.makeWritableChunk(2); + final SharedContext sharedContext = SharedContext.makeSharedContext(); + final ChunkSource.FillContext f1 = valueSource.makeFillContext(2, sharedContext)) { valueSource.fillChunk(f1, destination, filtered.getIndex()); System.out.println(destination.get(0)); System.out.println(destination.get(1)); @@ -109,27 +101,24 @@ public void testBadSharedContext() { public void testTypeMismatch() { final Table in = TableTools.newTable(stringCol("Sym", "AAPL", "SPY"), intCol("Val1", 1, 2), - intCol("Val2", 3, 4), intCol("Val3", 5, 6), doubleCol("Val4", 7.0, 8.0)); + intCol("Val2", 3, 4), intCol("Val3", 5, 6), 
doubleCol("Val4", 7.0, 8.0)); try { ColumnsToRowsTransform.columnsToRows(in, "Name", "Value", "Val1", "Val2", "Val4"); TestCase.fail("Expected an exception for mismatched types."); } catch (IllegalArgumentException iae) { - TestCase.assertEquals("Incompatible transpose types Val1 is int, Val4 is double", - iae.getMessage()); + TestCase.assertEquals("Incompatible transpose types Val1 is int, Val4 is double", iae.getMessage()); } } public void testMisalignment() { final Table in = TableTools.newTable(stringCol("Sym", "AAPL", "SPY"), intCol("Val1", 1, 2), - intCol("Val2", 3, 4), intCol("Val3", 5, 6), doubleCol("Val4", 7.0, 8.0)); + intCol("Val2", 3, 4), intCol("Val3", 5, 6), doubleCol("Val4", 7.0, 8.0)); try { - ColumnsToRowsTransform.columnsToRows(in, "Name", new String[] {"Foo", "Bar"}, - new String[] {"A", "B"}, - new String[][] {new String[] {"Val1", "Val2"}, new String[] {"Val4"}}); + ColumnsToRowsTransform.columnsToRows(in, "Name", new String[] {"Foo", "Bar"}, new String[] {"A", "B"}, + new String[][] {new String[] {"Val1", "Val2"}, new String[] {"Val4"}}); TestCase.fail("Expected an exception for mismatched types."); } catch (IllegalArgumentException iae) { - TestCase.assertEquals("2 labels defined, but 1 transpose columns defined for Bar.", - iae.getMessage()); + TestCase.assertEquals("2 labels defined, but 1 transpose columns defined for Bar.", iae.getMessage()); } } @@ -144,17 +133,16 @@ private void testIncremental(int seed) { final TstUtils.ColumnInfo[] columnInfo; final int size = 30; final QueryTable queryTable = getTable(size, random, - columnInfo = initColumnInfos( - new String[] {"Sym", "D1", "D2", "D3", "I1", "I2", "I3", "I4", "I5"}, - new TstUtils.SetGenerator<>("a", "b", "c", "d", "e"), - new TstUtils.DoubleGenerator(0, 10), - new TstUtils.DoubleGenerator(10, 100), - new TstUtils.DoubleGenerator(100, 1000), - new TstUtils.IntGenerator(1000, 10000), - new TstUtils.IntGenerator(10000, 100000), - new TstUtils.IntGenerator(100000, 1000000), - new 
TstUtils.IntGenerator(1000000, 10000000), - new TstUtils.IntGenerator(10000000, 100000000))); + columnInfo = initColumnInfos(new String[] {"Sym", "D1", "D2", "D3", "I1", "I2", "I3", "I4", "I5"}, + new TstUtils.SetGenerator<>("a", "b", "c", "d", "e"), + new TstUtils.DoubleGenerator(0, 10), + new TstUtils.DoubleGenerator(10, 100), + new TstUtils.DoubleGenerator(100, 1000), + new TstUtils.IntGenerator(1000, 10000), + new TstUtils.IntGenerator(10000, 100000), + new TstUtils.IntGenerator(100000, 1000000), + new TstUtils.IntGenerator(1000000, 10000000), + new TstUtils.IntGenerator(10000000, 100000000))); final Map nameMap = new HashMap<>(); nameMap.put("I1", "EyeOne"); @@ -166,90 +154,76 @@ private void testIncremental(int seed) { QueryScope.addParam("nameMap", Collections.unmodifiableMap(nameMap)); final EvalNuggetInterface[] en = new EvalNuggetInterface[] { - EvalNugget.from(() -> ColumnsToRowsTransform.columnsToRows(queryTable, "Name", - "Value", "D1", "D2", "D3")), - EvalNugget.from(() -> ColumnsToRowsTransform.columnsToRows(queryTable, "Name", - "Value", "I1", "I2", "I3", "I4", "I5")), - EvalNugget.from(() -> ColumnsToRowsTransform.columnsToRows(queryTable, "Name", - "Value", "I1", "I2", "I3", "I4")), - EvalNugget.from(() -> ColumnsToRowsTransform.columnsToRows(queryTable, "Name", - "Value", "I1", "I2", "I3")), - EvalNugget.from(() -> ColumnsToRowsTransform.columnsToRows(queryTable, "Name", - "Value", "I1", "I2")), EvalNugget.from( - () -> ColumnsToRowsTransform.columnsToRows(queryTable, "Name", "Value", "I1")), + () -> ColumnsToRowsTransform.columnsToRows(queryTable, "Name", "Value", "D1", "D2", "D3")), + EvalNugget.from(() -> ColumnsToRowsTransform.columnsToRows(queryTable, "Name", "Value", "I1", "I2", + "I3", "I4", "I5")), + EvalNugget.from(() -> ColumnsToRowsTransform.columnsToRows(queryTable, "Name", "Value", "I1", "I2", + "I3", "I4")), + EvalNugget.from( + () -> ColumnsToRowsTransform.columnsToRows(queryTable, "Name", "Value", "I1", "I2", "I3")), + 
EvalNugget.from(() -> ColumnsToRowsTransform.columnsToRows(queryTable, "Name", "Value", "I1", "I2")), + EvalNugget.from(() -> ColumnsToRowsTransform.columnsToRows(queryTable, "Name", "Value", "I1")), EvalNugget.from(() -> ColumnsToRowsTransform - .columnsToRows(queryTable.sort("I5"), "Name", "Value", "I1", "I2", "I3") - .where("Name in `I1`, `I3`")), + .columnsToRows(queryTable.sort("I5"), "Name", "Value", "I1", "I2", "I3") + .where("Name in `I1`, `I3`")), EvalNugget.from(() -> ColumnsToRowsTransform - .columnsToRows(queryTable, "Name", "Value", "I1", "I2", "I3") - .where("Name in `I1`, `I3`")), + .columnsToRows(queryTable, "Name", "Value", "I1", "I2", "I3").where("Name in `I1`, `I3`")), EvalNugget.from(() -> ColumnsToRowsTransform - .columnsToRows(queryTable, "Name", "Value", "I1", "I2", "I3") - .where("Name in `I1`")), + .columnsToRows(queryTable, "Name", "Value", "I1", "I2", "I3").where("Name in `I1`")), EvalNugget.from(() -> ColumnsToRowsTransform - .columnsToRows(queryTable, "Name", "Value", "I1", "I2", "I3") - .updateView("MappedVal=nameMap.get(Name)") - .where("MappedVal in `EyeOne` || Value > 50000")), + .columnsToRows(queryTable, "Name", "Value", "I1", "I2", "I3") + .updateView("MappedVal=nameMap.get(Name)").where("MappedVal in `EyeOne` || Value > 50000")), new QueryTableTestBase.TableComparator( - ColumnsToRowsTransform.columnsToRows(queryTable, "Name", "Value", "I1", "I2", - "I3"), - LiveTableMonitor.DEFAULT.sharedLock() - .computeLocked(() -> queryTable - .update("Name=new String[]{`I1`, `I2`, `I3`}", - "Value=new int[]{I1, I2, I3}") - .dropColumns("I1", "I2", "I3").ungroup())), + ColumnsToRowsTransform.columnsToRows(queryTable, "Name", "Value", "I1", "I2", "I3"), + LiveTableMonitor.DEFAULT.sharedLock() + .computeLocked(() -> queryTable + .update("Name=new String[]{`I1`, `I2`, `I3`}", "Value=new int[]{I1, I2, I3}") + .dropColumns("I1", "I2", "I3").ungroup())), new QueryTableTestBase.TableComparator( - ColumnsToRowsTransform - .columnsToRows(queryTable, 
"Name", "Value", "I1", "I2", "I3") - .updateView("MappedVal=nameMap.get(Name)") - .where("MappedVal in `EyeOne` || Value > 50000"), - LiveTableMonitor.DEFAULT.sharedLock() - .computeLocked(() -> queryTable - .update("Name=new String[]{`I1`, `I2`, `I3`}", - "Value=new int[]{I1, I2, I3}") - .dropColumns("I1", "I2", "I3").ungroup()) - .updateView("MappedVal=nameMap.get(Name)") - .where("MappedVal in `EyeOne` || Value > 50000")), + ColumnsToRowsTransform.columnsToRows(queryTable, "Name", "Value", "I1", "I2", "I3") + .updateView("MappedVal=nameMap.get(Name)") + .where("MappedVal in `EyeOne` || Value > 50000"), + LiveTableMonitor.DEFAULT.sharedLock() + .computeLocked(() -> queryTable + .update("Name=new String[]{`I1`, `I2`, `I3`}", "Value=new int[]{I1, I2, I3}") + .dropColumns("I1", "I2", "I3").ungroup()) + .updateView("MappedVal=nameMap.get(Name)") + .where("MappedVal in `EyeOne` || Value > 50000")), new QueryTableTestBase.TableComparator( - ColumnsToRowsTransform - .columnsToRows(queryTable, "Name", "Value", "I1", "I2", "I3") - .updateView("MappedVal=nameMap.get(Name)").where("MappedVal in `EyeOne`"), - LiveTableMonitor.DEFAULT.sharedLock() - .computeLocked(() -> queryTable - .update("Name=new String[]{`I1`, `I2`, `I3`}", - "Value=new int[]{I1, I2, I3}") - .dropColumns("I1", "I2", "I3").ungroup()) - .updateView("MappedVal=nameMap.get(Name)").where("MappedVal in `EyeOne`")), + ColumnsToRowsTransform.columnsToRows(queryTable, "Name", "Value", "I1", "I2", "I3") + .updateView("MappedVal=nameMap.get(Name)").where("MappedVal in `EyeOne`"), + LiveTableMonitor.DEFAULT.sharedLock() + .computeLocked(() -> queryTable + .update("Name=new String[]{`I1`, `I2`, `I3`}", "Value=new int[]{I1, I2, I3}") + .dropColumns("I1", "I2", "I3").ungroup()) + .updateView("MappedVal=nameMap.get(Name)").where("MappedVal in `EyeOne`")), EvalNugget.from(() -> ColumnsToRowsTransform - .columnsToRows(queryTable, "Name", "Value", "I1", "I2", "I3") - .where("Value > 50000")), + .columnsToRows(queryTable, 
"Name", "Value", "I1", "I2", "I3").where("Value > 50000")), EvalNugget.from(() -> ColumnsToRowsTransform.columnsToRows(queryTable, "Name", - new String[] {"IV", "DV"}, new String[] {"First", "Second", "Third"}, - new String[][] {new String[] {"I1", "I2", "I3"}, - new String[] {"D1", "D2", "D3"}})), - new QueryTableTestBase.TableComparator( - ColumnsToRowsTransform.columnsToRows(queryTable, "Name", new String[] {"IV", "DV"}, new String[] {"First", "Second", "Third"}, - new String[][] {new String[] {"I1", "I2", "I3"}, - new String[] {"D1", "D2", "D3"}}), - LiveTableMonitor.DEFAULT.sharedLock() - .computeLocked(() -> queryTable - .update("Name=new String[]{`First`, `Second`, `Third`}", - "IV=new int[]{I1, I2, I3}", "DV=new double[]{D1, D2, D3}") - .dropColumns("I1", "I2", "I3", "D1", "D2", "D3").ungroup())), + new String[][] {new String[] {"I1", "I2", "I3"}, new String[] {"D1", "D2", "D3"}})), + new QueryTableTestBase.TableComparator( + ColumnsToRowsTransform.columnsToRows(queryTable, "Name", new String[] {"IV", "DV"}, + new String[] {"First", "Second", "Third"}, + new String[][] {new String[] {"I1", "I2", "I3"}, new String[] {"D1", "D2", "D3"}}), + LiveTableMonitor.DEFAULT.sharedLock() + .computeLocked(() -> queryTable + .update("Name=new String[]{`First`, `Second`, `Third`}", + "IV=new int[]{I1, I2, I3}", "DV=new double[]{D1, D2, D3}") + .dropColumns("I1", "I2", "I3", "D1", "D2", "D3").ungroup())), new QueryTableTestBase.TableComparator( - ColumnsToRowsTransform - .columnsToRows(queryTable, "Name", new String[] {"IV", "DV"}, - new String[] {"First", "Second", "Third"}, - new String[][] {new String[] {"I1", "I2", "I3"}, - new String[] {"D1", "D2", "D3"}}) - .updateView("MappedVal=nameMap.get(Name)").where("MappedVal in `AiTwo`"), - LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> queryTable - .update("Name=new String[]{`First`, `Second`, `Third`}", - "IV=new int[]{I1, I2, I3}", "DV=new double[]{D1, D2, D3}") - .dropColumns("I1", "I2", "I3", "D1", "D2", 
"D3").ungroup() - .updateView("MappedVal=nameMap.get(Name)").where("MappedVal in `AiTwo`"))), + ColumnsToRowsTransform + .columnsToRows(queryTable, "Name", new String[] {"IV", "DV"}, + new String[] {"First", "Second", "Third"}, + new String[][] {new String[] {"I1", "I2", "I3"}, + new String[] {"D1", "D2", "D3"}}) + .updateView("MappedVal=nameMap.get(Name)").where("MappedVal in `AiTwo`"), + LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> queryTable + .update("Name=new String[]{`First`, `Second`, `Third`}", "IV=new int[]{I1, I2, I3}", + "DV=new double[]{D1, D2, D3}") + .dropColumns("I1", "I2", "I3", "D1", "D2", "D3").ungroup() + .updateView("MappedVal=nameMap.get(Name)").where("MappedVal in `AiTwo`"))), }; for (int step = 0; step < 100; step++) { diff --git a/DB/src/test/java/io/deephaven/db/v2/utils/TestDynamicTableWriter.java b/DB/src/test/java/io/deephaven/db/v2/utils/TestDynamicTableWriter.java index eda17cbdf17..a686401ae9e 100644 --- a/DB/src/test/java/io/deephaven/db/v2/utils/TestDynamicTableWriter.java +++ b/DB/src/test/java/io/deephaven/db/v2/utils/TestDynamicTableWriter.java @@ -35,11 +35,9 @@ public void tearDown() throws Exception { @Test public void testTypes() throws IOException { - final String[] names = - new String[] {"BC", "CC", "SC", "IC", "LC", "FC", "DC", "StrC", "BLC", "DTC", "BIC"}; - final Class[] types = - new Class[] {byte.class, char.class, short.class, int.class, long.class, float.class, - double.class, String.class, Boolean.class, DBDateTime.class, BigInteger.class}; + final String[] names = new String[] {"BC", "CC", "SC", "IC", "LC", "FC", "DC", "StrC", "BLC", "DTC", "BIC"}; + final Class[] types = new Class[] {byte.class, char.class, short.class, int.class, long.class, float.class, + double.class, String.class, Boolean.class, DBDateTime.class, BigInteger.class}; final DynamicTableWriter writer = new DynamicTableWriter(names, types); final LiveQueryTable result = writer.getTable(); @@ -52,23 +50,22 @@ public void testTypes() 
throws IOException { writer.getSetter("DC").setDouble(6.6); writer.getSetter("StrC", String.class).set("Seven"); writer.getSetter("BLC", Boolean.class).setBoolean(true); - writer.getSetter("DTC", DBDateTime.class) - .set(DBTimeUtils.convertDateTime("2020-09-16T07:55:00 NY")); + writer.getSetter("DTC", DBDateTime.class).set(DBTimeUtils.convertDateTime("2020-09-16T07:55:00 NY")); writer.getSetter("BIC", BigInteger.class).set(BigInteger.valueOf(8)); writer.writeRow(); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(result::refresh); final DynamicTable expected1 = newTable(byteCol("BC", (byte) 1), - charCol("CC", 'A'), - shortCol("SC", (short) 2), - intCol("IC", 3), - longCol("LC", 4), - floatCol("FC", 5.5f), - doubleCol("DC", 6.6), - stringCol("StrC", "Seven"), - col("BLC", true), - col("DTC", DBTimeUtils.convertDateTime("2020-09-16T07:55:00 NY")), - col("BIC", BigInteger.valueOf(8))); + charCol("CC", 'A'), + shortCol("SC", (short) 2), + intCol("IC", 3), + longCol("LC", 4), + floatCol("FC", 5.5f), + doubleCol("DC", 6.6), + stringCol("StrC", "Seven"), + col("BLC", true), + col("DTC", DBTimeUtils.convertDateTime("2020-09-16T07:55:00 NY")), + col("BIC", BigInteger.valueOf(8))); TstUtils.assertTableEquals(expected1, result); final Row row = writer.getRowWriter(); @@ -82,8 +79,7 @@ public void testTypes() throws IOException { row.getSetter("DC").setDouble(14.14); row.getSetter("StrC", String.class).set("Fifteen"); row.getSetter("BLC", Boolean.class).setBoolean(true); - row.getSetter("DTC", DBDateTime.class) - .set(DBTimeUtils.convertDateTime("2020-09-16T08:55:00 NY")); + row.getSetter("DTC", DBDateTime.class).set(DBTimeUtils.convertDateTime("2020-09-16T08:55:00 NY")); row.getSetter("BIC", BigInteger.class).set(BigInteger.valueOf(16)); row.setFlags(Row.Flags.StartTransaction); row.writeRow(); @@ -98,8 +94,7 @@ public void testTypes() throws IOException { row2.getSetter("DC").setDouble(22.22); row2.getSetter("StrC", String.class).set("Twenty Three"); row2.getSetter("BLC", 
Boolean.class).setBoolean(false); - row2.getSetter("DTC", DBDateTime.class) - .set(DBTimeUtils.convertDateTime("2020-09-16T09:55:00 NY")); + row2.getSetter("DTC", DBDateTime.class).set(DBTimeUtils.convertDateTime("2020-09-16T09:55:00 NY")); row2.getSetter("BIC", BigInteger.class).set(BigInteger.valueOf(24)); row2.setFlags(Row.Flags.StartTransaction); row2.writeRow(); @@ -117,8 +112,7 @@ public void testTypes() throws IOException { row3.getSetter("DC", double.class).set(30.30); row3.getSetter("StrC", String.class).set("Thirty One"); row3.getSetter("BLC", Boolean.class).set(null); - row3.getSetter("DTC", DBDateTime.class) - .set(DBTimeUtils.convertDateTime("2020-09-16T10:55:00 NY")); + row3.getSetter("DTC", DBDateTime.class).set(DBTimeUtils.convertDateTime("2020-09-16T10:55:00 NY")); row3.getSetter("BIC", BigInteger.class).set(BigInteger.valueOf(32)); row3.setFlags(Row.Flags.EndTransaction); row3.writeRow(); @@ -126,29 +120,27 @@ public void testTypes() throws IOException { LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(result::refresh); final DynamicTable expected2 = newTable(byteCol("BC", (byte) 1, (byte) 17, (byte) 25), - charCol("CC", 'A', 'C', 'D'), - shortCol("SC", (short) 2, (short) 18, (short) 26), - intCol("IC", 3, 19, 27), - longCol("LC", 4, 20, 28), - floatCol("FC", 5.5f, 21.21f, 29.29f), - doubleCol("DC", 6.6, 22.22, 30.30), - stringCol("StrC", "Seven", "Twenty Three", "Thirty One"), - col("BLC", true, false, null), - col("DTC", DBTimeUtils.convertDateTime("2020-09-16T07:55:00 NY"), - DBTimeUtils.convertDateTime("2020-09-16T09:55:00 NY"), - DBTimeUtils.convertDateTime("2020-09-16T10:55:00 NY")), - col("BIC", BigInteger.valueOf(8), BigInteger.valueOf(24), BigInteger.valueOf(32))); + charCol("CC", 'A', 'C', 'D'), + shortCol("SC", (short) 2, (short) 18, (short) 26), + intCol("IC", 3, 19, 27), + longCol("LC", 4, 20, 28), + floatCol("FC", 5.5f, 21.21f, 29.29f), + doubleCol("DC", 6.6, 22.22, 30.30), + stringCol("StrC", "Seven", "Twenty Three", "Thirty One"), + 
col("BLC", true, false, null), + col("DTC", DBTimeUtils.convertDateTime("2020-09-16T07:55:00 NY"), + DBTimeUtils.convertDateTime("2020-09-16T09:55:00 NY"), + DBTimeUtils.convertDateTime("2020-09-16T10:55:00 NY")), + col("BIC", BigInteger.valueOf(8), BigInteger.valueOf(24), BigInteger.valueOf(32))); TstUtils.assertTableEquals(expected2, result); } @Test public void testNulls() throws IOException { - final String[] names = - new String[] {"BC", "CC", "SC", "IC", "LC", "FC", "DC", "StrC", "BLC", "DTC", "BIC"}; - final Class[] types = - new Class[] {byte.class, char.class, short.class, int.class, long.class, float.class, - double.class, String.class, Boolean.class, DBDateTime.class, BigInteger.class}; + final String[] names = new String[] {"BC", "CC", "SC", "IC", "LC", "FC", "DC", "StrC", "BLC", "DTC", "BIC"}; + final Class[] types = new Class[] {byte.class, char.class, short.class, int.class, long.class, float.class, + double.class, String.class, Boolean.class, DBDateTime.class, BigInteger.class}; final DynamicTableWriter writer = new DynamicTableWriter(names, types); final LiveQueryTable result = writer.getTable(); @@ -162,14 +154,14 @@ public void testNulls() throws IOException { LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(result::refresh); final Table expected1 = newTable(byteCol("BC", (byte) 1), - charCol("CC", 'A'), - shortCol("SC", (short) 2), - intCol("IC", 3), - longCol("LC", 4), - floatCol("FC", 5.5f), - doubleCol("DC", QueryConstants.NULL_DOUBLE)) - .updateView("StrC=(String)null", "BLC=(Boolean)null", "DTC=(DBDateTime)null", - "BIC=(java.math.BigInteger)null"); + charCol("CC", 'A'), + shortCol("SC", (short) 2), + intCol("IC", 3), + longCol("LC", 4), + floatCol("FC", 5.5f), + doubleCol("DC", QueryConstants.NULL_DOUBLE)) + .updateView("StrC=(String)null", "BLC=(Boolean)null", "DTC=(DBDateTime)null", + "BIC=(java.math.BigInteger)null"); TstUtils.assertTableEquals(expected1, result); final Row row = writer.getRowWriter(); @@ -177,8 +169,7 @@ public void 
testNulls() throws IOException { row.getSetter("DC").setDouble(14.14); row.getSetter("StrC", String.class).set("Fifteen"); row.getSetter("BLC", Boolean.class).setBoolean(true); - row.getSetter("DTC", DBDateTime.class) - .set(DBTimeUtils.convertDateTime("2020-09-16T08:55:00 NY")); + row.getSetter("DTC", DBDateTime.class).set(DBTimeUtils.convertDateTime("2020-09-16T08:55:00 NY")); row.getSetter("BIC", BigInteger.class).set(BigInteger.valueOf(16)); row.setFlags(Row.Flags.SingleRow); row.writeRow(); @@ -186,16 +177,16 @@ public void testNulls() throws IOException { LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(result::refresh); final DynamicTable expected2 = newTable(byteCol("BC", QueryConstants.NULL_BYTE), - charCol("CC", QueryConstants.NULL_CHAR), - shortCol("SC", QueryConstants.NULL_SHORT), - intCol("IC", QueryConstants.NULL_INT), - longCol("LC", QueryConstants.NULL_LONG), - floatCol("FC", QueryConstants.NULL_FLOAT), - doubleCol("DC", 14.14), - stringCol("StrC", "Fifteen"), - col("BLC", true), - col("DTC", DBTimeUtils.convertDateTime("2020-09-16T08:55:00 NY")), - col("BIC", BigInteger.valueOf(16))); + charCol("CC", QueryConstants.NULL_CHAR), + shortCol("SC", QueryConstants.NULL_SHORT), + intCol("IC", QueryConstants.NULL_INT), + longCol("LC", QueryConstants.NULL_LONG), + floatCol("FC", QueryConstants.NULL_FLOAT), + doubleCol("DC", 14.14), + stringCol("StrC", "Fifteen"), + col("BLC", true), + col("DTC", DBTimeUtils.convertDateTime("2020-09-16T08:55:00 NY")), + col("BIC", BigInteger.valueOf(16))); TstUtils.assertTableEquals(merge(expected1, expected2), result); } @@ -206,15 +197,14 @@ public void testTransactions() throws IOException { final Class[] columnTypes = new Class[] {String.class, int.class}; final DynamicTableWriter writer = new DynamicTableWriter(columnNames, columnTypes); final LiveQueryTable result = writer.getTable(); - TstUtils.assertTableEquals( - TableTools.newTable(TableTools.stringCol("A"), TableTools.intCol("B")), result); + 
TstUtils.assertTableEquals(TableTools.newTable(TableTools.stringCol("A"), TableTools.intCol("B")), result); addRow(writer, Row.Flags.SingleRow, "Fred", 1); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(result::refresh); final DynamicTable lonelyFred = - TableTools.newTable(TableTools.stringCol("A", "Fred"), TableTools.intCol("B", 1)); + TableTools.newTable(TableTools.stringCol("A", "Fred"), TableTools.intCol("B", 1)); TstUtils.assertTableEquals(lonelyFred, result); addRow(writer, Row.Flags.StartTransaction, "Barney", 2); @@ -229,9 +219,8 @@ public void testTransactions() throws IOException { TstUtils.assertTableEquals(lonelyFred, result); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(result::refresh); - final DynamicTable withRubbles = - TableTools.newTable(TableTools.stringCol("A", "Fred", "Barney", "Betty", "Bam-Bam"), - TableTools.intCol("B", 1, 2, 3, 4)); + final DynamicTable withRubbles = TableTools.newTable( + TableTools.stringCol("A", "Fred", "Barney", "Betty", "Bam-Bam"), TableTools.intCol("B", 1, 2, 3, 4)); TstUtils.assertTableEquals(withRubbles, result); addRow(writer, Row.Flags.StartTransaction, "Wilma", 5); @@ -244,14 +233,14 @@ public void testTransactions() throws IOException { addRow(writer, Row.Flags.EndTransaction, "Wilma", 7); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(result::refresh); - final DynamicTable allTogether = TableTools.newTable( - TableTools.stringCol("A", "Fred", "Barney", "Betty", "Bam-Bam", "Pebbles", "Wilma"), - TableTools.intCol("B", 1, 2, 3, 4, 6, 7)); + final DynamicTable allTogether = + TableTools.newTable(TableTools.stringCol("A", "Fred", "Barney", "Betty", "Bam-Bam", "Pebbles", "Wilma"), + TableTools.intCol("B", 1, 2, 3, 4, 6, 7)); TstUtils.assertTableEquals(allTogether, result); } private void addRow(DynamicTableWriter writer, Row.Flags startTransaction, String barney, int i) - throws IOException { + throws IOException { final Row rw = writer.getRowWriter(); rw.setFlags(startTransaction); rw.getSetter("A", 
String.class).set(barney); diff --git a/DB/src/test/java/io/deephaven/db/v2/utils/TestFreezeBy.java b/DB/src/test/java/io/deephaven/db/v2/utils/TestFreezeBy.java index 9fd0fbd99f7..85d42a3e88c 100644 --- a/DB/src/test/java/io/deephaven/db/v2/utils/TestFreezeBy.java +++ b/DB/src/test/java/io/deephaven/db/v2/utils/TestFreezeBy.java @@ -21,13 +21,13 @@ public class TestFreezeBy extends LiveTableTestCase { public void testSimpleTypes() { final DBDateTime timeBase = DBTimeUtils.convertDateTime("2020-09-10T09:00:00 NY"); QueryScope.addParam("freezeByTimeBase", timeBase); - final QueryTable input = TstUtils.testRefreshingTable(stringCol("Key", "A", "B", "C"), - intCol("Sentinel", 1, 2, 3)); - final List updates = Arrays.asList("SStr=Integer.toString(Sentinel)", - "SByte=(byte)Sentinel", "SChar=(char)('A' + (char)Sentinel)", "SShort=(short)Sentinel", - "SLong=(long)Sentinel", "SDouble=Sentinel/4", "SFloat=(float)(Sentinel/2)", - "SDateTime=freezeByTimeBase + (Sentinel * 3600L*1000000000L)", - "SBoolean=Sentinel%3==0?true:(Sentinel%3==1?false:null)"); + final QueryTable input = + TstUtils.testRefreshingTable(stringCol("Key", "A", "B", "C"), intCol("Sentinel", 1, 2, 3)); + final List updates = Arrays.asList("SStr=Integer.toString(Sentinel)", "SByte=(byte)Sentinel", + "SChar=(char)('A' + (char)Sentinel)", "SShort=(short)Sentinel", "SLong=(long)Sentinel", + "SDouble=Sentinel/4", "SFloat=(float)(Sentinel/2)", + "SDateTime=freezeByTimeBase + (Sentinel * 3600L*1000000000L)", + "SBoolean=Sentinel%3==0?true:(Sentinel%3==1?false:null)"); final Table inputUpdated = input.updateView(Selectable.from(updates)); final Table frozen = FreezeBy.freezeBy(inputUpdated, "Key"); TableTools.showWithIndex(frozen); @@ -56,37 +56,33 @@ public void testSimpleTypes() { TableTools.showWithIndex(frozen); assertTableEquals(TableTools.newTable(stringCol("Key", "B", "C"), intCol("Sentinel", 2, 3)) - .updateView(Selectable.from(updates)), frozen); + .updateView(Selectable.from(updates)), frozen); 
LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - TstUtils.addToTable(input, i(3, 4), stringCol("Key", "D", "A"), - intCol("Sentinel", 5, 6)); + TstUtils.addToTable(input, i(3, 4), stringCol("Key", "D", "A"), intCol("Sentinel", 5, 6)); input.notifyListeners(i(3, 4), i(), i()); }); TableTools.showWithIndex(frozen); - assertTableEquals(TableTools - .newTable(stringCol("Key", "A", "B", "C", "D"), intCol("Sentinel", 6, 2, 3, 5)) - .updateView(Selectable.from(updates)), frozen); + assertTableEquals(TableTools.newTable(stringCol("Key", "A", "B", "C", "D"), intCol("Sentinel", 6, 2, 3, 5)) + .updateView(Selectable.from(updates)), frozen); // swap two keys LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - TstUtils.addToTable(input, i(3, 4), stringCol("Key", "A", "D"), - intCol("Sentinel", 7, 8)); + TstUtils.addToTable(input, i(3, 4), stringCol("Key", "A", "D"), intCol("Sentinel", 7, 8)); input.notifyListeners(i(), i(), i(4, 3)); }); TableTools.showWithIndex(frozen); - assertTableEquals(TableTools - .newTable(stringCol("Key", "A", "B", "C", "D"), intCol("Sentinel", 6, 2, 3, 5)) - .updateView(Selectable.from(updates)), frozen); + assertTableEquals(TableTools.newTable(stringCol("Key", "A", "B", "C", "D"), intCol("Sentinel", 6, 2, 3, 5)) + .updateView(Selectable.from(updates)), frozen); QueryScope.addParam("freezeByTimeBase", null); } public void testCompositeKeys() { final QueryTable input = TstUtils.testRefreshingTable(stringCol("Key", "A", "A", "C"), - intCol("Key2", 101, 102, 103), intCol("Sentinel", 1, 2, 3)); + intCol("Key2", 101, 102, 103), intCol("Sentinel", 1, 2, 3)); final Table frozen = FreezeBy.freezeBy(input, "Key", "Key2"); TableTools.showWithIndex(frozen); @@ -98,24 +94,23 @@ public void testCompositeKeys() { // swap two keys LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - TstUtils.addToTable(input, i(0, 4), stringCol("Key", "A", "D"), - intCol("Key2", 101, 101), intCol("Sentinel", 4, 5)); + TstUtils.addToTable(input, i(0, 4), 
stringCol("Key", "A", "D"), intCol("Key2", 101, 101), + intCol("Sentinel", 4, 5)); input.notifyListeners(i(4), i(), i(0)); }); TableTools.showWithIndex(frozen); - assertTableEquals(TableTools.newTable(stringCol("Key", "A", "A", "C", "D"), - intCol("Key2", 101, 102, 103, 101), intCol("Sentinel", 1, 2, 3, 5)), frozen); + assertTableEquals(TableTools.newTable(stringCol("Key", "A", "A", "C", "D"), intCol("Key2", 101, 102, 103, 101), + intCol("Sentinel", 1, 2, 3, 5)), frozen); } public void testNoKeys() { - final QueryTable input = - TstUtils.testRefreshingTable(stringCol("Key", "A"), intCol("Sentinel", 1)); + final QueryTable input = TstUtils.testRefreshingTable(stringCol("Key", "A"), intCol("Sentinel", 1)); final Table frozen = FreezeBy.freezeBy(input); TableTools.showWithIndex(frozen); - final Table originalExpect = LiveTableMonitor.DEFAULT.sharedLock() - .computeLocked(() -> TableTools.emptyTable(1).snapshot(input)); + final Table originalExpect = + LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> TableTools.emptyTable(1).snapshot(input)); assertTableEquals(input, originalExpect); final TableUpdateValidator tuv = TableUpdateValidator.make("frozen", (QueryTable) frozen); @@ -150,8 +145,8 @@ public void testNoKeys() { input.notifyListeners(i(2), i(), i()); }); TableTools.showWithIndex(frozen); - final Table newExpect = LiveTableMonitor.DEFAULT.sharedLock() - .computeLocked(() -> TableTools.emptyTable(1).snapshot(input)); + final Table newExpect = + LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> TableTools.emptyTable(1).snapshot(input)); assertTableEquals(input, newExpect); assertTableEquals(newExpect, frozen); @@ -170,8 +165,8 @@ public void testNoKeys() { } public void testDuplicates() { - final QueryTable input = TstUtils.testRefreshingTable(stringCol("Key", "A", "B", "C"), - intCol("Sentinel", 1, 2, 3)); + final QueryTable input = + TstUtils.testRefreshingTable(stringCol("Key", "A", "B", "C"), intCol("Sentinel", 1, 2, 3)); try { 
FreezeBy.freezeBy(input); TestCase.fail("Expected exception."); diff --git a/DB/src/test/java/io/deephaven/db/v2/utils/TestFunctionBackedTableFactory.java b/DB/src/test/java/io/deephaven/db/v2/utils/TestFunctionBackedTableFactory.java index d4f0862862c..d0f2cda5cef 100644 --- a/DB/src/test/java/io/deephaven/db/v2/utils/TestFunctionBackedTableFactory.java +++ b/DB/src/test/java/io/deephaven/db/v2/utils/TestFunctionBackedTableFactory.java @@ -22,24 +22,22 @@ public void testIterative() { TstUtils.ColumnInfo columnInfo[]; int size = 50; final QueryTable queryTable = getTable(size, random, - columnInfo = initColumnInfos(new String[] {"Sym", "intCol", "doubleCol"}, - new TstUtils.SetGenerator<>("a", "b", "c", "d", "e"), - new TstUtils.IntGenerator(10, 100), - new TstUtils.SetGenerator<>(10.1, 20.1, 30.1))); + columnInfo = initColumnInfos(new String[] {"Sym", "intCol", "doubleCol"}, + new TstUtils.SetGenerator<>("a", "b", "c", "d", "e"), + new TstUtils.IntGenerator(10, 100), + new TstUtils.SetGenerator<>(10.1, 20.1, 30.1))); - final Table functionBacked = - FunctionGeneratedTableFactory.create(() -> queryTable, queryTable); + final Table functionBacked = FunctionGeneratedTableFactory.create(() -> queryTable, queryTable); - final String diff = - diff(functionBacked, queryTable, 10, EnumSet.of(TableDiff.DiffItems.DoublesExact)); + final String diff = diff(functionBacked, queryTable, 10, EnumSet.of(TableDiff.DiffItems.DoublesExact)); Assert.assertEquals("", diff); EvalNuggetInterface[] en = new EvalNuggetInterface[] { new QueryTableTest.TableComparator(functionBacked, queryTable), - // Note: disable update validation since the function backed table's prev values - // will always be incorrect + // Note: disable update validation since the function backed table's prev values will always be + // incorrect EvalNugget.from(() -> LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLocked(() -> functionBacked.update("Mult=intCol * doubleCol"))), + .computeLocked(() -> 
functionBacked.update("Mult=intCol * doubleCol"))), }; for (int i = 0; i < 75; i++) { diff --git a/DB/src/test/java/io/deephaven/db/v2/utils/TestHashSetBackedTableFactory.java b/DB/src/test/java/io/deephaven/db/v2/utils/TestHashSetBackedTableFactory.java index 023b71959a9..d805ac87661 100644 --- a/DB/src/test/java/io/deephaven/db/v2/utils/TestHashSetBackedTableFactory.java +++ b/DB/src/test/java/io/deephaven/db/v2/utils/TestHashSetBackedTableFactory.java @@ -54,7 +54,7 @@ public void testIterative() { new EvalNugget() { public Table e() { return LiveTableMonitor.DEFAULT.exclusiveLock() - .computeLocked(() -> result.update("Arg0=Arg.substring(0, 1)")); + .computeLocked(() -> result.update("Arg0=Arg.substring(0, 1)")); } }, new UpdateValidatorNugget(result), @@ -101,8 +101,7 @@ private HashSet tableToSet(Table result) { final Map map = queryTable.getColumnSourceMap(); // noinspection unchecked - final ColumnSource[] columnSources = - (ColumnSource[]) new ColumnSource[map.size()]; + final ColumnSource[] columnSources = (ColumnSource[]) new ColumnSource[map.size()]; int ii = 0; for (ColumnSource cs : map.values()) { // noinspection unchecked diff --git a/DB/src/test/java/io/deephaven/db/v2/utils/TestIncrementalReleaseFilter.java b/DB/src/test/java/io/deephaven/db/v2/utils/TestIncrementalReleaseFilter.java index 03b7a032844..4c0ded8dd23 100644 --- a/DB/src/test/java/io/deephaven/db/v2/utils/TestIncrementalReleaseFilter.java +++ b/DB/src/test/java/io/deephaven/db/v2/utils/TestIncrementalReleaseFilter.java @@ -22,12 +22,10 @@ public class TestIncrementalReleaseFilter extends LiveTableTestCase { public void testSimple() { - final Table source = - TableTools.newTable(TableTools.intCol("Sentinel", 1, 2, 3, 4, 5, 6, 7, 8, 9, 10)); + final Table source = TableTools.newTable(TableTools.intCol("Sentinel", 1, 2, 3, 4, 5, 6, 7, 8, 9, 10)); TableTools.show(source); - final IncrementalReleaseFilter incrementalReleaseFilter = - new IncrementalReleaseFilter(2, 1); + final 
IncrementalReleaseFilter incrementalReleaseFilter = new IncrementalReleaseFilter(2, 1); final Table filtered = source.where(incrementalReleaseFilter); TableTools.show(filtered); @@ -43,13 +41,11 @@ public void testSimple() { public void testBigTable() { final Table sourcePart = TableTools.emptyTable(1_000_000_000L); - final List
    sourceParts = - IntStream.range(0, 20).mapToObj(x -> sourcePart).collect(Collectors.toList()); + final List
    sourceParts = IntStream.range(0, 20).mapToObj(x -> sourcePart).collect(Collectors.toList()); final Table source = TableTools.merge(sourceParts); TableTools.show(source); - final IncrementalReleaseFilter incrementalReleaseFilter = - new IncrementalReleaseFilter(2, 10_000_000); + final IncrementalReleaseFilter incrementalReleaseFilter = new IncrementalReleaseFilter(2, 10_000_000); final Table filtered = source.where(incrementalReleaseFilter); final Table flattened = filtered.flatten(); @@ -95,12 +91,10 @@ public void testAutoTune2() { TableTools.show(source); final AutoTuningIncrementalReleaseFilter incrementalReleaseFilter = - new AutoTuningIncrementalReleaseFilter(0, 100, 1.1, true, - new ClockTimeProvider(new RealTimeClock())); + new AutoTuningIncrementalReleaseFilter(0, 100, 1.1, true, new ClockTimeProvider(new RealTimeClock())); final Table filtered = source.where(incrementalReleaseFilter); - final Table updated = - LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> filtered.update("I=ii")); + final Table updated = LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> filtered.update("I=ii")); while (filtered.size() < source.size()) { LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(incrementalReleaseFilter::refresh); @@ -115,13 +109,11 @@ private int testAutoTuneCycle(int cycleTime) { TableTools.show(source); final AutoTuningIncrementalReleaseFilter incrementalReleaseFilter = - new AutoTuningIncrementalReleaseFilter(0, 100, 1.1, true, - new ClockTimeProvider(new RealTimeClock())); + new AutoTuningIncrementalReleaseFilter(0, 100, 1.1, true, new ClockTimeProvider(new RealTimeClock())); final Table filtered = source.where(incrementalReleaseFilter); - final Table updated = - LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> filtered.update( - "I=io.deephaven.db.v2.utils.TestIncrementalReleaseFilter.sleepValue(100000, ii)")); + final Table updated = LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> filtered + 
.update("I=io.deephaven.db.v2.utils.TestIncrementalReleaseFilter.sleepValue(100000, ii)")); int cycles = 0; while (filtered.size() < source.size()) { diff --git a/DB/src/test/java/io/deephaven/db/v2/utils/TestKeyedArrayBackedMutableTable.java b/DB/src/test/java/io/deephaven/db/v2/utils/TestKeyedArrayBackedMutableTable.java index 84e7bcb679f..39fe6b34777 100644 --- a/DB/src/test/java/io/deephaven/db/v2/utils/TestKeyedArrayBackedMutableTable.java +++ b/DB/src/test/java/io/deephaven/db/v2/utils/TestKeyedArrayBackedMutableTable.java @@ -44,7 +44,7 @@ public void after() throws Exception { @Test public void testSimple() throws Exception { final Table input = TableTools.newTable(stringCol("Name", "Fred", "George", "Earl"), - stringCol("Employer", "Slate Rock and Gravel", "Spacely Sprockets", "Wesayso")); + stringCol("Employer", "Slate Rock and Gravel", "Spacely Sprockets", "Wesayso")); final KeyedArrayBackedMutableTable kabut = KeyedArrayBackedMutableTable.make(input, "Name"); final TableUpdateValidator validator = TableUpdateValidator.make("kabut", kabut); @@ -54,31 +54,27 @@ public void testSimple() throws Exception { assertTableEquals(input, kabut); - final MutableInputTable mutableInputTable = - (MutableInputTable) kabut.getAttribute(Table.INPUT_TABLE_ATTRIBUTE); + final MutableInputTable mutableInputTable = (MutableInputTable) kabut.getAttribute(Table.INPUT_TABLE_ATTRIBUTE); TestCase.assertNotNull(mutableInputTable); - final Table input2 = - TableTools.newTable(stringCol("Name", "Randy"), stringCol("Employer", "USGS")); + final Table input2 = TableTools.newTable(stringCol("Name", "Randy"), stringCol("Employer", "USGS")); handleDelayedRefresh(kabut, () -> mutableInputTable.add(input2)); assertTableEquals(TableTools.merge(input, input2), kabut); - final Table input3 = - TableTools.newTable(stringCol("Name", "Randy"), stringCol("Employer", "Tegridy")); + final Table input3 = TableTools.newTable(stringCol("Name", "Randy"), stringCol("Employer", "Tegridy")); 
handleDelayedRefresh(kabut, () -> mutableInputTable.add(input3)); assertTableEquals(TableTools.merge(input, input3), kabut); - final Table input4 = - TableTools.newTable(stringCol("Name", "George"), stringCol("Employer", "Cogswell")); + final Table input4 = TableTools.newTable(stringCol("Name", "George"), stringCol("Employer", "Cogswell")); handleDelayedRefresh(kabut, () -> mutableInputTable.add(input4)); TableTools.showWithIndex(kabut); assertTableEquals(TableTools.merge(input, input3, input4).lastBy("Name"), kabut); - final Table input5 = TableTools.newTable(stringCol("Name", "George"), - stringCol("Employer", "Spacely Sprockets")); + final Table input5 = + TableTools.newTable(stringCol("Name", "George"), stringCol("Employer", "Spacely Sprockets")); handleDelayedRefresh(kabut, () -> mutableInputTable.add(input5)); TableTools.showWithIndex(kabut); @@ -95,9 +91,9 @@ public void testSimple() throws Exception { TableTools.showWithIndex(kabut); final Table expected = TableTools.merge( - TableTools.merge(input, input3, input4, input5).update("Deleted=false"), - delete1.update("Employer=(String)null", "Deleted=true")) - .lastBy("Name").where("Deleted=false").dropColumns("Deleted"); + TableTools.merge(input, input3, input4, input5).update("Deleted=false"), + delete1.update("Employer=(String)null", "Deleted=true")) + .lastBy("Name").where("Deleted=false").dropColumns("Deleted"); TableTools.showWithIndex(expected); assertTableEquals(expected, kabut); @@ -106,10 +102,9 @@ public void testSimple() throws Exception { @Test public void testAppendOnly() throws Exception { final Table input = TableTools.newTable(stringCol("Name", "Fred", "George", "Earl"), - stringCol("Employer", "Slate Rock and Gravel", "Spacely Sprockets", "Wesayso")); + stringCol("Employer", "Slate Rock and Gravel", "Spacely Sprockets", "Wesayso")); - final AppendOnlyArrayBackedMutableTable aoabmt = - AppendOnlyArrayBackedMutableTable.make(input); + final AppendOnlyArrayBackedMutableTable aoabmt = 
AppendOnlyArrayBackedMutableTable.make(input); final TableUpdateValidator validator = TableUpdateValidator.make("aoabmt", aoabmt); final DynamicTable validatorResult = validator.getResultTable(); final FailureListener failureListener = new FailureListener(); @@ -118,11 +113,11 @@ public void testAppendOnly() throws Exception { assertTableEquals(input, aoabmt); final MutableInputTable mutableInputTable = - (MutableInputTable) aoabmt.getAttribute(Table.INPUT_TABLE_ATTRIBUTE); + (MutableInputTable) aoabmt.getAttribute(Table.INPUT_TABLE_ATTRIBUTE); TestCase.assertNotNull(mutableInputTable); - final Table input2 = TableTools.newTable(stringCol("Name", "Randy", "George"), - stringCol("Employer", "USGS", "Cogswell")); + final Table input2 = + TableTools.newTable(stringCol("Name", "Randy", "George"), stringCol("Employer", "USGS", "Cogswell")); handleDelayedRefresh(aoabmt, () -> mutableInputTable.add(input2)); assertTableEquals(TableTools.merge(input, input2), aoabmt); @@ -131,7 +126,7 @@ public void testAppendOnly() throws Exception { @Test public void testFilteredAndSorted() throws Exception { final Table input = TableTools.newTable(stringCol("Name", "Fred", "George", "Earl"), - stringCol("Employer", "Slate Rock and Gravel", "Spacely Sprockets", "Wesayso")); + stringCol("Employer", "Slate Rock and Gravel", "Spacely Sprockets", "Wesayso")); final KeyedArrayBackedMutableTable kabut = KeyedArrayBackedMutableTable.make(input, "Name"); final TableUpdateValidator validator = TableUpdateValidator.make("kabut", kabut); @@ -143,8 +138,7 @@ public void testFilteredAndSorted() throws Exception { final Table fs = kabut.where("Name.length() == 4").sort("Name"); - final MutableInputTable mutableInputTable = - (MutableInputTable) fs.getAttribute(Table.INPUT_TABLE_ATTRIBUTE); + final MutableInputTable mutableInputTable = (MutableInputTable) fs.getAttribute(Table.INPUT_TABLE_ATTRIBUTE); TestCase.assertNotNull(mutableInputTable); final Table delete = TableTools.newTable(stringCol("Name", 
"Fred")); @@ -156,7 +150,7 @@ public void testFilteredAndSorted() throws Exception { @Test public void testAddRows() throws Throwable { final Table input = TableTools.newTable(stringCol("Name", "Fred", "George", "Earl"), - stringCol("Employer", "Slate Rock and Gravel", "Spacely Sprockets", "Wesayso")); + stringCol("Employer", "Slate Rock and Gravel", "Spacely Sprockets", "Wesayso")); final KeyedArrayBackedMutableTable kabut = KeyedArrayBackedMutableTable.make(input, "Name"); final TableUpdateValidator validator = TableUpdateValidator.make("kabut", kabut); @@ -166,15 +160,13 @@ public void testAddRows() throws Throwable { assertTableEquals(input, kabut); - final MutableInputTable mutableInputTable = - (MutableInputTable) kabut.getAttribute(Table.INPUT_TABLE_ATTRIBUTE); + final MutableInputTable mutableInputTable = (MutableInputTable) kabut.getAttribute(Table.INPUT_TABLE_ATTRIBUTE); TestCase.assertNotNull(mutableInputTable); - final Table input2 = - TableTools.newTable(stringCol("Name", "Randy"), stringCol("Employer", "USGS")); + final Table input2 = TableTools.newTable(stringCol("Name", "Randy"), stringCol("Employer", "USGS")); - final Map randyMap = CollectionUtil.mapFromArray(String.class, Object.class, - "Name", "Randy", "Employer", "USGS"); + final Map randyMap = + CollectionUtil.mapFromArray(String.class, Object.class, "Name", "Randy", "Employer", "USGS"); final TestStatusListener listener = new TestStatusListener(); mutableInputTable.addRow(randyMap, true, listener); SleepUtil.sleep(100); @@ -184,12 +176,11 @@ public void testAddRows() throws Throwable { listener.waitForCompletion(); listener.assertSuccess(); - // TODO: should we throw the exception from the initial palce, should we defer edit checking - // to the LTM which + // TODO: should we throw the exception from the initial palce, should we defer edit checking to the LTM which // would make it consistent, but also slower to produce errors and uglier for reporting? 
final TestStatusListener listener2 = new TestStatusListener(); - final Map randyMap2 = CollectionUtil.mapFromArray(String.class, - Object.class, "Name", "Randy", "Employer", "Tegridy"); + final Map randyMap2 = + CollectionUtil.mapFromArray(String.class, Object.class, "Name", "Randy", "Employer", "Tegridy"); mutableInputTable.addRow(randyMap2, false, listener2); SleepUtil.sleep(100); listener2.assertIncomplete(); @@ -211,12 +202,11 @@ public void testAddBack() throws Exception { assertTableEquals(input, kabut); - final MutableInputTable mutableInputTable = - (MutableInputTable) kabut.getAttribute(Table.INPUT_TABLE_ATTRIBUTE); + final MutableInputTable mutableInputTable = (MutableInputTable) kabut.getAttribute(Table.INPUT_TABLE_ATTRIBUTE); TestCase.assertNotNull(mutableInputTable); - final Table input2 = TableTools.newTable(stringCol("Name", "George"), - stringCol("Employer", "Spacely Sprockets")); + final Table input2 = + TableTools.newTable(stringCol("Name", "George"), stringCol("Employer", "Spacely Sprockets")); handleDelayedRefresh(kabut, () -> mutableInputTable.add(input2)); assertTableEquals(input2, kabut); @@ -231,8 +221,8 @@ public void testAddBack() throws Exception { @Test public void testSetRows() { final Table input = TableTools.newTable(stringCol("Name", "Fred", "George", "Earl"), - stringCol("Employer", "Slate Rock and Gravel", "Spacely Sprockets", "Wesayso"), - stringCol("Spouse", "Wilma", "Jane", "Fran")); + stringCol("Employer", "Slate Rock and Gravel", "Spacely Sprockets", "Wesayso"), + stringCol("Spouse", "Wilma", "Jane", "Fran")); final KeyedArrayBackedMutableTable kabut = KeyedArrayBackedMutableTable.make(input, "Name"); final TableUpdateValidator validator = TableUpdateValidator.make("kabut", kabut); @@ -242,16 +232,15 @@ public void testSetRows() { assertTableEquals(input, kabut); - final MutableInputTable mutableInputTable = - (MutableInputTable) kabut.getAttribute(Table.INPUT_TABLE_ATTRIBUTE); + final MutableInputTable mutableInputTable = 
(MutableInputTable) kabut.getAttribute(Table.INPUT_TABLE_ATTRIBUTE); TestCase.assertNotNull(mutableInputTable); final Table defaultValues = input.where("Name=`George`"); - final Table ex2 = TableTools.newTable(stringCol("Name", "George"), - stringCol("Employer", "Cogswell"), stringCol("Spouse", "Jane")); + final Table ex2 = TableTools.newTable(stringCol("Name", "George"), stringCol("Employer", "Cogswell"), + stringCol("Spouse", "Jane")); - final Map cogMap = CollectionUtil.mapFromArray(String.class, Object.class, - "Name", "George", "Employer", "Cogswell"); + final Map cogMap = + CollectionUtil.mapFromArray(String.class, Object.class, "Name", "George", "Employer", "Cogswell"); mutableInputTable.setRow(defaultValues, 0, cogMap); SleepUtil.sleep(100); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(kabut::refresh); @@ -299,9 +288,8 @@ private synchronized void assertSuccess() throws Throwable { } } - private synchronized void assertFailure( - @NotNull final Class errorClass, - @Nullable final String errorMessage) { + private synchronized void assertFailure(@NotNull final Class errorClass, + @Nullable final String errorMessage) { TestCase.assertFalse(success); TestCase.assertNotNull(error); TestCase.assertTrue(errorClass.isAssignableFrom(error.getClass())); @@ -312,7 +300,7 @@ private synchronized void assertFailure( } private void handleDelayedRefresh(final BaseArrayBackedMutableTable table, - final FunctionalInterfaces.ThrowingRunnable action) throws Exception { + final FunctionalInterfaces.ThrowingRunnable action) throws Exception { final Thread refreshThread; final CountDownLatch gate = new CountDownLatch(1); @@ -323,10 +311,8 @@ private void handleDelayedRefresh(final BaseArrayBackedMutableTable table, try { gate.await(); } catch (InterruptedException ignored) { - // If this unexpected interruption happens, the test thread may hang in - // action.run() - // indefinitely. Best to hope it's already queued the pending action and - // proceed with refresh. 
+ // If this unexpected interruption happens, the test thread may hang in action.run() + // indefinitely. Best to hope it's already queued the pending action and proceed with refresh. } table.refresh(); }); @@ -341,7 +327,7 @@ private void handleDelayedRefresh(final BaseArrayBackedMutableTable table, refreshThread.join(); } catch (InterruptedException e) { throw new UncheckedDeephavenException( - "Interrupted unexpectedly while waiting for refresh cycle to complete", e); + "Interrupted unexpectedly while waiting for refresh cycle to complete", e); } } } diff --git a/DB/src/test/java/io/deephaven/db/v2/utils/TestReadOnlyRedirectedColumnSource.java b/DB/src/test/java/io/deephaven/db/v2/utils/TestReadOnlyRedirectedColumnSource.java index 69d361d7b13..485f2bbe07c 100644 --- a/DB/src/test/java/io/deephaven/db/v2/utils/TestReadOnlyRedirectedColumnSource.java +++ b/DB/src/test/java/io/deephaven/db/v2/utils/TestReadOnlyRedirectedColumnSource.java @@ -60,35 +60,34 @@ private Table makeTable() { is[i] = v; } final Table t = new InMemoryTable( - new String[] {"StringsCol", "IntsCol"}, - new Object[] {strs, is}); + new String[] {"StringsCol", "IntsCol"}, + new Object[] {strs, is}); ((DynamicNode) t).setRefreshing(true); return t; } private void doCheck( - final ColumnSource cs, - final OrderedKeys oks, - final WritableObjectChunk chunk, - final long offset) { + final ColumnSource cs, + final OrderedKeys oks, + final WritableObjectChunk chunk, + final long offset) { final MutableLong pos = new MutableLong(); oks.forAllLongs(k -> { final String s = (String) cs.get(k); - assertEquals("offset=" + (pos.intValue() + offset) + ", k=" + k, s, - chunk.get(pos.intValue())); + assertEquals("offset=" + (pos.intValue() + offset) + ", k=" + k, s, chunk.get(pos.intValue())); pos.increment(); }); } private Table doFillAndCheck( - final Table t, - final String col, - final WritableObjectChunk chunk, - final int sz) { + final Table t, + final String col, + final WritableObjectChunk chunk, + 
final int sz) { final ColumnSource cs = t.getColumnSource(col); final Index ix = t.getIndex(); try (final ColumnSource.FillContext fc = cs.makeFillContext(sz); - final OrderedKeys.Iterator it = ix.getOrderedKeysIterator()) { + final OrderedKeys.Iterator it = ix.getOrderedKeysIterator()) { long offset = 0; while (it.hasMore()) { final OrderedKeys oks = it.getNextOrderedKeysWithLength(sz); @@ -108,8 +107,7 @@ public void testFillChunk() { final IncrementalReleaseFilter incFilter = new IncrementalReleaseFilter(stepSz, stepSz); final Table live = t.where(incFilter).sort("IntsCol"); final int chunkSz = stepSz - 7; - final WritableObjectChunk chunk = - WritableObjectChunk.makeWritableChunk(chunkSz); + final WritableObjectChunk chunk = WritableObjectChunk.makeWritableChunk(chunkSz); while (live.size() < t.size()) { LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(incFilter::refresh); doFillAndCheck(live, "StringsCol", chunk, chunkSz); @@ -118,8 +116,7 @@ public void testFillChunk() { @Test public void testMixedFillChunk() { - final Table a = - TableTools.emptyTable(1_000_000L).update("A=(long) (Math.random() * 1_000_000L)"); + final Table a = TableTools.emptyTable(1_000_000L).update("A=(long) (Math.random() * 1_000_000L)"); final Table ab = a.update("B=A % 2"); final Table expected = ab.by("B", "A").sort("A"); @@ -135,14 +132,14 @@ public void testIds6196() { final Boolean[] ids6196_values = new Boolean[] {true, null, false}; QueryScope.addParam("ids6196_values", ids6196_values); - final QueryTable qt = TstUtils.testRefreshingTable(Index.FACTORY.getFlatIndex(6), - intCol("IntVal", 0, 1, 2, 3, 4, 5)); + final QueryTable qt = + TstUtils.testRefreshingTable(Index.FACTORY.getFlatIndex(6), intCol("IntVal", 0, 1, 2, 3, 4, 5)); - final Table a = LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> qt - .update("I2=3+IntVal", "BoolVal=ids6196_values[IntVal % ids6196_values.length]")); + final Table a = LiveTableMonitor.DEFAULT.sharedLock().computeLocked( + () -> 
qt.update("I2=3+IntVal", "BoolVal=ids6196_values[IntVal % ids6196_values.length]")); TableTools.showWithIndex(a); final Table b = LiveTableMonitor.DEFAULT.sharedLock() - .computeLocked(() -> a.naturalJoin(a, "I2=IntVal", "BoolVal2=BoolVal")); + .computeLocked(() -> a.naturalJoin(a, "I2=IntVal", "BoolVal2=BoolVal")); TableTools.showWithIndex(b); final TByteList byteList = new TByteArrayList(6); @@ -161,15 +158,14 @@ public void testIds6196() { assertArrayEquals(expecteds, byteList.toArray()); try (final ChunkSource.GetContext context = reinterpretedB.makeGetContext(6)) { - final ByteChunk result = - reinterpretedB.getChunk(context, b.getIndex()).asByteChunk(); + final ByteChunk result = reinterpretedB.getChunk(context, b.getIndex()).asByteChunk(); final byte[] chunkResult = new byte[6]; result.copyToTypedArray(0, chunkResult, 0, 6); assertArrayEquals(expecteds, chunkResult); } final Table c = LiveTableMonitor.DEFAULT.sharedLock() - .computeLocked(() -> a.naturalJoin(b, "I2=IntVal", "BoolVal3=BoolVal2")); + .computeLocked(() -> a.naturalJoin(b, "I2=IntVal", "BoolVal3=BoolVal2")); TableTools.showWithIndex(c); final ColumnSource reinterpretedC = c.getColumnSource("BoolVal3").reinterpret(byte.class); byteList.clear(); @@ -191,29 +187,26 @@ public void testIds6196() { assertArrayEquals(nullBytes, byteList.toArray()); try (final ChunkSource.GetContext context = reinterpretedC.makeGetContext(6)) { - final ByteChunk result = - reinterpretedC.getChunk(context, b.getIndex()).asByteChunk(); + final ByteChunk result = reinterpretedC.getChunk(context, b.getIndex()).asByteChunk(); final byte[] chunkResult = new byte[6]; result.copyToTypedArray(0, chunkResult, 0, 6); assertArrayEquals(nullBytes, chunkResult); } try (final ChunkSource.GetContext context = reinterpretedC.makeGetContext(6)) { - final ByteChunk result = - reinterpretedC.getPrevChunk(context, b.getIndex()).asByteChunk(); + final ByteChunk result = reinterpretedC.getPrevChunk(context, b.getIndex()).asByteChunk(); final 
byte[] chunkResult = new byte[6]; result.copyToTypedArray(0, chunkResult, 0, 6); assertArrayEquals(nullBytes, chunkResult); } - final Table captured = LiveTableMonitor.DEFAULT.sharedLock() - .computeLocked(() -> TableTools.emptyTable(1).snapshot(c)); + final Table captured = + LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> TableTools.emptyTable(1).snapshot(c)); TableTools.showWithIndex(captured); LiveTableMonitor.DEFAULT.startCycleForUnitTests(); TstUtils.addToTable(qt, Index.FACTORY.getFlatIndex(3), intCol("IntVal", 1, 2, 3)); - qt.notifyListeners(Index.FACTORY.getEmptyIndex(), Index.FACTORY.getEmptyIndex(), - Index.FACTORY.getFlatIndex(3)); + qt.notifyListeners(Index.FACTORY.getEmptyIndex(), Index.FACTORY.getEmptyIndex(), Index.FACTORY.getFlatIndex(3)); LiveTableMonitor.DEFAULT.flushAllNormalNotificationsForUnitTests(); diff --git a/DB/src/test/java/io/deephaven/db/v2/utils/TestSyncTableFilter.java b/DB/src/test/java/io/deephaven/db/v2/utils/TestSyncTableFilter.java index 1efd04b242d..52228d53b7a 100644 --- a/DB/src/test/java/io/deephaven/db/v2/utils/TestSyncTableFilter.java +++ b/DB/src/test/java/io/deephaven/db/v2/utils/TestSyncTableFilter.java @@ -30,11 +30,9 @@ protected void setUp() throws Exception { public void testSimple() { final QueryTable a = TstUtils.testRefreshingTable(longCol("ID", 1, 1, 2, 2, 3, 3), - intCol("Sentinel", 101, 102, 103, 104, 105, 106), - col("Key", "a", "a", "a", "a", "a", "a")); + intCol("Sentinel", 101, 102, 103, 104, 105, 106), col("Key", "a", "a", "a", "a", "a", "a")); final QueryTable b = TstUtils.testRefreshingTable(longCol("ID", 0, 0, 2, 2, 4, 4), - intCol("Sentinel", 201, 202, 203, 204, 205, 206), - col("Key", "a", "a", "a", "a", "a", "a")); + intCol("Sentinel", 201, 202, 203, 204, 205, 206), col("Key", "a", "a", "a", "a", "a", "a")); final SyncTableFilter.Builder builder = new SyncTableFilter.Builder("ID"); builder.addTable("a", a); @@ -49,31 +47,25 @@ public void testSimple() { TableTools.show(fa); 
TableTools.show(fb); - final DynamicTable ex1a = - newTable(longCol("ID", 2, 2), intCol("Sentinel", 103, 104), col("Key", "a", "a")); - final DynamicTable ex1b = - newTable(longCol("ID", 2, 2), intCol("Sentinel", 203, 204), col("Key", "a", "a")); + final DynamicTable ex1a = newTable(longCol("ID", 2, 2), intCol("Sentinel", 103, 104), col("Key", "a", "a")); + final DynamicTable ex1b = newTable(longCol("ID", 2, 2), intCol("Sentinel", 203, 204), col("Key", "a", "a")); assertEquals("", TableTools.diff(fa, ex1a, 10)); assertEquals("", TableTools.diff(fb, ex1b, 10)); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - TstUtils.addToTable(a, i(10, 11), longCol("ID", 5, 5), intCol("Sentinel", 107, 108), - col("Key", "b", "b")); + TstUtils.addToTable(a, i(10, 11), longCol("ID", 5, 5), intCol("Sentinel", 107, 108), col("Key", "b", "b")); a.notifyListeners(i(10, 11), i(), i()); }); assertEquals("", TableTools.diff(fa, ex1a, 10)); assertEquals("", TableTools.diff(fb, ex1b, 10)); - final DynamicTable ex2a = - newTable(longCol("ID", 5, 5), intCol("Sentinel", 107, 108), col("Key", "b", "b")); - final DynamicTable ex2b = - newTable(longCol("ID", 5, 5), intCol("Sentinel", 207, 208), col("Key", "a", "a")); + final DynamicTable ex2a = newTable(longCol("ID", 5, 5), intCol("Sentinel", 107, 108), col("Key", "b", "b")); + final DynamicTable ex2b = newTable(longCol("ID", 5, 5), intCol("Sentinel", 207, 208), col("Key", "a", "a")); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - TstUtils.addToTable(b, i(10, 11), longCol("ID", 5, 5), intCol("Sentinel", 207, 208), - col("Key", "a", "a")); + TstUtils.addToTable(b, i(10, 11), longCol("ID", 5, 5), intCol("Sentinel", 207, 208), col("Key", "a", "a")); b.notifyListeners(i(10, 11), i(), i()); }); @@ -86,11 +78,9 @@ public void testSimple() { public void testSimpleAddAgain() { final QueryTable a = TstUtils.testRefreshingTable(longCol("ID", 1, 1, 2, 2, 3, 3), - intCol("Sentinel", 101, 102, 103, 104, 105, 106), - col("Key", "a", "a", 
"a", "a", "a", "a")); + intCol("Sentinel", 101, 102, 103, 104, 105, 106), col("Key", "a", "a", "a", "a", "a", "a")); final QueryTable b = TstUtils.testRefreshingTable(longCol("ID", 0, 0, 2, 2, 4, 4), - intCol("Sentinel", 201, 202, 203, 204, 205, 206), - col("Key", "a", "a", "a", "a", "a", "a")); + intCol("Sentinel", 201, 202, 203, 204, 205, 206), col("Key", "a", "a", "a", "a", "a", "a")); final SyncTableFilter.Builder builder = new SyncTableFilter.Builder("ID"); builder.addTable("a", a); @@ -105,34 +95,28 @@ public void testSimpleAddAgain() { TableTools.show(fa); TableTools.show(fb); - final DynamicTable ex1a = - newTable(longCol("ID", 2, 2), intCol("Sentinel", 103, 104), col("Key", "a", "a")); - final DynamicTable ex1b = - newTable(longCol("ID", 2, 2), intCol("Sentinel", 203, 204), col("Key", "a", "a")); + final DynamicTable ex1a = newTable(longCol("ID", 2, 2), intCol("Sentinel", 103, 104), col("Key", "a", "a")); + final DynamicTable ex1b = newTable(longCol("ID", 2, 2), intCol("Sentinel", 203, 204), col("Key", "a", "a")); assertEquals("", TableTools.diff(fa, ex1a, 10)); assertEquals("", TableTools.diff(fb, ex1b, 10)); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - TstUtils.addToTable(a, i(10, 11), longCol("ID", 5, 5), intCol("Sentinel", 107, 108), - col("Key", "b", "b")); + TstUtils.addToTable(a, i(10, 11), longCol("ID", 5, 5), intCol("Sentinel", 107, 108), col("Key", "b", "b")); a.notifyListeners(i(10, 11), i(), i()); }); assertEquals("", TableTools.diff(fa, ex1a, 10)); assertEquals("", TableTools.diff(fb, ex1b, 10)); - final DynamicTable ex2a = newTable(longCol("ID", 5, 5, 5, 5), - intCol("Sentinel", 107, 108, 109, 110), col("Key", "b", "b", "c", "c")); - final DynamicTable ex2b = - newTable(longCol("ID", 5, 5), intCol("Sentinel", 207, 208), col("Key", "a", "a")); + final DynamicTable ex2a = newTable(longCol("ID", 5, 5, 5, 5), intCol("Sentinel", 107, 108, 109, 110), + col("Key", "b", "b", "c", "c")); + final DynamicTable ex2b = newTable(longCol("ID", 
5, 5), intCol("Sentinel", 207, 208), col("Key", "a", "a")); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - TstUtils.addToTable(b, i(10, 11), longCol("ID", 5, 5), intCol("Sentinel", 207, 208), - col("Key", "a", "a")); + TstUtils.addToTable(b, i(10, 11), longCol("ID", 5, 5), intCol("Sentinel", 207, 208), col("Key", "a", "a")); b.notifyListeners(i(10, 11), i(), i()); - TstUtils.addToTable(a, i(12, 13), longCol("ID", 5, 5), intCol("Sentinel", 109, 110), - col("Key", "c", "c")); + TstUtils.addToTable(a, i(12, 13), longCol("ID", 5, 5), intCol("Sentinel", 109, 110), col("Key", "c", "c")); a.notifyListeners(i(12, 13), i(), i()); }); @@ -142,12 +126,11 @@ public void testSimpleAddAgain() { assertEquals("", TableTools.diff(fa, ex2a, 10)); assertEquals("", TableTools.diff(fb, ex2b, 10)); - final DynamicTable ex3b = newTable(longCol("ID", 5, 5, 5), - intCol("Sentinel", 207, 208, 209), col("Key", "a", "a", "a")); + final DynamicTable ex3b = + newTable(longCol("ID", 5, 5, 5), intCol("Sentinel", 207, 208, 209), col("Key", "a", "a", "a")); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - TstUtils.addToTable(b, i(12, 13), longCol("ID", 5, 6), intCol("Sentinel", 209, 210), - col("Key", "a", "a")); + TstUtils.addToTable(b, i(12, 13), longCol("ID", 5, 6), intCol("Sentinel", 209, 210), col("Key", "a", "a")); b.notifyListeners(i(12, 13), i(), i()); }); @@ -158,8 +141,7 @@ public void testSimpleAddAgain() { assertEquals("", TableTools.diff(fb, ex3b, 10)); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - TstUtils.addToTable(a, i(14, 15), longCol("ID", 5, 6), intCol("Sentinel", 111, 112), - col("Key", "a", "a")); + TstUtils.addToTable(a, i(14, 15), longCol("ID", 5, 6), intCol("Sentinel", 111, 112), col("Key", "a", "a")); a.notifyListeners(i(14, 15), i(), i()); }); @@ -167,10 +149,8 @@ public void testSimpleAddAgain() { TableTools.showWithIndex(fa); TableTools.showWithIndex(fb); - final DynamicTable ex4a = - newTable(longCol("ID", 6), intCol("Sentinel", 112), 
col("Key", "a")); - final DynamicTable ex4b = - newTable(longCol("ID", 6), intCol("Sentinel", 210), col("Key", "a")); + final DynamicTable ex4a = newTable(longCol("ID", 6), intCol("Sentinel", 112), col("Key", "a")); + final DynamicTable ex4b = newTable(longCol("ID", 6), intCol("Sentinel", 210), col("Key", "a")); assertEquals("", TableTools.diff(fa, ex4a, 10)); assertEquals("", TableTools.diff(fb, ex4b, 10)); @@ -178,15 +158,12 @@ public void testSimpleAddAgain() { public void testNullAppearance() { final QueryTable a = TstUtils.testRefreshingTable( - longCol("ID", 1, 1, QueryConstants.NULL_LONG, QueryConstants.NULL_LONG, 3, 3), - intCol("Sentinel", 101, 102, 103, 104, 105, 106), - col("Key", "a", "a", "a", "a", "a", "a")); + longCol("ID", 1, 1, QueryConstants.NULL_LONG, QueryConstants.NULL_LONG, 3, 3), + intCol("Sentinel", 101, 102, 103, 104, 105, 106), col("Key", "a", "a", "a", "a", "a", "a")); final QueryTable b = TstUtils.testRefreshingTable(longCol("ID", 0, 0, 2, 2, 4, 4), - intCol("Sentinel", 201, 202, 203, 204, 205, 206), - col("Key", "a", "a", "a", "a", "a", "a")); + intCol("Sentinel", 201, 202, 203, 204, 205, 206), col("Key", "a", "a", "a", "a", "a", "a")); - final SyncTableFilter.Builder builder = - new SyncTableFilter.Builder().defaultId("ID").defaultKeys() + final SyncTableFilter.Builder builder = new SyncTableFilter.Builder().defaultId("ID").defaultKeys() .addTable("a", a) .addTable("b", b, "ID"); final TableMap result = LiveTableMonitor.DEFAULT.sharedLock().computeLocked(builder::build); @@ -205,29 +182,22 @@ public void testNullAppearance() { assertEquals("", TableTools.diff(fb, empty, 10)); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - TstUtils.addToTable(a, i(10, 11), longCol("ID", 5, 5), intCol("Sentinel", 107, 108), - col("Key", "b", "b")); - TstUtils.addToTable(a, i(2, 3), longCol("ID", 2, 2), intCol("Sentinel", 103, 104), - col("Key", "a", "a")); + TstUtils.addToTable(a, i(10, 11), longCol("ID", 5, 5), intCol("Sentinel", 107, 108), 
col("Key", "b", "b")); + TstUtils.addToTable(a, i(2, 3), longCol("ID", 2, 2), intCol("Sentinel", 103, 104), col("Key", "a", "a")); a.notifyListeners(i(10, 11), i(), i(2, 3)); }); - final DynamicTable ex1a = - newTable(longCol("ID", 2, 2), intCol("Sentinel", 103, 104), col("Key", "a", "a")); - final DynamicTable ex1b = - newTable(longCol("ID", 2, 2), intCol("Sentinel", 203, 204), col("Key", "a", "a")); + final DynamicTable ex1a = newTable(longCol("ID", 2, 2), intCol("Sentinel", 103, 104), col("Key", "a", "a")); + final DynamicTable ex1b = newTable(longCol("ID", 2, 2), intCol("Sentinel", 203, 204), col("Key", "a", "a")); assertEquals("", TableTools.diff(fa, ex1a, 10)); assertEquals("", TableTools.diff(fb, ex1b, 10)); - final DynamicTable ex2a = - newTable(longCol("ID", 5, 5), intCol("Sentinel", 107, 108), col("Key", "b", "b")); - final DynamicTable ex2b = - newTable(longCol("ID", 5, 5), intCol("Sentinel", 207, 208), col("Key", "a", "a")); + final DynamicTable ex2a = newTable(longCol("ID", 5, 5), intCol("Sentinel", 107, 108), col("Key", "b", "b")); + final DynamicTable ex2b = newTable(longCol("ID", 5, 5), intCol("Sentinel", 207, 208), col("Key", "a", "a")); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - TstUtils.addToTable(b, i(10, 11), longCol("ID", 5, 5), intCol("Sentinel", 207, 208), - col("Key", "a", "a")); + TstUtils.addToTable(b, i(10, 11), longCol("ID", 5, 5), intCol("Sentinel", 207, 208), col("Key", "a", "a")); b.notifyListeners(i(10, 11), i(), i()); }); @@ -239,13 +209,11 @@ public void testNullAppearance() { } public void testSimpleKeyed() { - final QueryTable a = - TstUtils.testRefreshingTable(longCol("ID", 1, 1, 2, 2, 3, 3, 4, 4, 5, 5), + final QueryTable a = TstUtils.testRefreshingTable(longCol("ID", 1, 1, 2, 2, 3, 3, 4, 4, 5, 5), intCol("Sentinel", 101, 102, 103, 104, 105, 106, 107, 108, 109, 110), col("Key", "a", "a", "b", "b", "a", "a", "a", "a", "b", "b")); final QueryTable b = TstUtils.testRefreshingTable(longCol("Ego", 0, 0, 2, 2, 4, 4), 
- intCol("Sentinel", 201, 202, 203, 204, 205, 206), - col("Klyuch", "a", "a", "b", "b", "a", "a")); + intCol("Sentinel", 201, 202, 203, 204, 205, 206), col("Klyuch", "a", "a", "b", "b", "a", "a")); final SyncTableFilter.Builder builder = new SyncTableFilter.Builder(); builder.addTable("a", a, "ID", "Key"); @@ -267,39 +235,37 @@ public void testSimpleKeyed() { TableTools.show(fa); TableTools.show(fb); - final DynamicTable ex1a = newTable(longCol("ID", 2, 2, 4, 4), - intCol("Sentinel", 103, 104, 107, 108), col("Key", "b", "b", "a", "a")); - final DynamicTable ex1b = newTable(longCol("Ego", 2, 2, 4, 4), - intCol("Sentinel", 203, 204, 205, 206), col("Klyuch", "b", "b", "a", "a")); + final DynamicTable ex1a = newTable(longCol("ID", 2, 2, 4, 4), intCol("Sentinel", 103, 104, 107, 108), + col("Key", "b", "b", "a", "a")); + final DynamicTable ex1b = newTable(longCol("Ego", 2, 2, 4, 4), intCol("Sentinel", 203, 204, 205, 206), + col("Klyuch", "b", "b", "a", "a")); assertEquals("", TableTools.diff(fa, ex1a, 10)); assertEquals("", TableTools.diff(fb, ex1b, 10)); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { TstUtils.addToTable(b, i(10, 11), longCol("Ego", 5, 5), intCol("Sentinel", 207, 208), - col("Klyuch", "b", "c")); + col("Klyuch", "b", "c")); b.notifyListeners(i(10, 11), i(), i()); }); - final DynamicTable ex2a = newTable(longCol("ID", 4, 4, 5, 5), - intCol("Sentinel", 107, 108, 109, 110), col("Key", "a", "a", "b", "b")); - final DynamicTable ex2b = newTable(longCol("Ego", 4, 4, 5), - intCol("Sentinel", 205, 206, 207), col("Klyuch", "a", "a", "b")); + final DynamicTable ex2a = newTable(longCol("ID", 4, 4, 5, 5), intCol("Sentinel", 107, 108, 109, 110), + col("Key", "a", "a", "b", "b")); + final DynamicTable ex2b = + newTable(longCol("Ego", 4, 4, 5), intCol("Sentinel", 205, 206, 207), col("Klyuch", "a", "a", "b")); assertEquals("", TableTools.diff(fa, ex2a, 10)); assertEquals("", TableTools.diff(fb, ex2b, 10)); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { 
- TstUtils.addToTable(a, i(20, 21), longCol("ID", 5, 5), intCol("Sentinel", 111, 112), - col("Key", "c", "c")); + TstUtils.addToTable(a, i(20, 21), longCol("ID", 5, 5), intCol("Sentinel", 111, 112), col("Key", "c", "c")); a.notifyListeners(i(20, 21), i(), i()); }); final DynamicTable ex3a = newTable(longCol("ID", 4, 4, 5, 5, 5, 5), - intCol("Sentinel", 107, 108, 109, 110, 111, 112), - col("Key", "a", "a", "b", "b", "c", "c")); - final DynamicTable ex3b = newTable(longCol("Ego", 4, 4, 5, 5), - intCol("Sentinel", 205, 206, 207, 208), col("Klyuch", "a", "a", "b", "c")); + intCol("Sentinel", 107, 108, 109, 110, 111, 112), col("Key", "a", "a", "b", "b", "c", "c")); + final DynamicTable ex3b = newTable(longCol("Ego", 4, 4, 5, 5), intCol("Sentinel", 205, 206, 207, 208), + col("Klyuch", "a", "a", "b", "c")); TableTools.showWithIndex(fa); TableTools.showWithIndex(fb); @@ -312,25 +278,21 @@ public void testSimpleKeyed() { TableTools.showWithIndex(a, 30); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - TstUtils.addToTable(a, i(20, 21), longCol("ID", 5, 5), intCol("Sentinel", 113, 114), - col("Key", "c", "c")); + TstUtils.addToTable(a, i(20, 21), longCol("ID", 5, 5), intCol("Sentinel", 113, 114), col("Key", "c", "c")); a.notifyListeners(i(), i(), i(20, 21)); }); final DynamicTable ex4a = newTable(longCol("ID", 4, 4, 5, 5, 5, 5), - intCol("Sentinel", 107, 108, 109, 110, 113, 114), - col("Key", "a", "a", "b", "b", "c", "c")); + intCol("Sentinel", 107, 108, 109, 110, 113, 114), col("Key", "a", "a", "b", "b", "c", "c")); assertEquals("", TableTools.diff(fa, ex4a, 10)); } public void testErrorPropagation() { - final QueryTable a = - TstUtils.testRefreshingTable(longCol("ID", 1, 1, 2, 2, 3, 3, 4, 4, 5, 5), + final QueryTable a = TstUtils.testRefreshingTable(longCol("ID", 1, 1, 2, 2, 3, 3, 4, 4, 5, 5), intCol("Sentinel", 101, 102, 103, 104, 105, 106, 107, 108, 109, 110), col("Key", "a", "a", "b", "b", "a", "a", "a", "a", "b", "b")); final QueryTable b = 
TstUtils.testRefreshingTable(longCol("Ego", 0, 0, 2, 2, 4, 4), - intCol("Sentinel", 201, 202, 203, 204, 205, 206), - col("Klyuch", "a", "a", "b", "b", "a", "a")); + intCol("Sentinel", 201, 202, 203, 204, 205, 206), col("Klyuch", "a", "a", "b", "b", "a", "a")); final SyncTableFilter.Builder builder = new SyncTableFilter.Builder(); builder.addTable("a", a, "ID", "Key"); @@ -355,24 +317,20 @@ public void testErrorPropagation() { TestCase.assertEquals(1, getUpdateErrors().size()); final Throwable throwable = throwables.get(0); TestCase.assertEquals(IllegalStateException.class, throwable.getClass()); - TestCase.assertEquals("Can not process removed rows in SyncTableFilter!", - throwable.getMessage()); + TestCase.assertEquals("Can not process removed rows in SyncTableFilter!", throwable.getMessage()); return true; }); assertNotNull(la.originalException); assertNotNull(lb.originalException); - assertEquals("Can not process removed rows in SyncTableFilter!", - la.originalException.getMessage()); - assertEquals("Can not process removed rows in SyncTableFilter!", - lb.originalException.getMessage()); + assertEquals("Can not process removed rows in SyncTableFilter!", la.originalException.getMessage()); + assertEquals("Can not process removed rows in SyncTableFilter!", lb.originalException.getMessage()); } public void testDependencies() { - final QueryTable a = TstUtils.testRefreshingTable(longCol("ID", 1), intCol("Sentinel", 101), - col("Key", "a")); - final QueryTable b = TstUtils.testRefreshingTable(longCol("Ego", 0, 1, 1), - intCol("Sentinel", 201, 202, 203), col("Klyuch", "a", "a", "b")); + final QueryTable a = TstUtils.testRefreshingTable(longCol("ID", 1), intCol("Sentinel", 101), col("Key", "a")); + final QueryTable b = TstUtils.testRefreshingTable(longCol("Ego", 0, 1, 1), intCol("Sentinel", 201, 202, 203), + col("Klyuch", "a", "a", "b")); final SyncTableFilter.Builder builder = new SyncTableFilter.Builder(); builder.addTable("a", a, "ID"); @@ -386,14 +344,14 @@ public 
void testDependencies() { fb.setAttribute("NAME", "b"); - final Table fau = LiveTableMonitor.DEFAULT.sharedLock() - .computeLocked(() -> fa.update("SentinelDoubled=Sentinel*2")); - final Table fbu = LiveTableMonitor.DEFAULT.sharedLock() - .computeLocked(() -> fb.update("SentinelDoubled=Sentinel*2")); - final Table joined = LiveTableMonitor.DEFAULT.sharedLock().computeLocked( - () -> fau.naturalJoin(fbu, "Key=Klyuch", "SB=Sentinel,SBD=SentinelDoubled")); - final Table sentSum = LiveTableMonitor.DEFAULT.sharedLock() - .computeLocked(() -> joined.update("SS=SBD+SentinelDoubled")); + final Table fau = + LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> fa.update("SentinelDoubled=Sentinel*2")); + final Table fbu = + LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> fb.update("SentinelDoubled=Sentinel*2")); + final Table joined = LiveTableMonitor.DEFAULT.sharedLock() + .computeLocked(() -> fau.naturalJoin(fbu, "Key=Klyuch", "SB=Sentinel,SBD=SentinelDoubled")); + final Table sentSum = + LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> joined.update("SS=SBD+SentinelDoubled")); TableTools.showWithIndex(sentSum); @@ -427,38 +385,38 @@ public void testDependencies() { public void testTableMap() { final QueryTable source1 = TstUtils.testRefreshingTable(Index.FACTORY.getFlatIndex(10), - col("Partition", "A", "A", "B", "B", "C", "C", "C", "D", "D", "D"), - longCol("ID", 1, 2, /* B */ 1, 2, /* C */ 1, 2, 2, /* D */ 1, 2, 2), - intCol("Sentinel", 101, 102, 103, 104, 105, 106, 107, 108, 109, 110)); + col("Partition", "A", "A", "B", "B", "C", "C", "C", "D", "D", "D"), + longCol("ID", 1, 2, /* B */ 1, 2, /* C */ 1, 2, 2, /* D */ 1, 2, 2), + intCol("Sentinel", 101, 102, 103, 104, 105, 106, 107, 108, 109, 110)); final QueryTable source2 = TstUtils.testRefreshingTable(Index.FACTORY.getFlatIndex(5), - col("Division", "A", "A", "B", "C", "C"), - longCol("ID", 2, 3, 1, 2, 2), - intCol("Sentinel", 201, 202, 203, 204, 205)); + col("Division", "A", "A", "B", "C", 
"C"), + longCol("ID", 2, 3, 1, 2, 2), + intCol("Sentinel", 201, 202, 203, 204, 205)); final TableMap sm1 = source1.updateView("SK1=k").byExternal("Partition"); final TableMap sm2 = source2.updateView("SK2=k").byExternal("Division"); - final TableMap bykey = LiveTableMonitor.DEFAULT.sharedLock().computeLocked( - () -> new SyncTableFilter.Builder().addTable("source1", source1, "ID", "Partition") - .addTable("source2", source2, "ID", "Division").build()); + final TableMap bykey = LiveTableMonitor.DEFAULT.sharedLock() + .computeLocked(() -> new SyncTableFilter.Builder().addTable("source1", source1, "ID", "Partition") + .addTable("source2", source2, "ID", "Division").build()); final Table s1f = bykey.get("source1"); final Table s2f = bykey.get("source2"); TableTools.showWithIndex(s1f); TableTools.showWithIndex(s2f); - final TableMap filteredByPartition = LiveTableMonitor.DEFAULT.sharedLock() - .computeLocked(() -> new SyncTableFilter.Builder("ID").addTableMap("source1", sm1) - .addTableMap("source2", sm2, "ID").build()); + final TableMap filteredByPartition = + LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> new SyncTableFilter.Builder("ID") + .addTableMap("source1", sm1).addTableMap("source2", sm2, "ID").build()); for (Object key : filteredByPartition.getKeySet()) { System.out.println(key); TableTools.showWithIndex(filteredByPartition.get(key)); } - final TableMap s1tm = new FilteredTableMap(filteredByPartition, - sk -> ((SmartKey) sk).get(1).equals("source1"), sk -> ((SmartKey) sk).get(0)); - final TableMap s2tm = new FilteredTableMap(filteredByPartition, - sk -> ((SmartKey) sk).get(1).equals("source2"), sk -> ((SmartKey) sk).get(0)); + final TableMap s1tm = new FilteredTableMap(filteredByPartition, sk -> ((SmartKey) sk).get(1).equals("source1"), + sk -> ((SmartKey) sk).get(0)); + final TableMap s2tm = new FilteredTableMap(filteredByPartition, sk -> ((SmartKey) sk).get(1).equals("source2"), + sk -> ((SmartKey) sk).get(0)); final Table s1merged = 
LiveTableMonitor.DEFAULT.sharedLock().computeLocked(s1tm::merge); final Table s2merged = LiveTableMonitor.DEFAULT.sharedLock().computeLocked(s2tm::merge); @@ -470,7 +428,7 @@ public void testTableMap() { LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { addToTable(source2, i(10, 11), col("Division", "D", "B"), longCol("ID", 2, 2), - intCol("Sentinel", 206, 207)); + intCol("Sentinel", 206, 207)); source2.notifyListeners(i(10, 11), i(), i()); }); @@ -482,10 +440,10 @@ public void testTableMap() { LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { addToTable(source2, i(12, 13), col("Division", "D", "E"), longCol("ID", 3, 3), - intCol("Sentinel", 208, 209)); + intCol("Sentinel", 208, 209)); source2.notifyListeners(i(12, 13), i(), i()); - addToTable(source1, i(10, 11, 12), col("Partition", "D", "D", "E"), - longCol("ID", 3, 4, 3), intCol("Sentinel", 111, 112, 113)); + addToTable(source1, i(10, 11, 12), col("Partition", "D", "D", "E"), longCol("ID", 3, 4, 3), + intCol("Sentinel", 111, 112, 113)); source1.notifyListeners(i(10, 11, 12), i(), i()); }); @@ -509,77 +467,71 @@ private void testTableMapRandomized(int seed) { final ColumnInfo[] columnInfoSet2; final int size = 10; final QueryTable source1Unfiltered = getTable(size, random, - columnInfo1 = initColumnInfos(new String[] {"Partition", "ID", "Sentinel", "Truthy"}, - new SetGenerator<>("a", "b", "c", "d", "e"), - new IncreasingSortedLongGenerator(2, 1000), - new IntGenerator(0, 1000000), - new BooleanGenerator())); + columnInfo1 = initColumnInfos(new String[] {"Partition", "ID", "Sentinel", "Truthy"}, + new SetGenerator<>("a", "b", "c", "d", "e"), + new IncreasingSortedLongGenerator(2, 1000), + new IntGenerator(0, 1000000), + new BooleanGenerator())); final QueryTable source2Unfiltered = getTable(size, random, - columnInfo2 = initColumnInfos(new String[] {"Partition", "ID", "Sentinel", "Truthy"}, - new SetGenerator<>("a", "b", "c", "d", "e"), - new IncreasingSortedLongGenerator(2, 1000), - new 
IntGenerator(0, 1000000), - new BooleanGenerator())); - - final QueryTable filterSet1 = - getTable(1, random, columnInfoSet1 = initColumnInfos(new String[] {"Partition"}, - new SetGenerator<>("a", "b", "c", "d", "e"))); - final QueryTable filterSet2 = - getTable(1, random, columnInfoSet2 = initColumnInfos(new String[] {"Partition"}, - new SetGenerator<>("a", "b", "c", "d", "e"))); - - final Table dummy = TableTools.newTable(col("Partition", "A"), longCol("ID", 0), - intCol("Sentinel", 12345678), col("Truthy", true)); - - final Table source1 = - LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> TableTools.merge(dummy, + columnInfo2 = initColumnInfos(new String[] {"Partition", "ID", "Sentinel", "Truthy"}, + new SetGenerator<>("a", "b", "c", "d", "e"), + new IncreasingSortedLongGenerator(2, 1000), + new IntGenerator(0, 1000000), + new BooleanGenerator())); + + final QueryTable filterSet1 = getTable(1, random, columnInfoSet1 = + initColumnInfos(new String[] {"Partition"}, new SetGenerator<>("a", "b", "c", "d", "e"))); + final QueryTable filterSet2 = getTable(1, random, columnInfoSet2 = + initColumnInfos(new String[] {"Partition"}, new SetGenerator<>("a", "b", "c", "d", "e"))); + + final Table dummy = TableTools.newTable(col("Partition", "A"), longCol("ID", 0), intCol("Sentinel", 12345678), + col("Truthy", true)); + + final Table source1 = LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> TableTools.merge(dummy, source1Unfiltered.whereIn(filterSet1, "Partition").update("Truthy=!!Truthy"))); - final Table source2 = - LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> TableTools.merge(dummy, + final Table source2 = LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> TableTools.merge(dummy, source2Unfiltered.whereIn(filterSet2, "Partition").update("Truthy=!!Truthy"))); final TableMap sm1 = source1.updateView("SK1=k").byExternal("Partition"); final TableMap sm2 = source2.updateView("SK2=k").byExternal("Partition"); - final TableMap bykey = 
LiveTableMonitor.DEFAULT.sharedLock() - .computeLocked(() -> new SyncTableFilter.Builder("ID", "Partition") - .addTable("source1", source1).addTable("source2", source2).build()); + final TableMap bykey = + LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> new SyncTableFilter.Builder("ID", "Partition") + .addTable("source1", source1).addTable("source2", source2).build()); final Table s1f = bykey.get("source1"); final Table s2f = bykey.get("source2"); final TableMap bykey2 = LiveTableMonitor.DEFAULT.sharedLock() - .computeLocked(() -> new SyncTableFilter.Builder("ID", "Partition", "Truthy") - .addTable("source1", source1).addTable("source2", source2).build()); + .computeLocked(() -> new SyncTableFilter.Builder("ID", "Partition", "Truthy") + .addTable("source1", source1).addTable("source2", source2).build()); final Table s1fKeyed = bykey2.get("source1"); final Table s2fKeyed = bykey2.get("source2"); - final TableMap filteredByPartition = LiveTableMonitor.DEFAULT.sharedLock() - .computeLocked(() -> new SyncTableFilter.Builder("ID").addTableMap("source1", sm1) - .addTableMap("source2", sm2).build()); - final TableMap filteredByPartitionKeyed = LiveTableMonitor.DEFAULT.sharedLock() - .computeLocked(() -> new SyncTableFilter.Builder("ID", "Truthy") - .addTableMap("source1", sm1).addTableMap("source2", sm2).build()); + final TableMap filteredByPartition = + LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> new SyncTableFilter.Builder("ID") + .addTableMap("source1", sm1).addTableMap("source2", sm2).build()); + final TableMap filteredByPartitionKeyed = + LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> new SyncTableFilter.Builder("ID", "Truthy") + .addTableMap("source1", sm1).addTableMap("source2", sm2).build()); - final TableMap s1tm = new FilteredTableMap(filteredByPartition, - sk -> ((SmartKey) sk).get(1).equals("source1"), sk -> ((SmartKey) sk).get(0)); - final TableMap s2tm = new FilteredTableMap(filteredByPartition, - sk -> ((SmartKey) 
sk).get(1).equals("source2"), sk -> ((SmartKey) sk).get(0)); + final TableMap s1tm = new FilteredTableMap(filteredByPartition, sk -> ((SmartKey) sk).get(1).equals("source1"), + sk -> ((SmartKey) sk).get(0)); + final TableMap s2tm = new FilteredTableMap(filteredByPartition, sk -> ((SmartKey) sk).get(1).equals("source2"), + sk -> ((SmartKey) sk).get(0)); final TableMap s1tmKeyed = new FilteredTableMap(filteredByPartitionKeyed, - sk -> ((SmartKey) sk).get(1).equals("source1"), sk -> ((SmartKey) sk).get(0)); + sk -> ((SmartKey) sk).get(1).equals("source1"), sk -> ((SmartKey) sk).get(0)); final TableMap s2tmKeyed = new FilteredTableMap(filteredByPartitionKeyed, - sk -> ((SmartKey) sk).get(1).equals("source2"), sk -> ((SmartKey) sk).get(0)); + sk -> ((SmartKey) sk).get(1).equals("source2"), sk -> ((SmartKey) sk).get(0)); final Table s1merged = LiveTableMonitor.DEFAULT.sharedLock().computeLocked(s1tm::merge); final Table s2merged = LiveTableMonitor.DEFAULT.sharedLock().computeLocked(s2tm::merge); final Table s1mergedSorted = s1merged.sort("SK1").dropColumns("SK1"); final Table s2mergedSorted = s2merged.sort("SK2").dropColumns("SK2"); - final Table s1KeyedMerged = - LiveTableMonitor.DEFAULT.sharedLock().computeLocked(s1tmKeyed::merge); - final Table s2KeyedMerged = - LiveTableMonitor.DEFAULT.sharedLock().computeLocked(s2tmKeyed::merge); + final Table s1KeyedMerged = LiveTableMonitor.DEFAULT.sharedLock().computeLocked(s1tmKeyed::merge); + final Table s2KeyedMerged = LiveTableMonitor.DEFAULT.sharedLock().computeLocked(s2tmKeyed::merge); final Table s1KeyedMergedSorted = s1KeyedMerged.sort("SK1").dropColumns("SK1"); final Table s2KeyedMergedSorted = s2KeyedMerged.sort("SK2").dropColumns("SK2"); @@ -603,8 +555,7 @@ private void testTableMapRandomized(int seed) { // append to table 1 GenerateTableUpdates.generateAppends(size, random, source1Unfiltered, columnInfo1); // append to table 2 - GenerateTableUpdates.generateAppends(size / 2, random, source2Unfiltered, - columnInfo2); 
+ GenerateTableUpdates.generateAppends(size / 2, random, source2Unfiltered, columnInfo2); }); if (printTableUpdates) { @@ -641,8 +592,7 @@ public void onUpdate(Index added, Index removed, Index modified) { } @Override - public void onFailureInternal(Throwable originalException, - UpdatePerformanceTracker.Entry sourceEntry) { + public void onFailureInternal(Throwable originalException, UpdatePerformanceTracker.Entry sourceEntry) { this.originalException = originalException; } } diff --git a/DB/src/test/java/io/deephaven/db/v2/utils/TestTailInitializationFilter.java b/DB/src/test/java/io/deephaven/db/v2/utils/TestTailInitializationFilter.java index 0a649496188..d4a039d30eb 100644 --- a/DB/src/test/java/io/deephaven/db/v2/utils/TestTailInitializationFilter.java +++ b/DB/src/test/java/io/deephaven/db/v2/utils/TestTailInitializationFilter.java @@ -26,17 +26,15 @@ public void testSimple() { final DBDateTime threshold2 = new DBDateTime(data[199] - DBTimeUtils.secondsToNanos(600)); final QueryTable input = TstUtils.testRefreshingTable(builder.getIndex(), - ColumnHolder.getDateTimeColumnHolder("Timestamp", false, data)); + ColumnHolder.getDateTimeColumnHolder("Timestamp", false, data)); final Table filtered = TailInitializationFilter.mostRecent(input, "Timestamp", "00:10:00"); TableTools.showWithIndex(filtered); assertEquals(44, filtered.size()); - final Table slice0_100_filtered = - input.slice(0, 100).where("Timestamp >= '" + threshold1 + "'"); - final Table slice100_200_filtered = - input.slice(100, 200).where("Timestamp >= '" + threshold2 + "'"); + final Table slice0_100_filtered = input.slice(0, 100).where("Timestamp >= '" + threshold1 + "'"); + final Table slice100_200_filtered = input.slice(100, 200).where("Timestamp >= '" + threshold2 + "'"); final Table expected = LiveTableMonitor.DEFAULT.sharedLock() - .computeLocked(() -> TableTools.merge(slice0_100_filtered, slice100_200_filtered)); + .computeLocked(() -> TableTools.merge(slice0_100_filtered, 
slice100_200_filtered)); assertEquals("", TableTools.diff(filtered, expected, 10)); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { @@ -52,11 +50,10 @@ public void testSimple() { }); final Table slice100_102 = input.slice(100, 102); - final Table slice102_202_filtered = - input.slice(102, 202).where("Timestamp >= '" + threshold2 + "'"); + final Table slice102_202_filtered = input.slice(102, 202).where("Timestamp >= '" + threshold2 + "'"); final Table slice202_204 = input.slice(202, 204); - final Table expected2 = LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> TableTools - .merge(slice0_100_filtered, slice100_102, slice102_202_filtered, slice202_204)); + final Table expected2 = LiveTableMonitor.DEFAULT.sharedLock().computeLocked( + () -> TableTools.merge(slice0_100_filtered, slice100_102, slice102_202_filtered, slice202_204)); assertEquals("", TableTools.diff(filtered, expected2, 10)); } diff --git a/DB/src/test/java/io/deephaven/db/v2/utils/TestTimeSeriesFilter.java b/DB/src/test/java/io/deephaven/db/v2/utils/TestTimeSeriesFilter.java index 44de6758adf..bc667d558c3 100644 --- a/DB/src/test/java/io/deephaven/db/v2/utils/TestTimeSeriesFilter.java +++ b/DB/src/test/java/io/deephaven/db/v2/utils/TestTimeSeriesFilter.java @@ -36,8 +36,7 @@ public void testSimple() { Table source = TableTools.newTable(TableTools.col("Timestamp", times)); io.deephaven.db.tables.utils.TableTools.show(source); - UnitTestTimeSeriesFilter timeSeriesFilter = - new UnitTestTimeSeriesFilter(startTime, "Timestamp", "00:00:05"); + UnitTestTimeSeriesFilter timeSeriesFilter = new UnitTestTimeSeriesFilter(startTime, "Timestamp", "00:00:05"); Table filtered = source.where(timeSeriesFilter); io.deephaven.db.tables.utils.TableTools.show(filtered); @@ -77,25 +76,23 @@ public void testIncremental() throws ParseException { int size = 100; final Date startDate = format.parse("2015-03-23"); Date endDate = format.parse("2015-03-24"); - final QueryTable table = getTable(size, random, - 
columnInfo = initColumnInfos(new String[] {"Date", "C1"}, + final QueryTable table = getTable(size, random, columnInfo = initColumnInfos(new String[] {"Date", "C1"}, new TstUtils.DateGenerator(startDate, endDate), new TstUtils.IntGenerator(1, 100))); final UnitTestTimeSeriesFilter unitTestTimeSeriesFilter = - new UnitTestTimeSeriesFilter(startDate.getTime(), "Date", "01:00:00"); - final ArrayList> filtersToRefresh = - new ArrayList<>(); + new UnitTestTimeSeriesFilter(startDate.getTime(), "Date", "01:00:00"); + final ArrayList> filtersToRefresh = new ArrayList<>(); EvalNugget en[] = new EvalNugget[] { new EvalNugget() { public Table e() { UnitTestTimeSeriesFilter unitTestTimeSeriesFilter1 = - new UnitTestTimeSeriesFilter(unitTestTimeSeriesFilter); + new UnitTestTimeSeriesFilter(unitTestTimeSeriesFilter); filtersToRefresh.add(new WeakReference<>(unitTestTimeSeriesFilter1)); - return LiveTableMonitor.DEFAULT.exclusiveLock().computeLocked( - () -> table.update("Date=new DBDateTime(Date.getTime() * 1000000L)") - .where(unitTestTimeSeriesFilter1)); + return LiveTableMonitor.DEFAULT.exclusiveLock() + .computeLocked(() -> table.update("Date=new DBDateTime(Date.getTime() * 1000000L)") + .where(unitTestTimeSeriesFilter1)); } }, }; @@ -109,8 +106,7 @@ public Table e() { LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { unitTestTimeSeriesFilter.incrementNow(3600 * 1000); - final ArrayList> collectedRefs = - new ArrayList<>(); + final ArrayList> collectedRefs = new ArrayList<>(); for (WeakReference ref : filtersToRefresh) { final UnitTestTimeSeriesFilter refreshFilter = ref.get(); if (refreshFilter == null) { diff --git a/DB/src/test/java/io/deephaven/db/v2/utils/TreeIndexTest.java b/DB/src/test/java/io/deephaven/db/v2/utils/TreeIndexTest.java index 67527d28333..81fe9e51913 100644 --- a/DB/src/test/java/io/deephaven/db/v2/utils/TreeIndexTest.java +++ b/DB/src/test/java/io/deephaven/db/v2/utils/TreeIndexTest.java @@ -147,12 +147,12 @@ public void testSerialize() throws 
IOException, ClassNotFoundException { assertEquals(index, copy); index = indexFromString( - "0,2,4-5,8-9,12-17,19-20,22,25-29,33-38,41-45,47,49-54,56,58-59,61-73,75-78,80,82-85,89,91-104,106-110,112-114,116-117,120-132,135-136,138-141,143-144,146,148-154,157-159,161,163-166,168,170,172-189,193-196,198-206,209-210,213,215-235,237-245,248-250,252-262,264-267,269-271,273-276,278-281,284-289,291,293-294,296-301,303,305-307,309-317,319-345,347-354,356-366,368-378,380-387,389-393,395-401,403-409,411-421,424-428,430-437,439-460,462-463,465-473,475-488,490-497,499-503,505-510,512-519,521,523-531,533-540,542-543,545-562,564-570,572-595,597-642,644,646-664,666-670,674-675,677-680,682-689,691-718,720-740,742-764,766-801,803-821,823-826,828-831,833-840,842-887,890-892,894-901,903-928,930-951,953-957,959-964,966-996,998-1010,1013-1016,1018-1022,1024-1064,1066-1072,1074-1094,1096-1107,1109-1181,1183-1193,1195-1199,1201-1253,1255-1288,1290-1291,1293-1311,1313-1328,1330-1345,1347-1349,1352-1446,1448-1473,1475-1511,1513-1567,1569-1572,1574-1580,1582-1615,1617-1643,1645,1647-1687,1689-1691,1693-1695,1697-1698,1700-1709,1711-1732,1734,1735-1751,1753-1792,1794-1831,1833-1849,1851-1857,1859-1866,1868-1880,1882-1885,1887-1920,1922-1926,1931-2566"); + 
"0,2,4-5,8-9,12-17,19-20,22,25-29,33-38,41-45,47,49-54,56,58-59,61-73,75-78,80,82-85,89,91-104,106-110,112-114,116-117,120-132,135-136,138-141,143-144,146,148-154,157-159,161,163-166,168,170,172-189,193-196,198-206,209-210,213,215-235,237-245,248-250,252-262,264-267,269-271,273-276,278-281,284-289,291,293-294,296-301,303,305-307,309-317,319-345,347-354,356-366,368-378,380-387,389-393,395-401,403-409,411-421,424-428,430-437,439-460,462-463,465-473,475-488,490-497,499-503,505-510,512-519,521,523-531,533-540,542-543,545-562,564-570,572-595,597-642,644,646-664,666-670,674-675,677-680,682-689,691-718,720-740,742-764,766-801,803-821,823-826,828-831,833-840,842-887,890-892,894-901,903-928,930-951,953-957,959-964,966-996,998-1010,1013-1016,1018-1022,1024-1064,1066-1072,1074-1094,1096-1107,1109-1181,1183-1193,1195-1199,1201-1253,1255-1288,1290-1291,1293-1311,1313-1328,1330-1345,1347-1349,1352-1446,1448-1473,1475-1511,1513-1567,1569-1572,1574-1580,1582-1615,1617-1643,1645,1647-1687,1689-1691,1693-1695,1697-1698,1700-1709,1711-1732,1734,1735-1751,1753-1792,1794-1831,1833-1849,1851-1857,1859-1866,1868-1880,1882-1885,1887-1920,1922-1926,1931-2566"); copy = (Index) doSerDeser(index); assertEquals(index, copy); index = indexFromString( - 
"1-2,4,6,9-12,14-18,20-21,24-26,28-33,35,37-39,41-42,44,46-53,55-59,61-62,64,67,69,71-72,74-75,77-79,83-85,87-91,93-95,97,100-112,114,116,118,120-122,124-126,128-130,134-138,140-146,148-149,151,153-154,156,158-159,161-163,165-169,171-172,174-178,182-192,198-200,202,205,207-234,236-243,245,247-249,252-260,262-267,269,271-273,275-279,283-284,286-291,293-295,297-303,306-311,313,315-320,322,324-326,328-330,332-335,337-350,352-355,357,359-364,366,369-376,378-379,381-391,393,395-406,408-425,427-466,470-471,473-477,480-490,492-497,499-513,515-522,524-529,531-535,539-549,552-553,555-563,565-570,572-578,580-624,626-634,636-650,652-654,656-657,659,661-665,667-673,675-677,679,681,683-684,686,688-692,694-695,697-717,719-733,735-739,741-743,745-750,752-755,757-760,762-778,780,782-799,801-809,811-817,819-822,824-827,829-835,838-907,909-924,926-928,930-942,944-1049,1051,1053-1058,1060-1064,1066-1069,1071-1080,1082-1089,1091-1092,1094,1096-1098,1100-1102,1104-1109,1111-1121,1123-1142,1144-1156,1158-1162,1164-1169,1171-1175,1177-1190,1192-1195,1198-1199,1201-1211,1214,1216-1218,1220-1221,1223-1231,1233-1234,1236-1239,1241-1287,1289-1304,1306-1307,1309-1317,1319-1327,1329-1331,1333-1335,1337-1340,1342-1344,1346-1350,1352-1354,1356-1371,1373-1393,1395-1398,1400,1402-1479,1481-1486,1488-1490,1493,1495-1507,1509,1511-1543,1545-1550,1553-1556,1558-1564,1566-1582,1584,1586,1589-1590,1592-1615,1617-1626,1628-1634,1636-1643,1645,1647-1657,1659-1668,1670-1673,1675-1681,1683-1690,1693-1695,1697-1699,1701-1713,1715-1716,1718-1722,1724-1746,1748-1750,1753-1755,1757-1794,1796-1804,1806-1821,1823-1826,1828-1830,1832-1835,1837-1843,1846,1847-1856,1858-1894,1896-1908,1910-1916,1918-1924,1926,1928-1929,1936-2566"); + 
"1-2,4,6,9-12,14-18,20-21,24-26,28-33,35,37-39,41-42,44,46-53,55-59,61-62,64,67,69,71-72,74-75,77-79,83-85,87-91,93-95,97,100-112,114,116,118,120-122,124-126,128-130,134-138,140-146,148-149,151,153-154,156,158-159,161-163,165-169,171-172,174-178,182-192,198-200,202,205,207-234,236-243,245,247-249,252-260,262-267,269,271-273,275-279,283-284,286-291,293-295,297-303,306-311,313,315-320,322,324-326,328-330,332-335,337-350,352-355,357,359-364,366,369-376,378-379,381-391,393,395-406,408-425,427-466,470-471,473-477,480-490,492-497,499-513,515-522,524-529,531-535,539-549,552-553,555-563,565-570,572-578,580-624,626-634,636-650,652-654,656-657,659,661-665,667-673,675-677,679,681,683-684,686,688-692,694-695,697-717,719-733,735-739,741-743,745-750,752-755,757-760,762-778,780,782-799,801-809,811-817,819-822,824-827,829-835,838-907,909-924,926-928,930-942,944-1049,1051,1053-1058,1060-1064,1066-1069,1071-1080,1082-1089,1091-1092,1094,1096-1098,1100-1102,1104-1109,1111-1121,1123-1142,1144-1156,1158-1162,1164-1169,1171-1175,1177-1190,1192-1195,1198-1199,1201-1211,1214,1216-1218,1220-1221,1223-1231,1233-1234,1236-1239,1241-1287,1289-1304,1306-1307,1309-1317,1319-1327,1329-1331,1333-1335,1337-1340,1342-1344,1346-1350,1352-1354,1356-1371,1373-1393,1395-1398,1400,1402-1479,1481-1486,1488-1490,1493,1495-1507,1509,1511-1543,1545-1550,1553-1556,1558-1564,1566-1582,1584,1586,1589-1590,1592-1615,1617-1626,1628-1634,1636-1643,1645,1647-1657,1659-1668,1670-1673,1675-1681,1683-1690,1693-1695,1697-1699,1701-1713,1715-1716,1718-1722,1724-1746,1748-1750,1753-1755,1757-1794,1796-1804,1806-1821,1823-1826,1828-1830,1832-1835,1837-1843,1846,1847-1856,1858-1894,1896-1908,1910-1916,1918-1924,1926,1928-1929,1936-2566"); copy = (Index) doSerDeser(index); assertEquals(index, copy); } @@ -540,10 +540,8 @@ public void testAddIndex5() { // doCutDown(indexStrings); } - // This method will take one range out of one the index and run the test. 
If we find a range we - // can take out and - // still get a failure, recursively try again, so that we have a minimal set of index ranges - // that actually produce + // This method will take one range out of one the index and run the test. If we find a range we can take out and + // still get a failure, recursively try again, so that we have a minimal set of index ranges that actually produce // a failure for us. We don't need it during normal testing, so it is unused. @SuppressWarnings("unused") private void doCutDown(String[] indexStrings) { @@ -620,8 +618,7 @@ public void testRandomBuilder() { for (int step = 0; step < 1000; ++step) { final int size = random.nextInt(10); - final RangePriorityQueueBuilder priorityQueueBuilder = - new RangePriorityQueueBuilder(16); + final RangePriorityQueueBuilder priorityQueueBuilder = new RangePriorityQueueBuilder(16); final Index.RandomBuilder treeBuilder = TreeIndex.makeRandomBuilder(); values.clear(); @@ -784,8 +781,7 @@ private long rangeSize(TLongArrayList values, int start) { } - // This test would be way too brittle to include, even if I wasn't reading the deserialized - // stuff from a file. + // This test would be way too brittle to include, even if I wasn't reading the deserialized stuff from a file. // The following function will write something useful to read in. 
// String dumpSerialized(TreeIndex obj) { // try { @@ -1160,37 +1156,37 @@ public void testSimpleIteratorForEach() { final int[] count = new int[1]; final int[] voffset = new int[1]; final Predicate hasNextForIter = - (it) -> (count[0] > 0) || it.hasNext(); + (it) -> (count[0] > 0) || it.hasNext(); final Function nextForIter = - (it) -> { - if (count[0] == 0) { - voffset[0] = 0; - it.forEachLong((final long v) -> { - buf[voffset[0] + count[0]] = v; - ++count[0]; - return count[0] < buf.length; - }); - assertTrue(count[0] > 0); - } - final long v = buf[voffset[0]]; - ++voffset[0]; - --count[0]; - return v; - }; + (it) -> { + if (count[0] == 0) { + voffset[0] = 0; + it.forEachLong((final long v) -> { + buf[voffset[0] + count[0]] = v; + ++count[0]; + return count[0] < buf.length; + }); + assertTrue(count[0] > 0); + } + final long v = buf[voffset[0]]; + ++voffset[0]; + --count[0]; + return v; + }; final long[] end = new long[1]; final long[] curr = new long[1]; final Predicate hasNextForRanges = - (it) -> (curr[0] < end[0] || it.hasNext()); + (it) -> (curr[0] < end[0] || it.hasNext()); final Function nextForRanges = - (it) -> { - if (curr[0] >= end[0]) { - it.next(); - curr[0] = it.currentRangeStart(); - end[0] = it.currentRangeEnd() + 1; - } - return curr[0]++; - }; + (it) -> { + if (curr[0] >= end[0]) { + it.next(); + curr[0] = it.currentRangeStart(); + end[0] = it.currentRangeEnd() + 1; + } + return curr[0]++; + }; int r = 0; while (hasNextForIter.test(fit)) { final String m = "r=" + r; @@ -1224,14 +1220,14 @@ public void testGetAverageRunLengthEstimate() { public void testGetOrderedKeysByKeyRange() { final Index.SequentialBuilder b = Index.FACTORY.getSequentialBuilder(); - final long[] vs = new long[] {3, 4, 5, 8, 10, 12, 29, 31, 44, 45, 46, 59, 60, 61, 72, 65537, - 65539, 65536 * 3, 65536 * 3 + 5}; + final long[] vs = new long[] {3, 4, 5, 8, 10, 12, 29, 31, 44, 45, 46, 59, 60, 61, 72, 65537, 65539, 65536 * 3, + 65536 * 3 + 5}; for (long v : vs) { b.appendKey(v); } 
final Index ix = b.getIndex(); - final long[] ends = new long[] {44, 45, 46, 58, 61, 62, 72, 65535, 65536, 65536 * 2, - 65536 * 3 - 1, 65536 * 3, 65536 * 3 + 1}; + final long[] ends = new long[] {44, 45, 46, 58, 61, 62, 72, 65535, 65536, 65536 * 2, 65536 * 3 - 1, 65536 * 3, + 65536 * 3 + 1}; final long start = 8; for (long end : ends) { final String m = "end==" + end; @@ -1258,8 +1254,8 @@ public void testGetOrderedKeysByKeyRange() { public void testGetOrderedKeysByPosition() { final Index.SequentialBuilder b = Index.FACTORY.getSequentialBuilder(); - final long[] vs = new long[] {3, 4, 5, 8, 10, 12, 29, 31, 44, 45, 46, 59, 60, 61, 72, 65537, - 65539, 65536 * 3, 65536 * 3 + 5}; + final long[] vs = new long[] {3, 4, 5, 8, 10, 12, 29, 31, 44, 45, 46, 59, 60, 61, 72, 65537, 65539, 65536 * 3, + 65536 * 3 + 5}; for (long v : vs) { b.appendKey(v); } @@ -1270,8 +1266,7 @@ public void testGetOrderedKeysByPosition() { final String m = "startPos==" + startPos; for (long endPos = startPos; endPos <= sz; ++endPos) { final String m2 = m + " && endPos==" + endPos; - final OrderedKeys oks = - ix.getOrderedKeysByPosition(startPos, endPos - startPos + 1); + final OrderedKeys oks = ix.getOrderedKeysByPosition(startPos, endPos - startPos + 1); final Index ioks = oks.asIndex(); long n = 0; boolean firstTime = true; @@ -1295,14 +1290,13 @@ public void testGetOrderedKeysByPosition() { public void testFillChunk() { final Index.SequentialBuilder b = Index.FACTORY.getSequentialBuilder(); - final long[] vs = new long[] {3, 4, 5, 8, 10, 12, 29, 31, 44, 45, 46, 59, 60, 61, 72, 65537, - 65539, 65536 * 3, 65536 * 3 + 5}; + final long[] vs = new long[] {3, 4, 5, 8, 10, 12, 29, 31, 44, 45, 46, 59, 60, 61, 72, 65537, 65539, 65536 * 3, + 65536 * 3 + 5}; for (long v : vs) { b.appendKey(v); } final Index ix = b.getIndex(); - final WritableLongChunk kixchunk = - WritableLongChunk.makeWritableChunk(vs.length); + final WritableLongChunk kixchunk = WritableLongChunk.makeWritableChunk(vs.length); 
ix.fillKeyIndicesChunk(kixchunk); assertEquals(vs.length, kixchunk.size()); } @@ -1310,8 +1304,8 @@ public void testFillChunk() { public void testBuilderAddKeys() { final Index.RandomBuilder b = Index.FACTORY.getRandomBuilder(); b.addKey(27); - final long[] vs = {3, 4, 5, 8, 10, 12, 29, 31, 44, 45, 46, 59, 60, 61, 72, 65537, 65539, - 65536 * 3, 65536 * 3 + 5}; + final long[] vs = + {3, 4, 5, 8, 10, 12, 29, 31, 44, 45, 46, 59, 60, 61, 72, 65537, 65539, 65536 * 3, 65536 * 3 + 5}; b.addKeys(new PrimitiveIterator.OfLong() { int vi = 0; @@ -1336,8 +1330,8 @@ public boolean hasNext() { public void testBuilderAppendKeys() { final Index.SequentialBuilder b = Index.FACTORY.getSequentialBuilder(); b.appendKey(1); - final long[] vs = {3, 4, 5, 8, 10, 12, 29, 31, 44, 45, 46, 59, 60, 61, 72, 65537, 65539, - 65536 * 3, 65536 * 3 + 5}; + final long[] vs = + {3, 4, 5, 8, 10, 12, 29, 31, 44, 45, 46, 59, 60, 61, 72, 65537, 65539, 65536 * 3, 65536 * 3 + 5}; b.appendKeys(new PrimitiveIterator.OfLong() { int vi = 0; @@ -1361,8 +1355,8 @@ public boolean hasNext() { public void testBuilderAddRanges() { final Index.RandomBuilder b = Index.FACTORY.getRandomBuilder(); - final long[] vs = {3, 4, 5, 8, 10, 12, 29, 31, 44, 45, 46, 59, 60, 61, 72, 91, 65537, 65539, - 65536 * 3, 65536 * 3 + 5}; + final long[] vs = + {3, 4, 5, 8, 10, 12, 29, 31, 44, 45, 46, 59, 60, 61, 72, 91, 65537, 65539, 65536 * 3, 65536 * 3 + 5}; b.addRanges(new LongRangeIterator() { int vi = -2; @@ -1398,8 +1392,8 @@ public long end() { public void testBuilderAppendRanges() { final Index.SequentialBuilder b = Index.FACTORY.getSequentialBuilder(); - final long[] vs = {3, 4, 5, 8, 10, 12, 29, 31, 44, 45, 46, 59, 60, 61, 72, 91, 65537, 65539, - 65536 * 3, 65536 * 3 + 5}; + final long[] vs = + {3, 4, 5, 8, 10, 12, 29, 31, 44, 45, 46, 59, 60, 61, 72, 91, 65537, 65539, 65536 * 3, 65536 * 3 + 5}; b.appendRanges(new LongRangeIterator() { int vi = -2; @@ -1630,8 +1624,7 @@ public void testSearchIteratorRegression0() { final 
Index.SearchIterator it = ix0.searchIterator(); final long target0 = 10070; final MutableLong target = new MutableLong(target0); - final Index.TargetComparator comp = - (long key, int dir) -> Long.compare(target.longValue(), key) * dir; + final Index.TargetComparator comp = (long key, int dir) -> Long.compare(target.longValue(), key) * dir; final long r = it.binarySearchValue(comp, 1); assertEquals(target0, r); assertEquals(target0, it.currentValue()); @@ -1778,20 +1771,20 @@ public void testSubsetOfRegression0() { SortedRanges sr0 = new SortedRangesShort(64, 10); final Index ix0 = new TreeIndex(sr0); rvs2ix(ix0, new long[] { - 85, 88, 103, 121, 201, 204, 220, 258, 275, 296, 366, 370, 386, 409, 411, 453, 584, - 587, 602, 631, 661, 683, 714, -715, - 744, 750, 786, 791, 830, 836, 841, 885, 911, 941, 981, 993, 1024, 1052, 1054, 1089, - 1151, 1220, 1232, 1243, 1266, -1267, - 1296, 1355, 1403, 1429, 1431, 1442, 1466, 1477, 1533, 1556, 1559, 1577, 1579, 1589, - 1610, 1622, 1645, 1654, 1661, 1683, - 1695, 1784, 1790, 1830, 1876, 1914, 1926, 1972, 1980, 2016, 2075, 2082, 2150, 2154, - 2166, -2167, 2171, 2177, 2188, 2202, - 2217, -2218, 2246, 2257, 2266, 2278, -2279, 2282, 2288, 2294, 2300, 2302, -2303, - 2331, 2346, 2355, 2364, 2368, 2377, - 2412, 2421, 2442, 2465, 2472, -2473, 2488, 2497, 2500, 2504, 2511, 2529, -2530, - 2532, 2536, -2537, 2540, 2560, 2563, -2564, - 2573, -2574, 2588, 2595, -2596, 2598, 2601, 2612, 2627, -2628, 2647, 2664, 2670, - 2672, -2673, 2696, -2697, 2702, 2723, + 85, 88, 103, 121, 201, 204, 220, 258, 275, 296, 366, 370, 386, 409, 411, 453, 584, 587, 602, 631, 661, + 683, 714, -715, + 744, 750, 786, 791, 830, 836, 841, 885, 911, 941, 981, 993, 1024, 1052, 1054, 1089, 1151, 1220, 1232, + 1243, 1266, -1267, + 1296, 1355, 1403, 1429, 1431, 1442, 1466, 1477, 1533, 1556, 1559, 1577, 1579, 1589, 1610, 1622, 1645, + 1654, 1661, 1683, + 1695, 1784, 1790, 1830, 1876, 1914, 1926, 1972, 1980, 2016, 2075, 2082, 2150, 2154, 2166, -2167, 2171, + 2177, 2188, 2202, 
+ 2217, -2218, 2246, 2257, 2266, 2278, -2279, 2282, 2288, 2294, 2300, 2302, -2303, 2331, 2346, 2355, 2364, + 2368, 2377, + 2412, 2421, 2442, 2465, 2472, -2473, 2488, 2497, 2500, 2504, 2511, 2529, -2530, 2532, 2536, -2537, 2540, + 2560, 2563, -2564, + 2573, -2574, 2588, 2595, -2596, 2598, 2601, 2612, 2627, -2628, 2647, 2664, 2670, 2672, -2673, 2696, + -2697, 2702, 2723, 2741, 2751, 2757, 2784, 2792, 2799, 2818, 2824, 2838, 2853, 2857, 2863 }); final TreeIndex ix1 = TreeIndex.makeEmptyRsp(); @@ -1803,23 +1796,17 @@ public void testIntersectRegression0() { Index ix0 = TreeIndex.makeEmptySr(); Index ix1 = TreeIndex.makeEmptyRsp(); ix0 = indexFromString( - "87,236,275,324,329,468,505,673,705,779,834,848,917,1017,1019,1062,1366,1405,1453,1575,1599,1757," - + - "1834,1853,1856,1895,1960,2098,2167,2218,2411,2606,2686,2842,2958,3225,3451,3509,3587,3601," - + - "3614,3722,3747,3807,3907,4061,4158,4371,4558,4590-4591,4732,4739,4757,4801,4894,5000,5312," - + - "5601,5755,5854,5901,6006,6029,6080,6117,6126,6176,6339-6340,6384,6431,6627,6903,6916,7159", - ix0); + "87,236,275,324,329,468,505,673,705,779,834,848,917,1017,1019,1062,1366,1405,1453,1575,1599,1757," + + "1834,1853,1856,1895,1960,2098,2167,2218,2411,2606,2686,2842,2958,3225,3451,3509,3587,3601," + + "3614,3722,3747,3807,3907,4061,4158,4371,4558,4590-4591,4732,4739,4757,4801,4894,5000,5312," + + "5601,5755,5854,5901,6006,6029,6080,6117,6126,6176,6339-6340,6384,6431,6627,6903,6916,7159", + ix0); ix1 = indexFromString( - "6849,6851,6856,6859,6863,6866,6875,6884,6888,6895,6900,6902-6903,6921,6923,6941,6949,6968," - + - "6974,6978-6979,6997,7007-7020,7022-7023,7025-7028,7030-7032,7034-7056,7058-7066,7068-7075," - + - "7077-7091,7094-7117,7119-7125,7127-7129,7131-7142,7144,7146-7168,7170-7172,7174-7180," - + - "7182-7187,7189-7211,7213-7224", - ix1); + "6849,6851,6856,6859,6863,6866,6875,6884,6888,6895,6900,6902-6903,6921,6923,6941,6949,6968," + + 
"6974,6978-6979,6997,7007-7020,7022-7023,7025-7028,7030-7032,7034-7056,7058-7066,7068-7075," + + "7077-7091,7094-7117,7119-7125,7127-7129,7131-7142,7144,7146-7168,7170-7172,7174-7180," + + "7182-7187,7189-7211,7213-7224", + ix1); final Index ix2 = ix0.intersect(ix1); ix2.validate(); } @@ -1883,53 +1870,30 @@ public void testSubsetOfMixed() { } public void testRemoveRegression0() { - final String ix0Str = - "201609,201631-201632,201671,201674,201705,201715-201716,201719,201724,201749,201782," + - "201789,201842,201865,201888,201892,201908,201918,201927,201935,201954,201961,201971,202012,202014," - + - "202022,202034,202082,202092,202137,202140,202142,202148,205512,205519,205539,205557,205561,205579," - + - "205613,205667,205724,205759,205799-205800,205818,205822,205841,205853,205866,205893,205934,209611," - + - "209618,209621,209636,209673,209702,209729,209731,209736,209748,209772,209823,209835,209886,209891," - + - "209894,209897,209926,209943,209980,209989,209995,210058,210079,210109,210149,210158,221440,221474," - + - "221486,221492,221527,221537,221555,221608,221612,221614,221644,221669,221680,221958,222006,222018," - + + final String ix0Str = "201609,201631-201632,201671,201674,201705,201715-201716,201719,201724,201749,201782," + + "201789,201842,201865,201888,201892,201908,201918,201927,201935,201954,201961,201971,202012,202014," + + "202022,202034,202082,202092,202137,202140,202142,202148,205512,205519,205539,205557,205561,205579," + + "205613,205667,205724,205759,205799-205800,205818,205822,205841,205853,205866,205893,205934,209611," + + "209618,209621,209636,209673,209702,209729,209731,209736,209748,209772,209823,209835,209886,209891," + + "209894,209897,209926,209943,209980,209989,209995,210058,210079,210109,210149,210158,221440,221474," + + "221486,221492,221527,221537,221555,221608,221612,221614,221644,221669,221680,221958,222006,222018," + "227097,227110,230227,230229,232133-233244,233246-235291"; - final String ix1Str = - 
"201719,201888,202012,202014,202156,202159,202179,202199,202212,202227,202279,202287," + - "202289,202301,202347,202451,202467,202480,202488,202537,202541,202571,202643,202681,202756,202784," - + - "202826,202844,202847,202911,203019,203116,203130,203163,203178,203182,203194-203195,203216-203217," - + - "203247,203308,203312,203326,203337,203368,203427-203428,203466,203470,203513,203524,203595,203625," - + - "203655,203681,203711,203719,203738,203758,203768,203776,203778,203785,203801,203875,203893,203940," - + - "203951,203987,204000,204025,204043,204085,204148,204171,204261,204289,204353,204383,204399,204474," - + - "204524,204529,204541,204556-204557,204559,204570,204611,204620,204641,204802,204871,204931-204932," - + - "204956,205035,205107,205114,205155,205167,205175,205183,205204,205226,205316,205357,205370,205401," - + - "205422-205423,205822,205934,209835,209886,210079,210158,219810,219815,219824,219871,219892,219916," - + - "219937,220009,220022,220025,220040,220052,220081,220117,220144,220182,220189,220192,220226,220232," - + - "220297,220306,220406,220415,220429,220435,220445,220477,220629,220800,220809,220901,220918,220922," - + - "220961,220967,220969,220976,220981,220989,220992,221182,221316,221346,221357,221403,221414,221420," - + - "221439-221440,221492,232136,232159,232169,232182,232186,232212-232213,232219,232252,232281,232306," - + - "232310,232314,232320,232338,232369,232374,232397,232402,232406,232424,232432,232436,232441,232444," - + - "232459,232490,232494,232499,232516-232518,232525,232543,232558,232580,232591,232598,232603,232640," - + - "232649,232660,232673,232677,232686,232709,232715,232721,232723,232739,232743,232757,232771,232773," - + + final String ix1Str = "201719,201888,202012,202014,202156,202159,202179,202199,202212,202227,202279,202287," + + "202289,202301,202347,202451,202467,202480,202488,202537,202541,202571,202643,202681,202756,202784," + + 
"202826,202844,202847,202911,203019,203116,203130,203163,203178,203182,203194-203195,203216-203217," + + "203247,203308,203312,203326,203337,203368,203427-203428,203466,203470,203513,203524,203595,203625," + + "203655,203681,203711,203719,203738,203758,203768,203776,203778,203785,203801,203875,203893,203940," + + "203951,203987,204000,204025,204043,204085,204148,204171,204261,204289,204353,204383,204399,204474," + + "204524,204529,204541,204556-204557,204559,204570,204611,204620,204641,204802,204871,204931-204932," + + "204956,205035,205107,205114,205155,205167,205175,205183,205204,205226,205316,205357,205370,205401," + + "205422-205423,205822,205934,209835,209886,210079,210158,219810,219815,219824,219871,219892,219916," + + "219937,220009,220022,220025,220040,220052,220081,220117,220144,220182,220189,220192,220226,220232," + + "220297,220306,220406,220415,220429,220435,220445,220477,220629,220800,220809,220901,220918,220922," + + "220961,220967,220969,220976,220981,220989,220992,221182,221316,221346,221357,221403,221414,221420," + + "221439-221440,221492,232136,232159,232169,232182,232186,232212-232213,232219,232252,232281,232306," + + "232310,232314,232320,232338,232369,232374,232397,232402,232406,232424,232432,232436,232441,232444," + + "232459,232490,232494,232499,232516-232518,232525,232543,232558,232580,232591,232598,232603,232640," + + "232649,232660,232673,232677,232686,232709,232715,232721,232723,232739,232743,232757,232771,232773," + "232776,232778,232783,232796,232810,232823"; Index ix0 = TreeIndex.makeEmptySr(); ix0 = indexFromString(ix0Str, ix0); @@ -1946,10 +1910,9 @@ public void testRemoveRegression0() { public void testMinusRegression1() { final Index ix0 = TreeIndex.makeEmptySr(); rvs2ix(ix0, new long[] { - 10, -12, 14, 16, 18, 20, 22, 24, 26, 28, -29, 31, 33, -34, 36, 38, -39, 41, 43, 45, - -47, 49, 51, 53, 55, 57, 59, 61, 63, -64, - 66, -67, 69, 71, -76, 78, -80, 82, -83, 85, -88, 90, -91, 93, -94, 96, 98, 100, 102, - -110, 112, -113 + 10, -12, 
14, 16, 18, 20, 22, 24, 26, 28, -29, 31, 33, -34, 36, 38, -39, 41, 43, 45, -47, 49, 51, 53, 55, + 57, 59, 61, 63, -64, + 66, -67, 69, 71, -76, 78, -80, 82, -83, 85, -88, 90, -91, 93, -94, 96, 98, 100, 102, -110, 112, -113 }); final Index ix1 = TreeIndex.makeEmptySr(); rvs2ix(ix1, new long[] {24, 28, 31, 38, 51, 57, 59, 61, 74, 90, 93, 107, -108, 110}); @@ -1962,25 +1925,16 @@ public void testMinusRegression1() { } public void testMinusRegression2() { - final String ix0Str = - "88,103,121,258,275,366,370,409,411,584,587,602,683,714-715,744,750,791,836,981,1024," + - "1052,1054,1089,1151,1220,1243,1267,1296,1403,1429,1533,1556,1589,1661,1784,1790,1914,2150,2167,2171," - + - "2202,2288,2294,2300,2303,2331,2346,2355,2364,2368,2412,2421,2504,2511,2529-2530,2532,2563,2574,2595," - + + final String ix0Str = "88,103,121,258,275,366,370,409,411,584,587,602,683,714-715,744,750,791,836,981,1024," + + "1052,1054,1089,1151,1220,1243,1267,1296,1403,1429,1533,1556,1589,1661,1784,1790,1914,2150,2167,2171," + + "2202,2288,2294,2300,2303,2331,2346,2355,2364,2368,2412,2421,2504,2511,2529-2530,2532,2563,2574,2595," + "2628,2670,2673,2696,2741,2751,2757,2792,2799,2818"; - final String ix1Str = - "4,22,60,64,75,78,106,109,129,135,161,196,217,279,285,312,339,363,366,373,379,420,454," - + - "528,592,629,646,651,706,725,731,735,787,813,902,947,1002,1048,1064,1070,1099,1133,1135,1154,1159," - + - "1175,1183,1211,1223,1270,1301,1308,1323,1347,1357,1365,1367,1382,1430,1538,1558,1564,1587,1638,1641," - + - "1677-1678,1732,1761,1774,1804,1811,1813,1843,1856,1890,1903,1918,1927,1933,1942,2009,2068,2155,2184," - + - "2189,2206,2210,2221,2223,2230,2303,2310,2316,2329,2347,2352,2354,2380,2458,2474,2482,2489,2494,2503," - + + final String ix1Str = "4,22,60,64,75,78,106,109,129,135,161,196,217,279,285,312,339,363,366,373,379,420,454," + + "528,592,629,646,651,706,725,731,735,787,813,902,947,1002,1048,1064,1070,1099,1133,1135,1154,1159," + + 
"1175,1183,1211,1223,1270,1301,1308,1323,1347,1357,1365,1367,1382,1430,1538,1558,1564,1587,1638,1641," + + "1677-1678,1732,1761,1774,1804,1811,1813,1843,1856,1890,1903,1918,1927,1933,1942,2009,2068,2155,2184," + + "2189,2206,2210,2221,2223,2230,2303,2310,2316,2329,2347,2352,2354,2380,2458,2474,2482,2489,2494,2503," + "2622,2625,2631,2635,2640,2643-2644,2684,2686,2688,2690,2704,2725,2811,2829,2839"; Index ix0 = TreeIndex.makeEmptySr(); ix0 = indexFromString(ix0Str, ix0); @@ -2201,17 +2155,16 @@ public void testRemoveTime() { } } try (final Index result = outer.getIndex(); - final Index toRemove = inner.getIndex()) { + final Index toRemove = inner.getIndex()) { final long t0 = System.currentTimeMillis(); result.remove(toRemove); final long t1 = System.currentTimeMillis(); final double removeTimeSeconds = (t1 - t0) / 1000.0; - // With the O(n^2) implementation this took ~47 seconds on an Intel Core i9-8950HK 2.90 - // GHz. + // With the O(n^2) implementation this took ~47 seconds on an Intel Core i9-8950HK 2.90 GHz. // The O(n) implementation took 0.1 seconds on the same machine. 
final double reasonableThresholdIfWePauseSeconds = 8.0; assertTrue("removeTimeSeconds=" + removeTimeSeconds, - removeTimeSeconds < reasonableThresholdIfWePauseSeconds); + removeTimeSeconds < reasonableThresholdIfWePauseSeconds); } } diff --git a/DB/src/test/java/io/deephaven/db/v2/utils/UpdatePerformanceTest.java b/DB/src/test/java/io/deephaven/db/v2/utils/UpdatePerformanceTest.java index bf4044aa0fd..49ea437824f 100644 --- a/DB/src/test/java/io/deephaven/db/v2/utils/UpdatePerformanceTest.java +++ b/DB/src/test/java/io/deephaven/db/v2/utils/UpdatePerformanceTest.java @@ -33,8 +33,7 @@ interface Factory { Factory getFactory(); - // Since tests will run multiple times, and creation time is high (higher than individual - // operations), + // Since tests will run multiple times, and creation time is high (higher than individual operations), // we create the base index one and clone it before every run of update. void cloneBase(); @@ -52,8 +51,7 @@ interface Factory { long removeSize(); - long optimizerBait(); // some value calculated from the result to prevent the optimizer from - // removing the code. + long optimizerBait(); // some value calculated from the result to prevent the optimizer from removing the code. 
Runnable getRunner(int i); @@ -293,7 +291,7 @@ public long getBaseCrc32() { } public static void setupStrategy(final UpdateStrategy s, final int sz, - final TestValues.Config c, final String pref, final boolean print) { + final TestValues.Config c, final String pref, final boolean print) { final TestValues.Builder baseBuilder = s.baseBuilder(); final TestValues.Builder addBuilder = s.addBuilder(); final TestValues.Builder removeBuilder = s.removeBuilder(); @@ -307,8 +305,7 @@ public static void setupStrategy(final UpdateStrategy s, final int sz, System.out.println(pref + "remove size = " + nf(s.removeSize())); } - static long runAndGetSamples(final UpdateStrategy s, final int ri, final int runs, - final PerfStats stats) { + static long runAndGetSamples(final UpdateStrategy s, final int ri, final int runs, final PerfStats stats) { long trick = 0; // to prevent the optimizer from eliminating unused steps. final PerfMeasure pm = new PerfMeasure(false); for (int i = 0; i < runs; ++i) { @@ -351,8 +348,7 @@ static double codeWarmup(final UpdateStrategy.Factory f, int[] rs) { final static boolean runIndexParallel = true; final static boolean runIndexSequential = false; final static boolean runRspBitmap = true; - static final TestValues.Config configs[] = {TestValues.dense}; // { TestValues.sparse, - // TestValues.dense, + static final TestValues.Config configs[] = {TestValues.dense}; // { TestValues.sparse, TestValues.dense, // TestValues.asymmetric }; final static boolean doCrc32Check = true; @@ -360,9 +356,8 @@ static double codeWarmup(final UpdateStrategy.Factory f, int[] rs) { static final double s2ns = 1e9; static void runStep( - final TestValues.Config c, final int sn, final UpdateStrategy[] ss, final int[][] rs, - final String stepName, final int sz, final int runs, final boolean check, - final boolean print) { + final TestValues.Config c, final int sn, final UpdateStrategy[] ss, final int[][] rs, + final String stepName, final int sz, final int runs, final boolean 
check, final boolean print) { final Runtime rt = Runtime.getRuntime(); System.out.println(me + ": Running " + c.name + " " + stepName + " sz=" + nf(sz)); final String pfx = me + " "; @@ -375,9 +370,9 @@ static void runStep( final double dMb = pm.dm() / (1024.0 * 1024.0); if (print) { System.out.println(pfx + String.format( - "Building values for " + ss[si].getClass().getSimpleName() + - " done in %.3f secs, delta memory used %s", - pm.dt() / s2ns, mf(dMb))); + "Building values for " + ss[si].getClass().getSimpleName() + + " done in %.3f secs, delta memory used %s", + pm.dt() / s2ns, mf(dMb))); } pm.reset(); } @@ -396,8 +391,8 @@ static void runStep( System.out.println(pfx + "trick optimizer value = " + nf(trick)); if (!(si == 0 && ri == 0)) { PerfStats.comparePrint( - pStats, ss[0].getRunner(0).toString(), sStats, - ss[si].getRunner(rs[si][ri]).toString(), pfx); + pStats, ss[0].getRunner(0).toString(), sStats, ss[si].getRunner(rs[si][ri]).toString(), + pfx); } } } @@ -412,8 +407,8 @@ static void runStep( final long crc32 = ss[si].getBaseCrc32(); final long ct1 = System.nanoTime(); System.out.println( - pfx + ss[si].getRunner(rs[si][ri]).toString() + " crc32=" + nf(crc32) + - " done in " + (ct1 - ct0) / s2ns + " s."); + pfx + ss[si].getRunner(rs[si][ri]).toString() + " crc32=" + nf(crc32) + + " done in " + (ct1 - ct0) / s2ns + " s."); } } } @@ -421,22 +416,19 @@ static void runStep( } // Having separate warmup and full methods helps separate them in JProfiler. 
- static void runStepWarmup(final TestValues.Config c, final int sn, final UpdateStrategy ss[], - final int[][] rs, - final int sz, final int runs) { + static void runStepWarmup(final TestValues.Config c, final int sn, final UpdateStrategy ss[], final int[][] rs, + final int sz, final int runs) { runStep(c, sn, ss, rs, "warmup", sz, runs, false, false); } - static void runStepFull(final TestValues.Config c, final int sn, final UpdateStrategy ss[], - final int[][] rs, - final int sz, final int runs, final boolean check) { + static void runStepFull(final TestValues.Config c, final int sn, final UpdateStrategy ss[], final int[][] rs, + final int sz, final int runs, final boolean check) { runStep(c, sn, ss, rs, "full test", sz, runs, check, true); } static void run( - final TestValues.Config c, final int sn, final UpdateStrategy[] ss, final int[][] rs, - final int warmupSz, final int warmupRuns, final int fullSz, final int fullRuns, - final boolean check) { + final TestValues.Config c, final int sn, final UpdateStrategy[] ss, final int[][] rs, + final int warmupSz, final int warmupRuns, final int fullSz, final int fullRuns, final boolean check) { runStepWarmup(c, sn, ss, rs, warmupSz, warmupRuns); runStepFull(c, sn, ss, rs, fullSz, fullRuns, check); } @@ -470,8 +462,7 @@ public static void main(String[] args) { final double wo = codeWarmup(ss[si].getFactory(), rs[si]); final long t1 = System.nanoTime(); final long dt = t1 - t0; - System.out - .println(me + ": " + ss[si].getClass().getSimpleName() + " Code warmup ran in " + + System.out.println(me + ": " + ss[si].getClass().getSimpleName() + " Code warmup ran in " + dt / s2ns + " seconds, output=" + wo); } final int warmupSz = 1 * 1000 * 1000; diff --git a/DB/src/test/java/io/deephaven/db/v2/utils/ValidationSet.java b/DB/src/test/java/io/deephaven/db/v2/utils/ValidationSet.java index 22688aeb86e..fd7f3226fd3 100644 --- a/DB/src/test/java/io/deephaven/db/v2/utils/ValidationSet.java +++ 
b/DB/src/test/java/io/deephaven/db/v2/utils/ValidationSet.java @@ -87,9 +87,8 @@ public boolean execute(final long v) { }; public static final BiFunction overlapOp = - (h1, h2) -> !h1.forEach(v -> !h2.contains(v)); - public static final BiFunction subsetOfOp = - (h1, h2) -> h1.forEach(h2::contains); + (h1, h2) -> !h1.forEach(v -> !h2.contains(v)); + public static final BiFunction subsetOfOp = (h1, h2) -> h1.forEach(h2::contains); public static final Op subtractOp = new Op() { @Override diff --git a/DB/src/test/java/io/deephaven/db/v2/utils/copy/ReplicateCopyKernelTest.java b/DB/src/test/java/io/deephaven/db/v2/utils/copy/ReplicateCopyKernelTest.java index 32253fcdd9f..2ac47ee5a4e 100644 --- a/DB/src/test/java/io/deephaven/db/v2/utils/copy/ReplicateCopyKernelTest.java +++ b/DB/src/test/java/io/deephaven/db/v2/utils/copy/ReplicateCopyKernelTest.java @@ -14,15 +14,15 @@ public class ReplicateCopyKernelTest { public static void main(String[] args) throws IOException { ReplicateCopyKernel.main(args); ReplicatePrimitiveCode.charToAll(TestCharCopyKernel.class, ReplicatePrimitiveCode.TEST_SRC); - fixupObjectCopyKernelTest(ReplicatePrimitiveCode.charToObject(TestCharCopyKernel.class, - ReplicatePrimitiveCode.TEST_SRC)); + fixupObjectCopyKernelTest( + ReplicatePrimitiveCode.charToObject(TestCharCopyKernel.class, ReplicatePrimitiveCode.TEST_SRC)); } private static void fixupObjectCopyKernelTest(String path) throws IOException { final File file = new File(path); List lines = FileUtils.readLines(file, Charset.defaultCharset()); lines = globalReplacements(lines, - "ObjectChunk", "ObjectChunk"); + "ObjectChunk", "ObjectChunk"); FileUtils.writeLines(file, lines); } } diff --git a/DB/src/test/java/io/deephaven/db/v2/utils/rsp/RspBitmapTest.java b/DB/src/test/java/io/deephaven/db/v2/utils/rsp/RspBitmapTest.java index a99933caf61..35ee2a9db60 100644 --- a/DB/src/test/java/io/deephaven/db/v2/utils/rsp/RspBitmapTest.java +++ 
b/DB/src/test/java/io/deephaven/db/v2/utils/rsp/RspBitmapTest.java @@ -111,8 +111,7 @@ public void testMergeFullBlockSpans() { assertEquals(BLOCK_SIZE - 1, getContainerCardinality(i, rb)); } - // For each block except first and last, add the single missing element that would make it - // full. + // For each block except first and last, add the single missing element that would make it full. // Follow the order in the randomly shuffled blocks array. for (int i = 0; i < tgtflen - 2; ++i) { final int bi = blocks[i]; @@ -230,15 +229,14 @@ static class RandomSpansSomeFullSomeAlmostFull { public int nalmost; public final RspBitmap rb; - public RandomSpansSomeFullSomeAlmostFull(final int nblocks, final Random r, - final int firstBlock) { + public RandomSpansSomeFullSomeAlmostFull(final int nblocks, final Random r, final int firstBlock) { this(nblocks, r, firstBlock, 0, null); } public RandomSpansSomeFullSomeAlmostFull( - final int nblocks, final Random r, - final int firstBlock, final int nEmptyBlocks, - final TLongSet set) { + final int nblocks, final Random r, + final int firstBlock, final int nEmptyBlocks, + final TLongSet set) { full = new int[nblocks]; nfull = 0; almost = new int[nblocks]; @@ -262,7 +260,7 @@ public RandomSpansSomeFullSomeAlmostFull( final long blockEnd = blockStart + BLOCK_SIZE; // exclusive rb.addRange(blockStart, blockEnd - 1); assertEquals("i=" + i, - (i + 1) * BLOCK_SIZE, rb.getCardinality()); + (i + 1) * BLOCK_SIZE, rb.getCardinality()); assertEquals(i + 1, rb.getKvs().getFullBlocksCount()); if (set != null) { for (long vv = blockStart; vv < blockEnd; ++vv) { @@ -289,8 +287,8 @@ public RandomSpansSomeFullSomeAlmostFull( } } assertEquals("i=" + i, - nfull * BLOCK_SIZE + (i + 1) * (BLOCK_SIZE - 1), - rb.getCardinality()); + nfull * BLOCK_SIZE + (i + 1) * (BLOCK_SIZE - 1), + rb.getCardinality()); assertEquals(nfull, rb.getKvs().getFullBlocksCount()); } } @@ -448,10 +446,8 @@ private static void doTestStressConversionBetweenSpanTypes(final int 
seed) { ss.rb.validate(); } - private static RspBitmap getRandomSpansSomeFullSomeAlmostFull(final int nblocks, - final Random r) { - final RandomSpansSomeFullSomeAlmostFull ss = - new RandomSpansSomeFullSomeAlmostFull(nblocks, r); + private static RspBitmap getRandomSpansSomeFullSomeAlmostFull(final int nblocks, final Random r) { + final RandomSpansSomeFullSomeAlmostFull ss = new RandomSpansSomeFullSomeAlmostFull(nblocks, r); return ss.rb; } @@ -507,25 +503,24 @@ public void testSimpleReverseIterator() { assertTrue(!rit.hasNext()); } - private static RspBitmap getRandomRspBitmap(final int nblocks, final Random r, - final float threshold, int toRemoveBound) { + private static RspBitmap getRandomRspBitmap(final int nblocks, final Random r, final float threshold, + int toRemoveBound) { return getRandomRspBitmap(nblocks, r, threshold, toRemoveBound, 0, 0, null); } private static RspBitmap getRandomRspBitmap( - final int nblocks, final Random r, final float threshold, final int toRemoveBound, - final int firstBlock, final int nEmpty) { + final int nblocks, final Random r, final float threshold, final int toRemoveBound, + final int firstBlock, final int nEmpty) { return getRandomRspBitmap(nblocks, r, threshold, toRemoveBound, firstBlock, nEmpty, null); } private static RspBitmap getRandomRspBitmap( - final int nblocks, final Random r, final float threshold, final int toRemoveBound, - final int firstBlock, final int nEmpty, final TLongSet set) { + final int nblocks, final Random r, final float threshold, final int toRemoveBound, + final int firstBlock, final int nEmpty, final TLongSet set) { final RandomSpansSomeFullSomeAlmostFull ss = - new RandomSpansSomeFullSomeAlmostFull(nblocks, r, firstBlock, nEmpty, set); + new RandomSpansSomeFullSomeAlmostFull(nblocks, r, firstBlock, nEmpty, set); final RspBitmap rb = ss.rb; - // take some almost full blocks and remove a few elements from each as to create more ranges - // on each. 
+ // take some almost full blocks and remove a few elements from each as to create more ranges on each. for (int i = 0; i < ss.nalmost; ++i) { final boolean removeFromThisOne = r.nextFloat() < threshold; if (!removeFromThisOne) { @@ -676,8 +671,7 @@ private static void doTestReverseIteratorAdvance(final int seed) { } } - private static void doTestReverseIteratorAdvanceOnePass(final int seed, final int passIdx, - final Random r) { + private static void doTestReverseIteratorAdvanceOnePass(final int seed, final int passIdx, final Random r) { final int nblocks = 50; final RspBitmap rb = getRandomRspBitmap(nblocks, r, 1 / 2.0f, 3); final RspRangeIterator forwardRangeIter = rb.getRangeIterator(); @@ -698,8 +692,7 @@ private static void doTestReverseIteratorAdvanceOnePass(final int seed, final in } final long blockMod = v & BLOCK_LAST; prevAtBlockBoundary = - (blockMod == 0 || blockMod == 1 || blockMod == (BLOCK_LAST - 1) - || blockMod == BLOCK_LAST); + (blockMod == 0 || blockMod == 1 || blockMod == (BLOCK_LAST - 1) || blockMod == BLOCK_LAST); final String m2 = m + " && v==" + v; final RspReverseIterator reverseIter = rb.getReverseIterator(); final boolean valid = reverseIter.advance(v); @@ -771,8 +764,8 @@ public void testReverseIteratorAdvanceCases() { assertFalse(reverseIter.hasNext()); } - private static void forceContainerType(final RspBitmap rb, final String containerName, - final int safeRangeStart, final long safeRangeEnd) { + private static void forceContainerType(final RspBitmap rb, final String containerName, final int safeRangeStart, + final long safeRangeEnd) { switch (containerName) { case "bitmap": { for (int v = safeRangeStart; v <= safeRangeEnd; v += 2) { @@ -789,8 +782,7 @@ private static void forceContainerType(final RspBitmap rb, final String containe } } - private static void checkReverseIteratorAdvance(final RspBitmap rb, final long[] alls, - final String m) { + private static void checkReverseIteratorAdvance(final RspBitmap rb, final long[] alls, 
final String m) { for (long v : alls) { final RspReverseIterator rit = rb.getReverseIterator(); final RspBitmap chopped = rb.subrangeByValue(0, v); @@ -813,8 +805,7 @@ private static void checkReverseIteratorAdvance(final RspBitmap rb, final long[] } } - private static void checkRangeIteratorAdvance(final RspBitmap rb, final long[] alls, - final String m) { + private static void checkRangeIteratorAdvance(final RspBitmap rb, final long[] alls, final String m) { for (long v : alls) { final String m2 = m + " && m.hashCode()==" + m.hashCode() + " && v==" + v; final RspRangeIterator rit = rb.getRangeIterator(); @@ -840,10 +831,8 @@ private static void checkRangeIteratorAdvance(final RspBitmap rb, final long[] a @Test public void testIteratorAdvanceBlockBoundaries() { - final long[] b0s = - new long[] {BLOCK_LAST - 4, BLOCK_LAST - 3, BLOCK_LAST - 2, BLOCK_LAST - 1, BLOCK_LAST}; - final long[] b1s = - new long[] {BLOCK_SIZE, BLOCK_SIZE + 1, BLOCK_SIZE + 2, BLOCK_SIZE + 3, BLOCK_SIZE + 4}; + final long[] b0s = new long[] {BLOCK_LAST - 4, BLOCK_LAST - 3, BLOCK_LAST - 2, BLOCK_LAST - 1, BLOCK_LAST}; + final long[] b1s = new long[] {BLOCK_SIZE, BLOCK_SIZE + 1, BLOCK_SIZE + 2, BLOCK_SIZE + 3, BLOCK_SIZE + 4}; final long[] alls = new long[b0s.length + b1s.length]; System.arraycopy(b0s, 0, alls, 0, b0s.length); System.arraycopy(b1s, 0, alls, b0s.length, b1s.length); @@ -880,8 +869,7 @@ public void testIteratorAdvanceBlockBoundaries() { rb.addRange(0, BLOCK_LAST); } if (b1 < b1bits) { - forceContainerType(rb, b1ContainerName, BLOCK_SIZE + 6, - BLOCK_SIZE + BLOCK_LAST); + forceContainerType(rb, b1ContainerName, BLOCK_SIZE + 6, BLOCK_SIZE + BLOCK_LAST); for (int i = 0; i < b1s.length; ++i) { if ((b1 & (1 << i)) != 0) { rb.add(b1s[i]); @@ -891,9 +879,9 @@ public void testIteratorAdvanceBlockBoundaries() { rb.addRange(BLOCK_SIZE, BLOCK_SIZE + BLOCK_LAST); } final String m = - "b0ContainerName.charAt(0)=='" + b0ContainerName.charAt(0) + - "' && b1ContainerName.charAt(0)=='" + 
b1ContainerName.charAt(0) + - "' && b0==" + b0 + " && b1==" + b1; + "b0ContainerName.charAt(0)=='" + b0ContainerName.charAt(0) + + "' && b1ContainerName.charAt(0)=='" + b1ContainerName.charAt(0) + + "' && b0==" + b0 + " && b1==" + b1; checkReverseIteratorAdvance(rb, alls, m); checkRangeIteratorAdvance(rb, alls, m); } @@ -1087,8 +1075,7 @@ private static void doTestGetFind2(final int seed) { final TLongSet set = ValidationSet.make(count); final String pfxMsg = "doTestGetFind2 seed == " + seed; System.out.println(pfxMsg); - final RspBitmap rb = - populateRandom(pfxMsg, new Random(seed), count, 10, 30000000, 150, 50, set, true); + final RspBitmap rb = populateRandom(pfxMsg, new Random(seed), count, 10, 30000000, 150, 50, set, true); assertEquals(set.size(), rb.getCardinality()); rb.validate(); final long[] arr = new long[set.size()]; @@ -1333,7 +1320,7 @@ private static void doTestSubrangeByKey(final int seed) { final int nblocks = 60; final TLongSet set = new TLongHashSet(); final RspBitmap rb = getRandomRspBitmap(nblocks, r, 0.90f, 2000, - 0, 0, set); + 0, 0, set); final long rbc = rb.getCardinality(); final int ntests = 20; final String m = "seed=" + seed; @@ -1369,8 +1356,8 @@ public void testSubrangeByKey2() { randomizedTest(RspBitmapTest::doTestSubrangeByKey2); } - private static void findRangeCheck(final String pfxMsg, final long[] arr, final RspBitmap rb, - final long start, final long end) { + private static void findRangeCheck(final String pfxMsg, final long[] arr, final RspBitmap rb, final long start, + final long end) { final RspBitmap result = rb.subrangeByValue(start, end); result.validate(pfxMsg); int sz = 0; @@ -1492,7 +1479,7 @@ private static void doTestAddAndAppendWorkTheSame(final int seed) { final int nblocks = 120; TLongSet set = new TLongHashSet(); final RspBitmap rb = getRandomRspBitmap(nblocks, r, 0.90f, 100, - 0, 0, set); + 0, 0, set); RspRangeIterator it = rb.getRangeIterator(); final RspBitmap rba = new RspBitmap(); while (it.hasNext()) { 
@@ -1517,7 +1504,7 @@ private static void doTestAddAndAppendRangeWorkTheSame(final int seed) { final int nblocks = 120; TLongSet set = new TLongHashSet(); final RspBitmap rb = getRandomRspBitmap(nblocks, r, 0.90f, 100, - 0, 0, set); + 0, 0, set); RspRangeIterator it = rb.getRangeIterator(); final RspBitmap rba = new RspBitmap(); while (it.hasNext()) { @@ -1539,7 +1526,7 @@ private static void doTestSubrangeByPos(final int seed) { final int nblocks = 120; TLongSet set = new TLongHashSet(); final RspBitmap rb = getRandomRspBitmap(nblocks, r, 0.90f, 100, - 0, 0, set); + 0, 0, set); final long[] vs = set.toArray(); Arrays.sort(vs); set = null; @@ -1598,8 +1585,8 @@ private static void doTestSubrangeByPos2(final int seed) { } } - private static void subrangeByPosRangeCheck(final String pfxMsg, final long[] arr, - final RspBitmap rb, final long pos0, final long pos1) { + private static void subrangeByPosRangeCheck(final String pfxMsg, final long[] arr, final RspBitmap rb, + final long pos0, final long pos1) { final RspBitmap result = rb.subrangeByPos(pos0, pos1); result.validate(pfxMsg); int sz = 0; @@ -1678,16 +1665,14 @@ private static void doTestOverlaps(final int seed) { final int nblocks1 = 60; final int nblocks2 = 30; final RspBitmap rb1 = getRandomRspBitmap(nblocks1, r, 0.90f, 2000, 0, nblocks1 / 5); - final RspBitmap rb2 = - getRandomRspBitmap(nblocks2, r, 0.90f, 2000, nblocks1 + 1, nblocks2 / 5); + final RspBitmap rb2 = getRandomRspBitmap(nblocks2, r, 0.90f, 2000, nblocks1 + 1, nblocks2 / 5); assertFalse(rb1.overlaps(rb2)); final int nadds = 200; final String m = "seed=" + seed; for (int i = 0; i < nadds; ++i) { final float f = i / (float) (nadds - 1); // between 0 and 1. final int b = (int) ((nblocks1 - 1) * f); // between 0 and (nblocks1 - 1). - final int firstBlock = nblocks1 - 1 - b; // make firstBlock further to the left on every - // iteration pass. + final int firstBlock = nblocks1 - 1 - b; // make firstBlock further to the left on every iteration pass. 
final float pFull = 0.3F; final boolean doFull = r.nextFloat() <= pFull; boolean expectOverlap = false; @@ -1698,8 +1683,7 @@ private static void doTestOverlaps(final int seed) { expectOverlap = true; } else { final long start = blockStart(firstBlock) + r.nextInt(BLOCK_SIZE); - final long end = start + 1 + r.nextInt(3); // note end may span a block boundary, if - // so, so be it. + final long end = start + 1 + r.nextInt(3); // note end may span a block boundary, if so, so be it. rb2copy.addRangeExclusiveEnd(start, end); for (long v = start; v < end; ++v) { if (rb1.contains(v)) { @@ -1793,7 +1777,7 @@ private static void doTestIteratorSearch(final int seed) { final int nblocks = 120; final TLongSet set = new TLongHashSet(); final RspBitmap rb = - getRandomRspBitmap(nblocks, r, 0.20f, 6000, 0, 12, set); + getRandomRspBitmap(nblocks, r, 0.20f, 6000, 0, 12, set); doTestIteratorSearchImpl(seed, rb, set); } @@ -1838,15 +1822,15 @@ private static void doTestIteratorSearchImpl(final int seed, final RspBitmap rb, } private static void testBiOp( - final int seed, - BiFunction bitmapFun, - BiFunction boolFun) { + final int seed, + BiFunction bitmapFun, + BiFunction boolFun) { final Random r = new Random(seed); final int nblocks = 60; final RspBitmap rb1 = - getRandomRspBitmap(nblocks, r, 0.90f, 2000, 0, 12); + getRandomRspBitmap(nblocks, r, 0.90f, 2000, 0, 12); final RspBitmap rb2 = - getRandomRspBitmap(nblocks, r, 0.90f, 2000, nblocks / 2, 12); + getRandomRspBitmap(nblocks, r, 0.90f, 2000, nblocks / 2, 12); final RspBitmap rbres = bitmapFun.apply(rb1, rb2); final String m = "seed=" + seed; rbres.validate(m); @@ -1860,8 +1844,7 @@ private static void testBiOp( final boolean in1 = rb1.contains(v); final boolean in2 = rb2.contains(v); final boolean boolResult = boolFun.apply(in1, in2); - assertEquals(m + ", v=" + v + ", inr=" + inr + ", in1=" + in1 + ", in2=" + in2, - boolResult, inr); + assertEquals(m + ", v=" + v + ", inr=" + inr + ", in1=" + in1 + ", in2=" + in2, boolResult, 
inr); } } final RspRangeIterator it1 = rb1.getRangeIterator(); @@ -1874,8 +1857,7 @@ private static void testBiOp( final boolean in1 = true; // rb1.contains(v); final boolean in2 = rb2.contains(v); final boolean boolResult = boolFun.apply(in1, in2); - assertEquals(m + ", v=" + v + ", inr=" + inr + ", in1=" + in1 + ", in2=" + in2, - boolResult, inr); + assertEquals(m + ", v=" + v + ", inr=" + inr + ", in1=" + in1 + ", in2=" + in2, boolResult, inr); } } final RspRangeIterator it2 = rb2.getRangeIterator(); @@ -1888,8 +1870,7 @@ private static void testBiOp( final boolean in1 = rb1.contains(v); final boolean in2 = true; // rb2.contains(v); final boolean boolResult = boolFun.apply(in1, in2); - assertEquals(m + ", v=" + v + ", inr=" + inr + ", in1=" + in1 + ", in2=" + in2, - boolResult, inr); + assertEquals(m + ", v=" + v + ", inr=" + inr + ", in1=" + in1 + ", in2=" + in2, boolResult, inr); } } } @@ -1897,23 +1878,21 @@ private static void testBiOp( @Test public void testOr() { final IntConsumer testFun = - (final int seed) -> testBiOp(seed, RspBitmap::or, (Boolean b1, Boolean b2) -> b1 || b2); + (final int seed) -> testBiOp(seed, RspBitmap::or, (Boolean b1, Boolean b2) -> b1 || b2); randomizedTest(testFun); } @Test public void testAnd() { final IntConsumer testFun = - (final int seed) -> testBiOp(seed, RspBitmap::and, - (Boolean b1, Boolean b2) -> b1 && b2); + (final int seed) -> testBiOp(seed, RspBitmap::and, (Boolean b1, Boolean b2) -> b1 && b2); randomizedTest(testFun); } @Test public void testAndNot() { final IntConsumer testFun = - (final int seed) -> testBiOp(seed, RspBitmap::andNot, - (Boolean b1, Boolean b2) -> b1 && !b2); + (final int seed) -> testBiOp(seed, RspBitmap::andNot, (Boolean b1, Boolean b2) -> b1 && !b2); randomizedTest(testFun); } @@ -1964,8 +1943,7 @@ private static class TestContext { private final boolean partialCheck; private final String pfxMsg; - TestContext(final String pfxMsg, final int count, final boolean fullCheck, - final boolean 
partialCheck) { + TestContext(final String pfxMsg, final int count, final boolean fullCheck, final boolean partialCheck) { set = makeSet(count); this.pfxMsg = pfxMsg; this.fullCheck = fullCheck; @@ -2120,12 +2098,12 @@ public void testRandomInserts() { } private static TestContext getRandom( - final String pfxMsg, - final int seed, - final int count, - final int vmin, final int vmax, - final int rangeMin, final int rangeMax, - final boolean fullCheck, final boolean partialCheck) { + final String pfxMsg, + final int seed, + final int count, + final int vmin, final int vmax, + final int rangeMin, final int rangeMax, + final boolean fullCheck, final boolean partialCheck) { final Random r = new Random(seed); final TestContext tc = new TestContext(pfxMsg, count, fullCheck, partialCheck); for (int i = 0; i < count; ++i) { @@ -2153,8 +2131,8 @@ private static void doTestRandomInserts(final int seed) { final String pfxMsg = "doTestRandomInserts seed==" + seed; System.out.println(pfxMsg); for (int topMax = startMax; topMax <= endMax; topMax *= 10) { - final TestContext tc = getRandom(pfxMsg, seed, count, topMin, topMax, rangeMin, - rangeMax, fullCheck, partialCheck); + final TestContext tc = + getRandom(pfxMsg, seed, count, topMin, topMax, rangeMin, rangeMax, fullCheck, partialCheck); tc.endCheck(); } } @@ -2174,8 +2152,8 @@ private static void doTestIterator2(final int seed) { final boolean partialCheck = false; final String pfxMsg = "doTestRangeIterator seed == " + seed; System.out.println(pfxMsg); - final TestContext tc = getRandom(pfxMsg, seed, count, topMin, topMax, rangeMin, rangeMax, - fullCheck, partialCheck); + final TestContext tc = + getRandom(pfxMsg, seed, count, topMin, topMax, rangeMin, rangeMax, fullCheck, partialCheck); tc.endCheck(); final RspBitmap rb = tc.bitmap(); final TLongSet set = new TLongHashSet(tc.set()); @@ -2208,8 +2186,7 @@ private static void doTestIterator3(final int seed) { final boolean fullCheck = false; final boolean partialCheck = false; 
final String msg = "doTestRangeIterator seed == " + seed; - final TestContext tc = getRandom(msg, seed, count, topMin, topMax, rangeMin, rangeMax, - fullCheck, partialCheck); + final TestContext tc = getRandom(msg, seed, count, topMin, topMax, rangeMin, rangeMax, fullCheck, partialCheck); tc.endCheck(); final RspBitmap rb = tc.bitmap(); final TLongSet set = tc.set(); @@ -2244,8 +2221,7 @@ private static void doTestReverseIterator(final int seed) { final boolean fullCheck = false; final boolean partialCheck = false; final String msg = "doTestRangeIterator seed == " + seed; - final TestContext tc = getRandom(msg, seed, count, topMin, topMax, rangeMin, rangeMax, - fullCheck, partialCheck); + final TestContext tc = getRandom(msg, seed, count, topMin, topMax, rangeMin, rangeMax, fullCheck, partialCheck); tc.endCheck(); final RspBitmap rb = tc.bitmap(); final TLongSet set = tc.set(); @@ -2315,8 +2291,8 @@ private static void compare(final String msgPfx, final TLongSet set, final RspBi if (!c) { // noinspection ConstantConditions assertTrue(msgPfx + " && v == " + v + " && j == " + j + - " && vslen == " + vs.length + ", node.size() == " + rb.getCardinality(), - c); + " && vslen == " + vs.length + ", node.size() == " + rb.getCardinality(), + c); } } minmax[min] = vs[0]; @@ -2332,8 +2308,7 @@ private static void compare(final String msgPfx, final TLongSet set, final RspBi }); } if (set.size() != rb.getCardinality()) { - System.out - .println("set.size() = " + set.size() + ", node.size() = " + rb.getCardinality()); + System.out.println("set.size() = " + set.size() + ", node.size() = " + rb.getCardinality()); long lastv = -2; int iti = 0; final RspRangeIterator it = rb.getRangeIterator(); @@ -2342,8 +2317,7 @@ private static void compare(final String msgPfx, final TLongSet set, final RspBi ++iti; final long s = it.start(); final long e = it.end(); - final String imsg = - msgPfx + " && iti == " + iti + " && s == " + s + " && e == " + e; + final String imsg = msgPfx + " && iti 
== " + iti + " && s == " + s + " && e == " + e; for (long v = s; v <= e; ++v) { assertTrue(imsg + " v == " + v, set.contains(v)); } @@ -2357,15 +2331,15 @@ private static void compare(final String msgPfx, final TLongSet set, final RspBi } private static RspBitmap populateRandom( - final String pfxMsg, final Random r, final int count, - final int min, final int max, final int clusterWidth, final int jumpPropOneIn) { + final String pfxMsg, final Random r, final int count, + final int min, final int max, final int clusterWidth, final int jumpPropOneIn) { return populateRandom(pfxMsg, r, count, min, max, clusterWidth, jumpPropOneIn, null, false); } private static RspBitmap populateRandom( - final String pfxMsg, final Random r, final int count, - final int min, final int max, final int clusterWidth, final int jumpPropOneIn, - final TLongSet set, @SuppressWarnings("SameParameterValue") final boolean check) { + final String pfxMsg, final Random r, final int count, + final int min, final int max, final int clusterWidth, final int jumpPropOneIn, + final TLongSet set, @SuppressWarnings("SameParameterValue") final boolean check) { if (set != null) { assertEquals(0, set.size()); } @@ -2411,8 +2385,8 @@ private static void doTestApplyOffset(final int seed) { for (long offset : offsets) { final TLongSet set = ValidationSet.make(count); final RspBitmap rb = - populateRandom(pfxMsg, r, count, - 10, 30000000, 150, 50, set, true); + populateRandom(pfxMsg, r, count, + 10, 30000000, 150, 50, set, true); final String m = pfxMsg + ", offset=" + offset; final RspBitmap orb = rb.applyOffsetOnNew(offset); orb.validate(m); @@ -2526,7 +2500,7 @@ private static long[] pick(final Random r, final int n, final long[] vs) { } private static void doTestInvertForPicks(final String m, final long rbc, final RspBitmap rb, - final long[] vs, final long[] picks) { + final long[] vs, final long[] picks) { final RspBitmap rbPicks = RspBitmap.makeEmpty(); final TLongSet setPicks = new 
TLongHashSet(picks.length); for (final long k : picks) { @@ -2536,9 +2510,9 @@ private static void doTestInvertForPicks(final String m, final long rbc, final R assertEquals(m, setPicks.size(), rbPicks.getCardinality()); final RspBitmap inverted = new RspBitmap(); rb.invert( - inverted::appendRangeUnsafeNoWriteCheck, - rbPicks.ixRangeIterator(), - rbc); + inverted::appendRangeUnsafeNoWriteCheck, + rbPicks.ixRangeIterator(), + rbc); inverted.finishMutationsAndOptimize(); inverted.validate(m); // assertEquals(m, setPicks.size(), inverted.getCardinality()); @@ -2578,7 +2552,7 @@ private static void doTestInvert(final int seed) { } TLongSet set = new TLongHashSet(); final RspBitmap rb = getRandomRspBitmap(nblocks, r, 0.10f, 2000, - 0, 0, set); + 0, 0, set); final long[] vs = set.toArray(); Arrays.sort(vs); set = null; @@ -2688,20 +2662,20 @@ public void testSomeSimpleOps() { final RspBitmap rbExpected = str2rb("43,146,367,376,434"); assertEquals(rbExpected, rb2); final RspBitmap noIntersection = strs2rb( - "12,14-19,21-23,25-26,28-31,33-34,36-37", - "39,41,45-53,55-56,58,60,62-65,67-72,74,76-78,80-81", - "83-84,86,88-90,92,94-96,98,100,102-104,106,108,110-112", - "114,116,118-119,121,123,125,127,129,131,133-134,136-138", - "140-144,147,149-150,152-153,155,157-160,162-163,165,167-170", - "172,174,176-177,179-180,182,184-186,188-189,191-193,195-197", - "199,201-202,204,206-207,209,211,213-214,216,218,220-221,223", - "225,227,229-236,238,240,242-243,245-246,248-252,254-255,257", - "259-260,262-266,268,270,272-275,277,279,281,283-287,289,291-294", - "296-300,302,304,306,308-311,313-318,320-322,324-325,327,329-330,332-336", - "338,340-341,343-347,349,351-355,357,359,361-362,364,366,369-370,372-373", - "375,378,380-384,386-387,389-393,395-396,398,400-401,403,405-407,409,411-412", - "414-415,417,419-420,422-423,425-426,428-429,431,433,435-438,440-441,443,445", - "447,449,451,453-455,457-464,466,468-469"); + "12,14-19,21-23,25-26,28-31,33-34,36-37", + 
"39,41,45-53,55-56,58,60,62-65,67-72,74,76-78,80-81", + "83-84,86,88-90,92,94-96,98,100,102-104,106,108,110-112", + "114,116,118-119,121,123,125,127,129,131,133-134,136-138", + "140-144,147,149-150,152-153,155,157-160,162-163,165,167-170", + "172,174,176-177,179-180,182,184-186,188-189,191-193,195-197", + "199,201-202,204,206-207,209,211,213-214,216,218,220-221,223", + "225,227,229-236,238,240,242-243,245-246,248-252,254-255,257", + "259-260,262-266,268,270,272-275,277,279,281,283-287,289,291-294", + "296-300,302,304,306,308-311,313-318,320-322,324-325,327,329-330,332-336", + "338,340-341,343-347,349,351-355,357,359,361-362,364,366,369-370,372-373", + "375,378,380-384,386-387,389-393,395-396,398,400-401,403,405-407,409,411-412", + "414-415,417,419-420,422-423,425-426,428-429,431,433,435-438,440-441,443,445", + "447,449,451,453-455,457-464,466,468-469"); final RspBitmap rb3 = rb.deepCopy(); rb3.andEquals(noIntersection); assertEquals(0, rb3.getCardinality()); @@ -2852,8 +2826,7 @@ public void testAndNotEqualsRegression0() { r1.add(10); r1.add(BLOCK_SIZE + 10); r1.add(2 * BLOCK_SIZE + 10); - final long r1Card = r1.getCardinality(); // This is critical: force the card cache to - // populate. + final long r1Card = r1.getCardinality(); // This is critical: force the card cache to populate. 
assertEquals(3L, r1Card); RspBitmap r2 = new RspBitmap(); r2.addRange(BLOCK_SIZE, BLOCK_SIZE * 3 - 1); @@ -2892,8 +2865,7 @@ public void testAddRange3() { final long end3 = 13 * BLOCK_SIZE - 1; rb.addRange(start3, end3); rb.validate(); - assertEquals(end - start + 1 + end2 - start2 + 1 + end3 - start3 + 1 + 1, - rb.getCardinality()); + assertEquals(end - start + 1 + end2 - start2 + 1 + end3 - start3 + 1 + 1, rb.getCardinality()); } @@ -3214,13 +3186,11 @@ private static void doTestUpdate(final int seed) { final int clusterWidth = 150; final int jumpOneIn = 50; final RspBitmap rb1 = - populateRandom(pfxMsg, r, count, 1, 10 * count, clusterWidth, jumpOneIn); + populateRandom(pfxMsg, r, count, 1, 10 * count, clusterWidth, jumpOneIn); final int steps = 300; for (int i = 0; i < steps; ++i) { - final RspBitmap rbPlus = - populateRandom(pfxMsg, r, count / 10, 1, 10 * count, clusterWidth, jumpOneIn); - final RspBitmap rbMinus = - populateRandom(pfxMsg, r, count / 10, 1, 10 * count, clusterWidth, jumpOneIn); + final RspBitmap rbPlus = populateRandom(pfxMsg, r, count / 10, 1, 10 * count, clusterWidth, jumpOneIn); + final RspBitmap rbMinus = populateRandom(pfxMsg, r, count / 10, 1, 10 * count, clusterWidth, jumpOneIn); rbPlus.andNotEquals(rbMinus); final String m = pfxMsg + ", i=" + i; final RspBitmap preCheck = rbPlus.deepCopy().andEquals(rbMinus); @@ -3252,9 +3222,8 @@ private static void doTestRangeBatchIteratorWithBigBitmaps(final int seed) { final int steps = 300; for (int i = 0; i < steps; ++i) { final RspBitmap rb = - populateRandom(pfxMsg, r, count, 1, 10 * count, clusterWidth, jumpOneIn); - final WritableLongChunk chunk = - WritableLongChunk.makeWritableChunk(2 * count); + populateRandom(pfxMsg, r, count, 1, 10 * count, clusterWidth, jumpOneIn); + final WritableLongChunk chunk = WritableLongChunk.makeWritableChunk(2 * count); final RspRangeBatchIterator rbit = rb.getRangeBatchIterator(0, rb.getCardinality()); final int ret = rbit.fillRangeChunk(chunk, 0); final 
RspRangeIterator rit = rb.getRangeIterator(); @@ -3331,8 +3300,8 @@ private static void doTestRspRangeBatchIteratorWithInterestingSpanBoundaries(fin @Test public void testContains() { - final long[] vs = new long[] {3, 4, 5, 8, 10, 12, 29, 31, 44, 59, 60, 61, 72, 65537, 65539, - 65536 * 3, 65536 * 3 + 5}; + final long[] vs = + new long[] {3, 4, 5, 8, 10, 12, 29, 31, 44, 59, 60, 61, 72, 65537, 65539, 65536 * 3, 65536 * 3 + 5}; final RspBitmap rb = new RspBitmap(); for (long v : vs) { rb.add(v); @@ -3345,8 +3314,7 @@ public void testContains() { @Test public void testRangesCountUpperBound() { final RspBitmap rb = new RspBitmap(); - final int max = 4096; // max size of an RB ArrayContainer, sadly private on - // org.roaringbitmap pkg. + final int max = 4096; // max size of an RB ArrayContainer, sadly private on org.roaringbitmap pkg. final int initialCount = max - 1; for (int i = 0; i < 2 * initialCount; i += 2) { rb.add(i); // singleton ranges. @@ -3379,7 +3347,7 @@ private static void doTestBatchBuilderOp(final int seed, final String name, fina final int steps = 300; for (int i = 0; i < steps; ++i) { final RspBitmap rb = - populateRandom(m, r, count, 1, 10 * count, clusterWidth, jumpOneIn); + populateRandom(m, r, count, 1, 10 * count, clusterWidth, jumpOneIn); final RspBitmap result = new RspBitmap(); bop.bop(result, rb); result.finishMutations(); @@ -3395,14 +3363,13 @@ public void testAddRanges() { } private static void doTestAddRanges(final int seed) { - doTestBatchBuilderOp(seed, "appendRanges", - (out, in) -> out.addRangesUnsafeNoWriteCheck(in.ixRangeIterator())); + doTestBatchBuilderOp(seed, "appendRanges", (out, in) -> out.addRangesUnsafeNoWriteCheck(in.ixRangeIterator())); } @Test public void testRangeConstructor() { - final long[] boundaries = {BLOCK_SIZE, 2 * BLOCK_SIZE, 3 * BLOCK_SIZE, 4 * BLOCK_SIZE, - 8 * BLOCK_SIZE, 9 * BLOCK_SIZE}; + final long[] boundaries = + {BLOCK_SIZE, 2 * BLOCK_SIZE, 3 * BLOCK_SIZE, 4 * BLOCK_SIZE, 8 * BLOCK_SIZE, 9 * 
BLOCK_SIZE}; for (int i = 0; i < boundaries.length; ++i) { for (int j = i; j < boundaries.length; ++j) { final long ki = boundaries[i]; @@ -3523,10 +3490,10 @@ private static void doTestAndEquals(final int seed) { final String s = "run==" + run; final TLongSet set = new TLongHashSet(); final RspBitmap rb = getRandomRspBitmap(nblocks, r, 0.90f, 100, - 0, 0, set); + 0, 0, set); final TLongSet set2 = new TLongHashSet(); final RspBitmap rb2 = getRandomRspBitmap(nblocks, r, 0.90f, 100, - 0, 0, set2); + 0, 0, set2); rb.andEquals(rb2); final TLongSet iset = intersect(set, set2); assertEquals(s, iset.size(), rb.getCardinality()); @@ -3574,7 +3541,7 @@ private static void doTestGetKeysForPositions(final int seed) { for (int run = 0; run < runs; ++run) { final String s = "run==" + run; final RspBitmap rb = getRandomRspBitmap(nblocks, r, 0.90f, 100, - 0, 0); + 0, 0); final long card = rb.getCardinality(); final RspBitmap positions = new RspBitmap(); for (long j = 0; j < card; ++j) { @@ -3619,8 +3586,7 @@ public void test32kBoundaries() { public void testIteratorAdvanceRegression0() { final RspBitmap rb = new RspBitmap(); rb.add(BLOCK_SIZE + BLOCK_LAST - 3); - // 1 short of the last possible element on the block, on a range of its own, and not the - // first element. + // 1 short of the last possible element on the block, on a range of its own, and not the first element. 
rb.add(BLOCK_SIZE + BLOCK_LAST - 1); rb.add(2 * BLOCK_SIZE); final RspRangeIterator rangeIter = rb.getRangeIterator(); @@ -3735,8 +3701,8 @@ public void testReverseIteratorSkipsBlock() { @Test public void testReverseIteratorTwoFullBlockSpans() { RspBitmap rb = RspBitmap.makeEmpty(); - final long[][] ranges = new long[][] {new long[] {0, BLOCK_LAST}, - new long[] {2 * BLOCK_SIZE, 2 * BLOCK_SIZE + BLOCK_LAST}}; + final long[][] ranges = + new long[][] {new long[] {0, BLOCK_LAST}, new long[] {2 * BLOCK_SIZE, 2 * BLOCK_SIZE + BLOCK_LAST}}; rb = rb.addRange(0, BLOCK_LAST); rb = rb.addRange(2 * BLOCK_SIZE, 2 * BLOCK_SIZE + BLOCK_LAST); final RspReverseIterator it = rb.getReverseIterator(); @@ -3752,7 +3718,7 @@ public void testReverseIteratorTwoFullBlockSpans() { // prob.length + 1 == blocks.length private static void randomBlocks( - final RspBitmap[] rbs, final Random rand, final float[] probs, final RspBitmap[] blocks) { + final RspBitmap[] rbs, final Random rand, final float[] probs, final RspBitmap[] blocks) { BLOCKS: for (int i = 0; i < rbs.length; ++i) { final long blockOffset = i * BLOCK_SIZE; for (int b = 0; b < probs.length; ++b) { @@ -3785,8 +3751,7 @@ private static void someRandomBlocks(final Random rand, final RspBitmap[] rsps) randomBlocks(rsps, rand, probs, blocks); } - private static void testSimilarOp(final Random rand, final int blockCount, - BiConsumer op) { + private static void testSimilarOp(final Random rand, final int blockCount, BiConsumer op) { final RspBitmap[] rsps0 = new RspBitmap[blockCount]; someRandomBlocks(rand, rsps0); RspBitmap rsp0 = foldOr(rsps0); @@ -3808,7 +3773,7 @@ public void testSimilarRspsAndNot() { final Random rand = new Random(seed0); final int blockCount = 1024; testSimilarOp(rand, blockCount, - (final RspBitmap rb1, final RspBitmap rb2) -> rb1.andNotEqualsUnsafeNoWriteCheck(rb2)); + (final RspBitmap rb1, final RspBitmap rb2) -> rb1.andNotEqualsUnsafeNoWriteCheck(rb2)); } @Test @@ -3816,7 +3781,7 @@ public void 
testSimilarRspsAnd() { final Random rand = new Random(seed0 + 1); final int blockCount = 1024; testSimilarOp(rand, blockCount, - (final RspBitmap rb1, final RspBitmap rb2) -> rb1.andEqualsUnsafeNoWriteCheck(rb2)); + (final RspBitmap rb1, final RspBitmap rb2) -> rb1.andEqualsUnsafeNoWriteCheck(rb2)); } @Test @@ -3854,8 +3819,8 @@ public void testRemoveRanges() { final Random rand = new Random(seed0); final int blockCount = 1024; testSimilarOp(rand, blockCount, - (final RspBitmap rb1, final RspBitmap rb2) -> rb1 - .removeRangesUnsafeNoWriteCheck(rb2.ixRangeIterator())); + (final RspBitmap rb1, final RspBitmap rb2) -> rb1 + .removeRangesUnsafeNoWriteCheck(rb2.ixRangeIterator())); } @Test @@ -4210,10 +4175,10 @@ public void testAndEqualsSpanCoverage() { RspBitmap intersect = r1.deepCopy().andEquals(r0); intersect.validate(); RspBitmap expected = vs2rb( - 2 * BLOCK_SIZE, - 2 * BLOCK_SIZE + BLOCK_LAST, - 5 * BLOCK_SIZE, -(5 * BLOCK_SIZE + BLOCK_LAST), - 7 * BLOCK_SIZE, -(7 * BLOCK_SIZE + 2)); + 2 * BLOCK_SIZE, + 2 * BLOCK_SIZE + BLOCK_LAST, + 5 * BLOCK_SIZE, -(5 * BLOCK_SIZE + BLOCK_LAST), + 7 * BLOCK_SIZE, -(7 * BLOCK_SIZE + 2)); assertEquals(expected, intersect); } @@ -4248,10 +4213,9 @@ public void testAddRangeRegression2() { public void testAddValuesChunk() { RspBitmap r0 = RspBitmap.makeEmpty(); r0 = r0.addRange(0, BLOCK_SIZE + 9999).addRange(BLOCK_SIZE + 10001, BLOCK_SIZE + BLOCK_LAST) - .add(3 * BLOCK_SIZE); + .add(3 * BLOCK_SIZE); final long origCard = r0.getCardinality(); - try (final WritableLongChunk chunk = - WritableLongChunk.makeWritableChunk(3)) { + try (final WritableLongChunk chunk = WritableLongChunk.makeWritableChunk(3)) { chunk.set(0, BLOCK_SIZE + 10000); chunk.set(1, BLOCK_SIZE + 10001); chunk.set(2, 2 * BLOCK_SIZE); @@ -4277,7 +4241,7 @@ private static void doTestOrderedKeysByValue(final int seed) { for (int nrun = 0; nrun < nruns; ++nrun) { final int nblocks = r.nextBoolean() ? 
60 : RspArray.accNullThreshold; final RspBitmap rb = getRandomRspBitmap(nblocks, r, 0.90f, 2000, - 0, 0); + 0, 0); final long rbc = rb.getCardinality(); final int ntests = 200; final String m = "seed==" + seed + " && nrun==" + nrun; @@ -4289,7 +4253,7 @@ private static void doTestOrderedKeysByValue(final int seed) { final String m2 = m + " && t==" + t + " && start==" + start + " && end==" + end; RspBitmap rbs = rb.subrangeByValue(start, end); try (final RspRangeIterator it = rbs.getRangeIterator(); - final OrderedKeys ok = rb.getOrderedKeysByKeyRange(start, end)) { + final OrderedKeys ok = rb.getOrderedKeysByKeyRange(start, end)) { ok.forEachLongRange((final long rstart, final long rend) -> { final String m3 = m2 + " && rstart==" + rstart + " && rend==" + rend; assertTrue(m3, it.hasNext()); @@ -4302,8 +4266,8 @@ private static void doTestOrderedKeysByValue(final int seed) { } final long rbsCard = rbs.getCardinality(); try (final RspRangeIterator it = rbs.getRangeIterator(); - final OrderedKeys ok = rb.asOrderedKeys(); - final OrderedKeys.Iterator okit = ok.getOrderedKeysIterator()) { + final OrderedKeys ok = rb.asOrderedKeys(); + final OrderedKeys.Iterator okit = ok.getOrderedKeysIterator()) { final boolean wasOk = okit.advance(start); assertTrue(wasOk); final OrderedKeys ok2; @@ -4341,10 +4305,8 @@ public void testSequentialBuilderRegression0() { @Test public void testInvertNoAccForCoverage() { // 1 full block span in the middle - final RspBitmap rb = - vs2rb(10, BLOCK_SIZE, -(BLOCK_SIZE + BLOCK_LAST + 5), 3 * BLOCK_SIZE + 10); - final RspBitmap picks = - vs2rb(10, BLOCK_SIZE + 1, -(BLOCK_SIZE + BLOCK_LAST + 5), 3 * BLOCK_SIZE + 10); + final RspBitmap rb = vs2rb(10, BLOCK_SIZE, -(BLOCK_SIZE + BLOCK_LAST + 5), 3 * BLOCK_SIZE + 10); + final RspBitmap picks = vs2rb(10, BLOCK_SIZE + 1, -(BLOCK_SIZE + BLOCK_LAST + 5), 3 * BLOCK_SIZE + 10); final RspBitmapSequentialBuilder builder = new RspBitmapSequentialBuilder(); rb.invert(builder, picks.ixRangeIterator(), 
rb.getCardinality() - 1); final RspBitmap result = (RspBitmap) builder.getTreeIndexImpl(); @@ -4390,16 +4352,15 @@ public void testBinaryOpsWithFullSpans() { final int maxBlocksAsBitsSpec = 1 << nblocks; for (String opName : ops.keySet()) { final BiFunction op = ops.get(opName); - for (int firstBlocksAsBitsSpec = - 0; firstBlocksAsBitsSpec < maxBlocksAsBitsSpec; ++firstBlocksAsBitsSpec) { + for (int firstBlocksAsBitsSpec = 0; firstBlocksAsBitsSpec < maxBlocksAsBitsSpec; ++firstBlocksAsBitsSpec) { for (int secondBlocksAsBitsSpec = - 0; secondBlocksAsBitsSpec < maxBlocksAsBitsSpec; ++secondBlocksAsBitsSpec) { + 0; secondBlocksAsBitsSpec < maxBlocksAsBitsSpec; ++secondBlocksAsBitsSpec) { final RspBitmap r1 = fromBlocksAsBits(nblocks, firstBlocksAsBitsSpec); final RspBitmap r2 = fromBlocksAsBits(nblocks, secondBlocksAsBitsSpec); final RspBitmap result = op.apply(r1, r2); final String msg = opName - + " && firstBlocksAsBitsSpec==" + firstBlocksAsBitsSpec - + " && secondBlocksAsBitsSpec==" + secondBlocksAsBitsSpec; + + " && firstBlocksAsBitsSpec==" + firstBlocksAsBitsSpec + + " && secondBlocksAsBitsSpec==" + secondBlocksAsBitsSpec; result.validate(msg); final int resultBits; switch (opName) { diff --git a/DB/src/test/java/io/deephaven/db/v2/utils/rsp/RspOrderedKeysTest.java b/DB/src/test/java/io/deephaven/db/v2/utils/rsp/RspOrderedKeysTest.java index 4b8ae32c684..8c499428285 100644 --- a/DB/src/test/java/io/deephaven/db/v2/utils/rsp/RspOrderedKeysTest.java +++ b/DB/src/test/java/io/deephaven/db/v2/utils/rsp/RspOrderedKeysTest.java @@ -50,7 +50,7 @@ public void testIteratorSimpleByKeys() { final OrderedKeys oks = it.getNextOrderedKeysThrough(v); if (oks.size() != 0) { try (final WritableLongChunk chunk = - WritableLongChunk.makeWritableChunk(65536 * 6)) { + WritableLongChunk.makeWritableChunk(65536 * 6)) { oks.fillKeyRangesChunk(chunk); assertEquals(0, chunk.size() % 2); int i = 0; @@ -83,7 +83,7 @@ public void testIteratorSimpleByPosition() { final OrderedKeys oks = 
it.getNextOrderedKeysWithLength(len); if (oks.size() != 0) { try (final WritableLongChunk chunk = - WritableLongChunk.makeWritableChunk(2 * (int) len)) { + WritableLongChunk.makeWritableChunk(2 * (int) len)) { oks.fillKeyRangesChunk(chunk); assertEquals(0, chunk.size() % 2); int i = 0; @@ -303,10 +303,9 @@ public void testFillKeyIndicesChunk() { final OrderedKeys.Iterator it = ok.getOrderedKeysIterator(); final int chunkSz = 13; try (final WritableLongChunk chunk = - WritableLongChunk.makeWritableChunk(chunkSz)) { + WritableLongChunk.makeWritableChunk(chunkSz)) { while (it.hasMore()) { - final OrderedKeys okit = - it.getNextOrderedKeysWithLength(chunkSz); + final OrderedKeys okit = it.getNextOrderedKeysWithLength(chunkSz); chunk.setSize(chunkSz); okit.fillKeyIndicesChunk(chunk); final int sz = chunk.size(); @@ -354,16 +353,14 @@ public void testFillKeyRangesChunk() { final OrderedKeys.Iterator it = ok.getOrderedKeysIterator(); final int chunkSz = 13; try (final WritableLongChunk chunk = - WritableLongChunk.makeWritableChunk(chunkSz)) { + WritableLongChunk.makeWritableChunk(chunkSz)) { while (it.hasMore()) { - final OrderedKeys okit = - it.getNextOrderedKeysWithLength(chunkSz / 2); + final OrderedKeys okit = it.getNextOrderedKeysWithLength(chunkSz / 2); chunk.setSize(chunkSz); okit.fillKeyRangesChunk(chunk); final int sz = chunk.size(); for (int i = 0; i < sz; i += 2) { - res.addRangeUnsafeNoWriteCheck(chunk.get(i), - chunk.get(i + 1)); + res.addRangeUnsafeNoWriteCheck(chunk.get(i), chunk.get(i + 1)); } } } @@ -420,8 +417,7 @@ public void testForEachLongRange2() { @Test public void testInvariantRegression0() { final RspBitmap rb = new RspBitmap(); - rb.addValues(175, 225, 288, 351, 429, 523, 562, 131247, 131297, 131360, 131423, 131501, - 131595, 131634); + rb.addValues(175, 225, 288, 351, 429, 523, 562, 131247, 131297, 131360, 131423, 131501, 131595, 131634); final OrderedKeys.Iterator it = rb.getOrderedKeysIterator(); int maxKey = BLOCK_SIZE - 1; while 
(it.hasMore()) { @@ -506,8 +502,7 @@ public void testForEachLongRangeInSpanRegression0() { // Further, we need to stop in a partial range short of the full filled buf. rb.addRange(1, 2); rb.addRange(4, 5); - rb.addRange(7, 8); // we stop here (7 below in getOrderedKeysByRange), so that there is one - // range after us. + rb.addRange(7, 8); // we stop here (7 below in getOrderedKeysByRange), so that there is one range after us. rb.addRange(10, 11); final OrderedKeys ok = rb.getOrderedKeysByKeyRange(1, 7); ok.forAllLongRanges((final long start, final long end) -> { @@ -649,7 +644,7 @@ public void testAdvance() { } } try (final OrderedKeys.Iterator okIter = rb.getOrderedKeysIterator(); - final RspRangeIterator rit = rb.getRangeIterator()) { + final RspRangeIterator rit = rb.getRangeIterator()) { long prevEnd = -2; long lastRelativePos = -rb.getCardinality() - 1; int nrange = 0; @@ -710,8 +705,7 @@ public void testGetNextThroughSameContainerBack() { public void testIteratorGetNextOrderedKeysThroughRegression0() { RspBitmap rb = RspBitmap.makeSingleRange(10, 15); rb = rb.appendRange(BLOCK_SIZE + 1, BLOCK_SIZE + 10); - // rb has two containers, we are going to iterate to a maxKey of BLOCK_SIZE, which is not - // present + // rb has two containers, we are going to iterate to a maxKey of BLOCK_SIZE, which is not present // and if it was, would be at the beginning of the second container. 
try (OrderedKeys.Iterator it = rb.ixGetOrderedKeysIterator()) { assertTrue(it.hasMore()); diff --git a/DB/src/test/java/io/deephaven/db/v2/utils/sortedranges/SortedRangesOrderedKeysTest.java b/DB/src/test/java/io/deephaven/db/v2/utils/sortedranges/SortedRangesOrderedKeysTest.java index 5dad98ca627..50f5cada41a 100644 --- a/DB/src/test/java/io/deephaven/db/v2/utils/sortedranges/SortedRangesOrderedKeysTest.java +++ b/DB/src/test/java/io/deephaven/db/v2/utils/sortedranges/SortedRangesOrderedKeysTest.java @@ -10,8 +10,7 @@ public class SortedRangesOrderedKeysTest extends OrderedKeysTestBase { @Override protected OrderedKeys create(long... values) { - SortedRanges sar = - SortedRanges.makeForKnownRange(values[0], values[values.length - 1], true); + SortedRanges sar = SortedRanges.makeForKnownRange(values[0], values[values.length - 1], true); if (sar == null) { throw new IllegalStateException(); } @@ -136,7 +135,7 @@ public void testOkByOne() { @Test public void testOkNextWithLengthCase0() { final SortedRanges sr = sortedRangesFromString( - "0-21,23,25-32,34-38,40-43,45-48,50-63,65-66,68,70,72-73"); + "0-21,23,25-32,34-38,40-43,45-48,50-63,65-66,68,70,72-73"); assertNotNull(sr); for (int step = 1; step < 7; ++step) { final String m = "step==" + step; @@ -145,8 +144,7 @@ public void testOkNextWithLengthCase0() { while (okit.hasMore()) { final String m2 = m + " && accum==" + accum; final OrderedKeys ok = okit.getNextOrderedKeysWithLength(step); - final Index expected = - new TreeIndex(sr.ixSubindexByPosOnNew(accum, accum + step)); + final Index expected = new TreeIndex(sr.ixSubindexByPosOnNew(accum, accum + step)); final Index fromOk = ok.asIndex(); assertEquals(m2, expected.size(), fromOk.size()); assertTrue(m2, expected.subsetOf(fromOk)); @@ -174,10 +172,10 @@ public void testOrderedKeysByPositionCases() { sr = sr.add(7); sr = sr.addRange(9, 12); try (final SortedRangesOrderedKeys ok = new SortedRangesOrderedKeys(sr); - final SortedRangesOrderedKeys ok2 = - 
(SortedRangesOrderedKeys) ok.getOrderedKeysByPosition(3, 7); - final SortedRangesOrderedKeys ok3 = - (SortedRangesOrderedKeys) ok2.getOrderedKeysByPosition(2, 5)) { + final SortedRangesOrderedKeys ok2 = + (SortedRangesOrderedKeys) ok.getOrderedKeysByPosition(3, 7); + final SortedRangesOrderedKeys ok3 = + (SortedRangesOrderedKeys) ok2.getOrderedKeysByPosition(2, 5)) { ok.validate(); ok2.validate(); ok3.validate(); diff --git a/DB/src/test/java/io/deephaven/db/v2/utils/sortedranges/SortedRangesTest.java b/DB/src/test/java/io/deephaven/db/v2/utils/sortedranges/SortedRangesTest.java index fd4aaf2a8cd..392e246614b 100644 --- a/DB/src/test/java/io/deephaven/db/v2/utils/sortedranges/SortedRangesTest.java +++ b/DB/src/test/java/io/deephaven/db/v2/utils/sortedranges/SortedRangesTest.java @@ -573,8 +573,7 @@ public void testRemoveRangeSimple() { assertEquals(3, sar.getCardinality()); } - private static final long[] sar0vs = - new long[] {2, -4, 6, -12, 20, 22, -23, 25, 27, 30, -31, 40, -60}; + private static final long[] sar0vs = new long[] {2, -4, 6, -12, 20, 22, -23, 25, 27, 30, -31, 40, -60}; private static long[][] vs2segments(final long[] vs) { final ArrayList buf = new ArrayList<>(); @@ -905,7 +904,7 @@ private static void compare(final String msgPfx, final TLongSet set, final Sorte if (!c) { // noinspection ConstantConditions assertTrue(msgPfx + " && v==" + v + " && j==" + j + " && vslen==" + vs.length + - ", node.size()==" + arr.getCardinality(), c); + ", node.size()==" + arr.getCardinality(), c); } } minmax[min] = vs[0]; @@ -923,8 +922,7 @@ private static void compare(final String msgPfx, final TLongSet set, final Sorte }); } if (set.size() != arr.getCardinality()) { - System.out - .println("set.size() = " + set.size() + ", arr.size() = " + arr.getCardinality()); + System.out.println("set.size() = " + set.size() + ", arr.size() = " + arr.getCardinality()); long lastv = -2; int iti = 0; final Index.RangeIterator it = arr.getRangeIterator(); @@ -947,9 +945,9 @@ 
private static void compare(final String msgPfx, final TLongSet set, final Sorte } private static SortedRanges populateRandom( - final String pfxMsg, final Random r, final int count, final int min, final int max, - final int clusterWidth, final int jumpPropOneIn, final TLongSet set, - @SuppressWarnings("SameParameterValue") final boolean check) { + final String pfxMsg, final Random r, final int count, final int min, final int max, + final int clusterWidth, final int jumpPropOneIn, final TLongSet set, + @SuppressWarnings("SameParameterValue") final boolean check) { assertEquals(0, set.size()); SortedRanges sar = new SortedRangesLong(2); final int halfClusterWidth = clusterWidth / 2; @@ -993,8 +991,8 @@ private static void doTestGetFind(final int seed) { final String pfxMsg = "doTestGetFind seed == " + seed; System.out.println(pfxMsg); final SortedRanges sar = populateRandom( - pfxMsg, new Random(seed), count, 10, 3000000, 150, 50, - set, true); + pfxMsg, new Random(seed), count, 10, 3000000, 150, 50, + set, true); assertEquals(set.size(), sar.getCardinality()); final long[] arr = new long[set.size()]; set.toArray(arr); @@ -1031,8 +1029,7 @@ public void testFindCases() { } private static void searchRangeCheck( - final String msg, final long start, final long end, final long value, final long prev, - final long result) { + final String msg, final long start, final long end, final long value, final long prev, final long result) { if (value < start) { assertEquals(msg, prev, result); } else if (value <= end) { @@ -1049,7 +1046,7 @@ public void testSearchIteratorBinarySearch() { long prev = -1L; final MutableLong mutVal = new MutableLong(-1L); final ReadOnlyIndex.TargetComparator comp = - (final long key, final int dir) -> Long.signum(dir * (mutVal.getValue() - key)); + (final long key, final int dir) -> Long.signum(dir * (mutVal.getValue() - key)); for (long[] segment : segments0) { final long start = segment[0]; final long end = segment[1]; @@ -1079,7 +1076,7 @@ public 
void testSearchIteratorBinarySearchCases() { final Index.SearchIterator sit = sar.getSearchIterator(); final long v = sar.last(); final ReadOnlyIndex.TargetComparator comp = - (final long key, final int dir) -> Long.signum(dir * (v - key)); + (final long key, final int dir) -> Long.signum(dir * (v - key)); final long r = sit.binarySearchValue(comp, 1); assertEquals(v, r); } @@ -1204,17 +1201,15 @@ public void testGetKeysForPositionsTypes() { final int seed = seed0 + run; final Random rand = new Random(seed); final String m = "run==" + run; - for (int count : new int[] {SortedRanges.LONG_DENSE_MAX_CAPACITY, - SortedRanges.INT_DENSE_MAX_CAPACITY}) { + for (int count : new int[] {SortedRanges.LONG_DENSE_MAX_CAPACITY, SortedRanges.INT_DENSE_MAX_CAPACITY}) { SortedRanges msar = populateRandom2( - null, rand, count, 0.33, 1L << 33, 11, 15, 1, 6); + null, rand, count, 0.33, 1L << 33, 11, 15, 1, 6); if (rand.nextBoolean()) { msar = msar.tryCompact(4); } final SortedRanges sar = msar; final double p = 0.1; - final TLongArrayList positions = - new TLongArrayList((int) Math.ceil(sar.getCardinality() * (p * 1.05))); + final TLongArrayList positions = new TLongArrayList((int) Math.ceil(sar.getCardinality() * (p * 1.05))); for (int i = 0; i < sar.getCardinality(); ++i) { if (rand.nextDouble() <= p) { positions.add(i); @@ -1246,7 +1241,7 @@ public void testSubArrayByKeyTypes() { for (int d : new int[] {-1, 0, +1}) { final int count = SortedRanges.LONG_DENSE_MAX_CAPACITY + d; SortedRanges sar = populateRandom2( - null, rand, count, 0.25, 1L << 33, 11, 15, 1, 6); + null, rand, count, 0.25, 1L << 33, 11, 15, 1, 6); if (rand.nextBoolean()) { sar = sar.tryCompact(4); } @@ -1274,8 +1269,7 @@ public void testSubArrayByKeyTypes() { try { SortedRanges.checkEquals(byIntersect, subSar); } catch (Exception ex) { - fail(m3 + ", exception " + ex.getClass().getSimpleName() + ": " - + ex.getMessage()); + fail(m3 + ", exception " + ex.getClass().getSimpleName() + ": " + ex.getMessage()); } } } @@ 
-1315,8 +1309,7 @@ public void testRetainRangeCases() { assertTrue(arr.containsRange(15, 20)); } - private static TLongArrayList subRangeByPos(final SortedRanges sar, final long startPos, - final long endPos) { + private static TLongArrayList subRangeByPos(final SortedRanges sar, final long startPos, final long endPos) { TLongArrayList a = new TLongArrayList(); long pos = -1; try (final Index.Iterator it = sar.getIterator()) { @@ -1541,7 +1534,7 @@ public void testIntersect() { // Ensure sarCopy wasn't modified. assertEquals(sar.getCardinality(), sarCopy.getCardinality()); try (final Index.Iterator sarIter = sar.getIterator(); - final Index.Iterator sarCopyIter = sarCopy.getIterator()) { + final Index.Iterator sarCopyIter = sarCopy.getIterator()) { while (sarIter.hasNext()) { assertTrue(sarCopyIter.hasNext()); assertEquals(sarIter.nextLong(), sarCopyIter.nextLong()); @@ -1638,16 +1631,15 @@ public void testInvertRange() { } } - private static final long[] vs0 = - new long[] {3, 6, 7, 9, 12, 20, 23, 40, 41, 51, 52, 53, 54, 55, 56, 57, 58, 59}; + private static final long[] vs0 = new long[] {3, 6, 7, 9, 12, 20, 23, 40, 41, 51, 52, 53, 54, 55, 56, 57, 58, 59}; private static final long[] vs1 = new long[] {1, 5, 32, 39}; private static final long[] vs2 = new long[] {1, 2, 5, 13, 61}; private static final long[] vs3 = new long[] {27, 28, 29, 30, 39, 40, 41, 59, 60, 61, 62}; private static final long[][] vss = new long[][] {sar0vs, vs0, vs1, vs2, vs3}; private static void checkInvert( - final String prefixMsg, final SortedRanges sar, final Index.RangeIterator ixrit, - final Index.Iterator ixit, final long maxPosition) { + final String prefixMsg, final SortedRanges sar, final Index.RangeIterator ixrit, final Index.Iterator ixit, + final long maxPosition) { final TreeIndexImplSequentialBuilder b = new TreeIndexImplSequentialBuilder(); final boolean r = sar.invertOnNew(ixrit, b, maxPosition); final String m = "maxPosition==" + maxPosition; @@ -1672,8 +1664,7 @@ public void 
testInvert0() { final SortedRanges sar = makeSortedArray0(); final long[] vs = vs0; final Index ix = Index.FACTORY.getIndexByValues(vs); - for (long maxPosition = 0; maxPosition <= ix.size(); ++maxPosition) { // go one over the - // last position. + for (long maxPosition = 0; maxPosition <= ix.size(); ++maxPosition) { // go one over the last position. final Index.RangeIterator ixit = ix.rangeIterator(); checkInvert("", sar, ixit, ix.iterator(), maxPosition); } @@ -1704,8 +1695,7 @@ public void testInvert() { } for (long maxPosition = 0; maxPosition <= sarCard + 2; ++maxPosition) { final String m4 = m3 + " && maxPosition==" + maxPosition; - checkInvert(m4 + " && ", sar, invOp.getRangeIterator(), invOp.getIterator(), - maxPosition); + checkInvert(m4 + " && ", sar, invOp.getRangeIterator(), invOp.getIterator(), maxPosition); } } } @@ -1744,8 +1734,8 @@ private static void doTestInsertOrUnionSimple(final boolean isInsert) { } } - private static void doTestInsertOrUnion(final String m2, final boolean isInsert, - final SortedRanges orig, final SortedRanges op1, final SortedRanges op2) { + private static void doTestInsertOrUnion(final String m2, final boolean isInsert, final SortedRanges orig, + final SortedRanges op1, final SortedRanges op2) { final TreeIndexImpl result; if (isInsert) { final TreeIndexImpl tix = op1.insertImpl(op2); @@ -1781,8 +1771,8 @@ private static void doTestInsertOrUnion(final String m2, final boolean isInsert, } } assertEquals(m2, - orig.getCardinality() + op2.getCardinality() - orig.intersectOnNew(op2).ixCardinality(), - result.ixCardinality()); + orig.getCardinality() + op2.getCardinality() - orig.intersectOnNew(op2).ixCardinality(), + result.ixCardinality()); } @Test @@ -1807,8 +1797,8 @@ public void testRetainAndIntersect() { if (isRetain) { result = op1.retain(op2); assertTrue(m2, result.ixIsEmpty() || - (result instanceof SingleRange) || - (result instanceof SortedRanges && op1 == result)); + (result instanceof SingleRange) || + (result 
instanceof SortedRanges && op1 == result)); } else { result = op1.intersectOnNew(op2); } @@ -1911,7 +1901,7 @@ public void testRemoveAndMinus() { private static SortedRanges vs2sar(final long[] vs) { SortedRanges sar = SortedRanges.tryMakeForKnownRangeFinalCapacityLowerBound( - vs.length, vs.length, 0, Math.abs(vs[vs.length - 1]), false); + vs.length, vs.length, 0, Math.abs(vs[vs.length - 1]), false); long pendingStart = -1; for (long v : vs) { if (v < 0) { @@ -2021,13 +2011,11 @@ public void testManyRanges() { assertEquals(sr.last(), sr.get(sr.getCardinality() - 1)); } - private static void checkEquals(final String m, final TLongArrayList vs, - final SortedRanges sr) { + private static void checkEquals(final String m, final TLongArrayList vs, final SortedRanges sr) { checkEquals(m, vs, vs.size(), sr); } - private static void checkEquals(final String m, final TLongArrayList vs, final int vsSize, - final SortedRanges sr) { + private static void checkEquals(final String m, final TLongArrayList vs, final int vsSize, final SortedRanges sr) { sr.validate(); assertEquals(m, vsSize, sr.getCardinality()); final MutableInt pos = new MutableInt(0); @@ -2060,14 +2048,12 @@ public void testTypeBoundaries() { final long lastData = sr.unpackedGet(sr.count - 1); final boolean lastIsNeg = lastData < 0; final long lastValue = lastIsNeg ? 
-lastData : lastData; - for (long newVal : new long[] {lastValue, lastValue + 1, lastValue + 2, - lastValue + 3}) { + for (long newVal : new long[] {lastValue, lastValue + 1, lastValue + 2, lastValue + 3}) { final String m3 = m2 + " && newVal==" + newVal; SortedRanges sr2 = sr.deepCopy(); sr2 = sr2.add(newVal); if (count == SortedRanges.SHORT_MAX_CAPACITY && - ((lastIsNeg && newVal > lastValue + 1) - || (!lastIsNeg && newVal > lastValue))) { + ((lastIsNeg && newVal > lastValue + 1) || (!lastIsNeg && newVal > lastValue))) { assertNull(m3, sr2); } else { if (newVal != lastValue) { @@ -2112,8 +2098,8 @@ public void testSubsetOfTypes() { ++i; } final TreeIndexImpl r = sr.ixMinusOnNew( - SingleRange.make( - sr2.first() - 2, sr2.last() + 2)); + SingleRange.make( + sr2.first() - 2, sr2.last() + 2)); assertNotNull(r); assertTrue(m2, r instanceof SortedRanges); sr2 = (SortedRanges) r; @@ -2165,17 +2151,16 @@ public void testUnionWithTypeChanges() { final int seed = seed0 + run; final Random rand = new Random(seed); final String m = "run==" + run; - for (int size : new int[] {SortedRanges.LONG_DENSE_MAX_CAPACITY, - SortedRanges.INT_DENSE_MAX_CAPACITY}) { + for (int size : new int[] {SortedRanges.LONG_DENSE_MAX_CAPACITY, SortedRanges.INT_DENSE_MAX_CAPACITY}) { final String m2 = m + " && size==" + size; SortedRanges msr1 = populateRandom2( - null, rand, size, 0.25, 2, 2, 5, 1, 5); + null, rand, size, 0.25, 2, 2, 5, 1, 5); if (rand.nextBoolean()) { msr1 = msr1.tryCompact(4); } final SortedRanges sr1 = msr1; SortedRanges msr2 = populateRandom2( - null, rand, size, 0.25, 2, 2, 5, 1, 5); + null, rand, size, 0.25, 2, 2, 5, 1, 5); if (rand.nextBoolean()) { msr2 = msr2.tryCompact(4); } @@ -2183,8 +2168,7 @@ public void testUnionWithTypeChanges() { final TreeIndexImpl union = SortedRanges.unionOnNew(sr1, sr2); for (SortedRanges sr : new SortedRanges[] {sr1, sr2}) { sr.forEachLongRange((final long start, final long end) -> { - assertTrue(m2 + " && start==" + start + " && end==" + end, 
- union.ixContainsRange(start, end)); + assertTrue(m2 + " && start==" + start + " && end==" + end, union.ixContainsRange(start, end)); return true; }); } @@ -2197,9 +2181,9 @@ public void testUnionWithTypeChanges() { return true; }); assertEquals(m2, - sr1.getCardinality() + sr2.getCardinality() - - sr1.intersectOnNew(sr2).ixCardinality(), - union.ixCardinality()); + sr1.getCardinality() + sr2.getCardinality() - + sr1.intersectOnNew(sr2).ixCardinality(), + union.ixCardinality()); } } @@ -2211,18 +2195,17 @@ public void testInsertWithTypeChanges() { final int seed = seed0 + run; final Random rand = new Random(seed); final String m = "run==" + run; - for (int size : new int[] {SortedRanges.LONG_DENSE_MAX_CAPACITY, - SortedRanges.INT_DENSE_MAX_CAPACITY}) { + for (int size : new int[] {SortedRanges.LONG_DENSE_MAX_CAPACITY, SortedRanges.INT_DENSE_MAX_CAPACITY}) { final String m2 = m + " && size==" + size; final int spaceBase = rand.nextBoolean() ? 0 : Short.MAX_VALUE / 32; SortedRanges msr1 = populateRandom2( - null, rand, size, 0.25, 2, 2, 5, spaceBase + 1, spaceBase + 7); + null, rand, size, 0.25, 2, 2, 5, spaceBase + 1, spaceBase + 7); if (rand.nextBoolean()) { msr1 = msr1.tryCompact(4); } final SortedRanges sr1 = msr1; SortedRanges msr2 = populateRandom2( - null, rand, size, 0.25, 2, 2, 5, spaceBase + 1, spaceBase + 7); + null, rand, size, 0.25, 2, 2, 5, spaceBase + 1, spaceBase + 7); if (rand.nextBoolean()) { msr2 = msr2.tryCompact(4); } @@ -2236,8 +2219,7 @@ public void testInsertWithTypeChanges() { result.ixValidate(); for (SortedRanges sr : new SortedRanges[] {sr1, sr2}) { sr.forEachLongRange((final long start, final long end) -> { - assertTrue(m2 + " && start==" + start + " && end==" + end, - result.ixContainsRange(start, end)); + assertTrue(m2 + " && start==" + start + " && end==" + end, result.ixContainsRange(start, end)); return true; }); } @@ -2250,9 +2232,9 @@ public void testInsertWithTypeChanges() { return true; }); assertEquals(m2, - 
sr1.getCardinality() + sr2.getCardinality() - - sr1.intersectOnNew(sr2).ixCardinality(), - result.ixCardinality()); + sr1.getCardinality() + sr2.getCardinality() - + sr1.intersectOnNew(sr2).ixCardinality(), + result.ixCardinality()); } } @@ -2264,17 +2246,16 @@ public void testIntersectWithTypeChanges() { final int seed = seed0 + run; final Random rand = new Random(seed); final String m = "run==" + run; - for (int size : new int[] {SortedRanges.LONG_DENSE_MAX_CAPACITY, - SortedRanges.INT_DENSE_MAX_CAPACITY}) { + for (int size : new int[] {SortedRanges.LONG_DENSE_MAX_CAPACITY, SortedRanges.INT_DENSE_MAX_CAPACITY}) { final String m2 = m + " && size==" + size; SortedRanges msr1 = populateRandom2( - null, rand, size, 0.25, 2, 11, 15, 1, 9); + null, rand, size, 0.25, 2, 11, 15, 1, 9); if (rand.nextBoolean()) { msr1 = msr1.tryCompact(4); } final SortedRanges sr1 = msr1; SortedRanges msr2 = populateRandom2( - null, rand, size, 0.25, 2, 7, 20, 1, 5); + null, rand, size, 0.25, 2, 7, 20, 1, 5); if (rand.nextBoolean()) { msr2 = msr2.tryCompact(4); } @@ -2311,7 +2292,7 @@ public void testGetFindTypes() { SortedRanges.SHORT_MAX_CAPACITY}) { final TLongArrayList arr = new TLongArrayList(count); SortedRanges sar = populateRandom2( - arr, rand, count, 0.25, 2, 7, 11, 1, 5); + arr, rand, count, 0.25, 2, 7, 11, 1, 5); if (rand.nextBoolean()) { sar = sar.tryCompact(4); } @@ -2351,7 +2332,7 @@ public void testRemoveSinglesTypes() { SortedRanges.SHORT_MAX_CAPACITY}) { final TLongArrayList arr = new TLongArrayList(count); SortedRanges sar = populateRandom2( - arr, rand, count, 0.25, 2, 7, 11, 1, 5); + arr, rand, count, 0.25, 2, 7, 11, 1, 5); if (rand.nextBoolean()) { sar = sar.tryCompact(4); } @@ -2393,15 +2374,14 @@ public void testRemoveRangesTypes() { SortedRanges.SHORT_MAX_CAPACITY}) { final TLongArrayList arr = new TLongArrayList(count); SortedRanges sar = populateRandom2( - arr, rand, count, 0.25, 2, 7, 11, 1, 5); + arr, rand, count, 0.25, 2, 7, 11, 1, 5); if (rand.nextBoolean()) 
{ sar = sar.tryCompact(4); } final TLongArrayList arrCopy = new TLongArrayList(arr); while (arrCopy.size() > 1) { final int posRmStart = rand.nextInt(arrCopy.size() - 1); - final int posRmEnd = - posRmStart + 1 + rand.nextInt(arrCopy.size() - 1 - posRmStart); + final int posRmEnd = posRmStart + 1 + rand.nextInt(arrCopy.size() - 1 - posRmStart); final int cardRm = posRmEnd - posRmStart + 1; final long vRmStart = arrCopy.get(posRmStart); final long vRmEnd = arrCopy.get(posRmEnd); @@ -2429,12 +2409,12 @@ public void testUnionTypes() { for (int count : new int[] { SortedRanges.LONG_DENSE_MAX_CAPACITY, SortedRanges.INT_DENSE_MAX_CAPACITY}) { SortedRanges sr1 = populateRandom2( - null, rand, count, 0.25, 2, 11, 15, 1, 6); + null, rand, count, 0.25, 2, 11, 15, 1, 6); if (rand.nextBoolean()) { sr1 = sr1.tryCompact(4); } SortedRanges sr2 = populateRandom2( - null, rand, count, 0.25, 10, 14, 19, 3, 4); + null, rand, count, 0.25, 10, 14, 19, 3, 4); if (rand.nextBoolean()) { sr2 = sr2.tryCompact(4); } @@ -2466,8 +2446,8 @@ public void testUnionTypes() { } } assertEquals(m, sr1.getCardinality() + sr2.getCardinality() - - sr1.intersectOnNew(sr2).ixCardinality(), - result.ixCardinality()); + sr1.intersectOnNew(sr2).ixCardinality(), + result.ixCardinality()); } } } @@ -2484,7 +2464,7 @@ public void testIteratorTypes() { SortedRanges.SHORT_MAX_CAPACITY}) { final String m2 = m + " && count==" + count; SortedRanges sr = populateRandom2( - null, rand, count, 0.25, 2, 1, 10, 1, 6); + null, rand, count, 0.25, 2, 1, 10, 1, 6); if (rand.nextBoolean()) { sr = sr.tryCompact(4); } @@ -2514,7 +2494,7 @@ public void testSearchIteratorTypes() { SortedRanges.SHORT_MAX_CAPACITY}) { final String m2 = m + " && count==" + count; SortedRanges sr = populateRandom2( - null, rand, count, 0.25, 2, 1, 10, 1, 6); + null, rand, count, 0.25, 2, 1, 10, 1, 6); if (rand.nextBoolean()) { sr = sr.tryCompact(4); } @@ -2544,7 +2524,7 @@ public void testRangeIteratorTypes() { SortedRanges.SHORT_MAX_CAPACITY}) { 
final String m2 = m + " && count==" + count; SortedRanges sr = populateRandom2( - null, rand, count, 0.25, 2, 1, 10, 1, 6); + null, rand, count, 0.25, 2, 1, 10, 1, 6); if (rand.nextBoolean()) { sr = sr.tryCompact(4); } @@ -2600,7 +2580,7 @@ public void testGetOrderedKeysByKeyRange() { final String m2 = m + " && count==" + count; final long offset = (1L << 33) + 2; SortedRanges sr = populateRandom2(new SortedRangesInt(count, offset), - null, rand, count, 0.25, offset, 1, 11, 5, 11); + null, rand, count, 0.25, offset, 1, 11, 5, 11); if (rand.nextBoolean()) { sr = sr.tryCompact(4); } @@ -2615,7 +2595,7 @@ public void testGetOrderedKeysByKeyRange() { for (long end = start; end <= last + 1; ++end) { final String m4 = m3 + " && end==" + end; try (final OrderedKeys ok = sr.getOrderedKeysByKeyRange(start, end); - final Index ix = new TreeIndex(sr.ixSubindexByKeyOnNew(start, end))) { + final Index ix = new TreeIndex(sr.ixSubindexByKeyOnNew(start, end))) { assertEquals(m4, ix.firstKey(), ok.firstKey()); assertEquals(m4, ix.lastKey(), ok.lastKey()); assertEquals(m4, ix.size(), ok.size()); @@ -2627,20 +2607,15 @@ public void testGetOrderedKeysByKeyRange() { if (ok2End < ok2Start) { continue; } - final String m5 = - m4 + " && dStart==" + dStart + " && dEnd==" + dEnd; - final OrderedKeys ok2 = - ok.getOrderedKeysByKeyRange(ok2Start, ok2End); + final String m5 = m4 + " && dStart==" + dStart + " && dEnd==" + dEnd; + final OrderedKeys ok2 = ok.getOrderedKeysByKeyRange(ok2Start, ok2End); if (ok2End < ok.firstKey() || ok.lastKey() < ok2Start) { assertEquals(0, ok2.size()); continue; } - final long rStart = - Math.min(ok.lastKey(), Math.max(ok2Start, ok.firstKey())); - final long rEnd = - Math.max(ok.firstKey(), Math.min(ok2End, ok.lastKey())); - final Index ix2 = - new TreeIndex(sr.ixSubindexByKeyOnNew(rStart, rEnd)); + final long rStart = Math.min(ok.lastKey(), Math.max(ok2Start, ok.firstKey())); + final long rEnd = Math.max(ok.firstKey(), Math.min(ok2End, ok.lastKey())); + final 
Index ix2 = new TreeIndex(sr.ixSubindexByKeyOnNew(rStart, rEnd)); checkOkAgainstIndex(m5, ok2, ix2); } } @@ -2663,7 +2638,7 @@ public void testOrderedKeysNextWithLengthTypes() { SortedRanges.SHORT_MAX_CAPACITY}) { final String m2 = m + " && count==" + count; SortedRanges sr = populateRandom2( - null, rand, count, 0.25, 1L << 33, 1, 10, 1, 11); + null, rand, count, 0.25, 1L << 33, 1, 10, 1, 11); if (rand.nextBoolean()) { sr = sr.tryCompact(4); } @@ -2677,8 +2652,7 @@ public void testOrderedKeysNextWithLengthTypes() { final long okSrLastPos = sr.getCardinality() - 1 - dEnd; final long okSrCard = okSrLastPos - dStart + 1; try (final OrderedKeys okSr0 = sr.getOrderedKeysByPosition(dStart0, ok0SrCard); - final OrderedKeys okSr = - okSr0.getOrderedKeysByPosition(dStart - dStart0, okSrCard)) { + final OrderedKeys okSr = okSr0.getOrderedKeysByPosition(dStart - dStart0, okSrCard)) { try (final OrderedKeys.Iterator okit = okSr.getOrderedKeysIterator()) { long accum = 0; int i = 0; @@ -2693,9 +2667,8 @@ public void testOrderedKeysNextWithLengthTypes() { final OrderedKeys ok = okit.getNextOrderedKeysWithLength(step); final long okCard = Math.min(step, okSrCard - accum); assertEquals(m3, okCard, ok.size()); - final Index subSr = - new TreeIndex(sr.ixSubindexByPosOnNew(dStart + accum, - dStart + accum + okCard /* exclusive */)); + final Index subSr = new TreeIndex( + sr.ixSubindexByPosOnNew(dStart + accum, dStart + accum + okCard /* exclusive */)); assertEquals(m3, okCard, subSr.size()); checkOkAgainstIndex(m3, ok, subSr); accum += okCard; @@ -2719,7 +2692,7 @@ public void testOrderedKeysNextThroughTypes() { SortedRanges.SHORT_MAX_CAPACITY}) { final String m2 = m + " && count==" + count; SortedRanges sr = populateRandom2( - null, rand, count, 0.25, 2, 1, 11, 1, 11); + null, rand, count, 0.25, 2, 1, 11, 1, 11); if (rand.nextBoolean()) { sr = sr.tryCompact(4); } @@ -2740,9 +2713,8 @@ public void testOrderedKeysNextThroughTypes() { break; } final long okCard = Math.min(1 + 
rand.nextInt(step), okSrCard - accum); - final Index subSr = - new TreeIndex(sr.ixSubindexByPosOnNew(dStart + accum, - dStart + accum + okCard /* exclusive */)); + final Index subSr = new TreeIndex( + sr.ixSubindexByPosOnNew(dStart + accum, dStart + accum + okCard /* exclusive */)); final long last = subSr.lastKey(); final long target; if (!sr.contains(last + 1) && rand.nextBoolean()) { @@ -2762,21 +2734,20 @@ public void testOrderedKeysNextThroughTypes() { } private static SortedRanges populateRandom2( - final TLongArrayList vsOut, final Random rand, final int count, final double singlesDensity, - final long offset, - final int rangeLenMin, final int rangeLenMax, - final int spaceMin, final int spaceMax) { + final TLongArrayList vsOut, final Random rand, final int count, final double singlesDensity, + final long offset, + final int rangeLenMin, final int rangeLenMax, + final int spaceMin, final int spaceMax) { return populateRandom2(SortedRanges.makeEmpty(), - vsOut, rand, count, singlesDensity, offset, rangeLenMin, rangeLenMax, spaceMin, - spaceMax); + vsOut, rand, count, singlesDensity, offset, rangeLenMin, rangeLenMax, spaceMin, spaceMax); } private static SortedRanges populateRandom2( - final SortedRanges srIn, - final TLongArrayList vsOut, final Random rand, final int count, final double singlesDensity, - final long offset, - final int rangeLenMin, final int rangeLenMax, - final int spaceMin, final int spaceMax) { + final SortedRanges srIn, + final TLongArrayList vsOut, final Random rand, final int count, final double singlesDensity, + final long offset, + final int rangeLenMin, final int rangeLenMax, + final int spaceMin, final int spaceMax) { SortedRanges sr = srIn; final int dRangeLen = rangeLenMax - rangeLenMin; final int dSpace = spaceMax - spaceMin; @@ -2840,13 +2811,13 @@ private static SortedRanges rvs2sr(final long[] vs, SortedRanges sr) { } private static long[] rvs0 = new long[] { - 2, 4, -5, 8, -12, 14, -18, 20, 22, -23, 26, -27, 29, -30, 32, -36, 
39, -43, 45, -47, 49, - 53, -56, 60, 62, -64, 66, -67, 69, 71, + 2, 4, -5, 8, -12, 14, -18, 20, 22, -23, 26, -27, 29, -30, 32, -36, 39, -43, 45, -47, 49, 53, -56, 60, 62, + -64, 66, -67, 69, 71, -73, 75, -80, 82, -83, 85, -89, 91, -92, 94, -98, 100, 102, -106, 109, 112, -113 }; private static long[] rvs1 = new long[] { - 14, 20, 22, 26, 29, 33, -34, 36, 39, 45, -46, 49, 55, 64, 67, 69, 71, -73, 78, -79, 82, - 86, -88, 91, 94, 102, -103, 109 + 14, 20, 22, 26, 29, 33, -34, 36, 39, 45, -46, 49, 55, 64, 67, 69, 71, -73, 78, -79, 82, 86, -88, 91, 94, + 102, -103, 109 }; @Test @@ -2868,8 +2839,7 @@ public void testIntersectsCase0() { } // check base.minus(minusArg) == result - private static void checkMinus(final SortedRanges base, final SortedRanges minusArg, - final SortedRanges result) { + private static void checkMinus(final SortedRanges base, final SortedRanges minusArg, final SortedRanges result) { result.forEachLong((final long v) -> { final String m = "v==" + v; assertTrue(m, base.contains(v)); @@ -2888,18 +2858,17 @@ public void testIntersectRegression0() { final SortedRanges sr0 = rvs2sr(new long[] { 1073741776, -1073741798, 1073741805, -1073741812, 1073741824, -1073741860}); final SortedRanges sr1 = rvs2sr(new long[] { - 1073741793, -1073741816, 1073741818, 1073741821, -1073741822, 1073741824, - -1073741854}); + 1073741793, -1073741816, 1073741818, 1073741821, -1073741822, 1073741824, -1073741854}); final TreeIndexImpl sr2 = sr0.intersectOnNew(sr1); sr2.ixValidate(); } @Test public void testMinusRegression2() { - final SortedRanges sr0 = rvs2sr(new long[] {0, -5, 8, 16, -19, 22, 27, 29, 34, -35, 38, 40, - 45, 48, -50, 53, -55, 60, -106}); - final SortedRanges sr1 = rvs2sr(new long[] {5, 16, 29, 34, -35, 40, 45, 50, 54, 62, 66, 77, - 80, -81, 83, -85, 88, -96, 98, 105}); + final SortedRanges sr0 = + rvs2sr(new long[] {0, -5, 8, 16, -19, 22, 27, 29, 34, -35, 38, 40, 45, 48, -50, 53, -55, 60, -106}); + final SortedRanges sr1 = + rvs2sr(new long[] {5, 16, 29, 34, 
-35, 40, 45, 50, 54, 62, 66, 77, 80, -81, 83, -85, 88, -96, 98, 105}); final TreeIndexImpl r = sr0.deepCopy().remove(sr1); assertNotNull(r); final SortedRanges sr2 = (SortedRanges) r; @@ -3064,8 +3033,7 @@ public void testInsertCoverage1() { } final TreeIndexImpl result = mu.getValue().insertImpl(sr1); if (result instanceof SingleRange) { - mu.setValue( - SortedRanges.makeSingleRange(result.ixFirstKey(), result.ixLastKey())); + mu.setValue(SortedRanges.makeSingleRange(result.ixFirstKey(), result.ixLastKey())); } else { assertTrue(m2, result instanceof SortedRanges); mu.setValue((SortedRanges) result); @@ -3098,7 +3066,7 @@ public void testAddRangeRandomRanges() { final String m2 = m + " && count==" + count; final int space = 11; SortedRanges sr = populateRandom2( - null, rand, count, 0.25, 2, 1, space, 1, space); + null, rand, count, 0.25, 2, 1, space, 1, space); if (rand.nextBoolean()) { sr = sr.tryCompact(4); } @@ -3134,7 +3102,7 @@ public void testAddAroundRangeBoundaries() { final String m2 = m + " && count==" + count; final int space = 11; SortedRanges sr = populateRandom2( - null, rand, count, 0.25, 2, 1, space, 1, space); + null, rand, count, 0.25, 2, 1, space, 1, space); if (rand.nextBoolean()) { sr = sr.tryCompact(4); } @@ -3144,7 +3112,7 @@ public void testAddAroundRangeBoundaries() { break; } final TreeIndexImpl complement = - SortedRanges.makeSingleRange(sr.first(), sr.last()).ixMinusOnNew(sr); + SortedRanges.makeSingleRange(sr.first(), sr.last()).ixMinusOnNew(sr); final Index.RangeIterator riter = complement.ixRangeIterator(); while (riter.hasNext()) { riter.next(); @@ -3219,9 +3187,9 @@ public void testUnionCoverage2() { SortedRanges sr0 = SortedRanges.makeSingleRange(10, 20).addRange(31, 40); SortedRanges sr1 = SortedRanges.makeSingleRange(20, 30).addRange(40, 50); final long expectedCard = - sr0.ixCardinality() + - sr1.ixCardinality() - - sr0.intersectOnNew(sr1).ixCardinality(); + sr0.ixCardinality() + + sr1.ixCardinality() - + 
sr0.intersectOnNew(sr1).ixCardinality(); for (SortedRanges arg : new SortedRanges[] {sr0, sr1}) { SortedRanges tis = (arg == sr0) ? sr1 : sr0; final TreeIndexImpl r = tis.ixUnionOnNew(arg); @@ -3238,9 +3206,9 @@ public void testUnionCoverage3() { SortedRanges sr0 = SortedRanges.makeSingleRange(10, 20).addRange(31, 40); SortedRanges sr1 = SortedRanges.makeSingleRange(20, 30); final long expectedCard = - sr0.ixCardinality() + - sr1.ixCardinality() - - sr0.intersectOnNew(sr1).ixCardinality(); + sr0.ixCardinality() + + sr1.ixCardinality() - + sr0.intersectOnNew(sr1).ixCardinality(); for (SortedRanges arg : new SortedRanges[] {sr0, sr1}) { SortedRanges tis = (arg == sr0) ? sr1 : sr0; final TreeIndexImpl r = tis.ixUnionOnNew(arg); @@ -3257,9 +3225,9 @@ public void testIntersectCoverage() { SortedRanges sr0 = SortedRanges.makeSingleRange(10, 20); SortedRanges sr1 = SortedRanges.makeSingleRange(20, 20); final long expectedCard = - sr0.ixCardinality() + - sr1.ixCardinality() - - sr0.ixUnionOnNew(sr1).ixCardinality(); + sr0.ixCardinality() + + sr1.ixCardinality() - + sr0.ixUnionOnNew(sr1).ixCardinality(); for (SortedRanges arg : new SortedRanges[] {sr0, sr1}) { SortedRanges tis = (arg == sr0) ? 
sr1 : sr0; final TreeIndexImpl r = tis.intersectOnNew(arg); @@ -3320,8 +3288,7 @@ public void testAppendRangeTypeChange() { SortedRangesShort srs0 = new SortedRangesShort(2, offset); SortedRanges sr = srs0.appendRange(offset, offset + 1); sr = sr.remove(offset + 1); - SortedRanges ans = - sr.appendRange(offset + Short.MAX_VALUE + 10, offset + Short.MAX_VALUE + 20); + SortedRanges ans = sr.appendRange(offset + Short.MAX_VALUE + 10, offset + Short.MAX_VALUE + 20); if (ans != null) { ans.validate(); } diff --git a/DB/src/test/java/io/deephaven/libs/primitives/ReplicateTst.java b/DB/src/test/java/io/deephaven/libs/primitives/ReplicateTst.java index 9e208e163db..2f68ba505dc 100644 --- a/DB/src/test/java/io/deephaven/libs/primitives/ReplicateTst.java +++ b/DB/src/test/java/io/deephaven/libs/primitives/ReplicateTst.java @@ -10,13 +10,11 @@ public class ReplicateTst { public static void main(String[] args) throws IOException { - ReplicatePrimitiveCode.charToAllButBoolean(TestCharPrimitives.class, - ReplicatePrimitiveCode.TEST_SRC); + ReplicatePrimitiveCode.charToAllButBoolean(TestCharPrimitives.class, ReplicatePrimitiveCode.TEST_SRC); ReplicatePrimitiveCode.shortToAllIntegralTypes(TestShortNumericPrimitives.class, - ReplicatePrimitiveCode.TEST_SRC); + ReplicatePrimitiveCode.TEST_SRC); ReplicatePrimitiveCode.floatToAllFloatingPoints(TestFloatNumericPrimitives.class, - ReplicatePrimitiveCode.TEST_SRC); - ReplicatePrimitiveCode.floatToAllFloatingPoints(TestFloatFpPrimitives.class, - ReplicatePrimitiveCode.TEST_SRC); + ReplicatePrimitiveCode.TEST_SRC); + ReplicatePrimitiveCode.floatToAllFloatingPoints(TestFloatFpPrimitives.class, ReplicatePrimitiveCode.TEST_SRC); } } diff --git a/DB/src/test/java/io/deephaven/libs/primitives/TestAmbiguity.java b/DB/src/test/java/io/deephaven/libs/primitives/TestAmbiguity.java index a558bdc20a6..0d319d9d0fc 100644 --- a/DB/src/test/java/io/deephaven/libs/primitives/TestAmbiguity.java +++ 
b/DB/src/test/java/io/deephaven/libs/primitives/TestAmbiguity.java @@ -97,11 +97,10 @@ public void testMin() { assertEquals(1d, min(new double[] {1d, 2d, 3d}), 0); assertEquals(1d, min(1d, new Double(2), 3d), 0); - assertTrue(new BigInteger("1") - .equals(min(new BigInteger("2"), new BigInteger("1"), new BigInteger("3")))); + assertTrue(new BigInteger("1").equals(min(new BigInteger("2"), new BigInteger("1"), new BigInteger("3")))); - assertEquals(new BigDecimal("0.5236598874"), min(new BigInteger("2"), 10, 2.0, 5.6f, 5l, - (byte) 3, (short) 1, new BigDecimal("0.5236598874"))); + assertEquals(new BigDecimal("0.5236598874"), + min(new BigInteger("2"), 10, 2.0, 5.6f, 5l, (byte) 3, (short) 1, new BigDecimal("0.5236598874"))); assertEquals("A", min("A", "B")); @@ -111,8 +110,7 @@ public void testMin() { assertEquals(1, min((byte) 1, 2l)); assertEquals(1, min((byte) 1, 2f), 0); assertEquals(1, min((byte) 1, 2d), 0); - assertEquals(QueryConstants.NULL_INT, - min(QueryConstants.NULL_INT, QueryConstants.NULL_BYTE)); + assertEquals(QueryConstants.NULL_INT, min(QueryConstants.NULL_INT, QueryConstants.NULL_BYTE)); assertEquals(26l, min(QueryConstants.NULL_BYTE, 26l)); assertEquals((short) 1, min((short) 1, (byte) 2)); @@ -121,10 +119,8 @@ public void testMin() { assertEquals((short) 1, min((short) 1, 2l)); assertEquals((short) 1, min((short) 1, 2f), 0); assertEquals((short) 1, min((short) 1, 2d), 0); - assertEquals(QueryConstants.NULL_FLOAT, - min(QueryConstants.NULL_FLOAT, QueryConstants.NULL_SHORT), 0d); - assertEquals(QueryConstants.NULL_FLOAT, - min(QueryConstants.NULL_SHORT, QueryConstants.NULL_FLOAT), 0d); + assertEquals(QueryConstants.NULL_FLOAT, min(QueryConstants.NULL_FLOAT, QueryConstants.NULL_SHORT), 0d); + assertEquals(QueryConstants.NULL_FLOAT, min(QueryConstants.NULL_SHORT, QueryConstants.NULL_FLOAT), 0d); assertEquals(26l, min(QueryConstants.NULL_SHORT, 26l), 0d); assertEquals(1, min(1, (byte) 2)); @@ -133,8 +129,7 @@ public void testMin() { assertEquals(1, 
min(1, 2l)); assertEquals(1, min(1, 2f), 0); assertEquals(1, min(1, 2d), 0); - assertEquals(QueryConstants.NULL_INT, - min(QueryConstants.NULL_INT, QueryConstants.NULL_SHORT)); + assertEquals(QueryConstants.NULL_INT, min(QueryConstants.NULL_INT, QueryConstants.NULL_SHORT)); assertEquals(26l, min(QueryConstants.NULL_INT, 26l)); assertEquals(1l, min(1l, (byte) 2)); @@ -143,10 +138,9 @@ public void testMin() { assertEquals(1l, min(1l, 2l)); assertEquals(1l, min(1l, 2f), 0); assertEquals(1l, min(1l, 2d), 0); - assertEquals(QueryConstants.NULL_DOUBLE, - min(QueryConstants.NULL_LONG, QueryConstants.NULL_FLOAT), 0d); + assertEquals(QueryConstants.NULL_DOUBLE, min(QueryConstants.NULL_LONG, QueryConstants.NULL_FLOAT), 0d); assertEquals(((double) QueryConstants.NULL_LONG) - 1d, - min(QueryConstants.NULL_LONG, ((double) QueryConstants.NULL_LONG) - 1d), 0d); + min(QueryConstants.NULL_LONG, ((double) QueryConstants.NULL_LONG) - 1d), 0d); assertEquals(1, min(1f, (byte) 2), 0); assertEquals(1, min(1f, (short) 2), 0); @@ -154,8 +148,7 @@ public void testMin() { assertEquals(1, min(1f, 2l), 0); assertEquals(1, min(1f, 2f), 0); assertEquals(1, min(1f, 2d), 0); - assertEquals(QueryConstants.NULL_DOUBLE, - min(QueryConstants.NULL_INT, QueryConstants.NULL_FLOAT), 0d); + assertEquals(QueryConstants.NULL_DOUBLE, min(QueryConstants.NULL_INT, QueryConstants.NULL_FLOAT), 0d); assertEquals(5l, min(QueryConstants.NULL_FLOAT, 5l), 0d); assertEquals(5l, min(Float.NaN, 5l), 0d); assertEquals(QueryConstants.NULL_DOUBLE, min(Float.NaN, QueryConstants.NULL_LONG), 0d); @@ -167,8 +160,7 @@ public void testMin() { assertEquals(1, min(1d, 2f), 0); assertEquals(1, min(1d, 2d), 0); - assertEquals(QueryConstants.NULL_DOUBLE, - min(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_FLOAT), 0d); + assertEquals(QueryConstants.NULL_DOUBLE, min(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_FLOAT), 0d); assertEquals(5l, min(QueryConstants.NULL_DOUBLE, 5l), 0d); assertEquals(1, min(1, new BigDecimal("2.5"))); @@ 
-187,29 +179,25 @@ public void testMin() { // Both values are near to min values assertEquals(Long.MIN_VALUE + 1, min(Long.MIN_VALUE + 1, -Double.MAX_VALUE + 1d, 0d)); assertEquals(new Double("-1.34758724E20"), - min(Long.MIN_VALUE + 1, new Double("-1.34758724E20"), Long.MAX_VALUE)); + min(Long.MIN_VALUE + 1, new Double("-1.34758724E20"), Long.MAX_VALUE)); assertEquals(new Double("-1.347587244542345673435434E20"), - min(Long.MIN_VALUE + 1, new Double("-1.347587244542345673435434E20"), Long.MAX_VALUE)); - // Positive and negative infinity values (Order matters when both Double.NEGATIVE_INFINITY - // and Float.NEGATIVE_INFINITY are present) - assertEquals(Double.NEGATIVE_INFINITY, min(Double.NEGATIVE_INFINITY, - Float.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, Float.POSITIVE_INFINITY)); + min(Long.MIN_VALUE + 1, new Double("-1.347587244542345673435434E20"), Long.MAX_VALUE)); + // Positive and negative infinity values (Order matters when both Double.NEGATIVE_INFINITY and + // Float.NEGATIVE_INFINITY are present) + assertEquals(Double.NEGATIVE_INFINITY, min(Double.NEGATIVE_INFINITY, Float.NEGATIVE_INFINITY, + Double.POSITIVE_INFINITY, Float.POSITIVE_INFINITY)); assertEquals(Float.NEGATIVE_INFINITY, - min(Float.NEGATIVE_INFINITY, Double.NEGATIVE_INFINITY, Double.NaN, Float.NaN)); + min(Float.NEGATIVE_INFINITY, Double.NEGATIVE_INFINITY, Double.NaN, Float.NaN)); assertEquals(Double.NEGATIVE_INFINITY, min(Long.MIN_VALUE, Double.NEGATIVE_INFINITY, 0d)); assertEquals(Double.NEGATIVE_INFINITY, min(Double.NEGATIVE_INFINITY, Long.MIN_VALUE, 0d)); - assertEquals(Double.NEGATIVE_INFINITY, - min(Long.MIN_VALUE + 1, Double.NEGATIVE_INFINITY, 0d)); - assertEquals(Double.NEGATIVE_INFINITY, - min(Double.NEGATIVE_INFINITY, Long.MIN_VALUE + 1, 0d)); + assertEquals(Double.NEGATIVE_INFINITY, min(Long.MIN_VALUE + 1, Double.NEGATIVE_INFINITY, 0d)); + assertEquals(Double.NEGATIVE_INFINITY, min(Double.NEGATIVE_INFINITY, Long.MIN_VALUE + 1, 0d)); assertEquals(Float.NEGATIVE_INFINITY, 
min(Integer.MIN_VALUE, Float.NEGATIVE_INFINITY), 0d); assertEquals(Float.NEGATIVE_INFINITY, min(Float.NEGATIVE_INFINITY, Integer.MIN_VALUE), 0d); - assertEquals(Float.NEGATIVE_INFINITY, min(Integer.MIN_VALUE + 1, Float.NEGATIVE_INFINITY), - 0d); - assertEquals(Float.NEGATIVE_INFINITY, min(Float.NEGATIVE_INFINITY, Integer.MIN_VALUE + 1), - 0d); + assertEquals(Float.NEGATIVE_INFINITY, min(Integer.MIN_VALUE + 1, Float.NEGATIVE_INFINITY), 0d); + assertEquals(Float.NEGATIVE_INFINITY, min(Float.NEGATIVE_INFINITY, Integer.MIN_VALUE + 1), 0d); assertEquals((int) Math.pow(2, 23), (float) Math.pow(2, 23), 0); // float value is just less than Integer.MAX_VALUE @@ -223,16 +211,15 @@ public void testMin() { assertEquals(1.23456E5f, min(1.23456E5f, 123456), 0d); // Both values are near to min values assertEquals(Integer.MIN_VALUE + 1, min(-Float.MAX_VALUE + 1, Integer.MIN_VALUE + 1), 0d); - assertEquals(new Float("-1.34758724E20"), - min(Integer.MIN_VALUE + 1, new Float("-1.34758724E20")), 0d); + assertEquals(new Float("-1.34758724E20"), min(Integer.MIN_VALUE + 1, new Float("-1.34758724E20")), 0d); assertEquals(new Float("-1.347587244542345673435434E20"), - min(Integer.MIN_VALUE + 1, new Float("-1.347587244542345673435434E20")), 0d); - // Positive and negative infinity values (Order matters when both Double.NEGATIVE_INFINITY - // and Float.NEGATIVE_INFINITY are present) - assertEquals(Double.NEGATIVE_INFINITY, min(Double.NEGATIVE_INFINITY, - Float.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, Float.POSITIVE_INFINITY)); + min(Integer.MIN_VALUE + 1, new Float("-1.347587244542345673435434E20")), 0d); + // Positive and negative infinity values (Order matters when both Double.NEGATIVE_INFINITY and + // Float.NEGATIVE_INFINITY are present) + assertEquals(Double.NEGATIVE_INFINITY, min(Double.NEGATIVE_INFINITY, Float.NEGATIVE_INFINITY, + Double.POSITIVE_INFINITY, Float.POSITIVE_INFINITY)); assertEquals(Float.NEGATIVE_INFINITY, - min(Float.NEGATIVE_INFINITY, Double.NEGATIVE_INFINITY, 
Double.NaN, Float.NaN)); + min(Float.NEGATIVE_INFINITY, Double.NEGATIVE_INFINITY, Double.NaN, Float.NaN)); assertEquals(-123.45d, min(-123.45d, -123l, 0d)); assertEquals(-123.45d, min(-123l, -123.45d, 0d)); @@ -243,8 +230,7 @@ public void testMin() { assertEquals(-0.2d, min(-0.2d, 0l, Long.MAX_VALUE)); assertEquals(-1.2d, min(-1.2d, -1l, Long.MAX_VALUE)); - assertEquals(QueryConstants.NULL_LONG, - min(QueryConstants.NULL_LONG, QueryConstants.NULL_BYTE)); + assertEquals(QueryConstants.NULL_LONG, min(QueryConstants.NULL_LONG, QueryConstants.NULL_BYTE)); assertEquals((double) -123.45f, min(-123.45f, -123), 0d); assertEquals((double) -123.45f, min(-123, -123.45f), 0d); @@ -256,9 +242,7 @@ public void testMin() { assertEquals(5l, min(Double.NaN, 5l), 0d); assertEquals(5d, min(5d, io.deephaven.util.QueryConstants.NULL_LONG), 0d); assertEquals(io.deephaven.util.QueryConstants.NULL_DOUBLE, - min(io.deephaven.util.QueryConstants.NULL_DOUBLE, - io.deephaven.util.QueryConstants.NULL_LONG), - 0d); + min(io.deephaven.util.QueryConstants.NULL_DOUBLE, io.deephaven.util.QueryConstants.NULL_LONG), 0d); assertEquals(NULL_DOUBLE, min(Double.NaN, io.deephaven.util.QueryConstants.NULL_LONG), 0d); // (long, double) @@ -270,11 +254,9 @@ public void testMin() { assertEquals(5l, min(5l, Double.NaN), 0d); assertEquals(5d, min(io.deephaven.util.QueryConstants.NULL_LONG, 5d), 0d); assertEquals(io.deephaven.util.QueryConstants.NULL_DOUBLE, - min(io.deephaven.util.QueryConstants.NULL_LONG, - io.deephaven.util.QueryConstants.NULL_DOUBLE), - 0d); + min(io.deephaven.util.QueryConstants.NULL_LONG, io.deephaven.util.QueryConstants.NULL_DOUBLE), 0d); assertEquals(io.deephaven.util.QueryConstants.NULL_DOUBLE, - min(io.deephaven.util.QueryConstants.NULL_LONG, Double.NaN), 0d); + min(io.deephaven.util.QueryConstants.NULL_LONG, Double.NaN), 0d); } @@ -283,48 +265,41 @@ public void testMax() { assertEquals((byte) 3, max((byte) 1, (byte) 2, (byte) 3)); assertEquals((byte) 3, max(new byte[] {1, 2, 3})); 
assertEquals((byte) 3, max((byte) 1, (byte) 2, (byte) 3)); - assertEquals(QueryConstants.NULL_INT, - max(QueryConstants.NULL_INT, QueryConstants.NULL_BYTE)); + assertEquals(QueryConstants.NULL_INT, max(QueryConstants.NULL_INT, QueryConstants.NULL_BYTE)); assertEquals((short) (QueryConstants.NULL_BYTE - 1), - max(QueryConstants.NULL_BYTE, (short) (QueryConstants.NULL_BYTE - 1))); + max(QueryConstants.NULL_BYTE, (short) (QueryConstants.NULL_BYTE - 1))); assertEquals((short) 3, max((short) 1, (short) 2, (short) 3)); assertEquals((short) 3, max(new short[] {1, 2, 3})); assertEquals((short) 3, max((short) 1, (short) 2, (short) 3)); - assertEquals(QueryConstants.NULL_SHORT, - max(QueryConstants.NULL_SHORT, QueryConstants.NULL_BYTE)); + assertEquals(QueryConstants.NULL_SHORT, max(QueryConstants.NULL_SHORT, QueryConstants.NULL_BYTE)); assertEquals(5, max(QueryConstants.NULL_SHORT, 5)); assertEquals(3, max(1, 2, 3)); assertEquals(3, max(1, 2, 3)); assertEquals(3, max(new int[] {1, 2, 3})); - assertEquals(QueryConstants.NULL_DOUBLE, - max(QueryConstants.NULL_INT, QueryConstants.NULL_FLOAT), 0d); + assertEquals(QueryConstants.NULL_DOUBLE, max(QueryConstants.NULL_INT, QueryConstants.NULL_FLOAT), 0d); assertEquals(5l, max(QueryConstants.NULL_INT, 5l)); assertEquals(3l, max(1l, 2l, 3l)); assertEquals(3l, max(new long[] {1, 2, 3})); assertEquals(3l, max(1l, 2l, 3l)); - assertEquals(QueryConstants.NULL_DOUBLE, - max(QueryConstants.NULL_LONG, QueryConstants.NULL_FLOAT), 0d); + assertEquals(QueryConstants.NULL_DOUBLE, max(QueryConstants.NULL_LONG, QueryConstants.NULL_FLOAT), 0d); assertEquals(5, max(QueryConstants.NULL_LONG, 5)); assertEquals(3f, max(1f, 2f, 3f), 0d); assertEquals(3f, max(new float[] {1, 2, 3}), 0d); assertEquals(3f, max(1f, 2f, 3f), 0d); - assertEquals(QueryConstants.NULL_DOUBLE, - max(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_FLOAT), 0d); + assertEquals(QueryConstants.NULL_DOUBLE, max(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_FLOAT), 0d); 
assertEquals(5l, max(QueryConstants.NULL_FLOAT, 5l), 0d); assertEquals(3d, max(1d, 2d, 3d), 0d); assertEquals(3d, max(new double[] {1d, 2d, 3d}), 0d); assertEquals(3d, max(1d, new Double(2), 3d), 0d); - assertEquals(QueryConstants.NULL_DOUBLE, - max(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_INT), 0d); + assertEquals(QueryConstants.NULL_DOUBLE, max(QueryConstants.NULL_DOUBLE, QueryConstants.NULL_INT), 0d); assertEquals(5d, max(QueryConstants.NULL_DOUBLE, 5d), 0d); - assertTrue(new BigInteger("3") - .equals(max(new BigInteger("1"), new BigInteger("2"), new BigInteger("3")))); + assertTrue(new BigInteger("3").equals(max(new BigInteger("1"), new BigInteger("2"), new BigInteger("3")))); assertEquals((short) 10, max(new BigInteger("2"), 1, 2.0, 5.6f, 5l, (byte) 3, (short) 10)); @@ -376,8 +351,7 @@ public void testMax() { assertEquals(2d, max(1, new Double(2)), 0); // double value is just greater than Long.MAX_VALUE - assertEquals(9.223372036854776807E18, - max(9.223372036854776807E18, Long.MAX_VALUE, Double.NaN)); + assertEquals(9.223372036854776807E18, max(9.223372036854776807E18, Long.MAX_VALUE, Double.NaN)); // double value doesn't have enough precision assertEquals(1234567890L, max(1234567890L, 1.23456E9, 0d)); // double value is slightly higher than the long value @@ -388,30 +362,25 @@ public void testMax() { assertEquals(1234567891l, max(1.23456789E9, 1234567891L, 0d)); // Both values are near to min values assertEquals(Double.MAX_VALUE, max(Long.MAX_VALUE, Double.MAX_VALUE, 0d)); - assertEquals(new Double("1.34758724E20"), - max(Long.MAX_VALUE, new Double("1.34758724E20"), Double.NaN)); + assertEquals(new Double("1.34758724E20"), max(Long.MAX_VALUE, new Double("1.34758724E20"), Double.NaN)); assertEquals(new Double("1.347587244542345673435434E20"), - max(Long.MAX_VALUE, new Double("1.347587244542345673435434E20"), 0d)); - // Positive and negative infinity values (Order matters when both Double.POSITIVE_INFINITY - // and Float.POSITIVE_INFINITY are present) - 
assertEquals(Double.POSITIVE_INFINITY, max(Double.POSITIVE_INFINITY, - Float.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY, Float.NEGATIVE_INFINITY)); + max(Long.MAX_VALUE, new Double("1.347587244542345673435434E20"), 0d)); + // Positive and negative infinity values (Order matters when both Double.POSITIVE_INFINITY and + // Float.POSITIVE_INFINITY are present) + assertEquals(Double.POSITIVE_INFINITY, max(Double.POSITIVE_INFINITY, Float.POSITIVE_INFINITY, + Double.NEGATIVE_INFINITY, Float.NEGATIVE_INFINITY)); assertEquals(Float.POSITIVE_INFINITY, - max(Float.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, Double.NaN, Float.NaN)); + max(Float.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, Double.NaN, Float.NaN)); assertEquals(Double.POSITIVE_INFINITY, max(Long.MAX_VALUE, Double.POSITIVE_INFINITY, 0d)); assertEquals(Double.POSITIVE_INFINITY, max(Double.POSITIVE_INFINITY, Long.MAX_VALUE, 0d)); - assertEquals(Double.POSITIVE_INFINITY, - max(Long.MAX_VALUE - 1, Double.POSITIVE_INFINITY, 0d)); - assertEquals(Double.POSITIVE_INFINITY, - max(Double.POSITIVE_INFINITY, Long.MAX_VALUE - 1, 0d)); + assertEquals(Double.POSITIVE_INFINITY, max(Long.MAX_VALUE - 1, Double.POSITIVE_INFINITY, 0d)); + assertEquals(Double.POSITIVE_INFINITY, max(Double.POSITIVE_INFINITY, Long.MAX_VALUE - 1, 0d)); assertEquals(Float.POSITIVE_INFINITY, max(Integer.MAX_VALUE, Float.POSITIVE_INFINITY), 0d); assertEquals(Float.POSITIVE_INFINITY, max(Float.POSITIVE_INFINITY, Integer.MAX_VALUE), 0d); - assertEquals(Float.POSITIVE_INFINITY, max(Integer.MAX_VALUE - 1, Float.POSITIVE_INFINITY), - 0d); - assertEquals(Float.POSITIVE_INFINITY, max(Float.POSITIVE_INFINITY, Integer.MAX_VALUE - 1), - 0d); + assertEquals(Float.POSITIVE_INFINITY, max(Integer.MAX_VALUE - 1, Float.POSITIVE_INFINITY), 0d); + assertEquals(Float.POSITIVE_INFINITY, max(Float.POSITIVE_INFINITY, Integer.MAX_VALUE - 1), 0d); // float value is just less than Integer.MAX_VALUE assertEquals(Integer.MAX_VALUE, max(2.14748344444E9f, Integer.MAX_VALUE), 0d); 
@@ -425,20 +394,19 @@ public void testMax() { assertEquals(1.23456E5f, max(1.23456E5f, 123456), 0d); // Both values are near to max values assertEquals(Float.MAX_VALUE, max(Float.MAX_VALUE, Integer.MAX_VALUE), 0d); - assertEquals(new Float("1.34758724E20"), - max(Integer.MAX_VALUE - 1, new Float("1.34758724E20")), 0d); + assertEquals(new Float("1.34758724E20"), max(Integer.MAX_VALUE - 1, new Float("1.34758724E20")), 0d); assertEquals(new Float("1.347587244542345673435434E20"), - max(Integer.MAX_VALUE - 1, new Float("1.347587244542345673435434E20")), 0d); + max(Integer.MAX_VALUE - 1, new Float("1.347587244542345673435434E20")), 0d); assertEquals(5f, max(5f, Integer.MIN_VALUE), 0d); assertEquals(5, max(-Float.MAX_VALUE, 5), 0d); assertEquals(QueryConstants.NULL_DOUBLE, max(-Float.MAX_VALUE, Integer.MIN_VALUE), 0d); - // Positive and negative infinity values (Order matters when both Double.NEGATIVE_INFINITY - // and Float.NEGATIVE_INFINITY are present) - assertEquals(Double.POSITIVE_INFINITY, max(Double.POSITIVE_INFINITY, - Float.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, Float.POSITIVE_INFINITY)); + // Positive and negative infinity values (Order matters when both Double.NEGATIVE_INFINITY and + // Float.NEGATIVE_INFINITY are present) + assertEquals(Double.POSITIVE_INFINITY, max(Double.POSITIVE_INFINITY, Float.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, Float.POSITIVE_INFINITY)); assertEquals(Float.POSITIVE_INFINITY, - max(Float.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, Double.NaN, Float.NaN)); + max(Float.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, Double.NaN, Float.NaN)); assertEquals(-123l, max(-123.45d, -123l, Long.MIN_VALUE)); assertEquals(-123l, max(-123l, -123.45d, Long.MIN_VALUE)); @@ -455,9 +423,7 @@ public void testMax() { assertEquals(5l, max(io.deephaven.util.QueryConstants.NULL_DOUBLE, 5l), 0d); assertEquals(5d, max(5d, io.deephaven.util.QueryConstants.NULL_LONG), 0d); assertEquals(io.deephaven.util.QueryConstants.NULL_DOUBLE, - 
max(io.deephaven.util.QueryConstants.NULL_DOUBLE, - io.deephaven.util.QueryConstants.NULL_LONG), - 0d); + max(io.deephaven.util.QueryConstants.NULL_DOUBLE, io.deephaven.util.QueryConstants.NULL_LONG), 0d); // (long, double) assertEquals(9223372036853776384.0d, max(9223372036753776385l, 9223372036853776384.0d), 0d); @@ -467,11 +433,9 @@ public void testMax() { assertEquals(5l, max(5l, io.deephaven.util.QueryConstants.NULL_DOUBLE), 0d); assertEquals(5d, max(io.deephaven.util.QueryConstants.NULL_LONG, 5d), 0d); assertEquals(io.deephaven.util.QueryConstants.NULL_DOUBLE, - max(io.deephaven.util.QueryConstants.NULL_LONG, - io.deephaven.util.QueryConstants.NULL_DOUBLE), - 0d); + max(io.deephaven.util.QueryConstants.NULL_LONG, io.deephaven.util.QueryConstants.NULL_DOUBLE), 0d); assertEquals(io.deephaven.util.QueryConstants.NULL_DOUBLE, - max(io.deephaven.util.QueryConstants.NULL_LONG, Double.NaN), 0d); + max(io.deephaven.util.QueryConstants.NULL_LONG, Double.NaN), 0d); assertEquals(NULL_DOUBLE, max(Double.NaN, io.deephaven.util.QueryConstants.NULL_LONG), 0d); } @@ -508,9 +472,8 @@ public void testSort() { assertArrayEquals(expectedSortedPrimDoubles, sort(new double[] {1d, 2d, 3d}), 0); assertArrayEquals(expectedSortedPrimDoubles, sort(1d, new Double(2), 3d), 0); - assertArrayEquals( - new BigInteger[] {new BigInteger("1"), new BigInteger("2"), new BigInteger("3")}, - sort(new BigInteger("1"), new BigInteger("2"), new BigInteger("3"))); + assertArrayEquals(new BigInteger[] {new BigInteger("1"), new BigInteger("2"), new BigInteger("3")}, + sort(new BigInteger("1"), new BigInteger("2"), new BigInteger("3"))); assertArrayEquals(new Number[] {1, 2d}, sort(2d, 1)); @@ -526,11 +489,9 @@ public void testSortDescending() { assertArrayEquals(expectedSortedPrimBytes, sortDescending((byte) 1, (byte) 2, (byte) 3)); final short[] expectedSortedPrimShorts = new short[] {3, 2, 1}; - assertArrayEquals(expectedSortedPrimShorts, - sortDescending((short) 1, (short) 2, (short) 3)); + 
assertArrayEquals(expectedSortedPrimShorts, sortDescending((short) 1, (short) 2, (short) 3)); assertArrayEquals(expectedSortedPrimShorts, sortDescending(new short[] {1, 2, 3})); - assertArrayEquals(expectedSortedPrimShorts, - sortDescending((short) 1, (short) 2, (short) 3)); + assertArrayEquals(expectedSortedPrimShorts, sortDescending((short) 1, (short) 2, (short) 3)); final int[] expectedSortedPrimInts = new int[] {3, 2, 1}; assertArrayEquals(expectedSortedPrimInts, sortDescending(1, 2, 3)); @@ -553,9 +514,8 @@ public void testSortDescending() { assertArrayEquals(expectedSortedPrimDoubles, sortDescending(new double[] {1d, 2d, 3d}), 0); assertArrayEquals(expectedSortedPrimDoubles, sortDescending(1d, new Double(2), 3d), 0); - assertArrayEquals( - new BigInteger[] {new BigInteger("3"), new BigInteger("2"), new BigInteger("1")}, - sortDescending(new BigInteger("1"), new BigInteger("2"), new BigInteger("3"))); + assertArrayEquals(new BigInteger[] {new BigInteger("3"), new BigInteger("2"), new BigInteger("1")}, + sortDescending(new BigInteger("1"), new BigInteger("2"), new BigInteger("3"))); assertArrayEquals(new Number[] {2d, 1}, sortDescending(2d, 1)); diff --git a/DB/src/test/java/io/deephaven/libs/primitives/TestCasting.java b/DB/src/test/java/io/deephaven/libs/primitives/TestCasting.java index 4487f06aed5..f9e4db57853 100644 --- a/DB/src/test/java/io/deephaven/libs/primitives/TestCasting.java +++ b/DB/src/test/java/io/deephaven/libs/primitives/TestCasting.java @@ -28,14 +28,11 @@ public void testCastLong() { assertNull(castLong((DbIntArray) null)); assertNull(castLong((DbLongArray) null)); + assertEquals(new long[] {1, 2, 3, NULL_LONG}, castLong(new DbByteArrayDirect(new byte[] {1, 2, 3, NULL_BYTE}))); assertEquals(new long[] {1, 2, 3, NULL_LONG}, - castLong(new DbByteArrayDirect(new byte[] {1, 2, 3, NULL_BYTE}))); - assertEquals(new long[] {1, 2, 3, NULL_LONG}, - castLong(new DbShortArrayDirect(new short[] {1, 2, 3, NULL_SHORT}))); - assertEquals(new long[] {1, 2, 
3, NULL_LONG}, - castLong(new DbIntArrayDirect(new int[] {1, 2, 3, NULL_INT}))); - assertEquals(new long[] {1, 2, 3, NULL_LONG}, - castLong(new DbLongArrayDirect(new long[] {1, 2, 3, NULL_LONG}))); + castLong(new DbShortArrayDirect(new short[] {1, 2, 3, NULL_SHORT}))); + assertEquals(new long[] {1, 2, 3, NULL_LONG}, castLong(new DbIntArrayDirect(new int[] {1, 2, 3, NULL_INT}))); + assertEquals(new long[] {1, 2, 3, NULL_LONG}, castLong(new DbLongArrayDirect(new long[] {1, 2, 3, NULL_LONG}))); } public void testCastDouble() { @@ -46,18 +43,12 @@ public void testCastDouble() { assertNull(castDouble((float[]) null)); assertNull(castDouble((double[]) null)); - assertEquals(new double[] {1, 2, 3, NULL_DOUBLE}, - castDouble(new byte[] {1, 2, 3, NULL_BYTE})); - assertEquals(new double[] {1, 2, 3, NULL_DOUBLE}, - castDouble(new short[] {1, 2, 3, NULL_SHORT})); - assertEquals(new double[] {1, 2, 3, NULL_DOUBLE}, - castDouble(new int[] {1, 2, 3, NULL_INT})); - assertEquals(new double[] {1, 2, 3, NULL_DOUBLE}, - castDouble(new long[] {1, 2, 3, NULL_LONG})); - assertEquals(new double[] {1, 2, 3, NULL_DOUBLE}, - castDouble(new float[] {1, 2, 3, NULL_FLOAT})); - assertEquals(new double[] {1, 2, 3, NULL_DOUBLE}, - castDouble(new double[] {1, 2, 3, NULL_DOUBLE})); + assertEquals(new double[] {1, 2, 3, NULL_DOUBLE}, castDouble(new byte[] {1, 2, 3, NULL_BYTE})); + assertEquals(new double[] {1, 2, 3, NULL_DOUBLE}, castDouble(new short[] {1, 2, 3, NULL_SHORT})); + assertEquals(new double[] {1, 2, 3, NULL_DOUBLE}, castDouble(new int[] {1, 2, 3, NULL_INT})); + assertEquals(new double[] {1, 2, 3, NULL_DOUBLE}, castDouble(new long[] {1, 2, 3, NULL_LONG})); + assertEquals(new double[] {1, 2, 3, NULL_DOUBLE}, castDouble(new float[] {1, 2, 3, NULL_FLOAT})); + assertEquals(new double[] {1, 2, 3, NULL_DOUBLE}, castDouble(new double[] {1, 2, 3, NULL_DOUBLE})); assertNull(castDouble((DbByteArray) null)); assertNull(castDouble((DbShortArray) null)); @@ -67,36 +58,34 @@ public void testCastDouble() 
{ assertNull(castDouble((DbDoubleArray) null)); assertEquals(new double[] {1, 2, 3, NULL_DOUBLE}, - castDouble(new DbByteArrayDirect(new byte[] {1, 2, 3, NULL_BYTE}))); + castDouble(new DbByteArrayDirect(new byte[] {1, 2, 3, NULL_BYTE}))); assertEquals(new double[] {1, 2, 3, NULL_DOUBLE}, - castDouble(new DbShortArrayDirect(new short[] {1, 2, 3, NULL_SHORT}))); + castDouble(new DbShortArrayDirect(new short[] {1, 2, 3, NULL_SHORT}))); assertEquals(new double[] {1, 2, 3, NULL_DOUBLE}, - castDouble(new DbIntArrayDirect(new int[] {1, 2, 3, NULL_INT}))); + castDouble(new DbIntArrayDirect(new int[] {1, 2, 3, NULL_INT}))); assertEquals(new double[] {1, 2, 3, NULL_DOUBLE}, - castDouble(new DbLongArrayDirect(new long[] {1, 2, 3, NULL_LONG}))); + castDouble(new DbLongArrayDirect(new long[] {1, 2, 3, NULL_LONG}))); assertEquals(new double[] {1, 2, 3, NULL_DOUBLE}, - castDouble(new DbFloatArrayDirect(new float[] {1, 2, 3, NULL_FLOAT}))); + castDouble(new DbFloatArrayDirect(new float[] {1, 2, 3, NULL_FLOAT}))); assertEquals(new double[] {1, 2, 3, NULL_DOUBLE}, - castDouble(new DbDoubleArrayDirect(new double[] {1, 2, 3, NULL_DOUBLE}))); + castDouble(new DbDoubleArrayDirect(new double[] {1, 2, 3, NULL_DOUBLE}))); } public void testIntToDouble() { assertNull(intToDouble((int[]) null)); - assertEquals(new double[] {1, 2, 3, NULL_DOUBLE}, - intToDouble(new int[] {1, 2, 3, NULL_INT})); + assertEquals(new double[] {1, 2, 3, NULL_DOUBLE}, intToDouble(new int[] {1, 2, 3, NULL_INT})); assertNull(intToDouble((DbIntArray) null)); assertEquals(new double[] {1, 2, 3, NULL_DOUBLE}, - intToDouble(new DbIntArrayDirect(new int[] {1, 2, 3, NULL_INT})).toArray()); + intToDouble(new DbIntArrayDirect(new int[] {1, 2, 3, NULL_INT})).toArray()); } public void testLongToDouble() { assertNull(longToDouble((long[]) null)); - assertEquals(new double[] {1, 2, 3, NULL_DOUBLE}, - longToDouble(new long[] {1, 2, 3, NULL_LONG})); + assertEquals(new double[] {1, 2, 3, NULL_DOUBLE}, longToDouble(new long[] {1, 2, 
3, NULL_LONG})); assertNull(longToDouble((DbLongArray) null)); assertEquals(new double[] {1, 2, 3, NULL_DOUBLE}, - longToDouble(new DbLongArrayDirect(new long[] {1, 2, 3, NULL_LONG})).toArray()); + longToDouble(new DbLongArrayDirect(new long[] {1, 2, 3, NULL_LONG})).toArray()); } } diff --git a/DB/src/test/java/io/deephaven/libs/primitives/TestPrimitiveParseUtil.java b/DB/src/test/java/io/deephaven/libs/primitives/TestPrimitiveParseUtil.java index 51d9d9e70d5..55455c6c740 100644 --- a/DB/src/test/java/io/deephaven/libs/primitives/TestPrimitiveParseUtil.java +++ b/DB/src/test/java/io/deephaven/libs/primitives/TestPrimitiveParseUtil.java @@ -70,10 +70,8 @@ public void testParseUnsignedInt() { } // Test with radix - Require.eq(parseUnsignedInt("1000000", NULL_INT), - "parseUnsignedInt(\"1_000_000\", NULL_INT)", NULL_INT); - Require.eq(parseUnsignedInt("1000000", 10), "parseUnsignedInt(\"1_000_000\", 10)", - 1_000_000); + Require.eq(parseUnsignedInt("1000000", NULL_INT), "parseUnsignedInt(\"1_000_000\", NULL_INT)", NULL_INT); + Require.eq(parseUnsignedInt("1000000", 10), "parseUnsignedInt(\"1_000_000\", 10)", 1_000_000); } public void testParseLong() { @@ -93,8 +91,7 @@ public void testParseLong() { public void testParseUnsignedLong() { Require.eq(parseUnsignedLong(null), "parseUnsignedLong(null)", NULL_LONG); - Require.eq(parseUnsignedLong("4000000000"), "parseUnsignedLong(\"4000000000\")", - 4_000_000_000L); + Require.eq(parseUnsignedLong("4000000000"), "parseUnsignedLong(\"4000000000\")", 4_000_000_000L); try { parseUnsignedLong(""); @@ -109,10 +106,9 @@ public void testParseUnsignedLong() { } // Test with radix - Require.eq(parseUnsignedLong("4000000000", NULL_INT), - "parseUnsignedLong(\"4_000_000_000\", NULL_INT)", NULL_LONG); - Require.eq(parseUnsignedLong("4000000000", 10), "parseUnsignedLong(\"4_000_000_000\", 10)", - 4_000_000_000L); + Require.eq(parseUnsignedLong("4000000000", NULL_INT), "parseUnsignedLong(\"4_000_000_000\", NULL_INT)", + NULL_LONG); + 
Require.eq(parseUnsignedLong("4000000000", 10), "parseUnsignedLong(\"4_000_000_000\", 10)", 4_000_000_000L); } public void testParseDouble() { @@ -140,8 +136,7 @@ public void testParseFloat() { public void testParseBoolean() { Require.eq(parseBoolean(null), "parseBoolean(null)", NULL_BOOLEAN); Require.eq(parseBoolean("true"), "parseBoolean(\"true\")", Boolean.TRUE); - Require.eq(parseBoolean(""), "parseBoolean(\"\")", Boolean.FALSE); // Unlike numbers, no - // Exception for this one + Require.eq(parseBoolean(""), "parseBoolean(\"\")", Boolean.FALSE); // Unlike numbers, no Exception for this one } } diff --git a/DB/src/test/java/io/deephaven/qst/type/DbPrimitiveArrayTest.java b/DB/src/test/java/io/deephaven/qst/type/DbPrimitiveArrayTest.java index a203ff04595..b358a56809b 100644 --- a/DB/src/test/java/io/deephaven/qst/type/DbPrimitiveArrayTest.java +++ b/DB/src/test/java/io/deephaven/qst/type/DbPrimitiveArrayTest.java @@ -17,16 +17,15 @@ public class DbPrimitiveArrayTest { @Test public void types() - throws ClassNotFoundException, NoSuchMethodException, InvocationTargetException, - IllegalAccessException { + throws ClassNotFoundException, NoSuchMethodException, InvocationTargetException, IllegalAccessException { assertThat(DbPrimitiveArrayType.types()).containsExactlyInAnyOrder( - DbBooleanArray.type(), - DbByteArray.type(), - DbCharArray.type(), - DbShortArray.type(), - DbIntArray.type(), - DbLongArray.type(), - DbFloatArray.type(), - DbDoubleArray.type()); + DbBooleanArray.type(), + DbByteArray.type(), + DbCharArray.type(), + DbShortArray.type(), + DbIntArray.type(), + DbLongArray.type(), + DbFloatArray.type(), + DbDoubleArray.type()); } } diff --git a/DB/src/test/java/io/deephaven/stream/TestStreamToTableAdapter.java b/DB/src/test/java/io/deephaven/stream/TestStreamToTableAdapter.java index c6ac3c2f59f..fc9f13caefd 100644 --- a/DB/src/test/java/io/deephaven/stream/TestStreamToTableAdapter.java +++ 
b/DB/src/test/java/io/deephaven/stream/TestStreamToTableAdapter.java @@ -41,15 +41,14 @@ public void tearDown() throws Exception { @Test public void testSimple() { - final TableDefinition tableDefinition = - new TableDefinition(Arrays.asList(String.class, int.class, long.class, double.class), - Arrays.asList("S", "I", "L", "D")); + final TableDefinition tableDefinition = new TableDefinition( + Arrays.asList(String.class, int.class, long.class, double.class), Arrays.asList("S", "I", "L", "D")); final DynamicTable empty = TableTools.newTable(tableDefinition); final StreamPublisher streamPublisher = new DummyStreamPublisher(); final StreamToTableAdapter adapter = - new StreamToTableAdapter(tableDefinition, streamPublisher, LiveTableMonitor.DEFAULT); + new StreamToTableAdapter(tableDefinition, streamPublisher, LiveTableMonitor.DEFAULT); final DynamicTable result = adapter.table(); TstUtils.assertTableEquals(empty, result); @@ -93,8 +92,8 @@ public void testSimple() { TestCase.assertEquals(IndexShiftData.EMPTY, listener.getUpdate().shifted); TestCase.assertEquals(ModifiedColumnSet.EMPTY, listener.getUpdate().modifiedColumnSet); - final Table expect1 = TableTools.newTable(col("S", "Bill", "Ted"), intCol("I", 2, 3), - longCol("L", 4L, 5L), doubleCol("D", Math.PI, Math.E)); + final Table expect1 = TableTools.newTable(col("S", "Bill", "Ted"), intCol("I", 2, 3), longCol("L", 4L, 5L), + doubleCol("D", Math.PI, Math.E)); TstUtils.assertTableEquals(expect1, result); listener.reset(); @@ -165,8 +164,7 @@ public void testSimple() { TestCase.assertEquals(ModifiedColumnSet.EMPTY, listener.getUpdate().modifiedColumnSet); final Table expect2 = TableTools.newTable(col("S", "Ren", "Stimpy", "Jekyll", "Hyde"), - intCol("I", 7, 8, 13, 14), longCol("L", 9, 10, 15, 16), - doubleCol("D", 11.1, 12.2, 17.7, 18.8)); + intCol("I", 7, 8, 13, 14), longCol("L", 9, 10, 15, 16), doubleCol("D", 11.1, 12.2, 17.7, 18.8)); TstUtils.assertTableEquals(expect2, result); chunks[0] = woc = 
WritableObjectChunk.makeWritableChunk(2); @@ -196,8 +194,8 @@ public void testSimple() { TestCase.assertEquals(IndexShiftData.EMPTY, listener.getUpdate().shifted); TestCase.assertEquals(ModifiedColumnSet.EMPTY, listener.getUpdate().modifiedColumnSet); - final Table expect3 = TableTools.newTable(col("S", "Ben", "Jerry"), intCol("I", 19, 20), - longCol("L", 21, 22), doubleCol("D", 23.3, 24.4)); + final Table expect3 = TableTools.newTable(col("S", "Ben", "Jerry"), intCol("I", 19, 20), longCol("L", 21, 22), + doubleCol("D", 23.3, 24.4)); TstUtils.assertTableEquals(expect3, result); listener.reset(); @@ -218,15 +216,14 @@ public void testSimple() { @Test public void testWrappedTypes() { - final TableDefinition tableDefinition = - new TableDefinition(Arrays.asList(String.class, Boolean.class, DBDateTime.class), - Arrays.asList("S", "B", "D")); + final TableDefinition tableDefinition = new TableDefinition( + Arrays.asList(String.class, Boolean.class, DBDateTime.class), Arrays.asList("S", "B", "D")); final DynamicTable empty = TableTools.newTable(tableDefinition); final StreamPublisher streamPublisher = new DummyStreamPublisher(); final StreamToTableAdapter adapter = - new StreamToTableAdapter(tableDefinition, streamPublisher, LiveTableMonitor.DEFAULT); + new StreamToTableAdapter(tableDefinition, streamPublisher, LiveTableMonitor.DEFAULT); final DynamicTable result = adapter.table(); TstUtils.assertTableEquals(empty, result); @@ -238,8 +235,7 @@ public void testWrappedTypes() { TestCase.assertEquals(0, listener.getCount()); final WritableChunk[] chunks = new WritableChunk[3]; - final WritableObjectChunk woc = - WritableObjectChunk.makeWritableChunk(3); + final WritableObjectChunk woc = WritableObjectChunk.makeWritableChunk(3); chunks[0] = woc; woc.set(0, "Collins"); woc.set(1, "Armstrong"); @@ -272,7 +268,7 @@ public void testWrappedTypes() { TestCase.assertEquals(ModifiedColumnSet.EMPTY, listener.getUpdate().modifiedColumnSet); final Table expect1 = 
TableTools.newTable(col("S", "Collins", "Armstrong", "Aldrin"), - col("B", true, false, null), col("D", dt1, dt2, dt3)); + col("B", true, false, null), col("D", dt1, dt2, dt3)); TstUtils.assertTableEquals(expect1, result); listener.reset(); @@ -289,14 +285,14 @@ public void testWrappedTypes() { @Test public void testArrayTypes() { - final TableDefinition tableDefinition = new TableDefinition( - Arrays.asList(String[].class, int[].class), Arrays.asList("SA", "IA")); + final TableDefinition tableDefinition = + new TableDefinition(Arrays.asList(String[].class, int[].class), Arrays.asList("SA", "IA")); final DynamicTable empty = TableTools.newTable(tableDefinition); final StreamPublisher streamPublisher = new DummyStreamPublisher(); final StreamToTableAdapter adapter = - new StreamToTableAdapter(tableDefinition, streamPublisher, LiveTableMonitor.DEFAULT); + new StreamToTableAdapter(tableDefinition, streamPublisher, LiveTableMonitor.DEFAULT); final DynamicTable result = adapter.table(); TstUtils.assertTableEquals(empty, result); @@ -308,13 +304,11 @@ public void testArrayTypes() { TestCase.assertEquals(0, listener.getCount()); final WritableChunk[] chunks = new WritableChunk[2]; - final WritableObjectChunk woc = - WritableObjectChunk.makeWritableChunk(2); + final WritableObjectChunk woc = WritableObjectChunk.makeWritableChunk(2); chunks[0] = woc; woc.set(0, new String[] {"Gagarin", "Tereshkova"}); woc.set(1, new String[] {}); - final WritableObjectChunk wic = - WritableObjectChunk.makeWritableChunk(2); + final WritableObjectChunk wic = WritableObjectChunk.makeWritableChunk(2); chunks[1] = wic; wic.set(0, new int[] {1, 2, 3}); wic.set(1, new int[] {4, 5, 6}); @@ -333,9 +327,8 @@ public void testArrayTypes() { TestCase.assertEquals(ModifiedColumnSet.EMPTY, listener.getUpdate().modifiedColumnSet); final Table expect1 = TableTools.newTable( - col("SA", new String[] {"Gagarin", "Tereshkova"}, - CollectionUtil.ZERO_LENGTH_STRING_ARRAY), - col("IA", new int[] {1, 2, 3}, new 
int[] {4, 5, 6})); + col("SA", new String[] {"Gagarin", "Tereshkova"}, CollectionUtil.ZERO_LENGTH_STRING_ARRAY), + col("IA", new int[] {1, 2, 3}, new int[] {4, 5, 6})); TstUtils.assertTableEquals(expect1, result); listener.reset(); @@ -353,13 +346,13 @@ public void testArrayTypes() { @Test public void testBig() { final TableDefinition tableDefinition = - new TableDefinition(Collections.singletonList(long.class), Arrays.asList("L")); + new TableDefinition(Collections.singletonList(long.class), Arrays.asList("L")); final DynamicTable empty = TableTools.newTable(tableDefinition); final StreamPublisher streamPublisher = new DummyStreamPublisher(); final StreamToTableAdapter adapter = - new StreamToTableAdapter(tableDefinition, streamPublisher, LiveTableMonitor.DEFAULT); + new StreamToTableAdapter(tableDefinition, streamPublisher, LiveTableMonitor.DEFAULT); final DynamicTable result = adapter.table(); TstUtils.assertTableEquals(empty, result); diff --git a/DB/src/test/java/io/deephaven/test/junit4/JUnit4BaseArrayTestCase.java b/DB/src/test/java/io/deephaven/test/junit4/JUnit4BaseArrayTestCase.java index 71865e489dc..d09e809012c 100644 --- a/DB/src/test/java/io/deephaven/test/junit4/JUnit4BaseArrayTestCase.java +++ b/DB/src/test/java/io/deephaven/test/junit4/JUnit4BaseArrayTestCase.java @@ -3,10 +3,9 @@ import io.deephaven.base.testing.BaseArrayTestCase; /** - * When you want to extend BaseArrayTestCase, but you need to use JUnit 4 annotations, - * like @Category or @RunWith(Suite.class), then instead of extending BaseArrayTestCase, you should - * instead create a `JUnit4BaseArrayTestCase field;`, and call setUp/tearDown in @Before/@After - * annotated methods. + * When you want to extend BaseArrayTestCase, but you need to use JUnit 4 annotations, like @Category + * or @RunWith(Suite.class), then instead of extending BaseArrayTestCase, you should instead create a + * `JUnit4BaseArrayTestCase field;`, and call setUp/tearDown in @Before/@After annotated methods. 
* * We could probably implement this as a TestRule instead, but this works fine as-is. */ @@ -21,8 +20,7 @@ public void tearDown() throws Exception { super.tearDown(); } - // We use this class as a field in JUnit 4 tests which should not extend TestCase. This method - // is a no-op test + // We use this class as a field in JUnit 4 tests which should not extend TestCase. This method is a no-op test // method so when we are detected as a JUnit3 test, we do not fail public void testMethodSoThisIsValidJUnit3() {} } diff --git a/DB/src/test/java/io/deephaven/test/junit4/JUnit4LiveTableTestCase.java b/DB/src/test/java/io/deephaven/test/junit4/JUnit4LiveTableTestCase.java index 397193008ca..c4a07fe973d 100644 --- a/DB/src/test/java/io/deephaven/test/junit4/JUnit4LiveTableTestCase.java +++ b/DB/src/test/java/io/deephaven/test/junit4/JUnit4LiveTableTestCase.java @@ -3,10 +3,9 @@ import io.deephaven.db.v2.LiveTableTestCase; /** - * When you want to extend LiveTableTestCase, but you need to use JUnit 4 annotations, - * like @Category or @RunWith(Suite.class), then instead of extending LiveTableTestCase, you should - * instead create a `JUnit4LiveTableTestCase field;`, and call setUp/tearDown in @Before/@After - * annotated methods. + * When you want to extend LiveTableTestCase, but you need to use JUnit 4 annotations, like @Category + * or @RunWith(Suite.class), then instead of extending LiveTableTestCase, you should instead create a + * `JUnit4LiveTableTestCase field;`, and call setUp/tearDown in @Before/@After annotated methods. * * We could probably implement this as a TestRule instead, but this works fine as-is. */ @@ -21,8 +20,7 @@ public void tearDown() throws Exception { super.tearDown(); } - // We use this class as a field in JUnit 4 tests which should not extend TestCase. This method - // is a no-op test + // We use this class as a field in JUnit 4 tests which should not extend TestCase. 
This method is a no-op test // method so when we are detected as a JUnit3 test, we do not fail public void testMethodSoThisIsValidJUnit3() {} diff --git a/DB/src/test/java/io/deephaven/util/calendar/StaticCalendarMethodsTest.java b/DB/src/test/java/io/deephaven/util/calendar/StaticCalendarMethodsTest.java index 430cccea85c..33c46d77816 100644 --- a/DB/src/test/java/io/deephaven/util/calendar/StaticCalendarMethodsTest.java +++ b/DB/src/test/java/io/deephaven/util/calendar/StaticCalendarMethodsTest.java @@ -19,10 +19,8 @@ @Category(OutOfBandTest.class) public class StaticCalendarMethodsTest extends BaseArrayTestCase { private final BusinessCalendar calendar = Calendars.calendar(); - private final DBDateTime time1 = - DBTimeUtils.convertDateTime("2002-01-01T01:00:00.000000000 NY"); - private final DBDateTime time2 = - DBTimeUtils.convertDateTime("2002-01-21T01:00:00.000000000 NY"); + private final DBDateTime time1 = DBTimeUtils.convertDateTime("2002-01-01T01:00:00.000000000 NY"); + private final DBDateTime time2 = DBTimeUtils.convertDateTime("2002-01-21T01:00:00.000000000 NY"); private final String date1 = "2017-08-01"; private final String date2 = "2017-08-05"; @@ -45,19 +43,13 @@ public void testCalendarMethods() { assertEquals(calendar.nextDay(date2), StaticCalendarMethods.nextDay(date2)); assertEquals(calendar.nextDay(date2, 14), StaticCalendarMethods.nextDay(date2, 14)); - assertEquals(calendar.daysInRange(time1, time2), - StaticCalendarMethods.daysInRange(time1, time2)); - assertEquals(calendar.daysInRange(date1, date2), - StaticCalendarMethods.daysInRange(date1, date2)); + assertEquals(calendar.daysInRange(time1, time2), StaticCalendarMethods.daysInRange(time1, time2)); + assertEquals(calendar.daysInRange(date1, date2), StaticCalendarMethods.daysInRange(date1, date2)); - assertEquals(calendar.numberOfDays(time1, time2), - StaticCalendarMethods.numberOfDays(time1, time2)); - assertEquals(calendar.numberOfDays(time1, time2, true), - 
StaticCalendarMethods.numberOfDays(time1, time2, true)); - assertEquals(calendar.numberOfDays(date1, date2), - StaticCalendarMethods.numberOfDays(date1, date2)); - assertEquals(calendar.numberOfDays(date1, date2, true), - StaticCalendarMethods.numberOfDays(date1, date2, true)); + assertEquals(calendar.numberOfDays(time1, time2), StaticCalendarMethods.numberOfDays(time1, time2)); + assertEquals(calendar.numberOfDays(time1, time2, true), StaticCalendarMethods.numberOfDays(time1, time2, true)); + assertEquals(calendar.numberOfDays(date1, date2), StaticCalendarMethods.numberOfDays(date1, date2)); + assertEquals(calendar.numberOfDays(date1, date2, true), StaticCalendarMethods.numberOfDays(date1, date2, true)); assertEquals(calendar.dayOfWeek(), StaticCalendarMethods.dayOfWeek()); @@ -80,74 +72,63 @@ public void testCalendarMethodsTable() { assertEquals(calendar.name(), getVal(emptyTable(1).update("Name = name()"), "Name")); - assertEquals(calendar.currentDay(), - getVal(emptyTable(1).update("currentDay = currentDay()"), "currentDay")); + assertEquals(calendar.currentDay(), getVal(emptyTable(1).update("currentDay = currentDay()"), "currentDay")); assertEquals(calendar.previousDay(), - getVal(emptyTable(1).update("previousDay = previousDay()"), "previousDay")); + getVal(emptyTable(1).update("previousDay = previousDay()"), "previousDay")); assertEquals(calendar.previousDay(4), - getVal(emptyTable(1).update("previousDay = previousDay(4)"), "previousDay")); + getVal(emptyTable(1).update("previousDay = previousDay(4)"), "previousDay")); assertEquals(calendar.previousDay(time1), - getVal(emptyTable(1).update("previousDay = previousDay(time1)"), "previousDay")); + getVal(emptyTable(1).update("previousDay = previousDay(time1)"), "previousDay")); assertEquals(calendar.previousDay(time1, 4), - getVal(emptyTable(1).update("previousDay = previousDay(time1, 4)"), "previousDay")); + getVal(emptyTable(1).update("previousDay = previousDay(time1, 4)"), "previousDay")); 
assertEquals(calendar.previousDay(date1), - getVal(emptyTable(1).update("previousDay = previousDay(date1)"), "previousDay")); + getVal(emptyTable(1).update("previousDay = previousDay(date1)"), "previousDay")); assertEquals(calendar.previousDay(date1, 14), - getVal(emptyTable(1).update("previousDay = previousDay(date1, 14)"), "previousDay")); + getVal(emptyTable(1).update("previousDay = previousDay(date1, 14)"), "previousDay")); - assertEquals(calendar.nextDay(), - getVal(emptyTable(1).update("nextDay = nextDay()"), "nextDay")); - assertEquals(calendar.nextDay(4), - getVal(emptyTable(1).update("nextDay = nextDay(4)"), "nextDay")); - assertEquals(calendar.nextDay(time1), - getVal(emptyTable(1).update("nextDay = nextDay(time1)"), "nextDay")); + assertEquals(calendar.nextDay(), getVal(emptyTable(1).update("nextDay = nextDay()"), "nextDay")); + assertEquals(calendar.nextDay(4), getVal(emptyTable(1).update("nextDay = nextDay(4)"), "nextDay")); + assertEquals(calendar.nextDay(time1), getVal(emptyTable(1).update("nextDay = nextDay(time1)"), "nextDay")); assertEquals(calendar.nextDay(time1, 4), - getVal(emptyTable(1).update("nextDay = nextDay(time1, 4)"), "nextDay")); - assertEquals(calendar.nextDay(date1), - getVal(emptyTable(1).update("nextDay = nextDay(date1)"), "nextDay")); + getVal(emptyTable(1).update("nextDay = nextDay(time1, 4)"), "nextDay")); + assertEquals(calendar.nextDay(date1), getVal(emptyTable(1).update("nextDay = nextDay(date1)"), "nextDay")); assertEquals(calendar.nextDay(date1, 14), - getVal(emptyTable(1).update("nextDay = nextDay(date1, 14)"), "nextDay")); + getVal(emptyTable(1).update("nextDay = nextDay(date1, 14)"), "nextDay")); assertEquals(calendar.daysInRange(time1, time2), - (String[]) getVal(emptyTable(1).update("daysInRange = daysInRange(time1, time2)"), - "daysInRange")); + (String[]) getVal(emptyTable(1).update("daysInRange = daysInRange(time1, time2)"), "daysInRange")); assertEquals(calendar.daysInRange(date1, date2), - (String[]) 
getVal(emptyTable(1).update("daysInRange = daysInRange(date1, date2)"), - "daysInRange")); + (String[]) getVal(emptyTable(1).update("daysInRange = daysInRange(date1, date2)"), "daysInRange")); - assertEquals(calendar.numberOfDays(time1, time2), getVal( - emptyTable(1).update("numberOfDays = numberOfDays(time1, time2)"), "numberOfDays")); + assertEquals(calendar.numberOfDays(time1, time2), + getVal(emptyTable(1).update("numberOfDays = numberOfDays(time1, time2)"), "numberOfDays")); assertEquals(calendar.numberOfDays(time1, time2, true), - getVal(emptyTable(1).update("numberOfDays = numberOfDays(time1, time2, true)"), - "numberOfDays")); - assertEquals(calendar.numberOfDays(date1, date2), getVal( - emptyTable(1).update("numberOfDays = numberOfDays(date1, date2)"), "numberOfDays")); + getVal(emptyTable(1).update("numberOfDays = numberOfDays(time1, time2, true)"), "numberOfDays")); + assertEquals(calendar.numberOfDays(date1, date2), + getVal(emptyTable(1).update("numberOfDays = numberOfDays(date1, date2)"), "numberOfDays")); assertEquals(calendar.numberOfDays(date1, date2, true), - getVal(emptyTable(1).update("numberOfDays = numberOfDays(date1, date2, true)"), - "numberOfDays")); + getVal(emptyTable(1).update("numberOfDays = numberOfDays(date1, date2, true)"), "numberOfDays")); - assertEquals(calendar.dayOfWeek(), - getVal(emptyTable(1).update("dayOfWeek = dayOfWeek()"), "dayOfWeek")); + assertEquals(calendar.dayOfWeek(), getVal(emptyTable(1).update("dayOfWeek = dayOfWeek()"), "dayOfWeek")); assertEquals(calendar.dayOfWeek(time2), - getVal(emptyTable(1).update("dayOfWeek = dayOfWeek(time2)"), "dayOfWeek")); + getVal(emptyTable(1).update("dayOfWeek = dayOfWeek(time2)"), "dayOfWeek")); assertEquals(calendar.dayOfWeek(date2), - getVal(emptyTable(1).update("dayOfWeek = dayOfWeek(date2)"), "dayOfWeek")); + getVal(emptyTable(1).update("dayOfWeek = dayOfWeek(date2)"), "dayOfWeek")); - assertEquals(calendar.timeZone(), - getVal(emptyTable(1).update("timeZone = timeZone()"), 
"timeZone")); + assertEquals(calendar.timeZone(), getVal(emptyTable(1).update("timeZone = timeZone()"), "timeZone")); assertEquals(calendar.isBusinessDay(), - getVal(emptyTable(1).update("isBusinessDay = isBusinessDay()"), "isBusinessDay")); + getVal(emptyTable(1).update("isBusinessDay = isBusinessDay()"), "isBusinessDay")); assertEquals(calendar.isBusinessDay(time2), - getVal(emptyTable(1).update("isBusinessDay = isBusinessDay(time2)"), "isBusinessDay")); + getVal(emptyTable(1).update("isBusinessDay = isBusinessDay(time2)"), "isBusinessDay")); assertEquals(calendar.isBusinessDay(date2), - getVal(emptyTable(1).update("isBusinessDay = isBusinessDay(date2)"), "isBusinessDay")); + getVal(emptyTable(1).update("isBusinessDay = isBusinessDay(date2)"), "isBusinessDay")); } public void testBusinessCalendarMethods() { @@ -159,8 +140,7 @@ public void testBusinessCalendarMethods() { assertEquals(calendar.isBusinessDay(), StaticCalendarMethods.isBusinessDay()); assertEquals(calendar.isBusinessDay(time1), StaticCalendarMethods.isBusinessDay(time1)); assertEquals(calendar.isBusinessDay(date1), StaticCalendarMethods.isBusinessDay(date1)); - assertEquals(calendar.isBusinessDay(LocalDate.now()), - StaticCalendarMethods.isBusinessDay(LocalDate.now())); + assertEquals(calendar.isBusinessDay(LocalDate.now()), StaticCalendarMethods.isBusinessDay(LocalDate.now())); assertEquals(calendar.isBusinessTime(time1), StaticCalendarMethods.isBusinessTime(time1)); @@ -168,164 +148,125 @@ public void testBusinessCalendarMethods() { assertEquals(calendar.previousBusinessDay(), StaticCalendarMethods.previousBusinessDay()); - assertEquals(calendar.previousBusinessDay(12), - StaticCalendarMethods.previousBusinessDay(12)); - assertEquals(calendar.previousBusinessDay(time1), - StaticCalendarMethods.previousBusinessDay(time1)); - assertEquals(calendar.previousBusinessDay(time1, 6), - StaticCalendarMethods.previousBusinessDay(time1, 6)); - assertEquals(calendar.previousBusinessDay(date1), - 
StaticCalendarMethods.previousBusinessDay(date1)); - assertEquals(calendar.previousBusinessDay(date1, 16), - StaticCalendarMethods.previousBusinessDay(date1, 16)); + assertEquals(calendar.previousBusinessDay(12), StaticCalendarMethods.previousBusinessDay(12)); + assertEquals(calendar.previousBusinessDay(time1), StaticCalendarMethods.previousBusinessDay(time1)); + assertEquals(calendar.previousBusinessDay(time1, 6), StaticCalendarMethods.previousBusinessDay(time1, 6)); + assertEquals(calendar.previousBusinessDay(date1), StaticCalendarMethods.previousBusinessDay(date1)); + assertEquals(calendar.previousBusinessDay(date1, 16), StaticCalendarMethods.previousBusinessDay(date1, 16)); - assertEquals(calendar.previousBusinessSchedule(), - StaticCalendarMethods.previousBusinessSchedule()); - assertEquals(calendar.previousBusinessSchedule(12), - StaticCalendarMethods.previousBusinessSchedule(12)); - assertEquals(calendar.previousBusinessSchedule(time1), - StaticCalendarMethods.previousBusinessSchedule(time1)); + assertEquals(calendar.previousBusinessSchedule(), StaticCalendarMethods.previousBusinessSchedule()); + assertEquals(calendar.previousBusinessSchedule(12), StaticCalendarMethods.previousBusinessSchedule(12)); + assertEquals(calendar.previousBusinessSchedule(time1), StaticCalendarMethods.previousBusinessSchedule(time1)); assertEquals(calendar.previousBusinessSchedule(time1, 6), - StaticCalendarMethods.previousBusinessSchedule(time1, 6)); - assertEquals(calendar.previousBusinessSchedule(date1), - StaticCalendarMethods.previousBusinessSchedule(date1)); + StaticCalendarMethods.previousBusinessSchedule(time1, 6)); + assertEquals(calendar.previousBusinessSchedule(date1), StaticCalendarMethods.previousBusinessSchedule(date1)); assertEquals(calendar.previousBusinessSchedule(date1, 16), - StaticCalendarMethods.previousBusinessSchedule(date1, 16)); + StaticCalendarMethods.previousBusinessSchedule(date1, 16)); - assertEquals(calendar.previousNonBusinessDay(), - 
StaticCalendarMethods.previousNonBusinessDay()); - assertEquals(calendar.previousNonBusinessDay(12), - StaticCalendarMethods.previousNonBusinessDay(12)); - assertEquals(calendar.previousNonBusinessDay(time1), - StaticCalendarMethods.previousNonBusinessDay(time1)); - assertEquals(calendar.previousNonBusinessDay(time1, 6), - StaticCalendarMethods.previousNonBusinessDay(time1, 6)); - assertEquals(calendar.previousNonBusinessDay(date1), - StaticCalendarMethods.previousNonBusinessDay(date1)); + assertEquals(calendar.previousNonBusinessDay(), StaticCalendarMethods.previousNonBusinessDay()); + assertEquals(calendar.previousNonBusinessDay(12), StaticCalendarMethods.previousNonBusinessDay(12)); + assertEquals(calendar.previousNonBusinessDay(time1), StaticCalendarMethods.previousNonBusinessDay(time1)); + assertEquals(calendar.previousNonBusinessDay(time1, 6), StaticCalendarMethods.previousNonBusinessDay(time1, 6)); + assertEquals(calendar.previousNonBusinessDay(date1), StaticCalendarMethods.previousNonBusinessDay(date1)); assertEquals(calendar.previousNonBusinessDay(date1, 16), - StaticCalendarMethods.previousNonBusinessDay(date1, 16)); + StaticCalendarMethods.previousNonBusinessDay(date1, 16)); assertEquals(calendar.nextBusinessDay(), StaticCalendarMethods.nextBusinessDay()); assertEquals(calendar.nextBusinessDay(12), StaticCalendarMethods.nextBusinessDay(12)); assertEquals(calendar.nextBusinessDay(time1), StaticCalendarMethods.nextBusinessDay(time1)); - assertEquals(calendar.nextBusinessDay(time1, 6), - StaticCalendarMethods.nextBusinessDay(time1, 6)); + assertEquals(calendar.nextBusinessDay(time1, 6), StaticCalendarMethods.nextBusinessDay(time1, 6)); assertEquals(calendar.nextBusinessDay(date1), StaticCalendarMethods.nextBusinessDay(date1)); - assertEquals(calendar.nextBusinessDay(date1, 16), - StaticCalendarMethods.nextBusinessDay(date1, 16)); + assertEquals(calendar.nextBusinessDay(date1, 16), StaticCalendarMethods.nextBusinessDay(date1, 16)); 
assertEquals(calendar.nextBusinessSchedule(), StaticCalendarMethods.nextBusinessSchedule()); - assertEquals(calendar.nextBusinessSchedule(12), - StaticCalendarMethods.nextBusinessSchedule(12)); - assertEquals(calendar.nextBusinessSchedule(time1), - StaticCalendarMethods.nextBusinessSchedule(time1)); - assertEquals(calendar.nextBusinessSchedule(time1, 6), - StaticCalendarMethods.nextBusinessSchedule(time1, 6)); - assertEquals(calendar.nextBusinessSchedule(date1), - StaticCalendarMethods.nextBusinessSchedule(date1)); - assertEquals(calendar.nextBusinessSchedule(date1, 16), - StaticCalendarMethods.nextBusinessSchedule(date1, 16)); + assertEquals(calendar.nextBusinessSchedule(12), StaticCalendarMethods.nextBusinessSchedule(12)); + assertEquals(calendar.nextBusinessSchedule(time1), StaticCalendarMethods.nextBusinessSchedule(time1)); + assertEquals(calendar.nextBusinessSchedule(time1, 6), StaticCalendarMethods.nextBusinessSchedule(time1, 6)); + assertEquals(calendar.nextBusinessSchedule(date1), StaticCalendarMethods.nextBusinessSchedule(date1)); + assertEquals(calendar.nextBusinessSchedule(date1, 16), StaticCalendarMethods.nextBusinessSchedule(date1, 16)); assertEquals(calendar.nextNonBusinessDay(), StaticCalendarMethods.nextNonBusinessDay()); assertEquals(calendar.nextNonBusinessDay(12), StaticCalendarMethods.nextNonBusinessDay(12)); - assertEquals(calendar.nextNonBusinessDay(time1), - StaticCalendarMethods.nextNonBusinessDay(time1)); - assertEquals(calendar.nextNonBusinessDay(time1, 6), - StaticCalendarMethods.nextNonBusinessDay(time1, 6)); - assertEquals(calendar.nextNonBusinessDay(date1), - StaticCalendarMethods.nextNonBusinessDay(date1)); - assertEquals(calendar.nextNonBusinessDay(date1, 16), - StaticCalendarMethods.nextNonBusinessDay(date1, 16)); + assertEquals(calendar.nextNonBusinessDay(time1), StaticCalendarMethods.nextNonBusinessDay(time1)); + assertEquals(calendar.nextNonBusinessDay(time1, 6), StaticCalendarMethods.nextNonBusinessDay(time1, 6)); + 
assertEquals(calendar.nextNonBusinessDay(date1), StaticCalendarMethods.nextNonBusinessDay(date1)); + assertEquals(calendar.nextNonBusinessDay(date1, 16), StaticCalendarMethods.nextNonBusinessDay(date1, 16)); assertEquals(calendar.businessDaysInRange(time1, time2), - StaticCalendarMethods.businessDaysInRange(time1, time2)); + StaticCalendarMethods.businessDaysInRange(time1, time2)); assertEquals(calendar.businessDaysInRange(date1, date2), - StaticCalendarMethods.businessDaysInRange(date1, date2)); + StaticCalendarMethods.businessDaysInRange(date1, date2)); assertEquals(calendar.nonBusinessDaysInRange(time1, time2), - StaticCalendarMethods.nonBusinessDaysInRange(time1, time2)); + StaticCalendarMethods.nonBusinessDaysInRange(time1, time2)); assertEquals(calendar.nonBusinessDaysInRange(date1, date2), - StaticCalendarMethods.nonBusinessDaysInRange(date1, date2)); + StaticCalendarMethods.nonBusinessDaysInRange(date1, date2)); - assertEquals(calendar.standardBusinessDayLengthNanos(), - StaticCalendarMethods.standardBusinessDayLengthNanos()); + assertEquals(calendar.standardBusinessDayLengthNanos(), StaticCalendarMethods.standardBusinessDayLengthNanos()); - assertEquals(calendar.diffBusinessNanos(time1, time2), - StaticCalendarMethods.diffBusinessNanos(time1, time2)); + assertEquals(calendar.diffBusinessNanos(time1, time2), StaticCalendarMethods.diffBusinessNanos(time1, time2)); assertEquals(calendar.diffNonBusinessNanos(time1, time2), - StaticCalendarMethods.diffNonBusinessNanos(time1, time2)); - assertEquals(calendar.diffBusinessDay(time1, time2), - StaticCalendarMethods.diffBusinessDay(time1, time2)); - assertEquals(calendar.diffNonBusinessDay(time1, time2), - StaticCalendarMethods.diffNonBusinessDay(time1, time2)); - assertEquals(calendar.diffBusinessYear(time1, time2), - StaticCalendarMethods.diffBusinessYear(time1, time2)); + StaticCalendarMethods.diffNonBusinessNanos(time1, time2)); + assertEquals(calendar.diffBusinessDay(time1, time2), 
StaticCalendarMethods.diffBusinessDay(time1, time2)); + assertEquals(calendar.diffNonBusinessDay(time1, time2), StaticCalendarMethods.diffNonBusinessDay(time1, time2)); + assertEquals(calendar.diffBusinessYear(time1, time2), StaticCalendarMethods.diffBusinessYear(time1, time2)); assertEquals(calendar.numberOfBusinessDays(time1, time2), - StaticCalendarMethods.numberOfBusinessDays(time1, time2)); + StaticCalendarMethods.numberOfBusinessDays(time1, time2)); assertEquals(calendar.numberOfBusinessDays(time1, time2, true), - StaticCalendarMethods.numberOfBusinessDays(time1, time2, true)); + StaticCalendarMethods.numberOfBusinessDays(time1, time2, true)); assertEquals(calendar.numberOfBusinessDays(date1, date2), - StaticCalendarMethods.numberOfBusinessDays(date1, date2)); + StaticCalendarMethods.numberOfBusinessDays(date1, date2)); assertEquals(calendar.numberOfBusinessDays(date1, date2, true), - StaticCalendarMethods.numberOfBusinessDays(date1, date2, true)); + StaticCalendarMethods.numberOfBusinessDays(date1, date2, true)); assertEquals(calendar.numberOfNonBusinessDays(time1, time2), - StaticCalendarMethods.numberOfNonBusinessDays(time1, time2)); + StaticCalendarMethods.numberOfNonBusinessDays(time1, time2)); assertEquals(calendar.numberOfNonBusinessDays(time1, time2, true), - StaticCalendarMethods.numberOfNonBusinessDays(time1, time2, true)); + StaticCalendarMethods.numberOfNonBusinessDays(time1, time2, true)); assertEquals(calendar.numberOfNonBusinessDays(date1, date2), - StaticCalendarMethods.numberOfNonBusinessDays(date1, date2)); + StaticCalendarMethods.numberOfNonBusinessDays(date1, date2)); assertEquals(calendar.numberOfNonBusinessDays(date1, date2, true), - StaticCalendarMethods.numberOfNonBusinessDays(date1, date2, true)); + StaticCalendarMethods.numberOfNonBusinessDays(date1, date2, true)); - assertEquals(calendar.fractionOfStandardBusinessDay(), - StaticCalendarMethods.fractionOfStandardBusinessDay()); + assertEquals(calendar.fractionOfStandardBusinessDay(), 
StaticCalendarMethods.fractionOfStandardBusinessDay()); assertEquals(calendar.fractionOfStandardBusinessDay(time1), - StaticCalendarMethods.fractionOfStandardBusinessDay(time1)); + StaticCalendarMethods.fractionOfStandardBusinessDay(time1)); assertEquals(calendar.fractionOfStandardBusinessDay(date1), - StaticCalendarMethods.fractionOfStandardBusinessDay(date1)); + StaticCalendarMethods.fractionOfStandardBusinessDay(date1)); assertEquals(calendar.fractionOfBusinessDayRemaining(time1), - StaticCalendarMethods.fractionOfBusinessDayRemaining(time1)); + StaticCalendarMethods.fractionOfBusinessDayRemaining(time1)); assertEquals(calendar.fractionOfBusinessDayComplete(time1), - StaticCalendarMethods.fractionOfBusinessDayComplete(time1)); + StaticCalendarMethods.fractionOfBusinessDayComplete(time1)); - assertEquals(calendar.isLastBusinessDayOfMonth(), - StaticCalendarMethods.isLastBusinessDayOfMonth()); - assertEquals(calendar.isLastBusinessDayOfMonth(time1), - StaticCalendarMethods.isLastBusinessDayOfMonth(time1)); - assertEquals(calendar.isLastBusinessDayOfMonth(date1), - StaticCalendarMethods.isLastBusinessDayOfMonth(date1)); + assertEquals(calendar.isLastBusinessDayOfMonth(), StaticCalendarMethods.isLastBusinessDayOfMonth()); + assertEquals(calendar.isLastBusinessDayOfMonth(time1), StaticCalendarMethods.isLastBusinessDayOfMonth(time1)); + assertEquals(calendar.isLastBusinessDayOfMonth(date1), StaticCalendarMethods.isLastBusinessDayOfMonth(date1)); - assertEquals(calendar.isLastBusinessDayOfWeek(), - StaticCalendarMethods.isLastBusinessDayOfWeek()); - assertEquals(calendar.isLastBusinessDayOfWeek(time1), - StaticCalendarMethods.isLastBusinessDayOfWeek(time1)); - assertEquals(calendar.isLastBusinessDayOfWeek(date1), - StaticCalendarMethods.isLastBusinessDayOfWeek(date1)); + assertEquals(calendar.isLastBusinessDayOfWeek(), StaticCalendarMethods.isLastBusinessDayOfWeek()); + assertEquals(calendar.isLastBusinessDayOfWeek(time1), 
StaticCalendarMethods.isLastBusinessDayOfWeek(time1)); + assertEquals(calendar.isLastBusinessDayOfWeek(date1), StaticCalendarMethods.isLastBusinessDayOfWeek(date1)); - assertEquals(calendar.getBusinessSchedule(time1), - StaticCalendarMethods.getBusinessSchedule(time1)); - assertEquals(calendar.getBusinessSchedule(date1), - StaticCalendarMethods.getBusinessSchedule(date1)); + assertEquals(calendar.getBusinessSchedule(time1), StaticCalendarMethods.getBusinessSchedule(time1)); + assertEquals(calendar.getBusinessSchedule(date1), StaticCalendarMethods.getBusinessSchedule(date1)); assertEquals(calendar.getBusinessSchedule(LocalDate.now()), - StaticCalendarMethods.getBusinessSchedule(LocalDate.now())); + StaticCalendarMethods.getBusinessSchedule(LocalDate.now())); } public void testBusinessCalendarMethodsTable() { @@ -342,297 +283,226 @@ public void testBusinessCalendarMethodsTable() { assertEquals(calendar.isBusinessDay(), - getVal(emptyTable(1).update("isBusinessDay = isBusinessDay()"), "isBusinessDay")); + getVal(emptyTable(1).update("isBusinessDay = isBusinessDay()"), "isBusinessDay")); assertEquals(calendar.isBusinessDay(time2), - getVal(emptyTable(1).update("isBusinessDay = isBusinessDay(time2)"), "isBusinessDay")); + getVal(emptyTable(1).update("isBusinessDay = isBusinessDay(time2)"), "isBusinessDay")); assertEquals(calendar.isBusinessDay(date2), - getVal(emptyTable(1).update("isBusinessDay = isBusinessDay(date2)"), "isBusinessDay")); - assertEquals(calendar.isBusinessDay(localDate), getVal( - emptyTable(1).update("isBusinessDay = isBusinessDay(localDate)"), "isBusinessDay")); + getVal(emptyTable(1).update("isBusinessDay = isBusinessDay(date2)"), "isBusinessDay")); + assertEquals(calendar.isBusinessDay(localDate), + getVal(emptyTable(1).update("isBusinessDay = isBusinessDay(localDate)"), "isBusinessDay")); - assertEquals(calendar.isBusinessTime(time1), getVal( - emptyTable(1).update("isBusinessTime = isBusinessTime(time1)"), "isBusinessTime")); - 
assertEquals(calendar.isBusinessTime(time2), getVal( - emptyTable(1).update("isBusinessTime = isBusinessTime(time2)"), "isBusinessTime")); + assertEquals(calendar.isBusinessTime(time1), + getVal(emptyTable(1).update("isBusinessTime = isBusinessTime(time1)"), "isBusinessTime")); + assertEquals(calendar.isBusinessTime(time2), + getVal(emptyTable(1).update("isBusinessTime = isBusinessTime(time2)"), "isBusinessTime")); assertEquals(calendar.previousBusinessDay(), - getVal(emptyTable(1).update("previousBusinessDay = previousBusinessDay()"), - "previousBusinessDay")); + getVal(emptyTable(1).update("previousBusinessDay = previousBusinessDay()"), "previousBusinessDay")); assertEquals(calendar.previousBusinessDay(12), - getVal(emptyTable(1).update("previousBusinessDay = previousBusinessDay(12)"), - "previousBusinessDay")); - assertEquals(calendar.previousBusinessDay(time1), - getVal(emptyTable(1).update("previousBusinessDay = previousBusinessDay(time1)"), - "previousBusinessDay")); - assertEquals(calendar.previousBusinessDay(time1, 6), - getVal(emptyTable(1).update("previousBusinessDay = previousBusinessDay(time1, 6)"), - "previousBusinessDay")); - assertEquals(calendar.previousBusinessDay(date1), - getVal(emptyTable(1).update("previousBusinessDay = previousBusinessDay(date1)"), - "previousBusinessDay")); - assertEquals(calendar.previousBusinessDay(date1, 16), - getVal(emptyTable(1).update("previousBusinessDay = previousBusinessDay(date1, 16)"), - "previousBusinessDay")); + getVal(emptyTable(1).update("previousBusinessDay = previousBusinessDay(12)"), "previousBusinessDay")); + assertEquals(calendar.previousBusinessDay(time1), getVal( + emptyTable(1).update("previousBusinessDay = previousBusinessDay(time1)"), "previousBusinessDay")); + assertEquals(calendar.previousBusinessDay(time1, 6), getVal( + emptyTable(1).update("previousBusinessDay = previousBusinessDay(time1, 6)"), "previousBusinessDay")); + assertEquals(calendar.previousBusinessDay(date1), getVal( + 
emptyTable(1).update("previousBusinessDay = previousBusinessDay(date1)"), "previousBusinessDay")); + assertEquals(calendar.previousBusinessDay(date1, 16), getVal( + emptyTable(1).update("previousBusinessDay = previousBusinessDay(date1, 16)"), "previousBusinessDay")); assertEquals(calendar.previousBusinessSchedule(), - getVal(emptyTable(1).update("previousBusinessSchedule = previousBusinessSchedule()"), - "previousBusinessSchedule")); + getVal(emptyTable(1).update("previousBusinessSchedule = previousBusinessSchedule()"), + "previousBusinessSchedule")); assertEquals(calendar.previousBusinessSchedule(12), - getVal(emptyTable(1).update("previousBusinessSchedule = previousBusinessSchedule(12)"), - "previousBusinessSchedule")); + getVal(emptyTable(1).update("previousBusinessSchedule = previousBusinessSchedule(12)"), + "previousBusinessSchedule")); assertEquals(calendar.previousBusinessSchedule(time1), - getVal( - emptyTable(1).update("previousBusinessSchedule = previousBusinessSchedule(time1)"), - "previousBusinessSchedule")); + getVal(emptyTable(1).update("previousBusinessSchedule = previousBusinessSchedule(time1)"), + "previousBusinessSchedule")); assertEquals(calendar.previousBusinessSchedule(time1, 6), - getVal( - emptyTable(1) - .update("previousBusinessSchedule = previousBusinessSchedule(time1, 6)"), - "previousBusinessSchedule")); + getVal(emptyTable(1).update("previousBusinessSchedule = previousBusinessSchedule(time1, 6)"), + "previousBusinessSchedule")); assertEquals(calendar.previousBusinessSchedule(date1), - getVal( - emptyTable(1).update("previousBusinessSchedule = previousBusinessSchedule(date1)"), - "previousBusinessSchedule")); + getVal(emptyTable(1).update("previousBusinessSchedule = previousBusinessSchedule(date1)"), + "previousBusinessSchedule")); assertEquals(calendar.previousBusinessSchedule(date1, 16), - getVal( - emptyTable(1) - .update("previousBusinessSchedule = previousBusinessSchedule(date1, 16)"), - "previousBusinessSchedule")); - - - 
assertEquals(calendar.previousNonBusinessDay(), - getVal(emptyTable(1).update("previousNonBusinessDay = previousNonBusinessDay()"), - "previousNonBusinessDay")); - assertEquals(calendar.previousNonBusinessDay(12), - getVal(emptyTable(1).update("previousNonBusinessDay = previousNonBusinessDay(12)"), - "previousNonBusinessDay")); + getVal(emptyTable(1).update("previousBusinessSchedule = previousBusinessSchedule(date1, 16)"), + "previousBusinessSchedule")); + + + assertEquals(calendar.previousNonBusinessDay(), getVal( + emptyTable(1).update("previousNonBusinessDay = previousNonBusinessDay()"), "previousNonBusinessDay")); + assertEquals(calendar.previousNonBusinessDay(12), getVal( + emptyTable(1).update("previousNonBusinessDay = previousNonBusinessDay(12)"), "previousNonBusinessDay")); assertEquals(calendar.previousNonBusinessDay(time1), - getVal(emptyTable(1).update("previousNonBusinessDay = previousNonBusinessDay(time1)"), - "previousNonBusinessDay")); + getVal(emptyTable(1).update("previousNonBusinessDay = previousNonBusinessDay(time1)"), + "previousNonBusinessDay")); assertEquals(calendar.previousNonBusinessDay(time1, 6), - getVal( - emptyTable(1).update("previousNonBusinessDay = previousNonBusinessDay(time1, 6)"), - "previousNonBusinessDay")); + getVal(emptyTable(1).update("previousNonBusinessDay = previousNonBusinessDay(time1, 6)"), + "previousNonBusinessDay")); assertEquals(calendar.previousNonBusinessDay(date1), - getVal(emptyTable(1).update("previousNonBusinessDay = previousNonBusinessDay(date1)"), - "previousNonBusinessDay")); + getVal(emptyTable(1).update("previousNonBusinessDay = previousNonBusinessDay(date1)"), + "previousNonBusinessDay")); assertEquals(calendar.previousNonBusinessDay(date1, 16), - getVal( - emptyTable(1).update("previousNonBusinessDay = previousNonBusinessDay(date1, 16)"), - "previousNonBusinessDay")); + getVal(emptyTable(1).update("previousNonBusinessDay = previousNonBusinessDay(date1, 16)"), + "previousNonBusinessDay")); 
assertEquals(calendar.nextBusinessDay(), - getVal(emptyTable(1).update("nextBusinessDay = nextBusinessDay()"), "nextBusinessDay")); - assertEquals(calendar.nextBusinessDay(12), getVal( - emptyTable(1).update("nextBusinessDay = nextBusinessDay(12)"), "nextBusinessDay")); - assertEquals(calendar.nextBusinessDay(time1), getVal( - emptyTable(1).update("nextBusinessDay = nextBusinessDay(time1)"), "nextBusinessDay")); + getVal(emptyTable(1).update("nextBusinessDay = nextBusinessDay()"), "nextBusinessDay")); + assertEquals(calendar.nextBusinessDay(12), + getVal(emptyTable(1).update("nextBusinessDay = nextBusinessDay(12)"), "nextBusinessDay")); + assertEquals(calendar.nextBusinessDay(time1), + getVal(emptyTable(1).update("nextBusinessDay = nextBusinessDay(time1)"), "nextBusinessDay")); assertEquals(calendar.nextBusinessDay(time1, 6), - getVal(emptyTable(1).update("nextBusinessDay = nextBusinessDay(time1, 6)"), - "nextBusinessDay")); - assertEquals(calendar.nextBusinessDay(date1), getVal( - emptyTable(1).update("nextBusinessDay = nextBusinessDay(date1)"), "nextBusinessDay")); + getVal(emptyTable(1).update("nextBusinessDay = nextBusinessDay(time1, 6)"), "nextBusinessDay")); + assertEquals(calendar.nextBusinessDay(date1), + getVal(emptyTable(1).update("nextBusinessDay = nextBusinessDay(date1)"), "nextBusinessDay")); assertEquals(calendar.nextBusinessDay(date1, 16), - getVal(emptyTable(1).update("nextBusinessDay = nextBusinessDay(date1, 16)"), - "nextBusinessDay")); + getVal(emptyTable(1).update("nextBusinessDay = nextBusinessDay(date1, 16)"), "nextBusinessDay")); assertEquals(calendar.nextBusinessSchedule(), - getVal(emptyTable(1).update("nextBusinessSchedule = nextBusinessSchedule()"), - "nextBusinessSchedule")); - assertEquals(calendar.nextBusinessSchedule(12), - getVal(emptyTable(1).update("nextBusinessSchedule = nextBusinessSchedule(12)"), - "nextBusinessSchedule")); - assertEquals(calendar.nextBusinessSchedule(time1), - getVal(emptyTable(1).update("nextBusinessSchedule = 
nextBusinessSchedule(time1)"), - "nextBusinessSchedule")); - assertEquals(calendar.nextBusinessSchedule(time1, 6), - getVal(emptyTable(1).update("nextBusinessSchedule = nextBusinessSchedule(time1, 6)"), - "nextBusinessSchedule")); - assertEquals(calendar.nextBusinessSchedule(date1), - getVal(emptyTable(1).update("nextBusinessSchedule = nextBusinessSchedule(date1)"), - "nextBusinessSchedule")); + getVal(emptyTable(1).update("nextBusinessSchedule = nextBusinessSchedule()"), "nextBusinessSchedule")); + assertEquals(calendar.nextBusinessSchedule(12), getVal( + emptyTable(1).update("nextBusinessSchedule = nextBusinessSchedule(12)"), "nextBusinessSchedule")); + assertEquals(calendar.nextBusinessSchedule(time1), getVal( + emptyTable(1).update("nextBusinessSchedule = nextBusinessSchedule(time1)"), "nextBusinessSchedule")); + assertEquals(calendar.nextBusinessSchedule(time1, 6), getVal( + emptyTable(1).update("nextBusinessSchedule = nextBusinessSchedule(time1, 6)"), "nextBusinessSchedule")); + assertEquals(calendar.nextBusinessSchedule(date1), getVal( + emptyTable(1).update("nextBusinessSchedule = nextBusinessSchedule(date1)"), "nextBusinessSchedule")); assertEquals(calendar.nextBusinessSchedule(date1, 16), - getVal(emptyTable(1).update("nextBusinessSchedule = nextBusinessSchedule(date1, 16)"), - "nextBusinessSchedule")); + getVal(emptyTable(1).update("nextBusinessSchedule = nextBusinessSchedule(date1, 16)"), + "nextBusinessSchedule")); assertEquals(calendar.nextNonBusinessDay(), - getVal(emptyTable(1).update("nextNonBusinessDay = nextNonBusinessDay()"), - "nextNonBusinessDay")); + getVal(emptyTable(1).update("nextNonBusinessDay = nextNonBusinessDay()"), "nextNonBusinessDay")); assertEquals(calendar.nextNonBusinessDay(12), - getVal(emptyTable(1).update("nextNonBusinessDay = nextNonBusinessDay(12)"), - "nextNonBusinessDay")); + getVal(emptyTable(1).update("nextNonBusinessDay = nextNonBusinessDay(12)"), "nextNonBusinessDay")); assertEquals(calendar.nextNonBusinessDay(time1), 
- getVal(emptyTable(1).update("nextNonBusinessDay = nextNonBusinessDay(time1)"), - "nextNonBusinessDay")); - assertEquals(calendar.nextNonBusinessDay(time1, 6), - getVal(emptyTable(1).update("nextNonBusinessDay = nextNonBusinessDay(time1, 6)"), - "nextNonBusinessDay")); + getVal(emptyTable(1).update("nextNonBusinessDay = nextNonBusinessDay(time1)"), "nextNonBusinessDay")); + assertEquals(calendar.nextNonBusinessDay(time1, 6), getVal( + emptyTable(1).update("nextNonBusinessDay = nextNonBusinessDay(time1, 6)"), "nextNonBusinessDay")); assertEquals(calendar.nextNonBusinessDay(date1), - getVal(emptyTable(1).update("nextNonBusinessDay = nextNonBusinessDay(date1)"), - "nextNonBusinessDay")); - assertEquals(calendar.nextNonBusinessDay(date1, 16), - getVal(emptyTable(1).update("nextNonBusinessDay = nextNonBusinessDay(date1, 16)"), - "nextNonBusinessDay")); + getVal(emptyTable(1).update("nextNonBusinessDay = nextNonBusinessDay(date1)"), "nextNonBusinessDay")); + assertEquals(calendar.nextNonBusinessDay(date1, 16), getVal( + emptyTable(1).update("nextNonBusinessDay = nextNonBusinessDay(date1, 16)"), "nextNonBusinessDay")); assertEquals(calendar.businessDaysInRange(time1, time2), - (String[]) getVal( - emptyTable(1).update("businessDaysInRange = businessDaysInRange(time1, time2)"), - "businessDaysInRange")); + (String[]) getVal(emptyTable(1).update("businessDaysInRange = businessDaysInRange(time1, time2)"), + "businessDaysInRange")); assertEquals(calendar.businessDaysInRange(date1, date2), - (String[]) getVal( - emptyTable(1).update("businessDaysInRange = businessDaysInRange(date1, date2)"), - "businessDaysInRange")); + (String[]) getVal(emptyTable(1).update("businessDaysInRange = businessDaysInRange(date1, date2)"), + "businessDaysInRange")); assertEquals(calendar.nonBusinessDaysInRange(time1, time2), - (String[]) getVal( - emptyTable(1) - .update("nonBusinessDaysInRange = nonBusinessDaysInRange(time1, time2)"), - "nonBusinessDaysInRange")); + (String[]) 
getVal(emptyTable(1).update("nonBusinessDaysInRange = nonBusinessDaysInRange(time1, time2)"), + "nonBusinessDaysInRange")); assertEquals(calendar.nonBusinessDaysInRange(date1, date2), - (String[]) getVal( - emptyTable(1) - .update("nonBusinessDaysInRange = nonBusinessDaysInRange(date1, date2)"), - "nonBusinessDaysInRange")); + (String[]) getVal(emptyTable(1).update("nonBusinessDaysInRange = nonBusinessDaysInRange(date1, date2)"), + "nonBusinessDaysInRange")); assertEquals(calendar.standardBusinessDayLengthNanos(), - getVal( - emptyTable(1) - .update("standardBusinessDayLengthNanos = standardBusinessDayLengthNanos()"), - "standardBusinessDayLengthNanos")); + getVal(emptyTable(1).update("standardBusinessDayLengthNanos = standardBusinessDayLengthNanos()"), + "standardBusinessDayLengthNanos")); - assertEquals(calendar.diffBusinessNanos(time1, time2), - getVal(emptyTable(1).update("diffBusinessNanos = diffBusinessNanos(time1, time2)"), - "diffBusinessNanos")); + assertEquals(calendar.diffBusinessNanos(time1, time2), getVal( + emptyTable(1).update("diffBusinessNanos = diffBusinessNanos(time1, time2)"), "diffBusinessNanos")); assertEquals(calendar.diffNonBusinessNanos(time1, time2), - getVal( - emptyTable(1).update("diffNonBusinessNanos = diffNonBusinessNanos(time1, time2)"), - "diffNonBusinessNanos")); + getVal(emptyTable(1).update("diffNonBusinessNanos = diffNonBusinessNanos(time1, time2)"), + "diffNonBusinessNanos")); assertEquals(calendar.diffBusinessDay(time1, time2), - getVal(emptyTable(1).update("diffBusinessDay = diffBusinessDay(time1, time2)"), - "diffBusinessDay")); - assertEquals(calendar.diffNonBusinessDay(time1, time2), - getVal(emptyTable(1).update("diffNonBusinessDay = diffNonBusinessDay(time1, time2)"), - "diffNonBusinessDay")); + getVal(emptyTable(1).update("diffBusinessDay = diffBusinessDay(time1, time2)"), "diffBusinessDay")); + assertEquals(calendar.diffNonBusinessDay(time1, time2), getVal( + emptyTable(1).update("diffNonBusinessDay = 
diffNonBusinessDay(time1, time2)"), "diffNonBusinessDay")); assertEquals(calendar.diffBusinessYear(time1, time2), - getVal(emptyTable(1).update("diffBusinessYear = diffBusinessYear(time1, time2)"), - "diffBusinessYear")); + getVal(emptyTable(1).update("diffBusinessYear = diffBusinessYear(time1, time2)"), "diffBusinessYear")); assertEquals(calendar.numberOfBusinessDays(time1, time2), - getVal( - emptyTable(1).update("numberOfBusinessDays = numberOfBusinessDays(time1, time2)"), - "numberOfBusinessDays")); + getVal(emptyTable(1).update("numberOfBusinessDays = numberOfBusinessDays(time1, time2)"), + "numberOfBusinessDays")); assertEquals(calendar.numberOfBusinessDays(time1, time2, true), - getVal( - emptyTable(1) - .update("numberOfBusinessDays = numberOfBusinessDays(time1, time2, true)"), - "numberOfBusinessDays")); + getVal(emptyTable(1).update("numberOfBusinessDays = numberOfBusinessDays(time1, time2, true)"), + "numberOfBusinessDays")); assertEquals(calendar.numberOfBusinessDays(date1, date2), - getVal( - emptyTable(1).update("numberOfBusinessDays = numberOfBusinessDays(date1, date2)"), - "numberOfBusinessDays")); + getVal(emptyTable(1).update("numberOfBusinessDays = numberOfBusinessDays(date1, date2)"), + "numberOfBusinessDays")); assertEquals(calendar.numberOfBusinessDays(date1, date2, true), - getVal( - emptyTable(1) - .update("numberOfBusinessDays = numberOfBusinessDays(date1, date2, true)"), - "numberOfBusinessDays")); + getVal(emptyTable(1).update("numberOfBusinessDays = numberOfBusinessDays(date1, date2, true)"), + "numberOfBusinessDays")); assertEquals(calendar.numberOfNonBusinessDays(time1, time2), - getVal( - emptyTable(1) - .update("numberOfNonBusinessDays = numberOfNonBusinessDays(time1, time2)"), - "numberOfNonBusinessDays")); + getVal(emptyTable(1).update("numberOfNonBusinessDays = numberOfNonBusinessDays(time1, time2)"), + "numberOfNonBusinessDays")); assertEquals(calendar.numberOfNonBusinessDays(time1, time2, true), - getVal( - emptyTable(1).update( 
- "numberOfNonBusinessDays = numberOfNonBusinessDays(time1, time2, true)"), - "numberOfNonBusinessDays")); + getVal(emptyTable(1).update("numberOfNonBusinessDays = numberOfNonBusinessDays(time1, time2, true)"), + "numberOfNonBusinessDays")); assertEquals(calendar.numberOfNonBusinessDays(date1, date2), - getVal( - emptyTable(1) - .update("numberOfNonBusinessDays = numberOfNonBusinessDays(date1, date2)"), - "numberOfNonBusinessDays")); + getVal(emptyTable(1).update("numberOfNonBusinessDays = numberOfNonBusinessDays(date1, date2)"), + "numberOfNonBusinessDays")); assertEquals(calendar.numberOfNonBusinessDays(date1, date2, true), - getVal( - emptyTable(1).update( - "numberOfNonBusinessDays = numberOfNonBusinessDays(date1, date2, true)"), - "numberOfNonBusinessDays")); + getVal(emptyTable(1).update("numberOfNonBusinessDays = numberOfNonBusinessDays(date1, date2, true)"), + "numberOfNonBusinessDays")); assertEquals(calendar.fractionOfStandardBusinessDay(), - getVal( - emptyTable(1) - .update("fractionOfStandardBusinessDay = fractionOfStandardBusinessDay()"), - "fractionOfStandardBusinessDay")); + getVal(emptyTable(1).update("fractionOfStandardBusinessDay = fractionOfStandardBusinessDay()"), + "fractionOfStandardBusinessDay")); assertEquals(calendar.fractionOfStandardBusinessDay(time1), - getVal( - emptyTable(1) - .update("fractionOfStandardBusinessDay = fractionOfStandardBusinessDay(time1)"), - "fractionOfStandardBusinessDay")); + getVal(emptyTable(1).update("fractionOfStandardBusinessDay = fractionOfStandardBusinessDay(time1)"), + "fractionOfStandardBusinessDay")); assertEquals(calendar.fractionOfStandardBusinessDay(date1), - getVal( - emptyTable(1) - .update("fractionOfStandardBusinessDay = fractionOfStandardBusinessDay(date1)"), - "fractionOfStandardBusinessDay")); + getVal(emptyTable(1).update("fractionOfStandardBusinessDay = fractionOfStandardBusinessDay(date1)"), + "fractionOfStandardBusinessDay")); assertEquals(calendar.fractionOfBusinessDayRemaining(time1), - 
getVal( - emptyTable(1).update( - "fractionOfBusinessDayRemaining = fractionOfBusinessDayRemaining(time1)"), - "fractionOfBusinessDayRemaining")); + getVal(emptyTable(1).update("fractionOfBusinessDayRemaining = fractionOfBusinessDayRemaining(time1)"), + "fractionOfBusinessDayRemaining")); assertEquals(calendar.fractionOfBusinessDayComplete(time1), - getVal( - emptyTable(1) - .update("fractionOfBusinessDayComplete = fractionOfBusinessDayComplete(time1)"), - "fractionOfBusinessDayComplete")); + getVal(emptyTable(1).update("fractionOfBusinessDayComplete = fractionOfBusinessDayComplete(time1)"), + "fractionOfBusinessDayComplete")); assertEquals(calendar.isLastBusinessDayOfMonth(), - getVal(emptyTable(1).update("isLastBusinessDayOfMonth = isLastBusinessDayOfMonth()"), - "isLastBusinessDayOfMonth")); + getVal(emptyTable(1).update("isLastBusinessDayOfMonth = isLastBusinessDayOfMonth()"), + "isLastBusinessDayOfMonth")); assertEquals(calendar.isLastBusinessDayOfMonth(time1), - getVal( - emptyTable(1).update("isLastBusinessDayOfMonth = isLastBusinessDayOfMonth(time1)"), - "isLastBusinessDayOfMonth")); + getVal(emptyTable(1).update("isLastBusinessDayOfMonth = isLastBusinessDayOfMonth(time1)"), + "isLastBusinessDayOfMonth")); assertEquals(calendar.isLastBusinessDayOfMonth(date1), - getVal( - emptyTable(1).update("isLastBusinessDayOfMonth = isLastBusinessDayOfMonth(date1)"), - "isLastBusinessDayOfMonth")); + getVal(emptyTable(1).update("isLastBusinessDayOfMonth = isLastBusinessDayOfMonth(date1)"), + "isLastBusinessDayOfMonth")); assertEquals(calendar.isLastBusinessDayOfWeek(), - getVal(emptyTable(1).update("isLastBusinessDayOfWeek = isLastBusinessDayOfWeek()"), - "isLastBusinessDayOfWeek")); + getVal(emptyTable(1).update("isLastBusinessDayOfWeek = isLastBusinessDayOfWeek()"), + "isLastBusinessDayOfWeek")); assertEquals(calendar.isLastBusinessDayOfWeek(time1), - getVal(emptyTable(1).update("isLastBusinessDayOfWeek = isLastBusinessDayOfWeek(time1)"), - 
"isLastBusinessDayOfWeek")); + getVal(emptyTable(1).update("isLastBusinessDayOfWeek = isLastBusinessDayOfWeek(time1)"), + "isLastBusinessDayOfWeek")); assertEquals(calendar.isLastBusinessDayOfWeek(date1), - getVal(emptyTable(1).update("isLastBusinessDayOfWeek = isLastBusinessDayOfWeek(date1)"), - "isLastBusinessDayOfWeek")); - - - assertEquals(calendar.getBusinessSchedule(time1), - getVal(emptyTable(1).update("getBusinessSchedule = getBusinessSchedule(time1)"), - "getBusinessSchedule")); - assertEquals(calendar.getBusinessSchedule(date1), - getVal(emptyTable(1).update("getBusinessSchedule = getBusinessSchedule(date1)"), - "getBusinessSchedule")); - assertEquals(calendar.getBusinessSchedule(localDate), - getVal(emptyTable(1).update("getBusinessSchedule = getBusinessSchedule(localDate)"), - "getBusinessSchedule")); + getVal(emptyTable(1).update("isLastBusinessDayOfWeek = isLastBusinessDayOfWeek(date1)"), + "isLastBusinessDayOfWeek")); + + + assertEquals(calendar.getBusinessSchedule(time1), getVal( + emptyTable(1).update("getBusinessSchedule = getBusinessSchedule(time1)"), "getBusinessSchedule")); + assertEquals(calendar.getBusinessSchedule(date1), getVal( + emptyTable(1).update("getBusinessSchedule = getBusinessSchedule(date1)"), "getBusinessSchedule")); + assertEquals(calendar.getBusinessSchedule(localDate), getVal( + emptyTable(1).update("getBusinessSchedule = getBusinessSchedule(localDate)"), "getBusinessSchedule")); } diff --git a/DB/src/test/java/io/deephaven/util/calendar/TestBusinessSchedule.java b/DB/src/test/java/io/deephaven/util/calendar/TestBusinessSchedule.java index e5f688483a1..83ffe405cc1 100644 --- a/DB/src/test/java/io/deephaven/util/calendar/TestBusinessSchedule.java +++ b/DB/src/test/java/io/deephaven/util/calendar/TestBusinessSchedule.java @@ -41,20 +41,15 @@ public void testBusinessSchedule() { assertEquals(DBTimeUtils.HOUR, single.getLOBD()); assertEquals(DBTimeUtils.HOUR, single.getLengthOfBusinessDay()); assertTrue(single.isBusinessDay()); - 
assertTrue( - single.isBusinessTime(DBTimeUtils.convertDateTime("2017-03-11T10:00:00.000000000 NY"))); - assertTrue( - single.isBusinessTime(DBTimeUtils.convertDateTime("2017-03-11T10:15:00.000000000 NY"))); - assertTrue( - single.isBusinessTime(DBTimeUtils.convertDateTime("2017-03-11T11:00:00.000000000 NY"))); - assertFalse( - single.isBusinessTime(DBTimeUtils.convertDateTime("2017-03-11T11:10:00.000000000 NY"))); - assertEquals(0L, single - .businessTimeElapsed(DBTimeUtils.convertDateTime("2017-03-11T01:00:00.000000000 NY"))); - assertEquals(DBTimeUtils.MINUTE * 30, single - .businessTimeElapsed(DBTimeUtils.convertDateTime("2017-03-11T10:30:00.000000000 NY"))); - assertEquals(DBTimeUtils.HOUR, single - .businessTimeElapsed(DBTimeUtils.convertDateTime("2017-03-11T13:00:00.000000000 NY"))); + assertTrue(single.isBusinessTime(DBTimeUtils.convertDateTime("2017-03-11T10:00:00.000000000 NY"))); + assertTrue(single.isBusinessTime(DBTimeUtils.convertDateTime("2017-03-11T10:15:00.000000000 NY"))); + assertTrue(single.isBusinessTime(DBTimeUtils.convertDateTime("2017-03-11T11:00:00.000000000 NY"))); + assertFalse(single.isBusinessTime(DBTimeUtils.convertDateTime("2017-03-11T11:10:00.000000000 NY"))); + assertEquals(0L, single.businessTimeElapsed(DBTimeUtils.convertDateTime("2017-03-11T01:00:00.000000000 NY"))); + assertEquals(DBTimeUtils.MINUTE * 30, + single.businessTimeElapsed(DBTimeUtils.convertDateTime("2017-03-11T10:30:00.000000000 NY"))); + assertEquals(DBTimeUtils.HOUR, + single.businessTimeElapsed(DBTimeUtils.convertDateTime("2017-03-11T13:00:00.000000000 NY"))); // multi period @@ -67,24 +62,18 @@ public void testBusinessSchedule() { assertEquals(DBTimeUtils.HOUR * 6, multi.getLOBD()); assertEquals(DBTimeUtils.HOUR * 6, multi.getLengthOfBusinessDay()); assertTrue(multi.isBusinessDay()); - assertTrue( - multi.isBusinessTime(DBTimeUtils.convertDateTime("2017-03-11T10:00:00.000000000 NY"))); - assertTrue( - 
multi.isBusinessTime(DBTimeUtils.convertDateTime("2017-03-11T10:15:00.000000000 NY"))); - assertTrue( - multi.isBusinessTime(DBTimeUtils.convertDateTime("2017-03-11T11:00:00.000000000 NY"))); - assertFalse( - multi.isBusinessTime(DBTimeUtils.convertDateTime("2017-03-11T11:10:00.000000000 NY"))); - assertTrue( - multi.isBusinessTime(DBTimeUtils.convertDateTime("2017-03-11T12:10:00.000000000 NY"))); - assertEquals(0L, multi - .businessTimeElapsed(DBTimeUtils.convertDateTime("2017-03-11T01:00:00.000000000 NY"))); - assertEquals(DBTimeUtils.MINUTE * 30, multi - .businessTimeElapsed(DBTimeUtils.convertDateTime("2017-03-11T10:30:00.000000000 NY"))); - assertEquals(DBTimeUtils.HOUR * 2, multi - .businessTimeElapsed(DBTimeUtils.convertDateTime("2017-03-11T13:00:00.000000000 NY"))); - assertEquals(DBTimeUtils.HOUR * 2, multi - .businessTimeElapsed(DBTimeUtils.convertDateTime("2017-03-11T13:00:00.000000000 NY"))); + assertTrue(multi.isBusinessTime(DBTimeUtils.convertDateTime("2017-03-11T10:00:00.000000000 NY"))); + assertTrue(multi.isBusinessTime(DBTimeUtils.convertDateTime("2017-03-11T10:15:00.000000000 NY"))); + assertTrue(multi.isBusinessTime(DBTimeUtils.convertDateTime("2017-03-11T11:00:00.000000000 NY"))); + assertFalse(multi.isBusinessTime(DBTimeUtils.convertDateTime("2017-03-11T11:10:00.000000000 NY"))); + assertTrue(multi.isBusinessTime(DBTimeUtils.convertDateTime("2017-03-11T12:10:00.000000000 NY"))); + assertEquals(0L, multi.businessTimeElapsed(DBTimeUtils.convertDateTime("2017-03-11T01:00:00.000000000 NY"))); + assertEquals(DBTimeUtils.MINUTE * 30, + multi.businessTimeElapsed(DBTimeUtils.convertDateTime("2017-03-11T10:30:00.000000000 NY"))); + assertEquals(DBTimeUtils.HOUR * 2, + multi.businessTimeElapsed(DBTimeUtils.convertDateTime("2017-03-11T13:00:00.000000000 NY"))); + assertEquals(DBTimeUtils.HOUR * 2, + multi.businessTimeElapsed(DBTimeUtils.convertDateTime("2017-03-11T13:00:00.000000000 NY"))); final BusinessSchedule multi2 = new BusinessSchedule(period2, 
period1); assertEquals(new BusinessPeriod[] {period1, period2}, multi2.getBusinessPeriods()); @@ -95,24 +84,18 @@ public void testBusinessSchedule() { assertEquals(DBTimeUtils.HOUR * 6, multi2.getLOBD()); assertEquals(DBTimeUtils.HOUR * 6, multi2.getLengthOfBusinessDay()); assertTrue(multi2.isBusinessDay()); - assertTrue( - multi2.isBusinessTime(DBTimeUtils.convertDateTime("2017-03-11T10:00:00.000000000 NY"))); - assertTrue( - multi2.isBusinessTime(DBTimeUtils.convertDateTime("2017-03-11T10:15:00.000000000 NY"))); - assertTrue( - multi2.isBusinessTime(DBTimeUtils.convertDateTime("2017-03-11T11:00:00.000000000 NY"))); - assertFalse( - multi2.isBusinessTime(DBTimeUtils.convertDateTime("2017-03-11T11:10:00.000000000 NY"))); - assertTrue( - multi2.isBusinessTime(DBTimeUtils.convertDateTime("2017-03-11T12:10:00.000000000 NY"))); - assertEquals(0L, multi2 - .businessTimeElapsed(DBTimeUtils.convertDateTime("2017-03-11T01:00:00.000000000 NY"))); - assertEquals(DBTimeUtils.MINUTE * 30, multi2 - .businessTimeElapsed(DBTimeUtils.convertDateTime("2017-03-11T10:30:00.000000000 NY"))); - assertEquals(DBTimeUtils.HOUR * 2, multi2 - .businessTimeElapsed(DBTimeUtils.convertDateTime("2017-03-11T13:00:00.000000000 NY"))); - assertEquals(DBTimeUtils.HOUR * 2, multi2 - .businessTimeElapsed(DBTimeUtils.convertDateTime("2017-03-11T13:00:00.000000000 NY"))); + assertTrue(multi2.isBusinessTime(DBTimeUtils.convertDateTime("2017-03-11T10:00:00.000000000 NY"))); + assertTrue(multi2.isBusinessTime(DBTimeUtils.convertDateTime("2017-03-11T10:15:00.000000000 NY"))); + assertTrue(multi2.isBusinessTime(DBTimeUtils.convertDateTime("2017-03-11T11:00:00.000000000 NY"))); + assertFalse(multi2.isBusinessTime(DBTimeUtils.convertDateTime("2017-03-11T11:10:00.000000000 NY"))); + assertTrue(multi2.isBusinessTime(DBTimeUtils.convertDateTime("2017-03-11T12:10:00.000000000 NY"))); + assertEquals(0L, multi2.businessTimeElapsed(DBTimeUtils.convertDateTime("2017-03-11T01:00:00.000000000 NY"))); + 
assertEquals(DBTimeUtils.MINUTE * 30, + multi2.businessTimeElapsed(DBTimeUtils.convertDateTime("2017-03-11T10:30:00.000000000 NY"))); + assertEquals(DBTimeUtils.HOUR * 2, + multi2.businessTimeElapsed(DBTimeUtils.convertDateTime("2017-03-11T13:00:00.000000000 NY"))); + assertEquals(DBTimeUtils.HOUR * 2, + multi2.businessTimeElapsed(DBTimeUtils.convertDateTime("2017-03-11T13:00:00.000000000 NY"))); } } diff --git a/DB/src/test/java/io/deephaven/util/calendar/TestDefaultBusinessCalendar.java b/DB/src/test/java/io/deephaven/util/calendar/TestDefaultBusinessCalendar.java index b88105a3aed..350cb8df346 100644 --- a/DB/src/test/java/io/deephaven/util/calendar/TestDefaultBusinessCalendar.java +++ b/DB/src/test/java/io/deephaven/util/calendar/TestDefaultBusinessCalendar.java @@ -33,27 +33,26 @@ public void setUp() throws Exception { testCal = File.createTempFile("Test", ".calendar"); final FileWriter fw = new FileWriter(testCal); fw.write("\n" + - "\n" + - "\n" + - " TEST\n" + - " TZ_NY\n" + - " en\n" + - " US\n" + - " \n" + - " 09:30,16:00\n" + - " Saturday\n" + - " Sunday\n" + - " \n" + - ""); + " ~ Copyright (c) 2016-2021 Deephaven Data Labs and Patent Pending\n" + + " -->\n" + + "\n" + + "\n" + + " TEST\n" + + " TZ_NY\n" + + " en\n" + + " US\n" + + " \n" + + " 09:30,16:00\n" + + " Saturday\n" + + " Sunday\n" + + " \n" + + ""); fw.flush(); fw.close(); - test = new DefaultBusinessCalendar( - DefaultBusinessCalendar.constructCalendarElements(testCal)) { + test = new DefaultBusinessCalendar(DefaultBusinessCalendar.constructCalendarElements(testCal)) { @Override public String currentDay() { return curDay; @@ -379,21 +378,17 @@ public void testNumberOfDays() { startDate = null; assertEquals(USNYSE.numberOfDays(startDate, endDate), QueryConstants.NULL_INT); assertEquals(USNYSE.numberOfBusinessDays(startDate, endDate), QueryConstants.NULL_INT); - assertEquals(USNYSE.numberOfBusinessDays(startDate, endDate, true), - QueryConstants.NULL_INT); + 
assertEquals(USNYSE.numberOfBusinessDays(startDate, endDate, true), QueryConstants.NULL_INT); assertEquals(USNYSE.numberOfNonBusinessDays(startDate, endDate), QueryConstants.NULL_INT); - assertEquals(USNYSE.numberOfNonBusinessDays(startDate, endDate, true), - QueryConstants.NULL_INT); + assertEquals(USNYSE.numberOfNonBusinessDays(startDate, endDate, true), QueryConstants.NULL_INT); startDate = DBTimeUtils.convertDateTime("2014-02-18T01:00:00.000000000 NY"); endDate = null; assertEquals(USNYSE.numberOfDays(startDate, endDate), QueryConstants.NULL_INT); assertEquals(USNYSE.numberOfBusinessDays(startDate, endDate), QueryConstants.NULL_INT); - assertEquals(USNYSE.numberOfBusinessDays(startDate, endDate, true), - QueryConstants.NULL_INT); + assertEquals(USNYSE.numberOfBusinessDays(startDate, endDate, true), QueryConstants.NULL_INT); assertEquals(USNYSE.numberOfNonBusinessDays(startDate, endDate), QueryConstants.NULL_INT); - assertEquals(USNYSE.numberOfNonBusinessDays(startDate, endDate, true), - QueryConstants.NULL_INT); + assertEquals(USNYSE.numberOfNonBusinessDays(startDate, endDate, true), QueryConstants.NULL_INT); startDate = DBTimeUtils.convertDateTime("2014-02-18T01:00:00.000000000 NY"); endDate = DBTimeUtils.convertDateTime("2017-02-18T01:00:00.000000000 NY"); @@ -511,8 +506,7 @@ public void testIsBusinessDay() { } public void testIsBusinessTime() { - DBDateTime businessDayNotTime = - DBTimeUtils.convertDateTime("2016-08-31T01:00:00.000000000 NY"); + DBDateTime businessDayNotTime = DBTimeUtils.convertDateTime("2016-08-31T01:00:00.000000000 NY"); DBDateTime halfDayTime = DBTimeUtils.convertDateTime("2014-07-03T12:00:00.000000000 NY"); DBDateTime holiday = DBTimeUtils.convertDateTime("2002-01-01T01:00:00.000000000 NY"); DBDateTime holiday2 = DBTimeUtils.convertDateTime("2002-01-21T01:00:00.000000000 NY"); @@ -522,8 +516,7 @@ public void testIsBusinessTime() { assertFalse(USNYSE.isBusinessTime(holiday)); assertFalse(USNYSE.isBusinessTime(holiday2)); - DBDateTime 
businessDayTime = - DBTimeUtils.convertDateTime("2016-08-31T01:00:00.000000000 JP"); + DBDateTime businessDayTime = DBTimeUtils.convertDateTime("2016-08-31T01:00:00.000000000 JP"); halfDayTime = DBTimeUtils.convertDateTime("2006-01-04T11:00:00.000000000 JP"); holiday = DBTimeUtils.convertDateTime("2006-01-02T01:00:00.000000000 JP"); holiday2 = DBTimeUtils.convertDateTime("2007-12-23T01:00:00.000000000 JP"); @@ -592,15 +585,14 @@ public void testNextBusinessDay() { assertEquals(USNYSE.nextBusinessDay(day1, 2), "2016-09-02"); assertEquals(JPOSE.nextBusinessDay(day1JP, 2), "2016-09-02"); - assertEquals(USNYSE.nextBusinessDay( - DBTimeUtils.convertDateTime("2016-09-02T01:00:00.000000000 NY"), -2), "2016-08-31"); - assertEquals(JPOSE.nextBusinessDay( - DBTimeUtils.convertDateTime("2016-09-02T01:00:00.000000000 JP"), -2), "2016-08-31"); + assertEquals(USNYSE.nextBusinessDay(DBTimeUtils.convertDateTime("2016-09-02T01:00:00.000000000 NY"), -2), + "2016-08-31"); + assertEquals(JPOSE.nextBusinessDay(DBTimeUtils.convertDateTime("2016-09-02T01:00:00.000000000 JP"), -2), + "2016-08-31"); - assertEquals(USNYSE.nextBusinessDay( - DBTimeUtils.convertDateTime("2016-08-30T01:00:00.000000000 NY"), 0), "2016-08-30"); - assertNull(USNYSE - .nextBusinessDay(DBTimeUtils.convertDateTime("2016-08-28T01:00:00.000000000 NY"), 0)); + assertEquals(USNYSE.nextBusinessDay(DBTimeUtils.convertDateTime("2016-08-30T01:00:00.000000000 NY"), 0), + "2016-08-30"); + assertNull(USNYSE.nextBusinessDay(DBTimeUtils.convertDateTime("2016-08-28T01:00:00.000000000 NY"), 0)); // leap day day1 = DBTimeUtils.convertDateTime("2016-02-28T01:00:00.000000000 NY"); @@ -702,82 +694,57 @@ public void testNextBusinessSchedule() { DBDateTime day1 = DBTimeUtils.convertDateTime("2016-08-31T01:00:00.000000000 NY"); DBDateTime day1JP = DBTimeUtils.convertDateTime("2016-08-31T01:00:00.000000000 JP"); String day2 = "2016-09-01"; - assertEquals(USNYSE.nextBusinessSchedule(day1).getSOBD().toDateString(DBTimeZone.TZ_NY), - day2); 
- assertEquals(JPOSE.nextBusinessSchedule(day1JP).getSOBD().toDateString(DBTimeZone.TZ_JP), - day2); - - assertEquals(USNYSE.nextBusinessSchedule(day1, 2).getSOBD().toDateString(DBTimeZone.TZ_NY), - "2016-09-02"); - assertEquals(JPOSE.nextBusinessSchedule(day1JP, 2).getSOBD().toDateString(DBTimeZone.TZ_JP), - "2016-09-02"); - - assertEquals( - USNYSE - .nextBusinessSchedule( - DBTimeUtils.convertDateTime("2016-09-02T01:00:00.000000000 NY"), -2) - .getSOBD().toDateString(DBTimeZone.TZ_NY), - "2016-08-31"); - assertEquals( - JPOSE - .nextBusinessSchedule( - DBTimeUtils.convertDateTime("2016-09-02T01:00:00.000000000 JP"), -2) - .getSOBD().toDateString(DBTimeZone.TZ_JP), - "2016-08-31"); - - assertEquals( - USNYSE - .nextBusinessSchedule( - DBTimeUtils.convertDateTime("2016-08-30T01:00:00.000000000 NY"), 0) - .getSOBD().toDateString(DBTimeZone.TZ_NY), - "2016-08-30"); + assertEquals(USNYSE.nextBusinessSchedule(day1).getSOBD().toDateString(DBTimeZone.TZ_NY), day2); + assertEquals(JPOSE.nextBusinessSchedule(day1JP).getSOBD().toDateString(DBTimeZone.TZ_JP), day2); + + assertEquals(USNYSE.nextBusinessSchedule(day1, 2).getSOBD().toDateString(DBTimeZone.TZ_NY), "2016-09-02"); + assertEquals(JPOSE.nextBusinessSchedule(day1JP, 2).getSOBD().toDateString(DBTimeZone.TZ_JP), "2016-09-02"); + + assertEquals(USNYSE.nextBusinessSchedule(DBTimeUtils.convertDateTime("2016-09-02T01:00:00.000000000 NY"), -2) + .getSOBD().toDateString(DBTimeZone.TZ_NY), "2016-08-31"); + assertEquals(JPOSE.nextBusinessSchedule(DBTimeUtils.convertDateTime("2016-09-02T01:00:00.000000000 JP"), -2) + .getSOBD().toDateString(DBTimeZone.TZ_JP), "2016-08-31"); + + assertEquals(USNYSE.nextBusinessSchedule(DBTimeUtils.convertDateTime("2016-08-30T01:00:00.000000000 NY"), 0) + .getSOBD().toDateString(DBTimeZone.TZ_NY), "2016-08-30"); // leap day day1 = DBTimeUtils.convertDateTime("2016-02-28T01:00:00.000000000 NY"); day1JP = DBTimeUtils.convertDateTime("2016-02-28T01:00:00.000000000 JP"); day2 = "2016-02-29"; - 
assertEquals(USNYSE.nextBusinessSchedule(day1).getSOBD().toDateString(DBTimeZone.TZ_NY), - day2); - assertEquals(JPOSE.nextBusinessSchedule(day1JP).getSOBD().toDateString(DBTimeZone.TZ_JP), - day2); + assertEquals(USNYSE.nextBusinessSchedule(day1).getSOBD().toDateString(DBTimeZone.TZ_NY), day2); + assertEquals(JPOSE.nextBusinessSchedule(day1JP).getSOBD().toDateString(DBTimeZone.TZ_JP), day2); // new year day1 = DBTimeUtils.convertDateTime("2013-12-31T01:00:00.000000000 NY"); day1JP = DBTimeUtils.convertDateTime("2013-12-31T01:00:00.000000000 JP"); day2 = "2014-01-03"; - assertEquals(USNYSE.nextBusinessSchedule(USNYSE.nextBusinessDay(day1)).getSOBD() - .toDateString(DBTimeZone.TZ_NY), day2); + assertEquals(USNYSE.nextBusinessSchedule(USNYSE.nextBusinessDay(day1)).getSOBD().toDateString(DBTimeZone.TZ_NY), + day2); day2 = "2014-01-01"; - assertEquals(JPOSE.nextBusinessSchedule(day1JP).getSOBD().toDateString(DBTimeZone.TZ_JP), - day2); + assertEquals(JPOSE.nextBusinessSchedule(day1JP).getSOBD().toDateString(DBTimeZone.TZ_JP), day2); // Daylight savings starts in NY (UTC-7:00) at 2 AM 2017-03-12 // Japan doesn't observe day light savings day1 = DBTimeUtils.convertDateTime("2017-03-12T01:00:00.000000000 NY"); day1JP = DBTimeUtils.convertDateTime("2017-03-12T01:00:00.000000000 JP"); day2 = "2017-03-13"; - assertEquals(USNYSE.nextBusinessSchedule(day1).getSOBD().toDateString(DBTimeZone.TZ_NY), - day2); - assertEquals(JPOSE.nextBusinessSchedule(day1JP).getSOBD().toDateString(DBTimeZone.TZ_JP), - day2); + assertEquals(USNYSE.nextBusinessSchedule(day1).getSOBD().toDateString(DBTimeZone.TZ_NY), day2); + assertEquals(JPOSE.nextBusinessSchedule(day1JP).getSOBD().toDateString(DBTimeZone.TZ_JP), day2); // outside calendar range, so no day off for new years, but weekend should still be off day1 = DBTimeUtils.convertDateTime("2069-12-31T01:00:00.000000000 NY"); day1JP = DBTimeUtils.convertDateTime("2069-12-31T01:00:00.000000000 JP"); day2 = "2070-01-01"; - 
assertEquals(USNYSE.nextBusinessSchedule(day1).getSOBD().toDateString(DBTimeZone.TZ_NY) - .compareTo(day2), 0); - assertEquals(JPOSE.nextBusinessSchedule(day1JP).getSOBD().toDateString(DBTimeZone.TZ_JP), - day2); + assertEquals(USNYSE.nextBusinessSchedule(day1).getSOBD().toDateString(DBTimeZone.TZ_NY).compareTo(day2), 0); + assertEquals(JPOSE.nextBusinessSchedule(day1JP).getSOBD().toDateString(DBTimeZone.TZ_JP), day2); day1 = DBTimeUtils.convertDateTime("2070-01-05T01:00:00.000000000 NY"); day1JP = DBTimeUtils.convertDateTime("2070-01-05T01:00:00.000000000 JP"); day2 = "2070-01-06"; - assertEquals(USNYSE.nextBusinessSchedule(day1).getSOBD().toDateString(DBTimeZone.TZ_NY), - day2); - assertEquals(JPOSE.nextBusinessSchedule(day1JP).getSOBD().toDateString(DBTimeZone.TZ_JP), - day2); + assertEquals(USNYSE.nextBusinessSchedule(day1).getSOBD().toDateString(DBTimeZone.TZ_NY), day2); + assertEquals(JPOSE.nextBusinessSchedule(day1JP).getSOBD().toDateString(DBTimeZone.TZ_JP), day2); day1 = null; assertNull(USNYSE.nextBusinessSchedule(day1)); @@ -807,54 +774,41 @@ public void testNextBusinessSchedule() { public void testNextBusinessScheduleString() { String day1 = "2016-08-31"; String day2 = "2016-09-01"; - assertEquals(USNYSE.nextBusinessSchedule(day1).getSOBD().toDateString(DBTimeZone.TZ_NY), - day2); - assertEquals(JPOSE.nextBusinessSchedule(day1).getSOBD().toDateString(DBTimeZone.TZ_JP), - day2); - - assertEquals(USNYSE.nextBusinessSchedule(day1, 2).getSOBD().toDateString(DBTimeZone.TZ_NY), - "2016-09-02"); - assertEquals(JPOSE.nextBusinessSchedule(day1, 2).getSOBD().toDateString(DBTimeZone.TZ_JP), - "2016-09-02"); - - assertEquals( - USNYSE.nextBusinessSchedule("2016-09-02", -2).getSOBD().toDateString(DBTimeZone.TZ_NY), - "2016-08-31"); - assertEquals( - JPOSE.nextBusinessSchedule("2016-09-02", -2).getSOBD().toDateString(DBTimeZone.TZ_JP), - "2016-08-31"); - - assertEquals( - USNYSE.nextBusinessSchedule("2016-08-30", 0).getSOBD().toDateString(DBTimeZone.TZ_NY), - 
"2016-08-30"); + assertEquals(USNYSE.nextBusinessSchedule(day1).getSOBD().toDateString(DBTimeZone.TZ_NY), day2); + assertEquals(JPOSE.nextBusinessSchedule(day1).getSOBD().toDateString(DBTimeZone.TZ_JP), day2); + + assertEquals(USNYSE.nextBusinessSchedule(day1, 2).getSOBD().toDateString(DBTimeZone.TZ_NY), "2016-09-02"); + assertEquals(JPOSE.nextBusinessSchedule(day1, 2).getSOBD().toDateString(DBTimeZone.TZ_JP), "2016-09-02"); + + assertEquals(USNYSE.nextBusinessSchedule("2016-09-02", -2).getSOBD().toDateString(DBTimeZone.TZ_NY), + "2016-08-31"); + assertEquals(JPOSE.nextBusinessSchedule("2016-09-02", -2).getSOBD().toDateString(DBTimeZone.TZ_JP), + "2016-08-31"); + + assertEquals(USNYSE.nextBusinessSchedule("2016-08-30", 0).getSOBD().toDateString(DBTimeZone.TZ_NY), + "2016-08-30"); assertNull(USNYSE.nextBusinessSchedule((String) null, 0)); // leap day day1 = "2016-02-28"; day2 = "2016-02-29"; - assertEquals(USNYSE.nextBusinessSchedule(day1).getSOBD().toDateString(DBTimeZone.TZ_NY), - day2); - assertEquals(JPOSE.nextBusinessSchedule(day1).getSOBD().toDateString(DBTimeZone.TZ_JP), - day2); + assertEquals(USNYSE.nextBusinessSchedule(day1).getSOBD().toDateString(DBTimeZone.TZ_NY), day2); + assertEquals(JPOSE.nextBusinessSchedule(day1).getSOBD().toDateString(DBTimeZone.TZ_JP), day2); // new year day1 = "2014-01-01"; day2 = "2014-01-02"; - assertEquals(USNYSE.nextBusinessSchedule(day1).getSOBD().toDateString(DBTimeZone.TZ_NY), - day2); + assertEquals(USNYSE.nextBusinessSchedule(day1).getSOBD().toDateString(DBTimeZone.TZ_NY), day2); day1 = "2007-01-03"; day2 = "2007-01-04"; - assertEquals(JPOSE.nextBusinessSchedule(day1).getSOBD().toDateString(DBTimeZone.TZ_JP), - day2); + assertEquals(JPOSE.nextBusinessSchedule(day1).getSOBD().toDateString(DBTimeZone.TZ_JP), day2); // Daylight savings starts in NY (UTC-7:00) at 2 AM 2017-03-12 day1 = "2017-03-12"; day2 = "2017-03-13"; - assertEquals(USNYSE.nextBusinessSchedule(day1).getSOBD().toDateString(DBTimeZone.TZ_NY), - day2); - 
assertEquals(JPOSE.nextBusinessSchedule(day1).getSOBD().toDateString(DBTimeZone.TZ_JP), - day2); + assertEquals(USNYSE.nextBusinessSchedule(day1).getSOBD().toDateString(DBTimeZone.TZ_NY), day2); + assertEquals(JPOSE.nextBusinessSchedule(day1).getSOBD().toDateString(DBTimeZone.TZ_JP), day2); day1 = null; assertNull(USNYSE.nextBusinessSchedule(day1)); @@ -877,15 +831,14 @@ public void testNextNonBusinessDay() { assertEquals(USNYSE.nextNonBusinessDay(day1, 2), "2016-09-04"); assertEquals(JPOSE.nextNonBusinessDay(day1JP, 2), "2016-09-04"); - assertEquals(USNYSE.nextNonBusinessDay( - DBTimeUtils.convertDateTime("2016-09-04T01:00:00.000000000 NY"), -2), "2016-08-28"); - assertEquals(JPOSE.nextNonBusinessDay( - DBTimeUtils.convertDateTime("2016-09-04T01:00:00.000000000 JP"), -2), "2016-08-28"); + assertEquals(USNYSE.nextNonBusinessDay(DBTimeUtils.convertDateTime("2016-09-04T01:00:00.000000000 NY"), -2), + "2016-08-28"); + assertEquals(JPOSE.nextNonBusinessDay(DBTimeUtils.convertDateTime("2016-09-04T01:00:00.000000000 JP"), -2), + "2016-08-28"); - assertNull(USNYSE.nextNonBusinessDay( - DBTimeUtils.convertDateTime("2016-08-30T01:00:00.000000000 NY"), 0)); - assertEquals(USNYSE.nextNonBusinessDay( - DBTimeUtils.convertDateTime("2016-08-28T01:00:00.000000000 NY"), 0), "2016-08-28"); + assertNull(USNYSE.nextNonBusinessDay(DBTimeUtils.convertDateTime("2016-08-30T01:00:00.000000000 NY"), 0)); + assertEquals(USNYSE.nextNonBusinessDay(DBTimeUtils.convertDateTime("2016-08-28T01:00:00.000000000 NY"), 0), + "2016-08-28"); // leap day day1 = DBTimeUtils.convertDateTime("2016-02-28T01:00:00.000000000 NY"); @@ -985,16 +938,14 @@ public void testLastBusinessDay() { assertEquals(USNYSE.previousBusinessDay(day2, 2), day1.toDateString(DBTimeZone.TZ_NY)); assertEquals(USNYSE.previousBusinessDay(day1, -2), day2.toDateString(DBTimeZone.TZ_NY)); - assertEquals(USNYSE.previousBusinessDay( - DBTimeUtils.convertDateTime("2016-08-30T15:00:00.000000000 NY"), 0), "2016-08-30"); - 
assertNull(USNYSE.previousBusinessDay( - DBTimeUtils.convertDateTime("2016-08-28T15:00:00.000000000 NY"), 0)); + assertEquals(USNYSE.previousBusinessDay(DBTimeUtils.convertDateTime("2016-08-30T15:00:00.000000000 NY"), 0), + "2016-08-30"); + assertNull(USNYSE.previousBusinessDay(DBTimeUtils.convertDateTime("2016-08-28T15:00:00.000000000 NY"), 0)); assertNull(USNYSE.previousNonBusinessDay((DBDateTime) null, 0)); - assertNull(USNYSE.previousNonBusinessDay( - DBTimeUtils.convertDateTime("2016-08-30T21:00:00.000000000 NY"), 0)); - assertEquals(USNYSE.previousNonBusinessDay( - DBTimeUtils.convertDateTime("2016-08-28T21:00:00.000000000 NY"), 0), "2016-08-28"); + assertNull(USNYSE.previousNonBusinessDay(DBTimeUtils.convertDateTime("2016-08-30T21:00:00.000000000 NY"), 0)); + assertEquals(USNYSE.previousNonBusinessDay(DBTimeUtils.convertDateTime("2016-08-28T21:00:00.000000000 NY"), 0), + "2016-08-28"); // leap day day1 = DBTimeUtils.convertDateTime("2016-02-29T21:00:00.000000000 NY"); @@ -1109,34 +1060,28 @@ public void testLastBusinessSchedule() { DBDateTime day1 = DBTimeUtils.convertDateTime("2016-08-30T01:00:00.000000000 NY"); DBDateTime day2 = DBTimeUtils.convertDateTime("2016-09-01T01:00:00.000000000 NY"); - assertEquals( - USNYSE.previousBusinessSchedule(day2, 2).getSOBD().toDateString(DBTimeZone.TZ_NY), - day1.toDateString(DBTimeZone.TZ_NY)); - assertEquals( - USNYSE.previousBusinessSchedule(day1, -2).getSOBD().toDateString(DBTimeZone.TZ_NY), - day2.toDateString(DBTimeZone.TZ_NY)); - - assertEquals(USNYSE - .previousBusinessSchedule( - DBTimeUtils.convertDateTime("2016-08-30T15:00:00.000000000 NY"), 0) - .getSOBD().toDateString(DBTimeZone.TZ_NY), "2016-08-30"); + assertEquals(USNYSE.previousBusinessSchedule(day2, 2).getSOBD().toDateString(DBTimeZone.TZ_NY), + day1.toDateString(DBTimeZone.TZ_NY)); + assertEquals(USNYSE.previousBusinessSchedule(day1, -2).getSOBD().toDateString(DBTimeZone.TZ_NY), + day2.toDateString(DBTimeZone.TZ_NY)); + + 
assertEquals(USNYSE.previousBusinessSchedule(DBTimeUtils.convertDateTime("2016-08-30T15:00:00.000000000 NY"), 0) + .getSOBD().toDateString(DBTimeZone.TZ_NY), "2016-08-30"); assertNull(USNYSE.previousBusinessSchedule((DBDateTime) null, 0)); // leap day day1 = DBTimeUtils.convertDateTime("2016-02-29T21:00:00.000000000 NY"); day2 = DBTimeUtils.convertDateTime("2016-03-01T01:00:00.000000000 NY"); assertEquals(USNYSE.previousBusinessSchedule(day2).getSOBD().toDateString(DBTimeZone.TZ_NY), - day1.toDateString(DBTimeZone.TZ_NY)); + day1.toDateString(DBTimeZone.TZ_NY)); // new year day1 = DBTimeUtils.convertDateTime("2013-12-26T01:00:00.000000000 NY"); day2 = DBTimeUtils.convertDateTime("2014-01-02T01:00:00.000000000 NY"); - assertEquals( - USNYSE.previousBusinessSchedule(day2, 7).getSOBD().toDateString(DBTimeZone.TZ_NY), - day1.toDateString(DBTimeZone.TZ_NY)); - assertEquals( - USNYSE.previousBusinessSchedule(day1, -7).getSOBD().toDateString(DBTimeZone.TZ_NY), - day2.toDateString(DBTimeZone.TZ_NY)); + assertEquals(USNYSE.previousBusinessSchedule(day2, 7).getSOBD().toDateString(DBTimeZone.TZ_NY), + day1.toDateString(DBTimeZone.TZ_NY)); + assertEquals(USNYSE.previousBusinessSchedule(day1, -7).getSOBD().toDateString(DBTimeZone.TZ_NY), + day2.toDateString(DBTimeZone.TZ_NY)); day1 = null; assertNull(USNYSE.previousBusinessSchedule(day1)); @@ -1145,19 +1090,19 @@ public void testLastBusinessSchedule() { day1 = DBTimeUtils.convertDateTime("2016-08-31T21:00:00.000000000 JP"); day2 = DBTimeUtils.convertDateTime("2016-09-01T21:00:00.000000000 JP"); assertEquals(JPOSE.previousBusinessSchedule(day2).getSOBD().toDateString(DBTimeZone.TZ_JP), - day1.toDateString(DBTimeZone.TZ_JP)); + day1.toDateString(DBTimeZone.TZ_JP)); // leap day day1 = DBTimeUtils.convertDateTime("2016-02-29T01:00:00.000000000 JP"); day2 = DBTimeUtils.convertDateTime("2016-03-01T01:00:00.000000000 JP"); assertEquals(JPOSE.previousBusinessSchedule(day2).getSOBD().toDateString(DBTimeZone.TZ_JP), - 
day1.toDateString(DBTimeZone.TZ_JP)); + day1.toDateString(DBTimeZone.TZ_JP)); // new year day1 = DBTimeUtils.convertDateTime("2013-12-31T11:00:00.000000000 JP"); day2 = DBTimeUtils.convertDateTime("2014-01-01T11:00:00.000000000 JP"); assertEquals(JPOSE.previousBusinessSchedule(day2).getSOBD().toDateString(DBTimeZone.TZ_JP), - day1.toDateString(DBTimeZone.TZ_JP)); + day1.toDateString(DBTimeZone.TZ_JP)); day1 = null; @@ -1167,58 +1112,40 @@ public void testLastBusinessSchedule() { public void testLastBusinessScheduleString() { String day1 = "2016-08-31"; String day2 = "2016-09-01"; - assertEquals(USNYSE.previousBusinessSchedule(day2).getSOBD().toDateString(DBTimeZone.TZ_NY), - day1); - assertEquals(JPOSE.previousBusinessSchedule(day2).getSOBD().toDateString(DBTimeZone.TZ_JP), - day1); + assertEquals(USNYSE.previousBusinessSchedule(day2).getSOBD().toDateString(DBTimeZone.TZ_NY), day1); + assertEquals(JPOSE.previousBusinessSchedule(day2).getSOBD().toDateString(DBTimeZone.TZ_JP), day1); - assertEquals(USNYSE.previousBusinessSchedule("2016-08-30", 0).getSOBD() - .toDateString(DBTimeZone.TZ_NY), "2016-08-30"); + assertEquals(USNYSE.previousBusinessSchedule("2016-08-30", 0).getSOBD().toDateString(DBTimeZone.TZ_NY), + "2016-08-30"); assertNull(USNYSE.previousBusinessSchedule((String) null, 0)); day1 = "2016-08-29"; - assertEquals( - USNYSE.previousBusinessSchedule(day2, 3).getSOBD().toDateString(DBTimeZone.TZ_NY), - day1); - assertEquals( - JPOSE.previousBusinessSchedule(day2, 3).getSOBD().toDateString(DBTimeZone.TZ_JP), day1); - assertEquals( - USNYSE.previousBusinessSchedule(day1, -3).getSOBD().toDateString(DBTimeZone.TZ_NY), - day2); - assertEquals( - JPOSE.previousBusinessSchedule(day1, -3).getSOBD().toDateString(DBTimeZone.TZ_JP), - day2); + assertEquals(USNYSE.previousBusinessSchedule(day2, 3).getSOBD().toDateString(DBTimeZone.TZ_NY), day1); + assertEquals(JPOSE.previousBusinessSchedule(day2, 3).getSOBD().toDateString(DBTimeZone.TZ_JP), day1); + 
assertEquals(USNYSE.previousBusinessSchedule(day1, -3).getSOBD().toDateString(DBTimeZone.TZ_NY), day2); + assertEquals(JPOSE.previousBusinessSchedule(day1, -3).getSOBD().toDateString(DBTimeZone.TZ_JP), day2); // leap day day1 = "2016-02-29"; day2 = "2016-03-01"; - assertEquals(USNYSE.previousBusinessSchedule(day2).getSOBD().toDateString(DBTimeZone.TZ_NY), - day1); - assertEquals(JPOSE.previousBusinessSchedule(day2).getSOBD().toDateString(DBTimeZone.TZ_JP), - day1); + assertEquals(USNYSE.previousBusinessSchedule(day2).getSOBD().toDateString(DBTimeZone.TZ_NY), day1); + assertEquals(JPOSE.previousBusinessSchedule(day2).getSOBD().toDateString(DBTimeZone.TZ_JP), day1); // new year day1 = "2014-12-29"; day2 = "2014-12-31"; - assertEquals( - USNYSE.previousBusinessSchedule(day2, 2).getSOBD().toDateString(DBTimeZone.TZ_NY), - day1); - assertEquals( - JPOSE.previousBusinessSchedule(day2, 2).getSOBD().toDateString(DBTimeZone.TZ_JP), day1); - assertEquals( - USNYSE.previousBusinessSchedule(day1, -2).getSOBD().toDateString(DBTimeZone.TZ_NY), - day2); - assertEquals( - JPOSE.previousBusinessSchedule(day1, -2).getSOBD().toDateString(DBTimeZone.TZ_JP), - day2); + assertEquals(USNYSE.previousBusinessSchedule(day2, 2).getSOBD().toDateString(DBTimeZone.TZ_NY), day1); + assertEquals(JPOSE.previousBusinessSchedule(day2, 2).getSOBD().toDateString(DBTimeZone.TZ_JP), day1); + assertEquals(USNYSE.previousBusinessSchedule(day1, -2).getSOBD().toDateString(DBTimeZone.TZ_NY), day2); + assertEquals(JPOSE.previousBusinessSchedule(day1, -2).getSOBD().toDateString(DBTimeZone.TZ_JP), day2); // Daylight savings starts in NY (UTC-7:00) at 2 AM 2017-03-12 day1 = "2017-03-10"; day2 = "2017-03-13"; - assertEquals(USNYSE.previousBusinessSchedule(USNYSE.previousDay(USNYSE.previousDay(day2))) - .getSOBD().toDateString(DBTimeZone.TZ_NY), day1); - assertEquals(JPOSE.previousBusinessSchedule(JPOSE.previousDay(JPOSE.previousDay(day2))) - .getSOBD().toDateString(DBTimeZone.TZ_JP), day1); + 
assertEquals(USNYSE.previousBusinessSchedule(USNYSE.previousDay(USNYSE.previousDay(day2))).getSOBD() + .toDateString(DBTimeZone.TZ_NY), day1); + assertEquals(JPOSE.previousBusinessSchedule(JPOSE.previousDay(JPOSE.previousDay(day2))).getSOBD() + .toDateString(DBTimeZone.TZ_JP), day1); day1 = null; assertNull(USNYSE.previousBusinessSchedule(day1)); @@ -1285,8 +1212,7 @@ public void testDiff() { DBDateTime day2 = DBTimeUtils.convertDateTime("2016-09-01T01:00:00.000000000 NY"); assertEquals(USNYSE.diffDay(day1, day2), 1.0); assertEquals(USNYSE.diffNanos(day1, day2), DBTimeUtils.DAY); - assertEquals(JPOSE.diffYear(day1, day2), - (double) DBTimeUtils.DAY / (double) DBTimeUtils.YEAR); + assertEquals(JPOSE.diffYear(day1, day2), (double) DBTimeUtils.DAY / (double) DBTimeUtils.YEAR); } public void testBusinessTimeDiff() { @@ -1391,7 +1317,7 @@ public void testNonBusinessTimeDiff() { day2 = DBTimeUtils.convertDateTime("2017-01-23T16:00:00.000000000 JP"); assertEquals(JPOSE.diffNonBusinessNanos(day1, day2), 2 * DBTimeUtils.HOUR); assertEquals(JPOSE.diffNonBusinessDay(day1, day2), - ((double) (2 * DBTimeUtils.HOUR)) / (double) JPOSE.standardBusinessDayLengthNanos()); + ((double) (2 * DBTimeUtils.HOUR)) / (double) JPOSE.standardBusinessDayLengthNanos()); @@ -1682,11 +1608,9 @@ public void testMidnightClose() { assertEquals("2019-04-16", UTC.nextBusinessDay("2019-04-15")); assertEquals("2019-04-18", UTC.nextBusinessDay("2019-04-15", 3)); assertEquals("2019-08-19", - UTC.nextBusinessDay(DBTimeUtils.convertDateTime("2019-08-18T00:00:00.000000000 UTC"))); + UTC.nextBusinessDay(DBTimeUtils.convertDateTime("2019-08-18T00:00:00.000000000 UTC"))); - assertEquals("2019-05-16", - UTC.getBusinessSchedule("2019-05-16").getSOBD().toDateString(DBTimeZone.TZ_UTC)); - assertEquals("2019-05-17", - UTC.getBusinessSchedule("2019-05-16").getEOBD().toDateString(DBTimeZone.TZ_UTC)); + assertEquals("2019-05-16", UTC.getBusinessSchedule("2019-05-16").getSOBD().toDateString(DBTimeZone.TZ_UTC)); + 
assertEquals("2019-05-17", UTC.getBusinessSchedule("2019-05-16").getEOBD().toDateString(DBTimeZone.TZ_UTC)); } } diff --git a/DB/src/test/java/io/deephaven/util/calendar/TestDefaultNoHolidayBusinessCalendar.java b/DB/src/test/java/io/deephaven/util/calendar/TestDefaultNoHolidayBusinessCalendar.java index 4ed02810586..634b0fcbe4d 100644 --- a/DB/src/test/java/io/deephaven/util/calendar/TestDefaultNoHolidayBusinessCalendar.java +++ b/DB/src/test/java/io/deephaven/util/calendar/TestDefaultNoHolidayBusinessCalendar.java @@ -20,18 +20,18 @@ public void setUp() throws Exception { final File noNonBusinessDaysFile = File.createTempFile("noNonBusinessDays", ".calendar"); FileWriter fw = new FileWriter(noNonBusinessDaysFile); fw.write("\n" + - "\n" + - "\n" + - " noNonBusinessDays\n" + - " TZ_NY\n" + - " en\n" + - " US\n" + - " \n" + - " 09:30,16:00\n" + - " \n" + - ""); + " ~ Copyright (c) 2016-2021 Deephaven Data Labs and Patent Pending\n" + + " -->\n" + + "\n" + + "\n" + + " noNonBusinessDays\n" + + " TZ_NY\n" + + " en\n" + + " US\n" + + " \n" + + " 09:30,16:00\n" + + " \n" + + ""); fw.flush(); fw.close(); @@ -40,20 +40,20 @@ public void setUp() throws Exception { final File onlyWeekendsFile = File.createTempFile("onlyWeekends", ".calendar"); fw = new FileWriter(onlyWeekendsFile); fw.write("\n" + - "\n" + - "\n" + - " onlyWeekends\n" + - " TZ_NY\n" + - " en\n" + - " US\n" + - " \n" + - " 09:30,16:00\n" + - " Saturday\n" + - " Sunday\n" + - " \n" + - ""); + " ~ Copyright (c) 2016-2021 Deephaven Data Labs and Patent Pending\n" + + " -->\n" + + "\n" + + "\n" + + " onlyWeekends\n" + + " TZ_NY\n" + + " en\n" + + " US\n" + + " \n" + + " 09:30,16:00\n" + + " Saturday\n" + + " Sunday\n" + + " \n" + + ""); fw.flush(); fw.close(); @@ -62,47 +62,46 @@ public void setUp() throws Exception { final File onlyHolidaysFile = File.createTempFile("onlyHolidays", ".calendar"); fw = new FileWriter(onlyHolidaysFile); fw.write("\n" + - "\n" + - "\n" + - " onlyHolidays\n" + - " TZ_NY\n" + - " 
en\n" + - " US\n" + - " \n" + - " 09:30,16:00\n" + - " \n" + - " \n" + - " 20210215\n" + - " " + - ""); + " ~ Copyright (c) 2016-2021 Deephaven Data Labs and Patent Pending\n" + + " -->\n" + + "\n" + + "\n" + + " onlyHolidays\n" + + " TZ_NY\n" + + " en\n" + + " US\n" + + " \n" + + " 09:30,16:00\n" + + " \n" + + " \n" + + " 20210215\n" + + " " + + ""); fw.flush(); fw.close(); onlyHolidays = DefaultBusinessCalendar.getInstance(onlyWeekendsFile); - final File weekendsAndHolidaysFile = - File.createTempFile("weekendsAndHolidays", ".calendar"); + final File weekendsAndHolidaysFile = File.createTempFile("weekendsAndHolidays", ".calendar"); fw = new FileWriter(weekendsAndHolidaysFile); fw.write("\n" + - "\n" + - "\n" + - " weekendsAndHolidays\n" + - " TZ_NY\n" + - " en\n" + - " US\n" + - " \n" + - " 09:30,16:00\n" + - " Saturday\n" + - " Sunday\n" + - " \n" + - " \n" + - " 20210215\n" + - " " + - ""); + " ~ Copyright (c) 2016-2021 Deephaven Data Labs and Patent Pending\n" + + " -->\n" + + "\n" + + "\n" + + " weekendsAndHolidays\n" + + " TZ_NY\n" + + " en\n" + + " US\n" + + " \n" + + " 09:30,16:00\n" + + " Saturday\n" + + " Sunday\n" + + " \n" + + " \n" + + " 20210215\n" + + " " + + ""); fw.flush(); fw.close(); @@ -154,21 +153,21 @@ public void testNonBusinessDayMethods() { // ok } - assertEquals(noNonBusinessDays.nonBusinessDaysInRange("2010-01-01", "2019-01-01"), - new String[0]); - assertEquals(noNonBusinessDays.diffNonBusinessNanos( - DBTimeUtils.convertDateTime("2010-01-01T01:00:00.000000000 NY"), - DBTimeUtils.convertDateTime("2019-01-01T01:00:00.000000000 NY")), 0); + assertEquals(noNonBusinessDays.nonBusinessDaysInRange("2010-01-01", "2019-01-01"), new String[0]); + assertEquals( + noNonBusinessDays.diffNonBusinessNanos(DBTimeUtils.convertDateTime("2010-01-01T01:00:00.000000000 NY"), + DBTimeUtils.convertDateTime("2019-01-01T01:00:00.000000000 NY")), + 0); assertEquals(noNonBusinessDays.numberOfNonBusinessDays("2010-01-01", "2019-01-01"), 0); 
assertEquals(noNonBusinessDays.name(), "noNonBusinessDays"); assertEquals(noNonBusinessDays.timeZone(), DBTimeZone.TZ_NY); assertEquals(noNonBusinessDays.standardBusinessDayLengthNanos(), - 6 * DBTimeUtils.HOUR + (30 * DBTimeUtils.MINUTE)); + 6 * DBTimeUtils.HOUR + (30 * DBTimeUtils.MINUTE)); assertEquals(noNonBusinessDays.getBusinessSchedule("2019-06-26").getSOBD(), - onlyWeekends.getBusinessSchedule("2019-06-26").getSOBD()); + onlyWeekends.getBusinessSchedule("2019-06-26").getSOBD()); assertEquals(noNonBusinessDays.getBusinessSchedule("2019-06-26").getEOBD(), - onlyWeekends.getBusinessSchedule("2019-06-26").getEOBD()); + onlyWeekends.getBusinessSchedule("2019-06-26").getEOBD()); } } diff --git a/DHProcess/src/main/java/io/deephaven/process/BaseboardOshi.java b/DHProcess/src/main/java/io/deephaven/process/BaseboardOshi.java index e74ab59ab23..89929aa4838 100644 --- a/DHProcess/src/main/java/io/deephaven/process/BaseboardOshi.java +++ b/DHProcess/src/main/java/io/deephaven/process/BaseboardOshi.java @@ -59,21 +59,20 @@ public final void traverse(PropertyVisitor visitor) { public static BaseboardOshi from(Baseboard baseboard) { return ImmutableBaseboardOshi.builder() - .manufacturer(baseboard.getManufacturer()) - .model(baseboard.getModel()) - .version(baseboard.getVersion()) - .serialNumber(baseboard.getSerialNumber()) - .build(); + .manufacturer(baseboard.getManufacturer()) + .model(baseboard.getModel()) + .version(baseboard.getVersion()) + .serialNumber(baseboard.getSerialNumber()) + .build(); } /* - * public static BaseboardOshi from(PropertySet properties) { return - * Parser.INSTANCE.parse(properties); } + * public static BaseboardOshi from(PropertySet properties) { return Parser.INSTANCE.parse(properties); } * * enum Parser implements PropertySetParser { INSTANCE; * - * @Override public BaseboardOshi parse(PropertySet properties) { final Visitor builder = new - * Visitor(); builder.visitProperties(properties); return builder.build(); } } + * @Override public 
BaseboardOshi parse(PropertySet properties) { final Visitor builder = new Visitor(); + * builder.visitProperties(properties); return builder.build(); } } * * static class Visitor implements PropertyVisitor { * @@ -83,14 +82,13 @@ public static BaseboardOshi from(Baseboard baseboard) { * * @Override public void visit(String key, String value) { switch (key) { case MANUFACTURER: * builder.manufacturer(value); break; case MODEL: builder.model(value); break; case VERSION: - * builder.version(value); break; case SERIAL_NUMBER: builder.serialNumber(value); break; - * default: Error.INSTANCE.visit(key, value); } } + * builder.version(value); break; case SERIAL_NUMBER: builder.serialNumber(value); break; default: + * Error.INSTANCE.visit(key, value); } } * * @Override public void visit(String key, int value) { Error.INSTANCE.visit(key, value); } * * @Override public void visit(String key, long value) { Error.INSTANCE.visit(key, value); } * - * @Override public void visit(String key, boolean value) { Error.INSTANCE.visit(key, value); } - * } + * @Override public void visit(String key, boolean value) { Error.INSTANCE.visit(key, value); } } */ } diff --git a/DHProcess/src/main/java/io/deephaven/process/ComputerSystemOshi.java b/DHProcess/src/main/java/io/deephaven/process/ComputerSystemOshi.java index 572c67ad376..b9a4ab2c550 100644 --- a/DHProcess/src/main/java/io/deephaven/process/ComputerSystemOshi.java +++ b/DHProcess/src/main/java/io/deephaven/process/ComputerSystemOshi.java @@ -6,8 +6,8 @@ import oshi.hardware.ComputerSystem; /** - * The ComputerSystem represents the physical hardware, of a computer system/product and includes - * BIOS/firmware and a motherboard, logic board, etc. + * The ComputerSystem represents the physical hardware, of a computer system/product and includes BIOS/firmware and a + * motherboard, logic board, etc. 
*/ @Value.Immutable @ProcessStyle @@ -62,29 +62,26 @@ public final void traverse(PropertyVisitor visitor) { public static ComputerSystemOshi from(ComputerSystem computerSystem) { return ImmutableComputerSystemOshi.builder() - .manufacturer(computerSystem.getManufacturer()) - .model(computerSystem.getModel()) - .firmware(FirmwareOshi.from(computerSystem.getFirmware())) - .baseboard(BaseboardOshi.from(computerSystem.getBaseboard())) - .build(); + .manufacturer(computerSystem.getManufacturer()) + .model(computerSystem.getModel()) + .firmware(FirmwareOshi.from(computerSystem.getFirmware())) + .baseboard(BaseboardOshi.from(computerSystem.getBaseboard())) + .build(); } /* - * public static ComputerSystemOshi from(PropertySet properties) { return - * Parser.INSTANCE.parse(properties); } + * public static ComputerSystemOshi from(PropertySet properties) { return Parser.INSTANCE.parse(properties); } * * enum Parser implements PropertySetParser { INSTANCE; * - * @Override public ComputerSystemOshi parse(PropertySet properties) { final - * ComputerSystemOshi.Visitor collector = new ComputerSystemOshi.Visitor(); - * collector.visitProperties(properties); return collector.build(); } } + * @Override public ComputerSystemOshi parse(PropertySet properties) { final ComputerSystemOshi.Visitor collector = + * new ComputerSystemOshi.Visitor(); collector.visitProperties(properties); return collector.build(); } } * * private static class Visitor implements PropertyVisitor { * - * private final ImmutableComputerSystemOshi.Builder builder = ImmutableComputerSystemOshi - * .builder(); private final BaseboardOshi.Visitor baseboardBuilder = new - * BaseboardOshi.Visitor(); private final FirmwareOshi.Visitor firmwareBuilder = new - * FirmwareOshi.Visitor(); + * private final ImmutableComputerSystemOshi.Builder builder = ImmutableComputerSystemOshi .builder(); private final + * BaseboardOshi.Visitor baseboardBuilder = new BaseboardOshi.Visitor(); private final FirmwareOshi.Visitor + * 
firmwareBuilder = new FirmwareOshi.Visitor(); * * * ComputerSystemOshi build() { return builder .baseboard(baseboardBuilder.build()) @@ -94,23 +91,19 @@ public static ComputerSystemOshi from(ComputerSystem computerSystem) { * builder.manufacturer(value); return; * * case MODEL: builder.model(value); return; } if (key.startsWith(BASEBOARD)) { - * baseboardBuilder.stripPrefix(BASEBOARD).visit(key, value); return; } if - * (key.startsWith(FIRMWARE)) { firmwareBuilder.stripPrefix(FIRMWARE).visit(key, value); return; - * } Error.INSTANCE.visit(key, value); } + * baseboardBuilder.stripPrefix(BASEBOARD).visit(key, value); return; } if (key.startsWith(FIRMWARE)) { + * firmwareBuilder.stripPrefix(FIRMWARE).visit(key, value); return; } Error.INSTANCE.visit(key, value); } * * @Override public void visit(String key, int value) { if (key.startsWith(BASEBOARD)) { - * baseboardBuilder.stripPrefix(BASEBOARD).visit(key, value); return; } if - * (key.startsWith(FIRMWARE)) { firmwareBuilder.stripPrefix(FIRMWARE).visit(key, value); return; - * } Error.INSTANCE.visit(key, value); } + * baseboardBuilder.stripPrefix(BASEBOARD).visit(key, value); return; } if (key.startsWith(FIRMWARE)) { + * firmwareBuilder.stripPrefix(FIRMWARE).visit(key, value); return; } Error.INSTANCE.visit(key, value); } * * @Override public void visit(String key, long value) { if (key.startsWith(BASEBOARD)) { - * baseboardBuilder.stripPrefix(BASEBOARD).visit(key, value); return; } if - * (key.startsWith(FIRMWARE)) { firmwareBuilder.stripPrefix(FIRMWARE).visit(key, value); return; - * } Error.INSTANCE.visit(key, value); } + * baseboardBuilder.stripPrefix(BASEBOARD).visit(key, value); return; } if (key.startsWith(FIRMWARE)) { + * firmwareBuilder.stripPrefix(FIRMWARE).visit(key, value); return; } Error.INSTANCE.visit(key, value); } * * @Override public void visit(String key, boolean value) { if (key.startsWith(BASEBOARD)) { - * baseboardBuilder.stripPrefix(BASEBOARD).visit(key, value); return; } if - * 
(key.startsWith(FIRMWARE)) { firmwareBuilder.stripPrefix(FIRMWARE).visit(key, value); return; - * } Error.INSTANCE.visit(key, value); } } + * baseboardBuilder.stripPrefix(BASEBOARD).visit(key, value); return; } if (key.startsWith(FIRMWARE)) { + * firmwareBuilder.stripPrefix(FIRMWARE).visit(key, value); return; } Error.INSTANCE.visit(key, value); } } */ } diff --git a/DHProcess/src/main/java/io/deephaven/process/FirmwareOshi.java b/DHProcess/src/main/java/io/deephaven/process/FirmwareOshi.java index bc15210352d..f358b2dadd7 100644 --- a/DHProcess/src/main/java/io/deephaven/process/FirmwareOshi.java +++ b/DHProcess/src/main/java/io/deephaven/process/FirmwareOshi.java @@ -69,22 +69,21 @@ public final void traverse(PropertyVisitor visitor) { public static FirmwareOshi from(Firmware firmware) { return ImmutableFirmwareOshi.builder() - .manufacturer(firmware.getManufacturer()) - .name(firmware.getName()) - .description(firmware.getDescription()) - .version(firmware.getVersion()) - .releaseDate(firmware.getReleaseDate()) - .build(); + .manufacturer(firmware.getManufacturer()) + .name(firmware.getName()) + .description(firmware.getDescription()) + .version(firmware.getVersion()) + .releaseDate(firmware.getReleaseDate()) + .build(); } /* - * public static FirmwareOshi from(PropertySet properties) { return - * Parser.INSTANCE.parse(properties); } + * public static FirmwareOshi from(PropertySet properties) { return Parser.INSTANCE.parse(properties); } * * enum Parser implements PropertySetParser { INSTANCE; * - * @Override public FirmwareOshi parse(PropertySet properties) { final Visitor parser = new - * Visitor(); parser.visitProperties(properties); return parser.build(); } } + * @Override public FirmwareOshi parse(PropertySet properties) { final Visitor parser = new Visitor(); + * parser.visitProperties(properties); return parser.build(); } } * * static class Visitor implements PropertyVisitor { * @@ -94,15 +93,13 @@ public static FirmwareOshi from(Firmware firmware) { * * 
@Override public void visit(String key, String value) { switch (key) { case MANUFACTURER: * builder.manufacturer(value); break; case NAME: builder.name(value); break; case DESCRIPTION: - * builder.description(value); break; case VERSION: builder.version(value); break; case - * RELEASE_DATE: builder.releaseDate(value); break; default: Error.INSTANCE.visit(key, value); } - * } + * builder.description(value); break; case VERSION: builder.version(value); break; case RELEASE_DATE: + * builder.releaseDate(value); break; default: Error.INSTANCE.visit(key, value); } } * * @Override public void visit(String key, int value) { Error.INSTANCE.visit(key, value); } * * @Override public void visit(String key, long value) { Error.INSTANCE.visit(key, value); } * - * @Override public void visit(String key, boolean value) { Error.INSTANCE.visit(key, value); } - * } + * @Override public void visit(String key, boolean value) { Error.INSTANCE.visit(key, value); } } */ } diff --git a/DHProcess/src/main/java/io/deephaven/process/MemoryMxBeanInfo.java b/DHProcess/src/main/java/io/deephaven/process/MemoryMxBeanInfo.java index 8c7b2b1c461..e5e4d00c702 100644 --- a/DHProcess/src/main/java/io/deephaven/process/MemoryMxBeanInfo.java +++ b/DHProcess/src/main/java/io/deephaven/process/MemoryMxBeanInfo.java @@ -14,9 +14,9 @@ public abstract class MemoryMxBeanInfo implements PropertySet { public static MemoryMxBeanInfo of(MemoryMXBean bean) { return ImmutableMemoryMxBeanInfo.builder() - .heap(MemoryUsageInfo.of(bean.getHeapMemoryUsage())) - .nonHeap(MemoryUsageInfo.of(bean.getNonHeapMemoryUsage())) - .build(); + .heap(MemoryUsageInfo.of(bean.getHeapMemoryUsage())) + .nonHeap(MemoryUsageInfo.of(bean.getNonHeapMemoryUsage())) + .build(); } @Value.Parameter diff --git a/DHProcess/src/main/java/io/deephaven/process/NetworkOshi.java b/DHProcess/src/main/java/io/deephaven/process/NetworkOshi.java index 68a22ba113c..43d1d89757c 100644 --- a/DHProcess/src/main/java/io/deephaven/process/NetworkOshi.java +++ 
b/DHProcess/src/main/java/io/deephaven/process/NetworkOshi.java @@ -81,14 +81,13 @@ public final void traverse(PropertyVisitor visitor) { public static NetworkOshi from(NetworkParams network) { return ImmutableNetworkOshi.builder() - .hostName(network.getHostName()) - .domainName(network.getDomainName().isEmpty() ? Optional.empty() - : Optional.of(network.getDomainName())) - .dnsServers(DnsServers.of(Arrays.asList(network.getDnsServers()))) - .ipv4DefaultGateway(network.getIpv4DefaultGateway().isEmpty() ? Optional.empty() - : Optional.of(network.getIpv4DefaultGateway())) - .ipv6DefaultGateway(network.getIpv6DefaultGateway().isEmpty() ? Optional.empty() - : Optional.of(network.getIpv6DefaultGateway())) - .build(); + .hostName(network.getHostName()) + .domainName(network.getDomainName().isEmpty() ? Optional.empty() : Optional.of(network.getDomainName())) + .dnsServers(DnsServers.of(Arrays.asList(network.getDnsServers()))) + .ipv4DefaultGateway(network.getIpv4DefaultGateway().isEmpty() ? Optional.empty() + : Optional.of(network.getIpv4DefaultGateway())) + .ipv6DefaultGateway(network.getIpv6DefaultGateway().isEmpty() ? 
Optional.empty() + : Optional.of(network.getIpv6DefaultGateway())) + .build(); } } diff --git a/DHProcess/src/main/java/io/deephaven/process/OperatingSystemOshi.java b/DHProcess/src/main/java/io/deephaven/process/OperatingSystemOshi.java index 81d1c15f30e..880c7248c18 100644 --- a/DHProcess/src/main/java/io/deephaven/process/OperatingSystemOshi.java +++ b/DHProcess/src/main/java/io/deephaven/process/OperatingSystemOshi.java @@ -41,11 +41,11 @@ public final void traverse(PropertyVisitor visitor) { public static OperatingSystemOshi from(OperatingSystem os) { return ImmutableOperatingSystemOshi.builder() - .family(os.getFamily()) - .manufacturer(os.getManufacturer()) - .version(OperatingSystemVersionOshi.from(os.getVersionInfo())) - .network(NetworkOshi.from(os.getNetworkParams())) - .pid(os.getProcessId()) - .build(); + .family(os.getFamily()) + .manufacturer(os.getManufacturer()) + .version(OperatingSystemVersionOshi.from(os.getVersionInfo())) + .network(NetworkOshi.from(os.getNetworkParams())) + .pid(os.getProcessId()) + .build(); } } diff --git a/DHProcess/src/main/java/io/deephaven/process/OperatingSystemVersionOshi.java b/DHProcess/src/main/java/io/deephaven/process/OperatingSystemVersionOshi.java index 2ee023ffe36..6513e0b7266 100644 --- a/DHProcess/src/main/java/io/deephaven/process/OperatingSystemVersionOshi.java +++ b/DHProcess/src/main/java/io/deephaven/process/OperatingSystemVersionOshi.java @@ -47,9 +47,9 @@ public final void traverse(PropertyVisitor visitor) { public static OperatingSystemVersionOshi from(OSVersionInfo info) { return ImmutableOperatingSystemVersionOshi.builder() - .version(Optional.ofNullable(info.getVersion())) - .codeName(Optional.ofNullable(info.getCodeName())) - .buildNumber(Optional.ofNullable(info.getBuildNumber())) - .build(); + .version(Optional.ofNullable(info.getVersion())) + .codeName(Optional.ofNullable(info.getCodeName())) + .buildNumber(Optional.ofNullable(info.getBuildNumber())) + .build(); } } diff --git 
a/DHProcess/src/main/java/io/deephaven/process/ProcessStyle.java b/DHProcess/src/main/java/io/deephaven/process/ProcessStyle.java index bcc7a3853c2..3652e9a01f8 100644 --- a/DHProcess/src/main/java/io/deephaven/process/ProcessStyle.java +++ b/DHProcess/src/main/java/io/deephaven/process/ProcessStyle.java @@ -4,8 +4,8 @@ import org.immutables.value.Value.Style.ImplementationVisibility; @Value.Style( - visibility = ImplementationVisibility.PACKAGE, - defaults = @Value.Immutable(copy = false), - strictBuilder = true) + visibility = ImplementationVisibility.PACKAGE, + defaults = @Value.Immutable(copy = false), + strictBuilder = true) public @interface ProcessStyle { } diff --git a/DHProcess/src/main/java/io/deephaven/process/RuntimeMxBeanInfo.java b/DHProcess/src/main/java/io/deephaven/process/RuntimeMxBeanInfo.java index 49badc854a2..40ef04c5153 100644 --- a/DHProcess/src/main/java/io/deephaven/process/RuntimeMxBeanInfo.java +++ b/DHProcess/src/main/java/io/deephaven/process/RuntimeMxBeanInfo.java @@ -17,12 +17,12 @@ public abstract class RuntimeMxBeanInfo implements PropertySet { public static RuntimeMxBeanInfo of(RuntimeMXBean bean) { return ImmutableRuntimeMxBeanInfo.builder() - .systemProperties(_SystemProperties.of(bean)) - .jvmArguments(_JvmArguments.of(bean)) - .managementSpecVersion(bean.getManagementSpecVersion()) - .isBootClassPathSupported(bean.isBootClassPathSupported()) - .startTime(bean.getStartTime()) - .build(); + .systemProperties(_SystemProperties.of(bean)) + .jvmArguments(_JvmArguments.of(bean)) + .managementSpecVersion(bean.getManagementSpecVersion()) + .isBootClassPathSupported(bean.isBootClassPathSupported()) + .startTime(bean.getStartTime()) + .build(); } @Value.Parameter diff --git a/DHProcess/src/main/java/io/deephaven/process/SystemCpuOshi.java b/DHProcess/src/main/java/io/deephaven/process/SystemCpuOshi.java index 935e7d56d36..1fca1134134 100644 --- a/DHProcess/src/main/java/io/deephaven/process/SystemCpuOshi.java +++ 
b/DHProcess/src/main/java/io/deephaven/process/SystemCpuOshi.java @@ -8,9 +8,8 @@ import oshi.hardware.CentralProcessor.ProcessorIdentifier; /** - * The Central Processing Unit (CPU) or the processor is the portion of a computer system that - * carries out the instructions of a computer program, and is the primary element carrying out the - * computer's functions. + * The Central Processing Unit (CPU) or the processor is the portion of a computer system that carries out the + * instructions of a computer program, and is the primary element carrying out the computer's functions. */ @Value.Immutable @ProcessStyle @@ -45,8 +44,8 @@ public abstract class SystemCpuOshi implements PropertySet { public abstract String getName(); /** - * Vendor frequency (in Hz), eg. for processor named Intel(R) Core(TM)2 Duo CPU T7300 @ 2.00GHz - * the vendor frequency is 2000000000. + * Vendor frequency (in Hz), eg. for processor named Intel(R) Core(TM)2 Duo CPU T7300 @ 2.00GHz the vendor frequency + * is 2000000000. * * @return Processor frequency, if known */ @@ -54,14 +53,13 @@ public abstract class SystemCpuOshi implements PropertySet { public abstract OptionalLong getVendorFreq(); /** - * Gets the Processor ID. This is a hexidecimal string representing an 8-byte value, normally - * obtained using the CPUID opcode with the EAX register set to 1. The first four bytes are the - * resulting contents of the EAX register, which is the Processor signature, represented in - * human-readable form by {@link #getIdentifier()} . The remaining four bytes are the contents - * of the EDX register, containing feature flags. + * Gets the Processor ID. This is a hexidecimal string representing an 8-byte value, normally obtained using the + * CPUID opcode with the EAX register set to 1. The first four bytes are the resulting contents of the EAX register, + * which is the Processor signature, represented in human-readable form by {@link #getIdentifier()} . 
The remaining + * four bytes are the contents of the EDX register, containing feature flags. * - * NOTE: The order of returned bytes is platform and software dependent. Values may be in either - * Big Endian or Little Endian order. + * NOTE: The order of returned bytes is platform and software dependent. Values may be in either Big Endian or + * Little Endian order. * * @return A string representing the Processor ID */ @@ -87,8 +85,8 @@ public abstract class SystemCpuOshi implements PropertySet { public abstract String getFamily(); /** - * Get the number of logical CPUs available for processing. This value may be higher than - * physical CPUs if hyperthreading is enabled. + * Get the number of logical CPUs available for processing. This value may be higher than physical CPUs if + * hyperthreading is enabled. * * @return The number of logical CPUs available. */ @@ -104,8 +102,7 @@ public abstract class SystemCpuOshi implements PropertySet { public abstract int getPhysicalProcessorCount(); /** - * Get the number of packages/sockets in the system. A single package may contain multiple - * cores. + * Get the number of packages/sockets in the system. A single package may contain multiple cores. * * @return The number of physical packages available. 
*/ @@ -138,18 +135,18 @@ public final void traverse(PropertyVisitor visitor) { public static SystemCpuOshi from(CentralProcessor centralProcessor) { final ProcessorIdentifier identifier = centralProcessor.getProcessorIdentifier(); return ImmutableSystemCpuOshi.builder() - .vendor(identifier.getVendor()) - .name(identifier.getName()) - .processorID(identifier.getProcessorID()) - .stepping(identifier.getStepping()) - .model(identifier.getModel()) - .family(identifier.getFamily()) - .logicalProcessorCount(centralProcessor.getLogicalProcessorCount()) - .physicalProcessorCount(centralProcessor.getPhysicalProcessorCount()) - .physicalPackageCount(centralProcessor.getPhysicalPackageCount()) - .is64bit(identifier.isCpu64bit()) - .vendorFreq(identifier.getVendorFreq() == -1 ? OptionalLong.empty() - : OptionalLong.of(identifier.getVendorFreq())) - .build(); + .vendor(identifier.getVendor()) + .name(identifier.getName()) + .processorID(identifier.getProcessorID()) + .stepping(identifier.getStepping()) + .model(identifier.getModel()) + .family(identifier.getFamily()) + .logicalProcessorCount(centralProcessor.getLogicalProcessorCount()) + .physicalProcessorCount(centralProcessor.getPhysicalProcessorCount()) + .physicalPackageCount(centralProcessor.getPhysicalPackageCount()) + .is64bit(identifier.isCpu64bit()) + .vendorFreq(identifier.getVendorFreq() == -1 ? 
OptionalLong.empty() + : OptionalLong.of(identifier.getVendorFreq())) + .build(); } } diff --git a/DHProcess/src/main/java/io/deephaven/process/SystemInfoOshi.java b/DHProcess/src/main/java/io/deephaven/process/SystemInfoOshi.java index 70a0b341041..761d660effc 100644 --- a/DHProcess/src/main/java/io/deephaven/process/SystemInfoOshi.java +++ b/DHProcess/src/main/java/io/deephaven/process/SystemInfoOshi.java @@ -37,10 +37,10 @@ public void traverse(PropertyVisitor visitor) { public static SystemInfoOshi forCurrentProcess() { final SystemInfo info = new SystemInfo(); return ImmutableSystemInfoOshi.builder() - .operatingSystem(OperatingSystemOshi.from(info.getOperatingSystem())) - .computerSystem(ComputerSystemOshi.from(info.getHardware().getComputerSystem())) - .systemMemory(SystemMemoryOshi.from(info.getHardware().getMemory())) - .systemCpu(SystemCpuOshi.from(info.getHardware().getProcessor())) - .build(); + .operatingSystem(OperatingSystemOshi.from(info.getOperatingSystem())) + .computerSystem(ComputerSystemOshi.from(info.getHardware().getComputerSystem())) + .systemMemory(SystemMemoryOshi.from(info.getHardware().getMemory())) + .systemCpu(SystemCpuOshi.from(info.getHardware().getProcessor())) + .build(); } } diff --git a/DHProcess/src/main/java/io/deephaven/process/SystemMemoryOshi.java b/DHProcess/src/main/java/io/deephaven/process/SystemMemoryOshi.java index 91617c4e63e..d60e5f07c6d 100644 --- a/DHProcess/src/main/java/io/deephaven/process/SystemMemoryOshi.java +++ b/DHProcess/src/main/java/io/deephaven/process/SystemMemoryOshi.java @@ -22,8 +22,8 @@ public abstract class SystemMemoryOshi implements PropertySet { public abstract long getPhysicalTotal(); /** - * The current size of the paging/swap file(s), in bytes. If the paging/swap file can be - * extended, this is a soft limit. + * The current size of the paging/swap file(s), in bytes. If the paging/swap file can be extended, this is a soft + * limit. * * @return Total swap in bytes. 
*/ @@ -47,9 +47,9 @@ public final void traverse(PropertyVisitor visitor) { public static SystemMemoryOshi from(GlobalMemory memory) { return ImmutableSystemMemoryOshi.builder() - .physicalTotal(memory.getTotal()) - .swapTotal(memory.getVirtualMemory().getSwapTotal()) - .pageSize(memory.getPageSize()) - .build(); + .physicalTotal(memory.getTotal()) + .swapTotal(memory.getVirtualMemory().getSwapTotal()) + .pageSize(memory.getPageSize()) + .build(); } } diff --git a/DHProcess/src/main/java/io/deephaven/process/ThreadMxBeanInfo.java b/DHProcess/src/main/java/io/deephaven/process/ThreadMxBeanInfo.java index ebe33afe16a..3bdc0eefc8d 100644 --- a/DHProcess/src/main/java/io/deephaven/process/ThreadMxBeanInfo.java +++ b/DHProcess/src/main/java/io/deephaven/process/ThreadMxBeanInfo.java @@ -9,22 +9,20 @@ @ProcessStyle public abstract class ThreadMxBeanInfo implements PropertySet { - private static final String CURRENT_THREAD_CPU_TIME_SUPPORTED = - "current-thread-cpu-time-supported"; + private static final String CURRENT_THREAD_CPU_TIME_SUPPORTED = "current-thread-cpu-time-supported"; private static final String OBJECT_MONITOR_USAGE_SUPPORTED = "object-monitor-usage-supported"; private static final String SYNCHRONIZER_USAGE_SUPPORTED = "synchronizer-usage-supported"; - private static final String THREAD_CONTENTION_MONITORING_SUPPORTED = - "thread-contention-monitoring-supported"; + private static final String THREAD_CONTENTION_MONITORING_SUPPORTED = "thread-contention-monitoring-supported"; private static final String THREAD_CPU_TIME_SUPPORTED = "thread-cpu-time-supported"; public static ThreadMxBeanInfo of(ThreadMXBean bean) { return ImmutableThreadMxBeanInfo.builder() - .isCurrentThreadCpuTimeSupported(bean.isCurrentThreadCpuTimeSupported()) - .isObjectMonitorUsageSupported(bean.isObjectMonitorUsageSupported()) - .isSynchronizerUsageSupported(bean.isSynchronizerUsageSupported()) - .isThreadContentionMonitoringSupported(bean.isThreadContentionMonitoringSupported()) - 
.isThreadCpuTimeSupported(bean.isThreadCpuTimeSupported()) - .build(); + .isCurrentThreadCpuTimeSupported(bean.isCurrentThreadCpuTimeSupported()) + .isObjectMonitorUsageSupported(bean.isObjectMonitorUsageSupported()) + .isSynchronizerUsageSupported(bean.isSynchronizerUsageSupported()) + .isThreadContentionMonitoringSupported(bean.isThreadContentionMonitoringSupported()) + .isThreadCpuTimeSupported(bean.isThreadCpuTimeSupported()) + .build(); } @Value.Parameter @@ -47,8 +45,7 @@ public final void traverse(PropertyVisitor visitor) { visitor.visit(CURRENT_THREAD_CPU_TIME_SUPPORTED, isCurrentThreadCpuTimeSupported()); visitor.visit(OBJECT_MONITOR_USAGE_SUPPORTED, isObjectMonitorUsageSupported()); visitor.visit(SYNCHRONIZER_USAGE_SUPPORTED, isSynchronizerUsageSupported()); - visitor.visit(THREAD_CONTENTION_MONITORING_SUPPORTED, - isThreadContentionMonitoringSupported()); + visitor.visit(THREAD_CONTENTION_MONITORING_SUPPORTED, isThreadContentionMonitoringSupported()); visitor.visit(THREAD_CPU_TIME_SUPPORTED, isThreadCpuTimeSupported()); } } diff --git a/DHProcess/src/main/java/io/deephaven/process/Wrapped.java b/DHProcess/src/main/java/io/deephaven/process/Wrapped.java index 2c4281fab6d..81dcbacbedb 100644 --- a/DHProcess/src/main/java/io/deephaven/process/Wrapped.java +++ b/DHProcess/src/main/java/io/deephaven/process/Wrapped.java @@ -7,12 +7,12 @@ // or on package/top-level class // This is just an example, adapt to your taste however you like @Value.Style( - // Detect names starting with underscore - typeAbstract = "_*", - // Generate without any suffix, just raw detected name - typeImmutable = "*", - // Make generated public, leave underscored as package private - visibility = ImplementationVisibility.PUBLIC, - defaults = @Value.Immutable(copy = false, builder = false)) + // Detect names starting with underscore + typeAbstract = "_*", + // Generate without any suffix, just raw detected name + typeImmutable = "*", + // Make generated public, leave underscored as 
package private + visibility = ImplementationVisibility.PUBLIC, + defaults = @Value.Immutable(copy = false, builder = false)) @interface Wrapped { } diff --git a/DHProcess/src/main/java/io/deephaven/process/_HostPathInfo.java b/DHProcess/src/main/java/io/deephaven/process/_HostPathInfo.java index b2547bd2102..42f360a7d47 100644 --- a/DHProcess/src/main/java/io/deephaven/process/_HostPathInfo.java +++ b/DHProcess/src/main/java/io/deephaven/process/_HostPathInfo.java @@ -7,9 +7,8 @@ /** * Represents a free-form {@link io.deephaven.properties.PropertySet} that is parsed via - * {@link SplayedPath#toStringMap()} for inclusion at {@link ProcessInfo#getHostPathInfo()}. This - * allows for a variety of use-cases where information can be attached to a host at install, - * upgrade, testing, or other time. + * {@link SplayedPath#toStringMap()} for inclusion at {@link ProcessInfo#getHostPathInfo()}. This allows for a variety + * of use-cases where information can be attached to a host at install, upgrade, testing, or other time. */ @Value.Immutable @Wrapped @@ -17,6 +16,6 @@ abstract class _HostPathInfo extends StringMapWrapper { static HostPathInfo of(SplayedPath splayedPath) throws IOException { return splayedPath.exists() ? HostPathInfo.of(splayedPath.toStringMap()) - : HostPathInfo.of(Collections.emptyMap()); + : HostPathInfo.of(Collections.emptyMap()); } } diff --git a/DHProcess/src/main/java/io/deephaven/properties/PropertySet.java b/DHProcess/src/main/java/io/deephaven/properties/PropertySet.java index 7a886f30284..c6687e77140 100644 --- a/DHProcess/src/main/java/io/deephaven/properties/PropertySet.java +++ b/DHProcess/src/main/java/io/deephaven/properties/PropertySet.java @@ -7,13 +7,12 @@ import java.util.Properties; /** - * A property set represents a set of property keys and values. The key is a String type, and the - * value can be of type int, long, boolean, or String. 
(Note: we may use a stronger type in the - * future for a key, and may expand the types that a value can be.) + * A property set represents a set of property keys and values. The key is a String type, and the value can be of type + * int, long, boolean, or String. (Note: we may use a stronger type in the future for a key, and may expand the types + * that a value can be.) * *

    - * A property set interface present read-only access to the keys and values via - * {@link #traverse(PropertyVisitor)}. + * A property set interface present read-only access to the keys and values via {@link #traverse(PropertyVisitor)}. * * @see PropertyVisitor */ @@ -33,9 +32,9 @@ static PropertySet of(List list) { /** * Traverse this property set and output the property key/values to the given visitor. *

    - * Callers should typically prefer to call {@link PropertyVisitor#visitProperties(PropertySet)}, - * as the inversion of logic allows the visitor (the more stateful object) to potentially - * perform initialization logic and traverse more efficiently. + * Callers should typically prefer to call {@link PropertyVisitor#visitProperties(PropertySet)}, as the inversion of + * logic allows the visitor (the more stateful object) to potentially perform initialization logic and traverse more + * efficiently. * * @param visitor the visitor * @see PropertyVisitor#visitProperties(PropertySet) diff --git a/DHProcess/src/main/java/io/deephaven/properties/PropertyVisitor.java b/DHProcess/src/main/java/io/deephaven/properties/PropertyVisitor.java index c35d3180aac..666ded81ca1 100644 --- a/DHProcess/src/main/java/io/deephaven/properties/PropertyVisitor.java +++ b/DHProcess/src/main/java/io/deephaven/properties/PropertyVisitor.java @@ -12,8 +12,7 @@ // todo: should we implement this interface w/ KeyPath or KeyPath-like keys instead? /** - * A property visitor is the generic interface for reading property keys and values from a - * {@link PropertySet}. + * A property visitor is the generic interface for reading property keys and values from a {@link PropertySet}. * * @see PropertySet */ @@ -75,9 +74,8 @@ static Map toStringMap(PropertySet properties) { void visit(String key, boolean value); /** - * By default, is equivalent to {@code properties.traverse(this)}. Implementations may choose to - * override this method, provided the property set is traversed, and this visitor receives all - * of the updates. + * By default, is equivalent to {@code properties.traverse(this)}. Implementations may choose to override this + * method, provided the property set is traversed, and this visitor receives all of the updates. 
* * @param properties the property set */ @@ -86,66 +84,63 @@ default void visitProperties(PropertySet properties) { } /** - * A helper method that recursively builds up the keys based on the provided key, and the keys - * of the property set. The majority of implementations should not override this. + * A helper method that recursively builds up the keys based on the provided key, and the keys of the property set. + * The majority of implementations should not override this. * * @param key the key * @param properties the property set */ default void visitProperties(String key, PropertySet properties) { new PropertyVisitorPrefixed(key + SEPARATOR, this) - .visitProperties(properties); + .visitProperties(properties); } // note: the following helper methods exhibit poor coding from a traditional sense - but it // makes traverse implementations much cleaner. /** - * A helper method that makes {@link PropertySet#traverse(PropertyVisitor)} implementations - * cleaner. Equivalent to {@code value.ifPresent(x -> visit(key, x))}. Must not be overridden. + * A helper method that makes {@link PropertySet#traverse(PropertyVisitor)} implementations cleaner. Equivalent to + * {@code value.ifPresent(x -> visit(key, x))}. Must not be overridden. * * @param key the key * @param value the optional value */ default void maybeVisit(String key, - @SuppressWarnings("OptionalUsedAsFieldOrParameterType") Optional value) { + @SuppressWarnings("OptionalUsedAsFieldOrParameterType") Optional value) { value.ifPresent(x -> visit(key, x)); } /** - * A helper method that makes {@link PropertySet#traverse(PropertyVisitor)} implementations - * cleaner. Equivalent to {@code value.ifPresent(x -> visit(key, x))}. Must not be overridden. + * A helper method that makes {@link PropertySet#traverse(PropertyVisitor)} implementations cleaner. Equivalent to + * {@code value.ifPresent(x -> visit(key, x))}. Must not be overridden. 
* * @param key the key * @param value the optional value */ - default void maybeVisit(String key, - @SuppressWarnings("OptionalUsedAsFieldOrParameterType") OptionalInt value) { + default void maybeVisit(String key, @SuppressWarnings("OptionalUsedAsFieldOrParameterType") OptionalInt value) { value.ifPresent(x -> visit(key, x)); } /** - * A helper method that makes {@link PropertySet#traverse(PropertyVisitor)} implementations - * cleaner. Equivalent to {@code value.ifPresent(x -> visit(key, x))}. Must not be overridden. + * A helper method that makes {@link PropertySet#traverse(PropertyVisitor)} implementations cleaner. Equivalent to + * {@code value.ifPresent(x -> visit(key, x))}. Must not be overridden. * * @param key the key * @param value the optional value */ - default void maybeVisit(String key, - @SuppressWarnings("OptionalUsedAsFieldOrParameterType") OptionalLong value) { + default void maybeVisit(String key, @SuppressWarnings("OptionalUsedAsFieldOrParameterType") OptionalLong value) { value.ifPresent(x -> visit(key, x)); } /** - * A helper method that makes {@link PropertySet#traverse(PropertyVisitor)} implementations - * cleaner. Equivalent to {@code properties.ifPresent(x -> visitProperties(key, x))}. Must not - * be overridden. + * A helper method that makes {@link PropertySet#traverse(PropertyVisitor)} implementations cleaner. Equivalent to + * {@code properties.ifPresent(x -> visitProperties(key, x))}. Must not be overridden. 
* * @param key the key * @param properties the optional value */ default void maybeVisitProperties(String key, - @SuppressWarnings("OptionalUsedAsFieldOrParameterType") Optional properties) { + @SuppressWarnings("OptionalUsedAsFieldOrParameterType") Optional properties) { properties.ifPresent(x -> visitProperties(key, x)); } } diff --git a/DHProcess/src/main/java/io/deephaven/properties/PropertyVisitorError.java b/DHProcess/src/main/java/io/deephaven/properties/PropertyVisitorError.java index d9d70c98e61..3a524d0ea5f 100644 --- a/DHProcess/src/main/java/io/deephaven/properties/PropertyVisitorError.java +++ b/DHProcess/src/main/java/io/deephaven/properties/PropertyVisitorError.java @@ -1,9 +1,9 @@ package io.deephaven.properties; /** - * A {@link PropertyVisitor} whose methods all throw {@link IllegalStateException} with the relevant - * key and value context. Meant to be a utility class that other {@link PropertyVisitor}s can use, - * for example in the case of parsing an unknown key. + * A {@link PropertyVisitor} whose methods all throw {@link IllegalStateException} with the relevant key and value + * context. Meant to be a utility class that other {@link PropertyVisitor}s can use, for example in the case of parsing + * an unknown key. */ public enum PropertyVisitorError implements PropertyVisitor { INSTANCE; @@ -14,28 +14,23 @@ public enum PropertyVisitorError implements PropertyVisitor { public void visit(String key, String value) { if (value.length() > MAX_UNTRUNCATED_STRING_LENGTH) { throw new IllegalStateException( - String.format("Unexpected key/string-value: %s='%s...' (truncated)", key, - value.substring(0, 128))); + String.format("Unexpected key/string-value: %s='%s...' 
(truncated)", key, value.substring(0, 128))); } - throw new IllegalStateException( - String.format("Unexpected key/string-value: %s='%s'", key, value)); + throw new IllegalStateException(String.format("Unexpected key/string-value: %s='%s'", key, value)); } @Override public void visit(String key, int value) { - throw new IllegalStateException( - String.format("Unexpected key/int-value: %s=%d", key, value)); + throw new IllegalStateException(String.format("Unexpected key/int-value: %s=%d", key, value)); } @Override public void visit(String key, long value) { - throw new IllegalStateException( - String.format("Unexpected key/long-value: %s=%dL", key, value)); + throw new IllegalStateException(String.format("Unexpected key/long-value: %s=%dL", key, value)); } @Override public void visit(String key, boolean value) { - throw new IllegalStateException( - String.format("Unexpected key/boolean-value: %s=%b", key, value)); + throw new IllegalStateException(String.format("Unexpected key/boolean-value: %s=%b", key, value)); } } diff --git a/DHProcess/src/main/java/io/deephaven/properties/PropertyVisitorPrefixed.java b/DHProcess/src/main/java/io/deephaven/properties/PropertyVisitorPrefixed.java index 235cf3644b1..48a275362f0 100644 --- a/DHProcess/src/main/java/io/deephaven/properties/PropertyVisitorPrefixed.java +++ b/DHProcess/src/main/java/io/deephaven/properties/PropertyVisitorPrefixed.java @@ -41,6 +41,6 @@ public void visit(String key, boolean value) { @Override public void visitProperties(String key, PropertySet properties) { new PropertyVisitorPrefixed(prefix + key + SEPARATOR, delegate) - .visitProperties(properties); + .visitProperties(properties); } } diff --git a/DHProcess/src/main/java/io/deephaven/properties/PropertyVisitorStringBase.java b/DHProcess/src/main/java/io/deephaven/properties/PropertyVisitorStringBase.java index 3be3f334305..6d05cdec4c2 100644 --- a/DHProcess/src/main/java/io/deephaven/properties/PropertyVisitorStringBase.java +++ 
b/DHProcess/src/main/java/io/deephaven/properties/PropertyVisitorStringBase.java @@ -4,8 +4,7 @@ import java.util.function.BiConsumer; /** - * An {@link PropertyVisitor} base which converts the non-String valued calls into - * {@link #visit(String, String)}. + * An {@link PropertyVisitor} base which converts the non-String valued calls into {@link #visit(String, String)}. */ public abstract class PropertyVisitorStringBase implements PropertyVisitor { @@ -43,8 +42,7 @@ public final void visit(String key, boolean value) { } /** - * Adapts a String {@link BiConsumer} into a {@link PropertyVisitor} via - * {@link PropertyVisitorStringBase}. + * Adapts a String {@link BiConsumer} into a {@link PropertyVisitor} via {@link PropertyVisitorStringBase}. */ public static class BiConsumerStringImpl extends PropertyVisitorStringBase { private final BiConsumer consumer; diff --git a/DHProcess/src/main/java/io/deephaven/properties/PropertyVisitorStripPrefix.java b/DHProcess/src/main/java/io/deephaven/properties/PropertyVisitorStripPrefix.java index 489557063ed..9e784350a9e 100644 --- a/DHProcess/src/main/java/io/deephaven/properties/PropertyVisitorStripPrefix.java +++ b/DHProcess/src/main/java/io/deephaven/properties/PropertyVisitorStripPrefix.java @@ -3,12 +3,12 @@ import java.util.Objects; /** - * A property visitor which expects and strips a given prefix from all keys, and then delegates to - * the given {@link PropertyVisitor}. + * A property visitor which expects and strips a given prefix from all keys, and then delegates to the given + * {@link PropertyVisitor}. * - * For example, if the delegate knows how to parse the property keys {@code foo} and {@code bar}, - * but the properties are currently in a different context {@code parent.foo} and - * {@code parent.bar}, a {@link PropertyVisitorStripPrefix} can be provide with prefix "parent.". 
+ * For example, if the delegate knows how to parse the property keys {@code foo} and {@code bar}, but the properties are + * currently in a different context {@code parent.foo} and {@code parent.bar}, a {@link PropertyVisitorStripPrefix} can + * be provide with prefix "parent.". */ class PropertyVisitorStripPrefix implements PropertyVisitor { private final String prefix; @@ -24,8 +24,7 @@ class PropertyVisitorStripPrefix implements PropertyVisitor { private String strip(String key) { if (!key.startsWith(prefix)) { - throw new IllegalArgumentException( - String.format("Key '%s' does not start with prefix '%s'", key, prefix)); + throw new IllegalArgumentException(String.format("Key '%s' does not start with prefix '%s'", key, prefix)); } return key.substring(prefix.length()); } diff --git a/DHProcess/src/main/java/io/deephaven/properties/SplayedPath.java b/DHProcess/src/main/java/io/deephaven/properties/SplayedPath.java index a70535ff615..1b3864fa19e 100644 --- a/DHProcess/src/main/java/io/deephaven/properties/SplayedPath.java +++ b/DHProcess/src/main/java/io/deephaven/properties/SplayedPath.java @@ -19,17 +19,15 @@ import java.util.stream.Stream; /** - * A splayed path is a {@link PropertySet} where the fully resolved file paths represent property - * keys and the corresponding contents of each file represents the property value. It is meant to - * represent a standardized interface for property keys and values that can be read or written to - * from a variety of tools. It is straightforward in the sense that there is no order-dependent - * parsing logic. + * A splayed path is a {@link PropertySet} where the fully resolved file paths represent property keys and the + * corresponding contents of each file represents the property value. It is meant to represent a standardized interface + * for property keys and values that can be read or written to from a variety of tools. It is straightforward in the + * sense that there is no order-dependent parsing logic. 
*/ public class SplayedPath { private static final char FS_SEPARATOR = '/'; - private static final Pattern SEPARATOR_PATTERN = - Pattern.compile(Character.toString(SEPARATOR), Pattern.LITERAL); + private static final Pattern SEPARATOR_PATTERN = Pattern.compile(Character.toString(SEPARATOR), Pattern.LITERAL); private static final String VALUE_NAME = "__value"; private static final Path VALUE_PATH = Paths.get(VALUE_NAME); @@ -38,20 +36,18 @@ public class SplayedPath { private final boolean trim; /** - * Property keys are essentially "flat" keys, and that doesn't mesh well with a filesystem - * directory/file structure when one key is a prefix of another key. For example, the JVM might - * have the system properties {@code file.encoding} and {@code file.encoding.pkg}. We can't have - * both {@code /file/encoding} and {@code /file/encoding/pkg} as files. To work - * around this, we can append a specific filename to the filesystem paths as such: - * {@code /file/encoding/__value} and {@code /file/encoding/pkg/__value}. + * Property keys are essentially "flat" keys, and that doesn't mesh well with a filesystem directory/file structure + * when one key is a prefix of another key. For example, the JVM might have the system properties + * {@code file.encoding} and {@code file.encoding.pkg}. We can't have both {@code /file/encoding} and + * {@code /file/encoding/pkg} as files. To work around this, we can append a specific filename to the + * filesystem paths as such: {@code /file/encoding/__value} and {@code /file/encoding/pkg/__value}. * *

    - * If writing/reading unrestricted properties (such as system properties), a value based - * approach should be taken. + * If writing/reading unrestricted properties (such as system properties), a value based approach should be taken. * *

    - * If writing/reading restricted properties (ie, if we place a no-prefix restriction on - * application properties), a non-value based approach can be taken. + * If writing/reading restricted properties (ie, if we place a no-prefix restriction on application properties), a + * non-value based approach can be taken. */ private final boolean isValueBased; @@ -61,8 +57,8 @@ public SplayedPath(Path path, boolean trim, boolean isValueBased) { this.path = Objects.requireNonNull(path); if (!path.getFileSystem().equals(FileSystems.getDefault())) { throw new UnsupportedOperationException(String.format( - "Expected path to be a default filesystem path. Instead is: %s", - path.getFileSystem())); + "Expected path to be a default filesystem path. Instead is: %s", + path.getFileSystem())); } this.trim = trim; this.isValueBased = isValueBased; @@ -77,8 +73,7 @@ public boolean exists() { private void check() throws IOException { if (!exists()) { - throw new IOException( - String.format("Path does not exist, or is not a directory: '%s'", path)); + throw new IOException(String.format("Path does not exist, or is not a directory: '%s'", path)); } } @@ -93,8 +88,7 @@ public void write(PropertySet properties) throws IOException { // If we need more fine-grained write access in the future, we may want to expose this. /** - * A {@link PropertyVisitor} writer to the splayed path. The visitor may throw - * {@link UncheckedIOException}s. + * A {@link PropertyVisitor} writer to the splayed path. The visitor may throw {@link UncheckedIOException}s. * *

    * Prefer {@link #write(PropertySet)} if applicable. @@ -145,7 +139,7 @@ public void readTo(PropertyVisitor visitor) throws IOException { continue; } final BasicFileAttributes attributes = Files - .readAttributes(key, BasicFileAttributes.class); + .readAttributes(key, BasicFileAttributes.class); if (!attributes.isRegularFile()) { continue; } @@ -160,8 +154,7 @@ public void readTo(PropertyVisitor visitor) throws IOException { // If we need more fine-grained read access in the future, we may want to expose this. /** - * A {@link PropertySet} reader of the splayed path. The property set may throw a - * {@link UncheckedIOException}s. + * A {@link PropertySet} reader of the splayed path. The property set may throw a {@link UncheckedIOException}s. * *

    * Prefer {@link #readTo(PropertyVisitor)} if applicable. @@ -169,8 +162,8 @@ public void readTo(PropertyVisitor visitor) throws IOException { * @return the property set */ /* - * public PropertySet asUnsafePropertySet() { return visitor -> { try { readTo(visitor); } catch - * (IOException e) { throw new UncheckedIOException(e); } }; } + * public PropertySet asUnsafePropertySet() { return visitor -> { try { readTo(visitor); } catch (IOException e) { + * throw new UncheckedIOException(e); } }; } */ // -------------------------------------------------------------------------------------------- @@ -180,22 +173,21 @@ private byte[] toBytes(String value) { } private String toString(byte[] bytes) { - return trim ? new String(bytes, StandardCharsets.UTF_8).trim() - : new String(bytes, StandardCharsets.UTF_8); + return trim ? new String(bytes, StandardCharsets.UTF_8).trim() : new String(bytes, StandardCharsets.UTF_8); } private String pathToString(Path key) { if (isValueBased) { if (!key.endsWith(VALUE_PATH)) { throw new IllegalStateException( - String.format("Expected path to be a value path, is not: '%s'", key)); + String.format("Expected path to be a value path, is not: '%s'", key)); } key = key.getParent(); } final String relative = path.relativize(key).toString(); if (relative.indexOf(SEPARATOR) != -1) { throw new IllegalStateException(String.format( - "Unable to translate path that has '%s' in it.", SEPARATOR)); + "Unable to translate path that has '%s' in it.", SEPARATOR)); } return relative.replace(FS_SEPARATOR, SEPARATOR); } diff --git a/DataStructures/src/main/java/io/deephaven/datastructures/util/CollectionUtil.java b/DataStructures/src/main/java/io/deephaven/datastructures/util/CollectionUtil.java index 853b942a3f8..f56f1476b0b 100644 --- a/DataStructures/src/main/java/io/deephaven/datastructures/util/CollectionUtil.java +++ b/DataStructures/src/main/java/io/deephaven/datastructures/util/CollectionUtil.java @@ -30,8 +30,7 @@ public class CollectionUtil { 
public static final String[][] ZERO_LENGTH_STRING_ARRAY_ARRAY = new String[0][]; // ---------------------------------------------------------------- - public static Map unmodifiableMapFromArray(Class typeK, Class typeV, - Object... data) { + public static Map unmodifiableMapFromArray(Class typeK, Class typeV, Object... data) { try { return Collections.unmodifiableMap(mapFromArray(typeK, typeV, data)); } catch (RequirementFailure e) { @@ -65,8 +64,8 @@ public static Map mapFromArray(Class typeK, Class typeV, Obje // ---------------------------------------------------------------- @SuppressWarnings({"unchecked"}) - public static Map mapFromArray(Class typeK, Class typeV, - final boolean allowDuplicateKeys, Object... data) { + public static Map mapFromArray(Class typeK, Class typeV, final boolean allowDuplicateKeys, + Object... data) { Require.neqNull(data, "data"); Require.requirement(0 == data.length % 2, "0==data.length%2"); Map map = newSizedLinkedHashMap(data.length / 2); @@ -76,8 +75,8 @@ public static Map mapFromArray(Class typeK, Class typeV, Require.instanceOf(key, "key", typeK); } if (!allowDuplicateKeys) { - Require.requirement(false == map.containsKey(key), - "false==map.containsKey(data[nIndex])", key, "data[nIndex]"); + Require.requirement(false == map.containsKey(key), "false==map.containsKey(data[nIndex])", key, + "data[nIndex]"); } Object value = data[nIndex + 1]; if (null != value) { @@ -95,8 +94,7 @@ public static Map invertMap(Map sourceMap) { for (Map.Entry entry : sourceMap.entrySet()) { K key = entry.getValue(); V value = entry.getKey(); - Require.requirement(false == targetMap.containsKey(key), - "false==targetMap.containsKey(key)", key, "key"); + Require.requirement(false == targetMap.containsKey(key), "false==targetMap.containsKey(key)", key, "key"); targetMap.put(key, value); } return targetMap; @@ -113,12 +111,10 @@ public static Set setFromArray(E... 
data) { } // ---------------------------------------------------------------- - public static Set setFromArray(@NotNull final Class type, - @NotNull final Object... data) { + public static Set setFromArray(@NotNull final Class type, @NotNull final Object... data) { final Set set = newSizedLinkedHashSet(data.length); for (final Object elem : data) { - Require.requirement(elem == null || type.isInstance(elem), - "elem == null || type.isInstance(elem)"); + Require.requirement(elem == null || type.isInstance(elem), "elem == null || type.isInstance(elem)"); // noinspection unchecked Require.requirement(set.add((TYPE) elem), "set.add((TYPE)elem)"); } @@ -137,8 +133,8 @@ public static List listFromArray(E... data) { // ---------------------------------------------------------------- /** - * Returns an empty {@link HashMap} with a big enough capacity such that the given number of - * entries can be added without resizing. + * Returns an empty {@link HashMap} with a big enough capacity such that the given number of entries can be added + * without resizing. */ public static Map newSizedHashMap(int nEntries) { return new HashMap((nEntries + 1) * 4 / 3); @@ -146,8 +142,8 @@ public static Map newSizedHashMap(int nEntries) { // ---------------------------------------------------------------- /** - * Returns an empty {@link LinkedHashMap} with a big enough capacity such that the given number - * of entries can be added without resizing. + * Returns an empty {@link LinkedHashMap} with a big enough capacity such that the given number of entries can be + * added without resizing. */ public static Map newSizedLinkedHashMap(int nEntries) { return new LinkedHashMap((nEntries + 1) * 4 / 3); @@ -155,8 +151,8 @@ public static Map newSizedLinkedHashMap(int nEntries) { // ---------------------------------------------------------------- /** - * Returns an empty {@link HashSet} with a big enough capacity such that the given number of - * entries can be added without resizing. 
+ * Returns an empty {@link HashSet} with a big enough capacity such that the given number of entries can be added + * without resizing. */ public static Set newSizedHashSet(int nEntries) { return new HashSet((nEntries + 1) * 4 / 3); @@ -164,8 +160,8 @@ public static Set newSizedHashSet(int nEntries) { // ---------------------------------------------------------------- /** - * Returns an empty {@link LinkedHashSet} with a big enough capacity such that the given number - * of entries can be added without resizing. + * Returns an empty {@link LinkedHashSet} with a big enough capacity such that the given number of entries can be + * added without resizing. */ public static Set newSizedLinkedHashSet(int nEntries) { return new LinkedHashSet((nEntries + 1) * 4 / 3); diff --git a/DataStructures/src/main/java/io/deephaven/datastructures/util/SmartKey.java b/DataStructures/src/main/java/io/deephaven/datastructures/util/SmartKey.java index 2f8e6a65ea9..ac862b3ae83 100644 --- a/DataStructures/src/main/java/io/deephaven/datastructures/util/SmartKey.java +++ b/DataStructures/src/main/java/io/deephaven/datastructures/util/SmartKey.java @@ -80,8 +80,7 @@ public Object get(int position) { return values_[position]; } - // A bit of nastiness and interface pollution so we can reuse the same key and array in a lower - // garbage way + // A bit of nastiness and interface pollution so we can reuse the same key and array in a lower garbage way public void updateHashCode() { hashCode_ = HashCodeUtil.createHashCode(values_); } diff --git a/DbTypes/src/main/java/io/deephaven/dbtypes/DbImage.java b/DbTypes/src/main/java/io/deephaven/dbtypes/DbImage.java index ca6f1bafba3..bf61e91244f 100644 --- a/DbTypes/src/main/java/io/deephaven/dbtypes/DbImage.java +++ b/DbTypes/src/main/java/io/deephaven/dbtypes/DbImage.java @@ -19,31 +19,28 @@ enum ImageScalingAlgorithm { DEFAULT(java.awt.Image.SCALE_DEFAULT), /** - * Image-scaling algorithm that gives higher priority to scaling speed than smoothness of 
- * the scaled image. + * Image-scaling algorithm that gives higher priority to scaling speed than smoothness of the scaled image. */ FAST(java.awt.Image.SCALE_FAST), /** - * Image-scaling algorithm that gives higher priority to image smoothness than scaling - * speed. + * Image-scaling algorithm that gives higher priority to image smoothness than scaling speed. */ SMOOTH(java.awt.Image.SCALE_SMOOTH), /** - * Image scaling algorithm embodied in the ReplicateScaleFilter class. The - * Image object is free to substitute a different filter that performs the same - * algorithm yet integrates more efficiently into the imaging infrastructure supplied by the - * toolkit. + * Image scaling algorithm embodied in the ReplicateScaleFilter class. The Image + * object is free to substitute a different filter that performs the same algorithm yet integrates more + * efficiently into the imaging infrastructure supplied by the toolkit. * * @see java.awt.image.ReplicateScaleFilter */ REPLICATE(java.awt.Image.SCALE_REPLICATE), /** - * Area Averaging image scaling algorithm. The image object is free to substitute a - * different filter that performs the same algorithm yet integrates more efficiently into - * the image infrastructure supplied by the toolkit. + * Area Averaging image scaling algorithm. The image object is free to substitute a different filter that + * performs the same algorithm yet integrates more efficiently into the image infrastructure supplied by the + * toolkit. * * @see java.awt.image.AreaAveragingScaleFilter */ @@ -103,8 +100,7 @@ int get() { Color getColor(final int x, final int y); /** - * Gets the red component in the range 0-255 in the default sRGB space for the pixel located at - * (x,y). + * Gets the red component in the range 0-255 in the default sRGB space for the pixel located at (x,y). * * @param x x-location in pixels. * @param y y-location in pixels. 
@@ -113,8 +109,7 @@ int get() { int getRed(final int x, final int y); /** - * Gets the green component in the range 0-255 in the default sRGB space for the pixel located - * at (x,y). + * Gets the green component in the range 0-255 in the default sRGB space for the pixel located at (x,y). * * @param x x-location in pixels. * @param y y-location in pixels. @@ -123,8 +118,7 @@ int get() { int getGreen(final int x, final int y); /** - * Gets the blue component in the range 0-255 in the default sRGB space for the pixel located at - * (x,y). + * Gets the blue component in the range 0-255 in the default sRGB space for the pixel located at (x,y). * * @param x x-location in pixels. * @param y y-location in pixels. @@ -133,8 +127,7 @@ int get() { int getBlue(final int x, final int y); /** - * Gets the gray-scale value in the range 0-255 in the default sRGB space for the pixel located - * at (x,y). + * Gets the gray-scale value in the range 0-255 in the default sRGB space for the pixel located at (x,y). * * @param x x-location in pixels. * @param y y-location in pixels. @@ -178,8 +171,7 @@ default DbImage resize(final int width, final int height) { * * @param width new image width in pixels. * @param height new image height in pixels. - * @param algo algorithm used to rescale the image. null causes the default to be - * usedl. + * @param algo algorithm used to rescale the image. null causes the default to be usedl. * @return resized image. 
*/ DbImage resize(final int width, final int height, final ImageScalingAlgorithm algo); diff --git a/DbTypes/src/main/java/io/deephaven/dbtypes/FactoryInstances.java b/DbTypes/src/main/java/io/deephaven/dbtypes/FactoryInstances.java index 0fb46616add..04915a0c8c7 100644 --- a/DbTypes/src/main/java/io/deephaven/dbtypes/FactoryInstances.java +++ b/DbTypes/src/main/java/io/deephaven/dbtypes/FactoryInstances.java @@ -11,8 +11,7 @@ private FactoryInstances() {} static { try { - final String implClassName = - Configuration.getInstance().getProperty("DbTypes.DbFile.impl"); + final String implClassName = Configuration.getInstance().getProperty("DbTypes.DbFile.impl"); final Class c = Class.forName(implClassName); final Constructor constructor = c.getConstructor(); FILE_FACTORY = (DbFileFactory) constructor.newInstance(); @@ -29,8 +28,7 @@ static DbFileFactory getFileFactory() { static { try { - final String implClassName = - Configuration.getInstance().getProperty("DbTypes.DbImage.impl"); + final String implClassName = Configuration.getInstance().getProperty("DbTypes.DbImage.impl"); final Class c = Class.forName(implClassName); final Constructor constructor = c.getConstructor(); IMAGE_FACTORY = (DbImageFactory) constructor.newInstance(); diff --git a/DbTypes/src/test/java/io/deephaven/dbtypes/DbImageTest.java b/DbTypes/src/test/java/io/deephaven/dbtypes/DbImageTest.java index d27f7c9cc66..5986de9f149 100644 --- a/DbTypes/src/test/java/io/deephaven/dbtypes/DbImageTest.java +++ b/DbTypes/src/test/java/io/deephaven/dbtypes/DbImageTest.java @@ -9,7 +9,7 @@ public class DbImageTest extends BaseArrayTestCase { final private String fileName = Configuration.getInstance().getDevRootPath() - + "/DbTypesImpl/src/test/resources/io/deephaven/dbtypes/white.jpg"; + + "/DbTypesImpl/src/test/resources/io/deephaven/dbtypes/white.jpg"; public void testNewInstanceByteArray() throws IOException { final DbImage f = DbImage.newInstance(ImageIO.read(new java.io.File(fileName))); diff --git 
a/DbTypesImpl/src/main/java/io/deephaven/dbtypes/DbImageImpl.java b/DbTypesImpl/src/main/java/io/deephaven/dbtypes/DbImageImpl.java index 01b252b88d1..e411e8390f8 100644 --- a/DbTypesImpl/src/main/java/io/deephaven/dbtypes/DbImageImpl.java +++ b/DbTypesImpl/src/main/java/io/deephaven/dbtypes/DbImageImpl.java @@ -100,7 +100,7 @@ public String getType() { public byte[] getBytes() { if (bytes == null && image == null) { throw new IllegalStateException( - "Bytes and Image are both null. The zero argument constructor should only be used during deserialization."); + "Bytes and Image are both null. The zero argument constructor should only be used during deserialization."); } if (bytes == null) { @@ -122,7 +122,7 @@ public byte[] getBytes() { public BufferedImage getBufferedImage() { if (bytes == null && image == null) { throw new IllegalStateException( - "Bytes and Image are both null. The zero argument constructor should only be used during deserialization."); + "Bytes and Image are both null. The zero argument constructor should only be used during deserialization."); } if (image == null) { @@ -187,8 +187,7 @@ public void write(String file) throws IOException { @Override public void write(final String formatName, final String file) throws IOException { if (!ImageIO.write(getBufferedImage(), formatName, new java.io.File(file))) { - throw new IllegalArgumentException( - "No appropriate image writer found. formatName=" + formatName); + throw new IllegalArgumentException("No appropriate image writer found. formatName=" + formatName); } } @@ -201,7 +200,7 @@ public DbImageImpl subImage(final int x, final int y, final int w, final int h) @Override public DbImageImpl resize(final int width, final int height, final ImageScalingAlgorithm algo) { java.awt.Image tmp = getBufferedImage().getScaledInstance(width, height, - algo == null ? ImageScalingAlgorithm.DEFAULT.get() : algo.get()); + algo == null ? 
ImageScalingAlgorithm.DEFAULT.get() : algo.get()); BufferedImage resized = new BufferedImage(width, height, BufferedImage.TYPE_INT_ARGB); Graphics2D g2d = resized.createGraphics(); g2d.drawImage(tmp, 0, 0, null); @@ -272,17 +271,15 @@ public DbImage newInstance(BufferedImage image) { * * @param image input image. * @param formatName output format name (e.g. "JPEG") - * @return byte array representation of the image. This is the same as the contents of an image - * file of the specified type. + * @return byte array representation of the image. This is the same as the contents of an image file of the + * specified type. * @throws IOException problem loading the image. */ - static byte[] image2Bytes(@NotNull final BufferedImage image, final String formatName) - throws IOException { + static byte[] image2Bytes(@NotNull final BufferedImage image, final String formatName) throws IOException { final ByteArrayOutputStream bos = new ByteArrayOutputStream(); if (!ImageIO.write(image, formatName, bos)) { - throw new IllegalArgumentException( - "No appropriate image writer found. formatName=" + formatName); + throw new IllegalArgumentException("No appropriate image writer found. 
formatName=" + formatName); } bos.close(); diff --git a/DbTypesImpl/src/test/java/io/deephaven/dbtypes/DbFileImplTest.java b/DbTypesImpl/src/test/java/io/deephaven/dbtypes/DbFileImplTest.java index 542d0c810bd..048146714a6 100644 --- a/DbTypesImpl/src/test/java/io/deephaven/dbtypes/DbFileImplTest.java +++ b/DbTypesImpl/src/test/java/io/deephaven/dbtypes/DbFileImplTest.java @@ -11,7 +11,7 @@ public class DbFileImplTest extends BaseArrayTestCase { final private String file = Configuration.getInstance().getDevRootPath() - + "/DbTypesImpl/src/test/resources/io/deephaven/dbtypes/white.jpg"; + + "/DbTypesImpl/src/test/resources/io/deephaven/dbtypes/white.jpg"; final private String tempdir = Configuration.getInstance().getTempPath("DBFiletest"); public void testNullConstructors() throws IOException { @@ -80,8 +80,7 @@ public void testSerialization() throws IOException, ClassNotFoundException { final ObjectOutputStream oos = new ObjectOutputStream(bos); oos.writeObject(f); oos.close(); - final ObjectInputStream ois = - new ObjectInputStream(new ByteArrayInputStream(bos.toByteArray())); + final ObjectInputStream ois = new ObjectInputStream(new ByteArrayInputStream(bos.toByteArray())); final DbFileImpl fout = (DbFileImpl) ois.readObject(); assertEquals(f.getName(), fout.getName()); assertEquals(f.getType(), fout.getType()); diff --git a/DbTypesImpl/src/test/java/io/deephaven/dbtypes/DbImageImplTest.java b/DbTypesImpl/src/test/java/io/deephaven/dbtypes/DbImageImplTest.java index 9fa08230dbf..72119063e05 100644 --- a/DbTypesImpl/src/test/java/io/deephaven/dbtypes/DbImageImplTest.java +++ b/DbTypesImpl/src/test/java/io/deephaven/dbtypes/DbImageImplTest.java @@ -11,7 +11,7 @@ public class DbImageImplTest extends BaseArrayTestCase { final private String file = Configuration.getInstance().getDevRootPath() - + "/DbTypesImpl/src/test/resources/io/deephaven/dbtypes/white.jpg"; + + "/DbTypesImpl/src/test/resources/io/deephaven/dbtypes/white.jpg"; private void assertEquals(DbImage 
image1, DbImage image2) { assertEquals(image1.getName(), image2.getName()); @@ -181,8 +181,7 @@ public void testSerialization() throws IOException, ClassNotFoundException { final ObjectOutputStream oos = new ObjectOutputStream(bos); oos.writeObject(image); oos.close(); - final ObjectInputStream ois = - new ObjectInputStream(new ByteArrayInputStream(bos.toByteArray())); + final ObjectInputStream ois = new ObjectInputStream(new ByteArrayInputStream(bos.toByteArray())); final DbImageImpl img = (DbImageImpl) ois.readObject(); assertNotNull(img); assertEquals(image, img); diff --git a/FishUtil/src/main/java/io/deephaven/util/DateUtil.java b/FishUtil/src/main/java/io/deephaven/util/DateUtil.java index 8ea984b9ce5..a0a6d94adc6 100644 --- a/FishUtil/src/main/java/io/deephaven/util/DateUtil.java +++ b/FishUtil/src/main/java/io/deephaven/util/DateUtil.java @@ -54,33 +54,27 @@ public class DateUtil { /** Three letter abbreviations of month names. (Jan==1, title case) */ public static final String[] MONTH_ABBREVIATIONS_3T = - {"Xxx", "Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"}; + {"Xxx", "Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"}; /** Three letter abbreviations of month names. (Jan==1, upper case) */ public static final String[] MONTH_ABBREVIATIONS_3U = - {"XXX", "JAN", "FEB", "MAR", "APR", "MAY", "JUN", "JUL", "AUG", "SEP", "OCT", "NOV", "DEC"}; + {"XXX", "JAN", "FEB", "MAR", "APR", "MAY", "JUN", "JUL", "AUG", "SEP", "OCT", "NOV", "DEC"}; /** Three letter abbreviations of month names. (Jan==1, lower case) */ public static final String[] MONTH_ABBREVIATIONS_3L = - {"xxx", "jan", "feb", "mar", "apr", "may", "jun", "jul", "aug", "sep", "oct", "nov", "dec"}; + {"xxx", "jan", "feb", "mar", "apr", "may", "jun", "jul", "aug", "sep", "oct", "nov", "dec"}; // some useful formatting objects /** Formats a year in YYYY format. 
*/ - private static final DateFormat ms_dateFormatYear = - new ThreadSafeDateFormat(new SimpleDateFormat("yyyy")); + private static final DateFormat ms_dateFormatYear = new ThreadSafeDateFormat(new SimpleDateFormat("yyyy")); /** Formats a month in MM format. */ - private static final DateFormat ms_dateFormatMonth = - new ThreadSafeDateFormat(new SimpleDateFormat("MM")); + private static final DateFormat ms_dateFormatMonth = new ThreadSafeDateFormat(new SimpleDateFormat("MM")); /** Formats a day in DD format. */ - private static final DateFormat ms_dateFormatDay = - new ThreadSafeDateFormat(new SimpleDateFormat("dd")); + private static final DateFormat ms_dateFormatDay = new ThreadSafeDateFormat(new SimpleDateFormat("dd")); - private static final DateFormat ms_dateFormatHour = - new ThreadSafeDateFormat(new SimpleDateFormat("HH")); - private static final DateFormat ms_dateFormatMinute = - new ThreadSafeDateFormat(new SimpleDateFormat("mm")); - private static final DateFormat ms_dateFormatSecond = - new ThreadSafeDateFormat(new SimpleDateFormat("ss")); + private static final DateFormat ms_dateFormatHour = new ThreadSafeDateFormat(new SimpleDateFormat("HH")); + private static final DateFormat ms_dateFormatMinute = new ThreadSafeDateFormat(new SimpleDateFormat("mm")); + private static final DateFormat ms_dateFormatSecond = new ThreadSafeDateFormat(new SimpleDateFormat("ss")); /** @@ -163,8 +157,7 @@ public static String getDateAsYYYYMM(Date date) { */ public static String getDateAsYYYYMMDD(Date date) { Require.neqNull(date, "date"); - return ms_dateFormatYear.format(date) + ms_dateFormatMonth.format(date) - + ms_dateFormatDay.format(date); + return ms_dateFormatYear.format(date) + ms_dateFormatMonth.format(date) + ms_dateFormatDay.format(date); } // ---------------------------------------------------------------- @@ -182,8 +175,8 @@ public static String getDateAsYYYYMMDD(long timeInMillis) { */ public static String getDateAsYYYYdMMdDDTHHcMMcSS(Date date) { return 
ms_dateFormatYear.format(date) + "-" + ms_dateFormatMonth.format(date) + "-" - + ms_dateFormatDay.format(date) + "T" + ms_dateFormatHour.format(date) + ":" - + ms_dateFormatMinute.format(date) + ":" + ms_dateFormatSecond.format(date); + + ms_dateFormatDay.format(date) + "T" + ms_dateFormatHour.format(date) + ":" + + ms_dateFormatMinute.format(date) + ":" + ms_dateFormatSecond.format(date); } // ---------------------------------------------------------------- @@ -202,8 +195,7 @@ public static String getDateAsYYYYdMMdDDTHHcMMcSS(long timeInMillis) { */ public static String getDateAsMMDDYYYY(Date date) { Require.neqNull(date, "date"); - return ms_dateFormatMonth.format(date) + ms_dateFormatDay.format(date) - + ms_dateFormatYear.format(date); + return ms_dateFormatMonth.format(date) + ms_dateFormatDay.format(date) + ms_dateFormatYear.format(date); } // ---------------------------------------------------------------- @@ -233,27 +225,24 @@ public static String getYyyymmddIntAsPath(int nDateYyyymmdd) { // ---------------------------------------------------------------- /** - * Gets the download path, in [DownloadBaseDir]/sDataSubdir/YYYY/YYYYMM/YYYYMMDD format given a - * date (local timezone). + * Gets the download path, in [DownloadBaseDir]/sDataSubdir/YYYY/YYYYMM/YYYYMMDD format given a date (local + * timezone). 
*/ - public static String getDateDownloadPath(PropertyFile configuration, String sDataSubdir, - Date date) { + public static String getDateDownloadPath(PropertyFile configuration, String sDataSubdir, Date date) { Require.nonempty(sDataSubdir, "sDataSubdir"); Require.neqNull(date, "date"); - return configuration.getProperty("DownloadBaseDir") + DIR_SEP + sDataSubdir + DIR_SEP - + getDateAsPath(date); + return configuration.getProperty("DownloadBaseDir") + DIR_SEP + sDataSubdir + DIR_SEP + getDateAsPath(date); } // ---------------------------------------------------------------- /** - * Gets the download path, in [DownloadBaseDir]/sDataSubdir/YYYY/YYYYMM/YYYYMMDD format given an - * integer in YYYYMMDD format. + * Gets the download path, in [DownloadBaseDir]/sDataSubdir/YYYY/YYYYMM/YYYYMMDD format given an integer in YYYYMMDD + * format. */ - public static String getYyyymmddIntDownloadPath(PropertyFile configuration, String sDataSubdir, - int nDateYyyymmdd) { + public static String getYyyymmddIntDownloadPath(PropertyFile configuration, String sDataSubdir, int nDateYyyymmdd) { Require.nonempty(sDataSubdir, "sDataSubdir"); return configuration.getProperty("DownloadBaseDir") + DIR_SEP + sDataSubdir + DIR_SEP - + getYyyymmddIntAsPath(nDateYyyymmdd); + + getYyyymmddIntAsPath(nDateYyyymmdd); } // ---------------------------------------------------------------- @@ -266,8 +255,7 @@ public static Date getDateYesterday() { // ---------------------------------------------------------------- /** - * Gets a date object representing the next day at the same hour (which may not be exactly 24 - * hours in the future). + * Gets a date object representing the next day at the same hour (which may not be exactly 24 hours in the future). 
*/ public static Date getNextDaySameTime(Date baseline, TimeZone zone) { Require.neqNull(baseline, "baseline"); @@ -280,9 +268,9 @@ public static Date getNextDaySameTime(Date baseline, TimeZone zone) { // ---------------------------------------------------------------- /** - * Subtracts zero or more 24hr periods from the given date until the day of week for the - * resulting date (local timezone) is a valid day according to the mask. If the strict flag is - * true, optional days are not considered valid. + * Subtracts zero or more 24hr periods from the given date until the day of week for the resulting date (local + * timezone) is a valid day according to the mask. If the strict flag is true, optional days are not considered + * valid. *

    * See {@link #validateDayOfWeekMask}. */ @@ -293,8 +281,7 @@ public static Date getMostRecentValidDate(Date date, String sValidDaysMask, bool Calendar calendar = Calendar.getInstance(ms_localTimeZone); while (true) { calendar.setTime(date); - char chDayType = - sValidDaysMask.charAt(calendar.get(Calendar.DAY_OF_WEEK) - Calendar.SUNDAY); + char chDayType = sValidDaysMask.charAt(calendar.get(Calendar.DAY_OF_WEEK) - Calendar.SUNDAY); if (DAY_VALID == chDayType || (!bStrict && DAY_OPTIONAL == chDayType)) { break; } @@ -305,9 +292,8 @@ public static Date getMostRecentValidDate(Date date, String sValidDaysMask, bool // ---------------------------------------------------------------- /** - * Adds one or more 24hr periods from the given date until the day of week for the resulting - * date (local timezone) is a valid day according to the mask. If the strict flag is true, - * optional days are not considered valid. + * Adds one or more 24hr periods from the given date until the day of week for the resulting date (local timezone) + * is a valid day according to the mask. If the strict flag is true, optional days are not considered valid. *

    * See {@link #validateDayOfWeekMask}. */ @@ -319,8 +305,7 @@ public static Date getNextValidDate(Date date, String sValidDaysMask, boolean bS while (true) { date = new Date(date.getTime() + MILLIS_PER_DAY); calendar.setTime(date); - char chDayType = - sValidDaysMask.charAt(calendar.get(Calendar.DAY_OF_WEEK) - Calendar.SUNDAY); + char chDayType = sValidDaysMask.charAt(calendar.get(Calendar.DAY_OF_WEEK) - Calendar.SUNDAY); if (DAY_VALID == chDayType || (!bStrict && DAY_OPTIONAL == chDayType)) { break; } @@ -342,9 +327,8 @@ public static int getDayValidity(Date date, String sValidDaysMask) { // ---------------------------------------------------------------- /** - * Throws a requirement exception if the given day of week mask is not valid. There must be at - * least one valid day in the mask. If the strict flag is set, optional days are not considered - * valid. + * Throws a requirement exception if the given day of week mask is not valid. There must be at least one valid day + * in the mask. If the strict flag is set, optional days are not considered valid. *

    * See {@link #DAY_VALID}, {@link #DAY_INVALID}, {@link #DAY_OPTIONAL}, {@link #DAYMASK_STRICT}, * {@link #DAYMASK_NOT_STRICT} @@ -355,9 +339,8 @@ public static void validateDayOfWeekMask(String sValidDaysMask, boolean bStrict) int nValidDaysFound = 0; for (int nIndex = 0; nIndex < DAYS_PER_WEEK; nIndex++) { char chDayType = sValidDaysMask.charAt(nIndex); - Require.requirement( - DAY_INVALID == chDayType || DAY_VALID == chDayType || DAY_OPTIONAL == chDayType, - "DAY_INVALID==chDayType || DAY_VALID==chDayType || DAY_OPTIONAL==chDayType", 1); + Require.requirement(DAY_INVALID == chDayType || DAY_VALID == chDayType || DAY_OPTIONAL == chDayType, + "DAY_INVALID==chDayType || DAY_VALID==chDayType || DAY_OPTIONAL==chDayType", 1); if (DAY_VALID == chDayType || (!bStrict && DAY_OPTIONAL == chDayType)) { nValidDaysFound++; } @@ -393,7 +376,7 @@ public static int getDateAsYyyymmddInt(Date date) { Calendar calendar = Calendar.getInstance(ms_localTimeZone); calendar.setTime(date); return calendar.get(Calendar.YEAR) * 10000 + (calendar.get(Calendar.MONTH) + 1) * 100 - + calendar.get(Calendar.DAY_OF_MONTH); + + calendar.get(Calendar.DAY_OF_MONTH); } // ---------------------------------------------------------------- @@ -414,7 +397,7 @@ public static String formatYyyymmddStringAsIso(String sDateYyyymmdd) { Require.neqNull(sDateYyyymmdd, "sDateYyyymmdd"); Require.eq(sDateYyyymmdd.length(), "sDateYyyymmdd.length()", 8); return sDateYyyymmdd.substring(0, 4) + "-" + sDateYyyymmdd.substring(4, 6) + "-" - + sDateYyyymmdd.substring(6, 8); + + sDateYyyymmdd.substring(6, 8); } // ---------------------------------------------------------------- @@ -423,7 +406,7 @@ public static String formatYyyymmddStringAsUs(String sDateYyyymmdd) { Require.neqNull(sDateYyyymmdd, "sDateYyyymmdd"); Require.eq(sDateYyyymmdd.length(), "sDateYyyymmdd.length()", 8); return sDateYyyymmdd.substring(4, 6) + "/" + sDateYyyymmdd.substring(6, 8) + "/" - + sDateYyyymmdd.substring(0, 4); + + 
sDateYyyymmdd.substring(0, 4); } // ---------------------------------------------------------------- @@ -467,8 +450,7 @@ public static String formatddM3UyyyyStringAsIso(String sDateddM3Uyyyy) { String res; res = sDateddM3Uyyyy.substring(5); - int monthValue = - Arrays.asList(MONTH_ABBREVIATIONS_3U).indexOf(sDateddM3Uyyyy.substring(2, 5)); + int monthValue = Arrays.asList(MONTH_ABBREVIATIONS_3U).indexOf(sDateddM3Uyyyy.substring(2, 5)); res += "-" + ((monthValue < 10) ? "0" + monthValue : monthValue); res += "-" + (sDateddM3Uyyyy.substring(0, 2)); @@ -544,8 +526,7 @@ public static String formatIsoAsMMsDDsYYYYString(String sDateYYYYdMMdDD) { * @return date * @throws ParseException */ - public static String formatDateFromStringToString(String date, String sourceFormat, - String resultFormat) { + public static String formatDateFromStringToString(String date, String sourceFormat, String resultFormat) { final DateFormat sourceDateFormat = new SimpleDateFormat(sourceFormat); final DateFormat resultDateFormat = new SimpleDateFormat(resultFormat); return formatDateFromFormatToFormat(date, sourceDateFormat, resultDateFormat); @@ -561,7 +542,7 @@ public static String formatDateFromStringToString(String date, String sourceForm * @throws ParseException */ public static String formatDateFromFormatToFormat(String date, DateFormat sourceDateFormat, - DateFormat resultDateFormat) { + DateFormat resultDateFormat) { try { return resultDateFormat.format(sourceDateFormat.parse(date)); } catch (ParseException e) { @@ -575,11 +556,11 @@ public static String formatDateFromFormatToFormat(String date, DateFormat source /** * Returns the absolute timestamp of the most recent occurrence (before or exactly on the * referenceTimestamp) of a daily event. The time of day is taken from - * sPropertyNameRoot.time in "h:mm a" format. The time zone for calculations - * is taken from sPropertyNameRoot.timeZone. + * sPropertyNameRoot.time in "h:mm a" format. 
The time zone for calculations is taken from + * sPropertyNameRoot.timeZone. */ - public static Date getTimestampOfMostRecentDailyEvent(PropertyFile configuration, - String sPropertyNameRoot, Date referenceTimestamp) { + public static Date getTimestampOfMostRecentDailyEvent(PropertyFile configuration, String sPropertyNameRoot, + Date referenceTimestamp) { Require.nonempty(sPropertyNameRoot, "sPropertyNameRoot"); Require.neqNull(referenceTimestamp, "referenceTimestamp"); @@ -587,21 +568,18 @@ public static Date getTimestampOfMostRecentDailyEvent(PropertyFile configuration TimeZone timeZone = getTimeZoneOfEvent(configuration, sPropertyNameRoot); // get the time of day of the event from the system properties - Calendar eventTimestampCalendar = - buildEventTimestampCalendar(timeZone, sPropertyNameRoot, configuration); + Calendar eventTimestampCalendar = buildEventTimestampCalendar(timeZone, sPropertyNameRoot, configuration); // determine the exact timestamp of when the event happens today Calendar referenceTimestampCalendar = Calendar.getInstance(timeZone); referenceTimestampCalendar.setTime(referenceTimestamp); eventTimestampCalendar.set( - referenceTimestampCalendar.get(Calendar.YEAR), - referenceTimestampCalendar.get(Calendar.MONTH), - referenceTimestampCalendar.get(Calendar.DAY_OF_MONTH)); - - // if the event happens in the future, then the most recent occurrence was the one that - // happened one day ago - if (eventTimestampCalendar.getTimeInMillis() > referenceTimestampCalendar - .getTimeInMillis()) { + referenceTimestampCalendar.get(Calendar.YEAR), + referenceTimestampCalendar.get(Calendar.MONTH), + referenceTimestampCalendar.get(Calendar.DAY_OF_MONTH)); + + // if the event happens in the future, then the most recent occurrence was the one that happened one day ago + if (eventTimestampCalendar.getTimeInMillis() > referenceTimestampCalendar.getTimeInMillis()) { eventTimestampCalendar.add(Calendar.DAY_OF_MONTH, -1); } @@ -610,66 +588,60 @@ public static Date 
getTimestampOfMostRecentDailyEvent(PropertyFile configuration // ---------------------------------------------------------------- /** - * Returns the absolute timestamp of the occurrence of a daily event that happens in the same - * "day" as right now. The time of day of the event is taken from - * sPropertyNameRoot.time in "h:mm a" format. The time zone for calculations - * (and for determining the boundaries of "today") is taken from + * Returns the absolute timestamp of the occurrence of a daily event that happens in the same "day" as right now. + * The time of day of the event is taken from sPropertyNameRoot.time in "h:mm a" format. The + * time zone for calculations (and for determining the boundaries of "today") is taken from * sPropertyNameRoot.timeZone. */ - public static Date getTimestampOfEventToday(PropertyFile configuration, - String sPropertyNameRoot) { + public static Date getTimestampOfEventToday(PropertyFile configuration, String sPropertyNameRoot) { Require.nonempty(sPropertyNameRoot, "sPropertyNameRoot"); // get the time zone of the event from the system properties TimeZone timeZone = getTimeZoneOfEvent(configuration, sPropertyNameRoot); // get the time of day of the event from the system properties - Calendar eventTimestampCalendar = - buildEventTimestampCalendar(timeZone, sPropertyNameRoot, configuration); + Calendar eventTimestampCalendar = buildEventTimestampCalendar(timeZone, sPropertyNameRoot, configuration); // determine the exact timestamp of when the event happens today Calendar referenceTimestampCalendar = Calendar.getInstance(timeZone); eventTimestampCalendar.set( - referenceTimestampCalendar.get(Calendar.YEAR), - referenceTimestampCalendar.get(Calendar.MONTH), - referenceTimestampCalendar.get(Calendar.DAY_OF_MONTH)); + referenceTimestampCalendar.get(Calendar.YEAR), + referenceTimestampCalendar.get(Calendar.MONTH), + referenceTimestampCalendar.get(Calendar.DAY_OF_MONTH)); return eventTimestampCalendar.getTime(); } // 
---------------------------------------------------------------- /** - * Returns the absolute timestamp of the occurrence of a daily event that happens in the same - * "day" as right now. The time of day of the event is taken from - * sPropertyNameRoot.time in "h:mm a" format. The time zone for calculations - * (and for determining the boundaries of "today") is taken from + * Returns the absolute timestamp of the occurrence of a daily event that happens in the same "day" as right now. + * The time of day of the event is taken from sPropertyNameRoot.time in "h:mm a" format. The + * time zone for calculations (and for determining the boundaries of "today") is taken from * sPropertyNameRoot.timeZone. */ - public static Date getTimestampOfEventToday(PropertyFile configuration, - String sPropertyNameRoot, long nNowMillis) { + public static Date getTimestampOfEventToday(PropertyFile configuration, String sPropertyNameRoot, long nNowMillis) { Require.nonempty(sPropertyNameRoot, "sPropertyNameRoot"); // get the time zone of the event from the system properties TimeZone timeZone = getTimeZoneOfEvent(configuration, sPropertyNameRoot); // get the time of day of the event from the system properties - Calendar eventTimestampCalendar = - buildEventTimestampCalendar(timeZone, sPropertyNameRoot, configuration); + Calendar eventTimestampCalendar = buildEventTimestampCalendar(timeZone, sPropertyNameRoot, configuration); // determine the exact timestamp of when the event happens today Calendar referenceTimestampCalendar = Calendar.getInstance(timeZone); referenceTimestampCalendar.setTimeInMillis(nNowMillis); eventTimestampCalendar.set( - referenceTimestampCalendar.get(Calendar.YEAR), - referenceTimestampCalendar.get(Calendar.MONTH), - referenceTimestampCalendar.get(Calendar.DAY_OF_MONTH)); + referenceTimestampCalendar.get(Calendar.YEAR), + referenceTimestampCalendar.get(Calendar.MONTH), + referenceTimestampCalendar.get(Calendar.DAY_OF_MONTH)); return eventTimestampCalendar.getTime(); 
} // ---------------------------------------------------------------- private static Calendar buildEventTimestampCalendar(TimeZone timeZone, String sPropertyNameRoot, - PropertyFile configuration) { + PropertyFile configuration) { String sTimeProperty = sPropertyNameRoot + ".time"; String sTime = configuration.getProperty(sTimeProperty); Calendar eventTimestampCalendar = Calendar.getInstance(timeZone); @@ -683,8 +655,8 @@ private static Calendar buildEventTimestampCalendar(TimeZone timeZone, String sP try { timeFormat.parse(sTime); } catch (ParseException e2) { - throw Require.exceptionNeverCaught("Value of property " + sTimeProperty + " (\"" - + sTime + "\") not in proper format (\"" + timeFormat.toPattern() + "\").", e2); + throw Require.exceptionNeverCaught("Value of property " + sTimeProperty + " (\"" + sTime + + "\") not in proper format (\"" + timeFormat.toPattern() + "\").", e2); } } return eventTimestampCalendar; @@ -692,11 +664,10 @@ private static Calendar buildEventTimestampCalendar(TimeZone timeZone, String sP // ---------------------------------------------------------------- /** - * Gets the timestamp of an event based upon a daily event and a date (retrieved from - * properties) + * Gets the timestamp of an event based upon a daily event and a date (retrieved from properties) */ public static Date getTimestampOfEvent(PropertyFile configuration, String sEventPropertyRoot, - String sDateProperty) { + String sDateProperty) { Require.nonempty(sEventPropertyRoot, "sEventPropertyRoot"); Require.nonempty(sDateProperty, "sDateProperty"); @@ -704,44 +675,40 @@ public static Date getTimestampOfEvent(PropertyFile configuration, String sEvent TimeZone timeZone = getTimeZoneOfEvent(configuration, sEventPropertyRoot); // get the time of day of the event from the system properties - Calendar eventTimestampCalendar = - buildEventTimestampCalendar(timeZone, sEventPropertyRoot, configuration); + Calendar eventTimestampCalendar = buildEventTimestampCalendar(timeZone, 
sEventPropertyRoot, configuration); // parse the date string and set the year, month, and day of the timestamp we are building - // note: time zone is irrelevant for the next step because we just want the numbers - we - // could use a regexp. + // note: time zone is irrelevant for the next step because we just want the numbers - we could use a regexp. SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd"); String sDate = configuration.getProperty(sDateProperty); try { dateFormat.parse(sDate); } catch (ParseException e) { - throw Require.exceptionNeverCaught(sDateProperty + " (\"" + sDate + "\") not in \"" - + dateFormat.toPattern() + "\" format.", e); + throw Require.exceptionNeverCaught( + sDateProperty + " (\"" + sDate + "\") not in \"" + dateFormat.toPattern() + "\" format.", e); } Calendar dateCalendar = dateFormat.getCalendar(); // set the year, month, and day - eventTimestampCalendar.set(dateCalendar.get(Calendar.YEAR), - dateCalendar.get(Calendar.MONTH), dateCalendar.get(Calendar.DAY_OF_MONTH)); + eventTimestampCalendar.set(dateCalendar.get(Calendar.YEAR), dateCalendar.get(Calendar.MONTH), + dateCalendar.get(Calendar.DAY_OF_MONTH)); return eventTimestampCalendar.getTime(); } // ---------------------------------------------------------------- /** - * Gets the timestamp of an event based upon a daily event and a date specified by year, month - * (jan=1), day + * Gets the timestamp of an event based upon a daily event and a date specified by year, month (jan=1), day */ - public static Date getTimestampOfEvent(PropertyFile configuration, String sEventPropertyRoot, - int nYear, int nMonth, int nDay) { + public static Date getTimestampOfEvent(PropertyFile configuration, String sEventPropertyRoot, int nYear, int nMonth, + int nDay) { Require.nonempty(sEventPropertyRoot, "sEventPropertyRoot"); // get the time zone of the event from the system properties TimeZone timeZone = getTimeZoneOfEvent(configuration, sEventPropertyRoot); // get the time of day of the 
event from the system properties - Calendar eventTimestampCalendar = - buildEventTimestampCalendar(timeZone, sEventPropertyRoot, configuration); + Calendar eventTimestampCalendar = buildEventTimestampCalendar(timeZone, sEventPropertyRoot, configuration); // set the year, month, and day eventTimestampCalendar.set(nYear, nMonth - 1, nDay); @@ -753,63 +720,55 @@ public static Date getTimestampOfEvent(PropertyFile configuration, String sEvent /** * Gets the timestamp of an event based upon a daily event and a date in YYYYMMDD format */ - public static Date getTimestampOfEvent(PropertyFile configuration, String sEventPropertyRoot, - int nYYYYMMDD) { + public static Date getTimestampOfEvent(PropertyFile configuration, String sEventPropertyRoot, int nYYYYMMDD) { Require.nonempty(sEventPropertyRoot, "sEventPropertyRoot"); - return getTimestampOfEvent(configuration, sEventPropertyRoot, nYYYYMMDD / 10000, - (nYYYYMMDD / 100) % 100, nYYYYMMDD % 100); + return getTimestampOfEvent(configuration, sEventPropertyRoot, nYYYYMMDD / 10000, (nYYYYMMDD / 100) % 100, + nYYYYMMDD % 100); } // ---------------------------------------------------------------- /** Gets the time zone associated with a particular daily event. */ - public static TimeZone getTimeZoneOfEvent(PropertyFile configuration, - String sPropertyNameRoot) { + public static TimeZone getTimeZoneOfEvent(PropertyFile configuration, String sPropertyNameRoot) { Require.nonempty(sPropertyNameRoot, "sPropertyNameRoot"); return TimeZone.getTimeZone(configuration.getProperty(sPropertyNameRoot + ".timeZone")); } // ---------------------------------------------------------------- /** - * Returns a date (noon in the local time zone) which is the date of the most recent occurrence - * (before or exactly on the referenceTimestamp) of the specified event, in the - * event's timezone. 
+ * Returns a date (noon in the local time zone) which is the date of the most recent occurrence (before or exactly + * on the referenceTimestamp) of the specified event, in the event's timezone. */ - public static Date getDateOfMostRecentDailyEvent(PropertyFile configuration, - String sPropertyNameRoot, Date referenceTimestamp) { + public static Date getDateOfMostRecentDailyEvent(PropertyFile configuration, String sPropertyNameRoot, + Date referenceTimestamp) { Require.nonempty(sPropertyNameRoot, "sPropertyNameRoot"); Require.neqNull(referenceTimestamp, "referenceTimestamp"); - Date eventTimestamp = getTimestampOfMostRecentDailyEvent(configuration, sPropertyNameRoot, - referenceTimestamp); - Calendar sourceCalendar = - Calendar.getInstance(getTimeZoneOfEvent(configuration, sPropertyNameRoot)); + Date eventTimestamp = getTimestampOfMostRecentDailyEvent(configuration, sPropertyNameRoot, referenceTimestamp); + Calendar sourceCalendar = Calendar.getInstance(getTimeZoneOfEvent(configuration, sPropertyNameRoot)); sourceCalendar.setTime(eventTimestamp); Calendar targetCalendar = Calendar.getInstance(ms_localTimeZone); targetCalendar.clear(); targetCalendar.set(sourceCalendar.get(Calendar.YEAR), sourceCalendar.get(Calendar.MONTH), - sourceCalendar.get(Calendar.DAY_OF_MONTH), 12, 0, 0); + sourceCalendar.get(Calendar.DAY_OF_MONTH), 12, 0, 0); return targetCalendar.getTime(); } // ---------------------------------------------------------------- /** - * Returns a date (noon in the local time zone) which is the date of the most recent occurrence - * (before or exactly on the referenceTimestamp) of the specified event, in the - * event's timezone. If the (strict) valid days mask indicates that the date is not valid, days - * will be subtracted until the date is valid. + * Returns a date (noon in the local time zone) which is the date of the most recent occurrence (before or exactly + * on the referenceTimestamp) of the specified event, in the event's timezone. 
If the (strict) valid + * days mask indicates that the date is not valid, days will be subtracted until the date is valid. *

    * See {@link #validateDayOfWeekMask}. */ - public static Date getDateOfMostRecentDailyEvent(PropertyFile configuration, - String sPropertyNameRoot, Date referenceTimestamp, String sValidDaysMask) { + public static Date getDateOfMostRecentDailyEvent(PropertyFile configuration, String sPropertyNameRoot, + Date referenceTimestamp, String sValidDaysMask) { Require.nonempty(sPropertyNameRoot, "sPropertyNameRoot"); Require.neqNull(referenceTimestamp, "referenceTimestamp"); validateDayOfWeekMask(sValidDaysMask, DAYMASK_STRICT); Calendar calendar = Calendar.getInstance(ms_localTimeZone); - calendar.setTime( - getDateOfMostRecentDailyEvent(configuration, sPropertyNameRoot, referenceTimestamp)); + calendar.setTime(getDateOfMostRecentDailyEvent(configuration, sPropertyNameRoot, referenceTimestamp)); while (true) { - char chDayType = - sValidDaysMask.charAt(calendar.get(Calendar.DAY_OF_WEEK) - Calendar.SUNDAY); + char chDayType = sValidDaysMask.charAt(calendar.get(Calendar.DAY_OF_WEEK) - Calendar.SUNDAY); if (DAY_VALID == chDayType) { break; } @@ -821,9 +780,8 @@ public static Date getDateOfMostRecentDailyEvent(PropertyFile configuration, // ---------------------------------------------------------------- /** * Wraps a "daily event" as an object. The time of day of the event is taken from - * sPropertyNameRoot.time in "h:mm a" format. The time zone for calculations - * (and for determining the boundaries of "today") is taken from - * sPropertyNameRoot.timeZone. + * sPropertyNameRoot.time in "h:mm a" format. The time zone for calculations (and for + * determining the boundaries of "today") is taken from sPropertyNameRoot.timeZone. 
*/ public static class DailyEvent { @@ -835,8 +793,8 @@ public DailyEvent(PropertyFile configuration, String sPropertyNameRoot) { Require.neqNull(configuration, "configuration"); Require.nonempty(sPropertyNameRoot, "sPropertyNameRoot"); try { - buildEventTimestampCalendar(getTimeZoneOfEvent(configuration, sPropertyNameRoot), - sPropertyNameRoot, configuration); + buildEventTimestampCalendar(getTimeZoneOfEvent(configuration, sPropertyNameRoot), sPropertyNameRoot, + configuration); } catch (RequirementFailure e) { throw e.adjustForDelegatingMethod(); } @@ -846,15 +804,14 @@ public DailyEvent(PropertyFile configuration, String sPropertyNameRoot) { // ------------------------------------------------------------ public long getTimestampOfEventToday(long nNow) { - return DateUtil.getTimestampOfEventToday(m_configuration, m_sPropertyNameRoot, nNow) - .getTime(); + return DateUtil.getTimestampOfEventToday(m_configuration, m_sPropertyNameRoot, nNow).getTime(); } // ------------------------------------------------------------ @Override public String toString() { return m_configuration.getProperty(m_sPropertyNameRoot + ".time") + ", " - + m_configuration.getProperty(m_sPropertyNameRoot + ".timeZone"); + + m_configuration.getProperty(m_sPropertyNameRoot + ".timeZone"); } } @@ -971,18 +928,18 @@ private static String internalFormatInterval(long tsInterval, int nThousands) { // ---------------------------------------------------------------- /** - * Formats the given microsecond timestamp with the given date formatter and then appends the - * last three microsend digits. + * Formats the given microsecond timestamp with the given date formatter and then appends the last three microsend + * digits. 
*/ public static String formatWithTrailingMicros(DateFormat dateFormat, long nTimestampMicros) { return dateFormat.format(nTimestampMicros / DateUtil.MICROS_PER_MILLI) - + DateUtil.formatTrailingMicros(nTimestampMicros); + + DateUtil.formatTrailingMicros(nTimestampMicros); } // ---------------------------------------------------------------- /** - * Returns the last three digits of the given microsecond timestamp as a string, suitable for - * appending to a timestamp formatted to millisecond precision. + * Returns the last three digits of the given microsecond timestamp as a string, suitable for appending to a + * timestamp formatted to millisecond precision. */ public static String formatTrailingMicros(long nTimestampMicros) { nTimestampMicros = nTimestampMicros % 1000; diff --git a/FishUtil/src/main/java/io/deephaven/util/HungReportJob.java b/FishUtil/src/main/java/io/deephaven/util/HungReportJob.java index b41a1c35340..28548f44195 100644 --- a/FishUtil/src/main/java/io/deephaven/util/HungReportJob.java +++ b/FishUtil/src/main/java/io/deephaven/util/HungReportJob.java @@ -18,9 +18,9 @@ public class HungReportJob extends TimedJob { private final String actionMessage; public HungReportJob(@NotNull final Logger log, - @NotNull final String errorEmailRecipientAddress, - @NotNull final String monitoredJobName, - @NotNull final String actionMessage) { + @NotNull final String errorEmailRecipientAddress, + @NotNull final String monitoredJobName, + @NotNull final String actionMessage) { this.log = log; this.errorEmailRecipientAddress = errorEmailRecipientAddress; this.monitoredJobName = monitoredJobName; @@ -31,10 +31,10 @@ public HungReportJob(@NotNull final Logger log, public void timedOut() { try { new SMTPMailer().sendEmail(null, errorEmailRecipientAddress, - "[ERROR] " + monitoredJobName + " may be hung!", actionMessage); + "[ERROR] " + monitoredJobName + " may be hung!", actionMessage); } catch (IOException e) { log.warn().append("HungReportJob: Failed to send delay 
report email for monitored job ") - .append(monitoredJobName).append(": ").append(e).endl(); + .append(monitoredJobName).append(": ").append(e).endl(); } } } diff --git a/FishUtil/src/main/java/io/deephaven/util/Mailer.java b/FishUtil/src/main/java/io/deephaven/util/Mailer.java index 9c785e70a5e..267355ab9c6 100644 --- a/FishUtil/src/main/java/io/deephaven/util/Mailer.java +++ b/FishUtil/src/main/java/io/deephaven/util/Mailer.java @@ -9,14 +9,12 @@ import java.util.Map; public interface Mailer { - void sendEmail(String sender, String[] recipients, String subject, String msg) - throws IOException; + void sendEmail(String sender, String[] recipients, String subject, String msg) throws IOException; void sendEmail(String sender, String recipient, String subject, String msg) throws IOException; - void sendHTMLEmail(String sender, String recipient, String subject, String msg) - throws IOException; + void sendHTMLEmail(String sender, String recipient, String subject, String msg) throws IOException; void sendEmail(String sender, String recipient, String subject, String msg, - List> extraHeaderEntries) throws IOException; + List> extraHeaderEntries) throws IOException; } diff --git a/FishUtil/src/main/java/io/deephaven/util/OSUtil.java b/FishUtil/src/main/java/io/deephaven/util/OSUtil.java index 49201abb508..587415d0290 100644 --- a/FishUtil/src/main/java/io/deephaven/util/OSUtil.java +++ b/FishUtil/src/main/java/io/deephaven/util/OSUtil.java @@ -15,7 +15,7 @@ public class OSUtil { public enum OSFamily { LINUX(name -> name.startsWith("Linux")), WINDOWS(name -> name.contains("Windows")), MAC_OS( - name -> name.startsWith("Mac OS")), SOLARIS(name -> name.startsWith("SunOs")); + name -> name.startsWith("Mac OS")), SOLARIS(name -> name.startsWith("SunOs")); private final Predicate nameMatcher; @@ -30,14 +30,14 @@ private boolean matchesName(@NotNull final String osName) { public static OSFamily getOSFamily() { final String name = getOSName(); - final OSFamily[] matchingFamilies 
= Arrays.stream(OSFamily.values()) - .filter(family -> family.matchesName(name)).toArray(OSFamily[]::new); + final OSFamily[] matchingFamilies = + Arrays.stream(OSFamily.values()).filter(family -> family.matchesName(name)).toArray(OSFamily[]::new); if (matchingFamilies.length == 0) { throw new IllegalArgumentException("Unknown OS family for OS name " + name); } if (matchingFamilies.length > 1) { - throw new IllegalArgumentException("Ambiguous OS family for OS name " + name - + ", matches: " + Arrays.toString(matchingFamilies)); + throw new IllegalArgumentException( + "Ambiguous OS family for OS name " + name + ", matches: " + Arrays.toString(matchingFamilies)); } return matchingFamilies[0]; } diff --git a/FishUtil/src/main/java/io/deephaven/util/PidFileUtil.java b/FishUtil/src/main/java/io/deephaven/util/PidFileUtil.java index 8b36a75a789..5aaa32b3729 100644 --- a/FishUtil/src/main/java/io/deephaven/util/PidFileUtil.java +++ b/FishUtil/src/main/java/io/deephaven/util/PidFileUtil.java @@ -27,8 +27,7 @@ public class PidFileUtil { * @param configuration The configuration to use for property lookup * @throws IllegalStateException If pidFile exists or cannot be created/opened */ - public static void checkAndCreatePidFileForThisProcess( - @NotNull final Configuration configuration) { + public static void checkAndCreatePidFileForThisProcess(@NotNull final Configuration configuration) { checkAndCreatePidFileForProcessName(configuration, configuration.getProcessName()); } @@ -36,12 +35,11 @@ public static void checkAndCreatePidFileForThisProcess( * Atomically create a new file, and then write this process' PID to it. 
* * @param configuration The configuration to use for property lookup - * @param processName The name to be used for the per-process unique portion of the PID file's - * path + * @param processName The name to be used for the per-process unique portion of the PID file's path * @throws IllegalStateException If pidFile exists or cannot be created/opened */ - public static void checkAndCreatePidFileForProcessName( - @NotNull final Configuration configuration, @NotNull final String processName) { + public static void checkAndCreatePidFileForProcessName(@NotNull final Configuration configuration, + @NotNull final String processName) { final String directoryName = configuration.getProperty(PID_FILE_DIRECTORY_PROPERTY); checkAndCreatePidFile(new File(directoryName, processName + FILE_SUFFIX)); } @@ -85,7 +83,7 @@ public static void checkAndCreatePidFile(final File pidFile) throws IllegalState try { if (!pidFile.createNewFile()) { throw new IllegalStateException("Pid file " + pidFile - + " already exists - check running process and manually delete if necessary"); + + " already exists - check running process and manually delete if necessary"); } } catch (IOException e) { throw new IllegalStateException("Unable to create pid file " + pidFile, e); @@ -96,8 +94,7 @@ public static void checkAndCreatePidFile(final File pidFile) throws IllegalState try { fileWriter = new FileWriter(pidFile); } catch (IOException e) { - throw new IllegalStateException("Failed to open pid file " + pidFile + " for writing", - e); + throw new IllegalStateException("Failed to open pid file " + pidFile + " for writing", e); } try { diff --git a/FishUtil/src/main/java/io/deephaven/util/PropertyRetriever.java b/FishUtil/src/main/java/io/deephaven/util/PropertyRetriever.java index fc93e2e7e3c..5c5a78f1ced 100644 --- a/FishUtil/src/main/java/io/deephaven/util/PropertyRetriever.java +++ b/FishUtil/src/main/java/io/deephaven/util/PropertyRetriever.java @@ -17,49 +17,43 @@ import 
java.security.PrivilegedExceptionAction; /** - * Class to assist with retrieving properties such as passwords from environment variables, files, - * and properties. + * Class to assist with retrieving properties such as passwords from environment variables, files, and properties. */ public class PropertyRetriever { /** - * Return a property value from a set of possible locations, allowing for optional base64 - * decoding. The following order is used. + * Return a property value from a set of possible locations, allowing for optional base64 decoding. The following + * order is used. *

      - *
    • First, if an environment variable is provided, it is checked. if it exists, then it is - * returned, base64-decoded if requested.
    • - *
    • Next, the property file is checked if the property is provided (the configuration is - * checked for the provided property, and the resulting property value defines the filename). If - * it exists, the contents are read, base64-decoded if requested, then returned.
    • - *
    • Finally, the property is checked. If it exists it is base64-decoded if requested and - * returned.
    • - *
    • At least one of environmentVariable, fileProperty, or propertyName must be specified and - * exist.
    • - *
    • If both a property file and property name are provided and exist in the Configuration - * instance, an exception will be thrown.
    • + *
    • First, if an environment variable is provided, it is checked. if it exists, then it is returned, + * base64-decoded if requested.
    • + *
    • Next, the property file is checked if the property is provided (the configuration is checked for the provided + * property, and the resulting property value defines the filename). If it exists, the contents are read, + * base64-decoded if requested, then returned.
    • + *
    • Finally, the property is checked. If it exists it is base64-decoded if requested and returned.
    • + *
    • At least one of environmentVariable, fileProperty, or propertyName must be specified and exist.
    • + *
    • If both a property file and property name are provided and exist in the Configuration instance, an exception + * will be thrown.
    • *
    * * @param configuration the Configuration instance to check * @param propertyMeaning a user-friendly property meaning, included in thrown exceptions * @param environmentVariable an optional environment variable to check for the value - * @param fileProperty an optional Configuration property that specifies the file that contains - * the value + * @param fileProperty an optional Configuration property that specifies the file that contains the value * @param propertyName an optional Configuration property that specifies the value - * @param base64Encoded if true, the retrieved value is base64 decoded before being returned to - * the caller + * @param base64Encoded if true, the retrieved value is base64 decoded before being returned to the caller * @return the found value, base64-decoded if requested */ public static String getProperty(@NotNull final Configuration configuration, - @NotNull final String propertyMeaning, - @Nullable final String environmentVariable, - @Nullable final String fileProperty, - @Nullable final String propertyName, - final boolean base64Encoded) { + @NotNull final String propertyMeaning, + @Nullable final String environmentVariable, + @Nullable final String fileProperty, + @Nullable final String propertyName, + final boolean base64Encoded) { // This means a coding error on the caller's part if (environmentVariable == null && fileProperty == null && propertyName == null) { throw new ConfigurationException( - "No environment variable or properties defined to retrieve property for " - + propertyMeaning); + "No environment variable or properties defined to retrieve property for " + propertyMeaning); } // The environment variable takes precedence @@ -67,8 +61,7 @@ public static String getProperty(@NotNull final Configuration configuration, // If nothing was retrieved from the environment variable, then check the property and file if (propertyValue == null) { - propertyValue = getPropertyFromFileOrProperty(configuration, propertyMeaning, - 
fileProperty, propertyName); + propertyValue = getPropertyFromFileOrProperty(configuration, propertyMeaning, fileProperty, propertyName); } // If it's still null nothing could be found @@ -80,17 +73,15 @@ public static String getProperty(@NotNull final Configuration configuration, if (fileProperty != null) { propertyPossibilities - .append(propertyPossibilities.length() == 0 ? "filename property " - : " or filename property "); + .append(propertyPossibilities.length() == 0 ? "filename property " : " or filename property "); propertyPossibilities.append(fileProperty); } if (propertyName != null) { - propertyPossibilities - .append(propertyPossibilities.length() == 0 ? "property " : " or property "); + propertyPossibilities.append(propertyPossibilities.length() == 0 ? "property " : " or property "); propertyPossibilities.append(propertyName); } throw new ConfigurationException( - "No " + propertyMeaning + " set, please set " + propertyPossibilities.toString()); + "No " + propertyMeaning + " set, please set " + propertyPossibilities.toString()); } if (base64Encoded) { @@ -105,38 +96,34 @@ private static String getPropertyFromEnvironmentVariable(final String environmen } private static String getPropertyFromFileOrProperty(final Configuration configuration, - final String propertyMeaning, - final String fileProperty, - final String propertyName) { + final String propertyMeaning, + final String fileProperty, + final String propertyName) { try { - // We need permission to read the property and access the file, so this needs to be - // privileged. + // We need permission to read the property and access the file, so this needs to be privileged. 
return AccessController.doPrivileged((PrivilegedExceptionAction) () -> { if (fileProperty != null && configuration.hasProperty(fileProperty)) { if (propertyName != null && configuration.hasProperty(propertyName)) { - throw new IllegalArgumentException( - "Conflicting properties for " + propertyMeaning + " - both " + throw new IllegalArgumentException("Conflicting properties for " + propertyMeaning + " - both " + fileProperty + " and " + propertyName + " are set."); } final String propertyFilename = configuration.getProperty(fileProperty); final File propertyFile = new File(propertyFilename); try { - return new String(Files.readAllBytes(propertyFile.toPath()), - Charset.forName("UTF-8")).trim(); + return new String(Files.readAllBytes(propertyFile.toPath()), Charset.forName("UTF-8")).trim(); } catch (IOException e) { try (InputStream resourceAsStream = - PropertyRetriever.class.getResourceAsStream("/" + propertyFilename)) { + PropertyRetriever.class.getResourceAsStream("/" + propertyFilename)) { if (resourceAsStream == null) { - throw new ConfigurationException( - "Unable to open file " + propertyFilename + " specified by " - + fileProperty + " for " + propertyMeaning); + throw new ConfigurationException("Unable to open file " + propertyFilename + + " specified by " + fileProperty + " for " + propertyMeaning); } final BufferedReader bufferedReader = - new BufferedReader(new InputStreamReader(resourceAsStream)); + new BufferedReader(new InputStreamReader(resourceAsStream)); return bufferedReader.readLine(); } catch (IOException e2) { - throw new UncheckedIOException("Can not read property file " - + propertyFilename + " for " + propertyMeaning, e2); + throw new UncheckedIOException( + "Can not read property file " + propertyFilename + " for " + propertyMeaning, e2); } } } else if (propertyName != null && configuration.hasProperty(propertyName)) { diff --git a/FishUtil/src/main/java/io/deephaven/util/SMTPMailer.java 
b/FishUtil/src/main/java/io/deephaven/util/SMTPMailer.java index df5323f6594..e7a6ea1ece1 100644 --- a/FishUtil/src/main/java/io/deephaven/util/SMTPMailer.java +++ b/FishUtil/src/main/java/io/deephaven/util/SMTPMailer.java @@ -27,8 +27,7 @@ public class SMTPMailer implements Mailer { // Delay resolution of Configuration.getInstance() to avoid classloading circular dependency // issues. // See IDS-5126 for more details, but essentially: - // Config -> log4j -> SimpleMailAppender -> SMTPMailer -> Config -> PropertyInputStreamLoaderKV - // -> etcd read call + // Config -> log4j -> SimpleMailAppender -> SMTPMailer -> Config -> PropertyInputStreamLoaderKV -> etcd read call // PropertyInputStreamLoaderKV starts executor, Etcd Executor -> log4j private enum Props { INSTANCE; @@ -38,8 +37,7 @@ private enum Props { Props() { smtpMxDomain = Configuration.getInstance().getProperty("smtp.mx.domain"); - sendEmailDisabled = - Configuration.getInstance().getBooleanWithDefault("smtp.sendEmail.disabled", false); + sendEmailDisabled = Configuration.getInstance().getBooleanWithDefault("smtp.sendEmail.disabled", false); } } @@ -74,16 +72,15 @@ private String getMXRecord() { try { // Do a DNS lookup DirContext ictx = new InitialDirContext(); - Attributes attributes = - ictx.getAttributes("dns:/" + Props.INSTANCE.smtpMxDomain, new String[] {"MX"}); + Attributes attributes = ictx.getAttributes("dns:/" + Props.INSTANCE.smtpMxDomain, new String[] {"MX"}); Attribute attribute = attributes.get("MX"); // Otherwise, return the first MX record we find for (NamingEnumeration all = attribute.getAll(); all.hasMore();) { String mailhost = (String) all.next(); mailhost = mailhost.substring(1 + mailhost.indexOf(" "), mailhost.length() - 1); - // NOTE: DON'T LOG HERE, WE MIGHT ALREADY BE PART OF LOG_MAILER, AND IF THE QUEUE IS - // FULL WE NEVER ESCAPE! + // NOTE: DON'T LOG HERE, WE MIGHT ALREADY BE PART OF LOG_MAILER, AND IF THE QUEUE IS FULL WE NEVER + // ESCAPE! 
return mailhost; } } catch (Exception e) { @@ -95,8 +92,7 @@ private String getMXRecord() { } @Override - public void sendEmail(String sender, String[] recipients, String subject, String msg) - throws IOException { + public void sendEmail(String sender, String[] recipients, String subject, String msg) throws IOException { if (sender == null) { String hostname = InetAddress.getLocalHost().getHostName(); sender = System.getProperty("user.name") + "@" + hostname; @@ -108,26 +104,23 @@ public void sendEmail(String sender, String[] recipients, String subject, String } @Override - public void sendEmail(String sender, String recipient, String subject, String msg) - throws IOException { + public void sendEmail(String sender, String recipient, String subject, String msg) throws IOException { sendEmail(sender, recipient, subject, msg, null); } @Override - public void sendHTMLEmail(String sender, String recipient, String subject, String msg) - throws IOException { + public void sendHTMLEmail(String sender, String recipient, String subject, String msg) throws IOException { List> extraHeaderEntries = new ArrayList<>(); extraHeaderEntries.add(new AbstractMap.SimpleEntry<>("Mime-Version", "1.0;")); - extraHeaderEntries.add( - new AbstractMap.SimpleEntry<>("Content-Type", "text/html; charset=\"ISO-8859-1\";")); + extraHeaderEntries.add(new AbstractMap.SimpleEntry<>("Content-Type", "text/html; charset=\"ISO-8859-1\";")); extraHeaderEntries.add(new AbstractMap.SimpleEntry<>("Content-Transfer-Encoding", "7bit;")); sendEmail(sender, recipient, subject, msg, extraHeaderEntries); } @Override public void sendEmail(String sender, String recipient, String subject, String msg, - List> extraHeaderEntries) throws IOException { + List> extraHeaderEntries) throws IOException { if (Props.INSTANCE.sendEmailDisabled) { return; } @@ -173,11 +166,10 @@ public void sendEmail(String sender, String recipient, String subject, String ms private static long lastUpdateTime = 0; /** - * Bug reporter, sends 
mail but limits the mail to 1 email per second and automatically includes - * hostname. + * Bug reporter, sends mail but limits the mail to 1 email per second and automatically includes hostname. *

    - * Note there is no guarantee your email goes through because of the 1/second limit and because - * this function eats IOExceptions. + * Note there is no guarantee your email goes through because of the 1/second limit and because this function eats + * IOExceptions. *

    * * @param from "from" address, must not contain spaces, e.g. "RiskProfiler" @@ -185,8 +177,7 @@ public void sendEmail(String sender, String recipient, String subject, String ms * @param subject email subject line * @param message email body */ - public static void reportBug(final String from, final String to, final String subject, - final String message) { + public static void reportBug(final String from, final String to, final String subject, final String message) { // return if it's been less than 1 second since the last email final long now = System.currentTimeMillis(); synchronized (lastUpdateLock) { @@ -219,14 +210,14 @@ public static void reportBug(final String from, final String to, final String su // send mail try { new SMTPMailer().sendEmail(from, to, subject, - "bug report from " + hostname + ":\n\n" + message + addMessage); + "bug report from " + hostname + ":\n\n" + message + addMessage); } catch (IOException e) { // ignore it, we do not promise to deliver. } } - public void sendEmailWithAttachments(String sender, String[] recipients, String subject, - String msg, String attachmentPaths[]) throws Exception { + public void sendEmailWithAttachments(String sender, String[] recipients, String subject, String msg, + String attachmentPaths[]) throws Exception { // Create the email message MultiPartEmail email = new MultiPartEmail(); email.setHostName(getMXRecord()); @@ -251,8 +242,8 @@ public void sendEmailWithAttachments(String sender, String[] recipients, String email.send(); } - public void sendHTMLEmailWithInline(String sender, String recipients[], String subject, - String msg, String attachmentPaths[]) throws Exception { + public void sendHTMLEmailWithInline(String sender, String recipients[], String subject, String msg, + String attachmentPaths[]) throws Exception { Properties sessionProperties = System.getProperties(); sessionProperties.put("mail.smtp.host", getMXRecord()); Session session = Session.getDefaultInstance(sessionProperties, null); @@ 
-294,23 +285,20 @@ public void sendHTMLEmailWithInline(String sender, String recipients[], String s } /* - * public void sendEmail_Authenticated(String user, String password, String sender, String - * recipient, String subject, String body) throws IOException, javax.mail.MessagingException { - * String mailer = "zzz"; Transport tr = null; try { Properties props = System.getProperties(); - * props.put("mail.smtp.auth", "true"); + * public void sendEmail_Authenticated(String user, String password, String sender, String recipient, String + * subject, String body) throws IOException, javax.mail.MessagingException { String mailer = "zzz"; Transport tr = + * null; try { Properties props = System.getProperties(); props.put("mail.smtp.auth", "true"); * * // Get a Session object Session mailSession = Session.getDefaultInstance(props, null); * - * // construct the message Message msg = new MimeMessage(mailSession); msg.setFrom(new - * InternetAddress(sender)); + * // construct the message Message msg = new MimeMessage(mailSession); msg.setFrom(new InternetAddress(sender)); * - * msg.setRecipients(Message.RecipientType.TO, InternetAddress.parse(recipient, false)); - * msg.setSubject(subject); + * msg.setRecipients(Message.RecipientType.TO, InternetAddress.parse(recipient, false)); msg.setSubject(subject); * * msg.setText(body); msg.setHeader("X-Mailer", mailer); msg.setSentDate(new Date()); * - * tr = mailSession.getTransport("smtp"); tr.connect(SMTPHOST, user, password); - * msg.saveChanges(); tr.sendMessage(msg, msg.getAllRecipients()); tr.close(); } catch - * (Exception e) { e.printStackTrace(); } finally { if (tr != null) tr.close(); } } + * tr = mailSession.getTransport("smtp"); tr.connect(SMTPHOST, user, password); msg.saveChanges(); + * tr.sendMessage(msg, msg.getAllRecipients()); tr.close(); } catch (Exception e) { e.printStackTrace(); } finally { + * if (tr != null) tr.close(); } } */ } diff --git a/FishUtil/src/main/java/io/deephaven/util/ThreadSafeDateFormat.java 
b/FishUtil/src/main/java/io/deephaven/util/ThreadSafeDateFormat.java index 76635dd9980..34e562c0d03 100644 --- a/FishUtil/src/main/java/io/deephaven/util/ThreadSafeDateFormat.java +++ b/FishUtil/src/main/java/io/deephaven/util/ThreadSafeDateFormat.java @@ -18,9 +18,8 @@ // -------------------------------------------------------------------- /** - * Wraps a {@link DateFormat} to provide a minimal level of thread safety that DateFormat is lacking - * (namely, preventing simultaneous calls to {@link #format} from separate threads from interfering - * with each other). + * Wraps a {@link DateFormat} to provide a minimal level of thread safety that DateFormat is lacking (namely, preventing + * simultaneous calls to {@link #format} from separate threads from interfering with each other). */ public class ThreadSafeDateFormat extends DateFormat { private final DateFormat m_dateFormat; diff --git a/FishUtil/src/main/java/io/deephaven/util/Validate.java b/FishUtil/src/main/java/io/deephaven/util/Validate.java index d22debec76a..7825195280d 100644 --- a/FishUtil/src/main/java/io/deephaven/util/Validate.java +++ b/FishUtil/src/main/java/io/deephaven/util/Validate.java @@ -33,8 +33,7 @@ public static int validateInteger(String name, String s) throws NumberFormatExce return i; } - public static double validatePositiveDouble(String name, String s) - throws NumberFormatException { + public static double validatePositiveDouble(String name, String s) throws NumberFormatException { double d = 0; try { @@ -68,8 +67,8 @@ public static void validate(boolean b, String errorMsg) throws Exception { } } - public static void validateDouble(String name, double value, double min, double max, - boolean inclusiveMin, boolean inclusiveMax) throws Exception { + public static void validateDouble(String name, double value, double min, double max, boolean inclusiveMin, + boolean inclusiveMax) throws Exception { if (Double.isNaN(value)) { throw new Exception(name + " may not be NaN"); } @@ -91,8 +90,8 
@@ public static void validateDouble(String name, double value, double min, double } } - public static void validateInteger(String name, int value, int min, int max, - boolean inclusiveMin, boolean inclusiveMax) throws Exception { + public static void validateInteger(String name, int value, int min, int max, boolean inclusiveMin, + boolean inclusiveMax) throws Exception { if (inclusiveMin && value < min) { throw new Exception(name + " must be greater than or equal to " + min); } diff --git a/FishUtil/src/main/java/io/deephaven/util/clock/MicroTimer.java b/FishUtil/src/main/java/io/deephaven/util/clock/MicroTimer.java index c7ba3fcad20..a98e13b22e8 100644 --- a/FishUtil/src/main/java/io/deephaven/util/clock/MicroTimer.java +++ b/FishUtil/src/main/java/io/deephaven/util/clock/MicroTimer.java @@ -9,8 +9,7 @@ public class MicroTimer { - private static final boolean isNative = - Configuration.getInstance().getBoolean("NIO.wireLagClock.native"); + private static final boolean isNative = Configuration.getInstance().getBoolean("NIO.wireLagClock.native"); private static long nanoTimeOffset = System.currentTimeMillis() * 1000000 - System.nanoTime(); @@ -66,10 +65,8 @@ public static void main(String[] args) { rdtscNative(); } - System.out - .println("-----------------------------------------------------------------------"); - System.out - .println("rdtscNative: " + (System.nanoTime() - startNanos) / COUNT + " nanos per"); + System.out.println("-----------------------------------------------------------------------"); + System.out.println("rdtscNative: " + (System.nanoTime() - startNanos) / COUNT + " nanos per"); System.out.println("rdtscNative: " + (rdtscNative() - cycles) / COUNT + " cycles per"); startNanos = System.nanoTime(); @@ -79,12 +76,9 @@ public static void main(String[] args) { currentTimeMicrosNative(); } - System.out - .println("-----------------------------------------------------------------------"); - System.out.println( - "currentTimeMicrosNative: " + 
(System.nanoTime() - startNanos) / COUNT + " nanos per"); - System.out.println( - "currentTimeMicrosNative: " + (rdtscNative() - cycles) / COUNT + " cycles per"); + System.out.println("-----------------------------------------------------------------------"); + System.out.println("currentTimeMicrosNative: " + (System.nanoTime() - startNanos) / COUNT + " nanos per"); + System.out.println("currentTimeMicrosNative: " + (rdtscNative() - cycles) / COUNT + " cycles per"); startNanos = System.nanoTime(); cycles = rdtscNative(); @@ -93,10 +87,8 @@ public static void main(String[] args) { System.nanoTime(); } - System.out - .println("-----------------------------------------------------------------------"); - System.out - .println("System.nanoTime: " + (System.nanoTime() - startNanos) / COUNT + " nanos per"); + System.out.println("-----------------------------------------------------------------------"); + System.out.println("System.nanoTime: " + (System.nanoTime() - startNanos) / COUNT + " nanos per"); System.out.println("System.nanoTime: " + (rdtscNative() - cycles) / COUNT + " cycles per"); startNanos = System.nanoTime(); @@ -106,11 +98,8 @@ public static void main(String[] args) { System.currentTimeMillis(); } - System.out - .println("-----------------------------------------------------------------------"); - System.out.println( - "System.currentTimeMillis: " + (System.nanoTime() - startNanos) / COUNT + " nanos per"); - System.out.println( - "System.currentTimeMillis: " + (rdtscNative() - cycles) / COUNT + " cycles per"); + System.out.println("-----------------------------------------------------------------------"); + System.out.println("System.currentTimeMillis: " + (System.nanoTime() - startNanos) / COUNT + " nanos per"); + System.out.println("System.currentTimeMillis: " + (rdtscNative() - cycles) / COUNT + " cycles per"); } } diff --git a/FishUtil/src/main/java/io/deephaven/util/loggers/AsyncAppender.java 
b/FishUtil/src/main/java/io/deephaven/util/loggers/AsyncAppender.java index 894a332e9ce..e710c8ef7b0 100644 --- a/FishUtil/src/main/java/io/deephaven/util/loggers/AsyncAppender.java +++ b/FishUtil/src/main/java/io/deephaven/util/loggers/AsyncAppender.java @@ -13,15 +13,15 @@ public void append(LoggingEvent event) { if (!(message instanceof String)) { event = new LoggingEvent(event.getFQNOfLoggerClass(), - event.getLogger(), - event.getTimeStamp(), - event.getLevel(), - message.toString(), - event.getThreadName(), - event.getThrowableInformation(), - event.getNDC(), - event.getLocationInformation(), - event.getProperties()); + event.getLogger(), + event.getTimeStamp(), + event.getLevel(), + message.toString(), + event.getThreadName(), + event.getThrowableInformation(), + event.getNDC(), + event.getLocationInformation(), + event.getProperties()); } super.append(event); diff --git a/FishUtil/src/main/java/io/deephaven/util/loggers/DailyRollingFileAppender.java b/FishUtil/src/main/java/io/deephaven/util/loggers/DailyRollingFileAppender.java index dc9fadbb195..6518bd9ed69 100644 --- a/FishUtil/src/main/java/io/deephaven/util/loggers/DailyRollingFileAppender.java +++ b/FishUtil/src/main/java/io/deephaven/util/loggers/DailyRollingFileAppender.java @@ -18,8 +18,7 @@ public DailyRollingFileAppender() { super(); } - public DailyRollingFileAppender(Layout layout, String filename, String datePattern) - throws IOException { + public DailyRollingFileAppender(Layout layout, String filename, String datePattern) throws IOException { super(layout, filename, datePattern); } @@ -27,8 +26,8 @@ public void activateOptions() { // i'll activate it myself... 
} - public synchronized void setFile(String fileName, boolean append, boolean bufferedIO, - int bufferSize) throws IOException { + public synchronized void setFile(String fileName, boolean append, boolean bufferedIO, int bufferSize) + throws IOException { LogLog.debug("setFile called: " + fileName + ", " + append); // It does not make sense to have immediate flush and bufferedIO. diff --git a/FishUtil/src/main/java/io/deephaven/util/loggers/Log4JTimedBufferedWriter.java b/FishUtil/src/main/java/io/deephaven/util/loggers/Log4JTimedBufferedWriter.java index b0fa3ea7fca..b202b55e24d 100644 --- a/FishUtil/src/main/java/io/deephaven/util/loggers/Log4JTimedBufferedWriter.java +++ b/FishUtil/src/main/java/io/deephaven/util/loggers/Log4JTimedBufferedWriter.java @@ -9,14 +9,13 @@ public class Log4JTimedBufferedWriter extends BufferedWriter { - private static final ArrayList writers_ = - new ArrayList(); + private static final ArrayList writers_ = new ArrayList(); private static Thread thread_ = null; private static boolean isDone_ = false; - private static final int flushTime_ = 3000; // cannot be a property due to log4j would need - // configuration which needs log4j + private static final int flushTime_ = 3000; // cannot be a property due to log4j would need configuration which + // needs log4j public Log4JTimedBufferedWriter(Writer out) { super(out); diff --git a/FishUtil/src/main/java/io/deephaven/util/loggers/LoggerUtil.java b/FishUtil/src/main/java/io/deephaven/util/loggers/LoggerUtil.java index e77b93fb321..df0aa8cb628 100644 --- a/FishUtil/src/main/java/io/deephaven/util/loggers/LoggerUtil.java +++ b/FishUtil/src/main/java/io/deephaven/util/loggers/LoggerUtil.java @@ -92,191 +92,188 @@ public static void append(final LogOutput out, final double v) { // ################################################################ /** - * Formats an arbitrary object, using a fast method if we can recognize the type, or toString - * otherwise. 
Usually you should make the object LogOutputAppendable or create an ObjFormatter, - * but if the object is truly of unknown type, this is better than calling toString() directly. - * Outputs "null" if the given object is null. + * Formats an arbitrary object, using a fast method if we can recognize the type, or toString otherwise. Usually you + * should make the object LogOutputAppendable or create an ObjFormatter, but if the object is truly of unknown type, + * this is better than calling toString() directly. Outputs "null" if the given object is null. */ - public static final LogOutput.ObjFormatter OBJECT_FORMATTER = - new LogOutput.ObjFormatter() { - @Override - public void format(LogOutput logOutput, Object object) { - Require.neqNull(logOutput, "logOutput"); - if (null == object) { - logOutput.append("null"); - } else if (object instanceof Throwable) { - logOutput.append((Throwable) object); - } else if (object instanceof LogOutputAppendable) { - ((LogOutputAppendable) object).append(logOutput); - } else if (object instanceof CharSequence) { - logOutput.append((CharSequence) object); - } else if (object instanceof byte[]) { - byte[] array = (byte[]) object; - logOutput.append('['); - for (int nIndex = 0, nLength = array.length; nIndex < nLength; nIndex++) { - if (0 != nIndex) { - logOutput.append(", "); - } - logOutput.append(array[nIndex]); + public static final LogOutput.ObjFormatter OBJECT_FORMATTER = new LogOutput.ObjFormatter() { + @Override + public void format(LogOutput logOutput, Object object) { + Require.neqNull(logOutput, "logOutput"); + if (null == object) { + logOutput.append("null"); + } else if (object instanceof Throwable) { + logOutput.append((Throwable) object); + } else if (object instanceof LogOutputAppendable) { + ((LogOutputAppendable) object).append(logOutput); + } else if (object instanceof CharSequence) { + logOutput.append((CharSequence) object); + } else if (object instanceof byte[]) { + byte[] array = (byte[]) object; + 
logOutput.append('['); + for (int nIndex = 0, nLength = array.length; nIndex < nLength; nIndex++) { + if (0 != nIndex) { + logOutput.append(", "); } - logOutput.append(']'); - } else if (object instanceof short[]) { - short[] array = (short[]) object; - logOutput.append('['); - for (int nIndex = 0, nLength = array.length; nIndex < nLength; nIndex++) { - if (0 != nIndex) { - logOutput.append(", "); - } - logOutput.append(array[nIndex]); + logOutput.append(array[nIndex]); + } + logOutput.append(']'); + } else if (object instanceof short[]) { + short[] array = (short[]) object; + logOutput.append('['); + for (int nIndex = 0, nLength = array.length; nIndex < nLength; nIndex++) { + if (0 != nIndex) { + logOutput.append(", "); } - logOutput.append(']'); - } else if (object instanceof int[]) { - int[] array = (int[]) object; - logOutput.append('['); - for (int nIndex = 0, nLength = array.length; nIndex < nLength; nIndex++) { - if (0 != nIndex) { - logOutput.append(", "); - } - logOutput.append(array[nIndex]); + logOutput.append(array[nIndex]); + } + logOutput.append(']'); + } else if (object instanceof int[]) { + int[] array = (int[]) object; + logOutput.append('['); + for (int nIndex = 0, nLength = array.length; nIndex < nLength; nIndex++) { + if (0 != nIndex) { + logOutput.append(", "); } - logOutput.append(']'); - } else if (object instanceof long[]) { - long[] array = (long[]) object; - logOutput.append('['); - for (int nIndex = 0, nLength = array.length; nIndex < nLength; nIndex++) { - if (0 != nIndex) { - logOutput.append(", "); - } - logOutput.append(array[nIndex]); + logOutput.append(array[nIndex]); + } + logOutput.append(']'); + } else if (object instanceof long[]) { + long[] array = (long[]) object; + logOutput.append('['); + for (int nIndex = 0, nLength = array.length; nIndex < nLength; nIndex++) { + if (0 != nIndex) { + logOutput.append(", "); } - logOutput.append(']'); - } else if (object instanceof char[]) { - char[] array = (char[]) object; - 
logOutput.append('['); - for (int nIndex = 0, nLength = array.length; nIndex < nLength; nIndex++) { - if (0 != nIndex) { - logOutput.append(", "); - } - logOutput.append(array[nIndex]); + logOutput.append(array[nIndex]); + } + logOutput.append(']'); + } else if (object instanceof char[]) { + char[] array = (char[]) object; + logOutput.append('['); + for (int nIndex = 0, nLength = array.length; nIndex < nLength; nIndex++) { + if (0 != nIndex) { + logOutput.append(", "); } - logOutput.append(']'); - } else if (object instanceof float[]) { - float[] array = (float[]) object; - logOutput.append('['); - for (int nIndex = 0, nLength = array.length; nIndex < nLength; nIndex++) { - if (0 != nIndex) { - logOutput.append(", "); - } - logOutput.appendDouble(array[nIndex]); + logOutput.append(array[nIndex]); + } + logOutput.append(']'); + } else if (object instanceof float[]) { + float[] array = (float[]) object; + logOutput.append('['); + for (int nIndex = 0, nLength = array.length; nIndex < nLength; nIndex++) { + if (0 != nIndex) { + logOutput.append(", "); } - logOutput.append(']'); - } else if (object instanceof double[]) { - double[] array = (double[]) object; - logOutput.append('['); - for (int nIndex = 0, nLength = array.length; nIndex < nLength; nIndex++) { - if (0 != nIndex) { - logOutput.append(", "); - } - logOutput.appendDouble(array[nIndex]); + logOutput.appendDouble(array[nIndex]); + } + logOutput.append(']'); + } else if (object instanceof double[]) { + double[] array = (double[]) object; + logOutput.append('['); + for (int nIndex = 0, nLength = array.length; nIndex < nLength; nIndex++) { + if (0 != nIndex) { + logOutput.append(", "); } - logOutput.append(']'); - } else if (object instanceof boolean[]) { - boolean[] array = (boolean[]) object; - logOutput.append('['); - for (int nIndex = 0, nLength = array.length; nIndex < nLength; nIndex++) { - if (0 != nIndex) { - logOutput.append(", "); - } - logOutput.append(array[nIndex]); + 
logOutput.appendDouble(array[nIndex]); + } + logOutput.append(']'); + } else if (object instanceof boolean[]) { + boolean[] array = (boolean[]) object; + logOutput.append('['); + for (int nIndex = 0, nLength = array.length; nIndex < nLength; nIndex++) { + if (0 != nIndex) { + logOutput.append(", "); } - logOutput.append(']'); - } else if (object.getClass().isArray()) { - Object[] array = (Object[]) object; - logOutput.append('['); - for (int nIndex = 0, nLength = array.length; nIndex < nLength; nIndex++) { - if (0 != nIndex) { - logOutput.append(", "); - } - logOutput.append(OBJECT_FORMATTER, array[nIndex]); + logOutput.append(array[nIndex]); + } + logOutput.append(']'); + } else if (object.getClass().isArray()) { + Object[] array = (Object[]) object; + logOutput.append('['); + for (int nIndex = 0, nLength = array.length; nIndex < nLength; nIndex++) { + if (0 != nIndex) { + logOutput.append(", "); } - logOutput.append(']'); - } else if (object instanceof Byte) { - logOutput.append((Byte) object); - } else if (object instanceof Short) { - logOutput.append((Short) object); - } else if (object instanceof Integer) { - logOutput.append((Integer) object); - } else if (object instanceof Long) { - logOutput.append((Long) object); - } else if (object instanceof Character) { - logOutput.append((Character) object); - } else if (object instanceof Float) { - logOutput.appendDouble((Float) object); - } else if (object instanceof Double) { - logOutput.appendDouble((Double) object); - } else if (object instanceof Boolean) { - logOutput.append((Boolean) object); - } else { - // note: we could also handle a ByteBuffer, but I'm not sure that - // LogOutput.append(ByteBuffer) is appropriate if you weren't expecting it. 
- logOutput.append(object.toString()); + logOutput.append(OBJECT_FORMATTER, array[nIndex]); } + logOutput.append(']'); + } else if (object instanceof Byte) { + logOutput.append((Byte) object); + } else if (object instanceof Short) { + logOutput.append((Short) object); + } else if (object instanceof Integer) { + logOutput.append((Integer) object); + } else if (object instanceof Long) { + logOutput.append((Long) object); + } else if (object instanceof Character) { + logOutput.append((Character) object); + } else if (object instanceof Float) { + logOutput.appendDouble((Float) object); + } else if (object instanceof Double) { + logOutput.appendDouble((Double) object); + } else if (object instanceof Boolean) { + logOutput.append((Boolean) object); + } else { + // note: we could also handle a ByteBuffer, but I'm not sure that + // LogOutput.append(ByteBuffer) is appropriate if you weren't expecting it. + logOutput.append(object.toString()); } - }; + } + }; /** * Formats as "" if the object is null, or " (object)" if not null. */ public static final LogOutput.ObjFormatter OPTIONAL_OBJECT_FORMATTER = - new LogOutput.ObjFormatter() { - @Override - public void format(LogOutput logOutput, Object object) { - Require.neqNull(logOutput, "logOutput"); - if (null != object) { - logOutput.append(" (").append(OBJECT_FORMATTER, object).append(")"); + new LogOutput.ObjFormatter() { + @Override + public void format(LogOutput logOutput, Object object) { + Require.neqNull(logOutput, "logOutput"); + if (null != object) { + logOutput.append(" (").append(OBJECT_FORMATTER, object).append(")"); + } } - } - }; + }; /** * Formats an exception as "className: message" just like {@link Throwable#toString()}. 
*/ public static final LogOutput.ObjFormatter SIMPLE_EXCEPTION_FORMATTER = - new LogOutput.ObjFormatter() { - @Override - public void format(LogOutput logOutput, Throwable throwable) { - Require.neqNull(logOutput, "logOutput"); - if (null == throwable) { - logOutput.append("null"); - return; - } - logOutput.append(throwable.getClass().getName()); - String sMessage = throwable.getLocalizedMessage(); - if (null != sMessage) { - logOutput.append(": ").append(sMessage); + new LogOutput.ObjFormatter() { + @Override + public void format(LogOutput logOutput, Throwable throwable) { + Require.neqNull(logOutput, "logOutput"); + if (null == throwable) { + logOutput.append("null"); + return; + } + logOutput.append(throwable.getClass().getName()); + String sMessage = throwable.getLocalizedMessage(); + if (null != sMessage) { + logOutput.append(": ").append(sMessage); + } } - } - }; + }; /** Formats an int[] as "size:[a, b, c, ..]" or "null". */ - public static final LogOutput.ObjFormatter SIZE_INT_ARRAY_FORMATTER = - new LogOutput.ObjFormatter() { - @Override - public void format(LogOutput logOutput, int[] ints) { - if (null == ints) { - logOutput.append("null"); - } else { - logOutput.append(ints.length).append(":["); - for (int nIndex = 0, nLength = ints.length; nIndex < nLength; nIndex++) { - if (0 != nIndex) { - logOutput.append(", "); - } - logOutput.append(ints[nIndex]); + public static final LogOutput.ObjFormatter SIZE_INT_ARRAY_FORMATTER = new LogOutput.ObjFormatter() { + @Override + public void format(LogOutput logOutput, int[] ints) { + if (null == ints) { + logOutput.append("null"); + } else { + logOutput.append(ints.length).append(":["); + for (int nIndex = 0, nLength = ints.length; nIndex < nLength; nIndex++) { + if (0 != nIndex) { + logOutput.append(", "); } - logOutput.append("]"); + logOutput.append(ints[nIndex]); } + logOutput.append("]"); } - }; + } + }; // ################################################################ @@ -284,41 +281,37 @@ public void 
format(LogOutput logOutput, int[] ints) { /** * Appends a string in "0d 0h 0m 0.000'000'000s" format from a time interval in nanoseconds. */ - public static final LogOutput.LongFormatter FORMAT_INTERVAL_NANOS = - new LogOutput.LongFormatter() { - @Override - public void format(LogOutput logOutput, long tsInterval) { - internalFormatInterval(logOutput, tsInterval, 3); - } - }; + public static final LogOutput.LongFormatter FORMAT_INTERVAL_NANOS = new LogOutput.LongFormatter() { + @Override + public void format(LogOutput logOutput, long tsInterval) { + internalFormatInterval(logOutput, tsInterval, 3); + } + }; // ---------------------------------------------------------------- /** * Appends a string in "0d 0h 0m 0.000'000s" format from a time interval in microseconds. */ - public static final LogOutput.LongFormatter FORMAT_INTERVAL_MICROS = - new LogOutput.LongFormatter() { - @Override - public void format(LogOutput logOutput, long tsInterval) { - internalFormatInterval(logOutput, tsInterval, 2); - } - }; + public static final LogOutput.LongFormatter FORMAT_INTERVAL_MICROS = new LogOutput.LongFormatter() { + @Override + public void format(LogOutput logOutput, long tsInterval) { + internalFormatInterval(logOutput, tsInterval, 2); + } + }; // ---------------------------------------------------------------- /** * Appends a string in "0d 0h 0m 0.000s" format from a time interval in milliseconds. 
*/ - public static final LogOutput.LongFormatter FORMAT_INTERVAL_MILLIS = - new LogOutput.LongFormatter() { - @Override - public void format(LogOutput logOutput, long tsInterval) { - internalFormatInterval(logOutput, tsInterval, 1); - } - }; + public static final LogOutput.LongFormatter FORMAT_INTERVAL_MILLIS = new LogOutput.LongFormatter() { + @Override + public void format(LogOutput logOutput, long tsInterval) { + internalFormatInterval(logOutput, tsInterval, 1); + } + }; // ---------------------------------------------------------------- - private static void internalFormatInterval(LogOutput logOutput, long tsInterval, - int nThousands) { + private static void internalFormatInterval(LogOutput logOutput, long tsInterval, int nThousands) { if (tsInterval < 0) { logOutput.append("-"); @@ -370,9 +363,9 @@ private static void internalFormatInterval(LogOutput logOutput, long tsInterval, // ################################################################ /** - * Attempt to log a line of items at level to log. Fails silently if any Throwable is thrown, - * including Throwable's one might ordinarily prefer not to catch (e.g. InterruptedException, - * subclasses of Error, etc). This is intended for use in processes that are shutting down. + * Attempt to log a line of items at level to log. Fails silently if any Throwable is thrown, including Throwable's + * one might ordinarily prefer not to catch (e.g. InterruptedException, subclasses of Error, etc). This is intended + * for use in processes that are shutting down. 
* * @param log * @param level diff --git a/FishUtil/src/main/java/io/deephaven/util/loggers/ProcessNameFileAppender.java b/FishUtil/src/main/java/io/deephaven/util/loggers/ProcessNameFileAppender.java index 99d1e43f197..d8ab91c7a16 100644 --- a/FishUtil/src/main/java/io/deephaven/util/loggers/ProcessNameFileAppender.java +++ b/FishUtil/src/main/java/io/deephaven/util/loggers/ProcessNameFileAppender.java @@ -27,8 +27,7 @@ public ProcessNameFileAppender() { } @SuppressWarnings("unused") - public ProcessNameFileAppender(Layout layout, String filename, String datePattern) - throws IOException { + public ProcessNameFileAppender(Layout layout, String filename, String datePattern) throws IOException { super(layout, filename, datePattern); } @@ -49,34 +48,23 @@ public void activateOptions() { if (logDirJvmProp != null) { setFile(logDirJvmProp + File.separator + mainClass + suffix + ".log"); } else { - setFile(System.getProperty("workspace") + "/../logs/" + mainClass + suffix + ".log"); // can't - // use - // a - // property - // here - // since - // configuration - // needs - // log4j - // and - // log4j - // would - // need + setFile(System.getProperty("workspace") + "/../logs/" + mainClass + suffix + ".log"); // can't use a + // property here since + // configuration needs + // log4j and log4j + // would need // configuration } super.activateOptions(); } - // the append below is a hack. we grep out std out logs to send an email when the process is - // completed. since we are now logging to a file we also need the email messages to go to the - // console + // the append below is a hack. we grep out std out logs to send an email when the process is completed. 
since we are + // now logging to a file we also need the email messages to go to the console public void append(LoggingEvent event) { if (!first) { - super.append(new LoggingEvent(event.getFQNOfLoggerClass(), event.getLogger(), - Level.INFO, "******************************** " + new Date() - + " *******************************", - null)); + super.append(new LoggingEvent(event.getFQNOfLoggerClass(), event.getLogger(), Level.INFO, + "******************************** " + new Date() + " *******************************", null)); first = true; } diff --git a/FishUtil/src/main/java/io/deephaven/util/loggers/SimpleMailAppender.java b/FishUtil/src/main/java/io/deephaven/util/loggers/SimpleMailAppender.java index 467ee2862e8..af9c5327961 100644 --- a/FishUtil/src/main/java/io/deephaven/util/loggers/SimpleMailAppender.java +++ b/FishUtil/src/main/java/io/deephaven/util/loggers/SimpleMailAppender.java @@ -45,18 +45,15 @@ protected void append(LoggingEvent loggingEvent) { final StringBuilder subjectB = new StringBuilder(); if (Configuration.getInstance().hasProperty("system.type")) { - subjectB.append("[").append(Configuration.getInstance().getProperty("system.type")) - .append("] "); + subjectB.append("[").append(Configuration.getInstance().getProperty("system.type")).append("] "); } - subjectB.append(level).append(" ") - .append(loggingEvent.getMessage().toString().replaceFirst(".*FATAL", "")); + subjectB.append(level).append(" ").append(loggingEvent.getMessage().toString().replaceFirst(".*FATAL", "")); final StringBuilder message = new StringBuilder(); message.append("/*----------------------------------------*/\n"); message.append("Host: ").append(hostname).append("\n"); - message.append("Config: ").append(Configuration.getConfFileNameFromProperties()) - .append("\n"); + message.append("Config: ").append(Configuration.getConfFileNameFromProperties()).append("\n"); message.append("Date: ").append(new Date()).append("\n"); message.append("User: 
").append(System.getProperty("user.name")).append("\n"); message.append("Process: ").append(System.getProperty("process.name")).append("\n"); diff --git a/FishUtil/src/main/java/io/deephaven/util/process/BaseProcessEnvironment.java b/FishUtil/src/main/java/io/deephaven/util/process/BaseProcessEnvironment.java index a889836b784..36166f1b448 100644 --- a/FishUtil/src/main/java/io/deephaven/util/process/BaseProcessEnvironment.java +++ b/FishUtil/src/main/java/io/deephaven/util/process/BaseProcessEnvironment.java @@ -40,9 +40,9 @@ public abstract class BaseProcessEnvironment implements ProcessEnvironment { * @param log */ protected BaseProcessEnvironment(@NotNull final ShutdownManager shutdownManager, - @NotNull final FatalErrorReporter fatalErrorReporter, - @NotNull final String mainClassName, - @NotNull final Logger log) { + @NotNull final FatalErrorReporter fatalErrorReporter, + @NotNull final String mainClassName, + @NotNull final Logger log) { this.shutdownManager = Require.neqNull(shutdownManager, "shutdownManager"); this.fatalErrorReporter = Require.neqNull(fatalErrorReporter, "fatalErrorReporter"); this.mainClassName = Require.nonempty(mainClassName, "mainClassName"); @@ -68,9 +68,8 @@ public final String getMainClassName() { public final Logger getLog() { if (!Boolean.getBoolean("LoggerFactory.silenceOnProcessEnvironment")) { log.warn(new RuntimeException("Trace")) - .append( - "Logger being fetched via ProcessEnvironment instead of io.deephaven.internal.log.LoggerFactory") - .endl(); + .append("Logger being fetched via ProcessEnvironment instead of io.deephaven.internal.log.LoggerFactory") + .endl(); } return log; } diff --git a/FishUtil/src/main/java/io/deephaven/util/process/DefaultFatalErrorReporter.java b/FishUtil/src/main/java/io/deephaven/util/process/DefaultFatalErrorReporter.java index c588939704c..e39682f9278 100644 --- a/FishUtil/src/main/java/io/deephaven/util/process/DefaultFatalErrorReporter.java +++ 
b/FishUtil/src/main/java/io/deephaven/util/process/DefaultFatalErrorReporter.java @@ -21,17 +21,15 @@ public DefaultFatalErrorReporter() { @Override protected void reportImpl(@NotNull final String message, @NotNull final Throwable throwable, - boolean isUncaughtException) { - // Similar code has Thread.setDefaultUncaughtExceptionHandler(null); here "to prevent - // deadlocks." + boolean isUncaughtException) { + // Similar code has Thread.setDefaultUncaughtExceptionHandler(null); here "to prevent deadlocks." // I think our control over the actual System.exit() call is sufficient. - final boolean initiateShutdown = - !ProcessEnvironment.getGlobalShutdownManager().tasksInvoked(); + final boolean initiateShutdown = !ProcessEnvironment.getGlobalShutdownManager().tasksInvoked(); // It's a tricky proposition to try and write out to a io.deephaven.io.logger.Logger here. // Instead, we log to a PrintStream, ideally the original System.err. err.println(String.format("%s: %s", - initiateShutdown ? "Initiating shutdown due to" : "After shutdown initiated", message)); + initiateShutdown ? "Initiating shutdown due to" : "After shutdown initiated", message)); throwable.printStackTrace(err); if (initiateShutdown) { @@ -51,14 +49,13 @@ protected void reportImpl(@NotNull final String message, @NotNull final Throwabl } /** - * The semantics of {@link System#exit(int)} indicate that "This method never returns normally". - * We would like to preserve that property for our calls to {@link #report(String, Throwable)} - * even in the case where we aren't initiating - * {@link AsyncSystem#exit(String, int, PrintStream)}. + * The semantics of {@link System#exit(int)} indicate that "This method never returns normally". We would like to + * preserve that property for our calls to {@link #report(String, Throwable)} even in the case where we aren't + * initiating {@link AsyncSystem#exit(String, int, PrintStream)}. * *

    - * Note: the JVM will still exit once {@link System#exit(int)} finishes, even if the current - * thread is a non-daemon thread. + * Note: the JVM will still exit once {@link System#exit(int)} finishes, even if the current thread is a non-daemon + * thread. */ private void neverReturn() { // noinspection InfiniteLoopStatement diff --git a/FishUtil/src/main/java/io/deephaven/util/process/DefaultProcessEnvironment.java b/FishUtil/src/main/java/io/deephaven/util/process/DefaultProcessEnvironment.java index 860af42f6a9..61832651429 100644 --- a/FishUtil/src/main/java/io/deephaven/util/process/DefaultProcessEnvironment.java +++ b/FishUtil/src/main/java/io/deephaven/util/process/DefaultProcessEnvironment.java @@ -11,16 +11,14 @@ @SuppressWarnings("unused") public class DefaultProcessEnvironment extends BaseProcessEnvironment { - private DefaultProcessEnvironment(@NotNull final String mainClassName, - @NotNull final Logger log) { + private DefaultProcessEnvironment(@NotNull final String mainClassName, @NotNull final Logger log) { super(new ShutdownManagerImpl(), new DefaultFatalErrorReporter(), mainClassName, log); } @Override public void onStartup() { log.info().append(mainClassName).append(": starting up").endl(); - shutdownManager.registerTask(ShutdownManager.OrderingCategory.LAST, - new LoggerShutdownTask()); + shutdownManager.registerTask(ShutdownManager.OrderingCategory.LAST, new LoggerShutdownTask()); shutdownManager.addShutdownHookToRuntime(); } @@ -33,8 +31,8 @@ public void onShutdown() { static class Factory implements ProcessEnvironment.Factory { @Override - public ProcessEnvironment make(@NotNull final Configuration configuration, - @NotNull final String mainClassName, @NotNull final Logger log) { + public ProcessEnvironment make(@NotNull final Configuration configuration, @NotNull final String mainClassName, + @NotNull final Logger log) { return new DefaultProcessEnvironment(mainClassName, log); } } diff --git 
a/FishUtil/src/main/java/io/deephaven/util/process/FatalErrorReporterBase.java b/FishUtil/src/main/java/io/deephaven/util/process/FatalErrorReporterBase.java index b8ff7f78576..bc65f7f6f7b 100644 --- a/FishUtil/src/main/java/io/deephaven/util/process/FatalErrorReporterBase.java +++ b/FishUtil/src/main/java/io/deephaven/util/process/FatalErrorReporterBase.java @@ -21,16 +21,15 @@ private final class FatalException extends RuntimeException { } /** - * Report a fatal error in an implementation specific way. Implementations should invoke - * appropriate shutdown tasks and initiate process shutdown (e.g. via {@link System#exit(int)}). + * Report a fatal error in an implementation specific way. Implementations should invoke appropriate shutdown tasks + * and initiate process shutdown (e.g. via {@link System#exit(int)}). * * @param message the message * @param throwable the throwable * @param isFromUncaught true iff called from * {@link java.lang.Thread.UncaughtExceptionHandler#uncaughtException(Thread, Throwable)}. 
*/ - protected abstract void reportImpl(@NotNull String message, @NotNull Throwable throwable, - boolean isFromUncaught); + protected abstract void reportImpl(@NotNull String message, @NotNull Throwable throwable, boolean isFromUncaught); @Override public final void report(@NotNull final String message, @NotNull final Throwable throwable) { @@ -44,10 +43,9 @@ public final void report(@NotNull final String message) { } @Override - public final void reportAsync(@NotNull final String message, - @NotNull final Throwable throwable) { - new Thread(() -> report(message, throwable), - Thread.currentThread().getName() + "-AsyncFatalErrorSignaller").start(); + public final void reportAsync(@NotNull final String message, @NotNull final Throwable throwable) { + new Thread(() -> report(message, throwable), Thread.currentThread().getName() + "-AsyncFatalErrorSignaller") + .start(); } @Override @@ -56,8 +54,7 @@ public final void reportAsync(@NotNull final String message) { } @Override - public final void uncaughtException(@NotNull final Thread thread, - @NotNull final Throwable throwable) { + public final void uncaughtException(@NotNull final Thread thread, @NotNull final Throwable throwable) { final String message = "Uncaught exception in thread " + thread.getName(); interceptors.forEach(interceptor -> interceptor.intercept(message, throwable)); reportImpl(message, throwable, true); diff --git a/FishUtil/src/main/java/io/deephaven/util/process/LoggerShutdownTask.java b/FishUtil/src/main/java/io/deephaven/util/process/LoggerShutdownTask.java index 1ff10aa4fed..ce8b6e4fffb 100644 --- a/FishUtil/src/main/java/io/deephaven/util/process/LoggerShutdownTask.java +++ b/FishUtil/src/main/java/io/deephaven/util/process/LoggerShutdownTask.java @@ -17,10 +17,9 @@ protected void shutdown() { LogManager.shutdown(); } - // I think I can replace all of the following (adapted from ServiceFactorImpl.finishLogging() - // and related methods) with - // LogManager.shutdown(). 
Keeping this around for posterity until I've got sufficient confidence - // in the new way: + // I think I can replace all of the following (adapted from ServiceFactorImpl.finishLogging() and related methods) + // with + // LogManager.shutdown(). Keeping this around for posterity until I've got sufficient confidence in the new way: // --------------------------------------------------------------------------------------------------------------------- // // private static void tryToFlushLo44jAppenders() { @@ -28,8 +27,8 @@ protected void shutdown() { // if (log4jAppender instanceof WriterAppender){ // ((WriterAppender)log4jAppender).setImmediateFlush(true); // log4jAppender.setLayout(new PatternLayout("")); - // ((WriterAppender)log4jAppender).append(new LoggingEvent("LoggerShutdownTask", , - // Level.INFO, null, null)); + // ((WriterAppender)log4jAppender).append(new LoggingEvent("LoggerShutdownTask", , Level.INFO, null, + // null)); // } // } // } @@ -37,27 +36,26 @@ protected void shutdown() { // @SuppressWarnings("unchecked") // private static Collection gatherLog4jAppenders() { // final HashSet appenders = new HashSet<>(); - // for (final Enumeration currentLoggers = - // LogManager.getCurrentLoggers(); currentLoggers.hasMoreElements();) { - // for (final Enumeration currentAppenders = - // currentLoggers.nextElement().getAllAppenders(); currentAppenders.hasMoreElements();) { + // for (final Enumeration currentLoggers = LogManager.getCurrentLoggers(); + // currentLoggers.hasMoreElements();) { + // for (final Enumeration currentAppenders = currentLoggers.nextElement().getAllAppenders(); + // currentAppenders.hasMoreElements();) { // addLog4jAppenders(appenders, currentAppenders.nextElement()); // } // } - // for (final Enumeration rootLoggerAppenders = - // LogManager.getRootLogger().getAllAppenders(); rootLoggerAppenders.hasMoreElements();) { + // for (final Enumeration rootLoggerAppenders = LogManager.getRootLogger().getAllAppenders(); + // 
rootLoggerAppenders.hasMoreElements();) { // addLog4jAppenders(appenders, rootLoggerAppenders.nextElement()); // } // return appenders; // } // // @SuppressWarnings("unchecked") - // private static void addLog4jAppenders(final Collection appenders, final Appender - // appender){ + // private static void addLog4jAppenders(final Collection appenders, final Appender appender){ // appenders.add(appender); // if (appender instanceof AppenderAttachable){ - // for (final Enumeration attachedAppenders = - // ((AppenderAttachable)appender).getAllAppenders(); attachedAppenders.hasMoreElements();) { + // for (final Enumeration attachedAppenders = ((AppenderAttachable)appender).getAllAppenders(); + // attachedAppenders.hasMoreElements();) { // addLog4jAppenders(appenders, attachedAppenders.nextElement()); // } // } diff --git a/FishUtil/src/main/java/io/deephaven/util/process/OnetimeShutdownTask.java b/FishUtil/src/main/java/io/deephaven/util/process/OnetimeShutdownTask.java index 8a48c815083..628746b5dc8 100644 --- a/FishUtil/src/main/java/io/deephaven/util/process/OnetimeShutdownTask.java +++ b/FishUtil/src/main/java/io/deephaven/util/process/OnetimeShutdownTask.java @@ -12,8 +12,7 @@ public abstract class OnetimeShutdownTask implements ShutdownManager.Task { /** - * NB: This doesn't need to be an AtomicBoolean, only a volatile boolean, but we use the object - * for its monitor. + * NB: This doesn't need to be an AtomicBoolean, only a volatile boolean, but we use the object for its monitor. 
*/ private final AtomicBoolean isShutdown = new AtomicBoolean(false); diff --git a/FishUtil/src/main/java/io/deephaven/util/process/ProcessEnvironment.java b/FishUtil/src/main/java/io/deephaven/util/process/ProcessEnvironment.java index 0d11d22a453..fdf921a053c 100644 --- a/FishUtil/src/main/java/io/deephaven/util/process/ProcessEnvironment.java +++ b/FishUtil/src/main/java/io/deephaven/util/process/ProcessEnvironment.java @@ -13,8 +13,8 @@ import org.jetbrains.annotations.NotNull; /** - * Interface for installation-specific environment. Processes that use instances of this interface - * are responsible for determining when to call the various methods. + * Interface for installation-specific environment. Processes that use instances of this interface are responsible for + * determining when to call the various methods. * * All implementations must setup a ShutdownManager and an implementation of FatalErrorReporter. */ @@ -55,10 +55,9 @@ public interface ProcessEnvironment { void onStartup(); /** - * Hook for shutting down an installation-specific environment for a given process. This is - * intended for controlled invocation, rather than as part of a shutdown hook - - * {@code onStartup()} should setup any mandatory shutdown hooks, and code in said shutdown - * hooks should be idempotent w.r.t. onShutdown(). + * Hook for shutting down an installation-specific environment for a given process. This is intended for controlled + * invocation, rather than as part of a shutdown hook - {@code onStartup()} should setup any mandatory shutdown + * hooks, and code in said shutdown hooks should be idempotent w.r.t. onShutdown(). 
*/ void onShutdown(); @@ -76,7 +75,7 @@ interface Factory { * @return A new instance of the appropriate ProcessEnvironment implementation */ ProcessEnvironment make(@NotNull Configuration configuration, @NotNull String mainClassName, - @NotNull Logger log); + @NotNull Logger log); } /** @@ -89,8 +88,8 @@ class GlobalHelper { */ private static volatile ProcessEnvironment instance; - private static void reportInstantiationError(@NotNull final String message, - @NotNull final Exception exception, final int exitStatus) { + private static void reportInstantiationError(@NotNull final String message, @NotNull final Exception exception, + final int exitStatus) { System.err.println(message + ": " + exception); exception.printStackTrace(System.err); System.exit(exitStatus); @@ -98,8 +97,7 @@ private static void reportInstantiationError(@NotNull final String message, } /** - * Accessor the for the global instance. Fails if the instance is null, in order to make - * programming errors clear. + * Accessor the for the global instance. Fails if the instance is null, in order to make programming errors clear. * * @return The global instance */ @@ -133,8 +131,8 @@ static Logger getGlobalLog() { } /** - * Get the global log if a global process environment has been installed, or else a logger that - * will output to {@link System#out}. + * Get the global log if a global process environment has been installed, or else a logger that will output to + * {@link System#out}. * * @return A logger that can safely be used by code that doesn't otherwise have access to one */ @@ -151,24 +149,21 @@ static Logger getDefaultLog() { */ static Logger getDefaultLog(Class clazz) { final ProcessEnvironment processEnvironment = ProcessEnvironment.tryGet(); - return processEnvironment != null ? processEnvironment.getLog() - : LoggerFactory.getLogger(clazz); + return processEnvironment != null ? processEnvironment.getLog() : LoggerFactory.getLogger(clazz); } /** - * Setter for the global instance. 
It is an error to invoke this without allowReplace if the - * global instance may already have been set. + * Setter for the global instance. It is an error to invoke this without allowReplace if the global instance may + * already have been set. * * @param instance The new global instance * @param allowReplace Whether to allow replacing an existing global instance * @return The global instance */ - static ProcessEnvironment set(@NotNull final ProcessEnvironment instance, - final boolean allowReplace) { + static ProcessEnvironment set(@NotNull final ProcessEnvironment instance, final boolean allowReplace) { synchronized (GlobalHelper.class) { if (!allowReplace && GlobalHelper.instance != null) { - throw new IllegalStateException( - "Can not replace ProcessEnvironment " + GlobalHelper.instance + + throw new IllegalStateException("Can not replace ProcessEnvironment " + GlobalHelper.instance + " with " + instance + " unless allowReplace=true"); } return GlobalHelper.instance = Require.neqNull(instance, "instance"); @@ -176,8 +171,7 @@ static ProcessEnvironment set(@NotNull final ProcessEnvironment instance, } /** - * Set the global instance to null, and reset its shutdown manager. Intended for use in unit - * tests, only. + * Set the global instance to null, and reset its shutdown manager. Intended for use in unit tests, only. 
*/ static void clear() { synchronized (GlobalHelper.class) { @@ -187,9 +181,9 @@ static void clear() { } /** - * Instantiate (and set as the global instance) a ProcessEnvironment specified according to the - * following factory class name properties, in descending order of precedence: - * {@code mainClassName}.processEnvironmentFactory default.processEnvironmentFactory + * Instantiate (and set as the global instance) a ProcessEnvironment specified according to the following factory + * class name properties, in descending order of precedence: {@code mainClassName}.processEnvironmentFactory + * default.processEnvironmentFactory * * Checked exceptions thrown in this process always result in process termination. * @@ -199,76 +193,69 @@ static void clear() { * @return The new ProcessEnvironment */ @SuppressWarnings("ConstantConditions") - static @NotNull ProcessEnvironment instantiateFromConfiguration( - @NotNull final Configuration configuration, - @NotNull final String mainClassName, - @NotNull final Logger log) { + static @NotNull ProcessEnvironment instantiateFromConfiguration(@NotNull final Configuration configuration, + @NotNull final String mainClassName, + @NotNull final Logger log) { final String factoryPropertyNameSuffix = ".processEnvironmentFactory"; final String factoryClassName; try { - factoryClassName = - configuration.getStringWithDefault(mainClassName + factoryPropertyNameSuffix, + factoryClassName = configuration.getStringWithDefault(mainClassName + factoryPropertyNameSuffix, configuration.getProperty("default" + factoryPropertyNameSuffix)); } catch (PropertyException e) { - GlobalHelper.reportInstantiationError( - "Failed to get process environment factory name for " + mainClassName, e, -11); + GlobalHelper.reportInstantiationError("Failed to get process environment factory name for " + mainClassName, + e, -11); return null; } final Factory factory; try { factory = (Factory) Class.forName(factoryClassName).newInstance(); - } catch 
(IllegalAccessException | InstantiationException | ClassNotFoundException - | ClassCastException e) { + } catch (IllegalAccessException | InstantiationException | ClassNotFoundException | ClassCastException e) { GlobalHelper.reportInstantiationError( - "Failed to instantiate process environment factory " + factoryClassName, e, -12); + "Failed to instantiate process environment factory " + factoryClassName, e, -12); return null; } try { return factory.make(configuration, mainClassName, log); } catch (Exception e) { - GlobalHelper.reportInstantiationError( - "Unable to instantiate process environment for " + mainClassName, e, -13); + GlobalHelper.reportInstantiationError("Unable to instantiate process environment for " + mainClassName, e, + -13); return null; } } /** - * Do the basic installation most interactive (usually client) processes need: (1) - * instantiateFromConfiguration (2) set (3) Invoke onStartup() + * Do the basic installation most interactive (usually client) processes need: (1) instantiateFromConfiguration (2) + * set (3) Invoke onStartup() * * @param configuration The configuration * @param mainClassName The main class name * @param log The log * @return The ProcessEnvironment */ - static @NotNull ProcessEnvironment basicInteractiveProcessInitialization( - @NotNull final Configuration configuration, - @NotNull final String mainClassName, - @NotNull final Logger log) { - final ProcessEnvironment processEnvironment = - instantiateFromConfiguration(configuration, mainClassName, log); + static @NotNull ProcessEnvironment basicInteractiveProcessInitialization(@NotNull final Configuration configuration, + @NotNull final String mainClassName, + @NotNull final Logger log) { + final ProcessEnvironment processEnvironment = instantiateFromConfiguration(configuration, mainClassName, log); set(processEnvironment, false); processEnvironment.onStartup(); return processEnvironment; } /** - * Do the basic installation most server processes need: (1) - * 
basicInteractiveProcessInitialization (as a subset of what servers need) (2) Set current - * thread name as {@code mainClassName}.main (3) Install the fatal error reporter as default - * uncaught exception handler + * Do the basic installation most server processes need: (1) basicInteractiveProcessInitialization (as a subset of + * what servers need) (2) Set current thread name as {@code mainClassName}.main (3) Install the fatal error reporter + * as default uncaught exception handler * * @param configuration The configuration * @param mainClassName The main class name * @param log The log * @return The ProcessEnvironment */ - static @NotNull ProcessEnvironment basicServerInitialization( - @NotNull final Configuration configuration, - @NotNull final String mainClassName, - @NotNull final Logger log) { + static @NotNull ProcessEnvironment basicServerInitialization(@NotNull final Configuration configuration, + @NotNull final String mainClassName, + @NotNull final Logger log) { final ProcessEnvironment processEnvironment = - basicInteractiveProcessInitialization(configuration, mainClassName, log); + basicInteractiveProcessInitialization(configuration, mainClassName, log); Thread.currentThread().setName(mainClassName + ".main"); Thread.setDefaultUncaughtExceptionHandler(processEnvironment.getFatalErrorReporter()); return processEnvironment; diff --git a/FishUtil/src/main/java/io/deephaven/util/process/ShutdownManager.java b/FishUtil/src/main/java/io/deephaven/util/process/ShutdownManager.java index 9e515ea6998..4180b957ef6 100644 --- a/FishUtil/src/main/java/io/deephaven/util/process/ShutdownManager.java +++ b/FishUtil/src/main/java/io/deephaven/util/process/ShutdownManager.java @@ -15,21 +15,20 @@ public interface ShutdownManager { enum OrderingCategory { /** - * Tasks that should be kicked off before the rest. 
For example, disconnecting clients that - * may otherwise be poorly served during shutdown processing, or may cause shutdown delays - * by adding additional work. + * Tasks that should be kicked off before the rest. For example, disconnecting clients that may otherwise be + * poorly served during shutdown processing, or may cause shutdown delays by adding additional work. */ FIRST, /** - * Tasks that have no particular timeliness requirement. For example, flushing persistent - * stores to permanent storage. + * Tasks that have no particular timeliness requirement. For example, flushing persistent stores to permanent + * storage. */ MIDDLE, /** - * Tasks that should be dispatched after others. For example, shutting down a logger - * framework and flushing log data. + * Tasks that should be dispatched after others. For example, shutting down a logger framework and flushing log + * data. */ LAST } @@ -47,15 +46,14 @@ interface Task { } /** - * Add a shutdown hook to the runtime that will invoke all registered shutdown tasks, if they - * haven't previously been invoked. + * Add a shutdown hook to the runtime that will invoke all registered shutdown tasks, if they haven't previously + * been invoked. */ void addShutdownHookToRuntime(); /** - * Register task for shutdown invocation along with other tasks belonging to orderingCategory. - * Registration concurrent with invocation (that is, shutdown in progress) is not guaranteed to - * be effective. + * Register task for shutdown invocation along with other tasks belonging to orderingCategory. Registration + * concurrent with invocation (that is, shutdown in progress) is not guaranteed to be effective. * * @param orderingCategory * @param task @@ -63,8 +61,8 @@ interface Task { void registerTask(@NotNull OrderingCategory orderingCategory, @NotNull Task task); /** - * Remove the most recent registration of task with orderingCategory. 
De-registration concurrent - * with invocation (that is, shutdown in progress) is not guaranteed to be effective. + * Remove the most recent registration of task with orderingCategory. De-registration concurrent with invocation + * (that is, shutdown in progress) is not guaranteed to be effective. * * @param orderingCategory * @param task @@ -72,8 +70,7 @@ interface Task { void deregisterTask(@NotNull OrderingCategory orderingCategory, @NotNull Task task); /** - * Clear all shutdown tasks and reset internal state. Useful for unit tests, not safe for - * production use cases. + * Clear all shutdown tasks and reset internal state. Useful for unit tests, not safe for production use cases. */ void reset(); diff --git a/FishUtil/src/main/java/io/deephaven/util/process/ShutdownManagerImpl.java b/FishUtil/src/main/java/io/deephaven/util/process/ShutdownManagerImpl.java index d1aa08b4f6c..fb7d5aa7a01 100644 --- a/FishUtil/src/main/java/io/deephaven/util/process/ShutdownManagerImpl.java +++ b/FishUtil/src/main/java/io/deephaven/util/process/ShutdownManagerImpl.java @@ -23,9 +23,8 @@ import java.util.concurrent.atomic.AtomicBoolean; /** - * This is a helper class for keeping track of one-time shutdown tasks. Tasks are dispatched - * serially according to their ordering category (first, middle, last), and in LIFO (last in, first - * out) order within their category. + * This is a helper class for keeping track of one-time shutdown tasks. Tasks are dispatched serially according to their + * ordering category (first, middle, last), and in LIFO (last in, first out) order within their category. */ @SuppressWarnings("WeakerAccess") public class ShutdownManagerImpl implements ShutdownManager { @@ -35,14 +34,13 @@ public class ShutdownManagerImpl implements ShutdownManager { /** * Property for configuring "if all else fails" process halt, to prevent zombie processes. 
*/ - private static final String SHUTDOWN_TIMEOUT_MILLIS_PROP = - "ShutdownManager.shutdownTimeoutMillis"; + private static final String SHUTDOWN_TIMEOUT_MILLIS_PROP = "ShutdownManager.shutdownTimeoutMillis"; /** * Timeout for "if all else fails" process halt, to prevent zombie processes. */ private final long SHUTDOWN_TIMEOUT_MILLIS = - Configuration.getInstance().getLongWithDefault(SHUTDOWN_TIMEOUT_MILLIS_PROP, -1); + Configuration.getInstance().getLongWithDefault(SHUTDOWN_TIMEOUT_MILLIS_PROP, -1); /** * Shutdown task stacks by ordering category. Note, EnumMaps iterate in ordinal order. @@ -50,9 +48,9 @@ public class ShutdownManagerImpl implements ShutdownManager { private final Map> tasksByOrderingCategory; { final EnumMap> taskStacksByOrderingCategoryTemp = - new EnumMap<>(OrderingCategory.class); + new EnumMap<>(OrderingCategory.class); Arrays.stream(OrderingCategory.values()) - .forEach(oc -> taskStacksByOrderingCategoryTemp.put(oc, new SynchronizedStack<>())); + .forEach(oc -> taskStacksByOrderingCategoryTemp.put(oc, new SynchronizedStack<>())); tasksByOrderingCategory = Collections.unmodifiableMap(taskStacksByOrderingCategoryTemp); } @@ -75,17 +73,13 @@ public void addShutdownHookToRuntime() { } @Override - public void registerTask(@NotNull final OrderingCategory orderingCategory, - @NotNull final Task task) { - tasksByOrderingCategory.get(Require.neqNull(orderingCategory, "orderingCategory")) - .push(task); + public void registerTask(@NotNull final OrderingCategory orderingCategory, @NotNull final Task task) { + tasksByOrderingCategory.get(Require.neqNull(orderingCategory, "orderingCategory")).push(task); } @Override - public void deregisterTask(@NotNull final OrderingCategory orderingCategory, - @NotNull final Task task) { - tasksByOrderingCategory.get(Require.neqNull(orderingCategory, "orderingCategory")) - .remove(task); + public void deregisterTask(@NotNull final OrderingCategory orderingCategory, @NotNull final Task task) { + 
tasksByOrderingCategory.get(Require.neqNull(orderingCategory, "orderingCategory")).remove(task); } @Override @@ -149,9 +143,9 @@ public synchronized void clear() { } /** - * Attempt to log a line of items. Fails silently if any Throwable is thrown, including - * Throwables one might ordinarily prefer not to catch (e.g. InterruptedException, subclasses of - * Error, etc). This is intended for use in processes that are shutting down. + * Attempt to log a line of items. Fails silently if any Throwable is thrown, including Throwables one might + * ordinarily prefer not to catch (e.g. InterruptedException, subclasses of Error, etc). This is intended for use in + * processes that are shutting down. */ public static void logShutdown(final LogLevel level, final Object... items) { try { @@ -165,16 +159,15 @@ public static void logShutdown(final LogLevel level, final Object... items) { } /** - * Watchdog thread that will halt the application if it fails to finish in the configured amount - * of time. + * Watchdog thread that will halt the application if it fails to finish in the configured amount of time. */ private void ensureTermination() { final long start = System.nanoTime(); final long deadline = start + TimeUnit.MILLISECONDS.toNanos(SHUTDOWN_TIMEOUT_MILLIS); for (long now = start; now < deadline; now = System.nanoTime()) { final long nanosRemaining = deadline - now; - final long millisRemainingRoundedUp = TimeUnit.NANOSECONDS - .toMillis(nanosRemaining + TimeUnit.MILLISECONDS.toNanos(1) - 1); + final long millisRemainingRoundedUp = + TimeUnit.NANOSECONDS.toMillis(nanosRemaining + TimeUnit.MILLISECONDS.toNanos(1) - 1); try { Thread.sleep(millisRemainingRoundedUp); } catch (InterruptedException ignored) { @@ -182,13 +175,12 @@ private void ensureTermination() { } final PrintStream destStdErr = PrintStreamGlobals.getErr(); - destStdErr.println("Halting due to shutdown delay greater than " + SHUTDOWN_TIMEOUT_MILLIS - + "ms. 
Thread dump:"); + destStdErr + .println("Halting due to shutdown delay greater than " + SHUTDOWN_TIMEOUT_MILLIS + "ms. Thread dump:"); try { ThreadDump.threadDump(destStdErr); destStdErr.println(); - destStdErr.println( - "Halted due to shutdown delay greater than " + SHUTDOWN_TIMEOUT_MILLIS + "ms"); + destStdErr.println("Halted due to shutdown delay greater than " + SHUTDOWN_TIMEOUT_MILLIS + "ms"); } catch (Throwable t) { destStdErr.println("Failed to generate thread dump: " + t); } finally { @@ -198,13 +190,11 @@ private void ensureTermination() { } /** - * If configured to do so, start a watchdog thread that will halt the application if it gets - * hung during shutdown. + * If configured to do so, start a watchdog thread that will halt the application if it gets hung during shutdown. */ private void installTerminator() { if (SHUTDOWN_TIMEOUT_MILLIS >= 0) { - final Thread terminator = - new Thread(this::ensureTermination, "ShutdownTimeoutTerminator"); + final Thread terminator = new Thread(this::ensureTermination, "ShutdownTimeoutTerminator"); terminator.setDaemon(true); terminator.start(); } diff --git a/FishUtil/src/main/java/io/deephaven/util/signals/SignalSender.java b/FishUtil/src/main/java/io/deephaven/util/signals/SignalSender.java index 728f9f63f7e..eb0c99f5f3b 100644 --- a/FishUtil/src/main/java/io/deephaven/util/signals/SignalSender.java +++ b/FishUtil/src/main/java/io/deephaven/util/signals/SignalSender.java @@ -25,8 +25,7 @@ public SignalSender(@NotNull final Logger log, final boolean useNative) { } /** - * Helper method - sends SIQQUIT to a process. If this process is a JVM, it will send a stack - * dump to stdout. + * Helper method - sends SIQQUIT to a process. If this process is a JVM, it will send a stack dump to stdout. 
* * @param processId The process ID to send the signal to * @return true on success, false on error @@ -73,8 +72,7 @@ public boolean suspend(final int processId) { * @return true on success, false on error */ private boolean sendSignal(final int processId, final SignalUtils.Signal signal) { - Require.gtZero(processId, "processId"); // Don't want to allow fancier usages for now. See - // 'man -s 2 kill'. + Require.gtZero(processId, "processId"); // Don't want to allow fancier usages for now. See 'man -s 2 kill'. Require.neqNull(signal, "signal"); final int rc; @@ -84,9 +82,8 @@ private boolean sendSignal(final int processId, final SignalUtils.Signal signal) try { rc = SignalUtils.sendSignalWithBinKill(processId, signal.getSignalName()); } catch (IOException e) { - log.error().append("sendSignal: Exception while using /bin/kill to send ") - .append(signal.toString()).append(" to processId ").append(processId) - .append(": ").append(e).endl(); + log.error().append("sendSignal: Exception while using /bin/kill to send ").append(signal.toString()) + .append(" to processId ").append(processId).append(": ").append(e).endl(); return false; } } @@ -94,11 +91,10 @@ private boolean sendSignal(final int processId, final SignalUtils.Signal signal) if (rc == 0) { return true; } - log.error().append("sendSignal: Error while using ") - .append(useNative ? "native code" : "/bin/kill") - .append(" to send ").append(signal.toString()) - .append(" to processId ").append(processId) - .append(": kill returned ").append(rc).endl(); + log.error().append("sendSignal: Error while using ").append(useNative ? 
"native code" : "/bin/kill") + .append(" to send ").append(signal.toString()) + .append(" to processId ").append(processId) + .append(": kill returned ").append(rc).endl(); return false; } diff --git a/FishUtil/src/main/java/io/deephaven/util/signals/SignalUtils.java b/FishUtil/src/main/java/io/deephaven/util/signals/SignalUtils.java index b3784063e91..d56439becee 100644 --- a/FishUtil/src/main/java/io/deephaven/util/signals/SignalUtils.java +++ b/FishUtil/src/main/java/io/deephaven/util/signals/SignalUtils.java @@ -21,18 +21,18 @@ public class SignalUtils { private static final int UNDEFINED_SIGNAL_NUMBER = Integer.MIN_VALUE; /** - * Supported signals. Be careful when adding new entries - as you can see, signal numbers don't - * always line up across operating systems. + * Supported signals. Be careful when adding new entries - as you can see, signal numbers don't always line up + * across operating systems. */ public enum Signal { - SIGINT("int", 2, 2, 2), SIGTERM("term", 15, 15, 15), SIGQUIT("quit", 3, 3, - 3), SIGKILL("kill", 9, 9, 9), SIGSTOP("stop", 19, 23, 17), SIGCONT("cont", 18, 25, 19); + SIGINT("int", 2, 2, 2), SIGTERM("term", 15, 15, 15), SIGQUIT("quit", 3, 3, 3), SIGKILL("kill", 9, 9, + 9), SIGSTOP("stop", 19, 23, 17), SIGCONT("cont", 18, 25, 19); private final String signalName; private final int signalNumber; Signal(final String signalName, final int linuxSignalNumber, final int solarisSignalNumber, - final int macOsSignalNumber) { + final int macOsSignalNumber) { this.signalName = signalName; switch (OPERATING_SYSTEM) { case LINUX: @@ -57,8 +57,7 @@ public String getSignalName() { public int getSignalNumber() { if (signalNumber == UNDEFINED_SIGNAL_NUMBER) { - throw new UnsupportedOperationException( - this + " is undefined on " + OPERATING_SYSTEM); + throw new UnsupportedOperationException(this + " is undefined on " + OPERATING_SYSTEM); } return signalNumber; } @@ -72,10 +71,8 @@ public int getSignalNumber() { * @return The exit value of the child 
process. */ @SuppressWarnings("WeakerAccess") - public static int sendSignalWithBinKill(final int processId, final String signalName) - throws IOException { - final ProcessBuilder pb = - new ProcessBuilder("/bin/kill", "-s", signalName, Integer.toString(processId)); + public static int sendSignalWithBinKill(final int processId, final String signalName) throws IOException { + final ProcessBuilder pb = new ProcessBuilder("/bin/kill", "-s", signalName, Integer.toString(processId)); final Process p = pb.start(); try { @@ -83,10 +80,8 @@ public static int sendSignalWithBinKill(final int processId, final String signal p.getInputStream().close(); p.getOutputStream().close(); } catch (IOException e) { - throw new AssertionError( - "sendSignalWithBinKill: unexpected exception while closing child process streams: " - + e.getMessage(), - e); + throw new AssertionError("sendSignalWithBinKill: unexpected exception while closing child process streams: " + + e.getMessage(), e); } while (true) { diff --git a/FishUtil/src/main/java/io/deephaven/util/threads/ThreadDump.java b/FishUtil/src/main/java/io/deephaven/util/threads/ThreadDump.java index 54bae2cca8e..6fa6d043caa 100644 --- a/FishUtil/src/main/java/io/deephaven/util/threads/ThreadDump.java +++ b/FishUtil/src/main/java/io/deephaven/util/threads/ThreadDump.java @@ -13,9 +13,8 @@ import java.lang.management.ThreadMXBean; /** - * A simple method for generating a Thread dump for this JVM; it doesn't do all the stuff that the - * kill -3 does; but you can easily run it from inside the JVM without having to send yourself a - * signal. + * A simple method for generating a Thread dump for this JVM; it doesn't do all the stuff that the kill -3 does; but you + * can easily run it from inside the JVM without having to send yourself a signal. 
*/ public class ThreadDump { @SuppressWarnings("WeakerAccess") diff --git a/Generators/src/main/java/io/deephaven/db/plot/util/GenerateAxesPlotMethods.java b/Generators/src/main/java/io/deephaven/db/plot/util/GenerateAxesPlotMethods.java index aec8b0ea03e..d1b01d36ce6 100644 --- a/Generators/src/main/java/io/deephaven/db/plot/util/GenerateAxesPlotMethods.java +++ b/Generators/src/main/java/io/deephaven/db/plot/util/GenerateAxesPlotMethods.java @@ -54,8 +54,7 @@ public String getVariableType(int index) { @Override public String getIndexableDataCode(String variableName) { - return "new IndexableNumericDataArrayShort(" + variableName + ", " + PLOT_INFO_ID - + ")"; + return "new IndexableNumericDataArrayShort(" + variableName + ", " + PLOT_INFO_ID + ")"; } @Override @@ -77,8 +76,7 @@ public String getVariableType(int index) { @Override public String getIndexableDataCode(String variableName) { - return "new IndexableNumericDataArrayInt(" + variableName + ", " + PLOT_INFO_ID - + ")"; + return "new IndexableNumericDataArrayInt(" + variableName + ", " + PLOT_INFO_ID + ")"; } @Override @@ -100,8 +98,7 @@ public String getVariableType(int index) { @Override public String getIndexableDataCode(String variableName) { - return "new IndexableNumericDataArrayLong(" + variableName + ", " + PLOT_INFO_ID - + ")"; + return "new IndexableNumericDataArrayLong(" + variableName + ", " + PLOT_INFO_ID + ")"; } @Override @@ -123,8 +120,7 @@ public String getVariableType(int index) { @Override public String getIndexableDataCode(String variableName) { - return "new IndexableNumericDataArrayFloat(" + variableName + ", " + PLOT_INFO_ID - + ")"; + return "new IndexableNumericDataArrayFloat(" + variableName + ", " + PLOT_INFO_ID + ")"; } @Override @@ -146,8 +142,7 @@ public String getVariableType(int index) { @Override public String getIndexableDataCode(String variableName) { - return "new IndexableNumericDataArrayDouble(" + variableName + ", " + PLOT_INFO_ID - + ")"; + return "new 
IndexableNumericDataArrayDouble(" + variableName + ", " + PLOT_INFO_ID + ")"; } @Override @@ -169,8 +164,7 @@ public String getVariableType(int index) { @Override public String getIndexableDataCode(String variableName) { - return "new IndexableNumericDataArrayDate(" + variableName + ", " + PLOT_INFO_ID - + ")"; + return "new IndexableNumericDataArrayDate(" + variableName + ", " + PLOT_INFO_ID + ")"; } @Override @@ -192,8 +186,7 @@ public String getVariableType(int index) { @Override public String getIndexableDataCode(String variableName) { - return "new IndexableNumericDataArrayDBDateTime(" + variableName + ", " - + PLOT_INFO_ID + ")"; + return "new IndexableNumericDataArrayDBDateTime(" + variableName + ", " + PLOT_INFO_ID + ")"; } @Override @@ -215,8 +208,7 @@ public String getVariableType(int index) { @Override public String getIndexableDataCode(String variableName) { - return "new IndexableNumericDataArrayNumber<>(" + variableName + ", " + PLOT_INFO_ID - + ")"; + return "new IndexableNumericDataArrayNumber<>(" + variableName + ", " + PLOT_INFO_ID + ")"; } @Override @@ -225,8 +217,7 @@ public Boolean isTime() { } }); - // only supporting number types because supporting time types would cause generic erasure - // conflicts + // only supporting number types because supporting time types would cause generic erasure conflicts types.put("List", new Type() { @Override public String getGenericSignature(int index) { @@ -240,8 +231,7 @@ public String getVariableType(int index) { @Override public String getIndexableDataCode(String variableName) { - return "new IndexableNumericDataListNumber<>(" + variableName + ", " + PLOT_INFO_ID - + ")"; + return "new IndexableNumericDataListNumber<>(" + variableName + ", " + PLOT_INFO_ID + ")"; } @Override @@ -286,8 +276,8 @@ public String getVariableType(int index) { @Override public String getIndexableDataCode(String variableName) { - return "new IndexableDataArray<>(" + variableName + ".toArray(new Comparable[" - + variableName + 
".size()]), new PlotInfo(this, seriesName))"; + return "new IndexableDataArray<>(" + variableName + ".toArray(new Comparable[" + variableName + + ".size()]), new PlotInfo(this, seriesName))"; } @Override @@ -300,15 +290,14 @@ public Boolean isTime() { } private static String javadocGenerics(final String[] variableNames, final Type[] variableTypes, - final String[] genericJavadocs) { + final String[] genericJavadocs) { StringBuilder result = new StringBuilder(); int genericIndex = 0; for (int i = 0; i < variableNames.length; i++) { final String sig = variableTypes[i].getGenericSignature(i); if (sig != null) { - result.append(genericJavadocs[genericIndex].replace("$GENERIC$", - "<" + sig.split(" ")[0] + ">")); + result.append(genericJavadocs[genericIndex].replace("$GENERIC$", "<" + sig.split(" ")[0] + ">")); genericIndex++; } } @@ -318,28 +307,26 @@ private static String javadocGenerics(final String[] variableNames, final Type[] private static String codeGenericSignature(final Type... variableTypes) { final String generics = IntStream.range(0, variableTypes.length) - .mapToObj(i -> variableTypes[i].getGenericSignature(i)) - .reduce(null, (a, b) -> a == null ? b : b == null ? a : a + "," + b); + .mapToObj(i -> variableTypes[i].getGenericSignature(i)) + .reduce(null, (a, b) -> a == null ? b : b == null ? a : a + "," + b); return generics == null ? "" : "<" + generics + ">"; } private static String codeArguments(final String[] variableNames, final Type[] variableTypes) { - Require.eq(variableNames.length, "variableNames.length", variableTypes.length, - "variableTypes.length"); + Require.eq(variableNames.length, "variableNames.length", variableTypes.length, "variableTypes.length"); return IntStream.range(0, variableNames.length) - .mapToObj(i -> "final " + variableTypes[i].getVariableType(i) + " " + variableNames[i]) - .reduce(null, (a, b) -> a == null ? 
b : a + ", " + b); + .mapToObj(i -> "final " + variableTypes[i].getVariableType(i) + " " + variableNames[i]) + .reduce(null, (a, b) -> a == null ? b : a + ", " + b); } private static String codeIndexable(final String[] variableNames, final Type[] variableTypes) { - Require.eq(variableNames.length, "variableNames.length", variableTypes.length, - "variableTypes.length"); + Require.eq(variableNames.length, "variableNames.length", variableTypes.length, "variableTypes.length"); return IntStream.range(0, variableNames.length) - .mapToObj(i -> variableTypes[i].getIndexableDataCode(variableNames[i])) - .reduce(null, (a, b) -> a == null ? b : a + ", " + b); + .mapToObj(i -> variableTypes[i].getIndexableDataCode(variableNames[i])) + .reduce(null, (a, b) -> a == null ? b : a + ", " + b); } private static String codeTimeAxis(final Type[] variableTypes, final int split) { @@ -355,8 +342,8 @@ private static String codeTimeAxis(final Type[] variableTypes, final int split) } return Arrays.stream(varTypes) - .map(vt -> vt == null ? null : vt.isTime().toString()) - .reduce(null, (a, b) -> a == null ? b : b == null ? a : a + ", " + b); + .map(vt -> vt == null ? null : vt.isTime().toString()) + .reduce(null, (a, b) -> a == null ? b : b == null ? a : a + ", " + b); } private static String codeTimeAxis(final Type[] variableTypes, final int split, final int def) { @@ -372,39 +359,37 @@ private static String codeTimeAxis(final Type[] variableTypes, final int split, } return Arrays.stream(varTypes) - .map(vt -> vt == null ? null : vt.isTime().toString()) - .reduce(null, (a, b) -> a == null ? b : b == null ? a : a + ", " + b); + .map(vt -> vt == null ? null : vt.isTime().toString()) + .reduce(null, (a, b) -> a == null ? b : b == null ? 
a : a + ", " + b); } private static String codeFunction(final boolean isInterface, final String[] variableNames, - final Type[] variableTypes, final String prototype, final String[] genericJavadocs, - final String returnTypeInterface, final String returnTypeImpl) { - return codeFunction(isInterface, variableNames, variableTypes, prototype, genericJavadocs, - returnTypeInterface, returnTypeImpl, -1); + final Type[] variableTypes, final String prototype, final String[] genericJavadocs, + final String returnTypeInterface, final String returnTypeImpl) { + return codeFunction(isInterface, variableNames, variableTypes, prototype, genericJavadocs, returnTypeInterface, + returnTypeImpl, -1); } private static String codeFunction(final boolean isInterface, final String[] variableNames, - final Type[] variableTypes, final String prototype, final String[] genericJavadocs, - final String returnTypeInterface, final String returnTypeImpl, final int split) { + final Type[] variableTypes, final String prototype, final String[] genericJavadocs, + final String returnTypeInterface, final String returnTypeImpl, final int split) { final String generic = codeGenericSignature(variableTypes); final String args = codeArguments(variableNames, variableTypes); final String indexable = codeIndexable(variableNames, variableTypes); final String timeAxis = codeTimeAxis(variableTypes, split); - final String yTimeAxis = - prototype.contains("$YTIMEAXIS$)") ? codeTimeAxis(variableTypes, split, 1) : ""; - final String zTimeAxis = - prototype.contains("$ZTIMEAXIS$)") ? codeTimeAxis(variableTypes, split, 2) : ""; + final String yTimeAxis = prototype.contains("$YTIMEAXIS$)") ? codeTimeAxis(variableTypes, split, 1) : ""; + final String zTimeAxis = prototype.contains("$ZTIMEAXIS$)") ? 
codeTimeAxis(variableTypes, split, 2) : ""; final String javadoc = javadocGenerics(variableNames, variableTypes, genericJavadocs); final String code = prototype - .replace("$GENERIC$", generic) - .replace("$ARGS$", args) - .replace("$INDEXABLE$", indexable) - .replace("$TIMEAXIS$", timeAxis) - .replace("$YTIMEAXIS$", yTimeAxis) - .replace("$ZTIMEAXIS$", zTimeAxis) - .replace("$JAVADOCS$", javadoc) - .replace("$RETURNTYPE$", isInterface ? returnTypeInterface : returnTypeImpl); + .replace("$GENERIC$", generic) + .replace("$ARGS$", args) + .replace("$INDEXABLE$", indexable) + .replace("$TIMEAXIS$", timeAxis) + .replace("$YTIMEAXIS$", yTimeAxis) + .replace("$ZTIMEAXIS$", zTimeAxis) + .replace("$JAVADOCS$", javadoc) + .replace("$RETURNTYPE$", isInterface ? returnTypeInterface : returnTypeImpl); final String rst; if (isInterface) { @@ -412,15 +397,14 @@ private static String codeFunction(final boolean isInterface, final String[] var int i2 = code.indexOf("{", i1); rst = " " + code.substring(0, i2).replace("public ", "").trim() + ";\n"; } else { - rst = " " - + code.substring(code.indexOf("public ")).replace("public ", "@Override public "); + rst = " " + code.substring(code.indexOf("public ")).replace("public ", "@Override public "); } return rst; } - private static ArrayList> constructTypePossibilities( - final String[][] variableTypes, final int depth, final ArrayList types) { + private static ArrayList> constructTypePossibilities(final String[][] variableTypes, + final int depth, final ArrayList types) { final ArrayList> result = new ArrayList<>(); if (depth >= variableTypes.length) { @@ -440,54 +424,48 @@ private static ArrayList> constructTypePossibilities( } private static String codeFunction(final boolean isInterface, final String[] variableNames, - final String[][] variableTypes, final String prototype, final String[] genericJavadocs, - final String returnTypeInterface, final String returnTypeImpl) { - Require.eq(variableNames.length, "variableNames.length", 
variableTypes.length, - "variableTypes.length"); + final String[][] variableTypes, final String prototype, final String[] genericJavadocs, + final String returnTypeInterface, final String returnTypeImpl) { + Require.eq(variableNames.length, "variableNames.length", variableTypes.length, "variableTypes.length"); final ArrayList> typePossibilities = - constructTypePossibilities(variableTypes, 0, new ArrayList<>()); + constructTypePossibilities(variableTypes, 0, new ArrayList<>()); return typePossibilities.stream() - .map(tp -> codeFunction(isInterface, variableNames, - tp.toArray(new Type[variableNames.length]), prototype, genericJavadocs, - returnTypeInterface, returnTypeImpl)) - .reduce("", (a, b) -> a + "\n" + b); + .map(tp -> codeFunction(isInterface, variableNames, tp.toArray(new Type[variableNames.length]), + prototype, genericJavadocs, returnTypeInterface, returnTypeImpl)) + .reduce("", (a, b) -> a + "\n" + b); } private static String codeFunctionRestrictedNumericalVariableTypes(final boolean isInterface, - final String[] variableNames, final int split, final String prototype, - final String[] genericJavadocs, - final String returnTypeInterface, final String returnTypeImpl) { + final String[] variableNames, final int split, final String prototype, final String[] genericJavadocs, + final String returnTypeInterface, final String returnTypeImpl) { - final ArrayList> typePossibilities = - constructRestrictedNumericalTypes(variableNames, split); + final ArrayList> typePossibilities = constructRestrictedNumericalTypes(variableNames, split); return typePossibilities.stream() - .map(tp -> codeFunction(isInterface, variableNames, - tp.toArray(new Type[variableNames.length]), prototype, genericJavadocs, - returnTypeInterface, returnTypeImpl, split)) - .reduce("", (a, b) -> a + "\n" + b); + .map(tp -> codeFunction(isInterface, variableNames, tp.toArray(new Type[variableNames.length]), + prototype, genericJavadocs, returnTypeInterface, returnTypeImpl, split)) + .reduce("", 
(a, b) -> a + "\n" + b); } private static String codeFunctionRestrictedNumericalVariableTypes(final boolean isInterface, - final String[] variableNames, final String[] variableTypes, final int split, - final String prototype, final String[] genericJavadocs, - final String returnTypeInterface, final String returnTypeImpl) { + final String[] variableNames, final String[] variableTypes, final int split, final String prototype, + final String[] genericJavadocs, + final String returnTypeInterface, final String returnTypeImpl) { final ArrayList> typePossibilities = - constructRestrictedNumericalTypes(variableNames, variableTypes, split); + constructRestrictedNumericalTypes(variableNames, variableTypes, split); return typePossibilities.stream() - .map(tp -> codeFunction(isInterface, variableNames, - tp.toArray(new Type[variableNames.length]), prototype, genericJavadocs, - returnTypeInterface, returnTypeImpl, split)) - .reduce("", (a, b) -> a + "\n" + b); + .map(tp -> codeFunction(isInterface, variableNames, tp.toArray(new Type[variableNames.length]), + prototype, genericJavadocs, returnTypeInterface, returnTypeImpl, split)) + .reduce("", (a, b) -> a + "\n" + b); } - private static ArrayList> constructRestrictedNumericalTypes( - final String[] variableNames, final int split) { + private static ArrayList> constructRestrictedNumericalTypes(final String[] variableNames, + final int split) { final ArrayList> result = new ArrayList<>(); final Map typeMap = getTypes(); @@ -543,8 +521,8 @@ private static ArrayList> constructRestrictedNumericalTypes( return result; } - private static ArrayList> constructRestrictedNumericalTypes( - final String[] variableNames, final String[] variableTypes, final int split) { + private static ArrayList> constructRestrictedNumericalTypes(final String[] variableNames, + final String[] variableTypes, final int split) { final ArrayList> result = new ArrayList<>(); final Map typeMap = getTypes(); @@ -606,274 +584,263 @@ private static ArrayList> 
constructRestrictedNumericalTypes( }; - private static final String[] numberTimeTypes = - new String[timeTypes.length + numberTypes.length]; + private static final String[] numberTimeTypes = new String[timeTypes.length + numberTypes.length]; static { System.arraycopy(timeTypes, 0, numberTimeTypes, 0, timeTypes.length); System.arraycopy(numberTypes, 0, numberTimeTypes, timeTypes.length, numberTypes.length); } - private static void generate(final boolean assertNoChange, final String file, - final boolean isInterface) throws IOException { + private static void generate(final boolean assertNoChange, final String file, final boolean isInterface) + throws IOException { final String headerMessage = "CODE BELOW HERE IS GENERATED -- DO NOT EDIT BY HAND"; final String headerComment = "//////////////////////////////"; final String headerSpace = " "; - final String header = - headerSpace + headerComment + " " + headerMessage + " " + headerComment; - final String header2 = headerSpace + headerComment - + " TO REGENERATE RUN GenerateAxesPlotMethods " + headerComment; - final String header3 = headerSpace + headerComment - + " AND THEN RUN GeneratePlottingConvenience " + headerComment; + final String header = headerSpace + headerComment + " " + headerMessage + " " + headerComment; + final String header2 = + headerSpace + headerComment + " TO REGENERATE RUN GenerateAxesPlotMethods " + headerComment; + final String header3 = + headerSpace + headerComment + " AND THEN RUN GeneratePlottingConvenience " + headerComment; StringBuilder code = new StringBuilder(header + "\n" + header2 + "\n" + header3 + "\n\n\n"); - code.append(codeFunction(isInterface, new String[] {"x", "y"}, - new String[][] {numberTimeTypes, numberTimeTypes}, - " /**\n" + - " * Creates an XY plot.\n" + - " *\n" + - " * @param seriesName name of the created dataset\n" + - " * @param x x-values\n" + - " * @param y y-values\n" + - "$JAVADOCS$" + - " * @return dataset created for plot\n" + - " */\n" + - " public $GENERIC$ 
$RETURNTYPE$ plot(final Comparable seriesName, $ARGS$) {\n" + - " return plot(seriesName, $INDEXABLE$, $TIMEAXIS$);\n" + - " }\n", - new String[] { - " * @param $GENERIC$ data type\n", - " * @param $GENERIC$ data type\n" - }, "XYDataSeries", "XYDataSeriesArray")); + code.append( + codeFunction(isInterface, new String[] {"x", "y"}, new String[][] {numberTimeTypes, numberTimeTypes}, + " /**\n" + + " * Creates an XY plot.\n" + + " *\n" + + " * @param seriesName name of the created dataset\n" + + " * @param x x-values\n" + + " * @param y y-values\n" + + "$JAVADOCS$" + + " * @return dataset created for plot\n" + + " */\n" + + " public $GENERIC$ $RETURNTYPE$ plot(final Comparable seriesName, $ARGS$) {\n" + + " return plot(seriesName, $INDEXABLE$, $TIMEAXIS$);\n" + + " }\n", + new String[] { + " * @param $GENERIC$ data type\n", + " * @param $GENERIC$ data type\n" + }, "XYDataSeries", "XYDataSeriesArray")); for (String c : numberTypes) { final String[] cs = {c}; - code.append(codeFunction(isInterface, - new String[] {"time", "open", "high", "low", "close"}, - new String[][] {timeTypes, cs, cs, cs, cs}, - " /**\n" + - " * Creates an open-high-low-close plot.\n" + - " *\n" + - " * @param seriesName name of the created dataset\n" + - " * @param time time data\n" + - " * @param open open data\n" + - " * @param high high data\n" + - " * @param low low data\n" + - " * @param close close data\n" + - "$JAVADOCS$" + - " * @return dataset created by the plot\n" + - " */\n" + - " public $GENERIC$ $RETURNTYPE$ ohlcPlot(final Comparable seriesName, $ARGS$) {\n" - + - " return ohlcPlot(seriesName, $INDEXABLE$);\n" + - " }\n", - new String[] { - " * @param $GENERIC$ open data type\n", - " * @param $GENERIC$ high data type\n", - " * @param $GENERIC$ low data type\n", - " * @param $GENERIC$ close data type\n", - }, "OHLCDataSeries", "OHLCDataSeriesArray")); + code.append(codeFunction(isInterface, new String[] {"time", "open", "high", "low", "close"}, + new String[][] {timeTypes, cs, cs, 
cs, cs}, + " /**\n" + + " * Creates an open-high-low-close plot.\n" + + " *\n" + + " * @param seriesName name of the created dataset\n" + + " * @param time time data\n" + + " * @param open open data\n" + + " * @param high high data\n" + + " * @param low low data\n" + + " * @param close close data\n" + + "$JAVADOCS$" + + " * @return dataset created by the plot\n" + + " */\n" + + " public $GENERIC$ $RETURNTYPE$ ohlcPlot(final Comparable seriesName, $ARGS$) {\n" + + " return ohlcPlot(seriesName, $INDEXABLE$);\n" + + " }\n", + new String[] { + " * @param $GENERIC$ open data type\n", + " * @param $GENERIC$ high data type\n", + " * @param $GENERIC$ low data type\n", + " * @param $GENERIC$ close data type\n", + }, "OHLCDataSeries", "OHLCDataSeriesArray")); } code.append(codeFunction(isInterface, new String[] {"x"}, new String[][] {numberTypes}, - " /**\n" + - " * Creates a histogram.\n" + - " *\n" + - " * @param seriesName name of the created dataset\n" + - " * @param x data\n" + - " * @param nbins number of bins\n" + - "$JAVADOCS$" + - " * @return dataset created by the plot\n" + - " */\n" + - " public $GENERIC$ $RETURNTYPE$ histPlot(final Comparable seriesName, $ARGS$, final int nbins) {\n" - + - " return histPlot(seriesName, PlotUtils.doubleTable(x, \"Y\"), \"Y\", nbins);\n" - + - " }\n", - new String[] { - " * @param $GENERIC$ data type\n", - }, "IntervalXYDataSeries", "IntervalXYDataSeriesArray")); + " /**\n" + + " * Creates a histogram.\n" + + " *\n" + + " * @param seriesName name of the created dataset\n" + + " * @param x data\n" + + " * @param nbins number of bins\n" + + "$JAVADOCS$" + + " * @return dataset created by the plot\n" + + " */\n" + + " public $GENERIC$ $RETURNTYPE$ histPlot(final Comparable seriesName, $ARGS$, final int nbins) {\n" + + + " return histPlot(seriesName, PlotUtils.doubleTable(x, \"Y\"), \"Y\", nbins);\n" + + " }\n", + new String[] { + " * @param $GENERIC$ data type\n", + }, "IntervalXYDataSeries", "IntervalXYDataSeriesArray")); 
code.append(codeFunction(isInterface, new String[] {"x"}, new String[][] {numberTypes}, - " /**\n" + - " * Creates a histogram.\n" + - " *\n" + - " * @param seriesName name of the created dataset\n" + - " * @param x data\n" + - " * @param rangeMin minimum of the range\n" + - " * @param rangeMax maximum of the range\n" + - " * @param nbins number of bins\n" + - "$JAVADOCS$" + - " * @return dataset created by the plot\n" + - " */\n" + - " public $GENERIC$ $RETURNTYPE$ histPlot(final Comparable seriesName, $ARGS$, final double rangeMin, final double rangeMax, final int nbins) {\n" - + - " return histPlot(seriesName, PlotUtils.doubleTable(x, \"Y\"), \"Y\", rangeMin, rangeMax, nbins);\n" - + - " }\n", - new String[] { - " * @param $GENERIC$ data type\n", - }, "IntervalXYDataSeries", "IntervalXYDataSeriesArray")); - - - code.append(codeFunctionRestrictedNumericalVariableTypes(isInterface, - new String[] {"x", "xLow", "xHigh", "y", "yLow", "yHigh"}, 3, - " /**\n" + - " * Creates an XY plot with error bars in both the x and y directions.\n" + - " *\n" + - " * @param seriesName name of the created dataset\n" + - " * @param x x-values\n" + - " * @param xLow low value in x dimension\n" + - " * @param xHigh high value in x dimension\n" + - " * @param y y-values\n" + - " * @param yLow low value in y dimension\n" + - " * @param yHigh high value in y dimension\n" + - "$JAVADOCS$" + - " * @return dataset created by the plot\n" + - " */\n" + - " public $GENERIC$ $RETURNTYPE$ errorBarXY(final Comparable seriesName, $ARGS$) {\n" - + - " return errorBarXY(seriesName, $INDEXABLE$, true, true, $TIMEAXIS$);\n" + - " }\n", - new String[] { - " * @param $GENERIC$ data type\n", - " * @param $GENERIC$ data type\n", - " * @param $GENERIC$ data type\n", - " * @param $GENERIC$ data type\n", - " * @param $GENERIC$ data type\n", - " * @param $GENERIC$ data type\n" - }, "XYErrorBarDataSeries", "XYErrorBarDataSeriesArray")); - - - 
code.append(codeFunctionRestrictedNumericalVariableTypes(isInterface, - new String[] {"x", "xLow", "xHigh", "y"}, 3, - " /**\n" + - " * Creates an XY plot with error bars in the x direction.\n" + - " *\n" + - " * @param seriesName name of the created dataset\n" + - " * @param x x-values\n" + - " * @param xLow low value in x dimension\n" + - " * @param xHigh high value in x dimension\n" + - " * @param y y-values\n" + - "$JAVADOCS$" + - " * @return dataset created by the plot\n" + - " */\n" + - " public $GENERIC$ $RETURNTYPE$ errorBarX(final Comparable seriesName, $ARGS$) {\n" - + - " return errorBarX(seriesName, $INDEXABLE$, true, false, $TIMEAXIS$);\n" + - " }\n", - new String[] { - " * @param $GENERIC$ data type\n", - " * @param $GENERIC$ data type\n", - " * @param $GENERIC$ data type\n", - " * @param $GENERIC$ data type\n", - " * @param $GENERIC$ data type\n", - " * @param $GENERIC$ data type\n" - }, "XYErrorBarDataSeries", "XYErrorBarDataSeriesArray")); + " /**\n" + + " * Creates a histogram.\n" + + " *\n" + + " * @param seriesName name of the created dataset\n" + + " * @param x data\n" + + " * @param rangeMin minimum of the range\n" + + " * @param rangeMax maximum of the range\n" + + " * @param nbins number of bins\n" + + "$JAVADOCS$" + + " * @return dataset created by the plot\n" + + " */\n" + + " public $GENERIC$ $RETURNTYPE$ histPlot(final Comparable seriesName, $ARGS$, final double rangeMin, final double rangeMax, final int nbins) {\n" + + + " return histPlot(seriesName, PlotUtils.doubleTable(x, \"Y\"), \"Y\", rangeMin, rangeMax, nbins);\n" + + + " }\n", + new String[] { + " * @param $GENERIC$ data type\n", + }, "IntervalXYDataSeries", "IntervalXYDataSeriesArray")); code.append(codeFunctionRestrictedNumericalVariableTypes(isInterface, - new String[] {"x", "y", "yLow", "yHigh"}, 1, - " /**\n" + - " * Creates an XY plot with error bars in the y direction.\n" + - " *\n" + - " * @param seriesName name of the created dataset\n" + - " * @param x x-values\n" + - " 
* @param y y-values\n" + - " * @param yLow low value in y dimension\n" + - " * @param yHigh high value in y dimension\n" + - "$JAVADOCS$" + - " * @return dataset created by the plot\n" + - " */\n" + - " public $GENERIC$ $RETURNTYPE$ errorBarY(final Comparable seriesName, $ARGS$) {\n" - + - " return errorBarY(seriesName, $INDEXABLE$, false, true, $TIMEAXIS$);\n" + - " }\n", - new String[] { - " * @param $GENERIC$ data type\n", - " * @param $GENERIC$ data type\n", - " * @param $GENERIC$ data type\n", - " * @param $GENERIC$ data type\n", - " * @param $GENERIC$ data type\n", - " * @param $GENERIC$ data type\n" - }, "XYErrorBarDataSeries", "XYErrorBarDataSeriesArray")); + new String[] {"x", "xLow", "xHigh", "y", "yLow", "yHigh"}, 3, + " /**\n" + + " * Creates an XY plot with error bars in both the x and y directions.\n" + + " *\n" + + " * @param seriesName name of the created dataset\n" + + " * @param x x-values\n" + + " * @param xLow low value in x dimension\n" + + " * @param xHigh high value in x dimension\n" + + " * @param y y-values\n" + + " * @param yLow low value in y dimension\n" + + " * @param yHigh high value in y dimension\n" + + "$JAVADOCS$" + + " * @return dataset created by the plot\n" + + " */\n" + + " public $GENERIC$ $RETURNTYPE$ errorBarXY(final Comparable seriesName, $ARGS$) {\n" + + " return errorBarXY(seriesName, $INDEXABLE$, true, true, $TIMEAXIS$);\n" + + " }\n", + new String[] { + " * @param $GENERIC$ data type\n", + " * @param $GENERIC$ data type\n", + " * @param $GENERIC$ data type\n", + " * @param $GENERIC$ data type\n", + " * @param $GENERIC$ data type\n", + " * @param $GENERIC$ data type\n" + }, "XYErrorBarDataSeries", "XYErrorBarDataSeriesArray")); + + + code.append( + codeFunctionRestrictedNumericalVariableTypes(isInterface, new String[] {"x", "xLow", "xHigh", "y"}, 3, + " /**\n" + + " * Creates an XY plot with error bars in the x direction.\n" + + " *\n" + + " * @param seriesName name of the created dataset\n" + + " * @param x x-values\n" 
+ + " * @param xLow low value in x dimension\n" + + " * @param xHigh high value in x dimension\n" + + " * @param y y-values\n" + + "$JAVADOCS$" + + " * @return dataset created by the plot\n" + + " */\n" + + " public $GENERIC$ $RETURNTYPE$ errorBarX(final Comparable seriesName, $ARGS$) {\n" + + " return errorBarX(seriesName, $INDEXABLE$, true, false, $TIMEAXIS$);\n" + + " }\n", + new String[] { + " * @param $GENERIC$ data type\n", + " * @param $GENERIC$ data type\n", + " * @param $GENERIC$ data type\n", + " * @param $GENERIC$ data type\n", + " * @param $GENERIC$ data type\n", + " * @param $GENERIC$ data type\n" + }, "XYErrorBarDataSeries", "XYErrorBarDataSeriesArray")); + + + code.append( + codeFunctionRestrictedNumericalVariableTypes(isInterface, new String[] {"x", "y", "yLow", "yHigh"}, 1, + " /**\n" + + " * Creates an XY plot with error bars in the y direction.\n" + + " *\n" + + " * @param seriesName name of the created dataset\n" + + " * @param x x-values\n" + + " * @param y y-values\n" + + " * @param yLow low value in y dimension\n" + + " * @param yHigh high value in y dimension\n" + + "$JAVADOCS$" + + " * @return dataset created by the plot\n" + + " */\n" + + " public $GENERIC$ $RETURNTYPE$ errorBarY(final Comparable seriesName, $ARGS$) {\n" + + " return errorBarY(seriesName, $INDEXABLE$, false, true, $TIMEAXIS$);\n" + + " }\n", + new String[] { + " * @param $GENERIC$ data type\n", + " * @param $GENERIC$ data type\n", + " * @param $GENERIC$ data type\n", + " * @param $GENERIC$ data type\n", + " * @param $GENERIC$ data type\n", + " * @param $GENERIC$ data type\n" + }, "XYErrorBarDataSeries", "XYErrorBarDataSeriesArray")); code.append(codeFunctionRestrictedNumericalVariableTypes(isInterface, - new String[] {"categories", "values", "yLow", "yHigh"}, - new String[] {"Comparable", "List"}, 1, - " /**\n" + - " * Creates a category error bar plot with whiskers in the y direction.\n" + - " *\n" + - " * @param seriesName name of the created dataset\n" + - " * @param 
categories discrete data\n" + - " * @param values numeric data\n" + - " * @param yLow low value in y dimension\n" + - " * @param yHigh high value in y dimension\n" + - "$JAVADOCS$" + - " * @return dataset created by the plot\n" + - " */\n" + - " public $GENERIC$ $RETURNTYPE$ catErrorBar(final Comparable seriesName, $ARGS$) {\n" - + - " return catPlot(new CategoryErrorBarDataSeriesMap(this, dataSeries.nextId(), seriesName, $INDEXABLE$), null, null, $YTIMEAXIS$);\n" - + - " }\n", - new String[] { - " * @param $GENERIC$ type of the categorical data\n", - " * @param $GENERIC$ type of the numeric data\n", - " * @param $GENERIC$ type of the numeric data\n", - " * @param $GENERIC$ type of the numeric data\n", - " * @param $GENERIC$ type of the numeric data\n" - }, "CategoryDataSeries", "CategoryDataSeriesInternal")); + new String[] {"categories", "values", "yLow", "yHigh"}, new String[] {"Comparable", "List"}, + 1, + " /**\n" + + " * Creates a category error bar plot with whiskers in the y direction.\n" + + " *\n" + + " * @param seriesName name of the created dataset\n" + + " * @param categories discrete data\n" + + " * @param values numeric data\n" + + " * @param yLow low value in y dimension\n" + + " * @param yHigh high value in y dimension\n" + + "$JAVADOCS$" + + " * @return dataset created by the plot\n" + + " */\n" + + " public $GENERIC$ $RETURNTYPE$ catErrorBar(final Comparable seriesName, $ARGS$) {\n" + + " return catPlot(new CategoryErrorBarDataSeriesMap(this, dataSeries.nextId(), seriesName, $INDEXABLE$), null, null, $YTIMEAXIS$);\n" + + + " }\n", + new String[] { + " * @param $GENERIC$ type of the categorical data\n", + " * @param $GENERIC$ type of the numeric data\n", + " * @param $GENERIC$ type of the numeric data\n", + " * @param $GENERIC$ type of the numeric data\n", + " * @param $GENERIC$ type of the numeric data\n" + }, "CategoryDataSeries", "CategoryDataSeriesInternal")); code.append(codeFunction(isInterface, new String[] {"categories", "values"}, - new 
String[][] {{"Comparable", "List"}, numberTimeTypes}, - " /**\n" + - " * Creates a plot with discrete axis.\n" + - " * Discrete data must not have duplicates.\n" + - " *\n" + - " * @param seriesName name of the created dataset\n" + - " * @param categories discrete data\n" + - " * @param values numeric data\n" + - "$JAVADOCS$" + - " * @return dataset created for plot\n" + - " */\n" + - " public $GENERIC$ $RETURNTYPE$ catPlot(final Comparable seriesName, $ARGS$) {\n" - + - " return catPlot(seriesName, $INDEXABLE$, $YTIMEAXIS$);\n" + - " }\n", - new String[] { - " * @param $GENERIC$ type of the categorical data\n", - " * @param $GENERIC$ type of the numeric data\n" - }, "CategoryDataSeries", "CategoryDataSeriesInternal")); + new String[][] {{"Comparable", "List"}, numberTimeTypes}, + " /**\n" + + " * Creates a plot with discrete axis.\n" + + " * Discrete data must not have duplicates.\n" + + " *\n" + + " * @param seriesName name of the created dataset\n" + + " * @param categories discrete data\n" + + " * @param values numeric data\n" + + "$JAVADOCS$" + + " * @return dataset created for plot\n" + + " */\n" + + " public $GENERIC$ $RETURNTYPE$ catPlot(final Comparable seriesName, $ARGS$) {\n" + + " return catPlot(seriesName, $INDEXABLE$, $YTIMEAXIS$);\n" + + " }\n", + new String[] { + " * @param $GENERIC$ type of the categorical data\n", + " * @param $GENERIC$ type of the numeric data\n" + }, "CategoryDataSeries", "CategoryDataSeriesInternal")); code.append(codeFunction(isInterface, new String[] {"categories", "values"}, - new String[][] {{"Comparable", "List"}, numberTypes}, - " /**\n" + - " * Creates a pie plot.\n" + - " * Categorical data must not have duplicates.\n" + - " *\n" + - " * @param seriesName name of the created dataset\n" + - " * @param categories categories\n" + - " * @param values data values\n" + - "$JAVADOCS$" + - " * @return dataset created for plot\n" + - " */\n" + - " public $GENERIC$ $RETURNTYPE$ piePlot(final Comparable seriesName, $ARGS$) {\n" - 
+ - " return piePlot(seriesName, $INDEXABLE$);\n" + - " }\n", - new String[] { - " * @param $GENERIC$ type of the categorical data\n", - " * @param $GENERIC$ type of the numeric data\n" - }, "CategoryDataSeries", "CategoryDataSeriesInternal")); + new String[][] {{"Comparable", "List"}, numberTypes}, + " /**\n" + + " * Creates a pie plot.\n" + + " * Categorical data must not have duplicates.\n" + + " *\n" + + " * @param seriesName name of the created dataset\n" + + " * @param categories categories\n" + + " * @param values data values\n" + + "$JAVADOCS$" + + " * @return dataset created for plot\n" + + " */\n" + + " public $GENERIC$ $RETURNTYPE$ piePlot(final Comparable seriesName, $ARGS$) {\n" + + " return piePlot(seriesName, $INDEXABLE$);\n" + + " }\n", + new String[] { + " * @param $GENERIC$ type of the categorical data\n", + " * @param $GENERIC$ type of the numeric data\n" + }, "CategoryDataSeries", "CategoryDataSeriesInternal")); // System.out.println(code); @@ -882,8 +849,7 @@ private static void generate(final boolean assertNoChange, final String file, int cutPoint = axes.lastIndexOf(header); if (cutPoint != axes.indexOf(header)) { - throw new IllegalArgumentException( - "Input source code contains two autogenerated sections! file=" + file); + throw new IllegalArgumentException("Input source code contains two autogenerated sections! file=" + file); } if (cutPoint < 0) { @@ -901,7 +867,7 @@ private static void generate(final boolean assertNoChange, final String file, String oldCode = new String(Files.readAllBytes(Paths.get(file))); if (!newcode.equals(oldCode)) { throw new RuntimeException( - "Change in generated code. Run GenerateAxesPlotMethods or \"./gradlew :Generators:generateAxesPlotMethods\" to regenerate\n"); + "Change in generated code. 
Run GenerateAxesPlotMethods or \"./gradlew :Generators:generateAxesPlotMethods\" to regenerate\n"); } } else { diff --git a/Generators/src/main/java/io/deephaven/db/plot/util/GenerateFigureImmutable.java b/Generators/src/main/java/io/deephaven/db/plot/util/GenerateFigureImmutable.java index bb48ad2e360..5b1fa069b91 100644 --- a/Generators/src/main/java/io/deephaven/db/plot/util/GenerateFigureImmutable.java +++ b/Generators/src/main/java/io/deephaven/db/plot/util/GenerateFigureImmutable.java @@ -48,10 +48,10 @@ public class GenerateFigureImmutable { private final Collection> skips; private final Function functionNamer; - private GenerateFigureImmutable(final boolean isInterface, final String[] imports, - final String[] interfaces, final String[] seriesInterfaces, - final Collection> skips, - final Function functionNamer) throws ClassNotFoundException { + private GenerateFigureImmutable(final boolean isInterface, final String[] imports, final String[] interfaces, + final String[] seriesInterfaces, + final Collection> skips, final Function functionNamer) + throws ClassNotFoundException { this.outputClass = isInterface ? 
CLASS_NAME_INTERFACE : CLASS_NAME_IMPLEMENTATION; this.isInterface = isInterface; this.outputClassNameShort = this.outputClass.substring(outputClass.lastIndexOf('.') + 1); @@ -92,14 +92,14 @@ private boolean skip(final JavaFunction f) { private JavaFunction signature(final JavaFunction f) { return new JavaFunction( - outputClass, - outputClassNameShort, - functionNamer.apply(f), - f.getTypeParameters(), - f.getReturnType(), - f.getParameterTypes(), - f.getParameterNames(), - f.isVarArgs()); + outputClass, + outputClassNameShort, + functionNamer.apply(f), + f.getTypeParameters(), + f.getReturnType(), + f.getParameterTypes(), + f.getParameterNames(), + f.isVarArgs()); } private void addPublicNonStatic(Method m) { @@ -216,13 +216,13 @@ private String generateImplements() { private String generateCode() { String code = "/*\n" + - " * Copyright (c) 2016-2021 Deephaven Data Labs and Patent Pending\n" + - " */\n\n" + - "/****************************************************************************************************************************\n" - + - " ****** AUTO-GENERATED CLASS - DO NOT EDIT MANUALLY - Run GenerateFigureImmutable or \"./gradlew :Generators:generateFigureImmutable\" to regenerate\n" - + - " ****************************************************************************************************************************/\n\n"; + " * Copyright (c) 2016-2021 Deephaven Data Labs and Patent Pending\n" + + " */\n\n" + + "/****************************************************************************************************************************\n" + + + " ****** AUTO-GENERATED CLASS - DO NOT EDIT MANUALLY - Run GenerateFigureImmutable or \"./gradlew :Generators:generateFigureImmutable\" to regenerate\n" + + + " ****************************************************************************************************************************/\n\n"; code += "package io.deephaven.db.plot;\n\n"; @@ -233,12 +233,11 @@ private String generateCode() { } code += "\n"; - code += 
- "/** An interface for constructing plots. A Figure is immutable, and all function calls return a new immutable Figure instance."; + code += "/** An interface for constructing plots. A Figure is immutable, and all function calls return a new immutable Figure instance."; code += "*/\n"; code += "@SuppressWarnings({\"unused\", \"RedundantCast\", \"SameParameterValue\"})\n"; - code += "public" + (isInterface ? " interface " : " class ") + outputClassNameShort - + generateImplements() + " {\n"; + code += "public" + (isInterface ? " interface " : " class ") + outputClassNameShort + generateImplements() + + " {\n"; code += "\n"; code += createConstructor(); @@ -292,389 +291,373 @@ private String createConstructor() { } return " private static final long serialVersionUID = -4519904656095275663L;\n" + - "\n" + - " private final BaseFigureImpl figure;\n" + - " private final ChartLocation lastChart;\n" + - " private final AxesLocation lastAxes;\n" + - " private final AxisLocation lastAxis;\n" + - " private final SeriesLocation lastSeries;\n" + - " private final Map lastAxesMap;\n" + - " private final Map lastAxisMap;\n" + - " private final Map lastSeriesMap;\n" + - "\n" + - " private " + outputClassNameShort - + "(final BaseFigureImpl figure, final ChartLocation lastChart, final AxesLocation lastAxes, final AxisLocation lastAxis, final SeriesLocation lastSeries, final Map lastAxesMap, final Map lastAxisMap, final Map lastSeriesMap) {\n" - + - " this.figure = Require.neqNull(figure, \"figure\");\n" + - " this.lastChart = lastChart;\n" + - " this.lastAxes = lastAxes;\n" + - " this.lastAxis = lastAxis;\n" + - " this.lastSeries = lastSeries;\n" + - " this.lastAxesMap = new HashMap<>(lastAxesMap);\n" + - " this.lastAxisMap = new HashMap<>(lastAxisMap);\n" + - " this.lastSeriesMap = new HashMap<>(lastSeriesMap);\n" + - " if(this.lastAxes != null) { this.lastAxesMap.put(this.lastChart, this.lastAxes); }\n" - + - " if(this.lastAxis != null) { this.lastAxisMap.put(this.lastAxes, 
this.lastAxis); }\n" - + - " if(this.lastSeries != null) { this.lastSeriesMap.put(this.lastAxes, this.lastSeries); }\n" - + - " }\n" + - "\n" + - " public " + outputClassNameShort + "(final " + outputClassNameShort + " figure) {\n" - + - " this.figure = Require.neqNull(figure, \"figure\").figure;\n" + - " this.lastChart = figure.lastChart;\n" + - " this.lastAxes = figure.lastAxes;\n" + - " this.lastAxis = figure.lastAxis;\n" + - " this.lastSeries = figure.lastSeries;\n" + - " this.lastAxesMap = figure.lastAxesMap;\n" + - " this.lastAxisMap = figure.lastAxisMap;\n" + - " this.lastSeriesMap = figure.lastSeriesMap;\n" + - " }\n" + - "\n" + - " private " + outputClassNameShort + "(final BaseFigureImpl figure) {\n" + - " this(figure,null,null,null,null,new HashMap<>(),new HashMap<>(),new HashMap<>());\n" - + - " }\n" + - "\n" + - " " + outputClassNameShort + "() {\n" + - " this(new BaseFigureImpl());\n" + - " }\n" + - "\n" + - " " + outputClassNameShort + "(final int numRows, final int numCols) {\n" + - " this(new BaseFigureImpl(numRows,numCols));\n" + - " }\n" + - "\n" + - " private AxesLocation resolveLastAxes(final BaseFigureImpl figure, final ChartLocation chartLoc){\n" - + - " if(chartLoc == null){\n" + - " return null;\n" + - " }\n" + - "\n" + - " final AxesLocation a0 = lastAxesMap.get(chartLoc);\n" + - "\n" + - " if( a0 != null) {\n" + - " return a0;\n" + - " }\n" + - "\n" + - " final List axs = chartLoc.get(figure).getAxes();\n" + - " return axs.isEmpty() ? null : new AxesLocation(axs.get(axs.size()-1));\n" + - " }\n" + - "\n" + - " private AxisLocation resolveLastAxis(final BaseFigureImpl figure, final AxesLocation axesLoc){\n" - + - " if(axesLoc == null){\n" + - " return null;\n" + - " }\n" + - "\n" + - " final AxisLocation a0 = lastAxisMap.get(axesLoc);\n" + - "\n" + - " if( a0 != null ){\n" + - " return a0;\n" + - " }\n" + - "\n" + - " final AxesImpl axs = axesLoc.get(figure);\n" + - " return axs.dimension() <= 0 ? 
null : new AxisLocation(axs.axis(axs.dimension()-1));\n" - + - " }\n" + - "\n" + - " private SeriesLocation resolveLastSeries(final BaseFigureImpl figure, final AxesLocation axesLoc){\n" - + - " if(axesLoc == null){\n" + - " return null;\n" + - " }\n" + - "\n" + - " final SeriesLocation s0 = lastSeriesMap.get(axesLoc);\n" + - "\n" + - " if( s0 != null ){\n" + - " return s0;\n" + - " }\n" + - "\n" + - " final SeriesInternal s1 = axesLoc.get(figure).dataSeries().lastSeries();\n" + - " return s1 == null ? null : new SeriesLocation(s1);\n" + - " }\n" + - "\n" + - "\n" + - " /**\n" + - " * Gets the mutable figure backing this immutable figure.\n" + - " *\n" + - " * @return mutable figure backing this immutable figure\n" + - " */\n" + - " public BaseFigureImpl getFigure() { return this.figure; }\n" + - "\n" + - "\n" + - " private " + outputClassNameShort + " make(final BaseFigureImpl figure){\n" + - " final ChartLocation chartLoc = this.lastChart;\n" + - " final AxesLocation axesLoc = this.lastAxes;\n" + - " final AxisLocation axisLoc = this.lastAxis;\n" + - " final SeriesLocation seriesLoc = this.lastSeries;\n" + - " return new " + outputClassNameShort - + "(figure, chartLoc, axesLoc, axisLoc, seriesLoc, this.lastAxesMap, this.lastAxisMap, this.lastSeriesMap);\n" - + - " }\n" + - "\n" + - " private " + outputClassNameShort + " make(final ChartImpl chart){\n" + - " final BaseFigureImpl figure = chart.figure();\n" + - " final ChartLocation chartLoc = new ChartLocation(chart);\n" + - " final AxesLocation axesLoc = resolveLastAxes(figure, chartLoc);\n" + - " final AxisLocation axisLoc = resolveLastAxis(figure, axesLoc);\n" + - " final SeriesLocation seriesLoc = resolveLastSeries(figure, axesLoc);\n" + - " return new " + outputClassNameShort - + "(figure, chartLoc, axesLoc, axisLoc, seriesLoc, this.lastAxesMap, this.lastAxisMap, this.lastSeriesMap);\n" - + - " }\n" + - "\n" + - " private " + outputClassNameShort + " make(final AxesImpl axes){\n" + - " final BaseFigureImpl 
figure = axes.chart().figure();\n" + - " final ChartLocation chartLoc = new ChartLocation(axes.chart());\n" + - " final AxesLocation axesLoc = new AxesLocation(axes);\n" + - " final AxisLocation axisLoc = resolveLastAxis(figure, axesLoc);\n" + - " final SeriesLocation seriesLoc = resolveLastSeries(figure, axesLoc);\n" + - " return new " + outputClassNameShort - + "(figure, chartLoc, axesLoc, axisLoc, seriesLoc, this.lastAxesMap, this.lastAxisMap, this.lastSeriesMap);\n" - + - " }\n" + - "\n" + - " private " + outputClassNameShort - + " make(final AxesImpl axes, final AxisImpl axis){\n" + - " final BaseFigureImpl figure = axis.chart().figure();\n" + - " final ChartLocation chartLoc = new ChartLocation(axis.chart());\n" + - " final AxesLocation axesLoc = axes == null ? this.lastAxes : new AxesLocation(axes);\n" - + - " final AxisLocation axisLoc = new AxisLocation(axis);\n" + - " final SeriesLocation seriesLoc = resolveLastSeries(figure, axesLoc);\n" + - " return new " + outputClassNameShort - + "(figure, chartLoc, axesLoc, axisLoc, seriesLoc, this.lastAxesMap, this.lastAxisMap, this.lastSeriesMap);\n" - + - " }\n" + - "\n" + - " private " + outputClassNameShort + " make(final SeriesInternal series){\n" + - " final BaseFigureImpl figure = series.axes().chart().figure();\n" + - " final ChartLocation chartLoc = new ChartLocation(series.axes().chart());\n" + - " final AxesLocation axesLoc = new AxesLocation(series.axes());\n" + - " final AxisLocation axisLoc = resolveLastAxis(figure, axesLoc);\n" + - " final SeriesLocation seriesLoc = new SeriesLocation(series);\n" + - " return new " + outputClassNameShort - + "(figure, chartLoc, axesLoc, axisLoc, seriesLoc, this.lastAxesMap, this.lastAxisMap, this.lastSeriesMap);\n" - + - " }\n" + - "\n" + - "\n" + - " private BaseFigureImpl figure(final BaseFigureImpl figure) { return figure; }\n" + - "\n" + - " private ChartImpl chart(final BaseFigureImpl figure) { \n" + - " if( this.lastChart == null ) { return figure.newChart(); } 
\n" + - " ChartImpl c = this.lastChart.get(figure);\n" + - " if( c == null ) { c = figure.newChart(); }\n" + - " return c;\n" + - " }\n" + - "\n" + - " private AxesImpl axes(final BaseFigureImpl figure) {\n" + - " if( this.lastAxes == null ) { return chart(figure).newAxes(); }\n" + - " AxesImpl a = this.lastAxes.get(figure);\n" + - " if( a == null ) {\n" + - " ChartImpl c = chart(figure);\n" + - " a = c.newAxes();\n " + - " }\n" + - " return a;\n" + - " }\n" + - "\n" + - " private AxisImpl axis(final BaseFigureImpl figure) {\n" + - " if( this.lastAxis == null ) { throw new PlotRuntimeException(\"No axes have been selected.\", figure); }\n" - + - " AxisImpl a = this.lastAxis.get(figure);\n" + - " if( a == null ) { throw new PlotRuntimeException(\"No axes have been selected.\", figure); }\n" - + - " return a;\n" + - " }\n" + - "\n" + - " private Series series(final BaseFigureImpl figure) {\n" + - " if( this.lastSeries == null ) { throw new PlotRuntimeException(\"No series has been selected.\", figure); }\n" - + - " Series s = this.lastSeries.get(figure);\n" + - " if( s == null ) { throw new PlotRuntimeException(\"No series has been selected.\", figure); }\n" - + - " return s;\n" + - " }\n" + - "\n" + - "\n"; - } - - private String createFigureFuncs() { - return " /**\n" + - " * Creates a displayable figure that can be sent to the client.\n" + - " *\n" + - " * @return a displayable version of the figure\n" + - " */\n" + - " " + (isInterface ? "" : "@Override public ") + "Figure" - + (isInterface ? "" : "Impl") + " show()" + - (isInterface ? ";\n" - : " {\n" - + indent(2) + "final BaseFigureImpl fc = onDisplay();\n" - + indent(2) + "return new FigureWidget(make(fc));\n" - + indent(1) + "}\n") - + - (isInterface ? 
"\n" + "\n" + - " @Override Figure save( java.lang.String saveLocation );\n" + + " private final BaseFigureImpl figure;\n" + + " private final ChartLocation lastChart;\n" + + " private final AxesLocation lastAxes;\n" + + " private final AxisLocation lastAxis;\n" + + " private final SeriesLocation lastSeries;\n" + + " private final Map lastAxesMap;\n" + + " private final Map lastAxisMap;\n" + + " private final Map lastSeriesMap;\n" + + "\n" + + " private " + outputClassNameShort + + "(final BaseFigureImpl figure, final ChartLocation lastChart, final AxesLocation lastAxes, final AxisLocation lastAxis, final SeriesLocation lastSeries, final Map lastAxesMap, final Map lastAxisMap, final Map lastSeriesMap) {\n" + + + " this.figure = Require.neqNull(figure, \"figure\");\n" + + " this.lastChart = lastChart;\n" + + " this.lastAxes = lastAxes;\n" + + " this.lastAxis = lastAxis;\n" + + " this.lastSeries = lastSeries;\n" + + " this.lastAxesMap = new HashMap<>(lastAxesMap);\n" + + " this.lastAxisMap = new HashMap<>(lastAxisMap);\n" + + " this.lastSeriesMap = new HashMap<>(lastSeriesMap);\n" + + " if(this.lastAxes != null) { this.lastAxesMap.put(this.lastChart, this.lastAxes); }\n" + + " if(this.lastAxis != null) { this.lastAxisMap.put(this.lastAxes, this.lastAxis); }\n" + + " if(this.lastSeries != null) { this.lastSeriesMap.put(this.lastAxes, this.lastSeries); }\n" + + " }\n" + + "\n" + + " public " + outputClassNameShort + "(final " + outputClassNameShort + " figure) {\n" + + " this.figure = Require.neqNull(figure, \"figure\").figure;\n" + + " this.lastChart = figure.lastChart;\n" + + " this.lastAxes = figure.lastAxes;\n" + + " this.lastAxis = figure.lastAxis;\n" + + " this.lastSeries = figure.lastSeries;\n" + + " this.lastAxesMap = figure.lastAxesMap;\n" + + " this.lastAxisMap = figure.lastAxisMap;\n" + + " this.lastSeriesMap = figure.lastSeriesMap;\n" + + " }\n" + + "\n" + + " private " + outputClassNameShort + "(final BaseFigureImpl figure) {\n" + + " 
this(figure,null,null,null,null,new HashMap<>(),new HashMap<>(),new HashMap<>());\n" + + " }\n" + + "\n" + + " " + outputClassNameShort + "() {\n" + + " this(new BaseFigureImpl());\n" + + " }\n" + + "\n" + + " " + outputClassNameShort + "(final int numRows, final int numCols) {\n" + + " this(new BaseFigureImpl(numRows,numCols));\n" + + " }\n" + + "\n" + + " private AxesLocation resolveLastAxes(final BaseFigureImpl figure, final ChartLocation chartLoc){\n" + + + " if(chartLoc == null){\n" + + " return null;\n" + + " }\n" + + "\n" + + " final AxesLocation a0 = lastAxesMap.get(chartLoc);\n" + + "\n" + + " if( a0 != null) {\n" + + " return a0;\n" + + " }\n" + + "\n" + + " final List axs = chartLoc.get(figure).getAxes();\n" + + " return axs.isEmpty() ? null : new AxesLocation(axs.get(axs.size()-1));\n" + + " }\n" + + "\n" + + " private AxisLocation resolveLastAxis(final BaseFigureImpl figure, final AxesLocation axesLoc){\n" + + " if(axesLoc == null){\n" + + " return null;\n" + + " }\n" + + "\n" + + " final AxisLocation a0 = lastAxisMap.get(axesLoc);\n" + + "\n" + + " if( a0 != null ){\n" + + " return a0;\n" + + " }\n" + + "\n" + + " final AxesImpl axs = axesLoc.get(figure);\n" + + " return axs.dimension() <= 0 ? null : new AxisLocation(axs.axis(axs.dimension()-1));\n" + + " }\n" + + "\n" + + " private SeriesLocation resolveLastSeries(final BaseFigureImpl figure, final AxesLocation axesLoc){\n" + + + " if(axesLoc == null){\n" + + " return null;\n" + + " }\n" + + "\n" + + " final SeriesLocation s0 = lastSeriesMap.get(axesLoc);\n" + + "\n" + + " if( s0 != null ){\n" + + " return s0;\n" + + " }\n" + + "\n" + + " final SeriesInternal s1 = axesLoc.get(figure).dataSeries().lastSeries();\n" + + " return s1 == null ? 
null : new SeriesLocation(s1);\n" + + " }\n" + + "\n" + "\n" + - " @Override Figure save( java.lang.String saveLocation, int width, int height );\n" + " /**\n" + + " * Gets the mutable figure backing this immutable figure.\n" + + " *\n" + + " * @return mutable figure backing this immutable figure\n" + + " */\n" + + " public BaseFigureImpl getFigure() { return this.figure; }\n" + + "\n" + + "\n" + + " private " + outputClassNameShort + " make(final BaseFigureImpl figure){\n" + + " final ChartLocation chartLoc = this.lastChart;\n" + + " final AxesLocation axesLoc = this.lastAxes;\n" + + " final AxisLocation axisLoc = this.lastAxis;\n" + + " final SeriesLocation seriesLoc = this.lastSeries;\n" + + " return new " + outputClassNameShort + + "(figure, chartLoc, axesLoc, axisLoc, seriesLoc, this.lastAxesMap, this.lastAxisMap, this.lastSeriesMap);\n" + + + " }\n" + + "\n" + + " private " + outputClassNameShort + " make(final ChartImpl chart){\n" + + " final BaseFigureImpl figure = chart.figure();\n" + + " final ChartLocation chartLoc = new ChartLocation(chart);\n" + + " final AxesLocation axesLoc = resolveLastAxes(figure, chartLoc);\n" + + " final AxisLocation axisLoc = resolveLastAxis(figure, axesLoc);\n" + + " final SeriesLocation seriesLoc = resolveLastSeries(figure, axesLoc);\n" + + " return new " + outputClassNameShort + + "(figure, chartLoc, axesLoc, axisLoc, seriesLoc, this.lastAxesMap, this.lastAxisMap, this.lastSeriesMap);\n" + + + " }\n" + + "\n" + + " private " + outputClassNameShort + " make(final AxesImpl axes){\n" + + " final BaseFigureImpl figure = axes.chart().figure();\n" + + " final ChartLocation chartLoc = new ChartLocation(axes.chart());\n" + + " final AxesLocation axesLoc = new AxesLocation(axes);\n" + + " final AxisLocation axisLoc = resolveLastAxis(figure, axesLoc);\n" + + " final SeriesLocation seriesLoc = resolveLastSeries(figure, axesLoc);\n" + + " return new " + outputClassNameShort + + "(figure, chartLoc, axesLoc, axisLoc, seriesLoc, 
this.lastAxesMap, this.lastAxisMap, this.lastSeriesMap);\n" + + + " }\n" + + "\n" + + " private " + outputClassNameShort + " make(final AxesImpl axes, final AxisImpl axis){\n" + + " final BaseFigureImpl figure = axis.chart().figure();\n" + + " final ChartLocation chartLoc = new ChartLocation(axis.chart());\n" + + " final AxesLocation axesLoc = axes == null ? this.lastAxes : new AxesLocation(axes);\n" + + " final AxisLocation axisLoc = new AxisLocation(axis);\n" + + " final SeriesLocation seriesLoc = resolveLastSeries(figure, axesLoc);\n" + + " return new " + outputClassNameShort + + "(figure, chartLoc, axesLoc, axisLoc, seriesLoc, this.lastAxesMap, this.lastAxisMap, this.lastSeriesMap);\n" + + " }\n" + "\n" + - " @Override Figure save( java.lang.String saveLocation, boolean wait, long timeoutSeconds );\n" + " private " + outputClassNameShort + " make(final SeriesInternal series){\n" + + " final BaseFigureImpl figure = series.axes().chart().figure();\n" + + " final ChartLocation chartLoc = new ChartLocation(series.axes().chart());\n" + + " final AxesLocation axesLoc = new AxesLocation(series.axes());\n" + + " final AxisLocation axisLoc = resolveLastAxis(figure, axesLoc);\n" + + " final SeriesLocation seriesLoc = new SeriesLocation(series);\n" + + " return new " + outputClassNameShort + + "(figure, chartLoc, axesLoc, axisLoc, seriesLoc, this.lastAxesMap, this.lastAxisMap, this.lastSeriesMap);\n" + + " }\n" + + "\n" + + "\n" + + " private BaseFigureImpl figure(final BaseFigureImpl figure) { return figure; }\n" + + "\n" + + " private ChartImpl chart(final BaseFigureImpl figure) { \n" + + " if( this.lastChart == null ) { return figure.newChart(); } \n" + + " ChartImpl c = this.lastChart.get(figure);\n" + + " if( c == null ) { c = figure.newChart(); }\n" + + " return c;\n" + + " }\n" + "\n" + - " @Override Figure save( java.lang.String saveLocation, int width, int height, boolean wait, long timeoutSeconds );\n" - : "\n" - + " @Override public FigureImpl save( 
java.lang.String saveLocation ) {\n" + - " final BaseFigureImpl fc = onDisplay();\n" + - " figure(fc).save( saveLocation );\n" + - " return make(fc);\n" + - " }\n" + - "\n" + - " @Override public FigureImpl save( java.lang.String saveLocation, int width, int height ) {\n" - + - " final BaseFigureImpl fc = onDisplay();\n" + - " figure(fc).save( saveLocation, width, height );\n" + - " return make(fc);\n" + - " }\n" + - "\n" + - "\n" + - " @Override public FigureImpl save( java.lang.String saveLocation, boolean wait, long timeoutSeconds ) {\n" - + - " final BaseFigureImpl fc = onDisplay();\n" + - " figure(fc).save( saveLocation, wait, timeoutSeconds );\n" + - " return make(fc);\n" + - " }\n" + - "\n" + - " @Override public FigureImpl save( java.lang.String saveLocation, int width, int height, boolean wait, long timeoutSeconds ) {\n" - + - " final BaseFigureImpl fc = onDisplay();\n" + - " figure(fc).save( saveLocation, width, height, wait, timeoutSeconds );\n" - + - " return make(fc);\n" + - " }\n\n") - + (isInterface ? "" - : " /**\n" + - " * Perform operations required to display the plot.\n" + - " */\n" + - " private BaseFigureImpl onDisplay() {\n" + - " final FigureImpl fig = applyFunctionalProperties();\n" + - " final BaseFigureImpl fc = fig.figure.copy();\n" + - " fc.validateInitialization();\n" + - " return fc;\n" + - " }\n\n" + - " /**\n" + - " * Apply functions to our tables and consolidate them.\n" + - " */\n" + - " private FigureImpl applyFunctionalProperties() {\n" + - " final Map>> tableFunctionMap = getFigure().getTableFunctionMap();\n" - + - " final Map>> tableMapFunctionMap = getFigure().getTableMapFunctionMap();\n" - + - " final java.util.List figureFunctionList = getFigure().getFigureFunctionList();\n" - + - " final Map finalTableComputation = new HashMap<>();\n" + - " final Map finalTableMapComputation = new HashMap<>();\n" - + - " final java.util.Set

    allTables = new java.util.HashSet<>();\n" + - " final java.util.Set allTableMaps = new java.util.HashSet<>();\n" - + - "\n" + - " for(final io.deephaven.db.plot.util.tables.TableHandle h : getFigure().getTableHandles()) {\n" - + - " allTables.add(h.getTable());\n" + - " }\n" + - "\n" + - " for(final io.deephaven.db.plot.util.tables.TableMapHandle h : getFigure().getTableMapHandles()) {\n" - + - " if(h instanceof io.deephaven.db.plot.util.tables.TableBackedTableMapHandle) {\n" - + - " allTables.add(((io.deephaven.db.plot.util.tables.TableBackedTableMapHandle) h).getTable());\n" - + - " }\n" + - " if(h.getTableMap() != null) {\n" + - " allTableMaps.add(h.getTableMap());\n" + - " }\n" + - " }\n" + - "\n" + - " for(final Table initTable : allTables) {\n" + - " if(tableFunctionMap.get(initTable) != null) {\n" + - "\n" + - " finalTableComputation.computeIfAbsent(initTable, t -> {\n" + - " final java.util.Set> functions = tableFunctionMap.get(initTable);\n" - + - " Table resultTable = initTable;\n" + - "\n" + - " for(final java.util.function.Function f : functions) {\n" - + - " resultTable = f.apply(resultTable);\n" + - " }\n" + - "\n" + - " return resultTable;\n" + - " });\n" + - " } else {\n" + - " finalTableComputation.put(initTable, initTable);\n" + - " }\n" + - " }\n" + - "\n" + - "\n" + - " for(final io.deephaven.db.plot.util.tables.TableHandle h : getFigure().getTableHandles()) {\n" - + - " h.setTable(finalTableComputation.get(h.getTable()));\n" + - " }\n" + - "\n" + - " for(final io.deephaven.db.plot.util.tables.TableMapHandle h : getFigure().getTableMapHandles()) {\n" - + - " if(h instanceof io.deephaven.db.plot.util.tables.TableBackedTableMapHandle) {\n" - + - " ((io.deephaven.db.plot.util.tables.TableBackedTableMapHandle) h).setTable(finalTableComputation.get(((io.deephaven.db.plot.util.tables.TableBackedTableMapHandle) h).getTable()));\n" - + - " }\n" + - " }\n" + - "\n" + - " for(final io.deephaven.db.v2.TableMap initTableMap : allTableMaps) {\n" - + - " 
if(tableMapFunctionMap.get(initTableMap) != null) {\n" + - " finalTableMapComputation.computeIfAbsent(initTableMap, t -> {\n" - + - " final java.util.Set> functions = tableMapFunctionMap.get(initTableMap);\n" - + - " io.deephaven.db.v2.TableMap resultTableMap = initTableMap;\n" - + - "\n" + - " for(final java.util.function.Function f : functions) {\n" - + - " resultTableMap = f.apply(resultTableMap);\n" + - " }\n" + - "\n" + - " return resultTableMap;\n" + - " });\n" + - " } else {\n" + - " finalTableMapComputation.put(initTableMap, initTableMap);\n" + - " }\n" + - " }\n" + - "\n" + - " for(final io.deephaven.db.plot.util.tables.TableMapHandle h : getFigure().getTableMapHandles()) {\n" - + - " h.setTableMap(finalTableMapComputation.get(h.getTableMap()));\n" + - " }\n" + - "\n" + - " FigureImpl finalFigure = this;\n" + - " for(final java.util.function.Function figureFunction : figureFunctionList) {\n" - + - " finalFigure = figureFunction.apply(finalFigure);\n" + - " }\n" + - "\n" + - " tableFunctionMap.clear();\n" + - " tableMapFunctionMap.clear();\n" + - " figureFunctionList.clear();\n" + - "\n" + - " return finalFigure;\n" + - " }" - + "\n" - + "\n"); + " private AxesImpl axes(final BaseFigureImpl figure) {\n" + + " if( this.lastAxes == null ) { return chart(figure).newAxes(); }\n" + + " AxesImpl a = this.lastAxes.get(figure);\n" + + " if( a == null ) {\n" + + " ChartImpl c = chart(figure);\n" + + " a = c.newAxes();\n " + + " }\n" + + " return a;\n" + + " }\n" + + "\n" + + " private AxisImpl axis(final BaseFigureImpl figure) {\n" + + " if( this.lastAxis == null ) { throw new PlotRuntimeException(\"No axes have been selected.\", figure); }\n" + + + " AxisImpl a = this.lastAxis.get(figure);\n" + + " if( a == null ) { throw new PlotRuntimeException(\"No axes have been selected.\", figure); }\n" + + + " return a;\n" + + " }\n" + + "\n" + + " private Series series(final BaseFigureImpl figure) {\n" + + " if( this.lastSeries == null ) { throw new 
PlotRuntimeException(\"No series has been selected.\", figure); }\n" + + + " Series s = this.lastSeries.get(figure);\n" + + " if( s == null ) { throw new PlotRuntimeException(\"No series has been selected.\", figure); }\n" + + + " return s;\n" + + " }\n" + + "\n" + + "\n"; + } + + private String createFigureFuncs() { + return " /**\n" + + " * Creates a displayable figure that can be sent to the client.\n" + + " *\n" + + " * @return a displayable version of the figure\n" + + " */\n" + + " " + (isInterface ? "" : "@Override public ") + "Figure" + (isInterface ? "" : "Impl") + " show()" + + (isInterface ? ";\n" + : " {\n" + + indent(2) + "final BaseFigureImpl fc = onDisplay();\n" + + indent(2) + "return new FigureWidget(make(fc));\n" + + indent(1) + "}\n") + + + (isInterface ? "\n" + + "\n" + + " @Override Figure save( java.lang.String saveLocation );\n" + + "\n" + + " @Override Figure save( java.lang.String saveLocation, int width, int height );\n" + + "\n" + + " @Override Figure save( java.lang.String saveLocation, boolean wait, long timeoutSeconds );\n" + + + "\n" + + " @Override Figure save( java.lang.String saveLocation, int width, int height, boolean wait, long timeoutSeconds );\n" + : "\n" + " @Override public FigureImpl save( java.lang.String saveLocation ) {\n" + + " final BaseFigureImpl fc = onDisplay();\n" + + " figure(fc).save( saveLocation );\n" + + " return make(fc);\n" + + " }\n" + + "\n" + + " @Override public FigureImpl save( java.lang.String saveLocation, int width, int height ) {\n" + + + " final BaseFigureImpl fc = onDisplay();\n" + + " figure(fc).save( saveLocation, width, height );\n" + + " return make(fc);\n" + + " }\n" + + "\n" + + "\n" + + " @Override public FigureImpl save( java.lang.String saveLocation, boolean wait, long timeoutSeconds ) {\n" + + + " final BaseFigureImpl fc = onDisplay();\n" + + " figure(fc).save( saveLocation, wait, timeoutSeconds );\n" + + " return make(fc);\n" + + " }\n" + + "\n" + + " @Override public FigureImpl save( 
java.lang.String saveLocation, int width, int height, boolean wait, long timeoutSeconds ) {\n" + + + " final BaseFigureImpl fc = onDisplay();\n" + + " figure(fc).save( saveLocation, width, height, wait, timeoutSeconds );\n" + + " return make(fc);\n" + + " }\n\n") + + (isInterface ? "" + : " /**\n" + + " * Perform operations required to display the plot.\n" + + " */\n" + + " private BaseFigureImpl onDisplay() {\n" + + " final FigureImpl fig = applyFunctionalProperties();\n" + + " final BaseFigureImpl fc = fig.figure.copy();\n" + + " fc.validateInitialization();\n" + + " return fc;\n" + + " }\n\n" + + " /**\n" + + " * Apply functions to our tables and consolidate them.\n" + + " */\n" + + " private FigureImpl applyFunctionalProperties() {\n" + + " final Map>> tableFunctionMap = getFigure().getTableFunctionMap();\n" + + + " final Map>> tableMapFunctionMap = getFigure().getTableMapFunctionMap();\n" + + + " final java.util.List figureFunctionList = getFigure().getFigureFunctionList();\n" + + + " final Map finalTableComputation = new HashMap<>();\n" + + " final Map finalTableMapComputation = new HashMap<>();\n" + + + " final java.util.Set
    allTables = new java.util.HashSet<>();\n" + + " final java.util.Set allTableMaps = new java.util.HashSet<>();\n" + + + "\n" + + " for(final io.deephaven.db.plot.util.tables.TableHandle h : getFigure().getTableHandles()) {\n" + + + " allTables.add(h.getTable());\n" + + " }\n" + + "\n" + + " for(final io.deephaven.db.plot.util.tables.TableMapHandle h : getFigure().getTableMapHandles()) {\n" + + + " if(h instanceof io.deephaven.db.plot.util.tables.TableBackedTableMapHandle) {\n" + + + " allTables.add(((io.deephaven.db.plot.util.tables.TableBackedTableMapHandle) h).getTable());\n" + + + " }\n" + + " if(h.getTableMap() != null) {\n" + + " allTableMaps.add(h.getTableMap());\n" + + " }\n" + + " }\n" + + "\n" + + " for(final Table initTable : allTables) {\n" + + " if(tableFunctionMap.get(initTable) != null) {\n" + + "\n" + + " finalTableComputation.computeIfAbsent(initTable, t -> {\n" + + " final java.util.Set> functions = tableFunctionMap.get(initTable);\n" + + + " Table resultTable = initTable;\n" + + "\n" + + " for(final java.util.function.Function f : functions) {\n" + + + " resultTable = f.apply(resultTable);\n" + + " }\n" + + "\n" + + " return resultTable;\n" + + " });\n" + + " } else {\n" + + " finalTableComputation.put(initTable, initTable);\n" + + " }\n" + + " }\n" + + "\n" + + "\n" + + " for(final io.deephaven.db.plot.util.tables.TableHandle h : getFigure().getTableHandles()) {\n" + + + " h.setTable(finalTableComputation.get(h.getTable()));\n" + + " }\n" + + "\n" + + " for(final io.deephaven.db.plot.util.tables.TableMapHandle h : getFigure().getTableMapHandles()) {\n" + + + " if(h instanceof io.deephaven.db.plot.util.tables.TableBackedTableMapHandle) {\n" + + + " ((io.deephaven.db.plot.util.tables.TableBackedTableMapHandle) h).setTable(finalTableComputation.get(((io.deephaven.db.plot.util.tables.TableBackedTableMapHandle) h).getTable()));\n" + + + " }\n" + + " }\n" + + "\n" + + " for(final io.deephaven.db.v2.TableMap initTableMap : allTableMaps) {\n" + + " 
if(tableMapFunctionMap.get(initTableMap) != null) {\n" + + " finalTableMapComputation.computeIfAbsent(initTableMap, t -> {\n" + + " final java.util.Set> functions = tableMapFunctionMap.get(initTableMap);\n" + + + " io.deephaven.db.v2.TableMap resultTableMap = initTableMap;\n" + + "\n" + + " for(final java.util.function.Function f : functions) {\n" + + + " resultTableMap = f.apply(resultTableMap);\n" + + " }\n" + + "\n" + + " return resultTableMap;\n" + + " });\n" + + " } else {\n" + + " finalTableMapComputation.put(initTableMap, initTableMap);\n" + + " }\n" + + " }\n" + + "\n" + + " for(final io.deephaven.db.plot.util.tables.TableMapHandle h : getFigure().getTableMapHandles()) {\n" + + + " h.setTableMap(finalTableMapComputation.get(h.getTableMap()));\n" + + " }\n" + + "\n" + + " FigureImpl finalFigure = this;\n" + + " for(final java.util.function.Function figureFunction : figureFunctionList) {\n" + + + " finalFigure = figureFunction.apply(finalFigure);\n" + + " }\n" + + "\n" + + " tableFunctionMap.clear();\n" + + " tableMapFunctionMap.clear();\n" + + " figureFunctionList.clear();\n" + + "\n" + + " return finalFigure;\n" + + " }" + + "\n" + + "\n"); } private static String createInstanceGetter(final JavaFunction f) { @@ -692,8 +675,7 @@ private static String createInstanceGetter(final JavaFunction f) { case "io.deephaven.db.plot.datasets.multiseries.MultiSeries": return "series(fc)"; default: - System.out.println( - "Don't know how to create instance getter: className=" + f.getClassName()); + System.out.println("Don't know how to create instance getter: className=" + f.getClassName()); return "WTF(fc)"; } } @@ -743,8 +725,7 @@ private String createFunctionSignature(final JavaFunction f) { if (f.isVarArgs() && i == f.getParameterTypes().length - 1) { final int index = typeString.lastIndexOf("[]"); - typeString = - typeString.substring(0, index) + "..." + typeString.substring(index + 2); + typeString = typeString.substring(0, index) + "..." 
+ typeString.substring(index + 2); } s += " " + typeString + " " + f.getParameterNames()[i]; @@ -785,52 +766,46 @@ private String createFunction(final JavaFunction f) { s += " {\n" + indent(2) + "final BaseFigureImpl fc = this.figure.copy();\n"; if (returnClass != null && BaseFigure.class.isAssignableFrom(returnClass)) { - s += indent(2) + createInstanceGetter(f) + "." + signature.getMethodName() + "(" - + callArgs + ");\n" + - indent(2) + "return make(fc);\n"; + s += indent(2) + createInstanceGetter(f) + "." + signature.getMethodName() + "(" + callArgs + ");\n" + + indent(2) + "return make(fc);\n"; } else if (returnClass != null && Chart.class.isAssignableFrom(returnClass)) { s += indent(2) + "final ChartImpl chart = (ChartImpl) " + createInstanceGetter(f) + "." - + signature.getMethodName() + "(" + callArgs + ");\n" + - indent(2) + "return make(chart);\n"; + + signature.getMethodName() + "(" + callArgs + ");\n" + + indent(2) + "return make(chart);\n"; } else if (returnClass != null && Axes.class.isAssignableFrom(returnClass)) { s += indent(2) + "final AxesImpl axes = (AxesImpl) " + createInstanceGetter(f) + "." - + signature.getMethodName() + "(" + callArgs + ");\n" + - indent(2) + "return make(axes);\n"; + + signature.getMethodName() + "(" + callArgs + ");\n" + + indent(2) + "return make(axes);\n"; } else if (returnClass != null && Axis.class.isAssignableFrom(returnClass) - && f.getClassName().equals("io.deephaven.db.plot.Axes")) { + && f.getClassName().equals("io.deephaven.db.plot.Axes")) { s += indent(2) + "final AxesImpl axes = " + createInstanceGetter(f) + ";\n"; - s += indent(2) + "final AxisImpl axis = (AxisImpl) axes." + signature.getMethodName() - + "(" + callArgs + ");\n" + - indent(2) + "return make(axes, axis);\n"; + s += indent(2) + "final AxisImpl axis = (AxisImpl) axes." 
+ signature.getMethodName() + "(" + callArgs + + ");\n" + + indent(2) + "return make(axes, axis);\n"; } else if (returnClass != null && Axis.class.isAssignableFrom(returnClass)) { s += indent(2) + "final AxisImpl axis = (AxisImpl) " + createInstanceGetter(f) + "." - + signature.getMethodName() + "(" + callArgs + ");\n" + - indent(2) + "return make(null, axis);\n"; + + signature.getMethodName() + "(" + callArgs + ");\n" + + indent(2) + "return make(null, axis);\n"; } else if (returnClass != null && DataSeries.class.isAssignableFrom(returnClass)) { - s += indent(2) + "final DataSeriesInternal series = (DataSeriesInternal) " - + createInstanceGetter(f) + "." + signature.getMethodName() + "(" + callArgs - + ");\n" + - indent(2) + "return make(series);\n"; + s += indent(2) + "final DataSeriesInternal series = (DataSeriesInternal) " + createInstanceGetter(f) + "." + + signature.getMethodName() + "(" + callArgs + ");\n" + + indent(2) + "return make(series);\n"; } else if (returnClass != null && Series.class.isAssignableFrom(returnClass)) { - s += indent(2) + "final SeriesInternal series = (SeriesInternal) " - + createInstanceGetter(f) + "." + signature.getMethodName() + "(" + callArgs - + ");\n" + - indent(2) + "return make(series);\n"; + s += indent(2) + "final SeriesInternal series = (SeriesInternal) " + createInstanceGetter(f) + "." + + signature.getMethodName() + "(" + callArgs + ");\n" + + indent(2) + "return make(series);\n"; } else if (returnClass != null && MultiSeries.class.isAssignableFrom(returnClass)) { - s += indent(2) + "final " + returnClass.getSimpleName() + " mseries = " - + createInstanceGetter(f) + "." + signature.getMethodName() + "(" + callArgs - + ");\n" + - indent(2) + "return make((SeriesInternal) mseries);\n"; + s += indent(2) + "final " + returnClass.getSimpleName() + " mseries = " + createInstanceGetter(f) + "." 
+ + signature.getMethodName() + "(" + callArgs + ");\n" + + indent(2) + "return make((SeriesInternal) mseries);\n"; } else if (returnClass != null && void.class.equals(returnClass)) { - s += indent(2) + createInstanceGetter(f) + "." + signature.getMethodName() + "(" - + callArgs + ");\n" + - indent(2) + "return make(fc);\n"; + s += indent(2) + createInstanceGetter(f) + "." + signature.getMethodName() + "(" + callArgs + ");\n" + + indent(2) + "return make(fc);\n"; } else { System.out.println("WARN: UnsupportedReturnType: " + returnType + " " + f); - s += indent(2) + createInstanceGetter(f) + "." + signature.getMethodName() + "(" - + callArgs + ");\n" + - indent(2) + "return make(fc);\n"; + s += indent(2) + createInstanceGetter(f) + "." + signature.getMethodName() + "(" + callArgs + ");\n" + + indent(2) + "return make(fc);\n"; } s += indent(1) + "}\n"; @@ -858,8 +833,8 @@ private String createSignatureGroupFunction(final TreeSet fs) { String s = signature; s += " {\n" + - indent(2) + "final BaseFigureImpl fc = this.figure.copy();\n" + - indent(2) + "Series series = series(fc);\n"; + indent(2) + "final BaseFigureImpl fc = this.figure.copy();\n" + + indent(2) + "Series series = series(fc);\n"; boolean firstFunc = true; @@ -877,13 +852,12 @@ private String createSignatureGroupFunction(final TreeSet fs) { s += indent(2) + "} else if( series instanceof " + f.getClassNameShort() + "){\n"; } - s += indent(3) + returnClass.getSimpleName() + " result = ((" + f.getClassNameShort() - + ") series)." + f.getMethodName() + "(" + callArgs + ");\n"; + s += indent(3) + returnClass.getSimpleName() + " result = ((" + f.getClassNameShort() + ") series)." 
+ + f.getMethodName() + "(" + callArgs + ");\n"; if (DataSeries.class.isAssignableFrom(returnClass)) { s += indent(3) + "return make((DataSeriesInternal)result);\n"; - } else if (MultiSeries.class.isAssignableFrom(returnClass) - || Series.class.isAssignableFrom(returnClass)) { + } else if (MultiSeries.class.isAssignableFrom(returnClass) || Series.class.isAssignableFrom(returnClass)) { s += indent(3) + "return make((SeriesInternal)result);\n"; } else { throw new IllegalStateException("UnsupportedReturnType: " + returnType + " " + f); @@ -893,27 +867,25 @@ private String createSignatureGroupFunction(final TreeSet fs) { s += indent(2) + "} "; - if (!f.getClassNameShort().equals("MultiSeries") - && !f.getClassNameShort().equals("XYDataSeriesFunction")) { + if (!f.getClassNameShort().equals("MultiSeries") && !f.getClassNameShort().equals("XYDataSeriesFunction")) { s += makeMultiSeriesGetter(f); } firstFunc = false; } s += "else {\n" + - indent(3) - + "throw new PlotUnsupportedOperationException(\"Series type does not support this method. seriesType=\" + series.getClass() + \" method='" - + signature.trim() + "'\", figure);\n" + - indent(2) + "}\n"; + indent(3) + + "throw new PlotUnsupportedOperationException(\"Series type does not support this method. 
seriesType=\" + series.getClass() + \" method='" + + signature.trim() + "'\", figure);\n" + + indent(2) + "}\n"; s += indent(1) + "}\n"; return s; } private Map> commonSignatureGroups( - final String[] interfaces) throws ClassNotFoundException { - final Map> methods = - new TreeMap<>(); + final String[] interfaces) throws ClassNotFoundException { + final Map> methods = new TreeMap<>(); final Set functionSet = new HashSet<>(); for (String iface : interfaces) { @@ -927,13 +899,12 @@ private Map> commonSig boolean isObject = m.getDeclaringClass().equals(Object.class); if (!isStatic && isPublic && !isObject) { - final GroovyStaticImportGenerator.JavaFunction f = - new GroovyStaticImportGenerator.JavaFunction(m); - if (functionSet.add(f)) { // avoids repeating methods that have the same - // parameter types but different parameter names + final GroovyStaticImportGenerator.JavaFunction f = new GroovyStaticImportGenerator.JavaFunction(m); + if (functionSet.add(f)) { // avoids repeating methods that have the same parameter types but + // different parameter names final String key = createFunctionSignature(f); final TreeSet mm = - methods.computeIfAbsent(key, k -> new TreeSet<>()); + methods.computeIfAbsent(key, k -> new TreeSet<>()); mm.add(f); } } @@ -946,10 +917,10 @@ private Map> commonSig private static String makeMultiSeriesGetter(final JavaFunction f) { final String args = createMultiSeriesArgs(f); return "else if(series instanceof MultiSeries) {\n" + - " final MultiSeries result = ((MultiSeries) series)." + f.getMethodName() - + "(" + args + ");\n" + - " return make((SeriesInternal) result);\n" + - " } "; + " final MultiSeries result = ((MultiSeries) series)." 
+ f.getMethodName() + "(" + args + + ");\n" + + " return make((SeriesInternal) result);\n" + + " } "; } private static String createMultiSeriesArgs(JavaFunction f) { @@ -963,8 +934,8 @@ private static String createMultiSeriesArgs(JavaFunction f) { return args; } - private static void generateFile(final String devroot, final boolean assertNoChange, - final boolean isInterface) throws ClassNotFoundException, IOException { + private static void generateFile(final String devroot, final boolean assertNoChange, final boolean isInterface) + throws ClassNotFoundException, IOException { log.setLevel(Level.WARNING); log.warning("Running GenerateFigureImmutable assertNoChange=" + assertNoChange); @@ -1002,24 +973,21 @@ private static void generateFile(final String devroot, final boolean assertNoCha final List> skips = Arrays.asList(x -> { try { - return x - .equals(new JavaFunction(PlotExceptionCause.class.getMethod("getPlotInfo"))); + return x.equals(new JavaFunction(PlotExceptionCause.class.getMethod("getPlotInfo"))); } catch (NoSuchMethodException e) { throw new RuntimeException(e); } }, x -> { try { - return x - .equals(new JavaFunction(BaseFigureImpl.class.getMethod("save", String.class))) - || - x.equals(new JavaFunction( - BaseFigureImpl.class.getMethod("save", String.class, int.class, int.class))) - || - x.equals(new JavaFunction(BaseFigureImpl.class.getMethod("save", String.class, - boolean.class, long.class))) - || - x.equals(new JavaFunction(BaseFigureImpl.class.getMethod("save", String.class, - int.class, int.class, boolean.class, long.class))); + return x.equals(new JavaFunction(BaseFigureImpl.class.getMethod("save", String.class))) || + x.equals(new JavaFunction( + BaseFigureImpl.class.getMethod("save", String.class, int.class, int.class))) + || + x.equals(new JavaFunction( + BaseFigureImpl.class.getMethod("save", String.class, boolean.class, long.class))) + || + x.equals(new JavaFunction(BaseFigureImpl.class.getMethod("save", String.class, int.class, + 
int.class, boolean.class, long.class))); } catch (NoSuchMethodException e) { throw new RuntimeException(e); } @@ -1028,20 +996,19 @@ private static void generateFile(final String devroot, final boolean assertNoCha @SuppressWarnings("unchecked") GenerateFigureImmutable gen = new GenerateFigureImmutable(isInterface, imports, interfaces, - seriesInterfaces, skips, JavaFunction::getMethodName); + seriesInterfaces, skips, JavaFunction::getMethodName); final String code = gen.generateCode(); log.info("\n\n**************************************\n\n"); log.info(code); - String file = - devroot + "/Plot/src/main/java/" + gen.outputClass.replace(".", "/") + ".java"; + String file = devroot + "/Plot/src/main/java/" + gen.outputClass.replace(".", "/") + ".java"; if (assertNoChange) { String oldCode = new String(Files.readAllBytes(Paths.get(file))); if (!code.equals(oldCode)) { throw new RuntimeException( - "Change in generated code. Run GenerateFigureImmutable or \"./gradlew :Generators:generateFigureImmutable\" to regenerate\n"); + "Change in generated code. 
Run GenerateFigureImmutable or \"./gradlew :Generators:generateFigureImmutable\" to regenerate\n"); } } else { diff --git a/Generators/src/main/java/io/deephaven/db/plot/util/GenerateMultiSeries.java b/Generators/src/main/java/io/deephaven/db/plot/util/GenerateMultiSeries.java index 98fc468aec0..e1e92770727 100644 --- a/Generators/src/main/java/io/deephaven/db/plot/util/GenerateMultiSeries.java +++ b/Generators/src/main/java/io/deephaven/db/plot/util/GenerateMultiSeries.java @@ -29,8 +29,7 @@ public class GenerateMultiSeries { private static Logger log = Logger.getLogger(GenerateMultiSeries.class.toString()); - public static void main(String[] args) - throws ClassNotFoundException, IOException, NoSuchMethodException { + public static void main(String[] args) throws ClassNotFoundException, IOException, NoSuchMethodException { String devroot = null; boolean assertNoChange = false; @@ -47,119 +46,108 @@ public static void main(String[] args) } final Set skip = new HashSet<>(); - skip.add(Class.forName("io.deephaven.db.plot.datasets.DataSeries").getMethod("pointSize", - int.class)); - skip.add(Class.forName("io.deephaven.db.plot.datasets.DataSeries").getMethod("pointSize", - double.class)); - skip.add(Class.forName("io.deephaven.db.plot.datasets.DataSeries").getMethod("pointSize", - long.class)); + skip.add(Class.forName("io.deephaven.db.plot.datasets.DataSeries").getMethod("pointSize", int.class)); + skip.add(Class.forName("io.deephaven.db.plot.datasets.DataSeries").getMethod("pointSize", double.class)); + skip.add(Class.forName("io.deephaven.db.plot.datasets.DataSeries").getMethod("pointSize", long.class)); new Generator("io.deephaven.db.plot.datasets.multiseries.MultiSeries", - "DataSeriesInternal", - false, - false, - false, - "io.deephaven.db.plot.datasets.DataSeries", - "io.deephaven.db.plot.datasets.category.CategoryDataSeries", - "io.deephaven.db.plot.datasets.xy.XYDataSeries", - "io.deephaven.db.plot.datasets.ohlc.OHLCDataSeries").generateCode(devroot, - 
assertNoChange, skip); + "DataSeriesInternal", + false, + false, + false, + "io.deephaven.db.plot.datasets.DataSeries", + "io.deephaven.db.plot.datasets.category.CategoryDataSeries", + "io.deephaven.db.plot.datasets.xy.XYDataSeries", + "io.deephaven.db.plot.datasets.ohlc.OHLCDataSeries").generateCode(devroot, assertNoChange, skip); new Generator("io.deephaven.db.plot.datasets.multiseries.AbstractMultiSeries", - "DataSeriesInternal", - true, - false, - false, - "io.deephaven.db.plot.datasets.DataSeries", - "io.deephaven.db.plot.datasets.category.CategoryDataSeries", - "io.deephaven.db.plot.datasets.xy.XYDataSeries", - "io.deephaven.db.plot.datasets.ohlc.OHLCDataSeries").generateCode(devroot, - assertNoChange, skip); + "DataSeriesInternal", + true, + false, + false, + "io.deephaven.db.plot.datasets.DataSeries", + "io.deephaven.db.plot.datasets.category.CategoryDataSeries", + "io.deephaven.db.plot.datasets.xy.XYDataSeries", + "io.deephaven.db.plot.datasets.ohlc.OHLCDataSeries").generateCode(devroot, assertNoChange, skip); new Generator( - "io.deephaven.db.plot.datasets.multiseries.MultiCatSeries", - "CategoryDataSeriesInternal", - false, - false, - false, - "io.deephaven.db.plot.datasets.category.CategoryDataSeries").generateCode(devroot, - assertNoChange, skip); + "io.deephaven.db.plot.datasets.multiseries.MultiCatSeries", + "CategoryDataSeriesInternal", + false, + false, + false, + "io.deephaven.db.plot.datasets.category.CategoryDataSeries").generateCode(devroot, assertNoChange, + skip); new Generator( - "io.deephaven.db.plot.datasets.multiseries.MultiCatErrorBarSeries", - "CategoryErrorBarDataSeriesInternal", - false, - false, - false, - "io.deephaven.db.plot.datasets.category.CategoryDataSeries").generateCode(devroot, - assertNoChange, skip); + "io.deephaven.db.plot.datasets.multiseries.MultiCatErrorBarSeries", + "CategoryErrorBarDataSeriesInternal", + false, + false, + false, + "io.deephaven.db.plot.datasets.category.CategoryDataSeries").generateCode(devroot, 
assertNoChange, + skip); new Generator( - "io.deephaven.db.plot.datasets.multiseries.MultiCatErrorBarSeriesSwappable", - "CategoryErrorBarDataSeriesInternal", - false, - false, - true, - "io.deephaven.db.plot.datasets.category.CategoryDataSeries").generateCode(devroot, - assertNoChange, skip); + "io.deephaven.db.plot.datasets.multiseries.MultiCatErrorBarSeriesSwappable", + "CategoryErrorBarDataSeriesInternal", + false, + false, + true, + "io.deephaven.db.plot.datasets.category.CategoryDataSeries").generateCode(devroot, assertNoChange, + skip); new Generator( - "io.deephaven.db.plot.datasets.multiseries.MultiXYErrorBarSeries", - "XYErrorBarDataSeriesInternal", - false, - false, - false, - "io.deephaven.db.plot.datasets.xy.XYDataSeries").generateCode(devroot, assertNoChange, - skip); + "io.deephaven.db.plot.datasets.multiseries.MultiXYErrorBarSeries", + "XYErrorBarDataSeriesInternal", + false, + false, + false, + "io.deephaven.db.plot.datasets.xy.XYDataSeries").generateCode(devroot, assertNoChange, skip); new Generator( - "io.deephaven.db.plot.datasets.multiseries.MultiXYErrorBarSeriesSwappable", - "XYErrorBarDataSeriesInternal", - false, - false, - true, - "io.deephaven.db.plot.datasets.xy.XYDataSeries").generateCode(devroot, assertNoChange, - skip); + "io.deephaven.db.plot.datasets.multiseries.MultiXYErrorBarSeriesSwappable", + "XYErrorBarDataSeriesInternal", + false, + false, + true, + "io.deephaven.db.plot.datasets.xy.XYDataSeries").generateCode(devroot, assertNoChange, skip); new Generator( - "io.deephaven.db.plot.datasets.multiseries.MultiCatSeriesSwappable", - "CategoryDataSeriesInternal", - false, - false, - true, - "io.deephaven.db.plot.datasets.category.CategoryDataSeries").generateCode(devroot, - assertNoChange, skip); + "io.deephaven.db.plot.datasets.multiseries.MultiCatSeriesSwappable", + "CategoryDataSeriesInternal", + false, + false, + true, + "io.deephaven.db.plot.datasets.category.CategoryDataSeries").generateCode(devroot, assertNoChange, + skip); 
new Generator("io.deephaven.db.plot.datasets.multiseries.MultiXYSeries", - "XYDataSeriesInternal", - false, - false, - false, - "io.deephaven.db.plot.datasets.xy.XYDataSeries").generateCode(devroot, assertNoChange, - skip); + "XYDataSeriesInternal", + false, + false, + false, + "io.deephaven.db.plot.datasets.xy.XYDataSeries").generateCode(devroot, assertNoChange, skip); new Generator("io.deephaven.db.plot.datasets.multiseries.MultiXYSeriesSwappable", - "XYDataSeriesInternal", - false, - false, - true, - "io.deephaven.db.plot.datasets.xy.XYDataSeries").generateCode(devroot, assertNoChange, - skip); + "XYDataSeriesInternal", + false, + false, + true, + "io.deephaven.db.plot.datasets.xy.XYDataSeries").generateCode(devroot, assertNoChange, skip); new Generator("io.deephaven.db.plot.datasets.multiseries.MultiOHLCSeries", - "OHLCDataSeriesInternal", - false, - false, - false, - "io.deephaven.db.plot.datasets.ohlc.OHLCDataSeries").generateCode(devroot, - assertNoChange, skip); + "OHLCDataSeriesInternal", + false, + false, + false, + "io.deephaven.db.plot.datasets.ohlc.OHLCDataSeries").generateCode(devroot, assertNoChange, skip); new Generator("io.deephaven.db.plot.datasets.multiseries.MultiOHLCSeriesSwappable", - "OHLCDataSeriesInternal", - false, - false, - true, - "io.deephaven.db.plot.datasets.ohlc.OHLCDataSeries").generateCode(devroot, - assertNoChange, skip); + "OHLCDataSeriesInternal", + false, + false, + true, + "io.deephaven.db.plot.datasets.ohlc.OHLCDataSeries").generateCode(devroot, assertNoChange, skip); } static class Generator { @@ -174,8 +162,8 @@ static class Generator { private final Class output; Generator(final String outputClass, final String parameterizedType, final boolean isGeneric, - final boolean isTransform, final boolean isSwappable, final String... interfaces) - throws ClassNotFoundException { + final boolean isTransform, final boolean isSwappable, final String... 
interfaces) + throws ClassNotFoundException { this.outputClass = outputClass; this.parameterizedType = parameterizedType; this.isGeneric = isGeneric; @@ -190,35 +178,32 @@ static class Generator { } - private void generateCode(final String devroot, final boolean assertNoChange, - final Set skip) throws ClassNotFoundException, IOException { + private void generateCode(final String devroot, final boolean assertNoChange, final Set skip) + throws ClassNotFoundException, IOException { final StringBuilder code = new StringBuilder(); - final String outputFile = - devroot + "/Plot/src/main/java/" + outputClass.replace(".", "/") + ".java"; + final String outputFile = devroot + "/Plot/src/main/java/" + outputClass.replace(".", "/") + ".java"; final String headerMessage = "CODE BELOW HERE IS GENERATED -- DO NOT EDIT BY HAND"; final String headerComment = "//////////////////////////////"; final String headerSpace = " "; - final String header = - headerSpace + headerComment + " " + headerMessage + " " + headerComment; - final String header2 = headerSpace + headerComment - + " TO REGENERATE RUN GenerateMultiSeries " + headerComment; - final String header3 = headerSpace + headerComment - + " AND THEN RUN GenerateFigureImmutable " + headerComment; + final String header = headerSpace + headerComment + " " + headerMessage + " " + headerComment; + final String header2 = + headerSpace + headerComment + " TO REGENERATE RUN GenerateMultiSeries " + headerComment; + final String header3 = + headerSpace + headerComment + " AND THEN RUN GenerateFigureImmutable " + headerComment; code.append(String.join("\n", header, header2, header3)).append("\n\n"); code.append(generateClasses(skip)); - final String axes = - Files.lines(Paths.get(outputFile)).reduce("\n", (a, b) -> a + "\n" + b); + final String axes = Files.lines(Paths.get(outputFile)).reduce("\n", (a, b) -> a + "\n" + b); int cutPoint = axes.lastIndexOf(header); if (cutPoint != axes.indexOf(header)) { throw new IllegalArgumentException( - 
"Input source code contains two autogenerated sections! file=" + outputFile); + "Input source code contains two autogenerated sections! file=" + outputFile); } if (cutPoint < 0) { @@ -235,7 +220,7 @@ private void generateCode(final String devroot, final boolean assertNoChange, String oldCode = new String(Files.readAllBytes(Paths.get(outputFile))); if (!newcode.equals(oldCode)) { throw new RuntimeException("Change in generated code for " + outputFile - + ". Run GenerateMultiSeries or \"./gradlew :Generators:generateMultiSeries\" to regenerate\n"); + + ". Run GenerateMultiSeries or \"./gradlew :Generators:generateMultiSeries\" to regenerate\n"); } } else { PrintWriter out = new PrintWriter(outputFile); @@ -249,30 +234,29 @@ private String generateClasses(final Set skip) throws ClassNotFoundExcep final StringBuilder code = new StringBuilder(); if (!isInterface && !isAbstract) { code.append(indent(1)).append("@Override public void initializeSeries(") - .append(parameterizedType) - .append(" series) {\n").append(indent(2)) - .append("$$initializeSeries$$(series);\n").append(indent(1)).append("}\n\n"); + .append(parameterizedType) + .append(" series) {\n").append(indent(2)).append("$$initializeSeries$$(series);\n") + .append(indent(1)).append("}\n\n"); } final List sortedMethods = new ArrayList<>(); - final List methodsWithFunctionParameter = - new ArrayList<>(); + final List methodsWithFunctionParameter = new ArrayList<>(); for (final String clazz : interfaces) { final Class dataseries = Class.forName(clazz); final Method[] methods = Arrays.stream(dataseries.getMethods()) - .filter(m -> !skip.contains(m)) - .toArray(Method[]::new); + .filter(m -> !skip.contains(m)) + .toArray(Method[]::new); final GroovyStaticImportGenerator.JavaFunction[] functionalMethods = - filterBadMethods(Arrays.stream(methods) - .filter(m -> hasFunction(m.getParameterTypes())) - .map(GroovyStaticImportGenerator.JavaFunction::new) - .toArray(GroovyStaticImportGenerator.JavaFunction[]::new)); + 
filterBadMethods(Arrays.stream(methods) + .filter(m -> hasFunction(m.getParameterTypes())) + .map(GroovyStaticImportGenerator.JavaFunction::new) + .toArray(GroovyStaticImportGenerator.JavaFunction[]::new)); final GroovyStaticImportGenerator.JavaFunction[] nonFunctionalMethods = - Arrays.stream(methods) - .filter(m -> !hasFunction(m.getParameterTypes())) - .map(GroovyStaticImportGenerator.JavaFunction::new) - .toArray(GroovyStaticImportGenerator.JavaFunction[]::new); + Arrays.stream(methods) + .filter(m -> !hasFunction(m.getParameterTypes())) + .map(GroovyStaticImportGenerator.JavaFunction::new) + .toArray(GroovyStaticImportGenerator.JavaFunction[]::new); Arrays.sort(functionalMethods); Arrays.sort(nonFunctionalMethods); Collections.addAll(methodsWithFunctionParameter, functionalMethods); @@ -285,22 +269,19 @@ private String generateClasses(final Set skip) throws ClassNotFoundExcep if (!methodsDone.add(mapName)) { continue; } - final String outputSimple = - isTransform ? "AbstractMultiSeries" : output.getSimpleName(); + final String outputSimple = isTransform ? "AbstractMultiSeries" : output.getSimpleName(); code.append(createMethodWithFunctionParameter(outputSimple, function)); code.append("\n\n"); } - final Map mapToFunction = - new HashMap<>(); + final Map mapToFunction = new HashMap<>(); for (final GroovyStaticImportGenerator.JavaFunction function : sortedMethods) { final String mapName = createMapName(function); if (mapToFunction.get(mapName) != null) { continue; } mapToFunction.put(mapName, function); - final String outputSimple = - isTransform ? "AbstractMultiSeries" : output.getSimpleName(); + final String outputSimple = isTransform ? 
"AbstractMultiSeries" : output.getSimpleName(); if (!isInterface && !isAbstract && !isTransform) { String mapType = getMapType(function); code.append(createMap(mapType, mapName)); @@ -335,33 +316,30 @@ private boolean hasFunction(Class[] parameterTypes) { private String createCopyConstructor(final Set strings) { final List copiedVars = new ArrayList<>(); for (final String var : strings) { - copiedVars - .add(" %NEWSERIES%.%VAR% = %VAR%.copy();".replaceAll("%VAR%", var)); + copiedVars.add(" %NEWSERIES%.%VAR% = %VAR%.copy();".replaceAll("%VAR%", var)); } final String ret = " @Override\n" + - " public %CLASSNAME% copy(AxesImpl axes) {\n" + - " final %CLASSNAME% %NEWSERIES% = new %CLASSNAME%(this, axes);\n" + - " " + String.join("\n", copiedVars) + "\n" + - " return %NEWSERIES%;\n" + - " }"; - - return ret.replaceAll("%CLASSNAME%", output.getSimpleName()).replaceAll("%NEWSERIES%", - "__s__"); + " public %CLASSNAME% copy(AxesImpl axes) {\n" + + " final %CLASSNAME% %NEWSERIES% = new %CLASSNAME%(this, axes);\n" + + " " + String.join("\n", copiedVars) + "\n" + + " return %NEWSERIES%;\n" + + " }"; + + return ret.replaceAll("%CLASSNAME%", output.getSimpleName()).replaceAll("%NEWSERIES%", "__s__"); } private String createInitializeFunction( - final Map mapToFunction) { + final Map mapToFunction) { final StringBuilder code = new StringBuilder(); final Map> functionToGenerics = - createFunctionToGenerics(mapToFunction.values()); - - code.append(indent(1)).append("@SuppressWarnings(\"unchecked\") \n").append(indent(1)) - .append("private ") - .append(createGenericInitializeSeries(mapToFunction, functionToGenerics)) - .append("void") - .append(" $$initializeSeries$$(") - .append(parameterizedType) - .append(" series) {\n"); + createFunctionToGenerics(mapToFunction.values()); + + code.append(indent(1)).append("@SuppressWarnings(\"unchecked\") \n").append(indent(1)).append("private ") + .append(createGenericInitializeSeries(mapToFunction, functionToGenerics)) + .append("void") + 
.append(" $$initializeSeries$$(") + .append(parameterizedType) + .append(" series) {\n"); if (isTransform) { return code.append(createInitializeFunctionTransform()).toString(); } @@ -370,8 +348,7 @@ private String createInitializeFunction( boolean objectArrayInitialized = false; int numConsumers = 0; - for (final Map.Entry entry : mapToFunction - .entrySet()) { + for (final Map.Entry entry : mapToFunction.entrySet()) { final String map = entry.getKey(); final GroovyStaticImportGenerator.JavaFunction function = entry.getValue(); final boolean oneArg = function.getParameterNames().length == 1; @@ -382,26 +359,24 @@ private String createInitializeFunction( try { String typeName = function.getParameterTypes()[0].getTypeName(); typeName = typeName.contains(Closure.class.getCanonicalName()) - ? ClosureFunction.class.getCanonicalName() - : typeName; + ? ClosureFunction.class.getCanonicalName() + : typeName; code.append(indent(2)).append("java.util.function.Consumer<") - .append( - toGenerics == null - ? TypeUtils.getBoxedType(ClassUtil.lookupClass(typeName)) - .getCanonicalName() - : toGenerics.apply(typeName)) - .append("> ").append(consumerName) - .append(" = series::") - .append(function.getMethodName()); + .append(toGenerics == null + ? 
TypeUtils.getBoxedType(ClassUtil.lookupClass(typeName)).getCanonicalName() + : toGenerics.apply(typeName)) + .append("> ").append(consumerName) + .append(" = series::") + .append(function.getMethodName()); } catch (ClassNotFoundException e) { throw new RuntimeException(e); } code.append(";\n"); code.append(indent(2)) - .append(map) - .append(".runIfKeyExistsCast(") - .append(consumerName) - .append(", name);\n"); + .append(map) + .append(".runIfKeyExistsCast(") + .append(consumerName) + .append(", name);\n"); } else if (!objectArrayInitialized) { code.append(indent(2)).append("java.lang.Object[] "); objectArrayInitialized = true; @@ -409,30 +384,28 @@ private String createInitializeFunction( if (!oneArg) { code.append(indent(2)).append("objectArray = ") - .append(map) - .append(".get(name);\n"); + .append(map) + .append(".get(name);\n"); code.append(indent(2)).append("if(objectArray != null) {series.") - .append(function.getMethodName()) - .append("("); + .append(function.getMethodName()) + .append("("); final Type[] types = function.getParameterTypes(); final List args = new ArrayList<>(); for (int i = 0; i < types.length; i++) { String typeName = types[i].getTypeName(); - if (typeName.contains("java.util.Map") - || typeName.contains("groovy.lang.Closure") - || typeName.contains("java.util.function.Function")) { + if (typeName.contains("java.util.Map") || typeName.contains("groovy.lang.Closure") + || typeName.contains("java.util.function.Function")) { typeName = typeName.replace(Closure.class.getCanonicalName(), - ClosureFunction.class.getCanonicalName()); + ClosureFunction.class.getCanonicalName()); final int ind = typeName.indexOf("<"); typeName = typeName.substring(0, ind < 0 ? 
typeName.length() : ind); } final String arg; if (typeName.equals("io.deephaven.db.tables.Table")) { - arg = "((io.deephaven.db.plot.util.tables.TableHandle) objectArray[" + i - + "]).getTable()"; + arg = "((io.deephaven.db.plot.util.tables.TableHandle) objectArray[" + i + "]).getTable()"; } else if (typeName.equals("java.lang.Object")) { arg = "objectArray[" + i + "]"; } else { @@ -450,24 +423,21 @@ private String createInitializeFunction( } private String createInitializeFunctionTransform() { - return indent(2) + "this.series.initializeSeries((SERIES2) series);\n" + indent(1) - + "}\n"; + return indent(2) + "this.series.initializeSeries((SERIES2) series);\n" + indent(1) + "}\n"; } private Map> createFunctionToGenerics( - Collection functionSet) { - final Map> map = - new HashMap<>(); + Collection functionSet) { + final Map> map = new HashMap<>(); final Map generics = new HashMap<>(); final Map counter = new HashMap<>(); for (final GroovyStaticImportGenerator.JavaFunction function : functionSet) { for (TypeVariable typeVariable : function.getTypeParameters()) { final String typeName = typeVariable.getTypeName(); final Type[] bounds = typeVariable.getBounds(); - if (bounds.length > 1) { // probably don't need more than one extend at this - // point, this is just much easier right now todo? - throw new UnsupportedOperationException( - "Doesn't support more than one type right now."); + if (bounds.length > 1) { // probably don't need more than one extend at this point, this is just + // much easier right now todo? 
+ throw new UnsupportedOperationException("Doesn't support more than one type right now."); } final Type type = typeVariable.getBounds()[0]; if (!generics.containsKey(typeName)) { @@ -483,8 +453,7 @@ private Map> if (oldFunction == null) { newFunction = s -> s.replaceAll(typeName, newTypeName); } else { - newFunction = - s -> oldFunction.apply(s).replaceAll(typeName, newTypeName); + newFunction = s -> oldFunction.apply(s).replaceAll(typeName, newTypeName); } map.put(function, newFunction); } @@ -494,8 +463,8 @@ private Map> } private String createGenericInitializeSeries( - final Map mapToFunction, - final Map> functionToGenerics) { + final Map mapToFunction, + final Map> functionToGenerics) { final List args = new ArrayList<>(); final Set variableNames = new HashSet<>(); for (final GroovyStaticImportGenerator.JavaFunction function : mapToFunction.values()) { @@ -516,19 +485,17 @@ private String createGenericInitializeSeries( } private String createMapName(final GroovyStaticImportGenerator.JavaFunction function) { - return "%METHOD%SeriesNameTo%TYPES%Map" - .replaceAll("%TYPES%", createArgsString(function)) - .replaceAll("%METHOD%", function.getMethodName()); + return "%METHOD%SeriesNameTo%TYPES%Map".replaceAll("%TYPES%", createArgsString(function)) + .replaceAll("%METHOD%", function.getMethodName()); } private String createMap(String mapType, final String mapName) { - return indent(1) + "private " + mapType + " " + mapName - + " = new io.deephaven.db.plot.util.PlotUtils" + - ".HashMapWithDefault<>();\n"; + return indent(1) + "private " + mapType + " " + mapName + " = new io.deephaven.db.plot.util.PlotUtils" + + ".HashMapWithDefault<>();\n"; } - private String createMethod(final String returnClass, - final GroovyStaticImportGenerator.JavaFunction function, final String mapName) { + private String createMethod(final String returnClass, final GroovyStaticImportGenerator.JavaFunction function, + final String mapName) { final StringBuilder code = new StringBuilder(); 
code.append(createMethodHeader(returnClass, function)); if (isTransform) { @@ -544,12 +511,12 @@ private String createMethod(final String returnClass, private String createGetter(String mapType, String mapName) { return indent(1) + "public " + mapType + " " + mapName + "() {\n" + - indent(2) + "return " + mapName + ";\n" + - indent(1) + "}\n"; + indent(2) + "return " + mapName + ";\n" + + indent(1) + "}\n"; } private String createMethodWithFunctionParameter(String returnClass, - GroovyStaticImportGenerator.JavaFunction function) throws ClassNotFoundException { + GroovyStaticImportGenerator.JavaFunction function) throws ClassNotFoundException { final StringBuilder code = new StringBuilder(); code.append(createMethodHeader(returnClass, function)); if (isTransform) { @@ -563,43 +530,41 @@ private String createMethodWithFunctionParameter(String returnClass, return code.toString(); } - private String createFunctionalBody(final String returnClass, - GroovyStaticImportGenerator.JavaFunction function) throws ClassNotFoundException { + private String createFunctionalBody(final String returnClass, GroovyStaticImportGenerator.JavaFunction function) + throws ClassNotFoundException { if (function.getParameterTypes()[0].getTypeName().startsWith("groovy.lang.Closure")) { return indent(2) + "return " + function.getMethodName() - + "(new io.deephaven.db.plot.util.functions.ClosureFunction<>(" - + function.getParameterNames()[0] + "), keys);\n" + indent(1) + "}\n\n"; + + "(new io.deephaven.db.plot.util.functions.ClosureFunction<>(" + + function.getParameterNames()[0] + "), keys);\n" + indent(1) + "}\n\n"; } final String tableMethodName = getTableMethodName(function.getMethodName()); - return indent(2) + "final String newColumn = " - + getColumnNameConstant(function.getMethodName()) + " + this.hashCode();\n" + - indent(2) + "applyFunction(" + function.getParameterNames()[0] + ", newColumn, " - + getFunctionInput(function) + ", " + getReturnTypeName(function) + ".class);\n" + - 
indent(2) - + "chart().figure().registerFigureFunction(new io.deephaven.db.plot.util.functions.FigureImplFunction(f -> f." - + - tableMethodName + - "(" + getFigureFunctionInput(returnClass, function, tableMethodName) - + "));\n" + - indent(2) + "return this;\n" + indent(1) + "}\n\n"; + return indent(2) + "final String newColumn = " + getColumnNameConstant(function.getMethodName()) + + " + this.hashCode();\n" + + indent(2) + "applyFunction(" + function.getParameterNames()[0] + ", newColumn, " + + getFunctionInput(function) + ", " + getReturnTypeName(function) + ".class);\n" + + indent(2) + + "chart().figure().registerFigureFunction(new io.deephaven.db.plot.util.functions.FigureImplFunction(f -> f." + + + tableMethodName + + "(" + getFigureFunctionInput(returnClass, function, tableMethodName) + + "));\n" + + indent(2) + "return this;\n" + indent(1) + "}\n\n"; } private String getFigureFunctionInput(final String returnClass, - final GroovyStaticImportGenerator.JavaFunction function, final String tableMethodName) - throws ClassNotFoundException { + final GroovyStaticImportGenerator.JavaFunction function, final String tableMethodName) + throws ClassNotFoundException { final StringBuilder code = new StringBuilder(); code.append(isSwappable ? "new SelectableDataSetSwappableTable(getSwappableTable()), " - : "getTableMapHandle().getTable(), "); + : "getTableMapHandle().getTable(), "); if (function.getMethodName().equals("pointColorByY")) { - final Class c = - Class.forName("io.deephaven.db.plot.datasets.multiseries." + returnClass); + final Class c = Class.forName("io.deephaven.db.plot.datasets.multiseries." 
+ returnClass); final Method[] methods = Arrays.stream(c.getDeclaredMethods()) - .filter(m -> m.getName().equals(tableMethodName)) - .filter(m -> m.getParameterTypes().length > 0 - && m.getParameterTypes()[0].equals(Table.class)) - .toArray(Method[]::new); + .filter(m -> m.getName().equals(tableMethodName)) + .filter(m -> m.getParameterTypes().length > 0 && m.getParameterTypes()[0].equals(Table.class)) + .toArray(Method[]::new); Arrays.sort(methods, Comparator.comparingInt(Method::getParameterCount)); @@ -613,9 +578,8 @@ private String getFigureFunctionInput(final String returnClass, break; default: log.warning(tableMethod.toString()); - throw new IllegalStateException( - "Can not calculate function input for function " + tableMethodName - + " in class " + function.getClassNameShort()); + throw new IllegalStateException("Can not calculate function input for function " + + tableMethodName + " in class " + function.getClassNameShort()); } return code.append(", keys), this").toString(); @@ -623,22 +587,21 @@ private String getFigureFunctionInput(final String returnClass, final Class c = Class.forName(function.getClassName()); final Method[] methods = Arrays.stream(c.getMethods()) - .filter(m -> m.getName().equals(tableMethodName)) - .filter(m -> m.getParameterTypes().length > 0 - && m.getParameterTypes()[0].equals(Table.class)) - .toArray(Method[]::new); + .filter(m -> m.getName().equals(tableMethodName)) + .filter(m -> m.getParameterTypes().length > 0 && m.getParameterTypes()[0].equals(Table.class)) + .toArray(Method[]::new); if (methods.length < 1) { - throw new IllegalStateException("No table methods for method " + tableMethodName - + " in class " + function.getClassNameShort()); + throw new IllegalStateException( + "No table methods for method " + tableMethodName + " in class " + function.getClassNameShort()); } if (methods.length != 1) { - log.warning("More than 1 possible table method for function input for function " - + tableMethodName + " in class " + 
function.getClassNameShort()); + log.warning("More than 1 possible table method for function input for function " + tableMethodName + + " in class " + function.getClassNameShort()); log.warning(Arrays.toString(methods)); - throw new IllegalStateException("Can not calculate function input for function " - + tableMethodName + " in class " + function.getClassNameShort()); + throw new IllegalStateException("Can not calculate function input for function " + tableMethodName + + " in class " + function.getClassNameShort()); } final Method method = methods[0]; @@ -649,8 +612,8 @@ private String getFigureFunctionInput(final String returnClass, code.append("getX(), "); break; default: - throw new IllegalStateException("Can not calculate function input for function " - + tableMethodName + " in class " + function.getClassNameShort()); + throw new IllegalStateException("Can not calculate function input for function " + tableMethodName + + " in class " + function.getClassNameShort()); } code.append("newColumn, keys), this"); @@ -659,22 +622,19 @@ private String getFigureFunctionInput(final String returnClass, } private static GroovyStaticImportGenerator.JavaFunction[] filterBadMethods( - final GroovyStaticImportGenerator.JavaFunction[] functions) { + final GroovyStaticImportGenerator.JavaFunction[] functions) { final List retList = new ArrayList<>(); - final Map, GroovyStaticImportGenerator.JavaFunction> functionMap = - new HashMap<>(); + final Map, GroovyStaticImportGenerator.JavaFunction> functionMap = new HashMap<>(); for (final GroovyStaticImportGenerator.JavaFunction function : functions) { if (function.getParameterTypes()[0].getTypeName().contains("Function")) { final Pair methodPair = - new Pair<>(function.getMethodName(), function.getParameterNames().length); - final GroovyStaticImportGenerator.JavaFunction oldFunction = - functionMap.get(methodPair); + new Pair<>(function.getMethodName(), function.getParameterNames().length); + final 
GroovyStaticImportGenerator.JavaFunction oldFunction = functionMap.get(methodPair); if (oldFunction != null) { - if (oldFunction.getTypeParameters().length < 1 - && function.getTypeParameters().length > 0) { + if (oldFunction.getTypeParameters().length < 1 && function.getTypeParameters().length > 0) { functionMap.put(methodPair, function); } } else { @@ -695,14 +655,12 @@ private String getFunctionInput(GroovyStaticImportGenerator.JavaFunction functio return "getX()"; } else if (function.getMethodName().endsWith("ByY")) { return "getY()"; - } else if (function.getParameterTypes()[0].getTypeName() - .startsWith("java.util.function.Function")) { + } else if (function.getParameterTypes()[0].getTypeName().startsWith("java.util.function.Function")) { return "getX()"; } - throw new IllegalStateException( - "Don't know what to make function input for method " + function.getMethodName() - + " param class " + function.getParameterTypes()[0].getClass()); + throw new IllegalStateException("Don't know what to make function input for method " + + function.getMethodName() + " param class " + function.getParameterTypes()[0].getClass()); } private String getReturnTypeName(final GroovyStaticImportGenerator.JavaFunction function) { @@ -712,15 +670,14 @@ private String getReturnTypeName(final GroovyStaticImportGenerator.JavaFunction return returnType.substring(0, returnType.length() - 1).trim(); } - return function.getTypeParameters()[function.getTypeParameters().length - 1] - .getBounds()[0].getTypeName(); + return function.getTypeParameters()[function.getTypeParameters().length - 1].getBounds()[0].getTypeName(); } private String getTableMethodName(String methodName) { methodName = methodName.endsWith("ByX") || methodName.endsWith("ByY") - ? methodName.substring(0, methodName.length() - 3) - : methodName; + ? 
methodName.substring(0, methodName.length() - 3) + : methodName; if (methodName.startsWith("pointColor")) { return "pointColor"; @@ -731,8 +688,8 @@ private String getTableMethodName(String methodName) { private String getColumnNameConstant(String methodName) { methodName = methodName.endsWith("ByX") || methodName.endsWith("ByY") - ? methodName.substring(0, methodName.length() - 3) - : methodName; + ? methodName.substring(0, methodName.length() - 3) + : methodName; if (methodName.startsWith("pointColor")) { return "io.deephaven.db.plot.datasets.ColumnNameConstants.POINT_COLOR"; @@ -744,42 +701,35 @@ private String getColumnNameConstant(String methodName) { return "io.deephaven.db.plot.datasets.ColumnNameConstants.POINT_LABEL"; } - throw new IllegalStateException( - "No column name constant corresponds to method name " + methodName); + throw new IllegalStateException("No column name constant corresponds to method name " + methodName); } - private String createTransformBody( - final GroovyStaticImportGenerator.JavaFunction function) { + private String createTransformBody(final GroovyStaticImportGenerator.JavaFunction function) { final List args = new ArrayList<>(); Collections.addAll(args, function.getParameterNames()); args.add("keys"); - return indent(2) + "//noinspection unchecked\n" + indent(2) - + "return (AbstractMultiSeries) series." + function.getMethodName() + "(" - + String.join(", ", args) + ");\n" + indent(1) + "}\n\n"; + return indent(2) + "//noinspection unchecked\n" + indent(2) + "return (AbstractMultiSeries) series." 
+ + function.getMethodName() + "(" + String.join(", ", args) + ");\n" + indent(1) + "}\n\n"; } - private String createExceptionMethodBody( - final GroovyStaticImportGenerator.JavaFunction function) { + private String createExceptionMethodBody(final GroovyStaticImportGenerator.JavaFunction function) { return indent(2) - + "throw new PlotUnsupportedOperationException(\"DataSeries \" + this.getClass() + \" does not support method " - + function.getMethodName() + " for arguments " - + Arrays.toString(function.getParameterTypes()) - + ". If you think this method should work, try placing your keys into an Object array\", this);\n" - + indent(1) + "}\n\n"; + + "throw new PlotUnsupportedOperationException(\"DataSeries \" + this.getClass() + \" does not support method " + + function.getMethodName() + " for arguments " + Arrays.toString(function.getParameterTypes()) + + ". If you think this method should work, try placing your keys into an Object array\", this);\n" + + indent(1) + "}\n\n"; } private String createMethodHeader(final String returnClass, - final GroovyStaticImportGenerator.JavaFunction function) { - String methodHeader = indent(1) + (!isInterface ? "@Override public " : "") - + getGenericTypes(function) + returnClass + (isGeneric ? "" : "") + " " - + function.getMethodName() + "("; + final GroovyStaticImportGenerator.JavaFunction function) { + String methodHeader = indent(1) + (!isInterface ? "@Override public " : "") + getGenericTypes(function) + + returnClass + (isGeneric ? 
"" : "") + " " + function.getMethodName() + "("; final String[] args = function.getParameterNames(); final Type[] types = function.getParameterTypes(); if (args.length != types.length) { - throw new IllegalStateException( - "Number of parameter names and number of parameter types not equal!"); + throw new IllegalStateException("Number of parameter names and number of parameter types not equal!"); } final List arguments = new ArrayList<>(); @@ -788,7 +738,7 @@ private String createMethodHeader(final String returnClass, } return methodHeader + String.join(", ", arguments) + ", final Object... keys)" - + (!isInterface ? " {\n" : ";\n"); + + (!isInterface ? " {\n" : ";\n"); } private String getGenericTypes(final GroovyStaticImportGenerator.JavaFunction function) { @@ -812,25 +762,23 @@ private String getGenericTypes(TypeVariable typeVariable) { bounds.add(bound.getTypeName()); } } - return typeVariable.getName() - + (bounds.isEmpty() ? "" : " extends " + String.join(" & ", bounds)); + return typeVariable.getName() + (bounds.isEmpty() ? 
"" : " extends " + String.join(" & ", bounds)); } - private String createMapCode(final GroovyStaticImportGenerator.JavaFunction function, - final String mapName) { + private String createMapCode(final GroovyStaticImportGenerator.JavaFunction function, final String mapName) { final Type[] vars = function.getParameterTypes(); final String[] names = function.getParameterNames(); final StringBuilder code = new StringBuilder(); for (int i = 0; i < vars.length; i++) { if (vars[i].getTypeName().contains(Closure.class.getCanonicalName())) { code.append(indent(2)) - .append(ClosureFunction.class.getCanonicalName()) - .append(" ").append(names[i]) - .append("ClosureFunction = new ") - .append(ClosureFunction.class.getCanonicalName()) - .append("<>(") - .append(names[i]) - .append(");\n"); + .append(ClosureFunction.class.getCanonicalName()) + .append(" ").append(names[i]) + .append("ClosureFunction = new ") + .append(ClosureFunction.class.getCanonicalName()) + .append("<>(") + .append(names[i]) + .append(");\n"); } } final Map tableToTableHandleVarMap = new HashMap<>(); @@ -839,11 +787,9 @@ private String createMapCode(final GroovyStaticImportGenerator.JavaFunction func if (var.getTypeName().equals("io.deephaven.db.tables.Table")) { final String handleName = names[i] + "Handle"; tableToTableHandleVarMap.put(names[i], handleName); - code.append(indent(1)) - .append("final io.deephaven.db.plot.util.tables.TableHandle ") - .append(handleName) - .append(" = new io.deephaven.db.plot.util.tables.TableHandle(") - .append(names[i]); + code.append(indent(1)).append("final io.deephaven.db.plot.util.tables.TableHandle ") + .append(handleName).append(" = new io.deephaven.db.plot.util.tables.TableHandle(") + .append(names[i]); for (int j = i + 1; j < vars.length; j++) { if (vars[j].getTypeName().equals("java.lang.String")) { @@ -853,43 +799,39 @@ private String createMapCode(final GroovyStaticImportGenerator.JavaFunction func } } code.append(");\n"); - 
code.append(indent(1)).append("addTableHandle(").append(handleName) - .append(");\n"); + code.append(indent(1)).append("addTableHandle(").append(handleName).append(");\n"); } } final String args = createSmartKeyArgs(function, tableToTableHandleVarMap); final boolean oneArgument = function.getParameterNames().length == 1; - code.append(indent(2)).append("if(keys == null || keys.length == 0) {\n") - .append(indent(3)) - .append(mapName) - .append(".setDefault(") - .append(oneArgument ? args : "new Object[]{" + args + "}") - .append(");\n") - .append(indent(2)).append("} else {"); + code.append(indent(2)).append("if(keys == null || keys.length == 0) {\n").append(indent(3)) + .append(mapName) + .append(".setDefault(") + .append(oneArgument ? args : "new Object[]{" + args + "}") + .append(");\n") + .append(indent(2)).append("} else {"); code.append("\n").append(indent(3)) - .append(mapName) - .append( - ".put(namingFunction.apply(keys.length == 1 ? keys[0] : new io.deephaven.datastructures.util.SmartKey(keys)), "); + .append(mapName) + .append(".put(namingFunction.apply(keys.length == 1 ? 
keys[0] : new io.deephaven.datastructures.util.SmartKey(keys)), "); if (oneArgument) { - code.append("\n").append(indent(4)).append(args).append(");\n").append(indent(2)) - .append("}\n"); + code.append("\n").append(indent(4)).append(args).append(");\n").append(indent(2)).append("}\n"); } else { - code.append("\n").append(indent(4)).append("new Object[]{ ").append(args) - .append("});\n").append(indent(2)).append("}\n"); + code.append("\n").append(indent(4)).append("new Object[]{ ").append(args).append("});\n") + .append(indent(2)).append("}\n"); } - return code.append("\n").append(indent(2)).append("return this;\n").append(indent(1)) - .append("}\n\n").toString(); + return code.append("\n").append(indent(2)).append("return this;\n").append(indent(1)).append("}\n\n") + .toString(); } private String createSmartKeyArgs(final GroovyStaticImportGenerator.JavaFunction function, - final Map tableToTableHandleVarMap) { + final Map tableToTableHandleVarMap) { List args = new ArrayList<>(); final Type[] vars = function.getParameterTypes(); final String[] names = Arrays.stream(function.getParameterNames()) - .map(s -> tableToTableHandleVarMap.getOrDefault(s, s)).toArray(String[]::new); + .map(s -> tableToTableHandleVarMap.getOrDefault(s, s)).toArray(String[]::new); for (int i = 0; i < vars.length; i++) { if (vars[i].getTypeName().equals("java.lang.Object[]")) { @@ -911,8 +853,8 @@ private String createArgsString(final GroovyStaticImportGenerator.JavaFunction f final int indCarrot = typeName.indexOf("<"); typeName = typeName.substring(0, indCarrot > 0 ? 
indCarrot : typeName.length()); final int indDot = typeName.lastIndexOf("."); - args += typeName.substring(indDot + 1, typeName.length()).replaceAll("\\[", "Array") - .replaceAll("\\]", ""); + args += typeName.substring(indDot + 1, typeName.length()).replaceAll("\\[", "Array").replaceAll("\\]", + ""); } return args; } @@ -924,8 +866,7 @@ private static String getMapType(GroovyStaticImportGenerator.JavaFunction functi if (types.length == 1) { Class possiblyBoxed; try { - possiblyBoxed = - TypeUtils.getBoxedType(ClassUtil.lookupClass(types[0].getTypeName())); + possiblyBoxed = TypeUtils.getBoxedType(ClassUtil.lookupClass(types[0].getTypeName())); } catch (ClassNotFoundException e) { possiblyBoxed = Object.class; } diff --git a/Generators/src/main/java/io/deephaven/db/plot/util/GeneratePlottingConvenience.java b/Generators/src/main/java/io/deephaven/db/plot/util/GeneratePlottingConvenience.java index 4af43a1a258..653a70b5114 100644 --- a/Generators/src/main/java/io/deephaven/db/plot/util/GeneratePlottingConvenience.java +++ b/Generators/src/main/java/io/deephaven/db/plot/util/GeneratePlottingConvenience.java @@ -24,18 +24,16 @@ import static io.deephaven.db.plot.util.PlotGeneratorUtils.indent; /** - * Create static functions that resolve against the last created instance of a plotting figure - * class. This is to make a cleaner plotting interface + * Create static functions that resolve against the last created instance of a plotting figure class. 
This is to make a + * cleaner plotting interface */ public class GeneratePlottingConvenience { // See also GroovyStaticImportGenerator - private static final Logger log = - Logger.getLogger(GeneratePlottingConvenience.class.toString()); + private static final Logger log = Logger.getLogger(GeneratePlottingConvenience.class.toString()); private static final String OUTPUT_CLASS = "io.deephaven.db.plot.PlottingConvenience"; - private static final String OUTPUT_CLASS_NAME_SHORT = - OUTPUT_CLASS.substring(OUTPUT_CLASS.lastIndexOf('.') + 1); + private static final String OUTPUT_CLASS_NAME_SHORT = OUTPUT_CLASS.substring(OUTPUT_CLASS.lastIndexOf('.') + 1); private final Map nonstaticFunctions = new TreeMap<>(); @@ -44,8 +42,8 @@ public class GeneratePlottingConvenience { private final Function functionNamer; private GeneratePlottingConvenience(final String[] staticImports, final String[] imports, - final Collection> skips, - final Function functionNamer) throws ClassNotFoundException { + final Collection> skips, final Function functionNamer) + throws ClassNotFoundException { this.skips = skips; this.functionNamer = functionNamer == null ? 
JavaFunction::getMethodName : functionNamer; @@ -57,9 +55,9 @@ private GeneratePlottingConvenience(final String[] staticImports, final String[] log.info("Processing static class: " + c); final Method[] methods = Arrays.stream(c.getMethods()).filter( - m -> m.getName().equals(methodName) && Modifier.isStatic(m.getModifiers()) - && Modifier.isPublic(m.getModifiers())) - .toArray(Method[]::new); + m -> m.getName().equals(methodName) && Modifier.isStatic(m.getModifiers()) + && Modifier.isPublic(m.getModifiers())) + .toArray(Method[]::new); for (Method m : methods) { log.info("Processing static method (" + c + "): " + m); @@ -102,20 +100,19 @@ private boolean skip(final JavaFunction f, final boolean ignore) { return skip; } - private void addPublicMethod(Method m, Map functions, - boolean ignoreSkips) { + private void addPublicMethod(Method m, Map functions, boolean ignoreSkips) { log.info("Processing public method: " + m); final JavaFunction f = new JavaFunction(m); final JavaFunction signature = new JavaFunction( - OUTPUT_CLASS, - OUTPUT_CLASS_NAME_SHORT, - functionNamer.apply(f), - f.getTypeParameters(), - f.getReturnType(), - f.getParameterTypes(), - f.getParameterNames(), - f.isVarArgs()); + OUTPUT_CLASS, + OUTPUT_CLASS_NAME_SHORT, + functionNamer.apply(f), + f.getTypeParameters(), + f.getReturnType(), + f.getParameterTypes(), + f.getParameterNames(), + f.isVarArgs()); boolean skip = skip(f, ignoreSkips); @@ -196,13 +193,13 @@ private static Set typesToImport(Type t) { private String generateCode() { String code = "/*\n" + - " * Copyright (c) 2016-2021 Deephaven Data Labs and Patent Pending\n" + - " */\n\n" + - "/****************************************************************************************************************************\n" - + - " ****** AUTO-GENERATED CLASS - DO NOT EDIT MANUALLY - Run GeneratePlottingConvenience or \"./gradlew :Generators:generatePlottingConvenience\" to regenerate\n" - + - " 
****************************************************************************************************************************/\n\n"; + " * Copyright (c) 2016-2021 Deephaven Data Labs and Patent Pending\n" + + " */\n\n" + + "/****************************************************************************************************************************\n" + + + " ****** AUTO-GENERATED CLASS - DO NOT EDIT MANUALLY - Run GeneratePlottingConvenience or \"./gradlew :Generators:generatePlottingConvenience\" to regenerate\n" + + + " ****************************************************************************************************************************/\n\n"; code += "package io.deephaven.db.plot;\n\n"; @@ -254,8 +251,7 @@ private String generateCode() { return code; } - private static String createFunction(final JavaFunction f, final JavaFunction signature, - final boolean isStatic) { + private static String createFunction(final JavaFunction f, final JavaFunction signature, final boolean isStatic) { String returnType = f.getReturnType().getTypeName().replace("$", "."); String s = createJavadoc(f, signature); s += " public static "; @@ -304,8 +300,7 @@ private static String createFunction(final JavaFunction f, final JavaFunction si if (f.isVarArgs() && i == f.getParameterTypes().length - 1) { final int index = typeString.lastIndexOf("[]"); - typeString = - typeString.substring(0, index) + "..." + typeString.substring(index + 2); + typeString = typeString.substring(0, index) + "..." + typeString.substring(index + 2); } s += " " + typeString + " " + f.getParameterNames()[i]; @@ -314,16 +309,15 @@ private static String createFunction(final JavaFunction f, final JavaFunction si s += " ) {\n"; s += indent(2) + (f.getReturnType().equals(void.class) ? "" : "return ") - + (isStatic ? (f.getClassNameShort() + ".") : "FigureFactory.figure().") - + f.getMethodName() + "(" + callArgs + " );\n"; + + (isStatic ? 
(f.getClassNameShort() + ".") : "FigureFactory.figure().") + f.getMethodName() + "(" + + callArgs + " );\n"; s += indent(1) + "}\n"; return s; } private static String createJavadoc(final JavaFunction f, final JavaFunction signature) { - return " /**\n * See {@link " + f.getClassName() + "#" + signature.getMethodName() - + "} \n **/\n"; + return " /**\n * See {@link " + f.getClassName() + "#" + signature.getMethodName() + "} \n **/\n"; } public static void main(String[] args) throws ClassNotFoundException, IOException { @@ -381,33 +375,33 @@ public static void main(String[] args) throws ClassNotFoundException, IOExceptio }; final Set keepers = new HashSet<>(Arrays.asList( - "newAxes", - "newChart", - "plot", - "plotBy", - "ohlcPlot", - "ohlcPlotBy", - "histPlot", - "catHistPlot", - "catPlot", - "catPlotBy", - "piePlot", - "errorBarXY", - "errorBarXYBy", - "errorBarX", - "errorBarXBy", - "errorBarY", - "errorBarYBy", - "catErrorBar", - "catErrorBarBy")); + "newAxes", + "newChart", + "plot", + "plotBy", + "ohlcPlot", + "ohlcPlotBy", + "histPlot", + "catHistPlot", + "catPlot", + "catPlotBy", + "piePlot", + "errorBarXY", + "errorBarXYBy", + "errorBarX", + "errorBarXBy", + "errorBarY", + "errorBarYBy", + "catErrorBar", + "catErrorBarBy")); @SuppressWarnings("unchecked") GeneratePlottingConvenience gen = new GeneratePlottingConvenience(staticImports, imports, - Arrays.asList(javaFunction -> !keepers.contains(javaFunction.getMethodName())), - JavaFunction::getMethodName); + Arrays.asList(javaFunction -> !keepers.contains(javaFunction.getMethodName())), + JavaFunction::getMethodName); final String code = gen.generateCode() - .replace("io.deephaven.db.plot.FigureImpl", "io.deephaven.db.plot.Figure"); + .replace("io.deephaven.db.plot.FigureImpl", "io.deephaven.db.plot.Figure"); log.info("\n\n**************************************\n\n"); log.info(code); @@ -417,7 +411,7 @@ public static void main(String[] args) throws ClassNotFoundException, IOExceptio String oldCode = new 
String(Files.readAllBytes(Paths.get(file))); if (!code.equals(oldCode)) { throw new RuntimeException( - "Change in generated code. Run GeneratePlottingConvenience or \"./gradlew :Generators:generatePlottingConvenience\" to regenerate\n"); + "Change in generated code. Run GeneratePlottingConvenience or \"./gradlew :Generators:generatePlottingConvenience\" to regenerate\n"); } } else { diff --git a/Generators/src/main/java/io/deephaven/libs/GroovyStaticImportGenerator.java b/Generators/src/main/java/io/deephaven/libs/GroovyStaticImportGenerator.java index b9005d14470..0057d2226c6 100644 --- a/Generators/src/main/java/io/deephaven/libs/GroovyStaticImportGenerator.java +++ b/Generators/src/main/java/io/deephaven/libs/GroovyStaticImportGenerator.java @@ -18,9 +18,9 @@ /** - * Groovy has a bug where performing a static import on multiple libraries containing functions with - * the same name causes some of the functions to not be present in the namespace. This class - * combines static imports from multiple sources into a single class that can be imported. + * Groovy has a bug where performing a static import on multiple libraries containing functions with the same name + * causes some of the functions to not be present in the namespace. This class combines static imports from multiple + * sources into a single class that can be imported. 
*/ public class GroovyStaticImportGenerator { private static Logger log = Logger.getLogger(GroovyStaticImportGenerator.class.toString()); @@ -35,10 +35,9 @@ public static class JavaFunction implements Comparable { private final String[] parameterNames; private final boolean isVarArgs; - public JavaFunction(final String className, final String classNameShort, - final String methodName, final TypeVariable[] typeParameters, - final Type returnType, final Type[] parameterTypes, final String[] parameterNames, - final boolean isVarArgs) { + public JavaFunction(final String className, final String classNameShort, final String methodName, + final TypeVariable[] typeParameters, final Type returnType, final Type[] parameterTypes, + final String[] parameterNames, final boolean isVarArgs) { this.className = className; this.classNameShort = classNameShort; this.methodName = methodName; @@ -51,20 +50,20 @@ public JavaFunction(final String className, final String classNameShort, public JavaFunction(final Method m) { this( - m.getDeclaringClass().getCanonicalName(), - m.getDeclaringClass().getSimpleName(), - m.getName(), - m.getTypeParameters(), - m.getGenericReturnType(), - m.getGenericParameterTypes(), - Arrays.stream(m.getParameters()).map(Parameter::getName).toArray(String[]::new), - m.isVarArgs()); + m.getDeclaringClass().getCanonicalName(), + m.getDeclaringClass().getSimpleName(), + m.getName(), + m.getTypeParameters(), + m.getGenericReturnType(), + m.getGenericParameterTypes(), + Arrays.stream(m.getParameters()).map(Parameter::getName).toArray(String[]::new), + m.isVarArgs()); for (Parameter parameter : m.getParameters()) { if (!parameter.isNamePresent()) { throw new IllegalArgumentException( - "Parameter names are not present in the code! Was the code compiled with \"-parameters\": " - + toString()); + "Parameter names are not present in the code! 
Was the code compiled with \"-parameters\": " + + toString()); } } } @@ -95,13 +94,13 @@ public int hashCode() { @Override public String toString() { return "JavaFunction{" + - "className='" + className + '\'' + - ", methodName='" + methodName + '\'' + - ", typeParameters=" + Arrays.toString(typeParameters) + - ", returnType=" + returnType + - ", parameterTypes=" + Arrays.toString(parameterTypes) + - ", parameterNames=" + Arrays.toString(parameterNames) + - '}'; + "className='" + className + '\'' + + ", methodName='" + methodName + '\'' + + ", typeParameters=" + Arrays.toString(typeParameters) + + ", returnType=" + returnType + + ", parameterTypes=" + Arrays.toString(parameterTypes) + + ", parameterNames=" + Arrays.toString(parameterNames) + + '}'; } @Override @@ -154,8 +153,7 @@ public Class getReturnClass() { try { return TypeUtils.getErasedType(returnType); } catch (UnsupportedOperationException e) { - log.warning( - "Unable to determine Class from returnType=" + returnType.getTypeName()); + log.warning("Unable to determine Class from returnType=" + returnType.getTypeName()); return null; } } @@ -177,8 +175,8 @@ public boolean isVarArgs() { private final Map staticFunctions = new TreeMap<>(); private final Collection> skips; - private GroovyStaticImportGenerator(final String[] imports, - Collection> skips) throws ClassNotFoundException { + private GroovyStaticImportGenerator(final String[] imports, Collection> skips) + throws ClassNotFoundException { this.skips = skips; for (String imp : imports) { @@ -268,13 +266,13 @@ private static Set typesToImport(Type t) { private String generateCode() { String code = "/*\n" + - " * Copyright (c) 2016-2021 Deephaven Data Labs and Patent Pending\n" + - " */\n\n" + - "/****************************************************************************************************************************\n" - + - " ****** AUTO-GENERATED CLASS - DO NOT EDIT MANUALLY - Run GroovyStaticImportGenerator or \"./gradlew 
:Generators:groovyStaticImportGenerator\" to regenerate\n" - + - " ****************************************************************************************************************************/\n\n"; + " * Copyright (c) 2016-2021 Deephaven Data Labs and Patent Pending\n" + + " */\n\n" + + "/****************************************************************************************************************************\n" + + + " ****** AUTO-GENERATED CLASS - DO NOT EDIT MANUALLY - Run GroovyStaticImportGenerator or \"./gradlew :Generators:groovyStaticImportGenerator\" to regenerate\n" + + + " ****************************************************************************************************************************/\n\n"; code += "package io.deephaven.libs;\n\n"; @@ -305,12 +303,12 @@ private String generateCode() { String returnType = f.returnType.getTypeName(); String s = - " /** @see " + f.getClassName() + "#" + f.getMethodName() + "(" + - Arrays.stream(f.parameterTypes).map(t -> t.getTypeName().replace("", "")) - .collect(Collectors.joining(",")) - + - ") */\n" + - " public static "; + " /** @see " + f.getClassName() + "#" + f.getMethodName() + "(" + + Arrays.stream(f.parameterTypes).map(t -> t.getTypeName().replace("", "")) + .collect(Collectors.joining(",")) + + + ") */\n" + + " public static "; if (f.typeParameters.length > 0) { s += "<"; @@ -327,8 +325,7 @@ private String generateCode() { Type[] bounds = t.getBounds(); if (bounds.length != 1) { - throw new RuntimeException( - "Unsupported bounds: " + Arrays.toString(bounds)); + throw new RuntimeException("Unsupported bounds: " + Arrays.toString(bounds)); } Type bound = bounds[0]; @@ -414,10 +411,11 @@ public static void main(String[] args) throws ClassNotFoundException, IOExceptio @SuppressWarnings("unchecked") GroovyStaticImportGenerator gen = new GroovyStaticImportGenerator(imports, - Collections.singletonList((f) -> f.methodName.equals("sum") - && f.parameterTypes.length == 1 && 
f.parameterTypes[0].getTypeName() - .contains("io.deephaven.db.tables.dbarrays.DbArray<")) // skipping common - // erasure "sum" + Collections.singletonList((f) -> f.methodName.equals("sum") && f.parameterTypes.length == 1 + && f.parameterTypes[0].getTypeName().contains("io.deephaven.db.tables.dbarrays.DbArray<")) // skipping + // common + // erasure + // "sum" ); final String code = gen.generateCode(); @@ -430,7 +428,7 @@ public static void main(String[] args) throws ClassNotFoundException, IOExceptio String oldCode = new String(Files.readAllBytes(Paths.get(file))); if (!code.equals(oldCode)) { throw new RuntimeException( - "Change in generated code. Run GroovyStaticImportGenerator or \"./gradlew :DB:groovyStaticImportGenerator\" to regenerate\n"); + "Change in generated code. Run GroovyStaticImportGenerator or \"./gradlew :DB:groovyStaticImportGenerator\" to regenerate\n"); } } else { diff --git a/Generators/src/main/java/io/deephaven/python/PythonGeneratorParser.java b/Generators/src/main/java/io/deephaven/python/PythonGeneratorParser.java index 6969a726a5c..67ee448c004 100644 --- a/Generators/src/main/java/io/deephaven/python/PythonGeneratorParser.java +++ b/Generators/src/main/java/io/deephaven/python/PythonGeneratorParser.java @@ -40,9 +40,8 @@ static GeneratorElement[] validateArgs(final String[] args, final Logger log) { return parse(args, args[0]); } - static void logGenerationMessage(final Logger log, final String javaClass, - final String preambleFilePath, - final String destinationFilePath) { + static void logGenerationMessage(final Logger log, final String javaClass, final String preambleFilePath, + final String destinationFilePath) { log.info("Generating python methods for java class: " + javaClass); if (preambleFilePath != null) { log.info("Python preamble file: " + preambleFilePath); @@ -91,15 +90,15 @@ static GeneratorElement[] parse(final String[] args, final String devroot) { * @return array of valid methods. 
* @throws ClassNotFoundException */ - static GroovyStaticImportGenerator.JavaFunction[] getValidMethods(String javaClass, - Predicate filter) throws ClassNotFoundException { + static GroovyStaticImportGenerator.JavaFunction[] getValidMethods(String javaClass, Predicate filter) + throws ClassNotFoundException { final Class clazz = Class.forName(javaClass); final Method[] methods = clazz.getMethods(); final GroovyStaticImportGenerator.JavaFunction[] sortedMethods = Arrays.stream(methods) - .filter(filter) - .map(GroovyStaticImportGenerator.JavaFunction::new) - .sorted() - .toArray(GroovyStaticImportGenerator.JavaFunction[]::new); + .filter(filter) + .map(GroovyStaticImportGenerator.JavaFunction::new) + .sorted() + .toArray(GroovyStaticImportGenerator.JavaFunction[]::new); return sortedMethods; } @@ -112,9 +111,9 @@ static GroovyStaticImportGenerator.JavaFunction[] getValidMethods(String javaCla * @throws ClassNotFoundException */ static GroovyStaticImportGenerator.JavaFunction[] getPublicStaticMethods(final String javaClass, - final List skipGeneration) throws ClassNotFoundException { + final List skipGeneration) throws ClassNotFoundException { return getValidMethods(javaClass, (m) -> PythonGeneratorParser.isValidPublicStatic(m) - && (skipGeneration == null || !skipGeneration.contains(javaClass + "," + m.getName()))); + && (skipGeneration == null || !skipGeneration.contains(javaClass + "," + m.getName()))); } /** @@ -125,8 +124,8 @@ static GroovyStaticImportGenerator.JavaFunction[] getPublicStaticMethods(final S * @return MethodContainer corresponding to public, static methods of javaClass. 
* @throws ClassNotFoundException */ - static MethodContainer getPublicStaticMethodsDetails(final String javaClass, - final List skipGeneration) throws ClassNotFoundException { + static MethodContainer getPublicStaticMethodsDetails(final String javaClass, final List skipGeneration) + throws ClassNotFoundException { return new MethodContainer(getPublicStaticMethods(javaClass, skipGeneration)); } @@ -137,8 +136,7 @@ static MethodContainer getPublicStaticMethodsDetails(final String javaClass, * @return array of public, static methods. * @throws ClassNotFoundException */ - static GroovyStaticImportGenerator.JavaFunction[] getPublicMethods(String javaClass) - throws ClassNotFoundException { + static GroovyStaticImportGenerator.JavaFunction[] getPublicMethods(String javaClass) throws ClassNotFoundException { return getValidMethods(javaClass, PythonGeneratorParser::isValidPublic); } @@ -158,8 +156,7 @@ static MethodContainer getPublicMethodsDetails(String javaClass) throws ClassNot * @return true if the method is public, static and not from java.lang.Object. */ private static boolean isValidPublicStatic(Method m) { - return isValid(m) && Modifier.isStatic(m.getModifiers()) - && Modifier.isPublic(m.getModifiers()); + return isValid(m) && Modifier.isStatic(m.getModifiers()) && Modifier.isPublic(m.getModifiers()); } /** @@ -169,8 +166,7 @@ private static boolean isValidPublicStatic(Method m) { private static boolean isValidPublic(Method m) { // Note that this should _probably_ have !Modifier.isStatic(m.getModifiers()), // if it being used in tandem with isValidPublicStatic. - // i.e. this method should _maybe_ be called isValidPublicInstance. SME should make this - // decision. + // i.e. this method should _maybe_ be called isValidPublicInstance. SME should make this decision. 
return isValid(m) && Modifier.isPublic(m.getModifiers()); } @@ -192,19 +188,18 @@ private static boolean isValid(Method m) { * @throws IOException */ static void finalizeGeneration(final StringBuilder code, final boolean assertNoChange, - final String destinationFilePath, final String javaClass, final String gradleTask) - throws IOException { + final String destinationFilePath, final String javaClass, final String gradleTask) throws IOException { if (assertNoChange) { String oldCode = new String(Files.readAllBytes(Paths.get(destinationFilePath))); if (!code.toString().equals(oldCode)) { throw new RuntimeException("Change in generated code for class:" + javaClass + - " with Python file at " + destinationFilePath + ".\n " + - "Run the code generation task (" + gradleTask + ") to regenerate, " + - "followed by \"git diff " + destinationFilePath + "\" to see the changes." + - "\n\n" + - "To diagnose possible indeterminism in the generation process, regenerate " + - "the code and check the diff **multiple times**."); + " with Python file at " + destinationFilePath + ".\n " + + "Run the code generation task (" + gradleTask + ") to regenerate, " + + "followed by \"git diff " + destinationFilePath + "\" to see the changes." + + "\n\n" + + "To diagnose possible indeterminism in the generation process, regenerate " + + "the code and check the diff **multiple times**."); } } else { @@ -234,18 +229,18 @@ static ArrayList getDefaultDocRoot(final String devroot) { } /** - * Get the List of DocStringContainers corresponding to the java class specified by path and the - * documentation roots specified in docRoot. + * Get the List of DocStringContainers corresponding to the java class specified by path and the documentation roots + * specified in docRoot. * - * Note that it is currently expected that many classes have no docs populated, so nulls - * (corresponding to missing docs) are silently skipped. 
+ * Note that it is currently expected that many classes have no docs populated, so nulls (corresponding to missing + * docs) are silently skipped. * * @param path fully qualified path name for the java class. * @param docRoot the doc root list. * @return the DocstringContainer list, in order of preference. */ - static List getDocstringContainer(final String path, - final List docRoot, final Logger log) { + static List getDocstringContainer(final String path, final List docRoot, + final Logger log) { ArrayList docs = new ArrayList<>(); for (String root : docRoot) { DocstringContainer tempClass = loadJsonDoc(path, root, log); @@ -257,16 +252,13 @@ static List getDocstringContainer(final String path, } /** - * Loads the DocstringContainer for the class with path javaClass, relative to the - * rootDirectory. + * Loads the DocstringContainer for the class with path javaClass, relative to the rootDirectory. * * @param javaClass fully qualified path name for the java class. * @param rootDirectory root directory below the json files are located. - * @return DocstringContainer for javaClass, which will be null if there is no corresponding - * json file. + * @return DocstringContainer for javaClass, which will be null if there is no corresponding json file. 
*/ - static DocstringContainer loadJsonDoc(final String javaClass, final String rootDirectory, - final Logger log) { + static DocstringContainer loadJsonDoc(final String javaClass, final String rootDirectory, final Logger log) { // get reference to appropriate json file location String classPath = javaClass.replace('.', '/'); classPath = classPath.replace('$', '/'); @@ -280,9 +272,8 @@ static DocstringContainer loadJsonDoc(final String javaClass, final String rootD try { out = objectMapper.readValue(jsonFile, DocstringContainer.class); } catch (IOException e) { - log.warning("Parsing of file " + jsonFile - + " as a DocstringContainer class instance failed, " + - "and is being skipped!"); + log.warning("Parsing of file " + jsonFile + " as a DocstringContainer class instance failed, " + + "and is being skipped!"); out = null; } return out; @@ -293,8 +284,8 @@ static DocstringContainer loadJsonDoc(final String javaClass, final String rootD } /** - * Format the input string as a (multiline) Python docstring. It is assumed that the - * beginning/trailing quotation marks are not present, and there is no indentation formatting. + * Format the input string as a (multiline) Python docstring. It is assumed that the beginning/trailing quotation + * marks are not present, and there is no indentation formatting. * * @param input the input string. * @param indent the number of spaces by which to indent. @@ -322,14 +313,13 @@ static String formatDocstring(String input, int indent) { } /** - * Fetch the doc string for a class from a List of DocstringContainers and format appropriately. - * The priority of the DocstringContainers is implicit in their order in the List, so the first - * one (in List order) which returns a non-null docstring will be used. + * Fetch the doc string for a class from a List of DocstringContainers and format appropriately. 
The priority of the + * DocstringContainers is implicit in their order in the List, so the first one (in List order) which returns a + * non-null docstring will be used. * * @param docList List of the DocstringContainer object for the class in question * @param indent the number of spaces by which to indent - * @return the formatted string (or empty String, if no docstring for `method` anywhere in - * `docList`) + * @return the formatted string (or empty String, if no docstring for `method` anywhere in `docList`) */ static String getClassDocstring(final List docList, final int indent) { if (docList == null) { @@ -351,18 +341,16 @@ static String getClassDocstring(final List docList, final in } /** - * Fetch the doc string for a given method from a List of DocstringContainers and format - * appropriately. The priority of the DocstringContainers is implicit in their order in the - * List, so the first one (in List order) which returns a non-null docstring will be used. + * Fetch the doc string for a given method from a List of DocstringContainers and format appropriately. The priority + * of the DocstringContainers is implicit in their order in the List, so the first one (in List order) which returns + * a non-null docstring will be used. 
* * @param docList List of the DocstringContainer object for the class in question * @param method the name of the method * @param indent the number of spaces by which to indent - * @return the formatted string (or empty String, if no docstring for `method` anywhere in - * `docList`) + * @return the formatted string (or empty String, if no docstring for `method` anywhere in `docList`) */ - static String getMethodDocstring(final List docList, final String method, - final int indent) { + static String getMethodDocstring(final List docList, final String method, final int indent) { if (docList == null) { return ""; } @@ -405,8 +393,7 @@ public List getSignatures() { } /** - * Find the commonalities among all the overloads - this is specifically for Python wrapping - * of this function. + * Find the commonalities among all the overloads - this is specifically for Python wrapping of this function. * * @return the common version of all overloads. */ @@ -456,8 +443,7 @@ public static class MethodSignature { private final Type returnType; private final Class returnClass; - MethodSignature(final String params, final Boolean isVarArgs, final Type returnType, - final Class returnClass) { + MethodSignature(final String params, final Boolean isVarArgs, final Type returnType, final Class returnClass) { this.params = params; this.isVarArgs = isVarArgs; this.returnType = returnType; @@ -526,14 +512,12 @@ public String createPythonParams(final boolean addSelf) { * Container for all method signature details. 
*/ public static class MethodContainer { - private final Map methodSignatures = - new LinkedHashMap<>(); + private final Map methodSignatures = new LinkedHashMap<>(); MethodContainer(GroovyStaticImportGenerator.JavaFunction[] sortedMethods) { for (final GroovyStaticImportGenerator.JavaFunction function : sortedMethods) { final String methodName = function.getMethodName(); - methodSignatures.computeIfAbsent(methodName, MethodSignatureCollection::new) - .addFunction(function); + methodSignatures.computeIfAbsent(methodName, MethodSignatureCollection::new).addFunction(function); } } @@ -570,11 +554,10 @@ public String getDestinationFile() { } /** - * Container for docstring details, which should have been extracted from the Java docs and - * populated into json files by a separate task. + * Container for docstring details, which should have been extracted from the Java docs and populated into json + * files by a separate task. * - * Instances of this class are expected to be constructed auto-magically by json parsing using - * the Jackson library. + * Instances of this class are expected to be constructed auto-magically by json parsing using the Jackson library. 
*/ public static class DocstringContainer { private String className; diff --git a/Generators/src/main/java/io/deephaven/python/PythonPlottingGenerator.java b/Generators/src/main/java/io/deephaven/python/PythonPlottingGenerator.java index 2a9f4b68763..1bd30c53942 100644 --- a/Generators/src/main/java/io/deephaven/python/PythonPlottingGenerator.java +++ b/Generators/src/main/java/io/deephaven/python/PythonPlottingGenerator.java @@ -21,8 +21,7 @@ public class PythonPlottingGenerator { private static final Logger log = Logger.getLogger(PythonPlottingGenerator.class.toString()); private static final String FIGURE_PATH = "io.deephaven.db.plot.Figure"; - private static final String PLOTTING_CONVENIENCE_PATH = - "io.deephaven.db.plot.PlottingConvenience"; + private static final String PLOTTING_CONVENIENCE_PATH = "io.deephaven.db.plot.PlottingConvenience"; private static final String[] PLOTTING_CONVENIENCE_DOC_PATHS = { "io.deephaven.db.plot.Figure", "io.deephaven.gui.color.Color", @@ -43,8 +42,7 @@ public class PythonPlottingGenerator { private static String figureWrapperPreamble; private static String figureWrapperOutput; - private static final List catPlotWrapper = - Arrays.asList("catPlot", "catErrorBar", "piePlot"); + private static final List catPlotWrapper = Arrays.asList("catPlot", "catErrorBar", "piePlot"); private static List docRoot; private static List figureDocContainer; @@ -57,23 +55,20 @@ public static void main(String[] args) throws ClassNotFoundException, IOExceptio devroot = args[0]; assertNoChange = Boolean.parseBoolean(args[1]); - plotPreamble = - devroot + "/Generators/src/main/java/io/deephaven/pythonPreambles/PlotPreamble.txt"; + plotPreamble = devroot + "/Generators/src/main/java/io/deephaven/pythonPreambles/PlotPreamble.txt"; plotOutput = devroot + "/Integrations/python/deephaven/Plot/__init__.py"; - figureWrapperPreamble = devroot - + "/Generators/src/main/java/io/deephaven/pythonPreambles/FigureWrapperPreamble.txt"; + figureWrapperPreamble = + 
devroot + "/Generators/src/main/java/io/deephaven/pythonPreambles/FigureWrapperPreamble.txt"; figureWrapperOutput = devroot + "/Integrations/python/deephaven/Plot/figure_wrapper.py"; docRoot = PythonGeneratorParser.getDefaultDocRoot(devroot); - log.info("PythonPlottingGenerator - using system file encoding: " - + System.getProperty("file.encoding")); + log.info("PythonPlottingGenerator - using system file encoding: " + System.getProperty("file.encoding")); figureDocContainer = PythonGeneratorParser.getDocstringContainer(FIGURE_PATH, docRoot, log); plotDocContainer = Arrays.stream(PLOTTING_CONVENIENCE_DOC_PATHS) - .flatMap( - path -> PythonGeneratorParser.getDocstringContainer(path, docRoot, log).stream()) - .collect(Collectors.toList()); + .flatMap(path -> PythonGeneratorParser.getDocstringContainer(path, docRoot, log).stream()) + .collect(Collectors.toList()); createFigureWrapper(); createInitFile(); @@ -87,17 +82,16 @@ private static void createInitFile() throws ClassNotFoundException, IOException final byte[] encoded = Files.readAllBytes(Paths.get(plotPreamble)); final StringBuilder code = new StringBuilder(new String(encoded, StandardCharsets.UTF_8)); - PythonGeneratorParser.logGenerationMessage(log, PLOTTING_CONVENIENCE_PATH, plotPreamble, - plotOutput); + PythonGeneratorParser.logGenerationMessage(log, PLOTTING_CONVENIENCE_PATH, plotPreamble, plotOutput); // create the container for each method's code final List generatedMethods = new ArrayList<>(); createFigureMethod(generatedMethods); // find all the methods for plotting convenience final PythonGeneratorParser.MethodContainer methodContainer = - PythonGeneratorParser.getPublicMethodsDetails(PLOTTING_CONVENIENCE_PATH); - for (final PythonGeneratorParser.MethodSignatureCollection method : methodContainer - .getMethodSignatures().values()) { + PythonGeneratorParser.getPublicMethodsDetails(PLOTTING_CONVENIENCE_PATH); + for (final PythonGeneratorParser.MethodSignatureCollection method : 
methodContainer.getMethodSignatures() + .values()) { final String methodName = method.getMethodName(); // get digested method signature final PythonGeneratorParser.MethodSignature methodDigest = method.reduceSignature(); @@ -108,9 +102,8 @@ private static void createInitFile() throws ClassNotFoundException, IOException // add in all of the generated methods code.append(String.join("\n\n", generatedMethods)); - PythonGeneratorParser.finalizeGeneration(code, assertNoChange, plotOutput, - PLOTTING_CONVENIENCE_PATH, - ":Generators:generatePythonFigureWrapper"); + PythonGeneratorParser.finalizeGeneration(code, assertNoChange, plotOutput, PLOTTING_CONVENIENCE_PATH, + ":Generators:generatePythonFigureWrapper"); } private static void createFigureWrapper() throws ClassNotFoundException, IOException { @@ -121,16 +114,15 @@ private static void createFigureWrapper() throws ClassNotFoundException, IOExcep final byte[] encoded = Files.readAllBytes(Paths.get(figureWrapperPreamble)); final StringBuilder code = new StringBuilder(new String(encoded, StandardCharsets.UTF_8)); - PythonGeneratorParser.logGenerationMessage(log, FIGURE_PATH, figureWrapperPreamble, - figureWrapperOutput); + PythonGeneratorParser.logGenerationMessage(log, FIGURE_PATH, figureWrapperPreamble, figureWrapperOutput); // create the container for each method's code final List generatedMethods = new ArrayList<>(); // find all the methods for Figure final PythonGeneratorParser.MethodContainer methodContainer = - PythonGeneratorParser.getPublicMethodsDetails(FIGURE_PATH); - for (final PythonGeneratorParser.MethodSignatureCollection method : methodContainer - .getMethodSignatures().values()) { + PythonGeneratorParser.getPublicMethodsDetails(FIGURE_PATH); + for (final PythonGeneratorParser.MethodSignatureCollection method : methodContainer.getMethodSignatures() + .values()) { final String methodName = method.getMethodName(); // get digested method signature final PythonGeneratorParser.MethodSignature methodDigest = 
method.reduceSignature(); @@ -141,27 +133,26 @@ private static void createFigureWrapper() throws ClassNotFoundException, IOExcep // add in all of the generated methods code.append(String.join("\n", generatedMethods)); - PythonGeneratorParser.finalizeGeneration(code, assertNoChange, figureWrapperOutput, - FIGURE_PATH, - ":Generators:generatePythonFigureWrapper"); + PythonGeneratorParser.finalizeGeneration(code, assertNoChange, figureWrapperOutput, FIGURE_PATH, + ":Generators:generatePythonFigureWrapper"); } private static void createFigureMethod(final List generatedMethods) { String decorator = ""; final String beginMethod = "def figure(*args):" + - PythonGeneratorParser.getMethodDocstring(plotDocContainer, "figure", 4) + "\n"; + PythonGeneratorParser.getMethodDocstring(plotDocContainer, "figure", 4) + "\n"; final String endMethod = " return FigureWrapper(*args)"; generatedMethods.add(decorator + beginMethod + endMethod); } private static void createMethod(final String methodName, - final PythonGeneratorParser.MethodSignature methodSig, - final List generatedMethods) { + final PythonGeneratorParser.MethodSignature methodSig, + final List generatedMethods) { final String paramString = methodSig.createPythonParams(); final Class rClass = methodSig.getReturnClass(); String decorator = ""; final String beginMethod = "def " + methodName + "(" + paramString + "):" + - PythonGeneratorParser.getMethodDocstring(plotDocContainer, methodName, 4) + "\n"; + PythonGeneratorParser.getMethodDocstring(plotDocContainer, methodName, 4) + "\n"; final String endMethod; if (Figure.class.equals(rClass)) { @@ -169,19 +160,17 @@ private static void createMethod(final String methodName, } else { decorator = "@_convertArguments\n"; if ((rClass != null) && (rClass.isArray())) { - endMethod = " return list(_plotting_convenience_." + methodName + "(" - + paramString + "))\n"; + endMethod = " return list(_plotting_convenience_." 
+ methodName + "(" + paramString + "))\n"; } else { - endMethod = - " return _plotting_convenience_." + methodName + "(" + paramString + ")\n"; + endMethod = " return _plotting_convenience_." + methodName + "(" + paramString + ")\n"; } } generatedMethods.add(decorator + beginMethod + endMethod); } private static void createFigureMethod(final String methodName, - final PythonGeneratorParser.MethodSignature methodSig, - final List generatedMethods) { + final PythonGeneratorParser.MethodSignature methodSig, + final List generatedMethods) { final String paramString = methodSig.createPythonParams(); final String decorator; final String endMethod; @@ -192,9 +181,8 @@ private static void createFigureMethod(final String methodName, decorator = " @_convertArguments\n"; } endMethod = " def " + methodName + "(" + methodSig.createPythonParams(true) + "):" + - PythonGeneratorParser.getMethodDocstring(figureDocContainer, methodName, 8) + - "\n return FigureWrapper(figure=self.figure." + methodName + "(" + paramString - + "))\n"; + PythonGeneratorParser.getMethodDocstring(figureDocContainer, methodName, 8) + + "\n return FigureWrapper(figure=self.figure." 
+ methodName + "(" + paramString + "))\n"; generatedMethods.add(decorator + endMethod); } } diff --git a/Generators/src/main/java/io/deephaven/python/PythonStaticGenerator.java b/Generators/src/main/java/io/deephaven/python/PythonStaticGenerator.java index 1ad82a3f5fb..0282e628aca 100644 --- a/Generators/src/main/java/io/deephaven/python/PythonStaticGenerator.java +++ b/Generators/src/main/java/io/deephaven/python/PythonStaticGenerator.java @@ -13,32 +13,31 @@ /** - * Generates Python file for the public, static method of class(es) determined by the input - * arguments + * Generates Python file for the public, static method of class(es) determined by the input arguments */ public class PythonStaticGenerator { private static final List customTableTools = - Arrays.asList("col", "byteCol", "shortCol", "intCol", "longCol", - "floatCol", "doubleCol", "charCol", "newTable", "colSource", "objColSource"); - private static final List customParquetTools = Arrays.asList("deleteTable", "readTable", - "writeParquetTables", "writeTable", "writeTables"); + Arrays.asList("col", "byteCol", "shortCol", "intCol", "longCol", + "floatCol", "doubleCol", "charCol", "newTable", "colSource", "objColSource"); + private static final List customParquetTools = + Arrays.asList("deleteTable", "readTable", "writeParquetTables", "writeTable", "writeTables"); private static final List customKafkaTools = Arrays.asList(); // "consumeToTable" // which methods should just be skipped private static final List skipGeneration = Arrays.asList( - "io.deephaven.db.tables.utils.TableTools,display", - "io.deephaven.db.tables.utils.DBTimeUtils,convertJimDateTimeQuiet", - "io.deephaven.db.tables.utils.DBTimeUtils,convertJimMicrosDateTimeQuiet", - "io.deephaven.db.tables.utils.DBTimeUtils,convertJimMicrosDateTimeQuietFast", - "io.deephaven.db.tables.utils.DBTimeUtils,convertJimMicrosDateTimeQuietFastTz", - "io.deephaven.db.tables.utils.DBTimeUtils,diff", - "io.deephaven.db.tables.utils.DBTimeUtils,yearDiff", - 
"io.deephaven.db.tables.utils.DBTimeUtils,dayDiff", - "io.deephaven.db.plot.colors.ColorMaps,closureMap", - "io.deephaven.kafka.KafkaTools,consumeToTable", - "io.deephaven.kafka.KafkaTools,jsonSpec", - "io.deephaven.kafka.KafkaTools,avroSpec", - "io.deephaven.kafka.KafkaTools,simpleSpec", - "io.deephaven.kafka.KafkaTools,ignoreSpec"); + "io.deephaven.db.tables.utils.TableTools,display", + "io.deephaven.db.tables.utils.DBTimeUtils,convertJimDateTimeQuiet", + "io.deephaven.db.tables.utils.DBTimeUtils,convertJimMicrosDateTimeQuiet", + "io.deephaven.db.tables.utils.DBTimeUtils,convertJimMicrosDateTimeQuietFast", + "io.deephaven.db.tables.utils.DBTimeUtils,convertJimMicrosDateTimeQuietFastTz", + "io.deephaven.db.tables.utils.DBTimeUtils,diff", + "io.deephaven.db.tables.utils.DBTimeUtils,yearDiff", + "io.deephaven.db.tables.utils.DBTimeUtils,dayDiff", + "io.deephaven.db.plot.colors.ColorMaps,closureMap", + "io.deephaven.kafka.KafkaTools,consumeToTable", + "io.deephaven.kafka.KafkaTools,jsonSpec", + "io.deephaven.kafka.KafkaTools,avroSpec", + "io.deephaven.kafka.KafkaTools,simpleSpec", + "io.deephaven.kafka.KafkaTools,ignoreSpec"); private static final List skipClassDocs = Collections.emptyList(); private static final Logger log = Logger.getLogger(PythonStaticGenerator.class.toString()); private static final String gradleTask = ":Generators:generatePythonIntegrationStaticMethods"; @@ -50,28 +49,23 @@ public static void main(String[] args) throws ClassNotFoundException, IOExceptio final boolean assertNoChange = Boolean.parseBoolean(args[1]); final List docRoot = PythonGeneratorParser.getDefaultDocRoot(devroot); - log.info("PythonStaticGenerator - using system file encoding: " - + System.getProperty("file.encoding")); + log.info("PythonStaticGenerator - using system file encoding: " + System.getProperty("file.encoding")); - PythonGeneratorParser.GeneratorElement[] elements = - PythonGeneratorParser.parse(args, devroot); + PythonGeneratorParser.GeneratorElement[] elements = 
PythonGeneratorParser.parse(args, devroot); for (PythonGeneratorParser.GeneratorElement element : elements) { final String javaClass = element.getClassString(); final String preambleFilePath = element.getPreambleFile(); final String destinationFilePath = element.getDestinationFile(); - classDocContainer = - PythonGeneratorParser.getDocstringContainer(javaClass, docRoot, log); + classDocContainer = PythonGeneratorParser.getDocstringContainer(javaClass, docRoot, log); - PythonGeneratorParser.logGenerationMessage(log, javaClass, preambleFilePath, - destinationFilePath); + PythonGeneratorParser.logGenerationMessage(log, javaClass, preambleFilePath, destinationFilePath); // create the code container final StringBuilder code = new StringBuilder(); // fill in the class documentation if (!skipClassDocs.contains(javaClass)) { - final String classDoc = - PythonGeneratorParser.getClassDocstring(classDocContainer, 0); + final String classDoc = PythonGeneratorParser.getClassDocstring(classDocContainer, 0); code.append(classDoc).append("\n\n"); } @@ -86,9 +80,9 @@ public static void main(String[] args) throws ClassNotFoundException, IOExceptio // find all the methods final PythonGeneratorParser.MethodContainer methodContainer = - PythonGeneratorParser.getPublicStaticMethodsDetails(javaClass, skipGeneration); - for (final PythonGeneratorParser.MethodSignatureCollection method : methodContainer - .getMethodSignatures().values()) { + PythonGeneratorParser.getPublicStaticMethodsDetails(javaClass, skipGeneration); + for (final PythonGeneratorParser.MethodSignatureCollection method : methodContainer.getMethodSignatures() + .values()) { final String methodName = method.getMethodName(); // get digested method signature final PythonGeneratorParser.MethodSignature methodDigest = method.reduceSignature(); @@ -99,27 +93,26 @@ public static void main(String[] args) throws ClassNotFoundException, IOExceptio // add in all of the generated methods code.append(String.join("\n\n", 
generatedMethods)); - PythonGeneratorParser.finalizeGeneration(code, assertNoChange, destinationFilePath, - javaClass, gradleTask); + PythonGeneratorParser.finalizeGeneration(code, assertNoChange, destinationFilePath, javaClass, gradleTask); } } private static void createMethod(String javaClass, String methodName, - PythonGeneratorParser.MethodSignature methodSig, - final List generatedMethods) { + PythonGeneratorParser.MethodSignature methodSig, + final List generatedMethods) { final String paramString = methodSig.createPythonParams(); final Class rClass = methodSig.getReturnClass(); final String beginMethod = "@_passThrough\ndef " + methodName + "(" + paramString + "):" + - PythonGeneratorParser.getMethodDocstring(classDocContainer, methodName, 4) + "\n"; + PythonGeneratorParser.getMethodDocstring(classDocContainer, methodName, 4) + "\n"; final String endMethod; if ((javaClass.equals("io.deephaven.db.tables.utils.ParquetTools") - && customParquetTools.contains(methodName)) || - (javaClass.equals("io.deephaven.db.tables.utils.TableTools") - && customTableTools.contains(methodName)) - || - (javaClass.equals("io.deephaven.kafka.KafkaTools") - && customKafkaTools.contains(methodName))) { + && customParquetTools.contains(methodName)) || + (javaClass.equals("io.deephaven.db.tables.utils.TableTools") + && customTableTools.contains(methodName)) + || + (javaClass.equals("io.deephaven.kafka.KafkaTools") + && customKafkaTools.contains(methodName))) { endMethod = " return _custom_" + methodName + "(" + paramString + ")\n"; } else if ((rClass != null) && (rClass.isArray())) { endMethod = " return list(_java_type_." 
+ methodName + "(" + paramString + "))\n"; @@ -137,61 +130,60 @@ private static void createMethod(String javaClass, String methodName, */ private static void makeGenericPreamble(final StringBuilder code, final String javaClass) { code.append("#\n" + - "# Copyright (c) 2016-2021 Deephaven Data Labs and Patent Pending\n" + - "#\n" + - "\n" + - "##############################################################################\n" + - "# This code is auto generated. DO NOT EDIT FILE!\n" + - "# Run \"./gradlew " + gradleTask + "\" to generate\n" + - "##############################################################################\n" + - "\n" + - "\n" + - "import jpy\n" + - "import wrapt\n" + - "\n" + - "\n" + - "_java_type_ = None # None until the first _defineSymbols() call\n" + - "\n" + - "\n" + - "def _defineSymbols():\n" + - " \"\"\"\n" + - " Defines appropriate java symbol, which requires that the jvm has been initialized through the :class:`jpy` module,\n" - + - " for use throughout the module AT RUNTIME. 
This is versus static definition upon first import, which would lead to an\n" - + - " exception if the jvm wasn't initialized BEFORE importing the module.\n" + - " \"\"\"\n\n" + - " if not jpy.has_jvm():\n" + - " raise SystemError(\"No java functionality can be used until the JVM has been initialized through the jpy module\")\n" - + - "\n" + - " global _java_type_\n" + - " if _java_type_ is None:\n" + - " # This will raise an exception if the desired object is not the classpath\n" + - " _java_type_ = jpy.get_type(\"" + javaClass + "\")\n" + - "\n" + - "\n" + - "# every module method should be decorated with @_passThrough\n" + - "@wrapt.decorator\n" + - "def _passThrough(wrapped, instance, args, kwargs):\n" + - " \"\"\"\n" + - " For decoration of module methods, to define necessary symbols at runtime\n\n" + - " :param wrapped: the method to be decorated\n" + - " :param instance: the object to which the wrapped function was bound when it was called\n" - + - " :param args: the argument list for `wrapped`\n" + - " :param kwargs: the keyword argument dictionary for `wrapped`\n" + - " :return: the decorated version of the method\n" + - " \"\"\"\n\n" + - " _defineSymbols()\n" + - " return wrapped(*args, **kwargs)\n" + - "\n\n" + - "# Define all of our functionality, if currently possible\n" + - "try:\n" + - " _defineSymbols()\n" + - "except Exception as e:\n" + - " pass\n" + - "\n\n"); + "# Copyright (c) 2016-2021 Deephaven Data Labs and Patent Pending\n" + + "#\n" + + "\n" + + "##############################################################################\n" + + "# This code is auto generated. 
DO NOT EDIT FILE!\n" + + "# Run \"./gradlew " + gradleTask + "\" to generate\n" + + "##############################################################################\n" + + "\n" + + "\n" + + "import jpy\n" + + "import wrapt\n" + + "\n" + + "\n" + + "_java_type_ = None # None until the first _defineSymbols() call\n" + + "\n" + + "\n" + + "def _defineSymbols():\n" + + " \"\"\"\n" + + " Defines appropriate java symbol, which requires that the jvm has been initialized through the :class:`jpy` module,\n" + + + " for use throughout the module AT RUNTIME. This is versus static definition upon first import, which would lead to an\n" + + + " exception if the jvm wasn't initialized BEFORE importing the module.\n" + + " \"\"\"\n\n" + + " if not jpy.has_jvm():\n" + + " raise SystemError(\"No java functionality can be used until the JVM has been initialized through the jpy module\")\n" + + + "\n" + + " global _java_type_\n" + + " if _java_type_ is None:\n" + + " # This will raise an exception if the desired object is not the classpath\n" + + " _java_type_ = jpy.get_type(\"" + javaClass + "\")\n" + + "\n" + + "\n" + + "# every module method should be decorated with @_passThrough\n" + + "@wrapt.decorator\n" + + "def _passThrough(wrapped, instance, args, kwargs):\n" + + " \"\"\"\n" + + " For decoration of module methods, to define necessary symbols at runtime\n\n" + + " :param wrapped: the method to be decorated\n" + + " :param instance: the object to which the wrapped function was bound when it was called\n" + + " :param args: the argument list for `wrapped`\n" + + " :param kwargs: the keyword argument dictionary for `wrapped`\n" + + " :return: the decorated version of the method\n" + + " \"\"\"\n\n" + + " _defineSymbols()\n" + + " return wrapped(*args, **kwargs)\n" + + "\n\n" + + "# Define all of our functionality, if currently possible\n" + + "try:\n" + + " _defineSymbols()\n" + + "except Exception as e:\n" + + " pass\n" + + "\n\n"); } } diff --git 
a/IO/src/main/java/io/deephaven/io/CompressedFileUtil.java b/IO/src/main/java/io/deephaven/io/CompressedFileUtil.java index 2b6a9efd785..c9ab16b3f5c 100644 --- a/IO/src/main/java/io/deephaven/io/CompressedFileUtil.java +++ b/IO/src/main/java/io/deephaven/io/CompressedFileUtil.java @@ -23,10 +23,9 @@ public class CompressedFileUtil { public static final long UINT_TO_LONG = 0xFFFFFFFFL; public static final int BZIP2_MAGIC = ((int) 'B') | (((int) 'Z') << 8) | (((int) 'h') << 16); public static final String[] COMPRESSION_EXTENSIONS = - new String[] {".gz", ".tgz", ".tar.gz", ".zip", ".bz2", ".7z"}; - private static final int TAR_CHECKSUM_OFFSET = - TarConstants.NAMELEN + TarConstants.MODELEN + TarConstants.UIDLEN + TarConstants.GIDLEN - + TarConstants.SIZELEN + TarConstants.MODTIMELEN; + new String[] {".gz", ".tgz", ".tar.gz", ".zip", ".bz2", ".7z"}; + private static final int TAR_CHECKSUM_OFFSET = TarConstants.NAMELEN + TarConstants.MODELEN + TarConstants.UIDLEN + + TarConstants.GIDLEN + TarConstants.SIZELEN + TarConstants.MODTIMELEN; /** * Unzip the file @@ -55,8 +54,7 @@ public static int bunzip2(File file) throws IOException { * @param inputFiles * @throws IOException */ - public static void bzip2(String archiveName, String baseDir, String[] inputFiles) - throws IOException { + public static void bzip2(String archiveName, String baseDir, String[] inputFiles) throws IOException { if (inputFiles.length == 0) { throw new IllegalArgumentException("Need at least one input file."); } @@ -87,8 +85,7 @@ public static void bzip2(String archiveName, String baseDir, String[] inputFiles public static void unbzip2(File archive) throws IOException { if (!archive.exists()) { - throw new IllegalArgumentException( - "File " + archive.getAbsolutePath() + " doesn't exist."); + throw new IllegalArgumentException("File " + archive.getAbsolutePath() + " doesn't exist."); } String workDir = archive.getParent(); ArrayList tarCmdList = new ArrayList(); @@ -144,8 +141,7 @@ public static 
void zipFile(File inputFile, File zipFilePath) throws IOException * @param stdoutFile redirect output if not null * @param stderrFile redirect output if not null */ - private static ProcessExecutor exec(List commandList, String workDir, String stdoutFile, - String stderrFile) { + private static ProcessExecutor exec(List commandList, String workDir, String stdoutFile, String stderrFile) { return new ProcessExecutor(commandList, workDir, stdoutFile, stderrFile); } @@ -153,15 +149,15 @@ public static InputStream openPossiblyCompressedFiles(String[] fileNames) throws List inputStreams = new ArrayList<>(); for (String currentFileName : fileNames) { inputStreams.add(new MultiFileInputStream.DecoratedInputStream(currentFileName, - openPossiblyCompressedFile(currentFileName))); + openPossiblyCompressedFile(currentFileName))); } - return new MultiFileInputStream(inputStreams - .toArray(new MultiFileInputStream.DecoratedInputStream[inputStreams.size()])); + return new MultiFileInputStream( + inputStreams.toArray(new MultiFileInputStream.DecoratedInputStream[inputStreams.size()])); } /** - * If the given file doesn't exist, looks to see if a compressed version of the file exists. If - * no file is found, returns the original file name. + * If the given file doesn't exist, looks to see if a compressed version of the file exists. If no file is found, + * returns the original file name. */ public static String addCompressionExtensionIfNeeded(String sFileName) { File file = new File(sFileName); @@ -178,11 +174,11 @@ public static String addCompressionExtensionIfNeeded(String sFileName) { } /** - * Open the file, automatically determining if it has been zipped, GZipped, BZip2'd or not. The - * returned input stream will be buffered. See also {@link #addCompressionExtensionIfNeeded}. + * Open the file, automatically determining if it has been zipped, GZipped, BZip2'd or not. The returned input + * stream will be buffered. See also {@link #addCompressionExtensionIfNeeded}. 
*/ - public static InputStream openPossiblyCompressedFile(String sFileName, - Boolean useMicrosPcapEmulation) throws IOException { + public static InputStream openPossiblyCompressedFile(String sFileName, Boolean useMicrosPcapEmulation) + throws IOException { if (useMicrosPcapEmulation == null) { return openPossiblyCompressedFile(new FileInputStreamFactory(sFileName)); } @@ -209,7 +205,7 @@ public static InputStream openPossiblyCompressedFile(String sFileName) throws IO } public static InputStream openPossiblyCompressedFile(String sFileName, int bufferedSize, - Boolean useMicrosPcapEmulation) throws IOException { + Boolean useMicrosPcapEmulation) throws IOException { if (useMicrosPcapEmulation == null) { return openPossiblyCompressedFile(new FileInputStreamFactory(sFileName, bufferedSize)); } @@ -231,27 +227,25 @@ public static InputStream openPossiblyCompressedFile(String sFileName, int buffe return inputStream; } - public static InputStream openPossiblyCompressedFile(String sFileName, int bufferedSize) - throws IOException { + public static InputStream openPossiblyCompressedFile(String sFileName, int bufferedSize) throws IOException { return openPossiblyCompressedFile(sFileName, bufferedSize, true); } /** * Open the file, automatically determining if it has been zipped, GZipped, BZip2'd or not. 
*/ - public static InputStream openPossiblyCompressedFile(InputStreamFactory inputStreamFactory) - throws IOException { + public static InputStream openPossiblyCompressedFile(InputStreamFactory inputStreamFactory) throws IOException { InputStreamFactory decompressedInputStreamFactory = - createInputStreamFactoryForPossiblyCompressedStream(inputStreamFactory); + createInputStreamFactoryForPossiblyCompressedStream(inputStreamFactory); decompressedInputStreamFactory = - createInputStreamFactoryForPossiblyTarredStream(decompressedInputStreamFactory); + createInputStreamFactoryForPossiblyTarredStream(decompressedInputStreamFactory); InputStream inputStream = decompressedInputStreamFactory.createInputStream(); return inputStream; } // ---------------------------------------------------------------- public static InputStreamFactory createInputStreamFactoryForPossiblyCompressedStream( - final InputStreamFactory inputStreamFactory) throws IOException { + final InputStreamFactory inputStreamFactory) throws IOException { // Read in the header final InputStream testStream = inputStreamFactory.createInputStream(); int nMagic2 = getMagic2(testStream); @@ -290,12 +284,11 @@ public String getDescription() { private String m_sSubFileName; public InputStream createInputStream() throws IOException { - ZipInputStream zipInputStream = - new ZipInputStream(inputStreamFactory.createInputStream()); + ZipInputStream zipInputStream = new ZipInputStream(inputStreamFactory.createInputStream()); ZipEntry zipEntry = zipInputStream.getNextEntry(); if (null == zipEntry) { throw new FileNotFoundException( - "No zip entries in " + inputStreamFactory.getDescription() + "."); + "No zip entries in " + inputStreamFactory.getDescription() + "."); } m_sSubFileName = zipEntry.getName(); return zipInputStream; @@ -303,7 +296,7 @@ public InputStream createInputStream() throws IOException { public String getDescription() { return "zipped " + inputStreamFactory.getDescription() - + (null == m_sSubFileName ? 
"" : " (sub-file \"" + m_sSubFileName + "\")"); + + (null == m_sSubFileName ? "" : " (sub-file \"" + m_sSubFileName + "\")"); } }; @@ -313,13 +306,12 @@ public String getDescription() { private String m_sSubFileName; public InputStream createInputStream() throws IOException { - SevenZipInputStream zipInputStream = - new SevenZipInputStream(inputStreamFactory); - SevenZipInputStream.Entry entry = zipInputStream - .getNextEntry(SevenZipInputStream.Behavior.SKIP_WHEN_NO_STREAM); + SevenZipInputStream zipInputStream = new SevenZipInputStream(inputStreamFactory); + SevenZipInputStream.Entry entry = + zipInputStream.getNextEntry(SevenZipInputStream.Behavior.SKIP_WHEN_NO_STREAM); if (null == entry) { throw new FileNotFoundException( - "No zip entries in " + inputStreamFactory.getDescription() + "."); + "No zip entries in " + inputStreamFactory.getDescription() + "."); } m_sSubFileName = entry.getName(); return zipInputStream; @@ -327,7 +319,7 @@ public InputStream createInputStream() throws IOException { public String getDescription() { return "7zipped " + inputStreamFactory.getDescription() - + (null == m_sSubFileName ? "" : " (sub-file \"" + m_sSubFileName + "\")"); + + (null == m_sSubFileName ? 
"" : " (sub-file \"" + m_sSubFileName + "\")"); } }; } else { @@ -346,8 +338,8 @@ public String getDescription() { } public static long getMagic6(InputStream testStream, int nMagic4) throws IOException { - return ((testStream.read() & UINT_TO_LONG) << 32) - | ((testStream.read() & UINT_TO_LONG) << 40) | (nMagic4 & UINT_TO_LONG); + return ((testStream.read() & UINT_TO_LONG) << 32) | ((testStream.read() & UINT_TO_LONG) << 40) + | (nMagic4 & UINT_TO_LONG); } public static int getMagic4(InputStream testStream, int nMagic2) throws IOException { @@ -360,7 +352,7 @@ public static int getMagic2(InputStream testStream) throws IOException { // ---------------------------------------------------------------- public static InputStreamFactory createInputStreamFactoryForPossiblyTarredStream( - final InputStreamFactory inputStreamFactory) throws IOException { + final InputStreamFactory inputStreamFactory) throws IOException { // Read in the header byte[] header = new byte[TarConstants.DEFAULT_RCDSIZE]; InputStream testStream = inputStreamFactory.createInputStream(); @@ -379,8 +371,7 @@ public static InputStreamFactory createInputStreamFactoryForPossiblyTarredStream // see if the checksum is correct long nExpectedChecksum; try { - nExpectedChecksum = - TarUtils.parseOctal(header, TAR_CHECKSUM_OFFSET, TarConstants.CHKSUMLEN); + nExpectedChecksum = TarUtils.parseOctal(header, TAR_CHECKSUM_OFFSET, TarConstants.CHKSUMLEN); } catch (final IllegalArgumentException ignored) { return inputStreamFactory; } @@ -398,11 +389,10 @@ public static InputStreamFactory createInputStreamFactoryForPossiblyTarredStream public InputStream createInputStream() throws IOException { TarArchiveInputStream tarInputStream = - new TarArchiveInputStream(inputStreamFactory.createInputStream()); + new TarArchiveInputStream(inputStreamFactory.createInputStream()); TarArchiveEntry tarEntry = tarInputStream.getNextTarEntry(); if (null == tarEntry) { - throw new FileNotFoundException( - "No tar entries in " + 
inputStreamFactory.getDescription() + "."); + throw new FileNotFoundException("No tar entries in " + inputStreamFactory.getDescription() + "."); } m_sSubFileName = tarEntry.getName(); return tarInputStream; @@ -410,7 +400,7 @@ public InputStream createInputStream() throws IOException { public String getDescription() { return "tarred " + inputStreamFactory.getDescription() - + (null == m_sSubFileName ? "" : " (sub-file \"" + m_sSubFileName + "\")"); + + (null == m_sSubFileName ? "" : " (sub-file \"" + m_sSubFileName + "\")"); } }; @@ -434,8 +424,7 @@ private static class ProcessExecutor { outPipe.start(); errPipe.start(); } catch (Exception e) { - throw new RuntimeException( - "Error while executing native command: " + cmdList.get(0), e); + throw new RuntimeException("Error while executing native command: " + cmdList.get(0), e); } } @@ -476,8 +465,7 @@ public void run() { throw new UncheckedIOException("Error while writing to pipe.", e); } try { - // we need a very quick sleep, otherwise the executed process is being slowed - // down by java + // we need a very quick sleep, otherwise the executed process is being slowed down by java Thread.sleep(50); } catch (InterruptedException x) { return; diff --git a/IO/src/main/java/io/deephaven/io/NioUtil.java b/IO/src/main/java/io/deephaven/io/NioUtil.java index c8693f599e5..2d9a937082a 100644 --- a/IO/src/main/java/io/deephaven/io/NioUtil.java +++ b/IO/src/main/java/io/deephaven/io/NioUtil.java @@ -28,19 +28,17 @@ public class NioUtil { // ---------------------------------------------------------------- /** - * Use reflection to change the collection implementations so iteration operations used in the - * selector implementation will not produce garbage. + * Use reflection to change the collection implementations so iteration operations used in the selector + * implementation will not produce garbage. * *

    - * This is only applied when the system property {@code java.specification.version} is equal to - * "1.8". + * This is only applied when the system property {@code java.specification.version} is equal to "1.8". * *

    - * We can do this because, by looking at the source code, we can tell that there are no - * simultaneous iterations so reusing one iterator is OK. Because of concurrent modification - * issues and thread safety issues, this is generally likely to be the case anyway. The - * implementation of selector is not likely to change between minor JDK revisions. A major JDK - * release might produce a rewrite, but in that case we can check the JDK version and apply the + * We can do this because, by looking at the source code, we can tell that there are no simultaneous iterations so + * reusing one iterator is OK. Because of concurrent modification issues and thread safety issues, this is generally + * likely to be the case anyway. The implementation of selector is not likely to change between minor JDK revisions. + * A major JDK release might produce a rewrite, but in that case we can check the JDK version and apply the * appropriate set of patches. */ public static Selector reduceSelectorGarbage(Selector selector) { @@ -52,10 +50,8 @@ public static Selector reduceSelectorGarbage(Selector selector) { } private static Selector reduceSelectorGarbageImpl(Selector selector) { - // This code does several things that normally would be restricted, like accessing the Sun - // classes - // and changing the accessibility of fields via reflection. We need to make sure that we can - // do this, + // This code does several things that normally would be restricted, like accessing the Sun classes + // and changing the accessibility of fields via reflection. We need to make sure that we can do this, // but client code cannot, so we need to do all of this within a 'privileged' block. 
return AccessController.doPrivileged((PrivilegedAction) () -> { try { @@ -77,17 +73,15 @@ private static Selector reduceSelectorGarbageImpl(Selector selector) { Field selectedKeysField = selectorImplClass.getDeclaredField("selectedKeys"); selectedKeysField.setAccessible(true); - Field publicSelectedKeysField = - selectorImplClass.getDeclaredField("publicSelectedKeys"); + Field publicSelectedKeysField = selectorImplClass.getDeclaredField("publicSelectedKeys"); publicSelectedKeysField.setAccessible(true); Set newSelectedKeys = new LowGarbageArraySet(); selectedKeysField.set(selector, newSelectedKeys); publicSelectedKeysField.set(selector, newSelectedKeys); if (System.getProperty("os.name").startsWith("Windows") - && System.getProperty("java.vendor").startsWith("Oracle")) { - Class windowsSelectorImplClass = - Class.forName("sun.nio.ch.WindowsSelectorImpl"); + && System.getProperty("java.vendor").startsWith("Oracle")) { + Class windowsSelectorImplClass = Class.forName("sun.nio.ch.WindowsSelectorImpl"); Require.instanceOf(selector, "selector", windowsSelectorImplClass); Field threadsField = windowsSelectorImplClass.getDeclaredField("threads"); @@ -105,8 +99,7 @@ private static Selector reduceSelectorGarbageImpl(Selector selector) { fdToKeyField.set(selector, newFdToKey); } else if (System.getProperty("os.name").startsWith("SunOS")) { - Class devPollSelectorImplClass = - Class.forName("sun.nio.ch.DevPollSelectorImpl"); + Class devPollSelectorImplClass = Class.forName("sun.nio.ch.DevPollSelectorImpl"); Require.instanceOf(selector, "selector", devPollSelectorImplClass); Field fdToKeyField = devPollSelectorImplClass.getDeclaredField("fdToKey"); @@ -116,8 +109,7 @@ private static Selector reduceSelectorGarbageImpl(Selector selector) { } return selector; - } catch (final NoSuchFieldException | IllegalAccessException - | ClassNotFoundException e) { + } catch (final NoSuchFieldException | IllegalAccessException | ClassNotFoundException e) { throw 
Assert.exceptionNeverCaught(e); } }); diff --git a/IO/src/main/java/io/deephaven/io/log/LogEntry.java b/IO/src/main/java/io/deephaven/io/log/LogEntry.java index 8fe273877ed..d69767ebc26 100644 --- a/IO/src/main/java/io/deephaven/io/log/LogEntry.java +++ b/IO/src/main/java/io/deephaven/io/log/LogEntry.java @@ -172,8 +172,7 @@ public LogEntry append(ObjFormatter objFormatter, T t) { } @Override - public LogEntry append(ObjIntIntFormatter objFormatter, T t, int nOffset, - int nLength) { + public LogEntry append(ObjIntIntFormatter objFormatter, T t, int nOffset, int nLength) { return this; } @@ -203,8 +202,7 @@ public LogEntry appendTimestamp(final long utcMillis, final TimestampBuffer tb) } @Override - public LogEntry appendTimestampMicros(final long utcMicros, - final TimestampBufferMicros tb) { + public LogEntry appendTimestampMicros(final long utcMicros, final TimestampBufferMicros tb) { return this; } diff --git a/IO/src/main/java/io/deephaven/io/log/LogSink.java b/IO/src/main/java/io/deephaven/io/log/LogSink.java index 8b57fc6af6a..feee7206c1f 100644 --- a/IO/src/main/java/io/deephaven/io/log/LogSink.java +++ b/IO/src/main/java/io/deephaven/io/log/LogSink.java @@ -32,8 +32,7 @@ public interface LogSink { void terminate(); /** - * One element of a log sink - guaranteed to be logged without being split over rolling file - * boundaries, etc. + * One element of a log sink - guaranteed to be logged without being split over rolling file boundaries, etc. */ interface Element { long getTimestampMicros(); @@ -48,9 +47,8 @@ interface Element { } /** - * An interceptor is called with each element logged, *and* with the formatted output. It will - * receive buffers that are flipped, and should not change the position or limit of these - * buffers. + * An interceptor is called with each element logged, *and* with the formatted output. It will receive buffers that + * are flipped, and should not change the position or limit of these buffers. 
*/ interface Interceptor { void element(T e, LogOutput output) throws IOException; @@ -99,9 +97,8 @@ interface LogSinkWriter> { } interface Factory { - LogSink create(String basePath, int rollInterval, DateFormat rollFormat, - Pool elementPool, boolean append, LogOutput outputBuffer, String header, - LogSinkWriter> maybeWriter); + LogSink create(String basePath, int rollInterval, DateFormat rollFormat, Pool elementPool, boolean append, + LogOutput outputBuffer, String header, LogSinkWriter> maybeWriter); } public static final Null NULL = new Null(); diff --git a/IO/src/main/java/io/deephaven/io/log/impl/DelayedLogEntryImpl.java b/IO/src/main/java/io/deephaven/io/log/impl/DelayedLogEntryImpl.java index b4365cabcab..2afdb0e6eff 100644 --- a/IO/src/main/java/io/deephaven/io/log/impl/DelayedLogEntryImpl.java +++ b/IO/src/main/java/io/deephaven/io/log/impl/DelayedLogEntryImpl.java @@ -54,14 +54,11 @@ private enum Action { private final FastLongArray longs = new FastLongArray(8); private final FastFloatArray floats = new FastFloatArray(8); private final FastDoubleArray doubles = new FastDoubleArray(8); - private final FastArray sequences = - new FastArray<>(CharSequence.class, null, 8, false); - private final FastArray timestamps = - new FastArray<>(TimestampBuffer.class, null, 8, false); + private final FastArray sequences = new FastArray<>(CharSequence.class, null, 8, false); + private final FastArray timestamps = new FastArray<>(TimestampBuffer.class, null, 8, false); private final FastArray timestampsMicros = - new FastArray<>(TimestampBufferMicros.class, null, 8, false); - private final FastArray throwables = - new FastArray<>(Throwable.class, null, 8, false); + new FastArray<>(TimestampBufferMicros.class, null, 8, false); + private final FastArray throwables = new FastArray<>(Throwable.class, null, 8, false); private void reset() { actions.quickReset(); @@ -111,8 +108,7 @@ public LogEntry start(final LogSink sink, final LogLevel level, final long curre } @Override 
- public LogEntry start(final LogSink sink, final LogLevel level, final long currentTimeMicros, - final Throwable t) { + public LogEntry start(final LogSink sink, final LogLevel level, final long currentTimeMicros, final Throwable t) { starts.getAndIncrement(); this.timestamp = currentTimeMicros; this.level = level; @@ -393,16 +389,14 @@ private void pushToDelegate(LogOutput logOutputBuffer) { logOutputBuffer = logOutputBuffer.append(seqs[seqPosition++]); break; case APPEND_CHARSEQ_RANGE: - logOutputBuffer = logOutputBuffer.append(seqs[seqPosition++], is[intPosition++], - is[intPosition++]); + logOutputBuffer = logOutputBuffer.append(seqs[seqPosition++], is[intPosition++], is[intPosition++]); break; case APPEND_TIMESTAMP: - logOutputBuffer = - logOutputBuffer.appendTimestamp(ls[longPosition++], times[timePosition++]); + logOutputBuffer = logOutputBuffer.appendTimestamp(ls[longPosition++], times[timePosition++]); break; case APPEND_TIMESTAMP_MICROS: logOutputBuffer = logOutputBuffer.appendTimestampMicros(ls[longPosition++], - timesMicros[timeMicrosPosition++]); + timesMicros[timeMicrosPosition++]); break; case APPEND_NULL: logOutputBuffer = logOutputBuffer.append((LogOutputAppendable) null); diff --git a/IO/src/main/java/io/deephaven/io/log/impl/DelayedLogEntryImpl2.java b/IO/src/main/java/io/deephaven/io/log/impl/DelayedLogEntryImpl2.java index bb19335100b..4ac05c3b147 100644 --- a/IO/src/main/java/io/deephaven/io/log/impl/DelayedLogEntryImpl2.java +++ b/IO/src/main/java/io/deephaven/io/log/impl/DelayedLogEntryImpl2.java @@ -54,8 +54,7 @@ public LogOutput markEndOfHeader() { return this; } - // This doesn't apply for this implementation, it's intended for when the buffer is being - // created + // This doesn't apply for this implementation, it's intended for when the buffer is being created @Override public int getEndOfHeaderOffset() { return 0; @@ -89,14 +88,12 @@ public ByteBuffer nextBuffer(ByteBuffer lastBuffer) { } }; - private final ByteBufferStreams.Output 
primitiveWriter = - new ByteBufferStreams.Output(null, SINK); + private final ByteBufferStreams.Output primitiveWriter = new ByteBufferStreams.Output(null, SINK); private Object[] objects = null; private int objectsPtr = 0; private int numActions = 0; - private final ByteBufferStreams.Input primitiveReader = - new ByteBufferStreams.Input(null, SOURCE); + private final ByteBufferStreams.Input primitiveReader = new ByteBufferStreams.Input(null, SOURCE); private void reset() { primitiveWriter.setBuffer(bufferPool.take()); @@ -133,14 +130,13 @@ public LogEntry start(final LogSink logSink, final LogLevel level) { } @Override - public LogEntry start(final LogSink logSink, final LogLevel level, - final long currentTimeMicros) { + public LogEntry start(final LogSink logSink, final LogLevel level, final long currentTimeMicros) { return start(logSink, level, currentTimeMicros, null); } @Override public LogEntry start(final LogSink logSink, final LogLevel level, final long currentTimeMicros, - final Throwable t) { + final Throwable t) { this.timestamp = currentTimeMicros; this.level = level; this.throwable = t; @@ -494,13 +490,11 @@ private void pushToDelegate(LogOutput logOutputBuffer) { break; } case APPEND_DOUBLE: { - logOutputBuffer = - logOutputBuffer.appendDouble(primitiveReader.readDouble()); + logOutputBuffer = logOutputBuffer.appendDouble(primitiveReader.readDouble()); break; } case APPEND_CHARSEQ: { - logOutputBuffer = - logOutputBuffer.append((CharSequence) objects[objectsPtr++]); + logOutputBuffer = logOutputBuffer.append((CharSequence) objects[objectsPtr++]); break; } case APPEND_CHARSEQ_RANGE: { diff --git a/IO/src/main/java/io/deephaven/io/log/impl/DelayedLogEntryUnsafeImpl.java b/IO/src/main/java/io/deephaven/io/log/impl/DelayedLogEntryUnsafeImpl.java index c418e6d49e2..00ffbc9258e 100644 --- a/IO/src/main/java/io/deephaven/io/log/impl/DelayedLogEntryUnsafeImpl.java +++ b/IO/src/main/java/io/deephaven/io/log/impl/DelayedLogEntryUnsafeImpl.java @@ -135,8 +135,7 
@@ public LogEntry start(final LogSink sink, final LogLevel level, final long curre } @Override - public LogEntry start(final LogSink sink, final LogLevel level, final long currentTimeMicros, - final Throwable t) { + public LogEntry start(final LogSink sink, final LogLevel level, final long currentTimeMicros, final Throwable t) { starts.getAndIncrement(); this.timestamp = currentTimeMicros; this.level = level; @@ -500,20 +499,19 @@ private void pushToDelegate(LogOutput logOutputBuffer) { logOutputBuffer = logOutputBuffer.appendDouble(reader.nextDouble()); break; case APPEND_CHARSEQ: - logOutputBuffer = - logOutputBuffer.append((CharSequence) im[immutablePosition++]); + logOutputBuffer = logOutputBuffer.append((CharSequence) im[immutablePosition++]); break; case APPEND_CHARSEQ_RANGE: - logOutputBuffer = logOutputBuffer.append((CharSequence) im[immutablePosition++], - reader.nextInt(), reader.nextInt()); + logOutputBuffer = logOutputBuffer.append((CharSequence) im[immutablePosition++], reader.nextInt(), + reader.nextInt()); break; case APPEND_TIMESTAMP: logOutputBuffer = logOutputBuffer.appendTimestamp(reader.nextLong(), - (TimestampBuffer) im[immutablePosition++]); + (TimestampBuffer) im[immutablePosition++]); break; case APPEND_TIMESTAMP_MICROS: logOutputBuffer = logOutputBuffer.appendTimestampMicros(reader.nextLong(), - (TimestampBufferMicros) im[immutablePosition++]); + (TimestampBufferMicros) im[immutablePosition++]); break; case APPEND_NULL: logOutputBuffer = logOutputBuffer.append((LogOutputAppendable) null); @@ -537,12 +535,8 @@ private void pushToDelegate(LogOutput logOutputBuffer) { } break; case APPEND_BYTE: - logOutputBuffer = logOutputBuffer.append((short) reader.nextByte()); // cast to - // short so - // it gets - // printed - // as - // decimal + logOutputBuffer = logOutputBuffer.append((short) reader.nextByte()); // cast to short so it gets + // printed as decimal break; case END_OF_HEADER: logOutputBuffer.markEndOfHeader(); diff --git 
a/IO/src/main/java/io/deephaven/io/log/impl/DynamicDelayedLogEntryPoolImpl.java b/IO/src/main/java/io/deephaven/io/log/impl/DynamicDelayedLogEntryPoolImpl.java index ab2c3992d32..0c998f50d3a 100644 --- a/IO/src/main/java/io/deephaven/io/log/impl/DynamicDelayedLogEntryPoolImpl.java +++ b/IO/src/main/java/io/deephaven/io/log/impl/DynamicDelayedLogEntryPoolImpl.java @@ -10,18 +10,16 @@ import io.deephaven.io.log.LogEntryPool; import io.deephaven.io.logger.LoggerTimeSource; -public class DynamicDelayedLogEntryPoolImpl extends ThreadSafeLenientFixedSizePool - implements LogEntryPool { +public class DynamicDelayedLogEntryPoolImpl extends ThreadSafeLenientFixedSizePool implements LogEntryPool { private final LoggerTimeSource timeSource; - public DynamicDelayedLogEntryPoolImpl(String name, int entryCount, - final LoggerTimeSource timeSource) { + public DynamicDelayedLogEntryPoolImpl(String name, int entryCount, final LoggerTimeSource timeSource) { super(name, entryCount, - new Function.Nullary() { - public LogEntry call() { - return new DelayedLogEntryImpl(timeSource); - } - }, null); + new Function.Nullary() { + public LogEntry call() { + return new DelayedLogEntryImpl(timeSource); + } + }, null); this.timeSource = timeSource; } diff --git a/IO/src/main/java/io/deephaven/io/log/impl/DynamicDelayedLogEntryUnsafePoolImpl.java b/IO/src/main/java/io/deephaven/io/log/impl/DynamicDelayedLogEntryUnsafePoolImpl.java index ae95306f582..bbc8ea4848b 100644 --- a/IO/src/main/java/io/deephaven/io/log/impl/DynamicDelayedLogEntryUnsafePoolImpl.java +++ b/IO/src/main/java/io/deephaven/io/log/impl/DynamicDelayedLogEntryUnsafePoolImpl.java @@ -10,15 +10,15 @@ import io.deephaven.io.log.LogEntryPool; public class DynamicDelayedLogEntryUnsafePoolImpl extends ThreadSafeLenientFixedSizePool - implements LogEntryPool { + implements LogEntryPool { public DynamicDelayedLogEntryUnsafePoolImpl(String name, int entryCount) { super(name, entryCount, - new Function.Nullary() { - public LogEntry call() 
{ - return new DelayedLogEntryUnsafeImpl(); - } - }, null); + new Function.Nullary() { + public LogEntry call() { + return new DelayedLogEntryUnsafeImpl(); + } + }, null); } @Override diff --git a/IO/src/main/java/io/deephaven/io/log/impl/DynamicLogBufferPoolImpl.java b/IO/src/main/java/io/deephaven/io/log/impl/DynamicLogBufferPoolImpl.java index 332ec7557e3..eca75d36ce7 100644 --- a/IO/src/main/java/io/deephaven/io/log/impl/DynamicLogBufferPoolImpl.java +++ b/IO/src/main/java/io/deephaven/io/log/impl/DynamicLogBufferPoolImpl.java @@ -10,8 +10,7 @@ import java.nio.ByteBuffer; -public class DynamicLogBufferPoolImpl extends ThreadSafeLenientFixedSizePool - implements LogBufferPool { +public class DynamicLogBufferPoolImpl extends ThreadSafeLenientFixedSizePool implements LogBufferPool { // public DynamicLogBufferPoolImpl(final int poolSize, final int byteBufferCapacity) { // super(poolSize, @@ -22,11 +21,11 @@ public class DynamicLogBufferPoolImpl extends ThreadSafeLenientFixedSizePool() { - public ByteBuffer call() { - return ByteBuffer.allocate(byteBufferCapacity); - } - }, null); + new Function.Nullary() { + public ByteBuffer call() { + return ByteBuffer.allocate(byteBufferCapacity); + } + }, null); } @Override diff --git a/IO/src/main/java/io/deephaven/io/log/impl/DynamicLogEntryPoolImpl.java b/IO/src/main/java/io/deephaven/io/log/impl/DynamicLogEntryPoolImpl.java index 111668a433b..f153953fcb5 100644 --- a/IO/src/main/java/io/deephaven/io/log/impl/DynamicLogEntryPoolImpl.java +++ b/IO/src/main/java/io/deephaven/io/log/impl/DynamicLogEntryPoolImpl.java @@ -10,15 +10,14 @@ import io.deephaven.io.log.LogBufferPool; import io.deephaven.base.Function; -public class DynamicLogEntryPoolImpl extends ThreadSafeLenientFixedSizePool - implements LogEntryPool { +public class DynamicLogEntryPoolImpl extends ThreadSafeLenientFixedSizePool implements LogEntryPool { public DynamicLogEntryPoolImpl(String name, int entryCount, final LogBufferPool bufferPool) { super(name, 
entryCount, - new Function.Nullary() { - public LogEntry call() { - return new LogEntryImpl(bufferPool); - } - }, null); + new Function.Nullary() { + public LogEntry call() { + return new LogEntryImpl(bufferPool); + } + }, null); } @Override diff --git a/IO/src/main/java/io/deephaven/io/log/impl/LogBufferPoolImpl.java b/IO/src/main/java/io/deephaven/io/log/impl/LogBufferPoolImpl.java index 6a367809431..cc15c38a06e 100644 --- a/IO/src/main/java/io/deephaven/io/log/impl/LogBufferPoolImpl.java +++ b/IO/src/main/java/io/deephaven/io/log/impl/LogBufferPoolImpl.java @@ -11,8 +11,7 @@ import io.deephaven.io.log.LogBufferPool; import org.apache.log4j.Logger; -public class LogBufferPoolImpl extends ThreadSafeFixedSizePool - implements LogBufferPool { +public class LogBufferPoolImpl extends ThreadSafeFixedSizePool implements LogBufferPool { private final int bufferSize; diff --git a/IO/src/main/java/io/deephaven/io/log/impl/LogEntryImpl.java b/IO/src/main/java/io/deephaven/io/log/impl/LogEntryImpl.java index 86802033769..e4bdf8de1b1 100644 --- a/IO/src/main/java/io/deephaven/io/log/impl/LogEntryImpl.java +++ b/IO/src/main/java/io/deephaven/io/log/impl/LogEntryImpl.java @@ -62,8 +62,7 @@ public LogEntry start(final LogSink sink, final LogLevel level, final long curre } @Override - public LogEntry start(final LogSink sink, final LogLevel level, final long currentTimeMicros, - final Throwable t) { + public LogEntry start(final LogSink sink, final LogLevel level, final long currentTimeMicros, final Throwable t) { super.start(); starts.getAndIncrement(); this.timestamp = currentTimeMicros; diff --git a/IO/src/main/java/io/deephaven/io/log/impl/LogOutputCsvImpl.java b/IO/src/main/java/io/deephaven/io/log/impl/LogOutputCsvImpl.java index 449e8153ac2..53f027b70fe 100644 --- a/IO/src/main/java/io/deephaven/io/log/impl/LogOutputCsvImpl.java +++ b/IO/src/main/java/io/deephaven/io/log/impl/LogOutputCsvImpl.java @@ -188,9 +188,8 @@ public LogOutput append(Throwable t) { 
append(t.getClass().getName()).append(": ").append(t.getMessage()); for (StackTraceElement e : t.getStackTrace()) { nl().append(" at ") - .append(e.getClassName()).append(".").append(e.getMethodName()) - .append("(").append(e.getFileName()).append(":").append(e.getLineNumber()) - .append(")"); + .append(e.getClassName()).append(".").append(e.getMethodName()) + .append("(").append(e.getFileName()).append(":").append(e.getLineNumber()).append(")"); } nl(); } while ((t = t.getCause()) != null); diff --git a/IO/src/main/java/io/deephaven/io/log/impl/LogSinkImpl.java b/IO/src/main/java/io/deephaven/io/log/impl/LogSinkImpl.java index 974478eb288..fd82bf8d459 100644 --- a/IO/src/main/java/io/deephaven/io/log/impl/LogSinkImpl.java +++ b/IO/src/main/java/io/deephaven/io/log/impl/LogSinkImpl.java @@ -35,12 +35,11 @@ public class LogSinkImpl implements LogSink { private static Factory _FACTORY = new Factory() { @Override - public LogSink create(String basePath, int rollInterval, DateFormat rollFormat, - Pool elementPool, boolean append, LogOutput outputBuffer, String header, - LogSinkWriter maybeWriter) { + public LogSink create(String basePath, int rollInterval, DateFormat rollFormat, Pool elementPool, + boolean append, LogOutput outputBuffer, String header, LogSinkWriter maybeWriter) { // noinspection unchecked - return new LogSinkImpl(basePath, rollInterval, rollFormat, elementPool, append, - outputBuffer, header, maybeWriter); + return new LogSinkImpl(basePath, rollInterval, rollFormat, elementPool, append, outputBuffer, header, + maybeWriter); } }; @@ -118,7 +117,7 @@ public static Factory FACTORY() { public static final DateFormat DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd-HHmmss.SSSZ"); public static final BigWriterThread globalWriterThread = - new BigWriterThread("LogSinkImpl.GlobalWriterThread", 1000000); // park for 1 milli + new BigWriterThread("LogSinkImpl.GlobalWriterThread", 1000000); // park for 1 milli /** * Constructor @@ -130,57 +129,51 @@ public 
LogSinkImpl(String basePath, long rollIntervalMillis, Pool elementPool /** * Constructor */ - public LogSinkImpl(String basePath, long rollIntervalMillis, Pool elementPool, - LogOutput outputBuffer) { + public LogSinkImpl(String basePath, long rollIntervalMillis, Pool elementPool, LogOutput outputBuffer) { this(basePath, rollIntervalMillis, DATE_FORMAT, elementPool, true, outputBuffer, null); } /** * Constructor */ - public LogSinkImpl(String basePath, long rollIntervalMillis, Pool elementPool, - boolean append) { + public LogSinkImpl(String basePath, long rollIntervalMillis, Pool elementPool, boolean append) { this(basePath, rollIntervalMillis, DATE_FORMAT, elementPool, append, null, null); } /** * Constructor */ - public LogSinkImpl(String basePath, long rollIntervalMillis, Pool elementPool, - boolean append, LogOutput outputBuffer) { + public LogSinkImpl(String basePath, long rollIntervalMillis, Pool elementPool, boolean append, + LogOutput outputBuffer) { this(basePath, rollIntervalMillis, DATE_FORMAT, elementPool, append, outputBuffer, null); } /** * Constructor */ - public LogSinkImpl(String basePath, long rollIntervalMillis, DateFormat rollFormat, - Pool elementPool, boolean append) { + public LogSinkImpl(String basePath, long rollIntervalMillis, DateFormat rollFormat, Pool elementPool, + boolean append) { this(basePath, rollIntervalMillis, rollFormat, elementPool, append, null, null); } /** * Constructor */ - public LogSinkImpl(String basePath, long rollIntervalMillis, DateFormat rollFormat, - Pool elementPool, boolean append, - LogOutput outputBuffer, String header) { - this(basePath, rollIntervalMillis, rollFormat, elementPool, append, outputBuffer, header, - null); + public LogSinkImpl(String basePath, long rollIntervalMillis, DateFormat rollFormat, Pool elementPool, + boolean append, + LogOutput outputBuffer, String header) { + this(basePath, rollIntervalMillis, rollFormat, elementPool, append, outputBuffer, header, null); } /** * Constructor */ - 
public LogSinkImpl(String basePath, long rollIntervalMillis, DateFormat rollFormat, - Pool elementPool, boolean append, - LogOutput outputBuffer, String header, LogSinkWriter> maybeWriter) { + public LogSinkImpl(String basePath, long rollIntervalMillis, DateFormat rollFormat, Pool elementPool, + boolean append, + LogOutput outputBuffer, String header, LogSinkWriter> maybeWriter) { this.basePath = basePath; this.rollIntervalMicros = rollIntervalMillis * 1000L; - this.rollFormat = null == rollFormat ? null : (DateFormat) rollFormat.clone(); // make sure - // we have - // one - // instance + this.rollFormat = null == rollFormat ? null : (DateFormat) rollFormat.clone(); // make sure we have one instance // per sink this.elementPool = elementPool; this.append = append; @@ -189,14 +182,8 @@ public LogSinkImpl(String basePath, long rollIntervalMillis, DateFormat rollForm this.header = header; this.linkPath = new File(basePath + ".current").toPath(); - this.supportsLinks = !System.getProperty("os.name").toLowerCase().contains("win"); // enabled - // until - // we - // find - // out - // otherwise, - // except - // on + this.supportsLinks = !System.getProperty("os.name").toLowerCase().contains("win"); // enabled until we find out + // otherwise, except on // windows // don't create the output file until we actually write to it @@ -213,8 +200,7 @@ public LogSinkImpl(String basePath, long rollIntervalMillis, DateFormat rollForm writer = maybeWriter; writer.addLogSink(this); // will start if not started } else { - final WriterThread thread = - new WriterThread<>("LogSinkImpl.WriterThread-" + basePath); + final WriterThread thread = new WriterThread<>("LogSinkImpl.WriterThread-" + basePath); thread.setDaemon(true); writer = thread; writer.addLogSink(this); // will start it @@ -237,8 +223,8 @@ public String toString() { @Override public void write(T e) { if (shutdown) - return; // don't want to add anything new to the queue, b/c we can cause - // isOpenAfterWrite() to never 
return (if we keep the queue full) + return; // don't want to add anything new to the queue, b/c we can cause isOpenAfterWrite() to never return + // (if we keep the queue full) int spins = 0; while (!outputQueue.enqueue(e)) { @@ -273,15 +259,13 @@ public void shutdown() { } /** - * Terminate the sink - does not guarantee entries will ever be written, but will not block - * shutdown() calls. + * Terminate the sink - does not guarantee entries will ever be written, but will not block shutdown() calls. */ @Override public void terminate() { shutdown = true; // We're not going to write any more entries, so don't block shutdown calls. - // If we release the semaphore extra times, it won't hurt anything - the semaphore could be - // replaced + // If we release the semaphore extra times, it won't hurt anything - the semaphore could be replaced // by a boolean and a condition variable in its current usage. writtenOnShutdown.release(1); } @@ -291,8 +275,8 @@ public void terminate() { */ @Override public void addInterceptor(Interceptor interceptor) { - interceptors = ArrayUtil.pushArray(interceptor, interceptors, - ClassUtil.>generify(Interceptor.class)); + interceptors = + ArrayUtil.pushArray(interceptor, interceptors, ClassUtil.>generify(Interceptor.class)); } // ------------------------------------------------------------------------------------------- @@ -335,8 +319,7 @@ private void writeOut(T e) throws IOException { /** * The writer thread */ - private class WriterThread extends Thread - implements LogSinkWriter> { + private class WriterThread extends Thread implements LogSinkWriter> { private final AtomicBoolean started; private final PrintStream err; @@ -369,7 +352,7 @@ public void run() { t.addSuppressed(t2); } finally { AsyncSystem.exitCaught(this, t, EXIT_STATUS, err, - "LogSinkImpl: unable to write log entry"); + "LogSinkImpl: unable to write log entry"); } } } @@ -413,8 +396,8 @@ public void run() { int spinsSinceLastChange = 0; while (true) { impl = 
toWriteOut.dequeue(); - Assert.neqNull(impl, "impl"); // b/c we are using a semaphore to wait, there should - // always be something here + Assert.neqNull(impl, "impl"); // b/c we are using a semaphore to wait, there should always be something + // here try { if (impl.didWrite()) { spinsSinceLastChange = 0; @@ -442,7 +425,7 @@ public void run() { t.addSuppressed(t2); } finally { AsyncSystem.exitCaught(this, t, EXIT_STATUS, err, - "LogSinkImpl: unable to write log entry"); + "LogSinkImpl: unable to write log entry"); } return; } @@ -465,15 +448,13 @@ private void waitForSomeEntries() { private void checkOutputFile(long nowMicros) throws IOException { boolean updateLink = false; if (outputFile == null) { - currentIntervalMicros = - nowMicros - (rollIntervalMicros == 0 ? 0 : (nowMicros % rollIntervalMicros)); + currentIntervalMicros = nowMicros - (rollIntervalMicros == 0 ? 0 : (nowMicros % rollIntervalMicros)); // note: first file has complete timestamp currentPath = stampedOutputFilePath(nowMicros); outputFile = new FileOutputStream(currentPath, append).getChannel(); writeHeader(); updateLink = true; - } else if (rollIntervalMicros > 0 - && nowMicros > currentIntervalMicros + rollIntervalMicros) { + } else if (rollIntervalMicros > 0 && nowMicros > currentIntervalMicros + rollIntervalMicros) { outputFile.close(); currentIntervalMicros = nowMicros - (nowMicros % rollIntervalMicros); currentPath = stampedOutputFilePath(currentIntervalMicros); @@ -493,8 +474,7 @@ private void checkOutputFile(long nowMicros) throws IOException { } private String stampedOutputFilePath(long nowMicros) { - return rollFormat == null ? basePath - : basePath + "." + rollFormat.format(new Date(nowMicros / 1000)); + return rollFormat == null ? basePath : basePath + "." 
+ rollFormat.format(new Date(nowMicros / 1000)); } private void writeHeader() throws IOException { @@ -525,8 +505,8 @@ private void flushOutput(T e, LogOutput data) throws IOException { ByteBuffer b = data.getBuffer(i); while (b.remaining() > 0) { if (outputFile.write(b) == 0) { - // this is a file channel, so if we write zero bytes the disk is full - don't - // bang our heads against the wall + // this is a file channel, so if we write zero bytes the disk is full - don't bang our heads against + // the wall break; } } diff --git a/IO/src/main/java/io/deephaven/io/logger/CustomLog4jLevel.java b/IO/src/main/java/io/deephaven/io/logger/CustomLog4jLevel.java index da4e4d2b616..83a9bbc98a9 100644 --- a/IO/src/main/java/io/deephaven/io/logger/CustomLog4jLevel.java +++ b/IO/src/main/java/io/deephaven/io/logger/CustomLog4jLevel.java @@ -7,9 +7,8 @@ import org.apache.log4j.Level; /** - * Allows us to have an explicit priority named e-mail - this level is just logged with the priority - * string "EMAIL", and the actual email is created by an external tool which scans the logfiles for - * EMAIL lines. + * Allows us to have an explicit priority named e-mail - this level is just logged with the priority string "EMAIL", and + * the actual email is created by an external tool which scans the logfiles for EMAIL lines. */ public class CustomLog4jLevel extends Level { public static final int EMAIL_INT = 45000; diff --git a/IO/src/main/java/io/deephaven/io/logger/InternalLoggerUtil.java b/IO/src/main/java/io/deephaven/io/logger/InternalLoggerUtil.java index 7e6597332d7..4a1f16b4957 100644 --- a/IO/src/main/java/io/deephaven/io/logger/InternalLoggerUtil.java +++ b/IO/src/main/java/io/deephaven/io/logger/InternalLoggerUtil.java @@ -8,14 +8,13 @@ import java.nio.ByteBuffer; /** - * Class for logger utilities for internal Logger usage. Not named LoggerUtil or LoggerUtils to - * avoid confusion with those existing classes. + * Class for logger utilities for internal Logger usage. 
Not named LoggerUtil or LoggerUtils to avoid confusion with + * those existing classes. */ class InternalLoggerUtil { /** - * Write the specified LogEntry to the specified stream flipping buffers, then call any - * interceptors. + * Write the specified LogEntry to the specified stream flipping buffers, then call any interceptors. * * @param entry the entry to be written * @param outputStream the OutputStream to which the entry is written @@ -23,7 +22,7 @@ class InternalLoggerUtil { * @throws IOException from the outputStream write operations */ static void writeEntryToStream(final LogEntry entry, final OutputStream outputStream, - final LogSink.Interceptor[] interceptors) throws IOException { + final LogSink.Interceptor[] interceptors) throws IOException { for (int i = 0; i < entry.getBufferCount(); ++i) { final ByteBuffer b = entry.getBuffer(i); b.flip(); diff --git a/IO/src/main/java/io/deephaven/io/logger/Log4jAdapter.java b/IO/src/main/java/io/deephaven/io/logger/Log4jAdapter.java index 1b14601e1bf..6fb71799574 100644 --- a/IO/src/main/java/io/deephaven/io/logger/Log4jAdapter.java +++ b/IO/src/main/java/io/deephaven/io/logger/Log4jAdapter.java @@ -16,20 +16,18 @@ */ public class Log4jAdapter { - private static final Set APPENDER_NAMES_TO_PRESERVE = - new HashSet<>(Arrays.asList("MAILER")); + private static final Set APPENDER_NAMES_TO_PRESERVE = new HashSet<>(Arrays.asList("MAILER")); // ---------------------------------------------------------------- /** - * Sends all Log4j style log output to the given Logger. Specifically, adds the Logger as an - * appender of the root logger and removes appenders that aren't in the preservation list. Any - * non-root loggers and their appenders are left unmolested. The goal is to redirect all default - * logging when it is not feasible to replace the Log4j-style logging. + * Sends all Log4j style log output to the given Logger. 
Specifically, adds the Logger as an appender of the root + * logger and removes appenders that aren't in the preservation list. Any non-root loggers and their appenders are + * left unmolested. The goal is to redirect all default logging when it is not feasible to replace the Log4j-style + * logging. */ public static void sendLog4jToLogger(final Logger logger) { // noinspection unchecked - final Enumeration allAppenders = - org.apache.log4j.Logger.getRootLogger().getAllAppenders(); + final Enumeration allAppenders = org.apache.log4j.Logger.getRootLogger().getAllAppenders(); final List appendersToRemove = new ArrayList<>(); while (allAppenders.hasMoreElements()) { final Appender appender = allAppenders.nextElement(); @@ -38,9 +36,8 @@ public static void sendLog4jToLogger(final Logger logger) { } } for (final Appender appender : appendersToRemove) { - logger.debug().append(Log4jAdapter.class.getSimpleName()) - .append(": Removing Log4j root logger appender: ").append(appender.getName()) - .endl(); + logger.debug().append(Log4jAdapter.class.getSimpleName()).append(": Removing Log4j root logger appender: ") + .append(appender.getName()).endl(); org.apache.log4j.Logger.getRootLogger().removeAppender(appender); } org.apache.log4j.Logger.getRootLogger().addAppender(new AppenderSkeleton() { @@ -58,11 +55,9 @@ public boolean requiresLayout() { @Override protected void append(org.apache.log4j.spi.LoggingEvent event) { - logger - .getEntry(getLogLevel(event.getLevel()), event.getTimeStamp() * 1000, - null == event.getThrowableInformation() ? null - : event.getThrowableInformation().getThrowable()) - .append(event.getRenderedMessage()).endl(); + logger.getEntry(getLogLevel(event.getLevel()), event.getTimeStamp() * 1000, + null == event.getThrowableInformation() ? 
null : event.getThrowableInformation().getThrowable()) + .append(event.getRenderedMessage()).endl(); } }); } diff --git a/IO/src/main/java/io/deephaven/io/logger/Log4jLoggerImpl.java b/IO/src/main/java/io/deephaven/io/logger/Log4jLoggerImpl.java index 03536f3b355..6bcad74d027 100644 --- a/IO/src/main/java/io/deephaven/io/logger/Log4jLoggerImpl.java +++ b/IO/src/main/java/io/deephaven/io/logger/Log4jLoggerImpl.java @@ -23,13 +23,13 @@ public class Log4jLoggerImpl implements Logger { * Static buffer pool, shared among all log4j loggers */ private static final Pool buffers = new ThreadSafeLenientFixedSizePool<>(2048, - new Function.Nullary() { - @Override - public ByteBuffer call() { - return ByteBuffer.allocate(512); - } - }, - null); + new Function.Nullary() { + @Override + public ByteBuffer call() { + return ByteBuffer.allocate(512); + } + }, + null); private static final LogBufferPool logBufferPool = new LogBufferPool() { @Override @@ -59,13 +59,12 @@ public Entry(LogBufferPool logBufferPool) { super(logBufferPool); } - public Entry start(LogSink sink, org.apache.log4j.Logger log4jLogger, LogLevel level, - long currentTime) { + public Entry start(LogSink sink, org.apache.log4j.Logger log4jLogger, LogLevel level, long currentTime) { return start(sink, log4jLogger, level, currentTime, null); } - public Entry start(LogSink sink, org.apache.log4j.Logger log4jLogger, LogLevel level, - long currentTime, Throwable t) { + public Entry start(LogSink sink, org.apache.log4j.Logger log4jLogger, LogLevel level, long currentTime, + Throwable t) { super.start(sink, level, currentTime, t); this.log4jLogger = log4jLogger; this.log4jLevel = getLog4jLevel(level); @@ -86,13 +85,13 @@ public LogEntry endl() { * Static pool shared among all loggers */ private static final Pool entries = new ThreadSafeLenientFixedSizePool<>(1024, - new Function.Nullary() { - @Override - public Entry call() { - return new Entry(logBufferPool); - } - }, - null); + new Function.Nullary() { + @Override + 
public Entry call() { + return new Entry(logBufferPool); + } + }, + null); /** * Specialized sink for log4j loggers @@ -161,14 +160,12 @@ private Entry startEntry(LogLevel level, long currentTime, Throwable t) { @Override public LogEntry getEntry(LogLevel level) { - return isLevelEnabled(level) ? startEntry(level, System.currentTimeMillis() * 1000) - : LogEntry.NULL; + return isLevelEnabled(level) ? startEntry(level, System.currentTimeMillis() * 1000) : LogEntry.NULL; } @Override public LogEntry getEntry(LogLevel level, Throwable t) { - return isLevelEnabled(level) ? startEntry(level, System.currentTimeMillis() * 1000, t) - : LogEntry.NULL; + return isLevelEnabled(level) ? startEntry(level, System.currentTimeMillis() * 1000, t) : LogEntry.NULL; } @Override diff --git a/IO/src/main/java/io/deephaven/io/logger/LogBufferInterceptor.java b/IO/src/main/java/io/deephaven/io/logger/LogBufferInterceptor.java index 55109d0e45e..9c2ca4435b1 100644 --- a/IO/src/main/java/io/deephaven/io/logger/LogBufferInterceptor.java +++ b/IO/src/main/java/io/deephaven/io/logger/LogBufferInterceptor.java @@ -34,8 +34,7 @@ public LogBufferInterceptor() { // ----------------------------------------------------------------------------------------------------------------- @Override - public void element(@NotNull final LogEntry e, @NotNull final LogOutput output) - throws IOException { + public void element(@NotNull final LogEntry e, @NotNull final LogOutput output) throws IOException { if (e.getLevel() instanceof LogLevel.MailLevel) { // We don't support MAILER log lines. 
return; @@ -61,8 +60,7 @@ public void element(@NotNull final LogEntry e, @NotNull final LogOutput output) } stream.close(); } catch (IOException x) { - throw new IOException("Unexpected IOException while formatting LogBuffer Record", - x); + throw new IOException("Unexpected IOException while formatting LogBuffer Record", x); } final ByteBuffer resultData = sink.getBuffer(); resultData.flip(); diff --git a/IO/src/main/java/io/deephaven/io/logger/LogBufferOutputStream.java b/IO/src/main/java/io/deephaven/io/logger/LogBufferOutputStream.java index 699be9b3885..1f86de26934 100644 --- a/IO/src/main/java/io/deephaven/io/logger/LogBufferOutputStream.java +++ b/IO/src/main/java/io/deephaven/io/logger/LogBufferOutputStream.java @@ -17,8 +17,7 @@ class LogBufferOutputStream extends OutputStream { private LogBufferRecord next; private SimpleByteBufferSink buffer; - public LogBufferOutputStream(LogBuffer sink, LogLevel level, int initialBufferSize, - int maxBufferSize) { + public LogBufferOutputStream(LogBuffer sink, LogLevel level, int initialBufferSize, int maxBufferSize) { this.sink = Objects.requireNonNull(sink); this.level = Objects.requireNonNull(level); this.initialBufferSize = initialBufferSize; diff --git a/IO/src/main/java/io/deephaven/io/logger/LogBufferRecord.java b/IO/src/main/java/io/deephaven/io/logger/LogBufferRecord.java index 28c8c37a7d4..d1a34ab1f52 100644 --- a/IO/src/main/java/io/deephaven/io/logger/LogBufferRecord.java +++ b/IO/src/main/java/io/deephaven/io/logger/LogBufferRecord.java @@ -54,14 +54,13 @@ public LogBufferRecord deepCopy() { final LogBufferRecord copy = new LogBufferRecord(); copy.setTimestampMicros(timestampMicros); copy.setLevel(level); - copy.setData(ByteBuffer.wrap(Arrays.copyOfRange(data.array(), - data.position() + data.arrayOffset(), data.limit() + data.arrayOffset()))); + copy.setData(ByteBuffer.wrap(Arrays.copyOfRange(data.array(), data.position() + data.arrayOffset(), + data.limit() + data.arrayOffset()))); return copy; } 
@Override - public void readExternal(@NotNull final ObjectInput in) - throws IOException, ClassNotFoundException { + public void readExternal(@NotNull final ObjectInput in) throws IOException, ClassNotFoundException { timestampMicros = in.readLong(); level = LogLevel.valueOf(in.readUTF()); final byte[] dataArray = new byte[in.readInt()]; diff --git a/IO/src/main/java/io/deephaven/io/logger/LogCrashDump.java b/IO/src/main/java/io/deephaven/io/logger/LogCrashDump.java index 06e491e3f9a..cfa1e37b5ee 100644 --- a/IO/src/main/java/io/deephaven/io/logger/LogCrashDump.java +++ b/IO/src/main/java/io/deephaven/io/logger/LogCrashDump.java @@ -26,23 +26,21 @@ public static void logCrashDump(Logger log) { String lockOwnerName = lockName == null ? null : ti.getLockOwnerName(); long lockOwnerId = lockName == null ? -1 : ti.getLockOwnerId(); if (lockName != null && lockOwnerName != null) { - (entry == null ? (entry = log.info()) : entry).append("ThreadInfo: ") - .append(ti.getThreadName()).append(", id=").append(ti.getThreadId()) - .append(", state=").append(ti.getThreadState().name()) - .append(" on ").append(lockName + " owned by ").append(lockOwnerName) - .append(" id=").append(lockOwnerId).nl(); + (entry == null ? (entry = log.info()) : entry).append("ThreadInfo: ").append(ti.getThreadName()) + .append(", id=").append(ti.getThreadId()).append(", state=") + .append(ti.getThreadState().name()) + .append(" on ").append(lockName + " owned by ").append(lockOwnerName).append(" id=") + .append(lockOwnerId).nl(); } else { - (entry == null ? (entry = log.info()) : entry).append("ThreadInfo: ") - .append(ti.getThreadName()).append(", id=").append(ti.getThreadId()) - .append(", state=").append(ti.getThreadState().name()).nl(); + (entry == null ? 
(entry = log.info()) : entry).append("ThreadInfo: ").append(ti.getThreadName()) + .append(", id=").append(ti.getThreadId()).append(", state=") + .append(ti.getThreadState().name()).nl(); } if (stack == null || stack.length == 0) { - (entry == null ? (entry = log.info()) : entry) - .append(" ").nl(); + (entry == null ? (entry = log.info()) : entry).append(" ").nl(); } else { for (StackTraceElement e : stack) { - (entry == null ? (entry = log.info()) : entry).append(" ") - .append(e.toString()).nl(); + (entry == null ? (entry = log.info()) : entry).append(" ").append(e.toString()).nl(); } } } diff --git a/IO/src/main/java/io/deephaven/io/logger/LoggerImpl.java b/IO/src/main/java/io/deephaven/io/logger/LoggerImpl.java index c113b69ac01..f21222f39b1 100644 --- a/IO/src/main/java/io/deephaven/io/logger/LoggerImpl.java +++ b/IO/src/main/java/io/deephaven/io/logger/LoggerImpl.java @@ -36,9 +36,8 @@ protected String initialValue() { } }; - public LoggerImpl(LogEntryPool logEntryPool, LogSink logSink, String prefix, - LogLevel loggingLevel, LoggerTimeSource timeSource, TimeZone tz, boolean showLevel, - boolean showThreadName) { + public LoggerImpl(LogEntryPool logEntryPool, LogSink logSink, String prefix, LogLevel loggingLevel, + LoggerTimeSource timeSource, TimeZone tz, boolean showLevel, boolean showThreadName) { this.logEntryPool = logEntryPool; this.logSink = logSink; @@ -81,8 +80,7 @@ public LogEntry getEntry(LogLevel level, long currentTimeMicros, @Nullable Throw LogEntry entry = logEntryPool.take().start(logSink, level, currentTimeMicros, t); if (tz != null) { - entry.append("[").appendTimestampMicros(entry.getTimestampMicros(), localTimestamp) - .append("] "); + entry.append("[").appendTimestampMicros(entry.getTimestampMicros(), localTimestamp).append("] "); } if (showLevel) { diff --git a/IO/src/main/java/io/deephaven/io/logger/LoggerOutputStream.java b/IO/src/main/java/io/deephaven/io/logger/LoggerOutputStream.java index 0723d755678..7b149e438de 100644 --- 
a/IO/src/main/java/io/deephaven/io/logger/LoggerOutputStream.java +++ b/IO/src/main/java/io/deephaven/io/logger/LoggerOutputStream.java @@ -11,9 +11,9 @@ import java.io.OutputStream; /** - * OutputStream implementation for outputting data to a Logger. Expects that each appended byte - * represents a char (1:1, so only ASCII/LATIN-1 and similar charsets are supported). flush() events - * are ignored - we end() log entries on newlines. + * OutputStream implementation for outputting data to a Logger. Expects that each appended byte represents a char (1:1, + * so only ASCII/LATIN-1 and similar charsets are supported). flush() events are ignored - we end() log entries on + * newlines. */ public class LoggerOutputStream extends OutputStream { @@ -43,12 +43,12 @@ public synchronized void write(final int b) throws IOException { currentSize++; /* - * This class is primarily used to capture data to be sent to the ProcessEventLog. There are - * two restrictions on the size of what can be captured. 1. The number of available buffers. - * If we obtain all the buffers, we hang in ThreadSafeFixedSizePool trying to get a buffer - * that will never be available. By default this is 2048 buffers of 1K each. 2. The maximum - * size of a binary log entry. This is defined by BinaryStoreV2Constants.MAX_ENTRY_SIZE, - * currently defined at 1024 * 1024. To be safe, we'll start splitting messages at 512K. + * This class is primarily used to capture data to be sent to the ProcessEventLog. There are two restrictions on + * the size of what can be captured. 1. The number of available buffers. If we obtain all the buffers, we hang + * in ThreadSafeFixedSizePool trying to get a buffer that will never be available. By default this is 2048 + * buffers of 1K each. 2. The maximum size of a binary log entry. This is defined by + * BinaryStoreV2Constants.MAX_ENTRY_SIZE, currently defined at 1024 * 1024. To be safe, we'll start splitting + * messages at 512K. 
*/ if (currentSize >= 524_288) { currentEntry.end(); diff --git a/IO/src/main/java/io/deephaven/io/logger/NullLoggerImpl.java b/IO/src/main/java/io/deephaven/io/logger/NullLoggerImpl.java index 0ee3acd4de5..303ee869990 100644 --- a/IO/src/main/java/io/deephaven/io/logger/NullLoggerImpl.java +++ b/IO/src/main/java/io/deephaven/io/logger/NullLoggerImpl.java @@ -17,8 +17,8 @@ import java.util.TimeZone; /** - * Logger implementation that calls Interceptors on log entries, but doesn't write them to anything. - * Note that this implementation does not append the timestamp or level to the entries. + * Logger implementation that calls Interceptors on log entries, but doesn't write them to anything. Note that this + * implementation does not append the timestamp or level to the entries. */ public class NullLoggerImpl extends LoggerImpl { @@ -31,24 +31,20 @@ public long currentTimeMicros() { } private NullLoggerImpl(@NotNull final LogEntryPool logEntryPool, - @NotNull final LogLevel loggingLevel, - @NotNull final LoggerTimeSource timeSource, - final TimeZone tz) { - super(logEntryPool, new NullLoggerImpl.Sink(logEntryPool), null, loggingLevel, timeSource, - tz, true, false); + @NotNull final LogLevel loggingLevel, + @NotNull final LoggerTimeSource timeSource, + final TimeZone tz) { + super(logEntryPool, new NullLoggerImpl.Sink(logEntryPool), null, loggingLevel, timeSource, tz, true, false); } public NullLoggerImpl(@NotNull final LogLevel loggingLevel) { this(new LogEntryPoolImpl(1024, new LogBufferPoolImpl(2048, 1024)), - loggingLevel, - new NullLoggerTimeSource(), - null); + loggingLevel, + new NullLoggerTimeSource(), + null); } - /** - * Override to avoid writing timestamp and level to the entry, as it's assumed they'll be - * handled independently - */ + /** Override to avoid writing timestamp and level to the entry, as it's assumed they'll be handled independently */ @Override public LogEntry getEntry(LogLevel level, long currentTimeMicros, @Nullable Throwable t) { if 
(!isLevelEnabled(level)) { @@ -103,8 +99,8 @@ public void terminate() {} @Override public void addInterceptor(@NotNull final Interceptor logEntryInterceptor) { - interceptors = ArrayUtil.pushArray(logEntryInterceptor, interceptors, - ClassUtil.generify(Interceptor.class)); + interceptors = + ArrayUtil.pushArray(logEntryInterceptor, interceptors, ClassUtil.generify(Interceptor.class)); } } } diff --git a/IO/src/main/java/io/deephaven/io/logger/PatternLayout.java b/IO/src/main/java/io/deephaven/io/logger/PatternLayout.java index f51ed38f75c..4c0830c7017 100644 --- a/IO/src/main/java/io/deephaven/io/logger/PatternLayout.java +++ b/IO/src/main/java/io/deephaven/io/logger/PatternLayout.java @@ -13,8 +13,8 @@ // -------------------------------------------------------------------- /** * A customized {@link org.apache.log4j.PatternLayout} that uses a customized - * {@link org.apache.log4j.helpers.PatternParser} that creates a customized {@link PatternConverter} - * that is very efficient for dates in our preferred format. + * {@link org.apache.log4j.helpers.PatternParser} that creates a customized {@link PatternConverter} that is very + * efficient for dates in our preferred format. */ public class PatternLayout extends org.apache.log4j.PatternLayout { diff --git a/IO/src/main/java/io/deephaven/io/logger/ProcessStreamLoggerImpl.java b/IO/src/main/java/io/deephaven/io/logger/ProcessStreamLoggerImpl.java index 04a97454595..f83c17e3b20 100644 --- a/IO/src/main/java/io/deephaven/io/logger/ProcessStreamLoggerImpl.java +++ b/IO/src/main/java/io/deephaven/io/logger/ProcessStreamLoggerImpl.java @@ -17,37 +17,34 @@ import java.util.TimeZone; /** - * Preserve some of the simplicity of StreamLoggerImpl while also retaining the formatting - * functionality of LoggerImpl. + * Preserve some of the simplicity of StreamLoggerImpl while also retaining the formatting functionality of LoggerImpl. 
*/ public class ProcessStreamLoggerImpl extends LoggerImpl { public static Logger makeLogger(@NotNull final LoggerTimeSource timeSource, - @NotNull final TimeZone tz) { + @NotNull final TimeZone tz) { return makeLogger(System.out, LogLevel.INFO, 1024, 2048, 1024, timeSource, tz); } @SuppressWarnings({"WeakerAccess", "SameParameterValue"}) public static Logger makeLogger(@NotNull final OutputStream outputStream, - @NotNull final LogLevel loggingLevel, - final int bufferSize, - final int bufferCount, - final int entryCount, - @NotNull final LoggerTimeSource timeSource, - @NotNull final TimeZone tz) { + @NotNull final LogLevel loggingLevel, + final int bufferSize, + final int bufferCount, + final int entryCount, + @NotNull final LoggerTimeSource timeSource, + @NotNull final TimeZone tz) { final LogEntryPool logEntryPool = - new LogEntryPoolImpl(entryCount, new LogBufferPoolImpl(bufferCount, bufferSize)); - return new ProcessStreamLoggerImpl(logEntryPool, outputStream, loggingLevel, timeSource, - tz); + new LogEntryPoolImpl(entryCount, new LogBufferPoolImpl(bufferCount, bufferSize)); + return new ProcessStreamLoggerImpl(logEntryPool, outputStream, loggingLevel, timeSource, tz); } private ProcessStreamLoggerImpl(@NotNull final LogEntryPool logEntryPool, - @NotNull final OutputStream outputStream, - @NotNull final LogLevel loggingLevel, - @NotNull final LoggerTimeSource timeSource, - @NotNull final TimeZone tz) { - super(logEntryPool, new Sink(outputStream, logEntryPool), null, loggingLevel, timeSource, - tz, true, false); + @NotNull final OutputStream outputStream, + @NotNull final LogLevel loggingLevel, + @NotNull final LoggerTimeSource timeSource, + @NotNull final TimeZone tz) { + super(logEntryPool, new Sink(outputStream, logEntryPool), null, loggingLevel, timeSource, tz, true, false); } /** @@ -61,7 +58,7 @@ private static class Sink implements LogSink { private Interceptor[] interceptors = null; private Sink(@NotNull final OutputStream outputStream, - @NotNull 
final LogEntryPool logEntryPool) { + @NotNull final LogEntryPool logEntryPool) { this.outputStream = outputStream; this.logEntryPool = logEntryPool; } @@ -86,8 +83,8 @@ public void terminate() {} @Override public void addInterceptor(@NotNull final Interceptor logEntryInterceptor) { - interceptors = ArrayUtil.pushArray(logEntryInterceptor, interceptors, - ClassUtil.generify(Interceptor.class)); + interceptors = + ArrayUtil.pushArray(logEntryInterceptor, interceptors, ClassUtil.generify(Interceptor.class)); } } } diff --git a/IO/src/main/java/io/deephaven/io/logger/RollingFileAppender.java b/IO/src/main/java/io/deephaven/io/logger/RollingFileAppender.java index cb814e8904e..41801ee4397 100644 --- a/IO/src/main/java/io/deephaven/io/logger/RollingFileAppender.java +++ b/IO/src/main/java/io/deephaven/io/logger/RollingFileAppender.java @@ -29,8 +29,7 @@ private void rollOver() { new File(fileName + ".5.bak").delete(); for (int i = 5; i > 1; i--) { - new File(fileName + "." + (i - 1) + ".bak") - .renameTo(new File(fileName + "." + i + ".bak")); + new File(fileName + "." + (i - 1) + ".bak").renameTo(new File(fileName + "." 
+ i + ".bak")); } new File(fileName).renameTo(new File(fileName + ".1.bak")); diff --git a/IO/src/main/java/io/deephaven/io/logger/StreamLoggerImpl.java b/IO/src/main/java/io/deephaven/io/logger/StreamLoggerImpl.java index 2e75937419d..00f182af909 100644 --- a/IO/src/main/java/io/deephaven/io/logger/StreamLoggerImpl.java +++ b/IO/src/main/java/io/deephaven/io/logger/StreamLoggerImpl.java @@ -25,8 +25,8 @@ public class StreamLoggerImpl implements Logger { * Static buffer pool, shared among all SystemOut loggers */ private static final Pool buffers = new ThreadSafeLenientFixedSizePool<>(2048, - () -> ByteBuffer.allocate(512), - null); + () -> ByteBuffer.allocate(512), + null); private static final LogBufferPool logBufferPool = new LogBufferPool() { @Override @@ -59,8 +59,7 @@ public Entry start(LogSink sink, OutputStream stream, LogLevel level, long curre return start(sink, stream, level, currentTime, null); } - public Entry start(LogSink sink, OutputStream stream, LogLevel level, long currentTime, - Throwable t) { + public Entry start(LogSink sink, OutputStream stream, LogLevel level, long currentTime, Throwable t) { super.start(sink, level, currentTime, t); this.stream = stream; return this; @@ -71,8 +70,8 @@ public Entry start(LogSink sink, OutputStream stream, LogLevel level, long curre * Static pool shared among all loggers */ private static final Pool entries = new ThreadSafeLenientFixedSizePool<>(1024, - () -> new Entry(logBufferPool), - null); + () -> new Entry(logBufferPool), + null); /** * Specialized sink for stream loggers @@ -105,8 +104,7 @@ public void terminate() { @Override public void addInterceptor(Interceptor entryInterceptor) { - interceptors = ArrayUtil.pushArray(entryInterceptor, interceptors, - ClassUtil.generify(Interceptor.class)); + interceptors = ArrayUtil.pushArray(entryInterceptor, interceptors, ClassUtil.generify(Interceptor.class)); } } @@ -136,14 +134,12 @@ private Entry startEntry(LogLevel level, long currentTime, Throwable t) { 
@Override public LogEntry getEntry(LogLevel level) { - return isLevelEnabled(level) ? startEntry(level, System.currentTimeMillis() * 1000) - : LogEntry.NULL; + return isLevelEnabled(level) ? startEntry(level, System.currentTimeMillis() * 1000) : LogEntry.NULL; } @Override public LogEntry getEntry(LogLevel level, Throwable t) { - return isLevelEnabled(level) ? startEntry(level, System.currentTimeMillis() * 1000, t) - : LogEntry.NULL; + return isLevelEnabled(level) ? startEntry(level, System.currentTimeMillis() * 1000, t) : LogEntry.NULL; } @Override diff --git a/IO/src/main/java/io/deephaven/io/logger/StreamToLogBuffer.java b/IO/src/main/java/io/deephaven/io/logger/StreamToLogBuffer.java index beaeb723fa1..f025f247b54 100644 --- a/IO/src/main/java/io/deephaven/io/logger/StreamToLogBuffer.java +++ b/IO/src/main/java/io/deephaven/io/logger/StreamToLogBuffer.java @@ -15,8 +15,8 @@ public class StreamToLogBuffer implements StandardStreamReceiver { private final int initialBufferSize; private final int maxBufferSize; - public StreamToLogBuffer(LogBuffer logBuffer, boolean receiveOut, boolean receiveErr, - int initialBufferSize, int maxBufferSize) { + public StreamToLogBuffer(LogBuffer logBuffer, boolean receiveOut, boolean receiveErr, int initialBufferSize, + int maxBufferSize) { this.logBuffer = Objects.requireNonNull(logBuffer); this.receiveOut = receiveOut; this.receiveErr = receiveErr; @@ -29,8 +29,7 @@ public Optional receiveOut() { if (!receiveOut) { return Optional.empty(); } - return Optional.of(new LogBufferOutputStream(logBuffer, LogLevel.STDOUT, initialBufferSize, - maxBufferSize)); + return Optional.of(new LogBufferOutputStream(logBuffer, LogLevel.STDOUT, initialBufferSize, maxBufferSize)); } @Override @@ -38,7 +37,6 @@ public Optional receiveErr() { if (!receiveErr) { return Optional.empty(); } - return Optional.of(new LogBufferOutputStream(logBuffer, LogLevel.STDERR, initialBufferSize, - maxBufferSize)); + return Optional.of(new 
LogBufferOutputStream(logBuffer, LogLevel.STDERR, initialBufferSize, maxBufferSize)); } } diff --git a/IO/src/main/java/io/deephaven/io/logger/StringsLoggerImpl.java b/IO/src/main/java/io/deephaven/io/logger/StringsLoggerImpl.java index 56d04c4a167..49575370539 100644 --- a/IO/src/main/java/io/deephaven/io/logger/StringsLoggerImpl.java +++ b/IO/src/main/java/io/deephaven/io/logger/StringsLoggerImpl.java @@ -89,8 +89,8 @@ public void addInterceptor(Interceptor entryInterceptor) { private LogLevel loggingLevel; - public StringsLoggerImpl(Function.Nullary entryFactory, int entryPoolSize, - LogOutput outputBuffer, LogLevel loggingLevel) { + public StringsLoggerImpl(Function.Nullary entryFactory, int entryPoolSize, LogOutput outputBuffer, + LogLevel loggingLevel) { this.loggingLevel = loggingLevel; this.entries = new ThreadSafeLenientFixedSizePool(entryPoolSize, entryFactory, null); this.outputBuffer = outputBuffer; @@ -117,14 +117,12 @@ private LogEntry startEntry(LogLevel level, long currentTime, Throwable t) { @Override public LogEntry getEntry(LogLevel level) { - return isLevelEnabled(level) ? startEntry(level, System.currentTimeMillis() * 1000) - : LogEntry.NULL; + return isLevelEnabled(level) ? startEntry(level, System.currentTimeMillis() * 1000) : LogEntry.NULL; } @Override public LogEntry getEntry(LogLevel level, Throwable t) { - return isLevelEnabled(level) ? startEntry(level, System.currentTimeMillis() * 1000, t) - : LogEntry.NULL; + return isLevelEnabled(level) ? 
startEntry(level, System.currentTimeMillis() * 1000, t) : LogEntry.NULL; } @Override diff --git a/IO/src/main/java/io/deephaven/io/sched/Job.java b/IO/src/main/java/io/deephaven/io/sched/Job.java index b6ebda1929a..e76718176ed 100644 --- a/IO/src/main/java/io/deephaven/io/sched/Job.java +++ b/IO/src/main/java/io/deephaven/io/sched/Job.java @@ -25,12 +25,10 @@ public abstract class Job implements LogOutputAppendable { * * @param channel the channel which has become ready * @param readyOps the operations which can be performed on this channel without blocking - * @returns the modified readyOps after the invocation; if non-zero, the job will be invoked - * again with these + * @returns the modified readyOps after the invocation; if non-zero, the job will be invoked again with these * @throws IOException - if something bad happens */ - public abstract int invoke(SelectableChannel channel, int readyOps, Procedure.Nullary handoff) - throws IOException; + public abstract int invoke(SelectableChannel channel, int readyOps, Procedure.Nullary handoff) throws IOException; /** * This method is invoked if the job times out. @@ -38,8 +36,7 @@ public abstract int invoke(SelectableChannel channel, int readyOps, Procedure.Nu public abstract void timedOut(); /** - * This method is called if the job is explicitly cancelled before it becomes ready or times - * out. + * This method is called if the job is explicitly cancelled before it becomes ready or times out. */ public abstract void cancelled(); diff --git a/IO/src/main/java/io/deephaven/io/sched/JobStateTimeoutQueue.java b/IO/src/main/java/io/deephaven/io/sched/JobStateTimeoutQueue.java index e19acfddf78..4347e03174d 100644 --- a/IO/src/main/java/io/deephaven/io/sched/JobStateTimeoutQueue.java +++ b/IO/src/main/java/io/deephaven/io/sched/JobStateTimeoutQueue.java @@ -9,8 +9,7 @@ import java.util.Set; /** - * A priority queue (heap) for JobState instances, ordered by their deadlines. Note that this class - * is package-private. 
+ * A priority queue (heap) for JobState instances, ordered by their deadlines. Note that this class is package-private. */ class JobStateTimeoutQueue implements Cloneable { private final Logger log; @@ -168,8 +167,8 @@ private void fixDown(int k) { boolean testInvariantAux(int i, String what) { if (i <= size) { if (queue[i].tqPos != i) { - log.error().append(what).append(": queue[").append(i).append("].tqPos=") - .append(queue[i].tqPos).append(" != ").append(i).endl(); + log.error().append(what).append(": queue[").append(i).append("].tqPos=").append(queue[i].tqPos) + .append(" != ").append(i).endl(); } if (!testInvariantAux(i * 2, what)) { return false; @@ -179,9 +178,8 @@ boolean testInvariantAux(int i, String what) { } if (i > 1) { if (queue[i].deadline < queue[i / 2].deadline) { - log.error().append(what).append(": child[").append(i).append("]=") - .append(queue[i].deadline).append(" < parent[").append((i / 2)).append("]=") - .append(queue[i / 2].deadline).endl(); + log.error().append(what).append(": child[").append(i).append("]=").append(queue[i].deadline) + .append(" < parent[").append((i / 2)).append("]=").append(queue[i / 2].deadline).endl(); return false; } } @@ -194,8 +192,8 @@ boolean testInvariant(String what) { if (result) { for (int i = size + 1; i < queue.length; ++i) { if (queue[i] != null) { - log.error().append(what).append(": size = ").append(size).append(", child[") - .append(i).append("]=").append(queue[i].deadline).append(" != null").endl(); + log.error().append(what).append(": size = ").append(size).append(", child[").append(i).append("]=") + .append(queue[i].deadline).append(" != null").endl(); result = false; } } diff --git a/IO/src/main/java/io/deephaven/io/sched/Scheduler.java b/IO/src/main/java/io/deephaven/io/sched/Scheduler.java index 1cb35fec54f..247b6ab08cc 100644 --- a/IO/src/main/java/io/deephaven/io/sched/Scheduler.java +++ b/IO/src/main/java/io/deephaven/io/sched/Scheduler.java @@ -11,15 +11,13 @@ import 
java.util.concurrent.Executor; /** - * This class provides a singleton wrapper for scheduling invocations of multiple Job instances from - * a single thread. Job are scheduled in accordance with an interest set on a java.nio.Channel, - * deadline based time scheduling, and/or custom criteria defined by the Jobs' implementation of the - * ready() method. + * This class provides a singleton wrapper for scheduling invocations of multiple Job instances from a single thread. + * Job are scheduled in accordance with an interest set on a java.nio.Channel, deadline based time scheduling, and/or + * custom criteria defined by the Jobs' implementation of the ready() method. * - * Jobs are instantiated by the application and made known to the scheduler by one of the install() - * methods. Once the job is installed, the scheduler will call exactly one of its invoke(), - * timedOut() or cancelled() methods exactly once. After this, the scheduler forgets about the job - * completely, unless the application installs it again. + * Jobs are instantiated by the application and made known to the scheduler by one of the install() methods. Once the + * job is installed, the scheduler will call exactly one of its invoke(), timedOut() or cancelled() methods exactly + * once. After this, the scheduler forgets about the job completely, unless the application installs it again. */ public interface Scheduler { @@ -48,24 +46,22 @@ public interface Scheduler { public void cancelJob(Job job); /** - * Wait for jobs to become ready, then invoke() them all. This method will form the core of the - * main loop of a scheduler-driven application. The method first waits until: + * Wait for jobs to become ready, then invoke() them all. This method will form the core of the main loop of a + * scheduler-driven application. 
The method first waits until: * - * -- the given timeout expires, -- the earliest job-specific timeout expires, or -- one or more - * jobs becomes ready + * -- the given timeout expires, -- the earliest job-specific timeout expires, or -- one or more jobs becomes ready * - * If jobs have become ready, then the entire ready set will be invoked. If any job throws an - * uncaught exception, the job's terminated() method will be called and the job deregistered. - * This does not abort the invocation of the remaining jobs. The return value is then the number - * of jobs that were invoked. + * If jobs have become ready, then the entire ready set will be invoked. If any job throws an uncaught exception, + * the job's terminated() method will be called and the job deregistered. This does not abort the invocation of the + * remaining jobs. The return value is then the number of jobs that were invoked. * - * If no jobs are ready and any job-specific timeouts expire, the associated jobs' timedOut() - * methods are called. The return value is the negative of the number of expired timeouts. + * If no jobs are ready and any job-specific timeouts expire, the associated jobs' timedOut() methods are called. + * The return value is the negative of the number of expired timeouts. * * If the time given by the timeout argument expires, then zero is returned. * - * Note that this method is not synchronized. The application must ensure that it is never - * called concurrently by more than one thread. + * Note that this method is not synchronized. The application must ensure that it is never called concurrently by + * more than one thread. 
* * @return true, if some job was dispatched */ diff --git a/IO/src/main/java/io/deephaven/io/sched/TimedJob.java b/IO/src/main/java/io/deephaven/io/sched/TimedJob.java index e8755f13a1c..2304c4bfd85 100644 --- a/IO/src/main/java/io/deephaven/io/sched/TimedJob.java +++ b/IO/src/main/java/io/deephaven/io/sched/TimedJob.java @@ -10,8 +10,8 @@ import java.nio.channels.SelectableChannel; /** - * This is the base class for jobs which are only interested in timing events. It provides default - * invoke() and cancelled() method which do nothing. + * This is the base class for jobs which are only interested in timing events. It provides default invoke() and + * cancelled() method which do nothing. */ public abstract class TimedJob extends Job { public int invoke(SelectableChannel channel, int readyOps, Procedure.Nullary handoff) { diff --git a/IO/src/main/java/io/deephaven/io/sched/YASchedulerImpl.java b/IO/src/main/java/io/deephaven/io/sched/YASchedulerImpl.java index 5f67d202aae..dbd31634804 100644 --- a/IO/src/main/java/io/deephaven/io/sched/YASchedulerImpl.java +++ b/IO/src/main/java/io/deephaven/io/sched/YASchedulerImpl.java @@ -17,34 +17,29 @@ /** * Yet Another implementation of the Scheduler interface -- the best one yet. * - * This class provides a singleton wrapper for scheduling invocations of multiple Job instances from - * a single thread. Job are scheduled in accordance with an interest set on a java.nio.Channel, - * deadline based time scheduling, and/or custom criteria defined by the Jobs' implementation of the - * ready() method. + * This class provides a singleton wrapper for scheduling invocations of multiple Job instances from a single thread. + * Job are scheduled in accordance with an interest set on a java.nio.Channel, deadline based time scheduling, and/or + * custom criteria defined by the Jobs' implementation of the ready() method. * - * Jobs are instantiated by the application and made known to the scheduler by one of the - * installJob() methods. 
A previously installed job can be removed from the scheduler with the - * cancelJob() method. The installJob() and cancelJob() methods are thread-safe. It is allowed to - * call installJob() on a job that is already installed, or cancelJob() on a job that is not current - * in the scheduler. In the former case, the channel and/or deadline will be updated accordingly; in - * the latter, the call will be ignored. + * Jobs are instantiated by the application and made known to the scheduler by one of the installJob() methods. A + * previously installed job can be removed from the scheduler with the cancelJob() method. The installJob() and + * cancelJob() methods are thread-safe. It is allowed to call installJob() on a job that is already installed, or + * cancelJob() on a job that is not current in the scheduler. In the former case, the channel and/or deadline will be + * updated accordingly; in the latter, the call will be ignored. * - * Once the job is installed, the scheduler promises to call exactly one of its invoke(), timedOut() - * or cancelled() methods exactly once. The invoke() method will be called only if the job was - * (last) installed with a channel and non-zero interest set. The timedOut() method can be called - * for any job, since all jobs have an associated deadline (although the timeout value can be set to - * Integer.MAX_VALUE to make if effectively infinite). The cancelled() method is called only if the - * job is removed by a cancelJob() call before either the channe is ready or the deadline expires. + * Once the job is installed, the scheduler promises to call exactly one of its invoke(), timedOut() or cancelled() + * methods exactly once. The invoke() method will be called only if the job was (last) installed with a channel and + * non-zero interest set. The timedOut() method can be called for any job, since all jobs have an associated deadline + * (although the timeout value can be set to Integer.MAX_VALUE to make if effectively infinite). 
The cancelled() method + * is called only if the job is removed by a cancelJob() call before either the channe is ready or the deadline expires. * - * After the job is called back, the scheduler forgets about the job completely, unless the - * application installs it again. That is, from the scheduler's point of view *all* jobs are - * one-shots. This design is based on the observation that it is easier to reschedule jobs on every - * invocation in the style of a tail-recursive loop, as opposed to maintaining persistent state in - * the scheduler. + * After the job is called back, the scheduler forgets about the job completely, unless the application installs it + * again. That is, from the scheduler's point of view *all* jobs are one-shots. This design is based on the observation + * that it is easier to reschedule jobs on every invocation in the style of a tail-recursive loop, as opposed to + * maintaining persistent state in the scheduler. * - * The application must drive the scheduler by calling the work() method in a loop. The work() - * method is *not* thread-safe; the application must either call it from a single thread or - * synchronize calls accordingly. + * The application must drive the scheduler by calling the work() method in a loop. The work() method is *not* + * thread-safe; the application must either call it from a single thread or synchronize calls accordingly. 
*/ public class YASchedulerImpl implements Scheduler { @@ -60,9 +55,7 @@ public class YASchedulerImpl implements Scheduler { /** lock for internal state */ private final Object stateLock = new Object(); - /** - * if non-zero, there is a select() in progress that will terminate at the specified deadline - */ + /** if non-zero, there is a select() in progress that will terminate at the specified deadline */ private long selectingTill = 0; private volatile boolean spinWakeSelector = false; @@ -152,8 +145,7 @@ public YASchedulerImpl(String name, Selector selector, Logger log) throws IOExce this(name, selector, log, true, false); } - public YASchedulerImpl(String name, Selector selector, Logger log, boolean doTimingStats, - boolean doSpinSelect) { + public YASchedulerImpl(String name, Selector selector, Logger log, boolean doTimingStats, boolean doSpinSelect) { this.name = name; this.selector = selector; this.log = log; @@ -163,43 +155,38 @@ public YASchedulerImpl(String name, Selector selector, Logger log, boolean doTim this.timeoutQueue = new JobStateTimeoutQueue(log, 1024); this.invokeCount = Stats.makeItem(name, "invokeCount", Counter.FACTORY, - "The number of jobs invoked for I/O").getValue(); + "The number of jobs invoked for I/O").getValue(); this.timeoutCount = Stats.makeItem(name, "timeoutCount", Counter.FACTORY, - "The number of jobs that have timed out").getValue(); + "The number of jobs that have timed out").getValue(); this.selectDuration = Stats.makeItem(name, "SelectDuration", State.FACTORY, - "The number of microseconds spent in select()").getValue(); + "The number of microseconds spent in select()").getValue(); this.workDuration = Stats.makeItem(name, "WorkDuration", State.FACTORY, - "The number of microseconds between successive select() calls").getValue(); + "The number of microseconds between successive select() calls").getValue(); this.gatheredDuration = Stats.makeItem(name, "GatheredDuration", State.FACTORY, - "The number of microseconds jobs spend 
waiting after being gathered").getValue(); + "The number of microseconds jobs spend waiting after being gathered").getValue(); this.channelInstalls = Stats.makeItem(name, "channelInstalls", Counter.FACTORY, - "The number of installJob() calls with a channel").getValue(); + "The number of installJob() calls with a channel").getValue(); this.timedInstalls = Stats.makeItem(name, "timedInstalls", Counter.FACTORY, - "The number of installJob() calls with just a timeout").getValue(); + "The number of installJob() calls with just a timeout").getValue(); this.jobCancels = Stats.makeItem(name, "jobCancels", Counter.FACTORY, - "The number of cancelJob() calls").getValue(); + "The number of cancelJob() calls").getValue(); this.jobUpdates = Stats.makeItem(name, "jobUpdates", Counter.FACTORY, - "The number of updates applied to the job state pre- and post-select").getValue(); + "The number of updates applied to the job state pre- and post-select").getValue(); this.keyUpdates = Stats.makeItem(name, "keyUpdates", Counter.FACTORY, - "The number of times an NIO SelectionKey was updated with non-zero interest") - .getValue(); + "The number of times an NIO SelectionKey was updated with non-zero interest").getValue(); this.keyOrphans = Stats.makeItem(name, "keyOrphans", Counter.FACTORY, - "The number of times an NIO SelectionKey's interest was cleared").getValue(); + "The number of times an NIO SelectionKey's interest was cleared").getValue(); this.selectorWakeups = Stats.makeItem(name, "selectorWakeups", Counter.FACTORY, - "The number of times the selector had to be woken up").getValue(); + "The number of times the selector had to be woken up").getValue(); - this.channelInterestWakeups = Stats - .makeItem(name, "channelInterestWakeups", Counter.FACTORY, - "The number of selector wakeups due to a change in a channel's interest set") - .getValue(); + this.channelInterestWakeups = Stats.makeItem(name, "channelInterestWakeups", Counter.FACTORY, + "The number of selector wakeups due to a 
change in a channel's interest set").getValue(); this.channelTimeoutWakeups = Stats.makeItem(name, "channelTimeoutWakeups", Counter.FACTORY, - "The number of selector wakeups due to a channel's timeout becoming the earliest") - .getValue(); + "The number of selector wakeups due to a channel's timeout becoming the earliest").getValue(); this.plainTimeoutWakeups = Stats.makeItem(name, "plainTimeoutWakeups", Counter.FACTORY, - "The number of selector wakeups due to a plain timeout becoming the earliest") - .getValue(); + "The number of selector wakeups due to a plain timeout becoming the earliest").getValue(); this.cancelWakeups = Stats.makeItem(name, "cancelWakeups", Counter.FACTORY, - "The number of selector wakeups due to a job cancellation").getValue(); + "The number of selector wakeups due to a job cancellation").getValue(); } /** @@ -224,8 +211,7 @@ public void installJob(Job job, long deadline, SelectableChannel channel, int in } else if (deadline < selectingTill) { wakeup = true; channelTimeoutWakeups.sample(1); - } else if (key.interestOps() != interest - && (channel != state.nextChannel || interest != state.nextOps)) { + } else if (key.interestOps() != interest && (channel != state.nextChannel || interest != state.nextOps)) { wakeup = true; channelInterestWakeups.sample(1); } @@ -239,12 +225,12 @@ public void installJob(Job job, long deadline, SelectableChannel channel, int in if (log.isDebugEnabled()) { log.debug().append(name).append(" installed job ").append(job) - .append(", d=").append(deadline) - .append(", ni=").append(state.nextOps) - // .append(", k=").append(key) - .append(", ki=").append((key == null || !key.isValid() ? 0 : key.interestOps())) - .append(", w=").append(wakeup) - .endl(); + .append(", d=").append(deadline) + .append(", ni=").append(state.nextOps) + // .append(", k=").append(key) + .append(", ki=").append((key == null || !key.isValid() ? 
0 : key.interestOps())) + .append(", w=").append(wakeup) + .endl(); } if (wakeup) { @@ -273,10 +259,8 @@ public void installJob(Job job, long deadline) { state.forgotten = false; final boolean changed = changedState(state); - // Note: We don't need to be concerned with waking up due to channelInterest changes, - // since - // we would have to be reducing the interest set which can only lead to a later wakeup - // time. + // Note: We don't need to be concerned with waking up due to channelInterest changes, since + // we would have to be reducing the interest set which can only lead to a later wakeup time. // if the new deadline is earlier than the current top, wake up the selector boolean wakeup = false; @@ -292,10 +276,10 @@ else if (doSpinSelect) { if (log.isDebugEnabled()) { log.debug().append(name).append(" installed job ").append(job) - .append(", d=").append(deadline) - .append(", w=").append(wakeup) - .append(", c=").append(changed) - .endl(); + .append(", d=").append(deadline) + .append(", w=").append(wakeup) + .append(", c=").append(changed) + .endl(); } timedInstalls.sample(1); @@ -311,7 +295,7 @@ public void cancelJob(Job job) { synchronized (stateLock) { if (log.isDebugEnabled()) { log.debug().append(name).append(" explicitly cancelling ").append(job) - .append(" in YAScheduler.cancelJob").endl(); + .append(" in YAScheduler.cancelJob").endl(); } JobState state = job.getStateFor(this); if (state != null) { @@ -343,8 +327,8 @@ private void dropChannel(JobState state) { if (key.interestOps() != 0) { key.interestOps(0); if (log.isDebugEnabled()) { - log.debug().append(name).append(" setting interest on orphaned key ") - .append(key.toString()).append(" to 0").endl(); + log.debug().append(name).append(" setting interest on orphaned key ").append(key.toString()) + .append(" to 0").endl(); } keyUpdates.sample(1); } @@ -352,9 +336,8 @@ private void dropChannel(JobState state) { } catch (CancelledKeyException x) { // ignore it if (log.isDebugEnabled()) { - 
log.info().append(name) - .append(" got CancelledKeyException while dropping channel ") - .append(state.waitChannel.toString()).endl(); + log.info().append(name).append(" got CancelledKeyException while dropping channel ") + .append(state.waitChannel.toString()).endl(); } } state.waitChannel = null; @@ -370,27 +353,27 @@ private boolean grabChannel(JobState state) { if (key == null) { key = state.nextChannel.register(selector, state.nextOps, state); log.debug().append(name).append(" update ").append(state.job) - .append(": registered channel ").append(state.nextChannel.toString()) - .append(", ni=").append(state.nextOps) - .append(", k=").append(key.toString()) - .endl(); + .append(": registered channel ").append(state.nextChannel.toString()) + .append(", ni=").append(state.nextOps) + .append(", k=").append(key.toString()) + .endl(); } else { key.attach(state); if (key.interestOps() != state.nextOps) { if (log.isDebugEnabled()) { log.debug().append(name).append(" update ").append(state.job) - .append(": setting interest on key ").append(key.toString()) - .append(" to ").append(state.nextOps) - .endl(); + .append(": setting interest on key ").append(key.toString()).append(" to ") + .append(state.nextOps) + .endl(); } key.interestOps(state.nextOps); keyUpdates.sample(1); } else { if (log.isDebugEnabled()) { log.debug().append(name).append(" update ").append(state.job) - .append(": interest on key ").append(key.toString()) - .append(" already at ").append(state.nextOps) - .endl(); + .append(": interest on key ").append(key.toString()).append(" already at ") + .append(state.nextOps) + .endl(); } } } @@ -412,8 +395,8 @@ private boolean grabChannel(JobState state) { // fall through } state.waitChannel = null; - log.error().append(name).append(" tried to register ").append(state.job) - .append(" on closed channel ").append(state.nextChannel.toString()).endl(); + log.error().append(name).append(" tried to register ").append(state.job).append(" on closed channel ") + 
.append(state.nextChannel.toString()).endl(); return false; } @@ -435,11 +418,11 @@ private void update() { key = state.nextChannel.keyFor(selector); } log.debug().append(name).append(" updating job ").append(state.job) - .append(", d=").append(state.nextDeadline) - .append(", ni=").append(state.nextOps) - .append(", k=").append(key == null ? "null" : key.toString()) - .append(", ki=").append(key == null || !key.isValid() ? 0 : key.interestOps()) - .endl(); + .append(", d=").append(state.nextDeadline) + .append(", ni=").append(state.nextOps) + .append(", k=").append(key == null ? "null" : key.toString()) + .append(", ki=").append(key == null || !key.isValid() ? 0 : key.interestOps()) + .endl(); } if (state.gathered) { @@ -447,7 +430,7 @@ private void update() { } else if (state.nextChannel != null && state.nextOps != 0) { if (!grabChannel(state)) { log.error().append(name).append(" cancelling ").append(state.job) - .append(" after failed I/O registration").endl(); + .append(" after failed I/O registration").endl(); timeoutQueue.remove(state); state.cancelled = true; dispatchQueue.add(state); @@ -461,8 +444,7 @@ private void update() { dropChannel(state); timeoutQueue.remove(state); if (log.isDebugEnabled()) { - log.debug().append(name).append(" cancelling ").append(state.job) - .append(" from update()").endl(); + log.debug().append(name).append(" cancelling ").append(state.job).append(" from update()").endl(); } state.cancelled = true; dispatchQueue.add(state); @@ -476,12 +458,10 @@ private void update() { state.nextOps = 0; state.nextDeadline = 0; - assert state.waitChannel == null - || state.waitChannel.keyFor(selector).attachment() == state; + assert state.waitChannel == null || state.waitChannel.keyFor(selector).attachment() == state; } if (log.isDebugEnabled()) { - log.debug().append(name).append(" updated ").append(changedStates.size()) - .append(" jobs").endl(); + log.debug().append(name).append(" updated ").append(changedStates.size()).append(" 
jobs").endl(); } changedStates.clear(); updateClock++; @@ -495,16 +475,15 @@ private void update() { private long computeTimeout(long now, long timeout) { if (!dispatchQueue.isEmpty()) { if (log.isDebugEnabled()) { - log.debug().append(name) - .append(" update: dispatch queue is not empty, setting timeout to zero").endl(); + log.debug().append(name).append(" update: dispatch queue is not empty, setting timeout to zero").endl(); } timeout = 0; } else if (!timeoutQueue.isEmpty()) { JobState next = timeoutQueue.top(); long remain = next.deadline - now; if (log.isDebugEnabled()) { - log.debug().append(name).append(" update: next timeout due in ").append(remain) - .append(" millis: ").append(next.job).endl(); + log.debug().append(name).append(" update: next timeout due in ").append(remain).append(" millis: ") + .append(next.job).endl(); } timeout = Math.max(0, Math.min(timeout, remain)); } @@ -517,8 +496,7 @@ private long computeTimeout(long now, long timeout) { private void select(long timeout) { try { if (log.isDebugEnabled()) { - log.debug().append(name).append(" calling select(").append(timeout).append(")") - .endl(); + log.debug().append(name).append(" calling select(").append(timeout).append(")").endl(); } mark(workDuration); @@ -532,25 +510,23 @@ private void select(long timeout) { mark(selectDuration); } catch (IOException x) { if (java.util.regex.Pattern.matches(".*Operation not permitted.*", x.toString())) { - // There is a documented bug - // (http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6481709) in some - // versions of the epoll selector which causes occasional "Operation not permitted" - // errors to be + // There is a documented bug (http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6481709) in some + // versions of the epoll selector which causes occasional "Operation not permitted" errors to be // thrown. 
log.warn().append(name).append( - " Ignoring 'Operation not permitted' exception, see http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6481709") - .endl(); + " Ignoring 'Operation not permitted' exception, see http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6481709") + .endl(); } else { if (!isClosed()) { - log.fatal(x).append(name).append(" Unexpected IOException in select(): ") - .append(x.getMessage()).endl(); + log.fatal(x).append(name).append(" Unexpected IOException in select(): ").append(x.getMessage()) + .endl(); System.exit(1); } } } catch (ClosedSelectorException x) { if (!isClosed()) { - log.fatal(x).append(name).append(" ClosedSelectorException in select(): ") - .append(x.getMessage()).endl(); + log.fatal(x).append(name).append(" ClosedSelectorException in select(): ").append(x.getMessage()) + .endl(); System.exit(1); } } @@ -559,8 +535,7 @@ private void select(long timeout) { private void spinSelect(long times) { try { if (log.isDebugEnabled()) { - log.debug().append(name).append(" calling spinSelect(").append(times).append(")") - .endl(); + log.debug().append(name).append(" calling spinSelect(").append(times).append(")").endl(); } mark(workDuration); @@ -571,25 +546,23 @@ private void spinSelect(long times) { mark(selectDuration); } catch (IOException x) { if (java.util.regex.Pattern.matches(".*Operation not permitted.*", x.toString())) { - // There is a documented bug - // (http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6481709) in some - // versions of the epoll selector which causes occasional "Operation not permitted" - // errors to be + // There is a documented bug (http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6481709) in some + // versions of the epoll selector which causes occasional "Operation not permitted" errors to be // thrown. 
log.warn().append(name).append( - " Ignoring 'Operation not permitted' exception, see http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6481709") - .endl(); + " Ignoring 'Operation not permitted' exception, see http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6481709") + .endl(); } else { if (!isClosed()) { - log.fatal(x).append(name).append(" Unexpected IOException in spinSelect(): ") - .append(x.getMessage()).endl(); + log.fatal(x).append(name).append(" Unexpected IOException in spinSelect(): ").append(x.getMessage()) + .endl(); System.exit(1); } } } catch (ClosedSelectorException x) { if (!isClosed()) { - log.fatal(x).append(name).append(" ClosedSelectorException in spinSelect(): ") - .append(x.getMessage()).endl(); + log.fatal(x).append(name).append(" ClosedSelectorException in spinSelect(): ").append(x.getMessage()) + .endl(); System.exit(1); } } @@ -612,7 +585,7 @@ private void gather(long now) { if (key.isValid() && key.interestOps() != 0) { if (log.isDebugEnabled()) { log.debug().append(name).append(" clearing interest in orphaned key ") - .append(key.toString()).append(" in YASchedulerImpl.gather").endl(); + .append(key.toString()).append(" in YASchedulerImpl.gather").endl(); } if (key.isValid()) { key.interestOps(0); @@ -628,12 +601,12 @@ private void gather(long now) { dispatchQueue.add(state); timeoutQueue.remove(state); if (log.isDebugEnabled()) { - log.debug().append(name).append(" gather ").append(key.toString()) - .append(" -> ").append(state.job) - .append(", ops=").append(key.readyOps()) - .append(", ki=").append(key.interestOps()) - .append(", dq=").append(dispatchQueue.size()) - .endl(); + log.debug().append(name).append(" gather ").append(key.toString()).append(" -> ") + .append(state.job) + .append(", ops=").append(key.readyOps()) + .append(", ki=").append(key.interestOps()) + .append(", dq=").append(dispatchQueue.size()) + .endl(); } } } catch (CancelledKeyException x) { @@ -658,8 +631,8 @@ private void gather(long now) { 
timeoutCount.sample(numTimeouts); if (log.isDebugEnabled()) { - log.debug().append(name).append(" gathered ").append(numInvokes).append(" for I/O and ") - .append(numTimeouts).append(" timeouts").endl(); + log.debug().append(name).append(" gathered ").append(numInvokes).append(" for I/O and ").append(numTimeouts) + .append(" timeouts").endl(); } } @@ -689,9 +662,9 @@ private boolean dispatch(Procedure.Nullary handoff) { } if (log.isDebugEnabled()) { log.debug().append(name).append(" dispatch ").append(state.job) - .append(", ops=").append(readyOps) - .append(", dq=").append(dispatchQueue.size()) - .endl(); + .append(", ops=").append(readyOps) + .append(", dq=").append(dispatchQueue.size()) + .endl(); } assert readyChannel == null || readyOps != 0; } @@ -722,8 +695,8 @@ private boolean dispatch(Procedure.Nullary handoff) { } } } catch (Throwable x) { - log.fatal(x).append(": unhandled Throwable in dispatch on job [").append(state.job) - .append("]: ").append(x.getMessage()).endl(); + log.fatal(x).append(": unhandled Throwable in dispatch on job [").append(state.job).append("]: ") + .append(x.getMessage()).endl(); throw new RuntimeException(x); } @@ -750,14 +723,13 @@ private void maybeWakeSelector() { } /** - * Wait for jobs to become ready, then invoke() them all. This method will form the core of the - * main loop of a scheduler-driven application. The method first waits until: + * Wait for jobs to become ready, then invoke() them all. This method will form the core of the main loop of a + * scheduler-driven application. The method first waits until: * - * -- the given timeout expires, -- the earliest job-specific timeout expires, or -- one or more - * jobs becomes ready + * -- the given timeout expires, -- the earliest job-specific timeout expires, or -- one or more jobs becomes ready * - * Note that this method is not synchronized. The application must ensure that it is never - * called concurrently by more than one thread. 
+ * Note that this method is not synchronized. The application must ensure that it is never called concurrently by + * more than one thread. * * @return true, if some work was done. */ @@ -784,8 +756,7 @@ public boolean work(long timeout, Procedure.Nullary handoff) { // wait for something to happen select(timeout); - // apply changes while we were waiting, then gather up all of the jobs that can be - // dispatched + // apply changes while we were waiting, then gather up all of the jobs that can be dispatched synchronized (stateLock) { selectingTill = 0; update(); @@ -815,8 +786,7 @@ private boolean spinWork(long times, Procedure.Nullary handoff) { // spin for something to happen spinSelect(times); - // apply changes while we were waiting, then gather up all of the jobs that can be - // dispatched + // apply changes while we were waiting, then gather up all of the jobs that can be dispatched synchronized (stateLock) { selectingTill = 0; update(); @@ -839,9 +809,8 @@ public void close() { try { selector.close(); } catch (IOException x) { - log.warn(x).append(name) - .append(" Scheduler.close: ignoring exception from selector.close(): ") - .append(x.getMessage()).endl(); + log.warn(x).append(name).append(" Scheduler.close: ignoring exception from selector.close(): ") + .append(x.getMessage()).endl(); } } @@ -860,8 +829,8 @@ private void clear() { for (Job j : allJobs) { cancelJob(j); } - log.info().append(name).append(" Scheduler.clear: starting with ").append(allJobs.size()) - .append(" jobs").endl(); + log.info().append(name).append(" Scheduler.clear: starting with ").append(allJobs.size()).append(" jobs") + .endl(); synchronized (stateLock) { update(); } @@ -883,8 +852,7 @@ private void clear() { break; } } catch (Exception x) { - log.warn().append(name).append(" Scheduler.clear: ignoring shutdown exception: ") - .append(x).endl(); + log.warn().append(name).append(" Scheduler.clear: ignoring shutdown exception: ").append(x).endl(); } } 
log.info().append(name).append(" Scheduler.clear: finished").endl(); @@ -906,8 +874,7 @@ private Set getAllJobs() { } for (SelectionKey key : junitGetAllKeys()) { Object attachment; - if (key != null && (attachment = key.attachment()) != null - && attachment instanceof JobState) { + if (key != null && (attachment = key.attachment()) != null && attachment instanceof JobState) { JobState state = (JobState) attachment; if (state.job != null) { result.add(state.job); @@ -992,8 +959,7 @@ public Map junitGetChannelsAndJobs() { Map result = new HashMap(); for (SelectionKey key : junitGetAllKeys()) { Object attachment; - if (key != null && (attachment = key.attachment()) != null - && attachment instanceof JobState) { + if (key != null && (attachment = key.attachment()) != null && attachment instanceof JobState) { JobState state = (JobState) attachment; if (state.job != null) { result.put(key.channel(), ((JobState) attachment).job); diff --git a/IO/src/main/java/io/deephaven/io/streams/BufferedChannelReader.java b/IO/src/main/java/io/deephaven/io/streams/BufferedChannelReader.java index 6ef933a03f2..868d0077fd2 100644 --- a/IO/src/main/java/io/deephaven/io/streams/BufferedChannelReader.java +++ b/IO/src/main/java/io/deephaven/io/streams/BufferedChannelReader.java @@ -21,9 +21,8 @@ public class BufferedChannelReader { private int limit; /** - * Guarantees that each buffer from readNext() will have remaining() >= blockSize [that is, - * until it starts winding down on the end of the file] When it needs to refresh, it will read - * <= readSize from the channel + * Guarantees that each buffer from readNext() will have remaining() >= blockSize [that is, until it starts winding + * down on the end of the file] When it needs to refresh, it will read <= readSize from the channel */ public BufferedChannelReader(final String filename, int blockSize, int readSize) { Require.leq(blockSize, "blockSize", readSize, "readSize"); @@ -34,8 +33,7 @@ public BufferedChannelReader(final String 
filename, int blockSize, int readSize) public ByteBuffer readNext() throws IOException { if (bb == null) { - bb = ByteBuffer.allocate(readSize); // faster to use heap BB since we are going to be - // parsing out of it + bb = ByteBuffer.allocate(readSize); // faster to use heap BB since we are going to be parsing out of it bb.flip(); channel = new FileInputStream(filename).getChannel(); // Channels.newChannel(WFileUtil.openPossiblyCompressedFile(path)); } diff --git a/IO/src/main/java/io/deephaven/io/streams/ByteBufferInputStream.java b/IO/src/main/java/io/deephaven/io/streams/ByteBufferInputStream.java index 89ccfe49cf6..67169001902 100644 --- a/IO/src/main/java/io/deephaven/io/streams/ByteBufferInputStream.java +++ b/IO/src/main/java/io/deephaven/io/streams/ByteBufferInputStream.java @@ -16,11 +16,11 @@ import org.jetbrains.annotations.NotNull; /** - * This is an InputStream implementation which reads from a java.nio.ByteBuffer. If a read operation - * crosses the end of the buffer, the BufferUnderflowException is converted to an EOFException. + * This is an InputStream implementation which reads from a java.nio.ByteBuffer. If a read operation crosses the end of + * the buffer, the BufferUnderflowException is converted to an EOFException. * - * The stream contains no state other than that in in the buffer itself, so the buffer can be - * exchanged at will with the setBuffer() method. + * The stream contains no state other than that in in the buffer itself, so the buffer can be exchanged at will with the + * setBuffer() method. */ public class ByteBufferInputStream extends java.io.InputStream implements DataInput { @@ -30,9 +30,9 @@ public class ByteBufferInputStream extends java.io.InputStream implements DataIn private char[] utfChars; /** - * The DataOutput interface always writes bytes in big-endian order, while ByteBuffer allows the - * order to be big- or little-endian. 
Set this flag true to assume that the buffer is - * bid-endian, or false to check the buffer's order at each write. + * The DataOutput interface always writes bytes in big-endian order, while ByteBuffer allows the order to be big- or + * little-endian. Set this flag true to assume that the buffer is bid-endian, or false to check the buffer's order + * at each write. */ // protected static final boolean ASSUME_BIG_ENDIAN = true; @@ -331,8 +331,7 @@ public String readUTF() throws IOException { final int b2 = buf.get(); final int b3 = buf.get(); if ((b2 & 0xc0) != 0x80 || (b3 & 0xc0) != 0x80) { - throw new UTFDataFormatException( - "malformed second byte " + b2 + " or third byte " + b3); + throw new UTFDataFormatException("malformed second byte " + b2 + " or third byte " + b3); } chars[length++] = (char) (((b1 & 0x0F) << 12) | ((b2 & 0x3F) << 6) | (b3 & 0x3F)); total -= 3; @@ -345,7 +344,7 @@ public String readUTF() throws IOException { } public String readUTF(@NotNull final CharSequenceAdapterBuilder output, - @NotNull final StringCache cache) throws IOException { + @NotNull final StringCache cache) throws IOException { readUTF(output); return cache.getCachedString(output); } @@ -371,8 +370,7 @@ public void readUTF(@NotNull final CharSequenceAdapterBuilder output) throws IOE final int b2 = buf.get(); final int b3 = buf.get(); if ((b2 & 0xc0) != 0x80 || (b3 & 0xc0) != 0x80) { - throw new UTFDataFormatException( - "malformed second byte " + b2 + " or third byte " + b3); + throw new UTFDataFormatException("malformed second byte " + b2 + " or third byte " + b3); } output.append((char) (((b1 & 0x0F) << 12) | ((b2 & 0x3F) << 6) | (b3 & 0x3F))); total -= 3; diff --git a/IO/src/main/java/io/deephaven/io/streams/ByteBufferOutputStream.java b/IO/src/main/java/io/deephaven/io/streams/ByteBufferOutputStream.java index ee1bfe73390..e694a6a4ec6 100644 --- a/IO/src/main/java/io/deephaven/io/streams/ByteBufferOutputStream.java +++ 
b/IO/src/main/java/io/deephaven/io/streams/ByteBufferOutputStream.java @@ -20,13 +20,13 @@ // TODO: very expensive and extremely complex. /** - * This is an OutputStream implementation which places the output into a java.nio.ByteBuffer. The - * constructor accepts an initial buffer and an instance of ByteBufferSink. When an output operation - * would cause the buffer to overflow, it is handed to the sink's acceptBuffer() method, which is - * expected to dispose of the existing contents and return a buffer in which writing can continue. + * This is an OutputStream implementation which places the output into a java.nio.ByteBuffer. The constructor accepts an + * initial buffer and an instance of ByteBufferSink. When an output operation would cause the buffer to overflow, it is + * handed to the sink's acceptBuffer() method, which is expected to dispose of the existing contents and return a buffer + * in which writing can continue. * - * Note that the stream contains no state other than the buffer itself, so the buffer and/or the - * sink can be switched at any time by calling setBuffer() or setSink(). + * Note that the stream contains no state other than the buffer itself, so the buffer and/or the sink can be switched at + * any time by calling setBuffer() or setSink(). 
*/ public class ByteBufferOutputStream extends java.io.OutputStream implements DataOutput { protected volatile ByteBuffer buf; @@ -339,8 +339,8 @@ public ByteBufferOutputStream appendChars(CharSequence s) throws IOException { } @SuppressWarnings("WeakerAccess") - public ByteBufferOutputStream appendChars(CharSequence s, - @SuppressWarnings("SameParameterValue") int position, int len) throws IOException { + public ByteBufferOutputStream appendChars(CharSequence s, @SuppressWarnings("SameParameterValue") int position, + int len) throws IOException { int remaining; while ((remaining = buf.remaining() / 2) < len) { for (int i = 0; i < remaining; ++i) { @@ -361,8 +361,7 @@ public ByteBufferOutputStream appendBytes(CharSequence s) throws IOException { return appendBytes(s, 0, s.length()); } - public ByteBufferOutputStream appendBytes(CharSequence s, int position, int len) - throws IOException { + public ByteBufferOutputStream appendBytes(CharSequence s, int position, int len) throws IOException { int remaining; while ((remaining = buf.remaining()) < len) { for (int i = 0; i < remaining; ++i) { diff --git a/IO/src/main/java/io/deephaven/io/streams/ByteBufferSink.java b/IO/src/main/java/io/deephaven/io/streams/ByteBufferSink.java index bae8bc4be9f..cced4e339c0 100644 --- a/IO/src/main/java/io/deephaven/io/streams/ByteBufferSink.java +++ b/IO/src/main/java/io/deephaven/io/streams/ByteBufferSink.java @@ -9,10 +9,9 @@ public interface ByteBufferSink { /** - * Dispose of the contents of the buffer b, probably by writing them to a channel, and return a - * new buffer in which writing can continue. The returned buffer must have at least need bytes - * of space remaining. The return value may be the same buffer, as long as it's remaining() - * value has been increased to be >= need. + * Dispose of the contents of the buffer b, probably by writing them to a channel, and return a new buffer in which + * writing can continue. 
The returned buffer must have at least need bytes of space remaining. The return value may + * be the same buffer, as long as it's remaining() value has been increased to be >= need. * * @param b the buffer whose contents need to be disposed of. * @return the buffer in which further output should be written. @@ -20,9 +19,9 @@ public interface ByteBufferSink { ByteBuffer acceptBuffer(ByteBuffer b, int need) throws IOException; /** - * Dispose of the contents of the final buffer in an output sequence, probably by writing them - * to a channel. Note that the argument buffer may be empty. Then do whatever it takes to - * release the resources of the sink, probably by closing a channel. + * Dispose of the contents of the final buffer in an output sequence, probably by writing them to a channel. Note + * that the argument buffer may be empty. Then do whatever it takes to release the resources of the sink, probably + * by closing a channel. */ void close(ByteBuffer b) throws IOException; } diff --git a/IO/src/main/java/io/deephaven/io/streams/ByteBufferStreams.java b/IO/src/main/java/io/deephaven/io/streams/ByteBufferStreams.java index 9ca90f600b5..10346ae941c 100644 --- a/IO/src/main/java/io/deephaven/io/streams/ByteBufferStreams.java +++ b/IO/src/main/java/io/deephaven/io/streams/ByteBufferStreams.java @@ -11,10 +11,9 @@ import java.nio.ByteBuffer; /** - * A pair of stream implementations which write to and read from sequences of byte buffers. They are - * guaranteed to be "compatible", that is, a stream of buffers written by the Output stream is - * guaranteed to be readable by the Input stream - given that the client correctly implements the - * source and sink interfaces. + * A pair of stream implementations which write to and read from sequences of byte buffers. 
They are guaranteed to be + * "compatible", that is, a stream of buffers written by the Output stream is guaranteed to be readable by the Input + * stream - given that the client correctly implements the source and sink interfaces. */ public class ByteBufferStreams { @@ -24,10 +23,9 @@ public class ByteBufferStreams { public interface Sink { /** - * Dispose of the contents of the buffer b, probably by writing them to a channel, and - * return a new buffer in which writing can continue. The returned buffer must have at least - * need bytes of space remaining. The return value may be the same buffer, as long as it's - * remaining() value has been increased to be >= need. + * Dispose of the contents of the buffer b, probably by writing them to a channel, and return a new buffer in + * which writing can continue. The returned buffer must have at least need bytes of space remaining. The return + * value may be the same buffer, as long as it's remaining() value has been increased to be >= need. * * @param b the buffer whose contents need to be disposed of. * @return the buffer in which further output should be written. @@ -35,9 +33,9 @@ public interface Sink { ByteBuffer acceptBuffer(ByteBuffer b, int need) throws IOException; /** - * Dispose of the contents of the final buffer in an output sequence, probably by writing - * them to a channel. Note that the argument buffer may be empty. Then do whatever it takes - * to release the resources of the sink, probably by closing a channel. + * Dispose of the contents of the final buffer in an output sequence, probably by writing them to a channel. + * Note that the argument buffer may be empty. Then do whatever it takes to release the resources of the sink, + * probably by closing a channel. 
*/ void close(ByteBuffer b) throws IOException; } @@ -46,10 +44,7 @@ public interface Sink { // Output stream - writes to a sequence of byye buffers // ------------------------------------------------------------------------------------------------------- - public static class Output extends java.io.OutputStream implements DataOutput/* - * , - * WritableByteChannel - */ { + public static class Output extends java.io.OutputStream implements DataOutput/* , WritableByteChannel */ { protected volatile ByteBuffer buf; protected Sink sink; @@ -375,7 +370,7 @@ private void need(int n) throws BufferUnderflowException, EOFException { if (buf.remaining() < n) { if (buf.remaining() != 0) { throw new IllegalStateException( - "Partial primitive, input was not written by ByteBufferOutputStream?"); + "Partial primitive, input was not written by ByteBufferOutputStream?"); } if (next() == null) { throw new BufferUnderflowException(); @@ -384,9 +379,8 @@ private void need(int n) throws BufferUnderflowException, EOFException { } /** - * Get the next buffer from the source, if we have one. Skip empty buffers. The return value - * is either null (signaling the end of the buffer sequence) or a buffer with remaining() > - * 0. + * Get the next buffer from the source, if we have one. Skip empty buffers. The return value is either null + * (signaling the end of the buffer sequence) or a buffer with remaining() > 0. */ private ByteBuffer next() { if (source == null) { @@ -399,8 +393,8 @@ private ByteBuffer next() { } /** - * Return true if we are at EOF. If the return value is true, then there is at least one - * byte immediately available in buf. + * Return true if we are at EOF. If the return value is true, then there is at least one byte immediately + * available in buf. 
*/ private boolean eof() { return buf == null || (buf.remaining() == 0 && next() == null); @@ -640,8 +634,7 @@ public String readUTF() throws IOException { if (b2 == -1 || b3 == -1 || (b2 & 0xc0) != 0x80 || (b3 & 0xc0) != 0x80) { throw new UTFDataFormatException(); } - stringBuffer[length++] = - (char) (((b1 & 0x0F) << 12) | ((b2 & 0x3F) << 6) | (b3 & 0x3F)); + stringBuffer[length++] = (char) (((b1 & 0x0F) << 12) | ((b2 & 0x3F) << 6) | (b3 & 0x3F)); total -= 3; } } diff --git a/IO/src/main/java/io/deephaven/io/streams/CurrentByteBufferSink.java b/IO/src/main/java/io/deephaven/io/streams/CurrentByteBufferSink.java index 7a2fec869a6..ac96194667a 100644 --- a/IO/src/main/java/io/deephaven/io/streams/CurrentByteBufferSink.java +++ b/IO/src/main/java/io/deephaven/io/streams/CurrentByteBufferSink.java @@ -7,24 +7,23 @@ import java.nio.ByteBuffer; /** - * A ByteBufferSink that knows and provides a getter for the last buffer it gave out, to avoid - * unnecessary state in code that uses the buffer. + * A ByteBufferSink that knows and provides a getter for the last buffer it gave out, to avoid unnecessary state in code + * that uses the buffer. */ public interface CurrentByteBufferSink extends ByteBufferSink { /** - * Access the current buffer for this sink. This is either the initial buffer, or the last one - * provided by {@link ByteBufferSink#acceptBuffer(ByteBuffer, int)}) or - * {@link CurrentByteBufferSink#ensureSpace(int)}. + * Access the current buffer for this sink. This is either the initial buffer, or the last one provided by + * {@link ByteBufferSink#acceptBuffer(ByteBuffer, int)}) or {@link CurrentByteBufferSink#ensureSpace(int)}. * * @return The current buffer for this sink */ ByteBuffer getBuffer(); /** - * Return the current buffer, guaranteed to have sufficient space remaining to append the - * requested number of bytes. 
The existing current buffer may be accepted (see - * {@link ByteBufferSink#acceptBuffer(ByteBuffer, int)}) as a side effect of this operation. + * Return the current buffer, guaranteed to have sufficient space remaining to append the requested number of bytes. + * The existing current buffer may be accepted (see {@link ByteBufferSink#acceptBuffer(ByteBuffer, int)}) as a side + * effect of this operation. * * @param need The number of bytes required to proceed * @return The current buffer for further output @@ -39,8 +38,7 @@ default ByteBuffer ensureSpace(final int need) throws IOException { } /** - * Cause the current buffer to be accepted if it has any contents that aren't yet accepted into - * the sink. + * Cause the current buffer to be accepted if it has any contents that aren't yet accepted into the sink. */ default void flush() throws IOException { final ByteBuffer current = getBuffer(); @@ -50,9 +48,8 @@ default void flush() throws IOException { } /** - * Convenience close method. Effectively the same as invoking - * {@link ByteBufferSink#close(ByteBuffer)} with the result of - * {@link CurrentByteBufferSink#getBuffer()}. + * Convenience close method. Effectively the same as invoking {@link ByteBufferSink#close(ByteBuffer)} with the + * result of {@link CurrentByteBufferSink#getBuffer()}. 
*/ default void close() throws IOException { close(getBuffer()); @@ -64,8 +61,7 @@ class Adapter implements CurrentByteBufferSink { private ByteBuffer current; - public Adapter(@NotNull final ByteBufferSink innerSink, - @NotNull final ByteBuffer initialBuffer) { + public Adapter(@NotNull final ByteBufferSink innerSink, @NotNull final ByteBuffer initialBuffer) { this.innerSink = Require.neqNull(innerSink, "innerSink"); this.current = Require.neqNull(initialBuffer, "initialBuffer"); } @@ -76,11 +72,10 @@ public ByteBuffer getBuffer() { } @Override - public ByteBuffer acceptBuffer(@NotNull final ByteBuffer buffer, final int need) - throws IOException { + public ByteBuffer acceptBuffer(@NotNull final ByteBuffer buffer, final int need) throws IOException { if (buffer != current) { throw new UnsupportedOperationException( - "Expected current buffer " + current + ", instead tried to accept " + buffer); + "Expected current buffer " + current + ", instead tried to accept " + buffer); } return current = innerSink.acceptBuffer(current, need); } @@ -88,8 +83,8 @@ public ByteBuffer acceptBuffer(@NotNull final ByteBuffer buffer, final int need) @Override public void close(@NotNull final ByteBuffer buffer) throws IOException { if (buffer != current) { - throw new UnsupportedOperationException("Expected current buffer " + current - + ", instead tried to close with " + buffer); + throw new UnsupportedOperationException( + "Expected current buffer " + current + ", instead tried to close with " + buffer); } innerSink.close(current); current = null; diff --git a/IO/src/main/java/io/deephaven/io/streams/MultiFileInputStream.java b/IO/src/main/java/io/deephaven/io/streams/MultiFileInputStream.java index 56bddd55bd2..6631a16d2b0 100644 --- a/IO/src/main/java/io/deephaven/io/streams/MultiFileInputStream.java +++ b/IO/src/main/java/io/deephaven/io/streams/MultiFileInputStream.java @@ -28,8 +28,7 @@ public DecoratedInputStream(String filename, InputStream inputStream) { private int 
currentStream = -1; /* - * Note that MultiFileInputStream assumes ownership for all streams and will close them when it - * is closed. + * Note that MultiFileInputStream assumes ownership for all streams and will close them when it is closed. */ public MultiFileInputStream(DecoratedInputStream inputStreams[]) { this.inputStreams = inputStreams; diff --git a/IO/src/main/java/io/deephaven/io/streams/SevenZipInputStream.java b/IO/src/main/java/io/deephaven/io/streams/SevenZipInputStream.java index f1966ff1d76..30319834cd7 100644 --- a/IO/src/main/java/io/deephaven/io/streams/SevenZipInputStream.java +++ b/IO/src/main/java/io/deephaven/io/streams/SevenZipInputStream.java @@ -40,12 +40,11 @@ public class SevenZipInputStream extends InputStream { public static final long UINT_TO_LONG = 0xFFFFFFFFL; // 7zHeader.cpp - private static final byte[] SIGNATURE = - {0x37, 0x7A, (byte) 0xBC, (byte) 0xAF, (byte) 0x27, (byte) 0x1C}; + private static final byte[] SIGNATURE = {0x37, 0x7A, (byte) 0xBC, (byte) 0xAF, (byte) 0x27, (byte) 0x1C}; public static final long SIGNATURE_AS_LONG = - (SIGNATURE[0] & UBYTE_TO_LONG) | ((SIGNATURE[1] & UBYTE_TO_LONG) << 8) - | ((SIGNATURE[2] & UBYTE_TO_LONG) << 16) | ((SIGNATURE[3] & UBYTE_TO_LONG) << 24) - | ((SIGNATURE[4] & UBYTE_TO_LONG) << 32) | ((SIGNATURE[5] & UBYTE_TO_LONG) << 40); + (SIGNATURE[0] & UBYTE_TO_LONG) | ((SIGNATURE[1] & UBYTE_TO_LONG) << 8) + | ((SIGNATURE[2] & UBYTE_TO_LONG) << 16) | ((SIGNATURE[3] & UBYTE_TO_LONG) << 24) + | ((SIGNATURE[4] & UBYTE_TO_LONG) << 32) | ((SIGNATURE[5] & UBYTE_TO_LONG) << 40); private static final int SIGNATURE_LENGTH = SIGNATURE.length; // 7zHeader.h private static final byte ARCHIVE_VER_MAJOR = 0; // 7zHeader.h @@ -93,13 +92,11 @@ private interface BlockType { * An archive consists of: *

      *
    1. A list of packed streams. - *
    2. A list of folders, each of which consumes one or more of the packed streams (in order) - * and produces one output unpacked stream. - *
    3. A list of substream counts and lengths. Each unpacked stream (in order) is split into one - * or more substreams. - *
    4. A list of files. Each file (in order) may or may not consume one substream. (Directories - * and anit-files do not consume a stream.) Files have things like names, timestamps, - * attributes, etc. + *
    5. A list of folders, each of which consumes one or more of the packed streams (in order) and produces one + * output unpacked stream. + *
    6. A list of substream counts and lengths. Each unpacked stream (in order) is split into one or more substreams. + *
    7. A list of files. Each file (in order) may or may not consume one substream. (Directories and anit-files do + * not consume a stream.) Files have things like names, timestamps, attributes, etc. *
    */ // 7zItem.h @@ -210,8 +207,8 @@ private void fillFolderStartFileIndex() throws ZipException { } public long getFolderStreamPos(int folderIndex, int indexInFolder) { - return ArchiveInfo.DataStartPosition + PackStreamStartPositions - .get(FolderStartPackStreamIndex.get(folderIndex) + indexInFolder); + return ArchiveInfo.DataStartPosition + + PackStreamStartPositions.get(FolderStartPackStreamIndex.get(folderIndex) + indexInFolder); } public long getFolderFullPackSize(int folderIndex) { @@ -260,40 +257,36 @@ public void clear() { } /** - * A Folder is one compressed chunk of data. A folder has one codec and the cyphertext is a - * small number of packed streams (usually one). Since the plaintext is one stream, a folder has - * one CRC. The folder's plaintext stream will often be the concatenation of a bunch of files, - * but the Folder knows nothing of this. + * A Folder is one compressed chunk of data. A folder has one codec and the cyphertext is a small number of packed + * streams (usually one). Since the plaintext is one stream, a folder has one CRC. The folder's plaintext stream + * will often be the concatenation of a bunch of files, but the Folder knows nothing of this. *

    - * A codec (my term) is a small graph of coders. A coder does a transform from n - * input streams to m output streams. Bind pairs attach the output stream of one coder to - * the input stream of another coder. A codec always has one (unbound) output stream, but can - * have many (unboud) input streams. The most common codec consists of one coder with one input - * stream, one output stream, and no bind pairs. + * A codec (my term) is a small graph of coders. A coder does a transform from n input streams to m + * output streams. Bind pairs attach the output stream of one coder to the input stream of another coder. A + * codec always has one (unbound) output stream, but can have many (unboud) input streams. The most common codec + * consists of one coder with one input stream, one output stream, and no bind pairs. * *

    - * Input streams and output streams are numbered in the order the coders are listed. PackStreams - * is used to map from the (implied) list of packed (input) streams in the archive to the input - * streams of the coders. + * Input streams and output streams are numbered in the order the coders are listed. PackStreams is used to map from + * the (implied) list of packed (input) streams in the archive to the input streams of the coders. */ // 7zItem.h private static class Folder { /** - * List of coders. Input and output stream indices (as referenced by the BindPairs) are - * defined by the order of this list. + * List of coders. Input and output stream indices (as referenced by the BindPairs) are defined by the order of + * this list. */ List Coders = new LinkedList(); /** - * List of BindPairs. Bind pairs attach the output stream of one coder to the input stream - * of another coder. Stream indices are defined by the coders list. + * List of BindPairs. Bind pairs attach the output stream of one coder to the input stream of another coder. + * Stream indices are defined by the coders list. 
*/ List BindPairs = new LinkedList(); /** - * Map [ packed stream index (in this folder) -> input stream index (in the list of - * coders) ] + * Map [ packed stream index (in this folder) -> input stream index (in the list of coders) ] */ List PackStreams = new LinkedList(); @@ -458,7 +451,7 @@ private static class InArchive { // 7zIn.cpp public void readDatabase(InputStream inputStream, ArchiveDatabaseEx database, - InputStreamFactory inputStreamFactory, int nBeginStreamPosition) throws IOException { + InputStreamFactory inputStreamFactory, int nBeginStreamPosition) throws IOException { database.clear(); database.ArchiveInfo.StartPosition = nBeginStreamPosition; @@ -479,8 +472,8 @@ public void readDatabase(InputStream inputStream, ArchiveDatabaseEx database, long nNextHeaderSize = byteBuffer.getLong(); int nNextHeaderCrc = byteBuffer.getInt(); - database.ArchiveInfo.StartPositionAfterHeader = database.ArchiveInfo.StartPosition - + VERSION_INFO_LENGTH + START_HEADER_CRC_LENGTH + START_HEADER_LENGTH; + database.ArchiveInfo.StartPositionAfterHeader = database.ArchiveInfo.StartPosition + VERSION_INFO_LENGTH + + START_HEADER_CRC_LENGTH + START_HEADER_LENGTH; if (nStartHeaderCrc != crc.GetDigest()) { throw new ZipException("Header CRC mismatch."); @@ -516,13 +509,11 @@ public void readDatabase(InputStream inputStream, ArchiveDatabaseEx database, throw new ZipException("Bad block type in header."); } Reference startPositionAfterHeaderRef = - new Reference(database.ArchiveInfo.StartPositionAfterHeader); - Reference dataStartPosition2Ref = - new Reference(database.ArchiveInfo.DataStartPosition2); - readAndDecodePackedStreams(byteBuffer, startPositionAfterHeaderRef, - dataStartPosition2Ref, dataVector, inputStreamFactory); - database.ArchiveInfo.StartPositionAfterHeader = - startPositionAfterHeaderRef.getValue(); + new Reference(database.ArchiveInfo.StartPositionAfterHeader); + Reference dataStartPosition2Ref = new Reference(database.ArchiveInfo.DataStartPosition2); + 
readAndDecodePackedStreams(byteBuffer, startPositionAfterHeaderRef, dataStartPosition2Ref, dataVector, + inputStreamFactory); + database.ArchiveInfo.StartPositionAfterHeader = startPositionAfterHeaderRef.getValue(); database.ArchiveInfo.DataStartPosition2 = dataStartPosition2Ref.getValue(); if (dataVector.isEmpty()) { return; @@ -538,7 +529,7 @@ public void readDatabase(InputStream inputStream, ArchiveDatabaseEx database, // 7zIn.cpp private void readHeader(ByteBuffer byteBuffer, ArchiveDatabaseEx database, - InputStreamFactory inputStreamFactory) throws IOException { + InputStreamFactory inputStreamFactory) throws IOException { long nBlockType = readId(byteBuffer); if (BlockType.ARCHIVE_PROPERTIES == nBlockType) { @@ -550,16 +541,13 @@ private void readHeader(ByteBuffer byteBuffer, ArchiveDatabaseEx database, if (BlockType.ADDITIONAL_STREAMS_INFO == nBlockType) { Reference startPositionAfterHeaderRef = - new Reference(database.ArchiveInfo.StartPositionAfterHeader); - Reference dataStartPosition2Ref = - new Reference(database.ArchiveInfo.DataStartPosition2); - readAndDecodePackedStreams(byteBuffer, startPositionAfterHeaderRef, - dataStartPosition2Ref, dataVector, inputStreamFactory); - database.ArchiveInfo.StartPositionAfterHeader = - startPositionAfterHeaderRef.getValue(); + new Reference(database.ArchiveInfo.StartPositionAfterHeader); + Reference dataStartPosition2Ref = new Reference(database.ArchiveInfo.DataStartPosition2); + readAndDecodePackedStreams(byteBuffer, startPositionAfterHeaderRef, dataStartPosition2Ref, dataVector, + inputStreamFactory); + database.ArchiveInfo.StartPositionAfterHeader = startPositionAfterHeaderRef.getValue(); database.ArchiveInfo.DataStartPosition2 = dataStartPosition2Ref.getValue(); - database.ArchiveInfo.DataStartPosition2 += - database.ArchiveInfo.StartPositionAfterHeader; + database.ArchiveInfo.DataStartPosition2 += database.ArchiveInfo.StartPositionAfterHeader; nBlockType = readId(byteBuffer); } @@ -567,11 +555,9 @@ private void 
readHeader(ByteBuffer byteBuffer, ArchiveDatabaseEx database, List digests = new LinkedList(); if (BlockType.MAIN_STREAMS_INFO == nBlockType) { - Reference dataStartPositionRef = - new Reference(database.ArchiveInfo.DataStartPosition); - readStreamsInfo(byteBuffer, dataVector, dataStartPositionRef, database.PackSizes, - database.PackCRCs, database.Folders, database.NumUnpackStreamsVector, - unPackSizes, digests); + Reference dataStartPositionRef = new Reference(database.ArchiveInfo.DataStartPosition); + readStreamsInfo(byteBuffer, dataVector, dataStartPositionRef, database.PackSizes, database.PackCRCs, + database.Folders, database.NumUnpackStreamsVector, unPackSizes, digests); database.ArchiveInfo.DataStartPosition = dataStartPositionRef.getValue(); nBlockType = readId(byteBuffer); } else { @@ -681,8 +667,7 @@ private void readHeader(ByteBuffer byteBuffer, ArchiveDatabaseEx database, break; } default: { - database.ArchiveInfo.FileInfoPopIDs - .remove(database.ArchiveInfo.FileInfoPopIDs.size() - 1); + database.ArchiveInfo.FileInfoPopIDs.remove(database.ArchiveInfo.FileInfoPopIDs.size() - 1); skipData(byteBuffer, size); } } @@ -710,8 +695,8 @@ private void readHeader(ByteBuffer byteBuffer, ArchiveDatabaseEx database, } // 7zIn.cpp - private void readTime(ByteBuffer byteBuffer, List alternateByteBuffers, - List files, long type) throws ZipException { + private void readTime(ByteBuffer byteBuffer, List alternateByteBuffers, List files, + long type) throws ZipException { List boolVector = new ArrayList(files.size()); readBoolVector2(byteBuffer, files.size(), boolVector); @@ -764,8 +749,8 @@ private void readFileNames(ByteBuffer byteBuffer, List files) { // 7zIn.cpp private void readAndDecodePackedStreams(ByteBuffer byteBuffer, Reference baseOffset, - Reference dataOffset, List dataVector, - InputStreamFactory inputStreamFactory) throws IOException { + Reference dataOffset, List dataVector, InputStreamFactory inputStreamFactory) + throws IOException { List packSizes = 
new LinkedList(); List packCRCs = new LinkedList(); List folders = new LinkedList(); @@ -774,8 +759,8 @@ private void readAndDecodePackedStreams(ByteBuffer byteBuffer, Reference b List unPackSizes = new LinkedList(); List digests = new LinkedList(); - readStreamsInfo(byteBuffer, null, dataOffset, packSizes, packCRCs, folders, - numUnPackStreamsInFolders, unPackSizes, digests); + readStreamsInfo(byteBuffer, null, dataOffset, packSizes, packCRCs, folders, numUnPackStreamsInFolders, + unPackSizes, digests); int packIndex = 0; Decoder decoder = new Decoder(); @@ -793,8 +778,8 @@ private void readAndDecodePackedStreams(ByteBuffer byteBuffer, Reference b ByteArrayOutputStream outStream = new ByteArrayOutputStream((int) unPackSize); decoder.Decode(inputStreamFactory, dataStartPos, - packSizes.subList(packIndex, packSizes.size()), folder, - new SequentialOutStreamWrapper(outStream), null); + packSizes.subList(packIndex, packSizes.size()), folder, + new SequentialOutStreamWrapper(outStream), null); byte[] bytes = outStream.toByteArray(); if (null != folder.UnpackCRC) { @@ -825,10 +810,10 @@ private void readArchiveProperties(ByteBuffer byteBuffer, InArchiveInfo archiveI // 7zIn.cpp private void readStreamsInfo(ByteBuffer byteBuffer, List alternateByteBuffers, - Reference dataOffsetRef, - List packedStreamSizes, List packedStreamDigests, List folders, - List numUnpackStreamsInFolders, List unpackedStreamSizes, - List digests) throws ZipException { + Reference dataOffsetRef, + List packedStreamSizes, List packedStreamDigests, List folders, + List numUnpackStreamsInFolders, List unpackedStreamSizes, List digests) + throws ZipException { while (true) { long nBlockType = readId(byteBuffer); if (BlockType.END == nBlockType) { @@ -838,8 +823,7 @@ private void readStreamsInfo(ByteBuffer byteBuffer, List alternateBy } else if (BlockType.UNPACK_INFO == nBlockType) { readUnpackInfo(byteBuffer, alternateByteBuffers, folders); } else if (BlockType.SUBSTREAMS_INFO == nBlockType) { - 
readSubstreamsInfo(byteBuffer, folders, numUnpackStreamsInFolders, - unpackedStreamSizes, digests); + readSubstreamsInfo(byteBuffer, folders, numUnpackStreamsInFolders, unpackedStreamSizes, digests); } else { throw new ZipException("Bad block type in header."); } @@ -848,8 +832,8 @@ private void readStreamsInfo(ByteBuffer byteBuffer, List alternateBy // 7zIn.cpp private void readSubstreamsInfo(ByteBuffer byteBuffer, List folders, - List numUnpackStreamsInFolders, List unpackedStreamSizes, - List digests) throws ZipException { + List numUnpackStreamsInFolders, List unpackedStreamSizes, List digests) + throws ZipException { numUnpackStreamsInFolders.clear(); digests.clear(); // (not in original code) long type; @@ -940,8 +924,8 @@ private void readSubstreamsInfo(ByteBuffer byteBuffer, List folders, } // 7zIn.cpp - private void readUnpackInfo(ByteBuffer byteBuffer, List alternateByteBuffers, - List folders) throws ZipException { + private void readUnpackInfo(ByteBuffer byteBuffer, List alternateByteBuffers, List folders) + throws ZipException { skipToBlockType(byteBuffer, BlockType.FOLDER); int numFolders = readNum(byteBuffer); @@ -1048,8 +1032,8 @@ private void readNextFolderItem(ByteBuffer byteBuffer, Folder folder) throws Zip } // 7zIn.cpp (CStreamSwitch::Set(CInArchive *, const CObjectVector *) - private ByteBuffer chooseStream(ByteBuffer byteBuffer, - List alternateByteBuffers) throws ZipException { + private ByteBuffer chooseStream(ByteBuffer byteBuffer, List alternateByteBuffers) + throws ZipException { if (0 == byteBuffer.get()) { return byteBuffer; } else { @@ -1058,8 +1042,8 @@ private ByteBuffer chooseStream(ByteBuffer byteBuffer, } // 7zIn.cpp - private void readPackInfo(ByteBuffer byteBuffer, Reference dataOffsetRef, - List packSizes, List packCRCs) throws ZipException { + private void readPackInfo(ByteBuffer byteBuffer, Reference dataOffsetRef, List packSizes, + List packCRCs) throws ZipException { dataOffsetRef.setValue(readNumber(byteBuffer)); int 
numPackStreams = readNum(byteBuffer); skipToBlockType(byteBuffer, BlockType.SIZE); @@ -1195,8 +1179,7 @@ private long readNumber(ByteBuffer byteBuffer) { private interface ISequentialOutStream { } - private static class SequentialOutStreamWrapper extends Reference - implements ISequentialOutStream { + private static class SequentialOutStreamWrapper extends Reference implements ISequentialOutStream { public SequentialOutStreamWrapper(OutputStream value) { super(value); } @@ -1205,8 +1188,7 @@ public SequentialOutStreamWrapper(OutputStream value) { private interface ISequentialInStream { } - private static class SequentialInStreamWrapper extends Reference - implements ISequentialInStream { + private static class SequentialInStreamWrapper extends Reference implements ISequentialInStream { public SequentialInStreamWrapper(InputStream value) { super(value); } @@ -1237,8 +1219,7 @@ public void AddCoder(ICompressCoder decoder) { m_compressCoders.add(decoder); } - public void SetCoderInfo(int nCoderIndex, List> packSizes, - List> unpackSizes) { + public void SetCoderInfo(int nCoderIndex, List> packSizes, List> unpackSizes) { Assert.eq(nCoderIndex, "nCoderIndex", m_packSizes.size(), "m_packSizes.size()"); Assert.eq(packSizes.size(), "packSizes.size()", 1); Assert.eq(unpackSizes.size(), "unpackSizes.size()", 1); @@ -1250,21 +1231,19 @@ public void SetCoderInfo(int nCoderIndex, List> packSizes, m_unpackSizes.add(unpackSizesInner.get(0)); } - public void Code(ISequentialInStream inStream, ISequentialOutStream outStream, long inSize, - long outSize, ICompressProgressInfo progress) { + public void Code(ISequentialInStream inStream, ISequentialOutStream outStream, long inSize, long outSize, + ICompressProgressInfo progress) { Assert.statementNeverExecuted(); } - public void Code(List inStreams, List> inSizes, - int nInStreams, List outStreams, List> outSizes, - int nOutStreams, ICompressProgressInfo progress) throws IOException { + public void Code(List inStreams, List> inSizes, 
int nInStreams, + List outStreams, List> outSizes, int nOutStreams, + ICompressProgressInfo progress) throws IOException { Assert.eq(inStreams.size(), "inStreams.size()", nInStreams, "nInStreams"); Assert.eq(outStreams.size(), "outStreams.size()", nOutStreams, "nOutStreams"); - Assert.eq(nOutStreams, "nOutStreams", m_compressCoders.size(), - "m_compressCoders.size()"); + Assert.eq(nOutStreams, "nOutStreams", m_compressCoders.size(), "m_compressCoders.size()"); Assert.eq(nOutStreams, "nOutStreams", nInStreams, "nInStreams"); - Assert.eq(m_compressCoders.size(), "m_compressCoders.size()", m_packSizes.size(), - "m_packSizes.size()"); + Assert.eq(m_compressCoders.size(), "m_compressCoders.size()", m_packSizes.size(), "m_packSizes.size()"); Iterator compressCodersItr = m_compressCoders.iterator(); Iterator inStreamsItr = inStreams.iterator(); @@ -1273,16 +1252,15 @@ public void Code(List inStreams, List> inSizes, Iterator unpackSizesItr = m_unpackSizes.iterator(); while (compressCodersItr.hasNext()) { - compressCodersItr.next().Code(inStreamsItr.next(), outStreamsItr.next(), - packSizesItr.next(), unpackSizesItr.next(), progress); + compressCodersItr.next().Code(inStreamsItr.next(), outStreamsItr.next(), packSizesItr.next(), + unpackSizesItr.next(), progress); } } } private static class LzmaWrapper implements ICompressCoder, ICompressSetDecoderProperties2 { - private final SevenZip.Compression.LZMA.Decoder decoder = - new SevenZip.Compression.LZMA.Decoder(); + private final SevenZip.Compression.LZMA.Decoder decoder = new SevenZip.Compression.LZMA.Decoder(); public void setDecoderProperties(byte[] properties) throws ZipException { if (false == decoder.SetDecoderProperties(properties)) { @@ -1290,10 +1268,10 @@ public void setDecoderProperties(byte[] properties) throws ZipException { } } - public void Code(ISequentialInStream inStream, ISequentialOutStream outStream, long inSize, - long outSize, ICompressProgressInfo progress) throws IOException { + public void 
Code(ISequentialInStream inStream, ISequentialOutStream outStream, long inSize, long outSize, + ICompressProgressInfo progress) throws IOException { if (false == decoder.Code(((Reference) inStream).getValue(), - ((Reference) outStream).getValue(), outSize)) { + ((Reference) outStream).getValue(), outSize)) { throw new ZipException("Bad compressed data."); } } @@ -1311,15 +1289,15 @@ private interface ICompressProgressInfo { // ICoder.h private interface ICompressCoder { - void Code(ISequentialInStream inStream, ISequentialOutStream outStream, long inSize, - long outSize, ICompressProgressInfo progress) throws IOException; + void Code(ISequentialInStream inStream, ISequentialOutStream outStream, long inSize, long outSize, + ICompressProgressInfo progress) throws IOException; } // ICoder.h private interface ICompressCoder2 extends ICompressCoder { void Code(List inStreams, List> inSizes, int nInStreams, - List outStreams, List> outSizes, int nOutStreams, - ICompressProgressInfo progress) throws IOException; + List outStreams, List> outSizes, int nOutStreams, + ICompressProgressInfo progress) throws IOException; } // 7zDecode.cpp @@ -1338,8 +1316,7 @@ private static class BindInfo { List InStreams = new LinkedList(); List OutStreams = new LinkedList(); - void getNumStreams(Reference numInStreamsRef, - Reference numOutStreamsRef) { + void getNumStreams(Reference numInStreamsRef, Reference numOutStreamsRef) { int numInStreams = 0; int numOutStreams = 0; for (CoderStreamsInfo coderStreamsInfo : Coders) { @@ -1384,8 +1361,8 @@ int getCoderOutStreamIndex(int coderIndex) { return streamIndex; } - void findInStream(int streamIndex, Reference coderIndexRef, - Reference coderStreamIndexRef) throws ZipException { + void findInStream(int streamIndex, Reference coderIndexRef, Reference coderStreamIndexRef) + throws ZipException { int coderIndex; int coderStreamIndex; for (coderIndex = 0; coderIndex < (int) Coders.size(); coderIndex++) { @@ -1401,8 +1378,8 @@ void findInStream(int 
streamIndex, Reference coderIndexRef, throw new ZipException(); } - void findOutStream(int streamIndex, Reference coderIndexRef, - Reference coderStreamIndexRef) throws ZipException { + void findOutStream(int streamIndex, Reference coderIndexRef, Reference coderStreamIndexRef) + throws ZipException { int coderIndex; int coderStreamIndex; for (coderIndex = 0; coderIndex < (int) Coders.size(); coderIndex++) { @@ -1436,9 +1413,9 @@ private static class Decoder { List _decoders = new LinkedList(); // 7zDecode.cpp - public void Decode(InputStreamFactory inputStreamFactory, long startPos, - List packSizes, Folder folderInfo, ISequentialOutStream outStream, - ICompressProgressInfo compressProgress) throws IOException { + public void Decode(InputStreamFactory inputStreamFactory, long startPos, List packSizes, + Folder folderInfo, ISequentialOutStream outStream, ICompressProgressInfo compressProgress) + throws IOException { List inStreams = new LinkedList(); try { @@ -1447,8 +1424,7 @@ public void Decode(InputStreamFactory inputStreamFactory, long startPos, skipFully(inputStream, startPos); startPos += packSizes.get(j); - LimitedInputStream streamSpec = - new LimitedInputStream(inputStream, packSizes.get(j)); + LimitedInputStream streamSpec = new LimitedInputStream(inputStream, packSizes.get(j)); inStreams.add(new SequentialInStreamWrapper(streamSpec)); } @@ -1514,8 +1490,7 @@ public void Decode(InputStreamFactory inputStreamFactory, long startPos, Object decoder = _decoders.get(coderIndex); if (decoder instanceof ICompressSetDecoderProperties2) { - ICompressSetDecoderProperties2 setDecoderProperties = - (ICompressSetDecoderProperties2) decoder; + ICompressSetDecoderProperties2 setDecoderProperties = (ICompressSetDecoderProperties2) decoder; byte[] properties = altCoderInfo.Properties; if (null != properties && properties.length > 0) { @@ -1531,16 +1506,15 @@ public void Decode(InputStreamFactory inputStreamFactory, long startPos, List> unPackSizesPointers = new 
ArrayList>(numOutStreams); int j; for (j = 0; j < numOutStreams; j++, unPackStreamIndex++) { - unPackSizesPointers.add(folderInfo.UnpackSizes.subList(unPackStreamIndex, - folderInfo.UnpackSizes.size())); + unPackSizesPointers + .add(folderInfo.UnpackSizes.subList(unPackStreamIndex, folderInfo.UnpackSizes.size())); } for (j = 0; j < numInStreams; j++, packStreamIndex++) { int bindPairIndex = folderInfo.findBindPairForInStream(packStreamIndex); if (bindPairIndex >= 0) { packSizesPointers.add(folderInfo.UnpackSizes.subList( - folderInfo.BindPairs.get(bindPairIndex).OutIndex, - folderInfo.UnpackSizes.size())); + folderInfo.BindPairs.get(bindPairIndex).OutIndex, folderInfo.UnpackSizes.size())); } else { int index = folderInfo.findPackStreamArrayIndex(packStreamIndex); if (index < 0) { @@ -1557,20 +1531,18 @@ public void Decode(InputStreamFactory inputStreamFactory, long startPos, // bindInfo.findOutStream(bindInfo.OutStreams.get(0), mainCoderRef, tempRef); // int mainCoder=mainCoderRef.getValue(); // int temp=tempRef.getValue(); - // _mixerCoderMTSpec.SetProgressCoderIndex(mainCoder); // set which coder in the - // graph best represents the actual progress of the entire + // _mixerCoderMTSpec.SetProgressCoderIndex(mainCoder); // set which coder in the graph best represents + // the actual progress of the entire if (numCoders == 0) { return; } - List inStreamPointers = - new ArrayList(inStreams.size()); + List inStreamPointers = new ArrayList(inStreams.size()); for (i = 0; i < inStreams.size(); i++) { inStreamPointers.add(inStreams.get(i)); } List outStreamPointer = Collections.singletonList(outStream); - _mixerCoder.Code(inStreamPointers, null, inStreams.size(), outStreamPointer, null, - 1, compressProgress); + _mixerCoder.Code(inStreamPointers, null, inStreams.size(), outStreamPointer, null, 1, compressProgress); } finally { for (ISequentialInStream sequentialInStream : inStreams) { @@ -1672,8 +1644,8 @@ private static class ExtractFolderInfo { // 
7zFolderOutStream.h,.cpp private static class FolderOutStream implements ISequentialOutStream { - public void Init(ArchiveDatabaseEx database, int nUnderlyingId, int startIndex, - List extractStatuses, boolean testMode) {} + public void Init(ArchiveDatabaseEx database, int nUnderlyingId, int startIndex, List extractStatuses, + boolean testMode) {} } // 7zHandler.h (ported for educational purposes, not used) @@ -1703,8 +1675,7 @@ public void Extract(int[] indices, int numItems, boolean testMode) throws IOExce return; } - // build up the list of which folders and which streams (files) in those folders to - // extract + // build up the list of which folders and which streams (files) in those folders to extract List extractFolderInfoVector = new LinkedList(); for (int i = 0; i < numItems; i++) { int fileIndex = allFilesMode ? i : indices[i]; @@ -1713,21 +1684,18 @@ public void Extract(int[] indices, int numItems, boolean testMode) throws IOExce extractFolderInfoVector.add(new ExtractFolderInfo(fileIndex, NUM_NO_INDEX)); continue; } - if (extractFolderInfoVector.isEmpty() || folderIndex != extractFolderInfoVector - .get(extractFolderInfoVector.size() - 1).FolderIndex) { + if (extractFolderInfoVector.isEmpty() + || folderIndex != extractFolderInfoVector.get(extractFolderInfoVector.size() - 1).FolderIndex) { extractFolderInfoVector.add(new ExtractFolderInfo(NUM_NO_INDEX, folderIndex)); Folder folderInfo = m_database.Folders.get(folderIndex); long unPackSize = folderInfo.getUnpackSize(); - extractFolderInfoVector.get(extractFolderInfoVector.size() - 1).UnPackSize = - unPackSize; + extractFolderInfoVector.get(extractFolderInfoVector.size() - 1).UnPackSize = unPackSize; } - ExtractFolderInfo efi = - extractFolderInfoVector.get(extractFolderInfoVector.size() - 1); + ExtractFolderInfo efi = extractFolderInfoVector.get(extractFolderInfoVector.size() - 1); int startIndex = m_database.FolderStartFileIndex.get(folderIndex); - for (int index = efi.ExtractStatuses.size(); index <= 
fileIndex - - startIndex; index++) { + for (int index = efi.ExtractStatuses.size(); index <= fileIndex - startIndex; index++) { efi.ExtractStatuses.add(index == fileIndex - startIndex); } } @@ -1737,8 +1705,8 @@ public void Extract(int[] indices, int numItems, boolean testMode) throws IOExce long currentImportantTotalUnPacked = 0; long totalFolderUnPacked; - for (int i = 0; i < extractFolderInfoVector - .size(); i++, currentImportantTotalUnPacked += totalFolderUnPacked) { + for (int i = 0; i < extractFolderInfoVector.size(); i++, currentImportantTotalUnPacked += + totalFolderUnPacked) { ExtractFolderInfo efi = extractFolderInfoVector.get(i); totalFolderUnPacked = efi.UnPackSize; @@ -1766,12 +1734,12 @@ public void Extract(int[] indices, int numItems, boolean testMode) throws IOExce long folderStartPackPos = database.getFolderStreamPos(folderIndex, 0); decoder.Decode( - m_inputStreamFactory, - folderStartPackPos, - database.PackSizes.subList(packStreamIndex, database.PackSizes.size()), - folderInfo, - folderOutStream, - null); + m_inputStreamFactory, + folderStartPackPos, + database.PackSizes.subList(packStreamIndex, database.PackSizes.size()), + folderInfo, + folderOutStream, + null); } } } @@ -1780,8 +1748,7 @@ public void Extract(int[] indices, int numItems, boolean testMode) throws IOExce // input stream helper routines // ---------------------------------------------------------------- - private static ByteBuffer readToByteBuffer(InputStream inputStream, int nLength) - throws IOException { + private static ByteBuffer readToByteBuffer(InputStream inputStream, int nLength) throws IOException { byte[] bytes = new byte[nLength]; int nOffset = 0; while (nLength > 0) { @@ -1800,8 +1767,7 @@ private static ByteBuffer readToByteBuffer(InputStream inputStream, int nLength) private static void skipFully(InputStream inputStream, long nLength) throws IOException { while (nLength > 0) { long nSkipped = inputStream.skip(nLength); - // note: postcodition of skip is wider than 
that of read! 0 means eof or ...? Let's - // assume EOF or worse. + // note: postcodition of skip is wider than that of read! 0 means eof or ...? Let's assume EOF or worse. if (nSkipped < 1) { throw new EOFException(); } @@ -1852,8 +1818,7 @@ public void Init() { m_HighCoder.Init(); } - public int Decode(SevenZip.Compression.RangeCoder.Decoder rangeDecoder, int posState) - throws IOException { + public int Decode(SevenZip.Compression.RangeCoder.Decoder rangeDecoder, int posState) throws IOException { if (rangeDecoder.DecodeBit(m_Choice, 0) == 0) { return m_LowCoder[posState].Decode(rangeDecoder); } @@ -1877,8 +1842,7 @@ public void Init() { SevenZip.Compression.RangeCoder.Decoder.InitBitModels(m_Decoders); } - public byte DecodeNormal(SevenZip.Compression.RangeCoder.Decoder rangeDecoder) - throws IOException { + public byte DecodeNormal(SevenZip.Compression.RangeCoder.Decoder rangeDecoder) throws IOException { int symbol = 1; do { symbol = (symbol << 1) | rangeDecoder.DecodeBit(m_Decoders, symbol); @@ -1886,15 +1850,13 @@ public byte DecodeNormal(SevenZip.Compression.RangeCoder.Decoder rangeDecoder) return (byte) symbol; } - public byte DecodeWithMatchByte( - SevenZip.Compression.RangeCoder.Decoder rangeDecoder, byte matchByte) - throws IOException { + public byte DecodeWithMatchByte(SevenZip.Compression.RangeCoder.Decoder rangeDecoder, byte matchByte) + throws IOException { int symbol = 1; do { int matchBit = (matchByte >> 7) & 1; matchByte <<= 1; - int bit = - rangeDecoder.DecodeBit(m_Decoders, ((1 + matchBit) << 8) + symbol); + int bit = rangeDecoder.DecodeBit(m_Decoders, ((1 + matchBit) << 8) + symbol); symbol = (symbol << 1) | bit; if (matchBit != bit) { while (symbol < 0x100) { @@ -1913,8 +1875,7 @@ public byte DecodeWithMatchByte( private int m_PosMask; public void Create(int numPosBits, int numPrevBits) { - if (m_Coders != null && m_NumPrevBits == numPrevBits - && m_NumPosBits == numPosBits) { + if (m_Coders != null && m_NumPrevBits == numPrevBits && 
m_NumPosBits == numPosBits) { return; } m_NumPosBits = numPosBits; @@ -1935,43 +1896,37 @@ public void Init() { } LzmaIncrementalDecoder.LiteralDecoder.Decoder2 GetDecoder(int pos, byte prevByte) { - return m_Coders[((pos & m_PosMask) << m_NumPrevBits) - + ((prevByte & 0xFF) >>> (8 - m_NumPrevBits))]; + return m_Coders[((pos & m_PosMask) << m_NumPrevBits) + ((prevByte & 0xFF) >>> (8 - m_NumPrevBits))]; } } private OutWindow m_OutWindow = new OutWindow(); - private SevenZip.Compression.RangeCoder.Decoder m_RangeDecoder = - new SevenZip.Compression.RangeCoder.Decoder(); + private SevenZip.Compression.RangeCoder.Decoder m_RangeDecoder = new SevenZip.Compression.RangeCoder.Decoder(); private short[] m_IsMatchDecoders = new short[Base.kNumStates << Base.kNumPosStatesBitsMax]; private short[] m_IsRepDecoders = new short[Base.kNumStates]; private short[] m_IsRepG0Decoders = new short[Base.kNumStates]; private short[] m_IsRepG1Decoders = new short[Base.kNumStates]; private short[] m_IsRepG2Decoders = new short[Base.kNumStates]; - private short[] m_IsRep0LongDecoders = - new short[Base.kNumStates << Base.kNumPosStatesBitsMax]; + private short[] m_IsRep0LongDecoders = new short[Base.kNumStates << Base.kNumPosStatesBitsMax]; private BitTreeDecoder[] m_PosSlotDecoder = new BitTreeDecoder[Base.kNumLenToPosStates]; private short[] m_PosDecoders = new short[Base.kNumFullDistances - Base.kEndPosModelIndex]; private BitTreeDecoder m_PosAlignDecoder = new BitTreeDecoder(Base.kNumAlignBits); - private LzmaIncrementalDecoder.LenDecoder m_LenDecoder = - new LzmaIncrementalDecoder.LenDecoder(); - private LzmaIncrementalDecoder.LenDecoder m_RepLenDecoder = - new LzmaIncrementalDecoder.LenDecoder(); + private LzmaIncrementalDecoder.LenDecoder m_LenDecoder = new LzmaIncrementalDecoder.LenDecoder(); + private LzmaIncrementalDecoder.LenDecoder m_RepLenDecoder = new LzmaIncrementalDecoder.LenDecoder(); - private LzmaIncrementalDecoder.LiteralDecoder m_LiteralDecoder = - new 
LzmaIncrementalDecoder.LiteralDecoder(); + private LzmaIncrementalDecoder.LiteralDecoder m_LiteralDecoder = new LzmaIncrementalDecoder.LiteralDecoder(); private int m_DictionarySize = -1; private int m_DictionarySizeCheck = -1; private int m_PosStateMask; - public LzmaIncrementalDecoder(byte[] properties, InputStream inStream, - OutputStream outStream, long outSize) throws IOException { + public LzmaIncrementalDecoder(byte[] properties, InputStream inStream, OutputStream outStream, long outSize) + throws IOException { for (int i = 0; i < Base.kNumLenToPosStates; i++) { m_PosSlotDecoder[i] = new BitTreeDecoder(Base.kNumPosSlotBits); } @@ -2067,12 +2022,11 @@ public boolean Code() throws IOException { if (m_outSize < 0 || m_nowPos64 < m_outSize) { int posState = (int) m_nowPos64 & m_PosStateMask; if (m_RangeDecoder.DecodeBit(m_IsMatchDecoders, - (m_state << Base.kNumPosStatesBitsMax) + posState) == 0) { + (m_state << Base.kNumPosStatesBitsMax) + posState) == 0) { LzmaIncrementalDecoder.LiteralDecoder.Decoder2 decoder2 = - m_LiteralDecoder.GetDecoder((int) m_nowPos64, m_prevByte); + m_LiteralDecoder.GetDecoder((int) m_nowPos64, m_prevByte); if (!Base.StateIsCharState(m_state)) { - m_prevByte = decoder2.DecodeWithMatchByte(m_RangeDecoder, - m_OutWindow.GetByte(m_rep0)); + m_prevByte = decoder2.DecodeWithMatchByte(m_RangeDecoder, m_OutWindow.GetByte(m_rep0)); } else { m_prevByte = decoder2.DecodeNormal(m_RangeDecoder); } @@ -2085,7 +2039,7 @@ public boolean Code() throws IOException { len = 0; if (m_RangeDecoder.DecodeBit(m_IsRepG0Decoders, m_state) == 0) { if (m_RangeDecoder.DecodeBit(m_IsRep0LongDecoders, - (m_state << Base.kNumPosStatesBitsMax) + posState) == 0) { + (m_state << Base.kNumPosStatesBitsMax) + posState) == 0) { m_state = Base.StateUpdateShortRep(m_state); len = 1; } @@ -2106,8 +2060,7 @@ public boolean Code() throws IOException { m_rep0 = distance; } if (len == 0) { - len = m_RepLenDecoder.Decode(m_RangeDecoder, posState) - + Base.kMatchMinLen; + len = 
m_RepLenDecoder.Decode(m_RangeDecoder, posState) + Base.kMatchMinLen; m_state = Base.StateUpdateRep(m_state); } } else { @@ -2116,17 +2069,16 @@ public boolean Code() throws IOException { m_rep1 = m_rep0; len = Base.kMatchMinLen + m_LenDecoder.Decode(m_RangeDecoder, posState); m_state = Base.StateUpdateMatch(m_state); - int posSlot = - m_PosSlotDecoder[Base.GetLenToPosState(len)].Decode(m_RangeDecoder); + int posSlot = m_PosSlotDecoder[Base.GetLenToPosState(len)].Decode(m_RangeDecoder); if (posSlot >= Base.kStartPosModelIndex) { int numDirectBits = (posSlot >> 1) - 1; m_rep0 = ((2 | (posSlot & 1)) << numDirectBits); if (posSlot < Base.kEndPosModelIndex) { - m_rep0 += BitTreeDecoder.ReverseDecode(m_PosDecoders, - m_rep0 - posSlot - 1, m_RangeDecoder, numDirectBits); + m_rep0 += BitTreeDecoder.ReverseDecode(m_PosDecoders, m_rep0 - posSlot - 1, + m_RangeDecoder, numDirectBits); } else { - m_rep0 += (m_RangeDecoder.DecodeDirectBits( - numDirectBits - Base.kNumAlignBits) << Base.kNumAlignBits); + m_rep0 += (m_RangeDecoder + .DecodeDirectBits(numDirectBits - Base.kNumAlignBits) << Base.kNumAlignBits); m_rep0 += m_PosAlignDecoder.ReverseDecode(m_RangeDecoder); if (m_rep0 < 0) { if (m_rep0 == -1) { @@ -2174,15 +2126,13 @@ private static class LzmaDecompressingInputStream extends InputStream { private boolean m_bEof; private byte[] m_singleByteBuf = new byte[1]; - public LzmaDecompressingInputStream(InputStreamFactory inputStreamFactory, - long nOffsetIntoArchive, long nPackedSize, long nUnpackedSize, byte[] properties) - throws IOException { + public LzmaDecompressingInputStream(InputStreamFactory inputStreamFactory, long nOffsetIntoArchive, + long nPackedSize, long nUnpackedSize, byte[] properties) throws IOException { InputStream inputStream = inputStreamFactory.createInputStream(); skipFully(inputStream, nOffsetIntoArchive); m_inputStream = new LimitedInputStream(inputStream, nPackedSize); m_byteArrayOutputStream = new ByteArrayOutputStream(); - m_decoder = new 
LzmaIncrementalDecoder(properties, m_inputStream, - m_byteArrayOutputStream, nUnpackedSize); + m_decoder = new LzmaIncrementalDecoder(properties, m_inputStream, m_byteArrayOutputStream, nUnpackedSize); } // skip, available, markSupported, mark, reset - all stubbed out by base class @@ -2227,8 +2177,7 @@ public int read(byte[] bytes, int nOffset, int nLength) throws IOException { if (null != m_bytes) { // return what we've got nLength = Math.min(nLength, m_nBytesRemaining); - System.arraycopy(m_bytes, m_bytes.length - m_nBytesRemaining, bytes, nOffset, - nLength); + System.arraycopy(m_bytes, m_bytes.length - m_nBytesRemaining, bytes, nOffset, nLength); m_nBytesRemaining -= nLength; if (0 == m_nBytesRemaining) { m_bytes = null; @@ -2261,8 +2210,7 @@ public LimitedInputStream(InputStream inputStream, long nBytes) { this(inputStream, nBytes, CloseUnderlyingOnClose.YES); } - public LimitedInputStream(InputStream inputStream, long nBytes, - CloseUnderlyingOnClose closeUnderlyingOnClose) { + public LimitedInputStream(InputStream inputStream, long nBytes, CloseUnderlyingOnClose closeUnderlyingOnClose) { m_inputStream = inputStream; m_nBytesRemaining = nBytes; m_closeUnderlyingOnClose = closeUnderlyingOnClose; @@ -2357,7 +2305,7 @@ public ArchiveIterator(ArchiveDatabaseEx archiveDatabaseEx) { m_unpackStreamsForFolderItr = archiveDatabaseEx.NumUnpackStreamsVector.iterator(); m_packStreamSizesItr = archiveDatabaseEx.PackSizes.iterator(); m_nOffsetIntoArchive = archiveDatabaseEx.ArchiveInfo.DataStartPosition - + archiveDatabaseEx.ArchiveInfo.StartPositionAfterHeader; + + archiveDatabaseEx.ArchiveInfo.StartPositionAfterHeader; } } @@ -2438,19 +2386,18 @@ public Entry getNextEntry(Behavior behavior) throws IOException { } if (0 == m_archiveIterator.m_nStreamsRemainingInFolder) { - closeAllStreams(); // throw out whatever we've got because we will start with new - // streams + closeAllStreams(); // throw out whatever we've got because we will start with new streams // open next 
folder-stream Folder folder; do { if (false == m_archiveIterator.m_folderItr.hasNext() - || false == m_archiveIterator.m_unpackStreamsForFolderItr.hasNext()) { + || false == m_archiveIterator.m_unpackStreamsForFolderItr.hasNext()) { throw new ZipException("Bad header."); } folder = m_archiveIterator.m_folderItr.next(); m_archiveIterator.m_nStreamsRemainingInFolder = - m_archiveIterator.m_unpackStreamsForFolderItr.next(); + m_archiveIterator.m_unpackStreamsForFolderItr.next(); } while (0 == m_archiveIterator.m_nStreamsRemainingInFolder); if (folder.Coders.isEmpty()) { @@ -2476,8 +2423,8 @@ public Entry getNextEntry(Behavior behavior) throws IOException { long nOffsetIntoArchive = m_archiveIterator.m_nOffsetIntoArchive; m_archiveIterator.m_nOffsetIntoArchive += nPackedSize; - m_currentFolderStream = new LzmaDecompressingInputStream(m_inputStreamFactory, - nOffsetIntoArchive, nPackedSize, nUnpackedSize, properties); + m_currentFolderStream = new LzmaDecompressingInputStream(m_inputStreamFactory, nOffsetIntoArchive, + nPackedSize, nUnpackedSize, properties); } // if we are in the middle of a folder-stream, skip to the end @@ -2491,7 +2438,7 @@ public Entry getNextEntry(Behavior behavior) throws IOException { // get next file-stream from folder-stream m_archiveIterator.m_nStreamsRemainingInFolder--; m_currentFileStream = new LimitedInputStream(m_currentFolderStream, fileItem.UnPackSize, - LimitedInputStream.CloseUnderlyingOnClose.NO); + LimitedInputStream.CloseUnderlyingOnClose.NO); return new Entry(fileItem); } catch (IOException e) { @@ -2517,8 +2464,7 @@ private ArchiveDatabaseEx readArchiveDatabase() throws IOException { } // read the rest - new InArchive().readDatabase(inputStream, archiveDatabaseEx, m_inputStreamFactory, - SIGNATURE_LENGTH); + new InArchive().readDatabase(inputStream, archiveDatabaseEx, m_inputStreamFactory, SIGNATURE_LENGTH); } finally { inputStream.close(); } diff --git a/IO/src/main/java/io/deephaven/io/streams/SimpleByteBufferSink.java 
b/IO/src/main/java/io/deephaven/io/streams/SimpleByteBufferSink.java index 61b48684ccb..0617cafd8eb 100644 --- a/IO/src/main/java/io/deephaven/io/streams/SimpleByteBufferSink.java +++ b/IO/src/main/java/io/deephaven/io/streams/SimpleByteBufferSink.java @@ -30,8 +30,7 @@ public ByteBuffer getBuffer() { public ByteBuffer acceptBuffer(ByteBuffer b, int need) { if (b.remaining() < need) { b.flip(); - ByteBuffer b2 = - direct ? ByteBuffer.allocateDirect(Math.max(b.capacity() * 2, b.remaining() + need)) + ByteBuffer b2 = direct ? ByteBuffer.allocateDirect(Math.max(b.capacity() * 2, b.remaining() + need)) : ByteBuffer.allocate(Math.max(b.capacity() * 2, b.remaining() + need)); b2.put(b); currentBuffer = b = b2; diff --git a/IO/src/main/java/io/deephaven/io/streams/ZipInputStream.java b/IO/src/main/java/io/deephaven/io/streams/ZipInputStream.java index 4a7e595d18a..0387c66590a 100644 --- a/IO/src/main/java/io/deephaven/io/streams/ZipInputStream.java +++ b/IO/src/main/java/io/deephaven/io/streams/ZipInputStream.java @@ -29,8 +29,8 @@ private void fill() throws IOException { try { noBytes = dataIn.readInt(); if (noBytes < 0 || noBytes > BUFFER_LIMIT) - throw new ZipException("io.deephaven.io.streams.ZipInputStream was asked to read " - + noBytes + " which exceeded buffer limit. Is this a Zip file?"); + throw new ZipException("io.deephaven.io.streams.ZipInputStream was asked to read " + noBytes + + " which exceeded buffer limit. 
Is this a Zip file?"); buf = new byte[noBytes]; } catch (EOFException ee) { return; @@ -56,12 +56,12 @@ public synchronized int read() throws IOException { } public synchronized int read(byte b[]) - throws IOException { + throws IOException { return read(b, 0, b.length); } public synchronized int read(byte b[], int off, int len) - throws IOException { + throws IOException { if (iis.available() == 0) { fill(); if (iis == null) { diff --git a/IO/src/main/java/io/deephaven/io/streams/ZipOutputStream.java b/IO/src/main/java/io/deephaven/io/streams/ZipOutputStream.java index 72b10cddc58..5e8aaaf32c1 100644 --- a/IO/src/main/java/io/deephaven/io/streams/ZipOutputStream.java +++ b/IO/src/main/java/io/deephaven/io/streams/ZipOutputStream.java @@ -60,9 +60,8 @@ public synchronized void write(byte b[]) throws IOException { public synchronized void write(byte b[], int off, int len) throws IOException { if (len >= buf.length) { /* - * If the request length exceeds the size of the output buffer, flush the output buffer - * and then write the data directly. In this way buffered streams will cascade - * harmlessly. + * If the request length exceeds the size of the output buffer, flush the output buffer and then write the + * data directly. In this way buffered streams will cascade harmlessly. */ flushBuffer(); writeBuffer(b, off, len); diff --git a/IO/src/test/java/io/deephaven/io/log/impl/ConsolidatingLogEntry.java b/IO/src/test/java/io/deephaven/io/log/impl/ConsolidatingLogEntry.java index d3e6182543c..5d636a1bf2a 100644 --- a/IO/src/test/java/io/deephaven/io/log/impl/ConsolidatingLogEntry.java +++ b/IO/src/test/java/io/deephaven/io/log/impl/ConsolidatingLogEntry.java @@ -18,8 +18,7 @@ // -------------------------------------------------------------------- /** - * A mock {@link LogEntry} for use with JMock that consolidates all the append calls into the single - * resulting string. 
+ * A mock {@link LogEntry} for use with JMock that consolidates all the append calls into the single resulting string. *

    * Example: *

    @@ -43,8 +42,8 @@ public class ConsolidatingLogEntry extends LogOutputStringImpl implements LogEnt public interface Monitor { /** - * Indicates that {@link LogEntry#endl()} was called on the monitored {@link LogEntry} and - * the given message had been accumulated since the last call to endl(). + * Indicates that {@link LogEntry#endl()} was called on the monitored {@link LogEntry} and the given message had + * been accumulated since the last call to endl(). */ void endl(String sMessage); diff --git a/IO/src/test/java/io/deephaven/io/log/impl/TestLogOutput.java b/IO/src/test/java/io/deephaven/io/log/impl/TestLogOutput.java index de845662c91..35b4286b461 100644 --- a/IO/src/test/java/io/deephaven/io/log/impl/TestLogOutput.java +++ b/IO/src/test/java/io/deephaven/io/log/impl/TestLogOutput.java @@ -9,11 +9,10 @@ /** - * TODO: This class never existed until the potential NPE bug when logging boxed primitives that are - * null was TODO: exposed, so those cases are the only ones tested here. But this should be expanded - * to include all TODO: LogOutput methods, and since StringsLoggerImpl can easily be instantiated - * with arbitrary LogEntry TODO: implenmentations, it should also be made into an abstract base - * class with concrete subclasses for TODO: entry entry impl. + * TODO: This class never existed until the potential NPE bug when logging boxed primitives that are null was TODO: + * exposed, so those cases are the only ones tested here. But this should be expanded to include all TODO: LogOutput + * methods, and since StringsLoggerImpl can easily be instantiated with arbitrary LogEntry TODO: implenmentations, it + * should also be made into an abstract base class with concrete subclasses for TODO: entry entry impl. 
*/ public class TestLogOutput extends TestCase { LogBufferPoolImpl buffers; diff --git a/IO/src/test/java/io/deephaven/io/logger/TestLog4jLoggerImpl.java b/IO/src/test/java/io/deephaven/io/logger/TestLog4jLoggerImpl.java index 9fd527791fb..65d45fc142e 100644 --- a/IO/src/test/java/io/deephaven/io/logger/TestLog4jLoggerImpl.java +++ b/IO/src/test/java/io/deephaven/io/logger/TestLog4jLoggerImpl.java @@ -23,8 +23,7 @@ public void setUp() throws Exception { log4jlogger = context.mock(org.apache.log4j.Logger.class); } - // TODO: this is NOT a test of the Log4jLogger class, it's just enough so I can watch it in the - // debugger once. + // TODO: this is NOT a test of the Log4jLogger class, it's just enough so I can watch it in the debugger once. public void testSimple() { Log4jLoggerImpl SUT = new Log4jLoggerImpl(log4jlogger); diff --git a/IO/src/test/java/io/deephaven/io/sched/TestJobStateTimeoutQueue.java b/IO/src/test/java/io/deephaven/io/sched/TestJobStateTimeoutQueue.java index e2fe103304b..c1dfb52acc2 100644 --- a/IO/src/test/java/io/deephaven/io/sched/TestJobStateTimeoutQueue.java +++ b/IO/src/test/java/io/deephaven/io/sched/TestJobStateTimeoutQueue.java @@ -26,8 +26,7 @@ public void tearDown() throws Exception { * A null Job implementation */ private static class NullJob extends Job { - public int invoke(SelectableChannel channel, int readyOps, Procedure.Nullary handoff) - throws IOException { + public int invoke(SelectableChannel channel, int readyOps, Procedure.Nullary handoff) throws IOException { return 0; } diff --git a/IO/src/test/java/io/deephaven/io/streams/OutputInputStreamTest.java b/IO/src/test/java/io/deephaven/io/streams/OutputInputStreamTest.java index ec27abbc1d2..b51ba22ab17 100644 --- a/IO/src/test/java/io/deephaven/io/streams/OutputInputStreamTest.java +++ b/IO/src/test/java/io/deephaven/io/streams/OutputInputStreamTest.java @@ -55,8 +55,7 @@ public void close(ByteBuffer b) throws IOException { private OutputStream getOutputStream(int i, 
ByteBuffer buffer) { switch (i) { case 0: - return new ByteBufferOutputStream(ByteBuffer.allocate(1), - new SmallByteSink(buffer)); + return new ByteBufferOutputStream(ByteBuffer.allocate(1), new SmallByteSink(buffer)); default: fail("No such output stream"); @@ -90,8 +89,7 @@ private int getNumDataInputs() { private ByteBufferOutputStream getByteBufferOutputStream(int i, ByteBuffer buffer) { switch (i) { case 0: - return new ByteBufferOutputStream(ByteBuffer.allocate(1), - new SmallByteSink(buffer)); + return new ByteBufferOutputStream(ByteBuffer.allocate(1), new SmallByteSink(buffer)); default: fail("No such output stream"); @@ -132,8 +130,7 @@ public void testAll() { buffer.clear(); buffer.limit(0); - testWriteReadByteInterleave(getOutputStream(out, buffer), - getInputStream(in, buffer)); + testWriteReadByteInterleave(getOutputStream(out, buffer), getInputStream(in, buffer)); buffer.clear(); buffer.limit(0); @@ -148,23 +145,21 @@ public void testAll() { for (int out = 0; out < outs; ++out) { buffer.clear(); buffer.limit(0); - testWriteByteBuffer(getByteBufferOutputStream(out, buffer), - getByteBufferInputStream(in, buffer)); + testWriteByteBuffer(getByteBufferOutputStream(out, buffer), getByteBufferInputStream(in, buffer)); buffer.clear(); buffer.limit(0); testAppendBytesCharSequence(getByteBufferOutputStream(out, buffer), - getByteBufferInputStream(in, buffer)); + getByteBufferInputStream(in, buffer)); buffer.clear(); buffer.limit(0); testAppendCharsCharSequence(getByteBufferOutputStream(out, buffer), - getByteBufferInputStream(in, buffer)); + getByteBufferInputStream(in, buffer)); buffer.clear(); buffer.limit(0); - testWriteUTF(getByteBufferOutputStream(out, buffer), - getByteBufferInputStream(in, buffer)); + testWriteUTF(getByteBufferOutputStream(out, buffer), getByteBufferInputStream(in, buffer)); } } } diff --git a/IO/src/test/java/io/deephaven/io/streams/TestByteBufferStreams.java b/IO/src/test/java/io/deephaven/io/streams/TestByteBufferStreams.java index 
a1c6850d577..77ffda20bd8 100644 --- a/IO/src/test/java/io/deephaven/io/streams/TestByteBufferStreams.java +++ b/IO/src/test/java/io/deephaven/io/streams/TestByteBufferStreams.java @@ -67,8 +67,7 @@ private static class CHARS { } public void script(Object... os) throws Exception { - ByteBufferStreams.Output out = - new ByteBufferStreams.Output(ByteBuffer.allocate(BUFSZ), SINK); + ByteBufferStreams.Output out = new ByteBufferStreams.Output(ByteBuffer.allocate(BUFSZ), SINK); ByteBufferStreams.Input in = new ByteBufferStreams.Input(null, SOURCE); Arrays.fill(buffers, null); bufferPtr = 0; @@ -170,9 +169,7 @@ public void testIt() throws Exception { script((double) 3.14159); script((int) 314159, new FLUSH(), (int) 4711); script("123456789012345678901234567890", (int) 314159, "abcdefghijklmnopqrstuvwxyzabcd"); - script(new BYTES("123456789012345678901234567890"), (int) 314159, - new BYTES("abcdefghijklmnopqrstuvwxyzabcd")); - script(new CHARS("123456789012345"), (int) 314159, - new CHARS("abcdefghijklmnopqrstuvwxyzabcd")); + script(new BYTES("123456789012345678901234567890"), (int) 314159, new BYTES("abcdefghijklmnopqrstuvwxyzabcd")); + script(new CHARS("123456789012345"), (int) 314159, new CHARS("abcdefghijklmnopqrstuvwxyzabcd")); } } diff --git a/Integrations/src/main/java/io/deephaven/integrations/common/PrimitiveArrayConversionUtility.java b/Integrations/src/main/java/io/deephaven/integrations/common/PrimitiveArrayConversionUtility.java index 42d811d64dd..fb62a367181 100644 --- a/Integrations/src/main/java/io/deephaven/integrations/common/PrimitiveArrayConversionUtility.java +++ b/Integrations/src/main/java/io/deephaven/integrations/common/PrimitiveArrayConversionUtility.java @@ -8,16 +8,14 @@ import io.deephaven.util.QueryConstants; /** - * General purpose helper methods for array conversion methods from specific object types to/from - * primitive types. 
This is specifically intended to improve performance in integration with Python, - * where conversion of primitive type arrays involves direct copy of a memory buffer, and is much - * more performant than element-by-element inspection. + * General purpose helper methods for array conversion methods from specific object types to/from primitive types. This + * is specifically intended to improve performance in integration with Python, where conversion of primitive type arrays + * involves direct copy of a memory buffer, and is much more performant than element-by-element inspection. */ public class PrimitiveArrayConversionUtility { /** - * Translates a java.lang.Boolean array to a byte array. The mapping will be performed as null - * -> -1, false -> 0, and true -> 1. This is the (psuedo)inverse of - * `translateArrayByteToBoolean`. + * Translates a java.lang.Boolean array to a byte array. The mapping will be performed as null -> -1, false -> 0, + * and true -> 1. This is the (psuedo)inverse of `translateArrayByteToBoolean`. * * @param array - the Boolean array * @return the byte array @@ -32,8 +30,8 @@ public static byte[] translateArrayBooleanToByte(final Boolean[] array) { } /** - * Translates a byte array to a Boolean array. The mapping will be performed as <0 -> null, 0 -> - * false, >0 -> true. This is the (psuedo)inverse of `translateArrayBooleanToByte`. + * Translates a byte array to a Boolean array. The mapping will be performed as <0 -> null, 0 -> false, >0 -> true. + * This is the (psuedo)inverse of `translateArrayBooleanToByte`. * * @param array - the byte array * @return the Boolean array @@ -48,8 +46,7 @@ public static Boolean[] translateArrayByteToBoolean(final byte[] array) { } /** - * Translates a DBDateTime array to a long array. This is the (psuedo)inverse of - * `translateArrayLongToDBDateTime`. + * Translates a DBDateTime array to a long array. This is the (psuedo)inverse of `translateArrayLongToDBDateTime`. 
* * @param array - the DBDateTime array * @return the corresponding long array @@ -64,8 +61,7 @@ public static long[] translateArrayDBDateTimeToLong(final DBDateTime[] array) { } /** - * Translates a long array to a DBDateTime array. This is the (psuedo)inverse of - * `translateArrayLongToDBDateTime`. + * Translates a long array to a DBDateTime array. This is the (psuedo)inverse of `translateArrayLongToDBDateTime`. * * @param array - the long array * @return the corresponding DBDateTime array diff --git a/Integrations/src/main/java/io/deephaven/integrations/numpy/Java2NumpyCopy.java b/Integrations/src/main/java/io/deephaven/integrations/numpy/Java2NumpyCopy.java index e08c0a99226..65dc8939ca6 100644 --- a/Integrations/src/main/java/io/deephaven/integrations/numpy/Java2NumpyCopy.java +++ b/Integrations/src/main/java/io/deephaven/integrations/numpy/Java2NumpyCopy.java @@ -47,16 +47,15 @@ public static TableType tableType(final Table t) { } private static boolean isImageTable(final Table t) { - return t.getColumns().length == 1 - && DbImage.class.isAssignableFrom(t.getColumn(0).getType()); + return t.getColumns().length == 1 && DbImage.class.isAssignableFrom(t.getColumn(0).getType()); } private static boolean isNumberTable(final Table t) { return Arrays.stream(t.getColumns()).allMatch(c -> TypeUtils.isNumeric(c.getType())); } - private static void assertCopySliceArgs(final Table t, final long rowStart, - final int dataLength, final int nRow, final int nCol) { + private static void assertCopySliceArgs(final Table t, final long rowStart, final int dataLength, final int nRow, + final int nCol) { if (t == null) { throw new IllegalArgumentException("t must not be null"); } @@ -78,23 +77,21 @@ private static void assertCopySliceArgs(final Table t, final long rowStart, } if (dataLength != nRow * nCol) { - throw new IllegalArgumentException( - "data is expected to be of length nRow*nCol. 
length=" + dataLength + " nRow=" - + nRow + " nCol=" + nCol); + throw new IllegalArgumentException("data is expected to be of length nRow*nCol. length=" + dataLength + + " nRow=" + nRow + " nCol=" + nCol); } final int nc = t.getColumns().length; if (nc != nCol) { throw new IllegalArgumentException( - "Number of table columns does not match the number of output columns: table=" + nc - + " nCol=" + nCol); + "Number of table columns does not match the number of output columns: table=" + nc + " nCol=" + + nCol); } final long rowEnd = rowStart + nRow; if (rowStart < 0 || rowEnd > t.size()) { - throw new IllegalArgumentException( - "Selected rows that are not within the table. table=[0," + t.size() + "] rowStart=" - + rowStart + " rowEnd=" + rowEnd); + throw new IllegalArgumentException("Selected rows that are not within the table. table=[0," + t.size() + + "] rowStart=" + rowStart + " rowEnd=" + rowEnd); } } @@ -111,8 +108,8 @@ private interface CopySetter { } /** - * Casts data to the desired type and copies a slice of rows into a flattened 2D array. This is - * useful for copying table data directly into arrays. + * Casts data to the desired type and copies a slice of rows into a flattened 2D array. This is useful for copying + * table data directly into arrays. * * @param t table to copy data from * @param rowStart first row of data to copy @@ -122,11 +119,11 @@ private interface CopySetter { * @param cast string used to cast to the output type * @param setter setter used to assign data */ - private static void copySlice(final Table t, final long rowStart, final int nRow, - final int nCol, final Class type, final String cast, final CopySetter setter) { - final Table tt = t.view(Arrays.stream(t.getColumns()).map( - c -> c.getType() == type ? 
c.getName() : c.getName() + " = " + cast + " " + c.getName()) - .toArray(String[]::new)); + private static void copySlice(final Table t, final long rowStart, final int nRow, final int nCol, final Class type, + final String cast, final CopySetter setter) { + final Table tt = t.view(Arrays.stream(t.getColumns()) + .map(c -> c.getType() == type ? c.getName() : c.getName() + " = " + cast + " " + c.getName()) + .toArray(String[]::new)); final Index index = tt.getIndex().subindexByPos(rowStart, rowStart + nRow); for (int i = 0; i < nCol; i++) { @@ -146,8 +143,8 @@ private static void copySlice(final Table t, final long rowStart, final int nRow /** - * Casts data to the desired type and copies a slice of rows into a flattened 2D array. This is - * useful for copying table data directly into numpy arrays. + * Casts data to the desired type and copies a slice of rows into a flattened 2D array. This is useful for copying + * table data directly into numpy arrays. * * @param t table to copy data from * @param rowStart first row of data to copy @@ -155,17 +152,16 @@ private static void copySlice(final Table t, final long rowStart, final int nRow * @param nRow number of rows to copy; also the number of rows in data. * @param nCol number of table columns; also the number of columns in data. */ - public static void copySlice(final Table t, final long rowStart, final double[] data, - final int nRow, final int nCol) { + public static void copySlice(final Table t, final long rowStart, final double[] data, final int nRow, + final int nCol) { assertCopySliceArgs(t, rowStart, data == null ? -1 : data.length, nRow, nCol); assert data != null; - copySlice(t, rowStart, nRow, nCol, double.class, "(double)", - (cs, k, idx) -> data[idx] = cs.getDouble(k)); + copySlice(t, rowStart, nRow, nCol, double.class, "(double)", (cs, k, idx) -> data[idx] = cs.getDouble(k)); } /** - * Casts data to the desired type and copies a slice of rows into a flattened 2D array. 
This is - * useful for copying table data directly into numpy arrays. + * Casts data to the desired type and copies a slice of rows into a flattened 2D array. This is useful for copying + * table data directly into numpy arrays. * * @param t table to copy data from * @param rowStart first row of data to copy @@ -173,17 +169,16 @@ public static void copySlice(final Table t, final long rowStart, final double[] * @param nRow number of rows to copy; also the number of rows in data. * @param nCol number of table columns; also the number of columns in data. */ - public static void copySlice(final Table t, final long rowStart, final float[] data, - final int nRow, final int nCol) { + public static void copySlice(final Table t, final long rowStart, final float[] data, final int nRow, + final int nCol) { assertCopySliceArgs(t, rowStart, data == null ? -1 : data.length, nRow, nCol); assert data != null; - copySlice(t, rowStart, nRow, nCol, float.class, "(float)", - (cs, k, idx) -> data[idx] = cs.getFloat(k)); + copySlice(t, rowStart, nRow, nCol, float.class, "(float)", (cs, k, idx) -> data[idx] = cs.getFloat(k)); } /** - * Casts data to the desired type and copies a slice of rows into a flattened 2D array. This is - * useful for copying table data directly into numpy arrays. + * Casts data to the desired type and copies a slice of rows into a flattened 2D array. This is useful for copying + * table data directly into numpy arrays. * * @param t table to copy data from * @param rowStart first row of data to copy @@ -191,17 +186,16 @@ public static void copySlice(final Table t, final long rowStart, final float[] d * @param nRow number of rows to copy; also the number of rows in data. * @param nCol number of table columns; also the number of columns in data. 
*/ - public static void copySlice(final Table t, final long rowStart, final byte[] data, - final int nRow, final int nCol) { + public static void copySlice(final Table t, final long rowStart, final byte[] data, final int nRow, + final int nCol) { assertCopySliceArgs(t, rowStart, data == null ? -1 : data.length, nRow, nCol); assert data != null; - copySlice(t, rowStart, nRow, nCol, byte.class, "(byte)", - (cs, k, idx) -> data[idx] = cs.getByte(k)); + copySlice(t, rowStart, nRow, nCol, byte.class, "(byte)", (cs, k, idx) -> data[idx] = cs.getByte(k)); } /** - * Casts data to the desired type and copies a slice of rows into a flattened 2D array. This is - * useful for copying table data directly into numpy arrays. + * Casts data to the desired type and copies a slice of rows into a flattened 2D array. This is useful for copying + * table data directly into numpy arrays. * * @param t table to copy data from * @param rowStart first row of data to copy @@ -209,17 +203,16 @@ public static void copySlice(final Table t, final long rowStart, final byte[] da * @param nRow number of rows to copy; also the number of rows in data. * @param nCol number of table columns; also the number of columns in data. */ - public static void copySlice(final Table t, final long rowStart, final short[] data, - final int nRow, final int nCol) { + public static void copySlice(final Table t, final long rowStart, final short[] data, final int nRow, + final int nCol) { assertCopySliceArgs(t, rowStart, data == null ? -1 : data.length, nRow, nCol); assert data != null; - copySlice(t, rowStart, nRow, nCol, short.class, "(short)", - (cs, k, idx) -> data[idx] = cs.getShort(k)); + copySlice(t, rowStart, nRow, nCol, short.class, "(short)", (cs, k, idx) -> data[idx] = cs.getShort(k)); } /** - * Casts data to the desired type and copies a slice of rows into a flattened 2D array. This is - * useful for copying table data directly into numpy arrays. 
+ * Casts data to the desired type and copies a slice of rows into a flattened 2D array. This is useful for copying + * table data directly into numpy arrays. * * @param t table to copy data from * @param rowStart first row of data to copy @@ -227,17 +220,15 @@ public static void copySlice(final Table t, final long rowStart, final short[] d * @param nRow number of rows to copy; also the number of rows in data. * @param nCol number of table columns; also the number of columns in data. */ - public static void copySlice(final Table t, final long rowStart, final int[] data, - final int nRow, final int nCol) { + public static void copySlice(final Table t, final long rowStart, final int[] data, final int nRow, final int nCol) { assertCopySliceArgs(t, rowStart, data == null ? -1 : data.length, nRow, nCol); assert data != null; - copySlice(t, rowStart, nRow, nCol, int.class, "(int)", - (cs, k, idx) -> data[idx] = cs.getInt(k)); + copySlice(t, rowStart, nRow, nCol, int.class, "(int)", (cs, k, idx) -> data[idx] = cs.getInt(k)); } /** - * Casts data to the desired type and copies a slice of rows into a flattened 2D array. This is - * useful for copying table data directly into numpy arrays. + * Casts data to the desired type and copies a slice of rows into a flattened 2D array. This is useful for copying + * table data directly into numpy arrays. * * @param t table to copy data from * @param rowStart first row of data to copy @@ -245,17 +236,16 @@ public static void copySlice(final Table t, final long rowStart, final int[] dat * @param nRow number of rows to copy; also the number of rows in data. * @param nCol number of table columns; also the number of columns in data. */ - public static void copySlice(final Table t, final long rowStart, final long[] data, - final int nRow, final int nCol) { + public static void copySlice(final Table t, final long rowStart, final long[] data, final int nRow, + final int nCol) { assertCopySliceArgs(t, rowStart, data == null ? 
-1 : data.length, nRow, nCol); assert data != null; - copySlice(t, rowStart, nRow, nCol, long.class, "(long)", - (cs, k, idx) -> data[idx] = cs.getLong(k)); + copySlice(t, rowStart, nRow, nCol, long.class, "(long)", (cs, k, idx) -> data[idx] = cs.getLong(k)); } /** - * Casts data to the desired type and copies a slice of rows into a flattened 2D array. This is - * useful for copying table data directly into numpy arrays. + * Casts data to the desired type and copies a slice of rows into a flattened 2D array. This is useful for copying + * table data directly into numpy arrays. * * @param t table to copy data from * @param rowStart first row of data to copy @@ -263,12 +253,11 @@ public static void copySlice(final Table t, final long rowStart, final long[] da * @param nRow number of rows to copy; also the number of rows in data. * @param nCol number of table columns; also the number of columns in data. */ - public static void copySlice(final Table t, final long rowStart, final boolean[] data, - final int nRow, final int nCol) { + public static void copySlice(final Table t, final long rowStart, final boolean[] data, final int nRow, + final int nCol) { assertCopySliceArgs(t, rowStart, data == null ? -1 : data.length, nRow, nCol); assert data != null; - copySlice(t, rowStart, nRow, nCol, boolean.class, "(boolean)", - (cs, k, idx) -> data[idx] = cs.getBoolean(k)); + copySlice(t, rowStart, nRow, nCol, boolean.class, "(boolean)", (cs, k, idx) -> data[idx] = cs.getBoolean(k)); } @@ -294,9 +283,8 @@ final int size() { } /** - * Randomly fills the slice with vaules in the range [0,tSize-1] using a reservoir - * sampling algorithm. No rows will be repeated. The slice must be smaller than - * tSize + * Randomly fills the slice with vaules in the range [0,tSize-1] using a reservoir sampling algorithm. + * No rows will be repeated. The slice must be smaller than tSize * * @param slice slice to fill. * @param tSize table size. 
@@ -309,8 +297,7 @@ private static void reservoirSample(final Slice slice, final long tSize) { if (k > tSize) { throw new IllegalArgumentException( - "Requesting more items than are available. slice.size()=" + slice.size() - + " tSize=" + tSize); + "Requesting more items than are available. slice.size()=" + slice.size() + " tSize=" + tSize); } final RandomDataGenerator rnd = new RandomDataGenerator(); @@ -355,8 +342,8 @@ public static long[] randRows(final int nRow, final long tSize) { return R; } - private static void assertCopyRandArgs(final Table t, final long dataLength, final int nRow, - final int nCol, final long[] rows) { + private static void assertCopyRandArgs(final Table t, final long dataLength, final int nRow, final int nCol, + final long[] rows) { if (t == null) { throw new IllegalArgumentException("t must not be null"); } @@ -378,27 +365,26 @@ private static void assertCopyRandArgs(final Table t, final long dataLength, fin } if (dataLength != nRow * nCol) { - throw new IllegalArgumentException( - "data is expected to be of length nRow*nCol. length=" + dataLength + " nRow=" - + nRow + " nCol=" + nCol); + throw new IllegalArgumentException("data is expected to be of length nRow*nCol. length=" + dataLength + + " nRow=" + nRow + " nCol=" + nCol); } final int nc = t.getColumns().length; if (nc != nCol) { throw new IllegalArgumentException( - "Number of table columns does not match the number of output columns: table=" + nc - + " nCol=" + nCol); + "Number of table columns does not match the number of output columns: table=" + nc + " nCol=" + + nCol); } if (rows != null && rows.length != nRow) { - throw new IllegalArgumentException("Length of rows does not match nRow. rows.length=" - + rows.length + " nRow=" + nRow); + throw new IllegalArgumentException( + "Length of rows does not match nRow. rows.length=" + rows.length + " nRow=" + nRow); } } /** - * Casts data to the desired type and copies a random selection of rows into a flattened 2D - * array. 
This is useful for copying table data directly into arrays. + * Casts data to the desired type and copies a random selection of rows into a flattened 2D array. This is useful + * for copying table data directly into arrays. * * @param t table to copy data from * @param nRow number of rows to copy; also the number of rows in data. @@ -408,14 +394,14 @@ private static void assertCopyRandArgs(final Table t, final long dataLength, fin * @param cast string used to cast to the output type * @param setter setter used to assign data */ - public static void copyRand(final Table t, final int nRow, final int nCol, final long[] rows, - final Class type, final String cast, final CopySetter setter) { + public static void copyRand(final Table t, final int nRow, final int nCol, final long[] rows, final Class type, + final String cast, final CopySetter setter) { final long s = t.size(); final long[] tidxs = rows == null ? randRows(nRow, s) : rows; - final Table tt = t.view(Arrays.stream(t.getColumns()).map( - c -> c.getType() == type ? c.getName() : c.getName() + " = " + cast + " " + c.getName()) - .toArray(String[]::new)); + final Table tt = t.view(Arrays.stream(t.getColumns()) + .map(c -> c.getType() == type ? c.getName() : c.getName() + " = " + cast + " " + c.getName()) + .toArray(String[]::new)); final Index index = tt.getIndex(); for (int i = 0; i < nCol; i++) { @@ -428,7 +414,7 @@ public static void copyRand(final Table t, final int nRow, final int nCol, final if (tIdx < 0 || tIdx >= s) { throw new IllegalArgumentException( - "Table index out of range. range=[0," + (s - 1) + "] idx=" + tIdx); + "Table index out of range. range=[0," + (s - 1) + "] idx=" + tIdx); } final long k = index.get(tIdx); @@ -439,8 +425,8 @@ public static void copyRand(final Table t, final int nRow, final int nCol, final } /** - * Casts data to the desired type and copies a random selection of rows into a flattened 2D - * array. This is useful for copying table data directly into numpy arrays. 
+ * Casts data to the desired type and copies a random selection of rows into a flattened 2D array. This is useful + * for copying table data directly into numpy arrays. * * @param t table to copy data from * @param data array to copy data into @@ -448,17 +434,15 @@ public static void copyRand(final Table t, final int nRow, final int nCol, final * @param nCol number of table columns; also the number of columns in data. * @param rows indices of rows to copy. Null causes rows to be randomly generated. */ - public static void copyRand(final Table t, final double[] data, final int nRow, final int nCol, - final long[] rows) { + public static void copyRand(final Table t, final double[] data, final int nRow, final int nCol, final long[] rows) { assertCopyRandArgs(t, data == null ? -1 : data.length, nRow, nCol, rows); assert data != null; - copyRand(t, nRow, nCol, rows, double.class, "(double)", - (cs, k, idx) -> data[idx] = cs.getDouble(k)); + copyRand(t, nRow, nCol, rows, double.class, "(double)", (cs, k, idx) -> data[idx] = cs.getDouble(k)); } /** - * Casts data to the desired type and copies a random selection of rows into a flattened 2D - * array. This is useful for copying table data directly into numpy arrays. + * Casts data to the desired type and copies a random selection of rows into a flattened 2D array. This is useful + * for copying table data directly into numpy arrays. * * @param t table to copy data from * @param data array to copy data into @@ -466,17 +450,15 @@ public static void copyRand(final Table t, final double[] data, final int nRow, * @param nCol number of table columns; also the number of columns in data. * @param rows indices of rows to copy. Null causes rows to be randomly generated. 
*/ - public static void copyRand(final Table t, final float[] data, final int nRow, final int nCol, - final long[] rows) { + public static void copyRand(final Table t, final float[] data, final int nRow, final int nCol, final long[] rows) { assertCopyRandArgs(t, data == null ? -1 : data.length, nRow, nCol, rows); assert data != null; - copyRand(t, nRow, nCol, rows, float.class, "(float)", - (cs, k, idx) -> data[idx] = cs.getFloat(k)); + copyRand(t, nRow, nCol, rows, float.class, "(float)", (cs, k, idx) -> data[idx] = cs.getFloat(k)); } /** - * Casts data to the desired type and copies a random selection of rows into a flattened 2D - * array. This is useful for copying table data directly into numpy arrays. + * Casts data to the desired type and copies a random selection of rows into a flattened 2D array. This is useful + * for copying table data directly into numpy arrays. * * @param t table to copy data from * @param data array to copy data into @@ -484,17 +466,15 @@ public static void copyRand(final Table t, final float[] data, final int nRow, f * @param nCol number of table columns; also the number of columns in data. * @param rows indices of rows to copy. Null causes rows to be randomly generated. */ - public static void copyRand(final Table t, final byte[] data, final int nRow, final int nCol, - final long[] rows) { + public static void copyRand(final Table t, final byte[] data, final int nRow, final int nCol, final long[] rows) { assertCopyRandArgs(t, data == null ? -1 : data.length, nRow, nCol, rows); assert data != null; - copyRand(t, nRow, nCol, rows, byte.class, "(byte)", - (cs, k, idx) -> data[idx] = cs.getByte(k)); + copyRand(t, nRow, nCol, rows, byte.class, "(byte)", (cs, k, idx) -> data[idx] = cs.getByte(k)); } /** - * Casts data to the desired type and copies a random selection of rows into a flattened 2D - * array. This is useful for copying table data directly into numpy arrays. 
+ * Casts data to the desired type and copies a random selection of rows into a flattened 2D array. This is useful + * for copying table data directly into numpy arrays. * * @param t table to copy data from * @param data array to copy data into @@ -502,17 +482,15 @@ public static void copyRand(final Table t, final byte[] data, final int nRow, fi * @param nCol number of table columns; also the number of columns in data. * @param rows indices of rows to copy. Null causes rows to be randomly generated. */ - public static void copyRand(final Table t, final short[] data, final int nRow, final int nCol, - final long[] rows) { + public static void copyRand(final Table t, final short[] data, final int nRow, final int nCol, final long[] rows) { assertCopyRandArgs(t, data == null ? -1 : data.length, nRow, nCol, rows); assert data != null; - copyRand(t, nRow, nCol, rows, short.class, "(short)", - (cs, k, idx) -> data[idx] = cs.getShort(k)); + copyRand(t, nRow, nCol, rows, short.class, "(short)", (cs, k, idx) -> data[idx] = cs.getShort(k)); } /** - * Casts data to the desired type and copies a random selection of rows into a flattened 2D - * array. This is useful for copying table data directly into numpy arrays. + * Casts data to the desired type and copies a random selection of rows into a flattened 2D array. This is useful + * for copying table data directly into numpy arrays. * * @param t table to copy data from * @param data array to copy data into @@ -520,16 +498,15 @@ public static void copyRand(final Table t, final short[] data, final int nRow, f * @param nCol number of table columns; also the number of columns in data. * @param rows indices of rows to copy. Null causes rows to be randomly generated. */ - public static void copyRand(final Table t, final int[] data, final int nRow, final int nCol, - final long[] rows) { + public static void copyRand(final Table t, final int[] data, final int nRow, final int nCol, final long[] rows) { assertCopyRandArgs(t, data == null ? 
-1 : data.length, nRow, nCol, rows); assert data != null; copyRand(t, nRow, nCol, rows, int.class, "(int)", (cs, k, idx) -> data[idx] = cs.getInt(k)); } /** - * Casts data to the desired type and copies a random selection of rows into a flattened 2D - * array. This is useful for copying table data directly into numpy arrays. + * Casts data to the desired type and copies a random selection of rows into a flattened 2D array. This is useful + * for copying table data directly into numpy arrays. * * @param t table to copy data from * @param data array to copy data into @@ -537,17 +514,15 @@ public static void copyRand(final Table t, final int[] data, final int nRow, fin * @param nCol number of table columns; also the number of columns in data. * @param rows indices of rows to copy. Null causes rows to be randomly generated. */ - public static void copyRand(final Table t, final long[] data, final int nRow, final int nCol, - final long[] rows) { + public static void copyRand(final Table t, final long[] data, final int nRow, final int nCol, final long[] rows) { assertCopyRandArgs(t, data == null ? -1 : data.length, nRow, nCol, rows); assert data != null; - copyRand(t, nRow, nCol, rows, long.class, "(long)", - (cs, k, idx) -> data[idx] = cs.getLong(k)); + copyRand(t, nRow, nCol, rows, long.class, "(long)", (cs, k, idx) -> data[idx] = cs.getLong(k)); } /** - * Casts data to the desired type and copies a random selection of rows into a flattened 2D - * array. This is useful for copying table data directly into numpy arrays. + * Casts data to the desired type and copies a random selection of rows into a flattened 2D array. This is useful + * for copying table data directly into numpy arrays. * * @param t table to copy data from * @param data array to copy data into @@ -556,16 +531,14 @@ public static void copyRand(final Table t, final long[] data, final int nRow, fi * @param rows indices of rows to copy. Null causes rows to be randomly generated. 
*/ public static void copyRand(final Table t, final boolean[] data, final int nRow, final int nCol, - final long[] rows) { + final long[] rows) { assertCopyRandArgs(t, data == null ? -1 : data.length, nRow, nCol, rows); assert data != null; - copyRand(t, nRow, nCol, rows, boolean.class, "(boolean)", - (cs, k, idx) -> data[idx] = cs.getBoolean(k)); + copyRand(t, nRow, nCol, rows, boolean.class, "(boolean)", (cs, k, idx) -> data[idx] = cs.getBoolean(k)); } - private static void assertCopyImageSliceArgs(final Table t, final long rowStart, - final int dataLength, final int nRow, final int width, final int height, - final boolean color) { + private static void assertCopyImageSliceArgs(final Table t, final long rowStart, final int dataLength, + final int nRow, final int width, final int height, final boolean color) { if (t == null) { throw new IllegalArgumentException("t must not be null"); } @@ -588,28 +561,25 @@ private static void assertCopyImageSliceArgs(final Table t, final long rowStart, if (color) { if (dataLength != nRow * height * width * 3) { - throw new IllegalArgumentException( - "data is expected to be of length nRow*height*width*3. length=" + dataLength - + " nRow=" + nRow + " width=" + width + " height=" + height); + throw new IllegalArgumentException("data is expected to be of length nRow*height*width*3. length=" + + dataLength + " nRow=" + nRow + " width=" + width + " height=" + height); } } else { if (dataLength != nRow * height * width) { - throw new IllegalArgumentException( - "data is expected to be of length nRow*height*width. length=" + dataLength - + " nRow=" + nRow + " width=" + width + " height=" + height); + throw new IllegalArgumentException("data is expected to be of length nRow*height*width. length=" + + dataLength + " nRow=" + nRow + " width=" + width + " height=" + height); } } final long rowEnd = rowStart + nRow; if (rowStart < 0 || rowEnd > t.size()) { - throw new IllegalArgumentException( - "Selected rows that are not within the table. 
table=[0," + t.size() + "] rowStart=" - + rowStart + " rowEnd=" + rowEnd); + throw new IllegalArgumentException("Selected rows that are not within the table. table=[0," + t.size() + + "] rowStart=" + rowStart + " rowEnd=" + rowEnd); } } - private static DbImage getImage(final ColumnSource c, final long k, final int width, - final int height, final boolean resize) { + private static DbImage getImage(final ColumnSource c, final long k, final int width, final int height, + final boolean resize) { final DbImage img = (DbImage) c.get(k); if (img == null) { @@ -624,10 +594,8 @@ private static DbImage getImage(final ColumnSource c, final long k, final int wi } } else { if (img.getWidth() != width || img.getHeight() != height) { - throw new IllegalArgumentException( - "Image size does not match expected size. index=" + k + " image=(" - + img.getWidth() + "," + img.getHeight() + ") expected=(" + width + "," - + height + ")"); + throw new IllegalArgumentException("Image size does not match expected size. index=" + k + " image=(" + + img.getWidth() + "," + img.getHeight() + ") expected=(" + width + "," + height + ")"); } else { return img; } @@ -655,8 +623,8 @@ private interface IntValSetter { * @param color true to return a color image; false to return a gray-scale image. * @param setter setter used to assign data */ - private static void setImage(final DbImage img, final int row, final int width, - final int height, final boolean color, final IntValSetter setter) { + private static void setImage(final DbImage img, final int row, final int width, final int height, + final boolean color, final IntValSetter setter) { if (color) { for (int h = 0; h < height; h++) { for (int w = 0; w < width; w++) { @@ -679,9 +647,8 @@ private static void setImage(final DbImage img, final int row, final int width, } /** - * Copies a slice of image rows into a flattened array. For color, the array is indexed as [row, - * height, width, channel], where channel is red, green, blue. 
For grayscale, the array is - * indexed as [row, height, width]. + * Copies a slice of image rows into a flattened array. For color, the array is indexed as [row, height, width, + * channel], where channel is red, green, blue. For grayscale, the array is indexed as [row, height, width]. *

    * This is useful for copying table images directly into arrays. * @@ -694,9 +661,8 @@ private static void setImage(final DbImage img, final int row, final int width, * @param color true to return a color image; false to return a gray-scale image. * @param setter setter used to assign data */ - public static void copyImageSlice(final Table t, final long rowStart, final int nRow, - final int width, final int height, final boolean resize, final boolean color, - final IntValSetter setter) { + public static void copyImageSlice(final Table t, final long rowStart, final int nRow, final int width, + final int height, final boolean resize, final boolean color, final IntValSetter setter) { final DataColumn c = t.getColumn(0); final ColumnSource cs = t.getColumnSource(c.getName()); @@ -711,9 +677,8 @@ public static void copyImageSlice(final Table t, final long rowStart, final int } /** - * Copies a slice of image rows into a flattened array. For color, the array is indexed as [row, - * height, width, channel], where channel is red, green, blue. For grayscale, the array is - * indexed as [row, height, width]. + * Copies a slice of image rows into a flattened array. For color, the array is indexed as [row, height, width, + * channel], where channel is red, green, blue. For grayscale, the array is indexed as [row, height, width]. *

    * This is useful for copying table images directly into numpy arrays. * @@ -726,20 +691,16 @@ public static void copyImageSlice(final Table t, final long rowStart, final int * @param resize true to resize the image to the target size; false otherwise. * @param color true to return a color image; false to return a gray-scale image. */ - public static void copyImageSlice(final Table t, final long rowStart, final short[] data, - final int nRow, final int width, final int height, final boolean resize, - final boolean color) { - assertCopyImageSliceArgs(t, rowStart, data == null ? -1 : data.length, nRow, width, height, - color); + public static void copyImageSlice(final Table t, final long rowStart, final short[] data, final int nRow, + final int width, final int height, final boolean resize, final boolean color) { + assertCopyImageSliceArgs(t, rowStart, data == null ? -1 : data.length, nRow, width, height, color); assert data != null; - copyImageSlice(t, rowStart, nRow, width, height, resize, color, - (idx, v) -> data[idx] = (short) v); + copyImageSlice(t, rowStart, nRow, width, height, resize, color, (idx, v) -> data[idx] = (short) v); } /** - * Copies a slice of image rows into a flattened array. For color, the array is indexed as [row, - * height, width, channel], where channel is red, green, blue. For grayscale, the array is - * indexed as [row, height, width]. + * Copies a slice of image rows into a flattened array. For color, the array is indexed as [row, height, width, + * channel], where channel is red, green, blue. For grayscale, the array is indexed as [row, height, width]. *

    * This is useful for copying table images directly into numpy arrays. * @@ -752,19 +713,16 @@ public static void copyImageSlice(final Table t, final long rowStart, final shor * @param resize true to resize the image to the target size; false otherwise. * @param color true to return a color image; false to return a gray-scale image. */ - public static void copyImageSlice(final Table t, final long rowStart, final int[] data, - final int nRow, final int width, final int height, final boolean resize, - final boolean color) { - assertCopyImageSliceArgs(t, rowStart, data == null ? -1 : data.length, nRow, width, height, - color); + public static void copyImageSlice(final Table t, final long rowStart, final int[] data, final int nRow, + final int width, final int height, final boolean resize, final boolean color) { + assertCopyImageSliceArgs(t, rowStart, data == null ? -1 : data.length, nRow, width, height, color); assert data != null; copyImageSlice(t, rowStart, nRow, width, height, resize, color, (idx, v) -> data[idx] = v); } /** - * Copies a slice of image rows into a flattened array. For color, the array is indexed as [row, - * height, width, channel], where channel is red, green, blue. For grayscale, the array is - * indexed as [row, height, width]. + * Copies a slice of image rows into a flattened array. For color, the array is indexed as [row, height, width, + * channel], where channel is red, green, blue. For grayscale, the array is indexed as [row, height, width]. *

    * This is useful for copying table images directly into numpy arrays. * @@ -777,20 +735,16 @@ public static void copyImageSlice(final Table t, final long rowStart, final int[ * @param resize true to resize the image to the target size; false otherwise. * @param color true to return a color image; false to return a gray-scale image. */ - public static void copyImageSlice(final Table t, final long rowStart, final long[] data, - final int nRow, final int width, final int height, final boolean resize, - final boolean color) { - assertCopyImageSliceArgs(t, rowStart, data == null ? -1 : data.length, nRow, width, height, - color); + public static void copyImageSlice(final Table t, final long rowStart, final long[] data, final int nRow, + final int width, final int height, final boolean resize, final boolean color) { + assertCopyImageSliceArgs(t, rowStart, data == null ? -1 : data.length, nRow, width, height, color); assert data != null; - copyImageSlice(t, rowStart, nRow, width, height, resize, color, - (idx, v) -> data[idx] = (long) v); + copyImageSlice(t, rowStart, nRow, width, height, resize, color, (idx, v) -> data[idx] = (long) v); } /** - * Copies a slice of image rows into a flattened array. For color, the array is indexed as [row, - * height, width, channel], where channel is red, green, blue. For grayscale, the array is - * indexed as [row, height, width]. + * Copies a slice of image rows into a flattened array. For color, the array is indexed as [row, height, width, + * channel], where channel is red, green, blue. For grayscale, the array is indexed as [row, height, width]. *

    * This is useful for copying table images directly into numpy arrays. * @@ -803,20 +757,16 @@ public static void copyImageSlice(final Table t, final long rowStart, final long * @param resize true to resize the image to the target size; false otherwise. * @param color true to return a color image; false to return a gray-scale image. */ - public static void copyImageSlice(final Table t, final long rowStart, final float[] data, - final int nRow, final int width, final int height, final boolean resize, - final boolean color) { - assertCopyImageSliceArgs(t, rowStart, data == null ? -1 : data.length, nRow, width, height, - color); + public static void copyImageSlice(final Table t, final long rowStart, final float[] data, final int nRow, + final int width, final int height, final boolean resize, final boolean color) { + assertCopyImageSliceArgs(t, rowStart, data == null ? -1 : data.length, nRow, width, height, color); assert data != null; - copyImageSlice(t, rowStart, nRow, width, height, resize, color, - (idx, v) -> data[idx] = (float) v); + copyImageSlice(t, rowStart, nRow, width, height, resize, color, (idx, v) -> data[idx] = (float) v); } /** - * Copies a slice of image rows into a flattened array. For color, the array is indexed as [row, - * height, width, channel], where channel is red, green, blue. For grayscale, the array is - * indexed as [row, height, width]. + * Copies a slice of image rows into a flattened array. For color, the array is indexed as [row, height, width, + * channel], where channel is red, green, blue. For grayscale, the array is indexed as [row, height, width]. *

    * This is useful for copying table images directly into numpy arrays. * @@ -829,19 +779,16 @@ public static void copyImageSlice(final Table t, final long rowStart, final floa * @param resize true to resize the image to the target size; false otherwise. * @param color true to return a color image; false to return a gray-scale image. */ - public static void copyImageSlice(final Table t, final long rowStart, final double[] data, - final int nRow, final int width, final int height, final boolean resize, - final boolean color) { - assertCopyImageSliceArgs(t, rowStart, data == null ? -1 : data.length, nRow, width, height, - color); + public static void copyImageSlice(final Table t, final long rowStart, final double[] data, final int nRow, + final int width, final int height, final boolean resize, final boolean color) { + assertCopyImageSliceArgs(t, rowStart, data == null ? -1 : data.length, nRow, width, height, color); assert data != null; - copyImageSlice(t, rowStart, nRow, width, height, resize, color, - (idx, v) -> data[idx] = (double) v); + copyImageSlice(t, rowStart, nRow, width, height, resize, color, (idx, v) -> data[idx] = (double) v); } - private static void assertCopyImageRandArgs(final Table t, final int dataLength, final int nRow, - final int width, final int height, final boolean color, final long[] rows) { + private static void assertCopyImageRandArgs(final Table t, final int dataLength, final int nRow, final int width, + final int height, final boolean color, final long[] rows) { if (t == null) { throw new IllegalArgumentException("t must not be null"); } @@ -864,28 +811,25 @@ private static void assertCopyImageRandArgs(final Table t, final int dataLength, if (color) { if (dataLength != nRow * height * width * 3) { - throw new IllegalArgumentException( - "data is expected to be of length nRow*height*width*3. 
length=" + dataLength - + " nRow=" + nRow + " width=" + width + " height=" + height); + throw new IllegalArgumentException("data is expected to be of length nRow*height*width*3. length=" + + dataLength + " nRow=" + nRow + " width=" + width + " height=" + height); } } else { if (dataLength != nRow * height * width) { - throw new IllegalArgumentException( - "data is expected to be of length nRow*height*width. length=" + dataLength - + " nRow=" + nRow + " width=" + width + " height=" + height); + throw new IllegalArgumentException("data is expected to be of length nRow*height*width. length=" + + dataLength + " nRow=" + nRow + " width=" + width + " height=" + height); } } if (rows != null && rows.length != nRow) { - throw new IllegalArgumentException("Length of rows does not match nRow. rows.length=" - + rows.length + " nRow=" + nRow); + throw new IllegalArgumentException( + "Length of rows does not match nRow. rows.length=" + rows.length + " nRow=" + nRow); } } /** - * Copies a random selection of image rows into a flattened array. For color, the array is - * indexed as [row, height, width, channel], where channel is red, green, blue. For grayscale, - * the array is indexed as [row, height, width]. + * Copies a random selection of image rows into a flattened array. For color, the array is indexed as [row, height, + * width, channel], where channel is red, green, blue. For grayscale, the array is indexed as [row, height, width]. *

    * This is useful for copying table data directly into arrays. * @@ -898,9 +842,8 @@ private static void assertCopyImageRandArgs(final Table t, final int dataLength, * @param rows indices of rows to copy. Null causes rows to be randomly generated. * @param setter setter used to assign data */ - public static void copyImageRand(final Table t, final int nRow, final int width, - final int height, final boolean resize, final boolean color, final long[] rows, - final IntValSetter setter) { + public static void copyImageRand(final Table t, final int nRow, final int width, final int height, + final boolean resize, final boolean color, final long[] rows, final IntValSetter setter) { final long s = t.size(); final long[] tidxs = rows == null ? randRows(nRow, s) : rows; @@ -913,8 +856,7 @@ public static void copyImageRand(final Table t, final int nRow, final int width, final long ridx = tidxs[r]; if (ridx < 0 || ridx >= s) { - throw new IllegalArgumentException( - "Table index out of range. range=[0," + (s - 1) + "] idx=" + ridx); + throw new IllegalArgumentException("Table index out of range. range=[0," + (s - 1) + "] idx=" + ridx); } final long k = index.get(r); @@ -924,9 +866,8 @@ public static void copyImageRand(final Table t, final int nRow, final int width, } /** - * Copies a random selection of image rows into a flattened array. For color, the array is - * indexed as [row, height, width, channel], where channel is red, green, blue. For grayscale, - * the array is indexed as [row, height, width]. + * Copies a random selection of image rows into a flattened array. For color, the array is indexed as [row, height, + * width, channel], where channel is red, green, blue. For grayscale, the array is indexed as [row, height, width]. *

    * This is useful for copying table data directly into numpy arrays. * @@ -939,20 +880,16 @@ public static void copyImageRand(final Table t, final int nRow, final int width, * @param color true to return a color image; false to return a gray-scale image. * @param rows indices of rows to copy. Null causes rows to be randomly generated. */ - public static void copyImageRand(final Table t, final short[] data, final int nRow, - final int width, final int height, final boolean resize, final boolean color, - final long[] rows) { - assertCopyImageRandArgs(t, data == null ? -1 : data.length, nRow, width, height, color, - rows); + public static void copyImageRand(final Table t, final short[] data, final int nRow, final int width, + final int height, final boolean resize, final boolean color, final long[] rows) { + assertCopyImageRandArgs(t, data == null ? -1 : data.length, nRow, width, height, color, rows); assert data != null; - copyImageRand(t, nRow, width, height, resize, color, rows, - (idx, val) -> data[idx] = (short) val); + copyImageRand(t, nRow, width, height, resize, color, rows, (idx, val) -> data[idx] = (short) val); } /** - * Copies a random selection of image rows into a flattened array. For color, the array is - * indexed as [row, height, width, channel], where channel is red, green, blue. For grayscale, - * the array is indexed as [row, height, width]. + * Copies a random selection of image rows into a flattened array. For color, the array is indexed as [row, height, + * width, channel], where channel is red, green, blue. For grayscale, the array is indexed as [row, height, width]. *

    * This is useful for copying table data directly into numpy arrays. * @@ -965,19 +902,16 @@ public static void copyImageRand(final Table t, final short[] data, final int nR * @param color true to return a color image; false to return a gray-scale image. * @param rows indices of rows to copy. Null causes rows to be randomly generated. */ - public static void copyImageRand(final Table t, final int[] data, final int nRow, - final int width, final int height, final boolean resize, final boolean color, - final long[] rows) { - assertCopyImageRandArgs(t, data == null ? -1 : data.length, nRow, width, height, color, - rows); + public static void copyImageRand(final Table t, final int[] data, final int nRow, final int width, final int height, + final boolean resize, final boolean color, final long[] rows) { + assertCopyImageRandArgs(t, data == null ? -1 : data.length, nRow, width, height, color, rows); assert data != null; copyImageRand(t, nRow, width, height, resize, color, rows, (idx, val) -> data[idx] = val); } /** - * Copies a random selection of image rows into a flattened array. For color, the array is - * indexed as [row, height, width, channel], where channel is red, green, blue. For grayscale, - * the array is indexed as [row, height, width]. + * Copies a random selection of image rows into a flattened array. For color, the array is indexed as [row, height, + * width, channel], where channel is red, green, blue. For grayscale, the array is indexed as [row, height, width]. *

    * This is useful for copying table data directly into numpy arrays. * @@ -990,20 +924,16 @@ public static void copyImageRand(final Table t, final int[] data, final int nRow * @param color true to return a color image; false to return a gray-scale image. * @param rows indices of rows to copy. Null causes rows to be randomly generated. */ - public static void copyImageRand(final Table t, final long[] data, final int nRow, - final int width, final int height, final boolean resize, final boolean color, - final long[] rows) { - assertCopyImageRandArgs(t, data == null ? -1 : data.length, nRow, width, height, color, - rows); + public static void copyImageRand(final Table t, final long[] data, final int nRow, final int width, + final int height, final boolean resize, final boolean color, final long[] rows) { + assertCopyImageRandArgs(t, data == null ? -1 : data.length, nRow, width, height, color, rows); assert data != null; - copyImageRand(t, nRow, width, height, resize, color, rows, - (idx, val) -> data[idx] = (long) val); + copyImageRand(t, nRow, width, height, resize, color, rows, (idx, val) -> data[idx] = (long) val); } /** - * Copies a random selection of image rows into a flattened array. For color, the array is - * indexed as [row, height, width, channel], where channel is red, green, blue. For grayscale, - * the array is indexed as [row, height, width]. + * Copies a random selection of image rows into a flattened array. For color, the array is indexed as [row, height, + * width, channel], where channel is red, green, blue. For grayscale, the array is indexed as [row, height, width]. *

    * This is useful for copying table data directly into numpy arrays. * @@ -1016,20 +946,16 @@ public static void copyImageRand(final Table t, final long[] data, final int nRo * @param color true to return a color image; false to return a gray-scale image. * @param rows indices of rows to copy. Null causes rows to be randomly generated. */ - public static void copyImageRand(final Table t, final float[] data, final int nRow, - final int width, final int height, final boolean resize, final boolean color, - final long[] rows) { - assertCopyImageRandArgs(t, data == null ? -1 : data.length, nRow, width, height, color, - rows); + public static void copyImageRand(final Table t, final float[] data, final int nRow, final int width, + final int height, final boolean resize, final boolean color, final long[] rows) { + assertCopyImageRandArgs(t, data == null ? -1 : data.length, nRow, width, height, color, rows); assert data != null; - copyImageRand(t, nRow, width, height, resize, color, rows, - (idx, val) -> data[idx] = (float) val); + copyImageRand(t, nRow, width, height, resize, color, rows, (idx, val) -> data[idx] = (float) val); } /** - * Copies a random selection of image rows into a flattened array. For color, the array is - * indexed as [row, height, width, channel], where channel is red, green, blue. For grayscale, - * the array is indexed as [row, height, width]. + * Copies a random selection of image rows into a flattened array. For color, the array is indexed as [row, height, + * width, channel], where channel is red, green, blue. For grayscale, the array is indexed as [row, height, width]. *

    * This is useful for copying table data directly into numpy arrays. * @@ -1042,14 +968,11 @@ public static void copyImageRand(final Table t, final float[] data, final int nR * @param color true to return a color image; false to return a gray-scale image. * @param rows indices of rows to copy. Null causes rows to be randomly generated. */ - public static void copyImageRand(final Table t, final double[] data, final int nRow, - final int width, final int height, final boolean resize, final boolean color, - final long[] rows) { - assertCopyImageRandArgs(t, data == null ? -1 : data.length, nRow, width, height, color, - rows); + public static void copyImageRand(final Table t, final double[] data, final int nRow, final int width, + final int height, final boolean resize, final boolean color, final long[] rows) { + assertCopyImageRandArgs(t, data == null ? -1 : data.length, nRow, width, height, color, rows); assert data != null; - copyImageRand(t, nRow, width, height, resize, color, rows, - (idx, val) -> data[idx] = (double) val); + copyImageRand(t, nRow, width, height, resize, color, rows, (idx, val) -> data[idx] = (double) val); } } diff --git a/Integrations/src/main/java/io/deephaven/integrations/python/PythonFunction.java b/Integrations/src/main/java/io/deephaven/integrations/python/PythonFunction.java index 22b20741878..eb08f4a1942 100644 --- a/Integrations/src/main/java/io/deephaven/integrations/python/PythonFunction.java +++ b/Integrations/src/main/java/io/deephaven/integrations/python/PythonFunction.java @@ -22,31 +22,28 @@ public class PythonFunction implements Function { /** * Creates a {@link Function} which calls a Python function. * - * @param pyObjectIn the python object providing the function - must either be callable or have - * an `apply` attribute which is callable. - * @param classOut the specific java class to interpret the return for the method. 
Note that - * this is probably only really useful if `classOut` is one of String, double, float, - * long, int, short, byte, or boolean. Otherwise, the return element will likely just - * remain PyObject, and not be particularly usable inside Java. + * @param pyObjectIn the python object providing the function - must either be callable or have an `apply` attribute + * which is callable. + * @param classOut the specific java class to interpret the return for the method. Note that this is probably only + * really useful if `classOut` is one of String, double, float, long, int, short, byte, or boolean. + * Otherwise, the return element will likely just remain PyObject, and not be particularly usable inside + * Java. */ public PythonFunction(final PyObject pyObjectIn, final Class classOut) { if (pyObjectIn.hasAttribute("apply")) { pyCallable = pyObjectIn.getAttribute("apply"); if (!pyCallable.hasAttribute("__call__")) { - throw new IllegalArgumentException( - "The Python object provided has an apply attribute " + + throw new IllegalArgumentException("The Python object provided has an apply attribute " + "which is not callable"); } } else if (pyObjectIn.hasAttribute("__call__")) { pyCallable = pyObjectIn; } else { - throw new IllegalArgumentException( - "The Python object specified should either be callable, or a " + + throw new IllegalArgumentException("The Python object specified should either be callable, or a " + "class instance with an apply method"); } - // Note: Potentially important types omitted -simply because handling from python is not - // super clear: + // Note: Potentially important types omitted -simply because handling from python is not super clear: // Character/char, BigInteger, BigDecimal if (CharSequence.class.isAssignableFrom(classOut)) { getter = new StringValueGetter(); @@ -111,8 +108,7 @@ public Object apply(PyObject valueIn) { if (valueIn == null) { return QueryConstants.NULL_FLOAT; } - return (float) valueIn.getDoubleValue(); // NB: should 
there be a getFloatValue() in - // jpy? + return (float) valueIn.getDoubleValue(); // NB: should there be a getFloatValue() in jpy? } } diff --git a/Integrations/src/main/java/io/deephaven/integrations/python/PythonListenerAdapter.java b/Integrations/src/main/java/io/deephaven/integrations/python/PythonListenerAdapter.java index 5716d23d999..3204d83eeff 100644 --- a/Integrations/src/main/java/io/deephaven/integrations/python/PythonListenerAdapter.java +++ b/Integrations/src/main/java/io/deephaven/integrations/python/PythonListenerAdapter.java @@ -14,9 +14,8 @@ /** * A Deephaven table listener which passes update events to a Python listener object. * - * The Python listener object can be either (1) a callable or (2) an object which provides an - * "onUpdate" method. In either case, the method must take three arguments (added, removed, - * modified). + * The Python listener object can be either (1) a callable or (2) an object which provides an "onUpdate" method. In + * either case, the method must take three arguments (added, removed, modified). */ @ScriptApi public class PythonListenerAdapter extends InstrumentedListenerAdapter { @@ -26,10 +25,9 @@ public class PythonListenerAdapter extends InstrumentedListenerAdapter { /** * Create a Python listener. * - * No description for this listener will be provided. A hard reference to this listener will be - * maintained to prevent garbage collection. See - * {@link #PythonListenerAdapter(String, DynamicTable, boolean, PyObject)} if you do not want to - * prevent garbage collection of this listener. + * No description for this listener will be provided. A hard reference to this listener will be maintained to + * prevent garbage collection. See {@link #PythonListenerAdapter(String, DynamicTable, boolean, PyObject)} if you do + * not want to prevent garbage collection of this listener. * * @param source The source table to which this listener will subscribe. * @param pyObjectIn Python listener object. 
@@ -42,11 +40,10 @@ public PythonListenerAdapter(DynamicTable source, PyObject pyObjectIn) { * Create a Python listener. * * A hard reference to this listener will be maintained to prevent garbage collection. See - * {@link #PythonListenerAdapter(String, DynamicTable, boolean, PyObject)} if you do not want to - * prevent garbage collection of this listener. + * {@link #PythonListenerAdapter(String, DynamicTable, boolean, PyObject)} if you do not want to prevent garbage + * collection of this listener. * - * @param description A description for the UpdatePerformanceTracker to append to its entry - * description. + * @param description A description for the UpdatePerformanceTracker to append to its entry description. * @param source The source table to which this listener will subscribe. * @param pyObjectIn Python listener object. */ @@ -57,15 +54,12 @@ public PythonListenerAdapter(String description, DynamicTable source, PyObject p /** * Create a Python listener. * - * @param description A description for the UpdatePerformanceTracker to append to its entry - * description. + * @param description A description for the UpdatePerformanceTracker to append to its entry description. * @param source The source table to which this listener will subscribe. - * @param retain Whether a hard reference to this listener should be maintained to prevent it - * from being collected. + * @param retain Whether a hard reference to this listener should be maintained to prevent it from being collected. * @param pyObjectIn Python listener object. 
*/ - public PythonListenerAdapter(String description, DynamicTable source, boolean retain, - PyObject pyObjectIn) { + public PythonListenerAdapter(String description, DynamicTable source, boolean retain, PyObject pyObjectIn) { super(description, source, retain); pyCallable = PythonUtilities.pyListenerFunc(pyObjectIn); } diff --git a/Integrations/src/main/java/io/deephaven/integrations/python/PythonReplayListenerAdapter.java b/Integrations/src/main/java/io/deephaven/integrations/python/PythonReplayListenerAdapter.java index dfed034f047..84d9abd7c76 100644 --- a/Integrations/src/main/java/io/deephaven/integrations/python/PythonReplayListenerAdapter.java +++ b/Integrations/src/main/java/io/deephaven/integrations/python/PythonReplayListenerAdapter.java @@ -12,26 +12,23 @@ /** - * A Deephaven table listener which passes update events to a Python listener object. The listener - * can also replay the current table snapshot. + * A Deephaven table listener which passes update events to a Python listener object. The listener can also replay the + * current table snapshot. * - * The Python listener object can be either (1) a callable or (2) an object which provides an - * "onUpdate" method. In either case, the method must take four arguments (isReplay, added, removed, - * modified). + * The Python listener object can be either (1) a callable or (2) an object which provides an "onUpdate" method. In + * either case, the method must take four arguments (isReplay, added, removed, modified). */ @ScriptApi -public class PythonReplayListenerAdapter extends InstrumentedListenerAdapter - implements TableSnapshotReplayer { +public class PythonReplayListenerAdapter extends InstrumentedListenerAdapter implements TableSnapshotReplayer { private static final long serialVersionUID = -356456046697273581L; private final PyObject pyCallable; /** * Create a Python listener. * - * No description for this listener will be provided. 
A hard reference to this listener will be - * maintained to prevent garbage collection. See - * {@link #PythonReplayListenerAdapter(String, DynamicTable, boolean, PyObject)} if you do not - * want to prevent garbage collection of this listener. + * No description for this listener will be provided. A hard reference to this listener will be maintained to + * prevent garbage collection. See {@link #PythonReplayListenerAdapter(String, DynamicTable, boolean, PyObject)} if + * you do not want to prevent garbage collection of this listener. * * @param source The source table to which this listener will subscribe. * @param pyObjectIn Python listener object. @@ -44,31 +41,26 @@ public PythonReplayListenerAdapter(DynamicTable source, PyObject pyObjectIn) { * Create a Python listener. * * A hard reference to this listener will be maintained to prevent garbage collection. See - * {@link #PythonReplayListenerAdapter(String, DynamicTable, boolean, PyObject)} if you do not - * want to prevent garbage collection of this listener. + * {@link #PythonReplayListenerAdapter(String, DynamicTable, boolean, PyObject)} if you do not want to prevent + * garbage collection of this listener. * - * @param description A description for the UpdatePerformanceTracker to append to its entry - * description. + * @param description A description for the UpdatePerformanceTracker to append to its entry description. * @param source The source table to which this listener will subscribe. * @param pyObjectIn Python listener object. */ - public PythonReplayListenerAdapter(String description, DynamicTable source, - PyObject pyObjectIn) { + public PythonReplayListenerAdapter(String description, DynamicTable source, PyObject pyObjectIn) { this(description, source, true, pyObjectIn); } /** * Create a Python listener. * - * @param description A description for the UpdatePerformanceTracker to append to its entry - * description. 
+ * @param description A description for the UpdatePerformanceTracker to append to its entry description. * @param source The source table to which this listener will subscribe. - * @param retain Whether a hard reference to this listener should be maintained to prevent it - * from being collected. + * @param retain Whether a hard reference to this listener should be maintained to prevent it from being collected. * @param pyObjectIn Python listener object. */ - public PythonReplayListenerAdapter(String description, DynamicTable source, boolean retain, - PyObject pyObjectIn) { + public PythonReplayListenerAdapter(String description, DynamicTable source, boolean retain, PyObject pyObjectIn) { super(description, source, retain); pyCallable = PythonUtilities.pyListenerFunc(pyObjectIn); } diff --git a/Integrations/src/main/java/io/deephaven/integrations/python/PythonShiftAwareListenerAdapter.java b/Integrations/src/main/java/io/deephaven/integrations/python/PythonShiftAwareListenerAdapter.java index ec2003d443a..82db8522772 100644 --- a/Integrations/src/main/java/io/deephaven/integrations/python/PythonShiftAwareListenerAdapter.java +++ b/Integrations/src/main/java/io/deephaven/integrations/python/PythonShiftAwareListenerAdapter.java @@ -13,8 +13,8 @@ /** * A Deephaven table listener which passes update events to a Python listener object. * - * The Python listener object can be either (1) a callable or (2) an object which provides an - * "onUpdate" method. In either case, the method must take one argument (updates). + * The Python listener object can be either (1) a callable or (2) an object which provides an "onUpdate" method. In + * either case, the method must take one argument (updates). */ @ScriptApi public class PythonShiftAwareListenerAdapter extends InstrumentedShiftAwareListenerAdapter { @@ -24,10 +24,9 @@ public class PythonShiftAwareListenerAdapter extends InstrumentedShiftAwareListe /** * Creates a Python listener. 
* - * No description for this listener will be provided. A hard reference to this listener will be - * maintained to prevent garbage collection. See - * {@link #PythonShiftAwareListenerAdapter(String, DynamicTable, boolean, PyObject)} if you do - * not want to prevent garbage collection of this listener. + * No description for this listener will be provided. A hard reference to this listener will be maintained to + * prevent garbage collection. See {@link #PythonShiftAwareListenerAdapter(String, DynamicTable, boolean, PyObject)} + * if you do not want to prevent garbage collection of this listener. * * @param source The source table to which this listener will subscribe. * @param pyObjectIn Python listener object. @@ -40,31 +39,27 @@ public PythonShiftAwareListenerAdapter(DynamicTable source, PyObject pyObjectIn) * Create a Python listener. * * A hard reference to this listener will be maintained to prevent garbage collection. See - * {@link #PythonShiftAwareListenerAdapter(String, DynamicTable, boolean, PyObject)} if you do - * not want to prevent garbage collection of this listener. + * {@link #PythonShiftAwareListenerAdapter(String, DynamicTable, boolean, PyObject)} if you do not want to prevent + * garbage collection of this listener. * - * @param description A description for the UpdatePerformanceTracker to append to its entry - * description. + * @param description A description for the UpdatePerformanceTracker to append to its entry description. * @param source The source table to which this listener will subscribe. * @param pyObjectIn Python listener object. */ - public PythonShiftAwareListenerAdapter(String description, DynamicTable source, - PyObject pyObjectIn) { + public PythonShiftAwareListenerAdapter(String description, DynamicTable source, PyObject pyObjectIn) { this(description, source, true, pyObjectIn); } /** * Create a Python listener. * - * @param description A description for the UpdatePerformanceTracker to append to its entry - * description. 
+ * @param description A description for the UpdatePerformanceTracker to append to its entry description. * @param source The source table to which this listener will subscribe. - * @param retain Whether a hard reference to this listener should be maintained to prevent it - * from being collected. + * @param retain Whether a hard reference to this listener should be maintained to prevent it from being collected. * @param pyObjectIn Python listener object. */ public PythonShiftAwareListenerAdapter(String description, DynamicTable source, boolean retain, - PyObject pyObjectIn) { + PyObject pyObjectIn) { super(description, source, retain); pyCallable = PythonUtilities.pyListenerFunc(pyObjectIn); } diff --git a/Integrations/src/main/java/io/deephaven/integrations/python/PythonShiftAwareReplayListenerAdapter.java b/Integrations/src/main/java/io/deephaven/integrations/python/PythonShiftAwareReplayListenerAdapter.java index e7196bafe6d..d8856ce3c3b 100644 --- a/Integrations/src/main/java/io/deephaven/integrations/python/PythonShiftAwareReplayListenerAdapter.java +++ b/Integrations/src/main/java/io/deephaven/integrations/python/PythonShiftAwareReplayListenerAdapter.java @@ -14,25 +14,25 @@ /** - * A Deephaven table listener which passes update events to a Python listener object. The listener - * can also replay the current table snapshot. + * A Deephaven table listener which passes update events to a Python listener object. The listener can also replay the + * current table snapshot. * - * The Python listener object can be either (1) a callable or (2) an object which provides an - * "onUpdate" method. In either case, the method must take two arguments (isReplay, updates). + * The Python listener object can be either (1) a callable or (2) an object which provides an "onUpdate" method. In + * either case, the method must take two arguments (isReplay, updates). 
*/ @ScriptApi public class PythonShiftAwareReplayListenerAdapter extends InstrumentedShiftAwareListenerAdapter - implements TableSnapshotReplayer { + implements TableSnapshotReplayer { private static final long serialVersionUID = -8882402061960621245L; private final PyObject pyCallable; /** * Create a Python listener. * - * No description for this listener will be provided. A hard reference to this listener will be - * maintained to prevent garbage collection. See - * {@link #PythonShiftAwareReplayListenerAdapter(String, DynamicTable, boolean, PyObject)} if - * you do not want to prevent garbage collection of this listener. + * No description for this listener will be provided. A hard reference to this listener will be maintained to + * prevent garbage collection. See + * {@link #PythonShiftAwareReplayListenerAdapter(String, DynamicTable, boolean, PyObject)} if you do not want to + * prevent garbage collection of this listener. * * @param source The source table to which this listener will subscribe. * @param pyObjectIn Python listener object. @@ -45,31 +45,27 @@ public PythonShiftAwareReplayListenerAdapter(DynamicTable source, PyObject pyObj * Create a Python listener. * * A hard reference to this listener will be maintained to prevent garbage collection. See - * {@link #PythonShiftAwareReplayListenerAdapter(String, DynamicTable, boolean, PyObject)} if - * you do not want to prevent garbage collection of this listener. + * {@link #PythonShiftAwareReplayListenerAdapter(String, DynamicTable, boolean, PyObject)} if you do not want to + * prevent garbage collection of this listener. * - * @param description A description for the UpdatePerformanceTracker to append to its entry - * description. + * @param description A description for the UpdatePerformanceTracker to append to its entry description. * @param source The source table to which this listener will subscribe. * @param pyObjectIn Python listener object. 
*/ - public PythonShiftAwareReplayListenerAdapter(String description, DynamicTable source, - PyObject pyObjectIn) { + public PythonShiftAwareReplayListenerAdapter(String description, DynamicTable source, PyObject pyObjectIn) { this(description, source, true, pyObjectIn); } /** * Create a Python listener. * - * @param description A description for the UpdatePerformanceTracker to append to its entry - * description. + * @param description A description for the UpdatePerformanceTracker to append to its entry description. * @param source The source table to which this listener will subscribe. - * @param retain Whether a hard reference to this listener should be maintained to prevent it - * from being collected. + * @param retain Whether a hard reference to this listener should be maintained to prevent it from being collected. * @param pyObjectIn Python listener object. */ - public PythonShiftAwareReplayListenerAdapter(String description, DynamicTable source, - boolean retain, PyObject pyObjectIn) { + public PythonShiftAwareReplayListenerAdapter(String description, DynamicTable source, boolean retain, + PyObject pyObjectIn) { super(description, source, retain); pyCallable = PythonUtilities.pyListenerFunc(pyObjectIn); } @@ -79,8 +75,7 @@ public void replay() { final Index emptyIndex = Index.FACTORY.getEmptyIndex(); final IndexShiftData emptyShift = IndexShiftData.EMPTY; final ModifiedColumnSet emptyColumnSet = ModifiedColumnSet.EMPTY; - final Update update = - new Update(source.getIndex(), emptyIndex, emptyIndex, emptyShift, emptyColumnSet); + final Update update = new Update(source.getIndex(), emptyIndex, emptyIndex, emptyShift, emptyColumnSet); final boolean isReplay = true; pyCallable.call("__call__", isReplay, update); } diff --git a/Integrations/src/main/java/io/deephaven/integrations/python/PythonThrowingRunnable.java b/Integrations/src/main/java/io/deephaven/integrations/python/PythonThrowingRunnable.java index 9deeeebeb40..530d60c40bc 100644 --- 
a/Integrations/src/main/java/io/deephaven/integrations/python/PythonThrowingRunnable.java +++ b/Integrations/src/main/java/io/deephaven/integrations/python/PythonThrowingRunnable.java @@ -18,22 +18,20 @@ public class PythonThrowingRunnable implements FunctionalInterfaces.ThrowingRunn /** * Creates a new runnable. * - * @param pyObjectIn the python object providing a function - must either be callable or have an - * "apply" attribute which is callable. + * @param pyObjectIn the python object providing a function - must either be callable or have an "apply" attribute + * which is callable. */ public PythonThrowingRunnable(final PyObject pyObjectIn) { if (pyObjectIn.hasAttribute("apply")) { pyCallable = pyObjectIn.getAttribute("apply"); if (!pyCallable.hasAttribute("__call__")) { - throw new IllegalArgumentException( - "The Python object provided has an apply attribute " + + throw new IllegalArgumentException("The Python object provided has an apply attribute " + "which is not callable"); } } else if (pyObjectIn.hasAttribute("__call__")) { pyCallable = pyObjectIn; } else { - throw new IllegalArgumentException( - "The Python object specified should either be callable, or a " + + throw new IllegalArgumentException("The Python object specified should either be callable, or a " + "class instance with an apply method"); } } diff --git a/Integrations/src/main/java/io/deephaven/integrations/python/PythonTools.java b/Integrations/src/main/java/io/deephaven/integrations/python/PythonTools.java index 902b927eab3..0ea6dfe0262 100644 --- a/Integrations/src/main/java/io/deephaven/integrations/python/PythonTools.java +++ b/Integrations/src/main/java/io/deephaven/integrations/python/PythonTools.java @@ -10,28 +10,25 @@ public class PythonTools { /** * Convert a map to a function. 
* - * @param map the map to convert to function; the caller gives up ownership to the returned - * object + * @param map the map to convert to function; the caller gives up ownership to the returned object * @param defaultValue the value to map to any keys not in the map; may be null * @return the resulting function */ @SuppressWarnings("unused") public static Function functionfromMapWithDefault( - final Map map, - final String defaultValue) { + final Map map, + final String defaultValue) { return (final String key) -> map.getOrDefault(key, defaultValue); } /** * Convert a map to a function, mapping any nonexistent keys in the map to themselves. * - * @param map the map to convert to function; the caller gives up ownership to the returned - * object + * @param map the map to convert to function; the caller gives up ownership to the returned object * @return the resulting function */ @SuppressWarnings("unused") - public static Function functionFromMapWithIdentityDefaults( - final Map map) { + public static Function functionFromMapWithIdentityDefaults(final Map map) { return (final String key) -> map.getOrDefault(key, key); } } diff --git a/Integrations/src/main/java/io/deephaven/integrations/python/PythonUtilities.java b/Integrations/src/main/java/io/deephaven/integrations/python/PythonUtilities.java index c9d7584ec57..dd837f613a9 100644 --- a/Integrations/src/main/java/io/deephaven/integrations/python/PythonUtilities.java +++ b/Integrations/src/main/java/io/deephaven/integrations/python/PythonUtilities.java @@ -12,11 +12,11 @@ class PythonUtilities { /** - * Gets the python function that should be called by a listener. The input can be either (1) a - * callable or (2) an object which provides an "onUpdate" method. + * Gets the python function that should be called by a listener. The input can be either (1) a callable or (2) an + * object which provides an "onUpdate" method. * - * @param pyObject python listener object. 
This should either be a callable or an object which - * provides an "onUpdate" method. + * @param pyObject python listener object. This should either be a callable or an object which provides an + * "onUpdate" method. * @return python function that should be called by a listener. * @throws IllegalArgumentException python listener object is not a valid listener. */ @@ -25,8 +25,7 @@ static PyObject pyListenerFunc(final PyObject pyObject) { PyObject pyCallable = pyObject.getAttribute("onUpdate"); if (!pyCallable.hasAttribute("__call__")) { - throw new IllegalArgumentException( - "The Python object provided has an onUpdate attribute " + + throw new IllegalArgumentException("The Python object provided has an onUpdate attribute " + "which is not callable"); } @@ -34,8 +33,7 @@ static PyObject pyListenerFunc(final PyObject pyObject) { } else if (pyObject.hasAttribute("__call__")) { return pyObject; } else { - throw new IllegalArgumentException( - "The Python object specified should either be callable, or a " + + throw new IllegalArgumentException("The Python object specified should either be callable, or a " + "class instance with an onUpdate method"); } } diff --git a/Integrations/src/main/java/io/deephaven/integrations/python/TableSnapshotReplayer.java b/Integrations/src/main/java/io/deephaven/integrations/python/TableSnapshotReplayer.java index 1b0c4450e0e..c8e1ca7d350 100644 --- a/Integrations/src/main/java/io/deephaven/integrations/python/TableSnapshotReplayer.java +++ b/Integrations/src/main/java/io/deephaven/integrations/python/TableSnapshotReplayer.java @@ -13,8 +13,8 @@ public interface TableSnapshotReplayer { /** - * Replay the current table snapshot into a listener. A shared or exclusive LTM lock should be - * held when calling this method. + * Replay the current table snapshot into a listener. A shared or exclusive LTM lock should be held when calling + * this method. 
*/ void replay(); } diff --git a/Integrations/src/test/java/io/deephaven/integrations/numpy/Java2NumpyCopyTest.java b/Integrations/src/test/java/io/deephaven/integrations/numpy/Java2NumpyCopyTest.java index 8e50aec7dee..3115138d73b 100644 --- a/Integrations/src/test/java/io/deephaven/integrations/numpy/Java2NumpyCopyTest.java +++ b/Integrations/src/test/java/io/deephaven/integrations/numpy/Java2NumpyCopyTest.java @@ -8,18 +8,18 @@ public class Java2NumpyCopyTest extends BaseArrayTestCase { final private String file = Configuration.getInstance().getDevRootPath() - + "/Integrations/src/test/java/io/deephaven/integrations/numpy/dh.jpg"; + + "/Integrations/src/test/java/io/deephaven/integrations/numpy/dh.jpg"; final private String q1 = Configuration.getInstance().getDevRootPath() - + "/Integrations/src/test/java/io/deephaven/integrations/numpy/quadrant1.jpg"; + + "/Integrations/src/test/java/io/deephaven/integrations/numpy/quadrant1.jpg"; final private String q2 = Configuration.getInstance().getDevRootPath() - + "/Integrations/src/test/java/io/deephaven/integrations/numpy/quadrant2.png"; + + "/Integrations/src/test/java/io/deephaven/integrations/numpy/quadrant2.png"; public void testTableType() { final Table tNum = TableTools.emptyTable(10).update("A=i", "B=1.0", "C=(float)1.0"); - final Table tImg = TableTools.emptyTable(10) - .update("D=io.deephaven.dbtypes.DbImage.newInstance(`" + file + "`)"); + final Table tImg = + TableTools.emptyTable(10).update("D=io.deephaven.dbtypes.DbImage.newInstance(`" + file + "`)"); final Table tFail = TableTools.emptyTable(10).update("A=i", "B=1.0", "C=(float)1.0", - "D=io.deephaven.dbtypes.DbImage.newInstance(`" + file + "`)"); + "D=io.deephaven.dbtypes.DbImage.newInstance(`" + file + "`)"); assertEquals(Java2NumpyCopy.TableType.NUMBER, Java2NumpyCopy.tableType(tNum)); assertEquals(Java2NumpyCopy.TableType.IMAGE, Java2NumpyCopy.tableType(tImg)); @@ -627,16 +627,16 @@ public void testCopyRandBoolean() { public void 
testCopyImageSliceShort() { final Table t = TableTools.emptyTable(10) - .update("A=io.deephaven.dbtypes.DbImage.newInstance(i%2==0?`" + q1 + "`:`" + q2 + "`)") - .update("A=i%2==0 ? A : A.resize(2,2)"); + .update("A=io.deephaven.dbtypes.DbImage.newInstance(i%2==0?`" + q1 + "`:`" + q2 + "`)") + .update("A=i%2==0 ? A : A.resize(2,2)"); final short[] targetBW = {85, 89, 150, 170, 206, 188, 171, 239}; final short[] rstBW = new short[8]; Java2NumpyCopy.copyImageSlice(t, 2, rstBW, 2, 2, 2, true, false); assertEquals(targetBW, rstBW); - final short[] targetC = {0, 255, 1, 255, 5, 7, 3, 193, 255, 255, 255, 0, 185, 205, 229, 195, - 214, 155, 217, 150, 148, 254, 254, 210}; + final short[] targetC = {0, 255, 1, 255, 5, 7, 3, 193, 255, 255, 255, 0, 185, 205, 229, 195, 214, 155, 217, 150, + 148, 254, 254, 210}; final short[] rstC = new short[24]; Java2NumpyCopy.copyImageSlice(t, 2, rstC, 2, 2, 2, true, true); assertEquals(targetC, rstC); @@ -644,16 +644,16 @@ public void testCopyImageSliceShort() { public void testCopyImageSliceInt() { final Table t = TableTools.emptyTable(10) - .update("A=io.deephaven.dbtypes.DbImage.newInstance(i%2==0?`" + q1 + "`:`" + q2 + "`)") - .update("A=i%2==0 ? A : A.resize(2,2)"); + .update("A=io.deephaven.dbtypes.DbImage.newInstance(i%2==0?`" + q1 + "`:`" + q2 + "`)") + .update("A=i%2==0 ? 
A : A.resize(2,2)"); final int[] targetBW = {85, 89, 150, 170, 206, 188, 171, 239}; final int[] rstBW = new int[8]; Java2NumpyCopy.copyImageSlice(t, 2, rstBW, 2, 2, 2, true, false); assertEquals(targetBW, rstBW); - final int[] targetC = {0, 255, 1, 255, 5, 7, 3, 193, 255, 255, 255, 0, 185, 205, 229, 195, - 214, 155, 217, 150, 148, 254, 254, 210}; + final int[] targetC = {0, 255, 1, 255, 5, 7, 3, 193, 255, 255, 255, 0, 185, 205, 229, 195, 214, 155, 217, 150, + 148, 254, 254, 210}; final int[] rstC = new int[24]; Java2NumpyCopy.copyImageSlice(t, 2, rstC, 2, 2, 2, true, true); assertEquals(targetC, rstC); @@ -661,16 +661,16 @@ public void testCopyImageSliceInt() { public void testCopyImageSliceLong() { final Table t = TableTools.emptyTable(10) - .update("A=io.deephaven.dbtypes.DbImage.newInstance(i%2==0?`" + q1 + "`:`" + q2 + "`)") - .update("A=i%2==0 ? A : A.resize(2,2)"); + .update("A=io.deephaven.dbtypes.DbImage.newInstance(i%2==0?`" + q1 + "`:`" + q2 + "`)") + .update("A=i%2==0 ? A : A.resize(2,2)"); final long[] targetBW = {85, 89, 150, 170, 206, 188, 171, 239}; final long[] rstBW = new long[8]; Java2NumpyCopy.copyImageSlice(t, 2, rstBW, 2, 2, 2, true, false); assertEquals(targetBW, rstBW); - final long[] targetC = {0, 255, 1, 255, 5, 7, 3, 193, 255, 255, 255, 0, 185, 205, 229, 195, - 214, 155, 217, 150, 148, 254, 254, 210}; + final long[] targetC = {0, 255, 1, 255, 5, 7, 3, 193, 255, 255, 255, 0, 185, 205, 229, 195, 214, 155, 217, 150, + 148, 254, 254, 210}; final long[] rstC = new long[24]; Java2NumpyCopy.copyImageSlice(t, 2, rstC, 2, 2, 2, true, true); assertEquals(targetC, rstC); @@ -678,16 +678,16 @@ public void testCopyImageSliceLong() { public void testCopyImageSliceFloat() { final Table t = TableTools.emptyTable(10) - .update("A=io.deephaven.dbtypes.DbImage.newInstance(i%2==0?`" + q1 + "`:`" + q2 + "`)") - .update("A=i%2==0 ? 
A : A.resize(2,2)"); + .update("A=io.deephaven.dbtypes.DbImage.newInstance(i%2==0?`" + q1 + "`:`" + q2 + "`)") + .update("A=i%2==0 ? A : A.resize(2,2)"); final float[] targetBW = {85, 89, 150, 170, 206, 188, 171, 239}; final float[] rstBW = new float[8]; Java2NumpyCopy.copyImageSlice(t, 2, rstBW, 2, 2, 2, true, false); assertEquals(targetBW, rstBW); - final float[] targetC = {0, 255, 1, 255, 5, 7, 3, 193, 255, 255, 255, 0, 185, 205, 229, 195, - 214, 155, 217, 150, 148, 254, 254, 210}; + final float[] targetC = {0, 255, 1, 255, 5, 7, 3, 193, 255, 255, 255, 0, 185, 205, 229, 195, 214, 155, 217, 150, + 148, 254, 254, 210}; final float[] rstC = new float[24]; Java2NumpyCopy.copyImageSlice(t, 2, rstC, 2, 2, 2, true, true); assertEquals(targetC, rstC); @@ -695,16 +695,16 @@ public void testCopyImageSliceFloat() { public void testCopyImageSliceDouble() { final Table t = TableTools.emptyTable(10) - .update("A=io.deephaven.dbtypes.DbImage.newInstance(i%2==0?`" + q1 + "`:`" + q2 + "`)") - .update("A=i%2==0 ? A : A.resize(2,2)"); + .update("A=io.deephaven.dbtypes.DbImage.newInstance(i%2==0?`" + q1 + "`:`" + q2 + "`)") + .update("A=i%2==0 ? 
A : A.resize(2,2)"); final double[] targetBW = {85, 89, 150, 170, 206, 188, 171, 239}; final double[] rstBW = new double[8]; Java2NumpyCopy.copyImageSlice(t, 2, rstBW, 2, 2, 2, true, false); assertEquals(targetBW, rstBW); - final double[] targetC = {0, 255, 1, 255, 5, 7, 3, 193, 255, 255, 255, 0, 185, 205, 229, - 195, 214, 155, 217, 150, 148, 254, 254, 210}; + final double[] targetC = {0, 255, 1, 255, 5, 7, 3, 193, 255, 255, 255, 0, 185, 205, 229, 195, 214, 155, 217, + 150, 148, 254, 254, 210}; final double[] rstC = new double[24]; Java2NumpyCopy.copyImageSlice(t, 2, rstC, 2, 2, 2, true, true); assertEquals(targetC, rstC); @@ -712,16 +712,16 @@ public void testCopyImageSliceDouble() { public void testCopyImageRandShort() { final Table t = TableTools.emptyTable(10) - .update("A=io.deephaven.dbtypes.DbImage.newInstance(i%2==0?`" + q1 + "`:`" + q2 + "`)") - .update("A=i%2==0 ? A : A.resize(2,2)"); + .update("A=io.deephaven.dbtypes.DbImage.newInstance(i%2==0?`" + q1 + "`:`" + q2 + "`)") + .update("A=i%2==0 ? A : A.resize(2,2)"); final short[] targetBW = {85, 89, 150, 170, 206, 188, 171, 239}; final short[] rstBW = new short[8]; Java2NumpyCopy.copyImageRand(t, rstBW, 2, 2, 2, true, false, new long[] {2, 3}); assertEquals(targetBW, rstBW); - final short[] targetC = {0, 255, 1, 255, 5, 7, 3, 193, 255, 255, 255, 0, 185, 205, 229, 195, - 214, 155, 217, 150, 148, 254, 254, 210}; + final short[] targetC = {0, 255, 1, 255, 5, 7, 3, 193, 255, 255, 255, 0, 185, 205, 229, 195, 214, 155, 217, 150, + 148, 254, 254, 210}; final short[] rstC = new short[24]; Java2NumpyCopy.copyImageRand(t, rstC, 2, 2, 2, true, true, new long[] {2, 3}); assertEquals(targetC, rstC); @@ -729,16 +729,16 @@ public void testCopyImageRandShort() { public void testCopyImageRandInt() { final Table t = TableTools.emptyTable(10) - .update("A=io.deephaven.dbtypes.DbImage.newInstance(i%2==0?`" + q1 + "`:`" + q2 + "`)") - .update("A=i%2==0 ? 
A : A.resize(2,2)"); + .update("A=io.deephaven.dbtypes.DbImage.newInstance(i%2==0?`" + q1 + "`:`" + q2 + "`)") + .update("A=i%2==0 ? A : A.resize(2,2)"); final int[] targetBW = {85, 89, 150, 170, 206, 188, 171, 239}; final int[] rstBW = new int[8]; Java2NumpyCopy.copyImageRand(t, rstBW, 2, 2, 2, true, false, new long[] {2, 3}); assertEquals(targetBW, rstBW); - final int[] targetC = {0, 255, 1, 255, 5, 7, 3, 193, 255, 255, 255, 0, 185, 205, 229, 195, - 214, 155, 217, 150, 148, 254, 254, 210}; + final int[] targetC = {0, 255, 1, 255, 5, 7, 3, 193, 255, 255, 255, 0, 185, 205, 229, 195, 214, 155, 217, 150, + 148, 254, 254, 210}; final int[] rstC = new int[24]; Java2NumpyCopy.copyImageRand(t, rstC, 2, 2, 2, true, true, new long[] {2, 3}); assertEquals(targetC, rstC); @@ -746,16 +746,16 @@ public void testCopyImageRandInt() { public void testCopyImageRandLong() { final Table t = TableTools.emptyTable(10) - .update("A=io.deephaven.dbtypes.DbImage.newInstance(i%2==0?`" + q1 + "`:`" + q2 + "`)") - .update("A=i%2==0 ? A : A.resize(2,2)"); + .update("A=io.deephaven.dbtypes.DbImage.newInstance(i%2==0?`" + q1 + "`:`" + q2 + "`)") + .update("A=i%2==0 ? 
A : A.resize(2,2)"); final long[] targetBW = {85, 89, 150, 170, 206, 188, 171, 239}; final long[] rstBW = new long[8]; Java2NumpyCopy.copyImageRand(t, rstBW, 2, 2, 2, true, false, new long[] {2, 3}); assertEquals(targetBW, rstBW); - final long[] targetC = {0, 255, 1, 255, 5, 7, 3, 193, 255, 255, 255, 0, 185, 205, 229, 195, - 214, 155, 217, 150, 148, 254, 254, 210}; + final long[] targetC = {0, 255, 1, 255, 5, 7, 3, 193, 255, 255, 255, 0, 185, 205, 229, 195, 214, 155, 217, 150, + 148, 254, 254, 210}; final long[] rstC = new long[24]; Java2NumpyCopy.copyImageRand(t, rstC, 2, 2, 2, true, true, new long[] {2, 3}); assertEquals(targetC, rstC); @@ -763,16 +763,16 @@ public void testCopyImageRandLong() { public void testCopyImageRandFloat() { final Table t = TableTools.emptyTable(10) - .update("A=io.deephaven.dbtypes.DbImage.newInstance(i%2==0?`" + q1 + "`:`" + q2 + "`)") - .update("A=i%2==0 ? A : A.resize(2,2)"); + .update("A=io.deephaven.dbtypes.DbImage.newInstance(i%2==0?`" + q1 + "`:`" + q2 + "`)") + .update("A=i%2==0 ? A : A.resize(2,2)"); final float[] targetBW = {85, 89, 150, 170, 206, 188, 171, 239}; final float[] rstBW = new float[8]; Java2NumpyCopy.copyImageRand(t, rstBW, 2, 2, 2, true, false, new long[] {2, 3}); assertEquals(targetBW, rstBW); - final float[] targetC = {0, 255, 1, 255, 5, 7, 3, 193, 255, 255, 255, 0, 185, 205, 229, 195, - 214, 155, 217, 150, 148, 254, 254, 210}; + final float[] targetC = {0, 255, 1, 255, 5, 7, 3, 193, 255, 255, 255, 0, 185, 205, 229, 195, 214, 155, 217, 150, + 148, 254, 254, 210}; final float[] rstC = new float[24]; Java2NumpyCopy.copyImageRand(t, rstC, 2, 2, 2, true, true, new long[] {2, 3}); assertEquals(targetC, rstC); @@ -780,16 +780,16 @@ public void testCopyImageRandFloat() { public void testCopyImageRandDouble() { final Table t = TableTools.emptyTable(10) - .update("A=io.deephaven.dbtypes.DbImage.newInstance(i%2==0?`" + q1 + "`:`" + q2 + "`)") - .update("A=i%2==0 ? 
A : A.resize(2,2)"); + .update("A=io.deephaven.dbtypes.DbImage.newInstance(i%2==0?`" + q1 + "`:`" + q2 + "`)") + .update("A=i%2==0 ? A : A.resize(2,2)"); final double[] targetBW = {85, 89, 150, 170, 206, 188, 171, 239}; final double[] rstBW = new double[8]; Java2NumpyCopy.copyImageRand(t, rstBW, 2, 2, 2, true, false, new long[] {2, 3}); assertEquals(targetBW, rstBW); - final double[] targetC = {0, 255, 1, 255, 5, 7, 3, 193, 255, 255, 255, 0, 185, 205, 229, - 195, 214, 155, 217, 150, 148, 254, 254, 210}; + final double[] targetC = {0, 255, 1, 255, 5, 7, 3, 193, 255, 255, 255, 0, 185, 205, 229, 195, 214, 155, 217, + 150, 148, 254, 254, 210}; final double[] rstC = new double[24]; Java2NumpyCopy.copyImageRand(t, rstC, 2, 2, 2, true, true, new long[] {2, 3}); assertEquals(targetC, rstC); diff --git a/Internals/src/test/java/io/deephaven/internals/TestJdkInternalsLoader.java b/Internals/src/test/java/io/deephaven/internals/TestJdkInternalsLoader.java index b67e025db56..798fb680ebb 100644 --- a/Internals/src/test/java/io/deephaven/internals/TestJdkInternalsLoader.java +++ b/Internals/src/test/java/io/deephaven/internals/TestJdkInternalsLoader.java @@ -12,8 +12,7 @@ public void testGetMemoryUsed() { @Test public void testMaxDirectMemory() { - long maxDirectMemory = - JdkInternalsLoader.getInstance().getDirectMemoryStats().maxDirectMemory(); + long maxDirectMemory = JdkInternalsLoader.getInstance().getDirectMemoryStats().maxDirectMemory(); System.out.println(maxDirectMemory); } } diff --git a/Kafka/src/main/java/io/deephaven/kafka/KafkaTools.java b/Kafka/src/main/java/io/deephaven/kafka/KafkaTools.java index f29338d74b0..1ed6e893008 100644 --- a/Kafka/src/main/java/io/deephaven/kafka/KafkaTools.java +++ b/Kafka/src/main/java/io/deephaven/kafka/KafkaTools.java @@ -55,8 +55,7 @@ public class KafkaTools { - public static final String KAFKA_PARTITION_COLUMN_NAME_PROPERTY = - "deephaven.partition.column.name"; + public static final String KAFKA_PARTITION_COLUMN_NAME_PROPERTY = 
"deephaven.partition.column.name"; public static final String KAFKA_PARTITION_COLUMN_NAME_DEFAULT = "KafkaPartition"; public static final String OFFSET_COLUMN_NAME_PROPERTY = "deephaven.offset.column.name"; public static final String OFFSET_COLUMN_NAME_DEFAULT = "KafkaOffset"; @@ -68,8 +67,7 @@ public class KafkaTools { public static final String VALUE_COLUMN_NAME_DEFAULT = "KafkaValue"; public static final String KEY_COLUMN_TYPE_PROPERTY = "deephaven.key.column.type"; public static final String VALUE_COLUMN_TYPE_PROPERTY = "deephaven.value.column.type"; - public static final String SCHEMA_SERVER_PROPERTY = - AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG; + public static final String SCHEMA_SERVER_PROPERTY = AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG; public static final String SHORT_DESERIALIZER = ShortDeserializer.class.getName(); public static final String INT_DESERIALIZER = IntegerDeserializer.class.getName(); public static final String LONG_DESERIALIZER = LongDeserializer.class.getName(); @@ -86,26 +84,24 @@ public class KafkaTools { private static final int CHUNK_SIZE = 2048; - public static Schema getAvroSchema(final String schemaServerUrl, final String resourceName, - final String version) { + public static Schema getAvroSchema(final String schemaServerUrl, final String resourceName, final String version) { String action = "setup http client"; try (final CloseableHttpClient client = HttpClients.custom().build()) { final String requestStr = - schemaServerUrl + "/subjects/" + resourceName + "/versions/" + version + "/schema"; + schemaServerUrl + "/subjects/" + resourceName + "/versions/" + version + "/schema"; final HttpUriRequest request = RequestBuilder.get().setUri(requestStr).build(); action = "execute schema request " + requestStr; final HttpResponse response = client.execute(request); final int statusCode = response.getStatusLine().getStatusCode(); if (statusCode != HttpStatus.SC_OK) { - throw new UncheckedDeephavenException( 
- "Got status code " + statusCode + " requesting " + request); + throw new UncheckedDeephavenException("Got status code " + statusCode + " requesting " + request); } action = "extract json server response"; final HttpEntity entity = response.getEntity(); final Header encodingHeader = entity.getContentEncoding(); final Charset encoding = encodingHeader == null - ? StandardCharsets.UTF_8 - : Charsets.toCharset(encodingHeader.getValue()); + ? StandardCharsets.UTF_8 + : Charsets.toCharset(encodingHeader.getValue()); final String json = EntityUtils.toString(entity, encoding); action = "parse schema server response: " + json; return new Schema.Parser().parse(json); @@ -119,11 +115,11 @@ public static Schema getAvroSchema(final String schemaServerUrl, final String re } private static void pushColumnTypesFromAvroField( - final List columnsOut, - final Map mappedOut, - final String prefix, - final Schema.Field field, - final Function fieldNameToColumnName) { + final List columnsOut, + final Map mappedOut, + final String prefix, + final Schema.Field field, + final Function fieldNameToColumnName) { final Schema fieldSchema = field.schema(); final String fieldName = field.name(); final String mappedName = fieldNameToColumnName.apply(prefix + fieldName); @@ -133,21 +129,21 @@ private static void pushColumnTypesFromAvroField( } final Schema.Type fieldType = fieldSchema.getType(); pushColumnTypesFromAvroField( - columnsOut, mappedOut, prefix, field, fieldName, fieldSchema, mappedName, fieldType, - fieldNameToColumnName); + columnsOut, mappedOut, prefix, field, fieldName, fieldSchema, mappedName, fieldType, + fieldNameToColumnName); } private static void pushColumnTypesFromAvroField( - final List columnsOut, - final Map mappedOut, - final String prefix, - final Schema.Field field, - final String fieldName, - final Schema fieldSchema, - final String mappedName, - final Schema.Type fieldType, - final Function fieldNameToColumnName) { + final List columnsOut, + final Map mappedOut, + 
final String prefix, + final Schema.Field field, + final String fieldName, + final Schema fieldSchema, + final String mappedName, + final Schema.Type fieldType, + final Function fieldNameToColumnName) { switch (fieldType) { case BOOLEAN: columnsOut.add(ColumnDefinition.ofBoolean(mappedName)); @@ -175,31 +171,31 @@ private static void pushColumnTypesFromAvroField( } if (unionSize != 2) { throw new UnsupportedOperationException( - "Union " + fieldName + " with more than 2 fields not supported"); + "Union " + fieldName + " with more than 2 fields not supported"); } final Schema.Type unionType0 = unionTypes.get(0).getType(); final Schema.Type unionType1 = unionTypes.get(1).getType(); if (unionType1 == Schema.Type.NULL) { pushColumnTypesFromAvroField( - columnsOut, mappedOut, prefix, field, fieldName, fieldSchema, mappedName, - unionType0, fieldNameToColumnName); + columnsOut, mappedOut, prefix, field, fieldName, fieldSchema, mappedName, unionType0, + fieldNameToColumnName); return; } else if (unionType0 == Schema.Type.NULL) { pushColumnTypesFromAvroField( - columnsOut, mappedOut, prefix, field, fieldName, fieldSchema, mappedName, - unionType1, fieldNameToColumnName); + columnsOut, mappedOut, prefix, field, fieldName, fieldSchema, mappedName, unionType1, + fieldNameToColumnName); return; } - throw new UnsupportedOperationException("Union " + fieldName - + " not supported; only unions with NULL are supported at this time."); + throw new UnsupportedOperationException( + "Union " + fieldName + " not supported; only unions with NULL are supported at this time."); case RECORD: // Linearize any nesting. 
for (final Schema.Field nestedField : field.schema().getFields()) { pushColumnTypesFromAvroField( - columnsOut, mappedOut, - prefix + fieldName + NESTED_FIELD_NAME_SEPARATOR, - nestedField, - fieldNameToColumnName); + columnsOut, mappedOut, + prefix + fieldName + NESTED_FIELD_NAME_SEPARATOR, + nestedField, + fieldNameToColumnName); } return; case MAP: @@ -209,8 +205,7 @@ private static void pushColumnTypesFromAvroField( case BYTES: case FIXED: default: - throw new UnsupportedOperationException( - "Type " + fieldType + " not supported for field " + fieldName); + throw new UnsupportedOperationException("Type " + fieldType + " not supported for field " + fieldName); } if (mappedOut != null) { mappedOut.put(fieldName, mappedName); @@ -218,10 +213,10 @@ private static void pushColumnTypesFromAvroField( } public static void avroSchemaToColumnDefinitions( - final List columns, - final Map mappedOut, - final Schema schema, - final Function fieldNameToColumnName) { + final List columns, + final Map mappedOut, + final Schema schema, + final Function fieldNameToColumnName) { if (schema.isUnion()) { throw new UnsupportedOperationException("Union of records schemas are not supported"); } @@ -237,15 +232,15 @@ public static void avroSchemaToColumnDefinitions( } public static void avroSchemaToColumnDefinitions( - final List columns, - final Schema schema, - final Function fieldNameToColumnName) { + final List columns, + final Schema schema, + final Function fieldNameToColumnName) { avroSchemaToColumnDefinitions(columns, null, schema, fieldNameToColumnName); } public static void avroSchemaToColumnDefinitions( - final List columns, - final Schema schema) { + final List columns, + final Schema schema) { avroSchemaToColumnDefinitions(columns, schema, DIRECT_MAPPING); } @@ -275,8 +270,7 @@ public static final class Avro extends KeyOrValueSpec { public final String schemaVersion; public final Function fieldNameToColumnName; - private Avro(final Schema schema, - final Function 
fieldNameToColumnName) { + private Avro(final Schema schema, final Function fieldNameToColumnName) { this.schema = schema; this.schemaName = null; this.schemaVersion = null; @@ -284,8 +278,8 @@ private Avro(final Schema schema, } private Avro(final String schemaName, - final String schemaVersion, - final Function fieldNameToColumnName) { + final String schemaVersion, + final Function fieldNameToColumnName) { this.schema = null; this.schemaName = schemaName; this.schemaVersion = schemaVersion; @@ -314,11 +308,10 @@ public DataFormat dataFormat() { } /** - * The names for the key or value columns can be provided in the properties as - * "key.column.name" or "value.column.name", and otherwise default to "key" or "value". The - * types for key or value are either specified in the properties as "key.type" or - * "value.type", or deduced from the serializer classes for key or value in the provided - * Properties object. + * The names for the key or value columns can be provided in the properties as "key.column.name" or + * "value.column.name", and otherwise default to "key" or "value". The types for key or value are either + * specified in the properties as "key.type" or "value.type", or deduced from the serializer classes for key or + * value in the provided Properties object. */ private static final Simple FROM_PROPERTIES = new Simple(null, null); @@ -327,8 +320,8 @@ public static final class Json extends KeyOrValueSpec { public final Map fieldNameToColumnName; private Json( - final ColumnDefinition[] columnDefinitions, - final Map fieldNameToColumnName) { + final ColumnDefinition[] columnDefinitions, + final Map fieldNameToColumnName) { this.columnDefinitions = columnDefinitions; this.fieldNameToColumnName = fieldNameToColumnName; } @@ -346,11 +339,11 @@ public DataFormat dataFormat() { public enum TableType { /** *

    - * Consume all partitions into a single interleaved stream table, which will present only - * newly-available rows to downstream operations and visualizations. + * Consume all partitions into a single interleaved stream table, which will present only newly-available rows + * to downstream operations and visualizations. *

    - * See {@link Table#STREAM_TABLE_ATTRIBUTE} for a detailed explanation of stream table - * semantics, and {@link io.deephaven.db.v2.StreamTableTools} for related tooling. + * See {@link Table#STREAM_TABLE_ATTRIBUTE} for a detailed explanation of stream table semantics, and + * {@link io.deephaven.db.v2.StreamTableTools} for related tooling. */ Stream(false, false), /** @@ -361,29 +354,26 @@ public enum TableType { *

    * As in {@link #Stream}, but each partition is mapped to a distinct stream table. *

    - * The resulting per-partition tables are aggregated into a single {@link TableMap} keyed by - * {@link Integer} partition, which is then presented as a {@link Table} proxy via - * {@link TransformableTableMap#asTable(boolean, boolean, boolean) asTable} with - * {@code strictKeys=true}, {@code allowCoalesce=true}, and {@code sanityCheckJoins=true}. + * The resulting per-partition tables are aggregated into a single {@link TableMap} keyed by {@link Integer} + * partition, which is then presented as a {@link Table} proxy via + * {@link TransformableTableMap#asTable(boolean, boolean, boolean) asTable} with {@code strictKeys=true}, + * {@code allowCoalesce=true}, and {@code sanityCheckJoins=true}. *

    - * See {@link TransformableTableMap#asTableMap()} to explicitly work with the underlying - * {@link TableMap} and {@link TransformableTableMap#asTable(boolean, boolean, boolean)} for - * alternative proxy options. + * See {@link TransformableTableMap#asTableMap()} to explicitly work with the underlying {@link TableMap} and + * {@link TransformableTableMap#asTable(boolean, boolean, boolean)} for alternative proxy options. */ StreamMap(false, true), /** *

    - * As in {@link #Append}, but each partition is mapped to a distinct in-memory append-only - * table. + * As in {@link #Append}, but each partition is mapped to a distinct in-memory append-only table. *

    - * The resulting per-partition tables are aggregated into a single {@link TableMap} keyed by - * {@link Integer} partition, which is then presented as a {@link Table} proxy via - * {@link TransformableTableMap#asTable(boolean, boolean, boolean) asTable} with - * {@code strictKeys=true}, {@code allowCoalesce=true}, and {@code sanityCheckJoins=true}. + * The resulting per-partition tables are aggregated into a single {@link TableMap} keyed by {@link Integer} + * partition, which is then presented as a {@link Table} proxy via + * {@link TransformableTableMap#asTable(boolean, boolean, boolean) asTable} with {@code strictKeys=true}, + * {@code allowCoalesce=true}, and {@code sanityCheckJoins=true}. *

    - * See {@link TransformableTableMap#asTableMap()} to explicitly work with the underlying - * {@link TableMap} and {@link TransformableTableMap#asTable(boolean, boolean, boolean)} for - * alternative proxy options. + * See {@link TransformableTableMap#asTableMap()} to explicitly work with the underlying {@link TableMap} and + * {@link TransformableTableMap#asTable(boolean, boolean, boolean)} for alternative proxy options. */ AppendMap(true, true); @@ -427,8 +417,8 @@ public static KeyOrValueSpec ignoreSpec() { @SuppressWarnings("unused") public static KeyOrValueSpec jsonSpec( - final ColumnDefinition[] columnDefinitions, - final Map fieldNameToColumnName) { + final ColumnDefinition[] columnDefinitions, + final Map fieldNameToColumnName) { return new KeyOrValueSpec.Json(columnDefinitions, fieldNameToColumnName); } @@ -438,8 +428,7 @@ public static KeyOrValueSpec jsonSpec(final ColumnDefinition[] columnDefiniti } @SuppressWarnings("unused") - public static KeyOrValueSpec avroSpec(final Schema schema, - final Function fieldNameToColumnName) { + public static KeyOrValueSpec avroSpec(final Schema schema, final Function fieldNameToColumnName) { return new KeyOrValueSpec.Avro(schema, fieldNameToColumnName); } @@ -450,13 +439,13 @@ public static KeyOrValueSpec avroSpec(final Schema schema) { @SuppressWarnings("unused") public static KeyOrValueSpec avroSpec(final String schemaName, - final String schemaVersion, - final Function fieldNameToColumnName) { + final String schemaVersion, + final Function fieldNameToColumnName) { return new KeyOrValueSpec.Avro(schemaName, schemaVersion, fieldNameToColumnName); } public static KeyOrValueSpec avroSpec(final String schemaName, - final Function fieldNameToColumnName) { + final Function fieldNameToColumnName) { return new KeyOrValueSpec.Avro(schemaName, AVRO_LATEST_VERSION, fieldNameToColumnName); } @@ -475,9 +464,8 @@ public static KeyOrValueSpec simpleSpec(final String columnName, final Class } /** - * The types for key or value 
are either specified in the properties as "key.type" or - * "value.type", or deduced from the serializer classes for key or value in the provided - * Properties object. + * The types for key or value are either specified in the properties as "key.type" or "value.type", or deduced from + * the serializer classes for key or value in the provided Properties object. */ @SuppressWarnings("unused") public static KeyOrValueSpec simpleSpec(final String columnName) { @@ -487,32 +475,30 @@ public static KeyOrValueSpec simpleSpec(final String columnName) { /** * Consume from Kafka to a Deephaven table. * - * @param kafkaConsumerProperties Properties to configure this table and also to be passed to - * create the KafkaConsumer + * @param kafkaConsumerProperties Properties to configure this table and also to be passed to create the + * KafkaConsumer * @param topic Kafka topic name * @param partitionFilter A predicate returning true for the partitions to consume - * @param partitionToInitialOffset A function specifying the desired initial offset for each - * partition consumed + * @param partitionToInitialOffset A function specifying the desired initial offset for each partition consumed * @param keySpec Conversion specification for Kafka record keys * @param valueSpec Conversion specification for Kafka record values * @param resultType {@link TableType} specifying the type of the expected result - * @return The result table containing Kafka stream data formatted according to - * {@code resultType} + * @return The result table containing Kafka stream data formatted according to {@code resultType} */ @SuppressWarnings("unused") public static Table consumeToTable( - @NotNull final Properties kafkaConsumerProperties, - @NotNull final String topic, - @NotNull final IntPredicate partitionFilter, - @NotNull final IntToLongFunction partitionToInitialOffset, - @NotNull final KeyOrValueSpec keySpec, - @NotNull final KeyOrValueSpec valueSpec, - @NotNull final TableType resultType) { + 
@NotNull final Properties kafkaConsumerProperties, + @NotNull final String topic, + @NotNull final IntPredicate partitionFilter, + @NotNull final IntToLongFunction partitionToInitialOffset, + @NotNull final KeyOrValueSpec keySpec, + @NotNull final KeyOrValueSpec valueSpec, + @NotNull final TableType resultType) { final boolean ignoreKey = keySpec.dataFormat() == DataFormat.IGNORE; final boolean ignoreValue = valueSpec.dataFormat() == DataFormat.IGNORE; if (ignoreKey && ignoreValue) { throw new IllegalArgumentException( - "can't ignore both key and value: keySpec and valueSpec can't both be ignore specs"); + "can't ignore both key and value: keySpec and valueSpec can't both be ignore specs"); } if (ignoreKey) { setDeserIfNotSet(kafkaConsumerProperties, KeyOrValue.KEY, DESERIALIZER_FOR_IGNORE); @@ -537,83 +523,73 @@ public static Table consumeToTable( final MutableInt nextColumnIndexMut = new MutableInt(nextColumnIndex); final KeyOrValueIngestData keyIngestData = - getIngestData(KeyOrValue.KEY, kafkaConsumerProperties, columnDefinitions, - nextColumnIndexMut, keySpec); + getIngestData(KeyOrValue.KEY, kafkaConsumerProperties, columnDefinitions, nextColumnIndexMut, keySpec); final KeyOrValueIngestData valueIngestData = - getIngestData(KeyOrValue.VALUE, kafkaConsumerProperties, columnDefinitions, - nextColumnIndexMut, valueSpec); + getIngestData(KeyOrValue.VALUE, kafkaConsumerProperties, columnDefinitions, nextColumnIndexMut, + valueSpec); final TableDefinition tableDefinition = new TableDefinition(columnDefinitions); - final StreamTableMap streamTableMap = - resultType.isMap ? new StreamTableMap(tableDefinition) : null; + final StreamTableMap streamTableMap = resultType.isMap ? new StreamTableMap(tableDefinition) : null; final LiveTableRegistrar liveTableRegistrar = - streamTableMap == null ? LiveTableMonitor.DEFAULT : streamTableMap.refreshCombiner; + streamTableMap == null ? 
LiveTableMonitor.DEFAULT : streamTableMap.refreshCombiner; - final Supplier> adapterFactory = - () -> { - final StreamPublisherImpl streamPublisher = new StreamPublisherImpl(); - final StreamToTableAdapter streamToTableAdapter = + final Supplier> adapterFactory = () -> { + final StreamPublisherImpl streamPublisher = new StreamPublisherImpl(); + final StreamToTableAdapter streamToTableAdapter = new StreamToTableAdapter(tableDefinition, streamPublisher, liveTableRegistrar); - streamPublisher.setChunkFactory( - () -> streamToTableAdapter.makeChunksForDefinition(CHUNK_SIZE), + streamPublisher.setChunkFactory(() -> streamToTableAdapter.makeChunksForDefinition(CHUNK_SIZE), streamToTableAdapter::chunkTypeForIndex); - final KeyOrValueProcessor keyProcessor = + final KeyOrValueProcessor keyProcessor = getProcessor(keySpec, tableDefinition, streamToTableAdapter, keyIngestData); - final KeyOrValueProcessor valueProcessor = + final KeyOrValueProcessor valueProcessor = getProcessor(valueSpec, tableDefinition, streamToTableAdapter, valueIngestData); - return new Pair<>( + return new Pair<>( streamToTableAdapter, KafkaStreamPublisher.make( - streamPublisher, - commonColumnIndices[0], - commonColumnIndices[1], - commonColumnIndices[2], - keyProcessor, - valueProcessor, - keyIngestData == null ? -1 : keyIngestData.simpleColumnIndex, - valueIngestData == null ? -1 : valueIngestData.simpleColumnIndex, - keyIngestData == null ? Function.identity() - : keyIngestData.toObjectChunkMapper, - valueIngestData == null ? Function.identity() - : valueIngestData.toObjectChunkMapper)); - }; + streamPublisher, + commonColumnIndices[0], + commonColumnIndices[1], + commonColumnIndices[2], + keyProcessor, + valueProcessor, + keyIngestData == null ? -1 : keyIngestData.simpleColumnIndex, + valueIngestData == null ? -1 : valueIngestData.simpleColumnIndex, + keyIngestData == null ? Function.identity() : keyIngestData.toObjectChunkMapper, + valueIngestData == null ? 
Function.identity() : valueIngestData.toObjectChunkMapper)); + }; final UnaryOperator

    tableConversion = - resultType.isAppend ? StreamTableTools::streamToAppendOnlyTable - : UnaryOperator.identity(); + resultType.isAppend ? StreamTableTools::streamToAppendOnlyTable : UnaryOperator.identity(); final Table result; final IntFunction partitionToConsumer; if (resultType.isMap) { result = streamTableMap.asTable(true, true, true); partitionToConsumer = (final int partition) -> { final Pair partitionAdapterPair = - adapterFactory.get(); - final Table partitionTable = - tableConversion.apply(partitionAdapterPair.getFirst().table()); - streamTableMap.enqueueUpdate( - () -> Assert.eqNull(streamTableMap.put(partition, partitionTable), + adapterFactory.get(); + final Table partitionTable = tableConversion.apply(partitionAdapterPair.getFirst().table()); + streamTableMap.enqueueUpdate(() -> Assert.eqNull(streamTableMap.put(partition, partitionTable), "streamTableMap.put(partition, partitionTable)")); - return new SimpleKafkaStreamConsumer(partitionAdapterPair.getSecond(), - partitionAdapterPair.getFirst()); + return new SimpleKafkaStreamConsumer(partitionAdapterPair.getSecond(), partitionAdapterPair.getFirst()); }; } else { final Pair singleAdapterPair = - adapterFactory.get(); + adapterFactory.get(); result = tableConversion.apply(singleAdapterPair.getFirst().table()); - partitionToConsumer = (final int partition) -> new SimpleKafkaStreamConsumer( - singleAdapterPair.getSecond(), singleAdapterPair.getFirst()); + partitionToConsumer = (final int partition) -> new SimpleKafkaStreamConsumer(singleAdapterPair.getSecond(), + singleAdapterPair.getFirst()); } final KafkaIngester ingester = new KafkaIngester( - log, - kafkaConsumerProperties, - topic, - partitionFilter, - partitionToConsumer, - partitionToInitialOffset); + log, + kafkaConsumerProperties, + topic, + partitionFilter, + partitionToConsumer, + partitionToInitialOffset); ingester.start(); return result; @@ -644,22 +620,21 @@ private void enqueueUpdate(@NotNull final Runnable deferredUpdate) { } private 
static KeyOrValueProcessor getProcessor( - final KeyOrValueSpec spec, - final TableDefinition tableDef, - final StreamToTableAdapter streamToTableAdapter, - final KeyOrValueIngestData data) { + final KeyOrValueSpec spec, + final TableDefinition tableDef, + final StreamToTableAdapter streamToTableAdapter, + final KeyOrValueIngestData data) { switch (spec.dataFormat()) { case IGNORE: case SIMPLE: return null; case AVRO: return GenericRecordChunkAdapter.make( - tableDef, streamToTableAdapter::chunkTypeForIndex, data.fieldNameToColumnName, - (Schema) data.extra, true); + tableDef, streamToTableAdapter::chunkTypeForIndex, data.fieldNameToColumnName, + (Schema) data.extra, true); case JSON: return JsonNodeChunkAdapter.make( - tableDef, streamToTableAdapter::chunkTypeForIndex, data.fieldNameToColumnName, - true); + tableDef, streamToTableAdapter::chunkTypeForIndex, data.fieldNameToColumnName, true); default: throw new IllegalStateException("Unknown KeyOrvalueSpec value" + spec.dataFormat()); } @@ -679,28 +654,26 @@ private static void setIfNotSet(final Properties prop, final String key, final S prop.setProperty(key, value); } - private static void setDeserIfNotSet(final Properties prop, final KeyOrValue keyOrValue, - final String value) { + private static void setDeserIfNotSet(final Properties prop, final KeyOrValue keyOrValue, final String value) { final String propKey = (keyOrValue == KeyOrValue.KEY) - ? ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG - : ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG; + ? 
ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG + : ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG; setIfNotSet(prop, propKey, value); } private static KeyOrValueIngestData getIngestData( - final KeyOrValue keyOrValue, - final Properties kafkaConsumerProperties, - final List columnDefinitions, - final MutableInt nextColumnIndexMut, - final KeyOrValueSpec keyOrValueSpec) { + final KeyOrValue keyOrValue, + final Properties kafkaConsumerProperties, + final List columnDefinitions, + final MutableInt nextColumnIndexMut, + final KeyOrValueSpec keyOrValueSpec) { if (keyOrValueSpec.dataFormat() == DataFormat.IGNORE) { return null; } final KeyOrValueIngestData data = new KeyOrValueIngestData(); switch (keyOrValueSpec.dataFormat()) { case AVRO: - setDeserIfNotSet(kafkaConsumerProperties, keyOrValue, - KafkaAvroDeserializer.class.getName()); + setDeserIfNotSet(kafkaConsumerProperties, keyOrValue, KafkaAvroDeserializer.class.getName()); final KeyOrValueSpec.Avro avroSpec = (KeyOrValueSpec.Avro) keyOrValueSpec; data.fieldNameToColumnName = new HashMap<>(); final Schema schema; @@ -709,17 +682,14 @@ private static KeyOrValueIngestData getIngestData( } else { if (!kafkaConsumerProperties.containsKey(SCHEMA_SERVER_PROPERTY)) { throw new IllegalArgumentException( - "Avro schema name specified and schema server url propeorty " + - SCHEMA_SERVER_PROPERTY + " not found."); + "Avro schema name specified and schema server url propeorty " + + SCHEMA_SERVER_PROPERTY + " not found."); } - final String schemaServiceUrl = - kafkaConsumerProperties.getProperty(SCHEMA_SERVER_PROPERTY); - schema = getAvroSchema(schemaServiceUrl, avroSpec.schemaName, - avroSpec.schemaVersion); + final String schemaServiceUrl = kafkaConsumerProperties.getProperty(SCHEMA_SERVER_PROPERTY); + schema = getAvroSchema(schemaServiceUrl, avroSpec.schemaName, avroSpec.schemaVersion); } avroSchemaToColumnDefinitions( - columnDefinitions, data.fieldNameToColumnName, schema, - avroSpec.fieldNameToColumnName); + 
columnDefinitions, data.fieldNameToColumnName, schema, avroSpec.fieldNameToColumnName); data.extra = schema; break; case JSON: @@ -731,8 +701,7 @@ private static KeyOrValueIngestData getIngestData( data.fieldNameToColumnName = new HashMap<>(jsonSpec.columnDefinitions.length); final Set coveredColumns = new HashSet<>(jsonSpec.columnDefinitions.length); if (jsonSpec.fieldNameToColumnName != null) { - for (final Map.Entry entry : jsonSpec.fieldNameToColumnName - .entrySet()) { + for (final Map.Entry entry : jsonSpec.fieldNameToColumnName.entrySet()) { final String colName = entry.getValue(); data.fieldNameToColumnName.put(entry.getKey(), colName); coveredColumns.add(colName); @@ -750,15 +719,13 @@ private static KeyOrValueIngestData getIngestData( final KeyOrValueSpec.Simple simpleSpec = (KeyOrValueSpec.Simple) keyOrValueSpec; final ColumnDefinition colDef; if (simpleSpec.dataType == null) { - colDef = getKeyOrValueCol(keyOrValue, kafkaConsumerProperties, - simpleSpec.columnName, false); + colDef = getKeyOrValueCol(keyOrValue, kafkaConsumerProperties, simpleSpec.columnName, false); } else { - colDef = ColumnDefinition.fromGenericType(simpleSpec.columnName, - simpleSpec.dataType); + colDef = ColumnDefinition.fromGenericType(simpleSpec.columnName, simpleSpec.dataType); } final String propKey = (keyOrValue == KeyOrValue.KEY) - ? ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG - : ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG; + ? 
ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG + : ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG; if (!kafkaConsumerProperties.containsKey(propKey)) { final Class dataType = colDef.getDataType(); if (dataType == short.class) { @@ -775,17 +742,15 @@ private static KeyOrValueIngestData getIngestData( kafkaConsumerProperties.setProperty(propKey, STRING_DESERIALIZER); } else { throw new UncheckedDeephavenException( - "Deserializer for " + keyOrValue - + " not set in kafka consumer properties " + - "and can't automatically set it for type " + dataType); + "Deserializer for " + keyOrValue + " not set in kafka consumer properties " + + "and can't automatically set it for type " + dataType); } } setDeserIfNotSet(kafkaConsumerProperties, keyOrValue, STRING_DESERIALIZER); columnDefinitions.add(colDef); break; default: - throw new IllegalStateException( - "Unhandled spec type:" + keyOrValueSpec.dataFormat()); + throw new IllegalStateException("Unhandled spec type:" + keyOrValueSpec.dataFormat()); } return data; } @@ -801,12 +766,12 @@ private static KeyOrValueIngestData getIngestData( }; private static void getCommonCol( - @NotNull final ColumnDefinition[] columnsToSet, - final int outOffset, - @NotNull final Properties consumerProperties, - @NotNull final String columnNameProperty, - @NotNull final String columnNameDefault, - @NotNull Function> builder) { + @NotNull final ColumnDefinition[] columnsToSet, + final int outOffset, + @NotNull final Properties consumerProperties, + @NotNull final String columnNameProperty, + @NotNull final String columnNameDefault, + @NotNull Function> builder) { if (consumerProperties.containsKey(columnNameProperty)) { final String partitionColumnName = consumerProperties.getProperty(columnNameProperty); if (partitionColumnName == null || partitionColumnName.equals("")) { @@ -821,41 +786,41 @@ private static void getCommonCol( } private static int getCommonCols( - @NotNull final ColumnDefinition[] columnsToSet, - final int outOffset, - @NotNull 
final Properties consumerProperties) { + @NotNull final ColumnDefinition[] columnsToSet, + final int outOffset, + @NotNull final Properties consumerProperties) { int c = outOffset; getCommonCol( - columnsToSet, - c, - consumerProperties, - KAFKA_PARTITION_COLUMN_NAME_PROPERTY, - KAFKA_PARTITION_COLUMN_NAME_DEFAULT, - ColumnDefinition::ofInt); + columnsToSet, + c, + consumerProperties, + KAFKA_PARTITION_COLUMN_NAME_PROPERTY, + KAFKA_PARTITION_COLUMN_NAME_DEFAULT, + ColumnDefinition::ofInt); ++c; getCommonCol( - columnsToSet, - c++, - consumerProperties, - OFFSET_COLUMN_NAME_PROPERTY, - OFFSET_COLUMN_NAME_DEFAULT, - ColumnDefinition::ofLong); + columnsToSet, + c++, + consumerProperties, + OFFSET_COLUMN_NAME_PROPERTY, + OFFSET_COLUMN_NAME_DEFAULT, + ColumnDefinition::ofLong); getCommonCol( - columnsToSet, - c++, - consumerProperties, - TIMESTAMP_COLUMN_NAME_PROPERTY, - TIMESTAMP_COLUMN_NAME_DEFAULT, - (final String colName) -> ColumnDefinition.fromGenericType(colName, DBDateTime.class)); + columnsToSet, + c++, + consumerProperties, + TIMESTAMP_COLUMN_NAME_PROPERTY, + TIMESTAMP_COLUMN_NAME_DEFAULT, + (final String colName) -> ColumnDefinition.fromGenericType(colName, DBDateTime.class)); return c; } private static ColumnDefinition getKeyOrValueCol( - @NotNull final KeyOrValue keyOrValue, - @NotNull final Properties properties, - final String columnNameArg, - final boolean allowEmpty) { + @NotNull final KeyOrValue keyOrValue, + @NotNull final Properties properties, + final String columnNameArg, + final boolean allowEmpty) { final String typeProperty; final String deserializerProperty; final String nameProperty; @@ -886,8 +851,7 @@ private static ColumnDefinition getKeyOrValueCol( if (allowEmpty) { return null; } - throw new IllegalArgumentException( - "Property for " + nameDefault + " can't be empty."); + throw new IllegalArgumentException("Property for " + nameDefault + " can't be empty."); } } else { columnName = nameDefault; @@ -920,7 +884,7 @@ private static 
ColumnDefinition getKeyOrValueCol( return ColumnDefinition.ofString(columnName); default: throw new IllegalArgumentException( - "Property " + typeProperty + " value " + typeAsString + " not supported"); + "Property " + typeProperty + " value " + typeAsString + " not supported"); } } else if (!properties.containsKey(deserializerProperty)) { properties.setProperty(deserializerProperty, STRING_DESERIALIZER); @@ -931,7 +895,7 @@ private static ColumnDefinition getKeyOrValueCol( @NotNull private static ColumnDefinition columnDefinitionFromDeserializer( - @NotNull Properties properties, @NotNull String deserializerProperty, String columnName) { + @NotNull Properties properties, @NotNull String deserializerProperty, String columnName) { final String deserializer = properties.getProperty(deserializerProperty); if (INT_DESERIALIZER.equals(deserializer)) { return ColumnDefinition.ofInt(columnName); @@ -949,8 +913,7 @@ private static ColumnDefinition columnDefinitionFromDese return ColumnDefinition.ofString(columnName); } throw new IllegalArgumentException( - "Deserializer type " + deserializer + " for " + deserializerProperty - + " not supported."); + "Deserializer type " + deserializer + " for " + deserializerProperty + " not supported."); } @SuppressWarnings("unused") @@ -963,13 +926,11 @@ private static ColumnDefinition columnDefinitionFromDese public static final IntPredicate ALL_PARTITIONS = KafkaIngester.ALL_PARTITIONS; @SuppressWarnings("unused") public static final IntToLongFunction ALL_PARTITIONS_SEEK_TO_BEGINNING = - KafkaIngester.ALL_PARTITIONS_SEEK_TO_BEGINNING; + KafkaIngester.ALL_PARTITIONS_SEEK_TO_BEGINNING; @SuppressWarnings("unused") - public static final IntToLongFunction ALL_PARTITIONS_DONT_SEEK = - KafkaIngester.ALL_PARTITIONS_DONT_SEEK; + public static final IntToLongFunction ALL_PARTITIONS_DONT_SEEK = KafkaIngester.ALL_PARTITIONS_DONT_SEEK; @SuppressWarnings("unused") - public static final IntToLongFunction ALL_PARTITIONS_SEEK_TO_END = - 
KafkaIngester.ALL_PARTITIONS_SEEK_TO_END; + public static final IntToLongFunction ALL_PARTITIONS_SEEK_TO_END = KafkaIngester.ALL_PARTITIONS_SEEK_TO_END; @SuppressWarnings("unused") public static final Function DIRECT_MAPPING = Function.identity(); @SuppressWarnings("unused") @@ -988,13 +949,12 @@ public static IntPredicate partitionFilterFromArray(final int[] partitions) { @SuppressWarnings("unused") public static IntToLongFunction partitionToOffsetFromParallelArrays( - final int[] partitions, - final long[] offsets) { + final int[] partitions, + final long[] offsets) { if (partitions.length != offsets.length) { throw new IllegalArgumentException("lengths of array arguments do not match"); } - final TIntLongHashMap map = - new TIntLongHashMap(partitions.length, 0.5f, 0, KafkaIngester.DONT_SEEK); + final TIntLongHashMap map = new TIntLongHashMap(partitions.length, 0.5f, 0, KafkaIngester.DONT_SEEK); for (int i = 0; i < partitions.length; ++i) { map.put(partitions[i], offsets[i]); } @@ -1006,7 +966,7 @@ private static class SimpleKafkaStreamConsumer implements KafkaStreamConsumer { private final StreamToTableAdapter streamToTableAdapter; public SimpleKafkaStreamConsumer(ConsumerRecordToStreamPublisherAdapter adapter, - StreamToTableAdapter streamToTableAdapter) { + StreamToTableAdapter streamToTableAdapter) { this.adapter = adapter; this.streamToTableAdapter = streamToTableAdapter; } diff --git a/Kafka/src/main/java/io/deephaven/kafka/StreamPublisherImpl.java b/Kafka/src/main/java/io/deephaven/kafka/StreamPublisherImpl.java index 0c100cfc14e..491dfd5fbe6 100644 --- a/Kafka/src/main/java/io/deephaven/kafka/StreamPublisherImpl.java +++ b/Kafka/src/main/java/io/deephaven/kafka/StreamPublisherImpl.java @@ -16,17 +16,15 @@ public class StreamPublisherImpl implements StreamPublisher { private IntFunction chunkTypeIntFunction; /** - * You must set the chunk factory and consumer before allowing other threads or objects to - * interact with the StreamPublisherImpl. 
+ * You must set the chunk factory and consumer before allowing other threads or objects to interact with the + * StreamPublisherImpl. * * @param chunkFactory a supplier of WritableChunks that is acceptable to our consumer * @param chunkTypeIntFunction a function from column index to ChunkType */ - public void setChunkFactory(Supplier chunkFactory, - IntFunction chunkTypeIntFunction) { + public void setChunkFactory(Supplier chunkFactory, IntFunction chunkTypeIntFunction) { if (this.chunkFactory != null) { - throw new IllegalStateException( - "Can not reset the chunkFactory for a StreamPublisherImpl"); + throw new IllegalStateException("Can not reset the chunkFactory for a StreamPublisherImpl"); } this.chunkFactory = chunkFactory; this.chunkTypeIntFunction = chunkTypeIntFunction; @@ -61,8 +59,7 @@ public synchronized void flush() { } /** - * Run the provided Runnable under our lock, preventing flush from taking our chunks while - * filling them. + * Run the provided Runnable under our lock, preventing flush from taking our chunks while filling them. * * @param runnable the runnable to run */ diff --git a/Kafka/src/main/java/io/deephaven/kafka/ingest/ConsumerRecordToStreamPublisherAdapter.java b/Kafka/src/main/java/io/deephaven/kafka/ingest/ConsumerRecordToStreamPublisherAdapter.java index 3e82740e296..9a84edec5f7 100644 --- a/Kafka/src/main/java/io/deephaven/kafka/ingest/ConsumerRecordToStreamPublisherAdapter.java +++ b/Kafka/src/main/java/io/deephaven/kafka/ingest/ConsumerRecordToStreamPublisherAdapter.java @@ -14,8 +14,7 @@ public interface ConsumerRecordToStreamPublisherAdapter { /** * Consume a List of Kafka records, producing zero or more rows in the output. * - * @param records the records received from - * {@link org.apache.kafka.clients.consumer.KafkaConsumer#poll(Duration)}. + * @param records the records received from {@link org.apache.kafka.clients.consumer.KafkaConsumer#poll(Duration)}. 
* @throws IOException if there was an error writing to the output table */ void consumeRecords(List> records) throws IOException; diff --git a/Kafka/src/main/java/io/deephaven/kafka/ingest/GenericRecordChunkAdapter.java b/Kafka/src/main/java/io/deephaven/kafka/ingest/GenericRecordChunkAdapter.java index 1d4468e8640..9b073f3fcfa 100644 --- a/Kafka/src/main/java/io/deephaven/kafka/ingest/GenericRecordChunkAdapter.java +++ b/Kafka/src/main/java/io/deephaven/kafka/ingest/GenericRecordChunkAdapter.java @@ -18,19 +18,18 @@ /** * Convert an Avro {@link GenericRecord} to Deephaven rows. *

    - * Each GenericRecord produces a single row of output, according to the maps of Table column names - * to Avro field names for the keys and values. + * Each GenericRecord produces a single row of output, according to the maps of Table column names to Avro field names + * for the keys and values. */ public class GenericRecordChunkAdapter extends MultiFieldChunkAdapter { private GenericRecordChunkAdapter( - final TableDefinition definition, - final IntFunction chunkTypeForIndex, - final Map fieldNamesToColumnNames, - final Schema schema, - final boolean allowNulls) { - super(definition, chunkTypeForIndex, fieldNamesToColumnNames, allowNulls, - (fieldName, chunkType, dataType) -> GenericRecordChunkAdapter.makeFieldCopier(schema, - fieldName, chunkType, dataType)); + final TableDefinition definition, + final IntFunction chunkTypeForIndex, + final Map fieldNamesToColumnNames, + final Schema schema, + final boolean allowNulls) { + super(definition, chunkTypeForIndex, fieldNamesToColumnNames, allowNulls, (fieldName, chunkType, + dataType) -> GenericRecordChunkAdapter.makeFieldCopier(schema, fieldName, chunkType, dataType)); } /** @@ -44,17 +43,17 @@ private GenericRecordChunkAdapter( * @return a GenericRecordChunkAdapter for the given definition and column mapping */ public static GenericRecordChunkAdapter make( - final TableDefinition definition, - final IntFunction chunkTypeForIndex, - final Map columns, - final Schema schema, - final boolean allowNulls) { + final TableDefinition definition, + final IntFunction chunkTypeForIndex, + final Map columns, + final Schema schema, + final boolean allowNulls) { return new GenericRecordChunkAdapter( - definition, chunkTypeForIndex, columns, schema, allowNulls); + definition, chunkTypeForIndex, columns, schema, allowNulls); } private static FieldCopier makeFieldCopier(Schema schema, String fieldName, ChunkType chunkType, - Class dataType) { + Class dataType) { switch (chunkType) { case Char: return new 
GenericRecordCharFieldCopier(fieldName); @@ -74,24 +73,22 @@ private static FieldCopier makeFieldCopier(Schema schema, String fieldName, Chun final LogicalType logicalType = field.schema().getLogicalType(); if (logicalType == null) { throw new IllegalArgumentException( - "Can not map field without a logical type to DBDateTime: field=" - + fieldName); + "Can not map field without a logical type to DBDateTime: field=" + fieldName); } if (LogicalTypes.timestampMicros().equals(logicalType)) { // micros to nanos return new GenericRecordLongFieldCopierWithMultiplier(fieldName, 1000L); } else if (LogicalTypes.timestampMillis().equals(logicalType)) { // millis to nanos - return new GenericRecordLongFieldCopierWithMultiplier(fieldName, - 1000000L); + return new GenericRecordLongFieldCopierWithMultiplier(fieldName, 1000000L); } throw new IllegalArgumentException( - "Can not map field with unknown logical type to DBDateTime: field=" - + fieldName + ", type=" + logicalType); + "Can not map field with unknown logical type to DBDateTime: field=" + fieldName + + ", type=" + logicalType); } else { throw new IllegalArgumentException( - "Can not map field not in schema to DBDateTime: field=" + fieldName); + "Can not map field not in schema to DBDateTime: field=" + fieldName); } } return new GenericRecordLongFieldCopier(fieldName); diff --git a/Kafka/src/main/java/io/deephaven/kafka/ingest/GenericRecordLongFieldCopierWithMultiplier.java b/Kafka/src/main/java/io/deephaven/kafka/ingest/GenericRecordLongFieldCopierWithMultiplier.java index 3d8cccd9deb..8ca15e18a95 100644 --- a/Kafka/src/main/java/io/deephaven/kafka/ingest/GenericRecordLongFieldCopierWithMultiplier.java +++ b/Kafka/src/main/java/io/deephaven/kafka/ingest/GenericRecordLongFieldCopierWithMultiplier.java @@ -12,19 +12,18 @@ public class GenericRecordLongFieldCopierWithMultiplier implements FieldCopier { private final String fieldName; private final long multiplier; - public GenericRecordLongFieldCopierWithMultiplier(final 
String fieldName, - final long multiplier) { + public GenericRecordLongFieldCopierWithMultiplier(final String fieldName, final long multiplier) { this.fieldName = fieldName; this.multiplier = multiplier; } @Override public void copyField( - final ObjectChunk inputChunk, - final WritableChunk publisherChunk, - final int sourceOffset, - final int destOffset, - final int length) { + final ObjectChunk inputChunk, + final WritableChunk publisherChunk, + final int sourceOffset, + final int destOffset, + final int length) { final WritableLongChunk output = publisherChunk.asWritableLongChunk(); for (int ii = 0; ii < length; ++ii) { final GenericRecord genericRecord = (GenericRecord) inputChunk.get(ii + sourceOffset); diff --git a/Kafka/src/main/java/io/deephaven/kafka/ingest/JsonNodeChunkAdapter.java b/Kafka/src/main/java/io/deephaven/kafka/ingest/JsonNodeChunkAdapter.java index a30367431a6..2a64bafa7cc 100644 --- a/Kafka/src/main/java/io/deephaven/kafka/ingest/JsonNodeChunkAdapter.java +++ b/Kafka/src/main/java/io/deephaven/kafka/ingest/JsonNodeChunkAdapter.java @@ -14,12 +14,12 @@ public class JsonNodeChunkAdapter extends MultiFieldChunkAdapter { private JsonNodeChunkAdapter( - final TableDefinition definition, - final IntFunction chunkTypeForIndex, - final Map fieldNamesToColumnNames, - final boolean allowNulls) { + final TableDefinition definition, + final IntFunction chunkTypeForIndex, + final Map fieldNamesToColumnNames, + final boolean allowNulls) { super(definition, chunkTypeForIndex, fieldNamesToColumnNames, allowNulls, - JsonNodeChunkAdapter::makeFieldCopier); + JsonNodeChunkAdapter::makeFieldCopier); } /** @@ -32,16 +32,16 @@ private JsonNodeChunkAdapter( * @return a JsonRecordChunkAdapter for the given definition and column mapping */ public static JsonNodeChunkAdapter make( - final TableDefinition definition, - final IntFunction chunkTypeForIndex, - final Map fieldNamesToColumnNames, - final boolean allowNulls) { + final TableDefinition definition, + final 
IntFunction chunkTypeForIndex, + final Map fieldNamesToColumnNames, + final boolean allowNulls) { return new JsonNodeChunkAdapter( - definition, chunkTypeForIndex, fieldNamesToColumnNames, allowNulls); + definition, chunkTypeForIndex, fieldNamesToColumnNames, allowNulls); } private static FieldCopier makeFieldCopier( - final String fieldName, final ChunkType chunkType, final Class dataType) { + final String fieldName, final ChunkType chunkType, final Class dataType) { switch (chunkType) { case Char: return new JsonNodeCharFieldCopier(fieldName); diff --git a/Kafka/src/main/java/io/deephaven/kafka/ingest/JsonNodeUtil.java b/Kafka/src/main/java/io/deephaven/kafka/ingest/JsonNodeUtil.java index db8ac7a656a..82c0a9f5b57 100644 --- a/Kafka/src/main/java/io/deephaven/kafka/ingest/JsonNodeUtil.java +++ b/Kafka/src/main/java/io/deephaven/kafka/ingest/JsonNodeUtil.java @@ -17,8 +17,8 @@ public class JsonNodeUtil { private static final ObjectMapper objectMapper = new ObjectMapper() - .setNodeFactory(JsonNodeFactory.withExactBigDecimals(true)) - .configure(DeserializationFeature.USE_BIG_DECIMAL_FOR_FLOATS, true); + .setNodeFactory(JsonNodeFactory.withExactBigDecimals(true)) + .configure(DeserializationFeature.USE_BIG_DECIMAL_FOR_FLOATS, true); public static JsonNode makeJsonNode(final String json) { final JsonNode node; @@ -30,16 +30,16 @@ public static JsonNode makeJsonNode(final String json) { } private static JsonNode checkAllowMissingOrNull( - final JsonNode node, @NotNull final String key, - final boolean allowMissingKeys, final boolean allowNullValues) { + final JsonNode node, @NotNull final String key, + final boolean allowMissingKeys, final boolean allowNullValues) { final JsonNode tmpNode = node == null ? 
null : node.get(key); if (!allowMissingKeys && tmpNode == null) { throw new IllegalArgumentException( - "Key " + key + " not found in the record, and allowMissingKeys is false."); + "Key " + key + " not found in the record, and allowMissingKeys is false."); } if (tmpNode != null && !allowNullValues && tmpNode.isNull()) { throw new IllegalArgumentException( - "Value for Key " + key + " is null in the record, and allowNullValues is false."); + "Value for Key " + key + " is null in the record, and allowNullValues is false."); } return tmpNode; } @@ -52,23 +52,20 @@ private static boolean isNullField(final JsonNode node) { } /** - * Returns a Deephaven int (primitive int with reserved values for null) from a - * {@link JsonNode}. + * Returns a Deephaven int (primitive int with reserved values for null) from a {@link JsonNode}. * * @param node The {@link JsonNode} from which to retrieve the value. * @param key The String key of the value to retrieve. * @return A Deephaven int (primitive int with reserved values for null) */ public static int getInt(@NotNull final JsonNode node, @NotNull final String key, - final boolean allowMissingKeys, final boolean allowNullValues) { - final JsonNode tmpNode = - checkAllowMissingOrNull(node, key, allowMissingKeys, allowNullValues); + final boolean allowMissingKeys, final boolean allowNullValues) { + final JsonNode tmpNode = checkAllowMissingOrNull(node, key, allowMissingKeys, allowNullValues); return getInt(tmpNode); } /** - * Returns a Deephaven int (primitive int with reserved values for null) from a - * {@link JsonNode}. + * Returns a Deephaven int (primitive int with reserved values for null) from a {@link JsonNode}. * * @param node The {@link JsonNode} from which to retrieve the value. 
* @return A Deephaven int (primitive int with reserved values for null) @@ -86,9 +83,8 @@ public static int getInt(final JsonNode node) { */ @Nullable public static Integer getBoxedInt(@NotNull final JsonNode node, @NotNull final String key, - final boolean allowMissingKeys, final boolean allowNullValues) { - final JsonNode tmpNode = - checkAllowMissingOrNull(node, key, allowMissingKeys, allowNullValues); + final boolean allowMissingKeys, final boolean allowNullValues) { + final JsonNode tmpNode = checkAllowMissingOrNull(node, key, allowMissingKeys, allowNullValues); return getBoxedInt(tmpNode); } @@ -104,23 +100,20 @@ public static Integer getBoxedInt(final JsonNode node) { } /** - * Returns a Deephaven short (primitive short with reserved values for Null) from a - * {@link JsonNode}. + * Returns a Deephaven short (primitive short with reserved values for Null) from a {@link JsonNode}. * * @param node The {@link JsonNode} from which to retrieve the value. * @param key The String key of the value to retrieve. * @return A Deephaven short (primitive short with reserved values for Null) */ public static short getShort(@NotNull final JsonNode node, @NotNull final String key, - final boolean allowMissingKeys, final boolean allowNullValues) { - final JsonNode tmpNode = - checkAllowMissingOrNull(node, key, allowMissingKeys, allowNullValues); + final boolean allowMissingKeys, final boolean allowNullValues) { + final JsonNode tmpNode = checkAllowMissingOrNull(node, key, allowMissingKeys, allowNullValues); return getShort(tmpNode); } /** - * Returns a Deephaven short (primitive short with reserved values for Null) from a - * {@link JsonNode}. + * Returns a Deephaven short (primitive short with reserved values for Null) from a {@link JsonNode}. * * @param node The {@link JsonNode} from which to retrieve the value. 
* @return A Deephaven short (primitive short with reserved values for Null) @@ -138,9 +131,8 @@ public static short getShort(final JsonNode node) { */ @Nullable public static Short getBoxedShort(@NotNull final JsonNode node, @NotNull final String key, - final boolean allowMissingKeys, final boolean allowNullValues) { - final JsonNode tmpNode = - checkAllowMissingOrNull(node, key, allowMissingKeys, allowNullValues); + final boolean allowMissingKeys, final boolean allowNullValues) { + final JsonNode tmpNode = checkAllowMissingOrNull(node, key, allowMissingKeys, allowNullValues); return getBoxedShort(tmpNode); } @@ -150,23 +142,20 @@ public static Short getBoxedShort(final JsonNode node) { } /** - * Returns a Deephaven long (primitive long with reserved values for Null) from a - * {@link JsonNode}. + * Returns a Deephaven long (primitive long with reserved values for Null) from a {@link JsonNode}. * * @param node The {@link JsonNode} from which to retrieve the value. * @param key The String key of the value to retrieve. * @return A Deephaven long (primitive long with reserved values for Null) */ public static long getLong(@NotNull final JsonNode node, @NotNull final String key, - final boolean allowMissingKeys, final boolean allowNullValues) { - final JsonNode tmpNode = - checkAllowMissingOrNull(node, key, allowMissingKeys, allowNullValues); + final boolean allowMissingKeys, final boolean allowNullValues) { + final JsonNode tmpNode = checkAllowMissingOrNull(node, key, allowMissingKeys, allowNullValues); return getLong(tmpNode); } /** - * Returns a Deephaven long (primitive long with reserved values for null) from a - * {@link JsonNode}. + * Returns a Deephaven long (primitive long with reserved values for null) from a {@link JsonNode}. * * @param node The {@link JsonNode} from which to retrieve the value. 
* @return A Deephaven long (primitive long with reserved values for null) @@ -184,9 +173,8 @@ public static long getLong(final JsonNode node) { */ @Nullable public static Long getBoxedLong(@NotNull final JsonNode node, @NotNull final String key, - final boolean allowMissingKeys, final boolean allowNullValues) { - final JsonNode tmpNode = - checkAllowMissingOrNull(node, key, allowMissingKeys, allowNullValues); + final boolean allowMissingKeys, final boolean allowNullValues) { + final JsonNode tmpNode = checkAllowMissingOrNull(node, key, allowMissingKeys, allowNullValues); return getBoxedLong(tmpNode); } @@ -202,23 +190,20 @@ public static Long getBoxedLong(final JsonNode node) { } /** - * Returns a Deephaven double (primitive double with reserved values for null) from a - * {@link JsonNode}. + * Returns a Deephaven double (primitive double with reserved values for null) from a {@link JsonNode}. * * @param node The {@link JsonNode} from which to retrieve the value. * @param key The String key of the value to retrieve. * @return A Deephaven double (primitive double with reserved values for null) */ public static double getDouble(@NotNull final JsonNode node, @NotNull final String key, - final boolean allowMissingKeys, final boolean allowNullValues) { - final JsonNode tmpNode = - checkAllowMissingOrNull(node, key, allowMissingKeys, allowNullValues); + final boolean allowMissingKeys, final boolean allowNullValues) { + final JsonNode tmpNode = checkAllowMissingOrNull(node, key, allowMissingKeys, allowNullValues); return getDouble(tmpNode); } /** - * Returns a Deephaven double (primitive double with reserved values for null) from a - * {@link JsonNode}. + * Returns a Deephaven double (primitive double with reserved values for null) from a {@link JsonNode}. * * @param node The {@link JsonNode} from which to retrieve the value. 
* @return A Deephaven double (primitive double with reserved values for null) @@ -236,9 +221,8 @@ public static double getDouble(final JsonNode node) { */ @Nullable public static Double getBoxedDouble(@NotNull final JsonNode node, @NotNull final String key, - final boolean allowMissingKeys, final boolean allowNullValues) { - final JsonNode tmpNode = - checkAllowMissingOrNull(node, key, allowMissingKeys, allowNullValues); + final boolean allowMissingKeys, final boolean allowNullValues) { + final JsonNode tmpNode = checkAllowMissingOrNull(node, key, allowMissingKeys, allowNullValues); return getBoxedDouble(tmpNode); } @@ -254,23 +238,20 @@ public static Double getBoxedDouble(final JsonNode node) { } /** - * Returns a Deephaven float (primitive float with reserved values for Null) from a - * {@link JsonNode}. + * Returns a Deephaven float (primitive float with reserved values for Null) from a {@link JsonNode}. * * @param node The {@link JsonNode} from which to retrieve the value. * @param key The String key of the value to retrieve. * @return A Deephaven float (primitive float with reserved values for Null) */ public static float getFloat(@NotNull final JsonNode node, @NotNull final String key, - final boolean allowMissingKeys, final boolean allowNullValues) { - final JsonNode tmpNode = - checkAllowMissingOrNull(node, key, allowMissingKeys, allowNullValues); + final boolean allowMissingKeys, final boolean allowNullValues) { + final JsonNode tmpNode = checkAllowMissingOrNull(node, key, allowMissingKeys, allowNullValues); return getFloat(tmpNode); } /** - * Returns a Deephaven float (primitive float with reserved values for Null) from a - * {@link JsonNode}. + * Returns a Deephaven float (primitive float with reserved values for Null) from a {@link JsonNode}. * * @param node The {@link JsonNode} from which to retrieve the value. 
* @return A Deephaven float (primitive float with reserved values for Null) @@ -288,9 +269,8 @@ public static float getFloat(final JsonNode node) { */ @Nullable public static Float getBoxedFloat(@NotNull final JsonNode node, @NotNull final String key, - final boolean allowMissingKeys, final boolean allowNullValues) { - final JsonNode tmpNode = - checkAllowMissingOrNull(node, key, allowMissingKeys, allowNullValues); + final boolean allowMissingKeys, final boolean allowNullValues) { + final JsonNode tmpNode = checkAllowMissingOrNull(node, key, allowMissingKeys, allowNullValues); return getBoxedFloat(tmpNode); } @@ -306,23 +286,20 @@ public static Float getBoxedFloat(final JsonNode node) { } /** - * Returns a Deephaven byte (primitive byte with a reserved value for Null) from a - * {@link JsonNode}. + * Returns a Deephaven byte (primitive byte with a reserved value for Null) from a {@link JsonNode}. * * @param node The {@link JsonNode} from which to retrieve the value. * @param key The String key of the value to retrieve. * @return A Deephaven byte (primitive byte with a reserved value for Null) */ public static byte getByte(@NotNull final JsonNode node, @NotNull final String key, - final boolean allowMissingKeys, final boolean allowNullValues) { - final JsonNode tmpNode = - checkAllowMissingOrNull(node, key, allowMissingKeys, allowNullValues); + final boolean allowMissingKeys, final boolean allowNullValues) { + final JsonNode tmpNode = checkAllowMissingOrNull(node, key, allowMissingKeys, allowNullValues); return getByte(tmpNode); } /** - * Returns a Deephaven byte (primitive byte with a reserved value for Null) from a - * {@link JsonNode}. + * Returns a Deephaven byte (primitive byte with a reserved value for Null) from a {@link JsonNode}. * * @param node The {@link JsonNode} from which to retrieve the value. 
* @return A Deephaven byte (primitive byte with a reserved value for Null) @@ -347,9 +324,8 @@ public static byte getByte(final JsonNode node) { */ @Nullable public static Byte getBoxedByte(@NotNull final JsonNode node, @NotNull final String key, - final boolean allowMissingKeys, final boolean allowNullValues) { - final JsonNode tmpNode = - checkAllowMissingOrNull(node, key, allowMissingKeys, allowNullValues); + final boolean allowMissingKeys, final boolean allowNullValues) { + final JsonNode tmpNode = checkAllowMissingOrNull(node, key, allowMissingKeys, allowNullValues); return getBoxedByte(tmpNode); } @@ -372,23 +348,20 @@ public static Byte getBoxedByte(final JsonNode node) { } /** - * Returns a Deephaven char (primitive char with a reserved value for Null) from a - * {@link JsonNode}. + * Returns a Deephaven char (primitive char with a reserved value for Null) from a {@link JsonNode}. * * @param node The {@link JsonNode} from which to retrieve the value. * @param key The String key of the value to retrieve. * @return A Deephaven char (primitive char with a reserved value for Null) */ public static char getChar(@NotNull final JsonNode node, @NotNull final String key, - final boolean allowMissingKeys, final boolean allowNullValues) { - final JsonNode tmpNode = - checkAllowMissingOrNull(node, key, allowMissingKeys, allowNullValues); + final boolean allowMissingKeys, final boolean allowNullValues) { + final JsonNode tmpNode = checkAllowMissingOrNull(node, key, allowMissingKeys, allowNullValues); return getChar(tmpNode); } /** - * Returns a Deephaven char (primitive char with a reserved value for Null) from a - * {@link JsonNode}. + * Returns a Deephaven char (primitive char with a reserved value for Null) from a {@link JsonNode}. * * @param node The {@link JsonNode} from which to retrieve the value. 
* @return A Deephaven char (primitive char with a reserved value for Null) @@ -413,9 +386,8 @@ public static char getChar(final JsonNode node) { */ @Nullable public static Character getBoxedChar(@NotNull final JsonNode node, @NotNull final String key, - final boolean allowMissingKeys, final boolean allowNullValues) { - final JsonNode tmpNode = - checkAllowMissingOrNull(node, key, allowMissingKeys, allowNullValues); + final boolean allowMissingKeys, final boolean allowNullValues) { + final JsonNode tmpNode = checkAllowMissingOrNull(node, key, allowMissingKeys, allowNullValues); return getBoxedChar(tmpNode); } @@ -440,9 +412,8 @@ public static Character getBoxedChar(final JsonNode tmpNode) { */ @Nullable public static String getString(@NotNull final JsonNode node, @NotNull final String key, - final boolean allowMissingKeys, final boolean allowNullValues) { - final JsonNode tmpNode = - checkAllowMissingOrNull(node, key, allowMissingKeys, allowNullValues); + final boolean allowMissingKeys, final boolean allowNullValues) { + final JsonNode tmpNode = checkAllowMissingOrNull(node, key, allowMissingKeys, allowNullValues); return getString(tmpNode); } @@ -465,9 +436,8 @@ public static String getString(final JsonNode node) { * @return A Boolean */ public static Boolean getBoolean(@NotNull final JsonNode node, @NotNull final String key, - final boolean allowMissingKeys, final boolean allowNullValues) { - final JsonNode tmpNode = - checkAllowMissingOrNull(node, key, allowMissingKeys, allowNullValues); + final boolean allowMissingKeys, final boolean allowNullValues) { + final JsonNode tmpNode = checkAllowMissingOrNull(node, key, allowMissingKeys, allowNullValues); return getBoolean(tmpNode); } @@ -490,9 +460,8 @@ public static Boolean getBoolean(final JsonNode node) { * @return A BigInteger */ public static BigInteger getBigInteger(@NotNull final JsonNode node, @NotNull final String key, - final boolean allowMissingKeys, final boolean allowNullValues) { - final JsonNode tmpNode 
= - checkAllowMissingOrNull(node, key, allowMissingKeys, allowNullValues); + final boolean allowMissingKeys, final boolean allowNullValues) { + final JsonNode tmpNode = checkAllowMissingOrNull(node, key, allowMissingKeys, allowNullValues); return getBigInteger(tmpNode); } @@ -516,9 +485,8 @@ public static BigInteger getBigInteger(final JsonNode node) { */ @Nullable public static BigDecimal getBigDecimal(@NotNull final JsonNode node, @NotNull final String key, - final boolean allowMissingKeys, final boolean allowNullValues) { - final JsonNode tmpNode = - checkAllowMissingOrNull(node, key, allowMissingKeys, allowNullValues); + final boolean allowMissingKeys, final boolean allowNullValues) { + final JsonNode tmpNode = checkAllowMissingOrNull(node, key, allowMissingKeys, allowNullValues); return getBigDecimal(tmpNode); } @@ -542,9 +510,8 @@ public static BigDecimal getBigDecimal(final JsonNode node) { */ @Nullable public static Object getValue(@NotNull final JsonNode node, @NotNull final String key, - final boolean allowMissingKeys, final boolean allowNullValues) { - final JsonNode tmpNode = - checkAllowMissingOrNull(node, key, allowMissingKeys, allowNullValues); + final boolean allowMissingKeys, final boolean allowNullValues) { + final JsonNode tmpNode = checkAllowMissingOrNull(node, key, allowMissingKeys, allowNullValues); return getValue(tmpNode); } @@ -560,10 +527,9 @@ public static Object getValue(final JsonNode node) { } /** - * Returns a {@link DBDateTime} from a {@link JsonNode}. Will try to infer precision of a long - * value to be parsed using {@link DBTimeUtils} autoEpochToTime. If the value in the JSON record - * is not numeric, this method will attempt to parse it as a Deephaven DBDateTime string - * (yyyy-MM-ddThh:mm:ss[.nnnnnnnnn] TZ). + * Returns a {@link DBDateTime} from a {@link JsonNode}. Will try to infer precision of a long value to be parsed + * using {@link DBTimeUtils} autoEpochToTime. 
If the value in the JSON record is not numeric, this method will + * attempt to parse it as a Deephaven DBDateTime string (yyyy-MM-ddThh:mm:ss[.nnnnnnnnn] TZ). * * @param node The {@link JsonNode} from which to retrieve the value. * @param key The String key of the value to retrieve. @@ -571,17 +537,15 @@ public static Object getValue(final JsonNode node) { */ @Nullable public static DBDateTime getDBDateTime(@NotNull final JsonNode node, @NotNull final String key, - final boolean allowMissingKeys, final boolean allowNullValues) { - final JsonNode tmpNode = - checkAllowMissingOrNull(node, key, allowMissingKeys, allowNullValues); + final boolean allowMissingKeys, final boolean allowNullValues) { + final JsonNode tmpNode = checkAllowMissingOrNull(node, key, allowMissingKeys, allowNullValues); return getDBDateTime(tmpNode); } /** - * Returns a {@link DBDateTime} from a {@link JsonNode}. Will try to infer precision of a long - * value to be parsed using {@link DBTimeUtils} autoEpochToTime. If the value in the JSON record - * is not numeric, this method will attempt to parse it as a Deephaven DBDateTime string - * (yyyy-MM-ddThh:mm:ss[.nnnnnnnnn] TZ). + * Returns a {@link DBDateTime} from a {@link JsonNode}. Will try to infer precision of a long value to be parsed + * using {@link DBTimeUtils} autoEpochToTime. If the value in the JSON record is not numeric, this method will + * attempt to parse it as a Deephaven DBDateTime string (yyyy-MM-ddThh:mm:ss[.nnnnnnnnn] TZ). * * @param node The {@link JsonNode} from which to retrieve the value. 
* @return A {@link DBDateTime} diff --git a/Kafka/src/main/java/io/deephaven/kafka/ingest/KafkaIngester.java b/Kafka/src/main/java/io/deephaven/kafka/ingest/KafkaIngester.java index 9cb0b2c7bb9..c04c08ea4bf 100644 --- a/Kafka/src/main/java/io/deephaven/kafka/ingest/KafkaIngester.java +++ b/Kafka/src/main/java/io/deephaven/kafka/ingest/KafkaIngester.java @@ -28,24 +28,23 @@ * An ingester that replicates a Apache Kafka topic to a Deephaven Table Writer. * *

    - * Each KafkaIngester is assigned a topic and a subset of Kafka partitions. Each Kafka partition is - * mapped to a Deephaven internal partition. The column partition can be set through the - * constructor, or defaults to {@link DBTimeUtils#currentDateNy()}. + * Each KafkaIngester is assigned a topic and a subset of Kafka partitions. Each Kafka partition is mapped to a + * Deephaven internal partition. The column partition can be set through the constructor, or defaults to + * {@link DBTimeUtils#currentDateNy()}. *

    * *

    - * Automatic partition assignment and rebalancing are not supported. Each Kafka ingester instance - * must uniquely control its checkpoint record, which is incompatible with rebalancing. + * Automatic partition assignment and rebalancing are not supported. Each Kafka ingester instance must uniquely control + * its checkpoint record, which is incompatible with rebalancing. *

    * * */ public class KafkaIngester { - private static final int REPORT_INTERVAL_MS = - Configuration.getInstance().getIntegerForClassWithDefault( + private static final int REPORT_INTERVAL_MS = Configuration.getInstance().getIntegerForClassWithDefault( KafkaIngester.class, "reportIntervalMs", 60_000); private static final long MAX_ERRS = Configuration.getInstance().getLongForClassWithDefault( - KafkaIngester.class, "maxErrs", 500); + KafkaIngester.class, "maxErrs", 500); private final KafkaConsumer consumer; @NotNull private final Logger log; @@ -53,21 +52,19 @@ public class KafkaIngester { private final String partitionDescription; private final IntFunction partitionToConsumer; private final TIntObjectHashMap consumers = new TIntObjectHashMap<>(); - private final Set uniqueConsumers = - Collections.newSetFromMap(new IdentityHashMap<>()); + private final Set uniqueConsumers = Collections.newSetFromMap(new IdentityHashMap<>()); private final String logPrefix; private long messagesProcessed = 0; private long messagesWithErr = 0; private long lastProcessed = 0; - private final Set openPartitions = - Collections.newSetFromMap(new ConcurrentHashMap<>()); + private final Set openPartitions = Collections.newSetFromMap(new ConcurrentHashMap<>()); /** - * Constant predicate that returns true for all partitions. This is the default, each and every - * partition that exists will be handled by the same ingester. Because Kafka consumers are - * inherently single threaded, to scale beyond what a single consumer can handle, you must - * create multiple consumers each with a subset of partitions using {@link PartitionRange}, - * {@link PartitionRoundRobin}, {@link SinglePartition} or a custom {@link IntPredicate}. + * Constant predicate that returns true for all partitions. This is the default, each and every partition that + * exists will be handled by the same ingester. 
Because Kafka consumers are inherently single threaded, to scale + * beyond what a single consumer can handle, you must create multiple consumers each with a subset of partitions + * using {@link PartitionRange}, {@link PartitionRoundRobin}, {@link SinglePartition} or a custom + * {@link IntPredicate}. */ public static final IntPredicate ALL_PARTITIONS = new IntPredicate() { @Override @@ -98,8 +95,7 @@ public static class PartitionRange implements IntPredicate { */ public PartitionRange(int startInclusive, int endInclusive) { this.startInclusive = startInclusive; - this.endInclusive = - Require.geq(endInclusive, "endInclusive", startInclusive, "startInclusive"); + this.endInclusive = Require.geq(endInclusive, "endInclusive", startInclusive, "startInclusive"); } @Override @@ -109,8 +105,7 @@ public boolean test(int value) { @Override public String toString() { - return Integer.toString(startInclusive) - + (startInclusive == endInclusive ? "" : endInclusive); + return Integer.toString(startInclusive) + (startInclusive == endInclusive ? "" : endInclusive); } } @@ -142,14 +137,13 @@ public static class PartitionRoundRobin implements IntPredicate { /** * Creates a predicate for evenly distributing partitions among a set of ingesters. 
* - * @param consumerIndex the index of this consumer; you should have precisely one ingester - * configured for each index between zero (inclusive) and consumerCount (exclusive) + * @param consumerIndex the index of this consumer; you should have precisely one ingester configured for each + * index between zero (inclusive) and consumerCount (exclusive) * @param consumerCount the number of consumers that will ingest this topic */ public PartitionRoundRobin(int consumerIndex, int consumerCount) { this.consumerIndex = Require.geqZero( - Require.lt(consumerIndex, "consumerIndex", consumerCount, "consumerCount"), - "consumerIndex"); + Require.lt(consumerIndex, "consumerIndex", consumerCount, "consumerCount"), "consumerIndex"); this.consumerCount = consumerCount; } @@ -170,16 +164,15 @@ public String toString() { * @param log a log for output * @param props the properties used to create the {@link KafkaConsumer} * @param topic the topic to replicate - * @param partitionToConsumer a function implementing a mapping from partition to its consumer - * of records. - * @param partitionToInitialSeekOffset a function implementing a mapping from partition to its - * intial seek offset, or -1 if seek to beginning is intended. + * @param partitionToConsumer a function implementing a mapping from partition to its consumer of records. + * @param partitionToInitialSeekOffset a function implementing a mapping from partition to its intial seek offset, + * or -1 if seek to beginning is intended. 
*/ public KafkaIngester(final Logger log, - final Properties props, - final String topic, - final IntFunction partitionToConsumer, - final IntToLongFunction partitionToInitialSeekOffset) { + final Properties props, + final String topic, + final IntFunction partitionToConsumer, + final IntToLongFunction partitionToInitialSeekOffset) { this(log, props, topic, ALL_PARTITIONS, partitionToConsumer, partitionToInitialSeekOffset); } @@ -197,29 +190,27 @@ public KafkaIngester(final Logger log, * @param props the properties used to create the {@link KafkaConsumer} * @param topic the topic to replicate * @param partitionFilter a predicate indicating which partitions we should replicate - * @param partitionToConsumer a function implementing a mapping from partition to its consumer - * of records. - * @param partitionToInitialSeekOffset a function implementing a mapping from partition to its - * intial seek offset, or -1 if seek to beginning is intended. + * @param partitionToConsumer a function implementing a mapping from partition to its consumer of records. + * @param partitionToInitialSeekOffset a function implementing a mapping from partition to its intial seek offset, + * or -1 if seek to beginning is intended. 
*/ @SuppressWarnings("rawtypes") public KafkaIngester(@NotNull final Logger log, - final Properties props, - final String topic, - final IntPredicate partitionFilter, - final IntFunction partitionToConsumer, - final IntToLongFunction partitionToInitialSeekOffset) { + final Properties props, + final String topic, + final IntPredicate partitionFilter, + final IntFunction partitionToConsumer, + final IntToLongFunction partitionToInitialSeekOffset) { this.log = log; this.topic = topic; this.partitionDescription = partitionFilter.toString(); this.partitionToConsumer = partitionToConsumer; - this.logPrefix = - KafkaIngester.class.getSimpleName() + "(" + topic + ", " + partitionDescription + "): "; + this.logPrefix = KafkaIngester.class.getSimpleName() + "(" + topic + ", " + partitionDescription + "): "; consumer = new KafkaConsumer(props); final List partitions = consumer.partitionsFor(topic); partitions.stream().filter(pi -> partitionFilter.test(pi.partition())) - .map(pi -> new TopicPartition(topic, pi.partition())).forEach(openPartitions::add); + .map(pi -> new TopicPartition(topic, pi.partition())).forEach(openPartitions::add); consumer.assign(openPartitions); @@ -227,29 +218,26 @@ public KafkaIngester(@NotNull final Logger log, if (assignments.size() <= 0) { throw new UncheckedDeephavenException("Empty partition assignments"); } - log.info().append(logPrefix).append("Partition Assignments: ") - .append(assignments.toString()).endl(); + log.info().append(logPrefix).append("Partition Assignments: ").append(assignments.toString()).endl(); if (assignments.size() != openPartitions.size()) { - throw new UncheckedDeephavenException( - logPrefix + "Partition assignments do not match request: assignments=" + assignments - + ", request=" + openPartitions); + throw new UncheckedDeephavenException(logPrefix + "Partition assignments do not match request: assignments=" + + assignments + ", request=" + openPartitions); } for (final TopicPartition topicPartition : assignments) { - 
final long seekOffset = - partitionToInitialSeekOffset.applyAsLong(topicPartition.partition()); + final long seekOffset = partitionToInitialSeekOffset.applyAsLong(topicPartition.partition()); if (seekOffset == SEEK_TO_BEGINNING) { - log.info().append(logPrefix).append(topicPartition.toString()) - .append(" seeking to beginning.").append(seekOffset).endl(); + log.info().append(logPrefix).append(topicPartition.toString()).append(" seeking to beginning.") + .append(seekOffset).endl(); consumer.seekToBeginning(Collections.singletonList(topicPartition)); } else if (seekOffset == SEEK_TO_END) { - log.info().append(logPrefix).append(topicPartition.toString()) - .append(" seeking to end.").append(seekOffset).endl(); + log.info().append(logPrefix).append(topicPartition.toString()).append(" seeking to end.") + .append(seekOffset).endl(); consumer.seekToEnd(Collections.singletonList(topicPartition)); } else if (seekOffset != DONT_SEEK) { - log.info().append(logPrefix).append(topicPartition.toString()) - .append(" seeking to offset ").append(seekOffset).append(".").endl(); + log.info().append(logPrefix).append(topicPartition.toString()).append(" seeking to offset ") + .append(seekOffset).append(".").endl(); consumer.seek(topicPartition, seekOffset); } } @@ -284,9 +272,8 @@ private void consumerLoop() { final int currentPartitionSize = openPartitions.size(); if (currentPartitionSize != expectedPartitionCount) { log.error().append(logPrefix) - .append("Stopping due to partition size change to ") - .append(currentPartitionSize) - .append(".").endl(); + .append("Stopping due to partition size change to ").append(currentPartitionSize) + .append(".").endl(); break; } final long beforePoll = System.nanoTime(); @@ -295,10 +282,9 @@ private void consumerLoop() { boolean noMore = pollOnce(Duration.ofNanos(remainingNanos)); if (noMore) { log.error().append(logPrefix) - .append("Stopping due to errors (").append(messagesWithErr) - .append(" messages with error out of 
").append(messagesProcessed) - .append(" messages processed)") - .endl(); + .append("Stopping due to errors (").append(messagesWithErr) + .append(" messages with error out of ").append(messagesProcessed).append(" messages processed)") + .endl(); break; } final long afterPoll = System.nanoTime(); @@ -306,11 +292,10 @@ private void consumerLoop() { final long intervalMessages = messagesProcessed - lastProcessed; final long intervalNanos = afterPoll - lastReportNanos; log.info().append(logPrefix) - .append("Processed ").append(intervalMessages).append(" in ") - .append(intervalNanos / 1000_000L).append("ms, ") - .append(rateFormat.format(msgPerSec(intervalMessages, intervalNanos))) - .append(" msgs/sec") - .endl(); + .append("Processed ").append(intervalMessages).append(" in ") + .append(intervalNanos / 1000_000L).append("ms, ") + .append(rateFormat.format(msgPerSec(intervalMessages, intervalNanos))).append(" msgs/sec") + .endl(); lastReportNanos = afterPoll; lastProcessed = messagesProcessed; } @@ -333,8 +318,8 @@ private boolean pollOnce(final Duration timeout) { // we interpret this as a signal to stop. 
return false; } catch (Exception ex) { - log.error().append(logPrefix).append("Exception while polling for Kafka messages:") - .append(ex).append(", aborting."); + log.error().append(logPrefix).append("Exception while polling for Kafka messages:").append(ex) + .append(", aborting."); return false; } @@ -350,19 +335,17 @@ private boolean pollOnce(final Duration timeout) { } - final List> partitionRecords = - records.records(topicPartition); + final List> partitionRecords = records.records(topicPartition); try { consumer.accept(partitionRecords); } catch (Exception ex) { ++messagesWithErr; - log.error().append(logPrefix).append("Exception while processing Kafka message:") - .append(ex); + log.error().append(logPrefix).append("Exception while processing Kafka message:").append(ex); if (messagesWithErr > MAX_ERRS) { consumer.acceptFailure(ex); - log.error().append(logPrefix).append( - "Max number of errors exceeded, aborting " + this + " consumer thread."); + log.error().append(logPrefix) + .append("Max number of errors exceeded, aborting " + this + " consumer thread."); return true; } continue; diff --git a/Kafka/src/main/java/io/deephaven/kafka/ingest/KafkaIngesterException.java b/Kafka/src/main/java/io/deephaven/kafka/ingest/KafkaIngesterException.java index 70b22ea3e6f..e7a6be9df23 100644 --- a/Kafka/src/main/java/io/deephaven/kafka/ingest/KafkaIngesterException.java +++ b/Kafka/src/main/java/io/deephaven/kafka/ingest/KafkaIngesterException.java @@ -3,8 +3,7 @@ import io.deephaven.UncheckedDeephavenException; /** - * This exception is thrown when there is a failure to consume a Kafka record during Kafka to - * Deephaven ingestion. + * This exception is thrown when there is a failure to consume a Kafka record during Kafka to Deephaven ingestion. 
*/ public class KafkaIngesterException extends UncheckedDeephavenException { /** diff --git a/Kafka/src/main/java/io/deephaven/kafka/ingest/KafkaStreamConsumer.java b/Kafka/src/main/java/io/deephaven/kafka/ingest/KafkaStreamConsumer.java index fc720c81991..f54115a942b 100644 --- a/Kafka/src/main/java/io/deephaven/kafka/ingest/KafkaStreamConsumer.java +++ b/Kafka/src/main/java/io/deephaven/kafka/ingest/KafkaStreamConsumer.java @@ -7,9 +7,8 @@ import java.util.function.Consumer; /** - * Consumer for lists of ConsumerRecords coming from Kafka. The StreamFailureConsumer is extended so - * that we can report errors emanating from our consumer thread. + * Consumer for lists of ConsumerRecords coming from Kafka. The StreamFailureConsumer is extended so that we can report + * errors emanating from our consumer thread. */ -public interface KafkaStreamConsumer - extends Consumer>>, StreamFailureConsumer { +public interface KafkaStreamConsumer extends Consumer>>, StreamFailureConsumer { } diff --git a/Kafka/src/main/java/io/deephaven/kafka/ingest/KafkaStreamPublisher.java b/Kafka/src/main/java/io/deephaven/kafka/ingest/KafkaStreamPublisher.java index a93b7ca7157..e78bc5136e2 100644 --- a/Kafka/src/main/java/io/deephaven/kafka/ingest/KafkaStreamPublisher.java +++ b/Kafka/src/main/java/io/deephaven/kafka/ingest/KafkaStreamPublisher.java @@ -17,8 +17,8 @@ import java.util.stream.Collectors; /** - * An adapter that maps keys and values, possibly each with multiple fields, to single Deephaven - * columns. Each Kafka record produces one Deephaven row. + * An adapter that maps keys and values, possibly each with multiple fields, to single Deephaven columns. Each Kafka + * record produces one Deephaven row. 
*/ public class KafkaStreamPublisher implements ConsumerRecordToStreamPublisherAdapter { private final StreamPublisherImpl publisher; @@ -37,16 +37,16 @@ public class KafkaStreamPublisher implements ConsumerRecordToStreamPublisherAdap final Function valueToChunkObjectMapper; private KafkaStreamPublisher( - final StreamPublisherImpl publisher, - final int kafkaPartitionColumnIndex, - final int offsetColumnIndex, - final int timestampColumnIndex, - final KeyOrValueProcessor keyProcessor, - final KeyOrValueProcessor valueProcessor, - final int simpleKeyColumnIndex, - final int simpleValueColumnIndex, - final Function keyToChunkObjectMapper, - final Function valueToChunkObjectMapper) { + final StreamPublisherImpl publisher, + final int kafkaPartitionColumnIndex, + final int offsetColumnIndex, + final int timestampColumnIndex, + final KeyOrValueProcessor keyProcessor, + final KeyOrValueProcessor valueProcessor, + final int simpleKeyColumnIndex, + final int simpleValueColumnIndex, + final Function keyToChunkObjectMapper, + final Function valueToChunkObjectMapper) { this.publisher = publisher; this.kafkaPartitionColumnIndex = kafkaPartitionColumnIndex; this.offsetColumnIndex = offsetColumnIndex; @@ -60,36 +60,32 @@ private KafkaStreamPublisher( keyIsSimpleObject = this.simpleKeyColumnIndex >= 0; if (keyIsSimpleObject && keyProcessor != null) { - throw new IllegalArgumentException( - "Simple Key Column Index can not be set when a keyProcessor is set"); + throw new IllegalArgumentException("Simple Key Column Index can not be set when a keyProcessor is set"); } valueIsSimpleObject = this.simpleValueColumnIndex >= 0; if (valueIsSimpleObject && valueProcessor != null) { - throw new IllegalArgumentException( - "Simple Value Column Index can not be set when a valueProcessor is set"); + throw new IllegalArgumentException("Simple Value Column Index can not be set when a valueProcessor is set"); } } public static ConsumerRecordToStreamPublisherAdapter make( - final 
StreamPublisherImpl publisher, - final int kafkaPartitionColumnIndex, - final int offsetColumnIndex, - final int timestampColumnIndex, - final KeyOrValueProcessor keyProcessorArg, - final KeyOrValueProcessor valueProcessorArg, - final int simpleKeyColumnIndexArg, - final int simpleValueColumnIndexArg, - final Function keyToChunkObjectMapper, - final Function valueToChunkObjectMapper) { + final StreamPublisherImpl publisher, + final int kafkaPartitionColumnIndex, + final int offsetColumnIndex, + final int timestampColumnIndex, + final KeyOrValueProcessor keyProcessorArg, + final KeyOrValueProcessor valueProcessorArg, + final int simpleKeyColumnIndexArg, + final int simpleValueColumnIndexArg, + final Function keyToChunkObjectMapper, + final Function valueToChunkObjectMapper) { if ((keyProcessorArg != null) && (simpleKeyColumnIndexArg != -1)) { - throw new IllegalArgumentException( - "Either keyProcessor != null or simpleKeyColumnIndex != -1"); + throw new IllegalArgumentException("Either keyProcessor != null or simpleKeyColumnIndex != -1"); } if ((valueProcessorArg != null) && (simpleValueColumnIndexArg != -1)) { - throw new IllegalArgumentException( - "Either valueProcessor != null or simpleValueColumnIndex != -1"); + throw new IllegalArgumentException("Either valueProcessor != null or simpleValueColumnIndex != -1"); } final KeyOrValueProcessor keyProcessor; @@ -99,9 +95,9 @@ public static ConsumerRecordToStreamPublisherAdapter make( simpleKeyColumnIndex = -1; } else { final Pair keyPair = - getProcessorAndSimpleIndex( - simpleKeyColumnIndexArg, - publisher.chunkType(simpleKeyColumnIndexArg)); + getProcessorAndSimpleIndex( + simpleKeyColumnIndexArg, + publisher.chunkType(simpleKeyColumnIndexArg)); keyProcessor = keyPair.first; simpleKeyColumnIndex = keyPair.second; } @@ -113,35 +109,33 @@ public static ConsumerRecordToStreamPublisherAdapter make( simpleValueColumnIndex = -1; } else { final Pair valuePair = - getProcessorAndSimpleIndex( - simpleValueColumnIndexArg, - 
publisher.chunkType(simpleValueColumnIndexArg)); + getProcessorAndSimpleIndex( + simpleValueColumnIndexArg, + publisher.chunkType(simpleValueColumnIndexArg)); valueProcessor = valuePair.first; simpleValueColumnIndex = valuePair.second; } return new KafkaStreamPublisher( - publisher, - kafkaPartitionColumnIndex, - offsetColumnIndex, - timestampColumnIndex, - keyProcessor, - valueProcessor, - simpleKeyColumnIndex, - simpleValueColumnIndex, - keyToChunkObjectMapper, - valueToChunkObjectMapper); + publisher, + kafkaPartitionColumnIndex, + offsetColumnIndex, + timestampColumnIndex, + keyProcessor, + valueProcessor, + simpleKeyColumnIndex, + simpleValueColumnIndex, + keyToChunkObjectMapper, + valueToChunkObjectMapper); } @NotNull - private static Pair getProcessorAndSimpleIndex(int columnIndex, - ChunkType chunkType) { + private static Pair getProcessorAndSimpleIndex(int columnIndex, ChunkType chunkType) { final boolean isSimpleObject = chunkType == ChunkType.Object; final int simpleIndex; final KeyOrValueProcessor processor; if (!isSimpleObject) { - processor = - new SimpleKeyOrValueProcessor(columnIndex, ChunkUnboxer.getEmptyUnboxer(chunkType)); + processor = new SimpleKeyOrValueProcessor(columnIndex, ChunkUnboxer.getEmptyUnboxer(chunkType)); simpleIndex = -1; } else { processor = null; @@ -172,11 +166,11 @@ private void doConsumeRecords(List> records) { final int chunkSize = Math.min(records.size(), chunks[0].capacity()); try (final WritableObjectChunk keyChunkCloseable = haveKey() - ? WritableObjectChunk.makeWritableChunk(chunkSize) - : null; - final WritableObjectChunk valueChunkCloseable = haveValue() ? WritableObjectChunk.makeWritableChunk(chunkSize) - : null) { + : null; + final WritableObjectChunk valueChunkCloseable = haveValue() + ? 
WritableObjectChunk.makeWritableChunk(chunkSize) + : null) { WritableObjectChunk keyChunk; if (keyChunkCloseable != null) { keyChunkCloseable.setSize(0); @@ -197,14 +191,14 @@ private void doConsumeRecords(List> records) { } WritableIntChunk partitionChunk = (kafkaPartitionColumnIndex >= 0) - ? chunks[kafkaPartitionColumnIndex].asWritableIntChunk() - : null; + ? chunks[kafkaPartitionColumnIndex].asWritableIntChunk() + : null; WritableLongChunk offsetChunk = offsetColumnIndex >= 0 - ? chunks[offsetColumnIndex].asWritableLongChunk() - : null; + ? chunks[offsetColumnIndex].asWritableLongChunk() + : null; WritableLongChunk timestampChunk = timestampColumnIndex >= 0 - ? chunks[timestampColumnIndex].asWritableLongChunk() - : null; + ? chunks[timestampColumnIndex].asWritableLongChunk() + : null; for (ConsumerRecord record : records) { if (--remaining == 0) { @@ -284,9 +278,8 @@ private void doConsumeRecords(List> records) { private void checkChunkSizes(WritableChunk[] chunks) { for (int cc = 1; cc < chunks.length; ++cc) { if (chunks[cc].size() != chunks[0].size()) { - throw new IllegalStateException( - "Publisher chunks have size mismatch: " + Arrays.stream(chunks) - .map(c -> Integer.toString(c.size())).collect(Collectors.joining(", "))); + throw new IllegalStateException("Publisher chunks have size mismatch: " + + Arrays.stream(chunks).map(c -> Integer.toString(c.size())).collect(Collectors.joining(", "))); } } } @@ -302,7 +295,7 @@ static class SimpleKeyOrValueProcessor implements KeyOrValueProcessor { @Override public void handleChunk(ObjectChunk inputChunk, - WritableChunk[] publisherChunks) { + WritableChunk[] publisherChunks) { final WritableChunk publisherChunk = publisherChunks[offset]; final int existingSize = publisherChunk.size(); publisherChunk.setSize(existingSize + inputChunk.size()); @@ -311,7 +304,7 @@ public void handleChunk(ObjectChunk inputChunk, } void flushKeyChunk(WritableObjectChunk objectChunk, - WritableChunk[] publisherChunks) { + 
WritableChunk[] publisherChunks) { if (keyIsSimpleObject) { return; } @@ -320,7 +313,7 @@ void flushKeyChunk(WritableObjectChunk objectChunk, } void flushValueChunk(WritableObjectChunk objectChunk, - WritableChunk[] publisherChunks) { + WritableChunk[] publisherChunks) { if (valueIsSimpleObject) { return; } diff --git a/Kafka/src/main/java/io/deephaven/kafka/ingest/KeyOrValueProcessor.java b/Kafka/src/main/java/io/deephaven/kafka/ingest/KeyOrValueProcessor.java index 655d9d09db5..1e481e4d4a4 100644 --- a/Kafka/src/main/java/io/deephaven/kafka/ingest/KeyOrValueProcessor.java +++ b/Kafka/src/main/java/io/deephaven/kafka/ingest/KeyOrValueProcessor.java @@ -6,15 +6,13 @@ public interface KeyOrValueProcessor { /** - * After consuming a set of generic records for a batch that are not raw objects, we pass the - * keys or values to an appropriate handler. The handler must know its data types and offsets - * within the publisher chunks, and "copy" the data from the inputChunk to the appropriate - * chunks for the stream publisher. + * After consuming a set of generic records for a batch that are not raw objects, we pass the keys or values to an + * appropriate handler. The handler must know its data types and offsets within the publisher chunks, and "copy" the + * data from the inputChunk to the appropriate chunks for the stream publisher. * - * @param inputChunk the chunk containing the keys or values as Kafka deserialized them from the - * consumer record + * @param inputChunk the chunk containing the keys or values as Kafka deserialized them from the consumer record * @param publisherChunks the output chunks for this table that must be appended to. 
*/ void handleChunk(ObjectChunk inputChunk, - WritableChunk[] publisherChunks); + WritableChunk[] publisherChunks); } diff --git a/Kafka/src/main/java/io/deephaven/kafka/ingest/MultiFieldChunkAdapter.java b/Kafka/src/main/java/io/deephaven/kafka/ingest/MultiFieldChunkAdapter.java index ea1868125b7..2cc6d68fd22 100644 --- a/Kafka/src/main/java/io/deephaven/kafka/ingest/MultiFieldChunkAdapter.java +++ b/Kafka/src/main/java/io/deephaven/kafka/ingest/MultiFieldChunkAdapter.java @@ -22,18 +22,17 @@ public class MultiFieldChunkAdapter implements KeyOrValueProcessor { private final FieldCopier[] fieldCopiers; protected MultiFieldChunkAdapter( - final TableDefinition definition, - final IntFunction chunkTypeForIndex, - final Map fieldNamesToColumnNames, - final boolean allowNulls, - final FieldCopier.Factory fieldCopierFactory) { + final TableDefinition definition, + final IntFunction chunkTypeForIndex, + final Map fieldNamesToColumnNames, + final boolean allowNulls, + final FieldCopier.Factory fieldCopierFactory) { this.allowNulls = allowNulls; final String[] columnNames = definition.getColumnNamesArray(); final Class[] columnTypes = definition.getColumnTypesArray(); - final TObjectIntMap deephavenColumnNameToIndex = - new TObjectIntHashMap<>(columnNames.length, 0.5f, -1); + final TObjectIntMap deephavenColumnNameToIndex = new TObjectIntHashMap<>(columnNames.length, 0.5f, -1); for (int ii = 0; ii < columnNames.length; ++ii) { deephavenColumnNameToIndex.put(columnNames[ii], ii); } @@ -43,23 +42,21 @@ protected MultiFieldChunkAdapter( int col = 0; for (Map.Entry fieldToColumn : fieldNamesToColumnNames.entrySet()) { - final int deephavenColumnIndex = - deephavenColumnNameToIndex.get(fieldToColumn.getValue()); + final int deephavenColumnIndex = deephavenColumnNameToIndex.get(fieldToColumn.getValue()); if (deephavenColumnIndex == deephavenColumnNameToIndex.getNoEntryValue()) { - throw new IllegalArgumentException( - "Column not found in Deephaven table: " + 
deephavenColumnIndex); + throw new IllegalArgumentException("Column not found in Deephaven table: " + deephavenColumnIndex); } chunkOffsets[col] = deephavenColumnIndex; fieldCopiers[col++] = fieldCopierFactory.make(fieldToColumn.getKey(), - chunkTypeForIndex.apply(deephavenColumnIndex), columnTypes[deephavenColumnIndex]); + chunkTypeForIndex.apply(deephavenColumnIndex), columnTypes[deephavenColumnIndex]); } } @Override public void handleChunk(ObjectChunk inputChunk, - WritableChunk[] publisherChunks) { + WritableChunk[] publisherChunks) { if (!allowNulls) { for (int ii = 0; ii < inputChunk.size(); ++ii) { if (inputChunk.get(ii) == null) { @@ -68,12 +65,10 @@ public void handleChunk(ObjectChunk inputChunk, } } for (int cc = 0; cc < chunkOffsets.length; ++cc) { - final WritableChunk publisherChunk = - publisherChunks[chunkOffsets[cc]]; + final WritableChunk publisherChunk = publisherChunks[chunkOffsets[cc]]; final int existingSize = publisherChunk.size(); publisherChunk.setSize(existingSize + inputChunk.size()); - fieldCopiers[cc].copyField(inputChunk, publisherChunk, 0, existingSize, - inputChunk.size()); + fieldCopiers[cc].copyField(inputChunk, publisherChunk, 0, existingSize, inputChunk.size()); } } } diff --git a/Kafka/src/main/java/io/deephaven/kafka/ingest/ReplicateKafka.java b/Kafka/src/main/java/io/deephaven/kafka/ingest/ReplicateKafka.java index 00321a85c64..2ba40ff4585 100644 --- a/Kafka/src/main/java/io/deephaven/kafka/ingest/ReplicateKafka.java +++ b/Kafka/src/main/java/io/deephaven/kafka/ingest/ReplicateKafka.java @@ -10,9 +10,7 @@ public class ReplicateKafka { public static void main(String[] args) throws IOException { - ReplicatePrimitiveCode.charToAllButBoolean(GenericRecordCharFieldCopier.class, - ReplicatePrimitiveCode.MAIN_SRC); - ReplicatePrimitiveCode.charToAllButBoolean(JsonNodeCharFieldCopier.class, - ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.charToAllButBoolean(GenericRecordCharFieldCopier.class, 
ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.charToAllButBoolean(JsonNodeCharFieldCopier.class, ReplicatePrimitiveCode.MAIN_SRC); } } diff --git a/Kafka/src/test/java/io/deephaven/kafka/KafkaToolsTest.java b/Kafka/src/test/java/io/deephaven/kafka/KafkaToolsTest.java index 10568b8326b..6c5c7c8cd09 100644 --- a/Kafka/src/test/java/io/deephaven/kafka/KafkaToolsTest.java +++ b/Kafka/src/test/java/io/deephaven/kafka/KafkaToolsTest.java @@ -17,15 +17,15 @@ public class KafkaToolsTest { // private static final String schemaWithNull = - " { " - + " \"type\": \"record\", " - + " \"name\": \"null_schema\"," - + " \"namespace\": \"io.deephaven.test\"," - + " \"fields\" : [" - + " {\"name\": \"Symbol\", \"type\": \"string\"}," - + " {\"name\": \"Price\", \"type\": [\"null\", \"double\"] }" - + " ]" - + "}"; + " { " + + " \"type\": \"record\", " + + " \"name\": \"null_schema\"," + + " \"namespace\": \"io.deephaven.test\"," + + " \"fields\" : [" + + " {\"name\": \"Symbol\", \"type\": \"string\"}," + + " {\"name\": \"Price\", \"type\": [\"null\", \"double\"] }" + + " ]" + + "}"; @Test public void testAvroSchemaWithNulls() { @@ -40,24 +40,24 @@ public void testAvroSchemaWithNulls() { } private static final String schemaWithNesting = - " { " - + " \"type\": \"record\", " - + " \"name\": \"nested_schema\"," - + " \"namespace\": \"io.deephaven.test\"," - + " \"fields\" : [" - + " {\"name\": \"NestedField\"," - + " \"type\": {" - + " \"type\": \"record\", " - + " \"name\": \"nested_record\"," - + " \"namespace\": \"io.deephaven.test.nested_schema\"," - + " \"fields\" : [" - + " {\"name\": \"Symbol\", \"type\": \"string\"}," - + " {\"name\": \"Price\", \"type\": \"double\"}" - + " ]" - + " }" - + " }" - + " ]" - + "}"; + " { " + + " \"type\": \"record\", " + + " \"name\": \"nested_schema\"," + + " \"namespace\": \"io.deephaven.test\"," + + " \"fields\" : [" + + " {\"name\": \"NestedField\"," + + " \"type\": {" + + " \"type\": \"record\", " + + " \"name\": 
\"nested_record\"," + + " \"namespace\": \"io.deephaven.test.nested_schema\"," + + " \"fields\" : [" + + " {\"name\": \"Symbol\", \"type\": \"string\"}," + + " {\"name\": \"Price\", \"type\": \"double\"}" + + " ]" + + " }" + + " }" + + " ]" + + "}"; @Test public void testAvroSchemaWithNesting() { @@ -72,19 +72,19 @@ public void testAvroSchemaWithNesting() { } private static final String schemaWithBasicTypes = - " { " - + " \"type\": \"record\", " - + " \"name\": \"types_schema\"," - + " \"namespace\": \"io.deephaven.test\"," - + " \"fields\" : [" - + " {\"name\": \"BooleanField\", \"type\": \"boolean\" }," - + " {\"name\": \"IntField\", \"type\": \"int\" }," - + " {\"name\": \"LongField\", \"type\": \"long\" }," - + " {\"name\": \"FloatField\", \"type\": \"float\" }," - + " {\"name\": \"DoubleField\", \"type\": \"double\" }," - + " {\"name\": \"StringField\", \"type\": \"string\" }" - + " ]" - + "}"; + " { " + + " \"type\": \"record\", " + + " \"name\": \"types_schema\"," + + " \"namespace\": \"io.deephaven.test\"," + + " \"fields\" : [" + + " {\"name\": \"BooleanField\", \"type\": \"boolean\" }," + + " {\"name\": \"IntField\", \"type\": \"int\" }," + + " {\"name\": \"LongField\", \"type\": \"long\" }," + + " {\"name\": \"FloatField\", \"type\": \"float\" }," + + " {\"name\": \"DoubleField\", \"type\": \"double\" }," + + " {\"name\": \"StringField\", \"type\": \"string\" }" + + " ]" + + "}"; @Test public void testAvroSChemaWithBasicTypesCoverage() { @@ -110,44 +110,44 @@ public void testAvroSChemaWithBasicTypesCoverage() { } private static final String schemaWithMoreNesting = - " { " - + " \"type\": \"record\", " - + " \"name\": \"nested_schema\"," - + " \"namespace\": \"io.deephaven.test\"," - + " \"fields\" : [" - + " {\"name\": \"NestedFields1\"," - + " \"type\": {" - + " \"type\": \"record\", " - + " \"name\": \"nested_record1\"," - + " \"namespace\": \"io.deephaven.test.nested_schema\"," - + " \"fields\" : [" - + " {\"name\": \"field1\", \"type\": \"int\"}," - 
+ " {\"name\": \"field2\", \"type\": \"float\"}" - + " ]" - + " }" - + " }," - + " {\"name\": \"NestedFields2\"," - + " \"type\": {" - + " \"type\": \"record\", " - + " \"name\": \"nested_record2\"," - + " \"namespace\": \"io.deephaven.test.nested_schema\"," - + " \"fields\" : [" - + " {\"name\": \"NestedFields3\"," - + " \"type\": {" - + " \"type\": \"record\", " - + " \"name\": \"nested_record3\"," - + " \"namespace\": \"io.deephaven.test.nested_schema\"," - + " \"fields\" : [" - + " {\"name\": \"field3\", \"type\": \"long\"}," - + " {\"name\": \"field4\", \"type\": \"double\"}" - + " ]" - + " }" - + " }" - + " ]" - + " }" - + " }" - + " ]" - + "}"; + " { " + + " \"type\": \"record\", " + + " \"name\": \"nested_schema\"," + + " \"namespace\": \"io.deephaven.test\"," + + " \"fields\" : [" + + " {\"name\": \"NestedFields1\"," + + " \"type\": {" + + " \"type\": \"record\", " + + " \"name\": \"nested_record1\"," + + " \"namespace\": \"io.deephaven.test.nested_schema\"," + + " \"fields\" : [" + + " {\"name\": \"field1\", \"type\": \"int\"}," + + " {\"name\": \"field2\", \"type\": \"float\"}" + + " ]" + + " }" + + " }," + + " {\"name\": \"NestedFields2\"," + + " \"type\": {" + + " \"type\": \"record\", " + + " \"name\": \"nested_record2\"," + + " \"namespace\": \"io.deephaven.test.nested_schema\"," + + " \"fields\" : [" + + " {\"name\": \"NestedFields3\"," + + " \"type\": {" + + " \"type\": \"record\", " + + " \"name\": \"nested_record3\"," + + " \"namespace\": \"io.deephaven.test.nested_schema\"," + + " \"fields\" : [" + + " {\"name\": \"field3\", \"type\": \"long\"}," + + " {\"name\": \"field4\", \"type\": \"double\"}" + + " ]" + + " }" + + " }" + + " ]" + + " }" + + " }" + + " ]" + + "}"; @Test public void testAvroSchemaWithMoreNesting() { diff --git a/Kafka/src/test/java/io/deephaven/kafka/ingest/TestAvroAdapter.java b/Kafka/src/test/java/io/deephaven/kafka/ingest/TestAvroAdapter.java index d5f09230e9c..24e525e4b73 100644 --- 
a/Kafka/src/test/java/io/deephaven/kafka/ingest/TestAvroAdapter.java +++ b/Kafka/src/test/java/io/deephaven/kafka/ingest/TestAvroAdapter.java @@ -25,8 +25,8 @@ public class TestAvroAdapter { @NotNull private File getSchemaFile(String name) { - final String avscPath = Configuration.getInstance().getDevRootPath() - + "/Kafka/src/test/resources/avro-examples/"; + final String avscPath = + Configuration.getInstance().getDevRootPath() + "/Kafka/src/test/resources/avro-examples/"; return new File(avscPath + name); } @@ -37,8 +37,7 @@ public void testSimple() throws IOException { final String[] names = new String[] {"viewtime", "userid", "pageid"}; final Class[] types = new Class[] {long.class, String.class, String.class}; - final TableDefinition definition = - new TableDefinition(Arrays.asList(types), Arrays.asList(names)); + final TableDefinition definition = new TableDefinition(Arrays.asList(types), Arrays.asList(names)); final GenericData.Record genericRecord = new GenericData.Record(avroSchema); genericRecord.put("viewtime", 1234L); @@ -51,7 +50,7 @@ public void testSimple() throws IOException { colMap.put("pageid", "pageid"); try (final WritableObjectChunk inputValues = - WritableObjectChunk.makeWritableChunk(1)) { + WritableObjectChunk.makeWritableChunk(1)) { inputValues.setSize(0); inputValues.add(genericRecord); @@ -66,7 +65,7 @@ public void testSimple() throws IOException { } final GenericRecordChunkAdapter adapter = GenericRecordChunkAdapter.make(definition, - (idx) -> output[idx].getChunkType(), colMap, avroSchema, true); + (idx) -> output[idx].getChunkType(), colMap, avroSchema, true); adapter.handleChunk(inputValues, output); TestCase.assertEquals(1, output[0].size()); @@ -84,13 +83,12 @@ public void testSimple() throws IOException { public void testTimestamp() throws IOException { final Schema avroSchema = new Schema.Parser().parse(getSchemaFile("fieldtest.avsc")); - final String[] names = new String[] {"last_name", "number", "truthiness", "timestamp", - 
"timestampMicros", "timeMillis", "timeMicros"}; - final Class[] types = new Class[] {String.class, int.class, boolean.class, DBDateTime.class, - DBDateTime.class, int.class, long.class}; + final String[] names = new String[] {"last_name", "number", "truthiness", "timestamp", "timestampMicros", + "timeMillis", "timeMicros"}; + final Class[] types = new Class[] {String.class, int.class, boolean.class, DBDateTime.class, DBDateTime.class, + int.class, long.class}; - final TableDefinition definition = - new TableDefinition(Arrays.asList(types), Arrays.asList(names)); + final TableDefinition definition = new TableDefinition(Arrays.asList(types), Arrays.asList(names)); final DBDateTime dt1 = DBTimeUtils.convertDateTime("2021-08-23T12:00:00.123456789 NY"); final DBDateTime dt2 = DBTimeUtils.convertDateTime("2021-08-23T13:00:00.500600700 NY"); @@ -123,13 +121,13 @@ public void testTimestamp() throws IOException { genericRecord3.put("timeMicros", 300000L); final Map colMap = new HashMap<>(); - for (String s : new String[] {"last_name", "number", "truthiness", "timestamp", - "timestampMicros", "timeMillis", "timeMicros"}) { + for (String s : new String[] {"last_name", "number", "truthiness", "timestamp", "timestampMicros", "timeMillis", + "timeMicros"}) { colMap.put(s, s); } try (final WritableObjectChunk inputValues = - WritableObjectChunk.makeWritableChunk(3)) { + WritableObjectChunk.makeWritableChunk(3)) { inputValues.setSize(0); inputValues.add(genericRecord1); inputValues.add(genericRecord2); @@ -150,7 +148,7 @@ public void testTimestamp() throws IOException { } final GenericRecordChunkAdapter adapter = GenericRecordChunkAdapter.make(definition, - (idx) -> output[idx].getChunkType(), colMap, avroSchema, true); + (idx) -> output[idx].getChunkType(), colMap, avroSchema, true); adapter.handleChunk(inputValues, output); for (int ii = 0; ii < 7; ++ii) { @@ -159,30 +157,23 @@ public void testTimestamp() throws IOException { TestCase.assertEquals("LN1", 
output[0].asObjectChunk().get(0)); TestCase.assertEquals(32, output[1].asIntChunk().get(0)); - TestCase.assertEquals(BooleanUtils.FALSE_BOOLEAN_AS_BYTE, - output[2].asByteChunk().get(0)); - TestCase.assertEquals(DBTimeUtils.millisToNanos(dt1.getMillis()), - output[3].asLongChunk().get(0)); - TestCase.assertEquals(DBTimeUtils.microsToNanos(dt1.getMicros()), - output[4].asLongChunk().get(0)); + TestCase.assertEquals(BooleanUtils.FALSE_BOOLEAN_AS_BYTE, output[2].asByteChunk().get(0)); + TestCase.assertEquals(DBTimeUtils.millisToNanos(dt1.getMillis()), output[3].asLongChunk().get(0)); + TestCase.assertEquals(DBTimeUtils.microsToNanos(dt1.getMicros()), output[4].asLongChunk().get(0)); TestCase.assertEquals(10000, output[5].asIntChunk().get(0)); TestCase.assertEquals(100000, output[6].asLongChunk().get(0)); TestCase.assertNull(output[0].asObjectChunk().get(1)); TestCase.assertEquals(64, output[1].asIntChunk().get(1)); - TestCase.assertEquals(BooleanUtils.TRUE_BOOLEAN_AS_BYTE, - output[2].asByteChunk().get(1)); - TestCase.assertEquals(DBTimeUtils.millisToNanos(dt2.getMillis()), - output[3].asLongChunk().get(1)); - TestCase.assertEquals(DBTimeUtils.microsToNanos(dt2.getMicros()), - output[4].asLongChunk().get(1)); + TestCase.assertEquals(BooleanUtils.TRUE_BOOLEAN_AS_BYTE, output[2].asByteChunk().get(1)); + TestCase.assertEquals(DBTimeUtils.millisToNanos(dt2.getMillis()), output[3].asLongChunk().get(1)); + TestCase.assertEquals(DBTimeUtils.microsToNanos(dt2.getMicros()), output[4].asLongChunk().get(1)); TestCase.assertEquals(20000, output[5].asIntChunk().get(1)); TestCase.assertEquals(200000, output[6].asLongChunk().get(1)); TestCase.assertEquals("LN3", output[0].asObjectChunk().get(2)); TestCase.assertEquals(128, output[1].asIntChunk().get(2)); - TestCase.assertEquals(BooleanUtils.NULL_BOOLEAN_AS_BYTE, - output[2].asByteChunk().get(2)); + TestCase.assertEquals(BooleanUtils.NULL_BOOLEAN_AS_BYTE, output[2].asByteChunk().get(2)); 
TestCase.assertEquals(QueryConstants.NULL_LONG, output[3].asLongChunk().get(2)); TestCase.assertEquals(QueryConstants.NULL_LONG, output[4].asLongChunk().get(2)); TestCase.assertEquals(30000, output[5].asIntChunk().get(2)); diff --git a/ModelFarm/src/main/java/io/deephaven/modelfarm/ConditionalModels.java b/ModelFarm/src/main/java/io/deephaven/modelfarm/ConditionalModels.java index 0a735d9a7d8..acc0585d4d2 100644 --- a/ModelFarm/src/main/java/io/deephaven/modelfarm/ConditionalModels.java +++ b/ModelFarm/src/main/java/io/deephaven/modelfarm/ConditionalModels.java @@ -18,19 +18,17 @@ import java.util.function.Function; /** - * A set of multiple models where predicates determine which of the models execute on any iteration. - * The predicates use input data and the most recent state for an underlying to determine which - * models should execute on an iteration. For example, the state could be the most recent valid - * result from a numerical model. + * A set of multiple models where predicates determine which of the models execute on any iteration. The predicates use + * input data and the most recent state for an underlying to determine which models should execute on an iteration. For + * example, the state could be the most recent valid result from a numerical model. *

    * Active models are executed in their input order. */ public class ConditionalModels implements Models { - private static final io.deephaven.io.logger.Logger log = - ProcessEnvironment.getDefaultLog(ConditionalModels.class); - private static final boolean LOG_PERF = Configuration.getInstance() - .getBooleanWithDefault("ModelFarm.logConditionalModelsPerformance", false); + private static final io.deephaven.io.logger.Logger log = ProcessEnvironment.getDefaultLog(ConditionalModels.class); + private static final boolean LOG_PERF = + Configuration.getInstance().getBooleanWithDefault("ModelFarm.logConditionalModelsPerformance", false); private final Map stateMap; private final Model[] models; @@ -46,13 +44,12 @@ public class ConditionalModels implements Model * * @param models models to execute. * @param predicates predicates used to determine if each model should execute. - * @param stateMap map of state by underlying id. THIS MAP SHOULD BE THREAD SAFE, OR RESULTS - * WILL BE UNPREDICTABLE! + * @param stateMap map of state by underlying id. THIS MAP SHOULD BE THREAD SAFE, OR RESULTS WILL BE UNPREDICTABLE! * @param dataToKey function to get a key from data. */ public ConditionalModels(final Model[] models, - final BiPredicate[] predicates, - final Map stateMap, final Function dataToKey) { + final BiPredicate[] predicates, + final Map stateMap, final Function dataToKey) { Require.neqNull(stateMap, "stateMap"); Require.neqNull(models, "models"); Require.elementsNeqNull(models, "models"); @@ -69,8 +66,7 @@ public ConditionalModels(final Model[] models, @Override public Object getLock(final DATA_TYPE data) { // Each underlying has a unique lock. - // This lock ensures that a single underlying cannot execute on multiple threads at the same - // time. + // This lock ensures that a single underlying cannot execute on multiple threads at the same time. 
return lockMap.computeIfAbsent(dataToKey.apply(data), key -> new Object()); } @@ -108,11 +104,11 @@ public Iterator> iterator(final DATA_TYPE data) { if (LOG_PERF) { log.warn().append("ConditionalModels.iterator PERFORMANCE: key=").append(key.toString()) - .append("tall=").append((t2 - t1) / 1000) - .append(" tstate=").append((t1 - t0) / 1000) - .append(" predicatevals=").append(Arrays.toString(pvals)) - .append(" predicatetimes=").append(Arrays.toString(ptimes)) - .endl(); + .append("tall=").append((t2 - t1) / 1000) + .append(" tstate=").append((t1 - t0) / 1000) + .append(" predicatevals=").append(Arrays.toString(pvals)) + .append(" predicatetimes=").append(Arrays.toString(ptimes)) + .endl(); } return toRun.iterator(); diff --git a/ModelFarm/src/main/java/io/deephaven/modelfarm/ExecPrioritizer.java b/ModelFarm/src/main/java/io/deephaven/modelfarm/ExecPrioritizer.java index 1b1c3b90ee3..77dff57b80c 100644 --- a/ModelFarm/src/main/java/io/deephaven/modelfarm/ExecPrioritizer.java +++ b/ModelFarm/src/main/java/io/deephaven/modelfarm/ExecPrioritizer.java @@ -15,11 +15,9 @@ public interface ExecPrioritizer *

    - * This method does not wait for previously submitted tasks to complete execution. Use - * {@link #awaitTermination awaitTermination} to do that. + * This method does not wait for previously submitted tasks to complete execution. Use {@link #awaitTermination + * awaitTermination} to do that. */ void shutdown(); /** - * Initiates an immediate termination of all tasks. Unexecuted tasks will not be executed. Tasks - * already executing may not be interrupted. + * Initiates an immediate termination of all tasks. Unexecuted tasks will not be executed. Tasks already executing + * may not be interrupted. */ void terminate(); /** * Blocks until all tasks have completed execution after a shutdown request. * - * @return {@code true} if this executor terminated and {@code false} if the timeout elapsed - * before termination + * @return {@code true} if this executor terminated and {@code false} if the timeout elapsed before termination */ boolean awaitTermination(); /** - * Blocks until all tasks have completed execution after a shutdown request, or the timeout - * occurs, whichever happens first. + * Blocks until all tasks have completed execution after a shutdown request, or the timeout occurs, whichever + * happens first. 
* * @param timeout the maximum time to wait * @param unit the time unit of the timeout argument - * @return {@code true} if this executor terminated and {@code false} if the timeout elapsed - * before termination + * @return {@code true} if this executor terminated and {@code false} if the timeout elapsed before termination */ boolean awaitTermination(final long timeout, final TimeUnit unit); @@ -61,8 +59,7 @@ public interface ModelFarm { * * @param timeout the maximum time to wait * @param unit the time unit of the timeout argument - * @return {@code true} if this executor terminated and {@code false} if the timeout elapsed - * before termination + * @return {@code true} if this executor terminated and {@code false} if the timeout elapsed before termination */ boolean shutdownAndAwaitTermination(final long timeout, final TimeUnit unit); } diff --git a/ModelFarm/src/main/java/io/deephaven/modelfarm/ModelFarmBase.java b/ModelFarm/src/main/java/io/deephaven/modelfarm/ModelFarmBase.java index 707e61c50af..042158fb2f1 100644 --- a/ModelFarm/src/main/java/io/deephaven/modelfarm/ModelFarmBase.java +++ b/ModelFarm/src/main/java/io/deephaven/modelfarm/ModelFarmBase.java @@ -44,10 +44,9 @@ public enum State { } /** - * An operation that uses data from Deephaven {@link io.deephaven.db.tables.Table Tables}, using - * either {@link io.deephaven.db.v2.sources.ColumnSource#getPrev} or - * {@link io.deephaven.db.v2.sources.ColumnSource#get}) depending on the value of the argument - * to {@link #retrieveData}. + * An operation that uses data from Deephaven {@link io.deephaven.db.tables.Table Tables}, using either + * {@link io.deephaven.db.v2.sources.ColumnSource#getPrev} or {@link io.deephaven.db.v2.sources.ColumnSource#get}) + * depending on the value of the argument to {@link #retrieveData}. */ @FunctionalInterface interface QueryDataRetrievalOperation { @@ -55,9 +54,9 @@ interface QueryDataRetrievalOperation { /** * Performs an operation using data from a query. 
* - * @param usePrev Whether to use the previous data at a given index when retrieving data - * (i.e. if {@code true}, use {@link io.deephaven.db.v2.sources.ColumnSource#getPrev} - * instead of {@link io.deephaven.db.v2.sources.ColumnSource#get}). + * @param usePrev Whether to use the previous data at a given index when retrieving data (i.e. if {@code true}, + * use {@link io.deephaven.db.v2.sources.ColumnSource#getPrev} instead of + * {@link io.deephaven.db.v2.sources.ColumnSource#get}). */ void retrieveData(boolean usePrev); @@ -93,8 +92,8 @@ public enum GetDataLockType { private final Set threads = new LinkedHashSet<>(); /** - * This model farm's state. Updated under lock on this {@code ModelFarmBase} instance. Should be - * used with {@link #setState} and {@link #getState} + * This model farm's state. Updated under lock on this {@code ModelFarmBase} instance. Should be used with + * {@link #setState} and {@link #getState} */ private State state = State.WAITING; @@ -103,8 +102,7 @@ private class Worker implements Runnable { public void run() { synchronized (ModelFarmBase.this) { // The worker threads should be added to the list of threads before starting. - Assert.assertion(threads.contains(Thread.currentThread()), - "threads.contains(Thread.currentThread())"); + Assert.assertion(threads.contains(Thread.currentThread()), "threads.contains(Thread.currentThread())"); } try { @@ -119,8 +117,8 @@ public void run() { final PrintWriter pw = new PrintWriter(sw); e.printStackTrace(pw); pw.close(); - log.error().append("Exception in ModelFarm worker thread stack trace. \n") - .append(sw.toString()).endl(); + log.error().append("Exception in ModelFarm worker thread stack trace. \n").append(sw.toString()) + .endl(); throw new RuntimeException(e); } @@ -128,10 +126,9 @@ public void run() { // During shutdown, keep pulling items from the queue until it is empty. 
if ((state == State.SHUTDOWN && isQueueEmpty()) || state == State.TERMINATING - || state == State.TERMINATED) { - log.warn().append("ModelFarm worker thread exiting. state=") - .append(state.toString()) - .append(" isQueueEmpty=").append(isQueueEmpty()).endl(); + || state == State.TERMINATED) { + log.warn().append("ModelFarm worker thread exiting. state=").append(state.toString()) + .append(" isQueueEmpty=").append(isQueueEmpty()).endl(); return; } } @@ -142,7 +139,7 @@ public void run() { final boolean threadsEmpty = threads.isEmpty(); if (threadsEmpty && (ModelFarmBase.this.state == State.SHUTDOWN - || ModelFarmBase.this.state == State.TERMINATING)) { + || ModelFarmBase.this.state == State.TERMINATING)) { setState(State.TERMINATED); } } @@ -159,8 +156,7 @@ public void run() { @SuppressWarnings("WeakerAccess") protected ModelFarmBase(final int nThreads, final Model model) { this.model = Require.neqNull(model, "model"); - this.threadGroup = - initializeThreadGroup(Require.gtZero(nThreads, "nThreads"), this.threads); + this.threadGroup = initializeThreadGroup(Require.gtZero(nThreads, "nThreads"), this.threads); } private ThreadGroup initializeThreadGroup(final int nThreads, final Set threads) { @@ -169,8 +165,7 @@ private ThreadGroup initializeThreadGroup(final int nThreads, final Set final ThreadGroup threadGroup = new ThreadGroup("ModelFarm"); for (int i = 0; i < nThreads; i++) { - final String threadName = - "ModelFarm_" + modelFarmN + "_Thread_" + (modelFarmNThreads++); + final String threadName = "ModelFarm_" + modelFarmN + "_Thread_" + (modelFarmNThreads++); threads.add(new Thread(threadGroup, new Worker(), threadName)); } @@ -197,49 +192,45 @@ interface MostRecentDataGetter { * Gets the most recent row data for a unique identifier. * * @param key unique identifier - * @return most recent row data for the unique identifier, or null, if there is no data for - * the unique identifier. 
+ * @return most recent row data for the unique identifier, or null, if there is no data for the unique + * identifier. */ DATATYPE get(final KEYTYPE key); } /** - * Returns a {@code ThrowingConsumer} that takes a {@link QueryDataRetrievalOperation}, acquires - * a {@link LiveTableMonitor} lock based on the specified {@code lockType}, then executes the - * {@code FitDataPopulator} with the appropriate value for usePrev. + * Returns a {@code ThrowingConsumer} that takes a {@link QueryDataRetrievalOperation}, acquires a + * {@link LiveTableMonitor} lock based on the specified {@code lockType}, then executes the {@code FitDataPopulator} + * with the appropriate value for usePrev. * * @param lockType The way of acquiring the {@code LiveTableMonitor} lock. * @return A function that runs a {@link } */ @SuppressWarnings("WeakerAccess") protected static FunctionalInterfaces.ThrowingBiConsumer getDoLockedConsumer( - final GetDataLockType lockType) { + final GetDataLockType lockType) { switch (lockType) { case LTM_LOCK_ALREADY_HELD: - return (queryDataRetrievalOperation, source) -> queryDataRetrievalOperation - .retrieveData(false); + return (queryDataRetrievalOperation, source) -> queryDataRetrievalOperation.retrieveData(false); case LTM_LOCK: - return (queryDataRetrievalOperation, source) -> LiveTableMonitor.DEFAULT - .exclusiveLock() - .doLocked(() -> queryDataRetrievalOperation.retrieveData(false)); + return (queryDataRetrievalOperation, source) -> LiveTableMonitor.DEFAULT.exclusiveLock() + .doLocked(() -> queryDataRetrievalOperation.retrieveData(false)); case LTM_READ_LOCK: - return (queryDataRetrievalOperation, source) -> LiveTableMonitor.DEFAULT - .sharedLock().doLocked(() -> queryDataRetrievalOperation.retrieveData(false)); + return (queryDataRetrievalOperation, source) -> LiveTableMonitor.DEFAULT.sharedLock() + .doLocked(() -> queryDataRetrievalOperation.retrieveData(false)); case SNAPSHOT: return (queryDataRetrievalOperation, source) -> { try { - 
ConstructSnapshot.callDataSnapshotFunction( - "ModelFarmBase.getData(SNAPSHOT)", - ConstructSnapshot.makeSnapshotControl(false, source), - (usePrev, beforeClockValue) -> { - queryDataRetrievalOperation.retrieveData(usePrev); - return true; // This indicates that the snapshot ran OK, not that - // the data is OK. - }); + ConstructSnapshot.callDataSnapshotFunction("ModelFarmBase.getData(SNAPSHOT)", + ConstructSnapshot.makeSnapshotControl(false, source), + (usePrev, beforeClockValue) -> { + queryDataRetrievalOperation.retrieveData(usePrev); + return true; // This indicates that the snapshot ran OK, not that the data is OK. + }); } catch (QueryCancellationException e) { log.warn(e).append( - "ModelFarmBase.getData(SNAPSHOT): QueryCancellationException. The ModelFarm is probably shutting down.") - .endl(); + "ModelFarmBase.getData(SNAPSHOT): QueryCancellationException. The ModelFarm is probably shutting down.") + .endl(); } }; default: @@ -248,8 +239,7 @@ protected static FunctionalInterfaces.ThrowingBiConsumer The type of the keys (e.g. {@link io.deephaven.modelfarm.fitterfarm.FitScope}). * @param The type of the data (e.g. @@ -29,13 +29,13 @@ * {@link io.deephaven.modelfarm.fitterfarm.futures.FuturesFitDataManager}). 
*/ public class ModelFarmOnDemand> - extends ModelFarmBase { + extends ModelFarmBase { - private static final boolean LOG_PERF = Configuration.getInstance() - .getBooleanWithDefault("ModelFarm.logModelFarmOnDemandPerformance", false); + private static final boolean LOG_PERF = + Configuration.getInstance().getBooleanWithDefault("ModelFarm.logModelFarmOnDemandPerformance", false); private static final Logger log = ProcessEnvironment.getDefaultLog(ModelFarmOnDemand.class); private static final FunctionalInterfaces.ThrowingBiConsumer DO_LOCKED_FUNCTION = - getDoLockedConsumer(GetDataLockType.LTM_READ_LOCK); + getDoLockedConsumer(GetDataLockType.LTM_READ_LOCK); private static class QueueAndCallback { private final Queue queue; @@ -48,9 +48,9 @@ private QueueAndCallback(Queue queue, Runnable callback) { } /** - * A queue of queues. Each "inner queue" (the elements of the {@code execQueue} represents one - * on-demand pricing request. The {@link #execute()} method will drain each inner queue before - * removing that queue from the {@code execQueue}. + * A queue of queues. Each "inner queue" (the elements of the {@code execQueue} represents one on-demand pricing + * request. The {@link #execute()} method will drain each inner queue before removing that queue from the + * {@code execQueue}. * * Must always acquire lock on this object before using it. */ @@ -68,15 +68,13 @@ public ModelFarmOnDemand(int nThreads, Model model) { } /** - * Submit a request to {@link Model#exec execute} the {@link #model}. Can be called either with - * or without a LiveTableMonitor lock -- the decision of whether/how to acquire a lock is left - * to the {@link #DO_LOCKED_FUNCTION}. All keys represented by the data in the - * {@code dataManager} will be processed. + * Submit a request to {@link Model#exec execute} the {@link #model}. Can be called either with or without a + * LiveTableMonitor lock -- the decision of whether/how to acquire a lock is left to the + * {@link #DO_LOCKED_FUNCTION}. 
All keys represented by the data in the {@code dataManager} will be processed. * - * @param dataManager The {@code RowDataManager} that will provide data for the pricing - * requests. - * @param callback A callback function to run after all keys have been processed. Can be - * {@code null}, in which case it will be ignored. + * @param dataManager The {@code RowDataManager} that will provide data for the pricing requests. + * @param callback A callback function to run after all keys have been processed. Can be {@code null}, in which case + * it will be ignored. */ @SuppressWarnings("unused") public void requestUpdate(ROWDATAMANAGERTYPE dataManager, Runnable callback) { @@ -84,35 +82,32 @@ public void requestUpdate(ROWDATAMANAGERTYPE dataManager, Runnable callback) { } /** - * Submit a request to {@link Model#exec execute} the {@link #model}. Can be called either with - * or without a LiveTableMonitor lock -- the decision of whether/how to acquire a lock is left - * to the {@link #DO_LOCKED_FUNCTION}. + * Submit a request to {@link Model#exec execute} the {@link #model}. Can be called either with or without a + * LiveTableMonitor lock -- the decision of whether/how to acquire a lock is left to the + * {@link #DO_LOCKED_FUNCTION}. * - * @param dataManager The {@code RowDataManager} that will provide data for the pricing - * requests. - * @param callback A callback function to run after all keys have been processed. Can be - * {@code null}, in which case it will be ignored. - * @param keys They keys for which data should be passed to the model. If {@code keys == null}, - * then all keys represented by the data in the {@code dataManager} will be processed. + * @param dataManager The {@code RowDataManager} that will provide data for the pricing requests. + * @param callback A callback function to run after all keys have been processed. Can be {@code null}, in which case + * it will be ignored. + * @param keys They keys for which data should be passed to the model. 
If {@code keys == null}, then all keys + * represented by the data in the {@code dataManager} will be processed. */ @SuppressWarnings("WeakerAccess") - public void requestUpdate(ROWDATAMANAGERTYPE dataManager, Runnable callback, - Set keys) { + public void requestUpdate(ROWDATAMANAGERTYPE dataManager, Runnable callback, Set keys) { if (keys != null && keys.isEmpty()) { return; } final DynamicTable dataManagerTable = dataManager.table(); - final Queue dataToEval = - new ArrayDeque<>(keys != null ? keys.size() : dataManagerTable.intSize()); + final Queue dataToEval = new ArrayDeque<>(keys != null ? keys.size() : dataManagerTable.intSize()); // get data for all keys under the same lock DO_LOCKED_FUNCTION.accept((usePrev) -> { final Index index = dataManagerTable.getIndex(); if (index.empty()) { log.warn().append(ModelFarmOnDemand.class.getSimpleName() + ": ") - .append("Table is empty. Nothing to price.").endl(); + .append("Table is empty. Nothing to price.").endl(); callback.run(); return; } @@ -120,12 +115,10 @@ public void requestUpdate(ROWDATAMANAGERTYPE dataManager, Runnable callback, for (Index.Iterator iter = index.iterator(); iter.hasNext();) { final long idx = iter.nextLong(); - // if a `keys` set was provided, then only enqueue keys in the `dataManager` that - // are also in the set. + // if a `keys` set was provided, then only enqueue keys in the `dataManager` that are also in the set. final boolean includeThisKey; if (keys != null) { - final KEYTYPE key = - usePrev ? dataManager.uniqueIdPrev(idx) : dataManager.uniqueIdCurrent(idx); + final KEYTYPE key = usePrev ? dataManager.uniqueIdPrev(idx) : dataManager.uniqueIdCurrent(idx); includeThisKey = keys.contains(key); } else { includeThisKey = true; @@ -147,8 +140,7 @@ public void requestUpdate(ROWDATAMANAGERTYPE dataManager, Runnable callback, } synchronized (execQueue) { - // Enqueue the `dataToEval` -- i.e. the queue of all keys to price as part of this - // request. + // Enqueue the `dataToEval` -- i.e. 
the queue of all keys to price as part of this request. execQueue.add(new QueueAndCallback<>(dataToEval, callback)); execQueue.notifyAll(); } @@ -163,8 +155,7 @@ protected void execute() throws InterruptedException { final long t0 = System.nanoTime(); final DATATYPE data; - QueueAndCallback queueAndCallbackOneRequest; // A queue of all data pertaining to - // one request + QueueAndCallback queueAndCallbackOneRequest; // A queue of all data pertaining to one request final boolean queueEmpty; synchronized (execQueue) { @@ -177,8 +168,7 @@ protected void execute() throws InterruptedException { data = queueAndCallbackOneRequest.queue.poll(); Assert.neqNull(data, "data"); - // If `data` was the data for the last key in this request, then remove this request - // from the `execQueue`: + // If `data` was the data for the last key in this request, then remove this request from the `execQueue`: queueEmpty = queueAndCallbackOneRequest.queue.isEmpty(); if (queueEmpty) { execQueue.remove(); @@ -193,14 +183,13 @@ protected void execute() throws InterruptedException { model.exec(data); t2 = System.nanoTime(); } else { - // should be impossible; data is only populated with - // io.deephaven.modelfarm.RowDataManager.newData + // should be impossible; data is only populated with io.deephaven.modelfarm.RowDataManager.newData throw new IllegalStateException("Data is null!"); } if (LOG_PERF) { - log.warn("ModelFarmOnDemand.execute PERFORMANCE: all=" + (t2 - t0) / 1000 + " take=" - + (t1 - t0) / 100 + " exec=" + (t2 - t1) / 1000); + log.warn("ModelFarmOnDemand.execute PERFORMANCE: all=" + (t2 - t0) / 1000 + " take=" + (t1 - t0) / 100 + + " exec=" + (t2 - t1) / 1000); } if (queueEmpty && queueAndCallbackOneRequest.callback != null) { diff --git a/ModelFarm/src/main/java/io/deephaven/modelfarm/ModelFarmRealTime.java b/ModelFarm/src/main/java/io/deephaven/modelfarm/ModelFarmRealTime.java index cac27bea473..a0f6b68eace 100644 --- 
a/ModelFarm/src/main/java/io/deephaven/modelfarm/ModelFarmRealTime.java +++ b/ModelFarm/src/main/java/io/deephaven/modelfarm/ModelFarmRealTime.java @@ -15,31 +15,28 @@ import java.util.concurrent.atomic.AtomicBoolean; /** - * A multithreaded resource to execute data driven models. Every time a row of the data table ticks, - * the unique identifier is queued for execution. Once the security reaches the top of the execution - * queue, the most recent data for the unique identifier is used for execution. + * A multithreaded resource to execute data driven models. Every time a row of the data table ticks, the unique + * identifier is queued for execution. Once the security reaches the top of the execution queue, the most recent data + * for the unique identifier is used for execution. *

    - * This is useful for real-time processing, where executing unique identifiers as fast as possible - * is desired. + * This is useful for real-time processing, where executing unique identifiers as fast as possible is desired. *

    - * The execution priority is determined by how long it has been since the data changed. The - * prioritizer can be used to bump a unique identifier to a higher execution priority. + * The execution priority is determined by how long it has been since the data changed. The prioritizer can be used to + * bump a unique identifier to a higher execution priority. * * @param unique ID key type * @param data type */ public class ModelFarmRealTime> - extends RDMModelFarm { + extends RDMModelFarm { private static final Logger log = LoggerFactory.getLogger(ModelFarmRealTime.class); - private static final boolean LOG_PERF = Configuration.getInstance() - .getBooleanWithDefault("ModelFarm.logModelFarmRealTimePerformance", false); - - private final ModelFarmBase.GetDataLockType GETDATA_LOCK_TYPE = - ModelFarmBase.GetDataLockType.valueOf(Configuration.getInstance() - .getStringWithDefault("ModelFarm.ModelFarmRealTime.getDataLockType", "LTM_READ_LOCK")); - private final KeyedPriorityBlockingQueue execQueue = - new KeyedPriorityBlockingQueue<>(); + private static final boolean LOG_PERF = + Configuration.getInstance().getBooleanWithDefault("ModelFarm.logModelFarmRealTimePerformance", false); + + private final ModelFarmBase.GetDataLockType GETDATA_LOCK_TYPE = ModelFarmBase.GetDataLockType.valueOf(Configuration + .getInstance().getStringWithDefault("ModelFarm.ModelFarmRealTime.getDataLockType", "LTM_READ_LOCK")); + private final KeyedPriorityBlockingQueue execQueue = new KeyedPriorityBlockingQueue<>(); private final ExecPrioritizer prioritizer; private final ModelFarmBase.MostRecentDataGetter mostRecentDataGetter; private final Map lastPriorityMap = new ConcurrentHashMap<>(); @@ -51,17 +48,14 @@ public class ModelFarmRealTime model, - final ROWDATAMANAGERTYPE dataManager, - final ExecPrioritizer prioritizer) { + public ModelFarmRealTime(final int nThreads, final Model model, final ROWDATAMANAGERTYPE dataManager, + final ExecPrioritizer prioritizer) { super(nThreads, model, 
dataManager); this.prioritizer = prioritizer; - log.warn().append("ModelFarmRealTime lock type: ").append(GETDATA_LOCK_TYPE.toString()) - .endl(); + log.warn().append("ModelFarmRealTime lock type: ").append(GETDATA_LOCK_TYPE.toString()).endl(); this.mostRecentDataGetter = getMostRecentDataFactory(GETDATA_LOCK_TYPE); } @@ -97,21 +91,17 @@ protected void execute() throws InterruptedException { final long t1 = System.nanoTime(); // Whether *any* thread is currently evaling for this symbol: - final AtomicBoolean isEvaling = - isEvalingMap.computeIfAbsent(key, k -> new AtomicBoolean(false)); + final AtomicBoolean isEvaling = isEvalingMap.computeIfAbsent(key, k -> new AtomicBoolean(false)); - // If another thread is already evaling this key, then reenque the key and move on to the - // next key. + // If another thread is already evaling this key, then reenque the key and move on to the next key. if (!isEvaling.compareAndSet(false, true)) { final Integer priority = lastPriorityMap.get(key); execQueue.enqueue(key, priority == null ? 
Integer.MIN_VALUE : priority); final long t2 = System.nanoTime(); if (LOG_PERF) { - log.warn().append("ModelFarmRealTime.execute PERFORMANCE: isExec=false all=") - .append((t2 - t0) / 1000) - .append(" take=").append((t1 - t0) / 1000).append(" enqueue=") - .append((t2 - t1) / 1000).endl(); + log.warn().append("ModelFarmRealTime.execute PERFORMANCE: isExec=false all=").append((t2 - t0) / 1000) + .append(" take=").append((t1 - t0) / 1000).append(" enqueue=").append((t2 - t1) / 1000).endl(); } return; @@ -135,19 +125,15 @@ protected void execute() throws InterruptedException { } if (LOG_PERF) { - log.warn().append("ModelFarmRealTime.execute PERFORMANCE: isExec=true all=") - .append((t4 - t0) / 1000) - .append(" take=").append((t1 - t0) / 1000).append(" get=") - .append((t2 - t1) / 1000) - .append(" prio=").append((t3 - t2) / 1000).append(" exec=" + (t4 - t3) / 1000) - .endl(); + log.warn().append("ModelFarmRealTime.execute PERFORMANCE: isExec=true all=").append((t4 - t0) / 1000) + .append(" take=").append((t1 - t0) / 1000).append(" get=").append((t2 - t1) / 1000) + .append(" prio=").append((t3 - t2) / 1000).append(" exec=" + (t4 - t3) / 1000).endl(); } } finally { if (!isEvaling.compareAndSet(true, false)) { - // noinspection ThrowFromFinallyBlock -- once a thread sets isEvaling to true for a - // key, no other thread should set it to false. - throw new IllegalStateException( - "isEvaling is false but should be true for key " + key); + // noinspection ThrowFromFinallyBlock -- once a thread sets isEvaling to true for a key, no other thread + // should set it to false. + throw new IllegalStateException("isEvaling is false but should be true for key " + key); } } } @@ -156,8 +142,8 @@ protected void execute() throws InterruptedException { * Request an update for the given {@code key} with the maximum priority level. * * @param key The key to update. 
- * @return {@code true} if the {@code element} was newly inserted to the queue or reinserted - * with a higher priority, otherwise {@code false}. + * @return {@code true} if the {@code element} was newly inserted to the queue or reinserted with a higher priority, + * otherwise {@code false}. */ @SuppressWarnings("unused") public boolean requestUpdateMaxPriority(KEYTYPE key) { @@ -169,8 +155,8 @@ public boolean requestUpdateMaxPriority(KEYTYPE key) { * * @param key The key to update. * @param priority The priority with which the key should be updated. - * @return {@code true} if the {@code element} was newly inserted to the queue or reinserted - * with a higher priority, otherwise {@code false}. + * @return {@code true} if the {@code element} was newly inserted to the queue or reinserted with a higher priority, + * otherwise {@code false}. */ @SuppressWarnings("WeakerAccess") public boolean requestUpdate(KEYTYPE key, int priority) { diff --git a/ModelFarm/src/main/java/io/deephaven/modelfarm/ModelFarmTick.java b/ModelFarm/src/main/java/io/deephaven/modelfarm/ModelFarmTick.java index 930b0963a60..607970512d2 100644 --- a/ModelFarm/src/main/java/io/deephaven/modelfarm/ModelFarmTick.java +++ b/ModelFarm/src/main/java/io/deephaven/modelfarm/ModelFarmTick.java @@ -17,9 +17,9 @@ /** - * A multithreaded resource to execute data driven models. Every time a row of the data table ticks, - * the unique identifier is executed using the data from the tick. Successive executions for a - * unique identifier are processed in order. + * A multithreaded resource to execute data driven models. Every time a row of the data table ticks, the unique + * identifier is executed using the data from the tick. Successive executions for a unique identifier are processed in + * order. *

    * This is useful for executing against historical data at a regular interval. * @@ -27,19 +27,19 @@ * @param data type */ public class ModelFarmTick> - extends RDMModelFarm { + extends RDMModelFarm { private static final Logger log = LoggerFactory.getLogger(ModelFarmTick.class); - private static final boolean LOG_PERF = Configuration.getInstance() - .getBooleanWithDefault("ModelFarm.logModelFarmTickPerformance", false); + private static final boolean LOG_PERF = + Configuration.getInstance().getBooleanWithDefault("ModelFarm.logModelFarmTickPerformance", false); private final int maxQueueSize; /** * A Queue for a unique identifier. *

    - * The queue uses a lock to ensure that only one item in the UQueue executes at a given time. - * Some models are stateful, and the order of input data matters. This locking ensures that - * models receive data in an expected order (e.g. time). + * The queue uses a lock to ensure that only one item in the UQueue executes at a given time. Some models are + * stateful, and the order of input data matters. This locking ensures that models receive data in an expected order + * (e.g. time). * * @param data type. */ @@ -58,13 +58,11 @@ private static class UQueue { * * @param nThreads number of worker threads. * @param model model to execute. - * @param dataManager interface for accessing and querying data contained in rows of a dynamic - * table. - * @param maxQueueSize number of elements in the work queue backlog before the blocking new - * updates. + * @param dataManager interface for accessing and querying data contained in rows of a dynamic table. + * @param maxQueueSize number of elements in the work queue backlog before the blocking new updates. */ - public ModelFarmTick(final int nThreads, final Model model, - final ROWDATAMANAGERTYPE dataManager, final int maxQueueSize) { + public ModelFarmTick(final int nThreads, final Model model, final ROWDATAMANAGERTYPE dataManager, + final int maxQueueSize) { super(nThreads, model, dataManager); this.maxQueueSize = maxQueueSize; this.queue = new ArrayDeque<>(this.maxQueueSize); @@ -105,8 +103,8 @@ private void updateQueue(final Set keys) { while (queue.size() >= maxQueueSize || !queue.offer(uqueue)) { if (getState() == State.WAITING) { throw new IllegalStateException( - "Queue is full, but model farm is not started! Possible deadlock. Consider increasing maxQueueSize: maxQueueSize=" - + maxQueueSize); + "Queue is full, but model farm is not started! Possible deadlock. 
Consider increasing maxQueueSize: maxQueueSize=" + + maxQueueSize); } try { @@ -142,8 +140,8 @@ protected void execute() throws InterruptedException { } } - // notify that the queue has changed, so that other threads can check if queue is empty, - // and so that updateQueue() can see if queue has more capacity + // notify that the queue has changed, so that other threads can check if queue is empty, and so that + // updateQueue() can see if queue has more capacity this.notifyAll(); } @@ -167,12 +165,10 @@ protected void execute() throws InterruptedException { final long t7 = System.nanoTime(); if (LOG_PERF) { - log.warn().append("ModelFarmTick.execute PERFORMANCE: all=").append((t7 - t0) / 1000) - .append(" take=") - .append((t1 - t0) / 1000).append(" lock=" + (t2 - t1) / 1000).append(" poll=") - .append((t3 - t2) / 1000) - .append(" exec=").append((t4 - t3) / 1000).append(" unlock=") - .append((t6 - t5) / 1000).endl(); + log.warn().append("ModelFarmTick.execute PERFORMANCE: all=").append((t7 - t0) / 1000).append(" take=") + .append((t1 - t0) / 1000).append(" lock=" + (t2 - t1) / 1000).append(" poll=") + .append((t3 - t2) / 1000) + .append(" exec=").append((t4 - t3) / 1000).append(" unlock=").append((t6 - t5) / 1000).endl(); } } diff --git a/ModelFarm/src/main/java/io/deephaven/modelfarm/ModelInputDeserializer.java b/ModelFarm/src/main/java/io/deephaven/modelfarm/ModelInputDeserializer.java index 5dde40f8406..80de67b0150 100644 --- a/ModelFarm/src/main/java/io/deephaven/modelfarm/ModelInputDeserializer.java +++ b/ModelFarm/src/main/java/io/deephaven/modelfarm/ModelInputDeserializer.java @@ -42,8 +42,7 @@ public ModelInputDeserializer(final Class c, final String filename) throws IO */ public synchronized T next() throws IOException, ClassNotFoundException { if (isClosed) { - throw new RuntimeException( - "Attempting to access the next value after the stream is closed."); + throw new RuntimeException("Attempting to access the next value after the stream is closed."); 
} return c.cast(ois.readObject()); diff --git a/ModelFarm/src/main/java/io/deephaven/modelfarm/RDMModelFarm.java b/ModelFarm/src/main/java/io/deephaven/modelfarm/RDMModelFarm.java index 71f595b0136..c66f7c641bb 100644 --- a/ModelFarm/src/main/java/io/deephaven/modelfarm/RDMModelFarm.java +++ b/ModelFarm/src/main/java/io/deephaven/modelfarm/RDMModelFarm.java @@ -19,11 +19,10 @@ import java.util.concurrent.locks.ReentrantReadWriteLock; /** - * Abstract class for ModelFarm implementations that will take data from a {@link RowDataManager}. - * This class tracks the mappings between each key and the corresponding index in the - * {@code RowDataManager}'s {@link RowDataManager#table() table}. Each row of this table should - * contain all of the data necessary to populate an instance of {@code DATATYPE}, which will then be - * passed to the {@link ModelFarmBase#model model}. + * Abstract class for ModelFarm implementations that will take data from a {@link RowDataManager}. This class tracks the + * mappings between each key and the corresponding index in the {@code RowDataManager}'s {@link RowDataManager#table() + * table}. Each row of this table should contain all of the data necessary to populate an instance of {@code DATATYPE}, + * which will then be passed to the {@link ModelFarmBase#model model}. * * @param The type of the keys (e.g. {@link io.deephaven.modelfarm.fitterfarm.FitScope}). * @param The type of the data (e.g. @@ -32,7 +31,7 @@ * {@link io.deephaven.modelfarm.fitterfarm.futures.FuturesFitDataManager}). 
*/ public abstract class RDMModelFarm> - extends ModelFarmBase { + extends ModelFarmBase { private static final Logger log = LoggerFactory.getLogger(RDMModelFarm.class); private static final long NO_ENTRY_VALUE = -1; @@ -43,10 +42,8 @@ public abstract class RDMModelFarm keyIndexPrev = - new TObjectLongHashMap<>(10, 0.5f, NO_ENTRY_VALUE); - private final TObjectLongMap keyIndexDelta = - new TObjectLongHashMap<>(10, 0.5f, NO_ENTRY_VALUE); + private final TObjectLongMap keyIndexPrev = new TObjectLongHashMap<>(10, 0.5f, NO_ENTRY_VALUE); + private final TObjectLongMap keyIndexDelta = new TObjectLongHashMap<>(10, 0.5f, NO_ENTRY_VALUE); // keep the listener so that it doesn't get garbage collected @SuppressWarnings("FieldCanBeLocal") @@ -57,12 +54,10 @@ public abstract class RDMModelFarm model, - final ROWDATAMANAGERTYPE dataManager) { + public RDMModelFarm(final int nThreads, final Model model, final ROWDATAMANAGERTYPE dataManager) { super(nThreads, model); this.dataManager = Require.neqNull(dataManager, "dataManager"); } @@ -118,8 +113,7 @@ private void addKeyIndex(final Index index) { } /** - * Process a change to the data table. If the data table is being accessed, use the protected - * column source fields. + * Process a change to the data table. If the data table is being accessed, use the protected column source fields. * * @param added new indexes added to the data table * @param removed indexes removed from the data table @@ -128,8 +122,7 @@ private void addKeyIndex(final Index index) { protected abstract void onDataUpdate(Index added, Index removed, Index modified); /** - * Populates a data object with data from the most recent row with the provided unique - * identifier. + * Populates a data object with data from the most recent row with the provided unique identifier. 
* * @param data data structure to populate * @param key key to load data for @@ -137,10 +130,9 @@ private void addKeyIndex(final Index index) { * @return true if the data loaded; false if there was no data to load. */ private boolean loadData(final DATATYPE data, final KEYTYPE key, final boolean usePrev) { - // if this is called in the update loop, keyIndex should be updated and accessed in the same - // thread. - // if this is called outside the update loop, the access should be in the LTM lock, and the - // update should be in the update loop + // if this is called in the update loop, keyIndex should be updated and accessed in the same thread. + // if this is called outside the update loop, the access should be in the LTM lock, and the update should be in + // the update loop // therefore, keyIndex does not need synchronization. long i; @@ -165,8 +157,8 @@ private boolean loadData(final DATATYPE data, final KEYTYPE key, final boolean u } if (i == NO_ENTRY_VALUE) { - log.warn().append("Attempting to get row data for a key with no index. key=") - .append(key.toString()).endl(); + log.warn().append("Attempting to get row data for a key with no index. key=").append(key.toString()) + .endl(); return false; } @@ -175,28 +167,26 @@ private boolean loadData(final DATATYPE data, final KEYTYPE key, final boolean u } /** - * Returns a function that takes a key and returns an instance of {@code DATATYPE} that contains - * the most recent data for that key. The returned function will retrieve the data using the - * specified {@code lockType}. + * Returns a function that takes a key and returns an instance of {@code DATATYPE} that contains the most recent + * data for that key. The returned function will retrieve the data using the specified {@code lockType}. * * @param lockType locking algorithm used to ensure that data read from the table is consistent. * @return function to retrieve the most recent row data for a unique identifier. 
*/ @SuppressWarnings("WeakerAccess") protected ModelFarmBase.MostRecentDataGetter getMostRecentDataFactory( - final ModelFarmBase.GetDataLockType lockType) { - // Get the "doLockedConsumer", which will call the FitDataPopulator (i.e. the lambda below) - // using the configured + final ModelFarmBase.GetDataLockType lockType) { + // Get the "doLockedConsumer", which will call the FitDataPopulator (i.e. the lambda below) using the configured // lock type and the appropriate value for 'usePrev'. final FunctionalInterfaces.ThrowingBiConsumer doLockedConsumer = - getDoLockedConsumer(lockType); + getDoLockedConsumer(lockType); return (key) -> { final DATATYPE data = dataManager.newData(); final boolean[] isOk = new boolean[1]; // (This lambda is a FitDataPopulator.) doLockedConsumer.accept(usePrev -> isOk[0] = loadData(data, key, usePrev), - (NotificationStepSource) dataManager.table()); + (NotificationStepSource) dataManager.table()); if (isOk[0]) { return data; diff --git a/ModelFarm/src/main/java/io/deephaven/modelfarm/RowDataManager.java b/ModelFarm/src/main/java/io/deephaven/modelfarm/RowDataManager.java index 550929dba2a..a800a96f94a 100644 --- a/ModelFarm/src/main/java/io/deephaven/modelfarm/RowDataManager.java +++ b/ModelFarm/src/main/java/io/deephaven/modelfarm/RowDataManager.java @@ -39,8 +39,8 @@ public interface RowDataManager { KEYTYPE uniqueIdCurrent(final long index); /** - * Gets the previous unique identifier value for a row. One column of each table is designated - * as a unique identifier for data rows. + * Gets the previous unique identifier value for a row. One column of each table is designated as a unique + * identifier for data rows. *

    * This function should only be called during an update loop or while holding the LTM lock. * @@ -52,9 +52,9 @@ public interface RowDataManager { /** * Populates a data object with data from a table row. *

    - * This method should be called while the LTM lock is held. This can occur either during the - * update loop or the LTM lock can be acquired outside the update loop. If the LTM lock is not - * held, the loaded data can be inconsistent or corrupt. + * This method should be called while the LTM lock is held. This can occur either during the update loop or the LTM + * lock can be acquired outside the update loop. If the LTM lock is not held, the loaded data can be inconsistent or + * corrupt. * * @param data data structure to populate * @param index table index of the row to load data from diff --git a/ModelFarm/src/main/java/io/deephaven/modelfarm/util/KeyedPriorityBlockingQueue.java b/ModelFarm/src/main/java/io/deephaven/modelfarm/util/KeyedPriorityBlockingQueue.java index adc87f40937..11a6d2eaf0c 100644 --- a/ModelFarm/src/main/java/io/deephaven/modelfarm/util/KeyedPriorityBlockingQueue.java +++ b/ModelFarm/src/main/java/io/deephaven/modelfarm/util/KeyedPriorityBlockingQueue.java @@ -11,8 +11,8 @@ import java.util.PriorityQueue; /** - * A priority blocking queue that maintains one element per key. If a later request comes in of - * higher priority, the lower priority item is replaced by the higher priority item. + * A priority blocking queue that maintains one element per key. If a later request comes in of higher priority, the + * lower priority item is replaced by the higher priority item. * * The element type must be usable in a hash map. */ @@ -52,13 +52,13 @@ public int compareTo(@NotNull Enqueued o) { private final Map map = new HashMap<>(); /** - * Add an element to the queue. If the element is already in the queue, the element's priority - * will be the higher of the existing element and the new element. + * Add an element to the queue. If the element is already in the queue, the element's priority will be the higher of + * the existing element and the new element. * * @param element element to enqueue. * @param priority priority of the element. 
- * @return {@code true} if the {@code element} was newly inserted to the queue or reinserted - * with a higher priority, otherwise {@code false}. + * @return {@code true} if the {@code element} was newly inserted to the queue or reinserted with a higher priority, + * otherwise {@code false}. */ public synchronized boolean enqueue(final E element, final int priority) { final Enqueued e = map.get(element); @@ -83,8 +83,7 @@ public synchronized boolean enqueue(final E element, final int priority) { } /** - * Retrieves and removes the head of this queue, waiting if necessary until an element becomes - * available. + * Retrieves and removes the head of this queue, waiting if necessary until an element becomes available. * * @return the head of this queue * @throws InterruptedException if interrupted while waiting diff --git a/ModelFarm/src/main/java/io/deephaven/modelfarm/util/ModelFarmUtils.java b/ModelFarm/src/main/java/io/deephaven/modelfarm/util/ModelFarmUtils.java index 6ae5de87f6e..bb5c21bb681 100644 --- a/ModelFarm/src/main/java/io/deephaven/modelfarm/util/ModelFarmUtils.java +++ b/ModelFarm/src/main/java/io/deephaven/modelfarm/util/ModelFarmUtils.java @@ -24,17 +24,16 @@ private ModelFarmUtils() {} * @param colTypes required column types */ public static void requireTable(final String tableName, final Table t, final String[] colNames, - final Class[] colTypes) { + final Class[] colTypes) { Require.eq(colNames.length, "colNames.length", colTypes.length, "colTypes.length"); for (int i = 0; i < colNames.length; i++) { final String cn = colNames[i]; final Class ct = colTypes[i]; - Require.eqTrue(t.hasColumns(cn), - "Table is missing column. tableName=" + tableName + " columnName=" + cn); + Require.eqTrue(t.hasColumns(cn), "Table is missing column. tableName=" + tableName + " columnName=" + cn); final Class cta = t.getColumn(cn).getType(); - Require.eqTrue(cta.equals(ct), "Table column is of the wrong type. 
tableName=" - + tableName + " columnName=" + cn + " typeRequired=" + ct + " typeActual=" + cta); + Require.eqTrue(cta.equals(ct), "Table column is of the wrong type. tableName=" + tableName + " columnName=" + + cn + " typeRequired=" + ct + " typeActual=" + cta); } } diff --git a/ModelFarm/src/test/java/io/deephaven/modelfarm/ConditonalModelsTest.java b/ModelFarm/src/test/java/io/deephaven/modelfarm/ConditonalModelsTest.java index bf4974b72ac..dfcc364f0e0 100644 --- a/ModelFarm/src/test/java/io/deephaven/modelfarm/ConditonalModelsTest.java +++ b/ModelFarm/src/test/java/io/deephaven/modelfarm/ConditonalModelsTest.java @@ -43,13 +43,11 @@ public void setUp() throws Exception { models = new Model[] {m1, m2}; // noinspection unchecked predicates = new BiPredicate[] { - (BiPredicate) (d, s) -> d.getUnderlyingId() == 3 - && s, + (BiPredicate) (d, s) -> d.getUnderlyingId() == 3 && s, (BiPredicate) (d, s) -> s }; stateMap = new HashMap<>(); - cm = new ConditionalModels<>(models, predicates, stateMap, - EquityFitDataOptionPrices::getUnderlyingId); + cm = new ConditionalModels<>(models, predicates, stateMap, EquityFitDataOptionPrices::getUnderlyingId); } public void testLock() { diff --git a/ModelFarm/src/test/java/io/deephaven/modelfarm/TestModelFarm.java b/ModelFarm/src/test/java/io/deephaven/modelfarm/TestModelFarm.java index f1863b51147..48af4cf7412 100644 --- a/ModelFarm/src/test/java/io/deephaven/modelfarm/TestModelFarm.java +++ b/ModelFarm/src/test/java/io/deephaven/modelfarm/TestModelFarm.java @@ -20,41 +20,35 @@ public class TestModelFarm extends TestCase { - private final long testShutdownTimeoutSecs = Configuration.getInstance() - .getIntegerWithDefault("TestModelFarm.testShutdownTimeoutSecs", 1); + private final long testShutdownTimeoutSecs = + Configuration.getInstance().getIntegerWithDefault("TestModelFarm.testShutdownTimeoutSecs", 1); private final int nModelFarmThreadsDefault = 8; /** - * Ensure that the ModelFarm terminates immediately if it is shut down 
while not busy with an - * empty queue. + * Ensure that the ModelFarm terminates immediately if it is shut down while not busy with an empty queue. */ public void testModelFarmNoWorkShutdown() throws Exception { final ModelFarmTick> modelFarmTick = - getModelFarmTick(nModelFarmThreadsDefault, 0, 0, null); + getModelFarmTick(nModelFarmThreadsDefault, 0, 0, null); - Require.eq(modelFarmTick.getState(), "modelFarmTick.getState()", - ModelFarmBase.State.WAITING); + Require.eq(modelFarmTick.getState(), "modelFarmTick.getState()", ModelFarmBase.State.WAITING); modelFarmTick.start(); - Require.eq(modelFarmTick.getState(), "modelFarmTick.getState()", - ModelFarmBase.State.RUNNING); + Require.eq(modelFarmTick.getState(), "modelFarmTick.getState()", ModelFarmBase.State.RUNNING); - Thread.sleep(100); // Give the model farm threads a bit of time to start up and get far - // enough to wait on the queue. + Thread.sleep(100); // Give the model farm threads a bit of time to start up and get far enough to wait on the + // queue. 
/* Since no work is being done, we should be able to terminate very quickly: */ - boolean terminated = - modelFarmTick.shutdownAndAwaitTermination(testShutdownTimeoutSecs, TimeUnit.SECONDS); - Require.requirement(terminated, "terminated == true", terminated, "terminated", - testShutdownTimeoutSecs, "testShutdownTimeoutSecs"); - Require.eq(modelFarmTick.getState(), "modelFarmTick.getState()", - ModelFarmBase.State.TERMINATED); + boolean terminated = modelFarmTick.shutdownAndAwaitTermination(testShutdownTimeoutSecs, TimeUnit.SECONDS); + Require.requirement(terminated, "terminated == true", terminated, "terminated", testShutdownTimeoutSecs, + "testShutdownTimeoutSecs"); + Require.eq(modelFarmTick.getState(), "modelFarmTick.getState()", ModelFarmBase.State.TERMINATED); } /** - * Ensure that the ModelFarm drains its work queue and then exits when - * {@link ModelFarmBase#shutdown()} and {@link ModelFarmBase#awaitTermination}, but not - * {@link ModelFarmBase#terminate()}, are called. + * Ensure that the ModelFarm drains its work queue and then exits when {@link ModelFarmBase#shutdown()} and + * {@link ModelFarmBase#awaitTermination}, but not {@link ModelFarmBase#terminate()}, are called. 
*/ public void testModelFarmBusyShutdownNoTerminate() throws Exception { final long workerSleepMs = 1000 * 3; @@ -62,106 +56,89 @@ public void testModelFarmBusyShutdownNoTerminate() throws Exception { final int nKeys = nModelFarmThreads / 2; final AtomicInteger successCounter = new AtomicInteger(0); final ModelFarmTick> modelFarmTick = - getModelFarmTick(nModelFarmThreads, nKeys, workerSleepMs, successCounter); + getModelFarmTick(nModelFarmThreads, nKeys, workerSleepMs, successCounter); - Require.eq(modelFarmTick.getState(), "modelFarmTick.getState()", - ModelFarmBase.State.WAITING); + Require.eq(modelFarmTick.getState(), "modelFarmTick.getState()", ModelFarmBase.State.WAITING); modelFarmTick.start(); - Require.eq(modelFarmTick.getState(), "modelFarmTick.getState()", - ModelFarmBase.State.RUNNING); + Require.eq(modelFarmTick.getState(), "modelFarmTick.getState()", ModelFarmBase.State.RUNNING); - Thread.sleep(100); // Give the model farm threads a bit of time to start up and get far - // enough to pick up some work. + Thread.sleep(100); // Give the model farm threads a bit of time to start up and get far enough to pick up some + // work. - Require.eq(modelFarmTick.getState(), "modelFarmTick.getState()", - ModelFarmBase.State.RUNNING); + Require.eq(modelFarmTick.getState(), "modelFarmTick.getState()", ModelFarmBase.State.RUNNING); - // Shut down the model farm. Since the worker threads will sleep for (workerSleepMs) - // milliseconds, + // Shut down the model farm. Since the worker threads will sleep for (workerSleepMs) milliseconds, // the model farm will still be busy -- so its state should be SHUTDOWN, not TERMINATED. modelFarmTick.shutdown(); - Require.eq(modelFarmTick.getState(), "modelFarmTick.getState()", - ModelFarmBase.State.SHUTDOWN); + Require.eq(modelFarmTick.getState(), "modelFarmTick.getState()", ModelFarmBase.State.SHUTDOWN); // Test awaitTermination(), without calling terminate(). 
- // This should return true, we are waiting for (workerSleepMs * 2) milliseconds but all work - // should be finished within (workerSleepMs) milliseconds. - boolean terminated = - modelFarmTick.awaitTermination(workerSleepMs * 2, TimeUnit.MILLISECONDS); + // This should return true, we are waiting for (workerSleepMs * 2) milliseconds but all work should be finished + // within (workerSleepMs) milliseconds. + boolean terminated = modelFarmTick.awaitTermination(workerSleepMs * 2, TimeUnit.MILLISECONDS); Require.eqTrue(terminated, "terminated"); - Require.eq(modelFarmTick.getState(), "modelFarmTick.getState()", - ModelFarmBase.State.TERMINATED); + Require.eq(modelFarmTick.getState(), "modelFarmTick.getState()", ModelFarmBase.State.TERMINATED); // Ensure all jobs completed successfully (i.e. were not interrupted) Require.eq(successCounter.get(), "successCounter.get()", nKeys, "nKeys"); } /** - * Ensure that the ModelFarm interrupts its worker threads, stops processing its queue, and - * terminates immediately if it is {@link ModelFarm#terminate() terminated} while busy and with - * work in its queue. + * Ensure that the ModelFarm interrupts its worker threads, stops processing its queue, and terminates immediately + * if it is {@link ModelFarm#terminate() terminated} while busy and with work in its queue. 
*/ public void testModelFarmBusyShutdownAndTerminate() throws Exception { final long workerSleepMs = 1000 * 3; final int nKeys = nModelFarmThreadsDefault * 8; final AtomicInteger successCounter = new AtomicInteger(0); final ModelFarmTick> modelFarmTick = - getModelFarmTick(nModelFarmThreadsDefault, nKeys, workerSleepMs, successCounter); + getModelFarmTick(nModelFarmThreadsDefault, nKeys, workerSleepMs, successCounter); - Require.eq(modelFarmTick.getState(), "modelFarmTick.getState()", - ModelFarmBase.State.WAITING); + Require.eq(modelFarmTick.getState(), "modelFarmTick.getState()", ModelFarmBase.State.WAITING); modelFarmTick.start(); - Require.eq(modelFarmTick.getState(), "modelFarmTick.getState()", - ModelFarmBase.State.RUNNING); + Require.eq(modelFarmTick.getState(), "modelFarmTick.getState()", ModelFarmBase.State.RUNNING); - Thread.sleep(100); // Give the model farm threads a bit of time to start up and get far - // enough to pick up some work. + Thread.sleep(100); // Give the model farm threads a bit of time to start up and get far enough to pick up some + // work. - Require.eq(modelFarmTick.getState(), "modelFarmTick.getState()", - ModelFarmBase.State.RUNNING); + Require.eq(modelFarmTick.getState(), "modelFarmTick.getState()", ModelFarmBase.State.RUNNING); - // Shut down the model farm. Since the worker threads will sleep for (workerSleepMs) - // milliseconds, and there is work in the queue, + // Shut down the model farm. Since the worker threads will sleep for (workerSleepMs) milliseconds, and there is + // work in the queue, // the model farm will still be busy -- so its state should be SHUTDOWN, not TERMINATED. modelFarmTick.shutdown(); - Require.eq(modelFarmTick.getState(), "modelFarmTick.getState()", - ModelFarmBase.State.SHUTDOWN); + Require.eq(modelFarmTick.getState(), "modelFarmTick.getState()", ModelFarmBase.State.SHUTDOWN); - // Test awaitTermination() before calling terminate(). 
This should return false, since - // terminate() was not called, - // and the ModelFarm should not drain its work queue until (workerSleepMs * nKeys / - // nModelFarmThreads) milliseconds have passed. + // Test awaitTermination() before calling terminate(). This should return false, since terminate() was not + // called, + // and the ModelFarm should not drain its work queue until (workerSleepMs * nKeys / nModelFarmThreads) + // milliseconds have passed. { - boolean terminated = - modelFarmTick.awaitTermination(workerSleepMs, TimeUnit.MILLISECONDS); + boolean terminated = modelFarmTick.awaitTermination(workerSleepMs, TimeUnit.MILLISECONDS); Require.eqFalse(terminated, "terminated"); - Require.eq(modelFarmTick.getState(), "modelFarmTick.getState()", - ModelFarmBase.State.SHUTDOWN); + Require.eq(modelFarmTick.getState(), "modelFarmTick.getState()", ModelFarmBase.State.SHUTDOWN); } - // Test awaitTermination() after calling terminate(). The ModelFarm should shut down almost - // immediately calling terminate(). + // Test awaitTermination() after calling terminate(). The ModelFarm should shut down almost immediately calling + // terminate(). { modelFarmTick.terminate(); final ModelFarmBase.State stateAfterTerminate = modelFarmTick.getState(); Require.requirement( - stateAfterTerminate == ModelFarmBase.State.TERMINATING - || stateAfterTerminate == ModelFarmBase.State.TERMINATED, - "stateAfterTerminate == ModelFarmBase.State.TERMINATING || stateAfterTerminate == ModelFarmBase.State.TERMINATED", - stateAfterTerminate, - "stateAfterTerminate"); - - // Await termination. Since terminate() interrupts all model farm threads, this should - // happen quickly. 
- boolean terminated = - modelFarmTick.awaitTermination(testShutdownTimeoutSecs, TimeUnit.SECONDS); - Require.requirement(terminated, "terminated == true", terminated, "terminated", - testShutdownTimeoutSecs, "testShutdownTimeoutSecs"); - Require.eq(modelFarmTick.getState(), "modelFarmTick.getState()", - ModelFarmBase.State.TERMINATED); + stateAfterTerminate == ModelFarmBase.State.TERMINATING + || stateAfterTerminate == ModelFarmBase.State.TERMINATED, + "stateAfterTerminate == ModelFarmBase.State.TERMINATING || stateAfterTerminate == ModelFarmBase.State.TERMINATED", + stateAfterTerminate, + "stateAfterTerminate"); + + // Await termination. Since terminate() interrupts all model farm threads, this should happen quickly. + boolean terminated = modelFarmTick.awaitTermination(testShutdownTimeoutSecs, TimeUnit.SECONDS); + Require.requirement(terminated, "terminated == true", terminated, "terminated", testShutdownTimeoutSecs, + "testShutdownTimeoutSecs"); + Require.eq(modelFarmTick.getState(), "modelFarmTick.getState()", ModelFarmBase.State.TERMINATED); // Most of the keys should not have finished, since we terminated early. 
Require.lt(successCounter.get(), "successCounter.get()", nKeys, "nKeys"); @@ -169,55 +146,53 @@ public void testModelFarmBusyShutdownAndTerminate() throws Exception { } @NotNull - private ModelFarmTick> getModelFarmTick( - final int nThreads, final int nKeys, final long sleepMs, AtomicInteger successCounter) { + private ModelFarmTick> getModelFarmTick(final int nThreads, + final int nKeys, final long sleepMs, AtomicInteger successCounter) { return new ModelFarmTick<>( - nThreads, - data -> { - try { - final long sleepEndTime = System.currentTimeMillis() + sleepMs; - long remainingMs; - while ((remainingMs = (sleepEndTime - System.currentTimeMillis())) > 0) { - Thread.sleep(remainingMs); + nThreads, + data -> { + try { + final long sleepEndTime = System.currentTimeMillis() + sleepMs; + long remainingMs; + while ((remainingMs = (sleepEndTime - System.currentTimeMillis())) > 0) { + Thread.sleep(remainingMs); + } + if (successCounter != null) { + // Track number of keys processed without being interrupted. + successCounter.getAndIncrement(); + } + } catch (InterruptedException e) { + throw new RuntimeException("Interrupted!", e); } - if (successCounter != null) { - // Track number of keys processed without being interrupted. 
- successCounter.getAndIncrement(); + }, + new RowDataManager() { + @Override + public DynamicTable table() { + return (DynamicTable) TableTools.emptyTable(nKeys).update("Value = ii"); } - } catch (InterruptedException e) { - throw new RuntimeException("Interrupted!", e); - } - }, - new RowDataManager() { - @Override - public DynamicTable table() { - return (DynamicTable) TableTools.emptyTable(nKeys).update("Value = ii"); - } - - @Override - public MutableLong newData() { - return new MutableLong(NULL_LONG); - } - - @Override - public Long uniqueIdCurrent(long index) { - return index; - } - - @Override - public Long uniqueIdPrev(long index) { - return index; - } - - @Override - public void loadData(MutableLong data, long index, boolean usePrev) { - final ColumnSource columnSource = - table().getColumnSource("Value", long.class); - data.setValue( - usePrev ? columnSource.getPrevLong(index) : columnSource.getLong(index)); - } - }, - nKeys); + + @Override + public MutableLong newData() { + return new MutableLong(NULL_LONG); + } + + @Override + public Long uniqueIdCurrent(long index) { + return index; + } + + @Override + public Long uniqueIdPrev(long index) { + return index; + } + + @Override + public void loadData(MutableLong data, long index, boolean usePrev) { + final ColumnSource columnSource = table().getColumnSource("Value", long.class); + data.setValue(usePrev ? 
columnSource.getPrevLong(index) : columnSource.getLong(index)); + } + }, + nKeys); } } diff --git a/ModelFarm/src/test/java/io/deephaven/modelfarm/TestModelInputSerializerDeserializer.java b/ModelFarm/src/test/java/io/deephaven/modelfarm/TestModelInputSerializerDeserializer.java index c46ea8a8040..a967b3e2e3f 100644 --- a/ModelFarm/src/test/java/io/deephaven/modelfarm/TestModelInputSerializerDeserializer.java +++ b/ModelFarm/src/test/java/io/deephaven/modelfarm/TestModelInputSerializerDeserializer.java @@ -12,8 +12,8 @@ public class TestModelInputSerializerDeserializer extends BaseArrayTestCase { public void testSerializeDeserialize() throws IOException, ClassNotFoundException { - final String filename = Configuration.getInstance().getTempPath("test") - + "TestModelInputSerializerDeserializer.ser"; + final String filename = + Configuration.getInstance().getTempPath("test") + "TestModelInputSerializerDeserializer.ser"; final ModelInputSerializer serializer = new ModelInputSerializer<>(filename); @@ -26,8 +26,7 @@ public void testSerializeDeserialize() throws IOException, ClassNotFoundExceptio serializer.exec(3); serializer.close(); - final ModelInputDeserializer deserializer = - new ModelInputDeserializer<>(Integer.class, filename); + final ModelInputDeserializer deserializer = new ModelInputDeserializer<>(Integer.class, filename); assertEquals(d1, deserializer.next().intValue()); assertEquals(d2, deserializer.next().intValue()); assertEquals(d3, deserializer.next().intValue()); diff --git a/ModelFarm/src/test/java/io/deephaven/modelfarm/util/TestModelFarmUtils.java b/ModelFarm/src/test/java/io/deephaven/modelfarm/util/TestModelFarmUtils.java index e3d49252ef1..65517a328a9 100644 --- a/ModelFarm/src/test/java/io/deephaven/modelfarm/util/TestModelFarmUtils.java +++ b/ModelFarm/src/test/java/io/deephaven/modelfarm/util/TestModelFarmUtils.java @@ -18,11 +18,11 @@ public void testRequireTable() { final Table t = TableTools.emptyTable(5).updateView("A=(int)i", 
"B=(long)i", "C=(double)i"); ModelFarmUtils.requireTable("TABLENAME", t, new String[] {"A", "C", "B"}, - new Class[] {int.class, double.class, long.class}); + new Class[] {int.class, double.class, long.class}); try { ModelFarmUtils.requireTable("TABLENAME", t, new String[] {"A", "X", "B"}, - new Class[] {int.class, double.class, long.class}); + new Class[] {int.class, double.class, long.class}); fail(); } catch (RequirementFailure e) { // pass @@ -30,7 +30,7 @@ public void testRequireTable() { try { ModelFarmUtils.requireTable("TABLENAME", t, new String[] {"A", "C", "B"}, - new Class[] {int.class, double.class, double.class}); + new Class[] {int.class, double.class, double.class}); fail(); } catch (RequirementFailure e) { // pass @@ -84,8 +84,8 @@ public void testArrayLong() { public void testArray2Double() { final double[][] target = {{1.1, 2.2, 3.3}, {5, 6}}; final DbArray dba = new DbArrayDirect( - new DbDoubleArrayDirect(target[0]), - new DbDoubleArrayDirect(target[1])); + new DbDoubleArrayDirect(target[0]), + new DbDoubleArrayDirect(target[1])); final double[][] result = ModelFarmUtils.array2Double(dba); assertEquals(target, result); assertNull(ModelFarmUtils.array2Double(null)); diff --git a/Net/src/main/java/io/deephaven/net/CommBase.java b/Net/src/main/java/io/deephaven/net/CommBase.java index 483441089eb..e7754bd9ba6 100644 --- a/Net/src/main/java/io/deephaven/net/CommBase.java +++ b/Net/src/main/java/io/deephaven/net/CommBase.java @@ -23,28 +23,23 @@ public static FatalErrorHandler getDefaultFatalHandler() { if (defaultFatalErrorHandler == null) { synchronized (CommBase.class) { if (defaultFatalErrorHandler == null) { - final String defaultFatalErrorHandlerClassName = Configuration.getInstance() - .getProperty("Comm.fatalErrorHandlerFactoryClass"); + final String defaultFatalErrorHandlerClassName = + Configuration.getInstance().getProperty("Comm.fatalErrorHandlerFactoryClass"); final Class defaultFatalErrorHandlerClass; try { - 
defaultFatalErrorHandlerClass = - Class.forName(defaultFatalErrorHandlerClassName); + defaultFatalErrorHandlerClass = Class.forName(defaultFatalErrorHandlerClassName); } catch (ClassNotFoundException e) { throw new IllegalArgumentException( - "Could not find envelopeHandlerFactoryClass " - + defaultFatalErrorHandlerClassName, - e); + "Could not find envelopeHandlerFactoryClass " + defaultFatalErrorHandlerClassName, e); } final FatalErrorHandlerFactory defaultFatalErrorHandlerFactory; try { defaultFatalErrorHandlerFactory = - (FatalErrorHandlerFactory) defaultFatalErrorHandlerClass.newInstance(); - } catch (InstantiationException | IllegalAccessException - | ClassCastException e) { + (FatalErrorHandlerFactory) defaultFatalErrorHandlerClass.newInstance(); + } catch (InstantiationException | IllegalAccessException | ClassCastException e) { throw new IllegalArgumentException( - "Could not instantiate envelopeHandlerFactoryClass " - + defaultFatalErrorHandlerClass, - e); + "Could not instantiate envelopeHandlerFactoryClass " + defaultFatalErrorHandlerClass, + e); } defaultFatalErrorHandler = defaultFatalErrorHandlerFactory.get(); } @@ -58,8 +53,8 @@ public static void signalFatalError(final String message, Throwable x) { FatalErrorHandler feh = getDefaultFatalHandler(); feh.signalFatalError(message, x); } catch (Throwable fehx) { - // dump this to stderr, it's not great, but we had an error raising an error and really - // do want both of these in the log + // dump this to stderr, it's not great, but we had an error raising an error and really do want both of + // these in the log fehx.printStackTrace(System.err); x.printStackTrace(System.err); throw new RuntimeException("Could not raise fatal error: " + message, x); diff --git a/Net/src/main/java/io/deephaven/net/impl/nio/FastNIODriver.java b/Net/src/main/java/io/deephaven/net/impl/nio/FastNIODriver.java index 5f2c33d4fcc..7f7834e4841 100644 --- a/Net/src/main/java/io/deephaven/net/impl/nio/FastNIODriver.java +++ 
b/Net/src/main/java/io/deephaven/net/impl/nio/FastNIODriver.java @@ -39,8 +39,7 @@ public static Scheduler[] createSchedulers(String name, String property, Logger return createSchedulers(name, property, log, Configuration.getInstance()); } - public static Scheduler[] createSchedulers(String name, String property, Logger log, - Configuration config) { + public static Scheduler[] createSchedulers(String name, String property, Logger log, Configuration config) { final String[] values = config.getProperty(property).split(","); if (values.length != 6) return null; @@ -53,34 +52,30 @@ public static Scheduler[] createSchedulers(String name, String property, Logger final boolean doSpinSelect = Boolean.parseBoolean(values[5]); final Scheduler[] schedulers = new Scheduler[numSchedulers]; for (int i = 0; i < numSchedulers; ++i) { - schedulers[i] = - createDrivers(name + "-" + i, log, threadsPerScheduler, threadsPerScheduler, - timeoutsOrSpins, spinsUntilPark, false, doTimingStats, doSpinSelect) - .getScheduler(); + schedulers[i] = createDrivers(name + "-" + i, log, threadsPerScheduler, threadsPerScheduler, + timeoutsOrSpins, spinsUntilPark, false, doTimingStats, doSpinSelect).getScheduler(); } return schedulers; } - public static FastNIODriver createDrivers(String name, Logger log, int initialThreads, - int maxThreads, long workTimeout, int spinsUntilPark, boolean crashOnMax) { - return createDrivers(name, log, initialThreads, maxThreads, workTimeout, spinsUntilPark, - crashOnMax, true, false); + public static FastNIODriver createDrivers(String name, Logger log, int initialThreads, int maxThreads, + long workTimeout, int spinsUntilPark, boolean crashOnMax) { + return createDrivers(name, log, initialThreads, maxThreads, workTimeout, spinsUntilPark, crashOnMax, true, + false); } - public static FastNIODriver createDrivers(String name, Logger log, int initialThreads, - int maxThreads, long workTimeout, int spinsUntilPark, boolean crashOnMax, - boolean doTimingStats, boolean 
doSpinSelect) { + public static FastNIODriver createDrivers(String name, Logger log, int initialThreads, int maxThreads, + long workTimeout, int spinsUntilPark, boolean crashOnMax, boolean doTimingStats, boolean doSpinSelect) { FastNIODriver.log = log; - log.info().append(name).append(": Starting FastNIODriver Scheduler: threads: ") - .append(initialThreads) - .append(", maxThreads: ").append(maxThreads) - .append(", workTimeout/spinsOnSelect: ").append(workTimeout) - .append(", spinsUntilPark: ").append(spinsUntilPark) - .append(", doSpinSelect: ").append(doSpinSelect) - .endl(); + log.info().append(name).append(": Starting FastNIODriver Scheduler: threads: ").append(initialThreads) + .append(", maxThreads: ").append(maxThreads) + .append(", workTimeout/spinsOnSelect: ").append(workTimeout) + .append(", spinsUntilPark: ").append(spinsUntilPark) + .append(", doSpinSelect: ").append(doSpinSelect) + .endl(); try { - final Scheduler scheduler = new YASchedulerImpl(name, - NioUtil.reduceSelectorGarbage(Selector.open()), log, doTimingStats, doSpinSelect); + final Scheduler scheduler = new YASchedulerImpl(name, NioUtil.reduceSelectorGarbage(Selector.open()), log, + doTimingStats, doSpinSelect); final UnfairMutex mutex = new UnfairMutex(spinsUntilPark, maxThreads); final AtomicBoolean shutdown = new AtomicBoolean(false); @@ -90,8 +85,8 @@ public static FastNIODriver createDrivers(String name, Logger log, int initialTh final InternalThread[] threads = new InternalThread[initialThreads]; // separate the creation and start so the created / available values are setup for (int i = 0; i < initialThreads; ++i) { - threads[i] = createNewThread(name, scheduler, mutex, shutdown, workTimeout, created, - destroyed, available, maxThreads, crashOnMax); + threads[i] = createNewThread(name, scheduler, mutex, shutdown, workTimeout, created, destroyed, + available, maxThreads, crashOnMax); } for (int i = 0; i < initialThreads; ++i) { threads[i].start(); @@ -113,17 +108,16 @@ private 
InternalThread(final FastNIODriver driver) { } } - private static InternalThread createNewThread(final String name, final Scheduler scheduler, - final UnfairMutex mutex, final AtomicBoolean shutdown, final long workTimeout, - final AtomicInteger created, final AtomicInteger destroyed, final AtomicInteger available, - final int maxThreads, final boolean crashOnMax) { - InternalThread t = new InternalThread(new FastNIODriver(name, scheduler, mutex, shutdown, - workTimeout, created, destroyed, available, maxThreads, crashOnMax)); + private static InternalThread createNewThread(final String name, final Scheduler scheduler, final UnfairMutex mutex, + final AtomicBoolean shutdown, final long workTimeout, final AtomicInteger created, + final AtomicInteger destroyed, final AtomicInteger available, final int maxThreads, + final boolean crashOnMax) { + InternalThread t = new InternalThread(new FastNIODriver(name, scheduler, mutex, shutdown, workTimeout, created, + destroyed, available, maxThreads, crashOnMax)); t.setDaemon(true); t.setName(name + "-FastNIODriver-" + created.getAndIncrement()); int a = available.incrementAndGet(); - log.info().append("Creating thread ").append(t.getName()).append(". available: ").append(a) - .endl(); + log.info().append("Creating thread ").append(t.getName()).append(". 
available: ").append(a).endl(); return t; } @@ -141,9 +135,9 @@ private static InternalThread createNewThread(final String name, final Scheduler private final boolean crashOnMax; private FastNIODriver(final String name, final Scheduler scheduler, final UnfairMutex mutex, - final AtomicBoolean shutdown, final long workTimeout, final AtomicInteger created, - final AtomicInteger destroyed, final AtomicInteger available, final int maxThreads, - final boolean crashOnMax) { + final AtomicBoolean shutdown, final long workTimeout, final AtomicInteger created, + final AtomicInteger destroyed, final AtomicInteger available, final int maxThreads, + final boolean crashOnMax) { this.scheduler = scheduler; this.mutex = mutex; this.shutdown = shutdown; @@ -160,8 +154,8 @@ public void call() { if (!alreadyHandedOff) { if (shouldCreate()) { // nobody to handoff too! let's create a new driver - createNewThread(name, scheduler, mutex, shutdown, workTimeout, created, - destroyed, available, maxThreads, crashOnMax).start(); + createNewThread(name, scheduler, mutex, shutdown, workTimeout, created, destroyed, available, + maxThreads, crashOnMax).start(); } mutex.unlock(); alreadyHandedOff = true; @@ -176,12 +170,10 @@ private boolean shouldCreate() { // don't need to worry about races w/ index b/c we have lock if (created.get() == maxThreads) { if (crashOnMax) { - log.fatal().append("FastNIODriver: exceeded maximum thread pool limit: ") - .append(summary()).endl(); + log.fatal().append("FastNIODriver: exceeded maximum thread pool limit: ").append(summary()).endl(); LogCrashDump.logCrashDump(log); - CommBase.signalFatalError( - "FastNIODriver: exceeded maximum thread pool limit: " + summary(), - new Throwable()); + CommBase.signalFatalError("FastNIODriver: exceeded maximum thread pool limit: " + summary(), + new Throwable()); } return false; } @@ -191,8 +183,8 @@ private boolean shouldCreate() { } public String summary() { - return "(available: " + available.get() + ", created: " + 
created.get() + ", destroyed: " - + destroyed.get() + ")"; + return "(available: " + available.get() + ", created: " + created.get() + ", destroyed: " + destroyed.get() + + ")"; } @Override @@ -224,9 +216,8 @@ public void run() { scheduler.installJob(new TimedJob() { public void timedOut() {} }, 0); // wake us up yo - mutexUnlockHandoff.call(); // we aren't sure whether the scheduler.work has already - // called the handoff or not yet, so go ahead and call it - // (it won't double release it) + mutexUnlockHandoff.call(); // we aren't sure whether the scheduler.work has already called the handoff + // or not yet, so go ahead and call it (it won't double release it) long deadline = System.currentTimeMillis() + 5000; // b/c we haven't destroyed ourself yet... // meh spinning :/ @@ -246,16 +237,14 @@ public void timedOut() {} } if (throwable == null) { - log.error().append("Thread ").append(me.getName()).append(" is terminating: ") - .append(summary()).endl(); + log.error().append("Thread ").append(me.getName()).append(" is terminating: ").append(summary()).endl(); } else { - log.fatal(throwable).append("Thread ").append(me.getName()) - .append(" is terminating on a fatal exception: ").append(summary()).endl(); + log.fatal(throwable).append("Thread ").append(me.getName()).append(" is terminating on a fatal exception: ") + .append(summary()).endl(); } if (throwable != null) - CommBase.signalFatalError("Unhandled throwable from FastNIODriver scheduler", - throwable); + CommBase.signalFatalError("Unhandled throwable from FastNIODriver scheduler", throwable); } public boolean isShutdown() { diff --git a/Net/src/main/java/io/deephaven/net/impl/nio/NIODriver.java b/Net/src/main/java/io/deephaven/net/impl/nio/NIODriver.java index 7bbeef880a3..f011cc51ed7 100644 --- a/Net/src/main/java/io/deephaven/net/impl/nio/NIODriver.java +++ b/Net/src/main/java/io/deephaven/net/impl/nio/NIODriver.java @@ -39,8 +39,7 @@ public class NIODriver implements Runnable { public static int 
NUM_INITIAL_THREADS; public static int HARD_MAX_THREADS; - private static final boolean useFastNIODriver = - Configuration.getInstance().getBoolean("NIO.driver.useFast"); + private static final boolean useFastNIODriver = Configuration.getInstance().getBoolean("NIO.driver.useFast"); /** * Let another thread take over the leadership. @@ -50,13 +49,11 @@ private static void handoff() { synchronized (lock) { if (leader != me) { LogCrashDump.logCrashDump(log); - CommBase.signalFatalError("NIODriver: WTF? in handoff(), but not the leader?", - new Throwable()); + CommBase.signalFatalError("NIODriver: WTF? in handoff(), but not the leader?", new Throwable()); } if (log.isDebugEnabled()) { - log.debug().append("Thread ").append(me.getName()) - .append(" is giving up leadership").endl(); + log.debug().append("Thread ").append(me.getName()).append(" is giving up leadership").endl(); } leader = null; @@ -65,8 +62,8 @@ private static void handoff() { lock.notify(); } else { // no joy, have to add another thread - log.warn().append("Thread ").append(me.getName()) - .append(" is handing off with no threads available: ").append(summary()).endl(); + log.warn().append("Thread ").append(me.getName()).append(" is handing off with no threads available: ") + .append(summary()).endl(); addThread(); } } @@ -88,8 +85,7 @@ public static String summary() { if (useFastNIODriver) { return driver.summary(); } else { - return "(available: " + available + ", created: " + created + ", destroyed: " - + destroyed + ")"; + return "(available: " + available + ", created: " + created + ", destroyed: " + destroyed + ")"; } } @@ -114,18 +110,15 @@ public static void init(Logger log) { if (!initialized) { NIODriver.log = log; WORK_TIMEOUT = Configuration.getInstance().getInteger("NIO.driver.workTimeout"); - NUM_INITIAL_THREADS = - Configuration.getInstance().getInteger("NIO.driver.initialThreadCount"); - HARD_MAX_THREADS = - Configuration.getInstance().getInteger("NIO.driver.maxThreadCount"); + 
NUM_INITIAL_THREADS = Configuration.getInstance().getInteger("NIO.driver.initialThreadCount"); + HARD_MAX_THREADS = Configuration.getInstance().getInteger("NIO.driver.maxThreadCount"); if (useFastNIODriver) { - driver = FastNIODriver.createDrivers("Static", log, NUM_INITIAL_THREADS, - HARD_MAX_THREADS, WORK_TIMEOUT, 1000, true); + driver = FastNIODriver.createDrivers("Static", log, NUM_INITIAL_THREADS, HARD_MAX_THREADS, + WORK_TIMEOUT, 1000, true); sched = driver.getScheduler(); } else { try { - sched = new YASchedulerImpl(NioUtil.reduceSelectorGarbage(Selector.open()), - log); + sched = new YASchedulerImpl(NioUtil.reduceSelectorGarbage(Selector.open()), log); } catch (IOException x) { sched = null; CommBase.signalFatalError("NIODriver.init: can't create scheduler", x); @@ -141,10 +134,9 @@ public static void init(Logger log) { } /** - * Shut down, and wait for all threads to terminate. This method is really just for testing; - * it's a bad idea to do this in production because waiting for threads to terminate is prone to - * deadlocks. If desired, though, it can be called from an AbstractService shutdown hook - * installed in init(). + * Shut down, and wait for all threads to terminate. This method is really just for testing; it's a bad idea to do + * this in production because waiting for threads to terminate is prone to deadlocks. If desired, though, it can be + * called from an AbstractService shutdown hook installed in init(). 
*/ public static boolean shutdown(long maxWait) { synchronized (lock) { @@ -169,8 +161,8 @@ public void timedOut() {} }, 0); while (created != destroyed) { try { - log.info().append("NIODriver.shutdown: waiting for threads to terminate: ") - .append(summary()).endl(); + log.info().append("NIODriver.shutdown: waiting for threads to terminate: ").append(summary()) + .endl(); lock.wait(Math.max(remain, 0)); } catch (InterruptedException x) { // ignore @@ -210,32 +202,27 @@ public static Logger getLogger() { * * NOTE: caller must hold the lock! * - * NOTE: We increment the "waiting" variable *before* we start the new thread, and then make - * sure to correct it in the first iteration of the thread loop. This prevents a race in which - * we handoff() method creates too many threads, because it keeps getting called before the - * first thread it creates can get started. + * NOTE: We increment the "waiting" variable *before* we start the new thread, and then make sure to correct it in + * the first iteration of the thread loop. This prevents a race in which we handoff() method creates too many + * threads, because it keeps getting called before the first thread it creates can get started. 
*/ private static void addThread() { if (created == HARD_MAX_THREADS) { - log.fatal().append("NIODriver: exceeded maximum thread pool limit: ").append(summary()) - .endl(); + log.fatal().append("NIODriver: exceeded maximum thread pool limit: ").append(summary()).endl(); LogCrashDump.logCrashDump(log); - CommBase.signalFatalError("NIODriver: exceeded maximum thread pool limit: " + summary(), - new Throwable()); + CommBase.signalFatalError("NIODriver: exceeded maximum thread pool limit: " + summary(), new Throwable()); } Thread thread = new Thread(new NIODriver()); thread.setDaemon(true); thread.setName("NIODriver-" + created); created++; available.incrementAndGet(); - log.info().append("Thread ").append(thread.getName()).append(" is starting: ") - .append(summary()).endl(); + log.info().append("Thread ").append(thread.getName()).append(" is starting: ").append(summary()).endl(); thread.start(); } /** - * the threads' run method just does an endless loop, trying to become the leader whenever it - * can + * the threads' run method just does an endless loop, trying to become the leader whenever it can */ public void run() { Thread me = Thread.currentThread(); @@ -245,26 +232,26 @@ public void run() { while (leader != me) { if (stopped) { destroyed++; - log.info().append("Thread ").append(me.getName()) - .append(" is terminating: ").append(summary()).endl(); + log.info().append("Thread ").append(me.getName()).append(" is terminating: ") + .append(summary()).endl(); lock.notifyAll(); break STOP; } else if (leader == null) { if (log.isDebugEnabled()) { - log.debug().append("Thread ").append(me.getName()) - .append(" is assuming leadership").endl(); + log.debug().append("Thread ").append(me.getName()).append(" is assuming leadership") + .endl(); } leader = me; } else { try { if (log.isDebugEnabled()) { - log.debug().append("Thread ").append(me.getName()) - .append(" is waiting ").append(summary()).endl(); + log.debug().append("Thread ").append(me.getName()).append(" is 
waiting ") + .append(summary()).endl(); } lock.wait(); if (log.isDebugEnabled()) { - log.debug().append("Thread ").append(me.getName()) - .append(" has awoken ").append(summary()).endl(); + log.debug().append("Thread ").append(me.getName()).append(" has awoken ") + .append(summary()).endl(); } } catch (InterruptedException x) { // ignore @@ -280,8 +267,7 @@ public void run() { synchronized (lock) { destroyed++; log.fatal(x).append("Thread ").append(me.getName()) - .append(" is terminating on a fatal exception: ").append(summary()) - .endl(); + .append(" is terminating on a fatal exception: ").append(summary()).endl(); lock.notifyAll(); } diff --git a/Numerics/src/main/java/io/deephaven/numerics/derivatives/BlackScholes.java b/Numerics/src/main/java/io/deephaven/numerics/derivatives/BlackScholes.java index ee8d35ad351..7795bbf01bc 100644 --- a/Numerics/src/main/java/io/deephaven/numerics/derivatives/BlackScholes.java +++ b/Numerics/src/main/java/io/deephaven/numerics/derivatives/BlackScholes.java @@ -25,8 +25,8 @@ private BlackScholes() {} * @param v volatility * @return theoretical option price */ - public static double price(final boolean isCall, final double S, final double X, final double T, - final double r, final double b, final double v) { + public static double price(final boolean isCall, final double S, final double X, final double T, final double r, + final double b, final double v) { final double d1 = (Math.log(S / X) + (b + v * v / 2) * T) / (v * Math.sqrt(T)); final double d2 = d1 - v * Math.sqrt(T); @@ -38,8 +38,8 @@ public static double price(final boolean isCall, final double S, final double X, } /** - * Computes the generalized Black-Scholes delta (first order partial derivative of option price - * with respect to stock price). + * Computes the generalized Black-Scholes delta (first order partial derivative of option price with respect to + * stock price). * * @param isCall true for call; false for put. 
* @param S underlying stock price @@ -50,8 +50,8 @@ public static double price(final boolean isCall, final double S, final double X, * @param v volatility * @return theoretical option delta */ - public static double delta(final boolean isCall, final double S, final double X, final double T, - final double r, final double b, final double v) { + public static double delta(final boolean isCall, final double S, final double X, final double T, final double r, + final double b, final double v) { final double d1 = (Math.log(S / X) + (b + v * v / 2) * T) / (v * Math.sqrt(T)); if (isCall) { @@ -62,8 +62,8 @@ public static double delta(final boolean isCall, final double S, final double X, } /** - * Computes the generalized Black-Scholes gamma (second order partial derivative of option price - * with respect to stock price). + * Computes the generalized Black-Scholes gamma (second order partial derivative of option price with respect to + * stock price). * * @param S underlying stock price * @param X strike price @@ -73,15 +73,15 @@ public static double delta(final boolean isCall, final double S, final double X, * @param v volatility * @return theoretical option price */ - public static double gamma(final double S, final double X, final double T, final double r, - final double b, final double v) { + public static double gamma(final double S, final double X, final double T, final double r, final double b, + final double v) { final double d1 = (Math.log(S / X) + (b + v * v / 2) * T) / (v * Math.sqrt(T)); return nd.density(d1) * Math.exp((b - r) * T) / (S * v * Math.sqrt(T)); } /** - * Computes the generalized Black-Scholes percentage gamma (first order partial derivative of - * delta with respect to ln(stock price)). + * Computes the generalized Black-Scholes percentage gamma (first order partial derivative of delta with respect to + * ln(stock price)). 
* * @param S underlying stock price * @param X strike price @@ -91,14 +91,14 @@ public static double gamma(final double S, final double X, final double T, final * @param v volatility * @return theoretical option price */ - public static double gammaP(final double S, final double X, final double T, final double r, - final double b, final double v) { + public static double gammaP(final double S, final double X, final double T, final double r, final double b, + final double v) { return gamma(S, X, T, r, b, v) * S; } /** - * Computes the generalized Black-Scholes vega (first order partial derivative of option price - * with respect to volatility). + * Computes the generalized Black-Scholes vega (first order partial derivative of option price with respect to + * volatility). * * @param S underlying stock price * @param X strike price @@ -108,15 +108,15 @@ public static double gammaP(final double S, final double X, final double T, fina * @param v volatility * @return theoretical option price */ - public static double vega(final double S, final double X, final double T, final double r, - final double b, final double v) { + public static double vega(final double S, final double X, final double T, final double r, final double b, + final double v) { final double d1 = (Math.log(S / X) + (b + v * v / 2) * T) / (v * Math.sqrt(T)); return S * Math.exp((b - r) * T) * nd.density(d1) * Math.sqrt(T); } /** - * Computes the generalized Black-Scholes percentage vega (first order partial derivative of - * vega with respect to ln(volatility)). + * Computes the generalized Black-Scholes percentage vega (first order partial derivative of vega with respect to + * ln(volatility)). 
* * @param S underlying stock price * @param X strike price @@ -126,14 +126,14 @@ public static double vega(final double S, final double X, final double T, final * @param v volatility * @return theoretical option price */ - public static double vegaP(final double S, final double X, final double T, final double r, - final double b, final double v) { + public static double vegaP(final double S, final double X, final double T, final double r, final double b, + final double v) { return v * vega(S, X, T, r, b, v); } /** - * Computes the generalized Black-Scholes vomma (second order partial derivative of option price - * with respect to volatility). + * Computes the generalized Black-Scholes vomma (second order partial derivative of option price with respect to + * volatility). * * @param S underlying stock price * @param X strike price @@ -143,16 +143,16 @@ public static double vegaP(final double S, final double X, final double T, final * @param v volatility * @return theoretical option price */ - public static double vomma(final double S, final double X, final double T, final double r, - final double b, final double v) { + public static double vomma(final double S, final double X, final double T, final double r, final double b, + final double v) { final double d1 = (Math.log(S / X) + (b + v * v / 2) * T) / (v * Math.sqrt(T)); final double d2 = d1 - (v * Math.sqrt(T)); return vega(S, X, T, r, b, v) * d1 * d2 / v; } /** - * Computes the generalized Black-Scholes percentage vomma (first order partial derivative of - * Vega with respect to ln(volatility)). + * Computes the generalized Black-Scholes percentage vomma (first order partial derivative of Vega with respect to + * ln(volatility)). 
* * @param S underlying stock price * @param X strike price @@ -162,16 +162,16 @@ public static double vomma(final double S, final double X, final double T, final * @param v volatility * @return theoretical option price */ - public static double vommaP(final double S, final double X, final double T, final double r, - final double b, final double v) { + public static double vommaP(final double S, final double X, final double T, final double r, final double b, + final double v) { final double d1 = (Math.log(S / X) + (b + v * v / 2) * T) / (v * Math.sqrt(T)); final double d2 = d1 - (v * Math.sqrt(T)); return vegaP(S, X, T, r, b, v) * d1 * d2 / v; } /** - * Computes the generalized Black-Scholes vegaBleed (first order partial derivative of Vega with - * respect to time to expiry). + * Computes the generalized Black-Scholes vegaBleed (first order partial derivative of Vega with respect to time to + * expiry). * * @param S underlying stock price * @param X strike price @@ -181,17 +181,16 @@ public static double vommaP(final double S, final double X, final double T, fina * @param v volatility * @return theoretical option price */ - public static double vegaBleed(final double S, final double X, final double T, final double r, - final double b, final double v) { + public static double vegaBleed(final double S, final double X, final double T, final double r, final double b, + final double v) { final double d1 = (Math.log(S / X) + (b + v * v / 2) * T) / (v * Math.sqrt(T)); final double d2 = d1 - (v * Math.sqrt(T)); - return vega(S, X, T, r, b, v) - * (r - b + (b * d1 / (v * Math.sqrt(T))) - (1 + d1 * d2) / (2 * T)); + return vega(S, X, T, r, b, v) * (r - b + (b * d1 / (v * Math.sqrt(T))) - (1 + d1 * d2) / (2 * T)); } /** - * Computes the generalized Black-Scholes charm (first order partial derivative of Delta with - * respect to time to expiry). + * Computes the generalized Black-Scholes charm (first order partial derivative of Delta with respect to time to + * expiry). 
* * @param isCall true for call; false for put. * @param S underlying stock price @@ -202,22 +201,22 @@ public static double vegaBleed(final double S, final double X, final double T, f * @param v volatility * @return theoretical option price */ - public static double charm(final boolean isCall, final double S, final double X, final double T, - final double r, final double b, final double v) { + public static double charm(final boolean isCall, final double S, final double X, final double T, final double r, + final double b, final double v) { final double d1 = (Math.log(S / X) + (b + v * v / 2) * T) / (v * Math.sqrt(T)); final double d2 = d1 - (v * Math.sqrt(T)); if (isCall) { return -Math.exp((b - r) * T) - * ((nd.density(d1) * (b / (v * Math.sqrt(T)) - d2 / (2 * T))) + (b - r) * CND(d1)); + * ((nd.density(d1) * (b / (v * Math.sqrt(T)) - d2 / (2 * T))) + (b - r) * CND(d1)); } else { return -Math.exp((b - r) * T) - * ((nd.density(d1) * (b / (v * Math.sqrt(T)) - d2 / (2 * T))) - (b - r) * CND(-d1)); + * ((nd.density(d1) * (b / (v * Math.sqrt(T)) - d2 / (2 * T))) - (b - r) * CND(-d1)); } } /** - * Computes the generalized Black-Scholes theta (first order partial derivative of option price - * with respect to time to expiry). + * Computes the generalized Black-Scholes theta (first order partial derivative of option price with respect to time + * to expiry). * * @param isCall true for call; false for put. 
* @param S underlying stock price @@ -228,24 +227,22 @@ public static double charm(final boolean isCall, final double S, final double X, * @param v volatility * @return theoretical option price */ - public static double theta(final boolean isCall, final double S, final double X, final double T, - final double r, final double b, final double v) { + public static double theta(final boolean isCall, final double S, final double X, final double T, final double r, + final double b, final double v) { final double d1 = (Math.log(S / X) + (b + v * v / 2) * T) / (v * Math.sqrt(T)); final double d2 = d1 - (v * Math.sqrt(T)); if (isCall) { return ((-S * Math.exp((b - r) * T) * nd.density(d1) * v) / (2 * Math.sqrt(T))) - - ((b - r) * S * Math.exp((b - r) * T) * CND(d1)) - - (r * X * Math.exp(-r * T) * CND(d2)); + - ((b - r) * S * Math.exp((b - r) * T) * CND(d1)) - (r * X * Math.exp(-r * T) * CND(d2)); } else { return ((-S * Math.exp((b - r) * T) * nd.density(d1) * v) / (2 * Math.sqrt(T))) - + ((b - r) * S * Math.exp((b - r) * T) * CND(-d1)) - + (r * X * Math.exp(-r * T) * CND(-d2)); + + ((b - r) * S * Math.exp((b - r) * T) * CND(-d1)) + (r * X * Math.exp(-r * T) * CND(-d2)); } } /** - * Computes the generalized Black-Scholes driftlessTheta (theta assuing the risk free rate and - * the cost of carry are zero). + * Computes the generalized Black-Scholes driftlessTheta (theta assuing the risk free rate and the cost of carry are + * zero). 
* * @param S underlying stock price * @param X strike price @@ -255,15 +252,15 @@ public static double theta(final boolean isCall, final double S, final double X, * @param v volatility * @return theoretical option price */ - public static double driftlessTheta(final double S, final double X, final double T, - final double r, final double b, final double v) { + public static double driftlessTheta(final double S, final double X, final double T, final double r, final double b, + final double v) { final double d1 = (Math.log(S / X) + (b + v * v / 2) * T) / (v * Math.sqrt(T)); return -S * nd.density(d1) * v / (2 * Math.sqrt(T)); } /** - * Computes the generalized Black-Scholes rho (first order partial derivative of option price - * with respect to risk-free rate). + * Computes the generalized Black-Scholes rho (first order partial derivative of option price with respect to + * risk-free rate). * * @param isCall true for call; false for put. * @param S underlying stock price @@ -274,8 +271,8 @@ public static double driftlessTheta(final double S, final double X, final double * @param v volatility * @return theoretical option price */ - public static double rho(final boolean isCall, final double S, final double X, final double T, - final double r, final double b, final double v) { + public static double rho(final boolean isCall, final double S, final double X, final double T, final double r, + final double b, final double v) { final double d1 = (Math.log(S / X) + (b + v * v / 2) * T) / (v * Math.sqrt(T)); final double d2 = d1 - (v * Math.sqrt(T)); if (isCall) { @@ -286,8 +283,8 @@ public static double rho(final boolean isCall, final double S, final double X, f } /** - * Computes the generalized Black-Scholes carryRho (first order partial derivative of option - * price with respect to cost-of-carry). + * Computes the generalized Black-Scholes carryRho (first order partial derivative of option price with respect to + * cost-of-carry). 
* * @param isCall true for call; false for put. * @param S underlying stock price @@ -298,8 +295,8 @@ public static double rho(final boolean isCall, final double S, final double X, f * @param v volatility * @return theoretical option price */ - public static double carryRho(final boolean isCall, final double S, final double X, - final double T, final double r, final double b, final double v) { + public static double carryRho(final boolean isCall, final double S, final double X, final double T, final double r, + final double b, final double v) { final double d1 = (Math.log(S / X) + (b + v * v / 2) * T) / (v * Math.sqrt(T)); if (isCall) { return T * S * Math.exp((b - r) * T) * CND(d1); @@ -309,8 +306,8 @@ public static double carryRho(final boolean isCall, final double S, final double } /** - * Computes the generalized Black-Scholes strikeDelta (first order partial derivative of option - * price with respect to strike price). + * Computes the generalized Black-Scholes strikeDelta (first order partial derivative of option price with respect + * to strike price). * * @param isCall true for call; false for put. 
* @param S underlying stock price @@ -321,8 +318,8 @@ public static double carryRho(final boolean isCall, final double S, final double * @param v volatility * @return theoretical option price */ - public static double strikeDelta(final boolean isCall, final double S, final double X, - final double T, final double r, final double b, final double v) { + public static double strikeDelta(final boolean isCall, final double S, final double X, final double T, + final double r, final double b, final double v) { final double d1 = (Math.log(S / X) + (b + v * v / 2) * T) / (v * Math.sqrt(T)); final double d2 = d1 - (v * Math.sqrt(T)); if (isCall) { @@ -344,8 +341,8 @@ public static double strikeDelta(final boolean isCall, final double S, final dou * @param b cost-of-carry * @return implied volatility */ - public static double impliedVolBisect(final double P, final Boolean isCall, final double S, - final double X, final double T, final double r, final double b) { + public static double impliedVolBisect(final double P, final Boolean isCall, final double S, final double X, + final double T, final double r, final double b) { return impliedVolBisect(P, isCall, S, X, T, r, b, EPS, MAX_ITERS); } @@ -363,11 +360,10 @@ public static double impliedVolBisect(final double P, final Boolean isCall, fina * @param maxIters maximum number of optimization iterations * @return implied volatility */ - public static double impliedVolBisect(final double P, final Boolean isCall, final double S, - final double X, final double T, final double r, final double b, final double eps, - final int maxIters) { - if (P == NULL_DOUBLE || isCall == null || S == NULL_DOUBLE || X == NULL_DOUBLE - || T == NULL_DOUBLE || r == NULL_DOUBLE || b == NULL_DOUBLE) { + public static double impliedVolBisect(final double P, final Boolean isCall, final double S, final double X, + final double T, final double r, final double b, final double eps, final int maxIters) { + if (P == NULL_DOUBLE || isCall == null || S == 
NULL_DOUBLE || X == NULL_DOUBLE || T == NULL_DOUBLE + || r == NULL_DOUBLE || b == NULL_DOUBLE) { return NULL_DOUBLE; } @@ -413,8 +409,8 @@ public static double impliedVolBisect(final double P, final Boolean isCall, fina * @param b cost-of-carry * @return implied volatility */ - public static double impliedVolNewton(final double P, final Boolean isCall, final double S, - final double X, final double T, final double r, final double b) { + public static double impliedVolNewton(final double P, final Boolean isCall, final double S, final double X, + final double T, final double r, final double b) { return impliedVolNewton(P, isCall, S, X, T, r, b, EPS, MAX_ITERS); } @@ -432,11 +428,10 @@ public static double impliedVolNewton(final double P, final Boolean isCall, fina * @param maxIters maximum number of optimization iterations * @return implied volatility */ - public static double impliedVolNewton(final double P, final Boolean isCall, final double S, - final double X, final double T, final double r, final double b, final double eps, - final int maxIters) { - if (P == NULL_DOUBLE || isCall == null || S == NULL_DOUBLE || X == NULL_DOUBLE - || T == NULL_DOUBLE || r == NULL_DOUBLE || b == NULL_DOUBLE) { + public static double impliedVolNewton(final double P, final Boolean isCall, final double S, final double X, + final double T, final double r, final double b, final double eps, final int maxIters) { + if (P == NULL_DOUBLE || isCall == null || S == NULL_DOUBLE || X == NULL_DOUBLE || T == NULL_DOUBLE + || r == NULL_DOUBLE || b == NULL_DOUBLE) { return NULL_DOUBLE; } @@ -479,8 +474,8 @@ public static double impliedVolNewton(final double P, final Boolean isCall, fina * @param b cost-of-carry * @return implied volatility */ - public static double impliedVolNewtonP(final double P, final Boolean isCall, final double S, - final double X, final double T, final double r, final double b) { + public static double impliedVolNewtonP(final double P, final Boolean isCall, final double S, 
final double X, + final double T, final double r, final double b) { return impliedVolNewtonP(P, isCall, S, X, T, r, b, EPS, MAX_ITERS); } @@ -498,11 +493,10 @@ public static double impliedVolNewtonP(final double P, final Boolean isCall, fin * @param maxIters maximum number of optimization iterations * @return implied volatility */ - public static double impliedVolNewtonP(final double P, final Boolean isCall, final double S, - final double X, final double T, final double r, final double b, final double eps, - final int maxIters) { - if (P == NULL_DOUBLE || isCall == null || S == NULL_DOUBLE || X == NULL_DOUBLE - || T == NULL_DOUBLE || r == NULL_DOUBLE || b == NULL_DOUBLE) { + public static double impliedVolNewtonP(final double P, final Boolean isCall, final double S, final double X, + final double T, final double r, final double b, final double eps, final int maxIters) { + if (P == NULL_DOUBLE || isCall == null || S == NULL_DOUBLE || X == NULL_DOUBLE || T == NULL_DOUBLE + || r == NULL_DOUBLE || b == NULL_DOUBLE) { return NULL_DOUBLE; } @@ -551,8 +545,8 @@ public static double impliedVolNewtonP(final double P, final Boolean isCall, fin * @param v volatility * @return strike associated with the delta */ - public static double strikeFromDeltaBisect(final double delta, final Boolean isCall, - final double S, final double T, final double r, final double b, final double v) { + public static double strikeFromDeltaBisect(final double delta, final Boolean isCall, final double S, final double T, + final double r, final double b, final double v) { return strikeFromDeltaBisect(delta, isCall, S, T, r, b, v, EPS, MAX_ITERS); } @@ -570,11 +564,10 @@ public static double strikeFromDeltaBisect(final double delta, final Boolean isC * @param maxIters maximum number of optimization iterations * @return strike associated with the delta */ - public static double strikeFromDeltaBisect(final double delta, final Boolean isCall, - final double S, final double T, final double r, final double 
b, final double v, - final double eps, final int maxIters) { - if (delta == NULL_DOUBLE || isCall == null || S == NULL_DOUBLE || v == NULL_DOUBLE - || T == NULL_DOUBLE || r == NULL_DOUBLE || b == NULL_DOUBLE) { + public static double strikeFromDeltaBisect(final double delta, final Boolean isCall, final double S, final double T, + final double r, final double b, final double v, final double eps, final int maxIters) { + if (delta == NULL_DOUBLE || isCall == null || S == NULL_DOUBLE || v == NULL_DOUBLE || T == NULL_DOUBLE + || r == NULL_DOUBLE || b == NULL_DOUBLE) { return NULL_DOUBLE; } @@ -610,13 +603,11 @@ public static double strikeFromDeltaBisect(final double delta, final Boolean isC * The cumulative normal distribution function. */ private static double CND(final double X) { - final double a1 = 0.31938153, a2 = -0.356563782, a3 = 1.781477937, a4 = -1.821255978, - a5 = 1.330274429; + final double a1 = 0.31938153, a2 = -0.356563782, a3 = 1.781477937, a4 = -1.821255978, a5 = 1.330274429; final double L = Math.abs(X); final double K = 1.0 / (1.0 + 0.2316419 * L); - final double w = - 1.0 - 1.0 / Math.sqrt(2.0 * Math.PI) * Math.exp(-L * L / 2) * (a1 * K + a2 * K * K + a3 + final double w = 1.0 - 1.0 / Math.sqrt(2.0 * Math.PI) * Math.exp(-L * L / 2) * (a1 * K + a2 * K * K + a3 * Math.pow(K, 3) + a4 * Math.pow(K, 4) + a5 * Math.pow(K, 5)); if (X < 0.0) { diff --git a/Numerics/src/main/java/io/deephaven/numerics/interpolation/Interpolator.java b/Numerics/src/main/java/io/deephaven/numerics/interpolation/Interpolator.java index 075e0f8866c..8adec298291 100644 --- a/Numerics/src/main/java/io/deephaven/numerics/interpolation/Interpolator.java +++ b/Numerics/src/main/java/io/deephaven/numerics/interpolation/Interpolator.java @@ -31,13 +31,13 @@ public enum InterpolationAlgorithm { * @param y input y data. * @param xi x points to evaluate the interpolation at. * @param method interpolation method to use. 
- * @param extrapolate true if extrapolation is to be used for values outside of the x range; - * false if values outside the x range should return NaN. - * @return y values interpolated at points xi if xi is in range(x), and y values extrapolated - * based on the extrapolate flag if xi is not in range(x). + * @param extrapolate true if extrapolation is to be used for values outside of the x range; false if values outside + * the x range should return NaN. + * @return y values interpolated at points xi if xi is in range(x), and y values extrapolated based on the + * extrapolate flag if xi is not in range(x). */ - public static double[] interpolate(double[] x, double[] y, double[] xi, - InterpolationAlgorithm method, boolean extrapolate) { + public static double[] interpolate(double[] x, double[] y, double[] xi, InterpolationAlgorithm method, + boolean extrapolate) { if (xi.length == 0) { return new double[0]; } @@ -45,26 +45,22 @@ public static double[] interpolate(double[] x, double[] y, double[] xi, final int n = y.length; if (y.length != x.length) { - throw new IllegalArgumentException( - "X and Y are different lengths: " + x.length + "," + y.length); + throw new IllegalArgumentException("X and Y are different lengths: " + x.length + "," + y.length); } if (containsInfinityOrNanOrNull(x)) { - throw new IllegalArgumentException( - "X contains Inf or NaNs or Nulls! " + Arrays.toString(x)); + throw new IllegalArgumentException("X contains Inf or NaNs or Nulls! " + Arrays.toString(x)); } if (containsInfinityOrNanOrNull(y)) { - throw new IllegalArgumentException( - "Y contains Inf or NaNs or Nulls! " + Arrays.toString(y)); + throw new IllegalArgumentException("Y contains Inf or NaNs or Nulls! 
" + Arrays.toString(y)); } if (n < 2) { if (xi.length == 0) { return new double[0]; } else { - throw new IllegalArgumentException( - "At least 2 data points are needed to interpolate"); + throw new IllegalArgumentException("At least 2 data points are needed to interpolate"); } } @@ -153,8 +149,7 @@ public static double[] interpolate(double[] x, double[] y, double[] xi, break; default: - throw new UnsupportedOperationException( - "Interpolation method is not yet supported: " + method); + throw new UnsupportedOperationException("Interpolation method is not yet supported: " + method); } } @@ -189,8 +184,7 @@ private static double[] diff(double[] v) { private static double[][] sort(double[] x, double[] y) { if (x.length != y.length) { - throw new IllegalArgumentException( - "X and Y are not the same length: " + x.length + "," + y.length); + throw new IllegalArgumentException("X and Y are not the same length: " + x.length + "," + y.length); } @@ -227,10 +221,9 @@ private static boolean containsNegative(double[] x) { } private static double[] piecewiseCubicHermiteInterpolation(double[] x, double[] y, double[] xi, - InterpolationDerivativeCalculator derivativeCalculator) { + InterpolationDerivativeCalculator derivativeCalculator) { if (x.length != y.length) { - throw new IllegalArgumentException( - "X and Y are different lengths: " + x.length + "," + y.length); + throw new IllegalArgumentException("X and Y are different lengths: " + x.length + "," + y.length); } if (x.length == 0) { @@ -265,8 +258,8 @@ private static double[] piecewiseCubicHermiteInterpolation(double[] x, double[] double[] d = derivativeCalculator.computeDerivatives(x, y, h, delta); if (d.length != x.length) { - throw new IllegalStateException("Hermite derivative vector is the wrong length: " - + d.length + " != " + x.length + " " + derivativeCalculator.getClass()); + throw new IllegalStateException("Hermite derivative vector is the wrong length: " + d.length + " != " + + x.length + " " + 
derivativeCalculator.getClass()); } int[] k = getBin(xi, x); @@ -310,14 +303,13 @@ public double[] computeDerivatives(double[] x, double[] y, double[] h, double[] } d[0] = computeDeriviativeBoundaryValue(h[0], h[1], delta[0], delta[1]); - d[d.length - 1] = computeDeriviativeBoundaryValue(h[h.length - 1], h[h.length - 2], - delta[delta.length - 1], delta[delta.length - 2]); + d[d.length - 1] = computeDeriviativeBoundaryValue(h[h.length - 1], h[h.length - 2], delta[delta.length - 1], + delta[delta.length - 2]); return d; } - private double computeDeriviativeBoundaryValue(double h1, double h2, double delta1, - double delta2) { + private double computeDeriviativeBoundaryValue(double h1, double h2, double delta1, double delta2) { double d = ((2 * h1 + h2) * delta1 - h1 * delta2) / (h1 + h2); if (d * delta1 < 0) { @@ -350,12 +342,11 @@ public double[] computeDerivatives(double[] x, double[] y, double[] h, double[] double[][] r = new double[delta.length + 1][1]; - r[0][0] = ((h[0] + 2 * (h[0] + h[1])) * h[1] * delta[0] + h[0] * h[0] * delta[1]) - / (h[0] + h[1]); + r[0][0] = ((h[0] + 2 * (h[0] + h[1])) * h[1] * delta[0] + h[0] * h[0] * delta[1]) / (h[0] + h[1]); r[delta.length][0] = (h[h.length - 1] * h[h.length - 1] * delta[h.length - 2] - + (2 * (h[h.length - 2] + h[h.length - 1]) + h[h.length - 1]) * h[h.length - 2] - * delta[h.length - 1]) - / (h[h.length - 2] + h[h.length - 1]); + + (2 * (h[h.length - 2] + h[h.length - 1]) + h[h.length - 1]) * h[h.length - 2] + * delta[h.length - 1]) + / (h[h.length - 2] + h[h.length - 1]); for (int i = 1; i < delta.length; i++) { r[i][0] = 3 * (h[i] * delta[i - 1] + h[i - 1] * delta[i]); @@ -376,15 +367,13 @@ public double[] computeDerivatives(double[] x, double[] y, double[] h, double[] } /** - * Returns the bin, determined by the nodes of x, that each element of xi belongs in. If the - * sample is less than x[0] or greater than x[end], the sample is assigned to the first or last - * bin. x is assumed to be sorted. 
+ * Returns the bin, determined by the nodes of x, that each element of xi belongs in. If the sample is less than + * x[0] or greater than x[end], the sample is assigned to the first or last bin. x is assumed to be sorted. * * @param xi elements being binned. * @param x nodes partitioning the space. - * @return the bin, determined by the nodes of x, that each element of xi belongs in. If the - * sample is less than x[0] or greater than x[end], the sample is assigned to the first - * or last bin. + * @return the bin, determined by the nodes of x, that each element of xi belongs in. If the sample is less than + * x[0] or greater than x[end], the sample is assigned to the first or last bin. */ private static int[] getBin(double[] xi, double[] x) { int[] bin = new int[xi.length]; diff --git a/Numerics/src/main/java/io/deephaven/numerics/movingaverages/ByEma.java b/Numerics/src/main/java/io/deephaven/numerics/movingaverages/ByEma.java index 42aa032c3e3..011f04a2db0 100644 --- a/Numerics/src/main/java/io/deephaven/numerics/movingaverages/ByEma.java +++ b/Numerics/src/main/java/io/deephaven/numerics/movingaverages/ByEma.java @@ -47,8 +47,8 @@ public int hashCode() { @Override public String toString() { return "Key{" + - "values=" + (values == null ? null : Arrays.asList(values)) + - '}'; + "values=" + (values == null ? null : Arrays.asList(values)) + + '}'; } } @@ -78,8 +78,8 @@ protected ByEma(BadDataBehavior nullBehavior, BadDataBehavior nanBehavior) { Require.neqNull(nanBehavior, "nanBehavior"); } - // DB automatic type conversion takes care of converting all non-double nulls into double nulls - // so we don't have to duplicate the null checking + // DB automatic type conversion takes care of converting all non-double nulls into double nulls so we don't have to + // duplicate the null checking // for each type. public synchronized double update(double value) { @@ -99,28 +99,26 @@ public synchronized double update(DBDateTime timestamp, double value, Object... 
} public synchronized double update(long timestampNanos, double value, Object... by) { - return updateInternal(timestampNanos, value, DoublePrimitives.isNull(value), - Double.isNaN(value), by); + return updateInternal(timestampNanos, value, DoublePrimitives.isNull(value), Double.isNaN(value), by); } private static boolean resetEma(boolean isNull, BadDataBehavior nullBehavior, boolean isNaN, - BadDataBehavior nanBehavior) { + BadDataBehavior nanBehavior) { return (isNull && nullBehavior.reset) || (isNaN && nanBehavior.reset); } private static boolean returnNan(boolean isNull, BadDataBehavior nullBehavior, boolean isNaN, - BadDataBehavior nanBehavior) { + BadDataBehavior nanBehavior) { return (isNull && nullBehavior.returnNan) || (isNaN && nanBehavior.returnNan); } - private static boolean processSample(boolean isNull, BadDataBehavior nullBehavior, - boolean isNaN, BadDataBehavior nanBehavior) { - return (!isNull && !isNaN) || (isNull && nullBehavior.process) - || (isNaN && nanBehavior.process); + private static boolean processSample(boolean isNull, BadDataBehavior nullBehavior, boolean isNaN, + BadDataBehavior nanBehavior) { + return (!isNull && !isNaN) || (isNull && nullBehavior.process) || (isNaN && nanBehavior.process); } - private synchronized double updateInternal(long timestampNanos, double value, boolean isNull, - boolean isNaN, Object... by) { + private synchronized double updateInternal(long timestampNanos, double value, boolean isNull, boolean isNaN, + Object... 
by) { Key key = new Key(by); AbstractMa ema = emas.get(key); diff --git a/Numerics/src/main/java/io/deephaven/numerics/movingaverages/ByEmaSimple.java b/Numerics/src/main/java/io/deephaven/numerics/movingaverages/ByEmaSimple.java index f90eac76e30..4bee78cc918 100644 --- a/Numerics/src/main/java/io/deephaven/numerics/movingaverages/ByEmaSimple.java +++ b/Numerics/src/main/java/io/deephaven/numerics/movingaverages/ByEmaSimple.java @@ -15,13 +15,13 @@ public class ByEmaSimple extends ByEma { private final AbstractMa.Mode mode; private final double timescaleNanos; - public ByEmaSimple(ByEma.BadDataBehavior nullBehavior, ByEma.BadDataBehavior nanBehavior, - AbstractMa.Mode mode, double timescale, TimeUnit timeUnit) { + public ByEmaSimple(ByEma.BadDataBehavior nullBehavior, ByEma.BadDataBehavior nanBehavior, AbstractMa.Mode mode, + double timescale, TimeUnit timeUnit) { this(nullBehavior, nanBehavior, AbstractMa.Type.LEVEL, mode, timescale, timeUnit); } - public ByEmaSimple(ByEma.BadDataBehavior nullBehavior, ByEma.BadDataBehavior nanBehavior, - AbstractMa.Type type, AbstractMa.Mode mode, double timescale, TimeUnit timeUnit) { + public ByEmaSimple(ByEma.BadDataBehavior nullBehavior, ByEma.BadDataBehavior nanBehavior, AbstractMa.Type type, + AbstractMa.Mode mode, double timescale, TimeUnit timeUnit) { super(nullBehavior, nanBehavior); this.type = type; this.mode = mode; diff --git a/Numerics/src/main/java/io/deephaven/numerics/movingaverages/Ema.java b/Numerics/src/main/java/io/deephaven/numerics/movingaverages/Ema.java index 893b927a3fb..ca150096748 100644 --- a/Numerics/src/main/java/io/deephaven/numerics/movingaverages/Ema.java +++ b/Numerics/src/main/java/io/deephaven/numerics/movingaverages/Ema.java @@ -39,8 +39,7 @@ protected void processDoubleLocal(long timestamp, double data) { emaState = 0; break; default: - throw new UnsupportedOperationException( - "Ema type is not yet supported: " + type); + throw new UnsupportedOperationException("Ema type is not yet 
supported: " + type); } initialized = true; diff --git a/Numerics/src/main/java/io/deephaven/numerics/movingaverages/ExponentiallyDecayedSum.java b/Numerics/src/main/java/io/deephaven/numerics/movingaverages/ExponentiallyDecayedSum.java index db335b11873..0f860d042fd 100644 --- a/Numerics/src/main/java/io/deephaven/numerics/movingaverages/ExponentiallyDecayedSum.java +++ b/Numerics/src/main/java/io/deephaven/numerics/movingaverages/ExponentiallyDecayedSum.java @@ -21,8 +21,8 @@ public class ExponentiallyDecayedSum implements Serializable { * Creates a new exponentially decayed sum. * * @param decayRate rate in milliseconds to decay the sum. - * @param enableTimestepOutOfOrderException true to allow an exception to be thrown when - * timesteps are not sequential. + * @param enableTimestepOutOfOrderException true to allow an exception to be thrown when timesteps are not + * sequential. */ public ExponentiallyDecayedSum(double decayRate, boolean enableTimestepOutOfOrderException) { this.decayRate = decayRate; @@ -34,7 +34,7 @@ public void processDouble(long timestamp, double data) { if (enableTimestepOutOfOrderException && lastTimestamp != Long.MIN_VALUE && dt < 0) { throw new IllegalStateException( - "Timesteps are out of order: timestamps=" + lastTimestamp + "," + timestamp); + "Timesteps are out of order: timestamps=" + lastTimestamp + "," + timestamp); } double weight = Math.exp(-dt / decayRate); diff --git a/Numerics/src/test/java/io/deephaven/numerics/derivatives/TestBlackScholes.java b/Numerics/src/test/java/io/deephaven/numerics/derivatives/TestBlackScholes.java index 85abd92d7a9..7b83740ed97 100644 --- a/Numerics/src/test/java/io/deephaven/numerics/derivatives/TestBlackScholes.java +++ b/Numerics/src/test/java/io/deephaven/numerics/derivatives/TestBlackScholes.java @@ -5,8 +5,8 @@ import org.junit.Test; /** - * Most of the values for the testcases are taken from the book "The Complete Guide to Option - * Pricing Formula" (Chapter 2 and 12) + * Most of the 
values for the testcases are taken from the book "The Complete Guide to Option Pricing Formula" (Chapter + * 2 and 12) * */ public class TestBlackScholes extends BaseArrayTestCase { @@ -14,250 +14,202 @@ public class TestBlackScholes extends BaseArrayTestCase { @Test public void testPrices() { // price - assertEquals(12.43, - Math.round(BlackScholes.price(true, 105, 100, 0.5, 0.1, 0, 0.36) * 100d) / 100d); - assertEquals(7.68, - Math.round(BlackScholes.price(false, 105, 100, 0.5, 0.1, 0, 0.36) * 100d) / 100d); + assertEquals(12.43, Math.round(BlackScholes.price(true, 105, 100, 0.5, 0.1, 0, 0.36) * 100d) / 100d); + assertEquals(7.68, Math.round(BlackScholes.price(false, 105, 100, 0.5, 0.1, 0, 0.36) * 100d) / 100d); } @Test public void testDelta() { // delta - assertEquals(0.5946, - Math.round(BlackScholes.delta(true, 105, 100, 0.5, 0.1, 0, 0.36) * 10000d) / 10000d); - assertEquals(-0.3566, - Math.round(BlackScholes.delta(false, 105, 100, 0.5, 0.1, 0, 0.36) * 10000d) / 10000d); + assertEquals(0.5946, Math.round(BlackScholes.delta(true, 105, 100, 0.5, 0.1, 0, 0.36) * 10000d) / 10000d); + assertEquals(-0.3566, Math.round(BlackScholes.delta(false, 105, 100, 0.5, 0.1, 0, 0.36) * 10000d) / 10000d); // finite difference approximation - assertEquals( - Math.round(BlackScholes.delta(true, 105, 100, 0.5, 0.1, 0, 0.36) * 10000d) / 10000d, - Math.round((BlackScholes.price(true, 105.0005, 100, 0.5, 0.1, 0, 0.36) - - BlackScholes.price(true, 105, 100, 0.5, 0.1, 0, 0.36)) * 2000d * 10000d) / 10000d); - assertEquals( - Math.round(BlackScholes.delta(false, 105, 100, 0.5, 0.1, 0, 0.36) * 10000d) / 10000d, - Math.round((BlackScholes.price(false, 105.0005, 100, 0.5, 0.1, 0, 0.36) - - BlackScholes.price(false, 105, 100, 0.5, 0.1, 0, 0.36)) * 2000d * 10000d) / 10000d); + assertEquals(Math.round(BlackScholes.delta(true, 105, 100, 0.5, 0.1, 0, 0.36) * 10000d) / 10000d, + Math.round((BlackScholes.price(true, 105.0005, 100, 0.5, 0.1, 0, 0.36) - + BlackScholes.price(true, 105, 100, 0.5, 0.1, 
0, 0.36)) * 2000d * 10000d) / 10000d); + assertEquals(Math.round(BlackScholes.delta(false, 105, 100, 0.5, 0.1, 0, 0.36) * 10000d) / 10000d, + Math.round((BlackScholes.price(false, 105.0005, 100, 0.5, 0.1, 0, 0.36) - + BlackScholes.price(false, 105, 100, 0.5, 0.1, 0, 0.36)) * 2000d * 10000d) / 10000d); } @Test public void testGamma() { // gamma - assertEquals(0.0278, - Math.round(BlackScholes.gamma(55, 60, 0.75, 0.1, 0.1, 0.3) * 10000d) / 10000d); + assertEquals(0.0278, Math.round(BlackScholes.gamma(55, 60, 0.75, 0.1, 0.1, 0.3) * 10000d) / 10000d); assertEquals(Math.round(0.0278 * 55 * 10000) / 10000d, - Math.round(BlackScholes.gammaP(55, 60, 0.75, 0.1, 0.1, 0.3) * 10000d) / 10000d, 2e-3); + Math.round(BlackScholes.gammaP(55, 60, 0.75, 0.1, 0.1, 0.3) * 10000d) / 10000d, 2e-3); // finite difference approximation - assertEquals( - Math.round(BlackScholes.gamma(55, 60, 0.75, 0.1, 0.1, 0.3) * 10000d) / 10000d, Math - .round((BlackScholes.delta(true, 55.0005, 60, 0.75, 0.1, 0.1, 0.3) - - BlackScholes.delta(true, 55, 60, 0.75, 0.1, 0.1, 0.3)) * 2000d * 10000d) - / 10000d); - assertEquals( - Math.round(BlackScholes.gamma(55, 60, 0.75, 0.1, 0.1, 0.3) * 10000d) / 10000d, Math - .round((BlackScholes.delta(false, 55.0005, 60, 0.75, 0.1, 0.1, 0.3) - - BlackScholes.delta(false, 55, 60, 0.75, 0.1, 0.1, 0.3)) * 2000d * 10000d) - / 10000d); + assertEquals(Math.round(BlackScholes.gamma(55, 60, 0.75, 0.1, 0.1, 0.3) * 10000d) / 10000d, + Math.round((BlackScholes.delta(true, 55.0005, 60, 0.75, 0.1, 0.1, 0.3) - + BlackScholes.delta(true, 55, 60, 0.75, 0.1, 0.1, 0.3)) * 2000d * 10000d) / 10000d); + assertEquals(Math.round(BlackScholes.gamma(55, 60, 0.75, 0.1, 0.1, 0.3) * 10000d) / 10000d, + Math.round((BlackScholes.delta(false, 55.0005, 60, 0.75, 0.1, 0.1, 0.3) - + BlackScholes.delta(false, 55, 60, 0.75, 0.1, 0.1, 0.3)) * 2000d * 10000d) / 10000d); assertEquals(BlackScholes.gammaP(55, 60, 0.75, 0.1, 0.1, 0.3), - (BlackScholes.delta(true, 55.005, 60, 0.75, 0.1, 0.1, 0.3) - - 
BlackScholes.delta(true, 55, 60, 0.75, 0.1, 0.1, 0.3)) - / (Math.log(55.005) - Math.log(55.00)), - 1e-4); + (BlackScholes.delta(true, 55.005, 60, 0.75, 0.1, 0.1, 0.3) - + BlackScholes.delta(true, 55, 60, 0.75, 0.1, 0.1, 0.3)) / (Math.log(55.005) - Math.log(55.00)), + 1e-4); assertEquals(BlackScholes.gammaP(55, 60, 0.75, 0.1, 0.1, 0.3), - (BlackScholes.delta(false, 55.005, 60, 0.75, 0.1, 0.1, 0.3) - - BlackScholes.delta(false, 55, 60, 0.75, 0.1, 0.1, 0.3)) - / (Math.log(55.005) - Math.log(55.00)), - 1e-4); + (BlackScholes.delta(false, 55.005, 60, 0.75, 0.1, 0.1, 0.3) - + BlackScholes.delta(false, 55, 60, 0.75, 0.1, 0.1, 0.3)) / (Math.log(55.005) - Math.log(55.00)), + 1e-4); } @Test public void testVega() { // vega - assertEquals(18.5027, - Math.round(BlackScholes.vega(55, 60, 0.75, 0.105, 0.0695, 0.3) * 10000d) / 10000d); + assertEquals(18.5027, Math.round(BlackScholes.vega(55, 60, 0.75, 0.105, 0.0695, 0.3) * 10000d) / 10000d); assertEquals(Math.round(18.5027 * 0.3 * 10000d) / 10000d, - Math.round(BlackScholes.vegaP(55, 60, 0.75, 0.105, 0.0695, 0.3) * 10000d) / 10000d); + Math.round(BlackScholes.vegaP(55, 60, 0.75, 0.105, 0.0695, 0.3) * 10000d) / 10000d); // not found in online options pricing calculator - assertEquals(-11.7142, - Math.round(BlackScholes.vegaBleed(55, 60, 0.75, 0.105, 0.0695, 0.3) * 10000d) / 10000d); + assertEquals(-11.7142, Math.round(BlackScholes.vegaBleed(55, 60, 0.75, 0.105, 0.0695, 0.3) * 10000d) / 10000d); // finite difference approximation assertEquals( - Math.round(BlackScholes.vega(55, 60, 0.75, 0.105, 0.0695, 0.3) * 1000d) / 1000d, - Math.round((BlackScholes.price(true, 55, 60, 0.75, 0.105, 0.0695, 0.3000000001) - - BlackScholes.price(true, 55, 60, 0.75, 0.105, 0.0695, 0.3)) * 10000000000d * 1000d) - / 1000d); + Math.round(BlackScholes.vega(55, 60, 0.75, 0.105, 0.0695, 0.3) * 1000d) / 1000d, Math + .round((BlackScholes.price(true, 55, 60, 0.75, 0.105, 0.0695, 0.3000000001) - + BlackScholes.price(true, 55, 60, 0.75, 0.105, 0.0695, 0.3)) * 
10000000000d * 1000d) + / 1000d); assertEquals( - Math.round(BlackScholes.vega(55, 60, 0.75, 0.105, 0.0695, 0.3) * 1000d) / 1000d, - Math.round((BlackScholes.price(false, 55, 60, 0.75, 0.105, 0.0695, 0.3000000001) - - BlackScholes.price(false, 55, 60, 0.75, 0.105, 0.0695, 0.3)) * 10000000000d * 1000d) - / 1000d); + Math.round(BlackScholes.vega(55, 60, 0.75, 0.105, 0.0695, 0.3) * 1000d) / 1000d, Math + .round((BlackScholes.price(false, 55, 60, 0.75, 0.105, 0.0695, 0.3000000001) - + BlackScholes.price(false, 55, 60, 0.75, 0.105, 0.0695, 0.3)) * 10000000000d * 1000d) + / 1000d); assertEquals(BlackScholes.vegaP(55, 60, 0.75, 0.105, 0.0695, 0.3), - (BlackScholes.price(true, 55, 60, 0.75, 0.105, 0.0695, 0.30001) - - BlackScholes.price(true, 55, 60, 0.75, 0.105, 0.0695, 0.3)) - / (Math.log(0.30001) - Math.log(0.30)), - 1e-3); + (BlackScholes.price(true, 55, 60, 0.75, 0.105, 0.0695, 0.30001) - + BlackScholes.price(true, 55, 60, 0.75, 0.105, 0.0695, 0.3)) + / (Math.log(0.30001) - Math.log(0.30)), + 1e-3); assertEquals(BlackScholes.vegaP(55, 60, 0.75, 0.105, 0.0695, 0.3), - (BlackScholes.price(false, 55, 60, 0.75, 0.105, 0.0695, 0.30001) - - BlackScholes.price(false, 55, 60, 0.75, 0.105, 0.0695, 0.3)) - / (Math.log(0.30001) - Math.log(0.30)), - 1e-3); - - assertEquals( - Math.round(BlackScholes.vegaBleed(55, 60, 0.75, 0.105, 0.0695, 0.3) * 10000d) / 10000d, - Math.round((BlackScholes.vega(55, 60, 0.75, 0.105, 0.0695, 0.3) - - BlackScholes.vega(55, 60, 0.75001, 0.105, 0.0695, 0.3)) * 100000d * 10000d) - / 10000d); - assertEquals( - Math.round(BlackScholes.vegaBleed(55, 60, 0.75, 0.105, 0.0695, 0.3) * 10000d) / 10000d, - Math.round((BlackScholes.vega(55, 60, 0.75, 0.105, 0.0695, 0.3) - - BlackScholes.vega(55, 60, 0.75001, 0.105, 0.0695, 0.3)) * 100000d * 10000d) - / 10000d); + (BlackScholes.price(false, 55, 60, 0.75, 0.105, 0.0695, 0.30001) - + BlackScholes.price(false, 55, 60, 0.75, 0.105, 0.0695, 0.3)) + / (Math.log(0.30001) - Math.log(0.30)), + 1e-3); + + 
assertEquals(Math.round(BlackScholes.vegaBleed(55, 60, 0.75, 0.105, 0.0695, 0.3) * 10000d) / 10000d, + Math.round((BlackScholes.vega(55, 60, 0.75, 0.105, 0.0695, 0.3) - + BlackScholes.vega(55, 60, 0.75001, 0.105, 0.0695, 0.3)) * 100000d * 10000d) / 10000d); + assertEquals(Math.round(BlackScholes.vegaBleed(55, 60, 0.75, 0.105, 0.0695, 0.3) * 10000d) / 10000d, + Math.round((BlackScholes.vega(55, 60, 0.75, 0.105, 0.0695, 0.3) - + BlackScholes.vega(55, 60, 0.75001, 0.105, 0.0695, 0.3)) * 100000d * 10000d) / 10000d); } @Test public void testVomma() { // vomma - assertEquals(92.3444, - Math.round(BlackScholes.vomma(90, 130, 0.75, 0.05, 0, 0.28) * 10000d) / 10000d); + assertEquals(92.3444, Math.round(BlackScholes.vomma(90, 130, 0.75, 0.05, 0, 0.28) * 10000d) / 10000d); assertEquals(2.5856 * 10, BlackScholes.vommaP(90, 130, 0.75, 0.05, 0, 0.28), 1e-3); // finite difference approximation - assertEquals( - Math.round(BlackScholes.vomma(90, 130, 0.75, 0.05, 0, 0.28) * 10000d) / 10000d, Math - .round((BlackScholes.vega(90, 130, 0.75, 0.05, 0, 0.2800001) - - BlackScholes.vega(90, 130, 0.75, 0.05, 0, 0.28)) * 10000000d * 10000d) - / 10000d); - assertEquals( - Math.round(BlackScholes.vomma(90, 130, 0.75, 0.05, 0, 0.28) * 10000d) / 10000d, Math - .round((BlackScholes.vega(90, 130, 0.75, 0.05, 0, 0.2800001) - - BlackScholes.vega(90, 130, 0.75, 0.05, 0, 0.28)) * 10000000d * 10000d) - / 10000d); + assertEquals(Math.round(BlackScholes.vomma(90, 130, 0.75, 0.05, 0, 0.28) * 10000d) / 10000d, + Math.round((BlackScholes.vega(90, 130, 0.75, 0.05, 0, 0.2800001) - + BlackScholes.vega(90, 130, 0.75, 0.05, 0, 0.28)) * 10000000d * 10000d) / 10000d); + assertEquals(Math.round(BlackScholes.vomma(90, 130, 0.75, 0.05, 0, 0.28) * 10000d) / 10000d, + Math.round((BlackScholes.vega(90, 130, 0.75, 0.05, 0, 0.2800001) - + BlackScholes.vega(90, 130, 0.75, 0.05, 0, 0.28)) * 10000000d * 10000d) / 10000d); assertEquals(BlackScholes.vommaP(90, 130, 0.75, 0.05, 0, 0.28), - (BlackScholes.vega(90, 130, 0.75, 
0.05, 0, 0.28001) - - BlackScholes.vega(90, 130, 0.75, 0.05, 0, 0.28)) - / (Math.log(0.28001) - Math.log(0.28)), - 1e-3); + (BlackScholes.vega(90, 130, 0.75, 0.05, 0, 0.28001) - BlackScholes.vega(90, 130, 0.75, 0.05, 0, 0.28)) + / (Math.log(0.28001) - Math.log(0.28)), + 1e-3); } @Test public void testCharm() { // charm - not found in online options pricing calculator - assertEquals(0.5052, - Math.round(BlackScholes.charm(true, 105, 90, 0.25, 0.14, 0, 0.24) * 10000d) / 10000d); - assertEquals(0.3700, - Math.round(BlackScholes.charm(false, 105, 90, 0.25, 0.14, 0, 0.24) * 10000d) / 10000d); + assertEquals(0.5052, Math.round(BlackScholes.charm(true, 105, 90, 0.25, 0.14, 0, 0.24) * 10000d) / 10000d); + assertEquals(0.3700, Math.round(BlackScholes.charm(false, 105, 90, 0.25, 0.14, 0, 0.24) * 10000d) / 10000d); // finite difference approximation assertEquals( - Math.round(BlackScholes.charm(true, 105, 90, 0.25, 0.14, 0, 0.24) * 10000d) / 10000d, - Math.round((BlackScholes.delta(true, 105, 90, 0.25, 0.14, 0, 0.24) - - BlackScholes.delta(true, 105, 90, 0.25001, 0.14, 0, 0.24)) * 100000d * 10000d) - / 10000d); + Math.round(BlackScholes.charm(true, 105, 90, 0.25, 0.14, 0, 0.24) * 10000d) / 10000d, Math + .round((BlackScholes.delta(true, 105, 90, 0.25, 0.14, 0, 0.24) - + BlackScholes.delta(true, 105, 90, 0.25001, 0.14, 0, 0.24)) * 100000d * 10000d) + / 10000d); assertEquals( - Math.round(BlackScholes.charm(false, 105, 90, 0.25, 0.14, 0, 0.24) * 10000d) / 10000d, - Math.round((BlackScholes.delta(false, 105, 90, 0.25, 0.14, 0, 0.24) - - BlackScholes.delta(false, 105, 90, 0.25001, 0.14, 0, 0.24)) * 100000d * 10000d) - / 10000d); + Math.round(BlackScholes.charm(false, 105, 90, 0.25, 0.14, 0, 0.24) * 10000d) / 10000d, Math + .round((BlackScholes.delta(false, 105, 90, 0.25, 0.14, 0, 0.24) - + BlackScholes.delta(false, 105, 90, 0.25001, 0.14, 0, 0.24)) * 100000d * 10000d) + / 10000d); } @Test public void testTheta() { // theta assertEquals(-37.9669, - 
Math.round(BlackScholes.theta(true, 430, 405, 0.0833, 0.07, 0.02, 0.2) * 10000d) - / 10000d); + Math.round(BlackScholes.theta(true, 430, 405, 0.0833, 0.07, 0.02, 0.2) * 10000d) / 10000d); assertEquals(-31.1924, - Math.round(BlackScholes.theta(false, 430, 405, 0.0833, 0.07, 0.02, 0.2) * 10000d) - / 10000d); + Math.round(BlackScholes.theta(false, 430, 405, 0.0833, 0.07, 0.02, 0.2) * 10000d) / 10000d); assertEquals(-32.6214, - Math.round(BlackScholes.driftlessTheta(430, 405, 0.0833, 0.07, 0.02, 0.2) * 10000d) - / 10000d); + Math.round(BlackScholes.driftlessTheta(430, 405, 0.0833, 0.07, 0.02, 0.2) * 10000d) / 10000d); // finite difference approximation - assertEquals( - Math.round(BlackScholes.theta(true, 430, 405, 0.0833, 0.07, 0.02, 0.2) * 1000d) / 1000d, - Math.round((BlackScholes.price(true, 430, 405, 0.0833, 0.07, 0.02, - 0.2) - BlackScholes.price(true, 430, 405, 0.083301, 0.07, 0.02, 0.2)) * 1000000d - * 1000d) / 1000d); - assertEquals( - Math.round(BlackScholes.theta(false, 430, 405, 0.0833, 0.07, 0.02, 0.2) * 1000d) - / 1000d, - Math.round((BlackScholes.price(false, 430, 405, 0.0833, 0.07, 0.02, - 0.2) - BlackScholes.price(false, 430, 405, 0.083301, 0.07, 0.02, 0.2)) * 1000000d - * 1000d) / 1000d); + assertEquals(Math.round(BlackScholes.theta(true, 430, 405, 0.0833, 0.07, 0.02, 0.2) * 1000d) / 1000d, + Math.round((BlackScholes.price(true, 430, 405, 0.0833, 0.07, 0.02, + 0.2) - BlackScholes.price(true, 430, 405, 0.083301, 0.07, 0.02, 0.2)) * 1000000d * 1000d) + / 1000d); + assertEquals(Math.round(BlackScholes.theta(false, 430, 405, 0.0833, 0.07, 0.02, 0.2) * 1000d) / 1000d, + Math.round((BlackScholes.price(false, 430, 405, 0.0833, 0.07, 0.02, + 0.2) - BlackScholes.price(false, 430, 405, 0.083301, 0.07, 0.02, 0.2)) * 1000000d * 1000d) + / 1000d); } @Test public void testRho() { // rho - assertEquals(38.7325, - Math.round(BlackScholes.rho(true, 72, 75, 1, 0.09, 0.09, 0.19) * 10000d) / 10000d); - assertEquals(-29.81, - Math.round(BlackScholes.rho(false, 72, 75, 1, 
0.09, 0.09, 0.19) * 100d) / 100d); + assertEquals(38.7325, Math.round(BlackScholes.rho(true, 72, 75, 1, 0.09, 0.09, 0.19) * 10000d) / 10000d); + assertEquals(-29.81, Math.round(BlackScholes.rho(false, 72, 75, 1, 0.09, 0.09, 0.19) * 100d) / 100d); // finite difference approximation + assertEquals(Math.round(BlackScholes.rho(true, 72, 75, 1, 0.09, 0.09, 0.19) * 1000d) / 1000d, + Math.round((BlackScholes.price(true, 72, 75, 1, 0.09000001, 0.09000001, 0.19) - + BlackScholes.price(true, 72, 75, 1, 0.09, 0.09, 0.19)) * 100000000d * 1000d) / 1000d); assertEquals( - Math.round(BlackScholes.rho(true, 72, 75, 1, 0.09, 0.09, 0.19) * 1000d) / 1000d, - Math.round((BlackScholes.price(true, 72, 75, 1, 0.09000001, 0.09000001, 0.19) - - BlackScholes.price(true, 72, 75, 1, 0.09, 0.09, 0.19)) * 100000000d * 1000d) - / 1000d); - assertEquals( - Math.round(BlackScholes.rho(false, 72, 75, 1, 0.09, 0.09, 0.19) * 1000d) / 1000d, - Math.round((BlackScholes.price(false, 72, 75, 1, 0.09000001, 0.09000001, - 0.19) - BlackScholes.price(false, 72, 75, 1, 0.09, 0.09, 0.19)) * 100000000d - * 1000d) / 1000d); + Math.round(BlackScholes.rho(false, 72, 75, 1, 0.09, 0.09, 0.19) * 1000d) / 1000d, Math + .round((BlackScholes.price(false, 72, 75, 1, 0.09000001, 0.09000001, + 0.19) - BlackScholes.price(false, 72, 75, 1, 0.09, 0.09, 0.19)) * 100000000d * 1000d) + / 1000d); } @Test public void testCarryRho() { // carryRho - not found in online options pricing calculator assertEquals(81.2219, - Math.round(BlackScholes.carryRho(true, 500, 490, 0.25, 0.08, 0.03, 0.15) * 10000d) - / 10000d); + Math.round(BlackScholes.carryRho(true, 500, 490, 0.25, 0.08, 0.03, 0.15) * 10000d) / 10000d); assertEquals(-42.2254, - Math.round(BlackScholes.carryRho(false, 500, 490, 0.25, 0.08, 0.03, 0.15) * 10000d) - / 10000d); + Math.round(BlackScholes.carryRho(false, 500, 490, 0.25, 0.08, 0.03, 0.15) * 10000d) / 10000d); // finite difference approximation - assertEquals( - Math.round(BlackScholes.carryRho(true, 500, 490, 0.25, 
0.08, 0.03, 0.15) * 1000d) - / 1000d, - Math.round((BlackScholes.price(true, 500, 490, 0.25, 0.08, 0.03000001, - 0.15) - BlackScholes.price(true, 500, 490, 0.25, 0.08, 0.03, 0.15)) * 100000000d - * 1000d) / 1000d); - assertEquals( - Math.round(BlackScholes.carryRho(false, 500, 490, 0.25, 0.08, 0.03, 0.15) * 1000d) - / 1000d, - Math.round((BlackScholes.price(false, 500, 490, 0.25, 0.08, - 0.03000001, 0.15) - BlackScholes.price(false, 500, 490, 0.25, 0.08, 0.03, 0.15)) - * 100000000d * 1000d) / 1000d); + assertEquals(Math.round(BlackScholes.carryRho(true, 500, 490, 0.25, 0.08, 0.03, 0.15) * 1000d) / 1000d, + Math.round((BlackScholes.price(true, 500, 490, 0.25, 0.08, 0.03000001, + 0.15) - BlackScholes.price(true, 500, 490, 0.25, 0.08, 0.03, 0.15)) * 100000000d * 1000d) + / 1000d); + assertEquals(Math.round(BlackScholes.carryRho(false, 500, 490, 0.25, 0.08, 0.03, 0.15) * 1000d) / 1000d, + Math.round((BlackScholes.price(false, 500, 490, 0.25, 0.08, + 0.03000001, 0.15) - BlackScholes.price(false, 500, 490, 0.25, 0.08, 0.03, 0.15)) * 100000000d + * 1000d) / 1000d); } @Test public void testStrikeDelta() { // strike-delta assertEquals(-0.5164, - Math.round(BlackScholes.strikeDelta(true, 72, 75, 1, 0.09, 0.09, 0.19) * 10000d) - / 10000d); + Math.round(BlackScholes.strikeDelta(true, 72, 75, 1, 0.09, 0.09, 0.19) * 10000d) / 10000d); assertEquals(0.3975, - Math.round(BlackScholes.strikeDelta(false, 72, 75, 1, 0.09, 0.09, 0.19) * 10000d) - / 10000d); + Math.round(BlackScholes.strikeDelta(false, 72, 75, 1, 0.09, 0.09, 0.19) * 10000d) / 10000d); // finite difference approximation + assertEquals(Math.round(BlackScholes.strikeDelta(true, 72, 75, 1, 0.09, 0.09, 0.19) * 10000d) / 10000d, + Math.round((BlackScholes.price(true, 72, 75.001, 1, 0.09, 0.09, 0.19) - + BlackScholes.price(true, 72, 75, 1, 0.09, 0.09, 0.19)) * 1000d * 10000d) / 10000d); assertEquals( - Math.round(BlackScholes.strikeDelta(true, 72, 75, 1, 0.09, 0.09, 0.19) * 10000d) - / 10000d, - 
Math.round((BlackScholes.price(true, 72, 75.001, 1, 0.09, 0.09, 0.19) - - BlackScholes.price(true, 72, 75, 1, 0.09, 0.09, 0.19)) * 1000d * 10000d) / 10000d); - assertEquals( - Math.round(BlackScholes.strikeDelta(false, 72, 75, 1, 0.09, 0.09, 0.19) * 10000d) - / 10000d, - Math.round((BlackScholes.price(false, 72, 75.001, 1, 0.09, 0.09, - 0.19) - BlackScholes.price(false, 72, 75, 1, 0.09, 0.09, 0.19)) * 1000d * 10000d) - / 10000d); + Math.round(BlackScholes.strikeDelta(false, 72, 75, 1, 0.09, 0.09, 0.19) * 10000d) / 10000d, Math + .round((BlackScholes.price(false, 72, 75.001, 1, 0.09, 0.09, + 0.19) - BlackScholes.price(false, 72, 75, 1, 0.09, 0.09, 0.19)) * 1000d * 10000d) + / 10000d); } @Test @@ -266,28 +218,24 @@ public void testStrikeFromDeltaBisect() { final double X = 1850; final double deltaCall = BlackScholes.delta(true, 1800, X, 0.25, 0.07, 0.04, 0.5); assertEquals(X, - Math.round( - BlackScholes.strikeFromDeltaBisect(deltaCall, true, 1800, 0.25, 0.07, 0.04, 0.5) - * 10000d) - / 10000d); + Math.round(BlackScholes.strikeFromDeltaBisect(deltaCall, true, 1800, 0.25, 0.07, 0.04, 0.5) * 10000d) + / 10000d); final double deltaPut = BlackScholes.delta(false, 1800, X, 0.25, 0.07, 0.04, 0.5); assertEquals(X, - Math.round( - BlackScholes.strikeFromDeltaBisect(deltaPut, false, 1800, 0.25, 0.07, 0.04, 0.5) - * 10000d) - / 10000d); + Math.round(BlackScholes.strikeFromDeltaBisect(deltaPut, false, 1800, 0.25, 0.07, 0.04, 0.5) * 10000d) + / 10000d); assertEquals(QueryConstants.NULL_DOUBLE, - BlackScholes.strikeFromDeltaBisect(deltaCall, null, 59, 60, 0.25, 0.067, 0.067)); + BlackScholes.strikeFromDeltaBisect(deltaCall, null, 59, 60, 0.25, 0.067, 0.067)); assertEquals(QueryConstants.NULL_DOUBLE, BlackScholes.strikeFromDeltaBisect(deltaCall, true, - QueryConstants.NULL_DOUBLE, 60, 0.25, 0.067, 0.067)); + QueryConstants.NULL_DOUBLE, 60, 0.25, 0.067, 0.067)); assertEquals(QueryConstants.NULL_DOUBLE, BlackScholes.strikeFromDeltaBisect(deltaCall, true, - 
QueryConstants.NULL_DOUBLE, 60, 0.25, 0.067, QueryConstants.NULL_DOUBLE)); - assertEquals(Double.NaN, - BlackScholes.strikeFromDeltaBisect(deltaCall, true, 59, 60, Double.NaN, 0.067, 0.067)); - assertEquals(X, Math.round(BlackScholes.strikeFromDeltaBisect(deltaPut, false, 1800, 0.25, - 0.07, 0.04, 0.5, 1e-3, 26) * 100d) / 100d); + QueryConstants.NULL_DOUBLE, 60, 0.25, 0.067, QueryConstants.NULL_DOUBLE)); + assertEquals(Double.NaN, BlackScholes.strikeFromDeltaBisect(deltaCall, true, 59, 60, Double.NaN, 0.067, 0.067)); + assertEquals(X, Math.round( + BlackScholes.strikeFromDeltaBisect(deltaPut, false, 1800, 0.25, 0.07, 0.04, 0.5, 1e-3, 26) * 100d) + / 100d); } @Test @@ -295,31 +243,25 @@ public void testImpliedVolatilityUsingBisection() { // implied volatility using bisection final double v = 0.25; final double priceCall = BlackScholes.price(true, 59, 60, 0.25, 0.067, 0.067, v); - assertEquals(v, - Math.round( - BlackScholes.impliedVolBisect(priceCall, true, 59, 60, 0.25, 0.067, 0.067) * 10000d) + assertEquals(v, Math.round(BlackScholes.impliedVolBisect(priceCall, true, 59, 60, 0.25, 0.067, 0.067) * 10000d) / 10000d); final double pricePut = BlackScholes.price(false, 59, 60, 0.25, 0.067, 0.067, v); - assertEquals(v, - Math.round( - BlackScholes.impliedVolBisect(pricePut, false, 59, 60, 0.25, 0.067, 0.067) * 10000d) + assertEquals(v, Math.round(BlackScholes.impliedVolBisect(pricePut, false, 59, 60, 0.25, 0.067, 0.067) * 10000d) / 10000d); assertEquals(v, - Math.round( - BlackScholes.impliedVolBisect(pricePut, false, 59, 60, 0.25, 0.067, 0.067, 1e-4, 16) - * 10000d) - / 10000d); + Math.round( + BlackScholes.impliedVolBisect(pricePut, false, 59, 60, 0.25, 0.067, 0.067, 1e-4, 16) * 10000d) + / 10000d); assertEquals(QueryConstants.NULL_DOUBLE, - BlackScholes.impliedVolBisect(pricePut, null, 59, 60, 0.25, 0.067, 0.067)); - assertEquals(QueryConstants.NULL_DOUBLE, BlackScholes.impliedVolBisect(pricePut, true, - QueryConstants.NULL_DOUBLE, 60, 0.25, 0.067, 0.067)); + 
BlackScholes.impliedVolBisect(pricePut, null, 59, 60, 0.25, 0.067, 0.067)); + assertEquals(QueryConstants.NULL_DOUBLE, + BlackScholes.impliedVolBisect(pricePut, true, QueryConstants.NULL_DOUBLE, 60, 0.25, 0.067, 0.067)); assertEquals(QueryConstants.NULL_DOUBLE, BlackScholes.impliedVolBisect(pricePut, true, - QueryConstants.NULL_DOUBLE, 60, 0.25, 0.067, QueryConstants.NULL_DOUBLE)); - assertEquals(Double.NaN, - BlackScholes.impliedVolBisect(pricePut, true, 59, 60, Double.NaN, 0.067, 0.067)); + QueryConstants.NULL_DOUBLE, 60, 0.25, 0.067, QueryConstants.NULL_DOUBLE)); + assertEquals(Double.NaN, BlackScholes.impliedVolBisect(pricePut, true, 59, 60, Double.NaN, 0.067, 0.067)); assertEquals(0d, BlackScholes.impliedVolBisect(0, true, 59, 60, 0.25, 0.067, 0.067)); } @@ -328,30 +270,23 @@ public void testImpliedVolatilityUsingNewtonRaphson() { // implied volatility using Newton-Raphson final double v = 0.25; final double priceCall = BlackScholes.price(true, 59, 60, 0.25, 0.067, 0.067, v); - assertEquals(v, - Math.round( - BlackScholes.impliedVolNewton(priceCall, true, 59, 60, 0.25, 0.067, 0.067) * 10000d) + assertEquals(v, Math.round(BlackScholes.impliedVolNewton(priceCall, true, 59, 60, 0.25, 0.067, 0.067) * 10000d) / 10000d); final double pricePut = BlackScholes.price(false, 59, 60, 0.25, 0.067, 0.067, v); - assertEquals(v, - Math.round( - BlackScholes.impliedVolNewton(pricePut, false, 59, 60, 0.25, 0.067, 0.067) * 10000d) + assertEquals(v, Math.round(BlackScholes.impliedVolNewton(pricePut, false, 59, 60, 0.25, 0.067, 0.067) * 10000d) / 10000d); assertEquals(v, - Math.round( - BlackScholes.impliedVolNewton(pricePut, false, 59, 60, 0.25, 0.067, 0.067, 1e-4, 2) - * 10000d) - / 10000d); + Math.round(BlackScholes.impliedVolNewton(pricePut, false, 59, 60, 0.25, 0.067, 0.067, 1e-4, 2) * 10000d) + / 10000d); assertEquals(QueryConstants.NULL_DOUBLE, - BlackScholes.impliedVolBisect(pricePut, null, 59, 60, 0.25, 0.067, 0.067)); - assertEquals(QueryConstants.NULL_DOUBLE, 
BlackScholes.impliedVolNewton(pricePut, true, - QueryConstants.NULL_DOUBLE, 60, 0.25, 0.067, 0.067)); + BlackScholes.impliedVolBisect(pricePut, null, 59, 60, 0.25, 0.067, 0.067)); + assertEquals(QueryConstants.NULL_DOUBLE, + BlackScholes.impliedVolNewton(pricePut, true, QueryConstants.NULL_DOUBLE, 60, 0.25, 0.067, 0.067)); assertEquals(QueryConstants.NULL_DOUBLE, BlackScholes.impliedVolNewton(pricePut, true, - QueryConstants.NULL_DOUBLE, 60, 0.25, 0.067, QueryConstants.NULL_DOUBLE)); - assertEquals(Double.NaN, - BlackScholes.impliedVolNewton(pricePut, true, 59, 60, Double.NaN, 0.067, 0.067)); + QueryConstants.NULL_DOUBLE, 60, 0.25, 0.067, QueryConstants.NULL_DOUBLE)); + assertEquals(Double.NaN, BlackScholes.impliedVolNewton(pricePut, true, 59, 60, Double.NaN, 0.067, 0.067)); assertEquals(0d, BlackScholes.impliedVolNewton(0, true, 59, 60, 0.25, 0.067, 0.067)); } @@ -359,31 +294,26 @@ public void testImpliedVolatilityUsingNewtonRaphson() { public void testImpliedVolatilityUsingNewtonRaphsonP() { final double v = 0.25; final double priceCall = BlackScholes.price(true, 59, 60, 0.25, 0.067, 0.067, v); - assertEquals(v, Math.round( - BlackScholes.impliedVolNewtonP(priceCall, true, 59, 60, 0.25, 0.067, 0.067) * 10000d) - / 10000d); - assertEquals(v, Math.round( - BlackScholes.impliedVolNewtonP(priceCall, true, 59, 60, 0.25, 0.067, 0.067) * 10000d) - / 10000d); + assertEquals(v, Math.round(BlackScholes.impliedVolNewtonP(priceCall, true, 59, 60, 0.25, 0.067, 0.067) * 10000d) + / 10000d); + assertEquals(v, Math.round(BlackScholes.impliedVolNewtonP(priceCall, true, 59, 60, 0.25, 0.067, 0.067) * 10000d) + / 10000d); final double pricePut = BlackScholes.price(false, 59, 60, 0.25, 0.067, 0.067, v); - assertEquals(v, Math.round( - BlackScholes.impliedVolNewtonP(pricePut, false, 59, 60, 0.25, 0.067, 0.067) * 10000d) - / 10000d); - assertEquals(v, - Math.round( - BlackScholes.impliedVolNewtonP(pricePut, false, 59, 60, 0.25, 0.067, 0.067, 1e-4, 3) - * 10000d) + assertEquals(v, 
Math.round(BlackScholes.impliedVolNewtonP(pricePut, false, 59, 60, 0.25, 0.067, 0.067) * 10000d) / 10000d); + assertEquals(v, + Math.round( + BlackScholes.impliedVolNewtonP(pricePut, false, 59, 60, 0.25, 0.067, 0.067, 1e-4, 3) * 10000d) + / 10000d); assertEquals(QueryConstants.NULL_DOUBLE, - BlackScholes.impliedVolNewtonP(pricePut, null, 59, 60, 0.25, 0.067, 0.067)); - assertEquals(QueryConstants.NULL_DOUBLE, BlackScholes.impliedVolNewtonP(pricePut, true, - QueryConstants.NULL_DOUBLE, 60, 0.25, 0.067, 0.067)); + BlackScholes.impliedVolNewtonP(pricePut, null, 59, 60, 0.25, 0.067, 0.067)); + assertEquals(QueryConstants.NULL_DOUBLE, + BlackScholes.impliedVolNewtonP(pricePut, true, QueryConstants.NULL_DOUBLE, 60, 0.25, 0.067, 0.067)); assertEquals(QueryConstants.NULL_DOUBLE, BlackScholes.impliedVolNewtonP(pricePut, true, - QueryConstants.NULL_DOUBLE, 60, 0.25, 0.067, QueryConstants.NULL_DOUBLE)); - assertEquals(Double.NaN, - BlackScholes.impliedVolNewtonP(pricePut, true, 59, 60, Double.NaN, 0.067, 0.067)); + QueryConstants.NULL_DOUBLE, 60, 0.25, 0.067, QueryConstants.NULL_DOUBLE)); + assertEquals(Double.NaN, BlackScholes.impliedVolNewtonP(pricePut, true, 59, 60, Double.NaN, 0.067, 0.067)); assertEquals(0d, BlackScholes.impliedVolNewtonP(0, true, 59, 60, 0.25, 0.067, 0.067)); } } diff --git a/Numerics/src/test/java/io/deephaven/numerics/interpolation/InterpolatorTest.java b/Numerics/src/test/java/io/deephaven/numerics/interpolation/InterpolatorTest.java index b98c81a16d3..3375286fe4a 100644 --- a/Numerics/src/test/java/io/deephaven/numerics/interpolation/InterpolatorTest.java +++ b/Numerics/src/test/java/io/deephaven/numerics/interpolation/InterpolatorTest.java @@ -17,8 +17,7 @@ public void testNearest() { final double extrapolateDistance = 0.01; final int nSteps = 95; final int nStepsInterp = 100; - final Interpolator.InterpolationAlgorithm method = - Interpolator.InterpolationAlgorithm.SPLINE; + final Interpolator.InterpolationAlgorithm method = 
Interpolator.InterpolationAlgorithm.SPLINE; t1(tol, nSteps, nStepsInterp, extrapolateDistance, method); } @@ -28,8 +27,7 @@ public void testLinear() { final double extrapolateDistance = 0.1; final int nSteps = 40; final int nStepsInterp = 100; - final Interpolator.InterpolationAlgorithm method = - Interpolator.InterpolationAlgorithm.LINEAR; + final Interpolator.InterpolationAlgorithm method = Interpolator.InterpolationAlgorithm.LINEAR; t1(tol, nSteps, nStepsInterp, extrapolateDistance, method); } @@ -39,8 +37,7 @@ public void testPchip() { final double extrapolateDistance = 0.1; final int nSteps = 35; final int nStepsInterp = 100; - final Interpolator.InterpolationAlgorithm method = - Interpolator.InterpolationAlgorithm.PCHIP; + final Interpolator.InterpolationAlgorithm method = Interpolator.InterpolationAlgorithm.PCHIP; t1(tol, nSteps, nStepsInterp, extrapolateDistance, method); } @@ -50,14 +47,13 @@ public void testSpline() { final double extrapolateDistance = 0.1; final int nSteps = 20; final int nStepsInterp = 100; - final Interpolator.InterpolationAlgorithm method = - Interpolator.InterpolationAlgorithm.SPLINE; + final Interpolator.InterpolationAlgorithm method = Interpolator.InterpolationAlgorithm.SPLINE; t1(tol, nSteps, nStepsInterp, extrapolateDistance, method); } private void t1(double tol, int nSteps, int nStepsInterp, double extrapolateDistance, - Interpolator.InterpolationAlgorithm method) { + Interpolator.InterpolationAlgorithm method) { final boolean extrapolate = true; final double xMin = extrapolateDistance; @@ -106,8 +102,7 @@ public void testNonExtrapolation() { double[] x = new double[] {1, 2, 3, 4, 5}; double[] y = new double[] {2, 3, 2, 1, 1}; double[] xi = new double[] {0, 2, 4, 6}; - double[] yi = - Interpolator.interpolate(x, y, xi, Interpolator.InterpolationAlgorithm.PCHIP, false); + double[] yi = Interpolator.interpolate(x, y, xi, Interpolator.InterpolationAlgorithm.PCHIP, false); assertTrue(Double.isNaN(yi[0])); 
assertTrue(Double.isNaN(yi[3])); } @@ -116,8 +111,7 @@ public void testNulls() { double[] x = new double[] {1, 2, 3, 4, 5}; double[] y = new double[] {2, 3, 2, 1, 1}; double[] xi = new double[] {1, 2, QueryConstants.NULL_DOUBLE, 5}; - double[] yi = - Interpolator.interpolate(x, y, xi, Interpolator.InterpolationAlgorithm.PCHIP, false); + double[] yi = Interpolator.interpolate(x, y, xi, Interpolator.InterpolationAlgorithm.PCHIP, false); assertFalse(yi[0] == QueryConstants.NULL_DOUBLE); assertFalse(yi[1] == QueryConstants.NULL_DOUBLE); assertTrue(yi[2] == QueryConstants.NULL_DOUBLE); diff --git a/Numerics/src/test/java/io/deephaven/numerics/movingaverages/ByEmaTest.java b/Numerics/src/test/java/io/deephaven/numerics/movingaverages/ByEmaTest.java index 1a077bff8bc..2f67cd663a4 100644 --- a/Numerics/src/test/java/io/deephaven/numerics/movingaverages/ByEmaTest.java +++ b/Numerics/src/test/java/io/deephaven/numerics/movingaverages/ByEmaTest.java @@ -29,8 +29,8 @@ private MA(RecordingMockObject logger, ByEma.Key key) { @Override protected void processDoubleLocal(long timestamp, double data) { nSamples++; - logger.recordActivity("MA.processDoubleLocal(" + key + "," + timestamp + "," + data - + ") = " + nSamples + "\n"); + logger.recordActivity( + "MA.processDoubleLocal(" + key + "," + timestamp + "," + data + ") = " + nSamples + "\n"); } @Override @@ -74,8 +74,7 @@ public void testEverything() { BE emaActual = new BE(nullBehavior, nanBehavior); RecordingMockObject target = new RecordingMockObject(); - assertEquals(target.getActivityRecordAndReset(), - emaActual.logger.getActivityRecordAndReset()); + assertEquals(target.getActivityRecordAndReset(), emaActual.logger.getActivityRecordAndReset()); DBDateTime ts0 = new DBDateTime(DAY * 1000000); ByEma.Key k0 = new ByEma.Key("A", "B"); @@ -83,15 +82,13 @@ public void testEverything() { ma0.processDoubleLocal(ts0.getNanos(), 1); ma0.getCurrent(); emaActual.update(ts0, 1, "A", "B"); - 
assertEquals(target.getActivityRecordAndReset(), - emaActual.logger.getActivityRecordAndReset()); + assertEquals(target.getActivityRecordAndReset(), emaActual.logger.getActivityRecordAndReset()); DBDateTime ts1 = new DBDateTime(2 * DAY * 1000000); ma0.processDoubleLocal(ts1.getNanos(), 2); ma0.getCurrent(); emaActual.update(ts1, 2, "A", "B"); - assertEquals(target.getActivityRecordAndReset(), - emaActual.logger.getActivityRecordAndReset()); + assertEquals(target.getActivityRecordAndReset(), emaActual.logger.getActivityRecordAndReset()); DBDateTime ts2 = new DBDateTime(3 * DAY * 1000000); ByEma.Key k1 = new ByEma.Key("A", "C"); @@ -99,62 +96,53 @@ public void testEverything() { ma1.processDoubleLocal(ts2.getNanos(), 3); ma1.getCurrent(); emaActual.update(ts2, 3, "A", "C"); - assertEquals(target.getActivityRecordAndReset(), - emaActual.logger.getActivityRecordAndReset()); + assertEquals(target.getActivityRecordAndReset(), emaActual.logger.getActivityRecordAndReset()); DBDateTime ts3 = new DBDateTime(4 * DAY * 1000000); ma0.processDoubleLocal(ts3.getNanos(), 4); ma0.getCurrent(); emaActual.update(ts3, 4, "A", "B"); - assertEquals(target.getActivityRecordAndReset(), - emaActual.logger.getActivityRecordAndReset()); + assertEquals(target.getActivityRecordAndReset(), emaActual.logger.getActivityRecordAndReset()); DBDateTime ts4 = new DBDateTime(5 * DAY * 1000000); MA ma2 = new MA(target, k0); emaActual.update(ts4, NULL, "A", "B"); - assertEquals(target.getActivityRecordAndReset(), - emaActual.logger.getActivityRecordAndReset()); + assertEquals(target.getActivityRecordAndReset(), emaActual.logger.getActivityRecordAndReset()); DBDateTime ts5 = new DBDateTime(6 * DAY * 1000000); ma1.processDoubleLocal(ts5.getNanos(), 6); ma1.getCurrent(); emaActual.update(ts5, 6, "A", "C"); - assertEquals(target.getActivityRecordAndReset(), - emaActual.logger.getActivityRecordAndReset()); + assertEquals(target.getActivityRecordAndReset(), emaActual.logger.getActivityRecordAndReset()); 
DBDateTime ts6 = new DBDateTime(5 * DAY * 1000000); ma2.processDoubleLocal(ts6.getNanos(), 7); ma2.getCurrent(); emaActual.update(ts6, 7, "A", "B"); - assertEquals(target.getActivityRecordAndReset(), - emaActual.logger.getActivityRecordAndReset()); + assertEquals(target.getActivityRecordAndReset(), emaActual.logger.getActivityRecordAndReset()); DBDateTime ts7 = new DBDateTime(6 * DAY * 1000000); MA ma3 = new MA(target, k0); emaActual.update(ts7, NAN, "A", "B"); - assertEquals(target.getActivityRecordAndReset(), - emaActual.logger.getActivityRecordAndReset()); + assertEquals(target.getActivityRecordAndReset(), emaActual.logger.getActivityRecordAndReset()); DBDateTime ts8 = new DBDateTime(7 * DAY * 1000000); ma3.processDoubleLocal(ts8.getNanos(), 8); ma3.getCurrent(); emaActual.update(ts8, 8, "A", "B"); - assertEquals(target.getActivityRecordAndReset(), - emaActual.logger.getActivityRecordAndReset()); + assertEquals(target.getActivityRecordAndReset(), emaActual.logger.getActivityRecordAndReset()); DBDateTime ts9 = new DBDateTime(8 * DAY * 1000000); ma1.processDoubleLocal(ts9.getNanos(), 8); ma1.getCurrent(); emaActual.update(ts9, 8, "A", "C"); - assertEquals(target.getActivityRecordAndReset(), - emaActual.logger.getActivityRecordAndReset()); + assertEquals(target.getActivityRecordAndReset(), emaActual.logger.getActivityRecordAndReset()); // test no time version DBDateTime ts10 = new DBDateTime(9 * DAY * 1000000); ma1.processDoubleLocal(Long.MIN_VALUE, 9); ma1.getCurrent(); emaActual.update(9, "A", "C"); - assertEquals(target.getActivityRecordAndReset(), - emaActual.logger.getActivityRecordAndReset()); + assertEquals(target.getActivityRecordAndReset(), emaActual.logger.getActivityRecordAndReset()); } } diff --git a/Numerics/src/test/java/io/deephaven/numerics/movingaverages/EmaArrayTest.java b/Numerics/src/test/java/io/deephaven/numerics/movingaverages/EmaArrayTest.java index 3065f7d2afa..fb1a4895443 100644 --- 
a/Numerics/src/test/java/io/deephaven/numerics/movingaverages/EmaArrayTest.java +++ b/Numerics/src/test/java/io/deephaven/numerics/movingaverages/EmaArrayTest.java @@ -75,15 +75,13 @@ public void testDifferenceTick() { emas.processDouble(2, x2); assertEquals(2, emas.getLastTimestamp()); assertEquals(2, emas.getCurrent().length); - assertEquals(x1 * alphas[0] * (1 - alphas[0]) + x2 * (1 - alphas[0]), emas.getCurrent()[0], - tol); - assertEquals(x1 * alphas[1] * (1 - alphas[1]) + x2 * (1 - alphas[1]), emas.getCurrent()[1], - tol); + assertEquals(x1 * alphas[0] * (1 - alphas[0]) + x2 * (1 - alphas[0]), emas.getCurrent()[0], tol); + assertEquals(x1 * alphas[1] * (1 - alphas[1]) + x2 * (1 - alphas[1]), emas.getCurrent()[1], tol); } /** - * Make sure that smaller timescales correspond to faster moving averages. The smallest double - * should correspond to no averaging. + * Make sure that smaller timescales correspond to faster moving averages. The smallest double should correspond to + * no averaging. */ public void testTimescales() { double[] timeScales = {Double.MIN_VALUE}; diff --git a/Numerics/src/test/java/io/deephaven/numerics/movingaverages/EmaTest.java b/Numerics/src/test/java/io/deephaven/numerics/movingaverages/EmaTest.java index 945e23da1ac..3b323887029 100644 --- a/Numerics/src/test/java/io/deephaven/numerics/movingaverages/EmaTest.java +++ b/Numerics/src/test/java/io/deephaven/numerics/movingaverages/EmaTest.java @@ -72,8 +72,8 @@ public void testDifferenceTick() { } /** - * Make sure that smaller timescales correspond to faster moving averages. The smallest double - * should correspond to no averaging. + * Make sure that smaller timescales correspond to faster moving averages. The smallest double should correspond to + * no averaging. 
*/ public void testTimescales() { double timeScale = Double.MIN_VALUE; diff --git a/Numerics/src/test/java/io/deephaven/numerics/movingaverages/ExponentiallyDecayedSumTest.java b/Numerics/src/test/java/io/deephaven/numerics/movingaverages/ExponentiallyDecayedSumTest.java index e811ff25b38..2c2abc8ee98 100644 --- a/Numerics/src/test/java/io/deephaven/numerics/movingaverages/ExponentiallyDecayedSumTest.java +++ b/Numerics/src/test/java/io/deephaven/numerics/movingaverages/ExponentiallyDecayedSumTest.java @@ -31,8 +31,8 @@ public void testEverything() { double value3 = 8; eds.processDouble(time3, value3); assertEquals(time3, eds.getLastTimestamp()); - assertEquals(value3 + Math.exp(-(time3 - time2) / rate) * value2 - + Math.exp(-(time3 - time1) / rate) * value1, eds.getValue(), tol); + assertEquals(value3 + Math.exp(-(time3 - time2) / rate) * value2 + Math.exp(-(time3 - time1) / rate) * value1, + eds.getValue(), tol); eds.reset(); diff --git a/Parquet/src/main/java/io/deephaven/parquet/AbstractBulkValuesWriter.java b/Parquet/src/main/java/io/deephaven/parquet/AbstractBulkValuesWriter.java index 8c1ff705025..30a9c00c859 100644 --- a/Parquet/src/main/java/io/deephaven/parquet/AbstractBulkValuesWriter.java +++ b/Parquet/src/main/java/io/deephaven/parquet/AbstractBulkValuesWriter.java @@ -7,23 +7,19 @@ import java.io.IOException; import java.nio.IntBuffer; -public abstract class AbstractBulkValuesWriter extends ValuesWriter - implements BulkWriter { +public abstract class AbstractBulkValuesWriter extends ValuesWriter implements BulkWriter { @Override - public int writeBulkVector(T bulkValues, IntBuffer repeatCount, - RunLengthBitPackingHybridEncoder rlEncoder, - RunLengthBitPackingHybridEncoder dlEncoder, int nonNullValueCount, L nullValue) - throws IOException { - IntBuffer nullsOffsets = - writeBulkFilterNulls(bulkValues, nullValue, nonNullValueCount).nullOffsets; + public int writeBulkVector(T bulkValues, IntBuffer repeatCount, RunLengthBitPackingHybridEncoder 
rlEncoder, + RunLengthBitPackingHybridEncoder dlEncoder, int nonNullValueCount, L nullValue) throws IOException { + IntBuffer nullsOffsets = writeBulkFilterNulls(bulkValues, nullValue, nonNullValueCount).nullOffsets; return applyDlAndRl(repeatCount, rlEncoder, dlEncoder, nullsOffsets); } @NotNull int applyDlAndRl(IntBuffer repeatCount, RunLengthBitPackingHybridEncoder rlEncoder, - RunLengthBitPackingHybridEncoder dlEncoder, - IntBuffer nullsOffsets) throws IOException { + RunLengthBitPackingHybridEncoder dlEncoder, + IntBuffer nullsOffsets) throws IOException { int valueCount = 0; int leafCount = 0; @@ -40,8 +36,7 @@ int applyDlAndRl(IntBuffer repeatCount, RunLengthBitPackingHybridEncoder rlEncod dlEncoder.writeInt(1); } else { if (leafCount == nextNullOffset) { - nextNullOffset = - nullsOffsets.hasRemaining() ? nullsOffsets.get() : Integer.MAX_VALUE; + nextNullOffset = nullsOffsets.hasRemaining() ? nullsOffsets.get() : Integer.MAX_VALUE; dlEncoder.writeInt(2); } else { dlEncoder.writeInt(3); @@ -52,8 +47,7 @@ int applyDlAndRl(IntBuffer repeatCount, RunLengthBitPackingHybridEncoder rlEncod rlEncoder.writeInt(0); for (short i = 1; i < length; i++) { if (leafCount++ == nextNullOffset) { - nextNullOffset = - nullsOffsets.hasRemaining() ? nullsOffsets.get() : Integer.MAX_VALUE; + nextNullOffset = nullsOffsets.hasRemaining() ? 
nullsOffsets.get() : Integer.MAX_VALUE; dlEncoder.writeInt(2); } else { dlEncoder.writeInt(3); diff --git a/Parquet/src/main/java/io/deephaven/parquet/BulkWriter.java b/Parquet/src/main/java/io/deephaven/parquet/BulkWriter.java index 762770f636f..c4cd588bf05 100644 --- a/Parquet/src/main/java/io/deephaven/parquet/BulkWriter.java +++ b/Parquet/src/main/java/io/deephaven/parquet/BulkWriter.java @@ -7,8 +7,8 @@ import java.nio.IntBuffer; /** - * Provides the option to write values of specific type in bulk The concrete type of the bulkValue - * object depends in the specific implementation + * Provides the option to write values of specific type in bulk The concrete type of the bulkValue object depends in the + * specific implementation */ public interface BulkWriter { void writeBulk(T bulkValues, int rowCount); @@ -28,12 +28,11 @@ public WriteResult(int valueCount, IntBuffer nullOffsets) { } } - WriteResult writeBulkFilterNulls(T bulkValues, L nullValue, - RunLengthBitPackingHybridEncoder dlEncoder, int rowCount) throws IOException; + WriteResult writeBulkFilterNulls(T bulkValues, L nullValue, RunLengthBitPackingHybridEncoder dlEncoder, + int rowCount) throws IOException; - int writeBulkVector(T bulkValues, IntBuffer repeatCount, - RunLengthBitPackingHybridEncoder rlEncoder, RunLengthBitPackingHybridEncoder dlEncoder, - int nonNullValueCount, L nullValue) throws IOException; + int writeBulkVector(T bulkValues, IntBuffer repeatCount, RunLengthBitPackingHybridEncoder rlEncoder, + RunLengthBitPackingHybridEncoder dlEncoder, int nonNullValueCount, L nullValue) throws IOException; WriteResult writeBulkFilterNulls(T bulkValues, L nullValue, int rowCount); diff --git a/Parquet/src/main/java/io/deephaven/parquet/ColumnChunkReader.java b/Parquet/src/main/java/io/deephaven/parquet/ColumnChunkReader.java index fc201cdb8b0..3d0665e8353 100644 --- a/Parquet/src/main/java/io/deephaven/parquet/ColumnChunkReader.java +++ 
b/Parquet/src/main/java/io/deephaven/parquet/ColumnChunkReader.java @@ -8,8 +8,7 @@ public interface ColumnChunkReader { /** - * @return -1 if the current column doesn't guarantee fixed page size, otherwise the fixed page - * size + * @return -1 if the current column doesn't guarantee fixed page size, otherwise the fixed page size */ int getPageFixedSize(); @@ -24,8 +23,8 @@ public interface ColumnChunkReader { long numValues(); /** - * @return The depth of the number of nested repeated fields this column is a part of. 0 means - * this is a simple (non-repeating) field, 1 means this is a flat array. + * @return The depth of the number of nested repeated fields this column is a part of. 0 means this is a simple + * (non-repeating) field, 1 means this is a flat array. */ int getMaxRl(); diff --git a/Parquet/src/main/java/io/deephaven/parquet/ColumnChunkReaderImpl.java b/Parquet/src/main/java/io/deephaven/parquet/ColumnChunkReaderImpl.java index c1d7fd71971..7b117bf6204 100644 --- a/Parquet/src/main/java/io/deephaven/parquet/ColumnChunkReaderImpl.java +++ b/Parquet/src/main/java/io/deephaven/parquet/ColumnChunkReaderImpl.java @@ -40,20 +40,18 @@ public class ColumnChunkReaderImpl implements ColumnChunkReader { private Dictionary dictionary; - ColumnChunkReaderImpl(ColumnChunk columnChunk, SeekableChannelsProvider channelsProvider, - Path rootPath, ThreadLocal codecFactory, MessageType type, - OffsetIndex offsetIndex, List fieldTypes) { + ColumnChunkReaderImpl(ColumnChunk columnChunk, SeekableChannelsProvider channelsProvider, Path rootPath, + ThreadLocal codecFactory, MessageType type, OffsetIndex offsetIndex, List fieldTypes) { this.channelsProvider = channelsProvider; this.columnChunk = columnChunk; this.rootPath = rootPath; - this.path = type - .getColumnDescription(columnChunk.meta_data.getPath_in_schema().toArray(new String[0])); + this.path = type.getColumnDescription(columnChunk.meta_data.getPath_in_schema().toArray(new String[0])); if 
(columnChunk.getMeta_data().isSetCodec()) { - decompressor = ThreadLocal.withInitial(() -> codecFactory.get().getDecompressor( - CompressionCodecName.valueOf(columnChunk.getMeta_data().getCodec().name()))); + decompressor = ThreadLocal.withInitial(() -> codecFactory.get() + .getDecompressor(CompressionCodecName.valueOf(columnChunk.getMeta_data().getCodec().name()))); } else { - decompressor = ThreadLocal.withInitial( - () -> codecFactory.get().getDecompressor(CompressionCodecName.UNCOMPRESSED)); + decompressor = ThreadLocal + .withInitial(() -> codecFactory.get().getDecompressor(CompressionCodecName.UNCOMPRESSED)); } this.offsetIndex = offsetIndex; this.fieldTypes = fieldTypes; @@ -86,8 +84,8 @@ public ColumnPageReaderIterator getPageIterator() throws IOException { final Dictionary dictionary = getDictionary(readChannel); if (offsetIndex == null) { return new ColumnPageReaderIteratorImpl(readChannel, - dataPageOffset, columnChunk.getMeta_data().getNum_values(), path, - dictionary, channelsProvider); + dataPageOffset, columnChunk.getMeta_data().getNum_values(), path, + dictionary, channelsProvider); } else { readChannel.close(); return new ColumnPageReaderIteratorIndexImpl(path, dictionary, channelsProvider); @@ -111,13 +109,13 @@ public boolean usesDictionaryOnEveryPage() { } for (final PageEncodingStats encodingStat : columnMeta.encoding_stats) { if (encodingStat.page_type != PageType.DATA_PAGE - && encodingStat.page_type != PageType.DATA_PAGE_V2) { + && encodingStat.page_type != PageType.DATA_PAGE_V2) { // Not a data page, skip continue; } // This is a data page if (encodingStat.encoding != PLAIN_DICTIONARY - && encodingStat.encoding != RLE_DICTIONARY) { + && encodingStat.encoding != RLE_DICTIONARY) { return false; } } @@ -144,10 +142,9 @@ private Dictionary getDictionary(SeekableByteChannel readChannel) throws IOExcep if (chunkMeta.isSetDictionary_page_offset()) { dictionaryPageOffset = chunkMeta.getDictionary_page_offset(); } else if 
((chunkMeta.isSetEncoding_stats() && (chunkMeta.getEncoding_stats().stream() - .anyMatch(pes -> pes.getEncoding() == PLAIN_DICTIONARY - || pes.getEncoding() == RLE_DICTIONARY))) - || (chunkMeta.isSetEncodings() && (chunkMeta.getEncodings().stream() - .anyMatch(en -> en == PLAIN_DICTIONARY || en == RLE_DICTIONARY)))) { + .anyMatch(pes -> pes.getEncoding() == PLAIN_DICTIONARY || pes.getEncoding() == RLE_DICTIONARY))) + || (chunkMeta.isSetEncodings() && (chunkMeta.getEncodings().stream() + .anyMatch(en -> en == PLAIN_DICTIONARY || en == RLE_DICTIONARY)))) { // Fallback, inspired by // https://stackoverflow.com/questions/55225108/why-is-dictionary-page-offset-0-for-plain-dictionary-encoding dictionaryPageOffset = chunkMeta.getData_page_offset(); @@ -162,8 +159,7 @@ public PrimitiveType getType() { return path.getPrimitiveType(); } - private Dictionary readDictionary(SeekableByteChannel file, long dictionaryPageOffset) - throws IOException { + private Dictionary readDictionary(SeekableByteChannel file, long dictionaryPageOffset) throws IOException { file.position(dictionaryPageOffset); final InputStream inputStream = Channels.newInputStream(file); final PageHeader pageHeader = Util.readPageHeader(inputStream); @@ -178,8 +174,7 @@ private Dictionary readDictionary(SeekableByteChannel file, long dictionaryPageO payload = decompressor.get().decompress(payload, pageHeader.uncompressed_page_size); } - final DictionaryPage dictionaryPage = - new DictionaryPage(payload, dictHeader.getNum_values(), + final DictionaryPage dictionaryPage = new DictionaryPage(payload, dictHeader.getNum_values(), Encoding.valueOf(dictHeader.getEncoding().name())); return dictionaryPage.getEncoding().initDictionary(path, dictionaryPage); @@ -194,9 +189,8 @@ class ColumnPageReaderIteratorImpl implements ColumnPageReaderIterator { long remainingValues; private final Dictionary dictionary; - ColumnPageReaderIteratorImpl(SeekableByteChannel file, long startOffset, long numValues, - ColumnDescriptor 
path, Dictionary dictionary, - SeekableChannelsProvider channelsProvider) { + ColumnPageReaderIteratorImpl(SeekableByteChannel file, long startOffset, long numValues, ColumnDescriptor path, + Dictionary dictionary, SeekableChannelsProvider channelsProvider) { this.remainingValues = numValues; this.currentOffset = startOffset; this.file = file; @@ -226,12 +220,12 @@ public ColumnPageReader next() { } if (!pageHeader.isSetData_page_header() && !pageHeader.isSetData_page_header_v2()) { throw new IllegalStateException( - "Expected data page, but neither v1 nor v2 data page header is set in file " - + file + " at offset " + headerOffset); + "Expected data page, but neither v1 nor v2 data page header is set in file " + + file + " at offset " + headerOffset); } remainingValues -= pageHeader.isSetData_page_header() - ? pageHeader.getData_page_header().getNum_values() - : pageHeader.getData_page_header_v2().getNum_values(); + ? pageHeader.getData_page_header().getNum_values() + : pageHeader.getData_page_header_v2().getNum_values(); final org.apache.parquet.format.Encoding encoding; switch (pageHeader.type) { case DATA_PAGE: @@ -242,16 +236,16 @@ public ColumnPageReader next() { break; default: throw new UncheckedDeephavenException( - "Unknown parquet data page header type " + pageHeader.type); + "Unknown parquet data page header type " + pageHeader.type); } final Dictionary pageDictionary = - (encoding == PLAIN_DICTIONARY || encoding == RLE_DICTIONARY) - ? dictionary - : null; + (encoding == PLAIN_DICTIONARY || encoding == RLE_DICTIONARY) + ? 
dictionary + : null; return new ColumnPageReaderImpl( - channelsProvider, file.position(), pageHeader, - decompressor, path, pageDictionary, getPath(), - -1, fieldTypes); + channelsProvider, file.position(), pageHeader, + decompressor, path, pageDictionary, getPath(), + -1, fieldTypes); } catch (IOException e) { throw new RuntimeException("Error reading page header", e); } @@ -271,7 +265,7 @@ class ColumnPageReaderIteratorIndexImpl implements ColumnPageReaderIterator { private final Dictionary dictionary; ColumnPageReaderIteratorIndexImpl(ColumnDescriptor path, Dictionary dictionary, - SeekableChannelsProvider channelsProvider) { + SeekableChannelsProvider channelsProvider) { this.path = path; this.dictionary = dictionary; this.channelsProvider = channelsProvider; @@ -288,12 +282,11 @@ public ColumnPageReader next() { if (!hasNext()) { throw new RuntimeException("No next element"); } - int rowCount = - (int) (offsetIndex.getLastRowIndex(pos, columnChunk.getMeta_data().getNum_values()) + int rowCount = (int) (offsetIndex.getLastRowIndex(pos, columnChunk.getMeta_data().getNum_values()) - offsetIndex.getFirstRowIndex(pos) + 1); ColumnPageReaderImpl columnPageReader = - new ColumnPageReaderImpl(channelsProvider, offsetIndex.getOffset(pos), - null, decompressor, path, dictionary, getPath(), rowCount, fieldTypes); + new ColumnPageReaderImpl(channelsProvider, offsetIndex.getOffset(pos), + null, decompressor, path, dictionary, getPath(), rowCount, fieldTypes); pos++; return columnPageReader; } diff --git a/Parquet/src/main/java/io/deephaven/parquet/ColumnPageReader.java b/Parquet/src/main/java/io/deephaven/parquet/ColumnPageReader.java index 06ed86c7e75..524e3d5b29e 100644 --- a/Parquet/src/main/java/io/deephaven/parquet/ColumnPageReader.java +++ b/Parquet/src/main/java/io/deephaven/parquet/ColumnPageReader.java @@ -22,8 +22,8 @@ default long numRows() throws IOException { * Triggers the value decompression and decoding * * @param nullValue The value to be stored under the 
null entries - * @return the data for that page in a format that makes sense for the given type - typically - * array of something that makes sense + * @return the data for that page in a format that makes sense for the given type - typically array of something + * that makes sense */ Object materialize(Object nullValue) throws IOException; diff --git a/Parquet/src/main/java/io/deephaven/parquet/ColumnPageReaderImpl.java b/Parquet/src/main/java/io/deephaven/parquet/ColumnPageReaderImpl.java index c5d9d015b75..1f3e594f488 100644 --- a/Parquet/src/main/java/io/deephaven/parquet/ColumnPageReaderImpl.java +++ b/Parquet/src/main/java/io/deephaven/parquet/ColumnPageReaderImpl.java @@ -53,11 +53,9 @@ public class ColumnPageReaderImpl implements ColumnPageReader { private int numValues; private int rowCount = -1; - ColumnPageReaderImpl(SeekableChannelsProvider channelsProvider, long offset, - PageHeader pageHeader, - ThreadLocal decompressor, - ColumnDescriptor path, - Dictionary dictionary, Path filePath, int numValues, List fieldTypes) { + ColumnPageReaderImpl(SeekableChannelsProvider channelsProvider, long offset, PageHeader pageHeader, + ThreadLocal decompressor, ColumnDescriptor path, + Dictionary dictionary, Path filePath, int numValues, List fieldTypes) { this.channelsProvider = channelsProvider; this.offset = offset; this.pageHeader = pageHeader; @@ -131,17 +129,17 @@ private int readRowCountFromDataPage(SeekableByteChannel file) throws IOExceptio case DATA_PAGE: ByteBuffer payload = readFully(file, compressedPageSize); DataPageHeader dataHeaderV1 = pageHeader.getData_page_header(); - BytesInput decompressedInput = decompressor.get() - .decompress(BytesInput.from(payload), pageHeader.getUncompressed_page_size()); + BytesInput decompressedInput = + decompressor.get().decompress(BytesInput.from(payload), pageHeader.getUncompressed_page_size()); return readRowCountFromPageV1(new DataPageV1( - decompressedInput, - dataHeaderV1.getNum_values(), - uncompressedPageSize, - 
null, // TODO in the future might want to pull in statistics - getEncoding(dataHeaderV1.getRepetition_level_encoding()), - getEncoding(dataHeaderV1.getDefinition_level_encoding()), - getEncoding(dataHeaderV1.getEncoding()))); + decompressedInput, + dataHeaderV1.getNum_values(), + uncompressedPageSize, + null, // TODO in the future might want to pull in statistics + getEncoding(dataHeaderV1.getRepetition_level_encoding()), + getEncoding(dataHeaderV1.getDefinition_level_encoding()), + getEncoding(dataHeaderV1.getEncoding()))); case DATA_PAGE_V2: DataPageHeaderV2 dataHeaderV2 = pageHeader.getData_page_header_v2(); return dataHeaderV2.getNum_rows(); @@ -150,8 +148,8 @@ private int readRowCountFromDataPage(SeekableByteChannel file) throws IOExceptio } } - private IntBuffer readKeyFromDataPage(IntBuffer keyDest, int nullPlaceholder, - SeekableByteChannel file) throws IOException { + private IntBuffer readKeyFromDataPage(IntBuffer keyDest, int nullPlaceholder, SeekableByteChannel file) + throws IOException { int uncompressedPageSize = pageHeader.getUncompressed_page_size(); int compressedPageSize = pageHeader.getCompressed_page_size(); @@ -159,46 +157,42 @@ private IntBuffer readKeyFromDataPage(IntBuffer keyDest, int nullPlaceholder, case DATA_PAGE: ByteBuffer payload = readFully(file, compressedPageSize); DataPageHeader dataHeaderV1 = pageHeader.getData_page_header(); - BytesInput decompressedInput = decompressor.get() - .decompress(BytesInput.from(payload), pageHeader.getUncompressed_page_size()); + BytesInput decompressedInput = + decompressor.get().decompress(BytesInput.from(payload), pageHeader.getUncompressed_page_size()); return readKeysFromPageV1(new DataPageV1( - decompressedInput, - dataHeaderV1.getNum_values(), - uncompressedPageSize, - null, // TODO in the future might want to pull in statistics - getEncoding(dataHeaderV1.getRepetition_level_encoding()), - getEncoding(dataHeaderV1.getDefinition_level_encoding()), - getEncoding(dataHeaderV1.getEncoding())), 
keyDest, nullPlaceholder); + decompressedInput, + dataHeaderV1.getNum_values(), + uncompressedPageSize, + null, // TODO in the future might want to pull in statistics + getEncoding(dataHeaderV1.getRepetition_level_encoding()), + getEncoding(dataHeaderV1.getDefinition_level_encoding()), + getEncoding(dataHeaderV1.getEncoding())), keyDest, nullPlaceholder); case DATA_PAGE_V2: DataPageHeaderV2 dataHeaderV2 = pageHeader.getData_page_header_v2(); int dataSize = compressedPageSize - dataHeaderV2.getRepetition_levels_byte_length() - - dataHeaderV2.getDefinition_levels_byte_length(); - ByteBuffer repetitionLevels = - readFully(file, dataHeaderV2.getRepetition_levels_byte_length()); - ByteBuffer definitionLevels = - readFully(file, dataHeaderV2.getDefinition_levels_byte_length()); - BytesInput data = - decompressor.get().decompress(BytesInput.from(readFully(file, dataSize)), - pageHeader.getUncompressed_page_size() - - dataHeaderV2.getRepetition_levels_byte_length() - - dataHeaderV2.getDefinition_levels_byte_length()); + - dataHeaderV2.getDefinition_levels_byte_length(); + ByteBuffer repetitionLevels = readFully(file, dataHeaderV2.getRepetition_levels_byte_length()); + ByteBuffer definitionLevels = readFully(file, dataHeaderV2.getDefinition_levels_byte_length()); + BytesInput data = decompressor.get().decompress(BytesInput.from(readFully(file, dataSize)), + pageHeader.getUncompressed_page_size() - dataHeaderV2.getRepetition_levels_byte_length() + - dataHeaderV2.getDefinition_levels_byte_length()); readKeysFromPageV2(new DataPageV2( - dataHeaderV2.getNum_rows(), - dataHeaderV2.getNum_nulls(), - dataHeaderV2.getNum_values(), - BytesInput.from(repetitionLevels), - BytesInput.from(definitionLevels), - getEncoding(dataHeaderV2.getEncoding()), - data, - uncompressedPageSize, - null, // TODO in the future might want to pull in statistics, - false), keyDest, nullPlaceholder); + dataHeaderV2.getNum_rows(), + dataHeaderV2.getNum_nulls(), + dataHeaderV2.getNum_values(), + 
BytesInput.from(repetitionLevels), + BytesInput.from(definitionLevels), + getEncoding(dataHeaderV2.getEncoding()), + data, + uncompressedPageSize, + null, // TODO in the future might want to pull in statistics, + false), keyDest, nullPlaceholder); return null; default: - throw new IOException(String.format("Unexpecte page of type {} of size {}", - pageHeader.getType(), compressedPageSize)); + throw new IOException(String.format("Unexpecte page of type {} of size {}", pageHeader.getType(), + compressedPageSize)); } } @@ -209,44 +203,40 @@ private Object readDataPage(Object nullValue, SeekableByteChannel file) throws I case DATA_PAGE: ByteBuffer payload = readFully(file, compressedPageSize); DataPageHeader dataHeaderV1 = pageHeader.getData_page_header(); - BytesInput decompressedInput = decompressor.get() - .decompress(BytesInput.from(payload), pageHeader.getUncompressed_page_size()); + BytesInput decompressedInput = + decompressor.get().decompress(BytesInput.from(payload), pageHeader.getUncompressed_page_size()); return readPageV1(new DataPageV1( - decompressedInput, - dataHeaderV1.getNum_values(), - uncompressedPageSize, - null, // TODO in the future might want to pull in statistics - getEncoding(dataHeaderV1.getRepetition_level_encoding()), - getEncoding(dataHeaderV1.getDefinition_level_encoding()), - getEncoding(dataHeaderV1.getEncoding())), nullValue); + decompressedInput, + dataHeaderV1.getNum_values(), + uncompressedPageSize, + null, // TODO in the future might want to pull in statistics + getEncoding(dataHeaderV1.getRepetition_level_encoding()), + getEncoding(dataHeaderV1.getDefinition_level_encoding()), + getEncoding(dataHeaderV1.getEncoding())), nullValue); case DATA_PAGE_V2: DataPageHeaderV2 dataHeaderV2 = pageHeader.getData_page_header_v2(); int dataSize = compressedPageSize - dataHeaderV2.getRepetition_levels_byte_length() - - dataHeaderV2.getDefinition_levels_byte_length(); - ByteBuffer repetitionLevels = - readFully(file, 
dataHeaderV2.getRepetition_levels_byte_length()); - ByteBuffer definitionLevels = - readFully(file, dataHeaderV2.getDefinition_levels_byte_length()); - BytesInput data = - decompressor.get().decompress(BytesInput.from(readFully(file, dataSize)), - pageHeader.getUncompressed_page_size() - - dataHeaderV2.getRepetition_levels_byte_length() - - dataHeaderV2.getDefinition_levels_byte_length()); + - dataHeaderV2.getDefinition_levels_byte_length(); + ByteBuffer repetitionLevels = readFully(file, dataHeaderV2.getRepetition_levels_byte_length()); + ByteBuffer definitionLevels = readFully(file, dataHeaderV2.getDefinition_levels_byte_length()); + BytesInput data = decompressor.get().decompress(BytesInput.from(readFully(file, dataSize)), + pageHeader.getUncompressed_page_size() - dataHeaderV2.getRepetition_levels_byte_length() + - dataHeaderV2.getDefinition_levels_byte_length()); return readPageV2(new DataPageV2( - dataHeaderV2.getNum_rows(), - dataHeaderV2.getNum_nulls(), - dataHeaderV2.getNum_values(), - BytesInput.from(repetitionLevels), - BytesInput.from(definitionLevels), - getEncoding(dataHeaderV2.getEncoding()), - data, - uncompressedPageSize, - null, // TODO in the future might want to pull in statistics, - false), nullValue); + dataHeaderV2.getNum_rows(), + dataHeaderV2.getNum_nulls(), + dataHeaderV2.getNum_values(), + BytesInput.from(repetitionLevels), + BytesInput.from(definitionLevels), + getEncoding(dataHeaderV2.getEncoding()), + data, + uncompressedPageSize, + null, // TODO in the future might want to pull in statistics, + false), nullValue); default: - throw new IOException(String.format("Unexpecte page of type {} of size {}", - pageHeader.getType(), compressedPageSize)); + throw new IOException(String.format("Unexpecte page of type {} of size {}", pageHeader.getType(), + compressedPageSize)); } } @@ -257,8 +247,8 @@ private Encoding getEncoding(org.apache.parquet.format.Encoding encoding) { private int readRowCountFromPageV1(DataPageV1 page) { try { - 
ByteBuffer bytes = page.getBytes().toByteBuffer(); // TODO - move away from page and use - // ByteBuffers directly + ByteBuffer bytes = page.getBytes().toByteBuffer(); // TODO - move away from page and use ByteBuffers + // directly bytes.order(ByteOrder.LITTLE_ENDIAN); if (path.getMaxRepetitionLevel() != 0) { int length = bytes.getInt(); @@ -268,8 +258,7 @@ private int readRowCountFromPageV1(DataPageV1 page) { return page.getValueCount(); } } catch (IOException e) { - throw new ParquetDecodingException("could not read page " + page + " in col " + path, - e); + throw new ParquetDecodingException("could not read page " + page + " in col " + path, e); } } @@ -277,39 +266,35 @@ private IntBuffer readKeysFromPageV1(DataPageV1 page, IntBuffer keyDest, int nul RunLenghBitPackingHybridBufferDecoder rlDecoder = null; RunLenghBitPackingHybridBufferDecoder dlDecoder = null; try { - ByteBuffer bytes = page.getBytes().toByteBuffer(); // TODO - move away from page and use - // ByteBuffers directly + ByteBuffer bytes = page.getBytes().toByteBuffer(); // TODO - move away from page and use ByteBuffers + // directly bytes.order(ByteOrder.LITTLE_ENDIAN); /* - * IntBuffer offsets = null; if (path.getMaxRepetitionLevel() != 0) { int length = - * bytes.getInt(); offsets = readRepetitionLevels((ByteBuffer) - * bytes.slice().limit(length), IntBuffer.allocate(INITIAL_BUFFER_SIZE)); - * bytes.position(bytes.position() + length); } if (path.getMaxDefinitionLevel() > 0) { - * int length = bytes.getInt(); dlDecoder = new - * RunLenghBitPackingHybridBufferDecoder(path.getMaxDefinitionLevel(), (ByteBuffer) - * bytes.slice().limit(length)); bytes.position(bytes.position() + length); } - * ValuesReader dataReader = getDataReader(page.getValueEncoding(), bytes, - * page.getValueCount()); if (dlDecoder != null) { readKeysWithNulls(keyDest, - * nullPlaceholder, numValues(), dlDecoder, dataReader); } else { + * IntBuffer offsets = null; if (path.getMaxRepetitionLevel() != 0) { int length = 
bytes.getInt(); offsets = + * readRepetitionLevels((ByteBuffer) bytes.slice().limit(length), IntBuffer.allocate(INITIAL_BUFFER_SIZE)); + * bytes.position(bytes.position() + length); } if (path.getMaxDefinitionLevel() > 0) { int length = + * bytes.getInt(); dlDecoder = new RunLenghBitPackingHybridBufferDecoder(path.getMaxDefinitionLevel(), + * (ByteBuffer) bytes.slice().limit(length)); bytes.position(bytes.position() + length); } ValuesReader + * dataReader = getDataReader(page.getValueEncoding(), bytes, page.getValueCount()); if (dlDecoder != null) + * { readKeysWithNulls(keyDest, nullPlaceholder, numValues(), dlDecoder, dataReader); } else { * readKeysNonNulls(keyDest, numValues, dataReader); } */ if (path.getMaxRepetitionLevel() != 0) { int length = bytes.getInt(); rlDecoder = new RunLenghBitPackingHybridBufferDecoder(path.getMaxRepetitionLevel(), - (ByteBuffer) bytes.slice().limit(length)); + (ByteBuffer) bytes.slice().limit(length)); bytes.position(bytes.position() + length); } if (path.getMaxDefinitionLevel() > 0) { int length = bytes.getInt(); dlDecoder = new RunLenghBitPackingHybridBufferDecoder(path.getMaxDefinitionLevel(), - (ByteBuffer) bytes.slice().limit(length)); + (ByteBuffer) bytes.slice().limit(length)); bytes.position(bytes.position() + length); } - ValuesReader dataReader = - new KeyIndexReader((DictionaryValuesReader) getDataReader(page.getValueEncoding(), - bytes, page.getValueCount())); - Object result = materialize(PrimitiveType.PrimitiveTypeName.INT32, dlDecoder, rlDecoder, - dataReader, nullPlaceholder, numValues); + ValuesReader dataReader = new KeyIndexReader( + (DictionaryValuesReader) getDataReader(page.getValueEncoding(), bytes, page.getValueCount())); + Object result = materialize(PrimitiveType.PrimitiveTypeName.INT32, dlDecoder, rlDecoder, dataReader, + nullPlaceholder, numValues); if (result instanceof DataWithOffsets) { keyDest.put((int[]) ((DataWithOffsets) result).materializeResult); return ((DataWithOffsets) result).offsets; @@ 
-317,15 +302,13 @@ private IntBuffer readKeysFromPageV1(DataPageV1 page, IntBuffer keyDest, int nul keyDest.put((int[]) result); return null; } catch (IOException e) { - throw new ParquetDecodingException("could not read page " + page + " in col " + path, - e); + throw new ParquetDecodingException("could not read page " + page + " in col " + path, e); } } private void readRepetitionLevels(ByteBuffer byteBuffer) throws IOException { RunLenghBitPackingHybridBufferDecoder rlDecoder; - rlDecoder = - new RunLenghBitPackingHybridBufferDecoder(path.getMaxRepetitionLevel(), byteBuffer); + rlDecoder = new RunLenghBitPackingHybridBufferDecoder(path.getMaxRepetitionLevel(), byteBuffer); rowCount = 0; int totalCount = 0; while (rlDecoder.hasNext() && totalCount < numValues) { @@ -345,46 +328,41 @@ private void readRepetitionLevels(ByteBuffer byteBuffer) throws IOException { private Object readPageV1(DataPageV1 page, Object nullValue) { RunLenghBitPackingHybridBufferDecoder dlDecoder = null; try { - ByteBuffer bytes = page.getBytes().toByteBuffer(); // TODO - move away from page and use - // ByteBuffers directly + ByteBuffer bytes = page.getBytes().toByteBuffer(); // TODO - move away from page and use ByteBuffers + // directly bytes.order(ByteOrder.LITTLE_ENDIAN); RunLenghBitPackingHybridBufferDecoder rlDecoder = null; if (path.getMaxRepetitionLevel() != 0) { int length = bytes.getInt(); rlDecoder = new RunLenghBitPackingHybridBufferDecoder(path.getMaxRepetitionLevel(), - (ByteBuffer) bytes.slice().limit(length)); + (ByteBuffer) bytes.slice().limit(length)); bytes.position(bytes.position() + length); } if (path.getMaxDefinitionLevel() > 0) { int length = bytes.getInt(); dlDecoder = new RunLenghBitPackingHybridBufferDecoder(path.getMaxDefinitionLevel(), - (ByteBuffer) bytes.slice().limit(length)); + (ByteBuffer) bytes.slice().limit(length)); bytes.position(bytes.position() + length); } - ValuesReader dataReader = - getDataReader(page.getValueEncoding(), bytes, 
page.getValueCount()); - return materialize(path.getPrimitiveType().getPrimitiveTypeName(), dlDecoder, rlDecoder, - dataReader, nullValue, numValues); + ValuesReader dataReader = getDataReader(page.getValueEncoding(), bytes, page.getValueCount()); + return materialize(path.getPrimitiveType().getPrimitiveTypeName(), dlDecoder, rlDecoder, dataReader, + nullValue, numValues); } catch (IOException e) { - throw new ParquetDecodingException("could not read page " + page + " in col " + path, - e); + throw new ParquetDecodingException("could not read page " + page + " in col " + path, e); } } private Object materialize(PrimitiveType.PrimitiveTypeName primitiveTypeName, - RunLenghBitPackingHybridBufferDecoder dlDecoder, - RunLenghBitPackingHybridBufferDecoder rlDecoder, ValuesReader dataReader, Object nullValue, - int numValues) throws IOException { + RunLenghBitPackingHybridBufferDecoder dlDecoder, RunLenghBitPackingHybridBufferDecoder rlDecoder, + ValuesReader dataReader, Object nullValue, int numValues) throws IOException { if (dlDecoder == null) { return materializeNonNull(numValues, primitiveTypeName, dataReader); } else { - return materializeWithNulls(primitiveTypeName, dlDecoder, rlDecoder, dataReader, - nullValue); + return materializeWithNulls(primitiveTypeName, dlDecoder, rlDecoder, dataReader, nullValue); } } - private void readKeysFromPageV2(DataPageV2 page, IntBuffer keyDest, int nullPlaceholder) - throws IOException { + private void readKeysFromPageV2(DataPageV2 page, IntBuffer keyDest, int nullPlaceholder) throws IOException { if (path.getMaxRepetitionLevel() > 0) { throw new RuntimeException("Repeating levels not supported"); } @@ -392,21 +370,19 @@ private void readKeysFromPageV2(DataPageV2 page, IntBuffer keyDest, int nullPlac if (path.getMaxDefinitionLevel() > 0) { dlDecoder = new RunLenghBitPackingHybridBufferDecoder(path.getMaxDefinitionLevel(), - page.getDefinitionLevels().toByteBuffer()); + page.getDefinitionLevels().toByteBuffer()); } - // 
LOG.debug("page data size {} bytes and {} records", page.getData().size(), - // page.getValueCount()); + // LOG.debug("page data size {} bytes and {} records", page.getData().size(), page.getValueCount()); try { - ValuesReader dataReader = getDataReader(page.getDataEncoding(), - page.getData().toByteBuffer(), page.getValueCount()); + ValuesReader dataReader = + getDataReader(page.getDataEncoding(), page.getData().toByteBuffer(), page.getValueCount()); if (dlDecoder != null) { readKeysWithNulls(keyDest, nullPlaceholder, numValues(), dlDecoder, dataReader); } else { readKeysNonNulls(keyDest, numValues, dataReader); } } catch (IOException e) { - throw new ParquetDecodingException("could not read page " + page + " in col " + path, - e); + throw new ParquetDecodingException("could not read page " + page + " in col " + path, e); } } @@ -416,8 +392,7 @@ private Object readPageV2(DataPageV2 page, Object nullValue) throws IOException } private void readKeysWithNulls(IntBuffer keysBuffer, int nullPlaceholder, int numValues, - RunLenghBitPackingHybridBufferDecoder dlDecoder, ValuesReader dataReader) - throws IOException { + RunLenghBitPackingHybridBufferDecoder dlDecoder, ValuesReader dataReader) throws IOException { DictionaryValuesReader dictionaryValuesReader = (DictionaryValuesReader) dataReader; int startIndex = 0; while (dlDecoder.hasNext() && startIndex < numValues) { @@ -438,8 +413,7 @@ private void readKeysWithNulls(IntBuffer keysBuffer, int nullPlaceholder, int nu } } - private void readKeysNonNulls(IntBuffer keysBuffer, int numValues, ValuesReader dataReader) - throws IOException { + private void readKeysNonNulls(IntBuffer keysBuffer, int numValues, ValuesReader dataReader) throws IOException { DictionaryValuesReader dictionaryValuesReader = (DictionaryValuesReader) dataReader; for (int i = 0; i < numValues; i++) { keysBuffer.put(dictionaryValuesReader.readValueDictionaryId()); @@ -448,8 +422,8 @@ private void readKeysNonNulls(IntBuffer keysBuffer, int numValues, 
ValuesReader interface MaterializerWithNulls { - static MaterializerWithNulls forType(PrimitiveType.PrimitiveTypeName primitiveTypeName, - ValuesReader dataReader, Object nullValue, int numValues) { + static MaterializerWithNulls forType(PrimitiveType.PrimitiveTypeName primitiveTypeName, ValuesReader dataReader, + Object nullValue, int numValues) { switch (primitiveTypeName) { case INT32: return new Int(dataReader, nullValue, numValues); @@ -667,11 +641,10 @@ public Object data() { } - private Object materializeWithNulls(int numValues, - PrimitiveType.PrimitiveTypeName primitiveTypeName, IntBuffer nullOffsets, - ValuesReader dataReader, Object nullValue) { + private Object materializeWithNulls(int numValues, PrimitiveType.PrimitiveTypeName primitiveTypeName, + IntBuffer nullOffsets, ValuesReader dataReader, Object nullValue) { MaterializerWithNulls materializer = - MaterializerWithNulls.forType(primitiveTypeName, dataReader, nullValue, numValues); + MaterializerWithNulls.forType(primitiveTypeName, dataReader, nullValue, numValues); int startIndex = 0; int nextNullPos = nullOffsets.hasRemaining() ? nullOffsets.get() : numValues; while (startIndex < numValues) { @@ -694,8 +667,7 @@ private Object materializeWithNulls(int numValues, * @param repeatingRanges * @return */ - private IntBuffer combineOptionalAndRepeating(IntBuffer nullOffsets, IntBuffer repeatingRanges, - int nullValue) { + private IntBuffer combineOptionalAndRepeating(IntBuffer nullOffsets, IntBuffer repeatingRanges, int nullValue) { IntBuffer result = IntBuffer.allocate(nullOffsets.limit() + repeatingRanges.limit()); int startIndex = 0; int nextNullPos = nullOffsets.hasRemaining() ? 
nullOffsets.get() : result.capacity(); @@ -718,11 +690,10 @@ private IntBuffer combineOptionalAndRepeating(IntBuffer nullOffsets, IntBuffer r } private Object materializeWithNulls(PrimitiveType.PrimitiveTypeName primitiveTypeName, - RunLenghBitPackingHybridBufferDecoder dlDecoder, - RunLenghBitPackingHybridBufferDecoder rlDecoder, ValuesReader dataReader, Object nullValue) - throws IOException { + RunLenghBitPackingHybridBufferDecoder dlDecoder, RunLenghBitPackingHybridBufferDecoder rlDecoder, + ValuesReader dataReader, Object nullValue) throws IOException { Pair[], Integer> offsetsAndCount = - getOffsetsAndNulls(dlDecoder, rlDecoder, numValues); + getOffsetsAndNulls(dlDecoder, rlDecoder, numValues); int numValues = offsetsAndCount.second; Pair[] offsetAndNulls = offsetsAndCount.first; List offsetsWithNull = new ArrayList<>(); @@ -735,8 +706,8 @@ private Object materializeWithNulls(PrimitiveType.PrimitiveTypeName primitiveTyp currentNullOffsets = offsetAndNull.second; } else { if (currentNullOffsets != null) { - offsetsWithNull.add(combineOptionalAndRepeating(currentNullOffsets, - offsetAndNull.second, NULL_OFFSET)); + offsetsWithNull + .add(combineOptionalAndRepeating(currentNullOffsets, offsetAndNull.second, NULL_OFFSET)); currentNullOffsets = null; } else { offsetsWithNull.add(offsetAndNull.second); @@ -745,8 +716,7 @@ private Object materializeWithNulls(PrimitiveType.PrimitiveTypeName primitiveTyp } Object values; if (currentNullOffsets != null) { - values = materializeWithNulls(numValues, primitiveTypeName, currentNullOffsets, - dataReader, nullValue); + values = materializeWithNulls(numValues, primitiveTypeName, currentNullOffsets, dataReader, nullValue); } else { values = materializeNonNull(numValues, primitiveTypeName, dataReader); } @@ -760,8 +730,8 @@ private Object materializeWithNulls(PrimitiveType.PrimitiveTypeName primitiveTyp } - private Object materializeNonNull(int numValues, - PrimitiveType.PrimitiveTypeName primitiveTypeName, ValuesReader 
dataReader) { + private Object materializeNonNull(int numValues, PrimitiveType.PrimitiveTypeName primitiveTypeName, + ValuesReader dataReader) { switch (primitiveTypeName) { case INT32: int[] intData = new int[numValues]; @@ -815,8 +785,8 @@ private ValuesReader getDataReader(Encoding dataEncoding, ByteBuffer in, int val if (dataEncoding.usesDictionary()) { if (dictionary == null) { throw new ParquetDecodingException( - "could not read page in col " + path - + " as the dictionary was missing for encoding " + dataEncoding); + "could not read page in col " + path + " as the dictionary was missing for encoding " + + dataEncoding); } dataReader = new DictionaryValuesReader(dictionary); } else { @@ -843,8 +813,7 @@ public int numValues() throws IOException { case DATA_PAGE_V2: return numValues = pageHeader.getData_page_header_v2().getNum_values(); default: - throw new IOException( - String.format("Unexpected page of type {%s}", pageHeader.getType())); + throw new IOException(String.format("Unexpected page of type {%s}", pageHeader.getType())); } } @@ -872,8 +841,8 @@ public long numRows() throws IOException { private Pair[], Integer> getOffsetsAndNulls( - RunLenghBitPackingHybridBufferDecoder dlDecoder, - RunLenghBitPackingHybridBufferDecoder rlDecoder, int numValues) throws IOException { + RunLenghBitPackingHybridBufferDecoder dlDecoder, RunLenghBitPackingHybridBufferDecoder rlDecoder, + int numValues) throws IOException { dlDecoder.readNextRange(); if (rlDecoder != null) { rlDecoder.readNextRange(); @@ -883,8 +852,8 @@ private Pair[], Integer> getOffsetsAndNulls( int rlRangeSize = rlDecoder == null ? numValues : rlDecoder.currentRangeCount(); int currentRl = rlDecoder == null ? 
0 : rlDecoder.currentValue(); - LevelsController levelsController = new LevelsController( - fieldTypes.stream().map(Type::getRepetition).toArray(Type.Repetition[]::new)); + LevelsController levelsController = + new LevelsController(fieldTypes.stream().map(Type::getRepetition).toArray(Type.Repetition[]::new)); for (int valuesProcessed = 0; valuesProcessed < numValues;) { if (dlRangeSize == 0) { dlDecoder.readNextRange(); diff --git a/Parquet/src/main/java/io/deephaven/parquet/ColumnWriter.java b/Parquet/src/main/java/io/deephaven/parquet/ColumnWriter.java index 3cbca8c50ed..ba5a602e313 100644 --- a/Parquet/src/main/java/io/deephaven/parquet/ColumnWriter.java +++ b/Parquet/src/main/java/io/deephaven/parquet/ColumnWriter.java @@ -10,8 +10,7 @@ public interface ColumnWriter { void addPage(Object pageData, Object nullValues, int valuesCount) throws IOException; - void addVectorPage(Object pageData, IntBuffer repeatCount, int valuesCount, Object nullValue) - throws IOException; + void addVectorPage(Object pageData, IntBuffer repeatCount, int valuesCount, Object nullValue) throws IOException; void close(); } diff --git a/Parquet/src/main/java/io/deephaven/parquet/ColumnWriterImpl.java b/Parquet/src/main/java/io/deephaven/parquet/ColumnWriterImpl.java index b54a8ad5e57..88b36a9eb58 100644 --- a/Parquet/src/main/java/io/deephaven/parquet/ColumnWriterImpl.java +++ b/Parquet/src/main/java/io/deephaven/parquet/ColumnWriterImpl.java @@ -37,8 +37,7 @@ public class ColumnWriterImpl implements ColumnWriter { private final CompressionCodecFactory.BytesInputCompressor compressor; private boolean hasDictionary; private int pageCount = 0; - private static final ParquetMetadataConverter metadataConverter = - new ParquetMetadataConverter(); + private static final ParquetMetadataConverter metadataConverter = new ParquetMetadataConverter(); private BulkWriter bulkWriter; @@ -56,25 +55,23 @@ public class ColumnWriterImpl implements ColumnWriter { private final OffsetIndexBuilder 
offsetIndexBuilder; ColumnWriterImpl( - final RowGroupWriterImpl owner, - final SeekableByteChannel writeChannel, - final ColumnDescriptor column, - final CompressionCodecFactory.BytesInputCompressor compressor, - final int pageSize, - final ByteBufferAllocator allocator) { + final RowGroupWriterImpl owner, + final SeekableByteChannel writeChannel, + final ColumnDescriptor column, + final CompressionCodecFactory.BytesInputCompressor compressor, + final int pageSize, + final ByteBufferAllocator allocator) { this.writeChannel = writeChannel; this.column = column; this.compressor = compressor; this.pageSize = pageSize; this.allocator = allocator; dlEncoder = column.getMaxDefinitionLevel() == 0 ? null - : new RunLengthBitPackingHybridEncoder( - getWidthFromMaxInt(column.getMaxDefinitionLevel()), MIN_SLAB_SIZE, pageSize, - allocator); + : new RunLengthBitPackingHybridEncoder( + getWidthFromMaxInt(column.getMaxDefinitionLevel()), MIN_SLAB_SIZE, pageSize, allocator); rlEncoder = column.getMaxRepetitionLevel() == 0 ? 
null - : new RunLengthBitPackingHybridEncoder( - getWidthFromMaxInt(column.getMaxRepetitionLevel()), MIN_SLAB_SIZE, pageSize, - allocator); + : new RunLengthBitPackingHybridEncoder( + getWidthFromMaxInt(column.getMaxRepetitionLevel()), MIN_SLAB_SIZE, pageSize, allocator); this.owner = owner; offsetIndexBuilder = OffsetIndexBuilder.getBuilder(); } @@ -97,7 +94,7 @@ private void initWriter() { if (bulkWriter == null) { if (hasDictionary) { bulkWriter = new RleIntChunkedWriter(pageSize, allocator, - (byte) (32 - Integer.numberOfLeadingZeros(dictionaryPage.num_values))); + (byte) (32 - Integer.numberOfLeadingZeros(dictionaryPage.num_values))); } else { bulkWriter = getWriter(column.getPrimitiveType()); } @@ -107,8 +104,7 @@ private void initWriter() { } @Override - public void addDictionaryPage(final Object dictionaryValues, final int valuesCount) - throws IOException { + public void addDictionaryPage(final Object dictionaryValues, final int valuesCount) throws IOException { if (pageCount > 0) { throw new RuntimeException("Attempting to add dictionary past the first page"); } @@ -119,13 +115,11 @@ public void addDictionaryPage(final Object dictionaryValues, final int valuesCou writeDictionaryPage(dictionaryWriter.getByteBufferView(), valuesCount); pageCount++; hasDictionary = true; - dictionaryPage = - new DictionaryPageHeader(valuesCount, org.apache.parquet.format.Encoding.PLAIN); + dictionaryPage = new DictionaryPageHeader(valuesCount, org.apache.parquet.format.Encoding.PLAIN); } - public void writeDictionaryPage(final ByteBuffer dictionaryBuffer, final int valuesCount) - throws IOException { + public void writeDictionaryPage(final ByteBuffer dictionaryBuffer, final int valuesCount) throws IOException { long currentChunkDictionaryPageOffset = writeChannel.position(); int uncompressedSize = dictionaryBuffer.remaining(); BytesInput compressedBytes = compressor.compress(BytesInput.from(dictionaryBuffer)); @@ -133,11 +127,11 @@ public void writeDictionaryPage(final 
ByteBuffer dictionaryBuffer, final int val int compressedPageSize = (int) compressedBytes.size(); metadataConverter.writeDictionaryPageHeader( - uncompressedSize, - compressedPageSize, - valuesCount, - Encoding.PLAIN, - Channels.newOutputStream(writeChannel)); + uncompressedSize, + compressedPageSize, + valuesCount, + Encoding.PLAIN, + Channels.newOutputStream(writeChannel)); long headerSize = writeChannel.position() - currentChunkDictionaryPageOffset; this.uncompressedLength += uncompressedSize + headerSize; this.compressedLength += compressedPageSize + headerSize; @@ -150,8 +144,7 @@ private BulkWriter getWriter(final PrimitiveType primitiveType) { case INT96: return new PlainFixedLenChunkedWriter(pageSize, 12, allocator); case FIXED_LEN_BYTE_ARRAY: - return new PlainFixedLenChunkedWriter(pageSize, - column.getPrimitiveType().getTypeLength(), allocator); + return new PlainFixedLenChunkedWriter(pageSize, column.getPrimitiveType().getTypeLength(), allocator); case INT32: return new PlainIntChunkedWriter(pageSize, allocator); case INT64: @@ -165,15 +158,13 @@ private BulkWriter getWriter(final PrimitiveType primitiveType) { case BOOLEAN: return new PlainBooleanChunkedWriter(pageSize, allocator); default: - throw new UnsupportedOperationException( - "Unknown type " + primitiveType.getPrimitiveTypeName()); + throw new UnsupportedOperationException("Unknown type " + primitiveType.getPrimitiveTypeName()); } } @Override - public void addPage(final Object pageData, final Object nullValues, final int valuesCount) - throws IOException { + public void addPage(final Object pageData, final Object nullValues, final int valuesCount) throws IOException { if (dlEncoder == null) { throw new RuntimeException("Null values not supported"); } @@ -185,10 +176,10 @@ public void addPage(final Object pageData, final Object nullValues, final int va } public void addVectorPage( - final Object pageData, - final IntBuffer repeatCount, - final int nonNullValueCount, - final Object nullValue) 
throws IOException { + final Object pageData, + final IntBuffer repeatCount, + final int nonNullValueCount, + final Object nullValue) throws IOException { if (dlEncoder == null) { throw new RuntimeException("Null values not supported"); } @@ -197,46 +188,45 @@ public void addVectorPage( } initWriter(); // noinspection unchecked - int valueCount = bulkWriter.writeBulkVector(pageData, repeatCount, rlEncoder, dlEncoder, - nonNullValueCount, nullValue); + int valueCount = + bulkWriter.writeBulkVector(pageData, repeatCount, rlEncoder, dlEncoder, nonNullValueCount, nullValue); writePage(bulkWriter.getByteBufferView(), valueCount); bulkWriter.reset(); } private void writeDataPageV2Header( - final int uncompressedSize, - final int compressedSize, - final int valueCount, - final int nullCount, - final int rowCount, - final int rlByteLength, - final int dlByteLength, - final OutputStream to) throws IOException { + final int uncompressedSize, + final int compressedSize, + final int valueCount, + final int nullCount, + final int rowCount, + final int rlByteLength, + final int dlByteLength, + final OutputStream to) throws IOException { writePageHeader( - newDataPageV2Header( - uncompressedSize, compressedSize, - valueCount, nullCount, rowCount, - rlByteLength, dlByteLength), - to); + newDataPageV2Header( + uncompressedSize, compressedSize, + valueCount, nullCount, rowCount, + rlByteLength, dlByteLength), + to); } private PageHeader newDataPageV2Header( - final int uncompressedSize, - final int compressedSize, - final int valueCount, - final int nullCount, - final int rowCount, - final int rlByteLength, - final int dlByteLength) { + final int uncompressedSize, + final int compressedSize, + final int valueCount, + final int nullCount, + final int rowCount, + final int rlByteLength, + final int dlByteLength) { // TODO: pageHeader.crc = ...; DataPageHeaderV2 dataPageHeaderV2 = new DataPageHeaderV2( - valueCount, nullCount, rowCount, - hasDictionary ? 
org.apache.parquet.format.Encoding.PLAIN_DICTIONARY - : org.apache.parquet.format.Encoding.PLAIN, - dlByteLength, rlByteLength); - PageHeader pageHeader = - new PageHeader(PageType.DATA_PAGE_V2, uncompressedSize, compressedSize); + valueCount, nullCount, rowCount, + hasDictionary ? org.apache.parquet.format.Encoding.PLAIN_DICTIONARY + : org.apache.parquet.format.Encoding.PLAIN, + dlByteLength, rlByteLength); + PageHeader pageHeader = new PageHeader(PageType.DATA_PAGE_V2, uncompressedSize, compressedSize); pageHeader.setData_page_header_v2(dataPageHeaderV2); if (hasDictionary) { pageHeader.setDictionary_page_header(dictionaryPage); @@ -247,32 +237,30 @@ private PageHeader newDataPageV2Header( public void writePageV2( - final int rowCount, - final int nullCount, - final int valueCount, - final BytesInput repetitionLevels, - final BytesInput definitionLevels, - final ByteBuffer data) throws IOException { + final int rowCount, + final int nullCount, + final int valueCount, + final BytesInput repetitionLevels, + final BytesInput definitionLevels, + final ByteBuffer data) throws IOException { int rlByteLength = (int) repetitionLevels.size(); int dlByteLength = (int) definitionLevels.size(); int uncompressedDataSize = data.remaining(); - int uncompressedSize = - (int) (uncompressedDataSize + repetitionLevels.size() + definitionLevels.size()); + int uncompressedSize = (int) (uncompressedDataSize + repetitionLevels.size() + definitionLevels.size()); BytesInput compressedData = compressor.compress(BytesInput.from(data)); - int compressedSize = - (int) (compressedData.size() + repetitionLevels.size() + definitionLevels.size()); + int compressedSize = (int) (compressedData.size() + repetitionLevels.size() + definitionLevels.size()); long initialOffset = writeChannel.position(); if (firstDataPageOffset == -1) { firstDataPageOffset = initialOffset; } writeDataPageV2Header( - uncompressedSize, compressedSize, - valueCount, nullCount, rowCount, - rlByteLength, - dlByteLength, - 
Channels.newOutputStream(writeChannel)); + uncompressedSize, compressedSize, + valueCount, nullCount, rowCount, + rlByteLength, + dlByteLength, + Channels.newOutputStream(writeChannel)); long headerSize = writeChannel.position() - initialOffset; this.uncompressedLength += (uncompressedSize + headerSize); this.compressedLength += (compressedSize + headerSize); @@ -283,8 +271,8 @@ public void writePageV2( writeChannel.write(compressedData.toByteBuffer()); } - private void writePage(final BytesInput bytes, final int valueCount, - final Encoding valuesEncoding) throws IOException { + private void writePage(final BytesInput bytes, final int valueCount, final Encoding valuesEncoding) + throws IOException { long initialOffset = writeChannel.position(); if (firstDataPageOffset == -1) { firstDataPageOffset = initialOffset; @@ -293,22 +281,22 @@ private void writePage(final BytesInput bytes, final int valueCount, long uncompressedSize = bytes.size(); if (uncompressedSize > Integer.MAX_VALUE) { throw new ParquetEncodingException( - "Cannot write page larger than Integer.MAX_VALUE bytes: " + - uncompressedSize); + "Cannot write page larger than Integer.MAX_VALUE bytes: " + + uncompressedSize); } BytesInput compressedBytes = compressor.compress(bytes); long compressedSize = compressedBytes.size(); if (compressedSize > Integer.MAX_VALUE) { throw new ParquetEncodingException( - "Cannot write compressed page larger than Integer.MAX_VALUE bytes: " - + compressedSize); + "Cannot write compressed page larger than Integer.MAX_VALUE bytes: " + + compressedSize); } writeDataPageV1Header( - (int) uncompressedSize, - (int) compressedSize, - valueCount, - valuesEncoding, - Channels.newOutputStream(writeChannel)); + (int) uncompressedSize, + (int) compressedSize, + valueCount, + valuesEncoding, + Channels.newOutputStream(writeChannel)); long headerSize = writeChannel.position() - initialOffset; this.uncompressedLength += (uncompressedSize + headerSize); this.compressedLength += 
(compressedSize + headerSize); @@ -321,30 +309,29 @@ private void writePage(final BytesInput bytes, final int valueCount, } private void writeDataPageV1Header( - final int uncompressedSize, - final int compressedSize, - final int valueCount, - final Encoding valuesEncoding, - final OutputStream to) throws IOException { + final int uncompressedSize, + final int compressedSize, + final int valueCount, + final Encoding valuesEncoding, + final OutputStream to) throws IOException { writePageHeader(newDataPageHeader(uncompressedSize, - compressedSize, - valueCount, - valuesEncoding), to); + compressedSize, + valueCount, + valuesEncoding), to); } private PageHeader newDataPageHeader( - final int uncompressedSize, - final int compressedSize, - final int valueCount, - final Encoding valuesEncoding) { - PageHeader pageHeader = - new PageHeader(PageType.DATA_PAGE, uncompressedSize, compressedSize); + final int uncompressedSize, + final int compressedSize, + final int valueCount, + final Encoding valuesEncoding) { + PageHeader pageHeader = new PageHeader(PageType.DATA_PAGE, uncompressedSize, compressedSize); pageHeader.setData_page_header(new DataPageHeader( - valueCount, - org.apache.parquet.format.Encoding.valueOf(valuesEncoding.name()), - org.apache.parquet.format.Encoding.valueOf(Encoding.RLE.name()), - org.apache.parquet.format.Encoding.valueOf(Encoding.RLE.name()))); + valueCount, + org.apache.parquet.format.Encoding.valueOf(valuesEncoding.name()), + org.apache.parquet.format.Encoding.valueOf(Encoding.RLE.name()), + org.apache.parquet.format.Encoding.valueOf(Encoding.RLE.name()))); return pageHeader; } @@ -358,20 +345,17 @@ private void writePage(final ByteBuffer encodedData, final long valueCount) { BytesInput bytes = BytesInput.from(encodedData); if (dlEncoder != null) { BytesInput dlBytesInput = dlEncoder.toBytes(); - bytes = BytesInput.concat(BytesInput.fromInt((int) dlBytesInput.size()), - dlBytesInput, bytes); + bytes = BytesInput.concat(BytesInput.fromInt((int) 
dlBytesInput.size()), dlBytesInput, bytes); } if (rlEncoder != null) { BytesInput rlBytesInput = rlEncoder.toBytes(); - bytes = BytesInput.concat(BytesInput.fromInt((int) rlBytesInput.size()), - rlBytesInput, bytes); + bytes = BytesInput.concat(BytesInput.fromInt((int) rlBytesInput.size()), rlBytesInput, bytes); } writePage( - bytes, - (int) valueCount, hasDictionary ? Encoding.RLE_DICTIONARY : Encoding.PLAIN); + bytes, + (int) valueCount, hasDictionary ? Encoding.RLE_DICTIONARY : Encoding.PLAIN); } catch (IOException e) { - throw new ParquetEncodingException("could not write page for " + column.getPath()[0], - e); + throw new ParquetEncodingException("could not write page for " + column.getPath()[0], e); } if (dlEncoder != null) { dlEncoder.reset(); @@ -384,11 +368,10 @@ private void writePage(final ByteBuffer encodedData, final long valueCount) { @Override public void close() { owner.releaseWriter(this, - ColumnChunkMetaData.get(ColumnPath.get(column.getPath()), column.getPrimitiveType(), - compressor.getCodecName(), - null, encodings, Statistics.createStats(column.getPrimitiveType()), - firstDataPageOffset, dictionaryOffset, totalValueCount, compressedLength, - uncompressedLength)); + ColumnChunkMetaData.get(ColumnPath.get(column.getPath()), column.getPrimitiveType(), + compressor.getCodecName(), + null, encodings, Statistics.createStats(column.getPrimitiveType()), firstDataPageOffset, + dictionaryOffset, totalValueCount, compressedLength, uncompressedLength)); } public ColumnDescriptor getColumn() { diff --git a/Parquet/src/main/java/io/deephaven/parquet/LevelsController.java b/Parquet/src/main/java/io/deephaven/parquet/LevelsController.java index eb9419fea89..cdef212f823 100644 --- a/Parquet/src/main/java/io/deephaven/parquet/LevelsController.java +++ b/Parquet/src/main/java/io/deephaven/parquet/LevelsController.java @@ -7,10 +7,10 @@ import java.util.Arrays; /** - * Provides logic for an arbitrarily nested leaf in the tree. 
The implementation allows for both - * repeating and optional levels. The required levels are being ignored as they are implicitly - * present OR the controller should not be used for leaves where the entire path it required (in - * that case we have a simple straight copy of column data to the leaf array) + * Provides logic for an arbitrarily nested leaf in the tree. The implementation allows for both repeating and optional + * levels. The required levels are being ignored as they are implicitly present OR the controller should not be used for + * leaves where the entire path it required (in that case we have a simple straight copy of column data to the leaf + * array) */ class LevelsController { @@ -25,8 +25,8 @@ class LevelsController { System.arraycopy(levels, 0, newLevels, 1, levels.length); levels = newLevels; levelsList = new Level[levels.length]; - repeatLevelToDefLevel = new int[(int) Arrays.stream(levels) - .filter(levelType -> levelType == Type.Repetition.REPEATED).count()]; + repeatLevelToDefLevel = + new int[(int) Arrays.stream(levels).filter(levelType -> levelType == Type.Repetition.REPEATED).count()]; int rlPos = 0; for (int i = 0; i < levels.length; i++) { levelsList[i] = buildLevel(levels[i]); @@ -34,8 +34,7 @@ class LevelsController { repeatLevelToDefLevel[rlPos++] = i; } } - levelsList[0].addElements(1);// This is the root schema element and it needs one and only - // entry + levelsList[0].addElements(1);// This is the root schema element and it needs one and only entry } private Level buildLevel(Type.Repetition level) { @@ -55,68 +54,57 @@ void addElements(int repeatLevel, int defLevel, int elementsCount) { /* - * reason around something that has 3 nested repeating V DL RL {} 0 0 {{}} 1 0 {{{}}} 2 0 {} - * 0 0 {} 0 0 {{} 1 0 {}} 1 1 {{{} 2 0 {}}} 2 2 {{{1 3 0 2} 3 3 {} 2 2 {1 3 2 2 3 3 3}}} 3 3 + * reason around something that has 3 nested repeating V DL RL {} 0 0 {{}} 1 0 {{{}}} 2 0 {} 0 0 {} 0 0 {{} 1 0 + * {}} 1 1 {{{} 2 0 {}}} 2 2 {{{1 3 0 
2} 3 3 {} 2 2 {1 3 2 2 3 3 3}}} 3 3 * - * addElements(0,0,1): levels[0].addValues(1) -> l[0] = {1},cvc=1 levels[1].addNulls(1) -> - * l[1] = {0}, cvc=0 + * addElements(0,0,1): levels[0].addValues(1) -> l[0] = {1},cvc=1 levels[1].addNulls(1) -> l[1] = {0}, cvc=0 * - * addElements(1,0,1): levels[0].addValues(1) -> l[0] = {2},cvc=2 levels[1].addElement(1) -> - * l[1] = {0,1}, cvc = 1 levels[2].addNull() -> l[2] = {0}, cvc = 0 + * addElements(1,0,1): levels[0].addValues(1) -> l[0] = {2},cvc=2 levels[1].addElement(1) -> l[1] = {0,1}, cvc = + * 1 levels[2].addNull() -> l[2] = {0}, cvc = 0 * - * addElements(2,0,1): levels[0].addValues(1) -> l[0] = {3},cvc=3 levels[1].addElement(1) -> - * l[1] = {0,1,2}, cvc = 2 levels[2].addElement(1) -> l[2] = {0,1}, cvc = 1 - * levels[3].addNull() -> l[3] = {0}, cvc = 0 + * addElements(2,0,1): levels[0].addValues(1) -> l[0] = {3},cvc=3 levels[1].addElement(1) -> l[1] = {0,1,2}, cvc + * = 2 levels[2].addElement(1) -> l[2] = {0,1}, cvc = 1 levels[3].addNull() -> l[3] = {0}, cvc = 0 * - * addElements(0,0,2): levels[0].addValues(2) -> l[0] = {5},cvc=5 levels[1].addNulls(2) -> - * l[1] = {0,1,2,2,2}, cvc=2 addElements(1,0,1): levels[0].addValues(1) -> l[0] = {6},cvc=6 - * levels[1].addElement(1) -> l[1] = {0,1,2,2,2,3}, cvc = 3 levels[2].addNull() -> l[2] = - * {0,1,1}, cvc = 1 addElements(1,1,1): levels[1].addValues(1) -> l[1] = {0,1,2,2,2,4}, cvc - * = 4 levels[2].addNull() -> l[2] = {0,1,1,1}, cvc = 1 addElements(2,0,1): - * levels[0].addValues(1) -> l[0] = {7},cvc=7 levels[1].addElement(1) -> l[1] = - * {0,1,2,2,2,4,5}, cvc = 5 levels[2].addElement(1) -> l[2] = {0,1,1,1,2}, cvc = 2 - * levels[3].addNull() -> l[3] = {0,0}, cvc = 0 addElements(2,2,1): levels[2].addValues(1) - * -> l[2] = {0,1,1,1,3}, cvc = 3 levels[3].addNull() -> l[3] = {0,0,0}, cvc = 0 - * addElements(3,0,1): levels[0].addValues(1) -> l[0] = {8},cvc=8 levels[1].addElement(1) -> - * l[1] = {0,1,2}, cvc = 2 levels[2].addElement(1) -> l[2] = {0,1,1,1,2,3}, cvc = 3 - * 
levels[3].addElement(1) -> l[3] = {0,0,0,1}, cvc = 1 addElements(3,3,1): - * levels[3].addValues(1) -> l[3] = {0,0,0,2}, cvc = 2 addElements(2,2,1): - * levels[2].addValues(1) -> l[2] = {0,1,1,1,3}, cvc = 3 levels[3].addNull() -> l[3] = - * {0,0,0,2,0}, cvc = 2 addElements(3,2,1): levels[2].addValues(1) -> l[2] = {0,1,1,1,4}, - * cvc = 4 levels[3].addElement(1) -> l[3] = {0,0,0,2,0,3}, cvc = 3 addElements(3,3,2): + * addElements(0,0,2): levels[0].addValues(2) -> l[0] = {5},cvc=5 levels[1].addNulls(2) -> l[1] = {0,1,2,2,2}, + * cvc=2 addElements(1,0,1): levels[0].addValues(1) -> l[0] = {6},cvc=6 levels[1].addElement(1) -> l[1] = + * {0,1,2,2,2,3}, cvc = 3 levels[2].addNull() -> l[2] = {0,1,1}, cvc = 1 addElements(1,1,1): + * levels[1].addValues(1) -> l[1] = {0,1,2,2,2,4}, cvc = 4 levels[2].addNull() -> l[2] = {0,1,1,1}, cvc = 1 + * addElements(2,0,1): levels[0].addValues(1) -> l[0] = {7},cvc=7 levels[1].addElement(1) -> l[1] = + * {0,1,2,2,2,4,5}, cvc = 5 levels[2].addElement(1) -> l[2] = {0,1,1,1,2}, cvc = 2 levels[3].addNull() -> l[3] = + * {0,0}, cvc = 0 addElements(2,2,1): levels[2].addValues(1) -> l[2] = {0,1,1,1,3}, cvc = 3 levels[3].addNull() + * -> l[3] = {0,0,0}, cvc = 0 addElements(3,0,1): levels[0].addValues(1) -> l[0] = {8},cvc=8 + * levels[1].addElement(1) -> l[1] = {0,1,2}, cvc = 2 levels[2].addElement(1) -> l[2] = {0,1,1,1,2,3}, cvc = 3 + * levels[3].addElement(1) -> l[3] = {0,0,0,1}, cvc = 1 addElements(3,3,1): levels[3].addValues(1) -> l[3] = + * {0,0,0,2}, cvc = 2 addElements(2,2,1): levels[2].addValues(1) -> l[2] = {0,1,1,1,3}, cvc = 3 + * levels[3].addNull() -> l[3] = {0,0,0,2,0}, cvc = 2 addElements(3,2,1): levels[2].addValues(1) -> l[2] = + * {0,1,1,1,4}, cvc = 4 levels[3].addElement(1) -> l[3] = {0,0,0,2,0,3}, cvc = 3 addElements(3,3,2): * levels[3].addValues(2) -> l[3] = {0,0,0,2,0,5}, cvc = 5 * * */ /* - * reason around something that has 3 nested optionals V DL RL null 0 0 a.null 1 0 a.b.null - * 2 0 null 0 0 null 0 0 a.null 1 0 a.null 1 0 
a.b.null 2 0 a.b.null 2 0 a.b.c 3 0 a.b.c 3 0 - * a.b.null 2 0 a.b.c 3 0 a.b.c 3 0 a.b.c 3 0 + * reason around something that has 3 nested optionals V DL RL null 0 0 a.null 1 0 a.b.null 2 0 null 0 0 null 0 + * 0 a.null 1 0 a.null 1 0 a.b.null 2 0 a.b.null 2 0 a.b.c 3 0 a.b.c 3 0 a.b.null 2 0 a.b.c 3 0 a.b.c 3 0 a.b.c + * 3 0 * - * addElements(0,0,1): levels[0].addValues(1) -> l[0] = {1},cvc=1 levels[1].addNulls(1) -> - * l[1] = {0}, cvc=1 addElements(1,0,1): levels[0].addValues(1) -> l[0] = {2},cvc=2 - * levels[1].addElements(1) -> l[1]={0} cvc = 2 levels[2].addNulls(1) -> l[2] = {0}, cvc=1 - * addElements(2,0,1): levels[0].addValues(1) -> l[0] = {3},cvc=3 levels[1].addElements(1) - * -> l[1]={0} cvc = 3 levels[2].addElements(1) -> l[2]={0} cvc = 2 levels[3].addNulls(1) -> - * l[3] = {0}, cvc=1 addElements(0,0,2): levels[0].addValues(2) -> l[0] = {5},cvc=5 - * levels[1].addNulls(2) -> l[1] = {0,3,4}, cvc=5 addElements(1,0,2): levels[0].addValues(2) - * -> l[0] = {7},cvc=7 levels[1].addElements(2) -> l[1]={0,3,4} cvc = 7 - * levels[2].addNulls(2) -> l[2] = {0,2,3}, cvc=4 addElements(2,0,2): levels[0].addValues(2) - * -> l[0] = {9},cvc=9 levels[1].addElements(2) -> l[1]={0,3,4} cvc = 9 - * levels[2].addElements(2) -> l[2]={0,2,3} cvc = 6 levels[3].addNulls(2) -> l[3] = {0,1,2}, - * cvc=3 + * addElements(0,0,1): levels[0].addValues(1) -> l[0] = {1},cvc=1 levels[1].addNulls(1) -> l[1] = {0}, cvc=1 + * addElements(1,0,1): levels[0].addValues(1) -> l[0] = {2},cvc=2 levels[1].addElements(1) -> l[1]={0} cvc = 2 + * levels[2].addNulls(1) -> l[2] = {0}, cvc=1 addElements(2,0,1): levels[0].addValues(1) -> l[0] = {3},cvc=3 + * levels[1].addElements(1) -> l[1]={0} cvc = 3 levels[2].addElements(1) -> l[2]={0} cvc = 2 + * levels[3].addNulls(1) -> l[3] = {0}, cvc=1 addElements(0,0,2): levels[0].addValues(2) -> l[0] = {5},cvc=5 + * levels[1].addNulls(2) -> l[1] = {0,3,4}, cvc=5 addElements(1,0,2): levels[0].addValues(2) -> l[0] = {7},cvc=7 + * levels[1].addElements(2) -> 
l[1]={0,3,4} cvc = 7 levels[2].addNulls(2) -> l[2] = {0,2,3}, cvc=4 + * addElements(2,0,2): levels[0].addValues(2) -> l[0] = {9},cvc=9 levels[1].addElements(2) -> l[1]={0,3,4} cvc = + * 9 levels[2].addElements(2) -> l[2]={0,2,3} cvc = 6 levels[3].addNulls(2) -> l[3] = {0,1,2}, cvc=3 * - * addElements(3,0,2): levels[0].addValues(2) -> l[0] = {11},cvc=11 levels[1].addElements(2) - * -> l[1]={0,3,4} cvc = 11 levels[2].addElements(2) -> l[2]={0,2,3} cvc = 8 - * levels[3].addElements(2) -> l[3] = {0,1,2}, cvc=5 + * addElements(3,0,2): levels[0].addValues(2) -> l[0] = {11},cvc=11 levels[1].addElements(2) -> l[1]={0,3,4} cvc + * = 11 levels[2].addElements(2) -> l[2]={0,2,3} cvc = 8 levels[3].addElements(2) -> l[3] = {0,1,2}, cvc=5 * - * addElements(2,0,2): levels[0].addValues(1) -> l[0] = {12},cvc=12 levels[1].addElements(1) - * -> l[1]={0,3,4} cvc = 12 levels[2].addElements(1) -> l[2]={0,2,3} cvc = 9 - * levels[3].addNulls(1) -> l[3] = {0,1,2,5}, cvc = 6 + * addElements(2,0,2): levels[0].addValues(1) -> l[0] = {12},cvc=12 levels[1].addElements(1) -> l[1]={0,3,4} cvc + * = 12 levels[2].addElements(1) -> l[2]={0,2,3} cvc = 9 levels[3].addNulls(1) -> l[3] = {0,1,2,5}, cvc = 6 * - * addElements(3,0,2): levels[0].addValues(3) -> l[0] = {15},cvc=15 levels[1].addElements(3) - * -> l[1]={0,3,4} cvc = 15 levels[2].addElements(3) -> l[2]={0,2,3} cvc = 12 - * levels[3].addElements(3) -> l[3] = {0,1,2,5}, cvc = 9 + * addElements(3,0,2): levels[0].addValues(3) -> l[0] = {15},cvc=15 levels[1].addElements(3) -> l[1]={0,3,4} cvc + * = 15 levels[2].addElements(3) -> l[2]={0,2,3} cvc = 12 levels[3].addElements(3) -> l[3] = {0,1,2,5}, cvc = 9 * */ @@ -136,9 +124,7 @@ Pair[], Integer> getFinalState() { if (levelsList.length == 1) { childCount--; } - return new Pair<>( - Arrays.stream(levelsList).skip(1).map(Level::finalState).toArray(Pair[]::new), - childCount); + return new Pair<>(Arrays.stream(levelsList).skip(1).map(Level::finalState).toArray(Pair[]::new), childCount); } @@ -209,8 +195,7 
@@ public void addElements(int elementsCount) { @Override public void addValues(int valuesCount) { - throw new UnsupportedOperationException( - "Optional levels don't allow multiple values - use addElements"); + throw new UnsupportedOperationException("Optional levels don't allow multiple values - use addElements"); } @Override diff --git a/Parquet/src/main/java/io/deephaven/parquet/ParquetFileReader.java b/Parquet/src/main/java/io/deephaven/parquet/ParquetFileReader.java index 92872709729..8fea39ea31d 100644 --- a/Parquet/src/main/java/io/deephaven/parquet/ParquetFileReader.java +++ b/Parquet/src/main/java/io/deephaven/parquet/ParquetFileReader.java @@ -37,25 +37,20 @@ public class ParquetFileReader { private final Path rootPath; private final MessageType type; - public ParquetFileReader(String filePath, SeekableChannelsProvider channelsProvider, - int pageSizeHint) throws IOException { + public ParquetFileReader(String filePath, SeekableChannelsProvider channelsProvider, int pageSizeHint) + throws IOException { this.channelsProvider = channelsProvider; - this.codecFactory = - ThreadLocal.withInitial(() -> new CodecFactory(new Configuration(), pageSizeHint)); - // Root path should be this file if a single file, else the parent directory for a metadata - // file - rootPath = - filePath.endsWith(".parquet") ? Paths.get(filePath) : Paths.get(filePath).getParent(); + this.codecFactory = ThreadLocal.withInitial(() -> new CodecFactory(new Configuration(), pageSizeHint)); + // Root path should be this file if a single file, else the parent directory for a metadata file + rootPath = filePath.endsWith(".parquet") ? 
Paths.get(filePath) : Paths.get(filePath).getParent(); SeekableByteChannel f = channelsProvider.getReadChannel(filePath); long fileLen = f.size(); int FOOTER_LENGTH_SIZE = 4; - if (fileLen < MAGIC.length + FOOTER_LENGTH_SIZE + MAGIC.length) { // MAGIC + data + footer + - // footerIndex + MAGIC - throw new RuntimeException( - filePath + " is not a Parquet file (too small length: " + fileLen + ")"); + if (fileLen < MAGIC.length + FOOTER_LENGTH_SIZE + MAGIC.length) { // MAGIC + data + footer + footerIndex + MAGIC + throw new RuntimeException(filePath + " is not a Parquet file (too small length: " + fileLen + ")"); } long footerLengthIndex = fileLen - FOOTER_LENGTH_SIZE - MAGIC.length; @@ -65,14 +60,12 @@ public ParquetFileReader(String filePath, SeekableChannelsProvider channelsProvi byte[] magic = new byte[MAGIC.length]; readFully(f, magic); if (!Arrays.equals(MAGIC, magic)) { - throw new RuntimeException( - filePath + " is not a Parquet file. expected magic number at tail " + throw new RuntimeException(filePath + " is not a Parquet file. 
expected magic number at tail " + Arrays.toString(MAGIC) + " but found " + Arrays.toString(magic)); } long footerIndex = footerLengthIndex - footerLength; if (footerIndex < MAGIC.length || footerIndex >= footerLengthIndex) { - throw new RuntimeException( - "corrupted file: the footer index is not within the file: " + footerIndex); + throw new RuntimeException("corrupted file: the footer index is not within the file: " + footerIndex); } f.position(footerIndex); byte[] footer = new byte[footerLength]; @@ -83,8 +76,7 @@ public ParquetFileReader(String filePath, SeekableChannelsProvider channelsProvi } /** - * @return The {@link SeekableChannelsProvider} used for this reader, appropriate to use for - * related file acccess + * @return The {@link SeekableChannelsProvider} used for this reader, appropriate to use for related file acccess */ public SeekableChannelsProvider getChannelsProvider() { return channelsProvider; @@ -93,23 +85,22 @@ public SeekableChannelsProvider getChannelsProvider() { private Set columnsWithDictionaryUsedOnEveryDataPage = null; /** - * Get the name of all columns that we can know for certain (a) have a dictionary, and (b) use - * the dictionary on all data pages. + * Get the name of all columns that we can know for certain (a) have a dictionary, and (b) use the dictionary on all + * data pages. * * @return A set of parquet column names that satisfies the required condition. */ @SuppressWarnings("unused") public Set getColumnsWithDictionaryUsedOnEveryDataPage() { if (columnsWithDictionaryUsedOnEveryDataPage == null) { - columnsWithDictionaryUsedOnEveryDataPage = - calculateColumnsWithDictionaryUsedOnEveryDataPage(); + columnsWithDictionaryUsedOnEveryDataPage = calculateColumnsWithDictionaryUsedOnEveryDataPage(); } return columnsWithDictionaryUsedOnEveryDataPage; } /** - * True only if we are certain every data page in this column chunk uses dictionary encoding; - * note false also covers the "we can't tell" case. 
+ * True only if we are certain every data page in this column chunk uses dictionary encoding; note false also covers + * the "we can't tell" case. */ private boolean columnChunkUsesDictionaryOnEveryPage(final ColumnChunk columnChunk) { final ColumnMetaData columnMeta = columnChunk.getMeta_data(); @@ -118,13 +109,13 @@ private boolean columnChunkUsesDictionaryOnEveryPage(final ColumnChunk columnChu } for (PageEncodingStats encodingStat : columnMeta.encoding_stats) { if (encodingStat.page_type != PageType.DATA_PAGE - && encodingStat.page_type != PageType.DATA_PAGE_V2) { + && encodingStat.page_type != PageType.DATA_PAGE_V2) { // skip non-data pages. continue; } // this is a data page. if (encodingStat.encoding != Encoding.PLAIN_DICTIONARY - && encodingStat.encoding != Encoding.RLE_DICTIONARY) { + && encodingStat.encoding != Encoding.RLE_DICTIONARY) { return false; } } @@ -184,16 +175,15 @@ private int readIntLittleEndian(SeekableByteChannel f) throws IOException { public RowGroupReader getRowGroup(int groupNumber) { return new RowGroupReaderImpl( - fileMetaData.getRow_groups().get(groupNumber), - channelsProvider, - rootPath, - codecFactory, - type, - getSchema()); + fileMetaData.getRow_groups().get(groupNumber), + channelsProvider, + rootPath, + codecFactory, + type, + getSchema()); } - private static MessageType fromParquetSchema(List schema, - List columnOrders) { + private static MessageType fromParquetSchema(List schema, List columnOrders) { Iterator iterator = schema.iterator(); SchemaElement root = iterator.next(); Types.MessageTypeBuilder builder = Types.buildMessage(); @@ -204,14 +194,13 @@ private static MessageType fromParquetSchema(List schema, return builder.named(root.name); } - private static void buildChildren(Types.GroupBuilder builder, Iterator schema, - int childrenCount, List columnOrders, int columnCount) { + private static void buildChildren(Types.GroupBuilder builder, Iterator schema, int childrenCount, + List columnOrders, int columnCount) { 
for (int i = 0; i < childrenCount; ++i) { SchemaElement schemaElement = schema.next(); Object childBuilder; if (schemaElement.type != null) { - Types.PrimitiveBuilder primitiveBuilder = - builder.primitive(getPrimitive(schemaElement.type), + Types.PrimitiveBuilder primitiveBuilder = builder.primitive(getPrimitive(schemaElement.type), fromParquetRepetition(schemaElement.repetition_type)); if (schemaElement.isSetType_length()) { primitiveBuilder.length(schemaElement.type_length); @@ -227,11 +216,11 @@ private static void buildChildren(Types.GroupBuilder builder, Iterator> offsetIndexes = new ArrayList<>(); public ParquetFileWriter( - final String filePath, - final SeekableChannelsProvider channelsProvider, - final int pageSize, - final ByteBufferAllocator allocator, - final MessageType type, - final CompressionCodecName codecName, - final Map extraMetaData) throws IOException { + final String filePath, + final SeekableChannelsProvider channelsProvider, + final int pageSize, + final ByteBufferAllocator allocator, + final MessageType type, + final CompressionCodecName codecName, + final Map extraMetaData) throws IOException { this.pageSize = pageSize; this.allocator = allocator; this.extraMetaData = new HashMap<>(extraMetaData); - writeChannel = channelsProvider.getWriteChannel(filePath, false); // TODO add support for - // appending + writeChannel = channelsProvider.getWriteChannel(filePath, false); // TODO add support for appending this.type = type; this.channelsProvider = channelsProvider; CodecFactory codecFactory = new CodecFactory(new Configuration(), pageSize); @@ -60,15 +59,14 @@ public ParquetFileWriter( } RowGroupWriter addRowGroup(final String path, final boolean append) throws IOException { - RowGroupWriterImpl rowGroupWriter = new RowGroupWriterImpl(path, append, channelsProvider, - type, pageSize, allocator, compressor); + RowGroupWriterImpl rowGroupWriter = + new RowGroupWriterImpl(path, append, channelsProvider, type, pageSize, allocator, compressor); 
blocks.add(rowGroupWriter.getBlock()); return rowGroupWriter; } public RowGroupWriter addRowGroup(final long size) { - RowGroupWriterImpl rowGroupWriter = - new RowGroupWriterImpl(writeChannel, type, pageSize, allocator, compressor); + RowGroupWriterImpl rowGroupWriter = new RowGroupWriterImpl(writeChannel, type, pageSize, allocator, compressor); rowGroupWriter.getBlock().setRowCount(size); blocks.add(rowGroupWriter.getBlock()); offsetIndexes.add(rowGroupWriter.offsetIndexes()); @@ -79,27 +77,25 @@ public void close() throws IOException { try (final OutputStream os = Channels.newOutputStream(writeChannel)) { os.write(ParquetFileReader.MAGIC); serializeOffsetIndexes(offsetIndexes, blocks, os); - ParquetMetadata footer = new ParquetMetadata( - new FileMetaData(type, extraMetaData, Version.FULL_VERSION), blocks); + ParquetMetadata footer = + new ParquetMetadata(new FileMetaData(type, extraMetaData, Version.FULL_VERSION), blocks); serializeFooter(footer, os); } // os (and thus writeChannel) are closed at this point. 
} - private void serializeFooter(final ParquetMetadata footer, final OutputStream os) - throws IOException { + private void serializeFooter(final ParquetMetadata footer, final OutputStream os) throws IOException { final long footerIndex = writeChannel.position(); - org.apache.parquet.format.FileMetaData parquetMetadata = - metadataConverter.toParquetMetadata(VERSION, footer); + org.apache.parquet.format.FileMetaData parquetMetadata = metadataConverter.toParquetMetadata(VERSION, footer); writeFileMetaData(parquetMetadata, os); BytesUtils.writeIntLittleEndian(os, (int) (writeChannel.position() - footerIndex)); os.write(ParquetFileReader.MAGIC); } private void serializeOffsetIndexes( - final List> offsetIndexes, - final List blocks, - final OutputStream os) throws IOException { + final List> offsetIndexes, + final List blocks, + final OutputStream os) throws IOException { for (int bIndex = 0, bSize = blocks.size(); bIndex < bSize; ++bIndex) { final List columns = blocks.get(bIndex).getColumns(); final List blockOffsetIndexes = offsetIndexes.get(bIndex); @@ -110,10 +106,8 @@ private void serializeOffsetIndexes( } ColumnChunkMetaData column = columns.get(cIndex); final long offset = writeChannel.position(); - Util.writeOffsetIndex(ParquetMetadataConverter.toParquetOffsetIndex(offsetIndex), - os); - column.setOffsetIndexReference( - new IndexReference(offset, (int) (writeChannel.position() - offset))); + Util.writeOffsetIndex(ParquetMetadataConverter.toParquetOffsetIndex(offsetIndex), os); + column.setOffsetIndexReference(new IndexReference(offset, (int) (writeChannel.position() - offset))); } } } diff --git a/Parquet/src/main/java/io/deephaven/parquet/Reader.java b/Parquet/src/main/java/io/deephaven/parquet/Reader.java index 930708de3cb..0f07e8e3a1f 100644 --- a/Parquet/src/main/java/io/deephaven/parquet/Reader.java +++ b/Parquet/src/main/java/io/deephaven/parquet/Reader.java @@ -27,8 +27,7 @@ public DummyRecordConverter(MessageType schema) { this.root = 
(GroupConverter) schema.convertWith(new TypeConverter() { @Override - public Converter convertPrimitiveType(List path, - PrimitiveType primitiveType) { + public Converter convertPrimitiveType(List path, PrimitiveType primitiveType) { String name = primitiveType.getName(); return new PrimitiveConverter() { @@ -70,7 +69,7 @@ public void addLong(long value) { @Override public Converter convertGroupType(List path, GroupType groupType, - final List converters) { + final List converters) { String name = groupType.getName(); return new GroupConverter() { @@ -91,8 +90,7 @@ public void end() { } @Override - public Converter convertMessageType(MessageType messageType, - List children) { + public Converter convertMessageType(MessageType messageType, List children) { return convertGroupType(null, messageType, children); } }); @@ -119,7 +117,7 @@ public ReadContext init(InitContext context) { @Override public RecordMaterializer prepareForRead(Configuration configuration, Map keyValueMetaData, - MessageType fileSchema, ReadContext readContext) { + MessageType fileSchema, ReadContext readContext) { return new DummyRecordConverter(fileSchema); } } diff --git a/Parquet/src/main/java/io/deephaven/parquet/Replicate.java b/Parquet/src/main/java/io/deephaven/parquet/Replicate.java index de4cbde9af6..b5b96c17add 100644 --- a/Parquet/src/main/java/io/deephaven/parquet/Replicate.java +++ b/Parquet/src/main/java/io/deephaven/parquet/Replicate.java @@ -8,13 +8,11 @@ public class Replicate { public static void main(String[] args) throws IOException { - ReplicatePrimitiveCode.intToLongAndFloatingPoints(PlainIntChunkedWriter.class, - ReplicatePrimitiveCode.MAIN_SRC, - "int pageSize", "IntBuffer.allocate(4)", "int originalLimit", "int writeBulk", - "int valueCount", "int rowCount", - "int nullCount", "writeInt\\(", "IntBuffer repeatCount", "length != Integer.MIN_VALUE", - "int length", - "int i = 0;", "int targetCapacity", "IntBuffer nullOffsets"); + 
ReplicatePrimitiveCode.intToLongAndFloatingPoints(PlainIntChunkedWriter.class, ReplicatePrimitiveCode.MAIN_SRC, + "int pageSize", "IntBuffer.allocate(4)", "int originalLimit", "int writeBulk", "int valueCount", + "int rowCount", + "int nullCount", "writeInt\\(", "IntBuffer repeatCount", "length != Integer.MIN_VALUE", "int length", + "int i = 0;", "int targetCapacity", "IntBuffer nullOffsets"); } } diff --git a/Parquet/src/main/java/io/deephaven/parquet/RowGroupReaderImpl.java b/Parquet/src/main/java/io/deephaven/parquet/RowGroupReaderImpl.java index 3e09c28a685..215cdc5588c 100644 --- a/Parquet/src/main/java/io/deephaven/parquet/RowGroupReaderImpl.java +++ b/Parquet/src/main/java/io/deephaven/parquet/RowGroupReaderImpl.java @@ -33,7 +33,7 @@ public class RowGroupReaderImpl implements RowGroupReader { private final Path rootPath; RowGroupReaderImpl(RowGroup rowGroup, SeekableChannelsProvider channelsProvider, Path rootPath, - ThreadLocal codecFactory, MessageType type, MessageType schema) { + ThreadLocal codecFactory, MessageType type, MessageType schema) { this.channelsProvider = channelsProvider; this.codecFactory = codecFactory; this.rowGroup = rowGroup; @@ -45,8 +45,7 @@ public class RowGroupReaderImpl implements RowGroupReader { chunkMap.put(key, column); List nonRequiredFields = new ArrayList<>(); for (int indexInPath = 0; indexInPath < path_in_schema.size(); indexInPath++) { - Type fieldType = schema - .getType(path_in_schema.subList(0, indexInPath + 1).toArray(new String[0])); + Type fieldType = schema.getType(path_in_schema.subList(0, indexInPath + 1).toArray(new String[0])); if (fieldType.getRepetition() != Type.Repetition.REQUIRED) { nonRequiredFields.add(fieldType); } @@ -72,14 +71,14 @@ public ColumnChunkReaderImpl getColumnChunk(List path) { if (columnChunk.isSetOffset_index_offset()) { try (SeekableByteChannel f = channelsProvider.getReadChannel(rootPath)) { f.position(columnChunk.getOffset_index_offset()); - offsetIndex = 
ParquetMetadataConverter.fromParquetOffsetIndex(Util.readOffsetIndex( - new BufferedInputStream(Channels.newInputStream(f), BUFFER_SIZE))); + offsetIndex = ParquetMetadataConverter.fromParquetOffsetIndex( + Util.readOffsetIndex(new BufferedInputStream(Channels.newInputStream(f), BUFFER_SIZE))); } catch (IOException e) { throw new RuntimeException(e); } } - return new ColumnChunkReaderImpl(columnChunk, channelsProvider, rootPath, codecFactory, - type, offsetIndex, fieldTypes); + return new ColumnChunkReaderImpl(columnChunk, channelsProvider, rootPath, codecFactory, type, offsetIndex, + fieldTypes); } @Override diff --git a/Parquet/src/main/java/io/deephaven/parquet/RowGroupWriterImpl.java b/Parquet/src/main/java/io/deephaven/parquet/RowGroupWriterImpl.java index caadc4e68ba..2ddf53f89cc 100644 --- a/Parquet/src/main/java/io/deephaven/parquet/RowGroupWriterImpl.java +++ b/Parquet/src/main/java/io/deephaven/parquet/RowGroupWriterImpl.java @@ -26,12 +26,11 @@ public class RowGroupWriterImpl implements RowGroupWriter { private final List currentOffsetIndexes = new ArrayList<>(); private final CompressionCodecFactory.BytesInputCompressor compressor; - RowGroupWriterImpl(String path, boolean append, SeekableChannelsProvider channelsProvider, - MessageType type, - int pageSize, ByteBufferAllocator allocator, - CompressionCodecFactory.BytesInputCompressor compressor) throws IOException { - this(channelsProvider.getWriteChannel(path, append), type, pageSize, allocator, - blockWithPath(path), compressor); + RowGroupWriterImpl(String path, boolean append, SeekableChannelsProvider channelsProvider, MessageType type, + int pageSize, ByteBufferAllocator allocator, CompressionCodecFactory.BytesInputCompressor compressor) + throws IOException { + this(channelsProvider.getWriteChannel(path, append), type, pageSize, allocator, blockWithPath(path), + compressor); } private static BlockMetaData blockWithPath(String path) { @@ -40,15 +39,15 @@ private static BlockMetaData 
blockWithPath(String path) { return blockMetaData; } - RowGroupWriterImpl(SeekableByteChannel writeChannel, MessageType type, int pageSize, - ByteBufferAllocator allocator, CompressionCodecFactory.BytesInputCompressor compressor) { + RowGroupWriterImpl(SeekableByteChannel writeChannel, MessageType type, int pageSize, ByteBufferAllocator allocator, + CompressionCodecFactory.BytesInputCompressor compressor) { this(writeChannel, type, pageSize, allocator, new BlockMetaData(), compressor); } private RowGroupWriterImpl(SeekableByteChannel writeChannel, MessageType type, int pageSize, - ByteBufferAllocator allocator, BlockMetaData blockMetaData, - CompressionCodecFactory.BytesInputCompressor compressor) { + ByteBufferAllocator allocator, BlockMetaData blockMetaData, + CompressionCodecFactory.BytesInputCompressor compressor) { this.writeChannel = writeChannel; this.type = type; this.pageSize = pageSize; @@ -64,8 +63,7 @@ String[] getPrimitivePath(String columnName) { while (!(rollingType = type.getType(result)).isPrimitive()) { GroupType groupType = rollingType.asGroupType(); if (groupType.getFieldCount() != 1) { - throw new UnsupportedOperationException( - "Encountered struct at:" + Arrays.toString(result)); + throw new UnsupportedOperationException("Encountered struct at:" + Arrays.toString(result)); } result = Arrays.copyOf(result, result.length + 1); result[result.length - 1] = groupType.getFieldName(0); @@ -76,13 +74,12 @@ String[] getPrimitivePath(String columnName) { @Override public ColumnWriter addColumn(String columnName) { if (activeWriter != null) { - throw new RuntimeException("There is already an active column writer for " - + activeWriter.getColumn().getPath()[0] - + " need to close that before opening a writer for " + columnName); + throw new RuntimeException( + "There is already an active column writer for " + activeWriter.getColumn().getPath()[0] + + " need to close that before opening a writer for " + columnName); } - activeWriter = new 
ColumnWriterImpl(this, writeChannel, - type.getColumnDescription(getPrimitivePath(columnName)), compressor, pageSize, - allocator); + activeWriter = new ColumnWriterImpl(this, writeChannel, type.getColumnDescription(getPrimitivePath(columnName)), + compressor, pageSize, allocator); return activeWriter; } @@ -93,13 +90,11 @@ public BlockMetaData getBlock() { void releaseWriter(ColumnWriterImpl columnWriter, ColumnChunkMetaData columnChunkMetaData) { if (activeWriter != columnWriter) { - throw new RuntimeException( - columnWriter.getColumn().getPath()[0] + " is not the active column"); + throw new RuntimeException(columnWriter.getColumn().getPath()[0] + " is not the active column"); } currentOffsetIndexes.add(columnWriter.getOffsetIndex()); blockMetaData.addColumn(columnChunkMetaData); - blockMetaData.setTotalByteSize( - columnChunkMetaData.getTotalSize() + blockMetaData.getTotalByteSize()); + blockMetaData.setTotalByteSize(columnChunkMetaData.getTotalSize() + blockMetaData.getTotalByteSize()); activeWriter = null; } diff --git a/Parquet/src/main/java/io/deephaven/parquet/utils/CachedChannelProvider.java b/Parquet/src/main/java/io/deephaven/parquet/utils/CachedChannelProvider.java index 4a7d97962c0..8193c572cfd 100644 --- a/Parquet/src/main/java/io/deephaven/parquet/utils/CachedChannelProvider.java +++ b/Parquet/src/main/java/io/deephaven/parquet/utils/CachedChannelProvider.java @@ -15,8 +15,7 @@ import java.util.*; /** - * {@link SeekableChannelsProvider Channel provider} that will cache a bounded number of unused - * channels. + * {@link SeekableChannelsProvider Channel provider} that will cache a bounded number of unused channels. 
*/ public class CachedChannelProvider implements SeekableChannelsProvider { @@ -34,17 +33,17 @@ enum ChannelType { { final Map> channelPoolsTemp = - new EnumMap<>(ChannelType.class); - Arrays.stream(ChannelType.values()).forEach( - ct -> channelPoolsTemp.put(ct, new KeyedObjectHashMap<>((PerPathPool.KOHM_KEY)))); + new EnumMap<>(ChannelType.class); + Arrays.stream(ChannelType.values()) + .forEach(ct -> channelPoolsTemp.put(ct, new KeyedObjectHashMap<>((PerPathPool.KOHM_KEY)))); channelPools = Collections.unmodifiableMap(channelPoolsTemp); } private final RAPriQueue releasePriority = - new RAPriQueue<>(8, PerPathPool.RAPQ_ADAPTER, PerPathPool.class); + new RAPriQueue<>(8, PerPathPool.RAPQ_ADAPTER, PerPathPool.class); public CachedChannelProvider(@NotNull final SeekableChannelsProvider wrappedProvider, - final int maximumPooledCount) { + final int maximumPooledCount) { this.wrappedProvider = wrappedProvider; this.maximumPooledCount = Require.gtZero(maximumPooledCount, "maximumPooledCount"); } @@ -52,30 +51,28 @@ public CachedChannelProvider(@NotNull final SeekableChannelsProvider wrappedProv @Override public SeekableByteChannel getReadChannel(@NotNull final Path path) throws IOException { final String pathKey = path.toAbsolutePath().toString(); - final KeyedObjectHashMap channelPool = - channelPools.get(ChannelType.Read); + final KeyedObjectHashMap channelPool = channelPools.get(ChannelType.Read); final CachedChannel result = tryGetPooledChannel(pathKey, channelPool); return result == null - ? new CachedChannel(wrappedProvider.getReadChannel(path), ChannelType.Read, pathKey) - : result.position(0); + ? 
new CachedChannel(wrappedProvider.getReadChannel(path), ChannelType.Read, pathKey) + : result.position(0); } @Override - public SeekableByteChannel getWriteChannel(@NotNull final Path path, final boolean append) - throws IOException { + public SeekableByteChannel getWriteChannel(@NotNull final Path path, final boolean append) throws IOException { final String pathKey = path.toAbsolutePath().toString(); final ChannelType channelType = append ? ChannelType.WriteAppend : ChannelType.Write; final KeyedObjectHashMap channelPool = channelPools.get(channelType); final CachedChannel result = tryGetPooledChannel(pathKey, channelPool); return result == null - ? new CachedChannel(wrappedProvider.getWriteChannel(path, append), channelType, pathKey) - : result.position(append ? result.size() : 0); // The seek isn't really necessary for - // append; will be at end no matter what. + ? new CachedChannel(wrappedProvider.getWriteChannel(path, append), channelType, pathKey) + : result.position(append ? result.size() : 0); // The seek isn't really necessary for append; will be at + // end no matter what. 
} @Nullable private synchronized CachedChannel tryGetPooledChannel(@NotNull final String pathKey, - @NotNull final KeyedObjectHashMap channelPool) { + @NotNull final KeyedObjectHashMap channelPool) { final PerPathPool perPathPool = channelPool.get(pathKey); final CachedChannel result; if (perPathPool == null || perPathPool.availableChannels.isEmpty()) { @@ -92,8 +89,7 @@ private synchronized CachedChannel tryGetPooledChannel(@NotNull final String pat return result; } - private synchronized void returnPoolableChannel(@NotNull final CachedChannel cachedChannel) - throws IOException { + private synchronized void returnPoolableChannel(@NotNull final CachedChannel cachedChannel) throws IOException { Assert.eqFalse(cachedChannel.isOpen, "cachedChannel.isOpen"); cachedChannel.closeTime = advanceClock(); if (pooledCount == maximumPooledCount) { @@ -107,8 +103,8 @@ private synchronized void returnPoolableChannel(@NotNull final CachedChannel cac ++pooledCount; } final PerPathPool perPathPool = channelPools.get(cachedChannel.channelType) - .putIfAbsent(cachedChannel.pathKey, - pk -> new PerPathPool(cachedChannel.channelType, cachedChannel.pathKey)); + .putIfAbsent(cachedChannel.pathKey, + pk -> new PerPathPool(cachedChannel.channelType, cachedChannel.pathKey)); perPathPool.availableChannels.addFirst(cachedChannel); releasePriority.enter(perPathPool); } @@ -138,8 +134,8 @@ private class CachedChannel implements SeekableByteChannel { private volatile boolean isOpen = true; private long closeTime; - private CachedChannel(@NotNull final SeekableByteChannel wrappedChannel, - @NotNull final ChannelType channelType, @NotNull final String pathKey) { + private CachedChannel(@NotNull final SeekableByteChannel wrappedChannel, @NotNull final ChannelType channelType, + @NotNull final String pathKey) { this.wrappedChannel = wrappedChannel; this.channelType = channelType; this.pathKey = pathKey; @@ -210,39 +206,36 @@ private void dispose() throws IOException { */ private static class 
PerPathPool { - private static final RAPriQueue.Adapter RAPQ_ADAPTER = - new RAPriQueue.Adapter() { + private static final RAPriQueue.Adapter RAPQ_ADAPTER = new RAPriQueue.Adapter() { - @Override - public boolean less(@NotNull final PerPathPool ppp1, - @NotNull final PerPathPool ppp2) { - final CachedChannel ch1 = ppp1.availableChannels.peekLast(); // Oldest channel - // is at the tail - final CachedChannel ch2 = ppp2.availableChannels.peekLast(); - Assert.neq(Objects.requireNonNull(ch1).closeTime, "ch1.closeTime", + @Override + public boolean less(@NotNull final PerPathPool ppp1, @NotNull final PerPathPool ppp2) { + final CachedChannel ch1 = ppp1.availableChannels.peekLast(); // Oldest channel is at the tail + final CachedChannel ch2 = ppp2.availableChannels.peekLast(); + Assert.neq(Objects.requireNonNull(ch1).closeTime, "ch1.closeTime", Objects.requireNonNull(ch2).closeTime, "ch2.closeTime"); - return ch1.closeTime < ch2.closeTime; - } + return ch1.closeTime < ch2.closeTime; + } - @Override - public void setPos(@NotNull final PerPathPool ppp, final int slot) { - ppp.priorityQueueSlot = slot; - } + @Override + public void setPos(@NotNull final PerPathPool ppp, final int slot) { + ppp.priorityQueueSlot = slot; + } - @Override - public int getPos(@NotNull final PerPathPool ppp) { - return ppp.priorityQueueSlot; - } - }; + @Override + public int getPos(@NotNull final PerPathPool ppp) { + return ppp.priorityQueueSlot; + } + }; private static final KeyedObjectKey KOHM_KEY = - new KeyedObjectKey.Basic() { + new KeyedObjectKey.Basic() { - @Override - public String getKey(@NotNull final PerPathPool ppp) { - return ppp.path; - } - }; + @Override + public String getKey(@NotNull final PerPathPool ppp) { + return ppp.path; + } + }; @SuppressWarnings({"FieldCanBeLocal", "unused"}) // Field has debugging utility private final ChannelType channelType; diff --git a/Parquet/src/main/java/io/deephaven/parquet/utils/Helpers.java 
b/Parquet/src/main/java/io/deephaven/parquet/utils/Helpers.java index cc45914bcf2..d53cb7318e8 100644 --- a/Parquet/src/main/java/io/deephaven/parquet/utils/Helpers.java +++ b/Parquet/src/main/java/io/deephaven/parquet/utils/Helpers.java @@ -13,8 +13,7 @@ public class Helpers { public static void readFully(SeekableByteChannel f, byte[] buffer) throws IOException { int read = f.read(ByteBuffer.wrap(buffer)); if (read != buffer.length) { - throw new IOException( - "Expected for bytes, only read " + read + " while it expected " + buffer.length); + throw new IOException("Expected for bytes, only read " + read + " while it expected " + buffer.length); } } @@ -22,8 +21,7 @@ public static void readFully(SeekableByteChannel f, ByteBuffer buffer) throws IO int expected = buffer.remaining(); int read = f.read(buffer); if (read != expected) { - throw new IOException( - "Expected for bytes, only read " + read + " while it expected " + expected); + throw new IOException("Expected for bytes, only read " + read + " while it expected " + expected); } } @@ -31,8 +29,7 @@ public static ByteBuffer readFully(SeekableByteChannel f, int expected) throws I ByteBuffer buffer = allocate(expected); int read = f.read(buffer); if (read != expected) { - throw new IOException( - "Expected for bytes, only read " + read + " while it expected " + expected); + throw new IOException("Expected for bytes, only read " + read + " while it expected " + expected); } buffer.flip(); return buffer; @@ -54,7 +51,7 @@ static int readUnsignedVarInt(ByteBuffer in) { } static int readIntLittleEndianPaddedOnBitWidth(ByteBuffer in, int bitWidth) - throws IOException { + throws IOException { int bytesWidth = BytesUtils.paddedByteCountFromBits(bitWidth); switch (bytesWidth) { @@ -70,8 +67,7 @@ static int readIntLittleEndianPaddedOnBitWidth(ByteBuffer in, int bitWidth) return in.getInt(); default: throw new IOException( - String.format("Encountered bitWidth (%d) that requires more than 4 bytes", - bitWidth)); + 
String.format("Encountered bitWidth (%d) that requires more than 4 bytes", bitWidth)); } } diff --git a/Parquet/src/main/java/io/deephaven/parquet/utils/LocalFSChannelProvider.java b/Parquet/src/main/java/io/deephaven/parquet/utils/LocalFSChannelProvider.java index 5918e04902a..367d3b40787 100644 --- a/Parquet/src/main/java/io/deephaven/parquet/utils/LocalFSChannelProvider.java +++ b/Parquet/src/main/java/io/deephaven/parquet/utils/LocalFSChannelProvider.java @@ -16,12 +16,11 @@ public SeekableByteChannel getReadChannel(@NotNull final Path path) throws IOExc } @Override - public SeekableByteChannel getWriteChannel(@NotNull final Path filePath, final boolean append) - throws IOException { + public SeekableByteChannel getWriteChannel(@NotNull final Path filePath, final boolean append) throws IOException { final FileChannel result = FileChannel.open(filePath, - StandardOpenOption.WRITE, - StandardOpenOption.CREATE, - append ? StandardOpenOption.APPEND : StandardOpenOption.TRUNCATE_EXISTING); + StandardOpenOption.WRITE, + StandardOpenOption.CREATE, + append ? 
StandardOpenOption.APPEND : StandardOpenOption.TRUNCATE_EXISTING); if (append) { result.position(result.size()); } else { diff --git a/Parquet/src/main/java/io/deephaven/parquet/utils/RunLenghBitPackingHybridBufferDecoder.java b/Parquet/src/main/java/io/deephaven/parquet/utils/RunLenghBitPackingHybridBufferDecoder.java index 01f61a949f3..5e101ba160f 100644 --- a/Parquet/src/main/java/io/deephaven/parquet/utils/RunLenghBitPackingHybridBufferDecoder.java +++ b/Parquet/src/main/java/io/deephaven/parquet/utils/RunLenghBitPackingHybridBufferDecoder.java @@ -17,8 +17,7 @@ * Decodes values written in the grammar described in {@link RunLengthBitPackingHybridEncoder} */ public class RunLenghBitPackingHybridBufferDecoder { - private static final Logger LOG = - LoggerFactory.getLogger(RunLenghBitPackingHybridBufferDecoder.class); + private static final Logger LOG = LoggerFactory.getLogger(RunLenghBitPackingHybridBufferDecoder.class); private int rangeCount; private final int maxLevel; private int rleCandidateValue; @@ -40,8 +39,7 @@ public RunLenghBitPackingHybridBufferDecoder(int maxLevel, ByteBuffer in) { this.bitWidth = BytesUtils.getWidthFromMaxInt(maxLevel); this.maxLevel = maxLevel; LOG.debug("decoding bitWidth {}", bitWidth); - Preconditions.checkArgument(bitWidth >= 0 && bitWidth <= 32, - "bitWidth must be >= 0 and <= 32"); + Preconditions.checkArgument(bitWidth >= 0 && bitWidth <= 32, "bitWidth must be >= 0 and <= 32"); this.packer = Packer.LITTLE_ENDIAN.newBytePacker(bitWidth); this.in = in; } @@ -99,8 +97,7 @@ public void readNextRange() throws IOException { currentCount = 0; break; case PACKED: - while (currentCount > 0 - && (currentBuffer[currentBuffer.length - currentCount] == currentValue)) { + while (currentCount > 0 && (currentBuffer[currentBuffer.length - currentCount] == currentValue)) { currentCount--; rangeCount++; } @@ -153,8 +150,8 @@ private void readNext() throws IOException { int bytesToRead = (int) Math.ceil(currentCount * bitWidth / 8.0); 
bytesToRead = Math.min(bytesToRead, in.remaining()); int newPos = in.position() + bytesToRead; - for (int valueIndex = 0, byteIndex = 0; valueIndex < currentCount; valueIndex += - 8, byteIndex += bitWidth) { + for (int valueIndex = 0, byteIndex = 0; valueIndex < currentCount; valueIndex += 8, byteIndex += + bitWidth) { packer.unpack8Values(in, byteIndex + in.position(), currentBuffer, valueIndex); } in.position(newPos); diff --git a/Parquet/src/main/java/io/deephaven/parquet/utils/SeekableChannelsProvider.java b/Parquet/src/main/java/io/deephaven/parquet/utils/SeekableChannelsProvider.java index 01e406d3956..aa270b302ac 100644 --- a/Parquet/src/main/java/io/deephaven/parquet/utils/SeekableChannelsProvider.java +++ b/Parquet/src/main/java/io/deephaven/parquet/utils/SeekableChannelsProvider.java @@ -15,8 +15,7 @@ default SeekableByteChannel getReadChannel(@NotNull final String path) throws IO SeekableByteChannel getReadChannel(@NotNull Path path) throws IOException; - default SeekableByteChannel getWriteChannel(@NotNull final String path, final boolean append) - throws IOException { + default SeekableByteChannel getWriteChannel(@NotNull final String path, final boolean append) throws IOException { return getWriteChannel(Paths.get(path), append); } diff --git a/Parquet/src/test/java/io/deephaven/parquet/utils/CachedChannelProviderTest.java b/Parquet/src/test/java/io/deephaven/parquet/utils/CachedChannelProviderTest.java index ad1c487d79f..b87d48a28bf 100644 --- a/Parquet/src/test/java/io/deephaven/parquet/utils/CachedChannelProviderTest.java +++ b/Parquet/src/test/java/io/deephaven/parquet/utils/CachedChannelProviderTest.java @@ -25,8 +25,7 @@ public void tearDown() { @Test public void testSimpleRead() throws IOException { final SeekableChannelsProvider wrappedProvider = new TestChannelProvider(); - final CachedChannelProvider cachedChannelProvider = - new CachedChannelProvider(wrappedProvider, 100); + final CachedChannelProvider cachedChannelProvider = new 
CachedChannelProvider(wrappedProvider, 100); for (int ii = 0; ii < 100; ++ii) { final SeekableByteChannel[] sameFile = new SeekableByteChannel[10]; for (int jj = 0; jj < sameFile.length; ++jj) { @@ -45,11 +44,9 @@ public void testSimpleRead() throws IOException { @Test public void testSimpleReadWrite() throws IOException { SeekableChannelsProvider wrappedProvider = new TestChannelProvider(); - CachedChannelProvider cachedChannelProvider = - new CachedChannelProvider(wrappedProvider, 100); + CachedChannelProvider cachedChannelProvider = new CachedChannelProvider(wrappedProvider, 100); for (int i = 0; i < 1000; i++) { - SeekableByteChannel rc = - ((i / 100) % 2 == 0 ? cachedChannelProvider.getReadChannel("r" + i) + SeekableByteChannel rc = ((i / 100) % 2 == 0 ? cachedChannelProvider.getReadChannel("r" + i) : cachedChannelProvider.getWriteChannel("w" + i, false)); rc.close(); } @@ -60,8 +57,7 @@ public void testSimpleReadWrite() throws IOException { @Test public void testSimpleWrite() throws IOException { SeekableChannelsProvider wrappedProvider = new TestChannelProvider(); - CachedChannelProvider cachedChannelProvider = - new CachedChannelProvider(wrappedProvider, 100); + CachedChannelProvider cachedChannelProvider = new CachedChannelProvider(wrappedProvider, 100); for (int i = 0; i < 1000; i++) { SeekableByteChannel rc = cachedChannelProvider.getWriteChannel("w" + i, false); rc.close(); @@ -75,8 +71,7 @@ public void testSimpleWrite() throws IOException { @Test public void testSimpleAppend() throws IOException { SeekableChannelsProvider wrappedProvider = new TestChannelProvider(); - CachedChannelProvider cachedChannelProvider = - new CachedChannelProvider(wrappedProvider, 100); + CachedChannelProvider cachedChannelProvider = new CachedChannelProvider(wrappedProvider, 100); for (int i = 0; i < 1000; i++) { SeekableByteChannel rc = cachedChannelProvider.getWriteChannel("a" + i, true); rc.close(); @@ -90,8 +85,7 @@ public void testSimpleAppend() throws IOException { 
@Test public void testCloseOrder() throws IOException { SeekableChannelsProvider wrappedProvider = new TestChannelProvider(); - CachedChannelProvider cachedChannelProvider = - new CachedChannelProvider(wrappedProvider, 100); + CachedChannelProvider cachedChannelProvider = new CachedChannelProvider(wrappedProvider, 100); for (int i = 0; i < 20; i++) { List channels = new ArrayList<>(); for (int j = 0; j < 50; j++) { @@ -112,8 +106,7 @@ public void testCloseOrder() throws IOException { @Test public void testReuse() throws IOException { final SeekableChannelsProvider wrappedProvider = new TestChannelProvider(); - final CachedChannelProvider cachedChannelProvider = - new CachedChannelProvider(wrappedProvider, 50); + final CachedChannelProvider cachedChannelProvider = new CachedChannelProvider(wrappedProvider, 50); final SeekableByteChannel[] someResult = new SeekableByteChannel[50]; for (int ci = 0; ci < someResult.length; ++ci) { someResult[ci] = cachedChannelProvider.getReadChannel("r" + ci); @@ -135,13 +128,11 @@ public void testReuse() throws IOException { @Test public void testReuse10() throws IOException { final SeekableChannelsProvider wrappedProvider = new TestChannelProvider(); - final CachedChannelProvider cachedChannelProvider = - new CachedChannelProvider(wrappedProvider, 100); + final CachedChannelProvider cachedChannelProvider = new CachedChannelProvider(wrappedProvider, 100); final SeekableByteChannel[] someResult = new SeekableByteChannel[100]; for (int pi = 0; pi < 10; ++pi) { for (int ci = 0; ci < 10; ++ci) { - someResult[pi * 10 + ci] = - cachedChannelProvider.getWriteChannel("w" + pi % 10, false); + someResult[pi * 10 + ci] = cachedChannelProvider.getWriteChannel("w" + pi % 10, false); } for (int ci = 0; ci < 10; ++ci) { someResult[pi * 10 + 9 - ci].close(); @@ -150,8 +141,7 @@ public void testReuse10() throws IOException { for (int step = 0; step < 10; ++step) { final SeekableByteChannel[] reused = new SeekableByteChannel[100]; for (int ri = 0; ri 
< 100; ++ri) { - SeekableByteChannel rc = - cachedChannelProvider.getWriteChannel("w" + (ri / 10) % 10, false); + SeekableByteChannel rc = cachedChannelProvider.getWriteChannel("w" + (ri / 10) % 10, false); Assert.assertSame(rc, someResult[ri % 100]); reused[ri] = rc; } diff --git a/Plot/src/main/java/io/deephaven/db/plot/AxesLocation.java b/Plot/src/main/java/io/deephaven/db/plot/AxesLocation.java index cf55f5b46af..e9423456a93 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/AxesLocation.java +++ b/Plot/src/main/java/io/deephaven/db/plot/AxesLocation.java @@ -36,8 +36,7 @@ public boolean equals(Object o) { if (id != that.id) return false; - return chartLocation != null ? chartLocation.equals(that.chartLocation) - : that.chartLocation == null; + return chartLocation != null ? chartLocation.equals(that.chartLocation) : that.chartLocation == null; } @Override diff --git a/Plot/src/main/java/io/deephaven/db/plot/Axis.java b/Plot/src/main/java/io/deephaven/db/plot/Axis.java index faa9ffdaa38..e1c9bb1c8c5 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/Axis.java +++ b/Plot/src/main/java/io/deephaven/db/plot/Axis.java @@ -138,12 +138,11 @@ public interface Axis extends Serializable { /** * Sets this Axis's {@link AxisTransform} as an {@link AxisTransformBusinessCalendar}. * - * @param sds selectable data set (e.g. OneClick filterable table) containing the business - * calendar. - * @param valueColumn name of a column containing String values, where each value is the name of - * a {@link BusinessCalendar}. - * @return this Axis using the business calendar from row 0 of the filtered {@code sds} for the - * business calendar. If no value is found, no transform will be applied. + * @param sds selectable data set (e.g. OneClick filterable table) containing the business calendar. + * @param valueColumn name of a column containing String values, where each value is the name of a + * {@link BusinessCalendar}. 
+ * @return this Axis using the business calendar from row 0 of the filtered {@code sds} for the business calendar. + * If no value is found, no transform will be applied. */ Axis businessTime(final SelectableDataSet sds, final String valueColumn); @@ -168,8 +167,8 @@ public interface Axis extends Serializable { /** * Inverts this Axis so that larger values are closer to the origin. * - * @param invert if true, larger values will be closer to the origin; otherwise, smaller values - * will be closer to the origin. + * @param invert if true, larger values will be closer to the origin; otherwise, smaller values will be closer to + * the origin. * @return this Axes */ Axis invert(final boolean invert); @@ -233,8 +232,8 @@ public interface Axis extends Serializable { /** * Sets the tick locations. * - * @param gapBetweenTicks the distance between ticks. For example, if {@code gapBetweenTicks} is - * 5.0, and the first tick is at 10.0, the next will be drawn at 15.0. + * @param gapBetweenTicks the distance between ticks. For example, if {@code gapBetweenTicks} is 5.0, and the first + * tick is at 10.0, the next will be drawn at 15.0. * @return this Axis */ Axis ticks(double gapBetweenTicks); @@ -256,8 +255,7 @@ public interface Axis extends Serializable { Axis minorTicksVisible(boolean visible); /** - * Sets the number of minor ticks between consecutive major ticks. These minor ticks are equally - * spaced. + * Sets the number of minor ticks between consecutive major ticks. These minor ticks are equally spaced. * * @param count number of minor ticks between consecutive major ticks. 
* @return this Axis diff --git a/Plot/src/main/java/io/deephaven/db/plot/AxisImpl.java b/Plot/src/main/java/io/deephaven/db/plot/AxisImpl.java index abd165f4266..377ec7ddd20 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/AxisImpl.java +++ b/Plot/src/main/java/io/deephaven/db/plot/AxisImpl.java @@ -149,7 +149,7 @@ public Paint getColor() { void setType(final Type type) { if (this.type != null && this.type != type) { throw new PlotUnsupportedOperationException( - "Switching axis types is not supported: " + this.type + " " + type, this); + "Switching axis types is not supported: " + this.type + " " + type, this); } this.type = type; @@ -451,7 +451,7 @@ public AxisImpl businessTime(final BusinessCalendar calendar) { @Override public AxisImpl businessTime(final SelectableDataSet sds, final String valueColumn) { throw new PlotUnsupportedOperationException( - "Selectable business time transformation is not currently supported", this); + "Selectable business time transformation is not currently supported", this); } @Override @@ -498,14 +498,12 @@ public AxisImpl max(double max) { @Override public AxisImpl min(final SelectableDataSet sds, final String valueColumn) { - throw new PlotUnsupportedOperationException( - "Selectable min transformation is not currently supported", this); + throw new PlotUnsupportedOperationException("Selectable min transformation is not currently supported", this); } @Override public AxisImpl max(final SelectableDataSet sds, final String valueColumn) { - throw new PlotUnsupportedOperationException( - "Selectable max transformation is not currently supported", this); + throw new PlotUnsupportedOperationException("Selectable max transformation is not currently supported", this); } diff --git a/Plot/src/main/java/io/deephaven/db/plot/AxisLocation.java b/Plot/src/main/java/io/deephaven/db/plot/AxisLocation.java index be9ca842b92..6f51755e693 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/AxisLocation.java +++ 
b/Plot/src/main/java/io/deephaven/db/plot/AxisLocation.java @@ -40,8 +40,7 @@ public boolean equals(Object o) { return false; if (id != that.id) return false; - return chartLocation != null ? chartLocation.equals(that.chartLocation) - : that.chartLocation == null; + return chartLocation != null ? chartLocation.equals(that.chartLocation) : that.chartLocation == null; } @Override diff --git a/Plot/src/main/java/io/deephaven/db/plot/BaseFigure.java b/Plot/src/main/java/io/deephaven/db/plot/BaseFigure.java index e59b6326db0..ae4d960dbee 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/BaseFigure.java +++ b/Plot/src/main/java/io/deephaven/db/plot/BaseFigure.java @@ -86,12 +86,12 @@ public interface BaseFigure extends Serializable { * Adds a new {@link Chart} to this figure. * * @throws RuntimeException if no space for the new {@link Chart} exists or can not be made - * @return the new {@link Chart}. The {@link Chart} is placed in the next available grid space, - * starting at the upper left hand corner of the grid, going left to right, top to - * bottom. If no available space is found in the grid: + * @return the new {@link Chart}. The {@link Chart} is placed in the next available grid space, starting at the + * upper left hand corner of the grid, going left to right, top to bottom. If no available space is found in + * the grid: *

      - *
    • if this Figure was created with no specified grid size, then the Figure will - * resize itself to add the new {@link Chart};
    • + *
    • if this Figure was created with no specified grid size, then the Figure will resize itself to add the + * new {@link Chart};
    • *
    • if not, a RuntimeException will be thrown.
    • *
    */ @@ -101,12 +101,11 @@ public interface BaseFigure extends Serializable { /** * Adds a new {@link Chart} to this figure. * - * @param index index from the Figure's grid to remove. The index starts at 0 in the upper left - * hand corner of the grid and increases going left to right, top to bottom. E.g. for a - * 2x2 Figure, the indices would be [0, 1] [2, 3]. + * @param index index from the Figure's grid to remove. The index starts at 0 in the upper left hand corner of the + * grid and increases going left to right, top to bottom. E.g. for a 2x2 Figure, the indices would be [0, 1] + * [2, 3]. * @throws RuntimeException if {@code index} is outside this Figure's grid - * @return the new {@link Chart}. The {@link Chart} is placed at the grid space indicated by the - * {@code index}. + * @return the new {@link Chart}. The {@link Chart} is placed at the grid space indicated by the {@code index}. */ Chart newChart(final int index); @@ -116,17 +115,16 @@ public interface BaseFigure extends Serializable { * @param rowNum row index in this Figure's grid. The row index starts at 0. * @param colNum column index in this Figure's grid. The column index starts at 0. * @throws RuntimeException if the coordinates are outside the Figure's grid - * @return the new {@link Chart}. The {@link Chart} is placed at the grid space [{@code rowNum}, - * {@code colNum}. + * @return the new {@link Chart}. The {@link Chart} is placed at the grid space [{@code rowNum}, {@code colNum}. */ Chart newChart(final int rowNum, final int colNum); /** * Removes a chart from the Figure's grid. * - * @param index index from the Figure's grid to remove. The index starts at 0 in the upper left - * hand corner of the grid and increases going left to right, top to bottom. E.g. for a - * 2x2 Figure, the indices would be [0, 1] [2, 3]. + * @param index index from the Figure's grid to remove. The index starts at 0 in the upper left hand corner of the + * grid and increases going left to right, top to bottom. 
E.g. for a 2x2 Figure, the indices would be [0, 1] + * [2, 3]. * @return this Figure with the chart removed. */ BaseFigure removeChart(final int index); @@ -144,9 +142,9 @@ public interface BaseFigure extends Serializable { /** * Returns a chart from this Figure's grid. * - * @param index index from the Figure's grid to remove. The index starts at 0 in the upper left - * hand corner of the grid and increases going left to right, top to bottom. E.g. for a - * 2x2 Figure, the indices would be [0, 1] [2, 3]. + * @param index index from the Figure's grid to remove. The index starts at 0 in the upper left hand corner of the + * grid and increases going left to right, top to bottom. E.g. for a 2x2 Figure, the indices would be [0, 1] + * [2, 3]. * @throws RuntimeException if the index is outside the Figure's grid * @return selected {@link Chart} */ @@ -214,8 +212,7 @@ default BaseFigure save(String saveLocation, int width, int height) { * @return figure * @throws io.deephaven.base.verify.RequirementFailure saveLocation is null */ - default BaseFigure save(String saveLocation, int width, int height, boolean wait, - long timeoutSeconds) { + default BaseFigure save(String saveLocation, int width, int height, boolean wait, long timeoutSeconds) { throw new UnsupportedOperationException(getClass() + " does not implement save"); } } diff --git a/Plot/src/main/java/io/deephaven/db/plot/BaseFigureImpl.java b/Plot/src/main/java/io/deephaven/db/plot/BaseFigureImpl.java index 265959161ff..67a158f91ec 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/BaseFigureImpl.java +++ b/Plot/src/main/java/io/deephaven/db/plot/BaseFigureImpl.java @@ -36,8 +36,7 @@ public class BaseFigureImpl implements BaseFigure, PlotExceptionCause { private PlotInfo plotInfo; private String figureName; private int sessionId; - private long updateInterval = - Configuration.getInstance().getLongWithDefault("plot.update.interval", 1000L); + private long updateInterval = 
Configuration.getInstance().getLongWithDefault("plot.update.interval", 1000L); private transient Map>> tableFunctionMap; private transient Map>> tableMapFunctionMap; @@ -45,8 +44,8 @@ public class BaseFigureImpl implements BaseFigure, PlotExceptionCause { /** - * Creates a new Figure instance with a 1x1 grid. If newChart() with no arguments is called on - * this new Figure, the Figure will resize itself to hold the new {@link Chart}. + * Creates a new Figure instance with a 1x1 grid. If newChart() with no arguments is called on this new Figure, the + * Figure will resize itself to hold the new {@link Chart}. */ public BaseFigureImpl() { this(1, 1, true); @@ -190,15 +189,14 @@ public Set getTableHandles() { for (ChartImpl chart : getCharts().getCharts()) { for (AxesImpl axes : chart.getAxes()) { - for (SeriesCollection.SeriesDescription seriesDescription : axes.dataSeries() - .getSeriesDescriptions().values()) { + for (SeriesCollection.SeriesDescription seriesDescription : axes.dataSeries().getSeriesDescriptions() + .values()) { result.addAll(seriesDescription.getSeries().getTableHandles()); } } if (chart.getChartTitle() instanceof DynamicChartTitle.ChartTitleTable) { - result.add( - ((DynamicChartTitle.ChartTitleTable) chart.getChartTitle()).getTableHandle()); + result.add(((DynamicChartTitle.ChartTitleTable) chart.getChartTitle()).getTableHandle()); } } @@ -219,8 +217,7 @@ public Set getTableMapHandles() { } if (chart.getChartTitle() instanceof DynamicChartTitle.ChartTitleSwappableTable) { - result.add(((DynamicChartTitle.ChartTitleSwappableTable) chart.getChartTitle()) - .getTableMapHandle()); + result.add(((DynamicChartTitle.ChartTitleSwappableTable) chart.getChartTitle()).getTableMapHandle()); } } @@ -258,14 +255,14 @@ public Map>> getTableFunctionMap() { } public void registerTableMapFunction(final TableMapHandle tableMapHandle, - final Function tableTransform) { + final Function tableTransform) { if (tableMapFunctionMap == null) { tableMapFunctionMap = new 
HashMap<>(); } final TableMap tMap = tableMapHandle.getTableMap(); - tableMapHandle.applyFunction(tableTransform); // allows the signature of the TableMapHandle - // to be changed if necessary + tableMapHandle.applyFunction(tableTransform); // allows the signature of the TableMapHandle to be changed if + // necessary tableMapFunctionMap.putIfAbsent(tMap, new LinkedHashSet<>()); tableMapFunctionMap.get(tMap).add(tm -> tm.transformTables(tableTransform)); } @@ -409,8 +406,7 @@ private int toCoordinate(final int chart, final int coord) { private int toCoordinate(int chart, int coord, int gridWidth) { if (gridWidth == 0) { - throw new PlotIllegalArgumentException( - "Can not determine chart location in grid; chart = " + chart, this); + throw new PlotIllegalArgumentException("Can not determine chart location in grid; chart = " + chart, this); } switch (coord) { @@ -419,8 +415,8 @@ private int toCoordinate(int chart, int coord, int gridWidth) { case 1: // y coordinate return chart / gridWidth; default: - throw new PlotIllegalArgumentException( - "Can not determine chart location in grid; coord = " + coord, this); + throw new PlotIllegalArgumentException("Can not determine chart location in grid; coord = " + coord, + this); } } @@ -484,8 +480,7 @@ public void consolidateTableMaps() { for (final TableMapHandle h : getTableMapHandles()) { if (h instanceof TableBackedTableMapHandle) { - thMap.computeIfAbsent(((TableBackedTableMapHandle) h).getTable(), - t -> new HashSet<>()).add(h); + thMap.computeIfAbsent(((TableBackedTableMapHandle) h).getTable(), t -> new HashSet<>()).add(h); } } @@ -496,14 +491,13 @@ public void consolidateTableMaps() { final Map, TableMap> byColMap = new HashMap<>(); for (final TableMapHandle h : hs) { final Set keyColumns = h.getKeyColumns(); - final String[] keyColumnsArray = - keyColumns.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + final String[] keyColumnsArray = keyColumns.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY); final TableMap map 
= byColMap.computeIfAbsent(keyColumns, - x -> { - final TableMap handleMap = h.getTableMap(); - return handleMap == null ? table.byExternal(keyColumnsArray) : handleMap; - }); + x -> { + final TableMap handleMap = h.getTableMap(); + return handleMap == null ? table.byExternal(keyColumnsArray) : handleMap; + }); h.setTableMap(map); h.setKeyColumnsOrdered(keyColumnsArray); @@ -511,8 +505,8 @@ public void consolidateTableMaps() { } } - // Find the common tables and common columns across the figure so that the minimum set of table - // data can be defined for this figure widget + // Find the common tables and common columns across the figure so that the minimum set of table data can be defined + // for this figure widget public void consolidateTables() { final Map> colMap = new IdentityHashMap<>(); final Map> thMap = new IdentityHashMap<>(); diff --git a/Plot/src/main/java/io/deephaven/db/plot/Chart.java b/Plot/src/main/java/io/deephaven/db/plot/Chart.java index 94103dedc83..436b88712ea 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/Chart.java +++ b/Plot/src/main/java/io/deephaven/db/plot/Chart.java @@ -47,8 +47,7 @@ public interface Chart extends Serializable { * * @param t table * @param titleColumns columns to include in the chart title - * @return this {@link Chart} with the title set to display comma-separated values from the - * table + * @return this {@link Chart} with the title set to display comma-separated values from the table */ default Chart chartTitle(final Table t, final String... titleColumns) { return chartTitle(false, t, titleColumns); @@ -57,18 +56,16 @@ default Chart chartTitle(final Table t, final String... titleColumns) { /** * Sets the title of this Chart. * - * @param showColumnNamesInTitle Whether to show column names in title. If this is true, the - * title format will include the column name before the comma separated values; otherwise - * only the comma separated values will be included. 
+ * @param showColumnNamesInTitle Whether to show column names in title. If this is true, the title format will + * include the column name before the comma separated values; otherwise only the comma separated values will + * be included. * @param t table * @param titleColumns columns to include in the chart title - * @return this {@link Chart} with the title set to display comma-separated values from the - * table + * @return this {@link Chart} with the title set to display comma-separated values from the table */ - default Chart chartTitle(final boolean showColumnNamesInTitle, final Table t, - final String... titleColumns) { + default Chart chartTitle(final boolean showColumnNamesInTitle, final Table t, final String... titleColumns) { return chartTitle(showColumnNamesInTitle ? defaultTitleFormatWithColumnNames(titleColumns) - : defaultTitleFormat(titleColumns), t, titleColumns); + : defaultTitleFormat(titleColumns), t, titleColumns); } /** @@ -86,8 +83,7 @@ default Chart chartTitle(final boolean showColumnNamesInTitle, final Table t, * * @param sds selectable data set (e.g. OneClick table) * @param titleColumns columns to include in the chart title - * @return this {@link Chart} with the title set to display comma-separated values from the - * table + * @return this {@link Chart} with the title set to display comma-separated values from the table */ default Chart chartTitle(final SelectableDataSet sds, final String... titleColumns) { return chartTitle(false, sds, titleColumns); @@ -96,18 +92,17 @@ default Chart chartTitle(final SelectableDataSet sds, final String... titleColum /** * Sets the title of this Chart. * - * @param showColumnNamesInTitle Whether to show column names in title. If this is true, the - * title format will include the column name before the comma separated values; otherwise - * only the comma separated values will be included. + * @param showColumnNamesInTitle Whether to show column names in title. 
If this is true, the title format will + * include the column name before the comma separated values; otherwise only the comma separated values will + * be included. * @param sds selectable data set (e.g. OneClick table) * @param titleColumns columns to include in the chart title - * @return this {@link Chart} with the title set to display comma-separated values from the - * table + * @return this {@link Chart} with the title set to display comma-separated values from the table */ default Chart chartTitle(final boolean showColumnNamesInTitle, final SelectableDataSet sds, - final String... titleColumns) { + final String... titleColumns) { return chartTitle(showColumnNamesInTitle ? defaultTitleFormatWithColumnNames(titleColumns) - : defaultTitleFormat(titleColumns), sds, titleColumns); + : defaultTitleFormat(titleColumns), sds, titleColumns); } /** @@ -118,14 +113,13 @@ default Chart chartTitle(final boolean showColumnNamesInTitle, final SelectableD * @param titleColumns columns to include in the chart title * @return this {@link Chart} with the title set to display values from the table */ - Chart chartTitle(final String titleFormat, final SelectableDataSet sds, - final String... titleColumns); + Chart chartTitle(final String titleFormat, final SelectableDataSet sds, final String... titleColumns); /** * Sets the maximum row values that will be shown in title. *

    - * If total rows < {@code maxRowsCount}, then all the values will be shown separated by comma, - * otherwise just {@code maxRowsCount} values will be shown along with ellipsis.
    + * If total rows < {@code maxRowsCount}, then all the values will be shown separated by comma, otherwise just + * {@code maxRowsCount} values will be shown along with ellipsis.
    * if {@code maxRowsCount} is < 0, all values will be shown.
    * if {@code maxRowsCount} is 0, then just first value will be shown without ellipsis.
    * The default is 0. diff --git a/Plot/src/main/java/io/deephaven/db/plot/ChartArray.java b/Plot/src/main/java/io/deephaven/db/plot/ChartArray.java index 3d6950ca1e0..cfe59dc0ce7 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/ChartArray.java +++ b/Plot/src/main/java/io/deephaven/db/plot/ChartArray.java @@ -85,8 +85,7 @@ int nextOpenIndex() { void resize(final int width, final int height) { if (width < takenIndices[0].length || height < takenIndices.length) { - throw new PlotUnsupportedOperationException("Can not resize ChartArray to be smaller", - plotInfo); + throw new PlotUnsupportedOperationException("Can not resize ChartArray to be smaller", plotInfo); } final int[][] newArray = new int[height][width]; @@ -120,19 +119,17 @@ void removeChart(final int row, final int col) { final int index = takenIndices[row][col]; if (index < 0 || index > charts.size() - 1) { throw new PlotIllegalArgumentException( - "Can not remove chart at (" + row + ", " + col + ")- chart does not exist!", - plotInfo); + "Can not remove chart at (" + row + ", " + col + ")- chart does not exist!", plotInfo); } final ChartImpl chart = charts.get(index); if (chart == null) { throw new PlotIllegalArgumentException( - "Can not remove chart at (" + row + ", " + col + ")- chart does not exist!", - plotInfo); + "Can not remove chart at (" + row + ", " + col + ")- chart does not exist!", plotInfo); } markAllNoCheck(takenIndices, chart.row(), chart.row() + chart.rowSpan(), chart.column(), - chart.column() + chart.colSpan(), EMPTY_INDEX); + chart.column() + chart.colSpan(), EMPTY_INDEX); } void resizeChart(final int row, final int col, final int rowspan, final int colspan) { @@ -141,8 +138,7 @@ void resizeChart(final int row, final int col, final int rowspan, final int cols markIndices(row, col, rowspan, colspan, index); } - private void markIndices(final int row, final int col, final int rowspan, final int colspan, - final int index) { + private void markIndices(final int row, final int col, 
final int rowspan, final int colspan, final int index) { // check first final int maxRow = row + rowspan; final int maxCol = col + colspan; @@ -159,16 +155,16 @@ private void markIndices(final int row, final int col, final int rowspan, final if (!(index < 0 || index > charts.size() - 1)) { final ChartImpl chart = charts.get(index); if (chart != null && (rowspan < chart.rowSpan() || colspan < chart.colSpan())) { - markAllNoCheck(takenIndices, chart.row(), chart.row() + chart.rowSpan(), - chart.column(), chart.column() + chart.colSpan(), EMPTY_INDEX); + markAllNoCheck(takenIndices, chart.row(), chart.row() + chart.rowSpan(), chart.column(), + chart.column() + chart.colSpan(), EMPTY_INDEX); } } markAllNoCheck(takenIndices, row, maxRow, col, maxCol, index); } - private static void markAllNoCheck(final int[][] matrix, final int rowMin, final int rowMax, - final int colMin, final int colMax, final int index) { + private static void markAllNoCheck(final int[][] matrix, final int rowMin, final int rowMax, final int colMin, + final int colMax, final int index) { for (int i = rowMin; i < rowMax; i++) { for (int j = colMin; j < colMax; j++) { matrix[i][j] = index; @@ -178,16 +174,14 @@ private static void markAllNoCheck(final int[][] matrix, final int rowMin, final private void checkBounds(final int row, final int col) { if (row < 0 || col < 0) { - throw new PlotIllegalArgumentException( - "Chart indices must be >0. row:" + row + " col:" + col, plotInfo); + throw new PlotIllegalArgumentException("Chart indices must be >0. row:" + row + " col:" + col, plotInfo); } if (row >= takenIndices.length || col >= takenIndices[0].length) { throw new PlotIllegalArgumentException( - "Chart is not in grid. Trying to access chart at position [" + (row + 1) + "x" - + (col + 1) + "], chart grid is [" + takenIndices.length + "x" - + takenIndices[0].length + "]", - plotInfo); + "Chart is not in grid. 
Trying to access chart at position [" + (row + 1) + "x" + (col + 1) + + "], chart grid is [" + takenIndices.length + "x" + takenIndices[0].length + "]", + plotInfo); } } diff --git a/Plot/src/main/java/io/deephaven/db/plot/ChartImpl.java b/Plot/src/main/java/io/deephaven/db/plot/ChartImpl.java index 7a3a8e27b6f..cf6635d41bc 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/ChartImpl.java +++ b/Plot/src/main/java/io/deephaven/db/plot/ChartImpl.java @@ -34,7 +34,7 @@ public class ChartImpl implements Chart, PlotExceptionCause { private final BaseFigureImpl figure; @SuppressWarnings("unchecked") private final List[] axis = - new List[] {new ArrayList(), new ArrayList(), new ArrayList()}; + new List[] {new ArrayList(), new ArrayList(), new ArrayList()}; private final List axes = new ArrayList<>(); private ChartType chartType; private Font titleFont; @@ -58,8 +58,7 @@ public class ChartImpl implements Chart, PlotExceptionCause { this.figure = figure; this.row = row; this.column = column; - maxVisibleRowsCount = - Configuration.getInstance().getIntegerWithDefault(MAX_VISIBLE_ROWS_COUNT_PROP, 0); + maxVisibleRowsCount = Configuration.getInstance().getIntegerWithDefault(MAX_VISIBLE_ROWS_COUNT_PROP, 0); } /** @@ -146,9 +145,7 @@ public int rowSpan() { private void resize(int rowspan, int colspan) { if (rowspan < 1 || colspan < 1) { throw new PlotIllegalArgumentException( - "Row and column span must be at least one! rowspan=" + rowspan + ", colspan=" - + colspan, - this); + "Row and column span must be at least one! 
rowspan=" + rowspan + ", colspan=" + colspan, this); } figure.resizePlot(row, column, rowspan, colspan); this.rowspan = rowspan; @@ -168,9 +165,7 @@ void setChartType(final ChartType chartType) { this.chartType = chartType; } else if (this.chartType != chartType) { throw new PlotUnsupportedOperationException( - "Attempting to create inconsistent plot types: " + this.chartType + ", " - + chartType, - this); + "Attempting to create inconsistent plot types: " + this.chartType + ", " + chartType, this); } } @@ -187,8 +182,7 @@ AxesImpl newAxes(final AxisImpl[] ax, final String name) { for (final AxesImpl aa : axes) { if (aa.name().equals(n)) { - throw new PlotRuntimeException( - "Axis with this name already exists. name=" + aa.name(), this); + throw new PlotRuntimeException("Axis with this name already exists. name=" + aa.name(), this); } } @@ -207,8 +201,8 @@ public int dimension() { continue; } if (d != -1 && d != dd) { - throw new PlotRuntimeException( - "Inconsistent axis dimensions in chart: dim1=" + d + " dim2=" + dd, this); + throw new PlotRuntimeException("Inconsistent axis dimensions in chart: dim1=" + d + " dim2=" + dd, + this); } d = dd; @@ -345,8 +339,8 @@ private Set getTableHandles() { final Set result = new HashSet<>(); for (AxesImpl axes : getAxes()) { - for (SeriesCollection.SeriesDescription seriesDescription : axes.dataSeries() - .getSeriesDescriptions().values()) { + for (SeriesCollection.SeriesDescription seriesDescription : axes.dataSeries().getSeriesDescriptions() + .values()) { result.addAll(seriesDescription.getSeries().getTableHandles()); } } @@ -362,15 +356,14 @@ private Set getSwappableTables() { final Set result = new HashSet<>(); for (AxesImpl axes : getAxes()) { - for (SeriesCollection.SeriesDescription seriesDescription : axes.dataSeries() - .getSeriesDescriptions().values()) { + for (SeriesCollection.SeriesDescription seriesDescription : axes.dataSeries().getSeriesDescriptions() + .values()) { 
result.addAll(seriesDescription.getSeries().getSwappableTables()); } } if (getChartTitle() instanceof DynamicChartTitle.ChartTitleSwappableTable) { - result.add( - ((DynamicChartTitle.ChartTitleSwappableTable) getChartTitle()).getSwappableTable()); + result.add(((DynamicChartTitle.ChartTitleSwappableTable) getChartTitle()).getSwappableTable()); } return result; @@ -412,36 +405,32 @@ public ChartImpl chartTitle(final String title) { } @Override - public ChartImpl chartTitle(final String titleFormat, final Table t, - final String... titleColumns) { + public ChartImpl chartTitle(final String titleFormat, final Table t, final String... titleColumns) { ArgumentValidations.assertNotNull(t, "table", getPlotInfo()); ArgumentValidations.assertNotNull(titleColumns, "titleColumns", getPlotInfo()); - ArgumentValidations.assertGreaterThan0(titleColumns.length, "titleColumns size", - getPlotInfo()); + ArgumentValidations.assertGreaterThan0(titleColumns.length, "titleColumns size", getPlotInfo()); - IntStream.range(0, titleColumns.length).forEachOrdered(i -> ArgumentValidations - .assertNotNull(titleColumns[i], "titleColumn[" + i + "]", getPlotInfo())); + IntStream.range(0, titleColumns.length).forEachOrdered( + i -> ArgumentValidations.assertNotNull(titleColumns[i], "titleColumn[" + i + "]", getPlotInfo())); ArgumentValidations.assertColumnsInTable(t, getPlotInfo(), titleColumns); final TableHandle tableHandle = new TableHandle(t, titleColumns); // set dynamicTitle for table - this.chartTitle = new DynamicChartTitle.ChartTitleTable(titleFormat, tableHandle, - getPlotInfo(), maxVisibleRowsCount, titleColumns); + this.chartTitle = new DynamicChartTitle.ChartTitleTable(titleFormat, tableHandle, getPlotInfo(), + maxVisibleRowsCount, titleColumns); return this; } @Override - public ChartImpl chartTitle(final String titleFormat, final SelectableDataSet sds, - final String... 
titleColumns) { + public ChartImpl chartTitle(final String titleFormat, final SelectableDataSet sds, final String... titleColumns) { ArgumentValidations.assertNotNull(sds, "sds", getPlotInfo()); ArgumentValidations.assertNotNull(titleColumns, "titleColumns", getPlotInfo()); - ArgumentValidations.assertGreaterThan0(titleColumns.length, "titleColumns size", - getPlotInfo()); + ArgumentValidations.assertGreaterThan0(titleColumns.length, "titleColumns size", getPlotInfo()); for (int i = 0; i < titleColumns.length; i++) { final String titleColumn = titleColumns[i]; @@ -449,14 +438,13 @@ public ChartImpl chartTitle(final String titleFormat, final SelectableDataSet sd } final SwappableTable swappableTable = sds.getSwappableTable("ChartTitle", this, - (Function & Serializable) table -> table, titleColumns); + (Function & Serializable) table -> table, titleColumns); - ArgumentValidations.assertColumnsInTable(swappableTable.getTableDefinition(), getPlotInfo(), - titleColumns); + ArgumentValidations.assertColumnsInTable(swappableTable.getTableDefinition(), getPlotInfo(), titleColumns); // set dynamicTitle for Swappable table - this.chartTitle = new DynamicChartTitle.ChartTitleSwappableTable(titleFormat, - swappableTable, getPlotInfo(), maxVisibleRowsCount, titleColumns); + this.chartTitle = new DynamicChartTitle.ChartTitleSwappableTable(titleFormat, swappableTable, getPlotInfo(), + maxVisibleRowsCount, titleColumns); return this; } @@ -466,8 +454,7 @@ public Chart maxRowsInTitle(final int maxRowsCount) { chartTitle = new ChartTitle(getPlotInfo()); } - // we're setting at both places since user can call chartTitle() and maxRowsInTitle() in any - // order. + // we're setting at both places since user can call chartTitle() and maxRowsInTitle() in any order. 
this.maxVisibleRowsCount = maxRowsCount; chartTitle.maxVisibleRowsCount = maxRowsCount; return this; @@ -616,8 +603,7 @@ public Axes axes(int id) { final int size = axes.size(); if (id < 0 || id >= size) { throw new PlotIllegalArgumentException( - "Axes not in chart: index=" + id + ", required in range = [0," + (size - 1) + "]", - this); + "Axes not in chart: index=" + id + ", required in range = [0," + (size - 1) + "]", this); } return axes.get(id); } diff --git a/Plot/src/main/java/io/deephaven/db/plot/ChartTitle.java b/Plot/src/main/java/io/deephaven/db/plot/ChartTitle.java index 2818c3e3cbb..d94ceeec682 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/ChartTitle.java +++ b/Plot/src/main/java/io/deephaven/db/plot/ChartTitle.java @@ -38,8 +38,7 @@ public class ChartTitle implements Serializable { ChartTitle(final PlotInfo plotInfo) { - this(plotInfo, - Configuration.getInstance().getIntegerWithDefault(MAX_VISIBLE_ROWS_COUNT_PROP, 3)); + this(plotInfo, Configuration.getInstance().getIntegerWithDefault(MAX_VISIBLE_ROWS_COUNT_PROP, 3)); } ChartTitle(final PlotInfo plotInfo, final int maxVisibleRowsCount) { diff --git a/Plot/src/main/java/io/deephaven/db/plot/DynamicChartTitle.java b/Plot/src/main/java/io/deephaven/db/plot/DynamicChartTitle.java index 664e0d5c78d..885b3633f78 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/DynamicChartTitle.java +++ b/Plot/src/main/java/io/deephaven/db/plot/DynamicChartTitle.java @@ -11,18 +11,15 @@ /** * Dynamic chart title created from table columns *

    - * For each column, by default the title String takes 1 values. This is configurable either via - * property "Plot.chartTitle.maxRowsInTitle" or - * {@link io.deephaven.db.plot.Chart#maxRowsInTitle(int)} + * For each column, by default the title String takes 1 values. This is configurable either via property + * "Plot.chartTitle.maxRowsInTitle" or {@link io.deephaven.db.plot.Chart#maxRowsInTitle(int)} *

    * Also, the default format for the title is: *

    - * ${comma separated values}        ${comma separated - * values},... + * ${comma separated values}        ${comma separated values},... *

    *

    - * In order to customize this format, (a {@link java.text.MessageFormat} instance), please refer - *
    + * In order to customize this format, (a {@link java.text.MessageFormat} instance), please refer
    * {@link io.deephaven.db.plot.Chart#chartTitle(String, Table, String...)} and
    * {@link io.deephaven.db.plot.Chart#chartTitle(String, SelectableDataSet, String...)} */ @@ -37,8 +34,7 @@ public abstract class DynamicChartTitle extends ChartTitle { */ String dynamicTitleString; - DynamicChartTitle(final String titleFormat, final PlotInfo plotInfo, - final int maxVisibleRowsCount) { + DynamicChartTitle(final String titleFormat, final PlotInfo plotInfo, final int maxVisibleRowsCount) { super(plotInfo, maxVisibleRowsCount); this.titleFormat = titleFormat; } @@ -68,8 +64,8 @@ public static abstract class DynamicChartTitleTable extends DynamicChartTitle { private final Set titleColumns; - DynamicChartTitleTable(final String titleFormat, final Set titleColumns, - final PlotInfo plotInfo, final int maxVisibleRowsCount) { + DynamicChartTitleTable(final String titleFormat, final Set titleColumns, final PlotInfo plotInfo, + final int maxVisibleRowsCount) { super(titleFormat, plotInfo, maxVisibleRowsCount); this.titleColumns = titleColumns; } @@ -112,8 +108,7 @@ static String defaultTitleFormat(final String... titleColumns) { final StringBuilder sb = new StringBuilder(); for (int i = 0; i < titleColumns.length; i++) { - sb.append("{").append(i).append("}") - .append(i == titleColumns.length - 1 ? "" : " "); + sb.append("{").append(i).append("}").append(i == titleColumns.length - 1 ? "" : " "); } return sb.toString(); @@ -140,10 +135,9 @@ public static class ChartTitleSwappableTable extends DynamicChartTitleTable { private transient Table localTable; - ChartTitleSwappableTable(final String titleFormat, final SwappableTable swappableTable, - final PlotInfo plotInfo, final int maxVisibleRowsCount, final String... titleColumns) { - super(titleFormat, new LinkedHashSet<>(Arrays.asList(titleColumns)), plotInfo, - maxVisibleRowsCount); + ChartTitleSwappableTable(final String titleFormat, final SwappableTable swappableTable, final PlotInfo plotInfo, + final int maxVisibleRowsCount, final String... 
titleColumns) { + super(titleFormat, new LinkedHashSet<>(Arrays.asList(titleColumns)), plotInfo, maxVisibleRowsCount); this.swappableTable = swappableTable; if (swappableTable instanceof SwappableTableMap) { @@ -198,10 +192,9 @@ public TableHandle getTableHandle() { return tableHandle; } - ChartTitleTable(final String titleFormat, final TableHandle tableHandle, - final PlotInfo plotInfo, final int maxVisibleRowsCount, final String... titleColumns) { - super(titleFormat, new LinkedHashSet<>(Arrays.asList(titleColumns)), plotInfo, - maxVisibleRowsCount); + ChartTitleTable(final String titleFormat, final TableHandle tableHandle, final PlotInfo plotInfo, + final int maxVisibleRowsCount, final String... titleColumns) { + super(titleFormat, new LinkedHashSet<>(Arrays.asList(titleColumns)), plotInfo, maxVisibleRowsCount); this.tableHandle = tableHandle; } diff --git a/Plot/src/main/java/io/deephaven/db/plot/FigureWidget.java b/Plot/src/main/java/io/deephaven/db/plot/FigureWidget.java index 14efc4f917e..e8d170b8d8d 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/FigureWidget.java +++ b/Plot/src/main/java/io/deephaven/db/plot/FigureWidget.java @@ -15,8 +15,7 @@ /** * Displayable version of a Figure. 
*/ -public class FigureWidget extends FigureImpl - implements LiveWidget, LiveWidgetVisibilityProvider, FigureWidgetMarker { +public class FigureWidget extends FigureImpl implements LiveWidget, LiveWidgetVisibilityProvider, FigureWidgetMarker { private static final long serialVersionUID = 763409998768966385L; private String[] validGroups; diff --git a/Plot/src/main/java/io/deephaven/db/plot/Font.java b/Plot/src/main/java/io/deephaven/db/plot/Font.java index d2d3f1cffe1..651b704b9aa 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/Font.java +++ b/Plot/src/main/java/io/deephaven/db/plot/Font.java @@ -123,8 +123,7 @@ public static String[] fontStyleNames() { private final java.awt.Font font; /** - * Creates a new instance of the Font with the specified {@code family}, {@code style}, and - * {@code size}. + * Creates a new instance of the Font with the specified {@code family}, {@code style}, and {@code size}. * * @param family font family; if null, set to Arial * @param style font style; if null, set to {@link Font.FontStyle} PLAIN @@ -140,8 +139,7 @@ public Font(final String family, final FontStyle style, final int size) { } /** - * Creates a new instance of the Font with the specified {@code family}, {@code style}, and - * {@code size}. + * Creates a new instance of the Font with the specified {@code family}, {@code style}, and {@code size}. 
* * @param family font family; if null, set to Arial * @param style font style; if null, set to {@link Font.FontStyle} PLAIN @@ -194,8 +192,7 @@ public static Font font(final String family, final String style, final int size) * @return array of available Font family names */ public static String[] fontFamilyNames() { - return java.awt.GraphicsEnvironment.getLocalGraphicsEnvironment() - .getAvailableFontFamilyNames(); + return java.awt.GraphicsEnvironment.getLocalGraphicsEnvironment().getAvailableFontFamilyNames(); } diff --git a/Plot/src/main/java/io/deephaven/db/plot/LineStyle.java b/Plot/src/main/java/io/deephaven/db/plot/LineStyle.java index ef93e994812..01529334066 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/LineStyle.java +++ b/Plot/src/main/java/io/deephaven/db/plot/LineStyle.java @@ -11,19 +11,18 @@ import java.util.List; /** - * Specifications for the style of a line. These specifications include line thickness, dash - * patterns, end styles, segment join styles, and shapes. + * Specifications for the style of a line. These specifications include line thickness, dash patterns, end styles, + * segment join styles, and shapes. * *

    * Line thickness is 1 by default. Larger numbers draw thicker lines. *

    *

    - * Dash pattern is defined by an array. If only one value is included in the array, the dash and the - * gap after the dash will be the same. If more than one value is used in the array, the first value - * represents the length of the first dash in the line. The next value represents the length of the - * gap between it and the next dash. Additional values can be added into the array for subsequent - * dash/gap combinations. For example, the array [20,5] creates a dash pattern with a 20 length dash - * and a 5 length gap. This pattern is repeated till the end of the line. + * Dash pattern is defined by an array. If only one value is included in the array, the dash and the gap after the dash + * will be the same. If more than one value is used in the array, the first value represents the length of the first + * dash in the line. The next value represents the length of the gap between it and the next dash. Additional values can + * be added into the array for subsequent dash/gap combinations. For example, the array [20,5] creates a dash pattern + * with a 20 length dash and a 5 length gap. This pattern is repeated till the end of the line. *

    */ public class LineStyle implements Serializable { @@ -161,8 +160,7 @@ public static String[] lineJoinStyleNames() { * @param joinStyle line join style * @param dashPattern dash pattern */ - public LineStyle(double width, LineEndStyle endStyle, LineJoinStyle joinStyle, - double... dashPattern) { + public LineStyle(double width, LineEndStyle endStyle, LineJoinStyle joinStyle, double... dashPattern) { this.width = width; this.endStyle = endStyle; this.joinStyle = joinStyle; @@ -171,8 +169,7 @@ public LineStyle(double width, LineEndStyle endStyle, LineJoinStyle joinStyle, } /** - * Creates a LineStyle with specified thickness, {@link LineEndStyle}, {@link LineJoinStyle}, - * and dash pattern. + * Creates a LineStyle with specified thickness, {@link LineEndStyle}, {@link LineJoinStyle}, and dash pattern. * * @param width line thickness * @param endStyle line end style @@ -180,17 +177,14 @@ public LineStyle(double width, LineEndStyle endStyle, LineJoinStyle joinStyle, * @param dashPattern dash pattern * @param data type of {@code dashPattern} */ - public LineStyle(double width, LineEndStyle endStyle, - LineJoinStyle joinStyle, List dashPattern) { - this(width, endStyle, joinStyle, - dashPattern == null ? null - : dashPattern.stream().mapToDouble(x -> x == null ? Double.NaN : x.doubleValue()) - .toArray()); + public LineStyle(double width, LineEndStyle endStyle, LineJoinStyle joinStyle, + List dashPattern) { + this(width, endStyle, joinStyle, dashPattern == null ? null + : dashPattern.stream().mapToDouble(x -> x == null ? Double.NaN : x.doubleValue()).toArray()); } /** - * Creates a LineStyle with specified thickness, {@link LineEndStyle}, {@link LineJoinStyle}, - * and dash pattern. + * Creates a LineStyle with specified thickness, {@link LineEndStyle}, {@link LineJoinStyle}, and dash pattern. * * @param width line thickness * @param endStyle line end style descriptor @@ -202,8 +196,7 @@ public LineStyle(double width, String endStyle, String joinStyle, double... 
dash } /** - * Creates a LineStyle with specified thickness, {@link LineEndStyle}, {@link LineJoinStyle}, - * and dash pattern. + * Creates a LineStyle with specified thickness, {@link LineEndStyle}, {@link LineJoinStyle}, and dash pattern. * * @param width line thickness * @param endStyle line end style descriptor @@ -211,17 +204,14 @@ public LineStyle(double width, String endStyle, String joinStyle, double... dash * @param dashPattern dash pattern * @param data type of {@code dashPattern} */ - public LineStyle(double width, String endStyle, String joinStyle, - List dashPattern) { - this(width, lineEndStyle(endStyle), lineJoinStyle(joinStyle), - dashPattern == null ? null - : dashPattern.stream().mapToDouble(x -> x == null ? Double.NaN : x.doubleValue()) - .toArray()); + public LineStyle(double width, String endStyle, String joinStyle, List dashPattern) { + this(width, lineEndStyle(endStyle), lineJoinStyle(joinStyle), dashPattern == null ? null + : dashPattern.stream().mapToDouble(x -> x == null ? Double.NaN : x.doubleValue()).toArray()); } /** - * Creates a LineStyle with specified thickness. Defaults the {@link LineEndStyle} and - * {@link LineJoinStyle} to {@link LineJoinStyle#ROUND}. No dash pattern is set. + * Creates a LineStyle with specified thickness. Defaults the {@link LineEndStyle} and {@link LineJoinStyle} to + * {@link LineJoinStyle#ROUND}. No dash pattern is set. * * @param width line thickness */ @@ -230,8 +220,8 @@ public LineStyle(final double width) { } /** - * Creates a LineStyle with specified thickness and dash pattern. Defaults the - * {@link LineEndStyle} and {@link LineJoinStyle} to {@link LineJoinStyle#ROUND}. + * Creates a LineStyle with specified thickness and dash pattern. Defaults the {@link LineEndStyle} and + * {@link LineJoinStyle} to {@link LineJoinStyle#ROUND}. 
* * @param width line thickness * @param dashPattern dash pattern @@ -241,8 +231,8 @@ public LineStyle(final double width, double[] dashPattern) { } /** - * Creates a LineStyle with specified thickness and dash pattern. Defaults the - * {@link LineEndStyle} and {@link LineJoinStyle} to {@link LineJoinStyle#ROUND}. + * Creates a LineStyle with specified thickness and dash pattern. Defaults the {@link LineEndStyle} and + * {@link LineJoinStyle} to {@link LineJoinStyle#ROUND}. * * @param width line thickness * @param dashPattern dash pattern @@ -252,8 +242,8 @@ public LineStyle(final double width, int[] dashPattern) { } /** - * Creates a LineStyle with specified thickness and dash pattern. Defaults the - * {@link LineEndStyle} and {@link LineJoinStyle} to {@link LineJoinStyle#ROUND}. + * Creates a LineStyle with specified thickness and dash pattern. Defaults the {@link LineEndStyle} and + * {@link LineJoinStyle} to {@link LineJoinStyle#ROUND}. * * @param width line thickness * @param dashPattern dash pattern @@ -263,8 +253,8 @@ public LineStyle(final double width, long[] dashPattern) { } /** - * Creates a LineStyle with specified thickness and dash pattern. Defaults the - * {@link LineEndStyle} and {@link LineJoinStyle} to {@link LineJoinStyle#ROUND}. + * Creates a LineStyle with specified thickness and dash pattern. Defaults the {@link LineEndStyle} and + * {@link LineJoinStyle} to {@link LineJoinStyle#ROUND}. * * @param width line thickness * @param dashPattern dash pattern @@ -274,8 +264,8 @@ public LineStyle(final double width, float[] dashPattern) { } /** - * Creates a LineStyle with specified thickness and dash pattern. Defaults the - * {@link LineEndStyle} and {@link LineJoinStyle} to {@link LineJoinStyle#ROUND}. + * Creates a LineStyle with specified thickness and dash pattern. Defaults the {@link LineEndStyle} and + * {@link LineJoinStyle} to {@link LineJoinStyle#ROUND}. 
* * @param width line thickness * @param dashPattern dash pattern @@ -286,8 +276,8 @@ public LineStyle(final double width, T[] dashPattern) { } /** - * Creates a LineStyle with specified thickness and dash pattern. Defaults the - * {@link LineEndStyle} and {@link LineJoinStyle} to {@link LineJoinStyle#ROUND}. + * Creates a LineStyle with specified thickness and dash pattern. Defaults the {@link LineEndStyle} and + * {@link LineJoinStyle} to {@link LineJoinStyle#ROUND}. * * @param width line thickness * @param dashPattern dash pattern @@ -298,8 +288,8 @@ public LineStyle(final double width, List dashPattern) { } /** - * Creates a LineStyle with specified dash pattern. Defaults line width to 1.0 and the - * {@link LineEndStyle} and {@link LineJoinStyle} to {@link LineJoinStyle#ROUND}. + * Creates a LineStyle with specified dash pattern. Defaults line width to 1.0 and the {@link LineEndStyle} and + * {@link LineJoinStyle} to {@link LineJoinStyle#ROUND}. * * @param dashPattern dash pattern */ @@ -308,8 +298,8 @@ public LineStyle(double... dashPattern) { } /** - * Creates a LineStyle with specified dash pattern. Defaults line width to 1.0 and the - * {@link LineEndStyle} and {@link LineJoinStyle} to {@link LineJoinStyle#ROUND}. + * Creates a LineStyle with specified dash pattern. Defaults line width to 1.0 and the {@link LineEndStyle} and + * {@link LineJoinStyle} to {@link LineJoinStyle#ROUND}. * * @param dashPattern dash pattern * @param data type of {@code dashPattern} @@ -319,8 +309,8 @@ public LineStyle(List dashPattern) { } /** - * Creates a LineStyle with specified {@link LineEndStyle} and {@link LineJoinStyle} Defaults - * line width to 1.0. No dash pattern is set. + * Creates a LineStyle with specified {@link LineEndStyle} and {@link LineJoinStyle} Defaults line width to 1.0. No + * dash pattern is set. 
* * @param endStyle line end style * @param joinStyle line join style @@ -330,8 +320,8 @@ public LineStyle(String endStyle, String joinStyle) { } /** - * Creates a LineStyle. Defaults the line width to 1.0 and the {@link LineEndStyle} and - * {@link LineJoinStyle} to {@link LineJoinStyle#ROUND}. No dash pattern is set. + * Creates a LineStyle. Defaults the line width to 1.0 and the {@link LineEndStyle} and {@link LineJoinStyle} to + * {@link LineJoinStyle#ROUND}. No dash pattern is set. */ public LineStyle() { this(DEFAULT_WIDTH, DEFAULT_ENDSTYLE, DEFAULT_JOINSTYLE, DEFAULT_DASHPATTERN); @@ -351,7 +341,7 @@ public LineStyle() { * @return line style. */ public static LineStyle lineStyle(double width, LineEndStyle endStyle, LineJoinStyle joinStyle, - double... dashPattern) { + double... dashPattern) { return new LineStyle(width, endStyle, joinStyle, dashPattern); } @@ -365,8 +355,8 @@ public static LineStyle lineStyle(double width, LineEndStyle endStyle, LineJoinS * @param data type of {@code dashPattern} * @return line style. */ - public static LineStyle lineStyle(double width, LineEndStyle endStyle, - LineJoinStyle joinStyle, List dashPattern) { + public static LineStyle lineStyle(double width, LineEndStyle endStyle, LineJoinStyle joinStyle, + List dashPattern) { return new LineStyle(width, endStyle, joinStyle, dashPattern); } @@ -379,8 +369,7 @@ public static LineStyle lineStyle(double width, LineEndStyle * @param dashPattern dash pattern * @return line style. */ - public static LineStyle lineStyle(double width, String endStyle, String joinStyle, - double... dashPattern) { + public static LineStyle lineStyle(double width, String endStyle, String joinStyle, double... dashPattern) { return new LineStyle(width, endStyle, joinStyle, dashPattern); } @@ -394,8 +383,8 @@ public static LineStyle lineStyle(double width, String endStyle, String joinStyl * @param data type of {@code dashPattern} * @return line style. 
*/ - public static LineStyle lineStyle(double width, String endStyle, - String joinStyle, List dashPattern) { + public static LineStyle lineStyle(double width, String endStyle, String joinStyle, + List dashPattern) { return new LineStyle(width, endStyle, joinStyle, dashPattern); } @@ -481,8 +470,8 @@ public static LineStyle lineStyle(final double width, List * Returns a line style. * * @param dashPattern dash pattern - * @return line style with the line end style and line join style set to {@code ROUND}, and the - * line width set to 1.0. + * @return line style with the line end style and line join style set to {@code ROUND}, and the line width set to + * 1.0. */ public static LineStyle lineStyle(double... dashPattern) { return new LineStyle(dashPattern); @@ -493,8 +482,8 @@ public static LineStyle lineStyle(double... dashPattern) { * * @param dashPattern dash pattern * @param data type of {@code dashPattern} - * @return line style with the line end style and line join style set to {@code ROUND}, and the - * line width set to 1.0. + * @return line style with the line end style and line join style set to {@code ROUND}, and the line width set to + * 1.0. */ public static LineStyle lineStyle(List dashPattern) { return new LineStyle(dashPattern); @@ -514,8 +503,8 @@ public static LineStyle lineStyle(final String endStyle, final String joinStyle) /** * Returns a line style. * - * @return line style with the line end style and line join style set to {@code ROUND}, the line - * width set to 1.0, and no dash pattern set. + * @return line style with the line end style and line join style set to {@code ROUND}, the line width set to 1.0, + * and no dash pattern set. */ public static LineStyle lineStyle() { return new LineStyle(); @@ -567,14 +556,13 @@ private void assertDashPatternOk(final double[] dash) { } if (dash.length == 0) { - throw new IllegalArgumentException( - "Dash pattern is empty. 
dash=" + Arrays.toString(dash)); + throw new IllegalArgumentException("Dash pattern is empty. dash=" + Arrays.toString(dash)); } for (double aDash : dash) { if (aDash <= 0) { throw new IllegalArgumentException( - "Dash pattern contains zero or negative values: dash=" + Arrays.toString(dash)); + "Dash pattern contains zero or negative values: dash=" + Arrays.toString(dash)); } } } @@ -582,11 +570,11 @@ private void assertDashPatternOk(final double[] dash) { @Override public String toString() { return "LineStyle{" + - "width=" + width + - ", endStyle=" + endStyle + - ", joinStyle=" + joinStyle + - ", dashPattern=" + Arrays.toString(dashPattern) + - '}'; + "width=" + width + + ", endStyle=" + endStyle + + ", joinStyle=" + joinStyle + + ", dashPattern=" + Arrays.toString(dashPattern) + + '}'; } } diff --git a/Plot/src/main/java/io/deephaven/db/plot/SeriesCollection.java b/Plot/src/main/java/io/deephaven/db/plot/SeriesCollection.java index 7c56b507342..40cc67e45cf 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/SeriesCollection.java +++ b/Plot/src/main/java/io/deephaven/db/plot/SeriesCollection.java @@ -46,8 +46,7 @@ public static class SeriesDescription implements Serializable { private final boolean isMultiSeries; private final SeriesInternal series; - private SeriesDescription(final SeriesType type, final boolean isMultiSeries, - final SeriesInternal series) { + private SeriesDescription(final SeriesType type, final boolean isMultiSeries, final SeriesInternal series) { this.type = type; this.isMultiSeries = isMultiSeries; this.series = series; @@ -107,8 +106,7 @@ public synchronized SeriesCollection copy(final AxesImpl axes) { } /** - * Gets the descriptions of the series in the collection. The result is a map between series - * name and description. + * Gets the descriptions of the series in the collection. The result is a map between series name and description. 
* * @return descriptions of the series in the collection */ @@ -182,13 +180,10 @@ synchronized SeriesInternal lastSeries() { * @param isMultiSeries true for multi-series; false for standard mono-series. * @param series series */ - public synchronized void add(final SeriesType type, final boolean isMultiSeries, - final SeriesInternal series) { + public synchronized void add(final SeriesType type, final boolean isMultiSeries, final SeriesInternal series) { if (seriesDescriptions.containsKey(series.name())) { throw new PlotUnsupportedOperationException( - "Series with the same name already exists in the collection. name=" - + series.name(), - this); + "Series with the same name already exists in the collection. name=" + series.name(), this); } seriesDescriptions.put(series.name(), new SeriesDescription(type, isMultiSeries, series)); diff --git a/Plot/src/main/java/io/deephaven/db/plot/SeriesLocation.java b/Plot/src/main/java/io/deephaven/db/plot/SeriesLocation.java index 9014f269f6a..6d295f9dded 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/SeriesLocation.java +++ b/Plot/src/main/java/io/deephaven/db/plot/SeriesLocation.java @@ -36,8 +36,7 @@ public boolean equals(Object o) { if (id != that.id) return false; - return axesLocation != null ? axesLocation.equals(that.axesLocation) - : that.axesLocation == null; + return axesLocation != null ? axesLocation.equals(that.axesLocation) : that.axesLocation == null; } @Override diff --git a/Plot/src/main/java/io/deephaven/db/plot/axisformatters/AxisFormat.java b/Plot/src/main/java/io/deephaven/db/plot/axisformatters/AxisFormat.java index 3478081d06b..56f203521ff 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/axisformatters/AxisFormat.java +++ b/Plot/src/main/java/io/deephaven/db/plot/axisformatters/AxisFormat.java @@ -7,8 +7,8 @@ import java.text.NumberFormat; /** - * Format for axis tick labels. For time values, this would be how the dates are formatted. 
For - * numerical values, this would be the number of significant digits, etc. + * Format for axis tick labels. For time values, this would be how the dates are formatted. For numerical values, this + * would be the number of significant digits, etc. */ public interface AxisFormat { @@ -22,8 +22,7 @@ public interface AxisFormat { /** * Gets the formatter for given pattern. *

    - * Note that as time values are expressed as numbers, a number formatter is still suitable for - * dates. + * Note that as time values are expressed as numbers, a number formatter is still suitable for dates. *

    * * @return formatter diff --git a/Plot/src/main/java/io/deephaven/db/plot/axisformatters/DecimalAxisFormat.java b/Plot/src/main/java/io/deephaven/db/plot/axisformatters/DecimalAxisFormat.java index 2d3a5e328de..7863bac24ec 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/axisformatters/DecimalAxisFormat.java +++ b/Plot/src/main/java/io/deephaven/db/plot/axisformatters/DecimalAxisFormat.java @@ -10,8 +10,8 @@ /** * A formatter for converting decimals into formatted strings. * - * For details on the supported patterns see the javadoc for DecimalFormat + * For details on the supported patterns see the javadoc for + * DecimalFormat */ public class DecimalAxisFormat implements AxisFormat { private String pattern; diff --git a/Plot/src/main/java/io/deephaven/db/plot/axisformatters/NanosAxisFormat.java b/Plot/src/main/java/io/deephaven/db/plot/axisformatters/NanosAxisFormat.java index ebc0420abef..e0f00870f38 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/axisformatters/NanosAxisFormat.java +++ b/Plot/src/main/java/io/deephaven/db/plot/axisformatters/NanosAxisFormat.java @@ -16,8 +16,8 @@ /** * A formatter for converting nanoseconds into formatted strings. * - * For details on the supported patterns see the javadoc for DateTimeFormatter + * For details on the supported patterns see the javadoc for + * DateTimeFormatter */ public class NanosAxisFormat implements AxisFormat, Serializable { @@ -84,19 +84,16 @@ public void updateTimeZone(final DBTimeZone tz) { private void updateFormatter(String format) { format = format == null ? 
"yyyy-MM-dd" : format; - this.formatter = DateTimeFormatter.ofPattern(format) - .withZone(tz.getTimeZone().toTimeZone().toZoneId()); + this.formatter = DateTimeFormatter.ofPattern(format).withZone(tz.getTimeZone().toTimeZone().toZoneId()); } @Override - public StringBuffer format(final double number, final StringBuffer toAppendTo, - final FieldPosition pos) { + public StringBuffer format(final double number, final StringBuffer toAppendTo, final FieldPosition pos) { return format((long) number, toAppendTo, pos); } @Override - public StringBuffer format(final long number, final StringBuffer toAppendTo, - final FieldPosition pos) { + public StringBuffer format(final long number, final StringBuffer toAppendTo, final FieldPosition pos) { return toAppendTo.append(formatter.format(new DBDateTime(number).getInstant())); } diff --git a/Plot/src/main/java/io/deephaven/db/plot/axisformatters/ScientificNumberFormatter.java b/Plot/src/main/java/io/deephaven/db/plot/axisformatters/ScientificNumberFormatter.java index 22f6b436c7b..4236b7310a8 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/axisformatters/ScientificNumberFormatter.java +++ b/Plot/src/main/java/io/deephaven/db/plot/axisformatters/ScientificNumberFormatter.java @@ -13,16 +13,15 @@ import java.text.ParsePosition; /** - * {@link NumberFormat} which formats numbers in scientific notation if they are outside the given - * range. + * {@link NumberFormat} which formats numbers in scientific notation if they are outside the given range. 
*/ public class ScientificNumberFormatter extends NumberFormat { private static final int DEFAULT_NUMBER_OF_DECIMALS = - Configuration.getInstance().getIntegerWithDefault("plot.axis.ticks.numdecimals", 3); + Configuration.getInstance().getIntegerWithDefault("plot.axis.ticks.numdecimals", 3); private static final double DEFAULT_LOWER_LIMIT = - Configuration.getInstance().getDoubleWithDefault("plot.axis.ticks.lowerlimit", 1e-7); + Configuration.getInstance().getDoubleWithDefault("plot.axis.ticks.lowerlimit", 1e-7); private static final double DEFAULT_UPPER_LIMIT = - Configuration.getInstance().getDoubleWithDefault("plot.axis.ticks.upperlimit", 1e7); + Configuration.getInstance().getDoubleWithDefault("plot.axis.ticks.upperlimit", 1e7); private final DecimalFormat decimalFormat = new DecimalFormat(); private final DecimalFormat scientificFormat; @@ -30,8 +29,7 @@ public class ScientificNumberFormatter extends NumberFormat { private final double upperLimit; /** - * Creates a ScientificNumberFormatter instance with the default number of decimals, lower - * limit, and upper limit. + * Creates a ScientificNumberFormatter instance with the default number of decimals, lower limit, and upper limit. */ @SuppressWarnings("WeakerAccess") public ScientificNumberFormatter() { @@ -42,10 +40,10 @@ public ScientificNumberFormatter() { * Creates a ScientificNumberFormatter instance. 
* * @param numDecimals the max number of decimals to display - * @param lowerLimit gives a range around 0 [-lowerLimit, lowerLimit] for which each number - * inside the range excluding 0 will be formatted with scientific notation - * @param upperLimit gives a range around 0 [-upperLimit, upperLimit] for which each number - * outside the range will be formatted with scientific notation + * @param lowerLimit gives a range around 0 [-lowerLimit, lowerLimit] for which each number inside the range + * excluding 0 will be formatted with scientific notation + * @param upperLimit gives a range around 0 [-upperLimit, upperLimit] for which each number outside the range will + * be formatted with scientific notation */ @SuppressWarnings("WeakerAccess") public ScientificNumberFormatter(int numDecimals, double lowerLimit, double upperLimit) { @@ -76,8 +74,8 @@ public Number parse(String source, ParsePosition parsePosition) { return decimalFormat.parse(source, parsePosition); } - private StringBuffer formatInternal(final double abs, final StringBuffer toAppendTo, - final FieldPosition pos, final long number1, final double number2, final boolean isLong) { + private StringBuffer formatInternal(final double abs, final StringBuffer toAppendTo, final FieldPosition pos, + final long number1, final double number2, final boolean isLong) { if ((abs < lowerLimit && abs > 0) || abs > upperLimit) { if (isLong) { return scientificFormat.format(number1, toAppendTo, pos); diff --git a/Plot/src/main/java/io/deephaven/db/plot/axistransformations/AxisTransform.java b/Plot/src/main/java/io/deephaven/db/plot/axistransformations/AxisTransform.java index a9488e5f8e7..e7cf0a8a7ac 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/axistransformations/AxisTransform.java +++ b/Plot/src/main/java/io/deephaven/db/plot/axistransformations/AxisTransform.java @@ -5,14 +5,12 @@ package io.deephaven.db.plot.axistransformations; /** - * Function applied to transform data before plotting. 
For example, a log plot is a logarithmic - * transform of an axis. + * Function applied to transform data before plotting. For example, a log plot is a logarithmic transform of an axis. * - * Determines how dataset values are displayed in the plot by transforming dataset values into Axis - * space and back. + * Determines how dataset values are displayed in the plot by transforming dataset values into Axis space and back. * - * For example, if the forward transform is x -> x^0.5, a dataset value of 1 maps to 1 in Axis - * space; a dataset value of 4 maps to 2. In the plot these values will be displayed close together. + * For example, if the forward transform is x -> x^0.5, a dataset value of 1 maps to 1 in Axis space; a dataset value of + * 4 maps to 2. In the plot these values will be displayed close together. */ public interface AxisTransform { diff --git a/Plot/src/main/java/io/deephaven/db/plot/axistransformations/AxisTransformBusinessCalendar.java b/Plot/src/main/java/io/deephaven/db/plot/axistransformations/AxisTransformBusinessCalendar.java index cb7fc9f8deb..1efa9253e7a 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/axistransformations/AxisTransformBusinessCalendar.java +++ b/Plot/src/main/java/io/deephaven/db/plot/axistransformations/AxisTransformBusinessCalendar.java @@ -17,12 +17,10 @@ import java.util.function.Predicate; /** - * AxisTransform into business time. Useful for plotting time series data with large gaps in - * non-business hours. + * AxisTransform into business time. Useful for plotting time series data with large gaps in non-business hours. * - * The forward transform takes a data value from standard epoch time and translates it into the - * cumulative business time for the dataset. The inverse transform takes it back to standard epoch - * time. + * The forward transform takes a data value from standard epoch time and translates it into the cumulative business time + * for the dataset. 
The inverse transform takes it back to standard epoch time. * * Data values outside of business hours are not visible. */ @@ -44,8 +42,7 @@ private Nugget(BusinessSchedule day, long cumulativeBusinessTimeNanosAtStartOfDa private final List nuggets = new ArrayList<>(); /** - * Creates an AxisTransformBusinessCalendar instance with the specified - * {@link BusinessCalendar}. + * Creates an AxisTransformBusinessCalendar instance with the specified {@link BusinessCalendar}. * * @throws io.deephaven.base.verify.RequirementFailure {@code busCal} can not be null * @param busCal business calendar @@ -76,21 +73,17 @@ private Nugget getNuggetByTime(final double timeNanos) { while (timeNanos < nMin.businessDay.getSOBD().getNanos()) { final BusinessSchedule d = - busCal.getBusinessSchedule(busCal.previousBusinessDay(nMin.businessDay.getSOBD())); - final Nugget n = - new Nugget(d, nMin.cumulativeBusinessTimeNanosAtStartOfDay - d.getLOBD()); + busCal.getBusinessSchedule(busCal.previousBusinessDay(nMin.businessDay.getSOBD())); + final Nugget n = new Nugget(d, nMin.cumulativeBusinessTimeNanosAtStartOfDay - d.getLOBD()); nuggets.add(0, n); nMin = n; } - // noinspection ConstantConditions nMax can't cause NPE (for now! Don't add nulls to - // nuggets!) + // noinspection ConstantConditions nMax can't cause NPE (for now! Don't add nulls to nuggets!) 
while (timeNanos > nMax.businessDay.getEOBD().getNanos()) { - final BusinessSchedule d = - busCal.getBusinessSchedule(busCal.nextBusinessDay(nMax.businessDay.getEOBD())); - final Nugget n = new Nugget(d, - nMax.cumulativeBusinessTimeNanosAtStartOfDay + nMax.businessDay.getLOBD()); + final BusinessSchedule d = busCal.getBusinessSchedule(busCal.nextBusinessDay(nMax.businessDay.getEOBD())); + final Nugget n = new Nugget(d, nMax.cumulativeBusinessTimeNanosAtStartOfDay + nMax.businessDay.getLOBD()); nuggets.add(n); nMax = n; @@ -113,9 +106,8 @@ private Nugget getNuggetByValue(final double value) { while (value < nMin.cumulativeBusinessTimeNanosAtStartOfDay) { final BusinessSchedule d = - busCal.getBusinessSchedule(busCal.previousBusinessDay(nMin.businessDay.getSOBD())); - final Nugget n = - new Nugget(d, nMin.cumulativeBusinessTimeNanosAtStartOfDay - d.getLOBD()); + busCal.getBusinessSchedule(busCal.previousBusinessDay(nMin.businessDay.getSOBD())); + final Nugget n = new Nugget(d, nMin.cumulativeBusinessTimeNanosAtStartOfDay - d.getLOBD()); nuggets.add(0, n); nMin = n; @@ -126,21 +118,17 @@ private Nugget getNuggetByValue(final double value) { } while (value > nMax.cumulativeBusinessTimeNanosAtStartOfDay + nMax.businessDay.getLOBD()) { - final BusinessSchedule d = - busCal.getBusinessSchedule(busCal.nextBusinessDay(nMax.businessDay.getEOBD())); - final Nugget n = new Nugget(d, - nMax.cumulativeBusinessTimeNanosAtStartOfDay + nMax.businessDay.getLOBD()); + final BusinessSchedule d = busCal.getBusinessSchedule(busCal.nextBusinessDay(nMax.businessDay.getEOBD())); + final Nugget n = new Nugget(d, nMax.cumulativeBusinessTimeNanosAtStartOfDay + nMax.businessDay.getLOBD()); nuggets.add(n); nMax = n; } - return findNugget( - n -> value < n.cumulativeBusinessTimeNanosAtStartOfDay + n.businessDay.getLOBD()); + return findNugget(n -> value < n.cumulativeBusinessTimeNanosAtStartOfDay + n.businessDay.getLOBD()); } - // only getNuggetByTime or getNuggetByValue should call this to 
ensure that the desired value is - // in range + // only getNuggetByTime or getNuggetByValue should call this to ensure that the desired value is in range private Nugget findNugget(final Predicate lessThanEqual) { int iMin = 0, iMax = nuggets.size() - 1; @@ -168,7 +156,7 @@ private Nugget findNugget(final Predicate lessThanEqual) { @Override public boolean isVisible(final double timeNanos) { return !(Double.isInfinite(timeNanos) || Double.isNaN(timeNanos)) - && busCal.isBusinessTime(DBTimeUtils.nanosToTime((long) timeNanos)); + && busCal.isBusinessTime(DBTimeUtils.nanosToTime((long) timeNanos)); } diff --git a/Plot/src/main/java/io/deephaven/db/plot/axistransformations/AxisTransformLambda.java b/Plot/src/main/java/io/deephaven/db/plot/axistransformations/AxisTransformLambda.java index c1db6aec857..9dd144ba9fb 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/axistransformations/AxisTransformLambda.java +++ b/Plot/src/main/java/io/deephaven/db/plot/axistransformations/AxisTransformLambda.java @@ -13,9 +13,9 @@ /** * Transformations from dataset space to axis space and back. * - * Axis space refers to how data is displayed in the chart. For example, if the transform from - * dataset space to axis space was defined as x^0.5, the dataset values 1, 4, and 9 would be plotted - * as 1, 2, and 3,] creating a square root axis scale. + * Axis space refers to how data is displayed in the chart. For example, if the transform from dataset space to axis + * space was defined as x^0.5, the dataset values 1, 4, and 9 would be plotted as 1, 2, and 3,] creating a square root + * axis scale. 
*/ public class AxisTransformLambda implements AxisTransform, Serializable { @@ -31,8 +31,8 @@ public class AxisTransformLambda implements AxisTransform, Serializable { * @param axisToData transform from axis space to dataset space * @param isVisible function that determines if a particular data point should be displayed */ - public AxisTransformLambda(final DoubleUnaryOperator dataToAxis, - final DoubleUnaryOperator axisToData, final DoublePredicate isVisible) { + public AxisTransformLambda(final DoubleUnaryOperator dataToAxis, final DoubleUnaryOperator axisToData, + final DoublePredicate isVisible) { Require.neqNull(dataToAxis, "dataToAxis"); Require.neqNull(axisToData, "axisToData"); Require.neqNull(isVisible, "isVisible"); @@ -47,19 +47,17 @@ public AxisTransformLambda(final DoubleUnaryOperator dataToAxis, * @param dataToAxis transform from dataset space to axis space * @param axisToData transform from axis space to dataset space */ - public AxisTransformLambda(final DoubleUnaryOperator dataToAxis, - final DoubleUnaryOperator axisToData) { + public AxisTransformLambda(final DoubleUnaryOperator dataToAxis, final DoubleUnaryOperator axisToData) { this(dataToAxis, axisToData, (DoublePredicate & Serializable) v -> true); } /** - * Creates an AxisTransformLambda instance where all dataset values are visible in the plot and - * the dataset to axis space transform and its inverse are the identity function. + * Creates an AxisTransformLambda instance where all dataset values are visible in the plot and the dataset to axis + * space transform and its inverse are the identity function. 
*/ public AxisTransformLambda() { - this((DoubleUnaryOperator & Serializable) v -> v, - (DoubleUnaryOperator & Serializable) v -> v, - (DoublePredicate & Serializable) v -> true); + this((DoubleUnaryOperator & Serializable) v -> v, (DoubleUnaryOperator & Serializable) v -> v, + (DoublePredicate & Serializable) v -> true); } @Override diff --git a/Plot/src/main/java/io/deephaven/db/plot/axistransformations/AxisTransforms.java b/Plot/src/main/java/io/deephaven/db/plot/axistransformations/AxisTransforms.java index 834b5c6a916..adc240e2c9c 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/axistransformations/AxisTransforms.java +++ b/Plot/src/main/java/io/deephaven/db/plot/axistransformations/AxisTransforms.java @@ -24,22 +24,20 @@ public enum AxisTransforms implements AxisTransform, Serializable { * Natural logarithm. Non-positive data values are not displayed. */ LOG((DoubleUnaryOperator & Serializable) x -> x <= 0.0 ? Double.NaN : Math.log(x), - (DoubleUnaryOperator & Serializable) Math::exp, - (DoublePredicate & Serializable) x -> x > 0.0), + (DoubleUnaryOperator & Serializable) Math::exp, (DoublePredicate & Serializable) x -> x > 0.0), /** * Square root. Negative data values are not displayed. */ SQRT((DoubleUnaryOperator & Serializable) x -> x < 0.0 ? 
Double.NaN : Math.sqrt(x), - (DoubleUnaryOperator & Serializable) x -> x * x, - (DoublePredicate & Serializable) x -> x >= 0.0); + (DoubleUnaryOperator & Serializable) x -> x * x, (DoublePredicate & Serializable) x -> x >= 0.0); private final DoubleUnaryOperator transform; private final DoubleUnaryOperator inverseTransform; private final DoublePredicate isVisible; AxisTransforms(final DoubleUnaryOperator dataToAxis, final DoubleUnaryOperator axisToData, - final DoublePredicate isVisible) { + final DoublePredicate isVisible) { this.transform = dataToAxis; this.inverseTransform = axisToData; this.isVisible = isVisible; @@ -93,8 +91,7 @@ public static AxisTransform axisTransform(final String name) { if (at1 != null && at2 != null) { log.warning( - "Axis transform is defined in both enum and calendar. Returning the enum value. name=" - + name); + "Axis transform is defined in both enum and calendar. Returning the enum value. name=" + name); return at1; } else if (at1 != null) { return at1; @@ -111,15 +108,14 @@ public static AxisTransform axisTransform(final String name) { * @return an array of the available axis transform names. 
*/ public static String[] axisTransformNames() { - final Set results = Arrays.stream(values()).map(Enum::name) - .collect(Collectors.toCollection(LinkedHashSet::new)); + final Set results = + Arrays.stream(values()).map(Enum::name).collect(Collectors.toCollection(LinkedHashSet::new)); final Set calendars = new LinkedHashSet<>(Arrays.asList(Calendars.calendarNames())); final Set conflicts = new LinkedHashSet<>(results); final boolean hasConflicts = conflicts.retainAll(calendars); if (hasConflicts) { - log.warning("AxisTransform enum and calendar names have conflicting values: values=" - + conflicts); + log.warning("AxisTransform enum and calendar names have conflicting values: values=" + conflicts); } results.addAll(calendars); diff --git a/Plot/src/main/java/io/deephaven/db/plot/chartmodifiers/OneClickChartModifier.java b/Plot/src/main/java/io/deephaven/db/plot/chartmodifiers/OneClickChartModifier.java index 07d291ec69c..490fed63aaf 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/chartmodifiers/OneClickChartModifier.java +++ b/Plot/src/main/java/io/deephaven/db/plot/chartmodifiers/OneClickChartModifier.java @@ -19,12 +19,12 @@ public abstract class OneClickChartModifier implements Serializable { * * @param t swappable table * @param valueColumn column of swappable table - * @param visibilityLevel which series to hide before a OneClick event, e.g. AXIS will hide all - * series for the given axis + * @param visibilityLevel which series to hide before a OneClick event, e.g. 
AXIS will hide all series for the given + * axis * @param plotInfo info for exceptions */ public OneClickChartModifier(final SwappableTable t, final String valueColumn, - final VisibilityLevel visibilityLevel, final PlotInfo plotInfo) { + final VisibilityLevel visibilityLevel, final PlotInfo plotInfo) { this.valueColumn = valueColumn; this.visibilityLevel = visibilityLevel; this.plotInfo = plotInfo; @@ -52,8 +52,8 @@ public PlotInfo getPlotInfo() { } /** - * At what level in the Figure hierarchy the series of a plot will be restricted, e.g. AXIS will - * hide all series for the given {@link io.deephaven.db.plot.Axis} + * At what level in the Figure hierarchy the series of a plot will be restricted, e.g. AXIS will hide all series for + * the given {@link io.deephaven.db.plot.Axis} */ public enum VisibilityLevel { AXIS diff --git a/Plot/src/main/java/io/deephaven/db/plot/colors/ColorMaps.java b/Plot/src/main/java/io/deephaven/db/plot/colors/ColorMaps.java index 4d403e1f39d..34b5b61e9ae 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/colors/ColorMaps.java +++ b/Plot/src/main/java/io/deephaven/db/plot/colors/ColorMaps.java @@ -30,18 +30,17 @@ private ColorMaps() {} /** * Returns a heat map to map numerical values to colors. *

    - * Values less than or equal to {@code min} return the starting color. Values greater than or - * equal to {@code max} return the ending color. Values in between this range are a linear - * combination of the RGB components of these two colors. Higher values return colors that are - * closer to the ending color, and lower values return colors that are closer to the starting - * color. + * Values less than or equal to {@code min} return the starting color. Values greater than or equal to {@code max} + * return the ending color. Values in between this range are a linear combination of the RGB components of these two + * colors. Higher values return colors that are closer to the ending color, and lower values return colors that are + * closer to the starting color. *

    * Inputs that are null or Double.NaN return a null color. * * @param min minimum * @param max maximum - * @return function for mapping double values to colors. The starting color is blue (#0000FF) - * and the ending color is yellow (#FFFF00). + * @return function for mapping double values to colors. The starting color is blue (#0000FF) and the ending color + * is yellow (#FFFF00). */ public static Function heatMap(final double min, final double max) { return heatMap(min, max, Color.color("blue"), Color.color("red"), Color.color("yellow")); @@ -50,11 +49,10 @@ public static Function heatMap(final double min, final double max /** * Returns a heat map to map numerical values to colors. *

    - * Values less than or equal to {@code min} return the starting color. Values greater than or - * equal to {@code max} return the ending color. Values in between this range are a linear - * combination of the RGB components of these two colors. Higher values return colors that are - * closer to the ending color, and lower values return colors that are closer to the starting - * color. + * Values less than or equal to {@code min} return the starting color. Values greater than or equal to {@code max} + * return the ending color. Values in between this range are a linear combination of the RGB components of these two + * colors. Higher values return colors that are closer to the ending color, and lower values return colors that are + * closer to the starting color. *

    * Inputs that are null or Double.NaN return a null color. * @@ -64,19 +62,18 @@ public static Function heatMap(final double min, final double max * @param endColor color at values greater than or equal to {@code max} * @return function for mapping double values to colors */ - public static Function heatMap(final double min, final double max, - final Color startColor, final Color endColor) { + public static Function heatMap(final double min, final double max, final Color startColor, + final Color endColor) { return heatMap(min, max, startColor, endColor, null); } /** * Returns a heat map to map numerical values to colors. *

    - * Values less than or equal to {@code min} return the starting color. Values greater than or - * equal to {@code max} return the ending color. Values in between this range are a linear - * combination of the RGB components of these two colors. Higher values return colors that are - * closer to the ending color, and lower values return colors that are closer to the starting - * color. + * Values less than or equal to {@code min} return the starting color. Values greater than or equal to {@code max} + * return the ending color. Values in between this range are a linear combination of the RGB components of these two + * colors. Higher values return colors that are closer to the ending color, and lower values return colors that are + * closer to the starting color. *

    * Inputs that are null or Double.NaN return a null color. * @@ -87,8 +84,8 @@ public static Function heatMap(final double min, final double max * @param nullColor color at null input values * @return function for mapping double values to colors */ - public static Function heatMap(final double min, final double max, - final Color startColor, final Color endColor, final Color nullColor) { + public static Function heatMap(final double min, final double max, final Color startColor, + final Color endColor, final Color nullColor) { return (Function & Serializable) value -> { if (value == null || value == Double.NaN) { return nullColor; @@ -108,51 +105,46 @@ public static Function heatMap(final double min, final double max int b2 = endColor.javaColor().getBlue(); return new Color((int) (r1 + pert * (r2 - r1)), (int) (g1 + pert * (g2 - g1)), - (int) (b1 + pert * (b2 - b1))); + (int) (b1 + pert * (b2 - b1))); } }; } /** - * Maps {@link Range}s of values to specific colors. Values inside a given {@link Range} return - * the corresponding {@link Paint}. + * Maps {@link Range}s of values to specific colors. Values inside a given {@link Range} return the corresponding + * {@link Paint}. *

    - * Values not in any of the specified {@link Range} return an out of range color. Null inputs - * return a null color. + * Values not in any of the specified {@link Range} return an out of range color. Null inputs return a null color. * * @param map map of {@link Range}s to {@link Paint}s. * @param type of {@link Paint} in the map - * @return function for mapping double values to colors. Null and out of range values return - * null. + * @return function for mapping double values to colors. Null and out of range values return null. */ - public static Function rangeMap( - final Map map) { + public static Function rangeMap(final Map map) { return rangeMap(map, null); } /** - * Maps {@link Range}s of values to specific colors. Values inside a given {@link Range} return - * the corresponding {@link Paint}. + * Maps {@link Range}s of values to specific colors. Values inside a given {@link Range} return the corresponding + * {@link Paint}. *

    - * Values not in any of the specified {@link Range} return an out of range color. Null inputs - * return a null color. + * Values not in any of the specified {@link Range} return an out of range color. Null inputs return a null color. * * @param map map of {@link Range}s to {@link Paint}s. * @param outOfRangeColor color for values not within any of the defined ranges * @param type of {@link Paint} in the map * @return function for mapping double values to colors. Null values return null. */ - public static Function rangeMap( - final Map map, final Color outOfRangeColor) { + public static Function rangeMap(final Map map, + final Color outOfRangeColor) { return rangeMap(map, outOfRangeColor, null); } /** - * Maps {@link Range}s of values to specific colors. Values inside a given {@link Range} return - * the corresponding {@link Paint}. + * Maps {@link Range}s of values to specific colors. Values inside a given {@link Range} return the corresponding + * {@link Paint}. *

    - * Values not in any of the specified {@link Range} return an out of range color. Null inputs - * return a null color. + * Values not in any of the specified {@link Range} return an out of range color. Null inputs return a null color. * * @param map map of {@link Range}s to {@link Paint}s. * @param outOfRangeColor color for values not within any of the defined ranges @@ -160,8 +152,8 @@ public static Function rangeMap( * @param type of {@link Paint} in the map * @return function for mapping double values to colors */ - public static Function rangeMap( - final Map map, final Paint outOfRangeColor, final Paint nullColor) { + public static Function rangeMap(final Map map, + final Paint outOfRangeColor, final Paint nullColor) { Require.neqNull(map, "map"); final Map, COLOR> pm = new LinkedHashMap<>(); @@ -183,56 +175,55 @@ public boolean test(Double o) { } /** - * Returns a function which uses predicate functions to determine which colors is returned for - * an input value. For each input value, a map is iterated through until the predicate function - * (map key) returns true. When the predicate returns true, the associated color (map value) is - * returned. If no such predicate is found, an out of range color is returned. + * Returns a function which uses predicate functions to determine which colors is returned for an input value. For + * each input value, a map is iterated through until the predicate function (map key) returns true. When the + * predicate returns true, the associated color (map value) is returned. If no such predicate is found, an out of + * range color is returned. * * @param map map from {@link SerializablePredicate} to color * @param type of {@link Paint} in {@code map} - * @return function which returns the color mapped to the first {@link SerializablePredicate} - * for which the input is true. Out of range, null, and NaN values return null. 
+ * @return function which returns the color mapped to the first {@link SerializablePredicate} for which the input is + * true. Out of range, null, and NaN values return null. */ public static Function predicateMap( - final Map, COLOR> map) { + final Map, COLOR> map) { return predicateMap(map, null); } /** - * Returns a function which uses predicate functions to determine which colors is returned for - * an input value. For each input value, a map is iterated through until the predicate function - * (map key) returns true. When the predicate returns true, the associated color (map value) is - * returned. If no such predicate is found, an out of range color is returned. + * Returns a function which uses predicate functions to determine which colors is returned for an input value. For + * each input value, a map is iterated through until the predicate function (map key) returns true. When the + * predicate returns true, the associated color (map value) is returned. If no such predicate is found, an out of + * range color is returned. * * @param map map from {@link SerializablePredicate} to color - * @param outOfRangeColor color returned when the input satisfies no - * {@link SerializablePredicate} in the {@code map} + * @param outOfRangeColor color returned when the input satisfies no {@link SerializablePredicate} in the + * {@code map} * @param type of {@link Paint} in {@code map} - * @return function which returns the color mapped to the first {@link SerializablePredicate} - * for which the input is true. Null and NaN inputs return null. + * @return function which returns the color mapped to the first {@link SerializablePredicate} for which the input is + * true. Null and NaN inputs return null. 
*/ public static Function predicateMap( - final Map, COLOR> map, final Color outOfRangeColor) { + final Map, COLOR> map, final Color outOfRangeColor) { return predicateMap(map, outOfRangeColor, null); } /** - * Returns a function which uses predicate functions to determine which colors is returned for - * an input value. For each input value, a map is iterated through until the predicate function - * (map key) returns true. When the predicate returns true, the associated color (map value) is - * returned. If no such predicate is found, an out of range color is returned. + * Returns a function which uses predicate functions to determine which colors is returned for an input value. For + * each input value, a map is iterated through until the predicate function (map key) returns true. When the + * predicate returns true, the associated color (map value) is returned. If no such predicate is found, an out of + * range color is returned. * * @param map map from {@link SerializablePredicate} to color - * @param outOfRangeColor color returned when the input satisfies no - * {@link SerializablePredicate} in the {@code map} + * @param outOfRangeColor color returned when the input satisfies no {@link SerializablePredicate} in the + * {@code map} * @param nullColor color returned when the input is null or Double.NaN * @param type of {@link Paint} in {@code map} - * @return function which returns the color mapped to the first {@link SerializablePredicate} - * for which the input is true + * @return function which returns the color mapped to the first {@link SerializablePredicate} for which the input is + * true */ public static Function predicateMap( - final Map, COLOR> map, final Paint outOfRangeColor, - final Paint nullColor) { + final Map, COLOR> map, final Paint outOfRangeColor, final Paint nullColor) { Require.neqNull(map, "map"); return (Function & Serializable) value -> { if (value == null || value == Double.NaN) { @@ -253,64 +244,58 @@ public static Function 
predicateMap( } /** - * Returns a function which uses closure functions to determine which colors is returned for an - * input value. For each input value, a map is iterated through until the closure function (map - * key) returns true. When the closure returns true, the associated color (map value) is - * returned. If no such closure is found, an out of range color is returned. + * Returns a function which uses closure functions to determine which colors is returned for an input value. For + * each input value, a map is iterated through until the closure function (map key) returns true. When the closure + * returns true, the associated color (map value) is returned. If no such closure is found, an out of range color is + * returned. * * @param map map from {@link Closure} to color * @param type of {@link Paint} in {@code map} - * @return function which returns the color mapped to the first {@link Closure} for which the - * input is true. Out of range, null, and NaN inputs return null. + * @return function which returns the color mapped to the first {@link Closure} for which the input is true. Out of + * range, null, and NaN inputs return null. */ - public static Function closureMap( - final Map, COLOR> map) { + public static Function closureMap(final Map, COLOR> map) { return closureMap(map, null); } /** - * Returns a function which uses closure functions to determine which colors is returned for an - * input value. For each input value, a map is iterated through until the closure function (map - * key) returns true. When the closure returns true, the associated color (map value) is - * returned. If no such closure is found, an out of range color is returned. + * Returns a function which uses closure functions to determine which colors is returned for an input value. For + * each input value, a map is iterated through until the closure function (map key) returns true. When the closure + * returns true, the associated color (map value) is returned. 
If no such closure is found, an out of range color is + * returned. * * @param map map from {@link Closure} to color - * @param outOfRangeColor color returned when the input satisfies no {@link Closure} in the - * {@code map} + * @param outOfRangeColor color returned when the input satisfies no {@link Closure} in the {@code map} * @param type of {@link Paint} in {@code map} - * @return function which returns the color mapped to the first {@link Closure} for which the - * input is true. Null and NaN inputs return null. + * @return function which returns the color mapped to the first {@link Closure} for which the input is true. Null + * and NaN inputs return null. */ - public static Function closureMap( - final Map, COLOR> map, final Color outOfRangeColor) { + public static Function closureMap(final Map, COLOR> map, + final Color outOfRangeColor) { return closureMap(map, outOfRangeColor, null); } /** - * Returns a function which uses closure functions to determine which colors is returned for an - * input value. For each input value, a map is iterated through until the closure function (map - * key) returns true. When the closure returns true, the associated color (map value) is - * returned. If no such closure is found, an out of range color is returned. + * Returns a function which uses closure functions to determine which colors is returned for an input value. For + * each input value, a map is iterated through until the closure function (map key) returns true. When the closure + * returns true, the associated color (map value) is returned. If no such closure is found, an out of range color is + * returned. 
* * @param map map from {@link Closure} to color - * @param outOfRangeColor color returned when the input satisfies no {@link Closure} in the - * {@code map} + * @param outOfRangeColor color returned when the input satisfies no {@link Closure} in the {@code map} * @param nullColor color returned when the input is null or Double.NaN * @param type of {@link Paint} in {@code map} - * @return function which returns the color mapped to the first {@link Closure} for which the - * input is true + * @return function which returns the color mapped to the first {@link Closure} for which the input is true */ - public static Function closureMap( - final Map, COLOR> map, final Paint outOfRangeColor, - final Paint nullColor) { + public static Function closureMap(final Map, COLOR> map, + final Paint outOfRangeColor, final Paint nullColor) { final Map, COLOR> pm = new LinkedHashMap<>(); for (final Map.Entry, COLOR> e : map.entrySet()) { final Closure closure = e.getKey(); final COLOR color = e.getValue(); - final SerializableClosure serializableClosure = - new SerializableClosure<>(closure); + final SerializableClosure serializableClosure = new SerializableClosure<>(closure); final SerializablePredicate predicate = new SerializablePredicate() { private static final long serialVersionUID = 613420989214281949L; diff --git a/Plot/src/main/java/io/deephaven/db/plot/composite/ScatterPlotMatrix.java b/Plot/src/main/java/io/deephaven/db/plot/composite/ScatterPlotMatrix.java index 23add767736..c0c2c244e0b 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/composite/ScatterPlotMatrix.java +++ b/Plot/src/main/java/io/deephaven/db/plot/composite/ScatterPlotMatrix.java @@ -105,8 +105,8 @@ public ScatterPlotMatrix pointSize(final Number factor) { /** * Sets the point size of the plot at index {@code plotIndex}. * - * The index starts at 0 in the upper left hand corner of the grid and increases going left to - * right, top to bottom. E.g. 
for a 2x2 ScatterPlotMatrix, the indices would be [0, 1] [2, 3] + * The index starts at 0 in the upper left hand corner of the grid and increases going left to right, top to bottom. + * E.g. for a 2x2 ScatterPlotMatrix, the indices would be [0, 1] [2, 3] * * @param plotIndex index * @param factor point size @@ -119,8 +119,8 @@ public ScatterPlotMatrix pointSize(final int plotIndex, final int factor) { /** * Sets the point size of the plot at index {@code plotIndex}. * - * The index starts at 0 in the upper left hand corner of the grid and increases going left to - * right, top to bottom. E.g. for a 2x2 ScatterPlotMatrix, the indices would be [0, 1] [2, 3] + * The index starts at 0 in the upper left hand corner of the grid and increases going left to right, top to bottom. + * E.g. for a 2x2 ScatterPlotMatrix, the indices would be [0, 1] [2, 3] * * @param plotIndex index * @param factor point size @@ -133,8 +133,8 @@ public ScatterPlotMatrix pointSize(final int plotIndex, final long factor) { /** * Sets the point size of the plot at index {@code plotIndex}. * - * The index starts at 0 in the upper left hand corner of the grid and increases going left to - * right, top to bottom. E.g. for a 2x2 ScatterPlotMatrix, the indices would be [0, 1] [2, 3] + * The index starts at 0 in the upper left hand corner of the grid and increases going left to right, top to bottom. + * E.g. for a 2x2 ScatterPlotMatrix, the indices would be [0, 1] [2, 3] * * @param plotIndex index * @param factor point size @@ -146,10 +146,10 @@ public ScatterPlotMatrix pointSize(final int plotIndex, double factor) { } final FigureImpl result = new FigureImpl(this) - .chart(plotIndex) - .axes(0) - .series(0) - .pointSize(factor); + .chart(plotIndex) + .axes(0) + .series(0) + .pointSize(factor); return new ScatterPlotMatrix(result, this.numCols); } @@ -157,8 +157,8 @@ public ScatterPlotMatrix pointSize(final int plotIndex, double factor) { /** * Sets the point size of the plot at index {@code plotIndex}. 
* - * The index starts at 0 in the upper left hand corner of the grid and increases going left to - * right, top to bottom. E.g. for a 2x2 ScatterPlotMatrix, the indices would be [0, 1] [2, 3] + * The index starts at 0 in the upper left hand corner of the grid and increases going left to right, top to bottom. + * E.g. for a 2x2 ScatterPlotMatrix, the indices would be [0, 1] [2, 3] * * @param plotIndex index * @param factor point size @@ -170,10 +170,10 @@ public ScatterPlotMatrix pointSize(final int plotIndex, final Number factor) { } final FigureImpl result = new FigureImpl(this) - .chart(plotIndex) - .axes(0) - .series(0) - .pointSize(factor); + .chart(plotIndex) + .axes(0) + .series(0) + .pointSize(factor); return new ScatterPlotMatrix(result, this.numCols); } @@ -181,9 +181,9 @@ public ScatterPlotMatrix pointSize(final int plotIndex, final Number factor) { /** * Sets the point size of the plot at index {@code plotIndex}. * - * Row and column numbers start at 0 in the upper left hand corner of the grid and increase - * going top to bottom and left to right respectively. For example, in a 2x2 ScatterPlotMatrix - * the coordinates would be [(0,0), (0,1)] [(1,0), (1,1)] + * Row and column numbers start at 0 in the upper left hand corner of the grid and increase going top to bottom and + * left to right respectively. For example, in a 2x2 ScatterPlotMatrix the coordinates would be [(0,0), (0,1)] + * [(1,0), (1,1)] * * @param row row index of this Figure's grid * @param col column index of this Figure's grid @@ -197,9 +197,9 @@ public ScatterPlotMatrix pointSize(final int row, final int col, final int facto /** * Sets the point size of the plot at index {@code plotIndex}. * - * Row and column numbers start at 0 in the upper left hand corner of the grid and increase - * going top to bottom and left to right respectively. 
For example, in a 2x2 ScatterPlotMatrix - * the coordinates would be [(0,0), (0,1)] [(1,0), (1,1)] + * Row and column numbers start at 0 in the upper left hand corner of the grid and increase going top to bottom and + * left to right respectively. For example, in a 2x2 ScatterPlotMatrix the coordinates would be [(0,0), (0,1)] + * [(1,0), (1,1)] * * @param row row index of this Figure's grid * @param col column index of this Figure's grid @@ -213,9 +213,9 @@ public ScatterPlotMatrix pointSize(final int row, final int col, final long fact /** * Sets the point size of the plot at index {@code plotIndex}. * - * Row and column numbers start at 0 in the upper left hand corner of the grid and increase - * going top to bottom and left to right respectively. For example, in a 2x2 ScatterPlotMatrix - * the coordinates would be [(0,0), (0,1)] [(1,0), (1,1)] + * Row and column numbers start at 0 in the upper left hand corner of the grid and increase going top to bottom and + * left to right respectively. For example, in a 2x2 ScatterPlotMatrix the coordinates would be [(0,0), (0,1)] + * [(1,0), (1,1)] * * @param row row index of this Figure's grid * @param col column index of this Figure's grid @@ -230,9 +230,9 @@ public ScatterPlotMatrix pointSize(final int row, final int col, double factor) /** * Sets the point size of the plot at index {@code plotIndex}. * - * Row and column numbers start at 0 in the upper left hand corner of the grid and increase - * going top to bottom and left to right respectively. For example, in a 2x2 ScatterPlotMatrix - * the coordinates would be [(0,0), (0,1)] [(1,0), (1,1)] + * Row and column numbers start at 0 in the upper left hand corner of the grid and increase going top to bottom and + * left to right respectively. 
For example, in a 2x2 ScatterPlotMatrix the coordinates would be [(0,0), (0,1)] + * [(1,0), (1,1)] * * @param row row index of this Figure's grid * @param col column index of this Figure's grid @@ -247,18 +247,17 @@ public ScatterPlotMatrix pointSize(final int row, final int col, final Number fa /** * Sets the point size of plot i as the factor in {@code factors} at index i. * - * The index starts at 0 in the upper left hand corner of the grid and increases going left to - * right, top to bottom. E.g. for a 2x2 ScatterPlotMatrix, the indices would be [0, 1] [2, 3] + * The index starts at 0 in the upper left hand corner of the grid and increases going left to right, top to bottom. + * E.g. for a 2x2 ScatterPlotMatrix, the indices would be [0, 1] [2, 3] * - * @throws io.deephaven.base.verify.RequirementFailure {@code factors} must not be null. The - * number of {@code factors} must be equal to the number of plots. + * @throws io.deephaven.base.verify.RequirementFailure {@code factors} must not be null. The number of + * {@code factors} must be equal to the number of plots. * @param factors point sizes * @return this ScatterPlotMatrix */ public ScatterPlotMatrix pointSize(final IndexableData factors) { Require.neqNull(factors, "factors"); - Require.eq(factors.size(), "number of factors", this.numCols * this.numCols, - "number of plots"); + Require.eq(factors.size(), "number of factors", this.numCols * this.numCols, "number of plots"); FigureImpl result = new FigureImpl(this); @@ -272,11 +271,11 @@ public ScatterPlotMatrix pointSize(final IndexableData factors) { /** * Sets the point size of plot i as the factor in {@code factors} at index i. * - * The index starts at 0 in the upper left hand corner of the grid and increases going left to - * right, top to bottom. E.g. for a 2x2 ScatterPlotMatrix, the indices would be [0, 1] [2, 3] + * The index starts at 0 in the upper left hand corner of the grid and increases going left to right, top to bottom. + * E.g. 
for a 2x2 ScatterPlotMatrix, the indices would be [0, 1] [2, 3] * - * @throws io.deephaven.base.verify.RequirementFailure {@code factors} must not be null. The - * number of {@code factors} must be equal to the number of plots. + * @throws io.deephaven.base.verify.RequirementFailure {@code factors} must not be null. The number of + * {@code factors} must be equal to the number of plots. * @param factors point sizes * @return this ScatterPlotMatrix */ @@ -287,11 +286,11 @@ public ScatterPlotMatrix pointSize(final int... factors) { /** * Sets the point size of plot i as the factor in {@code factors} at index i. * - * The index starts at 0 in the upper left hand corner of the grid and increases going left to - * right, top to bottom. E.g. for a 2x2 ScatterPlotMatrix, the indices would be [0, 1] [2, 3] + * The index starts at 0 in the upper left hand corner of the grid and increases going left to right, top to bottom. + * E.g. for a 2x2 ScatterPlotMatrix, the indices would be [0, 1] [2, 3] * - * @throws io.deephaven.base.verify.RequirementFailure {@code factors} must not be null. The - * number of {@code factors} must be equal to the number of plots. + * @throws io.deephaven.base.verify.RequirementFailure {@code factors} must not be null. The number of + * {@code factors} must be equal to the number of plots. * @param factors point sizes * @return this ScatterPlotMatrix */ @@ -302,11 +301,11 @@ public ScatterPlotMatrix pointSize(final long... factors) { /** * Sets the point size of plot i as the factor in {@code factors} at index i. * - * The index starts at 0 in the upper left hand corner of the grid and increases going left to - * right, top to bottom. E.g. for a 2x2 ScatterPlotMatrix, the indices would be [0, 1] [2, 3] + * The index starts at 0 in the upper left hand corner of the grid and increases going left to right, top to bottom. + * E.g. 
for a 2x2 ScatterPlotMatrix, the indices would be [0, 1] [2, 3] * - * @throws io.deephaven.base.verify.RequirementFailure {@code factors} must not be null. The - * number of {@code factors} must be equal to the number of plots. + * @throws io.deephaven.base.verify.RequirementFailure {@code factors} must not be null. The number of + * {@code factors} must be equal to the number of plots. * @param factors point sizes * @return this ScatterPlotMatrix */ @@ -317,11 +316,11 @@ public ScatterPlotMatrix pointSize(final double... factors) { /** * Sets the point size of plot i as the factor in {@code factors} at index i. * - * The index starts at 0 in the upper left hand corner of the grid and increases going left to - * right, top to bottom. E.g. for a 2x2 ScatterPlotMatrix, the indices would be [0, 1] [2, 3] + * The index starts at 0 in the upper left hand corner of the grid and increases going left to right, top to bottom. + * E.g. for a 2x2 ScatterPlotMatrix, the indices would be [0, 1] [2, 3] * - * @throws io.deephaven.base.verify.RequirementFailure {@code factors} must not be null. The - * number of {@code factors} must be equal to the number of plots. + * @throws io.deephaven.base.verify.RequirementFailure {@code factors} must not be null. The number of + * {@code factors} must be equal to the number of plots. * @param factors point sizes * @return this ScatterPlotMatrix */ @@ -330,23 +329,23 @@ public ScatterPlotMatrix pointSize(final T[] factors) { } - // these functions return an object which extends figure so that we can add ScatterPlotMatrix - // specific modifier methods later. + // these functions return an object which extends figure so that we can add ScatterPlotMatrix specific modifier + // methods later. /** * Creates a scatter plot matrix by graphing each variable against every other variable. 
* * @param variables data to plot * @param data type of the {@code variables} - * @return new {@link Figure} containing the scatter plot matrix where variable names are - * assigned as x1, x2, ... in order. + * @return new {@link Figure} containing the scatter plot matrix where variable names are assigned as x1, x2, ... in + * order. */ @SuppressWarnings("unchecked") public static ScatterPlotMatrix scatterPlotMatrix(final T[]... variables) { Require.neqNull(variables, "variables"); final String[] variableNames = - IntStream.range(0, variables.length).mapToObj(i -> "x" + i).toArray(String[]::new); + IntStream.range(0, variables.length).mapToObj(i -> "x" + i).toArray(String[]::new); return scatterPlotMatrix(variableNames, variables); } @@ -359,11 +358,10 @@ public static ScatterPlotMatrix scatterPlotMatrix(final T[].. * @return new {@link Figure} containing the scatter plot matrix */ @SuppressWarnings("unchecked") - public static ScatterPlotMatrix scatterPlotMatrix( - final String[] variableNames, final T[]... variables) { - final IndexableNumericData[] data = - Arrays.stream(variables).map(x -> new IndexableNumericDataArrayNumber(x, null)) - .toArray(IndexableNumericData[]::new); + public static ScatterPlotMatrix scatterPlotMatrix(final String[] variableNames, + final T[]... variables) { + final IndexableNumericData[] data = Arrays.stream(variables) + .map(x -> new IndexableNumericDataArrayNumber(x, null)).toArray(IndexableNumericData[]::new); return scatterPlotMatrix(variableNames, data); } @@ -371,14 +369,14 @@ public static ScatterPlotMatrix scatterPlotMatrix( * Creates a scatter plot matrix by graphing each variable against every other variable. * * @param variables data to plot - * @return new {@link Figure} containing the scatter plot matrix where variable names are - * assigned as x1, x2, ... in order. + * @return new {@link Figure} containing the scatter plot matrix where variable names are assigned as x1, x2, ... in + * order. 
*/ public static ScatterPlotMatrix scatterPlotMatrix(final int[]... variables) { Require.neqNull(variables, "variables"); final String[] variableNames = - IntStream.range(0, variables.length).mapToObj(i -> "x" + i).toArray(String[]::new); + IntStream.range(0, variables.length).mapToObj(i -> "x" + i).toArray(String[]::new); return scatterPlotMatrix(variableNames, variables); } @@ -389,12 +387,10 @@ public static ScatterPlotMatrix scatterPlotMatrix(final int[]... variables) { * @param variables data to plot * @return new {@link Figure} containing the scatter plot matrix */ - public static ScatterPlotMatrix scatterPlotMatrix(final String[] variableNames, - final int[]... variables) { + public static ScatterPlotMatrix scatterPlotMatrix(final String[] variableNames, final int[]... variables) { Require.neqNull(variables, "variables"); - final IndexableNumericData[] data = - Arrays.stream(variables).map(x -> new IndexableNumericDataArrayInt(x, null)) + final IndexableNumericData[] data = Arrays.stream(variables).map(x -> new IndexableNumericDataArrayInt(x, null)) .toArray(IndexableNumericData[]::new); return scatterPlotMatrix(variableNames, data); } @@ -403,14 +399,14 @@ public static ScatterPlotMatrix scatterPlotMatrix(final String[] variableNames, * Creates a scatter plot matrix by graphing each variable against every other variable. * * @param variables data to plot - * @return new {@link Figure} containing the scatter plot matrix where variable names are - * assigned as x1, x2, ... in order. + * @return new {@link Figure} containing the scatter plot matrix where variable names are assigned as x1, x2, ... in + * order. */ public static ScatterPlotMatrix scatterPlotMatrix(final long[]... 
variables) { Require.neqNull(variables, "variables"); final String[] variableNames = - IntStream.range(0, variables.length).mapToObj(i -> "x" + i).toArray(String[]::new); + IntStream.range(0, variables.length).mapToObj(i -> "x" + i).toArray(String[]::new); return scatterPlotMatrix(variableNames, variables); } @@ -421,11 +417,9 @@ public static ScatterPlotMatrix scatterPlotMatrix(final long[]... variables) { * @param variables data to plot * @return new {@link Figure} containing the scatter plot matrix */ - public static ScatterPlotMatrix scatterPlotMatrix(final String[] variableNames, - final long[]... variables) { - final IndexableNumericData[] data = - Arrays.stream(variables).map(x -> new IndexableNumericDataArrayLong(x, null)) - .toArray(IndexableNumericData[]::new); + public static ScatterPlotMatrix scatterPlotMatrix(final String[] variableNames, final long[]... variables) { + final IndexableNumericData[] data = Arrays.stream(variables) + .map(x -> new IndexableNumericDataArrayLong(x, null)).toArray(IndexableNumericData[]::new); return scatterPlotMatrix(variableNames, data); } @@ -433,14 +427,14 @@ public static ScatterPlotMatrix scatterPlotMatrix(final String[] variableNames, * Creates a scatter plot matrix by graphing each variable against every other variable. * * @param variables data to plot - * @return new {@link Figure} containing the scatter plot matrix where variable names are - * assigned as x1, x2, ... in order. + * @return new {@link Figure} containing the scatter plot matrix where variable names are assigned as x1, x2, ... in + * order. */ public static ScatterPlotMatrix scatterPlotMatrix(final float[]... 
variables) { Require.neqNull(variables, "variables"); final String[] variableNames = - IntStream.range(0, variables.length).mapToObj(i -> "x" + i).toArray(String[]::new); + IntStream.range(0, variables.length).mapToObj(i -> "x" + i).toArray(String[]::new); return scatterPlotMatrix(variableNames, variables); } @@ -451,11 +445,9 @@ public static ScatterPlotMatrix scatterPlotMatrix(final float[]... variables) { * @param variables data to plot * @return new {@link Figure} containing the scatter plot matrix */ - public static ScatterPlotMatrix scatterPlotMatrix(final String[] variableNames, - final float[]... variables) { - final IndexableNumericData[] data = - Arrays.stream(variables).map(x -> new IndexableNumericDataArrayFloat(x, null)) - .toArray(IndexableNumericData[]::new); + public static ScatterPlotMatrix scatterPlotMatrix(final String[] variableNames, final float[]... variables) { + final IndexableNumericData[] data = Arrays.stream(variables) + .map(x -> new IndexableNumericDataArrayFloat(x, null)).toArray(IndexableNumericData[]::new); return scatterPlotMatrix(variableNames, data); } @@ -463,14 +455,14 @@ public static ScatterPlotMatrix scatterPlotMatrix(final String[] variableNames, * Creates a scatter plot matrix by graphing each variable against every other variable. * * @param variables data to plot - * @return new {@link Figure} containing the scatter plot matrix where variable names are - * assigned as x1, x2, ... in order. + * @return new {@link Figure} containing the scatter plot matrix where variable names are assigned as x1, x2, ... in + * order. */ public static ScatterPlotMatrix scatterPlotMatrix(final double[]... 
variables) { Require.neqNull(variables, "variables"); final String[] variableNames = - IntStream.range(0, variables.length).mapToObj(i -> "x" + i).toArray(String[]::new); + IntStream.range(0, variables.length).mapToObj(i -> "x" + i).toArray(String[]::new); return scatterPlotMatrix(variableNames, variables); } @@ -481,31 +473,28 @@ public static ScatterPlotMatrix scatterPlotMatrix(final double[]... variables) { * @param variables data to plot * @return new {@link Figure} containing the scatter plot matrix */ - public static ScatterPlotMatrix scatterPlotMatrix(final String[] variableNames, - final double[]... variables) { - final IndexableNumericData[] data = - Arrays.stream(variables).map(x -> new IndexableNumericDataArrayDouble(x, null)) - .toArray(IndexableNumericData[]::new); + public static ScatterPlotMatrix scatterPlotMatrix(final String[] variableNames, final double[]... variables) { + final IndexableNumericData[] data = Arrays.stream(variables) + .map(x -> new IndexableNumericDataArrayDouble(x, null)).toArray(IndexableNumericData[]::new); return scatterPlotMatrix(variableNames, data); } private static ScatterPlotMatrix scatterPlotMatrix(final String[] variableNames, - final IndexableNumericData[] columns) { + final IndexableNumericData[] columns) { Require.neqNull(variableNames, "variableNames"); Require.neqNull(columns, "columns"); Figure fig = FigureFactory.figure(columns.length, columns.length); Require.eqTrue(fig instanceof FigureImpl, "fig instanceof FigureImpl"); - ArgumentValidations.assertSameSize(columns, variableNames, - ((FigureImpl) fig).getFigure().getPlotInfo()); + ArgumentValidations.assertSameSize(columns, variableNames, ((FigureImpl) fig).getFigure().getPlotInfo()); int subPlotNum = 0; for (int i = 0; i < columns.length; i++) { for (int j = 0; j < columns.length; j++) { fig = fig.newChart(subPlotNum) - .legendVisible(false); + .legendVisible(false); if (i == 0) { fig = fig.chartTitle(variableNames[j]); } @@ -540,7 +529,7 @@ public static 
ScatterPlotMatrix scatterPlotMatrix(final Table t, final String... for (int i = 0; i < columns.length; i++) { for (int j = 0; j < columns.length; j++) { fig = fig.newChart(subPlotNum) - .legendVisible(false); + .legendVisible(false); if (i == 0) { fig = fig.chartTitle(columns[j]); } @@ -566,8 +555,7 @@ public static ScatterPlotMatrix scatterPlotMatrix(final Table t, final String... * @param columns data to plot * @return new {@link Figure} containing the scatter plot matrix */ - public static ScatterPlotMatrix scatterPlotMatrix(final SelectableDataSet sds, - final String... columns) { + public static ScatterPlotMatrix scatterPlotMatrix(final SelectableDataSet sds, final String... columns) { Require.neqNull(sds, "sds"); Require.neqNull(columns, "columns"); @@ -576,7 +564,7 @@ public static ScatterPlotMatrix scatterPlotMatrix(final SelectableDataSet sds, for (int i = 0; i < columns.length; i++) { for (int j = 0; j < columns.length; j++) { fig = fig.newChart(subPlotNum) - .legendVisible(false); + .legendVisible(false); if (i == 0) { fig = fig.chartTitle(columns[j]); } diff --git a/Plot/src/main/java/io/deephaven/db/plot/datasets/AbstractDataSeries.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/AbstractDataSeries.java index 84097478ab1..19d8897fd3a 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/datasets/AbstractDataSeries.java +++ b/Plot/src/main/java/io/deephaven/db/plot/datasets/AbstractDataSeries.java @@ -15,8 +15,7 @@ /** * Base class for {@link DataSeriesInternal}. 
*/ -public abstract class AbstractDataSeries extends AbstractSeriesInternal - implements DataSeriesInternal, Serializable { +public abstract class AbstractDataSeries extends AbstractSeriesInternal implements DataSeriesInternal, Serializable { private static final long serialVersionUID = 8733895549099825055L; @@ -132,8 +131,8 @@ public String getYToolTipPattern() { return yToolTipPattern; } - // below is done as setters so that when the associated methods are implemented, they return the - // most precise type for the builder + // below is done as setters so that when the associated methods are implemented, they return the most precise type + // for the builder ////////////////////////// visibility ////////////////////////// diff --git a/Plot/src/main/java/io/deephaven/db/plot/datasets/DataSeries.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/DataSeries.java index 26fe46386dd..d19b941a8bb 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/datasets/DataSeries.java +++ b/Plot/src/main/java/io/deephaven/db/plot/datasets/DataSeries.java @@ -52,8 +52,8 @@ public interface DataSeries extends Series, Serializable { /** - * Sets the point size. A scale factor of 1 is the default size. A scale factor of 2 is 2x the - * default size. Unspecified points use the default size. + * Sets the point size. A scale factor of 1 is the default size. A scale factor of 2 is 2x the default size. + * Unspecified points use the default size. * * @param factor point size * @return this data series. @@ -61,8 +61,8 @@ public interface DataSeries extends Series, Serializable { DataSeries pointSize(int factor); /** - * Sets the point size. A scale factor of 1 is the default size. A scale factor of 2 is 2x the - * default size. Unspecified points use the default size. + * Sets the point size. A scale factor of 1 is the default size. A scale factor of 2 is 2x the default size. + * Unspecified points use the default size. * * @param factor point size * @return this data series. 
@@ -70,8 +70,8 @@ public interface DataSeries extends Series, Serializable { DataSeries pointSize(long factor); /** - * Sets the point size. A scale factor of 1 is the default size. A scale factor of 2 is 2x the - * default size. Unspecified points use the default size. + * Sets the point size. A scale factor of 1 is the default size. A scale factor of 2 is 2x the default size. + * Unspecified points use the default size. * * @param factor point size * @return this data series. @@ -79,8 +79,8 @@ public interface DataSeries extends Series, Serializable { DataSeries pointSize(double factor); /** - * Sets the point size. A scale factor of 1 is the default size. A scale factor of 2 is 2x the - * default size. Unspecified points use the default size. + * Sets the point size. A scale factor of 1 is the default size. A scale factor of 2 is 2x the default size. + * Unspecified points use the default size. * * @param factor point size * @return this data series. @@ -202,8 +202,8 @@ default DataSeries seriesColor(final String color) { /** - * Sets the point label for data point i from index i of the input labels. Points outside of - * these indices are unlabeled. + * Sets the point label for data point i from index i of the input labels. Points outside of these indices are + * unlabeled. * * @param label label * @return this XYDataSeries @@ -215,8 +215,8 @@ default DataSeries seriesColor(final String color) { /** - * Sets the point shapes for data point i from index i of the input labels. Points outside of - * these indices use default shapes. + * Sets the point shapes for data point i from index i of the input labels. Points outside of these indices use + * default shapes. * * @param shape shape * @return this DataSeries @@ -224,8 +224,8 @@ default DataSeries seriesColor(final String color) { DataSeries pointShape(final String shape); /** - * Sets the point shapes for data point i from index i of the input labels. Points outside of - * these indices use default shapes. 
+ * Sets the point shapes for data point i from index i of the input labels. Points outside of these indices use + * default shapes. * * @param shape shape * @return this DataSeries @@ -279,8 +279,8 @@ default DataSeries seriesColor(final String color) { /** * Sets the point label format. *

    - * Use {0} where the data series name should be inserted, {1} for the x-value and {2} y-value - * e.g. "{0}: ({1}, {2})" will display as Series1: (2.0, 5.5). + * Use {0} where the data series name should be inserted, {1} for the x-value and {2} y-value e.g. "{0}: ({1}, {2})" + * will display as Series1: (2.0, 5.5). * * @param format format * @return this data series. diff --git a/Plot/src/main/java/io/deephaven/db/plot/datasets/DynamicSeriesNamer.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/DynamicSeriesNamer.java index 803895f4701..83b7053018f 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/datasets/DynamicSeriesNamer.java +++ b/Plot/src/main/java/io/deephaven/db/plot/datasets/DynamicSeriesNamer.java @@ -32,8 +32,7 @@ public synchronized void add(final Comparable name, final PlotInfo plotInfo) { final boolean inSet = names.add(nameString); if (!inSet) { throw new PlotUnsupportedOperationException( - "Series with the same name already exists in the collection. name=" + nameString, - plotInfo); + "Series with the same name already exists in the collection. name=" + nameString, plotInfo); } else { namesList.add(nameString); } @@ -53,8 +52,8 @@ public synchronized void removeAll(final Collection names) } /** - * Takes a potential series name and creates a unique name from it. If the series would be new, - * the original series name is returned. + * Takes a potential series name and creates a unique name from it. If the series would be new, the original series + * name is returned. 
* * @param potentialName potential series name * @return uniquified series name diff --git a/Plot/src/main/java/io/deephaven/db/plot/datasets/category/AbstractCategoryDataSeries.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/category/AbstractCategoryDataSeries.java index 846a7e212de..89a6db789a8 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/datasets/category/AbstractCategoryDataSeries.java +++ b/Plot/src/main/java/io/deephaven/db/plot/datasets/category/AbstractCategoryDataSeries.java @@ -37,8 +37,7 @@ *

    * Please note that any method which uses a {@link SwappableTable} must call a lastBy()! */ -public abstract class AbstractCategoryDataSeries extends AbstractDataSeries - implements CategoryDataSeriesInternal { +public abstract class AbstractCategoryDataSeries extends AbstractDataSeries implements CategoryDataSeriesInternal { private static final long serialVersionUID = 6881532832713307316L; private final AssociativeDataWithDefault colors; @@ -63,7 +62,7 @@ public AbstractCategoryDataSeries(final AxesImpl axes, final int id, final Compa * */ public AbstractCategoryDataSeries(final AxesImpl axes, final int id, final Comparable name, - final AbstractCategoryDataSeries series) { + final AbstractCategoryDataSeries series) { super(axes, id, name, series); colors = new AssociativeDataWithDefault<>(getPlotInfo()); @@ -87,8 +86,7 @@ public AbstractCategoryDataSeries(final AxesImpl axes, final int id, final Compa * @param series series to copy. * @param axes new axes to use. */ - protected AbstractCategoryDataSeries(final AbstractCategoryDataSeries series, - final AxesImpl axes) { + protected AbstractCategoryDataSeries(final AbstractCategoryDataSeries series, final AxesImpl axes) { super(series, axes); colors = new AssociativeDataWithDefault<>(series.getPlotInfo()); @@ -348,27 +346,27 @@ public AbstractCategoryDataSeries pointSize(final Comparable category, final Num } @Override - public AbstractCategoryDataSeries pointSize( - final CATEGORY[] categories, int[] factors) { + public AbstractCategoryDataSeries pointSize(final CATEGORY[] categories, + int[] factors) { return pointSize(categories, PlotUtils.toDouble(factors)); } @Override - public AbstractCategoryDataSeries pointSize( - final CATEGORY[] categories, long[] factors) { + public AbstractCategoryDataSeries pointSize(final CATEGORY[] categories, + long[] factors) { return pointSize(categories, PlotUtils.toDouble(factors)); } @Override public AbstractCategoryDataSeries pointSize( - final CATEGORY[] categories, 
NUMBER[] factors) { + final CATEGORY[] categories, NUMBER[] factors) { return pointSize(categories, PlotUtils.toDouble(factors)); } @Override - public AbstractCategoryDataSeries pointSize( - final CATEGORY[] categories, double[] factors) { + public AbstractCategoryDataSeries pointSize(final CATEGORY[] categories, + double[] factors) { ArgumentValidations.assertNotNull(categories, "categories", getPlotInfo()); ArgumentValidations.assertNotNull(factors, "factors", getPlotInfo()); Require.eq(categories.length, "categories.length", factors.length, "factors.length"); @@ -385,7 +383,7 @@ public AbstractCategoryDataSeries pointSize( @Override public AbstractCategoryDataSeries pointSize( - final Map factors) { + final Map factors) { if (!this.sizes.isModifiable()) { this.sizes.setSpecific(new AssociativeDataHashMap<>(getPlotInfo())); } @@ -409,11 +407,10 @@ public AbstractCategoryDataSeries pointSize(Table t, String keyColumn, String va addTableHandle(tableHandle); ArgumentValidations.assertInstance(t, keyColumn, Comparable.class, - "key column is not a supported type (Comparable): keyColumn=" + keyColumn, - getPlotInfo()); + "key column is not a supported type (Comparable): keyColumn=" + keyColumn, getPlotInfo()); - sizes.setSpecific(new AssociativeDataTable(tableHandle, - keyColumn, valueColumn, Comparable.class, Number.class, getPlotInfo()) { + sizes.setSpecific(new AssociativeDataTable(tableHandle, keyColumn, valueColumn, + Comparable.class, Number.class, getPlotInfo()) { @Override public Double convert(Number v) { return PlotUtils.numberToDouble(v); @@ -424,24 +421,21 @@ public Double convert(Number v) { } @Override - public AbstractCategoryDataSeries pointSize(SelectableDataSet sds, String keyColumn, - String valueColumn) { + public AbstractCategoryDataSeries pointSize(SelectableDataSet sds, String keyColumn, String valueColumn) { ArgumentValidations.assertNotNull(sds, "sds", getPlotInfo()); ArgumentValidations.assertNotNull(keyColumn, "keyColumn", getPlotInfo()); 
ArgumentValidations.assertNotNull(valueColumn, "valueColumn", getPlotInfo()); final Function tableTransform = - (Function & Serializable) table -> table.lastBy(keyColumn); - final SwappableTable t = - sds.getSwappableTable(name(), chart(), tableTransform, valueColumn); + (Function & Serializable) table -> table.lastBy(keyColumn); + final SwappableTable t = sds.getSwappableTable(name(), chart(), tableTransform, valueColumn); addSwappableTable(t); ArgumentValidations.assertInstance(t.getTableDefinition(), keyColumn, Comparable.class, - "key column is not a supported type (Comparable): keyColumn=" + keyColumn, - getPlotInfo()); + "key column is not a supported type (Comparable): keyColumn=" + keyColumn, getPlotInfo()); - sizes.setSpecific(new AssociativeDataSwappableTable(t, - keyColumn, valueColumn, Comparable.class, Number.class, getPlotInfo()) { + sizes.setSpecific(new AssociativeDataSwappableTable(t, keyColumn, valueColumn, + Comparable.class, Number.class, getPlotInfo()) { @Override public Double convert(Number v) { return PlotUtils.numberToDouble(v); @@ -493,7 +487,7 @@ public CategoryDataSeriesInternal pointColor(final Comparable category, final St @Override public AbstractCategoryDataSeries pointColor( - final Map colors) { + final Map colors) { if (!this.colors.isModifiable()) { this.colors.setSpecific(new AssociativeDataHashMap<>(getPlotInfo())); } @@ -504,7 +498,7 @@ public AbstractCategoryDataSe @Override public AbstractCategoryDataSeries pointColorInteger( - final Map colors) { + final Map colors) { if (!this.colors.isModifiable()) { this.colors.setSpecific(new AssociativeDataHashMap<>(getPlotInfo())); } @@ -517,74 +511,63 @@ public AbstractCategoryData } @Override - public AbstractCategoryDataSeries pointColor(final Table t, final String keyColumn, - final String valueColumn) { + public AbstractCategoryDataSeries pointColor(final Table t, final String keyColumn, final String valueColumn) { ArgumentValidations.assertNotNull(t, "table", getPlotInfo()); 
ArgumentValidations.assertNotNull(keyColumn, "keyColumn", getPlotInfo()); ArgumentValidations.assertNotNull(valueColumn, "valueColumn", getPlotInfo()); ArgumentValidations.assertInstance(t, keyColumn, Comparable.class, - "key column is not a supported type (Comparable): keyColumn=" + keyColumn, - getPlotInfo()); + "key column is not a supported type (Comparable): keyColumn=" + keyColumn, getPlotInfo()); final TableHandle tableHandle = new TableHandle(t, keyColumn, valueColumn); addTableHandle(tableHandle); final ColumnHandlerFactory.ColumnHandler valueColumnHandler = - ColumnHandlerFactory.newNumericHandler(tableHandle, valueColumn, getPlotInfo()); - - if (valueColumnHandler.typeClassification() - .equals(ColumnHandlerFactory.TypeClassification.INTEGER) && - (valueColumnHandler.type() == int.class - || valueColumnHandler.type() == Integer.class)) { - this.colors.setSpecific(new AssociativeDataTableComparablePaint(tableHandle, keyColumn, - valueColumn, chart(), getPlotInfo())); - } else if (valueColumnHandler.typeClassification() - .equals(ColumnHandlerFactory.TypeClassification.PAINT)) { - this.colors.setSpecific(new AssociativeDataTable<>(tableHandle, keyColumn, valueColumn, - Comparable.class, Paint.class, getPlotInfo())); + ColumnHandlerFactory.newNumericHandler(tableHandle, valueColumn, getPlotInfo()); + + if (valueColumnHandler.typeClassification().equals(ColumnHandlerFactory.TypeClassification.INTEGER) && + (valueColumnHandler.type() == int.class || valueColumnHandler.type() == Integer.class)) { + this.colors.setSpecific(new AssociativeDataTableComparablePaint(tableHandle, keyColumn, valueColumn, + chart(), getPlotInfo())); + } else if (valueColumnHandler.typeClassification().equals(ColumnHandlerFactory.TypeClassification.PAINT)) { + this.colors.setSpecific(new AssociativeDataTable<>(tableHandle, keyColumn, valueColumn, Comparable.class, + Paint.class, getPlotInfo())); } else { - throw new PlotUnsupportedOperationException( - "Column can not be converted into 
a color: column=" + valueColumn + "\ttype=" - + valueColumnHandler.type(), - this); + throw new PlotUnsupportedOperationException("Column can not be converted into a color: column=" + + valueColumn + "\ttype=" + valueColumnHandler.type(), this); } return this; } @Override - public AbstractCategoryDataSeries pointColor(final SelectableDataSet sds, - final String keyColumn, final String valueColumn) { + public AbstractCategoryDataSeries pointColor(final SelectableDataSet sds, final String keyColumn, + final String valueColumn) { ArgumentValidations.assertNotNull(sds, "sds", getPlotInfo()); ArgumentValidations.assertNotNull(keyColumn, "keyColumn", getPlotInfo()); ArgumentValidations.assertNotNull(valueColumn, "valueColumn", getPlotInfo()); ArgumentValidations.assertInstance(sds, keyColumn, Comparable.class, - "key column is not a supported type (Comparable): keyColumn=" + keyColumn, - getPlotInfo()); + "key column is not a supported type (Comparable): keyColumn=" + keyColumn, getPlotInfo()); final Class type = sds.getTableDefinition().getColumn(valueColumn).getDataType(); - final boolean isInt = type.equals(int.class) || type.equals(Integer.class) - || type.equals(short.class) || type.equals(Short.class); + final boolean isInt = type.equals(int.class) || type.equals(Integer.class) || type.equals(short.class) + || type.equals(Short.class); final boolean isPaint = Paint.class.isAssignableFrom(type); if (!isInt && !isPaint) { throw new PlotUnsupportedOperationException( - "Column can not be converted into a color: column=" + valueColumn + "\ttype=" - + type, - this); + "Column can not be converted into a color: column=" + valueColumn + "\ttype=" + type, this); } final Function tableTransform = - (Function & Serializable) table -> table.lastBy(keyColumn); - final SwappableTable t = - sds.getSwappableTable(name(), chart(), tableTransform, keyColumn, valueColumn); + (Function & Serializable) table -> table.lastBy(keyColumn); + final SwappableTable t = 
sds.getSwappableTable(name(), chart(), tableTransform, keyColumn, valueColumn); addSwappableTable(t); if (isInt) { - this.colors.setSpecific(new AssociativeDataSwappableTableComparablePaint(t, keyColumn, - valueColumn, chart(), getPlotInfo())); + this.colors.setSpecific(new AssociativeDataSwappableTableComparablePaint(t, keyColumn, valueColumn, chart(), + getPlotInfo())); } else if (isPaint) { - this.colors.setSpecific(new AssociativeDataSwappableTable<>(t, keyColumn, valueColumn, - Comparable.class, Paint.class, getPlotInfo())); + this.colors.setSpecific(new AssociativeDataSwappableTable<>(t, keyColumn, valueColumn, Comparable.class, + Paint.class, getPlotInfo())); } else { throw new PlotIllegalStateException("Should never reach here", this); } @@ -625,7 +608,7 @@ public AbstractCategoryDataSeries pointLabel(final Comparable category, final Ob @Override public AbstractCategoryDataSeries pointLabel( - final Map labels) { + final Map labels) { if (!this.labels.isModifiable()) { this.labels.setSpecific(new AssociativeDataHashMap<>(getPlotInfo())); } @@ -638,16 +621,13 @@ public AbstractCategoryDataSeries pointLabe } @Override - public AbstractCategoryDataSeries pointLabel(final Table t, final String keyColumn, - final String valueColumn) { + public AbstractCategoryDataSeries pointLabel(final Table t, final String keyColumn, final String valueColumn) { final TableHandle tableHandle = new TableHandle(t, keyColumn, valueColumn); addTableHandle(tableHandle); ArgumentValidations.assertInstance(t, keyColumn, Comparable.class, - "key column is not a supported type (Comparable): keyColumn=" + keyColumn, - getPlotInfo()); + "key column is not a supported type (Comparable): keyColumn=" + keyColumn, getPlotInfo()); - this.labels.setSpecific( - new AssociativeDataTableLabel(tableHandle, keyColumn, valueColumn, getPlotInfo())); + this.labels.setSpecific(new AssociativeDataTableLabel(tableHandle, keyColumn, valueColumn, getPlotInfo())); return this; } @@ -677,20 +657,17 @@ public 
AbstractCategoryDataSeries yToolTipPattern(final String format) { } @Override - public AbstractCategoryDataSeries pointLabel(final SelectableDataSet sds, - final String keyColumn, final String valueColumn) { + public AbstractCategoryDataSeries pointLabel(final SelectableDataSet sds, final String keyColumn, + final String valueColumn) { ArgumentValidations.assertColumnsInTable(sds, getPlotInfo(), keyColumn, valueColumn); ArgumentValidations.assertInstance(sds, keyColumn, Comparable.class, - "key column is not a supported type (Comparable): keyColumn=" + keyColumn, - getPlotInfo()); + "key column is not a supported type (Comparable): keyColumn=" + keyColumn, getPlotInfo()); final Function tableTransform = - (Function & Serializable) table -> table.lastBy(keyColumn); - final SwappableTable t = - sds.getSwappableTable(name(), chart(), tableTransform, keyColumn, valueColumn); + (Function & Serializable) table -> table.lastBy(keyColumn); + final SwappableTable t = sds.getSwappableTable(name(), chart(), tableTransform, keyColumn, valueColumn); addSwappableTable(t); - this.labels.setSpecific( - new AssociativeDataSwappableTableLabel(t, keyColumn, valueColumn, getPlotInfo())); + this.labels.setSpecific(new AssociativeDataSwappableTableLabel(t, keyColumn, valueColumn, getPlotInfo())); return this; } @@ -722,8 +699,7 @@ public CategoryDataSeries pointShape(final Comparable category, final Shape shap } @Override - public CategoryDataSeries pointShape( - final Map shapes) { + public CategoryDataSeries pointShape(final Map shapes) { ArgumentValidations.assertNotNull(shapes, "shapes", getPlotInfo()); if (!this.shapes.isModifiable()) { @@ -735,10 +711,8 @@ public CategoryDataSeries pointShape( this.shapes.put(l.getKey(), NamedShape.getShape(l.getValue())); } catch (final IllegalArgumentException iae) { this.shapes.setSpecific(null); - throw new PlotIllegalArgumentException( - "Not a valid shape: `" + l.getValue() + "` for category:" + l.getKey() - + "; valid shapes: " + 
NamedShape.getShapesString(), - this); + throw new PlotIllegalArgumentException("Not a valid shape: `" + l.getValue() + "` for category:" + + l.getKey() + "; valid shapes: " + NamedShape.getShapesString(), this); } } @@ -746,31 +720,28 @@ public CategoryDataSeries pointShape( } @Override - public CategoryDataSeries pointShape(final Table t, final String keyColumn, - final String valueColumn) { + public CategoryDataSeries pointShape(final Table t, final String keyColumn, final String valueColumn) { ArgumentValidations.assertNotNull(t, "t", getPlotInfo()); ArgumentValidations.assertNotNull(keyColumn, "keyColumn", getPlotInfo()); ArgumentValidations.assertNotNull(valueColumn, "valueColumn", getPlotInfo()); ArgumentValidations.assertInstance(t, keyColumn, Comparable.class, - "key column is not a supported type (Comparable): keyColumn=" + keyColumn, - getPlotInfo()); + "key column is not a supported type (Comparable): keyColumn=" + keyColumn, getPlotInfo()); final Class columnType = ArgumentValidations.getColumnType(t, valueColumn, getPlotInfo()); if (String.class.isAssignableFrom(columnType)) { final TableHandle tableHandle = new TableHandle(t, keyColumn, valueColumn); addTableHandle(tableHandle); - this.shapes.setSpecific(new AssociativeDataTablePointShapeString(tableHandle, keyColumn, - valueColumn, getPlotInfo())); + this.shapes.setSpecific( + new AssociativeDataTablePointShapeString(tableHandle, keyColumn, valueColumn, getPlotInfo())); } else if (Shape.class.isAssignableFrom(columnType)) { final TableHandle tableHandle = new TableHandle(t, keyColumn, valueColumn); addTableHandle(tableHandle); - this.shapes.setSpecific(new AssociativeDataTablePointShapeObj(tableHandle, keyColumn, - valueColumn, getPlotInfo())); + this.shapes.setSpecific( + new AssociativeDataTablePointShapeObj(tableHandle, keyColumn, valueColumn, getPlotInfo())); } else { throw new PlotRuntimeException( - "column is not a supported type (String or Shape): columnName=" + valueColumn, - this); + 
"column is not a supported type (String or Shape): columnName=" + valueColumn, this); } return this; @@ -778,42 +749,38 @@ public CategoryDataSeries pointShape(final Table t, final String keyColumn, @Override public CategoryDataSeries pointShape(final SelectableDataSet sds, final String keyColumn, - final String valueColumn) { + final String valueColumn) { ArgumentValidations.assertNotNull(sds, "sds", getPlotInfo()); ArgumentValidations.assertNotNull(keyColumn, "keyColumn", getPlotInfo()); ArgumentValidations.assertNotNull(valueColumn, "valueColumn", getPlotInfo()); ArgumentValidations.assertColumnsInTable(sds, getPlotInfo(), keyColumn, valueColumn); ArgumentValidations.assertInstance(sds, keyColumn, Comparable.class, - "key column is not a supported type (Comparable): keyColumn=" + keyColumn, - getPlotInfo()); + "key column is not a supported type (Comparable): keyColumn=" + keyColumn, getPlotInfo()); - final SwappableTable swappableTable = - sds.getSwappableTable(name(), chart(), keyColumn, valueColumn); + final SwappableTable swappableTable = sds.getSwappableTable(name(), chart(), keyColumn, valueColumn); final Class columnType = ArgumentValidations.getColumnType(sds, valueColumn, getPlotInfo()); if (String.class.isAssignableFrom(columnType)) { addSwappableTable(swappableTable); - this.shapes.setSpecific(new AssociativeDataSwappableTablePointShapeString( - swappableTable, keyColumn, valueColumn, getPlotInfo())); + this.shapes.setSpecific(new AssociativeDataSwappableTablePointShapeString(swappableTable, keyColumn, + valueColumn, getPlotInfo())); } else if (Shape.class.isAssignableFrom(columnType)) { addSwappableTable(swappableTable); - this.shapes.setSpecific(new AssociativeDataSwappableTablePointShapeObj(swappableTable, - keyColumn, valueColumn, getPlotInfo())); + this.shapes.setSpecific(new AssociativeDataSwappableTablePointShapeObj(swappableTable, keyColumn, + valueColumn, getPlotInfo())); } else { throw new PlotRuntimeException( - "column is not a supported 
type (String OR Shape): columnName=" + valueColumn, - this); + "column is not a supported type (String OR Shape): columnName=" + valueColumn, this); } return this; } - private static class AssociativeDataTablePointShapeString - extends AssociativeDataTable { + private static class AssociativeDataTablePointShapeString extends AssociativeDataTable { private static final long serialVersionUID = -782616004116345049L; - AssociativeDataTablePointShapeString(final TableHandle handle, final String keyColumn, - final String valueColumn, final PlotInfo plotInfo) { + AssociativeDataTablePointShapeString(final TableHandle handle, final String keyColumn, final String valueColumn, + final PlotInfo plotInfo) { super(handle, keyColumn, valueColumn, Comparable.class, String.class, plotInfo); } @@ -823,14 +790,13 @@ public Shape convert(final String v) { } } - private static class AssociativeDataTablePointShapeObj - extends AssociativeDataTable { + private static class AssociativeDataTablePointShapeObj extends AssociativeDataTable { private static final long serialVersionUID = -2868764888409198544L; - AssociativeDataTablePointShapeObj(final TableHandle handle, final String keyColumn, - final String valueColumn, final PlotInfo plotInfo) { + AssociativeDataTablePointShapeObj(final TableHandle handle, final String keyColumn, final String valueColumn, + final PlotInfo plotInfo) { super(handle, keyColumn, valueColumn, Comparable.class, Shape.class, plotInfo); } @@ -841,12 +807,12 @@ public Shape convert(final Shape v) { } private static class AssociativeDataSwappableTablePointShapeString - extends AssociativeDataSwappableTable { + extends AssociativeDataSwappableTable { private static final long serialVersionUID = 120758160744582475L; - AssociativeDataSwappableTablePointShapeString(final SwappableTable t, - final String keyColumn, final String valueColumn, final PlotInfo plotInfo) { + AssociativeDataSwappableTablePointShapeString(final SwappableTable t, final String keyColumn, + final 
String valueColumn, final PlotInfo plotInfo) { super(t, keyColumn, valueColumn, Comparable.class, String.class, plotInfo); } @@ -857,12 +823,12 @@ public Shape convert(final String v) { } private static class AssociativeDataSwappableTablePointShapeObj - extends AssociativeDataSwappableTable { + extends AssociativeDataSwappableTable { private static final long serialVersionUID = -682972849470492883L; AssociativeDataSwappableTablePointShapeObj(final SwappableTable t, final String keyColumn, - final String valueColumn, final PlotInfo plotInfo) { + final String valueColumn, final PlotInfo plotInfo) { super(t, keyColumn, valueColumn, Comparable.class, Shape.class, plotInfo); } @@ -872,15 +838,15 @@ public Shape convert(final Shape v) { } } - private static class AssociativeDataPaintByYMap - extends AssociativeData implements Serializable { + private static class AssociativeDataPaintByYMap extends AssociativeData + implements Serializable { private static final long serialVersionUID = 1040533194319869777L; private final Map colors; private final AbstractCategoryDataSeries dataSeries; private AssociativeDataPaintByYMap(final Map colors, - final AbstractCategoryDataSeries dataSeries) { + final AbstractCategoryDataSeries dataSeries) { super(dataSeries.getPlotInfo()); this.colors = colors; this.dataSeries = dataSeries; @@ -898,25 +864,22 @@ public boolean isModifiable() { @Override public void put(Comparable comparable, Paint paint) { - throw new PlotUnsupportedOperationException( - "AssociativeDataPaintByY can not be modified", this); + throw new PlotUnsupportedOperationException("AssociativeDataPaintByY can not be modified", this); } @Override public void putAll(Map values) { - throw new PlotUnsupportedOperationException( - "AssociativeDataPaintByY can not be modified", this); + throw new PlotUnsupportedOperationException("AssociativeDataPaintByY can not be modified", this); } } - private static class AssociativeDataTableComparablePaint - extends AssociativeDataTable { + 
private static class AssociativeDataTableComparablePaint extends AssociativeDataTable { private static final long serialVersionUID = 2872945661540856625L; private final ChartImpl chart; - AssociativeDataTableComparablePaint(final TableHandle handle, final String keyColumn, - final String valueColumn, final ChartImpl chart, final PlotInfo plotInfo) { + AssociativeDataTableComparablePaint(final TableHandle handle, final String keyColumn, final String valueColumn, + final ChartImpl chart, final PlotInfo plotInfo) { super(handle, keyColumn, valueColumn, Comparable.class, Integer.class, plotInfo); this.chart = chart; } @@ -928,13 +891,13 @@ public Paint convert(Integer v) { } private static class AssociativeDataSwappableTableComparablePaint - extends AssociativeDataSwappableTable { + extends AssociativeDataSwappableTable { private static final long serialVersionUID = -644994476705986379L; private final ChartImpl chart; AssociativeDataSwappableTableComparablePaint(final SwappableTable t, final String keyColumn, - final String valueColumn, final ChartImpl chart, final PlotInfo plotInfo) { + final String valueColumn, final ChartImpl chart, final PlotInfo plotInfo) { super(t, keyColumn, valueColumn, Comparable.class, Integer.class, plotInfo); this.chart = chart; } @@ -945,12 +908,11 @@ public Paint convert(Integer v) { } } - private static class AssociativeDataTableLabel - extends AssociativeDataTable { + private static class AssociativeDataTableLabel extends AssociativeDataTable { private static final long serialVersionUID = -2209957632708434850L; - AssociativeDataTableLabel(final TableHandle handle, final String keyColumn, - final String valueColumn, final PlotInfo plotInfo) { + AssociativeDataTableLabel(final TableHandle handle, final String keyColumn, final String valueColumn, + final PlotInfo plotInfo) { super(handle, keyColumn, valueColumn, Comparable.class, Object.class, plotInfo); } @@ -961,12 +923,12 @@ public String convert(Object v) { } private static class 
AssociativeDataSwappableTableLabel - extends AssociativeDataSwappableTable { + extends AssociativeDataSwappableTable { private static final long serialVersionUID = -7266731699707547063L; // The SwappableTable needs to have a lastBy applied - AssociativeDataSwappableTableLabel(final SwappableTable t, final String keyColumn, - final String valueColumn, final PlotInfo plotInfo) { + AssociativeDataSwappableTableLabel(final SwappableTable t, final String keyColumn, final String valueColumn, + final PlotInfo plotInfo) { super(t, keyColumn, valueColumn, Comparable.class, Object.class, plotInfo); } diff --git a/Plot/src/main/java/io/deephaven/db/plot/datasets/category/AbstractMapBasedCategoryDataSeries.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/category/AbstractMapBasedCategoryDataSeries.java index 7ef050574e1..d4470fb96b2 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/datasets/category/AbstractMapBasedCategoryDataSeries.java +++ b/Plot/src/main/java/io/deephaven/db/plot/datasets/category/AbstractMapBasedCategoryDataSeries.java @@ -17,13 +17,12 @@ public AbstractMapBasedCategoryDataSeries(AxesImpl axes, int id, Comparable name super(axes, id, name); } - public AbstractMapBasedCategoryDataSeries(final AxesImpl axes, final int id, - final Comparable name, final AbstractCategoryDataSeries series) { + public AbstractMapBasedCategoryDataSeries(final AxesImpl axes, final int id, final Comparable name, + final AbstractCategoryDataSeries series) { super(axes, id, name, series); } - public AbstractMapBasedCategoryDataSeries(final AbstractCategoryDataSeries series, - final AxesImpl axes) { + public AbstractMapBasedCategoryDataSeries(final AbstractCategoryDataSeries series, final AxesImpl axes) { super(series, axes); } @@ -33,20 +32,18 @@ public CategoryDataSeries pointShape(final Function shapes) } @Override - public AbstractCategoryDataSeries pointSize( - final Function factors) { + public AbstractCategoryDataSeries pointSize(final Function factors) { return 
super.pointSize(constructMapFromData(factors)); } @Override - public AbstractCategoryDataSeries pointColor( - final Function colors) { + public AbstractCategoryDataSeries pointColor(final Function colors) { return super.pointColor(constructMapFromData(colors)); } @Override public AbstractCategoryDataSeries pointColorInteger( - final Function colors) { + final Function colors) { return super.pointColorInteger(constructMapFromData(colors)); } diff --git a/Plot/src/main/java/io/deephaven/db/plot/datasets/category/AbstractSwappableTableBasedCategoryDataSeries.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/category/AbstractSwappableTableBasedCategoryDataSeries.java index 2e4b5e6770b..2c916d2108f 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/datasets/category/AbstractSwappableTableBasedCategoryDataSeries.java +++ b/Plot/src/main/java/io/deephaven/db/plot/datasets/category/AbstractSwappableTableBasedCategoryDataSeries.java @@ -22,20 +22,18 @@ import static io.deephaven.db.plot.util.PlotUtils.intToColor; -public abstract class AbstractSwappableTableBasedCategoryDataSeries - extends AbstractCategoryDataSeries { +public abstract class AbstractSwappableTableBasedCategoryDataSeries extends AbstractCategoryDataSeries { public AbstractSwappableTableBasedCategoryDataSeries(AxesImpl axes, int id, Comparable name) { super(axes, id, name); } - public AbstractSwappableTableBasedCategoryDataSeries(final AxesImpl axes, final int id, - final Comparable name, final AbstractCategoryDataSeries series) { + public AbstractSwappableTableBasedCategoryDataSeries(final AxesImpl axes, final int id, final Comparable name, + final AbstractCategoryDataSeries series) { super(axes, id, name, series); } - public AbstractSwappableTableBasedCategoryDataSeries(final AbstractCategoryDataSeries series, - final AxesImpl axes) { + public AbstractSwappableTableBasedCategoryDataSeries(final AbstractCategoryDataSeries series, final AxesImpl axes) { super(series, axes); } @@ -43,77 +41,73 @@ 
public AbstractSwappableTableBasedCategoryDataSeries(final AbstractCategoryDataS public CategoryDataSeries pointShape(final Function shapes) { final String colName = ColumnNameConstants.POINT_SHAPE + this.hashCode(); chart().figure().registerTableMapFunction(getSwappableTable().getTableMapHandle(), - constructTableMapFromFunctionOnCategoryCol(shapes, String.class, colName)); + constructTableMapFromFunctionOnCategoryCol(shapes, String.class, colName)); getSwappableTable().getTableMapHandle().addColumn(colName); chart().figure().registerFigureFunction(new FigureImplFunction(figImpl -> { ((AbstractSwappableTableBasedCategoryDataSeries) figImpl.getFigure().getCharts() - .getChart(chart().row(), chart().column()).axes(axes().id()).series(id())) - .shapesSetSpecific(new AssociativeDataSwappableTable( - getSwappableTable(), getCategoryCol(), colName, Comparable.class, - String.class, getPlotInfo()) { - @Override - public Shape convert(String v) { - return NamedShape.getShape(v); - } - }); + .getChart(chart().row(), chart().column()).axes(axes().id()).series(id())) + .shapesSetSpecific( + new AssociativeDataSwappableTable(getSwappableTable(), + getCategoryCol(), colName, Comparable.class, String.class, getPlotInfo()) { + @Override + public Shape convert(String v) { + return NamedShape.getShape(v); + } + }); return figImpl; }, this)); return this; } @Override - public CategoryDataSeries pointSize( - final Function factors) { + public CategoryDataSeries pointSize(final Function factors) { final String colName = ColumnNameConstants.POINT_SIZE + this.hashCode(); chart().figure().registerTableMapFunction(getSwappableTable().getTableMapHandle(), - constructTableMapFromFunctionOnCategoryCol(factors, Number.class, colName)); + constructTableMapFromFunctionOnCategoryCol(factors, Number.class, colName)); getSwappableTable().getTableMapHandle().addColumn(colName); chart().figure().registerFigureFunction(new FigureImplFunction(figImpl -> { 
((AbstractSwappableTableBasedCategoryDataSeries) figImpl.getFigure().getCharts() - .getChart(chart().row(), chart().column()).axes(axes().id()).series(id())) - .sizesSetSpecific(new AssociativeDataSwappableTable<>(getSwappableTable(), - getCategoryCol(), colName, Comparable.class, Number.class, getPlotInfo())); + .getChart(chart().row(), chart().column()).axes(axes().id()).series(id())) + .sizesSetSpecific(new AssociativeDataSwappableTable<>(getSwappableTable(), getCategoryCol(), + colName, Comparable.class, Number.class, getPlotInfo())); return figImpl; }, this)); return this; } @Override - public CategoryDataSeries pointColor( - final Function colors) { + public CategoryDataSeries pointColor(final Function colors) { final String colName = ColumnNameConstants.POINT_COLOR + this.hashCode(); chart().figure().registerTableMapFunction(getSwappableTable().getTableMapHandle(), - constructTableMapFromFunctionOnCategoryCol(colors, Paint.class, colName)); + constructTableMapFromFunctionOnCategoryCol(colors, Paint.class, colName)); getSwappableTable().getTableMapHandle().addColumn(colName); chart().figure().registerFigureFunction(new FigureImplFunction(figImpl -> { ((AbstractSwappableTableBasedCategoryDataSeries) figImpl.getFigure().getCharts() - .getChart(chart().row(), chart().column()).axes(axes().id()).series(id())) - .colorsSetSpecific(new AssociativeDataSwappableTable<>(getSwappableTable(), - getCategoryCol(), colName, Comparable.class, Paint.class, getPlotInfo())); + .getChart(chart().row(), chart().column()).axes(axes().id()).series(id())) + .colorsSetSpecific(new AssociativeDataSwappableTable<>(getSwappableTable(), + getCategoryCol(), colName, Comparable.class, Paint.class, getPlotInfo())); return figImpl; }, this)); return this; } @Override - public CategoryDataSeries pointColorInteger( - final Function colors) { + public CategoryDataSeries pointColorInteger(final Function colors) { final String colName = ColumnNameConstants.POINT_COLOR + this.hashCode(); 
chart().figure().registerTableMapFunction(getSwappableTable().getTableMapHandle(), - constructTableMapFromFunctionOnCategoryCol(colors, Integer.class, colName)); + constructTableMapFromFunctionOnCategoryCol(colors, Integer.class, colName)); getSwappableTable().getTableMapHandle().addColumn(colName); chart().figure().registerFigureFunction(new FigureImplFunction(figImpl -> { ((AbstractSwappableTableBasedCategoryDataSeries) figImpl.getFigure().getCharts() - .getChart(chart().row(), chart().column()).axes(axes().id()).series(id())) - .colorsSetSpecific( - new AssociativeDataSwappableTable( - getSwappableTable(), getCategoryCol(), colName, Comparable.class, - Integer.class, getPlotInfo()) { - @Override - public Paint convert(Integer v) { - return intToColor(chart(), v); - } - }); + .getChart(chart().row(), chart().column()).axes(axes().id()).series(id())) + .colorsSetSpecific( + new AssociativeDataSwappableTable(getSwappableTable(), + getCategoryCol(), colName, Comparable.class, Integer.class, getPlotInfo()) { + @Override + public Paint convert(Integer v) { + return intToColor(chart(), v); + } + }); return figImpl; }, this)); return this; @@ -123,13 +117,13 @@ public Paint convert(Integer v) { public CategoryDataSeries pointColorByY(Function colors) { final String colName = ColumnNameConstants.POINT_COLOR + this.hashCode(); chart().figure().registerTableMapFunction(getSwappableTable().getTableMapHandle(), - constructTableMapFromFunctionOnCategoryCol(colors, Paint.class, colName)); + constructTableMapFromFunctionOnCategoryCol(colors, Paint.class, colName)); getSwappableTable().getTableMapHandle().addColumn(colName); chart().figure().registerFigureFunction(new FigureImplFunction(figImpl -> { ((AbstractSwappableTableBasedCategoryDataSeries) figImpl.getFigure().getCharts() - .getChart(chart().row(), chart().column()).axes(axes().id()).series(id())) - .colorsSetSpecific(new AssociativeDataSwappableTable<>(getSwappableTable(), - getCategoryCol(), colName, Comparable.class, 
Paint.class, getPlotInfo())); + .getChart(chart().row(), chart().column()).axes(axes().id()).series(id())) + .colorsSetSpecific(new AssociativeDataSwappableTable<>(getSwappableTable(), + getCategoryCol(), colName, Comparable.class, Paint.class, getPlotInfo())); return figImpl; }, this)); return this; @@ -139,40 +133,38 @@ public CategoryDataSeries pointColorByY(Function co public

    - * The number of categories and the number of values must be the same. Does not support repeat - * categories. + * The number of categories and the number of values must be the same. Does not support repeat categories. */ public class CategoryDataSeriesMap extends AbstractMapBasedCategoryDataSeries { @@ -35,8 +34,7 @@ public class CategoryDataSeriesMap extends AbstractMapBasedCategoryDataSeries { /** * Creates an instance of CategoryDataSeriesMap, which maps a category to it's numerical value. *

    - * The number of categories and the number of values must be the same. Does not support null or - * repeat categories. + * The number of categories and the number of values must be the same. Does not support null or repeat categories. * * @param axes {@link AxesImpl} on which this dataset is being plotted * @param id data series id @@ -44,25 +42,25 @@ public class CategoryDataSeriesMap extends AbstractMapBasedCategoryDataSeries { * @param categories categorical data * @param values numerical data * @param type of the categorical data - * @throws IllegalArgumentException {@code categories} and {@code values} must not be null - * {@code categories} and {@code values} must have equal sizes - * @throws UnsupportedOperationException {@code categories} must not contain null values - * {@code categories} must not contain repeat values + * @throws IllegalArgumentException {@code categories} and {@code values} must not be null {@code categories} and + * {@code values} must have equal sizes + * @throws UnsupportedOperationException {@code categories} must not contain null values {@code categories} must not + * contain repeat values */ public > CategoryDataSeriesMap(final AxesImpl axes, - final int id, - final Comparable name, - final IndexableData categories, - final IndexableNumericData values) { + final int id, + final Comparable name, + final IndexableData categories, + final IndexableNumericData values) { this(axes, id, name, categories, values, null); } public > CategoryDataSeriesMap(final AxesImpl axes, - final int id, - final Comparable name, - final IndexableData categories, - final IndexableNumericData values, - final AbstractCategoryDataSeries series) { + final int id, + final Comparable name, + final IndexableData categories, + final IndexableNumericData values, + final AbstractCategoryDataSeries series) { super(axes, id, name, series); if (categories == null || values == null) { @@ -70,8 +68,7 @@ public > CategoryDataSeriesMap(final AxesImpl axes, } if 
(categories.size() != values.size()) { - throw new PlotIllegalArgumentException("Categories and Values lengths do not match", - this); + throw new PlotIllegalArgumentException("Categories and Values lengths do not match", this); } for (int i = 0; i < categories.size(); i++) { @@ -79,10 +76,8 @@ public > CategoryDataSeriesMap(final AxesImpl axes, category = category == null ? INSTANCE : category; if (data.containsKey(category)) { - throw new PlotUnsupportedOperationException( - "Category value repeated multiple times in dataset: series=" + name - + "category=" + categories.get(i), - this); + throw new PlotUnsupportedOperationException("Category value repeated multiple times in dataset: series=" + + name + "category=" + categories.get(i), this); } final double value = values.get(i); diff --git a/Plot/src/main/java/io/deephaven/db/plot/datasets/category/CategoryDataSeriesSwappableTableMap.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/category/CategoryDataSeriesSwappableTableMap.java index c81fb41a0b7..7feab47d9c1 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/datasets/category/CategoryDataSeriesSwappableTableMap.java +++ b/Plot/src/main/java/io/deephaven/db/plot/datasets/category/CategoryDataSeriesSwappableTableMap.java @@ -15,9 +15,8 @@ /** * A dataset for {@link SwappableTable} based categorical data. 
*/ -public class CategoryDataSeriesSwappableTableMap - extends AbstractSwappableTableBasedCategoryDataSeries - implements CategorySwappableTableDataSeriesInternal, TableSnapshotSeries { +public class CategoryDataSeriesSwappableTableMap extends AbstractSwappableTableBasedCategoryDataSeries + implements CategorySwappableTableDataSeriesInternal, TableSnapshotSeries { private static final long serialVersionUID = 2L; private transient Table localTable; @@ -38,25 +37,24 @@ public class CategoryDataSeriesSwappableTableMap * @param categories discrete data column in {@code swappableTable} * @param values continuous data column in {@code swappableTable} * @param type of the categorical data - * @throws io.deephaven.db.plot.errors.PlotIllegalArgumentException {@code chart}, - * {@code swappableTable}, {@code categories}, and {@code values} may not be null. - * @throws RuntimeException {@code categories} column must be {@link Comparable} {@code values} - * column must be numeric + * @throws io.deephaven.db.plot.errors.PlotIllegalArgumentException {@code chart}, {@code swappableTable}, + * {@code categories}, and {@code values} may not be null. 
+ * @throws RuntimeException {@code categories} column must be {@link Comparable} {@code values} column must be + * numeric */ public CategoryDataSeriesSwappableTableMap(final AxesImpl axes, - final int id, - final Comparable name, - final SwappableTable swappableTable, - final String categories, - final String values) { + final int id, + final Comparable name, + final SwappableTable swappableTable, + final String categories, + final String values) { super(axes, id, name); ArgumentValidations.assertNotNull(axes, "axes", getPlotInfo()); ArgumentValidations.assertNotNull(swappableTable, "swappableTable", getPlotInfo()); - ArgumentValidations.assertInstance(swappableTable.getTableDefinition(), categories, - Comparable.class, "Invalid data type in category column: column=" + categories, - getPlotInfo()); + ArgumentValidations.assertInstance(swappableTable.getTableDefinition(), categories, Comparable.class, + "Invalid data type in category column: column=" + categories, getPlotInfo()); ArgumentValidations.assertIsNumericOrTime(swappableTable.getTableDefinition(), values, - "Invalid data type in data column: column=" + values, getPlotInfo()); + "Invalid data type in data column: column=" + values, getPlotInfo()); this.swappableTable = swappableTable; this.categoryCol = categories; @@ -70,8 +68,7 @@ public CategoryDataSeriesSwappableTableMap(final AxesImpl * @param series series to copy. * @param axes new axes to use. 
*/ - private CategoryDataSeriesSwappableTableMap(final CategoryDataSeriesSwappableTableMap series, - final AxesImpl axes) { + private CategoryDataSeriesSwappableTableMap(final CategoryDataSeriesSwappableTableMap series, final AxesImpl axes) { super(series, axes); this.swappableTable = series.swappableTable; diff --git a/Plot/src/main/java/io/deephaven/db/plot/datasets/category/CategoryDataSeriesTableMap.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/category/CategoryDataSeriesTableMap.java index 2bff45ed653..504055c3c10 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/datasets/category/CategoryDataSeriesTableMap.java +++ b/Plot/src/main/java/io/deephaven/db/plot/datasets/category/CategoryDataSeriesTableMap.java @@ -17,7 +17,7 @@ * A dataset for table-based categorical data. */ public class CategoryDataSeriesTableMap extends AbstractTableBasedCategoryDataSeries - implements CategoryTableDataSeriesInternal, TableSnapshotSeries { + implements CategoryTableDataSeriesInternal, TableSnapshotSeries { private static final long serialVersionUID = 2L; private final TableHandle tableHandle; @@ -38,26 +38,24 @@ public class CategoryDataSeriesTableMap extends AbstractTableBasedCategoryDataSe * @param categories column in the underlying table containing the categorical data * @param values column in the underlying table containing the numerical data * @param type of the categorical data - * @throws RequirementFailure {@code chart}, {@code tableHandle}, {@code categories}, and - * {@code values} must not be null - * @throws RuntimeException {@code categories} column must be either time, - * char/{@link Character}, {@link Comparable}, or numeric {@code values} column must be - * numeric + * @throws RequirementFailure {@code chart}, {@code tableHandle}, {@code categories}, and {@code values} must not be + * null + * @throws RuntimeException {@code categories} column must be either time, char/{@link Character}, + * {@link Comparable}, or numeric {@code values} column 
must be numeric */ public CategoryDataSeriesTableMap(final AxesImpl axes, - final int id, - final Comparable name, - final TableHandle tableHandle, - final String categories, - final String values) { + final int id, + final Comparable name, + final TableHandle tableHandle, + final String categories, + final String values) { super(axes, id, name); ArgumentValidations.assertNotNull(axes, "axes", getPlotInfo()); ArgumentValidations.assertNotNull(tableHandle, "table", getPlotInfo()); - ArgumentValidations.assertIsNumericOrTimeOrCharOrComparableInstance( - tableHandle.getFinalTableDefinition(), categories, - "Invalid data type in category column: column=" + categories, getPlotInfo()); + ArgumentValidations.assertIsNumericOrTimeOrCharOrComparableInstance(tableHandle.getFinalTableDefinition(), + categories, "Invalid data type in category column: column=" + categories, getPlotInfo()); ArgumentValidations.assertIsNumericOrTime(tableHandle.getFinalTableDefinition(), values, - "Invalid data type in data column: column=" + values, getPlotInfo()); + "Invalid data type in data column: column=" + values, getPlotInfo()); this.tableHandle = tableHandle; this.categoryCol = categories; @@ -71,8 +69,7 @@ public CategoryDataSeriesTableMap(final AxesImpl axes, * @param series series to copy. * @param axes new axes to use. 
*/ - private CategoryDataSeriesTableMap(final CategoryDataSeriesTableMap series, - final AxesImpl axes) { + private CategoryDataSeriesTableMap(final CategoryDataSeriesTableMap series, final AxesImpl axes) { super(series, axes); this.tableHandle = series.tableHandle; diff --git a/Plot/src/main/java/io/deephaven/db/plot/datasets/categoryerrorbar/CategoryErrorBarDataSeriesInternal.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/categoryerrorbar/CategoryErrorBarDataSeriesInternal.java index c041ce0cc05..730da4c8459 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/datasets/categoryerrorbar/CategoryErrorBarDataSeriesInternal.java +++ b/Plot/src/main/java/io/deephaven/db/plot/datasets/categoryerrorbar/CategoryErrorBarDataSeriesInternal.java @@ -9,8 +9,7 @@ /** * An {@link CategoryDataSeriesInternal} with error bars. */ -public interface CategoryErrorBarDataSeriesInternal - extends CategoryErrorBarDataSeries, CategoryDataSeriesInternal { +public interface CategoryErrorBarDataSeriesInternal extends CategoryErrorBarDataSeries, CategoryDataSeriesInternal { @Override default boolean drawYError() { return true; diff --git a/Plot/src/main/java/io/deephaven/db/plot/datasets/categoryerrorbar/CategoryErrorBarDataSeriesKernel.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/categoryerrorbar/CategoryErrorBarDataSeriesKernel.java index b294f030d8c..b420d5205a8 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/datasets/categoryerrorbar/CategoryErrorBarDataSeriesKernel.java +++ b/Plot/src/main/java/io/deephaven/db/plot/datasets/categoryerrorbar/CategoryErrorBarDataSeriesKernel.java @@ -10,9 +10,9 @@ import java.util.*; /** - * The core of the Error Bar Category date series update and indexing. This is used by the regular - * category data series to both update the values of existing categories as well as to ensure that - * new categories are maintained in order of the original table. + * The core of the Error Bar Category date series update and indexing. 
This is used by the regular category data series + * to both update the values of existing categories as well as to ensure that new categories are maintained in order of + * the original table. */ @SuppressWarnings("rawtypes") public class CategoryErrorBarDataSeriesKernel { @@ -53,10 +53,10 @@ public class CategoryErrorBarDataSeriesKernel { private double yMax; public CategoryErrorBarDataSeriesKernel(@NotNull String categoryCol, - @NotNull String valueColumn, - @NotNull String errorBarLowColumn, - @NotNull String errorBarHighColumn, - @NotNull PlotInfo plotInfo) { + @NotNull String valueColumn, + @NotNull String errorBarLowColumn, + @NotNull String errorBarHighColumn, + @NotNull PlotInfo plotInfo) { ArgumentValidations.assertNotNull(categories, "categories", plotInfo); ArgumentValidations.assertNotNull(valueColumn, "values", plotInfo); ArgumentValidations.assertNotNull(errorBarLowColumn, "yLow", plotInfo); @@ -108,8 +108,8 @@ public synchronized Number getValue(final Comparable category) { } /** - * Get the index key of the category within the original dataset. This can be used to enforce a - * global ordering of a MultiSeries cat plot. + * Get the index key of the category within the original dataset. This can be used to enforce a global ordering of a + * MultiSeries cat plot. * * @param category The category to locate. * @return the key of the specified category within the original data set. or -1 if not present. 
diff --git a/Plot/src/main/java/io/deephaven/db/plot/datasets/categoryerrorbar/CategoryErrorBarDataSeriesMap.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/categoryerrorbar/CategoryErrorBarDataSeriesMap.java index 05a4cebfb41..083d20ab318 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/datasets/categoryerrorbar/CategoryErrorBarDataSeriesMap.java +++ b/Plot/src/main/java/io/deephaven/db/plot/datasets/categoryerrorbar/CategoryErrorBarDataSeriesMap.java @@ -23,11 +23,10 @@ /** * A dataset for categorical data which maps a category to it's numerical value. *

    - * The number of categories and the number of values must be the same. Does not support repeat - * categories. + * The number of categories and the number of values must be the same. Does not support repeat categories. */ public class CategoryErrorBarDataSeriesMap extends AbstractMapBasedCategoryDataSeries - implements CategoryErrorBarDataSeriesInternal { + implements CategoryErrorBarDataSeriesInternal { private static final long serialVersionUID = 3326261675883932559L; private final Map data = new LinkedHashMap<>(); @@ -40,8 +39,7 @@ public class CategoryErrorBarDataSeriesMap extends AbstractMapBasedCategoryDataS /** * Creates an instance of CategoryDataSeriesMap, which maps a category to it's numerical value. *

    - * The number of categories and the number of values must be the same. Does not support null or - * repeat categories. + * The number of categories and the number of values must be the same. Does not support null or repeat categories. * * @param axes {@link AxesImpl} on which this dataset is being plotted * @param id data series id @@ -51,39 +49,37 @@ public class CategoryErrorBarDataSeriesMap extends AbstractMapBasedCategoryDataS * @param yLow low error bar data * @param yHigh high error bar data * @param type of the categorical data - * @throws IllegalArgumentException {@code categories} and {@code values} must not be null - * {@code categories} and {@code values} must have equal sizes - * @throws UnsupportedOperationException {@code categories} must not contain null values - * {@code categories} must not contain repeat values + * @throws IllegalArgumentException {@code categories} and {@code values} must not be null {@code categories} and + * {@code values} must have equal sizes + * @throws UnsupportedOperationException {@code categories} must not contain null values {@code categories} must not + * contain repeat values */ public CategoryErrorBarDataSeriesMap(final AxesImpl axes, - final int id, - final Comparable name, - final IndexableData categories, - final IndexableNumericData y, - final IndexableNumericData yLow, - final IndexableNumericData yHigh) { + final int id, + final Comparable name, + final IndexableData categories, + final IndexableNumericData y, + final IndexableNumericData yLow, + final IndexableNumericData yHigh) { this(axes, id, name, categories, y, yLow, yHigh, null); } public CategoryErrorBarDataSeriesMap(final AxesImpl axes, - final int id, - final Comparable name, - final IndexableData categories, - final IndexableNumericData y, - final IndexableNumericData yLow, - final IndexableNumericData yHigh, - final AbstractCategoryDataSeries series) { + final int id, + final Comparable name, + final IndexableData categories, + final 
IndexableNumericData y, + final IndexableNumericData yLow, + final IndexableNumericData yHigh, + final AbstractCategoryDataSeries series) { super(axes, id, name, series); if (categories == null || y == null || yLow == null || yHigh == null) { throw new PlotIllegalArgumentException("Null input array", this); } - if (categories.size() != y.size() || y.size() != yLow.size() - || yLow.size() != yHigh.size()) { - throw new PlotIllegalArgumentException("Categories and values lengths do not match", - this); + if (categories.size() != y.size() || y.size() != yLow.size() || yLow.size() != yHigh.size()) { + throw new PlotIllegalArgumentException("Categories and values lengths do not match", this); } for (int i = 0; i < categories.size(); i++) { @@ -91,10 +87,8 @@ public CategoryErrorBarDataSeriesMap(final AxesImpl axes, category = category == null ? INSTANCE : category; if (data.containsKey(category)) { - throw new PlotUnsupportedOperationException( - "Category value repeated multiple times in dataset: series=" + name - + "category=" + categories.get(i), - this); + throw new PlotUnsupportedOperationException("Category value repeated multiple times in dataset: series=" + + name + "category=" + categories.get(i), this); } setValueIndexed(category, y.get(i), data, i); @@ -109,8 +103,7 @@ public CategoryErrorBarDataSeriesMap(final AxesImpl axes, * @param series series to copy. * @param axes new axes to use. 
*/ - private CategoryErrorBarDataSeriesMap(final CategoryErrorBarDataSeriesMap series, - final AxesImpl axes) { + private CategoryErrorBarDataSeriesMap(final CategoryErrorBarDataSeriesMap series, final AxesImpl axes) { super(series, axes); this.data.putAll(series.data); this.dataToYStart.putAll(series.dataToYStart); @@ -158,7 +151,7 @@ public Number getEndY(final Comparable category) { } private void setValueIndexed(final Comparable category, final Number value, - final Map data, int index) { + final Map data, int index) { if (value == null) { data.remove(category); } else { @@ -169,8 +162,7 @@ private void setValueIndexed(final Comparable category, final Number value, yMax = PlotUtils.maxIgnoreNaN(yMax, value == null ? Double.NaN : value.doubleValue()); } - private void setValue(final Comparable category, final Number value, - final Map data) { + private void setValue(final Comparable category, final Number value, final Map data) { if (value == null) { data.remove(category); } else { diff --git a/Plot/src/main/java/io/deephaven/db/plot/datasets/categoryerrorbar/CategoryErrorBarDataSeriesSwappableTableMap.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/categoryerrorbar/CategoryErrorBarDataSeriesSwappableTableMap.java index 3be28e56151..79b27b08484 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/datasets/categoryerrorbar/CategoryErrorBarDataSeriesSwappableTableMap.java +++ b/Plot/src/main/java/io/deephaven/db/plot/datasets/categoryerrorbar/CategoryErrorBarDataSeriesSwappableTableMap.java @@ -17,9 +17,8 @@ /** * A dataset for {@link SwappableTable} based categorical data. 
*/ -public class CategoryErrorBarDataSeriesSwappableTableMap extends - AbstractSwappableTableBasedCategoryDataSeries implements CategoryErrorBarDataSeriesInternal, - CategorySwappableTableDataSeriesInternal, TableSnapshotSeries { +public class CategoryErrorBarDataSeriesSwappableTableMap extends AbstractSwappableTableBasedCategoryDataSeries + implements CategoryErrorBarDataSeriesInternal, CategorySwappableTableDataSeriesInternal, TableSnapshotSeries { private static final long serialVersionUID = 2L; private transient Table localTable; @@ -42,33 +41,30 @@ public class CategoryErrorBarDataSeriesSwappableTableMap extends * @param categories discrete data column in {@code swappableTable} * @param valueCol continuous data column in {@code swappableTable} * @param type of the categorical data - * @throws io.deephaven.db.plot.errors.PlotIllegalArgumentException {@code chart}, - * {@code swappableTable}, {@code categories}, and {@code values} may not be null. - * @throws RuntimeException {@code categories} column must be {@link Comparable} {@code values} - * column must be numeric + * @throws io.deephaven.db.plot.errors.PlotIllegalArgumentException {@code chart}, {@code swappableTable}, + * {@code categories}, and {@code values} may not be null. 
+ * @throws RuntimeException {@code categories} column must be {@link Comparable} {@code values} column must be + * numeric */ public CategoryErrorBarDataSeriesSwappableTableMap(final AxesImpl axes, - final int id, - final Comparable name, - final SwappableTable swappableTable, - final String categories, - final String valueCol, - final String errorBarLowCol, - final String errorBarHighCol) { + final int id, + final Comparable name, + final SwappableTable swappableTable, + final String categories, + final String valueCol, + final String errorBarLowCol, + final String errorBarHighCol) { super(axes, id, name); ArgumentValidations.assertNotNull(axes, "axes", getPlotInfo()); ArgumentValidations.assertNotNull(swappableTable, "swappableTable", getPlotInfo()); - ArgumentValidations.assertInstance(swappableTable.getTableDefinition(), categories, - Comparable.class, "Invalid data type in category column: column=" + categories, - getPlotInfo()); + ArgumentValidations.assertInstance(swappableTable.getTableDefinition(), categories, Comparable.class, + "Invalid data type in category column: column=" + categories, getPlotInfo()); ArgumentValidations.assertIsNumericOrTime(swappableTable.getTableDefinition(), valueCol, - "Invalid data type in data column: column=" + valueCol, getPlotInfo()); - ArgumentValidations.assertIsNumericOrTime(swappableTable.getTableDefinition(), - errorBarLowCol, "Invalid data type in data column: column=" + errorBarLowCol, - getPlotInfo()); - ArgumentValidations.assertIsNumericOrTime(swappableTable.getTableDefinition(), - errorBarHighCol, "Invalid data type in data column: column=" + errorBarHighCol, - getPlotInfo()); + "Invalid data type in data column: column=" + valueCol, getPlotInfo()); + ArgumentValidations.assertIsNumericOrTime(swappableTable.getTableDefinition(), errorBarLowCol, + "Invalid data type in data column: column=" + errorBarLowCol, getPlotInfo()); + ArgumentValidations.assertIsNumericOrTime(swappableTable.getTableDefinition(), 
errorBarHighCol, + "Invalid data type in data column: column=" + errorBarHighCol, getPlotInfo()); this.swappableTable = swappableTable; this.categoryCol = categories; @@ -76,8 +72,8 @@ public CategoryErrorBarDataSeriesSwappableTableMap(final this.errorBarLowCol = errorBarLowCol; this.errorBarHighCol = errorBarHighCol; - this.kernel = new CategoryErrorBarDataSeriesKernel(categories, valueCol, errorBarLowCol, - errorBarHighCol, getPlotInfo()); + this.kernel = new CategoryErrorBarDataSeriesKernel(categories, valueCol, errorBarLowCol, errorBarHighCol, + getPlotInfo()); } /** @@ -86,8 +82,8 @@ public CategoryErrorBarDataSeriesSwappableTableMap(final * @param series series to copy. * @param axes new axes to use. */ - private CategoryErrorBarDataSeriesSwappableTableMap( - final CategoryErrorBarDataSeriesSwappableTableMap series, final AxesImpl axes) { + private CategoryErrorBarDataSeriesSwappableTableMap(final CategoryErrorBarDataSeriesSwappableTableMap series, + final AxesImpl axes) { super(series, axes); this.swappableTable = series.swappableTable; @@ -95,8 +91,8 @@ private CategoryErrorBarDataSeriesSwappableTableMap( this.valueCol = series.valueCol; this.errorBarLowCol = series.errorBarLowCol; this.errorBarHighCol = series.errorBarHighCol; - this.kernel = new CategoryErrorBarDataSeriesKernel(categoryCol, valueCol, errorBarLowCol, - errorBarHighCol, getPlotInfo()); + this.kernel = new CategoryErrorBarDataSeriesKernel(categoryCol, valueCol, errorBarLowCol, errorBarHighCol, + getPlotInfo()); } @Override diff --git a/Plot/src/main/java/io/deephaven/db/plot/datasets/categoryerrorbar/CategoryErrorBarDataSeriesTableMap.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/categoryerrorbar/CategoryErrorBarDataSeriesTableMap.java index ceecf455f23..e68385e65bd 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/datasets/categoryerrorbar/CategoryErrorBarDataSeriesTableMap.java +++ 
b/Plot/src/main/java/io/deephaven/db/plot/datasets/categoryerrorbar/CategoryErrorBarDataSeriesTableMap.java @@ -15,7 +15,7 @@ import java.util.Collection; public class CategoryErrorBarDataSeriesTableMap extends AbstractTableBasedCategoryDataSeries - implements CategoryErrorBarDataSeriesInternal, CategoryTableDataSeriesInternal, SeriesInternal { + implements CategoryErrorBarDataSeriesInternal, CategoryTableDataSeriesInternal, SeriesInternal { private static final long serialVersionUID = 2L; private final TableHandle tableHandle; @@ -37,46 +37,40 @@ public class CategoryErrorBarDataSeriesTableMap extends AbstractTableBasedCatego * @param tableHandle table data * @param categoryCol discrete data column in {@code tableHandle} * @param valueCol continuous data column in {@code tableHandle} - * @param errorBarLowCol column in {@code tableHandle} that holds the low whisker value in the y - * direction - * @param errorBarHighCol column in {@code tableHandle} that holds the high whisker value in the - * y direction - * @throws io.deephaven.base.verify.RequirementFailure {@code axes}, {@code tableHandle}, - * {@code categoryCol}, {@code values} {@code yLow}, and {@code yHigh} may not be null. - * @throws RuntimeException {@code categoryCol} column must be {@link Comparable} {@code values} - * column must be numeric {@code yLow} column must be numeric {@code yHigh} column must - * be numeric + * @param errorBarLowCol column in {@code tableHandle} that holds the low whisker value in the y direction + * @param errorBarHighCol column in {@code tableHandle} that holds the high whisker value in the y direction + * @throws io.deephaven.base.verify.RequirementFailure {@code axes}, {@code tableHandle}, {@code categoryCol}, + * {@code values} {@code yLow}, and {@code yHigh} may not be null. 
+ * @throws RuntimeException {@code categoryCol} column must be {@link Comparable} {@code values} column must be + * numeric {@code yLow} column must be numeric {@code yHigh} column must be numeric */ public CategoryErrorBarDataSeriesTableMap(final AxesImpl axes, - final int id, - final Comparable name, - final TableHandle tableHandle, - final String categoryCol, - final String valueCol, - final String errorBarLowCol, - final String errorBarHighCol) { + final int id, + final Comparable name, + final TableHandle tableHandle, + final String categoryCol, + final String valueCol, + final String errorBarLowCol, + final String errorBarHighCol) { super(axes, id, name); ArgumentValidations.assertNotNull(axes, "axes", getPlotInfo()); ArgumentValidations.assertNotNull(tableHandle, "table", getPlotInfo()); - ArgumentValidations.assertIsNumericOrTimeOrCharOrComparableInstance( - tableHandle.getFinalTableDefinition(), categoryCol, - "Invalid data type in category column: column=" + categoryCol, getPlotInfo()); + ArgumentValidations.assertIsNumericOrTimeOrCharOrComparableInstance(tableHandle.getFinalTableDefinition(), + categoryCol, "Invalid data type in category column: column=" + categoryCol, getPlotInfo()); ArgumentValidations.assertIsNumericOrTime(tableHandle.getFinalTableDefinition(), valueCol, - "Invalid data type in data column: column=" + valueCol, getPlotInfo()); - ArgumentValidations.assertIsNumericOrTime(tableHandle.getFinalTableDefinition(), - errorBarLowCol, "Invalid data type in data column: column=" + errorBarLowCol, - getPlotInfo()); - ArgumentValidations.assertIsNumericOrTime(tableHandle.getFinalTableDefinition(), - errorBarHighCol, "Invalid data type in data column: column=" + errorBarHighCol, - getPlotInfo()); + "Invalid data type in data column: column=" + valueCol, getPlotInfo()); + ArgumentValidations.assertIsNumericOrTime(tableHandle.getFinalTableDefinition(), errorBarLowCol, + "Invalid data type in data column: column=" + errorBarLowCol, getPlotInfo()); + 
ArgumentValidations.assertIsNumericOrTime(tableHandle.getFinalTableDefinition(), errorBarHighCol, + "Invalid data type in data column: column=" + errorBarHighCol, getPlotInfo()); this.tableHandle = tableHandle; this.categoryCol = categoryCol; this.valueCol = valueCol; this.errorBarLowCol = errorBarLowCol; this.errorBarHighCol = errorBarHighCol; - this.kernel = new CategoryErrorBarDataSeriesKernel(categoryCol, valueCol, errorBarLowCol, - errorBarHighCol, getPlotInfo()); + this.kernel = new CategoryErrorBarDataSeriesKernel(categoryCol, valueCol, errorBarLowCol, errorBarHighCol, + getPlotInfo()); } /** @@ -85,8 +79,7 @@ public CategoryErrorBarDataSeriesTableMap(final AxesImpl axes, * @param series series to copy. * @param axes new axes to use. */ - private CategoryErrorBarDataSeriesTableMap(final CategoryErrorBarDataSeriesTableMap series, - final AxesImpl axes) { + private CategoryErrorBarDataSeriesTableMap(final CategoryErrorBarDataSeriesTableMap series, final AxesImpl axes) { super(series, axes); this.tableHandle = series.tableHandle; @@ -94,8 +87,8 @@ private CategoryErrorBarDataSeriesTableMap(final CategoryErrorBarDataSeriesTable this.valueCol = series.valueCol; this.errorBarLowCol = series.errorBarLowCol; this.errorBarHighCol = series.errorBarHighCol; - this.kernel = new CategoryErrorBarDataSeriesKernel(categoryCol, valueCol, errorBarLowCol, - errorBarHighCol, getPlotInfo()); + this.kernel = new CategoryErrorBarDataSeriesKernel(categoryCol, valueCol, errorBarLowCol, errorBarHighCol, + getPlotInfo()); } @Override diff --git a/Plot/src/main/java/io/deephaven/db/plot/datasets/data/AssociativeDataSwappableTable.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/data/AssociativeDataSwappableTable.java index 61a19adfad3..cf4f0c4b740 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/datasets/data/AssociativeDataSwappableTable.java +++ b/Plot/src/main/java/io/deephaven/db/plot/datasets/data/AssociativeDataSwappableTable.java @@ -13,13 +13,13 @@ import 
java.util.Map; /** - * An {@link AssociativeData} dataset backed by a {@link SwappableTable}. Table columns hold the - * keys and associated values. + * An {@link AssociativeData} dataset backed by a {@link SwappableTable}. Table columns hold the keys and associated + * values. * * Data types are specified in construction. */ public class AssociativeDataSwappableTable - extends LiveAssociativeData { + extends LiveAssociativeData { private static final long serialVersionUID = -8997550925581334311L; private final SwappableTable swappableTable; @@ -32,40 +32,37 @@ public class AssociativeDataSwappableTable * * The {@code swappableTable} must have had a lastBy applied! * - * Keys are held in the {@code keyColumn} of {@code swappableTable}. Their associated values are - * held in the {@code valueColumn}. + * Keys are held in the {@code keyColumn} of {@code swappableTable}. Their associated values are held in the + * {@code valueColumn}. * - * The data type of the keys is specified by {@code keyColumnType}. The data type of the values - * is specified by {@code valueColumnType}. + * The data type of the keys is specified by {@code keyColumnType}. The data type of the values is specified by + * {@code valueColumnType}. * - * @throws io.deephaven.base.verify.RequirementFailure {@code swappableTable}, - * {@code keyColumn}, and {@code valueColumn} must not be null + * @throws io.deephaven.base.verify.RequirementFailure {@code swappableTable}, {@code keyColumn}, and + * {@code valueColumn} must not be null * @throws IllegalArgumentException {@code keyColumn} and {@code valueColumn} must be columns in * {@code swappableTable} - * @throws RuntimeException the specified data types must match the data types of the - * corresponding columns + * @throws RuntimeException the specified data types must match the data types of the corresponding columns * @param swappableTable table. Must have a lastBy applied. 
* @param keyColumn column in {@code swappableTable} which holds the key values - * @param valueColumn column in {@code swappableTable} which holds the values associated with - * the keys + * @param valueColumn column in {@code swappableTable} which holds the values associated with the keys * @param keyColumnType data type of the keys * @param valueColumnType data type of the values * @param plotInfo plot information */ - public AssociativeDataSwappableTable(final SwappableTable swappableTable, - final String keyColumn, final String valueColumn, final Class keyColumnType, - final Class valueColumnType, final PlotInfo plotInfo) { + public AssociativeDataSwappableTable(final SwappableTable swappableTable, final String keyColumn, + final String valueColumn, final Class keyColumnType, final Class valueColumnType, + final PlotInfo plotInfo) { super(plotInfo); this.swappableTable = swappableTable; ArgumentValidations.assertNotNull(swappableTable, "swappableTable", getPlotInfo()); ArgumentValidations.assertNotNull(keyColumn, "keyColumn", getPlotInfo()); ArgumentValidations.assertNotNull(valueColumn, "valueColumn", getPlotInfo()); - ArgumentValidations.assertColumnsInTable(swappableTable.getTableDefinition(), plotInfo, - keyColumn, valueColumn); - ArgumentValidations.assertInstance(swappableTable.getTableDefinition(), keyColumn, - keyColumnType, keyColumn + " is not of type " + keyColumnType, plotInfo); - ArgumentValidations.assertInstance(swappableTable.getTableDefinition(), valueColumn, - valueColumnType, valueColumn + " is not of type " + valueColumnType, plotInfo); + ArgumentValidations.assertColumnsInTable(swappableTable.getTableDefinition(), plotInfo, keyColumn, valueColumn); + ArgumentValidations.assertInstance(swappableTable.getTableDefinition(), keyColumn, keyColumnType, + keyColumn + " is not of type " + keyColumnType, plotInfo); + ArgumentValidations.assertInstance(swappableTable.getTableDefinition(), valueColumn, valueColumnType, + valueColumn + " is not of type 
" + valueColumnType, plotInfo); this.keyColumn = keyColumn; this.valueColumn = valueColumn; @@ -88,12 +85,12 @@ public boolean isModifiable() { @Override public void put(KEY key, VALUE value) { throw new PlotUnsupportedOperationException( - "Modifying values is not supported for AssociativeDataSwappableTable", getPlotInfo()); + "Modifying values is not supported for AssociativeDataSwappableTable", getPlotInfo()); } @Override public void putAll(Map values) { throw new PlotUnsupportedOperationException( - "Modifying values is not supported for AssociativeDataSwappableTable", getPlotInfo()); + "Modifying values is not supported for AssociativeDataSwappableTable", getPlotInfo()); } } diff --git a/Plot/src/main/java/io/deephaven/db/plot/datasets/data/AssociativeDataTable.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/data/AssociativeDataTable.java index f844e2f418d..1d907cecf95 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/datasets/data/AssociativeDataTable.java +++ b/Plot/src/main/java/io/deephaven/db/plot/datasets/data/AssociativeDataTable.java @@ -15,13 +15,11 @@ import java.util.Map; /** - * An {@link AssociativeData} dataset backed by a {@link Table}. Table columns hold the keys and the - * associated values. + * An {@link AssociativeData} dataset backed by a {@link Table}. Table columns hold the keys and the associated values. * * Data types are specified in construction. */ -public class AssociativeDataTable - extends LiveAssociativeData { +public class AssociativeDataTable extends LiveAssociativeData { private static final long serialVersionUID = -1752085070371782144L; private final TableHandle tableHandle; @@ -32,18 +30,17 @@ public class AssociativeDataTable /** * Creates an AssociativeDataSwappableTable instance. * - * Key are in the {@code keyColumn} of the table held by {@code tableHandle}. Their associated - * values are in the {@code valueColumn}. + * Key are in the {@code keyColumn} of the table held by {@code tableHandle}. 
Their associated values are in the + * {@code valueColumn}. * - * The data type of the keys is specified by {@code keyColumnType}. The data type of the values - * is specified by {@code valueColumnType}. + * The data type of the keys is specified by {@code keyColumnType}. The data type of the values is specified by + * {@code valueColumnType}. * - * @throws io.deephaven.base.verify.RequirementFailure {@code tableHandle}, {@code keyColumn}, - * and {@code valueColumn} must not be null + * @throws io.deephaven.base.verify.RequirementFailure {@code tableHandle}, {@code keyColumn}, and + * {@code valueColumn} must not be null * @throws IllegalArgumentException {@code keyColumn} and {@code valueColumn} must be columns in * {@code swappableTable} - * @throws RuntimeException the specified data types must match the data types of the - * corresponding columns + * @throws RuntimeException the specified data types must match the data types of the corresponding columns * @param tableHandle holds the underlying table * @param keyColumn column in the table which holds the key values * @param valueColumn column in the table which holds the values associated with the keys @@ -51,27 +48,25 @@ public class AssociativeDataTable * @param valueColumnType data type of the values * @param plotInfo plot information */ - public AssociativeDataTable(final TableHandle tableHandle, final String keyColumn, - final String valueColumn, final Class keyColumnType, - final Class valueColumnType, final PlotInfo plotInfo) { + public AssociativeDataTable(final TableHandle tableHandle, final String keyColumn, final String valueColumn, + final Class keyColumnType, final Class valueColumnType, final PlotInfo plotInfo) { super(plotInfo); ArgumentValidations.assertNotNull(tableHandle, "tableHandle", getPlotInfo()); ArgumentValidations.assertNotNull(keyColumn, "keyColumn", getPlotInfo()); ArgumentValidations.assertNotNull(valueColumn, "valueColumn", getPlotInfo()); 
ArgumentValidations.assertColumnsInTable(tableHandle, plotInfo, keyColumn, valueColumn); ArgumentValidations.assertInstance(tableHandle.getTable(), keyColumn, keyColumnType, - keyColumn + " is not of type " + keyColumnType, plotInfo); + keyColumn + " is not of type " + keyColumnType, plotInfo); ArgumentValidations.assertInstance(tableHandle.getTable(), valueColumn, valueColumnType, - valueColumn + " is not of type " + valueColumnType, plotInfo); + valueColumn + " is not of type " + valueColumnType, plotInfo); this.tableHandle = tableHandle; - tableHandle.setTable( - PlotUtils.createCategoryTable(tableHandle.getTable(), new String[] {keyColumn})); // todo - // should - // this - // be - // done - // here + tableHandle.setTable(PlotUtils.createCategoryTable(tableHandle.getTable(), new String[] {keyColumn})); // todo + // should + // this + // be + // done + // here this.keyColumn = keyColumn; this.valueColumn = valueColumn; } @@ -92,14 +87,14 @@ public boolean isModifiable() { @Override public void put(KEY key, VALUE value) { - throw new PlotUnsupportedOperationException( - "Modifying values is not supported for AssociativeDataTable", getPlotInfo()); + throw new PlotUnsupportedOperationException("Modifying values is not supported for AssociativeDataTable", + getPlotInfo()); } @Override public void putAll(Map values) { - throw new PlotUnsupportedOperationException( - "Modifying values is not supported for AssociativeDataTable", getPlotInfo()); + throw new PlotUnsupportedOperationException("Modifying values is not supported for AssociativeDataTable", + getPlotInfo()); } } diff --git a/Plot/src/main/java/io/deephaven/db/plot/datasets/data/AssociativeDataWithDefault.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/data/AssociativeDataWithDefault.java index 93f7621375b..8e5994206f8 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/datasets/data/AssociativeDataWithDefault.java +++ 
b/Plot/src/main/java/io/deephaven/db/plot/datasets/data/AssociativeDataWithDefault.java @@ -10,8 +10,8 @@ import java.util.Map; /** - * Dataset which has values associated with keys. When the dataset has no value associated with a - * given key, it falls back to a specified default value. + * Dataset which has values associated with keys. When the dataset has no value associated with a given key, it falls + * back to a specified default value. */ public class AssociativeDataWithDefault extends AssociativeData { @@ -80,8 +80,7 @@ public boolean isModifiable() { @Override public void put(final KEY key, final VALUE value) { if (!isModifiable()) { - throw new PlotRuntimeException("AssociativeDataWithDefault is unmodifiable", - getPlotInfo()); + throw new PlotRuntimeException("AssociativeDataWithDefault is unmodifiable", getPlotInfo()); } specificValues.put(key, value); @@ -90,8 +89,7 @@ public void put(final KEY key, final VALUE value) { @Override public void putAll(final Map values) { if (!isModifiable()) { - throw new PlotRuntimeException("AssociativeDataWithDefault is unmodifiable", - getPlotInfo()); + throw new PlotRuntimeException("AssociativeDataWithDefault is unmodifiable", getPlotInfo()); } specificValues.putAll(values); diff --git a/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableDataArrayNullCategory.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableDataArrayNullCategory.java index 9523fc5f3c2..021d02bd2dd 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableDataArrayNullCategory.java +++ b/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableDataArrayNullCategory.java @@ -10,8 +10,7 @@ import static io.deephaven.db.plot.util.NullCategory.INSTANCE; /** - * {@link IndexableData} dataset backed by an array. If the array contains a null value, return a - * NULL_CATEGORY. + * {@link IndexableData} dataset backed by an array. If the array contains a null value, return a NULL_CATEGORY. 
*/ public class IndexableDataArrayNullCategory extends IndexableData { private static final long serialVersionUID = -3605356450513219514L; diff --git a/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableDataByte.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableDataByte.java index d94a0f56782..8b62534de51 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableDataByte.java +++ b/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableDataByte.java @@ -21,8 +21,7 @@ public class IndexableDataByte extends IndexableData { /** * Creates an IndexableDataByte instance. * - * Values in {@code data} equal to {@link io.deephaven.util.QueryConstants#NULL_INT} are treated - * as null. + * Values in {@code data} equal to {@link io.deephaven.util.QueryConstants#NULL_INT} are treated as null. * * @param data data * @param plotInfo plot information diff --git a/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableDataCharacter.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableDataCharacter.java index e2ec45755e3..b760bd52fe5 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableDataCharacter.java +++ b/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableDataCharacter.java @@ -21,8 +21,7 @@ public class IndexableDataCharacter extends IndexableData { /** * Creates an IndexableDataCharacter instance. * - * Values in {@code data} equal to {@link io.deephaven.util.QueryConstants#NULL_CHAR} are - * treated as null. + * Values in {@code data} equal to {@link io.deephaven.util.QueryConstants#NULL_CHAR} are treated as null. 
* * @param data data * @param plotInfo plot information diff --git a/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableDataDBDateTime.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableDataDBDateTime.java index 04d6ec4da08..027133e6d11 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableDataDBDateTime.java +++ b/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableDataDBDateTime.java @@ -24,8 +24,7 @@ public class IndexableDataDBDateTime extends IndexableData { /** * Creates an IndexableDataDBDateTime instance. * - * Values in {@code data} equal to {@link io.deephaven.util.QueryConstants#NULL_LONG} are - * treated as null. + * Values in {@code data} equal to {@link io.deephaven.util.QueryConstants#NULL_LONG} are treated as null. * * @param data data * @param plotInfo plot information diff --git a/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableDataDouble.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableDataDouble.java index a1f5416ac9b..7b0170d29dc 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableDataDouble.java +++ b/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableDataDouble.java @@ -24,8 +24,7 @@ public class IndexableDataDouble extends IndexableData { * @param data data * @param mapNanToNull if true, Double.NaN values are mapped to null */ - public IndexableDataDouble(IndexableNumericData data, boolean mapNanToNull, - final PlotInfo plotInfo) { + public IndexableDataDouble(IndexableNumericData data, boolean mapNanToNull, final PlotInfo plotInfo) { super(plotInfo); ArgumentValidations.assertNotNull(data, "data", getPlotInfo()); @@ -42,8 +41,7 @@ public IndexableDataDouble(IndexableNumericData data, boolean mapNanToNull, * @param mapNanToNull if true, Double.NaN values are mapped to null * @param type of the data in {@code values} */ - public IndexableDataDouble(T[] values, boolean mapNanToNull, - final PlotInfo plotInfo) 
{ + public IndexableDataDouble(T[] values, boolean mapNanToNull, final PlotInfo plotInfo) { this(new IndexableNumericDataArrayNumber<>(values, plotInfo), mapNanToNull, plotInfo); } diff --git a/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableDataInfinite.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableDataInfinite.java index 384dc100e71..2408165f91f 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableDataInfinite.java +++ b/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableDataInfinite.java @@ -7,16 +7,16 @@ import org.jetbrains.annotations.NotNull; /** - * {@link IndexableData} dataset which ensures no {@link IndexOutOfBoundsException}s are thrown. - * Instead, null values are returned. + * {@link IndexableData} dataset which ensures no {@link IndexOutOfBoundsException}s are thrown. Instead, null values + * are returned. */ public class IndexableDataInfinite extends IndexableData { private static final long serialVersionUID = 492887860330671830L; private final IndexableData data; /** - * Creates an IndexableDataInfinite instance, which wraps {@code data} such that out-of-bounds - * indices return null values. + * Creates an IndexableDataInfinite instance, which wraps {@code data} such that out-of-bounds indices return null + * values. * */ public IndexableDataInfinite(@NotNull IndexableData data) { diff --git a/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableDataInteger.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableDataInteger.java index b351524b514..8b37ff12c88 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableDataInteger.java +++ b/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableDataInteger.java @@ -21,8 +21,7 @@ public class IndexableDataInteger extends IndexableData { /** * Creates an IndexableDataInteger instance. 
* - * Values in {@code data} equal to {@link io.deephaven.util.QueryConstants#NULL_INT} are treated - * as null. + * Values in {@code data} equal to {@link io.deephaven.util.QueryConstants#NULL_INT} are treated as null. * * @param data data * @param plotInfo plot information diff --git a/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableDataListNullCategory.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableDataListNullCategory.java index 4b3087fb43f..934dd025411 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableDataListNullCategory.java +++ b/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableDataListNullCategory.java @@ -12,8 +12,7 @@ import static io.deephaven.db.plot.util.NullCategory.INSTANCE; /** - * {@link IndexableData} dataset backed by an array. If the array contains a null value, return a - * NULL_CATEGORY. + * {@link IndexableData} dataset backed by an array. If the array contains a null value, return a NULL_CATEGORY. */ public class IndexableDataListNullCategory extends IndexableData { private static final long serialVersionUID = -3605356450513219514L; diff --git a/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableDataSwappableTable.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableDataSwappableTable.java index bcdd09a3a18..4a10122de7a 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableDataSwappableTable.java +++ b/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableDataSwappableTable.java @@ -9,8 +9,8 @@ import io.deephaven.db.plot.util.tables.SwappableTable; /** - * {@link IndexableData} dataset whose data is a column in a {@link SwappableTable} and whose - * indices are the row numbers of the column. + * {@link IndexableData} dataset whose data is a column in a {@link SwappableTable} and whose indices are the row + * numbers of the column. 
*/ public class IndexableDataSwappableTable extends LiveIndexableData { private static final long serialVersionUID = -7007547039766134485L; @@ -18,24 +18,22 @@ public class IndexableDataSwappableTable extends LiveIndexableData { private final String column; /** - * Creates an IndexableDataSwappableTable instance. The data is a {@code column} in the - * {@code swappableTable}. Indices are the row numbers of the column. + * Creates an IndexableDataSwappableTable instance. The data is a {@code column} in the {@code swappableTable}. + * Indices are the row numbers of the column. * - * @throws io.deephaven.base.verify.RequirementFailure {@code swappableTable} and {@code column} - * must not be null + * @throws io.deephaven.base.verify.RequirementFailure {@code swappableTable} and {@code column} must not be null * @throws IllegalArgumentException {@code column} must be a column in {@code swappableTable} * @param swappableTable swappable table * @param column column of {@code swappableTable} holding the data values * @param plotInfo plot information */ public IndexableDataSwappableTable(final SwappableTable swappableTable, final String column, - final PlotInfo plotInfo) { + final PlotInfo plotInfo) { super(plotInfo); this.swappableTable = swappableTable; ArgumentValidations.assertNotNull(swappableTable, "swappableTable", getPlotInfo()); ArgumentValidations.assertNotNull(column, "column", getPlotInfo()); - ArgumentValidations.assertColumnsInTable(swappableTable.getTableDefinition(), plotInfo, - column); + ArgumentValidations.assertColumnsInTable(swappableTable.getTableDefinition(), plotInfo, column); this.column = column; } diff --git a/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableDataSwappableTableDouble.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableDataSwappableTableDouble.java index 433368d114c..27b6ed50393 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableDataSwappableTableDouble.java +++ 
b/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableDataSwappableTableDouble.java @@ -10,8 +10,8 @@ import io.deephaven.db.plot.util.tables.SwappableTable; /** - * {@link IndexableData} dataset whose data is a column in a {@link SwappableTable} and whose - * indices are the row numbers of the column. + * {@link IndexableData} dataset whose data is a column in a {@link SwappableTable} and whose indices are the row + * numbers of the column. * * Data values are converted to doubles. */ @@ -20,22 +20,20 @@ public class IndexableDataSwappableTableDouble extends IndexableDataSwappableTab private static final long serialVersionUID = 2719767692871468219L; /** - * Creates an IndexableDataSwappableTable instance. The data is a {@code column} in the - * {@code swappableTable}. Indices are the row numbers of the column. + * Creates an IndexableDataSwappableTable instance. The data is a {@code column} in the {@code swappableTable}. + * Indices are the row numbers of the column. * - * @throws io.deephaven.base.verify.RequirementFailure {@code swappableTable} and {@code column} - * must not be null + * @throws io.deephaven.base.verify.RequirementFailure {@code swappableTable} and {@code column} must not be null * @throws IllegalArgumentException {@code column} must be a column in {@code swappableTable} * @throws RuntimeException data in {@code column} must be numeric * @param swappableTable swappable table * @param column column of {@code swappableTable} holding the data values * @param plotInfo plot information */ - public IndexableDataSwappableTableDouble(SwappableTable swappableTable, String column, - final PlotInfo plotInfo) { + public IndexableDataSwappableTableDouble(SwappableTable swappableTable, String column, final PlotInfo plotInfo) { super(swappableTable, column, plotInfo); ArgumentValidations.assertIsNumeric(swappableTable.getTableDefinition(), column, - "Non-numeric column cannot be converted to a double. 
column=" + column, plotInfo); + "Non-numeric column cannot be converted to a double. column=" + column, plotInfo); } @Override diff --git a/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableDataTable.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableDataTable.java index 191aac98e58..28aecffa216 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableDataTable.java +++ b/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableDataTable.java @@ -9,8 +9,7 @@ import io.deephaven.db.plot.util.tables.ColumnHandlerFactory; /** - * {@link IndexableData} dataset whose data is a column in a table and whose indices are the row - * numbers of the column. + * {@link IndexableData} dataset whose data is a column in a table and whose indices are the row numbers of the column. */ public class IndexableDataTable extends LiveIndexableData { private static final long serialVersionUID = 8328713615740315451L; @@ -23,8 +22,7 @@ public class IndexableDataTable extends LiveIndexableData { * @param columnHandler data * @param plotInfo plot information */ - public IndexableDataTable(final ColumnHandlerFactory.ColumnHandler columnHandler, - final PlotInfo plotInfo) { + public IndexableDataTable(final ColumnHandlerFactory.ColumnHandler columnHandler, final PlotInfo plotInfo) { super(plotInfo); ArgumentValidations.assertNotNull(columnHandler, "columnHandler", getPlotInfo()); this.columnHandler = columnHandler; diff --git a/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableDataWithDefault.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableDataWithDefault.java index 10a4195e1b4..24f1d4e09af 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableDataWithDefault.java +++ b/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableDataWithDefault.java @@ -58,15 +58,13 @@ public T getDefaultValue() { /** * Sets this datasets indexed values. 
* - * If infinite is true, null values are returned for out-of-bounds indices. If not, an exception - * will be thrown. + * If infinite is true, null values are returned for out-of-bounds indices. If not, an exception will be thrown. * * @param specificValues data * @param infinite if this dataset should return nulls for out-of-bounds indices. * @param type of the data in {@code specificValues} */ - public void setSpecific(final IndexableData specificValues, - final boolean infinite) { + public void setSpecific(final IndexableData specificValues, final boolean infinite) { if (infinite) { this.specificValues = new IndexableDataInfinite<>(specificValues); } else { diff --git a/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableNumericDataArrayDBDateTime.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableNumericDataArrayDBDateTime.java index cd067aeb39e..a83d8cc5dce 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableNumericDataArrayDBDateTime.java +++ b/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableNumericDataArrayDBDateTime.java @@ -11,8 +11,8 @@ /** * {@link IndexableNumericData} dataset comprised of an array of {@link DBDateTime}s. * - * Date values are accessed as nanoseconds from epoch. Data conversion to double means these values - * are accurate to about 250 nanoseconds. + * Date values are accessed as nanoseconds from epoch. Data conversion to double means these values are accurate to + * about 250 nanoseconds. 
*/ public class IndexableNumericDataArrayDBDateTime extends IndexableNumericData { private static final long serialVersionUID = 2006200987348909028L; diff --git a/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableNumericDataArrayDate.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableNumericDataArrayDate.java index 26dc1fe38d9..798fd0f79ff 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableNumericDataArrayDate.java +++ b/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableNumericDataArrayDate.java @@ -12,8 +12,8 @@ /** * {@link IndexableNumericData} dataset comprised of an array of {@link Date}s. * - * Date values are accessed as nanoseconds from epoch. Data conversion to double means these values - * are accurate to about 250 nanoseconds. + * Date values are accessed as nanoseconds from epoch. Data conversion to double means these values are accurate to + * about 250 nanoseconds. */ public class IndexableNumericDataArrayDate extends IndexableNumericData { private static final long serialVersionUID = 7132588196200176807L; diff --git a/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableNumericDataArrayFloat.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableNumericDataArrayFloat.java index 51b424b0f8c..04d22e4f2f4 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableNumericDataArrayFloat.java +++ b/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableNumericDataArrayFloat.java @@ -10,8 +10,8 @@ import static io.deephaven.util.QueryConstants.NULL_FLOAT; /** - * {@link IndexableNumericData} dataset backed by an array of floats. When accessed, data values are - * converted to doubles before being returned. + * {@link IndexableNumericData} dataset backed by an array of floats. When accessed, data values are converted to + * doubles before being returned. 
*/ public class IndexableNumericDataArrayFloat extends IndexableNumericData { private static final long serialVersionUID = -1243064714255448279L; diff --git a/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableNumericDataArrayInt.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableNumericDataArrayInt.java index 4b186906711..2cfe7f777cd 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableNumericDataArrayInt.java +++ b/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableNumericDataArrayInt.java @@ -11,8 +11,8 @@ import static io.deephaven.util.QueryConstants.NULL_INT; /** - * {@link IndexableNumericData} dataset backed by an array of ints. When accessed, data values are - * converted to doubles before being returned. + * {@link IndexableNumericData} dataset backed by an array of ints. When accessed, data values are converted to doubles + * before being returned. */ public class IndexableNumericDataArrayInt extends IndexableNumericData { private static final long serialVersionUID = -980842236353746501L; diff --git a/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableNumericDataArrayLong.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableNumericDataArrayLong.java index 83f316c9e58..54cbd633e96 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableNumericDataArrayLong.java +++ b/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableNumericDataArrayLong.java @@ -10,8 +10,8 @@ import static io.deephaven.util.QueryConstants.NULL_LONG; /** - * {@link IndexableNumericData} dataset backed by an array of longs. When accessed, data values are - * converted to doubles before being returned. + * {@link IndexableNumericData} dataset backed by an array of longs. When accessed, data values are converted to doubles + * before being returned. 
*/ public class IndexableNumericDataArrayLong extends IndexableNumericData { private static final long serialVersionUID = -1725079204730360920L; diff --git a/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableNumericDataArrayNumber.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableNumericDataArrayNumber.java index 202b34833dc..0714d88b509 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableNumericDataArrayNumber.java +++ b/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableNumericDataArrayNumber.java @@ -8,8 +8,8 @@ import io.deephaven.db.plot.util.ArgumentValidations; /** - * {@link IndexableNumericData} dataset backed by an array of {@link Number}s. When accessed, data - * values are converted to doubles before being returned. + * {@link IndexableNumericData} dataset backed by an array of {@link Number}s. When accessed, data values are converted + * to doubles before being returned. */ public class IndexableNumericDataArrayNumber extends IndexableNumericData { private static final long serialVersionUID = -4587124538812025714L; diff --git a/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableNumericDataArrayShort.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableNumericDataArrayShort.java index dfc56b937e5..b83656bf220 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableNumericDataArrayShort.java +++ b/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableNumericDataArrayShort.java @@ -11,8 +11,8 @@ import static io.deephaven.util.QueryConstants.NULL_SHORT; /** - * {@link IndexableNumericData} dataset backed by an array of shorts. When accessed, data values are - * converted to doubles before being returned. + * {@link IndexableNumericData} dataset backed by an array of shorts. When accessed, data values are converted to + * doubles before being returned. 
*/ public class IndexableNumericDataArrayShort extends IndexableNumericData { private static final long serialVersionUID = 2340751903216609352L; diff --git a/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableNumericDataListNumber.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableNumericDataListNumber.java index daafd5a73c0..280ffffbb4d 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableNumericDataListNumber.java +++ b/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableNumericDataListNumber.java @@ -10,8 +10,8 @@ import java.util.List; /** - * {@link IndexableNumericData} dataset backed by a list of {@link Number}s. When accessed, data - * values are converted to doubles before being returned. + * {@link IndexableNumericData} dataset backed by a list of {@link Number}s. When accessed, data values are converted to + * doubles before being returned. */ public class IndexableNumericDataListNumber extends IndexableNumericData { private static final long serialVersionUID = -382291808039710173L; diff --git a/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableNumericDataSwappableTable.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableNumericDataSwappableTable.java index eb53d5dd269..6ace0786950 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableNumericDataSwappableTable.java +++ b/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableNumericDataSwappableTable.java @@ -9,8 +9,8 @@ import io.deephaven.db.plot.util.tables.SwappableTable; /** - * {@link IndexableData} dataset whose data is a column in a {@link SwappableTable} and whose - * indices are the row numbers of the column. + * {@link IndexableData} dataset whose data is a column in a {@link SwappableTable} and whose indices are the row + * numbers of the column. * * The column must be numeric. 
*/ @@ -20,27 +20,24 @@ public class IndexableNumericDataSwappableTable extends LiveIndexableNumericData private final String column; /** - * Creates an IndexableNumericDataSwappableTable instance. The numeric data is a {@code column} - * in the {@code swappableTable}. Indices are the row numbers of the column. + * Creates an IndexableNumericDataSwappableTable instance. The numeric data is a {@code column} in the + * {@code swappableTable}. Indices are the row numbers of the column. * - * @throws io.deephaven.base.verify.RequirementFailure {@code swappableTable} and {@code column} - * must not be null + * @throws io.deephaven.base.verify.RequirementFailure {@code swappableTable} and {@code column} must not be null * @throws IllegalArgumentException {@code column} must be a column in {@code swappableTable} * @throws RuntimeException {@code column} must be numeric * @param swappableTable swappable table * @param column column of {@code swappableTable} holding the data values * @param plotInfo plot information */ - public IndexableNumericDataSwappableTable(final SwappableTable swappableTable, - final String column, final PlotInfo plotInfo) { + public IndexableNumericDataSwappableTable(final SwappableTable swappableTable, final String column, + final PlotInfo plotInfo) { super(plotInfo); ArgumentValidations.assertNotNull(swappableTable, "swappableTable", getPlotInfo()); ArgumentValidations.assertNotNull(column, "column", getPlotInfo()); - ArgumentValidations.assertColumnsInTable(swappableTable.getTableDefinition(), plotInfo, - column); + ArgumentValidations.assertColumnsInTable(swappableTable.getTableDefinition(), plotInfo, column); ArgumentValidations.assertIsNumericOrTime(swappableTable.getTableDefinition(), column, - "Attempting to create a dataseries with a non-numeric column: column=" + column, - plotInfo); + "Attempting to create a dataseries with a non-numeric column: column=" + column, plotInfo); this.swappableTable = swappableTable; this.column = column; } diff 
--git a/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableNumericDataTable.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableNumericDataTable.java index cddecdae87e..caff7b01417 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableNumericDataTable.java +++ b/Plot/src/main/java/io/deephaven/db/plot/datasets/data/IndexableNumericDataTable.java @@ -11,8 +11,7 @@ import io.deephaven.db.plot.util.tables.TableHandle; /** - * {@link IndexableData} dataset whose data is a column in a table and whose indices are the row - * numbers of the column. + * {@link IndexableData} dataset whose data is a column in a table and whose indices are the row numbers of the column. * * The column must be numeric. */ @@ -28,8 +27,7 @@ public class IndexableNumericDataTable extends LiveIndexableNumericData { * @param columnHandler data * @param plotInfo plot information */ - public IndexableNumericDataTable(final ColumnHandlerFactory.ColumnHandler columnHandler, - final PlotInfo plotInfo) { + public IndexableNumericDataTable(final ColumnHandlerFactory.ColumnHandler columnHandler, final PlotInfo plotInfo) { super(plotInfo); ArgumentValidations.assertNotNull(columnHandler, "columnHandler", getPlotInfo()); @@ -37,26 +35,24 @@ public IndexableNumericDataTable(final ColumnHandlerFactory.ColumnHandler column if (!this.columnHandler.typeClassification().isNumeric()) { throw new PlotUnsupportedOperationException( - "Attempting to create a dataseries with a non-numeric column: column=" - + columnHandler.getColumnName(), - this); + "Attempting to create a dataseries with a non-numeric column: column=" + + columnHandler.getColumnName(), + this); } } /** - * Creates an IndexableNumericDataTable instance. The numeric data is a {@code column} in the - * table held by {@code tableHandle}. Indices are the row numbers of the column. + * Creates an IndexableNumericDataTable instance. 
The numeric data is a {@code column} in the table held by + * {@code tableHandle}. Indices are the row numbers of the column. * - * @throws io.deephaven.base.verify.RequirementFailure {@code tableHandle} and {@code column} - * must not be null + * @throws io.deephaven.base.verify.RequirementFailure {@code tableHandle} and {@code column} must not be null * @throws IllegalArgumentException {@code column} must be a column in {@code tableHandle} * @throws UnsupportedOperationException {@code column} must be numeric * @param tableHandle holds the table * @param column column of underlying table holding the data values */ - public IndexableNumericDataTable(final TableHandle tableHandle, final String column, - final PlotInfo plotInfo) { + public IndexableNumericDataTable(final TableHandle tableHandle, final String column, final PlotInfo plotInfo) { this(ColumnHandlerFactory.newNumericHandler(tableHandle, column, plotInfo), plotInfo); } diff --git a/Plot/src/main/java/io/deephaven/db/plot/datasets/data/LiveAssociativeData.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/data/LiveAssociativeData.java index 73e7f8aa0cb..4e2416b1163 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/datasets/data/LiveAssociativeData.java +++ b/Plot/src/main/java/io/deephaven/db/plot/datasets/data/LiveAssociativeData.java @@ -9,8 +9,7 @@ * @param class of the values * @param class of the value column in the table. 
Will be converted to VALUE */ -public abstract class LiveAssociativeData - extends AssociativeData { +public abstract class LiveAssociativeData extends AssociativeData { /** * @param plotInfo plot information */ diff --git a/Plot/src/main/java/io/deephaven/db/plot/datasets/histogram/DiscretizedRangeEqual.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/histogram/DiscretizedRangeEqual.java index e7a6cc91494..47da4de9a1f 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/datasets/histogram/DiscretizedRangeEqual.java +++ b/Plot/src/main/java/io/deephaven/db/plot/datasets/histogram/DiscretizedRangeEqual.java @@ -21,8 +21,8 @@ public class DiscretizedRangeEqual implements DiscretizedRange, Serializable { private final double binWidth; /** - * Creates a DiscretizedRangeEqual instance with specified {@code min} and {@code max} with - * {@code nBins} equally sized bins. + * Creates a DiscretizedRangeEqual instance with specified {@code min} and {@code max} with {@code nBins} equally + * sized bins. * * @param min minimum of the total range * @param max maximum of the total range @@ -51,7 +51,6 @@ public long index(double value) { } final long index = (long) ((value - min) / binWidth); - return value == max ? index - 1 : index; // if value is at max, it would be placed in an - // extra bin + return value == max ? 
index - 1 : index; // if value is at max, it would be placed in an extra bin } } diff --git a/Plot/src/main/java/io/deephaven/db/plot/datasets/histogram/HistogramCalculator.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/histogram/HistogramCalculator.java index 87b99be196a..ab9a760c1ea 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/datasets/histogram/HistogramCalculator.java +++ b/Plot/src/main/java/io/deephaven/db/plot/datasets/histogram/HistogramCalculator.java @@ -16,29 +16,25 @@ import static io.deephaven.db.v2.by.ComboAggregateFactory.*; /** - * Utility for calculating histogram plot information: bin locations and data frequencies within - * these bins. + * Utility for calculating histogram plot information: bin locations and data frequencies within these bins. */ public class HistogramCalculator { private static Table clean(final Table t, final String histogramColumn, final PlotInfo plotInfo, - final String... byColumns) { + final String... byColumns) { ArgumentValidations.assertIsNumeric(t, histogramColumn, "Histogram table", plotInfo); final String[] view = new String[byColumns == null ? 1 : byColumns.length + 1]; int i = 0; if (ArgumentValidations.isBoxedNumeric(t, histogramColumn, plotInfo)) { - view[i++] = "X= " + histogramColumn + "==null ? Double.NaN : " + histogramColumn - + ".doubleValue()"; + view[i++] = "X= " + histogramColumn + "==null ? Double.NaN : " + histogramColumn + ".doubleValue()"; } else if (ArgumentValidations.isPrimitiveNumeric(t, histogramColumn, plotInfo)) { - view[i++] = - "X= isNull(" + histogramColumn + ") ? Double.NaN : (double) " + histogramColumn; + view[i++] = "X= isNull(" + histogramColumn + ") ? 
Double.NaN : (double) " + histogramColumn; } else { - throw new PlotUnsupportedOperationException( - "Unsupported histogram histogramColumn: histogramColumn=" + histogramColumn - + " type=" + ArgumentValidations.getColumnType(t, histogramColumn, plotInfo), - plotInfo); + throw new PlotUnsupportedOperationException("Unsupported histogram histogramColumn: histogramColumn=" + + histogramColumn + " type=" + ArgumentValidations.getColumnType(t, histogramColumn, plotInfo), + plotInfo); } // add byColumns in groupBy columns @@ -47,8 +43,8 @@ private static Table clean(final Table t, final String histogramColumn, final Pl } return t - .view(view) - .where("io.deephaven.libs.primitives.DoubleFpPrimitives.isNormal(X)"); + .view(view) + .where("io.deephaven.libs.primitives.DoubleFpPrimitives.isNormal(X)"); } private static Table counts(final Table data, final Table range, final String... byColumns) { @@ -61,52 +57,46 @@ private static Table counts(final Table data, final Table range, final String... 
} return data.join(range) - .updateView("RangeIndex = Range.index(X)") - .where("!isNull(RangeIndex)") - .by(AggCombo(AggCount("Count"), AggLast("Range")), groupByColumns) - .updateView("BinMin = Range.binMin(RangeIndex)", "BinMax = Range.binMax(RangeIndex)", - "BinMid=0.5*(BinMin+BinMax)"); + .updateView("RangeIndex = Range.index(X)") + .where("!isNull(RangeIndex)") + .by(AggCombo(AggCount("Count"), AggLast("Range")), groupByColumns) + .updateView("BinMin = Range.binMin(RangeIndex)", "BinMax = Range.binMax(RangeIndex)", + "BinMid=0.5*(BinMin+BinMax)"); } private static Table range(final Table t, final int nbins) { return t.by(AggCombo(AggMin("RangeMin=X"), AggMax("RangeMax=X"), AggCount("NSamples"))) - .update( - "Range = new io.deephaven.db.plot.datasets.histogram.DiscretizedRangeEqual(RangeMin, RangeMax, " - + nbins + ")") - .view("Range"); + .update("Range = new io.deephaven.db.plot.datasets.histogram.DiscretizedRangeEqual(RangeMin, RangeMax, " + + nbins + ")") + .view("Range"); } private static Table range(final double rangeMin, final double rangeMax, final int nbins) { return TableTools.emptyTable(1) - .update("Range = new io.deephaven.db.plot.datasets.histogram.DiscretizedRangeEqual(" - + rangeMin + "," + rangeMax + "," + nbins + ")"); + .update("Range = new io.deephaven.db.plot.datasets.histogram.DiscretizedRangeEqual(" + rangeMin + "," + + rangeMax + "," + nbins + ")"); } /** - * Finds the minimum and maximum of the data in the {@code column} of the {@code table} and - * splits this range into {@code nbins} equally sized bins. Calculates the number of data values - * in each bin. + * Finds the minimum and maximum of the data in the {@code column} of the {@code table} and splits this range into + * {@code nbins} equally sized bins. Calculates the number of data values in each bin. *

    - * Data which is not normal as defined in - * {@link io.deephaven.libs.primitives.DoubleFpPrimitives#isNormal} is filtered out of the data - * set. + * Data which is not normal as defined in {@link io.deephaven.libs.primitives.DoubleFpPrimitives#isNormal} is + * filtered out of the data set. * - * @throws io.deephaven.base.verify.RequirementFailure {@code table} and {@code column} must not - * be null + * @throws io.deephaven.base.verify.RequirementFailure {@code table} and {@code column} must not be null * @throws RuntimeException {@code column} must be numeric * @param table table * @param column column in {@code table} * @param nbins number of bins * @param plotInfo plot information - * @param byColumns other columns needed to calaculate histogram, these columns will be included - * while grouping + * @param byColumns other columns needed to calaculate histogram, these columns will be included while grouping * @return table holding the calculated bins and their counts - * @throws io.deephaven.base.verify.RequirementFailure {@code table} and {@code column} must not - * be null + * @throws io.deephaven.base.verify.RequirementFailure {@code table} and {@code column} must not be null * @throws RuntimeException {@code column} must be numeric */ - public static Table calc(final Table table, final String column, final int nbins, - final PlotInfo plotInfo, final String... byColumns) { + public static Table calc(final Table table, final String column, final int nbins, final PlotInfo plotInfo, + final String... byColumns) { Require.neqNull(table, "table"); Require.neqNull(column, "column"); @@ -116,40 +106,33 @@ public static Table calc(final Table table, final String column, final int nbins } /** - * Finds the minimum and maximum of the data in the {@code column} of the {@code table} and - * splits this range into {@code nbins} equally sized bins. Calculates the number of data values - * in each bin. 
+ * Finds the minimum and maximum of the data in the {@code column} of the {@code table} and splits this range into + * {@code nbins} equally sized bins. Calculates the number of data values in each bin. *

    - * Data which is not normal as defined in - * {@link io.deephaven.libs.primitives.DoubleFpPrimitives#isNormal} is filtered out of the data - * set. + * Data which is not normal as defined in {@link io.deephaven.libs.primitives.DoubleFpPrimitives#isNormal} is + * filtered out of the data set. * * @param table table * @param column column in {@code table} * @param nbins number of bins * @param plotInfo plot information - * @param byColumns other columns needed to calaculate histogram, these columns will be included - * while grouping + * @param byColumns other columns needed to calaculate histogram, these columns will be included while grouping * @return table holding the calculated bins and their counts - * @throws io.deephaven.base.verify.RequirementFailure {@code table} and {@code column} must not - * be null + * @throws io.deephaven.base.verify.RequirementFailure {@code table} and {@code column} must not be null * @throws RuntimeException {@code column} must be numeric */ - public static Table calc(final Table table, final String column, final int nbins, - final PlotInfo plotInfo, final List byColumns) { + public static Table calc(final Table table, final String column, final int nbins, final PlotInfo plotInfo, + final List byColumns) { return calc(table, column, nbins, plotInfo, - byColumns != null && !byColumns.isEmpty() - ? byColumns.toArray(new String[byColumns.size()]) - : null); + byColumns != null && !byColumns.isEmpty() ? byColumns.toArray(new String[byColumns.size()]) : null); } /** - * Splits the specified range into {@code nbins} equally sized bins. Calculates the number of - * data values in each bin. + * Splits the specified range into {@code nbins} equally sized bins. Calculates the number of data values in each + * bin. *

    - * Data which is not normal as defined in - * {@link io.deephaven.libs.primitives.DoubleFpPrimitives#isNormal} is filtered out of the data - * set. + * Data which is not normal as defined in {@link io.deephaven.libs.primitives.DoubleFpPrimitives#isNormal} is + * filtered out of the data set. * * @param table table * @param histogramColumn histogramColumn in {@code table} @@ -157,16 +140,13 @@ public static Table calc(final Table table, final String column, final int nbins * @param rangeMax range maximum * @param nbins number of bins * @param plotInfo plot information - * @param byColumns other columns needed to calaculate histogram, these columns will be included - * while grouping + * @param byColumns other columns needed to calaculate histogram, these columns will be included while grouping * @return table holding the calculated bins and their counts - * @throws io.deephaven.base.verify.RequirementFailure {@code table} and {@code histogramColumn} - * must not be null + * @throws io.deephaven.base.verify.RequirementFailure {@code table} and {@code histogramColumn} must not be null * @throws RuntimeException {@code histogramColumn} must be numeric */ public static Table calc(final Table table, final String histogramColumn, final double rangeMin, - final double rangeMax, final int nbins, final PlotInfo plotInfo, - final String... byColumns) { + final double rangeMax, final int nbins, final PlotInfo plotInfo, final String... byColumns) { ArgumentValidations.assertNotNull(table, "table", plotInfo); ArgumentValidations.assertNotNull(histogramColumn, "histogramColumn", plotInfo); @@ -176,12 +156,11 @@ public static Table calc(final Table table, final String histogramColumn, final } /** - * Splits the specified range into {@code nbins} equally sized bins. Calculates the number of - * data values in each bin. + * Splits the specified range into {@code nbins} equally sized bins. Calculates the number of data values in each + * bin. *

    - * Data which is not normal as defined in - * {@link io.deephaven.libs.primitives.DoubleFpPrimitives#isNormal} is filtered out of the data - * set. + * Data which is not normal as defined in {@link io.deephaven.libs.primitives.DoubleFpPrimitives#isNormal} is + * filtered out of the data set. * * @param table table * @param histogramColumn histogramColumn in {@code table} @@ -189,20 +168,15 @@ public static Table calc(final Table table, final String histogramColumn, final * @param rangeMax range maximum * @param nbins number of bins * @param plotInfo plot information - * @param byColumns other columns needed to calaculate histogram, these columns will be included - * while grouping + * @param byColumns other columns needed to calaculate histogram, these columns will be included while grouping * @return table holding the calculated bins and their counts - * @throws io.deephaven.base.verify.RequirementFailure {@code table} and {@code histogramColumn} - * must not be null + * @throws io.deephaven.base.verify.RequirementFailure {@code table} and {@code histogramColumn} must not be null * @throws RuntimeException {@code histogramColumn} must be numeric */ public static Table calc(final Table table, final String histogramColumn, final double rangeMin, - final double rangeMax, final int nbins, final PlotInfo plotInfo, - final List byColumns) { + final double rangeMax, final int nbins, final PlotInfo plotInfo, final List byColumns) { return calc(table, histogramColumn, rangeMin, rangeMax, nbins, plotInfo, - byColumns != null && !byColumns.isEmpty() - ? byColumns.toArray(new String[byColumns.size()]) - : null); + byColumns != null && !byColumns.isEmpty() ? 
byColumns.toArray(new String[byColumns.size()]) : null); } } diff --git a/Plot/src/main/java/io/deephaven/db/plot/datasets/interval/IntervalXYDataSeriesArray.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/interval/IntervalXYDataSeriesArray.java index 5635861be55..79e7c19d33a 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/datasets/interval/IntervalXYDataSeriesArray.java +++ b/Plot/src/main/java/io/deephaven/db/plot/datasets/interval/IntervalXYDataSeriesArray.java @@ -19,7 +19,7 @@ * An {@link XYDataSeriesArray} suitable for bar charts. */ public class IntervalXYDataSeriesArray extends XYDataSeriesArray - implements IntervalXYDataSeriesInternal, TableSnapshotSeries { + implements IntervalXYDataSeriesInternal, TableSnapshotSeries { private static final long serialVersionUID = 5911383536377254715L; @@ -37,38 +37,30 @@ public class IntervalXYDataSeriesArray extends XYDataSeriesArray private final SwappableTable swappableTable; public IntervalXYDataSeriesArray(final AxesImpl axes, final int id, final Comparable name, - @NotNull final TableHandle tableHandle, - final IndexableNumericData startX, final IndexableNumericData midX, - final IndexableNumericData endX, - final IndexableNumericData startY, final IndexableNumericData midY, - final IndexableNumericData endY) { + @NotNull final TableHandle tableHandle, + final IndexableNumericData startX, final IndexableNumericData midX, final IndexableNumericData endX, + final IndexableNumericData startY, final IndexableNumericData midY, final IndexableNumericData endY) { this(axes, id, name, tableHandle, null, startX, midX, endX, startY, midY, endY); } public IntervalXYDataSeriesArray(final AxesImpl axes, final int id, final Comparable name, - @NotNull final SwappableTable swappableTable, - final IndexableNumericData startX, final IndexableNumericData midX, - final IndexableNumericData endX, - final IndexableNumericData startY, final IndexableNumericData midY, - final IndexableNumericData endY) { + @NotNull final 
SwappableTable swappableTable, + final IndexableNumericData startX, final IndexableNumericData midX, final IndexableNumericData endX, + final IndexableNumericData startY, final IndexableNumericData midY, final IndexableNumericData endY) { this(axes, id, name, null, swappableTable, startX, midX, endX, startY, midY, endY); } private IntervalXYDataSeriesArray(final AxesImpl axes, final int id, final Comparable name, - final TableHandle tableHandle, final SwappableTable swappableTable, - final IndexableNumericData startX, final IndexableNumericData midX, - final IndexableNumericData endX, - final IndexableNumericData startY, final IndexableNumericData midY, - final IndexableNumericData endY) { - this(axes, id, name, tableHandle, swappableTable, startX, midX, endX, startY, midY, endY, - null); + final TableHandle tableHandle, final SwappableTable swappableTable, + final IndexableNumericData startX, final IndexableNumericData midX, final IndexableNumericData endX, + final IndexableNumericData startY, final IndexableNumericData midY, final IndexableNumericData endY) { + this(axes, id, name, tableHandle, swappableTable, startX, midX, endX, startY, midY, endY, null); } /** * Creates an instance of IntervalXYDataSeriesArray with the specified data points. *

    - * {@code startX}, {@code midX}, and {@code endX} at each index define the location of a bar in - * the chart. + * {@code startX}, {@code midX}, and {@code endX} at each index define the location of a bar in the chart. * * @param axes axes displaying the plot * @param id data series id @@ -79,18 +71,16 @@ private IntervalXYDataSeriesArray(final AxesImpl axes, final int id, final Compa * @param startY lowest y-coordinate of the bar at the given index * @param midY middle y-coordinate of the bar at the given index * @param endY highest y-coordinate of the bar at the given index - * @throws io.deephaven.base.verify.RequirementFailure {@code startX}, {@code midX}, - * {@code endX}, {@code startY}, {@code midY}, and {@code endY} must not be null - * @throws IllegalArgumentException {@code startX}, {@code midX}, {@code endX}, {@code startY}, - * {@code midY}, and {@code endY} must be the same size + * @throws io.deephaven.base.verify.RequirementFailure {@code startX}, {@code midX}, {@code endX}, {@code startY}, + * {@code midY}, and {@code endY} must not be null + * @throws IllegalArgumentException {@code startX}, {@code midX}, {@code endX}, {@code startY}, {@code midY}, and + * {@code endY} must be the same size */ public IntervalXYDataSeriesArray(final AxesImpl axes, final int id, final Comparable name, - final TableHandle tableHandle, final SwappableTable swappableTable, - final IndexableNumericData startX, final IndexableNumericData midX, - final IndexableNumericData endX, - final IndexableNumericData startY, final IndexableNumericData midY, - final IndexableNumericData endY, - final XYDataSeriesArray series) { + final TableHandle tableHandle, final SwappableTable swappableTable, + final IndexableNumericData startX, final IndexableNumericData midX, final IndexableNumericData endX, + final IndexableNumericData startY, final IndexableNumericData midY, final IndexableNumericData endY, + final XYDataSeriesArray series) { super(axes, id, name, midX, midY, series); 
ArgumentValidations.assertNotNull(startX, "startX", getPlotInfo()); @@ -101,8 +91,7 @@ public IntervalXYDataSeriesArray(final AxesImpl axes, final int id, final Compar ArgumentValidations.assertNotNull(endY, "endY", getPlotInfo()); if (tableHandle == null && swappableTable == null) { - throw new PlotIllegalArgumentException( - "One of tableHandle or swappableTable must be non null!", this); + throw new PlotIllegalArgumentException("One of tableHandle or swappableTable must be non null!", this); } this.tableHandle = tableHandle; @@ -114,9 +103,8 @@ public IntervalXYDataSeriesArray(final AxesImpl axes, final int id, final Compar this.midY = midY; this.endY = endY; - ArgumentValidations.assertSameSize( - new IndexableNumericData[] {startX, midX, endX, startY, midY, endY}, - new String[] {"startX", "midX", "endX", "startY", "midY", "endY"}, getPlotInfo()); + ArgumentValidations.assertSameSize(new IndexableNumericData[] {startX, midX, endX, startY, midY, endY}, + new String[] {"startX", "midX", "endX", "startY", "midY", "endY"}, getPlotInfo()); } /** diff --git a/Plot/src/main/java/io/deephaven/db/plot/datasets/ohlc/OHLCDataSeriesArray.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/ohlc/OHLCDataSeriesArray.java index 6daae3ffc4c..49a0de38fc0 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/datasets/ohlc/OHLCDataSeriesArray.java +++ b/Plot/src/main/java/io/deephaven/db/plot/datasets/ohlc/OHLCDataSeriesArray.java @@ -22,11 +22,10 @@ public class OHLCDataSeriesArray extends AbstractXYDataSeries implements OHLCDat private final IndexableNumericData close; /** - * Creates an OHLCDataSeriesArray instance. This dataset is suited for open-high-low-close - * charts. + * Creates an OHLCDataSeriesArray instance. This dataset is suited for open-high-low-close charts. 
* - * @throws io.deephaven.base.verify.RequirementFailure {@code time}, {@code open}, {@code high}, - * {@code low}, {@code close} must not be null + * @throws io.deephaven.base.verify.RequirementFailure {@code time}, {@code open}, {@code high}, {@code low}, + * {@code close} must not be null * @param axes axes on which the dataset will be plotted * @param id data series id * @param name series name @@ -37,16 +36,14 @@ public class OHLCDataSeriesArray extends AbstractXYDataSeries implements OHLCDat * @param close close data */ public OHLCDataSeriesArray(final AxesImpl axes, final int id, final Comparable name, - final IndexableNumericData time, final IndexableNumericData open, - final IndexableNumericData high, final IndexableNumericData low, - final IndexableNumericData close) { + final IndexableNumericData time, final IndexableNumericData open, final IndexableNumericData high, + final IndexableNumericData low, final IndexableNumericData close) { this(axes, id, name, time, open, high, low, close, null); } public OHLCDataSeriesArray(final AxesImpl axes, final int id, final Comparable name, - final IndexableNumericData time, final IndexableNumericData open, - final IndexableNumericData high, final IndexableNumericData low, - final IndexableNumericData close, final AbstractXYDataSeries series) { + final IndexableNumericData time, final IndexableNumericData open, final IndexableNumericData high, + final IndexableNumericData low, final IndexableNumericData close, final AbstractXYDataSeries series) { super(axes, id, name, series); ArgumentValidations.assertNotNull(time, "Time", getPlotInfo()); ArgumentValidations.assertNotNull(open, "Open", getPlotInfo()); diff --git a/Plot/src/main/java/io/deephaven/db/plot/datasets/ohlc/OHLCDataSeriesSwappableTableArray.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/ohlc/OHLCDataSeriesSwappableTableArray.java index 431a2673ae1..9215cd14253 100644 --- 
a/Plot/src/main/java/io/deephaven/db/plot/datasets/ohlc/OHLCDataSeriesSwappableTableArray.java +++ b/Plot/src/main/java/io/deephaven/db/plot/datasets/ohlc/OHLCDataSeriesSwappableTableArray.java @@ -14,8 +14,7 @@ import org.jetbrains.annotations.NotNull; -public class OHLCDataSeriesSwappableTableArray extends OHLCDataSeriesArray - implements TableSnapshotSeries { +public class OHLCDataSeriesSwappableTableArray extends OHLCDataSeriesArray implements TableSnapshotSeries { private final SwappableTable swappableTable; private final String timeCol; @@ -25,20 +24,14 @@ public class OHLCDataSeriesSwappableTableArray extends OHLCDataSeriesArray private final String closeCol; private Table localTable; - public OHLCDataSeriesSwappableTableArray(final AxesImpl axes, final int id, - final Comparable name, @NotNull final SwappableTable swappableTable, final String timeCol, - final String openCol, final String highCol, final String lowCol, final String closeCol) { - super(axes, id, name, - new IndexableNumericDataSwappableTable(swappableTable, timeCol, - new PlotInfo(axes, name)), - new IndexableNumericDataSwappableTable(swappableTable, openCol, - new PlotInfo(axes, name)), - new IndexableNumericDataSwappableTable(swappableTable, highCol, - new PlotInfo(axes, name)), - new IndexableNumericDataSwappableTable(swappableTable, lowCol, - new PlotInfo(axes, name)), - new IndexableNumericDataSwappableTable(swappableTable, closeCol, - new PlotInfo(axes, name))); + public OHLCDataSeriesSwappableTableArray(final AxesImpl axes, final int id, final Comparable name, + @NotNull final SwappableTable swappableTable, final String timeCol, final String openCol, + final String highCol, final String lowCol, final String closeCol) { + super(axes, id, name, new IndexableNumericDataSwappableTable(swappableTable, timeCol, new PlotInfo(axes, name)), + new IndexableNumericDataSwappableTable(swappableTable, openCol, new PlotInfo(axes, name)), + new IndexableNumericDataSwappableTable(swappableTable, 
highCol, new PlotInfo(axes, name)), + new IndexableNumericDataSwappableTable(swappableTable, lowCol, new PlotInfo(axes, name)), + new IndexableNumericDataSwappableTable(swappableTable, closeCol, new PlotInfo(axes, name))); ArgumentValidations.assertNotNull(timeCol, "timeCol", getPlotInfo()); ArgumentValidations.assertNotNull(openCol, "openCol", getPlotInfo()); @@ -54,8 +47,7 @@ public OHLCDataSeriesSwappableTableArray(final AxesImpl axes, final int id, this.closeCol = closeCol; } - private OHLCDataSeriesSwappableTableArray(final OHLCDataSeriesSwappableTableArray series, - final AxesImpl axes) { + private OHLCDataSeriesSwappableTableArray(final OHLCDataSeriesSwappableTableArray series, final AxesImpl axes) { super(series, axes); this.swappableTable = series.swappableTable; this.timeCol = series.timeCol; diff --git a/Plot/src/main/java/io/deephaven/db/plot/datasets/ohlc/OHLCDataSeriesTableArray.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/ohlc/OHLCDataSeriesTableArray.java index 6519c468e1c..a7cb00b5b4b 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/datasets/ohlc/OHLCDataSeriesTableArray.java +++ b/Plot/src/main/java/io/deephaven/db/plot/datasets/ohlc/OHLCDataSeriesTableArray.java @@ -23,13 +23,13 @@ public class OHLCDataSeriesTableArray extends OHLCDataSeriesArray implements Tab private final String closeCol; public OHLCDataSeriesTableArray(final AxesImpl axes, final int id, final Comparable name, - @NotNull final TableHandle th, final String timeCol, final String openCol, - final String highCol, final String lowCol, final String closeCol) { + @NotNull final TableHandle th, final String timeCol, final String openCol, final String highCol, + final String lowCol, final String closeCol) { super(axes, id, name, new IndexableNumericDataTable(th, timeCol, new PlotInfo(axes, name)), - new IndexableNumericDataTable(th, openCol, new PlotInfo(axes, name)), - new IndexableNumericDataTable(th, highCol, new PlotInfo(axes, name)), - new 
IndexableNumericDataTable(th, lowCol, new PlotInfo(axes, name)), - new IndexableNumericDataTable(th, closeCol, new PlotInfo(axes, name))); + new IndexableNumericDataTable(th, openCol, new PlotInfo(axes, name)), + new IndexableNumericDataTable(th, highCol, new PlotInfo(axes, name)), + new IndexableNumericDataTable(th, lowCol, new PlotInfo(axes, name)), + new IndexableNumericDataTable(th, closeCol, new PlotInfo(axes, name))); ArgumentValidations.assertNotNull(timeCol, "timeCol", getPlotInfo()); ArgumentValidations.assertNotNull(openCol, "openCol", getPlotInfo()); diff --git a/Plot/src/main/java/io/deephaven/db/plot/datasets/xy/AbstractXYDataSeries.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/xy/AbstractXYDataSeries.java index 2312441a541..3561d707c51 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/datasets/xy/AbstractXYDataSeries.java +++ b/Plot/src/main/java/io/deephaven/db/plot/datasets/xy/AbstractXYDataSeries.java @@ -39,8 +39,7 @@ /** * Base class for the {@link XYDataSeriesInternal}. 
*/ -public abstract class AbstractXYDataSeries extends AbstractDataSeries - implements XYDataSeriesInternal { +public abstract class AbstractXYDataSeries extends AbstractDataSeries implements XYDataSeriesInternal { private static final long serialVersionUID = 5353144043894861970L; private final IndexableDataWithDefault shapeLabels; @@ -56,7 +55,7 @@ public abstract class AbstractXYDataSeries extends AbstractDataSeries * @param name series name */ public AbstractXYDataSeries(final AxesImpl axes, final int id, final Comparable name, - final AbstractXYDataSeries series) { + final AbstractXYDataSeries series) { super(axes, id, name, series); shapeLabels = new IndexableDataWithDefault<>(getPlotInfo()); shapeSizes = new IndexableDataWithDefault<>(getPlotInfo()); @@ -282,7 +281,7 @@ public AbstractXYDataSeries pointSize(final Table t, final String columnName) { final TableHandle tableHandle = new TableHandle(t, columnName); addTableHandle(tableHandle); final ColumnHandlerFactory.ColumnHandler columnHandler = - ColumnHandlerFactory.newNumericHandler(tableHandle, columnName, getPlotInfo()); + ColumnHandlerFactory.newNumericHandler(tableHandle, columnName, getPlotInfo()); if (columnHandler.typeClassification().isNumeric()) { shapeSizes.setSpecific(new IndexableDataTable(columnHandler, getPlotInfo()) { @@ -292,8 +291,8 @@ public Double convert(Object v) { } }, true); } else { - throw new PlotUnsupportedOperationException( - "Column can not be converted into a size: column=" + columnName, this); + throw new PlotUnsupportedOperationException("Column can not be converted into a size: column=" + columnName, + this); } return this; @@ -308,8 +307,7 @@ public AbstractXYDataSeries pointSize(final SelectableDataSet sds, final String final SwappableTable t = sds.getSwappableTable(name(), chart(), columnName); addSwappableTable(t); - shapeSizes.setSpecific(new IndexableDataSwappableTableDouble(t, columnName, getPlotInfo()), - true); + shapeSizes.setSpecific(new 
IndexableDataSwappableTableDouble(t, columnName, getPlotInfo()), true); return this; } @@ -373,19 +371,17 @@ public AbstractXYDataSeries pointColor(final Table t, final String columnName) { final TableHandle tableHandle = new TableHandle(t, columnName); addTableHandle(tableHandle); final ColumnHandlerFactory.ColumnHandler columnHandler = - ColumnHandlerFactory.newNumericHandler(tableHandle, columnName, getPlotInfo()); + ColumnHandlerFactory.newNumericHandler(tableHandle, columnName, getPlotInfo()); if (columnHandler.typeClassification() == ColumnHandlerFactory.TypeClassification.INTEGER && - (columnHandler.type() == int.class || columnHandler.type() == Integer.class)) { + (columnHandler.type() == int.class || columnHandler.type() == Integer.class)) { return pointColor(new IndexableDataTablePaint(columnHandler, chart())); - } else if (columnHandler - .typeClassification() == ColumnHandlerFactory.TypeClassification.PAINT) { + } else if (columnHandler.typeClassification() == ColumnHandlerFactory.TypeClassification.PAINT) { return pointColor(new IndexableDataTable<>(columnHandler, getPlotInfo())); } else { throw new PlotUnsupportedOperationException( - "Column can not be converted into a color: column=" + columnName + "\ttype=" - + columnHandler.type(), - this); + "Column can not be converted into a color: column=" + columnName + "\ttype=" + columnHandler.type(), + this); } } @@ -393,14 +389,13 @@ public AbstractXYDataSeries pointColor(final Table t, final String columnName) { public AbstractXYDataSeries pointColor(final SelectableDataSet sds, final String columnName) { ArgumentValidations.assertColumnsInTable(sds, getPlotInfo(), columnName); final Class type = sds.getTableDefinition().getColumn(columnName).getDataType(); - final boolean isInt = type.equals(int.class) || type.equals(Integer.class) - || type.equals(short.class) || type.equals(Short.class); + final boolean isInt = type.equals(int.class) || type.equals(Integer.class) || type.equals(short.class) + || 
type.equals(Short.class); final boolean isPaint = Paint.class.isAssignableFrom(type); if (!isInt && !isPaint) { throw new PlotUnsupportedOperationException( - "Column can not be converted into a color: column=" + columnName + "\ttype=" + type, - this); + "Column can not be converted into a color: column=" + columnName + "\ttype=" + type, this); } final SwappableTable t = sds.getSwappableTable(name(), chart(), columnName); @@ -458,14 +453,13 @@ public AbstractXYDataSeries pointLabel(@NotNull Table t, @NotNull String columnN final TableHandle tableHandle = new TableHandle(t, columnName); addTableHandle(tableHandle); final ColumnHandlerFactory.ColumnHandler columnHandler = - ColumnHandlerFactory.newObjectHandler(tableHandle, columnName, getPlotInfo()); + ColumnHandlerFactory.newObjectHandler(tableHandle, columnName, getPlotInfo()); return pointLabel(new IndexableDataTableString(columnHandler, getPlotInfo())); } @Override - public AbstractXYDataSeries pointLabel(@NotNull SelectableDataSet sds, - @NotNull String columnName) { + public AbstractXYDataSeries pointLabel(@NotNull SelectableDataSet sds, @NotNull String columnName) { ArgumentValidations.assertColumnsInTable(sds, getPlotInfo(), columnName); final SwappableTable t = sds.getSwappableTable(name(), chart(), columnName); @@ -505,10 +499,8 @@ public XYDataSeries pointShape(final String... shapes) { try { NamedShape.getShape(shape); } catch (final IllegalArgumentException iae) { - throw new PlotIllegalArgumentException( - "Not a valid shape: `" + shape + "` at index: " + index + "; valid shapes: " - + Arrays.toString(NamedShape.values()), - this); + throw new PlotIllegalArgumentException("Not a valid shape: `" + shape + "` at index: " + index + + "; valid shapes: " + Arrays.toString(NamedShape.values()), this); } ++index; } @@ -519,9 +511,8 @@ public XYDataSeries pointShape(final String... shapes) { @Override public XYDataSeries pointShape(final Shape... 
shapes) { ArgumentValidations.assertNotNull(shapes, "shapes", getPlotInfo()); - pointShapes.setSpecific( - new IndexableDataPointShapeObject(new IndexableDataArray<>(shapes, getPlotInfo())), - true); + pointShapes.setSpecific(new IndexableDataPointShapeObject(new IndexableDataArray<>(shapes, getPlotInfo())), + true); return this; } @@ -535,19 +526,18 @@ public XYDataSeries pointShape(final Table t, final String columnName) { final TableHandle tableHandle = new TableHandle(t, columnName); addTableHandle(tableHandle); final ColumnHandlerFactory.ColumnHandler columnHandler = - ColumnHandlerFactory.newObjectHandler(tableHandle, columnName, getPlotInfo()); + ColumnHandlerFactory.newObjectHandler(tableHandle, columnName, getPlotInfo()); return pointShape(new IndexableDataTableString<>(columnHandler, getPlotInfo())); } else if (Shape.class.isAssignableFrom(columnType)) { final TableHandle tableHandle = new TableHandle(t, columnName); addTableHandle(tableHandle); final ColumnHandlerFactory.ColumnHandler columnHandler = - ColumnHandlerFactory.newObjectHandler(tableHandle, columnName, getPlotInfo()); - pointShapes.setSpecific( - new IndexableDataTablePointShapeObject(columnHandler, getPlotInfo()), true); + ColumnHandlerFactory.newObjectHandler(tableHandle, columnName, getPlotInfo()); + pointShapes.setSpecific(new IndexableDataTablePointShapeObject(columnHandler, getPlotInfo()), true); return this; } else { - throw new PlotRuntimeException( - "column is not a supported type (String or Shape): columnName=" + columnName, this); + throw new PlotRuntimeException("column is not a supported type (String or Shape): columnName=" + columnName, + this); } } @@ -560,18 +550,16 @@ public XYDataSeries pointShape(final SelectableDataSet sds, final String columnN if (String.class.isAssignableFrom(columnType)) { final SwappableTable t = sds.getSwappableTable(name(), chart(), columnName); addSwappableTable(t); - return pointShape( - new IndexableDataSwappableTableString<>(t, columnName, 
getPlotInfo())); + return pointShape(new IndexableDataSwappableTableString<>(t, columnName, getPlotInfo())); } else if (Shape.class.isAssignableFrom(columnType)) { final SwappableTable t = sds.getSwappableTable(name(), chart(), columnName); addSwappableTable(t); - pointShapes.setSpecific( - new IndexableDataSwappableTablePointShapeObject(t, columnName, getPlotInfo()), - true); + pointShapes.setSpecific(new IndexableDataSwappableTablePointShapeObject(t, columnName, getPlotInfo()), + true); return this; } else { - throw new PlotRuntimeException( - "column is not a supported type (String or Shape): columnName=" + columnName, this); + throw new PlotRuntimeException("column is not a supported type (String or Shape): columnName=" + columnName, + this); } } @@ -593,8 +581,7 @@ public AbstractXYDataSeries yToolTipPattern(final String format) { return this; } - private static class IndexableDataString extends IndexableData - implements Serializable { + private static class IndexableDataString extends IndexableData implements Serializable { private static final long serialVersionUID = 4764967316583190069L; private IndexableData labels; @@ -616,14 +603,12 @@ public String get(int i) { } } - private static class IndexableDataTableString extends IndexableDataTable - implements Serializable { + private static class IndexableDataTableString extends IndexableDataTable implements Serializable { private static final long serialVersionUID = 5039901915605865720L; private IndexableData labels; - IndexableDataTableString(ColumnHandlerFactory.ColumnHandler columnHandler, - final PlotInfo plotInfo) { + IndexableDataTableString(ColumnHandlerFactory.ColumnHandler columnHandler, final PlotInfo plotInfo) { super(columnHandler, plotInfo); this.labels = labels; } @@ -634,13 +619,11 @@ public String convert(Object v) { } } - private static class IndexableDataTablePointShapeObject extends IndexableDataTable - implements Serializable { + private static class IndexableDataTablePointShapeObject 
extends IndexableDataTable implements Serializable { private static final long serialVersionUID = -3933148551484191243L; - IndexableDataTablePointShapeObject(ColumnHandlerFactory.ColumnHandler columnHandler, - final PlotInfo plotInfo) { + IndexableDataTablePointShapeObject(ColumnHandlerFactory.ColumnHandler columnHandler, final PlotInfo plotInfo) { super(columnHandler, plotInfo); } @@ -650,13 +633,13 @@ public Shape convert(Object v) { } } - private static class IndexableDataSwappableTablePointShapeObject - extends IndexableDataSwappableTable implements Serializable { + private static class IndexableDataSwappableTablePointShapeObject extends IndexableDataSwappableTable + implements Serializable { private static final long serialVersionUID = 5642048699432187943L; - IndexableDataSwappableTablePointShapeObject(final SwappableTable swappableTable, - final String column, PlotInfo plotInfo) { + IndexableDataSwappableTablePointShapeObject(final SwappableTable swappableTable, final String column, + PlotInfo plotInfo) { super(swappableTable, column, plotInfo); } @@ -666,13 +649,12 @@ public Shape convert(Object v) { } } - private static class IndexableDataSwappableTableString - extends IndexableDataSwappableTable implements Serializable { + private static class IndexableDataSwappableTableString extends IndexableDataSwappableTable + implements Serializable { private static final long serialVersionUID = 5039901915605865720L; - IndexableDataSwappableTableString(final SwappableTable t, final String column, - final PlotInfo plotInfo) { + IndexableDataSwappableTableString(final SwappableTable t, final String column, final PlotInfo plotInfo) { super(t, column, plotInfo); } @@ -683,12 +665,11 @@ public String convert(Object v) { } private static class IndexableDataSwappableTablePaint extends IndexableDataSwappableTable - implements Serializable { + implements Serializable { private static final long serialVersionUID = 1809364632850960872L; private final ChartImpl chart; - 
IndexableDataSwappableTablePaint(final SwappableTable t, final String column, - final ChartImpl chart) { + IndexableDataSwappableTablePaint(final SwappableTable t, final String column, final ChartImpl chart) { super(t, column, chart.getPlotInfo()); this.chart = chart; } @@ -699,8 +680,7 @@ public Paint convert(final Object v) { } } - private static class IndexableDataPaintInteger extends IndexableData - implements Serializable { + private static class IndexableDataPaintInteger extends IndexableData implements Serializable { private static final long serialVersionUID = 6258408120262796309L; private final IndexableData colors; private final ChartImpl chart; @@ -722,8 +702,7 @@ public Paint get(int i) { } } - private static class IndexableDataTablePaint extends IndexableDataTable - implements Serializable { + private static class IndexableDataTablePaint extends IndexableDataTable implements Serializable { private static final long serialVersionUID = 1809364632850960872L; private final ChartImpl chart; diff --git a/Plot/src/main/java/io/deephaven/db/plot/datasets/xy/XYDataSeries.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/xy/XYDataSeries.java index fe316f4a16b..96c30980c3f 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/datasets/xy/XYDataSeries.java +++ b/Plot/src/main/java/io/deephaven/db/plot/datasets/xy/XYDataSeries.java @@ -12,8 +12,7 @@ import io.deephaven.gui.shape.Shape; /** - * {@link DataSeries} with two numerical components, x and y. Data points are numbered and are - * accessed with an index. + * {@link DataSeries} with two numerical components, x and y. Data points are numbered and are accessed with an index. */ public interface XYDataSeries extends DataSeries { @@ -25,8 +24,8 @@ public interface XYDataSeries extends DataSeries { /** - * Sets the point size. A scale factor of 1 is the default size. A scale factor of 2 is 2x the - * default size. Unspecified points use the default size. + * Sets the point size. 
A scale factor of 1 is the default size. A scale factor of 2 is 2x the default size. + * Unspecified points use the default size. * * @param factors factors to multiply the default size (1) by * @return this XYDataSeries @@ -34,8 +33,8 @@ public interface XYDataSeries extends DataSeries { XYDataSeries pointSize(IndexableData factors); /** - * Sets the point size. A scale factor of 1 is the default size. A scale factor of 2 is 2x the - * default size. Unspecified points use the default size. + * Sets the point size. A scale factor of 1 is the default size. A scale factor of 2 is 2x the default size. + * Unspecified points use the default size. * * @param factors factors to multiply the default size (1) by * @return this XYDataSeries @@ -43,8 +42,8 @@ public interface XYDataSeries extends DataSeries { XYDataSeries pointSize(int... factors); /** - * Sets the point size. A scale factor of 1 is the default size. A scale factor of 2 is 2x the - * default size. Unspecified points use the default size. + * Sets the point size. A scale factor of 1 is the default size. A scale factor of 2 is 2x the default size. + * Unspecified points use the default size. * * @param factors factors to multiply the default size (1) by * @return this XYDataSeries @@ -52,8 +51,8 @@ public interface XYDataSeries extends DataSeries { XYDataSeries pointSize(long... factors); /** - * Sets the point size. A scale factor of 1 is the default size. A scale factor of 2 is 2x the - * default size. Unspecified points use the default size. + * Sets the point size. A scale factor of 1 is the default size. A scale factor of 2 is 2x the default size. + * Unspecified points use the default size. * * @param factors factors to multiply the default size (1) by * @return this XYDataSeries @@ -61,8 +60,8 @@ public interface XYDataSeries extends DataSeries { XYDataSeries pointSize(double... factors); /** - * Sets the point size. A scale factor of 1 is the default size. 
A scale factor of 2 is 2x the - * default size. Unspecified points use the default size. + * Sets the point size. A scale factor of 1 is the default size. A scale factor of 2 is 2x the default size. + * Unspecified points use the default size. * * @param factors factors to multiply the default size (1) by * @param data type of the {@code factors} @@ -71,24 +70,24 @@ public interface XYDataSeries extends DataSeries { XYDataSeries pointSize(T[] factors); /** - * Sets the point size. A scale factor of 1 is the default size. A scale factor of 2 is 2x the - * default size. Unspecified points use the default size. + * Sets the point size. A scale factor of 1 is the default size. A scale factor of 2 is 2x the default size. + * Unspecified points use the default size. * * @param t table containing factors to multiply the default size (1) by - * @param columnName column in {@code t} containing size scaling factors. The size data for - * point i comes from row i. + * @param columnName column in {@code t} containing size scaling factors. The size data for point i comes from row + * i. * @return this XYDataSeries */ XYDataSeries pointSize(Table t, String columnName); /** - * Sets the point size. A scale factor of 1 is the default size. A scale factor of 2 is 2x the - * default size. Unspecified points use the default size. + * Sets the point size. A scale factor of 1 is the default size. A scale factor of 2 is 2x the default size. + * Unspecified points use the default size. * - * @param sds selectable data set (e.g. OneClick filterable table) containing factors to - * multiply the default size (1) by - * @param columnName column in {@code sds} containing size scaling factors. The size data for - * point i comes from row i. + * @param sds selectable data set (e.g. OneClick filterable table) containing factors to multiply the default size + * (1) by + * @param columnName column in {@code sds} containing size scaling factors. The size data for point i comes from row + * i. 
* @return this XYDataSeries */ XYDataSeries pointSize(SelectableDataSet sds, String columnName); @@ -125,8 +124,8 @@ public interface XYDataSeries extends DataSeries { /** * Sets the point color. Unspecified points use the default color. * - * @param colors color palette indices. The color for data point i comes from index i. A value - * of 3 corresponds to the 3rd color from the color pallette. + * @param colors color palette indices. The color for data point i comes from index i. A value of 3 corresponds to + * the 3rd color from the color pallette. * @return this XYDataSeries */ XYDataSeries pointColor(int... colors); @@ -134,8 +133,8 @@ public interface XYDataSeries extends DataSeries { /** * Sets the point color. Unspecified points use the default color. * - * @param colors color palette indices. The color for data point i comes from index i. A value - * of 3 corresponds to the 3rd color from the color pallette. + * @param colors color palette indices. The color for data point i comes from index i. A value of 3 corresponds to + * the 3rd color from the color pallette. * @return this XYDataSeries */ XYDataSeries pointColor(Integer... colors); @@ -152,8 +151,7 @@ public interface XYDataSeries extends DataSeries { * Sets the point color. Unspecified points use the default color. * * @param t table containing colors - * @param columnName column in {@code t} containing colors. The color data for point i comes - * from row i. + * @param columnName column in {@code t} containing colors. The color data for point i comes from row i. * @return this XYDataSeries */ XYDataSeries pointColor(Table t, String columnName); @@ -162,8 +160,7 @@ public interface XYDataSeries extends DataSeries { * Sets the point color. Unspecified points use the default color. * * @param sds selectable data set (e.g. OneClick filterable table) containing colors - * @param columnName column in {@code sds} containing colors. The color data for point i comes - * from row i. 
+ * @param columnName column in {@code sds} containing colors. The color data for point i comes from row i. * @return this XYDataSeries */ XYDataSeries pointColor(SelectableDataSet sds, String columnName); @@ -173,8 +170,8 @@ public interface XYDataSeries extends DataSeries { /** - * Sets the point label for data point i from index i of the input labels. Points outside of - * these indices are unlabeled. + * Sets the point label for data point i from index i of the input labels. Points outside of these indices are + * unlabeled. * * @param labels labels * @return this XYDataSeries @@ -182,8 +179,8 @@ public interface XYDataSeries extends DataSeries { XYDataSeries pointLabel(IndexableData labels); /** - * Sets the point label for data point i from index i of the input labels. Points outside of - * these indices are unlabeled. + * Sets the point label for data point i from index i of the input labels. Points outside of these indices are + * unlabeled. * * @param labels labels * @return this XYDataSeries @@ -191,23 +188,21 @@ public interface XYDataSeries extends DataSeries { XYDataSeries pointLabel(Object... labels); /** - * Sets the point label for data point i from index i of the input labels. Points outside of - * these indices are unlabeled. + * Sets the point label for data point i from index i of the input labels. Points outside of these indices are + * unlabeled. * * @param t table containing labels - * @param columnName column in {@code t} containing labels. The label data for point i comes - * from row i. + * @param columnName column in {@code t} containing labels. The label data for point i comes from row i. * @return this XYDataSeries */ XYDataSeries pointLabel(Table t, String columnName); /** - * Sets the point label for data point i from index i of the input labels. Points outside of - * these indices are unlabeled. + * Sets the point label for data point i from index i of the input labels. Points outside of these indices are + * unlabeled. 
* * @param sds selectable data set (e.g. OneClick filterable table) containing labels - * @param columnName column in {@code sds} containing labels. The color data for point i comes - * from row i. + * @param columnName column in {@code sds} containing labels. The color data for point i comes from row i. * @return this XYDataSeries */ XYDataSeries pointLabel(SelectableDataSet sds, String columnName); @@ -218,8 +213,8 @@ public interface XYDataSeries extends DataSeries { /** - * Sets the point shapes for data point i from index i of the input labels. Points outside of - * these indices use default shapes. + * Sets the point shapes for data point i from index i of the input labels. Points outside of these indices use + * default shapes. * * @param shapes shapes * @return this XYDataSeries @@ -227,8 +222,8 @@ public interface XYDataSeries extends DataSeries { XYDataSeries pointShape(IndexableData shapes); /** - * Sets the point shapes for data point i from index i of the input labels. Points outside of - * these indices use default shapes. + * Sets the point shapes for data point i from index i of the input labels. Points outside of these indices use + * default shapes. * * @param shapes shapes * @return this XYDataSeries @@ -236,8 +231,8 @@ public interface XYDataSeries extends DataSeries { XYDataSeries pointShape(String... shapes); /** - * Sets the point shapes for data point i from index i of the input labels. Points outside of - * these indices use default shapes. + * Sets the point shapes for data point i from index i of the input labels. Points outside of these indices use + * default shapes. * * @param shapes shapes * @return this XYDataSeries @@ -245,23 +240,21 @@ public interface XYDataSeries extends DataSeries { XYDataSeries pointShape(Shape... shapes); /** - * Sets the point shapes for data point i from index i of the input labels. Points outside of - * these indices use default shapes. 
+ * Sets the point shapes for data point i from index i of the input labels. Points outside of these indices use + * default shapes. * * @param t table containing shapes - * @param columnName column in {@code t} containing shapes. The shape data for point i comes - * from row i. + * @param columnName column in {@code t} containing shapes. The shape data for point i comes from row i. * @return this XYDataSeries */ XYDataSeries pointShape(Table t, String columnName); /** - * Sets the point shapes for data point i from index i of the input labels. Points outside of - * these indices use default shapes. + * Sets the point shapes for data point i from index i of the input labels. Points outside of these indices use + * default shapes. * * @param sds selectable data set (e.g. OneClick filterable table) containing shapes - * @param columnName column in {@code sds} containing shapes. The color data for point i comes - * from row i. + * @param columnName column in {@code sds} containing shapes. The color data for point i comes from row i. 
* @return this XYDataSeries */ XYDataSeries pointShape(SelectableDataSet sds, String columnName); diff --git a/Plot/src/main/java/io/deephaven/db/plot/datasets/xy/XYDataSeriesArray.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/xy/XYDataSeriesArray.java index b1237cfe387..916d1aab84c 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/datasets/xy/XYDataSeriesArray.java +++ b/Plot/src/main/java/io/deephaven/db/plot/datasets/xy/XYDataSeriesArray.java @@ -26,19 +26,17 @@ public class XYDataSeriesArray extends AbstractXYDataSeries { * @param x x-values * @param y y-values */ - public XYDataSeriesArray(final AxesImpl axes, final int id, final Comparable name, - final IndexableNumericData x, final IndexableNumericData y) { + public XYDataSeriesArray(final AxesImpl axes, final int id, final Comparable name, final IndexableNumericData x, + final IndexableNumericData y) { this(axes, id, name, x, y, null); } - public XYDataSeriesArray(final AxesImpl axes, final int id, final Comparable name, - final IndexableNumericData x, final IndexableNumericData y, - final AbstractXYDataSeries series) { + public XYDataSeriesArray(final AxesImpl axes, final int id, final Comparable name, final IndexableNumericData x, + final IndexableNumericData y, final AbstractXYDataSeries series) { super(axes, id, name, series); ArgumentValidations.assertNotNull(x, "x", getPlotInfo()); ArgumentValidations.assertNotNull(y, "y", getPlotInfo()); - ArgumentValidations.assertSameSize(new IndexableNumericData[] {x, y}, - new String[] {"x", "y"}, getPlotInfo()); + ArgumentValidations.assertSameSize(new IndexableNumericData[] {x, y}, new String[] {"x", "y"}, getPlotInfo()); this.x = x; this.y = y; diff --git a/Plot/src/main/java/io/deephaven/db/plot/datasets/xy/XYDataSeriesFunction.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/xy/XYDataSeriesFunction.java index 5fcd87994cd..a863554144d 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/datasets/xy/XYDataSeriesFunction.java +++ 
b/Plot/src/main/java/io/deephaven/db/plot/datasets/xy/XYDataSeriesFunction.java @@ -6,9 +6,9 @@ /** * {@link XYDataSeries} based on a function. * - * By default, this calculates at least 200 data points inside the plot's existing range. The number - * of points can be increased for a finer grained plot, or decreased if less resolution is needed. - * The points are recomputed as the {@link Chart}'s x-range changes. + * By default, this calculates at least 200 data points inside the plot's existing range. The number of points can be + * increased for a finer grained plot, or decreased if less resolution is needed. The points are recomputed as the + * {@link Chart}'s x-range changes. */ public interface XYDataSeriesFunction extends XYDataSeries { @@ -24,9 +24,8 @@ public interface XYDataSeriesFunction extends XYDataSeries { /** * Sets the data range for this series. * - * @throws IllegalArgumentException {@code xmin} must not be less than {@code xmax} {@code xmin} - * and {@code xmax} must be normal. See {@link DoubleFpPrimitives#isNormal} - * {@code npoints} must non-negative + * @throws IllegalArgumentException {@code xmin} must not be less than {@code xmax} {@code xmin} and {@code xmax} + * must be normal. See {@link DoubleFpPrimitives#isNormal} {@code npoints} must non-negative * @param xmin range minimum * @param xmax range maximum * @param npoints number of data points diff --git a/Plot/src/main/java/io/deephaven/db/plot/datasets/xy/XYDataSeriesFunctionImpl.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/xy/XYDataSeriesFunctionImpl.java index 853eee0b6af..e33f39dc4bd 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/datasets/xy/XYDataSeriesFunctionImpl.java +++ b/Plot/src/main/java/io/deephaven/db/plot/datasets/xy/XYDataSeriesFunctionImpl.java @@ -21,12 +21,11 @@ /** * {@link XYDataSeriesInternal} based on a function. * - * By default, this calculates at least 200 data points inside the plots existing range. 
The number - * of points can be increased for a finer grained plot, or decreased if less resolution is needed. - * The points are recomputed as the {@link Chart}'s x-range changes. + * By default, this calculates at least 200 data points inside the plots existing range. The number of points can be + * increased for a finer grained plot, or decreased if less resolution is needed. The points are recomputed as the + * {@link Chart}'s x-range changes. */ -public class XYDataSeriesFunctionImpl extends AbstractXYDataSeries - implements XYDataSeriesFunctionInternal { +public class XYDataSeriesFunctionImpl extends AbstractXYDataSeries implements XYDataSeriesFunctionInternal { private static final long serialVersionUID = -2830236235998986828L; private static final Logger log = LoggerFactory.getLogger(XYDataSeriesFunctionImpl.class); @@ -54,7 +53,7 @@ public class XYDataSeriesFunctionImpl extends AbstractXYDataSeries * @param function function to plot */ public XYDataSeriesFunctionImpl(final AxesImpl axes, final int id, final Comparable name, - @SuppressWarnings("ConstantConditions") final DoubleUnaryOperator function) { + @SuppressWarnings("ConstantConditions") final DoubleUnaryOperator function) { super(axes, id, name, null); this.function = function; ArgumentValidations.assertNotNull(function, "function", getPlotInfo()); @@ -145,9 +144,9 @@ private void recomputeRange(final double xmin, final double xmax, final int npoi final double dx = (xmax - xmin) / (npoints - 1); if (!DoubleFpPrimitives.isNormal(xmin) || !DoubleFpPrimitives.isNormal(xmax) - || !DoubleFpPrimitives.isNormal(dx)) { - log.info("XYDataSeriesFunction: abnormal range: xmin=" + xmin + " xmax=" + xmax + " dx=" - + dx + " npoints=" + npoints); + || !DoubleFpPrimitives.isNormal(dx)) { + log.info("XYDataSeriesFunction: abnormal range: xmin=" + xmin + " xmax=" + xmax + " dx=" + dx + " npoints=" + + npoints); return; } @@ -162,8 +161,8 @@ private void computeY(final double x) { buffer.computeIfAbsent(x, val -> { 
double y = function.applyAsDouble(x); if (!DoubleFpPrimitives.isNormal(y)) { - log.info("XYDataSeriesFunction: abnormal y value: x=" + x + " y=" + y + " xmin=" - + xmin + " xmax=" + xmax + " npoints=" + npoints); + log.info("XYDataSeriesFunction: abnormal y value: x=" + x + " y=" + y + " xmin=" + xmin + " xmax=" + + xmax + " npoints=" + npoints); y = Double.NaN; } ymin = PlotUtils.minIgnoreNaN(ymin, y); @@ -171,8 +170,8 @@ private void computeY(final double x) { return y; }); } else { - log.info("XYDataSeriesFunction: abnormal x value: x=" + x + " xmin=" + xmin + " xmax=" - + xmax + " npoints=" + npoints); + log.info("XYDataSeriesFunction: abnormal x value: x=" + x + " xmin=" + xmin + " xmax=" + xmax + " npoints=" + + npoints); } } @@ -194,8 +193,7 @@ public int size() { @Override public double getX(int i) { if (i < 0 || i > size()) { - throw new IndexOutOfBoundsException( - "Index out of bounds. index=" + i + " size=" + size()); + throw new IndexOutOfBoundsException("Index out of bounds. index=" + i + " size=" + size()); } return currentData[0][i]; @@ -204,8 +202,7 @@ public double getX(int i) { @Override public double getY(int i) { if (i < 0 || i > size()) { - throw new IndexOutOfBoundsException( - "Index out of bounds. index=" + i + " size=" + size()); + throw new IndexOutOfBoundsException("Index out of bounds. index=" + i + " size=" + size()); } return currentData[1][i]; @@ -231,17 +228,15 @@ public XYDataSeriesFunctionImpl funcRange(final double xmin, final double xmax) /** * Sets the data range for this series. * - * @throws IllegalArgumentException {@code xmin} must not be less than {@code xmax} {@code xmin} - * and {@code xmax} must be normal. See {@link DoubleFpPrimitives#isNormal} - * {@code npoints} must non-negative + * @throws IllegalArgumentException {@code xmin} must not be less than {@code xmax} {@code xmin} and {@code xmax} + * must be normal. 
See {@link DoubleFpPrimitives#isNormal} {@code npoints} must non-negative * @param xmin range minimum * @param xmax range maximum * @param npoints number of data points * @return this data series with the new range */ @Override - public XYDataSeriesFunctionImpl funcRange(final double xmin, final double xmax, - final int npoints) { + public XYDataSeriesFunctionImpl funcRange(final double xmin, final double xmax, final int npoints) { rangeSet = true; nPointsSet = true; return funcRangeInternal(xmin, xmax, npoints, true); @@ -263,8 +258,7 @@ public XYDataSeriesFunctionImpl funcNPoints(final int npoints) { /** * Invokes a funcRangeInternal if xmin or xmax has changed. */ - public void invokeRecompute(final double xmin, final double xmax, final String name, - final int sessionId) { + public void invokeRecompute(final double xmin, final double xmax, final String name, final int sessionId) { if (xmin == this.xmin && xmax == this.xmax) { return; } @@ -294,9 +288,8 @@ public XYDataSeriesFunctionImpl funcNPointsInternal(int npoints) { * * @param xmin lower bound for the x values to plot * @param xmax upper bound for the x values to plot - * @param isUser whether the user is the one who called for the range to be changed. If a user - * called for the change, we don't want internal calls to change the plot out from under - * them. + * @param isUser whether the user is the one who called for the range to be changed. If a user called for the + * change, we don't want internal calls to change the plot out from under them. * @return this series */ private XYDataSeriesFunctionImpl funcRangeInternal(double xmin, double xmax, boolean isUser) { @@ -309,21 +302,17 @@ private XYDataSeriesFunctionImpl funcRangeInternal(double xmin, double xmax, boo * @param xmin lower bound for the x values to plot * @param xmax upper bound for the x values to plot * @param npoints number of points to compute - * @param isUser whether the user is the one who called for the range to be changed. 
If a user - * called for the change, we don't want internal calls to change the plot out from under - * them. + * @param isUser whether the user is the one who called for the range to be changed. If a user called for the + * change, we don't want internal calls to change the plot out from under them. * @return this series */ - private XYDataSeriesFunctionImpl funcRangeInternal(double xmin, double xmax, int npoints, - boolean isUser) { + private XYDataSeriesFunctionImpl funcRangeInternal(double xmin, double xmax, int npoints, boolean isUser) { if (!DoubleFpPrimitives.isNormal(xmin) || !DoubleFpPrimitives.isNormal(xmax)) { - throw new PlotIllegalArgumentException( - "Abnormal range value. xmin=" + xmin + " xmax=" + xmax, this); + throw new PlotIllegalArgumentException("Abnormal range value. xmin=" + xmin + " xmax=" + xmax, this); } if (xmin > xmax) { - throw new PlotIllegalArgumentException("xmax < xmin: xmin=" + xmin + " xmax=" + xmax, - this); + throw new PlotIllegalArgumentException("xmax < xmin: xmin=" + xmin + " xmax=" + xmax, this); } if (npoints < 0) { @@ -350,9 +339,8 @@ private XYDataSeriesFunctionImpl funcRangeInternal(double xmin, double xmax, int * Change the granularity of the plot by specifying how many points to compute. * * @param npoints number of points to compute - * @param isUser whether the user is the one who called for the number of points to be changed. - * If a user called for the change, we don't want internal calls to change the plot out - * from under them. + * @param isUser whether the user is the one who called for the number of points to be changed. If a user called for + * the change, we don't want internal calls to change the plot out from under them. 
* @return this series */ private XYDataSeriesFunctionImpl funcNPointsInternal(int npoints, boolean isUser) { diff --git a/Plot/src/main/java/io/deephaven/db/plot/datasets/xy/XYDataSeriesFunctionInternal.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/xy/XYDataSeriesFunctionInternal.java index e54e19b4afe..2cbb799802d 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/datasets/xy/XYDataSeriesFunctionInternal.java +++ b/Plot/src/main/java/io/deephaven/db/plot/datasets/xy/XYDataSeriesFunctionInternal.java @@ -21,16 +21,14 @@ public interface XYDataSeriesFunctionInternal extends XYDataSeriesFunction { /** * Sets the data range and number of points for this series if the user did not set funcRange. * - * @throws IllegalArgumentException {@code xmin} must not be less than {@code xmax} {@code xmin} - * and {@code xmax} must be normal. See {@link DoubleFpPrimitives#isNormal} - * {@code npoints} must non-negative + * @throws IllegalArgumentException {@code xmin} must not be less than {@code xmax} {@code xmin} and {@code xmax} + * must be normal. See {@link DoubleFpPrimitives#isNormal} {@code npoints} must non-negative * @param xmin range minimum * @param xmax range maximum * @param npoints number of data points * @return this data series with the new range */ - XYDataSeriesFunctionInternal funcRangeInternal(final double xmin, final double xmax, - final int npoints); + XYDataSeriesFunctionInternal funcRangeInternal(final double xmin, final double xmax, final int npoints); /** * Sets the number of data points in this dataset if the user did not set funcNPoints. 
diff --git a/Plot/src/main/java/io/deephaven/db/plot/datasets/xy/XYDataSeriesInternal.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/xy/XYDataSeriesInternal.java index 6288278f6fb..464054e106b 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/datasets/xy/XYDataSeriesInternal.java +++ b/Plot/src/main/java/io/deephaven/db/plot/datasets/xy/XYDataSeriesInternal.java @@ -10,8 +10,8 @@ import io.deephaven.gui.shape.Shape; /** - * {@link DataSeriesInternal} with two numerical components, x and y. Data points are numbered and - * are accessed with an index. + * {@link DataSeriesInternal} with two numerical components, x and y. Data points are numbered and are accessed with an + * index. */ public interface XYDataSeriesInternal extends XYDataSeries, DataSeriesInternal { diff --git a/Plot/src/main/java/io/deephaven/db/plot/datasets/xy/XYDataSeriesSwappableTableArray.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/xy/XYDataSeriesSwappableTableArray.java index 771772a9dba..2e3b4466a7b 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/datasets/xy/XYDataSeriesSwappableTableArray.java +++ b/Plot/src/main/java/io/deephaven/db/plot/datasets/xy/XYDataSeriesSwappableTableArray.java @@ -20,8 +20,7 @@ import java.util.function.Function; -public class XYDataSeriesSwappableTableArray extends XYDataSeriesArray - implements TableSnapshotSeries { +public class XYDataSeriesSwappableTableArray extends XYDataSeriesArray implements TableSnapshotSeries { private static final long serialVersionUID = 1L; private final SwappableTable swappableTable; @@ -30,10 +29,9 @@ public class XYDataSeriesSwappableTableArray extends XYDataSeriesArray private Table localTable; public XYDataSeriesSwappableTableArray(final AxesImpl axes, final int id, final Comparable name, - final SwappableTable swappableTable, final String x, final String y) { - super(axes, id, name, - new IndexableNumericDataSwappableTable(swappableTable, x, new PlotInfo(axes, name)), - new 
IndexableNumericDataSwappableTable(swappableTable, y, new PlotInfo(axes, name))); + final SwappableTable swappableTable, final String x, final String y) { + super(axes, id, name, new IndexableNumericDataSwappableTable(swappableTable, x, new PlotInfo(axes, name)), + new IndexableNumericDataSwappableTable(swappableTable, y, new PlotInfo(axes, name))); this.swappableTable = swappableTable; this.x = x; @@ -44,13 +42,13 @@ public XYDataSeriesSwappableTableArray(final AxesImpl axes, final int id, final public AbstractXYDataSeries pointColorByY(Function colors) { final String colName = ColumnNameConstants.POINT_COLOR + this.hashCode(); chart().figure().registerTableMapFunction(swappableTable.getTableMapHandle(), - constructTableMapFromFunction(colors, Paint.class, y, colName)); + constructTableMapFromFunction(colors, Paint.class, y, colName)); swappableTable.getTableMapHandle().addColumn(colName); chart().figure().registerFigureFunction(new FigureImplFunction(figImpl -> { - ((XYDataSeriesSwappableTableArray) figImpl.getFigure().getCharts() - .getChart(chart().row(), chart().column()).axes(axes().id()).series(id())) - .colorsSetSpecific( - new IndexableDataSwappableTable<>(swappableTable, colName, getPlotInfo())); + ((XYDataSeriesSwappableTableArray) figImpl.getFigure().getCharts().getChart(chart().row(), chart().column()) + .axes(axes().id()).series(id())) + .colorsSetSpecific( + new IndexableDataSwappableTable<>(swappableTable, colName, getPlotInfo())); return figImpl; }, this)); return this; @@ -61,24 +59,22 @@ public XYDataSeriesArray copy(AxesImpl axes) { return new XYDataSeriesSwappableTableArray(this, axes); } - private XYDataSeriesSwappableTableArray(final XYDataSeriesSwappableTableArray series, - final AxesImpl axes) { + private XYDataSeriesSwappableTableArray(final XYDataSeriesSwappableTableArray series, final AxesImpl axes) { super(series, axes); this.swappableTable = series.swappableTable; this.x = series.x; this.y = series.y; } - private Function 
constructTableMapFromFunction( - final Function function, final Class resultClass, final String onColumn, - final String columnName) { + private Function constructTableMapFromFunction(final Function function, + final Class resultClass, final String onColumn, final String columnName) { ArgumentValidations.assertNotNull(function, "function", getPlotInfo()); final String queryFunction = columnName + "Function"; return t -> { QueryScope.addParam(queryFunction, function); QueryLibrary.importClass(resultClass); - return t.update(columnName + " = (" + resultClass.getSimpleName() + ") " + queryFunction - + ".apply(" + onColumn + ")"); + return t.update(columnName + " = (" + resultClass.getSimpleName() + ") " + queryFunction + ".apply(" + + onColumn + ")"); }; } } diff --git a/Plot/src/main/java/io/deephaven/db/plot/datasets/xy/XYDataSeriesTableArray.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/xy/XYDataSeriesTableArray.java index 9245540a506..4bf9d922f29 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/datasets/xy/XYDataSeriesTableArray.java +++ b/Plot/src/main/java/io/deephaven/db/plot/datasets/xy/XYDataSeriesTableArray.java @@ -26,10 +26,9 @@ public class XYDataSeriesTableArray extends XYDataSeriesArray implements SeriesI private final String y; public XYDataSeriesTableArray(final AxesImpl axes, final int id, final Comparable name, - final TableHandle tableHandle, final String x, final String y) { - super(axes, id, name, - new IndexableNumericDataTable(tableHandle, x, new PlotInfo(axes, name)), - new IndexableNumericDataTable(tableHandle, y, new PlotInfo(axes, name))); + final TableHandle tableHandle, final String x, final String y) { + super(axes, id, name, new IndexableNumericDataTable(tableHandle, x, new PlotInfo(axes, name)), + new IndexableNumericDataTable(tableHandle, y, new PlotInfo(axes, name))); this.tableHandle = tableHandle; this.x = x; @@ -40,9 +39,9 @@ public XYDataSeriesTableArray(final AxesImpl axes, final int id, final Comparabl public 
AbstractXYDataSeries pointColorByY(Function colors) { final String colName = ColumnNameConstants.POINT_COLOR + this.hashCode(); chart().figure().registerTableFunction(tableHandle.getTable(), - t -> constructTableFromFunction(t, colors, Paint.class, y, colName)); + t -> constructTableFromFunction(t, colors, Paint.class, y, colName)); chart().figure().registerFigureFunction( - new FigureImplFunction(f -> f.pointColor(tableHandle.getTable(), colName), this)); + new FigureImplFunction(f -> f.pointColor(tableHandle.getTable(), colName), this)); return this; } @@ -59,12 +58,12 @@ public XYDataSeriesTableArray copy(final AxesImpl axes) { } private Table constructTableFromFunction(final Table t, final Function function, - final Class resultClass, final String onColumn, final String columnName) { + final Class resultClass, final String onColumn, final String columnName) { ArgumentValidations.assertNotNull(function, "function", getPlotInfo()); final String queryFunction = columnName + "Function"; QueryScope.addParam(queryFunction, function); QueryLibrary.importClass(resultClass); - return t.update(columnName + " = (" + resultClass.getSimpleName() + ") " + queryFunction - + ".apply(" + onColumn + ")"); + return t.update( + columnName + " = (" + resultClass.getSimpleName() + ") " + queryFunction + ".apply(" + onColumn + ")"); } } diff --git a/Plot/src/main/java/io/deephaven/db/plot/datasets/xyerrorbar/XYErrorBarDataSeriesArray.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/xyerrorbar/XYErrorBarDataSeriesArray.java index 79cf91958b0..35da7a434a7 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/datasets/xyerrorbar/XYErrorBarDataSeriesArray.java +++ b/Plot/src/main/java/io/deephaven/db/plot/datasets/xyerrorbar/XYErrorBarDataSeriesArray.java @@ -13,8 +13,7 @@ /** * Dataset appropriate for an {@link XYErrorBarDataSeriesInternal} composed of indexable data. 
*/ -public class XYErrorBarDataSeriesArray extends XYDataSeriesArray - implements XYErrorBarDataSeriesInternal { +public class XYErrorBarDataSeriesArray extends XYDataSeriesArray implements XYErrorBarDataSeriesInternal { private IndexableNumericData x; private IndexableNumericData xLow; @@ -27,18 +26,16 @@ public class XYErrorBarDataSeriesArray extends XYDataSeriesArray private final boolean drawYError; public XYErrorBarDataSeriesArray(final AxesImpl axes, final int id, final Comparable name, - final IndexableNumericData x, final IndexableNumericData xLow, - final IndexableNumericData xHigh, final IndexableNumericData y, - final IndexableNumericData yLow, final IndexableNumericData yHigh, final boolean drawXError, - final boolean drawYError) { + final IndexableNumericData x, final IndexableNumericData xLow, final IndexableNumericData xHigh, + final IndexableNumericData y, final IndexableNumericData yLow, final IndexableNumericData yHigh, + final boolean drawXError, final boolean drawYError) { this(axes, id, name, x, xLow, xHigh, y, yLow, yHigh, drawXError, drawYError, null); } public XYErrorBarDataSeriesArray(final AxesImpl axes, final int id, final Comparable name, - final IndexableNumericData x, final IndexableNumericData xLow, - final IndexableNumericData xHigh, final IndexableNumericData y, - final IndexableNumericData yLow, final IndexableNumericData yHigh, final boolean drawXError, - final boolean drawYError, final AbstractXYDataSeries series) { + final IndexableNumericData x, final IndexableNumericData xLow, final IndexableNumericData xHigh, + final IndexableNumericData y, final IndexableNumericData yLow, final IndexableNumericData yHigh, + final boolean drawXError, final boolean drawYError, final AbstractXYDataSeries series) { super(axes, id, name, x, y, series); ArgumentValidations.assertNotNull(x, "x", getPlotInfo()); this.x = x; @@ -73,8 +70,7 @@ public XYErrorBarDataSeriesArray(final AxesImpl axes, final int id, final Compar * @param series series to 
copy. * @param axes new axes to use. */ - protected XYErrorBarDataSeriesArray(final XYErrorBarDataSeriesArray series, - final AxesImpl axes) { + protected XYErrorBarDataSeriesArray(final XYErrorBarDataSeriesArray series, final AxesImpl axes) { super(series, axes); this.x = series.x; this.xLow = series.xLow; diff --git a/Plot/src/main/java/io/deephaven/db/plot/datasets/xyerrorbar/XYErrorBarDataSeriesSwappableTableArray.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/xyerrorbar/XYErrorBarDataSeriesSwappableTableArray.java index db01d6cfb8f..835e8c8b8b8 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/datasets/xyerrorbar/XYErrorBarDataSeriesSwappableTableArray.java +++ b/Plot/src/main/java/io/deephaven/db/plot/datasets/xyerrorbar/XYErrorBarDataSeriesSwappableTableArray.java @@ -13,8 +13,7 @@ import org.jetbrains.annotations.NotNull; -public class XYErrorBarDataSeriesSwappableTableArray extends XYErrorBarDataSeriesArray - implements TableSnapshotSeries { +public class XYErrorBarDataSeriesSwappableTableArray extends XYErrorBarDataSeriesArray implements TableSnapshotSeries { private final SwappableTable swappableTable; private final String x; @@ -25,26 +24,20 @@ public class XYErrorBarDataSeriesSwappableTableArray extends XYErrorBarDataSerie private final String yHigh; private Table localTable; - public XYErrorBarDataSeriesSwappableTableArray(final AxesImpl axes, final int id, - final Comparable name, @NotNull final SwappableTable swappableTable, final String x, - final String xLow, final String xHigh, final String y, final String yLow, - final String yHigh, final boolean drawXError, final boolean drawYError) { - super(axes, id, name, - new IndexableNumericDataSwappableTable(swappableTable, x, new PlotInfo(axes, name)), - xLow == null ? null - : new IndexableNumericDataSwappableTable(swappableTable, xLow, - new PlotInfo(axes, name)), - xHigh == null ? 
null - : new IndexableNumericDataSwappableTable(swappableTable, xHigh, - new PlotInfo(axes, name)), - new IndexableNumericDataSwappableTable(swappableTable, y, new PlotInfo(axes, name)), - yLow == null ? null - : new IndexableNumericDataSwappableTable(swappableTable, yLow, - new PlotInfo(axes, name)), - yHigh == null ? null - : new IndexableNumericDataSwappableTable(swappableTable, yHigh, - new PlotInfo(axes, name)), - drawXError, drawYError); + public XYErrorBarDataSeriesSwappableTableArray(final AxesImpl axes, final int id, final Comparable name, + @NotNull final SwappableTable swappableTable, final String x, final String xLow, final String xHigh, + final String y, final String yLow, final String yHigh, final boolean drawXError, final boolean drawYError) { + super(axes, id, name, new IndexableNumericDataSwappableTable(swappableTable, x, new PlotInfo(axes, name)), + xLow == null ? null + : new IndexableNumericDataSwappableTable(swappableTable, xLow, new PlotInfo(axes, name)), + xHigh == null ? null + : new IndexableNumericDataSwappableTable(swappableTable, xHigh, new PlotInfo(axes, name)), + new IndexableNumericDataSwappableTable(swappableTable, y, new PlotInfo(axes, name)), + yLow == null ? null + : new IndexableNumericDataSwappableTable(swappableTable, yLow, new PlotInfo(axes, name)), + yHigh == null ? 
null + : new IndexableNumericDataSwappableTable(swappableTable, yHigh, new PlotInfo(axes, name)), + drawXError, drawYError); this.swappableTable = swappableTable; this.x = x; @@ -55,8 +48,8 @@ public XYErrorBarDataSeriesSwappableTableArray(final AxesImpl axes, final int id this.yHigh = yHigh; } - private XYErrorBarDataSeriesSwappableTableArray( - final XYErrorBarDataSeriesSwappableTableArray series, final AxesImpl axes) { + private XYErrorBarDataSeriesSwappableTableArray(final XYErrorBarDataSeriesSwappableTableArray series, + final AxesImpl axes) { super(series, axes); this.swappableTable = series.swappableTable; this.x = series.x; diff --git a/Plot/src/main/java/io/deephaven/db/plot/datasets/xyerrorbar/XYErrorBarDataSeriesTableArray.java b/Plot/src/main/java/io/deephaven/db/plot/datasets/xyerrorbar/XYErrorBarDataSeriesTableArray.java index 6d476fb90cc..8ae902fafb5 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/datasets/xyerrorbar/XYErrorBarDataSeriesTableArray.java +++ b/Plot/src/main/java/io/deephaven/db/plot/datasets/xyerrorbar/XYErrorBarDataSeriesTableArray.java @@ -12,8 +12,7 @@ import org.jetbrains.annotations.NotNull; -public class XYErrorBarDataSeriesTableArray extends XYErrorBarDataSeriesArray - implements TableSnapshotSeries { +public class XYErrorBarDataSeriesTableArray extends XYErrorBarDataSeriesArray implements TableSnapshotSeries { private final TableHandle tableHandle; private final String x; @@ -24,21 +23,15 @@ public class XYErrorBarDataSeriesTableArray extends XYErrorBarDataSeriesArray private final String yHigh; public XYErrorBarDataSeriesTableArray(final AxesImpl axes, final int id, final Comparable name, - @NotNull final TableHandle tableHandle, final String x, final String xLow, - final String xHigh, final String y, final String yLow, final String yHigh, - final boolean drawXError, final boolean drawYError) { - super(axes, id, name, - new IndexableNumericDataTable(tableHandle, x, new PlotInfo(axes, name)), - xLow == null ? 
null - : new IndexableNumericDataTable(tableHandle, xLow, new PlotInfo(axes, name)), - xHigh == null ? null - : new IndexableNumericDataTable(tableHandle, xHigh, new PlotInfo(axes, name)), - new IndexableNumericDataTable(tableHandle, y, new PlotInfo(axes, name)), - yLow == null ? null - : new IndexableNumericDataTable(tableHandle, yLow, new PlotInfo(axes, name)), - yHigh == null ? null - : new IndexableNumericDataTable(tableHandle, yHigh, new PlotInfo(axes, name)), - drawXError, drawYError); + @NotNull final TableHandle tableHandle, final String x, final String xLow, final String xHigh, + final String y, final String yLow, final String yHigh, final boolean drawXError, final boolean drawYError) { + super(axes, id, name, new IndexableNumericDataTable(tableHandle, x, new PlotInfo(axes, name)), + xLow == null ? null : new IndexableNumericDataTable(tableHandle, xLow, new PlotInfo(axes, name)), + xHigh == null ? null : new IndexableNumericDataTable(tableHandle, xHigh, new PlotInfo(axes, name)), + new IndexableNumericDataTable(tableHandle, y, new PlotInfo(axes, name)), + yLow == null ? null : new IndexableNumericDataTable(tableHandle, yLow, new PlotInfo(axes, name)), + yHigh == null ? 
null : new IndexableNumericDataTable(tableHandle, yHigh, new PlotInfo(axes, name)), + drawXError, drawYError); this.tableHandle = tableHandle; this.x = x; @@ -49,8 +42,7 @@ public XYErrorBarDataSeriesTableArray(final AxesImpl axes, final int id, final C this.yHigh = yHigh; } - private XYErrorBarDataSeriesTableArray(final XYErrorBarDataSeriesTableArray series, - final AxesImpl axes) { + private XYErrorBarDataSeriesTableArray(final XYErrorBarDataSeriesTableArray series, final AxesImpl axes) { super(series, axes); this.tableHandle = series.tableHandle; this.x = series.x; diff --git a/Plot/src/main/java/io/deephaven/db/plot/errors/PlotIllegalArgumentException.java b/Plot/src/main/java/io/deephaven/db/plot/errors/PlotIllegalArgumentException.java index 8009fec5615..c077e50cee6 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/errors/PlotIllegalArgumentException.java +++ b/Plot/src/main/java/io/deephaven/db/plot/errors/PlotIllegalArgumentException.java @@ -9,13 +9,12 @@ */ public class PlotIllegalArgumentException extends IllegalArgumentException { - public PlotIllegalArgumentException(final String exception, - final PlotExceptionCause exceptionCause) { + public PlotIllegalArgumentException(final String exception, final PlotExceptionCause exceptionCause) { this(exception, exceptionCause == null ? null : exceptionCause.getPlotInfo()); } public PlotIllegalArgumentException(final String exception, final PlotInfo plotInfo) { - super("" + (plotInfo == null || plotInfo.toString() == null ? "" - : "Plot Information: " + plotInfo + " ") + exception); + super("" + (plotInfo == null || plotInfo.toString() == null ? 
"" : "Plot Information: " + plotInfo + " ") + + exception); } } diff --git a/Plot/src/main/java/io/deephaven/db/plot/errors/PlotIllegalStateException.java b/Plot/src/main/java/io/deephaven/db/plot/errors/PlotIllegalStateException.java index e325cd2e21e..4e6dcf52d94 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/errors/PlotIllegalStateException.java +++ b/Plot/src/main/java/io/deephaven/db/plot/errors/PlotIllegalStateException.java @@ -9,8 +9,7 @@ */ public class PlotIllegalStateException extends IllegalStateException { - public PlotIllegalStateException(final String exception, - final PlotExceptionCause exceptionCause) { + public PlotIllegalStateException(final String exception, final PlotExceptionCause exceptionCause) { this(exception, exceptionCause == null ? null : exceptionCause.getPlotInfo()); } @@ -19,7 +18,7 @@ public PlotIllegalStateException(final PlotInfo exceptionCause) { } public PlotIllegalStateException(final String exception, final PlotInfo plotInfo) { - super("" + (plotInfo == null || plotInfo.toString() == null ? "" - : "Plot Information: " + plotInfo + " ") + exception); + super("" + (plotInfo == null || plotInfo.toString() == null ? "" : "Plot Information: " + plotInfo + " ") + + exception); } } diff --git a/Plot/src/main/java/io/deephaven/db/plot/errors/PlotInfo.java b/Plot/src/main/java/io/deephaven/db/plot/errors/PlotInfo.java index b50bda0e466..4bedfeb0905 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/errors/PlotInfo.java +++ b/Plot/src/main/java/io/deephaven/db/plot/errors/PlotInfo.java @@ -11,8 +11,8 @@ import java.io.Serializable; /** - * Information about a plot. Has 3 pieces of information (all optional) Figure title, if it has one - * Chart title, if it has one Series name + * Information about a plot. 
Has 3 pieces of information (all optional) Figure title, if it has one Chart title, if it + * has one Series name */ public class PlotInfo implements Serializable, LogOutputAppendable { @@ -22,19 +22,16 @@ public PlotInfo(final AxesImpl axes, final Comparable seriesName) { this(getAxesFigure(axes), getAxesChart(axes), seriesName); } - public PlotInfo(final BaseFigureImpl figure, final ChartImpl chart, - final SeriesInternal series) { + public PlotInfo(final BaseFigureImpl figure, final ChartImpl chart, final SeriesInternal series) { this(figure, chart, series == null ? null : series.name()); } - public PlotInfo(final BaseFigureImpl figure, final ChartImpl chart, - final Comparable seriesName) { + public PlotInfo(final BaseFigureImpl figure, final ChartImpl chart, final Comparable seriesName) { this(figure, chart, seriesName == null ? null : seriesName.toString()); } public PlotInfo(final BaseFigureImpl figure, final ChartImpl chart, final String seriesName) { - this(figure == null ? null : figure.getTitle(), chart == null ? null : chart.getTitle(), - seriesName); + this(figure == null ? null : figure.getTitle(), chart == null ? null : chart.getTitle(), seriesName); } public PlotInfo(final String figureName, final String chartName, final String seriesName) { @@ -50,8 +47,7 @@ private static ChartImpl getAxesChart(final AxesImpl axes) { return axes == null ? 
null : axes.chart(); } - private String encodeInfo(final String figureName, final String chartName, - final String seriesName) { + private String encodeInfo(final String figureName, final String chartName, final String seriesName) { String info = ""; if (figureName != null) { diff --git a/Plot/src/main/java/io/deephaven/db/plot/errors/PlotRenderingException.java b/Plot/src/main/java/io/deephaven/db/plot/errors/PlotRenderingException.java index a05d04ec62d..3bdf1542633 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/errors/PlotRenderingException.java +++ b/Plot/src/main/java/io/deephaven/db/plot/errors/PlotRenderingException.java @@ -14,7 +14,6 @@ public PlotRenderingException(final PlotRuntimeException e) { } public PlotRenderingException(final Throwable cause, final PlotInfo plotInfo) { - super("" + (plotInfo == null || plotInfo.toString() == null ? "" - : "Plot Information: " + plotInfo), cause); + super("" + (plotInfo == null || plotInfo.toString() == null ? "" : "Plot Information: " + plotInfo), cause); } } diff --git a/Plot/src/main/java/io/deephaven/db/plot/errors/PlotRuntimeException.java b/Plot/src/main/java/io/deephaven/db/plot/errors/PlotRuntimeException.java index c23f2bc8d4a..14b6ad6124b 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/errors/PlotRuntimeException.java +++ b/Plot/src/main/java/io/deephaven/db/plot/errors/PlotRuntimeException.java @@ -16,18 +16,17 @@ public PlotRuntimeException(final String exception, final PlotExceptionCause exc } public PlotRuntimeException(final String exception, final Throwable cause, - final PlotExceptionCause exceptionCause) { + final PlotExceptionCause exceptionCause) { this(exception, cause, exception == null ? null : exceptionCause.getPlotInfo()); } - public PlotRuntimeException(final String exception, final Throwable cause, - final PlotInfo plotInfo) { - super("" + (plotInfo == null || plotInfo.toString() == null ? 
"" - : "Plot Information: " + plotInfo + " ") + exception, cause); + public PlotRuntimeException(final String exception, final Throwable cause, final PlotInfo plotInfo) { + super("" + (plotInfo == null || plotInfo.toString() == null ? "" : "Plot Information: " + plotInfo + " ") + + exception, cause); } public PlotRuntimeException(final String exception, final PlotInfo plotInfo) { - super("" + (plotInfo == null || plotInfo.toString() == null ? "" - : "Plot Information: " + plotInfo + " ") + exception); + super("" + (plotInfo == null || plotInfo.toString() == null ? "" : "Plot Information: " + plotInfo + " ") + + exception); } } diff --git a/Plot/src/main/java/io/deephaven/db/plot/errors/PlotUnsupportedOperationException.java b/Plot/src/main/java/io/deephaven/db/plot/errors/PlotUnsupportedOperationException.java index bfef53f1e4d..9b0fb125b98 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/errors/PlotUnsupportedOperationException.java +++ b/Plot/src/main/java/io/deephaven/db/plot/errors/PlotUnsupportedOperationException.java @@ -9,24 +9,22 @@ */ public class PlotUnsupportedOperationException extends UnsupportedOperationException { - public PlotUnsupportedOperationException(final String exception, - final PlotExceptionCause exceptionCause) { + public PlotUnsupportedOperationException(final String exception, final PlotExceptionCause exceptionCause) { this(exception, exceptionCause == null ? null : exceptionCause.getPlotInfo()); } public PlotUnsupportedOperationException(final String exception, final Throwable cause, - final PlotExceptionCause exceptionCause) { + final PlotExceptionCause exceptionCause) { this(exception, cause, exception == null ? null : exceptionCause.getPlotInfo()); } - public PlotUnsupportedOperationException(final String exception, final Throwable cause, - final PlotInfo plotInfo) { - super("" + (plotInfo == null || plotInfo.toString() == null ? 
"" - : "Plot Information: " + plotInfo + " ") + exception, cause); + public PlotUnsupportedOperationException(final String exception, final Throwable cause, final PlotInfo plotInfo) { + super("" + (plotInfo == null || plotInfo.toString() == null ? "" : "Plot Information: " + plotInfo + " ") + + exception, cause); } public PlotUnsupportedOperationException(final String exception, final PlotInfo plotInfo) { - super("" + (plotInfo == null || plotInfo.toString() == null ? "" - : "Plot Information: " + plotInfo + " ") + exception); + super("" + (plotInfo == null || plotInfo.toString() == null ? "" : "Plot Information: " + plotInfo + " ") + + exception); } } diff --git a/Plot/src/main/java/io/deephaven/db/plot/filters/SelectableDataSet.java b/Plot/src/main/java/io/deephaven/db/plot/filters/SelectableDataSet.java index d63db9affcd..302919e0368 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/filters/SelectableDataSet.java +++ b/Plot/src/main/java/io/deephaven/db/plot/filters/SelectableDataSet.java @@ -27,24 +27,22 @@ public interface SelectableDataSet { * Gets the view of the {@link Table} with the selected subset. * * @param chart chart - * @param tableTransform tableTransform applied to the tables in tableMaps. The purpose of this - * transform is to track the table definitions for tables inside tableMap + * @param tableTransform tableTransform applied to the tables in tableMaps. The purpose of this transform is to + * track the table definitions for tables inside tableMap * @param cols selected columns * @return table view on selected subset */ SwappableTable getSwappableTable(final Comparable seriesName, final ChartImpl chart, - Function tableTransform, final String... cols); + Function tableTransform, final String... cols); /** - * Gets the view of the {@link Table} with the selected subset. The table transform is the - * identity function. + * Gets the view of the {@link Table} with the selected subset. The table transform is the identity function. 
* * @param chart chart * @param cols selected columns * @return table view on selected subset */ - default SwappableTable getSwappableTable(final Comparable seriesName, final ChartImpl chart, - final String... cols) { + default SwappableTable getSwappableTable(final Comparable seriesName, final ChartImpl chart, final String... cols) { return getSwappableTable(seriesName, chart, null, cols); } @@ -54,24 +52,20 @@ default SwappableTable getSwappableTable(final Comparable seriesName, final Char * @param groupByColumns The grouping columns for the lastBy * @return a new SelectableDataSet with lastBy applied * - * @deprecated This method will be removed in a future release, use - * {@link #transform(Object, Function)} instead. + * @deprecated This method will be removed in a future release, use {@link #transform(Object, Function)} instead. */ @Deprecated() - default SelectableDataSet getLastBy( - final Collection groupByColumns) { + default SelectableDataSet getLastBy(final Collection groupByColumns) { return transform(groupByColumns, t -> t.lastBy(groupByColumns)); } /** - * Produces a derivative {@link SelectableDataSet} with the specified transformation applied to - * all internal tables. + * Produces a derivative {@link SelectableDataSet} with the specified transformation applied to all internal tables. * - * @param memoKey An Object that uniquely identifies the actions taken by the transformation so - * it can be cached. + * @param memoKey An Object that uniquely identifies the actions taken by the transformation so it can be cached. 
* * @return a new {@link SelectableDataSet} with the transformation applied */ SelectableDataSet transform(@NotNull Object memoKey, - @NotNull Function transformation); + @NotNull Function transformation); } diff --git a/Plot/src/main/java/io/deephaven/db/plot/filters/SelectableDataSetOneClick.java b/Plot/src/main/java/io/deephaven/db/plot/filters/SelectableDataSetOneClick.java index 308a3073862..69866895a00 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/filters/SelectableDataSetOneClick.java +++ b/Plot/src/main/java/io/deephaven/db/plot/filters/SelectableDataSetOneClick.java @@ -26,9 +26,8 @@ /** * A OneClick filtered table. * - * If requireAllFiltersToDisplay is true, data is only displayed once the user has OneClick filtered - * all byColumns. If requireAllFiltersToDisplay is false, data is displayed when not all oneclicks - * are selected + * If requireAllFiltersToDisplay is true, data is only displayed once the user has OneClick filtered all byColumns. If + * requireAllFiltersToDisplay is false, data is displayed when not all oneclicks are selected */ public class SelectableDataSetOneClick implements SelectableDataSet> { private final TableMap tableMap; @@ -36,46 +35,43 @@ public class SelectableDataSetOneClick implements SelectableDataSet>>> transformationCache = - new HashMap<>(); + new HashMap<>(); /** * Creates a SelectableDataSetOneClick instance. * - * Listens for OneClick events for the specified {@code byColumns} and calculates the - * {@link SwappableTable} by filtering the {@code table}. The {@link SwappableTable} will be - * null until all {@code byColumns} have a corresponding OneClick filter. + * Listens for OneClick events for the specified {@code byColumns} and calculates the {@link SwappableTable} by + * filtering the {@code table}. The {@link SwappableTable} will be null until all {@code byColumns} have a + * corresponding OneClick filter. 
* - * @throws io.deephaven.base.verify.RequirementFailure {@code tableMap}, {@code table} and - * {@code byColumns} must not be null + * @throws io.deephaven.base.verify.RequirementFailure {@code tableMap}, {@code table} and {@code byColumns} must + * not be null * @param tableMap table map * @param tableMapTableDefinition table definition * @param byColumns selected columns */ - public SelectableDataSetOneClick(TableMap tableMap, TableDefinition tableMapTableDefinition, - String[] byColumns) { + public SelectableDataSetOneClick(TableMap tableMap, TableDefinition tableMapTableDefinition, String[] byColumns) { this(tableMap, tableMapTableDefinition, byColumns, true); } /** * Creates a SelectableDataSetOneClick instance. * - * Listens for OneClick events for the specified {@code byColumns} and calculates the - * {@link SwappableTable} by filtering the {@code table}. If {@code requireAllFiltersToDisplay} - * is true, the {@link SwappableTable} will be null until all {@code byColumns} have a - * corresponding OneClick filter. If {@code requireAllFiltersToDisplay} is false, the - * {@link SwappableTable} will be calculated by filtering the {@code table} with the OneClicks - * used. + * Listens for OneClick events for the specified {@code byColumns} and calculates the {@link SwappableTable} by + * filtering the {@code table}. If {@code requireAllFiltersToDisplay} is true, the {@link SwappableTable} will be + * null until all {@code byColumns} have a corresponding OneClick filter. If {@code requireAllFiltersToDisplay} is + * false, the {@link SwappableTable} will be calculated by filtering the {@code table} with the OneClicks used. 
* - * @throws io.deephaven.base.verify.RequirementFailure {@code tableMap}, {@code table} and - * {@code byColumns} must not be null + * @throws io.deephaven.base.verify.RequirementFailure {@code tableMap}, {@code table} and {@code byColumns} must + * not be null * @param tableMap table map * @param tableMapTableDefinition TableDefinition of the underlying table * @param byColumns selected columns - * @param requireAllFiltersToDisplay false to display data when not all oneclicks are selected; - * true to only display data when appropriate oneclicks are selected + * @param requireAllFiltersToDisplay false to display data when not all oneclicks are selected; true to only display + * data when appropriate oneclicks are selected */ - public SelectableDataSetOneClick(TableMap tableMap, TableDefinition tableMapTableDefinition, - String[] byColumns, final boolean requireAllFiltersToDisplay) { + public SelectableDataSetOneClick(TableMap tableMap, TableDefinition tableMapTableDefinition, String[] byColumns, + final boolean requireAllFiltersToDisplay) { Require.neqNull(tableMap, "tableMap"); Require.neqNull(tableMapTableDefinition, "tableMapTableDefinition"); Require.neqNull(byColumns, "byColumns"); @@ -96,72 +92,69 @@ public String[] getByColumns() { @Override public SwappableTable getSwappableTable(final Comparable seriesName, - final ChartImpl chart, - final Function tableTransform, - final String... cols) { + final ChartImpl chart, + final Function tableTransform, + final String... 
cols) { ArgumentValidations.assertNotNull(chart, "chart", chart.getPlotInfo()); ArgumentValidations.assertNotNull(cols, "cols", chart.getPlotInfo()); final BaseFigureImpl figure = chart.figure(); - final TableDefinition updatedTableDef = - transformTableDefinition(tableMapTableDefinition, tableTransform); + final TableDefinition updatedTableDef = transformTableDefinition(tableMapTableDefinition, tableTransform); final List allCols = new ArrayList<>(Arrays.asList(cols)); allCols.addAll(Arrays.asList(byColumns)); final List missingColumns = allCols.stream() - .filter(col -> updatedTableDef.getColumn(col) == null) - .collect(Collectors.toList()); + .filter(col -> updatedTableDef.getColumn(col) == null) + .collect(Collectors.toList()); if (!missingColumns.isEmpty()) { throw new IllegalStateException("The columns [" + String.join(", ", missingColumns) - + "] do not exist in the resulting table. Available columns are [" + - updatedTableDef.getColumnNamesAsString() + "]"); + + "] do not exist in the resulting table. Available columns are [" + + updatedTableDef.getColumnNamesAsString() + "]"); } final List viewColumns; // If these do not match Then we'll have to use a different set of view columns. 
if (!updatedTableDef.equals(tableMapTableDefinition)) { viewColumns = allCols.stream() - .filter(col -> tableMapTableDefinition.getColumn(col) != null) - .collect(Collectors.toList()); + .filter(col -> tableMapTableDefinition.getColumn(col) != null) + .collect(Collectors.toList()); } else { viewColumns = null; } - final TableMapHandle tableMapHandle = new TableMapBackedTableMapHandle(tableMap, - updatedTableDef, byColumns, chart.getPlotInfo(), allCols, viewColumns); + final TableMapHandle tableMapHandle = new TableMapBackedTableMapHandle(tableMap, updatedTableDef, byColumns, + chart.getPlotInfo(), allCols, viewColumns); tableMapHandle.setTableMap(tableMap); tableMapHandle.setKeyColumnsOrdered(byColumns); tableMapHandle.setOneClickMap(true); - return new SwappableTableOneClickMap(seriesName, figure.getUpdateInterval(), tableMapHandle, - tableTransform, requireAllFiltersToDisplay, byColumns); + return new SwappableTableOneClickMap(seriesName, figure.getUpdateInterval(), tableMapHandle, tableTransform, + requireAllFiltersToDisplay, byColumns); } private TableDefinition transformTableDefinition(TableDefinition toTransform, - Function transformation) { - return transformation != null - ? transformation.apply(TableTools.newTable(toTransform)).getDefinition() - : toTransform; + Function transformation) { + return transformation != null ? 
transformation.apply(TableTools.newTable(toTransform)).getDefinition() + : toTransform; } @Override public SelectableDataSet> transform(@NotNull Object memoKey, - @NotNull Function transformation) { + @NotNull Function transformation) { SelectableDataSet> value = null; - final WeakReference>> reference = - transformationCache.get(memoKey); + final WeakReference>> reference = transformationCache.get(memoKey); if (reference != null) { value = reference.get(); } if (value == null) { value = new SelectableDataSetOneClick(tableMap.transformTables(transformation), - transformTableDefinition(tableMapTableDefinition, transformation), - byColumns, - requireAllFiltersToDisplay); + transformTableDefinition(tableMapTableDefinition, transformation), + byColumns, + requireAllFiltersToDisplay); transformationCache.put(memoKey, new WeakReference<>(value)); } diff --git a/Plot/src/main/java/io/deephaven/db/plot/filters/SelectableDataSetSwappableTable.java b/Plot/src/main/java/io/deephaven/db/plot/filters/SelectableDataSetSwappableTable.java index ff3fa0c9111..4ccbc1a5141 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/filters/SelectableDataSetSwappableTable.java +++ b/Plot/src/main/java/io/deephaven/db/plot/filters/SelectableDataSetSwappableTable.java @@ -10,7 +10,7 @@ import java.util.function.Function; public class SelectableDataSetSwappableTable - implements SelectableDataSet, Serializable { + implements SelectableDataSet, Serializable { private final SwappableTable swappableTable; @@ -25,13 +25,13 @@ public TableDefinition getTableDefinition() { @Override public SelectableDataSet transform(@NotNull Object memoKey, - @NotNull Function transformation) { + @NotNull Function transformation) { throw new UnsupportedOperationException(); } @Override public SwappableTable getSwappableTable(final Comparable seriesName, final ChartImpl chart, - Function tableTransform, final String... col) { + Function tableTransform, final String... 
col) { return swappableTable; } } diff --git a/Plot/src/main/java/io/deephaven/db/plot/filters/Selectables.java b/Plot/src/main/java/io/deephaven/db/plot/filters/Selectables.java index 76f8a25e7a4..65cfaccc852 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/filters/Selectables.java +++ b/Plot/src/main/java/io/deephaven/db/plot/filters/Selectables.java @@ -34,8 +34,7 @@ public static SelectableDataSetOneClick oneClick(final Table t, final String... * @param byColumns selected columns * @return {@link SelectableDataSetOneClick} with the specified table map and columns */ - public static SelectableDataSetOneClick oneClick(final TableMap tMap, final Table t, - final String... byColumns) { + public static SelectableDataSetOneClick oneClick(final TableMap tMap, final Table t, final String... byColumns) { return oneClick(tMap, t.getDefinition(), byColumns); } @@ -46,8 +45,8 @@ public static SelectableDataSetOneClick oneClick(final TableMap tMap, final Tabl * @param byColumns selected columns * @return {@link SelectableDataSetOneClick} with the specified table map and columns */ - public static SelectableDataSetOneClick oneClick(final TableMap tMap, - final TableDefinition tableDefinition, final String... byColumns) { + public static SelectableDataSetOneClick oneClick(final TableMap tMap, final TableDefinition tableDefinition, + final String... 
byColumns) { return oneClick(tMap, tableDefinition, true, byColumns); } @@ -56,12 +55,12 @@ public static SelectableDataSetOneClick oneClick(final TableMap tMap, * * @param t table * @param byColumns selected columns - * @param requireAllFiltersToDisplay false to display data when not all oneclicks are selected; - * true to only display data when appropriate oneclicks are selected + * @param requireAllFiltersToDisplay false to display data when not all oneclicks are selected; true to only display + * data when appropriate oneclicks are selected * @return {@link SelectableDataSetOneClick} with the specified table and columns */ - public static SelectableDataSetOneClick oneClick(final Table t, - final boolean requireAllFiltersToDisplay, final String... byColumns) { + public static SelectableDataSetOneClick oneClick(final Table t, final boolean requireAllFiltersToDisplay, + final String... byColumns) { if (byColumns == null || byColumns.length < 1) { throw new IllegalArgumentException("byColumns can not be empty"); } @@ -76,12 +75,12 @@ public static SelectableDataSetOneClick oneClick(final Table t, * * @param tMap TableMap * @param byColumns selected columns - * @param requireAllFiltersToDisplay false to display data when not all oneclicks are selected; - * true to only display data when appropriate oneclicks are selected + * @param requireAllFiltersToDisplay false to display data when not all oneclicks are selected; true to only display + * data when appropriate oneclicks are selected * @return {@link SelectableDataSetOneClick} with the specified table map and columns */ public static SelectableDataSetOneClick oneClick(final TableMap tMap, final Table t, - final boolean requireAllFiltersToDisplay, final String... byColumns) { + final boolean requireAllFiltersToDisplay, final String... 
byColumns) { return oneClick(tMap, t.getDefinition(), requireAllFiltersToDisplay, byColumns); } @@ -90,19 +89,17 @@ public static SelectableDataSetOneClick oneClick(final TableMap tMap, final Tabl * * @param tMap TableMap * @param byColumns selected columns - * @param requireAllFiltersToDisplay false to display data when not all oneclicks are selected; - * true to only display data when appropriate oneclicks are selected + * @param requireAllFiltersToDisplay false to display data when not all oneclicks are selected; true to only display + * data when appropriate oneclicks are selected * @return {@link SelectableDataSetOneClick} with the specified table map and columns */ - public static SelectableDataSetOneClick oneClick(final TableMap tMap, - final TableDefinition tableDefinition, final boolean requireAllFiltersToDisplay, - final String... byColumns) { + public static SelectableDataSetOneClick oneClick(final TableMap tMap, final TableDefinition tableDefinition, + final boolean requireAllFiltersToDisplay, final String... 
byColumns) { if (byColumns == null || byColumns.length < 1) { throw new IllegalArgumentException("byColumns can not be empty"); } Require.neqNull(tMap, "tMap"); Require.neqNull(tableDefinition, "tableDefinition"); - return new SelectableDataSetOneClick(tMap, tableDefinition, byColumns, - requireAllFiltersToDisplay); + return new SelectableDataSetOneClick(tMap, tableDefinition, byColumns, requireAllFiltersToDisplay); } } diff --git a/Plot/src/main/java/io/deephaven/db/plot/util/ArgumentValidations.java b/Plot/src/main/java/io/deephaven/db/plot/util/ArgumentValidations.java index 4029121cdab..8e80368aa46 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/util/ArgumentValidations.java +++ b/Plot/src/main/java/io/deephaven/db/plot/util/ArgumentValidations.java @@ -24,7 +24,7 @@ */ public class ArgumentValidations { private static final boolean ENABLE_X_VALUE_ASSERTIONS = - Configuration.getInstance().getBooleanWithDefault("plotting.enableXValueAssertions", true); + Configuration.getInstance().getBooleanWithDefault("plotting.enableXValueAssertions", true); /** * Requires the input object {@code o} to be non-null. 
@@ -62,8 +62,7 @@ public static void assertNotNull(Object o, final String message, final PlotInfo * @param message error message * @param plotInfo source of the exception */ - public static void assertNotNullAndNotEmpty(T[] o, final String message, - final PlotInfo plotInfo) { + public static void assertNotNullAndNotEmpty(T[] o, final String message, final PlotInfo plotInfo) { if (o == null || o.length == 0) { throw new PlotIllegalArgumentException("Null or empty input: " + message, plotInfo); } @@ -86,8 +85,7 @@ public static void assertNull(Object o, final String message, final PlotInfo plo * @param message error message * @param plotInfo source of the exception */ - public static void assertInstance(final Class c1, final Class c2, final String message, - final PlotInfo plotInfo) { + public static void assertInstance(final Class c1, final Class c2, final String message, final PlotInfo plotInfo) { assertNotNull(c1, "c1", plotInfo); assertNotNull(c2, "c2", plotInfo); if (!c1.isAssignableFrom(c2)) { @@ -99,22 +97,21 @@ public static void assertInstance(final Class c1, final Class c2, final String m * Requires the {@code column} of be an instance of {@link Class} {@code c}. 
* * @throws NullPointerException {@code t} and {@code c} must not be null - * @throws PlotRuntimeException {@code column} is not a column of {@code t}, or {@code column} - * does not have data type {@code c} + * @throws PlotRuntimeException {@code column} is not a column of {@code t}, or {@code column} does not have data + * type {@code c} * @param t table * @param column column * @param c class * @param message error message * @param plotInfo source of the exception */ - public static void assertInstance(final Table t, final String column, final Class c, - final String message, final PlotInfo plotInfo) { + public static void assertInstance(final Table t, final String column, final Class c, final String message, + final PlotInfo plotInfo) { assertNotNull(t, "t", plotInfo); final DataColumn col = t.getColumn(column); if (col == null) { - throw new PlotRuntimeException("Column not present in table: column=" + column, - plotInfo); + throw new PlotRuntimeException("Column not present in table: column=" + column, plotInfo); } assertInstance(c, ClassUtils.primitiveToWrapper(col.getType()), message, plotInfo); @@ -124,22 +121,21 @@ public static void assertInstance(final Table t, final String column, final Clas * Requires the {@code column} of be an instance of {@link Class} {@code c}. 
* * @throws NullPointerException {@code t} and {@code c} must not be null - * @throws PlotRuntimeException {@code column} is not a column of {@code t}, or {@code column} - * does not have data type {@code c} + * @throws PlotRuntimeException {@code column} is not a column of {@code t}, or {@code column} does not have data + * type {@code c} * @param t table * @param column column * @param c class * @param message error message * @param plotInfo source of the exception */ - public static void assertInstance(final TableDefinition t, final String column, final Class c, - final String message, final PlotInfo plotInfo) { + public static void assertInstance(final TableDefinition t, final String column, final Class c, final String message, + final PlotInfo plotInfo) { assertNotNull(t, "t", plotInfo); final ColumnDefinition col = t.getColumn(column); if (col == null) { - throw new PlotRuntimeException("Column not present in table: column=" + column, - plotInfo); + throw new PlotRuntimeException("Column not present in table: column=" + column, plotInfo); } assertInstance(c, ClassUtils.primitiveToWrapper(col.getDataType()), message, plotInfo); @@ -149,22 +145,21 @@ public static void assertInstance(final TableDefinition t, final String column, * Requires the {@code column} of be an instance of {@link Class} {@code c}. 
* * @throws NullPointerException {@code sds} and {@code c} must not be null - * @throws PlotRuntimeException {@code column} is not a column of {@code t}, or {@code column} - * does not have data type {@code c} + * @throws PlotRuntimeException {@code column} is not a column of {@code t}, or {@code column} does not have data + * type {@code c} * @param sds swappable data set * @param column column * @param c class * @param message error message * @param plotInfo source of the exception */ - public static void assertInstance(final SelectableDataSet sds, final String column, - final Class c, final String message, final PlotInfo plotInfo) { + public static void assertInstance(final SelectableDataSet sds, final String column, final Class c, + final String message, final PlotInfo plotInfo) { assertNotNull(sds, "sds", plotInfo); final ColumnDefinition col = sds.getTableDefinition().getColumn(column); if (col == null) { - throw new PlotRuntimeException("Column not present in table: column=" + column, - plotInfo); + throw new PlotRuntimeException("Column not present in table: column=" + column, plotInfo); } assertInstance(c, ClassUtils.primitiveToWrapper(col.getDataType()), message, plotInfo); @@ -172,116 +167,107 @@ public static void assertInstance(final SelectableDataSet sds, final String colu /** - * Requires the {@code column} of be numeric, or an instance of time, char/{@link Character}, or - * {@link Comparable}. + * Requires the {@code column} of be numeric, or an instance of time, char/{@link Character}, or {@link Comparable}. 
* * @throws NullPointerException {@code t} must not be null - * @throws PlotRuntimeException {@code column} is not a column of {@code t}, or {@code column} - * does not have the correct data type + * @throws PlotRuntimeException {@code column} is not a column of {@code t}, or {@code column} does not have the + * correct data type * @param t table * @param column column * @param plotInfo source of the exception */ - public static void assertIsNumericOrTimeOrCharOrComparableInstance(final Table t, - final String column, final PlotInfo plotInfo) { + public static void assertIsNumericOrTimeOrCharOrComparableInstance(final Table t, final String column, + final PlotInfo plotInfo) { assertNotNull(t, "t", plotInfo); assertIsNumericOrTimeOrCharOrComparableInstance(t, column, - createWrongColumnTypeErrorMessage(t, column, plotInfo), plotInfo); + createWrongColumnTypeErrorMessage(t, column, plotInfo), plotInfo); } /** - * Requires the {@code column} of be numeric, or an instance of time, char/{@link Character}, or - * {@link Comparable}. + * Requires the {@code column} of be numeric, or an instance of time, char/{@link Character}, or {@link Comparable}. 
* * @throws NullPointerException {@code t} must not be null - * @throws PlotRuntimeException {@code column} is not a column of {@code t}, or {@code column} - * does not have the correct data type + * @throws PlotRuntimeException {@code column} is not a column of {@code t}, or {@code column} does not have the + * correct data type * @param t table * @param column column * @param plotInfo source of the exception */ - public static void assertIsNumericOrTimeOrCharOrComparableInstance(final TableDefinition t, - final String column, final PlotInfo plotInfo) { + public static void assertIsNumericOrTimeOrCharOrComparableInstance(final TableDefinition t, final String column, + final PlotInfo plotInfo) { assertNotNull(t, "t", plotInfo); assertIsNumericOrTimeOrCharOrComparableInstance(t, column, - createWrongColumnTypeErrorMessage(t, column, plotInfo), plotInfo); + createWrongColumnTypeErrorMessage(t, column, plotInfo), plotInfo); } /** - * Requires the {@code column} of be numeric, or an instance of time, char/{@link Character}, or - * {@link Comparable}. + * Requires the {@code column} of be numeric, or an instance of time, char/{@link Character}, or {@link Comparable}. 
* * @throws NullPointerException {@code t} must not be null - * @throws PlotRuntimeException {@code column} is not a column of {@code t}, or {@code column} - * does not have the correct data type + * @throws PlotRuntimeException {@code column} is not a column of {@code t}, or {@code column} does not have the + * correct data type * @param t table * @param column column * @param message error message * @param plotInfo source of the exception */ - public static void assertIsNumericOrTimeOrCharOrComparableInstance(final Table t, - final String column, final String message, final PlotInfo plotInfo) { + public static void assertIsNumericOrTimeOrCharOrComparableInstance(final Table t, final String column, + final String message, final PlotInfo plotInfo) { assertNotNull(t, "t", plotInfo); final DataColumn col = t.getColumn(column); if (col == null) { - throw new PlotRuntimeException("Column not present in table: column=" + column, - plotInfo); + throw new PlotRuntimeException("Column not present in table: column=" + column, plotInfo); } - if (!isNumericOrTime(col.getType(), plotInfo) - && !Comparable.class.isAssignableFrom(col.getType()) - && !TypeUtils.isCharacter(col.getType())) { + if (!isNumericOrTime(col.getType(), plotInfo) && !Comparable.class.isAssignableFrom(col.getType()) + && !TypeUtils.isCharacter(col.getType())) { throw new PlotRuntimeException(message, plotInfo); } } /** - * Requires the {@code column} of be numeric, or an instance of time, char/{@link Character}, or - * {@link Comparable}. + * Requires the {@code column} of be numeric, or an instance of time, char/{@link Character}, or {@link Comparable}. 
* * @throws NullPointerException {@code t} must not be null - * @throws PlotRuntimeException {@code column} is not a column of {@code t}, or {@code column} - * does not have the correct data type + * @throws PlotRuntimeException {@code column} is not a column of {@code t}, or {@code column} does not have the + * correct data type * @param t table * @param column column * @param message error message * @param plotInfo source of the exception */ - public static void assertIsNumericOrTimeOrCharOrComparableInstance(final TableDefinition t, - final String column, final String message, final PlotInfo plotInfo) { + public static void assertIsNumericOrTimeOrCharOrComparableInstance(final TableDefinition t, final String column, + final String message, final PlotInfo plotInfo) { assertNotNull(t, "t", plotInfo); final ColumnDefinition col = t.getColumn(column); if (col == null) { - throw new PlotRuntimeException("Column not present in table: column=" + column, - plotInfo); + throw new PlotRuntimeException("Column not present in table: column=" + column, plotInfo); } - if (!isNumericOrTime(col.getDataType(), plotInfo) - && !Comparable.class.isAssignableFrom(col.getDataType()) - && !TypeUtils.isCharacter(col.getDataType())) { + if (!isNumericOrTime(col.getDataType(), plotInfo) && !Comparable.class.isAssignableFrom(col.getDataType()) + && !TypeUtils.isCharacter(col.getDataType())) { throw new PlotRuntimeException(message, plotInfo); } } /** - * Requires {@code data} and {@code dataNames} 1. contain the same number of members. 2. contain - * no null members + * Requires {@code data} and {@code dataNames} 1. contain the same number of members. 2. contain no null members * * All members of {@code data} must be the same size. 
* * @throws PlotIllegalStateException {@code data} and {@code dataNames} are not the same size - * @throws PlotIllegalArgumentException a member of {@code data} or {@code dataNames} is null 2+ - * members of {@code data} are not the same size + * @throws PlotIllegalArgumentException a member of {@code data} or {@code dataNames} is null 2+ members of + * {@code data} are not the same size * * @param data array of {@link IndexableNumericData} * @param dataNames names for each {@link IndexableNumericData} * @param plotInfo source of the exception */ public static void assertSameSize(final IndexableNumericData[] data, final String[] dataNames, - final PlotInfo plotInfo) { + final PlotInfo plotInfo) { assertNotNull(data, "data", plotInfo); assertNotNull(dataNames, "dataNames", plotInfo); if (data.length != dataNames.length) { @@ -294,9 +280,8 @@ public static void assertSameSize(final IndexableNumericData[] data, final Strin for (int i = 0; i < data.length; i++) { if (data[i].size() != data[0].size()) { - throw new PlotIllegalArgumentException("Input data is of inconsistent size: (" - + dataNames[i] + "=" + data[i] + "," + dataNames[0] + "=" + data[0] + ")", - plotInfo); + throw new PlotIllegalArgumentException("Input data is of inconsistent size: (" + dataNames[i] + "=" + + data[i] + "," + dataNames[0] + "=" + data[0] + ")", plotInfo); } } } @@ -315,8 +300,7 @@ public static Class getColumnType(final Table t, final String column, final Plot final DataColumn col = t.getColumn(column); if (col == null) { - throw new PlotRuntimeException("Column not present in table: column=" + column, - plotInfo); + throw new PlotRuntimeException("Column not present in table: column=" + column, plotInfo); } return col.getType(); @@ -331,14 +315,12 @@ public static Class getColumnType(final Table t, final String column, final Plot * @param plotInfo source of the exception * @return data type of {@code column} */ - public static Class getColumnType(final TableDefinition t, final String 
column, - final PlotInfo plotInfo) { + public static Class getColumnType(final TableDefinition t, final String column, final PlotInfo plotInfo) { assertNotNull(t, "t", plotInfo); final ColumnDefinition col = t.getColumn(column); if (col == null) { - throw new PlotRuntimeException("Column not present in table: column=" + column, - plotInfo); + throw new PlotRuntimeException("Column not present in table: column=" + column, plotInfo); } return col.getDataType(); @@ -353,14 +335,12 @@ public static Class getColumnType(final TableDefinition t, final String column, * @param plotInfo source of the exception * @return data type of {@code column} */ - public static Class getColumnType(final SelectableDataSet sds, final String column, - final PlotInfo plotInfo) { + public static Class getColumnType(final SelectableDataSet sds, final String column, final PlotInfo plotInfo) { assertNotNull(sds, "sds", plotInfo); final ColumnDefinition col = sds.getTableDefinition().getColumn(column); if (col == null) { - throw new PlotRuntimeException("Column not present in table: column=" + column, - plotInfo); + throw new PlotRuntimeException("Column not present in table: column=" + column, plotInfo); } return col.getDataType(); @@ -419,8 +399,7 @@ public static boolean isTime(final Table t, final String column, final PlotInfo * @param plotInfo source of the exception * @return true if the column's data type equals Date.class or DBDateTime.class, false otherwise */ - public static boolean isTime(final TableDefinition t, final String column, - final PlotInfo plotInfo) { + public static boolean isTime(final TableDefinition t, final String column, final PlotInfo plotInfo) { return isTime(getColumnType(t, column, plotInfo), plotInfo); } @@ -432,8 +411,7 @@ public static boolean isTime(final TableDefinition t, final String column, * @param plotInfo source of the exception * @return true if the column's data type equals Date.class or DBDateTime.class, false otherwise */ - public static boolean 
isTime(final SelectableDataSet sds, final String column, - final PlotInfo plotInfo) { + public static boolean isTime(final SelectableDataSet sds, final String column, final PlotInfo plotInfo) { return isTime(getColumnType(sds, column, plotInfo), plotInfo); } @@ -445,8 +423,7 @@ public static boolean isTime(final SelectableDataSet sds, final String column, * @param plotInfo source of the exception * @return true if the column's data type is a numeric primitive, false otherwise */ - public static boolean isPrimitiveNumeric(final Table t, final String column, - final PlotInfo plotInfo) { + public static boolean isPrimitiveNumeric(final Table t, final String column, final PlotInfo plotInfo) { assertNotNull(t, "t", plotInfo); return TypeUtils.isPrimitiveNumeric(getColumnType(t, column, plotInfo)); } @@ -459,8 +436,7 @@ public static boolean isPrimitiveNumeric(final Table t, final String column, * @param plotInfo source of the exception * @return true if the column's data type is a boxed numeric, false otherwise */ - public static boolean isBoxedNumeric(final Table t, final String column, - final PlotInfo plotInfo) { + public static boolean isBoxedNumeric(final Table t, final String column, final PlotInfo plotInfo) { assertNotNull(t, "t", plotInfo); return TypeUtils.isBoxedNumeric(getColumnType(t, column, plotInfo)); } @@ -486,8 +462,7 @@ public static boolean isNumeric(final Table t, final String column, final PlotIn * @param plotInfo source of the exception * @return true if the column's data type is numeric, false otherwise */ - public static boolean isNumeric(final TableDefinition t, final String column, - final PlotInfo plotInfo) { + public static boolean isNumeric(final TableDefinition t, final String column, final PlotInfo plotInfo) { assertNotNull(t, "t", plotInfo); return TypeUtils.isNumeric(getColumnType(t, column, plotInfo)); } @@ -500,8 +475,7 @@ public static boolean isNumeric(final TableDefinition t, final String column, * @param plotInfo source of the 
exception * @return true if the column's data type is numeric, false otherwise */ - public static boolean isNumeric(final SelectableDataSet sds, final String column, - final PlotInfo plotInfo) { + public static boolean isNumeric(final SelectableDataSet sds, final String column, final PlotInfo plotInfo) { assertNotNull(sds, "t", plotInfo); return TypeUtils.isNumeric(getColumnType(sds, column, plotInfo)); } @@ -514,8 +488,7 @@ public static boolean isNumeric(final SelectableDataSet sds, final String column * @param plotInfo source of the exception * @return true if the column's data type is a numeric or time instance, false otherwise */ - public static boolean isNumericOrTime(final Table t, final String column, - final PlotInfo plotInfo) { + public static boolean isNumericOrTime(final Table t, final String column, final PlotInfo plotInfo) { assertNotNull(t, "t", plotInfo); return isNumericOrTime(getColumnType(t, column, plotInfo), plotInfo); } @@ -528,8 +501,7 @@ public static boolean isNumericOrTime(final Table t, final String column, * @param plotInfo source of the exception * @return true if the column's data type is a numeric or time instance, false otherwise */ - public static boolean isNumericOrTime(final TableDefinition t, final String column, - final PlotInfo plotInfo) { + public static boolean isNumericOrTime(final TableDefinition t, final String column, final PlotInfo plotInfo) { assertNotNull(t, "t", plotInfo); return isNumericOrTime(getColumnType(t, column, plotInfo), plotInfo); } @@ -542,15 +514,13 @@ public static boolean isNumericOrTime(final TableDefinition t, final String colu * @param plotInfo source of the exception * @return true if the column's data type is a numeric or time instance, false otherwise */ - public static boolean isNumericOrTime(final SelectableDataSet sds, final String column, - final PlotInfo plotInfo) { + public static boolean isNumericOrTime(final SelectableDataSet sds, final String column, final PlotInfo plotInfo) { 
assertNotNull(sds, "sds", plotInfo); return isNumericOrTime(getColumnType(sds, column, plotInfo), plotInfo); } /** - * Requires the column's data type to be a time instance as defined in - * {@link #isTime(Class, PlotInfo)} + * Requires the column's data type to be a time instance as defined in {@link #isTime(Class, PlotInfo)} * * @throws RuntimeException if the column's data type isn't a time instance * @param t table @@ -563,23 +533,20 @@ public static void assertIsTime(final Table t, final String column, final PlotIn } /** - * Requires the column's data type to be a time instance as defined in - * {@link #isTime(Class, PlotInfo)} + * Requires the column's data type to be a time instance as defined in {@link #isTime(Class, PlotInfo)} * * @throws RuntimeException if the column's data type isn't a time instance * @param t table * @param column column * @param plotInfo source of the exception */ - public static void assertIsTime(final TableDefinition t, final String column, - final PlotInfo plotInfo) { + public static void assertIsTime(final TableDefinition t, final String column, final PlotInfo plotInfo) { assertNotNull(t, "t", plotInfo); assertIsTime(t, column, createWrongColumnTypeErrorMessage(t, column, plotInfo), plotInfo); } /** - * Requires the column's data type to be a time instance as defined in - * {@link #isTime(Class, PlotInfo)} + * Requires the column's data type to be a time instance as defined in {@link #isTime(Class, PlotInfo)} * * @throws RuntimeException if the column's data type isn't a time instance * @param t table @@ -587,8 +554,7 @@ public static void assertIsTime(final TableDefinition t, final String column, * @param plotInfo source of the exception * @param message error message */ - public static void assertIsTime(final Table t, final String column, final String message, - final PlotInfo plotInfo) { + public static void assertIsTime(final Table t, final String column, final String message, final PlotInfo plotInfo) { assertNotNull(t, "t", 
plotInfo); if (!isTime(t, column, plotInfo)) { throw new PlotRuntimeException(message, plotInfo); @@ -596,8 +562,7 @@ public static void assertIsTime(final Table t, final String column, final String } /** - * Requires the column's data type to be a time instance as defined in - * {@link #isTime(Class, PlotInfo)} + * Requires the column's data type to be a time instance as defined in {@link #isTime(Class, PlotInfo)} * * @throws RuntimeException if the column's data type isn't a time instance * @param t table @@ -605,8 +570,8 @@ public static void assertIsTime(final Table t, final String column, final String * @param plotInfo source of the exception * @param message error message */ - public static void assertIsTime(final TableDefinition t, final String column, - final String message, final PlotInfo plotInfo) { + public static void assertIsTime(final TableDefinition t, final String column, final String message, + final PlotInfo plotInfo) { assertNotNull(t, "t", plotInfo); if (!isTime(t, column, plotInfo)) { throw new PlotRuntimeException(message, plotInfo); @@ -622,11 +587,9 @@ public static void assertIsTime(final TableDefinition t, final String column, * @param column column * @param plotInfo source of the exception */ - public static void assertIsPrimitiveNumeric(final Table t, final String column, - final PlotInfo plotInfo) { + public static void assertIsPrimitiveNumeric(final Table t, final String column, final PlotInfo plotInfo) { assertNotNull(t, "t", plotInfo); - assertIsPrimitiveNumeric(t, column, createWrongColumnTypeErrorMessage(t, column, plotInfo), - plotInfo); + assertIsPrimitiveNumeric(t, column, createWrongColumnTypeErrorMessage(t, column, plotInfo), plotInfo); } /** @@ -639,8 +602,8 @@ public static void assertIsPrimitiveNumeric(final Table t, final String column, * @param plotInfo source of the exception * @param message error message */ - public static void assertIsPrimitiveNumeric(final Table t, final String column, - final String message, final 
PlotInfo plotInfo) { + public static void assertIsPrimitiveNumeric(final Table t, final String column, final String message, + final PlotInfo plotInfo) { assertNotNull(t, "t", plotInfo); if (!isPrimitiveNumeric(t, column, plotInfo)) { throw new PlotRuntimeException(message, plotInfo); @@ -656,11 +619,9 @@ public static void assertIsPrimitiveNumeric(final Table t, final String column, * @param column column * @param plotInfo source of the exception */ - public static void assertIsBoxedNumeric(final Table t, final String column, - final PlotInfo plotInfo) { + public static void assertIsBoxedNumeric(final Table t, final String column, final PlotInfo plotInfo) { assertNotNull(t, "t", plotInfo); - assertIsBoxedNumeric(t, column, createWrongColumnTypeErrorMessage(t, column, plotInfo), - plotInfo); + assertIsBoxedNumeric(t, column, createWrongColumnTypeErrorMessage(t, column, plotInfo), plotInfo); } /** @@ -673,8 +634,8 @@ public static void assertIsBoxedNumeric(final Table t, final String column, * @param message error message * @param plotInfo source of the exception */ - public static void assertIsBoxedNumeric(final Table t, final String column, - final String message, final PlotInfo plotInfo) { + public static void assertIsBoxedNumeric(final Table t, final String column, final String message, + final PlotInfo plotInfo) { assertNotNull(t, "t", plotInfo); if (!isBoxedNumeric(t, column, plotInfo)) { throw new PlotRuntimeException(message, plotInfo); @@ -683,42 +644,35 @@ public static void assertIsBoxedNumeric(final Table t, final String column, /** - * Requires the column's data type to be a numeric instance as defined in - * {@link TypeUtils#isNumeric(Class)} + * Requires the column's data type to be a numeric instance as defined in {@link TypeUtils#isNumeric(Class)} * * @throws PlotRuntimeException if the column's data type isn't a numeric instance * @param t table * @param column column * @param plotInfo source of the exception */ - public static void 
assertIsNumeric(final Table t, final String column, - final PlotInfo plotInfo) { + public static void assertIsNumeric(final Table t, final String column, final PlotInfo plotInfo) { assertNotNull(t, "t", plotInfo); - assertIsNumeric(t, column, createWrongColumnTypeErrorMessage(t, column, plotInfo), - plotInfo); + assertIsNumeric(t, column, createWrongColumnTypeErrorMessage(t, column, plotInfo), plotInfo); } /** - * Requires the column's data type to be a numeric instance as defined in - * {@link TypeUtils#isNumeric(Class)} + * Requires the column's data type to be a numeric instance as defined in {@link TypeUtils#isNumeric(Class)} * * @throws PlotRuntimeException if the column's data type isn't a numeric instance * @param t table * @param column column * @param plotInfo source of the exception */ - public static void assertIsNumeric(final TableDefinition t, final String column, - final PlotInfo plotInfo) { + public static void assertIsNumeric(final TableDefinition t, final String column, final PlotInfo plotInfo) { assertNotNull(t, "t", plotInfo); - assertIsNumeric(t, column, createWrongColumnTypeErrorMessage(t, column, plotInfo), - plotInfo); + assertIsNumeric(t, column, createWrongColumnTypeErrorMessage(t, column, plotInfo), plotInfo); } /** - * Requires the column's data type to be a numeric instance as defined in - * {@link TypeUtils#isNumeric(Class)} + * Requires the column's data type to be a numeric instance as defined in {@link TypeUtils#isNumeric(Class)} * * @throws PlotRuntimeException if the column's data type isn't a numeric instance * @param t table @@ -727,7 +681,7 @@ public static void assertIsNumeric(final TableDefinition t, final String column, * @param plotInfo source of the exception */ public static void assertIsNumeric(final Table t, final String column, final String message, - final PlotInfo plotInfo) { + final PlotInfo plotInfo) { assertNotNull(t, "t", plotInfo); if (!isNumeric(t, column, plotInfo)) { throw new PlotRuntimeException(message, 
plotInfo); @@ -736,8 +690,7 @@ public static void assertIsNumeric(final Table t, final String column, final Str /** - * Requires the column's data type to be a numeric instance as defined in - * {@link TypeUtils#isNumeric(Class)} + * Requires the column's data type to be a numeric instance as defined in {@link TypeUtils#isNumeric(Class)} * * @throws PlotRuntimeException if the column's data type isn't a numeric instance * @param t table @@ -745,8 +698,8 @@ public static void assertIsNumeric(final Table t, final String column, final Str * @param message error message * @param plotInfo source of the exception */ - public static void assertIsNumeric(final TableDefinition t, final String column, - final String message, final PlotInfo plotInfo) { + public static void assertIsNumeric(final TableDefinition t, final String column, final String message, + final PlotInfo plotInfo) { assertNotNull(t, "t", plotInfo); if (!isNumeric(t, column, plotInfo)) { throw new PlotRuntimeException(message, plotInfo); @@ -755,8 +708,7 @@ public static void assertIsNumeric(final TableDefinition t, final String column, /** - * Requires the column's data type to be a numeric instance as defined in - * {@link TypeUtils#isNumeric(Class)} + * Requires the column's data type to be a numeric instance as defined in {@link TypeUtils#isNumeric(Class)} * * @throws PlotRuntimeException if the column's data type isn't a numeric instance * @param sds selectable dataset @@ -764,8 +716,8 @@ public static void assertIsNumeric(final TableDefinition t, final String column, * @param message error message * @param plotInfo source of the exception */ - public static void assertIsNumeric(final SelectableDataSet sds, final String column, - final String message, final PlotInfo plotInfo) { + public static void assertIsNumeric(final SelectableDataSet sds, final String column, final String message, + final PlotInfo plotInfo) { assertNotNull(sds, "t", plotInfo); if (!isNumeric(sds, column, plotInfo)) { throw new 
PlotRuntimeException(message, plotInfo); @@ -781,11 +733,10 @@ public static void assertIsNumeric(final SelectableDataSet sds, final String col * @param column column * @param plotInfo source of the exception */ - public static void assertIsNumericOrTime(final Table t, final String column, - final PlotInfo plotInfo) { + public static void assertIsNumericOrTime(final Table t, final String column, final PlotInfo plotInfo) { assertNotNull(t, "t", plotInfo); - assertIsNumericOrTime(t, column, - createWrongColumnTypeErrorMessage(t, column, plotInfo, "Numeric, Time"), plotInfo); + assertIsNumericOrTime(t, column, createWrongColumnTypeErrorMessage(t, column, plotInfo, "Numeric, Time"), + plotInfo); } /** @@ -797,11 +748,10 @@ public static void assertIsNumericOrTime(final Table t, final String column, * @param column column * @param plotInfo source of the exception */ - public static void assertIsNumericOrTime(final TableDefinition t, final String column, - final PlotInfo plotInfo) { + public static void assertIsNumericOrTime(final TableDefinition t, final String column, final PlotInfo plotInfo) { assertNotNull(t, "t", plotInfo); - assertIsNumericOrTime(t, column, - createWrongColumnTypeErrorMessage(t, column, plotInfo, "Numeric, Time"), plotInfo); + assertIsNumericOrTime(t, column, createWrongColumnTypeErrorMessage(t, column, plotInfo, "Numeric, Time"), + plotInfo); } /** @@ -814,10 +764,10 @@ public static void assertIsNumericOrTime(final TableDefinition t, final String c * @param plotInfo source of the exception */ public static void assertIsNumericOrTime(final SelectableDataSet sds, final String column, - final PlotInfo plotInfo) { + final PlotInfo plotInfo) { assertNotNull(sds, "sds", plotInfo); - assertIsNumericOrTime(sds, column, - createWrongColumnTypeErrorMessage(sds, column, plotInfo, "Numeric, Time"), plotInfo); + assertIsNumericOrTime(sds, column, createWrongColumnTypeErrorMessage(sds, column, plotInfo, "Numeric, Time"), + plotInfo); } /** @@ -830,8 +780,8 @@ 
public static void assertIsNumericOrTime(final SelectableDataSet sds, final Stri * @param message error message * @param plotInfo source of the exception */ - public static void assertIsNumericOrTime(final Table t, final String column, - final String message, final PlotInfo plotInfo) { + public static void assertIsNumericOrTime(final Table t, final String column, final String message, + final PlotInfo plotInfo) { assertNotNull(t, "t", plotInfo); if (!isNumericOrTime(t, column, plotInfo)) { throw new PlotRuntimeException(message, plotInfo); @@ -848,8 +798,8 @@ public static void assertIsNumericOrTime(final Table t, final String column, * @param message error message * @param plotInfo source of the exception */ - public static void assertIsNumericOrTime(final TableDefinition t, final String column, - final String message, final PlotInfo plotInfo) { + public static void assertIsNumericOrTime(final TableDefinition t, final String column, final String message, + final PlotInfo plotInfo) { assertNotNull(t, "t", plotInfo); if (!isNumericOrTime(t, column, plotInfo)) { throw new PlotRuntimeException(message, plotInfo); @@ -866,8 +816,8 @@ public static void assertIsNumericOrTime(final TableDefinition t, final String c * @param message error message * @param plotInfo source of the exception */ - public static void assertIsNumericOrTime(final SelectableDataSet sds, final String column, - final String message, final PlotInfo plotInfo) { + public static void assertIsNumericOrTime(final SelectableDataSet sds, final String column, final String message, + final PlotInfo plotInfo) { assertNotNull(sds, "t", plotInfo); if (!isNumericOrTime(sds, column, plotInfo)) { throw new PlotRuntimeException(message, plotInfo); @@ -882,14 +832,12 @@ public static void assertIsNumericOrTime(final SelectableDataSet sds, final Stri * @param plotInfo source of the exception * @param cols column names */ - public static void assertColumnsInTable(final Table t, final PlotInfo plotInfo, - final 
String... cols) { + public static void assertColumnsInTable(final Table t, final PlotInfo plotInfo, final String... cols) { assertNotNull(t, "t", plotInfo); assertNotNull(cols, "cols", plotInfo); for (String c : cols) { if (!t.getColumnSourceMap().containsKey(c)) { - throw new PlotIllegalArgumentException( - "Column " + c + " could not be found in table.", plotInfo); + throw new PlotIllegalArgumentException("Column " + c + " could not be found in table.", plotInfo); } } } @@ -902,14 +850,12 @@ public static void assertColumnsInTable(final Table t, final PlotInfo plotInfo, * @param plotInfo source of the exception * @param cols column names */ - public static void assertColumnsInTable(final TableDefinition t, final PlotInfo plotInfo, - final String... cols) { + public static void assertColumnsInTable(final TableDefinition t, final PlotInfo plotInfo, final String... cols) { assertNotNull(t, "t", plotInfo); assertNotNull(cols, "cols", plotInfo); for (String c : cols) { if (t.getColumn(c) == null) { - throw new PlotIllegalArgumentException( - "Column " + c + " could not be found in table.", plotInfo); + throw new PlotIllegalArgumentException("Column " + c + " could not be found in table.", plotInfo); } } } @@ -923,15 +869,15 @@ public static void assertColumnsInTable(final TableDefinition t, final PlotInfo * @param cols column names */ public static void assertColumnsInTable(final SelectableDataSet sds, final PlotInfo plotInfo, - final String... cols) { + final String... 
cols) { assertNotNull(sds, "t", plotInfo); assertNotNull(cols, "cols", plotInfo); final Set colnames = new HashSet<>(sds.getTableDefinition().getColumnNames()); for (String c : cols) { if (!colnames.contains(c)) { - throw new PlotIllegalArgumentException( - "Column " + c + " could not be found in selectable dataset.", plotInfo); + throw new PlotIllegalArgumentException("Column " + c + " could not be found in selectable dataset.", + plotInfo); } } } @@ -944,14 +890,12 @@ public static void assertColumnsInTable(final SelectableDataSet sds, final PlotI * @param plotInfo source of the exception * @param cols column names */ - public static void assertColumnsInTable(final TableHandle t, final PlotInfo plotInfo, - final String... cols) { + public static void assertColumnsInTable(final TableHandle t, final PlotInfo plotInfo, final String... cols) { assertNotNull(t, "t", plotInfo); assertNotNull(cols, "cols", plotInfo); for (String c : cols) { if (!t.hasColumns(c)) { - throw new PlotIllegalArgumentException( - "Column " + c + " could not be found in table.", plotInfo); + throw new PlotIllegalArgumentException("Column " + c + " could not be found in table.", plotInfo); } } } @@ -960,24 +904,24 @@ public static boolean nanSafeEquals(double x, double x1) { return x == x1 || (Double.isNaN(x) && Double.isNaN(x1)); } - private static String createWrongColumnTypeErrorMessage(final Table t, final String column, - final PlotInfo plotInfo, final String... types) { + private static String createWrongColumnTypeErrorMessage(final Table t, final String column, final PlotInfo plotInfo, + final String... types) { assertNotNull(t, "t", plotInfo); - return "Invalid data type in column = " + column + ". Expected one of " - + Arrays.toString(types) + ", was " + getColumnType(t, column, plotInfo); + return "Invalid data type in column = " + column + ". 
Expected one of " + Arrays.toString(types) + ", was " + + getColumnType(t, column, plotInfo); } - private static String createWrongColumnTypeErrorMessage(final TableDefinition t, - final String column, final PlotInfo plotInfo, final String... types) { + private static String createWrongColumnTypeErrorMessage(final TableDefinition t, final String column, + final PlotInfo plotInfo, final String... types) { assertNotNull(t, "t", plotInfo); - return "Invalid data type in column = " + column + ". Expected one of " - + Arrays.toString(types) + ", was " + getColumnType(t, column, plotInfo); + return "Invalid data type in column = " + column + ". Expected one of " + Arrays.toString(types) + ", was " + + getColumnType(t, column, plotInfo); } - private static String createWrongColumnTypeErrorMessage(final SelectableDataSet sds, - final String column, final PlotInfo plotInfo, final String... types) { + private static String createWrongColumnTypeErrorMessage(final SelectableDataSet sds, final String column, + final PlotInfo plotInfo, final String... types) { assertNotNull(sds, "sds", plotInfo); - return "Invalid data type in column = " + column + ". Expected one of " - + Arrays.toString(types) + ", was " + getColumnType(sds, column, plotInfo); + return "Invalid data type in column = " + column + ". Expected one of " + Arrays.toString(types) + ", was " + + getColumnType(sds, column, plotInfo); } } diff --git a/Plot/src/main/java/io/deephaven/db/plot/util/PlotUtils.java b/Plot/src/main/java/io/deephaven/db/plot/util/PlotUtils.java index db909d524b5..7d054650816 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/util/PlotUtils.java +++ b/Plot/src/main/java/io/deephaven/db/plot/util/PlotUtils.java @@ -51,8 +51,7 @@ public class PlotUtils { private PlotUtils() {} /** Instances of ColorPaletteArray have some state, so this is kept privat. 
*/ - private static final ColorPaletteArray MATPLOT_COLORS = - new ColorPaletteArray(ColorPaletteArray.Palette.MATPLOTLIB); + private static final ColorPaletteArray MATPLOT_COLORS = new ColorPaletteArray(ColorPaletteArray.Palette.MATPLOTLIB); private static final Random rng = new Random(); @@ -72,8 +71,8 @@ private static int randVar() { } /** - * Gets a variable name not already in the {@link QueryScope} by appending random integers to - * the end of {@code root} until a unique name is found. + * Gets a variable name not already in the {@link QueryScope} by appending random integers to the end of + * {@code root} until a unique name is found. * * @param root base variable name * @return unique randomized variable name based off {@code root} @@ -107,8 +106,7 @@ public static Paint intToColor(final Integer color) { } /** - * Gets the double equivalent of the {@link Number}. Null {@link QueryConstants} are converted - * to Double.NaN. + * Gets the double equivalent of the {@link Number}. Null {@link QueryConstants} are converted to Double.NaN. * * @param n number * @return double value of {@code n} @@ -221,8 +219,8 @@ public static Table table(final double[] x, final String colName) { } /** - * Creates a new table with a column of doubles holding the specified data. Values of {@code x} - * are converted to their corresponding double values. Nulls are mapped to Double.NaN. + * Creates a new table with a column of doubles holding the specified data. Values of {@code x} are converted to + * their corresponding double values. Nulls are mapped to Double.NaN. * * @param x data * @param colName column name @@ -231,12 +229,12 @@ public static Table table(final double[] x, final String colName) { */ public static Table doubleTable(final T[] x, final String colName) { return table(x, "X") - .view(colName + " = X==null ? Double.NaN : X.doubleValue()"); + .view(colName + " = X==null ? 
Double.NaN : X.doubleValue()"); } /** - * Creates a new table with a column of doubles holding the specified data. Values of {@code x} - * are converted to their corresponding double values. Nulls are mapped to Double.NaN. + * Creates a new table with a column of doubles holding the specified data. Values of {@code x} are converted to + * their corresponding double values. Nulls are mapped to Double.NaN. * * @param x data * @param colName column name @@ -245,24 +243,20 @@ public static Table doubleTable(final T[] x, final String col */ public static Table doubleTable(final List x, final String colName) { return table(x, "X") - .updateView("__Class__ = X.getClass()") - .view(colName - + " = __Class__==Double.class ? X=NULL_DOUBLE ? Double.NaN : ((Double) X).doubleValue() " - + - ": __Class__==Short.class ? X=NULL_SHORT ? Double.NaN : ((Short) X).doubleValue() " - + - ": __Class__==Long.class ? X=NULL_LONG ? Double.NaN : ((Long) X).doubleValue() " + - ": __Class__==Integer.class ? X=NULL_INT ? Double.NaN : ((Integer) X).doubleValue() " - + - ": __Class__==Float.class ? X==NULL_FLOAT ? Double.NaN : ((Float) X).doubleValue()" - + - ": Double.NaN"); + .updateView("__Class__ = X.getClass()") + .view(colName + " = __Class__==Double.class ? X=NULL_DOUBLE ? Double.NaN : ((Double) X).doubleValue() " + + + ": __Class__==Short.class ? X=NULL_SHORT ? Double.NaN : ((Short) X).doubleValue() " + + ": __Class__==Long.class ? X=NULL_LONG ? Double.NaN : ((Long) X).doubleValue() " + + ": __Class__==Integer.class ? X=NULL_INT ? Double.NaN : ((Integer) X).doubleValue() " + + ": __Class__==Float.class ? X==NULL_FLOAT ? Double.NaN : ((Float) X).doubleValue()" + + ": Double.NaN"); } /** - * Creates a new table with a column of doubles holding the specified data. Values of {@code x} - * are converted to their corresponding double values. Nulls are mapped to Double.NaN. + * Creates a new table with a column of doubles holding the specified data. 
Values of {@code x} are converted to + * their corresponding double values. Nulls are mapped to Double.NaN. * * @param x data * @param colName column name @@ -270,12 +264,12 @@ public static Table doubleTable(final List x, final String */ public static Table doubleTable(final short[] x, final String colName) { return table(x, "X") - .view(colName + " = isNull(X) ? Double.NaN : X"); + .view(colName + " = isNull(X) ? Double.NaN : X"); } /** - * Creates a new table with a column of doubles holding the specified data. Values of {@code x} - * are converted to their corresponding double values. Nulls are mapped to Double.NaN. + * Creates a new table with a column of doubles holding the specified data. Values of {@code x} are converted to + * their corresponding double values. Nulls are mapped to Double.NaN. * * @param x data * @param colName column name @@ -283,12 +277,12 @@ public static Table doubleTable(final short[] x, final String colName) { */ public static Table doubleTable(final int[] x, final String colName) { return table(x, "X") - .view(colName + " = isNull(X) ? Double.NaN : X"); + .view(colName + " = isNull(X) ? Double.NaN : X"); } /** - * Creates a new table with a column of doubles holding the specified data. Values of {@code x} - * are converted to their corresponding double values. Nulls are mapped to Double.NaN. + * Creates a new table with a column of doubles holding the specified data. Values of {@code x} are converted to + * their corresponding double values. Nulls are mapped to Double.NaN. * * @param x data * @param colName column name @@ -296,12 +290,12 @@ public static Table doubleTable(final int[] x, final String colName) { */ public static Table doubleTable(final long[] x, final String colName) { return table(x, "X") - .view(colName + " = isNull(X) ? Double.NaN : X"); + .view(colName + " = isNull(X) ? Double.NaN : X"); } /** - * Creates a new table with a column of doubles holding the specified data. 
Values of {@code x} - * are converted to their corresponding double values. Nulls are mapped to Double.NaN. + * Creates a new table with a column of doubles holding the specified data. Values of {@code x} are converted to + * their corresponding double values. Nulls are mapped to Double.NaN. * * @param x data * @param colName column name @@ -309,12 +303,12 @@ public static Table doubleTable(final long[] x, final String colName) { */ public static Table doubleTable(final float[] x, final String colName) { return table(x, "X") - .view(colName + " = isNull(X) ? Double.NaN : X"); + .view(colName + " = isNull(X) ? Double.NaN : X"); } /** - * Creates a new table with a column of doubles holding the specified data. Values of {@code x} - * are converted to their corresponding double values. Nulls are mapped to Double.NaN. + * Creates a new table with a column of doubles holding the specified data. Values of {@code x} are converted to + * their corresponding double values. Nulls are mapped to Double.NaN. * * @param x data * @param colName column name @@ -322,17 +316,17 @@ public static Table doubleTable(final float[] x, final String colName) { */ public static Table doubleTable(final double[] x, final String colName) { return table(x, "X") - .view(colName + " = isNull(X) ? Double.NaN : X"); + .view(colName + " = isNull(X) ? Double.NaN : X"); } /* - * Auto unboxing of these types will throw an error in the generic method public static Table doubleTable(final T[] x, final String colName) + * Auto unboxing of these types will throw an error in the generic method public static Table + * doubleTable(final T[] x, final String colName) */ /** - * Creates a new table with a column of doubles holding the specified data. Values of {@code x} - * are converted to their corresponding double values. Nulls are mapped to Double.NaN. + * Creates a new table with a column of doubles holding the specified data. Values of {@code x} are converted to + * their corresponding double values. 
Nulls are mapped to Double.NaN. * * @param x data * @param colName column name @@ -340,12 +334,12 @@ public static Table doubleTable(final double[] x, final String colName) { */ public static Table doubleTable(final Double[] x, final String colName) { return table(x, "X") - .view(colName + " = isNull(X) ? Double.NaN : X"); + .view(colName + " = isNull(X) ? Double.NaN : X"); } /** - * Creates a new table with a column of doubles holding the specified data. Values of {@code x} - * are converted to their corresponding double values. Nulls are mapped to Double.NaN. + * Creates a new table with a column of doubles holding the specified data. Values of {@code x} are converted to + * their corresponding double values. Nulls are mapped to Double.NaN. * * @param x data * @param colName column name @@ -353,12 +347,12 @@ public static Table doubleTable(final Double[] x, final String colName) { */ public static Table doubleTable(final Short[] x, final String colName) { return table(x, "X") - .view(colName + " = isNull(X) ? Double.NaN : X"); + .view(colName + " = isNull(X) ? Double.NaN : X"); } /** - * Creates a new table with a column of doubles holding the specified data. Values of {@code x} - * are converted to their corresponding double values. Nulls are mapped to Double.NaN. + * Creates a new table with a column of doubles holding the specified data. Values of {@code x} are converted to + * their corresponding double values. Nulls are mapped to Double.NaN. * * @param x data * @param colName column name @@ -366,12 +360,12 @@ public static Table doubleTable(final Short[] x, final String colName) { */ public static Table doubleTable(final Long[] x, final String colName) { return table(x, "X") - .view(colName + " = isNull(X) ? Double.NaN : X"); + .view(colName + " = isNull(X) ? Double.NaN : X"); } /** - * Creates a new table with a column of doubles holding the specified data. Values of {@code x} - * are converted to their corresponding double values. 
Nulls are mapped to Double.NaN. + * Creates a new table with a column of doubles holding the specified data. Values of {@code x} are converted to + * their corresponding double values. Nulls are mapped to Double.NaN. * * @param x data * @param colName column name @@ -379,12 +373,12 @@ public static Table doubleTable(final Long[] x, final String colName) { */ public static Table doubleTable(final Float[] x, final String colName) { return table(x, "X") - .view(colName + " = isNull(X) ? Double.NaN : X"); + .view(colName + " = isNull(X) ? Double.NaN : X"); } /** - * Creates a new table with a column of doubles holding the specified data. Values of {@code x} - * are converted to their corresponding double values. Nulls are mapped to Double.NaN. + * Creates a new table with a column of doubles holding the specified data. Values of {@code x} are converted to + * their corresponding double values. Nulls are mapped to Double.NaN. * * @param x data * @param colName column name @@ -392,12 +386,12 @@ public static Table doubleTable(final Float[] x, final String colName) { */ public static Table doubleTable(final Integer[] x, final String colName) { return table(x, "X") - .view(colName + " = isNull(X) ? Double.NaN : X"); + .view(colName + " = isNull(X) ? Double.NaN : X"); } /** - * Creates a new array of floats holding the specified data. Values of {@code x} are converted - * to their corresponding float values. Nulls are mapped to Float.NaN. + * Creates a new array of floats holding the specified data. Values of {@code x} are converted to their + * corresponding float values. Nulls are mapped to Float.NaN. * * @param x data * @return new float array holding {@code x} @@ -407,14 +401,13 @@ public static float[] toFloat(final double[] x) { return null; } final float[] result = new float[x.length]; - IntStream.range(0, x.length) - .forEach(i -> result[i] = x[i] == NULL_DOUBLE ? 
Float.NaN : (float) x[i]); + IntStream.range(0, x.length).forEach(i -> result[i] = x[i] == NULL_DOUBLE ? Float.NaN : (float) x[i]); return result; } /** - * Creates a new array of floats holding the specified data. Values of {@code x} are converted - * to their corresponding float values. Nulls are mapped to Float.NaN. + * Creates a new array of floats holding the specified data. Values of {@code x} are converted to their + * corresponding float values. Nulls are mapped to Float.NaN. * * @param x data * @return new float array holding {@code x} @@ -424,14 +417,13 @@ public static float[] toFloat(final int[] x) { return null; } final float[] result = new float[x.length]; - IntStream.range(0, x.length) - .forEach(i -> result[i] = x[i] == NULL_INT ? Float.NaN : (float) x[i]); + IntStream.range(0, x.length).forEach(i -> result[i] = x[i] == NULL_INT ? Float.NaN : (float) x[i]); return result; } /** - * Creates a new array of floats holding the specified data. Values of {@code x} are converted - * to their corresponding float values. Nulls are mapped to Float.NaN. + * Creates a new array of floats holding the specified data. Values of {@code x} are converted to their + * corresponding float values. Nulls are mapped to Float.NaN. * * @param x data * @return new float array holding {@code x} @@ -441,14 +433,13 @@ public static float[] toFloat(final long[] x) { return null; } final float[] result = new float[x.length]; - IntStream.range(0, x.length) - .forEach(i -> result[i] = x[i] == NULL_LONG ? Float.NaN : (float) x[i]); + IntStream.range(0, x.length).forEach(i -> result[i] = x[i] == NULL_LONG ? Float.NaN : (float) x[i]); return result; } /** - * Creates a new array of floats holding the specified data. Values of {@code x} are converted - * to their corresponding float values. Nulls are mapped to Float.NaN. + * Creates a new array of floats holding the specified data. Values of {@code x} are converted to their + * corresponding float values. Nulls are mapped to Float.NaN. 
* * @param x data * @param type of the data in {@code x} @@ -459,14 +450,13 @@ public static float[] toFloat(final T[] x) { return null; } final float[] result = new float[x.length]; - IntStream.range(0, x.length) - .forEach(i -> result[i] = x[i] == null ? Float.NaN : x[i].floatValue()); + IntStream.range(0, x.length).forEach(i -> result[i] = x[i] == null ? Float.NaN : x[i].floatValue()); return result; } /** - * Creates a new array of doubles holding the specified data. Values of {@code x} are converted - * to their corresponding double values. Nulls are mapped to Double.NaN. + * Creates a new array of doubles holding the specified data. Values of {@code x} are converted to their + * corresponding double values. Nulls are mapped to Double.NaN. * * @param x data * @return new float array holding {@code x} @@ -476,14 +466,13 @@ public static double[] toDouble(final float[] x) { return null; } final double[] result = new double[x.length]; - IntStream.range(0, x.length) - .forEach(i -> result[i] = x[i] == NULL_FLOAT ? Double.NaN : (double) x[i]); + IntStream.range(0, x.length).forEach(i -> result[i] = x[i] == NULL_FLOAT ? Double.NaN : (double) x[i]); return result; } /** - * Creates a new array of doubles holding the specified data. Values of {@code x} are converted - * to their corresponding double values. Nulls are mapped to Double.NaN. + * Creates a new array of doubles holding the specified data. Values of {@code x} are converted to their + * corresponding double values. Nulls are mapped to Double.NaN. * * @param x data * @return new float array holding {@code x} @@ -493,14 +482,13 @@ public static double[] toDouble(final int[] x) { return null; } final double[] result = new double[x.length]; - IntStream.range(0, x.length) - .forEach(i -> result[i] = x[i] == NULL_INT ? Double.NaN : (double) x[i]); + IntStream.range(0, x.length).forEach(i -> result[i] = x[i] == NULL_INT ? 
Double.NaN : (double) x[i]); return result; } /** - * Creates a new array of doubles holding the specified data. Values of {@code x} are converted - * to their corresponding double values. Nulls are mapped to Double.NaN. + * Creates a new array of doubles holding the specified data. Values of {@code x} are converted to their + * corresponding double values. Nulls are mapped to Double.NaN. * * @param x data * @return new float array holding {@code x} @@ -510,14 +498,13 @@ public static double[] toDouble(final long[] x) { return null; } final double[] result = new double[x.length]; - IntStream.range(0, x.length) - .forEach(i -> result[i] = x[i] == NULL_LONG ? Double.NaN : (double) x[i]); + IntStream.range(0, x.length).forEach(i -> result[i] = x[i] == NULL_LONG ? Double.NaN : (double) x[i]); return result; } /** - * Creates a new array of doubles holding the specified data. Values of {@code x} are converted - * to their corresponding double values. Nulls are mapped to Double.NaN. + * Creates a new array of doubles holding the specified data. Values of {@code x} are converted to their + * corresponding double values. Nulls are mapped to Double.NaN. * * @param x data * @param type of the data in {@code x} @@ -528,8 +515,7 @@ public static double[] toDouble(final T[] x) { return null; } final double[] result = new double[x.length]; - IntStream.range(0, x.length) - .forEach(i -> result[i] = x[i] == null ? Double.NaN : x[i].doubleValue()); + IntStream.range(0, x.length).forEach(i -> result[i] = x[i] == null ? Double.NaN : x[i].doubleValue()); return result; } @@ -689,13 +675,12 @@ public static long maxIgnoreNull(final long oldMin, final long value) { return Math.max(value, oldMin); } - public static TableHandle createCategoryTableHandle(Table t, final String catColumn, - final String... otherColumns) { + public static TableHandle createCategoryTableHandle(Table t, final String catColumn, final String... 
otherColumns) { return createCategoryTableHandle(t, new String[] {catColumn}, otherColumns); } public static TableHandle createCategoryTableHandle(Table t, final String[] catColumns, - final String... otherColumns) { + final String... otherColumns) { t = createCategoryTable(t, catColumns); final String[] cols = new String[otherColumns.length + catColumns.length + 1]; @@ -707,19 +692,18 @@ public static TableHandle createCategoryTableHandle(Table t, final String[] catC } public static TableBackedTableMapHandle createCategoryTableMapHandle(Table t, - final String catColumn, - final String[] otherColumns, - final String[] byColumns, - final PlotInfo plotInfo) { - return createCategoryTableMapHandle(t, new String[] {catColumn}, otherColumns, byColumns, - plotInfo); + final String catColumn, + final String[] otherColumns, + final String[] byColumns, + final PlotInfo plotInfo) { + return createCategoryTableMapHandle(t, new String[] {catColumn}, otherColumns, byColumns, plotInfo); } public static TableBackedTableMapHandle createCategoryTableMapHandle(Table t, - final String[] catColumns, - final String[] otherColumns, - final String[] byColumns, - final PlotInfo plotInfo) { + final String[] catColumns, + final String[] otherColumns, + final String[] byColumns, + final PlotInfo plotInfo) { final String[] lastByColumns = new String[catColumns.length + byColumns.length]; System.arraycopy(catColumns, 0, lastByColumns, 0, catColumns.length); System.arraycopy(byColumns, 0, lastByColumns, catColumns.length, byColumns.length); @@ -735,18 +719,14 @@ public static TableBackedTableMapHandle createCategoryTableMapHandle(Table t, } public static Table createCategoryTable(final Table t, final String[] catColumns) { - // We need to do the equivalent of LastBy wrt. to columns included, or we have a chance to - // break ACLs + // We need to do the equivalent of LastBy wrt. 
to columns included, or we have a chance to break ACLs final List lastColumns = t.getDefinition().getColumnNames(); lastColumns.removeAll(Arrays.asList(catColumns)); final Table result = - t.by( - createCategoryComboAgg( - AggLast(lastColumns.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY))), - catColumns); + t.by(createCategoryComboAgg(AggLast(lastColumns.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY))), + catColumns); - // We must explicitly copy attributes because we are doing a modified manual first/lastBy - // which will not + // We must explicitly copy attributes because we are doing a modified manual first/lastBy which will not // automatically do the copy. ((BaseTable) t).copyAttributes(result, BaseTable.CopyAttributeOperation.LastBy); return result; @@ -758,17 +738,17 @@ public static Table createCategoryHistogramTable(final Table t, final String... public static ComboAggregateFactory createCategoryComboAgg(ComboBy agg) { return AggCombo( - Agg(new KeyOnlyFirstOrLastByStateFactory(CategoryDataSeries.CAT_SERIES_ORDER_COLUMN, - AggType.First), MatchPair.ZERO_LENGTH_MATCH_PAIR_ARRAY), - agg); + Agg(new KeyOnlyFirstOrLastByStateFactory(CategoryDataSeries.CAT_SERIES_ORDER_COLUMN, AggType.First), + MatchPair.ZERO_LENGTH_MATCH_PAIR_ARRAY), + agg); } public static List getColumnConditions(final Table arg, final String column) { return Collections.singletonList(Condition.EQUALS); } - public static Function getNumberFromNumericOrTimeSource(final Table t, - final String numericCol, final PlotInfo plotInfo) { + public static Function getNumberFromNumericOrTimeSource(final Table t, final String numericCol, + final PlotInfo plotInfo) { ArgumentValidations.isNumericOrTime(t, numericCol, plotInfo); final ColumnSource columnSource = t.getColumnSource(numericCol); @@ -825,8 +805,7 @@ public HashMapWithDefault copy() { } } - public static IndexableData createIndexableData(final Table t, final String column, - final PlotInfo plotInfo) { + public static IndexableData 
createIndexableData(final Table t, final String column, final PlotInfo plotInfo) { final DataColumn dataColumn = t.getColumn(column); final Object o = dataColumn.getDirect(); @@ -834,14 +813,12 @@ public static IndexableData createIndexableData(final Table t, final String } public static IndexableData createIndexableData(final Map snapshotData, - @NotNull final TableDefinition tableDefinition, final String column, - final PlotInfo plotInfo) { - return createIndexableData(snapshotData, tableDefinition.getColumn(column).getDataType(), - column, plotInfo); + @NotNull final TableDefinition tableDefinition, final String column, final PlotInfo plotInfo) { + return createIndexableData(snapshotData, tableDefinition.getColumn(column).getDataType(), column, plotInfo); } - public static IndexableData createIndexableData(final Map snapshotData, - final Class c, final String column, final PlotInfo plotInfo) { + public static IndexableData createIndexableData(final Map snapshotData, final Class c, + final String column, final PlotInfo plotInfo) { if (snapshotData == null) { return createEmptyIndexableData(c, plotInfo); } @@ -849,8 +826,7 @@ public static IndexableData createIndexableData(final Map sn return createIndexableData(snapshotData.get(column), c, plotInfo); } - public static IndexableData createEmptyIndexableData(final Class c, - final PlotInfo plotInfo) { + public static IndexableData createEmptyIndexableData(final Class c, final PlotInfo plotInfo) { if (c.equals(int.class)) { return new IndexableDataInteger(EMPTY_INT_ARRAY, plotInfo); } else if (c.equals(double.class)) { @@ -870,8 +846,7 @@ public static IndexableData createEmptyIndexableData(final Class c, return new IndexableDataListNullCategory(Collections.emptyList(), plotInfo); } - public static IndexableData createIndexableData(final Object data, - final PlotInfo plotInfo) { + public static IndexableData createIndexableData(final Object data, final PlotInfo plotInfo) { if (data instanceof int[]) { return new 
IndexableDataInteger((int[]) data, plotInfo); } else if (data instanceof double[]) { @@ -891,8 +866,7 @@ public static IndexableData createIndexableData(final Object data, return new IndexableDataArrayNullCategory((T[]) data, plotInfo); } - public static IndexableData createIndexableData(final Object data, final Class c, - final PlotInfo plotInfo) { + public static IndexableData createIndexableData(final Object data, final Class c, final PlotInfo plotInfo) { if (c.equals(int.class)) { return new IndexableDataInteger((int[]) data, plotInfo); } else if (c.equals(double.class)) { @@ -916,41 +890,36 @@ public static IndexableData createIndexableData(final Object data, final Cla return new IndexableDataArrayNullCategory((T[]) data, plotInfo); } - public static IndexableData createIndexableData(final T[] data, - final PlotInfo plotInfo) { + public static IndexableData createIndexableData(final T[] data, final PlotInfo plotInfo) { return new IndexableDataArray(data, plotInfo); } - public static IndexableNumericData createIndexableNumericDataArray( - final Map data, @NotNull final TableHandle th, final String column, - final PlotInfo plotInfo) { + public static IndexableNumericData createIndexableNumericDataArray(final Map data, + @NotNull final TableHandle th, final String column, final PlotInfo plotInfo) { return createIndexableNumericDataArray(data, th.getTable(), column, plotInfo); } - public static IndexableNumericData createIndexableNumericDataArray( - final Map data, @NotNull final Table t, final String column, - final PlotInfo plotInfo) { + public static IndexableNumericData createIndexableNumericDataArray(final Map data, + @NotNull final Table t, final String column, final PlotInfo plotInfo) { return createIndexableNumericDataArray(data, t.getDefinition(), column, plotInfo); } - public static IndexableNumericData createIndexableNumericDataArray( - final Map data, @NotNull final TableDefinition tableDefinition, - final String column, final PlotInfo plotInfo) { + 
public static IndexableNumericData createIndexableNumericDataArray(final Map data, + @NotNull final TableDefinition tableDefinition, final String column, final PlotInfo plotInfo) { if (data == null) { if (tableDefinition.getColumn(column) == null) { return createEmptyIndexableNumericDataArray(double.class, plotInfo); } - return createEmptyIndexableNumericDataArray( - tableDefinition.getColumn(column).getDataType(), plotInfo); + return createEmptyIndexableNumericDataArray(tableDefinition.getColumn(column).getDataType(), plotInfo); } - return createIndexableNumericDataArray(data.get(column), - tableDefinition.getColumn(column).getDataType(), plotInfo); + return createIndexableNumericDataArray(data.get(column), tableDefinition.getColumn(column).getDataType(), + plotInfo); } public static IndexableNumericData createEmptyIndexableNumericDataArray(final Class dataType, - final PlotInfo plotInfo) { + final PlotInfo plotInfo) { if (dataType == int.class) { return new IndexableNumericDataArrayInt(EMPTY_INT_ARRAY, plotInfo); } else if (dataType == double.class) { @@ -968,13 +937,12 @@ public static IndexableNumericData createEmptyIndexableNumericDataArray(final Cl } else if (Number.class.isAssignableFrom(dataType)) { return new IndexableNumericDataArrayNumber(EMPTY_NUMBER_ARRAY, plotInfo); } else { - throw new UnsupportedOperationException( - "Can not create IndexableNumericDataArray from type " + dataType); + throw new UnsupportedOperationException("Can not create IndexableNumericDataArray from type " + dataType); } } - public static IndexableNumericData createIndexableNumericDataArray(final Object data, - final Class dataType, final PlotInfo plotInfo) { + public static IndexableNumericData createIndexableNumericDataArray(final Object data, final Class dataType, + final PlotInfo plotInfo) { if (dataType == int.class) { return new IndexableNumericDataArrayInt((int[]) data, plotInfo); } else if (dataType == double.class) { @@ -996,8 +964,7 @@ public static 
IndexableNumericData createIndexableNumericDataArray(final Object } else if (Number.class.isAssignableFrom(dataType)) { return new IndexableNumericDataArrayNumber((Number[]) data, plotInfo); } else { - throw new UnsupportedOperationException( - "Can not create IndexableNumericDataArray from type " + dataType); + throw new UnsupportedOperationException("Can not create IndexableNumericDataArray from type " + dataType); } } } diff --git a/Plot/src/main/java/io/deephaven/db/plot/util/ShapeUtils.java b/Plot/src/main/java/io/deephaven/db/plot/util/ShapeUtils.java index 36b31bcc60f..704f2e6ec16 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/util/ShapeUtils.java +++ b/Plot/src/main/java/io/deephaven/db/plot/util/ShapeUtils.java @@ -104,10 +104,10 @@ private static java.awt.Shape rotateRectangle(RectangularShape s, double angle) for (int i = 0; i < 4; i++) { double cos = Math.cos(angle); double sin = Math.sin(angle); - newXPoints[i] = s.getCenterX() + ((xPoints[i] - s.getCenterX()) * cos) - - ((xPoints[i] - s.getCenterY()) * sin); - newYPoints[i] = s.getCenterY() + ((yPoints[i] - s.getCenterY()) * cos) - - ((yPoints[i] - s.getCenterX()) * sin); + newXPoints[i] = + s.getCenterX() + ((xPoints[i] - s.getCenterX()) * cos) - ((xPoints[i] - s.getCenterY()) * sin); + newYPoints[i] = + s.getCenterY() + ((yPoints[i] - s.getCenterY()) * cos) - ((yPoints[i] - s.getCenterX()) * sin); } Path2D path = new Path2D.Double(); @@ -131,10 +131,8 @@ private static java.awt.Shape rotatePolygon(final Polygon p, final double angle) for (int i = 0; i < p.npoints; i++) { double cos = Math.cos(angle); double sin = Math.sin(angle); - xPoints[i] = - center[0] + ((p.xpoints[i] - center[0]) * cos) - ((p.ypoints[i] - center[1]) * sin); - yPoints[i] = - center[1] + ((p.ypoints[i] - center[1]) * cos) - ((p.xpoints[i] - center[0]) * sin); + xPoints[i] = center[0] + ((p.xpoints[i] - center[0]) * cos) - ((p.ypoints[i] - center[1]) * sin); + yPoints[i] = center[1] + ((p.ypoints[i] - center[1]) * cos) - 
((p.xpoints[i] - center[0]) * sin); } Path2D path = new Path2D.Double(); diff --git a/Plot/src/main/java/io/deephaven/db/plot/util/functions/ClosureBiFunction.java b/Plot/src/main/java/io/deephaven/db/plot/util/functions/ClosureBiFunction.java index 274bba48f1a..66251582f31 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/util/functions/ClosureBiFunction.java +++ b/Plot/src/main/java/io/deephaven/db/plot/util/functions/ClosureBiFunction.java @@ -11,8 +11,7 @@ /** * Wraps a {@link SerializableBiFunction} with the API of a function.
    */ -public class ClosureBiFunction extends SerializableClosure - implements SerializableBiFunction { +public class ClosureBiFunction extends SerializableClosure implements SerializableBiFunction { private static final long serialVersionUID = 697974379939190730L; /** diff --git a/Plot/src/main/java/io/deephaven/db/plot/util/functions/ClosureDoubleBinaryOperator.java b/Plot/src/main/java/io/deephaven/db/plot/util/functions/ClosureDoubleBinaryOperator.java index 35b66337bf0..261cff2f423 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/util/functions/ClosureDoubleBinaryOperator.java +++ b/Plot/src/main/java/io/deephaven/db/plot/util/functions/ClosureDoubleBinaryOperator.java @@ -14,7 +14,7 @@ * A serializable closure which maps pair of doubles to doubles. */ public class ClosureDoubleBinaryOperator extends SerializableClosure - implements DoubleBinaryOperator { + implements DoubleBinaryOperator { private static final long serialVersionUID = -6533578879266557626L; diff --git a/Plot/src/main/java/io/deephaven/db/plot/util/functions/ClosureDoubleUnaryOperator.java b/Plot/src/main/java/io/deephaven/db/plot/util/functions/ClosureDoubleUnaryOperator.java index 255c78775cb..3a350f78c55 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/util/functions/ClosureDoubleUnaryOperator.java +++ b/Plot/src/main/java/io/deephaven/db/plot/util/functions/ClosureDoubleUnaryOperator.java @@ -12,7 +12,7 @@ * A serializable closure which maps doubles to doubles. 
*/ public class ClosureDoubleUnaryOperator extends SerializableClosure - implements DoubleUnaryOperator { + implements DoubleUnaryOperator { private static final long serialVersionUID = -4092987117189101803L; diff --git a/Plot/src/main/java/io/deephaven/db/plot/util/functions/ClosureFunction.java b/Plot/src/main/java/io/deephaven/db/plot/util/functions/ClosureFunction.java index 3951d92ceaf..816c8dde2a0 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/util/functions/ClosureFunction.java +++ b/Plot/src/main/java/io/deephaven/db/plot/util/functions/ClosureFunction.java @@ -9,8 +9,7 @@ /** * Wraps a {@link SerializableClosure} with the API of a function. */ -public class ClosureFunction extends SerializableClosure - implements SerializableFunction { +public class ClosureFunction extends SerializableClosure implements SerializableFunction { private static final long serialVersionUID = 3693316124178311688L; diff --git a/Plot/src/main/java/io/deephaven/db/plot/util/functions/FigureImplFunction.java b/Plot/src/main/java/io/deephaven/db/plot/util/functions/FigureImplFunction.java index c514b8038ba..6805448d82c 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/util/functions/FigureImplFunction.java +++ b/Plot/src/main/java/io/deephaven/db/plot/util/functions/FigureImplFunction.java @@ -22,20 +22,19 @@ public class FigureImplFunction implements Function { private final int seriesIndex; public FigureImplFunction(final Function function, - final MultiSeriesInternal multiSeriesInternal) { + final MultiSeriesInternal multiSeriesInternal) { this(function, multiSeriesInternal.chart().row(), multiSeriesInternal.chart().column(), - multiSeriesInternal.axes().id(), multiSeriesInternal.id()); + multiSeriesInternal.axes().id(), multiSeriesInternal.id()); } public FigureImplFunction(final Function function, - final DataSeriesInternal dataSeriesInternal) { + final DataSeriesInternal dataSeriesInternal) { this(function, dataSeriesInternal.chart().row(), 
dataSeriesInternal.chart().column(), - dataSeriesInternal.axes().id(), dataSeriesInternal.id()); + dataSeriesInternal.axes().id(), dataSeriesInternal.id()); } - public FigureImplFunction(final Function function, - final int chartRowIndex, final int chartColumnIndex, final int axesIndex, - final int seriesIndex) { + public FigureImplFunction(final Function function, final int chartRowIndex, + final int chartColumnIndex, final int axesIndex, final int seriesIndex) { this.function = function; this.chartRowIndex = chartRowIndex; this.chartColumnIndex = chartColumnIndex; @@ -45,7 +44,6 @@ public FigureImplFunction(final Function function, @Override public FigureImpl apply(FigureImpl figure) { - return function.apply( - figure.chart(chartRowIndex, chartColumnIndex).axes(axesIndex).series(seriesIndex)); + return function.apply(figure.chart(chartRowIndex, chartColumnIndex).axes(axesIndex).series(seriesIndex)); } } diff --git a/Plot/src/main/java/io/deephaven/db/plot/util/functions/SerializableClosure.java b/Plot/src/main/java/io/deephaven/db/plot/util/functions/SerializableClosure.java index 756a4c6252e..ae5375dea3b 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/util/functions/SerializableClosure.java +++ b/Plot/src/main/java/io/deephaven/db/plot/util/functions/SerializableClosure.java @@ -56,8 +56,7 @@ private void writeObject(final ObjectOutputStream oos) throws IOException { } @SuppressWarnings("unchecked") - private void readObject(final ObjectInputStream ois) - throws IOException, ClassNotFoundException { + private void readObject(final ObjectInputStream ois) throws IOException, ClassNotFoundException { final String closureClassName = (String) ois.readObject(); final byte[] closureCode = (byte[]) ois.readObject(); final byte[] closureData = (byte[]) ois.readObject(); @@ -65,33 +64,30 @@ private void readObject(final ObjectInputStream ois) final MapBackedClassLoader cl = new MapBackedClassLoader(); cl.addClassData(closureClassName, closureCode); final 
ObjectInputStream objectStream = - new CustomClassLoaderObjectInputStream<>(new ByteArrayInputStream(closureData), cl); + new CustomClassLoaderObjectInputStream<>(new ByteArrayInputStream(closureData), cl); closure = (Closure) objectStream.readObject(); } - private static class CustomClassLoaderObjectInputStream - extends ObjectInputStream { + private static class CustomClassLoaderObjectInputStream extends ObjectInputStream { private final CLT classLoader; - public CustomClassLoaderObjectInputStream(InputStream inputStream, CLT classLoader) - throws IOException { + public CustomClassLoaderObjectInputStream(InputStream inputStream, CLT classLoader) throws IOException { super(inputStream); this.classLoader = classLoader; } @Override - protected Class resolveClass(ObjectStreamClass desc) - throws ClassNotFoundException, IOException { + protected Class resolveClass(ObjectStreamClass desc) throws ClassNotFoundException, IOException { if (classLoader != null) { try { return Class.forName(desc.getName(), false, classLoader); } catch (ClassNotFoundException cnfe) { /* - * The default implementation in ObjectInputStream handles primitive types with - * a map from name to class. Rather than duplicate the functionality, we are - * delegating to the super method for all failures that may be of this kind, as - * well as any case where the passed in ClassLoader fails to find the class. + * The default implementation in ObjectInputStream handles primitive types with a map from name to + * class. Rather than duplicate the functionality, we are delegating to the super method for all + * failures that may be of this kind, as well as any case where the passed in ClassLoader fails to + * find the class. 
*/ } } diff --git a/Plot/src/main/java/io/deephaven/db/plot/util/tables/ColumnHandlerFactory.java b/Plot/src/main/java/io/deephaven/db/plot/util/tables/ColumnHandlerFactory.java index ab1b8bc2279..70f5a211722 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/util/tables/ColumnHandlerFactory.java +++ b/Plot/src/main/java/io/deephaven/db/plot/util/tables/ColumnHandlerFactory.java @@ -26,8 +26,7 @@ private ColumnHandlerFactory() {} private static final long serialVersionUID = -6737880834225877984L; public enum TypeClassification { - INTEGER(true), FLOATINGPOINT(true), TIME(true), PAINT(false), COMPARABLE(false), OBJECT( - false); + INTEGER(true), FLOATINGPOINT(true), TIME(true), PAINT(false), COMPARABLE(false), OBJECT(false); TypeClassification(boolean isNumeric) { this.isNumeric = isNumeric; @@ -92,8 +91,7 @@ public interface ColumnHandler { * Gets the object in row {@code i} of the column as a double. * * - * @throws UnsupportedOperationException if the value in the column can not be converted to - * double + * @throws UnsupportedOperationException if the value in the column can not be converted to double * @param i index * @return column's value at row {@code i} as a double */ @@ -109,8 +107,7 @@ private static abstract class ColumnHandlerTable implements ColumnHandler { private transient DataColumn dataColumn; private final PlotInfo plotInfo; - private ColumnHandlerTable(final Table table, final String columnName, Class type, - final PlotInfo plotInfo) { + private ColumnHandlerTable(final Table table, final String columnName, Class type, final PlotInfo plotInfo) { this.type = type; ArgumentValidations.assertColumnsInTable(table, plotInfo, columnName); this.table = table; @@ -127,8 +124,7 @@ protected DataColumn getDataColumn() { } public TableHandle getTableHandle() { - throw new PlotUnsupportedOperationException( - "Local ColumnHandler does not support table handles", plotInfo); + throw new PlotUnsupportedOperationException("Local ColumnHandler does not 
support table handles", plotInfo); } public String getColumnName() { @@ -161,8 +157,8 @@ private static abstract class ColumnHandlerHandle implements ColumnHandler, Seri private final PlotInfo plotInfo; private transient DataColumn dataColumn; - private ColumnHandlerHandle(final TableHandle tableHandle, final String columnName, - final Class type, final PlotInfo plotInfo) { + private ColumnHandlerHandle(final TableHandle tableHandle, final String columnName, final Class type, + final PlotInfo plotInfo) { this.type = type; ArgumentValidations.assertColumnsInTable(tableHandle, plotInfo, columnName); this.tableHandle = tableHandle; @@ -206,16 +202,15 @@ public Class type() { /** * Creates a new ColumnHandler instance with a numeric {@link TypeClassification}. * - * @throws io.deephaven.base.verify.RequirementFailure {@code tableHandle} and - * {@code columnName} must not be null. + * @throws io.deephaven.base.verify.RequirementFailure {@code tableHandle} and {@code columnName} must not be null. 
* @throws IllegalArgumentException if {@code columnName} is not a column in the table * @throws UnsupportedOperationException data in the {@code columnName} must be numeric * @param tableHandle holds the table * @param columnName column in the table * @return new numeric ColumnHandler */ - public static ColumnHandler newNumericHandler(final TableHandle tableHandle, - final String columnName, final PlotInfo plotInfo) { + public static ColumnHandler newNumericHandler(final TableHandle tableHandle, final String columnName, + final PlotInfo plotInfo) { ArgumentValidations.assertNotNull(tableHandle, "tableHandle", plotInfo); ArgumentValidations.assertNotNull(columnName, "columnName", plotInfo); ArgumentValidations.assertColumnsInTable(tableHandle.getTable(), plotInfo, columnName); @@ -413,30 +408,27 @@ public TypeClassification typeClassification() { @Override public double getDouble(int i) { - throw new UnsupportedOperationException( - "Double conversion not supported for paints"); + throw new UnsupportedOperationException("Double conversion not supported for paints"); } }; } else { throw new UnsupportedOperationException( - "Unsupported numeric data type: columnName=" + columnName + " type=" + type); + "Unsupported numeric data type: columnName=" + columnName + " type=" + type); } } /** * Creates a new ColumnHandler instance with a numeric {@link TypeClassification}. * - * @throws io.deephaven.base.verify.RequirementFailure {@code table} and {@code columnName} must - * not be null. + * @throws io.deephaven.base.verify.RequirementFailure {@code table} and {@code columnName} must not be null. 
* @throws IllegalArgumentException if {@code columnName} is not a column in the table * @throws UnsupportedOperationException data in the column must be numeric * @param table table * @param columnName column in the table * @return new numeric ColumnHandler */ - public static ColumnHandler newNumericHandler(final Table table, final String columnName, - final PlotInfo plotInfo) { + public static ColumnHandler newNumericHandler(final Table table, final String columnName, final PlotInfo plotInfo) { ArgumentValidations.assertNotNull(table, "table", plotInfo); ArgumentValidations.assertNotNull(columnName, "columnName", plotInfo); @@ -633,32 +625,29 @@ public TypeClassification typeClassification() { @Override public double getDouble(int i) { - throw new UnsupportedOperationException( - "Double conversion not supported for paints"); + throw new UnsupportedOperationException("Double conversion not supported for paints"); } }; } else { throw new UnsupportedOperationException( - "Unsupported numeric data type: columnName=" + columnName + " type=" + type); + "Unsupported numeric data type: columnName=" + columnName + " type=" + type); } } /** * Creates a new ColumnHandler instance with a comparable {@link TypeClassification}. * - * @throws io.deephaven.base.verify.RequirementFailure {@code tableHandle} and - * {@code columnName} must not be null. + * @throws io.deephaven.base.verify.RequirementFailure {@code tableHandle} and {@code columnName} must not be null. 
* @throws IllegalArgumentException if {@code columnName} is not a column in the table - * @throws UnsupportedOperationException data in the {@code columnName} must be - * {@link Comparable} + * @throws UnsupportedOperationException data in the {@code columnName} must be {@link Comparable} * @param tableHandle holds the table * @param columnName column in the table * @return new comparable ColumnHandler */ @SuppressWarnings("WeakerAccess") - public static ColumnHandler newComparableHandler(final TableHandle tableHandle, - final String columnName, final PlotInfo plotInfo) { + public static ColumnHandler newComparableHandler(final TableHandle tableHandle, final String columnName, + final PlotInfo plotInfo) { ArgumentValidations.assertNotNull(tableHandle, "tableHandle", plotInfo); ArgumentValidations.assertNotNull(columnName, "columnName", plotInfo); @@ -673,22 +662,21 @@ public TypeClassification typeClassification() { @Override public double getDouble(int i) { - throw new PlotUnsupportedOperationException( - "Double conversion not supported for comparables", plotInfo); + throw new PlotUnsupportedOperationException("Double conversion not supported for comparables", + plotInfo); } }; } else { throw new PlotUnsupportedOperationException( - "Unsupported data type: columnName=" + columnName + " type=" + type, plotInfo); + "Unsupported data type: columnName=" + columnName + " type=" + type, plotInfo); } } /** * Creates a new ColumnHandler instance with a comparable {@link TypeClassification}. * - * @throws io.deephaven.base.verify.RequirementFailure {@code table} and {@code columnName} must - * not be null. + * @throws io.deephaven.base.verify.RequirementFailure {@code table} and {@code columnName} must not be null. 
* @throws IllegalArgumentException if {@code columnName} is not a column in the {@code table} * @throws UnsupportedOperationException data in the column must be {@link Comparable} * @param table table @@ -697,7 +685,7 @@ public double getDouble(int i) { */ @SuppressWarnings("WeakerAccess") public static ColumnHandler newComparableHandler(final Table table, final String columnName, - final PlotInfo plotInfo) { + final PlotInfo plotInfo) { ArgumentValidations.assertNotNull(table, "table", plotInfo); ArgumentValidations.assertNotNull(columnName, "columnName", plotInfo); @@ -712,29 +700,28 @@ public TypeClassification typeClassification() { @Override public double getDouble(int i) { - throw new PlotUnsupportedOperationException( - "Double conversion not supported for comparables", plotInfo); + throw new PlotUnsupportedOperationException("Double conversion not supported for comparables", + plotInfo); } }; } else { throw new PlotUnsupportedOperationException( - "Unsupported data type: columnName=" + columnName + " type=" + type, plotInfo); + "Unsupported data type: columnName=" + columnName + " type=" + type, plotInfo); } } /** * Creates a new ColumnHandler instance with a object {@link TypeClassification}. * - * @throws io.deephaven.base.verify.RequirementFailure {@code tableHandle} and - * {@code columnName} must not be null. + * @throws io.deephaven.base.verify.RequirementFailure {@code tableHandle} and {@code columnName} must not be null. 
* @throws IllegalArgumentException if {@code columnName} is not a column in the table * @param tableHandle holds the table * @param columnName column in the table * @return new object ColumnHandler */ - public static ColumnHandler newObjectHandler(final TableHandle tableHandle, - final String columnName, final PlotInfo plotInfo) { + public static ColumnHandler newObjectHandler(final TableHandle tableHandle, final String columnName, + final PlotInfo plotInfo) { ArgumentValidations.assertNotNull(tableHandle, "tableHandle", plotInfo); ArgumentValidations.assertNotNull(columnName, "columnName", plotInfo); @@ -747,8 +734,7 @@ public TypeClassification typeClassification() { @Override public double getDouble(int i) { - throw new PlotUnsupportedOperationException( - "Double conversion not supported for objects", plotInfo); + throw new PlotUnsupportedOperationException("Double conversion not supported for objects", plotInfo); } }; } @@ -756,18 +742,15 @@ public double getDouble(int i) { /** * Creates a new ColumnHandler instance with a object {@link TypeClassification}. * - * @throws io.deephaven.base.verify.RequirementFailure {@code table} and {@code columnName} must - * not be null. + * @throws io.deephaven.base.verify.RequirementFailure {@code table} and {@code columnName} must not be null. * @throws IllegalArgumentException if {@code columnName} is not a column in the {@code table} * @param table table * @param columnName column in the table * @return new object ColumnHandler - * @throws io.deephaven.base.verify.RequirementFailure {@code table} and {@code columnName} must - * not be null. + * @throws io.deephaven.base.verify.RequirementFailure {@code table} and {@code columnName} must not be null. 
* @throws IllegalArgumentException if {@code columnName} is not a column in the {@code table} */ - public static ColumnHandler newObjectHandler(final Table table, final String columnName, - final PlotInfo plotInfo) { + public static ColumnHandler newObjectHandler(final Table table, final String columnName, final PlotInfo plotInfo) { ArgumentValidations.assertNotNull(table, "table", plotInfo); ArgumentValidations.assertNotNull(columnName, "columnName", plotInfo); @@ -780,8 +763,7 @@ public TypeClassification typeClassification() { @Override public double getDouble(int i) { - throw new PlotUnsupportedOperationException( - "Double conversion not supported for objects", plotInfo); + throw new PlotUnsupportedOperationException("Double conversion not supported for objects", plotInfo); } }; } diff --git a/Plot/src/main/java/io/deephaven/db/plot/util/tables/SwappableTableOneClickAbstract.java b/Plot/src/main/java/io/deephaven/db/plot/util/tables/SwappableTableOneClickAbstract.java index cb03235adab..bfac826bc1b 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/util/tables/SwappableTableOneClickAbstract.java +++ b/Plot/src/main/java/io/deephaven/db/plot/util/tables/SwappableTableOneClickAbstract.java @@ -12,8 +12,7 @@ /** * Holds a handle on a one click table that may get swapped out for another table. 
*/ -public abstract class SwappableTableOneClickAbstract extends SwappableTable - implements SwappableTableMap { +public abstract class SwappableTableOneClickAbstract extends SwappableTable implements SwappableTableMap { private static final long serialVersionUID = 2L; protected final long updateInterval; @@ -23,10 +22,10 @@ public abstract class SwappableTableOneClickAbstract extends SwappableTable private final Comparable seriesName; protected SwappableTableOneClickAbstract(@NotNull final Comparable seriesName, - final long updateInterval, - @NotNull final TableMapHandle tableMapHandle, - final boolean requireAllFiltersToDisplay, - @NotNull final String[] byColumns) { + final long updateInterval, + @NotNull final TableMapHandle tableMapHandle, + final boolean requireAllFiltersToDisplay, + @NotNull final String[] byColumns) { super(tableMapHandle); Require.neqNull(byColumns, "byColumns"); this.tableMapHandle = tableMapHandle; diff --git a/Plot/src/main/java/io/deephaven/db/plot/util/tables/SwappableTableOneClickMap.java b/Plot/src/main/java/io/deephaven/db/plot/util/tables/SwappableTableOneClickMap.java index fc407c6f5b3..5d14503ecea 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/util/tables/SwappableTableOneClickMap.java +++ b/Plot/src/main/java/io/deephaven/db/plot/util/tables/SwappableTableOneClickMap.java @@ -11,8 +11,8 @@ import java.util.function.Function; /** - * Holds a handle on a one click table that may get swapped out for another table. A TableMap is - * used to compute the OneClick. + * Holds a handle on a one click table that may get swapped out for another table. A TableMap is used to compute the + * OneClick. 
*/ public class SwappableTableOneClickMap extends SwappableTableOneClickAbstract { private static final long serialVersionUID = 1L; @@ -20,17 +20,15 @@ public class SwappableTableOneClickMap extends SwappableTableOneClickAbstract { private final Function transform; public SwappableTableOneClickMap(final Comparable seriesName, final long updateInterval, - final TableMapHandle tableMapHandle, final Function transform, - final String... byColumns) { + final TableMapHandle tableMapHandle, final Function transform, final String... byColumns) { this(seriesName, updateInterval, tableMapHandle, transform, true, byColumns); } public SwappableTableOneClickMap(final Comparable seriesName, final long updateInterval, - final TableMapHandle tableMapHandle, final Function transform, - final boolean requireAllFiltersToDisplay, final String... byColumns) { + final TableMapHandle tableMapHandle, final Function transform, + final boolean requireAllFiltersToDisplay, final String... byColumns) { super(seriesName, updateInterval, tableMapHandle, requireAllFiltersToDisplay, byColumns); - ArgumentValidations.assertColumnsInTable(tableMapHandle.getTableDefinition(), null, - byColumns); + ArgumentValidations.assertColumnsInTable(tableMapHandle.getTableDefinition(), null, byColumns); this.transform = transform; } diff --git a/Plot/src/main/java/io/deephaven/db/plot/util/tables/TableBackedTableMapHandle.java b/Plot/src/main/java/io/deephaven/db/plot/util/tables/TableBackedTableMapHandle.java index c77f4885ae2..7f4337d70c5 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/util/tables/TableBackedTableMapHandle.java +++ b/Plot/src/main/java/io/deephaven/db/plot/util/tables/TableBackedTableMapHandle.java @@ -9,8 +9,8 @@ import java.util.Collection; /** - * {@link TableMapHandle} with an underlying table. The purpose of this class is to allow - * consolidation between constructed TableMaps in FigureWidget. + * {@link TableMapHandle} with an underlying table. 
The purpose of this class is to allow consolidation between + * constructed TableMaps in FigureWidget. */ public class TableBackedTableMapHandle extends TableMapHandle { @@ -18,12 +18,12 @@ public class TableBackedTableMapHandle extends TableMapHandle { private final TableDefinition tableDefinition; public TableBackedTableMapHandle(final TableHandle tableHandle, final String[] keyColumns, - final PlotInfo plotInfo) { + final PlotInfo plotInfo) { this(tableHandle.getTable(), tableHandle.getColumns(), keyColumns, plotInfo); } - public TableBackedTableMapHandle(final Table table, final Collection columns, - final String[] keyColumns, final PlotInfo plotInfo) { + public TableBackedTableMapHandle(final Table table, final Collection columns, final String[] keyColumns, + final PlotInfo plotInfo) { super(columns, keyColumns, plotInfo); ArgumentValidations.assertNotNull(table, "table", plotInfo); diff --git a/Plot/src/main/java/io/deephaven/db/plot/util/tables/TableHandle.java b/Plot/src/main/java/io/deephaven/db/plot/util/tables/TableHandle.java index 1db28afc473..cc27e750996 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/util/tables/TableHandle.java +++ b/Plot/src/main/java/io/deephaven/db/plot/util/tables/TableHandle.java @@ -21,10 +21,10 @@ public class TableHandle implements Serializable { private final Set columns; public TableHandle(@NotNull final Table table, - @NotNull final String... columns) { + @NotNull final String... columns) { this.table = table; - this.columns = new TreeSet<>( - Arrays.asList(Arrays.stream(columns).filter(Objects::nonNull).toArray(String[]::new))); + this.columns = + new TreeSet<>(Arrays.asList(Arrays.stream(columns).filter(Objects::nonNull).toArray(String[]::new))); } public void addColumn(final String column) { @@ -44,11 +44,10 @@ public Table getTable() { } /** - * Get the {@link TableDefinition} of the table that will be handed off to actual plotting - * methods. 
This method is important because in some cases (ie when ACls are applied to source - * tables) computations must be deferred until after ACL application so that they are applied - * correctly. In this case, the table produced by {@link #getTable()} may be the raw source - * table, not the final table. This method is used to get the final result table definition no + * Get the {@link TableDefinition} of the table that will be handed off to actual plotting methods. This method is + * important because in some cases (ie when ACls are applied to source tables) computations must be deferred until + * after ACL application so that they are applied correctly. In this case, the table produced by {@link #getTable()} + * may be the raw source table, not the final table. This method is used to get the final result table definition no * matter what the preconditions are. * * @return The {@link TableDefinition} of the plotted table. diff --git a/Plot/src/main/java/io/deephaven/db/plot/util/tables/TableMapBackedTableMapHandle.java b/Plot/src/main/java/io/deephaven/db/plot/util/tables/TableMapBackedTableMapHandle.java index 1ad80986d89..8759a30f693 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/util/tables/TableMapBackedTableMapHandle.java +++ b/Plot/src/main/java/io/deephaven/db/plot/util/tables/TableMapBackedTableMapHandle.java @@ -25,18 +25,17 @@ public class TableMapBackedTableMapHandle extends TableMapHandle { private transient Table tableDefinitionTable;; private Set viewColumns; - public TableMapBackedTableMapHandle(final TableMap tableMap, - final TableDefinition tableDefinition, final String[] keyColumns, final PlotInfo plotInfo, - Collection columns) { + public TableMapBackedTableMapHandle(final TableMap tableMap, final TableDefinition tableDefinition, + final String[] keyColumns, final PlotInfo plotInfo, Collection columns) { this(tableMap, tableDefinition, keyColumns, plotInfo, columns, null); } public TableMapBackedTableMapHandle(@NotNull final TableMap 
tableMap, - @NotNull final TableDefinition tableDefinition, - @NotNull final String[] keyColumns, - final PlotInfo plotInfo, - @NotNull Collection columns, - @Nullable Collection viewColumns) { + @NotNull final TableDefinition tableDefinition, + @NotNull final String[] keyColumns, + final PlotInfo plotInfo, + @NotNull Collection columns, + @Nullable Collection viewColumns) { super(columns, keyColumns, plotInfo); ArgumentValidations.assertNotNull(tableMap, "tableMap", plotInfo); diff --git a/Plot/src/main/java/io/deephaven/db/plot/util/tables/TableMapHandle.java b/Plot/src/main/java/io/deephaven/db/plot/util/tables/TableMapHandle.java index 74ccd392f3c..2d7b4d0f3da 100644 --- a/Plot/src/main/java/io/deephaven/db/plot/util/tables/TableMapHandle.java +++ b/Plot/src/main/java/io/deephaven/db/plot/util/tables/TableMapHandle.java @@ -41,8 +41,7 @@ public abstract class TableMapHandle implements Serializable, PlotExceptionCause private final PlotInfo plotInfo; - protected TableMapHandle(final Collection columns, final String[] keyColumns, - final PlotInfo plotInfo) { + protected TableMapHandle(final Collection columns, final String[] keyColumns, final PlotInfo plotInfo) { this.id = nextId.incrementAndGet(); this.requiredColumns = new LinkedHashSet<>(columns); Collections.addAll(this.requiredColumns, keyColumns); @@ -79,10 +78,9 @@ public Set getColumns() { } /** - * Get the set of columns to .view() when the table is fetched. Typically this is identical to - * {@link #getColumns()} however, there are situations where the transformations applied to a - * TableMap result in columns that are not present in the base tableMap. (for example - * catHistPlot). + * Get the set of columns to .view() when the table is fetched. Typically this is identical to {@link #getColumns()} + * however, there are situations where the transformations applied to a TableMap result in columns that are not + * present in the base tableMap. (for example catHistPlot). 
* * @return */ diff --git a/Plot/src/main/java/io/deephaven/gui/shape/JShapes.java b/Plot/src/main/java/io/deephaven/gui/shape/JShapes.java index 4de0d94f066..84554aca941 100644 --- a/Plot/src/main/java/io/deephaven/gui/shape/JShapes.java +++ b/Plot/src/main/java/io/deephaven/gui/shape/JShapes.java @@ -40,45 +40,40 @@ public java.awt.Shape getShape() { private static final double SIZE = 6d; - private static JShapes SQUARE = - new JShapes(new Rectangle2D.Double(-SIZE / 2, -SIZE / 2, SIZE, SIZE)); - private static JShapes CIRCLE = - new JShapes(new Ellipse2D.Double(-SIZE / 2, -SIZE / 2, SIZE, SIZE)); - private static JShapes UP_TRIANGLE = - new JShapes(new Polygon(new int[] {0, (int) SIZE / 2, (int) -SIZE / 2}, + private static JShapes SQUARE = new JShapes(new Rectangle2D.Double(-SIZE / 2, -SIZE / 2, SIZE, SIZE)); + private static JShapes CIRCLE = new JShapes(new Ellipse2D.Double(-SIZE / 2, -SIZE / 2, SIZE, SIZE)); + private static JShapes UP_TRIANGLE = new JShapes(new Polygon(new int[] {0, (int) SIZE / 2, (int) -SIZE / 2}, new int[] {(int) -SIZE / 2, (int) SIZE / 2, (int) SIZE / 2}, 3)); - private static JShapes DIAMOND = - new JShapes(new Polygon(new int[] {0, (int) SIZE / 2, 0, (int) -SIZE / 2}, + private static JShapes DIAMOND = new JShapes(new Polygon(new int[] {0, (int) SIZE / 2, 0, (int) -SIZE / 2}, new int[] {(int) -SIZE / 2, 0, (int) SIZE / 2, 0}, 4)); private static JShapes HORIZONTAL_RECTANGLE = - new JShapes(new Rectangle2D.Double(-SIZE / 2, -SIZE / 2 / 2d, SIZE, SIZE / 2d)); + new JShapes(new Rectangle2D.Double(-SIZE / 2, -SIZE / 2 / 2d, SIZE, SIZE / 2d)); private static JShapes ELLIPSE = - new JShapes(new java.awt.geom.Ellipse2D.Double(-SIZE / 2, -SIZE / 2 / 2d, SIZE, SIZE / 2d)); - private static JShapes DOWN_TRIANGLE = - new JShapes(new Polygon(new int[] {(int) -SIZE / 2, (int) SIZE / 2, 0}, + new JShapes(new java.awt.geom.Ellipse2D.Double(-SIZE / 2, -SIZE / 2 / 2d, SIZE, SIZE / 2d)); + private static JShapes DOWN_TRIANGLE = new JShapes(new 
Polygon(new int[] {(int) -SIZE / 2, (int) SIZE / 2, 0}, new int[] {(int) -SIZE / 2, (int) -SIZE / 2, (int) SIZE / 2}, 3)); private static JShapes RIGHT_TRIANGLE = - new JShapes(new Polygon(new int[] {(int) -SIZE / 2, (int) SIZE / 2, (int) -SIZE / 2}, - new int[] {(int) -SIZE / 2, 0, (int) SIZE / 2}, 3)); + new JShapes(new Polygon(new int[] {(int) -SIZE / 2, (int) SIZE / 2, (int) -SIZE / 2}, + new int[] {(int) -SIZE / 2, 0, (int) SIZE / 2}, 3)); private static JShapes VERTICAL_RECTANGLE = - new JShapes(new Rectangle2D.Double(-SIZE / 2 / 2d, -SIZE / 2, SIZE / 2d, SIZE)); + new JShapes(new Rectangle2D.Double(-SIZE / 2 / 2d, -SIZE / 2, SIZE / 2d, SIZE)); private static JShapes LEFT_TRIANGLE = - new JShapes(new Polygon(new int[] {(int) -SIZE / 2, (int) SIZE / 2, (int) SIZE / 2}, - new int[] {0, (int) -SIZE / 2, (int) SIZE / 2}, 3)); + new JShapes(new Polygon(new int[] {(int) -SIZE / 2, (int) SIZE / 2, (int) SIZE / 2}, + new int[] {0, (int) -SIZE / 2, (int) SIZE / 2}, 3)); private final static BiMap shapeStringToInstanceMap = - ImmutableBiMap.builder() - .put(NamedShape.SQUARE, SQUARE) - .put(NamedShape.CIRCLE, CIRCLE) - .put(NamedShape.UP_TRIANGLE, UP_TRIANGLE) - .put(NamedShape.DIAMOND, DIAMOND) - .put(NamedShape.HORIZONTAL_RECTANGLE, HORIZONTAL_RECTANGLE) - .put(NamedShape.ELLIPSE, ELLIPSE) - .put(NamedShape.DOWN_TRIANGLE, DOWN_TRIANGLE) - .put(NamedShape.RIGHT_TRIANGLE, RIGHT_TRIANGLE) - .put(NamedShape.VERTICAL_RECTANGLE, VERTICAL_RECTANGLE) - .put(NamedShape.LEFT_TRIANGLE, LEFT_TRIANGLE) - .build(); + ImmutableBiMap.builder() + .put(NamedShape.SQUARE, SQUARE) + .put(NamedShape.CIRCLE, CIRCLE) + .put(NamedShape.UP_TRIANGLE, UP_TRIANGLE) + .put(NamedShape.DIAMOND, DIAMOND) + .put(NamedShape.HORIZONTAL_RECTANGLE, HORIZONTAL_RECTANGLE) + .put(NamedShape.ELLIPSE, ELLIPSE) + .put(NamedShape.DOWN_TRIANGLE, DOWN_TRIANGLE) + .put(NamedShape.RIGHT_TRIANGLE, RIGHT_TRIANGLE) + .put(NamedShape.VERTICAL_RECTANGLE, VERTICAL_RECTANGLE) + .put(NamedShape.LEFT_TRIANGLE, 
LEFT_TRIANGLE) + .build(); } diff --git a/Plot/src/main/java/io/deephaven/gui/shape/NamedShape.java b/Plot/src/main/java/io/deephaven/gui/shape/NamedShape.java index bca8f7b7340..1805b74d9fa 100644 --- a/Plot/src/main/java/io/deephaven/gui/shape/NamedShape.java +++ b/Plot/src/main/java/io/deephaven/gui/shape/NamedShape.java @@ -21,8 +21,7 @@ public static NamedShape getShape(final String shape) { try { return shape == null ? null : NamedShape.valueOf(shape.toUpperCase()); } catch (final IllegalArgumentException iae) { - throw new IllegalArgumentException( - "Not a valid shape: `" + shape + "`; valid shapes: " + shapes); + throw new IllegalArgumentException("Not a valid shape: `" + shape + "`; valid shapes: " + shapes); } } diff --git a/Plot/src/test/java/io/deephaven/db/plot/Filters/TestSelectables.java b/Plot/src/test/java/io/deephaven/db/plot/Filters/TestSelectables.java index 34d00bc4976..eb901a27744 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/Filters/TestSelectables.java +++ b/Plot/src/test/java/io/deephaven/db/plot/Filters/TestSelectables.java @@ -22,8 +22,8 @@ public class TestSelectables extends BaseArrayTestCase { private final double[] values = {1, 2, 3}; private final String byColumn = "Cats"; private final String valueColumn = "Values"; - private final Table table = TableTools.newTable(TableTools.col(byColumn, categories), - TableTools.doubleCol(valueColumn, values)); + private final Table table = + TableTools.newTable(TableTools.col(byColumn, categories), TableTools.doubleCol(valueColumn, values)); private final BaseFigureImpl figure = new BaseFigureImpl(); public void testFilteredTableOneClick() { @@ -40,15 +40,14 @@ public void testFilteredTableOneClick() { assertTrue(e.getMessage().contains("empty")); } testFilteredTable(Selectables.oneClick(table, byColumn)); - testFilteredTable(new SelectableDataSetOneClick(table.byExternal(byColumn), - table.getDefinition(), new String[] {byColumn})); + testFilteredTable(new 
SelectableDataSetOneClick(table.byExternal(byColumn), table.getDefinition(), + new String[] {byColumn})); } private void testFilteredTable(SelectableDataSet> selectableDataSet) { - SwappableTable swappableTable = selectableDataSet.getSwappableTable("M", figure.newChart(), - t -> t, byColumn, valueColumn); - testTableMapEquals(table.byExternal(byColumn), - swappableTable.getTableMapHandle().getTableMap()); + SwappableTable swappableTable = + selectableDataSet.getSwappableTable("M", figure.newChart(), t -> t, byColumn, valueColumn); + testTableMapEquals(table.byExternal(byColumn), swappableTable.getTableMapHandle().getTableMap()); } private void testTableMapEquals(final TableMap t1, final TableMap t2) { diff --git a/Plot/src/test/java/io/deephaven/db/plot/TestColor.java b/Plot/src/test/java/io/deephaven/db/plot/TestColor.java index 99fba8f7eb8..46ab8b04a23 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/TestColor.java +++ b/Plot/src/test/java/io/deephaven/db/plot/TestColor.java @@ -50,8 +50,7 @@ public void testColorDefinitions() { assertEquals(new Color(0x11223344).javaColor(), new java.awt.Color(0x11223344)); assertEquals(new Color(0x11223344, true).javaColor(), new java.awt.Color(0x11223344, true)); assertEquals(new Color(0.1f, 0.2f, 0.3f).javaColor(), new java.awt.Color(0.1f, 0.2f, 0.3f)); - assertEquals(new Color(0.1f, 0.2f, 0.3f, 0.4f).javaColor(), - new java.awt.Color(0.1f, 0.2f, 0.3f, 0.4f)); + assertEquals(new Color(0.1f, 0.2f, 0.3f, 0.4f).javaColor(), new java.awt.Color(0.1f, 0.2f, 0.3f, 0.4f)); } public void testStaticHelpers() { @@ -85,15 +84,11 @@ public void testStaticHelpers() { assertEquals(Color.colorRGB(1, 2, 3, 4).javaColor(), new java.awt.Color(1, 2, 3, 4)); assertEquals(Color.colorRGB(1, 2, 3, 4).javaColor(), new java.awt.Color(1, 2, 3, 4)); assertEquals(Color.colorRGB(0x11223344).javaColor(), new java.awt.Color(0x11223344)); - assertEquals(Color.colorRGB(0x11223344, true).javaColor(), - new java.awt.Color(0x11223344, true)); - 
assertEquals(Color.colorRGB(0.1f, 0.2f, 0.3f).javaColor(), - new java.awt.Color(0.1f, 0.2f, 0.3f)); - assertEquals(Color.colorRGB(0.1f, 0.2f, 0.3f, 0.4f).javaColor(), - new java.awt.Color(0.1f, 0.2f, 0.3f, 0.4f)); + assertEquals(Color.colorRGB(0x11223344, true).javaColor(), new java.awt.Color(0x11223344, true)); + assertEquals(Color.colorRGB(0.1f, 0.2f, 0.3f).javaColor(), new java.awt.Color(0.1f, 0.2f, 0.3f)); + assertEquals(Color.colorRGB(0.1f, 0.2f, 0.3f, 0.4f).javaColor(), new java.awt.Color(0.1f, 0.2f, 0.3f, 0.4f)); assertEquals(Color.colorHSL(36f, 20, 20).javaColor(), new java.awt.Color(61, 53, 41)); - assertEquals(Color.colorHSL(36f, 20, 20, 0.5f).javaColor(), - new java.awt.Color(61, 53, 41, 128)); + assertEquals(Color.colorHSL(36f, 20, 20, 0.5f).javaColor(), new java.awt.Color(61, 53, 41, 128)); try { diff --git a/Plot/src/test/java/io/deephaven/db/plot/TestFont.java b/Plot/src/test/java/io/deephaven/db/plot/TestFont.java index fd12aef2968..511f063915e 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/TestFont.java +++ b/Plot/src/test/java/io/deephaven/db/plot/TestFont.java @@ -47,27 +47,22 @@ public void testStyle() { assertEquals(Font.FontStyle.BOLD_ITALIC, Font.fontStyle("ib")); final Set target = new HashSet<>(); - target.addAll( - Arrays.asList("PLAIN", "BOLD", "ITALIC", "BOLD_ITALIC", "P", "B", "I", "BI", "IB")); + target.addAll(Arrays.asList("PLAIN", "BOLD", "ITALIC", "BOLD_ITALIC", "P", "B", "I", "BI", "IB")); assertEquals(target, new HashSet(Arrays.asList(Font.fontStyleNames()))); } public void testConstructors() { assertEquals(new Font("Ariel", Font.FontStyle.PLAIN, 10).javaFont(), - new java.awt.Font("Ariel", java.awt.Font.PLAIN, 10)); - assertEquals(new Font("Ariel", "PLAIN", 10).javaFont(), - new java.awt.Font("Ariel", java.awt.Font.PLAIN, 10)); - assertEquals(new Font("Ariel", "P", 10).javaFont(), - new java.awt.Font("Ariel", java.awt.Font.PLAIN, 10)); + new java.awt.Font("Ariel", java.awt.Font.PLAIN, 10)); + assertEquals(new Font("Ariel", 
"PLAIN", 10).javaFont(), new java.awt.Font("Ariel", java.awt.Font.PLAIN, 10)); + assertEquals(new Font("Ariel", "P", 10).javaFont(), new java.awt.Font("Ariel", java.awt.Font.PLAIN, 10)); assertEquals(Font.font("Ariel", Font.FontStyle.PLAIN, 10).javaFont(), - new java.awt.Font("Ariel", java.awt.Font.PLAIN, 10)); - assertEquals(Font.font("Ariel", "PLAIN", 10).javaFont(), - new java.awt.Font("Ariel", java.awt.Font.PLAIN, 10)); - assertEquals(Font.font("Ariel", "P", 10).javaFont(), - new java.awt.Font("Ariel", java.awt.Font.PLAIN, 10)); + new java.awt.Font("Ariel", java.awt.Font.PLAIN, 10)); + assertEquals(Font.font("Ariel", "PLAIN", 10).javaFont(), new java.awt.Font("Ariel", java.awt.Font.PLAIN, 10)); + assertEquals(Font.font("Ariel", "P", 10).javaFont(), new java.awt.Font("Ariel", java.awt.Font.PLAIN, 10)); } public void testTransforms() { diff --git a/Plot/src/test/java/io/deephaven/db/plot/TestSeriesCollection.java b/Plot/src/test/java/io/deephaven/db/plot/TestSeriesCollection.java index 085fed81d1b..12aaccce441 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/TestSeriesCollection.java +++ b/Plot/src/test/java/io/deephaven/db/plot/TestSeriesCollection.java @@ -36,13 +36,11 @@ public void testSeriesCollection() { sc.add(SeriesCollection.SeriesType.CATEGORY, false, s1); assertEquals(1, sc.getSeriesDescriptions().size()); - assertSeriesDescription(sc.getSeriesDescriptions().get("S1"), - SeriesCollection.SeriesType.CATEGORY, false, s1); + assertSeriesDescription(sc.getSeriesDescriptions().get("S1"), SeriesCollection.SeriesType.CATEGORY, false, s1); sc.add(SeriesCollection.SeriesType.XY, true, s2); assertEquals(2, sc.getSeriesDescriptions().size()); - assertSeriesDescription(sc.getSeriesDescriptions().get("S2"), - SeriesCollection.SeriesType.XY, true, s2); + assertSeriesDescription(sc.getSeriesDescriptions().get("S2"), SeriesCollection.SeriesType.XY, true, s2); try { sc.add(SeriesCollection.SeriesType.XY, true, s3); @@ -64,8 +62,7 @@ public void 
testSeriesCollection() { } private void assertSeriesDescription(final SeriesCollection.SeriesDescription sd, - final SeriesCollection.SeriesType type, final boolean isMultiSeries, - final SeriesInternal series) { + final SeriesCollection.SeriesType type, final boolean isMultiSeries, final SeriesInternal series) { assertNotNull(sd); assertEquals(type, sd.getType()); assertEquals(isMultiSeries, sd.isMultiSeries()); @@ -125,10 +122,9 @@ public void testCopy() { assertNotNull(copy); assertEquals(2, copy.getSeriesDescriptions().size()); - assertSeriesDescription(copy.getSeriesDescriptions().get("S1C"), - SeriesCollection.SeriesType.CATEGORY, false, s1copy); - assertSeriesDescription(copy.getSeriesDescriptions().get("S2C"), - SeriesCollection.SeriesType.XY, true, s2copy); + assertSeriesDescription(copy.getSeriesDescriptions().get("S1C"), SeriesCollection.SeriesType.CATEGORY, false, + s1copy); + assertSeriesDescription(copy.getSeriesDescriptions().get("S2C"), SeriesCollection.SeriesType.XY, true, s2copy); } } diff --git a/Plot/src/test/java/io/deephaven/db/plot/TestSeriesLocation.java b/Plot/src/test/java/io/deephaven/db/plot/TestSeriesLocation.java index 23306478a19..a89c0b0b21a 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/TestSeriesLocation.java +++ b/Plot/src/test/java/io/deephaven/db/plot/TestSeriesLocation.java @@ -16,17 +16,14 @@ public class TestSeriesLocation extends BaseArrayTestCase { public void testLocation() { - final Table t = - TableTools.emptyTable(100).update("A=i%2==0?`A`:`B`", "X=1.0*i", "Y=1.0*i*i"); + final Table t = TableTools.emptyTable(100).update("A=i%2==0?`A`:`B`", "X=1.0*i", "Y=1.0*i*i"); final BaseFigureImpl fig1 = new BaseFigureImpl(3, 2); final ChartImpl c11 = fig1.newChart(2, 1); final AxesImpl a11 = c11.newAxes(); final AxesImpl a12 = c11.newAxes(); - final XYDataSeriesArray s11 = - a11.plot("S1", new double[] {1, 2, 3}, new double[] {4, 5, 6}); - final XYDataSeriesArray s12 = - a12.plot("S2", new double[] {1, 2, 3}, new double[] 
{4, 5, 6}); + final XYDataSeriesArray s11 = a11.plot("S1", new double[] {1, 2, 3}, new double[] {4, 5, 6}); + final XYDataSeriesArray s12 = a12.plot("S2", new double[] {1, 2, 3}, new double[] {4, 5, 6}); final MultiXYSeries ms11 = a11.plotBy("MS1", t, "X", "Y", "A"); final MultiXYSeries ms12 = a12.plotBy("MS2", t, "X", "Y", "A"); final AxisImpl ax11 = a11.axis(0); @@ -36,10 +33,8 @@ public void testLocation() { final ChartImpl c21 = fig2.newChart(2, 1); final AxesImpl a21 = c21.newAxes(); final AxesImpl a22 = c21.newAxes(); - final XYDataSeriesArray s21 = - a21.plot("S1", new double[] {1, 2, 3}, new double[] {4, 5, 6}); - final XYDataSeriesArray s22 = - a22.plot("S2", new double[] {1, 2, 3}, new double[] {4, 5, 6}); + final XYDataSeriesArray s21 = a21.plot("S1", new double[] {1, 2, 3}, new double[] {4, 5, 6}); + final XYDataSeriesArray s22 = a22.plot("S2", new double[] {1, 2, 3}, new double[] {4, 5, 6}); final MultiXYSeries ms21 = a21.plotBy("MS1", t, "X", "Y", "A"); final MultiXYSeries ms22 = a22.plotBy("MS2", t, "X", "Y", "A"); final AxisImpl ax21 = a21.axis(0); diff --git a/Plot/src/test/java/io/deephaven/db/plot/axisformatters/ScientificNumberFormatterTest.java b/Plot/src/test/java/io/deephaven/db/plot/axisformatters/ScientificNumberFormatterTest.java index d7ae00807bc..ccf8547c9ff 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/axisformatters/ScientificNumberFormatterTest.java +++ b/Plot/src/test/java/io/deephaven/db/plot/axisformatters/ScientificNumberFormatterTest.java @@ -14,49 +14,33 @@ public void testFormat() { final ScientificNumberFormatter formatter = new ScientificNumberFormatter(3, 1E-5, 1E5); // less than the max number of decimals - assertEquals(formatter.format(3.5, new StringBuffer(), new FieldPosition(0)).toString(), - "3.5"); + assertEquals(formatter.format(3.5, new StringBuffer(), new FieldPosition(0)).toString(), "3.5"); // more than the max number of decimals, rounded down - assertEquals(formatter.format(3.5234, new StringBuffer(), new 
FieldPosition(0)).toString(), - "3.523"); + assertEquals(formatter.format(3.5234, new StringBuffer(), new FieldPosition(0)).toString(), "3.523"); // more than the max number of decimals, rounded up - assertEquals(formatter.format(3.5238, new StringBuffer(), new FieldPosition(0)).toString(), - "3.524"); + assertEquals(formatter.format(3.5238, new StringBuffer(), new FieldPosition(0)).toString(), "3.524"); // smaller than lower limit - assertEquals(formatter.format(3E-6, new StringBuffer(), new FieldPosition(0)).toString(), - "3E-6"); - assertEquals( - formatter.format(0.000003, new StringBuffer(), new FieldPosition(0)).toString(), - "3E-6"); - assertEquals( - formatter.format(-0.000003, new StringBuffer(), new FieldPosition(0)).toString(), - "-3E-6"); + assertEquals(formatter.format(3E-6, new StringBuffer(), new FieldPosition(0)).toString(), "3E-6"); + assertEquals(formatter.format(0.000003, new StringBuffer(), new FieldPosition(0)).toString(), "3E-6"); + assertEquals(formatter.format(-0.000003, new StringBuffer(), new FieldPosition(0)).toString(), "-3E-6"); // smaller than lower limit, more decimal points than max number of decimals - assertEquals( - formatter.format(0.00000334534, new StringBuffer(), new FieldPosition(0)).toString(), - "3.345E-6"); + assertEquals(formatter.format(0.00000334534, new StringBuffer(), new FieldPosition(0)).toString(), "3.345E-6"); // larger than upper limit - assertEquals(formatter.format(3E6, new StringBuffer(), new FieldPosition(0)).toString(), - "3E6"); - assertEquals(formatter.format(3000000, new StringBuffer(), new FieldPosition(0)).toString(), - "3E6"); + assertEquals(formatter.format(3E6, new StringBuffer(), new FieldPosition(0)).toString(), "3E6"); + assertEquals(formatter.format(3000000, new StringBuffer(), new FieldPosition(0)).toString(), "3E6"); // larger than upper limit, more decimal points than max number of decimals - assertEquals( - formatter.format(334584234, new StringBuffer(), new FieldPosition(0)).toString(), - 
"3.346E8"); + assertEquals(formatter.format(334584234, new StringBuffer(), new FieldPosition(0)).toString(), "3.346E8"); // long larger than upper limit, more decimal points than max number of decimals - assertEquals( - formatter.format(33458423423423L, new StringBuffer(), new FieldPosition(0)).toString(), - "3.346E13"); - assertEquals( - formatter.format(-33458423423423L, new StringBuffer(), new FieldPosition(0)).toString(), - "-3.346E13"); + assertEquals(formatter.format(33458423423423L, new StringBuffer(), new FieldPosition(0)).toString(), + "3.346E13"); + assertEquals(formatter.format(-33458423423423L, new StringBuffer(), new FieldPosition(0)).toString(), + "-3.346E13"); } } diff --git a/Plot/src/test/java/io/deephaven/db/plot/axistransformations/TestAxisTransformBusinessCalendar.java b/Plot/src/test/java/io/deephaven/db/plot/axistransformations/TestAxisTransformBusinessCalendar.java index 5d79825804e..df640199cdd 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/axistransformations/TestAxisTransformBusinessCalendar.java +++ b/Plot/src/test/java/io/deephaven/db/plot/axistransformations/TestAxisTransformBusinessCalendar.java @@ -12,8 +12,7 @@ public class TestAxisTransformBusinessCalendar extends BaseArrayTestCase { - private final AxisTransformBusinessCalendar bt = - new AxisTransformBusinessCalendar(Calendars.calendar("JPOSE")); + private final AxisTransformBusinessCalendar bt = new AxisTransformBusinessCalendar(Calendars.calendar("JPOSE")); private final DBDateTime holiday = DBTimeUtils.convertDateTime("2017-01-03T10:00:00 JP"); private final DBDateTime weekend = DBTimeUtils.convertDateTime("2017-01-02T10:00:00 JP"); @@ -85,31 +84,29 @@ public void testTransform() { // tests bugs where first day was transformed incorrectly public void testFirstTransformedDay() { AxisTransform transform = new AxisTransformBusinessCalendar(Calendars.calendar("USNYSE")); - double d = - transform.transform(DBTimeUtils.convertDateTime("2018-02-02T09:30:01 NY").getNanos()); - 
double d2 = - transform.transform(DBTimeUtils.convertDateTime("2018-02-02T14:30:01 NY").getNanos()); + double d = transform.transform(DBTimeUtils.convertDateTime("2018-02-02T09:30:01 NY").getNanos()); + double d2 = transform.transform(DBTimeUtils.convertDateTime("2018-02-02T14:30:01 NY").getNanos()); assertFalse(d == d2); // first day holiday transform = new AxisTransformBusinessCalendar(Calendars.calendar("USNYSE")); transform.transform(DBTimeUtils.convertDateTime("2018-02-03T09:30:01 NY").getNanos()); assertEquals(0.0 + 30 * DBTimeUtils.MINUTE, - transform.transform(DBTimeUtils.convertDateTime("2018-02-02T10:00:00 NY").getNanos())); + transform.transform(DBTimeUtils.convertDateTime("2018-02-02T10:00:00 NY").getNanos())); assertEquals(2.34E13 + 30 * DBTimeUtils.MINUTE, - transform.transform(DBTimeUtils.convertDateTime("2018-02-05T10:00:00 NY").getNanos())); + transform.transform(DBTimeUtils.convertDateTime("2018-02-05T10:00:00 NY").getNanos())); // first time outside business hours transform = new AxisTransformBusinessCalendar(Calendars.calendar("USNYSE")); transform.transform(DBTimeUtils.convertDateTime("2018-02-02T09:29:00 NY").getNanos()); assertEquals(2.34E13 + 30 * DBTimeUtils.MINUTE, - transform.transform(DBTimeUtils.convertDateTime("2018-02-02T10:00:00 NY").getNanos())); + transform.transform(DBTimeUtils.convertDateTime("2018-02-02T10:00:00 NY").getNanos())); // previous day was holiday transform = new AxisTransformBusinessCalendar(Calendars.calendar("USNYSE")); transform.transform(DBTimeUtils.convertDateTime("2018-01-29T09:29:00 NY").getNanos()); assertEquals(2 * 2.34E13 + 30 * DBTimeUtils.MINUTE, - transform.transform(DBTimeUtils.convertDateTime("2018-01-30T10:00:00 NY").getNanos())); + transform.transform(DBTimeUtils.convertDateTime("2018-01-30T10:00:00 NY").getNanos())); } private void testTransform(final DBDateTime tIn, final DBDateTime tTarget) { diff --git a/Plot/src/test/java/io/deephaven/db/plot/axistransformations/TestAxisTransformLambda.java 
b/Plot/src/test/java/io/deephaven/db/plot/axistransformations/TestAxisTransformLambda.java index f89711657e3..be296ea83dc 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/axistransformations/TestAxisTransformLambda.java +++ b/Plot/src/test/java/io/deephaven/db/plot/axistransformations/TestAxisTransformLambda.java @@ -13,8 +13,7 @@ public class TestAxisTransformLambda extends BaseArrayTestCase { private final DoubleUnaryOperator dataToAxis = Math::exp; private final DoubleUnaryOperator axisToData = Math::log; private final DoublePredicate isVisible = d -> d > 1; - private final AxisTransformLambda lambda = - new AxisTransformLambda(dataToAxis, axisToData, isVisible); + private final AxisTransformLambda lambda = new AxisTransformLambda(dataToAxis, axisToData, isVisible); private final AxisTransformLambda lambda2 = new AxisTransformLambda(dataToAxis, axisToData); private final AxisTransformLambda lambda3 = new AxisTransformLambda(); private final double delta = 0.00001; diff --git a/Plot/src/test/java/io/deephaven/db/plot/axistransformations/TestAxisTransforms.java b/Plot/src/test/java/io/deephaven/db/plot/axistransformations/TestAxisTransforms.java index bc53db62cbb..5a81dd54845 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/axistransformations/TestAxisTransforms.java +++ b/Plot/src/test/java/io/deephaven/db/plot/axistransformations/TestAxisTransforms.java @@ -78,7 +78,7 @@ public void testAxisTransform() { final BusinessCalendar cal = Calendars.calendar("USNYSE"); final AxisTransformBusinessCalendar at1 = - (AxisTransformBusinessCalendar) AxisTransforms.axisTransform("USNYSE"); + (AxisTransformBusinessCalendar) AxisTransforms.axisTransform("USNYSE"); assertEquals(cal, at1.getBusinessCalendar()); } } diff --git a/Plot/src/test/java/io/deephaven/db/plot/colors/TestColorMap.java b/Plot/src/test/java/io/deephaven/db/plot/colors/TestColorMap.java index 45da7a2767f..312b64faf11 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/colors/TestColorMap.java +++ 
b/Plot/src/test/java/io/deephaven/db/plot/colors/TestColorMap.java @@ -19,8 +19,7 @@ public class TestColorMap extends BaseArrayTestCase { public void testHeatMap() { - Function map = - ColorMaps.heatMap(0, 100, new Color(254, 0, 0), new Color(0, 0, 254)); + Function map = ColorMaps.heatMap(0, 100, new Color(254, 0, 0), new Color(0, 0, 254)); Color c = map.apply(0.0); assertEquals(new Color(254, 0, 0), c); c = map.apply(50.0); diff --git a/Plot/src/test/java/io/deephaven/db/plot/composite/TestScatterPlotMatrix.java b/Plot/src/test/java/io/deephaven/db/plot/composite/TestScatterPlotMatrix.java index ebd9b8fa480..50b0dc2f610 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/composite/TestScatterPlotMatrix.java +++ b/Plot/src/test/java/io/deephaven/db/plot/composite/TestScatterPlotMatrix.java @@ -91,11 +91,10 @@ public void testScatterPlotMatrix() { i++; } t = TableTools.newTable(columns).updateView("Cat = i == 0 ? `A` : `B`"); - SelectableDataSetOneClick oneClick = new SelectableDataSetOneClick( - t.byExternal(columnNames), t.getDefinition(), new String[] {"Cat"}); + SelectableDataSetOneClick oneClick = + new SelectableDataSetOneClick(t.byExternal(columnNames), t.getDefinition(), new String[] {"Cat"}); final ScatterPlotMatrix matrix = ScatterPlotMatrix.scatterPlotMatrix(oneClick, columnNames); - final XYDataSeriesInternal series = - (XYDataSeriesInternal) matrix.getFigure().chart(0).axes(0).series(0); + final XYDataSeriesInternal series = (XYDataSeriesInternal) matrix.getFigure().chart(0).axes(0).series(0); for (int j = 0; j < series.size(); j++) { assertEquals(Double.NaN, series.getX(i)); assertEquals(Double.NaN, series.getY(i)); @@ -198,9 +197,7 @@ public void testPointSize() { testPointSize(new double[] {pointSizesLong[0], pointSizesLong[1]}, matrix); matrix = matrix.pointSize(pointSizesNumber); - testPointSize( - new double[] {pointSizesNumber[0].doubleValue(), pointSizesNumber[1].doubleValue()}, - matrix); + testPointSize(new double[] 
{pointSizesNumber[0].doubleValue(), pointSizesNumber[1].doubleValue()}, matrix); @@ -212,10 +209,8 @@ public void testPointSize() { } private void testPointSize(final double[] size, final ScatterPlotMatrix matrix) { - final XYDataSeriesInternal series1 = - (XYDataSeriesInternal) matrix.getFigure().chart(0).axes(0).series(0); - final XYDataSeriesInternal series2 = - (XYDataSeriesInternal) matrix.getFigure().chart(1).axes(0).series(0); + final XYDataSeriesInternal series1 = (XYDataSeriesInternal) matrix.getFigure().chart(0).axes(0).series(0); + final XYDataSeriesInternal series2 = (XYDataSeriesInternal) matrix.getFigure().chart(1).axes(0).series(0); final XYDataSeriesInternal[] seriesArray = new XYDataSeriesInternal[] {series1, series2}; for (int i = 0; i < size.length; i++) { @@ -227,10 +222,8 @@ private void testPointSize(final double[] size, final ScatterPlotMatrix matrix) } private void testPointSize(final ScatterPlotMatrix matrix) { - final XYDataSeriesInternal series1 = - (XYDataSeriesInternal) matrix.getFigure().chart(0).axes(0).series(0); - final XYDataSeriesInternal series2 = - (XYDataSeriesInternal) matrix.getFigure().chart(1).axes(0).series(0); + final XYDataSeriesInternal series1 = (XYDataSeriesInternal) matrix.getFigure().chart(0).axes(0).series(0); + final XYDataSeriesInternal series2 = (XYDataSeriesInternal) matrix.getFigure().chart(1).axes(0).series(0); for (int j = 0; j < series1.size(); j++) { assertNull(series1.getPointSize(j)); @@ -241,8 +234,7 @@ private void testPointSize(final ScatterPlotMatrix matrix) { } private void testScatterPlotMatrix(ScatterPlotMatrix matrix) { - XYDataSeriesInternal series = - (XYDataSeriesInternal) matrix.getFigure().chart(0).axes(0).series(0); + XYDataSeriesInternal series = (XYDataSeriesInternal) matrix.getFigure().chart(0).axes(0).series(0); for (int i = 0; i < length; i++) { assertEquals(0.0, series.getX(i)); assertEquals(0.0, series.getY(i)); diff --git 
a/Plot/src/test/java/io/deephaven/db/plot/datasets/category/TestAbstractCategoryDataSeries.java b/Plot/src/test/java/io/deephaven/db/plot/datasets/category/TestAbstractCategoryDataSeries.java index d57f5afe56b..54724d78571 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/datasets/category/TestAbstractCategoryDataSeries.java +++ b/Plot/src/test/java/io/deephaven/db/plot/datasets/category/TestAbstractCategoryDataSeries.java @@ -34,7 +34,7 @@ private static class TestCat extends CategoryDataSeriesMap { TestCat(AxesImpl axes) { super(axes, 1, name, new IndexableDataArray<>(categories, null), - new IndexableNumericDataArrayInt(values, null)); + new IndexableNumericDataArrayInt(values, null)); assertEquals(name(), "test"); } } @@ -320,17 +320,13 @@ public void testPointShape() { shapesMap.put(categories[0], "circle"); shapesMap.put(categories[1], "square"); data.pointShape(shapesMap); - assertEquals(NamedShape.valueOf(shapesMap.get(categories[0]).toUpperCase()), - data.getPointShape(categories[0])); - assertEquals(NamedShape.valueOf(shapesMap.get(categories[1]).toUpperCase()), - data.getPointShape(categories[1])); + assertEquals(NamedShape.valueOf(shapesMap.get(categories[0]).toUpperCase()), data.getPointShape(categories[0])); + assertEquals(NamedShape.valueOf(shapesMap.get(categories[1]).toUpperCase()), data.getPointShape(categories[1])); assertEquals(NamedShape.SQUARE, data.getPointShape(categories[2])); data.pointShape(shapesMap::get); - assertEquals(NamedShape.valueOf(shapesMap.get(categories[0]).toUpperCase()), - data.getPointShape(categories[0])); - assertEquals(NamedShape.valueOf(shapesMap.get(categories[1]).toUpperCase()), - data.getPointShape(categories[1])); + assertEquals(NamedShape.valueOf(shapesMap.get(categories[0]).toUpperCase()), data.getPointShape(categories[0])); + assertEquals(NamedShape.valueOf(shapesMap.get(categories[1]).toUpperCase()), data.getPointShape(categories[1])); assertEquals(NamedShape.SQUARE, data.getPointShape(categories[2])); } diff 
--git a/Plot/src/test/java/io/deephaven/db/plot/datasets/category/TestCategoryDataSeriesMap.java b/Plot/src/test/java/io/deephaven/db/plot/datasets/category/TestCategoryDataSeriesMap.java index 02986471638..6aede1aecc4 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/datasets/category/TestCategoryDataSeriesMap.java +++ b/Plot/src/test/java/io/deephaven/db/plot/datasets/category/TestCategoryDataSeriesMap.java @@ -35,8 +35,7 @@ public class TestCategoryDataSeriesMap extends BaseArrayTestCase { private static final IndexableDataDouble dx = new IndexableDataDouble(dataX, false, null); private static final IndexableData dyInvalid = new IndexableDataArray<>(invalidY, null); - private static final IndexableDataDouble dxMismatched = - new IndexableDataDouble(dataXMisMatched, false, null); + private static final IndexableDataDouble dxMismatched = new IndexableDataDouble(dataXMisMatched, false, null); private static final IndexableNumericData dy = new IndexableNumericDataArrayDouble(dataY, null); public void testCategoryDataSeriesMap() { @@ -95,8 +94,8 @@ public void testCopy() { map2.gradientVisible(true); map2.lineColor("green"); map2.pointColor("red"); - map2.pointSize(TableTools.newTable(TableTools.intCol("Values", dataX), - TableTools.intCol("Sizes", dataX)), "Values", "Sizes"); + map2.pointSize(TableTools.newTable(TableTools.intCol("Values", dataX), TableTools.intCol("Sizes", dataX)), + "Values", "Sizes"); map2.pointLabelFormat("{1}"); map2.toolTipPattern("0.00E0"); map2.seriesColor(2); @@ -109,8 +108,7 @@ public void testCopy() { } - static void testCopy(final CategoryDataSeriesInternal original, - final CategoryDataSeriesInternal copy) { + static void testCopy(final CategoryDataSeriesInternal original, final CategoryDataSeriesInternal copy) { assertEquals(original.name(), copy.name()); assertEquals(original.size(), copy.size()); for (int aDataX : dataX) { diff --git 
a/Plot/src/test/java/io/deephaven/db/plot/datasets/category/TestCategoryDataSeriesSwappableTableMap.java b/Plot/src/test/java/io/deephaven/db/plot/datasets/category/TestCategoryDataSeriesSwappableTableMap.java index acdefb7fe29..3b3ccac3277 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/datasets/category/TestCategoryDataSeriesSwappableTableMap.java +++ b/Plot/src/test/java/io/deephaven/db/plot/datasets/category/TestCategoryDataSeriesSwappableTableMap.java @@ -28,7 +28,7 @@ public void testCategoryDataSeriesTableMap() { } Table t = TableTools.newTable(TableTools.col("x", dataX), - TableTools.doubleCol("y", dataY)); + TableTools.doubleCol("y", dataY)); t = t.updateView("Cat = `A`"); // todo test oneClick @@ -45,7 +45,7 @@ public void testLiveTable() { } Table t = TableTools.newTable(TableTools.col("x", dataX), - TableTools.doubleCol("y", dataY)); + TableTools.doubleCol("y", dataY)); t = t.updateView("Cat = `A`"); // todo test oneClick @@ -62,7 +62,7 @@ public void testCopy() { } Table t = TableTools.newTable(TableTools.col("x", dataX), - TableTools.doubleCol("y", dataY)); + TableTools.doubleCol("y", dataY)); t = t.updateView("Cat = `A`"); // todo test oneClick diff --git a/Plot/src/test/java/io/deephaven/db/plot/datasets/category/TestCategoryDataSeriesTableMap.java b/Plot/src/test/java/io/deephaven/db/plot/datasets/category/TestCategoryDataSeriesTableMap.java index 8a0d3bbb0dd..a89f3918e39 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/datasets/category/TestCategoryDataSeriesTableMap.java +++ b/Plot/src/test/java/io/deephaven/db/plot/datasets/category/TestCategoryDataSeriesTableMap.java @@ -24,13 +24,12 @@ public void testCopy() { } Table t = TableTools.newTable(TableTools.col("x", dataX), - TableTools.doubleCol("y", dataY)); + TableTools.doubleCol("y", dataY)); final TableHandle h = PlotUtils.createCategoryTableHandle(t, "x", "y"); final CategoryDataSeriesTableMap series = - new CategoryDataSeriesTableMap(chart.newAxes(), 1, "Test", h, "x", "y"); - final 
CategoryDataSeriesTableMap copy = - series.copy(new BaseFigureImpl().newChart().newAxes()); + new CategoryDataSeriesTableMap(chart.newAxes(), 1, "Test", h, "x", "y"); + final CategoryDataSeriesTableMap copy = series.copy(new BaseFigureImpl().newChart().newAxes()); series.size(); copy.size(); @@ -40,8 +39,7 @@ public void testCopy() { } - private void testCopy(final CategoryDataSeriesTableMap series, - final AbstractCategoryDataSeries copy) { + private void testCopy(final CategoryDataSeriesTableMap series, final AbstractCategoryDataSeries copy) { assertEquals(series.getValue("0"), copy.getValue("0")); assertEquals(series.getValue("5"), copy.getValue("5")); assertEquals(series.getValue("55"), copy.getValue("55")); diff --git a/Plot/src/test/java/io/deephaven/db/plot/datasets/data/TestAssociativeData.java b/Plot/src/test/java/io/deephaven/db/plot/datasets/data/TestAssociativeData.java index aaef51f41c9..01d18e1c705 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/datasets/data/TestAssociativeData.java +++ b/Plot/src/test/java/io/deephaven/db/plot/datasets/data/TestAssociativeData.java @@ -19,13 +19,11 @@ public class TestAssociativeData extends BaseArrayTestCase { private final String[] cats = {"A", "B"}; private final int[] values = {1, 2}; private final Integer x = values[0]; - private final Table t = - TableTools.newTable(TableTools.col("Cat", cats), TableTools.intCol("Values", values)); + private final Table t = TableTools.newTable(TableTools.col("Cat", cats), TableTools.intCol("Values", values)); private final TableHandle tableHandle = new TableHandle(t, "Cat", "Values"); private final AssociativeDataTable associativeDataTable = - new AssociativeDataTable<>(tableHandle, "Cat", "Values", String.class, Integer.class, null); - private final AssociativeDataHashMap dataHashMap = - new AssociativeDataHashMap<>(null); + new AssociativeDataTable<>(tableHandle, "Cat", "Values", String.class, Integer.class, null); + private final AssociativeDataHashMap dataHashMap = 
new AssociativeDataHashMap<>(null); @Override public void setUp() throws Exception { @@ -63,24 +61,24 @@ public void testAssociativeDataHashMap() { public void testAssociativeDataTable() { try { - new AssociativeDataTable(null, "Cat", "Values", String.class, - Integer.class, null); + new AssociativeDataTable(null, "Cat", "Values", String.class, Integer.class, + null); TestCase.fail("Expected an exception"); } catch (PlotIllegalArgumentException e) { assertTrue(e.getMessage().contains("Null")); } try { - new AssociativeDataTable(tableHandle, null, "Values", - String.class, Integer.class, null); + new AssociativeDataTable(tableHandle, null, "Values", String.class, Integer.class, + null); TestCase.fail("Expected an exception"); } catch (PlotIllegalArgumentException e) { assertTrue(e.getMessage().contains("Null")); } try { - new AssociativeDataTable(tableHandle, "Cat", null, - String.class, Integer.class, null); + new AssociativeDataTable(tableHandle, "Cat", null, String.class, Integer.class, + null); TestCase.fail("Expected an exception"); } catch (PlotIllegalArgumentException e) { assertTrue(e.getMessage().contains("Null")); @@ -110,8 +108,7 @@ public void testAssociativeDataWithDefault() { final int def = 2; final Map moreData = new HashMap<>(); moreData.put("A", 5); - final AssociativeDataWithDefault dataWithDefault = - new AssociativeDataWithDefault<>(null); + final AssociativeDataWithDefault dataWithDefault = new AssociativeDataWithDefault<>(null); assertNull(dataWithDefault.getDefault()); dataWithDefault.setDefault(def); diff --git a/Plot/src/test/java/io/deephaven/db/plot/datasets/data/TestIndexableData.java b/Plot/src/test/java/io/deephaven/db/plot/datasets/data/TestIndexableData.java index 20ad8bc1e29..d1934e54260 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/datasets/data/TestIndexableData.java +++ b/Plot/src/test/java/io/deephaven/db/plot/datasets/data/TestIndexableData.java @@ -60,19 +60,14 @@ public void setUp() throws Exception { public void 
testIndexableNumericData() { final IndexableNumericData shortData = new IndexableNumericDataArrayShort(shortArray, null); final IndexableNumericData intData = new IndexableNumericDataArrayInt(intArray, null); - final IndexableNumericData doubleData = - new IndexableNumericDataArrayDouble(doubleArray, null); + final IndexableNumericData doubleData = new IndexableNumericDataArrayDouble(doubleArray, null); final IndexableNumericData longData = new IndexableNumericDataArrayLong(longArray, null); final IndexableNumericData floatData = new IndexableNumericDataArrayFloat(floatArray, null); - final IndexableNumericData numberData = - new IndexableNumericDataArrayNumber<>(numberArray, null); - final IndexableNumericData listData = - new IndexableNumericDataListNumber<>(numberList, null); + final IndexableNumericData numberData = new IndexableNumericDataArrayNumber<>(numberArray, null); + final IndexableNumericData listData = new IndexableNumericDataListNumber<>(numberList, null); final IndexableNumericData dateData = new IndexableNumericDataArrayDate(dateArray, null); - final IndexableNumericData dateTimeData = - new IndexableNumericDataArrayDBDateTime(dbdateTimesArray, null); - checkData(shortData, intData, doubleData, longData, floatData, numberData, listData, - dateTimeData); + final IndexableNumericData dateTimeData = new IndexableNumericDataArrayDBDateTime(dbdateTimesArray, null); + checkData(shortData, intData, doubleData, longData, floatData, numberData, listData, dateTimeData); checkDateData(dateData); } @@ -83,8 +78,7 @@ public void testIndexableDouble() { IndexableData longData = new IndexableDataDouble(longArray, false, null); IndexableData floatData = new IndexableDataDouble(floatArray, false, null); IndexableData numberData = new IndexableDataDouble(numberArray, false, null); - checkData(Double.NaN, true, shortData, intData, doubleData, longData, floatData, - numberData); + checkData(Double.NaN, true, shortData, intData, doubleData, longData, floatData, 
numberData); shortData = new IndexableDataDouble(shortArray, true, null); intData = new IndexableDataDouble(intArray, true, null); @@ -102,25 +96,25 @@ public void testIndexableInteger() { public void testIndexableDataTable() { final Table t = TableTools.newTable(TableTools.shortCol("shortCol", shortArray), - TableTools.intCol("intCol", intArray), TableTools.doubleCol("doubleCol", doubleArray), - TableTools.floatCol("floatCol", floatArray), TableTools.longCol("longCol", longArray), - TableTools.col("numberCol", numberArray)); + TableTools.intCol("intCol", intArray), TableTools.doubleCol("doubleCol", doubleArray), + TableTools.floatCol("floatCol", floatArray), TableTools.longCol("longCol", longArray), + TableTools.col("numberCol", numberArray)); final BaseFigureImpl figure = new BaseFigureImpl(); - final TableHandle tableHandle = new TableHandle(t, "shortCol", "intCol", "doubleCol", - "floatCol", "longCol", "numberCol"); + final TableHandle tableHandle = + new TableHandle(t, "shortCol", "intCol", "doubleCol", "floatCol", "longCol", "numberCol"); final ColumnHandlerFactory.ColumnHandler shortColumnHandler = - ColumnHandlerFactory.newNumericHandler(tableHandle, "shortCol", null); + ColumnHandlerFactory.newNumericHandler(tableHandle, "shortCol", null); final ColumnHandlerFactory.ColumnHandler intColumnHandler = - ColumnHandlerFactory.newNumericHandler(tableHandle, "intCol", null); + ColumnHandlerFactory.newNumericHandler(tableHandle, "intCol", null); final ColumnHandlerFactory.ColumnHandler doubleColHandler = - ColumnHandlerFactory.newNumericHandler(tableHandle, "doubleCol", null); + ColumnHandlerFactory.newNumericHandler(tableHandle, "doubleCol", null); final ColumnHandlerFactory.ColumnHandler floatColHandler = - ColumnHandlerFactory.newNumericHandler(tableHandle, "floatCol", null); + ColumnHandlerFactory.newNumericHandler(tableHandle, "floatCol", null); final ColumnHandlerFactory.ColumnHandler longColHandler = - ColumnHandlerFactory.newNumericHandler(tableHandle, 
"longCol", null); + ColumnHandlerFactory.newNumericHandler(tableHandle, "longCol", null); final ColumnHandlerFactory.ColumnHandler numberColHandler = - ColumnHandlerFactory.newNumericHandler(tableHandle, "numberCol", null); + ColumnHandlerFactory.newNumericHandler(tableHandle, "numberCol", null); final IndexableData shortData = new IndexableDataTable(shortColumnHandler, null); final IndexableData intData = new IndexableDataTable(intColumnHandler, null); @@ -134,7 +128,7 @@ public void testIndexableDataTable() { public void testIndexableDataInfinite() { final IndexableDataInfinite indexableDataInfinite = - new IndexableDataInfinite<>(new IndexableDataDouble(doubleArray, true, null)); + new IndexableDataInfinite<>(new IndexableDataDouble(doubleArray, true, null)); assertEquals(Integer.MAX_VALUE, indexableDataInfinite.size()); for (int i = 1; i < doubleArray.length; i++) { assertEquals(doubleArray[i], indexableDataInfinite.get(i)); @@ -146,18 +140,15 @@ public void testIndexableDataInfinite() { } public void testIndexableDataWithDefault() { - final IndexableDataWithDefault indexableDataWithDefault = - new IndexableDataWithDefault(null); + final IndexableDataWithDefault indexableDataWithDefault = new IndexableDataWithDefault(null); - indexableDataWithDefault.setSpecific(new IndexableDataDouble(doubleArray, false, null), - false); + indexableDataWithDefault.setSpecific(new IndexableDataDouble(doubleArray, false, null), false); assertEquals(doubleArray.length, indexableDataWithDefault.size()); for (int i = 1; i < doubleArray.length; i++) { assertEquals(doubleArray[i], indexableDataWithDefault.get(i)); } - indexableDataWithDefault.setSpecific(new IndexableDataDouble(doubleArray, false, null), - false); + indexableDataWithDefault.setSpecific(new IndexableDataDouble(doubleArray, false, null), false); assertEquals(doubleArray.length, indexableDataWithDefault.size()); } @@ -181,7 +172,7 @@ private void checkData(Double emptyValue, boolean checkOutOfBounds, IndexableDat if 
(checkOutOfBounds) { if (!(dataset instanceof IndexableDataDouble) || - ((IndexableDataDouble) dataset).getMapNanToNull()) { + ((IndexableDataDouble) dataset).getMapNanToNull()) { assertNull(dataset.get(SIZE)); } else { assertEquals(Double.NaN, dataset.get(SIZE)); diff --git a/Plot/src/test/java/io/deephaven/db/plot/datasets/errorbar/CategoryErrorBarDataSeriesTableMapTest.java b/Plot/src/test/java/io/deephaven/db/plot/datasets/errorbar/CategoryErrorBarDataSeriesTableMapTest.java index 3673d96058a..0035c86771a 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/datasets/errorbar/CategoryErrorBarDataSeriesTableMapTest.java +++ b/Plot/src/test/java/io/deephaven/db/plot/datasets/errorbar/CategoryErrorBarDataSeriesTableMapTest.java @@ -33,13 +33,13 @@ public void testCopy() { dataYHigh[0] = Double.NaN; dataX[10] = null; Table t = TableTools.newTable(TableTools.col("x", dataX), - TableTools.doubleCol("y", dataY), - TableTools.doubleCol("yLow", dataYLow), - TableTools.doubleCol("yHigh", dataYHigh)); + TableTools.doubleCol("y", dataY), + TableTools.doubleCol("yLow", dataYLow), + TableTools.doubleCol("yHigh", dataYHigh)); final TableHandle h = PlotUtils.createCategoryTableHandle(t, "x", "y", "yLow", "yHigh"); - final CategoryErrorBarDataSeriesTableMap series = new CategoryErrorBarDataSeriesTableMap( - chart.newAxes(), 1, "Test", h, "x", "y", "yLow", "yHigh"); + final CategoryErrorBarDataSeriesTableMap series = + new CategoryErrorBarDataSeriesTableMap(chart.newAxes(), 1, "Test", h, "x", "y", "yLow", "yHigh"); final CategoryErrorBarDataSeriesTableMap copy = series.copy(chart.newAxes()); series.size(); @@ -51,7 +51,7 @@ public void testCopy() { } private void testCopy(final CategoryErrorBarDataSeriesTableMap series, - final CategoryErrorBarDataSeriesInternal copy) { + final CategoryErrorBarDataSeriesInternal copy) { assertEquals(series.getValue("0"), copy.getValue("0")); assertEquals(series.getValue("5"), copy.getValue("5")); assertEquals(series.getValue("55"), 
copy.getValue("55")); diff --git a/Plot/src/test/java/io/deephaven/db/plot/datasets/errorbar/XYErrorBarDataSeriesTableArrayTest.java b/Plot/src/test/java/io/deephaven/db/plot/datasets/errorbar/XYErrorBarDataSeriesTableArrayTest.java index 11d2b51b9f9..7187e24e66b 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/datasets/errorbar/XYErrorBarDataSeriesTableArrayTest.java +++ b/Plot/src/test/java/io/deephaven/db/plot/datasets/errorbar/XYErrorBarDataSeriesTableArrayTest.java @@ -53,13 +53,13 @@ public void testXYErrorBarDataSeriesTableArray() { dataX[10] = Double.NaN; dataX[100] = Double.NaN; Table t = TableTools.newTable(TableTools.doubleCol("x", dataX), - TableTools.doubleCol("y", dataY), - TableTools.doubleCol("yLow", dataYLow), - TableTools.doubleCol("yHigh", dataYHigh)); + TableTools.doubleCol("y", dataY), + TableTools.doubleCol("yLow", dataYLow), + TableTools.doubleCol("yHigh", dataYHigh)); final TableHandle h = new TableHandle(t, "x", "y", "yLow", "yHigh"); - final XYErrorBarDataSeriesTableArray series = new XYErrorBarDataSeriesTableArray( - chart.newAxes(), 1, "Test", h, "x", null, null, "y", "yLow", "yHigh", false, true); + final XYErrorBarDataSeriesTableArray series = new XYErrorBarDataSeriesTableArray(chart.newAxes(), 1, "Test", h, + "x", null, null, "y", "yLow", "yHigh", false, true); assertEquals(series.getX(0), 0.0); assertEquals(series.getX(5), 5.0); @@ -86,19 +86,18 @@ public void testLiveTable() { final BaseFigureImpl figure = new BaseFigureImpl(); final ChartImpl chart = figure.newChart(); - final QueryTable liveTable = TstUtils.testRefreshingTable(i(2, 4, 6), c("x", 1, 2, 3), - c("y", 1, 2, 3), c("yLow", 0, 1, 2), c("yHigh", 11, 22, 33)); + final QueryTable liveTable = TstUtils.testRefreshingTable(i(2, 4, 6), c("x", 1, 2, 3), c("y", 1, 2, 3), + c("yLow", 0, 1, 2), c("yHigh", 11, 22, 33)); final TableHandle h = new TableHandle(liveTable, "x", "y", "yLow", "yHigh"); - final XYErrorBarDataSeriesTableArray series = new XYErrorBarDataSeriesTableArray( 
- chart.newAxes(), 1, "Test", h, "x", null, null, "y", "yLow", "yHigh", false, true); + final XYErrorBarDataSeriesTableArray series = new XYErrorBarDataSeriesTableArray(chart.newAxes(), 1, "Test", h, + "x", null, null, "y", "yLow", "yHigh", false, true); assertEquals(series.getX(4), Double.NaN); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> { - addToTable(liveTable, i(7, 9), c("x", 4, 5), c("y", 4, 5), c("yLow", 3, 4), - c("yHigh", 5, 6)); + addToTable(liveTable, i(7, 9), c("x", 4, 5), c("y", 4, 5), c("yLow", 3, 4), c("yHigh", 5, 6)); liveTable.notifyListeners(i(7, 9), i(), i()); }); @@ -126,13 +125,13 @@ public void testCopy() { dataX[10] = Double.NaN; dataX[100] = Double.NaN; Table t = TableTools.newTable(TableTools.doubleCol("x", dataX), - TableTools.doubleCol("y", dataY), - TableTools.doubleCol("yLow", dataYLow), - TableTools.doubleCol("yHigh", dataYHigh)); + TableTools.doubleCol("y", dataY), + TableTools.doubleCol("yLow", dataYLow), + TableTools.doubleCol("yHigh", dataYHigh)); final TableHandle h = new TableHandle(t, "x", "y", "yLow", "yHigh"); - XYErrorBarDataSeriesTableArray series = new XYErrorBarDataSeriesTableArray(chart.newAxes(), - 1, "Test", h, "x", null, null, "y", "yLow", "yHigh", false, true); + XYErrorBarDataSeriesTableArray series = new XYErrorBarDataSeriesTableArray(chart.newAxes(), 1, "Test", h, "x", + null, null, "y", "yLow", "yHigh", false, true); XYErrorBarDataSeriesTableArray copy = series.copy(chart.newAxes()); series.size(); @@ -142,8 +141,7 @@ public void testCopy() { } - private void testCopy(final XYErrorBarDataSeriesTableArray series, - final XYErrorBarDataSeriesInternal copy) { + private void testCopy(final XYErrorBarDataSeriesTableArray series, final XYErrorBarDataSeriesInternal copy) { assertEquals(series.getX(0), copy.getX(0)); assertEquals(series.getX(5), copy.getX(5)); assertEquals(series.getX(10), copy.getX(10)); diff --git a/Plot/src/test/java/io/deephaven/db/plot/datasets/ohlc/TestOHLCDataSeries.java 
b/Plot/src/test/java/io/deephaven/db/plot/datasets/ohlc/TestOHLCDataSeries.java index 8ac5a270320..d77f289ae8c 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/datasets/ohlc/TestOHLCDataSeries.java +++ b/Plot/src/test/java/io/deephaven/db/plot/datasets/ohlc/TestOHLCDataSeries.java @@ -22,24 +22,22 @@ import java.util.ArrayList; public class TestOHLCDataSeries extends BaseArrayTestCase { - private final DBDateTime[] datesA = - {new DBDateTime(DBTimeUtils.DAY), new DBDateTime(2 * DBTimeUtils.DAY), - new DBDateTime(3 * DBTimeUtils.DAY), new DBDateTime(4 * DBTimeUtils.DAY)}; + private final DBDateTime[] datesA = {new DBDateTime(DBTimeUtils.DAY), new DBDateTime(2 * DBTimeUtils.DAY), + new DBDateTime(3 * DBTimeUtils.DAY), new DBDateTime(4 * DBTimeUtils.DAY)}; private final double[] openA = {1.0, 2.0, 1.5, 2.0}; private final double[] closeA = {1.8, 1.8, 1.7, 2.2}; private final double[] highA = {2.0, 2.0, 1.8, 2.5}; private final double[] lowA = {0.9, 1.5, 1.5, 1.8}; - private final IndexableNumericData dates = - new IndexableNumericDataArrayDBDateTime(datesA, null); + private final IndexableNumericData dates = new IndexableNumericDataArrayDBDateTime(datesA, null); private final IndexableNumericData open = new IndexableNumericDataArrayDouble(openA, null); private final IndexableNumericData close = new IndexableNumericDataArrayDouble(closeA, null); private final IndexableNumericData high = new IndexableNumericDataArrayDouble(highA, null); private final IndexableNumericData low = new IndexableNumericDataArrayDouble(lowA, null); - private final OHLCDataSeriesInternal dataSeries = new OHLCDataSeriesArray( - new BaseFigureImpl().newChart().newAxes(), 1, "Test", dates, open, high, low, close); + private final OHLCDataSeriesInternal dataSeries = new OHLCDataSeriesArray(new BaseFigureImpl().newChart().newAxes(), + 1, "Test", dates, open, high, low, close); private final OHLCDataSeriesInternal dataSeries2 = new OHLCDataSeriesArray( - new BaseFigureImpl().newChart().newAxes(), 
1, "Test2", dates, close, high, low, open); + new BaseFigureImpl().newChart().newAxes(), 1, "Test2", dates, close, high, low, open); @Override @@ -50,8 +48,8 @@ public void testOHLCDataSeriesArray() { checkOHLCDataSeriesArray(dataSeries2, datesA, closeA, highA, lowA, openA); } - private void checkOHLCDataSeriesArray(OHLCDataSeriesInternal dataSeries, DBDateTime[] time, - double[] open, double[] high, double[] low, double[] close) { + private void checkOHLCDataSeriesArray(OHLCDataSeriesInternal dataSeries, DBDateTime[] time, double[] open, + double[] high, double[] low, double[] close) { assertEquals(dataSeries.size(), time.length); for (int i = 0; i < dataSeries.size(); i++) { @@ -65,12 +63,12 @@ private void checkOHLCDataSeriesArray(OHLCDataSeriesInternal dataSeries, DBDateT } public void testCopy() { - final OHLCDataSeriesArray ohlc1 = new OHLCDataSeriesArray( - new BaseFigureImpl().newChart().newAxes(), 1, "Test", dates, open, high, low, close); + final OHLCDataSeriesArray ohlc1 = new OHLCDataSeriesArray(new BaseFigureImpl().newChart().newAxes(), 1, "Test", + dates, open, high, low, close); testCopy(ohlc1, ohlc1.copy(new BaseFigureImpl().newChart().newAxes())); - final OHLCDataSeriesArray ohlc2 = new OHLCDataSeriesArray( - new BaseFigureImpl().newChart().newAxes(), 1, "Test2", dates, close, high, low, open); + final OHLCDataSeriesArray ohlc2 = new OHLCDataSeriesArray(new BaseFigureImpl().newChart().newAxes(), 1, "Test2", + dates, close, high, low, open); ohlc2.pointsVisible(true); ohlc2.linesVisible(false); ohlc2.pointLabelFormat("{0}: {1}, {2}"); @@ -79,17 +77,16 @@ public void testCopy() { ohlc2.seriesColor("blue"); ohlc2.lineColor("red"); ohlc2.pointSize(0.5, 4.2, 3.0); - ohlc2.addTableHandle( - new TableHandle(TableTools.emptyTable(2).updateView("A=i", "B=i"), "A", "B")); + ohlc2.addTableHandle(new TableHandle(TableTools.emptyTable(2).updateView("A=i", "B=i"), "A", "B")); ohlc2.addTableHandle(new TableHandle(TableTools.emptyTable(2).updateView("C=i"), "C")); 
- final SwappableTable swappableTable = new SwappableTable( - new TableBackedTableMapHandle(TableTools.emptyTable(2).updateView("A=i", "B=i"), - new ArrayList<>(), new String[0], null) {}) { - @Override - public void addColumn(String column) { - - } - }; + final SwappableTable swappableTable = + new SwappableTable(new TableBackedTableMapHandle(TableTools.emptyTable(2).updateView("A=i", "B=i"), + new ArrayList<>(), new String[0], null) {}) { + @Override + public void addColumn(String column) { + + } + }; ohlc2.addSwappableTable(swappableTable); final Color c1 = new Color(0, 0, 0); diff --git a/Plot/src/test/java/io/deephaven/db/plot/datasets/xy/TestAbstractXYDataSeries.java b/Plot/src/test/java/io/deephaven/db/plot/datasets/xy/TestAbstractXYDataSeries.java index 1f577e3c9c3..95bba4135f2 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/datasets/xy/TestAbstractXYDataSeries.java +++ b/Plot/src/test/java/io/deephaven/db/plot/datasets/xy/TestAbstractXYDataSeries.java @@ -152,8 +152,7 @@ public void testPointSize() { } final String[] cats = {"A", "B", "C"}; - Table t = TableTools - .newTable(TableTools.doubleCol("Dubs", dsizes), TableTools.col("Str", cats)).ungroup(); + Table t = TableTools.newTable(TableTools.doubleCol("Dubs", dsizes), TableTools.col("Str", cats)).ungroup(); data.pointSize(t, "Dubs"); for (int i = 0; i < t.size(); i++) { assertEquals(data.getPointSize(i), dsizes[i]); @@ -168,13 +167,11 @@ public void testPointSize() { // dsizes[0] = 2; - // t = TableTools.newTable(TableTools.doubleCol("Dubs", dsizes), TableTools.col("Str", - // cats)).ungroup(); + // t = TableTools.newTable(TableTools.doubleCol("Dubs", dsizes), TableTools.col("Str", cats)).ungroup(); // Set set = new HashSet<>(); // Collections.addAll(set, cats); // - // SwappableTableTestUtils.testSwappableTableMethod(t, "Str", set, data, - // XYDataSeries::pointSize, "Dubs"); + // SwappableTableTestUtils.testSwappableTableMethod(t, "Str", set, data, XYDataSeries::pointSize, "Dubs"); // 
data.getPointSize(0); // SwingTestUtils.emptySwingQueue(); // for (int i = 0; i < t.size(); i++) { @@ -235,8 +232,8 @@ public void testPointColor() { } final String[] cats = {"A", "B", "C"}; - Table t = TableTools.newTable(TableTools.intCol("ints", icolors), - TableTools.col("Str", cats), TableTools.col("Paints", colors)); + Table t = TableTools.newTable(TableTools.intCol("ints", icolors), TableTools.col("Str", cats), + TableTools.col("Paints", colors)); data.pointColor(t, "ints"); for (int i = 0; i < t.size(); i++) { assertEquals(data.getPointColor(i), PlotUtils.intToColor(icolors[i])); @@ -244,7 +241,7 @@ public void testPointColor() { colors[0] = c3; t = TableTools.newTable(TableTools.intCol("ints", icolors), TableTools.col("Str", cats), - TableTools.col("Paints", colors)); + TableTools.col("Paints", colors)); data.pointColor(t, "Paints"); for (int i = 0; i < t.size(); i++) { assertEquals(colors[i], data.getPointColor(i)); @@ -255,14 +252,12 @@ public void testPointColor() { // TableTools.col("Paints", colors)); // Set set = new HashSet<>(); // Collections.addAll(set, cats); - // SwappableTableTestUtils.testSwappableTableMethod(t, "Str", set, data, - // XYDataSeries::pointColor, "Paints"); + // SwappableTableTestUtils.testSwappableTableMethod(t, "Str", set, data, XYDataSeries::pointColor, "Paints"); // for (int i = 0; i < t.size(); i++) { // assertEquals(colors[i], data.getPointColor(i)); // } // - // SwappableTableTestUtils.testSwappableTableMethod(t, "Str", set, data, - // XYDataSeries::pointColor, "ints"); + // SwappableTableTestUtils.testSwappableTableMethod(t, "Str", set, data, XYDataSeries::pointColor, "ints"); // for (int i = 0; i < t.size(); i++) { // assertEquals(theme.getSeriesColor(icolors[i]), data.getPointColor(i)); // } @@ -362,7 +357,7 @@ public void testPointShape() { data.pointShape(new String[] {"up_triangle", "circle"}); assertEquals(JShapes.shape((NamedShape) data.getPointShape(0)), - 
JShapes.shape(NamedShape.valueOf("up_triangle".toUpperCase()))); + JShapes.shape(NamedShape.valueOf("up_triangle".toUpperCase()))); assertEquals(data.getPointShape(1), NamedShape.valueOf("circle".toUpperCase())); assertEquals(data.getPointShape(2), NamedShape.valueOf("square".toUpperCase())); @@ -383,16 +378,15 @@ public void testPointShape() { assertEquals(data.getPointShape(1), NamedShape.valueOf("down_triangle".toUpperCase())); assertEquals(data.getPointShape(2), NamedShape.valueOf("right_triangle".toUpperCase())); - final DynamicTable shapeTable = - TableTools.newTable(TableTools.col("shapes", "diamond", "circle", "ellipse")); + final DynamicTable shapeTable = TableTools.newTable(TableTools.col("shapes", "diamond", "circle", "ellipse")); data.pointShape(shapeTable, "shapes"); data.getPointShape(0); assertEquals(data.getPointShape(0), NamedShape.valueOf("diamond".toUpperCase())); assertEquals(data.getPointShape(1), NamedShape.valueOf("circle".toUpperCase())); assertEquals(data.getPointShape(2), NamedShape.valueOf("ellipse".toUpperCase())); - final DynamicTable shapeObjectTable = TableTools.newTable(TableTools.col("shapes", - NamedShape.DIAMOND, NamedShape.ELLIPSE, NamedShape.UP_TRIANGLE)); + final DynamicTable shapeObjectTable = TableTools + .newTable(TableTools.col("shapes", NamedShape.DIAMOND, NamedShape.ELLIPSE, NamedShape.UP_TRIANGLE)); data.pointShape(shapeObjectTable, "shapes"); data.getPointShape(0); assertEquals(data.getPointShape(0), NamedShape.valueOf("diamond".toUpperCase())); @@ -454,8 +448,7 @@ public String get(int index) { assertTrue(e.getMessage().contains("String")); } - final DynamicTable shapeTable = - TableTools.newTable(TableTools.col("shapes", "diamond", "circle", "ellips")); + final DynamicTable shapeTable = TableTools.newTable(TableTools.col("shapes", "diamond", "circle", "ellips")); data.pointShape(shapeTable, "shapes"); try { data.getPointShape(2); @@ -465,13 +458,12 @@ public String get(int index) { } } - public static void 
testCopy(final AbstractXYDataSeries original, - final AbstractXYDataSeries copy) { + public static void testCopy(final AbstractXYDataSeries original, final AbstractXYDataSeries copy) { testCopy(original, copy, true); } - public static void testCopy(final AbstractXYDataSeries original, - final AbstractXYDataSeries copy, final boolean testTables) { + public static void testCopy(final AbstractXYDataSeries original, final AbstractXYDataSeries copy, + final boolean testTables) { assertEquals(original.name(), copy.name()); assertEquals(original.size(), copy.size()); for (int i = 0; i < original.size(); i++) { diff --git a/Plot/src/test/java/io/deephaven/db/plot/datasets/xy/TestXYDataSeriesArray.java b/Plot/src/test/java/io/deephaven/db/plot/datasets/xy/TestXYDataSeriesArray.java index 13f293569d2..1f6876c1f10 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/datasets/xy/TestXYDataSeriesArray.java +++ b/Plot/src/test/java/io/deephaven/db/plot/datasets/xy/TestXYDataSeriesArray.java @@ -29,10 +29,8 @@ public void testXYDataSeriesArray() { final double[] valueArray2 = {4, 5, 6}; final IndexableNumericData values = new IndexableNumericDataArrayDouble(valueArray, null); final IndexableNumericData values2 = new IndexableNumericDataArrayDouble(valueArray2, null); - final XYDataSeriesInternal x1 = - new XYDataSeriesArray(chart.newAxes(), 1, "Test", values, values2); - final XYDataSeriesInternal x2 = - new XYDataSeriesArray(chart.newAxes(), 2, "Test2", values2, values); + final XYDataSeriesInternal x1 = new XYDataSeriesArray(chart.newAxes(), 1, "Test", values, values2); + final XYDataSeriesInternal x2 = new XYDataSeriesArray(chart.newAxes(), 2, "Test2", values2, values); assertEquals(x1.size(), valueArray.length); assertEquals(x2.size(), valueArray.length); @@ -44,28 +42,24 @@ public void testXYDataSeriesArray() { } final double[] misSized = {1, 2}; - final IndexableNumericData misSizedValues = - new IndexableNumericDataArrayDouble(misSized, null); + final IndexableNumericData 
misSizedValues = new IndexableNumericDataArrayDouble(misSized, null); try { - new XYDataSeriesArray(new BaseFigureImpl().newChart().newAxes(), 3, "Test", - misSizedValues, values2); + new XYDataSeriesArray(new BaseFigureImpl().newChart().newAxes(), 3, "Test", misSizedValues, values2); TestCase.fail("Expected an exception"); } catch (IllegalArgumentException e) { assertTrue(e.getMessage().contains("inconsistent size")); } try { - new XYDataSeriesArray(new BaseFigureImpl().newChart().newAxes(), 4, "Test", null, - values2); + new XYDataSeriesArray(new BaseFigureImpl().newChart().newAxes(), 4, "Test", null, values2); TestCase.fail("Expected an exception"); } catch (PlotIllegalArgumentException e) { assertTrue(e.getMessage().contains("Null")); } try { - new XYDataSeriesArray(new BaseFigureImpl().newChart().newAxes(), 5, "Test", - misSizedValues, null); + new XYDataSeriesArray(new BaseFigureImpl().newChart().newAxes(), 5, "Test", misSizedValues, null); TestCase.fail("Expected an exception"); } catch (PlotIllegalArgumentException e) { assertTrue(e.getMessage().contains("Null")); @@ -89,22 +83,21 @@ public void testCopy() { final IndexableNumericData values = new IndexableNumericDataArrayDouble(valueArray, null); final IndexableNumericData values2 = new IndexableNumericDataArrayDouble(valueArray2, null); - final XYDataSeriesArray x1 = new XYDataSeriesArray( - new BaseFigureImpl().newChart().newAxes(), 1, "Test", values, values2); - final XYDataSeriesArray x2 = new XYDataSeriesArray( - new BaseFigureImpl().newChart().newAxes(), 2, "Test2", values2, values); + final XYDataSeriesArray x1 = + new XYDataSeriesArray(new BaseFigureImpl().newChart().newAxes(), 1, "Test", values, values2); + final XYDataSeriesArray x2 = + new XYDataSeriesArray(new BaseFigureImpl().newChart().newAxes(), 2, "Test2", values2, values); - x1.addTableHandle( - new TableHandle(TableTools.emptyTable(2).updateView("A=i", "B=i"), "A", "B")); + x1.addTableHandle(new 
TableHandle(TableTools.emptyTable(2).updateView("A=i", "B=i"), "A", "B")); x1.addTableHandle(new TableHandle(TableTools.emptyTable(2).updateView("C=i"), "C")); - final SwappableTable swappableTable = new SwappableTable( - new TableBackedTableMapHandle(TableTools.emptyTable(2).updateView("A=i", "B=i"), - new ArrayList<>(), new String[0], null)) { - @Override - public void addColumn(String column) { - - } - }; + final SwappableTable swappableTable = + new SwappableTable(new TableBackedTableMapHandle(TableTools.emptyTable(2).updateView("A=i", "B=i"), + new ArrayList<>(), new String[0], null)) { + @Override + public void addColumn(String column) { + + } + }; x1.addSwappableTable(swappableTable); final XYDataSeriesArray x1Copy = x1.copy(new BaseFigureImpl().newChart().newAxes()); diff --git a/Plot/src/test/java/io/deephaven/db/plot/datasets/xy/TestXYDataSeriesFunctionImpl.java b/Plot/src/test/java/io/deephaven/db/plot/datasets/xy/TestXYDataSeriesFunctionImpl.java index 7a843a3eaf8..afb261c7dec 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/datasets/xy/TestXYDataSeriesFunctionImpl.java +++ b/Plot/src/test/java/io/deephaven/db/plot/datasets/xy/TestXYDataSeriesFunctionImpl.java @@ -12,12 +12,9 @@ public class TestXYDataSeriesFunctionImpl extends BaseArrayTestCase { public void testXYDataSeriesFunction() { - final XYDataSeriesFunctionImpl f1 = - new BaseFigureImpl().newChart().newAxes().plot("Test", x -> x); - final XYDataSeriesFunctionImpl f2 = - new BaseFigureImpl().newChart().newAxes().plot("Test", Math::log); - final XYDataSeriesFunctionImpl f3 = - new BaseFigureImpl().newChart().newAxes().plot("Test", x -> Double.NaN); + final XYDataSeriesFunctionImpl f1 = new BaseFigureImpl().newChart().newAxes().plot("Test", x -> x); + final XYDataSeriesFunctionImpl f2 = new BaseFigureImpl().newChart().newAxes().plot("Test", Math::log); + final XYDataSeriesFunctionImpl f3 = new BaseFigureImpl().newChart().newAxes().plot("Test", x -> Double.NaN); assertEquals(f1.size(), 0); 
assertEquals(f2.size(), 0); @@ -80,8 +77,7 @@ public void testXYDataSeriesFunction() { try { - new XYDataSeriesFunctionImpl(new BaseFigureImpl().newChart().newAxes(), 1, "Test", - null); + new XYDataSeriesFunctionImpl(new BaseFigureImpl().newChart().newAxes(), 1, "Test", null); TestCase.fail("Expected an exception"); } catch (PlotIllegalArgumentException e) { assertTrue(e.getMessage().contains("Null")); @@ -90,13 +86,11 @@ public void testXYDataSeriesFunction() { } public void testCopy() { - final XYDataSeriesFunctionImpl f1 = - new BaseFigureImpl().newChart().newAxes().plot("Test", x -> x); + final XYDataSeriesFunctionImpl f1 = new BaseFigureImpl().newChart().newAxes().plot("Test", x -> x); testFunction(f1, f1.copy(new BaseFigureImpl().newChart().newAxes())); - final XYDataSeriesFunctionImpl f2 = - new BaseFigureImpl().newChart().newAxes().plot("Test2", Math::log); + final XYDataSeriesFunctionImpl f2 = new BaseFigureImpl().newChart().newAxes().plot("Test2", Math::log); f2.pointsVisible(false); f2.linesVisible(true); f2.pointLabelFormat("{0}: {1}, {2}"); @@ -107,14 +101,12 @@ public void testCopy() { f2.pointSize(0.5, 4.2, 3.0); testFunction(f2, f2.copy(new BaseFigureImpl().newChart().newAxes())); - final XYDataSeriesFunctionImpl f3 = - new BaseFigureImpl().newChart().newAxes().plot("Test", x -> Double.NaN); + final XYDataSeriesFunctionImpl f3 = new BaseFigureImpl().newChart().newAxes().plot("Test", x -> Double.NaN); f3.funcNPoints(1); testFunction(f3, f3.copy(new BaseFigureImpl().newChart().newAxes())); } - private void testFunction(final XYDataSeriesFunctionImpl original, - final XYDataSeriesFunctionImpl copy) { + private void testFunction(final XYDataSeriesFunctionImpl original, final XYDataSeriesFunctionImpl copy) { TestAbstractXYDataSeries.testCopy(original, copy); } diff --git a/Plot/src/test/java/io/deephaven/db/plot/example_plots/BusinessTime.java b/Plot/src/test/java/io/deephaven/db/plot/example_plots/BusinessTime.java index 5c2d9352546..c59718f3de8 100644 
--- a/Plot/src/test/java/io/deephaven/db/plot/example_plots/BusinessTime.java +++ b/Plot/src/test/java/io/deephaven/db/plot/example_plots/BusinessTime.java @@ -32,20 +32,19 @@ public static void main(String[] args) { Figure fig = FigureFactory.figure(2, 1); Figure cht = fig.newChart(0) - .chartTitle("Business"); - Figure axs = cht.newAxes() - .xTransform(new AxisTransformBusinessCalendar(Calendars.calendar("USNYSE"))) - .xTicksVisible(false) - .xLabel("X") - .yLabel("Y") - .plot("Test", x, y); + .chartTitle("Business"); + Figure axs = cht.newAxes().xTransform(new AxisTransformBusinessCalendar(Calendars.calendar("USNYSE"))) + .xTicksVisible(false) + .xLabel("X") + .yLabel("Y") + .plot("Test", x, y); Figure cht2 = axs.newChart(1) - .chartTitle("NonBusiness"); + .chartTitle("NonBusiness"); Figure axs2 = cht2.newAxes() - .xLabel("X") - .yLabel("Y") - .plot("Test", x, y); + .xLabel("X") + .yLabel("Y") + .plot("Test", x, y); ExamplePlotUtils.display(axs2); diff --git a/Plot/src/test/java/io/deephaven/db/plot/example_plots/CatErrorPlotBy.java b/Plot/src/test/java/io/deephaven/db/plot/example_plots/CatErrorPlotBy.java index beb3b5473f7..ac5c15a83c1 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/example_plots/CatErrorPlotBy.java +++ b/Plot/src/test/java/io/deephaven/db/plot/example_plots/CatErrorPlotBy.java @@ -20,16 +20,16 @@ public static void main(String[] args) { final double[] highValues = {6, 5, 5, 4}; Table t = TableTools.newTable(TableTools.col("USym", usym), - TableTools.col("Cats", cats), - TableTools.doubleCol("Values", values), - TableTools.doubleCol("Low", lowValues), - TableTools.doubleCol("High", highValues)).update("Cats = i % 2 == 0 ? null : Cats") - .update("A = `A`").update("B = `B`"); + TableTools.col("Cats", cats), + TableTools.doubleCol("Values", values), + TableTools.doubleCol("Low", lowValues), + TableTools.doubleCol("High", highValues)).update("Cats = i % 2 == 0 ? 
null : Cats").update("A = `A`") + .update("B = `B`"); TableTools.show(t); Figure fig = FigureFactory.figure() - .catPlotBy("Test1", t, "Cats", "Values", "A").show(); + .catPlotBy("Test1", t, "Cats", "Values", "A").show(); ExamplePlotUtils.display(fig); } diff --git a/Plot/src/test/java/io/deephaven/db/plot/example_plots/CatPlotBy.java b/Plot/src/test/java/io/deephaven/db/plot/example_plots/CatPlotBy.java index b72fba93ef8..3a62978b20d 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/example_plots/CatPlotBy.java +++ b/Plot/src/test/java/io/deephaven/db/plot/example_plots/CatPlotBy.java @@ -18,15 +18,15 @@ public static void main(String[] args) { final double[] values = {5, 4, 4, 3}; Table t = TableTools.newTable(TableTools.col("USym", usym), - TableTools.col("Cats", cats), - TableTools.doubleCol("Values", values)); + TableTools.col("Cats", cats), + TableTools.doubleCol("Values", values)); t = t.update("Timestamp = DBDateTime.now() + (HOUR * i)"); Figure fig = FigureFactory.figure(); for (int i = 0; i < 1; i++) { fig = fig.newChart() - .newAxes() - .catPlot("Test1", t, "Timestamp", "Values"); + .newAxes() + .catPlot("Test1", t, "Timestamp", "Values"); } ExamplePlotUtils.display(fig); diff --git a/Plot/src/test/java/io/deephaven/db/plot/example_plots/CatStackedBar.java b/Plot/src/test/java/io/deephaven/db/plot/example_plots/CatStackedBar.java index c7fdb53ce36..4b26a4ec8b8 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/example_plots/CatStackedBar.java +++ b/Plot/src/test/java/io/deephaven/db/plot/example_plots/CatStackedBar.java @@ -21,15 +21,15 @@ public static void main(String[] args) { Figure fig = FigureFactory.figure(); Figure cht = fig.newChart(0) - .chartTitle("Chart Title"); + .chartTitle("Chart Title"); Figure axs = cht.newAxes() - .xLabel("X") - .yLabel("Y") - .plotStyle(PlotStyle.STACKED_BAR) - .catPlot("Test1", x1, y1).gradientVisible(true) - .catPlot("Test2", x2, y2) - .catPlot("Test3", x3, y3).group(2) - .yTicks(new double[] {1, 5}); + 
.xLabel("X") + .yLabel("Y") + .plotStyle(PlotStyle.STACKED_BAR) + .catPlot("Test1", x1, y1).gradientVisible(true) + .catPlot("Test2", x2, y2) + .catPlot("Test3", x3, y3).group(2) + .yTicks(new double[] {1, 5}); ExamplePlotUtils.display(axs); } diff --git a/Plot/src/test/java/io/deephaven/db/plot/example_plots/ExamplePlotUtils.java b/Plot/src/test/java/io/deephaven/db/plot/example_plots/ExamplePlotUtils.java index 39ccd9466c5..0e98a00e193 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/example_plots/ExamplePlotUtils.java +++ b/Plot/src/test/java/io/deephaven/db/plot/example_plots/ExamplePlotUtils.java @@ -17,7 +17,6 @@ public class ExamplePlotUtils { */ public static void display(final Figure fig) { final BaseFigureImpl figImpl = ((FigureImpl) fig).getFigure(); - // TODO: Do something here to actually display a sample plot, in the absence of Swing - // plotting support. + // TODO: Do something here to actually display a sample plot, in the absence of Swing plotting support. } } diff --git a/Plot/src/test/java/io/deephaven/db/plot/example_plots/OHLCChart.java b/Plot/src/test/java/io/deephaven/db/plot/example_plots/OHLCChart.java index 88377bca7d8..1f4b1c47eb8 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/example_plots/OHLCChart.java +++ b/Plot/src/test/java/io/deephaven/db/plot/example_plots/OHLCChart.java @@ -27,11 +27,11 @@ public static void main(String[] args) { Figure fig = FigureFactory.figure(); Figure cht = fig.newChart(0) - .chartTitle("Chart Title"); + .chartTitle("Chart Title"); Figure axs = cht.newAxes() - .xLabel("X") - .yLabel("Y") - .ohlcPlot("Test", date, open, high, low, close); + .xLabel("X") + .yLabel("Y") + .ohlcPlot("Test", date, open, high, low, close); ExamplePlotUtils.display(axs); diff --git a/Plot/src/test/java/io/deephaven/db/plot/example_plots/OHLCPlotBy.java b/Plot/src/test/java/io/deephaven/db/plot/example_plots/OHLCPlotBy.java index 32ff1f1a5db..6913d25fe8d 100644 --- 
a/Plot/src/test/java/io/deephaven/db/plot/example_plots/OHLCPlotBy.java +++ b/Plot/src/test/java/io/deephaven/db/plot/example_plots/OHLCPlotBy.java @@ -23,22 +23,22 @@ public static void main(String[] args) { final double[] close = {4, 5, 4, 6, 5, 6}; final long time = 1491946585000000000L; Table t = TableTools.newTable(TableTools.col("USym", cats), TableTools.col("USym2", cats2), - TableTools.doubleCol("Open", open), - TableTools.doubleCol("High", high), - TableTools.doubleCol("Low", low), - TableTools.doubleCol("Close", close)); + TableTools.doubleCol("Open", open), + TableTools.doubleCol("High", high), + TableTools.doubleCol("Low", low), + TableTools.doubleCol("Close", close)); QueryScope.addParam("time", time); t = t.updateView("Time = new DBDateTime(time + (MINUTE * i))"); Figure fig = FigureFactory.figure(); Figure cht = fig.newChart(0) - .chartTitle("Chart Title"); + .chartTitle("Chart Title"); Figure axs = cht.newAxes().xTransform(new AxisTransformLambda()) - .xLabel("X") - .yLabel("Y") - .ohlcPlotBy("Test1", t, "Time", "Open", "High", "Low", "Close", "USym") - .pointColor("black", "A"); + .xLabel("X") + .yLabel("Y") + .ohlcPlotBy("Test1", t, "Time", "Open", "High", "Low", "Close", "USym") + .pointColor("black", "A"); ExamplePlotUtils.display(axs); } diff --git a/Plot/src/test/java/io/deephaven/db/plot/example_plots/PieChartArray.java b/Plot/src/test/java/io/deephaven/db/plot/example_plots/PieChartArray.java index 4dcfa7443cc..7539c6069b5 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/example_plots/PieChartArray.java +++ b/Plot/src/test/java/io/deephaven/db/plot/example_plots/PieChartArray.java @@ -15,11 +15,11 @@ public static void main(String[] args) { Figure fig = FigureFactory.figure(); Figure cht = fig.newChart(0) - .chartTitle("Chart Title"); + .chartTitle("Chart Title"); Figure axs = cht.newAxes() - .xLabel("X") - .yLabel("Y") - .piePlot("Test", categories, values).pointLabelFormat("{0}"); + .xLabel("X") + .yLabel("Y") + .piePlot("Test", 
categories, values).pointLabelFormat("{0}"); ExamplePlotUtils.display(axs); } diff --git a/Plot/src/test/java/io/deephaven/db/plot/example_plots/PrettyChart1.java b/Plot/src/test/java/io/deephaven/db/plot/example_plots/PrettyChart1.java index 08967088458..f972e92e615 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/example_plots/PrettyChart1.java +++ b/Plot/src/test/java/io/deephaven/db/plot/example_plots/PrettyChart1.java @@ -16,8 +16,7 @@ public static void main(String[] args) { final java.awt.Color red = java.awt.Color.decode("#d62728"); final java.awt.Color darkBlue = java.awt.Color.decode("#1f77b4"); final Color lighterRed = new Color(red.getRed(), red.getGreen(), red.getBlue(), 50); - final Color lighterDarkBlue = - new Color(darkBlue.getRed(), darkBlue.getGreen(), darkBlue.getBlue(), 100); + final Color lighterDarkBlue = new Color(darkBlue.getRed(), darkBlue.getGreen(), darkBlue.getBlue(), 100); final long time = 1491946585000000000L; DBDateTime[] date1 = { @@ -167,19 +166,19 @@ public static void main(String[] args) { Figure fig = FigureFactory.figure(); Figure cht = fig.newChart(0) - .chartTitle("Chart Title"); + .chartTitle("Chart Title"); Figure axs = cht.newAxes().plotStyle("LINE") - .yLabel("Predicted Index") - .plot("Test2", date3, y3).pointsVisible(false) - .plot("Test1", date1, y1).pointsVisible(false); + .yLabel("Predicted Index") + .plot("Test2", date3, y3).pointsVisible(false) + .plot("Test1", date1, y1).pointsVisible(false); Figure axs2 = axs.twin() - .plotStyle(PlotStyle.AREA) - .plot("Test1", date3, y3Lower).seriesColor(new Color(250, 250, 250)) - .plot("Test2", date3, y3Higher).seriesColor(lighterRed) - .plot("Test3", date1, y1Lower).seriesColor(new Color(250, 250, 250)) - .plot("Test4", date1, y1Higher).seriesColor(lighterDarkBlue); + .plotStyle(PlotStyle.AREA) + .plot("Test1", date3, y3Lower).seriesColor(new Color(250, 250, 250)) + .plot("Test2", date3, y3Higher).seriesColor(lighterRed) + .plot("Test3", date1, y1Lower).seriesColor(new 
Color(250, 250, 250)) + .plot("Test4", date1, y1Higher).seriesColor(lighterDarkBlue); ExamplePlotUtils.display(axs2); diff --git a/Plot/src/test/java/io/deephaven/db/plot/example_plots/ScatterPlotTable.java b/Plot/src/test/java/io/deephaven/db/plot/example_plots/ScatterPlotTable.java index 61ebb72bad8..fc7892b5176 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/example_plots/ScatterPlotTable.java +++ b/Plot/src/test/java/io/deephaven/db/plot/example_plots/ScatterPlotTable.java @@ -18,9 +18,9 @@ public static void main(String[] args) { final Number[] x4 = {1.3, 3.2, 3.4, 3.8}; Table t = TableTools.newTable(TableTools.col("x1", x1), - TableTools.col("x2", x2), - TableTools.col("x3", x3), - TableTools.col("x4", x4)); + TableTools.col("x2", x2), + TableTools.col("x3", x3), + TableTools.col("x4", x4)); ScatterPlotMatrix f = ScatterPlotMatrix.scatterPlotMatrix(t, "x1", "x2", "x3", "x4"); ExamplePlotUtils.display(f); diff --git a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleCatError.java b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleCatError.java index b75eb76518e..106f402cc44 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleCatError.java +++ b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleCatError.java @@ -38,9 +38,9 @@ public static void main(String[] args) { final Number[] close = {4, 5, 4, 6}; Figure fig = FigureFactory.figure() - .catErrorBar("S1", x1, y1, yLow, yHigh) - .plotStyle("bar") - .plotOrientation("H"); + .catErrorBar("S1", x1, y1, yLow, yHigh) + .plotStyle("bar") + .plotOrientation("H"); ExamplePlotUtils.display(fig); } diff --git a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleCatErrorAxisTransform.java b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleCatErrorAxisTransform.java index 36fb343f595..7bb19323d78 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleCatErrorAxisTransform.java +++ 
b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleCatErrorAxisTransform.java @@ -20,9 +20,9 @@ public static void main(String[] args) { Figure fig = FigureFactory.figure() - .catErrorBar("S1", x1, y1, yLow, yHigh) - .plotStyle("bar") - .yTransform(AxisTransforms.LOG); + .catErrorBar("S1", x1, y1, yLow, yHigh) + .plotStyle("bar") + .yTransform(AxisTransforms.LOG); ExamplePlotUtils.display(fig); } diff --git a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleCategoryAxisTransform.java b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleCategoryAxisTransform.java index a5d79a2ec6e..adff79367f9 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleCategoryAxisTransform.java +++ b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleCategoryAxisTransform.java @@ -22,11 +22,11 @@ public static void main(String[] args) { final Number[] y2 = {18, -5, 0, 9, 12}; Figure axs2 = catPlot("Test2", x2, y2) - .catPlot("Test1", x1, y1) - .xLabel("Cats") - .yLabel("Y") - .yTransform(AxisTransforms.SQRT) - .plotOrientation("H"); + .catPlot("Test1", x1, y1) + .xLabel("Cats") + .yLabel("Y") + .yTransform(AxisTransforms.SQRT) + .plotOrientation("H"); ExamplePlotUtils.display(axs2); } diff --git a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleCategoryColor.java b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleCategoryColor.java index 9dff1797a65..ef2f60b4d21 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleCategoryColor.java +++ b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleCategoryColor.java @@ -24,27 +24,26 @@ public static void main(String[] args) { final String[] x2 = {"A", "B", "C", "E", "F"}; final Number[] y2 = {5, 2, 1, 7, 5}; final Color[] c2 = {new Color(0, 255, 0), - new Color(0, 0, 255), new Color(0, 0, 0), new Color(255, 0, 0), - new Color(255, 255, 0)}; + new Color(0, 0, 255), new Color(0, 0, 0), new Color(255, 0, 0), new Color(255, 255, 0)}; final 
Map c1m = IntStream.range(0, c1.length).boxed() - .collect(Collectors.toMap(i -> x1[i], i -> c1[i])); + .collect(Collectors.toMap(i -> x1[i], i -> c1[i])); final Map c2m = IntStream.range(0, c2.length).boxed() - .collect(Collectors.toMap(i -> x2[i], i -> c2[i])); + .collect(Collectors.toMap(i -> x2[i], i -> c2[i])); c2m.put("C", null); Figure fig = FigureFactory.figure(); Figure cht = fig.newChart(0) - .chartTitle("Chart Title"); + .chartTitle("Chart Title"); Figure axs = cht.newAxes() - .xLabel("X") - .yLabel("Y") - .catPlot("Test2", x2, y2) - .pointColor(new Color(0, 0, 0)) - .pointColor(c2m) - .catPlot("Test1", x1, y1) - .pointColor(c1m); + .xLabel("X") + .yLabel("Y") + .catPlot("Test2", x2, y2) + .pointColor(new Color(0, 0, 0)) + .pointColor(c2m) + .catPlot("Test1", x1, y1) + .pointColor(c1m); ExamplePlotUtils.display(axs); diff --git a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleCategoryPlot.java b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleCategoryPlot.java index 27028949cc4..d511325c0bc 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleCategoryPlot.java +++ b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleCategoryPlot.java @@ -15,12 +15,12 @@ public static void main(String[] args) { Figure fig = FigureFactory.figure(); Figure cht = fig.newChart(0) - .chartTitle("Chart Title"); + .chartTitle("Chart Title"); Figure axs = cht.newAxes() - .xLabel("X") - .yLabel("Y") - .piePlot("Test1", x1, new int[] {2, 2, 3, 2}).piePercentLabelFormat("0.00") - .axis(0).axisLabelFont("Courier", "BOLD_ITALIC", 25); + .xLabel("X") + .yLabel("Y") + .piePlot("Test1", x1, new int[] {2, 2, 3, 2}).piePercentLabelFormat("0.00") + .axis(0).axisLabelFont("Courier", "BOLD_ITALIC", 25); ExamplePlotUtils.display(axs); } diff --git a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleCategoryPlot2.java b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleCategoryPlot2.java index 2f889bc8c1b..d8c30a1b212 
100644 --- a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleCategoryPlot2.java +++ b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleCategoryPlot2.java @@ -18,21 +18,21 @@ public static void main(String[] args) { Figure fig = FigureFactory.figure(); Figure cht = fig.newChart(0) - .chartTitle("Chart Title"); + .chartTitle("Chart Title"); Figure axs1 = cht.newAxes() - .xLabel("X") - .yLabel("Y").plotStyle("SCATTER") + .xLabel("X") + .yLabel("Y").plotStyle("SCATTER") - .catPlot("Test1", x1, y1) - .pointShape("circle") - .pointSize(2); + .catPlot("Test1", x1, y1) + .pointShape("circle") + .pointSize(2); Figure axs2 = axs1.twin().plotStyle("SCATTER") - .catPlot("Test2", x2, y2) - .pointShape("up_triangle") - .pointSize(2) - .axis(0).axisLabelFont("Courier", "BOLD_ITALIC", 25) - .xTickLabelAngle(45); + .catPlot("Test2", x2, y2) + .pointShape("up_triangle") + .pointSize(2) + .axis(0).axisLabelFont("Courier", "BOLD_ITALIC", 25) + .xTickLabelAngle(45); ExamplePlotUtils.display(axs2); } diff --git a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleColorMaps.java b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleColorMaps.java index 3128cc57ddf..168946655b3 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleColorMaps.java +++ b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleColorMaps.java @@ -37,13 +37,13 @@ public static void main(String[] args) { Figure fig = FigureFactory.figure(); Figure cht = fig.newChart(0) - .chartTitle("Chart Title"); + .chartTitle("Chart Title"); Figure axs1 = cht.newAxes() - .xLabel("X") - .yLabel("Y") - .plotStyle("LINE") - .plot("Test1", x, y) - .pointColorByY(val -> heatMap(0, 50, green, red).apply(val)); + .xLabel("X") + .yLabel("Y") + .plotStyle("LINE") + .plot("Test1", x, y) + .pointColorByY(val -> heatMap(0, 50, green, red).apply(val)); Range[] ranges = new Range[] { new Range(Double.NEGATIVE_INFINITY, 10, true, false), @@ -57,7 +57,7 @@ public static void 
main(String[] args) { m.put(ranges[1], colors[1]); m.put(ranges[2], colors[2]); axs1 = axs1.plot("Test2", x, y2) - .pointColorByY(val -> rangeMap(m).apply(val)); + .pointColorByY(val -> rangeMap(m).apply(val)); ExamplePlotUtils.display(axs1); } diff --git a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleHistoTable.java b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleHistoTable.java index 695c5927b5f..9ce2b4ec8e6 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleHistoTable.java +++ b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleHistoTable.java @@ -19,11 +19,11 @@ public static void main(String[] args) { Figure fig = FigureFactory.figure(); Figure cht = fig.newChart(0) - .chartTitle("Chart Title"); + .chartTitle("Chart Title"); Figure axs = cht.newAxes() - .xLabel("X") - .yLabel("Y") - .histPlot("Test1", t, "x1", 4).pointColor("red"); + .xLabel("X") + .yLabel("Y") + .histPlot("Test1", t, "x1", 4).pointColor("red"); ExamplePlotUtils.display(axs); } diff --git a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimplePlotBy.java b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimplePlotBy.java index af1c1e52738..88d24e677e1 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimplePlotBy.java +++ b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimplePlotBy.java @@ -18,17 +18,16 @@ public static void main(String[] args) { final String[] cats = {"A", "B", "A", "B", "A", "B"}; final int[] x1 = {1, 1, 2, 2, 3, 3}; final double[] y1 = {2.0, 3.0, 4.0, 5.0, 3.0, 4.0}; - final Table t = TableTools.newTable(TableTools.col("USym", cats), - TableTools.intCol("Index", x1), TableTools.doubleCol("Value", y1)); + final Table t = TableTools.newTable(TableTools.col("USym", cats), TableTools.intCol("Index", x1), + TableTools.doubleCol("Value", y1)); Figure fig = FigureFactory.figure(); Figure cht = fig.newChart(0) - .chartTitle("Chart Title"); + .chartTitle("Chart Title"); Figure axs = 
cht.newAxes().xTransform(AxisTransforms.LOG) - .xLabel("X") - .yLabel("Y") - .plotBy("Test1", t, "Index", "Value", "USym").lineColor("black").pointsVisible(true) - .show(); + .xLabel("X") + .yLabel("Y") + .plotBy("Test1", t, "Index", "Value", "USym").lineColor("black").pointsVisible(true).show(); ExamplePlotUtils.display(axs); } diff --git a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimplePlotDynamicChartTitle.java b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimplePlotDynamicChartTitle.java index dd52a5ffc08..65c57692e85 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimplePlotDynamicChartTitle.java +++ b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimplePlotDynamicChartTitle.java @@ -24,14 +24,13 @@ public static void main(String[] args) { final Number[] x2 = {0, 1.5, 15.5}; final Number[] y2 = {1.3, 3.2, 3.4}; - final Table table = - TableTools.emptyTable(2).updateView("by = i%2==0 ? `A` : `B`", "title=i", "title2=2*i"); + final Table table = TableTools.emptyTable(2).updateView("by = i%2==0 ? 
`A` : `B`", "title=i", "title2=2*i"); final SelectableDataSetOneClick by = oneClick(table, "by"); Figure fig = FigureFactory.figure() - .xInvert(true) - .plot("TestF", x1, y1) - .chartTitle(false, table, "title", "title2"); + .xInvert(true) + .plot("TestF", x1, y1) + .chartTitle(false, table, "title", "title2"); ExamplePlotUtils.display(fig); } } diff --git a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleTsDBDatePlot.java b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleTsDBDatePlot.java index 48c49d71b13..60863879b35 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleTsDBDatePlot.java +++ b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleTsDBDatePlot.java @@ -30,14 +30,13 @@ public static void main(String[] args) { Figure fig = FigureFactory.figure(); Figure cht = fig.newChart(0) - .chartTitle("Chart Title"); + .chartTitle("Chart Title"); Figure axs = cht.newAxes() - .xLabel("X") - .yLabel("Y") - .plot("Test1", x1, y1).pointLabelFormat("{0}: {1}, {2}") - .xToolTipPattern("HH:mm:SSSSSSSSS") - .plot("Test2", x2, y2) - .axis(0).axisLabelFont("Courier", "BOLD_ITALIC", 25); + .xLabel("X") + .yLabel("Y") + .plot("Test1", x1, y1).pointLabelFormat("{0}: {1}, {2}").xToolTipPattern("HH:mm:SSSSSSSSS") + .plot("Test2", x2, y2) + .axis(0).axisLabelFont("Courier", "BOLD_ITALIC", 25); ExamplePlotUtils.display(axs); } diff --git a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleTsPlot.java b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleTsPlot.java index dbe9ea0b550..92a04d349b6 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleTsPlot.java +++ b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleTsPlot.java @@ -31,9 +31,9 @@ public static void main(String[] args) { for (int i = 0; i < size * size; i++) { fig = fig.newChart() - .newAxes() - .plot("Test1", x1, y1) - .plot("Test2", x2, y2); + .newAxes() + .plot("Test1", x1, y1) + .plot("Test2", x2, y2); } 
ExamplePlotUtils.display(fig); diff --git a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleTsPlot2.java b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleTsPlot2.java index 29b3290c5f7..00ffbcf6f52 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleTsPlot2.java +++ b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleTsPlot2.java @@ -42,13 +42,13 @@ public static void main(String[] args) throws IOException, ParseException { final Number[] y1 = values.toArray(new Number[values.size()]); Figure fig = FigureFactory.figure() - .newChart(0) - .chartTitle(fileName) - .newAxes() - .xLabel("X") - .yLabel("Y") - .plot("Test1", x1, y1) - .pointsVisible(false); + .newChart(0) + .chartTitle(fileName) + .newAxes() + .xLabel("X") + .yLabel("Y") + .plot("Test1", x1, y1) + .pointsVisible(false); ExamplePlotUtils.display(fig); } diff --git a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYArea.java b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYArea.java index 837df4848a5..c64766f3d0f 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYArea.java +++ b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYArea.java @@ -19,12 +19,12 @@ public static void main(String[] args) { Figure fig = FigureFactory.figure(); Figure cht = fig.newChart(0) - .chartTitle("Chart Title"); + .chartTitle("Chart Title"); Figure axs = cht.newAxes() - .xLabel("X") - .yLabel("Y").plotStyle("AREA") - .plot("Test1", x1, y1).pointsVisible(true).linesVisible(true) - .plot("Test2", x2, y2); + .xLabel("X") + .yLabel("Y").plotStyle("AREA") + .plot("Test1", x1, y1).pointsVisible(true).linesVisible(true) + .plot("Test2", x2, y2); ExamplePlotUtils.display(axs); } diff --git a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYAxisTransform.java b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYAxisTransform.java index 1014f264c02..45b1678285b 100644 --- 
a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYAxisTransform.java +++ b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYAxisTransform.java @@ -22,11 +22,11 @@ public static void main(String[] args) { final Number[] y2 = {1.3, -3.2, 3.4, 0}; Figure axs2 = plot("Test2", x2, y2).pointsVisible(true) - .plot("Test1", x1, y1).pointsVisible(true) - .xLabel("X") - .yLabel("Y") - .yTransform(AxisTransforms.LOG) - .plotOrientation("H"); + .plot("Test1", x1, y1).pointsVisible(true) + .xLabel("X") + .yLabel("Y") + .yTransform(AxisTransforms.LOG) + .plotOrientation("H"); ExamplePlotUtils.display(axs2); } diff --git a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYBar.java b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYBar.java index ffc4e72369a..b77a7f23c23 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYBar.java +++ b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYBar.java @@ -21,9 +21,9 @@ public static void main(String[] args) { final Number[] y2 = {1.3, 3.2, 3.4}; Figure axs2 = plot("Test2", x2, y2) - .plot("Test1", x1, y1) - .plotStyle("BAR") - .yTransform(AxisTransforms.LOG); + .plot("Test1", x1, y1) + .plotStyle("BAR") + .yTransform(AxisTransforms.LOG); ExamplePlotUtils.display(axs2); } diff --git a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYColor.java b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYColor.java index cff871a71c3..387e040249f 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYColor.java +++ b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYColor.java @@ -25,13 +25,13 @@ public static void main(String[] args) { Figure fig = FigureFactory.figure(); Figure cht = fig.newChart(0) - .chartTitle("Chart Title"); + .chartTitle("Chart Title"); Figure axs = cht.newAxes() - .xLabel("X") - .yLabel("Y") - .plot("Test1", x1, y1).pointColor(c1).linesVisible(false) - .plot("Test2", x2, 
y2).pointColor(green) - .plot("TestF", x -> x * x / 5).pointColor("black"); // .npoints(5); //.range(-10,10); + .xLabel("X") + .yLabel("Y") + .plot("Test1", x1, y1).pointColor(c1).linesVisible(false) + .plot("Test2", x2, y2).pointColor(green) + .plot("TestF", x -> x * x / 5).pointColor("black"); // .npoints(5); //.range(-10,10); ExamplePlotUtils.display(axs); } diff --git a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYDateBar.java b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYDateBar.java index 6b2e53d954f..54791c94d5a 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYDateBar.java +++ b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYDateBar.java @@ -24,11 +24,11 @@ public static void main(String[] args) { Figure fig = FigureFactory.figure(); Figure cht = fig.newChart(0) - .chartTitle("Chart Title"); + .chartTitle("Chart Title"); Figure axs = cht.newAxes() - .xLabel("X") - .yLabel("Y").plotStyle("BAR") - .plot("Test1", x1, y1).pointColor("red"); + .xLabel("X") + .yLabel("Y").plotStyle("BAR") + .plot("Test1", x1, y1).pointColor("red"); ExamplePlotUtils.display(axs); } diff --git a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYDateTime.java b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYDateTime.java index 2b20b4edbe2..6bf946f0e5f 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYDateTime.java +++ b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYDateTime.java @@ -21,17 +21,16 @@ public static void main(String[] args) { DBTimeUtils.nanosToTime(dbDateTime + 4 * DBTimeUtils.HOUR), DBTimeUtils.nanosToTime(dbDateTime + 5 * DBTimeUtils.HOUR), DBTimeUtils.nanosToTime(dbDateTime + 6 * DBTimeUtils.HOUR), - DBTimeUtils - .nanosToTime(dbDateTime + 6 * DBTimeUtils.HOUR + 30 * DBTimeUtils.MINUTE), + DBTimeUtils.nanosToTime(dbDateTime + 6 * DBTimeUtils.HOUR + 30 * DBTimeUtils.MINUTE), }; final double[] data = new double[] {1, 2, 3, 4, 
5, 6, 7, 8}; Figure axs2 = plot("Test2", dates, data) - .xBusinessTime() - .plotStyle(PlotStyle.SCATTER) - .linesVisible(true) - .xFormatPattern("HH:mm"); + .xBusinessTime() + .plotStyle(PlotStyle.SCATTER) + .linesVisible(true) + .xFormatPattern("HH:mm"); ExamplePlotUtils.display(axs2); } diff --git a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYError.java b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYError.java index 84a3fea818c..3384a6e250b 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYError.java +++ b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYError.java @@ -41,13 +41,12 @@ public static void main(String[] args) { if (testOHLC) { fig = FigureFactory.figure() - .plot("S1", date, y1).plotStyle("Line") - .twin() - .ohlcPlot("S2", date, open, high, low, close).plotStyle("OHLC"); + .plot("S1", date, y1).plotStyle("Line") + .twin() + .ohlcPlot("S2", date, open, high, low, close).plotStyle("OHLC"); } else { fig = FigureFactory.figure() - .errorBarXY("S1", x1, xLow, xHigh, y1, yLow, yHigh).plotStyle("bar") - .pointsVisible(true); + .errorBarXY("S1", x1, xLow, xHigh, y1, yLow, yHigh).plotStyle("bar").pointsVisible(true); } ExamplePlotUtils.display(fig); diff --git a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYErrorAxisTransform.java b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYErrorAxisTransform.java index b1fdac66088..698d3c20cc3 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYErrorAxisTransform.java +++ b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYErrorAxisTransform.java @@ -23,10 +23,10 @@ public static void main(String[] args) { Figure fig = FigureFactory.figure() - .errorBarXY("S1", x1, xLow, xHigh, y1, yLow, yHigh) - .plotStyle("bar") - .plotOrientation("H") - .yLog(); + .errorBarXY("S1", x1, xLow, xHigh, y1, yLow, yHigh) + .plotStyle("bar") + .plotOrientation("H") + .yLog(); 
ExamplePlotUtils.display(fig); } diff --git a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYHisto.java b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYHisto.java index 3ec2ecd676e..b5ad42dd4e9 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYHisto.java +++ b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYHisto.java @@ -15,11 +15,11 @@ public static void main(String[] args) { Figure fig = FigureFactory.figure(); Figure cht = fig.newChart(0) - .chartTitle("Chart Title"); + .chartTitle("Chart Title"); Figure axs = cht.newAxes() - .xLabel("X") - .yLabel("Y") - .histPlot("Test1", x1, 4).pointColor("green"); + .xLabel("X") + .yLabel("Y") + .histPlot("Test1", x1, 4).pointColor("green"); ExamplePlotUtils.display(axs); } diff --git a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYPlot.java b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYPlot.java index bc49d2f4bfe..0b286c52d11 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYPlot.java +++ b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYPlot.java @@ -17,8 +17,8 @@ public static void main(String[] args) { final Number[] y2 = {1.3, 3.2, 3.4}; Figure fig = FigureFactory.figure() - .xInvert(true) - .plot("TestF", x1, y1); + .xInvert(true) + .plot("TestF", x1, y1); ExamplePlotUtils.display(fig); } diff --git a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYPlot2.java b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYPlot2.java index 756846ce8af..4f73594e321 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYPlot2.java +++ b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYPlot2.java @@ -18,15 +18,15 @@ public static void main(String[] args) { Figure fig = FigureFactory.figure(); Figure cht = fig.newChart(0) - .chartTitle("Chart Title") - - .newAxes() - .xLabel("X1") - .yLabel("Y1") - .plot("Test1", x1, 
y1).plotStyle("bar").twin() - .xLabel("X2") - .yLabel("Y2") - .plot("Test2", x2, y2); + .chartTitle("Chart Title") + + .newAxes() + .xLabel("X1") + .yLabel("Y1") + .plot("Test1", x1, y1).plotStyle("bar").twin() + .xLabel("X2") + .yLabel("Y2") + .plot("Test2", x2, y2); ExamplePlotUtils.display(cht); diff --git a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYPlot2b.java b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYPlot2b.java index c4ab3832057..e7014d2d20d 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYPlot2b.java +++ b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYPlot2b.java @@ -18,19 +18,19 @@ public static void main(String[] args) { Figure fig = FigureFactory.figure(); Figure cht = fig.newChart(0) - .chartTitle("Chart Title") - - .newAxes() - .xLabel("X1") - .yLabel("Y1") - .xLog() - .plot("Test1", x1, y1) - - .newAxes() - .xLabel("X2") - .yLabel("Y2") - .yLog() - .plot("Test2", x2, y2); + .chartTitle("Chart Title") + + .newAxes() + .xLabel("X1") + .yLabel("Y1") + .xLog() + .plot("Test1", x1, y1) + + .newAxes() + .xLabel("X2") + .yLabel("Y2") + .yLog() + .plot("Test2", x2, y2); ExamplePlotUtils.display(cht); } diff --git a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYPlot3.java b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYPlot3.java index b88fe6161f7..c6d66f88a40 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYPlot3.java +++ b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYPlot3.java @@ -20,22 +20,22 @@ public static void main(String[] args) { Figure fig = FigureFactory.figure(); Figure cht = fig.newChart(0) - .chartTitle("Chart Title") - - .newAxes() - .xLabel("X1") - .yLabel("Y1") - .plot("Test1", x1, y1) - - .newAxes() - .xLabel("X2") - .yLabel("Y2") - .plot("Test2", x2, y2) - - .newAxes() - .xLabel("X3") - .yLabel("Y3") - .plot("Test3", x3, y3); + .chartTitle("Chart Title") + + .newAxes() + .xLabel("X1") 
+ .yLabel("Y1") + .plot("Test1", x1, y1) + + .newAxes() + .xLabel("X2") + .yLabel("Y2") + .plot("Test2", x2, y2) + + .newAxes() + .xLabel("X3") + .yLabel("Y3") + .plot("Test3", x3, y3); ExamplePlotUtils.display(cht); } diff --git a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYPlot4.java b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYPlot4.java index 7a8723720c1..705f91ddf90 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYPlot4.java +++ b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYPlot4.java @@ -18,15 +18,15 @@ public static void main(String[] args) { Figure fig = FigureFactory.figure(); Figure cht = fig.newChart(0) - .chartTitle("triad_mixing") - .chartTitleFont("Courier", "PLAIN", 40); + .chartTitle("triad_mixing") + .chartTitleFont("Courier", "PLAIN", 40); Figure ax = cht.newAxes() - .xLabel("X1") - .yLabel("Y1") - .plot("Func0", x -> -5) - .plot("Func1", x -> -4) - .plot("Func2", x -> -3).lineStyle(thickDash); + .xLabel("X1") + .yLabel("Y1") + .plot("Func0", x -> -5) + .plot("Func1", x -> -4) + .plot("Func2", x -> -3).lineStyle(thickDash); ExamplePlotUtils.display(ax); } diff --git a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYPlot5.java b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYPlot5.java index 2e47fb96e19..ffc5d84b8bb 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYPlot5.java +++ b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYPlot5.java @@ -19,36 +19,36 @@ public static void main(String[] args) { final Number[] y3 = {11.3, 2.2, 8.4}; Figure fig = FigureFactory.figure(2, 2) - .figureTitle("Figure"); + .figureTitle("Figure"); Figure cht = fig.newChart(0) - .colSpan(2) - .chartTitle("My Chart 1"); + .colSpan(2) + .chartTitle("My Chart 1"); Figure ax = cht.newAxes() - .xLabel("X1") - .yLabel("Y1") - .plot("Test1", x1, y1) - .plotStyle("SCATTER"); + .xLabel("X1") + .yLabel("Y1") + .plot("Test1", x1, y1) 
+ .plotStyle("SCATTER"); Figure cht2 = ax.newChart(1, 0) - .chartTitle("My Chart 2"); + .chartTitle("My Chart 2"); Figure ax2 = cht2.newAxes() - .xLabel("X2") - .yLabel("Y2") - .plot("Test1", x2, y2) - .plotStyle("SCATTER"); + .xLabel("X2") + .yLabel("Y2") + .plot("Test1", x2, y2) + .plotStyle("SCATTER"); Figure cht3 = ax2.newChart(1, 1) - .chartTitle("My Chart 3"); + .chartTitle("My Chart 3"); Figure ax3 = cht3.newAxes() - .xLabel("X3") - .yLabel("Y3") - .plot("Test1", x3, y3) - .plotStyle("SCATTER"); + .xLabel("X3") + .yLabel("Y3") + .plot("Test1", x3, y3) + .plotStyle("SCATTER"); ExamplePlotUtils.display(ax3); } diff --git a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYPlot5b.java b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYPlot5b.java index 0cf00b17037..187e13b88ca 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYPlot5b.java +++ b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYPlot5b.java @@ -22,35 +22,35 @@ public static void main(String[] args) { Figure fig = FigureFactory.figure(2, 2); Figure cht = fig.newChart(0).rowSpan(2) - .chartTitle("My Chart 1") - .chartTitleFont("Courier", "PLAIN", 40); + .chartTitle("My Chart 1") + .chartTitleFont("Courier", "PLAIN", 40); Figure ax = cht.newAxes() - .xLabel("X1") - .yLabel("Y1") - .plot("Test1", x1, y1) - .linesVisible(false).pointsVisible(true); + .xLabel("X1") + .yLabel("Y1") + .plot("Test1", x1, y1) + .linesVisible(false).pointsVisible(true); Figure cht2 = ax.newChart() - .chartTitle("My Chart 2") - .chartTitleFont("Courier", "PLAIN", 40); + .chartTitle("My Chart 2") + .chartTitleFont("Courier", "PLAIN", 40); Figure ax2 = cht2.newAxes() - .xLabel("X2") - .yLabel("Y2") - .plot("Test1", x2, y2) - .linesVisible(false).pointsVisible(true); + .xLabel("X2") + .yLabel("Y2") + .plot("Test1", x2, y2) + .linesVisible(false).pointsVisible(true); Figure cht3 = ax2.newChart() - .chartTitle("My Chart 3") - .chartTitleFont("Courier", "PLAIN", 40); + 
.chartTitle("My Chart 3") + .chartTitleFont("Courier", "PLAIN", 40); Figure ax3 = cht3.newAxes() - .xLabel("X3") - .yLabel("Y3") - .plot("Test1", x3, y3).pointLabel(labels) - .linesVisible(false).pointsVisible(true); + .xLabel("X3") + .yLabel("Y3") + .plot("Test1", x3, y3).pointLabel(labels) + .linesVisible(false).pointsVisible(true); ExamplePlotUtils.display(ax3); diff --git a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYSize.java b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYSize.java index f7d070efad7..fc717951d83 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYSize.java +++ b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYSize.java @@ -27,13 +27,13 @@ public static void main(String[] args) { Figure fig = FigureFactory.figure(); Figure cht = fig.newChart(0) - .chartTitle("Chart Title"); + .chartTitle("Chart Title"); Figure axs = cht.newAxes() - .xLabel("X") - .yLabel("Y") - .plot("Test1", x1, y1).pointSize(s1).linesVisible(false) - .plot("Test2", x2, y2).pointSize(.5) - .plot("Test3", x3, y3); + .xLabel("X") + .yLabel("Y") + .plot("Test1", x1, y1).pointSize(s1).linesVisible(false) + .plot("Test2", x2, y2).pointSize(.5) + .plot("Test3", x3, y3); ExamplePlotUtils.display(axs); } diff --git a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYStackedArea.java b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYStackedArea.java index db7f20f601f..5973375bba5 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYStackedArea.java +++ b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYStackedArea.java @@ -17,14 +17,14 @@ public static void main(String[] args) { Figure fig = FigureFactory.figure(); Figure cht = fig.newChart(0) - .chartTitle("Chart Title"); + .chartTitle("Chart Title"); Figure axs = cht.newAxes() - .xColor("blue") - .yColor("red") - .xLabel("X") - .yLabel("Y").plotStyle("STACKED_AREA") - .plot("Test1", x1, 
y1).pointsVisible(true) - .plot("Test2", x1, y2).pointsVisible(true); + .xColor("blue") + .yColor("red") + .xLabel("X") + .yLabel("Y").plotStyle("STACKED_AREA") + .plot("Test1", x1, y1).pointsVisible(true) + .plot("Test2", x1, y2).pointsVisible(true); ExamplePlotUtils.display(axs); } diff --git a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYStepPlot.java b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYStepPlot.java index 95a5a3eb7f9..abbe1d71f54 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYStepPlot.java +++ b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYStepPlot.java @@ -15,7 +15,7 @@ public static void main(String[] args) { Figure fig = FigureFactory.figure(); Figure cht = fig - .plot("Test1", x1, y1).plotStyle("step"); + .plot("Test1", x1, y1).plotStyle("step"); ExamplePlotUtils.display(cht); } diff --git a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYTable.java b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYTable.java index 5b9ad476e98..2304ecee2ec 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYTable.java +++ b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYTable.java @@ -16,20 +16,19 @@ public class SimpleXYTable { public static void main(String[] args) { - Table t = TableTools.emptyTable(10).updateView("Timestamp = new DBDateTime(i * HOUR)", - "Open = i", "High = i + 2", "Low = i - 2", "Close = i + 1", "By = i % 5"); - Table t2 = TableTools.emptyTable(5000).updateView( - "Timestamp = new DBDateTime(0) + (i * HOUR)", "Open = i + 100", "High = i + 2 + 100", - "Low = i - 2 + 100", "Close = i + 1 + 100", "By = i % 5"); - - final Figure f = FigureFactory.figure() - .ohlcPlotBy("Test1", t, "Timestamp", "Open", "High", "Low", "Close", "By") - .yTransform(AxisTransforms.SQRT) - .lineColor("black") - .pointLabel("A") - .newChart() - .ohlcPlotBy("Test2", t2, "Timestamp", "Open", "High", "Low", "Close", "By") - 
.show(); + Table t = TableTools.emptyTable(10).updateView("Timestamp = new DBDateTime(i * HOUR)", "Open = i", + "High = i + 2", "Low = i - 2", "Close = i + 1", "By = i % 5"); + Table t2 = TableTools.emptyTable(5000).updateView("Timestamp = new DBDateTime(0) + (i * HOUR)", + "Open = i + 100", "High = i + 2 + 100", "Low = i - 2 + 100", "Close = i + 1 + 100", "By = i % 5"); + + final Figure f = + FigureFactory.figure().ohlcPlotBy("Test1", t, "Timestamp", "Open", "High", "Low", "Close", "By") + .yTransform(AxisTransforms.SQRT) + .lineColor("black") + .pointLabel("A") + .newChart() + .ohlcPlotBy("Test2", t2, "Timestamp", "Open", "High", "Low", "Close", "By") + .show(); ExamplePlotUtils.display(f); } diff --git a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYTicks.java b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYTicks.java index 2aaf20300b5..8f11cd89f51 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYTicks.java +++ b/Plot/src/test/java/io/deephaven/db/plot/example_plots/SimpleXYTicks.java @@ -19,19 +19,19 @@ public static void main(String[] args) { Figure fig = FigureFactory.figure(); Figure cht = fig.newChart(0) - .chartTitle("Chart Title"); + .chartTitle("Chart Title"); Figure axs = cht.newAxes() - .xTicks(5) - .xMinorTicks(3) - .yTicksVisible(false) - .yMinorTicks(3) - .xLabel("X") - .yLabel("Y") - .plot("Test1", x1, y1) - .plot("Test2", x2, y2) - .plot("TestF", x -> x * x / 5).funcRange(5, 10) // .npoints(5); //.range(-10,10); - .xAxis().axisColor(new Color(0, 255, 255)) - .axis(0).axisLabelFont("Courier", "BOLD_ITALIC", 25); + .xTicks(5) + .xMinorTicks(3) + .yTicksVisible(false) + .yMinorTicks(3) + .xLabel("X") + .yLabel("Y") + .plot("Test1", x1, y1) + .plot("Test2", x2, y2) + .plot("TestF", x -> x * x / 5).funcRange(5, 10) // .npoints(5); //.range(-10,10); + .xAxis().axisColor(new Color(0, 255, 255)) + .axis(0).axisLabelFont("Courier", "BOLD_ITALIC", 25); ExamplePlotUtils.display(axs); } diff --git 
a/Plot/src/test/java/io/deephaven/db/plot/example_plots/XYStackedBar.java b/Plot/src/test/java/io/deephaven/db/plot/example_plots/XYStackedBar.java index 0c3e9bc4cca..a484965d8f8 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/example_plots/XYStackedBar.java +++ b/Plot/src/test/java/io/deephaven/db/plot/example_plots/XYStackedBar.java @@ -20,8 +20,8 @@ public static void main(String[] args) { final Number[] y3 = {2.3, 1.0, 3.4, 2.3}; Figure axs = plot("Test1", x1, y1) - .plot("Test2", x2, y2) - .plot("Test3", x3, y3).plotStyle("stacked_bar"); + .plot("Test2", x2, y2) + .plot("Test3", x3, y3).plotStyle("stacked_bar"); ExamplePlotUtils.display(axs); } diff --git a/Plot/src/test/java/io/deephaven/db/plot/util/TestArgumentValidations.java b/Plot/src/test/java/io/deephaven/db/plot/util/TestArgumentValidations.java index 188315f17aa..707fffbdad2 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/util/TestArgumentValidations.java +++ b/Plot/src/test/java/io/deephaven/db/plot/util/TestArgumentValidations.java @@ -29,15 +29,14 @@ public void testArgumentValidations() { final String INVALID = "INVALID"; final int[] ints = {1}; final IndexableNumericData intData = new IndexableNumericDataArrayInt(ints, null); - final IndexableNumericData intData2 = - new IndexableNumericDataArrayInt(new int[] {2, 3}, null); + final IndexableNumericData intData2 = new IndexableNumericDataArrayInt(new int[] {2, 3}, null); final DBDateTime[] dates = {new DBDateTime(1)}; final Color[] colors = {new Color(1)}; final Table table = TableTools.newTable( - TableTools.col(stringColumn, NON_NULL), - TableTools.col(dateColumn, dates), - TableTools.col(colorColumn, colors), - TableTools.intCol(intColumn, ints)).ungroup(); + TableTools.col(stringColumn, NON_NULL), + TableTools.col(dateColumn, dates), + TableTools.col(colorColumn, colors), + TableTools.intCol(intColumn, ints)).ungroup(); ArgumentValidations.assertInstance(String.class, String.class, message, null); try { @@ -61,39 +60,32 @@ public 
void testArgumentValidations() { assertTrue(e.getMessage().contains(message)); } - ArgumentValidations.assertIsNumericOrTimeOrCharOrComparableInstance(table, intColumn, - message, null); - ArgumentValidations.assertIsNumericOrTimeOrCharOrComparableInstance(table, dateColumn, - message, null); - ArgumentValidations.assertIsNumericOrTimeOrCharOrComparableInstance(table, stringColumn, - message, null); + ArgumentValidations.assertIsNumericOrTimeOrCharOrComparableInstance(table, intColumn, message, null); + ArgumentValidations.assertIsNumericOrTimeOrCharOrComparableInstance(table, dateColumn, message, null); + ArgumentValidations.assertIsNumericOrTimeOrCharOrComparableInstance(table, stringColumn, message, null); try { - ArgumentValidations.assertIsNumericOrTimeOrCharOrComparableInstance(table, INVALID, - message, null); + ArgumentValidations.assertIsNumericOrTimeOrCharOrComparableInstance(table, INVALID, message, null); TestCase.fail("Expected an exception"); } catch (RuntimeException e) { assertTrue(e.getMessage().contains(INVALID)); } try { - ArgumentValidations.assertIsNumericOrTimeOrCharOrComparableInstance(table, INVALID, - message, null); + ArgumentValidations.assertIsNumericOrTimeOrCharOrComparableInstance(table, INVALID, message, null); TestCase.fail("Expected an exception"); } catch (RuntimeException e) { assertTrue(e.getMessage().contains(INVALID)); } - ArgumentValidations.assertSameSize(new IndexableNumericData[] {intData}, - new String[] {"TEST"}, null); + ArgumentValidations.assertSameSize(new IndexableNumericData[] {intData}, new String[] {"TEST"}, null); try { - ArgumentValidations.assertSameSize(new IndexableNumericData[] {null}, - new String[] {message}, null); + ArgumentValidations.assertSameSize(new IndexableNumericData[] {null}, new String[] {message}, null); TestCase.fail("Expected an exception"); } catch (RuntimeException e) { assertTrue(e.getMessage().contains(message)); } try { - ArgumentValidations.assertSameSize(new IndexableNumericData[] 
{intData, intData2}, - new String[] {"A", "B"}, null); + ArgumentValidations.assertSameSize(new IndexableNumericData[] {intData, intData2}, new String[] {"A", "B"}, + null); TestCase.fail("Expected an exception"); } catch (IllegalArgumentException e) { assertTrue(e.getMessage().contains("Input data")); @@ -246,7 +238,7 @@ public void testArgumentValidations() { } final TableHandle handle = - new TableHandle(table, intColumn, stringColumn, dateColumn); + new TableHandle(table, intColumn, stringColumn, dateColumn); ArgumentValidations.assertColumnsInTable(handle, null, dateColumn, intColumn, stringColumn); try { ArgumentValidations.assertColumnsInTable(handle, null, stringColumn, INVALID); diff --git a/Plot/src/test/java/io/deephaven/db/plot/util/TestShapeUtils.java b/Plot/src/test/java/io/deephaven/db/plot/util/TestShapeUtils.java index 89b99ea7c55..8f814ffafbc 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/util/TestShapeUtils.java +++ b/Plot/src/test/java/io/deephaven/db/plot/util/TestShapeUtils.java @@ -13,8 +13,7 @@ public class TestShapeUtils extends BaseArrayTestCase { - private final Polygon polygon = - new Polygon(new int[] {-1, -1, 1, 1}, new int[] {-1, 1, 1, -1}, 4); + private final Polygon polygon = new Polygon(new int[] {-1, -1, 1, 1}, new int[] {-1, 1, 1, -1}, 4); private final double polygonCenterX = Arrays.stream(polygon.xpoints).sum(); private final double polygonCenterY = Arrays.stream(polygon.ypoints).sum(); private final RectangularShape rectangle = new Rectangle2D.Double(0, 0, 1, 1); diff --git a/Plot/src/test/java/io/deephaven/db/plot/util/functions/TestSerializableClosure.java b/Plot/src/test/java/io/deephaven/db/plot/util/functions/TestSerializableClosure.java index 1a07210683c..c8aac15dd67 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/util/functions/TestSerializableClosure.java +++ b/Plot/src/test/java/io/deephaven/db/plot/util/functions/TestSerializableClosure.java @@ -37,14 +37,13 @@ public void testSerializableClosure() { // 
testing serialization is too hard /* - * Object clone = null; try { ByteArrayOutputStream bos = new ByteArrayOutputStream(); - * ObjectOutputStream oos = new ObjectOutputStream(bos); - * oos.writeObject(serializableClosure); oos.flush(); byte[] obis = bos.toByteArray(); - * oos.close(); bos.close(); + * Object clone = null; try { ByteArrayOutputStream bos = new ByteArrayOutputStream(); ObjectOutputStream oos = + * new ObjectOutputStream(bos); oos.writeObject(serializableClosure); oos.flush(); byte[] obis = + * bos.toByteArray(); oos.close(); bos.close(); * * ByteArrayInputStream bis = new ByteArrayInputStream(obis); ObjectInputStream ois = new - * ObjectInputStream(bis); clone = ois.readObject(); ois.close(); bis.close(); } catch - * (IOException | ClassNotFoundException e) { e.printStackTrace(); } + * ObjectInputStream(bis); clone = ois.readObject(); ois.close(); bis.close(); } catch (IOException | + * ClassNotFoundException e) { e.printStackTrace(); } * * serializableClosure = (SerializableClosure) clone; if(serializableClosure == null) { * TestCase.fail("Null return from serialization"); } diff --git a/Plot/src/test/java/io/deephaven/db/plot/util/tables/TestColumnHandlerFactory.java b/Plot/src/test/java/io/deephaven/db/plot/util/tables/TestColumnHandlerFactory.java index bf523800497..5e64339e080 100644 --- a/Plot/src/test/java/io/deephaven/db/plot/util/tables/TestColumnHandlerFactory.java +++ b/Plot/src/test/java/io/deephaven/db/plot/util/tables/TestColumnHandlerFactory.java @@ -35,25 +35,25 @@ public class TestColumnHandlerFactory extends BaseArrayTestCase { private final Paint[] paints = {null, new Color(100, 0, 0), new Color(0, 100, 0)}; private final String[] strings = {"A", "B", "C"}; private final Table table = TableTools.newTable( - TableTools.intCol("ints", ints), - TableTools.floatCol("floats", floats), - TableTools.longCol("longs", longs), - TableTools.doubleCol("doubles", doubles), - TableTools.shortCol("shorts", shorts), - TableTools.col("Shorts", 
Shorts), - TableTools.col("Integers", Integers), - TableTools.col("Longs", Longs), - TableTools.col("Floats", Floats), - TableTools.col("Doubles", Doubles), - TableTools.col("Numbers", Numbers), - TableTools.col("Dates", Dates), - TableTools.col("DBDateTimes", DBDateTimes), - TableTools.col("Paints", paints), - TableTools.col("Strings", strings)).ungroup(); + TableTools.intCol("ints", ints), + TableTools.floatCol("floats", floats), + TableTools.longCol("longs", longs), + TableTools.doubleCol("doubles", doubles), + TableTools.shortCol("shorts", shorts), + TableTools.col("Shorts", Shorts), + TableTools.col("Integers", Integers), + TableTools.col("Longs", Longs), + TableTools.col("Floats", Floats), + TableTools.col("Doubles", Doubles), + TableTools.col("Numbers", Numbers), + TableTools.col("Dates", Dates), + TableTools.col("DBDateTimes", DBDateTimes), + TableTools.col("Paints", paints), + TableTools.col("Strings", strings)).ungroup(); private final TableHandle tableHandle = new TableHandle(table, - "ints", "floats", "longs", "doubles", "shorts", "Shorts", "Integers", "Longs", "Floats", - "Doubles", "Numbers", "Dates", "DBDateTimes", "Paints", "Strings"); + "ints", "floats", "longs", "doubles", "shorts", "Shorts", "Integers", "Longs", "Floats", "Doubles", + "Numbers", "Dates", "DBDateTimes", "Paints", "Strings"); public void testTypeClassification() { @@ -80,55 +80,42 @@ public void testNumericColumnHandlerHandle() { assertTrue(e.getMessage().contains("Null")); } - ColumnHandlerFactory.ColumnHandler handler = - ColumnHandlerFactory.newNumericHandler(tableHandle, "ints", null); - columnHandlerTest(ColumnHandlerFactory.TypeClassification.INTEGER, "ints", int.class, - handler); + ColumnHandlerFactory.ColumnHandler handler = ColumnHandlerFactory.newNumericHandler(tableHandle, "ints", null); + columnHandlerTest(ColumnHandlerFactory.TypeClassification.INTEGER, "ints", int.class, handler); handler = ColumnHandlerFactory.newNumericHandler(tableHandle, "doubles", null); - 
columnHandlerTest(ColumnHandlerFactory.TypeClassification.FLOATINGPOINT, "doubles", - double.class, handler); + columnHandlerTest(ColumnHandlerFactory.TypeClassification.FLOATINGPOINT, "doubles", double.class, handler); handler = ColumnHandlerFactory.newNumericHandler(tableHandle, "longs", null); - columnHandlerTest(ColumnHandlerFactory.TypeClassification.INTEGER, "longs", long.class, - handler); + columnHandlerTest(ColumnHandlerFactory.TypeClassification.INTEGER, "longs", long.class, handler); handler = ColumnHandlerFactory.newNumericHandler(tableHandle, "floats", null); - columnHandlerTest(ColumnHandlerFactory.TypeClassification.FLOATINGPOINT, "floats", - float.class, handler); + columnHandlerTest(ColumnHandlerFactory.TypeClassification.FLOATINGPOINT, "floats", float.class, handler); handler = ColumnHandlerFactory.newNumericHandler(tableHandle, "Integers", null); - columnHandlerTest(ColumnHandlerFactory.TypeClassification.INTEGER, "Integers", int.class, - handler); + columnHandlerTest(ColumnHandlerFactory.TypeClassification.INTEGER, "Integers", int.class, handler); handler = ColumnHandlerFactory.newNumericHandler(tableHandle, "Doubles", null); - columnHandlerTest(ColumnHandlerFactory.TypeClassification.FLOATINGPOINT, "Doubles", - double.class, handler); + columnHandlerTest(ColumnHandlerFactory.TypeClassification.FLOATINGPOINT, "Doubles", double.class, handler); handler = ColumnHandlerFactory.newNumericHandler(tableHandle, "Floats", null); - columnHandlerTest(ColumnHandlerFactory.TypeClassification.FLOATINGPOINT, "Floats", - float.class, handler); + columnHandlerTest(ColumnHandlerFactory.TypeClassification.FLOATINGPOINT, "Floats", float.class, handler); handler = ColumnHandlerFactory.newNumericHandler(tableHandle, "Shorts", null); - columnHandlerTest(ColumnHandlerFactory.TypeClassification.INTEGER, "Shorts", short.class, - handler); + columnHandlerTest(ColumnHandlerFactory.TypeClassification.INTEGER, "Shorts", short.class, handler); handler = 
ColumnHandlerFactory.newNumericHandler(tableHandle, "Longs", null); - columnHandlerTest(ColumnHandlerFactory.TypeClassification.INTEGER, "Longs", long.class, - handler); + columnHandlerTest(ColumnHandlerFactory.TypeClassification.INTEGER, "Longs", long.class, handler); handler = ColumnHandlerFactory.newNumericHandler(tableHandle, "Numbers", null); - columnHandlerTest(ColumnHandlerFactory.TypeClassification.FLOATINGPOINT, "Numbers", - Number.class, handler); + columnHandlerTest(ColumnHandlerFactory.TypeClassification.FLOATINGPOINT, "Numbers", Number.class, handler); handler = ColumnHandlerFactory.newNumericHandler(tableHandle, "Dates", null); - columnHandlerTest(ColumnHandlerFactory.TypeClassification.TIME, "Dates", Date.class, - handler); + columnHandlerTest(ColumnHandlerFactory.TypeClassification.TIME, "Dates", Date.class, handler); handler = ColumnHandlerFactory.newNumericHandler(tableHandle, "DBDateTimes", null); - columnHandlerTest(ColumnHandlerFactory.TypeClassification.TIME, "DBDateTimes", - DBDateTime.class, handler); + columnHandlerTest(ColumnHandlerFactory.TypeClassification.TIME, "DBDateTimes", DBDateTime.class, handler); handler.getTableHandle(); handler = ColumnHandlerFactory.newNumericHandler(tableHandle, "Paints", null); @@ -173,55 +160,42 @@ public void testNumericColumnHandlerTable() { assertTrue(e.getMessage().contains("Null")); } - ColumnHandlerFactory.ColumnHandler handler = - ColumnHandlerFactory.newNumericHandler(table, "ints", null); - columnHandlerTest(ColumnHandlerFactory.TypeClassification.INTEGER, "ints", int.class, - handler); + ColumnHandlerFactory.ColumnHandler handler = ColumnHandlerFactory.newNumericHandler(table, "ints", null); + columnHandlerTest(ColumnHandlerFactory.TypeClassification.INTEGER, "ints", int.class, handler); handler = ColumnHandlerFactory.newNumericHandler(table, "doubles", null); - columnHandlerTest(ColumnHandlerFactory.TypeClassification.FLOATINGPOINT, "doubles", - double.class, handler); + 
columnHandlerTest(ColumnHandlerFactory.TypeClassification.FLOATINGPOINT, "doubles", double.class, handler); handler = ColumnHandlerFactory.newNumericHandler(table, "longs", null); - columnHandlerTest(ColumnHandlerFactory.TypeClassification.INTEGER, "longs", long.class, - handler); + columnHandlerTest(ColumnHandlerFactory.TypeClassification.INTEGER, "longs", long.class, handler); handler = ColumnHandlerFactory.newNumericHandler(table, "floats", null); - columnHandlerTest(ColumnHandlerFactory.TypeClassification.FLOATINGPOINT, "floats", - float.class, handler); + columnHandlerTest(ColumnHandlerFactory.TypeClassification.FLOATINGPOINT, "floats", float.class, handler); handler = ColumnHandlerFactory.newNumericHandler(table, "Integers", null); - columnHandlerTest(ColumnHandlerFactory.TypeClassification.INTEGER, "Integers", int.class, - handler); + columnHandlerTest(ColumnHandlerFactory.TypeClassification.INTEGER, "Integers", int.class, handler); handler = ColumnHandlerFactory.newNumericHandler(table, "Doubles", null); - columnHandlerTest(ColumnHandlerFactory.TypeClassification.FLOATINGPOINT, "Doubles", - double.class, handler); + columnHandlerTest(ColumnHandlerFactory.TypeClassification.FLOATINGPOINT, "Doubles", double.class, handler); handler = ColumnHandlerFactory.newNumericHandler(table, "Floats", null); - columnHandlerTest(ColumnHandlerFactory.TypeClassification.FLOATINGPOINT, "Floats", - float.class, handler); + columnHandlerTest(ColumnHandlerFactory.TypeClassification.FLOATINGPOINT, "Floats", float.class, handler); handler = ColumnHandlerFactory.newNumericHandler(table, "Shorts", null); - columnHandlerTest(ColumnHandlerFactory.TypeClassification.INTEGER, "Shorts", short.class, - handler); + columnHandlerTest(ColumnHandlerFactory.TypeClassification.INTEGER, "Shorts", short.class, handler); handler = ColumnHandlerFactory.newNumericHandler(table, "Longs", null); - columnHandlerTest(ColumnHandlerFactory.TypeClassification.INTEGER, "Longs", long.class, - handler); + 
columnHandlerTest(ColumnHandlerFactory.TypeClassification.INTEGER, "Longs", long.class, handler); handler = ColumnHandlerFactory.newNumericHandler(table, "Numbers", null); - columnHandlerTest(ColumnHandlerFactory.TypeClassification.FLOATINGPOINT, "Numbers", - Number.class, handler); + columnHandlerTest(ColumnHandlerFactory.TypeClassification.FLOATINGPOINT, "Numbers", Number.class, handler); handler = ColumnHandlerFactory.newNumericHandler(table, "Dates", null); - columnHandlerTest(ColumnHandlerFactory.TypeClassification.TIME, "Dates", Date.class, - handler); + columnHandlerTest(ColumnHandlerFactory.TypeClassification.TIME, "Dates", Date.class, handler); handler = ColumnHandlerFactory.newNumericHandler(table, "DBDateTimes", null); - columnHandlerTest(ColumnHandlerFactory.TypeClassification.TIME, "DBDateTimes", - DBDateTime.class, handler); + columnHandlerTest(ColumnHandlerFactory.TypeClassification.TIME, "DBDateTimes", DBDateTime.class, handler); try { handler.getTableHandle(); @@ -280,9 +254,8 @@ public void testComparableHandlerHandle() { ColumnHandlerFactory.ColumnHandler handler = - ColumnHandlerFactory.newComparableHandler(tableHandle, "Strings", null); - assertEquals(ColumnHandlerFactory.TypeClassification.COMPARABLE, - handler.typeClassification()); + ColumnHandlerFactory.newComparableHandler(tableHandle, "Strings", null); + assertEquals(ColumnHandlerFactory.TypeClassification.COMPARABLE, handler.typeClassification()); assertEquals(strings.length, handler.size()); assertEquals("Strings", handler.getColumnName()); assertEquals(String.class, handler.type()); @@ -321,10 +294,8 @@ public void testComparableHandlerTable() { } - ColumnHandlerFactory.ColumnHandler handler = - ColumnHandlerFactory.newComparableHandler(table, "Strings", null); - assertEquals(ColumnHandlerFactory.TypeClassification.COMPARABLE, - handler.typeClassification()); + ColumnHandlerFactory.ColumnHandler handler = ColumnHandlerFactory.newComparableHandler(table, "Strings", null); + 
assertEquals(ColumnHandlerFactory.TypeClassification.COMPARABLE, handler.typeClassification()); assertEquals(strings.length, handler.size()); assertEquals("Strings", handler.getColumnName()); assertEquals(String.class, handler.type()); @@ -357,7 +328,7 @@ public void testObjectHandlerHandle() { ColumnHandlerFactory.ColumnHandler handler = - ColumnHandlerFactory.newObjectHandler(tableHandle, "Strings", null); + ColumnHandlerFactory.newObjectHandler(tableHandle, "Strings", null); assertEquals(ColumnHandlerFactory.TypeClassification.OBJECT, handler.typeClassification()); assertEquals(strings.length, handler.size()); assertEquals("Strings", handler.getColumnName()); @@ -390,8 +361,7 @@ public void testObjectHandlerTable() { } - ColumnHandlerFactory.ColumnHandler handler = - ColumnHandlerFactory.newObjectHandler(table, "Strings", null); + ColumnHandlerFactory.ColumnHandler handler = ColumnHandlerFactory.newObjectHandler(table, "Strings", null); assertEquals(ColumnHandlerFactory.TypeClassification.OBJECT, handler.typeClassification()); assertEquals(strings.length, handler.size()); assertEquals("Strings", handler.getColumnName()); @@ -408,8 +378,8 @@ public void testObjectHandlerTable() { } } - private void columnHandlerTest(ColumnHandlerFactory.TypeClassification type, String name, - Class clazz, ColumnHandlerFactory.ColumnHandler handler) { + private void columnHandlerTest(ColumnHandlerFactory.TypeClassification type, String name, Class clazz, + ColumnHandlerFactory.ColumnHandler handler) { assertEquals(type, handler.typeClassification()); assertEquals(doubles.length, handler.size()); diff --git a/Plot/src/test/java/io/deephaven/db/util/PlotReflectionPlaypen.java b/Plot/src/test/java/io/deephaven/db/util/PlotReflectionPlaypen.java index b600df83c4b..2dac16938a8 100644 --- a/Plot/src/test/java/io/deephaven/db/util/PlotReflectionPlaypen.java +++ b/Plot/src/test/java/io/deephaven/db/util/PlotReflectionPlaypen.java @@ -13,18 +13,18 @@ public static void main(String[] args) 
{ final Method[] methods = Figure.class.getMethods(); System.out.println(methods.length); - final List plotMethods = Arrays.stream(methods) - .filter(m -> m.getName().equals("plot")).collect(Collectors.toList()); + final List plotMethods = + Arrays.stream(methods).filter(m -> m.getName().equals("plot")).collect(Collectors.toList()); System.out.println(plotMethods.size()); - final List fourArg = plotMethods.stream().filter(m -> m.getParameterCount() == 4) - .collect(Collectors.toList()); + final List fourArg = + plotMethods.stream().filter(m -> m.getParameterCount() == 4).collect(Collectors.toList()); System.out.println(fourArg.size()); // fourArg.forEach(System.out::println); - final List table2nd = fourArg.stream() - .filter(m -> m.getParameterTypes()[1] == Table.class).collect(Collectors.toList()); + final List table2nd = + fourArg.stream().filter(m -> m.getParameterTypes()[1] == Table.class).collect(Collectors.toList()); System.out.println(table2nd.size()); for (Method method : table2nd) { diff --git a/SevenZip/src/main/java/SevenZip/Compression/LZ/BinTree.java b/SevenZip/src/main/java/SevenZip/Compression/LZ/BinTree.java index d87fe2c61e0..34a7cb9a385 100644 --- a/SevenZip/src/main/java/SevenZip/Compression/LZ/BinTree.java +++ b/SevenZip/src/main/java/SevenZip/Compression/LZ/BinTree.java @@ -65,16 +65,15 @@ public void MovePos() throws IOException { public boolean Create(int historySize, int keepAddBufferBefore, - int matchMaxLen, int keepAddBufferAfter) { + int matchMaxLen, int keepAddBufferAfter) { if (historySize > kMaxValForNormalize - 256) return false; _cutValue = 16 + (matchMaxLen >> 1); int windowReservSize = (historySize + keepAddBufferBefore + - matchMaxLen + keepAddBufferAfter) / 2 + 256; + matchMaxLen + keepAddBufferAfter) / 2 + 256; - super.Create(historySize + keepAddBufferBefore, matchMaxLen + keepAddBufferAfter, - windowReservSize); + super.Create(historySize + keepAddBufferBefore, matchMaxLen + keepAddBufferAfter, windowReservSize); 
_matchMaxLen = matchMaxLen; @@ -166,7 +165,7 @@ public int GetMatches(int[] distances) throws IOException { if (kNumHashDirectBytes != 0) { if (curMatch > matchMinPos) { if (_bufferBase[_bufferOffset + curMatch + kNumHashDirectBytes] != _bufferBase[cur - + kNumHashDirectBytes]) { + + kNumHashDirectBytes]) { distances[offset++] = maxLen = kNumHashDirectBytes; distances[offset++] = _pos - curMatch - 1; } @@ -182,7 +181,7 @@ public int GetMatches(int[] distances) throws IOException { } int delta = _pos - curMatch; int cyclicPos = ((delta <= _cyclicBufferPos) ? (_cyclicBufferPos - delta) - : (_cyclicBufferPos - delta + _cyclicBufferSize)) << 1; + : (_cyclicBufferPos - delta + _cyclicBufferSize)) << 1; int pby1 = _bufferOffset + curMatch; int len = Math.min(len0, len1); @@ -243,8 +242,7 @@ public void Skip(int num) throws IOException { _hash[kHash3Offset + hash3Value] = _pos; hashValue = (temp ^ (CrcTable[_bufferBase[cur + 3] & 0xFF] << 5)) & _hashMask; } else - hashValue = - ((_bufferBase[cur] & 0xFF) ^ ((int) (_bufferBase[cur + 1] & 0xFF) << 8)); + hashValue = ((_bufferBase[cur] & 0xFF) ^ ((int) (_bufferBase[cur + 1] & 0xFF) << 8)); int curMatch = _hash[kFixHashSize + hashValue]; _hash[kFixHashSize + hashValue] = _pos; @@ -264,7 +262,7 @@ public void Skip(int num) throws IOException { int delta = _pos - curMatch; int cyclicPos = ((delta <= _cyclicBufferPos) ? 
(_cyclicBufferPos - delta) - : (_cyclicBufferPos - delta + _cyclicBufferSize)) << 1; + : (_cyclicBufferPos - delta + _cyclicBufferSize)) << 1; int pby1 = _bufferOffset + curMatch; int len = Math.min(len0, len1); diff --git a/SevenZip/src/main/java/SevenZip/Compression/LZMA/Base.java b/SevenZip/src/main/java/SevenZip/Compression/LZMA/Base.java index dbdcea5afa1..932f7ef30af 100644 --- a/SevenZip/src/main/java/SevenZip/Compression/LZMA/Base.java +++ b/SevenZip/src/main/java/SevenZip/Compression/LZMA/Base.java @@ -75,6 +75,6 @@ public static final int GetLenToPosState(int len) { public static final int kNumLowLenSymbols = 1 << kNumLowLenBits; public static final int kNumMidLenSymbols = 1 << kNumMidLenBits; public static final int kNumLenSymbols = kNumLowLenSymbols + kNumMidLenSymbols + - (1 << kNumHighLenBits); + (1 << kNumHighLenBits); public static final int kMatchMaxLen = kMatchMinLen + kNumLenSymbols - 1; } diff --git a/SevenZip/src/main/java/SevenZip/Compression/LZMA/Decoder.java b/SevenZip/src/main/java/SevenZip/Compression/LZMA/Decoder.java index 9e5230eb42b..c3ee1665773 100644 --- a/SevenZip/src/main/java/SevenZip/Compression/LZMA/Decoder.java +++ b/SevenZip/src/main/java/SevenZip/Compression/LZMA/Decoder.java @@ -29,8 +29,7 @@ public void Init() { m_HighCoder.Init(); } - public int Decode(SevenZip.Compression.RangeCoder.Decoder rangeDecoder, int posState) - throws IOException { + public int Decode(SevenZip.Compression.RangeCoder.Decoder rangeDecoder, int posState) throws IOException { if (rangeDecoder.DecodeBit(m_Choice, 0) == 0) return m_LowCoder[posState].Decode(rangeDecoder); int symbol = Base.kNumLowLenSymbols; @@ -50,8 +49,7 @@ public void Init() { SevenZip.Compression.RangeCoder.Decoder.InitBitModels(m_Decoders); } - public byte DecodeNormal(SevenZip.Compression.RangeCoder.Decoder rangeDecoder) - throws IOException { + public byte DecodeNormal(SevenZip.Compression.RangeCoder.Decoder rangeDecoder) throws IOException { int symbol = 1; do symbol = (symbol 
<< 1) | rangeDecoder.DecodeBit(m_Decoders, symbol); @@ -59,8 +57,8 @@ public byte DecodeNormal(SevenZip.Compression.RangeCoder.Decoder rangeDecoder) return (byte) symbol; } - public byte DecodeWithMatchByte(SevenZip.Compression.RangeCoder.Decoder rangeDecoder, - byte matchByte) throws IOException { + public byte DecodeWithMatchByte(SevenZip.Compression.RangeCoder.Decoder rangeDecoder, byte matchByte) + throws IOException { int symbol = 1; do { int matchBit = (matchByte >> 7) & 1; @@ -101,14 +99,12 @@ public void Init() { } Decoder2 GetDecoder(int pos, byte prevByte) { - return m_Coders[((pos & m_PosMask) << m_NumPrevBits) - + ((prevByte & 0xFF) >>> (8 - m_NumPrevBits))]; + return m_Coders[((pos & m_PosMask) << m_NumPrevBits) + ((prevByte & 0xFF) >>> (8 - m_NumPrevBits))]; } } OutWindow m_OutWindow = new OutWindow(); - SevenZip.Compression.RangeCoder.Decoder m_RangeDecoder = - new SevenZip.Compression.RangeCoder.Decoder(); + SevenZip.Compression.RangeCoder.Decoder m_RangeDecoder = new SevenZip.Compression.RangeCoder.Decoder(); short[] m_IsMatchDecoders = new short[Base.kNumStates << Base.kNumPosStatesBitsMax]; short[] m_IsRepDecoders = new short[Base.kNumStates]; @@ -181,7 +177,7 @@ void Init() throws IOException { } public boolean Code(java.io.InputStream inStream, java.io.OutputStream outStream, - long outSize) throws IOException { + long outSize) throws IOException { m_RangeDecoder.SetStream(inStream); m_OutWindow.SetStream(outStream); Init(); @@ -193,13 +189,10 @@ public boolean Code(java.io.InputStream inStream, java.io.OutputStream outStream byte prevByte = 0; while (outSize < 0 || nowPos64 < outSize) { int posState = (int) nowPos64 & m_PosStateMask; - if (m_RangeDecoder.DecodeBit(m_IsMatchDecoders, - (state << Base.kNumPosStatesBitsMax) + posState) == 0) { - LiteralDecoder.Decoder2 decoder2 = - m_LiteralDecoder.GetDecoder((int) nowPos64, prevByte); + if (m_RangeDecoder.DecodeBit(m_IsMatchDecoders, (state << Base.kNumPosStatesBitsMax) + posState) == 0) { + 
LiteralDecoder.Decoder2 decoder2 = m_LiteralDecoder.GetDecoder((int) nowPos64, prevByte); if (!Base.StateIsCharState(state)) - prevByte = - decoder2.DecodeWithMatchByte(m_RangeDecoder, m_OutWindow.GetByte(rep0)); + prevByte = decoder2.DecodeWithMatchByte(m_RangeDecoder, m_OutWindow.GetByte(rep0)); else prevByte = decoder2.DecodeNormal(m_RangeDecoder); m_OutWindow.PutByte(prevByte); @@ -211,7 +204,7 @@ public boolean Code(java.io.InputStream inStream, java.io.OutputStream outStream len = 0; if (m_RangeDecoder.DecodeBit(m_IsRepG0Decoders, state) == 0) { if (m_RangeDecoder.DecodeBit(m_IsRep0LongDecoders, - (state << Base.kNumPosStatesBitsMax) + posState) == 0) { + (state << Base.kNumPosStatesBitsMax) + posState) == 0) { state = Base.StateUpdateShortRep(state); len = 1; } @@ -241,17 +234,16 @@ public boolean Code(java.io.InputStream inStream, java.io.OutputStream outStream rep1 = rep0; len = Base.kMatchMinLen + m_LenDecoder.Decode(m_RangeDecoder, posState); state = Base.StateUpdateMatch(state); - int posSlot = - m_PosSlotDecoder[Base.GetLenToPosState(len)].Decode(m_RangeDecoder); + int posSlot = m_PosSlotDecoder[Base.GetLenToPosState(len)].Decode(m_RangeDecoder); if (posSlot >= Base.kStartPosModelIndex) { int numDirectBits = (posSlot >> 1) - 1; rep0 = ((2 | (posSlot & 1)) << numDirectBits); if (posSlot < Base.kEndPosModelIndex) rep0 += BitTreeDecoder.ReverseDecode(m_PosDecoders, - rep0 - posSlot - 1, m_RangeDecoder, numDirectBits); + rep0 - posSlot - 1, m_RangeDecoder, numDirectBits); else { rep0 += (m_RangeDecoder.DecodeDirectBits( - numDirectBits - Base.kNumAlignBits) << Base.kNumAlignBits); + numDirectBits - Base.kNumAlignBits) << Base.kNumAlignBits); rep0 += m_PosAlignDecoder.ReverseDecode(m_RangeDecoder); if (rep0 < 0) { if (rep0 == -1) diff --git a/SevenZip/src/main/java/SevenZip/Compression/LZMA/Encoder.java b/SevenZip/src/main/java/SevenZip/Compression/LZMA/Encoder.java index 69715a47207..d77c63ceac9 100644 --- 
a/SevenZip/src/main/java/SevenZip/Compression/LZMA/Encoder.java +++ b/SevenZip/src/main/java/SevenZip/Compression/LZMA/Encoder.java @@ -67,8 +67,7 @@ public void Init() { - public void Encode(SevenZip.Compression.RangeCoder.Encoder rangeEncoder, byte symbol) - throws IOException { + public void Encode(SevenZip.Compression.RangeCoder.Encoder rangeEncoder, byte symbol) throws IOException { int context = 1; for (int i = 7; i >= 0; i--) { int bit = ((symbol >> i) & 1); @@ -77,8 +76,8 @@ public void Encode(SevenZip.Compression.RangeCoder.Encoder rangeEncoder, byte sy } } - public void EncodeMatched(SevenZip.Compression.RangeCoder.Encoder rangeEncoder, - byte matchByte, byte symbol) throws IOException { + public void EncodeMatched(SevenZip.Compression.RangeCoder.Encoder rangeEncoder, byte matchByte, byte symbol) + throws IOException { int context = 1; boolean same = true; for (int i = 7; i >= 0; i--) { @@ -103,7 +102,7 @@ public int GetPrice(boolean matchMode, byte matchByte, byte symbol) { int matchBit = (matchByte >> i) & 1; int bit = (symbol >> i) & 1; price += SevenZip.Compression.RangeCoder.Encoder - .GetPrice(m_Encoders[((1 + matchBit) << 8) + context], bit); + .GetPrice(m_Encoders[((1 + matchBit) << 8) + context], bit); context = (context << 1) | bit; if (matchBit != bit) { i--; @@ -113,8 +112,7 @@ public int GetPrice(boolean matchMode, byte matchByte, byte symbol) { } for (; i >= 0; i--) { int bit = (symbol >> i) & 1; - price += - SevenZip.Compression.RangeCoder.Encoder.GetPrice(m_Encoders[context], bit); + price += SevenZip.Compression.RangeCoder.Encoder.GetPrice(m_Encoders[context], bit); context = (context << 1) | bit; } return price; @@ -145,8 +143,7 @@ public void Init() { } public Encoder2 GetSubCoder(int pos, byte prevByte) { - return m_Coders[((pos & m_PosMask) << m_NumPrevBits) - + ((prevByte & 0xFF) >>> (8 - m_NumPrevBits))]; + return m_Coders[((pos & m_PosMask) << m_NumPrevBits) + ((prevByte & 0xFF) >>> (8 - m_NumPrevBits))]; } } @@ -174,8 +171,8 @@ 
public void Init(int numPosStates) { _highCoder.Init(); } - public void Encode(SevenZip.Compression.RangeCoder.Encoder rangeEncoder, int symbol, - int posState) throws IOException { + public void Encode(SevenZip.Compression.RangeCoder.Encoder rangeEncoder, int symbol, int posState) + throws IOException { if (symbol < Base.kNumLowLenSymbols) { rangeEncoder.Encode(_choice, 0, 0); _lowCoder[posState].Encode(rangeEncoder, symbol); @@ -209,8 +206,7 @@ public void SetPrices(int posState, int numSymbols, int[] prices, int st) { prices[st + i] = b0 + _midCoder[posState].GetPrice(i - Base.kNumLowLenSymbols); } for (; i < numSymbols; i++) - prices[st + i] = - b1 + _highCoder.GetPrice(i - Base.kNumLowLenSymbols - Base.kNumMidLenSymbols); + prices[st + i] = b1 + _highCoder.GetPrice(i - Base.kNumLowLenSymbols - Base.kNumMidLenSymbols); } }; @@ -239,8 +235,8 @@ public void UpdateTables(int numPosStates) { UpdateTable(posState); } - public void Encode(SevenZip.Compression.RangeCoder.Encoder rangeEncoder, int symbol, - int posState) throws IOException { + public void Encode(SevenZip.Compression.RangeCoder.Encoder rangeEncoder, int symbol, int posState) + throws IOException { super.Encode(rangeEncoder, symbol, posState); if (--_counters[posState] == 0) UpdateTable(posState); @@ -284,8 +280,7 @@ public boolean IsShortRep() { Optimal[] _optimum = new Optimal[kNumOpts]; SevenZip.Compression.LZ.BinTree _matchFinder = null; - SevenZip.Compression.RangeCoder.Encoder _rangeEncoder = - new SevenZip.Compression.RangeCoder.Encoder(); + SevenZip.Compression.RangeCoder.Encoder _rangeEncoder = new SevenZip.Compression.RangeCoder.Encoder(); short[] _isMatch = new short[Base.kNumStates << Base.kNumPosStatesBitsMax]; short[] _isRep = new short[Base.kNumStates]; @@ -408,9 +403,8 @@ int ReadMatchDistances() throws java.io.IOException { if (_numDistancePairs > 0) { lenRes = _matchDistances[_numDistancePairs - 2]; if (lenRes == _numFastBytes) - lenRes += _matchFinder.GetMatchLen((int) lenRes - 1, - 
_matchDistances[_numDistancePairs - 1], - Base.kMatchMaxLen - lenRes); + lenRes += _matchFinder.GetMatchLen((int) lenRes - 1, _matchDistances[_numDistancePairs - 1], + Base.kMatchMaxLen - lenRes); } _additionalOffset++; return lenRes; @@ -425,8 +419,8 @@ void MovePos(int num) throws java.io.IOException { int GetRepLen1Price(int state, int posState) { return SevenZip.Compression.RangeCoder.Encoder.GetPrice0(_isRepG0[state]) + - SevenZip.Compression.RangeCoder.Encoder - .GetPrice0(_isRep0Long[(state << Base.kNumPosStatesBitsMax) + posState]); + SevenZip.Compression.RangeCoder.Encoder + .GetPrice0(_isRep0Long[(state << Base.kNumPosStatesBitsMax) + posState]); } int GetPureRepPrice(int repIndex, int state, int posState) { @@ -434,15 +428,14 @@ int GetPureRepPrice(int repIndex, int state, int posState) { if (repIndex == 0) { price = SevenZip.Compression.RangeCoder.Encoder.GetPrice0(_isRepG0[state]); price += SevenZip.Compression.RangeCoder.Encoder - .GetPrice1(_isRep0Long[(state << Base.kNumPosStatesBitsMax) + posState]); + .GetPrice1(_isRep0Long[(state << Base.kNumPosStatesBitsMax) + posState]); } else { price = SevenZip.Compression.RangeCoder.Encoder.GetPrice1(_isRepG0[state]); if (repIndex == 1) price += SevenZip.Compression.RangeCoder.Encoder.GetPrice0(_isRepG1[state]); else { price += SevenZip.Compression.RangeCoder.Encoder.GetPrice1(_isRepG1[state]); - price += - SevenZip.Compression.RangeCoder.Encoder.GetPrice(_isRepG2[state], repIndex - 2); + price += SevenZip.Compression.RangeCoder.Encoder.GetPrice(_isRepG2[state], repIndex - 2); } } return price; @@ -460,7 +453,7 @@ int GetPosLenPrice(int pos, int len, int posState) { price = _distancesPrices[(lenToPosState * Base.kNumFullDistances) + pos]; else price = _posSlotPrices[(lenToPosState << Base.kNumPosSlotBits) + GetPosSlot2(pos)] + - _alignPrices[pos & Base.kAlignMask]; + _alignPrices[pos & Base.kAlignMask]; return price + _lenEncoder.GetPrice(len - Base.kMatchMinLen, posState); } @@ -557,15 +550,14 @@ int 
GetOptimum(int position) throws IOException { int posState = (position & _posStateMask); _optimum[1].Price = SevenZip.Compression.RangeCoder.Encoder - .GetPrice0(_isMatch[(_state << Base.kNumPosStatesBitsMax) + posState]) + - _literalEncoder.GetSubCoder(position, _previousByte) - .GetPrice(!Base.StateIsCharState(_state), matchByte, currentByte); + .GetPrice0(_isMatch[(_state << Base.kNumPosStatesBitsMax) + posState]) + + _literalEncoder.GetSubCoder(position, _previousByte).GetPrice(!Base.StateIsCharState(_state), matchByte, + currentByte); _optimum[1].MakeAsChar(); int matchPrice = SevenZip.Compression.RangeCoder.Encoder - .GetPrice1(_isMatch[(_state << Base.kNumPosStatesBitsMax) + posState]); - int repMatchPrice = - matchPrice + SevenZip.Compression.RangeCoder.Encoder.GetPrice1(_isRep[_state]); + .GetPrice1(_isMatch[(_state << Base.kNumPosStatesBitsMax) + posState]); + int repMatchPrice = matchPrice + SevenZip.Compression.RangeCoder.Encoder.GetPrice1(_isRep[_state]); if (matchByte == currentByte) { int shortRepPrice = repMatchPrice + GetRepLen1Price(_state, posState); @@ -611,8 +603,7 @@ int GetOptimum(int position) throws IOException { } while (--repLen >= 2); } - int normalMatchPrice = - matchPrice + SevenZip.Compression.RangeCoder.Encoder.GetPrice0(_isRep[_state]); + int normalMatchPrice = matchPrice + SevenZip.Compression.RangeCoder.Encoder.GetPrice0(_isRep[_state]); len = ((repLens[0] >= 2) ? 
repLens[0] + 1 : 2); if (len <= lenMain) { @@ -728,11 +719,11 @@ int GetOptimum(int position) throws IOException { posState = (position & _posStateMask); int curAnd1Price = curPrice + - SevenZip.Compression.RangeCoder.Encoder - .GetPrice0(_isMatch[(state << Base.kNumPosStatesBitsMax) + posState]) - + - _literalEncoder.GetSubCoder(position, _matchFinder.GetIndexByte(0 - 2)) - .GetPrice(!Base.StateIsCharState(state), matchByte, currentByte); + SevenZip.Compression.RangeCoder.Encoder + .GetPrice0(_isMatch[(state << Base.kNumPosStatesBitsMax) + posState]) + + + _literalEncoder.GetSubCoder(position, _matchFinder.GetIndexByte(0 - 2)) + .GetPrice(!Base.StateIsCharState(state), matchByte, currentByte); Optimal nextOptimum = _optimum[cur + 1]; @@ -745,12 +736,11 @@ int GetOptimum(int position) throws IOException { } matchPrice = curPrice + SevenZip.Compression.RangeCoder.Encoder - .GetPrice1(_isMatch[(state << Base.kNumPosStatesBitsMax) + posState]); - repMatchPrice = - matchPrice + SevenZip.Compression.RangeCoder.Encoder.GetPrice1(_isRep[state]); + .GetPrice1(_isMatch[(state << Base.kNumPosStatesBitsMax) + posState]); + repMatchPrice = matchPrice + SevenZip.Compression.RangeCoder.Encoder.GetPrice1(_isRep[state]); if (matchByte == currentByte && - !(nextOptimum.PosPrev < cur && nextOptimum.BackPrev == 0)) { + !(nextOptimum.PosPrev < cur && nextOptimum.BackPrev == 0)) { int shortRepPrice = repMatchPrice + GetRepLen1Price(state, posState); if (shortRepPrice <= nextOptimum.Price) { nextOptimum.Price = shortRepPrice; @@ -777,16 +767,16 @@ int GetOptimum(int position) throws IOException { int posStateNext = (position + 1) & _posStateMask; int nextRepMatchPrice = curAnd1Price + - SevenZip.Compression.RangeCoder.Encoder.GetPrice1( - _isMatch[(state2 << Base.kNumPosStatesBitsMax) + posStateNext]) - + - SevenZip.Compression.RangeCoder.Encoder.GetPrice1(_isRep[state2]); + SevenZip.Compression.RangeCoder.Encoder + .GetPrice1(_isMatch[(state2 << Base.kNumPosStatesBitsMax) + 
posStateNext]) + + + SevenZip.Compression.RangeCoder.Encoder.GetPrice1(_isRep[state2]); { int offset = cur + 1 + lenTest2; while (lenEnd < offset) _optimum[++lenEnd].Price = kIfinityPrice; int curAndLenPrice = nextRepMatchPrice + GetRepPrice( - 0, lenTest2, state2, posStateNext); + 0, lenTest2, state2, posStateNext); Optimal optimum = _optimum[offset]; if (curAndLenPrice < optimum.Price) { optimum.Price = curAndLenPrice; @@ -809,8 +799,7 @@ int GetOptimum(int position) throws IOException { do { while (lenEnd < cur + lenTest) _optimum[++lenEnd].Price = kIfinityPrice; - int curAndLenPrice = - repMatchPrice + GetRepPrice(repIndex, lenTest, state, posState); + int curAndLenPrice = repMatchPrice + GetRepPrice(repIndex, lenTest, state, posState); Optimal optimum = _optimum[cur + lenTest]; if (curAndLenPrice < optimum.Price) { optimum.Price = curAndLenPrice; @@ -833,30 +822,28 @@ int GetOptimum(int position) throws IOException { int posStateNext = (position + lenTest) & _posStateMask; int curAndLenCharPrice = - repMatchPrice + GetRepPrice(repIndex, lenTest, state, posState) + - SevenZip.Compression.RangeCoder.Encoder.GetPrice0( - _isMatch[(state2 << Base.kNumPosStatesBitsMax) + posStateNext]) - + - _literalEncoder.GetSubCoder(position + lenTest, - _matchFinder.GetIndexByte(lenTest - 1 - 1)).GetPrice(true, - _matchFinder - .GetIndexByte(lenTest - 1 - (reps[repIndex] + 1)), - _matchFinder.GetIndexByte(lenTest - 1)); + repMatchPrice + GetRepPrice(repIndex, lenTest, state, posState) + + SevenZip.Compression.RangeCoder.Encoder + .GetPrice0( + _isMatch[(state2 << Base.kNumPosStatesBitsMax) + posStateNext]) + + + _literalEncoder.GetSubCoder(position + lenTest, + _matchFinder.GetIndexByte(lenTest - 1 - 1)).GetPrice(true, + _matchFinder.GetIndexByte(lenTest - 1 - (reps[repIndex] + 1)), + _matchFinder.GetIndexByte(lenTest - 1)); state2 = Base.StateUpdateChar(state2); posStateNext = (position + lenTest + 1) & _posStateMask; - int nextMatchPrice = - curAndLenCharPrice + 
SevenZip.Compression.RangeCoder.Encoder.GetPrice1( - _isMatch[(state2 << Base.kNumPosStatesBitsMax) + posStateNext]); - int nextRepMatchPrice = nextMatchPrice - + SevenZip.Compression.RangeCoder.Encoder.GetPrice1(_isRep[state2]); + int nextMatchPrice = curAndLenCharPrice + SevenZip.Compression.RangeCoder.Encoder + .GetPrice1(_isMatch[(state2 << Base.kNumPosStatesBitsMax) + posStateNext]); + int nextRepMatchPrice = + nextMatchPrice + SevenZip.Compression.RangeCoder.Encoder.GetPrice1(_isRep[state2]); // for(; lenTest2 >= 2; lenTest2--) { int offset = lenTest + 1 + lenTest2; while (lenEnd < cur + offset) _optimum[++lenEnd].Price = kIfinityPrice; - int curAndLenPrice = - nextRepMatchPrice + GetRepPrice(0, lenTest2, state2, posStateNext); + int curAndLenPrice = nextRepMatchPrice + GetRepPrice(0, lenTest2, state2, posStateNext); Optimal optimum = _optimum[cur + offset]; if (curAndLenPrice < optimum.Price) { optimum.Price = curAndLenPrice; @@ -874,14 +861,12 @@ int GetOptimum(int position) throws IOException { if (newLen > numAvailableBytes) { newLen = numAvailableBytes; - for (numDistancePairs = - 0; newLen > _matchDistances[numDistancePairs]; numDistancePairs += 2); + for (numDistancePairs = 0; newLen > _matchDistances[numDistancePairs]; numDistancePairs += 2); _matchDistances[numDistancePairs] = newLen; numDistancePairs += 2; } if (newLen >= startLen) { - normalMatchPrice = - matchPrice + SevenZip.Compression.RangeCoder.Encoder.GetPrice0(_isRep[state]); + normalMatchPrice = matchPrice + SevenZip.Compression.RangeCoder.Encoder.GetPrice0(_isRep[state]); while (lenEnd < cur + newLen) _optimum[++lenEnd].Price = kIfinityPrice; @@ -891,8 +876,7 @@ int GetOptimum(int position) throws IOException { for (int lenTest = startLen;; lenTest++) { int curBack = _matchDistances[offs + 1]; - int curAndLenPrice = - normalMatchPrice + GetPosLenPrice(curBack, lenTest, posState); + int curAndLenPrice = normalMatchPrice + GetPosLenPrice(curBack, lenTest, posState); Optimal optimum = 
_optimum[cur + lenTest]; if (curAndLenPrice < optimum.Price) { optimum.Price = curAndLenPrice; @@ -910,29 +894,25 @@ int GetOptimum(int position) throws IOException { int posStateNext = (position + lenTest) & _posStateMask; int curAndLenCharPrice = curAndLenPrice + - SevenZip.Compression.RangeCoder.Encoder - .GetPrice0(_isMatch[(state2 << Base.kNumPosStatesBitsMax) - + posStateNext]) - + - _literalEncoder.GetSubCoder(position + lenTest, - _matchFinder.GetIndexByte(lenTest - 1 - 1)).GetPrice(true, - _matchFinder.GetIndexByte(lenTest - (curBack + 1) - 1), - _matchFinder.GetIndexByte(lenTest - 1)); + SevenZip.Compression.RangeCoder.Encoder + .GetPrice0( + _isMatch[(state2 << Base.kNumPosStatesBitsMax) + posStateNext]) + + + _literalEncoder.GetSubCoder(position + lenTest, + _matchFinder.GetIndexByte(lenTest - 1 - 1)).GetPrice(true, + _matchFinder.GetIndexByte(lenTest - (curBack + 1) - 1), + _matchFinder.GetIndexByte(lenTest - 1)); state2 = Base.StateUpdateChar(state2); posStateNext = (position + lenTest + 1) & _posStateMask; - int nextMatchPrice = - curAndLenCharPrice + SevenZip.Compression.RangeCoder.Encoder - .GetPrice1(_isMatch[(state2 << Base.kNumPosStatesBitsMax) - + posStateNext]); - int nextRepMatchPrice = - nextMatchPrice + SevenZip.Compression.RangeCoder.Encoder - .GetPrice1(_isRep[state2]); + int nextMatchPrice = curAndLenCharPrice + SevenZip.Compression.RangeCoder.Encoder + .GetPrice1(_isMatch[(state2 << Base.kNumPosStatesBitsMax) + posStateNext]); + int nextRepMatchPrice = nextMatchPrice + + SevenZip.Compression.RangeCoder.Encoder.GetPrice1(_isRep[state2]); int offset = lenTest + 1 + lenTest2; while (lenEnd < cur + offset) _optimum[++lenEnd].Price = kIfinityPrice; - curAndLenPrice = nextRepMatchPrice - + GetRepPrice(0, lenTest2, state2, posStateNext); + curAndLenPrice = nextRepMatchPrice + GetRepPrice(0, lenTest2, state2, posStateNext); optimum = _optimum[cur + offset]; if (curAndLenPrice < optimum.Price) { optimum.Price = curAndLenPrice; @@ -973,8 +953,7 
@@ void WriteEndMarker(int posState) throws IOException { _posSlotEncoder[lenToPosState].Encode(_rangeEncoder, posSlot); int footerBits = 30; int posReduced = (1 << footerBits) - 1; - _rangeEncoder.EncodeDirectBits(posReduced >> Base.kNumAlignBits, - footerBits - Base.kNumAlignBits); + _rangeEncoder.EncodeDirectBits(posReduced >> Base.kNumAlignBits, footerBits - Base.kNumAlignBits); _posAlignEncoder.ReverseEncode(_rangeEncoder, posReduced & Base.kAlignMask); } @@ -1014,8 +993,7 @@ public void CodeOneBlock(long[] inSize, long[] outSize, boolean[] finished) thro _rangeEncoder.Encode(_isMatch, (_state << Base.kNumPosStatesBitsMax) + posState, 0); _state = Base.StateUpdateChar(_state); byte curByte = _matchFinder.GetIndexByte(0 - _additionalOffset); - _literalEncoder.GetSubCoder((int) (nowPos64), _previousByte).Encode(_rangeEncoder, - curByte); + _literalEncoder.GetSubCoder((int) (nowPos64), _previousByte).Encode(_rangeEncoder, curByte); _previousByte = curByte; _additionalOffset--; nowPos64++; @@ -1033,11 +1011,9 @@ public void CodeOneBlock(long[] inSize, long[] outSize, boolean[] finished) thro if (len == 1 && pos == -1) { _rangeEncoder.Encode(_isMatch, complexState, 0); byte curByte = _matchFinder.GetIndexByte((int) (0 - _additionalOffset)); - LiteralEncoder.Encoder2 subCoder = - _literalEncoder.GetSubCoder((int) nowPos64, _previousByte); + LiteralEncoder.Encoder2 subCoder = _literalEncoder.GetSubCoder((int) nowPos64, _previousByte); if (!Base.StateIsCharState(_state)) { - byte matchByte = _matchFinder - .GetIndexByte((int) (0 - _repDistances[0] - 1 - _additionalOffset)); + byte matchByte = _matchFinder.GetIndexByte((int) (0 - _repDistances[0] - 1 - _additionalOffset)); subCoder.EncodeMatched(_rangeEncoder, matchByte, curByte); } else subCoder.Encode(_rangeEncoder, curByte); @@ -1065,8 +1041,7 @@ public void CodeOneBlock(long[] inSize, long[] outSize, boolean[] finished) thro if (len == 1) _state = Base.StateUpdateShortRep(_state); else { - 
_repMatchLenEncoder.Encode(_rangeEncoder, len - Base.kMatchMinLen, - posState); + _repMatchLenEncoder.Encode(_rangeEncoder, len - Base.kMatchMinLen, posState); _state = Base.StateUpdateRep(_state); } int distance = _repDistances[pos]; @@ -1091,12 +1066,11 @@ public void CodeOneBlock(long[] inSize, long[] outSize, boolean[] finished) thro if (posSlot < Base.kEndPosModelIndex) BitTreeEncoder.ReverseEncode(_posEncoders, - baseVal - posSlot - 1, _rangeEncoder, footerBits, posReduced); + baseVal - posSlot - 1, _rangeEncoder, footerBits, posReduced); else { _rangeEncoder.EncodeDirectBits(posReduced >> Base.kNumAlignBits, - footerBits - Base.kNumAlignBits); - _posAlignEncoder.ReverseEncode(_rangeEncoder, - posReduced & Base.kAlignMask); + footerBits - Base.kNumAlignBits); + _posAlignEncoder.ReverseEncode(_rangeEncoder, posReduced & Base.kAlignMask); _alignPriceCount++; } } @@ -1153,7 +1127,7 @@ void ReleaseStreams() { } void SetStreams(java.io.InputStream inStream, java.io.OutputStream outStream, - long inSize, long outSize) { + long inSize, long outSize) { _inStream = inStream; _finished = false; Create(); @@ -1179,7 +1153,7 @@ void SetStreams(java.io.InputStream inStream, java.io.OutputStream outStream, boolean[] finished = new boolean[1]; public void Code(java.io.InputStream inStream, java.io.OutputStream outStream, - long inSize, long outSize, ICodeProgress progress) throws IOException { + long inSize, long outSize, ICodeProgress progress) throws IOException { _needReleaseMFStream = false; try { SetStreams(inStream, outStream, inSize, outSize); @@ -1203,8 +1177,7 @@ public void Code(java.io.InputStream inStream, java.io.OutputStream outStream, byte[] properties = new byte[kPropSize]; public void WriteCoderProperties(java.io.OutputStream outStream) throws IOException { - properties[0] = - (byte) ((_posStateBits * 5 + _numLiteralPosStateBits) * 9 + _numLiteralContextBits); + properties[0] = (byte) ((_posStateBits * 5 + _numLiteralPosStateBits) * 9 + 
_numLiteralContextBits); for (int i = 0; i < 4; i++) properties[1 + i] = (byte) (_dictionarySize >> (8 * i)); outStream.write(properties, 0, kPropSize); @@ -1219,7 +1192,7 @@ void FillDistancesPrices() { int footerBits = (int) ((posSlot >> 1) - 1); int baseVal = ((2 | (posSlot & 1)) << footerBits); tempPrices[i] = BitTreeEncoder.ReverseGetPrice(_posEncoders, - baseVal - posSlot - 1, footerBits, i - baseVal); + baseVal - posSlot - 1, footerBits, i - baseVal); } for (int lenToPosState = 0; lenToPosState < Base.kNumLenToPosStates; lenToPosState++) { @@ -1231,7 +1204,7 @@ void FillDistancesPrices() { _posSlotPrices[st + posSlot] = encoder.GetPrice(posSlot); for (posSlot = Base.kEndPosModelIndex; posSlot < _distTableSize; posSlot++) _posSlotPrices[st + posSlot] += ((((posSlot >> 1) - 1) - - Base.kNumAlignBits) << SevenZip.Compression.RangeCoder.Encoder.kNumBitPriceShiftBits); + - Base.kNumAlignBits) << SevenZip.Compression.RangeCoder.Encoder.kNumBitPriceShiftBits); int st2 = lenToPosState * Base.kNumFullDistances; int i; @@ -1259,8 +1232,7 @@ public boolean SetAlgorithm(int algorithm) { public boolean SetDictionarySize(int dictionarySize) { int kDicLogSizeMaxCompress = 29; - if (dictionarySize < (1 << Base.kDicLogSizeMin) - || dictionarySize > (1 << kDicLogSizeMaxCompress)) + if (dictionarySize < (1 << Base.kDicLogSizeMin) || dictionarySize > (1 << kDicLogSizeMaxCompress)) return false; _dictionarySize = dictionarySize; int dicLogSize; @@ -1290,8 +1262,8 @@ public boolean SetMatchFinder(int matchFinderIndex) { public boolean SetLcLpPb(int lc, int lp, int pb) { if (lp < 0 || lp > Base.kNumLitPosStatesBitsEncodingMax || - lc < 0 || lc > Base.kNumLitContextBitsMax || - pb < 0 || pb > Base.kNumPosStatesBitsEncodingMax) + lc < 0 || lc > Base.kNumLitContextBitsMax || + pb < 0 || pb > Base.kNumPosStatesBitsEncodingMax) return false; _numLiteralPosStateBits = lp; _numLiteralContextBits = lc; diff --git a/SevenZip/src/main/java/SevenZip/Compression/RangeCoder/BitTreeDecoder.java 
b/SevenZip/src/main/java/SevenZip/Compression/RangeCoder/BitTreeDecoder.java index 5b96544262a..0f503499d3c 100644 --- a/SevenZip/src/main/java/SevenZip/Compression/RangeCoder/BitTreeDecoder.java +++ b/SevenZip/src/main/java/SevenZip/Compression/RangeCoder/BitTreeDecoder.java @@ -33,7 +33,7 @@ public int ReverseDecode(Decoder rangeDecoder) throws java.io.IOException { } public static int ReverseDecode(short[] Models, int startIndex, - Decoder rangeDecoder, int NumBitLevels) throws java.io.IOException { + Decoder rangeDecoder, int NumBitLevels) throws java.io.IOException { int m = 1; int symbol = 0; for (int bitIndex = 0; bitIndex < NumBitLevels; bitIndex++) { diff --git a/SevenZip/src/main/java/SevenZip/Compression/RangeCoder/BitTreeEncoder.java b/SevenZip/src/main/java/SevenZip/Compression/RangeCoder/BitTreeEncoder.java index c3230b7e69f..08a531e1a79 100644 --- a/SevenZip/src/main/java/SevenZip/Compression/RangeCoder/BitTreeEncoder.java +++ b/SevenZip/src/main/java/SevenZip/Compression/RangeCoder/BitTreeEncoder.java @@ -60,7 +60,7 @@ public int ReverseGetPrice(int symbol) { } public static int ReverseGetPrice(short[] Models, int startIndex, - int NumBitLevels, int symbol) { + int NumBitLevels, int symbol) { int price = 0; int m = 1; for (int i = NumBitLevels; i != 0; i--) { @@ -73,7 +73,7 @@ public static int ReverseGetPrice(short[] Models, int startIndex, } public static void ReverseEncode(short[] Models, int startIndex, - Encoder rangeEncoder, int NumBitLevels, int symbol) throws IOException { + Encoder rangeEncoder, int NumBitLevels, int symbol) throws IOException { int m = 1; for (int i = 0; i < NumBitLevels; i++) { int bit = symbol & 1; diff --git a/SevenZip/src/main/java/SevenZip/Compression/RangeCoder/Encoder.java b/SevenZip/src/main/java/SevenZip/Compression/RangeCoder/Encoder.java index a314c55c369..a5276ef3d58 100644 --- a/SevenZip/src/main/java/SevenZip/Compression/RangeCoder/Encoder.java +++ 
b/SevenZip/src/main/java/SevenZip/Compression/RangeCoder/Encoder.java @@ -111,13 +111,12 @@ public void Encode(short[] probs, int index, int symbol) throws IOException { int end = 1 << (kNumBits - i); for (int j = start; j < end; j++) ProbPrices[j] = (i << kNumBitPriceShiftBits) + - (((end - j) << kNumBitPriceShiftBits) >>> (kNumBits - i - 1)); + (((end - j) << kNumBitPriceShiftBits) >>> (kNumBits - i - 1)); } } static public int GetPrice(int Prob, int symbol) { - return ProbPrices[(((Prob - symbol) ^ ((-symbol))) - & (kBitModelTotal - 1)) >>> kNumMoveReducingBits]; + return ProbPrices[(((Prob - symbol) ^ ((-symbol))) & (kBitModelTotal - 1)) >>> kNumMoveReducingBits]; } static public int GetPrice0(int Prob) { diff --git a/SevenZip/src/main/java/SevenZip/LzmaAlone.java b/SevenZip/src/main/java/SevenZip/LzmaAlone.java index 528a063756f..e203e6da550 100644 --- a/SevenZip/src/main/java/SevenZip/LzmaAlone.java +++ b/SevenZip/src/main/java/SevenZip/LzmaAlone.java @@ -118,19 +118,19 @@ else if (s.equalsIgnoreCase("b")) static void PrintHelp() { System.out.println( - "\nUsage: LZMA [...] inputFile outputFile\n" + - " e: encode file\n" + - " d: decode file\n" + - " b: Benchmark\n" + - "\n" + - // " -a{N}: set compression mode - [0, 1], default: 1 (max)\n" + - " -d{N}: set dictionary - [0,28], default: 23 (8MB)\n" + - " -fb{N}: set number of fast bytes - [5, 273], default: 128\n" + - " -lc{N}: set number of literal context bits - [0, 8], default: 3\n" + - " -lp{N}: set number of literal pos bits - [0, 4], default: 0\n" + - " -pb{N}: set number of pos bits - [0, 4], default: 2\n" + - " -mf{MF_ID}: set Match Finder: [bt2, bt4], default: bt4\n" + - " -eos: write End Of Stream marker\n"); + "\nUsage: LZMA [...] 
inputFile outputFile\n" + + " e: encode file\n" + + " d: decode file\n" + + " b: Benchmark\n" + + "\n" + + // " -a{N}: set compression mode - [0, 1], default: 1 (max)\n" + + " -d{N}: set dictionary - [0,28], default: 23 (8MB)\n" + + " -fb{N}: set number of fast bytes - [5, 273], default: 128\n" + + " -lc{N}: set number of literal context bits - [0, 8], default: 3\n" + + " -lp{N}: set number of literal pos bits - [0, 4], default: 0\n" + + " -pb{N}: set number of pos bits - [0, 4], default: 2\n" + + " -mf{MF_ID}: set Match Finder: [bt2, bt4], default: bt4\n" + + " -eos: write End Of Stream marker\n"); } public static void main(String[] args) throws Exception { @@ -158,10 +158,9 @@ public static void main(String[] args) throws Exception { java.io.File inFile = new java.io.File(params.InFile); java.io.File outFile = new java.io.File(params.OutFile); - java.io.BufferedInputStream inStream = - new java.io.BufferedInputStream(new java.io.FileInputStream(inFile)); + java.io.BufferedInputStream inStream = new java.io.BufferedInputStream(new java.io.FileInputStream(inFile)); java.io.BufferedOutputStream outStream = - new java.io.BufferedOutputStream(new java.io.FileOutputStream(outFile)); + new java.io.BufferedOutputStream(new java.io.FileOutputStream(outFile)); boolean eos = false; if (params.Eos) diff --git a/SevenZip/src/main/java/SevenZip/LzmaBench.java b/SevenZip/src/main/java/SevenZip/LzmaBench.java index 6ebcf466bd8..bbaf57b8350 100644 --- a/SevenZip/src/main/java/SevenZip/LzmaBench.java +++ b/SevenZip/src/main/java/SevenZip/LzmaBench.java @@ -22,7 +22,7 @@ public void Init() { public int GetRnd() { return ((A1 = 36969 * (A1 & 0xffff) + (A1 >>> 16)) << 16) ^ - ((A2 = 18000 * (A2 & 0xffff) + (A2 >>> 16))); + ((A2 = 18000 * (A2 & 0xffff) + (A2 >>> 16))); } }; @@ -239,12 +239,12 @@ static long GetDecompressRating(long elapsedTime, long outSize, long inSize) { } static long GetTotalRating( - int dictionarySize, - long elapsedTimeEn, long sizeEn, - long elapsedTimeDe, - 
long inSizeDe, long outSizeDe) { + int dictionarySize, + long elapsedTimeEn, long sizeEn, + long elapsedTimeDe, + long inSizeDe, long outSizeDe) { return (GetCompressRating(dictionarySize, elapsedTimeEn, sizeEn) + - GetDecompressRating(elapsedTimeDe, inSizeDe, outSizeDe)) / 2; + GetDecompressRating(elapsedTimeDe, inSizeDe, outSizeDe)) / 2; } static void PrintValue(long v) { @@ -261,10 +261,10 @@ static void PrintRating(long rating) { } static void PrintResults( - int dictionarySize, - long elapsedTime, - long size, - boolean decompressMode, long secondSize) { + int dictionarySize, + long elapsedTime, + long size, + boolean decompressMode, long secondSize) { long speed = MyMultDiv64(size, elapsedTime); PrintValue(speed / 1024); System.out.print(" KB/s "); @@ -366,7 +366,7 @@ static public int LzmaBenchmark(int numIterations, int dictionarySize) throws Ex PrintResults(dictionarySize, totalEncodeTime, totalBenchSize, false, 0); System.out.print(" "); PrintResults(dictionarySize, totalDecodeTime, - kBufferSize * (long) numIterations, true, totalCompressedSize); + kBufferSize * (long) numIterations, true, totalCompressedSize); System.out.println(" Average"); return 0; } diff --git a/Stats/src/main/java/io/deephaven/stats/ObjectAllocationCollector.java b/Stats/src/main/java/io/deephaven/stats/ObjectAllocationCollector.java index c8c7507f466..d418c5a69db 100644 --- a/Stats/src/main/java/io/deephaven/stats/ObjectAllocationCollector.java +++ b/Stats/src/main/java/io/deephaven/stats/ObjectAllocationCollector.java @@ -13,8 +13,7 @@ import io.deephaven.base.stats.Value; /** - * Use the allocation instrumenter from http://code.google.com/p/java-allocation-instrumenter/ to - * produce stats + * Use the allocation instrumenter from http://code.google.com/p/java-allocation-instrumenter/ to produce stats * * To use make sure you set * @@ -37,8 +36,7 @@ public class ObjectAllocationCollector { } String sEnable = System.getProperty("ObjectAllocationCollector.dumpStack"); - if (null 
!= sEnable - && (sEnable.toLowerCase().contains("y") || sEnable.toLowerCase().contains("t"))) { + if (null != sEnable && (sEnable.toLowerCase().contains("y") || sEnable.toLowerCase().contains("t"))) { DUMP_STACK = true; } } @@ -62,25 +60,24 @@ private AllocationState(final Value size, final Class clazz) { // } // } - public static KeyedObjectKey keyDef = - new KeyedObjectKey() { - @Override - protected KeyedObjectKey clone() throws CloneNotSupportedException { - return (KeyedObjectKey) super.clone(); - } + public static KeyedObjectKey keyDef = new KeyedObjectKey() { + @Override + protected KeyedObjectKey clone() throws CloneNotSupportedException { + return (KeyedObjectKey) super.clone(); + } - public Class getKey(AllocationState v) { - return v.clazz; - } + public Class getKey(AllocationState v) { + return v.clazz; + } - public int hashKey(Class k) { - return k.hashCode(); - } + public int hashKey(Class k) { + return k.hashCode(); + } - public boolean equalKey(Class k, AllocationState v) { - return k.equals(v.clazz); - } - }; + public boolean equalKey(Class k, AllocationState v) { + return k.equals(v.clazz); + } + }; public void sample(long bytes) { if (size != null) { @@ -105,35 +102,32 @@ public void sample(long bytes) { } private final KeyedObjectHash classAllocationStates = - new KeyedObjectHash(AllocationState.keyDef); + new KeyedObjectHash(AllocationState.keyDef); private final KeyedObjectHash.ValueFactoryT STATE_FACTORY = - new KeyedObjectHash.ValueFactoryT() { - public AllocationState newValue(Class key, Value value) { - return new AllocationState(value, key); - } - }; + new KeyedObjectHash.ValueFactoryT() { + public AllocationState newValue(Class key, Value value) { + return new AllocationState(value, key); + } + }; public ObjectAllocationCollector() { // This hooks the JVM bytecode to call us back every time an object is allocated AllocationRecorder.addSampler(new Sampler() { public void sampleAllocation(int count, String desc, - Object newObj, long size) 
{ - // unfortunately, we can't use this b/c it syncs around the putIfAbsent AND the - // Stats.makeItem which causes deadlocks! + Object newObj, long size) { + // unfortunately, we can't use this b/c it syncs around the putIfAbsent AND the Stats.makeItem which + // causes deadlocks! // classAllocationStates.putIfAbsent(newObj.getClass(), STATE_FACTORY).sample(size); final Class clazz = newObj.getClass(); - if (clazz.getName().startsWith("sun.") - || clazz.getName().endsWith("AllocationState")) + if (clazz.getName().startsWith("sun.") || clazz.getName().endsWith("AllocationState")) return; final AllocationState allocationState = classAllocationStates.get(clazz); if (allocationState == null) { - final Value value = - Stats.makeItem("GAllocation", clazz.getName(), State.FACTORY).getValue(); + final Value value = Stats.makeItem("GAllocation", clazz.getName(), State.FACTORY).getValue(); final AllocationState state = new AllocationState(value, clazz); - final AllocationState existing = - classAllocationStates.putIfAbsent(clazz, state); + final AllocationState existing = classAllocationStates.putIfAbsent(clazz, state); (existing == null ? state : existing).sample(size); } else { allocationState.sample(size); diff --git a/Stats/src/main/java/io/deephaven/stats/StatsCPUCollector.java b/Stats/src/main/java/io/deephaven/stats/StatsCPUCollector.java index e72a93e98a8..b7b2ea6848e 100644 --- a/Stats/src/main/java/io/deephaven/stats/StatsCPUCollector.java +++ b/Stats/src/main/java/io/deephaven/stats/StatsCPUCollector.java @@ -18,14 +18,13 @@ import java.lang.management.*; /** - * Collects statistic related to CPU and memory usage of the entire system, the process, and each - * thread in the process. + * Collects statistic related to CPU and memory usage of the entire system, the process, and each thread in the process. 
*/ public class StatsCPUCollector { private static final Logger log = LoggerFactory.getLogger(StatsCPUCollector.class); public static final boolean MEASURE_PER_THREAD_CPU = - Configuration.getInstance().getBoolean("measurement.per_thread_cpu"); + Configuration.getInstance().getBoolean("measurement.per_thread_cpu"); private static final long NANOS = 1000000000; private static final long MILLIS = 1000; @@ -74,8 +73,7 @@ public class StatsCPUCollector { long seconds = interval / MILLIS; this.divisor = NANOS / (seconds * 10); Stats.makeGroup("Kernel", "Unix kernel statistics, as read from /proc/stat"); - Stats.makeGroup("Proc", - "Unix process statistics, as read from /proc/self/stat and /proc/self/fd"); + Stats.makeGroup("Proc", "Unix process statistics, as read from /proc/self/stat and /proc/self/fd"); Stats.makeGroup("CPU", "JMX CPU usage data, per-thread and for the entire process"); if (OSUtil.runningMacOS() || OSUtil.runningWindows()) { @@ -97,48 +95,47 @@ public ThreadState(long id) { this.id = id; } - public static KeyedLongObjectKey keyDef = - new KeyedLongObjectKey() { - public Long getKey(ThreadState v) { - return v.id; - } + public static KeyedLongObjectKey keyDef = new KeyedLongObjectKey() { + public Long getKey(ThreadState v) { + return v.id; + } - public long getLongKey(ThreadState v) { - return v.id; - } + public long getLongKey(ThreadState v) { + return v.id; + } - public int hashKey(Long k) { - return (int) k.longValue(); - } + public int hashKey(Long k) { + return (int) k.longValue(); + } - public int hashLongKey(long k) { - return (int) k; - } + public int hashLongKey(long k) { + return (int) k; + } - public boolean equalKey(Long k, ThreadState v) { - return k == v.id; - } + public boolean equalKey(Long k, ThreadState v) { + return k == v.id; + } - public boolean equalLongKey(long k, ThreadState v) { - return k == v.id; - } - }; + public boolean equalLongKey(long k, ThreadState v) { + return k == v.id; + } + }; public static 
KeyedLongObjectHash.ValueFactory factory = - new KeyedLongObjectHash.ValueFactory() { - public ThreadState newValue(long key) { - return new ThreadState(key); - } + new KeyedLongObjectHash.ValueFactory() { + public ThreadState newValue(long key) { + return new ThreadState(key); + } - public ThreadState newValue(Long key) { - return new ThreadState(key); - } - }; + public ThreadState newValue(Long key) { + return new ThreadState(key); + } + }; } /** the map containing all thread states */ private static KeyedLongObjectHashMap threadStates = - new KeyedLongObjectHashMap<>(100, ThreadState.keyDef); + new KeyedLongObjectHashMap<>(100, ThreadState.keyDef); /** the user time for the process as a whole */ private State processUserTime; @@ -198,8 +195,7 @@ private boolean getNextFieldSample(State v, int nb) { private long getNextFieldLong(int nb) { long result = 0; - while (statBufferIndex < nb && statBuffer[statBufferIndex] >= '0' - && statBuffer[statBufferIndex] <= '9') { + while (statBufferIndex < nb && statBuffer[statBufferIndex] >= '0' && statBuffer[statBufferIndex] <= '9') { result *= 10; result += statBuffer[statBufferIndex] - '0'; statBufferIndex++; @@ -208,8 +204,7 @@ private long getNextFieldLong(int nb) { } private boolean peekNextLong(int nb) { - return statBufferIndex < nb && statBuffer[statBufferIndex] >= '0' - && statBuffer[statBufferIndex] <= '9'; + return statBufferIndex < nb && statBuffer[statBufferIndex] >= '0' && statBuffer[statBufferIndex] <= '9'; } /** @@ -235,24 +230,23 @@ private void updateSys() { if (startsWith("cpu ", nb)) { if (skipNextField(nb) && peekNextLong(nb)) { if (statSysUserJiffies == null) { - statSysUserJiffies = Stats.makeItem("Kernel", "UserJiffies", - Counter.FACTORY, - "User jiffies per 10 second interval (1000 equals 1 full CPU)") - .getValue(); - statSysSystemJiffies = Stats.makeItem("Kernel", "SystemJiffies", - Counter.FACTORY, - "System jiffies per 10 second interval (1000 equals 1 full CPU)") - .getValue(); + 
statSysUserJiffies = Stats + .makeItem("Kernel", "UserJiffies", Counter.FACTORY, + "User jiffies per 10 second interval (1000 equals 1 full CPU)") + .getValue(); + statSysSystemJiffies = Stats + .makeItem("Kernel", "SystemJiffies", Counter.FACTORY, + "System jiffies per 10 second interval (1000 equals 1 full CPU)") + .getValue(); } - if (getNextFieldDeltaJiffies(statSysUserJiffies, nb) - && skipNextField(nb) && peekNextLong(nb)) { - if (getNextFieldDeltaJiffies(statSysSystemJiffies, nb) - && skipNextField(nb) && skipNextField(nb) && peekNextLong(nb)) { + if (getNextFieldDeltaJiffies(statSysUserJiffies, nb) && skipNextField(nb) + && peekNextLong(nb)) { + if (getNextFieldDeltaJiffies(statSysSystemJiffies, nb) && skipNextField(nb) + && skipNextField(nb) && peekNextLong(nb)) { if (statSysIOWait == null) { - statSysIOWait = Stats.makeItem("Kernel", "IOWait", - Counter.FACTORY, - "IOWait jiffies per 10 second interval (1000 equals 1 full CPU)") - .getValue(); + statSysIOWait = Stats.makeItem("Kernel", "IOWait", Counter.FACTORY, + "IOWait jiffies per 10 second interval (1000 equals 1 full CPU)") + .getValue(); } getNextFieldDeltaJiffies(statSysIOWait, nb); } @@ -262,9 +256,9 @@ && skipNextField(nb) && skipNextField(nb) && peekNextLong(nb)) { if (skipNextField(nb) && peekNextLong(nb)) { if (statSysPageIn == null) { statSysPageIn = Stats.makeItem("Kernel", "PageIn", Counter.FACTORY, - "Number of pages read in from disk").getValue(); - statSysPageOut = Stats.makeItem("Kernel", "PageOut", - Counter.FACTORY, "Number of pages written to disk").getValue(); + "Number of pages read in from disk").getValue(); + statSysPageOut = Stats.makeItem("Kernel", "PageOut", Counter.FACTORY, + "Number of pages written to disk").getValue(); } if (getNextFieldDelta(statSysPageIn, nb) && peekNextLong(nb)) { getNextFieldDelta(statSysPageOut, nb); @@ -273,26 +267,27 @@ && skipNextField(nb) && skipNextField(nb) && peekNextLong(nb)) { } else if (startsWith("swap", nb)) { if (statSysSwapIn == null) { 
statSysSwapIn = Stats.makeItem("Kernel", "SwapIn", Counter.FACTORY, - "Number of pages read from swap space").getValue(); + "Number of pages read from swap space").getValue(); statSysSwapOut = Stats.makeItem("Kernel", "SwapOut", Counter.FACTORY, - "Number of pages written to swap space").getValue(); + "Number of pages written to swap space").getValue(); } - if (skipNextField(nb) && getNextFieldDelta(statSysSwapIn, nb) - && peekNextLong(nb)) { + if (skipNextField(nb) && getNextFieldDelta(statSysSwapIn, nb) && peekNextLong(nb)) { getNextFieldDelta(statSysSwapOut, nb); } } else if (startsWith("intr", nb)) { if (statSysInterrupts == null) { - statSysInterrupts = Stats.makeItem("Kernel", "Interrupts", - Counter.FACTORY, "Number of interrupts").getValue(); + statSysInterrupts = + Stats.makeItem("Kernel", "Interrupts", Counter.FACTORY, "Number of interrupts") + .getValue(); } if (skipNextField(nb)) { getNextFieldDelta(statSysInterrupts, nb); } } else if (startsWith("ctxt", nb)) { if (statSysCtxt == null) { - statSysCtxt = Stats.makeItem("Kernel", "Ctxt", Counter.FACTORY, - "Number of context switches").getValue(); + statSysCtxt = + Stats.makeItem("Kernel", "Ctxt", Counter.FACTORY, "Number of context switches") + .getValue(); } if (skipNextField(nb)) { getNextFieldDelta(statSysCtxt, nb); @@ -324,22 +319,22 @@ private void updateProc() { } procFile.getChannel().position(0); if (statProcMinorFaults == null) { - statProcMinorFaults = Stats.makeItem("Proc", "MinorFaults", Counter.FACTORY, - "Minor faults the process has incurred").getValue(); - statProcMajorFaults = Stats.makeItem("Proc", "MajorFaults", Counter.FACTORY, - "Major faults the process has incurred").getValue(); - statProcUserJiffies = Stats - .makeItem("Proc", "UserJiffies", Counter.FACTORY, - "User jiffies per 10 second interval (1000 equals 1 full CPU)") - .getValue(); - statProcSystemJiffies = Stats - .makeItem("Proc", "SystemJiffies", Counter.FACTORY, - "System jiffies per 10 second interval (1000 equals 1 
full CPU)") - .getValue(); - statProcVSZ = Stats.makeItem("Proc", "VSZ", State.FACTORY, - "Virtual size of the process in kilobytes").getValue(); - statProcRSS = Stats.makeItem("Proc", "RSS", State.FACTORY, - "Resident set size of the process in pages").getValue(); + statProcMinorFaults = Stats + .makeItem("Proc", "MinorFaults", Counter.FACTORY, "Minor faults the process has incurred") + .getValue(); + statProcMajorFaults = Stats + .makeItem("Proc", "MajorFaults", Counter.FACTORY, "Major faults the process has incurred") + .getValue(); + statProcUserJiffies = Stats.makeItem("Proc", "UserJiffies", Counter.FACTORY, + "User jiffies per 10 second interval (1000 equals 1 full CPU)").getValue(); + statProcSystemJiffies = Stats.makeItem("Proc", "SystemJiffies", Counter.FACTORY, + "System jiffies per 10 second interval (1000 equals 1 full CPU)").getValue(); + statProcVSZ = + Stats.makeItem("Proc", "VSZ", State.FACTORY, "Virtual size of the process in kilobytes") + .getValue(); + statProcRSS = + Stats.makeItem("Proc", "RSS", State.FACTORY, "Resident set size of the process in pages") + .getValue(); } statBufferIndex = 0; int nb = procFile.read(statBuffer, 0, statBuffer.length); @@ -378,10 +373,11 @@ private void updateProcFD() { return; } if (statProcNumFDs == null) { - statProcNumFDs = Stats.makeItem("Proc", "NumFDs", State.FACTORY, - "Number of open file descriptors in the process").getValue(); + statProcNumFDs = Stats + .makeItem("Proc", "NumFDs", State.FACTORY, "Number of open file descriptors in the process") + .getValue(); statProcMaxFD = Stats.makeItem("Proc", "MaxFD", State.FACTORY, - "Highest-numbered file descriptors in the process").getValue(); + "Highest-numbered file descriptors in the process").getValue(); } int maxFd = -1; for (String s : entries) { @@ -415,20 +411,18 @@ private void updatePerThreadCPU() { if (tinfo == null) { continue; } - ThreadState state = - threadStates.putIfAbsent(tinfo.getThreadId(), ThreadState.factory); + ThreadState state = 
threadStates.putIfAbsent(tinfo.getThreadId(), ThreadState.factory); long cpuTime = threadMXBean.getThreadCpuTime(state.id); long userTime = threadMXBean.getThreadUserTime(state.id); if (state.name == null) { // first time we've seen this thread, no sample state.name = tinfo.getThreadName(); state.userTime = Stats.makeItem("CPU", state.name + "-userTime", State.FACTORY, - "Per-thread CPU usage in user mode, 1000 equals 1 full CPU, as reported by Java") - .getValue(); - state.systemTime = Stats.makeItem("CPU", state.name + "-systemTime", - State.FACTORY, - "Per-thread CPU usage in system mode, 1000 equals 1 full CPU, as reported by Java") - .getValue(); + "Per-thread CPU usage in user mode, 1000 equals 1 full CPU, as reported by Java") + .getValue(); + state.systemTime = Stats.makeItem("CPU", state.name + "-systemTime", State.FACTORY, + "Per-thread CPU usage in system mode, 1000 equals 1 full CPU, as reported by Java") + .getValue(); } else { long deltaCpuTime = cpuTime - state.lastCpuTime; long deltaUserTime = userTime - state.lastUserTime; @@ -446,11 +440,9 @@ private void updatePerThreadCPU() { } if (processUserTime == null) { processUserTime = Stats.makeItem("CPU", "process-userTime", State.FACTORY, - "Process CPU usage in user mode, 1000 equals 1 full CPU, as reported by JMX") - .getValue(); + "Process CPU usage in user mode, 1000 equals 1 full CPU, as reported by JMX").getValue(); processSystemTime = Stats.makeItem("CPU", "process-systemTime", State.FACTORY, - "Process CPU usage in system mode, 1000 equals 1 full CPU, as reported by JMX") - .getValue(); + "Process CPU usage in system mode, 1000 equals 1 full CPU, as reported by JMX").getValue(); } processUserTime.sample(deltaProcessUserTime / divisor); processSystemTime.sample((deltaProcessCpuTime - deltaProcessUserTime) / divisor); diff --git a/Stats/src/main/java/io/deephaven/stats/StatsDriver.java b/Stats/src/main/java/io/deephaven/stats/StatsDriver.java index 351b8671561..7d41f872532 100644 --- 
a/Stats/src/main/java/io/deephaven/stats/StatsDriver.java +++ b/Stats/src/main/java/io/deephaven/stats/StatsDriver.java @@ -50,7 +50,7 @@ public boolean cmsAlertEnabled() { private final TimestampBuffer appTimestamp; public final static String header = - "Stat,IntervalName,NowSec,NowString,AppNowSec,AppNowString,TypeTag,Name,N,Sum,Last,Min,Max,Avg,Sum2,Stdev"; + "Stat,IntervalName,NowSec,NowString,AppNowSec,AppNowString,TypeTag,Name,N,Sum,Last,Min,Max,Avg,Sum2,Stdev"; private long nextInvocation = System.currentTimeMillis(); private long nextCpuUpdate = nextInvocation + CPU_INTERVAL; @@ -65,7 +65,7 @@ public boolean cmsAlertEnabled() { private static final int GUESS_ENTRY_SIZE = 256; private final Value statsTiming = Stats.makeItem("Stats", "updateDuration", State.FACTORY, - "Microseconds required to update the statistics histories each second").getValue(); + "Microseconds required to update the statistics histories each second").getValue(); private final Clock clock; private final StatsIntradayLogger intraday; @@ -95,8 +95,7 @@ public StatsDriver(Clock clock, StatsIntradayLogger intraday, boolean getFdStats this(clock, intraday, getFdStats, StatusAdapter.NULL); } - public StatsDriver(Clock clock, StatsIntradayLogger intraday, boolean getFdStats, - StatusAdapter statusAdapter) { + public StatsDriver(Clock clock, StatsIntradayLogger intraday, boolean getFdStats, StatusAdapter statusAdapter) { Properties props = Configuration.getInstance().getProperties(); String path = props.getProperty("stats.log.prefix"); if (path != null) { @@ -124,8 +123,7 @@ public StatsDriver(Clock clock, StatsIntradayLogger intraday, boolean getFdStats this.sink = null; this.entries = null; } else { - LogBufferPool bufferPool = - new LogBufferPoolImpl(History.INTERVALS.length * 20, BUFFER_SIZE); + LogBufferPool bufferPool = new LogBufferPoolImpl(History.INTERVALS.length * 20, BUFFER_SIZE); this.entryPool = new LogEntryPoolImpl(History.INTERVALS.length * 20, bufferPool); this.sink = new 
LogSinkImpl<>(path, 3600 * 1000, entryPool, true); this.entries = new LogEntry[History.INTERVALS.length]; @@ -136,8 +134,7 @@ public StatsDriver(Clock clock, StatsIntradayLogger intraday, boolean getFdStats this.sinkHisto = null; this.entriesHisto = null; } else { - LogBufferPool bufferPool = - new LogBufferPoolImpl(History.INTERVALS.length * 20, BUFFER_SIZE); + LogBufferPool bufferPool = new LogBufferPoolImpl(History.INTERVALS.length * 20, BUFFER_SIZE); this.entryPoolHisto = new LogEntryPoolImpl(History.INTERVALS.length * 20, bufferPool); this.sinkHisto = new LogSinkImpl<>(histoPath, 3600 * 1000, entryPoolHisto, true); this.entriesHisto = new LogEntry[History.INTERVALS.length]; @@ -147,7 +144,7 @@ public StatsDriver(Clock clock, StatsIntradayLogger intraday, boolean getFdStats this.intraday = intraday; if (this.clock != null) { clockValue = Stats.makeItem("Clock", "value", State.FACTORY, - "The value of the Clock, useful for mapping data from simulation runs").getValue(); + "The value of the Clock, useful for mapping data from simulation runs").getValue(); } else { clockValue = null; } @@ -156,8 +153,7 @@ public StatsDriver(Clock clock, StatsIntradayLogger intraday, boolean getFdStats long delay = STEP - (now % STEP); nextInvocation = now + delay; cpuStats = new StatsCPUCollector(CPU_INTERVAL, getFdStats); - memStats = new StatsMemoryCollector(MEM_INTERVAL, statusAdapter::sendAlert, - statusAdapter::cmsAlertEnabled); + memStats = new StatsMemoryCollector(MEM_INTERVAL, statusAdapter::sendAlert, statusAdapter::cmsAlertEnabled); if (Configuration.getInstance().getBoolean("allocation.stats.enabled")) { objectAllocation = new ObjectAllocationCollector(); } @@ -191,8 +187,7 @@ public void timedOut() { for (int i = 0; i < History.INTERVALS.length; ++i) { entries[i] = entryPool.take().start(sink, LogLevel.INFO, now * 1000); if (entriesHisto != null) { - entriesHisto[i] = - entryPoolHisto.take().start(sinkHisto, LogLevel.INFO, now * 1000); + entriesHisto[i] = 
entryPoolHisto.take().start(sinkHisto, LogLevel.INFO, now * 1000); } } Stats.update(LISTENER, now, appNow, REPORT_INTERVAL); @@ -217,8 +212,8 @@ private void schedule() { private final ItemUpdateListener LISTENER = new ItemUpdateListener() { @Override - public void handleItemUpdated(Item item, long now, long appNow, int intervalIndex, - long intervalMillis, String intervalName) { + public void handleItemUpdated(Item item, long now, long appNow, int intervalIndex, long intervalMillis, + String intervalName) { final Value v = item.getValue(); final History history = v.getHistory(); final char typeTag = v.getTypeTag(); @@ -242,43 +237,43 @@ public void handleItemUpdated(Item item, long now, long appNow, int intervalInde if (e.size() > BUFFER_SIZE - GUESS_ENTRY_SIZE) { e.end(); e = entriesHisto[intervalIndex] = - entryPoolHisto.take().start(sinkHisto, LogLevel.INFO, now * 1000); + entryPoolHisto.take().start(sinkHisto, LogLevel.INFO, now * 1000); } e.append("HISTOGRAM") - .append(',').append(intervalName) - .append(',').append(now / 1000) - .append(',').appendTimestamp(now, systemTimestamp) - .append(',').append(appNow / 1000) - .append(',').appendTimestamp(appNow, appTimestamp) - .append(',').append(v.getTypeTag()) - .append(',').append(item.getGroupName()) - .append('.').append(item.getName()) - .append(',').append(n) - .append(',').append(sum) - .append(',').append(last) - .append(',').append(min) - .append(',').append(max) - .append(',').append(avg) - .append(',').append(sum2) - .append(',').append(stdev) - .append(',').append(nh.getHistogramString()) - .nl(); + .append(',').append(intervalName) + .append(',').append(now / 1000) + .append(',').appendTimestamp(now, systemTimestamp) + .append(',').append(appNow / 1000) + .append(',').appendTimestamp(appNow, appTimestamp) + .append(',').append(v.getTypeTag()) + .append(',').append(item.getGroupName()) + .append('.').append(item.getName()) + .append(',').append(n) + .append(',').append(sum) + .append(',').append(last) 
+ .append(',').append(min) + .append(',').append(max) + .append(',').append(avg) + .append(',').append(sum2) + .append(',').append(stdev) + .append(',').append(nh.getHistogramString()) + .nl(); } intraday.log( - intervalName, - now, - appNow, - v.getTypeTag(), - item.getCompactName(), - n, - sum, - last, - min, - max, - avg, - sum2, - stdev, - nh.getHistogram()); + intervalName, + now, + appNow, + v.getTypeTag(), + item.getCompactName(), + n, + sum, + last, + min, + max, + avg, + sum2, + stdev, + nh.getHistogram()); break; } default: { @@ -297,43 +292,42 @@ public void handleItemUpdated(Item item, long now, long appNow, int intervalInde e = entries[intervalIndex]; if (e.size() > BUFFER_SIZE - GUESS_ENTRY_SIZE) { e.end(); - e = entries[intervalIndex] = - entryPool.take().start(sink, LogLevel.INFO, now * 1000); + e = entries[intervalIndex] = entryPool.take().start(sink, LogLevel.INFO, now * 1000); } e.append("STAT") - .append(',').append(intervalName) - .append(',').append(now / 1000) - .append(',').appendTimestamp(now, systemTimestamp) - .append(',').append(appNow / 1000) - .append(',').appendTimestamp(appNow, appTimestamp) - .append(',').append(v.getTypeTag()) - .append(',').append(item.getGroupName()) - .append('.').append(item.getName()) - .append(',').append(n) - .append(',').append(sum) - .append(',').append(last) - .append(',').append(min) - .append(',').append(max) - .append(',').append(avg) - .append(',').append(sum2) - .append(',').append(stdev) - .nl(); + .append(',').append(intervalName) + .append(',').append(now / 1000) + .append(',').appendTimestamp(now, systemTimestamp) + .append(',').append(appNow / 1000) + .append(',').appendTimestamp(appNow, appTimestamp) + .append(',').append(v.getTypeTag()) + .append(',').append(item.getGroupName()) + .append('.').append(item.getName()) + .append(',').append(n) + .append(',').append(sum) + .append(',').append(last) + .append(',').append(min) + .append(',').append(max) + .append(',').append(avg) + 
.append(',').append(sum2) + .append(',').append(stdev) + .nl(); } intraday.log( - intervalName, - now, - appNow, - v.getTypeTag(), - item.getCompactName(), - n, - sum, - last, - min, - max, - avg, - sum2, - stdev); + intervalName, + now, + appNow, + v.getTypeTag(), + item.getCompactName(), + n, + sum, + last, + min, + max, + avg, + sum2, + stdev); break; } } diff --git a/Stats/src/main/java/io/deephaven/stats/StatsIntradayLogger.java b/Stats/src/main/java/io/deephaven/stats/StatsIntradayLogger.java index 37138ed7bc1..3ac123c6756 100644 --- a/Stats/src/main/java/io/deephaven/stats/StatsIntradayLogger.java +++ b/Stats/src/main/java/io/deephaven/stats/StatsIntradayLogger.java @@ -5,23 +5,21 @@ package io.deephaven.stats; public interface StatsIntradayLogger { - void log(String intervalName, long now, long appNow, char typeTag, String compactName, long n, - long sum, long last, long min, long max, long avg, long sum2, long stdev); + void log(String intervalName, long now, long appNow, char typeTag, String compactName, long n, long sum, long last, + long min, long max, long avg, long sum2, long stdev); - void log(String intervalName, long now, long appNow, char typeTag, String compactName, long n, - long sum, long last, long min, long max, long avg, long sum2, long stdev, long[] h); + void log(String intervalName, long now, long appNow, char typeTag, String compactName, long n, long sum, long last, + long min, long max, long avg, long sum2, long stdev, long[] h); StatsIntradayLogger NULL = new Null(); class Null implements StatsIntradayLogger { @Override - public void log(String intervalName, long now, long appNow, char typeTag, - String compactName, long n, long sum, long last, long min, long max, long avg, - long sum2, long stdev) {} + public void log(String intervalName, long now, long appNow, char typeTag, String compactName, long n, long sum, + long last, long min, long max, long avg, long sum2, long stdev) {} @Override - public void log(String intervalName, long 
now, long appNow, char typeTag, - String compactName, long n, long sum, long last, long min, long max, long avg, - long sum2, long stdev, long[] h) {} + public void log(String intervalName, long now, long appNow, char typeTag, String compactName, long n, long sum, + long last, long min, long max, long avg, long sum2, long stdev, long[] h) {} } } diff --git a/Stats/src/main/java/io/deephaven/stats/StatsMemoryCollector.java b/Stats/src/main/java/io/deephaven/stats/StatsMemoryCollector.java index 7e9ef5af5a4..01a89c68b26 100644 --- a/Stats/src/main/java/io/deephaven/stats/StatsMemoryCollector.java +++ b/Stats/src/main/java/io/deephaven/stats/StatsMemoryCollector.java @@ -28,9 +28,8 @@ public class StatsMemoryCollector { private static final long NANOS = 1000000000; private static final long MICROS = 1000000; private static final long MILLIS = 1000; - private static final DirectMemoryStats DIRECT_MEMORY_STATS = - AccessController.doPrivileged((PrivilegedAction) JdkInternalsLoader - .getInstance()::getDirectMemoryStats); + private static final DirectMemoryStats DIRECT_MEMORY_STATS = AccessController + .doPrivileged((PrivilegedAction) JdkInternalsLoader.getInstance()::getDirectMemoryStats); private final MemoryMXBean memoryBean; private final Consumer alertFunction; @@ -39,10 +38,9 @@ public class StatsMemoryCollector { private static final Logger log = Logger.getLogger(StatsMemoryCollector.class); /* - * This used to use the ServerStatus.getJvmUptime(), which is really only interesting because it - * is the first time that the ServerStatus had an opportunity to call - * System.currentTimeMillis(). Because the StatsDriver is generally created, we'll just make our - * own uptime calcuation. + * This used to use the ServerStatus.getJvmUptime(), which is really only interesting because it is the first time + * that the ServerStatus had an opportunity to call System.currentTimeMillis(). 
Because the StatsDriver is generally + * created, we'll just make our own uptime calcuation. */ private static final long statsStartupTime = System.currentTimeMillis(); @@ -60,28 +58,24 @@ private static class PoolState { public PoolState(long seconds, MemoryPoolMXBean bean) { this.seconds = seconds; this.bean = bean; - this.used = - Stats.makeItem("Memory-Pool", bean.getName() + "-Used", State.FACTORY).getValue(); - this.committed = Stats - .makeItem("Memory-Pool", bean.getName() + "-Committed", State.FACTORY).getValue(); - this.max = - Stats.makeItem("Memory-Pool", bean.getName() + "-Max", State.FACTORY).getValue(); + this.used = Stats.makeItem("Memory-Pool", bean.getName() + "-Used", State.FACTORY).getValue(); + this.committed = Stats.makeItem("Memory-Pool", bean.getName() + "-Committed", State.FACTORY).getValue(); + this.max = Stats.makeItem("Memory-Pool", bean.getName() + "-Max", State.FACTORY).getValue(); } - public static KeyedObjectKey keyDef = - new KeyedObjectKey() { - public String getKey(PoolState v) { - return v.bean.getName(); - } + public static KeyedObjectKey keyDef = new KeyedObjectKey() { + public String getKey(PoolState v) { + return v.bean.getName(); + } - public int hashKey(String k) { - return k.hashCode(); - } + public int hashKey(String k) { + return k.hashCode(); + } - public boolean equalKey(String k, PoolState v) { - return k.equals(v.bean.getName()); - } - }; + public boolean equalKey(String k, PoolState v) { + return k.equals(v.bean.getName()); + } + }; public void update() { MemoryUsage u = bean.getUsage(); @@ -91,8 +85,7 @@ public void update() { } } - private final KeyedObjectHash pools = - new KeyedObjectHash(PoolState.keyDef); + private final KeyedObjectHash pools = new KeyedObjectHash(PoolState.keyDef); private static class CollectorState { private final long seconds; @@ -106,32 +99,29 @@ private static class CollectorState { private long lastCMSOccurrence = 0; private long lastCMSOccurrenceMailSent = 0; - public 
CollectorState(long seconds, GarbageCollectorMXBean bean, - Consumer alertFunction, BooleanSupplier enableCmsAlerts) { + public CollectorState(long seconds, GarbageCollectorMXBean bean, Consumer alertFunction, + BooleanSupplier enableCmsAlerts) { this.seconds = seconds; this.bean = bean; this.alertFunction = alertFunction; this.enableCmsAlerts = enableCmsAlerts; - this.count = - Stats.makeItem("Memory-GC", bean.getName() + "-Count", Counter.FACTORY).getValue(); - this.time = - Stats.makeItem("Memory-GC", bean.getName() + "-Time", Counter.FACTORY).getValue(); + this.count = Stats.makeItem("Memory-GC", bean.getName() + "-Count", Counter.FACTORY).getValue(); + this.time = Stats.makeItem("Memory-GC", bean.getName() + "-Time", Counter.FACTORY).getValue(); } - public static KeyedObjectKey keyDef = - new KeyedObjectKey() { - public String getKey(CollectorState v) { - return v.bean.getName(); - } + public static KeyedObjectKey keyDef = new KeyedObjectKey() { + public String getKey(CollectorState v) { + return v.bean.getName(); + } - public int hashKey(String k) { - return k.hashCode(); - } + public int hashKey(String k) { + return k.hashCode(); + } - public boolean equalKey(String k, CollectorState v) { - return k.equals(v.bean.getName()); - } - }; + public boolean equalKey(String k, CollectorState v) { + return k.equals(v.bean.getName()); + } + }; public void update() { long c = bean.getCollectionCount(); @@ -145,30 +135,24 @@ public void update() { if ("ConcurrentMarkSweep".equals(bean.getName())) { if (timeSample > 1000 && getStatsUptime() > 300000) { - // snooze this e-mail for one minute after we forcefully schedule a preopen - // gc + // snooze this e-mail for one minute after we forcefully schedule a preopen gc if (enableCmsAlerts.getAsBoolean()) { - log.log( - SimpleMailAppender.MAIL( - "Long GC detected -- " + System.getProperty("process.name")), - "GC Time=" + timeSample); + log.log(SimpleMailAppender + .MAIL("Long GC detected -- " + 
System.getProperty("process.name")), + "GC Time=" + timeSample); } } if (c - lastCount > 0 && getStatsUptime() > 600000) { final long now = System.currentTimeMillis(); - if (now - lastCMSOccurrence < 30000) { // twice in 30 seconds seems like a - // bit much - if (now - lastCMSOccurrenceMailSent > TimeConstants.HOUR) { // send at - // most one - // an hour + if (now - lastCMSOccurrence < 30000) { // twice in 30 seconds seems like a bit much + if (now - lastCMSOccurrenceMailSent > TimeConstants.HOUR) { // send at most one an hour SimpleDateFormat dateFormat = new SimpleDateFormat("HH:mm:ss.SSS"); - final String message = "Last CMS collection at " - + dateFormat.format(new Date(lastCMSOccurrence)); - log.log(SimpleMailAppender.MAIL( - "Rapid CMS detected -- " + System.getProperty("process.name")), - message); + final String message = + "Last CMS collection at " + dateFormat.format(new Date(lastCMSOccurrence)); + log.log(SimpleMailAppender + .MAIL("Rapid CMS detected -- " + System.getProperty("process.name")), message); if (alertFunction != null) { alertFunction.accept("Rapid CMS detected, " + message); } @@ -185,7 +169,7 @@ public void update() { } private final KeyedObjectHash collectors = - new KeyedObjectHash(CollectorState.keyDef); + new KeyedObjectHash(CollectorState.keyDef); private final long seconds; @@ -200,8 +184,7 @@ public void update() { private final Value directMemoryUsed; private final Value directMemoryMax; - StatsMemoryCollector(long interval, Consumer alertFunction, - BooleanSupplier cmsAlertEnabled) { + StatsMemoryCollector(long interval, Consumer alertFunction, BooleanSupplier cmsAlertEnabled) { this.alertFunction = alertFunction; this.cmsAlertEnabled = cmsAlertEnabled; this.seconds = interval / MILLIS; @@ -212,8 +195,7 @@ public void update() { this.heapMax = Stats.makeItem("Memory-Heap", "Max", State.FACTORY).getValue(); this.nonHeapUsed = Stats.makeItem("Memory-NonHeap", "Used", State.FACTORY).getValue(); - this.nonHeapCommitted = - 
Stats.makeItem("Memory-NonHeap", "Committed", State.FACTORY).getValue(); + this.nonHeapCommitted = Stats.makeItem("Memory-NonHeap", "Committed", State.FACTORY).getValue(); this.nonHeapMax = Stats.makeItem("Memory-NonHeap", "Max", State.FACTORY).getValue(); this.directMemoryUsed = Stats.makeItem("Memory-Direct", "Used", State.FACTORY).getValue(); @@ -236,8 +218,8 @@ public void update() { nonHeapMax.sample(nonHeap.getMax()); AccessController.doPrivileged((PrivilegedAction) () -> { - // Obviously we are using internal Sun APIs here. Worst case is Java 9 breaks this and - // we have to do it a different way + // Obviously we are using internal Sun APIs here. Worst case is Java 9 breaks this and we have to do it a + // different way directMemoryUsed.sample(DIRECT_MEMORY_STATS.getMemoryUsed()); directMemoryMax.sample(DIRECT_MEMORY_STATS.maxDirectMemory()); return null; @@ -254,8 +236,7 @@ public void update() { for (GarbageCollectorMXBean b : ManagementFactory.getGarbageCollectorMXBeans()) { CollectorState collector = collectors.get(b.getName()); if (collector == null) { - collectors.add( - collector = new CollectorState(seconds, b, alertFunction, cmsAlertEnabled)); + collectors.add(collector = new CollectorState(seconds, b, alertFunction, cmsAlertEnabled)); } collector.update(); } diff --git a/Stats/src/test/java/io/deephaven/stats/TestStatsCPUCollector.java b/Stats/src/test/java/io/deephaven/stats/TestStatsCPUCollector.java index 8dcb9f4e9b9..a36e339c28a 100644 --- a/Stats/src/test/java/io/deephaven/stats/TestStatsCPUCollector.java +++ b/Stats/src/test/java/io/deephaven/stats/TestStatsCPUCollector.java @@ -31,8 +31,7 @@ public void testProcFD() { long maxFdUpdates = maxFdItem.getValue().getN(); assertEquals(1, numFdsUpdates); assertEquals(1, maxFdUpdates); - System.out.println( - "TestStatsCPUCollector.testProcFd: " + numFds + " fds, maxFd = " + maxFd); + System.out.println("TestStatsCPUCollector.testProcFd: " + numFds + " fds, maxFd = " + maxFd); } else { // if not, 
we see nothing - but we also don't crash assertNull(numFdsItem); diff --git a/TableLogger/src/main/java/io/deephaven/tablelogger/Row.java b/TableLogger/src/main/java/io/deephaven/tablelogger/Row.java index 0bb5fe61247..360355d0f34 100644 --- a/TableLogger/src/main/java/io/deephaven/tablelogger/Row.java +++ b/TableLogger/src/main/java/io/deephaven/tablelogger/Row.java @@ -33,13 +33,13 @@ default RowSetter getSetter(String name, Class tClass) { final Class unboxedType; // noinspection unchecked if (tClass.isAssignableFrom(setter.getType()) - || (unboxedType = TypeUtils.getUnboxedType(tClass)) != null - && unboxedType.isAssignableFrom(setter.getType())) { + || (unboxedType = TypeUtils.getUnboxedType(tClass)) != null + && unboxedType.isAssignableFrom(setter.getType())) { // noinspection unchecked return (RowSetter) setter; } - throw new ClassCastException(name + " is of type " + setter.getType() + ", not of type " - + tClass + (unboxedType == null ? "" : " or " + unboxedType)); + throw new ClassCastException(name + " is of type " + setter.getType() + ", not of type " + tClass + + (unboxedType == null ? "" : " or " + unboxedType)); } /** @@ -52,9 +52,8 @@ default RowSetter getSetter(String name, Class tClass) { /** * Number of rows written out. * - * @deprecated {@link Row#size()} is somewhat ambiguously specified in the interface and its - * implementations. Some implementations keep track of all rows written. Others keep - * track of number of rows buffered. + * @deprecated {@link Row#size()} is somewhat ambiguously specified in the interface and its implementations. Some + * implementations keep track of all rows written. Others keep track of number of rows buffered. *

    * It seems safer to simply not allow the question to be asked. * @@ -66,8 +65,7 @@ default RowSetter getSetter(String name, Class tClass) { /** * Per-row transaction flags. * - * In Deephaven, a transaction is a group of rows that must be made visible to applications - * entirely, or not at all. + * In Deephaven, a transaction is a group of rows that must be made visible to applications entirely, or not at all. */ enum Flags { /** This row does not start or stop a transaction. */ @@ -83,24 +81,24 @@ enum Flags { void setFlags(Flags flags); /** - * For rows that are to be used with file managers that allow dynamic column partition - * selection, set the column partition value. + * For rows that are to be used with file managers that allow dynamic column partition selection, set the column + * partition value. * * @param columnPartitionValue the column partition value */ default void setColumnPartitionValue(final String columnPartitionValue) { throw new UnsupportedOperationException( - "Default Row implementation does not support setColumnPartitionValue()"); + "Default Row implementation does not support setColumnPartitionValue()"); } /** - * For rows that are to be used with file managers that allow dynamic column partition - * selection, retrieve the column partition value. + * For rows that are to be used with file managers that allow dynamic column partition selection, retrieve the + * column partition value. 
* * @return the previously-set column partition value */ default String getColumnPartitionValue() { throw new UnsupportedOperationException( - "Default Row implementation does not support getColumnPartitionValue()"); + "Default Row implementation does not support getColumnPartitionValue()"); } } diff --git a/TableLogger/src/main/java/io/deephaven/tablelogger/RowSetter.java b/TableLogger/src/main/java/io/deephaven/tablelogger/RowSetter.java index 798cf4b6ebf..c41ebab2968 100644 --- a/TableLogger/src/main/java/io/deephaven/tablelogger/RowSetter.java +++ b/TableLogger/src/main/java/io/deephaven/tablelogger/RowSetter.java @@ -5,8 +5,7 @@ package io.deephaven.tablelogger; /** - * Type-safe interface for setting cell values in individual columns of a row to allow a row to be - * written. + * Type-safe interface for setting cell values in individual columns of a row to allow a row to be written. */ public interface RowSetter { void set(T value); diff --git a/TableLogger/src/main/java/io/deephaven/tablelogger/TableLogger.java b/TableLogger/src/main/java/io/deephaven/tablelogger/TableLogger.java index 0806245f489..9b492d17e69 100644 --- a/TableLogger/src/main/java/io/deephaven/tablelogger/TableLogger.java +++ b/TableLogger/src/main/java/io/deephaven/tablelogger/TableLogger.java @@ -21,18 +21,16 @@ public interface TableLogger { boolean isClosed(); /** - * Close the logger and any writer in use. Users should quiesce all logging threads and invoke - * {@link #shutdown()} first in order to guarantee that all pending rows have been written to - * storage. + * Close the logger and any writer in use. Users should quiesce all logging threads and invoke {@link #shutdown()} + * first in order to guarantee that all pending rows have been written to storage. * * @throws IOException if an error occurred closing the logger. */ void close() throws IOException; /** - * Write all enqueued elements to the {@link TableWriter} and prevent further writes. 
This - * should be invoked before {@link #close()}. This must not be invoked if any threads might - * still try to log additional items. + * Write all enqueued elements to the {@link TableWriter} and prevent further writes. This should be invoked before + * {@link #close()}. This must not be invoked if any threads might still try to log additional items. */ void shutdown(); } diff --git a/TableLogger/src/main/java/io/deephaven/tablelogger/TableLoggerImpl2.java b/TableLogger/src/main/java/io/deephaven/tablelogger/TableLoggerImpl2.java index 306304faefd..6e78012d410 100644 --- a/TableLogger/src/main/java/io/deephaven/tablelogger/TableLoggerImpl2.java +++ b/TableLogger/src/main/java/io/deephaven/tablelogger/TableLoggerImpl2.java @@ -21,9 +21,9 @@ /** * Common parts of the generated TableLoggers. * - * It is "2" so that we can change the implementation details from TableLoggerImpl; and existing - * client code will still compile. Otherwise, there is a chicken and egg problem, with clients not - * being able to run the new TableLoggerFactory using modules that contain their logger classes. + * It is "2" so that we can change the implementation details from TableLoggerImpl; and existing client code will still + * compile. Otherwise, there is a chicken and egg problem, with clients not being able to run the new TableLoggerFactory + * using modules that contain their logger classes. 
*/ public abstract class TableLoggerImpl2 implements TableLogger { protected TableWriter writer; @@ -80,8 +80,7 @@ public void release() { } @Override - public final synchronized void init(final TableWriter tableWriter, final int queueSize) - throws IOException { + public final synchronized void init(final TableWriter tableWriter, final int queueSize) throws IOException { if (this.initialized) { return; } @@ -126,8 +125,7 @@ public final void shutdown() { private static final PrintStream err = PrintStreamGlobals.getErr(); private void exit(Throwable t) { - AsyncSystem.exitCaught(Thread.currentThread(), t, EXIT_STATUS, err, - "Unable to write log entry"); + AsyncSystem.exitCaught(Thread.currentThread(), t, EXIT_STATUS, err, "Unable to write log entry"); } protected final void flush(final T setter) { @@ -136,9 +134,9 @@ protected final void flush(final T setter) { } catch (IOException x) { if (isClosed()) { err.println(String.format( - "TableLogger.flush: caught exception in thread %s. Unable to write log entry. " - + "Logger already closed, not invoking shutdown.", - Thread.currentThread().getName())); + "TableLogger.flush: caught exception in thread %s. Unable to write log entry. " + + "Logger already closed, not invoking shutdown.", + Thread.currentThread().getName())); x.printStackTrace(err); } else { exit(x); diff --git a/TableLogger/src/main/java/io/deephaven/tablelogger/TableWriter.java b/TableLogger/src/main/java/io/deephaven/tablelogger/TableWriter.java index cbfbdea83d4..1bf95a28adf 100644 --- a/TableLogger/src/main/java/io/deephaven/tablelogger/TableWriter.java +++ b/TableLogger/src/main/java/io/deephaven/tablelogger/TableWriter.java @@ -50,8 +50,7 @@ public Class getType() { /** * {@inheritDoc} *

    - * The implementation is likely to delegate to {@link Row#getSetter(String)} in a default Row - * instance. + * The implementation is likely to delegate to {@link Row#getSetter(String)} in a default Row instance. */ @Override // Row RowSetter getSetter(String name); @@ -59,8 +58,7 @@ public Class getType() { /** * {@inheritDoc} *

    - * The implementation is likely to delegate to {@link Row#getSetter(String, Class)} in a default - * Row instance. + * The implementation is likely to delegate to {@link Row#getSetter(String, Class)} in a default Row instance. */ @Override // Row default RowSetter getSetter(@NotNull final String name, @NotNull final Class tClass) { @@ -71,16 +69,15 @@ default RowSetter getSetter(@NotNull final String name, @NotNull final Cl /** * {@inheritDoc} *

    - * The implementation is likely to delegate to {@link Row#setFlags(Flags)} in a default Row - * instance. + * The implementation is likely to delegate to {@link Row#setFlags(Flags)} in a default Row instance. */ @Override // Row void setFlags(Row.Flags flags); /** - * Get a writer for a Row entries. This is likely to be newly created, so callers should cache - * this value. In practice, TableWriter implementations generally cache the result of the first - * call to this method as a primary writer. + * Get a writer for a Row entries. This is likely to be newly created, so callers should cache this value. In + * practice, TableWriter implementations generally cache the result of the first call to this method as a primary + * writer. * * @return a Row, likely newly created */ @@ -91,8 +88,7 @@ default RowSetter getSetter(@NotNull final String name, @NotNull final Cl *

    * The implementation is likely to delegate to {@link Row#writeRow()} in a default Row instance. * - * @implNote This method is used as part of the import portion of the table recording process in - * TableListeners + * @implNote This method is used as part of the import portion of the table recording process in TableListeners */ @Override // Row void writeRow() throws IOException; diff --git a/TableLogger/src/main/java/io/deephaven/tablelogger/WritableRowContainer.java b/TableLogger/src/main/java/io/deephaven/tablelogger/WritableRowContainer.java index 0a789abd17c..be495553c31 100644 --- a/TableLogger/src/main/java/io/deephaven/tablelogger/WritableRowContainer.java +++ b/TableLogger/src/main/java/io/deephaven/tablelogger/WritableRowContainer.java @@ -23,9 +23,8 @@ public interface WritableRowContainer { void writeRow() throws IOException; /** - * Indicate that the underlying row has been written and should be reclaimed. This may be a - * no-op for many storage types, however it enables additional layers of buffering where it may - * be beneficial. + * Indicate that the underlying row has been written and should be reclaimed. This may be a no-op for many storage + * types, however it enables additional layers of buffering where it may be beneficial. 
*/ void release(); } diff --git a/Util/src/main/java/io/deephaven/UncheckedDeephavenException.java b/Util/src/main/java/io/deephaven/UncheckedDeephavenException.java index 0fc9c1a49b2..130c7b865a0 100644 --- a/Util/src/main/java/io/deephaven/UncheckedDeephavenException.java +++ b/Util/src/main/java/io/deephaven/UncheckedDeephavenException.java @@ -17,7 +17,7 @@ public UncheckedDeephavenException(Throwable cause) { } public UncheckedDeephavenException(String message, Throwable cause, boolean enableSuppression, - boolean writableStackTrace) { + boolean writableStackTrace) { super(message, cause, enableSuppression, writableStackTrace); } diff --git a/Util/src/main/java/io/deephaven/stats/GcEventStats.java b/Util/src/main/java/io/deephaven/stats/GcEventStats.java index fad09c26886..f2f148d6b59 100644 --- a/Util/src/main/java/io/deephaven/stats/GcEventStats.java +++ b/Util/src/main/java/io/deephaven/stats/GcEventStats.java @@ -18,16 +18,14 @@ * Gc event stats provides the stats {@code Memory-GC.Reclaimed} and {@code Memory.Allocated}. * *

    - * Unfortunately, the JVM doesn't seem to natively provide access to how much data has been - * allocated, so we are using the GC before and after values as a proxy for that. (The JVM *does* - * provide it on a per-thread basis though, + * Unfortunately, the JVM doesn't seem to natively provide access to how much data has been allocated, so we are using + * the GC before and after values as a proxy for that. (The JVM *does* provide it on a per-thread basis though, * {@link com.sun.management.ThreadMXBean#getThreadAllocatedBytes(long)}.) * *

    - * This implementation relies on - * {@link GarbageCollectionNotificationInfo#GARBAGE_COLLECTION_NOTIFICATION} to get stats around the - * memory usage before and after GCs. This means that these statistics are somewhat less useful when - * GC events are sparse. + * This implementation relies on {@link GarbageCollectionNotificationInfo#GARBAGE_COLLECTION_NOTIFICATION} to get stats + * around the memory usage before and after GCs. This means that these statistics are somewhat less useful when GC + * events are sparse. */ class GcEventStats implements NotificationListener { @@ -63,16 +61,15 @@ public void handleNotification(Notification notification, Object handback) { if (!GARBAGE_COLLECTION_NOTIFICATION.equals(notification.getType())) { return; } - handleGCInfo( - GarbageCollectionNotificationInfo.from((CompositeData) notification.getUserData())); + handleGCInfo(GarbageCollectionNotificationInfo.from((CompositeData) notification.getUserData())); } // synchronized *shouldn't* be necessary, but doesn't hurt. private synchronized void handleGCInfo(GarbageCollectionNotificationInfo info) { - final long beforeGc = info.getGcInfo().getMemoryUsageBeforeGc().values().stream() - .mapToLong(MemoryUsage::getUsed).sum(); - final long afterGc = info.getGcInfo().getMemoryUsageAfterGc().values().stream() - .mapToLong(MemoryUsage::getUsed).sum(); + final long beforeGc = + info.getGcInfo().getMemoryUsageBeforeGc().values().stream().mapToLong(MemoryUsage::getUsed).sum(); + final long afterGc = + info.getGcInfo().getMemoryUsageAfterGc().values().stream().mapToLong(MemoryUsage::getUsed).sum(); final long reclaimedBytes = beforeGc - afterGc; // note: this *CAN* be negative reclaimed.increment(reclaimedBytes); // There are potentially other interesting stats that can be derived here in the future. 
The diff --git a/Util/src/main/java/io/deephaven/test/types/OutOfBandTest.java b/Util/src/main/java/io/deephaven/test/types/OutOfBandTest.java index 41cda2454e8..45a6a11f358 100644 --- a/Util/src/main/java/io/deephaven/test/types/OutOfBandTest.java +++ b/Util/src/main/java/io/deephaven/test/types/OutOfBandTest.java @@ -1,8 +1,8 @@ package io.deephaven.test.types; /** - * An out-of-band test is a test that is not typically run on a commit-by-commit basis; usually - * because it is expensive and will delay the typical CI feedback cycle. + * An out-of-band test is a test that is not typically run on a commit-by-commit basis; usually because it is expensive + * and will delay the typical CI feedback cycle. */ @SuppressWarnings("JavadocReference") public interface OutOfBandTest { diff --git a/Util/src/main/java/io/deephaven/test/types/ParallelTest.java b/Util/src/main/java/io/deephaven/test/types/ParallelTest.java index 5f22006b9fc..d5fff4da704 100644 --- a/Util/src/main/java/io/deephaven/test/types/ParallelTest.java +++ b/Util/src/main/java/io/deephaven/test/types/ParallelTest.java @@ -3,18 +3,15 @@ /** * A JUnit category for tests that can be run in parallel. 
* - * To convert a JUnit 3 base test case (henceforth TestType) to JUnit 4, you must: a) annotate any - * testMethods() in TestType with @Test (junit 3 selects test methods based on - * nameStartsWith("test")) b) override the JUnit 3 test case into a new supertype (henceforth: - * AdapterType) b-1) [Required] Add a public no-op method beginning with text `test` in your - * AdapterType, so it is a "valid JUnit 3 test" b-2) [Recommended] Add methods with @Before - * and @After to call setUp() / tearDown() on the AdapterType field c) create a field assigned to an - * instance of AdapterType (you may want simple instance field, an @Rule instance field, or - * an @ClassRule static field) c-1) If your AdapterType has setUp/tearDown, and you are not - * using @Rule or @ClassRule, annotate methods in your TestType: Add @Before/@After in TestType - * which calls this.adapter.setUp()/tearDown() as appropriate. d) add an appropriate junit runner. - * If you do not need anything fancy, use: - * {@code @RunWith(org.junit.runners.BlockJUnit4ClassRunner.class)} + * To convert a JUnit 3 base test case (henceforth TestType) to JUnit 4, you must: a) annotate any testMethods() in + * TestType with @Test (junit 3 selects test methods based on nameStartsWith("test")) b) override the JUnit 3 test case + * into a new supertype (henceforth: AdapterType) b-1) [Required] Add a public no-op method beginning with text `test` + * in your AdapterType, so it is a "valid JUnit 3 test" b-2) [Recommended] Add methods with @Before and @After to call + * setUp() / tearDown() on the AdapterType field c) create a field assigned to an instance of AdapterType (you may want + * simple instance field, an @Rule instance field, or an @ClassRule static field) c-1) If your AdapterType has + * setUp/tearDown, and you are not using @Rule or @ClassRule, annotate methods in your TestType: Add @Before/@After in + * TestType which calls this.adapter.setUp()/tearDown() as appropriate. 
d) add an appropriate junit runner. If you do + * not need anything fancy, use: {@code @RunWith(org.junit.runners.BlockJUnit4ClassRunner.class)} * * If you extend and use our existing JUnit3 test fixtures like LiveTableTestCase, you can use * {@link io.deephaven.test.junit4.JUnit4LiveTableTestCase} as an example. diff --git a/Util/src/main/java/io/deephaven/test/types/SerialTest.java b/Util/src/main/java/io/deephaven/test/types/SerialTest.java index 04aac51c09f..7e9156a8ea5 100644 --- a/Util/src/main/java/io/deephaven/test/types/SerialTest.java +++ b/Util/src/main/java/io/deephaven/test/types/SerialTest.java @@ -3,39 +3,34 @@ /** * A JUnit category for tests that must be run in serial. * - * If your test launches N or more work-heavy threads, you should be using SerialTest, since you'll - * likely be pegging your cpu. + * If your test launches N or more work-heavy threads, you should be using SerialTest, since you'll likely be pegging + * your cpu. * - * That is, running a thread hog during the parallel test task just causes resource starvation, and - * large system-wide pauses while contending to get execution time for a completely unrelated thread - * on the system. + * That is, running a thread hog during the parallel test task just causes resource starvation, and large system-wide + * pauses while contending to get execution time for a completely unrelated thread on the system. 
* - * The 'testSerial' task created for these tests additionally have .mustRunAfter semantics, whereby, - * in order of script evaluation, each testSerial.mustRunAfter allOther"testSerial"Tests, as well as - * .mustRunAfter allOtherTestTasksNotNamed"testSerial"; that is, all testSerial tasks run after all - * other Test tasks are complete, and then they take turns running one after another (see - * TestTools.groovy, #addDbTest) + * The 'testSerial' task created for these tests additionally have .mustRunAfter semantics, whereby, in order of script + * evaluation, each testSerial.mustRunAfter allOther"testSerial"Tests, as well as .mustRunAfter + * allOtherTestTasksNotNamed"testSerial"; that is, all testSerial tasks run after all other Test tasks are complete, and + * then they take turns running one after another (see TestTools.groovy, #addDbTest) * * ALL BENCHMARKS OR HEAVY-HITTER TESTS SHOULD USE @Category(SerialTest.class); * * Until better automation is delivered, you may need to edit your ModName.gradle file and add * TestTools.addDbTest('Serial', false) to have this task created for you. * - * Include: `dependencies { testCompile TestTools.projectDependency(project, 'Util') }` to add - * SerialTest to your classpath if it is not available. 
- * - * To convert a JUnit 3 base test case (henceforth TestType) to JUnit 4, you must: a) annotate any - * testMethods() in TestType with @Test (junit 3 selects test methods based on - * nameStartsWith("test")) b) override the JUnit 3 test case into a new supertype (henceforth: - * AdapterType) b-1) [Required] Add a public no-op method beginning with text `test` in your - * AdapterType, so it is a "valid JUnit 3 test" b-2) [Recommended] Add methods with @Before - * and @After to call setUp() / tearDown() on the AdapterType field c) create a field assigned to an - * instance of AdapterType (you may want simple instance field, an @Rule instance field, or - * an @ClassRule static field) c-1) If your AdapterType has setUp/tearDown, and you are not - * using @Rule or @ClassRule, annotate methods in your TestType: Add @Before/@After in TestType - * which calls this.adapter.setUp()/tearDown() as appropriate. d) add an appropriate junit runner. - * If you do not need anything fancy, use: - * {@code @RunWith(org.junit.runners.BlockJUnit4ClassRunner.class)} + * Include: `dependencies { testCompile TestTools.projectDependency(project, 'Util') }` to add SerialTest to your + * classpath if it is not available. 
+ * + * To convert a JUnit 3 base test case (henceforth TestType) to JUnit 4, you must: a) annotate any testMethods() in + * TestType with @Test (junit 3 selects test methods based on nameStartsWith("test")) b) override the JUnit 3 test case + * into a new supertype (henceforth: AdapterType) b-1) [Required] Add a public no-op method beginning with text `test` + * in your AdapterType, so it is a "valid JUnit 3 test" b-2) [Recommended] Add methods with @Before and @After to call + * setUp() / tearDown() on the AdapterType field c) create a field assigned to an instance of AdapterType (you may want + * simple instance field, an @Rule instance field, or an @ClassRule static field) c-1) If your AdapterType has + * setUp/tearDown, and you are not using @Rule or @ClassRule, annotate methods in your TestType: Add @Before/@After in + * TestType which calls this.adapter.setUp()/tearDown() as appropriate. d) add an appropriate junit runner. If you do + * not need anything fancy, use: {@code @RunWith(org.junit.runners.BlockJUnit4ClassRunner.class)} * * If you extend and use our existing JUnit3 test fixtures like LiveTableTestCase, you can use * {@link io.deephaven.test.junit4.JUnit4LiveTableTestCase} as an example. diff --git a/Util/src/main/java/io/deephaven/util/BooleanUtils.java b/Util/src/main/java/io/deephaven/util/BooleanUtils.java index e260dfc6d15..49c18852fb3 100644 --- a/Util/src/main/java/io/deephaven/util/BooleanUtils.java +++ b/Util/src/main/java/io/deephaven/util/BooleanUtils.java @@ -50,6 +50,6 @@ public static Boolean byteAsBoolean(final byte byteValue) { */ public static byte booleanAsByte(final Boolean booleanValue) { return booleanValue == null ? NULL_BOOLEAN_AS_BYTE - : booleanValue ? TRUE_BOOLEAN_AS_BYTE : FALSE_BOOLEAN_AS_BYTE; + : booleanValue ? 
TRUE_BOOLEAN_AS_BYTE : FALSE_BOOLEAN_AS_BYTE; } } diff --git a/Util/src/main/java/io/deephaven/util/BridgingLogHandler.java b/Util/src/main/java/io/deephaven/util/BridgingLogHandler.java index 77ed75aeab4..95583204ba8 100644 --- a/Util/src/main/java/io/deephaven/util/BridgingLogHandler.java +++ b/Util/src/main/java/io/deephaven/util/BridgingLogHandler.java @@ -23,8 +23,8 @@ public final class BridgingLogHandler extends Handler { static { // Tools that don't want this in their output should set this to false. final String propValue = System.getProperty("BridgingLogHandler.flushPendingOnShutdown"); - flushPendingOnShutdown = (propValue == null) ? FLUSH_PENDING_ON_SHUTDOWN_DEFAULT - : Boolean.parseBoolean(propValue); + flushPendingOnShutdown = + (propValue == null) ? FLUSH_PENDING_ON_SHUTDOWN_DEFAULT : Boolean.parseBoolean(propValue); } public static synchronized void setFlushPendingOnShutdown(final boolean v) { @@ -41,13 +41,13 @@ private static class PendingLogRecord { public final String msg; public PendingLogRecord( - final Level level, - final long timeMillis, - final Throwable throwable, - final String sourceClassName, - final String sourceMethodName, - final int threadId, - final String msg) { + final Level level, + final long timeMillis, + final Throwable throwable, + final String sourceClassName, + final String sourceMethodName, + final int threadId, + final String msg) { this.level = level; this.throwable = throwable; this.timeMillis = timeMillis; @@ -89,54 +89,51 @@ private static void pushRecord(final LogRecord logRecord) { if (pending == null) { pending = new ArrayList<>(); if (flushPendingOnShutdown) { - Runtime.getRuntime().addShutdownHook( - new Thread(BridgingLogHandler::pushPendingToStdout)); + Runtime.getRuntime().addShutdownHook(new Thread(BridgingLogHandler::pushPendingToStdout)); } } pending.add(new PendingLogRecord( - logRecord.getLevel(), - logRecord.getMillis(), - logRecord.getThrown(), - sourceClassName, - sourceMethodName, - 
logRecord.getThreadID(), - logRecord.getMessage())); + logRecord.getLevel(), + logRecord.getMillis(), + logRecord.getThrown(), + sourceClassName, + sourceMethodName, + logRecord.getThreadID(), + logRecord.getMessage())); return; } } } - final LogEntry entry = - logEntry(log, logRecord.getLevel(), logRecord.getMillis(), logRecord.getThrown()); + final LogEntry entry = logEntry(log, logRecord.getLevel(), logRecord.getMillis(), logRecord.getThrown()); appendMsg(entry, sourceClassName, sourceMethodName, - logRecord.getThreadID(), logRecord.getMessage()); + logRecord.getThreadID(), logRecord.getMessage()); entry.endl(); } public static void appendPendingLogRecord( - final LogOutput logOutput, final PendingLogRecord pendingLogRecord) { + final LogOutput logOutput, final PendingLogRecord pendingLogRecord) { appendMsg(logOutput, - pendingLogRecord.sourceClassName, - pendingLogRecord.sourceMethodName, - pendingLogRecord.threadId, - pendingLogRecord.msg); + pendingLogRecord.sourceClassName, + pendingLogRecord.sourceMethodName, + pendingLogRecord.threadId, + pendingLogRecord.msg); } private static void appendMsg(LogOutput logOutput, - final String sourceClassName, final String sourceMethodName, final int threadId, - final String msg) { + final String sourceClassName, final String sourceMethodName, final int threadId, final String msg) { logOutput.append("[") - .append(sourceClassName) - .append(":") - .append(sourceMethodName) - .append(":tid=") - .append(threadId) - .append("] ") - .append(msg); + .append(sourceClassName) + .append(":") + .append(sourceMethodName) + .append(":tid=") + .append(threadId) + .append("] ") + .append(msg); } private static LogEntry logEntry( - final io.deephaven.io.logger.Logger log, final Level level, final long timeMillis, - final Throwable throwable) { + final io.deephaven.io.logger.Logger log, final Level level, final long timeMillis, + final Throwable throwable) { return log.getEntry(mapLevel(level), 1000 * timeMillis, throwable); } diff 
--git a/Util/src/main/java/io/deephaven/util/BufferUtil.java b/Util/src/main/java/io/deephaven/util/BufferUtil.java index 455f26d2bb9..53ad1168ed5 100644 --- a/Util/src/main/java/io/deephaven/util/BufferUtil.java +++ b/Util/src/main/java/io/deephaven/util/BufferUtil.java @@ -14,38 +14,36 @@ public class BufferUtil { /** *

    - * Allocate a new ByteBuffer larger than the supplied buffer. This is generally used when - * dynamically resizing an output buffer. + * Allocate a new ByteBuffer larger than the supplied buffer. This is generally used when dynamically resizing an + * output buffer. *

    - * The resulting buffer's size will be a power-of-two at least double the supplied buffer's size - * and at least the specified minimum size, unless constrained by {@link Integer#MAX_VALUE}. + * The resulting buffer's size will be a power-of-two at least double the supplied buffer's size and at least the + * specified minimum size, unless constrained by {@link Integer#MAX_VALUE}. *

    * The resulting buffer will be direct if and only if the supplied buffer was direct. *

    - * The contents of the supplied buffer are copied into the result buffer as in - * {@link ByteBuffer#put(ByteBuffer)}.
    + * The contents of the supplied buffer are copied into the result buffer as in {@link ByteBuffer#put(ByteBuffer)}. + *
    * See also io.deephaven.tablelogger.AbstractBinaryStoreWriter#ensureSpace * * @param buffer The buffer to grow (and copy from) * @param minimumSize The minimum size for the result buffer * @return The new buffer, including contents of buffer */ - public static ByteBuffer reallocateBuffer(@NotNull final ByteBuffer buffer, - final int minimumSize) { + public static ByteBuffer reallocateBuffer(@NotNull final ByteBuffer buffer, final int minimumSize) { final int newCapacity = (int) Math.min(Integer.MAX_VALUE, - 1L << MathUtil.ceilLog2(Math.max(minimumSize, (long) buffer.capacity() << 1))); - final ByteBuffer newBuffer = buffer.isDirect() ? ByteBuffer.allocateDirect(newCapacity) - : ByteBuffer.allocate(newCapacity); + 1L << MathUtil.ceilLog2(Math.max(minimumSize, (long) buffer.capacity() << 1))); + final ByteBuffer newBuffer = + buffer.isDirect() ? ByteBuffer.allocateDirect(newCapacity) : ByteBuffer.allocate(newCapacity); newBuffer.put(buffer); return newBuffer; } /** - * Return a buffer with at least requiredSize remaining capacity. The provided buffer will be - * copied to the new buffer, and might be the buffer returned. The new buffer's limit will be - * unchanged if the buffer is not reallocated, and equal to the new capacity if it is - * reallocated. The new buffer will be allocated with allocateDirect() if the original buffer is - * direct, else with allocate(). + * Return a buffer with at least requiredSize remaining capacity. The provided buffer will be copied to the new + * buffer, and might be the buffer returned. The new buffer's limit will be unchanged if the buffer is not + * reallocated, and equal to the new capacity if it is reallocated. The new buffer will be allocated with + * allocateDirect() if the original buffer is direct, else with allocate(). 
* * @param dataBuffer a byte buffer in write mode * @param requiredSize additional capacity needed @@ -54,25 +52,22 @@ public static ByteBuffer reallocateBuffer(@NotNull final ByteBuffer buffer, public static ByteBuffer ensureSpace(ByteBuffer dataBuffer, int requiredSize) { final int remainingCapacity = dataBuffer.capacity() - dataBuffer.position(); if (remainingCapacity < requiredSize) { - final int newCapacity = Math.max( - (dataBuffer.capacity() > 1024 * 1024) ? dataBuffer.capacity() + 1024 * 1024 - : dataBuffer.capacity() * 2, - dataBuffer.capacity() + (requiredSize - remainingCapacity)); + final int newCapacity = Math.max((dataBuffer.capacity() > 1024 * 1024) ? dataBuffer.capacity() + 1024 * 1024 + : dataBuffer.capacity() * 2, dataBuffer.capacity() + (requiredSize - remainingCapacity)); final ByteBuffer newBuffer = - dataBuffer.isDirect() ? ByteBuffer.allocateDirect(newCapacity) - : ByteBuffer.allocate(newCapacity); + dataBuffer.isDirect() ? ByteBuffer.allocateDirect(newCapacity) : ByteBuffer.allocate(newCapacity); dataBuffer.flip(); newBuffer.put(dataBuffer); dataBuffer = newBuffer; } - Require.geq(dataBuffer.capacity() - dataBuffer.position(), - "dataBuffer.capacity() - dataBuffer.position()", requiredSize, "requiredSize"); + Require.geq(dataBuffer.capacity() - dataBuffer.position(), "dataBuffer.capacity() - dataBuffer.position()", + requiredSize, "requiredSize"); return dataBuffer; } /** - * Encode the given string as UTF_8 and write the size and data to the given buffer. The buffer - * might be reallocated using {@link BufferUtil#ensureSpace} if necessary. + * Encode the given string as UTF_8 and write the size and data to the given buffer. The buffer might be reallocated + * using {@link BufferUtil#ensureSpace} if necessary. * * @param dataBuffer Write to this ByteBuffer. * @param value Encode this string. 
diff --git a/Util/src/main/java/io/deephaven/util/EncodingUtil.java b/Util/src/main/java/io/deephaven/util/EncodingUtil.java index d85337d4b7c..afff08ce590 100644 --- a/Util/src/main/java/io/deephaven/util/EncodingUtil.java +++ b/Util/src/main/java/io/deephaven/util/EncodingUtil.java @@ -26,8 +26,7 @@ public class EncodingUtil { * @throws IllegalArgumentException if there is no associated encoding */ @NotNull - public static EncodingInfo getEncodingInfoForCharset(@NotNull Charset charSet) - throws IllegalArgumentException { + public static EncodingInfo getEncodingInfoForCharset(@NotNull Charset charSet) throws IllegalArgumentException { return getEncodingInfoForCharset(charSet.name()); } @@ -40,15 +39,14 @@ public static EncodingInfo getEncodingInfoForCharset(@NotNull Charset charSet) */ public static EncodingInfo getEncodingInfoForCharset(@NotNull String charsetName) { return Stream.of(EncodingInfo.values()) - .filter(info -> info.getCharset().name().equals(charsetName)) - .findFirst() - .orElseThrow(() -> new IllegalArgumentException("No EncodingInfo for " + charsetName)); + .filter(info -> info.getCharset().name().equals(charsetName)) + .findFirst() + .orElseThrow(() -> new IllegalArgumentException("No EncodingInfo for " + charsetName)); } /** - * Get an array containing the possible {@link ByteOrderMark byte order marks} that could be - * present within a file of the specified encoding. This is intended for use with - * {@link org.apache.commons.io.input.BOMInputStream} + * Get an array containing the possible {@link ByteOrderMark byte order marks} that could be present within a file + * of the specified encoding. This is intended for use with {@link org.apache.commons.io.input.BOMInputStream} * * @param encoding The encoding. * @return An array containing the possible {@link ByteOrderMark BOMs} for the encoding. 
@@ -70,31 +68,28 @@ public static ByteOrderMark[] getBOMsForEncoding(EncodingInfo encoding) { } /** - * Encode the given string in UTF-8 format into the given ByteBuffer. The string is encoded as - * an int length followed by the encoded bytes. + * Encode the given string in UTF-8 format into the given ByteBuffer. The string is encoded as an int length + * followed by the encoded bytes. * - * @param destination a ByteBuffer in which to encode the string. The buffer must be big enough - * for the encoded string. + * @param destination a ByteBuffer in which to encode the string. The buffer must be big enough for the encoded + * string. * @param value the String value to encode. * @throws BufferOverflowException if the destination isn't big enough. */ - public static void putUtf8String(@NotNull final ByteBuffer destination, - @NotNull final String value) { + public static void putUtf8String(@NotNull final ByteBuffer destination, @NotNull final String value) { final int initialPosition = destination.position(); destination.position(initialPosition + Integer.BYTES); final CharsetEncoder encoder = EncodingInfo.UTF_8.getEncoder().reset(); if (!encoder.encode(CharBuffer.wrap(value), destination, true).isUnderflow() - || !encoder.flush(destination).isUnderflow()) { + || !encoder.flush(destination).isUnderflow()) { throw new BufferOverflowException(); } - destination.putInt(initialPosition, - destination.position() - initialPosition - Integer.BYTES); + destination.putInt(initialPosition, destination.position() - initialPosition - Integer.BYTES); } /** - * Extract a UTF-8 encoded string from the given buffer. The buffer must be positioned at the - * start of the encoding, which is an int length followed by the UTF-8 encoded bytes. The buffer - * is advanced to the end of the string. + * Extract a UTF-8 encoded string from the given buffer. The buffer must be positioned at the start of the encoding, + * which is an int length followed by the UTF-8 encoded bytes. 
The buffer is advanced to the end of the string. * * @param source a ByteBuffer positioned at the string encoded as length + UTF-8 encoded bytes. * @return a new String extracted from the buffer diff --git a/Util/src/main/java/io/deephaven/util/ExceptionDetails.java b/Util/src/main/java/io/deephaven/util/ExceptionDetails.java index 45ab65be119..3e1b111cf68 100644 --- a/Util/src/main/java/io/deephaven/util/ExceptionDetails.java +++ b/Util/src/main/java/io/deephaven/util/ExceptionDetails.java @@ -51,15 +51,14 @@ public String toString() { } /** - * Returns true if exceptionDetails is not null and the result of applying testToApply on - * exceptionDetails is true + * Returns true if exceptionDetails is not null and the result of applying testToApply on exceptionDetails is true * * @param exceptionDetails the exception to test * @param testToApply the test to apply * @return true if exceptionDetails is not null and testToApply returns true */ public static boolean testExceptionDetails(@Nullable final ExceptionDetails exceptionDetails, - @NotNull final Predicate testToApply) { + @NotNull final Predicate testToApply) { return exceptionDetails != null && testToApply.test(exceptionDetails); } } diff --git a/Util/src/main/java/io/deephaven/util/ExpandingThreadPoolExecutorFactory.java b/Util/src/main/java/io/deephaven/util/ExpandingThreadPoolExecutorFactory.java index d964bddd50c..1c7f13d34d8 100644 --- a/Util/src/main/java/io/deephaven/util/ExpandingThreadPoolExecutorFactory.java +++ b/Util/src/main/java/io/deephaven/util/ExpandingThreadPoolExecutorFactory.java @@ -15,19 +15,17 @@ import java.util.concurrent.atomic.AtomicInteger; /** - * Creates a ThreadPoolExecutor which can then be used to submit or execute tasks. This is intended - * for cases where a relatively small number of threads can handle most circumstances, but - * occasional abnormal events may exceed expectations. 
The executor has the following - * characteristics: + * Creates a ThreadPoolExecutor which can then be used to submit or execute tasks. This is intended for cases where a + * relatively small number of threads can handle most circumstances, but occasional abnormal events may exceed + * expectations. The executor has the following characteristics: *

      *
    • Starting core and maximum thread pool sizes defined at creation time
    • - *
    • If an attempted execution exceeds the maximum number of allowed executor threads, a new - * Thread will be created dynamically instead of generating an exception
    • + *
    • If an attempted execution exceeds the maximum number of allowed executor threads, a new Thread will be created + * dynamically instead of generating an exception
    • *
    * If the executor has been shut down, any excess events will be discarded. * - * To create one of these executors, use - * {@link ExpandingThreadPoolExecutorFactory#createThreadPoolExecutor}. + * To create one of these executors, use {@link ExpandingThreadPoolExecutorFactory#createThreadPoolExecutor}. */ public class ExpandingThreadPoolExecutorFactory { @@ -35,11 +33,10 @@ public class ExpandingThreadPoolExecutorFactory { private ExpandingThreadPoolExecutorFactory() {} /** - * Class to handle rejection events from a ThreadPoolExecutor by creating a new Thread to run - * the task, unless the executor has been shut down, in which case the task is discarded. + * Class to handle rejection events from a ThreadPoolExecutor by creating a new Thread to run the task, unless the + * executor has been shut down, in which case the task is discarded. */ - private static class RejectedExecutionPolicy - implements RejectedExecutionHandler, LogOutputAppendable { + private static class RejectedExecutionPolicy implements RejectedExecutionHandler, LogOutputAppendable { final Logger log; final String executorName; final String threadName; @@ -53,9 +50,9 @@ private static class RejectedExecutionPolicy * @param threadName the name prefix for new threads */ private RejectedExecutionPolicy(final Logger log, - final String executorName, - final String threadName, - final AtomicInteger executorThreadNumber) { + final String executorName, + final String threadName, + final AtomicInteger executorThreadNumber) { this.log = log; this.executorName = executorName; this.threadName = threadName; @@ -63,8 +60,7 @@ private RejectedExecutionPolicy(final Logger log, } /** - * Executes task r in a new thread, unless the executor has been shut down, in which case - * the task is discarded. + * Executes task r in a new thread, unless the executor has been shut down, in which case the task is discarded. 
* * @param r the runnable task requested to be executed * @param e the executor attempting to execute this task @@ -73,8 +69,8 @@ private RejectedExecutionPolicy(final Logger log, public void rejectedExecution(Runnable r, ThreadPoolExecutor e) { if (!e.isShutdown()) { final String newThreadName = threadName + executorThreadNumber.getAndIncrement(); - log.warn().append("Executor has run out of threads for ").append(this) - .append(", creating new thread ").append(newThreadName).endl(); + log.warn().append("Executor has run out of threads for ").append(this).append(", creating new thread ") + .append(newThreadName).endl(); newDaemonThread(r, newThreadName).start(); } } @@ -96,28 +92,27 @@ private static Thread newDaemonThread(Runnable r, final String name) { * {@link ExpandingThreadPoolExecutorFactory}. * * @param log a Logger to log messages - * @param corePoolSize the core pool size (the executor will use this value for the initial core - * and maximum pool sizes) + * @param corePoolSize the core pool size (the executor will use this value for the initial core and maximum pool + * sizes) * @param keepAliveMinutes the number of minutes to keep alive core threads * @param executorName the name of the executor, used when logging dynamic thread creation * @param poolThreadNamePrefix the prefix for thread pool threads - * @param dynamicThreadNamePrefix the prefix for dynamic (overflow) threads created when the - * maximum number of pool threads is exceeded + * @param dynamicThreadNamePrefix the prefix for dynamic (overflow) threads created when the maximum number of pool + * threads is exceeded */ public static ThreadPoolExecutor createThreadPoolExecutor(final Logger log, - final int corePoolSize, - final int keepAliveMinutes, - final String executorName, - final String poolThreadNamePrefix, - final String dynamicThreadNamePrefix) { + final int corePoolSize, + final int keepAliveMinutes, + final String executorName, + final String poolThreadNamePrefix, + final String 
dynamicThreadNamePrefix) { final AtomicInteger executorThreadNumber = new AtomicInteger(1); return new ThreadPoolExecutor(corePoolSize, - corePoolSize, - keepAliveMinutes, - TimeUnit.MINUTES, - new SynchronousQueue<>(), - r -> newDaemonThread(r, poolThreadNamePrefix + executorThreadNumber.getAndIncrement()), - new RejectedExecutionPolicy(log, executorName, dynamicThreadNamePrefix, - executorThreadNumber)); + corePoolSize, + keepAliveMinutes, + TimeUnit.MINUTES, + new SynchronousQueue<>(), + r -> newDaemonThread(r, poolThreadNamePrefix + executorThreadNumber.getAndIncrement()), + new RejectedExecutionPolicy(log, executorName, dynamicThreadNamePrefix, executorThreadNumber)); } } diff --git a/Util/src/main/java/io/deephaven/util/FindExceptionCause.java b/Util/src/main/java/io/deephaven/util/FindExceptionCause.java index 24bcaf99d79..b85589339b2 100644 --- a/Util/src/main/java/io/deephaven/util/FindExceptionCause.java +++ b/Util/src/main/java/io/deephaven/util/FindExceptionCause.java @@ -6,17 +6,15 @@ public class FindExceptionCause { /** - * Given an exception and a list of expected exception types, traverse the cause tree and return - * the first exception that matches the list of expected cause types. + * Given an exception and a list of expected exception types, traverse the cause tree and return the first exception + * that matches the list of expected cause types. */ @SafeVarargs - public static Exception findCause(Exception original, - Class... expectedTypes) { + public static Exception findCause(Exception original, Class... 
expectedTypes) { Throwable cause = original.getCause(); while (cause != null) { final Throwable checkCause = cause; - if (Arrays.stream(expectedTypes) - .anyMatch(type -> type.isAssignableFrom(checkCause.getClass()))) { + if (Arrays.stream(expectedTypes).anyMatch(type -> type.isAssignableFrom(checkCause.getClass()))) { return (Exception) cause; } cause = cause.getCause(); @@ -25,17 +23,15 @@ public static Exception findCause(Exception original, } /** - * Given a throwable and a list of expected throwable types, traverse the cause tree and return - * the first exception that matches the list of expected cause types. + * Given a throwable and a list of expected throwable types, traverse the cause tree and return the first exception + * that matches the list of expected cause types. */ @SafeVarargs - public static Throwable findCause(Throwable original, - Class... expectedTypes) { + public static Throwable findCause(Throwable original, Class... expectedTypes) { Throwable cause = original.getCause(); while (cause != null) { final Throwable checkCause = cause; - if (Arrays.stream(expectedTypes) - .anyMatch(type -> type.isAssignableFrom(checkCause.getClass()))) { + if (Arrays.stream(expectedTypes).anyMatch(type -> type.isAssignableFrom(checkCause.getClass()))) { return cause; } cause = cause.getCause(); @@ -71,23 +67,20 @@ public static String shortCauses(@NotNull Throwable throwable, String lineSepara } /** - * Given a throwable and a list of expected throwable types, traverse the cause tree and return - * the last exception that matches the list of expected cause types. + * Given a throwable and a list of expected throwable types, traverse the cause tree and return the last exception + * that matches the list of expected cause types. 
* * @param original the original Throwable * @param expectedTypes the list of expected types - * @return the last Throwable of one of the defined types, or the original Throwable if none - * were found + * @return the last Throwable of one of the defined types, or the original Throwable if none were found */ @SafeVarargs - public static Throwable findLastCause(Throwable original, - Class... expectedTypes) { + public static Throwable findLastCause(Throwable original, Class... expectedTypes) { Throwable cause = original.getCause(); Throwable lastCause = original.getCause(); while (cause != null) { final Throwable checkCause = cause; - if (Arrays.stream(expectedTypes) - .anyMatch(type -> type.isAssignableFrom(checkCause.getClass()))) { + if (Arrays.stream(expectedTypes).anyMatch(type -> type.isAssignableFrom(checkCause.getClass()))) { lastCause = cause; } cause = cause.getCause(); diff --git a/Util/src/main/java/io/deephaven/util/FunctionalInterfaces.java b/Util/src/main/java/io/deephaven/util/FunctionalInterfaces.java index 77af8ae1e0e..45b41e33d45 100644 --- a/Util/src/main/java/io/deephaven/util/FunctionalInterfaces.java +++ b/Util/src/main/java/io/deephaven/util/FunctionalInterfaces.java @@ -14,8 +14,7 @@ public static T unexpectedException(ThrowingSupplier the type of the input to the operation * @param the type of the exception that can be thrown @@ -37,8 +36,7 @@ public interface ThrowingSupplier { } /** - * Represents an operation that accepts no input and returns a boolean result, throwing an - * exception. + * Represents an operation that accepts no input and returns a boolean result, throwing an exception. 
* * @param the type of the exception that can be thrown */ diff --git a/Util/src/main/java/io/deephaven/util/GrpcLogging.java b/Util/src/main/java/io/deephaven/util/GrpcLogging.java index 267244bf926..8263bca04ff 100644 --- a/Util/src/main/java/io/deephaven/util/GrpcLogging.java +++ b/Util/src/main/java/io/deephaven/util/GrpcLogging.java @@ -6,11 +6,10 @@ public final class GrpcLogging { public static void setupFromBooleanProperty( - final String property, final boolean defaultValue, final String shadowPath) { + final String property, final boolean defaultValue, final String shadowPath) { final boolean logAllDefault = defaultValue; final String logAllStr = System.getProperty(property); - final boolean logAll = - (logAllStr == null) ? logAllDefault : Boolean.parseBoolean(logAllStr); + final boolean logAll = (logAllStr == null) ? logAllDefault : Boolean.parseBoolean(logAllStr); if (!logAll) { return; } diff --git a/Util/src/main/java/io/deephaven/util/HeapDump.java b/Util/src/main/java/io/deephaven/util/HeapDump.java index 91346ac2fd6..894548aa396 100644 --- a/Util/src/main/java/io/deephaven/util/HeapDump.java +++ b/Util/src/main/java/io/deephaven/util/HeapDump.java @@ -30,48 +30,44 @@ public static String generateHeapDumpPath() { final Configuration configuration = Configuration.getInstance(); final String processName = configuration.getProcessName(); return configuration.getLogPath(processName + "_" - + new SimpleDateFormat("YYYYMMddHHmmss").format(new Date(System.currentTimeMillis())) - + ".hprof"); + + new SimpleDateFormat("YYYYMMddHHmmss").format(new Date(System.currentTimeMillis())) + ".hprof"); } @SuppressWarnings("WeakerAccess") public static void heapDump(String filename) throws IOException { final MBeanServer server = ManagementFactory.getPlatformMBeanServer(); final HotSpotDiagnosticMXBean mxBean = ManagementFactory.newPlatformMXBeanProxy(server, - "com.sun.management:type=HotSpotDiagnostic", HotSpotDiagnosticMXBean.class); + 
"com.sun.management:type=HotSpotDiagnostic", HotSpotDiagnosticMXBean.class); mxBean.dumpHeap(filename, true); } private static void heapDumpWrapper(final String cause, final RuntimeException failure, - final Predicate ignore, final Logger log) { + final Predicate ignore, final Logger log) { if (ignore != null && ignore.test(failure)) { return; } try { final String heapDumpPath = HeapDump.generateHeapDumpPath(); log.fatal().append(cause + ", generating heap dump to") - .append(heapDumpPath).append(": ").append(failure).endl(); + .append(heapDumpPath).append(": ").append(failure).endl(); heapDump(heapDumpPath); } catch (Exception e) { log.info() - .append("Exception while trying to dump heap on assertion failure: " - + e.getMessage() + ":\n") - .append(e) - .endl(); + .append("Exception while trying to dump heap on assertion failure: " + e.getMessage() + ":\n") + .append(e) + .endl(); } } public static void setupHeapDumpWithDefaults(final Configuration configuration, - final Predicate ignore, final Logger log) { + final Predicate ignore, final Logger log) { if (configuration.getBooleanWithDefault("assertion.heapDump", false)) { log.info().append("Heap dump on assertion failures enabled.").endl(); - Assert.setOnAssertionCallback( - af -> heapDumpWrapper("Assertion failure", af, ignore, log)); + Assert.setOnAssertionCallback(af -> heapDumpWrapper("Assertion failure", af, ignore, log)); } if (configuration.getBooleanWithDefault("require.heapDump", false)) { log.info().append("Heap dump on requirement failures enabled.").endl(); - Require.setOnFailureCallback( - rf -> heapDumpWrapper("Requirement failure", rf, ignore, log)); + Require.setOnFailureCallback(rf -> heapDumpWrapper("Requirement failure", rf, ignore, log)); } } diff --git a/Util/src/main/java/io/deephaven/util/MultiException.java b/Util/src/main/java/io/deephaven/util/MultiException.java index 48c376e4820..0f4f98d10c4 100644 --- a/Util/src/main/java/io/deephaven/util/MultiException.java +++ 
b/Util/src/main/java/io/deephaven/util/MultiException.java @@ -5,8 +5,8 @@ import java.util.List; /** - * An exception to use when a series of operations must all be executed, but may all throw - * exceptions themselves. This allows for retention of all exception data. + * An exception to use when a series of operations must all be executed, but may all throw exceptions themselves. This + * allows for retention of all exception data. */ public class MultiException extends Exception { @@ -25,8 +25,7 @@ public MultiException(String description, Throwable... causes) { } /** - * If there is a single exception, return that exception; otherwise wrap the causes into a - * MultiException. + * If there is a single exception, return that exception; otherwise wrap the causes into a MultiException. * * @param description the description for the MultiException * @param causes the array of causes @@ -40,8 +39,7 @@ public static Throwable maybeWrapInMultiException(String description, Throwable. } /** - * If there is a single exception, return that exception; otherwise wrap the causes into a - * MultiException. + * If there is a single exception, return that exception; otherwise wrap the causes into a MultiException. 
* * @param description the description for the MultiException * @param causes the list of causes @@ -55,8 +53,7 @@ public static Throwable maybeWrapInMultiException(String description, List implements SafeCloseable { diff --git a/Util/src/main/java/io/deephaven/util/SafeCloseablePair.java b/Util/src/main/java/io/deephaven/util/SafeCloseablePair.java index 2256353a618..2afe6d458b9 100644 --- a/Util/src/main/java/io/deephaven/util/SafeCloseablePair.java +++ b/Util/src/main/java/io/deephaven/util/SafeCloseablePair.java @@ -2,8 +2,7 @@ import java.util.Objects; -public class SafeCloseablePair - implements SafeCloseable { +public class SafeCloseablePair implements SafeCloseable { public final A first; public final B second; @@ -21,8 +20,7 @@ public boolean equals(final Object other) { return false; } SafeCloseablePair otherPair = (SafeCloseablePair) other; - return Objects.equals(this.first, otherPair.first) - && Objects.equals(this.second, otherPair.second); + return Objects.equals(this.first, otherPair.first) && Objects.equals(this.second, otherPair.second); } public int hashCode() { @@ -40,13 +38,13 @@ public void close() { } public static SafeCloseablePair downcast( - SafeCloseablePair self) { + SafeCloseablePair self) { // noinspection unchecked return (SafeCloseablePair) self; } - public static SafeCloseablePair of( - final A first, final B second) { + public static SafeCloseablePair of(final A first, + final B second) { return new SafeCloseablePair<>(first, second); } diff --git a/Util/src/main/java/io/deephaven/util/SoftRecycler.java b/Util/src/main/java/io/deephaven/util/SoftRecycler.java index f42e9bfe3a3..56a12162afe 100644 --- a/Util/src/main/java/io/deephaven/util/SoftRecycler.java +++ b/Util/src/main/java/io/deephaven/util/SoftRecycler.java @@ -12,17 +12,15 @@ import java.util.function.Supplier; /** - * This class makes a little "recycle bin" for your objects of type T. When you want an object, call - * borrowItem(). 
When you do so, either a fresh T will be constructed for you, or a reused T will be - * pulled from the recycle bin. When you are done with the object and want to recycle it, call - * returnItem(). This class will keep a maximum of 'capacity' items in its recycle bin. - * Additionally, the items are held by SoftReferences, so the garbage collector may reclaim them if - * it feels like it. The items are borrowed in LIFO order, which hopefully is somewhat - * cache-friendly. + * This class makes a little "recycle bin" for your objects of type T. When you want an object, call borrowItem(). When + * you do so, either a fresh T will be constructed for you, or a reused T will be pulled from the recycle bin. When you + * are done with the object and want to recycle it, call returnItem(). This class will keep a maximum of 'capacity' + * items in its recycle bin. Additionally, the items are held by SoftReferences, so the garbage collector may reclaim + * them if it feels like it. The items are borrowed in LIFO order, which hopefully is somewhat cache-friendly. * - * Note that the caller has no special obligation to return a borrowed item nor to return borrowed - * items in any particular order. If your code has a need to keep a borrowed item forever, there is - * no problem with that. But if you want your objects to be reused, you have to return them. + * Note that the caller has no special obligation to return a borrowed item nor to return borrowed items in any + * particular order. If your code has a need to keep a borrowed item forever, there is no problem with that. But if you + * want your objects to be reused, you have to return them. */ public class SoftRecycler { private final int capacity; @@ -34,8 +32,8 @@ public class SoftRecycler { /** * @param capacity Capacity of the recycler * @param constructItem A callback that creates a new item - * @param sanitizeItem Optional. A callback that sanitizes the item before reuse. Pass null if - * no sanitization is needed. 
+ * @param sanitizeItem Optional. A callback that sanitizes the item before reuse. Pass null if no sanitization is + * needed. */ public SoftRecycler(int capacity, Supplier constructItem, Consumer sanitizeItem) { this.capacity = capacity; @@ -50,8 +48,7 @@ public T borrowItem() { synchronized (this) { // Working backwards, try to find an item that is still live while (!recycleBin.isEmpty()) { - // Peel off the last SoftReference. If it still has a value, return that value to - // the caller. Otherwise, + // Peel off the last SoftReference. If it still has a value, return that value to the caller. Otherwise, // toss it and move on to the next. T item = recycleBin.remove(recycleBin.size() - 1).get(); if (item != null) { @@ -82,27 +79,22 @@ public void returnItem(T item) { } private void cleanup() { - // Process all the SoftReferences that have lost their referents, and remove them from the - // recycle bin. + // Process all the SoftReferences that have lost their referents, and remove them from the recycle bin. while (true) { SoftReferenceWithIndex sri = (SoftReferenceWithIndex) retirementQueue.poll(); if (sri == null) { break; } - // If this SoftReference is still in the recycle bin (it may or may not be), we remove - // it from the recycle - // bin. In order to do this remove efficiently, rather than moving all the items down to - // fill the empty + // If this SoftReference is still in the recycle bin (it may or may not be), we remove it from the recycle + // bin. In order to do this remove efficiently, rather than moving all the items down to fill the empty // slot, we just replace the item at the current slot with the item at the end. - // The SoftReferenceWithIndex objects always know what position they are at in the - // recycleBin. + // The SoftReferenceWithIndex objects always know what position they are at in the recycleBin. 
final int destIndex = sri.index; if (destIndex < recycleBin.size() && sri == recycleBin.get(destIndex)) { final int lastIndex = recycleBin.size() - 1; SoftReferenceWithIndex lastSri = recycleBin.remove(lastIndex); if (destIndex != lastIndex) { - // Move the item that was formerly in the last position to the recently-evicted - // position. Also + // Move the item that was formerly in the last position to the recently-evicted position. Also // update the object's position in the array. lastSri.index = destIndex; recycleBin.set(destIndex, lastSri); diff --git a/Util/src/main/java/io/deephaven/util/Utils.java b/Util/src/main/java/io/deephaven/util/Utils.java index 39caa55aa88..579bdc19f81 100644 --- a/Util/src/main/java/io/deephaven/util/Utils.java +++ b/Util/src/main/java/io/deephaven/util/Utils.java @@ -81,8 +81,8 @@ public static String unNull(String s) { * @param thing the thing to run despite interruptions. * @param name what to call the thing - for logging */ - public static void runWithoutInterruption(Logger log, - Procedure.ThrowingNullary thing, String name) { + public static void runWithoutInterruption(Logger log, Procedure.ThrowingNullary thing, + String name) { do { try { thing.call(); @@ -114,15 +114,13 @@ public static long sleepIgnoringInterruptions(final long millisToSleep) { } /** - * Checks if an {@link Element} is empty, ignoring the specified set of attributes. An empty - * element contains no content and no attributes aside from those indicated in ignoredAttrs + * Checks if an {@link Element} is empty, ignoring the specified set of attributes. An empty element contains no + * content and no attributes aside from those indicated in ignoredAttrs * * @param elem The element to check - * @param ignoredAttrs A set of attributes that can be present while this element is still - * considered empty. + * @param ignoredAttrs A set of attributes that can be present while this element is still considered empty. 
* - * @return true if the element contained no content or attributes excluding those indicated in - * ignoredAttrs + * @return true if the element contained no content or attributes excluding those indicated in ignoredAttrs */ public static boolean isEmptyElement(Element elem, String... ignoredAttrs) { final List attrs = elem.getAttributes(); @@ -149,8 +147,7 @@ public static Element wrapElement(@NotNull String wrapperName, @NotNull Element } /** - * Get the single element that was wrapped by a previous call to - * {@link #wrapElement(String, Element)} + * Get the single element that was wrapped by a previous call to {@link #wrapElement(String, Element)} * * @param wrapperName The name of the wrapper * @param parentElem The element containing the wrapper @@ -167,13 +164,12 @@ public static Element unwrapElement(@NotNull String wrapperName, @NotNull Elemen } public static LocalDateTime getLastModifiedTime(File f) { - return LocalDateTime.ofInstant(Instant.ofEpochMilli(f.lastModified()), - ZoneId.systemDefault()); + return LocalDateTime.ofInstant(Instant.ofEpochMilli(f.lastModified()), ZoneId.systemDefault()); } /** - * Get a {@code Comparator} that treats its inputs as file names in the same directory - * basePath, and compares each file by its modified time + * Get a {@code Comparator} that treats its inputs as file names in the same directory basePath, and + * compares each file by its modified time * * @param dir The root path in which both files reside. * @return A new {@link Comparator}. @@ -195,8 +191,8 @@ public static Comparator getModifiedTimeComparator(File dir, boolean des } /** - * Get a {@link Comparator} that treats its inputs as file names in the same directory - * basePath, and compares each file by its modified time + * Get a {@link Comparator} that treats its inputs as file names in the same directory basePath, and compares + * each file by its modified time * * @return A new comparator. 
*/ @@ -240,21 +236,18 @@ public static URLClassLoader cleanRoom() { } cl = cl.getParent(); } - // We should be able to create this class loader even if this is invoked from external code - // that lacks that permission. + // We should be able to create this class loader even if this is invoked from external code that lacks that + // permission. return AccessController.doPrivileged( - (PrivilegedAction) () -> new URLClassLoader(all.toArray(new URL[0]), - null)); + (PrivilegedAction) () -> new URLClassLoader(all.toArray(new URL[0]), null)); } /** * Close an {@link AutoCloseable} object and discard any exceptions.
    - * NB: Per {@link AutoCloseable#close()}, Note that unlike the close method of - * java.io.Closeable, this close method is not required to be idempotent. In other words, - * calling this close method more than once may have some visible side effect, unlike - * Closeable.close which is required to have no effect if called more than once. However, - * implementers of this interface are strongly encouraged to make their close methods - * idempotent. + * NB: Per {@link AutoCloseable#close()}, Note that unlike the close method of java.io.Closeable, this close + * method is not required to be idempotent. In other words, calling this close method more than once may have + * some visible side effect, unlike Closeable.close which is required to have no effect if called more than + * once. However, implementers of this interface are strongly encouraged to make their close methods idempotent. * * @param autoCloseable The resource to close. */ @@ -278,8 +271,8 @@ public static void reverseArraySubset(T[] array, int start, int end) { } if (start < 0 || end > array.length - 1) { - throw new IllegalArgumentException("Invalid indices to reverse array: (" + start + ", " - + end + ") allowed " + "(0, " + (array.length - 1) + ")"); + throw new IllegalArgumentException("Invalid indices to reverse array: (" + start + ", " + end + ") allowed " + + "(0, " + (array.length - 1) + ")"); } int i = start; @@ -296,8 +289,7 @@ public static void reverseArraySubset(T[] array, int start, int end) { } /** - * Does a line by line comparison of two files to check if they are the same. Can skip the first - * N lines. + * Does a line by line comparison of two files to check if they are the same. Can skip the first N lines. 
* * @param fileA the first file * @param fileB the second file @@ -306,7 +298,7 @@ public static void reverseArraySubset(T[] array, int start, int end) { */ public static boolean areFileLinesEqual(Path fileA, Path fileB, int skipLines) { try (BufferedReader readerA = Files.newBufferedReader(fileA); - BufferedReader readerB = Files.newBufferedReader(fileB)) { + BufferedReader readerB = Files.newBufferedReader(fileB)) { String lineA = readerA.readLine(); String lineB = readerB.readLine(); @@ -353,8 +345,8 @@ public static Path changeFileExtension(Path path, String extension) { } /** - * Anonymous inner classes return "" as simple name. In most cases, we want the SimpleName to - * reflect the class being overridden. + * Anonymous inner classes return "" as simple name. In most cases, we want the SimpleName to reflect the class + * being overridden. *

    * For example, * x = new SomeClass() { @Override ... }; @@ -371,8 +363,8 @@ public static String getSimpleNameFor(@NotNull Object o) { } /** - * Anonymous inner classes return "" as simple name. In most cases, we want the SimpleName to - * reflect the class being overridden. + * Anonymous inner classes return "" as simple name. In most cases, we want the SimpleName to reflect the class + * being overridden. * * @param objectClass the class for which to return the SimpleName * @return The SimpleName of the class, or of its superclass @@ -380,8 +372,7 @@ public static String getSimpleNameFor(@NotNull Object o) { @SuppressWarnings("WeakerAccess") public static String getSimpleNameFor(@NotNull Class objectClass) { String simpleName = objectClass.getSimpleName(); - // noinspection ConstantConditions // objectClass could hypothetically be null as result of - // getSuperClass + // noinspection ConstantConditions // objectClass could hypothetically be null as result of getSuperClass while (simpleName.isEmpty() && objectClass != null) { objectClass = objectClass.getSuperclass(); simpleName = objectClass.getSimpleName(); @@ -406,8 +397,8 @@ public static T castTo(Object o, String name, Class type) { } /** - * Describe the object in a standardized format without accessing its fields or otherwise - * risking interacting with partially-initialized state. + * Describe the object in a standardized format without accessing its fields or otherwise risking interacting with + * partially-initialized state. * * @param object The object * @return The description @@ -419,13 +410,12 @@ public static String makeReferentDescription(@NotNull final Object object) { /** * Append the non-null argument in the same format as {@link #makeReferentDescription(Object)}. 
*/ - public static LogOutput.ObjFormatter REFERENT_FORMATTER = - (logOutput, object) -> logOutput.append(getSimpleNameFor(object)).append('-') - .append(System.identityHashCode(object)); + public static LogOutput.ObjFormatter REFERENT_FORMATTER = (logOutput, object) -> logOutput + .append(getSimpleNameFor(object)).append('-').append(System.identityHashCode(object)); /** - * Get the major Java version (e.g. 8, 11). Throw an exception if it can't be determined, or if - * it isn't a Deephaven-supported version. Currently supported versions include: + * Get the major Java version (e.g. 8, 11). Throw an exception if it can't be determined, or if it isn't a + * Deephaven-supported version. Currently supported versions include: *
      *
    • 1.8 (returned as 8)
    • *
    • 11
    • @@ -484,16 +474,14 @@ public static boolean fileIsDirectoryPrivileged(final File file) { * @return same as File.isDirectory */ public static boolean fileIsDirectoryPrivileged(final Path path, final LinkOption... options) { - return AccessController - .doPrivileged((PrivilegedAction) () -> Files.isDirectory(path, options)); + return AccessController.doPrivileged((PrivilegedAction) () -> Files.isDirectory(path, options)); } public static DirectoryStream fileGetDirectoryStream(Path dir, - DirectoryStream.Filter filter) throws IOException { + DirectoryStream.Filter filter) throws IOException { try { - return AccessController - .doPrivileged((PrivilegedExceptionAction>) () -> Files - .newDirectoryStream(dir, filter)); + return AccessController.doPrivileged( + (PrivilegedExceptionAction>) () -> Files.newDirectoryStream(dir, filter)); } catch (final PrivilegedActionException pae) { if (pae.getException() instanceof IOException) { throw (IOException) pae.getException(); @@ -525,8 +513,7 @@ public static String fileGetAbsolutePathPrivileged(final File file) { } /** - * Create the directory specified by this File, excluding parent directories, in a privileged - * context. + * Create the directory specified by this File, excluding parent directories, in a privileged context. * * @param file The directory to create * @return same as File.mkdir diff --git a/Util/src/main/java/io/deephaven/util/annotations/ArrayType.java b/Util/src/main/java/io/deephaven/util/annotations/ArrayType.java index a2d9d1d6c4d..aa22a92a586 100644 --- a/Util/src/main/java/io/deephaven/util/annotations/ArrayType.java +++ b/Util/src/main/java/io/deephaven/util/annotations/ArrayType.java @@ -3,8 +3,7 @@ import java.lang.annotation.*; /** - * This annotation indicates that the annotated class in some way represents an array of the - * specified type. + * This annotation indicates that the annotated class in some way represents an array of the specified type. 
*/ @Target({ElementType.TYPE}) @Retention(RetentionPolicy.RUNTIME) diff --git a/Util/src/main/java/io/deephaven/util/annotations/ArrayTypeGetter.java b/Util/src/main/java/io/deephaven/util/annotations/ArrayTypeGetter.java index c855dd4c680..f3b4ecd9658 100644 --- a/Util/src/main/java/io/deephaven/util/annotations/ArrayTypeGetter.java +++ b/Util/src/main/java/io/deephaven/util/annotations/ArrayTypeGetter.java @@ -6,9 +6,9 @@ import java.lang.annotation.Target; /** - * This annotation marks a specific method within a class that has been annotated with - * {@link ArrayType} as the means to retrieve an array of the type indicated. No guarantees are - * provided with respect to the mutability of the annotated type.. + * This annotation marks a specific method within a class that has been annotated with {@link ArrayType} as the means to + * retrieve an array of the type indicated. No guarantees are provided with respect to the mutability of the annotated + * type.. */ @Target({ElementType.METHOD}) @Retention(RetentionPolicy.RUNTIME) diff --git a/Util/src/main/java/io/deephaven/util/annotations/DynamicUse.java b/Util/src/main/java/io/deephaven/util/annotations/DynamicUse.java index 747efb6fd6e..2d778e75974 100644 --- a/Util/src/main/java/io/deephaven/util/annotations/DynamicUse.java +++ b/Util/src/main/java/io/deephaven/util/annotations/DynamicUse.java @@ -8,10 +8,9 @@ * This annotation indicates that a field is used in dynamically generated code. */ /* - * IntelliJ must be configured to recognize this annotation and suppress warnings. The applicable - * settings are Preferences -> Editors -> Inspections: -- Unused Declarations -> Entry Points -> - * Annotations -> Add it to the list -- Field Can Be Local -> Additional Special Annotations -> Add - * it to the list. + * IntelliJ must be configured to recognize this annotation and suppress warnings. 
The applicable settings are + * Preferences -> Editors -> Inspections: -- Unused Declarations -> Entry Points -> Annotations -> Add it to the list -- + * Field Can Be Local -> Additional Special Annotations -> Add it to the list. */ @Documented @Target({ElementType.FIELD, ElementType.LOCAL_VARIABLE, ElementType.METHOD}) diff --git a/Util/src/main/java/io/deephaven/util/annotations/InternalUseOnly.java b/Util/src/main/java/io/deephaven/util/annotations/InternalUseOnly.java index 6d7542c00f7..a177531ec72 100644 --- a/Util/src/main/java/io/deephaven/util/annotations/InternalUseOnly.java +++ b/Util/src/main/java/io/deephaven/util/annotations/InternalUseOnly.java @@ -6,8 +6,8 @@ import java.lang.annotation.Target; /** - * Indicates that a particular method is for internal use only and should not be used by client - * code. It is subject to change/removal at any time. + * Indicates that a particular method is for internal use only and should not be used by client code. It is subject to + * change/removal at any time. */ @Target({ElementType.METHOD, ElementType.CONSTRUCTOR, ElementType.TYPE}) @Inherited diff --git a/Util/src/main/java/io/deephaven/util/annotations/ReferentialIntegrity.java b/Util/src/main/java/io/deephaven/util/annotations/ReferentialIntegrity.java index 0d98f9b7fb3..1c709f4935d 100644 --- a/Util/src/main/java/io/deephaven/util/annotations/ReferentialIntegrity.java +++ b/Util/src/main/java/io/deephaven/util/annotations/ReferentialIntegrity.java @@ -5,14 +5,12 @@ import java.lang.annotation.Target; /** - * This annotation indicates that a field exists simply for referential integrity to the object it's - * holding. + * This annotation indicates that a field exists simply for referential integrity to the object it's holding. */ /* - * IntelliJ must be configured to recognize this annotation and suppress warnings. 
The applicable - * settings are Preferences -> Editors -> Inspections: -- Unused Declarations -> Entry Points -> - * Annotations -> Add it to the list -- Field Can Be Local -> Additional Special Annotations -> Add - * it to the list. + * IntelliJ must be configured to recognize this annotation and suppress warnings. The applicable settings are + * Preferences -> Editors -> Inspections: -- Unused Declarations -> Entry Points -> Annotations -> Add it to the list -- + * Field Can Be Local -> Additional Special Annotations -> Add it to the list. */ @Documented @Target({ElementType.FIELD, ElementType.LOCAL_VARIABLE}) diff --git a/Util/src/main/java/io/deephaven/util/annotations/TestUseOnly.java b/Util/src/main/java/io/deephaven/util/annotations/TestUseOnly.java index 9017297b439..820fabc2c55 100644 --- a/Util/src/main/java/io/deephaven/util/annotations/TestUseOnly.java +++ b/Util/src/main/java/io/deephaven/util/annotations/TestUseOnly.java @@ -6,8 +6,8 @@ import java.lang.annotation.Target; /** - * Indicates that a particular method is for test purposes only and should not be used by client or - * production code. It is subject to change/removal at any time. + * Indicates that a particular method is for test purposes only and should not be used by client or production code. It + * is subject to change/removal at any time. */ @Target({ElementType.METHOD, ElementType.CONSTRUCTOR, ElementType.TYPE}) @Inherited diff --git a/Util/src/main/java/io/deephaven/util/audit/AuditEventLoggerBasic.java b/Util/src/main/java/io/deephaven/util/audit/AuditEventLoggerBasic.java index 343513926a9..8d7299f27ad 100644 --- a/Util/src/main/java/io/deephaven/util/audit/AuditEventLoggerBasic.java +++ b/Util/src/main/java/io/deephaven/util/audit/AuditEventLoggerBasic.java @@ -1,9 +1,9 @@ package io.deephaven.util.audit; /** - * An simple interface that hides the context necessary for creating and logging - * {@link AuditEvent}s. 
Useful so that callers are only responsible for the core parts of the event, - * namely {@link AuditEvent#getEvent()} and {@link AuditEvent#getDetails()}. + * An simple interface that hides the context necessary for creating and logging {@link AuditEvent}s. Useful so that + * callers are only responsible for the core parts of the event, namely {@link AuditEvent#getEvent()} and + * {@link AuditEvent#getDetails()}. */ public interface AuditEventLoggerBasic { void log(String event, String details); diff --git a/Util/src/main/java/io/deephaven/util/codec/BigDecimalCodec.java b/Util/src/main/java/io/deephaven/util/codec/BigDecimalCodec.java index 2b0c9eada7d..62d88cbad47 100644 --- a/Util/src/main/java/io/deephaven/util/codec/BigDecimalCodec.java +++ b/Util/src/main/java/io/deephaven/util/codec/BigDecimalCodec.java @@ -13,9 +13,8 @@ /** * BigDecimal encoder, with fixed and variable width support. * - * We use 1's complement to store fixed precision values so that they may be ordered in binary - * without decoding. There is no practical limit on the precision we can store this way but we limit - * it to 1000 decimal digits for sanity. + * We use 1's complement to store fixed precision values so that they may be ordered in binary without decoding. There + * is no practical limit on the precision we can store this way but we limit it to 1000 decimal digits for sanity. * * Variable width values are stored raw as BigDecimal scale followed by the unscaled byte array. 
*/ @@ -69,8 +68,7 @@ public byte[] encodedNullValue() { public BigDecimalCodec(@Nullable String arguments) { // noinspection ConstantConditions try { - int _precision = 0, _scale = 0; // zero indicates unlimited precision/scale, variable - // width encoding + int _precision = 0, _scale = 0; // zero indicates unlimited precision/scale, variable width encoding boolean _strict = true; if (arguments != null && arguments.trim().length() > 0) { final String[] tokens = arguments.split(","); @@ -94,8 +92,8 @@ public BigDecimalCodec(@Nullable String arguments) { break; default: throw new IllegalArgumentException( - "Unexpected rounding mode (legal values are \"allowRounding\" or \"noRounding\"): " - + mode); + "Unexpected rounding mode (legal values are \"allowRounding\" or \"noRounding\"): " + + mode); } } } @@ -106,8 +104,7 @@ public BigDecimalCodec(@Nullable String arguments) { this.scale = _scale; this.strict = _strict; } catch (Exception ex) { - throw new IllegalArgumentException( - "Error parsing codec argument(s): " + ex.getMessage(), ex); + throw new IllegalArgumentException("Error parsing codec argument(s): " + ex.getMessage(), ex); } init(); @@ -116,8 +113,7 @@ public BigDecimalCodec(@Nullable String arguments) { private void init() { if (precision < 0 || precision > MAX_FIXED_PRECISION) { - throw new IllegalArgumentException( - "Precision out of legal range (0-" + MAX_FIXED_PRECISION + ")"); + throw new IllegalArgumentException("Precision out of legal range (0-" + MAX_FIXED_PRECISION + ")"); } if (scale < 0) { throw new IllegalArgumentException("Scale must be non-negative"); @@ -130,8 +126,7 @@ private void init() { encodedSize = (int) Math.ceil(Math.log(10) / Math.log(2) * precision / Byte.SIZE) + 1; zeroBytes = new byte[encodedSize]; // there are two possible ways to represent zero in our schema, - // we choose all zeros ("positive zero") for zero and "negative zero" for null. 
This is - // arbitrary convention + // we choose all zeros ("positive zero") for zero and "negative zero" for null. This is arbitrary convention Arrays.fill(zeroBytes, (byte) 0); zeroBytes[0] = (byte) 1; nullBytes = new byte[encodedSize]; @@ -171,17 +166,14 @@ public byte[] encode(@Nullable final BigDecimal input) { // FIXED SIZE // round if necessary - // we need to make sure we adjust for both precision and scale since we are encoding with a - // fixed scale + // we need to make sure we adjust for both precision and scale since we are encoding with a fixed scale // (i.e. too high a scale requires reducing precision to "make room") if ((value.precision() > this.precision || value.scale() > scale)) { if (strict) { - throw new IllegalArgumentException( - "Unable to encode value " + value.toString() + " with precision " + throw new IllegalArgumentException("Unable to encode value " + value.toString() + " with precision " + precision + " scale " + scale); } - final int targetPrecision = - Math.min(precision, value.precision() - Math.max(0, value.scale() - scale)); + final int targetPrecision = Math.min(precision, value.precision() - Math.max(0, value.scale() - scale)); if (targetPrecision > 0) { value = value.round(new MathContext(targetPrecision)); } else { @@ -193,16 +185,13 @@ public byte[] encode(@Nullable final BigDecimal input) { bytes[0] = value.signum() >= 0 ? 
(byte) 1 : (byte) 0; // set sign bit // we should not ever have to round here, that is taken care of above - // we store everything as an unscaled integer value (the smallest non-zero value we can - // store is "1") + // we store everything as an unscaled integer value (the smallest non-zero value we can store is "1") value = value.movePointRight(scale).setScale(0).abs(); // copy unscaled bytes to proper size array final byte[] unscaledValue = value.unscaledValue().toByteArray(); - if (unscaledValue.length >= bytes.length) { // unscaled value must be at most one less than - // length of our buffer - throw new IllegalArgumentException( - "Value " + input.toString() + " is too large to encode with precision " + if (unscaledValue.length >= bytes.length) { // unscaled value must be at most one less than length of our buffer + throw new IllegalArgumentException("Value " + input.toString() + " is too large to encode with precision " + precision + " and scale " + scale); } @@ -213,8 +202,7 @@ public byte[] encode(@Nullable final BigDecimal input) { bytes[bytes.length - unscaledValue.length + i] = (byte) ~((int) unscaledValue[i]); } } else { - System.arraycopy(unscaledValue, 0, bytes, bytes.length - unscaledValue.length, - unscaledValue.length); + System.arraycopy(unscaledValue, 0, bytes, bytes.length - unscaledValue.length, unscaledValue.length); } return bytes; diff --git a/Util/src/main/java/io/deephaven/util/codec/BigIntegerCodec.java b/Util/src/main/java/io/deephaven/util/codec/BigIntegerCodec.java index d97c6ea2545..d4772efd07a 100644 --- a/Util/src/main/java/io/deephaven/util/codec/BigIntegerCodec.java +++ b/Util/src/main/java/io/deephaven/util/codec/BigIntegerCodec.java @@ -9,8 +9,8 @@ /** * Simple ObjectCodec for BigIntegers. * - * For now this just wraps BigDecimalCodec with scale=0. At some point we might reimplement this - * directly but the encoded format is likely the same. + * For now this just wraps BigDecimalCodec with scale=0. 
At some point we might reimplement this directly but the + * encoded format is likely the same. */ @SuppressWarnings("unused") public class BigIntegerCodec implements ObjectCodec { @@ -35,12 +35,11 @@ public BigIntegerCodec(@Nullable String arguments) { throw new IllegalArgumentException("Specified precision must be >= 1"); } } - // we pass strict=true, rounding isn't relevant for BigInteger and we always want - // exceptions if precision is exceeded + // we pass strict=true, rounding isn't relevant for BigInteger and we always want exceptions if precision is + // exceeded this.codec = new BigDecimalCodec(_precision, 0, true); } catch (Exception ex) { - throw new IllegalArgumentException( - "Error parsing codec argument(s): " + ex.getMessage(), ex); + throw new IllegalArgumentException("Error parsing codec argument(s): " + ex.getMessage(), ex); } } @@ -63,8 +62,8 @@ public int getScale() { @Override public byte[] encode(@Nullable final BigInteger input) { return input == null - ? codec.encodedNullValue() - : codec.encode(new BigDecimal(input)); + ? 
codec.encodedNullValue() + : codec.encode(new BigDecimal(input)); } @Nullable diff --git a/Util/src/main/java/io/deephaven/util/codec/CodecCache.java b/Util/src/main/java/io/deephaven/util/codec/CodecCache.java index 880bf543c46..b70a5f5a507 100644 --- a/Util/src/main/java/io/deephaven/util/codec/CodecCache.java +++ b/Util/src/main/java/io/deephaven/util/codec/CodecCache.java @@ -29,7 +29,7 @@ private static class Item { private final ObjectCodec codec; private Item(@NotNull final String className, - @Nullable final String arguments) { + @Nullable final String arguments) { Require.neqNull(className, "className"); final Class codecClass; @@ -43,15 +43,15 @@ private Item(@NotNull final String className, try { codecConstructor = codecClass.getConstructor(String.class); } catch (NoSuchMethodException e) { - throw new CodecCacheException( - "Codec class " + codecClass + " is missing expected String constructor", e); + throw new CodecCacheException("Codec class " + codecClass + " is missing expected String constructor", + e); } try { codec = codecConstructor.newInstance(arguments); } catch (InstantiationException | IllegalAccessException | InvocationTargetException - | ClassCastException e) { - throw new CodecCacheException("Failed to instantiate codec of type " + codecClass - + " from arguments " + arguments, e); + | ClassCastException e) { + throw new CodecCacheException( + "Failed to instantiate codec of type " + codecClass + " from arguments " + arguments, e); } } } @@ -65,23 +65,22 @@ private Item(@NotNull final String className, * @param arguments The constructor arguments * @param The encoding type * @return The corresponding {@link ObjectCodec} - * @throws CodecCacheException If an error occurred while instantiating the named codec class - * from the supplied arguments + * @throws CodecCacheException If an error occurred while instantiating the named codec class from the supplied + * arguments */ public synchronized ObjectCodec getCodec(@NotNull final String 
className, - @Nullable final String arguments) { + @Nullable final String arguments) { // noinspection unchecked return (ObjectCodec) items - .computeIfAbsent(className, (cn) -> new HashMap<>()) - .computeIfAbsent(arguments, (a) -> new Item(className, a)).codec; + .computeIfAbsent(className, (cn) -> new HashMap<>()) + .computeIfAbsent(arguments, (a) -> new Item(className, a)).codec; } /** * Get the default {@link ObjectCodec} class to use for the given column type. * * @param dataType The column data type - * @return The name of the default {@link ObjectCodec} subclass to use for encoding the given - * type + * @return The name of the default {@link ObjectCodec} subclass to use for encoding the given type */ public static String getDefaultCodecClass(@NotNull final Class dataType) { if (dataType.equals(LocalDate.class)) { diff --git a/Util/src/main/java/io/deephaven/util/codec/LocalDateCodec.java b/Util/src/main/java/io/deephaven/util/codec/LocalDateCodec.java index 7b54dd0e26a..87df62b88cf 100644 --- a/Util/src/main/java/io/deephaven/util/codec/LocalDateCodec.java +++ b/Util/src/main/java/io/deephaven/util/codec/LocalDateCodec.java @@ -9,15 +9,15 @@ /** * LocalDate codec, with support for "full" and "compact" encodings. * - * The full (default) encoding is a 5-byte packed integer format that can represent the full range - * of dates that a LocalDate object can hold (years -999,999,999 to 999,999,999). + * The full (default) encoding is a 5-byte packed integer format that can represent the full range of dates that a + * LocalDate object can hold (years -999,999,999 to 999,999,999). * - * The compact encoding is a 3-byte packed-integer. This format is constrained to represent dates in - * the range 0000-01-01 to 9999-01-01. This encoding covers the range supported by many SQL - * databases, so is often a good candidate for imported data sets. + * The compact encoding is a 3-byte packed-integer. 
This format is constrained to represent dates in the range + * 0000-01-01 to 9999-01-01. This encoding covers the range supported by many SQL databases, so is often a good + * candidate for imported data sets. * - * Both encodings are "nullable", indicated by setting the most significant byte to 0xFF (this can - * never represent a valid value in the ranges specified for each encoding). + * Both encodings are "nullable", indicated by setting the most significant byte to 0xFF (this can never represent a + * valid value in the ranges specified for each encoding). */ public class LocalDateCodec implements ObjectCodec { @@ -42,8 +42,7 @@ public LocalDateCodec(@Nullable String arguments) { try { domain = Domain.valueOf(domainStr); } catch (IllegalArgumentException ex) { - throw new IllegalArgumentException( - "Unexpected value for LocalDate domain: " + domainStr); + throw new IllegalArgumentException("Unexpected value for LocalDate domain: " + domainStr); } if (tokens.length > 1) { final String nullability = tokens[1].trim(); @@ -56,8 +55,8 @@ public LocalDateCodec(@Nullable String arguments) { break; default: throw new IllegalArgumentException( - "Unexpected value for nullability (legal values are \"nullable\" or \"notNull\"): " - + nullability); + "Unexpected value for nullability (legal values are \"nullable\" or \"notNull\"): " + + nullability); } } } else { @@ -113,19 +112,17 @@ public byte[] encode(@Nullable final LocalDate input) { } } else { if (input.getYear() < minYear || input.getYear() > maxYear) { - throw new IllegalArgumentException( - "Year out of legal range [" + minYear + "," + maxYear + "] for " + throw new IllegalArgumentException("Year out of legal range [" + minYear + "," + maxYear + "] for " + domain.toString() + " encoder: " + input.getYear()); } final byte[] encodedValue = new byte[encodedSize]; switch (domain) { case Compact: { - // 5 bits for day of month, 4 for month, and 14 for the year (leading bit is for - // null indicator) + // 5 bits for 
day of month, 4 for month, and 14 for the year (leading bit is for null indicator) // this totals 23 so we have one extra bit for the null indicator in 3 bytes int packedValue = (input.getYear() << 4 + 5) - | (input.getMonthValue() << 5) - | (input.getDayOfMonth()); + | (input.getMonthValue() << 5) + | (input.getDayOfMonth()); for (int i = encodedSize - 1; i >= 0; i--) { encodedValue[i] = (byte) (packedValue & 0xFF); packedValue >>= 8; @@ -134,16 +131,14 @@ public byte[] encode(@Nullable final LocalDate input) { break; case Full: { // 5 bits for day of month, 4 for month, and 31 for the year - // this uses every available bit, but since our legal year range is just - // -999,999,999 to 999,999,999 + // this uses every available bit, but since our legal year range is just -999,999,999 to 999,999,999 // a leading 0xFF byte still represents an illegal value - // we want to represent negative years, so we convert to a positive value to - // avoid messing with the high bit - final long year = (long) input.getYear() - minYear; // offset the year to avoid - // negative values + // we want to represent negative years, so we convert to a positive value to avoid messing with the + // high bit + final long year = (long) input.getYear() - minYear; // offset the year to avoid negative values long packedValue = (year << 4 + 5) - | ((long) input.getMonthValue() << 5) - | ((long) input.getDayOfMonth()); + | ((long) input.getMonthValue() << 5) + | ((long) input.getDayOfMonth()); for (int i = encodedSize - 1; i >= 0; i--) { encodedValue[i] = (byte) (packedValue & 0xFF); packedValue >>= 8; diff --git a/Util/src/main/java/io/deephaven/util/codec/LocalTimeCodec.java b/Util/src/main/java/io/deephaven/util/codec/LocalTimeCodec.java index 0ce92f7b354..f5568c61893 100644 --- a/Util/src/main/java/io/deephaven/util/codec/LocalTimeCodec.java +++ b/Util/src/main/java/io/deephaven/util/codec/LocalTimeCodec.java @@ -6,18 +6,18 @@ import java.time.LocalTime; /** - * LocalTime codec, with support 
for nanosecond, millisecond, or second precision. This codec always - * uses a fixed-width encoding, the size of which depends on the desired precision. + * LocalTime codec, with support for nanosecond, millisecond, or second precision. This codec always uses a fixed-width + * encoding, the size of which depends on the desired precision. * - * The Nanos (default) encoding is a 6-byte packed-integer format that can represent the full range - * of times that a LocalTime can represent. + * The Nanos (default) encoding is a 6-byte packed-integer format that can represent the full range of times that a + * LocalTime can represent. * * The Millis encoding is a 4-byte packed-integer format. Sub-millisecond values are truncated. * * The Seconds encoding is a 3-byte packed-integer format. Sub-second values are truncated. * - * All encodings are "nullable". Since every encoding leaves room for at least one "extra" bit, the - * leading bit always indicates null - non-null values will always contain zero for the 1st bit. + * All encodings are "nullable". Since every encoding leaves room for at least one "extra" bit, the leading bit always + * indicates null - non-null values will always contain zero for the 1st bit. 
*/ public class LocalTimeCodec implements ObjectCodec { // decimal precision of fractional second @@ -51,17 +51,15 @@ public LocalTimeCodec(@Nullable String arguments) { break; default: throw new IllegalArgumentException( - "Unexpected value for nullability (legal values are \"nullable\" or \"notNull\"): " - + nullability); + "Unexpected value for nullability (legal values are \"nullable\" or \"notNull\"): " + + nullability); } } } catch (NumberFormatException ex) { - throw new IllegalArgumentException( - "Error fractional second precision: " + ex.getMessage(), ex); + throw new IllegalArgumentException("Error fractional second precision: " + ex.getMessage(), ex); } if (precision < 0 || precision > 9) { - throw new IllegalArgumentException( - "Invalid fractional second precision: " + precision); + throw new IllegalArgumentException("Invalid fractional second precision: " + precision); } } else { precision = 9; @@ -69,8 +67,7 @@ public LocalTimeCodec(@Nullable String arguments) { // the # of bits we need to store the specified number of digits of decimal precision fractionalBits = (int) Math.ceil(Math.log(10) / Math.log(2) * precision); - // 5 bits for hour, 6 bits for minute, 6 bits for second plus whatever we need for - // fractional seconds + // 5 bits for hour, 6 bits for minute, 6 bits for second plus whatever we need for fractional seconds int encodedBits = 17 + fractionalBits; // add a bit for null indicator if needed if (nullable) { @@ -118,9 +115,9 @@ public byte[] encode(@Nullable final LocalTime input) { // 5 bits for hour, 6 bits for minute, 6 bits for second, 0-30 bits for fractional second // so we never need more than 47 bits (48 with null indicator) long packedValue = ((long) input.getHour() << 12 + fractionalBits) - | ((long) input.getMinute() << 6 + fractionalBits) - | ((long) input.getSecond() << fractionalBits) - | fractionalSecond; + | ((long) input.getMinute() << 6 + fractionalBits) + | ((long) input.getSecond() << fractionalBits) + | 
fractionalSecond; for (int i = encodedSize - 1; i >= 0; i--) { encodedValue[i] = (byte) (packedValue & 0xFF); packedValue >>= 8; diff --git a/Util/src/main/java/io/deephaven/util/codec/MapCodec.java b/Util/src/main/java/io/deephaven/util/codec/MapCodec.java index 69335ff81f6..41ae9381269 100644 --- a/Util/src/main/java/io/deephaven/util/codec/MapCodec.java +++ b/Util/src/main/java/io/deephaven/util/codec/MapCodec.java @@ -25,8 +25,7 @@ public abstract class MapCodec implements ObjectCodec> { private static final int MINIMUM_SCRATCH_CAPACITY = 4096; private static final ThreadLocal> scratchBufferThreadLocal = - ThreadLocal - .withInitial(() -> new SoftReference<>(ByteBuffer.allocate(MINIMUM_SCRATCH_CAPACITY))); + ThreadLocal.withInitial(() -> new SoftReference<>(ByteBuffer.allocate(MINIMUM_SCRATCH_CAPACITY))); MapCodec(@Nullable final String arguments) {} @@ -67,12 +66,10 @@ public byte[] encode(@Nullable final Map input) { int estimatedCapacity = -1; // on the first try, we'll use whatever our scratch buffer was - // on the second try, we'll have an estimate which is 10% bigger than if every character was - // 1 byte + // on the second try, we'll have an estimate which is 10% bigger than if every character was 1 byte // on the third try, we'll allow for every character to be two bytes // on the fourth try, we'll allow for every character to be four bytes - // if there is a fifth try, it means that we could not encode this properly, given that - // there is a limit of + // if there is a fifth try, it means that we could not encode this properly, given that there is a limit of // 4 bytes in a UTF-8 character. 
for (int tryCount = 0; tryCount < 4; ++tryCount) { try { @@ -98,8 +95,7 @@ public byte[] encode(@Nullable final Map input) { private ByteBuffer allocateScratch(int estimatedCapacity) { final ByteBuffer holdScratch; - scratchBufferThreadLocal - .set(new SoftReference<>(holdScratch = ByteBuffer.allocate(estimatedCapacity))); + scratchBufferThreadLocal.set(new SoftReference<>(holdScratch = ByteBuffer.allocate(estimatedCapacity))); return holdScratch; } @@ -140,9 +136,9 @@ public Map decode(@NotNull final byte[] input, final int offset, final int /** * Estimate the size of the encoded map. * - * The estimated size is used to encode the map; and is doubled twice if there is a buffer - * underflow exception. Thus if you are wrong by more than a factor of 4x, the map can not be - * encoded and a BufferUnderflow exception is returned to the caller. + * The estimated size is used to encode the map; and is doubled twice if there is a buffer underflow exception. Thus + * if you are wrong by more than a factor of 4x, the map can not be encoded and a BufferUnderflow exception is + * returned to the caller. * * @param input the input map * @return the estimated size of the map diff --git a/Util/src/main/java/io/deephaven/util/codec/ObjectCodec.java b/Util/src/main/java/io/deephaven/util/codec/ObjectCodec.java index 07bb10576f4..6f9758cf4db 100644 --- a/Util/src/main/java/io/deephaven/util/codec/ObjectCodec.java +++ b/Util/src/main/java/io/deephaven/util/codec/ObjectCodec.java @@ -9,21 +9,20 @@ *

      * Implementations must follow several rules to enable correct usage: *

        - *
      1. They must be stateless or designed for concurrent use (e.g. by using only ThreadLocal state), - * as they will generally be cached and re-used.
      2. - *
      3. They must not modify their inputs in any way, retain references to their inputs, or return - * results that retain references to their inputs.
      4. - *
      5. They should provide a public constructor that takes a single String argument, in order to - * allow configuration-driven reflective instantiation.
      6. + *
      7. They must be stateless or designed for concurrent use (e.g. by using only ThreadLocal state), as they will + * generally be cached and re-used.
      8. + *
      9. They must not modify their inputs in any way, retain references to their inputs, or return results that retain + * references to their inputs.
      10. + *
      11. They should provide a public constructor that takes a single String argument, in order to allow + * configuration-driven reflective instantiation.
      12. *
      */ public interface ObjectCodec extends ObjectDecoder { /** - * Encode the specified input as an array of bytes. Note that it is up to the implementation how - * to encode null inputs. The use of a zero-length byte array (e.g. - * {@link io.deephaven.datastructures.util.CollectionUtil#ZERO_LENGTH_BYTE_ARRAY}) is strongly - * encouraged. + * Encode the specified input as an array of bytes. Note that it is up to the implementation how to encode null + * inputs. The use of a zero-length byte array (e.g. + * {@link io.deephaven.datastructures.util.CollectionUtil#ZERO_LENGTH_BYTE_ARRAY}) is strongly encouraged. * * @param input The input object, possibly null * @return The output byte array @@ -39,16 +38,16 @@ public interface ObjectCodec extends ObjectDecoder { boolean isNullable(); /** - * If applicable, the maximum encodable precision. If precision is not applicable (i.e. for - * non-numeric types) this method should return zero. + * If applicable, the maximum encodable precision. If precision is not applicable (i.e. for non-numeric types) this + * method should return zero. * * @return the numeric precision supported by this codec */ int getPrecision(); /** - * If applicable, the maximum encodable scale. If scale is not applicable (i.e. for non-numeric - * types) this method should return zero. + * If applicable, the maximum encodable scale. If scale is not applicable (i.e. for non-numeric types) this method + * should return zero. * * @return the numeric scale (digits after the decimal point) supported by this codec */ diff --git a/Util/src/main/java/io/deephaven/util/codec/ObjectDecoder.java b/Util/src/main/java/io/deephaven/util/codec/ObjectDecoder.java index d4dc0e418fd..b07f10652a7 100644 --- a/Util/src/main/java/io/deephaven/util/codec/ObjectDecoder.java +++ b/Util/src/main/java/io/deephaven/util/codec/ObjectDecoder.java @@ -6,15 +6,14 @@ /** *

      - * Codec superinterface for Object translation from byte arrays for serialization and - * deserialization. + * Codec superinterface for Object translation from byte arrays for serialization and deserialization. *

      * Implementations must follow several rules to enable correct usage: *

        - *
      1. They must be stateless or designed for concurrent use (e.g. by using only ThreadLocal state), - * as they will generally be cached and re-used.
      2. - *
      3. They must not modify their inputs in any way, retain references to their inputs, or return - * results that retain references to their inputs.
      4. + *
      5. They must be stateless or designed for concurrent use (e.g. by using only ThreadLocal state), as they will + * generally be cached and re-used.
      6. + *
      7. They must not modify their inputs in any way, retain references to their inputs, or return results that retain + * references to their inputs.
      8. *
      */ public interface ObjectDecoder { @@ -37,14 +36,13 @@ public interface ObjectDecoder { /** * What width byte array does this ObjectCodec expect to encode and decode? * - * @return VARIABLE_WIDTH_SENTINEL if the codec must encode and decode variable width columns, - * otherwise the fixed size of byte array that must be decoded and encoded. + * @return VARIABLE_WIDTH_SENTINEL if the codec must encode and decode variable width columns, otherwise the fixed + * size of byte array that must be decoded and encoded. */ int expectedObjectWidth(); /** - * Verify that this codec is capable of supporting a column that has an actual width of - * {@code actualWidth}. + * Verify that this codec is capable of supporting a column that has an actual width of {@code actualWidth}. * * @param actualWidth the actual width of the instantiated column * @throws IllegalArgumentException if {@code actualWidth} is not compatible with this codec diff --git a/Util/src/main/java/io/deephaven/util/codec/SimpleByteArrayCodec.java b/Util/src/main/java/io/deephaven/util/codec/SimpleByteArrayCodec.java index 5b1546bc8dd..7c48c74f7bf 100644 --- a/Util/src/main/java/io/deephaven/util/codec/SimpleByteArrayCodec.java +++ b/Util/src/main/java/io/deephaven/util/codec/SimpleByteArrayCodec.java @@ -9,8 +9,7 @@ *

      * Codec for non-nullable byte arrays that does a no-op encode/decode. *

      - * One particular instance where this is useful is reading parquet 1.0 data encoded as binary as - * "raw". + * One particular instance where this is useful is reading parquet 1.0 data encoded as binary as "raw". * */ public class SimpleByteArrayCodec implements ObjectCodec { @@ -30,8 +29,7 @@ public SimpleByteArrayCodec(@Nullable final String arguments) { try { size = Integer.parseInt(tokens[0].trim()); if (tokens.length > 1) { - throw new IllegalArgumentException( - "Unexpected additional arguments after first: " + arguments); + throw new IllegalArgumentException("Unexpected additional arguments after first: " + arguments); } } catch (NumberFormatException ex) { throw new IllegalArgumentException("Error parsing column size: " + ex.getMessage(), ex); @@ -46,8 +44,7 @@ public SimpleByteArrayCodec(@Nullable final String arguments) { @Override public byte[] encode(@Nullable final byte[] input) { if (input == null) { - throw new IllegalArgumentException( - SimpleByteArrayCodec.class.getSimpleName() + " cannot encode nulls"); + throw new IllegalArgumentException(SimpleByteArrayCodec.class.getSimpleName() + " cannot encode nulls"); } return input; } diff --git a/Util/src/main/java/io/deephaven/util/codec/StringBooleanMapCodec.java b/Util/src/main/java/io/deephaven/util/codec/StringBooleanMapCodec.java index ea171eb73ab..319f0917bb1 100644 --- a/Util/src/main/java/io/deephaven/util/codec/StringBooleanMapCodec.java +++ b/Util/src/main/java/io/deephaven/util/codec/StringBooleanMapCodec.java @@ -8,8 +8,7 @@ /** * ObjectCodec implementation for Maps of String to Boolean. * - * Each map is encoded as an integer length, followed by UTF-8 encoded strings for each key and the - * value. + * Each map is encoded as an integer length, followed by UTF-8 encoded strings for each key and the value. * * A null map is represented as an array of zero bytes. 
*/ diff --git a/Util/src/main/java/io/deephaven/util/codec/StringDoubleMapCodec.java b/Util/src/main/java/io/deephaven/util/codec/StringDoubleMapCodec.java index b86d6f30d72..d4c98f4f054 100644 --- a/Util/src/main/java/io/deephaven/util/codec/StringDoubleMapCodec.java +++ b/Util/src/main/java/io/deephaven/util/codec/StringDoubleMapCodec.java @@ -8,8 +8,7 @@ /** * ObjectCodec implementation for Maps of String to Double. * - * Each map is encoded as an integer length, followed by UTF-8 encoded strings for each key and the - * value. + * Each map is encoded as an integer length, followed by UTF-8 encoded strings for each key and the value. * * A null map is represented as an array of zero bytes. */ diff --git a/Util/src/main/java/io/deephaven/util/codec/StringFloatMapCodec.java b/Util/src/main/java/io/deephaven/util/codec/StringFloatMapCodec.java index 8eb709b008c..84a3258aeae 100644 --- a/Util/src/main/java/io/deephaven/util/codec/StringFloatMapCodec.java +++ b/Util/src/main/java/io/deephaven/util/codec/StringFloatMapCodec.java @@ -8,8 +8,7 @@ /** * ObjectCodec implementation for Maps of String to Float. * - * Each map is encoded as an integer length, followed by UTF-8 encoded strings for each key and the - * value. + * Each map is encoded as an integer length, followed by UTF-8 encoded strings for each key and the value. * * A null map is represented as an array of zero bytes. */ diff --git a/Util/src/main/java/io/deephaven/util/codec/StringIntMapCodec.java b/Util/src/main/java/io/deephaven/util/codec/StringIntMapCodec.java index d48a76b5c19..91eca2f934d 100644 --- a/Util/src/main/java/io/deephaven/util/codec/StringIntMapCodec.java +++ b/Util/src/main/java/io/deephaven/util/codec/StringIntMapCodec.java @@ -8,8 +8,7 @@ /** * ObjectCodec implementation for Maps of String to Integer. * - * Each map is encoded as an integer length, followed by UTF-8 encoded strings for each key and the - * value. 
+ * Each map is encoded as an integer length, followed by UTF-8 encoded strings for each key and the value. * * A null map is represented as an array of zero bytes. */ diff --git a/Util/src/main/java/io/deephaven/util/codec/StringKeyedMapCodec.java b/Util/src/main/java/io/deephaven/util/codec/StringKeyedMapCodec.java index f78fb24f7f2..24e572b582d 100644 --- a/Util/src/main/java/io/deephaven/util/codec/StringKeyedMapCodec.java +++ b/Util/src/main/java/io/deephaven/util/codec/StringKeyedMapCodec.java @@ -24,8 +24,8 @@ int estimateSize(Map input) { /** * Return the size of the values (presuming they are fixed size). * - * If your values are not fixed size, then you must override the {@link #estimateSize(Map)} - * method and should throw an UnsupportedOperationException. + * If your values are not fixed size, then you must override the {@link #estimateSize(Map)} method and should throw + * an UnsupportedOperationException. * * @return the size of each encoded value */ diff --git a/Util/src/main/java/io/deephaven/util/codec/StringLongMapCodec.java b/Util/src/main/java/io/deephaven/util/codec/StringLongMapCodec.java index b5f92da3294..f2b82b620b9 100644 --- a/Util/src/main/java/io/deephaven/util/codec/StringLongMapCodec.java +++ b/Util/src/main/java/io/deephaven/util/codec/StringLongMapCodec.java @@ -8,8 +8,7 @@ /** * ObjectCodec implementation for Maps of String to Long. * - * Each map is encoded as an integer length, followed by UTF-8 encoded strings for each key and the - * value. + * Each map is encoded as an integer length, followed by UTF-8 encoded strings for each key and the value. * * A null map is represented as an array of zero bytes. 
*/ diff --git a/Util/src/main/java/io/deephaven/util/codec/StringStringMapCodec.java b/Util/src/main/java/io/deephaven/util/codec/StringStringMapCodec.java index 77d36fd1939..208a7569a6e 100644 --- a/Util/src/main/java/io/deephaven/util/codec/StringStringMapCodec.java +++ b/Util/src/main/java/io/deephaven/util/codec/StringStringMapCodec.java @@ -9,8 +9,7 @@ /** * ObjectCodec implementation for Maps of String to String. * - * Each map is encoded as an integer length, followed by UTF-8 encoded strings for each key and - * value. + * Each map is encoded as an integer length, followed by UTF-8 encoded strings for each key and value. * * A null map is represented as an array of zero bytes. */ diff --git a/Util/src/main/java/io/deephaven/util/codec/UTF8StringAsByteArrayCodec.java b/Util/src/main/java/io/deephaven/util/codec/UTF8StringAsByteArrayCodec.java index 9273759f1f0..84761e4379d 100644 --- a/Util/src/main/java/io/deephaven/util/codec/UTF8StringAsByteArrayCodec.java +++ b/Util/src/main/java/io/deephaven/util/codec/UTF8StringAsByteArrayCodec.java @@ -10,8 +10,7 @@ *

      * Codec for non-nullable Strings from UTF8 byte arrays. *

      - * One particular instance where this is useful is reading parquet 1.0 data encoded as binary as - * String. + * One particular instance where this is useful is reading parquet 1.0 data encoded as binary as String. * */ public class UTF8StringAsByteArrayCodec implements ObjectCodec { @@ -31,8 +30,7 @@ public UTF8StringAsByteArrayCodec(@Nullable final String arguments) { try { size = Integer.parseInt(tokens[0].trim()); if (tokens.length > 1) { - throw new IllegalArgumentException( - "Unexpected additional arguments after first: " + arguments); + throw new IllegalArgumentException("Unexpected additional arguments after first: " + arguments); } } catch (NumberFormatException ex) { throw new IllegalArgumentException("Error parsing column size: " + ex.getMessage(), ex); @@ -48,7 +46,7 @@ public UTF8StringAsByteArrayCodec(@Nullable final String arguments) { public byte[] encode(@Nullable final String input) { if (input == null) { throw new IllegalArgumentException( - UTF8StringAsByteArrayCodec.class.getSimpleName() + " cannot encode nulls"); + UTF8StringAsByteArrayCodec.class.getSimpleName() + " cannot encode nulls"); } return input.getBytes(StandardCharsets.UTF_8); } diff --git a/Util/src/main/java/io/deephaven/util/datastructures/RandomAccessDeque.java b/Util/src/main/java/io/deephaven/util/datastructures/RandomAccessDeque.java index a5221a136fa..e2543a224dc 100644 --- a/Util/src/main/java/io/deephaven/util/datastructures/RandomAccessDeque.java +++ b/Util/src/main/java/io/deephaven/util/datastructures/RandomAccessDeque.java @@ -197,8 +197,7 @@ public boolean remove(Object entry) { public T get(int index) { if (start + index >= end || index < 0 || (start + index < start)) { - throw new ArrayIndexOutOfBoundsException( - "index=" + index + ", end=" + end + ", start=" + start); + throw new ArrayIndexOutOfBoundsException("index=" + index + ", end=" + end + ", start=" + start); } // noinspection unchecked return (T) array[start + index]; diff --git 
a/Util/src/main/java/io/deephaven/util/datastructures/ReleaseTracker.java b/Util/src/main/java/io/deephaven/util/datastructures/ReleaseTracker.java index 1f5c05bd477..63b2dc3d887 100644 --- a/Util/src/main/java/io/deephaven/util/datastructures/ReleaseTracker.java +++ b/Util/src/main/java/io/deephaven/util/datastructures/ReleaseTracker.java @@ -12,8 +12,8 @@ * Instrumentation tool for detecting missing resource releases. */ public interface ReleaseTracker { - boolean CAPTURE_STACK_TRACES = Configuration.getInstance() - .getBooleanForClassWithDefault(ReleaseTracker.class, "captureStackTraces", false); + boolean CAPTURE_STACK_TRACES = Configuration.getInstance().getBooleanForClassWithDefault(ReleaseTracker.class, + "captureStackTraces", false); void reportAcquire(@NotNull final RESOURCE_TYPE resource); @@ -52,8 +52,7 @@ public boolean isMyType(final Class type) { }; class StrictReleaseTracker implements ReleaseTracker { - private static final StackTraceElement[] ZERO_ELEMENT_STACK_TRACE_ARRAY = - new StackTraceElement[0]; + private static final StackTraceElement[] ZERO_ELEMENT_STACK_TRACE_ARRAY = new StackTraceElement[0]; private final Map lastAcquireMap = new HashMap<>(); @@ -63,19 +62,17 @@ private static final class LastAcquireAndReleaseInfo { private final StackTraceElement[] lastRelease; private LastAcquireAndReleaseInfo(final StackTraceElement[] lastAcquire, - final StackTraceElement[] lastRelease) { + final StackTraceElement[] lastRelease) { this.lastAcquire = lastAcquire; this.lastRelease = lastRelease; } } - private final Map lastAcquireAndReleaseMap = - new WeakHashMap<>(); + private final Map lastAcquireAndReleaseMap = new WeakHashMap<>(); public final void reportAcquire(@NotNull final RESOURCE_TYPE resource) { final StackTraceElement[] stackTrace = - CAPTURE_STACK_TRACES ? Thread.currentThread().getStackTrace() - : ZERO_ELEMENT_STACK_TRACE_ARRAY; + CAPTURE_STACK_TRACES ? 
Thread.currentThread().getStackTrace() : ZERO_ELEMENT_STACK_TRACE_ARRAY; synchronized (this) { final StackTraceElement[] prev = lastAcquireMap.put(resource, stackTrace); if (prev != null) { @@ -87,20 +84,17 @@ public final void reportAcquire(@NotNull final RESOURCE_TYPE resource) { public final void reportRelease(@NotNull final RESOURCE_TYPE resource) { final StackTraceElement[] stackTrace = - CAPTURE_STACK_TRACES ? Thread.currentThread().getStackTrace() - : ZERO_ELEMENT_STACK_TRACE_ARRAY; + CAPTURE_STACK_TRACES ? Thread.currentThread().getStackTrace() : ZERO_ELEMENT_STACK_TRACE_ARRAY; synchronized (this) { final StackTraceElement[] prev = lastAcquireMap.remove(resource); if (prev != null) { - lastAcquireAndReleaseMap.put(resource, - new LastAcquireAndReleaseInfo(prev, stackTrace)); + lastAcquireAndReleaseMap.put(resource, new LastAcquireAndReleaseInfo(prev, stackTrace)); return; } - final LastAcquireAndReleaseInfo lastAcquireAndRelease = - lastAcquireAndReleaseMap.get(resource); + final LastAcquireAndReleaseInfo lastAcquireAndRelease = lastAcquireAndReleaseMap.get(resource); if (lastAcquireAndRelease != null) { - throw new AlreadyReleasedException(stackTrace, - lastAcquireAndRelease.lastAcquire, lastAcquireAndRelease.lastRelease); + throw new AlreadyReleasedException(stackTrace, lastAcquireAndRelease.lastAcquire, + lastAcquireAndRelease.lastRelease); } throw new UnmatchedAcquireException(stackTrace); } @@ -124,8 +118,7 @@ public final void check() { } class WeakReleaseTracker implements ReleaseTracker { - private final Map outstandingCookies = - Collections.synchronizedMap(new WeakHashMap<>()); + private final Map outstandingCookies = Collections.synchronizedMap(new WeakHashMap<>()); private final ReferenceQueue collectedCookies = new ReferenceQueue<>(); public final void check() { @@ -151,8 +144,7 @@ public final void reportRelease(@NotNull final RESOURCE_TYPE resource) { private final class Cookie extends WeakReference { - private UnmatchedAcquireException 
pendingAcquireException = - new UnmatchedAcquireException(); + private UnmatchedAcquireException pendingAcquireException = new UnmatchedAcquireException(); private boolean released = false; private Cookie(@NotNull final RESOURCE_TYPE referent) { @@ -225,12 +217,11 @@ private static String build(@NotNull final Collection leaks stackTrace.setLength(0); append(stackTrace, " ", leak); final String stackTraceString = stackTrace.toString(); - dupDetector.put(stackTraceString, - 1 + dupDetector.getOrDefault(stackTraceString, 0L)); + dupDetector.put(stackTraceString, 1 + dupDetector.getOrDefault(stackTraceString, 0L)); } - final StringBuilder sb = new StringBuilder("Leaked " + leaks.size() + " resources (" - + dupDetector.size() + " unique traces):\n"); + final StringBuilder sb = new StringBuilder( + "Leaked " + leaks.size() + " resources (" + dupDetector.size() + " unique traces):\n"); final MutableInt i = new MutableInt(); dupDetector.entrySet().stream().limit(maxUniqueTraces).forEach(entry -> { sb.append(" Leak #").append(i.intValue()); @@ -255,22 +246,21 @@ private LeakedException(@NotNull final Collection leaked) { } private LeakedException(final long numLeaks) { - super("Leaked " + numLeaks - + " resources. Enable `ReleaseTracker.captureStackTraces` to further debug."); + super("Leaked " + numLeaks + " resources. Enable `ReleaseTracker.captureStackTraces` to further debug."); } } class AlreadyAcquiredException extends RuntimeException { private static String build( - @NotNull final StackTraceElement[] newAcquire, - @NotNull final StackTraceElement[] existingAcquire) { + @NotNull final StackTraceElement[] newAcquire, + @NotNull final StackTraceElement[] existingAcquire) { if (newAcquire.length == 0) { return "Already acquired resource is being re-acquired without intervening release. 
Enable `ReleaseTracker.captureStackTraces` to further debug."; } - final StringBuilder sb = new StringBuilder( - "Already acquired resource is being re-acquired without intervening release:\n"); + final StringBuilder sb = + new StringBuilder("Already acquired resource is being re-acquired without intervening release:\n"); sb.append(" New acquire:\n"); append(sb, " ", newAcquire); sb.append(" Existing acquire:\n"); @@ -279,8 +269,8 @@ private static String build( } private AlreadyAcquiredException( - @NotNull final StackTraceElement[] newAcquire, - @NotNull final StackTraceElement[] existingAcquire) { + @NotNull final StackTraceElement[] newAcquire, + @NotNull final StackTraceElement[] existingAcquire) { super(build(newAcquire, existingAcquire)); } } @@ -288,13 +278,12 @@ private AlreadyAcquiredException( class AlreadyReleasedException extends RuntimeException { private static String build(@NotNull final StackTraceElement[] newRelease, - @NotNull final StackTraceElement[] lastAcquire, - @NotNull final StackTraceElement[] lastRelease) { + @NotNull final StackTraceElement[] lastAcquire, + @NotNull final StackTraceElement[] lastRelease) { if (newRelease.length == 0) { return "Already released resource is being re-released. 
Enable `ReleaseTracker.captureStackTraces` to further debug."; } - final StringBuilder sb = - new StringBuilder("Already released resource is being re-released:\n"); + final StringBuilder sb = new StringBuilder("Already released resource is being re-released:\n"); sb.append(" New release:\n"); append(sb, " ", newRelease); sb.append(" Last release:\n"); @@ -305,9 +294,9 @@ private static String build(@NotNull final StackTraceElement[] newRelease, } private AlreadyReleasedException( - @NotNull final StackTraceElement[] newRelease, - @NotNull final StackTraceElement[] lastAcquire, - @NotNull final StackTraceElement[] lastRelease) { + @NotNull final StackTraceElement[] newRelease, + @NotNull final StackTraceElement[] lastAcquire, + @NotNull final StackTraceElement[] lastRelease) { super(build(newRelease, lastAcquire, lastRelease)); } } diff --git a/Util/src/main/java/io/deephaven/util/datastructures/SegmentedSoftPool.java b/Util/src/main/java/io/deephaven/util/datastructures/SegmentedSoftPool.java index f0f4a57ff87..b8ca3178a02 100644 --- a/Util/src/main/java/io/deephaven/util/datastructures/SegmentedSoftPool.java +++ b/Util/src/main/java/io/deephaven/util/datastructures/SegmentedSoftPool.java @@ -12,8 +12,8 @@ /** *

      - * Re-usable data structure for a segmented stack of pooled elements which tries to strike a balance - * between GC-sensitivity and element reuse. + * Re-usable data structure for a segmented stack of pooled elements which tries to strike a balance between + * GC-sensitivity and element reuse. *

      * The pool is safe for multi-threaded use, but not highly-concurrent. */ @@ -42,25 +42,25 @@ public class SegmentedSoftPool { /** * Create a new pool with the supplied creation and cleanup procedures. * - * @param segmentCapacity The capacity of each segment of this pool. This is the unit of cleanup - * for the garbage collector. - * @param creationProcedure Creation procedure for new elements. If null, all elements must - * supplied via {@link #give(java.lang.Object)}. - * @param cleanupProcedure Cleanup procedure for returned elements. If null, no cleanup will be - * performed in {@link #give(java.lang.Object)}. + * @param segmentCapacity The capacity of each segment of this pool. This is the unit of cleanup for the garbage + * collector. + * @param creationProcedure Creation procedure for new elements. If null, all elements must be supplied via + * {@link #give(java.lang.Object)}. + * @param cleanupProcedure Cleanup procedure for returned elements. If null, no cleanup will be performed in + * {@link #give(java.lang.Object)}. */ public SegmentedSoftPool(final int segmentCapacity, - @Nullable final Supplier creationProcedure, - @Nullable final Consumer cleanupProcedure) { + @Nullable final Supplier creationProcedure, + @Nullable final Consumer cleanupProcedure) { this.segmentCapacity = Require.gtZero(segmentCapacity, "segmentCapacity"); this.creationProcedure = creationProcedure; this.cleanupProcedure = cleanupProcedure; } /** - * Take an element from the pool, or make a new one if the pool is exhausted and a creation - * procedure was supplied at pool construction time. The element belongs to the caller, and the - * caller may keep it rather than return it to the pool if desired. + * Take an element from the pool, or make a new one if the pool is exhausted and a creation procedure was supplied + * at pool construction time. The element belongs to the caller, and the caller may keep it rather than return it to + * the pool if desired. 
* * @return An element from the pool, possibly newly-constructed */ @@ -84,9 +84,9 @@ public final ELEMENT_TYPE take() { } /** - * Give an element to the pool. Neither the caller nor any other thread may interact with the - * element again until it has been returned by a subsequent call to {@link #take()}. The element - * will be cleaned if a cleanup procedure was provided at pool construction time. + * Give an element to the pool. Neither the caller nor any other thread may interact with the element again until it + * has been returned by a subsequent call to {@link #take()}. The element will be cleaned if a cleanup procedure was + * provided at pool construction time. * * @param element The element to give to the pool */ @@ -135,8 +135,7 @@ private void updateCurrentSegment(@NotNull final Segment newCurren */ private ELEMENT_TYPE maybeCreateElement() { if (creationProcedure == null) { - throw new UnsupportedOperationException( - "Pool exhausted and no creation procedure supplied"); + throw new UnsupportedOperationException("Pool exhausted and no creation procedure supplied"); } return creationProcedure.get(); } @@ -153,10 +152,9 @@ private void maybeCleanElement(@NotNull final ELEMENT_TYPE element) { } /** - * A Segment holds a very simple array-backed stack of available elements. It refers softly to - * the previous segment (if such exists and has not been collected), and strongly to the next - * segment (if such exists). The main pool structure only keeps a hard reference to the segment - * while operating on it - otherwise it + * A Segment holds a very simple array-backed stack of available elements. It refers softly to the previous segment + * (if such exists and has not been collected), and strongly to the next segment (if such exists). 
The main pool + * structure only keeps a hard reference to the segment while operating on it - otherwise it */ private static class Segment extends SoftReference> { @@ -166,8 +164,8 @@ private static class Segment extends SoftReference> selfReference; @@ -259,8 +257,8 @@ private void setNext(@NotNull final Segment other) { } /** - * Get the previous segment. This may return null either because there was no previous - * segment, or because all previous segments have been garbage collected. + * Get the previous segment. This may return null either because there was no previous segment, or because all + * previous segments have been garbage collected. * * @return The previous segment, or null if none such exists */ diff --git a/Util/src/main/java/io/deephaven/util/datastructures/SimpleReferenceManager.java b/Util/src/main/java/io/deephaven/util/datastructures/SimpleReferenceManager.java index 61d7693740c..673914b5cc7 100644 --- a/Util/src/main/java/io/deephaven/util/datastructures/SimpleReferenceManager.java +++ b/Util/src/main/java/io/deephaven/util/datastructures/SimpleReferenceManager.java @@ -14,8 +14,8 @@ import java.util.function.Predicate; /** - * A helper for manging a list of References. It hides the internal management of expired references - * and provides for iteration over the valid ones + * A helper for managing a list of References. It hides the internal management of expired references and provides for + * iteration over the valid ones */ public final class SimpleReferenceManager> { @@ -25,23 +25,20 @@ public final class SimpleReferenceManager> { /** * Create a SimpleReferenceManager, with {@link CopyOnWriteArrayList} as backing structure. 
* - * @param referenceFactory Factory to create references for added referents; should always make - * a unique reference + * @param referenceFactory Factory to create references for added referents; should always make a unique reference */ public SimpleReferenceManager(@NotNull final Function referenceFactory) { this(referenceFactory, true); } /** - * Create a SimpleReferenceManager, with either {@link ArrayList} or - * {@link CopyOnWriteArrayList} as backing structure. + * Create a SimpleReferenceManager, with either {@link ArrayList} or {@link CopyOnWriteArrayList} as backing + * structure. * - * @param referenceFactory Factory to create references for added referents; should always make - * a unique reference + * @param referenceFactory Factory to create references for added referents; should always make a unique reference * @param concurrent Use CopyOnWriteArrayList for internal storage if true, else ArrayList */ - public SimpleReferenceManager(@NotNull final Function referenceFactory, - final boolean concurrent) { + public SimpleReferenceManager(@NotNull final Function referenceFactory, final boolean concurrent) { this.referenceFactory = referenceFactory; references = concurrent ? new CopyOnWriteArrayList<>() : new ArrayList<>(); } @@ -58,8 +55,8 @@ public R add(@NotNull final T item) { } /** - * Remove item from the list if present according to reference equality ({@code ==}), and also - * any cleared references. + * Remove item from the list if present according to reference equality ({@code ==}), and also any cleared + * references. * * @param item the item to remove. * @return The item if it was removed, else null @@ -78,8 +75,7 @@ public void removeAll(@NotNull final Collection items) { } /** - * Retrieve all encountered items that satisfy a filter, while also removing any cleared - * references. + * Retrieve all encountered items that satisfy a filter, while also removing any cleared references. 
* * @param filter The filter to decide if a valid item should be removed * @return Whether we succeeded in removing anything @@ -107,8 +103,8 @@ public boolean removeIf(@NotNull final Predicate filter) { } /** - * Execute the provided procedure on each reference, item pair whose item is still reachable, - * while removing any cleared references. + * Execute the provided procedure on each reference, item pair whose item is still reachable, while removing any + * cleared references. * * @param consumer The function to call with each reachable pair */ @@ -132,14 +128,13 @@ public void forEach(@NotNull final BiConsumer consumer) { } /** - * Retrieve the first valid item that satisfies a filter. Remove any encountered cleared - * references as a side effect. + * Retrieve the first valid item that satisfies a filter. Remove any encountered cleared references as a side + * effect. * * @param filter The filter to decide if a valid item should be returned * @return The first valid item that passed the filter, or null if no such item exists */ - @SuppressWarnings("unused") // NB: Not used, yet. Needed in order to replace some instances of - // WeakReferenceManager. + @SuppressWarnings("unused") // NB: Not used, yet. Needed in order to replace some instances of WeakReferenceManager. public T getFirstItem(@NotNull final Predicate filter) { if (references.isEmpty()) { return null; @@ -163,8 +158,8 @@ public T getFirstItem(@NotNull final Predicate filter) { } /** - * Retrieve the first valid reference whose item satisfies a filter. Remove any encountered - * cleared references as a side effect. + * Retrieve the first valid reference whose item satisfies a filter. Remove any encountered cleared references as a + * side effect. 
* * @param filter The filter to decide if a valid item should be returned * @return The first valid item that passed the filter, or null if no such item exists @@ -211,12 +206,9 @@ private Deque maybeMakeRemovalDeque(@Nullable final Deque current) { if (current != null) { return current; } - // This is very unusual, in that contains only checks the head of the deque and removes it - // if it matches. - // This is particular to the use case in question, because the deque is used as an ordered - // subset of an ordered - // data structure for removing items by identity match. Don't try to use this deque for - // anything else and expect + // This is very unusual, in that contains only checks the head of the deque and removes it if it matches. + // This is particular to the use case in question, because the deque is used as an ordered subset of an ordered + // data structure for removing items by identity match. Don't try to use this deque for anything else and expect // you'll like the results. 
return new ArrayDeque() { @Override diff --git a/Util/src/main/java/io/deephaven/util/datastructures/SortedIndexableMapWrapper.java b/Util/src/main/java/io/deephaven/util/datastructures/SortedIndexableMapWrapper.java index 68acfd99bc7..399bd8c7b36 100644 --- a/Util/src/main/java/io/deephaven/util/datastructures/SortedIndexableMapWrapper.java +++ b/Util/src/main/java/io/deephaven/util/datastructures/SortedIndexableMapWrapper.java @@ -41,9 +41,8 @@ private synchronized List> getValuesList() { if (valueList != null) { return valueList; } - valueList = baseMap.entrySet().stream() - .sorted((e1, e2) -> comparator.compare(e1.getValue(), e2.getValue())) - .collect(Collectors.toCollection(ArrayList::new)); + valueList = baseMap.entrySet().stream().sorted((e1, e2) -> comparator.compare(e1.getValue(), e2.getValue())) + .collect(Collectors.toCollection(ArrayList::new)); return valueList; } @@ -105,8 +104,7 @@ public Set keySet() { @NotNull @Override public synchronized Collection values() { - return Collections.unmodifiableList( - getValuesList().stream().map(Entry::getValue).collect(Collectors.toList())); + return Collections.unmodifiableList(getValuesList().stream().map(Entry::getValue).collect(Collectors.toList())); } @NotNull @@ -122,7 +120,7 @@ public boolean equals(Object o) { return false; } return comparator.equals(((SortedIndexableMapWrapper) o).comparator) - && baseMap.equals(((SortedIndexableMapWrapper) o).baseMap); + && baseMap.equals(((SortedIndexableMapWrapper) o).baseMap); } @Override @@ -177,22 +175,19 @@ public synchronized V computeIfAbsent(K key, Function ma } @Override - public synchronized V computeIfPresent(K key, - BiFunction remappingFunction) { + public synchronized V computeIfPresent(K key, BiFunction remappingFunction) { clearList(); return baseMap.computeIfPresent(key, remappingFunction); } @Override - public synchronized V compute(K key, - BiFunction remappingFunction) { + public synchronized V compute(K key, BiFunction remappingFunction) { 
clearList(); return baseMap.compute(key, remappingFunction); } @Override - public synchronized V merge(K key, V value, - BiFunction remappingFunction) { + public synchronized V merge(K key, V value, BiFunction remappingFunction) { clearList(); return baseMap.merge(key, value, remappingFunction); } diff --git a/Util/src/main/java/io/deephaven/util/datastructures/SubscriptionSet.java b/Util/src/main/java/io/deephaven/util/datastructures/SubscriptionSet.java index 1694c07591a..2cf7e6e5b12 100644 --- a/Util/src/main/java/io/deephaven/util/datastructures/SubscriptionSet.java +++ b/Util/src/main/java/io/deephaven/util/datastructures/SubscriptionSet.java @@ -13,12 +13,11 @@ /** *

      - * Array-backed set that stores generic listener objects, only enforcing hard reachability on - * listeners that are SubstitutionWeakReferences. + * Array-backed set that stores generic listener objects, only enforcing hard reachability on listeners that are + * SubstitutionWeakReferences. *

      *

      - * All public operations (except clear and isEmpty) are O(n). All comparisons are based on object - * reference equality. + * All public operations (except clear and isEmpty) are O(n). All comparisons are based on object reference equality. *

      *

      * Requires external synchronization for thread safe usage, except where otherwise noted. @@ -35,8 +34,8 @@ public class Entry { private boolean active; private Entry(final LISTENER_TYPE listener) { - this.listenerReference = WeakReferenceWrapper - .maybeCreateWeakReference(Require.neqNull(listener, "listener")); + this.listenerReference = + WeakReferenceWrapper.maybeCreateWeakReference(Require.neqNull(listener, "listener")); } private LISTENER_TYPE getListener() { @@ -91,8 +90,7 @@ public final boolean collect() { for (int si = 0; si < size;) { if (subscriptions[si].getListener() == null) { removeAt(si); - continue; // si is not incremented in this case - we'll reconsider the same slot if - // necessary. + continue; // si is not incremented in this case - we'll reconsider the same slot if necessary. } ++si; } @@ -100,8 +98,8 @@ public final boolean collect() { } /** - * Make an entry for a listener, in order to pass it to {@link #add(Object, Entry)}. May be - * called without holding any locks. + * Make an entry for a listener, in order to pass it to {@link #add(Object, Entry)}. May be called without holding + * any locks. * * @param listener The listener * @return A new entry for the listener @@ -128,8 +126,7 @@ public final boolean add(final @NotNull LISTENER_TYPE listener, final @NotNull E final LISTENER_TYPE currentListener = currentEntry.getListener(); if (currentListener == null) { removeAt(si); - continue; // si is not incremented in this case - we'll reconsider the same slot if - // necessary. + continue; // si is not incremented in this case - we'll reconsider the same slot if necessary. 
} if (currentEntry == entry || currentListener == listener) { found = true; @@ -158,8 +155,7 @@ public final boolean remove(final LISTENER_TYPE listener) { final LISTENER_TYPE currentListener = subscriptions[si].getListener(); if (currentListener == null || currentListener == listener) { removeAt(si); - continue; // si is not incremented in this case - we'll reconsider the same slot if - // necessary. + continue; // si is not incremented in this case - we'll reconsider the same slot if necessary. } ++si; } @@ -173,17 +169,15 @@ public final boolean remove(final LISTENER_TYPE listener) { * @param activeOnly Whether to restrict this notification to active subscriptions only * @return Whether this operation caused the set to become empty */ - public final boolean deliverNotification( - @NotNull final Procedure.Unary procedure, - final boolean activeOnly) { + public final boolean deliverNotification(@NotNull final Procedure.Unary procedure, + final boolean activeOnly) { final int initialSize = size; for (int si = 0; si < size;) { final Entry currentEntry = subscriptions[si]; final LISTENER_TYPE currentListener = currentEntry.getListener(); if (currentListener == null) { removeAt(si); - continue; // si is not incremented in this case - we'll reconsider the same slot if - // necessary. + continue; // si is not incremented in this case - we'll reconsider the same slot if necessary. 
} if (!activeOnly || currentEntry.isActive()) { procedure.call(currentListener); @@ -202,17 +196,16 @@ public final boolean deliverNotification( * @return Whether this operation caused the set to become empty */ public final boolean deliverNotification( - @NotNull final Procedure.Binary procedure, - @Nullable final NOTIFICATION_TYPE notification, - final boolean activeOnly) { + @NotNull final Procedure.Binary procedure, + @Nullable final NOTIFICATION_TYPE notification, + final boolean activeOnly) { final int initialSize = size; for (int si = 0; si < size;) { final Entry currentEntry = subscriptions[si]; final LISTENER_TYPE currentListener = currentEntry.getListener(); if (currentListener == null) { removeAt(si); - continue; // si is not incremented in this case - we'll reconsider the same slot if - // necessary. + continue; // si is not incremented in this case - we'll reconsider the same slot if necessary. } if (!activeOnly || currentEntry.isActive()) { procedure.call(currentListener, notification); diff --git a/Util/src/main/java/io/deephaven/util/datastructures/WeakIdentityHashSet.java b/Util/src/main/java/io/deephaven/util/datastructures/WeakIdentityHashSet.java index b606661f897..466661def81 100644 --- a/Util/src/main/java/io/deephaven/util/datastructures/WeakIdentityHashSet.java +++ b/Util/src/main/java/io/deephaven/util/datastructures/WeakIdentityHashSet.java @@ -8,8 +8,8 @@ import java.util.function.Consumer; /** - * An open-addressed identity hash set that only maintains weak references to its members. Only - * supports {@link #add} and {@link #clear}. See {@link Synchronized} variant for concurrent usage. + * An open-addressed identity hash set that only maintains weak references to its members. Only supports {@link #add} + * and {@link #clear}. See {@link Synchronized} variant for concurrent usage. 
*/ public class WeakIdentityHashSet { @@ -18,8 +18,7 @@ public class WeakIdentityHashSet { private static final WeakReference DELETED_REFERENCE = new DeletedReference(); private WeakReference[] storage; - private int usedSlots; // The number of slots that *may* contain a valid reference (not exactly - // "size") + private int usedSlots; // The number of slots that *may* contain a valid reference (not exactly "size") private int freeSlots; private int rehashThreshold; @@ -36,14 +35,12 @@ private WeakIdentityHashSet(final int initialCapacity, final boolean exact) { * Initialize the set with the new capacity. * * @param capacity The new capacity - * @param exact Whether the capacity is the exact requirement, or should be adjusted for load - * factor and primeness + * @param exact Whether the capacity is the exact requirement, or should be adjusted for load factor and primeness */ private void initialize(final int capacity, final boolean exact) { storage = null; usedSlots = 0; - freeSlots = - exact ? capacity : PrimeFinder.nextPrime((int) Math.ceil(capacity / LOAD_FACTOR)); + freeSlots = exact ? capacity : PrimeFinder.nextPrime((int) Math.ceil(capacity / LOAD_FACTOR)); rehashThreshold = Math.min(freeSlots - 1, (int) Math.floor(freeSlots * LOAD_FACTOR)); } @@ -55,24 +52,20 @@ private void updateAccountingForInsert(final boolean usedFreeSlot) { --freeSlots; } - // Follows the same policies as io.deephaven.base.hash.KHash, as we may very well want to - // take advantage of the + // Follows the same policies as io.deephaven.base.hash.KHash, as we may very well want to take advantage of the // guarantees they provide in the future. if (++usedSlots > rehashThreshold || freeSlots == 1) { // If we've grown beyond our maximum usedSlots, double capacity. - // If we've exhausted the free spots, rehash to the same capacity, freeing up removed - // slots. + // If we've exhausted the free spots, rehash to the same capacity, freeing up removed slots. 
final int newCapacity = - usedSlots > rehashThreshold ? PrimeFinder.nextPrime(storage.length << 1) - : storage.length; + usedSlots > rehashThreshold ? PrimeFinder.nextPrime(storage.length << 1) : storage.length; rehash(newCapacity); } } /** - * Mark a slot deleted (either because we removed the value there, or because it was found to - * have a cleared reference). This allows us to update usedSlots in order to avoid rehash calls - * in some cases. + * Mark a slot deleted (either because we removed the value there, or because it was found to have a cleared + * reference). This allows us to update usedSlots in order to avoid rehash calls in some cases. * * @param slot The slot index to mark */ @@ -92,7 +85,7 @@ private void rehash(final int newCapacity) { for (WeakReference valueReference : storage) { final TYPE value; if (valueReference != null && valueReference != DELETED_REFERENCE - && (value = valueReference.get()) != null) { + && (value = valueReference.get()) != null) { other.add(value, valueReference); } } @@ -126,8 +119,7 @@ public boolean add(@NotNull final TYPE value) { * @param valueReference A re-usable WeakReference to value if already available, else null * @return True if the value was added to the set */ - public boolean add(@NotNull final TYPE value, - @Nullable final WeakReference valueReference) { + public boolean add(@NotNull final TYPE value, @Nullable final WeakReference valueReference) { if (storage == null) { // noinspection unchecked storage = new WeakReference[freeSlots]; // usedSlots == 0, freeSlots == capacity @@ -144,7 +136,7 @@ public boolean add(@NotNull final TYPE value, final WeakReference candidateReference = storage[candidateSlot]; if (candidateReference == null) { storage[foundDeletedSlot ? firstDeletedSlot : candidateSlot] = - valueReference == null ? new WeakReference<>(value) : valueReference; + valueReference == null ? 
new WeakReference<>(value) : valueReference; updateAccountingForInsert(!foundDeletedSlot); return true; } @@ -184,7 +176,7 @@ public void forEach(@NotNull final Consumer action) { return; } for (int slotIndex = 0, usedSlotsConsumed = 0; slotIndex < storage.length - && usedSlotsConsumed < usedSlots; ++slotIndex) { + && usedSlotsConsumed < usedSlots; ++slotIndex) { final WeakReference memberReference = storage[slotIndex]; if (memberReference == null || memberReference == DELETED_REFERENCE) { continue; @@ -217,7 +209,7 @@ public synchronized boolean add(@NotNull final TYPE value) { @Override public synchronized boolean add(@NotNull final TYPE value, - @Nullable final WeakReference valueReference) { + @Nullable final WeakReference valueReference) { return super.add(value, valueReference); } diff --git a/Util/src/main/java/io/deephaven/util/datastructures/cache/ArrayBackedOffsetLookupCache.java b/Util/src/main/java/io/deephaven/util/datastructures/cache/ArrayBackedOffsetLookupCache.java index 11ad8dbfbf9..7748ee4e178 100644 --- a/Util/src/main/java/io/deephaven/util/datastructures/cache/ArrayBackedOffsetLookupCache.java +++ b/Util/src/main/java/io/deephaven/util/datastructures/cache/ArrayBackedOffsetLookupCache.java @@ -7,14 +7,14 @@ import java.lang.reflect.Array; /** - * Caching data structure for caching int (offset) to Object mappings. For use when lookup is - * expensive but idempotent, and the range of offset indexes is relatively contiguous. This is only - * suitable for lookup functions that return fully-initialized, immutable objects (or null). + * Caching data structure for caching int (offset) to Object mappings. For use when lookup is expensive but idempotent, + * and the range of offset indexes is relatively contiguous. This is only suitable for lookup functions that return + * fully-initialized, immutable objects (or null). *

      * This implementation stores data in a single contiguous array. */ public class ArrayBackedOffsetLookupCache - extends BaseOffsetLookupCache { + extends BaseOffsetLookupCache { /** * The cached index to value mappings for this column source. @@ -22,15 +22,15 @@ public class ArrayBackedOffsetLookupCache private volatile VALUE_TYPE[] cachedValues; /** - * Construct a lookup cache for the supplied arguments, using - * {@link OffsetLookupCache#createPlaceholder(Class)} to create a "null" placeholder value. + * Construct a lookup cache for the supplied arguments, using {@link OffsetLookupCache#createPlaceholder(Class)} to + * create a "null" placeholder value. * * @param valueType The value type - * @param lookupFunction The lookup function from index to value, must return a - * fully-initialized, immutable object or null + * @param lookupFunction The lookup function from index to value, must return a fully-initialized, immutable object + * or null */ public ArrayBackedOffsetLookupCache(@NotNull final Class valueType, - @NotNull final OffsetLookup lookupFunction) { + @NotNull final OffsetLookup lookupFunction) { this(valueType, lookupFunction, OffsetLookupCache.createPlaceholder(valueType)); } @@ -38,14 +38,13 @@ public ArrayBackedOffsetLookupCache(@NotNull final Class valueType, * Construct a lookup cache for the supplied arguments. instance. 
* * @param valueType The value type - * @param lookupFunction The lookup function from index to value, must return a - * fully-initialized, immutable object or null - * @param nullValue The "null" placeholder value, stored internally whenever lookupFunction - * returns null + * @param lookupFunction The lookup function from index to value, must return a fully-initialized, immutable object + * or null + * @param nullValue The "null" placeholder value, stored internally whenever lookupFunction returns null */ private ArrayBackedOffsetLookupCache(@NotNull final Class valueType, - @NotNull final OffsetLookup lookupFunction, - @NotNull final VALUE_TYPE nullValue) { + @NotNull final OffsetLookup lookupFunction, + @NotNull final VALUE_TYPE nullValue) { super(lookupFunction, nullValue, valueType); // noinspection unchecked @@ -61,8 +60,7 @@ public VALUE_TYPE get(final int index, final EXTRA_INPUT_TYPE extra) { if (index >= localCachedValues.length || (value = localCachedValues[index]) == null) { synchronized (this) { - cachedValues = - localCachedValues = ArrayUtil.ensureSize(cachedValues, index + 1, valueType); + cachedValues = localCachedValues = ArrayUtil.ensureSize(cachedValues, index + 1, valueType); if ((value = localCachedValues[index]) == null) { value = lookupFunction.lookup(index, extra); localCachedValues[index] = value == null ? nullValue : value; diff --git a/Util/src/main/java/io/deephaven/util/datastructures/cache/BaseOffsetLookupCache.java b/Util/src/main/java/io/deephaven/util/datastructures/cache/BaseOffsetLookupCache.java index 0a97619c68d..981227f237c 100644 --- a/Util/src/main/java/io/deephaven/util/datastructures/cache/BaseOffsetLookupCache.java +++ b/Util/src/main/java/io/deephaven/util/datastructures/cache/BaseOffsetLookupCache.java @@ -7,7 +7,7 @@ * Base class for offset-lookup cache implementations. */ abstract class BaseOffsetLookupCache - implements OffsetLookupCache { + implements OffsetLookupCache { /** * The type of the values in the cache. 
@@ -20,14 +20,14 @@ abstract class BaseOffsetLookupCache final OffsetLookup lookupFunction; /** - * This VALUE_TYPE object represents a value that has been looked up and determined to be null, - * as opposed to a value that has been reclaimed. + * This VALUE_TYPE object represents a value that has been looked up and determined to be null, as opposed to a + * value that has been reclaimed. */ final VALUE_TYPE nullValue; BaseOffsetLookupCache(@NotNull final OffsetLookup lookupFunction, - @NotNull final VALUE_TYPE nullValue, - @NotNull final Class valueType) { + @NotNull final VALUE_TYPE nullValue, + @NotNull final Class valueType) { this.lookupFunction = Require.neqNull(lookupFunction, "lookupFunction"); this.nullValue = Require.neqNull(nullValue, "nullValue"); this.valueType = Require.neqNull(valueType, "valueType"); diff --git a/Util/src/main/java/io/deephaven/util/datastructures/cache/BoundedIntrusiveMappingCache.java b/Util/src/main/java/io/deephaven/util/datastructures/cache/BoundedIntrusiveMappingCache.java index 6a279a23a64..abe9fa165e7 100644 --- a/Util/src/main/java/io/deephaven/util/datastructures/cache/BoundedIntrusiveMappingCache.java +++ b/Util/src/main/java/io/deephaven/util/datastructures/cache/BoundedIntrusiveMappingCache.java @@ -15,24 +15,23 @@ import java.util.function.ToLongFunction; /** - * An LRU mapping cache that relies on an queue of intrusively doubly-linked nodes for keeping track - * of eviction policy details. + * An LRU mapping cache that relies on an queue of intrusively doubly-linked nodes for keeping track of eviction policy + * details. */ public abstract class BoundedIntrusiveMappingCache> { final int maximumCachedMappings; - // TODO: Swap this Map out for a data structure with doubly-linked bucket lists, to enable - // eviction with no extra lookup. + // TODO: Swap this Map out for a data structure with doubly-linked bucket lists, to enable eviction with no extra + // lookup. 
final KeyedObjectIntrusiveChainedHashMap mappingCache; final IntrusiveDoublyLinkedQueue evictionQueue; private BoundedIntrusiveMappingCache(final int maximumCachedMappings) { this.maximumCachedMappings = Require.gtZero(maximumCachedMappings, "maximumCachedMappings"); - mappingCache = new KeyedObjectIntrusiveChainedHashMap( - MappingCacheAdapter.getInstance(), MappingKey.getInstance()); - evictionQueue = - new IntrusiveDoublyLinkedQueue(EvictionQueueAdapter.getInstance()); + mappingCache = new KeyedObjectIntrusiveChainedHashMap(MappingCacheAdapter.getInstance(), + MappingKey.getInstance()); + evictionQueue = new IntrusiveDoublyLinkedQueue(EvictionQueueAdapter.getInstance()); } // ================================================================================================================== @@ -73,8 +72,7 @@ private void initialize(final KEY_TYPE key, final short value) { } } - private static class IntegerMapping - extends Mapping> { + private static class IntegerMapping extends Mapping> { private int value; @@ -104,8 +102,7 @@ private void initialize(final KEY_TYPE key, final float value) { } } - private static class DoubleMapping - extends Mapping> { + private static class DoubleMapping extends Mapping> { private double value; @@ -115,8 +112,7 @@ private void initialize(final KEY_TYPE key, final double value) { } } - private static class CharacterMapping - extends Mapping> { + private static class CharacterMapping extends Mapping> { private char value; @@ -127,7 +123,7 @@ private void initialize(final KEY_TYPE key, final char value) { } private static class ObjectMapping - extends Mapping> { + extends Mapping> { private VALUE_TYPE value; @@ -145,7 +141,7 @@ private void initialize(final KEY_TYPE key, final VALUE_TYPE value) { * Key definition for Mappings. 
*/ private static class MappingKey> - extends KeyedObjectKey.Basic { + extends KeyedObjectKey.Basic { private static final MappingKey INSTANCE = new MappingKey<>(); @@ -164,7 +160,7 @@ public KEY_TYPE getKey(final MAPPING_TYPE mapping) { * Adapter for the mapping cache. */ private static class MappingCacheAdapter> - implements IntrusiveChainedHashAdapter { + implements IntrusiveChainedHashAdapter { private static final MappingCacheAdapter INSTANCE = new MappingCacheAdapter<>(); @@ -188,7 +184,7 @@ public void setNext(@NotNull final MAPPING_TYPE self, final MAPPING_TYPE next) { * Adapter for the eviction queue. */ private static class EvictionQueueAdapter> - implements IntrusiveDoublyLinkedStructureBase.Adapter { + implements IntrusiveDoublyLinkedStructureBase.Adapter { private static final EvictionQueueAdapter INSTANCE = new EvictionQueueAdapter<>(); @@ -250,16 +246,14 @@ public interface ToCharacterFunction { // ================================================================================================================== @SuppressWarnings("unused") - public static class ByteImpl - extends BoundedIntrusiveMappingCache> { + public static class ByteImpl extends BoundedIntrusiveMappingCache> { @SuppressWarnings("WeakerAccess") public ByteImpl(final int maximumCachedMappings) { super(maximumCachedMappings); } - public byte computeIfAbsent(final KEY_TYPE key, - @NotNull final ToByteFunction mapper) { + public byte computeIfAbsent(final KEY_TYPE key, @NotNull final ToByteFunction mapper) { ByteMapping mapping = mappingCache.get(key); if (mapping == null) { final byte value = mapper.applyAsByte(key); @@ -280,16 +274,14 @@ public byte computeIfAbsent(final KEY_TYPE key, } @SuppressWarnings("unused") - public static class ShortImpl - extends BoundedIntrusiveMappingCache> { + public static class ShortImpl extends BoundedIntrusiveMappingCache> { @SuppressWarnings("WeakerAccess") public ShortImpl(final int maximumCachedMappings) { super(maximumCachedMappings); } - public 
short computeIfAbsent(final KEY_TYPE key, - @NotNull final ToShortFunction mapper) { + public short computeIfAbsent(final KEY_TYPE key, @NotNull final ToShortFunction mapper) { ShortMapping mapping = mappingCache.get(key); if (mapping == null) { final short value = mapper.applyAsShort(key); @@ -315,16 +307,14 @@ public short computeIfAbsent(final KEY_TYPE key, * @param */ @SuppressWarnings("unused") - public static class IntegerImpl - extends BoundedIntrusiveMappingCache> { + public static class IntegerImpl extends BoundedIntrusiveMappingCache> { @SuppressWarnings("WeakerAccess") public IntegerImpl(final int maximumCachedMappings) { super(maximumCachedMappings); } - public int computeIfAbsent(final KEY_TYPE key, - @NotNull final ToIntFunction mapper) { + public int computeIfAbsent(final KEY_TYPE key, @NotNull final ToIntFunction mapper) { IntegerMapping mapping = mappingCache.get(key); if (mapping == null) { final int value = mapper.applyAsInt(key); @@ -350,15 +340,14 @@ public int computeIfAbsent(final KEY_TYPE key, * @param */ public static class FifoIntegerImpl - extends BoundedIntrusiveMappingCache> { + extends BoundedIntrusiveMappingCache> { @SuppressWarnings("WeakerAccess") public FifoIntegerImpl(final int maximumCachedMappings) { super(maximumCachedMappings); } - public int computeIfAbsent(final KEY_TYPE key, - @NotNull final ToIntFunction mapper) { + public int computeIfAbsent(final KEY_TYPE key, @NotNull final ToIntFunction mapper) { IntegerMapping mapping = mappingCache.get(key); if (mapping == null) { final int value = mapper.applyAsInt(key); @@ -382,16 +371,14 @@ public void clear() { } @SuppressWarnings("unused") - public static class LongImpl - extends BoundedIntrusiveMappingCache> { + public static class LongImpl extends BoundedIntrusiveMappingCache> { @SuppressWarnings("WeakerAccess") public LongImpl(final int maximumCachedMappings) { super(maximumCachedMappings); } - public long computeIfAbsent(final KEY_TYPE key, - @NotNull final ToLongFunction 
mapper) { + public long computeIfAbsent(final KEY_TYPE key, @NotNull final ToLongFunction mapper) { LongMapping mapping = mappingCache.get(key); if (mapping == null) { final long value = mapper.applyAsLong(key); @@ -412,16 +399,14 @@ public long computeIfAbsent(final KEY_TYPE key, } @SuppressWarnings("unused") - public static class FloatImpl - extends BoundedIntrusiveMappingCache> { + public static class FloatImpl extends BoundedIntrusiveMappingCache> { @SuppressWarnings("WeakerAccess") public FloatImpl(final int maximumCachedMappings) { super(maximumCachedMappings); } - public float computeIfAbsent(final KEY_TYPE key, - @NotNull final ToFloatFunction mapper) { + public float computeIfAbsent(final KEY_TYPE key, @NotNull final ToFloatFunction mapper) { FloatMapping mapping = mappingCache.get(key); if (mapping == null) { final float value = mapper.applyAsFloat(key); @@ -442,16 +427,14 @@ public float computeIfAbsent(final KEY_TYPE key, } @SuppressWarnings("unused") - public static class DoubleImpl - extends BoundedIntrusiveMappingCache> { + public static class DoubleImpl extends BoundedIntrusiveMappingCache> { @SuppressWarnings("WeakerAccess") public DoubleImpl(final int maximumCachedMappings) { super(maximumCachedMappings); } - public double computeIfAbsent(final KEY_TYPE key, - @NotNull final ToDoubleFunction mapper) { + public double computeIfAbsent(final KEY_TYPE key, @NotNull final ToDoubleFunction mapper) { DoubleMapping mapping = mappingCache.get(key); if (mapping == null) { final double value = mapper.applyAsDouble(key); @@ -473,15 +456,14 @@ public double computeIfAbsent(final KEY_TYPE key, @SuppressWarnings("unused") public static class CharacterImpl - extends BoundedIntrusiveMappingCache> { + extends BoundedIntrusiveMappingCache> { @SuppressWarnings("WeakerAccess") public CharacterImpl(final int maximumCachedMappings) { super(maximumCachedMappings); } - public char computeIfAbsent(final KEY_TYPE key, - @NotNull final ToCharacterFunction mapper) { + public 
char computeIfAbsent(final KEY_TYPE key, @NotNull final ToCharacterFunction mapper) { CharacterMapping mapping = mappingCache.get(key); if (mapping == null) { final char value = mapper.applyAsCharacter(key); @@ -503,15 +485,14 @@ public char computeIfAbsent(final KEY_TYPE key, @SuppressWarnings("unused") public static class ObjectImpl - extends BoundedIntrusiveMappingCache> { + extends BoundedIntrusiveMappingCache> { @SuppressWarnings("WeakerAccess") public ObjectImpl(final int maximumCachedMappings) { super(maximumCachedMappings); } - public VALUE_TYPE computeIfAbsent(final KEY_TYPE key, - @NotNull final Function mapper) { + public VALUE_TYPE computeIfAbsent(final KEY_TYPE key, @NotNull final Function mapper) { ObjectMapping mapping = mappingCache.get(key); if (mapping == null) { final VALUE_TYPE value = mapper.apply(key); @@ -559,41 +540,32 @@ public static String getCacheDeclarationType(final Class keyType, final Class return "BoundedIntrusiveMappingCache.CharacterImpl<" + keyClassName + '>'; } final String valueClassName = TypeUtils.getBoxedType(valueType).getCanonicalName(); - return "BoundedIntrusiveMappingCache.ObjectImpl<" + keyClassName + ',' + valueClassName - + '>'; + return "BoundedIntrusiveMappingCache.ObjectImpl<" + keyClassName + ',' + valueClassName + '>'; } @SuppressWarnings("SameParameterValue") - public static String getCacheInitializer(final Class valueType, - final String maximumCachedMappingsString) { + public static String getCacheInitializer(final Class valueType, final String maximumCachedMappingsString) { if (valueType == byte.class || valueType == Byte.class) { - return "new BoundedIntrusiveMappingCache.ByteImpl<>(" + maximumCachedMappingsString - + ")"; + return "new BoundedIntrusiveMappingCache.ByteImpl<>(" + maximumCachedMappingsString + ")"; } if (valueType == short.class || valueType == Short.class) { - return "new BoundedIntrusiveMappingCache.ShortImpl<>(" + maximumCachedMappingsString - + ")"; + return "new 
BoundedIntrusiveMappingCache.ShortImpl<>(" + maximumCachedMappingsString + ")"; } if (valueType == int.class || valueType == Integer.class) { - return "new BoundedIntrusiveMappingCache.IntegerImpl<>(" + maximumCachedMappingsString - + ")"; + return "new BoundedIntrusiveMappingCache.IntegerImpl<>(" + maximumCachedMappingsString + ")"; } if (valueType == long.class || valueType == Long.class) { - return "new BoundedIntrusiveMappingCache.LongImpl<>(" + maximumCachedMappingsString - + ")"; + return "new BoundedIntrusiveMappingCache.LongImpl<>(" + maximumCachedMappingsString + ")"; } if (valueType == float.class || valueType == Float.class) { - return "new BoundedIntrusiveMappingCache.FloatImpl<>(" + maximumCachedMappingsString - + ")"; + return "new BoundedIntrusiveMappingCache.FloatImpl<>(" + maximumCachedMappingsString + ")"; } if (valueType == double.class || valueType == Double.class) { - return "new BoundedIntrusiveMappingCache.DoubleImpl<>(" + maximumCachedMappingsString - + ")"; + return "new BoundedIntrusiveMappingCache.DoubleImpl<>(" + maximumCachedMappingsString + ")"; } if (valueType == char.class || valueType == Character.class) { - return "new BoundedIntrusiveMappingCache.CharacterImpl<>(" + maximumCachedMappingsString - + ")"; + return "new BoundedIntrusiveMappingCache.CharacterImpl<>(" + maximumCachedMappingsString + ")"; } return "new BoundedIntrusiveMappingCache.ObjectImpl<>(" + maximumCachedMappingsString + ")"; } diff --git a/Util/src/main/java/io/deephaven/util/datastructures/cache/OffsetLookup.java b/Util/src/main/java/io/deephaven/util/datastructures/cache/OffsetLookup.java index a164bd35552..b7e07dda1f8 100644 --- a/Util/src/main/java/io/deephaven/util/datastructures/cache/OffsetLookup.java +++ b/Util/src/main/java/io/deephaven/util/datastructures/cache/OffsetLookup.java @@ -1,8 +1,7 @@ package io.deephaven.util.datastructures.cache; /** - * Lookup function interface for {@link OffsetLookupCache}s and {@link ReverseOffsetLookupCache}s to - * use. 
+ * Lookup function interface for {@link OffsetLookupCache}s and {@link ReverseOffsetLookupCache}s to use. */ @FunctionalInterface public interface OffsetLookup { diff --git a/Util/src/main/java/io/deephaven/util/datastructures/cache/OffsetLookupCache.java b/Util/src/main/java/io/deephaven/util/datastructures/cache/OffsetLookupCache.java index 5bf3a70c25a..f153ac0b379 100644 --- a/Util/src/main/java/io/deephaven/util/datastructures/cache/OffsetLookupCache.java +++ b/Util/src/main/java/io/deephaven/util/datastructures/cache/OffsetLookupCache.java @@ -21,8 +21,8 @@ public interface OffsetLookupCache { void clear(); /** - * Attempt to instantiate an instance of valueType with the nullary constructor, in order to - * create a placeholder instance reference. + * Attempt to instantiate an instance of valueType with the nullary constructor, in order to create a placeholder + * instance reference. * * @param valueType The type of the desired placeholder * @return A placeholder instance reference diff --git a/Util/src/main/java/io/deephaven/util/datastructures/cache/ReverseOffsetLookupCache.java b/Util/src/main/java/io/deephaven/util/datastructures/cache/ReverseOffsetLookupCache.java index 4012ef8a1cd..2c8f5e15407 100644 --- a/Util/src/main/java/io/deephaven/util/datastructures/cache/ReverseOffsetLookupCache.java +++ b/Util/src/main/java/io/deephaven/util/datastructures/cache/ReverseOffsetLookupCache.java @@ -12,23 +12,21 @@ import java.util.function.ToIntFunction; /** - * Caching data structure interface for caching Object to int (offset) mappings returned by an - * expensive, idempotent lookup function from int to Object. + * Caching data structure interface for caching Object to int (offset) mappings returned by an expensive, idempotent + * lookup function from int to Object. 
*/ -public class ReverseOffsetLookupCache - implements ToIntFunction { +public class ReverseOffsetLookupCache implements ToIntFunction { private static final int NULL_INDEX = -1; private final OffsetLookup lookupFunction; private final Map> reverseLookup = - new KeyedObjectHashMap<>(getKeyDefinition()); + new KeyedObjectHashMap<>(getKeyDefinition()); private volatile int highestKeyChecked = -1; - public ReverseOffsetLookupCache( - @NotNull final OffsetLookup lookupFunction) { + public ReverseOffsetLookupCache(@NotNull final OffsetLookup lookupFunction) { this.lookupFunction = Require.neqNull(lookupFunction, "lookupFunction"); } @@ -37,12 +35,11 @@ public ReverseOffsetLookupCache( * * @param highestIndexNeeded The highest index needed for this operation */ - public void ensurePopulated(final int highestIndexNeeded, - @NotNull final Supplier extraFactory, - @Nullable Consumer extraCleanup) { + public void ensurePopulated(final int highestIndexNeeded, @NotNull final Supplier extraFactory, + @Nullable Consumer extraCleanup) { if (highestIndexNeeded > highestKeyChecked) { - synchronized (reverseLookup) { // Only let one thread through here at a time, to avoid - // contention and redundant work. + synchronized (reverseLookup) { // Only let one thread through here at a time, to avoid contention and + // redundant work. if (highestIndexNeeded > highestKeyChecked) { final EXTRA_INPUT_TYPE extra = extraFactory.get(); try { @@ -64,8 +61,8 @@ public void ensurePopulated(final int highestIndexNeeded, } /** - * Get the index of value in reverse lookup cache. Be sure to call - * {@link #ensurePopulated(int, Supplier, Consumer)} for the appropriate index bound, first. + * Get the index of value in reverse lookup cache. Be sure to call {@link #ensurePopulated(int, Supplier, Consumer)} + * for the appropriate index bound, first. 
* * @param value The value to look up * @return The index of value in the cache, or {@link #NULL_INDEX} (-1) if not found @@ -87,8 +84,7 @@ public void clear() { } /** - * Implements a pair from a value in the range of the lookup function to the index at which it - * occurs. + * Implements a pair from a value in the range of the lookup function to the index at which it occurs. * * @param */ @@ -109,7 +105,7 @@ private CachedMapping(final VALUE_TYPE value, final int index) { * @param */ private static class CachedMappingKeyDef - extends KeyedObjectKey.Basic> { + extends KeyedObjectKey.Basic> { private static final KeyedObjectKey.Basic INSTANCE = new CachedMappingKeyDef(); diff --git a/Util/src/main/java/io/deephaven/util/datastructures/cache/SoftArrayBackedOffsetLookupCache.java b/Util/src/main/java/io/deephaven/util/datastructures/cache/SoftArrayBackedOffsetLookupCache.java index 01ca2e83028..0290f540741 100644 --- a/Util/src/main/java/io/deephaven/util/datastructures/cache/SoftArrayBackedOffsetLookupCache.java +++ b/Util/src/main/java/io/deephaven/util/datastructures/cache/SoftArrayBackedOffsetLookupCache.java @@ -8,15 +8,15 @@ import java.lang.reflect.Array; /** - * Caching data structure for caching int (offset) to Object mappings. For use when lookup is - * expensive but idempotent, and the range of offset indexes is relatively contiguous. This is only - * suitable for lookup functions that return fully-initialized, immutable objects (or null). + * Caching data structure for caching int (offset) to Object mappings. For use when lookup is expensive but idempotent, + * and the range of offset indexes is relatively contiguous. This is only suitable for lookup functions that return + * fully-initialized, immutable objects (or null). *

      - * This implementation stores data in an array of softly-reachable arrays, to enable unused regions - * to be reclaimed under memory pressure. + * This implementation stores data in an array of softly-reachable arrays, to enable unused regions to be reclaimed + * under memory pressure. */ public class SoftArrayBackedOffsetLookupCache - extends BaseOffsetLookupCache { + extends BaseOffsetLookupCache { private static final int LOG2_BLOCK_SIZE = 12; private static final int BLOCK_SIZE = 1 << LOG2_BLOCK_SIZE; @@ -36,15 +36,15 @@ private static int INDEX_TO_SUB_BLOCK_INDEX(final int index) { private volatile SoftReference[] cachedValues; /** - * Construct a lookup cache for the supplied arguments, using - * {@link OffsetLookupCache#createPlaceholder(Class)} to create a "null" placeholder value. + * Construct a lookup cache for the supplied arguments, using {@link OffsetLookupCache#createPlaceholder(Class)} to + * create a "null" placeholder value. * * @param valueType The value type - * @param lookupFunction The lookup function from index to value, must return a - * fully-initialized, immutable object or null + * @param lookupFunction The lookup function from index to value, must return a fully-initialized, immutable object + * or null */ public SoftArrayBackedOffsetLookupCache(@NotNull final Class valueType, - @NotNull final OffsetLookup lookupFunction) { + @NotNull final OffsetLookup lookupFunction) { this(valueType, lookupFunction, OffsetLookupCache.createPlaceholder(valueType)); } @@ -52,14 +52,13 @@ public SoftArrayBackedOffsetLookupCache(@NotNull final Class valueTy * Construct a lookup cache for the supplied arguments. 
* * @param valueType The value type - * @param lookupFunction The lookup function from index to value, must return a - * fully-initialized, immutable object or null - * @param nullValue The "null" placeholder value, stored internally whenever lookupFunction - * returns null + * @param lookupFunction The lookup function from index to value, must return a fully-initialized, immutable object + * or null + * @param nullValue The "null" placeholder value, stored internally whenever lookupFunction returns null */ private SoftArrayBackedOffsetLookupCache(@NotNull final Class valueType, - @NotNull final OffsetLookup lookupFunction, - @NotNull final VALUE_TYPE nullValue) { + @NotNull final OffsetLookup lookupFunction, + @NotNull final VALUE_TYPE nullValue) { super(lookupFunction, nullValue, valueType); // noinspection unchecked @@ -77,23 +76,22 @@ public VALUE_TYPE get(final int index, final EXTRA_INPUT_TYPE extra) { VALUE_TYPE[] block; VALUE_TYPE value; - // This is only correct because we rely on lookupFunction to return fully-initialized, - // immutable objects (or null). + // This is only correct because we rely on lookupFunction to return fully-initialized, immutable objects (or + // null). 
if ((localCachedValues = cachedValues).length <= blockIndex - || (blockRef = localCachedValues[blockIndex]) == null - || (block = blockRef.get()) == null - || (value = block[subBlockIndex]) == null) { + || (blockRef = localCachedValues[blockIndex]) == null + || (block = blockRef.get()) == null + || (value = block[subBlockIndex]) == null) { synchronized (this) { if ((localCachedValues = cachedValues).length <= blockIndex) { // noinspection unchecked final SoftReference[] newCachedValues = - new SoftReference[1 << MathUtil.ceilLog2(blockIndex + 1)]; - System.arraycopy(localCachedValues, 0, newCachedValues, 0, - localCachedValues.length); + new SoftReference[1 << MathUtil.ceilLog2(blockIndex + 1)]; + System.arraycopy(localCachedValues, 0, newCachedValues, 0, localCachedValues.length); cachedValues = localCachedValues = newCachedValues; } if ((blockRef = localCachedValues[blockIndex]) == null - || (block = blockRef.get()) == null) { + || (block = blockRef.get()) == null) { // noinspection unchecked block = (VALUE_TYPE[]) Array.newInstance(valueType, BLOCK_SIZE); blockRef = new SoftReference<>(block); diff --git a/Util/src/main/java/io/deephaven/util/datastructures/hash/IntrusiveChainedEntryPool.java b/Util/src/main/java/io/deephaven/util/datastructures/hash/IntrusiveChainedEntryPool.java index e214e769be1..803266ca6ee 100644 --- a/Util/src/main/java/io/deephaven/util/datastructures/hash/IntrusiveChainedEntryPool.java +++ b/Util/src/main/java/io/deephaven/util/datastructures/hash/IntrusiveChainedEntryPool.java @@ -6,9 +6,8 @@ import org.jetbrains.annotations.Nullable; /** - * A pool for "free" entries to be use with IntrusiveChainedHash structures, implemented as a stack - * using the same adapter and intrusive fields. Requires external synchronization of all methods for - * concurrent use. + * A pool for "free" entries to be use with IntrusiveChainedHash structures, implemented as a stack using the same + * adapter and intrusive fields. 
Requires external synchronization of all methods for concurrent use. */ public class IntrusiveChainedEntryPool { @@ -27,8 +26,7 @@ public class IntrusiveChainedEntryPool { * * @param adapter The adapter */ - public IntrusiveChainedEntryPool( - @NotNull final IntrusiveChainedHashAdapter adapter) { + public IntrusiveChainedEntryPool(@NotNull final IntrusiveChainedHashAdapter adapter) { this.adapter = adapter; } diff --git a/Util/src/main/java/io/deephaven/util/datastructures/intrusive/IntrusiveArraySet.java b/Util/src/main/java/io/deephaven/util/datastructures/intrusive/IntrusiveArraySet.java index 1a7a483b7a1..040afd32e9c 100644 --- a/Util/src/main/java/io/deephaven/util/datastructures/intrusive/IntrusiveArraySet.java +++ b/Util/src/main/java/io/deephaven/util/datastructures/intrusive/IntrusiveArraySet.java @@ -11,11 +11,11 @@ /** * An intrusive set that uses an array for its backing storage. * - * You can insert, remove, or check for existence in O(1) time. Clearing the set is O(n); as we need - * to null out references. + * You can insert, remove, or check for existence in O(1) time. Clearing the set is O(n); as we need to null out + * references. * - * If you attempt to perform an operation element which is not in this set, but is in another set - * with the same adapter; then you are going to have a bad time. Tread carefully. + * If you attempt to perform an operation element which is not in this set, but is in another set with the same adapter; + * then you are going to have a bad time. Tread carefully. * * @param the type of the element we are storing. */ @@ -79,7 +79,7 @@ public Object[] toArray() { public T1[] toArray(@NotNull T1[] a) { // noinspection unchecked final T[] r = a.length >= size ? 
(T[]) a - : (T[]) java.lang.reflect.Array.newInstance(a.getClass().getComponentType(), size); + : (T[]) java.lang.reflect.Array.newInstance(a.getClass().getComponentType(), size); if (size >= 0) { System.arraycopy(storage, 0, r, 0, size); diff --git a/Util/src/main/java/io/deephaven/util/datastructures/linked/IntrusiveDoublyLinkedNode.java b/Util/src/main/java/io/deephaven/util/datastructures/linked/IntrusiveDoublyLinkedNode.java index d101362cfb1..3066615e358 100644 --- a/Util/src/main/java/io/deephaven/util/datastructures/linked/IntrusiveDoublyLinkedNode.java +++ b/Util/src/main/java/io/deephaven/util/datastructures/linked/IntrusiveDoublyLinkedNode.java @@ -3,8 +3,7 @@ import org.jetbrains.annotations.NotNull; /** - * Interface for allowing sub-interfaces to enforce a common interface for intrusive doubly-linked - * nodes. + * Interface for allowing sub-interfaces to enforce a common interface for intrusive doubly-linked nodes. */ public interface IntrusiveDoublyLinkedNode> { @@ -39,8 +38,7 @@ public interface IntrusiveDoublyLinkedNode> implements IntrusiveDoublyLinkedNode { @@ -76,14 +74,12 @@ public void setPrev(@NotNull final NODE_TYPE other) { } /** - * Generic {@link IntrusiveDoublyLinkedStructureBase.Adapter} usable with any implementing - * class. + * Generic {@link IntrusiveDoublyLinkedStructureBase.Adapter} usable with any implementing class. 
*/ class Adapter> - implements IntrusiveDoublyLinkedStructureBase.Adapter { + implements IntrusiveDoublyLinkedStructureBase.Adapter { - private static final IntrusiveDoublyLinkedStructureBase.Adapter INSTANCE = - new Adapter<>(); + private static final IntrusiveDoublyLinkedStructureBase.Adapter INSTANCE = new Adapter<>(); public static > IntrusiveDoublyLinkedStructureBase.Adapter getInstance() { // noinspection unchecked diff --git a/Util/src/main/java/io/deephaven/util/datastructures/linked/IntrusiveDoublyLinkedQueue.java b/Util/src/main/java/io/deephaven/util/datastructures/linked/IntrusiveDoublyLinkedQueue.java index cebae5dff99..374af7aaa1e 100644 --- a/Util/src/main/java/io/deephaven/util/datastructures/linked/IntrusiveDoublyLinkedQueue.java +++ b/Util/src/main/java/io/deephaven/util/datastructures/linked/IntrusiveDoublyLinkedQueue.java @@ -13,8 +13,8 @@ /** * A simple queue based on circular intrusive doubly linked nodes (for O(1) random removal). */ -public class IntrusiveDoublyLinkedQueue - extends IntrusiveDoublyLinkedStructureBase implements Iterable { +public class IntrusiveDoublyLinkedQueue extends IntrusiveDoublyLinkedStructureBase + implements Iterable { /** * The head of the queue, or null if the queue is empty @@ -58,8 +58,7 @@ public final int size() { * * @param other The queue to transfer from */ - public final void transferBeforeHeadFrom( - @NotNull final IntrusiveDoublyLinkedQueue other) { + public final void transferBeforeHeadFrom(@NotNull final IntrusiveDoublyLinkedQueue other) { transferFrom(other, true); } @@ -68,8 +67,7 @@ public final void transferBeforeHeadFrom( * * @param other The queue to transfer from */ - public final void transferAfterTailFrom( - @NotNull final IntrusiveDoublyLinkedQueue other) { + public final void transferAfterTailFrom(@NotNull final IntrusiveDoublyLinkedQueue other) { transferFrom(other, false); } @@ -77,14 +75,11 @@ public final void transferAfterTailFrom( * Move all nodes from {@code other} to this queue in 
O(1) time. * * @param other The queue to transfer from - * @param front Whether to add {@code other}'s elements at the front (instead of the back) of - * this queue + * @param front Whether to add {@code other}'s elements at the front (instead of the back) of this queue */ - private void transferFrom(@NotNull final IntrusiveDoublyLinkedQueue other, - final boolean front) { + private void transferFrom(@NotNull final IntrusiveDoublyLinkedQueue other, final boolean front) { if (!compatible(other)) { - throw new UnsupportedOperationException( - this + ": Attempted to transfer from incompatible queue " + other); + throw new UnsupportedOperationException(this + ": Attempted to transfer from incompatible queue " + other); } if (other.isEmpty()) { return; @@ -132,8 +127,7 @@ public final boolean offer(@NotNull final VALUE_TYPE node) { */ public final void insert(@NotNull final VALUE_TYPE node, final int offset) { if (offset < 0 || offset > size) { - throw new IllegalArgumentException( - "Invalid offset " + offset + ", must be in [0, size(" + size + ")]"); + throw new IllegalArgumentException("Invalid offset " + offset + ", must be in [0, size(" + size + ")]"); } if (offset == size) { @@ -224,8 +218,7 @@ public final void clear() { } /** - * Remove all nodes in the queue, without unlinking anything. This is suitable for nodes that - * will be discarded. + * Remove all nodes in the queue, without unlinking anything. This is suitable for nodes that will be discarded. */ public final void clearFast() { head = null; @@ -233,8 +226,8 @@ public final void clearFast() { } /** - * Determine if a node is currently in the queue. Assumes that the node's prev/next pointers are - * only used in this queue. + * Determine if a node is currently in the queue. Assumes that the node's prev/next pointers are only used in this + * queue. 
* * @param node The node * @return Whether the node is currently in the queue @@ -282,8 +275,10 @@ public Iterator iterator() { @Override public Spliterator spliterator() { - return Spliterators.spliterator(iterator(), size(), - Spliterator.ORDERED | Spliterator.NONNULL); // Implicitly | SIZED | SUBSIZED, too. + return Spliterators.spliterator(iterator(), size(), Spliterator.ORDERED | Spliterator.NONNULL); // Implicitly | + // SIZED | + // SUBSIZED, + // too. } diff --git a/Util/src/main/java/io/deephaven/util/datastructures/linked/IntrusiveDoublyLinkedStructureBase.java b/Util/src/main/java/io/deephaven/util/datastructures/linked/IntrusiveDoublyLinkedStructureBase.java index 03f1e9c1603..66834f79375 100644 --- a/Util/src/main/java/io/deephaven/util/datastructures/linked/IntrusiveDoublyLinkedStructureBase.java +++ b/Util/src/main/java/io/deephaven/util/datastructures/linked/IntrusiveDoublyLinkedStructureBase.java @@ -123,8 +123,7 @@ protected final boolean isLinked(@NotNull final VALUE_TYPE node) { * @param other The node to insert before * @return node */ - protected @NotNull final VALUE_TYPE linkBefore(@NotNull final VALUE_TYPE node, - @NotNull final VALUE_TYPE other) { + protected @NotNull final VALUE_TYPE linkBefore(@NotNull final VALUE_TYPE node, @NotNull final VALUE_TYPE other) { setNext(node, other); setPrev(node, getPrev(other)); setNext(getPrev(other), node); @@ -139,8 +138,7 @@ protected final boolean isLinked(@NotNull final VALUE_TYPE node) { * @param other The node to insert after * @return node */ - protected @NotNull final VALUE_TYPE linkAfter(@NotNull final VALUE_TYPE node, - @NotNull final VALUE_TYPE other) { + protected @NotNull final VALUE_TYPE linkAfter(@NotNull final VALUE_TYPE node, @NotNull final VALUE_TYPE other) { setPrev(node, other); setNext(node, getNext(other)); setPrev(getNext(other), node); @@ -163,14 +161,13 @@ protected final boolean isLinked(@NotNull final VALUE_TYPE node) { } /** - * Is the other data structure compatible with this 
one? This is true if and only if it's the - * same class, with the same adapter instance. + * Is the other data structure compatible with this one? This is true if and only if it's the same class, with the + * same adapter instance. * * @param other The other data structure * @return Whether other is compatible */ - protected final boolean compatible( - @NotNull final IntrusiveDoublyLinkedStructureBase other) { + protected final boolean compatible(@NotNull final IntrusiveDoublyLinkedStructureBase other) { return getClass() == other.getClass() && adapter == other.adapter; } } diff --git a/Util/src/main/java/io/deephaven/util/datastructures/linked/IntrusiveSinglyLinkedQueue.java b/Util/src/main/java/io/deephaven/util/datastructures/linked/IntrusiveSinglyLinkedQueue.java index 3c89bb4cfdd..8f6aaaf0a53 100644 --- a/Util/src/main/java/io/deephaven/util/datastructures/linked/IntrusiveSinglyLinkedQueue.java +++ b/Util/src/main/java/io/deephaven/util/datastructures/linked/IntrusiveSinglyLinkedQueue.java @@ -4,8 +4,8 @@ import org.jetbrains.annotations.Nullable; /** - * Singly-linked queue. Supports basic queue operations, but not extended Collection methods that - * would be required by actually implementing java.lang.Queue. + * Singly-linked queue. Supports basic queue operations, but not extended Collection methods that would be required by + * actually implementing java.lang.Queue. 
*/ public class IntrusiveSinglyLinkedQueue { VALUE_TYPE head = null; diff --git a/Util/src/main/java/io/deephaven/util/files/DirWatchService.java b/Util/src/main/java/io/deephaven/util/files/DirWatchService.java index 7724d8b2ac5..0f3199bb63e 100644 --- a/Util/src/main/java/io/deephaven/util/files/DirWatchService.java +++ b/Util/src/main/java/io/deephaven/util/files/DirWatchService.java @@ -25,14 +25,10 @@ public class DirWatchService { /** - * The preferred watcher type is the built-in Java one as it's more efficient, but it doesn't - * catch all new files + * The preferred watcher type is the built-in Java one as it's more efficient, but it doesn't catch all new files */ public enum WatchServiceType { - /** - * The built-in Java classes are efficient on Linux but don't catch remotely-created NFS - * files - */ + /** The built-in Java classes are efficient on Linux but don't catch remotely-created NFS files */ JAVAWATCHSERVICE("JavaWatchService"), /** A simple poll loop watcher */ POLLWATCHSERVICE("PollWatchService"); @@ -68,8 +64,8 @@ public static class ExceptionConsumerParameter { private final boolean watchServiceTerminated; private final Path path; - private ExceptionConsumerParameter(final Exception exception, - final boolean watchServiceTerminated, final Path path) { + private ExceptionConsumerParameter(final Exception exception, final boolean watchServiceTerminated, + final Path path) { this.exception = exception; this.watchServiceTerminated = watchServiceTerminated; this.path = path; @@ -89,8 +85,7 @@ public Path getPath() { } /** - * Class to store the consumers and matcher types for use in the Maps used to track the files - * being watched for + * Class to store the consumers and matcher types for use in the Maps used to track the files being watched for */ private class FileWatcher { @@ -106,10 +101,9 @@ void addConsumer(@NotNull final BiConsumer consumer) { } /** - * Wrap the call to the user's consumer in a try block, as these are called on the 
watcher - * thread and we don't want consumer exceptions to kill that thread. At some point we may - * want to consider moving this call into its own thread so that the file watches don't hang - * up on long-lived consume operations. + * Wrap the call to the user's consumer in a try block, as these are called on the watcher thread and we don't + * want consumer exceptions to kill that thread. At some point we may want to consider moving this call into its + * own thread so that the file watches don't hang up on long-lived consume operations. */ void consume(final Path p, final WatchEvent.Kind k) { for (BiConsumer consumer : consumers) { @@ -123,28 +117,28 @@ void consume(final Path p, final WatchEvent.Kind k) { } /** - * An "exact-match" file watcher, keyed by the token that must appear before the separator - * (supplied on registration). + * An "exact-match" file watcher, keyed by the token that must appear before the separator (supplied on + * registration). */ private class ExactMatchFileWatcher extends FileWatcher { private final String tokenToMatch; private ExactMatchFileWatcher(@NotNull final BiConsumer consumer, - @NotNull final String tokenToMatch) { + @NotNull final String tokenToMatch) { super(consumer); this.tokenToMatch = tokenToMatch; } } private static final KeyedObjectKey EXACT_MATCH_KEY = - new KeyedObjectKey.Basic() { + new KeyedObjectKey.Basic() { - @Override - public String getKey(@NotNull final ExactMatchFileWatcher value) { - return value.tokenToMatch; - } - }; + @Override + public String getKey(@NotNull final ExactMatchFileWatcher value) { + return value.tokenToMatch; + } + }; private class SeparatorToExactMatchWatchersPair { @@ -157,10 +151,10 @@ private SeparatorToExactMatchWatchersPair(String fileSeparator) { } private void addWatcher(@NotNull final String filePattern, - @NotNull final BiConsumer consumer) { + @NotNull final BiConsumer consumer) { exactMatchWatchers.compute(filePattern, (k, oldFileWatcher) -> { - // TODO: If we didn't need 
to take a consumer in the constructor, we could make this - // simpler and use putIfAbsent or computeIfAbsent. + // TODO: If we didn't need to take a consumer in the constructor, we could make this simpler and use + // putIfAbsent or computeIfAbsent. if (oldFileWatcher != null) { oldFileWatcher.addConsumer(consumer); return oldFileWatcher; @@ -179,7 +173,7 @@ private class FilteringFileWatcher extends FileWatcher { private final Predicate matcher; private FilteringFileWatcher(@NotNull final BiConsumer consumer, - @NotNull final Predicate matcher) { + @NotNull final Predicate matcher) { super(consumer); this.matcher = matcher; } @@ -223,8 +217,7 @@ public synchronized void start() throws IOException { watcher = FileSystems.getDefault().newWatchService(); dir.register(watcher, watchEventKinds); - watcherThread = - new Thread(this::runJavaFileWatcher, "DirWatchService" + "-" + dirToWatch); + watcherThread = new Thread(this::runJavaFileWatcher, "DirWatchService" + "-" + dirToWatch); watcherThread.setDaemon(true); watcherThread.start(); } @@ -232,8 +225,7 @@ public synchronized void start() throws IOException { @Override public synchronized void stop() throws IOException { if (watcherThread == null) { - throw new IllegalStateException( - "Trying to stop an already-stopped DirWatchService!"); + throw new IllegalStateException("Trying to stop an already-stopped DirWatchService!"); } stopRequested = true; @@ -272,12 +264,11 @@ private void runJavaFileWatcher() { for (WatchEvent event : watchKey.pollEvents()) { final WatchEvent.Kind kind = event.kind(); - // An overflow event means that too many file events have happened and we've - // lost some - best to indicate a problem + // An overflow event means that too many file events have happened and we've lost some - best to + // indicate a problem if (kind == StandardWatchEventKinds.OVERFLOW) { callExceptionConsumer(new RuntimeException( - "Overflow event received in DirWatchService, file events possibly lost"), - true); + 
"Overflow event received in DirWatchService, file events possibly lost"), true); return; } @@ -290,30 +281,25 @@ private void runJavaFileWatcher() { } if (!watchKey.reset()) { - callExceptionConsumer(new RuntimeException( - "Error resetting watch key in directory watch service"), true); + callExceptionConsumer(new RuntimeException("Error resetting watch key in directory watch service"), + true); return; } } } } - /** - * WatcherInterface implementation for an implementation that uses the Apache polling service - */ + /** WatcherInterface implementation for an implementation that uses the Apache polling service */ private class WatcherInterfacePollImpl implements WatcherInterface { private final FileAlterationMonitor fileAlterationMonitor; private boolean fileAlterationMonitorStarted; - private WatcherInterfacePollImpl(long pollIntervalMillis, final String dirToWatch, - WatchEvent.Kind... kinds) { - final FileAlterationObserver fileAlterationObserver = - new FileAlterationObserver(new File(dirToWatch)); + private WatcherInterfacePollImpl(long pollIntervalMillis, final String dirToWatch, WatchEvent.Kind... kinds) { + final FileAlterationObserver fileAlterationObserver = new FileAlterationObserver(new File(dirToWatch)); /* - * Convert the events to and from the Java standard - it would be nice to have all these - * in one listener... A switch statement won't work here as these values might look like - * enums or constants but aren't + * Convert the events to and from the Java standard - it would be nice to have all these in one listener... 
+ * A switch statement won't work here as these values might look like enums or constants but aren't */ for (WatchEvent.Kind kind : kinds) { if (kind == StandardWatchEventKinds.ENTRY_CREATE) { @@ -339,20 +325,18 @@ public void onFileDelete(File file) { }); } else { throw new IllegalArgumentException( - "DirWatchService passed unsupported watch event kind: " + kind.name()); + "DirWatchService passed unsupported watch event kind: " + kind.name()); } } - fileAlterationMonitor = - new FileAlterationMonitor(pollIntervalMillis, fileAlterationObserver); + fileAlterationMonitor = new FileAlterationMonitor(pollIntervalMillis, fileAlterationObserver); fileAlterationMonitorStarted = false; } @Override public synchronized void start() throws Exception { if (fileAlterationMonitorStarted) { - throw new IllegalStateException( - "Trying to start an already-started DirWatchService!"); + throw new IllegalStateException("Trying to start an already-started DirWatchService!"); } fileAlterationMonitor.start(); fileAlterationMonitorStarted = true; @@ -383,26 +367,25 @@ private void handlePollFileEvent(final File file, final WatchEvent.Kind kind) { private final WatcherInterface watcherImpl; /** - * Constructor to create a directory watch service. This initializes the instance but doesn't - * add any watch patterns, and doesn't start the watch thread. + * Constructor to create a directory watch service. This initializes the instance but doesn't add any watch + * patterns, and doesn't start the watch thread. * * @param dirToWatch Directory to watch for changes - * @param exceptionConsumer Consumer to accept exceptions if they occur. Even if the watch - * service has terminated, stop() should be called before restarting it. The - * exceptionConsumer must accept two arguments - the Exception generated, and a boolean - * which specifies whether the WatchService has terminated as a result of the exception. + * @param exceptionConsumer Consumer to accept exceptions if they occur. 
Even if the watch service has terminated, + * stop() should be called before restarting it. The exceptionConsumer must accept two arguments - the + * Exception generated, and a boolean which specifies whether the WatchService has terminated as a result of + * the exception. * @param watchServiceType the watch service type, from the WatchServiceType enum * @param pollIntervalMillis for a poll service, the interval between polls - * @param kinds The kinds of events that may need to be watched from - * java.nio.file.StandardWatchEventKinds, valid options are: ENTRY_CREATE, ENTRY_DELETE, - * and ENTRY_MODIFY + * @param kinds The kinds of events that may need to be watched from java.nio.file.StandardWatchEventKinds, valid + * options are: ENTRY_CREATE, ENTRY_DELETE, and ENTRY_MODIFY */ @SuppressWarnings("WeakerAccess") public DirWatchService(@NotNull final String dirToWatch, - @NotNull final Consumer exceptionConsumer, - @NotNull final WatchServiceType watchServiceType, - final long pollIntervalMillis, - @NotNull final WatchEvent.Kind... kinds) { + @NotNull final Consumer exceptionConsumer, + @NotNull final WatchServiceType watchServiceType, + final long pollIntervalMillis, + @NotNull final WatchEvent.Kind... kinds) { separatorToExactMatchWatchers = new ArrayList<>(); filteringFileWatchers = new ArrayDeque<>(); @@ -420,43 +403,38 @@ public DirWatchService(@NotNull final String dirToWatch, break; default: - throw new IllegalArgumentException( - "Unknown watch service type: " + watchServiceType); + throw new IllegalArgumentException("Unknown watch service type: " + watchServiceType); } } - // If the exception consumer generates an exception, wrap it; this will terminate the watch - // service. + // If the exception consumer generates an exception, wrap it; this will terminate the watch service. 
private void callExceptionConsumer(final Exception e, final Boolean watchServiceTerminated) { try { - exceptionConsumer - .accept(new ExceptionConsumerParameter(e, watchServiceTerminated, keyDir)); + exceptionConsumer.accept(new ExceptionConsumerParameter(e, watchServiceTerminated, keyDir)); } catch (Exception e2) { throw new RuntimeException(e); } } /** - * Adds an exact match file pattern to watch for; equivalent to adding with the - * EXACT_MATCH_WITH_SEPARATOR MatcherType + * Adds an exact match file pattern to watch for; equivalent to adding with the EXACT_MATCH_WITH_SEPARATOR + * MatcherType * - * @param filePattern The exact file pattern to watch for (i.e. the part before the separator - * must match this) + * @param filePattern The exact file pattern to watch for (i.e. the part before the separator must match this) * @param consumer The consumer to be called when the pattern is matched */ @SuppressWarnings("WeakerAccess") public synchronized void addExactFileWatch(@NotNull final String separator, - @NotNull final String filePattern, - @NotNull final BiConsumer consumer) { + @NotNull final String filePattern, + @NotNull final BiConsumer consumer) { final SeparatorToExactMatchWatchersPair pair = separatorToExactMatchWatchers.stream() - .filter((p) -> p.fileSeparator.equals(separator)) - .findFirst() - .orElseGet(() -> { - final SeparatorToExactMatchWatchersPair p = - new SeparatorToExactMatchWatchersPair(separator); - separatorToExactMatchWatchers.add(p); - return p; - }); + .filter((p) -> p.fileSeparator.equals(separator)) + .findFirst() + .orElseGet(() -> { + final SeparatorToExactMatchWatchersPair p = new SeparatorToExactMatchWatchersPair(separator); + separatorToExactMatchWatchers.add(p); + return p; + }); pair.addWatcher(filePattern, consumer); } @@ -468,7 +446,7 @@ public synchronized void addExactFileWatch(@NotNull final String separator, */ @SuppressWarnings("unused") public synchronized void addFileWatchAtEnd(@NotNull final Predicate matcher, - 
@NotNull final BiConsumer consumer) { + @NotNull final BiConsumer consumer) { filteringFileWatchers.addLast(new FilteringFileWatcher(consumer, matcher)); } @@ -479,7 +457,7 @@ public synchronized void addFileWatchAtEnd(@NotNull final Predicate matc * @param consumer Consumer to be called with the file and event type */ public synchronized void addFileWatchAtStart(@NotNull final Predicate matcher, - @NotNull final BiConsumer consumer) { + @NotNull final BiConsumer consumer) { filteringFileWatchers.addFirst(new FilteringFileWatcher(consumer, matcher)); } @@ -491,8 +469,8 @@ public void stop() throws Exception { } /** - * Starts the watch service thread. Even if it's initially empty the service should start as it - * could get files later. + * Starts the watch service thread. Even if it's initially empty the service should start as it could get files + * later. * * @throws IOException from the Java watch service */ @@ -501,8 +479,8 @@ public void start() throws Exception { } /** - * Check the exact matcher list for any matchers and call the consumer if one is found. Returns - * true if a match was found. + * Check the exact matcher list for any matchers and call the consumer if one is found. Returns true if a match was + * found. 
*/ private synchronized FileWatcher checkExactMatches(final String fileName) { for (final SeparatorToExactMatchWatchersPair pair : separatorToExactMatchWatchers) { @@ -511,15 +489,9 @@ private synchronized FileWatcher checkExactMatches(final String fileName) { final int fileSeparatorIndex = fileName.indexOf(fileSeparator); if (fileSeparatorIndex > 0) { - final String fileNameBeforeSeparator = fileName.substring(0, fileSeparatorIndex); // Must - // be - // non-zero - // length - // because - // of - // the - // above - // check + final String fileNameBeforeSeparator = fileName.substring(0, fileSeparatorIndex); // Must be non-zero + // length because of + // the above check final FileWatcher fileWatcher = exactMatchWatchers.get(fileNameBeforeSeparator); if (fileWatcher != null) { return fileWatcher; @@ -530,8 +502,7 @@ private synchronized FileWatcher checkExactMatches(final String fileName) { } /** - * Check the pattern match list and call the first one found, if any. Returns true if a match - * was found. + * Check the pattern match list and call the first one found, if any. Returns true if a match was found. 
*/ private synchronized FileWatcher checkPatternMatches(final String fileName) { for (FilteringFileWatcher fileWatcher : filteringFileWatchers) { @@ -545,8 +516,7 @@ private synchronized FileWatcher checkPatternMatches(final String fileName) { private void handleFileEvent(final String fileName, final WatchEvent.Kind kind) { FileWatcher foundFileWatcher; synchronized (this) { - // The first check is against the set of hash entries for the exact match - this - // requires a separator + // The first check is against the set of hash entries for the exact match - this requires a separator foundFileWatcher = checkExactMatches(fileName); // If an exact match wasn't found, then check for pattern matches diff --git a/Util/src/main/java/io/deephaven/util/files/FileHelper.java b/Util/src/main/java/io/deephaven/util/files/FileHelper.java index 66f4fa82b4d..9139446353c 100644 --- a/Util/src/main/java/io/deephaven/util/files/FileHelper.java +++ b/Util/src/main/java/io/deephaven/util/files/FileHelper.java @@ -21,8 +21,7 @@ public class FileHelper { private FileHelper() {} /** - * Get the canonical path for the given path string, converting IOExceptions to - * UncheckIOException. + * Get the canonical path for the given path string, converting IOExceptions to UncheckIOException. * * @param path The file (as String) for which to get the canonical form. * @return the canonical file string. @@ -36,8 +35,8 @@ public static String getCanonicalForm(String path) { } /** - * Augment {@link FileUtils#deleteRecursivelyOnNFS(File)} with an exclude pattern. Files - * matching the pattern will not be removed, nor will containing directories. + * Augment {@link FileUtils#deleteRecursivelyOnNFS(File)} with an exclude pattern. Files matching the pattern will + * not be removed, nor will containing directories. * * @param file the file or folder to delete. * @param excludePattern don't delete files or folders matching this pattern. 
@@ -46,13 +45,12 @@ public static String getCanonicalForm(String path) { */ public static boolean deleteRecursivelyOnNFS(File file, String excludePattern) { Pattern pattern = excludePattern == null ? null : Pattern.compile(excludePattern); - return deleteRecursivelyOnNFS( - new File(file.getParentFile(), '.' + file.getName() + ".trash"), file, pattern); + return deleteRecursivelyOnNFS(new File(file.getParentFile(), '.' + file.getName() + ".trash"), file, pattern); } /** - * Augment {@link FileUtils#deleteRecursivelyOnNFS(File)} with an exclude pattern. Files - * matching the pattern will not be removed, nor will containing directories. + * Augment {@link FileUtils#deleteRecursivelyOnNFS(File)} with an exclude pattern. Files matching the pattern will + * not be removed, nor will containing directories. * * @param file the file or folder to delete. * @param excludePattern don't delete files or folders matching this pattern. @@ -60,25 +58,25 @@ public static boolean deleteRecursivelyOnNFS(File file, String excludePattern) { * @throws FileDeletionException on any errors moving/renaming/deleting files */ public static boolean deleteRecursivelyOnNFS(File file, Pattern excludePattern) { - return deleteRecursivelyOnNFS( - new File(file.getParentFile(), '.' + file.getName() + ".trash"), file, excludePattern); + return deleteRecursivelyOnNFS(new File(file.getParentFile(), '.' + file.getName() + ".trash"), file, + excludePattern); } /** - * Augment {@link FileUtils#deleteRecursivelyOnNFS(File, File)} with an exclude pattern. Files - * matching the pattern will not be removed, nor will containing directories. + * Augment {@link FileUtils#deleteRecursivelyOnNFS(File, File)} with an exclude pattern. Files matching the pattern + * will not be removed, nor will containing directories. *

      * This is an implementation method for {@link #deleteRecursivelyOnNFS(File, String)} * - * @param trashFile Filename to move regular files to before deletion. .nfs files may be created - * in its parent directory. + * @param trashFile Filename to move regular files to before deletion. .nfs files may be created in its parent + * directory. * @param fileToBeDeleted File or directory at which to begin recursive deletion. * @param excludePattern don't delete files or folders matching this pattern. * @return true if any files were excluded (so caller will know if a directory is empty) * @throws FileDeletionException on any errors moving/renaming/deleting files */ public static boolean deleteRecursivelyOnNFS(File trashFile, File fileToBeDeleted, - @Nullable Pattern excludePattern) { + @Nullable Pattern excludePattern) { if (excludePattern != null && excludePattern.matcher(fileToBeDeleted.getName()).matches()) { return true; } @@ -89,19 +87,17 @@ public static boolean deleteRecursivelyOnNFS(File trashFile, File fileToBeDelete excluded = deleteRecursivelyOnNFS(trashFile, childFile, excludePattern) || excluded; } if (!excluded && !fileToBeDeleted.delete()) { - throw new FileDeletionException("Failed to delete expected empty directory " - + fileToBeDeleted.getAbsolutePath()); + throw new FileDeletionException( + "Failed to delete expected empty directory " + fileToBeDeleted.getAbsolutePath()); } return excluded; } else if (fileToBeDeleted.exists()) { if (!fileToBeDeleted.renameTo(trashFile)) { - throw new FileDeletionException( - "Failed to move file " + fileToBeDeleted.getAbsolutePath() + throw new FileDeletionException("Failed to move file " + fileToBeDeleted.getAbsolutePath() + " to temporary location " + trashFile.getAbsolutePath()); } if (!trashFile.delete()) { - throw new FileDeletionException( - "Failed to delete temporary location " + trashFile.getAbsolutePath() + throw new FileDeletionException("Failed to delete temporary location " + 
trashFile.getAbsolutePath() + " for file " + fileToBeDeleted.getAbsolutePath()); } return false; diff --git a/Util/src/main/java/io/deephaven/util/files/ResourceResolution.java b/Util/src/main/java/io/deephaven/util/files/ResourceResolution.java index f2e14bb4e22..06cc30624d5 100644 --- a/Util/src/main/java/io/deephaven/util/files/ResourceResolution.java +++ b/Util/src/main/java/io/deephaven/util/files/ResourceResolution.java @@ -35,41 +35,36 @@ public class ResourceResolution { private final static Pattern normalizePattern = Pattern.compile("([\\\\/]+)"); /** - * An instance of this class should be created to contain the required parameters to use for the - * resource search + * An instance of this class should be created to contain the required parameters to use for the resource search * * @param configuration configuration to be used for resource resolution - * @param delimiterRegex regular expression to be used to delimit the resources; if it is null - * or has a length of 0 then the default (semicolon or space) will be used - * @param delimitedResourceList list of resources to find, delimited by delimiterRegex. These - * resources can include files and directories. There can be many of these. + * @param delimiterRegex regular expression to be used to delimit the resources; if it is null or has a length of 0 + * then the default (semicolon or space) will be used + * @param delimitedResourceList list of resources to find, delimited by delimiterRegex. These resources can include + * files and directories. There can be many of these. */ - public ResourceResolution(@NotNull final Configuration configuration, - final String delimiterRegex, @NotNull final String... delimitedResourceList) { - final Pattern delimiterPattern = - Pattern.compile(delimiterRegex == null || delimiterRegex.isEmpty() ? DEFAULT_SPLIT_REGEX - : delimiterRegex); + public ResourceResolution(@NotNull final Configuration configuration, final String delimiterRegex, + @NotNull final String... 
delimitedResourceList) { + final Pattern delimiterPattern = Pattern + .compile(delimiterRegex == null || delimiterRegex.isEmpty() ? DEFAULT_SPLIT_REGEX : delimiterRegex); resourceList = Arrays.stream(delimitedResourceList) - .filter(l -> l != null && !l.isEmpty()) - .flatMap(l -> Arrays.stream(delimiterPattern.split(l))) - .filter(r -> !r.isEmpty()) - .map(r -> { - String subs = - r.contains("") ? r.replace("", configuration.getDevRootPath()) - : r; - subs = subs.contains("") - ? subs.replace("", configuration.getWorkspacePath()) - : subs; - return subs; - }) - .collect(Collectors.toCollection(LinkedHashSet::new)); + .filter(l -> l != null && !l.isEmpty()) + .flatMap(l -> Arrays.stream(delimiterPattern.split(l))) + .filter(r -> !r.isEmpty()) + .map(r -> { + String subs = r.contains("") ? r.replace("", configuration.getDevRootPath()) : r; + subs = subs.contains("") ? subs.replace("", configuration.getWorkspacePath()) + : subs; + return subs; + }) + .collect(Collectors.toCollection(LinkedHashSet::new)); } private void processJarFile(@NotNull final Path path, @NotNull final String suffix, - @NotNull final BiConsumer consumer) throws IOException { + @NotNull final BiConsumer consumer) throws IOException { /* - * Wrap all this in a try block, as we don't want to fail if there's a matching filename - * that's not a jar. Is there a better way to handle this? + * Wrap all this in a try block, as we don't want to fail if there's a matching filename that's not a jar. Is + * there a better way to handle this? 
*/ final URL baseUrl = path.toUri().toURL(); try (final JarFile jarFile = new JarFile(path.toFile())) { @@ -95,11 +90,11 @@ private void processJarFile(@NotNull final Path path, @NotNull final String suff * * @param suffix filename suffix to be searched for * @param consumer operation to perform on each found resource (URL, file name pair) - * @throws IOException in the event of issues opening files or malformed URLs; this should not - * occur unless files are being modified as we walk the directories + * @throws IOException in the event of issues opening files or malformed URLs; this should not occur unless files + * are being modified as we walk the directories */ - public void findResources(@NotNull final String suffix, - @NotNull final BiConsumer consumer) throws IOException { + public void findResources(@NotNull final String suffix, @NotNull final BiConsumer consumer) + throws IOException { if (resourceList.isEmpty()) { return; } @@ -112,21 +107,19 @@ public void findResources(@NotNull final String suffix, final DirectoryScanner scanner = new DirectoryScanner(); /* - * Wildcards work best if we normalize the path sent in, removing extra - * slashes/backslashes and making everything use one separator based on the OS - * separator. We have to take into account full paths and relative paths on both - * Linux and Windows. + * Wildcards work best if we normalize the path sent in, removing extra slashes/backslashes and making + * everything use one separator based on the OS separator. We have to take into account full paths and + * relative paths on both Linux and Windows. */ final String normalizedResourcePath = normalize(resourcePath); final String rootPath = getRoot(normalizedResourcePath); scanner.setBasedir(rootPath); - // If this resource's normalized search path starts with rootPath, then it was a - // fully-specified path, so pull - // that part off the path to determine the wildcard include list. 
Otherwise it was - // relative, and that becomes the include array. - final String[] wildcardIncludeAry = - new String[] {normalizedResourcePath.startsWith(rootPath) + // If this resource's normalized search path starts with rootPath, then it was a fully-specified path, + // so pull + // that part off the path to determine the wildcard include list. Otherwise it was relative, and that + // becomes the include array. + final String[] wildcardIncludeAry = new String[] {normalizedResourcePath.startsWith(rootPath) ? normalizedResourcePath.substring(rootPath.length()) : normalizedResourcePath}; scanner.setIncludes(wildcardIncludeAry); @@ -138,8 +131,8 @@ public void findResources(@NotNull final String suffix, if (files.length > 0) { traversedPaths++; - // The returned file array will be the relative path from the root, so add that - // root back and process the file + // The returned file array will be the relative path from the root, so add that root back and + // process the file for (final String file : files) { // The returned filename doesn't include the root, so add it final String fullFilename = rootPath + File.separator + file; @@ -150,12 +143,12 @@ public void findResources(@NotNull final String suffix, } else { traversedPaths++; try (final Stream pathStream = - Files.walk(Paths.get(resourcePath), FileVisitOption.FOLLOW_LINKS)) { - // Walk this file (or all files in this directory and subdirectories) and look - // for <.suffix> and .jar files. + Files.walk(Paths.get(resourcePath), FileVisitOption.FOLLOW_LINKS)) { + // Walk this file (or all files in this directory and subdirectories) and look for <.suffix> and + // .jar files. 
pathStream - .filter(p -> Files.isRegularFile(p)) - .forEach(p -> processFile(p.getFileName().toString(), p, suffix, consumer)); + .filter(p -> Files.isRegularFile(p)) + .forEach(p -> processFile(p.getFileName().toString(), p, suffix, consumer)); } catch (UncheckedIOException e) { throw e.getCause(); } @@ -167,8 +160,7 @@ public void findResources(@NotNull final String suffix, } /** - * Find the filesystem root for this search path. For example, it may be /, C:\, or something - * passed by the user. + * Find the filesystem root for this search path. For example, it may be /, C:\, or something passed by the user. * * @param searchPath the already-normalized search path * @return the root for searchPath @@ -178,17 +170,16 @@ private static String getRoot(String searchPath) { File dir = new File(searchPath).getAbsoluteFile(); final String fullNameWithDir = dir.getPath(); - // If we were passed an absolute path then the fully-qualified name will match the created - // File's name. In that case - // the root needs to be the filesystem/disk root, so look up the path until it's found. If - // this is a UNC path + // If we were passed an absolute path then the fully-qualified name will match the created File's name. In that + // case + // the root needs to be the filesystem/disk root, so look up the path until it's found. If this is a UNC path // we need to stop traversing when we reach the share level. if (new File(searchPath).isAbsolute()) { if (searchPath.startsWith("\\\\")) { File parent = dir; while ((parent != null) - && (parent.getParentFile() != null) - && (parent.getParentFile().getParentFile()) != null) { + && (parent.getParentFile() != null) + && (parent.getParentFile().getParentFile()) != null) { dir = parent; parent = dir.getParentFile(); } @@ -203,17 +194,17 @@ private static String getRoot(String searchPath) { return dir.getAbsolutePath(); } } else { - // Any trailing separators foil the endsWith call below. 
The string is supposed to be - // normalized, so just find File.separator. + // Any trailing separators foil the endsWith call below. The string is supposed to be normalized, so just + // find File.separator. while (searchPath.endsWith(File.separator)) { searchPath = searchPath.substring(0, searchPath.length() - 1); } - // If it's a relative path, the root will be the part before the passed-in searchPath. - // as that's before the wildcards could start + // If it's a relative path, the root will be the part before the passed-in searchPath. as that's before the + // wildcards could start if (!fullNameWithDir.endsWith(searchPath)) { - throw new IllegalArgumentException("Can't resolve relative path " + searchPath - + " to full directory name " + fullNameWithDir); + throw new IllegalArgumentException( + "Can't resolve relative path " + searchPath + " to full directory name " + fullNameWithDir); } else { return fullNameWithDir.substring(0, fullNameWithDir.length() - searchPath.length()); } @@ -221,7 +212,7 @@ private static String getRoot(String searchPath) { } private void processFile(final String fileName, final Path path, @NotNull final String suffix, - @NotNull final BiConsumer consumer) { + @NotNull final BiConsumer consumer) { try { if (fileName.endsWith(suffix)) { consumer.accept(path.toUri().toURL(), fileName); @@ -234,16 +225,14 @@ private void processFile(final String fileName, final Path path, @NotNull final } /** - * Replace multiple sequential file separators (either Windows or Unix) with single instances of - * the system file separator. Make sure UNC paths are preserved by leaving two leading file - * sparator characters in place + * Replace multiple sequential file separators (either Windows or Unix) with single instances of the system file + * separator. 
Make sure UNC paths are preserved by leaving two leading file sparator characters in place * * @param resourcePath the path to normalize * @return the normalized path */ protected String normalize(final String resourcePath) { return ((File.separator.equals("\\") && resourcePath.startsWith("\\\\")) ? "\\" : "") - + normalizePattern.matcher(resourcePath) - .replaceAll(Matcher.quoteReplacement(File.separator)); + + normalizePattern.matcher(resourcePath).replaceAll(Matcher.quoteReplacement(File.separator)); } } diff --git a/Util/src/main/java/io/deephaven/util/locks/AwareFunctionalLock.java b/Util/src/main/java/io/deephaven/util/locks/AwareFunctionalLock.java index c499e7c5089..d195dd38e09 100644 --- a/Util/src/main/java/io/deephaven/util/locks/AwareFunctionalLock.java +++ b/Util/src/main/java/io/deephaven/util/locks/AwareFunctionalLock.java @@ -1,8 +1,7 @@ package io.deephaven.util.locks; /** - * {@link java.util.concurrent.locks.Lock} that implements {@link AwareLock} and - * {@link FunctionalLock}. + * {@link java.util.concurrent.locks.Lock} that implements {@link AwareLock} and {@link FunctionalLock}. */ public interface AwareFunctionalLock extends AwareLock, FunctionalLock { } diff --git a/Util/src/main/java/io/deephaven/util/locks/AwareLock.java b/Util/src/main/java/io/deephaven/util/locks/AwareLock.java index b595ffbd7df..4a511371f44 100644 --- a/Util/src/main/java/io/deephaven/util/locks/AwareLock.java +++ b/Util/src/main/java/io/deephaven/util/locks/AwareLock.java @@ -3,8 +3,7 @@ import java.util.concurrent.locks.Lock; /** - * Extension to the {@link Lock} interface to make awareness of the current thread's state - * accessible. + * Extension to the {@link Lock} interface to make awareness of the current thread's state accessible. 
*/ public interface AwareLock extends Lock { diff --git a/Util/src/main/java/io/deephaven/util/locks/FunctionalLock.java b/Util/src/main/java/io/deephaven/util/locks/FunctionalLock.java index 1b015142ce9..a52544ba276 100644 --- a/Util/src/main/java/io/deephaven/util/locks/FunctionalLock.java +++ b/Util/src/main/java/io/deephaven/util/locks/FunctionalLock.java @@ -15,14 +15,14 @@ public interface FunctionalLock extends Lock { /** - * Acquire the lock, invoke {@link ThrowingRunnable#run()} while holding the lock, and release - * the lock before returning. + * Acquire the lock, invoke {@link ThrowingRunnable#run()} while holding the lock, and release the lock before + * returning. * * @param runnable The {@link ThrowingRunnable} to run * @throws EXCEPTION_TYPE If {@code runnable} throws its declared exception */ - default void doLocked( - @NotNull final ThrowingRunnable runnable) throws EXCEPTION_TYPE { + default void doLocked(@NotNull final ThrowingRunnable runnable) + throws EXCEPTION_TYPE { lock(); try { runnable.run(); @@ -32,17 +32,15 @@ default void doLocked( } /** - * Acquire the lock interruptibly, invoke {@link ThrowingRunnable#run()} while holding the lock, - * and release the lock before returning. + * Acquire the lock interruptibly, invoke {@link ThrowingRunnable#run()} while holding the lock, and release the + * lock before returning. 
* * @param runnable The {@link ThrowingRunnable#run()} to run - * @throws InterruptedException If the current thread was interrupted while waiting to acquire - * the lock + * @throws InterruptedException If the current thread was interrupted while waiting to acquire the lock * @throws EXCEPTION_TYPE If {@code runnable} throws its declared exception */ default void doLockedInterruptibly( - @NotNull final ThrowingRunnable runnable) - throws InterruptedException, EXCEPTION_TYPE { + @NotNull final ThrowingRunnable runnable) throws InterruptedException, EXCEPTION_TYPE { lockInterruptibly(); try { runnable.run(); @@ -52,16 +50,15 @@ default void doLockedInterruptibly( } /** - * Acquire the lock, invoke {@link ThrowingSupplier#get()} while holding the lock, and release - * the lock before returning the result. + * Acquire the lock, invoke {@link ThrowingSupplier#get()} while holding the lock, and release the lock before + * returning the result. * * @param supplier The {@link ThrowingSupplier} to get * @return The result of invoking {@code supplier} * @throws EXCEPTION_TYPE If {@code supplier} throws its declared exception */ default RESULT_TYPE computeLocked( - @NotNull final ThrowingSupplier supplier) - throws EXCEPTION_TYPE { + @NotNull final ThrowingSupplier supplier) throws EXCEPTION_TYPE { lock(); try { return supplier.get(); @@ -71,15 +68,15 @@ default RESULT_TYPE computeLocke } /** - * Acquire the lock, invoke {@link ThrowingBooleanSupplier#get()} while holding the lock, and - * release the lock before returning the result. + * Acquire the lock, invoke {@link ThrowingBooleanSupplier#get()} while holding the lock, and release the lock + * before returning the result. 
* * @param supplier The {@link ThrowingBooleanSupplier} to get * @return The result of invoking {@code supplier} * @throws EXCEPTION_TYPE If {@code supplier} throws its declared exception */ default boolean testLocked( - @NotNull final ThrowingBooleanSupplier supplier) throws EXCEPTION_TYPE { + @NotNull final ThrowingBooleanSupplier supplier) throws EXCEPTION_TYPE { lock(); try { return supplier.get(); @@ -89,18 +86,17 @@ default boolean testLocked( } /** - * Acquire the lock interruptibly, invoke {@link ThrowingSupplier#get()} while holding the lock, - * and release the lock before returning the result. + * Acquire the lock interruptibly, invoke {@link ThrowingSupplier#get()} while holding the lock, and release the + * lock before returning the result. * * @param supplier The {@link ThrowingSupplier} to get * @return The result of invoking {@code supplier} - * @throws InterruptedException If the current thread was interrupted while waiting to acquire - * the lock + * @throws InterruptedException If the current thread was interrupted while waiting to acquire the lock * @throws EXCEPTION_TYPE If {@code supplier} throws its declared exception */ default RESULT_TYPE computeLockedInterruptibly( - @NotNull final ThrowingSupplier supplier) - throws InterruptedException, EXCEPTION_TYPE { + @NotNull final ThrowingSupplier supplier) + throws InterruptedException, EXCEPTION_TYPE { lockInterruptibly(); try { return supplier.get(); diff --git a/Util/src/main/java/io/deephaven/util/pool/ThreadSafeFixedSizePool.java b/Util/src/main/java/io/deephaven/util/pool/ThreadSafeFixedSizePool.java index 90f40c861ea..665997e721a 100644 --- a/Util/src/main/java/io/deephaven/util/pool/ThreadSafeFixedSizePool.java +++ b/Util/src/main/java/io/deephaven/util/pool/ThreadSafeFixedSizePool.java @@ -34,10 +34,10 @@ public class ThreadSafeFixedSizePool implements PoolEx { private final Logger log; private ThreadSafeFixedSizePool(int size, @Nullable Function.Nullary factory, - Procedure.Unary 
clearingProcedure, Logger log, String logPfx) { + Procedure.Unary clearingProcedure, Logger log, String logPfx) { Require.geq(size, "size", MIN_SIZE, "MIN_SIZE"); Require.requirement((log == null) == (logPfx == null), - "log and logPfx must either both be null, or both non-null"); + "log and logPfx must either both be null, or both non-null"); this.clearingProcedure = clearingProcedure; this.log = log; this.logPfx = logPfx; @@ -54,8 +54,7 @@ private ThreadSafeFixedSizePool(int size, @Nullable Function.Nullary factory, } } - public ThreadSafeFixedSizePool(int size, Function.Nullary factory, - Procedure.Unary clearingProcedure) { + public ThreadSafeFixedSizePool(int size, Function.Nullary factory, Procedure.Unary clearingProcedure) { this(size, factory, clearingProcedure, null, null); } @@ -83,9 +82,8 @@ public void give(T item) { if (now > nextGiveLog) { nextGiveLog = (now + 100000) - (now % 100000); long dt = (now - t0); - log.warn().append(logPfx) - .append(": give() can't enqueue returned item, yield count = ") - .append(yields).endl(); + log.warn().append(logPfx).append(": give() can't enqueue returned item, yield count = ") + .append(yields).endl(); } } Thread.yield(); @@ -98,10 +96,8 @@ public void give(T item) { if (now > nextGiveLog) { nextGiveLog = (now + 100000) - (now % 100000); long dt = (now - t0); - log.warn().append(logPfx).append(": give() took ").append(dt) - .append(" micros, with ") - .append(yields).append(" yields and ").append(spins) - .append(" additional spins").endl(); + log.warn().append(logPfx).append(": give() took ").append(dt).append(" micros, with ") + .append(yields).append(" yields and ").append(spins).append(" additional spins").endl(); } } } @@ -126,9 +122,8 @@ public T take() { if (now > nextTakeLog) { nextTakeLog = (now + 100000) - (now % 100000); long dt = (now - t0); - log.warn().append(logPfx) - .append(": take() can't dequeue from pool, waiting for ") - .append(dt).append(" micros, yield count = ").append(yields).endl(); + 
log.warn().append(logPfx).append(": take() can't dequeue from pool, waiting for ") + .append(dt).append(" micros, yield count = ").append(yields).endl(); } } Thread.yield(); @@ -142,10 +137,8 @@ public T take() { if (now > nextTakeLog) { nextTakeLog = (now + 100000) - (now % 100000); long dt = (now - t0); - log.warn().append(logPfx).append(": take() took ").append(dt) - .append(" micros, with ") - .append(yields).append(" yields and ").append(spins) - .append(" additional spins").endl(); + log.warn().append(logPfx).append(": take() took ").append(dt).append(" micros, with ") + .append(yields).append(" yields and ").append(spins).append(" additional spins").endl(); } } } diff --git a/Util/src/main/java/io/deephaven/util/pool/ThreadSafeLenientFixedSizePool.java b/Util/src/main/java/io/deephaven/util/pool/ThreadSafeLenientFixedSizePool.java index 2f63340fe52..65bb1b715e7 100644 --- a/Util/src/main/java/io/deephaven/util/pool/ThreadSafeLenientFixedSizePool.java +++ b/Util/src/main/java/io/deephaven/util/pool/ThreadSafeLenientFixedSizePool.java @@ -34,7 +34,7 @@ public class ThreadSafeLenientFixedSizePool implements Pool.MultiPool, Poo * @return a Function taking a ThreadSafeLenientFixedSizePool(ignored) and returning T */ private static Function.Unary> makeFactoryAdapter( - final Function.Nullary callable) { + final Function.Nullary callable) { return arg -> callable.call(); } @@ -46,23 +46,23 @@ private static Function.Unary> makeFact private final Counter extraFactoryCalls; public ThreadSafeLenientFixedSizePool(String name, - int size, - Function.Nullary initFactory, - Function.Nullary overflowFactory, - Procedure.Unary clearingProcedure) { + int size, + Function.Nullary initFactory, + Function.Nullary overflowFactory, + Procedure.Unary clearingProcedure) { this( - name, - Require.geq(size, "size", MIN_SIZE, "MIN_SIZE"), - makeFactoryAdapter(Require.neqNull(initFactory, "initFactory")), - makeFactoryAdapter(Require.neqNull(overflowFactory, "overflowFactory")), - 
clearingProcedure); + name, + Require.geq(size, "size", MIN_SIZE, "MIN_SIZE"), + makeFactoryAdapter(Require.neqNull(initFactory, "initFactory")), + makeFactoryAdapter(Require.neqNull(overflowFactory, "overflowFactory")), + clearingProcedure); } public ThreadSafeLenientFixedSizePool(String name, - int size, - Function.Unary> initFactory, - Function.Unary> overflowFactory, - Procedure.Unary clearingProcedure) { + int size, + Function.Unary> initFactory, + Function.Unary> overflowFactory, + Procedure.Unary clearingProcedure) { Require.geq(size, "size", MIN_SIZE, "MIN_SIZE"); Require.neqNull(initFactory, "initFactory"); Require.neqNull(overflowFactory, "overflowFactory"); @@ -72,8 +72,7 @@ public ThreadSafeLenientFixedSizePool(String name, for (int i = 0; i < size; ++i) { pool.enqueue(initFactory.call(this)); } - extraFactoryCalls = name == null ? null - : Stats.makeItem(name, "extraFactoryCalls", Counter.FACTORY).getValue(); + extraFactoryCalls = name == null ? null : Stats.makeItem(name, "extraFactoryCalls", Counter.FACTORY).getValue(); } public T take() { diff --git a/Util/src/main/java/io/deephaven/util/profiling/BaselineThreadMXBeanThreadProfiler.java b/Util/src/main/java/io/deephaven/util/profiling/BaselineThreadMXBeanThreadProfiler.java index 8801ae96562..52cf1648fb2 100644 --- a/Util/src/main/java/io/deephaven/util/profiling/BaselineThreadMXBeanThreadProfiler.java +++ b/Util/src/main/java/io/deephaven/util/profiling/BaselineThreadMXBeanThreadProfiler.java @@ -5,11 +5,10 @@ import java.lang.management.ThreadMXBean; /** - * A concrete generic {@link ThreadMXBean}-based {@link ThreadProfiler} implementation, with support - * for baseline measurements available on all JVMs only. + * A concrete generic {@link ThreadMXBean}-based {@link ThreadProfiler} implementation, with support for baseline + * measurements available on all JVMs only. 
*/ -public final class BaselineThreadMXBeanThreadProfiler - extends ThreadMXBeanThreadProfiler { +public final class BaselineThreadMXBeanThreadProfiler extends ThreadMXBeanThreadProfiler { @Override public final boolean memoryProfilingAvailable() { diff --git a/Util/src/main/java/io/deephaven/util/profiling/SunThreadMXBeanThreadProfiler.java b/Util/src/main/java/io/deephaven/util/profiling/SunThreadMXBeanThreadProfiler.java index fc9585f429b..5016bee0362 100644 --- a/Util/src/main/java/io/deephaven/util/profiling/SunThreadMXBeanThreadProfiler.java +++ b/Util/src/main/java/io/deephaven/util/profiling/SunThreadMXBeanThreadProfiler.java @@ -7,19 +7,16 @@ import java.lang.management.ThreadMXBean; /** - * A {@link ThreadMXBean}-based {@link ThreadProfiler} implementation for use on Oracle and OpenJDK - * JVMs, adding support for memory measurements. + * A {@link ThreadMXBean}-based {@link ThreadProfiler} implementation for use on Oracle and OpenJDK JVMs, adding support + * for memory measurements. */ -public final class SunThreadMXBeanThreadProfiler - extends ThreadMXBeanThreadProfiler { +public final class SunThreadMXBeanThreadProfiler extends ThreadMXBeanThreadProfiler { @VisibleForTesting - static final boolean TRY_ENABLE_THREAD_ALLOCATED_MEMORY = - Configuration.getInstance().getBooleanForClassWithDefault( - SunThreadMXBeanThreadProfiler.class, "tryEnableThreadAllocatedMemory", true); + static final boolean TRY_ENABLE_THREAD_ALLOCATED_MEMORY = Configuration.getInstance() + .getBooleanForClassWithDefault(SunThreadMXBeanThreadProfiler.class, "tryEnableThreadAllocatedMemory", true); - // NB: This class may need to be moved to a JDK-specific source set at some future date, if and - // when we add support + // NB: This class may need to be moved to a JDK-specific source set at some future date, if and when we add support // to compile on other JDKs. 
/** @@ -33,19 +30,18 @@ public SunThreadMXBeanThreadProfiler() { return; } - if (threadMXBean.isThreadAllocatedMemorySupported() - && !threadMXBean.isThreadAllocatedMemoryEnabled() - && TRY_ENABLE_THREAD_ALLOCATED_MEMORY) { + if (threadMXBean.isThreadAllocatedMemorySupported() && !threadMXBean.isThreadAllocatedMemoryEnabled() + && TRY_ENABLE_THREAD_ALLOCATED_MEMORY) { try { threadMXBean.setThreadAllocatedMemoryEnabled(true); } catch (UnsupportedOperationException e) { throw new UnsupportedOperationException( - "Failed to enable thread allocated memory - set SunThreadMXBeanThreadProfiler.tryEnableThreadAllocatedMemory=false to proceed without it", - e); + "Failed to enable thread allocated memory - set SunThreadMXBeanThreadProfiler.tryEnableThreadAllocatedMemory=false to proceed without it", + e); } } - memoryProfilingAvailable = threadMXBean.isThreadAllocatedMemorySupported() - && threadMXBean.isThreadAllocatedMemoryEnabled(); + memoryProfilingAvailable = + threadMXBean.isThreadAllocatedMemorySupported() && threadMXBean.isThreadAllocatedMemoryEnabled(); } @Override diff --git a/Util/src/main/java/io/deephaven/util/profiling/ThreadMXBeanThreadProfiler.java b/Util/src/main/java/io/deephaven/util/profiling/ThreadMXBeanThreadProfiler.java index 0c8c4a5fb78..43cf85eade0 100644 --- a/Util/src/main/java/io/deephaven/util/profiling/ThreadMXBeanThreadProfiler.java +++ b/Util/src/main/java/io/deephaven/util/profiling/ThreadMXBeanThreadProfiler.java @@ -8,16 +8,14 @@ import java.lang.management.ThreadMXBean; /** - * An abstract generic {@link ThreadMXBean}-based {@link ThreadProfiler} implementation, with - * support for baseline measurements available on all JVMs. + * An abstract generic {@link ThreadMXBean}-based {@link ThreadProfiler} implementation, with support for baseline + * measurements available on all JVMs. 
*/ -public abstract class ThreadMXBeanThreadProfiler - implements ThreadProfiler { +public abstract class ThreadMXBeanThreadProfiler implements ThreadProfiler { @VisibleForTesting - static final boolean TRY_ENABLE_THREAD_CPU_TIME = - Configuration.getInstance().getBooleanForClassWithDefault(ThreadMXBeanThreadProfiler.class, - "tryEnableThreadCpuTime", true); + static final boolean TRY_ENABLE_THREAD_CPU_TIME = Configuration.getInstance() + .getBooleanForClassWithDefault(ThreadMXBeanThreadProfiler.class, "tryEnableThreadCpuTime", true); /** * The bean for measurements. @@ -39,17 +37,16 @@ protected ThreadMXBeanThreadProfiler() { } if (threadMXBean.isCurrentThreadCpuTimeSupported() && !threadMXBean.isThreadCpuTimeEnabled() - && TRY_ENABLE_THREAD_CPU_TIME) { + && TRY_ENABLE_THREAD_CPU_TIME) { try { threadMXBean.setThreadCpuTimeEnabled(true); } catch (UnsupportedOperationException e) { throw new UnsupportedOperationException( - "Failed to enable thread cpu time - set ThreadMXBeanThreadProfiler.tryEnableThreadCpuTime=false to proceed without it", - e); + "Failed to enable thread cpu time - set ThreadMXBeanThreadProfiler.tryEnableThreadCpuTime=false to proceed without it", + e); } } - cpuProfilingAvailable = - threadMXBean.isCurrentThreadCpuTimeSupported() && threadMXBean.isThreadCpuTimeEnabled(); + cpuProfilingAvailable = threadMXBean.isCurrentThreadCpuTimeSupported() && threadMXBean.isThreadCpuTimeEnabled(); } @Override diff --git a/Util/src/main/java/io/deephaven/util/profiling/ThreadProfiler.java b/Util/src/main/java/io/deephaven/util/profiling/ThreadProfiler.java index 43082e92f85..25b1cd69b3d 100644 --- a/Util/src/main/java/io/deephaven/util/profiling/ThreadProfiler.java +++ b/Util/src/main/java/io/deephaven/util/profiling/ThreadProfiler.java @@ -8,14 +8,13 @@ */ public interface ThreadProfiler { - boolean CPU_PROFILING_ENABLED = Configuration.getInstance() - .getBooleanForClassWithDefault(ThreadProfiler.class, "cpuProfilingEnabled", true); - boolean 
MEMORY_PROFILING_ENABLED = Configuration.getInstance() - .getBooleanForClassWithDefault(ThreadProfiler.class, "memoryProfilingEnabled", true); + boolean CPU_PROFILING_ENABLED = Configuration.getInstance().getBooleanForClassWithDefault(ThreadProfiler.class, + "cpuProfilingEnabled", true); + boolean MEMORY_PROFILING_ENABLED = Configuration.getInstance().getBooleanForClassWithDefault(ThreadProfiler.class, + "memoryProfilingEnabled", true); /** - * Check if memory profiling (e.g. {@link #getCurrentThreadAllocatedBytes()}) is available - * (supported and enabled). + * Check if memory profiling (e.g. {@link #getCurrentThreadAllocatedBytes()}) is available (supported and enabled). * * @return Whether memory profiling is available. */ @@ -30,16 +29,16 @@ public interface ThreadProfiler { long getCurrentThreadAllocatedBytes(); /** - * Check if CPU profiling (e.g. {@link #getCurrentThreadCpuTime()} and - * {@link #getCurrentThreadUserTime()}) is available (supported and enabled). + * Check if CPU profiling (e.g. {@link #getCurrentThreadCpuTime()} and {@link #getCurrentThreadUserTime()}) is + * available (supported and enabled). * * @return Whether CPU profiling is available. */ boolean cpuProfilingAvailable(); /** - * Get the approximate number of total nanoseconds the current thread has executed (in system or - * user mode) since CPU time measurement started. + * Get the approximate number of total nanoseconds the current thread has executed (in system or user mode) since + * CPU time measurement started. * * @return The approximate number of total nanoseconds the current thread has executed, or * {@link QueryConstants#NULL_LONG} if unavailable. @@ -47,17 +46,17 @@ public interface ThreadProfiler { long getCurrentThreadCpuTime(); /** - * Get the approximate number of total nanoseconds the current thread has executed (in user - * mode) since CPU time measurement started. 
+ * Get the approximate number of total nanoseconds the current thread has executed (in user mode) since CPU time + * measurement started. * - * @return The approximate number of total nanoseconds the current thread has executed in user - * mode, or {@link QueryConstants#NULL_LONG} if unavailable. + * @return The approximate number of total nanoseconds the current thread has executed in user mode, or + * {@link QueryConstants#NULL_LONG} if unavailable. */ long getCurrentThreadUserTime(); /** - * Make a new ThreadProfiler for this JVM. The result may not support all measurements, if - * there's no suitable implementation available. + * Make a new ThreadProfiler for this JVM. The result may not support all measurements, if there's no suitable + * implementation available. * * @return A new ThreadProfiler for this JVM. */ diff --git a/Util/src/main/java/io/deephaven/util/progress/ProcessStatusSubrange.java b/Util/src/main/java/io/deephaven/util/progress/ProcessStatusSubrange.java index b25f107aa46..ccd2fb6c248 100644 --- a/Util/src/main/java/io/deephaven/util/progress/ProcessStatusSubrange.java +++ b/Util/src/main/java/io/deephaven/util/progress/ProcessStatusSubrange.java @@ -25,8 +25,8 @@ public ProcessStatusSubrange(StatusCallback parentCallback, int begin, int end) } /** - * This object has been assigned a range of the parents range. Adjust the stepProgress value - * from 0-100 to the value in the full range of the parent object. + * This object has been assigned a range of the parents range. Adjust the stepProgress value from 0-100 to the value + * in the full range of the parent object. 
* * @param progress percent complete (0-100) * @param status optional message text @@ -35,8 +35,7 @@ public ProcessStatusSubrange(StatusCallback parentCallback, int begin, int end) public void update(final int progress, final Supplier status) { Require.geqZero(progress, "progress"); Require.leq(progress, "progress", 100); - // Require.geq(getStepValue(), "current progress", progress, "progress"); // Needed? - // probably not. + // Require.geq(getStepValue(), "current progress", progress, "progress"); // Needed? probably not. this.stepProgress = progress; int adjustedProgress = (int) ((progress / 100.0) * (end - begin) + begin); diff --git a/Util/src/main/java/io/deephaven/util/progress/ProgressLogger.java b/Util/src/main/java/io/deephaven/util/progress/ProgressLogger.java index 6f2480122c1..41b9811b1b0 100644 --- a/Util/src/main/java/io/deephaven/util/progress/ProgressLogger.java +++ b/Util/src/main/java/io/deephaven/util/progress/ProgressLogger.java @@ -27,8 +27,7 @@ public ProgressLogger(StatusCallback parent, Logger log) { public void update(final int progress, final Supplier status) { parent.update(progress, status); if (progress != lastProgress) { - log.info().append(status.get()).append(" - Progress: ").append(parent.getValue()) - .append("%").endl(); + log.info().append(status.get()).append(" - Progress: ").append(parent.getValue()).append("%").endl(); lastProgress = progress; } } diff --git a/Util/src/main/java/io/deephaven/util/progress/StatusCallback.java b/Util/src/main/java/io/deephaven/util/progress/StatusCallback.java index 8f92cd02ae8..3976bb3548d 100644 --- a/Util/src/main/java/io/deephaven/util/progress/StatusCallback.java +++ b/Util/src/main/java/io/deephaven/util/progress/StatusCallback.java @@ -62,8 +62,7 @@ default int getStepValue() { /** * Get a new status callback representing a sub range of this one. * - * TODO: it's iffy whether this belongs in the interface, but is pretty handy. Could easily be a - * factory. 
+ * TODO: it's iffy whether this belongs in the interface, but is pretty handy. Could easily be a factory. * * @param min 0% in the subrange corresponds to min in the parent * @param max 100% in the subrange corresponds to max in the parent diff --git a/Util/src/main/java/io/deephaven/util/reference/CleanupReferenceProcessor.java b/Util/src/main/java/io/deephaven/util/reference/CleanupReferenceProcessor.java index 1e4886aed4c..fbb2c3ff82a 100644 --- a/Util/src/main/java/io/deephaven/util/reference/CleanupReferenceProcessor.java +++ b/Util/src/main/java/io/deephaven/util/reference/CleanupReferenceProcessor.java @@ -14,15 +14,14 @@ import java.lang.ref.ReferenceQueue; /** - * Utility for draining a reference queue of {@link CleanupReference}s and invoking their cleanup - * methods. + * Utility for draining a reference queue of {@link CleanupReference}s and invoking their cleanup methods. */ public class CleanupReferenceProcessor { private static final Logger log = LoggerFactory.getLogger(CleanupReferenceProcessor.class); - private static final boolean LOG_CLEANED_REFERENCES = Configuration.getInstance() - .getBooleanWithDefault("CleanupReferenceProcessor.logCleanedReferences", false); + private static final boolean LOG_CLEANED_REFERENCES = + Configuration.getInstance().getBooleanWithDefault("CleanupReferenceProcessor.logCleanedReferences", false); private final String name; private final long shutdownCheckDelayMillis; @@ -41,11 +40,10 @@ public class CleanupReferenceProcessor { * @param exceptionHandler Callback for exception handling */ public CleanupReferenceProcessor(@NotNull final String name, - final long shutdownCheckDelayMillis, - @NotNull final FunctionalInterfaces.ThrowingTriConsumer exceptionHandler) { + final long shutdownCheckDelayMillis, + @NotNull final FunctionalInterfaces.ThrowingTriConsumer exceptionHandler) { this.name = Require.neqNull(name, "name"); - this.shutdownCheckDelayMillis = - Require.geqZero(shutdownCheckDelayMillis, 
"shutdownDelayCheckMillis"); + this.shutdownCheckDelayMillis = Require.geqZero(shutdownCheckDelayMillis, "shutdownDelayCheckMillis"); this.exceptionHandler = Require.neqNull(exceptionHandler, "exceptionHandler"); } @@ -53,12 +51,12 @@ public CleanupReferenceProcessor(@NotNull final String name, *

      * Get the reference queue for this cleaner. *

      - * On the first call after construction or {@link #resetForUnitTests()}, this method initializes - * the instance as a side effect. Initialization entails: + * On the first call after construction or {@link #resetForUnitTests()}, this method initializes the instance as a + * side effect. Initialization entails: *

        *
      1. Constructing a {@link ReferenceQueue}.
      2. - *
      3. Starting a daemon thread that will drain the reference queue and invoke - * {@link CleanupReference#cleanup()} on any {@link CleanupReference} dequeued.
      4. + *
      5. Starting a daemon thread that will drain the reference queue and invoke {@link CleanupReference#cleanup()} on + * any {@link CleanupReference} dequeued.
      6. *
      * * @return The {@link ReferenceQueue} constructed in the most recent initialization of this @@ -71,7 +69,7 @@ public ReferenceQueue getReferenceQueue() { if ((localQueue = referenceQueue) == null) { referenceQueue = localQueue = new ReferenceQueue<>(); final Thread cleanerThread = new Thread(new DrainQueue(localQueue), - "CleanupReferenceProcessor-" + name + "-drainingThread"); + "CleanupReferenceProcessor-" + name + "-drainingThread"); cleanerThread.setDaemon(true); cleanerThread.start(); } @@ -82,9 +80,8 @@ public ReferenceQueue getReferenceQueue() { } /** - * Reset this instance so that the next call to {@link #getReferenceQueue()} will re-initialize - * it and provide a new queue. Results in eventual termination of the daemon thread that may - * have been draining the existing queue. + * Reset this instance so that the next call to {@link #getReferenceQueue()} will re-initialize it and provide a new + * queue. Results in eventual termination of the daemon thread that may have been draining the existing queue. 
*/ @TestUseOnly public final synchronized void resetForUnitTests() { @@ -113,9 +110,8 @@ public void run() { if (reference instanceof CleanupReference) { try { if (LOG_CLEANED_REFERENCES) { - log.info().append("CleanupReferenceProcessor-").append(name) - .append(", cleaning ").append(Utils.REFERENT_FORMATTER, reference) - .endl(); + log.info().append("CleanupReferenceProcessor-").append(name).append(", cleaning ") + .append(Utils.REFERENT_FORMATTER, reference).endl(); } ((CleanupReference) reference).cleanup(); } catch (Exception e) { diff --git a/Util/src/main/java/io/deephaven/util/referencecounting/ProceduralReferenceCounted.java b/Util/src/main/java/io/deephaven/util/referencecounting/ProceduralReferenceCounted.java index 517986b3d51..5a4f6d7613b 100644 --- a/Util/src/main/java/io/deephaven/util/referencecounting/ProceduralReferenceCounted.java +++ b/Util/src/main/java/io/deephaven/util/referencecounting/ProceduralReferenceCounted.java @@ -4,8 +4,8 @@ import org.jetbrains.annotations.NotNull; /** - * A {@link ReferenceCounted} implementation that takes a {@link Runnable} onReferenceCountAtZero - * procedure, in order to avoid relying on inheritance where necessary or desirable. + * A {@link ReferenceCounted} implementation that takes a {@link Runnable} onReferenceCountAtZero procedure, in order to + * avoid relying on inheritance where necessary or desirable. 
*/ public final class ProceduralReferenceCounted extends ReferenceCounted { @@ -14,11 +14,10 @@ public final class ProceduralReferenceCounted extends ReferenceCounted { */ private final Runnable onReferenceCountAtZeroProcedure; - public ProceduralReferenceCounted(@NotNull final Runnable onReferenceCountAtZeroProcedure, - final int initialValue) { + public ProceduralReferenceCounted(@NotNull final Runnable onReferenceCountAtZeroProcedure, final int initialValue) { super(initialValue); this.onReferenceCountAtZeroProcedure = - Require.neqNull(onReferenceCountAtZeroProcedure, "onReferenceCountAtZeroProcedure"); + Require.neqNull(onReferenceCountAtZeroProcedure, "onReferenceCountAtZeroProcedure"); } public ProceduralReferenceCounted(@NotNull final Runnable onReferenceCountAtZeroProcedure) { diff --git a/Util/src/main/java/io/deephaven/util/referencecounting/ReferenceCounted.java b/Util/src/main/java/io/deephaven/util/referencecounting/ReferenceCounted.java index c198ec94fef..f4191e55fe9 100644 --- a/Util/src/main/java/io/deephaven/util/referencecounting/ReferenceCounted.java +++ b/Util/src/main/java/io/deephaven/util/referencecounting/ReferenceCounted.java @@ -15,24 +15,22 @@ import java.util.concurrent.atomic.AtomicIntegerFieldUpdater; /** - * Implements a recurring reference counting pattern - a concurrent reference count that should - * refuse to go below zero, and invokes {@link #onReferenceCountAtZero()} exactly once when the - * count returns to zero. + * Implements a recurring reference counting pattern - a concurrent reference count that should refuse to go below zero, + * and invokes {@link #onReferenceCountAtZero()} exactly once when the count returns to zero. */ public abstract class ReferenceCounted implements LogOutputAppendable, Serializable { private static final long serialVersionUID = 1L; /** - * Field updater for referenceCount, so we can avoid creating an - * {@link java.util.concurrent.atomic.AtomicInteger} for each instance. 
+ * Field updater for referenceCount, so we can avoid creating an {@link java.util.concurrent.atomic.AtomicInteger} + * for each instance. */ private static final AtomicIntegerFieldUpdater REFERENCE_COUNT_UPDATER = - AtomicIntegerFieldUpdater.newUpdater(ReferenceCounted.class, "referenceCount"); + AtomicIntegerFieldUpdater.newUpdater(ReferenceCounted.class, "referenceCount"); /** - * This constant represents a "zero" reference count value that doesn't prevent increasing the - * reference count. + * This constant represents a "zero" reference count value that doesn't prevent increasing the reference count. */ private static final int INITIAL_ZERO_VALUE = -1; @@ -76,8 +74,8 @@ public String toString() { @Override public LogOutput append(@NotNull final LogOutput logOutput) { - return logOutput.append(Utils.REFERENT_FORMATTER, this).append('[') - .append(getCurrentReferenceCount()).append(']'); + return logOutput.append(Utils.REFERENT_FORMATTER, this).append('[').append(getCurrentReferenceCount()) + .append(']'); } private void initializeReferenceCount(final int initialValue) { @@ -100,8 +98,8 @@ private boolean tryUpdateReferenceCount(final int expected, final int update) { */ public final void resetReferenceCount() { if (!tryUpdateReferenceCount(TERMINAL_ZERO_VALUE, INITIAL_ZERO_VALUE)) { - throw new IllegalStateException(Utils.makeReferentDescription(this) - + "'s reference count is non-zero and cannot be reset"); + throw new IllegalStateException( + Utils.makeReferentDescription(this) + "'s reference count is non-zero and cannot be reset"); } } @@ -115,12 +113,11 @@ public final boolean tryIncrementReferenceCount() { int currentReferenceCount; while ((currentReferenceCount = getCurrentReferenceCount()) != TERMINAL_ZERO_VALUE) { if (currentReferenceCount == MAXIMUM_VALUE) { - throw new IllegalStateException(Utils.makeReferentDescription(this) - + "'s reference count cannot exceed maximum value"); + throw new IllegalStateException( + 
Utils.makeReferentDescription(this) + "'s reference count cannot exceed maximum value"); } if (tryUpdateReferenceCount(currentReferenceCount, - currentReferenceCount == INITIAL_ZERO_VALUE ? ONE_VALUE - : currentReferenceCount + 1)) { + currentReferenceCount == INITIAL_ZERO_VALUE ? ONE_VALUE : currentReferenceCount + 1)) { return true; } } @@ -134,15 +131,14 @@ public final boolean tryIncrementReferenceCount() { */ public final void incrementReferenceCount() { if (!tryIncrementReferenceCount()) { - throw new IllegalStateException(Utils.makeReferentDescription(this) - + "'s reference count has already reached zero"); + throw new IllegalStateException( + Utils.makeReferentDescription(this) + "'s reference count has already reached zero"); } } /** - * Decrement the reference count by one, if it has ever been increased and has not already been - * decreased to zero. Invokes the implementation's {@link #onReferenceCountAtZero()} method if - * decrementing to zero. + * Decrement the reference count by one, if it has ever been increased and has not already been decreased to zero. + * Invokes the implementation's {@link #onReferenceCountAtZero()} method if decrementing to zero. * * @return Whether the reference count was successfully decremented */ @@ -150,7 +146,7 @@ public final void incrementReferenceCount() { public final boolean tryDecrementReferenceCount() { int currentReferenceCount; while ((currentReferenceCount = getCurrentReferenceCount()) != TERMINAL_ZERO_VALUE - && currentReferenceCount != INITIAL_ZERO_VALUE) { + && currentReferenceCount != INITIAL_ZERO_VALUE) { if (tryUpdateReferenceCount(currentReferenceCount, currentReferenceCount - 1)) { if (currentReferenceCount == ONE_VALUE) { // Did we just CAS from 1 to 0? onReferenceCountAtZero(); @@ -162,16 +158,15 @@ public final boolean tryDecrementReferenceCount() { } /** - * Decrement the reference count by one, if it has ever been increased and has not already been - * decreased to zero. 
Invokes the implementation's {@link #onReferenceCountAtZero()} method if - * decrementing to zero. + * Decrement the reference count by one, if it has ever been increased and has not already been decreased to zero. + * Invokes the implementation's {@link #onReferenceCountAtZero()} method if decrementing to zero. * * @throws IllegalStateException If the reference count was not successfully decremented */ public final void decrementReferenceCount() { if (!tryDecrementReferenceCount()) { - throw new IllegalStateException(Utils.makeReferentDescription(this) - + "'s reference count has been decreased more than increased"); + throw new IllegalStateException( + Utils.makeReferentDescription(this) + "'s reference count has been decreased more than increased"); } } diff --git a/Util/src/main/java/io/deephaven/util/text/Indenter.java b/Util/src/main/java/io/deephaven/util/text/Indenter.java index 4ed10987255..ecf018752f3 100644 --- a/Util/src/main/java/io/deephaven/util/text/Indenter.java +++ b/Util/src/main/java/io/deephaven/util/text/Indenter.java @@ -5,8 +5,8 @@ /** * Simple utility class for managing the indentation of generated code. *

      - * Pass the indenter to your StringBuilder; calling increaseLevel or decreaseLevel as you start and - * finish your indentation blocks. + * Pass the indenter to your StringBuilder; calling increaseLevel or decreaseLevel as you start and finish your + * indentation blocks. */ public class Indenter { diff --git a/Util/src/main/java/io/deephaven/util/text/ScriptSanitizer.java b/Util/src/main/java/io/deephaven/util/text/ScriptSanitizer.java index c2b2577057c..a4bf0c3fe78 100644 --- a/Util/src/main/java/io/deephaven/util/text/ScriptSanitizer.java +++ b/Util/src/main/java/io/deephaven/util/text/ScriptSanitizer.java @@ -2,8 +2,8 @@ public class ScriptSanitizer { /** - * Replaces unwanted characters like smart quotes with the standard equivalent. Used so that - * copy and paste operations from tools like Outlook or Slack don't result in unusable scripts. + * Replaces unwanted characters like smart quotes with the standard equivalent. Used so that copy and paste + * operations from tools like Outlook or Slack don't result in unusable scripts. * * @param commandToSanitize the command to sanitize * @return the command with unwanted characters replaced with allowable characters diff --git a/Util/src/main/java/io/deephaven/util/text/SplitIgnoreQuotes.java b/Util/src/main/java/io/deephaven/util/text/SplitIgnoreQuotes.java index 68c0cc191b5..3de996e6da6 100644 --- a/Util/src/main/java/io/deephaven/util/text/SplitIgnoreQuotes.java +++ b/Util/src/main/java/io/deephaven/util/text/SplitIgnoreQuotes.java @@ -6,8 +6,8 @@ import java.util.List; /** - * Splits a String on a character ignoring that character inside quotes and back ticks. For example - * splitting on a comma: + * Splits a String on a character ignoring that character inside quotes and back ticks. 
For example splitting on a + * comma: * * 'a,b', "c,d", 'e', "f", g splits to ['a,b'] ["c,d"] ['e'] ["f"] [g] */ diff --git a/Util/src/main/java/io/deephaven/util/thread/NamingThreadFactory.java b/Util/src/main/java/io/deephaven/util/thread/NamingThreadFactory.java index 5a2de00016c..0a43c751c47 100644 --- a/Util/src/main/java/io/deephaven/util/thread/NamingThreadFactory.java +++ b/Util/src/main/java/io/deephaven/util/thread/NamingThreadFactory.java @@ -20,8 +20,7 @@ public NamingThreadFactory(final Class clazz, final String name, boolean daemon) this(null, clazz, name, daemon); } - public NamingThreadFactory(ThreadGroup threadGroup, final Class clazz, final String name, - boolean daemon) { + public NamingThreadFactory(ThreadGroup threadGroup, final Class clazz, final String name, boolean daemon) { this.threadGroup = threadGroup; this.clazz = clazz; this.name = name; @@ -30,8 +29,8 @@ public NamingThreadFactory(ThreadGroup threadGroup, final Class clazz, final Str @Override public Thread newThread(@NotNull final Runnable r) { - final Thread thread = new Thread(threadGroup, r, - clazz.getSimpleName() + "-" + name + "-" + threadCounter.incrementAndGet()); + final Thread thread = + new Thread(threadGroup, r, clazz.getSimpleName() + "-" + name + "-" + threadCounter.incrementAndGet()); thread.setDaemon(daemon); return thread; } diff --git a/Util/src/main/java/io/deephaven/util/thread/ThreadDump.java b/Util/src/main/java/io/deephaven/util/thread/ThreadDump.java index cd4fdf7b34c..b7c98d06e20 100644 --- a/Util/src/main/java/io/deephaven/util/thread/ThreadDump.java +++ b/Util/src/main/java/io/deephaven/util/thread/ThreadDump.java @@ -14,9 +14,8 @@ import java.util.Objects; /** - * A simple method for generating a Thread dump for this JVM; it doesn't do all the stuff that the - * kill -3 does; but you can easily run it from inside the JVM without having to send yourself a - * signal. 
+ * A simple method for generating a Thread dump for this JVM; it doesn't do all the stuff that the kill -3 does; but you + * can easily run it from inside the JVM without having to send yourself a signal. */ public class ThreadDump { /** @@ -66,71 +65,67 @@ private static ThreadInfo[] getThreadInfos() { return threadMXBean.dumpAllThreads(true, true); } - private static final LogOutput.ObjFormatter THREAD_INFO_OBJ_FORMATTER = - (logOutput, threadInfo) -> { - if (threadInfo == null) { - logOutput.append("null"); - return; - } - logOutput.append("\"").append(threadInfo.getThreadName()).append("\" Id=") - .append(threadInfo.getThreadId()).append(" ") - .append(Objects.toString(threadInfo.getThreadState())); + private static final LogOutput.ObjFormatter THREAD_INFO_OBJ_FORMATTER = (logOutput, threadInfo) -> { + if (threadInfo == null) { + logOutput.append("null"); + return; + } + logOutput.append("\"").append(threadInfo.getThreadName()).append("\" Id=").append(threadInfo.getThreadId()) + .append(" ").append(Objects.toString(threadInfo.getThreadState())); - if (threadInfo.getLockName() != null) { - logOutput.append(" on ").append(threadInfo.getLockName()); - } - if (threadInfo.getLockOwnerName() != null) { - logOutput.append(" owned by \"").append(threadInfo.getLockOwnerName()) - .append("\" Id=").append(threadInfo.getLockOwnerId()); - } - if (threadInfo.isSuspended()) { - logOutput.append(" (suspended)"); - } - if (threadInfo.isInNative()) { - logOutput.append(" (in native)"); - } + if (threadInfo.getLockName() != null) { + logOutput.append(" on ").append(threadInfo.getLockName()); + } + if (threadInfo.getLockOwnerName() != null) { + logOutput.append(" owned by \"").append(threadInfo.getLockOwnerName()).append("\" Id=") + .append(threadInfo.getLockOwnerId()); + } + if (threadInfo.isSuspended()) { + logOutput.append(" (suspended)"); + } + if (threadInfo.isInNative()) { + logOutput.append(" (in native)"); + } + logOutput.append('\n'); + int i = 0; + final 
StackTraceElement[] stackTrace = threadInfo.getStackTrace(); + for (; i < stackTrace.length; i++) { + final StackTraceElement ste = stackTrace[i]; + logOutput.append("\tat ").append(ste.toString()); logOutput.append('\n'); - int i = 0; - final StackTraceElement[] stackTrace = threadInfo.getStackTrace(); - for (; i < stackTrace.length; i++) { - final StackTraceElement ste = stackTrace[i]; - logOutput.append("\tat ").append(ste.toString()); - logOutput.append('\n'); - if (i == 0 && threadInfo.getLockInfo() != null) { - final Thread.State ts = threadInfo.getThreadState(); - switch (ts) { - case BLOCKED: - logOutput.append("\t- blocked on ") - .append(Objects.toString(threadInfo.getLockInfo())); - logOutput.append('\n'); - break; - case WAITING: - case TIMED_WAITING: - logOutput.append("\t- waiting on ") - .append(Objects.toString(threadInfo.getLockInfo())); - logOutput.append('\n'); - break; - default: - } - } - - for (final MonitorInfo mi : threadInfo.getLockedMonitors()) { - if (mi.getLockedStackDepth() == i) { - logOutput.append("\t- locked ").append(Objects.toString(mi)); + if (i == 0 && threadInfo.getLockInfo() != null) { + final Thread.State ts = threadInfo.getThreadState(); + switch (ts) { + case BLOCKED: + logOutput.append("\t- blocked on ").append(Objects.toString(threadInfo.getLockInfo())); logOutput.append('\n'); - } + break; + case WAITING: + case TIMED_WAITING: + logOutput.append("\t- waiting on ").append(Objects.toString(threadInfo.getLockInfo())); + logOutput.append('\n'); + break; + default: } } - final LockInfo[] locks = threadInfo.getLockedSynchronizers(); - if (locks.length > 0) { - logOutput.append("\n\tNumber of locked synchronizers = ").append(locks.length); - logOutput.append('\n'); - for (final LockInfo li : locks) { - logOutput.append("\t- ").append(Objects.toString(li)); + for (final MonitorInfo mi : threadInfo.getLockedMonitors()) { + if (mi.getLockedStackDepth() == i) { + logOutput.append("\t- locked ").append(Objects.toString(mi)); 
logOutput.append('\n'); } } + } + + final LockInfo[] locks = threadInfo.getLockedSynchronizers(); + if (locks.length > 0) { + logOutput.append("\n\tNumber of locked synchronizers = ").append(locks.length); logOutput.append('\n'); - }; + for (final LockInfo li : locks) { + logOutput.append("\t- ").append(Objects.toString(li)); + logOutput.append('\n'); + } + } + logOutput.append('\n'); + }; } diff --git a/Util/src/main/java/io/deephaven/util/type/ClassUtils.java b/Util/src/main/java/io/deephaven/util/type/ClassUtils.java index 06dddad02f5..4326dc65be0 100644 --- a/Util/src/main/java/io/deephaven/util/type/ClassUtils.java +++ b/Util/src/main/java/io/deephaven/util/type/ClassUtils.java @@ -4,8 +4,8 @@ public class ClassUtils { /** - * Gets the specified className, and if it is assignable from the expectedType returns it. - * Otherwise throws a RuntimeException. + * Gets the specified className, and if it is assignable from the expectedType returns it. Otherwise throws a + * RuntimeException. 
* * @param className the class we would like to retrieve * @param expectedType the type of class we expect className to be @@ -13,8 +13,7 @@ public class ClassUtils { * @return the Class object for className */ @NotNull - static public Class checkedClassForName(final String className, - final Class expectedType) { + static public Class checkedClassForName(final String className, final Class expectedType) { Class resultClass; try { resultClass = Class.forName(className); @@ -24,7 +23,7 @@ static public Class checkedClassForName(final String className, if (!expectedType.isAssignableFrom(resultClass)) { throw new RuntimeException("Invalid session class, " + resultClass.getCanonicalName() - + ", does not implement " + expectedType.getCanonicalName()); + + ", does not implement " + expectedType.getCanonicalName()); } // noinspection unchecked diff --git a/Util/src/main/java/io/deephaven/util/type/EnumValue.java b/Util/src/main/java/io/deephaven/util/type/EnumValue.java index a0dd356544b..4d6a789be65 100644 --- a/Util/src/main/java/io/deephaven/util/type/EnumValue.java +++ b/Util/src/main/java/io/deephaven/util/type/EnumValue.java @@ -5,8 +5,7 @@ public class EnumValue { /** - * Retrieves the enum constant whose name matches a given value according to case-insensitive - * comparison. + * Retrieves the enum constant whose name matches a given value according to case-insensitive comparison. 
* * @param enumClass the enum type we are querying * @param value the constant value we are looking up @@ -14,9 +13,9 @@ public class EnumValue { * @throws IllegalArgumentException when value is not found in the Enum's constants */ public static > T caseInsensitiveValueOf(Class enumClass, String value) { - return Arrays.stream(enumClass.getEnumConstants()) - .filter(x -> x.name().equalsIgnoreCase(value)).findFirst().orElseThrow( - () -> new IllegalArgumentException( - enumClass.getSimpleName() + " has no constant that matches " + value)); + return Arrays.stream(enumClass.getEnumConstants()).filter(x -> x.name().equalsIgnoreCase(value)).findFirst() + .orElseThrow( + () -> new IllegalArgumentException( + enumClass.getSimpleName() + " has no constant that matches " + value)); } } diff --git a/Util/src/main/java/io/deephaven/util/type/NamedImplementation.java b/Util/src/main/java/io/deephaven/util/type/NamedImplementation.java index 9e8cb446c6c..64cea0dd214 100644 --- a/Util/src/main/java/io/deephaven/util/type/NamedImplementation.java +++ b/Util/src/main/java/io/deephaven/util/type/NamedImplementation.java @@ -5,8 +5,8 @@ public interface NamedImplementation { /** *

      * Get a name for the implementing class. Useful for abstract classes that implement - * {@link io.deephaven.base.log.LogOutputAppendable LogOutputAppendable} or override - * {@link Object#toString() toString}. + * {@link io.deephaven.base.log.LogOutputAppendable LogOutputAppendable} or override {@link Object#toString() + * toString}. *

      * The default implementation is correct, but not suitable for high-frequency usage. * diff --git a/Util/src/main/java/io/deephaven/util/type/TypeUtils.java b/Util/src/main/java/io/deephaven/util/type/TypeUtils.java index a6ae955aaab..1c829f86887 100644 --- a/Util/src/main/java/io/deephaven/util/type/TypeUtils.java +++ b/Util/src/main/java/io/deephaven/util/type/TypeUtils.java @@ -52,10 +52,9 @@ public class TypeUtils { boxedToPrimitive = Collections.unmodifiableMap(boxedToPrimitiveTemp); PRIMITIVE_TYPES = Collections.unmodifiableSet(primitiveToBoxedTemp.keySet()); - BOXED_TYPES = - Collections.unmodifiableSet(new LinkedHashSet<>(primitiveToBoxedTemp.values())); - primitiveClassNameToClass = Collections.unmodifiableMap( - PRIMITIVE_TYPES.stream().collect(Collectors.toMap(Class::getName, type -> type))); + BOXED_TYPES = Collections.unmodifiableSet(new LinkedHashSet<>(primitiveToBoxedTemp.values())); + primitiveClassNameToClass = Collections + .unmodifiableMap(PRIMITIVE_TYPES.stream().collect(Collectors.toMap(Class::getName, type -> type))); } @Retention(RetentionPolicy.RUNTIME) @@ -64,9 +63,8 @@ public class TypeUtils { } /** - * Returns a reference type corresponding to the given {@code type}. If {@code type} is itself a - * reference type, then {@code type} is returned. If {@code type} is a primitive type, then the - * appropriate boxed type is returned. + * Returns a reference type corresponding to the given {@code type}. If {@code type} is itself a reference type, + * then {@code type} is returned. If {@code type} is a primitive type, then the appropriate boxed type is returned. * * @param type The type */ @@ -78,9 +76,9 @@ public static Class getBoxedType(Class type) { } /** - * Returns the primitive type corresponding to the given {@code type}. If {@code type} is itself - * a primitive type, then {@code type} is returned. If {@code type} is neither a primitive type - * nor a boxed type, then {@code null} is returned. 
+ * Returns the primitive type corresponding to the given {@code type}. If {@code type} is itself a primitive type, + * then {@code type} is returned. If {@code type} is neither a primitive type nor a boxed type, then {@code null} is + * returned. * * @param type The type * @return type's primitive equivalent, or null @@ -343,9 +341,8 @@ public static double[] toDoubleArray(double[] array) { public static boolean isConvertibleToPrimitive(Class type) { final Class unboxedType = TypeUtils.getUnboxedType(type); - return unboxedType != null && unboxedType != boolean.class; // TODO: - // isConvertibleToPrimitive(Boolean.class) - // == false ??? + return unboxedType != null && unboxedType != boolean.class; // TODO: isConvertibleToPrimitive(Boolean.class) == + // false ??? } public static boolean isBoxedType(Class exprType) { @@ -375,16 +372,14 @@ public static String nullConstantForType(Class type) { } /** - * Whether the class is equal to one of the six numeric primitives: float, double, int, long, - * short, or byte. + * Whether the class is equal to one of the six numeric primitives: float, double, int, long, short, or byte. 
* * @param c class * @return true if {@code c} is a numeric primitive, false otherwise */ public static boolean isPrimitiveNumeric(@NotNull final Class c) { return c.equals(double.class) || c.equals(float.class) - || c.equals(int.class) || c.equals(long.class) || c.equals(short.class) - || c.equals(byte.class); + || c.equals(int.class) || c.equals(long.class) || c.equals(short.class) || c.equals(byte.class); } /** @@ -525,7 +520,7 @@ public static boolean isCharacter(@NotNull final Class c) { */ public static boolean isDateTime(Class type) { return Date.class.isAssignableFrom(type) || type.getAnnotation(IsDateTime.class) != null - && ((IsDateTime) type.getAnnotation(IsDateTime.class)).value(); + && ((IsDateTime) type.getAnnotation(IsDateTime.class)).value(); } /** @@ -565,15 +560,13 @@ public static boolean isPrimitiveOrSerializable(Class type) { * @return true if it is a float type, false otherwise */ public static boolean isFloatType(Class type) { - return type.equals(double.class) || type.equals(float.class) || isBoxedDouble(type) - || isBoxedFloat(type); + return type.equals(double.class) || type.equals(float.class) || isBoxedDouble(type) || isBoxedFloat(type); } /** - * Converts an Object to a String for writing to a workspace. This is meant to be used in - * conjunction with {@code TypeUtils.fromString}. Strings, Numbers, and primitives will all - * convert using {@code Obect.toString}. Serializable objects will be encoded in base64. All - * others will return null. + * Converts an Object to a String for writing to a workspace. This is meant to be used in conjunction with + * {@code TypeUtils.fromString}. Strings, Numbers, and primitives will all convert using {@code Obect.toString}. + * Serializable objects will be encoded in base64. All others will return null. 
* * @param o the object to convert * @return a String representation of the object, null if it cannot be converted @@ -585,23 +578,21 @@ public static String objectToString(Object o) throws IOException { } final Class type = o.getClass(); - // isNumeric gets BigInteger and BigDecimal in addition to everything gotten by - // isConvertibleToPrimitive + // isNumeric gets BigInteger and BigDecimal in addition to everything gotten by isConvertibleToPrimitive if (type == String.class || isConvertibleToPrimitive(type) || isNumeric(type)) { return o.toString(); } else if (o instanceof Serializable) { return encode64Serializable((Serializable) o); } - throw new RuntimeException("Failed to convert object of type " + type.getCanonicalName() - + ". Type not supported"); + throw new RuntimeException( + "Failed to convert object of type " + type.getCanonicalName() + ". Type not supported"); } /** - * Creates an Object from a String. This is meant to be used in conjunction with - * {@code TypeUtils.objectToString} Strings, Numbers, and primitives will all parse using their - * boxed type parsing methods. Serializable types will be decoded from base64. Returns null if - * the String fails to parse. + * Creates an Object from a String. This is meant to be used in conjunction with {@code TypeUtils.objectToString} + * Strings, Numbers, and primitives will all parse using their boxed type parsing methods. Serializable types will + * be decoded from base64. Returns null if the String fails to parse. * * @param string the String to parse * @param typeString the Canonical Name of the class type @@ -620,10 +611,9 @@ public static Optional fromString(String string, String typeString) thro } /** - * Creates an Object from a String. This is meant to be used in conjunction with - * {@code TypeUtils.objectToString} Strings, Numbers, and primitives will all parse using their - * boxed type parsing methods. Serializable types will be decoded from base64. 
Returns null if - * the String fails to parse. + * Creates an Object from a String. This is meant to be used in conjunction with {@code TypeUtils.objectToString} + * Strings, Numbers, and primitives will all parse using their boxed type parsing methods. Serializable types will + * be decoded from base64. Returns null if the String fails to parse. * * @param string the String to parse * @param type the type of the object @@ -662,12 +652,11 @@ public static Object fromString(String string, Class type) throws IOException } catch (IOException ioe) { throw ioe; } catch (Exception e) { - throw new RuntimeException( - "Failed to parse " + string + "into type " + type.getCanonicalName(), e); + throw new RuntimeException("Failed to parse " + string + "into type " + type.getCanonicalName(), e); } - throw new RuntimeException("Failed to parse " + string + "into type " - + type.getCanonicalName() + ". Type not supported"); + throw new RuntimeException( + "Failed to parse " + string + "into type " + type.getCanonicalName() + ". 
Type not supported"); } /** @@ -679,7 +668,7 @@ public static Object fromString(String string, Class type) throws IOException */ public static String encode64Serializable(Serializable serializable) throws IOException { try (ByteArrayOutputStream bos = new ByteArrayOutputStream(); - ObjectOutputStream os = new ObjectOutputStream(bos)) { + ObjectOutputStream os = new ObjectOutputStream(bos)) { os.writeObject(serializable); return Base64.getEncoder().encodeToString(bos.toByteArray()); } @@ -693,10 +682,9 @@ public static String encode64Serializable(Serializable serializable) throws IOEx * @throws IOException if the string cannot be decoded * @throws ClassNotFoundException if the Object type is unknown */ - public static Object decode64Serializable(String string) - throws IOException, ClassNotFoundException { + public static Object decode64Serializable(String string) throws IOException, ClassNotFoundException { try (ObjectInputStream is = - new ObjectInputStream(new ByteArrayInputStream(Base64.getDecoder().decode(string)))) { + new ObjectInputStream(new ByteArrayInputStream(Base64.getDecoder().decode(string)))) { return is.readObject(); } } @@ -711,8 +699,7 @@ public static Class getErasedType(Type paramType) { if (paramType instanceof Class) { return (Class) paramType; } else if (paramType instanceof ParameterizedType) { - return (Class) // We are asking the parameterized type for it's raw type, which is - // always Class + return (Class) // We are asking the parameterized type for it's raw type, which is always Class ((ParameterizedType) paramType).getRawType(); } else if (paramType instanceof WildcardType) { final Type[] upper = ((WildcardType) paramType).getUpperBounds(); @@ -769,8 +756,7 @@ private static Class getWeakest(Class one, Class two) { return Object.class; } } - // Will be Object.class if there were no shared interfaces (or shared interfaces were not - // compatible). 
+ // Will be Object.class if there were no shared interfaces (or shared interfaces were not compatible). return strongest; } diff --git a/Util/src/main/java/io/deephaven/utils/BigDecimalUtils.java b/Util/src/main/java/io/deephaven/utils/BigDecimalUtils.java index 0d4a74da42e..800a7e1482c 100644 --- a/Util/src/main/java/io/deephaven/utils/BigDecimalUtils.java +++ b/Util/src/main/java/io/deephaven/utils/BigDecimalUtils.java @@ -34,16 +34,15 @@ public static BigDecimal sqrt(final BigDecimal x, final int scale) { } final double intermediateInitial = Math.sqrt(x.doubleValue()); - final double initial = Double.isFinite(intermediateInitial) ? intermediateInitial - : Math.sqrt(Double.MAX_VALUE); + final double initial = Double.isFinite(intermediateInitial) ? intermediateInitial : Math.sqrt(Double.MAX_VALUE); final BigDecimal epsilon = new BigDecimal(BigInteger.ONE, scale); BigDecimal x0 = BigDecimal.valueOf(initial); while (true) { - final BigDecimal x1 = x0.add(x.divide(x0, scale, BigDecimal.ROUND_HALF_UP)).divide(TWO, - scale, BigDecimal.ROUND_HALF_UP); + final BigDecimal x1 = + x0.add(x.divide(x0, scale, BigDecimal.ROUND_HALF_UP)).divide(TWO, scale, BigDecimal.ROUND_HALF_UP); final BigDecimal difference = x1.subtract(x0).abs(); x0 = x1; diff --git a/Util/src/main/java/io/deephaven/utils/ClockFactory.java b/Util/src/main/java/io/deephaven/utils/ClockFactory.java index d3539b3cacb..c5d81e1ef3d 100644 --- a/Util/src/main/java/io/deephaven/utils/ClockFactory.java +++ b/Util/src/main/java/io/deephaven/utils/ClockFactory.java @@ -4,8 +4,8 @@ import io.deephaven.util.clock.RealTimeClock; /** - * This is a hook to enable testing and simulation. Users of Clock should use this factory to get an - * instance. When they do, it is possible to change the class behavior from a test harness. + * This is a hook to enable testing and simulation. Users of Clock should use this factory to get an instance. When they + * do, it is possible to change the class behavior from a test harness. 
* * Note: This would be better housed as a member of Clock. */ diff --git a/Util/src/test/java/io/deephaven/util/UtilsTest.java b/Util/src/test/java/io/deephaven/util/UtilsTest.java index 4836ac2db68..46128d67335 100644 --- a/Util/src/test/java/io/deephaven/util/UtilsTest.java +++ b/Util/src/test/java/io/deephaven/util/UtilsTest.java @@ -26,16 +26,13 @@ public void testSimpleName() { Assert.assertTrue(f2.getClass().getSimpleName().isEmpty()); Assert.assertTrue(!Utils.getSimpleNameFor(f1).isEmpty()); - // simple name of anonymous extension same as parent // note: we could want to change this - // someday + // simple name of anonymous extension same as parent // note: we could want to change this someday Assert.assertEquals(Utils.getSimpleNameFor(f1), Utils.getSimpleNameFor(f2)); // class and object behave the same Assert.assertEquals(Utils.getSimpleNameFor(f1), Utils.getSimpleNameFor(Foo.class)); - Assert.assertEquals(Utils.getSimpleNameFor(new Foo() {}), - Utils.getSimpleNameFor(Foo.class)); - Assert.assertEquals(Utils.getSimpleNameFor(new AFoo() {}), - Utils.getSimpleNameFor(AFoo.class)); + Assert.assertEquals(Utils.getSimpleNameFor(new Foo() {}), Utils.getSimpleNameFor(Foo.class)); + Assert.assertEquals(Utils.getSimpleNameFor(new AFoo() {}), Utils.getSimpleNameFor(AFoo.class)); // simple anonymous class returns "Object" Assert.assertEquals(Utils.getSimpleNameFor(new IFoo() { @@ -46,9 +43,7 @@ public int foo() { }), Utils.getSimpleNameFor(Object.class)); // check the lambda version - // Assert.assertEquals(Utils.getSimpleNameFor((IFoo) () -> 0), - // Utils.getSimpleNameFor(Object.class)); - // this getClass().getSimpleName() on a lambda returns something like - // "UtilsTest$$Lambda$2/385242642" + // Assert.assertEquals(Utils.getSimpleNameFor((IFoo) () -> 0), Utils.getSimpleNameFor(Object.class)); + // this getClass().getSimpleName() on a lambda returns something like "UtilsTest$$Lambda$2/385242642" } } diff --git 
a/Util/src/test/java/io/deephaven/util/codec/BigDecimalCodecTest.java b/Util/src/test/java/io/deephaven/util/codec/BigDecimalCodecTest.java index 9f3f660f1e1..1510b038237 100644 --- a/Util/src/test/java/io/deephaven/util/codec/BigDecimalCodecTest.java +++ b/Util/src/test/java/io/deephaven/util/codec/BigDecimalCodecTest.java @@ -36,8 +36,8 @@ private void roundTrip(final String args, final BigDecimal value, final BigDecim roundTripWithOffset(args, value, expected, 0); } - private void roundTripWithOffset(final String args, final BigDecimal value, - final BigDecimal expected, final int offset) { + private void roundTripWithOffset(final String args, final BigDecimal value, final BigDecimal expected, + final int offset) { final BigDecimalCodec codec = new BigDecimalCodec(args); byte[] enc = codec.encode(value); // if we expect to be decoding from an offset, construct the input accordingly @@ -208,8 +208,7 @@ public void testFixedEncodeNoRounding() { } public void testFixedOverflow() { - // we should get overflow exceptions if the value is too large, regardless if we allow - // rounding + // we should get overflow exceptions if the value is too large, regardless if we allow rounding expectIllegalArgumentException("5,5,allowRounding", 1111111); expectIllegalArgumentException("5,5,noRounding", 1111111); expectIllegalArgumentException("10,3,noRounding", 9999999999L); // just one over @@ -222,12 +221,10 @@ public void testIllegalRoundingMode() { public void testFixedPrecisionLimit() { final int maxPrec = BigDecimalCodec.MAX_FIXED_PRECISION; - final BigDecimal hugeInt = - BigDecimal.valueOf(10).pow(BigDecimalCodec.MAX_FIXED_PRECISION - 1); - final BigDecimal hugeFrac = BigDecimal.valueOf(10) - .pow(BigDecimalCodec.MAX_FIXED_PRECISION - 4).add(BigDecimal.valueOf(0.111)); - final BigDecimal teenyNumber = - BigDecimal.valueOf(0.1).pow(BigDecimalCodec.MAX_FIXED_PRECISION - 1); + final BigDecimal hugeInt = BigDecimal.valueOf(10).pow(BigDecimalCodec.MAX_FIXED_PRECISION - 1); + 
final BigDecimal hugeFrac = + BigDecimal.valueOf(10).pow(BigDecimalCodec.MAX_FIXED_PRECISION - 4).add(BigDecimal.valueOf(0.111)); + final BigDecimal teenyNumber = BigDecimal.valueOf(0.1).pow(BigDecimalCodec.MAX_FIXED_PRECISION - 1); // 614 should be ok, that's how many decimal digits we can store in 255 bytes roundTrip(maxPrec + ",5", 12345); @@ -316,29 +313,23 @@ public void testFixedEncodingWithOffset() { // these commented out tests are fun to run manually /* - * public void testSpeed() { final Random random = new Random(); final long start = - * System.currentTimeMillis(); final int n = 10_000_000; + * public void testSpeed() { final Random random = new Random(); final long start = System.currentTimeMillis(); + * final int n = 10_000_000; * - * for(int i = 0; i < n; i++) { BigDecimal bd = - * BigDecimal.valueOf(Math.round(random.nextDouble() * 10_000_000)); bd = - * bd.setScale(9).round(new MathContext(20)); if(random.nextDouble() >= 0.5) { bd = bd.negate(); - * } roundTrip("20,9,noRounding", bd); } final long end = System.currentTimeMillis(); + * for(int i = 0; i < n; i++) { BigDecimal bd = BigDecimal.valueOf(Math.round(random.nextDouble() * 10_000_000)); bd + * = bd.setScale(9).round(new MathContext(20)); if(random.nextDouble() >= 0.5) { bd = bd.negate(); } + * roundTrip("20,9,noRounding", bd); } final long end = System.currentTimeMillis(); * System.out.println("Encoded & decoded " + n + " in " + (end-start) + "ms"); } * - * private void printEncoded(byte[] enc) { for(int i = 0; i < enc.length; i++) { - * System.out.print(" "); int d = enc[i]; System.out.print(String.format("0x%02X",(int)(d & - * 0xff))); } System.out.println(); } + * private void printEncoded(byte[] enc) { for(int i = 0; i < enc.length; i++) { System.out.print(" "); int d = + * enc[i]; System.out.print(String.format("0x%02X",(int)(d & 0xff))); } System.out.println(); } * * public void testOrdering() { BigDecimalCodec codec = new BigDecimalCodec(10, 3, true); * 
printEncoded(codec.encode(BigDecimal.valueOf(-9999999.999))); - * printEncoded(codec.encode(BigDecimal.valueOf(-10000.200))); - * printEncoded(codec.encode(BigDecimal.valueOf(-0.1))); - * printEncoded(codec.encode(BigDecimal.valueOf(-0.01))); - * printEncoded(codec.encode(BigDecimal.valueOf(-0.001))); - * printEncoded(codec.encode(BigDecimal.valueOf(0))); - * printEncoded(codec.encode(BigDecimal.valueOf(0.001))); - * printEncoded(codec.encode(BigDecimal.valueOf(0.01))); - * printEncoded(codec.encode(BigDecimal.valueOf(0.1))); + * printEncoded(codec.encode(BigDecimal.valueOf(-10000.200))); printEncoded(codec.encode(BigDecimal.valueOf(-0.1))); + * printEncoded(codec.encode(BigDecimal.valueOf(-0.01))); printEncoded(codec.encode(BigDecimal.valueOf(-0.001))); + * printEncoded(codec.encode(BigDecimal.valueOf(0))); printEncoded(codec.encode(BigDecimal.valueOf(0.001))); + * printEncoded(codec.encode(BigDecimal.valueOf(0.01))); printEncoded(codec.encode(BigDecimal.valueOf(0.1))); * printEncoded(codec.encode(BigDecimal.valueOf(100))); } */ } diff --git a/Util/src/test/java/io/deephaven/util/codec/BigIntegerCodecTest.java b/Util/src/test/java/io/deephaven/util/codec/BigIntegerCodecTest.java index b7faa4d393e..3b398cd9b2a 100644 --- a/Util/src/test/java/io/deephaven/util/codec/BigIntegerCodecTest.java +++ b/Util/src/test/java/io/deephaven/util/codec/BigIntegerCodecTest.java @@ -28,8 +28,8 @@ private void roundTrip(final String args, final BigInteger value) { roundTripWithOffset(args, value, value, 0); } - private void roundTripWithOffset(final String args, final BigInteger value, - final BigInteger expected, final int offset) { + private void roundTripWithOffset(final String args, final BigInteger value, final BigInteger expected, + final int offset) { final BigIntegerCodec codec = new BigIntegerCodec(args); byte[] enc = codec.encode(value); // if we expect to be decoding from an offset, construct the input accordingly @@ -149,10 +149,8 @@ public void testPrecisionLimits() { 
public void testLargeValues() { final int maxPrec = BigIntegerCodec.MAX_FIXED_PRECISION; - final BigInteger hugeInt = - BigInteger.valueOf(10).pow(BigIntegerCodec.MAX_FIXED_PRECISION - 1); - final BigInteger hugeNegativeInt = - BigInteger.valueOf(10).pow(BigIntegerCodec.MAX_FIXED_PRECISION - 1).negate(); + final BigInteger hugeInt = BigInteger.valueOf(10).pow(BigIntegerCodec.MAX_FIXED_PRECISION - 1); + final BigInteger hugeNegativeInt = BigInteger.valueOf(10).pow(BigIntegerCodec.MAX_FIXED_PRECISION - 1).negate(); // prove that we can encode and decode a huge integer roundTrip(Integer.toString(maxPrec), hugeInt); diff --git a/Util/src/test/java/io/deephaven/util/codec/LocalDateCodecTest.java b/Util/src/test/java/io/deephaven/util/codec/LocalDateCodecTest.java index f0b87bc5662..a58419e1880 100644 --- a/Util/src/test/java/io/deephaven/util/codec/LocalDateCodecTest.java +++ b/Util/src/test/java/io/deephaven/util/codec/LocalDateCodecTest.java @@ -32,8 +32,8 @@ private void roundTripWithOffset(final String args, final LocalDate value, final roundTripWithOffset(args, value, value, offset); } - private void roundTripWithOffset(final String args, final LocalDate value, - final LocalDate expected, final int offset) { + private void roundTripWithOffset(final String args, final LocalDate value, final LocalDate expected, + final int offset) { final LocalDateCodec codec = new LocalDateCodec(args); byte[] enc = codec.encode(value); // if we expect to be decoding from an offset, construct the input accordingly @@ -159,8 +159,7 @@ public void testOutOfRangeCompactDate() { } public void testCompactEncodingWithOffset() { - roundTripWithOffset(LocalDateCodec.Domain.Compact.name(), "0000-01-01", 3); // minimum - // compact value + roundTripWithOffset(LocalDateCodec.Domain.Compact.name(), "0000-01-01", 3); // minimum compact value roundTripWithOffset(LocalDateCodec.Domain.Compact.name(), "9999-12-31", 3); roundTripWithOffset(LocalDateCodec.Domain.Compact.name(), "2018-01-01", 3); diff 
--git a/Util/src/test/java/io/deephaven/util/codec/LocalTimeCodecTest.java b/Util/src/test/java/io/deephaven/util/codec/LocalTimeCodecTest.java index 3b3efa6fbf8..4edf46a8058 100644 --- a/Util/src/test/java/io/deephaven/util/codec/LocalTimeCodecTest.java +++ b/Util/src/test/java/io/deephaven/util/codec/LocalTimeCodecTest.java @@ -31,8 +31,8 @@ private void roundTripWithOffset(final String args, final LocalTime value, final roundTripWithOffset(args, value, value, offset); } - private void roundTripWithOffset(final String args, final LocalTime value, - final LocalTime expected, final int offset) { + private void roundTripWithOffset(final String args, final LocalTime value, final LocalTime expected, + final int offset) { final LocalTimeCodec codec = new LocalTimeCodec(args); byte[] enc = codec.encode(value); // if we expect to be decoding from an offset, construct the input accordingly @@ -79,8 +79,7 @@ public void testNanosEncoding() { roundTrip("", (LocalTime) null); } - // test encoding with 7 fractional digits (nteresting because this matches SQL Server TIME - // precision) + // test encoding with 7 fractional digits (nteresting because this matches SQL Server TIME precision) public void test100NanosEncoding() { roundTrip("7", LocalTime.of(0, 0, 0, 0)); roundTrip("7", LocalTime.of(23, 0, 0, 0)); @@ -93,8 +92,8 @@ public void test100NanosEncoding() { public void testMillisEncoding() { roundTrip("3", LocalTime.of(0, 0, 0, 0)); - roundTrip("3", LocalTime.of(23, 59, 59, 999_000_000)); // 999 millis is max fractional value - // with milli precision + roundTrip("3", LocalTime.of(23, 59, 59, 999_000_000)); // 999 millis is max fractional value with milli + // precision roundTrip("3", "18:36:29.123"); roundTrip("3", "03:00:00"); @@ -103,8 +102,8 @@ public void testMillisEncoding() { public void testMicrosEncoding() { roundTrip("6", LocalTime.of(0, 0, 0, 0)); - roundTrip("6", LocalTime.of(23, 59, 59, 999_999_000)); // 999 millis is max fractional value - // with milli 
precision + roundTrip("6", LocalTime.of(23, 59, 59, 999_999_000)); // 999 millis is max fractional value with milli + // precision roundTrip("6", "18:36:29.123456"); roundTrip("6", "03:00:00"); @@ -113,8 +112,7 @@ public void testMicrosEncoding() { public void testSecondEncoding() { roundTrip("0", LocalTime.of(0, 0, 0, 0)); - roundTrip("0", LocalTime.of(23, 59, 59, 0)); // 999 millis is max fractional value with - // milli precision + roundTrip("0", LocalTime.of(23, 59, 59, 0)); // 999 millis is max fractional value with milli precision roundTrip("0", "18:36:29"); roundTrip("0", "03:00:00"); @@ -130,8 +128,7 @@ public void testNanosNotNullEncoding() { roundTrip("9,notnull", "03:00:00"); } - // test encoding with 7 fractional digits (nteresting because this matches SQL Server TIME - // precision) + // test encoding with 7 fractional digits (nteresting because this matches SQL Server TIME precision) public void test100NanosNotNullEncoding() { roundTrip("7,notnull", LocalTime.of(0, 0, 0, 0)); roundTrip("7,notnull", LocalTime.of(23, 0, 0, 0)); @@ -143,9 +140,8 @@ public void test100NanosNotNullEncoding() { public void testMillisNotNullEncoding() { roundTrip("3,notnull", LocalTime.of(0, 0, 0, 0)); - roundTrip("3,notnull", LocalTime.of(23, 59, 59, 999_000_000)); // 999 millis is max - // fractional value with - // milli precision + roundTrip("3,notnull", LocalTime.of(23, 59, 59, 999_000_000)); // 999 millis is max fractional value with milli + // precision roundTrip("3,notnull", "18:36:29.123"); roundTrip("3,notnull", "03:00:00"); @@ -153,9 +149,8 @@ public void testMillisNotNullEncoding() { public void testMicrosNotNullEncoding() { roundTrip("6,notnull", LocalTime.of(0, 0, 0, 0)); - roundTrip("6,notnull", LocalTime.of(23, 59, 59, 999_999_000)); // 999 millis is max - // fractional value with - // milli precision + roundTrip("6,notnull", LocalTime.of(23, 59, 59, 999_999_000)); // 999 millis is max fractional value with milli + // precision roundTrip("6,notnull", 
"18:36:29.123456"); roundTrip("6,notnull", "03:00:00"); @@ -163,8 +158,7 @@ public void testMicrosNotNullEncoding() { public void testSecondNotNullEncoding() { roundTrip("0,notnull", LocalTime.of(0, 0, 0, 0)); - roundTrip("0,notnull", LocalTime.of(23, 59, 59, 0)); // 999 millis is max fractional value - // with milli precision + roundTrip("0,notnull", LocalTime.of(23, 59, 59, 0)); // 999 millis is max fractional value with milli precision roundTrip("0,notnull", "18:36:29"); roundTrip("0,notnull", "03:00:00"); @@ -213,8 +207,7 @@ public void testNanosNotNullEncodingWithOffset() { public void testMillisEncodingWithOffset() { roundTripWithOffset("3", LocalTime.of(0, 0, 0, 0), 3); - roundTripWithOffset("3", LocalTime.of(23, 59, 59, 999_000_000), 3); // 999 millis is max - // fractional value with + roundTripWithOffset("3", LocalTime.of(23, 59, 59, 999_000_000), 3); // 999 millis is max fractional value with // milli precision roundTripWithOffset("3", "18:36:29.123", 3); @@ -224,12 +217,8 @@ public void testMillisEncodingWithOffset() { public void testMillisNotNullEncodingWithOffset() { roundTripWithOffset("3,notnull", LocalTime.of(0, 0, 0, 0), 3); - roundTripWithOffset("3,notnull", LocalTime.of(23, 59, 59, 999_000_000), 3); // 999 millis is - // max - // fractional - // value with - // milli - // precision + roundTripWithOffset("3,notnull", LocalTime.of(23, 59, 59, 999_000_000), 3); // 999 millis is max fractional + // value with milli precision roundTripWithOffset("3,notnull", "18:36:29.123", 3); roundTripWithOffset("3,notnull", "03:00:00", 3); diff --git a/Util/src/test/java/io/deephaven/util/datastructures/TestRandomAccessDeque.java b/Util/src/test/java/io/deephaven/util/datastructures/TestRandomAccessDeque.java index 9cf06e7936b..b516ab4190f 100644 --- a/Util/src/test/java/io/deephaven/util/datastructures/TestRandomAccessDeque.java +++ b/Util/src/test/java/io/deephaven/util/datastructures/TestRandomAccessDeque.java @@ -62,10 +62,9 @@ public void testSimple() { 
assertFalse(deque.removeIf(x -> x == 999)); TestCase.assertTrue(Arrays.equals(values.toArray(), deque.toArray())); + TestCase.assertTrue(Arrays.equals(values.toArray(new Integer[0]), deque.toArray(new Integer[0]))); TestCase.assertTrue( - Arrays.equals(values.toArray(new Integer[0]), deque.toArray(new Integer[0]))); - TestCase.assertTrue(Arrays.equals(values.toArray(new Integer[values.size()]), - deque.toArray(new Integer[deque.size()]))); + Arrays.equals(values.toArray(new Integer[values.size()]), deque.toArray(new Integer[deque.size()]))); values.addAll(Arrays.asList(7, 8, 9)); deque.addAll(Arrays.asList(7, 8, 9)); diff --git a/Util/src/test/java/io/deephaven/util/datastructures/TestSegmentedSoftPool.java b/Util/src/test/java/io/deephaven/util/datastructures/TestSegmentedSoftPool.java index c03ee204d47..b8adbb33d8b 100644 --- a/Util/src/test/java/io/deephaven/util/datastructures/TestSegmentedSoftPool.java +++ b/Util/src/test/java/io/deephaven/util/datastructures/TestSegmentedSoftPool.java @@ -33,10 +33,10 @@ public void testWithoutFactory() { IntStream.range(0, 100).boxed().forEach(pool::give); IntStream.range(0, 100).boxed().sorted(Comparator.reverseOrder()) - .forEach(II -> TestCase.assertEquals(II, pool.take())); + .forEach(II -> TestCase.assertEquals(II, pool.take())); IntStream.range(100, 400).boxed().forEach(pool::give); IntStream.range(100, 400).boxed().sorted(Comparator.reverseOrder()) - .forEach(II -> TestCase.assertEquals(II, pool.take())); + .forEach(II -> TestCase.assertEquals(II, pool.take())); } @Test @@ -46,18 +46,18 @@ public void testWithFactory() { final MutableInt sumCleared = new MutableInt(0); final SegmentedSoftPool pool = new SegmentedSoftPool<>(10, - () -> { - counter.increment(); - sumAllocated.add(counter); - return counter.toInteger(); - }, - sumCleared::add); + () -> { + counter.increment(); + sumAllocated.add(counter); + return counter.toInteger(); + }, + sumCleared::add); IntStream.range(0, 10).boxed().forEach( - II -> { - 
TestCase.assertEquals((Integer) 0, pool.take()); - pool.give(0); - }); + II -> { + TestCase.assertEquals((Integer) 0, pool.take()); + pool.give(0); + }); IntStream.range(0, 1000).boxed().forEach(II -> TestCase.assertEquals(II, pool.take())); IntStream.range(0, 1000).boxed().forEach(pool::give); @@ -108,11 +108,11 @@ private TestObject() { public void testForGC() { // noinspection ConstantConditions Assume.assumeTrue( - "Skipping testForGC, as it is very long under most conditions, requires specific JVM parameters, and isn't really a unit test", - false); + "Skipping testForGC, as it is very long under most conditions, requires specific JVM parameters, and isn't really a unit test", + false); - // With the following settings, cleanup should begin in 30 seconds and converge on 72 items - // remaining: -Xmx4g -XX:SoftRefLRUPolicyMSPerMB=10 + // With the following settings, cleanup should begin in 30 seconds and converge on 72 items remaining: -Xmx4g + // -XX:SoftRefLRUPolicyMSPerMB=10 final long nanosStart = System.nanoTime(); final SegmentedSoftPool pool = new SegmentedSoftPool<>(8, null, null); @@ -127,10 +127,9 @@ public void testForGC() { final TestObject[] inUse = new TestObject[65]; long nanosLastCleanup = 0; - while (OUTSTANDING_INSTANCES.cardinality() != 72 - || System.nanoTime() - nanosLastCleanup < 10_000_000_000L) { - // Try to simulate some usage of the pool - we want soft refs that have been followed - // more recently than others. + while (OUTSTANDING_INSTANCES.cardinality() != 72 || System.nanoTime() - nanosLastCleanup < 10_000_000_000L) { + // Try to simulate some usage of the pool - we want soft refs that have been followed more recently than + // others. 
int lengthTaken = 0; for (; lengthTaken < inUse.length; ++lengthTaken) { try { @@ -141,8 +140,8 @@ public void testForGC() { } // Pause here to verify pool structure with items taken if (lengthTaken != inUse.length) { - throw new IllegalStateException("Pool has been cleaned up more than expected, took " - + lengthTaken + " out of " + inUse.length); + throw new IllegalStateException( + "Pool has been cleaned up more than expected, took " + lengthTaken + " out of " + inUse.length); } for (int oi = 0; oi < lengthTaken; ++oi) { pool.give(inUse[oi]); @@ -161,9 +160,8 @@ public void testForGC() { synchronized (OUTSTANDING_INSTANCES) { final long elapsedSeconds = (nanosLastCleanup - nanosStart) / 1_000_000_000L; System.err.println("elapsedSeconds=" + elapsedSeconds + ", firstRemaining=" - + OUTSTANDING_INSTANCES.nextSetBit(0) + ", remaining=" - + OUTSTANDING_INSTANCES.cardinality() + ", allocated=" - + OUTSTANDING_INSTANCES.length()); + + OUTSTANDING_INSTANCES.nextSetBit(0) + ", remaining=" + OUTSTANDING_INSTANCES.cardinality() + + ", allocated=" + OUTSTANDING_INSTANCES.length()); } } } diff --git a/Util/src/test/java/io/deephaven/util/datastructures/TestSimpleReferenceManager.java b/Util/src/test/java/io/deephaven/util/datastructures/TestSimpleReferenceManager.java index 29dbc578af6..2f1e01e8762 100644 --- a/Util/src/test/java/io/deephaven/util/datastructures/TestSimpleReferenceManager.java +++ b/Util/src/test/java/io/deephaven/util/datastructures/TestSimpleReferenceManager.java @@ -46,9 +46,8 @@ public void clear() { @SuppressWarnings({"NumberEquality", "PointlessArithmeticExpression"}) private void doTest(final boolean concurrent) { final SimpleReferenceManager> SUT = - new SimpleReferenceManager<>((final MutableInt item) -> ((IntRef) item), concurrent); - final IntRef[] items = - IntStream.range(0, 1000).mapToObj(IntRef::new).toArray(IntRef[]::new); + new SimpleReferenceManager<>((final MutableInt item) -> ((IntRef) item), concurrent); + final IntRef[] items = 
IntStream.range(0, 1000).mapToObj(IntRef::new).toArray(IntRef[]::new); Arrays.stream(items, 0, 500).forEach(SUT::add); @@ -56,26 +55,22 @@ private void doTest(final boolean concurrent) { testSumExpectations(SUT, expectedSum); Arrays.stream(items, 0, 500).forEach((final IntRef item) -> TestCase.assertSame(item, - SUT.getFirstItem((final MutableInt other) -> item == other))); + SUT.getFirstItem((final MutableInt other) -> item == other))); Arrays.stream(items, 0, 500).forEach((final IntRef item) -> TestCase.assertSame(item, - SUT.getFirstReference((final MutableInt other) -> item == other))); + SUT.getFirstReference((final MutableInt other) -> item == other))); items[200].clear(); expectedSum -= 200; - TestCase.assertSame(items[199], - SUT.getFirstItem((final MutableInt other) -> items[199] == other)); + TestCase.assertSame(items[199], SUT.getFirstItem((final MutableInt other) -> items[199] == other)); TestCase.assertNull(SUT.getFirstItem((final MutableInt other) -> items[200] == other)); - TestCase.assertSame(items[201], - SUT.getFirstItem((final MutableInt other) -> items[201] == other)); + TestCase.assertSame(items[201], SUT.getFirstItem((final MutableInt other) -> items[201] == other)); testSumExpectations(SUT, expectedSum); items[300].clear(); expectedSum -= 300; - TestCase.assertSame(items[299], - SUT.getFirstReference((final MutableInt other) -> items[299] == other)); + TestCase.assertSame(items[299], SUT.getFirstReference((final MutableInt other) -> items[299] == other)); TestCase.assertNull(SUT.getFirstReference((final MutableInt other) -> items[300] == other)); - TestCase.assertSame(items[301], - SUT.getFirstReference((final MutableInt other) -> items[301] == other)); + TestCase.assertSame(items[301], SUT.getFirstReference((final MutableInt other) -> items[301] == other)); testSumExpectations(SUT, expectedSum); items[400].clear(); @@ -97,9 +92,8 @@ private void doTest(final boolean concurrent) { testSumExpectations(SUT, expectedSum); } - private void 
testSumExpectations( - @NotNull final SimpleReferenceManager> SUT, - final int expectedSum) { + private void testSumExpectations(@NotNull final SimpleReferenceManager> SUT, + final int expectedSum) { final MutableInt sum = new MutableInt(); SUT.forEach((final SimpleReference ref, final MutableInt item) -> { TestCase.assertSame(ref, item); diff --git a/Util/src/test/java/io/deephaven/util/datastructures/TestWeakIdentityHashSet.java b/Util/src/test/java/io/deephaven/util/datastructures/TestWeakIdentityHashSet.java index 498bcee486d..2e4b484becf 100644 --- a/Util/src/test/java/io/deephaven/util/datastructures/TestWeakIdentityHashSet.java +++ b/Util/src/test/java/io/deephaven/util/datastructures/TestWeakIdentityHashSet.java @@ -14,8 +14,7 @@ public class TestWeakIdentityHashSet { @Test public void testAdd() { - String[] values = - IntStream.range(0, 1000).mapToObj(Integer::toString).toArray(String[]::new); + String[] values = IntStream.range(0, 1000).mapToObj(Integer::toString).toArray(String[]::new); final WeakIdentityHashSet set = new WeakIdentityHashSet<>(); Stream.of(values).forEach(v -> TestCase.assertTrue(set.add(v))); Stream.of(values).forEach(v -> TestCase.assertFalse(set.add(v))); @@ -28,8 +27,7 @@ public void testAdd() { @Test public void testClear() { - final String[] values = - IntStream.range(1000, 2000).mapToObj(Integer::toString).toArray(String[]::new); + final String[] values = IntStream.range(1000, 2000).mapToObj(Integer::toString).toArray(String[]::new); final WeakIdentityHashSet set = new WeakIdentityHashSet<>(); Stream.of(values).forEach(v -> TestCase.assertTrue(set.add(v))); Stream.of(values).forEach(v -> TestCase.assertFalse(set.add(v))); @@ -40,8 +38,7 @@ public void testClear() { @Test public void testForEach() { - final String[] values = - IntStream.range(1000, 2000).mapToObj(Integer::toString).toArray(String[]::new); + final String[] values = IntStream.range(1000, 2000).mapToObj(Integer::toString).toArray(String[]::new); final 
WeakIdentityHashSet set = new WeakIdentityHashSet<>(); final MutableInt counter = new MutableInt(0); diff --git a/Util/src/test/java/io/deephaven/util/datastructures/cache/TestBoundedIntrusiveMappingCache.java b/Util/src/test/java/io/deephaven/util/datastructures/cache/TestBoundedIntrusiveMappingCache.java index e073e170b5e..8866068ea88 100644 --- a/Util/src/test/java/io/deephaven/util/datastructures/cache/TestBoundedIntrusiveMappingCache.java +++ b/Util/src/test/java/io/deephaven/util/datastructures/cache/TestBoundedIntrusiveMappingCache.java @@ -27,12 +27,11 @@ public int applyAsInt(@NotNull final String value) { public void testLRU() { final int size = 10; final BoundedIntrusiveMappingCache.IntegerImpl cache = - new BoundedIntrusiveMappingCache.IntegerImpl<>(size); - final String[] strings = - IntStream.range(0, size).mapToObj(Integer::toString).toArray(String[]::new); + new BoundedIntrusiveMappingCache.IntegerImpl<>(size); + final String[] strings = IntStream.range(0, size).mapToObj(Integer::toString).toArray(String[]::new); final int addedSize = size / 2; - final String[] addedStrings = IntStream.range(size, size + addedSize) - .mapToObj(Integer::toString).toArray(String[]::new); + final String[] addedStrings = + IntStream.range(size, size + addedSize).mapToObj(Integer::toString).toArray(String[]::new); // Fill the cache initially, to its maximum size for (int si = 0; si < strings.length; si++) { @@ -48,8 +47,7 @@ public void testLRU() { TestCase.assertFalse(observer.created); } - // Prime the set of values we want to be most recently used (odd indexes, of which there are - // addedSize) + // Prime the set of values we want to be most recently used (odd indexes, of which there are addedSize) for (int si = 1; si < strings.length; si += 2) { final MappingCreationObserver observer = new MappingCreationObserver(); TestCase.assertEquals(si, cache.computeIfAbsent(strings[si], observer)); @@ -82,12 +80,11 @@ public void testLRU() { public void testFIFO() { final int 
size = 10; final BoundedIntrusiveMappingCache.FifoIntegerImpl cache = - new BoundedIntrusiveMappingCache.FifoIntegerImpl<>(size); - final String[] strings = - IntStream.range(0, size).mapToObj(Integer::toString).toArray(String[]::new); + new BoundedIntrusiveMappingCache.FifoIntegerImpl<>(size); + final String[] strings = IntStream.range(0, size).mapToObj(Integer::toString).toArray(String[]::new); final int addedSize = size / 2; - final String[] addedStrings = IntStream.range(size, size + addedSize) - .mapToObj(Integer::toString).toArray(String[]::new); + final String[] addedStrings = + IntStream.range(size, size + addedSize).mapToObj(Integer::toString).toArray(String[]::new); // Fill the cache initially, to its maximum size for (int si = 0; si < strings.length; si++) { @@ -103,8 +100,7 @@ public void testFIFO() { TestCase.assertFalse(observer.created); } - // Acccess the odds - not for any particular reason, just to make sure we don't care about - // access order + // Acccess the odds - not for any particular reason, just to make sure we don't care about access order for (int si = 1; si < strings.length; si += 2) { final MappingCreationObserver observer = new MappingCreationObserver(); TestCase.assertEquals(si, cache.computeIfAbsent(strings[si], observer)); diff --git a/Util/src/test/java/io/deephaven/util/datastructures/intrusive/TestIntrusiveArraySet.java b/Util/src/test/java/io/deephaven/util/datastructures/intrusive/TestIntrusiveArraySet.java index 27261c4e471..3f9fd145229 100644 --- a/Util/src/test/java/io/deephaven/util/datastructures/intrusive/TestIntrusiveArraySet.java +++ b/Util/src/test/java/io/deephaven/util/datastructures/intrusive/TestIntrusiveArraySet.java @@ -12,8 +12,7 @@ private IntrusiveValue(int sentinel) { } private static List make(int... 
sentinel) { - return Arrays.stream(sentinel).mapToObj(IntrusiveValue::new) - .collect(Collectors.toList()); + return Arrays.stream(sentinel).mapToObj(IntrusiveValue::new).collect(Collectors.toList()); } int sentinel; @@ -22,9 +21,9 @@ private static List make(int... sentinel) { @Override public String toString() { return "IntrusiveValue{" + - "sentinel=" + sentinel + - ", slot=" + slot + - '}'; + "sentinel=" + sentinel + + ", slot=" + slot + + '}'; } } @@ -42,8 +41,7 @@ public void setSlot(IntrusiveValue element, int slot) { } public void testSimple() { - final IntrusiveArraySet set = - new IntrusiveArraySet<>(new Adapter(), IntrusiveValue.class); + final IntrusiveArraySet set = new IntrusiveArraySet<>(new Adapter(), IntrusiveValue.class); final IntrusiveValue twentyThree = new IntrusiveValue(23); assertFalse(set.contains(twentyThree)); diff --git a/Util/src/test/java/io/deephaven/util/datastructures/linked/TestIntrusiveDoublyLinkedQueue.java b/Util/src/test/java/io/deephaven/util/datastructures/linked/TestIntrusiveDoublyLinkedQueue.java index eb9de5f0995..69a68948ec6 100644 --- a/Util/src/test/java/io/deephaven/util/datastructures/linked/TestIntrusiveDoublyLinkedQueue.java +++ b/Util/src/test/java/io/deephaven/util/datastructures/linked/TestIntrusiveDoublyLinkedQueue.java @@ -31,8 +31,7 @@ public String toString() { } } - private static class IntNodeAdapter - implements IntrusiveDoublyLinkedStructureBase.Adapter { + private static class IntNodeAdapter implements IntrusiveDoublyLinkedStructureBase.Adapter { @NotNull @Override @@ -59,8 +58,7 @@ public void setPrev(@NotNull final IntNode node, @NotNull final IntNode other) { @Test public void testEmpty() { - final IntrusiveDoublyLinkedQueue queue = - new IntrusiveDoublyLinkedQueue<>(new IntNodeAdapter()); + final IntrusiveDoublyLinkedQueue queue = new IntrusiveDoublyLinkedQueue<>(new IntNodeAdapter()); TestCase.assertTrue(queue.isEmpty()); TestCase.assertNull(queue.peek()); TestCase.assertNull(queue.poll()); @@ -72,8 
+70,7 @@ public void testEmpty() { } /** - * Test straightforward usage as a queue, with only adds at the end and removes from the - * beginning. + * Test straightforward usage as a queue, with only adds at the end and removes from the beginning. */ @Test public void testSimple() { @@ -81,8 +78,7 @@ public void testSimple() { } private void doSimpleTest(final int nodeCount) { - final IntrusiveDoublyLinkedQueue queue = - new IntrusiveDoublyLinkedQueue<>(new IntNodeAdapter()); + final IntrusiveDoublyLinkedQueue queue = new IntrusiveDoublyLinkedQueue<>(new IntNodeAdapter()); for (int ni = 0; ni < nodeCount; ++ni) { queue.offer(new IntNode(ni)); } @@ -100,8 +96,7 @@ private void doSimpleTest(final int nodeCount) { } /** - * Test "fancy" usage as a queue, with adds at the end, removes from the beginning, and O(1) - * internal removes. + * Test "fancy" usage as a queue, with adds at the end, removes from the beginning, and O(1) internal removes. */ @Test public void testIntrusiveRemoves() { @@ -110,10 +105,8 @@ public void testIntrusiveRemoves() { @SuppressWarnings("AutoBoxing") private void doIntrusiveRemoveTest(final int nodeCount) { - final IntrusiveDoublyLinkedQueue queue = - new IntrusiveDoublyLinkedQueue<>(new IntNodeAdapter()); - final List nodes = - IntStream.range(0, nodeCount).mapToObj(IntNode::new).collect(Collectors.toList()); + final IntrusiveDoublyLinkedQueue queue = new IntrusiveDoublyLinkedQueue<>(new IntNodeAdapter()); + final List nodes = IntStream.range(0, nodeCount).mapToObj(IntNode::new).collect(Collectors.toList()); nodes.forEach(queue::offer); Iterator qi = queue.iterator(); @@ -129,26 +122,21 @@ private void doIntrusiveRemoveTest(final int nodeCount) { TestCase.assertEquals(nodes.size(), ti); // noinspection unchecked - for (final Predicate predicate : new Predicate[] { - n -> ((IntNode) n).value % 2 == 0, n -> ((IntNode) n).value % 3 == 0, - n -> ((IntNode) n).value % 4 == 0}) { + for (final Predicate predicate : new Predicate[] {n -> ((IntNode) 
n).value % 2 == 0, + n -> ((IntNode) n).value % 3 == 0, n -> ((IntNode) n).value % 4 == 0}) { final Map> partitioned = - nodes.stream().collect(Collectors.partitioningBy(predicate)); + nodes.stream().collect(Collectors.partitioningBy(predicate)); for (final boolean partitionToKeep : new boolean[] {false, true}) { // Put all nodes in nodes.forEach(queue::offer); // Remove half the nodes - partitioned.get(!partitionToKeep) - .forEach(n -> TestCase.assertTrue(queue.remove(n))); - partitioned.get(!partitionToKeep) - .forEach(n -> TestCase.assertFalse(queue.isLinked(n))); - partitioned.get(!partitionToKeep) - .forEach(n -> TestCase.assertFalse(queue.contains(n))); - partitioned.get(!partitionToKeep) - .forEach(n -> TestCase.assertFalse(queue.remove(n))); + partitioned.get(!partitionToKeep).forEach(n -> TestCase.assertTrue(queue.remove(n))); + partitioned.get(!partitionToKeep).forEach(n -> TestCase.assertFalse(queue.isLinked(n))); + partitioned.get(!partitionToKeep).forEach(n -> TestCase.assertFalse(queue.contains(n))); + partitioned.get(!partitionToKeep).forEach(n -> TestCase.assertFalse(queue.remove(n))); // Make sure contains only the other half qi = queue.iterator(); @@ -172,10 +160,8 @@ private void doIntrusiveRemoveTest(final int nodeCount) { @Test public void testTransferFrom() { final IntNodeAdapter adapter = new IntNodeAdapter(); - final IntrusiveDoublyLinkedQueue queue1 = - new IntrusiveDoublyLinkedQueue<>(adapter); - final IntrusiveDoublyLinkedQueue queue2 = - new IntrusiveDoublyLinkedQueue<>(adapter); + final IntrusiveDoublyLinkedQueue queue1 = new IntrusiveDoublyLinkedQueue<>(adapter); + final IntrusiveDoublyLinkedQueue queue2 = new IntrusiveDoublyLinkedQueue<>(adapter); queue1.transferBeforeHeadFrom(queue2); TestCase.assertTrue(queue1.isEmpty()); @@ -241,13 +227,11 @@ public void testTransferFrom() { @SuppressWarnings("ConstantConditions") @Test public void testInsert() { - final IntrusiveDoublyLinkedQueue queue = - new IntrusiveDoublyLinkedQueue<>(new 
IntNodeAdapter()); + final IntrusiveDoublyLinkedQueue queue = new IntrusiveDoublyLinkedQueue<>(new IntNodeAdapter()); for (int at : new int[] {-1, 1, 100}) { try { queue.insert(new IntNode(0), at); - TestCase - .fail("Unexpectedly succeeded in inserting at " + at + " in an empty queue"); + TestCase.fail("Unexpectedly succeeded in inserting at " + at + " in an empty queue"); } catch (IllegalArgumentException expected) { } } @@ -260,8 +244,7 @@ public void testInsert() { for (int at : new int[] {-1, 2, 100}) { try { queue.insert(new IntNode(2), at); - TestCase - .fail("Unexpectedly succeeded in inserting at " + at + " in queue with size=1"); + TestCase.fail("Unexpectedly succeeded in inserting at " + at + " in queue with size=1"); } catch (IllegalArgumentException expected) { } } @@ -272,8 +255,7 @@ public void testInsert() { for (int at : new int[] {-1, 3, 100}) { try { queue.insert(new IntNode(3), at); - TestCase - .fail("Unexpectedly succeeded in inserting at " + at + " in queue with size=2"); + TestCase.fail("Unexpectedly succeeded in inserting at " + at + " in queue with size=2"); } catch (IllegalArgumentException expected) { } } diff --git a/Util/src/test/java/io/deephaven/util/datastructures/linked/TestIntrusiveSinglyLinkedQueue.java b/Util/src/test/java/io/deephaven/util/datastructures/linked/TestIntrusiveSinglyLinkedQueue.java index 2c79955da6d..7cdc602c431 100644 --- a/Util/src/test/java/io/deephaven/util/datastructures/linked/TestIntrusiveSinglyLinkedQueue.java +++ b/Util/src/test/java/io/deephaven/util/datastructures/linked/TestIntrusiveSinglyLinkedQueue.java @@ -24,17 +24,17 @@ public String toString() { } private static class IntNodeAdapter - implements IntrusiveSinglyLinkedQueue.Adapter { + implements IntrusiveSinglyLinkedQueue.Adapter { @Override public TestIntrusiveSinglyLinkedQueue.IntNode getNext( - @NotNull final TestIntrusiveSinglyLinkedQueue.IntNode node) { + @NotNull final TestIntrusiveSinglyLinkedQueue.IntNode node) { return node.next; } 
@Override public void setNext(@NotNull final TestIntrusiveSinglyLinkedQueue.IntNode node, - final TestIntrusiveSinglyLinkedQueue.IntNode other) { + final TestIntrusiveSinglyLinkedQueue.IntNode other) { node.next = other; } } @@ -42,7 +42,7 @@ public void setNext(@NotNull final TestIntrusiveSinglyLinkedQueue.IntNode node, @Test public void testEmpty() { final IntrusiveSinglyLinkedQueue queue = - new IntrusiveSinglyLinkedQueue<>(new TestIntrusiveSinglyLinkedQueue.IntNodeAdapter()); + new IntrusiveSinglyLinkedQueue<>(new TestIntrusiveSinglyLinkedQueue.IntNodeAdapter()); TestCase.assertTrue(queue.isEmpty()); TestCase.assertNull(queue.peek()); TestCase.assertNull(queue.poll()); @@ -58,8 +58,7 @@ public void testEmpty() { } /** - * Test straightforward usage as a queue, with only adds at the end and removes from the - * beginning. + * Test straightforward usage as a queue, with only adds at the end and removes from the beginning. */ @Test public void testSimple() { @@ -68,7 +67,7 @@ public void testSimple() { private void doSimpleTest(final int nodeCount) { final IntrusiveSinglyLinkedQueue queue = - new IntrusiveSinglyLinkedQueue<>(new TestIntrusiveSinglyLinkedQueue.IntNodeAdapter()); + new IntrusiveSinglyLinkedQueue<>(new TestIntrusiveSinglyLinkedQueue.IntNodeAdapter()); for (int ni = 0; ni < nodeCount; ++ni) { queue.offer(new TestIntrusiveSinglyLinkedQueue.IntNode(ni)); } diff --git a/Util/src/test/java/io/deephaven/util/files/TestDirWatchService.java b/Util/src/test/java/io/deephaven/util/files/TestDirWatchService.java index e8e5eb32384..5464cbed5d5 100644 --- a/Util/src/test/java/io/deephaven/util/files/TestDirWatchService.java +++ b/Util/src/test/java/io/deephaven/util/files/TestDirWatchService.java @@ -69,39 +69,37 @@ public void tearDown() throws Exception { public void testBasicFilesJavaFileWatch() throws Exception { - final String[] expectedResults = - {STARTSWITH_FILE, STARTSWITH_FILE2, ENDSWITH_FILE, EXACT_MATCH_FILE, EXACT_MATCH_FILE, - EXACT_MATCH_FILE2, 
EXACT_MATCH_FILE2, REGEX_MATCH_FILE}; + final String[] expectedResults = {STARTSWITH_FILE, STARTSWITH_FILE2, ENDSWITH_FILE, EXACT_MATCH_FILE, + EXACT_MATCH_FILE, EXACT_MATCH_FILE2, EXACT_MATCH_FILE2, REGEX_MATCH_FILE}; Arrays.sort(expectedResults); final DirWatchService service = new DirWatchService(dir.toString(), - this::watcherExceptionOccurred, - DirWatchService.WatchServiceType.JAVAWATCHSERVICE, - 1000, - ENTRY_CREATE, ENTRY_DELETE, ENTRY_MODIFY); + this::watcherExceptionOccurred, + DirWatchService.WatchServiceType.JAVAWATCHSERVICE, + 1000, + ENTRY_CREATE, ENTRY_DELETE, ENTRY_MODIFY); service.start(); service.addFileWatchAtStart(DirWatchService.makeStartsWithMatcher("test"), - (p, w) -> checkAndUpdate(p, w, STARTSWITH_FILE, STARTSWITH_FILE2)); + (p, w) -> checkAndUpdate(p, w, STARTSWITH_FILE, STARTSWITH_FILE2)); service.addFileWatchAtStart(DirWatchService.makeEndsWithMatcher("aaaa"), - (p, w) -> checkAndUpdate(p, w, ENDSWITH_FILE)); + (p, w) -> checkAndUpdate(p, w, ENDSWITH_FILE)); service.addExactFileWatch(".foo.", "abcde", - (p, w) -> checkAndUpdate(p, w, EXACT_MATCH_FILE, EXACT_MATCH_FILE2)); + (p, w) -> checkAndUpdate(p, w, EXACT_MATCH_FILE, EXACT_MATCH_FILE2)); service.addExactFileWatch(".oof.", "abcde", - (p, w) -> checkAndUpdate(p, w, EXACT_MATCH_FILE, EXACT_MATCH_FILE2)); + (p, w) -> checkAndUpdate(p, w, EXACT_MATCH_FILE, EXACT_MATCH_FILE2)); - // Add a second exact watch pair (one per separator), this should catch both the exact match - // files so increases the expected results to 8 + // Add a second exact watch pair (one per separator), this should catch both the exact match files so increases + // the expected results to 8 service.addExactFileWatch(".foo.", "abcde", - (p, w) -> checkAndUpdate(p, w, EXACT_MATCH_FILE, EXACT_MATCH_FILE2)); + (p, w) -> checkAndUpdate(p, w, EXACT_MATCH_FILE, EXACT_MATCH_FILE2)); service.addExactFileWatch(".oof.", "abcde", - (p, w) -> checkAndUpdate(p, w, EXACT_MATCH_FILE, EXACT_MATCH_FILE2)); + (p, w) -> 
checkAndUpdate(p, w, EXACT_MATCH_FILE, EXACT_MATCH_FILE2)); - service.addFileWatchAtStart( - DirWatchService.makeRegexMatcher("^[A-Za-z0-9_]*\\.stats\\.foo\\..*"), - (p, w) -> checkAndUpdate(p, w, REGEX_MATCH_FILE)); + service.addFileWatchAtStart(DirWatchService.makeRegexMatcher("^[A-Za-z0-9_]*\\.stats\\.foo\\..*"), + (p, w) -> checkAndUpdate(p, w, REGEX_MATCH_FILE)); assertTrue(new File(TEST_DIR + File.separator + STARTSWITH_FILE).createNewFile()); assertTrue(new File(TEST_DIR + File.separator + STARTSWITH_FILE2).createNewFile()); @@ -175,31 +173,30 @@ public void testBasicFilesJavaFileWatch() throws Exception { public void testBasicFilesPollFileWatch() throws Exception { - final String[] expectedResults = {STARTSWITH_FILE, STARTSWITH_FILE2, ENDSWITH_FILE, - EXACT_MATCH_FILE, EXACT_MATCH_FILE2, REGEX_MATCH_FILE}; + final String[] expectedResults = {STARTSWITH_FILE, STARTSWITH_FILE2, ENDSWITH_FILE, EXACT_MATCH_FILE, + EXACT_MATCH_FILE2, REGEX_MATCH_FILE}; Arrays.sort(expectedResults); final DirWatchService service = new DirWatchService(dir.toString(), - this::watcherExceptionOccurred, - DirWatchService.WatchServiceType.POLLWATCHSERVICE, - 1000, - ENTRY_CREATE, ENTRY_DELETE, ENTRY_MODIFY); + this::watcherExceptionOccurred, + DirWatchService.WatchServiceType.POLLWATCHSERVICE, + 1000, + ENTRY_CREATE, ENTRY_DELETE, ENTRY_MODIFY); service.start(); service.addFileWatchAtStart(DirWatchService.makeStartsWithMatcher("test"), - (p, w) -> checkAndUpdate(p, w, STARTSWITH_FILE, STARTSWITH_FILE2)); + (p, w) -> checkAndUpdate(p, w, STARTSWITH_FILE, STARTSWITH_FILE2)); service.addFileWatchAtStart(DirWatchService.makeEndsWithMatcher("aaaa"), - (p, w) -> checkAndUpdate(p, w, ENDSWITH_FILE)); + (p, w) -> checkAndUpdate(p, w, ENDSWITH_FILE)); service.addExactFileWatch(".foo.", "abcde", - (p, w) -> checkAndUpdate(p, w, EXACT_MATCH_FILE, EXACT_MATCH_FILE2)); + (p, w) -> checkAndUpdate(p, w, EXACT_MATCH_FILE, EXACT_MATCH_FILE2)); service.addExactFileWatch(".oof.", "abcde", - (p, w) -> 
checkAndUpdate(p, w, EXACT_MATCH_FILE, EXACT_MATCH_FILE2)); + (p, w) -> checkAndUpdate(p, w, EXACT_MATCH_FILE, EXACT_MATCH_FILE2)); - service.addFileWatchAtStart( - DirWatchService.makeRegexMatcher("^[A-Za-z0-9_]*\\.stats\\.foo\\..*"), - (p, w) -> checkAndUpdate(p, w, REGEX_MATCH_FILE)); + service.addFileWatchAtStart(DirWatchService.makeRegexMatcher("^[A-Za-z0-9_]*\\.stats\\.foo\\..*"), + (p, w) -> checkAndUpdate(p, w, REGEX_MATCH_FILE)); assertTrue(new File(TEST_DIR + File.separator + STARTSWITH_FILE).createNewFile()); assertTrue(new File(TEST_DIR + File.separator + STARTSWITH_FILE2).createNewFile()); @@ -267,11 +264,11 @@ public void testBasicFilesPollFileWatch() throws Exception { assertEquals(6, addedFiles.size()); assertEquals(6, deletedFiles.size()); - // On Mac sometimes the .delete() modifies a file before deleting it, resulting in a - // second entry for a file. Remove duplicates from this list before checking. + // On Mac sometimes the .delete() modifies a file before deleting it, resulting in a second entry for a + // file. Remove duplicates from this list before checking. 
final List modifiedFilesNoDuplicates = modifiedFiles.stream() - .distinct() - .collect(Collectors.toList()); + .distinct() + .collect(Collectors.toList()); assertEquals(6, modifiedFilesNoDuplicates.size()); final String[] removedFiles = deletedFiles.toArray(new String[0]); Arrays.sort(removedFiles); @@ -281,10 +278,10 @@ public void testBasicFilesPollFileWatch() throws Exception { public void testStopJava() throws Exception { final DirWatchService service = new DirWatchService(dir.toString(), - this::watcherExceptionOccurred, - DirWatchService.WatchServiceType.JAVAWATCHSERVICE, - 1000, - ENTRY_CREATE, ENTRY_DELETE, ENTRY_MODIFY); + this::watcherExceptionOccurred, + DirWatchService.WatchServiceType.JAVAWATCHSERVICE, + 1000, + ENTRY_CREATE, ENTRY_DELETE, ENTRY_MODIFY); service.start(); assertFalse(exceptionOccurred); @@ -309,10 +306,10 @@ public void testStopJava() throws Exception { public void testStopPoll() throws Exception { final DirWatchService service = new DirWatchService(dir.toString(), - this::watcherExceptionOccurred, - DirWatchService.WatchServiceType.POLLWATCHSERVICE, - 1000, - ENTRY_CREATE, ENTRY_DELETE, ENTRY_MODIFY); + this::watcherExceptionOccurred, + DirWatchService.WatchServiceType.POLLWATCHSERVICE, + 1000, + ENTRY_CREATE, ENTRY_DELETE, ENTRY_MODIFY); service.start(); assertFalse(exceptionOccurred); @@ -361,7 +358,7 @@ private static boolean touch(File file) { @SuppressWarnings("unused") private synchronized void watcherExceptionOccurred( - final DirWatchService.ExceptionConsumerParameter consumerParameter) { + final DirWatchService.ExceptionConsumerParameter consumerParameter) { exceptionOccurred = true; } diff --git a/Util/src/test/java/io/deephaven/util/files/TestResourceResolution.java b/Util/src/test/java/io/deephaven/util/files/TestResourceResolution.java index 3e205fee96b..99120d14f23 100644 --- a/Util/src/test/java/io/deephaven/util/files/TestResourceResolution.java +++ 
b/Util/src/test/java/io/deephaven/util/files/TestResourceResolution.java @@ -89,8 +89,7 @@ public void tearDown() throws Exception { } public void testRelativeOneDirNoWildcard() throws IOException { - ResourceResolution resourceResolution = - new ResourceResolution(Configuration.getInstance(), null, TEST_DIR2); + ResourceResolution resourceResolution = new ResourceResolution(Configuration.getInstance(), null, TEST_DIR2); resourceResolution.findResources(SUFFIX, this::resourceFound); assertTrue(filenameToUrlMap.size() == 3); assertTrue(filenameToUrlMap.containsKey(TEST_FILE2_NAME)); @@ -104,7 +103,7 @@ public void testAbsoluteTwoDirsNoWildcard() throws IOException { final String dirName2 = dir2.getAbsolutePath(); ResourceResolution resourceResolution = - new ResourceResolution(Configuration.getInstance(), null, dirName1, dirName2); + new ResourceResolution(Configuration.getInstance(), null, dirName1, dirName2); resourceResolution.findResources(SUFFIX, this::resourceFound); assertTrue(filenameToUrlMap.size() == 4); assertTrue(filenameToUrlMap.containsKey(TEST_FILE1_NAME)); @@ -116,7 +115,7 @@ public void testAbsoluteTwoDirsNoWildcard() throws IOException { public void testRelativeTopDirNoWildcard() throws IOException { ResourceResolution resourceResolution = - new ResourceResolution(Configuration.getInstance(), null, TEST_DIR_BASE); + new ResourceResolution(Configuration.getInstance(), null, TEST_DIR_BASE); resourceResolution.findResources(SUFFIX, this::resourceFound); assertTrue(filenameToUrlMap.size() == 4); assertTrue(filenameToUrlMap.containsKey(TEST_FILE1_NAME)); @@ -129,8 +128,7 @@ public void testRelativeTopDirNoWildcard() throws IOException { public void testAbsoluteTopDirNoWildcard() throws IOException { final String dirName = dirBase.getAbsolutePath(); - ResourceResolution resourceResolution = - new ResourceResolution(Configuration.getInstance(), null, dirName); + ResourceResolution resourceResolution = new ResourceResolution(Configuration.getInstance(), 
null, dirName); resourceResolution.findResources(SUFFIX, this::resourceFound); assertTrue(filenameToUrlMap.size() == 4); assertTrue(filenameToUrlMap.containsKey(TEST_FILE1_NAME)); @@ -142,7 +140,7 @@ public void testAbsoluteTopDirNoWildcard() throws IOException { public void testRelativeWildcardLinux() throws IOException { ResourceResolution resourceResolution = - new ResourceResolution(Configuration.getInstance(), null, TEST_WILDCARD_LINUX); + new ResourceResolution(Configuration.getInstance(), null, TEST_WILDCARD_LINUX); resourceResolution.findResources(SUFFIX, this::resourceFound); assertTrue(filenameToUrlMap.size() == 4); assertTrue(filenameToUrlMap.containsKey(TEST_FILE1_NAME)); @@ -156,8 +154,7 @@ public void testAbsoluteWildcardLinux() throws IOException { final File dirBase = new File(TEST_WILDCARD_LINUX); final String dirName = dirBase.getAbsolutePath(); - ResourceResolution resourceResolution = - new ResourceResolution(Configuration.getInstance(), null, dirName); + ResourceResolution resourceResolution = new ResourceResolution(Configuration.getInstance(), null, dirName); resourceResolution.findResources(SUFFIX, this::resourceFound); assertTrue(filenameToUrlMap.size() == 4); assertTrue(filenameToUrlMap.containsKey(TEST_FILE1_NAME)); @@ -169,7 +166,7 @@ public void testAbsoluteWildcardLinux() throws IOException { public void testRelativeWildcardWindows() throws IOException { ResourceResolution resourceResolution = - new ResourceResolution(Configuration.getInstance(), null, TEST_WILDCARD_WINDOWS); + new ResourceResolution(Configuration.getInstance(), null, TEST_WILDCARD_WINDOWS); resourceResolution.findResources(SUFFIX, this::resourceFound); assertTrue(filenameToUrlMap.size() == 4); assertTrue(filenameToUrlMap.containsKey(TEST_FILE1_NAME)); @@ -183,8 +180,7 @@ public void testAbsoluteWildcardWindows() throws IOException { final File dirBase = new File(TEST_WILDCARD_WINDOWS); final String dirName = dirBase.getAbsolutePath(); - ResourceResolution 
resourceResolution = - new ResourceResolution(Configuration.getInstance(), null, dirName); + ResourceResolution resourceResolution = new ResourceResolution(Configuration.getInstance(), null, dirName); resourceResolution.findResources(SUFFIX, this::resourceFound); assertTrue(filenameToUrlMap.size() == 4); assertTrue(filenameToUrlMap.containsKey(TEST_FILE1_NAME)); @@ -196,7 +192,7 @@ public void testAbsoluteWildcardWindows() throws IOException { public void testRelativeWildcardLinuxDups() throws IOException { ResourceResolution resourceResolution = - new ResourceResolution(Configuration.getInstance(), null, TEST_WILDCARD_LINUX_DUPS); + new ResourceResolution(Configuration.getInstance(), null, TEST_WILDCARD_LINUX_DUPS); resourceResolution.findResources(SUFFIX, this::resourceFound); assertTrue(filenameToUrlMap.size() == 4); assertTrue(filenameToUrlMap.containsKey(TEST_FILE1_NAME)); @@ -210,8 +206,7 @@ public void testAbsoluteWildcardWindowsDups() throws IOException { final File dirBase = new File(TEST_WILDCARD_WINDOWS_DUPS); final String dirName = dirBase.getAbsolutePath(); - ResourceResolution resourceResolution = - new ResourceResolution(Configuration.getInstance(), null, dirName); + ResourceResolution resourceResolution = new ResourceResolution(Configuration.getInstance(), null, dirName); resourceResolution.findResources(SUFFIX, this::resourceFound); assertTrue(filenameToUrlMap.size() == 4); assertTrue(filenameToUrlMap.containsKey(TEST_FILE1_NAME)); @@ -222,22 +217,20 @@ public void testAbsoluteWildcardWindowsDups() throws IOException { } public void testUNC() { - ResourceResolution resourceResolution = - new ResourceResolution(Configuration.getInstance(), null, "") { - @Override - public String normalize(String resourcePath) { - return super.normalize(resourcePath); - } - }; + ResourceResolution resourceResolution = new ResourceResolution(Configuration.getInstance(), null, "") { + @Override + public String normalize(String resourcePath) { + return 
super.normalize(resourcePath); + } + }; final Map answers = new HashMap<>(); final String answer1 = String.join(File.separator, "", "WindowsDC", "path", "to", ""); final String answer2; - // separators are collapsed to singles (e.g. /WIndowsDC/path/to) unless it is a UNC path on - // Windows (\WindowsDC\path\to) + // separators are collapsed to singles (e.g. /WIndowsDC/path/to) unless it is a UNC path on Windows + // (\WindowsDC\path\to) if (OSUtil.runningWindows()) { - // The normalize method checks for a Windows file separator type and adds an extra slash - // for UNC paths + // The normalize method checks for a Windows file separator type and adds an extra slash for UNC paths answer2 = File.separator + answer1; } else { // For Linux and Mac, the normal normalization is done diff --git a/Util/src/test/java/io/deephaven/util/pool/TestThreadSafeFixedSizePool.java b/Util/src/test/java/io/deephaven/util/pool/TestThreadSafeFixedSizePool.java index 11d25c727de..7a20511e0dd 100644 --- a/Util/src/test/java/io/deephaven/util/pool/TestThreadSafeFixedSizePool.java +++ b/Util/src/test/java/io/deephaven/util/pool/TestThreadSafeFixedSizePool.java @@ -37,11 +37,10 @@ public void testThreadSafeFixedSizePool() { for (Object object : OBJECTS) { m_mockObjectFactory.add(object); } - Pool pool = io.deephaven.base.pool.ThreadSafeFixedSizePool.FACTORY - .create(OBJECTS.length, m_mockObjectFactory, m_mockClearingProcedure); - assertEquals( - "call()call()call()call()call()call()call()call()call()call()call()call()call()call()", - m_mockObjectFactory.getActivityRecordAndReset()); + Pool pool = io.deephaven.base.pool.ThreadSafeFixedSizePool.FACTORY.create(OBJECTS.length, + m_mockObjectFactory, m_mockClearingProcedure); + assertEquals("call()call()call()call()call()call()call()call()call()call()call()call()call()call()", + m_mockObjectFactory.getActivityRecordAndReset()); // take Object alphaObject = OBJECTS[0]; @@ -74,8 +73,7 @@ public void testThreadSafeFixedSizePool() { // give for 
(Object object : OBJECTS) { pool.give(object); - assertEquals("call(" + object + ")", - m_mockClearingProcedure.getActivityRecordAndReset()); + assertEquals("call(" + object + ")", m_mockClearingProcedure.getActivityRecordAndReset()); checkNoOtherActivity(); } @@ -93,11 +91,10 @@ public void testThreadSafeFixedSizePoolNoClearingProcedure() { for (Object object : OBJECTS) { m_mockObjectFactory.add(object); } - Pool pool = io.deephaven.base.pool.ThreadSafeFixedSizePool.FACTORY - .create(OBJECTS.length, m_mockObjectFactory, null); - assertEquals( - "call()call()call()call()call()call()call()call()call()call()call()call()call()call()", - m_mockObjectFactory.getActivityRecordAndReset()); + Pool pool = io.deephaven.base.pool.ThreadSafeFixedSizePool.FACTORY.create(OBJECTS.length, + m_mockObjectFactory, null); + assertEquals("call()call()call()call()call()call()call()call()call()call()call()call()call()call()", + m_mockObjectFactory.getActivityRecordAndReset()); // take Object alphaObject = OBJECTS[0]; @@ -122,8 +119,7 @@ public void testThreadSafeFixedSizePoolNoFactory() { // no factory try { - new io.deephaven.base.pool.ThreadSafeFixedSizePool(OBJECTS.length, null, - m_mockClearingProcedure); + new io.deephaven.base.pool.ThreadSafeFixedSizePool(OBJECTS.length, null, m_mockClearingProcedure); fail("Should have thrown"); } catch (RequirementFailure requirementFailure) { // expected @@ -131,8 +127,7 @@ public void testThreadSafeFixedSizePoolNoFactory() { // too small try { - new io.deephaven.base.pool.ThreadSafeFixedSizePool(6, m_mockObjectFactory, - m_mockClearingProcedure); + new io.deephaven.base.pool.ThreadSafeFixedSizePool(6, m_mockObjectFactory, m_mockClearingProcedure); fail("should have thrown"); } catch (RequirementFailure requirementFailure) { // expected @@ -143,13 +138,12 @@ public void testThreadSafeFixedSizePoolNoFactory() { m_mockObjectFactory.add(object); } new io.deephaven.base.pool.ThreadSafeFixedSizePool(7, m_mockObjectFactory, null); - 
assertEquals("call()call()call()call()call()call()call()", - m_mockObjectFactory.getActivityRecordAndReset()); + assertEquals("call()call()call()call()call()call()call()", m_mockObjectFactory.getActivityRecordAndReset()); // no factory try { io.deephaven.base.pool.ThreadSafeFixedSizePool.FACTORY.create(OBJECTS.length, null, - m_mockClearingProcedure); + m_mockClearingProcedure); fail("should have thrown"); } catch (RequirementFailure requirementFailure) { // expected @@ -158,7 +152,7 @@ public void testThreadSafeFixedSizePoolNoFactory() { // too small try { io.deephaven.base.pool.ThreadSafeFixedSizePool.FACTORY.create(6, m_mockObjectFactory, - m_mockClearingProcedure); + m_mockClearingProcedure); fail("should have thrown"); } catch (RequirementFailure requirementFailure) { // expected @@ -169,8 +163,7 @@ public void testThreadSafeFixedSizePoolNoFactory() { m_mockObjectFactory.add(object); } new ThreadSafeFixedSizePool<>(7, m_mockObjectFactory, null); - assertEquals("call()call()call()call()call()call()call()", - m_mockObjectFactory.getActivityRecordAndReset()); + assertEquals("call()call()call()call()call()call()call()", m_mockObjectFactory.getActivityRecordAndReset()); } // ---------------------------------------------------------------- diff --git a/Util/src/test/java/io/deephaven/util/pool/TestThreadSafeLenientFixedSizePool.java b/Util/src/test/java/io/deephaven/util/pool/TestThreadSafeLenientFixedSizePool.java index b646071be5f..fcc32e80045 100644 --- a/Util/src/test/java/io/deephaven/util/pool/TestThreadSafeLenientFixedSizePool.java +++ b/Util/src/test/java/io/deephaven/util/pool/TestThreadSafeLenientFixedSizePool.java @@ -38,11 +38,10 @@ public void testThreadSafeLenientFixedSizePool() { for (Object object : OBJECTS) { m_mockObjectFactory.add(object); } - Pool pool = io.deephaven.base.pool.ThreadSafeLenientFixedSizePool.FACTORY - .create(OBJECTS.length, m_mockObjectFactory, m_mockClearingProcedure); - assertEquals( - 
"call()call()call()call()call()call()call()call()call()call()call()call()call()call()", - m_mockObjectFactory.getActivityRecordAndReset()); + Pool pool = io.deephaven.base.pool.ThreadSafeLenientFixedSizePool.FACTORY.create(OBJECTS.length, + m_mockObjectFactory, m_mockClearingProcedure); + assertEquals("call()call()call()call()call()call()call()call()call()call()call()call()call()call()", + m_mockObjectFactory.getActivityRecordAndReset()); // take Object alphaObject = OBJECTS[0]; @@ -84,8 +83,7 @@ public void testThreadSafeLenientFixedSizePool() { // give for (Object object : OBJECTS) { pool.give(object); - assertEquals("call(" + object + ")", - m_mockClearingProcedure.getActivityRecordAndReset()); + assertEquals("call(" + object + ")", m_mockClearingProcedure.getActivityRecordAndReset()); checkNoOtherActivity(); } @@ -103,11 +101,10 @@ public void testThreadSafeLenientFixedSizePoolNoClearingProcedure() { for (Object object : OBJECTS) { m_mockObjectFactory.add(object); } - Pool pool = io.deephaven.base.pool.ThreadSafeLenientFixedSizePool.FACTORY - .create(OBJECTS.length, m_mockObjectFactory, null); - assertEquals( - "call()call()call()call()call()call()call()call()call()call()call()call()call()call()", - m_mockObjectFactory.getActivityRecordAndReset()); + Pool pool = io.deephaven.base.pool.ThreadSafeLenientFixedSizePool.FACTORY.create(OBJECTS.length, + m_mockObjectFactory, null); + assertEquals("call()call()call()call()call()call()call()call()call()call()call()call()call()call()", + m_mockObjectFactory.getActivityRecordAndReset()); // take Object alphaObject = OBJECTS[0]; @@ -137,8 +134,8 @@ public void testThreadSafeLenientFixedSizePoolNoFactory() { RequirementFailure failure = null; try { new io.deephaven.base.pool.ThreadSafeLenientFixedSizePool(OBJECTS.length, - (Function.Unary>) null, - m_mockClearingProcedure); + (Function.Unary>) null, + m_mockClearingProcedure); } catch (RequirementFailure requirementFailure) { failure = requirementFailure; // 
assertTrue(requirementFailure.isThisStackFrameCulprit(0)); @@ -148,8 +145,8 @@ public void testThreadSafeLenientFixedSizePoolNoFactory() { // too small try { - new io.deephaven.base.pool.ThreadSafeLenientFixedSizePool(6, - m_mockObjectFactory, m_mockClearingProcedure); + new io.deephaven.base.pool.ThreadSafeLenientFixedSizePool(6, m_mockObjectFactory, + m_mockClearingProcedure); } catch (RequirementFailure requirementFailure) { assertTrue(requirementFailure.isThisStackFrameCulprit(0)); } @@ -158,23 +155,21 @@ public void testThreadSafeLenientFixedSizePoolNoFactory() { for (Object object : OBJECTS) { m_mockObjectFactory.add(object); } - new io.deephaven.base.pool.ThreadSafeLenientFixedSizePool(7, m_mockObjectFactory, - null); - assertEquals("call()call()call()call()call()call()call()", - m_mockObjectFactory.getActivityRecordAndReset()); + new io.deephaven.base.pool.ThreadSafeLenientFixedSizePool(7, m_mockObjectFactory, null); + assertEquals("call()call()call()call()call()call()call()", m_mockObjectFactory.getActivityRecordAndReset()); // no factory try { - io.deephaven.base.pool.ThreadSafeLenientFixedSizePool.FACTORY.create(OBJECTS.length, - null, m_mockClearingProcedure); + io.deephaven.base.pool.ThreadSafeLenientFixedSizePool.FACTORY.create(OBJECTS.length, null, + m_mockClearingProcedure); } catch (RequirementFailure requirementFailure) { assertTrue(requirementFailure.isThisStackFrameCulprit(0)); } // too small try { - io.deephaven.base.pool.ThreadSafeLenientFixedSizePool.FACTORY.create(6, - m_mockObjectFactory, m_mockClearingProcedure); + io.deephaven.base.pool.ThreadSafeLenientFixedSizePool.FACTORY.create(6, m_mockObjectFactory, + m_mockClearingProcedure); } catch (RequirementFailure requirementFailure) { assertTrue(requirementFailure.isThisStackFrameCulprit(0)); } @@ -183,10 +178,8 @@ public void testThreadSafeLenientFixedSizePoolNoFactory() { for (Object object : OBJECTS) { m_mockObjectFactory.add(object); } - new ThreadSafeLenientFixedSizePool<>(null, 7, 
m_mockObjectFactory, m_mockObjectFactory, - null); - assertEquals("call()call()call()call()call()call()call()", - m_mockObjectFactory.getActivityRecordAndReset()); + new ThreadSafeLenientFixedSizePool<>(null, 7, m_mockObjectFactory, m_mockObjectFactory, null); + assertEquals("call()call()call()call()call()call()call()", m_mockObjectFactory.getActivityRecordAndReset()); } // ---------------------------------------------------------------- diff --git a/Util/src/test/java/io/deephaven/util/profiling/TestThreadProfiler.java b/Util/src/test/java/io/deephaven/util/profiling/TestThreadProfiler.java index 29163dfda45..c6d1d2460f1 100644 --- a/Util/src/test/java/io/deephaven/util/profiling/TestThreadProfiler.java +++ b/Util/src/test/java/io/deephaven/util/profiling/TestThreadProfiler.java @@ -22,27 +22,25 @@ public class TestThreadProfiler { private static ThreadProfiler SUT; /** - * Depending on the environment and architecture, it can be very hard to have strict guarantees - * about predictable memory allocation or CPU usage. Rather than introduce unpredictable tests - * to CI, we disable such assertions unless specifically desired. + * Depending on the environment and architecture, it can be very hard to have strict guarantees about predictable + * memory allocation or CPU usage. Rather than introduce unpredictable tests to CI, we disable such assertions + * unless specifically desired. 
*/ private static final boolean STRICT_MODE = false; @BeforeClass public static void setUpOnce() { threadMXBean = (com.sun.management.ThreadMXBean) ManagementFactory.getThreadMXBean(); - final ThreadMXBean threadMXBean = - (com.sun.management.ThreadMXBean) ManagementFactory.getThreadMXBean(); + final ThreadMXBean threadMXBean = (com.sun.management.ThreadMXBean) ManagementFactory.getThreadMXBean(); if (threadMXBean.isCurrentThreadCpuTimeSupported()) { if (ThreadMXBeanThreadProfiler.TRY_ENABLE_THREAD_CPU_TIME - && (oldThreadCpuTimeEnabled = threadMXBean.isThreadCpuTimeEnabled())) { + && (oldThreadCpuTimeEnabled = threadMXBean.isThreadCpuTimeEnabled())) { threadMXBean.setThreadCpuTimeEnabled(false); } } if (threadMXBean.isThreadAllocatedMemorySupported()) { if (SunThreadMXBeanThreadProfiler.TRY_ENABLE_THREAD_ALLOCATED_MEMORY - && (oldThreadAllocatedMemoryEnabled = - threadMXBean.isThreadAllocatedMemoryEnabled())) { + && (oldThreadAllocatedMemoryEnabled = threadMXBean.isThreadAllocatedMemoryEnabled())) { threadMXBean.setThreadAllocatedMemoryEnabled(false); } } @@ -51,13 +49,13 @@ public static void setUpOnce() { @AfterClass public static void tearDownOnce() { - if (threadMXBean.isCurrentThreadCpuTimeSupported() - && ThreadMXBeanThreadProfiler.TRY_ENABLE_THREAD_CPU_TIME && oldThreadCpuTimeEnabled) { + if (threadMXBean.isCurrentThreadCpuTimeSupported() && ThreadMXBeanThreadProfiler.TRY_ENABLE_THREAD_CPU_TIME + && oldThreadCpuTimeEnabled) { threadMXBean.setThreadCpuTimeEnabled(true); } if (threadMXBean.isThreadAllocatedMemorySupported() - && SunThreadMXBeanThreadProfiler.TRY_ENABLE_THREAD_ALLOCATED_MEMORY - && oldThreadAllocatedMemoryEnabled) { + && SunThreadMXBeanThreadProfiler.TRY_ENABLE_THREAD_ALLOCATED_MEMORY + && oldThreadAllocatedMemoryEnabled) { threadMXBean.setThreadAllocatedMemoryEnabled(true); } } @@ -83,13 +81,11 @@ public void testMemoryMeasurement() { TestCase.assertFalse(endBytes == QueryConstants.NULL_LONG); final long allocatedBytes = endBytes - 
startBytes; - System.out.println("TestThreadProfiler: Allocated " + allocatedBytes + " with items hash " - + items.hashCode()); + System.out.println("TestThreadProfiler: Allocated " + allocatedBytes + " with items hash " + items.hashCode()); if (STRICT_MODE) { final long minimumExpectedBytes = count * size; final long maximumExpectedBytes = count * (128 + size); - TestCase.assertTrue( - allocatedBytes >= minimumExpectedBytes && allocatedBytes <= maximumExpectedBytes); + TestCase.assertTrue(allocatedBytes >= minimumExpectedBytes && allocatedBytes <= maximumExpectedBytes); } else { TestCase.assertTrue(allocatedBytes >= 0); } @@ -122,8 +118,8 @@ public void testCpuMeasurement() { final long elapsedCpuNanos = endCpuNanos - startCpuNanos; final long elapsedUserNanos = endUserNanos - startUserNanos; - System.out.println("TestThreadProfiler: Spent " + elapsedCpuNanos + "ns (" - + elapsedUserNanos + " ns user) calculating fib(92) == " + fib_curr); + System.out.println("TestThreadProfiler: Spent " + elapsedCpuNanos + "ns (" + elapsedUserNanos + + " ns user) calculating fib(92) == " + fib_curr); TestCase.assertEquals(7540113804746346429L, fib_curr); if (STRICT_MODE) { TestCase.assertTrue(elapsedUserNanos <= elapsedCpuNanos); diff --git a/Util/src/test/java/io/deephaven/utils/TestBigDecimalSqrt.java b/Util/src/test/java/io/deephaven/utils/TestBigDecimalSqrt.java index f0a55f79af2..dd87050d026 100644 --- a/Util/src/test/java/io/deephaven/utils/TestBigDecimalSqrt.java +++ b/Util/src/test/java/io/deephaven/utils/TestBigDecimalSqrt.java @@ -7,20 +7,18 @@ public class TestBigDecimalSqrt extends TestCase { public void testSqrt() { /* - * x = √10; eps = 1e-11 => (x + / -eps)^2 = x^2 +/- 2*eps*x + eps^2 Therefore the sqrt - * squared is accurate to 2*eps*x, which is ~6e-11 in this case. + * x = √10; eps = 1e-11 => (x + / -eps)^2 = x^2 +/- 2*eps*x + eps^2 Therefore the sqrt squared is accurate to + * 2*eps*x, which is ~6e-11 in this case. 
*/ testSqrt(BigDecimal.TEN, 11, 10); testSqrt(BigDecimal.ONE, 10, 10); testSqrt(BigDecimal.TEN.scaleByPowerOfTen(100), 50, 0); testSqrt(BigDecimal.valueOf(Long.MAX_VALUE).pow(4), 10, 10); - testSqrt(BigDecimal.valueOf(Long.MAX_VALUE).pow(4).divide(BigDecimal.valueOf(7), - BigDecimal.ROUND_HALF_UP), 40, 0); + testSqrt(BigDecimal.valueOf(Long.MAX_VALUE).pow(4).divide(BigDecimal.valueOf(7), BigDecimal.ROUND_HALF_UP), 40, + 0); // value > Double.MAX_VALUE - testSqrt( - new BigDecimal( - "1.3965847798346571265746871246578426578246587146581476581476578465814276518E400"), - 400, 200); + testSqrt(new BigDecimal("1.3965847798346571265746871246578426578246587146581476581476578465814276518E400"), 400, + 200); } public void testSqrtExceptionals() { diff --git a/Util/src/test/java/io/deephaven/utils/test/PropertySaver.java b/Util/src/test/java/io/deephaven/utils/test/PropertySaver.java index 36e23e4ac5f..ecc613cb984 100644 --- a/Util/src/test/java/io/deephaven/utils/test/PropertySaver.java +++ b/Util/src/test/java/io/deephaven/utils/test/PropertySaver.java @@ -6,8 +6,8 @@ import java.util.Map; /** - * Simple utility class for use in unit tests that need to adjust properties, and then put them - * back. Meant to be used in a try { } finally { } block; because java has no good RAII. + * Simple utility class for use in unit tests that need to adjust properties, and then put them back. Meant to be used + * in a try { } finally { } block; because java has no good RAII. 
*/ public class PropertySaver { private final Map savedProperties = new HashMap<>(); @@ -31,8 +31,7 @@ public PropertySaver setProperty(String property, String value) { public void remove(String property) { if (Configuration.getInstance().hasProperty(property)) { - savedProperties.put(property, - Configuration.getInstance().getProperties().remove(property).toString()); + savedProperties.put(property, Configuration.getInstance().getProperties().remove(property).toString()); } } diff --git a/Util/src/test/java/io/deephaven/utils/test/PropertySaverTest.java b/Util/src/test/java/io/deephaven/utils/test/PropertySaverTest.java index 79636655faf..0595c873dc2 100644 --- a/Util/src/test/java/io/deephaven/utils/test/PropertySaverTest.java +++ b/Util/src/test/java/io/deephaven/utils/test/PropertySaverTest.java @@ -37,18 +37,14 @@ public void propertySaverRemoveSomeProp() throws Exception { final PropertySaver propertySaver = new PropertySaver(); try { - final String someProperty = - configuration.getProperties().stringPropertyNames().iterator().next(); + final String someProperty = configuration.getProperties().stringPropertyNames().iterator().next(); log.info().append("Remove someProperty: " + someProperty).endl(); propertySaver.remove(someProperty); - log.info().append("configuration currentSize: " + configuration.getProperties().size()) - .endl(); + log.info().append("configuration currentSize: " + configuration.getProperties().size()).endl(); Assert.assertEquals(configuration.getProperties().size(), initialSize - 1); } finally { propertySaver.restore(); - log.info() - .append("configuration restored size: " + configuration.getProperties().size()) - .endl(); + log.info().append("configuration restored size: " + configuration.getProperties().size()).endl(); Assert.assertEquals(initialSize, configuration.getProperties().size()); } } @@ -64,14 +60,11 @@ public void propertySaverRemoveAllProps() throws Exception { try { Set props = 
configuration.getProperties().stringPropertyNames(); props.forEach((k) -> propertySaver.remove(k)); - log.info().append("configuration currentSize: " + configuration.getProperties().size()) - .endl(); + log.info().append("configuration currentSize: " + configuration.getProperties().size()).endl(); Assert.assertEquals(configuration.getProperties().size(), 0); } finally { propertySaver.restore(); - log.info() - .append("configuration restored size: " + configuration.getProperties().size()) - .endl(); + log.info().append("configuration restored size: " + configuration.getProperties().size()).endl(); Assert.assertEquals(initialSize, configuration.getProperties().size()); } } @@ -95,14 +88,11 @@ public void propertySaverRemoveNonExistentProp() throws Exception { } log.info().append("Remove random (non-existing) Property: " + randomProperty).endl(); propertySaver.remove(randomProperty); - log.info().append("configuration currentSize: " + configuration.getProperties().size()) - .endl(); + log.info().append("configuration currentSize: " + configuration.getProperties().size()).endl(); Assert.assertEquals(configuration.getProperties().size(), initialSize); } finally { propertySaver.restore(); - log.info() - .append("configuration restored size: " + configuration.getProperties().size()) - .endl(); + log.info().append("configuration restored size: " + configuration.getProperties().size()).endl(); Assert.assertEquals(initialSize, configuration.getProperties().size()); } } diff --git a/extensions/classgraph/src/main/java/io/deephaven/extensions/ClassGraphExtension.java b/extensions/classgraph/src/main/java/io/deephaven/extensions/ClassGraphExtension.java index e79d9f2b91e..a3b99a63e8a 100644 --- a/extensions/classgraph/src/main/java/io/deephaven/extensions/ClassGraphExtension.java +++ b/extensions/classgraph/src/main/java/io/deephaven/extensions/ClassGraphExtension.java @@ -63,10 +63,10 @@ public static void printAllExtensions(String extension, PrintStream out) { public static 
Table tableAllExtensions(String extension, ScanResult scan) { return fromUris(scan - .getResourcesWithExtension(extension) - .stream() - .map(Resource::getURI) - .iterator()); + .getResourcesWithExtension(extension) + .stream() + .map(Resource::getURI) + .iterator()); } public static Table tableAllExtensions(String extension) { @@ -99,16 +99,15 @@ public static Table fromUris(Iterator it) { fragments.add(uri.getFragment()); } return TableTools.newTable( - TableTools.stringCol("Scheme", schemes.toArray(new String[0])), - TableTools.stringCol("SchemeSpecificPart", schemeSpecificParts.toArray(new String[0])), - TableTools.stringCol("Authority", authorities.toArray(new String[0])), - TableTools.stringCol("UserInfo", userInfos.toArray(new String[0])), - TableTools.stringCol("Host", hosts.toArray(new String[0])), - TableTools.intCol("Port", - ports.stream().mapToInt(ClassGraphExtension::toDhInt).toArray()), - TableTools.stringCol("Path", paths.toArray(new String[0])), - TableTools.stringCol("Query", queries.toArray(new String[0])), - TableTools.stringCol("Fragment", fragments.toArray(new String[0]))); + TableTools.stringCol("Scheme", schemes.toArray(new String[0])), + TableTools.stringCol("SchemeSpecificPart", schemeSpecificParts.toArray(new String[0])), + TableTools.stringCol("Authority", authorities.toArray(new String[0])), + TableTools.stringCol("UserInfo", userInfos.toArray(new String[0])), + TableTools.stringCol("Host", hosts.toArray(new String[0])), + TableTools.intCol("Port", ports.stream().mapToInt(ClassGraphExtension::toDhInt).toArray()), + TableTools.stringCol("Path", paths.toArray(new String[0])), + TableTools.stringCol("Query", queries.toArray(new String[0])), + TableTools.stringCol("Fragment", fragments.toArray(new String[0]))); } private static int toDhInt(Integer boxed) { diff --git a/extensions/suanshu/src/main/java/io/deephaven/numerics/suanshu/SuanShuIntegration.java 
b/extensions/suanshu/src/main/java/io/deephaven/numerics/suanshu/SuanShuIntegration.java index bbb139368b5..f78c2f1f988 100644 --- a/extensions/suanshu/src/main/java/io/deephaven/numerics/suanshu/SuanShuIntegration.java +++ b/extensions/suanshu/src/main/java/io/deephaven/numerics/suanshu/SuanShuIntegration.java @@ -75,8 +75,7 @@ public Set> statics() { private SuanShuIntegration() {} - ////////////// Methods to convert Deephaven data-structure to Suanshu data-structures - ////////////// //////////////// + ////////////// Methods to convert Deephaven data-structure to Suanshu data-structures //////////////// /** @@ -92,8 +91,8 @@ public static Vector ssVec(final DbByteArray dbByteArray) { @Override public double get(int i) { - // Since {@link Vector} is 1-based data-structure and {@link DbByteArray} is 0-based - // data-structure, Vector[i] = DbByteArray[i-1] + // Since {@link Vector} is 1-based data-structure and {@link DbByteArray} is 0-based data-structure, + // Vector[i] = DbByteArray[i-1] return getValue(dbByteArray.get(i - 1)); } }; @@ -112,8 +111,8 @@ public static Vector ssVec(final Byte... 
bytes) { @Override public double get(int i) { - // Since {@link Vector} is 1-based data-structure and byte[] is 0-based - // data-structure, Vector[i] = DbByteArray[i-1] + // Since {@link Vector} is 1-based data-structure and byte[] is 0-based data-structure, Vector[i] = + // DbByteArray[i-1] return getValue(bytes[i - 1]); } @@ -137,8 +136,8 @@ public static Vector ssVec(final byte[] bytes) { @Override public double get(int i) { - // Since {@link Vector} is 1-based data-structure and byte[] is 0-based - // data-structure, Vector[i] = DbByteArray[i-1] + // Since {@link Vector} is 1-based data-structure and byte[] is 0-based data-structure, Vector[i] = + // DbByteArray[i-1] return getValue(bytes[i - 1]); } @@ -162,8 +161,8 @@ public static Vector ssVec(final DbShortArray dbShortArray) { @Override public double get(int i) { - // Since {@link Vector} is 1-based data-structure and {@link DbShortArray} is - // 0-based data-structure, Vector[i] = DbShortArray[i-1] + // Since {@link Vector} is 1-based data-structure and {@link DbShortArray} is 0-based data-structure, + // Vector[i] = DbShortArray[i-1] return getValue(dbShortArray.get(i - 1)); } }; @@ -182,8 +181,8 @@ public static Vector ssVec(final Short... 
shorts) { @Override public double get(int i) { - // Since {@link Vector} is 1-based data-structure and short[] is 0-based - // data-structure, Vector[i] = short[i-1] + // Since {@link Vector} is 1-based data-structure and short[] is 0-based data-structure, Vector[i] = + // short[i-1] return getValue(shorts[i - 1]); } @@ -207,8 +206,8 @@ public static Vector ssVec(final short[] shorts) { @Override public double get(int i) { - // Since {@link Vector} is 1-based data-structure and short[] is 0-based - // data-structure, Vector[i] = short[i-1] + // Since {@link Vector} is 1-based data-structure and short[] is 0-based data-structure, Vector[i] = + // short[i-1] return getValue(shorts[i - 1]); } @@ -232,8 +231,8 @@ public static Vector ssVec(final DbIntArray dbIntArray) { @Override public double get(int i) { - // Since {@link Vector} is 1-based data-structure and {@link DbIntArray} is 0-based - // data-structure, Vector[i] = DbIntArray[i-1] + // Since {@link Vector} is 1-based data-structure and {@link DbIntArray} is 0-based data-structure, + // Vector[i] = DbIntArray[i-1] return getValue(dbIntArray.get(i - 1)); } }; @@ -252,8 +251,8 @@ public static Vector ssVec(final int[] ints) { @Override public double get(int i) { - // Since {@link Vector} is 1-based data-structure and int[] is 0-based - // data-structure, Vector[i] = int[i-1] + // Since {@link Vector} is 1-based data-structure and int[] is 0-based data-structure, Vector[i] = + // int[i-1] return getValue(ints[i - 1]); } @@ -277,8 +276,8 @@ public static Vector ssVec(final Integer... 
ints) { @Override public double get(int i) { - // Since {@link Vector} is 1-based data-structure and int[] is 0-based - // data-structure, Vector[i] = int[i-1] + // Since {@link Vector} is 1-based data-structure and int[] is 0-based data-structure, Vector[i] = + // int[i-1] return getValue(ints[i - 1]); } @@ -302,8 +301,8 @@ public static Vector ssVec(final DbFloatArray dbFloatArray) { @Override public double get(int i) { - // Since {@link Vector} is 1-based data-structure and {@link DbFloatArray} is - // 0-based data-structure, Vector[i] = DbFloatArray[i-1] + // Since {@link Vector} is 1-based data-structure and {@link DbFloatArray} is 0-based data-structure, + // Vector[i] = DbFloatArray[i-1] return getValue(dbFloatArray.get(i - 1)); } }; @@ -322,8 +321,8 @@ public static Vector ssVec(final Float... floats) { @Override public double get(int i) { - // Since {@link Vector} is 1-based data-structure and float[] is 0-based - // data-structure, Vector[i] = float[i-1] + // Since {@link Vector} is 1-based data-structure and float[] is 0-based data-structure, Vector[i] = + // float[i-1] return getValue(floats[i - 1]); } @@ -347,8 +346,8 @@ public static Vector ssVec(final float[] floats) { @Override public double get(int i) { - // Since {@link Vector} is 1-based data-structure and float[] is 0-based - // data-structure, Vector[i] = float[i-1] + // Since {@link Vector} is 1-based data-structure and float[] is 0-based data-structure, Vector[i] = + // float[i-1] return getValue(floats[i - 1]); } @@ -372,8 +371,8 @@ public static Vector ssVec(final DbLongArray dbLongArray) { @Override public double get(int i) { - // Since {@link Vector} is 1-based data-structure and {@link DbLongArray} is 0-based - // data-structure, Vector[i] = DbLongArray[i-1] + // Since {@link Vector} is 1-based data-structure and {@link DbLongArray} is 0-based data-structure, + // Vector[i] = DbLongArray[i-1] return getValue(dbLongArray.get(i - 1)); } }; @@ -392,8 +391,8 @@ public static Vector 
ssVec(final Long... longs) { @Override public double get(int i) { - // Since {@link Vector} is 1-based data-structure and long[] is 0-based - // data-structure, Vector[i] = long[i-1] + // Since {@link Vector} is 1-based data-structure and long[] is 0-based data-structure, Vector[i] = + // long[i-1] return getValue(longs[i - 1]); } @@ -417,8 +416,8 @@ public static Vector ssVec(final long[] longs) { @Override public double get(int i) { - // Since {@link Vector} is 1-based data-structure and long[] is 0-based - // data-structure, Vector[i] = long[i-1] + // Since {@link Vector} is 1-based data-structure and long[] is 0-based data-structure, Vector[i] = + // long[i-1] return getValue(longs[i - 1]); } @@ -442,8 +441,8 @@ public static Vector ssVec(final DbDoubleArray dbDoubleArray) { @Override public double get(int i) { - // Since {@link Vector} is 1-based data-structure and {@link DbDoubleArray} is - // 0-based data-structure, Vector[i] = DbDoubleArray[i-1] + // Since {@link Vector} is 1-based data-structure and {@link DbDoubleArray} is 0-based data-structure, + // Vector[i] = DbDoubleArray[i-1] return getValue(dbDoubleArray.get(i - 1)); } @@ -467,8 +466,8 @@ public static Vector ssVec(final Double... doubles) { @Override public double get(int i) { - // Since {@link Vector} is 1-based data-structure and double[] is 0-based - // data-structure, Vector[i] = double[i-1] + // Since {@link Vector} is 1-based data-structure and double[] is 0-based data-structure, Vector[i] = + // double[i-1] return getValue(doubles[i - 1]); } @@ -497,8 +496,8 @@ public static Vector ssVec(final double[] doubles) { @Override public double get(int i) { - // Since {@link Vector} is 1-based data-structure and double[] is 0-based - // data-structure, Vector[i] = double[i-1] + // Since {@link Vector} is 1-based data-structure and double[] is 0-based data-structure, Vector[i] = + // double[i-1] return getValue(doubles[i - 1]); } @@ -531,20 +530,20 @@ public static Vector ssVec(final Number... 
numbers) { * @param dbArray array to wrap * @param type of elements in dbArray * @return Immutable {@link Vector} backed by {@link DbArray} - * @throws io.deephaven.base.verify.RequirementFailure if dbArray contains elements - * other than {@link Number} or any of its implementations. + * @throws io.deephaven.base.verify.RequirementFailure if dbArray contains elements other than + * {@link Number} or any of its implementations. */ public static Vector ssVec(final DbArray dbArray) { Require.neqNull(dbArray, "dbArray"); Require.requirement(Number.class.isAssignableFrom(dbArray.getComponentType()), - "dbArray of type " + Number.class + ", instead found " + dbArray.getComponentType()); + "dbArray of type " + Number.class + ", instead found " + dbArray.getComponentType()); return new AbstractDbArrayBaseVector(dbArray) { private static final long serialVersionUID = 905559534474469661L; @Override public double get(int i) { - // Since {@link Vector} is 1-based data-structure and {@link DbArray} is 0-based - // data-structure, Vector[i] = DbArray[i-1] + // Since {@link Vector} is 1-based data-structure and {@link DbArray} is 0-based data-structure, + // Vector[i] = DbArray[i-1] return getValue(dbArray.get(i - 1)); } }; @@ -563,8 +562,8 @@ private static double[] convertVectorToDoubleArray(final Vector vector) { Require.neqNull(vector, "vector"); final double[] doubles = new double[vector.size()]; for (int i = 1; i <= vector.size(); i++) { - // Since {@link Vector} is 1-based data-structure and double[] is 0-based - // data-structure, Vector[i] = double[i-1] + // Since {@link Vector} is 1-based data-structure and double[] is 0-based data-structure, Vector[i] = + // double[i-1] doubles[i - 1] = vector.get(i); } return doubles; @@ -575,9 +574,8 @@ private static double[] convertVectorToDoubleArray(final Vector vector) { /** - * Wraps {@link DbByteArray}... 
as {@link Matrix} This method assumes {@code dbByteArrays} to be - * in unconventional [columns][rows] structure, where first dimension denotes columns and second - * dimension denotes rows. + * Wraps {@link DbByteArray}... as {@link Matrix} This method assumes {@code dbByteArrays} to be in unconventional + * [columns][rows] structure, where first dimension denotes columns and second dimension denotes rows. * * @param dbByteArrays array to wrap * @return Immutable {@link Matrix} backed by {@link DbByteArray}... @@ -587,8 +585,8 @@ public static Matrix ssMat(final DbByteArray... dbByteArrays) { for (int i = 0; i < dbByteArrays.length; i++) { Require.neqNull(dbByteArrays[i], "dbByteArrays[" + i + "]"); if (i > 0) { - Require.eq(dbByteArrays[0].intSize(), "dbByteArrays[0].intSize()", - dbByteArrays[i].intSize(), "dbByteArrays[" + i + "].intSize()"); + Require.eq(dbByteArrays[0].intSize(), "dbByteArrays[0].intSize()", dbByteArrays[i].intSize(), + "dbByteArrays[" + i + "].intSize()"); } } return new AbstractMatrix() { @@ -601,8 +599,7 @@ public Vector getRow(int row) throws MatrixAccessException { @Override public double get(int i) { - // Because 1-based row and column indices in - // com.numericalmethod.suanshu.matrix.doubles.Matrix + // Because 1-based row and column indices in com.numericalmethod.suanshu.matrix.doubles.Matrix return getValue(dbByteArrays[i - 1].get(row - 1)); } @@ -615,16 +612,14 @@ public int size() { @Override public Vector getColumn(int column) throws MatrixAccessException { - return new AbstractDbArrayBaseVector(dbByteArrays[column - 1]) {// Because 1-based - // row and column + return new AbstractDbArrayBaseVector(dbByteArrays[column - 1]) {// Because 1-based row and column // indices in // com.numericalmethod.suanshu.matrix.doubles.Matrix private static final long serialVersionUID = 6151466803319078752L; @Override public double get(int i) { - // Because 1-based row and column indices in - // com.numericalmethod.suanshu.matrix.doubles.Matrix + 
// Because 1-based row and column indices in com.numericalmethod.suanshu.matrix.doubles.Matrix return getValue(dbByteArrays[column - 1].get(i - 1)); } }; @@ -632,8 +627,7 @@ public double get(int i) { @Override public double get(final int row, final int column) throws MatrixAccessException { - // Because 1-based row and column indices in - // com.numericalmethod.suanshu.matrix.doubles.Matrix + // Because 1-based row and column indices in com.numericalmethod.suanshu.matrix.doubles.Matrix return getValue(dbByteArrays[column - 1].get(row - 1)); } @@ -650,9 +644,9 @@ public int nCols() { } /** - * Wraps byte[]... as {@link Matrix} This method assumes {@code byteColumnsData} to - * be in unconventional [columns][rows] structure, where first dimension denotes columns and - * second dimension denotes rows. + * Wraps byte[]... as {@link Matrix} This method assumes {@code byteColumnsData} to be in + * unconventional [columns][rows] structure, where first dimension denotes columns and second dimension denotes + * rows. * * @param byteColumnsData 2D array to wrap * @return Immutable {@link Matrix} backed by byte[]... @@ -662,8 +656,8 @@ public static Matrix ssMat(final byte[]... 
byteColumnsData) { for (int i = 0; i < byteColumnsData.length; i++) { Require.neqNull(byteColumnsData[i], "byteColumnsData[" + i + "]"); if (i > 0) { - Require.eq(byteColumnsData[0].length, "byteColumnsData[0].length", - byteColumnsData[i].length, "byteColumnsData[" + i + "].length"); + Require.eq(byteColumnsData[0].length, "byteColumnsData[0].length", byteColumnsData[i].length, + "byteColumnsData[" + i + "].length"); } } return new AbstractMatrix() { @@ -676,8 +670,7 @@ public Vector getRow(int row) throws MatrixAccessException { @Override public double get(int i) { - // Because 1-based row and column indices in - // com.numericalmethod.suanshu.matrix.doubles.Matrix + // Because 1-based row and column indices in com.numericalmethod.suanshu.matrix.doubles.Matrix return getValue(byteColumnsData[i - 1][row - 1]); } @@ -695,8 +688,7 @@ public Vector getColumn(int column) throws MatrixAccessException { @Override public double get(int i) { - // Because 1-based row and column indices in - // com.numericalmethod.suanshu.matrix.doubles.Matrix + // Because 1-based row and column indices in com.numericalmethod.suanshu.matrix.doubles.Matrix return getValue(byteColumnsData[column - 1][i - 1]); } @@ -709,8 +701,7 @@ public int size() { @Override public double get(final int row, final int column) throws MatrixAccessException { - // Because 1-based row and column indices in - // com.numericalmethod.suanshu.matrix.doubles.Matrix + // Because 1-based row and column indices in com.numericalmethod.suanshu.matrix.doubles.Matrix return getValue(byteColumnsData[column - 1][row - 1]); } @@ -727,9 +718,8 @@ public int nCols() { } /** - * Wraps {@link DbShortArray}... as {@link Matrix} This method assumes {@code dbShortArrays} to - * be in unconventional [columns][rows] structure, where first dimension denotes columns and - * second dimension denotes rows. + * Wraps {@link DbShortArray}... 
as {@link Matrix} This method assumes {@code dbShortArrays} to be in unconventional + * [columns][rows] structure, where first dimension denotes columns and second dimension denotes rows. * * @param dbShortArrays array to wrap * @return Immutable {@link Matrix} backed by {@link DbShortArray}... @@ -739,8 +729,8 @@ public static Matrix ssMat(final DbShortArray... dbShortArrays) { for (int i = 0; i < dbShortArrays.length; i++) { Require.neqNull(dbShortArrays[i], "dbShortArrays[" + i + "]"); if (i > 0) { - Require.eq(dbShortArrays[0].intSize(), "dbShortArrays[0].intSize()", - dbShortArrays[i].intSize(), "dbShortArrays[" + i + "].intSize()"); + Require.eq(dbShortArrays[0].intSize(), "dbShortArrays[0].intSize()", dbShortArrays[i].intSize(), + "dbShortArrays[" + i + "].intSize()"); } } return new AbstractMatrix() { @@ -753,8 +743,7 @@ public Vector getRow(int row) throws MatrixAccessException { @Override public double get(int i) { - // Because 1-based row and column indices in - // com.numericalmethod.suanshu.matrix.doubles.Matrix + // Because 1-based row and column indices in com.numericalmethod.suanshu.matrix.doubles.Matrix return getValue(dbShortArrays[i - 1].get(row - 1)); } @@ -767,16 +756,14 @@ public int size() { @Override public Vector getColumn(int column) throws MatrixAccessException { - return new AbstractDbArrayBaseVector(dbShortArrays[column - 1]) {// Because 1-based - // row and column + return new AbstractDbArrayBaseVector(dbShortArrays[column - 1]) {// Because 1-based row and column // indices in // com.numericalmethod.suanshu.matrix.doubles.Matrix private static final long serialVersionUID = 6991137420725851810L; @Override public double get(int i) { - // Because 1-based row and column indices in - // com.numericalmethod.suanshu.matrix.doubles.Matrix + // Because 1-based row and column indices in com.numericalmethod.suanshu.matrix.doubles.Matrix return getValue(dbShortArrays[column - 1].get(i - 1)); } }; @@ -784,8 +771,7 @@ public double get(int i) { 
@Override public double get(final int row, final int column) throws MatrixAccessException { - // Because 1-based row and column indices in - // com.numericalmethod.suanshu.matrix.doubles.Matrix + // Because 1-based row and column indices in com.numericalmethod.suanshu.matrix.doubles.Matrix return getValue(dbShortArrays[column - 1].get(row - 1)); } @@ -802,9 +788,9 @@ public int nCols() { } /** - * Wraps short[]... as {@link Matrix} This method assumes {@code shortColumnsData} - * to be in unconventional [columns][rows] structure, where first dimension denotes columns and - * second dimension denotes rows. + * Wraps short[]... as {@link Matrix} This method assumes {@code shortColumnsData} to be in + * unconventional [columns][rows] structure, where first dimension denotes columns and second dimension denotes + * rows. * * @param shortColumnsData 2D array to wrap * @return Immutable {@link Matrix} backed by short[]... @@ -814,8 +800,8 @@ public static Matrix ssMat(final short[]... shortColumnsData) { for (int i = 0; i < shortColumnsData.length; i++) { Require.neqNull(shortColumnsData[i], "shortColumnsData[" + i + "]"); if (i > 0) { - Require.eq(shortColumnsData[0].length, "shortColumnsData[0].length", - shortColumnsData[i].length, "shortColumnsData[" + i + "].length"); + Require.eq(shortColumnsData[0].length, "shortColumnsData[0].length", shortColumnsData[i].length, + "shortColumnsData[" + i + "].length"); } } return new AbstractMatrix() { @@ -828,8 +814,7 @@ public Vector getRow(int row) throws MatrixAccessException { @Override public double get(int i) { - // Because 1-based row and column indices in - // com.numericalmethod.suanshu.matrix.doubles.Matrix + // Because 1-based row and column indices in com.numericalmethod.suanshu.matrix.doubles.Matrix return getValue(shortColumnsData[i - 1][row - 1]); } @@ -847,8 +832,7 @@ public Vector getColumn(int column) throws MatrixAccessException { @Override public double get(int i) { - // Because 1-based row and column indices 
in - // com.numericalmethod.suanshu.matrix.doubles.Matrix + // Because 1-based row and column indices in com.numericalmethod.suanshu.matrix.doubles.Matrix return getValue(shortColumnsData[column - 1][i - 1]); } @@ -861,8 +845,7 @@ public int size() { @Override public double get(final int row, final int column) throws MatrixAccessException { - // Because 1-based row and column indices in - // com.numericalmethod.suanshu.matrix.doubles.Matrix + // Because 1-based row and column indices in com.numericalmethod.suanshu.matrix.doubles.Matrix return getValue(shortColumnsData[column - 1][row - 1]); } @@ -879,9 +862,8 @@ public int nCols() { } /** - * Wraps {@link DbIntArray}... as {@link Matrix} This method assumes {@code dbIntArrays} to be - * in unconventional [columns][rows] structure, where first dimension denotes columns and second - * dimension denotes rows. + * Wraps {@link DbIntArray}... as {@link Matrix} This method assumes {@code dbIntArrays} to be in unconventional + * [columns][rows] structure, where first dimension denotes columns and second dimension denotes rows. * * @param dbIntArrays array to wrap * @return Immutable {@link Matrix} backed by {@link DbIntArray}... @@ -891,8 +873,8 @@ public static Matrix ssMat(final DbIntArray... 
dbIntArrays) { for (int i = 0; i < dbIntArrays.length; i++) { Require.neqNull(dbIntArrays[i], "dbIntArrays[" + i + "]"); if (i > 0) { - Require.eq(dbIntArrays[0].intSize(), "dbIntArrays[0].intSize()", - dbIntArrays[i].intSize(), "dbIntArrays[" + i + "].intSize()"); + Require.eq(dbIntArrays[0].intSize(), "dbIntArrays[0].intSize()", dbIntArrays[i].intSize(), + "dbIntArrays[" + i + "].intSize()"); } } return new AbstractMatrix() { @@ -905,8 +887,7 @@ public Vector getRow(int row) throws MatrixAccessException { @Override public double get(int i) { - // Because 1-based row and column indices in - // com.numericalmethod.suanshu.matrix.doubles.Matrix + // Because 1-based row and column indices in com.numericalmethod.suanshu.matrix.doubles.Matrix return getValue(dbIntArrays[i - 1].get(row - 1)); } @@ -919,16 +900,14 @@ public int size() { @Override public Vector getColumn(int column) throws MatrixAccessException { - return new AbstractDbArrayBaseVector(dbIntArrays[column - 1]) {// Because 1-based - // row and column - // indices in + return new AbstractDbArrayBaseVector(dbIntArrays[column - 1]) {// Because 1-based row and column indices + // in // com.numericalmethod.suanshu.matrix.doubles.Matrix private static final long serialVersionUID = 821557745996553552L; @Override public double get(int i) { - // Because 1-based row and column indices in - // com.numericalmethod.suanshu.matrix.doubles.Matrix + // Because 1-based row and column indices in com.numericalmethod.suanshu.matrix.doubles.Matrix return getValue(dbIntArrays[column - 1].get(i - 1)); } }; @@ -936,8 +915,7 @@ public double get(int i) { @Override public double get(final int row, final int column) throws MatrixAccessException { - // Because 1-based row and column indices in - // com.numericalmethod.suanshu.matrix.doubles.Matrix + // Because 1-based row and column indices in com.numericalmethod.suanshu.matrix.doubles.Matrix return getValue(dbIntArrays[column - 1].get(row - 1)); } @@ -954,9 +932,8 @@ public int 
nCols() { } /** - * Wraps int[]... as {@link Matrix} This method assumes {@code intColumnsData} to - * be in unconventional [columns][rows] structure, where first dimension denotes columns and - * second dimension denotes rows. + * Wraps int[]... as {@link Matrix} This method assumes {@code intColumnsData} to be in unconventional + * [columns][rows] structure, where first dimension denotes columns and second dimension denotes rows. * * @param intColumnsData 2D array to wrap * @return Immutable {@link Matrix} backed by int[]... @@ -966,8 +943,8 @@ public static Matrix ssMat(final int[]... intColumnsData) { for (int i = 0; i < intColumnsData.length; i++) { Require.neqNull(intColumnsData[i], "intColumnsData[" + i + "]"); if (i > 0) { - Require.eq(intColumnsData[0].length, "intColumnsData[0].length", - intColumnsData[i].length, "intColumnsData[" + i + "].length"); + Require.eq(intColumnsData[0].length, "intColumnsData[0].length", intColumnsData[i].length, + "intColumnsData[" + i + "].length"); } } return new AbstractMatrix() { @@ -980,8 +957,7 @@ public Vector getRow(int row) throws MatrixAccessException { @Override public double get(int i) { - // Because 1-based row and column indices in - // com.numericalmethod.suanshu.matrix.doubles.Matrix + // Because 1-based row and column indices in com.numericalmethod.suanshu.matrix.doubles.Matrix return getValue(intColumnsData[i - 1][row - 1]); } @@ -999,8 +975,7 @@ public Vector getColumn(int column) throws MatrixAccessException { @Override public double get(int i) { - // Because 1-based row and column indices in - // com.numericalmethod.suanshu.matrix.doubles.Matrix + // Because 1-based row and column indices in com.numericalmethod.suanshu.matrix.doubles.Matrix return getValue(intColumnsData[column - 1][i - 1]); } @@ -1013,8 +988,7 @@ public int size() { @Override public double get(final int row, final int column) throws MatrixAccessException { - // Because 1-based row and column indices in - // 
com.numericalmethod.suanshu.matrix.doubles.Matrix + // Because 1-based row and column indices in com.numericalmethod.suanshu.matrix.doubles.Matrix return getValue(intColumnsData[column - 1][row - 1]); } @@ -1031,9 +1005,8 @@ public int nCols() { } /** - * Wraps {@link DbFloatArray}... as {@link Matrix} This method assumes {@code dbFloatArrays} to - * be in unconventional [columns][rows] structure, where first dimension denotes columns and - * second dimension denotes rows. + * Wraps {@link DbFloatArray}... as {@link Matrix} This method assumes {@code dbFloatArrays} to be in unconventional + * [columns][rows] structure, where first dimension denotes columns and second dimension denotes rows. * * @param dbFloatArrays array to wrap * @return Immutable {@link Matrix} backed by {@link DbFloatArray}... @@ -1043,8 +1016,8 @@ public static Matrix ssMat(final DbFloatArray... dbFloatArrays) { for (int i = 0; i < dbFloatArrays.length; i++) { Require.neqNull(dbFloatArrays[i], "dbFloatArrays[" + i + "]"); if (i > 0) { - Require.eq(dbFloatArrays[0].intSize(), "dbFloatArrays[0].intSize()", - dbFloatArrays[i].intSize(), "dbFloatArrays[" + i + "].intSize()"); + Require.eq(dbFloatArrays[0].intSize(), "dbFloatArrays[0].intSize()", dbFloatArrays[i].intSize(), + "dbFloatArrays[" + i + "].intSize()"); } } return new AbstractMatrix() { @@ -1057,8 +1030,7 @@ public Vector getRow(int row) throws MatrixAccessException { @Override public double get(int i) { - // Because 1-based row and column indices in - // com.numericalmethod.suanshu.matrix.doubles.Matrix + // Because 1-based row and column indices in com.numericalmethod.suanshu.matrix.doubles.Matrix return getValue(dbFloatArrays[i - 1].get(row - 1)); } @@ -1071,16 +1043,14 @@ public int size() { @Override public Vector getColumn(int column) throws MatrixAccessException { - return new AbstractDbArrayBaseVector(dbFloatArrays[column - 1]) {// Because 1-based - // row and column + return new AbstractDbArrayBaseVector(dbFloatArrays[column - 
1]) {// Because 1-based row and column // indices in // com.numericalmethod.suanshu.matrix.doubles.Matrix private static final long serialVersionUID = -8535605234772136511L; @Override public double get(int i) { - // Because 1-based row and column indices in - // com.numericalmethod.suanshu.matrix.doubles.Matrix + // Because 1-based row and column indices in com.numericalmethod.suanshu.matrix.doubles.Matrix return getValue(dbFloatArrays[column - 1].get(i - 1)); } }; @@ -1088,8 +1058,7 @@ public double get(int i) { @Override public double get(final int row, final int column) throws MatrixAccessException { - // Because 1-based row and column indices in - // com.numericalmethod.suanshu.matrix.doubles.Matrix + // Because 1-based row and column indices in com.numericalmethod.suanshu.matrix.doubles.Matrix return getValue(dbFloatArrays[column - 1].get(row - 1)); } @@ -1106,9 +1075,9 @@ public int nCols() { } /** - * Wraps float[]... as {@link Matrix}. This method assumes {@code floatColumnsData} - * to be in unconventional [columns][rows] structure, where first dimension denotes columns and - * second dimension denotes rows. + * Wraps float[]... as {@link Matrix}. This method assumes {@code floatColumnsData} to be in + * unconventional [columns][rows] structure, where first dimension denotes columns and second dimension denotes + * rows. * * @param floatColumnsData 2D array to wrap * @return Immutable {@link Matrix} backed by float[]... @@ -1118,8 +1087,8 @@ public static Matrix ssMat(final float[]... 
floatColumnsData) { for (int i = 0; i < floatColumnsData.length; i++) { Require.neqNull(floatColumnsData[i], "floatColumnsData[" + i + "]"); if (i > 0) { - Require.eq(floatColumnsData[0].length, "floatColumnsData[0].length", - floatColumnsData[i].length, "floatColumnsData[" + i + "].length"); + Require.eq(floatColumnsData[0].length, "floatColumnsData[0].length", floatColumnsData[i].length, + "floatColumnsData[" + i + "].length"); } } return new AbstractMatrix() { @@ -1132,8 +1101,7 @@ public Vector getRow(int row) throws MatrixAccessException { @Override public double get(int i) { - // Because 1-based row and column indices in - // com.numericalmethod.suanshu.matrix.doubles.Matrix + // Because 1-based row and column indices in com.numericalmethod.suanshu.matrix.doubles.Matrix return getValue(floatColumnsData[i - 1][row - 1]); } @@ -1151,8 +1119,7 @@ public Vector getColumn(int column) throws MatrixAccessException { @Override public double get(int i) { - // Because 1-based row and column indices in - // com.numericalmethod.suanshu.matrix.doubles.Matrix + // Because 1-based row and column indices in com.numericalmethod.suanshu.matrix.doubles.Matrix return getValue(floatColumnsData[column - 1][i - 1]); } @@ -1165,8 +1132,7 @@ public int size() { @Override public double get(final int row, final int column) throws MatrixAccessException { - // Because 1-based row and column indices in - // com.numericalmethod.suanshu.matrix.doubles.Matrix + // Because 1-based row and column indices in com.numericalmethod.suanshu.matrix.doubles.Matrix return getValue(floatColumnsData[column - 1][row - 1]); } @@ -1183,9 +1149,8 @@ public int nCols() { } /** - * Wraps {@link DbLongArray}... as {@link Matrix} This method assumes {@code dbLongArrays} to be - * in unconventional [columns][rows] structure, where first dimension denotes columns and second - * dimension denotes rows. + * Wraps {@link DbLongArray}... 
as {@link Matrix} This method assumes {@code dbLongArrays} to be in unconventional + * [columns][rows] structure, where first dimension denotes columns and second dimension denotes rows. * * @param dbLongArrays array to wrap * @return Immutable {@link Matrix} backed by {@link DbLongArray}... @@ -1195,8 +1160,8 @@ public static Matrix ssMat(final DbLongArray... dbLongArrays) { for (int i = 0; i < dbLongArrays.length; i++) { Require.neqNull(dbLongArrays[i], "dbLongArrays[" + i + "]"); if (i > 0) { - Require.eq(dbLongArrays[0].intSize(), "dbLongArrays[0].intSize()", - dbLongArrays[i].intSize(), "dbLongArrays[" + i + "].intSize()"); + Require.eq(dbLongArrays[0].intSize(), "dbLongArrays[0].intSize()", dbLongArrays[i].intSize(), + "dbLongArrays[" + i + "].intSize()"); } } return new AbstractMatrix() { @@ -1209,8 +1174,7 @@ public Vector getRow(int row) throws MatrixAccessException { @Override public double get(int i) { - // Because 1-based row and column indices in - // com.numericalmethod.suanshu.matrix.doubles.Matrix + // Because 1-based row and column indices in com.numericalmethod.suanshu.matrix.doubles.Matrix return getValue(dbLongArrays[i - 1].get(row - 1)); } @@ -1223,16 +1187,14 @@ public int size() { @Override public Vector getColumn(int column) throws MatrixAccessException { - return new AbstractDbArrayBaseVector(dbLongArrays[column - 1]) {// Because 1-based - // row and column + return new AbstractDbArrayBaseVector(dbLongArrays[column - 1]) {// Because 1-based row and column // indices in // com.numericalmethod.suanshu.matrix.doubles.Matrix private static final long serialVersionUID = 4391740406197864817L; @Override public double get(int i) { - // Because 1-based row and column indices in - // com.numericalmethod.suanshu.matrix.doubles.Matrix + // Because 1-based row and column indices in com.numericalmethod.suanshu.matrix.doubles.Matrix return getValue(dbLongArrays[column - 1].get(i - 1)); } }; @@ -1240,8 +1202,7 @@ public double get(int i) { @Override public 
double get(final int row, final int column) throws MatrixAccessException { - // Because 1-based row and column indices in - // com.numericalmethod.suanshu.matrix.doubles.Matrix + // Because 1-based row and column indices in com.numericalmethod.suanshu.matrix.doubles.Matrix return getValue(dbLongArrays[column - 1].get(row - 1)); } @@ -1258,9 +1219,9 @@ public int nCols() { } /** - * Wraps long[]... as {@link Matrix} This method assumes {@code longColumnsData} to - * be in unconventional [columns][rows] structure, where first dimension denotes columns and - * second dimension denotes rows. + * Wraps long[]... as {@link Matrix} This method assumes {@code longColumnsData} to be in + * unconventional [columns][rows] structure, where first dimension denotes columns and second dimension denotes + * rows. * * @param longColumnsData 2D array to wrap * @return Immutable {@link Matrix} backed by long[]... @@ -1270,8 +1231,8 @@ public static Matrix ssMat(final long[]... longColumnsData) { for (int i = 0; i < longColumnsData.length; i++) { Require.neqNull(longColumnsData[i], "longColumnsData[" + i + "]"); if (i > 0) { - Require.eq(longColumnsData[0].length, "longColumnsData[0].length", - longColumnsData[i].length, "longColumnsData[" + i + "].length"); + Require.eq(longColumnsData[0].length, "longColumnsData[0].length", longColumnsData[i].length, + "longColumnsData[" + i + "].length"); } } return new AbstractMatrix() { @@ -1284,8 +1245,7 @@ public Vector getRow(int row) throws MatrixAccessException { @Override public double get(int i) { - // Because 1-based row and column indices in - // com.numericalmethod.suanshu.matrix.doubles.Matrix + // Because 1-based row and column indices in com.numericalmethod.suanshu.matrix.doubles.Matrix return getValue(longColumnsData[i - 1][row - 1]); } @@ -1303,8 +1263,7 @@ public Vector getColumn(int column) throws MatrixAccessException { @Override public double get(int i) { - // Because 1-based row and column indices in - // 
com.numericalmethod.suanshu.matrix.doubles.Matrix + // Because 1-based row and column indices in com.numericalmethod.suanshu.matrix.doubles.Matrix return getValue(longColumnsData[column - 1][i - 1]); } @@ -1317,8 +1276,7 @@ public int size() { @Override public double get(final int row, final int column) throws MatrixAccessException { - // Because 1-based row and column indices in - // com.numericalmethod.suanshu.matrix.doubles.Matrix + // Because 1-based row and column indices in com.numericalmethod.suanshu.matrix.doubles.Matrix return getValue(longColumnsData[column - 1][row - 1]); } @@ -1335,9 +1293,9 @@ public int nCols() { } /** - * Wraps {@link DbDoubleArray}... as {@link Matrix} This method assumes {@code dbDoubleArrays} - * to be in unconventional [columns][rows] structure, where first dimension denotes columns and - * second dimension denotes rows. + * Wraps {@link DbDoubleArray}... as {@link Matrix} This method assumes {@code dbDoubleArrays} to be in + * unconventional [columns][rows] structure, where first dimension denotes columns and second dimension denotes + * rows. * * @param dbDoubleArrays array to wrap * @return Immutable {@link Matrix} backed by {@link DbDoubleArray}... @@ -1347,8 +1305,8 @@ public static Matrix ssMat(final DbDoubleArray... 
dbDoubleArrays) { for (int i = 0; i < dbDoubleArrays.length; i++) { Require.neqNull(dbDoubleArrays[i], "dbDoubleArrays[" + i + "]"); if (i > 0) { - Require.eq(dbDoubleArrays[0].intSize(), "dbDoubleArrays[0].intSize()", - dbDoubleArrays[i].intSize(), "dbDoubleArrays[" + i + "].intSize()"); + Require.eq(dbDoubleArrays[0].intSize(), "dbDoubleArrays[0].intSize()", dbDoubleArrays[i].intSize(), + "dbDoubleArrays[" + i + "].intSize()"); } } return new AbstractMatrix() { @@ -1361,8 +1319,7 @@ public Vector getRow(int row) throws MatrixAccessException { @Override public double get(int i) { - // Because 1-based row and column indices in - // com.numericalmethod.suanshu.matrix.doubles.Matrix + // Because 1-based row and column indices in com.numericalmethod.suanshu.matrix.doubles.Matrix return getValue(dbDoubleArrays[i - 1].get(row - 1)); } @@ -1375,16 +1332,14 @@ public int size() { @Override public Vector getColumn(int column) throws MatrixAccessException { - return new AbstractDbArrayBaseVector(dbDoubleArrays[column - 1]) {// Because 1-based - // row and column + return new AbstractDbArrayBaseVector(dbDoubleArrays[column - 1]) {// Because 1-based row and column // indices in // com.numericalmethod.suanshu.matrix.doubles.Matrix private static final long serialVersionUID = 172294086541855763L; @Override public double get(int i) { - // Because 1-based row and column indices in - // com.numericalmethod.suanshu.matrix.doubles.Matrix + // Because 1-based row and column indices in com.numericalmethod.suanshu.matrix.doubles.Matrix return getValue(dbDoubleArrays[column - 1].get(i - 1)); } @@ -1397,8 +1352,7 @@ public Vector deepCopy() { @Override public double get(final int row, final int column) throws MatrixAccessException { - // Because 1-based row and column indices in - // com.numericalmethod.suanshu.matrix.doubles.Matrix + // Because 1-based row and column indices in com.numericalmethod.suanshu.matrix.doubles.Matrix return getValue(dbDoubleArrays[column - 1].get(row - 1)); } 
@@ -1415,9 +1369,9 @@ public int nCols() { } /** - * Wraps double[]... as {@link Matrix} This method assumes - * {@code doubleColumnsData} to be in unconventional [columns][rows] structure, where first - * dimension denotes columns and second dimension denotes rows. + * Wraps double[]... as {@link Matrix} This method assumes {@code doubleColumnsData} to be in + * unconventional [columns][rows] structure, where first dimension denotes columns and second dimension denotes + * rows. * * @param doubleColumnsData 2D array to wrap * @return Immutable {@link Matrix} backed by double[]... @@ -1427,8 +1381,8 @@ public static Matrix ssMat(final double[]... doubleColumnsData) { for (int i = 0; i < doubleColumnsData.length; i++) { Require.neqNull(doubleColumnsData[i], "doubleColumnsData[" + i + "]"); if (i > 0) { - Require.eq(doubleColumnsData[0].length, "doubleColumnsData[0].length", - doubleColumnsData[i].length, "doubleColumnsData[" + i + "].length"); + Require.eq(doubleColumnsData[0].length, "doubleColumnsData[0].length", doubleColumnsData[i].length, + "doubleColumnsData[" + i + "].length"); } } return new AbstractMatrix() { @@ -1441,8 +1395,7 @@ public Vector getRow(int row) throws MatrixAccessException { @Override public double get(int i) { - // Because 1-based row and column indices in - // com.numericalmethod.suanshu.matrix.doubles.Matrix + // Because 1-based row and column indices in com.numericalmethod.suanshu.matrix.doubles.Matrix return getValue(doubleColumnsData[i - 1][row - 1]); } @@ -1460,8 +1413,7 @@ public Vector getColumn(int column) throws MatrixAccessException { @Override public double get(int i) { - // Because 1-based row and column indices in - // com.numericalmethod.suanshu.matrix.doubles.Matrix + // Because 1-based row and column indices in com.numericalmethod.suanshu.matrix.doubles.Matrix return getValue(doubleColumnsData[column - 1][i - 1]); } @@ -1479,8 +1431,7 @@ public Vector deepCopy() { @Override public double get(final int row, final int column) 
throws MatrixAccessException { - // Because 1-based row and column indices in - // com.numericalmethod.suanshu.matrix.doubles.Matrix + // Because 1-based row and column indices in com.numericalmethod.suanshu.matrix.doubles.Matrix return getValue(doubleColumnsData[column - 1][row - 1]); } @@ -1497,9 +1448,8 @@ public int nCols() { } /** - * Wrap {@link Number}[]... as {@link Matrix} This method assumes {@code numberColumnsData} to - * be in unconventional [columns][rows] structure, where first dimension denotes columns and - * second dimension denotes rows. + * Wrap {@link Number}[]... as {@link Matrix} This method assumes {@code numberColumnsData} to be in unconventional + * [columns][rows] structure, where first dimension denotes columns and second dimension denotes rows. * * @param numberColumnsData 2D array to wrap * @return Immutable {@link Matrix} backed by {@link Number}[]... @@ -1509,8 +1459,8 @@ public static Matrix ssMat(final Number[]... numberColumnsData) { for (int i = 0; i < numberColumnsData.length; i++) { Require.neqNull(numberColumnsData[i], "numberColumnsData[" + i + "]"); if (i > 0) { - Require.eq(numberColumnsData[0].length, "numberColumnsData[0].length", - numberColumnsData[i].length, "numberColumnsData[" + i + "].length"); + Require.eq(numberColumnsData[0].length, "numberColumnsData[0].length", numberColumnsData[i].length, + "numberColumnsData[" + i + "].length"); } } return new AbstractMatrix() { @@ -1523,8 +1473,7 @@ public Vector getRow(int row) throws MatrixAccessException { @Override public double get(int i) { - // Because 1-based row and column indices in - // com.numericalmethod.suanshu.matrix.doubles.Matrix + // Because 1-based row and column indices in com.numericalmethod.suanshu.matrix.doubles.Matrix return getValue(numberColumnsData[i - 1][row - 1]); } @@ -1542,8 +1491,7 @@ public Vector getColumn(int column) throws MatrixAccessException { @Override public double get(final int row, final int column) throws MatrixAccessException { - 
// Because 1-based row and column indices in - // com.numericalmethod.suanshu.matrix.doubles.Matrix + // Because 1-based row and column indices in com.numericalmethod.suanshu.matrix.doubles.Matrix return getValue(numberColumnsData[column - 1][row - 1]); } @@ -1559,8 +1507,7 @@ public int nCols() { }; } - private static Optional makeDoubleAccessor( - @NotNull final DbArrayBase dbArrayBase) { + private static Optional makeDoubleAccessor(@NotNull final DbArrayBase dbArrayBase) { final LongToDoubleFunction accessor; if (dbArrayBase instanceof DbDoubleArray) { accessor = (final long pos) -> getValue(((DbDoubleArray) dbArrayBase).get(pos)); @@ -1574,11 +1521,9 @@ private static Optional makeDoubleAccessor( accessor = (final long pos) -> getValue(((DbShortArray) dbArrayBase).get(pos)); } else if (dbArrayBase instanceof DbByteArray) { accessor = (final long pos) -> getValue(((DbByteArray) dbArrayBase).get(pos)); - } else if (dbArrayBase instanceof DbArray - && Number.class.isAssignableFrom(dbArrayBase.getComponentType())) { + } else if (dbArrayBase instanceof DbArray && Number.class.isAssignableFrom(dbArrayBase.getComponentType())) { // noinspection unchecked - accessor = - (final long pos) -> getValue(((DbArray) dbArrayBase).get(pos)); + accessor = (final long pos) -> getValue(((DbArray) dbArrayBase).get(pos)); } else { accessor = null; } @@ -1586,33 +1531,29 @@ private static Optional makeDoubleAccessor( } /** - * Wraps {@link DbArrayBase}... as {@link Matrix} This method assumes {@code dbArrayBases} to be - * in unconventional [columns][rows] structure, where first dimension denotes columns and second - * dimension denotes rows. + * Wraps {@link DbArrayBase}... as {@link Matrix} This method assumes {@code dbArrayBases} to be in unconventional + * [columns][rows] structure, where first dimension denotes columns and second dimension denotes rows. * * @param dbArrayBases array to wrap * @return Immutable {@link Matrix} backed by {@link DbArrayBase}... 
- * @throws UnsupportedOperationException if any of the arrays in {@code dbArrayBases} does not - * belong to {{@link DbByteArray}, {@link DbShortArray}, {@link DbIntArray}, - * {@link DbFloatArray}, {@link DbLongArray}, {@link DbDoubleArray}, - * {@link DbArray}<? extends {@link Number}>} + * @throws UnsupportedOperationException if any of the arrays in {@code dbArrayBases} does not belong to + * {{@link DbByteArray}, {@link DbShortArray}, {@link DbIntArray}, {@link DbFloatArray}, + * {@link DbLongArray}, {@link DbDoubleArray}, {@link DbArray}<? extends {@link Number}>} */ public static Matrix ssMat(final DbArrayBase... dbArrayBases) { return ssMat(new DbArrayDirect<>(dbArrayBases)); } /** - * Wraps {@link DbArray}... as {@link Matrix} This method assumes {@code dbArray} to be in - * unconventional [columns][rows] structure, where first dimension denotes columns and second - * dimension denotes rows. + * Wraps {@link DbArray}... as {@link Matrix} This method assumes {@code dbArray} to be in unconventional + * [columns][rows] structure, where first dimension denotes columns and second dimension denotes rows. * * @param dbArray array to wrap * @param - type of elements in dbArray * @return Immutable {@link Matrix} backed by {@link DbArray}... - * @throws UnsupportedOperationException if any of the arrays in {@code dbArrayBases} does not - * belong to {{@link DbByteArray}, {@link DbShortArray}, {@link DbIntArray}, - * {@link DbFloatArray}, {@link DbLongArray}, {@link DbDoubleArray}, - * {@link DbArray}<? extends {@link Number}>} + * @throws UnsupportedOperationException if any of the arrays in {@code dbArrayBases} does not belong to + * {{@link DbByteArray}, {@link DbShortArray}, {@link DbIntArray}, {@link DbFloatArray}, + * {@link DbLongArray}, {@link DbDoubleArray}, {@link DbArray}<? 
extends {@link Number}>} */ public static Matrix ssMat(final DbArray dbArray) { Require.neqNull(dbArray, "dbArray"); @@ -1625,14 +1566,14 @@ public static Matrix ssMat(final DbArray dbArray) { throw new IllegalArgumentException("Null array at index " + ai); } if (ai > 0 && dbArrayBase.intSize() != nRows) { - throw new IllegalArgumentException("Size mismatch: first array has size " + nRows - + ", array at index " + ai + " has size " + dbArrayBase.intSize()); + throw new IllegalArgumentException("Size mismatch: first array has size " + nRows + ", array at index " + + ai + " has size " + dbArrayBase.intSize()); } final int arrayIndex = ai; accessors[ai] = makeDoubleAccessor(dbArrayBase) - .orElseThrow(() -> new UnsupportedOperationException("Invalid array at index " - + arrayIndex + " with type " + dbArrayBase.getClass() + " and component type " - + dbArrayBase.getComponentType() + ": must be numeric")); + .orElseThrow(() -> new UnsupportedOperationException( + "Invalid array at index " + arrayIndex + " with type " + dbArrayBase.getClass() + + " and component type " + dbArrayBase.getComponentType() + ": must be numeric")); } return new AbstractMatrix() { @@ -1645,8 +1586,7 @@ public Vector getRow(int row) throws MatrixAccessException { @Override public double get(int i) { - // Because 1-based row and column indices in - // com.numericalmethod.suanshu.matrix.doubles.Matrix + // Because 1-based row and column indices in com.numericalmethod.suanshu.matrix.doubles.Matrix return accessors[i - 1].applyAsDouble(row - 1); } @@ -1664,8 +1604,7 @@ public Vector getColumn(final int column) throws MatrixAccessException { @Override public double get(final int row) { - // Because 1-based row and column indices in - // com.numericalmethod.suanshu.matrix.doubles.Matrix + // Because 1-based row and column indices in com.numericalmethod.suanshu.matrix.doubles.Matrix return accessors[column - 1].applyAsDouble(row - 1); } }; @@ -1673,8 +1612,7 @@ public double get(final int row) { 
@Override public double get(final int row, final int column) throws MatrixAccessException { - // Because 1-based row and column indices in - // com.numericalmethod.suanshu.matrix.doubles.Matrix + // Because 1-based row and column indices in com.numericalmethod.suanshu.matrix.doubles.Matrix return accessors[column - 1].applyAsDouble(row - 1); } @@ -1719,15 +1657,14 @@ private static double getValue(final double value) { } private static double getValue(final T value) { - return (value == null - || (Byte.class.isAssignableFrom(value.getClass()) && value.equals(NULL_BYTE)) - || (Short.class.isAssignableFrom(value.getClass()) && value.equals(NULL_SHORT)) - || (Integer.class.isAssignableFrom(value.getClass()) && value.equals(NULL_INT)) - || (Float.class.isAssignableFrom(value.getClass()) && value.equals(NULL_FLOAT)) - || (Long.class.isAssignableFrom(value.getClass()) && value.equals(NULL_LONG)) - || (Double.class.isAssignableFrom(value.getClass()) && value.equals(NULL_DOUBLE))) - ? Double.NaN - : value.doubleValue(); + return (value == null || (Byte.class.isAssignableFrom(value.getClass()) && value.equals(NULL_BYTE)) + || (Short.class.isAssignableFrom(value.getClass()) && value.equals(NULL_SHORT)) + || (Integer.class.isAssignableFrom(value.getClass()) && value.equals(NULL_INT)) + || (Float.class.isAssignableFrom(value.getClass()) && value.equals(NULL_FLOAT)) + || (Long.class.isAssignableFrom(value.getClass()) && value.equals(NULL_LONG)) + || (Double.class.isAssignableFrom(value.getClass()) && value.equals(NULL_DOUBLE))) + ? 
Double.NaN + : value.doubleValue(); } @@ -1749,8 +1686,7 @@ public abstract static class AbstractVector implements Vector, Serializable { @Override public void set(final int index, final double value) { - throw new UnsupportedOperationException( - "Setting elements for vectors is not supported."); + throw new UnsupportedOperationException("Setting elements for vectors is not supported."); } @Override @@ -1839,8 +1775,8 @@ public Vector deepCopy() { } /** - * Returns the compact {@link String} representation of {@link Vector}. If you want to have - * String representation of the whole {@link Vector}, please use {@code show()} method. + * Returns the compact {@link String} representation of {@link Vector}. If you want to have String + * representation of the whole {@link Vector}, please use {@code show()} method. * * @return Compact string representation of {@link Vector} */ @@ -1954,8 +1890,8 @@ public int size() { @Override public double get(final int i) { - // Since {@link Vector} is 1-based data-structure and Number[] is 0-based - // data-structure, Vector[i] = Number[i-1] + // Since {@link Vector} is 1-based data-structure and Number[] is 0-based data-structure, Vector[i] = + // Number[i-1] return getValue(nums[i - 1]); } } @@ -1992,13 +1928,12 @@ public Vector multiply(final Vector vector) { public abstract Vector getColumn(int column) throws MatrixAccessException; /** - * Gets the value at rowth indexed row (1-based) and columnth indexed - * (1-based) column from matrix. + * Gets the value at rowth indexed row (1-based) and columnth indexed (1-based) column + * from matrix. 
* * @param row 1-based row-index * @param column 1-based column-index - * @return value at rowth indexed row (1-based) and columnth indexed - * (1-based) column + * @return value at rowth indexed row (1-based) and columnth indexed (1-based) column */ @Override public abstract double get(final int row, final int column) throws MatrixAccessException; @@ -2014,10 +1949,8 @@ public Matrix deepCopy() { } @Override - public void set(final int row, final int column, final double value) - throws MatrixAccessException { - throw new UnsupportedOperationException( - "Setting elements for matrix is not supported."); + public void set(final int row, final int column, final double value) throws MatrixAccessException { + throw new UnsupportedOperationException("Setting elements for matrix is not supported."); } @Override @@ -2056,8 +1989,8 @@ public Matrix ONE() { } /** - * Returns the compact {@link String} representation of {@link Matrix}. If you want to have - * String representation of the whole {@link Matrix}, please use {@code show()} method. + * Returns the compact {@link String} representation of {@link Matrix}. If you want to have String + * representation of the whole {@link Matrix}, please use {@code show()} method. * * @return Compact string representation of {@link Matrix} */ @@ -2077,8 +2010,7 @@ public String show() { private String show(final int maxRows, final int maxCols) { final int rowSizeToShow = maxRows < 0 ? this.nRows() : Math.min(maxRows, this.nRows()); - final int columnSizeToShow = - maxCols < 0 ? this.nCols() : Math.min(maxCols, this.nCols()); + final int columnSizeToShow = maxCols < 0 ? 
this.nCols() : Math.min(maxCols, this.nCols()); final StringBuilder result = new StringBuilder(); result.append(String.format("%dx%d\n", this.nRows(), this.nCols())); diff --git a/extensions/suanshu/src/test/java/io/deephaven/numerics/suanshu/TestSuanShuIntegration.java b/extensions/suanshu/src/test/java/io/deephaven/numerics/suanshu/TestSuanShuIntegration.java index 9bc70487e33..f4966522e22 100644 --- a/extensions/suanshu/src/test/java/io/deephaven/numerics/suanshu/TestSuanShuIntegration.java +++ b/extensions/suanshu/src/test/java/io/deephaven/numerics/suanshu/TestSuanShuIntegration.java @@ -33,13 +33,13 @@ public void testConvertByteArrayToVector() throws Exception { testAbstractVector(actual); final DenseVector expected = new DenseVector(doubles); testVecEquals(expected, actual); - testVecEquals(expected, ssVec((byte) 1, (byte) 2, (byte) 3, (byte) 4, (byte) 5, (byte) 6, - (byte) 7, (byte) 8, (byte) 9, (byte) 10, (byte) 11, (byte) 12, (byte) 13, (byte) 14)); + testVecEquals(expected, ssVec((byte) 1, (byte) 2, (byte) 3, (byte) 4, (byte) 5, (byte) 6, (byte) 7, (byte) 8, + (byte) 9, (byte) 10, (byte) 11, (byte) 12, (byte) 13, (byte) 14)); } public void testConvertDbByteArrayToVector() throws Exception { final DbByteArrayDirect dbByteArrayDirect = - new DbByteArrayDirect(new byte[] {1, 2, 3, 4, QueryConstants.NULL_BYTE, 6}); + new DbByteArrayDirect(new byte[] {1, 2, 3, 4, QueryConstants.NULL_BYTE, 6}); final double[] doubles = new double[] {1, 2, 3, 4, Double.NaN, 6}; final Vector actual = ssVec(dbByteArrayDirect); testAbstractVector(actual); @@ -54,13 +54,12 @@ public void testConvertShortArrayToVector() throws Exception { testAbstractVector(actual); final DenseVector expected = new DenseVector(doubles); testVecEquals(expected, actual); - testVecEquals(expected, - ssVec((short) 1, (short) 2, (short) 3, (short) 4, (short) 5, (short) 6)); + testVecEquals(expected, ssVec((short) 1, (short) 2, (short) 3, (short) 4, (short) 5, (short) 6)); } public void 
testConvertDbShortArrayToVector() throws Exception { final DbShortArrayDirect dbShortArrayDirect = - new DbShortArrayDirect(new short[] {1, 2, 3, 4, QueryConstants.NULL_SHORT, 6}); + new DbShortArrayDirect(new short[] {1, 2, 3, 4, QueryConstants.NULL_SHORT, 6}); final double[] doubles = new double[] {1, 2, 3, 4, Double.NaN, 6}; final Vector actual = ssVec(dbShortArrayDirect); testAbstractVector(actual); @@ -79,8 +78,7 @@ public void testConvertIntArrayToVector() throws Exception { } public void testConvertDbIntArrayToVector() throws Exception { - final DbIntArrayDirect dbIntArrayDirect = - new DbIntArrayDirect(-1, 2, -3, 4, QueryConstants.NULL_INT, 6); + final DbIntArrayDirect dbIntArrayDirect = new DbIntArrayDirect(-1, 2, -3, 4, QueryConstants.NULL_INT, 6); final double[] doubles = new double[] {-1, 2, -3, 4, Double.NaN, 6}; final Vector actual = ssVec(dbIntArrayDirect); testAbstractVector(actual); @@ -100,7 +98,7 @@ public void testConvertFloatArrayToVector() throws Exception { public void testConvertDbFloatArrayToVector() throws Exception { final DbFloatArrayDirect dbFloatArrayDirect = - new DbFloatArrayDirect(1.2f, -562, -23.069f, 4.56f, QueryConstants.NULL_FLOAT, 6f); + new DbFloatArrayDirect(1.2f, -562, -23.069f, 4.56f, QueryConstants.NULL_FLOAT, 6f); final double[] doubles = new double[] {1.2f, -562, -23.069f, 4.56f, Double.NaN, 6f}; final Vector actual = ssVec(dbFloatArrayDirect); testAbstractVector(actual); @@ -110,8 +108,7 @@ public void testConvertDbFloatArrayToVector() throws Exception { public void testConvertLongArrayToVector() throws Exception { final long[] longs = new long[] {-126564L, 256746545L, 3545678945136L, 4544L, 5L, 6654845L}; - final double[] doubles = - new double[] {-126564d, 256746545d, 3545678945136d, 4544d, 5d, 6654845L}; + final double[] doubles = new double[] {-126564d, 256746545d, 3545678945136d, 4544d, 5d, 6654845L}; final Vector actual = ssVec(longs); testAbstractVector(actual); final DenseVector expected = new 
DenseVector(doubles); @@ -120,10 +117,9 @@ public void testConvertLongArrayToVector() throws Exception { } public void testConvertDbLongArrayToVector() throws Exception { - final DbLongArrayDirect dbLongArrayDirect = new DbLongArrayDirect(-126564L, 256746545L, - 3545678945136L, 4544L, QueryConstants.NULL_LONG, 6654845L); - final double[] doubles = - new double[] {-126564L, 256746545L, 3545678945136L, 4544L, Double.NaN, 6654845L}; + final DbLongArrayDirect dbLongArrayDirect = + new DbLongArrayDirect(-126564L, 256746545L, 3545678945136L, 4544L, QueryConstants.NULL_LONG, 6654845L); + final double[] doubles = new double[] {-126564L, 256746545L, 3545678945136L, 4544L, Double.NaN, 6654845L}; final Vector actual = ssVec(dbLongArrayDirect); testAbstractVector(actual); final DenseVector expected = new DenseVector(doubles); @@ -131,23 +127,19 @@ public void testConvertDbLongArrayToVector() throws Exception { } public void testConvertDoubleArrayToVector() throws Exception { - final double[] doublesActual = - new double[] {2.365d, 2125.5698d, -98231.2656897451d, 697, 3457836.7283648723d}; - final double[] doubles = - new double[] {2.365d, 2125.5698d, -98231.2656897451d, 697, 3457836.7283648723d}; + final double[] doublesActual = new double[] {2.365d, 2125.5698d, -98231.2656897451d, 697, 3457836.7283648723d}; + final double[] doubles = new double[] {2.365d, 2125.5698d, -98231.2656897451d, 697, 3457836.7283648723d}; final Vector actual = ssVec(doublesActual); testAbstractVector(actual); final DenseVector expected = new DenseVector(doubles); testVecEquals(expected, actual); - testVecEquals(expected, - ssVec(2.365d, 2125.5698d, -98231.2656897451d, 697, 3457836.7283648723d)); + testVecEquals(expected, ssVec(2.365d, 2125.5698d, -98231.2656897451d, 697, 3457836.7283648723d)); } public void testConvertDbDoubleArrayToVector() throws Exception { - final DbDoubleArrayDirect dbDoubleArrayDirect = new DbDoubleArrayDirect(2.365d, 2125.5698d, - -98231.2656897451d, QueryConstants.NULL_DOUBLE, 
3457836.7283648723d); - final double[] doubles = - new double[] {2.365d, 2125.5698d, -98231.2656897451d, Double.NaN, 3457836.7283648723d}; + final DbDoubleArrayDirect dbDoubleArrayDirect = new DbDoubleArrayDirect(2.365d, 2125.5698d, -98231.2656897451d, + QueryConstants.NULL_DOUBLE, 3457836.7283648723d); + final double[] doubles = new double[] {2.365d, 2125.5698d, -98231.2656897451d, Double.NaN, 3457836.7283648723d}; final Vector actual = ssVec(dbDoubleArrayDirect); testAbstractVector(actual); final DenseVector expected = new DenseVector(doubles); @@ -155,8 +147,8 @@ public void testConvertDbDoubleArrayToVector() throws Exception { } public void testConvertDbNumberArrayToVector() throws Exception { - DbArray dbArray = new DbArrayDirect<>(BigDecimal.ZERO, BigDecimal.ONE, BigDecimal.ZERO, - BigDecimal.ONE, null, BigDecimal.ONE); + DbArray dbArray = new DbArrayDirect<>(BigDecimal.ZERO, BigDecimal.ONE, BigDecimal.ZERO, BigDecimal.ONE, null, + BigDecimal.ONE); double[] doubles = new double[] {0, 1, 0, 1, Double.NaN, 1}; Vector actual = ssVec(dbArray); testAbstractVector(actual); @@ -164,10 +156,9 @@ public void testConvertDbNumberArrayToVector() throws Exception { assertTrue(expected.equals(actual)); assertEquals(expected.toArray(), actual.toArray()); - dbArray = new DbArrayDirect<>(2.365d, 2125.5698d, -98231.2656897451d, - QueryConstants.NULL_DOUBLE, 3457836.7283648723d); - doubles = - new double[] {2.365d, 2125.5698d, -98231.2656897451d, Double.NaN, 3457836.7283648723d}; + dbArray = new DbArrayDirect<>(2.365d, 2125.5698d, -98231.2656897451d, QueryConstants.NULL_DOUBLE, + 3457836.7283648723d); + doubles = new double[] {2.365d, 2125.5698d, -98231.2656897451d, Double.NaN, 3457836.7283648723d}; actual = ssVec(dbArray); testAbstractVector(actual); expected = new DenseVector(doubles); @@ -182,11 +173,9 @@ public void testConvertDbNumberArrayToVector() throws Exception { assertTrue(expected.equals(actual)); assertEquals(expected.toArray(), actual.toArray()); - dbArray = new 
DbArrayDirect<>(QueryConstants.NULL_BYTE, QueryConstants.NULL_SHORT, - QueryConstants.NULL_INT, QueryConstants.NULL_FLOAT, QueryConstants.NULL_LONG, - QueryConstants.NULL_DOUBLE); - doubles = - new double[] {Double.NaN, Double.NaN, Double.NaN, Double.NaN, Double.NaN, Double.NaN}; + dbArray = new DbArrayDirect<>(QueryConstants.NULL_BYTE, QueryConstants.NULL_SHORT, QueryConstants.NULL_INT, + QueryConstants.NULL_FLOAT, QueryConstants.NULL_LONG, QueryConstants.NULL_DOUBLE); + doubles = new double[] {Double.NaN, Double.NaN, Double.NaN, Double.NaN, Double.NaN, Double.NaN}; actual = ssVec(dbArray); testAbstractVector(actual); expected = new DenseVector(doubles); @@ -232,8 +221,8 @@ public void testConvertByte2dArrayToMatrix() throws Exception { Matrix expected = new DenseMatrix(doubles); expected = new SimpleMatrixMathOperation().transpose(expected); - final byte[][] bytes = new byte[][] {{1, 2, 3, 4, 5, 6}, {11, 12, 13, 14, 15, 16}, - {21, 22, 23, 24, 25, 26}, {31, 32, 33, 34, 35, 36}}; + final byte[][] bytes = new byte[][] {{1, 2, 3, 4, 5, 6}, {11, 12, 13, 14, 15, 16}, {21, 22, 23, 24, 25, 26}, + {31, 32, 33, 34, 35, 36}}; final Matrix actual = ssMat(bytes); testAbstractMatrix(actual); @@ -241,16 +230,13 @@ public void testConvertByte2dArrayToMatrix() throws Exception { } public void testConvertDbByteArrayToMatrix() throws Exception { - final double[][] doubles = - new double[][] {{1, 2, 3, 4, 5, 6}, {11, 12, 13, 14, 15, Double.NaN}, - {21, 22, 23, 24, 25, 26}, {31, 32, 33, 34, 35, 36}}; - - final DbByteArray[] dbByteArrays = - new DbByteArray[] {new DbByteArrayDirect(new byte[] {1, 2, 3, 4, 5, 6}), - new DbByteArrayDirect( - new byte[] {11, 12, 13, 14, 15, QueryConstants.NULL_BYTE}), - new DbByteArrayDirect(new byte[] {21, 22, 23, 24, 25, 26}), - new DbByteArrayDirect(new byte[] {31, 32, 33, 34, 35, 36})}; + final double[][] doubles = new double[][] {{1, 2, 3, 4, 5, 6}, {11, 12, 13, 14, 15, Double.NaN}, + {21, 22, 23, 24, 25, 26}, {31, 32, 33, 34, 35, 36}}; + + final 
DbByteArray[] dbByteArrays = new DbByteArray[] {new DbByteArrayDirect(new byte[] {1, 2, 3, 4, 5, 6}), + new DbByteArrayDirect(new byte[] {11, 12, 13, 14, 15, QueryConstants.NULL_BYTE}), + new DbByteArrayDirect(new byte[] {21, 22, 23, 24, 25, 26}), + new DbByteArrayDirect(new byte[] {31, 32, 33, 34, 35, 36})}; Matrix expected = new DenseMatrix(doubles); expected = new SimpleMatrixMathOperation().transpose(expected); @@ -265,27 +251,24 @@ public void testConvertShort2dArrayToMatrix() throws Exception { Matrix expected = new DenseMatrix(doubles); expected = new SimpleMatrixMathOperation().transpose(expected); - final short[][] shorts = new short[][] {{1, 2, 3, 4, 5, 6}, {11, 12, 13, 14, 15, 16}, - {21, 22, 23, 24, 25, 26}, {31, 32, 33, 34, 35, 36}}; + final short[][] shorts = new short[][] {{1, 2, 3, 4, 5, 6}, {11, 12, 13, 14, 15, 16}, {21, 22, 23, 24, 25, 26}, + {31, 32, 33, 34, 35, 36}}; final Matrix actual = ssMat(shorts); testAbstractMatrix(actual); testMatEquals(expected, actual); } public void testConvertDbShortArrayToMatrix() throws Exception { - final double[][] doubles = - new double[][] {{1, 2, 3, 4, 5, 6}, {11, 12, 13, 14, 15, Double.NaN}, - {21, 22, 23, 24, 25, 26}, {31, 32, 33, 34, 35, 36}}; + final double[][] doubles = new double[][] {{1, 2, 3, 4, 5, 6}, {11, 12, 13, 14, 15, Double.NaN}, + {21, 22, 23, 24, 25, 26}, {31, 32, 33, 34, 35, 36}}; Matrix expected = new DenseMatrix(doubles); expected = new SimpleMatrixMathOperation().transpose(expected); - final DbShortArray[] dbShortArrays = - new DbShortArray[] {new DbShortArrayDirect(new short[] {1, 2, 3, 4, 5, 6}), - new DbShortArrayDirect( - new short[] {11, 12, 13, 14, 15, QueryConstants.NULL_SHORT}), - new DbShortArrayDirect(new short[] {21, 22, 23, 24, 25, 26}), - new DbShortArrayDirect(new short[] {31, 32, 33, 34, 35, 36})}; + final DbShortArray[] dbShortArrays = new DbShortArray[] {new DbShortArrayDirect(new short[] {1, 2, 3, 4, 5, 6}), + new DbShortArrayDirect(new short[] {11, 12, 13, 14, 15, 
QueryConstants.NULL_SHORT}), + new DbShortArrayDirect(new short[] {21, 22, 23, 24, 25, 26}), + new DbShortArrayDirect(new short[] {31, 32, 33, 34, 35, 36})}; final Matrix actual = ssMat(dbShortArrays); testAbstractMatrix(actual); testMatEquals(expected, actual); @@ -297,25 +280,23 @@ public void testConvertInt2dArrayToMatrix() throws Exception { Matrix expected = new DenseMatrix(doubles); expected = new SimpleMatrixMathOperation().transpose(expected); - final int[][] ints = new int[][] {{1, 2, 3, 4, 5, 6}, {11, 12, 13, 14, 15, 16}, - {21, 22, 23, 24, 25, 26}, {31, 32, 33, 34, 35, 36}}; + final int[][] ints = new int[][] {{1, 2, 3, 4, 5, 6}, {11, 12, 13, 14, 15, 16}, {21, 22, 23, 24, 25, 26}, + {31, 32, 33, 34, 35, 36}}; final Matrix actual = ssMat(ints); testAbstractMatrix(actual); testMatEquals(expected, actual); } public void testConvertDbIntArrayToMatrix() throws Exception { - final double[][] doubles = - new double[][] {{1, 2, 3, 4, 5, 6}, {11, 12, 13, 14, 15, Double.NaN}, - {21, 22, 23, 24, 25, 26}, {31, 32, 33, 34, 35, 36}}; + final double[][] doubles = new double[][] {{1, 2, 3, 4, 5, 6}, {11, 12, 13, 14, 15, Double.NaN}, + {21, 22, 23, 24, 25, 26}, {31, 32, 33, 34, 35, 36}}; Matrix expected = new DenseMatrix(doubles); expected = new SimpleMatrixMathOperation().transpose(expected); final DbIntArray[] dbIntArrays = new DbIntArray[] {new DbIntArrayDirect(1, 2, 3, 4, 5, 6), new DbIntArrayDirect(11, 12, 13, 14, 15, QueryConstants.NULL_INT), - new DbIntArrayDirect(21, 22, 23, 24, 25, 26), - new DbIntArrayDirect(31, 32, 33, 34, 35, 36)}; + new DbIntArrayDirect(21, 22, 23, 24, 25, 26), new DbIntArrayDirect(31, 32, 33, 34, 35, 36)}; final Matrix actual = ssMat(dbIntArrays); testAbstractMatrix(actual); @@ -323,66 +304,58 @@ public void testConvertDbIntArrayToMatrix() throws Exception { } public void testConvertFloat2dArrayToMatrix() throws Exception { - final double[][] doubles = - new double[][] {{1f, 2f, 3f, 4f, 5f, 6f}, {11f, 12f, 13f, 14f, 15f, 16f}, - {21f, 22f, 
23f, 24f, 25f, 26f}, {31f, 32f, 33f, 34f, 35f, 36f}}; + final double[][] doubles = new double[][] {{1f, 2f, 3f, 4f, 5f, 6f}, {11f, 12f, 13f, 14f, 15f, 16f}, + {21f, 22f, 23f, 24f, 25f, 26f}, {31f, 32f, 33f, 34f, 35f, 36f}}; Matrix expected = new DenseMatrix(doubles); expected = new SimpleMatrixMathOperation().transpose(expected); - final float[][] floats = - new float[][] {{1f, 2f, 3f, 4f, 5f, 6f}, {11f, 12f, 13f, 14f, 15f, 16f}, - {21f, 22f, 23f, 24f, 25f, 26f}, {31f, 32f, 33f, 34f, 35f, 36f}}; + final float[][] floats = new float[][] {{1f, 2f, 3f, 4f, 5f, 6f}, {11f, 12f, 13f, 14f, 15f, 16f}, + {21f, 22f, 23f, 24f, 25f, 26f}, {31f, 32f, 33f, 34f, 35f, 36f}}; final Matrix actual = ssMat(floats); testAbstractMatrix(actual); testMatEquals(expected, actual); } public void testConvertDbFloatArrayToMatrix() throws Exception { - final double[][] doubles = - new double[][] {{1f, 2f, 3f, 4f, 5f, 6f}, {11f, 12f, 13f, 14f, 15f, Double.NaN}, - {21f, 22f, 23f, 24f, 25f, 26f}, {31f, 32f, 33f, 34f, 35f, 36f}}; + final double[][] doubles = new double[][] {{1f, 2f, 3f, 4f, 5f, 6f}, {11f, 12f, 13f, 14f, 15f, Double.NaN}, + {21f, 22f, 23f, 24f, 25f, 26f}, {31f, 32f, 33f, 34f, 35f, 36f}}; Matrix expected = new DenseMatrix(doubles); expected = new SimpleMatrixMathOperation().transpose(expected); - final DbFloatArray[] dbFloatArrays = - new DbFloatArray[] {new DbFloatArrayDirect(1f, 2f, 3f, 4f, 5f, 6f), - new DbFloatArrayDirect(11f, 12f, 13f, 14f, 15f, QueryConstants.NULL_FLOAT), - new DbFloatArrayDirect(21f, 22f, 23f, 24f, 25f, 26f), - new DbFloatArrayDirect(31f, 32f, 33f, 34f, 35f, 36f)}; + final DbFloatArray[] dbFloatArrays = new DbFloatArray[] {new DbFloatArrayDirect(1f, 2f, 3f, 4f, 5f, 6f), + new DbFloatArrayDirect(11f, 12f, 13f, 14f, 15f, QueryConstants.NULL_FLOAT), + new DbFloatArrayDirect(21f, 22f, 23f, 24f, 25f, 26f), + new DbFloatArrayDirect(31f, 32f, 33f, 34f, 35f, 36f)}; final Matrix actual = ssMat(dbFloatArrays); testAbstractMatrix(actual); testMatEquals(expected, actual); 
} public void testConvertLong2dArrayToMatrix() throws Exception { - final double[][] doubles = - new double[][] {{1L, 2L, 3L, 4L, 5L, 6L}, {11L, 12L, 13L, 14L, 15L, 16L}, - {21L, 22L, 23L, 24L, 25L, 26L}, {31L, 32L, 33L, 34L, 35L, 36L}}; + final double[][] doubles = new double[][] {{1L, 2L, 3L, 4L, 5L, 6L}, {11L, 12L, 13L, 14L, 15L, 16L}, + {21L, 22L, 23L, 24L, 25L, 26L}, {31L, 32L, 33L, 34L, 35L, 36L}}; Matrix expected = new DenseMatrix(doubles); expected = new SimpleMatrixMathOperation().transpose(expected); - final long[][] longs = - new long[][] {{1L, 2L, 3L, 4L, 5L, 6L}, {11L, 12L, 13L, 14L, 15L, 16L}, - {21L, 22L, 23L, 24L, 25L, 26L}, {31L, 32L, 33L, 34L, 35L, 36L}}; + final long[][] longs = new long[][] {{1L, 2L, 3L, 4L, 5L, 6L}, {11L, 12L, 13L, 14L, 15L, 16L}, + {21L, 22L, 23L, 24L, 25L, 26L}, {31L, 32L, 33L, 34L, 35L, 36L}}; final Matrix actual = ssMat(longs); testAbstractMatrix(actual); testMatEquals(expected, actual); } public void testConvertDbLongArrayToMatrix() throws Exception { - final double[][] doubles = - new double[][] {{1l, 2l, 3l, 4l, 5l, 6l}, {11l, 12l, 13l, 14l, 15l, Double.NaN}, - {21l, 22l, 23l, 24l, 25l, 26l}, {31l, 32l, 33l, 34l, 35l, 36l}}; + final double[][] doubles = new double[][] {{1l, 2l, 3l, 4l, 5l, 6l}, {11l, 12l, 13l, 14l, 15l, Double.NaN}, + {21l, 22l, 23l, 24l, 25l, 26l}, {31l, 32l, 33l, 34l, 35l, 36l}}; Matrix expected = new DenseMatrix(doubles); expected = new SimpleMatrixMathOperation().transpose(expected); - final DbLongArray[] dbLongArrays = - new DbLongArray[] {new DbLongArrayDirect(1l, 2l, 3l, 4l, 5l, 6l), - new DbLongArrayDirect(11l, 12l, 13l, 14l, 15l, QueryConstants.NULL_LONG), - new DbLongArrayDirect(21l, 22l, 23l, 24l, 25l, 26l), - new DbLongArrayDirect(31l, 32l, 33l, 34l, 35l, 36l)}; + final DbLongArray[] dbLongArrays = new DbLongArray[] {new DbLongArrayDirect(1l, 2l, 3l, 4l, 5l, 6l), + new DbLongArrayDirect(11l, 12l, 13l, 14l, 15l, QueryConstants.NULL_LONG), + new DbLongArrayDirect(21l, 22l, 23l, 24l, 25l, 26l), 
+ new DbLongArrayDirect(31l, 32l, 33l, 34l, 35l, 36l)}; final Matrix actual = ssMat(dbLongArrays); testAbstractMatrix(actual); testMatEquals(expected, actual); @@ -400,15 +373,13 @@ public void testConvertDouble2dArrayToMatrix() throws Exception { } public void testConvertDbDoubleArrayToMatrix() throws Exception { - final double[][] doubles = - new double[][] {{1, 2, 3, 4, 5, 6}, {11, 12, 13, 14, 15, Double.NaN}, - {21, 22, 23, 24, 25, 26}, {31, 32, 33, 34, 35, 36}}; - - final DbDoubleArray[] dbDoubleArrays = - new DbDoubleArray[] {new DbDoubleArrayDirect(1d, 2d, 3d, 4d, 5d, 6d), - new DbDoubleArrayDirect(11d, 12d, 13d, 14d, 15d, QueryConstants.NULL_DOUBLE), - new DbDoubleArrayDirect(21d, 22d, 23d, 24d, 25d, 26d), - new DbDoubleArrayDirect(31d, 32d, 33d, 34d, 35d, 36d)}; + final double[][] doubles = new double[][] {{1, 2, 3, 4, 5, 6}, {11, 12, 13, 14, 15, Double.NaN}, + {21, 22, 23, 24, 25, 26}, {31, 32, 33, 34, 35, 36}}; + + final DbDoubleArray[] dbDoubleArrays = new DbDoubleArray[] {new DbDoubleArrayDirect(1d, 2d, 3d, 4d, 5d, 6d), + new DbDoubleArrayDirect(11d, 12d, 13d, 14d, 15d, QueryConstants.NULL_DOUBLE), + new DbDoubleArrayDirect(21d, 22d, 23d, 24d, 25d, 26d), + new DbDoubleArrayDirect(31d, 32d, 33d, 34d, 35d, 36d)}; Matrix expected = new DenseMatrix(doubles); expected = new SimpleMatrixMathOperation().transpose(expected); @@ -418,19 +389,14 @@ public void testConvertDbDoubleArrayToMatrix() throws Exception { } public void testConvertDbArrayBasesToMatrix() throws Exception { - final double[][] doubles = new double[][] {{1, 2, 3, 4, 5, 6}, {1, 2, 3, 4, 5, 6}, - {1, 2, 3, 4, 5, 6}, {11, 12, 13, 14, 15, 16}, {21, 22, 23, 24, 25, 26}, - {31, 32, 33, 34, 35, 36}, {0, 1, 0, 1, 0, 1}}; - - final DbArrayBase[] dbArrayBases = - new DbArrayBase[] {new DbByteArrayDirect(new byte[] {1, 2, 3, 4, 5, 6}), - new DbShortArrayDirect(new short[] {1, 2, 3, 4, 5, 6}), - new DbDoubleArrayDirect(1d, 2d, 3d, 4d, 5d, 6d), - new DbIntArrayDirect(11, 12, 13, 14, 15, 16), - new 
DbLongArrayDirect(21l, 22l, 23l, 24l, 25l, 26l), - new DbFloatArrayDirect(31f, 32f, 33f, 34f, 35f, 36f), - new DbArrayDirect<>(BigDecimal.ZERO, BigDecimal.ONE, BigDecimal.ZERO, - BigDecimal.ONE, BigDecimal.ZERO, BigDecimal.ONE)}; + final double[][] doubles = new double[][] {{1, 2, 3, 4, 5, 6}, {1, 2, 3, 4, 5, 6}, {1, 2, 3, 4, 5, 6}, + {11, 12, 13, 14, 15, 16}, {21, 22, 23, 24, 25, 26}, {31, 32, 33, 34, 35, 36}, {0, 1, 0, 1, 0, 1}}; + + final DbArrayBase[] dbArrayBases = new DbArrayBase[] {new DbByteArrayDirect(new byte[] {1, 2, 3, 4, 5, 6}), + new DbShortArrayDirect(new short[] {1, 2, 3, 4, 5, 6}), new DbDoubleArrayDirect(1d, 2d, 3d, 4d, 5d, 6d), + new DbIntArrayDirect(11, 12, 13, 14, 15, 16), new DbLongArrayDirect(21l, 22l, 23l, 24l, 25l, 26l), + new DbFloatArrayDirect(31f, 32f, 33f, 34f, 35f, 36f), new DbArrayDirect<>(BigDecimal.ZERO, + BigDecimal.ONE, BigDecimal.ZERO, BigDecimal.ONE, BigDecimal.ZERO, BigDecimal.ONE)}; Matrix expected = new DenseMatrix(doubles); expected = new SimpleMatrixMathOperation().transpose(expected); @@ -440,20 +406,19 @@ public void testConvertDbArrayBasesToMatrix() throws Exception { } public void testConvertDbArraysToMatrix() throws Exception { - final double[][] doubles = new double[][] {{1, 2, 3, 4, Double.NaN, 6}, - {1, 2, 3, 4, Double.NaN, 6}, {1, 2, 3, 4, Double.NaN, 6}, - {11, 12, 13, 14, Double.NaN, 16}, {21, 22, 23, 24, Double.NaN, 26}, + final double[][] doubles = new double[][] {{1, 2, 3, 4, Double.NaN, 6}, {1, 2, 3, 4, Double.NaN, 6}, + {1, 2, 3, 4, Double.NaN, 6}, {11, 12, 13, 14, Double.NaN, 16}, {21, 22, 23, 24, Double.NaN, 26}, {31, 32, 33, 34, Double.NaN, 36}, {0, 1, 0, 1, Double.NaN, 1}}; - final DbArray dbArrays = new DbArrayDirect( - new DbByteArrayDirect(new byte[] {1, 2, 3, 4, QueryConstants.NULL_BYTE, 6}), - new DbShortArrayDirect(new short[] {1, 2, 3, 4, QueryConstants.NULL_SHORT, 6}), - new DbDoubleArrayDirect(1d, 2d, 3d, 4d, QueryConstants.NULL_DOUBLE, 6d), - new DbIntArrayDirect(11, 12, 13, 14, 
QueryConstants.NULL_INT, 16), - new DbLongArrayDirect(21l, 22l, 23l, 24l, QueryConstants.NULL_LONG, 26l), - new DbFloatArrayDirect(31f, 32f, 33f, 34f, QueryConstants.NULL_FLOAT, 36f), - new DbArrayDirect<>(BigDecimal.ZERO, BigDecimal.ONE, BigDecimal.ZERO, BigDecimal.ONE, - null, BigDecimal.ONE)); + final DbArray dbArrays = + new DbArrayDirect(new DbByteArrayDirect(new byte[] {1, 2, 3, 4, QueryConstants.NULL_BYTE, 6}), + new DbShortArrayDirect(new short[] {1, 2, 3, 4, QueryConstants.NULL_SHORT, 6}), + new DbDoubleArrayDirect(1d, 2d, 3d, 4d, QueryConstants.NULL_DOUBLE, 6d), + new DbIntArrayDirect(11, 12, 13, 14, QueryConstants.NULL_INT, 16), + new DbLongArrayDirect(21l, 22l, 23l, 24l, QueryConstants.NULL_LONG, 26l), + new DbFloatArrayDirect(31f, 32f, 33f, 34f, QueryConstants.NULL_FLOAT, 36f), + new DbArrayDirect<>(BigDecimal.ZERO, BigDecimal.ONE, BigDecimal.ZERO, BigDecimal.ONE, null, + BigDecimal.ONE)); Matrix expected = new DenseMatrix(doubles); expected = new SimpleMatrixMathOperation().transpose(expected); @@ -482,12 +447,11 @@ public void testConvertDbArrayBasesToMatrixException() throws Exception { } public void testConvertNumber2dArrayToMatrix() throws Exception { - final double[][] doubles = - new double[][] {{1, 2, 3, 4, 5, 6}, {11, 12, 13, 14, 15, Double.NaN}, - {21, 22, 23, 24, 25, 26}, {31, 32, 33, 34, 35, 36}}; + final double[][] doubles = new double[][] {{1, 2, 3, 4, 5, 6}, {11, 12, 13, 14, 15, Double.NaN}, + {21, 22, 23, 24, 25, 26}, {31, 32, 33, 34, 35, 36}}; - final Integer[][] integers = new Integer[][] {{1, 2, 3, 4, 5, 6}, - {11, 12, 13, 14, 15, null}, {21, 22, 23, 24, 25, 26}, {31, 32, 33, 34, 35, 36}}; + final Integer[][] integers = new Integer[][] {{1, 2, 3, 4, 5, 6}, {11, 12, 13, 14, 15, null}, + {21, 22, 23, 24, 25, 26}, {31, 32, 33, 34, 35, 36}}; Matrix expected = new DenseMatrix(doubles); expected = new SimpleMatrixMathOperation().transpose(expected); @@ -495,9 +459,9 @@ public void testConvertNumber2dArrayToMatrix() throws Exception { 
testAbstractMatrix(actual); testMatEquals(expected, actual); testMatEquals(expected, ssMat(ssVec(1, 2, 3, 4, 5, 6).toArray(), - ssVec(11, 12, 13, 14, 15, null).toArray(), - ssVec(21, 22, 23, 24, 25, 26).toArray(), - ssVec(31, 32, 33, 34, 35, 36).toArray())); + ssVec(11, 12, 13, 14, 15, null).toArray(), + ssVec(21, 22, 23, 24, 25, 26).toArray(), + ssVec(31, 32, 33, 34, 35, 36).toArray())); } private void testMatEquals(final Matrix expected, final Matrix actual) { diff --git a/fishconfig-local/src/main/java/io/deephaven/fishconfig/PropertyInputStreamLoaderResourcesOnly.java b/fishconfig-local/src/main/java/io/deephaven/fishconfig/PropertyInputStreamLoaderResourcesOnly.java index 7c8d7d05616..06ad104a2a7 100644 --- a/fishconfig-local/src/main/java/io/deephaven/fishconfig/PropertyInputStreamLoaderResourcesOnly.java +++ b/fishconfig-local/src/main/java/io/deephaven/fishconfig/PropertyInputStreamLoaderResourcesOnly.java @@ -5,8 +5,8 @@ import java.io.InputStream; /** - * A {@link PropertyInputStreamLoader} that loads the property input stream from resources only. Has - * priority 0. Useful for unit testing. + * A {@link PropertyInputStreamLoader} that loads the property input stream from resources only. Has priority 0. Useful + * for unit testing. 
*/ public class PropertyInputStreamLoaderResourcesOnly implements PropertyInputStreamLoader { @@ -21,7 +21,7 @@ public InputStream openConfiguration(String filename) { final InputStream in = getClass().getResourceAsStream(resourcePath); if (in == null) { final String message = String.format("Unable to find prop file at resource path '%s'", - resourcePath); + resourcePath); throw new ConfigurationException(message); } return in; diff --git a/grpc-api-client/console/src/main/java/io/deephaven/grpc_api/example/ConsoleClient.java b/grpc-api-client/console/src/main/java/io/deephaven/grpc_api/example/ConsoleClient.java index c1c37d3d5d3..52986f7269b 100644 --- a/grpc-api-client/console/src/main/java/io/deephaven/grpc_api/example/ConsoleClient.java +++ b/grpc-api-client/console/src/main/java/io/deephaven/grpc_api/example/ConsoleClient.java @@ -48,15 +48,15 @@ public static void main(final String[] args) throws Exception { // Assign properties that need to be set to even turn on System.setProperty("Configuration.rootFile", "grpc-api.prop"); System.setProperty("io.deephaven.configuration.PropertyInputStreamLoader.override", - "io.deephaven.configuration.PropertyInputStreamLoaderTraditional"); + "io.deephaven.configuration.PropertyInputStreamLoaderTraditional"); final String sessionType = System.getProperty("console.sessionType", "groovy"); log.info().append("Session type ").append(sessionType).endl(); final String target = args.length == 0 ? 
"localhost:8080" : args[0]; final ManagedChannel channel = ManagedChannelBuilder.forTarget(target) - .usePlaintext() - .build(); + .usePlaintext() + .build(); final Scheduler scheduler = DeephavenApiServerModule.provideScheduler(2); final ConsoleClient client = new ConsoleClient(scheduler, channel, sessionType); @@ -78,8 +78,7 @@ public static void main(final String[] args) throws Exception { private String sessionHeader; private Metadata.Key sessionHeaderKey; - private ConsoleClient(final Scheduler scheduler, final ManagedChannel managedChannel, - String sessionType) { + private ConsoleClient(final Scheduler scheduler, final ManagedChannel managedChannel, String sessionType) { this.scheduler = scheduler; this.serverChannel = ClientInterceptors.intercept(managedChannel, new AuthInterceptor()); this.sessionService = SessionServiceGrpc.newStub(serverChannel); @@ -92,11 +91,11 @@ private void start() { // no payload in this simple server auth sessionService.newSession(HandshakeRequest.newBuilder().setAuthProtocol(1).build(), - new ResponseBuilder() - .onError(this::onError) - .onComplete(this::startConsole) - .onNext(this::onNewHandshakeResponse) - .build()); + new ResponseBuilder() + .onError(this::onError) + .onComplete(this::startConsole) + .onNext(this::onNewHandshakeResponse) + .build()); } private void stop() { @@ -117,58 +116,57 @@ private void blockUntilShutdown() throws InterruptedException { private void startConsole() { consoleTicket = ExportTicketHelper.exportIdToTicket(nextId++); consoleServiceGrpc.startConsole(StartConsoleRequest.newBuilder() - .setResultId(consoleTicket) - .setSessionType(sessionType) - .build(), - new ResponseBuilder() - .onNext(response -> scheduler.runImmediately(this::awaitCommand)) - .build()); + .setResultId(consoleTicket) + .setSessionType(sessionType) + .build(), + new ResponseBuilder() + .onNext(response -> scheduler.runImmediately(this::awaitCommand)) + .build()); LogSubscriptionRequest request = 
LogSubscriptionRequest.newBuilder() - .addLevels("STDOUT") - .addLevels("STDERR") - .setLastSeenLogTimestamp(System.currentTimeMillis() * 1000) // don't replay any logs - // that came before now - .build(); + .addLevels("STDOUT") + .addLevels("STDERR") + .setLastSeenLogTimestamp(System.currentTimeMillis() * 1000) // don't replay any logs that came before + // now + .build(); consoleServiceGrpc.subscribeToLogs(request, - new StreamObserver() { - private String stripLastNewline(String msg) { - if (msg.endsWith("\n")) { - return msg.substring(0, msg.length() - 1); + new StreamObserver() { + private String stripLastNewline(String msg) { + if (msg.endsWith("\n")) { + return msg.substring(0, msg.length() - 1); + } + return msg; } - return msg; - } - @Override - public void onNext(LogSubscriptionData value) { - if ("STDOUT".equals(value.getLogLevel())) { - RemoteStdout.log.info() - .append(stripLastNewline(value.getMessage())) - .endl(); - } else if ("STDERR".equals(value.getLogLevel())) { - RemoteStderr.log.info() - .append(stripLastNewline(value.getMessage())) - .endl(); + @Override + public void onNext(LogSubscriptionData value) { + if ("STDOUT".equals(value.getLogLevel())) { + RemoteStdout.log.info() + .append(stripLastNewline(value.getMessage())) + .endl(); + } else if ("STDERR".equals(value.getLogLevel())) { + RemoteStderr.log.info() + .append(stripLastNewline(value.getMessage())) + .endl(); + } } - } - @Override - public void onError(Throwable t) { - log.error(t).append("onError").endl(); - stop(); - } + @Override + public void onError(Throwable t) { + log.error(t).append("onError").endl(); + stop(); + } - @Override - public void onCompleted() { - // TODO reconnect - } - }); + @Override + public void onCompleted() { + // TODO reconnect + } + }); } private void awaitCommand() { Console console = System.console(); if (console == null) { - log.error().append("Can't open a console prompt, try running this outside of gradle?") - .endl(); + log.error().append("Can't open 
a console prompt, try running this outside of gradle?").endl(); stop(); return; } @@ -179,47 +177,45 @@ private void awaitCommand() { } log.debug().append("client preparing to send command").endl(); consoleServiceGrpc.executeCommand( - ExecuteCommandRequest.newBuilder() - .setConsoleId(consoleTicket) - .setCode(userCode) - .build(), - new ResponseBuilder() - .onNext(response -> { - log.debug().append("command completed successfully: ") - .append(response.toString()).endl(); - Optional firstTable = response.getCreatedList().stream() - .filter(var -> var.getType().equals("Table")).findAny(); - firstTable.ifPresent(table -> { - log.debug().append("A table was created: ").append(table.toString()).endl(); - consoleServiceGrpc.fetchTable(FetchTableRequest.newBuilder() - .setConsoleId(consoleTicket) - .setTableId(ExportTicketHelper.exportIdToTicket(nextId++)) - .setTableName(table.getName()) - .build(), - new ResponseBuilder() - .onNext(this::onExportedTableCreationResponse) - .onError(err -> { - log.error(err).append("onError").endl(); - scheduler.runImmediately(this::awaitCommand); - }) - .onComplete(() -> { - log.debug().append("fetch complete").endl(); - }) - .build()); - }); - // otherwise go for another query - if (!firstTable.isPresent()) { - // let's give the just-executed command a little bit of time to print so we - // reduce - // the chance of clobbering our stdin prompt. 
- scheduler.runAfterDelay(100, this::awaitCommand); - } - }) - .onError(err -> { - log.error(err).append("onError").endl(); - scheduler.runImmediately(this::awaitCommand); - }) - .build()); + ExecuteCommandRequest.newBuilder() + .setConsoleId(consoleTicket) + .setCode(userCode) + .build(), + new ResponseBuilder() + .onNext(response -> { + log.debug().append("command completed successfully: ").append(response.toString()).endl(); + Optional firstTable = response.getCreatedList().stream() + .filter(var -> var.getType().equals("Table")).findAny(); + firstTable.ifPresent(table -> { + log.debug().append("A table was created: ").append(table.toString()).endl(); + consoleServiceGrpc.fetchTable(FetchTableRequest.newBuilder() + .setConsoleId(consoleTicket) + .setTableId(ExportTicketHelper.exportIdToTicket(nextId++)) + .setTableName(table.getName()) + .build(), + new ResponseBuilder() + .onNext(this::onExportedTableCreationResponse) + .onError(err -> { + log.error(err).append("onError").endl(); + scheduler.runImmediately(this::awaitCommand); + }) + .onComplete(() -> { + log.debug().append("fetch complete").endl(); + }) + .build()); + }); + // otherwise go for another query + if (!firstTable.isPresent()) { + // let's give the just-executed command a little bit of time to print so we reduce + // the chance of clobbering our stdin prompt. 
+ scheduler.runAfterDelay(100, this::awaitCommand); + } + }) + .onError(err -> { + log.error(err).append("onError").endl(); + scheduler.runImmediately(this::awaitCommand); + }) + .build()); } private void onNewHandshakeResponse(final HandshakeResponse result) { @@ -230,33 +226,32 @@ private void onNewHandshakeResponse(final HandshakeResponse result) { sessionHeaderKey = Metadata.Key.of(sessionHeader, Metadata.ASCII_STRING_MARSHALLER); } log.debug().append("Session Details: {header: '") - .append(this.sessionHeader).append("', token: '") - .append(this.session.toString()).append("}").endl(); + .append(this.sessionHeader).append("', token: '") + .append(this.session.toString()).append("}").endl(); // Guess a good time to do the next refresh. final long refreshDelayMs = Math.min( - scheduler.currentTime().getMillis() + result.getTokenExpirationDelayMillis() / 3, - result.getTokenDeadlineTimeMillis() - result.getTokenExpirationDelayMillis() / 10); + scheduler.currentTime().getMillis() + result.getTokenExpirationDelayMillis() / 3, + result.getTokenDeadlineTimeMillis() - result.getTokenExpirationDelayMillis() / 10); scheduler.runAtTime(DBTimeUtils.millisToTime(refreshDelayMs), this::refreshToken); } private void refreshToken() { sessionService.refreshSessionToken(HandshakeRequest.newBuilder() - .setAuthProtocol(0) - .setPayload(ByteString.copyFromUtf8(session.toString())).build(), - new ResponseBuilder() - .onError(this::onError) - .onNext(this::onNewHandshakeResponse) - .build()); + .setAuthProtocol(0) + .setPayload(ByteString.copyFromUtf8(session.toString())).build(), + new ResponseBuilder() + .onError(this::onError) + .onNext(this::onNewHandshakeResponse) + .build()); } private void onExportedTableCreationResponse(final ExportedTableCreationResponse result) { final LogEntry entry = log.info().append("Received ExportedTableCreationResponse for {"); if (result.getResultId().hasTicket()) { - entry.append("exportId: ") - 
.append(ExportTicketHelper.ticketToExportId(result.getResultId().getTicket())); + entry.append("exportId: ").append(ExportTicketHelper.ticketToExportId(result.getResultId().getTicket())); } else { entry.append("batchOffset: ").append(result.getResultId().getBatchOffset()); } @@ -339,10 +334,10 @@ public void onCompleted() { private class AuthInterceptor implements ClientInterceptor { @Override public ClientCall interceptCall( - final MethodDescriptor methodDescriptor, final CallOptions callOptions, - final Channel channel) { + final MethodDescriptor methodDescriptor, final CallOptions callOptions, + final Channel channel) { return new ForwardingClientCall.SimpleForwardingClientCall( - channel.newCall(methodDescriptor, callOptions)) { + channel.newCall(methodDescriptor, callOptions)) { @Override public void start(final Listener responseListener, final Metadata headers) { if (session != null) { diff --git a/grpc-api-client/simple/src/main/java/io/deephaven/grpc_api/example/SimpleDeephavenClient.java b/grpc-api-client/simple/src/main/java/io/deephaven/grpc_api/example/SimpleDeephavenClient.java index d91b46c565f..168f343fa6d 100644 --- a/grpc-api-client/simple/src/main/java/io/deephaven/grpc_api/example/SimpleDeephavenClient.java +++ b/grpc-api-client/simple/src/main/java/io/deephaven/grpc_api/example/SimpleDeephavenClient.java @@ -66,12 +66,12 @@ public static void main(final String[] args) throws Exception { // Assign properties that need to be set to even turn on System.setProperty("Configuration.rootFile", "grpc-api.prop"); System.setProperty("io.deephaven.configuration.PropertyInputStreamLoader.override", - "io.deephaven.configuration.PropertyInputStreamLoaderTraditional"); + "io.deephaven.configuration.PropertyInputStreamLoaderTraditional"); final String target = args.length == 0 ? 
"localhost:8080" : args[0]; final ManagedChannel channel = ManagedChannelBuilder.forTarget(target) - .usePlaintext() - .build(); + .usePlaintext() + .build(); final Scheduler scheduler = DeephavenApiServerModule.provideScheduler(2); final SimpleDeephavenClient client = new SimpleDeephavenClient(scheduler, channel); @@ -106,11 +106,11 @@ private void start() { // no payload in this simple server auth sessionService.newSession(HandshakeRequest.newBuilder().setAuthProtocol(1).build(), - new ResponseBuilder() - .onError(this::onError) - .onComplete(this::runScript) - .onNext(this::onNewHandshakeResponse) - .build()); + new ResponseBuilder() + .onError(this::onError) + .onComplete(this::runScript) + .onNext(this::onNewHandshakeResponse) + .build()); } private void stop() { @@ -137,74 +137,67 @@ private void runScript() { log.info().append("Script Running: ").endl(); sessionService.exportNotifications(ExportNotificationRequest.getDefaultInstance(), - new ResponseBuilder() - .onNext(m -> onExportNotificationMessage("global: ", m)) - .onError((err) -> log.error().append("export notification listener error: ") - .append(err).endl()) - .onComplete( - () -> log.info().append("export notification listener completed").endl()) - .build()); + new ResponseBuilder() + .onNext(m -> onExportNotificationMessage("global: ", m)) + .onError((err) -> log.error().append("export notification listener error: ").append(err).endl()) + .onComplete(() -> log.info().append("export notification listener completed").endl()) + .build()); tableService.exportedTableUpdates(ExportedTableUpdatesRequest.getDefaultInstance(), - new ResponseBuilder() - .onNext(this::onTableUpdate) - .onError( - (err) -> log.error().append("table update listener error: ").append(err).endl()) - .onComplete(() -> log.info().append("table update listener completed").endl()) - .build()); + new ResponseBuilder() + .onNext(this::onTableUpdate) + .onError((err) -> log.error().append("table update listener error: 
").append(err).endl()) + .onComplete(() -> log.info().append("table update listener completed").endl()) + .build()); tableService.batch(BatchTableRequest.newBuilder() - .addOps(BatchTableRequest.Operation.newBuilder() - .setTimeTable(TimeTableRequest.newBuilder() - .setPeriodNanos(1_000_000_000))) - .addOps(BatchTableRequest.Operation.newBuilder() - .setTimeTable(TimeTableRequest.newBuilder() - .setPeriodNanos(12_000_000))) - .addOps(BatchTableRequest.Operation.newBuilder() - .setUpdate(SelectOrUpdateRequest.newBuilder() - .addColumnSpecs("I = i") - .addColumnSpecs("II = ii") - .addColumnSpecs("S = `` + i") - .setResultId(exportTable) - .setSourceId(TableReference.newBuilder().setBatchOffset(1).build()) - .build())) - .build(), - new ResponseBuilder() - .onError(this::onError) - .onNext(this::onExportedTableCreationResponse) - .onComplete(() -> log.info().append("Batch Complete")) - .build()); + .addOps(BatchTableRequest.Operation.newBuilder().setTimeTable(TimeTableRequest.newBuilder() + .setPeriodNanos(1_000_000_000))) + .addOps(BatchTableRequest.Operation.newBuilder().setTimeTable(TimeTableRequest.newBuilder() + .setPeriodNanos(12_000_000))) + .addOps(BatchTableRequest.Operation.newBuilder().setUpdate(SelectOrUpdateRequest.newBuilder() + .addColumnSpecs("I = i") + .addColumnSpecs("II = ii") + .addColumnSpecs("S = `` + i") + .setResultId(exportTable) + .setSourceId(TableReference.newBuilder().setBatchOffset(1).build()) + .build())) + .build(), + new ResponseBuilder() + .onError(this::onError) + .onNext(this::onExportedTableCreationResponse) + .onComplete(() -> log.info().append("Batch Complete")) + .build()); flightService.getSchema( - ExportTicketHelper.ticketToDescriptor(exportTable), - new ResponseBuilder() - .onError(this::onError) - .onNext(this::onSchemaResult) - .build()); + ExportTicketHelper.ticketToDescriptor(exportTable), + new ResponseBuilder() + .onError(this::onError) + .onNext(this::onSchemaResult) + .build()); final StreamObserver putObserver = - 
flightService.doPut(new ResponseBuilder() - .onError(this::onError) - .onComplete(() -> log.info().append("Flight PUT Complete").endl()) - .build()); + flightService.doPut(new ResponseBuilder() + .onError(this::onError) + .onComplete(() -> log.info().append("Flight PUT Complete").endl()) + .build()); flightService.doGet(Flight.Ticket.newBuilder().setTicket(exportTable.getTicket()).build(), - new ResponseBuilder() - .onError(this::onError) - .onNext(data -> { - log.info().append("DoGet Recv Payload").endl(); - putObserver.onNext(data); - }) - .onComplete(putObserver::onCompleted) - .build()); + new ResponseBuilder() + .onError(this::onError) + .onNext(data -> { + log.info().append("DoGet Recv Payload").endl(); + putObserver.onNext(data); + }) + .onComplete(putObserver::onCompleted) + .build()); } private void onSchemaResult(final Flight.SchemaResult schemaResult) { - final Schema schema = - Schema.getRootAsSchema(schemaResult.getSchema().asReadOnlyByteBuffer()); + final Schema schema = Schema.getRootAsSchema(schemaResult.getSchema().asReadOnlyByteBuffer()); final TableDefinition definition = BarrageSchemaUtil.schemaToTableDefinition(schema); - // Note: until subscriptions move to flatbuffer, we cannot distinguish between the - // all-inclusive non-existing-bitset and an empty bitset. + // Note: until subscriptions move to flatbuffer, we cannot distinguish between the all-inclusive + // non-existing-bitset and an empty bitset. 
final BitSet columns = new BitSet(); columns.set(0, definition.getColumns().length); @@ -212,7 +205,7 @@ private void onSchemaResult(final Flight.SchemaResult schemaResult) { final InstrumentedShiftAwareListener listener = new InstrumentedShiftAwareListener("test") { @Override protected void onFailureInternal(final Throwable originalException, - final UpdatePerformanceTracker.Entry sourceEntry) { + final UpdatePerformanceTracker.Entry sourceEntry) { SimpleDeephavenClient.this.onError(originalException); } @@ -224,29 +217,27 @@ public void onUpdate(final Update update) { resultTable.listenForUpdates(listener); resultSub = new BarrageClientSubscription( - ExportTicketHelper.toReadableString(exportTable), - serverChannel, BarrageClientSubscription.makeRequest(null, columns), - new BarrageStreamReader(), resultTable); + ExportTicketHelper.toReadableString(exportTable), + serverChannel, BarrageClientSubscription.makeRequest(null, columns), + new BarrageStreamReader(), resultTable); } private void onScriptComplete() { sessionService.closeSession(HandshakeRequest.newBuilder() - .setAuthProtocol(0) - .setPayload(ByteString.copyFromUtf8(session.toString())).build(), - new ResponseBuilder() - .onNext( - r -> log.info().append("release session response ").append(r.toString()).endl()) - .onError(e -> stop()) - .onComplete(this::stop) - .build()); + .setAuthProtocol(0) + .setPayload(ByteString.copyFromUtf8(session.toString())).build(), + new ResponseBuilder() + .onNext(r -> log.info().append("release session response ").append(r.toString()).endl()) + .onError(e -> stop()) + .onComplete(this::stop) + .build()); } private void onExportedTableCreationResponse(final ExportedTableCreationResponse result) { final LogEntry entry = log.info().append("Received ExportedTableCreationResponse for {"); if (result.getResultId().hasTicket()) { - entry.append("exportId: ") - .append(ExportTicketHelper.ticketToExportId(result.getResultId().getTicket())); + entry.append("exportId: 
").append(ExportTicketHelper.ticketToExportId(result.getResultId().getTicket())); } else { entry.append("batchOffset: ").append(result.getResultId().getBatchOffset()); } @@ -276,10 +267,8 @@ private void onExportedTableCreationResponse(final ExportedTableCreationResponse entry.append("}").endl(); } - private void onExportNotificationMessage(final String prefix, - final ExportNotification notification) { - final LogEntry entry = - log.info().append(prefix).append("Received ExportNotification: {id: ") + private void onExportNotificationMessage(final String prefix, final ExportNotification notification) { + final LogEntry entry = log.info().append(prefix).append("Received ExportNotification: {id: ") .append(ExportTicketHelper.ticketToExportId(notification.getTicket())) .append(", state: ").append(notification.getExportState().toString()); @@ -296,8 +285,8 @@ private void onTableUpdate(final ExportedTableUpdateMessage msg) { log.info().append("Received ExportedTableUpdatedMessage:").endl(); final LogEntry entry = log.info().append("\tid=") - .append(ExportTicketHelper.ticketToExportId(msg.getExportId())) - .append(" size=").append(msg.getSize()); + .append(ExportTicketHelper.ticketToExportId(msg.getExportId())) + .append(" size=").append(msg.getSize()); if (!msg.getUpdateFailureMessage().isEmpty()) { entry.append(" error='").append(msg.getUpdateFailureMessage()).append("'"); @@ -314,25 +303,25 @@ private void onNewHandshakeResponse(final HandshakeResponse result) { } session = UUID.fromString(result.getSessionToken().toStringUtf8()); log.info().append("Session Details: {header: '") - .append(this.sessionHeader).append("', token: '") - .append(this.session.toString()).append("}").endl(); + .append(this.sessionHeader).append("', token: '") + .append(this.session.toString()).append("}").endl(); // Guess a good time to do the next refresh. 
final long refreshDelayMs = Math.min( - scheduler.currentTime().getMillis() + result.getTokenExpirationDelayMillis() / 3, - result.getTokenDeadlineTimeMillis() - result.getTokenExpirationDelayMillis() / 10); + scheduler.currentTime().getMillis() + result.getTokenExpirationDelayMillis() / 3, + result.getTokenDeadlineTimeMillis() - result.getTokenExpirationDelayMillis() / 10); scheduler.runAtTime(DBTimeUtils.millisToTime(refreshDelayMs), this::refreshToken); } private void refreshToken() { sessionService.refreshSessionToken(HandshakeRequest.newBuilder() - .setAuthProtocol(0) - .setPayload(ByteString.copyFromUtf8(session.toString())).build(), - new ResponseBuilder() - .onError(this::onError) - .onNext(this::onNewHandshakeResponse) - .build()); + .setAuthProtocol(0) + .setPayload(ByteString.copyFromUtf8(session.toString())).build(), + new ResponseBuilder() + .onError(this::onError) + .onNext(this::onNewHandshakeResponse) + .build()); } private void onError(final Throwable t) { @@ -400,10 +389,10 @@ public void onCompleted() { private class AuthInterceptor implements ClientInterceptor { @Override public ClientCall interceptCall( - final MethodDescriptor methodDescriptor, final CallOptions callOptions, - final Channel channel) { + final MethodDescriptor methodDescriptor, final CallOptions callOptions, + final Channel channel) { return new ForwardingClientCall.SimpleForwardingClientCall( - channel.newCall(methodDescriptor, callOptions)) { + channel.newCall(methodDescriptor, callOptions)) { @Override public void start(final Listener responseListener, final Metadata headers) { final UUID currSession = session; diff --git a/grpc-api-client/src/main/java/io/deephaven/grpc_api_client/barrage/chunk/ReplicateBarrageUtils.java b/grpc-api-client/src/main/java/io/deephaven/grpc_api_client/barrage/chunk/ReplicateBarrageUtils.java index 2da8d57ac53..d08f3386c92 100644 --- a/grpc-api-client/src/main/java/io/deephaven/grpc_api_client/barrage/chunk/ReplicateBarrageUtils.java +++ 
b/grpc-api-client/src/main/java/io/deephaven/grpc_api_client/barrage/chunk/ReplicateBarrageUtils.java @@ -12,8 +12,7 @@ public class ReplicateBarrageUtils { public static void main(final String[] args) throws IOException { ReplicatePrimitiveCode.charToAllButBoolean(CharChunkInputStreamGenerator.class, - ReplicatePrimitiveCode.MAIN_SRC); - ReplicatePrimitiveCode.charToAll(CharArrayExpansionKernel.class, - ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.MAIN_SRC); + ReplicatePrimitiveCode.charToAll(CharArrayExpansionKernel.class, ReplicatePrimitiveCode.MAIN_SRC); } } diff --git a/grpc-api-client/src/main/java/io/deephaven/grpc_api_client/table/BarrageTable.java b/grpc-api-client/src/main/java/io/deephaven/grpc_api_client/table/BarrageTable.java index d1ad6986823..6754143dc2c 100644 --- a/grpc-api-client/src/main/java/io/deephaven/grpc_api_client/table/BarrageTable.java +++ b/grpc-api-client/src/main/java/io/deephaven/grpc_api_client/table/BarrageTable.java @@ -51,9 +51,9 @@ public class BarrageTable extends QueryTable implements LiveTable, BarrageMessage.Listener { private static final boolean REQUEST_LIVE_TABLE_MONITOR_REFRESH = Configuration.getInstance() - .getBooleanWithDefault("BarrageSourcedTable.requestLiveTableMonitorRefresh", true); + .getBooleanWithDefault("BarrageSourcedTable.requestLiveTableMonitorRefresh", true); public static final boolean REPLICATED_TABLE_DEBUG = - Configuration.getInstance().getBooleanWithDefault("BarrageSourcedTable.debug", false); + Configuration.getInstance().getBooleanWithDefault("BarrageSourcedTable.debug", false); private static final Logger log = LoggerFactory.getLogger(BarrageTable.class); @@ -82,17 +82,15 @@ public class BarrageTable extends QueryTable implements LiveTable, BarrageMessag private final boolean isViewPort; /** - * The client and the server update asynchronously with respect to one another. 
The client - * requests a viewport, the server will send the client the snapshot for the request and - * continue to send data that is inside of that view. Due to the asynchronous aspect of this - * protocol, the client may have multiple requests in-flight and the server may choose to honor - * the most recent request and assumes that the client no longer wants earlier but unacked + * The client and the server update asynchronously with respect to one another. The client requests a viewport, the + * server will send the client the snapshot for the request and continue to send data that is inside of that view. + * Due to the asynchronous aspect of this protocol, the client may have multiple requests in-flight and the server + * may choose to honor the most recent request and assumes that the client no longer wants earlier but unacked * viewport changes. * - * The server notifies the client which viewport it is respecting by including it inside of each - * snapshot. Note that the server assumes that the client has maintained its state prior to - * these server-side viewport acks and will not re-send data that the client should already have - * within the existing viewport. + * The server notifies the client which viewport it is respecting by including it inside of each snapshot. Note that + * the server assumes that the client has maintained its state prior to these server-side viewport acks and will not + * re-send data that the client should already have within the existing viewport. 
*/ private Index serverViewport; private BitSet serverColumns; @@ -107,9 +105,7 @@ public class BarrageTable extends QueryTable implements LiveTable, BarrageMessag /** alternative pendingUpdates container to avoid allocating, and resizing, a new instance */ private ArrayDeque shadowPendingUpdates = new ArrayDeque<>(); - /** - * if we receive an error from upstream, then we publish the error downstream and stop updating - */ + /** if we receive an error from upstream, then we publish the error downstream and stop updating */ private Throwable pendingError = null; private final List processedData; @@ -118,21 +114,21 @@ public class BarrageTable extends QueryTable implements LiveTable, BarrageMessag /** enable prev tracking only after receiving first snapshot */ private volatile int prevTrackingEnabled = 0; private static final AtomicIntegerFieldUpdater PREV_TRACKING_UPDATER = - AtomicIntegerFieldUpdater.newUpdater(BarrageTable.class, "prevTrackingEnabled"); + AtomicIntegerFieldUpdater.newUpdater(BarrageTable.class, "prevTrackingEnabled"); protected BarrageTable(final LiveTableRegistrar registrar, - final NotificationQueue notificationQueue, - final LinkedHashMap> columns, - final WritableSource[] writableSources, - final RedirectionIndex redirectionIndex, - final boolean isViewPort) { + final NotificationQueue notificationQueue, + final LinkedHashMap> columns, + final WritableSource[] writableSources, + final RedirectionIndex redirectionIndex, + final boolean isViewPort) { super(Index.FACTORY.getEmptyIndex(), columns); this.registrar = registrar; this.notificationQueue = notificationQueue; this.redirectionIndex = redirectionIndex; this.refreshEntry = UpdatePerformanceTracker.getInstance() - .getEntry("BarrageSourcedTable refresh " + System.identityHashCode(this)); + .getEntry("BarrageSourcedTable refresh " + System.identityHashCode(this)); this.isViewPort = isViewPort; if (isViewPort) { @@ -143,14 +139,12 @@ protected BarrageTable(final LiveTableRegistrar registrar, 
this.destSources = new WritableSource[writableSources.length]; for (int ii = 0; ii < writableSources.length; ++ii) { - destSources[ii] = (WritableSource) ReinterpretUtilities - .maybeConvertToPrimitive(writableSources[ii]); + destSources[ii] = (WritableSource) ReinterpretUtilities.maybeConvertToPrimitive(writableSources[ii]); } // we always start empty, and can be notified this cycle if we are refreshed final long currentClockValue = LogicalClock.DEFAULT.currentValue(); - setLastNotificationStep( - LogicalClock.getState(currentClockValue) == LogicalClock.State.Updating + setLastNotificationStep(LogicalClock.getState(currentClockValue) == LogicalClock.State.Updating ? LogicalClock.getStep(currentClockValue) - 1 : LogicalClock.getStep(currentClockValue)); @@ -168,8 +162,7 @@ protected BarrageTable(final LiveTableRegistrar registrar, } public ChunkType[] getWireChunkTypes() { - return Arrays.stream(destSources).map(s -> ChunkType.fromElementType(s.getType())) - .toArray(ChunkType[]::new); + return Arrays.stream(destSources).map(s -> ChunkType.fromElementType(s.getType())).toArray(ChunkType[]::new); } public Class[] getWireTypes() { @@ -177,21 +170,16 @@ public Class[] getWireTypes() { } public Class[] getWireComponentTypes() { - return Arrays.stream(destSources).map(ColumnSource::getComponentType) - .toArray(Class[]::new); + return Arrays.stream(destSources).map(ColumnSource::getComponentType).toArray(Class[]::new); } /** - * Invoke sealTable to prevent further updates from being processed and to mark this source - * table as static. + * Invoke sealTable to prevent further updates from being processed and to mark this source table as static. 
* - * @param onSealRunnable pass a callback that gets invoked once the table has finished applying - * updates - * @param onSealFailure pass a callback that gets invoked if the table fails to finish applying - * updates + * @param onSealRunnable pass a callback that gets invoked once the table has finished applying updates + * @param onSealFailure pass a callback that gets invoked if the table fails to finish applying updates */ - public synchronized void sealTable(final Runnable onSealRunnable, - final Runnable onSealFailure) { + public synchronized void sealTable(final Runnable onSealRunnable, final Runnable onSealFailure) { // TODO (core#803): sealing of static table data acquired over flight/barrage setRefreshing(false); sealed = true; @@ -203,8 +191,7 @@ public synchronized void sealTable(final Runnable onSealRunnable, @Override public void handleBarrageMessage(final BarrageMessage update) { if (unsubscribed || sealed) { - beginLog(LogLevel.INFO).append(": Discarding update for unsubscribed/sealed table!") - .endl(); + beginLog(LogLevel.INFO).append(": Discarding update for unsubscribed/sealed table!").endl(); return; } @@ -220,7 +207,7 @@ public void handleBarrageError(Throwable t) { } private Index.IndexUpdateCoalescer processUpdate(final BarrageMessage update, - final Index.IndexUpdateCoalescer coalescer) { + final Index.IndexUpdateCoalescer coalescer) { if (REPLICATED_TABLE_DEBUG) { saveForDebugging(update); @@ -234,33 +221,31 @@ private Index.IndexUpdateCoalescer processUpdate(final BarrageMessage update, } } final ShiftAwareListener.Update up = new ShiftAwareListener.Update( - update.rowsAdded, update.rowsRemoved, mods, update.shifted, modifiedColumnSet); + update.rowsAdded, update.rowsRemoved, mods, update.shifted, modifiedColumnSet); beginLog(LogLevel.INFO).append(": Processing delta updates ") - .append(update.firstSeq).append("-").append(update.lastSeq) - .append(" update=").append(up).endl(); + 
.append(update.firstSeq).append("-").append(update.lastSeq) + .append(" update=").append(up).endl(); mods.close(); } if (update.isSnapshot) { serverViewport = update.snapshotIndex == null ? null : update.snapshotIndex.clone(); - serverColumns = - update.snapshotColumns == null ? null : (BitSet) update.snapshotColumns.clone(); + serverColumns = update.snapshotColumns == null ? null : (BitSet) update.snapshotColumns.clone(); } - // make sure that these index updates make some sense compared with each other, and our - // current view of the table + // make sure that these index updates make some sense compared with each other, and our current view of the + // table final Index currentIndex = getIndex(); final boolean mightBeInitialSnapshot = currentIndex.empty() && update.isSnapshot; try (final Index currRowsFromPrev = currentIndex.clone(); - final Index populatedRows = - (serverViewport != null ? currentIndex.subindexByPos(serverViewport) : null)) { + final Index populatedRows = + (serverViewport != null ? currentIndex.subindexByPos(serverViewport) : null)) { // removes currentIndex.remove(update.rowsRemoved); - try (final Index removed = - serverViewport != null ? populatedRows.extract(update.rowsRemoved) : null) { + try (final Index removed = serverViewport != null ? populatedRows.extract(update.rowsRemoved) : null) { freeRows(removed != null ? 
removed : update.rowsRemoved); } @@ -281,23 +266,21 @@ private Index.IndexUpdateCoalescer processUpdate(final BarrageMessage update, } if (update.rowsIncluded.nonempty()) { - try ( - final WritableChunkSink.FillFromContext redirContext = + try (final WritableChunkSink.FillFromContext redirContext = redirectionIndex.makeFillFromContext(update.rowsIncluded.intSize()); - final Index destinationIndex = getFreeRows(update.rowsIncluded.size())) { + final Index destinationIndex = getFreeRows(update.rowsIncluded.size())) { // Update redirection mapping: - redirectionIndex.fillFromChunk(redirContext, - destinationIndex.asKeyIndicesChunk(), update.rowsIncluded); + redirectionIndex.fillFromChunk(redirContext, destinationIndex.asKeyIndicesChunk(), + update.rowsIncluded); // Update data chunk-wise: for (int ii = 0; ii < update.addColumnData.length; ++ii) { if (isSubscribedColumn(ii)) { - final Chunk data = - update.addColumnData[ii].data; - Assert.eq(data.size(), "delta.includedAdditions.size()", - destinationIndex.size(), "destinationIndex.size()"); + final Chunk data = update.addColumnData[ii].data; + Assert.eq(data.size(), "delta.includedAdditions.size()", destinationIndex.size(), + "destinationIndex.size()"); try (final WritableChunkSink.FillFromContext ctxt = - destSources[ii].makeFillFromContext(destinationIndex.intSize())) { + destSources[ii].makeFillFromContext(destinationIndex.intSize())) { destSources[ii].fillFromChunk(ctxt, data, destinationIndex); } } @@ -314,18 +297,17 @@ private Index.IndexUpdateCoalescer processUpdate(final BarrageMessage update, modifiedColumnSet.setColumnWithIndex(ii); - try ( - final RedirectionIndex.FillContext redirContext = + try (final RedirectionIndex.FillContext redirContext = redirectionIndex.makeFillContext(column.rowsModified.intSize(), null); - final WritableLongChunk keys = - WritableLongChunk.makeWritableChunk(column.rowsModified.intSize())) { + final WritableLongChunk keys = + 
WritableLongChunk.makeWritableChunk(column.rowsModified.intSize())) { redirectionIndex.fillChunk(redirContext, keys, column.rowsModified); for (int i = 0; i < keys.size(); ++i) { Assert.notEquals(keys.get(i), "keys[i]", Index.NULL_KEY, "Index.NULL_KEY"); } try (final WritableChunkSink.FillFromContext ctxt = - destSources[ii].makeFillFromContext(keys.size())) { + destSources[ii].makeFillFromContext(keys.size())) { destSources[ii].fillFromChunkUnordered(ctxt, column.data, keys); } } @@ -340,20 +322,16 @@ private Index.IndexUpdateCoalescer processUpdate(final BarrageMessage update, } if (update.isSnapshot && !mightBeInitialSnapshot) { - // This applies to viewport or subscribed column changes; after the first snapshot - // later snapshots can't - // change the index. In this case, we apply the data from the snapshot to local - // column sources but + // This applies to viewport or subscribed column changes; after the first snapshot later snapshots can't + // change the index. In this case, we apply the data from the snapshot to local column sources but // otherwise cannot communicate this change to listeners. return coalescer; } final ShiftAwareListener.Update downstream = new ShiftAwareListener.Update( - update.rowsAdded.clone(), update.rowsRemoved.clone(), totalMods, update.shifted, - modifiedColumnSet); - return (coalescer == null) - ? new Index.IndexUpdateCoalescer(currRowsFromPrev, downstream) - : coalescer.update(downstream); + update.rowsAdded.clone(), update.rowsRemoved.clone(), totalMods, update.shifted, modifiedColumnSet); + return (coalescer == null) ? 
new Index.IndexUpdateCoalescer(currRowsFromPrev, downstream) + : coalescer.update(downstream); } } @@ -400,13 +378,12 @@ private void freeRows(final Index rowsToFree) { // Note: these are NOT OrderedKeyIndices until after the call to .sort() try (final WritableLongChunk redirectedRows = - WritableLongChunk.makeWritableChunk(rowsToFree.intSize("BarrageSourcedTable"))) { + WritableLongChunk.makeWritableChunk(rowsToFree.intSize("BarrageSourcedTable"))) { redirectedRows.setSize(0); rowsToFree.forAllLongs(next -> { final long prevIndex = redirectionIndex.remove(next); - Assert.assertion(prevIndex != -1, "prevIndex != -1", prevIndex, "prevIndex", next, - "next"); + Assert.assertion(prevIndex != -1, "prevIndex != -1", prevIndex, "prevIndex", next, "next"); redirectedRows.add(prevIndex); }); @@ -421,8 +398,7 @@ public void refresh() { try { realRefresh(); } catch (Exception e) { - beginLog(LogLevel.ERROR).append(": Failure during BarrageSourcedTable refresh: ") - .append(e).endl(); + beginLog(LogLevel.ERROR).append(": Failure during BarrageSourcedTable refresh: ").append(e).endl(); notifyListenersOnError(e, null); } finally { refreshEntry.onUpdateEnd(); @@ -441,8 +417,7 @@ private synchronized void realRefresh() { // publish one last clear downstream; this data would be stale final Index allRows = getIndex().clone(); getIndex().remove(allRows); - notifyListeners(Index.FACTORY.getEmptyIndex(), allRows, - Index.FACTORY.getEmptyIndex()); + notifyListeners(Index.FACTORY.getEmptyIndex(), allRows, Index.FACTORY.getEmptyIndex()); } cleanup(); return; @@ -455,8 +430,7 @@ private synchronized void realRefresh() { pendingUpdates = shadowPendingUpdates; shadowPendingUpdates = localPendingUpdates; - // we should allow the next pass to start fresh, so we make sure that the queues were - // actually drained + // we should allow the next pass to start fresh, so we make sure that the queues were actually drained // on the last refresh Assert.eqZero(pendingUpdates.size(), 
"pendingUpdates.size()"); } @@ -530,8 +504,7 @@ public void enqueueError(final Throwable e) { } /** - * Set up a replicated table from the given proxy, id and columns. This is intended for internal - * use only. + * Set up a replicated table from the given proxy, id and columns. This is intended for internal use only. * * @param tableDefinition the table definition * @param isViewPort true if the table will be a viewport. @@ -539,31 +512,26 @@ public void enqueueError(final Throwable e) { * @return a properly initialized {@link BarrageTable} */ @InternalUseOnly - public static BarrageTable make(final TableDefinition tableDefinition, - final boolean isViewPort) { - return make(LiveTableMonitor.DEFAULT, LiveTableMonitor.DEFAULT, tableDefinition, - isViewPort); + public static BarrageTable make(final TableDefinition tableDefinition, final boolean isViewPort) { + return make(LiveTableMonitor.DEFAULT, LiveTableMonitor.DEFAULT, tableDefinition, isViewPort); } @VisibleForTesting public static BarrageTable make(final LiveTableRegistrar registrar, - final NotificationQueue queue, - final TableDefinition tableDefinition, - final boolean isViewPort) { + final NotificationQueue queue, + final TableDefinition tableDefinition, + final boolean isViewPort) { final ColumnDefinition[] columns = tableDefinition.getColumns(); final WritableSource[] writableSources = new WritableSource[columns.length]; - final RedirectionIndex redirectionIndex = - RedirectionIndex.FACTORY.createRedirectionIndex(8); + final RedirectionIndex redirectionIndex = RedirectionIndex.FACTORY.createRedirectionIndex(8); final LinkedHashMap> finalColumns = - makeColumns(columns, writableSources, redirectionIndex); + makeColumns(columns, writableSources, redirectionIndex); - final BarrageTable table = new BarrageTable(registrar, queue, finalColumns, writableSources, - redirectionIndex, isViewPort); + final BarrageTable table = + new BarrageTable(registrar, queue, finalColumns, writableSources, redirectionIndex, 
isViewPort); - // Even if this source table will eventually be static, the data isn't here already. Static - // tables need to - // have refreshing set to false after processing data but prior to publishing the object to - // consumers. + // Even if this source table will eventually be static, the data isn't here already. Static tables need to + // have refreshing set to false after processing data but prior to publishing the object to consumers. table.setRefreshing(true); return table; @@ -575,17 +543,16 @@ public static BarrageTable make(final LiveTableRegistrar registrar, * @apiNote emptyRedirectionIndex must be initialized and empty. */ @NotNull - protected static LinkedHashMap> makeColumns( - final ColumnDefinition[] columns, - final WritableSource[] writableSources, - final RedirectionIndex emptyRedirectionIndex) { + protected static LinkedHashMap> makeColumns(final ColumnDefinition[] columns, + final WritableSource[] writableSources, + final RedirectionIndex emptyRedirectionIndex) { final LinkedHashMap> finalColumns = new LinkedHashMap<>(); for (int ii = 0; ii < columns.length; ii++) { // noinspection unchecked - writableSources[ii] = ArrayBackedColumnSource.getMemoryColumnSource(0, - columns[ii].getDataType(), columns[ii].getComponentType()); + writableSources[ii] = ArrayBackedColumnSource.getMemoryColumnSource(0, columns[ii].getDataType(), + columns[ii].getComponentType()); finalColumns.put(columns[ii].getName(), - new RedirectedColumnSource<>(emptyRedirectionIndex, writableSources[ii], 0)); + new RedirectedColumnSource<>(emptyRedirectionIndex, writableSources[ii], 0)); } return finalColumns; @@ -609,8 +576,8 @@ private void doWakeup() { } /** - * Check if this table is a viewport. A viewport table is a partial view of another table. If - * this returns false then this table contains the entire source table it was based on. + * Check if this table is a viewport. A viewport table is a partial view of another table. 
If this returns false + * then this table contains the entire source table it was based on. * * @return true if this table was a viewport. */ diff --git a/grpc-api-client/src/main/java/io/deephaven/grpc_api_client/util/BarrageProtoUtil.java b/grpc-api-client/src/main/java/io/deephaven/grpc_api_client/util/BarrageProtoUtil.java index 1772ff9ead0..5f1b8ccfac2 100644 --- a/grpc-api-client/src/main/java/io/deephaven/grpc_api_client/util/BarrageProtoUtil.java +++ b/grpc-api-client/src/main/java/io/deephaven/grpc_api_client/util/BarrageProtoUtil.java @@ -23,7 +23,7 @@ public class BarrageProtoUtil { public static ByteBuffer toByteBuffer(final Index index) { // noinspection UnstableApiUsage try (final ExposedByteArrayOutputStream baos = new ExposedByteArrayOutputStream(); - final LittleEndianDataOutputStream oos = new LittleEndianDataOutputStream(baos)) { + final LittleEndianDataOutputStream oos = new LittleEndianDataOutputStream(baos)) { ExternalizableIndexUtils.writeExternalCompressedDeltas(oos, index); oos.flush(); return ByteBuffer.wrap(baos.peekBuffer(), 0, baos.size()); @@ -35,11 +35,10 @@ public static ByteBuffer toByteBuffer(final Index index) { public static Index toIndex(final ByteBuffer string) { // noinspection UnstableApiUsage try (final InputStream bais = new ByteBufferInputStream(string); - final LittleEndianDataInputStream ois = new LittleEndianDataInputStream(bais)) { + final LittleEndianDataInputStream ois = new LittleEndianDataInputStream(bais)) { return ExternalizableIndexUtils.readExternalCompressedDelta(ois); } catch (final IOException e) { - throw new UncheckedDeephavenException("Unexpected exception during deserialization: ", - e); + throw new UncheckedDeephavenException("Unexpected exception during deserialization: ", e); } } diff --git a/grpc-api-client/src/main/java/io/deephaven/grpc_api_client/util/GrpcServiceOverrideBuilder.java b/grpc-api-client/src/main/java/io/deephaven/grpc_api_client/util/GrpcServiceOverrideBuilder.java index 
a0fdbf8a3f0..ee8a64b70ca 100644 --- a/grpc-api-client/src/main/java/io/deephaven/grpc_api_client/util/GrpcServiceOverrideBuilder.java +++ b/grpc-api-client/src/main/java/io/deephaven/grpc_api_client/util/GrpcServiceOverrideBuilder.java @@ -26,7 +26,7 @@ private static class GrpcOverrride { private final ServerCallHandler handler; private GrpcOverrride(@NotNull MethodDescriptor method, - @NotNull ServerCallHandler handler) { + @NotNull ServerCallHandler handler) { this.method = method; this.handler = handler; } @@ -45,98 +45,94 @@ private GrpcServiceOverrideBuilder(ServerServiceDefinition baseDefinition, Strin this.serviceName = serviceName; } - public static GrpcServiceOverrideBuilder newBuilder(ServerServiceDefinition baseDefinition, - String serviceName) { + public static GrpcServiceOverrideBuilder newBuilder(ServerServiceDefinition baseDefinition, String serviceName) { return new GrpcServiceOverrideBuilder(baseDefinition, serviceName); } public GrpcServiceOverrideBuilder override(MethodDescriptor method, - ServerCalls.BidiStreamingMethod handler) { + ServerCalls.BidiStreamingMethod handler) { validateMethodType(method.getType(), MethodDescriptor.MethodType.BIDI_STREAMING); overrides.add(new GrpcOverrride<>(method, ServerCalls.asyncBidiStreamingCall(handler))); return this; } public GrpcServiceOverrideBuilder override(MethodDescriptor method, - ServerCalls.ServerStreamingMethod handler) { + ServerCalls.ServerStreamingMethod handler) { validateMethodType(method.getType(), MethodDescriptor.MethodType.SERVER_STREAMING); overrides.add(new GrpcOverrride<>(method, ServerCalls.asyncServerStreamingCall(handler))); return this; } public GrpcServiceOverrideBuilder override(MethodDescriptor method, - ServerCalls.ClientStreamingMethod handler) { + ServerCalls.ClientStreamingMethod handler) { validateMethodType(method.getType(), MethodDescriptor.MethodType.CLIENT_STREAMING); overrides.add(new GrpcOverrride<>(method, ServerCalls.asyncClientStreamingCall(handler))); return 
this; } public GrpcServiceOverrideBuilder override(MethodDescriptor method, - ServerCalls.UnaryMethod handler) { + ServerCalls.UnaryMethod handler) { validateMethodType(method.getType(), MethodDescriptor.MethodType.UNARY); overrides.add(new GrpcOverrride<>(method, ServerCalls.asyncUnaryCall(handler))); return this; } public GrpcServiceOverrideBuilder onNextOverride( - final Delegate delegate, - final String methodName, - final MethodDescriptor descriptor, - final MethodDescriptor.Marshaller requestMarshaller) { + final Delegate delegate, + final String methodName, + final MethodDescriptor descriptor, + final MethodDescriptor.Marshaller requestMarshaller) { return override(MethodDescriptor.newBuilder() - .setType(MethodDescriptor.MethodType.UNARY) - .setFullMethodName(MethodDescriptor.generateFullMethodName(serviceName, methodName)) - .setSampledToLocalTracing(false) - .setRequestMarshaller(requestMarshaller) - .setResponseMarshaller( - ProtoUtils.marshaller(BrowserFlight.BrowserNextResponse.getDefaultInstance())) - .setSchemaDescriptor(descriptor.getSchemaDescriptor()) - .build(), new NextBrowserStreamMethod<>(delegate)); + .setType(MethodDescriptor.MethodType.UNARY) + .setFullMethodName(MethodDescriptor.generateFullMethodName(serviceName, methodName)) + .setSampledToLocalTracing(false) + .setRequestMarshaller(requestMarshaller) + .setResponseMarshaller(ProtoUtils.marshaller(BrowserFlight.BrowserNextResponse.getDefaultInstance())) + .setSchemaDescriptor(descriptor.getSchemaDescriptor()) + .build(), new NextBrowserStreamMethod<>(delegate)); } public GrpcServiceOverrideBuilder onOpenOverride( - final Delegate delegate, - final String methodName, - final MethodDescriptor descriptor, - final MethodDescriptor.Marshaller requestMarshaller, - final MethodDescriptor.Marshaller responseMarshaller) { + final Delegate delegate, + final String methodName, + final MethodDescriptor descriptor, + final MethodDescriptor.Marshaller requestMarshaller, + final 
MethodDescriptor.Marshaller responseMarshaller) { return override(MethodDescriptor.newBuilder() - .setType(MethodDescriptor.MethodType.SERVER_STREAMING) - .setFullMethodName(MethodDescriptor.generateFullMethodName(serviceName, methodName)) - .setSampledToLocalTracing(false) - .setRequestMarshaller(requestMarshaller) - .setResponseMarshaller(responseMarshaller) - .setSchemaDescriptor(descriptor.getSchemaDescriptor()) - .build(), new OpenBrowserStreamMethod<>(delegate)); + .setType(MethodDescriptor.MethodType.SERVER_STREAMING) + .setFullMethodName(MethodDescriptor.generateFullMethodName(serviceName, methodName)) + .setSampledToLocalTracing(false) + .setRequestMarshaller(requestMarshaller) + .setResponseMarshaller(responseMarshaller) + .setSchemaDescriptor(descriptor.getSchemaDescriptor()) + .build(), new OpenBrowserStreamMethod<>(delegate)); } public GrpcServiceOverrideBuilder onBidiOverride( - final BidiDelegate delegate, - final String methodName, - final MethodDescriptor descriptor, - final MethodDescriptor.Marshaller requestMarshaller, - final MethodDescriptor.Marshaller responseMarshaller) { + final BidiDelegate delegate, + final String methodName, + final MethodDescriptor descriptor, + final MethodDescriptor.Marshaller requestMarshaller, + final MethodDescriptor.Marshaller responseMarshaller) { return override(MethodDescriptor.newBuilder() - .setType(MethodDescriptor.MethodType.BIDI_STREAMING) - .setFullMethodName(MethodDescriptor.generateFullMethodName(serviceName, methodName)) - .setSampledToLocalTracing(false) - .setRequestMarshaller(requestMarshaller) - .setResponseMarshaller(responseMarshaller) - .setSchemaDescriptor(descriptor.getSchemaDescriptor()) - .build(), new BidiStreamMethod<>(delegate)); + .setType(MethodDescriptor.MethodType.BIDI_STREAMING) + .setFullMethodName(MethodDescriptor.generateFullMethodName(serviceName, methodName)) + .setSampledToLocalTracing(false) + .setRequestMarshaller(requestMarshaller) + .setResponseMarshaller(responseMarshaller) 
+ .setSchemaDescriptor(descriptor.getSchemaDescriptor()) + .build(), new BidiStreamMethod<>(delegate)); } public ServerServiceDefinition build() { final String service = baseDefinition.getServiceDescriptor().getName(); final Set overrideMethodNames = overrides.stream() - .map(o -> o.method.getFullMethodName()) - .collect(Collectors.toSet()); + .map(o -> o.method.getFullMethodName()) + .collect(Collectors.toSet()); - // Make sure we preserve SchemaDescriptor fields on methods so that gRPC reflection still - // works. - final ServiceDescriptor.Builder serviceDescriptorBuilder = - ServiceDescriptor.newBuilder(service) + // Make sure we preserve SchemaDescriptor fields on methods so that gRPC reflection still works. + final ServiceDescriptor.Builder serviceDescriptorBuilder = ServiceDescriptor.newBuilder(service) .setSchemaDescriptor(baseDefinition.getServiceDescriptor().getSchemaDescriptor()); // define descriptor overrides @@ -144,40 +140,39 @@ public ServerServiceDefinition build() { // keep non-overridden descriptors baseDefinition.getServiceDescriptor().getMethods().stream() - .filter(d -> !overrideMethodNames.contains(d.getFullMethodName())) - .forEach(serviceDescriptorBuilder::addMethod); + .filter(d -> !overrideMethodNames.contains(d.getFullMethodName())) + .forEach(serviceDescriptorBuilder::addMethod); final ServiceDescriptor serviceDescriptor = serviceDescriptorBuilder.build(); - ServerServiceDefinition.Builder serviceBuilder = - ServerServiceDefinition.builder(serviceDescriptor); + ServerServiceDefinition.Builder serviceBuilder = ServerServiceDefinition.builder(serviceDescriptor); // add method overrides overrides.forEach(dp -> dp.addMethod(serviceBuilder)); // add non-overridden methods baseDefinition.getMethods().stream() - .filter(d -> !overrideMethodNames.contains(d.getMethodDescriptor().getFullMethodName())) - .forEach(serviceBuilder::addMethod); + .filter(d -> !overrideMethodNames.contains(d.getMethodDescriptor().getFullMethodName())) + 
.forEach(serviceBuilder::addMethod); return serviceBuilder.build(); } public static MethodDescriptor descriptorFor( - final MethodDescriptor.MethodType methodType, - final String serviceName, - final String methodName, - final MethodDescriptor.Marshaller requestMarshaller, - final MethodDescriptor.Marshaller responseMarshaller, - final MethodDescriptor descriptor) { + final MethodDescriptor.MethodType methodType, + final String serviceName, + final String methodName, + final MethodDescriptor.Marshaller requestMarshaller, + final MethodDescriptor.Marshaller responseMarshaller, + final MethodDescriptor descriptor) { return MethodDescriptor.newBuilder() - .setType(methodType) - .setFullMethodName(MethodDescriptor.generateFullMethodName(serviceName, methodName)) - .setSampledToLocalTracing(false) - .setRequestMarshaller(requestMarshaller) - .setResponseMarshaller(responseMarshaller) - .setSchemaDescriptor(descriptor.getSchemaDescriptor()) - .build(); + .setType(methodType) + .setFullMethodName(MethodDescriptor.generateFullMethodName(serviceName, methodName)) + .setSampledToLocalTracing(false) + .setRequestMarshaller(requestMarshaller) + .setResponseMarshaller(responseMarshaller) + .setSchemaDescriptor(descriptor.getSchemaDescriptor()) + .build(); } @FunctionalInterface @@ -185,8 +180,7 @@ public interface Delegate { void doInvoke(final ReqT request, final StreamObserver responseObserver); } - public static class OpenBrowserStreamMethod - implements ServerCalls.ServerStreamingMethod { + public static class OpenBrowserStreamMethod implements ServerCalls.ServerStreamingMethod { private final Delegate delegate; @@ -196,16 +190,14 @@ public OpenBrowserStreamMethod(final Delegate delegate) { @Override public void invoke(final ReqT request, final StreamObserver responseObserver) { - final ServerCallStreamObserver serverCall = - (ServerCallStreamObserver) responseObserver; + final ServerCallStreamObserver serverCall = (ServerCallStreamObserver) responseObserver; 
serverCall.disableAutoInboundFlowControl(); serverCall.request(Integer.MAX_VALUE); delegate.doInvoke(request, responseObserver); } } - public static class NextBrowserStreamMethod - implements ServerCalls.UnaryMethod { + public static class NextBrowserStreamMethod implements ServerCalls.UnaryMethod { private final Delegate delegate; @@ -224,8 +216,7 @@ public interface BidiDelegate { StreamObserver doInvoke(final StreamObserver responseObserver); } - public static class BidiStreamMethod - implements ServerCalls.BidiStreamingMethod { + public static class BidiStreamMethod implements ServerCalls.BidiStreamingMethod { private final BidiDelegate delegate; public BidiStreamMethod(final BidiDelegate delegate) { @@ -234,8 +225,7 @@ public BidiStreamMethod(final BidiDelegate delegate) { @Override public StreamObserver invoke(final StreamObserver responseObserver) { - final ServerCallStreamObserver serverCall = - (ServerCallStreamObserver) responseObserver; + final ServerCallStreamObserver serverCall = (ServerCallStreamObserver) responseObserver; serverCall.disableAutoInboundFlowControl(); serverCall.request(Integer.MAX_VALUE); return delegate.doInvoke(responseObserver); @@ -243,10 +233,10 @@ public StreamObserver invoke(final StreamObserver responseObserver) } private static void validateMethodType(MethodDescriptor.MethodType methodType, - MethodDescriptor.MethodType handlerType) { + MethodDescriptor.MethodType handlerType) { if (methodType != handlerType) { throw new IllegalArgumentException("Provided method's type (" + methodType.name() - + ") does not match handler's type of " + handlerType.name()); + + ") does not match handler's type of " + handlerType.name()); } } } diff --git a/grpc-api-client/src/test/java/io/deephaven/grpc_api_client/barrage/chunk/BarrageColumnRoundTripTest.java b/grpc-api-client/src/test/java/io/deephaven/grpc_api_client/barrage/chunk/BarrageColumnRoundTripTest.java index 951965f51ee..5524219af62 100644 --- 
a/grpc-api-client/src/test/java/io/deephaven/grpc_api_client/barrage/chunk/BarrageColumnRoundTripTest.java +++ b/grpc-api-client/src/test/java/io/deephaven/grpc_api_client/barrage/chunk/BarrageColumnRoundTripTest.java @@ -37,18 +37,16 @@ public class BarrageColumnRoundTripTest extends LiveTableTestCase { private static final ChunkInputStreamGenerator.Options OPT_DEFAULT_DH_NULLS = - new ChunkInputStreamGenerator.Options.Builder() - .setUseDeephavenNulls(true) - .build(); - private static final ChunkInputStreamGenerator.Options OPT_DEFAULT = - new ChunkInputStreamGenerator.Options.Builder() + new ChunkInputStreamGenerator.Options.Builder() + .setUseDeephavenNulls(true) + .build(); + private static final ChunkInputStreamGenerator.Options OPT_DEFAULT = new ChunkInputStreamGenerator.Options.Builder() .build(); - private static final ChunkInputStreamGenerator.Options[] options = - new ChunkInputStreamGenerator.Options[] { - OPT_DEFAULT_DH_NULLS, - OPT_DEFAULT - }; + private static final ChunkInputStreamGenerator.Options[] options = new ChunkInputStreamGenerator.Options[] { + OPT_DEFAULT_DH_NULLS, + OPT_DEFAULT + }; public void testCharChunkSerialization() throws IOException { final Random random = new Random(0); @@ -63,14 +61,12 @@ public void testCharChunkSerialization() throws IOException { final WritableCharChunk computed = utC.asWritableCharChunk(); if (subset == null) { for (int i = 0; i < original.size(); ++i) { - Assert.equals(original.get(i), "original.get(i)", computed.get(i), - "computed.get(i)"); + Assert.equals(original.get(i), "original.get(i)", computed.get(i), "computed.get(i)"); } } else { final MutableInt off = new MutableInt(); - subset.forAllLongs(key -> Assert.equals(original.get((int) key), - "original.get(key)", computed.get(off.getAndIncrement()), - "computed.get(off.getAndIncrement())")); + subset.forAllLongs(key -> Assert.equals(original.get((int) key), "original.get(key)", + computed.get(off.getAndIncrement()), 
"computed.get(off.getAndIncrement())")); } }); } @@ -89,14 +85,12 @@ public void testByteChunkSerialization() throws IOException { final WritableByteChunk computed = utC.asWritableByteChunk(); if (subset == null) { for (int i = 0; i < original.size(); ++i) { - Assert.equals(original.get(i), "original.get(i)", computed.get(i), - "computed.get(i)"); + Assert.equals(original.get(i), "original.get(i)", computed.get(i), "computed.get(i)"); } } else { final MutableInt off = new MutableInt(); - subset.forAllLongs(key -> Assert.equals(original.get((int) key), - "original.get(key)", computed.get(off.getAndIncrement()), - "computed.get(off.getAndIncrement())")); + subset.forAllLongs(key -> Assert.equals(original.get((int) key), "original.get(key)", + computed.get(off.getAndIncrement()), "computed.get(off.getAndIncrement())")); } }); } @@ -115,14 +109,12 @@ public void testShortChunkSerialization() throws IOException { final WritableShortChunk computed = utC.asWritableShortChunk(); if (subset == null) { for (int i = 0; i < original.size(); ++i) { - Assert.equals(original.get(i), "original.get(i)", computed.get(i), - "computed.get(i)"); + Assert.equals(original.get(i), "original.get(i)", computed.get(i), "computed.get(i)"); } } else { final MutableInt off = new MutableInt(); - subset.forAllLongs(key -> Assert.equals(original.get((int) key), - "original.get(key)", computed.get(off.getAndIncrement()), - "computed.get(off.getAndIncrement())")); + subset.forAllLongs(key -> Assert.equals(original.get((int) key), "original.get(key)", + computed.get(off.getAndIncrement()), "computed.get(off.getAndIncrement())")); } }); } @@ -141,14 +133,12 @@ public void testIntChunkSerialization() throws IOException { final WritableIntChunk computed = utC.asWritableIntChunk(); if (subset == null) { for (int i = 0; i < original.size(); ++i) { - Assert.equals(original.get(i), "original.get(i)", computed.get(i), - "computed.get(i)"); + Assert.equals(original.get(i), "original.get(i)", computed.get(i), 
"computed.get(i)"); } } else { final MutableInt off = new MutableInt(); - subset.forAllLongs(key -> Assert.equals(original.get((int) key), - "original.get(key)", computed.get(off.getAndIncrement()), - "computed.get(off.getAndIncrement())")); + subset.forAllLongs(key -> Assert.equals(original.get((int) key), "original.get(key)", + computed.get(off.getAndIncrement()), "computed.get(off.getAndIncrement())")); } }); } @@ -167,14 +157,12 @@ public void testLongChunkSerialization() throws IOException { final WritableLongChunk computed = utC.asWritableLongChunk(); if (subset == null) { for (int i = 0; i < original.size(); ++i) { - Assert.equals(original.get(i), "original.get(i)", computed.get(i), - "computed.get(i)"); + Assert.equals(original.get(i), "original.get(i)", computed.get(i), "computed.get(i)"); } } else { final MutableInt off = new MutableInt(); - subset.forAllLongs(key -> Assert.equals(original.get((int) key), - "original.get(key)", computed.get(off.getAndIncrement()), - "computed.get(off.getAndIncrement())")); + subset.forAllLongs(key -> Assert.equals(original.get((int) key), "original.get(key)", + computed.get(off.getAndIncrement()), "computed.get(off.getAndIncrement())")); } }); } @@ -193,14 +181,12 @@ public void testFloatChunkSerialization() throws IOException { final WritableFloatChunk computed = utC.asWritableFloatChunk(); if (subset == null) { for (int i = 0; i < original.size(); ++i) { - Assert.equals(original.get(i), "original.get(i)", computed.get(i), - "computed.get(i)"); + Assert.equals(original.get(i), "original.get(i)", computed.get(i), "computed.get(i)"); } } else { final MutableInt off = new MutableInt(); - subset.forAllLongs(key -> Assert.equals(original.get((int) key), - "original.get(key)", computed.get(off.getAndIncrement()), - "computed.get(off.getAndIncrement())")); + subset.forAllLongs(key -> Assert.equals(original.get((int) key), "original.get(key)", + computed.get(off.getAndIncrement()), "computed.get(off.getAndIncrement())")); } }); 
} @@ -219,14 +205,12 @@ public void testDoubleChunkSerialization() throws IOException { final WritableDoubleChunk computed = utC.asWritableDoubleChunk(); if (subset == null) { for (int i = 0; i < original.size(); ++i) { - Assert.equals(original.get(i), "original.get(i)", computed.get(i), - "computed.get(i)"); + Assert.equals(original.get(i), "original.get(i)", computed.get(i), "computed.get(i)"); } } else { final MutableInt off = new MutableInt(); - subset.forAllLongs(key -> Assert.equals(original.get((int) key), - "original.get(key)", computed.get(off.getAndIncrement()), - "computed.get(off.getAndIncrement())")); + subset.forAllLongs(key -> Assert.equals(original.get((int) key), "original.get(key)", + computed.get(off.getAndIncrement()), "computed.get(off.getAndIncrement())")); } }); } @@ -234,47 +218,47 @@ public void testDoubleChunkSerialization() throws IOException { public void testObjectSerialization() throws IOException { testRoundTripSerialization(OPT_DEFAULT, Object.class, initObjectChunk(Integer::toString), - new ObjectIdentityValidator<>()); + new ObjectIdentityValidator<>()); } public void testStringSerializationDHNulls() throws IOException { - testRoundTripSerialization(OPT_DEFAULT_DH_NULLS, String.class, - initObjectChunk(Integer::toString), new ObjectIdentityValidator<>()); + testRoundTripSerialization(OPT_DEFAULT_DH_NULLS, String.class, initObjectChunk(Integer::toString), + new ObjectIdentityValidator<>()); } public void testStringSerialization() throws IOException { testRoundTripSerialization(OPT_DEFAULT, Object.class, initObjectChunk(Integer::toString), - new ObjectIdentityValidator<>()); + new ObjectIdentityValidator<>()); } public void testUniqueToStringSerializationDHNulls() throws IOException { testRoundTripSerialization(OPT_DEFAULT_DH_NULLS, Unique.class, initObjectChunk(Unique::new), - new ObjectToStringValidator<>()); + new ObjectToStringValidator<>()); } public void testUniqueToStringSerialization() throws IOException { 
testRoundTripSerialization(OPT_DEFAULT, Unique.class, initObjectChunk(Unique::new), - new ObjectToStringValidator<>()); + new ObjectToStringValidator<>()); } public void testStringArrayDHNullsSerialization() throws IOException { testRoundTripSerialization(OPT_DEFAULT_DH_NULLS, String[].class, - BarrageColumnRoundTripTest::initStringArrayChunk, new ObjectIdentityValidator<>()); + BarrageColumnRoundTripTest::initStringArrayChunk, new ObjectIdentityValidator<>()); } public void testStringArraySerialization() throws IOException { - testRoundTripSerialization(OPT_DEFAULT, String[].class, - BarrageColumnRoundTripTest::initStringArrayChunk, new ObjectIdentityValidator<>()); + testRoundTripSerialization(OPT_DEFAULT, String[].class, BarrageColumnRoundTripTest::initStringArrayChunk, + new ObjectIdentityValidator<>()); } public void testLongArraySerializationDHNulls() throws IOException { - testRoundTripSerialization(OPT_DEFAULT_DH_NULLS, long[].class, - BarrageColumnRoundTripTest::initLongArrayChunk, new LongArrayIdentityValidator<>()); + testRoundTripSerialization(OPT_DEFAULT_DH_NULLS, long[].class, BarrageColumnRoundTripTest::initLongArrayChunk, + new LongArrayIdentityValidator<>()); } public void testLongArraySerialization() throws IOException { - testRoundTripSerialization(OPT_DEFAULT, long[].class, - BarrageColumnRoundTripTest::initLongArrayChunk, new LongArrayIdentityValidator<>()); + testRoundTripSerialization(OPT_DEFAULT, long[].class, BarrageColumnRoundTripTest::initLongArrayChunk, + new LongArrayIdentityValidator<>()); } private static class Unique { @@ -299,11 +283,9 @@ public boolean equals(final Object obj) { } } - private static Consumer> initObjectChunk( - final IntFunction mapper) { + private static Consumer> initObjectChunk(final IntFunction mapper) { return (untypedChunk) -> { - final WritableObjectChunk chunk = - untypedChunk.asWritableObjectChunk(); + final WritableObjectChunk chunk = untypedChunk.asWritableObjectChunk(); for (int i = 0; i < chunk.size(); 
++i) { chunk.set(i, i % 7 == 0 ? null : mapper.apply(i)); } @@ -312,8 +294,7 @@ private static Consumer> initObjectChunk( private static void initStringArrayChunk(final WritableChunk untypedChunk) { final Random random = new Random(); - final WritableObjectChunk chunk = - untypedChunk.asWritableObjectChunk(); + final WritableObjectChunk chunk = untypedChunk.asWritableObjectChunk(); for (int i = 0; i < chunk.size(); ++i) { final int j = random.nextInt(20) - 1; @@ -331,8 +312,7 @@ private static void initStringArrayChunk(final WritableChunk private static void initLongArrayChunk(final WritableChunk untypedChunk) { final Random random = new Random(); - final WritableObjectChunk chunk = - untypedChunk.asWritableObjectChunk(); + final WritableObjectChunk chunk = untypedChunk.asWritableObjectChunk(); for (int i = 0; i < chunk.size(); ++i) { final int j = random.nextInt(20) - 1; @@ -350,19 +330,17 @@ private static void initLongArrayChunk(final WritableChunk un private interface Validator { void assertExpected(final WritableChunk original, - final WritableChunk computed, - @Nullable final OrderedKeys subset); + final WritableChunk computed, + @Nullable final OrderedKeys subset); } private static final class ObjectIdentityValidator implements Validator { @Override public void assertExpected(final WritableChunk untypedOriginal, - final WritableChunk unTypedComputed, - @Nullable OrderedKeys subset) { - final WritableObjectChunk original = - untypedOriginal.asWritableObjectChunk(); - final WritableObjectChunk computed = - unTypedComputed.asWritableObjectChunk(); + final WritableChunk unTypedComputed, + @Nullable OrderedKeys subset) { + final WritableObjectChunk original = untypedOriginal.asWritableObjectChunk(); + final WritableObjectChunk computed = unTypedComputed.asWritableObjectChunk(); if (subset == null) { subset = Index.CURRENT_FACTORY.getFlatIndex(untypedOriginal.size()); @@ -382,8 +360,7 @@ public void assertExpected(final WritableChunk untypedOrigina 
Assert.equals(nt[k], "nt[k]", ct[k], "ct[k]"); } } else { - Assert.equals(next, "next", computed.get(off.getAndIncrement()), - "computed.get(i)"); + Assert.equals(next, "next", computed.get(off.getAndIncrement()), "computed.get(i)"); } }); } @@ -392,12 +369,10 @@ public void assertExpected(final WritableChunk untypedOrigina private static class ObjectToStringValidator implements Validator { @Override public void assertExpected(final WritableChunk untypedOriginal, - final WritableChunk untypedComputed, - @Nullable OrderedKeys subset) { - final WritableObjectChunk original = - untypedOriginal.asWritableObjectChunk(); - final WritableObjectChunk computed = - untypedComputed.asWritableObjectChunk(); + final WritableChunk untypedComputed, + @Nullable OrderedKeys subset) { + final WritableObjectChunk original = untypedOriginal.asWritableObjectChunk(); + final WritableObjectChunk computed = untypedComputed.asWritableObjectChunk(); if (subset == null) { subset = Index.CURRENT_FACTORY.getFlatIndex(original.size()); } @@ -406,9 +381,8 @@ public void assertExpected(final WritableChunk untypedOrigina if (original.get((int) key) == null) { Assert.eqNull(computed.get(off.getAndIncrement()), "computed"); } else { - Assert.equals(original.get((int) key).toString(), - "original.get(key).toString()", - computed.get(off.getAndIncrement()), "computed.get(off.getAndIncrement())"); + Assert.equals(original.get((int) key).toString(), "original.get(key).toString()", + computed.get(off.getAndIncrement()), "computed.get(off.getAndIncrement())"); } }); } @@ -417,12 +391,10 @@ public void assertExpected(final WritableChunk untypedOrigina private static final class LongArrayIdentityValidator implements Validator { @Override public void assertExpected(final WritableChunk untypedOriginal, - final WritableChunk unTypedComputed, - @Nullable OrderedKeys subset) { - final WritableObjectChunk original = - untypedOriginal.asWritableObjectChunk(); - final WritableObjectChunk computed = - 
unTypedComputed.asWritableObjectChunk(); + final WritableChunk unTypedComputed, + @Nullable OrderedKeys subset) { + final WritableObjectChunk original = untypedOriginal.asWritableObjectChunk(); + final WritableObjectChunk computed = unTypedComputed.asWritableObjectChunk(); if (subset == null) { subset = Index.CURRENT_FACTORY.getFlatIndex(original.size()); } @@ -447,9 +419,8 @@ public void assertExpected(final WritableChunk untypedOrigina @SuppressWarnings("UnstableApiUsage") private void testRoundTripSerialization( - final ChunkInputStreamGenerator.Options options, final Class type, - final Consumer> initData, final Validator validator) - throws IOException { + final ChunkInputStreamGenerator.Options options, final Class type, + final Consumer> initData, final Validator validator) throws IOException { final ChunkType chunkType = ChunkType.fromElementType(type); WritableChunk rtData = null; @@ -459,47 +430,42 @@ private void testRoundTripSerialization( initData.accept(data); final ChunkInputStreamGenerator generator = - ChunkInputStreamGenerator.makeInputStreamGenerator(chunkType, type, data); + ChunkInputStreamGenerator.makeInputStreamGenerator(chunkType, type, data); // full sub logic try (final BarrageProtoUtil.ExposedByteArrayOutputStream baos = - new BarrageProtoUtil.ExposedByteArrayOutputStream()) { - final ChunkInputStreamGenerator.DrainableColumn column = - generator.getInputStream(options, null); - final ArrayList fieldNodes = - new ArrayList<>(); + new BarrageProtoUtil.ExposedByteArrayOutputStream()) { + final ChunkInputStreamGenerator.DrainableColumn column = generator.getInputStream(options, null); + final ArrayList fieldNodes = new ArrayList<>(); column.visitFieldNodes((numElements, nullCount) -> fieldNodes - .add(new ChunkInputStreamGenerator.FieldNodeInfo(numElements, nullCount))); + .add(new ChunkInputStreamGenerator.FieldNodeInfo(numElements, nullCount))); final TLongArrayList bufferNodes = new TLongArrayList(); 
column.visitBuffers(bufferNodes::add); column.drainTo(baos); - final DataInput dis = new LittleEndianDataInputStream( - new ByteArrayInputStream(baos.peekBuffer(), 0, baos.size())); + final DataInput dis = + new LittleEndianDataInputStream(new ByteArrayInputStream(baos.peekBuffer(), 0, baos.size())); - rtData = (WritableChunk) ChunkInputStreamGenerator - .extractChunkFromInputStream(options, chunkType, type, fieldNodes.iterator(), - bufferNodes.iterator(), dis); + rtData = (WritableChunk) ChunkInputStreamGenerator.extractChunkFromInputStream( + options, chunkType, type, fieldNodes.iterator(), bufferNodes.iterator(), dis); Assert.eq(data.size(), "data.size()", rtData.size(), "rtData.size()"); validator.assertExpected(data, rtData, null); } // empty subset try (final BarrageProtoUtil.ExposedByteArrayOutputStream baos = - new BarrageProtoUtil.ExposedByteArrayOutputStream()) { + new BarrageProtoUtil.ExposedByteArrayOutputStream()) { final ChunkInputStreamGenerator.DrainableColumn column = - generator.getInputStream(options, Index.CURRENT_FACTORY.getEmptyIndex()); - final ArrayList fieldNodes = - new ArrayList<>(); + generator.getInputStream(options, Index.CURRENT_FACTORY.getEmptyIndex()); + final ArrayList fieldNodes = new ArrayList<>(); column.visitFieldNodes((numElements, nullCount) -> fieldNodes - .add(new ChunkInputStreamGenerator.FieldNodeInfo(numElements, nullCount))); + .add(new ChunkInputStreamGenerator.FieldNodeInfo(numElements, nullCount))); final TLongArrayList bufferNodes = new TLongArrayList(); column.visitBuffers(bufferNodes::add); column.drainTo(baos); - final DataInput dis = new LittleEndianDataInputStream( - new ByteArrayInputStream(baos.peekBuffer(), 0, baos.size())); - rtData = (WritableChunk) ChunkInputStreamGenerator - .extractChunkFromInputStream(options, chunkType, type, fieldNodes.iterator(), - bufferNodes.iterator(), dis); + final DataInput dis = + new LittleEndianDataInputStream(new ByteArrayInputStream(baos.peekBuffer(), 0, baos.size())); + 
rtData = (WritableChunk) ChunkInputStreamGenerator.extractChunkFromInputStream( + options, chunkType, type, fieldNodes.iterator(), bufferNodes.iterator(), dis); Assert.eq(rtData.size(), "rtData.size()", 0); } @@ -511,24 +477,20 @@ private void testRoundTripSerialization( builder.appendKey(i); } } - try ( - final BarrageProtoUtil.ExposedByteArrayOutputStream baos = + try (final BarrageProtoUtil.ExposedByteArrayOutputStream baos = new BarrageProtoUtil.ExposedByteArrayOutputStream(); - final Index subset = builder.getIndex()) { - final ChunkInputStreamGenerator.DrainableColumn column = - generator.getInputStream(options, subset); - final ArrayList fieldNodes = - new ArrayList<>(); + final Index subset = builder.getIndex()) { + final ChunkInputStreamGenerator.DrainableColumn column = generator.getInputStream(options, subset); + final ArrayList fieldNodes = new ArrayList<>(); column.visitFieldNodes((numElements, nullCount) -> fieldNodes - .add(new ChunkInputStreamGenerator.FieldNodeInfo(numElements, nullCount))); + .add(new ChunkInputStreamGenerator.FieldNodeInfo(numElements, nullCount))); final TLongArrayList bufferNodes = new TLongArrayList(); column.visitBuffers(bufferNodes::add); column.drainTo(baos); - final DataInput dis = new LittleEndianDataInputStream( - new ByteArrayInputStream(baos.peekBuffer(), 0, baos.size())); - rtData = (WritableChunk) ChunkInputStreamGenerator - .extractChunkFromInputStream(options, chunkType, type, fieldNodes.iterator(), - bufferNodes.iterator(), dis); + final DataInput dis = + new LittleEndianDataInputStream(new ByteArrayInputStream(baos.peekBuffer(), 0, baos.size())); + rtData = (WritableChunk) ChunkInputStreamGenerator.extractChunkFromInputStream( + options, chunkType, type, fieldNodes.iterator(), bufferNodes.iterator(), dis); Assert.eq(subset.intSize(), "subset.intSize()", rtData.size(), "rtData.size()"); validator.assertExpected(data, rtData, subset); } diff --git 
a/grpc-api/server/docker/src/main/java/io/deephaven/grpc_api/runner/DockerEmpty.java b/grpc-api/server/docker/src/main/java/io/deephaven/grpc_api/runner/DockerEmpty.java index 390912c7c63..85de09a4c00 100644 --- a/grpc-api/server/docker/src/main/java/io/deephaven/grpc_api/runner/DockerEmpty.java +++ b/grpc-api/server/docker/src/main/java/io/deephaven/grpc_api/runner/DockerEmpty.java @@ -5,6 +5,6 @@ class DockerEmpty { // empty on purpose, otherwise // Execution failed for task ':grpc-api-docker:dockerBuildImage'. -// > Could not build image: COPY failed: file not found in build context or excluded by -// .dockerignore: stat classes: file does not exist +// > Could not build image: COPY failed: file not found in build context or excluded by .dockerignore: stat classes: +// file does not exist // TODO: exclude the COPY classes part for dockerBuildImage diff --git a/grpc-api/src/flightTest/java/io/deephaven/grpc_api/flight/FlightMessageRoundTripTest.java b/grpc-api/src/flightTest/java/io/deephaven/grpc_api/flight/FlightMessageRoundTripTest.java index 59c7a32b2b8..735889a868c 100644 --- a/grpc-api/src/flightTest/java/io/deephaven/grpc_api/flight/FlightMessageRoundTripTest.java +++ b/grpc-api/src/flightTest/java/io/deephaven/grpc_api/flight/FlightMessageRoundTripTest.java @@ -61,8 +61,8 @@ import static org.junit.Assert.*; /** - * Deliberately much lower in scope (and running time) than BarrageMessageRoundTripTest, the only - * purpose of this test is to verify that we can round trip + * Deliberately much lower in scope (and running time) than BarrageMessageRoundTripTest, the only purpose of this test + * is to verify that we can round trip */ public class FlightMessageRoundTripTest { @Module @@ -75,8 +75,7 @@ TicketResolver ticketResolver(ScopeTicketResolver resolver) { @Provides AbstractScriptSession createGlobalScriptSession(GlobalSessionProvider sessionProvider) { - final AbstractScriptSession scriptSession = - new NoLanguageDeephavenSession("non-script-session"); + 
final AbstractScriptSession scriptSession = new NoLanguageDeephavenSession("non-script-session"); sessionProvider.initializeGlobalScriptSession(scriptSession); return scriptSession; } @@ -124,11 +123,11 @@ interface Builder { @Before public void setup() throws IOException { TestComponent component = DaggerFlightMessageRoundTripTest_TestComponent - .builder() - .withScheduler(new Scheduler.DelegatingImpl(Executors.newSingleThreadExecutor(), - Executors.newScheduledThreadPool(1))) - .withSessionTokenExpireTmMs(60_000_000) - .build(); + .builder() + .withScheduler(new Scheduler.DelegatingImpl(Executors.newSingleThreadExecutor(), + Executors.newScheduledThreadPool(1))) + .withSessionTokenExpireTmMs(60_000_000) + .build(); NettyServerBuilder serverBuilder = NettyServerBuilder.forPort(0); component.interceptors().forEach(serverBuilder::intercept); @@ -140,30 +139,29 @@ public void setup() throws IOException { scriptSession = component.scriptSession(); client = FlightClient.builder().location(Location.forGrpcInsecure("localhost", actualPort)) - .allocator(new RootAllocator()).intercept(info -> new FlightClientMiddleware() { - @Override - public void onBeforeSendingHeaders(CallHeaders outgoingHeaders) { - final UUID currSession = sessionToken; - if (currSession != null) { - outgoingHeaders.insert(SessionServiceGrpcImpl.DEEPHAVEN_SESSION_ID, - currSession.toString()); + .allocator(new RootAllocator()).intercept(info -> new FlightClientMiddleware() { + @Override + public void onBeforeSendingHeaders(CallHeaders outgoingHeaders) { + final UUID currSession = sessionToken; + if (currSession != null) { + outgoingHeaders.insert(SessionServiceGrpcImpl.DEEPHAVEN_SESSION_ID, currSession.toString()); + } } - } - @Override - public void onHeadersReceived(CallHeaders incomingHeaders) {} + @Override + public void onHeadersReceived(CallHeaders incomingHeaders) {} - @Override - public void onCallCompleted(CallStatus status) {} - }).build(); + @Override + public void 
onCallCompleted(CallStatus status) {} + }).build(); channel = ManagedChannelBuilder.forTarget("localhost:" + actualPort) - .usePlaintext() - .build(); + .usePlaintext() + .build(); SessionServiceGrpc.SessionServiceBlockingStub sessionServiceClient = - SessionServiceGrpc.newBlockingStub(channel); + SessionServiceGrpc.newBlockingStub(channel); - HandshakeResponse response = sessionServiceClient - .newSession(HandshakeRequest.newBuilder().setAuthProtocol(1).build()); + HandshakeResponse response = + sessionServiceClient.newSession(HandshakeRequest.newBuilder().setAuthProtocol(1).build()); assertNotNull(response.getSessionToken()); sessionToken = UUID.fromString(response.getSessionToken().toStringUtf8()); @@ -214,10 +212,9 @@ protected void after() { public void testSimpleEmptyTableDoGet() { Flight.Ticket simpleTableTicket = ExportTicketHelper.exportIdToArrowTicket(1); currentSession.newExport(simpleTableTicket) - .submit(() -> TableTools.emptyTable(10).update("I=i")); + .submit(() -> TableTools.emptyTable(10).update("I=i")); - FlightStream stream = - client.getStream(new Ticket(simpleTableTicket.getTicket().toByteArray())); + FlightStream stream = client.getStream(new Ticket(simpleTableTicket.getTicket().toByteArray())); assertTrue(stream.next()); VectorSchemaRoot root = stream.getRoot(); // row count should match what we expect @@ -258,10 +255,9 @@ public void testRoundTripData() throws InterruptedException, ExecutionException assertRoundTripDataEqual(TableTools.emptyTable(10).update("empty=(String)null")); // some nulls in columns + assertRoundTripDataEqual(TableTools.emptyTable(10).update("empty= ((i % 2) == 0) ? i : (int)null")); assertRoundTripDataEqual( - TableTools.emptyTable(10).update("empty= ((i % 2) == 0) ? i : (int)null")); - assertRoundTripDataEqual(TableTools.emptyTable(10) - .update("empty= ((i % 2) == 0) ? String.valueOf(i) : (String)null")); + TableTools.emptyTable(10).update("empty= ((i % 2) == 0) ? 
String.valueOf(i) : (String)null")); // list columns TODO(#755): support for DBArray // assertRoundTripDataEqual(TableTools.emptyTable(5).update("A=i").by().join(TableTools.emptyTable(5))); @@ -274,17 +270,15 @@ public void testFlightInfo() { final Table table = TableTools.emptyTable(10).update("I = i"); final Table tickingTable = LiveTableMonitor.DEFAULT.sharedLock() - .computeLocked(() -> TableTools.timeTable(1_000_000).update("I = i")); + .computeLocked(() -> TableTools.timeTable(1_000_000).update("I = i")); // stuff table into the scope scriptSession.setVariable(staticTableName, table); scriptSession.setVariable(tickingTableName, tickingTable); // test fetch info from scoped ticket - assertInfoMatchesTable(client.getInfo(arrowFlightDescriptorForName(staticTableName)), - table); - assertInfoMatchesTable(client.getInfo(arrowFlightDescriptorForName(tickingTableName)), - tickingTable); + assertInfoMatchesTable(client.getInfo(arrowFlightDescriptorForName(staticTableName)), table); + assertInfoMatchesTable(client.getInfo(arrowFlightDescriptorForName(tickingTableName)), tickingTable); // test list flights which runs through scoped tickets final MutableInt seenTables = new MutableInt(); @@ -307,7 +301,7 @@ public void testGetSchema() { final Table table = TableTools.emptyTable(10).update("I = i"); final Table tickingTable = LiveTableMonitor.DEFAULT.sharedLock() - .computeLocked(() -> TableTools.timeTable(1_000_000).update("I = i")); + .computeLocked(() -> TableTools.timeTable(1_000_000).update("I = i")); try (final SafeCloseable ignored = LivenessScopeStack.open(scriptSession, false)) { // stuff table into the scope @@ -315,11 +309,10 @@ public void testGetSchema() { scriptSession.setVariable(tickingTableName, tickingTable); // test fetch info from scoped ticket - assertSchemaMatchesTable( - client.getSchema(arrowFlightDescriptorForName(staticTableName)).getSchema(), table); - assertSchemaMatchesTable( - 
client.getSchema(arrowFlightDescriptorForName(tickingTableName)).getSchema(), - tickingTable); + assertSchemaMatchesTable(client.getSchema(arrowFlightDescriptorForName(staticTableName)).getSchema(), + table); + assertSchemaMatchesTable(client.getSchema(arrowFlightDescriptorForName(tickingTableName)).getSchema(), + tickingTable); // test list flights which runs through scoped tickets final MutableInt seenTables = new MutableInt(); @@ -342,10 +335,8 @@ private static FlightDescriptor arrowFlightDescriptorForName(String name) { @Test public void testExportTicketVisibility() { - // we have decided that if an api client creates export tickets, that they probably gain no - // value from - // seeing them via Flight's listFlights but we do want them to work with getFlightInfo (or - // anywhere else a + // we have decided that if an api client creates export tickets, that they probably gain no value from + // seeing them via Flight's listFlights but we do want them to work with getFlightInfo (or anywhere else a // flight ticket can be resolved). 
final Flight.Ticket ticket = ExportTicketHelper.exportIdToArrowTicket(1); final Table table = TableTools.emptyTable(10).update("I = i"); @@ -375,16 +366,15 @@ private void assertInfoMatchesTable(FlightInfo info, Table table) { private void assertSchemaMatchesTable(Schema schema, Table table) { Assert.eq(schema.getFields().size(), "schema.getFields().size()", table.getColumns().length, - "table.getColumns().length"); + "table.getColumns().length"); Assert.equals(BarrageSchemaUtil.schemaToTableDefinition(schema), - "BarrageSchemaUtil.schemaToTableDefinition(schema)", - table.getDefinition(), "table.getDefinition()"); + "BarrageSchemaUtil.schemaToTableDefinition(schema)", + table.getDefinition(), "table.getDefinition()"); } private static int nextTicket = 1; - private void assertRoundTripDataEqual(Table deephavenTable) - throws InterruptedException, ExecutionException { + private void assertRoundTripDataEqual(Table deephavenTable) throws InterruptedException, ExecutionException { // bind the table in the session Flight.Ticket dhTableTicket = ExportTicketHelper.exportIdToArrowTicket(nextTicket++); currentSession.newExport(dhTableTicket).submit(() -> deephavenTable); @@ -395,10 +385,8 @@ private void assertRoundTripDataEqual(Table deephavenTable) // start the DoPut and send the schema int flightDescriptorTicketValue = nextTicket++; - FlightDescriptor descriptor = - FlightDescriptor.path("export", flightDescriptorTicketValue + ""); - FlightClient.ClientStreamListener putStream = - client.startPut(descriptor, root, new AsyncPutListener()); + FlightDescriptor descriptor = FlightDescriptor.path("export", flightDescriptorTicketValue + ""); + FlightClient.ClientStreamListener putStream = client.startPut(descriptor, root, new AsyncPutListener()); // send the body of the table while (stream.next()) { @@ -410,12 +398,11 @@ private void assertRoundTripDataEqual(Table deephavenTable) // get the table that was uploaded, and confirm it matches what we originally sent 
CompletableFuture

    tableFuture = new CompletableFuture<>(); - SessionState.ExportObject
    tableExport = - currentSession.getExport(flightDescriptorTicketValue); + SessionState.ExportObject
    tableExport = currentSession.getExport(flightDescriptorTicketValue); currentSession.nonExport() - .onError(exception -> tableFuture.cancel(true)) - .require(tableExport) - .submit(() -> tableFuture.complete(tableExport.get())); + .onError(exception -> tableFuture.cancel(true)) + .require(tableExport) + .submit(() -> tableFuture.complete(tableExport.get())); // block until we're done, so we can get the table and see what is inside putStream.getResult(); @@ -425,7 +412,6 @@ private void assertRoundTripDataEqual(Table deephavenTable) assertEquals(deephavenTable.size(), uploadedTable.size()); assertEquals(deephavenTable.getDefinition(), uploadedTable.getDefinition()); assertEquals(0, (long) TableTools - .diffPair(deephavenTable, uploadedTable, 0, EnumSet.noneOf(TableDiff.DiffItems.class)) - .getSecond()); + .diffPair(deephavenTable, uploadedTable, 0, EnumSet.noneOf(TableDiff.DiffItems.class)).getSecond()); } } diff --git a/grpc-api/src/main/java/io/deephaven/figures/FigureWidgetTranslator.java b/grpc-api/src/main/java/io/deephaven/figures/FigureWidgetTranslator.java index d6dfe5b5dd2..d209f9f2651 100644 --- a/grpc-api/src/main/java/io/deephaven/figures/FigureWidgetTranslator.java +++ b/grpc-api/src/main/java/io/deephaven/figures/FigureWidgetTranslator.java @@ -72,9 +72,9 @@ private FigureDescriptor translateFigure(FigureWidget f, SessionState sessionSta // translate tables first, so we can use them to look up tables as needed int i = 0; for (Map.Entry> entry : figure.getTableHandles().stream() - .collect(Collectors.groupingBy(TableHandle::getTable)).entrySet()) { - Set relevantColumns = entry.getValue().stream().map(TableHandle::getColumns) - .flatMap(Set::stream).collect(Collectors.toSet()); + .collect(Collectors.groupingBy(TableHandle::getTable)).entrySet()) { + Set relevantColumns = entry.getValue().stream().map(TableHandle::getColumns).flatMap(Set::stream) + .collect(Collectors.toSet()); Table table = entry.getKey().view(Selectable.from(relevantColumns)); for 
(TableHandle handle : entry.getValue()) { @@ -82,11 +82,9 @@ private FigureDescriptor translateFigure(FigureWidget f, SessionState sessionSta } i++; - SessionState.ExportObject
    tableExportObject = - sessionState.newServerSideExport(table); + SessionState.ExportObject
    tableExportObject = sessionState.newServerSideExport(table); clientFigure.addTables(TableServiceGrpcImpl.buildTableCreationResponse( - TableReference.newBuilder().setTicket(tableExportObject.getExportId()).build(), - table)); + TableReference.newBuilder().setTicket(tableExportObject.getExportId()).build(), table)); } // TODO (deephaven-core#62) implement once tablemaps are ready @@ -104,16 +102,14 @@ private FigureDescriptor translateFigure(FigureWidget f, SessionState sessionSta // } // i++; // - // SessionState.ExportObject tableExportObject = - // sessionState.newServerSideExport(tableMap); + // SessionState.ExportObject tableExportObject = sessionState.newServerSideExport(tableMap); // clientFigure.addTableMap(...) // } assignOptionalField(figure.getTitle(), clientFigure::setTitle, clientFigure::clearTitle); assignOptionalField(toCssColorString(figure.getTitleColor()), clientFigure::setTitleColor, - clientFigure::clearTitleColor); - assignOptionalField(toCssFont(figure.getTitleFont()), clientFigure::setTitleFont, - clientFigure::clearTitleFont); + clientFigure::clearTitleColor); + assignOptionalField(toCssFont(figure.getTitleFont()), clientFigure::setTitleFont, clientFigure::clearTitleFont); List charts = figure.getCharts().getCharts(); @@ -141,16 +137,14 @@ private static void assignOptionalField(T value, Consumer setter, Runnabl private FigureDescriptor.ChartDescriptor translate(ChartImpl chart) { assert chart.dimension() == 2 : "Only dim=2 supported"; - FigureDescriptor.ChartDescriptor.Builder clientChart = - FigureDescriptor.ChartDescriptor.newBuilder(); + FigureDescriptor.ChartDescriptor.Builder clientChart = FigureDescriptor.ChartDescriptor.newBuilder(); boolean swappedPositions = chart.getPlotOrientation() != ChartImpl.PlotOrientation.VERTICAL; Map axes = new HashMap<>(); // x=0, y=1, z=2, unless swapped - // The first X axis is on the bottom, later instances should be on the top. 
Likewise, the - // first Y axis + // The first X axis is on the bottom, later instances should be on the top. Likewise, the first Y axis // is on the left, and later instances appear on the right. AxisDescriptor.Builder firstX = null; AxisDescriptor.Builder firstY = null; @@ -172,17 +166,15 @@ private FigureDescriptor.ChartDescriptor translate(ChartImpl chart) { } AxisDescriptor.Builder clientAxis = AxisDescriptor.newBuilder(); clientAxis.setId(type.name() + axis.id()); - clientAxis - .setFormatType(AxisDescriptor.AxisFormatType.valueOf(axis.getType().name())); + clientAxis.setFormatType(AxisDescriptor.AxisFormatType.valueOf(axis.getType().name())); clientAxis.setLog(axis.isLog()); assignOptionalField(axis.getLabel(), clientAxis::setLabel, clientAxis::clearLabel); assignOptionalField(toCssFont(axis.getLabelFont()), clientAxis::setLabelFont, - clientAxis::clearLabelFont); + clientAxis::clearLabelFont); // clientAxis.setFormat(axis.getFormat().toString()); assignOptionalField(axis.getFormatPattern(), clientAxis::setFormatPattern, - clientAxis::clearFormatPattern); - assignOptionalField(toCssColorString(axis.getColor()), clientAxis::setColor, - clientAxis::clearColor); + clientAxis::clearFormatPattern); + assignOptionalField(toCssColorString(axis.getColor()), clientAxis::setColor, clientAxis::clearColor); clientAxis.setMinRange(axis.getMinRange()); clientAxis.setMaxRange(axis.getMaxRange()); clientAxis.setMinorTicksVisible(axis.isMinorTicksVisible()); @@ -190,8 +182,8 @@ private FigureDescriptor.ChartDescriptor translate(ChartImpl chart) { clientAxis.setMinorTickCount(axis.getMinorTickCount()); clientAxis.setGapBetweenMajorTicks(axis.getGapBetweenMajorTicks()); assignOptionalField(axis.getMajorTickLocations(), - arr -> DoubleStream.of(arr).forEach(clientAxis::addMajorTickLocations), - clientAxis::clearMajorTickLocations); + arr -> DoubleStream.of(arr).forEach(clientAxis::addMajorTickLocations), + clientAxis::clearMajorTickLocations); // 
clientAxis.setAxisTransform(axis.getAxisTransform().toString()); clientAxis.setTickLabelAngle(axis.getTickLabelAngle()); clientAxis.setInvert(axis.getInvert()); @@ -200,7 +192,7 @@ private FigureDescriptor.ChartDescriptor translate(ChartImpl chart) { final AxisTransform axisTransform = axis.getAxisTransform(); if (axisTransform instanceof AxisTransformBusinessCalendar) { clientAxis.setBusinessCalendarDescriptor( - translateBusinessCalendar((AxisTransformBusinessCalendar) axisTransform)); + translateBusinessCalendar((AxisTransformBusinessCalendar) axisTransform)); } clientAxis.setType(type); @@ -226,8 +218,7 @@ private FigureDescriptor.ChartDescriptor translate(ChartImpl chart) { clientChart.addAllAxes(axes.values()); Stream.Builder clientSeriesCollection = Stream.builder(); - Stream.Builder clientMultiSeriesCollection = - Stream.builder(); + Stream.Builder clientMultiSeriesCollection = Stream.builder(); chart.getAxes().forEach(axesImpl -> { @@ -254,262 +245,224 @@ private FigureDescriptor.ChartDescriptor translate(ChartImpl chart) { // use the description map since it is known to be ordered correctly axesImpl.dataSeries().getSeriesDescriptions().values().stream() - .map(SeriesCollection.SeriesDescription::getSeries).forEach(seriesInternal -> { - if (seriesInternal instanceof AbstractDataSeries) { - SeriesDescriptor.Builder clientSeries = SeriesDescriptor.newBuilder(); - clientSeries.setPlotStyle(FigureDescriptor.SeriesPlotStyle - .valueOf(axesImpl.getPlotStyle().name())); - clientSeries.setName(String.valueOf(seriesInternal.name())); - Stream.Builder clientAxes = Stream.builder(); - - AbstractDataSeries s = (AbstractDataSeries) seriesInternal; - - assignOptionalField(s.getLinesVisible(), clientSeries::setLinesVisible, - clientSeries::clearLinesVisible); - assignOptionalField(s.getPointsVisible(), clientSeries::setShapesVisible, - clientSeries::clearShapesVisible); - clientSeries.setGradientVisible(s.getGradientVisible()); - 
assignOptionalField(toCssColorString(s.getLineColor()), - clientSeries::setLineColor, clientSeries::clearLineColor); - // clientSeries.setLineStyle(s.getLineStyle().toString()); - assignOptionalField(s.getPointLabelFormat(), - clientSeries::setPointLabelFormat, clientSeries::clearPointLabelFormat); - assignOptionalField(s.getXToolTipPattern(), - clientSeries::setXToolTipPattern, clientSeries::clearXToolTipPattern); - assignOptionalField(s.getYToolTipPattern(), - clientSeries::setYToolTipPattern, clientSeries::clearYToolTipPattern); - - // build the set of axes that the series is watching, and give each a type, - // starting - // with the x and y we have so far mapped to this - - if (s instanceof AbstractXYDataSeries) { - if (s instanceof IntervalXYDataSeriesArray) { - // interval (aka histogram) - IntervalXYDataSeriesArray series = (IntervalXYDataSeriesArray) s; - clientAxes - .add(makeSourceDescriptor(series.getX(), SourceType.X, xAxis)); - clientAxes.add(makeSourceDescriptor(series.getStartX(), - SourceType.X_LOW, xAxis)); - clientAxes.add(makeSourceDescriptor(series.getEndX(), - SourceType.X_HIGH, xAxis)); - clientAxes - .add(makeSourceDescriptor(series.getY(), SourceType.Y, yAxis)); - clientAxes.add(makeSourceDescriptor(series.getStartY(), - SourceType.Y_LOW, yAxis)); - clientAxes.add(makeSourceDescriptor(series.getEndY(), - SourceType.Y_HIGH, yAxis)); - } else if (s instanceof XYErrorBarDataSeriesArray) { - // errorbar x, xy - XYErrorBarDataSeriesArray series = (XYErrorBarDataSeriesArray) s; - clientAxes - .add(makeSourceDescriptor(series.getX(), SourceType.X, xAxis)); - clientAxes.add(makeSourceDescriptor(series.getXLow(), - SourceType.X_LOW, xAxis)); - clientAxes.add(makeSourceDescriptor(series.getXHigh(), - SourceType.X_HIGH, xAxis)); - clientAxes - .add(makeSourceDescriptor(series.getY(), SourceType.Y, yAxis)); - clientAxes.add(makeSourceDescriptor(series.getYLow(), - SourceType.Y_LOW, yAxis)); - clientAxes.add(makeSourceDescriptor(series.getYHigh(), - 
SourceType.Y_HIGH, yAxis)); - } else if (s instanceof OHLCDataSeriesArray) { - OHLCDataSeriesArray series = (OHLCDataSeriesArray) s; - clientAxes.add( - makeSourceDescriptor(series.getTime(), SourceType.TIME, xAxis)); - clientAxes.add( - makeSourceDescriptor(series.getOpen(), SourceType.OPEN, yAxis)); - clientAxes.add(makeSourceDescriptor(series.getClose(), - SourceType.CLOSE, yAxis)); - clientAxes.add( - makeSourceDescriptor(series.getHigh(), SourceType.HIGH, yAxis)); - clientAxes.add( - makeSourceDescriptor(series.getLow(), SourceType.LOW, yAxis)); - } else if (s instanceof XYDataSeriesArray) { - // xy of some other kind - XYDataSeriesArray series = (XYDataSeriesArray) s; - clientAxes - .add(makeSourceDescriptor(series.getX(), SourceType.X, xAxis)); - clientAxes - .add(makeSourceDescriptor(series.getY(), SourceType.Y, yAxis)); - } else { - // warn about other unsupported series types - errorList.add("OpenAPI presently does not support series of type " - + s.getClass()); + .map(SeriesCollection.SeriesDescription::getSeries).forEach(seriesInternal -> { + if (seriesInternal instanceof AbstractDataSeries) { + SeriesDescriptor.Builder clientSeries = SeriesDescriptor.newBuilder(); + clientSeries.setPlotStyle( + FigureDescriptor.SeriesPlotStyle.valueOf(axesImpl.getPlotStyle().name())); + clientSeries.setName(String.valueOf(seriesInternal.name())); + Stream.Builder clientAxes = Stream.builder(); + + AbstractDataSeries s = (AbstractDataSeries) seriesInternal; + + assignOptionalField(s.getLinesVisible(), clientSeries::setLinesVisible, + clientSeries::clearLinesVisible); + assignOptionalField(s.getPointsVisible(), clientSeries::setShapesVisible, + clientSeries::clearShapesVisible); + clientSeries.setGradientVisible(s.getGradientVisible()); + assignOptionalField(toCssColorString(s.getLineColor()), clientSeries::setLineColor, + clientSeries::clearLineColor); + // clientSeries.setLineStyle(s.getLineStyle().toString()); + assignOptionalField(s.getPointLabelFormat(), 
clientSeries::setPointLabelFormat, + clientSeries::clearPointLabelFormat); + assignOptionalField(s.getXToolTipPattern(), clientSeries::setXToolTipPattern, + clientSeries::clearXToolTipPattern); + assignOptionalField(s.getYToolTipPattern(), clientSeries::setYToolTipPattern, + clientSeries::clearYToolTipPattern); + + // build the set of axes that the series is watching, and give each a type, starting + // with the x and y we have so far mapped to this + + if (s instanceof AbstractXYDataSeries) { + if (s instanceof IntervalXYDataSeriesArray) { + // interval (aka histogram) + IntervalXYDataSeriesArray series = (IntervalXYDataSeriesArray) s; + clientAxes.add(makeSourceDescriptor(series.getX(), SourceType.X, xAxis)); + clientAxes.add(makeSourceDescriptor(series.getStartX(), SourceType.X_LOW, xAxis)); + clientAxes.add(makeSourceDescriptor(series.getEndX(), SourceType.X_HIGH, xAxis)); + clientAxes.add(makeSourceDescriptor(series.getY(), SourceType.Y, yAxis)); + clientAxes.add(makeSourceDescriptor(series.getStartY(), SourceType.Y_LOW, yAxis)); + clientAxes.add(makeSourceDescriptor(series.getEndY(), SourceType.Y_HIGH, yAxis)); + } else if (s instanceof XYErrorBarDataSeriesArray) { + // errorbar x, xy + XYErrorBarDataSeriesArray series = (XYErrorBarDataSeriesArray) s; + clientAxes.add(makeSourceDescriptor(series.getX(), SourceType.X, xAxis)); + clientAxes.add(makeSourceDescriptor(series.getXLow(), SourceType.X_LOW, xAxis)); + clientAxes.add(makeSourceDescriptor(series.getXHigh(), SourceType.X_HIGH, xAxis)); + clientAxes.add(makeSourceDescriptor(series.getY(), SourceType.Y, yAxis)); + clientAxes.add(makeSourceDescriptor(series.getYLow(), SourceType.Y_LOW, yAxis)); + clientAxes.add(makeSourceDescriptor(series.getYHigh(), SourceType.Y_HIGH, yAxis)); + } else if (s instanceof OHLCDataSeriesArray) { + OHLCDataSeriesArray series = (OHLCDataSeriesArray) s; + clientAxes.add(makeSourceDescriptor(series.getTime(), SourceType.TIME, xAxis)); + 
clientAxes.add(makeSourceDescriptor(series.getOpen(), SourceType.OPEN, yAxis)); + clientAxes.add(makeSourceDescriptor(series.getClose(), SourceType.CLOSE, yAxis)); + clientAxes.add(makeSourceDescriptor(series.getHigh(), SourceType.HIGH, yAxis)); + clientAxes.add(makeSourceDescriptor(series.getLow(), SourceType.LOW, yAxis)); + } else if (s instanceof XYDataSeriesArray) { + // xy of some other kind + XYDataSeriesArray series = (XYDataSeriesArray) s; + clientAxes.add(makeSourceDescriptor(series.getX(), SourceType.X, xAxis)); + clientAxes.add(makeSourceDescriptor(series.getY(), SourceType.Y, yAxis)); + } else { + // warn about other unsupported series types + errorList.add("OpenAPI presently does not support series of type " + s.getClass()); + } + + // TODO color label size shape + } else if (s instanceof AbstractCategoryDataSeries) { + if (s instanceof CategoryDataSeriesTableMap) {// bar and pie from a table + CategoryDataSeriesTableMap series = (CategoryDataSeriesTableMap) s; + clientAxes + .add(makeSourceDescriptor(series.getTableHandle(), series.getCategoryCol(), + catAxis == xAxis ? SourceType.X : SourceType.Y, catAxis)); + clientAxes.add(makeSourceDescriptor(series.getTableHandle(), series.getValueCol(), + numAxis == xAxis ? SourceType.X : SourceType.Y, numAxis)); + } else if (s instanceof CategoryDataSeriesSwappableTableMap) { + CategoryDataSeriesSwappableTableMap series = + (CategoryDataSeriesSwappableTableMap) s; + + clientAxes.add( + makeSourceDescriptor(series.getSwappableTable(), series.getCategoryCol(), + catAxis == xAxis ? SourceType.X : SourceType.Y, catAxis)); + clientAxes.add( + makeSourceDescriptor(series.getSwappableTable(), series.getNumericCol(), + numAxis == xAxis ? 
SourceType.X : SourceType.Y, numAxis)); + + } else if (s instanceof CategoryDataSeriesMap) {// bar and plot from constant data + errorList.add("OpenAPI presently does not support series of type " + s.getClass()); + } + // TODO color label size shape } - // TODO color label size shape - } else if (s instanceof AbstractCategoryDataSeries) { - if (s instanceof CategoryDataSeriesTableMap) {// bar and pie from a - // table - CategoryDataSeriesTableMap series = (CategoryDataSeriesTableMap) s; - clientAxes.add(makeSourceDescriptor(series.getTableHandle(), - series.getCategoryCol(), - catAxis == xAxis ? SourceType.X : SourceType.Y, catAxis)); - clientAxes.add(makeSourceDescriptor(series.getTableHandle(), - series.getValueCol(), - numAxis == xAxis ? SourceType.X : SourceType.Y, numAxis)); - } else if (s instanceof CategoryDataSeriesSwappableTableMap) { - CategoryDataSeriesSwappableTableMap series = - (CategoryDataSeriesSwappableTableMap) s; - - clientAxes.add(makeSourceDescriptor(series.getSwappableTable(), - series.getCategoryCol(), - catAxis == xAxis ? SourceType.X : SourceType.Y, catAxis)); - clientAxes.add(makeSourceDescriptor(series.getSwappableTable(), - series.getNumericCol(), - numAxis == xAxis ? 
SourceType.X : SourceType.Y, numAxis)); - - } else if (s instanceof CategoryDataSeriesMap) {// bar and plot from - // constant data - errorList.add("OpenAPI presently does not support series of type " - + s.getClass()); + clientSeries.addAllDataSources(clientAxes.build().collect(Collectors.toList())); + clientSeriesCollection.add(clientSeries.build()); + } else if (seriesInternal instanceof AbstractMultiSeries) { + AbstractMultiSeries multiSeries = (AbstractMultiSeries) seriesInternal; + + MultiSeriesDescriptor.Builder clientSeries = MultiSeriesDescriptor.newBuilder(); + clientSeries.setPlotStyle(SeriesPlotStyle.valueOf(axesImpl.getPlotStyle().name())); + clientSeries.setName(String.valueOf(seriesInternal.name())); + + Stream.Builder clientAxes = Stream.builder(); + + + if (multiSeries instanceof AbstractTableMapHandleMultiSeries) { + AbstractTableMapHandleMultiSeries tableMapMultiSeries = + (AbstractTableMapHandleMultiSeries) multiSeries; + int plotHandleId = tableMapMultiSeries.getTableMapHandle().id(); + + if (tableMapMultiSeries instanceof MultiXYSeries) { + MultiXYSeries multiXYSeries = (MultiXYSeries) tableMapMultiSeries; + clientAxes.add(makeTableMapSourceDescriptor(plotHandleId, multiXYSeries.getXCol(), + SourceType.X, xAxis)); + clientAxes.add(makeTableMapSourceDescriptor(plotHandleId, multiXYSeries.getYCol(), + SourceType.Y, yAxis)); + clientSeries.setLineColor(stringMapWithDefault(mergeColors( + multiXYSeries.lineColorSeriesNameTointMap(), + multiXYSeries.lineColorSeriesNameToStringMap(), + multiXYSeries.lineColorSeriesNameToPaintMap()))); + clientSeries.setPointColor(stringMapWithDefault(mergeColors( + multiXYSeries.pointColorSeriesNameTointMap(), + multiXYSeries.pointColorSeriesNameToStringMap(), + multiXYSeries.pointColorSeriesNameToPaintMap()))); + clientSeries.setLinesVisible( + boolMapWithDefault(multiXYSeries.linesVisibleSeriesNameToBooleanMap())); + clientSeries.setPointsVisible( + 
boolMapWithDefault(multiXYSeries.pointsVisibleSeriesNameToBooleanMap())); + clientSeries.setGradientVisible( + boolMapWithDefault(multiXYSeries.gradientVisibleSeriesNameTobooleanMap())); + clientSeries.setPointLabelFormat(stringMapWithDefault( + multiXYSeries.pointLabelFormatSeriesNameToStringMap())); + clientSeries.setXToolTipPattern( + stringMapWithDefault(multiXYSeries.xToolTipPatternSeriesNameToStringMap())); + clientSeries.setYToolTipPattern( + stringMapWithDefault(multiXYSeries.yToolTipPatternSeriesNameToStringMap())); + clientSeries.setPointLabel(stringMapWithDefault( + multiXYSeries.pointColorSeriesNameToStringMap(), Objects::toString)); + clientSeries.setPointSize(doubleMapWithDefault( + multiXYSeries.pointSizeSeriesNameToNumberMap(), + number -> number == null ? null : number.doubleValue())); + + clientSeries.setPointShape(stringMapWithDefault(mergeShapes( + multiXYSeries.pointShapeSeriesNameToStringMap(), + multiXYSeries.pointShapeSeriesNameToShapeMap()))); + } else if (tableMapMultiSeries instanceof MultiCatSeries) { + MultiCatSeries multiCatSeries = (MultiCatSeries) tableMapMultiSeries; + clientAxes.add( + makeTableMapSourceDescriptor(plotHandleId, multiCatSeries.getCategoryCol(), + catAxis == xAxis ? SourceType.X : SourceType.Y, catAxis)); + clientAxes.add( + makeTableMapSourceDescriptor(plotHandleId, multiCatSeries.getNumericCol(), + numAxis == xAxis ? 
SourceType.X : SourceType.Y, numAxis)); + clientSeries.setLineColor(stringMapWithDefault(mergeColors( + multiCatSeries.lineColorSeriesNameTointMap(), + multiCatSeries.lineColorSeriesNameToStringMap(), + multiCatSeries.lineColorSeriesNameToPaintMap()))); + clientSeries.setPointColor(stringMapWithDefault(mergeColors( + multiCatSeries.pointColorSeriesNameTointMap(), + multiCatSeries.pointColorSeriesNameToStringMap(), + multiCatSeries.pointColorSeriesNameToPaintMap()))); + clientSeries.setLinesVisible( + boolMapWithDefault(multiCatSeries.linesVisibleSeriesNameToBooleanMap())); + clientSeries.setPointsVisible( + boolMapWithDefault(multiCatSeries.pointsVisibleSeriesNameToBooleanMap())); + clientSeries.setGradientVisible( + boolMapWithDefault(multiCatSeries.gradientVisibleSeriesNameTobooleanMap())); + clientSeries.setPointLabelFormat(stringMapWithDefault( + multiCatSeries.pointLabelFormatSeriesNameToStringMap())); + clientSeries.setXToolTipPattern(stringMapWithDefault( + multiCatSeries.xToolTipPatternSeriesNameToStringMap())); + clientSeries.setYToolTipPattern(stringMapWithDefault( + multiCatSeries.yToolTipPatternSeriesNameToStringMap())); + clientSeries.setPointLabel(stringMapWithDefault( + multiCatSeries.pointLabelSeriesNameToObjectMap(), Objects::toString)); + clientSeries.setPointSize(doubleMapWithDefault( + multiCatSeries.pointSizeSeriesNameToNumberMap(), + number -> number == null ? 
null : number.doubleValue())); + + clientSeries.setPointShape(stringMapWithDefault(mergeShapes( + multiCatSeries.pointShapeSeriesNameToStringMap(), + multiCatSeries.pointShapeSeriesNameToShapeMap()))); + } + } else { + errorList.add( + "OpenAPI presently does not support series of type " + multiSeries.getClass()); } - // TODO color label size shape - } - clientSeries - .addAllDataSources(clientAxes.build().collect(Collectors.toList())); - clientSeriesCollection.add(clientSeries.build()); - } else if (seriesInternal instanceof AbstractMultiSeries) { - AbstractMultiSeries multiSeries = (AbstractMultiSeries) seriesInternal; - - MultiSeriesDescriptor.Builder clientSeries = - MultiSeriesDescriptor.newBuilder(); - clientSeries - .setPlotStyle(SeriesPlotStyle.valueOf(axesImpl.getPlotStyle().name())); - clientSeries.setName(String.valueOf(seriesInternal.name())); - - Stream.Builder clientAxes = Stream.builder(); - - - if (multiSeries instanceof AbstractTableMapHandleMultiSeries) { - AbstractTableMapHandleMultiSeries tableMapMultiSeries = - (AbstractTableMapHandleMultiSeries) multiSeries; - int plotHandleId = tableMapMultiSeries.getTableMapHandle().id(); - - if (tableMapMultiSeries instanceof MultiXYSeries) { - MultiXYSeries multiXYSeries = (MultiXYSeries) tableMapMultiSeries; - clientAxes.add(makeTableMapSourceDescriptor(plotHandleId, - multiXYSeries.getXCol(), SourceType.X, xAxis)); - clientAxes.add(makeTableMapSourceDescriptor(plotHandleId, - multiXYSeries.getYCol(), SourceType.Y, yAxis)); - clientSeries.setLineColor(stringMapWithDefault(mergeColors( - multiXYSeries.lineColorSeriesNameTointMap(), - multiXYSeries.lineColorSeriesNameToStringMap(), - multiXYSeries.lineColorSeriesNameToPaintMap()))); - clientSeries.setPointColor(stringMapWithDefault(mergeColors( - multiXYSeries.pointColorSeriesNameTointMap(), - multiXYSeries.pointColorSeriesNameToStringMap(), - multiXYSeries.pointColorSeriesNameToPaintMap()))); - clientSeries.setLinesVisible(boolMapWithDefault( - 
multiXYSeries.linesVisibleSeriesNameToBooleanMap())); - clientSeries.setPointsVisible(boolMapWithDefault( - multiXYSeries.pointsVisibleSeriesNameToBooleanMap())); - clientSeries.setGradientVisible(boolMapWithDefault( - multiXYSeries.gradientVisibleSeriesNameTobooleanMap())); - clientSeries.setPointLabelFormat(stringMapWithDefault( - multiXYSeries.pointLabelFormatSeriesNameToStringMap())); - clientSeries.setXToolTipPattern(stringMapWithDefault( - multiXYSeries.xToolTipPatternSeriesNameToStringMap())); - clientSeries.setYToolTipPattern(stringMapWithDefault( - multiXYSeries.yToolTipPatternSeriesNameToStringMap())); - clientSeries.setPointLabel(stringMapWithDefault( - multiXYSeries.pointColorSeriesNameToStringMap(), - Objects::toString)); - clientSeries.setPointSize(doubleMapWithDefault( - multiXYSeries.pointSizeSeriesNameToNumberMap(), - number -> number == null ? null : number.doubleValue())); - - clientSeries.setPointShape(stringMapWithDefault(mergeShapes( - multiXYSeries.pointShapeSeriesNameToStringMap(), - multiXYSeries.pointShapeSeriesNameToShapeMap()))); - } else if (tableMapMultiSeries instanceof MultiCatSeries) { - MultiCatSeries multiCatSeries = - (MultiCatSeries) tableMapMultiSeries; - clientAxes.add(makeTableMapSourceDescriptor(plotHandleId, - multiCatSeries.getCategoryCol(), - catAxis == xAxis ? SourceType.X : SourceType.Y, catAxis)); - clientAxes.add(makeTableMapSourceDescriptor(plotHandleId, - multiCatSeries.getNumericCol(), - numAxis == xAxis ? 
SourceType.X : SourceType.Y, numAxis)); - clientSeries.setLineColor(stringMapWithDefault(mergeColors( - multiCatSeries.lineColorSeriesNameTointMap(), - multiCatSeries.lineColorSeriesNameToStringMap(), - multiCatSeries.lineColorSeriesNameToPaintMap()))); - clientSeries.setPointColor(stringMapWithDefault(mergeColors( - multiCatSeries.pointColorSeriesNameTointMap(), - multiCatSeries.pointColorSeriesNameToStringMap(), - multiCatSeries.pointColorSeriesNameToPaintMap()))); - clientSeries.setLinesVisible(boolMapWithDefault( - multiCatSeries.linesVisibleSeriesNameToBooleanMap())); - clientSeries.setPointsVisible(boolMapWithDefault( - multiCatSeries.pointsVisibleSeriesNameToBooleanMap())); - clientSeries.setGradientVisible(boolMapWithDefault( - multiCatSeries.gradientVisibleSeriesNameTobooleanMap())); - clientSeries.setPointLabelFormat(stringMapWithDefault( - multiCatSeries.pointLabelFormatSeriesNameToStringMap())); - clientSeries.setXToolTipPattern(stringMapWithDefault( - multiCatSeries.xToolTipPatternSeriesNameToStringMap())); - clientSeries.setYToolTipPattern(stringMapWithDefault( - multiCatSeries.yToolTipPatternSeriesNameToStringMap())); - clientSeries.setPointLabel(stringMapWithDefault( - multiCatSeries.pointLabelSeriesNameToObjectMap(), - Objects::toString)); - clientSeries.setPointSize(doubleMapWithDefault( - multiCatSeries.pointSizeSeriesNameToNumberMap(), - number -> number == null ? 
null : number.doubleValue())); - - clientSeries.setPointShape(stringMapWithDefault(mergeShapes( - multiCatSeries.pointShapeSeriesNameToStringMap(), - multiCatSeries.pointShapeSeriesNameToShapeMap()))); - } + clientSeries.addAllDataSources(clientAxes.build().collect(Collectors.toList())); + + clientMultiSeriesCollection.add(clientSeries.build()); } else { - errorList.add("OpenAPI presently does not support series of type " - + multiSeries.getClass()); + errorList.add( + "OpenAPI presently does not support series of type " + seriesInternal.getClass()); + // TODO handle multi-series, possibly transformed case? } - - clientSeries - .addAllDataSources(clientAxes.build().collect(Collectors.toList())); - - clientMultiSeriesCollection.add(clientSeries.build()); - } else { - errorList.add("OpenAPI presently does not support series of type " - + seriesInternal.getClass()); - // TODO handle multi-series, possibly transformed case? - } - }); + }); }); clientChart.addAllSeries(clientSeriesCollection.build().collect(Collectors.toList())); - clientChart - .addAllMultiSeries(clientMultiSeriesCollection.build().collect(Collectors.toList())); + clientChart.addAllMultiSeries(clientMultiSeriesCollection.build().collect(Collectors.toList())); - clientChart.setChartType( - FigureDescriptor.ChartDescriptor.ChartType.valueOf(chart.getChartType().name())); + clientChart.setChartType(FigureDescriptor.ChartDescriptor.ChartType.valueOf(chart.getChartType().name())); clientChart.setColspan(chart.colSpan()); assignOptionalField(toCssColorString(chart.getLegendColor()), clientChart::setLegendColor, - clientChart::clearLegendColor); - assignOptionalField(toCssFont(chart.getLegendFont()), clientChart::setLegendFont, - clientChart::clearLegendFont); + clientChart::clearLegendColor); + assignOptionalField(toCssFont(chart.getLegendFont()), clientChart::setLegendFont, clientChart::clearLegendFont); clientChart.setRowspan(chart.rowSpan()); clientChart.setShowLegend(chart.isShowLegend()); 
assignOptionalField(chart.getTitle(), clientChart::setTitle, clientChart::clearTitle); assignOptionalField(toCssColorString(chart.getTitleColor()), clientChart::setTitleColor, - clientChart::clearTitleColor); - assignOptionalField(toCssFont(chart.getTitleFont()), clientChart::setTitleFont, - clientChart::clearTitleFont); + clientChart::clearTitleColor); + assignOptionalField(toCssFont(chart.getTitleFont()), clientChart::setTitleFont, clientChart::clearTitleFont); return clientChart.build(); } @NotNull - private BusinessCalendarDescriptor translateBusinessCalendar( - AxisTransformBusinessCalendar axisTransform) { + private BusinessCalendarDescriptor translateBusinessCalendar(AxisTransformBusinessCalendar axisTransform) { final BusinessCalendar businessCalendar = axisTransform.getBusinessCalendar(); - final BusinessCalendarDescriptor.Builder businessCalendarDescriptor = - BusinessCalendarDescriptor.newBuilder(); + final BusinessCalendarDescriptor.Builder businessCalendarDescriptor = BusinessCalendarDescriptor.newBuilder(); businessCalendarDescriptor.setName(businessCalendar.name()); businessCalendarDescriptor.setTimeZone(businessCalendar.timeZone().getTimeZone().getID()); Arrays.stream(BusinessCalendarDescriptor.DayOfWeek.values()).filter(dayOfWeek -> { @@ -524,55 +477,50 @@ private BusinessCalendarDescriptor translateBusinessCalendar( return businessPeriod; }).forEach(businessCalendarDescriptor::addBusinessPeriods); - businessCalendar.getHolidays().entrySet().stream() - .sorted(Comparator.comparing(Map.Entry::getKey)).map(entry -> { - final LocalDate.Builder localDate = LocalDate.newBuilder(); - localDate.setYear(entry.getKey().getYear()); - localDate.setMonth(entry.getKey().getMonthValue()); - localDate.setDay(entry.getKey().getDayOfMonth()); - final Holiday.Builder holiday = Holiday.newBuilder(); - Arrays.stream(entry.getValue().getBusinessPeriods()).map(bp -> { - final String open = - HOLIDAY_TIME_FORMAT.withZone(businessCalendar.timeZone().getTimeZone()) - 
.print(bp.getStartTime().getMillis()); - final String close = - HOLIDAY_TIME_FORMAT.withZone(businessCalendar.timeZone().getTimeZone()) - .print(bp.getEndTime().getMillis()); - final BusinessPeriod.Builder businessPeriod = BusinessPeriod.newBuilder(); - businessPeriod.setOpen(open); - businessPeriod.setClose(close); - return businessPeriod; - }).forEach(holiday::addBusinessPeriods); - holiday.setDate(localDate); - return holiday.build(); - }).forEach(businessCalendarDescriptor::addHolidays); + businessCalendar.getHolidays().entrySet().stream().sorted(Comparator.comparing(Map.Entry::getKey)) + .map(entry -> { + final LocalDate.Builder localDate = LocalDate.newBuilder(); + localDate.setYear(entry.getKey().getYear()); + localDate.setMonth(entry.getKey().getMonthValue()); + localDate.setDay(entry.getKey().getDayOfMonth()); + final Holiday.Builder holiday = Holiday.newBuilder(); + Arrays.stream(entry.getValue().getBusinessPeriods()).map(bp -> { + final String open = HOLIDAY_TIME_FORMAT.withZone(businessCalendar.timeZone().getTimeZone()) + .print(bp.getStartTime().getMillis()); + final String close = HOLIDAY_TIME_FORMAT.withZone(businessCalendar.timeZone().getTimeZone()) + .print(bp.getEndTime().getMillis()); + final BusinessPeriod.Builder businessPeriod = BusinessPeriod.newBuilder(); + businessPeriod.setOpen(open); + businessPeriod.setClose(close); + return businessPeriod; + }).forEach(holiday::addBusinessPeriods); + holiday.setDate(localDate); + return holiday.build(); + }).forEach(businessCalendarDescriptor::addHolidays); return businessCalendarDescriptor.build(); } private PlotUtils.HashMapWithDefault mergeShapes( - PlotUtils.HashMapWithDefault strings, - PlotUtils.HashMapWithDefault shapes) { - PlotUtils.HashMapWithDefault result = - Stream.of(strings.keySet(), shapes.keySet()) + PlotUtils.HashMapWithDefault strings, PlotUtils.HashMapWithDefault shapes) { + PlotUtils.HashMapWithDefault result = Stream.of(strings.keySet(), shapes.keySet()) .flatMap(Set::stream) 
.distinct() .collect(Collectors.toMap( - Comparable::toString, - key -> Objects.requireNonNull( - mergeShape( - strings.get(key), - shapes.get(key)), - "key " + key + " had nulls in both shape maps"), - (s, s2) -> { - if (!s.equals(s2)) { - throw new IllegalStateException( - "More than one value possible for a given key: " + s + " and " - + s2); - } - return s; - }, - PlotUtils.HashMapWithDefault::new)); + Comparable::toString, + key -> Objects.requireNonNull( + mergeShape( + strings.get(key), + shapes.get(key)), + "key " + key + " had nulls in both shape maps"), + (s, s2) -> { + if (!s.equals(s2)) { + throw new IllegalStateException( + "More than one value possible for a given key: " + s + " and " + s2); + } + return s; + }, + PlotUtils.HashMapWithDefault::new)); result.setDefault(mergeShape(strings.getDefault(), shapes.getDefault())); return result; } @@ -598,38 +546,36 @@ private String mergeShape(String string, Shape shape) { * Merges the three maps into one, using CSS color strings, including default values */ private PlotUtils.HashMapWithDefault mergeColors( - PlotUtils.HashMapWithDefault seriesNameTointMap, - PlotUtils.HashMapWithDefault seriesNameToStringMap, - PlotUtils.HashMapWithDefault seriesNameToPaintMap) { - PlotUtils.HashMapWithDefault result = Stream - .of(seriesNameTointMap.keySet(), seriesNameToStringMap.keySet(), - seriesNameToPaintMap.keySet()) - .flatMap(Set::stream) - .distinct() - .collect(Collectors.toMap( - Comparable::toString, - key -> Objects.requireNonNull( - mergeCssColor( - seriesNameTointMap.get(key), - seriesNameToStringMap.get(key), - seriesNameToPaintMap.get(key)), - "key " + key + " had nulls in all three color maps"), - (s, s2) -> { - if (!s.equals(s2)) { - throw new IllegalStateException( - "More than one value possible for a given key: " + s + " and " + s2); - } - return s; - }, - PlotUtils.HashMapWithDefault::new)); + PlotUtils.HashMapWithDefault seriesNameTointMap, + PlotUtils.HashMapWithDefault seriesNameToStringMap, + 
PlotUtils.HashMapWithDefault seriesNameToPaintMap) { + PlotUtils.HashMapWithDefault result = + Stream.of(seriesNameTointMap.keySet(), seriesNameToStringMap.keySet(), seriesNameToPaintMap.keySet()) + .flatMap(Set::stream) + .distinct() + .collect(Collectors.toMap( + Comparable::toString, + key -> Objects.requireNonNull( + mergeCssColor( + seriesNameTointMap.get(key), + seriesNameToStringMap.get(key), + seriesNameToPaintMap.get(key)), + "key " + key + " had nulls in all three color maps"), + (s, s2) -> { + if (!s.equals(s2)) { + throw new IllegalStateException( + "More than one value possible for a given key: " + s + " and " + s2); + } + return s; + }, + PlotUtils.HashMapWithDefault::new)); // if a "higher precedence" map has a default, it overrides the other defaults - result.setDefault(mergeCssColor(seriesNameTointMap.getDefault(), - seriesNameToStringMap.getDefault(), seriesNameToPaintMap.getDefault())); + result.setDefault(mergeCssColor(seriesNameTointMap.getDefault(), seriesNameToStringMap.getDefault(), + seriesNameToPaintMap.getDefault())); return result; } - private String mergeCssColor(Integer intColor, String strColor, - io.deephaven.gui.color.Paint paintColor) { + private String mergeCssColor(Integer intColor, String strColor, io.deephaven.gui.color.Paint paintColor) { if (paintColor != null) { String candidate = toCssColorString(paintColor); if (candidate != null) { @@ -637,8 +583,8 @@ private String mergeCssColor(Integer intColor, String strColor, } // otherwise failed to be translated, lets at least try the others } if (strColor != null) { - // lean on Color's translation. We know toCssColorString won't fail us here, since we're - // explicitly passing in a Color + // lean on Color's translation. 
We know toCssColorString won't fail us here, since we're explicitly passing + // in a Color return toCssColorString(new io.deephaven.gui.color.Color(strColor)); } if (intColor != null) { @@ -648,53 +594,45 @@ private String mergeCssColor(Integer intColor, String strColor, } private StringMapWithDefault stringMapWithDefault( - PlotUtils.HashMapWithDefault, String> map) { + PlotUtils.HashMapWithDefault, String> map) { return stringMapWithDefault(map, Function.identity()); } - private StringMapWithDefault stringMapWithDefault( - PlotUtils.HashMapWithDefault, T> map, - Function mappingFunc) { + private StringMapWithDefault stringMapWithDefault(PlotUtils.HashMapWithDefault, T> map, + Function mappingFunc) { StringMapWithDefault.Builder result = StringMapWithDefault.newBuilder(); result.setDefaultString(mappingFunc.apply(map.getDefault())); LinkedHashMap, T> ordered = new LinkedHashMap<>(map); - result.addAllKeys( - ordered.keySet().stream().map(Comparable::toString).collect(Collectors.toList())); - result - .addAllValues(ordered.values().stream().map(mappingFunc).collect(Collectors.toList())); + result.addAllKeys(ordered.keySet().stream().map(Comparable::toString).collect(Collectors.toList())); + result.addAllValues(ordered.values().stream().map(mappingFunc).collect(Collectors.toList())); return result.build(); } private DoubleMapWithDefault doubleMapWithDefault( - PlotUtils.HashMapWithDefault, Double> map) { + PlotUtils.HashMapWithDefault, Double> map) { return doubleMapWithDefault(map, Function.identity()); } - private DoubleMapWithDefault doubleMapWithDefault( - PlotUtils.HashMapWithDefault, T> map, - Function mappingFunc) { + private DoubleMapWithDefault doubleMapWithDefault(PlotUtils.HashMapWithDefault, T> map, + Function mappingFunc) { DoubleMapWithDefault.Builder result = DoubleMapWithDefault.newBuilder(); result.setDefaultDouble(mappingFunc.apply(map.getDefault())); LinkedHashMap, T> ordered = new LinkedHashMap<>(map); - result.addAllKeys( - 
ordered.keySet().stream().map(Comparable::toString).collect(Collectors.toList())); - result - .addAllValues(ordered.values().stream().map(mappingFunc).collect(Collectors.toList())); + result.addAllKeys(ordered.keySet().stream().map(Comparable::toString).collect(Collectors.toList())); + result.addAllValues(ordered.values().stream().map(mappingFunc).collect(Collectors.toList())); return result.build(); } - private BoolMapWithDefault boolMapWithDefault( - PlotUtils.HashMapWithDefault, Boolean> map) { + private BoolMapWithDefault boolMapWithDefault(PlotUtils.HashMapWithDefault, Boolean> map) { BoolMapWithDefault.Builder result = BoolMapWithDefault.newBuilder(); LinkedHashMap, Boolean> ordered = new LinkedHashMap<>(map); - result.addAllKeys( - ordered.keySet().stream().map(Comparable::toString).collect(Collectors.toList())); + result.addAllKeys(ordered.keySet().stream().map(Comparable::toString).collect(Collectors.toList())); result.addAllValues(new ArrayList<>(ordered.values())); return result.build(); } - private MultiSeriesSourceDescriptor makeTableMapSourceDescriptor(int plotHandleId, - String columnName, SourceType sourceType, AxisDescriptor axis) { + private MultiSeriesSourceDescriptor makeTableMapSourceDescriptor(int plotHandleId, String columnName, + SourceType sourceType, AxisDescriptor axis) { MultiSeriesSourceDescriptor.Builder source = MultiSeriesSourceDescriptor.newBuilder(); source.setAxisId(axis.getId()); source.setType(sourceType); @@ -703,8 +641,8 @@ private MultiSeriesSourceDescriptor makeTableMapSourceDescriptor(int plotHandleI return source.build(); } - private SourceDescriptor makeSourceDescriptor(TableHandle tableHandle, String columnName, - SourceType sourceType, AxisDescriptor axis) { + private SourceDescriptor makeSourceDescriptor(TableHandle tableHandle, String columnName, SourceType sourceType, + AxisDescriptor axis) { SourceDescriptor.Builder source = SourceDescriptor.newBuilder(); source.setColumnName(columnName); @@ -716,60 +654,56 @@ 
private SourceDescriptor makeSourceDescriptor(TableHandle tableHandle, String co } private SourceDescriptor makeSourceDescriptor(SwappableTable swappableTable, String columnName, - SourceType sourceType, AxisDescriptor axis) { + SourceType sourceType, AxisDescriptor axis) { SourceDescriptor.Builder source = SourceDescriptor.newBuilder(); source.setAxisId(axis.getId()); source.setType(sourceType); if (swappableTable instanceof SwappableTableOneClickAbstract) { - SwappableTableOneClickAbstract oneClick = - (SwappableTableOneClickAbstract) swappableTable; + SwappableTableOneClickAbstract oneClick = (SwappableTableOneClickAbstract) swappableTable; source.setColumnName(columnName); - source.setColumnType(swappableTable.getTableDefinition().getColumn(columnName) - .getDataType().getCanonicalName()); + source.setColumnType( + swappableTable.getTableDefinition().getColumn(columnName).getDataType().getCanonicalName()); source.setTableMapId(oneClick.getTableMapHandle().id()); source.setOneClick(makeOneClick(oneClick)); } else { - errorList.add("OpenAPI does not presently support swappable table of type " - + swappableTable.getClass()); + errorList.add("OpenAPI does not presently support swappable table of type " + swappableTable.getClass()); } return source.build(); } private SourceDescriptor makeSourceDescriptor(IndexableNumericData data, SourceType sourceType, - AxisDescriptor axis) { + AxisDescriptor axis) { SourceDescriptor.Builder source = SourceDescriptor.newBuilder(); source.setAxisId(axis.getId()); source.setType(sourceType); if (data instanceof IndexableNumericDataTable) { - ColumnHandlerFactory.ColumnHandler columnHandler = - ((IndexableNumericDataTable) data).getColumnHandler(); + ColumnHandlerFactory.ColumnHandler columnHandler = ((IndexableNumericDataTable) data).getColumnHandler(); source.setColumnName(columnHandler.getColumnName()); source.setTableId(tablePositionMap.get(columnHandler.getTableHandle())); } else if (data instanceof 
IndexableNumericDataSwappableTable) { - IndexableNumericDataSwappableTable swappableTable = - (IndexableNumericDataSwappableTable) data; + IndexableNumericDataSwappableTable swappableTable = (IndexableNumericDataSwappableTable) data; if (swappableTable.getSwappableTable() instanceof SwappableTableOneClickAbstract) { SwappableTableOneClickAbstract oneClick = - (SwappableTableOneClickAbstract) swappableTable.getSwappableTable(); + (SwappableTableOneClickAbstract) swappableTable.getSwappableTable(); if (oneClick instanceof SwappableTableOneClickMap - && ((SwappableTableOneClickMap) oneClick).getTransform() != null) { + && ((SwappableTableOneClickMap) oneClick).getTransform() != null) { errorList.add( - "OpenAPI does not presently support swappable tables that also use transform functions"); + "OpenAPI does not presently support swappable tables that also use transform functions"); return source.build(); } source.setColumnName(swappableTable.getColumn()); source.setColumnType(swappableTable.getSwappableTable().getTableDefinition() - .getColumn(swappableTable.getColumn()).getDataType().getCanonicalName()); + .getColumn(swappableTable.getColumn()).getDataType().getCanonicalName()); source.setTableMapId(oneClick.getTableMapHandle().id()); source.setOneClick(makeOneClick(oneClick)); } else { errorList.add("OpenAPI does not presently support swappable table of type " - + swappableTable.getSwappableTable().getClass()); + + swappableTable.getSwappableTable().getClass()); } } else { @@ -783,10 +717,10 @@ private OneClickDescriptor makeOneClick(SwappableTableOneClickAbstract swappable OneClickDescriptor.Builder oneClick = OneClickDescriptor.newBuilder(); oneClick.addAllColumns(swappableTable.getByColumns()); oneClick.addAllColumnTypes(swappableTable.getByColumns() - .stream() - .map(colName -> swappableTable.getTableMapHandle().getTableDefinition() - .getColumn(colName).getDataType().getCanonicalName()) - .collect(Collectors.toList())); + .stream() + .map(colName -> 
swappableTable.getTableMapHandle().getTableDefinition().getColumn(colName).getDataType() + .getCanonicalName()) + .collect(Collectors.toList())); oneClick.setRequireAllFiltersToDisplay(swappableTable.isRequireAllFiltersToDisplay()); return oneClick.build(); } diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/arrow/ArrowFlightUtil.java b/grpc-api/src/main/java/io/deephaven/grpc_api/arrow/ArrowFlightUtil.java index 8e318b19fdd..68ce68509f9 100644 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/arrow/ArrowFlightUtil.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/arrow/ArrowFlightUtil.java @@ -61,33 +61,26 @@ public class ArrowFlightUtil { public static final ChunkInputStreamGenerator.Options DEFAULT_SER_OPTIONS = - new ChunkInputStreamGenerator.Options.Builder().build(); + new ChunkInputStreamGenerator.Options.Builder().build(); public static final int DEFAULT_UPDATE_INTERVAL_MS = - Configuration.getInstance().getIntegerWithDefault("barrage.updateInterval", 1000); + Configuration.getInstance().getIntegerWithDefault("barrage.updateInterval", 1000); private static final int TAG_TYPE_BITS = 3; - private static final BarrageMessage.ModColumnData[] ZERO_MOD_COLUMNS = - new BarrageMessage.ModColumnData[0]; + private static final BarrageMessage.ModColumnData[] ZERO_MOD_COLUMNS = new BarrageMessage.ModColumnData[0]; - public static final int BODY_TAG = (Flight.FlightData.DATA_BODY_FIELD_NUMBER << TAG_TYPE_BITS) - | WireFormat.WIRETYPE_LENGTH_DELIMITED; + public static final int BODY_TAG = + (Flight.FlightData.DATA_BODY_FIELD_NUMBER << TAG_TYPE_BITS) | WireFormat.WIRETYPE_LENGTH_DELIMITED; public static final int DATA_HEADER_TAG = - (Flight.FlightData.DATA_HEADER_FIELD_NUMBER << TAG_TYPE_BITS) - | WireFormat.WIRETYPE_LENGTH_DELIMITED; + (Flight.FlightData.DATA_HEADER_FIELD_NUMBER << TAG_TYPE_BITS) | WireFormat.WIRETYPE_LENGTH_DELIMITED; public static final int APP_METADATA_TAG = - (Flight.FlightData.APP_METADATA_FIELD_NUMBER << TAG_TYPE_BITS) - | 
WireFormat.WIRETYPE_LENGTH_DELIMITED; + (Flight.FlightData.APP_METADATA_FIELD_NUMBER << TAG_TYPE_BITS) | WireFormat.WIRETYPE_LENGTH_DELIMITED; public static final int FLIGHT_DESCRIPTOR_TAG = - (Flight.FlightData.FLIGHT_DESCRIPTOR_FIELD_NUMBER << TAG_TYPE_BITS) - | WireFormat.WIRETYPE_LENGTH_DELIMITED; + (Flight.FlightData.FLIGHT_DESCRIPTOR_FIELD_NUMBER << TAG_TYPE_BITS) | WireFormat.WIRETYPE_LENGTH_DELIMITED; private static final Logger log = LoggerFactory.getLogger(ArrowFlightUtil.class); public static final class MessageInfo extends BrowserStream.MessageBase { - /** - * outer-most Arrow Flight Message that indicates the msg type (i.e. schema, record batch, - * etc) - */ + /** outer-most Arrow Flight Message that indicates the msg type (i.e. schema, record batch, etc) */ Message header = null; /** the embedded flatbuffer metadata indicating information about this batch */ BarrageMessageWrapper app_metadata = null; @@ -103,28 +96,24 @@ public static MessageInfo parseProtoMessage(final InputStream stream) throws IOE final CodedInputStream decoder = CodedInputStream.newInstance(stream); - // if we find a body tag we stop iterating through the loop as there should be no more tags - // after the body - // and we lazily drain the payload from the decoder (so the next bytes are payload and not a - // tag) + // if we find a body tag we stop iterating through the loop as there should be no more tags after the body + // and we lazily drain the payload from the decoder (so the next bytes are payload and not a tag) decodeLoop: for (int tag = decoder.readTag(); tag != 0; tag = decoder.readTag()) { final int size; switch (tag) { case DATA_HEADER_TAG: size = decoder.readRawVarint32(); - mi.header = - Message.getRootAsMessage(ByteBuffer.wrap(decoder.readRawBytes(size))); + mi.header = Message.getRootAsMessage(ByteBuffer.wrap(decoder.readRawBytes(size))); break; case APP_METADATA_TAG: size = decoder.readRawVarint32(); - mi.app_metadata = 
BarrageMessageWrapper.getRootAsBarrageMessageWrapper( - ByteBuffer.wrap(decoder.readRawBytes(size))); + mi.app_metadata = BarrageMessageWrapper + .getRootAsBarrageMessageWrapper(ByteBuffer.wrap(decoder.readRawBytes(size))); if (mi.app_metadata.magic() != BarrageStreamGenerator.FLATBUFFER_MAGIC) { log.error().append("received invalid magic").endl(); mi.app_metadata = null; } else { - mi.setSequenceInfo(mi.app_metadata.sequence(), - mi.app_metadata.halfCloseAfterMessage()); + mi.setSequenceInfo(mi.app_metadata.sequence(), mi.app_metadata.halfCloseAfterMessage()); } break; case FLIGHT_DESCRIPTOR_TAG: @@ -133,14 +122,14 @@ public static MessageInfo parseProtoMessage(final InputStream stream) throws IOE mi.descriptor = Flight.FlightDescriptor.parseFrom(bytes); break; case BODY_TAG: - // at this point, we're in the body, we will read it and then break, the rest of - // the payload should be the body + // at this point, we're in the body, we will read it and then break, the rest of the payload should + // be the body size = decoder.readRawVarint32(); // noinspection UnstableApiUsage mi.inputStream = new LittleEndianDataInputStream( - new BarrageProtoUtil.ObjectInputStreamAdapter(decoder, size)); - // we do not actually remove the content from our stream; prevent reading the - // next tag via a labeled break + new BarrageProtoUtil.ObjectInputStreamAdapter(decoder, size)); + // we do not actually remove the content from our stream; prevent reading the next tag via a labeled + // break break decodeLoop; default: @@ -149,11 +138,10 @@ public static MessageInfo parseProtoMessage(final InputStream stream) throws IOE } } - if (mi.header != null && mi.header.headerType() == MessageHeader.RecordBatch - && mi.inputStream == null) { + if (mi.header != null && mi.header.headerType() == MessageHeader.RecordBatch && mi.inputStream == null) { // noinspection UnstableApiUsage - mi.inputStream = new LittleEndianDataInputStream( - new 
ByteArrayInputStream(CollectionUtil.ZERO_LENGTH_BYTE_ARRAY)); + mi.inputStream = + new LittleEndianDataInputStream(new ByteArrayInputStream(CollectionUtil.ZERO_LENGTH_BYTE_ARRAY)); } return mi; @@ -163,7 +151,7 @@ public static MessageInfo parseProtoMessage(final InputStream stream) throws IOE * This is a stateful observer; a DoPut stream begins with its schema. */ public static class DoPutObserver extends SingletonLivenessManager - implements BrowserStream.Marshaller, StreamObserver, Closeable { + implements BrowserStream.Marshaller, StreamObserver, Closeable { private final SessionState session; private final TicketRouter ticketRouter; @@ -179,17 +167,16 @@ public static class DoPutObserver extends SingletonLivenessManager private ChunkInputStreamGenerator.Options options = DEFAULT_SER_OPTIONS; public DoPutObserver( - final SessionState session, - final TicketRouter ticketRouter, - final StreamObserver observer) { + final SessionState session, + final TicketRouter ticketRouter, + final StreamObserver observer) { this.session = session; this.ticketRouter = ticketRouter; this.observer = observer; this.session.addOnCloseCallback(this); if (observer instanceof ServerCallStreamObserver) { - ((ServerCallStreamObserver) observer) - .setOnCancelHandler(this::onCancel); + ((ServerCallStreamObserver) observer).setOnCancelHandler(this::onCancel); } } @@ -206,19 +193,18 @@ public void onMessageReceived(final MessageInfo mi) { if (mi.descriptor != null) { if (resultExportBuilder != null) { throw GrpcUtil.statusRuntimeException(Code.INVALID_ARGUMENT, - "Only one descriptor definition allowed"); + "Only one descriptor definition allowed"); } resultExportBuilder = ticketRouter - .
    publish(session, mi.descriptor) - .onError(observer::onError); + .
    publish(session, mi.descriptor) + .onError(observer::onError); manage(resultExportBuilder.getExport()); } if (mi.app_metadata != null - && mi.app_metadata.msgType() == BarrageMessageType.BarrageSerializationOptions) { - options = ChunkInputStreamGenerator.Options - .of(BarrageSerializationOptions.getRootAsBarrageSerializationOptions( - mi.app_metadata.msgPayloadAsByteBuffer())); + && mi.app_metadata.msgType() == BarrageMessageType.BarrageSerializationOptions) { + options = ChunkInputStreamGenerator.Options.of(BarrageSerializationOptions + .getRootAsBarrageSerializationOptions(mi.app_metadata.msgPayloadAsByteBuffer())); } if (mi.header == null) { @@ -232,7 +218,7 @@ public void onMessageReceived(final MessageInfo mi) { if (mi.header.headerType() != MessageHeader.RecordBatch) { throw GrpcUtil.statusRuntimeException(Code.INVALID_ARGUMENT, - "Only schema/record-batch messages supported"); + "Only schema/record-batch messages supported"); } final int numColumns = resultTable.getColumnSources().size(); @@ -240,19 +226,16 @@ public void onMessageReceived(final MessageInfo mi) { final RecordBatch batch = (RecordBatch) mi.header.header(new RecordBatch()); final Iterator fieldNodeIter = - new FlatBufferIteratorAdapter<>(batch.nodesLength(), - i -> new ChunkInputStreamGenerator.FieldNodeInfo(batch.nodes(i))); + new FlatBufferIteratorAdapter<>(batch.nodesLength(), + i -> new ChunkInputStreamGenerator.FieldNodeInfo(batch.nodes(i))); final TLongArrayList bufferInfo = new TLongArrayList(batch.buffersLength()); for (int bi = 0; bi < batch.buffersLength(); ++bi) { - int offset = - LongSizedDataStructure.intSize("BufferInfo", batch.buffers(bi).offset()); - int length = - LongSizedDataStructure.intSize("BufferInfo", batch.buffers(bi).length()); + int offset = LongSizedDataStructure.intSize("BufferInfo", batch.buffers(bi).offset()); + int length = LongSizedDataStructure.intSize("BufferInfo", batch.buffers(bi).length()); if (bi < batch.buffersLength() - 1) { - final int nextOffset 
= LongSizedDataStructure.intSize("BufferInfo", - batch.buffers(bi + 1).offset()); + final int nextOffset = LongSizedDataStructure.intSize("BufferInfo", batch.buffers(bi + 1).offset()); // our parsers handle overhanging buffers length += Math.max(0, nextOffset - offset - length); } @@ -264,32 +247,28 @@ public void onMessageReceived(final MessageInfo mi) { msg.shifted = IndexShiftData.EMPTY; // include all columns as add-columns - int numRowsAdded = - LongSizedDataStructure.intSize("RecordBatch.length()", batch.length()); + int numRowsAdded = LongSizedDataStructure.intSize("RecordBatch.length()", batch.length()); msg.addColumnData = new BarrageMessage.AddColumnData[numColumns]; for (int ci = 0; ci < numColumns; ++ci) { final BarrageMessage.AddColumnData acd = new BarrageMessage.AddColumnData(); msg.addColumnData[ci] = acd; try { - acd.data = ChunkInputStreamGenerator.extractChunkFromInputStream(options, - columnChunkTypes[ci], columnTypes[ci], fieldNodeIter, bufferInfoIter, - mi.inputStream); + acd.data = ChunkInputStreamGenerator.extractChunkFromInputStream(options, columnChunkTypes[ci], + columnTypes[ci], fieldNodeIter, bufferInfoIter, mi.inputStream); } catch (final IOException unexpected) { throw new UncheckedDeephavenException(unexpected); } if (acd.data.size() != numRowsAdded) { throw GrpcUtil.statusRuntimeException(Code.INVALID_ARGUMENT, - "Inconsistent num records per column: " + numRowsAdded + " != " - + acd.data.size()); + "Inconsistent num records per column: " + numRowsAdded + " != " + acd.data.size()); } acd.type = columnTypes[ci]; acd.componentType = componentTypes[ci]; } - msg.rowsAdded = Index.FACTORY.getIndexByRange(resultTable.size(), - resultTable.size() + numRowsAdded - 1); + msg.rowsAdded = Index.FACTORY.getIndexByRange(resultTable.size(), resultTable.size() + numRowsAdded - 1); msg.rowsIncluded = msg.rowsAdded.clone(); msg.modColumnData = ZERO_MOD_COLUMNS; @@ -330,11 +309,10 @@ public void onCompleted() { GrpcUtil.rpcWrapper(log, observer, () 
-> { if (resultExportBuilder == null) { throw GrpcUtil.statusRuntimeException(Code.INVALID_ARGUMENT, - "Result flight descriptor never provided"); + "Result flight descriptor never provided"); } if (resultTable == null) { - throw GrpcUtil.statusRuntimeException(Code.INVALID_ARGUMENT, - "Result flight schema never provided"); + throw GrpcUtil.statusRuntimeException(Code.INVALID_ARGUMENT, "Result flight schema never provided"); } // no more changes allowed; this is officially static content @@ -344,8 +322,7 @@ public void onCompleted() { resultTable.dropReference(); GrpcUtil.safelyExecute(observer::onCompleted); return resultTable; - }), () -> GrpcUtil.safelyError(observer, Code.DATA_LOSS, - "Do put could not be sealed")); + }), () -> GrpcUtil.safelyError(observer, Code.DATA_LOSS, "Do put could not be sealed")); }); } @@ -358,11 +335,9 @@ public void close() { private void parseSchema(final Schema header) { if (resultTable != null) { - throw GrpcUtil.statusRuntimeException(Code.INVALID_ARGUMENT, - "Schema evolution not supported"); + throw GrpcUtil.statusRuntimeException(Code.INVALID_ARGUMENT, "Schema evolution not supported"); } - resultTable = - BarrageTable.make(BarrageSchemaUtil.schemaToTableDefinition(header), false); + resultTable = BarrageTable.make(BarrageSchemaUtil.schemaToTableDefinition(header), false); columnChunkTypes = resultTable.getWireChunkTypes(); columnTypes = resultTable.getWireTypes(); componentTypes = resultTable.getWireComponentTypes(); @@ -373,18 +348,17 @@ private void parseSchema(final Schema header) { } /** - * Helper class that maintains a subscription whether it was created by a bi-directional stream - * request or the no-client-streaming request. If the SubscriptionRequest sets the sequence, - * then it treats sequence as a watermark and will not send out-of-order requests (due to - * out-of-band requests). The client should already anticipate subscription changes may be - * coalesced by the BarrageMessageProducer. 
+ * Helper class that maintains a subscription whether it was created by a bi-directional stream request or the + * no-client-streaming request. If the SubscriptionRequest sets the sequence, then it treats sequence as a watermark + * and will not send out-of-order requests (due to out-of-band requests). The client should already anticipate + * subscription changes may be coalesced by the BarrageMessageProducer. */ public static class DoExchangeMarshaller extends SingletonLivenessManager - implements BrowserStream.Marshaller, StreamObserver, Closeable { + implements BrowserStream.Marshaller, StreamObserver, Closeable { @AssistedFactory public interface Factory { DoExchangeMarshaller openExchange(SessionState session, - StreamObserver observer); + StreamObserver observer); } private final String myPrefix; @@ -405,15 +379,13 @@ DoExchangeMarshaller openExchange(SessionState session, @AssistedInject public DoExchangeMarshaller( - final TicketRouter ticketRouter, - final BarrageMessageProducer.Operation.Factory operationFactory, - final BarrageMessageProducer.Adapter, StreamObserver> listenerAdapter, - final BarrageMessageProducer.Adapter optionsAdapter, - @Assisted final SessionState session, - @Assisted final StreamObserver responseObserver) { - - this.myPrefix = "DoExchangeMarshaller{" - + Integer.toHexString(System.identityHashCode(this)) + "}: "; + final TicketRouter ticketRouter, + final BarrageMessageProducer.Operation.Factory operationFactory, + final BarrageMessageProducer.Adapter, StreamObserver> listenerAdapter, + final BarrageMessageProducer.Adapter optionsAdapter, + @Assisted final SessionState session, @Assisted final StreamObserver responseObserver) { + + this.myPrefix = "DoExchangeMarshaller{" + Integer.toHexString(System.identityHashCode(this)) + "}: "; this.ticketRouter = ticketRouter; this.operationFactory = operationFactory; this.optionsAdapter = optionsAdapter; @@ -422,8 +394,7 @@ public DoExchangeMarshaller( this.session.addOnCloseCallback(this); 
if (responseObserver instanceof ServerCallStreamObserver) { - ((ServerCallStreamObserver) responseObserver) - .setOnCancelHandler(this::tryClose); + ((ServerCallStreamObserver) responseObserver).setOnCancelHandler(this::tryClose); } } @@ -442,15 +413,13 @@ public void onNext(final InputStream request) { // this is the entry point for browser streams; and client-streams delegate to this public synchronized void onMessageReceived(final MessageInfo message) { if (message.app_metadata.magic() != BarrageStreamGenerator.FLATBUFFER_MAGIC - || message.app_metadata - .msgType() != BarrageMessageType.BarrageSubscriptionRequest) { - log.warn().append(myPrefix).append("received a message without app_metadata") - .endl(); + || message.app_metadata.msgType() != BarrageMessageType.BarrageSubscriptionRequest) { + log.warn().append(myPrefix).append("received a message without app_metadata").endl(); return; } final BarrageSubscriptionRequest subscriptionRequest = - BarrageSubscriptionRequest.getRootAsBarrageSubscriptionRequest( - message.app_metadata.msgPayloadAsByteBuffer()); + BarrageSubscriptionRequest + .getRootAsBarrageSubscriptionRequest(message.app_metadata.msgPayloadAsByteBuffer()); if (bmp != null) { apply(subscriptionRequest); @@ -475,12 +444,12 @@ public synchronized void onMessageReceived(final MessageInfo message) { preExportSubscriptions = new ArrayDeque<>(); preExportSubscriptions.add(subscriptionRequest); final SessionState.ExportObject parent = - ticketRouter.resolve(session, subscriptionRequest.ticketAsByteBuffer()); + ticketRouter.resolve(session, subscriptionRequest.ticketAsByteBuffer()); onExportResolvedContinuation = session.nonExport() - .require(parent) - .onError(listener::onError) - .submit(() -> onExportResolved(parent)); + .require(parent) + .onError(listener::onError) + .submit(() -> onExportResolved(parent)); } private synchronized void onExportResolved(final SessionState.ExportObject parent) { @@ -491,8 +460,7 @@ private synchronized void 
onExportResolved(final SessionState.ExportObject bindStreamGenerator( - BarrageStreamGenerator.Factory factory); + BarrageStreamGenerator.Factory factory); @Provides static BarrageMessageProducer.Adapter, StreamObserver> provideListenerAdapter() { diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/arrow/BrowserFlightServiceGrpcBinding.java b/grpc-api/src/main/java/io/deephaven/grpc_api/arrow/BrowserFlightServiceGrpcBinding.java index 72728ff09cc..da1e6a377a7 100644 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/arrow/BrowserFlightServiceGrpcBinding.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/arrow/BrowserFlightServiceGrpcBinding.java @@ -28,35 +28,32 @@ public class BrowserFlightServiceGrpcBinding implements BindableService { @Inject public BrowserFlightServiceGrpcBinding( - final BrowserFlightServiceGrpcImpl service) { + final BrowserFlightServiceGrpcImpl service) { this.delegate = service; } @Override public ServerServiceDefinition bindService() { - return GrpcServiceOverrideBuilder - .newBuilder(delegate.bindService(), BrowserFlightServiceGrpc.SERVICE_NAME) - .onOpenOverride(delegate::openHandshakeCustom, "OpenHandshake", - BrowserFlightServiceGrpc.getOpenHandshakeMethod(), - ProtoUtils.marshaller(Flight.HandshakeRequest.getDefaultInstance()), - ProtoUtils.marshaller(Flight.HandshakeResponse.getDefaultInstance())) - .onNextOverride(delegate::nextHandshakeCustom, "NextHandshake", - BrowserFlightServiceGrpc.getNextHandshakeMethod(), - ProtoUtils.marshaller(Flight.HandshakeRequest.getDefaultInstance())) - .onOpenOverride(delegate::openDoPutCustom, "OpenDoPut", - BrowserFlightServiceGrpc.getOpenDoPutMethod(), - UnaryInputStreamMarshaller.INSTANCE, - ProtoUtils.marshaller(Flight.PutResult.getDefaultInstance())) - .onNextOverride(delegate::nextDoPutCustom, "NextDoPut", - BrowserFlightServiceGrpc.getNextDoPutMethod(), - UnaryInputStreamMarshaller.INSTANCE) - .onOpenOverride(delegate::openDoExchangeCustom, "OpenDoExchange", - 
BrowserFlightServiceGrpc.getOpenDoExchangeMethod(), - UnaryInputStreamMarshaller.INSTANCE, - PassthroughInputStreamMarshaller.INSTANCE) - .onNextOverride(delegate::nextDoExchangeCustom, "NextDoExchange", - BrowserFlightServiceGrpc.getNextDoExchangeMethod(), - UnaryInputStreamMarshaller.INSTANCE) - .build(); + return GrpcServiceOverrideBuilder.newBuilder(delegate.bindService(), BrowserFlightServiceGrpc.SERVICE_NAME) + .onOpenOverride(delegate::openHandshakeCustom, "OpenHandshake", + BrowserFlightServiceGrpc.getOpenHandshakeMethod(), + ProtoUtils.marshaller(Flight.HandshakeRequest.getDefaultInstance()), + ProtoUtils.marshaller(Flight.HandshakeResponse.getDefaultInstance())) + .onNextOverride(delegate::nextHandshakeCustom, "NextHandshake", + BrowserFlightServiceGrpc.getNextHandshakeMethod(), + ProtoUtils.marshaller(Flight.HandshakeRequest.getDefaultInstance())) + .onOpenOverride(delegate::openDoPutCustom, "OpenDoPut", BrowserFlightServiceGrpc.getOpenDoPutMethod(), + UnaryInputStreamMarshaller.INSTANCE, + ProtoUtils.marshaller(Flight.PutResult.getDefaultInstance())) + .onNextOverride(delegate::nextDoPutCustom, "NextDoPut", BrowserFlightServiceGrpc.getNextDoPutMethod(), + UnaryInputStreamMarshaller.INSTANCE) + .onOpenOverride(delegate::openDoExchangeCustom, "OpenDoExchange", + BrowserFlightServiceGrpc.getOpenDoExchangeMethod(), + UnaryInputStreamMarshaller.INSTANCE, + PassthroughInputStreamMarshaller.INSTANCE) + .onNextOverride(delegate::nextDoExchangeCustom, "NextDoExchange", + BrowserFlightServiceGrpc.getNextDoExchangeMethod(), + UnaryInputStreamMarshaller.INSTANCE) + .build(); } } diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/arrow/BrowserFlightServiceGrpcImpl.java b/grpc-api/src/main/java/io/deephaven/grpc_api/arrow/BrowserFlightServiceGrpcImpl.java index f4ec6e28413..c39074f3167 100644 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/arrow/BrowserFlightServiceGrpcImpl.java +++ 
b/grpc-api/src/main/java/io/deephaven/grpc_api/arrow/BrowserFlightServiceGrpcImpl.java @@ -23,8 +23,7 @@ import static io.deephaven.grpc_api.arrow.ArrowFlightUtil.parseProtoMessage; @Singleton -public class BrowserFlightServiceGrpcImpl - extends BrowserFlightServiceGrpc.BrowserFlightServiceImplBase { +public class BrowserFlightServiceGrpcImpl extends BrowserFlightServiceGrpc.BrowserFlightServiceImplBase { private static final Logger log = LoggerFactory.getLogger(BrowserFlightServiceGrpcImpl.class); private final SessionService sessionService; @@ -33,111 +32,105 @@ public class BrowserFlightServiceGrpcImpl @Inject() public BrowserFlightServiceGrpcImpl(final SessionService sessionService, - final TicketRouter ticketRouter, - final ArrowFlightUtil.DoExchangeMarshaller.Factory doExchangeFactory) { + final TicketRouter ticketRouter, + final ArrowFlightUtil.DoExchangeMarshaller.Factory doExchangeFactory) { this.ticketRouter = ticketRouter; this.sessionService = sessionService; this.doExchangeFactory = doExchangeFactory; } public void openHandshakeCustom(final Flight.HandshakeRequest request, - final StreamObserver responseObserver) { - throw GrpcUtil.statusRuntimeException(Code.UNIMPLEMENTED, - "See deephaven-core#997; support flight auth."); + final StreamObserver responseObserver) { + throw GrpcUtil.statusRuntimeException(Code.UNIMPLEMENTED, "See deephaven-core#997; support flight auth."); } public void nextHandshakeCustom(final Flight.HandshakeRequest request, - final StreamObserver responseObserver) { - throw GrpcUtil.statusRuntimeException(Code.UNIMPLEMENTED, - "See deephaven-core#997; support flight auth."); + final StreamObserver responseObserver) { + throw GrpcUtil.statusRuntimeException(Code.UNIMPLEMENTED, "See deephaven-core#997; support flight auth."); } - public void openDoPutCustom(final InputStream request, - final StreamObserver responseObserver) { + public void openDoPutCustom(final InputStream request, final StreamObserver responseObserver) { 
internalOnOpen(request, responseObserver, session -> { final ArrowFlightUtil.DoPutObserver marshaller = - new ArrowFlightUtil.DoPutObserver(session, ticketRouter, responseObserver); + new ArrowFlightUtil.DoPutObserver(session, ticketRouter, responseObserver); return new BrowserStream<>(BrowserStream.Mode.IN_ORDER, session, marshaller); }); } public void nextDoPutCustom(final InputStream request, - final StreamObserver responseObserver) { + final StreamObserver responseObserver) { internalOnNext(request, responseObserver); } - public void openDoExchangeCustom(final InputStream request, - final StreamObserver responseObserver) { + public void openDoExchangeCustom(final InputStream request, final StreamObserver responseObserver) { internalOnOpen(request, responseObserver, session -> { final ArrowFlightUtil.DoExchangeMarshaller marshaller = - doExchangeFactory.openExchange(session, responseObserver); + doExchangeFactory.openExchange(session, responseObserver); return new BrowserStream<>(BrowserStream.Mode.IN_ORDER, session, marshaller); }); } public void nextDoExchangeCustom(final InputStream request, - final StreamObserver responseObserver) { + final StreamObserver responseObserver) { internalOnNext(request, responseObserver); } - private void internalOnOpen(final InputStream request, - final StreamObserver responseObserver, - final Function> browserStreamSupplier) { + private void internalOnOpen(final InputStream request, final StreamObserver responseObserver, + final Function> browserStreamSupplier) { GrpcUtil.rpcWrapper(log, responseObserver, () -> { final SessionState session = sessionService.getCurrentSession(); final ArrowFlightUtil.MessageInfo mi = parseProtoMessage(request); if (mi.app_metadata == null) { throw GrpcUtil.statusRuntimeException(Code.INVALID_ARGUMENT, - "app_metadata not provided or was not a BarrageMessageWrapper"); + "app_metadata not provided or was not a BarrageMessageWrapper"); } final ByteBuffer ticketBuffer = 
mi.app_metadata.rpcTicketAsByteBuffer(); if (ticketBuffer == null && !mi.app_metadata.halfCloseAfterMessage()) { throw GrpcUtil.statusRuntimeException(Code.INVALID_ARGUMENT, - "no rpc_ticket provided; cannot export this browser stream but message does not half close"); + "no rpc_ticket provided; cannot export this browser stream but message does not half close"); } - final BrowserStream browserStream = - browserStreamSupplier.apply(session); + final BrowserStream browserStream = browserStreamSupplier.apply(session); browserStream.onMessageReceived(mi); if (ticketBuffer != null) { session.newExport(ExportTicketHelper.exportIdToTicket(ticketBuffer)) - .onError(responseObserver::onError) - .submit(() -> browserStream); + .onError(responseObserver::onError) + .submit(() -> browserStream); } }); } private void internalOnNext(final InputStream request, - final StreamObserver responseObserver) { + final StreamObserver responseObserver) { GrpcUtil.rpcWrapper(log, responseObserver, () -> { final SessionState session = sessionService.getCurrentSession(); final ArrowFlightUtil.MessageInfo mi = parseProtoMessage(request); if (mi.app_metadata == null) { throw GrpcUtil.statusRuntimeException(Code.INVALID_ARGUMENT, - "app_metadata not provided or was not a BarrageMessageWrapper"); + "app_metadata not provided or was not a BarrageMessageWrapper"); } final ByteBuffer ticketBuffer = mi.app_metadata.rpcTicketAsByteBuffer(); if (ticketBuffer == null) { throw GrpcUtil.statusRuntimeException(Code.INVALID_ARGUMENT, - "No rpc_ticket provided; cannot append to existing browser stream"); + "No rpc_ticket provided; cannot append to existing browser stream"); } final SessionState.ExportObject> browserStream = - session.getExport(ExportTicketHelper.ticketToExportId(ticketBuffer)); + session.getExport(ExportTicketHelper.ticketToExportId(ticketBuffer)); session.nonExport() - .require(browserStream) - .onError(responseObserver::onError) - .submit(() -> { - 
browserStream.get().onMessageReceived(mi); - responseObserver.onNext(BrowserFlight.BrowserNextResponse.getDefaultInstance()); - responseObserver.onCompleted(); - }); + .require(browserStream) + .onError(responseObserver::onError) + .submit(() -> { + browserStream.get().onMessageReceived(mi); + responseObserver.onNext(BrowserFlight.BrowserNextResponse.getDefaultInstance()); + responseObserver.onCompleted(); + }); }); } } diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/arrow/FlightServiceGrpcBinding.java b/grpc-api/src/main/java/io/deephaven/grpc_api/arrow/FlightServiceGrpcBinding.java index 79ea97deeba..b08752fd5ad 100644 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/arrow/FlightServiceGrpcBinding.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/arrow/FlightServiceGrpcBinding.java @@ -29,25 +29,23 @@ public class FlightServiceGrpcBinding implements BindableService { @Inject public FlightServiceGrpcBinding( - final FlightServiceGrpcImpl service) { + final FlightServiceGrpcImpl service) { this.delegate = service; } @Override public ServerServiceDefinition bindService() { - return GrpcServiceOverrideBuilder - .newBuilder(delegate.bindService(), FlightServiceGrpc.SERVICE_NAME) - .onOpenOverride(delegate::doGetCustom, "DoGet", FlightServiceGrpc.getDoGetMethod(), - ProtoUtils.marshaller(Flight.Ticket.getDefaultInstance()), - PassthroughInputStreamMarshaller.INSTANCE) - .onBidiOverride(delegate::doPutCustom, "DoPut", FlightServiceGrpc.getDoPutMethod(), - PassthroughInputStreamMarshaller.INSTANCE, - ProtoUtils.marshaller(Flight.PutResult.getDefaultInstance())) - .onBidiOverride(delegate::doExchangeCustom, "DoExchange", - FlightServiceGrpc.getDoExchangeMethod(), - PassthroughInputStreamMarshaller.INSTANCE, - PassthroughInputStreamMarshaller.INSTANCE) - .build(); + return GrpcServiceOverrideBuilder.newBuilder(delegate.bindService(), FlightServiceGrpc.SERVICE_NAME) + .onOpenOverride(delegate::doGetCustom, "DoGet", FlightServiceGrpc.getDoGetMethod(), + 
ProtoUtils.marshaller(Flight.Ticket.getDefaultInstance()), + PassthroughInputStreamMarshaller.INSTANCE) + .onBidiOverride(delegate::doPutCustom, "DoPut", FlightServiceGrpc.getDoPutMethod(), + PassthroughInputStreamMarshaller.INSTANCE, + ProtoUtils.marshaller(Flight.PutResult.getDefaultInstance())) + .onBidiOverride(delegate::doExchangeCustom, "DoExchange", FlightServiceGrpc.getDoExchangeMethod(), + PassthroughInputStreamMarshaller.INSTANCE, + PassthroughInputStreamMarshaller.INSTANCE) + .build(); } /** @@ -62,22 +60,19 @@ public ServerServiceDefinition bindService() { * @return the client side method descriptor */ public static MethodDescriptor getClientDoExchangeDescriptor( - final Options options, - final ChunkType[] columnChunkTypes, - final Class[] columnTypes, - final Class[] componentTypes, - final BarrageMessageConsumer.StreamReader streamReader) { + final Options options, + final ChunkType[] columnChunkTypes, + final Class[] columnTypes, + final Class[] componentTypes, + final BarrageMessageConsumer.StreamReader streamReader) { return GrpcServiceOverrideBuilder.descriptorFor( - MethodDescriptor.MethodType.BIDI_STREAMING, FlightServiceGrpc.SERVICE_NAME, - "DoExchange", - ProtoUtils.marshaller(Flight.FlightData.getDefaultInstance()), - new BarrageDataMarshaller<>(options, columnChunkTypes, columnTypes, componentTypes, - streamReader), - FlightServiceGrpc.getDoExchangeMethod()); + MethodDescriptor.MethodType.BIDI_STREAMING, FlightServiceGrpc.SERVICE_NAME, "DoExchange", + ProtoUtils.marshaller(Flight.FlightData.getDefaultInstance()), + new BarrageDataMarshaller<>(options, columnChunkTypes, columnTypes, componentTypes, streamReader), + FlightServiceGrpc.getDoExchangeMethod()); } - public static class BarrageDataMarshaller - implements MethodDescriptor.Marshaller { + public static class BarrageDataMarshaller implements MethodDescriptor.Marshaller { private final Options options; private final ChunkType[] columnChunkTypes; private final Class[] columnTypes; @@ 
-85,11 +80,11 @@ public static class BarrageDataMarshaller private final BarrageMessageConsumer.StreamReader streamReader; public BarrageDataMarshaller( - final Options options, - final ChunkType[] columnChunkTypes, - final Class[] columnTypes, - final Class[] componentTypes, - final BarrageMessageConsumer.StreamReader streamReader) { + final Options options, + final ChunkType[] columnChunkTypes, + final Class[] columnTypes, + final Class[] componentTypes, + final BarrageMessageConsumer.StreamReader streamReader) { this.options = options; this.columnChunkTypes = columnChunkTypes; this.columnTypes = columnTypes; @@ -100,13 +95,12 @@ public BarrageDataMarshaller( @Override public InputStream stream(final BarrageMessage value) { throw new UnsupportedOperationException( - "BarrageDataMarshaller unexpectedly used to directly convert BarrageMessage to InputStream"); + "BarrageDataMarshaller unexpectedly used to directly convert BarrageMessage to InputStream"); } @Override public BarrageMessage parse(final InputStream stream) { - return streamReader.safelyParseFrom(options, columnChunkTypes, columnTypes, - componentTypes, stream); + return streamReader.safelyParseFrom(options, columnChunkTypes, columnTypes, componentTypes, stream); } } } diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/arrow/FlightServiceGrpcImpl.java b/grpc-api/src/main/java/io/deephaven/grpc_api/arrow/FlightServiceGrpcImpl.java index 2990adc4f30..daac2a40c3b 100644 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/arrow/FlightServiceGrpcImpl.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/arrow/FlightServiceGrpcImpl.java @@ -33,10 +33,10 @@ @Singleton public class FlightServiceGrpcImpl extends FlightServiceGrpc.FlightServiceImplBase { - // TODO NATE: pull app_metadata off of DoGet -- what about doPut? (core#412): use app_metadata - // to communicate serialization options + // TODO NATE: pull app_metadata off of DoGet -- what about doPut? 
(core#412): use app_metadata to communicate + // serialization options private static final ChunkInputStreamGenerator.Options DEFAULT_DESER_OPTIONS = - new ChunkInputStreamGenerator.Options.Builder().build(); + new ChunkInputStreamGenerator.Options.Builder().build(); private static final Logger log = LoggerFactory.getLogger(FlightServiceGrpcImpl.class); @@ -46,8 +46,8 @@ public class FlightServiceGrpcImpl extends FlightServiceGrpc.Flig @Inject() public FlightServiceGrpcImpl(final SessionService sessionService, - final TicketRouter ticketRouter, - final ArrowFlightUtil.DoExchangeMarshaller.Factory doExchangeFactory) { + final TicketRouter ticketRouter, + final ArrowFlightUtil.DoExchangeMarshaller.Factory doExchangeFactory) { this.ticketRouter = ticketRouter; this.sessionService = sessionService; this.doExchangeFactory = doExchangeFactory; @@ -55,40 +55,36 @@ public FlightServiceGrpcImpl(final SessionService sessionService, @Override public StreamObserver handshake( - StreamObserver responseObserver) { + StreamObserver responseObserver) { return GrpcUtil.rpcWrapper(log, responseObserver, () -> { - throw GrpcUtil.statusRuntimeException(Code.UNIMPLEMENTED, - "See deephaven-core#997; support flight auth."); + throw GrpcUtil.statusRuntimeException(Code.UNIMPLEMENTED, "See deephaven-core#997; support flight auth."); }); } @Override - public void listFlights(final Flight.Criteria request, - final StreamObserver responseObserver) { + public void listFlights(final Flight.Criteria request, final StreamObserver responseObserver) { GrpcUtil.rpcWrapper(log, responseObserver, () -> { - ticketRouter.visitFlightInfo(sessionService.getOptionalSession(), - responseObserver::onNext); + ticketRouter.visitFlightInfo(sessionService.getOptionalSession(), responseObserver::onNext); responseObserver.onCompleted(); }); } @Override public void getFlightInfo(final Flight.FlightDescriptor request, - final StreamObserver responseObserver) { + final StreamObserver responseObserver) { 
GrpcUtil.rpcWrapper(log, responseObserver, () -> { final SessionState session = sessionService.getOptionalSession(); - final SessionState.ExportObject export = - ticketRouter.flightInfoFor(session, request); + final SessionState.ExportObject export = ticketRouter.flightInfoFor(session, request); if (session != null) { session.nonExport() - .require(export) - .onError(responseObserver::onError) - .submit(() -> { - responseObserver.onNext(export.get()); - responseObserver.onCompleted(); - }); + .require(export) + .onError(responseObserver::onError) + .submit(() -> { + responseObserver.onNext(export.get()); + responseObserver.onCompleted(); + }); } else { if (export.tryRetainReference()) { try { @@ -100,8 +96,8 @@ public void getFlightInfo(final Flight.FlightDescriptor request, export.dropReference(); } } else { - responseObserver.onError(GrpcUtil.statusRuntimeException( - Code.FAILED_PRECONDITION, "Could not find flight info")); + responseObserver.onError( + GrpcUtil.statusRuntimeException(Code.FAILED_PRECONDITION, "Could not find flight info")); } } }); @@ -109,87 +105,81 @@ public void getFlightInfo(final Flight.FlightDescriptor request, @Override public void getSchema(final Flight.FlightDescriptor request, - final StreamObserver responseObserver) { + final StreamObserver responseObserver) { GrpcUtil.rpcWrapper(log, responseObserver, () -> { final SessionState session = sessionService.getOptionalSession(); - final SessionState.ExportObject export = - ticketRouter.flightInfoFor(session, request); + final SessionState.ExportObject export = ticketRouter.flightInfoFor(session, request); if (session != null) { session.nonExport() - .require(export) - .onError(responseObserver::onError) - .submit(() -> { - responseObserver.onNext(Flight.SchemaResult.newBuilder() - .setSchema(export.get().getSchema()) - .build()); - responseObserver.onCompleted(); - }); + .require(export) + .onError(responseObserver::onError) + .submit(() -> { + 
responseObserver.onNext(Flight.SchemaResult.newBuilder() + .setSchema(export.get().getSchema()) + .build()); + responseObserver.onCompleted(); + }); } else { if (export.tryRetainReference()) { try { if (export.getState() == ExportNotification.State.EXPORTED) { responseObserver.onNext(Flight.SchemaResult.newBuilder() - .setSchema(export.get().getSchema()) - .build()); + .setSchema(export.get().getSchema()) + .build()); responseObserver.onCompleted(); } } finally { export.dropReference(); } } else { - responseObserver.onError(GrpcUtil.statusRuntimeException( - Code.FAILED_PRECONDITION, "Could not find flight info")); + responseObserver.onError( + GrpcUtil.statusRuntimeException(Code.FAILED_PRECONDITION, "Could not find flight info")); } } }); } - public void doGetCustom(final Flight.Ticket request, - final StreamObserver responseObserver) { + public void doGetCustom(final Flight.Ticket request, final StreamObserver responseObserver) { GrpcUtil.rpcWrapper(log, responseObserver, () -> { final SessionState session = sessionService.getCurrentSession(); - final SessionState.ExportObject export = - ticketRouter.resolve(session, request); + final SessionState.ExportObject export = ticketRouter.resolve(session, request); session.nonExport() - .require(export) - .onError(responseObserver::onError) - .submit(() -> { - final BaseTable table = export.get(); - - // Send Schema wrapped in Message - final FlatBufferBuilder builder = new FlatBufferBuilder(); - final int schemaOffset = BarrageSchemaUtil.makeSchemaPayload(builder, - table.getDefinition(), table.getAttributes()); - builder.finish(BarrageStreamGenerator.wrapInMessage(builder, schemaOffset, - org.apache.arrow.flatbuf.MessageHeader.Schema)); - final ByteBuffer serializedMessage = builder.dataBuffer(); - - final byte[] msgBytes = Flight.FlightData.newBuilder() - .setDataHeader(ByteStringAccess.wrap(serializedMessage)) - .build() - .toByteArray(); - responseObserver - .onNext(new 
BarrageStreamGenerator.DrainableByteArrayInputStream(msgBytes, - 0, msgBytes.length)); - - // get ourselves some data! - final BarrageMessage msg = - ConstructSnapshot.constructBackplaneSnapshot(this, table); - msg.modColumnData = new BarrageMessage.ModColumnData[0]; // actually no mod - // column data for - // DoGet - - try (final BarrageStreamGenerator bsg = new BarrageStreamGenerator(msg)) { - bsg.forEachDoGetStream(bsg.getSubView(DEFAULT_DESER_OPTIONS, false), - responseObserver::onNext); - } catch (final IOException e) { - throw new UncheckedDeephavenException(e); // unexpected - } + .require(export) + .onError(responseObserver::onError) + .submit(() -> { + final BaseTable table = export.get(); + + // Send Schema wrapped in Message + final FlatBufferBuilder builder = new FlatBufferBuilder(); + final int schemaOffset = BarrageSchemaUtil.makeSchemaPayload(builder, table.getDefinition(), + table.getAttributes()); + builder.finish(BarrageStreamGenerator.wrapInMessage(builder, schemaOffset, + org.apache.arrow.flatbuf.MessageHeader.Schema)); + final ByteBuffer serializedMessage = builder.dataBuffer(); + + final byte[] msgBytes = Flight.FlightData.newBuilder() + .setDataHeader(ByteStringAccess.wrap(serializedMessage)) + .build() + .toByteArray(); + responseObserver.onNext( + new BarrageStreamGenerator.DrainableByteArrayInputStream(msgBytes, 0, msgBytes.length)); + + // get ourselves some data! 
+ final BarrageMessage msg = ConstructSnapshot.constructBackplaneSnapshot(this, table); + msg.modColumnData = new BarrageMessage.ModColumnData[0]; // actually no mod column data for + // DoGet + + try (final BarrageStreamGenerator bsg = new BarrageStreamGenerator(msg)) { + bsg.forEachDoGetStream(bsg.getSubView(DEFAULT_DESER_OPTIONS, false), + responseObserver::onNext); + } catch (final IOException e) { + throw new UncheckedDeephavenException(e); // unexpected + } - responseObserver.onCompleted(); - }); + responseObserver.onCompleted(); + }); }); } @@ -199,11 +189,10 @@ public void doGetCustom(final Flight.Ticket request, * @param responseObserver the observer to reply to * @return the observer that grpc can delegate received messages to */ - public StreamObserver doPutCustom( - final StreamObserver responseObserver) { + public StreamObserver doPutCustom(final StreamObserver responseObserver) { return GrpcUtil.rpcWrapper(log, responseObserver, - () -> new ArrowFlightUtil.DoPutObserver(sessionService.getCurrentSession(), - ticketRouter, responseObserver)); + () -> new ArrowFlightUtil.DoPutObserver(sessionService.getCurrentSession(), ticketRouter, + responseObserver)); } /** @@ -212,10 +201,8 @@ public StreamObserver doPutCustom( * @param responseObserver the observer to reply to * @return the observer that grpc can delegate received messages to */ - public StreamObserver doExchangeCustom( - final StreamObserver responseObserver) { + public StreamObserver doExchangeCustom(final StreamObserver responseObserver) { return GrpcUtil.rpcWrapper(log, responseObserver, - () -> doExchangeFactory.openExchange(sessionService.getCurrentSession(), - responseObserver)); + () -> doExchangeFactory.openExchange(sessionService.getCurrentSession(), responseObserver)); } } diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/auth/AuthContextProvider.java b/grpc-api/src/main/java/io/deephaven/grpc_api/auth/AuthContextProvider.java index 1dc8d2d537c..8459855b285 100644 --- 
a/grpc-api/src/main/java/io/deephaven/grpc_api/auth/AuthContextProvider.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/auth/AuthContextProvider.java @@ -4,14 +4,12 @@ import io.deephaven.util.auth.AuthContext; /** - * The AuthContextProvider's job is to manage all authentication and authorization responsibilities. - * If audit logging is required, an implementation of this (and its companion AuthContext) is the - * correct way path forward. + * The AuthContextProvider's job is to manage all authentication and authorization responsibilities. If audit logging is + * required, an implementation of this (and its companion AuthContext) is the correct way path forward. */ public interface AuthContextProvider { /** - * Returns true if this auth context provider can authenticate using the provided protocol - * version. + * Returns true if this auth context provider can authenticate using the provided protocol version. * * @param authProtocol the protocol version to use (application specific) * @return true iff `authProtocol` is supported diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/barrage/BarrageClientSubscription.java b/grpc-api/src/main/java/io/deephaven/grpc_api/barrage/BarrageClientSubscription.java index b056d016b13..0dcec222a56 100644 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/barrage/BarrageClientSubscription.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/barrage/BarrageClientSubscription.java @@ -49,27 +49,27 @@ public class BarrageClientSubscription implements LogOutputAppendable { private final ClientCall call; public BarrageClientSubscription( - final String logName, - final Channel channel, - final BarrageSubscriptionRequest initialRequest, - final BarrageMessageConsumer.StreamReader streamReader, - final BarrageTable resultTable) { + final String logName, + final Channel channel, + final BarrageSubscriptionRequest initialRequest, + final BarrageMessageConsumer.StreamReader streamReader, + final BarrageTable resultTable) 
{ this(logName, channel, initialRequest, streamReader, - resultTable.getWireChunkTypes(), - resultTable.getWireTypes(), - resultTable.getWireComponentTypes(), - new WeakReference<>(resultTable)); + resultTable.getWireChunkTypes(), + resultTable.getWireTypes(), + resultTable.getWireComponentTypes(), + new WeakReference<>(resultTable)); } public BarrageClientSubscription( - final String logName, - final Channel channel, - final BarrageSubscriptionRequest initialRequest, - final BarrageMessageConsumer.StreamReader streamReader, - final ChunkType[] wireChunkTypes, - final Class[] wireTypes, - final Class[] wireComponentTypes, - final WeakReference weakListener) { + final String logName, + final Channel channel, + final BarrageSubscriptionRequest initialRequest, + final BarrageMessageConsumer.StreamReader streamReader, + final ChunkType[] wireChunkTypes, + final Class[] wireTypes, + final Class[] wireComponentTypes, + final WeakReference weakListener) { this.logName = logName; this.isViewport = initialRequest.viewportVector() != null; @@ -78,81 +78,77 @@ public BarrageClientSubscription( final BarrageMessage.Listener rt = weakListener.get(); if (rt == null) { this.call = null; - log.error().append(this) - .append(": replicated table already garbage collected not requesting subscription") - .endl(); + log.error().append(this).append(": replicated table already garbage collected not requesting subscription") + .endl(); return; } - final ChunkInputStreamGenerator.Options options = - ChunkInputStreamGenerator.Options.of(initialRequest); + final ChunkInputStreamGenerator.Options options = ChunkInputStreamGenerator.Options.of(initialRequest); final MethodDescriptor subscribeDescriptor = - FlightServiceGrpcBinding.getClientDoExchangeDescriptor(options, wireChunkTypes, - wireTypes, wireComponentTypes, streamReader); + FlightServiceGrpcBinding.getClientDoExchangeDescriptor(options, wireChunkTypes, wireTypes, + wireComponentTypes, streamReader); this.call = 
channel.newCall(subscribeDescriptor, CallOptions.DEFAULT); - ClientCalls.asyncBidiStreamingCall(call, - new ClientResponseObserver() { - @Override - public void beforeStart( - final ClientCallStreamObserver requestStream) { - // IDS-6890-3: control flow may be needed here - requestStream.disableAutoInboundFlowControl(); + ClientCalls.asyncBidiStreamingCall(call, new ClientResponseObserver() { + @Override + public void beforeStart(final ClientCallStreamObserver requestStream) { + // IDS-6890-3: control flow may be needed here + requestStream.disableAutoInboundFlowControl(); + } + + @Override + public void onNext(final BarrageMessage barrageMessage) { + if (barrageMessage == null) { + return; } - - @Override - public void onNext(final BarrageMessage barrageMessage) { - if (barrageMessage == null) { + try { + final BarrageMessage.Listener listener = getListener(); + if (!connected || listener == null) { return; } - try { - final BarrageMessage.Listener listener = getListener(); - if (!connected || listener == null) { - return; - } - listener.handleBarrageMessage(barrageMessage); - } finally { - barrageMessage.close(); - } + listener.handleBarrageMessage(barrageMessage); + } finally { + barrageMessage.close(); } + } - @Override - public void onError(final Throwable t) { - log.error().append(BarrageClientSubscription.this) + @Override + public void onError(final Throwable t) { + log.error().append(BarrageClientSubscription.this) .append(": Error detected in subscription: ") .append(t).endl(); - final BarrageMessage.Listener listener = getListener(); - if (!connected || listener == null) { - return; - } - listener.handleBarrageError(t); - handleDisconnect(); - } - - @Override - public void onCompleted() { - handleDisconnect(); + final BarrageMessage.Listener listener = getListener(); + if (!connected || listener == null) { + return; } - - @Nullable - private BarrageMessage.Listener getListener() { - final BarrageMessage.Listener listener = weakListener.get(); - if 
(listener == null) { - close(); - } - return listener; + listener.handleBarrageError(t); + handleDisconnect(); + } + + @Override + public void onCompleted() { + handleDisconnect(); + } + + @Nullable + private BarrageMessage.Listener getListener() { + final BarrageMessage.Listener listener = weakListener.get(); + if (listener == null) { + close(); } - }); + return listener; + } + }); // Set connected here before we initialize the request. this.connected = true; // Send the initial subscription: call.sendMessage(Flight.FlightData.newBuilder() - .setAppMetadata(ByteStringAccess.wrap(initialRequest.getByteBuffer())) - .build()); + .setAppMetadata(ByteStringAccess.wrap(initialRequest.getByteBuffer())) + .build()); // Allow the server to send us all of the commands when there is bandwidth: call.request(Integer.MAX_VALUE); @@ -189,21 +185,18 @@ public synchronized void update(final Index viewport, final BitSet columns) { } call.sendMessage(Flight.FlightData.newBuilder() - .setAppMetadata(ByteStringAccess.wrap(makeRequestInternal(viewport, columns))) - .build()); + .setAppMetadata(ByteStringAccess.wrap(makeRequestInternal(viewport, columns))) + .build()); } @Override public LogOutput append(final LogOutput logOutput) { - return logOutput.append("Barrage/").append("/ClientSubscription/").append(logName) - .append("/") - .append(System.identityHashCode(this)).append("/"); + return logOutput.append("Barrage/").append("/ClientSubscription/").append(logName).append("/") + .append(System.identityHashCode(this)).append("/"); } - public static BarrageSubscriptionRequest makeRequest(final Index viewport, - final BitSet columns) { - return BarrageSubscriptionRequest - .getRootAsBarrageSubscriptionRequest(makeRequestInternal(viewport, columns)); + public static BarrageSubscriptionRequest makeRequest(final Index viewport, final BitSet columns) { + return BarrageSubscriptionRequest.getRootAsBarrageSubscriptionRequest(makeRequestInternal(viewport, columns)); } private static ByteBuffer 
makeRequestInternal(final Index viewport, final BitSet columns) { @@ -211,13 +204,12 @@ private static ByteBuffer makeRequestInternal(final Index viewport, final BitSet int colOffset = 0; if (columns != null) { - colOffset = - BarrageSubscriptionRequest.createColumnsVector(metadata, columns.toByteArray()); + colOffset = BarrageSubscriptionRequest.createColumnsVector(metadata, columns.toByteArray()); } int vpOffset = 0; if (viewport != null) { - vpOffset = BarrageSubscriptionRequest.createViewportVector(metadata, - BarrageProtoUtil.toByteBuffer(viewport)); + vpOffset = + BarrageSubscriptionRequest.createViewportVector(metadata, BarrageProtoUtil.toByteBuffer(viewport)); } BarrageSubscriptionRequest.startBarrageSubscriptionRequest(metadata); @@ -226,13 +218,13 @@ private static ByteBuffer makeRequestInternal(final Index viewport, final BitSet final int subscription = BarrageSubscriptionRequest.endBarrageSubscriptionRequest(metadata); final int wrapper = BarrageMessageWrapper.createBarrageMessageWrapper( - metadata, - BarrageStreamGenerator.FLATBUFFER_MAGIC, - BarrageMessageType.BarrageSubscriptionRequest, - subscription, - 0, // no ticket - 0, // no sequence - false // don't half-close + metadata, + BarrageStreamGenerator.FLATBUFFER_MAGIC, + BarrageMessageType.BarrageSubscriptionRequest, + subscription, + 0, // no ticket + 0, // no sequence + false // don't half-close ); metadata.finish(wrapper); return metadata.dataBuffer(); diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/barrage/BarrageMessageConsumer.java b/grpc-api/src/main/java/io/deephaven/grpc_api/barrage/BarrageMessageConsumer.java index c924e1d237d..d44e8559d50 100644 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/barrage/BarrageMessageConsumer.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/barrage/BarrageMessageConsumer.java @@ -27,9 +27,9 @@ public interface StreamReader { * @return a BarrageMessage filled out by the stream's payload */ BarrageMessage safelyParseFrom(final Options 
options, - final ChunkType[] columnChunkTypes, - final Class[] columnTypes, - final Class[] componentTypes, - final InputStream stream); + final ChunkType[] columnChunkTypes, + final Class[] columnTypes, + final Class[] componentTypes, + final InputStream stream); } } diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/barrage/BarrageMessageProducer.java b/grpc-api/src/main/java/io/deephaven/grpc_api/barrage/BarrageMessageProducer.java index 21fc397e0b7..c4552ba829b 100644 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/barrage/BarrageMessageProducer.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/barrage/BarrageMessageProducer.java @@ -70,17 +70,16 @@ /** * The server-side implementation of a Barrage replication source. * - * When a client subscribes initially, a snapshot of the table is sent. The snapshot is obtained - * using either get() or getPrev() based on the state of the LogicalClock. On each subsequent - * update, the client is given the deltas between the last update propagation and the next. + * When a client subscribes initially, a snapshot of the table is sent. The snapshot is obtained using either get() or + * getPrev() based on the state of the LogicalClock. On each subsequent update, the client is given the deltas between + * the last update propagation and the next. * - * When a client changes its subscription it will be sent a snapshot of only the data that the - * server believes it needs assuming that the client has been respecting the existing subscription. - * Practically, this means that the server may omit some data if the client's viewport change - * overlaps the currently recognized viewport. + * When a client changes its subscription it will be sent a snapshot of only the data that the server believes it needs + * assuming that the client has been respecting the existing subscription. Practically, this means that the server may + * omit some data if the client's viewport change overlaps the currently recognized viewport. 
* - * It is possible to use this replication source to create subscriptions that propagate changes from - * one LTM to another inside the same JVM. + * It is possible to use this replication source to create subscriptions that propagate changes from one LTM to another + * inside the same JVM. * * The client-side counterpart of this is the {@link BarrageMessageConsumer}. * @@ -88,19 +87,19 @@ * @param The sub-view type that the listener expects to receive. */ public class BarrageMessageProducer extends LivenessArtifact - implements DynamicNode, NotificationStepReceiver { - private static final boolean DEBUG = Configuration.getInstance() - .getBooleanForClassWithDefault(BarrageMessageProducer.class, "debug", false); + implements DynamicNode, NotificationStepReceiver { + private static final boolean DEBUG = + Configuration.getInstance().getBooleanForClassWithDefault(BarrageMessageProducer.class, "debug", false); // NB: It's probably best for this to default to a poolable chunk size. See // ChunkPoolConstants.LARGEST_POOLED_CHUNK_LOG2_CAPACITY. private static final int DELTA_CHUNK_SIZE = Configuration.getInstance() - .getIntegerForClassWithDefault(BarrageMessageProducer.class, "deltaChunkSize", 1 << 16); + .getIntegerForClassWithDefault(BarrageMessageProducer.class, "deltaChunkSize", 1 << 16); private static final Logger log = LoggerFactory.getLogger(BarrageMessageProducer.class); /** - * A StreamGenerator takes a BarrageMessage and re-uses portions of the serialized payload - * across different subscribers that may subscribe to different viewports and columns. + * A StreamGenerator takes a BarrageMessage and re-uses portions of the serialized payload across different + * subscribers that may subscribe to different viewports and columns. * * @param The options related to serialization. * @param The sub-view type that the listener expects to receive. 
@@ -115,16 +114,14 @@ interface Factory { StreamGenerator newGenerator(BarrageMessage message); /** - * Create a MessageView of the Schema to send as the initial message to a new - * subscriber. + * Create a MessageView of the Schema to send as the initial message to a new subscriber. * * @param options serialization options for this specific view * @param table the description of the table's data layout * @param attributes the table attributes * @return a MessageView that can be sent to a subscriber */ - MessageView getSchemaView(Options options, TableDefinition table, - Map attributes); + MessageView getSchemaView(Options options, TableDefinition table, Map attributes); } /** @@ -133,14 +130,11 @@ MessageView getSchemaView(Options options, TableDefinition table, BarrageMessage getMessage(); /** - * Obtain a Full-Subscription View of this StreamGenerator that can be sent to a single - * subscriber. + * Obtain a Full-Subscription View of this StreamGenerator that can be sent to a single subscriber. * * @param options serialization options for this specific view - * @param isInitialSnapshot indicates whether or not this is the first snapshot for the - * listener - * @return a MessageView filtered by the subscription properties that can be sent to that - * subscriber + * @param isInitialSnapshot indicates whether or not this is the first snapshot for the listener + * @return a MessageView filtered by the subscription properties that can be sent to that subscriber */ MessageView getSubView(Options options, boolean isInitialSnapshot); @@ -148,16 +142,14 @@ MessageView getSchemaView(Options options, TableDefinition table, * Obtain a View of this StreamGenerator that can be sent to a single subscriber. 
* * @param options serialization options for this specific view - * @param isInitialSnapshot indicates whether or not this is the first snapshot for the - * listener + * @param isInitialSnapshot indicates whether or not this is the first snapshot for the listener * @param viewport is the position-space viewport * @param keyspaceViewport is the key-space viewport * @param subscribedColumns are the columns subscribed for this view - * @return a MessageView filtered by the subscription properties that can be sent to that - * subscriber + * @return a MessageView filtered by the subscription properties that can be sent to that subscriber */ MessageView getSubView(Options options, boolean isInitialSnapshot, @Nullable Index viewport, - @Nullable Index keyspaceViewport, BitSet subscribedColumns); + @Nullable Index keyspaceViewport, BitSet subscribedColumns); } /** @@ -171,7 +163,7 @@ public interface Adapter { } public static class Operation - implements QueryTable.MemoizableOperation> { + implements QueryTable.MemoizableOperation> { @AssistedFactory public interface Factory { @@ -186,18 +178,18 @@ public interface Factory { @AssistedInject public Operation(final Scheduler scheduler, - final StreamGenerator.Factory streamGeneratorFactory, - @Assisted final BaseTable parent, - @Assisted final long updateIntervalMs) { + final StreamGenerator.Factory streamGeneratorFactory, + @Assisted final BaseTable parent, + @Assisted final long updateIntervalMs) { this(scheduler, streamGeneratorFactory, parent, updateIntervalMs, null); } @VisibleForTesting public Operation(final Scheduler scheduler, - final StreamGenerator.Factory streamGeneratorFactory, - final BaseTable parent, - final long updateIntervalMs, - @Nullable final Runnable onGetSnapshot) { + final StreamGenerator.Factory streamGeneratorFactory, + final BaseTable parent, + final long updateIntervalMs, + @Nullable final Runnable onGetSnapshot) { this.scheduler = scheduler; this.streamGeneratorFactory = streamGeneratorFactory; 
this.parent = parent; @@ -221,10 +213,9 @@ public MemoizedOperationKey getMemoizedOperationKey() { } @Override - public Result> initialize( - final boolean usePrev, final long beforeClock) { - final BarrageMessageProducer result = - new BarrageMessageProducer<>( + public Result> initialize(final boolean usePrev, + final long beforeClock) { + final BarrageMessageProducer result = new BarrageMessageProducer<>( scheduler, streamGeneratorFactory, parent, updateIntervalMs, onGetSnapshot); return new Result<>(result, result.constructListener()); } @@ -268,21 +259,20 @@ public int hashCode() { private final Index propagationIndex; /** - * On every update we compute which subset of rows need to be recorded dependent on our active - * subscriptions. We compute two sets, which rows were added (or need to be scoped into - * viewports) and which rows were modified. For all added (and scoped) rows we store the new - * values in every subscribed column. For all modified rows we store only the columns that are - * dirty according to the update's ModifiedColumnSet. We record the upstream update along with - * which rows are in the added + scoped set, which rows are in the modified set, as well as - * which region of the deltaColumn sources belong to these sets. We allocate continuous rows via - * a simple watermark that is reset to zero whenever our update propagation job runs. + * On every update we compute which subset of rows need to be recorded dependent on our active subscriptions. We + * compute two sets, which rows were added (or need to be scoped into viewports) and which rows were modified. For + * all added (and scoped) rows we store the new values in every subscribed column. For all modified rows we store + * only the columns that are dirty according to the update's ModifiedColumnSet. 
We record the upstream update along + * with which rows are in the added + scoped set, which rows are in the modified set, as well as which region of the + * deltaColumn sources belong to these sets. We allocate continuous rows via a simple watermark that is reset to + * zero whenever our update propagation job runs. */ private long nextFreeDeltaKey = 0; private final WritableSource[] deltaColumns; /** - * This is the last step on which the LTM-synced index was updated. This is used only for - * consistency checking between our initial creation and subsequent updates. + * This is the last step on which the LTM-synced index was updated. This is used only for consistency checking + * between our initial creation and subsequent updates. */ private long lastIndexClockStep = 0; @@ -299,9 +289,9 @@ private static final class Delta implements SafeCloseable { private final BitSet modifiedColumns; private Delta(final long step, final long deltaColumnOffset, - final ShiftAwareListener.Update update, - final Index recordedAdds, final Index recordedMods, - final BitSet subscribedColumns, final BitSet modifiedColumns) { + final ShiftAwareListener.Update update, + final Index recordedAdds, final Index recordedMods, + final BitSet subscribedColumns, final BitSet modifiedColumns) { this.step = step; this.deltaColumnOffset = deltaColumnOffset; this.update = update.copy(); @@ -322,9 +312,8 @@ public void close() { private final UpdatePropagationJob updatePropagationJob = new UpdatePropagationJob(); /** - * Subscription updates accumulate in pendingSubscriptions until the next time our update - * propagation job runs. See notes on {@link Subscription} for details of the subscription life - * cycle. + * Subscription updates accumulate in pendingSubscriptions until the next time our update propagation job runs. See + * notes on {@link Subscription} for details of the subscription life cycle. 
*/ private Index activeViewport = null; private Index postSnapshotViewport = null; @@ -339,12 +328,11 @@ public void close() { private final Runnable onGetSnapshot; public BarrageMessageProducer(final Scheduler scheduler, - final StreamGenerator.Factory streamGeneratorFactory, - final BaseTable parent, - final long updateIntervalMs, - final Runnable onGetSnapshot) { - this.logPrefix = - "BarrageMessageProducer(" + Integer.toHexString(System.identityHashCode(this)) + "): "; + final StreamGenerator.Factory streamGeneratorFactory, + final BaseTable parent, + final long updateIntervalMs, + final Runnable onGetSnapshot) { + this.logPrefix = "BarrageMessageProducer(" + Integer.toHexString(System.identityHashCode(this)) + "): "; this.scheduler = scheduler; this.streamGeneratorFactory = streamGeneratorFactory; @@ -357,24 +345,21 @@ public BarrageMessageProducer(final Scheduler scheduler, if (DEBUG) { log.info().append(logPrefix).append("Creating new BarrageMessageProducer for ") - .append(System.identityHashCode(parent)).append(" with an interval of ") - .append(updateIntervalMs).endl(); + .append(System.identityHashCode(parent)).append(" with an interval of ") + .append(updateIntervalMs).endl(); } - sourceColumns = - parent.getColumnSources().toArray(ColumnSource.ZERO_LENGTH_COLUMN_SOURCE_ARRAY); + sourceColumns = parent.getColumnSources().toArray(ColumnSource.ZERO_LENGTH_COLUMN_SOURCE_ARRAY); deltaColumns = new WritableSource[sourceColumns.length]; - // we start off with initial sizes of zero, because its quite possible no one will ever look - // at this table + // we start off with initial sizes of zero, because its quite possible no one will ever look at this table final int capacity = 0; for (int i = 0; i < sourceColumns.length; ++i) { - // If the source column is a DBDate time we'll just always use longs to avoid silly - // reinterpretations during serialization/deserialization + // If the source column is a DBDate time we'll just always use longs to avoid silly 
reinterpretations during + // serialization/deserialization sourceColumns[i] = ReinterpretUtilities.maybeConvertToPrimitive(sourceColumns[i]); - deltaColumns[i] = - ArrayBackedColumnSource.getMemoryColumnSource(capacity, sourceColumns[i].getType()); + deltaColumns[i] = ArrayBackedColumnSource.getMemoryColumnSource(capacity, sourceColumns[i].getType()); if (deltaColumns[i] instanceof ObjectArraySource) { objectColumns.set(i); @@ -397,29 +382,24 @@ public TableDefinition getTableDefinition() { ///////////////////////////////////// /** - * Here is the typical lifecycle of a subscription: 1) The new subscription is added to - * pendingSubscriptions. It is not active and its viewport / subscribed columns are empty. 2) If - * a subscription is updated before the initial snapshot is prepared, we overwrite the viewport - * / columns stored in the variables prefixed with `pending`. These variables will always - * contain the most recently requested viewport / columns that have not yet been acknowledged by - * the BMP. 3) The BMP's update propagation job runs. All pendingSubscriptions (new or updated) - * will have their pending viewport / columns requests accepted. All pendingSubscriptions move - * to the activeSubscription list if they were brand new. The pendingSubscription list is - * cleared. At this stage, the `pending` variables are nulled and their contents move to the - * variables prefixed with `snapshot`. If a viewport's subscribedColumns change when the - * viewport remains the same, we copy the reference from `viewport` to `snapshotViewport`. The - * propagation job is responsible for building the snapshot and sending it to the client. - * Finally, the `snapshot` variables are nulled and promoted to `viewport` and - * `subscribedColumns`. 
4) If a subscription is updated during or after stage 3, it will be - * added back to the pendingSubscription list, and the updated requests will sit in the - * `pending` variables until the next time the update propagation job executes. It will NOT be - * removed from the activeSubscription list. A given subscription will exist no more than once - * in either subscription list. 5) Finally, when a subscription is removed we mark it as having - * a `pendingDelete` and add it to the pendingSubscription list. Any subscription - * requests/updates that re-use this handleId will ignore this instance of Subscription and be - * allowed to construct a new Subscription starting from step 1. When the update propagation job - * is run we clean up deleted subscriptions and rebuild any state that is used to filter - * recorded updates. + * Here is the typical lifecycle of a subscription: 1) The new subscription is added to pendingSubscriptions. It is + * not active and its viewport / subscribed columns are empty. 2) If a subscription is updated before the initial + * snapshot is prepared, we overwrite the viewport / columns stored in the variables prefixed with `pending`. These + * variables will always contain the most recently requested viewport / columns that have not yet been acknowledged + * by the BMP. 3) The BMP's update propagation job runs. All pendingSubscriptions (new or updated) will have their + * pending viewport / columns requests accepted. All pendingSubscriptions move to the activeSubscription list if + * they were brand new. The pendingSubscription list is cleared. At this stage, the `pending` variables are nulled + * and their contents move to the variables prefixed with `snapshot`. If a viewport's subscribedColumns change when + * the viewport remains the same, we copy the reference from `viewport` to `snapshotViewport`. The propagation job + * is responsible for building the snapshot and sending it to the client. 
Finally, the `snapshot` variables are + * nulled and promoted to `viewport` and `subscribedColumns`. 4) If a subscription is updated during or after stage + * 3, it will be added back to the pendingSubscription list, and the updated requests will sit in the `pending` + * variables until the next time the update propagation job executes. It will NOT be removed from the + * activeSubscription list. A given subscription will exist no more than once in either subscription list. 5) + * Finally, when a subscription is removed we mark it as having a `pendingDelete` and add it to the + * pendingSubscription list. Any subscription requests/updates that re-use this handleId will ignore this instance + * of Subscription and be allowed to construct a new Subscription starting from step 1. When the update propagation + * job is run we clean up deleted subscriptions and rebuild any state that is used to filter recorded updates. */ private class Subscription { final Options options; @@ -430,24 +410,21 @@ private class Subscription { BitSet subscribedColumns; // active subscription columns boolean isActive = false; // is this subscription in our active list? - boolean pendingDelete = false; // is this subscription deleted as far as the client is - // concerned? + boolean pendingDelete = false; // is this subscription deleted as far as the client is concerned? boolean hasPendingUpdate = false; // is this subscription in our pending list? boolean pendingInitialSnapshot = true; // do we need to send the initial snapshot? 
Index pendingViewport; // if an update is pending this is our new viewport BitSet pendingColumns; // if an update is pending this is our new column subscription set - Index snapshotViewport = null; // captured viewport during snapshot portion of propagation - // job + Index snapshotViewport = null; // captured viewport during snapshot portion of propagation job BitSet snapshotColumns = null; // captured column during snapshot portion of propagation job private Subscription(final StreamObserver listener, - final Options options, - final BitSet subscribedColumns, - final @Nullable Index initialViewport) { + final Options options, + final BitSet subscribedColumns, + final @Nullable Index initialViewport) { this.options = options; this.listener = listener; - this.logPrefix = - "Sub{" + Integer.toHexString(System.identityHashCode(listener)) + "}: "; + this.logPrefix = "Sub{" + Integer.toHexString(System.identityHashCode(listener)) + "}: "; this.viewport = initialViewport != null ? Index.CURRENT_FACTORY.getEmptyIndex() : null; this.subscribedColumns = new BitSet(); this.pendingColumns = subscribedColumns; @@ -460,79 +437,76 @@ public boolean isViewport() { } public boolean addSubscription(final StreamObserver listener, - final Options options, - final BitSet columnsToSubscribe, - final @Nullable Index initialViewport) { + final Options options, + final BitSet columnsToSubscribe, + final @Nullable Index initialViewport) { synchronized (this) { - final boolean hasSubscription = - activeSubscriptions.stream().anyMatch(item -> item.listener == listener) + final boolean hasSubscription = activeSubscriptions.stream().anyMatch(item -> item.listener == listener) || pendingSubscriptions.stream().anyMatch(item -> item.listener == listener); if (hasSubscription) { throw new IllegalStateException( - "asking to add a subscription for an already existing session and listener"); + "asking to add a subscription for an already existing session and listener"); } - final Subscription 
subscription = new Subscription(listener, options, - (BitSet) columnsToSubscribe.clone(), initialViewport); + final Subscription subscription = + new Subscription(listener, options, (BitSet) columnsToSubscribe.clone(), initialViewport); Assert.neqNull(columnsToSubscribe, "columnsToSubscribe"); log.info().append(logPrefix) - .append(subscription.logPrefix) - .append("subbing to columns ") - .append(FormatBitSet.formatBitSet(columnsToSubscribe)) - .endl(); + .append(subscription.logPrefix) + .append("subbing to columns ") + .append(FormatBitSet.formatBitSet(columnsToSubscribe)) + .endl(); subscription.hasPendingUpdate = true; pendingSubscriptions.add(subscription); // we'd like to send the initial snapshot as soon as possible log.info().append(logPrefix).append(subscription.logPrefix) - .append("scheduling update immediately, for initial snapshot.").endl(); + .append("scheduling update immediately, for initial snapshot.").endl(); updatePropagationJob.scheduleImmediately(); return true; } } private boolean findAndUpdateSubscription(final StreamObserver listener, - final Consumer updateSubscription) { - final Function, Boolean> findAndUpdate = - (List subscriptions) -> { - for (final Subscription sub : subscriptions) { - if (sub.listener == listener) { - updateSubscription.accept(sub); - if (!sub.hasPendingUpdate) { - sub.hasPendingUpdate = true; - pendingSubscriptions.add(sub); - } - - updatePropagationJob.scheduleImmediately(); - return true; + final Consumer updateSubscription) { + final Function, Boolean> findAndUpdate = (List subscriptions) -> { + for (final Subscription sub : subscriptions) { + if (sub.listener == listener) { + updateSubscription.accept(sub); + if (!sub.hasPendingUpdate) { + sub.hasPendingUpdate = true; + pendingSubscriptions.add(sub); } + + updatePropagationJob.scheduleImmediately(); + return true; } + } - return false; - }; + return false; + }; synchronized (this) { - return findAndUpdate.apply(activeSubscriptions) - || 
findAndUpdate.apply(pendingSubscriptions); + return findAndUpdate.apply(activeSubscriptions) || findAndUpdate.apply(pendingSubscriptions); } } public boolean updateSubscription(final StreamObserver listener, - final BitSet newSubscribedColumns) { + final BitSet newSubscribedColumns) { return findAndUpdateSubscription(listener, sub -> { sub.pendingColumns = (BitSet) newSubscribedColumns.clone(); if (sub.isViewport() && sub.pendingViewport == null) { sub.pendingViewport = sub.viewport.clone(); } log.info().append(logPrefix).append(sub.logPrefix) - .append("scheduling update immediately, for column updates.").endl(); + .append("scheduling update immediately, for column updates.").endl(); }); } public boolean updateViewport(final StreamObserver listener, - final Index newViewport) { + final Index newViewport) { return findAndUpdateSubscription(listener, sub -> { if (sub.pendingViewport != null) { sub.pendingViewport.close(); @@ -542,12 +516,12 @@ public boolean updateViewport(final StreamObserver listener, sub.pendingColumns = (BitSet) sub.subscribedColumns.clone(); } log.info().append(logPrefix).append(sub.logPrefix) - .append("scheduling update immediately, for viewport updates.").endl(); + .append("scheduling update immediately, for viewport updates.").endl(); }); } public boolean updateViewportAndColumns(final StreamObserver listener, - final Index newViewport, final BitSet columnsToSubscribe) { + final Index newViewport, final BitSet columnsToSubscribe) { return findAndUpdateSubscription(listener, sub -> { if (sub.pendingViewport != null) { sub.pendingViewport.close(); @@ -555,7 +529,7 @@ public boolean updateViewportAndColumns(final StreamObserver listen sub.pendingViewport = newViewport.clone(); sub.pendingColumns = (BitSet) columnsToSubscribe.clone(); log.info().append(logPrefix).append(sub.logPrefix) - .append("scheduling update immediately, for viewport and column updates.").endl(); + .append("scheduling update immediately, for viewport and column 
updates.").endl(); }); } @@ -563,7 +537,7 @@ public void removeSubscription(final StreamObserver listener) { findAndUpdateSubscription(listener, sub -> { sub.pendingDelete = true; log.info().append(logPrefix).append(sub.logPrefix) - .append("scheduling update immediately, for removed subscription.").endl(); + .append("scheduling update immediately, for removed subscription.").endl(); }); } @@ -589,8 +563,7 @@ private class DeltaListener extends InstrumentedShiftAwareListener { public void onUpdate(final Update upstream) { synchronized (BarrageMessageProducer.this) { if (lastIndexClockStep >= LogicalClock.DEFAULT.currentStep()) { - throw new IllegalStateException( - logPrefix + "lastIndexClockStep=" + lastIndexClockStep + throw new IllegalStateException(logPrefix + "lastIndexClockStep=" + lastIndexClockStep + " >= notification on " + LogicalClock.DEFAULT.currentStep()); } @@ -600,17 +573,16 @@ public void onUpdate(final Update upstream) { schedulePropagation(); } - // mark when the last indices are from, so that terminal notifications can make use - // of them if required + // mark when the last indices are from, so that terminal notifications can make use of them if required lastIndexClockStep = LogicalClock.DEFAULT.currentStep(); if (DEBUG) { try (final Index prevIndex = parent.getIndex().getPrevIndex()) { log.info().append(logPrefix) - .append("lastIndexClockStep=").append(lastIndexClockStep) - .append(", upstream=").append(upstream).append(", shouldEnqueueDelta=") - .append(shouldEnqueueDelta) - .append(", index=").append(parent.getIndex()).append(", prevIndex=") - .append(prevIndex).endl(); + .append("lastIndexClockStep=").append(lastIndexClockStep) + .append(", upstream=").append(upstream).append(", shouldEnqueueDelta=") + .append(shouldEnqueueDelta) + .append(", index=").append(parent.getIndex()).append(", prevIndex=").append(prevIndex) + .endl(); } } } @@ -618,7 +590,7 @@ public void onUpdate(final Update upstream) { @Override protected void 
onFailureInternal(final Throwable originalException, - final UpdatePerformanceTracker.Entry sourceEntry) { + final UpdatePerformanceTracker.Entry sourceEntry) { synchronized (BarrageMessageProducer.this) { if (pendingError != null) { pendingError = originalException; @@ -636,10 +608,10 @@ private static class FillDeltaContext implements SafeCloseable { final WritableChunkSink.FillFromContext deltaFillContext; public FillDeltaContext(final int columnIndex, - final ColumnSource sourceColumn, - final WritableSource deltaColumn, - final SharedContext sharedContext, - final int chunkSize) { + final ColumnSource sourceColumn, + final WritableSource deltaColumn, + final SharedContext sharedContext, + final int chunkSize) { this.columnIndex = columnIndex; this.sourceColumn = sourceColumn; this.deltaColumn = deltaColumn; @@ -648,8 +620,7 @@ public FillDeltaContext(final int columnIndex, } public void doFillChunk(final OrderedKeys srcKeys, final OrderedKeys dstKeys) { - deltaColumn.fillFromChunk(deltaFillContext, - sourceColumn.getChunk(sourceGetContext, srcKeys), dstKeys); + deltaColumn.fillFromChunk(deltaFillContext, sourceColumn.getChunk(sourceGetContext, srcKeys), dstKeys); } @Override @@ -675,21 +646,17 @@ private void enqueueUpdate(final ShiftAwareListener.Update upstream) { modsToRecord = deltaViewport.intersect(upstream.modified); } } else { - // we have new viewport subscriptions and we are actively fetching snapshots so there is - // no data to record - // however we must record the index updates or else the propagationIndex will be out of - // sync + // we have new viewport subscriptions and we are actively fetching snapshots so there is no data to record + // however we must record the index updates or else the propagationIndex will be out of sync addsToRecord = Index.FACTORY.getEmptyIndex(); modsToRecord = Index.FACTORY.getEmptyIndex(); } - // Note: viewports are in position space, inserted and removed rows may cause the keyspace - // for a given viewport - // 
to shift. Let's compute which rows are being scoped into view. If current index is empty, - // we have nothing to + // Note: viewports are in position space, inserted and removed rows may cause the keyspace for a given viewport + // to shift. Let's compute which rows are being scoped into view. If current index is empty, we have nothing to // store. If prev index is empty, all rows are new and are already in addsToRecord. - if (activeViewport != null && (upstream.added.nonempty() || upstream.removed.nonempty()) - && index.nonempty() && index.sizePrev() > 0) { + if (activeViewport != null && (upstream.added.nonempty() || upstream.removed.nonempty()) && index.nonempty() + && index.sizePrev() > 0) { final Index.RandomBuilder scopedViewBuilder = Index.FACTORY.getRandomBuilder(); try (final Index prevIndex = index.getPrevIndex()) { @@ -698,36 +665,30 @@ private void enqueueUpdate(final ShiftAwareListener.Update upstream) { continue; } - final Index.ShiftInversionHelper inverter = - new Index.ShiftInversionHelper(upstream.shifted); + final Index.ShiftInversionHelper inverter = new Index.ShiftInversionHelper(upstream.shifted); sub.viewport.forAllLongRanges((posStart, posEnd) -> { // Note: we already know that both index and prevIndex are non-empty. - final long currKeyStart = inverter.mapToPrevKeyspace( - index.get(Math.min(posStart, index.size() - 1)), false); - final long currKeyEnd = inverter - .mapToPrevKeyspace(index.get(Math.min(posEnd, index.size() - 1)), true); + final long currKeyStart = + inverter.mapToPrevKeyspace(index.get(Math.min(posStart, index.size() - 1)), false); + final long currKeyEnd = + inverter.mapToPrevKeyspace(index.get(Math.min(posEnd, index.size() - 1)), true); - // if our current viewport includes no previous values this range may be - // empty + // if our current viewport includes no previous values this range may be empty if (currKeyEnd < currKeyStart) { return; } final long prevKeyStart = - posStart >= prevIndex.size() ? 
prevIndex.lastKey() + 1 - : prevIndex.get(posStart); - final long prevKeyEnd = - prevIndex.get(Math.min(posEnd, prevIndex.size() - 1)); + posStart >= prevIndex.size() ? prevIndex.lastKey() + 1 : prevIndex.get(posStart); + final long prevKeyEnd = prevIndex.get(Math.min(posEnd, prevIndex.size() - 1)); // Note: we already know that scoped rows must touch viewport boundaries if (currKeyStart < prevKeyStart) { - scopedViewBuilder.addRange(currKeyStart, - Math.min(prevKeyStart - 1, currKeyEnd)); + scopedViewBuilder.addRange(currKeyStart, Math.min(prevKeyStart - 1, currKeyEnd)); } if (currKeyEnd > prevKeyEnd) { - scopedViewBuilder.addRange(Math.max(prevKeyEnd + 1, currKeyStart), - currKeyEnd); + scopedViewBuilder.addRange(Math.max(prevKeyEnd + 1, currKeyStart), currKeyEnd); } }); } @@ -742,12 +703,12 @@ private void enqueueUpdate(final ShiftAwareListener.Update upstream) { if (DEBUG) { log.info().append(logPrefix).append("step=").append(LogicalClock.DEFAULT.currentStep()) - .append(", upstream=").append(upstream).append(", activeSubscriptions=") - .append(activeSubscriptions.size()) - .append(", numFullSubscriptions=").append(numFullSubscriptions) - .append(", addsToRecord=").append(addsToRecord) - .append(", modsToRecord=").append(modsToRecord).append(", columns=") - .append(FormatBitSet.formatBitSet(activeColumns)).endl(); + .append(", upstream=").append(upstream).append(", activeSubscriptions=") + .append(activeSubscriptions.size()) + .append(", numFullSubscriptions=").append(numFullSubscriptions).append(", addsToRecord=") + .append(addsToRecord) + .append(", modsToRecord=").append(modsToRecord).append(", columns=") + .append(FormatBitSet.formatBitSet(activeColumns)).endl(); } // Now append any data that we need to save for later. 
@@ -763,38 +724,37 @@ private void enqueueUpdate(final ShiftAwareListener.Update upstream) { final long deltaColumnOffset = nextFreeDeltaKey; if (addsToRecord.nonempty() || modsToRecord.nonempty()) { - final FillDeltaContext[] fillDeltaContexts = - new FillDeltaContext[activeColumns.cardinality()]; + final FillDeltaContext[] fillDeltaContexts = new FillDeltaContext[activeColumns.cardinality()]; try (final SharedContext sharedContext = SharedContext.makeSharedContext(); - final SafeCloseableArray ignored = new SafeCloseableArray<>(fillDeltaContexts)) { - final int totalSize = - LongSizedDataStructure.intSize("BarrageMessageProducer#enqueueUpdate", + final SafeCloseableArray ignored = new SafeCloseableArray<>(fillDeltaContexts)) { + final int totalSize = LongSizedDataStructure.intSize("BarrageMessageProducer#enqueueUpdate", addsToRecord.size() + modsToRecord.size() + nextFreeDeltaKey); - final int deltaChunkSize = (int) Math.min(DELTA_CHUNK_SIZE, - Math.max(addsToRecord.size(), modsToRecord.size())); + final int deltaChunkSize = + (int) Math.min(DELTA_CHUNK_SIZE, Math.max(addsToRecord.size(), modsToRecord.size())); - for (int columnIndex = activeColumns.nextSetBit(0), aci = - 0; columnIndex >= 0; columnIndex = activeColumns.nextSetBit(columnIndex + 1)) { + for (int columnIndex = activeColumns.nextSetBit(0), aci = 0; columnIndex >= 0; columnIndex = + activeColumns.nextSetBit(columnIndex + 1)) { if (addsToRecord.empty() && !modifiedColumns.get(columnIndex)) { continue; } deltaColumns[columnIndex].ensureCapacity(totalSize); - fillDeltaContexts[aci++] = - new FillDeltaContext(columnIndex, sourceColumns[columnIndex], + fillDeltaContexts[aci++] = new FillDeltaContext(columnIndex, sourceColumns[columnIndex], deltaColumns[columnIndex], sharedContext, deltaChunkSize); } final BiConsumer recordRows = (keysToAdd, columnsToRecord) -> { try (final OrderedKeys.Iterator okIt = keysToAdd.getOrderedKeysIterator()) { while (okIt.hasMore()) { - final OrderedKeys srcKeys = - 
okIt.getNextOrderedKeysWithLength(DELTA_CHUNK_SIZE); // NB: This - // will never - // return more - // keys than - // deltaChunkSize - try (final OrderedKeys dstKeys = OrderedKeys.forRange(nextFreeDeltaKey, - nextFreeDeltaKey + srcKeys.size() - 1)) { + final OrderedKeys srcKeys = okIt.getNextOrderedKeysWithLength(DELTA_CHUNK_SIZE); // NB: This + // will + // never + // return + // more + // keys + // than + // deltaChunkSize + try (final OrderedKeys dstKeys = + OrderedKeys.forRange(nextFreeDeltaKey, nextFreeDeltaKey + srcKeys.size() - 1)) { nextFreeDeltaKey += srcKeys.size(); for (final FillDeltaContext fillDeltaContext : fillDeltaContexts) { @@ -824,11 +784,10 @@ private void enqueueUpdate(final ShiftAwareListener.Update upstream) { if (DEBUG) { log.info().append(logPrefix).append("update accumulation complete for step=") - .append(LogicalClock.DEFAULT.currentStep()).endl(); + .append(LogicalClock.DEFAULT.currentStep()).endl(); } - pendingDeltas.add( - new Delta(LogicalClock.DEFAULT.currentStep(), deltaColumnOffset, upstream, addsToRecord, + pendingDeltas.add(new Delta(LogicalClock.DEFAULT.currentStep(), deltaColumnOffset, upstream, addsToRecord, modsToRecord, (BitSet) activeColumns.clone(), modifiedColumns)); } @@ -839,19 +798,17 @@ private void schedulePropagation() { // we have updated within the period, so wait until a sufficient gap final long nextRunTime = lastUpdateTime + updateIntervalMs; if (DEBUG) { - log.info().append(logPrefix).append("Last Update Time: ").append(lastUpdateTime) - .append(" next run: ").append(nextRunTime).endl(); + log.info().append(logPrefix).append("Last Update Time: ").append(lastUpdateTime).append(" next run: ") + .append(nextRunTime).endl(); } updatePropagationJob.scheduleAt(nextRunTime); } else { // we have not updated recently, so go for it right away if (DEBUG) { log.info().append(logPrefix) - .append("Scheduling update immediately, because last update was ") - .append(lastUpdateTime) - .append(" and now is 
").append(now).append(" msSinceLastUpdate=") - .append(msSinceLastUpdate) - .append(" interval=").append(updateIntervalMs).endl(); + .append("Scheduling update immediately, because last update was ").append(lastUpdateTime) + .append(" and now is ").append(now).append(" msSinceLastUpdate=").append(msSinceLastUpdate) + .append(" interval=").append(updateIntervalMs).endl(); } updatePropagationJob.scheduleImmediately(); } @@ -870,8 +827,7 @@ public void run() { needsRun.set(true); while (true) { if (!runLock.tryLock()) { - // if we can't get a lock, the thread that lets it go will check before exiting - // the method + // if we can't get a lock, the thread that lets it go will check before exiting the method return; } @@ -881,9 +837,8 @@ public void run() { } } catch (final Exception exception) { // TODO: global error notification core/#55 - log.error().append(logPrefix) - .append("Could not handle barrage update propagation: ").append(exception) - .endl(); + log.error().append(logPrefix).append("Could not handle barrage update propagation: ") + .append(exception).endl(); } finally { runLock.unlock(); } @@ -946,8 +901,7 @@ private void updateSubscriptionsSnapshotAndPropagate() { if (!subscription.isActive) { firstSubscription |= activeSubscriptions.isEmpty(); - // Note that initial subscriptions have empty viewports and no subscribed - // columns. + // Note that initial subscriptions have empty viewports and no subscribed columns. 
subscription.isActive = true; activeSubscriptions.add(subscription); @@ -977,8 +931,7 @@ private void updateSubscriptionsSnapshotAndPropagate() { boolean haveViewport = false; postSnapshotColumns.clear(); - final Index.RandomBuilder postSnapshotViewportBuilder = - Index.FACTORY.getRandomBuilder(); + final Index.RandomBuilder postSnapshotViewportBuilder = Index.FACTORY.getRandomBuilder(); for (int i = 0; i < activeSubscriptions.size(); ++i) { final Subscription sub = activeSubscriptions.get(i); @@ -987,8 +940,7 @@ private void updateSubscriptionsSnapshotAndPropagate() { --numFullSubscriptions; } - activeSubscriptions.set(i, - activeSubscriptions.get(activeSubscriptions.size() - 1)); + activeSubscriptions.set(i, activeSubscriptions.get(activeSubscriptions.size() - 1)); activeSubscriptions.remove(activeSubscriptions.size() - 1); --i; continue; @@ -996,18 +948,16 @@ private void updateSubscriptionsSnapshotAndPropagate() { if (sub.isViewport()) { haveViewport = true; - postSnapshotViewportBuilder.addIndex( - sub.snapshotViewport != null ? sub.snapshotViewport : sub.viewport); + postSnapshotViewportBuilder + .addIndex(sub.snapshotViewport != null ? sub.snapshotViewport : sub.viewport); } - postSnapshotColumns.or( - sub.snapshotColumns != null ? sub.snapshotColumns : sub.subscribedColumns); + postSnapshotColumns.or(sub.snapshotColumns != null ? sub.snapshotColumns : sub.subscribedColumns); } postSnapshotViewport = haveViewport ? postSnapshotViewportBuilder.getIndex() : null; if (!needsSnapshot) { - // i.e. We have only removed subscriptions; we can update this state - // immediately. + // i.e. We have only removed subscriptions; we can update this state immediately. 
promoteSnapshotToActive(); } } @@ -1021,8 +971,7 @@ private void updateSubscriptionsSnapshotAndPropagate() { // then we spend the effort to take a snapshot if (needsSnapshot) { try (final Index snapshotIndex = snapshotRows.getIndex()) { - snapshot = getSnapshot(updatedSubscriptions, snapshotColumns, - needsFullSnapshot ? null : snapshotIndex); + snapshot = getSnapshot(updatedSubscriptions, snapshotColumns, needsFullSnapshot ? null : snapshotIndex); } } @@ -1054,8 +1003,7 @@ private void updateSubscriptionsSnapshotAndPropagate() { if (firstSubscription) { Assert.neqNull(snapshot, "snapshot"); - // propagationIndex is only updated when we have listeners; let's "refresh" it if - // needed + // propagationIndex is only updated when we have listeners; let's "refresh" it if needed propagationIndex.clear(); propagationIndex.insert(snapshot.rowsAdded); } @@ -1092,7 +1040,7 @@ private void updateSubscriptionsSnapshotAndPropagate() { if (snapshot != null) { try (final StreamGenerator snapshotGenerator = - streamGeneratorFactory.newGenerator(snapshot)) { + streamGeneratorFactory.newGenerator(snapshot)) { for (final Subscription subscription : updatedSubscriptions) { if (subscription.pendingDelete) { continue; @@ -1121,12 +1069,9 @@ private void updateSubscriptionsSnapshotAndPropagate() { } } - private void propagateToSubscribers(final BarrageMessage message, - final Index propIndexForMessage) { - // message is released via transfer to stream generator (as it must live until all view's - // are closed) - try (final StreamGenerator generator = - streamGeneratorFactory.newGenerator(message)) { + private void propagateToSubscribers(final BarrageMessage message, final Index propIndexForMessage) { + // message is released via transfer to stream generator (as it must live until all view's are closed) + try (final StreamGenerator generator = streamGeneratorFactory.newGenerator(message)) { for (final Subscription subscription : activeSubscriptions) { if 
(subscription.pendingInitialSnapshot || subscription.pendingDelete) { continue; @@ -1134,21 +1079,19 @@ private void propagateToSubscribers(final BarrageMessage message, // There are three messages that might be sent this update: // - pre-snapshot: snapshotViewport/snapshotColumn values apply during this phase - // - snapshot: here we close and clear the snapshotViewport/snapshotColumn values; - // officially we recognize the subscription change - // - post-snapshot: now we use the viewport/subscribedColumn values (these are the - // values the LTM listener uses) + // - snapshot: here we close and clear the snapshotViewport/snapshotColumn values; officially we + // recognize the subscription change + // - post-snapshot: now we use the viewport/subscribedColumn values (these are the values the LTM + // listener uses) final Index vp = - subscription.snapshotViewport != null ? subscription.snapshotViewport - : subscription.viewport; - final BitSet cols = - subscription.snapshotColumns != null ? subscription.snapshotColumns + subscription.snapshotViewport != null ? subscription.snapshotViewport : subscription.viewport; + final BitSet cols = subscription.snapshotColumns != null ? subscription.snapshotColumns : subscription.subscribedColumns; try (final Index clientView = - subscription.isViewport() ? propIndexForMessage.subindexByPos(vp) : null) { - subscription.listener.onNext( - generator.getSubView(subscription.options, false, vp, clientView, cols)); + subscription.isViewport() ? 
propIndexForMessage.subindexByPos(vp) : null) { + subscription.listener + .onNext(generator.getSubView(subscription.options, false, vp, clientView, cols)); } catch (final Exception e) { try { subscription.listener.onError(GrpcUtil.securelyWrapError(log, e)); @@ -1162,15 +1105,12 @@ private void propagateToSubscribers(final BarrageMessage message, private void clearObjectDeltaColumns(@NotNull final BitSet objectColumnsToClear) { try (final ResettableWritableObjectChunk backingChunk = - ResettableWritableObjectChunk.makeResettableChunk()) { - for (int columnIndex = - objectColumnsToClear.nextSetBit(0); columnIndex >= 0; columnIndex = + ResettableWritableObjectChunk.makeResettableChunk()) { + for (int columnIndex = objectColumnsToClear.nextSetBit(0); columnIndex >= 0; columnIndex = objectColumnsToClear.nextSetBit(columnIndex + 1)) { - final ObjectArraySource sourceToNull = - (ObjectArraySource) deltaColumns[columnIndex]; + final ObjectArraySource sourceToNull = (ObjectArraySource) deltaColumns[columnIndex]; final long targetCapacity = Math.min(nextFreeDeltaKey, sourceToNull.getCapacity()); - for (long positionToNull = 0; positionToNull < targetCapacity; positionToNull += - backingChunk.size()) { + for (long positionToNull = 0; positionToNull < targetCapacity; positionToNull += backingChunk.size()) { sourceToNull.resetWritableChunkToBackingStore(backingChunk, positionToNull); backingChunk.fillWithNullValue(0, backingChunk.size()); } @@ -1179,15 +1119,12 @@ private void clearObjectDeltaColumns(@NotNull final BitSet objectColumnsToClear) } private void propagateSnapshotForSubscription(final Subscription subscription, - final StreamGenerator snapshotGenerator) { + final StreamGenerator snapshotGenerator) { boolean needsSnapshot = subscription.pendingInitialSnapshot; - // This is a little confusing, but by the time we propagate, the - // `snapshotViewport`/`snapshotColumns` objects - // are the previous subscription items. 
The ones we want are already active; since we no - // longer hold the lock - // the parent table listener needs to be recording data as if we've already sent the - // successful snapshot. + // This is a little confusing, but by the time we propagate, the `snapshotViewport`/`snapshotColumns` objects + // are the previous subscription items. The ones we want are already active; since we no longer hold the lock + // the parent table listener needs to be recording data as if we've already sent the successful snapshot. if (subscription.snapshotViewport != null) { needsSnapshot = true; @@ -1204,27 +1141,25 @@ private void propagateSnapshotForSubscription(final Subscription subscription, if (needsSnapshot) { if (DEBUG) { log.info().append(logPrefix).append("Sending snapshot to ") - .append(System.identityHashCode(subscription)).endl(); + .append(System.identityHashCode(subscription)).endl(); } final boolean isViewport = subscription.viewport != null; - try (final Index keySpaceViewport = isViewport - ? snapshotGenerator.getMessage().rowsAdded.subindexByPos(subscription.viewport) - : null) { + try (final Index keySpaceViewport = + isViewport ? snapshotGenerator.getMessage().rowsAdded.subindexByPos(subscription.viewport) : null) { if (subscription.pendingInitialSnapshot) { // Send schema metadata to this new client. 
subscription.listener.onNext(streamGeneratorFactory.getSchemaView( - subscription.options, - parent.getDefinition(), - parent.getAttributes())); + subscription.options, + parent.getDefinition(), + parent.getAttributes())); } - subscription.listener.onNext(snapshotGenerator.getSubView(subscription.options, - subscription.pendingInitialSnapshot, subscription.viewport, keySpaceViewport, - subscription.subscribedColumns)); + subscription.listener + .onNext(snapshotGenerator.getSubView(subscription.options, subscription.pendingInitialSnapshot, + subscription.viewport, keySpaceViewport, subscription.subscribedColumns)); } catch (final Exception e) { - GrpcUtil.safelyExecute( - () -> subscription.listener.onError(GrpcUtil.securelyWrapError(log, e))); + GrpcUtil.safelyExecute(() -> subscription.listener.onError(GrpcUtil.securelyWrapError(log, e))); removeSubscription(subscription.listener); } } @@ -1251,21 +1186,20 @@ private BarrageMessage aggregateUpdatesInRange(final int startDelta, final int e localAdded = Index.CURRENT_FACTORY.getEmptyIndex(); } else { localAdded = Index.CURRENT_FACTORY.getIndexByRange( - firstDelta.deltaColumnOffset, - firstDelta.deltaColumnOffset + firstDelta.recordedAdds.size() - 1); + firstDelta.deltaColumnOffset, + firstDelta.deltaColumnOffset + firstDelta.recordedAdds.size() - 1); } final Index localModified; if (firstDelta.recordedMods.empty()) { localModified = Index.CURRENT_FACTORY.getEmptyIndex(); } else { localModified = Index.CURRENT_FACTORY.getIndexByRange( - firstDelta.deltaColumnOffset + firstDelta.recordedAdds.size(), - firstDelta.deltaColumnOffset + firstDelta.recordedAdds.size() - + firstDelta.recordedMods.size() - 1); + firstDelta.deltaColumnOffset + firstDelta.recordedAdds.size(), + firstDelta.deltaColumnOffset + firstDelta.recordedAdds.size() + firstDelta.recordedMods.size() + - 1); } - addColumnSet = - firstDelta.recordedAdds.empty() ? new BitSet() : firstDelta.subscribedColumns; + addColumnSet = firstDelta.recordedAdds.empty() ? 
new BitSet() : firstDelta.subscribedColumns; modColumnSet = firstDelta.modifiedColumns; downstream.rowsAdded = firstDelta.update.added.clone(); @@ -1283,9 +1217,8 @@ private BarrageMessage aggregateUpdatesInRange(final int startDelta, final int e if (addColumnSet.get(ci)) { final int chunkCapacity = localAdded.intSize("serializeItems"); final WritableChunk chunk = - deltaColumn.getChunkType().makeWritableChunk(chunkCapacity); - try (final ChunkSource.FillContext fc = - deltaColumn.makeFillContext(chunkCapacity)) { + deltaColumn.getChunkType().makeWritableChunk(chunkCapacity); + try (final ChunkSource.FillContext fc = deltaColumn.makeFillContext(chunkCapacity)) { deltaColumn.fillChunk(fc, chunk, localAdded); } adds.data = chunk; @@ -1299,8 +1232,7 @@ private BarrageMessage aggregateUpdatesInRange(final int startDelta, final int e for (int ci = 0; ci < downstream.modColumnData.length; ++ci) { final ColumnSource deltaColumn = deltaColumns[ci]; - final BarrageMessage.ModColumnData modifications = - new BarrageMessage.ModColumnData(); + final BarrageMessage.ModColumnData modifications = new BarrageMessage.ModColumnData(); downstream.modColumnData[ci] = modifications; if (modColumnSet.get(ci)) { @@ -1308,9 +1240,8 @@ private BarrageMessage aggregateUpdatesInRange(final int startDelta, final int e final int chunkCapacity = localModified.intSize("serializeItems"); final WritableChunk chunk = - deltaColumn.getChunkType().makeWritableChunk(chunkCapacity); - try (final ChunkSource.FillContext fc = - deltaColumn.makeFillContext(chunkCapacity)) { + deltaColumn.getChunkType().makeWritableChunk(chunkCapacity); + try (final ChunkSource.FillContext fc = deltaColumn.makeFillContext(chunkCapacity)) { deltaColumn.fillChunk(fc, chunk, localModified); } modifications.data = chunk; @@ -1325,13 +1256,12 @@ private BarrageMessage aggregateUpdatesInRange(final int startDelta, final int e } else { // We must coalesce these updates. 
final Index.IndexUpdateCoalescer coalescer = - new Index.IndexUpdateCoalescer(propagationIndex, firstDelta.update); + new Index.IndexUpdateCoalescer(propagationIndex, firstDelta.update); for (int i = startDelta + 1; i < endDelta; ++i) { coalescer.update(pendingDeltas.get(i).update); } - // We need to build our included additions and included modifications in addition to the - // coalesced update. + // We need to build our included additions and included modifications in addition to the coalesced update. addColumnSet = new BitSet(); modColumnSet = new BitSet(); @@ -1351,8 +1281,7 @@ private BarrageMessage aggregateUpdatesInRange(final int startDelta, final int e addColumnSet.or(delta.subscribedColumns); } else { // It pays to be certain that all of the data we look up was written down. - Assert.equals(delta.subscribedColumns, "delta.subscribedColumns", - addColumnSet, "addColumnSet"); + Assert.equals(delta.subscribedColumns, "delta.subscribedColumns", addColumnSet, "addColumnSet"); } localAdded.insert(delta.recordedAdds); @@ -1363,12 +1292,11 @@ private BarrageMessage aggregateUpdatesInRange(final int startDelta, final int e } } - // One drawback of the ModifiedColumnSet, is that our adds must include data for all - // columns. However, column - // specific data may be updated and we only write down that single changed column. So, - // the computation of mapping - // output rows to input data may be different per Column. We can re-use calculations - // where the set of deltas + // One drawback of the ModifiedColumnSet, is that our adds must include data for all columns. However, + // column + // specific data may be updated and we only write down that single changed column. So, the computation of + // mapping + // output rows to input data may be different per Column. We can re-use calculations where the set of deltas // that modify column A are the same as column B. 
final class ColumnInfo { final Index modified = Index.CURRENT_FACTORY.getEmptyIndex(); @@ -1412,12 +1340,10 @@ final class ColumnInfo { Arrays.fill(retval.addedMapping, Index.NULL_KEY); Arrays.fill(retval.modifiedMapping, Index.NULL_KEY); - final Index unfilledAdds = - localAdded.empty() ? Index.CURRENT_FACTORY.getEmptyIndex() + final Index unfilledAdds = localAdded.empty() ? Index.CURRENT_FACTORY.getEmptyIndex() : Index.CURRENT_FACTORY.getIndexByRange(0, retval.addedMapping.length - 1); - final Index unfilledMods = retval.recordedMods.empty() - ? Index.CURRENT_FACTORY.getEmptyIndex() - : Index.CURRENT_FACTORY.getIndexByRange(0, retval.modifiedMapping.length - 1); + final Index unfilledMods = retval.recordedMods.empty() ? Index.CURRENT_FACTORY.getEmptyIndex() + : Index.CURRENT_FACTORY.getIndexByRange(0, retval.modifiedMapping.length - 1); final Index addedRemaining = localAdded.clone(); final Index modifiedRemaining = retval.recordedMods.clone(); @@ -1428,40 +1354,34 @@ final class ColumnInfo { final Delta delta = pendingDeltas.get(i); - final BiConsumer applyMapping = - (addedMapping, recordedAdds) -> { - final Index remaining = - addedMapping ? addedRemaining : modifiedRemaining; - final Index deltaRecorded = - recordedAdds ? delta.recordedAdds : delta.recordedMods; - try (final Index recorded = remaining.intersect(deltaRecorded); + final BiConsumer applyMapping = (addedMapping, recordedAdds) -> { + final Index remaining = addedMapping ? addedRemaining : modifiedRemaining; + final Index deltaRecorded = recordedAdds ? delta.recordedAdds : delta.recordedMods; + try (final Index recorded = remaining.intersect(deltaRecorded); final Index sourceRows = deltaRecorded.invert(recorded); final Index destinationsInPosSpace = remaining.invert(recorded); - final Index rowsToFill = - (addedMapping ? unfilledAdds : unfilledMods) + final Index rowsToFill = (addedMapping ? 
unfilledAdds : unfilledMods) .subindexByPos(destinationsInPosSpace)) { - sourceRows.shiftInPlace(delta.deltaColumnOffset - + (recordedAdds ? 0 : delta.recordedAdds.size())); - - remaining.remove(recorded); - if (addedMapping) { - unfilledAdds.remove(rowsToFill); - } else { - unfilledMods.remove(rowsToFill); - } + sourceRows.shiftInPlace( + delta.deltaColumnOffset + (recordedAdds ? 0 : delta.recordedAdds.size())); + + remaining.remove(recorded); + if (addedMapping) { + unfilledAdds.remove(rowsToFill); + } else { + unfilledMods.remove(rowsToFill); + } - applyRedirMapping(rowsToFill, sourceRows, + applyRedirMapping(rowsToFill, sourceRows, addedMapping ? retval.addedMapping : retval.modifiedMapping); - } - }; + } + }; applyMapping.accept(true, true); // map recorded adds - applyMapping.accept(false, true); // map recorded mods that might have a scoped - // add + applyMapping.accept(false, true); // map recorded mods that might have a scoped add if (deltasThatModifyThisColumn.get(i)) { - applyMapping.accept(true, false); // map recorded mods that propagate as - // adds + applyMapping.accept(true, false); // map recorded mods that propagate as adds applyMapping.accept(false, false); // map recorded mods } @@ -1470,8 +1390,8 @@ final class ColumnInfo { } if (unfilledAdds.size() > 0) { - Assert.assertion(false, "Error: added:" + coalescer.added + " unfilled:" - + unfilledAdds + " missing:" + coalescer.added.subindexByPos(unfilledAdds)); + Assert.assertion(false, "Error: added:" + coalescer.added + " unfilled:" + unfilledAdds + + " missing:" + coalescer.added.subindexByPos(unfilledAdds)); } Assert.eq(unfilledAdds.size(), "unfilledAdds.size()", 0); Assert.eq(unfilledMods.size(), "unfilledMods.size()", 0); @@ -1501,11 +1421,10 @@ final class ColumnInfo { if (addColumnSet.get(ci)) { final ColumnInfo info = getColumnInfo.apply(ci); final WritableChunk chunk = - deltaColumn.getChunkType().makeWritableChunk(info.addedMapping.length); - try (final ChunkSource.FillContext fc = - 
deltaColumn.makeFillContext(info.addedMapping.length)) { + deltaColumn.getChunkType().makeWritableChunk(info.addedMapping.length); + try (final ChunkSource.FillContext fc = deltaColumn.makeFillContext(info.addedMapping.length)) { ((FillUnordered) deltaColumn).fillChunkUnordered(fc, chunk, - LongChunk.chunkWrap(info.addedMapping)); + LongChunk.chunkWrap(info.addedMapping)); } adds.data = chunk; } else { @@ -1519,8 +1438,7 @@ final class ColumnInfo { int numActualModCols = 0; for (int i = 0; i < downstream.modColumnData.length; ++i) { final ColumnSource sourceColumn = deltaColumns[i]; - final BarrageMessage.ModColumnData modifications = - new BarrageMessage.ModColumnData(); + final BarrageMessage.ModColumnData modifications = new BarrageMessage.ModColumnData(); downstream.modColumnData[numActualModCols++] = modifications; if (modColumnSet.get(i)) { @@ -1528,11 +1446,10 @@ final class ColumnInfo { modifications.rowsModified = info.recordedMods.clone(); final WritableChunk chunk = - sourceColumn.getChunkType().makeWritableChunk(info.modifiedMapping.length); - try (final ChunkSource.FillContext fc = - sourceColumn.makeFillContext(info.modifiedMapping.length)) { + sourceColumn.getChunkType().makeWritableChunk(info.modifiedMapping.length); + try (final ChunkSource.FillContext fc = sourceColumn.makeFillContext(info.modifiedMapping.length)) { ((FillUnordered) sourceColumn).fillChunkUnordered(fc, chunk, - LongChunk.chunkWrap(info.modifiedMapping)); + LongChunk.chunkWrap(info.modifiedMapping)); } modifications.data = chunk; @@ -1556,8 +1473,7 @@ final class ColumnInfo { } // Updates provided mapping so that mapping[i] returns values.get(i) for all i in keys. 
- private static void applyRedirMapping(final Index keys, final Index values, - final long[] mapping) { + private static void applyRedirMapping(final Index keys, final Index values, final long[] mapping) { Assert.eq(keys.size(), "keys.size()", values.size(), "values.size()"); Assert.leq(keys.size(), "keys.size()", mapping.length, "mapping.length"); final Index.Iterator vit = values.iterator(); @@ -1595,8 +1511,7 @@ private void promoteSnapshotToActive() { // Pre-condition: activeObjectColumns == objectColumns & activeColumns this.objectColumnsToClear.or(postSnapshotColumns); this.objectColumnsToClear.and(objectColumns); - // Post-condition: activeObjectColumns == objectColumns & (activeColumns | - // postSnapshotColumns) + // Post-condition: activeObjectColumns == objectColumns & (activeColumns | postSnapshotColumns) this.activeColumns.clear(); this.activeColumns.or(this.postSnapshotColumns); @@ -1635,22 +1550,20 @@ public Boolean usePreviousValues(final long beforeClockValue) { if (DEBUG) { log.info().append(logPrefix) - .append("previousValuesAllowed usePrevious=").append(usePrevious) - .append(", step=").append(step).append(", validStep=").append(this.step).endl(); + .append("previousValuesAllowed usePrevious=").append(usePrevious) + .append(", step=").append(step).append(", validStep=").append(this.step).endl(); } return usePrevious; } @Override - public boolean snapshotConsistent(final long currentClockValue, - final boolean usingPreviousValues) { + public boolean snapshotConsistent(final long currentClockValue, final boolean usingPreviousValues) { return capturedLastIndexClockStep == getLastIndexClockStep(); } @Override - public boolean snapshotCompletedConsistently(final long afterClockValue, - final boolean usedPreviousValues) { + public boolean snapshotCompletedConsistently(final long afterClockValue, final boolean usedPreviousValues) { final boolean success; synchronized (BarrageMessageProducer.this) { success = capturedLastIndexClockStep == 
getLastIndexClockStep(); @@ -1664,7 +1577,7 @@ public boolean snapshotCompletedConsistently(final long afterClockValue, } if (DEBUG) { log.info().append(logPrefix) - .append("success=").append(success).append(", step=").append(step).endl(); + .append("success=").append(success).append(", step=").append(step).endl(); } return success; } @@ -1672,8 +1585,8 @@ public boolean snapshotCompletedConsistently(final long afterClockValue, @VisibleForTesting BarrageMessage getSnapshot(final List snapshotSubscriptions, - final BitSet columnsToSnapshot, - final Index positionsToSnapshot) { + final BitSet columnsToSnapshot, + final Index positionsToSnapshot) { if (onGetSnapshot != null) { onGetSnapshot.run(); } @@ -1682,7 +1595,7 @@ BarrageMessage getSnapshot(final List snapshotSubscriptions, // TODO: Let notification-indifferent use cases skip notification test final SnapshotControl snapshotControl = new SnapshotControl(snapshotSubscriptions); return ConstructSnapshot.constructBackplaneSnapshotInPositionSpace( - this, parent, columnsToSnapshot, positionsToSnapshot, snapshotControl); + this, parent, columnsToSnapshot, positionsToSnapshot, snapshotControl); } //////////////////////////////////////////////////// @@ -1701,8 +1614,7 @@ public boolean setRefreshing(final boolean refreshing) { if (parent.isRefreshing() || !refreshing) { return parent.isRefreshing(); } - throw new UnsupportedOperationException( - "cannot modify the source table's refreshing state"); + throw new UnsupportedOperationException("cannot modify the source table's refreshing state"); } @Override diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/barrage/BarrageStreamGenerator.java b/grpc-api/src/main/java/io/deephaven/grpc_api/barrage/BarrageStreamGenerator.java index d47211acc2e..96744f75971 100644 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/barrage/BarrageStreamGenerator.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/barrage/BarrageStreamGenerator.java @@ -54,7 +54,7 @@ import static 
io.deephaven.grpc_api_client.barrage.chunk.BaseChunkInputStreamGenerator.PADDING_BUFFER; public class BarrageStreamGenerator implements - BarrageMessageProducer.StreamGenerator { + BarrageMessageProducer.StreamGenerator { private static final Logger log = LoggerFactory.getLogger(BarrageStreamGenerator.class); public static final long FLATBUFFER_MAGIC = 0x6E687064; @@ -64,25 +64,23 @@ public interface View { } @Singleton - public static class Factory implements - BarrageMessageProducer.StreamGenerator.Factory { + public static class Factory + implements BarrageMessageProducer.StreamGenerator.Factory { @Inject public Factory() {} @Override public BarrageMessageProducer.StreamGenerator newGenerator( - final BarrageMessage message) { + final BarrageMessage message) { return new BarrageStreamGenerator(message); } @Override - public View getSchemaView(final ChunkInputStreamGenerator.Options options, - final TableDefinition table, final Map attributes) { + public View getSchemaView(final ChunkInputStreamGenerator.Options options, final TableDefinition table, + final Map attributes) { final FlatBufferBuilder builder = new FlatBufferBuilder(); - final int schemaOffset = - BarrageSchemaUtil.makeSchemaPayload(builder, table, attributes); - builder.finish(wrapInMessage(builder, schemaOffset, - org.apache.arrow.flatbuf.MessageHeader.Schema)); + final int schemaOffset = BarrageSchemaUtil.makeSchemaPayload(builder, table, attributes); + builder.finish(wrapInMessage(builder, schemaOffset, org.apache.arrow.flatbuf.MessageHeader.Schema)); return new SchemaView(builder.dataBuffer()); } } @@ -93,8 +91,7 @@ public static class ModColumnData { ModColumnData(final BarrageMessage.ModColumnData col) throws IOException { rowsModified = new IndexGenerator(col.rowsModified); - data = ChunkInputStreamGenerator.makeInputStreamGenerator(col.data.getChunkType(), - col.type, col.data); + data = ChunkInputStreamGenerator.makeInputStreamGenerator(col.data.getChunkType(), col.type, col.data); } } @@ 
-115,8 +112,7 @@ public static class ModColumnData { public final ModColumnData[] modColumnData; /** - * Create a barrage stream generator that can slice and dice the barrage message for delivery to - * clients. + * Create a barrage stream generator that can slice and dice the barrage message for delivery to clients. * * @param message the generator takes ownership of the message and its internal objects */ @@ -136,8 +132,8 @@ public BarrageStreamGenerator(final BarrageMessage message) { addColumnData = new ChunkInputStreamGenerator[message.addColumnData.length]; for (int i = 0; i < message.addColumnData.length; ++i) { final BarrageMessage.AddColumnData acd = message.addColumnData[i]; - addColumnData[i] = ChunkInputStreamGenerator - .makeInputStreamGenerator(acd.data.getChunkType(), acd.type, acd.data); + addColumnData[i] = + ChunkInputStreamGenerator.makeInputStreamGenerator(acd.data.getChunkType(), acd.type, acd.data); } modColumnData = new ModColumnData[message.modColumnData.length]; for (int i = 0; i < modColumnData.length; ++i) { @@ -148,8 +144,7 @@ public BarrageStreamGenerator(final BarrageMessage message) { message.snapshotIndex.close(); } } catch (final IOException e) { - throw new UncheckedDeephavenException( - "unexpected IOException while creating barrage message stream", e); + throw new UncheckedDeephavenException("unexpected IOException while creating barrage message stream", e); } finally { if (message.snapshotIndex != null) { message.snapshotIndex.close(); @@ -189,31 +184,26 @@ public void close() { * @param viewport is the position-space viewport * @param keyspaceViewport is the key-space viewport * @param subscribedColumns are the columns subscribed for this view - * @return a MessageView filtered by the subscription properties that can be sent to that - * subscriber + * @return a MessageView filtered by the subscription properties that can be sent to that subscriber */ @Override public SubView getSubView(final ChunkInputStreamGenerator.Options 
options, - final boolean isInitialSnapshot, - @Nullable final Index viewport, - @Nullable final Index keyspaceViewport, - @Nullable final BitSet subscribedColumns) { - return new SubView(this, options, isInitialSnapshot, viewport, keyspaceViewport, - subscribedColumns); + final boolean isInitialSnapshot, + @Nullable final Index viewport, + @Nullable final Index keyspaceViewport, + @Nullable final BitSet subscribedColumns) { + return new SubView(this, options, isInitialSnapshot, viewport, keyspaceViewport, subscribedColumns); } /** - * Obtain a Full-Subscription View of this StreamGenerator that can be sent to a single - * subscriber. + * Obtain a Full-Subscription View of this StreamGenerator that can be sent to a single subscriber. * * @param options serialization options for this specific view * @param isInitialSnapshot indicates whether or not this is the first snapshot for the listener - * @return a MessageView filtered by the subscription properties that can be sent to that - * subscriber + * @return a MessageView filtered by the subscription properties that can be sent to that subscriber */ @Override - public SubView getSubView(ChunkInputStreamGenerator.Options options, - boolean isInitialSnapshot) { + public SubView getSubView(ChunkInputStreamGenerator.Options options, boolean isInitialSnapshot) { return getSubView(options, isInitialSnapshot, null, null, null); } @@ -228,11 +218,11 @@ public static class SubView implements View { public final boolean hasModBatch; public SubView(final BarrageStreamGenerator generator, - final ChunkInputStreamGenerator.Options options, - final boolean isInitialSnapshot, - @Nullable final Index viewport, - @Nullable final Index keyspaceViewport, - @Nullable final BitSet subscribedColumns) { + final ChunkInputStreamGenerator.Options options, + final boolean isInitialSnapshot, + @Nullable final Index viewport, + @Nullable final Index keyspaceViewport, + @Nullable final BitSet subscribedColumns) { this.generator = generator; 
this.options = options; this.isInitialSnapshot = isInitialSnapshot; @@ -248,13 +238,11 @@ public SubView(final BarrageStreamGenerator generator, public void forEachStream(Consumer visitor) throws IOException { ByteBuffer metadata = generator.getMetadata(this); if (hasAddBatch) { - visitor - .accept(generator.getInputStream(this, metadata, generator::appendAddColumns)); + visitor.accept(generator.getInputStream(this, metadata, generator::appendAddColumns)); metadata = null; } if (hasModBatch) { - visitor - .accept(generator.getInputStream(this, metadata, generator::appendModColumns)); + visitor.accept(generator.getInputStream(this, metadata, generator::appendModColumns)); } } @@ -268,9 +256,9 @@ public static class SchemaView implements View { public SchemaView(final ByteBuffer buffer) { this.msgBytes = Flight.FlightData.newBuilder() - .setDataHeader(ByteStringAccess.wrap(buffer)) - .build() - .toByteArray(); + .setDataHeader(ByteStringAccess.wrap(buffer)) + .build() + .toByteArray(); } @Override @@ -282,17 +270,16 @@ public void forEachStream(Consumer visitor) { /** * Treats the visitor with FlightData InputStream's to fulfill a DoGet. 
*/ - public void forEachDoGetStream(final SubView view, final Consumer visitor) - throws IOException { + public void forEachDoGetStream(final SubView view, final Consumer visitor) throws IOException { visitor.accept(getInputStream(view, null, view.generator::appendAddColumns)); } @FunctionalInterface private interface ColumnVisitor { long visit(final SubView view, - final Consumer addStream, - final ChunkInputStreamGenerator.FieldNodeListener fieldNodeListener, - final ChunkInputStreamGenerator.BufferListener bufferListener) throws IOException; + final Consumer addStream, + final ChunkInputStreamGenerator.FieldNodeListener fieldNodeListener, + final ChunkInputStreamGenerator.BufferListener bufferListener) throws IOException; } /** @@ -300,12 +287,11 @@ long visit(final SubView view, * * @param view the view of the overall chunk to generate a RecordBatch for * @param metadata the optional flight data metadata to attach to the message - * @param columnVisitor the helper method responsible for appending the payload columns to the - * RecordBatch + * @param columnVisitor the helper method responsible for appending the payload columns to the RecordBatch * @return an InputStream ready to be drained by GRPC */ - private InputStream getInputStream(final SubView view, final ByteBuffer metadata, - final ColumnVisitor columnVisitor) throws IOException { + private InputStream getInputStream(final SubView view, final ByteBuffer metadata, final ColumnVisitor columnVisitor) + throws IOException { final ArrayDeque streams = new ArrayDeque<>(); final MutableInt size = new MutableInt(); @@ -323,8 +309,8 @@ private InputStream getInputStream(final SubView view, final ByteBuffer metadata throw new UncheckedDeephavenException("Unexpected IOException", e); } - // These buffers must be aligned to an 8-byte boundary in order for efficient alignment - // in languages like C++. 
+ // These buffers must be aligned to an 8-byte boundary in order for efficient alignment in languages like + // C++. if (size.intValue() % 8 != 0) { final int paddingBytes = (8 - (size.intValue() % 8)); size.add(paddingBytes); @@ -337,18 +323,17 @@ private InputStream getInputStream(final SubView view, final ByteBuffer metadata final long numRows; final int nodesOffset; final int buffersOffset; - try ( - final WritableObjectChunk nodeOffsets = + try (final WritableObjectChunk nodeOffsets = WritableObjectChunk.makeWritableChunk(addColumnData.length); - final WritableLongChunk bufferInfos = - WritableLongChunk.makeWritableChunk(addColumnData.length * 3)) { + final WritableLongChunk bufferInfos = + WritableLongChunk.makeWritableChunk(addColumnData.length * 3)) { nodeOffsets.setSize(0); bufferInfos.setSize(0); final MutableLong totalBufferLength = new MutableLong(); final ChunkInputStreamGenerator.FieldNodeListener fieldNodeListener = - (numElements, nullCount) -> nodeOffsets - .add(new ChunkInputStreamGenerator.FieldNodeInfo(numElements, nullCount)); + (numElements, nullCount) -> nodeOffsets + .add(new ChunkInputStreamGenerator.FieldNodeInfo(numElements, nullCount)); final ChunkInputStreamGenerator.BufferListener bufferListener = (length) -> { totalBufferLength.add(length); @@ -377,21 +362,19 @@ private InputStream getInputStream(final SubView view, final ByteBuffer metadata RecordBatch.addLength(header, numRows); final int headerOffset = RecordBatch.endRecordBatch(header); - header.finish(wrapInMessage(header, headerOffset, - org.apache.arrow.flatbuf.MessageHeader.RecordBatch, size.intValue())); + header.finish(wrapInMessage(header, headerOffset, org.apache.arrow.flatbuf.MessageHeader.RecordBatch, + size.intValue())); // now create the proto header try (final ExposedByteArrayOutputStream baos = new ExposedByteArrayOutputStream()) { final CodedOutputStream cos = CodedOutputStream.newInstance(baos); - cos.writeByteBuffer(Flight.FlightData.DATA_HEADER_FIELD_NUMBER, - 
header.dataBuffer().slice()); + cos.writeByteBuffer(Flight.FlightData.DATA_HEADER_FIELD_NUMBER, header.dataBuffer().slice()); if (metadata != null) { cos.writeByteBuffer(Flight.FlightData.APP_METADATA_FIELD_NUMBER, metadata); } - cos.writeTag(Flight.FlightData.DATA_BODY_FIELD_NUMBER, - WireFormat.WIRETYPE_LENGTH_DELIMITED); + cos.writeTag(Flight.FlightData.DATA_BODY_FIELD_NUMBER, WireFormat.WIRETYPE_LENGTH_DELIMITED); cos.writeUInt32NoTag(size.intValue()); cos.flush(); @@ -403,13 +386,12 @@ private InputStream getInputStream(final SubView view, final ByteBuffer metadata } } - public static int wrapInMessage(final FlatBufferBuilder builder, final int headerOffset, - final byte headerType) { + public static int wrapInMessage(final FlatBufferBuilder builder, final int headerOffset, final byte headerType) { return wrapInMessage(builder, headerOffset, headerType, 0); } - public static int wrapInMessage(final FlatBufferBuilder builder, final int headerOffset, - final byte headerType, final int bodyLength) { + public static int wrapInMessage(final FlatBufferBuilder builder, final int headerOffset, final byte headerType, + final int bodyLength) { Message.startMessage(builder); Message.addHeaderType(builder, headerType); Message.addHeader(builder, headerOffset); @@ -418,8 +400,8 @@ public static int wrapInMessage(final FlatBufferBuilder builder, final int heade return Message.endMessage(builder); } - private static int createByteVector(final FlatBufferBuilder builder, final byte[] data, - final int offset, final int length) { + private static int createByteVector(final FlatBufferBuilder builder, final byte[] data, final int offset, + final int length) { builder.startVector(1, length, 1); if (length > 0) { @@ -434,15 +416,14 @@ private static int createByteVector(final FlatBufferBuilder builder, final byte[ } private long appendAddColumns(final SubView view, - final Consumer addStream, - final ChunkInputStreamGenerator.FieldNodeListener fieldNodeListener, - final 
ChunkInputStreamGenerator.BufferListener bufferListener) throws IOException { + final Consumer addStream, + final ChunkInputStreamGenerator.FieldNodeListener fieldNodeListener, + final ChunkInputStreamGenerator.BufferListener bufferListener) throws IOException { // Added Chunk Data: final Index myAddedOffsets; if (view.isViewport()) { // only include added rows that are within the viewport - myAddedOffsets = rowsIncluded.original - .invert(view.keyspaceViewport.intersect(rowsIncluded.original)); + myAddedOffsets = rowsIncluded.original.invert(view.keyspaceViewport.intersect(rowsIncluded.original)); } else if (!rowsAdded.original.equals(rowsIncluded.original)) { // there are scoped rows included in the chunks that need to be removed myAddedOffsets = rowsIncluded.original.invert(rowsAdded.original); @@ -454,7 +435,7 @@ private long appendAddColumns(final SubView view, // add the add-column streams for (final ChunkInputStreamGenerator col : addColumnData) { final ChunkInputStreamGenerator.DrainableColumn drainableColumn = - col.getInputStream(view.options, myAddedOffsets); + col.getInputStream(view.options, myAddedOffsets); addStream.accept(drainableColumn); drainableColumn.visitFieldNodes(fieldNodeListener); drainableColumn.visitBuffers(bufferListener); @@ -463,24 +444,24 @@ private long appendAddColumns(final SubView view, } private long appendModColumns(final SubView view, - final Consumer addStream, - final ChunkInputStreamGenerator.FieldNodeListener fieldNodeListener, - final ChunkInputStreamGenerator.BufferListener bufferListener) throws IOException { + final Consumer addStream, + final ChunkInputStreamGenerator.FieldNodeListener fieldNodeListener, + final ChunkInputStreamGenerator.BufferListener bufferListener) throws IOException { // now add mod-column streams, and write the mod column indexes long numRows = 0; for (final ModColumnData mcd : modColumnData) { Index myModOffsets = null; if (view.isViewport()) { // only include added rows that are within the 
viewport - myModOffsets = mcd.rowsModified.original - .invert(view.keyspaceViewport.intersect(mcd.rowsModified.original)); + myModOffsets = + mcd.rowsModified.original.invert(view.keyspaceViewport.intersect(mcd.rowsModified.original)); numRows = Math.max(numRows, myModOffsets.size()); } else { numRows = Math.max(numRows, mcd.rowsModified.original.size()); } final ChunkInputStreamGenerator.DrainableColumn drainableColumn = - mcd.data.getInputStream(view.options, myModOffsets); + mcd.data.getInputStream(view.options, myModOffsets); addStream.accept(drainableColumn); drainableColumn.visitFieldNodes(fieldNodeListener); @@ -518,8 +499,7 @@ private ByteBuffer getMetadata(final SubView view) throws IOException { int effectiveColumnSetOffset = 0; if (isSnapshot && view.subscribedColumns != null) { - effectiveColumnSetOffset = - new BitSetGenerator(view.subscribedColumns).addToFlatBuffer(metadata); + effectiveColumnSetOffset = new BitSetGenerator(view.subscribedColumns).addToFlatBuffer(metadata); } final int rowsAddedOffset; @@ -548,8 +528,7 @@ private ByteBuffer getMetadata(final SubView view) throws IOException { } else { myModRowOffset = mcd.rowsModified.addToFlatBuffer(metadata); } - modOffsets.add( - BarrageModColumnMetadata.createBarrageModColumnMetadata(metadata, myModRowOffset)); + modOffsets.add(BarrageModColumnMetadata.createBarrageModColumnMetadata(metadata, myModRowOffset)); } BarrageUpdateMetadata.startNodesVector(metadata, modOffsets.size()); @@ -575,8 +554,7 @@ private ByteBuffer getMetadata(final SubView view) throws IOException { metadata.finish(BarrageUpdateMetadata.endBarrageUpdateMetadata(metadata)); final FlatBufferBuilder header = new FlatBufferBuilder(); - final int payloadOffset = - BarrageMessageWrapper.createMsgPayloadVector(header, metadata.dataBuffer()); + final int payloadOffset = BarrageMessageWrapper.createMsgPayloadVector(header, metadata.dataBuffer()); BarrageMessageWrapper.startBarrageMessageWrapper(header); 
BarrageMessageWrapper.addMagic(header, FLATBUFFER_MAGIC); BarrageMessageWrapper.addMsgType(header, BarrageMessageType.BarrageUpdateMetadata); @@ -602,7 +580,7 @@ public IndexGenerator(final Index index) throws IOException { this.original = index.clone(); // noinspection UnstableApiUsage try (final ExposedByteArrayOutputStream baos = new ExposedByteArrayOutputStream(); - final LittleEndianDataOutputStream oos = new LittleEndianDataOutputStream(baos)) { + final LittleEndianDataOutputStream oos = new LittleEndianDataOutputStream(baos)) { ExternalizableIndexUtils.writeExternalCompressedDeltas(oos, index); oos.flush(); raw = baos.peekBuffer(); @@ -626,8 +604,7 @@ public DrainableByteArrayInputStream getInputStream() { * @param builder the flatbuffer builder * @return offset of the item in the flatbuffer */ - protected int addToFlatBuffer(final Index viewport, final FlatBufferBuilder builder) - throws IOException { + protected int addToFlatBuffer(final Index viewport, final FlatBufferBuilder builder) throws IOException { if (original.subsetOf(viewport)) { return addToFlatBuffer(builder); } @@ -636,8 +613,8 @@ protected int addToFlatBuffer(final Index viewport, final FlatBufferBuilder buil final byte[] nraw; // noinspection UnstableApiUsage try (final ExposedByteArrayOutputStream baos = new ExposedByteArrayOutputStream(); - final LittleEndianDataOutputStream oos = new LittleEndianDataOutputStream(baos); - final Index viewOfOriginal = original.intersect(viewport)) { + final LittleEndianDataOutputStream oos = new LittleEndianDataOutputStream(baos); + final Index viewOfOriginal = original.intersect(viewport)) { ExternalizableIndexUtils.writeExternalCompressedDeltas(oos, viewOfOriginal); oos.flush(); nraw = baos.peekBuffer(); @@ -658,8 +635,7 @@ public BitSetGenerator(final BitSet bitset) throws IOException { this.len = (int) ((long) nBits + 7) / 8; } - public int addToFlatBuffer(final BitSet mine, final FlatBufferBuilder builder) - throws IOException { + public int 
addToFlatBuffer(final BitSet mine, final FlatBufferBuilder builder) throws IOException { if (mine.equals(original)) { return addToFlatBuffer(builder); } @@ -677,12 +653,9 @@ public static class IndexShiftDataGenerator extends ByteArrayGenerator { public IndexShiftDataGenerator(final IndexShiftData shifted) throws IOException { this.original = shifted; - final Index.SequentialBuilder sRangeBuilder = - Index.CURRENT_FACTORY.getSequentialBuilder(); - final Index.SequentialBuilder eRangeBuilder = - Index.CURRENT_FACTORY.getSequentialBuilder(); - final Index.SequentialBuilder destBuilder = - Index.CURRENT_FACTORY.getSequentialBuilder(); + final Index.SequentialBuilder sRangeBuilder = Index.CURRENT_FACTORY.getSequentialBuilder(); + final Index.SequentialBuilder eRangeBuilder = Index.CURRENT_FACTORY.getSequentialBuilder(); + final Index.SequentialBuilder destBuilder = Index.CURRENT_FACTORY.getSequentialBuilder(); if (shifted != null) { for (int i = 0; i < shifted.size(); ++i) { @@ -701,10 +674,10 @@ public IndexShiftDataGenerator(final IndexShiftData shifted) throws IOException // noinspection UnstableApiUsage try (final Index sRange = sRangeBuilder.getIndex(); - final Index eRange = eRangeBuilder.getIndex(); - final Index dest = destBuilder.getIndex(); - final ExposedByteArrayOutputStream baos = new ExposedByteArrayOutputStream(); - final LittleEndianDataOutputStream oos = new LittleEndianDataOutputStream(baos)) { + final Index eRange = eRangeBuilder.getIndex(); + final Index dest = destBuilder.getIndex(); + final ExposedByteArrayOutputStream baos = new ExposedByteArrayOutputStream(); + final LittleEndianDataOutputStream oos = new LittleEndianDataOutputStream(baos)) { ExternalizableIndexUtils.writeExternalCompressedDeltas(oos, sRange); ExternalizableIndexUtils.writeExternalCompressedDeltas(oos, eRange); ExternalizableIndexUtils.writeExternalCompressedDeltas(oos, dest); @@ -715,8 +688,7 @@ public IndexShiftDataGenerator(final IndexShiftData shifted) throws IOException } } 
- public static class DrainableByteArrayInputStream extends ByteArrayInputStream - implements Drainable { + public static class DrainableByteArrayInputStream extends ByteArrayInputStream implements Drainable { public DrainableByteArrayInputStream(final byte[] buf, final int offset, final int length) { super(buf, offset, length); } @@ -742,8 +714,7 @@ private static class ConsecutiveDrainableStreams extends InputStream implements this.streams = streams; for (final InputStream stream : streams) { if (!(stream instanceof Drainable)) { - throw new IllegalArgumentException( - "expecting sub-class of Drainable; found: " + stream.getClass()); + throw new IllegalArgumentException("expecting sub-class of Drainable; found: " + stream.getClass()); } } } @@ -755,12 +726,10 @@ public int drainTo(final OutputStream outputStream) throws IOException { final int expected = total + stream.available(); total += ((Drainable) stream).drainTo(outputStream); if (expected != total) { - throw new IllegalStateException( - "drained message drained wrong number of bytes"); + throw new IllegalStateException("drained message drained wrong number of bytes"); } if (total < 0) { - throw new IllegalStateException( - "drained message is too large; exceeds Integer.MAX_VALUE"); + throw new IllegalStateException("drained message is too large; exceeds Integer.MAX_VALUE"); } } return total; @@ -777,8 +746,7 @@ public int available() throws IOException { for (final InputStream stream : streams) { total += stream.available(); if (total < 0) { - throw new IllegalStateException( - "drained message is too large; exceeds Integer.MAX_VALUE"); + throw new IllegalStateException("drained message is too large; exceeds Integer.MAX_VALUE"); } } return total; diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/barrage/BarrageStreamReader.java b/grpc-api/src/main/java/io/deephaven/grpc_api/barrage/BarrageStreamReader.java index b0fd3b12848..db7085543dd 100644 --- 
a/grpc-api/src/main/java/io/deephaven/grpc_api/barrage/BarrageStreamReader.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/barrage/BarrageStreamReader.java @@ -38,8 +38,7 @@ import java.util.BitSet; import java.util.Iterator; -public class BarrageStreamReader - implements BarrageMessageConsumer.StreamReader { +public class BarrageStreamReader implements BarrageMessageConsumer.StreamReader { private static final Logger log = LoggerFactory.getLogger(BarrageStreamReader.class); @@ -49,10 +48,10 @@ public class BarrageStreamReader @Override public BarrageMessage safelyParseFrom(final ChunkInputStreamGenerator.Options options, - final ChunkType[] columnChunkTypes, - final Class[] columnTypes, - final Class[] componentTypes, - final InputStream stream) { + final ChunkType[] columnChunkTypes, + final Class[] columnTypes, + final Class[] componentTypes, + final InputStream stream) { Message header = null; try { boolean bodyParsed = false; @@ -66,24 +65,20 @@ public BarrageMessage safelyParseFrom(final ChunkInputStreamGenerator.Options op } else if (tag == ArrowFlightUtil.APP_METADATA_TAG) { final int size = decoder.readRawVarint32(); final ByteBuffer msgAsBB = ByteBuffer.wrap(decoder.readRawBytes(size)); - final BarrageMessageWrapper wrapper = - BarrageMessageWrapper.getRootAsBarrageMessageWrapper(msgAsBB); + final BarrageMessageWrapper wrapper = BarrageMessageWrapper.getRootAsBarrageMessageWrapper(msgAsBB); if (wrapper.magic() != BarrageStreamGenerator.FLATBUFFER_MAGIC) { log.warn().append( - "BarrageStreamReader: skipping app_metadata that does not look like BarrageMessageWrapper") - .endl(); + "BarrageStreamReader: skipping app_metadata that does not look like BarrageMessageWrapper") + .endl(); } else if (wrapper.msgType() == BarrageMessageType.BarrageUpdateMetadata) { if (msg != null) { throw new IllegalStateException( - "Previous message was not complete; pending " - + numAddBatchesRemaining - + " add batches and " + numModBatchesRemaining - + " mod batches"); 
+ "Previous message was not complete; pending " + numAddBatchesRemaining + + " add batches and " + numModBatchesRemaining + " mod batches"); } final BarrageUpdateMetadata metadata = - BarrageUpdateMetadata - .getRootAsBarrageUpdateMetadata(wrapper.msgPayloadAsByteBuffer()); + BarrageUpdateMetadata.getRootAsBarrageUpdateMetadata(wrapper.msgPayloadAsByteBuffer()); msg = new BarrageMessage(); @@ -92,23 +87,21 @@ public BarrageMessage safelyParseFrom(final ChunkInputStreamGenerator.Options op numModBatchesRemaining = metadata.numModBatches(); if (numAddBatchesRemaining > 1 || numModBatchesRemaining > 1) { throw new UnsupportedOperationException( - "Multiple consecutive add or mod RecordBatches are not yet supported"); + "Multiple consecutive add or mod RecordBatches are not yet supported"); } if (numAddBatchesRemaining < 0 || numModBatchesRemaining < 0) { throw new IllegalStateException( - "Found negative number of record batches in barrage metadata: " - + numAddBatchesRemaining + " add batches and " - + numModBatchesRemaining + " mod batches"); + "Found negative number of record batches in barrage metadata: " + + numAddBatchesRemaining + " add batches and " + numModBatchesRemaining + + " mod batches"); } if (msg.isSnapshot) { - final ByteBuffer effectiveViewport = - metadata.effectiveViewportAsByteBuffer(); + final ByteBuffer effectiveViewport = metadata.effectiveViewportAsByteBuffer(); if (effectiveViewport != null) { msg.snapshotIndex = extractIndex(effectiveViewport); } - msg.snapshotColumns = - extractBitSet(metadata.effectiveColumnSetAsByteBuffer()); + msg.snapshotColumns = extractBitSet(metadata.effectiveColumnSetAsByteBuffer()); } msg.firstSeq = metadata.firstSeq(); @@ -118,8 +111,7 @@ public BarrageMessage safelyParseFrom(final ChunkInputStreamGenerator.Options op msg.shifted = extractIndexShiftData(metadata.shiftDataAsByteBuffer()); final ByteBuffer rowsIncluded = metadata.addedRowsIncludedAsByteBuffer(); - msg.rowsIncluded = rowsIncluded != null ? 
extractIndex(rowsIncluded) - : msg.rowsAdded.clone(); + msg.rowsIncluded = rowsIncluded != null ? extractIndex(rowsIncluded) : msg.rowsAdded.clone(); msg.addColumnData = new BarrageMessage.AddColumnData[columnTypes.length]; for (int ci = 0; ci < msg.addColumnData.length; ++ci) { msg.addColumnData[ci] = new BarrageMessage.AddColumnData(); @@ -133,8 +125,7 @@ public BarrageMessage safelyParseFrom(final ChunkInputStreamGenerator.Options op msg.modColumnData[ci].componentType = componentTypes[ci]; final BarrageModColumnMetadata mcd = metadata.nodes(ci); - msg.modColumnData[ci].rowsModified = - extractIndex(mcd.modifiedRowsAsByteBuffer()); + msg.modColumnData[ci].rowsModified = extractIndex(mcd.modifiedRowsAsByteBuffer()); } } @@ -156,8 +147,7 @@ public BarrageMessage safelyParseFrom(final ChunkInputStreamGenerator.Options op } if (header.headerType() != org.apache.arrow.flatbuf.MessageHeader.RecordBatch) { - throw new IllegalStateException( - "Only know how to decode Schema/BarrageRecordBatch messages"); + throw new IllegalStateException("Only know how to decode Schema/BarrageRecordBatch messages"); } if (msg == null) { @@ -169,22 +159,20 @@ public BarrageMessage safelyParseFrom(final ChunkInputStreamGenerator.Options op final RecordBatch batch = (RecordBatch) header.header(new RecordBatch()); // noinspection UnstableApiUsage - try (final LittleEndianDataInputStream ois = new LittleEndianDataInputStream( - new BarrageProtoUtil.ObjectInputStreamAdapter(decoder, size))) { + try (final LittleEndianDataInputStream ois = + new LittleEndianDataInputStream(new BarrageProtoUtil.ObjectInputStreamAdapter(decoder, size))) { final MutableInt bufferOffset = new MutableInt(); final Iterator fieldNodeIter = - new FlatBufferIteratorAdapter<>(batch.nodesLength(), - i -> new ChunkInputStreamGenerator.FieldNodeInfo(batch.nodes(i))); + new FlatBufferIteratorAdapter<>(batch.nodesLength(), + i -> new ChunkInputStreamGenerator.FieldNodeInfo(batch.nodes(i))); final TLongArrayList bufferInfo = 
new TLongArrayList(batch.buffersLength()); for (int bi = 0; bi < batch.buffersLength(); ++bi) { - int offset = LongSizedDataStructure.intSize("BufferInfo", - batch.buffers(bi).offset()); - int length = LongSizedDataStructure.intSize("BufferInfo", - batch.buffers(bi).length()); + int offset = LongSizedDataStructure.intSize("BufferInfo", batch.buffers(bi).offset()); + int length = LongSizedDataStructure.intSize("BufferInfo", batch.buffers(bi).length()); if (bi < batch.buffersLength() - 1) { - final int nextOffset = LongSizedDataStructure.intSize("BufferInfo", - batch.buffers(bi + 1).offset()); + final int nextOffset = + LongSizedDataStructure.intSize("BufferInfo", batch.buffers(bi + 1).offset()); // our parsers handle overhanging buffers length += Math.max(0, nextOffset - offset - length); } @@ -202,20 +190,18 @@ public BarrageMessage safelyParseFrom(final ChunkInputStreamGenerator.Options op if (isAddBatch) { for (int ci = 0; ci < msg.addColumnData.length; ++ci) { - msg.addColumnData[ci].data = ChunkInputStreamGenerator - .extractChunkFromInputStream(options, columnChunkTypes[ci], - columnTypes[ci], fieldNodeIter, bufferInfoIter, ois); + msg.addColumnData[ci].data = ChunkInputStreamGenerator.extractChunkFromInputStream(options, + columnChunkTypes[ci], columnTypes[ci], fieldNodeIter, bufferInfoIter, ois); } } else { for (int ci = 0; ci < msg.modColumnData.length; ++ci) { final BarrageMessage.ModColumnData mcd = msg.modColumnData[ci]; final int numModded = mcd.rowsModified.intSize(); - mcd.data = ChunkInputStreamGenerator.extractChunkFromInputStream( - options, columnChunkTypes[ci], columnTypes[ci], fieldNodeIter, - bufferInfoIter, ois); + mcd.data = ChunkInputStreamGenerator.extractChunkFromInputStream(options, + columnChunkTypes[ci], columnTypes[ci], fieldNodeIter, bufferInfoIter, ois); if (mcd.data.size() != numModded) { throw new IllegalStateException( - "Mod column data does not have the expected number of rows."); + "Mod column data does not have the expected 
number of rows."); } } } @@ -251,7 +237,7 @@ private static Index extractIndex(final ByteBuffer bb) throws IOException { } // noinspection UnstableApiUsage try (final LittleEndianDataInputStream is = - new LittleEndianDataInputStream(new ByteBufferBackedInputStream(bb))) { + new LittleEndianDataInputStream(new ByteBufferBackedInputStream(bb))) { return ExternalizableIndexUtils.readExternalCompressedDelta(is); } } @@ -266,15 +252,15 @@ private static IndexShiftData extractIndexShiftData(final ByteBuffer bb) throws final Index sIndex, eIndex, dIndex; // noinspection UnstableApiUsage try (final LittleEndianDataInputStream is = - new LittleEndianDataInputStream(new ByteBufferBackedInputStream(bb))) { + new LittleEndianDataInputStream(new ByteBufferBackedInputStream(bb))) { sIndex = ExternalizableIndexUtils.readExternalCompressedDelta(is); eIndex = ExternalizableIndexUtils.readExternalCompressedDelta(is); dIndex = ExternalizableIndexUtils.readExternalCompressedDelta(is); } try (final Index.Iterator sit = sIndex.iterator(); - final Index.Iterator eit = eIndex.iterator(); - final Index.Iterator dit = dIndex.iterator()) { + final Index.Iterator eit = eIndex.iterator(); + final Index.Iterator dit = dIndex.iterator()) { while (sit.hasNext()) { if (!eit.hasNext() || !dit.hasNext()) { throw new IllegalStateException("IndexShiftData is inconsistent"); diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/barrage/util/BarrageSchemaUtil.java b/grpc-api/src/main/java/io/deephaven/grpc_api/barrage/util/BarrageSchemaUtil.java index a6b8ea431c5..f91e4702e65 100755 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/barrage/util/BarrageSchemaUtil.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/barrage/util/BarrageSchemaUtil.java @@ -47,19 +47,21 @@ public class BarrageSchemaUtil { // per flight specification: 0xFFFFFFFF value is the first 4 bytes of a valid IPC message private static final int IPC_CONTINUATION_TOKEN = -1; - public static final ArrowType.FixedSizeBinary 
LOCAL_DATE_TYPE = - new ArrowType.FixedSizeBinary(6);// year is 4 bytes, month is 1 byte, day is 1 byte - public static final ArrowType.FixedSizeBinary LOCAL_TIME_TYPE = - new ArrowType.FixedSizeBinary(7);// hour, minute, second are each one byte, nano is 4 bytes + public static final ArrowType.FixedSizeBinary LOCAL_DATE_TYPE = new ArrowType.FixedSizeBinary(6);// year is 4 bytes, + // month is 1 byte, + // day is 1 byte + public static final ArrowType.FixedSizeBinary LOCAL_TIME_TYPE = new ArrowType.FixedSizeBinary(7);// hour, minute, + // second are each + // one byte, nano + // is 4 bytes private static final int ATTR_STRING_LEN_CUTOFF = 1024; /** - * These are the types that get special encoding but are otherwise not primitives. TODO - * (core#58): add custom barrage serialization/deserialization support + * These are the types that get special encoding but are otherwise not primitives. TODO (core#58): add custom + * barrage serialization/deserialization support */ - private static final Set> supportedTypes = - new HashSet<>(Collections2.>asImmutableList( + private static final Set> supportedTypes = new HashSet<>(Collections2.>asImmutableList( BigDecimal.class, BigInteger.class, String.class, @@ -71,15 +73,14 @@ public static ByteString schemaBytesFromTable(final Table table) { } public static ByteString schemaBytesFromTable(final TableDefinition table, - final Map attributes) { - // note that flight expects the Schema to be wrapped in a Message prefixed by a 4-byte - // identifier + final Map attributes) { + // note that flight expects the Schema to be wrapped in a Message prefixed by a 4-byte identifier // (to detect end-of-stream in some cases) followed by the size of the flatbuffer message final FlatBufferBuilder builder = new FlatBufferBuilder(); final int schemaOffset = BarrageSchemaUtil.makeSchemaPayload(builder, table, attributes); builder.finish(BarrageStreamGenerator.wrapInMessage(builder, schemaOffset, - org.apache.arrow.flatbuf.MessageHeader.Schema)); 
+ org.apache.arrow.flatbuf.MessageHeader.Schema)); final ByteBuffer msg = builder.dataBuffer(); @@ -105,23 +106,21 @@ private static void intToBytes(int value, byte[] bytes, int offset) { } public static int makeSchemaPayload(final FlatBufferBuilder builder, - final TableDefinition table, - final Map attributes) { + final TableDefinition table, + final Map attributes) { final Map> fieldExtraMetadata = new HashMap<>(); final Function> getExtraMetadata = - (colName) -> fieldExtraMetadata.computeIfAbsent(colName, k -> new HashMap<>()); + (colName) -> fieldExtraMetadata.computeIfAbsent(colName, k -> new HashMap<>()); // noinspection unchecked - final Map descriptions = Optional - .ofNullable((Map) attributes.get(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE)) - .orElse(Collections.emptyMap()); - final MutableInputTable inputTable = - (MutableInputTable) attributes.get(Table.INPUT_TABLE_ATTRIBUTE); + final Map descriptions = + Optional.ofNullable((Map) attributes.get(Table.COLUMN_DESCRIPTIONS_ATTRIBUTE)) + .orElse(Collections.emptyMap()); + final MutableInputTable inputTable = (MutableInputTable) attributes.get(Table.INPUT_TABLE_ATTRIBUTE); // find format columns final Set formatColumns = new HashSet<>(); - table.getColumnNames().stream().filter(ColumnFormattingValues::isFormattingColumn) - .forEach(formatColumns::add); + table.getColumnNames().stream().filter(ColumnFormattingValues::isFormattingColumn).forEach(formatColumns::add); // create metadata on the schema for table attributes final Map schemaMetadata = new HashMap<>(); @@ -131,9 +130,9 @@ public static int makeSchemaPayload(final FlatBufferBuilder builder, final String key = entry.getKey(); final Object val = entry.getValue(); if (val instanceof Byte || val instanceof Short || val instanceof Integer || - val instanceof Long || val instanceof Float || val instanceof Double || - val instanceof Character || val instanceof Boolean || - (val instanceof String && ((String) val).length() < ATTR_STRING_LEN_CUTOFF)) { + val 
instanceof Long || val instanceof Float || val instanceof Double || + val instanceof Character || val instanceof Boolean || + (val instanceof String && ((String) val).length() < ATTR_STRING_LEN_CUTOFF)) { putMetadata(schemaMetadata, "attribute." + key, val.toString()); } } @@ -141,22 +140,19 @@ public static int makeSchemaPayload(final FlatBufferBuilder builder, // copy rollup details if (attributes.containsKey(Table.HIERARCHICAL_SOURCE_INFO_ATTRIBUTE)) { final HierarchicalTableInfo hierarchicalTableInfo = - (HierarchicalTableInfo) attributes.remove(Table.HIERARCHICAL_SOURCE_INFO_ATTRIBUTE); - final String hierarchicalSourceKeyPrefix = - "attribute." + Table.HIERARCHICAL_SOURCE_INFO_ATTRIBUTE + "."; + (HierarchicalTableInfo) attributes.remove(Table.HIERARCHICAL_SOURCE_INFO_ATTRIBUTE); + final String hierarchicalSourceKeyPrefix = "attribute." + Table.HIERARCHICAL_SOURCE_INFO_ATTRIBUTE + "."; putMetadata(schemaMetadata, hierarchicalSourceKeyPrefix + "hierarchicalColumnName", - hierarchicalTableInfo.getHierarchicalColumnName()); + hierarchicalTableInfo.getHierarchicalColumnName()); if (hierarchicalTableInfo instanceof RollupInfo) { final RollupInfo rollupInfo = (RollupInfo) hierarchicalTableInfo; putMetadata(schemaMetadata, hierarchicalSourceKeyPrefix + "byColumns", - String.join(",", rollupInfo.byColumnNames)); - putMetadata(schemaMetadata, hierarchicalSourceKeyPrefix + "leafType", - rollupInfo.getLeafType().name()); + String.join(",", rollupInfo.byColumnNames)); + putMetadata(schemaMetadata, hierarchicalSourceKeyPrefix + "leafType", rollupInfo.getLeafType().name()); // mark columns to indicate their sources for (final MatchPair matchPair : rollupInfo.getMatchPairs()) { - putMetadata(getExtraMetadata.apply(matchPair.left()), "rollup.sourceColumn", - matchPair.right()); + putMetadata(getExtraMetadata.apply(matchPair.left()), "rollup.sourceColumn", matchPair.right()); } } } @@ -168,52 +164,43 @@ public static int makeSchemaPayload(final FlatBufferBuilder builder, // 
wire up style and format column references if (formatColumns.contains(colName + ColumnFormattingValues.TABLE_FORMAT_NAME)) { - putMetadata(extraMetadata, "styleColumn", - colName + ColumnFormattingValues.TABLE_FORMAT_NAME); - } else if (formatColumns - .contains(colName + ColumnFormattingValues.TABLE_NUMERIC_FORMAT_NAME)) { + putMetadata(extraMetadata, "styleColumn", colName + ColumnFormattingValues.TABLE_FORMAT_NAME); + } else if (formatColumns.contains(colName + ColumnFormattingValues.TABLE_NUMERIC_FORMAT_NAME)) { putMetadata(extraMetadata, "numberFormatColumn", - colName + ColumnFormattingValues.TABLE_NUMERIC_FORMAT_NAME); - } else if (formatColumns - .contains(colName + ColumnFormattingValues.TABLE_DATE_FORMAT_NAME)) { - putMetadata(extraMetadata, "dateFormatColumn", - colName + ColumnFormattingValues.TABLE_DATE_FORMAT_NAME); + colName + ColumnFormattingValues.TABLE_NUMERIC_FORMAT_NAME); + } else if (formatColumns.contains(colName + ColumnFormattingValues.TABLE_DATE_FORMAT_NAME)) { + putMetadata(extraMetadata, "dateFormatColumn", colName + ColumnFormattingValues.TABLE_DATE_FORMAT_NAME); } - fields.put(colName, arrowFieldFor(colName, column, descriptions.get(colName), - inputTable, extraMetadata)); + fields.put(colName, arrowFieldFor(colName, column, descriptions.get(colName), inputTable, extraMetadata)); } return new Schema(new ArrayList<>(fields.values()), schemaMetadata).getSchema(builder); } - private static void putMetadata(final Map metadata, final String key, - final String value) { + private static void putMetadata(final Map metadata, final String key, final String value) { metadata.put("deephaven:" + key, value); } - public static TableDefinition schemaToTableDefinition( - final org.apache.arrow.flatbuf.Schema schema) { - return schemaToTableDefinition(schema.fieldsLength(), i -> schema.fields(i).name(), - i -> visitor -> { - final org.apache.arrow.flatbuf.Field field = schema.fields(i); - for (int j = 0; j < field.customMetadataLength(); j++) { - final 
KeyValue keyValue = field.customMetadata(j); - visitor.accept(keyValue.key(), keyValue.value()); - } - }); + public static TableDefinition schemaToTableDefinition(final org.apache.arrow.flatbuf.Schema schema) { + return schemaToTableDefinition(schema.fieldsLength(), i -> schema.fields(i).name(), i -> visitor -> { + final org.apache.arrow.flatbuf.Field field = schema.fields(i); + for (int j = 0; j < field.customMetadataLength(); j++) { + final KeyValue keyValue = field.customMetadata(j); + visitor.accept(keyValue.key(), keyValue.value()); + } + }); } public static TableDefinition schemaToTableDefinition(final Schema schema) { - return schemaToTableDefinition(schema.getFields().size(), - i -> schema.getFields().get(i).getName(), i -> visitor -> { - schema.getFields().get(i).getMetadata().forEach(visitor); - }); + return schemaToTableDefinition(schema.getFields().size(), i -> schema.getFields().get(i).getName(), + i -> visitor -> { + schema.getFields().get(i).getMetadata().forEach(visitor); + }); } - private static TableDefinition schemaToTableDefinition(final int numColumns, - final IntFunction getName, - final IntFunction>> visitMetadata) { + private static TableDefinition schemaToTableDefinition(final int numColumns, final IntFunction getName, + final IntFunction>> visitMetadata) { final ColumnDefinition[] columns = new ColumnDefinition[numColumns]; for (int i = 0; i < numColumns; ++i) { @@ -226,33 +213,29 @@ private static TableDefinition schemaToTableDefinition(final int numColumns, try { type.setValue(ClassUtil.lookupClass(value)); } catch (final ClassNotFoundException e) { - throw new UncheckedDeephavenException("Could not load class from schema", - e); + throw new UncheckedDeephavenException("Could not load class from schema", e); } } else if (key.equals("deephaven:componentType")) { try { componentType.setValue(ClassUtil.lookupClass(value)); } catch (final ClassNotFoundException e) { - throw new UncheckedDeephavenException("Could not load class from schema", 
- e); + throw new UncheckedDeephavenException("Could not load class from schema", e); } } }); if (type.getValue() == null) { throw GrpcUtil.statusRuntimeException(Code.INVALID_ARGUMENT, - "Schema did not include `deephaven:type` metadata"); + "Schema did not include `deephaven:type` metadata"); } - columns[i] = - ColumnDefinition.fromGenericType(name, type.getValue(), componentType.getValue()); + columns[i] = ColumnDefinition.fromGenericType(name, type.getValue(), componentType.getValue()); } return new TableDefinition(columns); } - private static Field arrowFieldFor(final String name, final ColumnDefinition column, - final String description, final MutableInputTable inputTable, - final Map extraMetadata) { + private static Field arrowFieldFor(final String name, final ColumnDefinition column, final String description, + final MutableInputTable inputTable, final Map extraMetadata) { List children = Collections.emptyList(); // is hidden? @@ -268,34 +251,27 @@ private static Field arrowFieldFor(final String name, final ColumnDefinition } // only one of these will be true, if any are true the column will not be visible - putMetadata(metadata, "isStyle", - name.endsWith(ColumnFormattingValues.TABLE_FORMAT_NAME) + ""); + putMetadata(metadata, "isStyle", name.endsWith(ColumnFormattingValues.TABLE_FORMAT_NAME) + ""); putMetadata(metadata, "isRowStyle", - name.equals( - ColumnFormattingValues.ROW_FORMAT_NAME + ColumnFormattingValues.TABLE_FORMAT_NAME) - + ""); - putMetadata(metadata, "isDateFormat", - name.endsWith(ColumnFormattingValues.TABLE_DATE_FORMAT_NAME) + ""); - putMetadata(metadata, "isNumberFormat", - name.endsWith(ColumnFormattingValues.TABLE_NUMERIC_FORMAT_NAME) + ""); + name.equals(ColumnFormattingValues.ROW_FORMAT_NAME + ColumnFormattingValues.TABLE_FORMAT_NAME) + ""); + putMetadata(metadata, "isDateFormat", name.endsWith(ColumnFormattingValues.TABLE_DATE_FORMAT_NAME) + ""); + putMetadata(metadata, "isNumberFormat", 
name.endsWith(ColumnFormattingValues.TABLE_NUMERIC_FORMAT_NAME) + ""); putMetadata(metadata, "isRollupColumn", name.equals(RollupInfo.ROLLUP_COLUMN) + ""); if (description != null) { putMetadata(metadata, "description", description); } if (inputTable != null) { - putMetadata(metadata, "inputtable.isKey", - Arrays.asList(inputTable.getKeyNames()).contains(name) + ""); + putMetadata(metadata, "inputtable.isKey", Arrays.asList(inputTable.getKeyNames()).contains(name) + ""); } final FieldType fieldType = arrowFieldTypeFor(type, componentType, metadata); if (fieldType.getType().isComplex()) { if (type.isArray()) { - children = Collections.singletonList(new Field("", - arrowFieldTypeFor(componentType, null, metadata), Collections.emptyList())); + children = Collections.singletonList( + new Field("", arrowFieldTypeFor(componentType, null, metadata), Collections.emptyList())); } else { - throw new UnsupportedOperationException( - "Arrow Complex Type Not Supported: " + fieldType.getType()); + throw new UnsupportedOperationException("Arrow Complex Type Not Supported: " + fieldType.getType()); } } @@ -303,7 +279,7 @@ private static Field arrowFieldFor(final String name, final ColumnDefinition } private static FieldType arrowFieldTypeFor(final Class type, final Class componentType, - final Map metadata) { + final Map metadata) { return new FieldType(true, arrowTypeFor(type, componentType), null, metadata); } @@ -337,7 +313,7 @@ private static ArrowType arrowTypeFor(final Class type, final Class compon return LOCAL_TIME_TYPE; } if (type == BigDecimal.class - || type == BigInteger.class) { + || type == BigInteger.class) { return Types.MinorType.VARBINARY.getType(); } if (type == DBDateTime.class) { @@ -347,7 +323,6 @@ private static ArrowType arrowTypeFor(final Class type, final Class compon // everything gets converted to a string return Types.MinorType.VARCHAR.getType(); // aka Utf8 } - throw new IllegalStateException( - "No ArrowType for type: " + type + " w/chunkType: " + 
chunkType); + throw new IllegalStateException("No ArrowType for type: " + type + " w/chunkType: " + chunkType); } } diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/console/ConsoleServiceGrpcImpl.java b/grpc-api/src/main/java/io/deephaven/grpc_api/console/ConsoleServiceGrpcImpl.java index 22a19df80c6..83d7a36fcbe 100644 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/console/ConsoleServiceGrpcImpl.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/console/ConsoleServiceGrpcImpl.java @@ -54,7 +54,7 @@ public class ConsoleServiceGrpcImpl extends ConsoleServiceGrpc.ConsoleServiceImp private static final Logger log = LoggerFactory.getLogger(ConsoleServiceGrpcImpl.class); public static final String WORKER_CONSOLE_TYPE = - Configuration.getInstance().getStringWithDefault("io.deephaven.console.type", "python"); + Configuration.getInstance().getStringWithDefault("io.deephaven.console.type", "python"); private final Map> scriptTypes; private final TicketRouter ticketRouter; @@ -66,11 +66,11 @@ public class ConsoleServiceGrpcImpl extends ConsoleServiceGrpc.ConsoleServiceImp @Inject public ConsoleServiceGrpcImpl(final Map> scriptTypes, - final TicketRouter ticketRouter, - final SessionService sessionService, - final LogBuffer logBuffer, - final LiveTableMonitor liveTableMonitor, - final GlobalSessionProvider globalSessionProvider) { + final TicketRouter ticketRouter, + final SessionService sessionService, + final LogBuffer logBuffer, + final LiveTableMonitor liveTableMonitor, + final GlobalSessionProvider globalSessionProvider) { this.scriptTypes = scriptTypes; this.ticketRouter = ticketRouter; this.sessionService = sessionService; @@ -84,26 +84,24 @@ public ConsoleServiceGrpcImpl(final Map> scriptT } public void initializeGlobalScriptSession() { - globalSessionProvider - .initializeGlobalScriptSession(scriptTypes.get(WORKER_CONSOLE_TYPE).get()); + globalSessionProvider.initializeGlobalScriptSession(scriptTypes.get(WORKER_CONSOLE_TYPE).get()); } @Override 
public void getConsoleTypes(final GetConsoleTypesRequest request, - final StreamObserver responseObserver) { + final StreamObserver responseObserver) { GrpcUtil.rpcWrapper(log, responseObserver, () -> { - // TODO (#702): initially show all console types; the first console determines the - // global console type thereafter + // TODO (#702): initially show all console types; the first console determines the global console type + // thereafter responseObserver.onNext(GetConsoleTypesResponse.newBuilder() - .addConsoleTypes(WORKER_CONSOLE_TYPE) - .build()); + .addConsoleTypes(WORKER_CONSOLE_TYPE) + .build()); responseObserver.onCompleted(); }); } @Override - public void startConsole(StartConsoleRequest request, - StreamObserver responseObserver) { + public void startConsole(StartConsoleRequest request, StreamObserver responseObserver) { GrpcUtil.rpcWrapper(log, responseObserver, () -> { SessionState session = sessionService.getCurrentSession(); // TODO auth hook, ensure the user can do this (owner of worker or admin) @@ -114,41 +112,39 @@ public void startConsole(StartConsoleRequest request, final String sessionType = request.getSessionType(); if (!scriptTypes.containsKey(sessionType)) { throw GrpcUtil.statusRuntimeException(Code.FAILED_PRECONDITION, - "session type '" + sessionType + "' is not supported"); + "session type '" + sessionType + "' is not supported"); } session.newExport(request.getResultId()) - .onError(responseObserver::onError) - .submit(() -> { - final ScriptSession scriptSession; - if (sessionType.equals(WORKER_CONSOLE_TYPE)) { - scriptSession = globalSessionProvider.getGlobalSession(); - } else { - scriptSession = new NoLanguageDeephavenSession(sessionType); - log.error().append("Session type '" + sessionType + "' is disabled." 
+ - "Use the session type '" + WORKER_CONSOLE_TYPE + "' instead.").endl(); - } - - safelyExecute(() -> { - responseObserver.onNext(StartConsoleResponse.newBuilder() - .setResultId(request.getResultId()) - .build()); - responseObserver.onCompleted(); + .onError(responseObserver::onError) + .submit(() -> { + final ScriptSession scriptSession; + if (sessionType.equals(WORKER_CONSOLE_TYPE)) { + scriptSession = globalSessionProvider.getGlobalSession(); + } else { + scriptSession = new NoLanguageDeephavenSession(sessionType); + log.error().append("Session type '" + sessionType + "' is disabled." + + "Use the session type '" + WORKER_CONSOLE_TYPE + "' instead.").endl(); + } + + safelyExecute(() -> { + responseObserver.onNext(StartConsoleResponse.newBuilder() + .setResultId(request.getResultId()) + .build()); + responseObserver.onCompleted(); + }); + + return scriptSession; }); - - return scriptSession; - }); }); } @Override - public void subscribeToLogs(LogSubscriptionRequest request, - StreamObserver responseObserver) { + public void subscribeToLogs(LogSubscriptionRequest request, StreamObserver responseObserver) { GrpcUtil.rpcWrapper(log, responseObserver, () -> { SessionState session = sessionService.getCurrentSession(); // if that didn't fail, we at least are authenticated, but possibly not authorized - // TODO auth hook, ensure the user can do this (owner of worker or admin). same rights - // as creating a console + // TODO auth hook, ensure the user can do this (owner of worker or admin). 
same rights as creating a console // session.getAuthContext().requirePrivilege(LogBuffer); logBuffer.subscribe(new LogBufferStreamAdapter(session, request, responseObserver)); @@ -156,63 +152,55 @@ public void subscribeToLogs(LogSubscriptionRequest request, } @Override - public void executeCommand(ExecuteCommandRequest request, - StreamObserver responseObserver) { + public void executeCommand(ExecuteCommandRequest request, StreamObserver responseObserver) { GrpcUtil.rpcWrapper(log, responseObserver, () -> { final SessionState session = sessionService.getCurrentSession(); SessionState.ExportObject exportedConsole = - ticketRouter.resolve(session, request.getConsoleId()); + ticketRouter.resolve(session, request.getConsoleId()); session.nonExport() - .requiresSerialQueue() - .require(exportedConsole) - .onError(responseObserver::onError) - .submit(() -> { - ScriptSession scriptSession = exportedConsole.get(); - - // produce a diff - ExecuteCommandResponse.Builder diff = ExecuteCommandResponse.newBuilder(); - - ScriptSession.Changes changes = scriptSession.evaluateScript(request.getCode()); - - changes.created.entrySet() - .forEach(entry -> diff.addCreated(makeVariableDefinition(entry))); - changes.updated.entrySet() - .forEach(entry -> diff.addUpdated(makeVariableDefinition(entry))); - changes.removed.entrySet() - .forEach(entry -> diff.addRemoved(makeVariableDefinition(entry))); - - responseObserver.onNext(diff.build()); - responseObserver.onCompleted(); - }); + .requiresSerialQueue() + .require(exportedConsole) + .onError(responseObserver::onError) + .submit(() -> { + ScriptSession scriptSession = exportedConsole.get(); + + // produce a diff + ExecuteCommandResponse.Builder diff = ExecuteCommandResponse.newBuilder(); + + ScriptSession.Changes changes = scriptSession.evaluateScript(request.getCode()); + + changes.created.entrySet().forEach(entry -> diff.addCreated(makeVariableDefinition(entry))); + changes.updated.entrySet().forEach(entry -> 
diff.addUpdated(makeVariableDefinition(entry))); + changes.removed.entrySet().forEach(entry -> diff.addRemoved(makeVariableDefinition(entry))); + + responseObserver.onNext(diff.build()); + responseObserver.onCompleted(); + }); }); } - private static VariableDefinition makeVariableDefinition( - Map.Entry entry) { - return VariableDefinition.newBuilder().setName(entry.getKey()) - .setType(entry.getValue().name()).build(); + private static VariableDefinition makeVariableDefinition(Map.Entry entry) { + return VariableDefinition.newBuilder().setName(entry.getKey()).setType(entry.getValue().name()).build(); } @Override - public void cancelCommand(CancelCommandRequest request, - StreamObserver responseObserver) { + public void cancelCommand(CancelCommandRequest request, StreamObserver responseObserver) { // TODO not yet implemented, need a way to handle stopping a command in a consistent way super.cancelCommand(request, responseObserver); } @Override public void bindTableToVariable(BindTableToVariableRequest request, - StreamObserver responseObserver) { + StreamObserver responseObserver) { GrpcUtil.rpcWrapper(log, responseObserver, () -> { final SessionState session = sessionService.getCurrentSession(); - final SessionState.ExportObject
    exportedTable = - ticketRouter.resolve(session, request.getTableId()); + final SessionState.ExportObject
    exportedTable = ticketRouter.resolve(session, request.getTableId()); final SessionState.ExportObject exportedConsole; ExportBuilder exportBuilder = session.nonExport() - .requiresSerialQueue() - .onError(responseObserver::onError); + .requiresSerialQueue() + .onError(responseObserver::onError); if (request.hasConsoleId()) { exportedConsole = ticketRouter.resolve(session, request.getConsoleId()); @@ -223,8 +211,8 @@ public void bindTableToVariable(BindTableToVariableRequest request, } exportBuilder.submit(() -> { - ScriptSession scriptSession = exportedConsole != null ? exportedConsole.get() - : globalSessionProvider.getGlobalSession(); + ScriptSession scriptSession = + exportedConsole != null ? exportedConsole.get() : globalSessionProvider.getGlobalSession(); Table table = exportedTable.get(); scriptSession.setVariable(request.getVariableName(), table); scriptSession.manage(table); @@ -234,212 +222,192 @@ public void bindTableToVariable(BindTableToVariableRequest request, }); } - // TODO will be moved to a more general place, serve as a general "Fetch from scope" and this - // will be deprecated + // TODO will be moved to a more general place, serve as a general "Fetch from scope" and this will be deprecated @Override - public void fetchTable(FetchTableRequest request, - StreamObserver responseObserver) { + public void fetchTable(FetchTableRequest request, StreamObserver responseObserver) { GrpcUtil.rpcWrapper(log, responseObserver, () -> { final SessionState session = sessionService.getCurrentSession(); SessionState.ExportObject exportedConsole = - ticketRouter.resolve(session, request.getConsoleId()); + ticketRouter.resolve(session, request.getConsoleId()); session.newExport(request.getTableId()) - .require(exportedConsole) - .onError(responseObserver::onError) - .submit(() -> liveTableMonitor.exclusiveLock().computeLocked(() -> { - ScriptSession scriptSession = exportedConsole.get(); - String tableName = request.getTableName(); - if 
(!scriptSession.hasVariableName(tableName)) { - throw GrpcUtil.statusRuntimeException(Code.NOT_FOUND, - "No value exists with name " + tableName); - } - - // Explicit typecheck to catch any wrong-type-ness right away - Object result = - scriptSession.unwrapObject(scriptSession.getVariable(tableName)); - if (!(result instanceof Table)) { - throw GrpcUtil.statusRuntimeException(Code.FAILED_PRECONDITION, - "Value bound to name " + tableName + " is not a Table"); - } - - // Apply preview columns TODO core#107 move to table service - Table table = ColumnPreviewManager.applyPreview((Table) result); - - safelyExecute(() -> { - final TableReference resultRef = - TableReference.newBuilder().setTicket(request.getTableId()).build(); - responseObserver.onNext( - TableServiceGrpcImpl.buildTableCreationResponse(resultRef, table)); - responseObserver.onCompleted(); - }); - return table; - })); + .require(exportedConsole) + .onError(responseObserver::onError) + .submit(() -> liveTableMonitor.exclusiveLock().computeLocked(() -> { + ScriptSession scriptSession = exportedConsole.get(); + String tableName = request.getTableName(); + if (!scriptSession.hasVariableName(tableName)) { + throw GrpcUtil.statusRuntimeException(Code.NOT_FOUND, + "No value exists with name " + tableName); + } + + // Explicit typecheck to catch any wrong-type-ness right away + Object result = scriptSession.unwrapObject(scriptSession.getVariable(tableName)); + if (!(result instanceof Table)) { + throw GrpcUtil.statusRuntimeException(Code.FAILED_PRECONDITION, + "Value bound to name " + tableName + " is not a Table"); + } + + // Apply preview columns TODO core#107 move to table service + Table table = ColumnPreviewManager.applyPreview((Table) result); + + safelyExecute(() -> { + final TableReference resultRef = + TableReference.newBuilder().setTicket(request.getTableId()).build(); + responseObserver.onNext(TableServiceGrpcImpl.buildTableCreationResponse(resultRef, table)); + responseObserver.onCompleted(); + }); + 
return table; + })); }); } // TODO(core#101) autocomplete support @Override - public void openDocument(OpenDocumentRequest request, - StreamObserver responseObserver) { - // when we open a document, we should start a parsing thread that will monitor for changes, - // and pre-parse document + public void openDocument(OpenDocumentRequest request, StreamObserver responseObserver) { + // when we open a document, we should start a parsing thread that will monitor for changes, and pre-parse + // document // so we can respond appropriately when client wants completions. GrpcUtil.rpcWrapper(log, responseObserver, () -> { final SessionState session = sessionService.getCurrentSession(); - SessionState.ExportObject exportedConsole = - session.getExport(request.getConsoleId()); + SessionState.ExportObject exportedConsole = session.getExport(request.getConsoleId()); session - .nonExport() - .require(exportedConsole) - .onError(responseObserver::onError) - .submit(() -> { - final ScriptSession scriptSession = exportedConsole.get(); - final TextDocumentItem doc = request.getTextDocument(); - scriptSession.getParser().open(doc.getText(), doc.getUri(), - Integer.toString(doc.getVersion())); - safelyExecute(() -> { - responseObserver.onNext(OpenDocumentResponse.getDefaultInstance()); - responseObserver.onCompleted(); + .nonExport() + .require(exportedConsole) + .onError(responseObserver::onError) + .submit(() -> { + final ScriptSession scriptSession = exportedConsole.get(); + final TextDocumentItem doc = request.getTextDocument(); + scriptSession.getParser().open(doc.getText(), doc.getUri(), Integer.toString(doc.getVersion())); + safelyExecute(() -> { + responseObserver.onNext(OpenDocumentResponse.getDefaultInstance()); + responseObserver.onCompleted(); + }); }); - }); }); } @Override - public void changeDocument(ChangeDocumentRequest request, - StreamObserver responseObserver) { + public void changeDocument(ChangeDocumentRequest request, StreamObserver responseObserver) { 
GrpcUtil.rpcWrapper(log, responseObserver, () -> { final SessionState session = sessionService.getCurrentSession(); - SessionState.ExportObject exportedConsole = - session.getExport(request.getConsoleId()); + SessionState.ExportObject exportedConsole = session.getExport(request.getConsoleId()); session - .nonExport() - .require(exportedConsole) - .onError(responseObserver::onError) - .submit(() -> { - final ScriptSession scriptSession = exportedConsole.get(); - final VersionedTextDocumentIdentifier text = request.getTextDocument(); - @SuppressWarnings("unchecked") - final CompletionParseService parser = - scriptSession.getParser(); - parser.update(text.getUri(), Integer.toString(text.getVersion()), - request.getContentChangesList()); - safelyExecute(() -> { - responseObserver.onNext(ChangeDocumentResponse.getDefaultInstance()); - responseObserver.onCompleted(); + .nonExport() + .require(exportedConsole) + .onError(responseObserver::onError) + .submit(() -> { + final ScriptSession scriptSession = exportedConsole.get(); + final VersionedTextDocumentIdentifier text = request.getTextDocument(); + @SuppressWarnings("unchecked") + final CompletionParseService parser = + scriptSession.getParser(); + parser.update(text.getUri(), Integer.toString(text.getVersion()), + request.getContentChangesList()); + safelyExecute(() -> { + responseObserver.onNext(ChangeDocumentResponse.getDefaultInstance()); + responseObserver.onCompleted(); + }); }); - }); }); } @Override public void getCompletionItems(GetCompletionItemsRequest request, - StreamObserver responseObserver) { + StreamObserver responseObserver) { GrpcUtil.rpcWrapper(log, responseObserver, () -> { final SessionState session = sessionService.getCurrentSession(); - SessionState.ExportObject exportedConsole = - session.getExport(request.getConsoleId()); + SessionState.ExportObject exportedConsole = session.getExport(request.getConsoleId()); final ScriptSession scriptSession = exportedConsole.get(); session - .nonExport() - 
.require(exportedConsole) - .onError(responseObserver::onError) - .submit(() -> { - - final VersionedTextDocumentIdentifier doc = request.getTextDocument(); - final VariableProvider vars = scriptSession.getVariableProvider(); - final CompletionLookups h = CompletionLookups.preload(scriptSession); - // The only stateful part of a completer is the CompletionLookups, which are - // already once-per-session-cached - // so, we'll just create a new completer for each request. No need to hand onto - // these guys. - final ChunkerCompleter completer = new ChunkerCompleter(log, vars, h); - @SuppressWarnings("unchecked") - final CompletionParseService parser = - scriptSession.getParser(); - final ParsedDocument parsed = parser.finish(doc.getUri()); - int offset = - LspTools.getOffsetFromPosition(parsed.getSource(), request.getPosition()); - final Collection results = - completer.runCompletion(parsed, request.getPosition(), offset); - final GetCompletionItemsResponse mangledResults = - GetCompletionItemsResponse.newBuilder() - .addAllItems(results.stream().map( - // insertTextFormat is a default we used to set in constructor; - // for now, we'll just process the objects before sending back to - // client - item -> item.setInsertTextFormat(2).build()) - .collect(Collectors.toSet())) - .build(); - - safelyExecute(() -> { - responseObserver.onNext(mangledResults); - responseObserver.onCompleted(); + .nonExport() + .require(exportedConsole) + .onError(responseObserver::onError) + .submit(() -> { + + final VersionedTextDocumentIdentifier doc = request.getTextDocument(); + final VariableProvider vars = scriptSession.getVariableProvider(); + final CompletionLookups h = CompletionLookups.preload(scriptSession); + // The only stateful part of a completer is the CompletionLookups, which are already + // once-per-session-cached + // so, we'll just create a new completer for each request. No need to hand onto these guys. 
+ final ChunkerCompleter completer = new ChunkerCompleter(log, vars, h); + @SuppressWarnings("unchecked") + final CompletionParseService parser = + scriptSession.getParser(); + final ParsedDocument parsed = parser.finish(doc.getUri()); + int offset = LspTools.getOffsetFromPosition(parsed.getSource(), request.getPosition()); + final Collection results = + completer.runCompletion(parsed, request.getPosition(), offset); + final GetCompletionItemsResponse mangledResults = GetCompletionItemsResponse.newBuilder() + .addAllItems(results.stream().map( + // insertTextFormat is a default we used to set in constructor; + // for now, we'll just process the objects before sending back to client + item -> item.setInsertTextFormat(2).build()).collect(Collectors.toSet())) + .build(); + + safelyExecute(() -> { + responseObserver.onNext(mangledResults); + responseObserver.onCompleted(); + }); }); - }); }); } @Override - public void closeDocument(CloseDocumentRequest request, - StreamObserver responseObserver) { + public void closeDocument(CloseDocumentRequest request, StreamObserver responseObserver) { GrpcUtil.rpcWrapper(log, responseObserver, () -> { final SessionState session = sessionService.getCurrentSession(); - SessionState.ExportObject exportedConsole = - session.getExport(request.getConsoleId()); + SessionState.ExportObject exportedConsole = session.getExport(request.getConsoleId()); session - .nonExport() - .require(exportedConsole) - .onError(responseObserver::onError) - .submit(() -> { - final ScriptSession scriptSession = exportedConsole.get(); - scriptSession.getParser().close(request.getTextDocument().getUri()); - safelyExecute(() -> { - responseObserver.onNext(CloseDocumentResponse.getDefaultInstance()); - responseObserver.onCompleted(); + .nonExport() + .require(exportedConsole) + .onError(responseObserver::onError) + .submit(() -> { + final ScriptSession scriptSession = exportedConsole.get(); + scriptSession.getParser().close(request.getTextDocument().getUri()); + 
safelyExecute(() -> { + responseObserver.onNext(CloseDocumentResponse.getDefaultInstance()); + responseObserver.onCompleted(); + }); }); - }); }); } @Override - public void fetchFigure(FetchFigureRequest request, - StreamObserver responseObserver) { + public void fetchFigure(FetchFigureRequest request, StreamObserver responseObserver) { GrpcUtil.rpcWrapper(log, responseObserver, () -> { final SessionState session = sessionService.getCurrentSession(); - SessionState.ExportObject exportedConsole = - session.getExport(request.getConsoleId()); + SessionState.ExportObject exportedConsole = session.getExport(request.getConsoleId()); session.nonExport() - .require(exportedConsole) - .onError(responseObserver::onError) - .submit(() -> { - ScriptSession scriptSession = exportedConsole.get(); - - String figureName = request.getFigureName(); - if (!scriptSession.hasVariableName(figureName)) { - throw GrpcUtil.statusRuntimeException(Code.NOT_FOUND, - "No value exists with name " + figureName); - } - - Object result = - scriptSession.unwrapObject(scriptSession.getVariable(figureName)); - if (!(result instanceof FigureWidget)) { - throw GrpcUtil.statusRuntimeException(Code.FAILED_PRECONDITION, - "Value bound to name " + figureName + " is not a FigureWidget"); - } - FigureWidget widget = (FigureWidget) result; - - FigureDescriptor translated = FigureWidgetTranslator.translate(widget, session); - - responseObserver.onNext( - FetchFigureResponse.newBuilder().setFigureDescriptor(translated).build()); - responseObserver.onCompleted(); - }); + .require(exportedConsole) + .onError(responseObserver::onError) + .submit(() -> { + ScriptSession scriptSession = exportedConsole.get(); + + String figureName = request.getFigureName(); + if (!scriptSession.hasVariableName(figureName)) { + throw GrpcUtil.statusRuntimeException(Code.NOT_FOUND, + "No value exists with name " + figureName); + } + + Object result = scriptSession.unwrapObject(scriptSession.getVariable(figureName)); + if (!(result 
instanceof FigureWidget)) { + throw GrpcUtil.statusRuntimeException(Code.FAILED_PRECONDITION, + "Value bound to name " + figureName + " is not a FigureWidget"); + } + FigureWidget widget = (FigureWidget) result; + + FigureDescriptor translated = FigureWidgetTranslator.translate(widget, session); + + responseObserver + .onNext(FetchFigureResponse.newBuilder().setFigureDescriptor(translated).build()); + responseObserver.onCompleted(); + }); }); } @@ -450,15 +418,14 @@ private class LogBufferStreamAdapter implements Closeable, LogBufferRecordListen private boolean isClosed = false; public LogBufferStreamAdapter( - final SessionState session, - final LogSubscriptionRequest request, - final StreamObserver responseObserver) { + final SessionState session, + final LogSubscriptionRequest request, + final StreamObserver responseObserver) { this.session = session; this.request = request; this.responseObserver = responseObserver; session.addOnCloseCallback(this); - ((ServerCallStreamObserver) responseObserver) - .setOnCancelHandler(this::tryClose); + ((ServerCallStreamObserver) responseObserver).setOnCancelHandler(this::tryClose); } @Override @@ -483,8 +450,7 @@ private void tryClose() { @Override public void record(LogBufferRecord record) { // only pass levels the client wants - if (request.getLevelsCount() != 0 - && !request.getLevelsList().contains(record.getLevel().getName())) { + if (request.getLevelsCount() != 0 && !request.getLevelsList().contains(record.getLevel().getName())) { return; } @@ -494,18 +460,17 @@ public void record(LogBufferRecord record) { return; } - // TODO this is not a good implementation, just a quick one, but it does appear to be - // safe, + // TODO this is not a good implementation, just a quick one, but it does appear to be safe, // since LogBuffer is synchronized on access to the listeners. 
We're on the same thread // as all other log receivers and try { LogSubscriptionData payload = LogSubscriptionData.newBuilder() - .setMicros(record.getTimestampMicros()) - .setLogLevel(record.getLevel().getName()) - // this could be done on either side, doing it here because its a weird charset - // and we should own that - .setMessage(record.getDataString()) - .build(); + .setMicros(record.getTimestampMicros()) + .setLogLevel(record.getLevel().getName()) + // this could be done on either side, doing it here because its a weird charset and we should + // own that + .setMessage(record.getDataString()) + .build(); synchronized (responseObserver) { responseObserver.onNext(payload); } diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/console/ScopeTicketResolver.java b/grpc-api/src/main/java/io/deephaven/grpc_api/console/ScopeTicketResolver.java index 5b20e3389d1..afac08d2f80 100644 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/console/ScopeTicketResolver.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/console/ScopeTicketResolver.java @@ -44,56 +44,50 @@ public String getLogNameFor(ByteBuffer ticket) { @Override public SessionState.ExportObject flightInfoFor( - @Nullable final SessionState session, final Flight.FlightDescriptor descriptor) { - // there is no mechanism to wait for a scope variable to resolve; require that the scope - // variable exists now + @Nullable final SessionState session, final Flight.FlightDescriptor descriptor) { + // there is no mechanism to wait for a scope variable to resolve; require that the scope variable exists now final String scopeName = nameForDescriptor(descriptor); - final Flight.FlightInfo flightInfo = - LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> { - final ScriptSession gss = globalSessionProvider.getGlobalSession(); - Object scopeVar = gss.getVariable(scopeName); - if (scopeVar == null) { - throw GrpcUtil.statusRuntimeException(Code.FAILED_PRECONDITION, + final Flight.FlightInfo flightInfo = 
LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> { + final ScriptSession gss = globalSessionProvider.getGlobalSession(); + Object scopeVar = gss.getVariable(scopeName); + if (scopeVar == null) { + throw GrpcUtil.statusRuntimeException(Code.FAILED_PRECONDITION, "Could not resolve: no variable exists with name '" + scopeName + "'"); - } - if (scopeVar instanceof Table) { - return TicketRouter.getFlightInfo((Table) scopeVar, descriptor, - ticketForName(scopeName)); - } + } + if (scopeVar instanceof Table) { + return TicketRouter.getFlightInfo((Table) scopeVar, descriptor, ticketForName(scopeName)); + } - throw GrpcUtil.statusRuntimeException(Code.FAILED_PRECONDITION, + throw GrpcUtil.statusRuntimeException(Code.FAILED_PRECONDITION, "Could not resolve: no variable exists with name '" + scopeName + "'"); - }); + }); return SessionState.wrapAsExport(flightInfo); } @Override - public void forAllFlightInfo(@Nullable final SessionState session, - final Consumer visitor) { + public void forAllFlightInfo(@Nullable final SessionState session, final Consumer visitor) { globalSessionProvider.getGlobalSession().getVariables().forEach((varName, varObj) -> { if (varObj instanceof Table) { - visitor.accept(TicketRouter.getFlightInfo((Table) varObj, - descriptorForName(varName), ticketForName(varName))); + visitor.accept( + TicketRouter.getFlightInfo((Table) varObj, descriptorForName(varName), ticketForName(varName))); } }); } @Override - public SessionState.ExportObject resolve(@Nullable final SessionState session, - final ByteBuffer ticket) { + public SessionState.ExportObject resolve(@Nullable final SessionState session, final ByteBuffer ticket) { return resolve(session, nameForTicket(ticket)); } @Override public SessionState.ExportObject resolve(@Nullable final SessionState session, - final Flight.FlightDescriptor descriptor) { + final Flight.FlightDescriptor descriptor) { return resolve(session, nameForDescriptor(descriptor)); } - private SessionState.ExportObject 
resolve(@Nullable final SessionState session, - final String scopeName) { + private SessionState.ExportObject resolve(@Nullable final SessionState session, final String scopeName) { // if we are not attached to a session, check the scope for a variable right now final T export = LiveTableMonitor.DEFAULT.sharedLock().computeLocked(() -> { final ScriptSession gss = globalSessionProvider.getGlobalSession(); @@ -101,46 +95,43 @@ private SessionState.ExportObject resolve(@Nullable final SessionState se T scopeVar = (T) gss.getVariable(scopeName); if (scopeVar == null) { throw GrpcUtil.statusRuntimeException(Code.FAILED_PRECONDITION, - "Could not resolve: no variable exists with name '" + scopeName + "'"); + "Could not resolve: no variable exists with name '" + scopeName + "'"); } return scopeVar; }); if (export == null) { throw GrpcUtil.statusRuntimeException(Code.FAILED_PRECONDITION, - "Could not resolve: no variable exists with name '" + scopeName + "'"); + "Could not resolve: no variable exists with name '" + scopeName + "'"); } return SessionState.wrapAsExport(export); } @Override - public SessionState.ExportBuilder publish(final SessionState session, - final ByteBuffer ticket) { + public SessionState.ExportBuilder publish(final SessionState session, final ByteBuffer ticket) { return publish(session, nameForTicket(ticket)); } @Override public SessionState.ExportBuilder publish(final SessionState session, - final Flight.FlightDescriptor descriptor) { + final Flight.FlightDescriptor descriptor) { return publish(session, nameForDescriptor(descriptor)); } - private SessionState.ExportBuilder publish(final SessionState session, - final String varName) { - // We publish to the query scope after the client finishes publishing their result. We - // accomplish this by + private SessionState.ExportBuilder publish(final SessionState session, final String varName) { + // We publish to the query scope after the client finishes publishing their result. 
We accomplish this by // directly depending on the result of this export builder. final SessionState.ExportBuilder resultBuilder = session.nonExport(); final SessionState.ExportObject resultExport = resultBuilder.getExport(); final SessionState.ExportBuilder publishTask = session.nonExport(); publishTask - .requiresSerialQueue() - .require(resultExport) - .submit(() -> { - final ScriptSession gss = globalSessionProvider.getGlobalSession(); - gss.setVariable(varName, resultExport.get()); - }); + .requiresSerialQueue() + .require(resultExport) + .submit(() -> { + final ScriptSession gss = globalSessionProvider.getGlobalSession(); + gss.setVariable(varName, resultExport.get()); + }); return resultBuilder; } @@ -154,8 +145,8 @@ private SessionState.ExportBuilder publish(final SessionState session, public static Flight.Ticket ticketForName(final String name) { final byte[] ticket = (TICKET_PREFIX + '/' + name).getBytes(StandardCharsets.UTF_8); return Flight.Ticket.newBuilder() - .setTicket(ByteStringAccess.wrap(ticket)) - .build(); + .setTicket(ByteStringAccess.wrap(ticket)) + .build(); } /** @@ -166,10 +157,10 @@ public static Flight.Ticket ticketForName(final String name) { */ public static Flight.FlightDescriptor descriptorForName(final String name) { return Flight.FlightDescriptor.newBuilder() - .setType(Flight.FlightDescriptor.DescriptorType.PATH) - .addPath(FLIGHT_DESCRIPTOR_ROUTE) - .addPath(name) - .build(); + .setType(Flight.FlightDescriptor.DescriptorType.PATH) + .addPath(FLIGHT_DESCRIPTOR_ROUTE) + .addPath(name) + .build(); } /** @@ -183,9 +174,9 @@ public static String nameForTicket(final ByteBuffer ticket) { throw GrpcUtil.statusRuntimeException(Code.FAILED_PRECONDITION, "Ticket not supplied"); } if (ticket.remaining() < 3 || ticket.get(ticket.position()) != TICKET_PREFIX - || ticket.get(ticket.position() + 1) != '/') { + || ticket.get(ticket.position() + 1) != '/') { throw GrpcUtil.statusRuntimeException(Code.FAILED_PRECONDITION, - "Cannot parse ticket: 
found 0x" + byteBufToHex(ticket) + "' (hex)"); + "Cannot parse ticket: found 0x" + byteBufToHex(ticket) + "' (hex)"); } final int initialLimit = ticket.limit(); @@ -196,7 +187,7 @@ public static String nameForTicket(final ByteBuffer ticket) { return decoder.decode(ticket).toString(); } catch (CharacterCodingException e) { throw GrpcUtil.statusRuntimeException(Code.FAILED_PRECONDITION, - "Cannot parse ticket: failed to decode: " + e.getMessage()); + "Cannot parse ticket: failed to decode: " + e.getMessage()); } finally { ticket.position(initialPosition); ticket.limit(initialLimit); @@ -211,13 +202,12 @@ public static String nameForTicket(final ByteBuffer ticket) { */ public static String nameForDescriptor(final Flight.FlightDescriptor descriptor) { if (descriptor.getType() != Flight.FlightDescriptor.DescriptorType.PATH) { - throw GrpcUtil.statusRuntimeException(Code.FAILED_PRECONDITION, - "Cannot parse descriptor: not a path"); + throw GrpcUtil.statusRuntimeException(Code.FAILED_PRECONDITION, "Cannot parse descriptor: not a path"); } if (descriptor.getPathCount() != 2) { throw GrpcUtil.statusRuntimeException(Code.FAILED_PRECONDITION, - "Cannot parse descriptor: unexpected path length (found: " - + TicketRouterHelper.getLogNameFor(descriptor) + ", expected: 2)"); + "Cannot parse descriptor: unexpected path length (found: " + + TicketRouterHelper.getLogNameFor(descriptor) + ", expected: 2)"); } return descriptor.getPath(1); diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/log/LogInit.java b/grpc-api/src/main/java/io/deephaven/grpc_api/log/LogInit.java index d81538b823c..1bf874030a2 100644 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/log/LogInit.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/log/LogInit.java @@ -22,7 +22,7 @@ public class LogInit { @Inject public LogInit(StandardStreamState standardStreamState, LogBuffer logBuffer, LogSink logSink, - Set sinkInits) { + Set sinkInits) { this.standardStreamState = standardStreamState; 
this.logBuffer = logBuffer; this.logSink = logSink; @@ -43,13 +43,10 @@ private void configureLoggerSink() { private void checkLogSinkIsSingleton() { if (log.getSink() != logSink) { - // If this contract is broken, we'll need to start attaching interceptors at - // LoggerFactory - // Logger creation time, or have some sort of mechanism for LoggerFactory to notify us - // about + // If this contract is broken, we'll need to start attaching interceptors at LoggerFactory + // Logger creation time, or have some sort of mechanism for LoggerFactory to notify us about // new log creations. - throw new RuntimeException( - String.format("Logger impl %s does not work with the current implementation.", + throw new RuntimeException(String.format("Logger impl %s does not work with the current implementation.", log.getClass().getName())); } } diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/log/LogModule.java b/grpc-api/src/main/java/io/deephaven/grpc_api/log/LogModule.java index 7dbed467a79..dc3c5d20436 100644 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/log/LogModule.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/log/LogModule.java @@ -28,8 +28,7 @@ public interface LogModule { @Provides static LogBuffer providesLogBuffer() { - return LogBufferGlobal.getInstance() - .orElseThrow(() -> new RuntimeException("No global LogBuffer found")); + return LogBufferGlobal.getInstance().orElseThrow(() -> new RuntimeException("No global LogBuffer found")); } @Provides @@ -42,8 +41,8 @@ static LogSink providesLogSink() { @ElementsIntoSet static Set providesLoggerSinkSetups() { return StreamSupport - .stream(ServiceLoader.load(InitSink.class).spliterator(), false) - .collect(Collectors.toSet()); + .stream(ServiceLoader.load(InitSink.class).spliterator(), false) + .collect(Collectors.toSet()); } @Provides @@ -56,8 +55,7 @@ static StreamToLogBuffer providesStreamToLogBuffer(LogBuffer logBuffer) { } @Provides - static StreamToPrintStreams 
providesStreamToReal(@Named("out") PrintStream out, - @Named("err") PrintStream err) { + static StreamToPrintStreams providesStreamToReal(@Named("out") PrintStream out, @Named("err") PrintStream err) { final boolean skipStdout = Boolean.getBoolean("stdout.skipReal"); final boolean skipStderr = Boolean.getBoolean("stderr.skipReal"); return new StreamToPrintStreams(skipStdout ? null : out, skipStderr ? null : err); diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/runner/DeephavenApiServer.java b/grpc-api/src/main/java/io/deephaven/grpc_api/runner/DeephavenApiServer.java index a538726081f..04c00f5caad 100644 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/runner/DeephavenApiServer.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/runner/DeephavenApiServer.java @@ -54,40 +54,38 @@ interface Builder { } } - public static void startMain(PrintStream out, PrintStream err) - throws IOException, InterruptedException { + public static void startMain(PrintStream out, PrintStream err) throws IOException, InterruptedException { final ServerComponent injector = DaggerDeephavenApiServer_ServerComponent - .builder() - .withPort(8080) - .withSchedulerPoolSize(4) - .withSessionTokenExpireTmMs(300000) // defaults to 5 min - .withOut(out) - .withErr(err) - .build(); + .builder() + .withPort(8080) + .withSchedulerPoolSize(4) + .withSessionTokenExpireTmMs(300000) // defaults to 5 min + .withOut(out) + .withErr(err) + .build(); final DeephavenApiServer server = injector.getServer(); final SessionService sessionService = injector.getSessionService(); // Stop accepting new gRPC requests. - ProcessEnvironment.getGlobalShutdownManager() - .registerTask(ShutdownManager.OrderingCategory.FIRST, server.server::shutdown); + ProcessEnvironment.getGlobalShutdownManager().registerTask(ShutdownManager.OrderingCategory.FIRST, + server.server::shutdown); // Close outstanding sessions to give any gRPCs closure. 
- ProcessEnvironment.getGlobalShutdownManager().registerTask( - ShutdownManager.OrderingCategory.MIDDLE, sessionService::closeAllSessions); + ProcessEnvironment.getGlobalShutdownManager().registerTask(ShutdownManager.OrderingCategory.MIDDLE, + sessionService::closeAllSessions); // Finally wait for gRPC to exit now. - ProcessEnvironment.getGlobalShutdownManager() - .registerTask(ShutdownManager.OrderingCategory.LAST, () -> { - try { - if (!server.server.awaitTermination(10, TimeUnit.SECONDS)) { - log.error().append( + ProcessEnvironment.getGlobalShutdownManager().registerTask(ShutdownManager.OrderingCategory.LAST, () -> { + try { + if (!server.server.awaitTermination(10, TimeUnit.SECONDS)) { + log.error().append( "The gRPC server did not terminate in a reasonable amount of time. Invoking shutdownNow().") .endl(); - server.server.shutdownNow(); - } - } catch (final InterruptedException ignored) { + server.server.shutdownNow(); } - }); + } catch (final InterruptedException ignored) { + } + }); server.start(); server.blockUntilShutdown(); @@ -102,10 +100,10 @@ public static void startMain(PrintStream out, PrintStream err) @Inject public DeephavenApiServer( - final Server server, - final LiveTableMonitor ltm, - final LogInit logInit, - final ConsoleServiceGrpcImpl consoleService) { + final Server server, + final LiveTableMonitor ltm, + final LogInit logInit, + final ConsoleServiceGrpcImpl consoleService) { this.server = server; this.ltm = ltm; this.logInit = logInit; diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/runner/DeephavenApiServerModule.java b/grpc-api/src/main/java/io/deephaven/grpc_api/runner/DeephavenApiServerModule.java index 92a9eeabb37..a83edb85b80 100644 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/runner/DeephavenApiServerModule.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/runner/DeephavenApiServerModule.java @@ -50,9 +50,9 @@ public class DeephavenApiServerModule { @Provides @Singleton static Server buildServer( - final 
@Named("grpc.port") int port, - final Set services, - final Set interceptors) { + final @Named("grpc.port") int port, + final Set services, + final Set interceptors) { final ServerBuilder builder = ServerBuilder.forPort(port); @@ -84,17 +84,17 @@ static Set primeInterceptors() { public static Scheduler provideScheduler(final @Named("scheduler.poolSize") int poolSize) { final ThreadFactory concurrentThreadFactory = new ThreadFactory("Scheduler-Concurrent"); final ScheduledExecutorService concurrentExecutor = - new ScheduledThreadPoolExecutor(poolSize, concurrentThreadFactory) { - @Override - protected void afterExecute(final Runnable task, final Throwable error) { - super.afterExecute(task, error); - DeephavenApiServerModule.afterExecute("concurrentExecutor", task, error); - } - }; + new ScheduledThreadPoolExecutor(poolSize, concurrentThreadFactory) { + @Override + protected void afterExecute(final Runnable task, final Throwable error) { + super.afterExecute(task, error); + DeephavenApiServerModule.afterExecute("concurrentExecutor", task, error); + } + }; final ThreadFactory serialThreadFactory = new ThreadFactory("Scheduler-Serial"); final ExecutorService serialExecutor = new ThreadPoolExecutor(1, 1, 0L, - TimeUnit.MILLISECONDS, new LinkedBlockingQueue<>(), serialThreadFactory) { + TimeUnit.MILLISECONDS, new LinkedBlockingQueue<>(), serialThreadFactory) { @Override protected void afterExecute(final Runnable task, final Throwable error) { @@ -107,12 +107,11 @@ protected void afterExecute(final Runnable task, final Throwable error) { } private static void report(final String executorType, final Throwable error) { - ProcessEnvironment.getGlobalFatalErrorReporter() - .report("Exception while processing " + executorType + " task", error); + ProcessEnvironment.getGlobalFatalErrorReporter().report("Exception while processing " + executorType + " task", + error); } - private static void afterExecute(final String executorType, final Runnable task, - final Throwable error) { 
+ private static void afterExecute(final String executorType, final Runnable task, final Throwable error) { if (error != null) { report(executorType, error); } else if (task instanceof Future) { diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/runner/Main.java b/grpc-api/src/main/java/io/deephaven/grpc_api/runner/Main.java index c00e6f09c4b..54df5807cc6 100644 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/runner/Main.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/runner/Main.java @@ -28,11 +28,9 @@ public static void main(String[] args) throws IOException, InterruptedException // Capture the original System.out and System.err early PrintStreamGlobals.init(); - // Since our dagger injection happens later, we need to provider a static way to get the - // LogBuffer (for example, + // Since our dagger injection happens later, we need to provider a static way to get the LogBuffer (for example, // logback configuration may reference LogBufferAppender). - LogBufferGlobal - .setInstance(new LogBufferInterceptor(Integer.getInteger("logBuffer.history", 1024))); + LogBufferGlobal.setInstance(new LogBufferInterceptor(Integer.getInteger("logBuffer.history", 1024))); final Logger log = LoggerFactory.getLogger(Main.class); @@ -40,11 +38,10 @@ public static void main(String[] args) throws IOException, InterruptedException final Configuration config = Configuration.getInstance(); - // Push our log to ProcessEnvironment, so that any parts of the system relying on - // ProcessEnvironment + // Push our log to ProcessEnvironment, so that any parts of the system relying on ProcessEnvironment // instead of LoggerFactory can get the correct logger. 
- final ProcessEnvironment processEnvironment = ProcessEnvironment - .basicInteractiveProcessInitialization(config, Main.class.getName(), log); + final ProcessEnvironment processEnvironment = + ProcessEnvironment.basicInteractiveProcessInitialization(config, Main.class.getName(), log); Thread.setDefaultUncaughtExceptionHandler(processEnvironment.getFatalErrorReporter()); DeephavenApiServer.startMain(PrintStreamGlobals.getOut(), PrintStreamGlobals.getErr()); diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/session/ExportTicketResolver.java b/grpc-api/src/main/java/io/deephaven/grpc_api/session/ExportTicketResolver.java index 6b0d5f31cb6..4c99110f7dc 100644 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/session/ExportTicketResolver.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/session/ExportTicketResolver.java @@ -30,38 +30,34 @@ public String getLogNameFor(ByteBuffer ticket) { } @Override - public SessionState.ExportObject flightInfoFor( - @Nullable final SessionState session, final Flight.FlightDescriptor descriptor) { + public SessionState.ExportObject flightInfoFor(@Nullable final SessionState session, + final Flight.FlightDescriptor descriptor) { if (session == null) { throw GrpcUtil.statusRuntimeException(Code.UNAUTHENTICATED, "No session to search"); } final SessionState.ExportObject export = resolve(session, descriptor); return session.nonExport() - .require(export) - .submit(() -> { - if (export.get() instanceof Table) { - return TicketRouter.getFlightInfo((Table) export.get(), descriptor, - ExportTicketHelper.descriptorToArrowTicket(descriptor)); - } - - throw GrpcUtil.statusRuntimeException(Code.FAILED_PRECONDITION, - "No such flight exists"); - }); + .require(export) + .submit(() -> { + if (export.get() instanceof Table) { + return TicketRouter.getFlightInfo((Table) export.get(), descriptor, + ExportTicketHelper.descriptorToArrowTicket(descriptor)); + } + + throw GrpcUtil.statusRuntimeException(Code.FAILED_PRECONDITION, "No such 
flight exists"); + }); } @Override - public void forAllFlightInfo(@Nullable final SessionState session, - final Consumer visitor) { + public void forAllFlightInfo(@Nullable final SessionState session, final Consumer visitor) { // sessions do not expose tickets via list flights } @Override - public SessionState.ExportObject resolve(@Nullable final SessionState session, - final ByteBuffer ticket) { + public SessionState.ExportObject resolve(@Nullable final SessionState session, final ByteBuffer ticket) { if (session == null) { - throw GrpcUtil.statusRuntimeException(Code.UNAUTHENTICATED, - "No session to resolve from"); + throw GrpcUtil.statusRuntimeException(Code.UNAUTHENTICATED, "No session to resolve from"); } return session.getExport(ExportTicketHelper.ticketToExportId(ticket)); @@ -69,24 +65,22 @@ public SessionState.ExportObject resolve(@Nullable final SessionState ses @Override public SessionState.ExportObject resolve(@Nullable final SessionState session, - final Flight.FlightDescriptor descriptor) { + final Flight.FlightDescriptor descriptor) { if (session == null) { - throw GrpcUtil.statusRuntimeException(Code.UNAUTHENTICATED, - "No session to resolve from"); + throw GrpcUtil.statusRuntimeException(Code.UNAUTHENTICATED, "No session to resolve from"); } return session.getExport(ExportTicketHelper.descriptorToExportId(descriptor)); } @Override - public SessionState.ExportBuilder publish(final SessionState session, - final ByteBuffer ticket) { + public SessionState.ExportBuilder publish(final SessionState session, final ByteBuffer ticket) { return session.newExport(ExportTicketHelper.ticketToExportId(ticket)); } @Override public SessionState.ExportBuilder publish(final SessionState session, - final Flight.FlightDescriptor descriptor) { + final Flight.FlightDescriptor descriptor) { return session.newExport(ExportTicketHelper.descriptorToExportId(descriptor)); } } diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/session/SessionModule.java 
b/grpc-api/src/main/java/io/deephaven/grpc_api/session/SessionModule.java index ca2e68ac2bf..b496f738bdf 100644 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/session/SessionModule.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/session/SessionModule.java @@ -15,7 +15,7 @@ public interface SessionModule { @Binds @IntoSet ServerInterceptor bindSessionServiceInterceptor( - SessionServiceGrpcImpl.AuthServerInterceptor sessionServiceInterceptor); + SessionServiceGrpcImpl.AuthServerInterceptor sessionServiceInterceptor); @Binds @IntoSet diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/session/SessionService.java b/grpc-api/src/main/java/io/deephaven/grpc_api/session/SessionService.java index 79ca7a25399..a156405f613 100644 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/session/SessionService.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/session/SessionService.java @@ -37,21 +37,18 @@ public class SessionService { @Inject() public SessionService(final Scheduler scheduler, final SessionState.Factory sessionFactory, - @Named("session.tokenExpireMs") final long tokenExpireMs) { + @Named("session.tokenExpireMs") final long tokenExpireMs) { this.scheduler = scheduler; this.sessionFactory = sessionFactory; this.tokenExpireMs = tokenExpireMs; if (tokenExpireMs < MIN_COOKIE_EXPIRE_MS) { - throw new IllegalArgumentException( - "session.tokenExpireMs is set too low. It is configured to " - + tokenExpireMs + "ms (minimum is " + MIN_COOKIE_EXPIRE_MS - + "ms). At low levels it is difficult " + throw new IllegalArgumentException("session.tokenExpireMs is set too low. It is configured to " + + tokenExpireMs + "ms (minimum is " + MIN_COOKIE_EXPIRE_MS + "ms). At low levels it is difficult " + "to guarantee smooth operability given a distributed system and potential clock drift"); } - // Protect ourselves from rotation spam, but be loose enough that any reasonable refresh - // strategy works. 
+ // Protect ourselves from rotation spam, but be loose enough that any reasonable refresh strategy works. this.tokenRotateMs = tokenExpireMs / 5; } @@ -68,8 +65,7 @@ public SessionState newSession(final AuthContext authContext) { } /** - * If enough time has passed since the last token refresh, rotate to a new token and reset the - * expiration deadline. + * If enough time has passed since the last token refresh, rotate to a new token and reset the expiration deadline. * * @param session the session to refresh * @return the most recent token expiration @@ -86,16 +82,16 @@ private TokenExpiration refreshToken(final SessionState session, boolean initial synchronized (session) { expiration = session.getExpiration(); - if (expiration != null && expiration.deadline.getMillis() - tokenExpireMs - + tokenRotateMs > now.getMillis()) { + if (expiration != null + && expiration.deadline.getMillis() - tokenExpireMs + tokenRotateMs > now.getMillis()) { // current token is not old enough to rotate return expiration; } do { newUUID = UuidCreator.getRandomBased(); - expiration = new TokenExpiration(newUUID, - DBTimeUtils.millisToTime(now.getMillis() + tokenExpireMs), session); + expiration = new TokenExpiration(newUUID, DBTimeUtils.millisToTime(now.getMillis() + tokenExpireMs), + session); } while (tokenToSession.putIfAbsent(newUUID, expiration) != null); if (initialToken) { @@ -132,19 +128,18 @@ public long getExpirationDelayMs() { public SessionState getSessionForToken(final UUID token) { final TokenExpiration expiration = tokenToSession.get(token); if (expiration == null || expiration.session.isExpired() - || expiration.deadline.compareTo(scheduler.currentTime()) <= 0) { + || expiration.deadline.compareTo(scheduler.currentTime()) <= 0) { return null; } return expiration.session; } /** - * Lookup a session via the SessionServiceGrpcImpl.SESSION_CONTEXT_KEY. This method is only - * valid in the context of the original calling gRPC thread. 
+ * Lookup a session via the SessionServiceGrpcImpl.SESSION_CONTEXT_KEY. This method is only valid in the context of + * the original calling gRPC thread. * * @return the session attached to this gRPC request - * @throws StatusRuntimeException if thread is not attached to a session or if the session is - * expired/closed + * @throws StatusRuntimeException if thread is not attached to a session or if the session is expired/closed */ public SessionState getCurrentSession() { final SessionState session = getOptionalSession(); @@ -155,8 +150,8 @@ public SessionState getCurrentSession() { } /** - * Lookup a session via the SessionServiceGrpcImpl.SESSION_CONTEXT_KEY. This method is only - * valid in the context of the original calling gRPC thread. + * Lookup a session via the SessionServiceGrpcImpl.SESSION_CONTEXT_KEY. This method is only valid in the context of + * the original calling gRPC thread. * * @return the session attached to this gRPC request; null if no session is established */ @@ -193,8 +188,7 @@ public static final class TokenExpiration { public final DBDateTime deadline; public final SessionState session; - public TokenExpiration(final UUID cookie, final DBDateTime deadline, - final SessionState session) { + public TokenExpiration(final UUID cookie, final DBDateTime deadline, final SessionState session) { this.token = cookie; this.deadline = deadline; this.session = session; @@ -219,18 +213,15 @@ public void run() { break; } - // Permanently remove the first token as it is officially expired, note that other - // tokens may exist for - // this session, so the session itself does not expire. We allow multiple tokens to - // co-exist to best - // support out of order requests and thus allow any reasonable client behavior that - // respects a given + // Permanently remove the first token as it is officially expired, note that other tokens may exist for + // this session, so the session itself does not expire. 
We allow multiple tokens to co-exist to best + // support out of order requests and thus allow any reasonable client behavior that respects a given // token expiration time. outstandingCookies.poll(); synchronized (next.session) { if (next.session.getExpiration() != null - && next.session.getExpiration().deadline.getMillis() <= now.getMillis()) { + && next.session.getExpiration().deadline.getMillis() <= now.getMillis()) { next.session.onExpired(); } } diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/session/SessionServiceGrpcImpl.java b/grpc-api/src/main/java/io/deephaven/grpc_api/session/SessionServiceGrpcImpl.java index 46d4fd1d04f..42bdda6cbcc 100644 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/session/SessionServiceGrpcImpl.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/session/SessionServiceGrpcImpl.java @@ -26,9 +26,8 @@ public class SessionServiceGrpcImpl extends SessionServiceGrpc.SessionServiceImp // TODO (#997): use flight AuthConstants public static final String DEEPHAVEN_SESSION_ID = "DEEPHAVEN_SESSION_ID"; public static final Metadata.Key SESSION_HEADER_KEY = - Metadata.Key.of(DEEPHAVEN_SESSION_ID, Metadata.ASCII_STRING_MARSHALLER); - public static final Context.Key SESSION_CONTEXT_KEY = - Context.key(DEEPHAVEN_SESSION_ID); + Metadata.Key.of(DEEPHAVEN_SESSION_ID, Metadata.ASCII_STRING_MARSHALLER); + public static final Context.Key SESSION_CONTEXT_KEY = Context.key(DEEPHAVEN_SESSION_ID); private static final Logger log = LoggerFactory.getLogger(SessionServiceGrpcImpl.class); @@ -37,36 +36,34 @@ public class SessionServiceGrpcImpl extends SessionServiceGrpc.SessionServiceImp @Inject() public SessionServiceGrpcImpl(final SessionService service, - final AuthContextProvider authProvider) { + final AuthContextProvider authProvider) { this.service = service; this.authProvider = authProvider; } @Override - public void newSession(final HandshakeRequest request, - final StreamObserver responseObserver) { + public void newSession(final 
HandshakeRequest request, final StreamObserver responseObserver) { GrpcUtil.rpcWrapper(log, responseObserver, () -> { if (!authProvider.supportsProtocol(request.getAuthProtocol())) { - responseObserver.onError(GrpcUtil.statusRuntimeException(Code.INVALID_ARGUMENT, - "Protocol version not allowed.")); + responseObserver.onError( + GrpcUtil.statusRuntimeException(Code.INVALID_ARGUMENT, "Protocol version not allowed.")); return; } - final AuthContext authContext = - authProvider.authenticate(request.getAuthProtocol(), request.getPayload()); + final AuthContext authContext = authProvider.authenticate(request.getAuthProtocol(), request.getPayload()); if (authContext == null) { - responseObserver.onError(GrpcUtil.statusRuntimeException(Code.UNAUTHENTICATED, - "Authentication failed.")); + responseObserver + .onError(GrpcUtil.statusRuntimeException(Code.UNAUTHENTICATED, "Authentication failed.")); return; } final SessionState session = service.newSession(authContext); responseObserver.onNext(HandshakeResponse.newBuilder() - .setMetadataHeader(ByteString.copyFromUtf8(DEEPHAVEN_SESSION_ID)) - .setSessionToken(session.getExpiration().getTokenAsByteString()) - .setTokenDeadlineTimeMillis(session.getExpiration().deadline.getMillis()) - .setTokenExpirationDelayMillis(service.getExpirationDelayMs()) - .build()); + .setMetadataHeader(ByteString.copyFromUtf8(DEEPHAVEN_SESSION_ID)) + .setSessionToken(session.getExpiration().getTokenAsByteString()) + .setTokenDeadlineTimeMillis(session.getExpiration().deadline.getMillis()) + .setTokenExpirationDelayMillis(service.getExpirationDelayMs()) + .build()); responseObserver.onCompleted(); }); @@ -74,50 +71,47 @@ public void newSession(final HandshakeRequest request, @Override public void refreshSessionToken(final HandshakeRequest request, - final StreamObserver responseObserver) { + final StreamObserver responseObserver) { GrpcUtil.rpcWrapper(log, responseObserver, () -> { if (request.getAuthProtocol() != 0) { - 
responseObserver.onError(GrpcUtil.statusRuntimeException(Code.INVALID_ARGUMENT, - "Protocol version not allowed.")); + responseObserver.onError( + GrpcUtil.statusRuntimeException(Code.INVALID_ARGUMENT, "Protocol version not allowed.")); return; } final SessionState session = service.getCurrentSession(); - if (session != service - .getSessionForToken(UUID.fromString(request.getPayload().toStringUtf8()))) { + if (session != service.getSessionForToken(UUID.fromString(request.getPayload().toStringUtf8()))) { responseObserver.onError(GrpcUtil.statusRuntimeException(Code.INVALID_ARGUMENT, - "Refresh request's session ID does not match metadata header provided ID.")); + "Refresh request's session ID does not match metadata header provided ID.")); return; } final SessionService.TokenExpiration expiration = service.refreshToken(session); responseObserver.onNext(HandshakeResponse.newBuilder() - .setMetadataHeader(ByteString.copyFromUtf8(DEEPHAVEN_SESSION_ID)) - .setSessionToken(expiration.getTokenAsByteString()) - .setTokenDeadlineTimeMillis(expiration.deadline.getMillis()) - .setTokenExpirationDelayMillis(service.getExpirationDelayMs()) - .build()); + .setMetadataHeader(ByteString.copyFromUtf8(DEEPHAVEN_SESSION_ID)) + .setSessionToken(expiration.getTokenAsByteString()) + .setTokenDeadlineTimeMillis(expiration.deadline.getMillis()) + .setTokenExpirationDelayMillis(service.getExpirationDelayMs()) + .build()); responseObserver.onCompleted(); }); } @Override - public void closeSession(final HandshakeRequest request, - final StreamObserver responseObserver) { + public void closeSession(final HandshakeRequest request, final StreamObserver responseObserver) { GrpcUtil.rpcWrapper(log, responseObserver, () -> { if (request.getAuthProtocol() != 0) { - responseObserver.onError(GrpcUtil.statusRuntimeException(Code.INVALID_ARGUMENT, - "Protocol version not allowed.")); + responseObserver.onError( + GrpcUtil.statusRuntimeException(Code.INVALID_ARGUMENT, "Protocol version not allowed.")); 
return; } final SessionState session = service.getCurrentSession(); - if (session != service - .getSessionForToken(UUID.fromString(request.getPayload().toStringUtf8()))) { + if (session != service.getSessionForToken(UUID.fromString(request.getPayload().toStringUtf8()))) { responseObserver.onError(GrpcUtil.statusRuntimeException(Code.INVALID_ARGUMENT, - "Refresh request's session ID does not match metadata header provided ID.")); + "Refresh request's session ID does not match metadata header provided ID.")); return; } @@ -128,33 +122,30 @@ public void closeSession(final HandshakeRequest request, } @Override - public void release(final Ticket request, - final StreamObserver responseObserver) { + public void release(final Ticket request, final StreamObserver responseObserver) { GrpcUtil.rpcWrapper(log, responseObserver, () -> { - final SessionState.ExportObject export = - service.getCurrentSession().getExportIfExists(request); + final SessionState.ExportObject export = service.getCurrentSession().getExportIfExists(request); final ExportNotification.State currState = - export != null ? export.getState() : ExportNotification.State.UNKNOWN; + export != null ? 
export.getState() : ExportNotification.State.UNKNOWN; if (export != null) { export.release(); } - responseObserver.onNext(ReleaseResponse.newBuilder() - .setSuccess(currState != ExportNotification.State.UNKNOWN).build()); + responseObserver.onNext( + ReleaseResponse.newBuilder().setSuccess(currState != ExportNotification.State.UNKNOWN).build()); responseObserver.onCompleted(); }); } @Override public void exportNotifications(final ExportNotificationRequest request, - final StreamObserver responseObserver) { + final StreamObserver responseObserver) { GrpcUtil.rpcWrapper(log, responseObserver, () -> { final SessionState session = service.getCurrentSession(); session.addExportListener(responseObserver); - ((ServerCallStreamObserver) responseObserver) - .setOnCancelHandler(() -> { - session.removeExportListener(responseObserver); - }); + ((ServerCallStreamObserver) responseObserver).setOnCancelHandler(() -> { + session.removeExportListener(responseObserver); + }); }); } @@ -168,13 +159,11 @@ public AuthServerInterceptor(final SessionService service) { } @Override - public ServerCall.Listener interceptCall( - final ServerCall serverCall, - final Metadata metadata, - final ServerCallHandler serverCallHandler) { + public ServerCall.Listener interceptCall(final ServerCall serverCall, + final Metadata metadata, + final ServerCallHandler serverCallHandler) { SessionState session = null; - final Optional tokenBytes = - Optional.ofNullable(metadata.get(SESSION_HEADER_KEY)); + final Optional tokenBytes = Optional.ofNullable(metadata.get(SESSION_HEADER_KEY)); if (tokenBytes.isPresent()) { UUID token = UuidCreator.fromString(tokenBytes.get()); session = service.getSessionForToken(token); diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/session/SessionState.java b/grpc-api/src/main/java/io/deephaven/grpc_api/session/SessionState.java index 4ffbfe6c6f9..de39921c6c7 100644 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/session/SessionState.java +++ 
b/grpc-api/src/main/java/io/deephaven/grpc_api/session/SessionState.java @@ -65,26 +65,24 @@ /** * SessionState manages all exports for a single session. * - * It manages exported {@link io.deephaven.db.util.liveness.LivenessReferent}. It cascades failures - * to child dependencies. + * It manages exported {@link io.deephaven.db.util.liveness.LivenessReferent}. It cascades failures to child + * dependencies. * * TODO: - cyclical dependency detection - out-of-order dependency timeout * * Details Regarding Data Structure of ExportObjects: * - * The exportMap map, exportListeners list, exportListenerVersion, and export object's - * exportListenerVersion work together to enable a listener to synchronize with outstanding exports - * in addition to sending the listener updates while they continue to subscribe. + * The exportMap map, exportListeners list, exportListenerVersion, and export object's exportListenerVersion work + * together to enable a listener to synchronize with outstanding exports in addition to sending the listener updates + * while they continue to subscribe. * * - SessionState::exportMap's purpose is to map from the export id to the export object - * SessionState::exportListeners' purpose is to keep a list of active subscribers - - * SessionState::exportListenerVersion's purpose is to know whether or not a subscriber has already - * seen a status + * SessionState::exportListenerVersion's purpose is to know whether or not a subscriber has already seen a status * - * A listener will receive an export notification for export id NON_EXPORT_ID (a zero) to indicate - * that the refresh has completed. A listener may see an update for an export before receiving the - * "refresh has completed" message. A listener should be prepared to receive duplicate/redundant - * updates. + * A listener will receive an export notification for export id NON_EXPORT_ID (a zero) to indicate that the refresh has + * completed. 
A listener may see an update for an export before receiving the "refresh has completed" message. A + * listener should be prepared to receive duplicate/redundant updates. */ public class SessionState { // Some work items will be dependent on other exports, but do not export anything themselves. @@ -115,28 +113,25 @@ public static ExportObject wrapAsExport(final T export) { private final String sessionId; private volatile SessionService.TokenExpiration expiration = null; private static final AtomicReferenceFieldUpdater EXPIRATION_UPDATER = - AtomicReferenceFieldUpdater.newUpdater(SessionState.class, - SessionService.TokenExpiration.class, "expiration"); + AtomicReferenceFieldUpdater.newUpdater(SessionState.class, SessionService.TokenExpiration.class, + "expiration"); // some types of exports have a more sound story if the server tells the client what to call it private volatile int nextServerAllocatedId = -1; private static final AtomicIntegerFieldUpdater SERVER_EXPORT_UPDATER = - AtomicIntegerFieldUpdater.newUpdater(SessionState.class, "nextServerAllocatedId"); + AtomicIntegerFieldUpdater.newUpdater(SessionState.class, "nextServerAllocatedId"); // maintains all requested exports by this client's session - private final KeyedIntObjectHashMap> exportMap = - new KeyedIntObjectHashMap<>(EXPORT_OBJECT_ID_KEY); + private final KeyedIntObjectHashMap> exportMap = new KeyedIntObjectHashMap<>(EXPORT_OBJECT_ID_KEY); // the list of active listeners private final List exportListeners = new CopyOnWriteArrayList<>(); private volatile int exportListenerVersion = 0; - // Usually, export life cycles are managed explicitly with the life cycle of the session state. - // However, we need - // to be able to close non-exports that are not in the map but are otherwise satisfying - // outstanding gRPC requests. + // Usually, export life cycles are managed explicitly with the life cycle of the session state. 
However, we need + // to be able to close non-exports that are not in the map but are otherwise satisfying outstanding gRPC requests. private final SimpleReferenceManager> onCloseCallbacks = - new SimpleReferenceManager<>(WeakSimpleReference::new, false); + new SimpleReferenceManager<>(WeakSimpleReference::new, false); @AssistedInject public SessionState(final Scheduler scheduler, @Assisted final AuthContext authContext) { @@ -148,8 +143,7 @@ public SessionState(final Scheduler scheduler, @Assisted final AuthContext authC } /** - * This method is controlled by SessionService to update the expiration whenever the session is - * refreshed. + * This method is controlled by SessionService to update the expiration whenever the session is refreshed. * * @param expiration the initial expiration time and session token */ @@ -164,14 +158,13 @@ protected void initializeExpiration(@NotNull final SessionService.TokenExpiratio } log.info().append(logPrefix) - .append("token initialized to '").append(expiration.token.toString()) - .append("' which expires at ").append(expiration.deadline.toString()) - .append(".").endl(); + .append("token initialized to '").append(expiration.token.toString()) + .append("' which expires at ").append(expiration.deadline.toString()) + .append(".").endl(); } /** - * This method is controlled by SessionService to update the expiration whenever the session is - * refreshed. + * This method is controlled by SessionService to update the expiration whenever the session is refreshed. 
* * @param expiration the new expiration time and session token */ @@ -194,9 +187,9 @@ protected void updateExpiration(@NotNull final SessionService.TokenExpiration ex } log.info().append(logPrefix) - .append("token rotating to '").append(expiration.token.toString()) - .append("' which expires at ").append(expiration.deadline.toString()) - .append(".").endl(); + .append("token rotating to '").append(expiration.token.toString()) + .append("' which expires at ").append(expiration.deadline.toString()) + .append(".").endl(); } /** @@ -259,14 +252,13 @@ public ExportObject getExport(final int exportId) { final ExportObject result; - // If this a non-export or server side export, then it must already exist or else is a user - // error. + // If this a non-export or server side export, then it must already exist or else is a user error. if (exportId <= NON_EXPORT_ID) { result = (ExportObject) exportMap.get(exportId); if (result == null) { throw GrpcUtil.statusRuntimeException(Code.FAILED_PRECONDITION, - "Export id " + exportId + " does not exist and cannot be used out-of-order!"); + "Export id " + exportId + " does not exist and cannot be used out-of-order!"); } } else { result = (ExportObject) exportMap.putIfAbsent(exportId, EXPORT_OBJECT_VALUE_FACTORY); @@ -302,8 +294,8 @@ public ExportObject getExportIfExists(final Ticket ticket) { } /** - * Create and export a pre-computed element. This is typically used in scenarios where the - * number of exports is not known in advance by the requesting client. + * Create and export a pre-computed element. This is typically used in scenarios where the number of exports is not + * known in advance by the requesting client. 
* * @param export the result of the export * @param the export type @@ -317,8 +309,7 @@ public ExportObject newServerSideExport(final T export) { final int exportId = SERVER_EXPORT_UPDATER.getAndDecrement(this); // noinspection unchecked - final ExportObject result = - (ExportObject) exportMap.putIfAbsent(exportId, EXPORT_OBJECT_VALUE_FACTORY); + final ExportObject result = (ExportObject) exportMap.putIfAbsent(exportId, EXPORT_OBJECT_VALUE_FACTORY); result.setResult(export); return result; } @@ -358,15 +349,13 @@ public ExportBuilder newExport(final int exportId) { throw GrpcUtil.statusRuntimeException(Code.UNAUTHENTICATED, "session has expired"); } if (exportId <= 0) { - throw new IllegalArgumentException( - "exportId's <= 0 are reserved for server allocation only"); + throw new IllegalArgumentException("exportId's <= 0 are reserved for server allocation only"); } return new ExportBuilder<>(exportId); } /** - * Create an ExportBuilder to perform work after dependencies are satisfied that itself does not - * create any exports. + * Create an ExportBuilder to perform work after dependencies are satisfied that itself does not create any exports. * * @return an export builder */ @@ -407,8 +396,7 @@ public Closeable removeOnCloseCallback(final Closeable onClose) { * Notes that this session has expired and exports should be released. 
*/ public void onExpired() { - // note that once we set expiration to null; we are not able to add any more objects to the - // exportMap + // note that once we set expiration to null; we are not able to add any more objects to the exportMap SessionService.TokenExpiration prevToken = expiration; while (prevToken != null) { if (EXPIRATION_UPDATER.compareAndSet(this, prevToken, null)) { @@ -438,8 +426,7 @@ public void onExpired() { try { callback.close(); } catch (final IOException e) { - log.error().append(logPrefix).append("error during onClose callback: ") - .append(e).endl(); + log.error().append(logPrefix).append("error during onClose callback: ").append(e).endl(); } }); onCloseCallbacks.clear(); @@ -450,12 +437,11 @@ public void onExpired() { * @return true iff the provided export state is a failure state */ public static boolean isExportStateFailure(final ExportNotification.State state) { - return state == ExportNotification.State.FAILED - || state == ExportNotification.State.CANCELLED - || state == ExportNotification.State.DEPENDENCY_FAILED - || state == ExportNotification.State.DEPENDENCY_NEVER_FOUND - || state == ExportNotification.State.DEPENDENCY_RELEASED - || state == ExportNotification.State.DEPENDENCY_CANCELLED; + return state == ExportNotification.State.FAILED || state == ExportNotification.State.CANCELLED + || state == ExportNotification.State.DEPENDENCY_FAILED + || state == ExportNotification.State.DEPENDENCY_NEVER_FOUND + || state == ExportNotification.State.DEPENDENCY_RELEASED + || state == ExportNotification.State.DEPENDENCY_CANCELLED; } /** @@ -473,8 +459,8 @@ public static boolean isExportStateTerminal(final ExportNotification.State state * * @param Is context sensitive depending on the export. * - * @apiNote ExportId may be 0, if this is a task that has exported dependencies, but does not - * export anything itself. + * @apiNote ExportId may be 0, if this is a task that has exported dependencies, but does not export anything + * itself. 
* @apiNote Non-exports do not publish state changes. */ public final static class ExportObject extends LivenessArtifact { @@ -490,33 +476,22 @@ public final static class ExportObject extends LivenessArtifact { /** This indicates whether or not this export should use the serial execution queue. */ private boolean requiresSerialQueue; - /** - * This is a reference of the work to-be-done. It is non-null only during the PENDING state. - */ + /** This is a reference of the work to-be-done. It is non-null only during the PENDING state. */ private Callable exportMain; - /** - * This is a reference to the error handler to call if this item enters one of the failure - * states. - */ + /** This is a reference to the error handler to call if this item enters one of the failure states. */ private ExportErrorHandler errorHandler; /** used to keep track of which children need notification on export completion */ private List> children = Collections.emptyList(); - /** - * used to manage liveness of dependencies (to prevent a dependency from being released - * before it is used) - */ + /** used to manage liveness of dependencies (to prevent a dependency from being released before it is used) */ private List> parents = Collections.emptyList(); - /** - * used to detect when this object is ready for export (is visible for atomic int field - * updater) - */ + /** used to detect when this object is ready for export (is visible for atomic int field updater) */ private volatile int dependentCount = -1; @SuppressWarnings("unchecked") private static final AtomicIntegerFieldUpdater> DEPENDENT_COUNT_UPDATER = - AtomicIntegerFieldUpdater.newUpdater( - (Class>) (Class) ExportObject.class, "dependentCount"); + AtomicIntegerFieldUpdater.newUpdater((Class>) (Class) ExportObject.class, + "dependentCount"); /** used to identify and propagate error details */ private String errorId; @@ -528,8 +503,8 @@ public final static class ExportObject extends LivenessArtifact { private ExportObject(final 
SessionState session, final int exportId) { this.session = session; this.exportId = exportId; - this.logIdentity = isNonExport() ? Integer.toHexString(System.identityHashCode(this)) - : Long.toString(exportId); + this.logIdentity = + isNonExport() ? Integer.toHexString(System.identityHashCode(this)) : Long.toString(exportId); setState(ExportNotification.State.UNKNOWN); // non-exports stay alive until they have been exported @@ -539,8 +514,8 @@ private ExportObject(final SessionState session, final int exportId) { } /** - * Create an ExportObject that is not tied to any session. These must be non-exports that - * have require no work to be performed. These export objects can be used as dependencies. + * Create an ExportObject that is not tied to any session. These must be non-exports that have require no work + * to be performed. These export objects can be used as dependencies. * * @param result the object to wrap in an export */ @@ -564,13 +539,11 @@ private boolean isNonExport() { /** * Sets the dependencies and tracks liveness dependencies. 
* - * @param parents the dependencies that must be exported prior to invoking the exportMain - * callable + * @param parents the dependencies that must be exported prior to invoking the exportMain callable */ private synchronized void setDependencies(final List> parents) { if (dependentCount != -1) { - throw new IllegalStateException( - "dependencies can only be set once on an exportable object"); + throw new IllegalStateException("dependencies can only be set once on an exportable object"); } this.parents = parents; @@ -579,30 +552,27 @@ private synchronized void setDependencies(final List> parents) { if (log.isDebugEnabled()) { final Exception e = new RuntimeException(); - final LogEntry entry = log.debug().append(e).nl().append(session.logPrefix) - .append("export '").append(logIdentity) - .append("' has ").append(dependentCount).append(" dependencies remaining: "); + final LogEntry entry = + log.debug().append(e).nl().append(session.logPrefix).append("export '").append(logIdentity) + .append("' has ").append(dependentCount).append(" dependencies remaining: "); for (ExportObject parent : parents) { - entry.nl().append('\t').append(parent.logIdentity).append(" is ") - .append(parent.getState().name()); + entry.nl().append('\t').append(parent.logIdentity).append(" is ").append(parent.getState().name()); } entry.endl(); } } /** - * Sets the dependencies and initializes the relevant data structures to include this export - * as a child for each. + * Sets the dependencies and initializes the relevant data structures to include this export as a child for + * each. 
* * @param exportMain the exportMain callable to invoke when dependencies are satisfied - * @param errorHandler the errorHandler to notify so that it may propagate errors to the - * requesting client + * @param errorHandler the errorHandler to notify so that it may propagate errors to the requesting client */ - private synchronized void setWork(final Callable exportMain, - final ExportErrorHandler errorHandler, final boolean requiresSerialQueue) { + private synchronized void setWork(final Callable exportMain, final ExportErrorHandler errorHandler, + final boolean requiresSerialQueue) { if (this.exportMain != null) { - throw new IllegalStateException( - "work can only be set once on an exportable object"); + throw new IllegalStateException("work can only be set once on an exportable object"); } this.requiresSerialQueue = requiresSerialQueue; @@ -612,8 +582,8 @@ private synchronized void setWork(final Callable exportMain, } if (isNonExport()) { - // exports are retained via the exportMap; non-exports need to be retained while - // their work is outstanding + // exports are retained via the exportMap; non-exports need to be retained while their work is + // outstanding retainReference(); } @@ -626,8 +596,7 @@ private synchronized void setWork(final Callable exportMain, scheduleExport(); } else { for (final ExportObject parent : parents) { - // we allow parents to be null to simplify calling conventions around optional - // dependencies + // we allow parents to be null to simplify calling conventions around optional dependencies if (parent == null || !parent.maybeAddDependency(this)) { onResolveOne(parent); } @@ -639,12 +608,11 @@ private synchronized void setWork(final Callable exportMain, /** * WARNING! This method call is only safe to use in the following patterns: *

    - * 1) If an export (or non-export) {@link ExportBuilder#require}'d this export then the - * method is valid from within the Callable/Runnable passed to {@link ExportBuilder#submit}. + * 1) If an export (or non-export) {@link ExportBuilder#require}'d this export then the method is valid from + * within the Callable/Runnable passed to {@link ExportBuilder#submit}. *

    - * 2) By first obtaining a reference to the {@link ExportObject}, and then observing its - * state as {@link ExportNotification.State#EXPORTED}. The caller must abide by the Liveness - * API and dropReference. + * 2) By first obtaining a reference to the {@link ExportObject}, and then observing its state as + * {@link ExportNotification.State#EXPORTED}. The caller must abide by the Liveness API and dropReference. *

    * Example: * @@ -672,11 +640,11 @@ public T get() { throw GrpcUtil.statusRuntimeException(Code.UNAUTHENTICATED, "session has expired"); } - // Note: an export may be released while still being a dependency of queued work; so - // let's make sure we're still valid + // Note: an export may be released while still being a dependency of queued work; so let's make sure we're + // still valid if (result == null) { throw new IllegalStateException( - "Dependent export '" + exportId + "' is null and in state " + state.name()); + "Dependent export '" + exportId + "' is null and in state " + state.name()); } return result; @@ -690,8 +658,8 @@ public ExportNotification.State getState() { } /** - * @return the ticket for this export; note if this is a non-export the returned ticket will - * not resolve to anything and is considered an invalid ticket + * @return the ticket for this export; note if this is a non-export the returned ticket will not resolve to + * anything and is considered an invalid ticket */ public Ticket getExportId() { return ExportTicketHelper.exportIdToTicket(exportId); @@ -721,30 +689,29 @@ private boolean maybeAddDependency(final ExportObject child) { } /** - * This helper notifies any export notification listeners, and propagates resolution to - * children that depend on this export. + * This helper notifies any export notification listeners, and propagates resolution to children that depend on + * this export. 
* * @param state the new state for this export */ private synchronized void setState(final ExportNotification.State state) { if ((this.state == ExportNotification.State.EXPORTED && isNonExport()) - || isExportStateTerminal(this.state)) { - throw new IllegalStateException( - "cannot change state if export is already in terminal state"); + || isExportStateTerminal(this.state)) { + throw new IllegalStateException("cannot change state if export is already in terminal state"); } this.state = state; // Send an export notification before possibly notifying children of our state change. if (exportId != NON_EXPORT_ID) { log.debug().append(session.logPrefix).append("export '").append(logIdentity) - .append("' is ExportState.").append(state.name()).endl(); + .append("' is ExportState.").append(state.name()).endl(); final ExportNotification notification = makeExportNotification(); exportListenerVersion = session.exportListenerVersion; session.exportListeners.forEach(listener -> listener.notify(notification)); } else { log.debug().append(session.logPrefix).append("non-export '").append(logIdentity) - .append("' is ExportState.").append(state.name()).endl(); + .append("' is ExportState.").append(state.name()).endl(); } if (isExportStateFailure(state) && errorHandler != null) { @@ -760,8 +727,7 @@ private synchronized void setState(final ExportNotification.State state) { errorHandler = null; } - if ((state == ExportNotification.State.EXPORTED && isNonExport()) - || isExportStateTerminal(state)) { + if ((state == ExportNotification.State.EXPORTED && isNonExport()) || isExportStateTerminal(state)) { dropReference(); } } @@ -781,8 +747,7 @@ private void onResolveOne(@Nullable final ExportObject parent) { if (parent != null && isExportStateTerminal(parent.state)) { synchronized (this) { errorId = parent.errorId; - ExportNotification.State terminalState = - ExportNotification.State.DEPENDENCY_FAILED; + ExportNotification.State terminalState = 
ExportNotification.State.DEPENDENCY_FAILED; if (errorId == null) { final String errorDetails; @@ -798,14 +763,13 @@ private void onResolveOne(@Nullable final ExportObject parent) { default: // Note: the other error states should have non-null errorId errorDetails = "dependency does not have its own error defined " + - "and is in an unexpected state: " + parent.state; + "and is in an unexpected state: " + parent.state; break; } errorId = UuidCreator.toString(UuidCreator.getRandomBased()); dependentHandle = parent.logIdentity; - log.error().append("Internal Error '").append(errorId).append("' ") - .append(errorDetails).endl(); + log.error().append("Internal Error '").append(errorId).append("' ").append(errorDetails).endl(); } setState(terminalState); @@ -847,8 +811,7 @@ private void doExport() { final Callable capturedExport; synchronized (this) { capturedExport = exportMain; - if (state != ExportNotification.State.QUEUED || session.isExpired() - || capturedExport == null) { + if (state != ExportNotification.State.QUEUED || session.isExpired() || capturedExport == null) { return; // had a cancel race with client } } @@ -858,10 +821,10 @@ private void doExport() { QueryProcessingResults queryProcessingResults = null; try (final AutoCloseable ignored = LivenessScopeStack.open()) { queryProcessingResults = new QueryProcessingResults( - QueryPerformanceRecorder.getInstance()); + QueryPerformanceRecorder.getInstance()); evaluationNumber = QueryPerformanceRecorder.getInstance() - .startQuery("session=" + session.sessionId + ",exportId=" + logIdentity); + .startQuery("session=" + session.sessionId + ",exportId=" + logIdentity); try { setResult(capturedExport.call()); } finally { @@ -877,8 +840,7 @@ private void doExport() { synchronized (this) { if (!isExportStateTerminal(state)) { errorId = UuidCreator.toString(UuidCreator.getRandomBased()); - log.error().append("Internal Error '").append(errorId).append("' ") - .append(err).endl(); + log.error().append("Internal Error 
'").append(errorId).append("' ").append(err).endl(); setState(ExportNotification.State.FAILED); } } @@ -894,17 +856,17 @@ private void doExport() { final QueryOperationPerformanceLogLogger qoplLogger = memLoggers.getQoplLogger(); try { final QueryPerformanceNugget nugget = Require.neqNull( - queryProcessingResults.getRecorder().getQueryLevelPerformanceData(), - "queryProcessingResults.getRecorder().getQueryLevelPerformanceData()"); + queryProcessingResults.getRecorder().getQueryLevelPerformanceData(), + "queryProcessingResults.getRecorder().getQueryLevelPerformanceData()"); // noinspection SynchronizationOnLocalVariableOrMethodParameter synchronized (qplLogger) { qplLogger.log(evaluationNumber, - queryProcessingResults, - nugget); + queryProcessingResults, + nugget); } final List nuggets = - queryProcessingResults.getRecorder().getOperationLevelPerformanceData(); + queryProcessingResults.getRecorder().getOperationLevelPerformanceData(); // noinspection SynchronizationOnLocalVariableOrMethodParameter synchronized (qoplLogger) { int opNo = 0; @@ -938,8 +900,7 @@ private void setResult(final T result) { // client may race a cancel with setResult if (!isExportStateTerminal(state)) { this.result = result; - if (result instanceof LivenessReferent - && DynamicNode.notDynamicOrIsRefreshing(result)) { + if (result instanceof LivenessReferent && DynamicNode.notDynamicOrIsRefreshing(result)) { manage((LivenessReferent) result); } setState(ExportNotification.State.EXPORTED); @@ -968,8 +929,7 @@ public synchronized void release() { } /** - * Releases this export; it will cancel the work and dependent exports proactively when - * possible. + * Releases this export; it will cancel the work and dependent exports proactively when possible. 
*/ public synchronized void cancel() { if (session == null) { @@ -995,8 +955,8 @@ protected synchronized void destroy() { */ private synchronized ExportNotification makeExportNotification() { final ExportNotification.Builder builder = ExportNotification.newBuilder() - .setTicket(ExportTicketHelper.exportIdToTicket(exportId)) - .setExportState(state); + .setTicket(ExportTicketHelper.exportIdToTicket(exportId)) + .setExportState(state); if (errorId != null) { builder.setContext(errorId); @@ -1031,8 +991,7 @@ public void addExportListener(final StreamObserver observer) * @param observer the observer to no longer be subscribed * @return The item if it was removed, else null */ - public StreamObserver removeExportListener( - final StreamObserver observer) { + public StreamObserver removeExportListener(final StreamObserver observer) { final MutableObject wrappedListener = new MutableObject<>(); final boolean found = exportListeners.removeIf(wrap -> { if (wrappedListener.getValue() != null) { @@ -1126,10 +1085,10 @@ private void initialize(final int versionId) { // notify that the refresh has completed notify(ExportNotification.newBuilder() - .setTicket(ExportTicketHelper.exportIdToTicket(NON_EXPORT_ID)) - .setExportState(ExportNotification.State.EXPORTED) - .setContext("refresh is complete") - .build()); + .setTicket(ExportTicketHelper.exportIdToTicket(NON_EXPORT_ID)) + .setExportState(ExportNotification.State.EXPORTED) + .setContext("refresh is complete") + .build()); log.info().append(logPrefix).append("refresh complete for listener ").append(id).endl(); } @@ -1152,17 +1111,17 @@ public interface ExportErrorHandler { * * @param resultState the final state of the export * @param errorContext an identifier to locate the details as to why the export failed - * @param dependentExportId an identifier for the export id of the dependent that caused the - * failure if applicable + * @param dependentExportId an identifier for the export id of the dependent that caused the 
failure if + * applicable */ - void onError(final ExportNotification.State resultState, - @Nullable final String errorContext, @Nullable final String dependentExportId); + void onError(final ExportNotification.State resultState, @Nullable final String errorContext, + @Nullable final String dependentExportId); } @FunctionalInterface public interface ExportErrorGrpcHandler { /** - * This error handler receives a grpc friendly {@link StatusRuntimeException} that can be - * directly sent to {@link io.grpc.stub.StreamObserver#onError}. + * This error handler receives a grpc friendly {@link StatusRuntimeException} that can be directly sent to + * {@link io.grpc.stub.StreamObserver#onError}. * * @param notification the notification to forward to the grpc client */ @@ -1183,26 +1142,24 @@ public class ExportBuilder { this.export = new ExportObject<>(SessionState.this, NON_EXPORT_ID); } else { // noinspection unchecked - this.export = - (ExportObject) exportMap.putIfAbsent(exportId, EXPORT_OBJECT_VALUE_FACTORY); + this.export = (ExportObject) exportMap.putIfAbsent(exportId, EXPORT_OBJECT_VALUE_FACTORY); switch (this.export.getState()) { case UNKNOWN: return; case RELEASED: case CANCELLED: throw GrpcUtil.statusRuntimeException(Code.FAILED_PRECONDITION, - "export already released/cancelled id: " + exportId); + "export already released/cancelled id: " + exportId); default: throw GrpcUtil.statusRuntimeException(Code.FAILED_PRECONDITION, - "cannot re-export to existing exportId: " + exportId); + "cannot re-export to existing exportId: " + exportId); } } } /** - * Some exports must happen serially w.r.t. other exports. For example, an export that - * acquires the exclusive LTM lock. We enqueue these dependencies independently of the - * otherwise regularly concurrent exports. + * Some exports must happen serially w.r.t. other exports. For example, an export that acquires the exclusive + * LTM lock. 
We enqueue these dependencies independently of the otherwise regularly concurrent exports. * * @return this builder */ @@ -1212,8 +1169,8 @@ public ExportBuilder requiresSerialQueue() { } /** - * Invoke this method to set the required dependencies for this export. A parent may be null - * to simplify usage of optional export dependencies. + * Invoke this method to set the required dependencies for this export. A parent may be null to simplify usage + * of optional export dependencies. * * @param dependencies the parent dependencies * @return this builder @@ -1224,8 +1181,8 @@ public ExportBuilder require(final ExportObject... dependencies) { } /** - * Invoke this method to set the required dependencies for this export. A parent may be null - * to simplify usage of optional export dependencies. + * Invoke this method to set the required dependencies for this export. A parent may be null to simplify usage + * of optional export dependencies. * * @param dependencies the parent dependencies * @return this builder @@ -1236,8 +1193,8 @@ public ExportBuilder require(final List> dependencies) { } /** - * Invoke this method to set the error handler to be notified if this export fails. Only one - * error handler may be set. + * Invoke this method to set the error handler to be notified if this export fails. Only one error handler may + * be set. * * @param errorHandler the error handler to be notified * @return this builder @@ -1251,9 +1208,8 @@ public ExportBuilder onError(final ExportErrorHandler errorHandler) { } /** - * Invoke this method to set the error handler to be notified if this export fails. Only one - * error handler may be set. This is a convenience method for use with - * {@link io.grpc.stub.StreamObserver}. + * Invoke this method to set the error handler to be notified if this export fails. Only one error handler may + * be set. This is a convenience method for use with {@link io.grpc.stub.StreamObserver}. 
* * @param errorHandler the error handler to be notified * @return this builder @@ -1261,22 +1217,21 @@ public ExportBuilder onError(final ExportErrorHandler errorHandler) { public ExportBuilder onError(final ExportErrorGrpcHandler errorHandler) { return onError(((resultState, errorContext, dependentExportId) -> { final String dependentStr = dependentExportId == null ? "" - : (" (related parent export id: " + dependentExportId + ")"); + : (" (related parent export id: " + dependentExportId + ")"); errorHandler.onError(StatusProto.toStatusRuntimeException(Status.newBuilder() - .setCode(Code.FAILED_PRECONDITION.getNumber()) - .setMessage("Details Logged w/ID '" + errorContext + "'" + dependentStr) - .build())); + .setCode(Code.FAILED_PRECONDITION.getNumber()) + .setMessage("Details Logged w/ID '" + errorContext + "'" + dependentStr) + .build())); })); } /** - * This method is the final method for submitting an export to the session. The provided - * callable is enqueued on the scheduler when all dependencies have been satisfied. Only the - * dependencies supplied to the builder are guaranteed to be resolved when the exportMain is - * executing. + * This method is the final method for submitting an export to the session. The provided callable is enqueued on + * the scheduler when all dependencies have been satisfied. Only the dependencies supplied to the builder are + * guaranteed to be resolved when the exportMain is executing. * - * Warning! It is the SessionState owner's responsibility to wait to release any dependency - * until after this exportMain callable/runnable has complete. + * Warning! It is the SessionState owner's responsibility to wait to release any dependency until after this + * exportMain callable/runnable has complete. 
* * @param exportMain the callable that generates the export * @return the submitted export object @@ -1287,13 +1242,12 @@ public ExportObject submit(final Callable exportMain) { } /** - * This method is the final method for submitting an export to the session. The provided - * runnable is enqueued on the scheduler when all dependencies have been satisfied. Only the - * dependencies supplied to the builder are guaranteed to be resolved when the exportMain is - * executing. + * This method is the final method for submitting an export to the session. The provided runnable is enqueued on + * the scheduler when all dependencies have been satisfied. Only the dependencies supplied to the builder are + * guaranteed to be resolved when the exportMain is executing. * - * Warning! It is the SessionState owner's responsibility to wait to release any dependency - * until after this exportMain callable/runnable has complete. + * Warning! It is the SessionState owner's responsibility to wait to release any dependency until after this + * exportMain callable/runnable has complete. 
* * @param exportMain the runnable to execute once dependencies have resolved * @return the submitted export object @@ -1313,8 +1267,7 @@ public ExportObject getExport() { } /** - * @return the export id of this export or {@link SessionState#NON_EXPORT_ID} if is a - * non-export + * @return the export id of this export or {@link SessionState#NON_EXPORT_ID} if is a non-export */ public int getExportId() { return exportId; @@ -1322,25 +1275,24 @@ public int getExportId() { } private static final KeyedIntObjectKey> EXPORT_OBJECT_ID_KEY = - new KeyedIntObjectKey.BasicStrict>() { - @Override - public int getIntKey(final ExportObject exportObject) { - return exportObject.exportId; - } - }; + new KeyedIntObjectKey.BasicStrict>() { + @Override + public int getIntKey(final ExportObject exportObject) { + return exportObject.exportId; + } + }; private final KeyedIntObjectHash.ValueFactory> EXPORT_OBJECT_VALUE_FACTORY = - new KeyedIntObjectHash.ValueFactory.Strict>() { - @Override - public ExportObject newValue(final int key) { - if (isExpired()) { - throw GrpcUtil.statusRuntimeException(Code.UNAUTHENTICATED, - "session has expired"); - } + new KeyedIntObjectHash.ValueFactory.Strict>() { + @Override + public ExportObject newValue(final int key) { + if (isExpired()) { + throw GrpcUtil.statusRuntimeException(Code.UNAUTHENTICATED, "session has expired"); + } - final ExportObject retval = new ExportObject<>(SessionState.this, key); - retval.retainReference(); - return retval; - } - }; + final ExportObject retval = new ExportObject<>(SessionState.this, key); + retval.retainReference(); + return retval; + } + }; } diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/session/TicketResolver.java b/grpc-api/src/main/java/io/deephaven/grpc_api/session/TicketResolver.java index b2751ae8f51..f9554c804e3 100644 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/session/TicketResolver.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/session/TicketResolver.java @@ -17,8 +17,8 @@ 
public interface TicketResolver { byte ticketRoute(); /** - * The first path entry on a route indicates which resolver to use. The remaining path elements - * are used to resolve the descriptor. + * The first path entry on a route indicates which resolver to use. The remaining path elements are used to resolve + * the descriptor. * * @return the string that will route from flight descriptor to this resolver */ @@ -42,8 +42,7 @@ public interface TicketResolver { * @param the expected return type of the ticket; this is not validated * @return an export object; see {@link SessionState} for lifecycle propagation details */ - SessionState.ExportObject resolve(@Nullable SessionState session, - Flight.FlightDescriptor descriptor); + SessionState.ExportObject resolve(@Nullable SessionState session, Flight.FlightDescriptor descriptor); /** * Publish a new result as a flight ticket to an export object future. @@ -67,8 +66,7 @@ SessionState.ExportObject resolve(@Nullable SessionState session, * @param the type of the result the export will publish * @return an export object; see {@link SessionState} for lifecycle propagation details */ - SessionState.ExportBuilder publish(SessionState session, - Flight.FlightDescriptor descriptor); + SessionState.ExportBuilder publish(SessionState session, Flight.FlightDescriptor descriptor); /** * Retrieve a FlightInfo for a given FlightDescriptor. @@ -77,24 +75,21 @@ SessionState.ExportBuilder publish(SessionState session, * @return a FlightInfo describing this flight */ SessionState.ExportObject flightInfoFor(@Nullable SessionState session, - Flight.FlightDescriptor descriptor); + Flight.FlightDescriptor descriptor); /** * Create a human readable string to identify this ticket. * * @param ticket the ticket to parse * @return a string that is good for log/error messages - * @apiNote There is not a {@link Flight.FlightDescriptor} equivalent as the path must already - * be displayable. 
+ * @apiNote There is not a {@link Flight.FlightDescriptor} equivalent as the path must already be displayable. */ String getLogNameFor(ByteBuffer ticket); /** - * This invokes the provided visitor for each valid flight descriptor this ticket resolver - * exposes via flight. + * This invokes the provided visitor for each valid flight descriptor this ticket resolver exposes via flight. * - * @param session optional session that the resolver can use to filter which flights a visitor - * sees + * @param session optional session that the resolver can use to filter which flights a visitor sees * @param visitor the callback to invoke per descriptor path */ void forAllFlightInfo(@Nullable SessionState session, Consumer visitor); diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/session/TicketRouter.java b/grpc-api/src/main/java/io/deephaven/grpc_api/session/TicketRouter.java index f99cc43fa4f..f6298c52d82 100644 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/session/TicketRouter.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/session/TicketRouter.java @@ -25,9 +25,9 @@ @Singleton public class TicketRouter { private final KeyedIntObjectHashMap byteResolverMap = - new KeyedIntObjectHashMap<>(RESOLVER_OBJECT_TICKET_ID); + new KeyedIntObjectHashMap<>(RESOLVER_OBJECT_TICKET_ID); private final KeyedObjectHashMap descriptorResolverMap = - new KeyedObjectHashMap<>(RESOLVER_OBJECT_DESCRIPTOR_ID); + new KeyedObjectHashMap<>(RESOLVER_OBJECT_DESCRIPTOR_ID); @Inject public TicketRouter(final Set resolvers) { @@ -46,8 +46,8 @@ public TicketRouter(final Set resolvers) { * @return an export object; see {@link SessionState} for lifecycle propagation details */ public SessionState.ExportObject resolve( - @Nullable final SessionState session, - final ByteBuffer ticket) { + @Nullable final SessionState session, + final ByteBuffer ticket) { return getResolver(ticket.get(ticket.position())).resolve(session, ticket); } @@ -60,8 +60,8 @@ public SessionState.ExportObject 
resolve( * @return an export object; see {@link SessionState} for lifecycle propagation details */ public SessionState.ExportObject resolve( - @Nullable final SessionState session, - final Flight.Ticket ticket) { + @Nullable final SessionState session, + final Flight.Ticket ticket) { return resolve(session, ticket.getTicket().asReadOnlyByteBuffer()); } @@ -74,8 +74,8 @@ public SessionState.ExportObject resolve( * @return an export object; see {@link SessionState} for lifecycle propagation details */ public SessionState.ExportObject resolve( - @Nullable final SessionState session, - final Ticket ticket) { + @Nullable final SessionState session, + final Ticket ticket) { return resolve(session, ticket.getTicket().asReadOnlyByteBuffer()); } @@ -89,8 +89,8 @@ public SessionState.ExportObject resolve( */ // TODO #412 use this or remove it? public SessionState.ExportObject resolve( - @Nullable final SessionState session, - final Flight.FlightDescriptor descriptor) { + @Nullable final SessionState session, + final Flight.FlightDescriptor descriptor) { return getResolver(descriptor).resolve(session, descriptor); } @@ -105,8 +105,8 @@ public SessionState.ExportObject resolve( * @return an export object; see {@link SessionState} for lifecycle propagation details */ public SessionState.ExportBuilder publish( - final SessionState session, - final ByteBuffer ticket) { + final SessionState session, + final ByteBuffer ticket) { return getResolver(ticket.get(ticket.position())).publish(session, ticket); } @@ -122,8 +122,8 @@ public SessionState.ExportBuilder publish( */ // TODO #412 use this or remove it public SessionState.ExportBuilder publish( - final SessionState session, - final Flight.Ticket ticket) { + final SessionState session, + final Flight.Ticket ticket) { return publish(session, ticket.getTicket().asReadOnlyByteBuffer()); } @@ -138,23 +138,23 @@ public SessionState.ExportBuilder publish( * @return an export object; see {@link SessionState} for lifecycle propagation 
details */ public SessionState.ExportBuilder publish( - final SessionState session, - final Flight.FlightDescriptor descriptor) { + final SessionState session, + final Flight.FlightDescriptor descriptor) { return getResolver(descriptor).publish(session, descriptor); } /** * Resolve a flight descriptor and retrieve flight info for the flight. * - * @param session the user session context; ticket resolvers may expose flights that do not - * require a session (such as via DoGet) + * @param session the user session context; ticket resolvers may expose flights that do not require a session (such + * as via DoGet) * @param descriptor the flight descriptor - * @return an export object that will resolve to the flight descriptor; see {@link SessionState} - * for lifecycle propagation details + * @return an export object that will resolve to the flight descriptor; see {@link SessionState} for lifecycle + * propagation details */ public SessionState.ExportObject flightInfoFor( - @Nullable final SessionState session, - final Flight.FlightDescriptor descriptor) { + @Nullable final SessionState session, + final Flight.FlightDescriptor descriptor) { return getResolver(descriptor).flightInfoFor(session, descriptor); } @@ -180,38 +180,34 @@ public String getLogNameFor(final ByteBuffer ticket) { } /** - * This invokes the provided visitor for each valid flight descriptor this ticket resolver - * exposes via flight. + * This invokes the provided visitor for each valid flight descriptor this ticket resolver exposes via flight. 
* - * @param session optional session that the resolver can use to filter which flights a visitor - * sees + * @param session optional session that the resolver can use to filter which flights a visitor sees * @param visitor the callback to invoke per descriptor path */ - public void visitFlightInfo(final @Nullable SessionState session, - final Consumer visitor) { - byteResolverMap.iterator() - .forEachRemaining(resolver -> resolver.forAllFlightInfo(session, visitor)); + public void visitFlightInfo(final @Nullable SessionState session, final Consumer visitor) { + byteResolverMap.iterator().forEachRemaining(resolver -> resolver.forAllFlightInfo(session, visitor)); } public static Flight.FlightInfo getFlightInfo(final Table table, - final Flight.FlightDescriptor descriptor, - final Flight.Ticket ticket) { + final Flight.FlightDescriptor descriptor, + final Flight.Ticket ticket) { return Flight.FlightInfo.newBuilder() - .setSchema(BarrageSchemaUtil.schemaBytesFromTable(table)) - .setFlightDescriptor(descriptor) - .addEndpoint(Flight.FlightEndpoint.newBuilder() - .setTicket(ticket) - .build()) - .setTotalRecords(table.isLive() ? -1 : table.size()) - .setTotalBytes(-1) - .build(); + .setSchema(BarrageSchemaUtil.schemaBytesFromTable(table)) + .setFlightDescriptor(descriptor) + .addEndpoint(Flight.FlightEndpoint.newBuilder() + .setTicket(ticket) + .build()) + .setTotalRecords(table.isLive() ? 
-1 : table.size()) + .setTotalBytes(-1) + .build(); } private TicketResolver getResolver(final byte route) { final TicketResolver resolver = byteResolverMap.get(route); if (resolver == null) { throw GrpcUtil.statusRuntimeException(Code.FAILED_PRECONDITION, - "Cannot find resolver for route '" + route + "' (byte)"); + "Cannot find resolver for route '" + route + "' (byte)"); } return resolver; } @@ -219,36 +215,36 @@ private TicketResolver getResolver(final byte route) { private TicketResolver getResolver(final Flight.FlightDescriptor descriptor) { if (descriptor.getType() != Flight.FlightDescriptor.DescriptorType.PATH) { throw GrpcUtil.statusRuntimeException(Code.FAILED_PRECONDITION, - "Cannot find resolver: flight descriptor is not a path"); + "Cannot find resolver: flight descriptor is not a path"); } if (descriptor.getPathCount() <= 0) { throw GrpcUtil.statusRuntimeException(Code.FAILED_PRECONDITION, - "Cannot find resolver: flight descriptor does not have route path"); + "Cannot find resolver: flight descriptor does not have route path"); } final String route = descriptor.getPath(0); final TicketResolver resolver = descriptorResolverMap.get(route); if (resolver == null) { throw GrpcUtil.statusRuntimeException(Code.FAILED_PRECONDITION, - "Cannot find resolver for route '" + route + "'"); + "Cannot find resolver for route '" + route + "'"); } return resolver; } private static final KeyedIntObjectKey RESOLVER_OBJECT_TICKET_ID = - new KeyedIntObjectKey.BasicStrict() { - @Override - public int getIntKey(final TicketResolver ticketResolver) { - return ticketResolver.ticketRoute(); - } - }; + new KeyedIntObjectKey.BasicStrict() { + @Override + public int getIntKey(final TicketResolver ticketResolver) { + return ticketResolver.ticketRoute(); + } + }; private static final KeyedObjectKey RESOLVER_OBJECT_DESCRIPTOR_ID = - new KeyedObjectKey.Basic() { - @Override - public String getKey(TicketResolver ticketResolver) { - return ticketResolver.flightDescriptorRoute(); - } - 
}; + new KeyedObjectKey.Basic() { + @Override + public String getKey(TicketResolver ticketResolver) { + return ticketResolver.flightDescriptorRoute(); + } + }; } diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ExportedTableUpdateListener.java b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ExportedTableUpdateListener.java index 2185a885409..d5df302d63f 100644 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ExportedTableUpdateListener.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ExportedTableUpdateListener.java @@ -32,10 +32,9 @@ /** * Manage the lifecycle of exports that are Tables. * - * Initially we receive a refresh of exports from the session state. This allows us to timely notify - * the observer of existing table sizes for both static tables and tables that won't tick - * frequently. When the refresh is complete we are sent a notification for exportId == 0 (which is - * otherwise an invalid export id). + * Initially we receive a refresh of exports from the session state. This allows us to timely notify the observer of + * existing table sizes for both static tables and tables that won't tick frequently. When the refresh is complete we + * are sent a notification for exportId == 0 (which is otherwise an invalid export id). 
*/ public class ExportedTableUpdateListener implements StreamObserver { @@ -45,17 +44,15 @@ public class ExportedTableUpdateListener implements StreamObserver responseObserver; - private final KeyedLongObjectHashMap updateListenerMap = - new KeyedLongObjectHashMap<>(EXPORT_KEY); + private final KeyedLongObjectHashMap updateListenerMap = new KeyedLongObjectHashMap<>(EXPORT_KEY); private volatile boolean isDestroyed = false; public ExportedTableUpdateListener( - final SessionState session, - final StreamObserver responseObserver) { + final SessionState session, + final StreamObserver responseObserver) { this.session = session; - this.logPrefix = "ExportedTableUpdateListener(" - + Integer.toHexString(System.identityHashCode(this)) + ") "; + this.logPrefix = "ExportedTableUpdateListener(" + Integer.toHexString(System.identityHashCode(this)) + ") "; this.responseObserver = responseObserver; } @@ -115,15 +112,14 @@ public synchronized void onCompleted() { } /** - * Initialize the listener for a newly exported table. This method is synchronized to prevent a - * race from the table ticking before we append the initial refresh msg. + * Initialize the listener for a newly exported table. This method is synchronized to prevent a race from the table + * ticking before we append the initial refresh msg. * * @param ticket of the table being exported * @param exportId the export id of the table being exported * @param table the table that was just exported */ - private synchronized void onNewTableExport(final Ticket ticket, final int exportId, - final BaseTable table) { + private synchronized void onNewTableExport(final Ticket ticket, final int exportId, final BaseTable table) { if (!table.isLive()) { sendUpdateMessage(ticket, table.size(), null); return; @@ -151,22 +147,20 @@ private synchronized void onNewTableExport(final Ticket ticket, final int export } /** - * Append an update message to the batch being built this cycle. 
If this is the first update on - * this LTM cycle then this also adds the terminal notification to flush the outstanding - * updates. + * Append an update message to the batch being built this cycle. If this is the first update on this LTM cycle then + * this also adds the terminal notification to flush the outstanding updates. * * @param ticket ticket of the table that has updated * @param size the current size of the table * @param error any propagated error of the table */ - private synchronized void sendUpdateMessage(final Ticket ticket, final long size, - final Throwable error) { + private synchronized void sendUpdateMessage(final Ticket ticket, final long size, final Throwable error) { if (isDestroyed) { return; } final ExportedTableUpdateMessage.Builder update = ExportedTableUpdateMessage.newBuilder() - .setExportId(ticket).setSize(size); + .setExportId(ticket).setSize(size); if (error != null) { update.setUpdateFailureMessage(GrpcUtil.securelyWrapError(log, error).getMessage()); @@ -175,8 +169,7 @@ private synchronized void sendUpdateMessage(final Ticket ticket, final long size try { responseObserver.onNext(update.build()); } catch (final RuntimeException err) { - log.error().append(logPrefix).append("failed to notify listener of state change: ") - .append(err).endl(); + log.error().append(logPrefix).append("failed to notify listener of state change: ").append(err).endl(); session.removeExportListener(this); } } @@ -191,8 +184,7 @@ private class ListenerImpl extends InstrumentedShiftAwareListener { @ReferentialIntegrity final ShiftAwareSwapListener swapListener; - private ListenerImpl(final BaseTable table, final int exportId, - final ShiftAwareSwapListener swapListener) { + private ListenerImpl(final BaseTable table, final int exportId, final ShiftAwareSwapListener swapListener) { super("ExportedTableUpdateListener (" + exportId + ")"); this.table = table; this.exportId = exportId; @@ -206,21 +198,19 @@ public void onUpdate(final Update upstream) { } 
@Override - public void onFailureInternal(final Throwable error, - final UpdatePerformanceTracker.Entry sourceEntry) { + public void onFailureInternal(final Throwable error, final UpdatePerformanceTracker.Entry sourceEntry) { sendUpdateMessage(ExportTicketHelper.exportIdToTicket(exportId), table.size(), error); } } private static final KeyedLongObjectKey EXPORT_KEY = - new KeyedLongObjectKey.BasicStrict() { - @Override - public long getLongKey(@NotNull final ListenerImpl listener) { - return listener.exportId; - } - }; + new KeyedLongObjectKey.BasicStrict() { + @Override + public long getLongKey(@NotNull final ListenerImpl listener) { + return listener.exportId; + } + }; - private static final NotificationStepReceiver NOOP_NOTIFICATION_STEP_RECEIVER = - lastNotificationStep -> { - }; + private static final NotificationStepReceiver NOOP_NOTIFICATION_STEP_RECEIVER = lastNotificationStep -> { + }; } diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/table/TableServiceGrpcImpl.java b/grpc-api/src/main/java/io/deephaven/grpc_api/table/TableServiceGrpcImpl.java index e66822e672e..e97fa9b582b 100644 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/table/TableServiceGrpcImpl.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/table/TableServiceGrpcImpl.java @@ -66,8 +66,8 @@ public class TableServiceGrpcImpl extends TableServiceGrpc.TableServiceImplBase @Inject public TableServiceGrpcImpl(final TicketRouter ticketRouter, - final SessionService sessionService, - final Map> operationMap) { + final SessionService sessionService, + final Map> operationMap) { this.ticketRouter = ticketRouter; this.sessionService = sessionService; this.operationMap = operationMap; @@ -78,207 +78,184 @@ private GrpcTableOperation getOp(final BatchTableRequest.Operation.OpCase final GrpcTableOperation operation = (GrpcTableOperation) operationMap.get(op); if (operation == null) { throw GrpcUtil.statusRuntimeException(Code.INVALID_ARGUMENT, - "BatchTableRequest.Operation.OpCode is 
unset, incompatible, or not yet supported. (found: " - + op + ")"); + "BatchTableRequest.Operation.OpCode is unset, incompatible, or not yet supported. (found: " + op + + ")"); } return operation; } @Override public void emptyTable(final EmptyTableRequest request, - final StreamObserver responseObserver) { - oneShotOperationWrapper(BatchTableRequest.Operation.OpCase.EMPTY_TABLE, request, - responseObserver); + final StreamObserver responseObserver) { + oneShotOperationWrapper(BatchTableRequest.Operation.OpCase.EMPTY_TABLE, request, responseObserver); } @Override public void timeTable(final TimeTableRequest request, - final StreamObserver responseObserver) { - oneShotOperationWrapper(BatchTableRequest.Operation.OpCase.TIME_TABLE, request, - responseObserver); + final StreamObserver responseObserver) { + oneShotOperationWrapper(BatchTableRequest.Operation.OpCase.TIME_TABLE, request, responseObserver); } @Override public void mergeTables(final MergeTablesRequest request, - final StreamObserver responseObserver) { - oneShotOperationWrapper(BatchTableRequest.Operation.OpCase.MERGE, request, - responseObserver); + final StreamObserver responseObserver) { + oneShotOperationWrapper(BatchTableRequest.Operation.OpCase.MERGE, request, responseObserver); } @Override public void selectDistinct(final SelectDistinctRequest request, - final StreamObserver responseObserver) { - oneShotOperationWrapper(BatchTableRequest.Operation.OpCase.SELECT_DISTINCT, request, - responseObserver); + final StreamObserver responseObserver) { + oneShotOperationWrapper(BatchTableRequest.Operation.OpCase.SELECT_DISTINCT, request, responseObserver); } @Override public void update(final SelectOrUpdateRequest request, - final StreamObserver responseObserver) { - oneShotOperationWrapper(BatchTableRequest.Operation.OpCase.UPDATE, request, - responseObserver); + final StreamObserver responseObserver) { + oneShotOperationWrapper(BatchTableRequest.Operation.OpCase.UPDATE, request, responseObserver); } 
@Override public void lazyUpdate(final SelectOrUpdateRequest request, - final StreamObserver responseObserver) { - oneShotOperationWrapper(BatchTableRequest.Operation.OpCase.LAZY_UPDATE, request, - responseObserver); + final StreamObserver responseObserver) { + oneShotOperationWrapper(BatchTableRequest.Operation.OpCase.LAZY_UPDATE, request, responseObserver); } @Override public void view(final SelectOrUpdateRequest request, - final StreamObserver responseObserver) { + final StreamObserver responseObserver) { oneShotOperationWrapper(BatchTableRequest.Operation.OpCase.VIEW, request, responseObserver); } @Override public void updateView(final SelectOrUpdateRequest request, - final StreamObserver responseObserver) { - oneShotOperationWrapper(BatchTableRequest.Operation.OpCase.UPDATE_VIEW, request, - responseObserver); + final StreamObserver responseObserver) { + oneShotOperationWrapper(BatchTableRequest.Operation.OpCase.UPDATE_VIEW, request, responseObserver); } @Override public void select(final SelectOrUpdateRequest request, - final StreamObserver responseObserver) { - oneShotOperationWrapper(BatchTableRequest.Operation.OpCase.SELECT, request, - responseObserver); + final StreamObserver responseObserver) { + oneShotOperationWrapper(BatchTableRequest.Operation.OpCase.SELECT, request, responseObserver); } @Override public void headBy(final HeadOrTailByRequest request, - final StreamObserver responseObserver) { - oneShotOperationWrapper(BatchTableRequest.Operation.OpCase.HEAD_BY, request, - responseObserver); + final StreamObserver responseObserver) { + oneShotOperationWrapper(BatchTableRequest.Operation.OpCase.HEAD_BY, request, responseObserver); } @Override public void tailBy(final HeadOrTailByRequest request, - final StreamObserver responseObserver) { - oneShotOperationWrapper(BatchTableRequest.Operation.OpCase.TAIL_BY, request, - responseObserver); + final StreamObserver responseObserver) { + oneShotOperationWrapper(BatchTableRequest.Operation.OpCase.TAIL_BY, 
request, responseObserver); } @Override public void head(final HeadOrTailRequest request, - final StreamObserver responseObserver) { + final StreamObserver responseObserver) { oneShotOperationWrapper(BatchTableRequest.Operation.OpCase.HEAD, request, responseObserver); } @Override public void tail(final HeadOrTailRequest request, - final StreamObserver responseObserver) { + final StreamObserver responseObserver) { oneShotOperationWrapper(BatchTableRequest.Operation.OpCase.TAIL, request, responseObserver); } @Override public void ungroup(final UngroupRequest request, - final StreamObserver responseObserver) { - oneShotOperationWrapper(BatchTableRequest.Operation.OpCase.UNGROUP, request, - responseObserver); + final StreamObserver responseObserver) { + oneShotOperationWrapper(BatchTableRequest.Operation.OpCase.UNGROUP, request, responseObserver); } @Override public void comboAggregate(final ComboAggregateRequest request, - final StreamObserver responseObserver) { - oneShotOperationWrapper(BatchTableRequest.Operation.OpCase.COMBO_AGGREGATE, request, - responseObserver); + final StreamObserver responseObserver) { + oneShotOperationWrapper(BatchTableRequest.Operation.OpCase.COMBO_AGGREGATE, request, responseObserver); } @Override public void snapshot(final SnapshotTableRequest request, - final StreamObserver responseObserver) { - oneShotOperationWrapper(BatchTableRequest.Operation.OpCase.SNAPSHOT, request, - responseObserver); + final StreamObserver responseObserver) { + oneShotOperationWrapper(BatchTableRequest.Operation.OpCase.SNAPSHOT, request, responseObserver); } @Override public void dropColumns(final DropColumnsRequest request, - final StreamObserver responseObserver) { - oneShotOperationWrapper(BatchTableRequest.Operation.OpCase.DROP_COLUMNS, request, - responseObserver); + final StreamObserver responseObserver) { + oneShotOperationWrapper(BatchTableRequest.Operation.OpCase.DROP_COLUMNS, request, responseObserver); } @Override public void filter(final 
FilterTableRequest request, - final StreamObserver responseObserver) { - oneShotOperationWrapper(BatchTableRequest.Operation.OpCase.FILTER, request, - responseObserver); + final StreamObserver responseObserver) { + oneShotOperationWrapper(BatchTableRequest.Operation.OpCase.FILTER, request, responseObserver); } @Override public void unstructuredFilter(final UnstructuredFilterTableRequest request, - final StreamObserver responseObserver) { - oneShotOperationWrapper(BatchTableRequest.Operation.OpCase.UNSTRUCTURED_FILTER, request, - responseObserver); + final StreamObserver responseObserver) { + oneShotOperationWrapper(BatchTableRequest.Operation.OpCase.UNSTRUCTURED_FILTER, request, responseObserver); } @Override public void sort(final SortTableRequest request, - final StreamObserver responseObserver) { + final StreamObserver responseObserver) { oneShotOperationWrapper(BatchTableRequest.Operation.OpCase.SORT, request, responseObserver); } @Override public void flatten(final FlattenRequest request, - final StreamObserver responseObserver) { - oneShotOperationWrapper(BatchTableRequest.Operation.OpCase.FLATTEN, request, - responseObserver); + final StreamObserver responseObserver) { + oneShotOperationWrapper(BatchTableRequest.Operation.OpCase.FLATTEN, request, responseObserver); } @Override public void crossJoinTables(final CrossJoinTablesRequest request, - final StreamObserver responseObserver) { - oneShotOperationWrapper(BatchTableRequest.Operation.OpCase.CROSS_JOIN, request, - responseObserver); + final StreamObserver responseObserver) { + oneShotOperationWrapper(BatchTableRequest.Operation.OpCase.CROSS_JOIN, request, responseObserver); } @Override public void naturalJoinTables(final NaturalJoinTablesRequest request, - final StreamObserver responseObserver) { - oneShotOperationWrapper(BatchTableRequest.Operation.OpCase.NATURAL_JOIN, request, - responseObserver); + final StreamObserver responseObserver) { + 
oneShotOperationWrapper(BatchTableRequest.Operation.OpCase.NATURAL_JOIN, request, responseObserver); } @Override public void exactJoinTables(final ExactJoinTablesRequest request, - final StreamObserver responseObserver) { - oneShotOperationWrapper(BatchTableRequest.Operation.OpCase.EXACT_JOIN, request, - responseObserver); + final StreamObserver responseObserver) { + oneShotOperationWrapper(BatchTableRequest.Operation.OpCase.EXACT_JOIN, request, responseObserver); } @Override public void leftJoinTables(LeftJoinTablesRequest request, - StreamObserver responseObserver) { - oneShotOperationWrapper(BatchTableRequest.Operation.OpCase.LEFT_JOIN, request, - responseObserver); + StreamObserver responseObserver) { + oneShotOperationWrapper(BatchTableRequest.Operation.OpCase.LEFT_JOIN, request, responseObserver); } @Override public void asOfJoinTables(AsOfJoinTablesRequest request, - StreamObserver responseObserver) { - oneShotOperationWrapper(BatchTableRequest.Operation.OpCase.AS_OF_JOIN, request, - responseObserver); + StreamObserver responseObserver) { + oneShotOperationWrapper(BatchTableRequest.Operation.OpCase.AS_OF_JOIN, request, responseObserver); } @Override public void runChartDownsample(RunChartDownsampleRequest request, - StreamObserver responseObserver) { - oneShotOperationWrapper(BatchTableRequest.Operation.OpCase.RUN_CHART_DOWNSAMPLE, request, - responseObserver); + StreamObserver responseObserver) { + oneShotOperationWrapper(BatchTableRequest.Operation.OpCase.RUN_CHART_DOWNSAMPLE, request, responseObserver); } @Override public void batch(final BatchTableRequest request, - final StreamObserver responseObserver) { + final StreamObserver responseObserver) { GrpcUtil.rpcWrapper(log, responseObserver, () -> { final SessionState session = sessionService.getCurrentSession(); // step 1: initialize exports final List exportBuilders = request.getOpsList().stream() - .map(op -> new BatchExportBuilder(session, op)) - .collect(Collectors.toList()); + .map(op -> new 
BatchExportBuilder(session, op)) + .collect(Collectors.toList()); // step 2: resolve dependencies final Function> resolver = ref -> { @@ -294,18 +271,16 @@ public void batch(final BatchTableRequest request, final int offset = ref.getBatchOffset(); if (offset < 0 || offset >= exportBuilders.size()) { throw GrpcUtil.statusRuntimeException(Code.INVALID_ARGUMENT, - "invalid table reference: " + ref); + "invalid table reference: " + ref); } return exportBuilders.get(offset).exportBuilder.getExport(); default: - throw GrpcUtil.statusRuntimeException(Code.INVALID_ARGUMENT, - "invalid table reference: " + ref); + throw GrpcUtil.statusRuntimeException(Code.INVALID_ARGUMENT, "invalid table reference: " + ref); } }; exportBuilders.forEach(export -> export.resolveDependencies(resolver)); - // step 3: check for cyclical dependencies; this is our only opportunity to check - // non-export cycles + // step 3: check for cyclical dependencies; this is our only opportunity to check non-export cycles // TODO: check for cycles // step 4: submit the batched operations @@ -319,17 +294,17 @@ public void batch(final BatchTableRequest request, if (exportId == SessionState.NON_EXPORT_ID) { resultId = TableReference.newBuilder().setBatchOffset(i).build(); } else { - resultId = TableReference.newBuilder() - .setTicket(ExportTicketHelper.exportIdToTicket(exportId)).build(); + resultId = TableReference.newBuilder().setTicket(ExportTicketHelper.exportIdToTicket(exportId)) + .build(); } exportBuilder.exportBuilder.onError((result, errorContext, dependentId) -> { safelyExecuteLocked(responseObserver, - () -> responseObserver.onNext(ExportedTableCreationResponse.newBuilder() - .setResultId(resultId) - .setSuccess(false) - .setErrorInfo(errorContext + " dependency: " + dependentId) - .build())); + () -> responseObserver.onNext(ExportedTableCreationResponse.newBuilder() + .setResultId(resultId) + .setSuccess(false) + .setErrorInfo(errorContext + " dependency: " + dependentId) + .build())); if 
(remaining.decrementAndGet() == 0) { safelyExecuteLocked(responseObserver, responseObserver::onCompleted); @@ -338,7 +313,7 @@ public void batch(final BatchTableRequest request, final Table table = exportBuilder.doExport(); safelyExecuteLocked(responseObserver, - () -> responseObserver.onNext(buildTableCreationResponse(resultId, table))); + () -> responseObserver.onNext(buildTableCreationResponse(resultId, table))); if (remaining.decrementAndGet() == 0) { safelyExecuteLocked(responseObserver, responseObserver::onCompleted); } @@ -351,67 +326,62 @@ public void batch(final BatchTableRequest request, @Override public void exportedTableUpdates(final ExportedTableUpdatesRequest request, - final StreamObserver responseObserver) { + final StreamObserver responseObserver) { GrpcUtil.rpcWrapper(log, responseObserver, () -> { final SessionState session = sessionService.getCurrentSession(); - final ExportedTableUpdateListener listener = - new ExportedTableUpdateListener(session, responseObserver); + final ExportedTableUpdateListener listener = new ExportedTableUpdateListener(session, responseObserver); session.addExportListener(listener); - ((ServerCallStreamObserver) responseObserver) - .setOnCancelHandler(() -> { - session.removeExportListener(listener); - }); + ((ServerCallStreamObserver) responseObserver).setOnCancelHandler(() -> { + session.removeExportListener(listener); + }); }); } - public static ExportedTableCreationResponse buildTableCreationResponse( - final TableReference tableRef, final Table table) { + public static ExportedTableCreationResponse buildTableCreationResponse(final TableReference tableRef, + final Table table) { return ExportedTableCreationResponse.newBuilder() - .setSuccess(true) - .setResultId(tableRef) - .setIsStatic(!table.isLive()) - .setSize(table.size()) - .setSchemaHeader(BarrageSchemaUtil.schemaBytesFromTable(table)) - .build(); + .setSuccess(true) + .setResultId(tableRef) + .setIsStatic(!table.isLive()) + .setSize(table.size()) + 
.setSchemaHeader(BarrageSchemaUtil.schemaBytesFromTable(table)) + .build(); } /** - * This helper is a wrapper that enables one-shot RPCs to utilize the same code paths that a - * batch RPC utilizes. + * This helper is a wrapper that enables one-shot RPCs to utilize the same code paths that a batch RPC utilizes. * * @param op the protobuf op-code for the batch operation request * @param request the protobuf that is mapped to this op-code * @param responseObserver the observer that needs to know the result of this rpc * @param the protobuf type that configures the behavior of the operation */ - private void oneShotOperationWrapper(final BatchTableRequest.Operation.OpCase op, - final T request, final StreamObserver responseObserver) { + private void oneShotOperationWrapper(final BatchTableRequest.Operation.OpCase op, final T request, + final StreamObserver responseObserver) { GrpcUtil.rpcWrapper(log, responseObserver, () -> { final SessionState session = sessionService.getCurrentSession(); final GrpcTableOperation operation = getOp(op); operation.validateRequest(request); final Ticket resultId = operation.getResultTicket(request); - final TableReference resultRef = - TableReference.newBuilder().setTicket(resultId).build(); + final TableReference resultRef = TableReference.newBuilder().setTicket(resultId).build(); - final List> dependencies = - operation.getTableReferences(request).stream() + final List> dependencies = operation.getTableReferences(request).stream() .map(TableReference::getTicket) .map((ticket) -> ticketRouter.

    resolve(session, ticket)) .collect(Collectors.toList()); session.newExport(resultId) - .require(dependencies) - .onError(responseObserver::onError) - .submit(() -> { - final Table result = operation.create(request, dependencies); - safelyExecute(() -> { - responseObserver.onNext(buildTableCreationResponse(resultRef, result)); - responseObserver.onCompleted(); + .require(dependencies) + .onError(responseObserver::onError) + .submit(() -> { + final Table result = operation.create(request, dependencies); + safelyExecute(() -> { + responseObserver.onNext(buildTableCreationResponse(resultRef, result)); + responseObserver.onCompleted(); + }); + return result; }); - return result; - }); }); } @@ -426,15 +396,13 @@ private class BatchExportBuilder { operation = getOp(op.getOpCase()); // get operation from op code request = operation.getRequestFromOperation(op); final Ticket resultId = operation.getResultTicket(request); - exportBuilder = resultId.getTicket().size() == 0 ? session.nonExport() - : session.newExport(resultId); + exportBuilder = resultId.getTicket().size() == 0 ? 
session.nonExport() : session.newExport(resultId); } - void resolveDependencies( - final Function> resolveReference) { + void resolveDependencies(final Function> resolveReference) { dependencies = operation.getTableReferences(request).stream() - .map(resolveReference) - .collect(Collectors.toList()); + .map(resolveReference) + .collect(Collectors.toList()); exportBuilder.require(dependencies); } diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/ComboAggregateGrpcImpl.java b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/ComboAggregateGrpcImpl.java index a10d1bc8797..d7cf933f57c 100644 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/ComboAggregateGrpcImpl.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/ComboAggregateGrpcImpl.java @@ -26,26 +26,24 @@ public class ComboAggregateGrpcImpl extends GrpcTableOperation> sourceTables) { + final List> sourceTables) { Assert.eq(sourceTables.size(), "sourceTables.size()", 1); final Table parent = sourceTables.get(0).get(); - final String[] groupBySpecs = - request.getGroupByColumnsList().toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + final String[] groupBySpecs = request.getGroupByColumnsList().toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY); final SelectColumn[] groupByColumns = SelectColumnFactory.getExpressions(groupBySpecs); ColumnExpressionValidator.validateColumnExpressions(groupByColumns, groupBySpecs, parent); final Table result; if (!request.getForceCombo() && request.getAggregatesCount() == 1 - && request.getAggregates(0).getType() != ComboAggregateRequest.AggType.PERCENTILE - && request.getAggregates(0).getMatchPairsCount() == 0) { - // This is a special case with a special operator that can be invoked right off of the - // table api. 
+ && request.getAggregates(0).getType() != ComboAggregateRequest.AggType.PERCENTILE + && request.getAggregates(0).getMatchPairsCount() == 0) { + // This is a special case with a special operator that can be invoked right off of the table api. result = singleAggregateHelper(parent, groupByColumns, request.getAggregates(0)); } else { result = comboAggregateHelper(parent, groupByColumns, request.getAggregatesList()); @@ -53,8 +51,8 @@ public Table create(final ComboAggregateRequest request, return result; } - private static Table singleAggregateHelper(final Table parent, - final SelectColumn[] groupByColumns, final ComboAggregateRequest.Aggregate aggregate) { + private static Table singleAggregateHelper(final Table parent, final SelectColumn[] groupByColumns, + final ComboAggregateRequest.Aggregate aggregate) { switch (aggregate.getType()) { case SUM: return parent.sumBy(groupByColumns); @@ -83,19 +81,17 @@ private static Table singleAggregateHelper(final Table parent, case WEIGHTED_AVG: return parent.wavgBy(aggregate.getColumnName(), groupByColumns); default: - throw new UnsupportedOperationException( - "Unsupported aggregate: " + aggregate.getType()); + throw new UnsupportedOperationException("Unsupported aggregate: " + aggregate.getType()); } } - private static Table comboAggregateHelper(final Table parent, - final SelectColumn[] groupByColumns, - final List aggregates) { + private static Table comboAggregateHelper(final Table parent, final SelectColumn[] groupByColumns, + final List aggregates) { final Set groupByColumnSet = - Arrays.stream(groupByColumns).map(SelectColumn::getName).collect(Collectors.toSet()); + Arrays.stream(groupByColumns).map(SelectColumn::getName).collect(Collectors.toSet()); final ComboAggregateFactory.ComboBy[] comboBy = - new ComboAggregateFactory.ComboBy[aggregates.size()]; + new ComboAggregateFactory.ComboBy[aggregates.size()]; for (int i = 0; i < aggregates.size(); i++) { final ComboAggregateRequest.Aggregate agg = aggregates.get(i); @@ 
-104,18 +100,15 @@ private static Table comboAggregateHelper(final Table parent, if (agg.getMatchPairsCount() == 0) { // if not specified, we apply the aggregate to all columns not "otherwise involved" matchPairs = Arrays.stream(parent.getColumns()) - .map(DataColumn::getName) - .filter(n -> !(groupByColumnSet.contains(n) - || (agg.getType() == ComboAggregateRequest.AggType.WEIGHTED_AVG - && agg.getColumnName().equals(n)))) - .toArray(String[]::new); + .map(DataColumn::getName) + .filter(n -> !(groupByColumnSet.contains(n) + || (agg.getType() == ComboAggregateRequest.AggType.WEIGHTED_AVG + && agg.getColumnName().equals(n)))) + .toArray(String[]::new); } else { - matchPairs = - agg.getMatchPairsList().toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY); - final SelectColumn[] matchPairExpressions = - SelectColumnFactory.getExpressions(matchPairs); - ColumnExpressionValidator.validateColumnExpressions(matchPairExpressions, - matchPairs, parent); + matchPairs = agg.getMatchPairsList().toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + final SelectColumn[] matchPairExpressions = SelectColumnFactory.getExpressions(matchPairs); + ColumnExpressionValidator.validateColumnExpressions(matchPairExpressions, matchPairs, parent); } final Supplier comboMapper = () -> { @@ -141,8 +134,7 @@ private static Table comboAggregateHelper(final Table parent, case MEDIAN: return ComboAggregateFactory.AggMed(matchPairs); case PERCENTILE: - return ComboAggregateFactory.AggPct(agg.getPercentile(), agg.getAvgMedian(), - matchPairs); + return ComboAggregateFactory.AggPct(agg.getPercentile(), agg.getAvgMedian(), matchPairs); case STD: return ComboAggregateFactory.AggStd(matchPairs); case VAR: @@ -150,8 +142,7 @@ private static Table comboAggregateHelper(final Table parent, case WEIGHTED_AVG: return ComboAggregateFactory.AggWAvg(agg.getColumnName(), matchPairs); default: - throw new UnsupportedOperationException( - "Unsupported aggregate: " + agg.getType()); + throw new 
UnsupportedOperationException("Unsupported aggregate: " + agg.getType()); } }; diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/DropColumnsGrpcImpl.java b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/DropColumnsGrpcImpl.java index d9a5c2bc158..a0f6781564a 100644 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/DropColumnsGrpcImpl.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/DropColumnsGrpcImpl.java @@ -16,12 +16,11 @@ public class DropColumnsGrpcImpl extends GrpcTableOperation @Inject public DropColumnsGrpcImpl() { super(BatchTableRequest.Operation::getDropColumns, DropColumnsRequest::getResultId, - DropColumnsRequest::getSourceId); + DropColumnsRequest::getSourceId); } @Override - public Table create(final DropColumnsRequest request, - final List> sourceTables) { + public Table create(final DropColumnsRequest request, final List> sourceTables) { Assert.eq(sourceTables.size(), "sourceTables.size()", 1); return sourceTables.get(0).get().dropColumns(request.getColumnNamesList()); } diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/EmptyTableGrpcImpl.java b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/EmptyTableGrpcImpl.java index c633361bd0f..b1d531c5ff2 100644 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/EmptyTableGrpcImpl.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/EmptyTableGrpcImpl.java @@ -34,14 +34,12 @@ public EmptyTableGrpcImpl() { @Override public void validateRequest(final EmptyTableRequest request) throws StatusRuntimeException { if (request.getSize() < 0) { - throw GrpcUtil.statusRuntimeException(Code.INVALID_ARGUMENT, - "Size must be greater than zero"); + throw GrpcUtil.statusRuntimeException(Code.INVALID_ARGUMENT, "Size must be greater than zero"); } } @Override - public Table create(final EmptyTableRequest request, - final List> sourceTables) { + public Table create(final EmptyTableRequest request, final List> sourceTables) { 
Assert.eq(sourceTables.size(), "sourceTables.size()", 0); return TableTools.emptyTable(request.getSize()); diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/FilterTableGrpcImpl.java b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/FilterTableGrpcImpl.java index 80f1c483f7f..3d651c522ef 100644 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/FilterTableGrpcImpl.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/FilterTableGrpcImpl.java @@ -18,13 +18,11 @@ public class FilterTableGrpcImpl extends GrpcTableOperation @Inject public FilterTableGrpcImpl() { - super(BatchTableRequest.Operation::getFilter, FilterTableRequest::getResultId, - FilterTableRequest::getSourceId); + super(BatchTableRequest.Operation::getFilter, FilterTableRequest::getResultId, FilterTableRequest::getSourceId); } @Override - public Table create(final FilterTableRequest request, - final List> sourceTables) { + public Table create(final FilterTableRequest request, final List> sourceTables) { Assert.eq(sourceTables.size(), "sourceTables.size()", 1); Table sourceTable = sourceTables.get(0).get(); @@ -37,10 +35,10 @@ public Table create(final FilterTableRequest request, filter = filters.get(0); } else { filter = Condition.newBuilder() - .setAnd(AndCondition.newBuilder() - .addAllFilters(filters) - .build()) - .build(); + .setAnd(AndCondition.newBuilder() + .addAllFilters(filters) + .build()) + .build(); } // make type info available @@ -52,28 +50,22 @@ public Table create(final FilterTableRequest request, // rewrite unnecessary NOT expressions away filter = NormalizeNots.exec(filter); - // if a "in" expression has a non-reference on the left or reference on the right, flip it, - // and split - // up values so these can be left as INs or remade into EQs, and join them together with - // OR/ANDs. 
+ // if a "in" expression has a non-reference on the left or reference on the right, flip it, and split + // up values so these can be left as INs or remade into EQs, and join them together with OR/ANDs. filter = FlipNonReferenceMatchExpression.exec(filter); // merge ANDs nested in ANDs and ORs nested in ORs for a simpler structure filter = MergeNestedBinaryOperations.exec(filter); - // for any "in" expression (at this point, all have a reference on the left), if they have a - // reference + // for any "in" expression (at this point, all have a reference on the left), if they have a reference // value on the left it must be split into its own "equals" instead. filter = ConvertInvalidInExpressions.exec(filter); - // replace any EQ-type expression with its corresponding IN-type expression. this preserves - // the changes - // made above, could be moved earlier in this list, but must come before "in"/"not in"s are - // merged + // replace any EQ-type expression with its corresponding IN-type expression. 
this preserves the changes + // made above, could be moved earlier in this list, but must come before "in"/"not in"s are merged // TODO (deephaven-core#733) - // within each OR/AND, find any comparable "in"/"not in" expression referring to the same - // column + // within each OR/AND, find any comparable "in"/"not in" expression referring to the same column // on the left side and merge them into one match // TODO (deephaven-core#733) @@ -86,8 +78,8 @@ public Table create(final FilterTableRequest request, List finishedConditions = Collections.singletonList(filter); // build SelectFilter[] to pass to the table - SelectFilter[] selectFilters = finishedConditions.stream() - .map(f -> FilterFactory.makeFilter(sourceTable, f)).toArray(SelectFilter[]::new); + SelectFilter[] selectFilters = finishedConditions.stream().map(f -> FilterFactory.makeFilter(sourceTable, f)) + .toArray(SelectFilter[]::new); // execute the filters return sourceTable.where(selectFilters); diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/FlattenTableGrpcImpl.java b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/FlattenTableGrpcImpl.java index f42f2bd3ed2..c929ac7e56f 100644 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/FlattenTableGrpcImpl.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/FlattenTableGrpcImpl.java @@ -13,13 +13,11 @@ public class FlattenTableGrpcImpl extends GrpcTableOperation { @Inject public FlattenTableGrpcImpl() { - super(BatchTableRequest.Operation::getFlatten, FlattenRequest::getResultId, - FlattenRequest::getSourceId); + super(BatchTableRequest.Operation::getFlatten, FlattenRequest::getResultId, FlattenRequest::getSourceId); } @Override - public Table create(FlattenRequest request, - List> sourceTables) { + public Table create(FlattenRequest request, List> sourceTables) { Assert.eq(sourceTables.size(), "sourceTables.size()", 1); final Table parent = sourceTables.get(0).get(); diff --git 
a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/GrpcTableOperation.java b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/GrpcTableOperation.java index 37d880ce07a..b751d947dfe 100644 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/GrpcTableOperation.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/GrpcTableOperation.java @@ -26,13 +26,12 @@ protected interface MultiDependencyFunction { * * @param getRequest a functor to extract the request from a BatchTableRequest.Operation * @param getTicket a function to extract the result ticket from the request - * @param getDependencies a function to extract the table-reference dependencies from the - * request + * @param getDependencies a function to extract the table-reference dependencies from the request */ protected GrpcTableOperation( - final Function getRequest, - final Function getTicket, - final MultiDependencyFunction getDependencies) { + final Function getRequest, + final Function getTicket, + final MultiDependencyFunction getDependencies) { this.getRequest = getRequest; this.getTicket = getTicket; this.getDependencies = getDependencies; @@ -46,9 +45,9 @@ protected GrpcTableOperation( * @param getDependency a function to extract the table-reference dependency from the request */ protected GrpcTableOperation( - final Function getRequest, - final Function getTicket, - final Function getDependency) { + final Function getRequest, + final Function getTicket, + final Function getDependency) { this.getRequest = getRequest; this.getTicket = getTicket; this.getDependencies = (request) -> Collections.singletonList(getDependency.apply(request)); @@ -61,8 +60,8 @@ protected GrpcTableOperation( * @param getTicket a function to extract the result ticket from the request */ protected GrpcTableOperation( - final Function getRequest, - final Function getTicket) { + final Function getRequest, + final Function getTicket) { this.getRequest = getRequest; this.getTicket = getTicket; 
this.getDependencies = (request) -> Collections.emptyList(); diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/HeadOrTailByGrpcImpl.java b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/HeadOrTailByGrpcImpl.java index b2596ad7f5a..5c68317711b 100644 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/HeadOrTailByGrpcImpl.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/HeadOrTailByGrpcImpl.java @@ -29,9 +29,9 @@ protected interface RealTableOperation { private final LiveTableMonitor liveTableMonitor; protected HeadOrTailByGrpcImpl( - final Function getRequest, - final RealTableOperation realTableOperation, - final LiveTableMonitor liveTableMonitor) { + final Function getRequest, + final RealTableOperation realTableOperation, + final LiveTableMonitor liveTableMonitor) { super(getRequest, HeadOrTailByRequest::getResultId, HeadOrTailByRequest::getSourceId); this.realTableOperation = realTableOperation; this.liveTableMonitor = liveTableMonitor; @@ -41,30 +41,27 @@ protected HeadOrTailByGrpcImpl( public void validateRequest(final HeadOrTailByRequest request) throws StatusRuntimeException { final long nRows = request.getNumRows(); if (nRows < 0) { - throw GrpcUtil.statusRuntimeException(Code.INVALID_ARGUMENT, - "numRows must be >= 0 (found: " + nRows + ")"); + throw GrpcUtil.statusRuntimeException(Code.INVALID_ARGUMENT, "numRows must be >= 0 (found: " + nRows + ")"); } } @Override - public Table create(final HeadOrTailByRequest request, - final List> sourceTables) { + public Table create(final HeadOrTailByRequest request, final List> sourceTables) { Assert.eq(sourceTables.size(), "sourceTables.size()", 1); final Table parent = sourceTables.get(0).get(); final String[] columnSpecs = - request.getGroupByColumnSpecsList().toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + request.getGroupByColumnSpecsList().toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY); final SelectColumn[] expressions = 
SelectColumnFactory.getExpressions(columnSpecs); // note: we don't use the output from validateColumnExpressions because the headBy/tailBy - // overloads that take SelectColumn arrays throw UnsupportedOperationException, but we - // validate anyway + // overloads that take SelectColumn arrays throw UnsupportedOperationException, but we validate anyway ColumnExpressionValidator.validateColumnExpressions(expressions, columnSpecs, parent); // note that headBy/tailBy use ungroup which currently requires the LTM lock - return liveTableMonitor.sharedLock().computeLocked( - () -> realTableOperation.apply(parent, request.getNumRows(), columnSpecs)); + return liveTableMonitor.sharedLock() + .computeLocked(() -> realTableOperation.apply(parent, request.getNumRows(), columnSpecs)); } @Singleton diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/HeadOrTailGrpcImpl.java b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/HeadOrTailGrpcImpl.java index 691fabbe521..fb428a420ca 100644 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/HeadOrTailGrpcImpl.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/HeadOrTailGrpcImpl.java @@ -23,8 +23,8 @@ protected interface RealTableOperation { private final RealTableOperation realTableOperation; protected HeadOrTailGrpcImpl( - final Function getRequest, - final RealTableOperation realTableOperation) { + final Function getRequest, + final RealTableOperation realTableOperation) { super(getRequest, HeadOrTailRequest::getResultId, HeadOrTailRequest::getSourceId); this.realTableOperation = realTableOperation; } @@ -33,14 +33,12 @@ protected HeadOrTailGrpcImpl( public void validateRequest(final HeadOrTailRequest request) throws StatusRuntimeException { final long nRows = request.getNumRows(); if (nRows < 0) { - throw GrpcUtil.statusRuntimeException(Code.INVALID_ARGUMENT, - "numRows must be >= 0 (found: " + nRows + ")"); + throw GrpcUtil.statusRuntimeException(Code.INVALID_ARGUMENT, "numRows must be 
>= 0 (found: " + nRows + ")"); } } @Override - public Table create(final HeadOrTailRequest request, - final List> sourceTables) { + public Table create(final HeadOrTailRequest request, final List> sourceTables) { Assert.eq(sourceTables.size(), "sourceTables.size()", 1); return realTableOperation.apply(sourceTables.get(0).get(), request.getNumRows()); } diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/JoinTablesGrpcImpl.java b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/JoinTablesGrpcImpl.java index 2c82f05fd1f..f527c488594 100644 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/JoinTablesGrpcImpl.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/JoinTablesGrpcImpl.java @@ -27,8 +27,7 @@ public abstract class JoinTablesGrpcImpl extends GrpcTableOperation { @FunctionalInterface protected interface RealTableOperation { - Table apply(Table lhs, Table rhs, MatchPair[] columnsToMatch, MatchPair[] columnsToAdd, - T request); + Table apply(Table lhs, Table rhs, MatchPair[] columnsToMatch, MatchPair[] columnsToAdd, T request); } private final Function> getColMatchList; @@ -37,12 +36,12 @@ Table apply(Table lhs, Table rhs, MatchPair[] columnsToMatch, MatchPair[] column private final RealTableOperation realTableOperation; protected JoinTablesGrpcImpl(final LiveTableMonitor liveTableMonitor, - final Function getRequest, - final Function getTicket, - final MultiDependencyFunction getDependencies, - final Function> getColMatchList, - final Function> getColAddList, - final RealTableOperation realTableOperation) { + final Function getRequest, + final Function getTicket, + final MultiDependencyFunction getDependencies, + final Function> getColMatchList, + final Function> getColAddList, + final RealTableOperation realTableOperation) { super(getRequest, getTicket, getDependencies); this.liveTableMonitor = liveTableMonitor; this.getColMatchList = getColMatchList; @@ -57,13 +56,12 @@ public void validateRequest(final T request) 
throws StatusRuntimeException { MatchPairFactory.getExpressions(getColAddList.apply(request)); } catch (final ExpressionException err) { throw GrpcUtil.statusRuntimeException(Code.INVALID_ARGUMENT, - err.getMessage() + ": " + err.getProblemExpression()); + err.getMessage() + ": " + err.getProblemExpression()); } } @Override - public Table create(final T request, - final List> sourceTables) { + public Table create(final T request, final List> sourceTables) { Assert.eq(sourceTables.size(), "sourceTables.size()", 2); final MatchPair[] columnsToMatch; @@ -74,7 +72,7 @@ public Table create(final T request, columnsToAdd = MatchPairFactory.getExpressions(getColAddList.apply(request)); } catch (final ExpressionException err) { throw GrpcUtil.statusRuntimeException(Code.INVALID_ARGUMENT, - err.getMessage() + ": " + err.getProblemExpression()); + err.getMessage() + ": " + err.getProblemExpression()); } final Table lhs = sourceTables.get(0).get(); @@ -85,7 +83,7 @@ public Table create(final T request, result = realTableOperation.apply(lhs, rhs, columnsToMatch, columnsToAdd, request); } else { result = liveTableMonitor.sharedLock().computeLocked( - () -> realTableOperation.apply(lhs, rhs, columnsToMatch, columnsToAdd, request)); + () -> realTableOperation.apply(lhs, rhs, columnsToMatch, columnsToAdd, request)); } return result; } @@ -94,33 +92,29 @@ public Table create(final T request, public static class AsOfJoinTablesGrpcImpl extends JoinTablesGrpcImpl { private static final MultiDependencyFunction EXTRACT_DEPS = - (request) -> Lists.newArrayList(request.getLeftId(), request.getRightId()); + (request) -> Lists.newArrayList(request.getLeftId(), request.getRightId()); @Inject protected AsOfJoinTablesGrpcImpl(LiveTableMonitor liveTableMonitor) { - super(liveTableMonitor, BatchTableRequest.Operation::getAsOfJoin, - AsOfJoinTablesRequest::getResultId, EXTRACT_DEPS, - AsOfJoinTablesRequest::getColumnsToMatchList, - AsOfJoinTablesRequest::getColumnsToAddList, - 
AsOfJoinTablesGrpcImpl::doJoin); + super(liveTableMonitor, BatchTableRequest.Operation::getAsOfJoin, AsOfJoinTablesRequest::getResultId, + EXTRACT_DEPS, + AsOfJoinTablesRequest::getColumnsToMatchList, AsOfJoinTablesRequest::getColumnsToAddList, + AsOfJoinTablesGrpcImpl::doJoin); } @Override - public void validateRequest(final AsOfJoinTablesRequest request) - throws StatusRuntimeException { + public void validateRequest(final AsOfJoinTablesRequest request) throws StatusRuntimeException { super.validateRequest(request); if (request.getAsOfMatchRule() == AsOfJoinTablesRequest.MatchRule.UNRECOGNIZED) { - throw GrpcUtil.statusRuntimeException(Code.INVALID_ARGUMENT, - "Unrecognized as-of match rule"); + throw GrpcUtil.statusRuntimeException(Code.INVALID_ARGUMENT, "Unrecognized as-of match rule"); } } public static Table doJoin(final Table lhs, final Table rhs, - final MatchPair[] columnsToMatch, final MatchPair[] columnsToAdd, - final AsOfJoinTablesRequest request) { - Table.AsOfMatchRule matchRule = - Table.AsOfMatchRule.valueOf(request.getAsOfMatchRule().name()); + final MatchPair[] columnsToMatch, final MatchPair[] columnsToAdd, + final AsOfJoinTablesRequest request) { + Table.AsOfMatchRule matchRule = Table.AsOfMatchRule.valueOf(request.getAsOfMatchRule().name()); switch (matchRule) { case LESS_THAN: case LESS_THAN_EQUAL: @@ -138,24 +132,22 @@ public static Table doJoin(final Table lhs, final Table rhs, public static class CrossJoinTablesGrpcImpl extends JoinTablesGrpcImpl { private static final MultiDependencyFunction EXTRACT_DEPS = - (request) -> Lists.newArrayList(request.getLeftId(), request.getRightId()); + (request) -> Lists.newArrayList(request.getLeftId(), request.getRightId()); @Inject public CrossJoinTablesGrpcImpl(final LiveTableMonitor liveTableMonitor) { - super(liveTableMonitor, BatchTableRequest.Operation::getCrossJoin, - CrossJoinTablesRequest::getResultId, EXTRACT_DEPS, - CrossJoinTablesRequest::getColumnsToMatchList, - 
CrossJoinTablesRequest::getColumnsToAddList, - CrossJoinTablesGrpcImpl::doJoin); + super(liveTableMonitor, BatchTableRequest.Operation::getCrossJoin, CrossJoinTablesRequest::getResultId, + EXTRACT_DEPS, + CrossJoinTablesRequest::getColumnsToMatchList, CrossJoinTablesRequest::getColumnsToAddList, + CrossJoinTablesGrpcImpl::doJoin); } public static Table doJoin(final Table lhs, final Table rhs, - final MatchPair[] columnsToMatch, final MatchPair[] columnsToAdd, - final CrossJoinTablesRequest request) { + final MatchPair[] columnsToMatch, final MatchPair[] columnsToAdd, + final CrossJoinTablesRequest request) { int reserveBits = request.getReserveBits(); if (reserveBits <= 0) { - return lhs.join(rhs, columnsToMatch, columnsToAdd); // use the default number of - // reserve_bits + return lhs.join(rhs, columnsToMatch, columnsToAdd); // use the default number of reserve_bits } else { return lhs.join(rhs, columnsToMatch, columnsToAdd, reserveBits); } @@ -166,20 +158,19 @@ public static Table doJoin(final Table lhs, final Table rhs, public static class ExactJoinTablesGrpcImpl extends JoinTablesGrpcImpl { private static final MultiDependencyFunction EXTRACT_DEPS = - (request) -> Lists.newArrayList(request.getLeftId(), request.getRightId()); + (request) -> Lists.newArrayList(request.getLeftId(), request.getRightId()); @Inject public ExactJoinTablesGrpcImpl(final LiveTableMonitor liveTableMonitor) { - super(liveTableMonitor, BatchTableRequest.Operation::getExactJoin, - ExactJoinTablesRequest::getResultId, EXTRACT_DEPS, - ExactJoinTablesRequest::getColumnsToMatchList, - ExactJoinTablesRequest::getColumnsToAddList, - ExactJoinTablesGrpcImpl::doJoin); + super(liveTableMonitor, BatchTableRequest.Operation::getExactJoin, ExactJoinTablesRequest::getResultId, + EXTRACT_DEPS, + ExactJoinTablesRequest::getColumnsToMatchList, ExactJoinTablesRequest::getColumnsToAddList, + ExactJoinTablesGrpcImpl::doJoin); } public static Table doJoin(final Table lhs, final Table rhs, - final MatchPair[] 
columnsToMatch, final MatchPair[] columnsToAdd, - final ExactJoinTablesRequest request) { + final MatchPair[] columnsToMatch, final MatchPair[] columnsToAdd, + final ExactJoinTablesRequest request) { return lhs.exactJoin(rhs, columnsToMatch, columnsToAdd); } } @@ -188,43 +179,40 @@ public static Table doJoin(final Table lhs, final Table rhs, public static class LeftJoinTablesGrpcImpl extends JoinTablesGrpcImpl { private static final MultiDependencyFunction EXTRACT_DEPS = - (request) -> Lists.newArrayList(request.getLeftId(), request.getRightId()); + (request) -> Lists.newArrayList(request.getLeftId(), request.getRightId()); @Inject public LeftJoinTablesGrpcImpl(final LiveTableMonitor liveTableMonitor) { - super(liveTableMonitor, BatchTableRequest.Operation::getLeftJoin, - LeftJoinTablesRequest::getResultId, EXTRACT_DEPS, - LeftJoinTablesRequest::getColumnsToMatchList, - LeftJoinTablesRequest::getColumnsToAddList, - LeftJoinTablesGrpcImpl::doJoin); + super(liveTableMonitor, BatchTableRequest.Operation::getLeftJoin, LeftJoinTablesRequest::getResultId, + EXTRACT_DEPS, + LeftJoinTablesRequest::getColumnsToMatchList, LeftJoinTablesRequest::getColumnsToAddList, + LeftJoinTablesGrpcImpl::doJoin); } public static Table doJoin(final Table lhs, final Table rhs, - final MatchPair[] columnsToMatch, final MatchPair[] columnsToAdd, - final LeftJoinTablesRequest request) { + final MatchPair[] columnsToMatch, final MatchPair[] columnsToAdd, + final LeftJoinTablesRequest request) { return lhs.leftJoin(rhs, columnsToMatch, columnsToAdd); } } @Singleton - public static class NaturalJoinTablesGrpcImpl - extends JoinTablesGrpcImpl { + public static class NaturalJoinTablesGrpcImpl extends JoinTablesGrpcImpl { private static final MultiDependencyFunction EXTRACT_DEPS = - (request) -> Lists.newArrayList(request.getLeftId(), request.getRightId()); + (request) -> Lists.newArrayList(request.getLeftId(), request.getRightId()); @Inject public NaturalJoinTablesGrpcImpl(final LiveTableMonitor 
liveTableMonitor) { - super(liveTableMonitor, BatchTableRequest.Operation::getNaturalJoin, - NaturalJoinTablesRequest::getResultId, EXTRACT_DEPS, - NaturalJoinTablesRequest::getColumnsToMatchList, - NaturalJoinTablesRequest::getColumnsToAddList, - NaturalJoinTablesGrpcImpl::doJoin); + super(liveTableMonitor, BatchTableRequest.Operation::getNaturalJoin, NaturalJoinTablesRequest::getResultId, + EXTRACT_DEPS, + NaturalJoinTablesRequest::getColumnsToMatchList, NaturalJoinTablesRequest::getColumnsToAddList, + NaturalJoinTablesGrpcImpl::doJoin); } public static Table doJoin(final Table lhs, final Table rhs, - final MatchPair[] columnsToMatch, final MatchPair[] columnsToAdd, - final NaturalJoinTablesRequest request) { + final MatchPair[] columnsToMatch, final MatchPair[] columnsToAdd, + final NaturalJoinTablesRequest request) { return lhs.naturalJoin(rhs, columnsToMatch, columnsToAdd); } } diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/MergeTablesGrpcImpl.java b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/MergeTablesGrpcImpl.java index 187cc6e5309..66325815731 100644 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/MergeTablesGrpcImpl.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/MergeTablesGrpcImpl.java @@ -24,32 +24,29 @@ public class MergeTablesGrpcImpl extends GrpcTableOperation @Inject public MergeTablesGrpcImpl(final LiveTableMonitor liveTableMonitor) { super(BatchTableRequest.Operation::getMerge, MergeTablesRequest::getResultId, - MergeTablesRequest::getSourceIdsList); + MergeTablesRequest::getSourceIdsList); this.liveTableMonitor = liveTableMonitor; } @Override public void validateRequest(final MergeTablesRequest request) throws StatusRuntimeException { if (request.getSourceIdsList().isEmpty()) { - throw GrpcUtil.statusRuntimeException(Code.INVALID_ARGUMENT, - "Cannot merge zero source tables."); + throw GrpcUtil.statusRuntimeException(Code.INVALID_ARGUMENT, "Cannot merge zero source tables."); } } 
@Override - public Table create(final MergeTablesRequest request, - final List> sourceTables) { + public Table create(final MergeTablesRequest request, final List> sourceTables) { Assert.gt(sourceTables.size(), "sourceTables.size()", 0); final String keyColumn = request.getKeyColumn(); final List
    tables = sourceTables.stream() - .map(SessionState.ExportObject::get) - .collect(Collectors.toList()); + .map(SessionState.ExportObject::get) + .collect(Collectors.toList()); Table result; if (tables.stream().noneMatch(Table::isLive)) { - result = keyColumn.isEmpty() ? TableTools.merge(tables) - : TableTools.mergeSorted(keyColumn, tables); + result = keyColumn.isEmpty() ? TableTools.merge(tables) : TableTools.mergeSorted(keyColumn, tables); } else { result = liveTableMonitor.sharedLock().computeLocked(() -> TableTools.merge(tables)); if (!keyColumn.isEmpty()) { diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/RunChartDownsampleGrpcImpl.java b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/RunChartDownsampleGrpcImpl.java index 1700cb3d693..147024f440e 100644 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/RunChartDownsampleGrpcImpl.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/RunChartDownsampleGrpcImpl.java @@ -14,13 +14,12 @@ public class RunChartDownsampleGrpcImpl extends GrpcTableOperation { @Inject protected RunChartDownsampleGrpcImpl() { - super(BatchTableRequest.Operation::getRunChartDownsample, - RunChartDownsampleRequest::getResultId, RunChartDownsampleRequest::getSourceId); + super(BatchTableRequest.Operation::getRunChartDownsample, RunChartDownsampleRequest::getResultId, + RunChartDownsampleRequest::getSourceId); } @Override - public Table create(RunChartDownsampleRequest request, - List> sourceTables) { + public Table create(RunChartDownsampleRequest request, List> sourceTables) { Assert.eq(sourceTables.size(), "sourceTables.size()", 1); final Table parent = sourceTables.get(0).get(); @@ -34,9 +33,9 @@ public Table create(RunChartDownsampleRequest request, zoomRange = null; } return parent.apply(new RunChartDownsample( - request.getPixelCount(), - zoomRange, - request.getXColumnName(), - request.getYColumnNamesList().toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY))); + 
request.getPixelCount(), + zoomRange, + request.getXColumnName(), + request.getYColumnNamesList().toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY))); } } diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/SelectDistinctGrpcImpl.java b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/SelectDistinctGrpcImpl.java index f20520e7de4..fda487a0081 100644 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/SelectDistinctGrpcImpl.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/SelectDistinctGrpcImpl.java @@ -19,12 +19,12 @@ public class SelectDistinctGrpcImpl extends GrpcTableOperation> sourceTables) { + final List> sourceTables) { Assert.eq(sourceTables.size(), "sourceTables.size()", 1); final Table parent = sourceTables.get(0).get(); @@ -34,7 +34,7 @@ public Table create(final SelectDistinctRequest request, requestedMissing.removeAll(parent.getDefinition().getColumnNameMap().keySet()); if (!requestedMissing.isEmpty()) { throw GrpcUtil.statusRuntimeException(Code.FAILED_PRECONDITION, - "column(s) not found: " + String.join(", ", requestedMissing)); + "column(s) not found: " + String.join(", ", requestedMissing)); } return parent.selectDistinct(request.getColumnNamesList()); diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/SnapshotTableGrpcImpl.java b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/SnapshotTableGrpcImpl.java index 360147aa0be..90a2da72f41 100644 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/SnapshotTableGrpcImpl.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/SnapshotTableGrpcImpl.java @@ -25,23 +25,21 @@ public class SnapshotTableGrpcImpl extends GrpcTableOperation EXTRACT_DEPS = - (request) -> { - if (request.hasLeftId()) { - return Arrays.asList(request.getLeftId(), request.getRightId()); - } - return Collections.singletonList(request.getRightId()); - }; + (request) -> { + if (request.hasLeftId()) { + return Arrays.asList(request.getLeftId(), 
request.getRightId()); + } + return Collections.singletonList(request.getRightId()); + }; @Inject public SnapshotTableGrpcImpl(final LiveTableMonitor liveTableMonitor) { - super(BatchTableRequest.Operation::getSnapshot, SnapshotTableRequest::getResultId, - EXTRACT_DEPS); + super(BatchTableRequest.Operation::getSnapshot, SnapshotTableRequest::getResultId, EXTRACT_DEPS); this.liveTableMonitor = liveTableMonitor; } @Override - public Table create(final SnapshotTableRequest request, - final List> sourceTables) { + public Table create(final SnapshotTableRequest request, final List> sourceTables) { final Table lhs; final Table rhs; if (sourceTables.size() == 1) { @@ -51,18 +49,15 @@ public Table create(final SnapshotTableRequest request, lhs = sourceTables.get(0).get(); rhs = sourceTables.get(1).get(); } else { - throw Assert - .statementNeverExecuted("Unexpected sourceTables size " + sourceTables.size()); + throw Assert.statementNeverExecuted("Unexpected sourceTables size " + sourceTables.size()); } - final String[] stampColumns = - request.getStampColumnsList().toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY); - final SelectColumn[] stampExpressions = - SelectColumnFactory.getExpressions(request.getStampColumnsList()); + final String[] stampColumns = request.getStampColumnsList().toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + final SelectColumn[] stampExpressions = SelectColumnFactory.getExpressions(request.getStampColumnsList()); ColumnExpressionValidator.validateColumnExpressions(stampExpressions, stampColumns, lhs); final FunctionalInterfaces.ThrowingSupplier doSnapshot = - () -> lhs.snapshot(rhs, request.getDoInitialSnapshot(), stampColumns); + () -> lhs.snapshot(rhs, request.getDoInitialSnapshot(), stampColumns); final Table result; if (!lhs.isLive() && !rhs.isLive()) { diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/SortTableGrpcImpl.java b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/SortTableGrpcImpl.java index 
554ff0aaaa7..ecbf3bf23bf 100644 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/SortTableGrpcImpl.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/SortTableGrpcImpl.java @@ -22,13 +22,11 @@ public class SortTableGrpcImpl extends GrpcTableOperation { @Inject public SortTableGrpcImpl() { - super(BatchTableRequest.Operation::getSort, SortTableRequest::getResultId, - SortTableRequest::getSourceId); + super(BatchTableRequest.Operation::getSort, SortTableRequest::getResultId, SortTableRequest::getSourceId); } @Override - public Table create(final SortTableRequest request, - final List> sourceTables) { + public Table create(final SortTableRequest request, final List> sourceTables) { Assert.eq(sourceTables.size(), "sourceTables.size()", 1); final Table original = sourceTables.get(0).get(); @@ -50,8 +48,7 @@ public Table create(final SortTableRequest request, } // This loop does two optimizations: - // 1. Consolidate all sorts into a SortPair array in order to only call one sort on the - // table + // 1. Consolidate all sorts into a SortPair array in order to only call one sort on the table // 2. 
Move all the reverses to the back: // - For an odd number of reverses only call one reverse // - For an even number of reverses do not call reverse (they cancel out) @@ -64,8 +61,7 @@ public Table create(final SortTableRequest request, int direction = 0; switch (sort.getDirection()) { case REVERSE: - // Toggle the reverse flag, should be true for odd number of reverses and false - // for an even number + // Toggle the reverse flag, should be true for odd number of reverses and false for an even number shouldReverse = !shouldReverse; continue; case DESCENDING: @@ -76,7 +72,7 @@ public Table create(final SortTableRequest request, break; default: throw GrpcUtil.statusRuntimeException(Code.INVALID_ARGUMENT, - "Unexpected sort direction: " + direction); + "Unexpected sort direction: " + direction); } final StringBuilder columnName = new StringBuilder(sort.getColumnName()); diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/TimeTableGrpcImpl.java b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/TimeTableGrpcImpl.java index 79ec5b93a1a..936ec177b27 100644 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/TimeTableGrpcImpl.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/TimeTableGrpcImpl.java @@ -35,18 +35,17 @@ public void validateRequest(final TimeTableRequest request) throws StatusRuntime final long periodNanos = request.getPeriodNanos(); if (periodNanos <= 0) { throw GrpcUtil.statusRuntimeException(Code.FAILED_PRECONDITION, - "periodNanos must be >= 0 (found: " + periodNanos + ")"); + "periodNanos must be >= 0 (found: " + periodNanos + ")"); } } @Override - public Table create(final TimeTableRequest request, - final List> sourceTables) { + public Table create(final TimeTableRequest request, final List> sourceTables) { Assert.eq(sourceTables.size(), "sourceTables.size()", 0); final long startTime = request.getStartTimeNanos(); final long periodValue = request.getPeriodNanos(); - final TimeTable timeTable = new 
TimeTable(scheduler, - startTime <= 0 ? null : DBTimeUtils.nanosToTime(startTime), periodValue); + final TimeTable timeTable = + new TimeTable(scheduler, startTime <= 0 ? null : DBTimeUtils.nanosToTime(startTime), periodValue); liveTableMonitor.addTable(timeTable); return timeTable; } diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/UngroupGrpcImpl.java b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/UngroupGrpcImpl.java index d2fc996c77c..84f29dd6036 100644 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/UngroupGrpcImpl.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/UngroupGrpcImpl.java @@ -19,20 +19,18 @@ public class UngroupGrpcImpl extends GrpcTableOperation { @Inject public UngroupGrpcImpl(final LiveTableMonitor liveTableMonitor) { - super(BatchTableRequest.Operation::getUngroup, UngroupRequest::getResultId, - UngroupRequest::getSourceId); + super(BatchTableRequest.Operation::getUngroup, UngroupRequest::getResultId, UngroupRequest::getSourceId); this.liveTableMonitor = liveTableMonitor; } @Override - public Table create(final UngroupRequest request, - final List> sourceTables) { + public Table create(final UngroupRequest request, final List> sourceTables) { Assert.eq(sourceTables.size(), "sourceTables.size()", 1); final Table parent = sourceTables.get(0).get(); final String[] columnsToUngroup = - request.getColumnsToUngroupList().toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + request.getColumnsToUngroupList().toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY); return liveTableMonitor.sharedLock() - .computeLocked(() -> parent.ungroup(request.getNullFill(), columnsToUngroup)); + .computeLocked(() -> parent.ungroup(request.getNullFill(), columnsToUngroup)); } } diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/UnstructuredFilterTableGrpcImpl.java b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/UnstructuredFilterTableGrpcImpl.java index 9aa10a1b1e7..d1b5bb32742 100644 --- 
a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/UnstructuredFilterTableGrpcImpl.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/UnstructuredFilterTableGrpcImpl.java @@ -14,26 +14,22 @@ import java.util.List; @Singleton -public class UnstructuredFilterTableGrpcImpl - extends GrpcTableOperation { +public class UnstructuredFilterTableGrpcImpl extends GrpcTableOperation { @Inject public UnstructuredFilterTableGrpcImpl() { - super(BatchTableRequest.Operation::getUnstructuredFilter, - UnstructuredFilterTableRequest::getResultId, - UnstructuredFilterTableRequest::getSourceId); + super(BatchTableRequest.Operation::getUnstructuredFilter, UnstructuredFilterTableRequest::getResultId, + UnstructuredFilterTableRequest::getSourceId); } @Override public Table create(final UnstructuredFilterTableRequest request, - final List> sourceTables) { + final List> sourceTables) { Assert.eq(sourceTables.size(), "sourceTables.size()", 1); final Table parent = sourceTables.get(0).get(); - final String[] filters = - request.getFiltersList().toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY); - final SelectFilter[] selectFilters = - ColumnExpressionValidator.validateSelectFilters(filters, parent); + final String[] filters = request.getFiltersList().toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + final SelectFilter[] selectFilters = ColumnExpressionValidator.validateSelectFilters(filters, parent); return parent.where(selectFilters); } } diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/UpdateOrSelectGrpcImpl.java b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/UpdateOrSelectGrpcImpl.java index 97960e2166a..26aa0da0667 100644 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/UpdateOrSelectGrpcImpl.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/UpdateOrSelectGrpcImpl.java @@ -24,20 +24,19 @@ protected interface RealTableOperation { private final RealTableOperation realTableOperation; protected 
UpdateOrSelectGrpcImpl( - final Function getRequest, - final RealTableOperation realTableOperation) { + final Function getRequest, + final RealTableOperation realTableOperation) { super(getRequest, SelectOrUpdateRequest::getResultId, SelectOrUpdateRequest::getSourceId); this.realTableOperation = realTableOperation; } @Override public Table create(final SelectOrUpdateRequest request, - final List> sourceTables) { + final List> sourceTables) { Assert.eq(sourceTables.size(), "sourceTables.size()", 1); final Table parent = sourceTables.get(0).get(); - final String[] columnSpecs = - request.getColumnSpecsList().toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + final String[] columnSpecs = request.getColumnSpecsList().toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY); final SelectColumn[] expressions = SelectColumnFactory.getExpressions(columnSpecs); ColumnExpressionValidator.validateColumnExpressions(expressions, columnSpecs, parent); diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/filter/AbstractNormalizeFilters.java b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/filter/AbstractNormalizeFilters.java index f7d5b032dc2..291771e5be5 100644 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/filter/AbstractNormalizeFilters.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/filter/AbstractNormalizeFilters.java @@ -21,14 +21,14 @@ public Condition onNot(Condition filter) { } @Override - public Condition onComparison(CompareCondition.CompareOperation operation, - CaseSensitivity caseSensitivity, Value lhs, Value rhs) { + public Condition onComparison(CompareCondition.CompareOperation operation, CaseSensitivity caseSensitivity, + Value lhs, Value rhs) { return NormalizeFilterUtil.doComparison(operation, caseSensitivity, lhs, rhs); } @Override public Condition onIn(Value target, List candidatesList, CaseSensitivity caseSensitivity, - MatchType matchType) { + MatchType matchType) { return NormalizeFilterUtil.doIn(target, 
candidatesList, caseSensitivity, matchType); } @@ -43,14 +43,14 @@ public Condition onInvoke(String method, Value target, List argumentsList } @Override - public Condition onContains(Reference reference, String searchString, - CaseSensitivity caseSensitivity, MatchType matchType) { + public Condition onContains(Reference reference, String searchString, CaseSensitivity caseSensitivity, + MatchType matchType) { return NormalizeFilterUtil.doContains(reference, searchString, caseSensitivity, matchType); } @Override public Condition onMatches(Reference reference, String regex, CaseSensitivity caseSensitivity, - MatchType matchType) { + MatchType matchType) { return NormalizeFilterUtil.doMatches(reference, regex, caseSensitivity, matchType); } diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/filter/ConvertInvalidInExpressions.java b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/filter/ConvertInvalidInExpressions.java index fe92173d861..e3a51e87cda 100644 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/filter/ConvertInvalidInExpressions.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/filter/ConvertInvalidInExpressions.java @@ -5,18 +5,17 @@ import java.util.List; /** - * Rewrite any IN-type expression into its corresponding EQUALS if the left side is not a reference - * or if the right side does have a reference. Assumes that FlipNonReferenceMatchExpression has - * already been run, making this the second attempt to deal with these, and letting us be confident - * that these expressions cannot be expressed as more efficient "in"s. + * Rewrite any IN-type expression into its corresponding EQUALS if the left side is not a reference or if the right side + * does have a reference. Assumes that FlipNonReferenceMatchExpression has already been run, making this the second + * attempt to deal with these, and letting us be confident that these expressions cannot be expressed as more efficient + * "in"s. 
* - * Examples: o ColumnA in 1 - left as is o ColumnA in 1, 2 - left as is o 1 in 2 - rewritten to 1 == - * 2. o ColumnA in ColumnB - rewritten to ColumnA == ColumnB + * Examples: o ColumnA in 1 - left as is o ColumnA in 1, 2 - left as is o 1 in 2 - rewritten to 1 == 2. o ColumnA in + * ColumnB - rewritten to ColumnA == ColumnB * - * Signs that visitors were mis-ordered: o 1 in ColumnA - literal on LHS should already be handled o - * 1 in 2, 3 - literal on LHS with multiple RHS values should already be handled, should have been - * flipped and split into individual exprs o ColumnA in ColumnB, 2 - column ref on RHS should - * already be handled + * Signs that visitors were mis-ordered: o 1 in ColumnA - literal on LHS should already be handled o 1 in 2, 3 - literal + * on LHS with multiple RHS values should already be handled, should have been flipped and split into individual exprs o + * ColumnA in ColumnB, 2 - column ref on RHS should already be handled */ public class ConvertInvalidInExpressions extends AbstractNormalizeFilters { private static final ConvertInvalidInExpressions INSTANCE = new ConvertInvalidInExpressions(); @@ -27,14 +26,12 @@ public static Condition exec(Condition filter) { @Override public Condition onIn(Value target, List candidatesList, CaseSensitivity caseSensitivity, - MatchType matchType) { - if (candidatesList.size() != 1 - || target.getDataCase() != candidatesList.get(0).getDataCase()) { + MatchType matchType) { + if (candidatesList.size() != 1 || target.getDataCase() != candidatesList.get(0).getDataCase()) { return super.onIn(target, candidatesList, caseSensitivity, matchType); } - return NormalizeFilterUtil.doComparison(operation(matchType), caseSensitivity, target, - candidatesList.get(0)); + return NormalizeFilterUtil.doComparison(operation(matchType), caseSensitivity, target, candidatesList.get(0)); } private CompareCondition.CompareOperation operation(MatchType matchType) { diff --git 
a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/filter/FilterFactory.java b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/filter/FilterFactory.java index 9ced46ca166..82a7c08fed3 100644 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/filter/FilterFactory.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/filter/FilterFactory.java @@ -37,16 +37,16 @@ public static SelectFilter makeFilter(Table table, Condition condition) { @Override public SelectFilter onAnd(List filtersList) { final SelectFilter[] items = filtersList.stream() - .map(cond -> FilterVisitor.accept(cond, this)) - .toArray(SelectFilter[]::new); + .map(cond -> FilterVisitor.accept(cond, this)) + .toArray(SelectFilter[]::new); return ConjunctiveFilter.makeConjunctiveFilter(items); } @Override public SelectFilter onOr(List filtersList) { final SelectFilter[] items = filtersList.stream() - .map(cond -> FilterVisitor.accept(cond, this)) - .toArray(SelectFilter[]::new); + .map(cond -> FilterVisitor.accept(cond, this)) + .toArray(SelectFilter[]::new); return DisjunctiveFilter.makeDisjunctiveFilter(items); } @@ -58,13 +58,13 @@ private SelectFilter generateConditionFilter(Condition filter) { public SelectFilter onNot(Condition filter) { // already must have optimized out any nested operations that we can flatten this into return generateConditionFilter(Condition.newBuilder().setNot(NotCondition.newBuilder() - .setFilter(filter) - .build()).build()); + .setFilter(filter) + .build()).build()); } @Override - public SelectFilter onComparison(CompareCondition.CompareOperation operation, - CaseSensitivity caseSensitivity, Value lhs, Value rhs) { + public SelectFilter onComparison(CompareCondition.CompareOperation operation, CaseSensitivity caseSensitivity, + Value lhs, Value rhs) { switch (operation) { case LESS_THAN: case LESS_THAN_OR_EQUAL: @@ -73,46 +73,40 @@ public SelectFilter onComparison(CompareCondition.CompareOperation operation, return 
generateNumericConditionFilter(operation, lhs, rhs); case EQUALS: case NOT_EQUALS: - // At this point, we shouldn't be able to be optimized to a match filter, so we'll - // tostring and build a condition + // At this point, we shouldn't be able to be optimized to a match filter, so we'll tostring and build a + // condition // and let the DBLangParser turn the "==" into the appropriate java call // Note that case insensitive checks aren't supported on this path if (caseSensitivity != CaseSensitivity.MATCH_CASE) { - throw new IllegalStateException( - "Should have been compiled out in a previous pass"); + throw new IllegalStateException("Should have been compiled out in a previous pass"); } - return generateConditionFilter( - NormalizeFilterUtil.doComparison(operation, caseSensitivity, lhs, rhs)); + return generateConditionFilter(NormalizeFilterUtil.doComparison(operation, caseSensitivity, lhs, rhs)); case UNRECOGNIZED: default: throw new IllegalStateException("Can't handle compare operation " + operation); } } - private SelectFilter generateNumericConditionFilter(CompareCondition.CompareOperation operation, - Value lhs, Value rhs) { + private SelectFilter generateNumericConditionFilter(CompareCondition.CompareOperation operation, Value lhs, + Value rhs) { boolean invert; String columName; Literal value; - if (lhs.getDataCase() == Value.DataCase.LITERAL - && rhs.getDataCase() == Value.DataCase.REFERENCE) { + if (lhs.getDataCase() == Value.DataCase.LITERAL && rhs.getDataCase() == Value.DataCase.REFERENCE) { invert = true; value = lhs.getLiteral(); columName = rhs.getReference().getColumnName(); - } else if (lhs.getDataCase() == Value.DataCase.REFERENCE - && rhs.getDataCase() == Value.DataCase.LITERAL) { + } else if (lhs.getDataCase() == Value.DataCase.REFERENCE && rhs.getDataCase() == Value.DataCase.LITERAL) { invert = false; columName = lhs.getReference().getColumnName(); value = rhs.getLiteral(); } else { // both are references or literals, handle as a condition 
filter, not range - return generateConditionFilter(Condition.newBuilder() - .setCompare(CompareCondition.newBuilder() + return generateConditionFilter(Condition.newBuilder().setCompare(CompareCondition.newBuilder() .setOperation(operation) .setLhs(lhs) .setRhs(rhs) - .build()) - .build()); + .build()).build()); } String valueString; switch (value.getValueCase()) { @@ -136,41 +130,38 @@ private SelectFilter generateNumericConditionFilter(CompareCondition.CompareOper break; case VALUE_NOT_SET: default: - throw new IllegalStateException( - "Range filter can't handle literal type " + value.getValueCase()); + throw new IllegalStateException("Range filter can't handle literal type " + value.getValueCase()); } - return new RangeConditionFilter(columName, rangeCondition(operation, invert), valueString, - null, - FormulaParserConfiguration.parser); + return new RangeConditionFilter(columName, rangeCondition(operation, invert), valueString, null, + FormulaParserConfiguration.parser); } - private io.deephaven.gui.table.filters.Condition rangeCondition( - CompareCondition.CompareOperation operation, boolean invert) { + private io.deephaven.gui.table.filters.Condition rangeCondition(CompareCondition.CompareOperation operation, + boolean invert) { switch (operation) { case LESS_THAN: return invert ? io.deephaven.gui.table.filters.Condition.GREATER_THAN_OR_EQUAL - : io.deephaven.gui.table.filters.Condition.LESS_THAN; + : io.deephaven.gui.table.filters.Condition.LESS_THAN; case LESS_THAN_OR_EQUAL: return invert ? io.deephaven.gui.table.filters.Condition.GREATER_THAN - : io.deephaven.gui.table.filters.Condition.LESS_THAN_OR_EQUAL; + : io.deephaven.gui.table.filters.Condition.LESS_THAN_OR_EQUAL; case GREATER_THAN: return invert ? io.deephaven.gui.table.filters.Condition.LESS_THAN_OR_EQUAL - : io.deephaven.gui.table.filters.Condition.GREATER_THAN; + : io.deephaven.gui.table.filters.Condition.GREATER_THAN; case GREATER_THAN_OR_EQUAL: return invert ? 
io.deephaven.gui.table.filters.Condition.LESS_THAN - : io.deephaven.gui.table.filters.Condition.GREATER_THAN_OR_EQUAL; + : io.deephaven.gui.table.filters.Condition.GREATER_THAN_OR_EQUAL; case EQUALS: case NOT_EQUALS: case UNRECOGNIZED: default: - throw new IllegalStateException( - "Can't handle compare operation " + operation + " in range operation"); + throw new IllegalStateException("Can't handle compare operation " + operation + " in range operation"); } } @Override - public SelectFilter onIn(Value target, List candidatesList, - CaseSensitivity caseSensitivity, MatchType matchType) { + public SelectFilter onIn(Value target, List candidatesList, CaseSensitivity caseSensitivity, + MatchType matchType) { assert target.getDataCase() == Value.DataCase.REFERENCE; Reference reference = target.getReference(); String[] values = new String[candidatesList.size()]; @@ -180,15 +171,13 @@ public SelectFilter onIn(Value target, List candidatesList, Literal literal = d.getLiteral(); // all other literals get created from a toString except DateTime if (literal.getValueCase() == Literal.ValueCase.NANO_TIME_VALUE) { - values[i] = - "'" + new DBDateTime(literal.getNanoTimeValue()).toString(DBTimeZone.TZ_DEFAULT) - + "'"; + values[i] = "'" + new DBDateTime(literal.getNanoTimeValue()).toString(DBTimeZone.TZ_DEFAULT) + "'"; } else { values[i] = FilterPrinter.printNoEscape(literal); } } - return new MatchFilter(caseSensitivity(caseSensitivity), matchType(matchType), - reference.getColumnName(), values); + return new MatchFilter(caseSensitivity(caseSensitivity), matchType(matchType), reference.getColumnName(), + values); } private MatchFilter.CaseSensitivity caseSensitivity(CaseSensitivity caseSensitivity) { @@ -199,8 +188,7 @@ private MatchFilter.CaseSensitivity caseSensitivity(CaseSensitivity caseSensitiv return MatchFilter.CaseSensitivity.IgnoreCase; case UNRECOGNIZED: default: - throw new IllegalStateException( - "Can't handle compare case sensitivity " + caseSensitivity); + throw 
new IllegalStateException("Can't handle compare case sensitivity " + caseSensitivity); } } @@ -219,39 +207,38 @@ private MatchFilter.MatchType matchType(MatchType matchType) { @Override public SelectFilter onIsNull(Reference reference) { return generateConditionFilter(Condition.newBuilder().setIsNull(IsNullCondition.newBuilder() - .setReference(reference) - .build()).build()); + .setReference(reference) + .build()).build()); } @Override public SelectFilter onInvoke(String method, Value target, List argumentsList) { return generateConditionFilter(Condition.newBuilder().setInvoke(InvokeCondition.newBuilder() - .setMethod(method) - .setTarget(target) - .addAllArguments(argumentsList) - .build()).build()); + .setMethod(method) + .setTarget(target) + .addAllArguments(argumentsList) + .build()).build()); } @Override - public SelectFilter onContains(Reference reference, String searchString, - CaseSensitivity caseSensitivity, MatchType matchType) { + public SelectFilter onContains(Reference reference, String searchString, CaseSensitivity caseSensitivity, + MatchType matchType) { return new StringContainsFilter(caseSensitivity(caseSensitivity), matchType(matchType), - reference.getColumnName(), searchString); + reference.getColumnName(), searchString); } @Override - public SelectFilter onMatches(Reference reference, String regex, - CaseSensitivity caseSensitivity, MatchType matchType) { - return new RegexFilter(caseSensitivity(caseSensitivity), matchType(matchType), - reference.getColumnName(), regex); + public SelectFilter onMatches(Reference reference, String regex, CaseSensitivity caseSensitivity, + MatchType matchType) { + return new RegexFilter(caseSensitivity(caseSensitivity), matchType(matchType), reference.getColumnName(), + regex); } @Override public SelectFilter onSearch(String searchString, List optionalReferencesList) { - final Set columnNames = optionalReferencesList.stream() - .map(Reference::getColumnName).collect(Collectors.toSet()); - SelectFilter[] 
selectFilters = - SelectFilterFactory.expandQuickFilter(table, searchString, columnNames); + final Set columnNames = + optionalReferencesList.stream().map(Reference::getColumnName).collect(Collectors.toSet()); + SelectFilter[] selectFilters = SelectFilterFactory.expandQuickFilter(table, searchString, columnNames); if (selectFilters.length == 0) { return SelectNoneFilter.INSTANCE; } diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/filter/FilterPrinter.java b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/filter/FilterPrinter.java index ad99af202be..9492ff62d6a 100644 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/filter/FilterPrinter.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/filter/FilterPrinter.java @@ -91,8 +91,8 @@ public Void onNot(Condition filter) { } @Override - public Void onComparison(CompareCondition.CompareOperation operation, - CaseSensitivity caseSensitivity, Value lhs, Value rhs) { + public Void onComparison(CompareCondition.CompareOperation operation, CaseSensitivity caseSensitivity, Value lhs, + Value rhs) { accept(lhs); switch (operation) { case LESS_THAN: @@ -128,8 +128,7 @@ public Void onComparison(CompareCondition.CompareOperation operation, } @Override - public Void onIn(Value target, List candidatesList, CaseSensitivity caseSensitivity, - MatchType matchType) { + public Void onIn(Value target, List candidatesList, CaseSensitivity caseSensitivity, MatchType matchType) { if (candidatesList.isEmpty()) { // should have already been pruned return null; @@ -176,8 +175,8 @@ public Void onInvoke(String method, Value target, List argumentsList) { } @Override - public Void onContains(Reference reference, String searchString, - CaseSensitivity caseSensitivity, MatchType matchType) { + public Void onContains(Reference reference, String searchString, CaseSensitivity caseSensitivity, + MatchType matchType) { if (matchType == MatchType.INVERTED) { sb.append("!"); } @@ -194,8 +193,7 @@ public 
Void onContains(Reference reference, String searchString, } @Override - public Void onMatches(Reference reference, String regex, CaseSensitivity caseSensitivity, - MatchType matchType) { + public Void onMatches(Reference reference, String regex, CaseSensitivity caseSensitivity, MatchType matchType) { if (matchType == MatchType.INVERTED) { sb.append("!"); } @@ -255,10 +253,8 @@ private void onLiteral(Literal literal) { } else if (Double.isNaN(doubleVal)) { sb.append("Double.NaN"); } else { - // Cast the double value to a long, then test to see if they actually compare to - // the same - // value - if they do not, we have some decimal value and need the entire double - // to be + // Cast the double value to a long, then test to see if they actually compare to the same + // value - if they do not, we have some decimal value and need the entire double to be // appended, if they do, then we just append the integer instead. long longVal = (long) doubleVal; if (longVal - doubleVal != 0) { diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/filter/FilterVisitor.java b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/filter/FilterVisitor.java index 6ccd0c367d5..994e411b281 100644 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/filter/FilterVisitor.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/filter/FilterVisitor.java @@ -11,21 +11,17 @@ public interface FilterVisitor { R onNot(Condition filter); - R onComparison(CompareCondition.CompareOperation operation, CaseSensitivity caseSensitivity, - Value lhs, Value rhs); + R onComparison(CompareCondition.CompareOperation operation, CaseSensitivity caseSensitivity, Value lhs, Value rhs); - R onIn(Value target, List candidatesList, CaseSensitivity caseSensitivity, - MatchType matchType); + R onIn(Value target, List candidatesList, CaseSensitivity caseSensitivity, MatchType matchType); R onIsNull(Reference reference); R onInvoke(String method, Value target, List argumentsList); - 
R onContains(Reference reference, String searchString, CaseSensitivity caseSensitivity, - MatchType matchType); + R onContains(Reference reference, String searchString, CaseSensitivity caseSensitivity, MatchType matchType); - R onMatches(Reference reference, String regex, CaseSensitivity caseSensitivity, - MatchType matchType); + R onMatches(Reference reference, String regex, CaseSensitivity caseSensitivity, MatchType matchType); R onSearch(String searchString, List optionalReferencesList); @@ -39,29 +35,26 @@ static R accept(Condition condition, FilterVisitor visitor) { return visitor.onNot(condition.getNot().getFilter()); case COMPARE: return visitor.onComparison(condition.getCompare().getOperation(), - condition.getCompare().getCaseSensitivity(), condition.getCompare().getLhs(), - condition.getCompare().getRhs()); + condition.getCompare().getCaseSensitivity(), condition.getCompare().getLhs(), + condition.getCompare().getRhs()); case IN: - return visitor.onIn(condition.getIn().getTarget(), - condition.getIn().getCandidatesList(), condition.getIn().getCaseSensitivity(), - condition.getIn().getMatchType()); + return visitor.onIn(condition.getIn().getTarget(), condition.getIn().getCandidatesList(), + condition.getIn().getCaseSensitivity(), condition.getIn().getMatchType()); case INVOKE: - return visitor.onInvoke(condition.getInvoke().getMethod(), - condition.getInvoke().getTarget(), condition.getInvoke().getArgumentsList()); + return visitor.onInvoke(condition.getInvoke().getMethod(), condition.getInvoke().getTarget(), + condition.getInvoke().getArgumentsList()); case IS_NULL: return visitor.onIsNull(condition.getIsNull().getReference()); case MATCHES: - return visitor.onMatches(condition.getMatches().getReference(), - condition.getMatches().getRegex(), condition.getMatches().getCaseSensitivity(), - condition.getMatches().getMatchType()); + return visitor.onMatches(condition.getMatches().getReference(), condition.getMatches().getRegex(), + 
condition.getMatches().getCaseSensitivity(), condition.getMatches().getMatchType()); case CONTAINS: return visitor.onContains(condition.getContains().getReference(), - condition.getContains().getSearchString(), - condition.getContains().getCaseSensitivity(), - condition.getContains().getMatchType()); + condition.getContains().getSearchString(), condition.getContains().getCaseSensitivity(), + condition.getContains().getMatchType()); case SEARCH: return visitor.onSearch(condition.getSearch().getSearchString(), - condition.getSearch().getOptionalReferencesList()); + condition.getSearch().getOptionalReferencesList()); case DATA_NOT_SET: default: throw new IllegalStateException("Unsupported condition " + condition); diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/filter/FlipNonReferenceMatchExpression.java b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/filter/FlipNonReferenceMatchExpression.java index e53332e2e47..2e95f9e91d8 100644 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/filter/FlipNonReferenceMatchExpression.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/filter/FlipNonReferenceMatchExpression.java @@ -10,26 +10,23 @@ import java.util.List; /** - * Enforces that all IN-type expressions have a reference on the left side and only literals on the - * right side. Any non-reference on the left or reference on the right will trigger the entire - * IN-type expression being replaced with an OR or AND, with a sub-IN for each expression on the - * right side. + * Enforces that all IN-type expressions have a reference on the left side and only literals on the right side. Any + * non-reference on the left or reference on the right will trigger the entire IN-type expression being replaced with an + * OR or AND, with a sub-IN for each expression on the right side. 
* * - * Examples: o ColumnA in 1, 2, 3 - left as-is o ColumnA in 1, 2, ColumnB - rewritten as (ColumnA in - * 1 OR ColumnA in 2 OR ColumnA in ColumnB) o 1 in 3, 4, 5 - will be rewritten as (3 in 1 OR 4 in 1 - * OR 5 in 1). This is a silly case, but we're not judging. At this step. o 1 in ColumnA, 4, 5 - - * will be rewritten as (ColumnA in 1 OR 4 in 1 OR 5 in 1) o 1 in ColumnA - will be rewritten as - * ColumnA in 1 o ColumnA in ColumnB - will be rewritten as ColumnB in ColumnA. Note that like the - * second example, this isn't productive on its own, but as a pair with a reference on the right, it - * will be noticed by {@link ConvertInvalidInExpressions}. + * Examples: o ColumnA in 1, 2, 3 - left as-is o ColumnA in 1, 2, ColumnB - rewritten as (ColumnA in 1 OR ColumnA in 2 + * OR ColumnA in ColumnB) o 1 in 3, 4, 5 - will be rewritten as (3 in 1 OR 4 in 1 OR 5 in 1). This is a silly case, but + * we're not judging. At this step. o 1 in ColumnA, 4, 5 - will be rewritten as (ColumnA in 1 OR 4 in 1 OR 5 in 1) o 1 + * in ColumnA - will be rewritten as ColumnA in 1 o ColumnA in ColumnB - will be rewritten as ColumnB in ColumnA. Note + * that like the second example, this isn't productive on its own, but as a pair with a reference on the right, it will + * be noticed by {@link ConvertInvalidInExpressions}. * - * It is assumed that some time after this step, related "in" expressions will be merged together, - * and that these one-off expressions will get checked later. + * It is assumed that some time after this step, related "in" expressions will be merged together, and that these + * one-off expressions will get checked later. 
*/ public class FlipNonReferenceMatchExpression extends AbstractNormalizeFilters { - private static final FlipNonReferenceMatchExpression INSTANCE = - new FlipNonReferenceMatchExpression(); + private static final FlipNonReferenceMatchExpression INSTANCE = new FlipNonReferenceMatchExpression(); public static Condition exec(Condition filter) { return FilterVisitor.accept(filter, FlipNonReferenceMatchExpression.INSTANCE); @@ -37,7 +34,7 @@ public static Condition exec(Condition filter) { @Override public Condition onIn(Value target, List candidatesList, CaseSensitivity caseSensitivity, - MatchType matchType) { + MatchType matchType) { // check each child - if we pass all checks we will give up boolean rewrite = target.hasLiteral(); if (!rewrite) { @@ -54,16 +51,16 @@ public Condition onIn(Value target, List candidatesList, CaseSensitivity if (candidatesList.size() == 1) { // make a single node to replace with, just swap the order of the two children - return NormalizeFilterUtil.doIn(candidatesList.get(0), - Collections.singletonList(target), caseSensitivity, matchType); + return NormalizeFilterUtil.doIn(candidatesList.get(0), Collections.singletonList(target), caseSensitivity, + matchType); } // make a AND/OR to join each of the new children with List replacementChildren = new ArrayList<>(); for (Value candidate : candidatesList) { - replacementChildren.add(NormalizeFilterUtil.doIn(candidate, - Collections.singletonList(target), caseSensitivity, matchType)); + replacementChildren.add( + NormalizeFilterUtil.doIn(candidate, Collections.singletonList(target), caseSensitivity, matchType)); } // wrap each of the new operations in an AND or OR as necessary diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/filter/MakeExpressionsNullSafe.java b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/filter/MakeExpressionsNullSafe.java index 82c692262f3..3b59a65e001 100644 --- 
a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/filter/MakeExpressionsNullSafe.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/filter/MakeExpressionsNullSafe.java @@ -8,12 +8,12 @@ import static io.deephaven.grpc_api.table.ops.filter.NormalizeFilterUtil.*; /** - * Rewrites logical expressions into an actual version that does what would be expected. Right now - * this is just equalsIgnoreCase and its negation, to support null values. + * Rewrites logical expressions into an actual version that does what would be expected. Right now this is just + * equalsIgnoreCase and its negation, to support null values. * - * Note that some of the branches here should not be needed (such as - * reference.equalsIgnoreCase("literal")) as they should be replaced by a MatchFilter instead, but - * this may not be fully implemented, so we are defensively leaving these cases in place for now. + * Note that some of the branches here should not be needed (such as reference.equalsIgnoreCase("literal")) as they + * should be replaced by a MatchFilter instead, but this may not be fully implemented, so we are defensively leaving + * these cases in place for now. 
*/ public class MakeExpressionsNullSafe extends AbstractNormalizeFilters { private static final MakeExpressionsNullSafe INSTANCE = new MakeExpressionsNullSafe(); @@ -23,8 +23,8 @@ public static Condition exec(Condition filter) { } @Override - public Condition onComparison(CompareCondition.CompareOperation operation, - CaseSensitivity caseSensitivity, Value lhs, Value rhs) { + public Condition onComparison(CompareCondition.CompareOperation operation, CaseSensitivity caseSensitivity, + Value lhs, Value rhs) { // only apply to ==/!= operations that are case insensitive if (caseSensitivity == CaseSensitivity.MATCH_CASE) { return super.onComparison(operation, caseSensitivity, lhs, rhs); @@ -33,10 +33,8 @@ public Condition onComparison(CompareCondition.CompareOperation operation, throw new IllegalStateException("Unrecognized case sensitivity " + caseSensitivity); } - // if lhs is not a reference, we aren't worried about null, can emit the safe call without a - // null check - Condition equalsIgnoreCase = - doInvoke("equalsIgnoreCase", lhs, Collections.singletonList(rhs)); + // if lhs is not a reference, we aren't worried about null, can emit the safe call without a null check + Condition equalsIgnoreCase = doInvoke("equalsIgnoreCase", lhs, Collections.singletonList(rhs)); if (lhs.hasLiteral()) { // the left side of the compare is a literal, so we emit something like // "foo".equalsIgnoreCase(right-hand-side) @@ -46,22 +44,20 @@ public Condition onComparison(CompareCondition.CompareOperation operation, Reference lhsRef = lhs.getReference(); Condition lhsNullCheck = doAnd(Arrays.asList( - doInvert(doIsNull(lhsRef)), - equalsIgnoreCase)); + doInvert(doIsNull(lhsRef)), + equalsIgnoreCase)); if (rhs.hasLiteral()) { - // if rhs isn't a reference, it cannot be null, and we don't need to worry about the - // second - // null check, so we emit something like !isNull(foo) && - // foo.equalsIgnoreCase(right-hand-side) + // if rhs isn't a reference, it cannot be null, and we don't 
need to worry about the second + // null check, so we emit something like !isNull(foo) && foo.equalsIgnoreCase(right-hand-side) return lhsNullCheck; } // both sides could be null, so add a check where either one could be null // (foo == null && bar == null) || (foo != null && foo.equalsIgnoreCase(bar)) return doOr(Arrays.asList( - doAnd(Arrays.asList( - doIsNull(lhsRef), - doIsNull(rhs.getReference()))), - lhsNullCheck)); + doAnd(Arrays.asList( + doIsNull(lhsRef), + doIsNull(rhs.getReference()))), + lhsNullCheck)); } } diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/filter/MergeNestedBinaryOperations.java b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/filter/MergeNestedBinaryOperations.java index c8b96200084..95084694f6b 100644 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/filter/MergeNestedBinaryOperations.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/filter/MergeNestedBinaryOperations.java @@ -8,11 +8,10 @@ import java.util.function.Predicate; /** - * Any AND nested within another AND or OR nested within another OR should be flattened into just a - * single level. + * Any AND nested within another AND or OR nested within another OR should be flattened into just a single level. * - * This should be run after NOTs are distributed (so that (A AND B AND !(C OR D)) is first - * normalized to (A AND B AND (!C AND !D))). + * This should be run after NOTs are distributed (so that (A AND B AND !(C OR D)) is first normalized to (A AND B AND + * (!C AND !D))). 
*/ public class MergeNestedBinaryOperations extends AbstractNormalizeFilters { private static final MergeNestedBinaryOperations INSTANCE = new MergeNestedBinaryOperations(); @@ -24,16 +23,15 @@ public static Condition exec(Condition filter) { @Override public Condition onAnd(List filtersList) { List visited = new ArrayList<>(); - // walk the descendents, recursing into AND nodes, but only copying non-AND nodes into our - // result list + // walk the descendents, recursing into AND nodes, but only copying non-AND nodes into our result list collect(filtersList, visited, Condition::hasAnd, c -> c.getAnd().getFiltersList()); // before actually wrapping, visit children in case there are ANDs or ORs further nested return NormalizeFilterUtil.doAnd(visited, this); } - private void collect(List filtersList, List visited, - Predicate matches, Function> getChildren) { + private void collect(List filtersList, List visited, Predicate matches, + Function> getChildren) { for (Condition condition : filtersList) { if (matches.test(condition)) { // recurse, find more @@ -47,8 +45,7 @@ private void collect(List filtersList, List visited, @Override public Condition onOr(List filtersList) { List visited = new ArrayList<>(); - // walk the descendents, recursing into OR nodes, but only copying non-OR nodes into our - // result list + // walk the descendents, recursing into OR nodes, but only copying non-OR nodes into our result list collect(filtersList, visited, Condition::hasOr, c -> c.getOr().getFiltersList()); // before actually wrapping, visit children in case there are ANDs or ORs further nested diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/filter/NormalizeFilterUtil.java b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/filter/NormalizeFilterUtil.java index 47822c5cad2..fda37f95443 100644 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/filter/NormalizeFilterUtil.java +++ 
b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/filter/NormalizeFilterUtil.java @@ -9,16 +9,14 @@ * Tools to create filter conditions */ public class NormalizeFilterUtil { - private static List visitChildren(List children, - FilterVisitor visitor) { - return children.stream().map(c -> FilterVisitor.accept(c, visitor)) - .collect(Collectors.toList()); + private static List visitChildren(List children, FilterVisitor visitor) { + return children.stream().map(c -> FilterVisitor.accept(c, visitor)).collect(Collectors.toList()); } public static Condition doAnd(List filtersList) { return Condition.newBuilder().setAnd(AndCondition.newBuilder() - .addAllFilters(filtersList) - .build()).build(); + .addAllFilters(filtersList) + .build()).build(); } public static Condition doAnd(List filtersList, FilterVisitor visitor) { @@ -27,8 +25,8 @@ public static Condition doAnd(List filtersList, FilterVisitor filtersList) { return Condition.newBuilder().setOr(OrCondition.newBuilder() - .addAllFilters(filtersList) - .build()).build(); + .addAllFilters(filtersList) + .build()).build(); } public static Condition doOr(List filtersList, FilterVisitor visitor) { @@ -37,8 +35,8 @@ public static Condition doOr(List filtersList, FilterVisitor visitor) { @@ -48,68 +46,68 @@ public static Condition doInvert(Condition condition, FilterVisitor v public static Condition doNot(Condition filter, FilterVisitor visitor) { Condition replacement = FilterVisitor.accept(filter, visitor); return Condition.newBuilder().setNot(NotCondition.newBuilder() - .setFilter(replacement) - .build()).build(); + .setFilter(replacement) + .build()).build(); } - public static Condition doComparison(CompareCondition.CompareOperation operation, - CaseSensitivity caseSensitivity, Value lhs, Value rhs) { + public static Condition doComparison(CompareCondition.CompareOperation operation, CaseSensitivity caseSensitivity, + Value lhs, Value rhs) { return Condition.newBuilder().setCompare(CompareCondition.newBuilder() - 
.setOperation(operation) - .setCaseSensitivity(caseSensitivity) - .setLhs(lhs) - .setRhs(rhs) - .build()).build(); + .setOperation(operation) + .setCaseSensitivity(caseSensitivity) + .setLhs(lhs) + .setRhs(rhs) + .build()).build(); } - public static Condition doIn(Value target, List candidatesList, - CaseSensitivity caseSensitivity, MatchType matchType) { + public static Condition doIn(Value target, List candidatesList, CaseSensitivity caseSensitivity, + MatchType matchType) { return Condition.newBuilder().setIn(InCondition.newBuilder() - .setTarget(target) - .addAllCandidates(candidatesList) - .setCaseSensitivity(caseSensitivity) - .setMatchType(matchType) - .build()).build(); + .setTarget(target) + .addAllCandidates(candidatesList) + .setCaseSensitivity(caseSensitivity) + .setMatchType(matchType) + .build()).build(); } public static Condition doIsNull(Reference reference) { return Condition.newBuilder().setIsNull(IsNullCondition.newBuilder() - .setReference(reference) - .build()).build(); + .setReference(reference) + .build()).build(); } public static Condition doInvoke(String method, Value target, List argumentsList) { return Condition.newBuilder().setInvoke(InvokeCondition.newBuilder() - .setMethod(method) - .setTarget(target) - .addAllArguments(argumentsList) - .build()).build(); + .setMethod(method) + .setTarget(target) + .addAllArguments(argumentsList) + .build()).build(); } - public static Condition doContains(Reference reference, String searchString, - CaseSensitivity caseSensitivity, MatchType matchType) { + public static Condition doContains(Reference reference, String searchString, CaseSensitivity caseSensitivity, + MatchType matchType) { return Condition.newBuilder().setContains(ContainsCondition.newBuilder() - .setReference(reference) - .setSearchString(searchString) - .setCaseSensitivity(caseSensitivity) - .setMatchType(matchType) - .build()).build(); + .setReference(reference) + .setSearchString(searchString) + .setCaseSensitivity(caseSensitivity) + 
.setMatchType(matchType) + .build()).build(); } - public static Condition doMatches(Reference reference, String regex, - CaseSensitivity caseSensitivity, MatchType matchType) { + public static Condition doMatches(Reference reference, String regex, CaseSensitivity caseSensitivity, + MatchType matchType) { return Condition.newBuilder().setMatches(MatchesCondition.newBuilder() - .setReference(reference) - .setRegex(regex) - .setCaseSensitivity(caseSensitivity) - .setMatchType(matchType) - .build()).build(); + .setReference(reference) + .setRegex(regex) + .setCaseSensitivity(caseSensitivity) + .setMatchType(matchType) + .build()).build(); } public static Condition doSearch(String searchString, List optionalReferencesList) { return Condition.newBuilder().setSearch(SearchCondition.newBuilder() - .setSearchString(searchString) - .addAllOptionalReferences(optionalReferencesList) - .build()).build(); + .setSearchString(searchString) + .addAllOptionalReferences(optionalReferencesList) + .build()).build(); } } diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/filter/NormalizeNots.java b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/filter/NormalizeNots.java index ea91593f5a0..4994b3a0645 100644 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/filter/NormalizeNots.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/table/ops/filter/NormalizeNots.java @@ -6,10 +6,9 @@ import java.util.List; /** - * Normalizes expressions, with the goal of removing as many as possible and simplifying the - * expression tree. This classes passes through all operations to NormalizeUtil *except* the Not - * operation, which it rewrites by using the NormalizeInvertedFilters visitor defined later in this - * file. + * Normalizes expressions, with the goal of removing as many as possible and simplifying the expression tree. 
This + * classes passes through all operations to NormalizeUtil *except* the Not operation, which it rewrites by using the + * NormalizeInvertedFilters visitor defined later in this file. */ public class NormalizeNots extends AbstractNormalizeFilters { private static final NormalizeNots INSTANCE = new NormalizeNots(); @@ -26,17 +25,15 @@ public Condition onNot(Condition filter) { } /** - * Normalizes *and inverts* expressions, with the goal of removing as many as possible and - * simplifying the expression tree. This class rewrites and, or, comparisons, and matches, and - * nots: + * Normalizes *and inverts* expressions, with the goal of removing as many as possible and simplifying the + * expression tree. This class rewrites and, or, comparisons, and matches, and nots: *
      *
    • Distribute NOTs to children of AND/OR expressions, via DeMorgan's law.
    • *
    • {@code NOT(NOT(A))} is replaced with A.
    • - *
    • Replace any operation with its opposite, if any. For example, {@code NOT(A >= B)} is - * replaced with {@code A < B}, and likewise for all the other inequality operators, {@code EQ}, - * and {@code IN}.
    • - *
    • Other operations {@code IS_NULL}, {@code INVOKE}, {@code SEARCH}, {@code CONTAINS} are - * left as-is, wrapped wrapped with a {@code NOT}.
    • + *
    • Replace any operation with its opposite, if any. For example, {@code NOT(A >= B)} is replaced with + * {@code A < B}, and likewise for all the other inequality operators, {@code EQ}, and {@code IN}.
    • + *
    • Other operations {@code IS_NULL}, {@code INVOKE}, {@code SEARCH}, {@code CONTAINS} are left as-is, wrapped + * wrapped with a {@code NOT}.
    • *
    */ static class NormalizeInvertedFilters implements FilterVisitor { @@ -64,8 +61,8 @@ public Condition onNot(Condition filter) { } @Override - public Condition onComparison(CompareCondition.CompareOperation operation, - CaseSensitivity caseSensitivity, Value lhs, Value rhs) { + public Condition onComparison(CompareCondition.CompareOperation operation, CaseSensitivity caseSensitivity, + Value lhs, Value rhs) { // Invert the condition switch (operation) { case LESS_THAN: @@ -94,10 +91,9 @@ public Condition onComparison(CompareCondition.CompareOperation operation, } @Override - public Condition onIn(Value target, List candidatesList, - CaseSensitivity caseSensitivity, MatchType matchType) { - return NormalizeFilterUtil.doIn(target, candidatesList, caseSensitivity, - invertMatchType(matchType)); + public Condition onIn(Value target, List candidatesList, CaseSensitivity caseSensitivity, + MatchType matchType) { + return NormalizeFilterUtil.doIn(target, candidatesList, caseSensitivity, invertMatchType(matchType)); } @NotNull @@ -117,17 +113,15 @@ private static MatchType invertMatchType(MatchType matchType) { } @Override - public Condition onContains(Reference reference, String searchString, - CaseSensitivity caseSensitivity, MatchType matchType) { - return NormalizeFilterUtil.doContains(reference, searchString, caseSensitivity, - invertMatchType(matchType)); + public Condition onContains(Reference reference, String searchString, CaseSensitivity caseSensitivity, + MatchType matchType) { + return NormalizeFilterUtil.doContains(reference, searchString, caseSensitivity, invertMatchType(matchType)); } @Override - public Condition onMatches(Reference reference, String regex, - CaseSensitivity caseSensitivity, MatchType matchType) { - return NormalizeFilterUtil.doMatches(reference, regex, caseSensitivity, - invertMatchType(matchType)); + public Condition onMatches(Reference reference, String regex, CaseSensitivity caseSensitivity, + MatchType matchType) { + return 
NormalizeFilterUtil.doMatches(reference, regex, caseSensitivity, invertMatchType(matchType)); } @Override @@ -139,8 +133,7 @@ public Condition onIsNull(Reference reference) { @Override public Condition onInvoke(String method, Value target, List argumentsList) { // This operation doesn't have a corresponding NOT, we have to wrap them as before - return NormalizeFilterUtil - .doInvert(NormalizeFilterUtil.doInvoke(method, target, argumentsList)); + return NormalizeFilterUtil.doInvert(NormalizeFilterUtil.doInvoke(method, target, argumentsList)); } @Override diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/table/validation/ColumnExpressionValidator.java b/grpc-api/src/main/java/io/deephaven/grpc_api/table/validation/ColumnExpressionValidator.java index eeedcae3e89..505440e1db1 100644 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/table/validation/ColumnExpressionValidator.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/table/validation/ColumnExpressionValidator.java @@ -33,11 +33,11 @@ import java.util.stream.Stream; /** - * Validates a column expression coming from the web api, to ensure that the included code will use - * the limited supported API, and no use of `new`. + * Validates a column expression coming from the web api, to ensure that the included code will use the limited + * supported API, and no use of `new`. * - * This must be an early pass at the AST on the server, as the server's stricter validation will not - * function without it. + * This must be an early pass at the AST on the server, as the server's stricter validation will not function without + * it. 
*/ public class ColumnExpressionValidator extends GenericVisitorAdapter { private static final Set whitelistedStaticMethods; @@ -45,48 +45,43 @@ public class ColumnExpressionValidator extends GenericVisitorAdapter static { // list all static methods in supported util classes: whitelistedStaticMethods = Stream - .of( - DBLanguageFunctionUtil.class, - GroovyStaticImports.class, - DBTimeUtils.class, - DBColorUtilImpl.class) - .map(Class::getDeclaredMethods) - .flatMap(Arrays::stream) - .filter(m -> Modifier.isStatic(m.getModifiers()) && Modifier.isPublic(m.getModifiers())) - .map(Method::getName) - .collect( - Collectors.collectingAndThen(Collectors.toSet(), Collections::unmodifiableSet)); + .of( + DBLanguageFunctionUtil.class, + GroovyStaticImports.class, + DBTimeUtils.class, + DBColorUtilImpl.class) + .map(Class::getDeclaredMethods) + .flatMap(Arrays::stream) + .filter(m -> Modifier.isStatic(m.getModifiers()) && Modifier.isPublic(m.getModifiers())) + .map(Method::getName) + .collect(Collectors.collectingAndThen(Collectors.toSet(), Collections::unmodifiableSet)); // list all non-inherited instance methods in supported data classes: // DBDateTime // String whitelistedInstanceMethods = Stream - .of( - DBDateTime.class, - String.class) - .map(Class::getDeclaredMethods) - .flatMap(Arrays::stream) - .filter(m -> !Modifier.isStatic(m.getModifiers())) - .map(Method::getName) - .collect( - Collectors.collectingAndThen(Collectors.toSet(), Collections::unmodifiableSet)); + .of( + DBDateTime.class, + String.class) + .map(Class::getDeclaredMethods) + .flatMap(Arrays::stream) + .filter(m -> !Modifier.isStatic(m.getModifiers())) + .map(Method::getName) + .collect(Collectors.collectingAndThen(Collectors.toSet(), Collections::unmodifiableSet)); } - public static SelectFilter[] validateSelectFilters(final String[] conditionalExpressions, - final Table table) { - final SelectFilter[] selectFilters = - SelectFilterFactory.getExpressions(conditionalExpressions); + public static 
SelectFilter[] validateSelectFilters(final String[] conditionalExpressions, final Table table) { + final SelectFilter[] selectFilters = SelectFilterFactory.getExpressions(conditionalExpressions); final List dummyAssignments = new ArrayList<>(); for (int ii = 0; ii < selectFilters.length; ++ii) { final SelectFilter sf = selectFilters[ii]; if (sf instanceof ConditionFilter) { - dummyAssignments.add(String.format("__boolean_placeholder_%d__ = (%s)", ii, - conditionalExpressions[ii])); + dummyAssignments + .add(String.format("__boolean_placeholder_%d__ = (%s)", ii, conditionalExpressions[ii])); } } if (!dummyAssignments.isEmpty()) { - final String[] daArray = - dummyAssignments.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY); + final String[] daArray = dummyAssignments.toArray(CollectionUtil.ZERO_LENGTH_STRING_ARRAY); final SelectColumn[] selectColumns = SelectColumnFactory.getExpressions(daArray); validateColumnExpressions(selectColumns, daArray, table); } @@ -94,16 +89,14 @@ public static SelectFilter[] validateSelectFilters(final String[] conditionalExp } public static void validateColumnExpressions(final SelectColumn[] selectColumns, - final String[] originalExpressions, - final Table table) { + final String[] originalExpressions, + final Table table) { assert (selectColumns.length == originalExpressions.length); final SelectValidationResult validationResult = table.validateSelect(selectColumns); SelectAndViewAnalyzer top = validationResult.getAnalyzer(); - // We need the cloned columns because the SelectAndViewAnalyzer has left state behind in - // them - // (namely the "realColumn" of the SwitchColumn) that we want to look at in - // validateSelectColumnHelper. + // We need the cloned columns because the SelectAndViewAnalyzer has left state behind in them + // (namely the "realColumn" of the SwitchColumn) that we want to look at in validateSelectColumnHelper. 
final SelectColumn[] clonedColumns = validationResult.getClonedColumns(); // Flatten and reverse the analyzer stack final List analyzers = new ArrayList<>(); @@ -118,16 +111,15 @@ public static void validateColumnExpressions(final SelectColumn[] selectColumns, final Map availableColumns = new LinkedHashMap<>(); for (int ii = 0; ii < clonedColumns.length; ++ii) { analyzers.get(ii).updateColumnDefinitionsFromTopLayer(availableColumns); - validateSelectColumnHelper(clonedColumns[ii], originalExpressions[ii], availableColumns, - table); + validateSelectColumnHelper(clonedColumns[ii], originalExpressions[ii], availableColumns, table); } } @SuppressWarnings("rawtypes") private static void validateSelectColumnHelper(SelectColumn selectColumn, - final String originalExpression, - final Map availableColumns, - final Table table) { + final String originalExpression, + final Map availableColumns, + final Table table) { while (selectColumn instanceof SwitchColumn) { selectColumn = ((SwitchColumn) selectColumn).getRealColumn(); } @@ -145,12 +137,10 @@ private static void validateSelectColumnHelper(SelectColumn selectColumn, final DBTimeUtils.Result timeConversionResult; try { timeConversionResult = DBTimeUtils.convertExpression(formulaString); - compiledFormula = - FormulaAnalyzer.getCompiledFormula(availableColumns, timeConversionResult, null); + compiledFormula = FormulaAnalyzer.getCompiledFormula(availableColumns, timeConversionResult, null); } catch (final Exception e) { // in theory not possible, since we already parsed it once - throw new IllegalStateException( - "Error occurred while re-compiling formula for whitelist", e); + throw new IllegalStateException("Error occurred while re-compiling formula for whitelist", e); } final boolean isAddOnly = table instanceof BaseTable && ((BaseTable) table).isAddOnly(); if (table.isLive() && !(isAddOnly && table.isFlat())) { @@ -159,10 +149,8 @@ private static void validateSelectColumnHelper(SelectColumn selectColumn, 
disallowedVariables.add("ii"); // TODO walk QueryScope.getInstance() and remove them too? - if (compiledFormula.getVariablesUsed().stream() - .anyMatch(disallowedVariables::contains)) { - throw new IllegalStateException( - "Formulas involving live tables are not permitted to use i or ii"); + if (compiledFormula.getVariablesUsed().stream().anyMatch(disallowedVariables::contains)) { + throw new IllegalStateException("Formulas involving live tables are not permitted to use i or ii"); } } @@ -184,8 +172,7 @@ private static void validateInvocations(String expression) { } } catch (final ParseException | TokenMgrError e) { // in theory not possible, since we already parsed once - throw new IllegalStateException("Error occurred while re-parsing formula for whitelist", - e); + throw new IllegalStateException("Error occurred while re-parsing formula for whitelist", e); } // now that we finally have the AST... @@ -195,20 +182,16 @@ private static void validateInvocations(String expression) { @Override public Void visit(final MethodCallExpr n, final Void arg) { - // verify that this is a call on a supported instance, or is one of the supported static - // methods + // verify that this is a call on a supported instance, or is one of the supported static methods if (n.getScope() == null) { if (!whitelistedStaticMethods.contains(n.getName())) { - throw new IllegalStateException( - "User expressions are not permitted to use method " + n.getName()); + throw new IllegalStateException("User expressions are not permitted to use method " + n.getName()); } } else { - // note that it is possible that there is a scoped static method in this block, and that - // the + // note that it is possible that there is a scoped static method in this block, and that the // user unnecessarily specified the classname TODO handle this if it becomes an issue if (!whitelistedInstanceMethods.contains(n.getName())) { - throw new IllegalStateException( - "User expressions are not permitted to use method " + 
n.getName()); + throw new IllegalStateException("User expressions are not permitted to use method " + n.getName()); } } return super.visit(n, arg); diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/util/BrowserStream.java b/grpc-api/src/main/java/io/deephaven/grpc_api/util/BrowserStream.java index 16e63eeab1d..7ad07402df5 100644 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/util/BrowserStream.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/util/BrowserStream.java @@ -63,11 +63,9 @@ public interface Marshaller { /** most recent queued msg for when mode is Mode.MOST_RECENT */ private T queuedMessage; - public BrowserStream(final Mode mode, final SessionState session, - final Marshaller marshaller) { + public BrowserStream(final Mode mode, final SessionState session, final Marshaller marshaller) { this.mode = mode; - this.logIdentity = - "BrowserStream(" + Integer.toHexString(System.identityHashCode(this)) + "): "; + this.logIdentity = "BrowserStream(" + Integer.toHexString(System.identityHashCode(this)) + "): "; this.session = session; this.marshaller = marshaller; @@ -77,16 +75,14 @@ public BrowserStream(final Mode mode, final SessionState session, public void onMessageReceived(T message) { synchronized (this) { if (halfClosedSeq != -1 && message.sequence > halfClosedSeq) { - throw GrpcUtil.statusRuntimeException(Code.ABORTED, - "Sequence sent after half close: closed seq=" + halfClosedSeq + " recv seq=" - + message.sequence); + throw GrpcUtil.statusRuntimeException(Code.ABORTED, "Sequence sent after half close: closed seq=" + + halfClosedSeq + " recv seq=" + message.sequence); } if (message.isHalfClosed) { if (halfClosedSeq != -1) { throw GrpcUtil.statusRuntimeException(Code.INVALID_ARGUMENT, - "Already half closed: closed seq=" + halfClosedSeq + " recv seq=" - + message.sequence); + "Already half closed: closed seq=" + halfClosedSeq + " recv seq=" + message.sequence); } halfClosedSeq = message.sequence; } @@ -94,43 +90,39 @@ public void 
onMessageReceived(T message) { if (mode == Mode.IN_ORDER) { if (message.sequence < nextSeq) { throw GrpcUtil.statusRuntimeException(Code.OUT_OF_RANGE, - "Duplicate sequence sent: next seq=" + nextSeq + " recv seq=" - + message.sequence); + "Duplicate sequence sent: next seq=" + nextSeq + " recv seq=" + message.sequence); } boolean queueMsg = false; if (processingMessage) { queueMsg = true; log.debug().append(logIdentity).append("queueing; next seq=").append(nextSeq) - .append(" recv seq=").append(message.sequence).endl(); + .append(" recv seq=").append(message.sequence).endl(); } else if (message.sequence != nextSeq) { queueMsg = true; log.debug().append(logIdentity).append("queueing; waiting seq=").append(nextSeq) - .append(" recv seq=").append(message.sequence).endl(); + .append(" recv seq=").append(message.sequence).endl(); } if (queueMsg) { if (pendingSeq == null) { - pendingSeq = new RAPriQueue<>(1, MessageInfoQueueAdapter.getInstance(), - MessageBase.class); + pendingSeq = new RAPriQueue<>(1, MessageInfoQueueAdapter.getInstance(), MessageBase.class); } pendingSeq.enter(message); return; } } else { // Mode.MOST_RECENT if (message.sequence < nextSeq - || (message.sequence == nextSeq && processingMessage) // checks for duplicate - || (queuedMessage != null && message.sequence < queuedMessage.sequence)) { + || (message.sequence == nextSeq && processingMessage) // checks for duplicate + || (queuedMessage != null && message.sequence < queuedMessage.sequence)) { // this message is too old log.debug().append(logIdentity).append("dropping; next seq=").append(nextSeq) - .append(" queued seq=") - .append(queuedMessage != null ? queuedMessage.sequence : -1) - .append(" recv seq=").append(message.sequence).endl(); + .append(" queued seq=").append(queuedMessage != null ? 
queuedMessage.sequence : -1) + .append(" recv seq=").append(message.sequence).endl(); return; } // is most recent msg seen if (processingMessage) { - log.debug().append(logIdentity).append("queueing; processing seq=") - .append(nextSeq) - .append(" recv seq=").append(message.sequence).endl(); + log.debug().append(logIdentity).append("queueing; processing seq=").append(nextSeq) + .append(" recv seq=").append(message.sequence).endl(); queuedMessage = message; return; } @@ -170,8 +162,7 @@ public void onMessageReceived(T message) { queuedMessage = null; } - log.debug().append(logIdentity).append("processing queued seq=") - .append(message.sequence).endl(); + log.debug().append(logIdentity).append("processing queued seq=").append(message.sequence).endl(); nextSeq = message.sequence + 1; } } while (true); @@ -180,8 +171,7 @@ public void onMessageReceived(T message) { public void onError(final RuntimeException e) { if (session.removeOnCloseCallback(this) != null) { - log.error().append(logIdentity) - .append("closing browser stream on unexpected exception: ").append(e).endl(); + log.error().append(logIdentity).append("closing browser stream on unexpected exception: ").append(e).endl(); this.marshaller.onError(e); } } diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/util/GrpcUtil.java b/grpc-api/src/main/java/io/deephaven/grpc_api/util/GrpcUtil.java index cae5af2ce72..64a94345fa1 100644 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/util/GrpcUtil.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/util/GrpcUtil.java @@ -18,8 +18,8 @@ public class GrpcUtil { private static Logger log = LoggerFactory.getLogger(GrpcUtil.class); - public static void rpcWrapper(final Logger log, - final StreamObserver response, final FunctionalInterfaces.ThrowingRunnable lambda) { + public static void rpcWrapper(final Logger log, final StreamObserver response, + final FunctionalInterfaces.ThrowingRunnable lambda) { try (final SafeCloseable ignored = LivenessScopeStack.open()) 
{ lambda.run(); } catch (final StatusRuntimeException err) { @@ -34,8 +34,7 @@ public static void rpcWrapper(final Logger log, } } - public static T rpcWrapper(final Logger log, final StreamObserver response, - final Callable lambda) { + public static T rpcWrapper(final Logger log, final StreamObserver response, final Callable lambda) { try (final SafeCloseable ignored = LivenessScopeStack.open()) { return lambda.call(); } catch (final StatusRuntimeException err) { @@ -55,37 +54,32 @@ public static StatusRuntimeException securelyWrapError(final Logger log, final T } public static StatusRuntimeException securelyWrapError(final Logger log, final Throwable err, - final Code statusCode) { + final Code statusCode) { if (err instanceof StatusRuntimeException) { return (StatusRuntimeException) err; } final UUID errorId = UUID.randomUUID(); - log.error().append("Internal Error '").append(errorId.toString()).append("' ").append(err) - .endl(); + log.error().append("Internal Error '").append(errorId.toString()).append("' ").append(err).endl(); return statusRuntimeException(statusCode, "Details Logged w/ID '" + errorId + "'"); } - public static StatusRuntimeException statusRuntimeException(final Code statusCode, - final String details) { + public static StatusRuntimeException statusRuntimeException(final Code statusCode, final String details) { return Exceptions.statusRuntimeException(statusCode, details); } /** - * This helper allows one to propagate the onError/onComplete calls through to the delegate, - * while applying the provided mapping function to the original input objects. The mapper may - * return null to skip sending a message to the delegated stream observer. + * This helper allows one to propagate the onError/onComplete calls through to the delegate, while applying the + * provided mapping function to the original input objects. The mapper may return null to skip sending a message to + * the delegated stream observer. 
* * @param delegate the stream observer to ultimately receive this message - * @param mapper the function that maps from input objects to the objects the stream observer - * expects + * @param mapper the function that maps from input objects to the objects the stream observer expects * @param input type * @param output type - * @return a new stream observer that maps from T to V before delivering to - * {@code delegate::onNext} + * @return a new stream observer that maps from T to V before delivering to {@code delegate::onNext} */ - public static StreamObserver mapOnNext(final StreamObserver delegate, - final Function mapper) { + public static StreamObserver mapOnNext(final StreamObserver delegate, final Function mapper) { return new StreamObserver() { @Override public void onNext(final T value) { @@ -108,13 +102,11 @@ public void onCompleted() { } /** - * Wraps the provided runner in a try/catch block to minimize damage caused by a failing - * externally supplied helper. + * Wraps the provided runner in a try/catch block to minimize damage caused by a failing externally supplied helper. * * @param runner the runnable to execute safely */ - public static void safelyExecute( - final FunctionalInterfaces.ThrowingRunnable runner) { + public static void safelyExecute(final FunctionalInterfaces.ThrowingRunnable runner) { try { runner.run(); } catch (final Exception err) { @@ -123,13 +115,12 @@ public static void safelyExecute( } /** - * Wraps the provided runner in a try/catch block to minimize damage caused by a failing - * externally supplied helper. + * Wraps the provided runner in a try/catch block to minimize damage caused by a failing externally supplied helper. 
* * @param runner the runnable to execute safely */ public static void safelyExecuteLocked(final Object lockedObject, - final FunctionalInterfaces.ThrowingRunnable runner) { + final FunctionalInterfaces.ThrowingRunnable runner) { try { // noinspection SynchronizationOnLocalVariableOrMethodParameter synchronized (lockedObject) { @@ -141,11 +132,9 @@ public static void safelyExecuteLocked(final Object lockedObject, } /** - * Writes an error to the observer in a try/catch block to minimize damage caused by failing - * observer call. + * Writes an error to the observer in a try/catch block to minimize damage caused by failing observer call. */ - public static void safelyError(final StreamObserver observer, final Code statusCode, - final String msg) { + public static void safelyError(final StreamObserver observer, final Code statusCode, final String msg) { safelyExecute(() -> observer.onError(GrpcUtil.statusRuntimeException(statusCode, msg))); } } diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/util/PassthroughInputStreamMarshaller.java b/grpc-api/src/main/java/io/deephaven/grpc_api/util/PassthroughInputStreamMarshaller.java index f66226e04c7..7bb6041823e 100644 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/util/PassthroughInputStreamMarshaller.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/util/PassthroughInputStreamMarshaller.java @@ -5,8 +5,7 @@ import java.io.InputStream; public class PassthroughInputStreamMarshaller implements MethodDescriptor.Marshaller { - public static final PassthroughInputStreamMarshaller INSTANCE = - new PassthroughInputStreamMarshaller(); + public static final PassthroughInputStreamMarshaller INSTANCE = new PassthroughInputStreamMarshaller(); @Override public InputStream stream(final InputStream inputStream) { diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/util/Scheduler.java b/grpc-api/src/main/java/io/deephaven/grpc_api/util/Scheduler.java index c453ba17b9f..f0959fae809 100644 --- 
a/grpc-api/src/main/java/io/deephaven/grpc_api/util/Scheduler.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/util/Scheduler.java @@ -49,8 +49,7 @@ class DelegatingImpl implements Scheduler { private final ExecutorService serialDelegate; private final ScheduledExecutorService concurrentDelegate; - public DelegatingImpl(final ExecutorService serialExecutor, - final ScheduledExecutorService concurrentExecutor) { + public DelegatingImpl(final ExecutorService serialExecutor, final ScheduledExecutorService concurrentExecutor) { this.serialDelegate = serialExecutor; this.concurrentDelegate = concurrentExecutor; } @@ -61,8 +60,7 @@ public DBDateTime currentTime() { } @Override - public void runAtTime(@NotNull final DBDateTime absoluteTime, - final @NotNull Runnable command) { + public void runAtTime(@NotNull final DBDateTime absoluteTime, final @NotNull Runnable command) { runAfterDelay(absoluteTime.getMillis() - currentTime().getMillis(), command); } diff --git a/grpc-api/src/main/java/io/deephaven/grpc_api/util/UnaryInputStreamMarshaller.java b/grpc-api/src/main/java/io/deephaven/grpc_api/util/UnaryInputStreamMarshaller.java index cfeb08994af..13f3395f41b 100644 --- a/grpc-api/src/main/java/io/deephaven/grpc_api/util/UnaryInputStreamMarshaller.java +++ b/grpc-api/src/main/java/io/deephaven/grpc_api/util/UnaryInputStreamMarshaller.java @@ -18,7 +18,7 @@ public InputStream stream(InputStream value) { @Override public InputStream parse(InputStream stream) { try (final BarrageProtoUtil.ExposedByteArrayOutputStream baos = - new BarrageProtoUtil.ExposedByteArrayOutputStream()) { + new BarrageProtoUtil.ExposedByteArrayOutputStream()) { final byte[] buffer = new byte[4096]; while (stream.available() > 0) { int len = stream.read(buffer); diff --git a/grpc-api/src/test/java/io/deephaven/grpc_api/session/SessionServiceTest.java b/grpc-api/src/test/java/io/deephaven/grpc_api/session/SessionServiceTest.java index f2154e628c5..5aff13e2da0 100644 --- 
a/grpc-api/src/test/java/io/deephaven/grpc_api/session/SessionServiceTest.java +++ b/grpc-api/src/test/java/io/deephaven/grpc_api/session/SessionServiceTest.java @@ -24,8 +24,8 @@ public class SessionServiceTest { public void setup() { livenessScope = LivenessScopeStack.open(); scheduler = new TestControlledScheduler(); - sessionService = new SessionService(scheduler, - authContext -> new SessionState(scheduler, authContext), TOKEN_EXPIRE_MS); + sessionService = + new SessionService(scheduler, authContext -> new SessionState(scheduler, authContext), TOKEN_EXPIRE_MS); } @After @@ -53,17 +53,16 @@ public void testSessionExpiresOnClose() { Assert.eqTrue(session.isExpired(), "session.isExpired()"); Assert.eqNull(session.getExpiration(), "session.getExpiration()"); Assert.eqNull(sessionService.getSessionForToken(expiration.token), - "sessionService.getSessionForToken(expiration.token)"); + "sessionService.getSessionForToken(expiration.token)"); } @Test public void testTokenRotationHasSpamProtection() { - // because we need to keep some state per refresh token, we must protect slightly from - // accidental DOS spam + // because we need to keep some state per refresh token, we must protect slightly from accidental DOS spam final SessionState session = sessionService.newSession(AUTH_CONTEXT); final SessionService.TokenExpiration initialToken = session.getExpiration(); - Assert.eq(sessionService.refreshToken(session), "sessionService.refreshToken(session)", - initialToken, "initialToken"); + Assert.eq(sessionService.refreshToken(session), "sessionService.refreshToken(session)", initialToken, + "initialToken"); } @Test @@ -74,8 +73,7 @@ public void testTokenRotation() { // let's advance by some reasonable amount and ensure that the token now refreshes scheduler.runUntil(scheduler.timeAfterMs(TOKEN_EXPIRE_MS / 3)); final SessionService.TokenExpiration newToken = sessionService.refreshToken(session); - final long timeToNewExpiration = - newToken.deadline.getMillis() - 
scheduler.currentTime().getMillis(); + final long timeToNewExpiration = newToken.deadline.getMillis() - scheduler.currentTime().getMillis(); Assert.eq(timeToNewExpiration, "timeToNewExpiration", TOKEN_EXPIRE_MS); // ensure that the UUIDs are different so they may expire independently @@ -114,7 +112,7 @@ public void testTokenLookup() { final SessionState session = sessionService.newSession(AUTH_CONTEXT); final SessionService.TokenExpiration initialToken = session.getExpiration(); Assert.eq(sessionService.getSessionForToken(initialToken.token), - "sessionService.getSessionForToken(initialToken.token)", session, "session"); + "sessionService.getSessionForToken(initialToken.token)", session, "session"); // advance so we can rotate token scheduler.runUntil(scheduler.timeAfterMs(TOKEN_EXPIRE_MS / 3)); @@ -123,22 +121,22 @@ public void testTokenLookup() { // check both tokens are valid Assert.eq(sessionService.getSessionForToken(initialToken.token), - "sessionService.getSessionForToken(initialToken.token)", session, "session"); + "sessionService.getSessionForToken(initialToken.token)", session, "session"); Assert.eq(sessionService.getSessionForToken(newToken.token), - "sessionService.getSessionForToken(newToken.token)", session, "session"); + "sessionService.getSessionForToken(newToken.token)", session, "session"); // expire original token; current token should be valid scheduler.runThrough(initialToken.deadline); Assert.eqNull(sessionService.getSessionForToken(initialToken.token), - "sessionService.getSessionForToken(initialToken.token)"); + "sessionService.getSessionForToken(initialToken.token)"); Assert.eq(sessionService.getSessionForToken(newToken.token), - "sessionService.getSessionForToken(newToken.token)", session, "session"); + "sessionService.getSessionForToken(newToken.token)", session, "session"); // let's expire the new token scheduler.runThrough(session.getExpiration().deadline); Assert.eqTrue(session.isExpired(), "session.isExpired()"); 
Assert.eqNull(sessionService.getSessionForToken(newToken.token), - "sessionService.getSessionForToken(newToken.token)"); + "sessionService.getSessionForToken(newToken.token)"); } @Test @@ -152,20 +150,20 @@ public void testSessionsAreIndependent() { final SessionService.TokenExpiration expiration1 = sessionService.refreshToken(session1); final SessionService.TokenExpiration expiration2 = session2.getExpiration(); - Assert.lt(expiration2.deadline.getNanos(), "expiration2.deadline", - expiration1.deadline.getNanos(), "expiration1.deadline"); + Assert.lt(expiration2.deadline.getNanos(), "expiration2.deadline", expiration1.deadline.getNanos(), + "expiration1.deadline"); scheduler.runThrough(expiration2.deadline); // first session is live Assert.eqFalse(session1.isExpired(), "session2.isExpired()"); Assert.eq(sessionService.getSessionForToken(expiration1.token), - "sessionService.getSessionForToken(expiration1.token)", session1, "session1"); + "sessionService.getSessionForToken(expiration1.token)", session1, "session1"); Assert.eqNull(sessionService.getSessionForToken(expiration2.token), - "sessionService.getSessionForToken(initialToken.token)"); + "sessionService.getSessionForToken(initialToken.token)"); // second session has expired Assert.eqTrue(session2.isExpired(), "session2.isExpired()"); Assert.eqNull(sessionService.getSessionForToken(expiration2.token), - "sessionService.getSessionForToken(initialToken.token)"); + "sessionService.getSessionForToken(initialToken.token)"); } } diff --git a/grpc-api/src/test/java/io/deephaven/grpc_api/session/SessionStateTest.java b/grpc-api/src/test/java/io/deephaven/grpc_api/session/SessionStateTest.java index 461c6f3a8e3..2eac70fbeb7 100644 --- a/grpc-api/src/test/java/io/deephaven/grpc_api/session/SessionStateTest.java +++ b/grpc-api/src/test/java/io/deephaven/grpc_api/session/SessionStateTest.java @@ -60,7 +60,7 @@ public void setup() { scheduler = new TestControlledScheduler(); session = new SessionState(scheduler, 
AUTH_CONTEXT); session.initializeExpiration(new SessionService.TokenExpiration(UUID.randomUUID(), - DBTimeUtils.nanosToTime(Long.MAX_VALUE), session)); + DBTimeUtils.nanosToTime(Long.MAX_VALUE), session)); nextExportId = 1; } @@ -89,10 +89,9 @@ public void testDestroyOnExportRelease() { Assert.eq(export.refCount, "export.refCount", 1); // assert lookup is same object - Assert.eq(session.getExport(nextExportId - 1), "session.getExport(nextExport - 1)", - exportObj, "exportObj"); - Assert.equals(exportObj.getExportId(), "exportObj.getExportId()", - exportIdToTicket(nextExportId - 1), "nextExportId - 1"); + Assert.eq(session.getExport(nextExportId - 1), "session.getExport(nextExport - 1)", exportObj, "exportObj"); + Assert.equals(exportObj.getExportId(), "exportObj.getExportId()", exportIdToTicket(nextExportId - 1), + "nextExportId - 1"); // release exportObj.release(); @@ -111,8 +110,8 @@ public void testServerExportDestroyOnExportRelease() { Assert.eq(export.refCount, "export.refCount", 1); // assert lookup is same object - Assert.eq(session.getExport(exportObj.getExportId()), - "session.getExport(exportObj.getExportId())", exportObj, "exportObj"); + Assert.eq(session.getExport(exportObj.getExportId()), "session.getExport(exportObj.getExportId())", exportObj, + "exportObj"); // release exportObj.release(); @@ -135,10 +134,9 @@ public void testDestroyOnSessionRelease() { Assert.eq(export.refCount, "export.refCount", 1); // assert lookup is same object - Assert.eq(session.getExport(nextExportId - 1), "session.getExport(nextExport - 1)", - exportObj, "exportObj"); - Assert.equals(exportObj.getExportId(), "exportObj.getExportId()", - exportIdToTicket(nextExportId - 1), "nextExportId - 1"); + Assert.eq(session.getExport(nextExportId - 1), "session.getExport(nextExport - 1)", exportObj, "exportObj"); + Assert.equals(exportObj.getExportId(), "exportObj.getExportId()", exportIdToTicket(nextExportId - 1), + "nextExportId - 1"); // release session.onExpired(); @@ -157,8 
+155,8 @@ public void testServerExportDestroyOnSessionRelease() { Assert.eq(export.refCount, "export.refCount", 1); // assert lookup is same object - Assert.eq(session.getExport(exportObj.getExportId()), - "session.getExport(exportObj.getExportId())", exportObj, "exportObj"); + Assert.eq(session.getExport(exportObj.getExportId()), "session.getExport(exportObj.getExportId())", exportObj, + "exportObj"); // release session.onExpired(); @@ -168,8 +166,7 @@ public void testServerExportDestroyOnSessionRelease() { @Test public void testWorkItemNoDependencies() { final Object export = new Object(); - final SessionState.ExportObject exportObj = - session.newExport(nextExportId++).submit(() -> export); + final SessionState.ExportObject exportObj = session.newExport(nextExportId++).submit(() -> export); expectException(IllegalStateException.class, exportObj::get); Assert.eq(exportObj.getState(), "exportObj.getState()", ExportNotification.State.QUEUED); scheduler.runUntilQueueEmpty(); @@ -181,10 +178,10 @@ public void testWorkItemNoDependencies() { public void testThrowInExportMain() { final MutableBoolean errored = new MutableBoolean(); final SessionState.ExportObject exportObj = session.newExport(nextExportId++) - .onError(err -> errored.setTrue()) - .submit(() -> { - throw new RuntimeException("submit exception"); - }); + .onError(err -> errored.setTrue()) + .submit(() -> { + throw new RuntimeException("submit exception"); + }); Assert.eqFalse(errored.booleanValue(), "errored.booleanValue()"); Assert.eq(exportObj.getState(), "exportObj.getState()", ExportNotification.State.QUEUED); scheduler.runUntilQueueEmpty(); @@ -196,13 +193,13 @@ public void testThrowInExportMain() { public void testThrowInErrorHandler() { final MutableBoolean submitted = new MutableBoolean(); final SessionState.ExportObject exportObj = session.newExport(nextExportId++) - .onError(err -> { - throw new RuntimeException("error handler exception"); - }) - .submit(() -> { - submitted.setTrue(); - throw 
new RuntimeException("submit exception"); - }); + .onError(err -> { + throw new RuntimeException("error handler exception"); + }) + .submit(() -> { + submitted.setTrue(); + throw new RuntimeException("submit exception"); + }); Assert.eqFalse(submitted.booleanValue(), "submitted.booleanValue()"); Assert.eq(exportObj.getState(), "exportObj.getState()", ExportNotification.State.QUEUED); scheduler.runUntilQueueEmpty(); @@ -220,7 +217,7 @@ public void testCancelBeforeDefined() { final MutableBoolean submitted = new MutableBoolean(); expectException(StatusRuntimeException.class, - () -> session.newExport(nextExportId++).submit(submitted::setTrue)); + () -> session.newExport(nextExportId++).submit(submitted::setTrue)); scheduler.runUntilQueueEmpty(); Assert.eq(exportObj.getState(), "exportObj.getState()", ExportNotification.State.CANCELLED); @@ -233,8 +230,8 @@ public void testCancelBeforeExport() { final MutableBoolean submitted = new MutableBoolean(); final SessionState.ExportObject exportObj = session.newExport(nextExportId++) - .require(d1) - .submit(submitted::setTrue); + .require(d1) + .submit(submitted::setTrue); Assert.eq(exportObj.getState(), "exportObj.getState()", ExportNotification.State.PENDING); exportObj.cancel(); @@ -249,11 +246,11 @@ public void testCancelBeforeExport() { public void testCancelDuringExport() { final MutableObject export = new MutableObject<>(); final SessionState.ExportObject exportObj = - session.newExport(nextExportId++).submit(() -> { - session.getExport(nextExportId - 1).cancel(); - export.setValue(new PublicLivenessArtifact()); - return export; - }); + session.newExport(nextExportId++).submit(() -> { + session.getExport(nextExportId - 1).cancel(); + export.setValue(new PublicLivenessArtifact()); + return export; + }); scheduler.runUntilQueueEmpty(); Assert.eq(exportObj.getState(), "exportObj.getState()", ExportNotification.State.CANCELLED); @@ -294,13 +291,12 @@ public void testCancelPropagates() { final MutableBoolean submitted = 
new MutableBoolean(); final SessionState.ExportObject d1 = session.getExport(nextExportId++); final SessionState.ExportObject exportObj = session.newExport(nextExportId++) - .require(d1) - .submit(submitted::setTrue); + .require(d1) + .submit(submitted::setTrue); d1.cancel(); scheduler.runUntilQueueEmpty(); - Assert.eq(exportObj.getState(), "exportObj.getState()", - ExportNotification.State.DEPENDENCY_CANCELLED); + Assert.eq(exportObj.getState(), "exportObj.getState()", ExportNotification.State.DEPENDENCY_CANCELLED); Assert.eqFalse(submitted.booleanValue(), "submitted.booleanValue()"); } @@ -309,17 +305,16 @@ public void testErrorPropagatesNotYetFailed() { final MutableBoolean submitted = new MutableBoolean(); final SessionState.ExportObject d1 = session.getExport(nextExportId++); final SessionState.ExportObject exportObj = session.newExport(nextExportId++) - .require(d1) - .submit(submitted::setTrue); + .require(d1) + .submit(submitted::setTrue); session.newExport(d1.getExportId()) - .submit(() -> { - throw new RuntimeException("I fail."); - }); + .submit(() -> { + throw new RuntimeException("I fail."); + }); scheduler.runUntilQueueEmpty(); - Assert.eq(exportObj.getState(), "exportObj.getState()", - ExportNotification.State.DEPENDENCY_FAILED); + Assert.eq(exportObj.getState(), "exportObj.getState()", ExportNotification.State.DEPENDENCY_FAILED); Assert.eqFalse(submitted.booleanValue(), "submitted.booleanValue()"); } @@ -327,19 +322,18 @@ public void testErrorPropagatesNotYetFailed() { public void testErrorPropagatesAlreadyFailed() { final MutableBoolean submitted = new MutableBoolean(); final SessionState.ExportObject d1 = session.newExport(nextExportId++) - .submit(() -> { - throw new RuntimeException("I fail."); - }); + .submit(() -> { + throw new RuntimeException("I fail."); + }); scheduler.runUntilQueueEmpty(); Assert.eq(d1.getState(), "d1.getState()", ExportNotification.State.FAILED); final SessionState.ExportObject exportObj = 
session.newExport(nextExportId++) - .require(d1) - .submit(submitted::setTrue); + .require(d1) + .submit(submitted::setTrue); scheduler.runUntilQueueEmpty(); - Assert.eq(exportObj.getState(), "exportObj.getState()", - ExportNotification.State.DEPENDENCY_FAILED); + Assert.eq(exportObj.getState(), "exportObj.getState()", ExportNotification.State.DEPENDENCY_FAILED); Assert.eqFalse(submitted.booleanValue(), "submitted.booleanValue()"); } @@ -348,14 +342,14 @@ public void testWorkItemOutOfOrderDependency() { final MutableBoolean submitted = new MutableBoolean(); final SessionState.ExportObject d1 = session.getExport(nextExportId++); final SessionState.ExportObject exportObj = session.newExport(nextExportId++) - .require(d1) - .submit(submitted::setTrue); + .require(d1) + .submit(submitted::setTrue); Assert.eq(exportObj.getState(), "exportObj.getState()", ExportNotification.State.PENDING); session.newExport(d1.getExportId()) - .submit(() -> { - }); + .submit(() -> { + }); scheduler.runOne(); // d1 Assert.eq(exportObj.getState(), "exportObj.getState()", ExportNotification.State.QUEUED); @@ -367,15 +361,15 @@ public void testWorkItemOutOfOrderDependency() { public void testWorkItemDeepDependency() { final MutableBoolean submitted = new MutableBoolean(); final SessionState.ExportObject e1 = session.newExport(nextExportId++) - .submit(() -> { - }); + .submit(() -> { + }); final SessionState.ExportObject e2 = session.newExport(nextExportId++) - .require(e1) - .submit(() -> { - }); + .require(e1) + .submit(() -> { + }); final SessionState.ExportObject e3 = session.newExport(nextExportId++) - .require(e2) - .submit(submitted::setTrue); + .require(e2) + .submit(submitted::setTrue); Assert.eq(e1.getState(), "e1.getState()", ExportNotification.State.QUEUED); Assert.eq(e2.getState(), "e2.getState()", ExportNotification.State.PENDING); @@ -404,15 +398,15 @@ public void testDependencyNotReleasedEarly() { final SessionState.ExportObject e1; try (final SafeCloseable scope = 
LivenessScopeStack.open()) { e1 = session.newExport(nextExportId++) - .submit(() -> export); + .submit(() -> export); } scheduler.runOne(); Assert.eq(e1.getState(), "e1.getState()", ExportNotification.State.EXPORTED); final SessionState.ExportObject e2 = session.newExport(nextExportId++) - .require(e1) - .submit(() -> Assert.gt(e1.get().refCount, "e1.get().refCount", 0)); + .require(e1) + .submit(() -> Assert.gt(e1.get().refCount, "e1.get().refCount", 0)); Assert.eq(e2.getState(), "e1.getState()", ExportNotification.State.QUEUED); e1.release(); @@ -430,7 +424,7 @@ public void testLateDependencyAlreadyReleasedFails() { final SessionState.ExportObject e1; try (final SafeCloseable scope = LivenessScopeStack.open()) { e1 = session.newExport(nextExportId++) - .submit(() -> export); + .submit(() -> export); } scheduler.runOne(); @@ -441,9 +435,9 @@ public void testLateDependencyAlreadyReleasedFails() { final MutableBoolean errored = new MutableBoolean(); expectException(LivenessStateException.class, () -> { final SessionState.ExportObject e2 = session.newExport(nextExportId++) - .require(e1) - .onError(err -> errored.setTrue()) - .submit(() -> Assert.gt(e1.get().refCount, "e1.get().refCount", 0)); + .require(e1) + .onError(err -> errored.setTrue()) + .submit(() -> Assert.gt(e1.get().refCount, "e1.get().refCount", 0)); }); } @@ -455,22 +449,19 @@ public void testNewExportRequiresPositiveId() { @Test public void testDependencyAlreadyReleased() { - final SessionState.ExportObject e1 = - session.newExport(nextExportId++).submit(() -> { - }); + final SessionState.ExportObject e1 = session.newExport(nextExportId++).submit(() -> { + }); scheduler.runUntilQueueEmpty(); e1.release(); Assert.eq(e1.getState(), "e1.getState()", ExportNotification.State.RELEASED); - final SessionState.ExportObject e2 = - session.newExport(nextExportId++).require(e1).submit(() -> { - }); + final SessionState.ExportObject e2 = session.newExport(nextExportId++).require(e1).submit(() -> { + }); 
Assert.eq(e2.getState(), "e1.getState()", ExportNotification.State.DEPENDENCY_RELEASED); } @Test public void testExpiredNewExport() { - final SessionState.ExportObject exportObj = - session.newExport(nextExportId++).submit(Object::new); + final SessionState.ExportObject exportObj = session.newExport(nextExportId++).submit(Object::new); scheduler.runUntilQueueEmpty(); session.onExpired(); expectException(StatusRuntimeException.class, exportObj::get); @@ -495,8 +486,7 @@ public void testExpiredServerSideExport() { @Test public void testExpiresBeforeExport() { session.onExpired(); - expectException(StatusRuntimeException.class, - () -> session.newServerSideExport(new Object())); + expectException(StatusRuntimeException.class, () -> session.newServerSideExport(new Object())); expectException(StatusRuntimeException.class, () -> session.nonExport()); expectException(StatusRuntimeException.class, () -> session.newExport(nextExportId++)); expectException(StatusRuntimeException.class, () -> session.getExport(nextExportId++)); @@ -526,10 +516,10 @@ public void testExpireBeforeExportSubmit() { public void testExpireDuringExport() { final CountingLivenessReferent export = new CountingLivenessReferent(); session.newExport(nextExportId++) - .submit(() -> { - session.onExpired(); - return export; - }); + .submit(() -> { + session.onExpired(); + return export; + }); scheduler.runUntilQueueEmpty(); Assert.eq(export.refCount, "export.refCount", 0); } @@ -538,9 +528,9 @@ public void testExpireDuringExport() { public void testDependencyFailed() { final SessionState.ExportObject e1 = session.getExport(nextExportId++); final SessionState.ExportObject e2 = session.newExport(nextExportId++) - .require(e1) - .submit(() -> { - }); + .require(e1) + .submit(() -> { + }); session.newExport(e1.getExportId()).submit(() -> { throw new RuntimeException(); }); @@ -550,18 +540,17 @@ public void testDependencyFailed() { @Test public void testDependencyAlreadyFailed() { - final 
SessionState.ExportObject e1 = - session.newExport(nextExportId++).submit(() -> { - throw new RuntimeException(); - }); + final SessionState.ExportObject e1 = session.newExport(nextExportId++).submit(() -> { + throw new RuntimeException(); + }); scheduler.runUntilQueueEmpty(); Assert.eq(e1.getState(), "e1.getState()", ExportNotification.State.FAILED); expectException(IllegalStateException.class, e1::get); final SessionState.ExportObject e2 = session.newExport(nextExportId++) - .require(e1) - .submit(() -> { - }); + .require(e1) + .submit(() -> { + }); scheduler.runUntilQueueEmpty(); Assert.eq(e2.getState(), "e2.getState()", ExportNotification.State.DEPENDENCY_FAILED); expectException(IllegalStateException.class, e2::get); @@ -574,26 +563,24 @@ public void testDependencyAlreadyCanceled() { scheduler.runUntilQueueEmpty(); final SessionState.ExportObject e2 = session.newExport(nextExportId++) - .require(e1) - .submit(() -> { - }); + .require(e1) + .submit(() -> { + }); scheduler.runUntilQueueEmpty(); - Assert.eq(e2.getState(), "e2.getState()", ExportNotification.State.DEPENDENCY_CANCELLED); // cancels - // propagate + Assert.eq(e2.getState(), "e2.getState()", ExportNotification.State.DEPENDENCY_CANCELLED); // cancels propagate expectException(IllegalStateException.class, e2::get); } @Test public void testDependencyAlreadyExported() { - final SessionState.ExportObject e1 = - session.newExport(nextExportId++).submit(() -> { - }); + final SessionState.ExportObject e1 = session.newExport(nextExportId++).submit(() -> { + }); scheduler.runUntilQueueEmpty(); final SessionState.ExportObject e2 = session.newExport(nextExportId++) - .require(e1) - .submit(() -> { - }); + .require(e1) + .submit(() -> { + }); Assert.eq(e2.getState(), "e2.getState()", ExportNotification.State.QUEUED); scheduler.runUntilQueueEmpty(); Assert.eq(e2.getState(), "e2.getState()", ExportNotification.State.EXPORTED); @@ -609,11 +596,11 @@ public void testDependencyReleasedBeforeExport() { 
scheduler.runUntilQueueEmpty(); final SessionState.ExportObject e2obj = session.newExport(nextExportId++) - .require(e1obj) - .submit(() -> { - Assert.neqNull(e1obj.get(), "e1obj.get()"); - Assert.gt(e1.refCount, "e1.refCount", 0); - }); + .require(e1obj) + .submit(() -> { + Assert.neqNull(e1obj.get(), "e1obj.get()"); + Assert.gt(e1.refCount, "e1.refCount", 0); + }); e1obj.release(); Assert.eq(e1obj.getState(), "e1obj.getState()", ExportNotification.State.RELEASED); @@ -625,12 +612,10 @@ public void testDependencyReleasedBeforeExport() { @Test public void testChildCancelledFirst() { - final SessionState.ExportObject e1 = - session.newExport(nextExportId++).submit(() -> { - }); - final SessionState.ExportObject e2 = - session.newExport(nextExportId++).require(e1).submit(() -> { - }); + final SessionState.ExportObject e1 = session.newExport(nextExportId++).submit(() -> { + }); + final SessionState.ExportObject e2 = session.newExport(nextExportId++).require(e1).submit(() -> { + }); e2.cancel(); Assert.eq(e2.getState(), "e2.getState()", ExportNotification.State.CANCELLED); scheduler.runUntilQueueEmpty(); @@ -647,12 +632,10 @@ public void testCannotOutOfOrderServerExports() { @Test public void testVerifyExpirationSession() { final SessionState session = new SessionState(scheduler, AUTH_CONTEXT); - final SessionService.TokenExpiration expiration = new SessionService.TokenExpiration( - UUID.randomUUID(), DBTimeUtils.nanosToTime(Long.MAX_VALUE), session); - expectException(IllegalArgumentException.class, - () -> this.session.initializeExpiration(expiration)); - expectException(IllegalArgumentException.class, - () -> this.session.updateExpiration(expiration)); + final SessionService.TokenExpiration expiration = + new SessionService.TokenExpiration(UUID.randomUUID(), DBTimeUtils.nanosToTime(Long.MAX_VALUE), session); + expectException(IllegalArgumentException.class, () -> this.session.initializeExpiration(expiration)); + expectException(IllegalArgumentException.class, () -> 
this.session.updateExpiration(expiration)); } @Test @@ -665,11 +648,10 @@ public void testGetExpiration() { @Test public void testExpiredByTime() { - session.updateExpiration(new SessionService.TokenExpiration(UUID.randomUUID(), - scheduler.currentTime(), session)); + session.updateExpiration( + new SessionService.TokenExpiration(UUID.randomUUID(), scheduler.currentTime(), session)); Assert.eqNull(session.getExpiration(), "session.getExpiration()"); // already expired - expectException(StatusRuntimeException.class, - () -> session.newServerSideExport(new Object())); + expectException(StatusRuntimeException.class, () -> session.newServerSideExport(new Object())); expectException(StatusRuntimeException.class, () -> session.nonExport()); expectException(StatusRuntimeException.class, () -> session.newExport(nextExportId++)); expectException(StatusRuntimeException.class, () -> session.getExport(nextExportId++)); @@ -677,15 +659,13 @@ public void testExpiredByTime() { @Test public void testGetAuthContext() { - Assert.eq(session.getAuthContext(), "session.getAuthContext()", AUTH_CONTEXT, - "AUTH_CONTEXT"); + Assert.eq(session.getAuthContext(), "session.getAuthContext()", AUTH_CONTEXT, "AUTH_CONTEXT"); } @Test public void testReleaseIsNotProactive() { final MutableBoolean submitted = new MutableBoolean(); - final SessionState.ExportObject e1 = - session.newExport(nextExportId++).submit(submitted::setTrue); + final SessionState.ExportObject e1 = session.newExport(nextExportId++).submit(submitted::setTrue); e1.release(); Assert.eq(e1.getState(), "e1.getState()", ExportNotification.State.QUEUED); Assert.eqFalse(submitted.booleanValue(), "submitted.booleanValue()"); @@ -729,9 +709,8 @@ public void testWorkItemNonTrivialCycle() { public void testCycleErrorPropagates() { final SessionState.ExportObject e1 = session.getExport(nextExportId++); final SessionState.ExportObject e2 = session.getExport(nextExportId++); - final SessionState.ExportObject e3 = - 
session.newExport(nextExportId++).require(e1, e2).submit(() -> { - }); + final SessionState.ExportObject e3 = session.newExport(nextExportId++).require(e1, e2).submit(() -> { + }); session.newExport(e1.getExportId()).require(e2).submit(() -> { }); session.newExport(e2.getExportId()).require(e1).submit(() -> { @@ -753,12 +732,9 @@ public void testNonExportCycle() { }); b3.require(b1.getExport()).submit(() -> { }); - Assert.eq(b1.getExport().getState(), "b1.getExport().getState()", - ExportNotification.State.FAILED); - Assert.eq(b2.getExport().getState(), "b2.getExport().getState()", - ExportNotification.State.FAILED); - Assert.eq(b3.getExport().getState(), "b3.getExport().getState()", - ExportNotification.State.FAILED); + Assert.eq(b1.getExport().getState(), "b1.getExport().getState()", ExportNotification.State.FAILED); + Assert.eq(b2.getExport().getState(), "b2.getExport().getState()", ExportNotification.State.FAILED); + Assert.eq(b3.getExport().getState(), "b3.getExport().getState()", ExportNotification.State.FAILED); } @Test @@ -788,17 +764,16 @@ public void textExportListenerNoExports() { final QueueingExportListener listener = new QueueingExportListener(); session.addExportListener(listener); Assert.eq(listener.notifications.size(), "notifications.size()", 1); - final ExportNotification refreshComplete = - listener.notifications.get(listener.notifications.size() - 1); + final ExportNotification refreshComplete = listener.notifications.get(listener.notifications.size() - 1); Assert.eq(ticketToExportId(refreshComplete.getTicket()), "refreshComplete.getTicket()", - SessionState.NON_EXPORT_ID, "SessionState.NON_EXPORT_ID"); + SessionState.NON_EXPORT_ID, "SessionState.NON_EXPORT_ID"); } @Test public void textExportListenerOneExport() { final QueueingExportListener listener = new QueueingExportListener(); final SessionState.ExportObject e1 = - session.newExport(nextExportId++).submit(() -> session); + session.newExport(nextExportId++).submit(() -> session); 
scheduler.runUntilQueueEmpty(); session.addExportListener(listener); listener.validateNotificationQueue(e1, EXPORTED); @@ -807,7 +782,7 @@ public void textExportListenerOneExport() { Assert.eq(listener.notifications.size(), "notifications.size()", 2); final ExportNotification refreshComplete = listener.notifications.get(1); Assert.eq(ticketToExportId(refreshComplete.getTicket()), "lastNotification.getTicket()", - SessionState.NON_EXPORT_ID, "SessionState.NON_EXPORT_ID"); + SessionState.NON_EXPORT_ID, "SessionState.NON_EXPORT_ID"); } @Test @@ -833,7 +808,7 @@ public void textExportListenerAddHeadAfterRefreshComplete() { final QueueingExportListener listener = new QueueingExportListener(); session.addExportListener(listener); final SessionState.ExportObject e1 = - session.newExport(nextExportId++).submit(() -> session); + session.newExport(nextExportId++).submit(() -> session); scheduler.runUntilQueueEmpty(); Assert.eq(listener.notifications.size(), "notifications.size()", 5); listener.validateIsRefreshComplete(0); @@ -843,30 +818,28 @@ public void textExportListenerAddHeadAfterRefreshComplete() { @Test public void testExportListenerInterestingRefresh() { final QueueingExportListener listener = new QueueingExportListener(); - final SessionState.ExportObject e1 = - session.getExport(nextExportId++); + final SessionState.ExportObject e1 = session.getExport(nextExportId++); final SessionState.ExportObject e4 = - session.newExport(nextExportId++).submit(() -> session); // exported + session.newExport(nextExportId++).submit(() -> session); // exported final SessionState.ExportObject e5 = - session.newExport(nextExportId++).submit(() -> session); + session.newExport(nextExportId++).submit(() -> session); final SessionState.ExportObject e7 = - session.newExport(nextExportId++).submit(() -> { - throw new RuntimeException(); - }); // failed + session.newExport(nextExportId++).submit(() -> { + throw new RuntimeException(); + }); // failed final SessionState.ExportObject e8 = - 
session.newExport(nextExportId++).require(e7).submit(() -> session); // dependency - // failed + session.newExport(nextExportId++).require(e7).submit(() -> session); // dependency failed scheduler.runUntilQueueEmpty(); e5.release(); // released final SessionState.ExportObject e6 = - session.newExport(nextExportId++).submit(() -> session); + session.newExport(nextExportId++).submit(() -> session); e6.cancel(); final SessionState.ExportObject e3 = - session.newExport(nextExportId++).submit(() -> session); // queued + session.newExport(nextExportId++).submit(() -> session); // queued final SessionState.ExportObject e2 = - session.newExport(nextExportId++).require(e3).submit(() -> session); // pending + session.newExport(nextExportId++).require(e3).submit(() -> session); // pending session.addExportListener(listener); listener.validateIsRefreshComplete(-1); @@ -885,30 +858,27 @@ public void testExportListenerInterestingUpdates() { final QueueingExportListener listener = new QueueingExportListener(); session.addExportListener(listener); - final SessionState.ExportObject e1 = - session.getExport(nextExportId++); + final SessionState.ExportObject e1 = session.getExport(nextExportId++); final SessionState.ExportObject e4 = - session.newExport(nextExportId++).submit(() -> session); // exported + session.newExport(nextExportId++).submit(() -> session); // exported final SessionState.ExportObject e5 = - session.newExport(nextExportId++).submit(() -> session); + session.newExport(nextExportId++).submit(() -> session); final SessionState.ExportObject e7 = - session.newExport(nextExportId++).submit(() -> { - throw new RuntimeException(); - }); // failed + session.newExport(nextExportId++).submit(() -> { + throw new RuntimeException(); + }); // failed final SessionState.ExportObject e8 = - session.newExport(nextExportId++).require(e7).submit(() -> session); // dependency - // failed + session.newExport(nextExportId++).require(e7).submit(() -> session); // dependency failed 
scheduler.runUntilQueueEmpty(); e5.release(); // released - final SessionState.ExportObject e6 = - session.newExport(nextExportId++).getExport(); + final SessionState.ExportObject e6 = session.newExport(nextExportId++).getExport(); e6.cancel(); final SessionState.ExportObject e3 = - session.newExport(nextExportId++).submit(() -> session); // queued + session.newExport(nextExportId++).submit(() -> session); // queued final SessionState.ExportObject e2 = - session.newExport(nextExportId++).require(e3).submit(() -> session); // pending + session.newExport(nextExportId++).require(e3).submit(() -> session); // pending listener.validateIsRefreshComplete(0); listener.validateNotificationQueue(e1, UNKNOWN); @@ -942,8 +912,7 @@ public void onNext(final ExportNotification n) { session.addExportListener(listener); listener.validateIsRefreshComplete(-1); listener.validateNotificationQueue(b1, UNKNOWN); - listener.validateNotificationQueue(b2, PENDING, QUEUED); // PENDING is optional/racy w.r.t. - // spec + listener.validateNotificationQueue(b2, PENDING, QUEUED); // PENDING is optional/racy w.r.t. spec listener.validateNotificationQueue(b3, UNKNOWN); } @@ -968,9 +937,7 @@ public void onNext(final ExportNotification n) { session.addExportListener(listener); listener.validateIsRefreshComplete(-1); listener.validateNotificationQueue(b1, UNKNOWN); - listener.validateNotificationQueue(b2, UNKNOWN, PENDING, QUEUED); // PENDING is - // optional/racy w.r.t. - // spec + listener.validateNotificationQueue(b2, UNKNOWN, PENDING, QUEUED); // PENDING is optional/racy w.r.t. 
spec listener.validateNotificationQueue(b3, UNKNOWN); } @@ -993,8 +960,7 @@ public void onNext(final ExportNotification n) { } }; session.addExportListener(listener); - listener.validateIsRefreshComplete(5); // note that we receive refresh complete after - // receiving updates to b2 + listener.validateIsRefreshComplete(5); // note that we receive refresh complete after receiving updates to b2 listener.validateNotificationQueue(b1, UNKNOWN); listener.validateNotificationQueue(b2, UNKNOWN, PENDING, QUEUED); listener.validateNotificationQueue(b3, UNKNOWN); @@ -1101,8 +1067,7 @@ public void onNext(final ExportNotification n) { } }; session.addExportListener(listener); - listener.validateIsRefreshComplete(4); // note we receive refresh complete after the update - // to b2 + listener.validateIsRefreshComplete(4); // note we receive refresh complete after the update to b2 listener.validateNotificationQueue(b1, UNKNOWN); listener.validateNotificationQueue(b2, UNKNOWN, CANCELLED); listener.validateNotificationQueue(b3, UNKNOWN); @@ -1212,15 +1177,15 @@ public void onNext(final ExportNotification n) { public void testExportListenerNewExportAfterRefreshComplete() { final QueueingExportListener listener = new QueueingExportListener(); final SessionState.ExportObject b1 = - session.newExport(nextExportId++).submit(() -> session); + session.newExport(nextExportId++).submit(() -> session); final SessionState.ExportObject b2 = - session.newExport(nextExportId++).submit(() -> session); + session.newExport(nextExportId++).submit(() -> session); final SessionState.ExportObject b3 = - session.newExport(nextExportId++).submit(() -> session); + session.newExport(nextExportId++).submit(() -> session); session.addExportListener(listener); final SessionState.ExportObject b4 = - session.newExport(nextExportId++).submit(() -> session); + session.newExport(nextExportId++).submit(() -> session); // for fun we'll flush after refresh scheduler.runUntilQueueEmpty(); @@ -1247,13 +1212,13 @@ public 
void testExportListenerServerSideExports() { @Test public void testNonExportWithDependencyFails() { final SessionState.ExportObject e1 = - session.newExport(nextExportId++).submit(() -> session); + session.newExport(nextExportId++).submit(() -> session); final SessionState.ExportObject n1 = - session.nonExport() - .require(e1) - .submit(() -> { - throw new RuntimeException("this should not reach test framework"); - }); + session.nonExport() + .require(e1) + .submit(() -> { + throw new RuntimeException("this should not reach test framework"); + }); scheduler.runUntilQueueEmpty(); Assert.eq(n1.getState(), "n1.getState()", FAILED, "FAILED"); } @@ -1309,44 +1274,42 @@ private void validateIsRefreshComplete(int offset) { offset += notifications.size(); } final ExportNotification notification = notifications.get(offset); - Assert.eq(getExportId(notification), "getExportId(notification)", - SessionState.NON_EXPORT_ID, "SessionState.NON_EXPORT_ID"); + Assert.eq(getExportId(notification), "getExportId(notification)", SessionState.NON_EXPORT_ID, + "SessionState.NON_EXPORT_ID"); } private void validateNotificationQueue(final SessionState.ExportBuilder export, - final ExportNotification.State... states) { + final ExportNotification.State... states) { validateNotificationQueue(export.getExport(), states); } private void validateNotificationQueue(final SessionState.ExportObject export, - final ExportNotification.State... states) { + final ExportNotification.State... 
states) { final Ticket exportId = export.getExportId(); final List foundStates = notifications.stream() - .filter(n -> n.getTicket().equals(exportId)) - .map(ExportNotification::getExportState) - .collect(Collectors.toList()); + .filter(n -> n.getTicket().equals(exportId)) + .map(ExportNotification::getExportState) + .collect(Collectors.toList()); boolean error = foundStates.size() != states.length; for (int offset = 0; !error && offset < states.length; ++offset) { error = !foundStates.get(offset).equals(states[offset]); } if (error) { - final String found = foundStates.stream().map(ExportNotification.State::toString) - .collect(Collectors.joining(", ")); - final String expected = Arrays.stream(states) - .map(ExportNotification.State::toString).collect(Collectors.joining(", ")); - throw new AssertionFailure( - "Notification Queue Differs. Expected: " + expected + " Found: " + found); + final String found = + foundStates.stream().map(ExportNotification.State::toString).collect(Collectors.joining(", ")); + final String expected = + Arrays.stream(states).map(ExportNotification.State::toString).collect(Collectors.joining(", ")); + throw new AssertionFailure("Notification Queue Differs. 
Expected: " + expected + " Found: " + found); } } } /** - * Throw an exception if lambda either does not throw, or throws an exception that is not - * assignable to expectedExceptionType + * Throw an exception if lambda either does not throw, or throws an exception that is not assignable to + * expectedExceptionType */ - private static void expectException(Class expectedExceptionType, - Runnable lambda) { + private static void expectException(Class expectedExceptionType, Runnable lambda) { String nameOfCaughtException = "(no exception)"; try { lambda.run(); @@ -1357,7 +1320,7 @@ private static void expectException(Class expectedExcep nameOfCaughtException = actual.getClass().getSimpleName(); } throw new RuntimeException(String.format("Expected exception %s, got %s", - expectedExceptionType.getSimpleName(), nameOfCaughtException)); + expectedExceptionType.getSimpleName(), nameOfCaughtException)); } // LivenessArtifact's constructor is private diff --git a/grpc-api/src/test/java/io/deephaven/grpc_api/table/BarrageMessageRoundTripTest.java b/grpc-api/src/test/java/io/deephaven/grpc_api/table/BarrageMessageRoundTripTest.java index a0040403ff5..343740f529c 100644 --- a/grpc-api/src/test/java/io/deephaven/grpc_api/table/BarrageMessageRoundTripTest.java +++ b/grpc-api/src/test/java/io/deephaven/grpc_api/table/BarrageMessageRoundTripTest.java @@ -76,8 +76,7 @@ @Category(OutOfBandTest.class) public class BarrageMessageRoundTripTest extends LiveTableTestCase { - private static final long UPDATE_INTERVAL = 1000; // arbitrary; we enforce coalescing on both - // sides + private static final long UPDATE_INTERVAL = 1000; // arbitrary; we enforce coalescing on both sides private Logger log; private TestControlledScheduler scheduler; @@ -114,9 +113,9 @@ protected void setUp() throws Exception { useDeephavenNulls = true; daggerRoot = DaggerBarrageMessageRoundTripTest_TestComponent - .builder() - .withScheduler(scheduler) - .build(); + .builder() + .withScheduler(scheduler) + 
.build(); } @Override @@ -151,8 +150,7 @@ public void onUpdate(final Update upstream) { } @Override - public void onFailureInternal(Throwable originalException, - UpdatePerformanceTracker.Entry sourceEntry) { + public void onFailureInternal(Throwable originalException, UpdatePerformanceTracker.Entry sourceEntry) { exceptions.add(originalException); final StringWriter errors = new StringWriter(); originalException.printStackTrace(new PrintWriter(errors)); @@ -179,34 +177,32 @@ private class RemoteClient { private final Queue commandQueue = new ArrayDeque<>(); private final DummyObserver dummyObserver; - // The replicated table's TableUpdateValidator will be confused if the table is a viewport. - // Instead we rely on + // The replicated table's TableUpdateValidator will be confused if the table is a viewport. Instead we rely on // comparing the producer table to the consumer table to validate contents are correct. RemoteClient(final Index viewport, final BitSet subscribedColumns, - final BarrageMessageProducer barrageMessageProducer, - final String name) { + final BarrageMessageProducer barrageMessageProducer, + final String name) { this(viewport, subscribedColumns, barrageMessageProducer, name, false); } RemoteClient(final Index viewport, final BitSet subscribedColumns, - final BarrageMessageProducer barrageMessageProducer, - final String name, final boolean deferSubscription) { + final BarrageMessageProducer barrageMessageProducer, + final String name, final boolean deferSubscription) { this.viewport = viewport; this.subscribedColumns = subscribedColumns; this.name = name; this.barrageMessageProducer = barrageMessageProducer; this.barrageTable = BarrageTable.make(liveTableRegistrar, LiveTableMonitor.DEFAULT, - barrageMessageProducer.getTableDefinition(), viewport != null); + barrageMessageProducer.getTableDefinition(), viewport != null); - final ChunkInputStreamGenerator.Options options = - new ChunkInputStreamGenerator.Options.Builder() + final 
ChunkInputStreamGenerator.Options options = new ChunkInputStreamGenerator.Options.Builder() .setIsViewport(viewport != null) .setUseDeephavenNulls(useDeephavenNulls) .build(); final BarrageMarshaller marshaller = new BarrageMarshaller( - options, barrageTable.getWireChunkTypes(), barrageTable.getWireTypes(), - barrageTable.getWireComponentTypes(), new BarrageStreamReader()); + options, barrageTable.getWireChunkTypes(), barrageTable.getWireTypes(), + barrageTable.getWireComponentTypes(), new BarrageStreamReader()); this.dummyObserver = new DummyObserver(marshaller, commandQueue); if (viewport == null) { @@ -214,10 +210,9 @@ private class RemoteClient { replicatedTUVListener = new FailureListener("Replicated Table Update Validator"); replicatedTUV.getResultTable().listenForUpdates(replicatedTUVListener); } else { - // the TUV is unaware of the viewport and gets confused about which data should be - // valid. - // instead we rely on the validation of the content in the viewport between the - // consumer and expected table. + // the TUV is unaware of the viewport and gets confused about which data should be valid. + // instead we rely on the validation of the content in the viewport between the consumer and expected + // table. 
replicatedTUV = null; replicatedTUVListener = null; } @@ -229,13 +224,11 @@ private class RemoteClient { public void doSubscribe() { subscribed = true; - final ChunkInputStreamGenerator.Options options = - new ChunkInputStreamGenerator.Options.Builder() + final ChunkInputStreamGenerator.Options options = new ChunkInputStreamGenerator.Options.Builder() .setIsViewport(viewport != null) .setUseDeephavenNulls(useDeephavenNulls) .build(); - barrageMessageProducer.addSubscription(dummyObserver, options, subscribedColumns, - viewport); + barrageMessageProducer.addSubscription(dummyObserver, options, subscribedColumns, viewport); } public void validate(final String msg, QueryTable expected) { @@ -243,8 +236,7 @@ public void validate(final String msg, QueryTable expected) { return; // no subscription implies no refresh implies no data -- so we're valid } - // We expect all messages from original table to have been propagated to the replicated - // table at this point. + // We expect all messages from original table to have been propagated to the replicated table at this point. QueryTable toCheck = barrageTable; if (viewport != null) { @@ -257,8 +249,7 @@ public void validate(final String msg, QueryTable expected) { } if (subscribedColumns.cardinality() != expected.getColumns().length) { final List columns = new ArrayList<>(); - for (int i = subscribedColumns.nextSetBit(0); i >= 0; i = - subscribedColumns.nextSetBit(i + 1)) { + for (int i = subscribedColumns.nextSetBit(0); i >= 0; i = subscribedColumns.nextSetBit(i + 1)) { columns.add(ColumnName.of(expected.getColumns()[i].getName())); } expected = (QueryTable) expected.view(columns); @@ -267,12 +258,11 @@ public void validate(final String msg, QueryTable expected) { // Data should be identical and in-order. 
TstUtils.assertTableEquals(expected, toCheck); - // Since key-space needs to be kept the same, the indexes should also be identical - // between producer and consumer - // (not the indexes between expected and consumer; as the consumer maintains the entire - // index). + // Since key-space needs to be kept the same, the indexes should also be identical between producer and + // consumer + // (not the indexes between expected and consumer; as the consumer maintains the entire index). Assert.equals(barrageMessageProducer.getIndex(), "barrageMessageProducer.getIndex()", - barrageTable.getIndex(), ".getIndex()"); + barrageTable.getIndex(), ".getIndex()"); } private void showResult(final String label, final Table table) { @@ -292,8 +282,7 @@ public void show(QueryTable expected) { } if (subscribedColumns.cardinality() != expected.getColumns().length) { final List columns = new ArrayList<>(); - for (int i = subscribedColumns.nextSetBit(0); i >= 0; i = - subscribedColumns.nextSetBit(i + 1)) { + for (int i = subscribedColumns.nextSetBit(0); i >= 0; i = subscribedColumns.nextSetBit(i + 1)) { columns.add(ColumnName.of(expected.getColumns()[i].getName())); } expected = (QueryTable) expected.view(columns); @@ -301,20 +290,18 @@ public void show(QueryTable expected) { } final int maxLines = 100; - final Pair diffPair = TableTools.diffPair(toCheck, expected, maxLines, - EnumSet.of(TableDiff.DiffItems.DoublesExact)); + final Pair diffPair = + TableTools.diffPair(toCheck, expected, maxLines, EnumSet.of(TableDiff.DiffItems.DoublesExact)); if (diffPair.getFirst().equals("")) { showResult("Ticking Table (" + name + "):", toCheck); } else { - final long numTableRows = - Math.min(maxLines, Math.max(toCheck.size(), expected.size())); + final long numTableRows = Math.min(maxLines, Math.max(toCheck.size(), expected.size())); final long firstRow = Math.max(0, diffPair.getSecond() - 5); - final long lastRow = Math.min(firstRow + numTableRows, - Math.min(firstRow + maxLines, 
diffPair.getSecond() + 5)); + final long lastRow = + Math.min(firstRow + numTableRows, Math.min(firstRow + maxLines, diffPair.getSecond() + 5)); - System.out - .println("Recomputed Table (" + name + ") Differs:\n" + diffPair.getFirst() + System.out.println("Recomputed Table (" + name + ") Differs:\n" + diffPair.getFirst() + "\nRecomputed Table Rows [" + firstRow + ", " + lastRow + "]:"); TableTools.showWithIndex(expected, firstRow, lastRow + 1); System.out.println("Replicated Table Rows [" + firstRow + ", " + lastRow + "]:"); @@ -343,8 +330,7 @@ public void setSubscribedColumns(final BitSet newColumns) { public void setViewportAndColumns(final Index newViewport, final BitSet newColumns) { viewport = newViewport; subscribedColumns = newColumns; - barrageMessageProducer.updateViewportAndColumns(dummyObserver, viewport, - subscribedColumns); + barrageMessageProducer.updateViewportAndColumns(dummyObserver, viewport, subscribedColumns); } } @@ -366,10 +352,8 @@ private class RemoteNugget implements EvalNuggetInterface { RemoteNugget(final Supplier
    makeTable) { this.makeTable = makeTable; this.originalTable = (QueryTable) makeTable.get(); - this.barrageMessageProducer = - originalTable.getResult(new BarrageMessageProducer.Operation<>(scheduler, - daggerRoot.getStreamGeneratorFactory(), originalTable, UPDATE_INTERVAL, - this::onGetSnapshot)); + this.barrageMessageProducer = originalTable.getResult(new BarrageMessageProducer.Operation<>(scheduler, + daggerRoot.getStreamGeneratorFactory(), originalTable, UPDATE_INTERVAL, this::onGetSnapshot)); originalTUV = TableUpdateValidator.make(originalTable); originalTUVListener = new FailureListener("Original Table Update Validator"); @@ -398,10 +382,8 @@ public void flushClientEvents() { } } - public RemoteClient newClient(final Index viewport, final BitSet subscribedColumns, - final String name) { - clients - .add(new RemoteClient(viewport, subscribedColumns, barrageMessageProducer, name)); + public RemoteClient newClient(final Index viewport, final BitSet subscribedColumns, final String name) { + clients.add(new RemoteClient(viewport, subscribedColumns, barrageMessageProducer, name)); return clients.get(clients.size() - 1); } @@ -421,8 +403,8 @@ private abstract class TestHelper { QueryTable sourceTable; TstUtils.ColumnInfo[] columnInfo; - TestHelper(final int numProducerCoalesce, final int numConsumerCoalesce, final int size, - final int seed, final MutableInt numSteps) { + TestHelper(final int numProducerCoalesce, final int numConsumerCoalesce, final int size, final int seed, + final MutableInt numSteps) { this.numProducerCoalesce = numProducerCoalesce; this.numConsumerCoalesce = numConsumerCoalesce; this.size = size; @@ -432,28 +414,24 @@ private abstract class TestHelper { public void createTable() { sourceTable = getTable(size / 4, random, - columnInfo = initColumnInfos( - new String[] {"Sym", "intCol", "doubleCol", "Keys", "boolCol", "TimeStamp"}, - new TstUtils.SetGenerator<>("a", "b", "c", "d"), - new TstUtils.IntGenerator(10, 100), - new 
TstUtils.SetGenerator<>(10.1, 20.1, 30.1), - new TstUtils.SortedLongGenerator(0, Long.MAX_VALUE - 1), - new TstUtils.BooleanGenerator(0.2), - new TstUtils.UnsortedDateTimeGenerator( - DBTimeUtils.convertDateTime("2020-02-14T00:00:00 NY"), - DBTimeUtils.convertDateTime("2020-02-25T00:00:00 NY")))); + columnInfo = + initColumnInfos(new String[] {"Sym", "intCol", "doubleCol", "Keys", "boolCol", "TimeStamp"}, + new TstUtils.SetGenerator<>("a", "b", "c", "d"), + new TstUtils.IntGenerator(10, 100), + new TstUtils.SetGenerator<>(10.1, 20.1, 30.1), + new TstUtils.SortedLongGenerator(0, Long.MAX_VALUE - 1), + new TstUtils.BooleanGenerator(0.2), + new TstUtils.UnsortedDateTimeGenerator( + DBTimeUtils.convertDateTime("2020-02-14T00:00:00 NY"), + DBTimeUtils.convertDateTime("2020-02-25T00:00:00 NY")))); } public void createNuggets() { createNuggetsForTableMaker(() -> sourceTable); // test the explicit updates - createNuggetsForTableMaker(sourceTable::flatten); // test shift aggressive version of - // these updates - createNuggetsForTableMaker(() -> sourceTable.sort("doubleCol")); // test updates in the - // middle of the + createNuggetsForTableMaker(sourceTable::flatten); // test shift aggressive version of these updates + createNuggetsForTableMaker(() -> sourceTable.sort("doubleCol")); // test updates in the middle of the // keyspace - createNuggetsForTableMaker(() -> sourceTable.where("intCol % 12 < 5")); // test - // sparse(r) - // updates + createNuggetsForTableMaker(() -> sourceTable.where("intCol % 12 < 5")); // test sparse(r) updates } void runTest(final Runnable simulateSourceStep) { @@ -490,25 +468,23 @@ void runTest(final Runnable simulateSourceStep) { } private class OneProducerPerClient extends TestHelper { - OneProducerPerClient(final int numProducerCoalesce, final int numConsumerCoalesce, - final int size, final int seed, final MutableInt numSteps) { + OneProducerPerClient(final int numProducerCoalesce, final int numConsumerCoalesce, final int size, + final int 
seed, final MutableInt numSteps) { super(numProducerCoalesce, numConsumerCoalesce, size, seed, numSteps); } void createNuggetsForTableMaker(final Supplier
    makeTable) { nuggets.add(new RemoteNugget(makeTable)); final BitSet subscribedColumns = new BitSet(); - subscribedColumns.set(0, - nuggets.get(nuggets.size() - 1).originalTable.getColumns().length); + subscribedColumns.set(0, nuggets.get(nuggets.size() - 1).originalTable.getColumns().length); nuggets.get(nuggets.size() - 1).newClient(null, subscribedColumns, "full"); nuggets.add(new RemoteNugget(makeTable)); - nuggets.get(nuggets.size() - 1).newClient(Index.FACTORY.getIndexByRange(0, size / 10), - subscribedColumns, "header"); + nuggets.get(nuggets.size() - 1).newClient(Index.FACTORY.getIndexByRange(0, size / 10), subscribedColumns, + "header"); nuggets.add(new RemoteNugget(makeTable)); - nuggets.get(nuggets.size() - 1).newClient( - Index.FACTORY.getIndexByRange(size / 2, size * 3 / 4), subscribedColumns, - "floating"); + nuggets.get(nuggets.size() - 1).newClient(Index.FACTORY.getIndexByRange(size / 2, size * 3 / 4), + subscribedColumns, "floating"); final Index.SequentialBuilder swissIndexBuilder = Index.FACTORY.getSequentialBuilder(); final long rangeSize = Math.max(1, size / 20); @@ -524,8 +500,8 @@ void createNuggetsForTableMaker(final Supplier
    makeTable) { } private class SharedProducerForAllClients extends TestHelper { - SharedProducerForAllClients(final int numProducerCoalesce, final int numConsumerCoalesce, - final int size, final int seed, final MutableInt numSteps) { + SharedProducerForAllClients(final int numProducerCoalesce, final int numConsumerCoalesce, final int size, + final int seed, final MutableInt numSteps) { super(numProducerCoalesce, numConsumerCoalesce, size, seed, numSteps); } @@ -537,10 +513,8 @@ void createNuggetsForTableMaker(final Supplier
    makeTable) { subscribedColumns.set(0, nugget.originalTable.getColumns().length); nugget.newClient(null, subscribedColumns, "full"); - nugget.newClient(Index.FACTORY.getIndexByRange(0, size / 10), subscribedColumns, - "header"); - nugget.newClient(Index.FACTORY.getIndexByRange(size / 2, size * 3L / 4), - subscribedColumns, "floating"); + nugget.newClient(Index.FACTORY.getIndexByRange(0, size / 10), subscribedColumns, "header"); + nugget.newClient(Index.FACTORY.getIndexByRange(size / 2, size * 3L / 4), subscribedColumns, "floating"); final Index.SequentialBuilder swissIndexBuilder = Index.FACTORY.getSequentialBuilder(); final long rangeSize = Math.max(1, size / 20); @@ -556,23 +530,19 @@ void createNuggetsForTableMaker(final Supplier
    makeTable) { public void testAppendIncremental() { final int MAX_STEPS = 100; final Consumer runOne = helper -> { - final int maxSteps = - MAX_STEPS * helper.numConsumerCoalesce * helper.numProducerCoalesce; + final int maxSteps = MAX_STEPS * helper.numConsumerCoalesce * helper.numProducerCoalesce; helper.runTest(() -> { final long lastKey = (Math.abs(helper.random.nextLong()) % 16) - + (helper.sourceTable.getIndex().nonempty() - ? helper.sourceTable.getIndex().lastKey() - : -1); + + (helper.sourceTable.getIndex().nonempty() ? helper.sourceTable.getIndex().lastKey() : -1); final ShiftAwareListener.Update update = new ShiftAwareListener.Update(); update.added = Index.CURRENT_FACTORY.getIndexByRange(lastKey + 1, - lastKey + Math.max(1, helper.size / maxSteps)); + lastKey + Math.max(1, helper.size / maxSteps)); update.removed = i(); update.modified = i(); update.shifted = IndexShiftData.EMPTY; update.modifiedColumnSet = ModifiedColumnSet.EMPTY; - LiveTableMonitor.DEFAULT - .runWithinUnitTestCycle(() -> GenerateTableUpdates.generateTableUpdates(update, + LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> GenerateTableUpdates.generateTableUpdates(update, helper.random, helper.sourceTable, helper.columnInfo)); }); }; @@ -580,8 +550,8 @@ public void testAppendIncremental() { for (final int size : new int[] {10, 100, 1000}) { for (final int numProducerCoalesce : new int[] {1, 10}) { for (final int numConsumerCoalesce : new int[] {1, 10}) { - runOne.accept(new OneProducerPerClient(numProducerCoalesce, numConsumerCoalesce, - size, 0, new MutableInt(MAX_STEPS))); + runOne.accept(new OneProducerPerClient(numProducerCoalesce, numConsumerCoalesce, size, 0, + new MutableInt(MAX_STEPS))); } } } @@ -590,12 +560,10 @@ public void testAppendIncremental() { public void testPrependIncremental() { final int MAX_STEPS = 100; final Consumer runOne = helper -> { - final int maxSteps = - MAX_STEPS * helper.numConsumerCoalesce * helper.numProducerCoalesce; + final int maxSteps = 
MAX_STEPS * helper.numConsumerCoalesce * helper.numProducerCoalesce; helper.runTest(() -> { - final long lastKey = helper.sourceTable.getIndex().nonempty() - ? helper.sourceTable.getIndex().lastKey() - : -1; + final long lastKey = + helper.sourceTable.getIndex().nonempty() ? helper.sourceTable.getIndex().lastKey() : -1; final ShiftAwareListener.Update update = new ShiftAwareListener.Update(); final int stepSize = Math.max(1, helper.size / maxSteps); update.added = Index.CURRENT_FACTORY.getIndexByRange(0, stepSize - 1); @@ -605,13 +573,11 @@ public void testPrependIncremental() { final IndexShiftData.Builder shifted = new IndexShiftData.Builder(); if (lastKey >= 0) { - shifted.shiftRange(0, lastKey, - stepSize + (Math.abs(helper.random.nextLong()) % 16)); + shifted.shiftRange(0, lastKey, stepSize + (Math.abs(helper.random.nextLong()) % 16)); } update.shifted = shifted.build(); - LiveTableMonitor.DEFAULT - .runWithinUnitTestCycle(() -> GenerateTableUpdates.generateTableUpdates(update, + LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> GenerateTableUpdates.generateTableUpdates(update, helper.random, helper.sourceTable, helper.columnInfo)); }); }; @@ -619,8 +585,8 @@ public void testPrependIncremental() { for (final int size : new int[] {10, 100, 1000}) { for (final int numProducerCoalesce : new int[] {1, 10}) { for (final int numConsumerCoalesce : new int[] {1, 10}) { - runOne.accept(new OneProducerPerClient(numProducerCoalesce, numConsumerCoalesce, - size, 0, new MutableInt(MAX_STEPS))); + runOne.accept(new OneProducerPerClient(numProducerCoalesce, numConsumerCoalesce, size, 0, + new MutableInt(MAX_STEPS))); } } } @@ -628,17 +594,16 @@ public void testPrependIncremental() { public void testRoundTripIncremental() { final Consumer runOne = helper -> { - helper.runTest( - () -> LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> GenerateTableUpdates - .generateShiftAwareTableUpdates(GenerateTableUpdates.DEFAULT_PROFILE, - helper.size, helper.random, 
helper.sourceTable, helper.columnInfo))); + helper.runTest(() -> LiveTableMonitor.DEFAULT.runWithinUnitTestCycle( + () -> GenerateTableUpdates.generateShiftAwareTableUpdates(GenerateTableUpdates.DEFAULT_PROFILE, + helper.size, helper.random, helper.sourceTable, helper.columnInfo))); }; for (final int size : new int[] {10, 100, 1000}) { for (final int numProducerCoalesce : new int[] {1, 10}) { for (final int numConsumerCoalesce : new int[] {1, 10}) { - runOne.accept(new OneProducerPerClient(numProducerCoalesce, numConsumerCoalesce, - size, 0, new MutableInt(100))); + runOne.accept(new OneProducerPerClient(numProducerCoalesce, numConsumerCoalesce, size, 0, + new MutableInt(100))); } } } @@ -647,23 +612,19 @@ public void testRoundTripIncremental() { public void testAppendIncrementalSharedProducer() { final int MAX_STEPS = 100; final Consumer runOne = helper -> { - final int maxSteps = - MAX_STEPS * helper.numConsumerCoalesce * helper.numProducerCoalesce; + final int maxSteps = MAX_STEPS * helper.numConsumerCoalesce * helper.numProducerCoalesce; helper.runTest(() -> { final long lastKey = (Math.abs(helper.random.nextLong()) % 16) - + (helper.sourceTable.getIndex().nonempty() - ? helper.sourceTable.getIndex().lastKey() - : -1); + + (helper.sourceTable.getIndex().nonempty() ? 
helper.sourceTable.getIndex().lastKey() : -1); final ShiftAwareListener.Update update = new ShiftAwareListener.Update(); update.added = Index.CURRENT_FACTORY.getIndexByRange(lastKey + 1, - lastKey + Math.max(1, helper.size / maxSteps)); + lastKey + Math.max(1, helper.size / maxSteps)); update.removed = i(); update.modified = i(); update.shifted = IndexShiftData.EMPTY; update.modifiedColumnSet = ModifiedColumnSet.EMPTY; - LiveTableMonitor.DEFAULT - .runWithinUnitTestCycle(() -> GenerateTableUpdates.generateTableUpdates(update, + LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> GenerateTableUpdates.generateTableUpdates(update, helper.random, helper.sourceTable, helper.columnInfo)); }); }; @@ -671,8 +632,8 @@ public void testAppendIncrementalSharedProducer() { for (final int size : new int[] {10, 100, 1000}) { for (final int numProducerCoalesce : new int[] {1, 2, 10}) { for (final int numConsumerCoalesce : new int[] {1, 2, 10}) { - runOne.accept(new SharedProducerForAllClients(numProducerCoalesce, - numConsumerCoalesce, size, 0, new MutableInt(MAX_STEPS))); + runOne.accept(new SharedProducerForAllClients(numProducerCoalesce, numConsumerCoalesce, size, 0, + new MutableInt(MAX_STEPS))); } } } @@ -681,12 +642,10 @@ public void testAppendIncrementalSharedProducer() { public void testPrependIncrementalSharedProducer() { final int MAX_STEPS = 100; final Consumer runOne = helper -> { - final int maxSteps = - MAX_STEPS * helper.numConsumerCoalesce * helper.numProducerCoalesce; + final int maxSteps = MAX_STEPS * helper.numConsumerCoalesce * helper.numProducerCoalesce; helper.runTest(() -> { - final long lastKey = helper.sourceTable.getIndex().nonempty() - ? helper.sourceTable.getIndex().lastKey() - : -1; + final long lastKey = + helper.sourceTable.getIndex().nonempty() ? 
helper.sourceTable.getIndex().lastKey() : -1; final ShiftAwareListener.Update update = new ShiftAwareListener.Update(); final int stepSize = Math.max(1, helper.size / maxSteps); update.added = Index.CURRENT_FACTORY.getIndexByRange(0, stepSize - 1); @@ -696,13 +655,11 @@ public void testPrependIncrementalSharedProducer() { final IndexShiftData.Builder shifted = new IndexShiftData.Builder(); if (lastKey >= 0) { - shifted.shiftRange(0, lastKey, - stepSize + (Math.abs(helper.random.nextLong()) % 16)); + shifted.shiftRange(0, lastKey, stepSize + (Math.abs(helper.random.nextLong()) % 16)); } update.shifted = shifted.build(); - LiveTableMonitor.DEFAULT - .runWithinUnitTestCycle(() -> GenerateTableUpdates.generateTableUpdates(update, + LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> GenerateTableUpdates.generateTableUpdates(update, helper.random, helper.sourceTable, helper.columnInfo)); }); }; @@ -710,8 +667,8 @@ public void testPrependIncrementalSharedProducer() { for (final int size : new int[] {10, 100, 1000}) { for (final int numProducerCoalsce : new int[] {1, 10}) { for (final int numConsumerCoalesce : new int[] {1, 10}) { - runOne.accept(new SharedProducerForAllClients(numProducerCoalsce, - numConsumerCoalesce, size, 0, new MutableInt(MAX_STEPS))); + runOne.accept(new SharedProducerForAllClients(numProducerCoalsce, numConsumerCoalesce, size, 0, + new MutableInt(MAX_STEPS))); } } } @@ -719,17 +676,16 @@ public void testPrependIncrementalSharedProducer() { public void testRoundTripIncrementalSharedProducer() { final Consumer runOne = helper -> { - helper.runTest( - () -> LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> GenerateTableUpdates - .generateShiftAwareTableUpdates(GenerateTableUpdates.DEFAULT_PROFILE, - helper.size, helper.random, helper.sourceTable, helper.columnInfo))); + helper.runTest(() -> LiveTableMonitor.DEFAULT.runWithinUnitTestCycle( + () -> GenerateTableUpdates.generateShiftAwareTableUpdates(GenerateTableUpdates.DEFAULT_PROFILE, + 
helper.size, helper.random, helper.sourceTable, helper.columnInfo))); }; for (final int size : new int[] {10, 100, 1000}) { for (final int numProducerCoalesce : new int[] {1, 10}) { for (final int numConsumerCoalesce : new int[] {1, 10}) { - runOne.accept(new SharedProducerForAllClients(numProducerCoalesce, - numConsumerCoalesce, size, 0, new MutableInt(100))); + runOne.accept(new SharedProducerForAllClients(numProducerCoalesce, numConsumerCoalesce, size, 0, + new MutableInt(100))); } } } @@ -738,8 +694,8 @@ public void testRoundTripIncrementalSharedProducer() { // These test mid-cycle subscription changes and snapshot content private abstract class SubscriptionChangingHelper extends SharedProducerForAllClients { - SubscriptionChangingHelper(final int numProducerCoalesce, final int numConsumerCoalesce, - final int size, final int seed, final MutableInt numSteps) { + SubscriptionChangingHelper(final int numProducerCoalesce, final int numConsumerCoalesce, final int size, + final int seed, final MutableInt numSteps) { super(numProducerCoalesce, numConsumerCoalesce, size, seed, numSteps); } @@ -754,9 +710,9 @@ void runTest() { for (int pt = 0; pt < numProducerCoalesce; ++pt) { maybeChangeSub(numSteps.intValue(), rt, pt); - LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> GenerateTableUpdates - .generateShiftAwareTableUpdates(GenerateTableUpdates.DEFAULT_PROFILE, - size, random, sourceTable, columnInfo)); + LiveTableMonitor.DEFAULT + .runWithinUnitTestCycle(() -> GenerateTableUpdates.generateShiftAwareTableUpdates( + GenerateTableUpdates.DEFAULT_PROFILE, size, random, sourceTable, columnInfo)); } // flush producer @@ -781,36 +737,32 @@ public void testColumnSubChange() { for (final int numProducerCoalesce : new int[] {1, 4}) { for (final int numConsumerCoalesce : new int[] {1, 4}) { for (int subProducerCoalesce = - 0; subProducerCoalesce < numProducerCoalesce; ++subProducerCoalesce) { + 0; subProducerCoalesce < numProducerCoalesce; ++subProducerCoalesce) { for (int 
subConsumerCoalesce = - 0; subConsumerCoalesce < numConsumerCoalesce; ++subConsumerCoalesce) { + 0; subConsumerCoalesce < numConsumerCoalesce; ++subConsumerCoalesce) { final int finalSubProducerCoalesce = subProducerCoalesce; final int finalSubConsumerCoalesce = subConsumerCoalesce; - new SubscriptionChangingHelper(numProducerCoalesce, numConsumerCoalesce, - size, 0, new MutableInt(4)) { + new SubscriptionChangingHelper(numProducerCoalesce, numConsumerCoalesce, size, 0, + new MutableInt(4)) { { for (final RemoteNugget nugget : nuggets) { final BitSet columns = new BitSet(); - columns.set(0, - nugget.originalTable.getColumns().length / 2); - nugget.clients.add(new RemoteClient( - Index.FACTORY.getIndexByRange(size / 5, 2 * size / 5), - columns, nugget.barrageMessageProducer, "sub-changer")); + columns.set(0, nugget.originalTable.getColumns().length / 2); + nugget.clients.add( + new RemoteClient(Index.FACTORY.getIndexByRange(size / 5, 2 * size / 5), + columns, nugget.barrageMessageProducer, "sub-changer")); } } void maybeChangeSub(final int step, final int rt, final int pt) { - if (step != 2 || rt != finalSubConsumerCoalesce - || pt != finalSubProducerCoalesce) { + if (step != 2 || rt != finalSubConsumerCoalesce || pt != finalSubProducerCoalesce) { return; } for (final RemoteNugget nugget : nuggets) { - final RemoteClient client = - nugget.clients.get(nugget.clients.size() - 1); + final RemoteClient client = nugget.clients.get(nugget.clients.size() - 1); final BitSet columns = new BitSet(); - final int numColumns = - nugget.originalTable.getColumns().length; + final int numColumns = nugget.originalTable.getColumns().length; columns.set(numColumns / 2, numColumns); client.setSubscribedColumns(columns); } @@ -828,13 +780,13 @@ public void testViewportChange() { for (final int numProducerCoalesce : new int[] {1, 4}) { for (final int numConsumerCoalesce : new int[] {1, 4}) { for (int subProducerCoalesce = - 0; subProducerCoalesce < numProducerCoalesce; 
++subProducerCoalesce) { + 0; subProducerCoalesce < numProducerCoalesce; ++subProducerCoalesce) { for (int subConsumerCoalesce = - 0; subConsumerCoalesce < numConsumerCoalesce; ++subConsumerCoalesce) { + 0; subConsumerCoalesce < numConsumerCoalesce; ++subConsumerCoalesce) { final int finalSubProducerCoalesce = 0; final int finalSubConsumerCoalesce = 1; - new SubscriptionChangingHelper(numProducerCoalesce, numConsumerCoalesce, - size, 0, new MutableInt(25)) { + new SubscriptionChangingHelper(numProducerCoalesce, numConsumerCoalesce, size, 0, + new MutableInt(25)) { @Override void createNuggetsForTableMaker(final Supplier
    makeTable) { final RemoteNugget nugget = new RemoteNugget(makeTable); @@ -842,20 +794,18 @@ void createNuggetsForTableMaker(final Supplier
    makeTable) { final BitSet columns = new BitSet(); columns.set(0, 4); - nugget.clients.add( - new RemoteClient(Index.FACTORY.getIndexByRange(0, size / 5), + nugget.clients.add(new RemoteClient(Index.FACTORY.getIndexByRange(0, size / 5), columns, nugget.barrageMessageProducer, "sub-changer")); } void maybeChangeSub(final int step, final int rt, final int pt) { if (step % 2 != 0 || rt != finalSubConsumerCoalesce - || pt != finalSubProducerCoalesce) { + || pt != finalSubProducerCoalesce) { return; } for (final RemoteNugget nugget : nuggets) { - final RemoteClient client = - nugget.clients.get(nugget.clients.size() - 1); + final RemoteClient client = nugget.clients.get(nugget.clients.size() - 1); final Index viewport = client.viewport.clone(); viewport.shiftInPlace(Math.max(size / 25, 1)); client.setViewport(viewport); @@ -874,32 +824,30 @@ public void testOverlappedColumnSubsChange() { for (final int numProducerCoalesce : new int[] {1, 4}) { for (final int numConsumerCoalesce : new int[] {1, 4}) { for (int subProducerCoalesce = - 0; subProducerCoalesce < numProducerCoalesce; ++subProducerCoalesce) { + 0; subProducerCoalesce < numProducerCoalesce; ++subProducerCoalesce) { for (int subConsumerCoalesce = - 0; subConsumerCoalesce < numConsumerCoalesce; ++subConsumerCoalesce) { + 0; subConsumerCoalesce < numConsumerCoalesce; ++subConsumerCoalesce) { final int finalSubProducerCoalesce = subProducerCoalesce; final int finalSubConsumerCoalesce = subConsumerCoalesce; - new SubscriptionChangingHelper(numProducerCoalesce, numConsumerCoalesce, - size, 0, new MutableInt(4)) { + new SubscriptionChangingHelper(numProducerCoalesce, numConsumerCoalesce, size, 0, + new MutableInt(4)) { { for (final RemoteNugget nugget : nuggets) { final BitSet columns = new BitSet(); columns.set(0, 3); - nugget.clients.add(new RemoteClient( - Index.FACTORY.getIndexByRange(size / 5, 2 * size / 5), - columns, nugget.barrageMessageProducer, "sub-changer")); + nugget.clients.add( + new 
RemoteClient(Index.FACTORY.getIndexByRange(size / 5, 2 * size / 5), + columns, nugget.barrageMessageProducer, "sub-changer")); } } void maybeChangeSub(final int step, final int rt, final int pt) { - if (step != 2 || rt != finalSubConsumerCoalesce - || pt != finalSubProducerCoalesce) { + if (step != 2 || rt != finalSubConsumerCoalesce || pt != finalSubProducerCoalesce) { return; } for (final RemoteNugget nugget : nuggets) { - final RemoteClient client = - nugget.clients.get(nugget.clients.size() - 1); + final RemoteClient client = nugget.clients.get(nugget.clients.size() - 1); final BitSet columns = new BitSet(); columns.set(1, 4); client.setSubscribedColumns(columns); @@ -914,52 +862,46 @@ void maybeChangeSub(final int step, final int rt, final int pt) { } public void testViewportSubscribeMidCycle() { - // This is a regression test for IDS-6392. It catches a race between when a subscription - // becomes active and + // This is a regression test for IDS-6392. It catches a race between when a subscription becomes active and // when the viewport becomes active post-snapshot. for (final int size : new int[] {10, 100, 1000}) { for (final int numProducerCoalesce : new int[] {2, 3, 4}) { for (final int numConsumerCoalesce : new int[] {1, 4}) { - new SubscriptionChangingHelper(numProducerCoalesce, numConsumerCoalesce, size, - 0, new MutableInt(4)) { + new SubscriptionChangingHelper(numProducerCoalesce, numConsumerCoalesce, size, 0, + new MutableInt(4)) { void maybeChangeSub(final int step, final int rt, final int pt) { if (step != 0 || rt != 0 || pt != 1) { - // Only subscribe after we have sent at least one update, but no - // need to subscribe again. + // Only subscribe after we have sent at least one update, but no need to subscribe + // again. 
return; } - nuggets.forEach( - (nugget) -> nugget.clients.forEach(RemoteClient::doSubscribe)); + nuggets.forEach((nugget) -> nugget.clients.forEach(RemoteClient::doSubscribe)); } @Override void createNuggetsForTableMaker(final Supplier
    makeTable) { if (!nuggets.isEmpty()) { - return; // we can only have a single nugget since they all share a - // single source table + return; // we can only have a single nugget since they all share a single source table } final RemoteNugget nugget = new RemoteNugget(makeTable) { @Override public void onGetSnapshot() { - LiveTableMonitor.DEFAULT.runWithinUnitTestCycle( - () -> GenerateTableUpdates.generateShiftAwareTableUpdates( - GenerateTableUpdates.DEFAULT_PROFILE, size, random, - sourceTable, columnInfo)); + LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> GenerateTableUpdates + .generateShiftAwareTableUpdates(GenerateTableUpdates.DEFAULT_PROFILE, size, + random, sourceTable, columnInfo)); } }; nuggets.add(nugget); - // we can only have viewport subscriptions or else we won't tickle the - // original bug + // we can only have viewport subscriptions or else we won't tickle the original bug final BitSet columns = new BitSet(); columns.set(0, 4); final boolean deferSubscription = true; - nugget.clients.add(new RemoteClient( - Index.FACTORY.getIndexByRange(size / 5, 2 * size / 5), columns, - nugget.barrageMessageProducer, "sub-changer", deferSubscription)); + nugget.clients.add(new RemoteClient(Index.FACTORY.getIndexByRange(size / 5, 2 * size / 5), + columns, nugget.barrageMessageProducer, "sub-changer", deferSubscription)); } }.runTest(); @@ -973,13 +915,13 @@ public void testOverlappingViewportChange() { for (final int numProducerCoalesce : new int[] {1, 4}) { for (final int numConsumerCoalesce : new int[] {1, 4}) { for (int subProducerCoalesce = - 0; subProducerCoalesce < numProducerCoalesce; ++subProducerCoalesce) { + 0; subProducerCoalesce < numProducerCoalesce; ++subProducerCoalesce) { for (int subConsumerCoalesce = - 0; subConsumerCoalesce < numConsumerCoalesce; ++subConsumerCoalesce) { + 0; subConsumerCoalesce < numConsumerCoalesce; ++subConsumerCoalesce) { final int finalSubProducerCoalesce = subProducerCoalesce; final int 
finalSubConsumerCoalesce = subConsumerCoalesce; - new SubscriptionChangingHelper(numProducerCoalesce, numConsumerCoalesce, - size, 0, new MutableInt(4)) { + new SubscriptionChangingHelper(numProducerCoalesce, numConsumerCoalesce, size, 0, + new MutableInt(4)) { @Override public void createNuggets() { super.createNuggets(); @@ -987,21 +929,19 @@ public void createNuggets() { for (final RemoteNugget nugget : nuggets) { final BitSet columns = new BitSet(); columns.set(0, 4); - nugget.clients.add(new RemoteClient( - Index.FACTORY.getIndexByRange(size / 5, 3 * size / 5), - columns, nugget.barrageMessageProducer, "sub-changer")); + nugget.clients.add( + new RemoteClient(Index.FACTORY.getIndexByRange(size / 5, 3 * size / 5), + columns, nugget.barrageMessageProducer, "sub-changer")); } } void maybeChangeSub(final int step, final int rt, final int pt) { - if (step != 2 || rt != finalSubConsumerCoalesce - || pt != finalSubProducerCoalesce) { + if (step != 2 || rt != finalSubConsumerCoalesce || pt != finalSubProducerCoalesce) { return; } for (final RemoteNugget nugget : nuggets) { - final RemoteClient client = - nugget.clients.get(nugget.clients.size() - 1); + final RemoteClient client = nugget.clients.get(nugget.clients.size() - 1); final Index viewport = client.viewport.clone(); viewport.shiftInPlace(size / 5); client.setViewport(viewport); @@ -1019,18 +959,15 @@ public void testSimultaneousSubscriptionChanges() { for (final int size : new int[] {10, 100, 1000}) { final int numProducerCoalesce = 8; final int numConsumerCoalesce = 8; - for (int subConsumerCoalesce = - 0; subConsumerCoalesce < numConsumerCoalesce; ++subConsumerCoalesce) { + for (int subConsumerCoalesce = 0; subConsumerCoalesce < numConsumerCoalesce; ++subConsumerCoalesce) { final int finalSubConsumerCoalesce = subConsumerCoalesce; - new SubscriptionChangingHelper(numProducerCoalesce, numConsumerCoalesce, size, 0, - new MutableInt(4)) { + new SubscriptionChangingHelper(numProducerCoalesce, numConsumerCoalesce, 
size, 0, new MutableInt(4)) { { for (final RemoteNugget nugget : nuggets) { final BitSet columns = new BitSet(); columns.set(0, 4); - nugget.clients.add(new RemoteClient( - Index.FACTORY.getIndexByRange(size / 5, 2 * size / 5), columns, - nugget.barrageMessageProducer, "sub-changer")); + nugget.clients.add(new RemoteClient(Index.FACTORY.getIndexByRange(size / 5, 2 * size / 5), + columns, nugget.barrageMessageProducer, "sub-changer")); } } @@ -1040,11 +977,10 @@ void maybeChangeSub(final int step, final int rt, final int pt) { } for (final RemoteNugget nugget : nuggets) { - final RemoteClient client = - nugget.clients.get(nugget.clients.size() - 1); + final RemoteClient client = nugget.clients.get(nugget.clients.size() - 1); final int firstKey = random.nextInt(size); client.setViewport(Index.FACTORY.getIndexByRange(firstKey, - firstKey + random.nextInt(size - firstKey))); + firstKey + random.nextInt(size - firstKey))); } } }.runTest(); @@ -1065,18 +1001,16 @@ public void testUsePrevOnSnapshot() { shiftBuilder.shiftRange(0, 12, -5); queryTable.notifyListeners(new ShiftAwareListener.Update( - Index.FACTORY.getEmptyIndex(), - Index.FACTORY.getEmptyIndex(), - Index.FACTORY.getEmptyIndex(), - shiftBuilder.build(), ModifiedColumnSet.EMPTY)); + Index.FACTORY.getEmptyIndex(), + Index.FACTORY.getEmptyIndex(), + Index.FACTORY.getEmptyIndex(), + shiftBuilder.build(), ModifiedColumnSet.EMPTY)); final BitSet cols = new BitSet(1); cols.set(0); - remoteClient.setValue( - remoteNugget.newClient(Index.FACTORY.getIndexByRange(0, 1), cols, "prevSnapshot")); + remoteClient.setValue(remoteNugget.newClient(Index.FACTORY.getIndexByRange(0, 1), cols, "prevSnapshot")); - // flush producer in the middle of the cycle -- but we need a different thread to - // usePrev + // flush producer in the middle of the cycle -- but we need a different thread to usePrev final Thread thread = new Thread(this::flushProducerTable); thread.start(); do { @@ -1092,8 +1026,7 @@ public void testUsePrevOnSnapshot() 
{ flushProducerTable(); // We expect two pending messages for our client: snapshot in prev and the shift update - Assert.equals(remoteClient.getValue().commandQueue.size(), - "remoteClient.getValue().commandQueue.size()", 2); + Assert.equals(remoteClient.getValue().commandQueue.size(), "remoteClient.getValue().commandQueue.size()", 2); remoteNugget.flushClientEvents(); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(liveTableRegistrar::refresh); @@ -1105,13 +1038,12 @@ public void testRegressModificationsInPrevView() { final BitSet allColumns = new BitSet(1); allColumns.set(0); - final QueryTable queryTable = - TstUtils.testRefreshingTable(i(5, 10, 12), c("intCol", 5, 10, 12)); + final QueryTable queryTable = TstUtils.testRefreshingTable(i(5, 10, 12), c("intCol", 5, 10, 12)); final RemoteNugget remoteNugget = new RemoteNugget(() -> queryTable); // Set original viewport. final RemoteClient remoteClient = - remoteNugget.newClient(Index.FACTORY.getIndexByRange(1, 2), allColumns, "prevSnapshot"); + remoteNugget.newClient(Index.FACTORY.getIndexByRange(1, 2), allColumns, "prevSnapshot"); // Obtain snapshot of original viewport. flushProducerTable(); @@ -1127,14 +1059,14 @@ public void testRegressModificationsInPrevView() { TstUtils.addToTable(queryTable, i(12), c("intCol", 13)); queryTable.notifyListeners(new ShiftAwareListener.Update( - Index.FACTORY.getEmptyIndex(), - Index.FACTORY.getEmptyIndex(), - Index.FACTORY.getIndexByValues(12), - IndexShiftData.EMPTY, ModifiedColumnSet.ALL)); + Index.FACTORY.getEmptyIndex(), + Index.FACTORY.getEmptyIndex(), + Index.FACTORY.getIndexByValues(12), + IndexShiftData.EMPTY, ModifiedColumnSet.ALL)); }); - // Do not allow the two updates to coalesce; we must force the consumer to apply the - // modification. (An allowed race.) + // Do not allow the two updates to coalesce; we must force the consumer to apply the modification. (An allowed + // race.) flushProducerTable(); // Add rows to shift modified row into new viewport. 
@@ -1142,16 +1074,16 @@ public void testRegressModificationsInPrevView() { TstUtils.removeRows(queryTable, i(5)); queryTable.notifyListeners(new ShiftAwareListener.Update( - Index.FACTORY.getEmptyIndex(), - Index.FACTORY.getIndexByValues(5), - Index.FACTORY.getEmptyIndex(), - IndexShiftData.EMPTY, ModifiedColumnSet.EMPTY)); + Index.FACTORY.getEmptyIndex(), + Index.FACTORY.getIndexByValues(5), + Index.FACTORY.getEmptyIndex(), + IndexShiftData.EMPTY, ModifiedColumnSet.EMPTY)); }); // Obtain snapshot of new viewport. (which will not include the modified row) flushProducerTable(); - Assert.equals(remoteClient.commandQueue.size(), - "remoteClient.getValue().commandQueue.size()", 3); // mod, add, snaphot + Assert.equals(remoteClient.commandQueue.size(), "remoteClient.getValue().commandQueue.size()", 3); // mod, add, + // snaphot remoteNugget.flushClientEvents(); LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(liveTableRegistrar::refresh); remoteNugget.validate("new viewport with modification"); @@ -1171,53 +1103,48 @@ private void testAllUniqueChunkTypeColumnSources(final boolean useDeephavenNulls final int MAX_STEPS = 100; for (int size : new int[] {10, 1000, 10000}) { SharedProducerForAllClients helper = - new SharedProducerForAllClients(1, 1, size, 0, new MutableInt(MAX_STEPS)) { - @Override - public void createTable() { - columnInfo = initColumnInfos( - new String[] {"longCol", "intCol", "objCol", "byteCol", "doubleCol", - "floatCol", "shortCol", "charCol", "boolCol", "strArrCol", - "datetimeCol"}, - new TstUtils.SortedLongGenerator(0, Long.MAX_VALUE - 1), - new TstUtils.IntGenerator(10, 100, 0.1), - new TstUtils.SetGenerator<>("a", "b", "c", "d"), // covers object - new TstUtils.ByteGenerator((byte) 0, (byte) 127, 0.1), - new TstUtils.DoubleGenerator(100.1, 200.1, 0.1), - new TstUtils.FloatGenerator(100.1f, 200.1f, 0.1), - new TstUtils.ShortGenerator((short) 0, (short) 20000, 0.1), - new TstUtils.CharGenerator('a', 'z', 0.1), - new TstUtils.BooleanGenerator(0.2), - 
new TstUtils.SetGenerator<>(new String[] {"a", "b"}, - new String[] {"0", "1"}, new String[] {}, null), - new TstUtils.UnsortedDateTimeGenerator( - DBTimeUtils.convertDateTime("2020-02-14T00:00:00 NY"), - DBTimeUtils.convertDateTime("2020-02-25T00:00:00 NY"))); - sourceTable = getTable(size / 4, random, columnInfo); - } - }; + new SharedProducerForAllClients(1, 1, size, 0, new MutableInt(MAX_STEPS)) { + @Override + public void createTable() { + columnInfo = initColumnInfos( + new String[] {"longCol", "intCol", "objCol", "byteCol", "doubleCol", "floatCol", + "shortCol", "charCol", "boolCol", "strArrCol", "datetimeCol"}, + new TstUtils.SortedLongGenerator(0, Long.MAX_VALUE - 1), + new TstUtils.IntGenerator(10, 100, 0.1), + new TstUtils.SetGenerator<>("a", "b", "c", "d"), // covers object + new TstUtils.ByteGenerator((byte) 0, (byte) 127, 0.1), + new TstUtils.DoubleGenerator(100.1, 200.1, 0.1), + new TstUtils.FloatGenerator(100.1f, 200.1f, 0.1), + new TstUtils.ShortGenerator((short) 0, (short) 20000, 0.1), + new TstUtils.CharGenerator('a', 'z', 0.1), + new TstUtils.BooleanGenerator(0.2), + new TstUtils.SetGenerator<>(new String[] {"a", "b"}, new String[] {"0", "1"}, + new String[] {}, null), + new TstUtils.UnsortedDateTimeGenerator( + DBTimeUtils.convertDateTime("2020-02-14T00:00:00 NY"), + DBTimeUtils.convertDateTime("2020-02-25T00:00:00 NY"))); + sourceTable = getTable(size / 4, random, columnInfo); + } + }; - final int maxSteps = - MAX_STEPS * helper.numConsumerCoalesce * helper.numProducerCoalesce; + final int maxSteps = MAX_STEPS * helper.numConsumerCoalesce * helper.numProducerCoalesce; helper.runTest(() -> { final long lastKey = (Math.abs(helper.random.nextLong()) % 16) - + (helper.sourceTable.isEmpty() ? -1 : helper.sourceTable.getIndex().lastKey()); + + (helper.sourceTable.isEmpty() ? 
-1 : helper.sourceTable.getIndex().lastKey()); final ShiftAwareListener.Update update = new ShiftAwareListener.Update(); final int stepSize = Math.max(1, helper.size / maxSteps); - update.added = - Index.CURRENT_FACTORY.getIndexByRange(lastKey + 1, lastKey + stepSize); + update.added = Index.CURRENT_FACTORY.getIndexByRange(lastKey + 1, lastKey + stepSize); update.removed = i(); if (helper.sourceTable.isEmpty()) { update.modified = i(); } else { - update.modified = Index.CURRENT_FACTORY - .getIndexByRange(Math.max(0, lastKey - stepSize), lastKey); + update.modified = Index.CURRENT_FACTORY.getIndexByRange(Math.max(0, lastKey - stepSize), lastKey); update.modified.retain(helper.sourceTable.getIndex()); } update.shifted = IndexShiftData.EMPTY; update.modifiedColumnSet = ModifiedColumnSet.EMPTY; - LiveTableMonitor.DEFAULT - .runWithinUnitTestCycle(() -> GenerateTableUpdates.generateTableUpdates(update, + LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> GenerateTableUpdates.generateTableUpdates(update, helper.random, helper.sourceTable, helper.columnInfo)); }); } @@ -1237,54 +1164,49 @@ private void testAllUniqueNonJavaSerRoundTripTypes(final boolean useDeephavenNul final int MAX_STEPS = 100; for (int size : new int[] {10, 1000, 10000}) { SharedProducerForAllClients helper = - new SharedProducerForAllClients(1, 1, size, 0, new MutableInt(MAX_STEPS)) { - @Override - public void createTable() { - columnInfo = initColumnInfos( - new String[] {"longCol", "intCol", "objCol", "byteCol", "doubleCol", - "floatCol", "shortCol", "charCol", "boolCol", "strCol", - "strArrCol", "datetimeCol"}, - new TstUtils.SortedLongGenerator(0, Long.MAX_VALUE - 1), - new TstUtils.IntGenerator(10, 100, 0.1), - new TstUtils.SetGenerator<>("a", "b", "c", "d"), // covers strings - new TstUtils.ByteGenerator((byte) 0, (byte) 127, 0.1), - new TstUtils.DoubleGenerator(100.1, 200.1, 0.1), - new TstUtils.FloatGenerator(100.1f, 200.1f, 0.1), - new TstUtils.ShortGenerator((short) 0, (short) 20000, 0.1), 
- new TstUtils.CharGenerator('a', 'z', 0.1), - new TstUtils.BooleanGenerator(0.2), - new TstUtils.StringGenerator(), - new TstUtils.SetGenerator<>(new String[] {"a", "b"}, - new String[] {"0", "1"}, new String[] {}, null), - new TstUtils.UnsortedDateTimeGenerator( - DBTimeUtils.convertDateTime("2020-02-14T00:00:00 NY"), - DBTimeUtils.convertDateTime("2020-02-25T00:00:00 NY"))); - sourceTable = getTable(size / 4, random, columnInfo); - } - }; + new SharedProducerForAllClients(1, 1, size, 0, new MutableInt(MAX_STEPS)) { + @Override + public void createTable() { + columnInfo = initColumnInfos( + new String[] {"longCol", "intCol", "objCol", "byteCol", "doubleCol", "floatCol", + "shortCol", "charCol", "boolCol", "strCol", "strArrCol", "datetimeCol"}, + new TstUtils.SortedLongGenerator(0, Long.MAX_VALUE - 1), + new TstUtils.IntGenerator(10, 100, 0.1), + new TstUtils.SetGenerator<>("a", "b", "c", "d"), // covers strings + new TstUtils.ByteGenerator((byte) 0, (byte) 127, 0.1), + new TstUtils.DoubleGenerator(100.1, 200.1, 0.1), + new TstUtils.FloatGenerator(100.1f, 200.1f, 0.1), + new TstUtils.ShortGenerator((short) 0, (short) 20000, 0.1), + new TstUtils.CharGenerator('a', 'z', 0.1), + new TstUtils.BooleanGenerator(0.2), + new TstUtils.StringGenerator(), + new TstUtils.SetGenerator<>(new String[] {"a", "b"}, new String[] {"0", "1"}, + new String[] {}, null), + new TstUtils.UnsortedDateTimeGenerator( + DBTimeUtils.convertDateTime("2020-02-14T00:00:00 NY"), + DBTimeUtils.convertDateTime("2020-02-25T00:00:00 NY"))); + sourceTable = getTable(size / 4, random, columnInfo); + } + }; - final int maxSteps = - MAX_STEPS * helper.numConsumerCoalesce * helper.numProducerCoalesce; + final int maxSteps = MAX_STEPS * helper.numConsumerCoalesce * helper.numProducerCoalesce; helper.runTest(() -> { final long lastKey = (Math.abs(helper.random.nextLong()) % 16) - + (helper.sourceTable.isEmpty() ? -1 : helper.sourceTable.getIndex().lastKey()); + + (helper.sourceTable.isEmpty() ? 
-1 : helper.sourceTable.getIndex().lastKey()); final ShiftAwareListener.Update update = new ShiftAwareListener.Update(); final int stepSize = Math.max(1, helper.size / maxSteps); - update.added = - Index.CURRENT_FACTORY.getIndexByRange(lastKey + 1, lastKey + stepSize); + update.added = Index.CURRENT_FACTORY.getIndexByRange(lastKey + 1, lastKey + stepSize); update.removed = i(); if (helper.sourceTable.isEmpty()) { update.modified = i(); } else { - update.modified = Index.CURRENT_FACTORY - .getIndexByRange(Math.max(0, lastKey - stepSize), lastKey); + update.modified = Index.CURRENT_FACTORY.getIndexByRange(Math.max(0, lastKey - stepSize), lastKey); update.modified.retain(helper.sourceTable.getIndex()); } update.shifted = IndexShiftData.EMPTY; update.modifiedColumnSet = ModifiedColumnSet.EMPTY; - LiveTableMonitor.DEFAULT - .runWithinUnitTestCycle(() -> GenerateTableUpdates.generateTableUpdates(update, + LiveTableMonitor.DEFAULT.runWithinUnitTestCycle(() -> GenerateTableUpdates.generateTableUpdates(update, helper.random, helper.sourceTable, helper.columnInfo)); }); } @@ -1296,8 +1218,7 @@ public static class DummyObserver implements StreamObserver receivedCommands; - DummyObserver(final BarrageMarshaller marshaller, - final Queue receivedCommands) { + DummyObserver(final BarrageMarshaller marshaller, final Queue receivedCommands) { this.marshaller = marshaller; this.receivedCommands = receivedCommands; } @@ -1307,13 +1228,12 @@ public void onNext(final BarrageStreamGenerator.View messageView) { try { messageView.forEachStream(inputStream -> { try (final BarrageProtoUtil.ExposedByteArrayOutputStream baos = - new BarrageProtoUtil.ExposedByteArrayOutputStream()) { + new BarrageProtoUtil.ExposedByteArrayOutputStream()) { ((Drainable) inputStream).drainTo(baos); inputStream.close(); - final BarrageMessage message = marshaller - .parse(new ByteArrayInputStream(baos.peekBuffer(), 0, baos.size())); - // we skip schema messages, but can't suppress without propagating - // 
something... + final BarrageMessage message = + marshaller.parse(new ByteArrayInputStream(baos.peekBuffer(), 0, baos.size())); + // we skip schema messages, but can't suppress without propagating something... if (message != null) { receivedCommands.add(message); } @@ -1338,12 +1258,12 @@ public void onCompleted() { } private static class BarrageMarshaller - extends FlightServiceGrpcBinding.BarrageDataMarshaller { + extends FlightServiceGrpcBinding.BarrageDataMarshaller { public BarrageMarshaller(final ChunkInputStreamGenerator.Options options, - final ChunkType[] columnChunkTypes, - final Class[] columnTypes, - final Class[] componentTypes, - final BarrageMessageConsumer.StreamReader streamReader) { + final ChunkType[] columnChunkTypes, + final Class[] columnTypes, + final Class[] componentTypes, + final BarrageMessageConsumer.StreamReader streamReader) { super(options, columnChunkTypes, columnTypes, componentTypes, streamReader); } } diff --git a/grpc-api/src/test/java/io/deephaven/grpc_api/table/ExportTableUpdateListenerTest.java b/grpc-api/src/test/java/io/deephaven/grpc_api/table/ExportTableUpdateListenerTest.java index c6143568f13..32a68c14f3b 100644 --- a/grpc-api/src/test/java/io/deephaven/grpc_api/table/ExportTableUpdateListenerTest.java +++ b/grpc-api/src/test/java/io/deephaven/grpc_api/table/ExportTableUpdateListenerTest.java @@ -63,8 +63,7 @@ public void tearDown() { @Test public void testLifeCycleStaticTable() { - final ExportedTableUpdateListener listener = - new ExportedTableUpdateListener(session, observer); + final ExportedTableUpdateListener listener = new ExportedTableUpdateListener(session, observer); try (final SafeCloseable scope = LivenessScopeStack.open()) { session.addExportListener(listener); } @@ -89,8 +88,7 @@ public void testRefreshStaticTable() { final SessionState.ExportObject t1 = session.newServerSideExport(src); // now add the listener - final ExportedTableUpdateListener listener = - new ExportedTableUpdateListener(session, 
observer); + final ExportedTableUpdateListener listener = new ExportedTableUpdateListener(session, observer); try (final SafeCloseable scope = LivenessScopeStack.open()) { session.addExportListener(listener); } @@ -105,8 +103,7 @@ public void testRefreshStaticTable() { @Test public void testLifeCycleTickingTable() { - final ExportedTableUpdateListener listener = - new ExportedTableUpdateListener(session, observer); + final ExportedTableUpdateListener listener = new ExportedTableUpdateListener(session, observer); try (final SafeCloseable scope = LivenessScopeStack.open()) { session.addExportListener(listener); } @@ -142,8 +139,7 @@ public void testRefreshTickingTable() { } // now add the listener - final ExportedTableUpdateListener listener = - new ExportedTableUpdateListener(session, observer); + final ExportedTableUpdateListener listener = new ExportedTableUpdateListener(session, observer); try (final SafeCloseable scope = LivenessScopeStack.open()) { session.addExportListener(listener); } @@ -169,8 +165,7 @@ public void testSessionClose() { final SessionState.ExportObject t1 = session.newServerSideExport(src); // now add the listener - final ExportedTableUpdateListener listener = - new ExportedTableUpdateListener(session, observer); + final ExportedTableUpdateListener listener = new ExportedTableUpdateListener(session, observer); try (final SafeCloseable scope = LivenessScopeStack.open()) { session.addExportListener(listener); } @@ -203,8 +198,7 @@ public void testPropagatesError() { } // now add the listener - final ExportedTableUpdateListener listener = - new ExportedTableUpdateListener(session, observer); + final ExportedTableUpdateListener listener = new ExportedTableUpdateListener(session, observer); try (final SafeCloseable scope = LivenessScopeStack.open()) { session.addExportListener(listener); } @@ -220,11 +214,9 @@ public void testPropagatesError() { final Ticket updateId = msg.getExportId(); Assert.equals(updateId, "updateId", t1.getExportId(), 
"t1.getExportId()"); Assert.eq(msg.getSize(), "msg.getSize()", 42); - Assert.eqFalse(msg.getUpdateFailureMessage().isEmpty(), - "msg.getUpdateFailureMessage().isEmpty()"); + Assert.eqFalse(msg.getUpdateFailureMessage().isEmpty(), "msg.getUpdateFailureMessage().isEmpty()"); - // validate that our error is not directly embedded in the update (that would be a security - // concern) + // validate that our error is not directly embedded in the update (that would be a security concern) Assert.eqFalse(msg.getUpdateFailureMessage().contains("awful"), "msg.contains('awful')"); } @@ -238,8 +230,7 @@ public void testListenerClosed() { } // now add the listener - final ExportedTableUpdateListener listener = - new ExportedTableUpdateListener(session, observer); + final ExportedTableUpdateListener listener = new ExportedTableUpdateListener(session, observer); try (final SafeCloseable scope = LivenessScopeStack.open()) { session.addExportListener(listener); } @@ -260,8 +251,7 @@ public void testListenerClosed() { Assert.eq(observer.countPostComplete, "observer.countPostComplete", 1); Assert.eqFalse(src.hasListeners(), "src.hasListeners()"); - // the actual ExportedTableUpdateListener should be "live", and should no longer be - // listening + // the actual ExportedTableUpdateListener should be "live", and should no longer be listening addRowsToSource(src, 2); expectNoMessage(); @@ -274,8 +264,7 @@ public void testTableSizeUsesPrev() { final MutableObject> t1 = new MutableObject<>(); // now add the listener - final ExportedTableUpdateListener listener = - new ExportedTableUpdateListener(session, observer); + final ExportedTableUpdateListener listener = new ExportedTableUpdateListener(session, observer); try (final SafeCloseable scope = LivenessScopeStack.open()) { session.addExportListener(listener); } @@ -286,8 +275,7 @@ public void testTableSizeUsesPrev() { // export mid-tick liveTableMonitor.runWithinUnitTestCycle(() -> { final ShiftAwareListener.Update update = new 
ShiftAwareListener.Update(); - update.added = Index.FACTORY.getIndexByRange(src.getIndex().lastKey() + 1, - src.getIndex().lastKey() + 42); + update.added = Index.FACTORY.getIndexByRange(src.getIndex().lastKey() + 1, src.getIndex().lastKey() + 42); update.removed = update.modified = i(); update.modifiedColumnSet = ModifiedColumnSet.EMPTY; update.shifted = IndexShiftData.EMPTY; @@ -316,8 +304,8 @@ public void testTableSizeUsesPrev() { private void addRowsToSource(final QueryTable src, final long nRows) { liveTableMonitor.runWithinUnitTestCycle(() -> { final ShiftAwareListener.Update update = new ShiftAwareListener.Update(); - update.added = Index.FACTORY.getIndexByRange(src.getIndex().lastKey() + 1, - src.getIndex().lastKey() + nRows); + update.added = + Index.FACTORY.getIndexByRange(src.getIndex().lastKey() + 1, src.getIndex().lastKey() + nRows); update.removed = update.modified = i(); update.modifiedColumnSet = ModifiedColumnSet.EMPTY; update.shifted = IndexShiftData.EMPTY; @@ -332,8 +320,7 @@ private void expectSizes(final Ticket exportId, final long... 
sizes) { final Ticket updateId = msg.getExportId(); Assert.equals(updateId, "updateId", exportId, "exportId"); Assert.eq(msg.getSize(), "msg.getSize()", size); - Assert.eqTrue(msg.getUpdateFailureMessage().isEmpty(), - "msg.getUpdateFailureMessage().isEmpty()"); + Assert.eqTrue(msg.getUpdateFailureMessage().isEmpty(), "msg.getUpdateFailureMessage().isEmpty()"); } } @@ -348,12 +335,11 @@ public class TestSessionState extends SessionState { public TestSessionState() { super(scheduler, AUTH_CONTEXT); initializeExpiration(new SessionService.TokenExpiration(UUID.randomUUID(), - DBTimeUtils.nanosToTime(Long.MAX_VALUE), this)); + DBTimeUtils.nanosToTime(Long.MAX_VALUE), this)); } } - public static class QueuingResponseObserver - implements StreamObserver { + public static class QueuingResponseObserver implements StreamObserver { boolean complete = false; long countPostComplete = 0; Queue msgQueue = new ArrayDeque<>(); diff --git a/grpc-api/src/test/java/io/deephaven/grpc_api/table/ops/filter/AbstractNormalizingFilterTest.java b/grpc-api/src/test/java/io/deephaven/grpc_api/table/ops/filter/AbstractNormalizingFilterTest.java index 414abdf9a90..6fb28fee490 100644 --- a/grpc-api/src/test/java/io/deephaven/grpc_api/table/ops/filter/AbstractNormalizingFilterTest.java +++ b/grpc-api/src/test/java/io/deephaven/grpc_api/table/ops/filter/AbstractNormalizingFilterTest.java @@ -13,8 +13,7 @@ protected void assertUnchanged(String message, Condition input) { protected void assertFilterEquals(String message, Condition input, Condition output) { Condition actual = execute(input); if (!output.equals(actual)) { - fail(message + " expected: " + FilterPrinter.print(output) + " but was: " - + FilterPrinter.print(actual)); + fail(message + " expected: " + FilterPrinter.print(output) + " but was: " + FilterPrinter.print(actual)); } assertEquals(message, output, actual); } diff --git a/grpc-api/src/test/java/io/deephaven/grpc_api/table/ops/filter/ConvertInvalidInExpressionsTest.java 
b/grpc-api/src/test/java/io/deephaven/grpc_api/table/ops/filter/ConvertInvalidInExpressionsTest.java index ea2f5005450..37ddd75cef5 100644 --- a/grpc-api/src/test/java/io/deephaven/grpc_api/table/ops/filter/ConvertInvalidInExpressionsTest.java +++ b/grpc-api/src/test/java/io/deephaven/grpc_api/table/ops/filter/ConvertInvalidInExpressionsTest.java @@ -11,18 +11,18 @@ public class ConvertInvalidInExpressionsTest extends AbstractNormalizingFilterTe @Test public void testConvertInvalidInExpressions() { assertUnchanged("already correct", - in("ColumnA", 1)); + in("ColumnA", 1)); assertUnchanged("already correct", - in("ColumnA", 1, 2)); + in("ColumnA", 1, 2)); assertFilterEquals("literals on both sides", - in(literal(1), literal(2)), - NormalizeFilterUtil.doComparison(CompareCondition.CompareOperation.EQUALS, - CaseSensitivity.MATCH_CASE, literal(1), literal(2))); + in(literal(1), literal(2)), + NormalizeFilterUtil.doComparison(CompareCondition.CompareOperation.EQUALS, CaseSensitivity.MATCH_CASE, + literal(1), literal(2))); assertFilterEquals("references on both sides", - in(reference("ColumnA"), reference("ColumnB")), - NormalizeFilterUtil.doComparison(CompareCondition.CompareOperation.EQUALS, - CaseSensitivity.MATCH_CASE, reference("ColumnA"), reference("ColumnB"))); + in(reference("ColumnA"), reference("ColumnB")), + NormalizeFilterUtil.doComparison(CompareCondition.CompareOperation.EQUALS, CaseSensitivity.MATCH_CASE, + reference("ColumnA"), reference("ColumnB"))); } @Override diff --git a/grpc-api/src/test/java/io/deephaven/grpc_api/table/ops/filter/FilterPrinterTest.java b/grpc-api/src/test/java/io/deephaven/grpc_api/table/ops/filter/FilterPrinterTest.java index ecd92707388..ab97d73e603 100644 --- a/grpc-api/src/test/java/io/deephaven/grpc_api/table/ops/filter/FilterPrinterTest.java +++ b/grpc-api/src/test/java/io/deephaven/grpc_api/table/ops/filter/FilterPrinterTest.java @@ -12,8 +12,8 @@ public void printNoEscape() { // simple double -> string and back again 
assertSameValue(1.5); - // make sure that if a "double" is passed instead of a more correct integer type, we still - // print something that matches + // make sure that if a "double" is passed instead of a more correct integer type, we still print something that + // matches assertSameValue(2.0); // other cases, make sure we match plain java expectations @@ -23,8 +23,7 @@ public void printNoEscape() { Math.E, Math.PI, Double.MAX_VALUE, Double.MIN_VALUE, Double.MIN_NORMAL, Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, Double.NaN}) { - // ensure the rendered value turns back into a double and equals _exactly_ the original - // value + // ensure the rendered value turns back into a double and equals _exactly_ the original value assertSameValue(val); } @@ -66,15 +65,14 @@ private static void assertSameValue(double expected) { assertEquals(Double.toString(expected), str); } else { assertEquals("Provided value should have no decimal component " + expected, 0, - expected - (long) expected, 0); + expected - (long) expected, 0); assertEquals(Long.toString((long) expected), str); } } } private static void assertSameValue(long expected) { - assertTrue("Must be in the range that a double value can represent", - Math.abs(expected) < (1L << 53)); + assertTrue("Must be in the range that a double value can represent", Math.abs(expected) < (1L << 53)); Literal literal = lit(expected); String str = FilterPrinter.printNoEscape(literal); diff --git a/grpc-api/src/test/java/io/deephaven/grpc_api/table/ops/filter/FilterTestUtils.java b/grpc-api/src/test/java/io/deephaven/grpc_api/table/ops/filter/FilterTestUtils.java index 9ab81d3c897..dd3305562d2 100644 --- a/grpc-api/src/test/java/io/deephaven/grpc_api/table/ops/filter/FilterTestUtils.java +++ b/grpc-api/src/test/java/io/deephaven/grpc_api/table/ops/filter/FilterTestUtils.java @@ -24,13 +24,11 @@ public static Condition not(Condition filterDescriptor) { public static Value reference(String columnName) { - return 
Value.newBuilder() - .setReference(Reference.newBuilder().setColumnName(columnName).build()).build(); + return Value.newBuilder().setReference(Reference.newBuilder().setColumnName(columnName).build()).build(); } public static Value literal(String stringValue) { - return Value.newBuilder() - .setLiteral(Literal.newBuilder().setStringValue(stringValue).build()).build(); + return Value.newBuilder().setLiteral(Literal.newBuilder().setStringValue(stringValue).build()).build(); } public static List literals(String... stringValues) { @@ -38,8 +36,7 @@ public static List literals(String... stringValues) { } public static Value literal(long longValue) { - return Value.newBuilder().setLiteral(Literal.newBuilder().setLongValue(longValue).build()) - .build(); + return Value.newBuilder().setLiteral(Literal.newBuilder().setLongValue(longValue).build()).build(); } public static Value literal(int intValue) { @@ -47,67 +44,59 @@ public static Value literal(int intValue) { } public static Value literal(double doubleValue) { - return Value.newBuilder() - .setLiteral(Literal.newBuilder().setDoubleValue(doubleValue).build()).build(); + return Value.newBuilder().setLiteral(Literal.newBuilder().setDoubleValue(doubleValue).build()).build(); } public static List literals(int... intValues) { - return IntStream.of(intValues).mapToObj(i -> literal((double) i)) - .collect(Collectors.toList()); + return IntStream.of(intValues).mapToObj(i -> literal((double) i)).collect(Collectors.toList()); } public static List literals(double... 
doubleValues) { - return DoubleStream.of(doubleValues).mapToObj(FilterTestUtils::literal) - .collect(Collectors.toList()); + return DoubleStream.of(doubleValues).mapToObj(FilterTestUtils::literal).collect(Collectors.toList()); } public static Condition eq(String columnName, int value) { - return NormalizeFilterUtil.doComparison(CompareCondition.CompareOperation.EQUALS, - CaseSensitivity.MATCH_CASE, reference(columnName), literal(value)); + return NormalizeFilterUtil.doComparison(CompareCondition.CompareOperation.EQUALS, CaseSensitivity.MATCH_CASE, + reference(columnName), literal(value)); } public static Condition notEq(String columnName, int value) { return NormalizeFilterUtil.doComparison(CompareCondition.CompareOperation.NOT_EQUALS, - CaseSensitivity.MATCH_CASE, reference(columnName), literal(value)); + CaseSensitivity.MATCH_CASE, reference(columnName), literal(value)); } public static Condition in(String columnName, int... values) { - return NormalizeFilterUtil.doIn(reference(columnName), literals(values), - CaseSensitivity.MATCH_CASE, MatchType.REGULAR); + return NormalizeFilterUtil.doIn(reference(columnName), literals(values), CaseSensitivity.MATCH_CASE, + MatchType.REGULAR); } public static Condition in(Value lhs, Value... rhs) { - return NormalizeFilterUtil.doIn(lhs, Arrays.asList(rhs), CaseSensitivity.MATCH_CASE, - MatchType.REGULAR); + return NormalizeFilterUtil.doIn(lhs, Arrays.asList(rhs), CaseSensitivity.MATCH_CASE, MatchType.REGULAR); } public static Condition inICase(Value lhs, Value... rhs) { - return NormalizeFilterUtil.doIn(lhs, Arrays.asList(rhs), CaseSensitivity.IGNORE_CASE, - MatchType.REGULAR); + return NormalizeFilterUtil.doIn(lhs, Arrays.asList(rhs), CaseSensitivity.IGNORE_CASE, MatchType.REGULAR); } public static Condition notIn(String columnName, int... 
values) { - return NormalizeFilterUtil.doIn(reference(columnName), literals(values), - CaseSensitivity.MATCH_CASE, MatchType.INVERTED); + return NormalizeFilterUtil.doIn(reference(columnName), literals(values), CaseSensitivity.MATCH_CASE, + MatchType.INVERTED); } public static Condition notIn(Value lhs, Value... rhs) { - return NormalizeFilterUtil.doIn(lhs, Arrays.asList(rhs), CaseSensitivity.MATCH_CASE, - MatchType.INVERTED); + return NormalizeFilterUtil.doIn(lhs, Arrays.asList(rhs), CaseSensitivity.MATCH_CASE, MatchType.INVERTED); } public static Condition notInICase(Value lhs, Value... rhs) { - return NormalizeFilterUtil.doIn(lhs, Arrays.asList(rhs), CaseSensitivity.IGNORE_CASE, - MatchType.INVERTED); + return NormalizeFilterUtil.doIn(lhs, Arrays.asList(rhs), CaseSensitivity.IGNORE_CASE, MatchType.INVERTED); } - public static Condition compare(CompareCondition.CompareOperation operation, Value lhs, - Value rhs) { + public static Condition compare(CompareCondition.CompareOperation operation, Value lhs, Value rhs) { return NormalizeFilterUtil.doComparison(operation, CaseSensitivity.MATCH_CASE, lhs, rhs); } - public static Condition compare(CompareCondition.CompareOperation operation, - CaseSensitivity caseSensitivity, Value lhs, Value rhs) { + public static Condition compare(CompareCondition.CompareOperation operation, CaseSensitivity caseSensitivity, + Value lhs, Value rhs) { return NormalizeFilterUtil.doComparison(operation, caseSensitivity, lhs, rhs); } diff --git a/grpc-api/src/test/java/io/deephaven/grpc_api/table/ops/filter/FlipNonReferenceMatchExpressionTest.java b/grpc-api/src/test/java/io/deephaven/grpc_api/table/ops/filter/FlipNonReferenceMatchExpressionTest.java index 1295299066d..c28e4fba2d6 100644 --- a/grpc-api/src/test/java/io/deephaven/grpc_api/table/ops/filter/FlipNonReferenceMatchExpressionTest.java +++ b/grpc-api/src/test/java/io/deephaven/grpc_api/table/ops/filter/FlipNonReferenceMatchExpressionTest.java @@ -16,34 +16,33 @@ public class 
FlipNonReferenceMatchExpressionTest extends AbstractNormalizingFilt @Test public void testFlipNonReferenceMatchExpression() { assertUnchanged("correct form", - in("ColumnA", 1, 2)); + in("ColumnA", 1, 2)); assertUnchanged("correct form", - in("ColumnA", 1)); + in("ColumnA", 1)); assertFilterEquals("all literals", - NormalizeFilterUtil.doIn(literal(1), Arrays.asList(literal(2), literal(3)), - CaseSensitivity.MATCH_CASE, MatchType.REGULAR), - or( - NormalizeFilterUtil.doIn(literal(2), Collections.singletonList(literal(1)), - CaseSensitivity.MATCH_CASE, MatchType.REGULAR), - NormalizeFilterUtil.doIn(literal(3), Collections.singletonList(literal(1)), - CaseSensitivity.MATCH_CASE, MatchType.REGULAR))); + NormalizeFilterUtil.doIn(literal(1), Arrays.asList(literal(2), literal(3)), CaseSensitivity.MATCH_CASE, + MatchType.REGULAR), + or( + NormalizeFilterUtil.doIn(literal(2), Collections.singletonList(literal(1)), + CaseSensitivity.MATCH_CASE, MatchType.REGULAR), + NormalizeFilterUtil.doIn(literal(3), Collections.singletonList(literal(1)), + CaseSensitivity.MATCH_CASE, MatchType.REGULAR))); assertFilterEquals("reference on right", - NormalizeFilterUtil.doIn(literal(1), - Arrays.asList(reference("ColumnA"), literal(4), literal(5)), - CaseSensitivity.MATCH_CASE, MatchType.REGULAR), - or( - in("ColumnA", 1), - NormalizeFilterUtil.doIn(literal(4), Collections.singletonList(literal(1)), - CaseSensitivity.MATCH_CASE, MatchType.REGULAR), - NormalizeFilterUtil.doIn(literal(5), Collections.singletonList(literal(1)), - CaseSensitivity.MATCH_CASE, MatchType.REGULAR))); + NormalizeFilterUtil.doIn(literal(1), Arrays.asList(reference("ColumnA"), literal(4), literal(5)), + CaseSensitivity.MATCH_CASE, MatchType.REGULAR), + or( + in("ColumnA", 1), + NormalizeFilterUtil.doIn(literal(4), Collections.singletonList(literal(1)), + CaseSensitivity.MATCH_CASE, MatchType.REGULAR), + NormalizeFilterUtil.doIn(literal(5), Collections.singletonList(literal(1)), + CaseSensitivity.MATCH_CASE, 
MatchType.REGULAR))); assertFilterEquals("reference on right, no OR required", - NormalizeFilterUtil.doIn(literal(1), Collections.singletonList(reference("ColumnA")), - CaseSensitivity.MATCH_CASE, MatchType.REGULAR), - in("ColumnA", 1)); + NormalizeFilterUtil.doIn(literal(1), Collections.singletonList(reference("ColumnA")), + CaseSensitivity.MATCH_CASE, MatchType.REGULAR), + in("ColumnA", 1)); } @Override diff --git a/grpc-api/src/test/java/io/deephaven/grpc_api/table/ops/filter/MakeExpressionsNullSafeTest.java b/grpc-api/src/test/java/io/deephaven/grpc_api/table/ops/filter/MakeExpressionsNullSafeTest.java index f9922ccc397..7be98503b44 100644 --- a/grpc-api/src/test/java/io/deephaven/grpc_api/table/ops/filter/MakeExpressionsNullSafeTest.java +++ b/grpc-api/src/test/java/io/deephaven/grpc_api/table/ops/filter/MakeExpressionsNullSafeTest.java @@ -12,23 +12,21 @@ public class MakeExpressionsNullSafeTest extends AbstractNormalizingFilterTest { @Test public void testMakeExpressionsNullSafe() { assertUnchanged("doesnt affect EQ", - NormalizeFilterUtil.doComparison(CompareCondition.CompareOperation.EQUALS, - CaseSensitivity.MATCH_CASE, reference("ColumnA"), literal("A"))); + NormalizeFilterUtil.doComparison(CompareCondition.CompareOperation.EQUALS, CaseSensitivity.MATCH_CASE, + reference("ColumnA"), literal("A"))); assertFilterEquals("add null checks when implicit invoke is needed", - NormalizeFilterUtil.doComparison(CompareCondition.CompareOperation.EQUALS, - CaseSensitivity.IGNORE_CASE, reference("ColumnA"), reference("ColumnB")), - or( - and( - NormalizeFilterUtil - .doIsNull(Reference.newBuilder().setColumnName("ColumnA").build()), - NormalizeFilterUtil - .doIsNull(Reference.newBuilder().setColumnName("ColumnB").build())), - and( - not( - NormalizeFilterUtil - .doIsNull(Reference.newBuilder().setColumnName("ColumnA").build())), - invoke("equalsIgnoreCase", reference("ColumnA"), reference("ColumnB"))))); + 
NormalizeFilterUtil.doComparison(CompareCondition.CompareOperation.EQUALS, CaseSensitivity.IGNORE_CASE, + reference("ColumnA"), reference("ColumnB")), + or( + and( + NormalizeFilterUtil.doIsNull(Reference.newBuilder().setColumnName("ColumnA").build()), + NormalizeFilterUtil.doIsNull(Reference.newBuilder().setColumnName("ColumnB").build())), + and( + not( + NormalizeFilterUtil + .doIsNull(Reference.newBuilder().setColumnName("ColumnA").build())), + invoke("equalsIgnoreCase", reference("ColumnA"), reference("ColumnB"))))); } @Override diff --git a/grpc-api/src/test/java/io/deephaven/grpc_api/table/ops/filter/MergeNestedBinaryOperationsTest.java b/grpc-api/src/test/java/io/deephaven/grpc_api/table/ops/filter/MergeNestedBinaryOperationsTest.java index 92eecc2775a..462f277ca8f 100644 --- a/grpc-api/src/test/java/io/deephaven/grpc_api/table/ops/filter/MergeNestedBinaryOperationsTest.java +++ b/grpc-api/src/test/java/io/deephaven/grpc_api/table/ops/filter/MergeNestedBinaryOperationsTest.java @@ -10,58 +10,58 @@ public class MergeNestedBinaryOperationsTest extends AbstractNormalizingFilterTe @Test public void testMergeNestedBinaryOperations() { assertUnchanged("don't merge AND and OR", - and( - or( - eq("ColumnB", 3), - eq("ColumnA", 4)), - eq("ColumnA", 1))); - assertUnchanged("don't merge AND and OR", - or( and( - eq("ColumnB", 3), - eq("ColumnA", 4)), - eq("ColumnA", 1))); + or( + eq("ColumnB", 3), + eq("ColumnA", 4)), + eq("ColumnA", 1))); + assertUnchanged("don't merge AND and OR", + or( + and( + eq("ColumnB", 3), + eq("ColumnA", 4)), + eq("ColumnA", 1))); assertFilterEquals("merge ANDs", - and( - eq("ColumnA", 3), and( - eq("ColumnB", 3), - eq("ColumnC", 3))), - and( - eq("ColumnA", 3), - eq("ColumnB", 3), - eq("ColumnC", 3))); + eq("ColumnA", 3), + and( + eq("ColumnB", 3), + eq("ColumnC", 3))), + and( + eq("ColumnA", 3), + eq("ColumnB", 3), + eq("ColumnC", 3))); assertFilterEquals("merge ANDs", - and( and( - eq("ColumnA", 3), - eq("ColumnB", 3)), - eq("ColumnC", 
3)), - and( - eq("ColumnA", 3), - eq("ColumnB", 3), - eq("ColumnC", 3))); + and( + eq("ColumnA", 3), + eq("ColumnB", 3)), + eq("ColumnC", 3)), + and( + eq("ColumnA", 3), + eq("ColumnB", 3), + eq("ColumnC", 3))); assertFilterEquals("merge ORs", - or( - eq("ColumnA", 3), or( - eq("ColumnB", 3), - eq("ColumnC", 3))), - or( - eq("ColumnA", 3), - eq("ColumnB", 3), - eq("ColumnC", 3))); + eq("ColumnA", 3), + or( + eq("ColumnB", 3), + eq("ColumnC", 3))), + or( + eq("ColumnA", 3), + eq("ColumnB", 3), + eq("ColumnC", 3))); assertFilterEquals("merge ANDs", - or( or( - eq("ColumnA", 3), - eq("ColumnB", 3)), - eq("ColumnC", 3)), - or( - eq("ColumnA", 3), - eq("ColumnB", 3), - eq("ColumnC", 3))); + or( + eq("ColumnA", 3), + eq("ColumnB", 3)), + eq("ColumnC", 3)), + or( + eq("ColumnA", 3), + eq("ColumnB", 3), + eq("ColumnC", 3))); } @Override diff --git a/grpc-api/src/test/java/io/deephaven/grpc_api/table/ops/filter/NormalizeNotsTest.java b/grpc-api/src/test/java/io/deephaven/grpc_api/table/ops/filter/NormalizeNotsTest.java index 85b9bb9e2aa..44e968cac9d 100644 --- a/grpc-api/src/test/java/io/deephaven/grpc_api/table/ops/filter/NormalizeNotsTest.java +++ b/grpc-api/src/test/java/io/deephaven/grpc_api/table/ops/filter/NormalizeNotsTest.java @@ -12,84 +12,79 @@ public void testNormalizeNots() { // this test effectively tests FilterDescriptor.not(), but also that the visitor // correctly calls it on the tree assertFilterEquals("two nots around a simple expression", - not(not(invoke("foo", reference("ColumnA")))), - invoke("foo", reference("ColumnA"))); + not(not(invoke("foo", reference("ColumnA")))), + invoke("foo", reference("ColumnA"))); assertFilterEquals("two nots around a simple expression", - not(not(eq("foo", 1))), - eq("foo", 1)); + not(not(eq("foo", 1))), + eq("foo", 1)); assertFilterEquals("two nots within a tree", - not(and( - not(or( - invoke("methodA", reference("ColumnA")), // invoke used since it can't be - // rewritten to handle a NOT - invoke("methodB", 
reference("ColumnA")))), - or( - invoke("methodC", reference("ColumnA")), - invoke("methodD", reference("ColumnA"))))), - or( + not(and( + not(or( + invoke("methodA", reference("ColumnA")), // invoke used since it can't be rewritten to + // handle a NOT + invoke("methodB", reference("ColumnA")))), + or( + invoke("methodC", reference("ColumnA")), + invoke("methodD", reference("ColumnA"))))), or( - invoke("methodA", reference("ColumnA")), - invoke("methodB", reference("ColumnA"))), - and( - not(invoke("methodC", reference("ColumnA"))), - not(invoke("methodD", reference("ColumnA")))))); + or( + invoke("methodA", reference("ColumnA")), + invoke("methodB", reference("ColumnA"))), + and( + not(invoke("methodC", reference("ColumnA"))), + not(invoke("methodD", reference("ColumnA")))))); assertUnchanged("other non-flippble expression", - or( - not(NormalizeFilterUtil - .doIsNull(Reference.newBuilder().setColumnName("ColumnA").build())), - not(invoke("foo", reference("ColumnA"))))); + or( + not(NormalizeFilterUtil.doIsNull(Reference.newBuilder().setColumnName("ColumnA").build())), + not(invoke("foo", reference("ColumnA"))))); assertFilterEquals("flip various leaf expressions", - not(or( - compare(CompareCondition.CompareOperation.LESS_THAN, reference("ColumnA"), - reference("ColumnB")), - compare(CompareCondition.CompareOperation.GREATER_THAN, reference("ColumnA"), - reference("ColumnB")), - compare(CompareCondition.CompareOperation.LESS_THAN_OR_EQUAL, reference("ColumnA"), - reference("ColumnB")), - compare(CompareCondition.CompareOperation.GREATER_THAN_OR_EQUAL, - reference("ColumnA"), reference("ColumnB")), - compare(CompareCondition.CompareOperation.EQUALS, reference("ColumnA"), - reference("ColumnB")), - compare(CompareCondition.CompareOperation.EQUALS, CaseSensitivity.IGNORE_CASE, - reference("ColumnA"), reference("ColumnB")), - compare(CompareCondition.CompareOperation.NOT_EQUALS, reference("ColumnA"), - reference("ColumnB")), - 
compare(CompareCondition.CompareOperation.NOT_EQUALS, CaseSensitivity.IGNORE_CASE, - reference("ColumnA"), reference("ColumnB")), - in(reference("ColumnA"), reference("ColumnB")), - inICase(reference("ColumnA"), reference("ColumnB")), - notIn(reference("ColumnA"), reference("ColumnB")), - notInICase(reference("ColumnA"), reference("ColumnB")), - NormalizeFilterUtil.doContains( - Reference.newBuilder().setColumnName("ColumnA").build(), "asdf", - CaseSensitivity.MATCH_CASE, MatchType.REGULAR))), - and( - compare(CompareCondition.CompareOperation.GREATER_THAN_OR_EQUAL, - reference("ColumnA"), reference("ColumnB")), - compare(CompareCondition.CompareOperation.LESS_THAN_OR_EQUAL, reference("ColumnA"), - reference("ColumnB")), - compare(CompareCondition.CompareOperation.GREATER_THAN, reference("ColumnA"), - reference("ColumnB")), - compare(CompareCondition.CompareOperation.LESS_THAN, reference("ColumnA"), - reference("ColumnB")), - compare(CompareCondition.CompareOperation.NOT_EQUALS, reference("ColumnA"), - reference("ColumnB")), - compare(CompareCondition.CompareOperation.NOT_EQUALS, CaseSensitivity.IGNORE_CASE, - reference("ColumnA"), reference("ColumnB")), - compare(CompareCondition.CompareOperation.EQUALS, reference("ColumnA"), - reference("ColumnB")), - compare(CompareCondition.CompareOperation.EQUALS, CaseSensitivity.IGNORE_CASE, - reference("ColumnA"), reference("ColumnB")), - notIn(reference("ColumnA"), reference("ColumnB")), - notInICase(reference("ColumnA"), reference("ColumnB")), - in(reference("ColumnA"), reference("ColumnB")), - inICase(reference("ColumnA"), reference("ColumnB")), - NormalizeFilterUtil.doContains( - Reference.newBuilder().setColumnName("ColumnA").build(), "asdf", - CaseSensitivity.MATCH_CASE, MatchType.INVERTED))); + not(or( + compare(CompareCondition.CompareOperation.LESS_THAN, reference("ColumnA"), + reference("ColumnB")), + compare(CompareCondition.CompareOperation.GREATER_THAN, reference("ColumnA"), + reference("ColumnB")), + 
compare(CompareCondition.CompareOperation.LESS_THAN_OR_EQUAL, reference("ColumnA"), + reference("ColumnB")), + compare(CompareCondition.CompareOperation.GREATER_THAN_OR_EQUAL, reference("ColumnA"), + reference("ColumnB")), + compare(CompareCondition.CompareOperation.EQUALS, reference("ColumnA"), reference("ColumnB")), + compare(CompareCondition.CompareOperation.EQUALS, CaseSensitivity.IGNORE_CASE, + reference("ColumnA"), reference("ColumnB")), + compare(CompareCondition.CompareOperation.NOT_EQUALS, reference("ColumnA"), + reference("ColumnB")), + compare(CompareCondition.CompareOperation.NOT_EQUALS, CaseSensitivity.IGNORE_CASE, + reference("ColumnA"), reference("ColumnB")), + in(reference("ColumnA"), reference("ColumnB")), + inICase(reference("ColumnA"), reference("ColumnB")), + notIn(reference("ColumnA"), reference("ColumnB")), + notInICase(reference("ColumnA"), reference("ColumnB")), + NormalizeFilterUtil.doContains(Reference.newBuilder().setColumnName("ColumnA").build(), "asdf", + CaseSensitivity.MATCH_CASE, MatchType.REGULAR))), + and( + compare(CompareCondition.CompareOperation.GREATER_THAN_OR_EQUAL, reference("ColumnA"), + reference("ColumnB")), + compare(CompareCondition.CompareOperation.LESS_THAN_OR_EQUAL, reference("ColumnA"), + reference("ColumnB")), + compare(CompareCondition.CompareOperation.GREATER_THAN, reference("ColumnA"), + reference("ColumnB")), + compare(CompareCondition.CompareOperation.LESS_THAN, reference("ColumnA"), + reference("ColumnB")), + compare(CompareCondition.CompareOperation.NOT_EQUALS, reference("ColumnA"), + reference("ColumnB")), + compare(CompareCondition.CompareOperation.NOT_EQUALS, CaseSensitivity.IGNORE_CASE, + reference("ColumnA"), reference("ColumnB")), + compare(CompareCondition.CompareOperation.EQUALS, reference("ColumnA"), reference("ColumnB")), + compare(CompareCondition.CompareOperation.EQUALS, CaseSensitivity.IGNORE_CASE, + reference("ColumnA"), reference("ColumnB")), + notIn(reference("ColumnA"), 
reference("ColumnB")), + notInICase(reference("ColumnA"), reference("ColumnB")), + in(reference("ColumnA"), reference("ColumnB")), + inICase(reference("ColumnA"), reference("ColumnB")), + NormalizeFilterUtil.doContains(Reference.newBuilder().setColumnName("ColumnA").build(), "asdf", + CaseSensitivity.MATCH_CASE, MatchType.INVERTED))); } @Override diff --git a/grpc-api/src/test/java/io/deephaven/grpc_api/util/TestControlledScheduler.java b/grpc-api/src/test/java/io/deephaven/grpc_api/util/TestControlledScheduler.java index 653b7cbf6f0..56219960923 100644 --- a/grpc-api/src/test/java/io/deephaven/grpc_api/util/TestControlledScheduler.java +++ b/grpc-api/src/test/java/io/deephaven/grpc_api/util/TestControlledScheduler.java @@ -18,7 +18,7 @@ public class TestControlledScheduler implements Scheduler { private long currentTimeInNs = 0; private final TreeMultimap workQueue = - TreeMultimap.create(Ordering.natural(), Ordering.arbitrary()); + TreeMultimap.create(Ordering.natural(), Ordering.arbitrary()); /** * Runs the first queued command if there are any. @@ -29,8 +29,7 @@ public void runOne() { } try { - final Map.Entry> entry = - workQueue.asMap().firstEntry(); + final Map.Entry> entry = workQueue.asMap().firstEntry(); final Runnable runner = entry.getValue().iterator().next(); currentTimeInNs = Math.max(currentTimeInNs, entry.getKey().getNanos()); @@ -44,8 +43,8 @@ public void runOne() { } /** - * Will run commands until all work items that should be run before Max(currentTime, untilTime) - * have run. Does not execute events scheduled at the provided time. + * Will run commands until all work items that should be run before Max(currentTime, untilTime) have run. Does not + * execute events scheduled at the provided time. * * @param untilTime time to run until */ @@ -63,8 +62,8 @@ public void runUntil(final DBDateTime untilTime) { } /** - * Will run commands until all work items that should be run through Max(currentTime, untilTime) - * have run. 
Does execute events scheduled at the provided time. + * Will run commands until all work items that should be run through Max(currentTime, untilTime) have run. Does + * execute events scheduled at the provided time. * * @param throughTime time to run through */ diff --git a/grpc-api/src/test/java/io/deephaven/grpc_api/util/TestUtil.java b/grpc-api/src/test/java/io/deephaven/grpc_api/util/TestUtil.java index 80cd556dc55..5ce874ea023 100644 --- a/grpc-api/src/test/java/io/deephaven/grpc_api/util/TestUtil.java +++ b/grpc-api/src/test/java/io/deephaven/grpc_api/util/TestUtil.java @@ -5,8 +5,7 @@ public class TestUtil { public static void assertThrows( - final Class type, - final FunctionalInterfaces.ThrowingRunnable runner) { + final Class type, final FunctionalInterfaces.ThrowingRunnable runner) { boolean threwExpectedException = false; try { runner.run(); diff --git a/log-factory/sinks/log-to-file/src/main/java/io/deephaven/internal/log/LoggerFactoryFile.java b/log-factory/sinks/log-to-file/src/main/java/io/deephaven/internal/log/LoggerFactoryFile.java index ee19c964e78..0326f207e7a 100644 --- a/log-factory/sinks/log-to-file/src/main/java/io/deephaven/internal/log/LoggerFactoryFile.java +++ b/log-factory/sinks/log-to-file/src/main/java/io/deephaven/internal/log/LoggerFactoryFile.java @@ -22,29 +22,29 @@ private static String getPath() { private static boolean append() { return Boolean.parseBoolean( - System.getProperty("io.deephaven.internal.log.LoggerFactoryFile.append", "true")); + System.getProperty("io.deephaven.internal.log.LoggerFactoryFile.append", "true")); } private static boolean showLevel() { return Boolean.parseBoolean( - System.getProperty("io.deephaven.internal.log.LoggerFactoryFile.showLevel", "true")); + System.getProperty("io.deephaven.internal.log.LoggerFactoryFile.showLevel", "true")); } private static boolean showThreadName() { return Boolean.parseBoolean(System - .getProperty("io.deephaven.internal.log.LoggerFactoryFile.showThreadName", "true")); 
+ .getProperty("io.deephaven.internal.log.LoggerFactoryFile.showThreadName", "true")); } private static TimeZone timeZone() { final String timeZone = - System.getProperty("io.deephaven.internal.log.LoggerFactoryFile.timeZone"); + System.getProperty("io.deephaven.internal.log.LoggerFactoryFile.timeZone"); return timeZone == null ? TimeZone.getDefault() : TimeZone.getTimeZone(timeZone); } private static LogLevel level() { return LogLevel - .valueOf(System.getProperty("io.deephaven.internal.log.LoggerFactoryFile.level", "INFO") - .toUpperCase()); + .valueOf(System.getProperty("io.deephaven.internal.log.LoggerFactoryFile.level", "INFO") + .toUpperCase()); } @Override @@ -52,14 +52,14 @@ public final Logger createInternal() { // todo: parameterize based on config final LogBufferPool bufferPool = new DynamicLogBufferPoolImpl("LogBufferPool", 1024, 1024); final LogEntryPool logEntryPool = - new DynamicDelayedLogEntryUnsafePoolImpl("LogEntryPool", 32768); + new DynamicDelayedLogEntryUnsafePoolImpl("LogEntryPool", 32768); // note: this calls a thread per call; need to change dynamics final String header = null; final LogSink logSink = new LogSinkImpl<>(getPath(), Integer.MAX_VALUE, null, - logEntryPool, append(), new LogOutputCsvImpl(bufferPool), header, null); + logEntryPool, append(), new LogOutputCsvImpl(bufferPool), header, null); final String prefix = null; final LoggerTimeSource timeSource = new NullLoggerTimeSource(); return new LoggerImpl(logEntryPool, logSink, prefix, level(), timeSource, timeZone(), - showLevel(), showThreadName()); + showLevel(), showThreadName()); } } diff --git a/log-factory/sinks/log-to-slf4j/src/main/java/io/deephaven/internal/log/LoggerSlf4j.java b/log-factory/sinks/log-to-slf4j/src/main/java/io/deephaven/internal/log/LoggerSlf4j.java index 28301e766ff..d6150a584e2 100644 --- a/log-factory/sinks/log-to-slf4j/src/main/java/io/deephaven/internal/log/LoggerSlf4j.java +++ 
b/log-factory/sinks/log-to-slf4j/src/main/java/io/deephaven/internal/log/LoggerSlf4j.java @@ -21,12 +21,12 @@ public final class LoggerSlf4j implements Logger { private static final Pool buffers = - new ThreadSafeLenientFixedSizePool<>(2048, new Function.Nullary() { - @Override - public ByteBuffer call() { - return ByteBuffer.allocate(512); - } - }, null); + new ThreadSafeLenientFixedSizePool<>(2048, new Function.Nullary() { + @Override + public ByteBuffer call() { + return ByteBuffer.allocate(512); + } + }, null); private static final LogBufferPool logBufferPool = new LogBufferPool() { @Override @@ -82,7 +82,7 @@ public Entry start(LogSink sink, org.slf4j.Logger log, LogLevel level, long curr } public Entry start(LogSink sink, org.slf4j.Logger log, LogLevel level, long currentTime, - Throwable t) { + Throwable t) { super.start(sink, level, currentTime, t); this.log = log; this.level = getLevelSlf4j(level); @@ -103,12 +103,12 @@ public LogEntry endl() { /** Static pool shared among all loggers */ private static final Pool entries = - new ThreadSafeLenientFixedSizePool<>(1024, new Function.Nullary() { - @Override - public Entry call() { - return new Entry(logBufferPool); - } - }, null); + new ThreadSafeLenientFixedSizePool<>(1024, new Function.Nullary() { + @Override + public Entry call() { + return new Entry(logBufferPool); + } + }, null); /** Specialized sink for DH loggers */ private enum Sink implements LogSink { @@ -198,7 +198,7 @@ public void terminate() { @Override public void addInterceptor(Interceptor entryInterceptor) { interceptors = ArrayUtil.pushArray(entryInterceptor, interceptors, - ClassUtil.generify(Interceptor.class)); + ClassUtil.generify(Interceptor.class)); } } @@ -221,13 +221,13 @@ private Entry startEntry(LogLevel level, long currentTime, Throwable t) { @Override public LogEntry getEntry(LogLevel level) { return isLevelEnabled(level) ? 
startEntry(level, System.currentTimeMillis() * 1000) - : LogEntry.NULL; + : LogEntry.NULL; } @Override public LogEntry getEntry(LogLevel level, Throwable t) { return isLevelEnabled(level) ? startEntry(level, System.currentTimeMillis() * 1000, t) - : LogEntry.NULL; + : LogEntry.NULL; } @Override diff --git a/log-factory/sinks/log-to-stream/src/main/java/io/deephaven/internal/log/LoggerFactoryStream.java b/log-factory/sinks/log-to-stream/src/main/java/io/deephaven/internal/log/LoggerFactoryStream.java index c33b69df1f5..80488cbe676 100644 --- a/log-factory/sinks/log-to-stream/src/main/java/io/deephaven/internal/log/LoggerFactoryStream.java +++ b/log-factory/sinks/log-to-stream/src/main/java/io/deephaven/internal/log/LoggerFactoryStream.java @@ -9,7 +9,7 @@ public final class LoggerFactoryStream extends LoggerFactorySingleCache { private static PrintStream getStream() { final String value = - System.getProperty("io.deephaven.internal.log.LoggerFactoryStream.stream", "OUT"); + System.getProperty("io.deephaven.internal.log.LoggerFactoryStream.stream", "OUT"); switch (value.toUpperCase()) { case "OUT": return System.out; @@ -22,7 +22,7 @@ private static PrintStream getStream() { private static LogLevel getLevel() { return LogLevel.valueOf( - System.getProperty("io.deephaven.internal.log.LoggerFactoryStream.level", "INFO")); + System.getProperty("io.deephaven.internal.log.LoggerFactoryStream.level", "INFO")); } @Override diff --git a/log-factory/sinks/logback-logbuffer/src/main/java/io/deephaven/logback/LogBufferAppender.java b/log-factory/sinks/logback-logbuffer/src/main/java/io/deephaven/logback/LogBufferAppender.java index 785c88d97c0..d3af645b379 100644 --- a/log-factory/sinks/logback-logbuffer/src/main/java/io/deephaven/logback/LogBufferAppender.java +++ b/log-factory/sinks/logback-logbuffer/src/main/java/io/deephaven/logback/LogBufferAppender.java @@ -26,7 +26,7 @@ public void start() { int errors = 0; if (this.encoder == null) { addStatus( - new ErrorStatus("No 
encoder set for the appender named \"" + name + "\".", this)); + new ErrorStatus("No encoder set for the appender named \"" + name + "\".", this)); errors++; } @@ -78,7 +78,7 @@ private static LogLevel adapt(Level level) { default: throw new IllegalArgumentException( - "Unexpected level " + level + " " + level.toInt()); + "Unexpected level " + level + " " + level.toInt()); } } } diff --git a/log-factory/src/main/java/io/deephaven/internal/log/Bootstrap.java b/log-factory/src/main/java/io/deephaven/internal/log/Bootstrap.java index 03297f9c5bb..33ee48a7379 100644 --- a/log-factory/src/main/java/io/deephaven/internal/log/Bootstrap.java +++ b/log-factory/src/main/java/io/deephaven/internal/log/Bootstrap.java @@ -3,7 +3,7 @@ class Bootstrap { private static boolean isEnabled() { return Boolean.parseBoolean( - System.getProperty("io.deephaven.internal.log.Bootstrap.enabled", "true")); + System.getProperty("io.deephaven.internal.log.Bootstrap.enabled", "true")); } public static void log(Class source, String message) { diff --git a/log-factory/src/main/java/io/deephaven/internal/log/LoggerFactoryServiceLoaderImpl.java b/log-factory/src/main/java/io/deephaven/internal/log/LoggerFactoryServiceLoaderImpl.java index 5807ab02dce..eb0044f9bfb 100644 --- a/log-factory/src/main/java/io/deephaven/internal/log/LoggerFactoryServiceLoaderImpl.java +++ b/log-factory/src/main/java/io/deephaven/internal/log/LoggerFactoryServiceLoaderImpl.java @@ -11,7 +11,7 @@ enum LoggerFactoryServiceLoaderImpl { LoggerFactoryServiceLoaderImpl() { Bootstrap.log(getClass(), - String.format("searching for '%s'...", LoggerFactory.class.getName())); + String.format("searching for '%s'...", LoggerFactory.class.getName())); final Iterator it = ServiceLoader.load(LoggerFactory.class).iterator(); if (!it.hasNext()) { throw new IllegalStateException("No LoggerFactory found via ServiceLoader"); @@ -22,7 +22,7 @@ enum LoggerFactoryServiceLoaderImpl { while (it.hasNext()) { factory = it.next(); 
Bootstrap.log(getClass(), - String.format("found '%s'", factory.getClass().getName())); + String.format("found '%s'", factory.getClass().getName())); } throw new IllegalStateException("Multiple LoggerFactories found via ServiceLoader"); } diff --git a/log-factory/src/main/java/io/deephaven/internal/log/LoggerFactorySingleCache.java b/log-factory/src/main/java/io/deephaven/internal/log/LoggerFactorySingleCache.java index f2336ab809a..d2b52fec632 100644 --- a/log-factory/src/main/java/io/deephaven/internal/log/LoggerFactorySingleCache.java +++ b/log-factory/src/main/java/io/deephaven/internal/log/LoggerFactorySingleCache.java @@ -3,8 +3,8 @@ import io.deephaven.io.logger.Logger; /** - * An implementation that always returns a single cached {@link Logger}. (This also means that the - * implementations does not make use of the logger name.) + * An implementation that always returns a single cached {@link Logger}. (This also means that the implementations does + * not make use of the logger name.) 
*/ public abstract class LoggerFactorySingleCache implements LoggerFactory { private volatile Logger INSTANCE = null; diff --git a/open-api/lang-api/src/main/java/io/deephaven/lang/parse/api/CompletionParseService.java b/open-api/lang-api/src/main/java/io/deephaven/lang/parse/api/CompletionParseService.java index 9f3b46c2677..e92931257ac 100644 --- a/open-api/lang-api/src/main/java/io/deephaven/lang/parse/api/CompletionParseService.java +++ b/open-api/lang-api/src/main/java/io/deephaven/lang/parse/api/CompletionParseService.java @@ -14,9 +14,9 @@ public interface CompletionParseService changes); + String uri, + String version, + List changes); void remove(String uri); diff --git a/open-api/lang-api/src/main/java/io/deephaven/lang/parse/api/CompletionParseServiceNoOp.java b/open-api/lang-api/src/main/java/io/deephaven/lang/parse/api/CompletionParseServiceNoOp.java index 3c7312d9176..d54f6d69183 100644 --- a/open-api/lang-api/src/main/java/io/deephaven/lang/parse/api/CompletionParseServiceNoOp.java +++ b/open-api/lang-api/src/main/java/io/deephaven/lang/parse/api/CompletionParseServiceNoOp.java @@ -9,12 +9,11 @@ /** * A do-nothing CompletionParseService, to be used only if the real completer is not available. 
*/ -public class CompletionParseServiceNoOp implements - CompletionParseService, Object, RuntimeException> { +public class CompletionParseServiceNoOp + implements CompletionParseService, Object, RuntimeException> { @Override - public ParsedResult parse(final String document) - throws RuntimeException { + public ParsedResult parse(final String document) throws RuntimeException { return new ParsedResult() { private Map> map = new LinkedHashMap<>(); diff --git a/open-api/lang-api/src/main/java/io/deephaven/lang/parse/api/ParsedResult.java b/open-api/lang-api/src/main/java/io/deephaven/lang/parse/api/ParsedResult.java index 68e2a1142e8..54c180155b4 100644 --- a/open-api/lang-api/src/main/java/io/deephaven/lang/parse/api/ParsedResult.java +++ b/open-api/lang-api/src/main/java/io/deephaven/lang/parse/api/ParsedResult.java @@ -6,8 +6,8 @@ /** * Represents a parsed document. * - * For now, we will be re-parsing the entire string document every time, but in the future, we would - * like to be able to update only ranges of changed code. + * For now, we will be re-parsing the entire string document every time, but in the future, we would like to be able to + * update only ranges of changed code. */ public interface ParsedResult { diff --git a/open-api/lang-parser/src/main/java/io/deephaven/lang/api/ChunkerInvokable.java b/open-api/lang-parser/src/main/java/io/deephaven/lang/api/ChunkerInvokable.java index e1965659779..740f74ea612 100644 --- a/open-api/lang-parser/src/main/java/io/deephaven/lang/api/ChunkerInvokable.java +++ b/open-api/lang-parser/src/main/java/io/deephaven/lang/api/ChunkerInvokable.java @@ -10,8 +10,7 @@ /** * Represents an ast node that could be invokable. * - * For now, this is methods and constructors, but will likely be expanded to handle closures to some - * degree as well. + * For now, this is methods and constructors, but will likely be expanded to handle closures to some degree as well. 
*/ public interface ChunkerInvokable extends IsScope { @@ -32,7 +31,7 @@ default Token getNameToken() { // this fallback shouldn't really be needed. assert false : "Invokable without a method name: " + this; return tokens(true) - .filter(tok -> tok.kind == ChunkerConstants.INVOKE) - .first(); + .filter(tok -> tok.kind == ChunkerConstants.INVOKE) + .first(); } } diff --git a/open-api/lang-parser/src/main/java/io/deephaven/lang/api/IsScope.java b/open-api/lang-parser/src/main/java/io/deephaven/lang/api/IsScope.java index 268b6e5bd90..dbf2b90aec5 100644 --- a/open-api/lang-parser/src/main/java/io/deephaven/lang/api/IsScope.java +++ b/open-api/lang-parser/src/main/java/io/deephaven/lang/api/IsScope.java @@ -6,9 +6,9 @@ import java.util.List; /** - * A common interface for {@link ChunkerIdent}, {@link ChunkerInvoke}, {@link ChunkerNew} and - * {@link ChunkerArray} which are the only ast nodes that can be "scope objects": - * thing.field.callMethod()[0].moreMethod.new SomeClass().why.would().you.new Ever() + * A common interface for {@link ChunkerIdent}, {@link ChunkerInvoke}, {@link ChunkerNew} and {@link ChunkerArray} which + * are the only ast nodes that can be "scope objects": thing.field.callMethod()[0].moreMethod.new + * SomeClass().why.would().you.new Ever() */ public interface IsScope extends Node { diff --git a/open-api/lang-parser/src/main/java/io/deephaven/lang/api/ParseCancelled.java b/open-api/lang-parser/src/main/java/io/deephaven/lang/api/ParseCancelled.java index d2cf3e5bd1d..ab6ae1cdebe 100644 --- a/open-api/lang-parser/src/main/java/io/deephaven/lang/api/ParseCancelled.java +++ b/open-api/lang-parser/src/main/java/io/deephaven/lang/api/ParseCancelled.java @@ -3,8 +3,8 @@ /** * Thrown from the parser if the thread interrupt status is set. * - * We don't want to deal with a checked exception from generated code, so we'll just use this, which - * can be thrown freely. 
+ * We don't want to deal with a checked exception from generated code, so we'll just use this, which can be thrown + * freely. */ public class ParseCancelled extends RuntimeException { } diff --git a/open-api/lang-parser/src/main/java/io/deephaven/lang/api/ParseState.java b/open-api/lang-parser/src/main/java/io/deephaven/lang/api/ParseState.java index 503123f1189..3cdc6c84d41 100644 --- a/open-api/lang-parser/src/main/java/io/deephaven/lang/api/ParseState.java +++ b/open-api/lang-parser/src/main/java/io/deephaven/lang/api/ParseState.java @@ -5,10 +5,10 @@ /** * Represents the state of a "currently being parsed node". * - * For now, we're going to use this to have our own hierarchy of ast nodes outside of the generated - * jjt state machine, and in the future, the hope is that this will also allow us to perform - * resumable / incremental document parsing, by simply invalidating any changed ParseState, and then - * restarting the parser in the correct state, over the changed set of text. + * For now, we're going to use this to have our own hierarchy of ast nodes outside of the generated jjt state machine, + * and in the future, the hope is that this will also allow us to perform resumable / incremental document parsing, by + * simply invalidating any changed ParseState, and then restarting the parser in the correct state, over the changed set + * of text. 
* */ public class ParseState { diff --git a/open-api/lang-parser/src/main/java/io/deephaven/lang/meta/Messenger.java b/open-api/lang-parser/src/main/java/io/deephaven/lang/meta/Messenger.java index a985207e5bb..7b8dc4f2162 100644 --- a/open-api/lang-parser/src/main/java/io/deephaven/lang/meta/Messenger.java +++ b/open-api/lang-parser/src/main/java/io/deephaven/lang/meta/Messenger.java @@ -11,8 +11,8 @@ import java.util.List; /** - * An instance of this object is sent to our parser, so that as it runs into invalid syntax, it can - * report useful errors, and allow handler code to inspect the jjtree parser when it occurs. + * An instance of this object is sent to our parser, so that as it runs into invalid syntax, it can report useful + * errors, and allow handler code to inspect the jjtree parser when it occurs. * */ public class Messenger { diff --git a/open-api/lang-parser/src/main/java/io/deephaven/lang/parse/CompletionParser.java b/open-api/lang-parser/src/main/java/io/deephaven/lang/parse/CompletionParser.java index 61498e7fe34..f77105a38d4 100644 --- a/open-api/lang-parser/src/main/java/io/deephaven/lang/parse/CompletionParser.java +++ b/open-api/lang-parser/src/main/java/io/deephaven/lang/parse/CompletionParser.java @@ -17,7 +17,7 @@ * A specialized parser for autocompletion; maybe better to call it a chunker than a parser... 
*/ public class CompletionParser implements - CompletionParseService { + CompletionParseService { private static final Logger LOGGER = LoggerFactory.getLogger(CompletionParser.class); private Map docs = new ConcurrentHashMap<>(); @@ -32,16 +32,16 @@ public ParsedDocument parse(String document) throws ParseException { public void open(final String text, final String uri, final String version) { if (LOGGER.isTraceEnabled()) { LOGGER.trace() - .append("Opening document ") - .append(uri) - .append("[") - .append(version) - .append("] ->\n") - .append(text) - .endl(); + .append("Opening document ") + .append(uri) + .append("[") + .append(version) + .append("] ->\n") + .append(text) + .endl(); } startParse(uri) - .requestParse(String.valueOf(version), text, false); + .requestParse(String.valueOf(version), text, false); } private PendingParse startParse(String uri) { @@ -50,18 +50,18 @@ private PendingParse startParse(String uri) { @Override public void update(final String uri, final String version, - final List changes) { + final List changes) { if (LOGGER.isTraceEnabled()) { LOGGER.trace() - .append("Updating document ") - .append(uri) - .append(" [") - .append(version) - .append("] all docs: ") - .append(docs.keySet().toString()) - .append(" changes: ") - .append(changes.toString()) - .endl(); + .append("Updating document ") + .append(uri) + .append(" [") + .append(version) + .append("] all docs: ") + .append(docs.keySet().toString()) + .append(" changes: ") + .append(changes.toString()) + .endl(); } PendingParse doc = docs.get(uri); final boolean forceParse; @@ -69,8 +69,7 @@ public void update(final String uri, final String version, doc = startParse(uri); forceParse = false; } else { - // let the parser know that we have an incoming change, so it can clear out its worker - // thread asap + // let the parser know that we have an incoming change, so it can clear out its worker thread asap doc.invalidate(); forceParse = true; } @@ -83,34 +82,32 @@ public void 
update(final String uri, final String version, if (offset < 0) { if (LOGGER.isWarnEnabled()) { LOGGER.warn() - .append("Invalid change in document ") - .append(uri) - .append("[") - .append(version) - .append("] @") - .append(range.getStart().getLine()) - .append(":") - .append(range.getStart().getCharacter()) - .endl(); + .append("Invalid change in document ") + .append(uri) + .append("[") + .append(version) + .append("] @") + .append(range.getStart().getLine()) + .append(":") + .append(range.getStart().getCharacter()) + .endl(); } return; } - String prefix = - offset > 0 && offset <= document.length() ? document.substring(0, offset) : ""; - String suffix = - offset + length < document.length() ? document.substring(offset + length) : ""; + String prefix = offset > 0 && offset <= document.length() ? document.substring(0, offset) : ""; + String suffix = offset + length < document.length() ? document.substring(offset + length) : ""; document = prefix + change.getText() + suffix; } doc.requestParse(version, document, forceParse); if (LOGGER.isTraceEnabled()) { LOGGER.trace() - .append("Finished updating ") - .append(uri) - .append(" [") - .append(version) - .append("]") - .endl(); + .append("Finished updating ") + .append(uri) + .append(" [") + .append(version) + .append("]") + .endl(); } } @@ -128,8 +125,8 @@ public ParsedDocument finish(String uri) { if (doc == null) { throw new IllegalStateException("Unable to find parsed document " + uri); } - return doc.finishParse().orElseThrow( - () -> new IllegalStateException("Unable to complete document parsing for " + uri)); + return doc.finishParse() + .orElseThrow(() -> new IllegalStateException("Unable to complete document parsing for " + uri)); } @Override diff --git a/open-api/lang-parser/src/main/java/io/deephaven/lang/parse/LspTools.java b/open-api/lang-parser/src/main/java/io/deephaven/lang/parse/LspTools.java index dc38144d118..2fd9878534e 100644 --- 
a/open-api/lang-parser/src/main/java/io/deephaven/lang/parse/LspTools.java +++ b/open-api/lang-parser/src/main/java/io/deephaven/lang/parse/LspTools.java @@ -39,42 +39,37 @@ public static int getOffsetFromPosition(String document, Position position) { } public static boolean lessThan(PositionOrBuilder p, PositionOrBuilder start) { - return p.getLine() == start.getLine() ? p.getCharacter() < start.getCharacter() - : p.getLine() < start.getLine(); + return p.getLine() == start.getLine() ? p.getCharacter() < start.getCharacter() : p.getLine() < start.getLine(); } public static boolean lessOrEqual(PositionOrBuilder p, PositionOrBuilder start) { return p.getLine() == start.getLine() ? p.getCharacter() <= start.getCharacter() - : p.getLine() < start.getLine(); + : p.getLine() < start.getLine(); } public static boolean greaterThan(PositionOrBuilder p, PositionOrBuilder end) { - return p.getLine() == end.getLine() ? p.getCharacter() > end.getCharacter() - : p.getLine() > end.getLine(); + return p.getLine() == end.getLine() ? p.getCharacter() > end.getCharacter() : p.getLine() > end.getLine(); } public static boolean greaterOrEqual(PositionOrBuilder p, PositionOrBuilder end) { - return p.getLine() == end.getLine() ? p.getCharacter() >= end.getCharacter() - : p.getLine() > end.getLine(); + return p.getLine() == end.getLine() ? 
p.getCharacter() >= end.getCharacter() : p.getLine() > end.getLine(); } public static int extend(Position.Builder p, PositionOrBuilder requested) { if (p.getLine() != requested.getLine()) { - throw new IllegalArgumentException("Can only extend on same-line; " + p + " and " - + requested + " are not on same line"); + throw new IllegalArgumentException( + "Can only extend on same-line; " + p + " and " + requested + " are not on same line"); } p.setCharacter(requested.getCharacter()).build(); return requested.getCharacter() - p.getCharacter(); } public static Position plus(Position p, int line, int character) { - return p.toBuilder().setLine(p.getLine() + line).setCharacter(p.getCharacter() + character) - .build(); + return p.toBuilder().setLine(p.getLine() + line).setCharacter(p.getCharacter() + character).build(); } public static Position minus(Position p, int line, int character) { - return p.toBuilder().setLine(p.getLine() - line).setCharacter(p.getCharacter() - character) - .build(); + return p.toBuilder().setLine(p.getLine() - line).setCharacter(p.getCharacter() - character).build(); } public static Position copy(Position p) { @@ -83,19 +78,19 @@ public static Position copy(Position p) { public static boolean isInside(DocumentRangeOrBuilder range, PositionOrBuilder innerStart, - PositionOrBuilder innerEnd) { + PositionOrBuilder innerEnd) { return innerStart.getLine() >= range.getStart().getLine() - && innerStart.getCharacter() >= range.getStart().getCharacter() - && innerEnd.getLine() <= range.getEnd().getLine() - && innerEnd.getCharacter() <= range.getEnd().getCharacter(); + && innerStart.getCharacter() >= range.getStart().getCharacter() + && innerEnd.getLine() <= range.getEnd().getLine() + && innerEnd.getCharacter() <= range.getEnd().getCharacter(); } public static DocumentRange.Builder rangeFromSource(String source, int start, int length) { final DocumentRange.Builder range = DocumentRange.newBuilder(); range.setStart(getPositionFromOffset(source, start)); 
range.setEnd(getPositionFromOffset( - source, - start + length)); + source, + start + length)); return range; } diff --git a/open-api/lang-parser/src/main/java/io/deephaven/lang/parse/ParsedDocument.java b/open-api/lang-parser/src/main/java/io/deephaven/lang/parse/ParsedDocument.java index 9ea7e04e713..12e6cff71ef 100644 --- a/open-api/lang-parser/src/main/java/io/deephaven/lang/parse/ParsedDocument.java +++ b/open-api/lang-parser/src/main/java/io/deephaven/lang/parse/ParsedDocument.java @@ -18,8 +18,8 @@ /** * Represents a parsed document. * - * For now, we will be re-parsing the entire string document every time, but in the future, we would - * like to be able to update only ranges of changed code. + * For now, we will be re-parsing the entire string document every time, but in the future, we would like to be able to + * update only ranges of changed code. */ public class ParsedDocument implements ParsedResult { @@ -87,15 +87,13 @@ public boolean containsIndex(int i) { return 1; } - assert b.isChildOf(a) - : "Nodes occupying the same tokenspace were not in a parent-child relationship " + assert b.isChildOf(a) : "Nodes occupying the same tokenspace were not in a parent-child relationship " + a + " does not contain " + b + " (or vice versa)"; return -1; }; /** - * TODO: enforce clients to only send \n. We don't want to mess around with \r\n taking up two - * chars. IDS-1517-26 + * TODO: enforce clients to only send \n. We don't want to mess around with \r\n taking up two chars. IDS-1517-26 */ private static final Pattern NEW_LINE_PATTERN = Pattern.compile("\\r?\\n"); private final ChunkerDocument doc; @@ -118,8 +116,7 @@ public ParsedDocument(ChunkerDocument doc, String document) { @Override public Object visitChunkerStatement(ChunkerStatement node, Object data) { stmts.add(node); - // only want to add top level statements; we don't have a good solution for - // nested ranges yet. 
+ // only want to add top level statements; we don't have a good solution for nested ranges yet. return null; } }, null); @@ -129,8 +126,7 @@ public Object visitChunkerStatement(ChunkerStatement node, Object data) { public Node findNode(int p) { - if (doc.jjtGetFirstToken() == doc.jjtGetLastToken() - && doc.jjtGetFirstToken().kind == ChunkerConstants.EOF) { + if (doc.jjtGetFirstToken() == doc.jjtGetLastToken() && doc.jjtGetFirstToken().kind == ChunkerConstants.EOF) { return doc; } @@ -155,7 +151,7 @@ private Node findDeepest(Node best, int i) { for (int c = best.jjtGetNumChildren(); c-- > 0;) { final Node child = best.jjtGetChild(c); if (child.containsIndex(i) || - (best instanceof ChunkerStatement && i == child.getEndIndex())) { + (best instanceof ChunkerStatement && i == child.getEndIndex())) { return findDeepest(child, i); } } @@ -188,13 +184,11 @@ public ChunkerDocument getDoc() { /** * When a parse fails, we do not throw away our last-good document. * - * We do, however, record the failure information, which you should check via - * {@link #isError()}. + * We do, however, record the failure information, which you should check via {@link #isError()}. * * @param src The source with an error * @param e The parse exception. May make this any exception type. - * @return this, for chaining. We may need to make copies later, but for now, we'll use it - * as-is. + * @return this, for chaining. We may need to make copies later, but for now, we'll use it as-is. 
*/ public ParsedDocument withError(String src, ParseException e) { this.errorSource = src; @@ -229,10 +223,10 @@ public void logErrors(Logger log) { @Override public String toString() { return "ParsedDocument{" + - "doc=" + doc + - ", errorSource='" + errorSource + '\'' + - ", error=" + error + - '}'; + "doc=" + doc + + ", errorSource='" + errorSource + '\'' + + ", error=" + error + + '}'; } public Position.Builder findEditRange(DocumentRange replaceRange) { @@ -244,16 +238,14 @@ public Position.Builder findEditRange(DocumentRange replaceRange) { assert replaceRange.getEnd().getCharacter() <= end.endColumn; // Most definitely want to cache this very expensive operation. - return computedPositions.computeIfAbsent(replaceRange, - r -> findFromNodes(replaceRange, doc)); + return computedPositions.computeIfAbsent(replaceRange, r -> findFromNodes(replaceRange, doc)); } private Position.Builder findFromNodes(DocumentRange replaceRange, Node startNode) { return findFromNodes(replaceRange, startNode, null); } - private Position.Builder findFromNodes(DocumentRange replaceRange, Node startNode, - Node endNode) { + private Position.Builder findFromNodes(DocumentRange replaceRange, Node startNode, Node endNode) { if (startNode.jjtGetNumChildren() == 0) { // we are the winner node! @@ -263,8 +255,7 @@ private Position.Builder findFromNodes(DocumentRange replaceRange, Node startNod assert startsBefore(startNode, replaceRange.getStart()); // Note, we're intentionally sending the first token of both the start and end node, // as we need to search forward-only when examining tokens. - return findFromTokens(replaceRange, startNode.jjtGetFirstToken(), - endNode.jjtGetFirstToken()); + return findFromTokens(replaceRange, startNode.jjtGetFirstToken(), endNode.jjtGetFirstToken()); } else { // we are going to look through children backwards, // as the user is most likely to be editing the end of the document. 
@@ -284,8 +275,7 @@ private Position.Builder findFromNodes(DocumentRange replaceRange, Node startNod if (endNode != startNode) { endNode = refineEndNode(replaceRange, endNode == null ? startNode : endNode); } - return findFromTokens(replaceRange, startNode.jjtGetFirstToken(), - endNode.jjtGetFirstToken()); + return findFromTokens(replaceRange, startNode.jjtGetFirstToken(), endNode.jjtGetFirstToken()); } } @@ -312,8 +302,7 @@ private Position.Builder findFromTokens(DocumentRange replaceRange, Token start, // to setup backlinks. final Position.Builder startPos = start.positionStart(); final Position.Builder endPos = end.positionStart(); - // both asserts are >= because both start and end are the earliest - // token-containing-our-range we could find. + // both asserts are >= because both start and end are the earliest token-containing-our-range we could find. assert LspTools.greaterOrEqual(replaceRange.getStart(), startPos); assert LspTools.greaterOrEqual(replaceRange.getEnd(), endPos); @@ -345,10 +334,8 @@ private int findFromToken(Position pos, Token tok, boolean start) { } else { candidate.setLine(candidate.getLine() + 1); candidate.setCharacter(0); - // TODO: make monaco force \n only instead of \r\n, and blow up if client - // gives us \r\ns - // so the +1 we are doing here for the line split is always valid. - // IDS-1517-26 + // TODO: make monaco force \n only instead of \r\n, and blow up if client gives us \r\ns + // so the +1 we are doing here for the line split is always valid. 
IDS-1517-26 ind += line.length() + 1; } } @@ -357,8 +344,7 @@ private int findFromToken(Position pos, Token tok, boolean start) { tok = tok.next; } } - throw new IllegalArgumentException( - "Token " + startTok + " does not contain position " + pos); + throw new IllegalArgumentException("Token " + startTok + " does not contain position " + pos); } public void extendEnd(CompletionItem.Builder item, Position requested, Node node) { @@ -367,7 +353,7 @@ public void extendEnd(CompletionItem.Builder item, Position requested, Node node tok = extendEnd(tok, requested, item); if (tok == null) { item.getTextEditBuilder().getRangeBuilder() - .setEnd(LspTools.plus(requested, 0, 1)); + .setEnd(LspTools.plus(requested, 0, 1)); break; } } @@ -377,20 +363,18 @@ private Token extendEnd(Token tok, Position requested, CompletionItem.Builder ed if (tok.beginLine == tok.endLine) { // most common case (almost everything) final TextEdit.Builder textEdit = edit.getTextEditBuilder(); - int moved = - LspTools.extend(textEdit.getRangeBuilder().getEndBuilder(), tok.positionEnd()); + int moved = LspTools.extend(textEdit.getRangeBuilder().getEndBuilder(), tok.positionEnd()); String txt = tok.image; textEdit.setText(textEdit.getText() + - txt.substring(txt.length() - moved)); + txt.substring(txt.length() - moved)); edit.setTextEdit(textEdit); return tok.next; } else { // ick. multi-line tokens are the devil. 
// we should probably reduce this to be only-the-newline-token, - // and instead run more productions on the contents of multi-line strings (the other - // possible line-spanning token) - throw new UnsupportedOperationException( - "Multi-line edits not supported yet; cannot extendEnd over " + tok); + // and instead run more productions on the contents of multi-line strings (the other possible line-spanning + // token) + throw new UnsupportedOperationException("Multi-line edits not supported yet; cannot extendEnd over " + tok); } } diff --git a/open-api/lang-parser/src/main/java/io/deephaven/lang/parse/PendingParse.java b/open-api/lang-parser/src/main/java/io/deephaven/lang/parse/PendingParse.java index 1e9fd190546..72a3a7984e8 100644 --- a/open-api/lang-parser/src/main/java/io/deephaven/lang/parse/PendingParse.java +++ b/open-api/lang-parser/src/main/java/io/deephaven/lang/parse/PendingParse.java @@ -15,8 +15,8 @@ /** * An object to represent the operation of parsing documents from users. * - * Because we update the server with new document state as the user types, we don't want to get a - * deluge of stale parse operations sucking down CPU. + * Because we update the server with new document state as the user types, we don't want to get a deluge of stale parse + * operations sucking down CPU. * * Only one version (the newest) of a given document will be parsed; all others will be cancelled. */ @@ -28,29 +28,27 @@ public class PendingParse { private static final Logger LOGGER = LoggerFactory.getLogger(PendingParse.class); /** - * A ParseJob represents a unit of work for our parser. Note that we only ever want to parse / - * use the newest ParseJob, and only when there is no new parse job incoming, as completion - * requests must only ever be issued when the document is stable (if the document is changing, - * then any completion request is stale, as the user is typing new code). + * A ParseJob represents a unit of work for our parser. 
Note that we only ever want to parse / use the newest + * ParseJob, and only when there is no new parse job incoming, as completion requests must only ever be issued when + * the document is stable (if the document is changing, then any completion request is stale, as the user is typing + * new code). */ private final class ParseJob { /** - * monaco-supplied version string. We use this to sanity check that we're not parsing an old - * document. We _should_ do some testing that Ctrl+Z does not re-issue an old version - * identifier. + * monaco-supplied version string. We use this to sanity check that we're not parsing an old document. We + * _should_ do some testing that Ctrl+Z does not re-issue an old version identifier. */ private final String version; /** - * The text of the document. Monaco only sends us diffs of changes, and our parser is not - * (yet) incremental, so we rebuild the whole document from the text of the previous state - * of the document. + * The text of the document. Monaco only sends us diffs of changes, and our parser is not (yet) incremental, so + * we rebuild the whole document from the text of the previous state of the document. */ private final String text; /** - * The final parsed result. Only non-null after a parse operation has completed. Note that - * we only ever read this field inside a synchronized() block, so no need for volatile here. + * The final parsed result. Only non-null after a parse operation has completed. Note that we only ever read + * this field inside a synchronized() block, so no need for volatile here. */ private ParsedDocument result; @@ -62,8 +60,8 @@ private ParseJob(@NotNull final String version, @NotNull final String text) { /** * Attempt to perform a document parse. * - * @return true if parsing was successful, false if it failed with a parse exception, and - * null if we were interrupted. 
+ * @return true if parsing was successful, false if it failed with a parse exception, and null if we were + * interrupted. */ private Boolean parse() { try { @@ -77,10 +75,10 @@ private Boolean parse() { } catch (TokenMgrException | ParseException e) { if (LOGGER.isWarnEnabled()) { LOGGER.warn() - .append("Parser for ").append(uri) - .append(": Encountered parse exception ").append(e) - .append(" while parsing ").append(text) - .endl(); + .append("Parser for ").append(uri) + .append(": Encountered parse exception ").append(e) + .append(" while parsing ").append(text) + .endl(); } return false; } @@ -93,36 +91,35 @@ private Boolean parse() { private final String uri; /** - * The thread doing the parse work. We (over)use this field to interrupt the parser, and as an - * object monitor to un/pause our threads. + * The thread doing the parse work. We (over)use this field to interrupt the parser, and as an object monitor to + * un/pause our threads. */ private final Thread parseThread; /** - * Set whenever the document is mutated and we need to reparse. This is volatile so we can read - * the `String text` inside it without acquiring any locks. + * Set whenever the document is mutated and we need to reparse. This is volatile so we can read the `String text` + * inside it without acquiring any locks. */ private volatile ParseJob targetState; /** - * Set when a parse has completed either successfully or unsuccessfully. We use this to prevent - * re-parsing a document so invalid that the parser blows up. + * Set when a parse has completed either successfully or unsuccessfully. We use this to prevent re-parsing a + * document so invalid that the parser blows up. */ private ParseJob lastParseState; /** - * Set when the parse has completed successfully. We only ever return a valid result when - * completedState == lastParseState == targetState, as this means "parse has completed - * successfully, and document has not been mutated. 
+ * Set when the parse has completed successfully. We only ever return a valid result when completedState == + * lastParseState == targetState, as this means "parse has completed successfully, and document has not been + * mutated. */ private ParseJob completedState; /** - * Always true while user is connected to IDE session. False once user disconnects and we need - * to stop working. + * Always true while user is connected to IDE session. False once user disconnects and we need to stop working. */ private volatile boolean alive; /** - * Set to false when we know a document change is incoming, but a new parse has not been - * requested. Used to keep the parser thread cleared while we are waiting. + * Set to false when we know a document change is incoming, but a new parse has not been requested. Used to keep the + * parser thread cleared while we are waiting. */ private volatile boolean valid; @@ -138,12 +135,9 @@ private ParseJob awaitNext() { while (alive) { synchronized (parseThread) { if (valid && targetState != null && targetState != lastParseState) { - // we'll set lastParseState if a parse completes, whether with success or - // failure. - // targetState _might_ get processed more than once, if a superfluous interrupt - // is issued; - // note that we can probably replace lastParseState w/ a `boolean finished` - // field in the ParseJob + // we'll set lastParseState if a parse completes, whether with success or failure. + // targetState _might_ get processed more than once, if a superfluous interrupt is issued; + // note that we can probably replace lastParseState w/ a `boolean finished` field in the ParseJob return targetState; } try { @@ -166,20 +160,19 @@ private void parse() { synchronized (parseThread) { // we'll return true if parse succeeded, false if failed, and null if interrupted. 
if (Boolean.TRUE.equals(success)) { - // pending state was successfully parsed, signal that it has a completed - // document, and should not be run again + // pending state was successfully parsed, signal that it has a completed document, and should not be + // run again completedState = lastParseState = pending; } else if (Boolean.FALSE.equals(success)) { // failed // signal that the pending state cannot be parsed, so don't try again. - // In this state, we will allow fallback to V1 parser if the document has not - // changed since we failed, - // otherwise, we will cancel all parse attempts until the document no longer - // causes the parser to blow up. + // In this state, we will allow fallback to V1 parser if the document has not changed since we + // failed, + // otherwise, we will cancel all parse attempts until the document no longer causes the parser to + // blow up. lastParseState = pending; } // wake up anyone waiting on us (in finishParse()) - // note that if the parse was cancelled, we still want to wake up pending completion - // requests, + // note that if the parse was cancelled, we still want to wake up pending completion requests, // since they are no longer valid and should be cancelled. parseThread.notifyAll(); } @@ -189,49 +182,41 @@ private void parse() { /** * Called when the document has been updated and we need to reparse the latest document text. * - * @param version The monaco-supplied version string. Monatomically increasing (TODO: test - * editor undo operations) + * @param version The monaco-supplied version string. Monatomically increasing (TODO: test editor undo operations) * @param text The full text of the document * @param force A boolean to signal if we should forcibly parse, ignoring current state. 
*/ - void requestParse(@NotNull final String version, @NotNull final String text, - final boolean force) { + void requestParse(@NotNull final String version, @NotNull final String text, final boolean force) { final ParseJob localTargetState; - // hm, if we fail the version check, we should actually send a message to the client, and - // tell them - // to send us the full document text so that we can get into a good state. Failing the check - // means our + // hm, if we fail the version check, we should actually send a message to the client, and tell them + // to send us the full document text so that we can get into a good state. Failing the check means our // document diff application has likely resulted in a malformed document. // TODO: test if we _can_ even get into a bad state - if (force || (localTargetState = targetState) == null - || localTargetState.version.compareTo(version) < 0) { + if (force || (localTargetState = targetState) == null || localTargetState.version.compareTo(version) < 0) { - // first, interrupt, in case the parser is running a stale job, we want it to die (it - // checks interrupt state). - // it's unlikely this will do much, since we already interrupted in invalidate(), but - // it's possible to need - // this if there's three edits in flight at once; invalidate() cancels the first, we - // cancel the second, + // first, interrupt, in case the parser is running a stale job, we want it to die (it checks interrupt + // state). + // it's unlikely this will do much, since we already interrupted in invalidate(), but it's possible to need + // this if there's three edits in flight at once; invalidate() cancels the first, we cancel the second, // and then we queue up the third, current request, below. parseThread.interrupt(); synchronized (parseThread) { // next, set the targetState so the parse thread can pick up the work we want. 
- // we do this inside this synchronized block, so the logic in finishParse() can - // consider targetState atomic + // we do this inside this synchronized block, so the logic in finishParse() can consider targetState + // atomic // (we only *read* targetState field outside of a synchronized block). targetState = new ParseJob(version, text); - // allows parse attempts to occur. We set valid=false when we invalidate() a parse - // upon receiving document updates. + // allows parse attempts to occur. We set valid=false when we invalidate() a parse upon receiving + // document updates. valid = true; - // now, just in case this thread was paused in this method call, we want to be sure - // that the job is picked up, - // WITHOUT telling the parser to cancel (if it has already started working on - // targetState). - // i.e. if we interrupted the thread and then the parse thread woke up, it would - // have noticed valid=false, - // and gone back to sleep; so, we issue an extra wake up here. It's very unlikely to - // occur, but better to be safe... + // now, just in case this thread was paused in this method call, we want to be sure that the job is + // picked up, + // WITHOUT telling the parser to cancel (if it has already started working on targetState). + // i.e. if we interrupted the thread and then the parse thread woke up, it would have noticed + // valid=false, + // and gone back to sleep; so, we issue an extra wake up here. It's very unlikely to occur, but better + // to be safe... parseThread.notifyAll(); } } @@ -240,15 +225,14 @@ void requestParse(@NotNull final String version, @NotNull final String text, /** * Called inside CompletionQuery to request / block on a parsed document. * - * @return Optional.empty() if we have either failed a parse, or never parsed a document (tells - * calling code to fallback to V1 autocomplete). Optional.of(validDocument) iff we have - * successfully parsed a document and no new changes have arrived. 
+ * @return Optional.empty() if we have either failed a parse, or never parsed a document (tells calling code to + * fallback to V1 autocomplete). Optional.of(validDocument) iff we have successfully parsed a document and + * no new changes have arrived. * @throws CompletionCancelled if the document was mutate while we were blocking. */ Optional finishParse() { final ParseJob localTargetState = targetState; - // I don't like big synchronized blocks, but the only "expensive" thing we do in here is - // some logging. + // I don't like big synchronized blocks, but the only "expensive" thing we do in here is some logging. synchronized (parseThread) { // loop while alive, valid, and not finished parsing. while (alive && valid && localTargetState != lastParseState) { @@ -259,10 +243,9 @@ Optional finishParse() { // but the user kept typing; we want to fail the current request quickly. if (LOGGER.isInfoEnabled()) { LOGGER.info() - .append( - "Document changed while awaiting parsing; failing current request ") - .append(localTargetState.version) - .endl(); + .append("Document changed while awaiting parsing; failing current request ") + .append(localTargetState.version) + .endl(); } // this exception tells calling code that it should immediately fail, // rather than fall back to the V1 parser. @@ -273,18 +256,17 @@ Optional finishParse() { } catch (InterruptedException failure) { if (LOGGER.isWarnEnabled()) { LOGGER.warn() - .append("Unexpected interruption of document parser") - .append(failure) - .endl(); + .append("Unexpected interruption of document parser") + .append(failure) + .endl(); } Thread.currentThread().interrupt(); // percolate interruption - throw new CompletionCancelled(); // calling code will catch this to fail-fast - // (does not allow fallback). + throw new CompletionCancelled(); // calling code will catch this to fail-fast (does not allow + // fallback). } } // end while(){} loop - // we have been woken up. 
Either parsing has finished, or the document has been mutated - // under us. + // we have been woken up. Either parsing has finished, or the document has been mutated under us. if (!alive) { // user closed tab while we were working. give up. throw new CompletionCancelled(); @@ -294,16 +276,14 @@ Optional finishParse() { throw new CompletionCancelled(); } if (completedState == null) { - // We have never successfully parsed the document; let calling code fallback to V1 - // parser + // We have never successfully parsed the document; let calling code fallback to V1 parser return Optional.empty(); } if (lastParseState == targetState) { - // The parse operation completed w/ success or failure, and the document has not - // been mutated under us. + // The parse operation completed w/ success or failure, and the document has not been mutated under us. if (completedState == lastParseState) { - // The parse completed successfully, and the document is uptodate. Use the - // result to perform completion. + // The parse completed successfully, and the document is uptodate. Use the result to perform + // completion. return Optional.of(completedState.result); } // The parse failed, but the document is uptodate. Allow fallback to V1 parser. @@ -335,17 +315,15 @@ public String getText() { } /** - * Called when the document has been updated, but before we are ready to submit new text to be - * parsed. + * Called when the document has been updated, but before we are ready to submit new text to be parsed. * - * We'll just interrupt the parser thread, so it has time to cancel any work before our caller - * submits the new work. Note that we don't update the targetState field, since we use it to - * return the current text. + * We'll just interrupt the parser thread, so it has time to cancel any work before our caller submits the new work. + * Note that we don't update the targetState field, since we use it to return the current text. 
* - * Our caller must call getText() above and then apply diffs to create new document text before - * it can call .requestParse(). Calling invalidate() gives the parser thread a little extra time - * to realize that it is processing stale input and throw ParseCancelled(), so the parser thread - * can be ready to immediately start work when .requestParse() is called. + * Our caller must call getText() above and then apply diffs to create new document text before it can call + * .requestParse(). Calling invalidate() gives the parser thread a little extra time to realize that it is + * processing stale input and throw ParseCancelled(), so the parser thread can be ready to immediately start work + * when .requestParse() is called. */ public void invalidate() { valid = false; diff --git a/open-api/lang-parser/src/main/java/io/deephaven/lang/shared/lsp/CompletionCancelled.java b/open-api/lang-parser/src/main/java/io/deephaven/lang/shared/lsp/CompletionCancelled.java index d260aea4d58..5dd9cd53f88 100644 --- a/open-api/lang-parser/src/main/java/io/deephaven/lang/shared/lsp/CompletionCancelled.java +++ b/open-api/lang-parser/src/main/java/io/deephaven/lang/shared/lsp/CompletionCancelled.java @@ -1,8 +1,8 @@ package io.deephaven.lang.shared.lsp; /** - * Thrown from the document parser if the user has updated the document while an ongoing completion - * request was blocking on stale input. + * Thrown from the document parser if the user has updated the document while an ongoing completion request was blocking + * on stale input. * * This is used to fast-path quitting a completion request because the document was invalidated. 
* diff --git a/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/ChunkerCompleter.java b/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/ChunkerCompleter.java index 037b7bcea38..06c2bb1376b 100644 --- a/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/ChunkerCompleter.java +++ b/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/ChunkerCompleter.java @@ -35,10 +35,9 @@ public enum SearchDirection { private static final Pattern CAMEL_PATTERN = Pattern.compile("(?=\\p{Lu})"); public static final String CONTAINS_NEWLINE = ".*\\R.*"; // \R is "any newline" in java 8 - // For now, only testing uses this property, when we want to test column expressions without the - // noise of static methods - // Since we don't really expect clients to twiddle this, we only consider it as a system - // property. + // For now, only testing uses this property, when we want to test column expressions without the noise of static + // methods + // Since we don't really expect clients to twiddle this, we only consider it as a system property. 
public static final String PROP_SUGGEST_STATIC_METHODS = "suggest.all.static.methods"; private final Logger log; @@ -51,15 +50,13 @@ public ChunkerCompleter(final Logger log, VariableProvider variables) { this(log, variables, new CompletionLookups()); } - public ChunkerCompleter(final Logger log, VariableProvider variables, - CompletionLookups lookups) { + public ChunkerCompleter(final Logger log, VariableProvider variables, CompletionLookups lookups) { this.log = log; this.variables = variables; this.lookups = lookups; } - public CompletableFuture> complete(String command, - int offset) { + public CompletableFuture> complete(String command, int offset) { final long start = System.nanoTime(); CompletionParser parser = new CompletionParser(); try { @@ -71,42 +68,39 @@ public CompletableFuture> complete(Stri final Set results = runCompletion(doc, offset); final long end = System.nanoTime(); log.info() - .append("Found ") - .append(results.size()) - .append(" completion items;\nparse time:\t") - .append(parseTime - start) - .append("nanos;\ncompletion time: ") - .append(end - parseTime) - .append("nanos") - .endl(); + .append("Found ") + .append(results.size()) + .append(" completion items;\nparse time:\t") + .append(parseTime - start) + .append("nanos;\ncompletion time: ") + .append(end - parseTime) + .append("nanos") + .endl(); return results.stream() - .map(this::toFragment) - .collect(Collectors.toList()); + .map(this::toFragment) + .collect(Collectors.toList()); }); } catch (ParseException e) { - final CompletableFuture> future = - new CompletableFuture<>(); + final CompletableFuture> future = new CompletableFuture<>(); future.completeExceptionally(e); - // TODO: better logging here; preferably writing to a system diagnostics table. - // IDS-1517-16 + // TODO: better logging here; preferably writing to a system diagnostics table. 
IDS-1517-16 log.info() - .append("Parse error in experimental completion api") - .append(e) - .append(" found in source\n") - .append(command) - .endl(); + .append("Parse error in experimental completion api") + .append(e) + .append(" found in source\n") + .append(command) + .endl(); return future; } } private CompletionFragment toFragment(CompletionItemOrBuilder item) { - return new CompletionFragment(item.getStart(), item.getLength(), - item.getTextEdit().getText(), item.getLabel()); + return new CompletionFragment(item.getStart(), item.getLength(), item.getTextEdit().getText(), item.getLabel()); } @Override - public Collection runCompletion(final ParsedDocument doc, - final Position pos, final int offset) { + public Collection runCompletion(final ParsedDocument doc, final Position pos, + final int offset) { final List results = new ArrayList<>(); this.doc = doc; @@ -128,8 +122,7 @@ public Collection runCompletion(final ParsedDocument doc fixRanges(doc, results, node, pos); if ("true".equals(System.getProperty("test.monaco.sanity"))) { - // // We may want to manually test what monaco does with various formats of result - // objects, + // // We may want to manually test what monaco does with various formats of result objects, // // so we have this block of code hidden behind an off-by-default system property // results.add(new CompletionItem(0, 0, "A1", "A1", new DocumentRange( // pos.plus(0, -2), pos @@ -160,12 +153,10 @@ public Collection runCompletion(final ParsedDocument doc } /** - * Part 1 of the V2 completion api; adapting our API to fit into existing CompletionHandler - * semantics. + * Part 1 of the V2 completion api; adapting our API to fit into existing CompletionHandler semantics. * - * Right now we are just blindly re-parsing the whole document when using the old api, which is - * going to be good-enough-for-now; this may also allow us to borrow the existing unit tests to - * some degree. 
+ * Right now we are just blindly re-parsing the whole document when using the old api, which is going to be + * good-enough-for-now; this may also allow us to borrow the existing unit tests to some degree. * * @param doc * @param offset @@ -192,22 +183,22 @@ public Set runCompletion(ParsedDocument doc, int offset) { } /** - * So, despite no documentation describing the exact semantics, monaco is _very_ picky about the - * format of completions it receives. + * So, despite no documentation describing the exact semantics, monaco is _very_ picky about the format of + * completions it receives. * * The list of invariants we have to obey: * - * The main text edit (Range and String) is stored on the CompletionItem itself. The main Range - * must _start_ at the user's cursor, and end at the earlier of: A) The end of the text we want - * to replace B) The end of line where the cursor is. + * The main text edit (Range and String) is stored on the CompletionItem itself. The main Range must _start_ at the + * user's cursor, and end at the earlier of: A) The end of the text we want to replace B) The end of line where the + * cursor is. * - * Any other changes we want must be placed into the additionalTextEdits fields. This includes: - * A) Any changes on the same line as cursor that come before the cursor. B) Any changes on - * lines where the cursor is not currently placed. + * Any other changes we want must be placed into the additionalTextEdits fields. This includes: A) Any changes on + * the same line as cursor that come before the cursor. B) Any changes on lines where the cursor is not currently + * placed. * - * Due to this unfortunate complexity, we are doing this in a post-processing phase (here), so - * that individual completion provider logic only has to handle "replace text X with Y", and - * we'll figure out the correct incantation to keep monaco happy here. 
+ * Due to this unfortunate complexity, we are doing this in a post-processing phase (here), so that individual + * completion provider logic only has to handle "replace text X with Y", and we'll figure out the correct + * incantation to keep monaco happy here. * * @param parsed The parsed document (bag of state related to the source document). * @param res A set of CompletionItem to fixup. @@ -215,10 +206,10 @@ public Set runCompletion(ParsedDocument doc, int offset) { * @param cursor The position of the user's cursor (the location we need to slice from) */ private void fixRanges( - ParsedDocument parsed, - Collection res, - Node node, - Position cursor) { + ParsedDocument parsed, + Collection res, + Node node, + Position cursor) { int ind = 0; for (CompletionItem.Builder item : res) { final Position requested = cursor.toBuilder().build(); @@ -230,12 +221,12 @@ private void fixRanges( // adjust the text edit back, stopping at the cursor. if (log.isTraceEnabled()) { log.trace() - .append("No extendStart support yet; result: ") - .append(result.toString()) - .nl() - .append("Requested: ") - .append(requested.toString()) - .endl(); + .append("No extendStart support yet; result: ") + .append(result.toString()) + .nl() + .append("Requested: ") + .append(requested.toString()) + .endl(); } continue; } else if (LspTools.lessThan(result.getEnd(), requested)) { @@ -246,8 +237,7 @@ private void fixRanges( // Move the part up to the cursor into an additional edit. final TextEdit.Builder edit = sliceBefore(item, requested, node); if (edit == null) { - // could not process this edit. TODO: We should log this case at least. - // IDS-1517-31 + // could not process this edit. TODO: We should log this case at least. IDS-1517-31 continue; } item.addAdditionalTextEdits(edit); @@ -259,8 +249,8 @@ private void fixRanges( // since monaco only supports same-line text edits. 
if (result.getStart().getLine() != result.getEnd().getLine()) { List broken = new ArrayList<>(); - // TODO: also split up the additional text edits, once they actually support - // multiline operations. IDS-1517-31 + // TODO: also split up the additional text edits, once they actually support multiline operations. + // IDS-1517-31 } item.setLabel(item.getTextEdit().getText()); @@ -268,15 +258,12 @@ private void fixRanges( } - public TextEdit.Builder sliceBefore(CompletionItem.Builder item, Position requested, - Node node) { + public TextEdit.Builder sliceBefore(CompletionItem.Builder item, Position requested, Node node) { final TextEdit.Builder edit = TextEdit.newBuilder(); - final DocumentRange.Builder range = - DocumentRange.newBuilder(item.getTextEditBuilder().getRange()); + final DocumentRange.Builder range = DocumentRange.newBuilder(item.getTextEditBuilder().getRange()); Token tok = node.findToken(range.getStart()); Position.Builder start = tok.positionStart(); - if (start.getLine() != requested.getLine() - || range.getStart().getLine() != requested.getLine()) { + if (start.getLine() != requested.getLine() || range.getStart().getLine() != requested.getLine()) { // not going to worry about this highly unlikely and complex corner case just yet. return null; } @@ -289,8 +276,7 @@ public TextEdit.Builder sliceBefore(CompletionItem.Builder item, Position reques range.setStart(start.build()).setEnd(requested); edit.setRange(range); StringBuilder b = new StringBuilder(); - // now, from here, gobble up the token contents as we advance the position to the requested - // index. + // now, from here, gobble up the token contents as we advance the position to the requested index. 
while (LspTools.lessThan(start, requested)) { if (LspTools.lessOrEqual(tok.positionEnd(false), start)) { // find next non-empty token @@ -302,8 +288,7 @@ public TextEdit.Builder sliceBefore(CompletionItem.Builder item, Position reques } } if (tok != startTok) { - // shouldn't really happen, but this is way better than potential infinite loops - // of doom + // shouldn't really happen, but this is way better than potential infinite loops of doom break; } imageInd = 0; @@ -319,15 +304,12 @@ public TextEdit.Builder sliceBefore(CompletionItem.Builder item, Position reques return edit; } - private TextEdit.Builder extendEnd(final CompletionItem.Builder item, final Position requested, - final Node node) { + private TextEdit.Builder extendEnd(final CompletionItem.Builder item, final Position requested, final Node node) { final TextEdit.Builder edit = TextEdit.newBuilder(); - final DocumentRange.Builder range = - DocumentRange.newBuilder(item.getTextEditBuilder().getRange()); + final DocumentRange.Builder range = DocumentRange.newBuilder(item.getTextEditBuilder().getRange()); Token tok = node.findToken(range.getStart()); Position.Builder start = tok.positionStart(); - if (start.getLine() != requested.getLine() - || range.getStart().getLine() != requested.getLine()) { + if (start.getLine() != requested.getLine() || range.getStart().getLine() != requested.getLine()) { // not going to worry about this highly unlikely and complex corner case just yet. return null; } @@ -340,8 +322,7 @@ private TextEdit.Builder extendEnd(final CompletionItem.Builder item, final Posi range.setStart(start.build()).setEnd(requested); edit.setRange(range); StringBuilder b = new StringBuilder(); - // now, from here, gobble up the token contents as we advance the position to the requested - // index. + // now, from here, gobble up the token contents as we advance the position to the requested index. 
while (LspTools.lessThan(start, requested)) { if (LspTools.lessOrEqual(tok.positionEnd(false), start)) { // find next non-empty token @@ -353,8 +334,7 @@ private TextEdit.Builder extendEnd(final CompletionItem.Builder item, final Posi } } if (tok != startTok) { - // shouldn't really happen, but this is way better than potential infinite loops - // of doom + // shouldn't really happen, but this is way better than potential infinite loops of doom break; } imageInd = 0; @@ -380,11 +360,11 @@ private String sortable(int i) { } private void searchForResults( - ParsedDocument doc, - Collection results, - Node node, - CompletionRequest request, - SearchDirection direction) { + ParsedDocument doc, + Collection results, + Node node, + CompletionRequest request, + SearchDirection direction) { // alright! let's figure out where the user's cursor is, and what we can help them with. node.jjtAccept(new ChunkerVisitor() { @Override @@ -482,14 +462,14 @@ public Object visitChunkerInvoke(ChunkerInvoke node, Object data) { public Object visitChunkerMethodName(ChunkerMethodName node, Object data) { final Token tok = node.jjtGetFirstToken(); assert tok == node.jjtGetLastToken(); - addMethodsAndVariables(results, tok, request, - ((HasScope) node.jjtGetParent()).getScope(), tok.image.replace("(", "")); + addMethodsAndVariables(results, tok, request, ((HasScope) node.jjtGetParent()).getScope(), + tok.image.replace("(", "")); if (request.getOffset() >= tok.endIndex - 1) { - // The user's cursor is on the opening ( of an invocation, lets add method - // arguments to completion results as well. + // The user's cursor is on the opening ( of an invocation, lets add method arguments to completion + // results as well. 
ChunkerInvoke invoke = (ChunkerInvoke) node.jjtGetParent(); - methodArgumentCompletion(invoke.getName(), results, invoke, - invoke.getArgument(0), request, direction); + methodArgumentCompletion(invoke.getName(), results, invoke, invoke.getArgument(0), request, + direction); } return null; } @@ -501,39 +481,35 @@ public Object visitChunkerParam(ChunkerParam node, Object data) { @Override public Object visitChunkerClosure(ChunkerClosure node, Object data) { - // not supporting completion for closures just yet; can likely offer parameter - // suggestions later though. + // not supporting completion for closures just yet; can likely offer parameter suggestions later though. return unsupported(node); } @Override public Object visitChunkerArray(ChunkerArray node, Object data) { - // when we're in an array, we should suggest "anything of the same type as other - // array elements", - // or otherwise look at where this array is being assigned to determine more type - // inference we can do for suggestion. + // when we're in an array, we should suggest "anything of the same type as other array elements", + // or otherwise look at where this array is being assigned to determine more type inference we can do + // for suggestion. return unsupported(node); } @Override public Object visitChunkerBinaryExpression(ChunkerBinaryExpression node, Object data) { - // if we're actually in the binary expression, it's likely that we're on the - // operator itself. - // for now, we'll try searching both left and right, and if we get unwanted matches, - // we'll reduce our scope. + // if we're actually in the binary expression, it's likely that we're on the operator itself. + // for now, we'll try searching both left and right, and if we get unwanted matches, we'll reduce our + // scope. 
switch (direction) { case BOTH: case LEFT: - searchForResults(doc, results, node.getLeft(), - request.candidate(node.getLeft().getEndIndex()), SearchDirection.LEFT); + searchForResults(doc, results, node.getLeft(), request.candidate(node.getLeft().getEndIndex()), + SearchDirection.LEFT); } if (node.getRight() != null) { switch (direction) { case BOTH: case RIGHT: searchForResults(doc, results, node.getRight(), - request.candidate(node.getRight().getStartIndex()), - SearchDirection.RIGHT); + request.candidate(node.getRight().getStartIndex()), SearchDirection.RIGHT); } } return null; @@ -557,8 +533,8 @@ public Object visitChunkerEof(ChunkerEof node, Object data) { } - private void annotationComplete(Collection results, - ChunkerAnnotation node, CompletionRequest offset) { + private void annotationComplete(Collection results, ChunkerAnnotation node, + CompletionRequest offset) { // suggest names of annotations / arguments for groovy... // while python should suggest the names of decorator functions only. 
} @@ -566,31 +542,29 @@ private void annotationComplete(Collection results, private Object unsupported(Node node) { if (log.isTraceEnabled()) { Node parent = node; - while (parent.jjtGetParent() != null - && !(parent.jjtGetParent() instanceof ChunkerDocument)) { + while (parent.jjtGetParent() != null && !(parent.jjtGetParent() instanceof ChunkerDocument)) { parent = parent.jjtGetParent(); } log.trace() - .append("Node type ") - .append(node.getClass().getCanonicalName()) - .append(" not yet supported: ") - .append(node.toSource()) - .nl() - .append("Parent source: ") - .append(parent.toSource()) - .endl(); + .append("Node type ") + .append(node.getClass().getCanonicalName()) + .append(" not yet supported: ") + .append(node.toSource()) + .nl() + .append("Parent source: ") + .append(parent.toSource()) + .endl(); } return null; } - private void numCompletion(Collection results, ChunkerNum node, - CompletionRequest offset) { + private void numCompletion(Collection results, ChunkerNum node, CompletionRequest offset) { // not really sure what, if anything, we'd want for numbers. // perhaps past history of number values entered / typed in? } private void assignCompletion(Collection results, ChunkerAssign node, - CompletionRequest offset) { + CompletionRequest offset) { final CompleteAssignment completer = new CompleteAssignment(this, node); final Node value = node.getValue(); if (value == null) { @@ -604,21 +578,17 @@ private void assignCompletion(Collection results, Chunke completer.doCompletion(results, offset, varName, false); } - // TODO: also consider offering static classes / non-void-methods or block-local-scope - // vars. + // TODO: also consider offering static classes / non-void-methods or block-local-scope vars. 
// This would get really crazy really fast w/ no filter, - // so maybe we'll just keep a cache of user-actually-used-these classes/methods/etc, and - // offer only those + // so maybe we'll just keep a cache of user-actually-used-these classes/methods/etc, and offer only those // (possibly primed with "things we want to promote users seeing"). IDS-1517-22 } else { // we only want to suggest variable names beginning with the next token final String startWith = value.jjtGetFirstToken().image; FuzzyList sorted = new FuzzyList<>(startWith); - // TODO: actually use a visitor here; really only Ident tokens should get the behavior - // below; + // TODO: actually use a visitor here; really only Ident tokens should get the behavior below; // Really, we should be adding all variable names like we do, then visiting all source, - // removing anything which occurs later than here in source, and adding any assignments - // which + // removing anything which occurs later than here in source, and adding any assignments which // occur earlier in source-but-not-in-runtime-variable-pool. 
IDS-1517-23 for (String varName : variables.getVariableNames()) { if (camelMatch(varName, startWith)) { @@ -632,22 +602,22 @@ private void assignCompletion(Collection results, Chunke } } - private void typedAssignCompletion(Collection results, - ChunkerTypedAssign node, CompletionRequest offset) {} + private void typedAssignCompletion(Collection results, ChunkerTypedAssign node, + CompletionRequest offset) {} - private void typeParamsCompletion(Collection results, - ChunkerTypeParams node, CompletionRequest offset) {} + private void typeParamsCompletion(Collection results, ChunkerTypeParams node, + CompletionRequest offset) {} - private void typeParamCompletion(Collection results, - ChunkerTypeParam node, CompletionRequest offset) { + private void typeParamCompletion(Collection results, ChunkerTypeParam node, + CompletionRequest offset) { } private void identCompletion( - ParsedDocument doc, - Collection results, - ChunkerIdent node, - CompletionRequest request) { + ParsedDocument doc, + Collection results, + ChunkerIdent node, + CompletionRequest request) { boolean onEnd = node.jjtGetFirstToken().getEndIndex() <= request.getOffset(); if (onEnd) { // user cursor is on the . or at the end of a possibly.chained.expression @@ -667,8 +637,7 @@ private void identCompletion( String src = node.toSource(); final Token tok = node.jjtGetFirstToken(); src = src.substring(0, request.getCandidate() - tok.startIndex); - addMethodsAndVariables(results, node.jjtGetFirstToken(), request, - Collections.singletonList(node), src); + addMethodsAndVariables(results, node.jjtGetFirstToken(), request, Collections.singletonList(node), src); } } @@ -685,13 +654,12 @@ private Token findReplacement(Node node, CompletionRequest request) { } private void whitespaceComplete( - ParsedDocument doc, - Collection results, - SimpleNode node, - CompletionRequest req, - SearchDirection direction) { - // when the cursor is on whitespace, we'll look around from here to find something to bind - // to... 
+ ParsedDocument doc, + Collection results, + SimpleNode node, + CompletionRequest req, + SearchDirection direction) { + // when the cursor is on whitespace, we'll look around from here to find something to bind to... // for now, we'll be lazy, and just move the cursor to our non-whitespace neighbors... final int nextLeft = node.getStartIndex() - 1; final int nextRight = node.getEndIndex() + 1; @@ -699,15 +667,13 @@ private void whitespaceComplete( case LEFT: Node left = findLeftOf(node); if (left != null) { - searchForResults(doc, results, left, req.candidate(left.getEndIndex()), - SearchDirection.LEFT); + searchForResults(doc, results, left, req.candidate(left.getEndIndex()), SearchDirection.LEFT); } break; case RIGHT: Node right = findRightOf(node); if (right != null) { - searchForResults(doc, results, right, req.candidate(right.getStartIndex()), - SearchDirection.RIGHT); + searchForResults(doc, results, right, req.candidate(right.getStartIndex()), SearchDirection.RIGHT); } break; case BOTH: @@ -716,28 +682,22 @@ private void whitespaceComplete( right = findRightOf(node); if (left == null) { if (right != null) { - searchForResults(doc, results, right, req.candidate(nextRight), - SearchDirection.LEFT); + searchForResults(doc, results, right, req.candidate(nextRight), SearchDirection.LEFT); } } else { // left is non-null if (right == null) { - searchForResults(doc, results, left, req.candidate(nextLeft), - SearchDirection.LEFT); + searchForResults(doc, results, left, req.candidate(nextLeft), SearchDirection.LEFT); } else { // both left and right are non-null. Pick the closest one first. if (req.getCandidate() - nextLeft > nextRight - req.getCandidate()) { // right pos is closer, so we'll start there. 
- searchForResults(doc, results, right, req.candidate(nextRight), - SearchDirection.RIGHT); - searchForResults(doc, results, left, req.candidate(nextLeft), - SearchDirection.LEFT); + searchForResults(doc, results, right, req.candidate(nextRight), SearchDirection.RIGHT); + searchForResults(doc, results, left, req.candidate(nextLeft), SearchDirection.LEFT); } else { // cursor is closer to left side, so start there. - searchForResults(doc, results, left, req.candidate(nextLeft), - SearchDirection.LEFT); - searchForResults(doc, results, right, req.candidate(nextRight), - SearchDirection.RIGHT); + searchForResults(doc, results, left, req.candidate(nextLeft), SearchDirection.LEFT); + searchForResults(doc, results, right, req.candidate(nextRight), SearchDirection.RIGHT); } } } @@ -794,8 +754,8 @@ private Node findLeftOf(Node node) { while (!isTerminal(next) && next.jjtGetNumChildren() > 0) { Node candidate = next.jjtGetChild(next.jjtGetNumChildren() - 1); if (candidate instanceof ChunkerEof || // if we found the EOF, skip back again - (candidate == node && next.jjtGetNumChildren() > 1) // if we ran into the original - // target node, skip it. + (candidate == node && next.jjtGetNumChildren() > 1) // if we ran into the original target node, skip + // it. ) { candidate = next.jjtGetChild(next.jjtGetNumChildren() - 2); } @@ -804,22 +764,19 @@ private Node findLeftOf(Node node) { return next; } - private void newComplete(Collection results, ChunkerNew node, - CompletionRequest offset) { - // `new ` completion not implemented yet. This would need to lookup matching types w/ public - // constructors + private void newComplete(Collection results, ChunkerNew node, CompletionRequest offset) { + // `new ` completion not implemented yet. 
This would need to lookup matching types w/ public constructors } private void invokeComplete( - Collection results, - ChunkerInvoke node, - CompletionRequest request, - SearchDirection direction) { + Collection results, + ChunkerInvoke node, + CompletionRequest request, + SearchDirection direction) { // invoke completions are one of the most important to consider. // for now, this will be a naive replacement, but later we'll want to look at _where_ // in the invoke the cursor is; when on the ending paren, we'd likely want to look at - // whether we are the argument to something, and if so, do a type check, and suggest useful - // .coercions(). + // whether we are the argument to something, and if so, do a type check, and suggest useful .coercions(). String name = node.getName().trim(); // Now, for our magic-named methods that we want to handle... @@ -827,8 +784,7 @@ private void invokeComplete( boolean inMethodName = node.isCursorOnName(request.getCandidate()); // when the cursor is between name(andParen, both of the above will trigger. - // Find or create a "string as first arg" that will only be used if we match a magic method - // name below. + // Find or create a "string as first arg" that will only be used if we match a magic method name below. 
if (inArguments) { Node firstArg = argNode(node, request); methodArgumentCompletion(name, results, node, firstArg, request, direction); @@ -852,11 +808,11 @@ private void invokeComplete( } private void addMethods( - Collection results, - Token replacing, - CompletionRequest request, - List scope, - String methodPrefix) { + Collection results, + Token replacing, + CompletionRequest request, + List scope, + String methodPrefix) { Optional> bindingVar = resolveScopeType(request, scope); if (bindingVar.isPresent()) { final Class bindingClass = bindingVar.get(); @@ -865,49 +821,45 @@ private void addMethods( // log that we couldn't find the binding var; // in theory we should be able to resolve anything that is valid source. log.trace() - .append("Unable to find binding variable for ") - .append(methodPrefix) - .append(" from scope ") - .append(scope.stream().map(IsScope::getName).collect(Collectors.joining("."))) - .append(" from request ") - .append(request.toString()) - .endl(); + .append("Unable to find binding variable for ") + .append(methodPrefix) + .append(" from scope ") + .append(scope.stream().map(IsScope::getName).collect(Collectors.joining("."))) + .append(" from request ") + .append(request.toString()) + .endl(); } } private void addMethodsAndVariables( - Collection results, - Token replacing, - CompletionRequest request, - List scope, - String variablePrefix) { + Collection results, + Token replacing, + CompletionRequest request, + List scope, + String variablePrefix) { variablePrefix = variablePrefix.trim(); // Add any method which make sense from the existing name / ident token. addMethods(results, replacing, request, scope, variablePrefix); // Add any variables present in current scope (will show system objects) doVariableCompletion(results, variablePrefix, replacing, request); - // TODO: Add completion for any assignment expressions which occur earlier than us in the - // document. 
- // This will show only-in-source objects, without needing to actually run user's code. - // IDS-1517-18 + // TODO: Add completion for any assignment expressions which occur earlier than us in the document. + // This will show only-in-source objects, without needing to actually run user's code. IDS-1517-18 } private void doMethodCompletion( - Collection results, - Class bindingClass, - String methodPrefix, - Token replacing, - CompletionRequest request) { + Collection results, + Class bindingClass, + String methodPrefix, + Token replacing, + CompletionRequest request) { // hokay! now, we can use the invoke's name to find methods / fields in this type. FuzzyList sorter = new FuzzyList<>(methodPrefix); for (Method method : bindingClass.getMethods()) { if (Modifier.isPublic(method.getModifiers())) { - // TODO we'll likely want to pick between static or instance methods, based on - // calling scope. + // TODO we'll likely want to pick between static or instance methods, based on calling scope. 
// IDS-1517-19 - // TODO(deephaven-core#875): Auto-complete on instance should not suggest static - // methods + // TODO(deephaven-core#875): Auto-complete on instance should not suggest static methods if (camelMatch(method.getName(), methodPrefix)) { sorter.add(method.getName(), method); } @@ -921,13 +873,12 @@ private void doMethodCompletion( } } - private void doVariableCompletion(Collection results, - String variablePrefix, Token replacing, CompletionRequest request) { + private void doVariableCompletion(Collection results, String variablePrefix, + Token replacing, CompletionRequest request) { FuzzyList sorter = new FuzzyList<>(variablePrefix); for (String name : variables.getVariableNames()) { if (!name.equals(variablePrefix) && camelMatch(name, variablePrefix)) { - // only suggest names which are camel-case-matches (ignoring - // same-as-existing-variable names) + // only suggest names which are camel-case-matches (ignoring same-as-existing-variable names) sorter.add(name, name); } } @@ -985,13 +936,13 @@ private Node argNode(ChunkerInvoke node, CompletionRequest request) { } private void methodArgumentCompletion( - String name, Collection results, - ChunkerInvoke node, - Node replaceNode, - CompletionRequest request, - SearchDirection direction) { - // TODO: replace this hardcoded list of magic method names with something generated by an - // annotation processor. IDS-1517-32 + String name, Collection results, + ChunkerInvoke node, + Node replaceNode, + CompletionRequest request, + SearchDirection direction) { + // TODO: replace this hardcoded list of magic method names with something generated by an annotation processor. 
+ // IDS-1517-32 boolean tableReturningMethod = false; switch (name) { case "join": @@ -1000,18 +951,15 @@ private void methodArgumentCompletion( case "exactJoin": case "aj": // TODO: joins will need special handling; IDS-1517-5 example from Charles: - // j=l.naturalJoin(r, "InBoth,AFromLeft=BFromRight,CInLeft=DFromRight", - // "EInOut=FromRight,FInOut") + // j=l.naturalJoin(r, "InBoth,AFromLeft=BFromRight,CInLeft=DFromRight", "EInOut=FromRight,FInOut") // as you see, we need both the scope table l and the joined table r, // then we also need to handle CSV-separated column expressions. - // To even try this using string inspection is foolish, as a `,` or `=` could easily - // appear inside the expression. - // For these reasons, proper support here will have to wait until we also parse the - // string contents; - // we can parse them on-demand via Chunker#MethodArgs, and just get a list of - // assignments. - // might actually be better to just make Chunker#JoinArgs, to specify CSV of - // assignments (and allow ` strings) + // To even try this using string inspection is foolish, as a `,` or `=` could easily appear inside the + // expression. + // For these reasons, proper support here will have to wait until we also parse the string contents; + // we can parse them on-demand via Chunker#MethodArgs, and just get a list of assignments. 
+ // might actually be better to just make Chunker#JoinArgs, to specify CSV of assignments (and allow ` + // strings) break; case "where": @@ -1048,7 +996,7 @@ private void methodArgumentCompletion( } private Optional> resolveScopeType( - CompletionRequest request, List scope) { + CompletionRequest request, List scope) { if (scope == null || scope.isEmpty()) { return Optional.empty(); } @@ -1059,7 +1007,7 @@ private Optional> resolveScopeType( return Optional.of(type); } Optional> result = resolveScope(scope) - .map(Object::getClass); + .map(Object::getClass); if (result.isPresent()) { return result; } @@ -1087,8 +1035,8 @@ private Optional> resolveScopeType( return Optional.empty(); } - public List findAssignment(final ParsedDocument doc, - final CompletionRequest request, final String name) { + public List findAssignment(final ParsedDocument doc, final CompletionRequest request, + final String name) { final Map> assignments = ensureAssignments(doc); final List options = assignments.get(name), results = new ArrayList<>(); if (options != null) { @@ -1131,8 +1079,7 @@ private Optional resolveScope(List scope) { return Optional.empty(); } IsScope o = scope.get(0); - // TODO: also handle static classes / variables only present in source (code not run yet) - // IDS-1517-23 + // TODO: also handle static classes / variables only present in source (code not run yet) IDS-1517-23 try { final Object var = variables.getVariable(o.getName(), null); @@ -1150,8 +1097,7 @@ private Optional resolveSubScope(Object var, List scope, int i) return Optional.ofNullable(var); } // keep digging into scope to find a variable. Since we are allowing groovy here, - // we may need to do some reflective `field access to getter method` coercion - // automatically... + // we may need to do some reflective `field access to getter method` coercion automatically... 
String nextName = scope.get(i++).getName(); Object next = getFromVar(var, nextName); return resolveSubScope(next, scope, i); @@ -1215,25 +1161,22 @@ private String toBeanMethod(String nextName, String prefix) { if (nextName.startsWith(prefix)) { return nextName; } - nextName = Character.toUpperCase(nextName.charAt(0)) - + (nextName.length() == 1 ? "" : nextName.substring(1)); + nextName = Character.toUpperCase(nextName.charAt(0)) + (nextName.length() == 1 ? "" : nextName.substring(1)); return prefix + nextName; } private void stringComplete( - Collection results, - ChunkerString node, - CompletionRequest offset, - SearchDirection direction) { - // Alright! we are inside of a string. Find out what method, if any, this string is - // contained by. + Collection results, + ChunkerString node, + CompletionRequest offset, + SearchDirection direction) { + // Alright! we are inside of a string. Find out what method, if any, this string is contained by. Node parent = node.jjtGetParent(); while (parent != null && !(parent instanceof ChunkerInvoke)) { parent = parent.jjtGetParent(); } if (parent == null) { - // hm, a string that's not inside a method... there might be something we can do here - // later, + // hm, a string that's not inside a method... there might be something we can do here later, // but for now, let's just ignore... 
} else { ChunkerInvoke invoke = (ChunkerInvoke) parent; @@ -1242,11 +1185,11 @@ private void stringComplete( } private void stringInMethodComplete( - Collection results, - ChunkerInvoke invoke, - ChunkerString node, - CompletionRequest offset, - SearchDirection direction) { + Collection results, + ChunkerInvoke invoke, + ChunkerString node, + CompletionRequest offset, + SearchDirection direction) { methodArgumentCompletion(invoke.getName(), results, invoke, node, offset, direction); // Check the query library for static methods } @@ -1256,11 +1199,11 @@ private boolean isValidTableName(String tableName) { } private void maybeColumnComplete( - Collection results, - ChunkerInvoke invoke, - Node node, - CompletionRequest offset, - SearchDirection direction) { + Collection results, + ChunkerInvoke invoke, + Node node, + CompletionRequest offset, + SearchDirection direction) { String str = stringLiteral(node); final boolean doColumnNameCompletion, doColumnExpressionCompletion; @@ -1286,8 +1229,8 @@ private void maybeColumnComplete( } else { // The document index of the start of the String str; if (node.isWellFormed()) { - // we are inside a method(''[)] <- optional ), we don't really care, since we have - // an argument for reference + // we are inside a method(''[)] <- optional ), we don't really care, since we have an argument for + // reference } else { // we are inside a method(' @@ -1313,9 +1256,8 @@ private void maybeColumnComplete( if (def == null) { // No table definition? We should report this as a failure (TableDoesNotExist). // TODO proper diagnostic messages; IDS-1517-16 - log.info().append("No table definition found for ") - .append(String.valueOf(invoke.getScope())).append(" at offset ") - .append(offset.toString()).endl(); + log.info().append("No table definition found for ").append(String.valueOf(invoke.getScope())) + .append(" at offset ").append(offset.toString()).endl(); } else { // finally, complete the column name... 
final CompleteColumnName completer = new CompleteColumnName(this, node, invoke); @@ -1330,8 +1272,7 @@ private void maybeColumnComplete( } if (doColumnExpressionCompletion) { - // Users already has ColumnName=bleh (where bleh may be any string, including empty - // string) + // Users already has ColumnName=bleh (where bleh may be any string, including empty string) Class colType = guessColumnType(columnName, invoke.getScope(), offset); final int methodNameStarts = equal + 1; final int methodNameEnds = str.length(); @@ -1341,8 +1282,7 @@ private void maybeColumnComplete( } else { partialMatch = str.substring(methodNameStarts, methodNameEnds).trim(); } - final CompleteColumnExpression completer = - new CompleteColumnExpression(this, node, invoke); + final CompleteColumnExpression completer = new CompleteColumnExpression(this, node, invoke); if (def != null) { // There is a table definition. Lets try to suggest column names here. for (String colName : def.getColumnNames()) { @@ -1350,29 +1290,26 @@ private void maybeColumnComplete( // do not suggest the same column name that is already there !colName.equals(partialMatch) && // only suggest column names with a camelCase match - camelMatch(colName, partialMatch)) { + camelMatch(colName, partialMatch)) { completer.doCompletion(results, offset, colName); } } } - if (!partialMatch.isEmpty() - || !"false".equals(System.getProperty(PROP_SUGGEST_STATIC_METHODS))) { + if (!partialMatch.isEmpty() || !"false".equals(System.getProperty(PROP_SUGGEST_STATIC_METHODS))) { // empty method name will generate MANY matches (lots of static method imports). 
- // We left a system property to remove these during testing, so we don't have 30+ - // results, + // We left a system property to remove these during testing, so we don't have 30+ results, // when we want to test only for column name matches (above) for (Class cls : lookups.getStatics()) { for (Method method : cls.getMethods()) { // try to handle the column type here, to limit what we send... // We may want to simply have this affect the score of the results, - // so that we'd still send back something incorrect (at least, in cases like - // update()) + // so that we'd still send back something incorrect (at least, in cases like update()) if (colType.isAssignableFrom(method.getReturnType())) { // TODO: handle instance method calls if appropriate IDS-1517-19 - // TODO: check binding for variables that can be referenced in column - // expressions (any non-typed assign before now) IDS-1517-23 + // TODO: check binding for variables that can be referenced in column expressions (any + // non-typed assign before now) IDS-1517-23 if (Modifier.isStatic(method.getModifiers())) { if (camelMatch(method.getName(), partialMatch)) { completer.doCompletion(results, offset, method); @@ -1388,42 +1325,36 @@ private void maybeColumnComplete( } private TableDefinition findTableDefinition( - List scope, - CompletionRequest offset) { + List scope, + CompletionRequest offset) { if (scope == null || scope.isEmpty()) { return null; } final IsScope previous = scope.get(scope.size() - 1); if (scope.size() == 1) { // previous is all we have; just lookup the table definition - final TableDefinition definition = - offset.getTableDefinition(this, doc, variables, previous.getName()); + final TableDefinition definition = offset.getTableDefinition(this, doc, variables, previous.getName()); if (definition == null) { // log missing definition... 
} return definition; } else if (previous instanceof ChunkerInvoke) { - // recurse into each scope node and use the result of our parent's type to compute our - // own. - // This allows us to hack in special support for statically analyzing expressions which - // return tables. + // recurse into each scope node and use the result of our parent's type to compute our own. + // This allows us to hack in special support for statically analyzing expressions which return tables. return findTableDefinition(((ChunkerInvoke) previous).getScope(), offset); } return null; } - private Class guessColumnType(String columnName, List scope, - CompletionRequest offset) { + private Class guessColumnType(String columnName, List scope, CompletionRequest offset) { // When guessing column type, we'll prefer a perfect match against a known table-as-scope. // Failing that, we'll do a global lookup of all columns-by-types from all table definitions - // And, finally, failing that, we'll do some random guessing based on "well-known column - // names". + // And, finally, failing that, we'll do some random guessing based on "well-known column names". // guess table name from the scope; either a reference to a table, or a db.i|t call. - // for now, we are not going to do complex scope inspections to guess at not-yet-run update - // operations. + // for now, we are not going to do complex scope inspections to guess at not-yet-run update operations. 
if (scope != null && scope.size() > 0) { IsScope root = scope.get(0); if (root instanceof ChunkerIdent) { @@ -1433,15 +1364,12 @@ private Class guessColumnType(String columnName, List scope, } } else { // look for the table in binding - final TableDefinition def = - offset.getTableDefinition(this, doc, variables, root.getName()); + final TableDefinition def = offset.getTableDefinition(this, doc, variables, root.getName()); if (def != null) { final ColumnDefinition col = def.getColumn(columnName); if (col == null) { - // might happen if user did - // someTable.update("NewCol=123").update("NewCol= - // we can handle this by inspecting the scope chain, but leaving edge - // case out for now. + // might happen if user did someTable.update("NewCol=123").update("NewCol= + // we can handle this by inspecting the scope chain, but leaving edge case out for now. } else { return col.getDataType(); } @@ -1462,8 +1390,8 @@ private Class guessColumnType(String columnName, List scope, } @Deprecated - public void addMatch(Collection results, Node node, String match, - CompletionRequest index, String... nextTokens) { + public void addMatch(Collection results, Node node, String match, CompletionRequest index, + String... nextTokens) { // IDS-1517-13 // The mess has gone too far. It is no longer maintainable. @@ -1568,15 +1496,14 @@ public void addMatch(Collection results, Node node, Stri displayed = completion.toString(); // + (node.isWellFormed() ? 
"" : nextChar); - final DocumentRange.Builder range = - LspTools.rangeFromSource(doc.getSource(), start, length); + final DocumentRange.Builder range = LspTools.rangeFromSource(doc.getSource(), start, length); final CompletionItem.Builder result = CompletionItem.newBuilder(); result.setStart(start) - .setLength(length) - .setLabel(displayed) - .getTextEditBuilder() - .setText(displayed) - .setRange(range); + .setLength(length) + .setLabel(displayed) + .getTextEditBuilder() + .setText(displayed) + .setRange(range); results.add(result); } diff --git a/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/CompletionFragment.java b/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/CompletionFragment.java index 33b7b6f8a16..d209722d0d4 100644 --- a/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/CompletionFragment.java +++ b/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/CompletionFragment.java @@ -3,8 +3,8 @@ /** * A Deephaven-internal completion fragment. * - * Represents a source code replacement option, with fields for "what code to insert", "where to - * insert it", and "what to render for completion, if different from code insertion". + * Represents a source code replacement option, with fields for "what code to insert", "where to insert it", and "what + * to render for completion, if different from code insertion". 
*/ public class CompletionFragment { int start; // where to start the replacement @@ -45,11 +45,11 @@ public String getDisplayCompletion() { @Override public String toString() { return "CompletionFragment{" + - "start=" + start + - ", length=" + length + - ", completion='" + completion + '\'' + - ", displayCompletion='" + displayCompletion + '\'' + - '}'; + "start=" + start + + ", length=" + length + + ", completion='" + completion + '\'' + + ", displayCompletion='" + displayCompletion + '\'' + + '}'; } @Override diff --git a/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/CompletionLookups.java b/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/CompletionLookups.java index 193248574c9..6e0b2c37a01 100644 --- a/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/CompletionLookups.java +++ b/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/CompletionLookups.java @@ -14,14 +14,12 @@ /** * A lookup object for various values that the {@link ChunkerCompleter} might be interested in. * - * This is extracted into its own class, so preloading can start as soon as the console session - * starts. + * This is extracted into its own class, so preloading can start as soon as the console session starts. * */ public class CompletionLookups { - private static final WeakHashMap lookups = - new WeakHashMap<>(); + private static final WeakHashMap lookups = new WeakHashMap<>(); private final Lazy ql; private final Lazy> statics; diff --git a/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/CompletionOptions.java b/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/CompletionOptions.java index 8b84f7ad055..f8dd7362415 100644 --- a/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/CompletionOptions.java +++ b/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/CompletionOptions.java @@ -7,8 +7,8 @@ /** * Various context-sensitive options to use when generating completions. 
* - * This includes information like "result will have quotes around it", or "result should have a - * comma after it" or "result should have space before it". + * This includes information like "result will have quotes around it", or "result should have a comma after it" or + * "result should have space before it". */ public class CompletionOptions { diff --git a/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/CompletionRequest.java b/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/CompletionRequest.java index 536838b64d2..1f30e33dc78 100644 --- a/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/CompletionRequest.java +++ b/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/CompletionRequest.java @@ -19,20 +19,20 @@ /** * A stateful object to represent a document search at a given position. * - * When we search left or right from an intermediate node like whitespace, dot, comma or EOF, we - * will create a new CompletionRequest at the new index. + * When we search left or right from an intermediate node like whitespace, dot, comma or EOF, we will create a new + * CompletionRequest at the new index. * - * This currently uses absolute cursor positions, but we want this to be a line/column position - * instead, so we can completely remove consideration of absolute cursors. + * This currently uses absolute cursor positions, but we want this to be a line/column position instead, so we can + * completely remove consideration of absolute cursors. * - * This mistake was made by trying to base V2 on V1 semantics which are not really relevant when - * considering Monaco, LSP and Javacc which all use line/column semantics. + * This mistake was made by trying to base V2 on V1 semantics which are not really relevant when considering Monaco, LSP + * and Javacc which all use line/column semantics. 
* - * Absolute cursor positions are unfortunately deeply entwined in {@link ChunkerCompleter}, so we - * are leaving it in place for now. + * Absolute cursor positions are unfortunately deeply entwined in {@link ChunkerCompleter}, so we are leaving it in + * place for now. * - * Note that this class also maintains a map of loaded table definitions, so that repeated - * completions will not pay to load the same table definition more than once. + * Note that this class also maintains a map of loaded table definitions, so that repeated completions will not pay to + * load the same table definition more than once. */ public class CompletionRequest { @@ -49,7 +49,7 @@ public CompletionRequest(@NotNull ChunkerCompleter completer, String command, in } private CompletionRequest(@NotNull ChunkerCompleter completer, String command, int offset, - Map localDefs) { + Map localDefs) { this.source = command; this.offset = this.candidate = offset; this.completer = completer; @@ -80,30 +80,27 @@ public CompletionRequest candidate(int index) { return req; } - public TableDefinition getTableDefinition(final ChunkerCompleter completer, - final ParsedDocument doc, VariableProvider variables, String name) { - // Each request maintains a local cache of looked-up table definitions, to avoid going to - // the VariableHandler unless needed + public TableDefinition getTableDefinition(final ChunkerCompleter completer, final ParsedDocument doc, + VariableProvider variables, String name) { + // Each request maintains a local cache of looked-up table definitions, to avoid going to the VariableHandler + // unless needed // Note that we do NOT go to the completer.getReferencedTables map at all; // we don't want to cache anything local-to-script-session any further // than a single completion invocation (i.e. local to a CompletionRequest) if (localDefs.containsKey(name)) { - // if there wasn't a table definition in the script session once, it won't magically - // appear again later. 
- // This might seem a little excessive, but in python at least, it is non-"free" to check - // if binding a variable exists. + // if there wasn't a table definition in the script session once, it won't magically appear again later. + // This might seem a little excessive, but in python at least, it is non-"free" to check if binding a + // variable exists. return localDefs.get(name); } TableDefinition result = variables.getTableDefinition(name); if (result == null) { - // If the result was null, we can try to search for an assign statement that is - // initialized w/ something we _can_ grok. + // If the result was null, we can try to search for an assign statement that is initialized w/ something we + // _can_ grok. final List assignment = completer.findAssignment(doc, this, name); if (!assignment.isEmpty()) { - // ok! there was an assignment to our table variable that occurred before user's - // cursor (this is, - // of course, bad when user creates some random functions that are defined in any - // order). + // ok! there was an assignment to our table variable that occurred before user's cursor (this is, + // of course, bad when user creates some random functions that are defined in any order). final ListIterator itr = assignment.listIterator(assignment.size()); while (itr.hasPrevious()) { final ChunkerAssign check = itr.previous(); @@ -140,19 +137,18 @@ private TableDefinition convertNewTableInvocation(final ChunkerInvoke invoke) { final List colArgs = colInvoke.getArguments(); if (colArgs.isEmpty()) { // this is normal, if user has typed `newTable(stringCol(` - // in this case, there is no valid table definition, so we'll just skip this - // column. - // In the future, we may want to consider user-defined zero-arg functions that - // return ColumnDefinitions + // in this case, there is no valid table definition, so we'll just skip this column. 
+ // In the future, we may want to consider user-defined zero-arg functions that return + // ColumnDefinitions continue; } final String colName = toStringLiteral(colArgs.get(0)); if (colName == null) { if (LOGGER.isTraceEnabled()) { LOGGER.trace() - .append("Unable to trace first argument back to a string literal: ") - .append(colInvoke.toSource()) - .endl(); + .append("Unable to trace first argument back to a string literal: ") + .append(colInvoke.toSource()) + .endl(); } continue; } @@ -186,26 +182,26 @@ private TableDefinition convertNewTableInvocation(final ChunkerInvoke invoke) { columns.add(ColumnDefinition.ofFloat(colName)); break; case "col": - // We _could_ technically try to guess from the col() varargs what the type - // is, but, not worth it atm. + // We _could_ technically try to guess from the col() varargs what the type is, but, not worth + // it atm. columns.add(ColumnDefinition.fromGenericType(colName, Object.class)); break; default: LOGGER.warn() - .append("Unhandled newTable() argument ") - .append(argument.toSource()) - .append(" not a recognized invocation") - .endl(); + .append("Unhandled newTable() argument ") + .append(argument.toSource()) + .append(" not a recognized invocation") + .endl(); break; } } else { // TODO: handle ColumnDefition/etc variables LOGGER.warn() - .append("Unhandled newTable() argument ") - .append(argument.toSource()) - .append(" of type ") - .append(argument.getClass().getName()) - .endl(); + .append("Unhandled newTable() argument ") + .append(argument.toSource()) + .append(" of type ") + .append(argument.getClass().getName()) + .endl(); } } def.setColumns(columns.toArray(new ColumnDefinition[0])); @@ -215,17 +211,17 @@ private TableDefinition convertNewTableInvocation(final ChunkerInvoke invoke) { private String toStringLiteral(final Node node) { if (node instanceof ChunkerString) { return ((ChunkerString) node).getRaw(); - } // TODO: if it's a variable name, try to trace it back to a static assignment of a string, - 
// or a binding variable. + } // TODO: if it's a variable name, try to trace it back to a static assignment of a string, or a binding + // variable. return null; } @Override public String toString() { return "CompletionRequest{" + - "source='" + source + '\'' + - ", offset=" + offset + - ", candidate=" + candidate + - '}'; + "source='" + source + '\'' + + ", offset=" + offset + + ", candidate=" + candidate + + '}'; } } diff --git a/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/DelegatingCompletionHandler.java b/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/DelegatingCompletionHandler.java index a36a781d4b8..5b938262157 100644 --- a/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/DelegatingCompletionHandler.java +++ b/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/DelegatingCompletionHandler.java @@ -17,14 +17,13 @@ /** * Allows you to join multiple completion handlers together, and coalesce the final results. * - * In the future, we will better log which handlers provided which completions. We may also simply - * call the different handlers in a different manner, when the v2 implementation api changes to - * better support the ChunkerDocument model (and pushing results to client directly), rendering this - * class obsolete. + * In the future, we will better log which handlers provided which completions. We may also simply call the different + * handlers in a different manner, when the v2 implementation api changes to better support the ChunkerDocument model + * (and pushing results to client directly), rendering this class obsolete. * - * NOTE: This class is not currently in use, since the coalesced results wound up causing ugly - * duplications with different whitespace. We will delete it after merging, to have it in git - * history, in case we decide to revive it later. 
+ * NOTE: This class is not currently in use, since the coalesced results wound up causing ugly duplications with + * different whitespace. We will delete it after merging, to have it in git history, in case we decide to revive it + * later. */ public class DelegatingCompletionHandler implements CompletionHandler { @@ -36,14 +35,12 @@ public DelegatingCompletionHandler(CompletionHandler... handlers) { } @Override - public Collection runCompletion(final ParsedDocument doc, - final Position pos, final int offset) { - CompletableFuture>[] futures = - new CompletableFuture[handlers.length]; + public Collection runCompletion(final ParsedDocument doc, final Position pos, + final int offset) { + CompletableFuture>[] futures = new CompletableFuture[handlers.length]; for (int i = 0; i < handlers.length; i++) { final CompletionHandler handler = handlers[i]; - futures[i] = - CompletableFuture.supplyAsync(() -> handler.runCompletion(doc, pos, offset)); + futures[i] = CompletableFuture.supplyAsync(() -> handler.runCompletion(doc, pos, offset)); } Set all = new LinkedHashSet<>(); for (CompletableFuture> future : futures) { @@ -54,12 +51,11 @@ public Collection runCompletion(final ParsedDocument doc break; } catch (ExecutionException e) { LOGGER.trace() - .append("Unknown error running autocomplete. ") - .append(e.getCause()) - .endl(); + .append("Unknown error running autocomplete. ") + .append(e.getCause()) + .endl(); } catch (TimeoutException e) { - // yikes, more than 5 seconds? no human alive wants to wait 5 seconds to get - // autocomplete popup! + // yikes, more than 5 seconds? no human alive wants to wait 5 seconds to get autocomplete popup! 
continue; } } diff --git a/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/results/CompleteAssignment.java b/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/results/CompleteAssignment.java index f4f186a2920..d3f95c098a3 100644 --- a/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/results/CompleteAssignment.java +++ b/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/results/CompleteAssignment.java @@ -11,8 +11,8 @@ import java.util.Set; /** - * A class specifically for completing assignment statements; to be called after the completer has - * discovered the cursor near an assignment token. + * A class specifically for completing assignment statements; to be called after the completer has discovered the cursor + * near an assignment token. * */ public class CompleteAssignment extends CompletionBuilder { @@ -28,18 +28,16 @@ public CompleteAssignment(ChunkerCompleter completer, ChunkerAssign assign) { } public void doCompletion( - Collection results, - CompletionRequest request, - String varName, - boolean methodMatched) { + Collection results, + CompletionRequest request, + String varName, + boolean methodMatched) { final CompletionOptions opts = new CompletionOptions().setPrevTokens("=", " "); if (assign.getValue() == null) { // There is no value after the =, so try adding a . or ( to complete the expression. - // In truth, this could probably just be done at the call site by adding to the varName - // argument. + // In truth, this could probably just be done at the call site by adding to the varName argument. opts.setNextTokens(methodMatched ? "(" : "."); - // If the user has already typed a ( or ., then we definitely should not add either of - // them, + // If the user has already typed a ( or ., then we definitely should not add either of them, // so we use stopTokens to tell #addMatch when to stop adding suffixes. 
opts.setStopTokens("(", "."); } diff --git a/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/results/CompleteColumnExpression.java b/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/results/CompleteColumnExpression.java index f5a1681c3a4..823b46b60e3 100644 --- a/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/results/CompleteColumnExpression.java +++ b/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/results/CompleteColumnExpression.java @@ -16,8 +16,8 @@ import java.util.Set; /** - * A class specifically for completing column expression; to be called after the completer has - * discovered the a column expression with an = and the cursor is at or after =. + * A class specifically for completing column expression; to be called after the completer has discovered the a column + * expression with an = and the cursor is at or after =. * */ public class CompleteColumnExpression extends CompletionBuilder { @@ -27,9 +27,9 @@ public class CompleteColumnExpression extends CompletionBuilder { private final ChunkerInvoke invoke; public CompleteColumnExpression( - ChunkerCompleter completer, - Node node, - ChunkerInvoke invoke) { + ChunkerCompleter completer, + Node node, + ChunkerInvoke invoke) { super(completer); this.node = node; this.invoke = invoke; @@ -37,16 +37,15 @@ public CompleteColumnExpression( } public void doCompletion( - Collection results, - CompletionRequest request, - Method method) { + Collection results, + CompletionRequest request, + Method method) { final String displayCompletion; if (method.getDeclaringClass().getSimpleName().endsWith("Primitives") - && method.getDeclaringClass().getPackage().equals( - BytePrimitives.class.getPackage())) { + && method.getDeclaringClass().getPackage().equals( + BytePrimitives.class.getPackage())) { // reduce massive duplication from same-named primitives methods. 
- // In the future, when we have better column/type inference, we should be able to delete - // this workaround + // In the future, when we have better column/type inference, we should be able to delete this workaround displayCompletion = "*Primitives."; } else { displayCompletion = method.getDeclaringClass().getSimpleName() + "."; @@ -98,25 +97,24 @@ public void doCompletion( spaceBefore = Character.isWhitespace(prev); sawEqual = true; default: - range.getStartBuilder() - .setCharacter(range.getStartBuilder().getCharacter() + 1); + range.getStartBuilder().setCharacter(range.getStartBuilder().getCharacter() + 1); start++; } } CompletionItem.Builder result = CompletionItem.newBuilder(); result.setStart(start) - .setLength(len) - .setLabel(displayCompletion + replaced + suffix) - .getTextEditBuilder() - .setText(replaced) - .setRange(range); + .setLength(len) + .setLabel(displayCompletion + replaced + suffix) + .getTextEditBuilder() + .setText(replaced) + .setRange(range); results.add(result); } public void doCompletion( - Collection results, - CompletionRequest request, - String colName) { + Collection results, + CompletionRequest request, + String colName) { String replaced = colName; // need to handle null node using parent invoke, the same as CompleteColumnName @@ -147,8 +145,7 @@ public void doCompletion( spaceBefore = Character.isWhitespace(prev); sawEqual = true; default: - range.getStartBuilder() - .setCharacter(range.getStartBuilder().getCharacter() + 1); + range.getStartBuilder().setCharacter(range.getStartBuilder().getCharacter() + 1); start++; } } @@ -171,11 +168,11 @@ public void doCompletion( } final CompletionItem.Builder result = CompletionItem.newBuilder(); result.setStart(start) - .setLength(len) - .setLabel(withClose) - .getTextEditBuilder() - .setText(withClose) - .setRange(range); + .setLength(len) + .setLabel(withClose) + .getTextEditBuilder() + .setText(withClose) + .setRange(range); results.add(result); // An alternate version which does not 
include the close quote. // Ideally, we just move the user's cursor position backwards, by making the main diff --git a/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/results/CompleteColumnName.java b/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/results/CompleteColumnName.java index 0af676e45bd..e8eff13b9c8 100644 --- a/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/results/CompleteColumnName.java +++ b/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/results/CompleteColumnName.java @@ -13,8 +13,8 @@ import java.util.Set; /** - * A class specifically for completing column names; to be called after the completer has discovered - * the name of the column to match. + * A class specifically for completing column names; to be called after the completer has discovered the name of the + * column to match. * */ public class CompleteColumnName extends CompletionBuilder { @@ -23,18 +23,18 @@ public class CompleteColumnName extends CompletionBuilder { private final ChunkerInvoke invoke; public CompleteColumnName( - ChunkerCompleter completer, - Node node, - ChunkerInvoke invoke) { + ChunkerCompleter completer, + Node node, + ChunkerInvoke invoke) { super(completer); this.node = node; this.invoke = invoke; } public void doCompletion( - Collection results, - CompletionRequest request, - String colName) { + Collection results, + CompletionRequest request, + String colName) { final String src; final DocumentRange.Builder range; src = node == null ? "" : node.toSource(); @@ -54,8 +54,7 @@ public void doCompletion( StringBuilder b = new StringBuilder(); b.append(qt); b.append(colName); - // Instead of addTokens, we need to use raw strings, since we don't tokenize inside strings - // (yet). + // Instead of addTokens, we need to use raw strings, since we don't tokenize inside strings (yet). 
int ind = src.indexOf('='); if (ind == -1) { b.append(" = "); @@ -83,11 +82,11 @@ public void doCompletion( final CompletionItem.Builder result = CompletionItem.newBuilder(); String item = b.toString(); result.setStart(start) - .setLength(len) - .setLabel(item) - .getTextEditBuilder() - .setText(item) - .setRange(range); + .setLength(len) + .setLabel(item) + .getTextEditBuilder() + .setText(item) + .setRange(range); results.add(result); } } diff --git a/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/results/CompleteInvocation.java b/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/results/CompleteInvocation.java index 4c6a112f064..08a46e1dade 100644 --- a/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/results/CompleteInvocation.java +++ b/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/results/CompleteInvocation.java @@ -14,8 +14,8 @@ import java.util.Set; /** - * A class specifically for completing invocations; to be called with method results when the cursor - * is somewhere that a method is valid. + * A class specifically for completing invocations; to be called with method results when the cursor is somewhere that a + * method is valid. 
* */ public class CompleteInvocation extends CompletionBuilder { @@ -27,8 +27,7 @@ public CompleteInvocation(ChunkerCompleter completer, Token replacing) { this.replacing = replacing; } - public void doCompletion(Collection results, CompletionRequest request, - Method method) { + public void doCompletion(Collection results, CompletionRequest request, Method method) { final int start = replacing.getStartIndex(); final int length = replacing.getEndIndex() - start; @@ -67,30 +66,27 @@ public void doCompletion(Collection results, CompletionR } final String displayCompletion; if (method.getDeclaringClass().getSimpleName().endsWith("Primitives") && - BytePrimitives.class.getPackage().equals(method.getDeclaringClass().getPackage())) { + BytePrimitives.class.getPackage().equals(method.getDeclaringClass().getPackage())) { // reduce massive duplication from same-named primitives methods. - // In the future, when we have better column/type inference, we should be able to delete - // this workaround + // In the future, when we have better column/type inference, we should be able to delete this workaround displayCompletion = "*Primitives." + method.getName() + "("; } else { - displayCompletion = - method.getDeclaringClass().getSimpleName() + "." + method.getName() + "("; + displayCompletion = method.getDeclaringClass().getSimpleName() + "." 
+ method.getName() + "("; } res.append(method.getName()).append("("); CompletionItem.Builder result = - CompletionItem.newBuilder() - .setStart(start) - .setLength(length) - // let the user know where this method is coming from (include class name in display - // completion); - .setLabel(displayCompletion); + CompletionItem.newBuilder() + .setStart(start) + .setLength(length) + // let the user know where this method is coming from (include class name in display + // completion); + .setLabel(displayCompletion); result.getTextEditBuilder() - .setText(res.toString()) - .setRange(range); + .setText(res.toString()) + .setRange(range); // in the future, we should enable adding // explicit import statements for static methods. For now, we're assuming all static methods - // already came from imports, but we'll want to handle this explicitly for more exotic cases - // in the future. + // already came from imports, but we'll want to handle this explicitly for more exotic cases in the future. results.add(result); } } diff --git a/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/results/CompleteTableName.java b/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/results/CompleteTableName.java index 4d06c72ceda..3378beadac8 100644 --- a/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/results/CompleteTableName.java +++ b/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/results/CompleteTableName.java @@ -10,8 +10,8 @@ import java.util.stream.Stream; /** - * A class specifically for completing table names; to be called after the completer has discovered - * the name of the table. + * A class specifically for completing table names; to be called after the completer has discovered the name of the + * table. 
* */ public class CompleteTableName extends CompletionBuilder { @@ -19,17 +19,16 @@ public class CompleteTableName extends CompletionBuilder { private final ChunkerInvoke invoke; private final Stream matches; - public CompleteTableName(ChunkerCompleter completer, ChunkerInvoke invoke, - Stream matches) { + public CompleteTableName(ChunkerCompleter completer, ChunkerInvoke invoke, Stream matches) { super(completer); this.invoke = invoke; this.matches = matches; } public void doCompletion( - Node node, - Set results, - CompletionRequest request) { + Node node, + Set results, + CompletionRequest request) { final int argInd = invoke.indexOfArgument(node); final String qt = getCompleter().getQuoteType(node); final DocumentRange.Builder range; @@ -110,8 +109,7 @@ public void doCompletion( if (node != null && node.isWellFormed()) { len++; range.getEndBuilder().setCharacter(range.getEndBuilder().getCharacter() + 1); - // may need to skip this item, in case we are suggesting the exact thing which - // already exists. + // may need to skip this item, in case we are suggesting the exact thing which already exists. if (name.equals(node.toSource())) { // This suggestion is a duplicate; discard it. 
return; @@ -120,11 +118,11 @@ public void doCompletion( final CompletionItem.Builder result = CompletionItem.newBuilder(); String item = b.toString(); result.setStart(start) - .setLength(len) - .setLabel(item) - .getTextEditBuilder() - .setText(item) - .setRange(range); + .setLength(len) + .setLabel(item) + .getTextEditBuilder() + .setText(item) + .setRange(range); results.add(result); }); } diff --git a/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/results/CompleteTableNamespace.java b/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/results/CompleteTableNamespace.java index fde51fcb9bd..bf36183eea7 100644 --- a/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/results/CompleteTableNamespace.java +++ b/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/results/CompleteTableNamespace.java @@ -11,8 +11,8 @@ import java.util.stream.Stream; /** - * A class specifically for completing table namespaces; to be called after the completer has - * discovered the name of the namespace. + * A class specifically for completing table namespaces; to be called after the completer has discovered the name of the + * namespace. * */ public class CompleteTableNamespace extends CompletionBuilder { @@ -20,26 +20,23 @@ public class CompleteTableNamespace extends CompletionBuilder { private final ChunkerInvoke invoke; private final Stream matches; - public CompleteTableNamespace(ChunkerCompleter completer, ChunkerInvoke invoke, - Stream matches) { + public CompleteTableNamespace(ChunkerCompleter completer, ChunkerInvoke invoke, Stream matches) { super(completer); this.invoke = invoke; - // we should probably change this argument from a Stream to the `Node replaced`, so - // we can do the - // replaceNode/placeAfter work here, once, and then invoke doCompletion N times for each - // item. 
+ // we should probably change this argument from a Stream to the `Node replaced`, so we can do the + // replaceNode/placeAfter work here, once, and then invoke doCompletion N times for each item. // That is, we should move the loop out of this CompletionBuilder this.matches = matches; } public void doCompletion( - Node replaced, - Set results, - CompletionRequest request) { + Node replaced, + Set results, + CompletionRequest request) { final int argInd = invoke.indexOfArgument(replaced); final String qt = getCompleter().getQuoteType(replaced); - // TODO: move the chunk of code below into constructor, since it's not necessary to repeat - // inside a loop. IDS-1517-14 + // TODO: move the chunk of code below into constructor, since it's not necessary to repeat inside a loop. + // IDS-1517-14 if (argInd == 0 || argInd == -1) { // The cursor is on the table namespace argument. Replace the string node itself. final DocumentRange.Builder range; @@ -66,17 +63,17 @@ public void doCompletion( final CompletionItem.Builder result = CompletionItem.newBuilder(); String item = b.toString(); result - .setStart(start) - .setLength(len) - .setLabel(item) - .getTextEditBuilder() - .setText(item) - .setRange(range); + .setStart(start) + .setLength(len) + .setLabel(item) + .getTextEditBuilder() + .setText(item) + .setRange(range); results.add(result); }); } else if (argInd == 1) { - // The cursor is on the table namespace argument. We'll need to replace the whole thing, - // plus add a little suffix on + // The cursor is on the table namespace argument. 
We'll need to replace the whole thing, plus add a little + // suffix on matches.forEach(match -> { getCompleter().addMatch(results, replaced, match, request, qt, ")"); }); diff --git a/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/results/CompleteVarName.java b/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/results/CompleteVarName.java index 02067b81574..75acdd5111c 100644 --- a/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/results/CompleteVarName.java +++ b/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/results/CompleteVarName.java @@ -25,9 +25,9 @@ public CompleteVarName(ChunkerCompleter completer, Token replacing) { } public void doCompletion( - Collection results, - CompletionRequest request, - String varName) { + Collection results, + CompletionRequest request, + String varName) { final CompletionOptions opts = new CompletionOptions(); final Token before = replacing.prev(); // if the user put a space before a =, make sure we add one after, if it is missing diff --git a/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/results/CompletionBuilder.java b/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/results/CompletionBuilder.java index 6ca6ea05e55..beebff348ab 100644 --- a/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/results/CompletionBuilder.java +++ b/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/results/CompletionBuilder.java @@ -66,7 +66,7 @@ protected void addTokens(StringBuilder b, Token tok, String... 
suffix) { } protected io.deephaven.proto.backplane.script.grpc.DocumentRange.Builder replaceNode(Node node, - CompletionRequest request) { + CompletionRequest request) { start = node.getStartIndex(); len = node.getEndIndex() - node.getStartIndex(); return node.asRange(); @@ -76,8 +76,7 @@ protected DocumentRange.Builder replaceToken(Token startToken, CompletionRequest return replaceTokens(startToken, startToken, request); } - protected DocumentRange.Builder replaceTokens(Token startToken, Token endToken, - CompletionRequest request) { + protected DocumentRange.Builder replaceTokens(Token startToken, Token endToken, CompletionRequest request) { if (endToken == null) { endToken = startToken; } @@ -85,8 +84,8 @@ protected DocumentRange.Builder replaceTokens(Token startToken, Token endToken, start = startToken.getStartIndex(); len = endToken.getEndIndex() - start; return DocumentRange.newBuilder() - .setStart(startToken.positionStart()) - .setEnd(endToken.positionEnd()); + .setStart(startToken.positionStart()) + .setEnd(endToken.positionEnd()); } protected DocumentRange.Builder placeAfter(Node node, CompletionRequest request) { @@ -94,14 +93,14 @@ protected DocumentRange.Builder placeAfter(Node node, CompletionRequest request) len = 1; final DocumentRange.Builder range = node.asRange(); Position.Builder pos = Position.newBuilder() - .setLine(range.getEnd().getLine()) - .setCharacter(range.getEnd().getCharacter() - 1); + .setLine(range.getEnd().getLine()) + .setCharacter(range.getEnd().getCharacter() - 1); range.setStart(pos.build()); return range; } - protected void addMatch(Collection results, Token startToken, - Token endToken, String match, CompletionRequest index, CompletionOptions options) { + protected void addMatch(Collection results, Token startToken, Token endToken, String match, + CompletionRequest index, CompletionOptions options) { if (endToken == null) { endToken = startToken; } @@ -112,8 +111,7 @@ protected void addMatch(Collection results, Token startT 
String check = startToken.image.trim(); for (final String prefix : prefixes) { if (prefix.trim().equals(check)) { - // keep the user's version of this token, and keep looking for required - // prefixes. + // keep the user's version of this token, and keep looking for required prefixes. completion.append(startToken.image); startToken = startToken.next; check = startToken.image.trim(); @@ -153,12 +151,12 @@ protected void addMatch(Collection results, Token startT final String displayed = completion.toString(); final CompletionItem.Builder result = CompletionItem.newBuilder(); result - .setStart(start) - .setLength(len) - .setLabel(displayed) - .getTextEditBuilder() - .setText(displayed) - .setRange(replacement.build()); + .setStart(start) + .setLength(len) + .setLabel(displayed) + .getTextEditBuilder() + .setText(displayed) + .setRange(replacement.build()); results.add(result); } } diff --git a/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/results/FuzzyList.java b/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/results/FuzzyList.java index c2849f298e1..e207a7bbe77 100644 --- a/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/results/FuzzyList.java +++ b/open-api/lang-tools/src/main/java/io/deephaven/lang/completion/results/FuzzyList.java @@ -11,12 +11,10 @@ /** * A list-like object that performs fuzzy sorting on the edit distance of strings. * - * The constructor takes the pattern String (user input), then you can add-by-string-key any number - * of objects of type T. The score for each String is used to sort entries which contain source - * object and string key. + * The constructor takes the pattern String (user input), then you can add-by-string-key any number of objects of type + * T. The score for each String is used to sort entries which contain source object and string key. * - * This can allow you to, for example, sort methods based on distance to a user-entered search - * query. 
+ * This can allow you to, for example, sort methods based on distance to a user-entered search query. */ public class FuzzyList implements Iterable { diff --git a/open-api/lang-tools/src/test/groovy/io/deephaven/lang/completion/AllTokenIterableImpl.java b/open-api/lang-tools/src/test/groovy/io/deephaven/lang/completion/AllTokenIterableImpl.java index 196baeb7bb4..4ca05b69dbf 100644 --- a/open-api/lang-tools/src/test/groovy/io/deephaven/lang/completion/AllTokenIterableImpl.java +++ b/open-api/lang-tools/src/test/groovy/io/deephaven/lang/completion/AllTokenIterableImpl.java @@ -44,18 +44,17 @@ private void computeNext() { } else { next = cur.next; if (mark == null) { - // not processing specialToken yet; we don't want to look at - // specialToken if we're already + // not processing specialToken yet; we don't want to look at specialToken if we're already // processing a chain of specialTokens... if (next.specialToken != null) { // ok, now we process specialToken. - // specialToken is weird. When you see one, if you want to iterate - // them in lexical order, - // you must follow the specialToken links until they dry up, then - // follow the `next` links + // specialToken is weird. When you see one, if you want to iterate them in lexical + // order, + // you must follow the specialToken links until they dry up, then follow the `next` + // links // until they are null. - // We mark the node where we start, then skip next ahead to the end - // of specialToken chain. + // We mark the node where we start, then skip next ahead to the end of specialToken + // chain. 
mark = next; while (next.specialToken != null) { next = next.specialToken; diff --git a/open-api/shared-fu/src/main/java/io/deephaven/web/shared/cmd/ServerReplyHandle.java b/open-api/shared-fu/src/main/java/io/deephaven/web/shared/cmd/ServerReplyHandle.java index b67a9b68192..3420dff34d4 100644 --- a/open-api/shared-fu/src/main/java/io/deephaven/web/shared/cmd/ServerReplyHandle.java +++ b/open-api/shared-fu/src/main/java/io/deephaven/web/shared/cmd/ServerReplyHandle.java @@ -5,15 +5,13 @@ /** * Like a TableHandle, but not constrained to tables (i.e. not always a TableHandle). * - * This allows us to build infrastructure like metrics without requiring use of table handle - * semantics. + * This allows us to build infrastructure like metrics without requiring use of table handle semantics. */ public abstract class ServerReplyHandle implements Serializable { /** - * Use this in public no-arg constructors for serialization; serialization is allowed to - * overwrite final fields, so just call this(DESERIALIZATION_IN_PROGRESS); inside your no-arg - * ctors. + * Use this in public no-arg constructors for serialization; serialization is allowed to overwrite final fields, so + * just call this(DESERIALIZATION_IN_PROGRESS); inside your no-arg ctors. 
*/ public static final int DESERIALIZATION_IN_PROGRESS = -3; diff --git a/open-api/shared-fu/src/main/java/io/deephaven/web/shared/fu/LinkedIterable.java b/open-api/shared-fu/src/main/java/io/deephaven/web/shared/fu/LinkedIterable.java index d597ecdadb5..fd461f3befe 100644 --- a/open-api/shared-fu/src/main/java/io/deephaven/web/shared/fu/LinkedIterable.java +++ b/open-api/shared-fu/src/main/java/io/deephaven/web/shared/fu/LinkedIterable.java @@ -42,8 +42,7 @@ public T next() { public void remove() { if (remover != null) { if (removed) { - throw new IllegalStateException( - "Cannot call remove() more than once per next()"); + throw new IllegalStateException("Cannot call remove() more than once per next()"); } removed = true; if (node == null) { @@ -62,11 +61,10 @@ public void remove() { private final boolean skipHead; private JsBiConsumer remover; - public LinkedIterable(T head, T tail, boolean includeTail, boolean strict, - UnaryOperator next) { + public LinkedIterable(T head, T tail, boolean includeTail, boolean strict, UnaryOperator next) { this(head, new UnaryOperator() { - boolean done = head == tail; // when head == tail, we will always return the head - // anyway, so just skip the next-ing. + boolean done = head == tail; // when head == tail, we will always return the head anyway, so just skip the + // next-ing. 
@Override public T apply(T cur) { @@ -81,8 +79,7 @@ public T apply(T cur) { } final T find = next.apply(cur); if (strict && find == null) { - throw new IllegalStateException( - "Iterable starting at " + head + " did not end with " + tail); + throw new IllegalStateException("Iterable starting at " + head + " did not end with " + tail); } if (find == tail) { done = true; diff --git a/open-api/shared-ide/src/main/java/io/deephaven/web/shared/ide/ConsoleConfig.java b/open-api/shared-ide/src/main/java/io/deephaven/web/shared/ide/ConsoleConfig.java index bd9a6a34758..2422046a325 100644 --- a/open-api/shared-ide/src/main/java/io/deephaven/web/shared/ide/ConsoleConfig.java +++ b/open-api/shared-ide/src/main/java/io/deephaven/web/shared/ide/ConsoleConfig.java @@ -186,7 +186,7 @@ public void setEnvVars(Object envVars) { JsArrayLike jsPair = Js.asArrayLike(envVars).getAnyAt(i).asArrayLike(); if (jsPair.getLength() != 2) { throw new IllegalArgumentException( - "Argument set doesn't contain two items: " + Global.JSON.stringify(jsPair)); + "Argument set doesn't contain two items: " + Global.JSON.stringify(jsPair)); } String[] typed = new String[2]; typed[0] = jsPair.getAnyAt(0).asString(); @@ -213,8 +213,7 @@ public Map getEnvironmentVars() { return map; } for (String[] envVar : envVars) { - assert envVar.length == 2 - : "env vars must be arrays of arrays of length 2: [ [key, val], [k, v] ]"; + assert envVar.length == 2 : "env vars must be arrays of arrays of length 2: [ [key, val], [k, v] ]"; map.put(envVar[0], envVar[1]); } @@ -224,17 +223,17 @@ public Map getEnvironmentVars() { @Override public String toString() { return "ConsoleConfig{" + - "dispatcherHost='" + dispatcherHost + '\'' + - ", dispatcherPort=" + dispatcherPort + - ", classpath=" + Arrays.toString(classpath) + - ", maxHeapMb=" + maxHeapMb + - ", queryDescription='" + queryDescription + '\'' + - ", debug=" + debug + - ", detailedGCLogging=" + detailedGCLogging + - ", omitDefaultGcParameters=" + 
omitDefaultGcParameters + - ", classpath=" + Arrays.toString(classpath_()) + - ", jvmArgs=" + Arrays.toString(jvmArgs_()) + - ", environmentVars=" + getEnvironmentVars() + - '}'; + "dispatcherHost='" + dispatcherHost + '\'' + + ", dispatcherPort=" + dispatcherPort + + ", classpath=" + Arrays.toString(classpath) + + ", maxHeapMb=" + maxHeapMb + + ", queryDescription='" + queryDescription + '\'' + + ", debug=" + debug + + ", detailedGCLogging=" + detailedGCLogging + + ", omitDefaultGcParameters=" + omitDefaultGcParameters + + ", classpath=" + Arrays.toString(classpath_()) + + ", jvmArgs=" + Arrays.toString(jvmArgs_()) + + ", environmentVars=" + getEnvironmentVars() + + '}'; } } diff --git a/open-api/shared-ide/src/main/java/io/deephaven/web/shared/ide/ConsoleSessionType.java b/open-api/shared-ide/src/main/java/io/deephaven/web/shared/ide/ConsoleSessionType.java index 1cb70dd9695..053d5712495 100644 --- a/open-api/shared-ide/src/main/java/io/deephaven/web/shared/ide/ConsoleSessionType.java +++ b/open-api/shared-ide/src/main/java/io/deephaven/web/shared/ide/ConsoleSessionType.java @@ -14,6 +14,6 @@ public static ConsoleSessionType from(String type) { return Groovy; } throw new UnsupportedOperationException( - "Session type " + type + " not supported; valid options are ['groovy' or 'python']"); + "Session type " + type + " not supported; valid options are ['groovy' or 'python']"); } } diff --git a/open-api/shared-ide/src/main/java/io/deephaven/web/shared/ide/ExecutionHandle.java b/open-api/shared-ide/src/main/java/io/deephaven/web/shared/ide/ExecutionHandle.java index 22e631f2f58..29d56a34c2f 100644 --- a/open-api/shared-ide/src/main/java/io/deephaven/web/shared/ide/ExecutionHandle.java +++ b/open-api/shared-ide/src/main/java/io/deephaven/web/shared/ide/ExecutionHandle.java @@ -5,8 +5,7 @@ /** * A client-generated handle for each execution. * - * This allows the client to issue cancellation requests using an id it knows before the script is - * even requested. 
+ * This allows the client to issue cancellation requests using an id it knows before the script is even requested. */ public class ExecutionHandle extends ServerReplyHandle { diff --git a/open-api/shared-ide/src/main/java/io/deephaven/web/shared/ide/ScriptHandle.java b/open-api/shared-ide/src/main/java/io/deephaven/web/shared/ide/ScriptHandle.java index e1ab2cbf244..a5cffd016b8 100644 --- a/open-api/shared-ide/src/main/java/io/deephaven/web/shared/ide/ScriptHandle.java +++ b/open-api/shared-ide/src/main/java/io/deephaven/web/shared/ide/ScriptHandle.java @@ -8,24 +8,22 @@ public class ScriptHandle extends ServerReplyHandle { /** - * The client-originated handle for this script session. Used by each client to identify which - * handle to use in operations. + * The client-originated handle for this script session. Used by each client to identify which handle to use in + * operations. * * These will not be unique; most cases this will =1 * * Identity semantics will use clientId + connectionId. * - * When we support joining other user's sessions, we will do so by providing both the clientId - * and the connectionId as integers, and a new ScriptHandle to represent joining the other - * session (so when the joining user cancels that handle, it does not affect the joined - * session). Internally, we will record in each "real session" how many ScriptHandles are open - * to it. + * When we support joining other user's sessions, we will do so by providing both the clientId and the connectionId + * as integers, and a new ScriptHandle to represent joining the other session (so when the joining user cancels that + * handle, it does not affect the joined session). Internally, we will record in each "real session" how many + * ScriptHandles are open to it. */ private int clientId; /** - * The server-originated handle for this script session. 
This is used to key into the static - * ScriptSession object; when we support recovering / restarting past sessions, this value - * _might_ change, but it effectively stable. + * The server-originated handle for this script session. This is used to key into the static ScriptSession object; + * when we support recovering / restarting past sessions, this value _might_ change, but it effectively stable. * * This field does not participate in identity semantics. */ @@ -33,13 +31,12 @@ public class ScriptHandle extends ServerReplyHandle { /** * The per-socket connection id. * - * This is a bit of a "magic id" that is passed through all handles, so we can effectively store - * session state keyed off any given handle. + * This is a bit of a "magic id" that is passed through all handles, so we can effectively store session state keyed + * off any given handle. * - * The custom field serializers treat this specially, where client and server each have their - * own internal connectionId for each socket, and they write the connectionId their recipient - * expects to receive (so all objects in memory have local ids, and all objects on the wire have - * remote ids). + * The custom field serializers treat this specially, where client and server each have their own internal + * connectionId for each socket, and they write the connectionId their recipient expects to receive (so all objects + * in memory have local ids, and all objects on the wire have remote ids). * * If a socket dies and has to reconnect, this value will be updated. 
* diff --git a/open-api/shared-ide/src/main/java/io/deephaven/web/shared/ide/lsp/CompletionItem.java b/open-api/shared-ide/src/main/java/io/deephaven/web/shared/ide/lsp/CompletionItem.java index e2668e92e66..77212f506c4 100644 --- a/open-api/shared-ide/src/main/java/io/deephaven/web/shared/ide/lsp/CompletionItem.java +++ b/open-api/shared-ide/src/main/java/io/deephaven/web/shared/ide/lsp/CompletionItem.java @@ -36,25 +36,21 @@ public CompletionItem() { } /** - * This constructor matches CompletionFragment semantics; it is here to ease the transition to - * the LSP model. + * This constructor matches CompletionFragment semantics; it is here to ease the transition to the LSP model. */ @JsIgnore - public CompletionItem(int start, int length, String completion, String displayed, - String source) { - this(start, length, completion, displayed, - DocumentRange.rangeFromSource(source, start, length)); + public CompletionItem(int start, int length, String completion, String displayed, String source) { + this(start, length, completion, displayed, DocumentRange.rangeFromSource(source, start, length)); } @JsIgnore - public CompletionItem(int start, int length, String completion, String displayed, - DocumentRange range) { + public CompletionItem(int start, int length, String completion, String displayed, DocumentRange range) { this(); textEdit = new TextEdit(); textEdit.text = completion; textEdit.range = range; - insertTextFormat = 2; // snippet format is insertTextFormat=2 in lsp, and insertTextRules=4. - // See MonacoCompletionProvider.jsx. + insertTextFormat = 2; // snippet format is insertTextFormat=2 in lsp, and insertTextRules=4. See + // MonacoCompletionProvider.jsx. label = displayed == null ? 
completion : displayed; this.start = start; this.length = length; @@ -78,9 +74,7 @@ public void setCommitCharacters(Object args) { @JsProperty(name = "additionalTextEdits") public Object additionalTextEdits_() { if (additionalTextEdits != null) { - return Js - .cast(Js.>uncheckedCast(additionalTextEdits) - .slice()); + return Js.cast(Js.>uncheckedCast(additionalTextEdits).slice()); } else { return null; } @@ -152,10 +146,10 @@ public void setLength(int length) { @JsIgnore public String toString() { return "CompletionItem{" + - "start=" + start + - ", length=" + length + - ", textEdit=" + textEdit + - "}\n"; + "start=" + start + + ", length=" + length + + ", textEdit=" + textEdit + + "}\n"; } @Override diff --git a/open-api/shared-ide/src/main/java/io/deephaven/web/shared/ide/lsp/Diagnostic.java b/open-api/shared-ide/src/main/java/io/deephaven/web/shared/ide/lsp/Diagnostic.java index a0ff1419c7f..8b9ac5b472e 100644 --- a/open-api/shared-ide/src/main/java/io/deephaven/web/shared/ide/lsp/Diagnostic.java +++ b/open-api/shared-ide/src/main/java/io/deephaven/web/shared/ide/lsp/Diagnostic.java @@ -9,9 +9,8 @@ /** * Represents a diagnostic message sent to the client, to mark info/warn/error in the source. *

    - * Original definition in current lsp documentation, - * https://microsoft.github.io/language-server-protocol/specification (text search Diagnostic; - * couldn't find good intra-document name='d links) + * Original definition in current lsp documentation, https://microsoft.github.io/language-server-protocol/specification + * (text search Diagnostic; couldn't find good intra-document name='d links) *

    * All field documentation here is copy-pasted from original source. */ @@ -23,8 +22,8 @@ public class Diagnostic implements Serializable { private DocumentRange range; /** - * The diagnostic's severity. Can be omitted. If omitted it is up to the client to interpret - * diagnostics as error, warning, info or hint. + * The diagnostic's severity. Can be omitted. If omitted it is up to the client to interpret diagnostics as error, + * warning, info or hint. */ private Integer severity; @@ -34,8 +33,7 @@ public class Diagnostic implements Serializable { private Integer code; /** - * A human-readable string describing the source of this diagnostic, e.g. 'typescript' or 'super - * lint'. + * A human-readable string describing the source of this diagnostic, e.g. 'typescript' or 'super lint'. */ private String source; @@ -45,8 +43,8 @@ public class Diagnostic implements Serializable { private String message; /** - * An array of related diagnostic information, e.g. when symbol-names within a scope collide all - * definitions can be marked via this property. + * An array of related diagnostic information, e.g. when symbol-names within a scope collide all definitions can be + * marked via this property. */ private DiagnosticRelatedInformation[] relatedInformation; @@ -79,8 +77,7 @@ public Diagnostic(@JsOptional JsPropertyMap source) { Any[] related = source.getAny("relatedInformation").asArray(); relatedInformation = new DiagnosticRelatedInformation[related.length]; for (int i = 0; i < related.length; i++) { - relatedInformation[i] = - new DiagnosticRelatedInformation(related[i].asPropertyMap()); + relatedInformation[i] = new DiagnosticRelatedInformation(related[i].asPropertyMap()); } } } @@ -104,7 +101,7 @@ public Double getSeverity() { public void setSeverity(Double severity) { this.severity = severity == null ? null : severity.intValue(); assert severity == null || severity == severity.intValue() - : "Only set integer severity! 
(you sent " + severity + ")"; + : "Only set integer severity! (you sent " + severity + ")"; } @JsProperty @@ -120,8 +117,7 @@ public void code(int code) { @JsProperty // for js public void setCode(Double code) { this.code = code == null ? null : code.intValue(); - assert code == null || code == code.intValue() - : "Only set integer code! (you sent " + code + ")"; + assert code == null || code == code.intValue() : "Only set integer code! (you sent " + code + ")"; } @JsProperty diff --git a/open-api/shared-ide/src/main/java/io/deephaven/web/shared/ide/lsp/DidChangeTextDocumentParams.java b/open-api/shared-ide/src/main/java/io/deephaven/web/shared/ide/lsp/DidChangeTextDocumentParams.java index c0a32e6e9d8..798bbbf62ee 100644 --- a/open-api/shared-ide/src/main/java/io/deephaven/web/shared/ide/lsp/DidChangeTextDocumentParams.java +++ b/open-api/shared-ide/src/main/java/io/deephaven/web/shared/ide/lsp/DidChangeTextDocumentParams.java @@ -19,16 +19,14 @@ public DidChangeTextDocumentParams(JsPropertyMap source) { this(); if (source.has("textDocument")) { - textDocument = - new VersionedTextDocumentIdentifier(source.getAny("textDocument").asPropertyMap()); + textDocument = new VersionedTextDocumentIdentifier(source.getAny("textDocument").asPropertyMap()); } if (source.has("contentChanges")) { Any[] rawContentChanges = source.getAny("contentChanges").asArray(); contentChanges = new TextDocumentContentChangeEvent[rawContentChanges.length]; for (int i = 0; i < rawContentChanges.length; i++) { - contentChanges[i] = - new TextDocumentContentChangeEvent(rawContentChanges[i].asPropertyMap()); + contentChanges[i] = new TextDocumentContentChangeEvent(rawContentChanges[i].asPropertyMap()); } } } diff --git a/open-api/shared-ide/src/main/java/io/deephaven/web/shared/ide/lsp/DidCloseTextDocumentParams.java b/open-api/shared-ide/src/main/java/io/deephaven/web/shared/ide/lsp/DidCloseTextDocumentParams.java index d60df45f552..4e4db039762 100644 --- 
a/open-api/shared-ide/src/main/java/io/deephaven/web/shared/ide/lsp/DidCloseTextDocumentParams.java +++ b/open-api/shared-ide/src/main/java/io/deephaven/web/shared/ide/lsp/DidCloseTextDocumentParams.java @@ -18,8 +18,7 @@ public DidCloseTextDocumentParams(JsPropertyMap source) { this(); if (source.has("textDocument")) { - textDocument = - new TextDocumentIdentifier(source.getAny("textDocument").asPropertyMap()); + textDocument = new TextDocumentIdentifier(source.getAny("textDocument").asPropertyMap()); } } } diff --git a/open-api/shared-ide/src/main/java/io/deephaven/web/shared/ide/lsp/DocumentRange.java b/open-api/shared-ide/src/main/java/io/deephaven/web/shared/ide/lsp/DocumentRange.java index 8ab0d583a74..03742d6813d 100644 --- a/open-api/shared-ide/src/main/java/io/deephaven/web/shared/ide/lsp/DocumentRange.java +++ b/open-api/shared-ide/src/main/java/io/deephaven/web/shared/ide/lsp/DocumentRange.java @@ -45,8 +45,8 @@ public static DocumentRange rangeFromSource(String source, int start, int length final DocumentRange range = new DocumentRange(); range.start = getPositionFromOffset(source, start); range.end = getPositionFromOffset( - source, - start + length); + source, + start + length); return range; } @@ -94,9 +94,9 @@ public static Position getPositionFromOffset(String document, int offset) { @JsIgnore public String toString() { return "Range{" + - "start=" + start + - ", end=" + end + - '}'; + "start=" + start + + ", end=" + end + + '}'; } @Override @@ -130,6 +130,6 @@ public void decrementColumns() { public boolean isInside(Position innerStart, Position innerEnd) { return innerStart.line >= start.line && innerStart.character >= start.character - && innerEnd.line <= end.line && innerEnd.character <= end.character; + && innerEnd.line <= end.line && innerEnd.character <= end.character; } } diff --git a/open-api/shared-ide/src/main/java/io/deephaven/web/shared/ide/lsp/Position.java 
b/open-api/shared-ide/src/main/java/io/deephaven/web/shared/ide/lsp/Position.java index 9f0ae644ee9..17c3f596e50 100644 --- a/open-api/shared-ide/src/main/java/io/deephaven/web/shared/ide/lsp/Position.java +++ b/open-api/shared-ide/src/main/java/io/deephaven/web/shared/ide/lsp/Position.java @@ -44,9 +44,9 @@ public Position(JsPropertyMap source) { @JsIgnore public String toString() { return "Position{" + - "line=" + line + - ", character=" + character + - '}'; + "line=" + line + + ", character=" + character + + '}'; } @Override @@ -91,8 +91,8 @@ public boolean greaterOrEqual(Position end) { @JsIgnore public int extend(Position requested) { if (line != requested.line) { - throw new IllegalArgumentException("Can only extend on same-line; " + this + " and " - + requested + " are not on same line"); + throw new IllegalArgumentException( + "Can only extend on same-line; " + this + " and " + requested + " are not on same line"); } int delta = requested.character - character; character = requested.character; diff --git a/open-api/shared-ide/src/main/java/io/deephaven/web/shared/ide/lsp/TextDocumentContentChangeEvent.java b/open-api/shared-ide/src/main/java/io/deephaven/web/shared/ide/lsp/TextDocumentContentChangeEvent.java index 8dfc14a5302..d95458cfb55 100644 --- a/open-api/shared-ide/src/main/java/io/deephaven/web/shared/ide/lsp/TextDocumentContentChangeEvent.java +++ b/open-api/shared-ide/src/main/java/io/deephaven/web/shared/ide/lsp/TextDocumentContentChangeEvent.java @@ -44,9 +44,9 @@ public TextDocumentContentChangeEvent(JsPropertyMap source) { @JsIgnore public String toString() { return "TextDocumentContentChangeEvent{" + - "range=" + range + - ", rangeLength=" + rangeLength + - ", text='" + text + '\'' + - '}'; + "range=" + range + + ", rangeLength=" + rangeLength + + ", text='" + text + '\'' + + '}'; } } diff --git a/open-api/shared-ide/src/main/java/io/deephaven/web/shared/ide/lsp/TextDocumentPositionParams.java 
b/open-api/shared-ide/src/main/java/io/deephaven/web/shared/ide/lsp/TextDocumentPositionParams.java index 0ac065a4a2f..0ad6dee395f 100644 --- a/open-api/shared-ide/src/main/java/io/deephaven/web/shared/ide/lsp/TextDocumentPositionParams.java +++ b/open-api/shared-ide/src/main/java/io/deephaven/web/shared/ide/lsp/TextDocumentPositionParams.java @@ -22,8 +22,7 @@ public TextDocumentPositionParams() {} protected void updateFromJsPropertyMap(JsPropertyMap source) { if (source.has("textDocument")) { - textDocument = - new TextDocumentIdentifier(source.getAny("textDocument").asPropertyMap()); + textDocument = new TextDocumentIdentifier(source.getAny("textDocument").asPropertyMap()); } if (source.has("position")) { diff --git a/open-api/shared-ide/src/main/java/io/deephaven/web/shared/ide/lsp/TextEdit.java b/open-api/shared-ide/src/main/java/io/deephaven/web/shared/ide/lsp/TextEdit.java index 2fc314a27c3..1c5ce915810 100644 --- a/open-api/shared-ide/src/main/java/io/deephaven/web/shared/ide/lsp/TextEdit.java +++ b/open-api/shared-ide/src/main/java/io/deephaven/web/shared/ide/lsp/TextEdit.java @@ -14,9 +14,9 @@ public class TextEdit implements Serializable { @JsIgnore public String toString() { return "TextEdit{" + - "range=" + range + - ", text='" + text + '\'' + - '}'; + "range=" + range + + ", text='" + text + '\'' + + '}'; } @Override diff --git a/open-api/shared-ide/src/main/java/io/deephaven/web/shared/ide/lsp/VersionedTextDocumentIdentifier.java b/open-api/shared-ide/src/main/java/io/deephaven/web/shared/ide/lsp/VersionedTextDocumentIdentifier.java index c6ab185e84d..46baf7e4be2 100644 --- a/open-api/shared-ide/src/main/java/io/deephaven/web/shared/ide/lsp/VersionedTextDocumentIdentifier.java +++ b/open-api/shared-ide/src/main/java/io/deephaven/web/shared/ide/lsp/VersionedTextDocumentIdentifier.java @@ -7,8 +7,7 @@ import java.io.Serializable; @JsType(namespace = "dh.lsp") -public class VersionedTextDocumentIdentifier extends TextDocumentIdentifier - implements 
Serializable { +public class VersionedTextDocumentIdentifier extends TextDocumentIdentifier implements Serializable { public int version; public VersionedTextDocumentIdentifier() { diff --git a/proto/proto-backplane-grpc-util/src/main/java/io/deephaven/grpc_api/util/Exceptions.java b/proto/proto-backplane-grpc-util/src/main/java/io/deephaven/grpc_api/util/Exceptions.java index fe7882335a8..7ae66781a36 100644 --- a/proto/proto-backplane-grpc-util/src/main/java/io/deephaven/grpc_api/util/Exceptions.java +++ b/proto/proto-backplane-grpc-util/src/main/java/io/deephaven/grpc_api/util/Exceptions.java @@ -11,8 +11,8 @@ public class Exceptions { public static StatusRuntimeException statusRuntimeException(final Code statusCode, - final String details) { + final String details) { return StatusProto.toStatusRuntimeException( - Status.newBuilder().setCode(statusCode.getNumber()).setMessage(details).build()); + Status.newBuilder().setCode(statusCode.getNumber()).setMessage(details).build()); } } diff --git a/proto/proto-backplane-grpc-util/src/main/java/io/deephaven/grpc_api/util/ExportTicketHelper.java b/proto/proto-backplane-grpc-util/src/main/java/io/deephaven/grpc_api/util/ExportTicketHelper.java index 731fb51fcd8..d0d1e688a79 100644 --- a/proto/proto-backplane-grpc-util/src/main/java/io/deephaven/grpc_api/util/ExportTicketHelper.java +++ b/proto/proto-backplane-grpc-util/src/main/java/io/deephaven/grpc_api/util/ExportTicketHelper.java @@ -47,47 +47,47 @@ public static Ticket exportIdToTicket(int exportId) { */ public static Flight.FlightDescriptor exportIdToDescriptor(int exportId) { return Flight.FlightDescriptor.newBuilder() - .setType(Flight.FlightDescriptor.DescriptorType.PATH).addPath(FLIGHT_DESCRIPTOR_ROUTE) - .addPath(Integer.toString(exportId)).build(); + .setType(Flight.FlightDescriptor.DescriptorType.PATH).addPath(FLIGHT_DESCRIPTOR_ROUTE) + .addPath(Integer.toString(exportId)).build(); } /** * Convenience method to convert from {@link Flight.Ticket} to export id. 
* *

    - * Ticket's byte[0] must be {@link ExportTicketHelper#TICKET_PREFIX}, bytes[1-4] are a signed - * int export id in little-endian. + * Ticket's byte[0] must be {@link ExportTicketHelper#TICKET_PREFIX}, bytes[1-4] are a signed int export id in + * little-endian. * * @param ticket the grpc Ticket * @return the export id that the Ticket wraps */ public static int ticketToExportId(final Ticket ticket) { return ticketToExportIdInternal( - ticket.getTicket().asReadOnlyByteBuffer().order(ByteOrder.LITTLE_ENDIAN)); + ticket.getTicket().asReadOnlyByteBuffer().order(ByteOrder.LITTLE_ENDIAN)); } /** * Convenience method to convert from {@link Flight.Ticket} to export id. * *

    - * Ticket's byte[0] must be {@link ExportTicketHelper#TICKET_PREFIX}, bytes[1-4] are a signed - * int export id in little-endian. + * Ticket's byte[0] must be {@link ExportTicketHelper#TICKET_PREFIX}, bytes[1-4] are a signed int export id in + * little-endian. * * @param ticket the grpc Ticket * @return the export id that the Ticket wraps */ public static int ticketToExportId(final Flight.Ticket ticket) { return ticketToExportIdInternal( - ticket.getTicket().asReadOnlyByteBuffer().order(ByteOrder.LITTLE_ENDIAN)); + ticket.getTicket().asReadOnlyByteBuffer().order(ByteOrder.LITTLE_ENDIAN)); } /** - * Convenience method to convert from {@link ByteBuffer} to export id. Most efficient when - * {@code ticket} is {@link ByteOrder#LITTLE_ENDIAN}. + * Convenience method to convert from {@link ByteBuffer} to export id. Most efficient when {@code ticket} is + * {@link ByteOrder#LITTLE_ENDIAN}. * *

    - * Ticket's byte[0] must be {@link ExportTicketHelper#TICKET_PREFIX}, bytes[1-4] are a signed - * int export id in little-endian. + * Ticket's byte[0] must be {@link ExportTicketHelper#TICKET_PREFIX}, bytes[1-4] are a signed int export id in + * little-endian. * *

    * Does not consume the {@code ticket}. @@ -98,20 +98,20 @@ public static int ticketToExportId(final Flight.Ticket ticket) { public static int ticketToExportId(final ByteBuffer ticket) { if (ticket == null) { throw Exceptions.statusRuntimeException(Code.FAILED_PRECONDITION, - "Ticket not supplied"); + "Ticket not supplied"); } return ticket.order() == ByteOrder.LITTLE_ENDIAN ? ticketToExportIdInternal(ticket) - : ticketToExportIdInternal(ticket.asReadOnlyBuffer().order(ByteOrder.LITTLE_ENDIAN)); + : ticketToExportIdInternal(ticket.asReadOnlyBuffer().order(ByteOrder.LITTLE_ENDIAN)); } /** - * Convenience method to convert from {@link ByteBuffer} to export ticket. Most efficient when - * {@code ticket} is {@link ByteOrder#LITTLE_ENDIAN}. + * Convenience method to convert from {@link ByteBuffer} to export ticket. Most efficient when {@code ticket} is + * {@link ByteOrder#LITTLE_ENDIAN}. * *

    - * Ticket's byte[0] must be {@link ExportTicketHelper#TICKET_PREFIX}, bytes[1-4] are a signed - * int export id in little-endian. + * Ticket's byte[0] must be {@link ExportTicketHelper#TICKET_PREFIX}, bytes[1-4] are a signed int export id in + * little-endian. * *

    * Does not consume the {@code ticket}. @@ -121,7 +121,7 @@ public static int ticketToExportId(final ByteBuffer ticket) { */ public static Ticket exportIdToTicket(final ByteBuffer ticket) { final ByteBuffer lebb = ticket.order() == ByteOrder.LITTLE_ENDIAN ? ticket - : ticket.asReadOnlyBuffer().order(ByteOrder.LITTLE_ENDIAN); + : ticket.asReadOnlyBuffer().order(ByteOrder.LITTLE_ENDIAN); return Ticket.newBuilder().setTicket(ByteStringAccess.wrap(lebb)).build(); } @@ -137,24 +137,24 @@ public static Ticket exportIdToTicket(final ByteBuffer ticket) { public static int descriptorToExportId(final Flight.FlightDescriptor descriptor) { if (descriptor == null) { throw Exceptions.statusRuntimeException(Code.FAILED_PRECONDITION, - "Descriptor not supplied"); + "Descriptor not supplied"); } if (descriptor.getType() != Flight.FlightDescriptor.DescriptorType.PATH) { throw Exceptions.statusRuntimeException(Code.FAILED_PRECONDITION, - "Cannot parse descriptor: not a path"); + "Cannot parse descriptor: not a path"); } if (descriptor.getPathCount() != 2) { throw Exceptions.statusRuntimeException(Code.FAILED_PRECONDITION, - "Cannot parse descriptor: unexpected path length (found: " - + TicketRouterHelper.getLogNameFor(descriptor) + ", expected: 2)"); + "Cannot parse descriptor: unexpected path length (found: " + + TicketRouterHelper.getLogNameFor(descriptor) + ", expected: 2)"); } try { return Integer.parseInt(descriptor.getPath(1)); } catch (final NumberFormatException nfe) { throw Exceptions.statusRuntimeException(Code.FAILED_PRECONDITION, - "Cannot parse descriptor: export id not numeric (found: " - + TicketRouterHelper.getLogNameFor(descriptor) + ")"); + "Cannot parse descriptor: export id not numeric (found: " + + TicketRouterHelper.getLogNameFor(descriptor) + ")"); } } @@ -208,12 +208,12 @@ public static Ticket descriptorToTicket(final Flight.FlightDescriptor descriptor */ public static String toReadableString(final Ticket ticket) { return toReadableString( - 
ticket.getTicket().asReadOnlyByteBuffer().order(ByteOrder.LITTLE_ENDIAN)); + ticket.getTicket().asReadOnlyByteBuffer().order(ByteOrder.LITTLE_ENDIAN)); } /** - * Convenience method to create a human readable string from the flight ticket (as ByteBuffer). - * Most efficient when {@code ticket} is {@link ByteOrder#LITTLE_ENDIAN}. + * Convenience method to create a human readable string from the flight ticket (as ByteBuffer). Most efficient when + * {@code ticket} is {@link ByteOrder#LITTLE_ENDIAN}. * *

    * Does not consume the {@code ticket}. @@ -250,7 +250,7 @@ private static int ticketToExportIdInternal(final ByteBuffer ticket) { int pos = ticket.position(); if (ticket.remaining() != 5 || ticket.get(pos) != TICKET_PREFIX) { throw Exceptions.statusRuntimeException(Code.FAILED_PRECONDITION, - "Cannot parse ticket: found 0x" + byteBufToHex(ticket) + " (hex)"); + "Cannot parse ticket: found 0x" + byteBufToHex(ticket) + " (hex)"); } return ticket.getInt(pos + 1); } diff --git a/proto/proto-backplane-grpc-util/src/main/java/io/deephaven/grpc_api/util/TicketRouterHelper.java b/proto/proto-backplane-grpc-util/src/main/java/io/deephaven/grpc_api/util/TicketRouterHelper.java index a34c857003f..e55b5b04a62 100644 --- a/proto/proto-backplane-grpc-util/src/main/java/io/deephaven/grpc_api/util/TicketRouterHelper.java +++ b/proto/proto-backplane-grpc-util/src/main/java/io/deephaven/grpc_api/util/TicketRouterHelper.java @@ -17,7 +17,7 @@ public class TicketRouterHelper { public static String getLogNameFor(final Flight.FlightDescriptor descriptor) { if (descriptor.getType() != Flight.FlightDescriptor.DescriptorType.PATH) { throw Exceptions.statusRuntimeException(Code.INVALID_ARGUMENT, - "Flight descriptor is not a path"); + "Flight descriptor is not a path"); } final StringBuilder sb = new StringBuilder(); diff --git a/py/jpy-config/src/main/java/io/deephaven/jpy/JpyConfig.java b/py/jpy-config/src/main/java/io/deephaven/jpy/JpyConfig.java index 28cbb2da398..e226ea57f62 100644 --- a/py/jpy-config/src/main/java/io/deephaven/jpy/JpyConfig.java +++ b/py/jpy-config/src/main/java/io/deephaven/jpy/JpyConfig.java @@ -8,28 +8,26 @@ import java.util.stream.Collectors; /** - * This class encapsulates the configuration data and invocation of {/@link - * PyLibInitializer#initPyLib(String, String, String)}, {/@link PyLib#setProgramName(String)}, - * {/@link PyLib#setPythonHome(String)}, and {/@link PyLib#startPython(int, String...)}. 
+ * This class encapsulates the configuration data and invocation of {/@link PyLibInitializer#initPyLib(String, String, + * String)}, {/@link PyLib#setProgramName(String)}, {/@link PyLib#setPythonHome(String)}, and {/@link + * PyLib#startPython(int, String...)}. * *

    * Note: *

    - * We *don't* want JpyConfig to have an explicit dependency on jpy anymore - that way we can still - * configure jpy without having the unnecessary dependency. For example, the bootstrap kernel needs - * to be able to configure jpy, but it should not depend on jpy. It's still useful at this time to - * have fake @links to it though, as it gives useful context for developers. In a better world, the - * jpy project itself would be better configure-able (ie, not static), and this type of external - * configuration class wouldn't be necessary. + * We *don't* want JpyConfig to have an explicit dependency on jpy anymore - that way we can still configure jpy without + * having the unnecessary dependency. For example, the bootstrap kernel needs to be able to configure jpy, but it should + * not depend on jpy. It's still useful at this time to have fake @links to it though, as it gives useful context for + * developers. In a better world, the jpy project itself would be better configure-able (ie, not static), and this type + * of external configuration class wouldn't be necessary. */ final public class JpyConfig { /** - * We can't reference the values in {/@link Diag} directly - that would cause {/@link - * org.jpy.PyLib} to prematurely initialize. + * We can't reference the values in {/@link Diag} directly - that would cause {/@link org.jpy.PyLib} to prematurely + * initialize. 
* - * We could: 1) Refactor {/@link Diag} so as not to initialize {/@link PyLib} 2) Use - * compile-time code generation against {/@link Diag} 3) Test to make sure {/@link Flag} and - * {/@link Diag} are in-sync + * We could: 1) Refactor {/@link Diag} so as not to initialize {/@link PyLib} 2) Use compile-time code generation + * against {/@link Diag} 3) Test to make sure {/@link Flag} and {/@link Diag} are in-sync * * We are currently doing the #3, see JpyConfigFlagTest */ @@ -95,8 +93,7 @@ public int getBitset() { private static void ensureAbsolute(Path path, String name) { if (path != null && !path.isAbsolute()) { - throw new IllegalArgumentException( - String.format("%s must be absolute, is '%s'", name, path)); + throw new IllegalArgumentException(String.format("%s must be absolute, is '%s'", name, path)); } } @@ -111,24 +108,24 @@ private static void ensureAbsolute(Path path, String name) { * @param flags argument to {/@link PyLib#startPython(int, String...)} */ public JpyConfig( - Path programName, - Path pythonHome, - Path pythonLib, - Path jpyLib, - Path jdlLib, - List extraPaths, - EnumSet flags) { + Path programName, + Path pythonHome, + Path pythonLib, + Path jpyLib, + Path jdlLib, + List extraPaths, + EnumSet flags) { ensureAbsolute(programName, "programName"); ensureAbsolute(pythonHome, "pythonHome"); ensureAbsolute(pythonLib, "pythonLib"); ensureAbsolute(jpyLib, "jpyLib"); ensureAbsolute(jdlLib, "jdlLib"); if (jpyLib != null - && jdlLib != null - && !Objects.equals(jpyLib.getParent(), jdlLib.getParent())) { + && jdlLib != null + && !Objects.equals(jpyLib.getParent(), jdlLib.getParent())) { throw new IllegalArgumentException(String.format( - "jpy lib and jdl lib must be siblings, jpy is '%s', jdl is '%s', parents '%s' and '%s'", - jpyLib, jdlLib, jpyLib.getParent(), jdlLib.getParent())); + "jpy lib and jdl lib must be siblings, jpy is '%s', jdl is '%s', parents '%s' and '%s'", + jpyLib, jdlLib, jpyLib.getParent(), jdlLib.getParent())); } 
this.programName = programName; this.pythonHome = pythonHome; @@ -179,14 +176,13 @@ class AsSource implements JpyConfigSource { @Override public Optional getFlags() { return flags.isEmpty() ? Optional.empty() - : Optional.of(flags.stream().map(Enum::name).collect(Collectors.joining(","))); + : Optional.of(flags.stream().map(Enum::name).collect(Collectors.joining(","))); } @Override public Optional getExtraPaths() { return extraPaths.isEmpty() ? Optional.empty() - : Optional - .of(extraPaths.stream().map(Path::toString).collect(Collectors.joining(","))); + : Optional.of(extraPaths.stream().map(Path::toString).collect(Collectors.joining(","))); } @Override @@ -227,15 +223,14 @@ public boolean equals(Object o) { JpyConfig jpyConfig = (JpyConfig) o; if (pythonHome != null ? !pythonHome.equals(jpyConfig.pythonHome) - : jpyConfig.pythonHome != null) { + : jpyConfig.pythonHome != null) { return false; } if (programName != null ? !programName.equals(jpyConfig.programName) - : jpyConfig.programName != null) { + : jpyConfig.programName != null) { return false; } - if (pythonLib != null ? !pythonLib.equals(jpyConfig.pythonLib) - : jpyConfig.pythonLib != null) { + if (pythonLib != null ? !pythonLib.equals(jpyConfig.pythonLib) : jpyConfig.pythonLib != null) { return false; } if (jpyLib != null ? !jpyLib.equals(jpyConfig.jpyLib) : jpyConfig.jpyLib != null) { diff --git a/py/jpy-config/src/main/java/io/deephaven/jpy/JpyConfigSource.java b/py/jpy-config/src/main/java/io/deephaven/jpy/JpyConfigSource.java index 4a18e0a3cf8..3cc2bac3a64 100644 --- a/py/jpy-config/src/main/java/io/deephaven/jpy/JpyConfigSource.java +++ b/py/jpy-config/src/main/java/io/deephaven/jpy/JpyConfigSource.java @@ -19,8 +19,7 @@ */ public interface JpyConfigSource { - // Note: these are "suggested" property names, since they might not be valid in all - // configuration + // Note: these are "suggested" property names, since they might not be valid in all configuration // contexts. 
/** @@ -44,20 +43,20 @@ public interface JpyConfigSource { String JPY_PROGRAM_NAME_PROP = "jpy.programName"; /** - * Suggested property name for use with {@link #getPythonLib()}. Matches the system property key - * that jpy uses internally for pythonLib. + * Suggested property name for use with {@link #getPythonLib()}. Matches the system property key that jpy uses + * internally for pythonLib. */ String JPY_PY_LIB_PROP = "jpy.pythonLib"; /** - * Suggested property name for use with {@link #getJpyLib()}. Matches the system property key - * that jpy uses internally for jpyLib. + * Suggested property name for use with {@link #getJpyLib()}. Matches the system property key that jpy uses + * internally for jpyLib. */ String JPY_JPY_LIB_PROP = "jpy.jpyLib"; /** - * Suggested property name for use with {@link #getJdlLib()}. Matches the system property key - * that jpy uses internally for jdlLib. + * Suggested property name for use with {@link #getJdlLib()}. Matches the system property key that jpy uses + * internally for jdlLib. 
*/ String JPY_JDL_LIB_PROP = "jpy.jdlLib"; @@ -90,38 +89,38 @@ default Map asProperties() { default EnumSet getFlagsSet() { final EnumSet flags = EnumSet.noneOf(Flag.class); getFlags() - .map(s -> s.split(",")) - .map(Stream::of) - .orElseGet(Stream::empty) - .map(String::trim) - .filter(s -> !s.isEmpty()) - .map(Flag::valueOf) - .forEach(flags::add); + .map(s -> s.split(",")) + .map(Stream::of) + .orElseGet(Stream::empty) + .map(String::trim) + .filter(s -> !s.isEmpty()) + .map(Flag::valueOf) + .forEach(flags::add); return flags; } default List getExtraPathsList() { final List extraPaths = new ArrayList<>(); getExtraPaths() - .map(s -> s.split(",")) - .map(Stream::of) - .orElseGet(Stream::empty) - .map(String::trim) - .filter(s -> !s.isEmpty()) - .map(Paths::get) - .forEachOrdered(extraPaths::add); + .map(s -> s.split(",")) + .map(Stream::of) + .orElseGet(Stream::empty) + .map(String::trim) + .filter(s -> !s.isEmpty()) + .map(Paths::get) + .forEachOrdered(extraPaths::add); return extraPaths; } default JpyConfig asJpyConfig() { return new JpyConfig( - sanitize(getProgramName()), - sanitize(getPythonHome()), - sanitize(getPythonLib()), - sanitize(getJpyLib()), - sanitize(getJdlLib()), - getExtraPathsList(), - getFlagsSet()); + sanitize(getProgramName()), + sanitize(getPythonHome()), + sanitize(getPythonLib()), + sanitize(getJpyLib()), + sanitize(getJdlLib()), + getExtraPathsList(), + getFlagsSet()); } /* private */ static Path sanitize(Optional value) { @@ -129,8 +128,7 @@ default JpyConfig asJpyConfig() { } /** - * A system property based implementation of {@link JpyConfigSource}, using the suggested - * property names. + * A system property based implementation of {@link JpyConfigSource}, using the suggested property names. 
*/ enum SysProps implements JpyConfigSource { INSTANCE; diff --git a/py/jpy-ext/src/main/java/io/deephaven/jpy/BuiltinsModule.java b/py/jpy-ext/src/main/java/io/deephaven/jpy/BuiltinsModule.java index 5f80eddf615..f5a9038b49b 100644 --- a/py/jpy-ext/src/main/java/io/deephaven/jpy/BuiltinsModule.java +++ b/py/jpy-ext/src/main/java/io/deephaven/jpy/BuiltinsModule.java @@ -43,9 +43,8 @@ static BuiltinsModule create() { PyObject dict(); /** - * Get the length (the number of items) of an object. The argument may be a sequence (such as a - * string, bytes, tuple, list, or range) or a collection (such as a dictionary, set, or frozen - * set). + * Get the length (the number of items) of an object. The argument may be a sequence (such as a string, bytes, + * tuple, list, or range) or a collection (such as a dictionary, set, or frozen set). * * @param pyObject the python object * @return the length (the number of items) of an object diff --git a/py/jpy-ext/src/main/java/io/deephaven/jpy/JpyConfigExt.java b/py/jpy-ext/src/main/java/io/deephaven/jpy/JpyConfigExt.java index 0c4c85becd9..7e6daa34ef7 100644 --- a/py/jpy-ext/src/main/java/io/deephaven/jpy/JpyConfigExt.java +++ b/py/jpy-ext/src/main/java/io/deephaven/jpy/JpyConfigExt.java @@ -52,12 +52,12 @@ public void initPython() { } if (initialized) { throw new IllegalStateException( - "Already initialized - this should not happen, unless there is some weird class unloading going on?"); + "Already initialized - this should not happen, unless there is some weird class unloading going on?"); } PyLibInitializer.initPyLib( - config.getPythonLib().map(Path::toString).orElse(null), - config.getJpyLib().map(Path::toString).orElse(null), - config.getJdlLib().map(Path::toString).orElse(null)); + config.getPythonLib().map(Path::toString).orElse(null), + config.getJpyLib().map(Path::toString).orElse(null), + config.getJdlLib().map(Path::toString).orElse(null)); initialized = true; } } @@ -68,8 +68,7 @@ public void startPython() { throw 
new IllegalStateException("PyLib has not been initialized"); } if (!initialized) { - throw new IllegalStateException( - "PyLib has been initialized, but not by the current JpyConfigExt!"); + throw new IllegalStateException("PyLib has been initialized, but not by the current JpyConfigExt!"); } } if (PyLib.isPythonRunning()) { @@ -81,8 +80,7 @@ public void startPython() { for (Flag flag : config.getFlags()) { bitset |= flag.bitset; } - PyLib.startPython(bitset, - config.getExtraPaths().stream().map(Path::toString).toArray(String[]::new)); + PyLib.startPython(bitset, config.getExtraPaths().stream().map(Path::toString).toArray(String[]::new)); } public void stopPython(Duration cleanupTimeout) { @@ -91,8 +89,7 @@ public void stopPython(Duration cleanupTimeout) { throw new IllegalStateException("PyLib has not been initialized"); } if (!initialized) { - throw new IllegalStateException( - "PyLib has been initialized, but not by the current JpyConfigExt!"); + throw new IllegalStateException("PyLib has been initialized, but not by the current JpyConfigExt!"); } } if (!PyLib.isPythonRunning()) { @@ -108,14 +105,12 @@ public void stopPython(Duration cleanupTimeout) { @Override public LogOutput append(LogOutput logOutput) { return logOutput - .append("flags=").append(JpyConfigExt::format, config.getFlags()) - .append(",programName=") - .append(JpyConfigExt::format, config.getProgramName().orElse(null)) - .append(",pythonHome=") - .append(JpyConfigExt::format, config.getPythonHome().orElse(null)) - .append(",pythonLib=").append(JpyConfigExt::format, config.getPythonLib().orElse(null)) - .append(",jpyLib=").append(JpyConfigExt::format, config.getJpyLib().orElse(null)) - .append(",jdlLib=").append(JpyConfigExt::format, config.getJdlLib().orElse(null)) - .append(",extras=").append(JpyConfigExt::format, config.getExtraPaths()); + .append("flags=").append(JpyConfigExt::format, config.getFlags()) + .append(",programName=").append(JpyConfigExt::format, 
config.getProgramName().orElse(null)) + .append(",pythonHome=").append(JpyConfigExt::format, config.getPythonHome().orElse(null)) + .append(",pythonLib=").append(JpyConfigExt::format, config.getPythonLib().orElse(null)) + .append(",jpyLib=").append(JpyConfigExt::format, config.getJpyLib().orElse(null)) + .append(",jdlLib=").append(JpyConfigExt::format, config.getJdlLib().orElse(null)) + .append(",extras=").append(JpyConfigExt::format, config.getExtraPaths()); } } diff --git a/py/jpy-ext/src/main/java/io/deephaven/jpy/JpyModule.java b/py/jpy-ext/src/main/java/io/deephaven/jpy/JpyModule.java index 447e3f0b0de..f18838985ba 100644 --- a/py/jpy-ext/src/main/java/io/deephaven/jpy/JpyModule.java +++ b/py/jpy-ext/src/main/java/io/deephaven/jpy/JpyModule.java @@ -96,46 +96,46 @@ public PrimitiveArrayType getType() { public PyObject newPyCopy(T values) { // PyObject val = jpy.array("", existing-array) return module.call( - PyObject.class, - ARRAY_METHOD_NAME, - String.class, JpyArrayType.of(type), - type.getArrayType(), values); + PyObject.class, + ARRAY_METHOD_NAME, + String.class, JpyArrayType.of(type), + type.getArrayType(), values); } @Override public T newCopy(T values) { // primitive-type[] val = jpy.array("", existing-array) return module.call( - type.getArrayType(), - ARRAY_METHOD_NAME, - String.class, JpyArrayType.of(type), - type.getArrayType(), values); + type.getArrayType(), + ARRAY_METHOD_NAME, + String.class, JpyArrayType.of(type), + type.getArrayType(), values); } @Override public PyObject newPyInstance(int len) { // PyObject val = jpy.array("", length) return module.call( - PyObject.class, - ARRAY_METHOD_NAME, - String.class, JpyArrayType.of(type), - Integer.class, len); + PyObject.class, + ARRAY_METHOD_NAME, + String.class, JpyArrayType.of(type), + Integer.class, len); } @Override public T newInstance(int len) { // primitive-type[] val = jpy.array("", length) return module.call( - type.getArrayType(), - ARRAY_METHOD_NAME, - String.class, 
JpyArrayType.of(type), - Integer.class, len); + type.getArrayType(), + ARRAY_METHOD_NAME, + String.class, JpyArrayType.of(type), + Integer.class, len); } } /** - * This maps a {@link PrimitiveArrayType} to the corresponding type that jpy expects for an - * appropriately typed array. + * This maps a {@link PrimitiveArrayType} to the corresponding type that jpy expects for an appropriately typed + * array. */ private static class JpyArrayType implements Visitor { diff --git a/py/jpy-ext/src/main/java/io/deephaven/util/PrimitiveArrayType.java b/py/jpy-ext/src/main/java/io/deephaven/util/PrimitiveArrayType.java index d22f6eb5b77..aa8c8c5637d 100644 --- a/py/jpy-ext/src/main/java/io/deephaven/util/PrimitiveArrayType.java +++ b/py/jpy-ext/src/main/java/io/deephaven/util/PrimitiveArrayType.java @@ -41,14 +41,14 @@ static Optional> lookupForObject(Object array) { static List> types() { return Arrays.asList( - booleans(), - bytes(), - chars(), - shorts(), - ints(), - longs(), - floats(), - doubles()); + booleans(), + bytes(), + chars(), + shorts(), + ints(), + longs(), + floats(), + doubles()); } static Booleans booleans() { diff --git a/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/CreateModuleTest.java b/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/CreateModuleTest.java index 11d380ddefe..63b0f5469e0 100644 --- a/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/CreateModuleTest.java +++ b/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/CreateModuleTest.java @@ -12,11 +12,11 @@ public class CreateModuleTest extends PythonTest { private static final String PLUS_42_CODE = "def plus_42(x):\n return x + 42"; private static final String GET_STATE = - "import sys\n" - + "def get_globals(x):\n" - + " return globals()\n" - + "def get_locals(x):\n" - + " return locals()\n"; + "import sys\n" + + "def get_globals(x):\n" + + " return globals()\n" + + "def get_locals(x):\n" + + " return locals()\n"; interface Plus42 extends AutoCloseable { 
int plus_42(int x); @@ -38,7 +38,7 @@ interface GetState extends AutoCloseable { @Test public void plus_42() { try (final Plus42 plus42 = getCreateModule() - .callAsFunctionModule("plus_42", PLUS_42_CODE, Plus42.class)) { + .callAsFunctionModule("plus_42", PLUS_42_CODE, Plus42.class)) { Assert.assertEquals(42, plus42.plus_42(0)); Assert.assertEquals(84, plus42.plus_42(42)); } @@ -47,8 +47,8 @@ public void plus_42() { @Test public void getGlobals() { try (final GetState getState = getCreateModule() - .callAsFunctionModule("some_module_name", GET_STATE, GetState.class); - final PyDictWrapper globals = getState.get_globals(42).asDict()) { + .callAsFunctionModule("some_module_name", GET_STATE, GetState.class); + final PyDictWrapper globals = getState.get_globals(42).asDict()) { Assert.assertTrue(globals.containsKey("sys")); Assert.assertTrue(globals.containsKey("get_globals")); Assert.assertTrue(globals.containsKey("get_locals")); @@ -68,8 +68,8 @@ public void getGlobals() { @Test public void getLocals() { try (final GetState getState = getCreateModule() - .callAsFunctionModule("some_module_name", GET_STATE, GetState.class); - final PyDictWrapper locals = getState.get_locals(42).asDict()) { + .callAsFunctionModule("some_module_name", GET_STATE, GetState.class); + final PyDictWrapper locals = getState.get_locals(42).asDict()) { Assert.assertEquals(1, locals.size()); try (final PyObject p = locals.getItem("x")) { Assert.assertTrue(p.isLong()); @@ -95,9 +95,9 @@ public boolean hasGil() { public void hasGil() { final HasGilObject hasGilObject = new HasGilObject(); try (final IdentityModule identityModule = IdentityModule.create(getCreateModule()); - final HasGil proxy = identityModule - .identity(hasGilObject) - .createProxy(HasGil.class)) { + final HasGil proxy = identityModule + .identity(hasGilObject) + .createProxy(HasGil.class)) { Assert.assertFalse(hasGilObject.hasGil()); Assert.assertTrue(proxy.hasGil()); } diff --git 
a/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/PyProxyTest.java b/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/PyProxyTest.java index 76e039259c7..826edda576a 100644 --- a/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/PyProxyTest.java +++ b/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/PyProxyTest.java @@ -38,8 +38,8 @@ interface AfterTheFactJavaCloseable extends WithoutJavaCloseable, AutoCloseable private static String readResource(String name) { try { return new String( - Files.readAllBytes(Paths.get(PyProxyTest.class.getResource(name).toURI())), - StandardCharsets.UTF_8); + Files.readAllBytes(Paths.get(PyProxyTest.class.getResource(name).toURI())), + StandardCharsets.UTF_8); } catch (IOException | URISyntaxException e) { throw new RuntimeException(e); } @@ -64,13 +64,13 @@ public void tearDown() throws Exception { public void withJavaCloseable() { final WithJavaCloseable anotherReference; try (final WithJavaCloseable pythonObj = getCreateModule() - .call("py_proxy_test", readResource("proxy_close_test.py")) - .call("SomeClass") - .createProxy(WithJavaCloseable.class)) { + .call("py_proxy_test", readResource("proxy_close_test.py")) + .call("SomeClass") + .createProxy(WithJavaCloseable.class)) { anotherReference = identityModule - .identity(PyObject.unwrapProxy(pythonObj)) - .createProxy(WithJavaCloseable.class); + .identity(PyObject.unwrapProxy(pythonObj)) + .createProxy(WithJavaCloseable.class); Assert.assertFalse(anotherReference.get_closed()); Assert.assertEquals(pythonObj, anotherReference); @@ -87,13 +87,13 @@ public void withJavaCloseable() { public void withoutJavaCloseable() { final WithoutJavaCloseable anotherReference; final WithoutJavaCloseable pythonObj = getCreateModule() - .call("py_proxy_test", readResource("proxy_close_test.py")) - .call("SomeClass") - .createProxy(WithoutJavaCloseable.class); + .call("py_proxy_test", readResource("proxy_close_test.py")) + .call("SomeClass") + 
.createProxy(WithoutJavaCloseable.class); anotherReference = identityModule - .identity(PyObject.unwrapProxy(pythonObj)) - .createProxy(WithoutJavaCloseable.class); + .identity(PyObject.unwrapProxy(pythonObj)) + .createProxy(WithoutJavaCloseable.class); Assert.assertFalse(anotherReference.get_closed()); Assert.assertEquals(pythonObj, anotherReference); @@ -113,13 +113,13 @@ public void withoutJavaCloseable() { public void afterTheFactJavaCloseable() { final AfterTheFactJavaCloseable anotherReference; final AfterTheFactJavaCloseable pythonObj = getCreateModule() - .call("py_proxy_test", readResource("proxy_close_test.py")) - .call("SomeClass") - .createProxy(AfterTheFactJavaCloseable.class); + .call("py_proxy_test", readResource("proxy_close_test.py")) + .call("SomeClass") + .createProxy(AfterTheFactJavaCloseable.class); anotherReference = identityModule - .identity(PyObject.unwrapProxy(pythonObj)) - .createProxy(AfterTheFactJavaCloseable.class); + .identity(PyObject.unwrapProxy(pythonObj)) + .createProxy(AfterTheFactJavaCloseable.class); Assert.assertFalse(anotherReference.get_closed()); Assert.assertEquals(pythonObj, anotherReference); diff --git a/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/PythonTest.java b/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/PythonTest.java index cfdd14d4151..25b841ed7bd 100644 --- a/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/PythonTest.java +++ b/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/PythonTest.java @@ -31,8 +31,8 @@ public static void assumePython3() { public static String readResource(Class clazz, String name) { try { return new String( - Files.readAllBytes(Paths.get(clazz.getResource(name).toURI())), - StandardCharsets.UTF_8); + Files.readAllBytes(Paths.get(clazz.getResource(name).toURI())), + StandardCharsets.UTF_8); } catch (IOException | URISyntaxException e) { throw new RuntimeException(e); } diff --git 
a/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/integration/DestructorModuleParent.java b/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/integration/DestructorModuleParent.java index 29d41ad7f52..ea74af7d657 100644 --- a/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/integration/DestructorModuleParent.java +++ b/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/integration/DestructorModuleParent.java @@ -20,10 +20,10 @@ static DestructorModuleParent create() { static DestructorModuleParent create(CreateModule createModule) { final String code = readResource("destructor_test.py"); try ( - final PyObject module = createModule.call("destructor_module", code)) { + final PyObject module = createModule.call("destructor_module", code)) { return module - .call("Parent") - .createProxy(DestructorModuleParent.class); + .call("Parent") + .createProxy(DestructorModuleParent.class); } } @@ -35,9 +35,9 @@ static DestructorModuleParent create(CreateModule createModule) { static String readResource(String name) { try { return new String( - Files.readAllBytes(Paths.get( - DestructorModuleParent.class.getResource(name).toURI())), - StandardCharsets.UTF_8); + Files.readAllBytes(Paths.get( + DestructorModuleParent.class.getResource(name).toURI())), + StandardCharsets.UTF_8); } catch (IOException | URISyntaxException e) { throw new RuntimeException(e); } @@ -53,8 +53,7 @@ class OnDelete { // todo: this *doesn't* actually get mapped to a PyCallable_Check b/c the code looks like: /** - * int PyCallable_Check(PyObject *x) { if (x == NULL) return 0; return Py_TYPE(x)->tp_call - * != NULL; } + * int PyCallable_Check(PyObject *x) { if (x == NULL) return 0; return Py_TYPE(x)->tp_call != NULL; } */ public void __call__() { diff --git a/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/integration/IntegerOutTest.java b/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/integration/IntegerOutTest.java index 7a102e8602b..e118c505c57 100644 
--- a/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/integration/IntegerOutTest.java +++ b/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/integration/IntegerOutTest.java @@ -54,7 +54,7 @@ public void intToInteger() { @Test public void explicitIntegerToInteger() { Assert.assertEquals(Integer.valueOf(UNIQ_INT), out - .identity(Integer.valueOf(UNIQ_INT))); + .identity(Integer.valueOf(UNIQ_INT))); } @Test diff --git a/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/integration/NoopModule.java b/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/integration/NoopModule.java index 386895574dd..ce73534b128 100644 --- a/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/integration/NoopModule.java +++ b/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/integration/NoopModule.java @@ -43,9 +43,9 @@ static NoopModule create(CreateModule createModule) { static String readResource(String name) { try { return new String( - Files.readAllBytes(Paths.get( - NoopModule.class.getResource(name).toURI())), - StandardCharsets.UTF_8); + Files.readAllBytes(Paths.get( + NoopModule.class.getResource(name).toURI())), + StandardCharsets.UTF_8); } catch (IOException | URISyntaxException e) { throw new RuntimeException(e); } diff --git a/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/integration/PrimitiveArrayTest.java b/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/integration/PrimitiveArrayTest.java index c0949c5882e..f3f864540ba 100644 --- a/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/integration/PrimitiveArrayTest.java +++ b/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/integration/PrimitiveArrayTest.java @@ -81,13 +81,12 @@ interface ArrayProxy { } enum PrimitiveArrayType { - BOOLEAN(new boolean[] {true, false, true}, Booleans.INSTANCE), CHAR( - new char[] {'a', 'b', 'c'}, - Chars.INSTANCE), BYTE(new byte[] {(byte) 'd', (byte) 'e', (byte) 'a', (byte) 'd'}, - Bytes.INSTANCE), SHORT(new 
short[] {1, 42, 31, 15, -5}, - Shorts.INSTANCE), INT(new int[] {1, 42, 31, 15, -5}, Ints.INSTANCE), LONG( - new long[] {1, 42, 31, 15, -5}, Longs.INSTANCE), FLOAT(new float[] {42.0f}, - Floats.INSTANCE), DOUBLE(new double[] {42.0}, Doubles.INSTANCE); + BOOLEAN(new boolean[] {true, false, true}, Booleans.INSTANCE), CHAR(new char[] {'a', 'b', 'c'}, + Chars.INSTANCE), BYTE(new byte[] {(byte) 'd', (byte) 'e', (byte) 'a', (byte) 'd'}, + Bytes.INSTANCE), SHORT(new short[] {1, 42, 31, 15, -5}, Shorts.INSTANCE), INT( + new int[] {1, 42, 31, 15, -5}, Ints.INSTANCE), LONG(new long[] {1, 42, 31, 15, -5}, + Longs.INSTANCE), FLOAT(new float[] {42.0f}, + Floats.INSTANCE), DOUBLE(new double[] {42.0}, Doubles.INSTANCE); private final Object o; private final io.deephaven.util.PrimitiveArrayType type; @@ -175,8 +174,8 @@ void checkNewPyCopy(PrimitiveArrayTest pat) { public void setUp() { PyObject.executeCode(readResource("primitive_array_test.py"), PyInputMode.SCRIPT); SUT = PyObject - .executeCode("ArrayTest()", PyInputMode.EXPRESSION) - .createProxy(ArrayProxy.class); + .executeCode("ArrayTest()", PyInputMode.EXPRESSION) + .createProxy(ArrayProxy.class); builtins = BuiltinsModule.create(); jpy = JpyModule.create(); } @@ -239,8 +238,8 @@ private void check(PrimitiveArrayType type, Supplier supplier) { private static String readResource(String name) { try { return new String( - Files.readAllBytes(Paths.get(PrimitiveArrayTest.class.getResource(name).toURI())), - StandardCharsets.UTF_8); + Files.readAllBytes(Paths.get(PrimitiveArrayTest.class.getResource(name).toURI())), + StandardCharsets.UTF_8); } catch (IOException | URISyntaxException e) { throw new RuntimeException(e); } diff --git a/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/integration/PyDebug.java b/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/integration/PyDebug.java index bd7afe50a60..52a150e1212 100644 --- a/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/integration/PyDebug.java +++ 
b/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/integration/PyDebug.java @@ -39,13 +39,13 @@ public static void main(String[] args) { private static List getSysPath() { try ( - final PyModule sys = PyModule.importModule("sys"); - final PyObject path = sys.getAttribute("path")) { + final PyModule sys = PyModule.importModule("sys"); + final PyObject path = sys.getAttribute("path")) { return path - .asList() - .stream() - .map(PyObject::getStringValue) - .collect(Collectors.toList()); + .asList() + .stream() + .map(PyObject::getStringValue) + .collect(Collectors.toList()); } } } diff --git a/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/integration/PyDictTest.java b/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/integration/PyDictTest.java index 3a594491fdb..9de9759a8f4 100644 --- a/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/integration/PyDictTest.java +++ b/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/integration/PyDictTest.java @@ -41,9 +41,8 @@ public void emptyDict() { @Test public void simpleDict() { - PyDictWrapper dict = PyObject - .executeCode("{'mock':'yeah', 'ing':'yeah', 'bird':'yeah'}", PyInputMode.EXPRESSION) - .asDict(); + PyDictWrapper dict = + PyObject.executeCode("{'mock':'yeah', 'ing':'yeah', 'bird':'yeah'}", PyInputMode.EXPRESSION).asDict(); Assert.assertTrue(!dict.isEmpty()); Assert.assertTrue(!dict.keySet().isEmpty()); @@ -80,12 +79,12 @@ public void noneKeyDict() { @Test public void globals() { PyLib.getMainGlobals() - .asDict() - .copy() - .entrySet() - .stream() - .map(e -> new SimpleImmutableEntry<>(e.getKey().toString(), convert(e.getValue()))) - .collect(Collectors.toMap(Entry::getKey, Entry::getValue)); + .asDict() + .copy() + .entrySet() + .stream() + .map(e -> new SimpleImmutableEntry<>(e.getKey().toString(), convert(e.getValue()))) + .collect(Collectors.toMap(Entry::getKey, Entry::getValue)); } static Object convert(PyObject pyObject) { diff --git 
a/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/integration/PyLibNullArgTest.java b/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/integration/PyLibNullArgTest.java index 217a3276988..582b6545965 100644 --- a/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/integration/PyLibNullArgTest.java +++ b/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/integration/PyLibNullArgTest.java @@ -19,9 +19,9 @@ interface SomeModule extends AutoCloseable { @Before public void setUp() throws Exception { someModule = getCreateModule().callAsFunctionModule( - "some_module", - readResource(PyLibNullArgTest.class, "pylib_null_arg_test.py"), - SomeModule.class); + "some_module", + readResource(PyLibNullArgTest.class, "pylib_null_arg_test.py"), + SomeModule.class); } @After diff --git a/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/integration/PyLibTest.java b/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/integration/PyLibTest.java index 549aa49041e..ab90128a14e 100644 --- a/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/integration/PyLibTest.java +++ b/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/integration/PyLibTest.java @@ -50,20 +50,20 @@ public void myClass() { @Test public void pingPong5() { Assert.assertEquals("PyLibTest(java,5)(python,4)(java,3)(python,2)(java,1)", - PingPongStack.pingPongPython("PyLibTest", 5)); + PingPongStack.pingPongPython("PyLibTest", 5)); } @Test public void pingPong4() { Assert.assertEquals("PyLibTest(java,4)(python,3)(java,2)(python,1)", - PingPongStack.pingPongPython("PyLibTest", 4)); + PingPongStack.pingPongPython("PyLibTest", 4)); } private static String readResource(String name) { try { return new String( - Files.readAllBytes(Paths.get(PyLibTest.class.getResource(name).toURI())), - StandardCharsets.UTF_8); + Files.readAllBytes(Paths.get(PyLibTest.class.getResource(name).toURI())), + StandardCharsets.UTF_8); } catch (IOException | URISyntaxException e) { throw new 
RuntimeException(e); } diff --git a/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/integration/PySysPath.java b/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/integration/PySysPath.java index 3277e411311..ae80d77c0db 100644 --- a/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/integration/PySysPath.java +++ b/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/integration/PySysPath.java @@ -23,13 +23,13 @@ public static void main(String[] args) { private static List getSysPath() { try ( - final PyModule sys = PyModule.importModule("sys"); - final PyObject path = sys.getAttribute("path")) { + final PyModule sys = PyModule.importModule("sys"); + final PyObject path = sys.getAttribute("path")) { return path - .asList() - .stream() - .map(PyObject::getStringValue) - .collect(Collectors.toList()); + .asList() + .stream() + .map(PyObject::getStringValue) + .collect(Collectors.toList()); } } } diff --git a/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/integration/ReferenceCounting.java b/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/integration/ReferenceCounting.java index 3645caf4182..2f2f6b537e7 100644 --- a/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/integration/ReferenceCounting.java +++ b/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/integration/ReferenceCounting.java @@ -31,8 +31,7 @@ public void close() { public void check(int logicalReferenceCount, Object obj) { final PyObject pyObject = PyObject.unwrapProxy(obj); if (pyObject != null) { - obj = pyObject; // todo: arguably, this is something that the the jpy should be doing - // itself + obj = pyObject; // todo: arguably, this is something that the the jpy should be doing itself } // the extra ref b/c of the tuple ref-stealing (see PyLib_CallAndReturnObject) @@ -64,11 +63,10 @@ private int getrefcount(Object o) { } /** - * This is a fragile method, meant to ensure that GC gets invoked. 
There are a couple of - * shortcomings: + * This is a fragile method, meant to ensure that GC gets invoked. There are a couple of shortcomings: * - * 1) There is no guarantee that GC will actually be invoked. 2) Even if our dummy object is - * collected, it doesn't guarantee that any other objects we care about have been GCd. + * 1) There is no guarantee that GC will actually be invoked. 2) Even if our dummy object is collected, it doesn't + * guarantee that any other objects we care about have been GCd. * * That said - this seems to work for at least some VM implementations. */ @@ -91,8 +89,7 @@ protected void finalize() throws Throwable { } /** - * The blackhole ensures that java can't GC away our java objects early (which effects the - * python reference count) + * The blackhole ensures that java can't GC away our java objects early (which effects the python reference count) */ public static void blackhole(Object... objects) { if (Objects.hash(objects) == Integer.MAX_VALUE) { diff --git a/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/integration/ReferenceCountingTest.java b/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/integration/ReferenceCountingTest.java index aefd6daff5e..ced0b0d654f 100644 --- a/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/integration/ReferenceCountingTest.java +++ b/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/integration/ReferenceCountingTest.java @@ -49,8 +49,8 @@ public void tearDown() { } /* - * @Test public void javaOnlyObjectDoesntHavePythonReferences() { final Object obj = new - * Object(); checkReferenceCount(0, obj); blackhole(obj); } + * @Test public void javaOnlyObjectDoesntHavePythonReferences() { final Object obj = new Object(); + * checkReferenceCount(0, obj); blackhole(obj); } */ @Test @@ -168,8 +168,7 @@ public void identityFunctionOnPyObjectIncreasesCount() { @Test public void nativePythonObjectsCanLiveInJava() { - // A slightly different construction, showing raw 
executeCode as statements instead of - // expressions + // A slightly different construction, showing raw executeCode as statements instead of expressions PyObject.executeCode("devin = {'was': 'here'}", PyInputMode.STATEMENT); final PyObject devin = PyObject.executeCode("devin", PyInputMode.EXPRESSION); ref.check(2, devin); @@ -210,8 +209,8 @@ public void pythonObjectInJavaWillDestructAfterClosure() throws InterruptedExcep @Test public void setAttributeDelAttributeCounted() { try ( - final PyObject simpleObject = SimpleObject.create(getCreateModule()); - final PyObject someValue = builtins.dict()) { + final PyObject simpleObject = SimpleObject.create(getCreateModule()); + final PyObject someValue = builtins.dict()) { ref.check(1, simpleObject); ref.check(1, someValue); diff --git a/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/integration/SimpleObject.java b/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/integration/SimpleObject.java index b5b382924c7..26f7f8725b1 100644 --- a/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/integration/SimpleObject.java +++ b/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/integration/SimpleObject.java @@ -20,9 +20,9 @@ static PyObject create(CreateModule createModule) { static String readResource(String name) { try { return new String( - Files.readAllBytes(Paths.get( - SimpleObject.class.getResource(name).toURI())), - StandardCharsets.UTF_8); + Files.readAllBytes(Paths.get( + SimpleObject.class.getResource(name).toURI())), + StandardCharsets.UTF_8); } catch (IOException | URISyntaxException e) { throw new RuntimeException(e); } diff --git a/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/integration/TypeTest.java b/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/integration/TypeTest.java index 41d164690d8..0a301a9ffc8 100644 --- a/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/integration/TypeTest.java +++ 
b/py/jpy-integration/src/javaToPython/java/io/deephaven/jpy/integration/TypeTest.java @@ -10,10 +10,10 @@ public class TypeTest extends PythonTest { @Test public void checkReferenceCount() { try ( - final ReferenceCounting ref = ReferenceCounting.create(); - final IdentityModule identity = IdentityModule.create(getCreateModule()); - final PyObject pyObject = SimpleObject.create(getCreateModule()); - final PyObject type = pyObject.getType()) { + final ReferenceCounting ref = ReferenceCounting.create(); + final IdentityModule identity = IdentityModule.create(getCreateModule()); + final PyObject pyObject = SimpleObject.create(getCreateModule()); + final PyObject type = pyObject.getType()) { // It's hard for me to be more precise about this - jpy, and python itself, might be // keeping their own references to the type for lookup purposes. diff --git a/py/jpy-integration/src/pythonToJava/java/io/deephaven/jpy/integration/PassPyObjectToJava.java b/py/jpy-integration/src/pythonToJava/java/io/deephaven/jpy/integration/PassPyObjectToJava.java index 6b028ee0acd..d5be43430bb 100644 --- a/py/jpy-integration/src/pythonToJava/java/io/deephaven/jpy/integration/PassPyObjectToJava.java +++ b/py/jpy-integration/src/pythonToJava/java/io/deephaven/jpy/integration/PassPyObjectToJava.java @@ -4,14 +4,12 @@ public class PassPyObjectToJava { public static void from_python_with_love(PyObject object) { - System.out.println( - String.format("Received PyObject, pointer=%d, str=%s", object.getPointer(), object)); + System.out.println(String.format("Received PyObject, pointer=%d, str=%s", object.getPointer(), object)); } public static void from_python_with_love_var(PyObject... 
varArgs) { for (PyObject arg : varArgs) { - System.out.println( - String.format("Received PyObject(s), pointer=%d, str=%s", arg.getPointer(), arg)); + System.out.println(String.format("Received PyObject(s), pointer=%d, str=%s", arg.getPointer(), arg)); } } @@ -20,8 +18,8 @@ interface TheContext { } public static int invoke_the_context_plus(PyObject object, int other) { - // The easiest way to invoke functions on a python object is to have the python object be a - // class, and proxy it to a java interface! + // The easiest way to invoke functions on a python object is to have the python object be a class, and proxy it + // to a java interface! TheContext theContext = object.createProxy(TheContext.class); return theContext.plus(other); } diff --git a/py/jpy-integration/src/test/java/io/deephaven/jpy/integration/PingPongStack.java b/py/jpy-integration/src/test/java/io/deephaven/jpy/integration/PingPongStack.java index 984dcb71c58..6c06879617e 100644 --- a/py/jpy-integration/src/test/java/io/deephaven/jpy/integration/PingPongStack.java +++ b/py/jpy-integration/src/test/java/io/deephaven/jpy/integration/PingPongStack.java @@ -21,9 +21,8 @@ private synchronized static void init() { if (CODE == null) { try { CODE = new String( - Files.readAllBytes( - Paths.get(PingPongStack.class.getResource("pingpongstack.py").toURI())), - StandardCharsets.UTF_8); + Files.readAllBytes(Paths.get(PingPongStack.class.getResource("pingpongstack.py").toURI())), + StandardCharsets.UTF_8); } catch (IOException | URISyntaxException e) { throw new RuntimeException(e); } @@ -38,15 +37,12 @@ public static String pingPongPython(String result, int remaining) { if (remaining <= 0) { return result; } - // note: we can't cache this module here, since we are starting and stopping python - // interpreter - // from our junit testing framework, and don't have the appropriate infra to pass down the - // env. 
+ // note: we can't cache this module here, since we are starting and stopping python interpreter + // from our junit testing framework, and don't have the appropriate infra to pass down the env. // the same thing would likely happen from python->java, in regards to caching // jpy.get_type() among jvm create/destroys. PyObject module = loadCodeAsModule(CODE, "pingpongstack"); - return module.call("ping_pong_java", result + "(java," + remaining + ")", remaining - 1) - .str(); + return module.call("ping_pong_java", result + "(java," + remaining + ")", remaining - 1).str(); } } diff --git a/qst/src/main/java/io/deephaven/annotations/AllowNulls.java b/qst/src/main/java/io/deephaven/annotations/AllowNulls.java index 2533192b300..d97e8fd6896 100644 --- a/qst/src/main/java/io/deephaven/annotations/AllowNulls.java +++ b/qst/src/main/java/io/deephaven/annotations/AllowNulls.java @@ -8,8 +8,7 @@ /** * Annotation to allow nulls in {@link org.immutables.value.Value.Immutable} collections. * - * @see nulls-in-collection */ @Retention(RetentionPolicy.CLASS) @Target({ElementType.METHOD}) diff --git a/qst/src/main/java/io/deephaven/annotations/LeafStyle.java b/qst/src/main/java/io/deephaven/annotations/LeafStyle.java index 5469378c97d..6206ba262c8 100644 --- a/qst/src/main/java/io/deephaven/annotations/LeafStyle.java +++ b/qst/src/main/java/io/deephaven/annotations/LeafStyle.java @@ -14,6 +14,6 @@ @Target({ElementType.TYPE}) @Retention(RetentionPolicy.CLASS) @Value.Style(visibility = ImplementationVisibility.PACKAGE, - defaults = @Value.Immutable(copy = false), strictBuilder = true, weakInterning = true) + defaults = @Value.Immutable(copy = false), strictBuilder = true, weakInterning = true) public @interface LeafStyle { } diff --git a/qst/src/main/java/io/deephaven/annotations/NodeStyle.java b/qst/src/main/java/io/deephaven/annotations/NodeStyle.java index 45d6dca10dd..43cf8fa17b5 100644 --- a/qst/src/main/java/io/deephaven/annotations/NodeStyle.java +++ 
b/qst/src/main/java/io/deephaven/annotations/NodeStyle.java @@ -9,13 +9,12 @@ import java.lang.annotation.Target; /** - * The node style is suitable for nested / recursive structures. As such, it is prehashed and - * interned. + * The node style is suitable for nested / recursive structures. As such, it is prehashed and interned. */ @Target({ElementType.TYPE}) @Retention(RetentionPolicy.CLASS) @Value.Style(visibility = ImplementationVisibility.PACKAGE, - defaults = @Value.Immutable(prehash = true, intern = true), strictBuilder = true, - weakInterning = true) + defaults = @Value.Immutable(prehash = true, intern = true), strictBuilder = true, + weakInterning = true) public @interface NodeStyle { } diff --git a/qst/src/main/java/io/deephaven/qst/TableAdapterImpl.java b/qst/src/main/java/io/deephaven/qst/TableAdapterImpl.java index bb0bea0962c..f2cd1d1abf1 100644 --- a/qst/src/main/java/io/deephaven/qst/TableAdapterImpl.java +++ b/qst/src/main/java/io/deephaven/qst/TableAdapterImpl.java @@ -36,14 +36,14 @@ class TableAdapterImpl, TABLE> implements Visitor { static , TABLE> TABLE toTable( - TableCreator

    creation, TableCreator.TableToOperations toOps, - TableCreator.OperationsToTable toTable, TableSpec table) { + TableCreator
    creation, TableCreator.TableToOperations toOps, + TableCreator.OperationsToTable toTable, TableSpec table) { return table.walk(new TableAdapterImpl<>(creation, toOps, toTable)).getTableOut(); } static , TABLE> TOPS toOperations( - TableCreator
    creation, TableCreator.TableToOperations toOps, - TableCreator.OperationsToTable toTable, TableSpec table) { + TableCreator
    creation, TableCreator.TableToOperations toOps, + TableCreator.OperationsToTable toTable, TableSpec table) { return table.walk(new TableAdapterImpl<>(creation, toOps, toTable)).getOperationsOut(); } @@ -55,8 +55,8 @@ static , TABLE> TOPS toOperations( private TOPS topsOut; private TableAdapterImpl(TableCreator
    tableCreation, - TableCreator.TableToOperations toOps, - TableCreator.OperationsToTable toTable) { + TableCreator.TableToOperations toOps, + TableCreator.OperationsToTable toTable) { this.tableCreation = Objects.requireNonNull(tableCreation); this.toOps = Objects.requireNonNull(toOps); this.toTable = Objects.requireNonNull(toTable); @@ -112,7 +112,7 @@ public void visit(TimeTable timeTable) { @Override public void visit(MergeTable mergeTable) { List
    tables = - mergeTable.tables().stream().map(this::table).collect(Collectors.toList()); + mergeTable.tables().stream().map(this::table).collect(Collectors.toList()); tableOut = tableCreation.merge(tables); } @@ -141,7 +141,7 @@ public void visit(SnapshotTable snapshotTable) { final TOPS trigger = ops(snapshotTable.trigger()); final TABLE base = table(snapshotTable.base()); topsOut = - trigger.snapshot(base, snapshotTable.doInitialSnapshot(), snapshotTable.stampColumns()); + trigger.snapshot(base, snapshotTable.doInitialSnapshot(), snapshotTable.stampColumns()); } @Override @@ -202,7 +202,7 @@ public void visit(JoinTable joinTable) { final TOPS left = ops(joinTable.left()); final TABLE right = table(joinTable.right()); topsOut = - left.join(right, joinTable.matches(), joinTable.additions(), joinTable.reserveBits()); + left.join(right, joinTable.matches(), joinTable.additions(), joinTable.reserveBits()); } @Override @@ -234,6 +234,6 @@ public void visit(ByTable byTable) { @Override public void visit(AggregationTable aggregationTable) { topsOut = parentOps(aggregationTable).by(aggregationTable.columns(), - aggregationTable.aggregations()); + aggregationTable.aggregations()); } } diff --git a/qst/src/main/java/io/deephaven/qst/TableCreator.java b/qst/src/main/java/io/deephaven/qst/TableCreator.java index b1514479565..e21549689d7 100644 --- a/qst/src/main/java/io/deephaven/qst/TableCreator.java +++ b/qst/src/main/java/io/deephaven/qst/TableCreator.java @@ -32,8 +32,8 @@ public interface TableCreator
    { * @return the output table */ static , TABLE> TABLE create( - TableCreator
    creation, TableToOperations toOps, - OperationsToTable toTable, TableSpec table) { + TableCreator
    creation, TableToOperations toOps, + OperationsToTable toTable, TableSpec table) { return TableAdapterImpl.toTable(creation, toOps, toTable, table); } @@ -177,7 +177,7 @@ default TABLE merge(TABLE t1, TABLE t2, TABLE t3, TABLE t4, TABLE t5, TABLE t6, * @see #merge(Iterable) */ default TABLE merge(TABLE t1, TABLE t2, TABLE t3, TABLE t4, TABLE t5, TABLE t6, TABLE t7, - TABLE t8) { + TABLE t8) { return merge(Arrays.asList(t1, t2, t3, t4, t5, t6, t7, t8)); } @@ -185,7 +185,7 @@ default TABLE merge(TABLE t1, TABLE t2, TABLE t3, TABLE t4, TABLE t5, TABLE t6, * @see #merge(Iterable) */ default TABLE merge(TABLE t1, TABLE t2, TABLE t3, TABLE t4, TABLE t5, TABLE t6, TABLE t7, - TABLE t8, TABLE t9) { + TABLE t8, TABLE t9) { return merge(Arrays.asList(t1, t2, t3, t4, t5, t6, t7, t8, t9)); } @@ -194,10 +194,10 @@ default TABLE merge(TABLE t1, TABLE t2, TABLE t3, TABLE t4, TABLE t5, TABLE t6, */ @SuppressWarnings("unchecked") default TABLE merge(TABLE t1, TABLE t2, TABLE t3, TABLE t4, TABLE t5, TABLE t6, TABLE t7, - TABLE t8, TABLE t9, TABLE... remaining) { + TABLE t8, TABLE t9, TABLE... 
remaining) { return merge( - () -> Stream.concat(Stream.of(t1, t2, t3, t4, t5, t6, t7, t8, t9), Stream.of(remaining)) - .iterator()); + () -> Stream.concat(Stream.of(t1, t2, t3, t4, t5, t6, t7, t8, t9), Stream.of(remaining)) + .iterator()); } /** diff --git a/qst/src/main/java/io/deephaven/qst/array/BooleanArray.java b/qst/src/main/java/io/deephaven/qst/array/BooleanArray.java index 85050dcb8ed..9020e5669c0 100644 --- a/qst/src/main/java/io/deephaven/qst/array/BooleanArray.java +++ b/qst/src/main/java/io/deephaven/qst/array/BooleanArray.java @@ -90,7 +90,7 @@ public final int hashCode() { } public static class Builder extends PrimitiveArrayHelper - implements ArrayBuilder { + implements ArrayBuilder { private Builder(int initialCapacity) { super(new byte[initialCapacity]); diff --git a/qst/src/main/java/io/deephaven/qst/array/ByteArray.java b/qst/src/main/java/io/deephaven/qst/array/ByteArray.java index ac4afd0d812..1928fbbc65c 100644 --- a/qst/src/main/java/io/deephaven/qst/array/ByteArray.java +++ b/qst/src/main/java/io/deephaven/qst/array/ByteArray.java @@ -89,7 +89,7 @@ public final int hashCode() { } public static class Builder extends PrimitiveArrayHelper - implements ArrayBuilder { + implements ArrayBuilder { private Builder(int initialCapacity) { super(new byte[initialCapacity]); diff --git a/qst/src/main/java/io/deephaven/qst/array/CharArray.java b/qst/src/main/java/io/deephaven/qst/array/CharArray.java index ebe67527621..94ae8ab388b 100644 --- a/qst/src/main/java/io/deephaven/qst/array/CharArray.java +++ b/qst/src/main/java/io/deephaven/qst/array/CharArray.java @@ -89,7 +89,7 @@ public final int hashCode() { } public static class Builder extends PrimitiveArrayHelper - implements ArrayBuilder { + implements ArrayBuilder { private Builder(int initialCapacity) { super(new char[initialCapacity]); diff --git a/qst/src/main/java/io/deephaven/qst/array/DoubleArray.java b/qst/src/main/java/io/deephaven/qst/array/DoubleArray.java index d3f6de44e23..d016a780a87 
100644 --- a/qst/src/main/java/io/deephaven/qst/array/DoubleArray.java +++ b/qst/src/main/java/io/deephaven/qst/array/DoubleArray.java @@ -89,7 +89,7 @@ public final int hashCode() { } public static class Builder extends PrimitiveArrayHelper - implements ArrayBuilder { + implements ArrayBuilder { private Builder(int initialCapacity) { super(new double[initialCapacity]); diff --git a/qst/src/main/java/io/deephaven/qst/array/FloatArray.java b/qst/src/main/java/io/deephaven/qst/array/FloatArray.java index 8eebd6295cc..1850cc9a8f5 100644 --- a/qst/src/main/java/io/deephaven/qst/array/FloatArray.java +++ b/qst/src/main/java/io/deephaven/qst/array/FloatArray.java @@ -89,7 +89,7 @@ public final int hashCode() { } public static class Builder extends PrimitiveArrayHelper - implements ArrayBuilder { + implements ArrayBuilder { private Builder(int initialCapacity) { super(new float[initialCapacity]); diff --git a/qst/src/main/java/io/deephaven/qst/array/GenericArray.java b/qst/src/main/java/io/deephaven/qst/array/GenericArray.java index 9881472548f..e126c159efc 100644 --- a/qst/src/main/java/io/deephaven/qst/array/GenericArray.java +++ b/qst/src/main/java/io/deephaven/qst/array/GenericArray.java @@ -60,14 +60,14 @@ public final T get(int index) { public final GenericArray cast(GenericType type) { if (!componentType().equals(type)) { throw new IllegalArgumentException( - String.format("Can't cast GenericArray with type %s to %s", componentType(), type)); + String.format("Can't cast GenericArray with type %s to %s", componentType(), type)); } // noinspection unchecked return (GenericArray) this; } public abstract static class Builder - implements ArrayBuilder, Builder> { + implements ArrayBuilder, Builder> { public abstract Builder addValues(T item); diff --git a/qst/src/main/java/io/deephaven/qst/array/IntArray.java b/qst/src/main/java/io/deephaven/qst/array/IntArray.java index f838b55997f..e21cd9063d3 100644 --- a/qst/src/main/java/io/deephaven/qst/array/IntArray.java +++ 
b/qst/src/main/java/io/deephaven/qst/array/IntArray.java @@ -89,7 +89,7 @@ public final int hashCode() { } public static class Builder extends PrimitiveArrayHelper - implements ArrayBuilder { + implements ArrayBuilder { private Builder(int initialCapacity) { super(new int[initialCapacity]); diff --git a/qst/src/main/java/io/deephaven/qst/array/LongArray.java b/qst/src/main/java/io/deephaven/qst/array/LongArray.java index 35437650f0c..3c45f8e00e2 100644 --- a/qst/src/main/java/io/deephaven/qst/array/LongArray.java +++ b/qst/src/main/java/io/deephaven/qst/array/LongArray.java @@ -89,7 +89,7 @@ public final int hashCode() { } public static class Builder extends PrimitiveArrayHelper - implements ArrayBuilder { + implements ArrayBuilder { private Builder(int initialCapacity) { super(new long[initialCapacity]); diff --git a/qst/src/main/java/io/deephaven/qst/array/PrimitiveArray.java b/qst/src/main/java/io/deephaven/qst/array/PrimitiveArray.java index 35e3a433731..4291d847784 100644 --- a/qst/src/main/java/io/deephaven/qst/array/PrimitiveArray.java +++ b/qst/src/main/java/io/deephaven/qst/array/PrimitiveArray.java @@ -24,7 +24,7 @@ public interface PrimitiveArray extends Array { } static ArrayBuilder, ?> builder(PrimitiveType type, - int initialCapacity) { + int initialCapacity) { return TypeToArrayBuilder.of(type, initialCapacity); } diff --git a/qst/src/main/java/io/deephaven/qst/array/ShortArray.java b/qst/src/main/java/io/deephaven/qst/array/ShortArray.java index a0172f959f8..be78253eed9 100644 --- a/qst/src/main/java/io/deephaven/qst/array/ShortArray.java +++ b/qst/src/main/java/io/deephaven/qst/array/ShortArray.java @@ -89,7 +89,7 @@ public final int hashCode() { } public static class Builder extends PrimitiveArrayHelper - implements ArrayBuilder { + implements ArrayBuilder { private Builder(int initialCapacity) { super(new short[initialCapacity]); } diff --git a/qst/src/main/java/io/deephaven/qst/array/TypeToArrayBuilder.java 
b/qst/src/main/java/io/deephaven/qst/array/TypeToArrayBuilder.java index c308915946c..5aed35d35f1 100644 --- a/qst/src/main/java/io/deephaven/qst/array/TypeToArrayBuilder.java +++ b/qst/src/main/java/io/deephaven/qst/array/TypeToArrayBuilder.java @@ -22,7 +22,7 @@ class TypeToArrayBuilder implements Type.Visitor, PrimitiveType.Visitor { } static ArrayBuilder, ?> of(PrimitiveType type, - int initialCapacity) { + int initialCapacity) { TypeToArrayBuilder visitor = new TypeToArrayBuilder(initialCapacity); type.walk((PrimitiveType.Visitor) visitor); // noinspection unchecked diff --git a/qst/src/main/java/io/deephaven/qst/column/header/ColumnHeader.java b/qst/src/main/java/io/deephaven/qst/column/header/ColumnHeader.java index 02017fa1125..b31e44e5b68 100644 --- a/qst/src/main/java/io/deephaven/qst/column/header/ColumnHeader.java +++ b/qst/src/main/java/io/deephaven/qst/column/header/ColumnHeader.java @@ -20,8 +20,8 @@ * A column header is {@link #name() name} and {@link #componentType() type} pair. * *

    - * Multiple column headers, up to 9, can be strongly-linked together to provide a convenient - * interface for building {@link NewTable new tables} in a row-oriented, type-safe manner. + * Multiple column headers, up to 9, can be strongly-linked together to provide a convenient interface for building + * {@link NewTable new tables} in a row-oriented, type-safe manner. * * @param the type */ @@ -84,55 +84,55 @@ public static ColumnHeaders2 of(ColumnHeader c1, ColumnHead } public static ColumnHeaders3 of(ColumnHeader c1, - ColumnHeader c2, ColumnHeader c3) { + ColumnHeader c2, ColumnHeader c3) { return of(c1, c2).header(c3); } public static ColumnHeaders4 of(ColumnHeader c1, - ColumnHeader c2, ColumnHeader c3, ColumnHeader c4) { + ColumnHeader c2, ColumnHeader c3, ColumnHeader c4) { return of(c1, c2, c3).header(c4); } public static ColumnHeaders5 of(ColumnHeader c1, - ColumnHeader c2, ColumnHeader c3, ColumnHeader c4, ColumnHeader c5) { + ColumnHeader c2, ColumnHeader c3, ColumnHeader c4, ColumnHeader c5) { return of(c1, c2, c3, c4).header(c5); } public static ColumnHeaders6 of( - ColumnHeader c1, ColumnHeader c2, ColumnHeader c3, ColumnHeader c4, - ColumnHeader c5, ColumnHeader c6) { + ColumnHeader c1, ColumnHeader c2, ColumnHeader c3, ColumnHeader c4, + ColumnHeader c5, ColumnHeader c6) { return of(c1, c2, c3, c4, c5).header(c6); } public static ColumnHeaders7 of( - ColumnHeader c1, ColumnHeader c2, ColumnHeader c3, ColumnHeader c4, - ColumnHeader c5, ColumnHeader c6, ColumnHeader c7) { + ColumnHeader c1, ColumnHeader c2, ColumnHeader c3, ColumnHeader c4, + ColumnHeader c5, ColumnHeader c6, ColumnHeader c7) { return of(c1, c2, c3, c4, c5, c6).header(c7); } public static ColumnHeaders8 of( - ColumnHeader c1, ColumnHeader c2, ColumnHeader c3, ColumnHeader c4, - ColumnHeader c5, ColumnHeader c6, ColumnHeader c7, ColumnHeader c8) { + ColumnHeader c1, ColumnHeader c2, ColumnHeader c3, ColumnHeader c4, + ColumnHeader c5, ColumnHeader c6, ColumnHeader c7, 
ColumnHeader c8) { return of(c1, c2, c3, c4, c5, c6, c7).header(c8); } public static ColumnHeaders9 of( - ColumnHeader c1, ColumnHeader c2, ColumnHeader c3, ColumnHeader c4, - ColumnHeader c5, ColumnHeader c6, ColumnHeader c7, ColumnHeader c8, - ColumnHeader c9) { + ColumnHeader c1, ColumnHeader c2, ColumnHeader c3, ColumnHeader c4, + ColumnHeader c5, ColumnHeader c6, ColumnHeader c7, ColumnHeader c8, + ColumnHeader c9) { return of(c1, c2, c3, c4, c5, c6, c7, c8).header(c9); } // Note: we can add additional typed ColumnHeaders about 9 if desired public static ColumnHeadersN of( - ColumnHeader c1, ColumnHeader c2, ColumnHeader c3, ColumnHeader c4, - ColumnHeader c5, ColumnHeader c6, ColumnHeader c7, ColumnHeader c8, - ColumnHeader c9, ColumnHeader... headers) { + ColumnHeader c1, ColumnHeader c2, ColumnHeader c3, ColumnHeader c4, + ColumnHeader c5, ColumnHeader c6, ColumnHeader c7, ColumnHeader c8, + ColumnHeader c9, ColumnHeader... headers) { ColumnHeaders9 typed = - of(c1, c2, c3, c4, c5, c6, c7, c8, c9); + of(c1, c2, c3, c4, c5, c6, c7, c8, c9); return ImmutableColumnHeadersN.builder().others(typed) - .addHeaders(headers).build(); + .addHeaders(headers).build(); } @Parameter diff --git a/qst/src/main/java/io/deephaven/qst/column/header/ColumnHeaders6.java b/qst/src/main/java/io/deephaven/qst/column/header/ColumnHeaders6.java index 68303045818..a33f3ebee9b 100644 --- a/qst/src/main/java/io/deephaven/qst/column/header/ColumnHeaders6.java +++ b/qst/src/main/java/io/deephaven/qst/column/header/ColumnHeaders6.java @@ -26,12 +26,12 @@ public abstract class ColumnHeaders6 implements TableHea public abstract ColumnHeaders5 others(); public final ColumnHeaders7 header(String name, - Class clazz) { + Class clazz) { return header(ColumnHeader.of(name, clazz)); } public final ColumnHeaders7 header(String name, - Type type) { + Type type) { return header(ColumnHeader.of(name, type)); } diff --git a/qst/src/main/java/io/deephaven/qst/column/header/ColumnHeaders7.java 
b/qst/src/main/java/io/deephaven/qst/column/header/ColumnHeaders7.java index 8dce70372c7..a8227b4d832 100644 --- a/qst/src/main/java/io/deephaven/qst/column/header/ColumnHeaders7.java +++ b/qst/src/main/java/io/deephaven/qst/column/header/ColumnHeaders7.java @@ -26,17 +26,17 @@ public abstract class ColumnHeaders7 implements Tabl public abstract ColumnHeaders6 others(); public final ColumnHeaders8 header(String name, - Class clazz) { + Class clazz) { return header(ColumnHeader.of(name, clazz)); } public final ColumnHeaders8 header(String name, - Type type) { + Type type) { return header(ColumnHeader.of(name, type)); } public final ColumnHeaders8 header( - ColumnHeader header) { + ColumnHeader header) { return ImmutableColumnHeaders8.of(header, this); } diff --git a/qst/src/main/java/io/deephaven/qst/column/header/ColumnHeaders8.java b/qst/src/main/java/io/deephaven/qst/column/header/ColumnHeaders8.java index c6cd27cf51c..98f8d6b7691 100644 --- a/qst/src/main/java/io/deephaven/qst/column/header/ColumnHeaders8.java +++ b/qst/src/main/java/io/deephaven/qst/column/header/ColumnHeaders8.java @@ -18,7 +18,7 @@ @Immutable @SimpleStyle public abstract class ColumnHeaders8 - implements TableHeader.Buildable { + implements TableHeader.Buildable { @Parameter public abstract ColumnHeader header8(); @@ -27,17 +27,17 @@ public abstract class ColumnHeaders8 public abstract ColumnHeaders7 others(); public final ColumnHeaders9 header(String name, - Class clazz) { + Class clazz) { return header(ColumnHeader.of(name, clazz)); } public final ColumnHeaders9 header(String name, - Type type) { + Type type) { return header(ColumnHeader.of(name, type)); } public final ColumnHeaders9 header( - ColumnHeader header) { + ColumnHeader header) { return ImmutableColumnHeaders9.of(header, this); } diff --git a/qst/src/main/java/io/deephaven/qst/column/header/ColumnHeaders9.java b/qst/src/main/java/io/deephaven/qst/column/header/ColumnHeaders9.java index fb34e59bd84..340dae76df7 100644 --- 
a/qst/src/main/java/io/deephaven/qst/column/header/ColumnHeaders9.java +++ b/qst/src/main/java/io/deephaven/qst/column/header/ColumnHeaders9.java @@ -17,7 +17,7 @@ @Immutable @SimpleStyle public abstract class ColumnHeaders9 - implements TableHeader.Buildable { + implements TableHeader.Buildable { @Parameter public abstract ColumnHeader header9(); @@ -27,7 +27,7 @@ public abstract class ColumnHeaders9 public final ColumnHeadersN header(ColumnHeader header) { return ImmutableColumnHeadersN.builder().others(this) - .addHeaders(header).build(); + .addHeaders(header).build(); } public final Rows start(int initialCapacity) { diff --git a/qst/src/main/java/io/deephaven/qst/column/header/ColumnHeadersN.java b/qst/src/main/java/io/deephaven/qst/column/header/ColumnHeadersN.java index d9817858fb4..0f3d47aa7ca 100644 --- a/qst/src/main/java/io/deephaven/qst/column/header/ColumnHeadersN.java +++ b/qst/src/main/java/io/deephaven/qst/column/header/ColumnHeadersN.java @@ -19,7 +19,7 @@ @Immutable @BuildableStyle public abstract class ColumnHeadersN - implements TableHeader.Buildable { + implements TableHeader.Buildable { public abstract List> headers(); @@ -27,7 +27,7 @@ public abstract class ColumnHeadersN public final ColumnHeadersN header(ColumnHeader header) { return ImmutableColumnHeadersN.builder() - .others(others()).addAllHeaders(headers()).addHeaders(header).build(); + .others(others()).addAllHeaders(headers()).addHeaders(header).build(); } public final Rows start(int initialCapacity) { @@ -35,7 +35,7 @@ public final Rows start(int initialCapacity) { } public final Rows row(T1 a, T2 b, T3 c, T4 d, T5 e, T6 f, T7 g, T8 h, T9 i, - Object... remaining) { + Object... 
remaining) { return start(DEFAULT_BUILDER_INITIAL_CAPACITY).row(a, b, c, d, e, f, g, h, i, remaining); } @@ -43,7 +43,7 @@ public final Rows row(T1 a, T2 b, T3 c, T4 d, T5 e, T6 f, T7 g, T8 h, T9 i, final void checkSize() { if (headers().isEmpty()) { throw new IllegalArgumentException(String - .format("Additional headers are empty, use %s instead", ColumnHeaders9.class)); + .format("Additional headers are empty, use %s instead", ColumnHeaders9.class)); } } @@ -60,12 +60,12 @@ public class Rows implements NewTable.Buildable { } public final Rows row(T1 c1, T2 c2, T3 c3, T4 c4, T5 c5, T6 c6, T7 c7, T8 c8, T9 c9, - Object... remaining) { + Object... remaining) { if (remaining.length != headers().size()) { final int expected = 9 + headers().size(); final int actual = 9 + remaining.length; throw new IllegalArgumentException( - String.format("Expected %d columns, found %d", expected, actual)); + String.format("Expected %d columns, found %d", expected, actual)); } others.row(c1, c2, c3, c4, c5, c6, c7, c8, c9); int ix = 0; diff --git a/qst/src/main/java/io/deephaven/qst/table/HeadTable.java b/qst/src/main/java/io/deephaven/qst/table/HeadTable.java index 6f89b175e55..7ebfa96638d 100644 --- a/qst/src/main/java/io/deephaven/qst/table/HeadTable.java +++ b/qst/src/main/java/io/deephaven/qst/table/HeadTable.java @@ -32,7 +32,7 @@ public final V walk(V visitor) { final void checkSize() { if (size() < 0) { throw new IllegalArgumentException( - String.format("head must have a non-negative size: %d", size())); + String.format("head must have a non-negative size: %d", size())); } } } diff --git a/qst/src/main/java/io/deephaven/qst/table/JoinBase.java b/qst/src/main/java/io/deephaven/qst/table/JoinBase.java index a8609a88f56..514b2101d77 100644 --- a/qst/src/main/java/io/deephaven/qst/table/JoinBase.java +++ b/qst/src/main/java/io/deephaven/qst/table/JoinBase.java @@ -8,9 +8,9 @@ public abstract class JoinBase extends TableBase implements Join { @Check final void checkAdditions() { 
if (additions().stream().map(JoinAddition::newColumn).distinct().count() != additions() - .size()) { + .size()) { throw new IllegalArgumentException( - "Invalid join additions, must not use the same output column multiple times."); + "Invalid join additions, must not use the same output column multiple times."); } } } diff --git a/qst/src/main/java/io/deephaven/qst/table/JoinTable.java b/qst/src/main/java/io/deephaven/qst/table/JoinTable.java index 371681f9e9e..017c3ff4c97 100644 --- a/qst/src/main/java/io/deephaven/qst/table/JoinTable.java +++ b/qst/src/main/java/io/deephaven/qst/table/JoinTable.java @@ -15,8 +15,7 @@ public abstract class JoinTable extends JoinBase { /** - * The number of {@link #reserveBits() reserve bits} to use when it is not explicitly set during - * building. + * The number of {@link #reserveBits() reserve bits} to use when it is not explicitly set during building. * *

    * By default, is 10. Can be changed with system property {@code JoinTable.reserveBits}. diff --git a/qst/src/main/java/io/deephaven/qst/table/LabeledTable.java b/qst/src/main/java/io/deephaven/qst/table/LabeledTable.java index 3e4b60496cc..176edd055a5 100644 --- a/qst/src/main/java/io/deephaven/qst/table/LabeledTable.java +++ b/qst/src/main/java/io/deephaven/qst/table/LabeledTable.java @@ -26,7 +26,7 @@ public static LabeledTable of(String label, TableSpec table) { final void checkNotEmpty() { if (label().isEmpty()) { throw new IllegalArgumentException( - "label is empty, must provide non-empty label for LabeledTable"); + "label is empty, must provide non-empty label for LabeledTable"); } } } diff --git a/qst/src/main/java/io/deephaven/qst/table/MergeTable.java b/qst/src/main/java/io/deephaven/qst/table/MergeTable.java index 65a6807b4ec..3c44b9bbe08 100644 --- a/qst/src/main/java/io/deephaven/qst/table/MergeTable.java +++ b/qst/src/main/java/io/deephaven/qst/table/MergeTable.java @@ -10,16 +10,14 @@ * Concatenates multiple tables into a single table. * *

    - * The resultant table will have rows from the same table together, in the order they are specified - * as inputs. + * The resultant table will have rows from the same table together, in the order they are specified as inputs. * *

    - * When ticking tables grow, they may run out of the 'pre-allocated' space for newly added rows. - * When more key- space is needed, tables in higher key-space are shifted to yet higher key-space to - * make room for new rows. Shifts are handled efficiently, but some downstream operations generate a - * linear O(n) amount of work per shifted row. When possible, one should favor ordering the - * constituent tables first by static/non-ticking sources followed by tables that are expected to - * grow at slower rates, and finally by tables that grow without bound. + * When ticking tables grow, they may run out of the 'pre-allocated' space for newly added rows. When more key- space is + * needed, tables in higher key-space are shifted to yet higher key-space to make room for new rows. Shifts are handled + * efficiently, but some downstream operations generate a linear O(n) amount of work per shifted row. When possible, one + * should favor ordering the constituent tables first by static/non-ticking sources followed by tables that are expected + * to grow at slower rates, and finally by tables that grow without bound. */ @Immutable @NodeStyle diff --git a/qst/src/main/java/io/deephaven/qst/table/NewTable.java b/qst/src/main/java/io/deephaven/qst/table/NewTable.java index 8f99c5680e4..3a5d06c124e 100644 --- a/qst/src/main/java/io/deephaven/qst/table/NewTable.java +++ b/qst/src/main/java/io/deephaven/qst/table/NewTable.java @@ -16,8 +16,7 @@ import java.util.function.Function; /** - * A new table is a list of {@link Column columns} of equal size. Each column will have a distinct - * name. + * A new table is a list of {@link Column columns} of equal size. Each column will have a distinct name. 
*/ @Immutable @LeafStyle @@ -56,7 +55,7 @@ default NewTable newTable() { Iterator> it = iterator(); if (!it.hasNext()) { throw new IllegalArgumentException( - String.format("Unable to use %s without any columns", Buildable.class)); + String.format("Unable to use %s without any columns", Buildable.class)); } Column first = it.next(); Builder builder = builder().size(first.size()).addColumns(first); @@ -147,7 +146,7 @@ public final void forEach(Consumer> action) { @Override public final Spliterator> spliterator() { return columns().entrySet().stream() - .map((Function>, Column>) NewTable::adapt).spliterator(); + .map((Function>, Column>) NewTable::adapt).spliterator(); } private static Column adapt(Entry> e) { diff --git a/qst/src/main/java/io/deephaven/qst/table/ParentsVisitor.java b/qst/src/main/java/io/deephaven/qst/table/ParentsVisitor.java index e9b33fbf0f3..4258a6750ed 100644 --- a/qst/src/main/java/io/deephaven/qst/table/ParentsVisitor.java +++ b/qst/src/main/java/io/deephaven/qst/table/ParentsVisitor.java @@ -31,9 +31,8 @@ public static Stream getParents(TableSpec table) { * Create a post-order set from {@code tables}. * *

    - * Post-order means that for any given table, the table's dependencies will come before the - * table itself. There may be multiple valid post-orderings; callers should not rely on a - * specific post-ordering. + * Post-order means that for any given table, the table's dependencies will come before the table itself. There may + * be multiple valid post-orderings; callers should not rely on a specific post-ordering. * * @param tables the tables * @return the post-order set @@ -49,9 +48,8 @@ public static Set postOrder(Iterable tables) { * Create a de-duplicated, post-order list from {@code tables}. * *

    - * Post-order means that for any given table, the table's dependencies will come before the - * table itself. There may be multiple valid post-orderings; callers should not rely on a - * specific post-ordering. + * Post-order means that for any given table, the table's dependencies will come before the table itself. There may + * be multiple valid post-orderings; callers should not rely on a specific post-ordering. * * @param tables the tables * @return the de-duplicated, post-order list diff --git a/qst/src/main/java/io/deephaven/qst/table/TableBase.java b/qst/src/main/java/io/deephaven/qst/table/TableBase.java index 197ca20e341..790ead9f16b 100644 --- a/qst/src/main/java/io/deephaven/qst/table/TableBase.java +++ b/qst/src/main/java/io/deephaven/qst/table/TableBase.java @@ -48,9 +48,9 @@ public final SnapshotTable snapshot(TableSpec baseTable, String... stampColumns) @Override public final SnapshotTable snapshot(TableSpec baseTable, boolean doInitialSnapshot, - String... stampColumns) { + String... stampColumns) { SnapshotTable.Builder builder = SnapshotTable.builder().trigger(this).base(baseTable) - .doInitialSnapshot(doInitialSnapshot); + .doInitialSnapshot(doInitialSnapshot); for (String stampColumn : stampColumns) { builder.addStampColumns(ColumnName.of(stampColumn)); } @@ -59,9 +59,9 @@ public final SnapshotTable snapshot(TableSpec baseTable, boolean doInitialSnapsh @Override public final SnapshotTable snapshot(TableSpec baseTable, boolean doInitialSnapshot, - Collection stampColumns) { + Collection stampColumns) { return SnapshotTable.builder().trigger(this).base(baseTable) - .doInitialSnapshot(doInitialSnapshot).addAllStampColumns(stampColumns).build(); + .doInitialSnapshot(doInitialSnapshot).addAllStampColumns(stampColumns).build(); } @Override @@ -112,9 +112,9 @@ public final WhereInTable whereIn(TableSpec rightTable, String... 
columnsToMatch @Override public final WhereInTable whereIn(TableSpec rightTable, - Collection columnsToMatch) { + Collection columnsToMatch) { return WhereInTable.builder().left(this).right(rightTable).addAllMatches(columnsToMatch) - .build(); + .build(); } @Override @@ -128,17 +128,17 @@ public final WhereNotInTable whereNotIn(TableSpec rightTable, String... columnsT @Override public final WhereNotInTable whereNotIn(TableSpec rightTable, - Collection columnsToMatch) { + Collection columnsToMatch) { return WhereNotInTable.builder().left(this).right(rightTable).addAllMatches(columnsToMatch) - .build(); + .build(); } @Override public final NaturalJoinTable naturalJoin(TableSpec rightTable, - Collection columnsToMatch, - Collection columnsToAdd) { + Collection columnsToMatch, + Collection columnsToAdd) { return NaturalJoinTable.builder().left(this).right(rightTable).addAllMatches(columnsToMatch) - .addAllAdditions(columnsToAdd).build(); + .addAllAdditions(columnsToAdd).build(); } @Override @@ -152,7 +152,7 @@ public final NaturalJoinTable naturalJoin(TableSpec rightTable, String columnsTo @Override public final NaturalJoinTable naturalJoin(TableSpec rightTable, String columnsToMatch, - String columnsToAdd) { + String columnsToAdd) { NaturalJoinTable.Builder builder = NaturalJoinTable.builder().left(this).right(rightTable); for (String match : split(columnsToMatch)) { builder.addMatches(JoinMatch.parse(match)); @@ -165,10 +165,10 @@ public final NaturalJoinTable naturalJoin(TableSpec rightTable, String columnsTo @Override public final ExactJoinTable exactJoin(TableSpec rightTable, - Collection columnsToMatch, - Collection columnsToAdd) { + Collection columnsToMatch, + Collection columnsToAdd) { return ExactJoinTable.builder().left(this).right(rightTable).addAllMatches(columnsToMatch) - .addAllAdditions(columnsToAdd).build(); + .addAllAdditions(columnsToAdd).build(); } @Override @@ -182,7 +182,7 @@ public final ExactJoinTable exactJoin(TableSpec rightTable, String 
columnsToMatc @Override public final ExactJoinTable exactJoin(TableSpec rightTable, String columnsToMatch, - String columnsToAdd) { + String columnsToAdd) { ExactJoinTable.Builder builder = ExactJoinTable.builder().left(this).right(rightTable); for (String match : split(columnsToMatch)) { builder.addMatches(JoinMatch.parse(match)); @@ -195,17 +195,17 @@ public final ExactJoinTable exactJoin(TableSpec rightTable, String columnsToMatc @Override public JoinTable join(TableSpec rightTable, Collection columnsToMatch, - Collection columnsToAdd) { + Collection columnsToAdd) { return JoinTable.builder().left(this).right(rightTable).addAllMatches(columnsToMatch) - .addAllAdditions(columnsToAdd).build(); + .addAllAdditions(columnsToAdd).build(); } @Override public final JoinTable join(TableSpec rightTable, - Collection columnsToMatch, - Collection columnsToAdd, int reserveBits) { + Collection columnsToMatch, + Collection columnsToAdd, int reserveBits) { return JoinTable.builder().left(this).right(rightTable).addAllMatches(columnsToMatch) - .addAllAdditions(columnsToAdd).reserveBits(reserveBits).build(); + .addAllAdditions(columnsToAdd).reserveBits(reserveBits).build(); } @Override @@ -240,7 +240,7 @@ public final LeftJoinTable leftJoin(TableSpec rightTable, String columnsToMatch) @Override public final LeftJoinTable leftJoin(TableSpec rightTable, String columnsToMatch, - String columnsToAdd) { + String columnsToAdd) { LeftJoinTable.Builder builder = LeftJoinTable.builder().left(this).right(rightTable); for (String match : split(columnsToMatch)) { builder.addMatches(JoinMatch.parse(match)); @@ -253,10 +253,10 @@ public final LeftJoinTable leftJoin(TableSpec rightTable, String columnsToMatch, @Override public final LeftJoinTable leftJoin(TableSpec rightTable, - Collection columnsToMatch, - Collection columnsToAdd) { + Collection columnsToMatch, + Collection columnsToAdd) { return LeftJoinTable.builder().left(this).right(rightTable).addAllMatches(columnsToMatch) - 
.addAllAdditions(columnsToAdd).build(); + .addAllAdditions(columnsToAdd).build(); } @Override @@ -270,7 +270,7 @@ public final AsOfJoinTable aj(TableSpec rightTable, String columnsToMatch) { @Override public final AsOfJoinTable aj(TableSpec rightTable, String columnsToMatch, - String columnsToAdd) { + String columnsToAdd) { AsOfJoinTable.Builder builder = AsOfJoinTable.builder().left(this).right(rightTable); for (String match : split(columnsToMatch)) { builder.addMatches(JoinMatch.parse(match)); @@ -283,24 +283,24 @@ public final AsOfJoinTable aj(TableSpec rightTable, String columnsToMatch, @Override public final AsOfJoinTable aj(TableSpec rightTable, - Collection columnsToMatch, - Collection columnsToAdd) { + Collection columnsToMatch, + Collection columnsToAdd) { return AsOfJoinTable.builder().left(this).right(rightTable).addAllMatches(columnsToMatch) - .addAllAdditions(columnsToAdd).build(); + .addAllAdditions(columnsToAdd).build(); } @Override public final AsOfJoinTable aj(TableSpec rightTable, - Collection columnsToMatch, - Collection columnsToAdd, AsOfJoinRule asOfJoinRule) { + Collection columnsToMatch, + Collection columnsToAdd, AsOfJoinRule asOfJoinRule) { return AsOfJoinTable.builder().left(this).right(rightTable).addAllMatches(columnsToMatch) - .addAllAdditions(columnsToAdd).rule(asOfJoinRule).build(); + .addAllAdditions(columnsToAdd).rule(asOfJoinRule).build(); } @Override public final ReverseAsOfJoinTable raj(TableSpec rightTable, String columnsToMatch) { ReverseAsOfJoinTable.Builder builder = - ReverseAsOfJoinTable.builder().left(this).right(rightTable); + ReverseAsOfJoinTable.builder().left(this).right(rightTable); for (String match : split(columnsToMatch)) { builder.addMatches(JoinMatch.parse(match)); } @@ -309,9 +309,9 @@ public final ReverseAsOfJoinTable raj(TableSpec rightTable, String columnsToMatc @Override public final ReverseAsOfJoinTable raj(TableSpec rightTable, String columnsToMatch, - String columnsToAdd) { + String columnsToAdd) { 
ReverseAsOfJoinTable.Builder builder = - ReverseAsOfJoinTable.builder().left(this).right(rightTable); + ReverseAsOfJoinTable.builder().left(this).right(rightTable); for (String match : split(columnsToMatch)) { builder.addMatches(JoinMatch.parse(match)); } @@ -323,19 +323,19 @@ public final ReverseAsOfJoinTable raj(TableSpec rightTable, String columnsToMatc @Override public final ReverseAsOfJoinTable raj(TableSpec rightTable, - Collection columnsToMatch, - Collection columnsToAdd) { + Collection columnsToMatch, + Collection columnsToAdd) { return ReverseAsOfJoinTable.builder().left(this).right(rightTable) - .addAllMatches(columnsToMatch).addAllAdditions(columnsToAdd).build(); + .addAllMatches(columnsToMatch).addAllAdditions(columnsToAdd).build(); } @Override public final ReverseAsOfJoinTable raj(TableSpec rightTable, - Collection columnsToMatch, - Collection columnsToAdd, ReverseAsOfJoinRule reverseAsOfJoinRule) { + Collection columnsToMatch, + Collection columnsToAdd, ReverseAsOfJoinRule reverseAsOfJoinRule) { return ReverseAsOfJoinTable.builder().left(this).right(rightTable) - .addAllMatches(columnsToMatch).addAllAdditions(columnsToAdd).rule(reverseAsOfJoinRule) - .build(); + .addAllMatches(columnsToMatch).addAllAdditions(columnsToAdd).rule(reverseAsOfJoinRule) + .build(); } @Override @@ -415,9 +415,9 @@ public final ByTable by(Collection groupByColumns) { @Override public final AggregationTable by(Collection groupByColumns, - Collection aggregations) { + Collection aggregations) { return AggregationTable.builder().parent(this).addAllColumns(groupByColumns) - .addAllAggregations(aggregations).build(); + .addAllAggregations(aggregations).build(); } @Override @@ -429,7 +429,7 @@ public final String toString() { private static Collection split(String string) { return string.trim().isEmpty() ? 
Collections.emptyList() - : Arrays.stream(string.split(",")).map(String::trim).filter(s -> !s.isEmpty()) - .collect(Collectors.toList()); + : Arrays.stream(string.split(",")).map(String::trim).filter(s -> !s.isEmpty()) + .collect(Collectors.toList()); } } diff --git a/qst/src/main/java/io/deephaven/qst/table/TableCreatorImpl.java b/qst/src/main/java/io/deephaven/qst/table/TableCreatorImpl.java index 156d375d697..0e0d0c58d23 100644 --- a/qst/src/main/java/io/deephaven/qst/table/TableCreatorImpl.java +++ b/qst/src/main/java/io/deephaven/qst/table/TableCreatorImpl.java @@ -4,16 +4,15 @@ import io.deephaven.qst.TableCreator; /** - * A "no-op" table creator impl, based on the QST structure itself. Mainly useful for testing the - * equivalence for the {@link TableOperations} of {@link TableSpec}; but publicly available for - * functional completeness. + * A "no-op" table creator impl, based on the QST structure itself. Mainly useful for testing the equivalence for the + * {@link TableOperations} of {@link TableSpec}; but publicly available for functional completeness. */ public enum TableCreatorImpl implements TableCreator { INSTANCE; static TableSpec toTable(TableSpec table) { return TableCreator.create(INSTANCE, TableToOperationsImpl.INSTANCE, - OperationsToTableImpl.INSTANCE, table); + OperationsToTableImpl.INSTANCE, table); } @Override diff --git a/qst/src/main/java/io/deephaven/qst/table/TableHeader.java b/qst/src/main/java/io/deephaven/qst/table/TableHeader.java index 6064450433c..1ce867ef6d9 100644 --- a/qst/src/main/java/io/deephaven/qst/table/TableHeader.java +++ b/qst/src/main/java/io/deephaven/qst/table/TableHeader.java @@ -14,8 +14,7 @@ import java.util.function.Function; /** - * A table header is a list of {@link ColumnHeader column headers}. Each column header will have a - * distinct name. + * A table header is a list of {@link ColumnHeader column headers}. Each column header will have a distinct name. 
*/ @Immutable @BuildableStyle @@ -93,8 +92,8 @@ public final void forEach(Consumer> action) { @Override public final Spliterator> spliterator() { return headers().entrySet().stream() - .map((Function>, ColumnHeader>) TableHeader::adapt) - .spliterator(); + .map((Function>, ColumnHeader>) TableHeader::adapt) + .spliterator(); } private static ColumnHeader adapt(Entry> e) { diff --git a/qst/src/main/java/io/deephaven/qst/table/TableSpec.java b/qst/src/main/java/io/deephaven/qst/table/TableSpec.java index eb5dcbba5bf..998c7978b8d 100644 --- a/qst/src/main/java/io/deephaven/qst/table/TableSpec.java +++ b/qst/src/main/java/io/deephaven/qst/table/TableSpec.java @@ -17,12 +17,11 @@ import java.util.Collection; /** - * A table specification is a declarative description of a table query. Part of a "query syntax - * tree". + * A table specification is a declarative description of a table query. Part of a "query syntax tree". * *

    - * A table specification may be built-up explicitly via the individual implementation class build - * patterns, or may be built-up in a fluent-manner via the {@link TableOperations} interface. + * A table specification may be built-up explicitly via the individual implementation class build patterns, or may be + * built-up in a fluent-manner via the {@link TableOperations} interface. * *

    * A table specification can be "replayed" against the fluent interfaces, see @@ -64,15 +63,15 @@ static TableSpec merge(Collection tables) { */ static TableSpec file(Path path) throws IOException, ClassNotFoundException { try (InputStream in = Files.newInputStream(path); - BufferedInputStream buf = new BufferedInputStream(in); - ObjectInputStream oIn = new ObjectInputStream(buf)) { + BufferedInputStream buf = new BufferedInputStream(in); + ObjectInputStream oIn = new ObjectInputStream(buf)) { return (TableSpec) oIn.readObject(); } } /** - * The depth of the table is the maximum depth of its dependencies plus one. A table with no - * dependencies has a depth of zero. + * The depth of the table is the maximum depth of its dependencies plus one. A table with no dependencies has a + * depth of zero. * * @return the depth */ diff --git a/qst/src/main/java/io/deephaven/qst/table/TailTable.java b/qst/src/main/java/io/deephaven/qst/table/TailTable.java index c367cec3a6a..95e86077a0c 100644 --- a/qst/src/main/java/io/deephaven/qst/table/TailTable.java +++ b/qst/src/main/java/io/deephaven/qst/table/TailTable.java @@ -33,7 +33,7 @@ public final V walk(V visitor) { final void checkSize() { if (size() < 0) { throw new IllegalArgumentException( - String.format("tail must have a non-negative size: %d", size())); + String.format("tail must have a non-negative size: %d", size())); } } } diff --git a/qst/src/main/java/io/deephaven/qst/table/TimeTable.java b/qst/src/main/java/io/deephaven/qst/table/TimeTable.java index ab12dcf71d8..e693f8bc5d5 100644 --- a/qst/src/main/java/io/deephaven/qst/table/TimeTable.java +++ b/qst/src/main/java/io/deephaven/qst/table/TimeTable.java @@ -10,8 +10,8 @@ import java.util.UUID; /** - * A time table adds rows at a fixed {@link #interval() interval} with a - * {@link io.deephaven.qst.type.InstantType Timestamp} column. 
+ * A time table adds rows at a fixed {@link #interval() interval} with a {@link io.deephaven.qst.type.InstantType + * Timestamp} column. */ @Immutable @LeafStyle @@ -37,7 +37,7 @@ public static Builder builder() { */ public static TimeTable of(Duration interval) { return builder().timeProvider(TimeProviderSystem.INSTANCE).interval(interval) - .id(UUID.randomUUID()).build(); + .id(UUID.randomUUID()).build(); } /** @@ -49,7 +49,7 @@ public static TimeTable of(Duration interval) { */ public static TimeTable of(Duration interval, Instant startTime) { return builder().timeProvider(TimeProviderSystem.INSTANCE).interval(interval) - .startTime(startTime).id(ZERO_UUID).build(); + .startTime(startTime).id(ZERO_UUID).build(); } // Note: if new "of(...)" static methods are added here, they should likely be added to diff --git a/qst/src/main/java/io/deephaven/qst/type/ArrayTypeBase.java b/qst/src/main/java/io/deephaven/qst/type/ArrayTypeBase.java index af097a28134..612555008b9 100644 --- a/qst/src/main/java/io/deephaven/qst/type/ArrayTypeBase.java +++ b/qst/src/main/java/io/deephaven/qst/type/ArrayTypeBase.java @@ -1,7 +1,7 @@ package io.deephaven.qst.type; public abstract class ArrayTypeBase extends GenericTypeBase - implements ArrayType { + implements ArrayType { @Override public final V walk(V visitor) { diff --git a/qst/src/main/java/io/deephaven/qst/type/CustomType.java b/qst/src/main/java/io/deephaven/qst/type/CustomType.java index 90c4f1c632b..4e4780303bd 100644 --- a/qst/src/main/java/io/deephaven/qst/type/CustomType.java +++ b/qst/src/main/java/io/deephaven/qst/type/CustomType.java @@ -11,8 +11,8 @@ * A custom type {@link #clazz() class}. * *

    - * The {@link #clazz() class} must not be representable by a {@link Type#knownTypes() known type}, - * and must not be an array. + * The {@link #clazz() class} must not be representable by a {@link Type#knownTypes() known type}, and must not be an + * array. * * @param the type */ @@ -38,7 +38,7 @@ final void checkNotStatic() { final Optional> staticType = TypeHelper.findStatic(clazz()); if (staticType.isPresent()) { throw new IllegalArgumentException( - String.format("Use static type %s instead", staticType.get())); + String.format("Use static type %s instead", staticType.get())); } } @@ -46,7 +46,7 @@ final void checkNotStatic() { final void checkNotArray() { if (clazz().isArray()) { throw new IllegalArgumentException(String.format( - "Can't create an array type here, use '%s' instead", NativeArrayType.class)); + "Can't create an array type here, use '%s' instead", NativeArrayType.class)); } } diff --git a/qst/src/main/java/io/deephaven/qst/type/DbGenericArrayType.java b/qst/src/main/java/io/deephaven/qst/type/DbGenericArrayType.java index 742986b81fd..20ffd3956b8 100644 --- a/qst/src/main/java/io/deephaven/qst/type/DbGenericArrayType.java +++ b/qst/src/main/java/io/deephaven/qst/type/DbGenericArrayType.java @@ -9,7 +9,7 @@ public abstract class DbGenericArrayType extends ArrayTypeBase { public static DbGenericArrayType of(Class clazz, - GenericType genericType) { + GenericType genericType) { return ImmutableDbGenericArrayType.of(clazz, genericType); } diff --git a/qst/src/main/java/io/deephaven/qst/type/DbPrimitiveArrayType.java b/qst/src/main/java/io/deephaven/qst/type/DbPrimitiveArrayType.java index f3c6dd08157..ab5fb48d7f4 100644 --- a/qst/src/main/java/io/deephaven/qst/type/DbPrimitiveArrayType.java +++ b/qst/src/main/java/io/deephaven/qst/type/DbPrimitiveArrayType.java @@ -22,7 +22,7 @@ @Immutable @SimpleStyle public abstract class DbPrimitiveArrayType - extends ArrayTypeBase { + extends ArrayTypeBase { private static final String DB_BOOLEAN_ARRAY = 
"io.deephaven.db.tables.dbarrays.DbBooleanArray"; private static final String DB_BYTE_ARRAY = "io.deephaven.db.tables.dbarrays.DbByteArray"; @@ -33,16 +33,16 @@ public abstract class DbPrimitiveArrayType private static final String DB_FLOAT_ARRAY = "io.deephaven.db.tables.dbarrays.DbFloatArray"; private static final String DB_DOUBLE_ARRAY = "io.deephaven.db.tables.dbarrays.DbDoubleArray"; private static final Set VALID_CLASSES = - Stream.of(DB_BOOLEAN_ARRAY, DB_BYTE_ARRAY, DB_CHAR_ARRAY, DB_SHORT_ARRAY, DB_INT_ARRAY, - DB_LONG_ARRAY, DB_FLOAT_ARRAY, DB_DOUBLE_ARRAY).collect(Collectors.toSet()); + Stream.of(DB_BOOLEAN_ARRAY, DB_BYTE_ARRAY, DB_CHAR_ARRAY, DB_SHORT_ARRAY, DB_INT_ARRAY, + DB_LONG_ARRAY, DB_FLOAT_ARRAY, DB_DOUBLE_ARRAY).collect(Collectors.toSet()); public static DbPrimitiveArrayType of(Class clazz, - PrimitiveType primitiveType) { + PrimitiveType primitiveType) { return ImmutableDbPrimitiveArrayType.of(clazz, primitiveType); } static List> types() throws ClassNotFoundException, - NoSuchMethodException, IllegalAccessException, InvocationTargetException { + NoSuchMethodException, IllegalAccessException, InvocationTargetException { List> out = new ArrayList<>(VALID_CLASSES.size()); for (String className : VALID_CLASSES) { out.add(invokeTypeMethod(className)); @@ -51,8 +51,8 @@ public static DbPrimitiveArrayType of(Class } private static DbPrimitiveArrayType invokeTypeMethod( - String className) throws ClassNotFoundException, NoSuchMethodException, - InvocationTargetException, IllegalAccessException { + String className) throws ClassNotFoundException, NoSuchMethodException, + InvocationTargetException, IllegalAccessException { final Class clazz = Class.forName(className); final Method method = clazz.getDeclaredMethod("type"); // noinspection rawtypes,unchecked @@ -75,7 +75,7 @@ public final V walk(V visitor) { final void checkClazz() { if (!VALID_CLASSES.contains(clazz().getName())) { throw new IllegalArgumentException(String.format("Class '%s' is not a 
valid '%s'", - clazz(), DbPrimitiveArrayType.class)); + clazz(), DbPrimitiveArrayType.class)); } } } diff --git a/qst/src/main/java/io/deephaven/qst/type/NativeArrayType.java b/qst/src/main/java/io/deephaven/qst/type/NativeArrayType.java index 9f31860bd87..f9a9babdf38 100644 --- a/qst/src/main/java/io/deephaven/qst/type/NativeArrayType.java +++ b/qst/src/main/java/io/deephaven/qst/type/NativeArrayType.java @@ -26,12 +26,12 @@ public abstract class NativeArrayType extends ArrayTypeBase NativeArrayType of(Class arrayType, - Type componentType) { + Type componentType) { return ImmutableNativeArrayType.of(arrayType, componentType); } public static NativeArrayType toArrayType( - GenericType type) { + GenericType type) { // Note: in Java 12+, we can use Class#arrayType() final Class clazz = Array.newInstance(type.clazz(), 0).getClass(); return NativeArrayType.of(clazz, type); diff --git a/qst/src/main/java/io/deephaven/qst/type/Type.java b/qst/src/main/java/io/deephaven/qst/type/Type.java index 3963b5946f0..26322f04b9c 100644 --- a/qst/src/main/java/io/deephaven/qst/type/Type.java +++ b/qst/src/main/java/io/deephaven/qst/type/Type.java @@ -13,8 +13,8 @@ public interface Type { /** - * Finds the {@link #knownTypes() known type}, or else creates the relevant - * {@link NativeArrayType native array type} or {@link CustomType custom type}. + * Finds the {@link #knownTypes() known type}, or else creates the relevant {@link NativeArrayType native array + * type} or {@link CustomType custom type}. * * @param clazz the class * @param the generic type of {@code clazz} @@ -32,9 +32,8 @@ static Type find(Class clazz) { } /** - * The list of known types. Includes the universe of {@link PrimitiveType primitive types} and - * {@link GenericType generic types} minus {@link CustomType custom types} and {@link ArrayType - * array types}. + * The list of known types. 
Includes the universe of {@link PrimitiveType primitive types} and {@link GenericType + * generic types} minus {@link CustomType custom types} and {@link ArrayType array types}. * * @return the list of known types */ diff --git a/qst/src/main/java/io/deephaven/qst/type/TypeHelper.java b/qst/src/main/java/io/deephaven/qst/type/TypeHelper.java index ac689aba74c..bba2c1d19c0 100644 --- a/qst/src/main/java/io/deephaven/qst/type/TypeHelper.java +++ b/qst/src/main/java/io/deephaven/qst/type/TypeHelper.java @@ -28,20 +28,20 @@ static List> knownTypes() { static Stream> primitiveTypes() { return Stream.of(BooleanType.instance(), ByteType.instance(), CharType.instance(), - ShortType.instance(), IntType.instance(), LongType.instance(), FloatType.instance(), - DoubleType.instance()); + ShortType.instance(), IntType.instance(), LongType.instance(), FloatType.instance(), + DoubleType.instance()); } static Stream> genericTypes() { return Stream.concat(Stream.of(StringType.instance(), InstantType.instance()), - dbPrimitiveArrayTypes()); + dbPrimitiveArrayTypes()); } static Stream> dbPrimitiveArrayTypes() { try { return DbPrimitiveArrayType.types().stream(); } catch (ClassNotFoundException | InvocationTargetException | NoSuchMethodException - | IllegalAccessException e) { + | IllegalAccessException e) { return Stream.empty(); } } @@ -141,7 +141,7 @@ public void visit(ArrayType arrayType) { @Override public void visit(NativeArrayType nativeArrayType) { throw new IllegalArgumentException( - "Native array types should not be created statically, they will be found dynamically"); + "Native array types should not be created statically, they will be found dynamically"); } @Override @@ -153,7 +153,7 @@ public void visit(DbPrimitiveArrayType dbArrayPrimitiveType) { public void visit(DbGenericArrayType dbGenericArrayType) { // The db array type by itself is not specific enough throw new IllegalStateException( - "Should not be adding DbGenericArrayType as static mapping"); + "Should not be 
adding DbGenericArrayType as static mapping"); } }); } diff --git a/qst/src/test/java/io/deephaven/qst/array/BooleanArrayTest.java b/qst/src/test/java/io/deephaven/qst/array/BooleanArrayTest.java index 5172b5e4570..357c11a6dc0 100644 --- a/qst/src/test/java/io/deephaven/qst/array/BooleanArrayTest.java +++ b/qst/src/test/java/io/deephaven/qst/array/BooleanArrayTest.java @@ -9,12 +9,12 @@ public class BooleanArrayTest { @Test void boxInRawOut() { assertThat(BooleanArray.of(false, null, true).values()).containsExactly(Util.FALSE_BOOL, - Util.NULL_BOOL, Util.TRUE_BOOL); + Util.NULL_BOOL, Util.TRUE_BOOL); } @Test void rawInRawOut() { assertThat(BooleanArray.ofUnsafe(Util.FALSE_BOOL, Util.NULL_BOOL, Util.TRUE_BOOL).values()) - .containsExactly(Util.FALSE_BOOL, Util.NULL_BOOL, Util.TRUE_BOOL); + .containsExactly(Util.FALSE_BOOL, Util.NULL_BOOL, Util.TRUE_BOOL); } } diff --git a/qst/src/test/java/io/deephaven/qst/array/ByteArrayTest.java b/qst/src/test/java/io/deephaven/qst/array/ByteArrayTest.java index e5751625c36..2b1d638c9ee 100644 --- a/qst/src/test/java/io/deephaven/qst/array/ByteArrayTest.java +++ b/qst/src/test/java/io/deephaven/qst/array/ByteArrayTest.java @@ -9,12 +9,12 @@ public class ByteArrayTest { @Test void boxInRawOut() { assertThat(ByteArray.of((byte) 1, null, (byte) 3).values()).containsExactly((byte) 1, - Util.NULL_BYTE, (byte) 3); + Util.NULL_BYTE, (byte) 3); } @Test void rawInRawOut() { assertThat(ByteArray.ofUnsafe((byte) 1, Util.NULL_BYTE, (byte) 3).values()) - .containsExactly((byte) 1, Util.NULL_BYTE, (byte) 3); + .containsExactly((byte) 1, Util.NULL_BYTE, (byte) 3); } } diff --git a/qst/src/test/java/io/deephaven/qst/array/CharArrayTest.java b/qst/src/test/java/io/deephaven/qst/array/CharArrayTest.java index ae5efcb2986..29a4d84d2c3 100644 --- a/qst/src/test/java/io/deephaven/qst/array/CharArrayTest.java +++ b/qst/src/test/java/io/deephaven/qst/array/CharArrayTest.java @@ -14,6 +14,6 @@ void boxInRawOut() { @Test void rawInRawOut() { 
assertThat(CharArray.ofUnsafe('1', Util.NULL_CHAR, '3').values()).containsExactly('1', - Util.NULL_CHAR, '3'); + Util.NULL_CHAR, '3'); } } diff --git a/qst/src/test/java/io/deephaven/qst/array/DoubleArrayTest.java b/qst/src/test/java/io/deephaven/qst/array/DoubleArrayTest.java index 866c5c98497..462151e75cb 100644 --- a/qst/src/test/java/io/deephaven/qst/array/DoubleArrayTest.java +++ b/qst/src/test/java/io/deephaven/qst/array/DoubleArrayTest.java @@ -14,7 +14,7 @@ void boxInRawOut() { @Test void rawInRawOut() { assertThat(DoubleArray.ofUnsafe(1d, Util.NULL_DOUBLE, 3d).values()).containsExactly(1d, - Util.NULL_DOUBLE, 3d); + Util.NULL_DOUBLE, 3d); } } diff --git a/qst/src/test/java/io/deephaven/qst/array/FloatArrayTest.java b/qst/src/test/java/io/deephaven/qst/array/FloatArrayTest.java index 667a342f152..151239cf2cd 100644 --- a/qst/src/test/java/io/deephaven/qst/array/FloatArrayTest.java +++ b/qst/src/test/java/io/deephaven/qst/array/FloatArrayTest.java @@ -14,6 +14,6 @@ void boxInRawOut() { @Test void rawInRawOut() { assertThat(FloatArray.ofUnsafe(1f, Util.NULL_FLOAT, 3f).values()).containsExactly(1f, - Util.NULL_FLOAT, 3f); + Util.NULL_FLOAT, 3f); } } diff --git a/qst/src/test/java/io/deephaven/qst/array/IntArrayTest.java b/qst/src/test/java/io/deephaven/qst/array/IntArrayTest.java index b76f5270e4b..446b7d2cd6a 100644 --- a/qst/src/test/java/io/deephaven/qst/array/IntArrayTest.java +++ b/qst/src/test/java/io/deephaven/qst/array/IntArrayTest.java @@ -14,6 +14,6 @@ void boxInRawOut() { @Test void rawInRawOut() { assertThat(IntArray.ofUnsafe(1, Util.NULL_INT, 3).values()).containsExactly(1, - Util.NULL_INT, 3); + Util.NULL_INT, 3); } } diff --git a/qst/src/test/java/io/deephaven/qst/array/LongArrayTest.java b/qst/src/test/java/io/deephaven/qst/array/LongArrayTest.java index 0be57dc56ad..8afee5409d0 100644 --- a/qst/src/test/java/io/deephaven/qst/array/LongArrayTest.java +++ b/qst/src/test/java/io/deephaven/qst/array/LongArrayTest.java @@ -14,6 +14,6 @@ void 
boxInRawOut() { @Test void rawInRawOut() { assertThat(LongArray.ofUnsafe(1L, Util.NULL_LONG, 3L).values()).containsExactly(1L, - Util.NULL_LONG, 3L); + Util.NULL_LONG, 3L); } } diff --git a/qst/src/test/java/io/deephaven/qst/array/ShortArrayTest.java b/qst/src/test/java/io/deephaven/qst/array/ShortArrayTest.java index 51c2bcaefd4..cf8580d1915 100644 --- a/qst/src/test/java/io/deephaven/qst/array/ShortArrayTest.java +++ b/qst/src/test/java/io/deephaven/qst/array/ShortArrayTest.java @@ -9,12 +9,12 @@ public class ShortArrayTest { @Test void boxInRawOut() { assertThat(ShortArray.of((short) 1, null, (short) 3).values()).containsExactly((short) 1, - Util.NULL_SHORT, (short) 3); + Util.NULL_SHORT, (short) 3); } @Test void rawInRawOut() { assertThat(ShortArray.ofUnsafe((short) 1, Util.NULL_SHORT, (short) 3).values()) - .containsExactly((short) 1, Util.NULL_SHORT, (short) 3); + .containsExactly((short) 1, Util.NULL_SHORT, (short) 3); } } diff --git a/qst/src/test/java/io/deephaven/qst/column/header/ColumnHeadersTest.java b/qst/src/test/java/io/deephaven/qst/column/header/ColumnHeadersTest.java index 5f545a1b47b..cf764d68f12 100644 --- a/qst/src/test/java/io/deephaven/qst/column/header/ColumnHeadersTest.java +++ b/qst/src/test/java/io/deephaven/qst/column/header/ColumnHeadersTest.java @@ -34,13 +34,13 @@ public class ColumnHeadersTest { private static final ColumnHeader HEADER_STRING = ColumnHeader.ofString("String"); private static final ColumnHeader HEADER_INSTANT = ColumnHeader.ofInstant("Instant"); private static final ColumnHeader HEADER_CUSTOM = - ColumnHeader.of("Custom", CustomType.of(Custom.class)); + ColumnHeader.of("Custom", CustomType.of(Custom.class)); @Test void h11Header() { assertThat(h11().tableHeader()).isEqualTo(TableHeader.of(HEADER_BOOLEAN, HEADER_BYTE, - HEADER_CHAR, HEADER_SHORT, HEADER_INT, HEADER_LONG, HEADER_FLOAT, HEADER_DOUBLE, - HEADER_STRING, HEADER_INSTANT, HEADER_CUSTOM)); + HEADER_CHAR, HEADER_SHORT, HEADER_INT, HEADER_LONG, HEADER_FLOAT, 
HEADER_DOUBLE, + HEADER_STRING, HEADER_INSTANT, HEADER_CUSTOM)); } @Test diff --git a/qst/src/test/java/io/deephaven/qst/examples/EmployeesExample.java b/qst/src/test/java/io/deephaven/qst/examples/EmployeesExample.java index a501df0c42b..6873677969c 100644 --- a/qst/src/test/java/io/deephaven/qst/examples/EmployeesExample.java +++ b/qst/src/test/java/io/deephaven/qst/examples/EmployeesExample.java @@ -8,20 +8,20 @@ public class EmployeesExample { public static NewTable employees() { return ColumnHeader - .of(ColumnHeader.ofString("LastName"), ColumnHeader.ofInt("DeptId"), - ColumnHeader.ofString("Telephone")) - .row("Rafferty", 31, "(347) 555-0123").row("Jones", 33, "(917) 555-0198") - .row("Steiner", 33, "(212) 555-0167").row("Robins", 34, "(952) 555-0110") - .row("Smith", 34, null).row("Rogers", null, null).newTable(); + .of(ColumnHeader.ofString("LastName"), ColumnHeader.ofInt("DeptId"), + ColumnHeader.ofString("Telephone")) + .row("Rafferty", 31, "(347) 555-0123").row("Jones", 33, "(917) 555-0198") + .row("Steiner", 33, "(212) 555-0167").row("Robins", 34, "(952) 555-0110") + .row("Smith", 34, null).row("Rogers", null, null).newTable(); } public static NewTable departments() { return ColumnHeader - .of(ColumnHeader.ofInt("DeptId"), ColumnHeader.ofString("DeptName"), - ColumnHeader.ofString("Telephone")) - .row(31, "Sales", "(646) 555-0134").row(33, "Engineering", "(646) 555-0178") - .row(34, "Clerical", "(646) 555-0159").row(35, "Marketing", "(212) 555-0111") - .newTable(); + .of(ColumnHeader.ofInt("DeptId"), ColumnHeader.ofString("DeptName"), + ColumnHeader.ofString("Telephone")) + .row(31, "Sales", "(646) 555-0134").row(33, "Engineering", "(646) 555-0178") + .row(34, "Clerical", "(646) 555-0159").row(35, "Marketing", "(212) 555-0111") + .newTable(); } public static JoinTable joined() { diff --git a/qst/src/test/java/io/deephaven/qst/table/ParentsVisitorTest.java b/qst/src/test/java/io/deephaven/qst/table/ParentsVisitorTest.java index cd765364b08..eeb2fd3e084 
100644 --- a/qst/src/test/java/io/deephaven/qst/table/ParentsVisitorTest.java +++ b/qst/src/test/java/io/deephaven/qst/table/ParentsVisitorTest.java @@ -128,8 +128,7 @@ void deepWalkAllShuffled() { } /** - * This is specifically designed to break implementations that don't have already-visited - * checks. + * This is specifically designed to break implementations that don't have already-visited checks. */ @Test void heavilyBranchedWalk() { @@ -206,11 +205,11 @@ private static Iterable tables() { List heavilyBranchedTable = createHeavilyBranchedTable(HEAVILY_BRANCHED_SIZE); return () -> Stream - .concat( - Stream.of(S4, heavilyBranchedTable.get(heavilyBranchedTable.size() - 1), - deepWalk.get(deepWalk.size() - 1)), - TableCreatorImplTest.createTables().stream()) - .iterator(); + .concat( + Stream.of(S4, heavilyBranchedTable.get(heavilyBranchedTable.size() - 1), + deepWalk.get(deepWalk.size() - 1)), + TableCreatorImplTest.createTables().stream()) + .iterator(); } private static void checkValidPostOrder(Iterable items) { @@ -218,14 +217,14 @@ private static void checkValidPostOrder(Iterable items) { for (TableSpec item : items) { boolean allDependenciesSatisfied = getParents(item).allMatch(visited::contains); assertThat(allDependenciesSatisfied).withFailMessage("items are not in post-order") - .isTrue(); + .isTrue(); assertThat(visited.add(item)).withFailMessage("items are not de-duplicated").isTrue(); } } /** - * This is a table that branches at every level except the leaf. Naive implementations may need - * to search every single path through the DAG; but that is not feasible (2^64 paths). + * This is a table that branches at every level except the leaf. Naive implementations may need to search every + * single path through the DAG; but that is not feasible (2^64 paths). 
*/ private static List createHeavilyBranchedTable(int size) { List out = new ArrayList<>(size + 1); @@ -264,12 +263,12 @@ private void canonicalOrder(Iterable inputs, Iterable expe } /** - * In general, a set of tables will have multiple valid post-orders. To check against a specific - * order, we should ensure that there is one canonical ordering. + * In general, a set of tables will have multiple valid post-orders. To check against a specific order, we should + * ensure that there is one canonical ordering. * *

    - * This is a check against adding an overly-specific test that depends on a specific - * post-ordering, which we should not do. + * This is a check against adding an overly-specific test that depends on a specific post-ordering, which we should + * not do. */ private static void checkIsCanonicalOrder(Iterable items) { TableSpec prev = null; diff --git a/qst/src/test/java/io/deephaven/qst/table/TableCreatorImplTest.java b/qst/src/test/java/io/deephaven/qst/table/TableCreatorImplTest.java index d37a89dd028..f1949da6b74 100644 --- a/qst/src/test/java/io/deephaven/qst/table/TableCreatorImplTest.java +++ b/qst/src/test/java/io/deephaven/qst/table/TableCreatorImplTest.java @@ -18,27 +18,27 @@ public class TableCreatorImplTest { // stricter query validation private static final List SOURCE_TABLES = - Arrays.asList(NewTable.of(Column.of("Foo", 0, -1, 1, 42, null, 1, Integer.MAX_VALUE)), - NewTable.of(Column.of("Bar", 0L, -1L, 1L, 42L, null, 1L, Long.MAX_VALUE)), - TableSpec.empty(0), TableSpec.empty(1), TableSpec.empty(100), TableSpec.empty(0)); + Arrays.asList(NewTable.of(Column.of("Foo", 0, -1, 1, 42, null, 1, Integer.MAX_VALUE)), + NewTable.of(Column.of("Bar", 0L, -1L, 1L, 42L, null, 1L, Long.MAX_VALUE)), + TableSpec.empty(0), TableSpec.empty(1), TableSpec.empty(100), TableSpec.empty(0)); private static final List> SINGLE_PARENT_OPS = - Arrays.asList(TableCreatorImplTest::head1, TableCreatorImplTest::headMax, - TableCreatorImplTest::tail1, TableCreatorImplTest::tailMax, - TableCreatorImplTest::whereFooEq1, TableCreatorImplTest::whereFooEqTest, - TableCreatorImplTest::whereFooIsNull, TableCreatorImplTest::viewFoo, - TableCreatorImplTest::viewFooPlus1, TableCreatorImplTest::viewFooEqBar, - TableCreatorImplTest::updateViewFoo, TableCreatorImplTest::updateViewFooPlus1, - TableCreatorImplTest::updateViewFooEqBar, TableCreatorImplTest::updateFoo, - TableCreatorImplTest::updateFooPlus1, TableCreatorImplTest::updateFooEqBar, - TableCreatorImplTest::selectFoo, 
TableCreatorImplTest::selectFooPlus1, - TableCreatorImplTest::selectFooEqBar, TableCreatorImplTest::selectAll); + Arrays.asList(TableCreatorImplTest::head1, TableCreatorImplTest::headMax, + TableCreatorImplTest::tail1, TableCreatorImplTest::tailMax, + TableCreatorImplTest::whereFooEq1, TableCreatorImplTest::whereFooEqTest, + TableCreatorImplTest::whereFooIsNull, TableCreatorImplTest::viewFoo, + TableCreatorImplTest::viewFooPlus1, TableCreatorImplTest::viewFooEqBar, + TableCreatorImplTest::updateViewFoo, TableCreatorImplTest::updateViewFooPlus1, + TableCreatorImplTest::updateViewFooEqBar, TableCreatorImplTest::updateFoo, + TableCreatorImplTest::updateFooPlus1, TableCreatorImplTest::updateFooEqBar, + TableCreatorImplTest::selectFoo, TableCreatorImplTest::selectFooPlus1, + TableCreatorImplTest::selectFooEqBar, TableCreatorImplTest::selectAll); private static final List> DUAL_TABLE_OPS = - Arrays.asList(TableCreatorImplTest::naturalJoin1, TableCreatorImplTest::naturalJoin2, - TableCreatorImplTest::naturalJoin3, TableCreatorImplTest::naturalJoin4, - TableCreatorImplTest::exactJoin1, TableCreatorImplTest::exactJoin2, - TableCreatorImplTest::exactJoin3, TableCreatorImplTest::exactJoin4); + Arrays.asList(TableCreatorImplTest::naturalJoin1, TableCreatorImplTest::naturalJoin2, + TableCreatorImplTest::naturalJoin3, TableCreatorImplTest::naturalJoin4, + TableCreatorImplTest::exactJoin1, TableCreatorImplTest::exactJoin2, + TableCreatorImplTest::exactJoin3, TableCreatorImplTest::exactJoin4); static TableSpec head1(TableSpec table) { diff --git a/qst/src/test/java/io/deephaven/qst/type/TypeTest.java b/qst/src/test/java/io/deephaven/qst/type/TypeTest.java index 04848bc9dfd..b7ef82829ff 100644 --- a/qst/src/test/java/io/deephaven/qst/type/TypeTest.java +++ b/qst/src/test/java/io/deephaven/qst/type/TypeTest.java @@ -137,40 +137,40 @@ void doubleArrayType() { @Test void nestedPrimitive2x() { assertThat(find(int[][].class)).isEqualTo( - NativeArrayType.of(int[][].class, 
NativeArrayType.of(int[].class, IntType.instance()))); + NativeArrayType.of(int[][].class, NativeArrayType.of(int[].class, IntType.instance()))); } @Test void nestedPrimitive3x() { assertThat(find(int[][][].class)) - .isEqualTo(NativeArrayType.of(int[][][].class, NativeArrayType.of(int[][].class, - NativeArrayType.of(int[].class, IntType.instance())))); + .isEqualTo(NativeArrayType.of(int[][][].class, NativeArrayType.of(int[][].class, + NativeArrayType.of(int[].class, IntType.instance())))); } @Test void nestedStatic2x() { assertThat(find(String[][].class)).isEqualTo(NativeArrayType.of(String[][].class, - NativeArrayType.of(String[].class, StringType.instance()))); + NativeArrayType.of(String[].class, StringType.instance()))); } @Test void nestedStatic3x() { assertThat(find(String[][][].class)) - .isEqualTo(NativeArrayType.of(String[][][].class, NativeArrayType.of(String[][].class, - NativeArrayType.of(String[].class, StringType.instance())))); + .isEqualTo(NativeArrayType.of(String[][][].class, NativeArrayType.of(String[][].class, + NativeArrayType.of(String[].class, StringType.instance())))); } @Test void nestedCustom2x() { assertThat(find(Custom[][].class)).isEqualTo(NativeArrayType.of(Custom[][].class, - NativeArrayType.of(Custom[].class, CustomType.of(Custom.class)))); + NativeArrayType.of(Custom[].class, CustomType.of(Custom.class)))); } @Test void nestedCustom3x() { assertThat(find(Custom[][][].class)) - .isEqualTo(NativeArrayType.of(Custom[][][].class, NativeArrayType.of(Custom[][].class, - NativeArrayType.of(Custom[].class, CustomType.of(Custom.class))))); + .isEqualTo(NativeArrayType.of(Custom[][][].class, NativeArrayType.of(Custom[][].class, + NativeArrayType.of(Custom[].class, CustomType.of(Custom.class))))); } @Test @@ -187,7 +187,7 @@ void nonEqualityCheck() { @Test void dbPrimitiveTypesAreEmpty() - throws NoSuchMethodException, IllegalAccessException, InvocationTargetException { + throws NoSuchMethodException, IllegalAccessException, 
InvocationTargetException { // Db primitive array types are not on the classpath as QST dependency, make sure // they are not found. try { diff --git a/style/eclipse-java-google-style.xml b/style/eclipse-java-google-style.xml index c6bc183b7ae..a8d58efd984 100644 --- a/style/eclipse-java-google-style.xml +++ b/style/eclipse-java-google-style.xml @@ -1,6 +1,6 @@ - - + + @@ -48,7 +48,7 @@ - + @@ -87,7 +87,7 @@ - + @@ -239,7 +239,7 @@ - + diff --git a/table-api/src/main/java/io/deephaven/annotations/BuildableStyle.java b/table-api/src/main/java/io/deephaven/annotations/BuildableStyle.java index 487c8a7ce28..bb5c39def31 100644 --- a/table-api/src/main/java/io/deephaven/annotations/BuildableStyle.java +++ b/table-api/src/main/java/io/deephaven/annotations/BuildableStyle.java @@ -9,14 +9,14 @@ import java.lang.annotation.Target; /** - * A simple style is for objects that are simple to build. Not recommended for objects with more - * than two fields. Not applicable for objects with default fields. + * A simple style is for objects that are simple to build. Not recommended for objects with more than two fields. Not + * applicable for objects with default fields. 
*/ @Target({ElementType.TYPE, ElementType.PACKAGE}) @Retention(RetentionPolicy.CLASS) @Value.Style(visibility = ImplementationVisibility.PACKAGE, - defaults = @Value.Immutable(copy = false), strictBuilder = true, weakInterning = true, - jdkOnly = true) + defaults = @Value.Immutable(copy = false), strictBuilder = true, weakInterning = true, + jdkOnly = true) public @interface BuildableStyle { // Note: this produces ImmutableX.builder()s for the implementation classes } diff --git a/table-api/src/main/java/io/deephaven/annotations/SimpleStyle.java b/table-api/src/main/java/io/deephaven/annotations/SimpleStyle.java index 075dd1ba508..a55b720d906 100644 --- a/table-api/src/main/java/io/deephaven/annotations/SimpleStyle.java +++ b/table-api/src/main/java/io/deephaven/annotations/SimpleStyle.java @@ -9,14 +9,14 @@ import java.lang.annotation.Target; /** - * A simple style is for objects that are simple to build. Not recommended for objects with more - * than two fields. Not applicable for objects with default fields. + * A simple style is for objects that are simple to build. Not recommended for objects with more than two fields. Not + * applicable for objects with default fields. 
*/ @Target({ElementType.TYPE, ElementType.PACKAGE}) @Retention(RetentionPolicy.CLASS) @Value.Style(visibility = ImplementationVisibility.PACKAGE, - defaults = @Value.Immutable(builder = false, copy = false), strictBuilder = true, - weakInterning = true, jdkOnly = true) + defaults = @Value.Immutable(builder = false, copy = false), strictBuilder = true, + weakInterning = true, jdkOnly = true) public @interface SimpleStyle { // Note: this produces ImmutableX.of() methods for the implementation classes } diff --git a/table-api/src/main/java/io/deephaven/api/ColumnName.java b/table-api/src/main/java/io/deephaven/api/ColumnName.java index 7aa79e5c028..4317c6f1e9b 100644 --- a/table-api/src/main/java/io/deephaven/api/ColumnName.java +++ b/table-api/src/main/java/io/deephaven/api/ColumnName.java @@ -17,7 +17,7 @@ @Immutable @SimpleStyle public abstract class ColumnName - implements Selectable, Value, Expression, Pair, JoinMatch, JoinAddition, Serializable { + implements Selectable, Value, Expression, Pair, JoinMatch, JoinAddition, Serializable { public static boolean isValidColumnName(String name) { try { diff --git a/table-api/src/main/java/io/deephaven/api/JoinAddition.java b/table-api/src/main/java/io/deephaven/api/JoinAddition.java index bb644ab6b77..d1e6b4d168a 100644 --- a/table-api/src/main/java/io/deephaven/api/JoinAddition.java +++ b/table-api/src/main/java/io/deephaven/api/JoinAddition.java @@ -6,8 +6,8 @@ import java.util.stream.Collectors; /** - * A join addition represents a {@link #newColumn() new column} that should be added as the result - * of a join, brought over from an {@link #existingColumn() existing column}. + * A join addition represents a {@link #newColumn() new column} that should be added as the result of a join, brought + * over from an {@link #existingColumn() existing column}. 
* * @see TableOperations#join(Object, Collection, Collection, int) * @see TableOperations#naturalJoin(Object, Collection, Collection) @@ -32,7 +32,7 @@ static JoinAddition parse(String x) { final int ix = x.indexOf('='); if (ix < 0) { throw new IllegalArgumentException(String.format( - "Unable to parse addition '%s', expected form '='", x)); + "Unable to parse addition '%s', expected form '='", x)); } ColumnName newColumn = ColumnName.parse(x.substring(0, ix)); ColumnName existingColumn = ColumnName.parse(x.substring(ix + 1)); diff --git a/table-api/src/main/java/io/deephaven/api/JoinAdditionImpl.java b/table-api/src/main/java/io/deephaven/api/JoinAdditionImpl.java index e76daff4130..6f66c6268bd 100644 --- a/table-api/src/main/java/io/deephaven/api/JoinAdditionImpl.java +++ b/table-api/src/main/java/io/deephaven/api/JoinAdditionImpl.java @@ -25,7 +25,7 @@ final void checkNotSameColumn() { // To make sure that JoinAddition#equals() works as we would expect, we should always // use canonical ColumnName when applicable. throw new IllegalArgumentException( - "Should not construct JoinAdditionImpl with equal columns, use the ColumnName directly"); + "Should not construct JoinAdditionImpl with equal columns, use the ColumnName directly"); } } } diff --git a/table-api/src/main/java/io/deephaven/api/JoinMatch.java b/table-api/src/main/java/io/deephaven/api/JoinMatch.java index 45d9c258357..1ffd677868f 100644 --- a/table-api/src/main/java/io/deephaven/api/JoinMatch.java +++ b/table-api/src/main/java/io/deephaven/api/JoinMatch.java @@ -6,10 +6,9 @@ import java.util.stream.Collectors; /** - * A join match represents one column from a {@link #left() left} table and one column from a - * {@link #right() right} table. The exact semantics of the match depend on context. For example, a - * natural-join has equal-to matches; where-not-in has not-equal-to matches; and as-of-join's last - * match has less-than or less-than-or-equal-to matches. 
+ * A join match represents one column from a {@link #left() left} table and one column from a {@link #right() right} + * table. The exact semantics of the match depend on context. For example, a natural-join has equal-to matches; + * where-not-in has not-equal-to matches; and as-of-join's last match has less-than or less-than-or-equal-to matches. * * @see TableOperations#join(Object, Collection, Collection, int) * @see TableOperations#naturalJoin(Object, Collection, Collection) @@ -36,14 +35,14 @@ static JoinMatch parse(String x) { final int ix = x.indexOf('='); if (ix < 0 || ix + 1 == x.length()) { throw new IllegalArgumentException(String.format( - "Unable to parse match '%s', expected form '==' or `=`", - x)); + "Unable to parse match '%s', expected form '==' or `=`", + x)); } final int ix2 = x.charAt(ix + 1) == '=' ? ix + 1 : ix; if (ix2 + 1 == x.length()) { throw new IllegalArgumentException(String.format( - "Unable to parse match '%s', expected form '==' or `=`", - x)); + "Unable to parse match '%s', expected form '==' or `=`", + x)); } ColumnName left = ColumnName.parse(x.substring(0, ix)); ColumnName right = ColumnName.parse(x.substring(ix2 + 1)); diff --git a/table-api/src/main/java/io/deephaven/api/JoinMatchImpl.java b/table-api/src/main/java/io/deephaven/api/JoinMatchImpl.java index 7ef41688821..283f2dfe9cc 100644 --- a/table-api/src/main/java/io/deephaven/api/JoinMatchImpl.java +++ b/table-api/src/main/java/io/deephaven/api/JoinMatchImpl.java @@ -25,7 +25,7 @@ final void checkNotSameColumn() { // To make sure that JoinMatch#equals() works as we would expect, we should always use // canonical ColumnName when applicable. 
throw new IllegalArgumentException( - "Should not construct JoinMatchImpl with left() equal to right(), use the ColumnName directly"); + "Should not construct JoinMatchImpl with left() equal to right(), use the ColumnName directly"); } } } diff --git a/table-api/src/main/java/io/deephaven/api/RawString.java b/table-api/src/main/java/io/deephaven/api/RawString.java index f82865a5997..f76a55be72e 100644 --- a/table-api/src/main/java/io/deephaven/api/RawString.java +++ b/table-api/src/main/java/io/deephaven/api/RawString.java @@ -10,8 +10,8 @@ import java.io.Serializable; /** - * An un-parsed string; used for cases where the server has string-parsing that hasn't been - * structurally represented at the api layer yet. + * An un-parsed string; used for cases where the server has string-parsing that hasn't been structurally represented at + * the api layer yet. */ @Immutable @SimpleStyle diff --git a/table-api/src/main/java/io/deephaven/api/Selectable.java b/table-api/src/main/java/io/deephaven/api/Selectable.java index a842eeade6e..706f370342a 100644 --- a/table-api/src/main/java/io/deephaven/api/Selectable.java +++ b/table-api/src/main/java/io/deephaven/api/Selectable.java @@ -32,14 +32,14 @@ static Selectable parse(String x) { final int ix = x.indexOf('='); if (ix < 0 || ix + 1 == x.length()) { throw new IllegalArgumentException(String.format( - "Unable to parse formula '%s', expected form '='", x)); + "Unable to parse formula '%s', expected form '='", x)); } if (x.charAt(ix + 1) == '=') { throw new IllegalArgumentException(String.format( - "Unable to parse formula '%s', expected form '='", x)); + "Unable to parse formula '%s', expected form '='", x)); } return SelectableImpl.of(ColumnName.parse(x.substring(0, ix)), - RawString.of(x.substring(ix + 1))); + RawString.of(x.substring(ix + 1))); } static List from(String... 
values) { diff --git a/table-api/src/main/java/io/deephaven/api/SelectableImpl.java b/table-api/src/main/java/io/deephaven/api/SelectableImpl.java index 34623a8adaf..3118cb5da18 100644 --- a/table-api/src/main/java/io/deephaven/api/SelectableImpl.java +++ b/table-api/src/main/java/io/deephaven/api/SelectableImpl.java @@ -28,7 +28,7 @@ final void checkExpressionNotSameColumn() { // To make sure that Selectable#equals() works as we would expect, we should always use // canonical ColumnName when applicable. throw new IllegalArgumentException( - "Should not construct SelectableImpl with expression() equal to newColumn(), use the ColumnName directly"); + "Should not construct SelectableImpl with expression() equal to newColumn(), use the ColumnName directly"); } } } diff --git a/table-api/src/main/java/io/deephaven/api/Strings.java b/table-api/src/main/java/io/deephaven/api/Strings.java index 942d3540527..5729ee0ad37 100644 --- a/table-api/src/main/java/io/deephaven/api/Strings.java +++ b/table-api/src/main/java/io/deephaven/api/Strings.java @@ -15,8 +15,7 @@ import java.util.stream.Collectors; /** - * A set of static helpers to turn strongly-typed api arguments into their {@link String} - * counterparts. + * A set of static helpers to turn strongly-typed api arguments into their {@link String} counterparts. 
*/ public class Strings { @@ -46,7 +45,7 @@ public static String of(FilterCondition condition) { return String.format("%s != %s", lhs, rhs); default: throw new IllegalStateException( - "Unexpected condition operator: " + condition.operator()); + "Unexpected condition operator: " + condition.operator()); } } @@ -64,12 +63,12 @@ public static String of(FilterIsNotNull isNotNull) { public static String of(FilterOr filterOr) { return filterOr.filters().stream().map(Strings::of) - .collect(Collectors.joining(") || (", "(", ")")); + .collect(Collectors.joining(") || (", "(", ")")); } public static String of(FilterAnd filterAnd) { return filterAnd.filters().stream().map(Strings::of) - .collect(Collectors.joining(") && (", "(", ")")); + .collect(Collectors.joining(") && (", "(", ")")); } public static String of(Pair pair) { @@ -120,7 +119,7 @@ public static String of(Value value) { * If we ever need to provide more specificity for a type, we can create a non-universal impl. */ private static class UniversalAdapter - implements Filter.Visitor, Expression.Visitor, Value.Visitor { + implements Filter.Visitor, Expression.Visitor, Value.Visitor { private String out; public String getOut() { diff --git a/table-api/src/main/java/io/deephaven/api/TableOperations.java b/table-api/src/main/java/io/deephaven/api/TableOperations.java index af5a2539cbc..565b2f130fa 100644 --- a/table-api/src/main/java/io/deephaven/api/TableOperations.java +++ b/table-api/src/main/java/io/deephaven/api/TableOperations.java @@ -26,51 +26,48 @@ public interface TableOperations, TABL // ------------------------------------------------------------------------------------------- /** - * Snapshot {@code baseTable}, triggered by {@code this} table, and return a new table as a - * result. The returned table will include an initial snapshot. + * Snapshot {@code baseTable}, triggered by {@code this} table, and return a new table as a result. The returned + * table will include an initial snapshot. * *

    * Delegates to {@link #snapshot(Object, boolean, Collection)}. * * @param baseTable The table to be snapshotted - * @param stampColumns The columns forming the "snapshot key", i.e. some subset of this Table's - * columns to be included in the result at snapshot time. As a special case, an empty - * stampColumns is taken to mean "include all columns". + * @param stampColumns The columns forming the "snapshot key", i.e. some subset of this Table's columns to be + * included in the result at snapshot time. As a special case, an empty stampColumns is taken to mean + * "include all columns". * @return The result table */ TOPS snapshot(TABLE baseTable, String... stampColumns); /** - * Snapshot {@code baseTable}, triggered by {@code this} table, and return a new table as a - * result. + * Snapshot {@code baseTable}, triggered by {@code this} table, and return a new table as a result. * *

    * Delegates to {@link #snapshot(Object, boolean, Collection)}. * * @param baseTable The table to be snapshotted * @param doInitialSnapshot Take the first snapshot now (otherwise wait for a change event) - * @param stampColumns The columns forming the "snapshot key", i.e. some subset of this Table's - * columns to be included in the result at snapshot time. As a special case, an empty - * stampColumns is taken to mean "include all columns". + * @param stampColumns The columns forming the "snapshot key", i.e. some subset of this Table's columns to be + * included in the result at snapshot time. As a special case, an empty stampColumns is taken to mean + * "include all columns". * @return The result table */ TOPS snapshot(TABLE baseTable, boolean doInitialSnapshot, String... stampColumns); /** - * Snapshot {@code baseTable}, triggered by {@code this} table, and return a new table as a - * result. + * Snapshot {@code baseTable}, triggered by {@code this} table, and return a new table as a result. * *

    - * {@code this} table is the triggering table, i.e. the table whose change events cause a new - * snapshot to be taken. The result table includes a "snapshot key" which is a subset (possibly - * all) of {@code this} table's columns. The remaining columns in the result table come from - * {@code baseTable}, the table being snapshotted. + * {@code this} table is the triggering table, i.e. the table whose change events cause a new snapshot to be taken. + * The result table includes a "snapshot key" which is a subset (possibly all) of {@code this} table's columns. The + * remaining columns in the result table come from {@code baseTable}, the table being snapshotted. * * @param baseTable The table to be snapshotted * @param doInitialSnapshot Take the first snapshot now (otherwise wait for a change event) - * @param stampColumns The columns forming the "snapshot key", i.e. some subset of this Table's - * columns to be included in the result at snapshot time. As a special case, an empty - * stampColumns is taken to mean "include all columns". + * @param stampColumns The columns forming the "snapshot key", i.e. some subset of this Table's columns to be + * included in the result at snapshot time. As a special case, an empty stampColumns is taken to mean + * "include all columns". * @return The result table */ TOPS snapshot(TABLE baseTable, boolean doInitialSnapshot, Collection stampColumns); @@ -107,9 +104,8 @@ public interface TableOperations, TABL * Filters {@code this} table based on the set of values in the {@code rightTable}. * *

    - * Note that when the {@code rightTable} ticks, all of the rows in {@code this} table are going - * to be re-evaluated, thus the intention is that the {@code rightTable} is fairly slow moving - * compared with {@code this} table. + * Note that when the {@code rightTable} ticks, all of the rows in {@code this} table are going to be re-evaluated, + * thus the intention is that the {@code rightTable} is fairly slow moving compared with {@code this} table. * * @param rightTable the filtering table. * @param columnsToMatch the columns to match between the two tables @@ -135,9 +131,8 @@ public interface TableOperations, TABL * Filters {@code this} table based on the set of values not in the {@code rightTable}. * *

    - * Note that when the {@code rightTable} ticks, all of the rows in {@code this} table are going - * to be re-evaluated, thus the intention is that the {@code rightTable} is fairly slow moving - * compared with {@code this} table. + * Note that when the {@code rightTable} ticks, all of the rows in {@code this} table are going to be re-evaluated, + * thus the intention is that the {@code rightTable} is fairly slow moving compared with {@code this} table. * * @param rightTable the filtering table. * @param columnsToMatch the columns to match between the two tables @@ -193,8 +188,8 @@ public interface TableOperations, TABL * @param rightTable The right side table on the join. * @param columnsToMatch A comma separated list of match conditions ("leftColumn=rightColumn" or * "columnFoundInBoth") - * @param columnsToAdd A comma separated list with the columns from the right side that need to - * be added to the left side as a result of the match. + * @param columnsToAdd A comma separated list with the columns from the right side that need to be added to the left + * side as a result of the match. * @return the natural-joined table */ TOPS naturalJoin(TABLE rightTable, String columnsToMatch, String columnsToAdd); @@ -207,12 +202,12 @@ public interface TableOperations, TABL * * @param rightTable The right side table on the join. * @param columnsToMatch The match pair conditions. - * @param columnsToAdd The columns from the right side that need to be added to the left side as - * a result of the match. + * @param columnsToAdd The columns from the right side that need to be added to the left side as a result of the + * match. 
* @return the natural-joined table */ TOPS naturalJoin(TABLE rightTable, Collection columnsToMatch, - Collection columnsToAdd); + Collection columnsToAdd); // ------------------------------------------------------------------------------------------- @@ -238,8 +233,8 @@ TOPS naturalJoin(TABLE rightTable, Collection columnsToMatc * @param rightTable The right side table on the join. * @param columnsToMatch A comma separated list of match conditions ("leftColumn=rightColumn" or * "columnFoundInBoth") - * @param columnsToAdd A comma separated list with the columns from the right side that need to - * be added to the left side as a result of the match. + * @param columnsToAdd A comma separated list with the columns from the right side that need to be added to the left + * side as a result of the match. * @return the exact-joined table */ TOPS exactJoin(TABLE rightTable, String columnsToMatch, String columnsToAdd); @@ -248,17 +243,17 @@ TOPS naturalJoin(TABLE rightTable, Collection columnsToMatc * Perform an exact-join with the {@code rightTable}. * *

    - * Similar to {@link #naturalJoin(Object, Collection, Collection)}, but requires that exactly - * one match from the {@code rightTable}. + * Similar to {@link #naturalJoin(Object, Collection, Collection)}, but requires that exactly one match from the + * {@code rightTable}. * * @param rightTable The right side table on the join. * @param columnsToMatch The match pair conditions. - * @param columnsToAdd The columns from the right side that need to be added to the left side as - * a result of the match. + * @param columnsToAdd The columns from the right side that need to be added to the left side as a result of the + * match. * @return the exact-joined table */ TOPS exactJoin(TABLE rightTable, Collection columnsToMatch, - Collection columnsToAdd); + Collection columnsToAdd); // ------------------------------------------------------------------------------------------- @@ -284,8 +279,8 @@ TOPS exactJoin(TABLE rightTable, Collection columnsToMatch, * @param rightTable The right side table on the join. * @param columnsToMatch A comma separated list of match conditions ("leftColumn=rightColumn" or * "columnFoundInBoth") - * @param columnsToAdd A comma separated list with the columns from the right side that need to - * be added to the left side as a result of the match. + * @param columnsToAdd A comma separated list with the columns from the right side that need to be added to the left + * side as a result of the match. * @return the left-joined table */ TOPS leftJoin(TABLE rightTable, String columnsToMatch, String columnsToAdd); @@ -294,34 +289,30 @@ TOPS exactJoin(TABLE rightTable, Collection columnsToMatch, * Perform a left-join with the {@code rightTable}. * *

    - * Returns a table that has one column for each of {@code this} table's columns, and one column - * corresponding to each of the {@code rightTable} columns from {@code columnsToAdd} (or all the - * columns whose names don't overlap with the name of a column from the source table if - * {@code columnsToAdd} is empty). The new columns (those corresponding to the - * {@code rightTable}) contain an aggregation of all values from the left side that match the - * join criteria. Consequently the types of all right side columns not involved in a join - * criteria, is an array of the original column type. If the two tables have columns with - * matching names then the method will fail with an exception unless the columns with - * corresponding names are found in one of the matching criteria. + * Returns a table that has one column for each of {@code this} table's columns, and one column corresponding to + * each of the {@code rightTable} columns from {@code columnsToAdd} (or all the columns whose names don't overlap + * with the name of a column from the source table if {@code columnsToAdd} is empty). The new columns (those + * corresponding to the {@code rightTable}) contain an aggregation of all values from the left side that match the + * join criteria. Consequently the types of all right side columns not involved in a join criteria, is an array of + * the original column type. If the two tables have columns with matching names then the method will fail with an + * exception unless the columns with corresponding names are found in one of the matching criteria. * *

    - * NOTE: leftJoin operation does not involve an actual data copy, or an in-memory table - * creation. In order to produce an actual in memory table you need to apply a select call on - * the join result. + * NOTE: leftJoin operation does not involve an actual data copy, or an in-memory table creation. In order to + * produce an actual in memory table you need to apply a select call on the join result. * * @param rightTable The right side table on the join. * @param columnsToMatch The match pair conditions. - * @param columnsToAdd The columns from the right side that need to be added to the left side as - * a result of the match. - * @return a table that has one column for each original table's columns, and one column - * corresponding to each column listed in columnsToAdd. If - * {@code columnsToAdd.isEmpty()} one column corresponding to each column of the input - * table (right table) columns whose names don't overlap with the name of a column from - * the source table is added. The new columns (those corresponding to the input table) - * contain an aggregation of all values from the left side that match the join criteria. + * @param columnsToAdd The columns from the right side that need to be added to the left side as a result of the + * match. + * @return a table that has one column for each original table's columns, and one column corresponding to each + * column listed in columnsToAdd. If {@code columnsToAdd.isEmpty()} one column corresponding to each column + * of the input table (right table) columns whose names don't overlap with the name of a column from the + * source table is added. The new columns (those corresponding to the input table) contain an aggregation of + * all values from the left side that match the join criteria. 
*/ TOPS leftJoin(TABLE rightTable, Collection columnsToMatch, - Collection columnsToAdd); + Collection columnsToAdd); // ------------------------------------------------------------------------------------------- @@ -334,8 +325,8 @@ TOPS leftJoin(TABLE rightTable, Collection columnsToMatch, * @param rightTable The right side table on the join. * @param columnsToMatch A comma separated list of match conditions ("leftColumn=rightColumn" or * "columnFoundInBoth") - * @return a new table joined according to the specification in columnsToMatch and includes all - * non-key-columns from the right table + * @return a new table joined according to the specification in columnsToMatch and includes all non-key-columns from + * the right table * @see #join(Object, Collection, Collection, int) */ TOPS join(TABLE rightTable, String columnsToMatch); @@ -349,8 +340,8 @@ TOPS leftJoin(TABLE rightTable, Collection columnsToMatch, * @param rightTable The right side table on the join. * @param columnsToMatch A comma separated list of match conditions ("leftColumn=rightColumn" or * "columnFoundInBoth") - * @param columnsToAdd A comma separated list with the columns from the right side that need to - * be added to the left side as a result of the match. + * @param columnsToAdd A comma separated list with the columns from the right side that need to be added to the left + * side as a result of the match. * @return a new table joined according to the specification in columnsToMatch and columnsToAdd * @see #join(Object, Collection, Collection, int) */ @@ -365,44 +356,41 @@ TOPS leftJoin(TABLE rightTable, Collection columnsToMatch, * * @param rightTable The right side table on the join. * @param columnsToMatch The match pair conditions. - * @param columnsToAdd The columns from the right side that need to be added to the left side as - * a result of the match. + * @param columnsToAdd The columns from the right side that need to be added to the left side as a result of the + * match. 
* @return a new table joined according to the specification in columnsToMatch and columnsToAdd */ TOPS join(TABLE rightTable, Collection columnsToMatch, - Collection columnsToAdd); + Collection columnsToAdd); /** * Perform a cross join with the {@code rightTable}. * *

    - * Returns a table that is the cartesian product of left rows X right rows, with one column for - * each of {@code this} table's columns, and one column corresponding to each of the - * {@code rightTable}'s columns that are included in the {@code columnsToAdd} argument. The rows - * are ordered first by the {@code this} table then by the {@code rightTable}. If - * {@code columnsToMatch} is non-empty then the product is filtered by the supplied match + * Returns a table that is the cartesian product of left rows X right rows, with one column for each of {@code this} + * table's columns, and one column corresponding to each of the {@code rightTable}'s columns that are included in + * the {@code columnsToAdd} argument. The rows are ordered first by the {@code this} table then by the + * {@code rightTable}. If {@code columnsToMatch} is non-empty then the product is filtered by the supplied match * conditions. * *

    - * To efficiently produce updates, the bits that represent a key for a given row are split into - * two. Unless specified, join reserves 16 bits to represent a right row. When there are too few - * bits to represent all of the right rows for a given aggregation group the table will shift a - * bit from the left side to the right side. The default of 16 bits was carefully chosen because - * it results in an efficient implementation to process live updates. + * To efficiently produce updates, the bits that represent a key for a given row are split into two. Unless + * specified, join reserves 16 bits to represent a right row. When there are too few bits to represent all of the + * right rows for a given aggregation group the table will shift a bit from the left side to the right side. The + * default of 16 bits was carefully chosen because it results in an efficient implementation to process live + * updates. * *

    - * An io.deephaven.db.v2.utils.OutOfKeySpaceException is thrown when the total number of bits - * needed to express the result table exceeds that needed to represent Long.MAX_VALUE. There are - * a few work arounds: + * An io.deephaven.db.v2.utils.OutOfKeySpaceException is thrown when the total number of bits needed to express the + * result table exceeds that needed to represent Long.MAX_VALUE. There are a few work arounds: * *

    * - If the left table is sparse, consider flattening the left table. *

    - * - If there are no key-columns and the right table is sparse, consider flattening the right - * table. + * - If there are no key-columns and the right table is sparse, consider flattening the right table. *

    - * - If the maximum size of a right table's group is small, you can reserve fewer bits by - * setting {@code reserveBits} on initialization. + * - If the maximum size of a right table's group is small, you can reserve fewer bits by setting + * {@code reserveBits} on initialization. * *

    * Note: If you know that a given group has at most one right-row then you should prefer using @@ -410,13 +398,13 @@ TOPS join(TABLE rightTable, Collection columnsToMatch, * * @param rightTable The right side table on the join. * @param columnsToMatch The match pair conditions. - * @param columnsToAdd The columns from the right side that need to be added to the left side as - * a result of the match. + * @param columnsToAdd The columns from the right side that need to be added to the left side as a result of the + * match. * @param reserveBits The number of bits to reserve for rightTable groups. * @return a new table joined according to the specification in columnsToMatch and columnsToAdd */ TOPS join(TABLE rightTable, Collection columnsToMatch, - Collection columnsToAdd, int reserveBits); + Collection columnsToAdd, int reserveBits); // ------------------------------------------------------------------------------------------- @@ -442,8 +430,8 @@ TOPS join(TABLE rightTable, Collection columnsToMatch, * @param rightTable The right side table on the join. * @param columnsToMatch A comma separated list of match conditions ("leftColumn=rightColumn" or * "columnFoundInBoth"). - * @param columnsToAdd A comma separated list with the columns from the left side that need to - * be added to the right side as a result of the match. + * @param columnsToAdd A comma separated list with the columns from the left side that need to be added to the right + * side as a result of the match. * @return a new table joined according to the specification in columnsToMatch and columnsToAdd */ TOPS aj(TABLE rightTable, String columnsToMatch, String columnsToAdd); @@ -456,31 +444,31 @@ TOPS join(TABLE rightTable, Collection columnsToMatch, * * @param rightTable The right side table on the join. * @param columnsToMatch The match pair conditions. - * @param columnsToAdd The columns from the right side that need to be added to the left side as - * a result of the match. 
+ * @param columnsToAdd The columns from the right side that need to be added to the left side as a result of the + * match. * @return a new table joined according to the specification in columnsToMatch and columnsToAdd */ TOPS aj(TABLE rightTable, Collection columnsToMatch, - Collection columnsToAdd); + Collection columnsToAdd); /** * Perform an as-of join with the {@code rightTable}. * *

    - * Looks up the columns in the {@code rightTable} that meet the match conditions in - * {@code columnsToMatch}. Matching is done exactly for the first n-1 columns and via a binary - * search for the last match pair. The columns of the {@code this} table are returned intact, - * together with the columns from {@code rightTable} defined in the {@code columnsToAdd}. + * Looks up the columns in the {@code rightTable} that meet the match conditions in {@code columnsToMatch}. Matching + * is done exactly for the first n-1 columns and via a binary search for the last match pair. The columns of the + * {@code this} table are returned intact, together with the columns from {@code rightTable} defined in the + * {@code columnsToAdd}. * * @param rightTable The right side table on the join. * @param columnsToMatch The match pair conditions. - * @param columnsToAdd The columns from the right side that need to be added to the left side as - * a result of the match. + * @param columnsToAdd The columns from the right side that need to be added to the left side as a result of the + * match. * @param asOfJoinRule The binary search operator for the last match pair. * @return a new table joined according to the specification in columnsToMatch and columnsToAdd */ TOPS aj(TABLE rightTable, Collection columnsToMatch, - Collection columnsToAdd, AsOfJoinRule asOfJoinRule); + Collection columnsToAdd, AsOfJoinRule asOfJoinRule); // ------------------------------------------------------------------------------------------- @@ -506,8 +494,8 @@ TOPS aj(TABLE rightTable, Collection columnsToMatch, * @param rightTable The right side table on the join. * @param columnsToMatch A comma separated list of match conditions ("leftColumn=rightColumn" or * "columnFoundInBoth"). - * @param columnsToAdd A comma separated list with the columns from the left side that need to - * be added to the right side as a result of the match. 
+ * @param columnsToAdd A comma separated list with the columns from the left side that need to be added to the right + * side as a result of the match. * @return a new table joined according to the specification in columnsToMatch and columnsToAdd */ TOPS raj(TABLE rightTable, String columnsToMatch, String columnsToAdd); @@ -520,36 +508,35 @@ TOPS aj(TABLE rightTable, Collection columnsToMatch, * * @param rightTable The right side table on the join. * @param columnsToMatch The match pair conditions. - * @param columnsToAdd The columns from the right side that need to be added to the left side as - * a result of the match. + * @param columnsToAdd The columns from the right side that need to be added to the left side as a result of the + * match. * @return a new table joined according to the specification in columnsToMatch and columnsToAdd */ TOPS raj(TABLE rightTable, Collection columnsToMatch, - Collection columnsToAdd); + Collection columnsToAdd); /** * Perform a reverse-as-of join with the {@code rightTable}. * *

    - * Just like {@link #aj(Object, Collection, Collection, AsOfJoinRule)}, but the matching on the - * last column is in reverse order, so that you find the row after the given timestamp instead - * of the row before. + * Just like {@link #aj(Object, Collection, Collection, AsOfJoinRule)}, but the matching on the last column is in + * reverse order, so that you find the row after the given timestamp instead of the row before. * *

    - * Looks up the columns in the {@code rightTable} that meet the match conditions in - * {@code columnsToMatch}. Matching is done exactly for the first n-1 columns and via a binary - * search for the last match pair. The columns of {@code this} table are returned intact, - * together with the columns from {@code rightTable} defined in {@code columnsToAdd}. + * Looks up the columns in the {@code rightTable} that meet the match conditions in {@code columnsToMatch}. Matching + * is done exactly for the first n-1 columns and via a binary search for the last match pair. The columns of + * {@code this} table are returned intact, together with the columns from {@code rightTable} defined in + * {@code columnsToAdd}. * * @param rightTable The right side table on the join. * @param columnsToMatch The match pair conditions. - * @param columnsToAdd The columns from the right side that need to be added to the left side as - * a result of the match. + * @param columnsToAdd The columns from the right side that need to be added to the left side as a result of the + * match. * @param reverseAsOfJoinRule The binary search operator for the last match pair. 
* @return a new table joined according to the specification in columnsToMatch and columnsToAdd */ TOPS raj(TABLE rightTable, Collection columnsToMatch, - Collection columnsToAdd, ReverseAsOfJoinRule reverseAsOfJoinRule); + Collection columnsToAdd, ReverseAsOfJoinRule reverseAsOfJoinRule); // ------------------------------------------------------------------------------------------- @@ -560,5 +547,5 @@ TOPS raj(TABLE rightTable, Collection columnsToMatch, TOPS by(Collection groupByColumns); TOPS by(Collection groupByColumns, - Collection aggregations); + Collection aggregations); } diff --git a/table-api/src/main/java/io/deephaven/api/agg/Aggregation.java b/table-api/src/main/java/io/deephaven/api/agg/Aggregation.java index ece3ee1d153..5d406a228f2 100644 --- a/table-api/src/main/java/io/deephaven/api/agg/Aggregation.java +++ b/table-api/src/main/java/io/deephaven/api/agg/Aggregation.java @@ -107,12 +107,12 @@ static Multi AggPct(double percentile, String... pairs) { static SortedFirst AggSortedFirst(String sortedColumn, String pair) { return AggregationFinisher.sortedFirst(SortColumn.asc(ColumnName.of(sortedColumn))) - .of(pair); + .of(pair); } static Multi AggSortedFirst(String sortedColumn, String... pairs) { return AggregationFinisher.sortedFirst(SortColumn.asc(ColumnName.of(sortedColumn))) - .of(pairs); + .of(pairs); } static SortedLast AggSortedLast(String sortedColumn, String pair) { @@ -121,7 +121,7 @@ static SortedLast AggSortedLast(String sortedColumn, String pair) { static Multi AggSortedLast(String sortedColumn, String... 
pairs) { return AggregationFinisher.sortedLast(SortColumn.asc(ColumnName.of(sortedColumn))) - .of(pairs); + .of(pairs); } static Std AggStd(String pair) { diff --git a/table-api/src/main/java/io/deephaven/api/agg/AggregationFinisher.java b/table-api/src/main/java/io/deephaven/api/agg/AggregationFinisher.java index dcfee55f4e5..6b2872afa3d 100644 --- a/table-api/src/main/java/io/deephaven/api/agg/AggregationFinisher.java +++ b/table-api/src/main/java/io/deephaven/api/agg/AggregationFinisher.java @@ -10,11 +10,10 @@ import java.util.function.Function; /** - * The aggregation finisher is a helper to aid in building aggregations whose construction can be - * finished by a {@link Pair}. A vararg overload is provided to build a {@link Multi}, - * {@link #of(Pair...)}, which can be useful to reduce the syntax required to build multiple - * aggregations of the same basic type. Helpers are provided that translate the string-equivalents - * via {@link Pair#parse(String)}. + * The aggregation finisher is a helper to aid in building aggregations whose construction can be finished by a + * {@link Pair}. A vararg overload is provided to build a {@link Multi}, {@link #of(Pair...)}, which can be useful to + * reduce the syntax required to build multiple aggregations of the same basic type. Helpers are provided that translate + * the string-equivalents via {@link Pair#parse(String)}. * *

    * Not all aggregations may be suitable for construction in this style. @@ -75,7 +74,7 @@ static AggregationFinisher sortedFirst(SortColumn sortColumn) { static AggregationFinisher sortedFirst(Iterable sortColumns) { return ImmutableAggregationFinisher - .of(pair -> SortedFirst.builder().addAllColumns(sortColumns).pair(pair).build()); + .of(pair -> SortedFirst.builder().addAllColumns(sortColumns).pair(pair).build()); } static AggregationFinisher sortedLast(SortColumn sortColumn) { @@ -84,7 +83,7 @@ static AggregationFinisher sortedLast(SortColumn sortColumn) { static AggregationFinisher sortedLast(Iterable sortColumns) { return ImmutableAggregationFinisher - .of(pair -> SortedLast.builder().addAllColumns(sortColumns).pair(pair).build()); + .of(pair -> SortedLast.builder().addAllColumns(sortColumns).pair(pair).build()); } public static AggregationFinisher std() { diff --git a/table-api/src/main/java/io/deephaven/api/agg/AggregationOutputs.java b/table-api/src/main/java/io/deephaven/api/agg/AggregationOutputs.java index 819b60165b9..09c76d197d0 100644 --- a/table-api/src/main/java/io/deephaven/api/agg/AggregationOutputs.java +++ b/table-api/src/main/java/io/deephaven/api/agg/AggregationOutputs.java @@ -7,8 +7,7 @@ import java.util.stream.Stream; /** - * A visitor to get the ordered output {@link ColumnName column names} for {@link Aggregation - * aggregations}. + * A visitor to get the ordered output {@link ColumnName column names} for {@link Aggregation aggregations}. */ public class AggregationOutputs implements Aggregation.Visitor { diff --git a/table-api/src/main/java/io/deephaven/api/agg/Multi.java b/table-api/src/main/java/io/deephaven/api/agg/Multi.java index 1fe5fb2f69d..ac3c26a8db8 100644 --- a/table-api/src/main/java/io/deephaven/api/agg/Multi.java +++ b/table-api/src/main/java/io/deephaven/api/agg/Multi.java @@ -7,8 +7,8 @@ import java.util.List; /** - * An aggregation that is a list of other aggregations. 
Useful as a helper when returning an - * aggregation constructed via a varargs parameter. + * An aggregation that is a list of other aggregations. Useful as a helper when returning an aggregation constructed via + * a varargs parameter. * * @param the aggregation type * @see AggregationFinisher#of(String...) @@ -33,7 +33,7 @@ public final V walk(V visitor) { final void checkSize() { if (aggregations().size() < 2) { throw new IllegalArgumentException( - String.format("%s should have at least two aggregations", Multi.class)); + String.format("%s should have at least two aggregations", Multi.class)); } } diff --git a/table-api/src/main/java/io/deephaven/api/agg/Pair.java b/table-api/src/main/java/io/deephaven/api/agg/Pair.java index ae245cbe1f5..eb0d00e99d1 100644 --- a/table-api/src/main/java/io/deephaven/api/agg/Pair.java +++ b/table-api/src/main/java/io/deephaven/api/agg/Pair.java @@ -5,9 +5,9 @@ import java.io.Serializable; /** - * An aggregation pair represents a {@link #input() input} and {@link #output() output} column for - * some {@link Aggregation aggregations}. Aggregations that don't have a one-to-one input/output - * mapping will not need an agg pair. + * An aggregation pair represents a {@link #input() input} and {@link #output() output} column for some + * {@link Aggregation aggregations}. Aggregations that don't have a one-to-one input/output mapping will not need an agg + * pair. 
*/ public interface Pair extends Serializable { @@ -25,8 +25,8 @@ static Pair parse(String x) { final int ix = x.indexOf('='); if (ix < 0) { throw new IllegalArgumentException(String.format( - "Unable to parse agg pair '%s', expected form '' or '='", - x)); + "Unable to parse agg pair '%s', expected form '' or '='", + x)); } ColumnName output = ColumnName.parse(x.substring(0, ix)); ColumnName input = ColumnName.parse(x.substring(ix + 1)); diff --git a/table-api/src/main/java/io/deephaven/api/agg/PairImpl.java b/table-api/src/main/java/io/deephaven/api/agg/PairImpl.java index 95706f206a1..37b3b63eea0 100644 --- a/table-api/src/main/java/io/deephaven/api/agg/PairImpl.java +++ b/table-api/src/main/java/io/deephaven/api/agg/PairImpl.java @@ -27,7 +27,7 @@ final void checkNotSameColumns() { // To make sure that Pair#equals() works as we would expect, we should always use // canonical ColumnName when applicable. throw new IllegalArgumentException( - "Should not construct PairImpl with the same columns, use the ColumnName directly"); + "Should not construct PairImpl with the same columns, use the ColumnName directly"); } } } diff --git a/table-api/src/main/java/io/deephaven/api/agg/Pct.java b/table-api/src/main/java/io/deephaven/api/agg/Pct.java index f2457efab02..0febc4f513c 100644 --- a/table-api/src/main/java/io/deephaven/api/agg/Pct.java +++ b/table-api/src/main/java/io/deephaven/api/agg/Pct.java @@ -24,7 +24,7 @@ public boolean averageMedian() { public Pct withAverage() { return ImmutablePct.builder().percentile(percentile()).pair(pair()).averageMedian(true) - .build(); + .build(); } @Override diff --git a/table-api/src/main/java/io/deephaven/api/agg/SortedFirst.java b/table-api/src/main/java/io/deephaven/api/agg/SortedFirst.java index b94e23fb2bb..9948950bdef 100644 --- a/table-api/src/main/java/io/deephaven/api/agg/SortedFirst.java +++ b/table-api/src/main/java/io/deephaven/api/agg/SortedFirst.java @@ -8,11 +8,10 @@ import java.util.List; /** - * Note: the 
sorted-first aggregation only supports {@link SortColumn.Order#ASCENDING} columns at - * the moment. + * Note: the sorted-first aggregation only supports {@link SortColumn.Order#ASCENDING} columns at the moment. * - * @see SortedFirst / SortedLast - * aggregations with sort direction + * @see SortedFirst / SortedLast aggregations with sort + * direction */ @Immutable @BuildableStyle @@ -44,7 +43,7 @@ final void checkSortOrder() { // TODO(deephaven-core#821): SortedFirst / SortedLast aggregations with sort direction if (!columns().stream().map(SortColumn::order).allMatch(SortedFirst::isAscending)) { throw new IllegalArgumentException( - "Can only construct SortedFirst with ascending, see https://github.com/deephaven/deephaven-core/issues/821"); + "Can only construct SortedFirst with ascending, see https://github.com/deephaven/deephaven-core/issues/821"); } } diff --git a/table-api/src/main/java/io/deephaven/api/agg/SortedLast.java b/table-api/src/main/java/io/deephaven/api/agg/SortedLast.java index 819fbbc7afc..302c38165cc 100644 --- a/table-api/src/main/java/io/deephaven/api/agg/SortedLast.java +++ b/table-api/src/main/java/io/deephaven/api/agg/SortedLast.java @@ -8,11 +8,10 @@ import java.util.List; /** - * Note: the sorted-last aggregation only supports {@link SortColumn.Order#ASCENDING} columns at the - * moment. + * Note: the sorted-last aggregation only supports {@link SortColumn.Order#ASCENDING} columns at the moment. 
* - * @see SortedFirst / SortedLast - * aggregations with sort direction + * @see SortedFirst / SortedLast aggregations with sort + * direction */ @Immutable @BuildableStyle @@ -44,7 +43,7 @@ final void checkSortOrder() { // TODO(deephaven-core#821): SortedFirst / SortedLast aggregations with sort direction if (!columns().stream().map(SortColumn::order).allMatch(SortedLast::isAscending)) { throw new IllegalArgumentException( - "Can only construct SortedLast with ascending, see https://github.com/deephaven/deephaven-core/issues/821"); + "Can only construct SortedLast with ascending, see https://github.com/deephaven/deephaven-core/issues/821"); } } diff --git a/table-api/src/main/java/io/deephaven/api/filter/FilterAnd.java b/table-api/src/main/java/io/deephaven/api/filter/FilterAnd.java index 7add9bd9ce8..21339bd3261 100644 --- a/table-api/src/main/java/io/deephaven/api/filter/FilterAnd.java +++ b/table-api/src/main/java/io/deephaven/api/filter/FilterAnd.java @@ -11,8 +11,7 @@ import java.util.function.Consumer; /** - * Evaluates to {@code true} when all of the given {@link #filters() filters} evaluates to - * {@code true}. + * Evaluates to {@code true} when all of the given {@link #filters() filters} evaluates to {@code true}. 
*/ @Immutable @BuildableStyle @@ -59,7 +58,7 @@ public final Spliterator spliterator() { final void checkSize() { if (filters().size() < 2) { throw new IllegalArgumentException( - String.format("%s must have at least 2 filters", FilterAnd.class)); + String.format("%s must have at least 2 filters", FilterAnd.class)); } } diff --git a/table-api/src/main/java/io/deephaven/api/filter/FilterCondition.java b/table-api/src/main/java/io/deephaven/api/filter/FilterCondition.java index 239b7c3d4a8..a37cf15d9f7 100644 --- a/table-api/src/main/java/io/deephaven/api/filter/FilterCondition.java +++ b/table-api/src/main/java/io/deephaven/api/filter/FilterCondition.java @@ -8,8 +8,8 @@ import java.io.Serializable; /** - * Evaluates to true based on the specific {@link #operator() operator} applied to the {@link #lhs() - * left-hand side} and {@link #rhs() right-hand side}. + * Evaluates to true based on the specific {@link #operator() operator} applied to the {@link #lhs() left-hand side} and + * {@link #rhs() right-hand side}. */ @Immutable @BuildableStyle @@ -164,12 +164,12 @@ public final FilterCondition transpose() { } /** - * {@link #transpose() Transpose} the filter if the {@link #lhs() left-hand side} is not a - * {@link ColumnName} and the {@link #rhs() right-hand side} is a {@link ColumnName}. + * {@link #transpose() Transpose} the filter if the {@link #lhs() left-hand side} is not a {@link ColumnName} and + * the {@link #rhs() right-hand side} is a {@link ColumnName}. * *

    - * Useful in cases where a visitor wants to walk the sides, and prefers to have a - * {@link ColumnName} on the {@link #lhs() left-hand side}. + * Useful in cases where a visitor wants to walk the sides, and prefers to have a {@link ColumnName} on the + * {@link #lhs() left-hand side}. * * @return the filter, potentially transposed */ diff --git a/table-api/src/main/java/io/deephaven/api/filter/FilterOr.java b/table-api/src/main/java/io/deephaven/api/filter/FilterOr.java index edb40919cd9..433eab174f9 100644 --- a/table-api/src/main/java/io/deephaven/api/filter/FilterOr.java +++ b/table-api/src/main/java/io/deephaven/api/filter/FilterOr.java @@ -11,8 +11,7 @@ import java.util.function.Consumer; /** - * Evaluates to {@code true} when any of the given {@link #filters() filters} evaluates to - * {@code true}. + * Evaluates to {@code true} when any of the given {@link #filters() filters} evaluates to {@code true}. */ @Immutable @BuildableStyle @@ -59,7 +58,7 @@ public final Spliterator spliterator() { final void checkSize() { if (filters().size() < 2) { throw new IllegalArgumentException( - String.format("%s must have at least 2 filters", FilterOr.class)); + String.format("%s must have at least 2 filters", FilterOr.class)); } } diff --git a/table-api/src/main/java/io/deephaven/api/value/ValueLong.java b/table-api/src/main/java/io/deephaven/api/value/ValueLong.java index 4f4088be9ad..fe86bfa14b4 100644 --- a/table-api/src/main/java/io/deephaven/api/value/ValueLong.java +++ b/table-api/src/main/java/io/deephaven/api/value/ValueLong.java @@ -26,7 +26,7 @@ public final V walk(V visitor) { final void checkNotDeephavenNull() { if (value() == Long.MIN_VALUE) { throw new IllegalArgumentException( - "Can't represent Long.MIN_VALUE, is Deephaven null representation"); + "Can't represent Long.MIN_VALUE, is Deephaven null representation"); } } } diff --git a/table-api/src/main/java/io/deephaven/db/tables/utils/NameValidator.java 
b/table-api/src/main/java/io/deephaven/db/tables/utils/NameValidator.java index 29e09471fb2..f2aac551595 100644 --- a/table-api/src/main/java/io/deephaven/db/tables/utils/NameValidator.java +++ b/table-api/src/main/java/io/deephaven/db/tables/utils/NameValidator.java @@ -35,7 +35,7 @@ private enum ValidationCode { private String getErrorMessage(String name, String type) { return message.replace("", name == null ? "null" : name).replaceAll("", - type); + type); } private boolean isValid() { @@ -46,9 +46,9 @@ private boolean isValid() { // table names should not start with numbers. Partition names should be able to // TODO(deephaven-core#822): Allow more table names private final static Pattern TABLE_NAME_PATTERN = - Pattern.compile("([a-zA-Z_$])[a-zA-Z0-9_$[-][+]@]*"); + Pattern.compile("([a-zA-Z_$])[a-zA-Z0-9_$[-][+]@]*"); private final static Pattern PARTITION_NAME_PATTERN = - Pattern.compile("([a-zA-Z0-9$])[a-zA-Z0-9_$[-][+]@\\.]*"); + Pattern.compile("([a-zA-Z0-9$])[a-zA-Z0-9_$[-][+]@\\.]*"); public enum Type { // @formatter:off @@ -70,7 +70,7 @@ public String toString() { } Type(boolean checkReservedVariableNames, boolean checkValidJavaWord, String type, - Pattern pattern) { + Pattern pattern) { this.checkReservedVariableNames = checkReservedVariableNames; this.checkValidJavaWord = checkValidJavaWord; this.type = type; @@ -79,7 +79,7 @@ public String toString() { private String validate(String name) { ValidationCode code = - getCode(name, pattern, checkReservedVariableNames, checkValidJavaWord); + getCode(name, pattern, checkReservedVariableNames, checkValidJavaWord); if (!code.isValid()) { throw new InvalidNameException(code.getErrorMessage(name, type)); } @@ -89,8 +89,8 @@ private String validate(String name) { } private static final Set DB_RESERVED_VARIABLE_NAMES = - Stream.of("in", "not", "i", "ii", "k").collect( - Collectors.collectingAndThen(Collectors.toSet(), Collections::unmodifiableSet)); + Stream.of("in", "not", "i", "ii", "k").collect( + 
Collectors.collectingAndThen(Collectors.toSet(), Collections::unmodifiableSet)); public static String validateTableName(String name) { return Type.TABLE.validate(name); @@ -113,7 +113,7 @@ public static String validateColumnName(String name) { } private static ValidationCode getCode(String name, Pattern pattern, - boolean checkReservedVariableNames, boolean checkValidJavaWord) { + boolean checkReservedVariableNames, boolean checkValidJavaWord) { if (name == null || name.isEmpty()) { return ValidationCode.NULL_NAME; } @@ -182,27 +182,25 @@ public static String legalizeColumnName(String name, Function re * Attempts to return a legal name based on the passed in {@code name}. * *

    - * Illegal characters are simply removed. Custom replacement is possible through - * {@code customReplace} + * Illegal characters are simply removed. Custom replacement is possible through {@code customReplace} * *

    - * To avoid duplicated names, anything in the set {@code takenNames} will not be returned. These - * duplicates are resolved by adding sequential digits at the end of the variable name. + * To avoid duplicated names, anything in the set {@code takenNames} will not be returned. These duplicates are + * resolved by adding sequential digits at the end of the variable name. * *

    - * Column names A variable's name can be any legal identifier - an unlimited-length sequence of - * Unicode letters and digits, beginning with a letter, the dollar sign "$", or the underscore - * character "_". Subsequent characters may be letters, digits, dollar signs, or underscore - * characters. + * Column names A variable's name can be any legal identifier - an unlimited-length sequence of Unicode letters and + * digits, beginning with a letter, the dollar sign "$", or the underscore character "_". Subsequent characters may + * be letters, digits, dollar signs, or underscore characters. * * @param name, customReplace, takenNames can not be null * @return */ public static String legalizeColumnName(String name, Function customReplace, - Set takenNames) { + Set takenNames) { return legalizeName(name, customReplace, takenNames, "Can not legalize column name " + name, - COLUMN_PREFIX, STERILE_COLUMN_AND_QUERY_REGEX, true, true, - NameValidator::validateColumnName); + COLUMN_PREFIX, STERILE_COLUMN_AND_QUERY_REGEX, true, true, + NameValidator::validateColumnName); } public static String[] legalizeColumnNames(String[] names) { @@ -210,7 +208,7 @@ public static String[] legalizeColumnNames(String[] names) { } public static String[] legalizeColumnNames(String[] names, - Function customReplace) { + Function customReplace) { return legalizeColumnNames(names, customReplace, false); } @@ -219,9 +217,9 @@ public static String[] legalizeColumnNames(String[] names, boolean resolveConfli } public static String[] legalizeColumnNames(String[] names, - Function customReplace, boolean resolveConflicts) { + Function customReplace, boolean resolveConflicts) { return legalizeNames(names, customReplace, resolveConflicts, - NameValidator::legalizeColumnName); + NameValidator::legalizeColumnName); } public static String legalizeQueryParameterName(String name) { @@ -229,7 +227,7 @@ public static String legalizeQueryParameterName(String name) { } public static String 
legalizeQueryParameterName(String name, - Function replaceCustom) { + Function replaceCustom) { return legalizeQueryParameterName(name, replaceCustom, Collections.emptySet()); } @@ -241,12 +239,11 @@ public static String legalizeQueryParameterName(String name, Set takenNa * Attempts to return a legal name based on the passed in {@code name}. * *

    - * Illegal characters are simply removed. Custom replacement is possible through - * {@code customReplace} + * Illegal characters are simply removed. Custom replacement is possible through {@code customReplace} * *

    - * To avoid duplicated names, anything in the set {@code takenNames} will not be returned. These - * duplicates are resolved by adding sequential digits at the end of the variable name. + * To avoid duplicated names, anything in the set {@code takenNames} will not be returned. These duplicates are + * resolved by adding sequential digits at the end of the variable name. * *

    * Query parameters follow the same rules as column names @@ -255,10 +252,10 @@ public static String legalizeQueryParameterName(String name, Set takenNa * @return */ public static String legalizeQueryParameterName(String name, - Function customReplace, Set takenNames) { + Function customReplace, Set takenNames) { return legalizeName(name, customReplace, takenNames, "Can not legalize table name " + name, - QUERY_PREFIX, STERILE_COLUMN_AND_QUERY_REGEX, true, true, - NameValidator::validateQueryParameterName); + QUERY_PREFIX, STERILE_COLUMN_AND_QUERY_REGEX, true, true, + NameValidator::validateQueryParameterName); } public static String[] legalizeQueryParameterNames(String[] names) { @@ -266,7 +263,7 @@ public static String[] legalizeQueryParameterNames(String[] names) { } public static String[] legalizeQueryParameterNames(String[] names, - Function customReplace) { + Function customReplace) { return legalizeQueryParameterNames(names, customReplace, false); } @@ -275,9 +272,9 @@ public static String[] legalizeQueryParameterNames(String[] names, boolean resol } public static String[] legalizeQueryParameterNames(String[] names, - Function customReplace, boolean resolveConflicts) { + Function customReplace, boolean resolveConflicts) { return legalizeNames(names, customReplace, resolveConflicts, - NameValidator::legalizeQueryParameterName); + NameValidator::legalizeQueryParameterName); } public static String legalizeTableName(String name) { @@ -296,12 +293,11 @@ public static String legalizeTableName(String name, Function rep * Attempts to return a legal name based on the passed in {@code name}. * *

    - * Illegal characters are simply removed. Custom replacement is possible through - * {@code customReplace} + * Illegal characters are simply removed. Custom replacement is possible through {@code customReplace} * *

    - * To avoid duplicated names, anything in the set {@code takenNames} will not be returned. These - * duplicates are resolved by adding sequential digits at the end of the variable name. + * To avoid duplicated names, anything in the set {@code takenNames} will not be returned. These duplicates are + * resolved by adding sequential digits at the end of the variable name. * *

    * Table Names- check the regex {@code TABLE_NAME_PATTERN} @@ -310,10 +306,10 @@ public static String legalizeTableName(String name, Function rep * @return */ public static String legalizeTableName(String name, Function customReplace, - Set takenNames) { + Set takenNames) { return legalizeName(name, customReplace, takenNames, "Can not legalize table name " + name, - TABLE_PREFIX, STERILE_TABLE_AND_NAMESPACE_REGEX, false, true, - NameValidator::validateTableName); + TABLE_PREFIX, STERILE_TABLE_AND_NAMESPACE_REGEX, false, true, + NameValidator::validateTableName); } public static boolean isLegalTableName(String name) { @@ -338,9 +334,9 @@ public static boolean isLegalTableName(String name, Function rep * */ public static boolean isLegalTableName(String name, Function customReplace, - Set takenNames) { + Set takenNames) { return isLegal(name, customReplace, takenNames, STERILE_TABLE_AND_NAMESPACE_REGEX, false, - true, NameValidator::validateTableName); + true, NameValidator::validateTableName); } public static String[] legalizeTableNames(String[] names) { @@ -348,7 +344,7 @@ public static String[] legalizeTableNames(String[] names) { } public static String[] legalizeTableNames(String[] names, - Function customReplace) { + Function customReplace) { return legalizeTableNames(names, customReplace, false); } @@ -357,9 +353,9 @@ public static String[] legalizeTableNames(String[] names, boolean resolveConflic } public static String[] legalizeTableNames(String[] names, - Function customReplace, boolean resolveConflicts) { + Function customReplace, boolean resolveConflicts) { return legalizeNames(names, customReplace, resolveConflicts, - NameValidator::legalizeTableName); + NameValidator::legalizeTableName); } public static String legalizeNamespaceName(String name) { @@ -371,7 +367,7 @@ public static String legalizeNamespaceName(String name, Set takenNames) } public static String legalizeNamespaceName(String name, - Function replaceCustom) { + Function replaceCustom) { 
return legalizeNamespaceName(name, replaceCustom, Collections.emptySet()); } @@ -379,12 +375,11 @@ public static String legalizeNamespaceName(String name, * Attempts to return a legal name based on the passed in {@code name}. * *

    - * Illegal characters are simply removed. Custom replacement is possible through - * {@code customReplace} + * Illegal characters are simply removed. Custom replacement is possible through {@code customReplace} * *

    - * To avoid duplicated names, anything in the set {@code takenNames} will not be returned. These - * duplicates are resolved by adding sequential digits at the end of the variable name. + * To avoid duplicated names, anything in the set {@code takenNames} will not be returned. These duplicates are + * resolved by adding sequential digits at the end of the variable name. * *

    * Namespace Names- check the regex {@code TABLE_NAME_PATTERN} @@ -393,10 +388,10 @@ public static String legalizeNamespaceName(String name, * @return */ public static String legalizeNamespaceName(String name, Function customReplace, - Set takenNames) { + Set takenNames) { return legalizeName(name, customReplace, takenNames, - "Can not legalize namespace name " + name, null, STERILE_TABLE_AND_NAMESPACE_REGEX, - false, false, NameValidator::validateNamespaceName); + "Can not legalize namespace name " + name, null, STERILE_TABLE_AND_NAMESPACE_REGEX, + false, false, NameValidator::validateNamespaceName); } /** @@ -408,9 +403,9 @@ public static String legalizeNamespaceName(String name, Function * @return whether the name is valid for a new namespace */ public static boolean isLegalNamespaceName(String name, Function customReplace, - Set takenNames) { + Set takenNames) { return isLegal(name, customReplace, takenNames, STERILE_TABLE_AND_NAMESPACE_REGEX, false, - false, NameValidator::validateNamespaceName); + false, NameValidator::validateNamespaceName); } public static boolean isLegalNamespaceName(String name) { @@ -422,7 +417,7 @@ public static boolean isLegalNamespaceName(String name, Set takenNames) } public static boolean isLegalNamespaceName(String name, - Function replaceCustom) { + Function replaceCustom) { return isLegalNamespaceName(name, replaceCustom, Collections.emptySet()); } @@ -431,7 +426,7 @@ public static String[] legalizeNamespaceNames(String[] names) { } public static String[] legalizeNamespaceNames(String[] names, - Function customReplace) { + Function customReplace) { return legalizeNamespaceNames(names, customReplace, false); } @@ -440,14 +435,14 @@ public static String[] legalizeNamespaceNames(String[] names, boolean resolveCon } public static String[] legalizeNamespaceNames(String[] names, - Function customReplace, boolean resolveConflicts) { + Function customReplace, boolean resolveConflicts) { return legalizeNames(names, customReplace, 
resolveConflicts, - NameValidator::legalizeNamespaceName); + NameValidator::legalizeNamespaceName); } private static String legalizeName(String name, Function customReplace, - Set takenNames, String error, String prefix, String regex, boolean checkReserved, - boolean checkFirstIsNumber, Consumer validation) { + Set takenNames, String error, String prefix, String regex, boolean checkReserved, + boolean checkFirstIsNumber, Consumer validation) { // if null, throw an exception if (name == null) { throw new LegalizeNameException("Can not legalize a null name"); @@ -491,8 +486,8 @@ private static String legalizeName(String name, Function customR } private static boolean isLegal(String name, Function customReplace, - Set takenNames, String regex, boolean checkReserved, boolean checkFirstIsNumber, - Consumer validation) { + Set takenNames, String regex, boolean checkReserved, boolean checkFirstIsNumber, + Consumer validation) { // if null, throw an exception if (name == null || name.isEmpty()) { return false; @@ -534,12 +529,12 @@ private static boolean isLegal(String name, Function customRepla private static boolean isReserved(String replacedName) { return DB_RESERVED_VARIABLE_NAMES.contains(replacedName) - || SourceVersion.isKeyword(replacedName); + || SourceVersion.isKeyword(replacedName); } private static String[] legalizeNames(final String[] names, - final Function customReplace, final boolean resolveConflicts, - final Legalizer legalizer) { + final Function customReplace, final boolean resolveConflicts, + final Legalizer legalizer) { // if null, throw an exception if (names == null) { throw new LegalizeNameException("Can not legalize a null name array"); @@ -550,7 +545,7 @@ private static String[] legalizeNames(final String[] names, Set result = new LinkedHashSet<>(); for (String name : names) { name = legalizer.apply(name, customReplace, - resolveConflicts ? result : Collections.emptySet()); + resolveConflicts ? 
result : Collections.emptySet()); if (!resolveConflicts && result.contains(name)) { throw new LegalizeNameException("Duplicate names during legalization: " + name); } diff --git a/table-api/src/test/java/io/deephaven/api/JoinAdditionTest.java b/table-api/src/test/java/io/deephaven/api/JoinAdditionTest.java index 29039621e6c..b4a4e092900 100644 --- a/table-api/src/test/java/io/deephaven/api/JoinAdditionTest.java +++ b/table-api/src/test/java/io/deephaven/api/JoinAdditionTest.java @@ -9,7 +9,7 @@ public class JoinAdditionTest { public static final JoinAddition FOO = ColumnName.of("Foo"); public static final JoinAddition FOO_BAR = - JoinAddition.of(ColumnName.of("Foo"), ColumnName.of("Bar")); + JoinAddition.of(ColumnName.of("Foo"), ColumnName.of("Bar")); @Test void newColumn() { diff --git a/table-api/src/test/java/io/deephaven/api/JoinMatchTest.java b/table-api/src/test/java/io/deephaven/api/JoinMatchTest.java index 1f568d8e536..e0222d12d02 100644 --- a/table-api/src/test/java/io/deephaven/api/JoinMatchTest.java +++ b/table-api/src/test/java/io/deephaven/api/JoinMatchTest.java @@ -9,7 +9,7 @@ public class JoinMatchTest { public static final JoinMatch FOO = ColumnName.of("Foo"); public static final JoinMatch FOO_BAR = - JoinMatch.of(ColumnName.of("Foo"), ColumnName.of("Bar")); + JoinMatch.of(ColumnName.of("Foo"), ColumnName.of("Bar")); @Test void left() { diff --git a/table-api/src/test/java/io/deephaven/api/SelectableTest.java b/table-api/src/test/java/io/deephaven/api/SelectableTest.java index ab115467658..9c354df08a3 100644 --- a/table-api/src/test/java/io/deephaven/api/SelectableTest.java +++ b/table-api/src/test/java/io/deephaven/api/SelectableTest.java @@ -9,9 +9,9 @@ public class SelectableTest { private static final Selectable FOO = ColumnName.of("Foo"); private static final Selectable FOO_BAR = - Selectable.of(ColumnName.of("Foo"), ColumnName.of("Bar")); + Selectable.of(ColumnName.of("Foo"), ColumnName.of("Bar")); private static final Selectable FOO_EXP = - 
Selectable.of(ColumnName.of("Foo"), RawString.of("foo(Bar) + 42")); + Selectable.of(ColumnName.of("Foo"), RawString.of("foo(Bar) + 42")); @Test void newColumn() { diff --git a/table-api/src/test/java/io/deephaven/api/filter/FilterTest.java b/table-api/src/test/java/io/deephaven/api/filter/FilterTest.java index 0ff9ac8d22a..d4e5ff7a252 100644 --- a/table-api/src/test/java/io/deephaven/api/filter/FilterTest.java +++ b/table-api/src/test/java/io/deephaven/api/filter/FilterTest.java @@ -32,17 +32,17 @@ void not() { @Test void ands() { toString( - FilterAnd.of(Filter.isNotNull(ColumnName.of("Foo")), - FilterCondition.gt(ColumnName.of("Foo"), Value.of(42L))), - "(!isNull(Foo)) && (Foo > 42)"); + FilterAnd.of(Filter.isNotNull(ColumnName.of("Foo")), + FilterCondition.gt(ColumnName.of("Foo"), Value.of(42L))), + "(!isNull(Foo)) && (Foo > 42)"); } @Test void ors() { toString( - FilterOr.of(Filter.isNull(ColumnName.of("Foo")), - FilterCondition.eq(ColumnName.of("Foo"), Value.of(42L))), - "(isNull(Foo)) || (Foo == 42)"); + FilterOr.of(Filter.isNull(ColumnName.of("Foo")), + FilterCondition.eq(ColumnName.of("Foo"), Value.of(42L))), + "(isNull(Foo)) || (Foo == 42)"); } private static void toString(Filter filter, String expected) { diff --git a/table-api/src/test/java/io/deephaven/db/tables/utils/TestNameValidator.java b/table-api/src/test/java/io/deephaven/db/tables/utils/TestNameValidator.java index 2fbeee52bd2..60d58e8c1a6 100644 --- a/table-api/src/test/java/io/deephaven/db/tables/utils/TestNameValidator.java +++ b/table-api/src/test/java/io/deephaven/db/tables/utils/TestNameValidator.java @@ -430,7 +430,7 @@ public void testLegalizeNames() { } final String[] canBeLegalized2 = - {invalidTable1, invalidTable2, invalidTable3, validTable1, validTable2, validTable3}; + {invalidTable1, invalidTable2, invalidTable3, validTable1, validTable2, validTable3}; // table names try { @@ -453,7 +453,7 @@ public void testLegalizeNames() { } final String[] canBeLegalized3 = - {invalidTable2, 
invalidTable3, validTable1, validTable2, validTable3}; + {invalidTable2, invalidTable3, validTable1, validTable2, validTable3}; ret = NameValidator.legalizeNamespaceNames(canBeLegalized3, true); correct = new String[] {"a0", "a02", validTable1, "a03", validTable3}; diff --git a/web/client-api/src/main/java/io/deephaven/ide/shared/IdeSession.java b/web/client-api/src/main/java/io/deephaven/ide/shared/IdeSession.java index a44f1422f51..650c3baf4bd 100644 --- a/web/client-api/src/main/java/io/deephaven/ide/shared/IdeSession.java +++ b/web/client-api/src/main/java/io/deephaven/ide/shared/IdeSession.java @@ -46,9 +46,9 @@ public class IdeSession extends HasEventHandling { @JsIgnore public IdeSession( - WorkerConnection connection, - Ticket connectionResult, - JsRunnable closer) { + WorkerConnection connection, + Ticket connectionResult, + JsRunnable closer) { this.result = connectionResult; cancelled = new JsSet<>(); this.connection = connection; @@ -78,8 +78,7 @@ public Promise getObject(Object definitionObject) { return connection.getObject(definition, result); } - public Promise newTable(String[] columnNames, String[] types, String[][] data, - String userTimeZone) { + public Promise newTable(String[] columnNames, String[] types, String[][] data, String userTimeZone) { return connection.newTable(columnNames, types, data, userTimeZone, this).then(table -> { final CustomEventInit event = CustomEventInit.create(); event.setDetail(table); @@ -104,9 +103,9 @@ public Promise bindTableToVariable(JsTable table, String name) { bindRequest.setTableId(table.getHandle().makeTicket()); bindRequest.setVariableName(name); return Callbacks - .grpcUnaryPromise(c -> connection.consoleServiceClient() - .bindTableToVariable(bindRequest, connection.metadata(), c::apply)) - .then(ignore -> Promise.resolve((Void) null)); + .grpcUnaryPromise(c -> connection.consoleServiceClient().bindTableToVariable(bindRequest, + connection.metadata(), c::apply)) + .then(ignore -> Promise.resolve((Void) 
null)); } public void close() { @@ -119,8 +118,7 @@ public CancellablePromise runCode(String code) { request.setConsoleId(this.result); request.setCode(code); Promise runCodePromise = Callbacks.grpcUnaryPromise(c -> { - connection.consoleServiceClient().executeCommand(request, connection.metadata(), - c::apply); + connection.consoleServiceClient().executeCommand(request, connection.metadata(), c::apply); }); runCodePromise.then(response -> { CommandResult commandResult = new CommandResult(); @@ -138,15 +136,14 @@ public CancellablePromise runCode(String code) { }); CancellablePromise result = promise.asPromise( - res -> new JsCommandResult(res), - () -> { - // cancelled.add(handle); - // CancelCommandRequest cancelRequest = new CancelCommandRequest(); - // cancelRequest.setCommandid(); - // connection.consoleServiceClient().cancelCommand(cancelRequest, - // connection.metadata()); - throw new UnsupportedOperationException("cancelCommand"); - }); + res -> new JsCommandResult(res), + () -> { + // cancelled.add(handle); + // CancelCommandRequest cancelRequest = new CancelCommandRequest(); + // cancelRequest.setCommandid(); + // connection.consoleServiceClient().cancelCommand(cancelRequest, connection.metadata()); + throw new UnsupportedOperationException("cancelCommand"); + }); CommandInfo commandInfo = new CommandInfo(code, result); final CustomEventInit event = CustomEventInit.create(); @@ -157,11 +154,10 @@ public CancellablePromise runCode(String code) { } private VariableDefinition[] copyVariables( - JsArray list) { + JsArray list) { VariableDefinition[] array = new VariableDefinition[0]; // noinspection ConstantConditions - list.forEach((item, p1, p2) -> array[array.length] = - new VariableDefinition(item.getName(), item.getType())); + list.forEach((item, p1, p2) -> array[array.length] = new VariableDefinition(item.getName(), item.getType())); return array; } @@ -184,7 +180,7 @@ public void openDocument(Object params) { JsLog.debug("Opening document for 
autocomplete ", request); connection.consoleServiceClient().openDocument(request, connection.metadata(), - (p0, p1) -> JsLog.debug("open doc response", p0, p1)); + (p0, p1) -> JsLog.debug("open doc response", p0, p1)); } public void changeDocument(Object params) { @@ -216,7 +212,7 @@ public void changeDocument(Object params) { JsLog.debug("Sending content changes", request); connection.consoleServiceClient().changeDocument(request, connection.metadata(), - (p0, p1) -> JsLog.debug("Updated doc", p0, p1)); + (p0, p1) -> JsLog.debug("Updated doc", p0, p1)); } private DocumentRange toRange(final Any range) { @@ -235,32 +231,27 @@ private Position toPosition(final Any pos) { return result; } - public Promise> getCompletionItems( - Object params) { + public Promise> getCompletionItems(Object params) { final JsPropertyMap jsMap = Js.uncheckedCast(params); final GetCompletionItemsRequest request = new GetCompletionItemsRequest(); - final VersionedTextDocumentIdentifier textDocument = - toVersionedTextDoc(jsMap.getAny("textDocument")); + final VersionedTextDocumentIdentifier textDocument = toVersionedTextDoc(jsMap.getAny("textDocument")); request.setTextDocument(textDocument); request.setPosition(toPosition(jsMap.getAny("position"))); request.setContext(toContext(jsMap.getAny("context"))); request.setConsoleId(this.result); - LazyPromise> promise = - new LazyPromise<>(); - connection.consoleServiceClient().getCompletionItems(request, connection.metadata(), - (p0, p1) -> { - JsLog.debug("Got completions", p0, p1); - promise.succeed(cleanupItems(p1.getItemsList())); - }); + LazyPromise> promise = new LazyPromise<>(); + connection.consoleServiceClient().getCompletionItems(request, connection.metadata(), (p0, p1) -> { + JsLog.debug("Got completions", p0, p1); + promise.succeed(cleanupItems(p1.getItemsList())); + }); return promise.asPromise(JsTable.MAX_BATCH_TIME) - .then(Promise::resolve); + .then(Promise::resolve); } - private JsArray cleanupItems( - final JsArray itemsList) 
{ + private JsArray cleanupItems(final JsArray itemsList) { JsArray cleaned = new JsArray<>(); if (itemsList != null) { for (int i = 0; i < itemsList.getLength(); i++) { @@ -287,13 +278,12 @@ public void closeDocument(Object params) { final JsPropertyMap jsMap = Js.uncheckedCast(params); final CloseDocumentRequest request = new CloseDocumentRequest(); request.setConsoleId(result); - final VersionedTextDocumentIdentifier textDocument = - toVersionedTextDoc(jsMap.getAny("textDocument")); + final VersionedTextDocumentIdentifier textDocument = toVersionedTextDoc(jsMap.getAny("textDocument")); request.setTextDocument(textDocument); JsLog.debug("Closing document for autocomplete ", request); connection.consoleServiceClient().closeDocument(request, connection.metadata(), - (p0, p1) -> JsLog.debug("response back", p0, p1)); + (p0, p1) -> JsLog.debug("response back", p0, p1)); } private VersionedTextDocumentIdentifier toVersionedTextDoc(final Any textDoc) { diff --git a/web/client-api/src/main/java/io/deephaven/ide/shared/LspTranslate.java b/web/client-api/src/main/java/io/deephaven/ide/shared/LspTranslate.java index 47127f358b0..af519834438 100644 --- a/web/client-api/src/main/java/io/deephaven/ide/shared/LspTranslate.java +++ b/web/client-api/src/main/java/io/deephaven/ide/shared/LspTranslate.java @@ -10,15 +10,15 @@ * LspTranslate: *

    *

    - * This class is responsible for transforming "off-the-wire" protobuf completion responses into - * js-friendly "js api" objects that we can hand off to clients. + * This class is responsible for transforming "off-the-wire" protobuf completion responses into js-friendly "js api" + * objects that we can hand off to clients. *

    *

    */ public class LspTranslate { public static CompletionItem toJs( - io.deephaven.javascript.proto.dhinternal.io.deephaven.proto.console_pb.CompletionItem src) { + io.deephaven.javascript.proto.dhinternal.io.deephaven.proto.console_pb.CompletionItem src) { final CompletionItem item = new CompletionItem(); item.setStart((int) src.getStart()); item.setLength((int) src.getLength()); @@ -48,7 +48,7 @@ public static CompletionItem toJs( final JsArray edits = new JsArray<>(); final JsArray textEdits = - src.getAdditionalTextEditsList(); + src.getAdditionalTextEditsList(); for (int i = 0; i < textEdits.getLength(); i++) { edits.push(toJs(textEdits.getAt(i))); } @@ -57,7 +57,7 @@ public static CompletionItem toJs( } private static TextEdit toJs( - final io.deephaven.javascript.proto.dhinternal.io.deephaven.proto.console_pb.TextEdit src) { + final io.deephaven.javascript.proto.dhinternal.io.deephaven.proto.console_pb.TextEdit src) { final TextEdit item = new TextEdit(); item.text = src.getText(); item.range = toJs(src.getRange()); @@ -65,7 +65,7 @@ private static TextEdit toJs( } private static DocumentRange toJs( - final io.deephaven.javascript.proto.dhinternal.io.deephaven.proto.console_pb.DocumentRange range) { + final io.deephaven.javascript.proto.dhinternal.io.deephaven.proto.console_pb.DocumentRange range) { final DocumentRange item = new DocumentRange(); item.start = toJs(range.getStart()); item.end = toJs(range.getEnd()); @@ -73,7 +73,7 @@ private static DocumentRange toJs( } private static Position toJs( - final io.deephaven.javascript.proto.dhinternal.io.deephaven.proto.console_pb.Position src) { + final io.deephaven.javascript.proto.dhinternal.io.deephaven.proto.console_pb.Position src) { final Position item = new Position(); item.line = (int) src.getLine(); item.character = (int) src.getCharacter(); diff --git a/web/client-api/src/main/java/io/deephaven/web/DeephavenJsApiLinker.java b/web/client-api/src/main/java/io/deephaven/web/DeephavenJsApiLinker.java 
index 79ba42d02db..caaa6218a5e 100644 --- a/web/client-api/src/main/java/io/deephaven/web/DeephavenJsApiLinker.java +++ b/web/client-api/src/main/java/io/deephaven/web/DeephavenJsApiLinker.java @@ -22,13 +22,13 @@ public String getDescription() { } @Override - public ArtifactSet link(TreeLogger logger, LinkerContext context, ArtifactSet artifacts, - boolean onePermutation) throws UnableToCompleteException { + public ArtifactSet link(TreeLogger logger, LinkerContext context, ArtifactSet artifacts, boolean onePermutation) + throws UnableToCompleteException { return this.link(logger, context, artifacts); } public ArtifactSet link(TreeLogger logger, LinkerContext context, ArtifactSet artifacts) - throws UnableToCompleteException { + throws UnableToCompleteException { ArtifactSet toReturn = new ArtifactSet(artifacts); DefaultTextOutput out = new DefaultTextOutput(true); @@ -43,8 +43,7 @@ public ArtifactSet link(TreeLogger logger, LinkerContext context, ArtifactSet ar } if (results.size() > 1) { - logger.log(TreeLogger.ERROR, - "Expected 1 permutation, found " + results.size() + " permutations."); + logger.log(TreeLogger.ERROR, "Expected 1 permutation, found " + results.size() + " permutations."); throw new UnableToCompleteException(); } diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/BigDecimalWrapper.java b/web/client-api/src/main/java/io/deephaven/web/client/api/BigDecimalWrapper.java index 53e01053525..ac590d2bd29 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/BigDecimalWrapper.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/BigDecimalWrapper.java @@ -6,8 +6,7 @@ import java.math.BigDecimal; /** - * Wrap BigDecimal values for use in JS. Provides text formatting for display and access to the - * underlying value. + * Wrap BigDecimal values for use in JS. Provides text formatting for display and access to the underlying value. 
*/ public class BigDecimalWrapper { private final BigDecimal value; diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/BigIntegerWrapper.java b/web/client-api/src/main/java/io/deephaven/web/client/api/BigIntegerWrapper.java index 64e552fcf0a..96c409427c2 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/BigIntegerWrapper.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/BigIntegerWrapper.java @@ -6,8 +6,7 @@ import java.math.BigInteger; /** - * Wrap BigInteger values for use in JS. Provides text formatting for display and access to the - * underlying value. + * Wrap BigInteger values for use in JS. Provides text formatting for display and access to the underlying value. */ public class BigIntegerWrapper { private final BigInteger value; diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/Callbacks.java b/web/client-api/src/main/java/io/deephaven/web/client/api/Callbacks.java index 21e8024dd95..bfd24974d11 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/Callbacks.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/Callbacks.java @@ -14,12 +14,10 @@ */ public interface Callbacks { - static Promise promise(@Nullable HasEventHandling failHandler, - Consumer> t) { + static Promise promise(@Nullable HasEventHandling failHandler, Consumer> t) { return new Promise<>(( - Promise.PromiseExecutorCallbackFn.ResolveCallbackFn resolve, - Promise.PromiseExecutorCallbackFn.RejectCallbackFn reject) -> t - .accept(new Callback() { + Promise.PromiseExecutorCallbackFn.ResolveCallbackFn resolve, + Promise.PromiseExecutorCallbackFn.RejectCallbackFn reject) -> t.accept(new Callback() { @Override public void onFailure(T reason) { notNull(failHandler, t, reject).onInvoke(reason); @@ -33,9 +31,9 @@ public void onSuccess(S result) { } static RejectCallbackFn notNull( - @Nullable HasEventHandling failHandler, // system provided failHandler - Consumer realCallback, // success 
handler - RejectCallbackFn reject // promise-supplied failHandler + @Nullable HasEventHandling failHandler, // system provided failHandler + Consumer realCallback, // success handler + RejectCallbackFn reject // promise-supplied failHandler ) { if (reject == null) { return f -> failLog(failHandler, realCallback, f); @@ -66,8 +64,8 @@ static void failLog(HasEventHandling failHandler, Consumer from, F fai } /** - * Transform a bi-consumer into a callback. It is the caller's responsibility to fire - * "requestfailed" events as appropriate. + * Transform a bi-consumer into a callback. It is the caller's responsibility to fire "requestfailed" events as + * appropriate. */ static Callback of(BiConsumer from) { return new Callback() { @@ -98,8 +96,7 @@ static Promise grpcUnaryPromise(Consumer> t) { }); } - static void translateCallback(Callback callback, - Consumer> t) { + static void translateCallback(Callback callback, Consumer> t) { try { t.accept((fail, success) -> { if (fail != null) { diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/ClientConfiguration.java b/web/client-api/src/main/java/io/deephaven/web/client/api/ClientConfiguration.java index f45ad1fca3d..9e6053fb93a 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/ClientConfiguration.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/ClientConfiguration.java @@ -3,15 +3,15 @@ import elemental2.core.Uint8Array; /** - * A place to assemble various "services" we want to make ubiquitously available in the client by - * passing around a single object. + * A place to assemble various "services" we want to make ubiquitously available in the client by passing around a + * single object. */ public class ClientConfiguration { private static final byte EXPORT_PREFIX = 'e'; /** - * The next number to use when making a ticket. These values must always be positive, as zero is - * an invalid value, and negative values represent server-created tickets. 
+ * The next number to use when making a ticket. These values must always be positive, as zero is an invalid value, + * and negative values represent server-created tickets. */ private int next = 1; diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/Column.java b/web/client-api/src/main/java/io/deephaven/web/client/api/Column.java index df4f285de32..9541ad41ccc 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/Column.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/Column.java @@ -25,18 +25,16 @@ public class Column { private final int jsIndex; /** - * Specific to rollup tables when constituent columns are enabled. Used in toString(), but - * ignored for equals/hashcode, since it might be helpful for debugging, but could potentially - * confuse some comparisons between instances since this is set after the CTS is created, ready - * for use. + * Specific to rollup tables when constituent columns are enabled. Used in toString(), but ignored for + * equals/hashcode, since it might be helpful for debugging, but could potentially confuse some comparisons between + * instances since this is set after the CTS is created, ready for use. 
*/ private String constituentType; private String description; - public Column(int jsIndex, int index, Integer formatColumnIndex, Integer styleColumnIndex, - String type, String name, boolean isPartitionColumn, Integer formatStringColumnIndex, - String description) { + public Column(int jsIndex, int index, Integer formatColumnIndex, Integer styleColumnIndex, String type, String name, + boolean isPartitionColumn, Integer formatStringColumnIndex, String description) { this.jsIndex = jsIndex; this.index = index; this.formatColumnIndex = formatColumnIndex; @@ -139,13 +137,13 @@ public FilterValue filter() { @Override public String toString() { return "Column{" + - "index=" + index + - ", formatColumnIndex=" + formatColumnIndex + - ", styleColumnIndex=" + styleColumnIndex + - ", formatStringColumnIndex=" + formatStringColumnIndex + - ", type='" + type + '\'' + - ", name='" + name + '\'' + - '}'; + "index=" + index + + ", formatColumnIndex=" + formatColumnIndex + + ", styleColumnIndex=" + styleColumnIndex + + ", formatStringColumnIndex=" + formatStringColumnIndex + + ", type='" + type + '\'' + + ", name='" + name + '\'' + + '}'; } @Override @@ -160,14 +158,13 @@ public boolean equals(Object o) { if (index != column.index) return false; if (formatColumnIndex != null ? !formatColumnIndex.equals(column.formatColumnIndex) - : column.formatColumnIndex != null) + : column.formatColumnIndex != null) return false; if (styleColumnIndex != null ? !styleColumnIndex.equals(column.styleColumnIndex) - : column.styleColumnIndex != null) + : column.styleColumnIndex != null) return false; - if (formatStringColumnIndex != null - ? !formatStringColumnIndex.equals(column.formatStringColumnIndex) - : column.formatStringColumnIndex != null) + if (formatStringColumnIndex != null ? 
!formatStringColumnIndex.equals(column.formatStringColumnIndex) + : column.formatStringColumnIndex != null) return false; if (!type.equals(column.type)) return false; @@ -179,8 +176,7 @@ public int hashCode() { int result = index; result = 31 * result + (formatColumnIndex != null ? formatColumnIndex.hashCode() : 0); result = 31 * result + (styleColumnIndex != null ? styleColumnIndex.hashCode() : 0); - result = 31 * result - + (formatStringColumnIndex != null ? formatStringColumnIndex.hashCode() : 0); + result = 31 * result + (formatStringColumnIndex != null ? formatStringColumnIndex.hashCode() : 0); result = 31 * result + type.hashCode(); result = 31 * result + name.hashCode(); return result; diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/HasEventHandling.java b/web/client-api/src/main/java/io/deephaven/web/client/api/HasEventHandling.java index 2fe2be6f9ad..31cb31c38e3 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/HasEventHandling.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/HasEventHandling.java @@ -47,7 +47,7 @@ public RemoverFn addEventListener(String name, EventFn callback) { } else { if (hasListener(name, callback)) { JsLog.warn(logPrefix() + "You are double-adding the callback " + name + " : ", - callback + ", removing old instance."); + callback + ", removing old instance."); removeEventListener(name, callback); } listeners.push(callback); @@ -57,10 +57,9 @@ public RemoverFn addEventListener(String name, EventFn callback) { public void addEventListenerOneShot(String name, EventFn callback) { /* - * Hack to workaround how GWT creates js functions and manages binding "this". The "self" - * instance is actually _not_ the same object as "this", as it represents the JS Function - * instead of the Java instance - effectively, "self" is something like - * this::onEvent.bind(this). + * Hack to workaround how GWT creates js functions and manages binding "this". 
The "self" instance is actually + * _not_ the same object as "this", as it represents the JS Function instead of the Java instance - effectively, + * "self" is something like this::onEvent.bind(this). */ final class WrappedCallback implements EventFn { private EventFn self; @@ -127,15 +126,14 @@ public boolean hasListener(String name, EventFn fn) { public boolean removeEventListener(String name, EventFn callback) { final JsArray listeners = map.get(name); if (listeners == null) { - JsLog.warn( - logPrefix() + "Asked to remove an event listener which wasn't present, ignoring."); + JsLog.warn(logPrefix() + "Asked to remove an event listener which wasn't present, ignoring."); return false; } int index = listeners.indexOf(callback); if (index == -1) { JsLog.warn(logPrefix() - + "Asked to remove an event listener which wasn't present, ignoring. Present listeners for that event: ", - listeners); + + "Asked to remove an event listener which wasn't present, ignoring. Present listeners for that event: ", + listeners); return false; } // remove the item @@ -166,14 +164,12 @@ public void fireEvent(String type, Event e) { return; } if (map.has(e.type)) { - final JsArray callbacks = - Js.cast(JsArray.from((JsArrayLike) map.get(e.type))); + final JsArray callbacks = Js.cast(JsArray.from((JsArrayLike) map.get(e.type))); callbacks.forEach((item, ind, all) -> { try { item.onEvent(e); } catch (Throwable t) { - DomGlobal.console.error(logPrefix() + "User callback (", item, ") of type ", - type, " failed: ", t); + DomGlobal.console.error(logPrefix() + "User callback (", item, ") of type ", type, " failed: ", t); t.printStackTrace(); } return true; diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/JsColumnStatistics.java b/web/client-api/src/main/java/io/deephaven/web/client/api/JsColumnStatistics.java index a1441d24842..099e2e631e8 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/JsColumnStatistics.java +++ 
b/web/client-api/src/main/java/io/deephaven/web/client/api/JsColumnStatistics.java @@ -18,10 +18,9 @@ public class JsColumnStatistics { public enum StatType { // Note that a null format means default to columns formatting - COUNT("COUNT", "long"), SIZE("SIZE", "long"), UNIQUE_VALUES("UNIQUE VALUES", "long"), SUM( - "SUM", null), SUM_ABS("SUM (ABS)", null), AVG("AVG", "double"), AVG_ABS("AVG (ABS)", - "double"), MIN("MIN", - null), MIN_ABS("MIN (ABS)", null), MAX("MAX", null), MAX_ABS("MAX (ABS)", null); + COUNT("COUNT", "long"), SIZE("SIZE", "long"), UNIQUE_VALUES("UNIQUE VALUES", "long"), SUM("SUM", null), SUM_ABS( + "SUM (ABS)", null), AVG("AVG", "double"), AVG_ABS("AVG (ABS)", "double"), MIN("MIN", + null), MIN_ABS("MIN (ABS)", null), MAX("MAX", null), MAX_ABS("MAX (ABS)", null); private final String displayName; private final String formatType; @@ -43,7 +42,7 @@ public String getFormatType() { private static final Map STAT_TYPE_MAP = new HashMap<>(); static { Arrays.stream(StatType.values()) - .forEach(type -> STAT_TYPE_MAP.put(type.getDisplayName(), type.getFormatType())); + .forEach(type -> STAT_TYPE_MAP.put(type.getDisplayName(), type.getFormatType())); } private final JsMap statisticsMap; @@ -61,10 +60,9 @@ public JsColumnStatistics(ColumnStatistics statistics) { } statisticsMap.set(StatType.COUNT.getDisplayName(), (double) statistics.getCount()); if (statistics.getCount() > 0) { - statisticsMap.set(StatType.AVG.getDisplayName(), - statistics.getSum() / (double) statistics.getCount()); + statisticsMap.set(StatType.AVG.getDisplayName(), statistics.getSum() / (double) statistics.getCount()); statisticsMap.set(StatType.AVG_ABS.getDisplayName(), - statistics.getAbsSum() / (double) statistics.getCount()); + statistics.getAbsSum() / (double) statistics.getCount()); statisticsMap.set(StatType.MIN.getDisplayName(), statistics.getMin()); statisticsMap.set(StatType.MAX.getDisplayName(), statistics.getMax()); statisticsMap.set(StatType.MIN_ABS.getDisplayName(), 
statistics.getAbsMin()); @@ -73,16 +71,13 @@ public JsColumnStatistics(ColumnStatistics statistics) { } else if (statistics.getType() == ColumnStatistics.ColumnType.DATETIME) { statisticsMap.set(StatType.COUNT.getDisplayName(), (double) statistics.getCount()); if (statistics.getCount() > 0) { - statisticsMap.set(StatType.MIN.getDisplayName(), - new DateWrapper(statistics.getMinDateTime())); - statisticsMap.set(StatType.MAX.getDisplayName(), - new DateWrapper(statistics.getMaxDateTime())); + statisticsMap.set(StatType.MIN.getDisplayName(), new DateWrapper(statistics.getMinDateTime())); + statisticsMap.set(StatType.MAX.getDisplayName(), new DateWrapper(statistics.getMaxDateTime())); } } else { statisticsMap.set(StatType.COUNT.getDisplayName(), (double) statistics.getCount()); if (statistics.getType() == ColumnStatistics.ColumnType.COMPARABLE) { - statisticsMap.set(StatType.UNIQUE_VALUES.getDisplayName(), - (double) statistics.getNumUnique()); + statisticsMap.set(StatType.UNIQUE_VALUES.getDisplayName(), (double) statistics.getNumUnique()); } } @@ -90,10 +85,8 @@ public JsColumnStatistics(ColumnStatistics statistics) { if (statistics.getType() == ColumnStatistics.ColumnType.COMPARABLE) { final String[] keys = statistics.getUniqueKeys(); final long[] values = statistics.getUniqueValues(); - assert keys.length == values.length - : "Table Statistics Unique Value Count does not have the same" + - "number of keys and values. Keys = " + keys.length + ", Values = " - + values.length; + assert keys.length == values.length : "Table Statistics Unique Value Count does not have the same" + + "number of keys and values. 
Keys = " + keys.length + ", Values = " + values.length; for (int i = 0; i < keys.length; i++) { uniqueValues.set(keys[i], (double) values[i]); } diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/JsRangeSet.java b/web/client-api/src/main/java/io/deephaven/web/client/api/JsRangeSet.java index 322e1906efd..3b039a1f62e 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/JsRangeSet.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/JsRangeSet.java @@ -11,8 +11,7 @@ import java.util.stream.StreamSupport; /** - * Simple wrapper to emulate RangeSet/Index in JS, with the caveat that LongWrappers may make poor - * keys in plain JS. + * Simple wrapper to emulate RangeSet/Index in JS, with the caveat that LongWrappers may make poor keys in plain JS. */ public class JsRangeSet { private final RangeSet range; @@ -42,10 +41,9 @@ public static JsRangeSet ofRanges(JsRangeSet[] ranges) { @JsMethod(namespace = "dh.RangeSet", name = "ofSortedRanges") public static JsRangeSet ofSortedRanges(JsRangeSet[] ranges) { - Range[] rangeArray = Arrays.stream(ranges) - .flatMap(r -> StreamSupport.stream( - Spliterators.spliterator(r.range.rangeIterator(), Long.MAX_VALUE, 0), false)) - .toArray(Range[]::new); + Range[] rangeArray = Arrays.stream(ranges).flatMap( + r -> StreamSupport.stream(Spliterators.spliterator(r.range.rangeIterator(), Long.MAX_VALUE, 0), false)) + .toArray(Range[]::new); return new JsRangeSet(RangeSet.fromSortedRanges(rangeArray)); } @@ -57,11 +55,9 @@ public JsRangeSet(RangeSet range) { @JsMethod public JsIterator iterator() { return new JsIterator<>( - StreamSupport - .longStream(Spliterators.spliterator(range.indexIterator(), Long.MAX_VALUE, 0), - false) - .mapToObj(LongWrapper::of) - .iterator()); + StreamSupport.longStream(Spliterators.spliterator(range.indexIterator(), Long.MAX_VALUE, 0), false) + .mapToObj(LongWrapper::of) + .iterator()); } @JsProperty diff --git 
a/web/client-api/src/main/java/io/deephaven/web/client/api/JsTable.java b/web/client-api/src/main/java/io/deephaven/web/client/api/JsTable.java index fa9799f45c4..46e65d832ef 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/JsTable.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/JsTable.java @@ -56,31 +56,31 @@ import static io.deephaven.web.client.fu.LazyPromise.logError; /** - * TODO provide hooks into the event handlers so we can see if no one is listening any more and - * release the table handle/viewport. + * TODO provide hooks into the event handlers so we can see if no one is listening any more and release the table + * handle/viewport. */ public class JsTable extends HasEventHandling implements HasTableBinding, HasLifecycle { @JsProperty(namespace = "dh.Table") public static final String EVENT_SIZECHANGED = "sizechanged", - EVENT_UPDATED = "updated", - EVENT_ROWADDED = "rowadded", - EVENT_ROWREMOVED = "rowremoved", - EVENT_ROWUPDATED = "rowupdated", - EVENT_SORTCHANGED = "sortchanged", - EVENT_FILTERCHANGED = "filterchanged", - EVENT_CUSTOMCOLUMNSCHANGED = "customcolumnschanged", - EVENT_DISCONNECT = "disconnect", - EVENT_RECONNECT = "reconnect", - EVENT_RECONNECTFAILED = "reconnectfailed"; + EVENT_UPDATED = "updated", + EVENT_ROWADDED = "rowadded", + EVENT_ROWREMOVED = "rowremoved", + EVENT_ROWUPDATED = "rowupdated", + EVENT_SORTCHANGED = "sortchanged", + EVENT_FILTERCHANGED = "filterchanged", + EVENT_CUSTOMCOLUMNSCHANGED = "customcolumnschanged", + EVENT_DISCONNECT = "disconnect", + EVENT_RECONNECT = "reconnect", + EVENT_RECONNECTFAILED = "reconnectfailed"; @JsProperty(namespace = "dh.Table") public static final double SIZE_UNCOALESCED = -2; // indicates that the CTS has changed, "downstream" tables should take note public static final String INTERNAL_EVENT_STATECHANGED = "statechanged-internal", - // indicates that the "size listener" has gone off, thought possibly without a change in - // size, indicating a 
change in some table data - INTERNAL_EVENT_SIZELISTENER = "sizelistener-internal"; + // indicates that the "size listener" has gone off, thought possibly without a change in size, indicating a + // change in some table data + INTERNAL_EVENT_SIZELISTENER = "sizelistener-internal"; // Amount of debounce to use when eating snapshot events. public static final int DEBOUNCE_TIME = 20; @@ -109,13 +109,12 @@ public class JsTable extends HasEventHandling implements HasTableBinding, HasLif private TableSubscription nonViewportSub; /** - * Creates a new Table directly from an existing ClientTableState. The CTS manages all fetch - * operations, so this is just a simple constructor to get a table that points to the given - * state. + * Creates a new Table directly from an existing ClientTableState. The CTS manages all fetch operations, so this is + * just a simple constructor to get a table that points to the given state. */ public JsTable( - WorkerConnection workerConnection, - ClientTableState state) { + WorkerConnection workerConnection, + ClientTableState state) { this.subscriptionId = nextSubscriptionId++; this.workerConnection = workerConnection; onClosed = new ArrayList<>(); @@ -124,9 +123,8 @@ public JsTable( } /** - * Copy-constructor, used to build a new table instance based on the current handle/state of the - * current one, allowing not only sharing state, but also actual handle and viewport - * subscriptions. + * Copy-constructor, used to build a new table instance based on the current handle/state of the current one, + * allowing not only sharing state, but also actual handle and viewport subscriptions. 
* * @param table the original table to copy settings from */ @@ -157,7 +155,7 @@ public Promise batch(JsConsumer userCode) { userCode.apply(batcher); if (--batchDepth == 0) { return batcher.sendRequest() - .then(ignored -> Promise.resolve(JsTable.this)); + .then(ignored -> Promise.resolve(JsTable.this)); } else { return batcher.nestedPromise(this); } @@ -178,9 +176,9 @@ public Column[] findColumns(String[] keys) { } /** - * Returns the current state if active, or the last state which was active which represents the - * state we will return to if an error occurs with the state we're presently waiting on. This - * lets user code access the last known table size and columns. + * Returns the current state if active, or the last state which was active which represents the state we will return + * to if an error occurs with the state we're presently waiting on. This lets user code access the last known table + * size and columns. */ public ClientTableState lastVisibleState() { // Try and get the running state first @@ -220,8 +218,7 @@ public Promise inputTable() { return Js.uncheckedCast(Promise.reject("Table is not an InputTable")); } return new Promise<>((resolve, reject) -> { - // workerConnection.getServer().fetchInputTable(getHeadHandle(), Callbacks.of((success, - // fail) -> { + // workerConnection.getServer().fetchInputTable(getHeadHandle(), Callbacks.of((success, fail) -> { // if (fail == null) { // resolve.onInvoke(new JsInputTable(this, success.getKeys(), success.getValues())); // } else { @@ -235,8 +232,7 @@ public Promise inputTable() { @JsMethod public void close() { if (currentState == null) { - // deliberately avoiding JsLog so that it shows up (with stack trace) in developer's - // console + // deliberately avoiding JsLog so that it shows up (with stack trace) in developer's console JsLog.warn("Table.close() called twice, second call being ignored", this); return; } @@ -259,8 +255,8 @@ public void close() { public String[] getAttributes() { 
TableAttributesDefinition attrs = lastVisibleState().getTableDef().getAttributes(); return Stream.concat( - attrs.getAsMap().keySet().stream(), - Stream.of(attrs.getRemainingKeys())).toArray(String[]::new); + attrs.getAsMap().keySet().stream(), + Stream.of(attrs.getRemainingKeys())).toArray(String[]::new); } @JsMethod @@ -298,9 +294,9 @@ public Object getAttribute(String attributeName) { // ); throw new UnsupportedOperationException("getAttribute"); }, - "reading table from attribute with name " + attributeName) - .refetch(this, workerConnection.metadata()) - .then(cts -> Promise.resolve(new JsTable(workerConnection, cts))); + "reading table from attribute with name " + attributeName) + .refetch(this, workerConnection.metadata()) + .then(cts -> Promise.resolve(new JsTable(workerConnection, cts))); } // TODO: make these use Promise, so that if the tables list is only partially resolved, @@ -314,8 +310,7 @@ public JsArray getColumns() { @JsProperty public double getSize() { TableViewportSubscription subscription = subscriptions.get(getHandle()); - if (subscription != null - && subscription.getStatus() == TableViewportSubscription.Status.ACTIVE) { + if (subscription != null && subscription.getStatus() == TableViewportSubscription.Status.ACTIVE) { // only ask the viewport for the size if it is alive and ticking return subscription.size(); } @@ -333,8 +328,7 @@ public String getDescription() { @JsProperty public double getTotalSize() { TableViewportSubscription subscription = subscriptions.get(getHandle()); - if (subscription != null - && subscription.getStatus() == TableViewportSubscription.Status.ACTIVE) { + if (subscription != null && subscription.getStatus() == TableViewportSubscription.Status.ACTIVE) { // only ask the viewport for the size if it is alive and ticking return subscription.totalSize(); } @@ -411,8 +405,7 @@ public JsArray applyFilter(FilterCondition[] filter) { @SuppressWarnings("unusable-by-js") public JsArray applyCustomColumns(String[] 
customColumns) { - final List newCustomColumns = - CustomColumnDescriptor.from(customColumns); + final List newCustomColumns = CustomColumnDescriptor.from(customColumns); // take a look at the current custom columns so we can return it final ClientTableState current = state(); @@ -429,8 +422,7 @@ public JsArray applyCustomColumns(String[] customColumns) { batcher.customColumns(newCustomColumns); batcher.filter(current.getFilters()); batcher.sort(current.getSorts()); - }).catch_(logError( - () -> "Failed to apply custom columns: " + Arrays.toString(customColumns))); + }).catch_(logError(() -> "Failed to apply custom columns: " + Arrays.toString(customColumns))); } } @@ -453,44 +445,36 @@ public TableViewportSubscription setViewport(double firstRow, double lastRow) { /** * Overload for Java (since JS just omits the optional param) */ - public TableViewportSubscription setViewport(double firstRow, double lastRow, - JsArray columns) { + public TableViewportSubscription setViewport(double firstRow, double lastRow, JsArray columns) { return setViewport(firstRow, lastRow, columns, null); } @JsMethod - public TableViewportSubscription setViewport(double firstRow, double lastRow, - @JsOptional JsArray columns, @JsOptional Double updateIntervalMs) { - if (lastVisibleState().getTableDef().getAttributes() - .getTreeHierarchicalColumnName() != null) { - // we only need to check the last visible state since if it isn't a tree, our current - // state isnt either + public TableViewportSubscription setViewport(double firstRow, double lastRow, @JsOptional JsArray columns, + @JsOptional Double updateIntervalMs) { + if (lastVisibleState().getTableDef().getAttributes().getTreeHierarchicalColumnName() != null) { + // we only need to check the last visible state since if it isn't a tree, our current state isnt either throw new IllegalStateException( - "Cannot set a normal table viewport on a treetable - please re-fetch this as a treetable"); + "Cannot set a normal table viewport on a 
treetable - please re-fetch this as a treetable"); } Column[] columnsCopy = columns != null ? Js.uncheckedCast(columns.slice()) : null; ClientTableState currentState = state(); TableViewportSubscription activeSubscription = subscriptions.get(getHandle()); - if (activeSubscription != null - && activeSubscription.getStatus() != TableViewportSubscription.Status.DONE) { + if (activeSubscription != null && activeSubscription.getStatus() != TableViewportSubscription.Status.DONE) { // hasn't finished, lets reuse it - activeSubscription.setInternalViewport(firstRow, lastRow, columnsCopy, - updateIntervalMs); + activeSubscription.setInternalViewport(firstRow, lastRow, columnsCopy, updateIntervalMs); return activeSubscription; } else { - // In the past, we left the old sub going until the new one was ready, then started the - // new one. But now, - // we want to reference the old or the new as appropriate - until the new state is - // running, we keep pumping + // In the past, we left the old sub going until the new one was ready, then started the new one. But now, + // we want to reference the old or the new as appropriate - until the new state is running, we keep pumping // the old one, then cross over once we're able. - // We're not responsible here for shutting down the old one here - setState will do that - // after the new one + // We're not responsible here for shutting down the old one here - setState will do that after the new one // is running. 
// rewrap current state in a new one, when ready the viewport will be applied - TableViewportSubscription replacement = new TableViewportSubscription(firstRow, lastRow, - columnsCopy, updateIntervalMs, this); + TableViewportSubscription replacement = + new TableViewportSubscription(firstRow, lastRow, columnsCopy, updateIntervalMs, this); subscriptions.put(currentState.getHandle(), replacement); return replacement; @@ -505,8 +489,7 @@ public void setInternalViewport(double firstRow, double lastRow, Column[] column throw new IllegalArgumentException(firstRow + " < " + 0); } currentViewportData = null; - // we must wait for the latest stack entry that can add columns (so we get an appropriate - // BitSet) + // we must wait for the latest stack entry that can add columns (so we get an appropriate BitSet) state().setDesiredViewport(this, (long) firstRow, (long) lastRow, columns); } @@ -525,8 +508,7 @@ public Promise getInternalViewportData() { active.onRunning(state -> { if (currentViewportData == null) { // no viewport data received yet; let's setup a one-shot UPDATED event listener - addEventListenerOneShot(EVENT_UPDATED, - ignored -> promise.succeed(currentViewportData)); + addEventListenerOneShot(EVENT_UPDATED, ignored -> promise.succeed(currentViewportData)); } else { promise.succeed(currentViewportData); } @@ -542,8 +524,7 @@ public TableSubscription subscribe(JsArray columns) { } @JsMethod - public TableSubscription subscribe(JsArray columns, - @JsOptional Double updateIntervalMs) { + public TableSubscription subscribe(JsArray columns, @JsOptional Double updateIntervalMs) { assert nonViewportSub == null : "Can't directly subscribe to the 'private' table instance"; // make a new table with a pUT call, listen to the subscription there return new TableSubscription(columns, this, updateIntervalMs); @@ -562,8 +543,7 @@ public void internalSubscribe(JsArray columns, TableSubscription sub) { public Promise selectDistinct(Column[] columns) { final ClientTableState state 
= state(); // We are going to forget all configuration for the current state - // by just creating a new, fresh state. This should be an optional flatten()/copy() step - // instead. + // by just creating a new, fresh state. This should be an optional flatten()/copy() step instead. String[] columnNames = Arrays.stream(columns).map(Column::getName).toArray(String[]::new); final ClientTableState distinct = workerConnection.newState((c, cts, metadata) -> { SelectDistinctRequest request = new SelectDistinctRequest(); @@ -572,9 +552,9 @@ public Promise selectDistinct(Column[] columns) { request.setColumnNamesList(columnNames); workerConnection.tableServiceClient().selectDistinct(request, metadata, c::apply); }, - "selectDistinct " + Arrays.toString(columnNames)); + "selectDistinct " + Arrays.toString(columnNames)); return distinct.refetch(this, workerConnection.metadata()) - .then(cts -> Promise.resolve(new JsTable(workerConnection, cts))); + .then(cts -> Promise.resolve(new JsTable(workerConnection, cts))); } @JsMethod @@ -583,9 +563,9 @@ public Promise copy(boolean resolved) { LazyPromise promise = new LazyPromise<>(); final ClientTableState unresolved = state(); unresolved.onRunning(promise::succeed, promise::fail, - () -> promise.fail("Table failed or closed, copy could not complete")); + () -> promise.fail("Table failed or closed, copy could not complete")); return promise.asPromise(MAX_BATCH_TIME) - .then(s -> Promise.resolve(new JsTable(this))); + .then(s -> Promise.resolve(new JsTable(this))); } return Promise.resolve(new JsTable(this)); } @@ -619,16 +599,14 @@ private Promise fetchTotals(Object config, JsProvider { final ClientTableState target; - // we know this will get called at least once, immediately, so lastGood will never be - // null + // we know this will get called at least once, immediately, so lastGood will never be null if (isClosed()) { // source table was closed, we have to rely on lastGood... 
target = lastGood[0]; } else { target = state.valueOf(); // make sure we are only retained by one state at a time - // TODO: refactor subscription system to handle non-JsTable subscriptions w/ same - // one:one semantics, + // TODO: refactor subscription system to handle non-JsTable subscriptions w/ same one:one semantics, target.retain(directive); if (lastGood[0] != null && lastGood[0] != target) { lastGood[0].unretain(directive); @@ -636,8 +614,8 @@ private Promise fetchTotals(Object config, JsProvider fetchTotals(Object config, JsProvider result = new LazyPromise<>(); boolean[] downsample = {true}; - return totals.refetch(this, workerConnection.metadata()) // lastGood will always be non-null - // after this - .then(ready -> { - JsTable wrapped = new JsTable(workerConnection, ready); - // technically this is overkill, but it is more future-proofed than only listening - // for column changes - final RemoverFn remover = addEventListener( - INTERNAL_EVENT_STATECHANGED, - e -> { - if (wrapped.isClosed()) { - return; - } - // eat superfluous changes (wait until event loop settles before firing - // requests). - // IDS-2684 If you disable downsampling, you can lock up the entire - // websocket with some rapid - // table-state-changes that trigger downstream totals table changes. - // It probably makes more sense to move this downsampling to the internal - // event, - // or expose a public event that is already downsampled by a more - // sophisticated latch. - // (for example, a batch that can outlive a single event loop by using an - // internal table copy() - // which simply accrues state until the user decides to commit the - // modification). 
- if (downsample[0]) { - downsample[0] = false; - LazyPromise.runLater(() -> { - downsample[0] = true; - // IDS-2684 - comment out the four lines above to reproduce - // when ever the main table changes its state, reload the totals - // table from the new state - final ClientTableState existing = wrapped.state(); - final ClientTableState nextState = - workerConnection.newState(totalsFactory, summary); - JsLog.debug("Rebasing totals table", existing, " -> ", nextState, - " for ", wrapped); - wrapped.setState(nextState); - // If the wrapped table's state has changed (any filter / sort / - // columns applied), - // then we'll want to re-apply these conditions on top of the newly - // set state. - final boolean needsMutation = !existing.isEqual(ready); - - final ThenOnFulfilledCallbackFn restoreVp = running -> { - // now that we've (possibly) updated selection conditions, put - // back in any viewport. - result.onSuccess(JsTotalsTable::refreshViewport); - return null; - }; - final Promise promise = - nextState.refetch(this, workerConnection.metadata()); - if (needsMutation) { // nextState will be empty, so we might want to - // test for isEmpty() instead - wrapped.batch(b -> b.setConfig(existing)).then(restoreVp); - } else { - promise.then(restoreVp); + return totals.refetch(this, workerConnection.metadata()) // lastGood will always be non-null after this + .then(ready -> { + JsTable wrapped = new JsTable(workerConnection, ready); + // technically this is overkill, but it is more future-proofed than only listening for column + // changes + final RemoverFn remover = addEventListener( + INTERNAL_EVENT_STATECHANGED, + e -> { + if (wrapped.isClosed()) { + return; + } + // eat superfluous changes (wait until event loop settles before firing requests). + // IDS-2684 If you disable downsampling, you can lock up the entire websocket with some + // rapid + // table-state-changes that trigger downstream totals table changes. 
+ // It probably makes more sense to move this downsampling to the internal event, + // or expose a public event that is already downsampled by a more sophisticated latch. + // (for example, a batch that can outlive a single event loop by using an internal table + // copy() + // which simply accrues state until the user decides to commit the modification). + if (downsample[0]) { + downsample[0] = false; + LazyPromise.runLater(() -> { + downsample[0] = true; + // IDS-2684 - comment out the four lines above to reproduce + // when ever the main table changes its state, reload the totals table from the + // new state + final ClientTableState existing = wrapped.state(); + final ClientTableState nextState = + workerConnection.newState(totalsFactory, summary); + JsLog.debug("Rebasing totals table", existing, " -> ", nextState, " for ", + wrapped); + wrapped.setState(nextState); + // If the wrapped table's state has changed (any filter / sort / columns + // applied), + // then we'll want to re-apply these conditions on top of the newly set state. + final boolean needsMutation = !existing.isEqual(ready); + + final ThenOnFulfilledCallbackFn restoreVp = running -> { + // now that we've (possibly) updated selection conditions, put back in any + // viewport. 
+ result.onSuccess(JsTotalsTable::refreshViewport); + return null; + }; + final Promise promise = + nextState.refetch(this, workerConnection.metadata()); + if (needsMutation) { // nextState will be empty, so we might want to test for + // isEmpty() instead + wrapped.batch(b -> b.setConfig(existing)).then(restoreVp); + } else { + promise.then(restoreVp); + } + // IDS-2684 - Comment out the two lines below to reproduce + }); } - // IDS-2684 - Comment out the two lines below to reproduce }); - } - }); - wrapped.onClosed.add(remover::remove); - wrapped.onClosed.add(() -> lastGood[0].unretain(directive)); - onClosed.add(remover::remove); - final JsTotalsTable totalsTable = - new JsTotalsTable(wrapped, directive.serialize(), directive.groupBy); - result.succeed(totalsTable); - return result.asPromise(); - }); + wrapped.onClosed.add(remover::remove); + wrapped.onClosed.add(() -> lastGood[0].unretain(directive)); + onClosed.add(remover::remove); + final JsTotalsTable totalsTable = + new JsTotalsTable(wrapped, directive.serialize(), directive.groupBy); + result.succeed(totalsTable); + return result.asPromise(); + }); } private JsTotalsTableConfig getTotalsDirectiveFromOptionalConfig(Object config) { if (config == null) { - return JsTotalsTableConfig - .parse(lastVisibleState().getTableDef().getAttributes().getTotalsTableConfig()); + return JsTotalsTableConfig.parse(lastVisibleState().getTableDef().getAttributes().getTotalsTableConfig()); } else { if (config instanceof JsTotalsTableConfig) { return (JsTotalsTableConfig) config; @@ -742,8 +713,7 @@ private JsTotalsTableConfig getTotalsDirectiveFromOptionalConfig(Object config) // @JsMethod public Promise getGrandTotalsTable(/* @JsOptional */Object config) { // As in getTotalsTable, but this time we want to skip any filters - this could mean use the - // most-derived table which has no filter, or the least-derived table which has all custom - // columns. 
+ // most-derived table which has no filter, or the least-derived table which has all custom columns. // Currently, these two mean the same thing. return fetchTotals(config, () -> { ClientTableState unfiltered = state(); @@ -772,7 +742,7 @@ public Promise rollup(Object configObject) { // workerConnection.getServer().rollup(rollupRequest, c); throw new UnsupportedOperationException("rollup"); }, "rollup " + Global.JSON.stringify(config)).refetch(this, workerConnection.metadata()) - .then(state -> new JsTreeTable(state, workerConnection).finishFetch()); + .then(state -> new JsTreeTable(state, workerConnection).finishFetch()); } // TODO: #37: Need SmartKey support for this functionality @@ -796,7 +766,7 @@ public Promise treeTable(Object configObject) { // ); throw new UnsupportedOperationException("treeTable"); }, "treeTable " + Global.JSON.stringify(config)).refetch(this, workerConnection.metadata()) - .then(state -> new JsTreeTable(state, workerConnection).finishFetch()); + .then(state -> new JsTreeTable(state, workerConnection).finishFetch()); } @JsMethod @@ -810,12 +780,12 @@ public Promise freeze() { request.setStampColumnsList(new String[0]); workerConnection.tableServiceClient().snapshot(request, metadata, c::apply); }, "freeze").refetch(this, workerConnection.metadata()) - .then(state -> Promise.resolve(new JsTable(workerConnection, state))); + .then(state -> Promise.resolve(new JsTable(workerConnection, state))); } @JsMethod public Promise snapshot(JsTable rightHandSide, @JsOptional Boolean doInitialSnapshot, - @JsOptional String[] stampColumns) { + @JsOptional String[] stampColumns) { Objects.requireNonNull(rightHandSide, "Snapshot right-hand-side table"); final boolean realDoInitialSnapshot; if (doInitialSnapshot != null) { @@ -830,8 +800,8 @@ public Promise snapshot(JsTable rightHandSide, @JsOptional Boolean doIn // make sure we pass an actual string array realStampColums = Arrays.stream(stampColumns).toArray(String[]::new); } - final String fetchSummary 
= "snapshot(" + rightHandSide + ", " + doInitialSnapshot + ", " - + Arrays.toString(stampColumns) + ")"; + final String fetchSummary = + "snapshot(" + rightHandSide + ", " + doInitialSnapshot + ", " + Arrays.toString(stampColumns) + ")"; return workerConnection.newState((c, state, metadata) -> { SnapshotTableRequest request = new SnapshotTableRequest(); request.setLeftId(state().getHandle().makeTableReference()); @@ -842,14 +812,13 @@ public Promise snapshot(JsTable rightHandSide, @JsOptional Boolean doIn workerConnection.tableServiceClient().snapshot(request, metadata, c::apply); }, fetchSummary).refetch(this, workerConnection.metadata()) - .then(state -> Promise.resolve(new JsTable(workerConnection, state))); + .then(state -> Promise.resolve(new JsTable(workerConnection, state))); } @JsMethod @Deprecated - public Promise join(Object joinType, JsTable rightTable, - JsArray columnsToMatch, - @JsOptional JsArray columnsToAdd, @JsOptional Object asOfMatchRule) { + public Promise join(Object joinType, JsTable rightTable, JsArray columnsToMatch, + @JsOptional JsArray columnsToAdd, @JsOptional Object asOfMatchRule) { if (joinType.equals("AJ") || joinType.equals("RAJ")) { return asOfJoin(rightTable, columnsToMatch, columnsToAdd, (String) asOfMatchRule); } else if (joinType.equals("CROSS_JOIN")) { @@ -867,10 +836,10 @@ public Promise join(Object joinType, JsTable rightTable, @JsMethod public Promise asOfJoin(JsTable rightTable, JsArray columnsToMatch, - @JsOptional JsArray columnsToAdd, @JsOptional String asOfMatchRule) { + @JsOptional JsArray columnsToAdd, @JsOptional String asOfMatchRule) { if (rightTable.workerConnection != workerConnection) { throw new IllegalStateException( - "Table argument passed to join is not from the same worker as current table"); + "Table argument passed to join is not from the same worker as current table"); } return workerConnection.newState((c, state, metadata) -> { AsOfJoinTablesRequest request = new AsOfJoinTablesRequest(); @@ -880,22 
+849,21 @@ public Promise asOfJoin(JsTable rightTable, JsArray columnsToMa request.setColumnsToMatchList(columnsToMatch); request.setColumnsToAddList(columnsToAdd); if (asOfMatchRule != null) { - request.setAsOfMatchRule(Js.asPropertyMap(AsOfJoinTablesRequest.MatchRule) - .getAny(asOfMatchRule).asDouble()); + request.setAsOfMatchRule( + Js.asPropertyMap(AsOfJoinTablesRequest.MatchRule).getAny(asOfMatchRule).asDouble()); } workerConnection.tableServiceClient().asOfJoinTables(request, metadata, c::apply); - }, "asOfJoin(" + rightTable + ", " + columnsToMatch + ", " + columnsToAdd + "," - + asOfMatchRule + ")") - .refetch(this, workerConnection.metadata()) - .then(state -> Promise.resolve(new JsTable(workerConnection, state))); + }, "asOfJoin(" + rightTable + ", " + columnsToMatch + ", " + columnsToAdd + "," + asOfMatchRule + ")") + .refetch(this, workerConnection.metadata()) + .then(state -> Promise.resolve(new JsTable(workerConnection, state))); } @JsMethod public Promise crossJoin(JsTable rightTable, JsArray columnsToMatch, - @JsOptional JsArray columnsToAdd, @JsOptional Double reserve_bits) { + @JsOptional JsArray columnsToAdd, @JsOptional Double reserve_bits) { if (rightTable.workerConnection != workerConnection) { throw new IllegalStateException( - "Table argument passed to join is not from the same worker as current table"); + "Table argument passed to join is not from the same worker as current table"); } return workerConnection.newState((c, state, metadata) -> { CrossJoinTablesRequest request = new CrossJoinTablesRequest(); @@ -908,18 +876,17 @@ public Promise crossJoin(JsTable rightTable, JsArray columnsToM request.setReserveBits(reserve_bits); } workerConnection.tableServiceClient().crossJoinTables(request, metadata, c::apply); - }, "join(" + rightTable + ", " + columnsToMatch + ", " + columnsToAdd + "," + reserve_bits - + ")") - .refetch(this, workerConnection.metadata()) - .then(state -> Promise.resolve(new JsTable(workerConnection, state))); + }, "join(" 
+ rightTable + ", " + columnsToMatch + ", " + columnsToAdd + "," + reserve_bits + ")") + .refetch(this, workerConnection.metadata()) + .then(state -> Promise.resolve(new JsTable(workerConnection, state))); } @JsMethod public Promise exactJoin(JsTable rightTable, JsArray columnsToMatch, - @JsOptional JsArray columnsToAdd) { + @JsOptional JsArray columnsToAdd) { if (rightTable.workerConnection != workerConnection) { throw new IllegalStateException( - "Table argument passed to join is not from the same worker as current table"); + "Table argument passed to join is not from the same worker as current table"); } return workerConnection.newState((c, state, metadata) -> { ExactJoinTablesRequest request = new ExactJoinTablesRequest(); @@ -930,16 +897,16 @@ public Promise exactJoin(JsTable rightTable, JsArray columnsToM request.setColumnsToAddList(columnsToAdd); workerConnection.tableServiceClient().exactJoinTables(request, metadata, c::apply); }, "exactJoin(" + rightTable + ", " + columnsToMatch + ", " + columnsToAdd + ")") - .refetch(this, workerConnection.metadata()) - .then(state -> Promise.resolve(new JsTable(workerConnection, state))); + .refetch(this, workerConnection.metadata()) + .then(state -> Promise.resolve(new JsTable(workerConnection, state))); } @JsMethod public Promise leftJoin(JsTable rightTable, JsArray columnsToMatch, - @JsOptional JsArray columnsToAdd) { + @JsOptional JsArray columnsToAdd) { if (rightTable.workerConnection != workerConnection) { throw new IllegalStateException( - "Table argument passed to join is not from the same worker as current table"); + "Table argument passed to join is not from the same worker as current table"); } return workerConnection.newState((c, state, metadata) -> { LeftJoinTablesRequest request = new LeftJoinTablesRequest(); @@ -950,16 +917,16 @@ public Promise leftJoin(JsTable rightTable, JsArray columnsToMa request.setColumnsToAddList(columnsToAdd); workerConnection.tableServiceClient().leftJoinTables(request, metadata, 
c::apply); }, "leftJoin(" + rightTable + ", " + columnsToMatch + ", " + columnsToAdd + ")") - .refetch(this, workerConnection.metadata()) - .then(state -> Promise.resolve(new JsTable(workerConnection, state))); + .refetch(this, workerConnection.metadata()) + .then(state -> Promise.resolve(new JsTable(workerConnection, state))); } @JsMethod public Promise naturalJoin(JsTable rightTable, JsArray columnsToMatch, - @JsOptional JsArray columnsToAdd) { + @JsOptional JsArray columnsToAdd) { if (rightTable.workerConnection != workerConnection) { throw new IllegalStateException( - "Table argument passed to join is not from the same worker as current table"); + "Table argument passed to join is not from the same worker as current table"); } return workerConnection.newState((c, state, metadata) -> { NaturalJoinTablesRequest request = new NaturalJoinTablesRequest(); @@ -970,8 +937,8 @@ public Promise naturalJoin(JsTable rightTable, JsArray columnsT request.setColumnsToAddList(columnsToAdd); workerConnection.tableServiceClient().naturalJoinTables(request, metadata, c::apply); }, "naturalJoin(" + rightTable + ", " + columnsToMatch + ", " + columnsToAdd + ")") - .refetch(this, workerConnection.metadata()) - .then(state -> Promise.resolve(new JsTable(workerConnection, state))); + .refetch(this, workerConnection.metadata()) + .then(state -> Promise.resolve(new JsTable(workerConnection, state))); } @JsMethod @@ -982,16 +949,14 @@ public Promise byExternal(Object keys, @JsOptional Boolean dropKeys) { } else if (JsArray.isArray(keys)) { actualKeys = Js.asArrayLike(keys).asList().toArray(new String[0]); } else { - throw new IllegalArgumentException( - "Can't use keys argument as either a string or array of strings"); + throw new IllegalArgumentException("Can't use keys argument as either a string or array of strings"); } - // we don't validate that the keys are non-empty, since that is allowed, but ensure they are - // all columns + // we don't validate that the keys are non-empty, 
since that is allowed, but ensure they are all columns findColumns(actualKeys); return new TableMap(workerConnection, c -> { - // workerConnection.getServer().byExternal(state().getHandle(), dropKeys == null ? false - // : dropKeys, actualKeys, c); + // workerConnection.getServer().byExternal(state().getHandle(), dropKeys == null ? false : dropKeys, + // actualKeys, c); throw new UnsupportedOperationException("byExternal"); }).refetch(); } @@ -1000,11 +965,10 @@ public Promise byExternal(Object keys, @JsOptional Boolean dropKeys) { // @JsMethod public Promise getColumnStatistics(Column column) { return Callbacks.promise(null, c -> { - // workerConnection.getServer().getColumnStatisticsForTable(state().getHandle(), - // column.getName(), c); + // workerConnection.getServer().getColumnStatisticsForTable(state().getHandle(), column.getName(), c); throw new UnsupportedOperationException("getColumnStatistics"); }).then( - tableStatics -> Promise.resolve(new JsColumnStatistics(tableStatics))); + tableStatics -> Promise.resolve(new JsColumnStatistics(tableStatics))); } public void maybeRevive(ClientTableState state) { @@ -1029,11 +993,10 @@ public void die(Object error) { notifyDeath(this, error); } - public Promise downsample(LongWrapper[] zoomRange, int pixelCount, String xCol, - String[] yCols) { + public Promise downsample(LongWrapper[] zoomRange, int pixelCount, String xCol, String[] yCols) { JsLog.info("downsample", zoomRange, pixelCount, xCol, yCols); - final String fetchSummary = "downsample(" + Arrays.toString(zoomRange) + ", " + pixelCount - + ", " + xCol + ", " + Arrays.toString(yCols) + ")"; + final String fetchSummary = "downsample(" + Arrays.toString(zoomRange) + ", " + pixelCount + ", " + xCol + ", " + + Arrays.toString(yCols) + ")"; return workerConnection.newState((c, state, metadata) -> { RunChartDownsampleRequest downsampleRequest = new RunChartDownsampleRequest(); downsampleRequest.setPixelCount(pixelCount); @@ -1047,10 +1010,10 @@ public Promise 
downsample(LongWrapper[] zoomRange, int pixelCount, Stri downsampleRequest.setYColumnNamesList(yCols); downsampleRequest.setSourceId(state().getHandle().makeTableReference()); downsampleRequest.setResultId(state.getHandle().makeTicket()); - workerConnection.tableServiceClient().runChartDownsample(downsampleRequest, - workerConnection.metadata(), c::apply); + workerConnection.tableServiceClient().runChartDownsample(downsampleRequest, workerConnection.metadata(), + c::apply); }, fetchSummary).refetch(this, workerConnection.metadata()) - .then(state -> Promise.resolve(new JsTable(workerConnection, state))); + .then(state -> Promise.resolve(new JsTable(workerConnection, state))); } private final class Debounce { @@ -1064,12 +1027,12 @@ private final class Debounce { private final long maxRows; public Debounce( - TableTicket table, - SnapshotType snapshotType, - RangeSet includedRows, - BitSet columns, - Object[] dataColumns, - long maxRows) { + TableTicket table, + SnapshotType snapshotType, + RangeSet includedRows, + BitSet columns, + Object[] dataColumns, + long maxRows) { this.handle = table; this.type = snapshotType; this.includedRows = includedRows; @@ -1082,8 +1045,8 @@ public Debounce( public boolean isEqual(Debounce o) { if (type == o.type) { - // this is intentionally weird. We only want to debounce when one instance is column - // snapshot and the other is row snapshot, + // this is intentionally weird. We only want to debounce when one instance is column snapshot and the + // other is row snapshot, // so we consider two events of the same type to be incompatible with debouncing. 
return false; } @@ -1112,23 +1075,19 @@ public boolean isEqual(Debounce o) { private Debounce debounce; private void handleSnapshot(TableTicket table, SnapshotType snapshotType, RangeSet includedRows, - Object[] dataColumns, BitSet columns, long maxRows) { + Object[] dataColumns, BitSet columns, long maxRows) { assert table.equals(state().getHandle()) : "Table received incorrect snapshot"; - // if the type is initial_snapshot, we've already recorded the size, so only watch for the - // other two updates. - // note that this will sometimes result in multiple updates on startup, so we do this ugly - // debounce-dance. + // if the type is initial_snapshot, we've already recorded the size, so only watch for the other two updates. + // note that this will sometimes result in multiple updates on startup, so we do this ugly debounce-dance. // When IDS-2113 is fixed, we can likely remove this code. - JsLog.debug("Received snapshot for ", table, snapshotType, includedRows, dataColumns, - columns); - Debounce operation = - new Debounce(table, snapshotType, includedRows, columns, dataColumns, maxRows); + JsLog.debug("Received snapshot for ", table, snapshotType, includedRows, dataColumns, columns); + Debounce operation = new Debounce(table, snapshotType, includedRows, columns, dataColumns, maxRows); if (debounce == null) { debounce = operation; DomGlobal.setTimeout(ignored -> processSnapshot(), DEBOUNCE_TIME); } else if (debounce.isEqual(operation)) { - // If we think the problem is fixed, we can put `assert false` here for a while before - // deleting Debounce class + // If we think the problem is fixed, we can put `assert false` here for a while before deleting Debounce + // class JsLog.debug("Eating duplicated operation", debounce, operation); } else { processSnapshot(); @@ -1158,12 +1117,10 @@ public void handleSnapshot(TableTicket handle, TableSnapshot snapshot) { JsArray[] remappedData = new JsArray[dataColumns.length]; // remap dataColumns to the expected range for 
that table's viewport long lastRow = -1; - for (int col = viewport.getColumns().nextSetBit(0); col >= 0; col = - viewport.getColumns().nextSetBit(col + 1)) { + for (int col = viewport.getColumns().nextSetBit(0); col >= 0; col = viewport.getColumns().nextSetBit(col + 1)) { ColumnData dataColumn = dataColumns[col]; if (dataColumn == null) { - // skip this, at least one column requested by that table isn't present, waiting on - // a later update + // skip this, at least one column requested by that table isn't present, waiting on a later update // TODO when IDS-2138 is fixed stop throwing this data away return; } @@ -1182,8 +1139,7 @@ public void handleSnapshot(TableTicket handle, TableSnapshot snapshot) { break; } if (!includedRowsIterator.hasNext()) { - // we've reached the end, the viewport apparently goes past the end of what the - // server sent, + // we've reached the end, the viewport apparently goes past the end of what the server sent, // so there is another snapshot on its way // TODO when IDS-2138 is fixed stop throwing this data away return; @@ -1209,18 +1165,17 @@ public void handleSnapshot(TableTicket handle, TableSnapshot snapshot) { remappedData[col].push(data); dataIndex++;// increment for the next row - // Track how many rows were actually present, allowing the snapshot to stop before - // the viewport's end + // Track how many rows were actually present, allowing the snapshot to stop before the viewport's end lastRow = Math.max(lastRow, possibleMatch); } } // TODO correct this - assumes max one range per table viewport, and nothing skipped - RangeSet actualViewport = lastRow == -1 ? RangeSet.empty() - : RangeSet.ofRange(viewportRows.indexIterator().nextLong(), lastRow); + RangeSet actualViewport = + lastRow == -1 ? 
RangeSet.empty() : RangeSet.ofRange(viewportRows.indexIterator().nextLong(), lastRow); - handleSnapshot(handle, snapshot.getSnapshotType(), actualViewport, remappedData, - viewport.getColumns(), viewportRows.size()); + handleSnapshot(handle, snapshot.getSnapshotType(), actualViewport, remappedData, viewport.getColumns(), + viewportRows.size()); } @@ -1231,28 +1186,25 @@ protected void processSnapshot() { return; } if (debounce.state != currentState) { - JsLog.debug("Skipping snapshot because state has changed ", debounce.state, " != ", - currentState); + JsLog.debug("Skipping snapshot because state has changed ", debounce.state, " != ", currentState); return; } if (isClosed()) { JsLog.debug("Skipping snapshot because table is closed", this); return; } - JsArray viewportColumns = Js.uncheckedCast( - getColumns().filter((item, index, all) -> debounce.columns.get(item.getIndex()))); - ViewportData data = - new ViewportData(debounce.includedRows, debounce.dataColumns, viewportColumns, + JsArray viewportColumns = + Js.uncheckedCast(getColumns().filter((item, index, all) -> debounce.columns.get(item.getIndex()))); + ViewportData data = new ViewportData(debounce.includedRows, debounce.dataColumns, viewportColumns, currentState.getRowFormatColumn() == null ? NO_ROW_FORMAT_COLUMN - : currentState.getRowFormatColumn().getIndex(), + : currentState.getRowFormatColumn().getIndex(), debounce.maxRows); this.currentViewportData = data; CustomEventInit updatedEvent = CustomEventInit.create(); updatedEvent.setDetail(data); fireEvent(EVENT_UPDATED, updatedEvent); - // also fire rowadded events - TODO also fire some kind of remove event for now-missing - // rows? + // also fire rowadded events - TODO also fire some kind of remove event for now-missing rows? 
for (int i = 0; i < data.getRows().length; i++) { CustomEventInit addedEvent = CustomEventInit.create(); addedEvent.setDetail(wrap(data.getRows().getAt(i), i)); @@ -1294,14 +1246,14 @@ public void handleDelta(ClientTableState current, DeltaUpdates updates) { } final ViewportData vpd = currentViewportData; if (vpd == null) { - // if the current viewport data is null, we're waiting on an initial snapshot to - // arrive for a different part of the viewport + // if the current viewport data is null, we're waiting on an initial snapshot to arrive for a different + // part of the viewport JsLog.debug("Received delta while waiting for reinitialization"); return; } MergeResults mergeResults = vpd.merge(updates); if (mergeResults.added.size() == 0 && mergeResults.modified.size() == 0 - && mergeResults.removed.size() == 0) { + && mergeResults.removed.size() == 0) { return; } CustomEventInit event = CustomEventInit.create(); @@ -1310,8 +1262,8 @@ public void handleDelta(ClientTableState current, DeltaUpdates updates) { fireEvent(EVENT_UPDATED, event); // fire rowadded/rowupdated/rowremoved - // TODO when we keep more rows loaded than the user is aware of, check if a given row is - // actually in the viewport + // TODO when we keep more rows loaded than the user is aware of, check if a given row is actually in the + // viewport // here for (Integer index : mergeResults.added) { CustomEventInit addedEvent = CustomEventInit.create(); @@ -1355,8 +1307,8 @@ private ClientTableState getHeadState() { @Override public String toString() { if (isAlive()) { - return "Table { id=" + subscriptionId + " filters=[" + getFilter() + "], sort=[" - + getSort() + "], customColumns=[" + getCustomColumns() + "] }"; + return "Table { id=" + subscriptionId + " filters=[" + getFilter() + "], sort=[" + getSort() + + "], customColumns=[" + getCustomColumns() + "] }"; } return "Table { id=" + subscriptionId + " CLOSED }"; } @@ -1366,24 +1318,19 @@ public WorkerConnection getConnection() { } public void 
refreshViewport(ClientTableState state, Viewport vp) { - assert state() == state - : "Called refreshViewport with wrong state (" + state + " instead of " + state() + ")"; + assert state() == state : "Called refreshViewport with wrong state (" + state + " instead of " + state() + ")"; assert state.getResolution() == ClientTableState.ResolutionState.RUNNING - : "Do not call refreshViewport for a state that is not running! (" + state + ")"; + : "Do not call refreshViewport for a state that is not running! (" + state + ")"; currentViewportData = null; // ignore any deltas for past viewports workerConnection.scheduleCheck(state); - // now that we've made sure the server knows, if we already know that the viewport is beyond - // what exists, we - // can go ahead and fire an update event. We're in the onResolved call, so we know the - // handle has resolved - // and if size is not -1, then we've already at least gotten the initial snapshot - // (otherwise, that snapshot + // now that we've made sure the server knows, if we already know that the viewport is beyond what exists, we + // can go ahead and fire an update event. 
We're in the onResolved call, so we know the handle has resolved + // and if size is not -1, then we've already at least gotten the initial snapshot (otherwise, that snapshot // will be here soon, and will fire its own event) - if (state.getSize() != ClientTableState.SIZE_UNINITIALIZED - && state.getSize() <= vp.getRows().getFirstRow()) { + if (state.getSize() != ClientTableState.SIZE_UNINITIALIZED && state.getSize() <= vp.getRows().getFirstRow()) { JsLog.debug("Preparing to send a 'fake' update event since " + state.getSize() + "<=" - + vp.getRows().getFirstRow(), state); + + vp.getRows().getFirstRow(), state); LazyPromise.runLater(() -> { if (state != state()) { return; @@ -1402,27 +1349,25 @@ assert state() == state Column c = columns.getAt(i); dataColumns[c.getIndex()] = JsData.newArray(c.getType()); if (c.getFormatStringColumnIndex() != null) { - dataColumns[c.getFormatStringColumnIndex()] = - JsData.newArray("java.lang.String"); + dataColumns[c.getFormatStringColumnIndex()] = JsData.newArray("java.lang.String"); } if (c.getStyleColumnIndex() != null) { dataColumns[c.getStyleColumnIndex()] = JsData.newArray("long"); } } if (currentState.getRowFormatColumn() != null) { - dataColumns[currentState.getRowFormatColumn().getIndex()] = - JsData.newArray("long"); + dataColumns[currentState.getRowFormatColumn().getIndex()] = JsData.newArray("long"); } ViewportData data = new ViewportData(RangeSet.empty(), dataColumns, columns, - currentState.getRowFormatColumn() == null ? NO_ROW_FORMAT_COLUMN - : currentState.getRowFormatColumn().getIndex(), - 0); + currentState.getRowFormatColumn() == null ? 
NO_ROW_FORMAT_COLUMN + : currentState.getRowFormatColumn().getIndex(), + 0); this.currentViewportData = data; CustomEventInit updatedEvent = CustomEventInit.create(); updatedEvent.setDetail(data); - JsLog.debug("Sending 'fake' update event since " + state.getSize() + "<=" - + vp.getRows().getFirstRow(), vp, state); + JsLog.debug("Sending 'fake' update event since " + state.getSize() + "<=" + vp.getRows().getFirstRow(), + vp, state); fireEvent(EVENT_UPDATED, updatedEvent); }); } @@ -1438,8 +1383,7 @@ public void setState(final ClientTableState state) { if (state == currentState) { lastVisibleState = state; hasInputTable = s.getTableDef().getAttributes().isInputTable(); - // defer the size change so that is there is a viewport sub also waiting for - // onRunning, it gets it first + // defer the size change so that is there is a viewport sub also waiting for onRunning, it gets it first LazyPromise.runLater(() -> { if (state == state()) { setSize(state.getSize()); @@ -1454,13 +1398,12 @@ public void setState(final ClientTableState state) { // If already closed, we can ignore this, since we already cleaned those up if (!isClosed() && was != null && was != state()) { // if we held a subscription - TableViewportSubscription existingSubscription = - subscriptions.remove(was.getHandle()); - if (existingSubscription != null && existingSubscription - .getStatus() != TableViewportSubscription.Status.DONE) { + TableViewportSubscription existingSubscription = subscriptions.remove(was.getHandle()); + if (existingSubscription != null + && existingSubscription.getStatus() != TableViewportSubscription.Status.DONE) { JsLog.debug("closing old viewport", state(), existingSubscription.state()); - // with the replacement state successfully running, we can shut down the old - // viewport (unless something + // with the replacement state successfully running, we can shut down the old viewport (unless + // something // external retained it) existingSubscription.internalClose(); } @@ -1473,8 
+1416,8 @@ public void setState(final ClientTableState state) { historyChanged = !state.isAncestor(was); was.pause(this); JsLog.debug("Table state change (new history? ", historyChanged, ") " + - "from ", was.getHandle().toString(), was, - " to ", state.getHandle().toString(), state); + "from ", was.getHandle().toString(), was, + " to ", state.getHandle().toString(), state); } currentState = state; ActiveTableBinding active = state.getActiveBinding(this); @@ -1486,8 +1429,7 @@ public void setState(final ClientTableState state) { if (historyChanged) { // when the new state is not derived from the current state, - // then, when the new state succeeds, we will totally releaseTable the previous - // table, + // then, when the new state succeeds, we will totally releaseTable the previous table, // allowing it to be automatically released (if nobody else needs it). state.onRunning(success -> { if (isClosed()) { @@ -1495,13 +1437,13 @@ public void setState(final ClientTableState state) { return; } if (currentState != state) { - // we've already moved on from this state, cleanup() should manage it, don't - // release the ancestor + // we've already moved on from this state, cleanup() should manage it, don't release the + // ancestor return; } final boolean shouldRelease = !state().isAncestor(was); - JsLog.debug("History changing state update complete; release? ", shouldRelease, - " state: ", was, LazyString.of(was::toStringMinimal)); + JsLog.debug("History changing state update complete; release? 
", shouldRelease, " state: ", was, + LazyString.of(was::toStringMinimal)); if (shouldRelease) { was.releaseTable(this); } @@ -1512,13 +1454,13 @@ public void setState(final ClientTableState state) { return; } if (currentState != state) { - // we've already moved on from this state, cleanup() should manage it, - // don't release the ancestor + // we've already moved on from this state, cleanup() should manage it, don't release the + // ancestor return; } final boolean shouldRelease = !currentState.isAncestor(was); - JsLog.debug("History changing state update failed; release? ", - shouldRelease, " state: ", was, LazyString.of(was::toStringMinimal)); + JsLog.debug("History changing state update failed; release? ", shouldRelease, " state: ", was, + LazyString.of(was::toStringMinimal)); if (shouldRelease) { was.releaseTable(this); } @@ -1597,12 +1539,9 @@ public void setSize(double s) { this.size = s; TableViewportSubscription subscription = subscriptions.get(getHandle()); - if (changed && (subscription == null - || subscription.getStatus() == TableViewportSubscription.Status.DONE)) { - // If the size changed, and we have no subscription active, fire. Otherwise, we want to - // let the - // subscription itself manage this, so that the size changes are synchronized with data - // changes, + if (changed && (subscription == null || subscription.getStatus() == TableViewportSubscription.Status.DONE)) { + // If the size changed, and we have no subscription active, fire. Otherwise, we want to let the + // subscription itself manage this, so that the size changes are synchronized with data changes, // and consumers won't be confused by the table size not matching data. 
CustomEventInit event = CustomEventInit.create(); event.setDetail(s); diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/JsTableFetch.java b/web/client-api/src/main/java/io/deephaven/web/client/api/JsTableFetch.java index 0358d369946..52ec986d31e 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/JsTableFetch.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/JsTableFetch.java @@ -9,6 +9,6 @@ * Describe how to perform initial fetch for a table */ public interface JsTableFetch { - void fetch(JsBiConsumer callback, - ClientTableState newState, BrowserHeaders metadata); + void fetch(JsBiConsumer callback, ClientTableState newState, + BrowserHeaders metadata); } diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/JsTotalsTable.java b/web/client-api/src/main/java/io/deephaven/web/client/api/JsTotalsTable.java index d7bf5e21674..f428efef7a7 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/JsTotalsTable.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/JsTotalsTable.java @@ -11,14 +11,13 @@ import jsinterop.base.Js; /** - * Behaves like a Table, but doesn't expose all of its API for changing the internal state. Instead, - * state is driven by the upstream table - when it changes handle, this listens and updates its own - * handle accordingly. + * Behaves like a Table, but doesn't expose all of its API for changing the internal state. Instead, state is driven by + * the upstream table - when it changes handle, this listens and updates its own handle accordingly. * * Additionally, this is automatically subscribed to its one and only row, across all columns. * - * A new config is returned any time it is accessed, to prevent accidental mutation, and to allow it - * to be used as a template when fetching a new totals table, or changing the totals table in use. 
+ * A new config is returned any time it is accessed, to prevent accidental mutation, and to allow it to be used as a + * template when fetching a new totals table, or changing the totals table in use. */ public class JsTotalsTable { private final JsTable wrappedTable; @@ -34,9 +33,8 @@ public class JsTotalsTable { private Double updateIntervalMs; /** - * Table is wrapped to let us delegate calls to it, the directive is a serialized string, and - * the groupBy is copied when passed in, as well as when it is accessed, to prevent accidental - * mutation of the array. + * Table is wrapped to let us delegate calls to it, the directive is a serialized string, and the groupBy is copied + * when passed in, as well as when it is accessed, to prevent accidental mutation of the array. */ public JsTotalsTable(JsTable wrappedTable, String directive, JsArray groupBy) { this.wrappedTable = wrappedTable; @@ -59,7 +57,7 @@ public JsTotalsTableConfig getTotalsTableConfig() { @JsMethod public void setViewport(double firstRow, double lastRow, @JsOptional JsArray columns, - @JsOptional Double updateIntervalMs) { + @JsOptional Double updateIntervalMs) { this.firstRow = firstRow; this.lastRow = lastRow; this.columns = columns != null ? 
Js.uncheckedCast(columns.slice()) : null; diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/JsTotalsTableConfig.java b/web/client-api/src/main/java/io/deephaven/web/client/api/JsTotalsTableConfig.java index 1b9c4ea9549..61d5d895435 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/JsTotalsTableConfig.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/JsTotalsTableConfig.java @@ -18,32 +18,32 @@ public class JsTotalsTableConfig { @Deprecated // Use JsAggregationOperation instead public static final String COUNT = "Count", - MIN = "Min", - MAX = "Max", - SUM = "Sum", - ABS_SUM = "AbsSum", - VAR = "Var", - AVG = "Avg", - STD = "Std", - FIRST = "First", - LAST = "Last", - // ARRAY = "Array", - SKIP = "Skip"; + MIN = "Min", + MAX = "Max", + SUM = "Sum", + ABS_SUM = "AbsSum", + VAR = "Var", + AVG = "Avg", + STD = "Std", + FIRST = "First", + LAST = "Last", + // ARRAY = "Array", + SKIP = "Skip"; private static final List knownAggTypes = Arrays.asList( - JsAggregationOperation.COUNT, - JsAggregationOperation.MIN, - JsAggregationOperation.MAX, - JsAggregationOperation.SUM, - JsAggregationOperation.ABS_SUM, - JsAggregationOperation.VAR, - JsAggregationOperation.AVG, - JsAggregationOperation.STD, - JsAggregationOperation.FIRST, - JsAggregationOperation.LAST, - JsAggregationOperation.SKIP, - JsAggregationOperation.COUNT_DISTINCT, - JsAggregationOperation.DISTINCT, - JsAggregationOperation.UNIQUE); + JsAggregationOperation.COUNT, + JsAggregationOperation.MIN, + JsAggregationOperation.MAX, + JsAggregationOperation.SUM, + JsAggregationOperation.ABS_SUM, + JsAggregationOperation.VAR, + JsAggregationOperation.AVG, + JsAggregationOperation.STD, + JsAggregationOperation.FIRST, + JsAggregationOperation.LAST, + JsAggregationOperation.SKIP, + JsAggregationOperation.COUNT_DISTINCT, + JsAggregationOperation.DISTINCT, + JsAggregationOperation.UNIQUE); public boolean showTotalsByDefault = false; public boolean showGrandTotalsByDefault 
= false; @@ -83,8 +83,8 @@ public JsTotalsTableConfig(JsPropertyMap source) { } /** - * Implementation from TotalsTableBuilder.fromDirective, plus changes required to make this able - * to act on plan JS objects/arrays. + * Implementation from TotalsTableBuilder.fromDirective, plus changes required to make this able to act on plan JS + * objects/arrays. * * Note that this omits groupBy for now, until the server directive format supports it! */ @@ -114,8 +114,8 @@ public static JsTotalsTableConfig parse(String configString) { continue; final String[] kv = columnDirective.split("="); if (kv.length != 2) { - throw new IllegalArgumentException("Invalid Totals Table: " + configString - + ", bad column " + columnDirective); + throw new IllegalArgumentException( + "Invalid Totals Table: " + configString + ", bad column " + columnDirective); } final String[] operations = kv[1].split(":"); builder.operationMap.set(kv[0], new JsArray<>()); @@ -138,18 +138,17 @@ private static void checkOperation(String op) { @Override public String toString() { return "JsTotalsTableConfig{" + - "showTotalsByDefault=" + showTotalsByDefault + - ", showGrandTotalsByDefault=" + showGrandTotalsByDefault + - ", defaultOperation='" + defaultOperation + '\'' + - ", operationMap=" + Global.JSON.stringify(operationMap) + // Object.create(null) has no - // valueOf - ", groupBy=" + groupBy + - '}'; + "showTotalsByDefault=" + showTotalsByDefault + + ", showGrandTotalsByDefault=" + showGrandTotalsByDefault + + ", defaultOperation='" + defaultOperation + '\'' + + ", operationMap=" + Global.JSON.stringify(operationMap) + // Object.create(null) has no valueOf + ", groupBy=" + groupBy + + '}'; } /** - * Implementation from TotalsTableBuilder.buildDirective(), plus a minor change to iterate JS - * arrays/objects correctly. + * Implementation from TotalsTableBuilder.buildDirective(), plus a minor change to iterate JS arrays/objects + * correctly. 
* * Note that this omits groupBy until the server directive format supports it! */ @@ -157,10 +156,9 @@ public String toString() { public String serialize() { final StringBuilder builder = new StringBuilder(); builder.append(Boolean.toString(showTotalsByDefault)).append(",") - .append(Boolean.toString(showGrandTotalsByDefault)).append(",").append(defaultOperation) - .append(";"); - operationMap.forEach(key -> builder.append(key).append("=") - .append(operationMap.get(key).join(":")).append(",")); + .append(Boolean.toString(showGrandTotalsByDefault)).append(",").append(defaultOperation).append(";"); + operationMap + .forEach(key -> builder.append(key).append("=").append(operationMap.get(key).join(":")).append(",")); return builder.toString(); } diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/LazyString.java b/web/client-api/src/main/java/io/deephaven/web/client/api/LazyString.java index e17de1cfde6..420df624bab 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/LazyString.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/LazyString.java @@ -4,8 +4,7 @@ import io.deephaven.web.shared.fu.JsProvider; /** - * Useful for logging; toString calls a supplier so you can easily hide expensive toString() behind - * a lambda + * Useful for logging; toString calls a supplier so you can easily hide expensive toString() behind a lambda */ public class LazyString { private final JsProvider expensiveThing; diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/LocalDateWrapper.java b/web/client-api/src/main/java/io/deephaven/web/client/api/LocalDateWrapper.java index b6527066408..31e72e07711 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/LocalDateWrapper.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/LocalDateWrapper.java @@ -7,8 +7,7 @@ import javax.annotation.Nonnull; /** - * Wrap LocalDate values for use in JS. 
Provides text formatting for display and access to the - * underlying value. + * Wrap LocalDate values for use in JS. Provides text formatting for display and access to the underlying value. */ public class LocalDateWrapper { private final static NumberFormat YEAR_FORMAT = NumberFormat.getFormat("0000"); @@ -62,7 +61,7 @@ public LocalDate getWrapped() { @Override public String toString() { return YEAR_FORMAT.format(getYear()) - + "-" + MONTH_DAY_FORMAT.format(getMonthValue()) - + "-" + MONTH_DAY_FORMAT.format(getDayOfMonth()); + + "-" + MONTH_DAY_FORMAT.format(getMonthValue()) + + "-" + MONTH_DAY_FORMAT.format(getDayOfMonth()); } } diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/LocalTimeWrapper.java b/web/client-api/src/main/java/io/deephaven/web/client/api/LocalTimeWrapper.java index 5d1066d3d16..86a8554e930 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/LocalTimeWrapper.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/LocalTimeWrapper.java @@ -7,8 +7,7 @@ import javax.annotation.Nonnull; /** - * Wrap LocalTime values for use in JS. Provides text formatting for display and access to the - * underlying value. + * Wrap LocalTime values for use in JS. Provides text formatting for display and access to the underlying value. */ public class LocalTimeWrapper { private final static NumberFormat TWO_DIGIT_FORMAT = NumberFormat.getFormat("00"); @@ -53,8 +52,8 @@ public LocalTime getWrapped() { @Override public String toString() { return TWO_DIGIT_FORMAT.format(localTime.getHour()) - + ":" + TWO_DIGIT_FORMAT.format(localTime.getMinute()) - + ":" + TWO_DIGIT_FORMAT.format(localTime.getSecond()) - + "." + NANOS_FORMAT.format(localTime.getNano()); + + ":" + TWO_DIGIT_FORMAT.format(localTime.getMinute()) + + ":" + TWO_DIGIT_FORMAT.format(localTime.getSecond()) + + "." 
+ NANOS_FORMAT.format(localTime.getNano()); } } diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/QueryConnectable.java b/web/client-api/src/main/java/io/deephaven/web/client/api/QueryConnectable.java index 6d5ce10039c..da30f5afcf9 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/QueryConnectable.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/QueryConnectable.java @@ -30,11 +30,10 @@ import static io.deephaven.web.shared.fu.PromiseLike.CANCELLATION_MESSAGE; /** - * JS-exposed supertype handling details about connecting to a deephaven query worker. Wraps the - * WorkerConnection instance, which manages the connection to the API server. + * JS-exposed supertype handling details about connecting to a deephaven query worker. Wraps the WorkerConnection + * instance, which manages the connection to the API server. */ -public abstract class QueryConnectable> - extends HasEventHandling { +public abstract class QueryConnectable> extends HasEventHandling { public interface AuthTokenPromiseSupplier extends Supplier> { default AuthTokenPromiseSupplier withInitialToken(ConnectToken initialToken) { AuthTokenPromiseSupplier original = this; @@ -55,8 +54,8 @@ public Promise get() { static AuthTokenPromiseSupplier oneShot(ConnectToken initialToken) { // noinspection unchecked return ((AuthTokenPromiseSupplier) () -> (Promise) Promise - .reject("Only one token provided, cannot reconnect")) - .withInitialToken(initialToken); + .reject("Only one token provided, cannot reconnect")) + .withInitialToken(initialToken); } } @@ -88,9 +87,9 @@ public QueryConnectable(Supplier> authTokenPromiseSupplier public void notifyConnectionError(ResponseStreamWrapper.Status status) { CustomEventInit event = CustomEventInit.create(); event.setDetail(JsPropertyMap.of( - "status", status.getCode(), - "details", status.getDetails(), - "metadata", status.getMetadata())); + "status", status.getCode(), + "details", status.getDetails(), + 
"metadata", status.getMetadata())); fireEvent(HACK_CONNECTION_FAILURE, event); } @@ -109,8 +108,8 @@ private Promise onConnected() { } return new Promise<>((resolve, reject) -> addEventListenerOneShot( - EventPair.of(EVENT_CONNECT, e -> resolve.onInvoke((Void) null)), - EventPair.of(EVENT_DISCONNECT, e -> reject.onInvoke("Connection disconnected")))); + EventPair.of(EVENT_CONNECT, e -> resolve.onInvoke((Void) null)), + EventPair.of(EVENT_DISCONNECT, e -> reject.onInvoke("Connection disconnected")))); } @JsIgnore @@ -164,8 +163,7 @@ public CancellablePromise startSession(String type) { StartConsoleRequest request = new StartConsoleRequest(); request.setSessionType(type); request.setResultId(ticket); - connection.get().consoleServiceClient().startConsole(request, - connection.get().metadata(), callback::apply); + connection.get().consoleServiceClient().startConsole(request, connection.get().metadata(), callback::apply); })).then(result -> { promise.succeed(ticket); return null; @@ -191,8 +189,8 @@ public CancellablePromise startSession(String type) { public Promise> getConsoleTypes() { Promise promise = Callbacks.grpcUnaryPromise(callback -> { GetConsoleTypesRequest request = new GetConsoleTypesRequest(); - connection.get().consoleServiceClient().getConsoleTypes(request, - connection.get().metadata(), callback::apply); + connection.get().consoleServiceClient().getConsoleTypes(request, connection.get().metadata(), + callback::apply); }); return promise.then(result -> Promise.resolve(result.getConsoleTypesList())); @@ -216,7 +214,7 @@ public void connected() { fireEvent(EVENT_RECONNECT); } else { DomGlobal.console.log(logPrefix() - + "Query reconnected (to prevent this log message, handle the EVENT_RECONNECT event)"); + + "Query reconnected (to prevent this log message, handle the EVENT_RECONNECT event)"); } } } @@ -245,7 +243,7 @@ public void disconnected() { this.fireEvent(QueryConnectable.EVENT_DISCONNECT); } else { DomGlobal.console.log(logPrefix() - + "Query 
disconnected (to prevent this log message, handle the EVENT_DISCONNECT event)"); + + "Query disconnected (to prevent this log message, handle the EVENT_DISCONNECT event)"); } } } diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/ReconnectState.java b/web/client-api/src/main/java/io/deephaven/web/client/api/ReconnectState.java index e71d9cc9bdb..2764f66d988 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/ReconnectState.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/ReconnectState.java @@ -5,13 +5,12 @@ import io.deephaven.web.shared.fu.JsRunnable; /** - * Util class using exponential backoff to keep trying to connect to a server. Any disconnect should - * call failed(), and the given retry callback will be invoked as appropriate. Once connection is - * established, success() should be invoked to clear the reset tries attempted. + * Util class using exponential backoff to keep trying to connect to a server. Any disconnect should call failed(), and + * the given retry callback will be invoked as appropriate. Once connection is established, success() should be invoked + * to clear the reset tries attempted. * - * Max backoff is doubled each failure, to a max of one minute. A random value between 0 and that - * backoff is selected to wait before trying again, to avoid clients all attempting to - * simultaneously reconnect. + * Max backoff is doubled each failure, to a max of one minute. A random value between 0 and that backoff is selected to + * wait before trying again, to avoid clients all attempting to simultaneously reconnect. * * A maximum number of times to try can be specified, otherwise defaults to MAX_VALUE. */ @@ -41,8 +40,8 @@ public ReconnectState(int maxTries, JsRunnable retry) { } /** - * Call once it is time to connect for the first time, will invoke the retry function. Can also - * be called after failure to start trying to connect fresh. 
+ * Call once it is time to connect for the first time, will invoke the retry function. Can also be called after + * failure to start trying to connect fresh. */ public void initialConnection() { if (state == State.Connecting || state == State.Reconnecting || state == State.Connected) { @@ -87,8 +86,8 @@ public void failed() { // randomly pick a delay, exponentially back off based on number of tries // https://en.wikipedia.org/wiki/Exponential_backoff double delay = Math.random() * Math.min( - MIN_BACKOFF_MILLIS * Math.pow(2, currentTry - 1), - MAX_BACKOFF_MILLIS // don't go above the max delay + MIN_BACKOFF_MILLIS * Math.pow(2, currentTry - 1), + MAX_BACKOFF_MILLIS // don't go above the max delay ); JsLog.debug("Attempting reconnect in ", delay, "ms", this); cancel = DomGlobal.setTimeout(ignore -> { @@ -99,8 +98,7 @@ public void failed() { } /** - * After the connection has been deliberately closed, call this to prepare for a later - * connection. + * After the connection has been deliberately closed, call this to prepare for a later connection. */ public void disconnected() { state = State.Disconnected; diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/ResponseStreamWrapper.java b/web/client-api/src/main/java/io/deephaven/web/client/api/ResponseStreamWrapper.java index 3b82ca4fa1e..cf20167e9c4 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/ResponseStreamWrapper.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/ResponseStreamWrapper.java @@ -10,9 +10,8 @@ import jsinterop.base.Js; /** - * Java wrapper to deal with the distinct ResponseStream types that are emitted. Provides strongly - * typed methods for cleaner Java consumption, that can be used to represent any of the structural - * types that are used for grpc methods. + * Java wrapper to deal with the distinct ResponseStream types that are emitted. 
Provides strongly typed methods for + * cleaner Java consumption, that can be used to represent any of the structural types that are used for grpc methods. * * @param payload that is emitted from the stream */ diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/Sort.java b/web/client-api/src/main/java/io/deephaven/web/client/api/Sort.java index 221dec92854..9952391a328 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/Sort.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/Sort.java @@ -7,11 +7,11 @@ public class Sort { @JsProperty(namespace = "dh.Sort") public static final String ASCENDING = "ASC", - DESCENDING = "DESC", - REVERSE = "REVERSE"; + DESCENDING = "DESC", + REVERSE = "REVERSE"; private static final Column REVERSE_COLUMN = - new Column(-1, -1, null, null, "", "__REVERSE_COLUMN", false, null, null); + new Column(-1, -1, null, null, "", "__REVERSE_COLUMN", false, null, null); private final Column column; private String direction; @@ -68,8 +68,7 @@ public Sort abs() { public SortDescriptor makeDescriptor() { if (direction == null) { - throw new IllegalStateException( - "Cannot perform a sort without a direction, please call desc() or asc()"); + throw new IllegalStateException("Cannot perform a sort without a direction, please call desc() or asc()"); } SortDescriptor descriptor = new SortDescriptor(); descriptor.setIsAbsolute(isAbs()); @@ -92,10 +91,10 @@ public SortDescriptor makeDescriptor() { @Override public String toString() { return "Sort{" + - "column=" + column + - ", direction='" + direction + '\'' + - ", abs=" + abs + - '}'; + "column=" + column + + ", direction='" + direction + '\'' + + ", abs=" + abs + + '}'; } @Override diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/TableMap.java b/web/client-api/src/main/java/io/deephaven/web/client/api/TableMap.java index 9085214b3aa..6461ea60f0a 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/TableMap.java +++ 
b/web/client-api/src/main/java/io/deephaven/web/client/api/TableMap.java @@ -73,19 +73,17 @@ public int hashCode() { } public static final String EVENT_KEYADDED = "keyadded", - EVENT_DISCONNECT = JsTable.EVENT_DISCONNECT, - EVENT_RECONNECT = JsTable.EVENT_RECONNECT, - EVENT_RECONNECTFAILED = JsTable.EVENT_RECONNECTFAILED; + EVENT_DISCONNECT = JsTable.EVENT_DISCONNECT, + EVENT_RECONNECT = JsTable.EVENT_RECONNECT, + EVENT_RECONNECTFAILED = JsTable.EVENT_RECONNECTFAILED; private final WorkerConnection workerConnection; private final Consumer> fetch; private TableMapHandle tableMapHandle; - // Represents the sorta-kinda memoized results, tables that we've already locally fetched from - // the tablemap, - // and if all references to a table are released, entries here will be replaced with unresolved - // instances so + // Represents the sorta-kinda memoized results, tables that we've already locally fetched from the tablemap, + // and if all references to a table are released, entries here will be replaced with unresolved instances so // we don't leak server references or memory. private final Map>> tables = new HashMap<>(); @@ -99,8 +97,7 @@ public TableMap(WorkerConnection workerConnection, String tableMapName) { } @JsIgnore - public TableMap(WorkerConnection workerConnection, - Consumer> fetch) { + public TableMap(WorkerConnection workerConnection, Consumer> fetch) { this.workerConnection = workerConnection; this.fetch = fetch; } @@ -133,8 +130,7 @@ public Promise refetch() { } public Promise getTable(Object key) { - // Every caller gets a fresh table instance, and when all are closed, the CTS will be - // released. + // Every caller gets a fresh table instance, and when all are closed, the CTS will be released. // See #put for how that is tracked. 
final JsLazy> entry = tables.get(LocalKey.of(key)); if (entry == null) { @@ -149,8 +145,8 @@ public Promise getMergedTable() { // workerConnection.getServer().getMergedTableMap(tableMapHandle, cts.getHandle(), c); throw new UnsupportedOperationException("getMergedTableMap"); }, "tablemap merged table") - .refetch(this, workerConnection.metadata()) - .then(cts -> Promise.resolve(new JsTable(cts.getConnection(), cts))); + .refetch(this, workerConnection.metadata()) + .then(cts -> Promise.resolve(new JsTable(cts.getConnection(), cts))); } public JsSet getKeys() { @@ -181,28 +177,25 @@ public void notifyKeyAdded(Object key) { protected void put(Object key, LocalKey localKey) { tables.put(localKey, JsLazy.of(() -> { - // If we've entered this lambda, the JsLazy is being used, so we need to go ahead and - // get the tablehandle + // If we've entered this lambda, the JsLazy is being used, so we need to go ahead and get the tablehandle final ClientTableState entry = workerConnection.newState((c, cts, metadata) -> { // if (key == null || key instanceof String) { - // workerConnection.getServer().getTableMapStringEntry(tableMapHandle, - // cts.getHandle(), (String) key, c); + // workerConnection.getServer().getTableMapStringEntry(tableMapHandle, cts.getHandle(), (String) key, + // c); // } else { - // workerConnection.getServer().getTableMapStringArrayEntry(tableMapHandle, - // cts.getHandle(), (String[]) key, c); + // workerConnection.getServer().getTableMapStringArrayEntry(tableMapHandle, cts.getHandle(), (String[]) + // key, c); // } throw new UnsupportedOperationException("getTableMapEntry"); }, - "tablemap key " + key); + "tablemap key " + key); - // later, when the CTS is released, remove this "table" from the map and replace with an - // unresolved JsLazy + // later, when the CTS is released, remove this "table" from the map and replace with an unresolved JsLazy entry.onRunning(ignore -> { }, ignore -> { }, () -> put(key, localKey)); - // we'll make a table to return 
later, this func here just produces the JsLazy of the - // CTS + // we'll make a table to return later, this func here just produces the JsLazy of the CTS return entry.refetch(this, workerConnection.metadata()); })); } diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/TableTicket.java b/web/client-api/src/main/java/io/deephaven/web/client/api/TableTicket.java index bc0e8bd6a5c..e8705964d52 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/TableTicket.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/TableTicket.java @@ -6,15 +6,14 @@ import io.deephaven.javascript.proto.dhinternal.io.deephaven.proto.table_pb.TableReference; /** - * Replacement for TableHandle, wraps up Ticket plus current export state. We only consider the - * lower bytes for hashing (since until we've got millions of tickets it won't matter). + * Replacement for TableHandle, wraps up Ticket plus current export state. We only consider the lower bytes for hashing + * (since until we've got millions of tickets it won't matter). 
*/ public class TableTicket { /** - * UNKNOWN: 0, PENDING: 1, PUBLISHING: 2, QUEUED: 3, EXPORTED: 4, RELEASED: 5, CANCELLED: 6, - * FAILED: 7, DEPENDENCY_FAILED: 8, DEPENDENCY_NEVER_FOUND: 9 DEPENDENCY_CANCELLED: 10 - * DEPENDENCY_RELEASED: 11 + * UNKNOWN: 0, PENDING: 1, PUBLISHING: 2, QUEUED: 3, EXPORTED: 4, RELEASED: 5, CANCELLED: 6, FAILED: 7, + * DEPENDENCY_FAILED: 8, DEPENDENCY_NEVER_FOUND: 9 DEPENDENCY_CANCELLED: 10 DEPENDENCY_RELEASED: 11 */ public enum State { UNKNOWN, PENDING, PUBLISHING, QUEUED, EXPORTED, RELEASED, CANCELLED, FAILED, DEPENDENCY_FAILED, DEPENDENCY_NEVER_FOUND, DEPENDENCY_CANCELLED, DEPENDENCY_RELEASED; @@ -87,10 +86,10 @@ public FlightDescriptor makeFlightDescriptor() { @Override public String toString() { return "TableTicket{" + - "ticket=" + ticket + - ", state=" + state + - ", isConnected=" + isConnected + - '}'; + "ticket=" + ticket + + ", state=" + state + + ", isConnected=" + isConnected + + '}'; } @Override diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/WorkerConnection.java b/web/client-api/src/main/java/io/deephaven/web/client/api/WorkerConnection.java index 9e647760d84..2ee0d36bd25 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/WorkerConnection.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/WorkerConnection.java @@ -69,21 +69,19 @@ import static io.deephaven.web.client.api.barrage.BarrageUtils.*; /** - * Non-exported class, manages the connection to a given worker server. Exported types like - * QueryInfo and Table will refer to this, and allow us to try to keep track of how many open tables - * there are, so we can close the connection if not in use. + * Non-exported class, manages the connection to a given worker server. Exported types like QueryInfo and Table will + * refer to this, and allow us to try to keep track of how many open tables there are, so we can close the connection if + * not in use. 
* - * Might make more sense to be part of QueryInfo, but this way we can WeakMap instances, check - * periodically if any QueryInfos are left alive or event handlers still exist, and close - * connections that seem unused. + * Might make more sense to be part of QueryInfo, but this way we can WeakMap instances, check periodically if any + * QueryInfos are left alive or event handlers still exist, and close connections that seem unused. * - * Except for the delegated call from QueryInfo.getTable, none of these calls will be possible in - * Connecting or Disconnected state if done right. Failed state is possible, and we will want to - * think more about handling, possible re-Promise-ing all of the things, or just return stale values - * if we have them. + * Except for the delegated call from QueryInfo.getTable, none of these calls will be possible in Connecting or + * Disconnected state if done right. Failed state is possible, and we will want to think more about handling, possible + * re-Promise-ing all of the things, or just return stale values if we have them. * - * Responsible for reconnecting to the query server when required - when that server disappears, and - * at least one table is left un-closed. + * Responsible for reconnecting to the query server when required - when that server disappears, and at least one table + * is left un-closed. */ public class WorkerConnection { @@ -95,27 +93,25 @@ public class WorkerConnection { } private String sessionToken; - // All calls to the server should share this metadata instance, or copy from it if they need - // something custom + // All calls to the server should share this metadata instance, or copy from it if they need something custom private BrowserHeaders metadata = new BrowserHeaders(); /** - * States the connection can be in. If non-requested disconnect occurs, transition to - * reconnecting. If reconnect fails, move to failed, and do not attempt again. + * States the connection can be in. 
If non-requested disconnect occurs, transition to reconnecting. If reconnect + * fails, move to failed, and do not attempt again. * - * If an error happens on the websocket connection, we'll get a close event also - since we also - * use onError to handle failed work, and will just try one reconnect per close event. + * If an error happens on the websocket connection, we'll get a close event also - since we also use onError to + * handle failed work, and will just try one reconnect per close event. * - * Reconnecting requires waiting for the worker to return to "Running" state, requesting a new - * auth token, and then initiating that connection. + * Reconnecting requires waiting for the worker to return to "Running" state, requesting a new auth token, and then + * initiating that connection. * * Mostly informational, useful for debugging and error messages. */ private enum State { Connecting, Connected, /** - * Indicates that this worker was deliberately disconnected, should be reconnected again if - * needed. + * Indicates that this worker was deliberately disconnected, should be reconnected again if needed. 
*/ Disconnected, Failed, Reconnecting } @@ -138,13 +134,11 @@ private enum State { private final StateCache cache = new StateCache(); private final JsWeakMap batchers = new JsWeakMap<>(); private JsWeakMap> handleCallbacks = new JsWeakMap<>(); - private JsWeakMap> definitionCallbacks = - new JsWeakMap<>(); + private JsWeakMap> definitionCallbacks = new JsWeakMap<>(); private final Set flushable = new HashSet<>(); private final JsSet> logCallbacks = new JsSet<>(); - private final Map> subscriptionStreams = - new HashMap<>(); + private final Map> subscriptionStreams = new HashMap<>(); private ResponseStreamWrapper exportNotifications; private Map tableMaps = new HashMap<>(); @@ -155,23 +149,18 @@ private enum State { private JsConsumer recordLog = pastLogs::add; private ResponseStreamWrapper logStream; - public WorkerConnection(QueryConnectable info, - Supplier> authTokenPromiseSupplier) { + public WorkerConnection(QueryConnectable info, Supplier> authTokenPromiseSupplier) { this.info = info; this.config = new ClientConfiguration(); state = State.Connecting; this.reviver = new TableReviver(this); boolean debugGrpc = false; - sessionServiceClient = - new SessionServiceClient(info.getServerUrl(), JsPropertyMap.of("debug", debugGrpc)); - tableServiceClient = - new TableServiceClient(info.getServerUrl(), JsPropertyMap.of("debug", debugGrpc)); - consoleServiceClient = - new ConsoleServiceClient(info.getServerUrl(), JsPropertyMap.of("debug", debugGrpc)); - flightServiceClient = - new FlightServiceClient(info.getServerUrl(), JsPropertyMap.of("debug", debugGrpc)); - browserFlightServiceClient = new BrowserFlightServiceClient(info.getServerUrl(), - JsPropertyMap.of("debug", debugGrpc)); + sessionServiceClient = new SessionServiceClient(info.getServerUrl(), JsPropertyMap.of("debug", debugGrpc)); + tableServiceClient = new TableServiceClient(info.getServerUrl(), JsPropertyMap.of("debug", debugGrpc)); + consoleServiceClient = new ConsoleServiceClient(info.getServerUrl(), 
JsPropertyMap.of("debug", debugGrpc)); + flightServiceClient = new FlightServiceClient(info.getServerUrl(), JsPropertyMap.of("debug", debugGrpc)); + browserFlightServiceClient = + new BrowserFlightServiceClient(info.getServerUrl(), JsPropertyMap.of("debug", debugGrpc)); // builder.setConnectionErrorHandler(msg -> info.failureHandled(String.valueOf(msg))); @@ -184,123 +173,118 @@ public WorkerConnection(QueryConnectable info, } /** - * Creates a new session based on the current auth info, and attempts to re-create all tables - * and other objects that were currently open. + * Creates a new session based on the current auth info, and attempts to re-create all tables and other objects that + * were currently open. * - * First we assume that the auth token provider is valid, and ask for a new token to provide to - * the worker. + * First we assume that the auth token provider is valid, and ask for a new token to provide to the worker. * * Given that token, we create a new session on the worker server. * - * When a table is first fetched, it might fail - the worker connection will keep trying to - * connect even if the failure is in one of the above steps. A later attempt to fetch that table - * may succeed however. + * When a table is first fetched, it might fail - the worker connection will keep trying to connect even if the + * failure is in one of the above steps. A later attempt to fetch that table may succeed however. * - * Once the table has been successfully fetched, after each reconnect until the table is - * close()d we'll attempt to restore the table by re-fetching the table, then reapplying all - * operations on it. + * Once the table has been successfully fetched, after each reconnect until the table is close()d we'll attempt to + * restore the table by re-fetching the table, then reapplying all operations on it. 
*/ private void connectToWorker(Supplier> authTokenPromiseSupplier) { info.running() - .then(queryWorkerRunning -> { - // get the auth token - return authTokenPromiseSupplier.get(); - }).then(authToken -> { - // create a new session - HandshakeRequest handshakeRequest = new HandshakeRequest(); - if (authToken != null) { - Uint8Array token = new Uint8Array(authToken.getBytes().length); - handshakeRequest.setPayload(token); - } - handshakeRequest.setAuthProtocol(1); - - return Callbacks - .grpcUnaryPromise(c -> sessionServiceClient - .newSession(handshakeRequest, (BrowserHeaders) null, c::apply)); - }).then(handshakeResponse -> { - // start the reauth cycle - authUpdate(handshakeResponse); - - state = State.Connected; - - JsLog.debug("Connected to worker, ensuring all states are refreshed"); - // mark that we succeeded - newSessionReconnect.success(); - - // nuke pending callbacks, we'll remake them - handleCallbacks = new JsWeakMap<>(); - definitionCallbacks = new JsWeakMap<>(); - - - // for each cts in the cache, get all with active subs - ClientTableState[] hasActiveSubs = cache.getAllStates().stream() - .peek(cts -> { - cts.getHandle().setConnected(false); - cts.setSubscribed(false); - cts.forActiveLifecycles(item -> { - assert !(item instanceof JsTable) || - ((JsTable) item).state() == cts - : "Invalid table state " + item + " does not point to state " + cts; - item.suppressEvents(); - }); - }) - .filter(cts -> !cts.isEmpty()) - .peek(cts -> { - cts.forActiveTables(t -> { - assert t.state().isAncestor(cts) : "Invalid binding " + t + " (" - + t.state() + ") does not contain " + cts; - }); - }) - .toArray(ClientTableState[]::new); - // clear caches - List inactiveStatesToRemove = cache.getAllStates().stream() - .filter(ClientTableState::isEmpty) - .collect(Collectors.toList()); - inactiveStatesToRemove.forEach(cache::release); - - flushable.clear(); - - reviver.revive(metadata, hasActiveSubs); - - tableMaps.forEach((handle, tableMap) -> tableMap.refetch()); - 
figures.forEach((p0, p1, p2) -> p0.refetch()); - - info.connected(); - - // if any tables have been requested, make sure they start working now that we are - // connected - onOpen.forEach(c -> c.onSuccess(null)); - onOpen.clear(); - - // // start a heartbeat to check if connection is properly alive - // ping(success.getAuthSessionToken()); - startExportNotificationsStream(); - - return Promise.resolve(handshakeResponse); - }, fail -> { - // this is non-recoverable, connection/auth/registration failed, but we'll let it - // start again when state changes - state = State.Failed; - JsLog.debug("Failed to connect to worker."); - - final String failure = fail.toString(); - - // notify all pending fetches that they failed - onOpen.forEach(c -> c.onFailure(failure)); - onOpen.clear(); - - // if (server != null) { - // // explicitly disconnect from the query worker - // server.close(); - // } - - // signal that we should try again - newSessionReconnect.failed(); - - // inform the UI that it failed to connect - info.failureHandled("Failed to connect: " + failure); - return null; - }); + .then(queryWorkerRunning -> { + // get the auth token + return authTokenPromiseSupplier.get(); + }).then(authToken -> { + // create a new session + HandshakeRequest handshakeRequest = new HandshakeRequest(); + if (authToken != null) { + Uint8Array token = new Uint8Array(authToken.getBytes().length); + handshakeRequest.setPayload(token); + } + handshakeRequest.setAuthProtocol(1); + + return Callbacks.grpcUnaryPromise( + c -> sessionServiceClient.newSession(handshakeRequest, (BrowserHeaders) null, c::apply)); + }).then(handshakeResponse -> { + // start the reauth cycle + authUpdate(handshakeResponse); + + state = State.Connected; + + JsLog.debug("Connected to worker, ensuring all states are refreshed"); + // mark that we succeeded + newSessionReconnect.success(); + + // nuke pending callbacks, we'll remake them + handleCallbacks = new JsWeakMap<>(); + definitionCallbacks = new JsWeakMap<>(); + 
+ + // for each cts in the cache, get all with active subs + ClientTableState[] hasActiveSubs = cache.getAllStates().stream() + .peek(cts -> { + cts.getHandle().setConnected(false); + cts.setSubscribed(false); + cts.forActiveLifecycles(item -> { + assert !(item instanceof JsTable) || + ((JsTable) item).state() == cts + : "Invalid table state " + item + " does not point to state " + cts; + item.suppressEvents(); + }); + }) + .filter(cts -> !cts.isEmpty()) + .peek(cts -> { + cts.forActiveTables(t -> { + assert t.state().isAncestor(cts) + : "Invalid binding " + t + " (" + t.state() + ") does not contain " + cts; + }); + }) + .toArray(ClientTableState[]::new); + // clear caches + List inactiveStatesToRemove = cache.getAllStates().stream() + .filter(ClientTableState::isEmpty) + .collect(Collectors.toList()); + inactiveStatesToRemove.forEach(cache::release); + + flushable.clear(); + + reviver.revive(metadata, hasActiveSubs); + + tableMaps.forEach((handle, tableMap) -> tableMap.refetch()); + figures.forEach((p0, p1, p2) -> p0.refetch()); + + info.connected(); + + // if any tables have been requested, make sure they start working now that we are connected + onOpen.forEach(c -> c.onSuccess(null)); + onOpen.clear(); + + // // start a heartbeat to check if connection is properly alive + // ping(success.getAuthSessionToken()); + startExportNotificationsStream(); + + return Promise.resolve(handshakeResponse); + }, fail -> { + // this is non-recoverable, connection/auth/registration failed, but we'll let it start again when + // state changes + state = State.Failed; + JsLog.debug("Failed to connect to worker."); + + final String failure = fail.toString(); + + // notify all pending fetches that they failed + onOpen.forEach(c -> c.onFailure(failure)); + onOpen.clear(); + + // if (server != null) { + // // explicitly disconnect from the query worker + // server.close(); + // } + + // signal that we should try again + newSessionReconnect.failed(); + + // inform the UI that it failed 
to connect + info.failureHandled("Failed to connect: " + failure); + return null; + }); } public void checkStatus(ResponseStreamWrapper.Status status) { @@ -323,17 +307,15 @@ private void startExportNotificationsStream() { if (exportNotifications != null) { exportNotifications.cancel(); } - exportNotifications = ResponseStreamWrapper.of( - tableServiceClient.exportedTableUpdates(new ExportedTableUpdatesRequest(), metadata())); + exportNotifications = ResponseStreamWrapper + .of(tableServiceClient.exportedTableUpdates(new ExportedTableUpdatesRequest(), metadata())); exportNotifications.onData(update -> { - if (update.getUpdateFailureMessage() != null - && !update.getUpdateFailureMessage().isEmpty()) { - exportedTableUpdateMessageError( - new TableTicket(update.getExportId().getTicket_asU8()), - update.getUpdateFailureMessage()); + if (update.getUpdateFailureMessage() != null && !update.getUpdateFailureMessage().isEmpty()) { + exportedTableUpdateMessageError(new TableTicket(update.getExportId().getTicket_asU8()), + update.getUpdateFailureMessage()); } else { exportedTableUpdateMessage(new TableTicket(update.getExportId().getTicket_asU8()), - java.lang.Long.parseLong(update.getSize())); + java.lang.Long.parseLong(update.getSize())); } }); @@ -343,10 +325,9 @@ private void startExportNotificationsStream() { private void authUpdate(HandshakeResponse handshakeResponse) { // store the token and schedule refresh calls to keep it alive - sessionToken = new String(Js.uncheckedCast(handshakeResponse.getSessionToken_asU8()), - Charset.forName("UTF-8")); - String sessionHeaderName = new String( - Js.uncheckedCast(handshakeResponse.getMetadataHeader_asU8()), Charset.forName("UTF-8")); + sessionToken = new String(Js.uncheckedCast(handshakeResponse.getSessionToken_asU8()), Charset.forName("UTF-8")); + String sessionHeaderName = + new String(Js.uncheckedCast(handshakeResponse.getMetadataHeader_asU8()), Charset.forName("UTF-8")); metadata.set(sessionHeaderName, sessionToken); // 
TODO maybe accept server advice on refresh rates, or just do our own thing @@ -399,8 +380,7 @@ public void incrementalUpdates(TableTicket tableHandle, DeltaUpdates updates) { // notify table that it has individual row updates final Optional cts = cache.get(tableHandle); if (!cts.isPresent()) { - JsLog.debug("Discarding delta for disconnected state ", tableHandle, " : ", - updates); + JsLog.debug("Discarding delta for disconnected state ", tableHandle, " : ", updates); } JsLog.debug("Delta received", tableHandle, updates); cts.ifPresent(s -> { @@ -432,11 +412,10 @@ public void exportedTableUpdateMessageError(TableTicket clientId, String errorMe // @Override public void onOpen() { - // never actually called - this instance isn't configured to be the "client" in the - // connection until auth + // never actually called - this instance isn't configured to be the "client" in the connection until auth // has succeeded. assert false - : "WorkerConnection.onOpen() should not be invoked directly, check the stack trace to see how this was triggered"; + : "WorkerConnection.onOpen() should not be invoked directly, check the stack trace to see how this was triggered"; } // @Override @@ -548,20 +527,20 @@ public Promise getTable(String tableName, Ticket script) { return whenServerReady("get a table").then(serve -> { JsLog.debug("innerGetTable", tableName, " started"); return newState(info, - (c, cts, metadata) -> { - JsLog.debug("performing fetch for ", tableName, " / ", cts, - " (" + LazyString.of(cts::getHandle), ",", script, ")"); - assert script != null : "no global scope support at this time"; - FetchTableRequest fetch = new FetchTableRequest(); - fetch.setConsoleId(script); - fetch.setTableName(tableName); - fetch.setTableId(cts.getHandle().makeTicket()); - consoleServiceClient.fetchTable(fetch, metadata, c::apply); - }, "fetch table " + tableName).then(cts -> { - JsLog.debug("innerGetTable", tableName, " succeeded ", cts); - JsTable table = new JsTable(this, cts); - 
return Promise.resolve(table); - }); + (c, cts, metadata) -> { + JsLog.debug("performing fetch for ", tableName, " / ", cts, + " (" + LazyString.of(cts::getHandle), ",", script, ")"); + assert script != null : "no global scope support at this time"; + FetchTableRequest fetch = new FetchTableRequest(); + fetch.setConsoleId(script); + fetch.setTableName(tableName); + fetch.setTableId(cts.getHandle().makeTicket()); + consoleServiceClient.fetchTable(fetch, metadata, c::apply); + }, "fetch table " + tableName).then(cts -> { + JsLog.debug("innerGetTable", tableName, " succeeded ", cts); + JsTable table = new JsTable(this, cts); + return Promise.resolve(table); + }); }); } @@ -573,21 +552,21 @@ public Promise getPandas(String name, Ticket script) { return whenServerReady("get a pandas table").then(serve -> { JsLog.debug("innerGetPandasTable", name, " started"); return newState(info, - (c, cts, metadata) -> { - JsLog.debug("performing fetch for ", name, " / ", cts, - " (" + LazyString.of(cts::getHandle), ",", script, ")"); - // if (script != null) { - // getServer().fetchPandasScriptTable(cts.getHandle(), script, name, c); - // } else { - // getServer().fetchPandasTable(cts.getHandle(), name, c); - // } - throw new UnsupportedOperationException("getPandas"); - - }, "fetch pandas table " + name).then(cts -> { - JsLog.debug("innerGetPandasTable", name, " succeeded ", cts); - JsTable table = new JsTable(this, cts); - return Promise.resolve(table); - }); + (c, cts, metadata) -> { + JsLog.debug("performing fetch for ", name, " / ", cts, " (" + LazyString.of(cts::getHandle), + ",", script, ")"); + // if (script != null) { + // getServer().fetchPandasScriptTable(cts.getHandle(), script, name, c); + // } else { + // getServer().fetchPandasTable(cts.getHandle(), name, c); + // } + throw new UnsupportedOperationException("getPandas"); + + }, "fetch pandas table " + name).then(cts -> { + JsLog.debug("innerGetPandasTable", name, " succeeded ", cts); + JsTable table = new JsTable(this, 
cts); + return Promise.resolve(table); + }); }); } @@ -604,8 +583,8 @@ public Promise getObject(JsVariableDefinition definition) { case Pandas: return (Promise) getPandas(definition.getName()); default: - return Promise.reject(new Error( - "Object " + definition.getName() + " unknown type " + definition.getType())); + return Promise + .reject(new Error("Object " + definition.getName() + " unknown type " + definition.getType())); } } @@ -620,8 +599,8 @@ public Promise getObject(JsVariableDefinition definition, Ticket script) case Pandas: return (Promise) getPandas(definition.getName(), script); default: - return Promise.reject(new Error("Object " + definition.getName() + " unknown type " - + definition.getType() + " for script.")); + return Promise.reject(new Error( + "Object " + definition.getName() + " unknown type " + definition.getType() + " for script.")); } } @@ -634,26 +613,23 @@ public Promise whenServerReady(String operationName) { // deliberate fall-through case Connecting: case Reconnecting: - // Create a new promise around a callback, add that to the list of callbacks to - // complete when + // Create a new promise around a callback, add that to the list of callbacks to complete when // connection is complete - return Callbacks.promise(info, c -> onOpen.add(c)) - .then(ignore -> Promise.resolve(this)); + return Callbacks.promise(info, c -> onOpen.add(c)).then(ignore -> Promise.resolve(this)); case Connected: // Already connected, continue return Promise.resolve(this); default: // not possible, means null state // noinspection unchecked - return (Promise) Promise - .reject("Can't " + operationName + " while connection is in state " + state); + return (Promise) Promise.reject("Can't " + operationName + " while connection is in state " + state); } } public Promise getTableMap(String tableMapName) { return whenServerReady("get a tablemap") - .then(server -> Promise.resolve(new TableMap(this, tableMapName)) - .then(TableMap::refetch)); + .then(server -> 
Promise.resolve(new TableMap(this, tableMapName)) + .then(TableMap::refetch)); } public void registerTableMap(TableMapHandle handle, TableMap tableMap) { @@ -666,8 +642,7 @@ public Promise getTreeTable(String tableName) { public Promise getTreeTable(String tableName, Ticket script) { return getTable(tableName, script).then(t -> { - Promise result = - Promise.resolve(new JsTreeTable(t.state(), this).finishFetch()); + Promise result = Promise.resolve(new JsTreeTable(t.state(), this).finishFetch()); t.close(); return result; }); @@ -679,12 +654,12 @@ public Promise getFigure(String figureName) { public Promise getFigure(String figureName, Ticket script) { return whenServerReady("get a figure") - .then(server -> new JsFigure(this, c -> { - FetchFigureRequest request = new FetchFigureRequest(); - request.setConsoleId(script); - request.setFigureName(figureName); - consoleServiceClient().fetchFigure(request, metadata(), c::apply); - }).refetch()); + .then(server -> new JsFigure(this, c -> { + FetchFigureRequest request = new FetchFigureRequest(); + request.setConsoleId(script); + request.setFigureName(figureName); + consoleServiceClient().fetchFigure(request, metadata(), c::apply); + }).refetch()); } public void registerFigure(JsFigure figure) { @@ -716,12 +691,10 @@ public BrowserHeaders metadata() { return metadata; } - public Promise newTable(String[] columnNames, String[] types, String[][] data, - String userTimeZone, HasEventHandling failHandler) { - // Store the ref to the data using an array we can clear out, so the data is garbage - // collected later - // This means the table can only be created once, but that's probably what we want in this - // case anyway + public Promise newTable(String[] columnNames, String[] types, String[][] data, String userTimeZone, + HasEventHandling failHandler) { + // Store the ref to the data using an array we can clear out, so the data is garbage collected later + // This means the table can only be created once, but that's 
probably what we want in this case anyway final String[][][] dataRef = new String[][][] {data}; return newState(failHandler, (c, cts, metadata) -> { final String[][] d = dataRef[0]; @@ -749,7 +722,7 @@ public Promise newTable(String[] columnNames, String[] types, String[][ double typeOffset = writer.writeType(schema); double metadataOffset = Field.createCustomMetadataVector(schema, new double[] { KeyValue.createKeyValue(schema, schema.createString("deephaven:type"), - schema.createString(writer.deephavenType())) + schema.createString(writer.deephavenType())) }); Field.startField(schema); @@ -770,23 +743,21 @@ public Promise newTable(String[] columnNames, String[] types, String[][ // wrap in a message and send as the first payload FlightData schemaMessage = new FlightData(); Uint8Array schemaMessagePayload = - createMessage(schema, MessageHeader.Schema, Schema.endSchema(schema), 0, 0); + createMessage(schema, MessageHeader.Schema, Schema.endSchema(schema), 0, 0); schemaMessage.setDataHeader(schemaMessagePayload); Uint8Array rpcTicket = config.newTicket(); schemaMessage.setAppMetadata(BarrageUtils.barrageMessage(rpcTicket, 0, false)); schemaMessage.setFlightDescriptor(cts.getHandle().makeFlightDescriptor()); - // we wait for any errors in this response to pass to the caller, but success is - // determined by the eventual + // we wait for any errors in this response to pass to the caller, but success is determined by the eventual // table's creation, which can race this - ResponseStreamWrapper doPutResponseStream = ResponseStreamWrapper - .of(browserFlightServiceClient.openDoPut(schemaMessage, metadata())); + ResponseStreamWrapper doPutResponseStream = + ResponseStreamWrapper.of(browserFlightServiceClient.openDoPut(schemaMessage, metadata())); doPutResponseStream.onEnd(status -> { if (status.getCode() == Code.OK) { - ExportedTableCreationResponse syntheticResponse = - new ExportedTableCreationResponse(); + ExportedTableCreationResponse syntheticResponse = new 
ExportedTableCreationResponse(); Uint8Array schemaPlusHeader = new Uint8Array(schemaMessagePayload.length + 8); schemaPlusHeader.set(schemaMessagePayload, 8); syntheticResponse.setSchemaHeader(schemaPlusHeader); @@ -805,8 +776,7 @@ public Promise newTable(String[] columnNames, String[] types, String[][ Builder bodyData = new Builder(1024); - // iterate each column, building buffers and fieldnodes, as well as building the actual - // payload + // iterate each column, building buffers and fieldnodes, as well as building the actual payload List buffers = new ArrayList<>(); List nodes = new ArrayList<>(); for (int i = 0; i < data.length; i++) { @@ -815,8 +785,7 @@ public Promise newTable(String[] columnNames, String[] types, String[][ // write down the buffers for the RecordBatch RecordBatch.startBuffersVector(bodyData, buffers.size()); - int length = 0;// record the size, we need to be sure all buffers are padded to full - // width + int length = 0;// record the size, we need to be sure all buffers are padded to full width for (Uint8Array arr : buffers) { assert arr.byteLength % 8 == 0; length += arr.byteLength; @@ -825,8 +794,7 @@ public Promise newTable(String[] columnNames, String[] types, String[][ for (int i = buffers.size() - 1; i >= 0; i--) { Uint8Array buffer = buffers.get(i); cumulativeOffset -= buffer.byteLength; - Buffer.createBuffer(bodyData, Long.create(cumulativeOffset, 0), - Long.create(buffer.byteLength, 0)); + Buffer.createBuffer(bodyData, Long.create(cumulativeOffset, 0), Long.create(buffer.byteLength, 0)); } assert cumulativeOffset == 0; double buffersOffset = bodyData.endVector(); @@ -834,8 +802,7 @@ public Promise newTable(String[] columnNames, String[] types, String[][ RecordBatch.startNodesVector(bodyData, nodes.size()); for (int i = nodes.size() - 1; i >= 0; i--) { CsvTypeParser.Node node = nodes.get(i); - FieldNode.createFieldNode(bodyData, Long.create(node.length(), 0), - Long.create(node.nullCount(), 0)); + 
FieldNode.createFieldNode(bodyData, Long.create(node.length(), 0), Long.create(node.nullCount(), 0)); } double nodesOffset = bodyData.endVector(); @@ -846,8 +813,7 @@ public Promise newTable(String[] columnNames, String[] types, String[][ RecordBatch.addLength(bodyData, Long.create(data[0].length, 0)); double recordBatchOffset = RecordBatch.endRecordBatch(bodyData); - bodyMessage.setDataHeader( - createMessage(bodyData, MessageHeader.RecordBatch, recordBatchOffset, length, 0)); + bodyMessage.setDataHeader(createMessage(bodyData, MessageHeader.RecordBatch, recordBatchOffset, length, 0)); bodyMessage.setDataBody(padAndConcat(buffers, length)); browserFlightServiceClient.nextDoPut(bodyMessage, metadata(), (fail, success) -> { @@ -871,10 +837,10 @@ private Uint8Array padAndConcat(List buffers, int length) { return all; } - private static Uint8Array createMessage(Builder payload, int messageHeaderType, - double messageHeaderOffset, int bodyLength, double customMetadataOffset) { - payload.finish(Message.createMessage(payload, MetadataVersion.V5, messageHeaderType, - messageHeaderOffset, Long.create(bodyLength, 0), customMetadataOffset)); + private static Uint8Array createMessage(Builder payload, int messageHeaderType, double messageHeaderOffset, + int bodyLength, double customMetadataOffset) { + payload.finish(Message.createMessage(payload, MetadataVersion.V5, messageHeaderType, messageHeaderOffset, + Long.create(bodyLength, 0), customMetadataOffset)); return payload.asUint8Array(); } @@ -884,14 +850,13 @@ public Promise mergeTables(JsTable[] tables, HasEventHandling failHandl for (int i = 0; i < tables.length; i++) { final JsTable table = tables[i]; if (!table.getConnection().equals(this)) { - throw new IllegalStateException( - "Table in merge is not on the worker for this connection"); + throw new IllegalStateException("Table in merge is not on the worker for this connection"); } tableHandles[i] = new TableReference(); 
tableHandles[i].setTicket(tables[i].getHandle().makeTicket()); } - JsLog.debug("Merging tables: ", LazyString.of(cts.getHandle()), " for ", - cts.getHandle().isResolved(), cts.getResolution()); + JsLog.debug("Merging tables: ", LazyString.of(cts.getHandle()), " for ", cts.getHandle().isResolved(), + cts.getResolution()); MergeTablesRequest requestMessage = new MergeTablesRequest(); requestMessage.setResultId(cts.getHandle().makeTicket()); requestMessage.setSourceIdsList(tableHandles); @@ -943,16 +908,15 @@ private TableTicket newHandle() { } public RequestBatcher getBatcher(JsTable table) { - // LATER: consider a global client.batch(()=>{}) method which causes all table statements to - // be batched together. + // LATER: consider a global client.batch(()=>{}) method which causes all table statements to be batched + // together. // We will build this architecture to support this, without wiring it up just yet RequestBatcher batcher = batchers.get(table); if (batcher == null || batcher.isSent()) { final RequestBatcher myBatcher = new RequestBatcher(table, this); batchers.set(table, myBatcher); myBatcher.onSend(r -> { - // clear out our map references if we're the last batcher to finish running for the - // given table. + // clear out our map references if we're the last batcher to finish running for the given table. 
if (batchers.get(table) == myBatcher) { batchers.delete(table); } @@ -962,14 +926,12 @@ public RequestBatcher getBatcher(JsTable table) { return batcher; } - public ClientTableState newStateFromUnsolicitedTable( - ExportedTableCreationResponse unsolicitedTable, String fetchSummary) { - TableTicket tableTicket = - new TableTicket(unsolicitedTable.getResultId().getTicket().getTicket_asU8()); + public ClientTableState newStateFromUnsolicitedTable(ExportedTableCreationResponse unsolicitedTable, + String fetchSummary) { + TableTicket tableTicket = new TableTicket(unsolicitedTable.getResultId().getTicket().getTicket_asU8()); JsTableFetch failFetch = (callback, newState, metadata1) -> { throw new IllegalStateException( - "Cannot reconnect, must recreate the unsolicited table on the server: " - + fetchSummary); + "Cannot reconnect, must recreate the unsolicited table on the server: " + fetchSummary); }; return cache.create(tableTicket, handle -> { ClientTableState cts = new ClientTableState(this, handle, failFetch, fetchSummary); @@ -979,23 +941,19 @@ public ClientTableState newStateFromUnsolicitedTable( } public ClientTableState newState(JsTableFetch fetcher, String fetchSummary) { - return cache.create(newHandle(), - handle -> new ClientTableState(this, handle, fetcher, fetchSummary)); + return cache.create(newHandle(), handle -> new ClientTableState(this, handle, fetcher, fetchSummary)); } /** * * @param fetcher The lambda to perform the fetch of the table's definition. - * @return A promise that will resolve when the ClientTableState is RUNNING (and fail if - * anything goes awry). + * @return A promise that will resolve when the ClientTableState is RUNNING (and fail if anything goes awry). * * TODO: consider a fetch timeout. 
*/ - public Promise newState(HasEventHandling failHandler, JsTableFetch fetcher, - String fetchSummary) { + public Promise newState(HasEventHandling failHandler, JsTableFetch fetcher, String fetchSummary) { final TableTicket handle = newHandle(); - final ClientTableState s = - cache.create(handle, h -> new ClientTableState(this, h, fetcher, fetchSummary)); + final ClientTableState s = cache.create(handle, h -> new ClientTableState(this, h, fetcher, fetchSummary)); return s.refetch(failHandler, metadata); } @@ -1012,8 +970,7 @@ public StateCache getCache() { } /** - * Schedules a deferred command to check the given state for active tables and adjust viewports - * accordingly. + * Schedules a deferred command to check the given state for active tables and adjust viewports accordingly. */ public void scheduleCheck(ClientTableState state) { if (flushable.isEmpty()) { @@ -1072,8 +1029,7 @@ private void flush() { cache.release(state); JsLog.debug("Releasing state", state, LazyString.of(state.getHandle())); - // don't send a release message to the server if the table isn't really - // there + // don't send a release message to the server if the table isn't really there if (state.getHandle().isConnected()) { releaseHandle(state.getHandle()); } @@ -1086,8 +1042,7 @@ private void flush() { if (state.isSubscribed()) { state.setSubscribed(false); if (state.getHandle().isConnected()) { - ResponseStreamWrapper stream = - subscriptionStreams.remove(state); + ResponseStreamWrapper stream = subscriptionStreams.remove(state); if (stream != null) { stream.cancel(); } @@ -1098,99 +1053,87 @@ private void flush() { List vps = new ArrayList<>(); state.forActiveSubscriptions((table, subscription) -> { assert table.isActive(state) : "Inactive table has a viewport still attached"; - vps.add(new TableSubscriptionRequest(table.getSubscriptionId(), - subscription.getRows(), subscription.getColumns())); + vps.add(new TableSubscriptionRequest(table.getSubscriptionId(), subscription.getRows(), + 
subscription.getColumns())); }); boolean isViewport = vps.stream().allMatch(req -> req.getRows() != null); assert isViewport || vps.stream().noneMatch(req -> req.getRows() != null) - : "All subscriptions to a given handle must be consistently viewport or non-viewport"; + : "All subscriptions to a given handle must be consistently viewport or non-viewport"; - BitSet includedColumns = - vps.stream().map(TableSubscriptionRequest::getColumns).reduce((bs1, bs2) -> { - BitSet result = new BitSet(); - result.or(bs1); - result.or(bs2); - return result; - }).orElseThrow(() -> new IllegalStateException( - "Cannot call subscribe with zero subscriptions")); + BitSet includedColumns = vps.stream().map(TableSubscriptionRequest::getColumns).reduce((bs1, bs2) -> { + BitSet result = new BitSet(); + result.or(bs1); + result.or(bs2); + return result; + }).orElseThrow(() -> new IllegalStateException("Cannot call subscribe with zero subscriptions")); String[] columnTypes = Arrays.stream(state.getAllColumns()) - .map(Column::getType) - .toArray(String[]::new); + .map(Column::getType) + .toArray(String[]::new); state.setSubscribed(true); Builder subscriptionReq = new Builder(1024); - double columnsOffset = BarrageSubscriptionRequest.createColumnsVector( - subscriptionReq, makeUint8ArrayFromBitset(includedColumns)); + double columnsOffset = BarrageSubscriptionRequest.createColumnsVector(subscriptionReq, + makeUint8ArrayFromBitset(includedColumns)); double viewportOffset = 0; if (isViewport) { - viewportOffset = BarrageSubscriptionRequest - .createViewportVector(subscriptionReq, serializeRanges(vps.stream() - .map(TableSubscriptionRequest::getRows).collect(Collectors.toSet()))); + viewportOffset = BarrageSubscriptionRequest.createViewportVector(subscriptionReq, serializeRanges( + vps.stream().map(TableSubscriptionRequest::getRows).collect(Collectors.toSet()))); } - double serializationOptionsOffset = - BarrageSerializationOptions.createBarrageSerializationOptions(subscriptionReq, - 
ColumnConversionMode.Stringify, true); - double tableTicketOffset = BarrageSubscriptionRequest - .createTicketVector(subscriptionReq, state.getHandle().getTicket()); + double serializationOptionsOffset = BarrageSerializationOptions + .createBarrageSerializationOptions(subscriptionReq, ColumnConversionMode.Stringify, true); + double tableTicketOffset = + BarrageSubscriptionRequest.createTicketVector(subscriptionReq, state.getHandle().getTicket()); BarrageSubscriptionRequest.startBarrageSubscriptionRequest(subscriptionReq); BarrageSubscriptionRequest.addColumns(subscriptionReq, columnsOffset); - BarrageSubscriptionRequest.addSerializationOptions(subscriptionReq, - serializationOptionsOffset); + BarrageSubscriptionRequest.addSerializationOptions(subscriptionReq, serializationOptionsOffset); // BarrageSubscriptionRequest.addUpdateIntervalMs();//TODO #188 support this BarrageSubscriptionRequest.addViewport(subscriptionReq, viewportOffset); BarrageSubscriptionRequest.addTicket(subscriptionReq, tableTicketOffset); - subscriptionReq.finish( - BarrageSubscriptionRequest.endBarrageSubscriptionRequest(subscriptionReq)); + subscriptionReq.finish(BarrageSubscriptionRequest.endBarrageSubscriptionRequest(subscriptionReq)); Uint8Array rpcTicket = config.newTicket(); FlightData request = new FlightData(); // TODO make sure we can set true on halfClose before commit request.setAppMetadata(BarrageUtils.barrageMessage(subscriptionReq, - BarrageMessageType.BarrageSubscriptionRequest, rpcTicket, 0, false)); + BarrageMessageType.BarrageSubscriptionRequest, rpcTicket, 0, false)); - // new BidirectionStreamEmul(flightServiceClient::openDoExchange, - // flightServiceClient::nextDoExchange, subscriptionReq, - // BarrageMessageType.BarrageSubscriptionRequest, reqOffset); + // new BidirectionStreamEmul(flightServiceClient::openDoExchange, flightServiceClient::nextDoExchange, + // subscriptionReq, BarrageMessageType.BarrageSubscriptionRequest, reqOffset); - ResponseStreamWrapper stream = 
ResponseStreamWrapper - .of(browserFlightServiceClient.openDoExchange(request, metadata)); + ResponseStreamWrapper stream = + ResponseStreamWrapper.of(browserFlightServiceClient.openDoExchange(request, metadata)); stream.onData(new JsConsumer() { @Override public void apply(FlightData data) { - ByteBuffer body = - typedArrayToLittleEndianByteBuffer(data.getDataBody_asU8()); - Message headerMessage = Message.getRootAsMessage( - new io.deephaven.javascript.proto.dhinternal.flatbuffers.ByteBuffer( - data.getDataHeader_asU8())); - if (body.limit() == 0 - && headerMessage.headerType() != MessageHeader.RecordBatch) { - // a subscription stream presently ignores schemas and other message - // types + ByteBuffer body = typedArrayToLittleEndianByteBuffer(data.getDataBody_asU8()); + Message headerMessage = Message + .getRootAsMessage(new io.deephaven.javascript.proto.dhinternal.flatbuffers.ByteBuffer( + data.getDataHeader_asU8())); + if (body.limit() == 0 && headerMessage.headerType() != MessageHeader.RecordBatch) { + // a subscription stream presently ignores schemas and other message types // TODO hang on to the schema to better handle the now-Utf8 columns return; } RecordBatch header = headerMessage.header(new RecordBatch()); BarrageMessageWrapper barrageMessageWrapper = - BarrageMessageWrapper.getRootAsBarrageMessageWrapper( - new io.deephaven.javascript.proto.dhinternal.flatbuffers.ByteBuffer( - data.getAppMetadata_asU8())); + BarrageMessageWrapper.getRootAsBarrageMessageWrapper( + new io.deephaven.javascript.proto.dhinternal.flatbuffers.ByteBuffer( + data.getAppMetadata_asU8())); if (barrageMessageWrapper.msgType() == 0) { // continue previous message, just read RecordBatch appendAndMaybeFlush(header, body); } else { - assert barrageMessageWrapper - .msgType() == BarrageMessageType.BarrageUpdateMetadata; - BarrageUpdateMetadata barrageUpdate = - BarrageUpdateMetadata.getRootAsBarrageUpdateMetadata( + assert barrageMessageWrapper.msgType() == 
BarrageMessageType.BarrageUpdateMetadata; + BarrageUpdateMetadata barrageUpdate = BarrageUpdateMetadata.getRootAsBarrageUpdateMetadata( new io.deephaven.javascript.proto.dhinternal.flatbuffers.ByteBuffer( - new Uint8Array(barrageMessageWrapper.msgPayloadArray()))); - startAndMaybeFlush(barrageUpdate.isSnapshot(), header, body, - barrageUpdate, isViewport, columnTypes); + new Uint8Array(barrageMessageWrapper.msgPayloadArray()))); + startAndMaybeFlush(barrageUpdate.isSnapshot(), header, body, barrageUpdate, isViewport, + columnTypes); } } @@ -1206,12 +1149,11 @@ private void appendAndMaybeFlush(RecordBatch header, ByteBuffer body) { } } - private void startAndMaybeFlush(boolean isSnapshot, RecordBatch header, - ByteBuffer body, BarrageUpdateMetadata barrageUpdate, boolean isViewport, - String[] columnTypes) { + private void startAndMaybeFlush(boolean isSnapshot, RecordBatch header, ByteBuffer body, + BarrageUpdateMetadata barrageUpdate, boolean isViewport, String[] columnTypes) { if (isSnapshot) { - TableSnapshot snapshot = createSnapshot(header, body, barrageUpdate, - isViewport, columnTypes); + TableSnapshot snapshot = + createSnapshot(header, body, barrageUpdate, isViewport, columnTypes); // for now we always expect snapshots to arrive in a single payload initialSnapshot(state.getHandle(), snapshot); @@ -1227,8 +1169,7 @@ private void startAndMaybeFlush(boolean isSnapshot, RecordBatch header, // attempt retry, unless auth related? 
} }); - ResponseStreamWrapper oldStream = - subscriptionStreams.put(state, stream); + ResponseStreamWrapper oldStream = subscriptionStreams.put(state, stream); if (oldStream != null) { // cancel any old stream, we presently expect a fresh instance oldStream.cancel(); @@ -1245,10 +1186,8 @@ public boolean isUsable() { switch (state) { case Connected: case Connecting: - // Ignore Reconnecting, this is here for tree tables to decide whether to poll or - // not; - // if we really are disconnected, tree tables should wait until we are reconnected - // to poll again. + // Ignore Reconnecting, this is here for tree tables to decide whether to poll or not; + // if we really are disconnected, tree tables should wait until we are reconnected to poll again. return true; } @@ -1281,7 +1220,7 @@ public JsRunnable subscribeToLogs(JsConsumer callback) { logCallbacks.add(recordLog); // TODO core#225 track latest message seen and only sub after that logStream = ResponseStreamWrapper - .of(consoleServiceClient.subscribeToLogs(new LogSubscriptionRequest(), metadata)); + .of(consoleServiceClient.subscribeToLogs(new LogSubscriptionRequest(), metadata)); logStream.onData(data -> { LogItem logItem = new LogItem(); logItem.setLogLevel(data.getLogLevel()); @@ -1319,8 +1258,7 @@ public String dump(@JsOptional String graphName) { graph.append(" null [label=\"fetch from server\" shape=plaintext]\n"); // collect the parent/child relationships - Map> statesAndParents = - cache.getAllStates().stream() + Map> statesAndParents = cache.getAllStates().stream() .collect(Collectors.groupingBy(ClientTableState::getPrevious)); // append all handles, operations, and how they were performed @@ -1332,12 +1270,11 @@ public String dump(@JsOptional String graphName) { int tableId = binding.getTable().getSubscriptionId(); graph.append(" table").append(tableId).append("[shape=box];\n"); graph.append(" table").append(tableId).append(" -> handle") - 
.append(binding.getTable().getHandle().hashCode()).append("[color=blue];\n"); + .append(binding.getTable().getHandle().hashCode()).append("[color=blue];\n"); if (binding.getRollback() != null) { - graph.append(" handle").append(binding.getState().getHandle().hashCode()) - .append(" -> handle") - .append(binding.getRollback().getState().getHandle().hashCode()) - .append(" [style=dotted, label=rollback];\n"); + graph.append(" handle").append(binding.getState().getHandle().hashCode()).append(" -> handle") + .append(binding.getRollback().getState().getHandle().hashCode()) + .append(" [style=dotted, label=rollback];\n"); } }); }); @@ -1346,7 +1283,7 @@ public String dump(@JsOptional String graphName) { } private void appendStatesToDump(ClientTableState parent, - Map> statesAndParents, StringBuilder graph) { + Map> statesAndParents, StringBuilder graph) { List childStates = statesAndParents.get(parent); if (childStates == null) { return; @@ -1357,33 +1294,30 @@ private void appendStatesToDump(ClientTableState parent, } else { graph.append(" handle").append(parent.getHandle().hashCode()); } - graph.append(" -> handle").append(clientTableState.getHandle().hashCode()) - .append("[label=\"").append(clientTableState.getFetchSummary().replaceAll("\"", "")) - .append("\"];\n"); + graph.append(" -> handle").append(clientTableState.getHandle().hashCode()).append("[label=\"") + .append(clientTableState.getFetchSummary().replaceAll("\"", "")).append("\"];\n"); appendStatesToDump(clientTableState, statesAndParents, graph); } } public Promise emptyTable(double size) { - return whenServerReady("create emptyTable") - .then(server -> newState(info, (c, cts, metadata) -> { - EmptyTableRequest emptyTableRequest = new EmptyTableRequest(); - emptyTableRequest.setResultId(cts.getHandle().makeTicket()); - emptyTableRequest.setSize(size + ""); - tableServiceClient.emptyTable(emptyTableRequest, metadata, c::apply); - }, "emptyTable(" + size + ")")).then(cts -> Promise.resolve(new JsTable(this, 
cts))); + return whenServerReady("create emptyTable").then(server -> newState(info, (c, cts, metadata) -> { + EmptyTableRequest emptyTableRequest = new EmptyTableRequest(); + emptyTableRequest.setResultId(cts.getHandle().makeTicket()); + emptyTableRequest.setSize(size + ""); + tableServiceClient.emptyTable(emptyTableRequest, metadata, c::apply); + }, "emptyTable(" + size + ")")).then(cts -> Promise.resolve(new JsTable(this, cts))); } public Promise timeTable(double periodNanos, DateWrapper startTime) { final long startTimeNanos = startTime == null ? -1 : startTime.getWrapped(); - return whenServerReady("create timetable") - .then(server -> newState(info, (c, cts, metadata) -> { - TimeTableRequest timeTableRequest = new TimeTableRequest(); - timeTableRequest.setResultId(cts.getHandle().makeTicket()); - timeTableRequest.setPeriodNanos(periodNanos + ""); - timeTableRequest.setStartTimeNanos(startTimeNanos + ""); - tableServiceClient.timeTable(timeTableRequest, metadata, c::apply); - }, "create timetable(" + periodNanos + ", " + startTime + ")")) - .then(cts -> Promise.resolve(new JsTable(this, cts))); + return whenServerReady("create timetable").then(server -> newState(info, (c, cts, metadata) -> { + TimeTableRequest timeTableRequest = new TimeTableRequest(); + timeTableRequest.setResultId(cts.getHandle().makeTicket()); + timeTableRequest.setPeriodNanos(periodNanos + ""); + timeTableRequest.setStartTimeNanos(startTimeNanos + ""); + tableServiceClient.timeTable(timeTableRequest, metadata, c::apply); + }, "create timetable(" + periodNanos + ", " + startTime + ")")) + .then(cts -> Promise.resolve(new JsTable(this, cts))); } } diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/BarrageUtils.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/BarrageUtils.java index 6697585b71c..922e085ba48 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/BarrageUtils.java +++ 
b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/BarrageUtils.java @@ -32,35 +32,29 @@ public class BarrageUtils { private static final int MAGIC = 0x6E687064; - // TODO #1049 another wrapper that makes something which looks like a stream and manages - // rpcTicket internally - public static Uint8Array barrageMessage(Builder innerBuilder, int messageType, - Uint8Array rpcTicket, int sequence, boolean halfCloseAfterMessage) { + // TODO #1049 another wrapper that makes something which looks like a stream and manages rpcTicket internally + public static Uint8Array barrageMessage(Builder innerBuilder, int messageType, Uint8Array rpcTicket, int sequence, + boolean halfCloseAfterMessage) { Builder outerBuilder = new Builder(1024); - // noinspection deprecation - this deprecation is incorrect, tsickle didn't understand that - // only one overload is deprecated - double messageOffset = - BarrageMessageWrapper.createMsgPayloadVector(outerBuilder, innerBuilder.asUint8Array()); - // noinspection deprecation - this deprecation is incorrect, tsickle didn't understand that - // only one overload is deprecated - double rpcTicketOffset = - BarrageMessageWrapper.createRpcTicketVector(outerBuilder, rpcTicket); - double offset = - BarrageMessageWrapper.createBarrageMessageWrapper(outerBuilder, MAGIC, messageType, + // noinspection deprecation - this deprecation is incorrect, tsickle didn't understand that only one overload is + // deprecated + double messageOffset = BarrageMessageWrapper.createMsgPayloadVector(outerBuilder, innerBuilder.asUint8Array()); + // noinspection deprecation - this deprecation is incorrect, tsickle didn't understand that only one overload is + // deprecated + double rpcTicketOffset = BarrageMessageWrapper.createRpcTicketVector(outerBuilder, rpcTicket); + double offset = BarrageMessageWrapper.createBarrageMessageWrapper(outerBuilder, MAGIC, messageType, messageOffset, rpcTicketOffset, Long.create(sequence, 0), halfCloseAfterMessage); 
outerBuilder.finish(offset); return outerBuilder.asUint8Array(); } - public static Uint8Array barrageMessage(Uint8Array rpcTicket, int sequence, - boolean halfCloseAfterMessage) { + public static Uint8Array barrageMessage(Uint8Array rpcTicket, int sequence, boolean halfCloseAfterMessage) { Builder builder = new Builder(1024); - // noinspection deprecation - this deprecation is incorrect, tsickle didn't understand that - // only one overload is deprecated + // noinspection deprecation - this deprecation is incorrect, tsickle didn't understand that only one overload is + // deprecated double rpcTicketOffset = BarrageMessageWrapper.createRpcTicketVector(builder, rpcTicket); - double offset = BarrageMessageWrapper.createBarrageMessageWrapper(builder, MAGIC, - BarrageMessageType.None, 0, rpcTicketOffset, Long.create(sequence, 0), - halfCloseAfterMessage); + double offset = BarrageMessageWrapper.createBarrageMessageWrapper(builder, MAGIC, BarrageMessageType.None, 0, + rpcTicketOffset, Long.create(sequence, 0), halfCloseAfterMessage); builder.finish(offset); return builder.asUint8Array(); } @@ -142,8 +136,8 @@ public static ByteBuffer typedArrayToLittleEndianByteBuffer(Int8Array data) { return bb; } - public static TableSnapshot createSnapshot(RecordBatch header, ByteBuffer body, - BarrageUpdateMetadata barrageUpdate, boolean isViewport, String[] columnTypes) { + public static TableSnapshot createSnapshot(RecordBatch header, ByteBuffer body, BarrageUpdateMetadata barrageUpdate, + boolean isViewport, String[] columnTypes) { RangeSet added; final RangeSet includedAdditions; @@ -151,46 +145,36 @@ public static TableSnapshot createSnapshot(RecordBatch header, ByteBuffer body, includedAdditions = RangeSet.ofRange(0, (long) (header.length().toFloat64() - 1)); } else { added = new CompressedRangeSetReader() - .read(typedArrayToLittleEndianByteBuffer(barrageUpdate.addedRowsArray())); + .read(typedArrayToLittleEndianByteBuffer(barrageUpdate.addedRowsArray())); if (isViewport) { - 
includedAdditions = new CompressedRangeSetReader().read( - typedArrayToLittleEndianByteBuffer(barrageUpdate.addedRowsIncludedArray())); + includedAdditions = new CompressedRangeSetReader() + .read(typedArrayToLittleEndianByteBuffer(barrageUpdate.addedRowsIncludedArray())); } else { - // if this isn't a viewport, then a second index isn't sent, because all rows are - // included + // if this isn't a viewport, then a second index isn't sent, because all rows are included includedAdditions = added; } } - // read the nodes and buffers into iterators so that we can descend into the data columns as - // necessary - Iter nodes = new Iter<>( - IntStream.range(0, (int) header.nodesLength()).mapToObj(header::nodes).iterator()); - Iter buffers = new Iter<>( - IntStream.range(0, (int) header.buffersLength()).mapToObj(header::buffers).iterator()); + // read the nodes and buffers into iterators so that we can descend into the data columns as necessary + Iter nodes = + new Iter<>(IntStream.range(0, (int) header.nodesLength()).mapToObj(header::nodes).iterator()); + Iter buffers = + new Iter<>(IntStream.range(0, (int) header.buffersLength()).mapToObj(header::buffers).iterator()); ColumnData[] columnData = new ColumnData[columnTypes.length]; for (int columnIndex = 0; columnIndex < columnTypes.length; ++columnIndex) { - columnData[columnIndex] = readArrowBuffer(body, nodes, buffers, - (int) includedAdditions.size(), columnTypes[columnIndex]); + columnData[columnIndex] = + readArrowBuffer(body, nodes, buffers, (int) includedAdditions.size(), columnTypes[columnIndex]); } - return new TableSnapshot(includedAdditions, columnData, (long) header.length().toFloat64());// note - // that - // this + return new TableSnapshot(includedAdditions, columnData, (long) header.length().toFloat64());// note that this // truncates - // precision - // if - // we - // have - // more - // than - // around - // 2^52 - // rows + // precision if we + // have more than + // around 2^52 rows } - public static 
DeltaUpdatesBuilder deltaUpdates(BarrageUpdateMetadata barrageUpdate, - boolean isViewport, String[] columnTypes) { + public static DeltaUpdatesBuilder deltaUpdates(BarrageUpdateMetadata barrageUpdate, boolean isViewport, + String[] columnTypes) { return new DeltaUpdatesBuilder(barrageUpdate, isViewport, columnTypes); } @@ -200,26 +184,24 @@ public static class DeltaUpdatesBuilder { private final String[] columnTypes; private int recordBatchesSeen = 0; - public DeltaUpdatesBuilder(BarrageUpdateMetadata barrageUpdate, boolean isViewport, - String[] columnTypes) { + public DeltaUpdatesBuilder(BarrageUpdateMetadata barrageUpdate, boolean isViewport, String[] columnTypes) { this.barrageUpdate = barrageUpdate; this.columnTypes = columnTypes; deltaUpdates.setAdded(new CompressedRangeSetReader() - .read(typedArrayToLittleEndianByteBuffer(barrageUpdate.addedRowsArray()))); + .read(typedArrayToLittleEndianByteBuffer(barrageUpdate.addedRowsArray()))); deltaUpdates.setRemoved(new CompressedRangeSetReader() - .read(typedArrayToLittleEndianByteBuffer(barrageUpdate.removedRowsArray()))); + .read(typedArrayToLittleEndianByteBuffer(barrageUpdate.removedRowsArray()))); - deltaUpdates.setShiftedRanges(new ShiftedRangeReader() - .read(typedArrayToLittleEndianByteBuffer(barrageUpdate.shiftDataArray()))); + deltaUpdates.setShiftedRanges( + new ShiftedRangeReader().read(typedArrayToLittleEndianByteBuffer(barrageUpdate.shiftDataArray()))); RangeSet includedAdditions; if (isViewport) { - includedAdditions = new CompressedRangeSetReader().read( - typedArrayToLittleEndianByteBuffer(barrageUpdate.addedRowsIncludedArray())); + includedAdditions = new CompressedRangeSetReader() + .read(typedArrayToLittleEndianByteBuffer(barrageUpdate.addedRowsIncludedArray())); } else { - // if this isn't a viewport, then a second index isn't sent, because all rows are - // included + // if this isn't a viewport, then a second index isn't sent, because all rows are included includedAdditions = 
deltaUpdates.getAdded(); } deltaUpdates.setIncludedAdditions(includedAdditions); @@ -228,60 +210,55 @@ public DeltaUpdatesBuilder(BarrageUpdateMetadata barrageUpdate, boolean isViewpo } /** - * Appends a new record batch and payload. Returns true if this was the final record batch - * that was expected. + * Appends a new record batch and payload. Returns true if this was the final record batch that was expected. */ public boolean appendRecordBatch(RecordBatch recordBatch, ByteBuffer body) { - assert recordBatchesSeen < barrageUpdate.numAddBatches() - + barrageUpdate.numModBatches(); + assert recordBatchesSeen < barrageUpdate.numAddBatches() + barrageUpdate.numModBatches(); if (barrageUpdate.numAddBatches() > recordBatchesSeen) { handleAddBatch(recordBatch, body); } else { handleModBatch(recordBatch, body); } recordBatchesSeen++; - return recordBatchesSeen == barrageUpdate.numAddBatches() - + barrageUpdate.numModBatches(); + return recordBatchesSeen == barrageUpdate.numAddBatches() + barrageUpdate.numModBatches(); } private void handleAddBatch(RecordBatch recordBatch, ByteBuffer body) { - Iter nodes = new Iter<>(IntStream.range(0, (int) recordBatch.nodesLength()) - .mapToObj(recordBatch::nodes).iterator()); - Iter buffers = new Iter<>(IntStream.range(0, (int) recordBatch.buffersLength()) - .mapToObj(recordBatch::buffers).iterator()); + Iter nodes = new Iter<>( + IntStream.range(0, (int) recordBatch.nodesLength()).mapToObj(recordBatch::nodes).iterator()); + Iter buffers = new Iter<>( + IntStream.range(0, (int) recordBatch.buffersLength()).mapToObj(recordBatch::buffers).iterator()); - DeltaUpdates.ColumnAdditions[] addedColumnData = - new DeltaUpdates.ColumnAdditions[columnTypes.length]; + DeltaUpdates.ColumnAdditions[] addedColumnData = new DeltaUpdates.ColumnAdditions[columnTypes.length]; for (int columnIndex = 0; columnIndex < columnTypes.length; ++columnIndex) { assert nodes.hasNext() && buffers.hasNext(); - ColumnData columnData = readArrowBuffer(body, nodes, 
buffers, - (int) nodes.peek().length().toFloat64(), columnTypes[columnIndex]); + ColumnData columnData = readArrowBuffer(body, nodes, buffers, (int) nodes.peek().length().toFloat64(), + columnTypes[columnIndex]); - addedColumnData[columnIndex] = - new DeltaUpdates.ColumnAdditions(columnIndex, columnData); + addedColumnData[columnIndex] = new DeltaUpdates.ColumnAdditions(columnIndex, columnData); } deltaUpdates.setSerializedAdditions(addedColumnData); } private void handleModBatch(RecordBatch recordBatch, ByteBuffer body) { - Iter nodes = new Iter<>(IntStream.range(0, (int) recordBatch.nodesLength()) - .mapToObj(recordBatch::nodes).iterator()); - Iter buffers = new Iter<>(IntStream.range(0, (int) recordBatch.buffersLength()) - .mapToObj(recordBatch::buffers).iterator()); + Iter nodes = new Iter<>( + IntStream.range(0, (int) recordBatch.nodesLength()).mapToObj(recordBatch::nodes).iterator()); + Iter buffers = new Iter<>( + IntStream.range(0, (int) recordBatch.buffersLength()).mapToObj(recordBatch::buffers).iterator()); DeltaUpdates.ColumnModifications[] modifiedColumnData = - new DeltaUpdates.ColumnModifications[columnTypes.length]; + new DeltaUpdates.ColumnModifications[columnTypes.length]; for (int columnIndex = 0; columnIndex < columnTypes.length; ++columnIndex) { assert nodes.hasNext() && buffers.hasNext(); BarrageModColumnMetadata columnMetadata = barrageUpdate.nodes(columnIndex); RangeSet modifiedRows = new CompressedRangeSetReader() - .read(typedArrayToLittleEndianByteBuffer(columnMetadata.modifiedRowsArray())); + .read(typedArrayToLittleEndianByteBuffer(columnMetadata.modifiedRowsArray())); - ColumnData columnData = readArrowBuffer(body, nodes, buffers, - (int) nodes.peek().length().toFloat64(), columnTypes[columnIndex]); + ColumnData columnData = readArrowBuffer(body, nodes, buffers, (int) nodes.peek().length().toFloat64(), + columnTypes[columnIndex]); modifiedColumnData[columnIndex] = - new DeltaUpdates.ColumnModifications(columnIndex, modifiedRows, 
columnData); + new DeltaUpdates.ColumnModifications(columnIndex, modifiedRows, columnData); } deltaUpdates.setSerializedModifications(modifiedColumnData); } @@ -291,8 +268,8 @@ public DeltaUpdates build() { } } - private static ColumnData readArrowBuffer(ByteBuffer data, Iter nodes, - Iter buffers, int size, String columnType) { + private static ColumnData readArrowBuffer(ByteBuffer data, Iter nodes, Iter buffers, int size, + String columnType) { // explicit cast to be clear that we're rounding down BitSet valid = readValidityBufferAsBitset(data, size, buffers.next()); FieldNode thisNode = nodes.next(); @@ -305,34 +282,34 @@ private static ColumnData readArrowBuffer(ByteBuffer data, Iter nodes case "int": assert positions.length().toFloat64() >= size * 4; Int32Array intArray = new Int32Array(TypedArrayHelper.unwrap(data).buffer, - (int) positions.offset().toFloat64(), size); + (int) positions.offset().toFloat64(), size); return new IntArrayColumnData(Js.uncheckedCast(intArray)); case "short": assert positions.length().toFloat64() >= size * 2; Int16Array shortArray = new Int16Array(TypedArrayHelper.unwrap(data).buffer, - (int) positions.offset().toFloat64(), size); + (int) positions.offset().toFloat64(), size); return new ShortArrayColumnData(Js.uncheckedCast(shortArray)); case "boolean": case "java.lang.Boolean": case "byte": assert positions.length().toFloat64() >= size; - Int8Array byteArray = new Int8Array(TypedArrayHelper.unwrap(data).buffer, - (int) positions.offset().toFloat64(), size); + Int8Array byteArray = + new Int8Array(TypedArrayHelper.unwrap(data).buffer, (int) positions.offset().toFloat64(), size); return new ByteArrayColumnData(Js.uncheckedCast(byteArray)); case "double": assert positions.length().toFloat64() >= size * 8; Float64Array doubleArray = new Float64Array(TypedArrayHelper.unwrap(data).buffer, - (int) positions.offset().toFloat64(), size); + (int) positions.offset().toFloat64(), size); return new 
DoubleArrayColumnData(Js.uncheckedCast(doubleArray)); case "float": assert positions.length().toFloat64() >= size * 4; Float32Array floatArray = new Float32Array(TypedArrayHelper.unwrap(data).buffer, - (int) positions.offset().toFloat64(), size); + (int) positions.offset().toFloat64(), size); return new FloatArrayColumnData(Js.uncheckedCast(floatArray)); case "char": assert positions.length().toFloat64() >= size * 2; Uint16Array charArray = new Uint16Array(TypedArrayHelper.unwrap(data).buffer, - (int) positions.offset().toFloat64(), size); + (int) positions.offset().toFloat64(), size); return new CharArrayColumnData(Js.uncheckedCast(charArray)); // longs are a special case despite being java primitives case "long": @@ -398,14 +375,12 @@ private static ColumnData readArrowBuffer(ByteBuffer data, Iter nodes assert innerOffsets != null; if (!innerValid.get(j)) { assert innerOffsets.get(j) == innerOffsets.get(j + 1) - : innerOffsets.get(j) + " == " - + innerOffsets.get(j + 1); + : innerOffsets.get(j) + " == " + innerOffsets.get(j + 1); continue; } - // might be cheaper to do views on the underlying bb (which will - // be copied anyway into the String) - data.position( - (int) (payload.offset().toFloat64()) + offsets.get(i)); + // might be cheaper to do views on the underlying bb (which will be copied anyway + // into the String) + data.position((int) (payload.offset().toFloat64()) + offsets.get(i)); byte[] stringBytes = new byte[data.remaining()]; data.get(stringBytes); strArr[j] = new String(stringBytes, StandardCharsets.UTF_8); @@ -415,13 +390,11 @@ private static ColumnData readArrowBuffer(ByteBuffer data, Iter nodes return new StringArrayArrayColumnData(strArrArr); default: - throw new IllegalStateException( - "Can't decode column of type " + columnType); + throw new IllegalStateException("Can't decode column of type " + columnType); } } else { - // non-array, variable length stuff, just grab the buffer and read ranges - // specified by offsets + // non-array, 
variable length stuff, just grab the buffer and read ranges specified by offsets Buffer payload = buffers.next(); switch (columnType) { @@ -432,8 +405,7 @@ private static ColumnData readArrowBuffer(ByteBuffer data, Iter nodes continue; } byte[] stringBytes = new byte[offsets.get(i + 1) - offsets.get(i)]; - data.position( - (int) (payload.offset().toFloat64()) + offsets.get(i)); + data.position((int) (payload.offset().toFloat64()) + offsets.get(i)); data.get(stringBytes); stringArray[i] = new String(stringBytes, StandardCharsets.UTF_8);// new // String(Js.uncheckedCast(stringBytes)); @@ -445,8 +417,7 @@ private static ColumnData readArrowBuffer(ByteBuffer data, Iter nodes if (hasNulls && !valid.get(i)) { continue; } - data.position( - (int) (payload.offset().toFloat64()) + offsets.get(i)); + data.position((int) (payload.offset().toFloat64()) + offsets.get(i)); int scale = data.getInt(); bigDecArray[i] = new BigDecimal(readBigInt(data), scale); } @@ -457,15 +428,13 @@ private static ColumnData readArrowBuffer(ByteBuffer data, Iter nodes if (hasNulls && !valid.get(i)) { continue; } - data.position( - (int) (payload.offset().toFloat64()) + offsets.get(i)); + data.position((int) (payload.offset().toFloat64()) + offsets.get(i)); bigIntArray[i] = readBigInt(data); } return new BigIntegerArrayColumnData(bigIntArray); default: - throw new IllegalStateException( - "Can't decode column of type " + columnType); + throw new IllegalStateException("Can't decode column of type " + columnType); } } } @@ -480,8 +449,8 @@ private static BigInteger readBigInt(ByteBuffer data) { private static BitSet readValidityBufferAsBitset(ByteBuffer data, int size, Buffer buffer) { if (size == 0 || buffer.length().toFloat64() == 0) { - // these buffers are optional (and empty) if the column is empty, or if it has - // primitives and we've allowed DH nulls + // these buffers are optional (and empty) if the column is empty, or if it has primitives and we've allowed + // DH nulls return new 
BitSet(0); } data.position((int) buffer.offset().toFloat64()); diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/CompressedRangeSetReader.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/CompressedRangeSetReader.java index 0754b9f5493..7d47f2e229e 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/CompressedRangeSetReader.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/CompressedRangeSetReader.java @@ -33,12 +33,9 @@ public class CompressedRangeSetReader { public static ByteBuffer writeRange(RangeSet s) { long offset = 0; - ByteBuffer payload = ByteBuffer.allocate(s.rangeCount() * 2 * (8 + 1) + 1);// max size it - // would need to - // be + ByteBuffer payload = ByteBuffer.allocate(s.rangeCount() * 2 * (8 + 1) + 1);// max size it would need to be payload.order(ByteOrder.LITTLE_ENDIAN); - ShortBuffer shorts = ShortBuffer.allocate(s.rangeCount() * 2);// assuming that every range - // will need 2 shorts + ShortBuffer shorts = ShortBuffer.allocate(s.rangeCount() * 2);// assuming that every range will need 2 shorts for (Iterator it = s.rangeIterator(); it.hasNext();) { Range r = it.next(); if (r.getLast() == r.getFirst()) { @@ -54,13 +51,12 @@ public static ByteBuffer writeRange(RangeSet s) { payload.flip(); ByteBuffer sliced = payload.slice(); - sliced.order(ByteOrder.LITTLE_ENDIAN);// this is required in JVM code, but apparently not in - // GWT emulation + sliced.order(ByteOrder.LITTLE_ENDIAN);// this is required in JVM code, but apparently not in GWT emulation return sliced; } - private static long appendWithDeltaOffset(ByteBuffer payload, ShortBuffer shorts, long offset, - long value, boolean negate) { + private static long appendWithDeltaOffset(ByteBuffer payload, ShortBuffer shorts, long offset, long value, + boolean negate) { if (value >= offset + Short.MAX_VALUE) { flushShorts(payload, shorts); @@ -77,8 +73,7 @@ private static long 
appendWithDeltaOffset(ByteBuffer payload, ShortBuffer shorts private static void flushShorts(ByteBuffer payload, ShortBuffer shorts) { for (int offset = 0; offset < shorts.position();) { int byteCount = 0; - while (offset + byteCount < shorts.position() - && (shorts.get(offset + byteCount) < Byte.MAX_VALUE + while (offset + byteCount < shorts.position() && (shorts.get(offset + byteCount) < Byte.MAX_VALUE && shorts.get(offset + byteCount) > Byte.MIN_VALUE)) { byteCount++; } @@ -98,8 +93,7 @@ private static void flushShorts(ByteBuffer payload, ShortBuffer shorts) { int consecutiveBytes = 0; while (shortCount + consecutiveBytes + offset < shorts.position()) { final short shortValue = shorts.get(offset + shortCount + consecutiveBytes); - final boolean requiresShort = - (shortValue >= Byte.MAX_VALUE || shortValue <= Byte.MIN_VALUE); + final boolean requiresShort = (shortValue >= Byte.MAX_VALUE || shortValue <= Byte.MIN_VALUE); if (!requiresShort) { consecutiveBytes++; } else { @@ -114,7 +108,7 @@ private static void flushShorts(ByteBuffer payload, ShortBuffer shorts) { } // if we have a small number of trailing bytes, tack them onto the end if (consecutiveBytes > 0 && consecutiveBytes <= 3 - && (offset + shortCount + consecutiveBytes == shorts.position())) { + && (offset + shortCount + consecutiveBytes == shorts.position())) { shortCount += consecutiveBytes; } if (shortCount >= 2) { @@ -182,8 +176,7 @@ public RangeSet read(ByteBuffer data) { } return RangeSet.fromSortedRanges(sortedRanges.toArray(new Range[0])); default: - throw new IllegalStateException( - "Bad command: " + command + " at position " + data.position()); + throw new IllegalStateException("Bad command: " + command + " at position " + data.position()); } } while (true); diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/ShiftedRangeReader.java b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/ShiftedRangeReader.java index 6d0a37ef625..ded698cb860 100644 --- 
a/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/ShiftedRangeReader.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/barrage/ShiftedRangeReader.java @@ -22,7 +22,7 @@ public ShiftedRange[] read(ByteBuffer data) { while (startIter.hasNext()) { long startPosition = startIter.nextLong(); ranges[ranges.length] = new ShiftedRange(new Range(startPosition, endIter.nextLong()), - postShiftStartIter.nextLong() - startPosition); + postShiftStartIter.nextLong() - startPosition); } return ranges; diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/batch/BatchBuilder.java b/web/client-api/src/main/java/io/deephaven/web/client/api/batch/BatchBuilder.java index a2229576a36..da697949dff 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/batch/BatchBuilder.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/batch/BatchBuilder.java @@ -22,8 +22,8 @@ /** * Used by client to create batched requests. * - * This allows us to store everything using the objects a client expects to interact with (Sort, - * FilterCondition, etc), rather than DTO types like SortDescriptor, FilterDescriptor, etc. + * This allows us to store everything using the objects a client expects to interact with (Sort, FilterCondition, etc), + * rather than DTO types like SortDescriptor, FilterDescriptor, etc. * */ public class BatchBuilder { @@ -111,8 +111,7 @@ public boolean equals(Object o) { final BatchOp batchOp = (BatchOp) o; - // even if both have null handles, they should not be equal unless they are the same - // instance... + // even if both have null handles, they should not be equal unless they are the same instance... if (source != null ? !source.equals(batchOp.source) : batchOp.source != null) return false; return target != null ? 
target.equals(batchOp.target) : batchOp.target == null; @@ -138,16 +137,15 @@ public boolean isEqual(ClientTableState value) { return false; } // Array properties where order is not important; properties are commutative - if (getFilters().size() != value.getFilters().size() - || !getFilters().containsAll(value.getFilters())) { + if (getFilters().size() != value.getFilters().size() || !getFilters().containsAll(value.getFilters())) { return false; } if (getDropColumns().size() != value.getDropColumns().size() - || !getDropColumns().containsAll(value.getDropColumns())) { + || !getDropColumns().containsAll(value.getDropColumns())) { return false; } if (getViewColumns().size() != value.getViewColumns().size() - || !getViewColumns().containsAll(value.getViewColumns())) { + || !getViewColumns().containsAll(value.getViewColumns())) { return false; } @@ -162,9 +160,9 @@ public boolean isEqual(ClientTableState value) { public String toString() { return "BatchOp{" + // "handles=" + handles + - ", state=" + (state == null ? null : state.toStringMinimal()) + - ", appendTo=" + (appendTo == null ? null : appendTo.toStringMinimal()) + - ", " + super.toString() + "}"; + ", state=" + (state == null ? null : state.toStringMinimal()) + + ", appendTo=" + (appendTo == null ? 
null : appendTo.toStringMinimal()) + + ", " + super.toString() + "}"; } } @@ -176,18 +174,16 @@ public JsArray serializable() { JsArray send = new JsArray<>(); for (BatchOp op : ops) { if (!op.hasHandles()) { - assert op.getState().isRunning() - : "Only running states should be found in batch without a new handle"; + assert op.getState().isRunning() : "Only running states should be found in batch without a new handle"; continue; } if (op.getState().isEmpty()) { op.getState().setResolution(ClientTableState.ResolutionState.FAILED, - "Table state abandoned before request was made"); + "Table state abandoned before request was made"); continue; } - // Each BatchOp is assumed to have one source table and a list of specific ordered - // operations to produce + // Each BatchOp is assumed to have one source table and a list of specific ordered operations to produce // a target. Intermediate items each use the offset before them Supplier prevTableSupplier = new Supplier() { // initialize as -1 because a reference to the "first" will be zero @@ -211,14 +207,14 @@ public TableReference get() { Consumer[] lastOp = new Consumer[1]; List operations = Stream.of( - buildCustomColumns(op, prevTableSupplier, lastOp), - buildViewColumns(op, prevTableSupplier, lastOp), - buildFilter(op, prevTableSupplier, lastOp), - buildSort(op, prevTableSupplier, lastOp), - buildDropColumns(op, prevTableSupplier, lastOp), - flattenOperation(op, prevTableSupplier, lastOp)) - .filter(Objects::nonNull) - .collect(Collectors.toList()); + buildCustomColumns(op, prevTableSupplier, lastOp), + buildViewColumns(op, prevTableSupplier, lastOp), + buildFilter(op, prevTableSupplier, lastOp), + buildSort(op, prevTableSupplier, lastOp), + buildDropColumns(op, prevTableSupplier, lastOp), + flattenOperation(op, prevTableSupplier, lastOp)) + .filter(Objects::nonNull) + .collect(Collectors.toList()); lastOp[0].accept(op.getNewId().makeTicket()); @@ -232,7 +228,7 @@ public TableReference get() { } private Operation 
buildCustomColumns(BatchOp op, Supplier prevTableSupplier, - Consumer[] lastOp) { + Consumer[] lastOp) { SelectOrUpdateRequest value = new SelectOrUpdateRequest(); for (CustomColumnDescriptor customColumn : op.getCustomColumns()) { @@ -254,7 +250,7 @@ private Operation buildCustomColumns(BatchOp op, Supplier prevTa } private Operation flattenOperation(BatchOp op, Supplier prevTableSupplier, - Consumer[] lastOp) { + Consumer[] lastOp) { if (!op.isFlat()) { return null; } @@ -267,8 +263,7 @@ private Operation flattenOperation(BatchOp op, Supplier prevTabl return flattenOp; } - private Operation buildSort(BatchOp op, Supplier prevTableSupplier, - Consumer[] lastOp) { + private Operation buildSort(BatchOp op, Supplier prevTableSupplier, Consumer[] lastOp) { SortTableRequest value = new SortTableRequest(); for (Sort sort : op.getSorts()) { if (op.getAppendTo() == null || !op.getAppendTo().hasSort(sort)) { @@ -287,8 +282,7 @@ private Operation buildSort(BatchOp op, Supplier prevTableSuppli return sortOp; } - private Operation buildFilter(BatchOp op, Supplier prevTableSupplier, - Consumer[] lastOp) { + private Operation buildFilter(BatchOp op, Supplier prevTableSupplier, Consumer[] lastOp) { FilterTableRequest value = new FilterTableRequest(); for (FilterCondition filter : op.getFilters()) { if (op.getAppendTo() == null || !op.getAppendTo().hasFilter(filter)) { @@ -308,7 +302,7 @@ private Operation buildFilter(BatchOp op, Supplier prevTableSupp } private Operation buildDropColumns(BatchOp op, Supplier prevTableSupplier, - Consumer[] lastOp) { + Consumer[] lastOp) { DropColumnsRequest value = new DropColumnsRequest(); for (String dropColumn : op.getDropColumns()) { value.addColumnNames(dropColumn); @@ -327,7 +321,7 @@ private Operation buildDropColumns(BatchOp op, Supplier prevTabl } private Operation buildViewColumns(BatchOp op, Supplier prevTableSupplier, - Consumer[] lastOp) { + Consumer[] lastOp) { SelectOrUpdateRequest value = new SelectOrUpdateRequest(); for (String 
dropColumn : op.getDropColumns()) { value.addColumnSpecs(dropColumn); @@ -398,8 +392,8 @@ public BatchOp getFirstOp() { @Override public String toString() { return "BatchBuilder{" + - "ops=" + ops + - '}'; + "ops=" + ops + + '}'; } public String toStringMinimal() { diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/batch/RequestBatcher.java b/web/client-api/src/main/java/io/deephaven/web/client/api/batch/RequestBatcher.java index 16370053d61..3cdcbfdeca8 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/batch/RequestBatcher.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/batch/RequestBatcher.java @@ -29,8 +29,8 @@ /** * A bucket for queuing up requests on Tables to be sent all at once. * - * Currently scoped to a single table, but we should be able to refactor this to handle multiple - * tables at once (by pushing table/handles into method signatures) + * Currently scoped to a single table, but we should be able to refactor this to handle multiple tables at once (by + * pushing table/handles into method signatures) * * TODO fix core#80 */ @@ -65,7 +65,7 @@ public Promise nestedPromise(JsTable table) { final ClientTableState state = builder.getOp().getState(); return new Promise<>(((resolve, reject) -> { state.onRunning(ignored -> resolve.onInvoke(table), reject::onInvoke, - () -> reject.onInvoke("Table failed, or was closed")); + () -> reject.onInvoke("Table failed, or was closed")); })); } @@ -86,11 +86,10 @@ private void doCreateOps() { BatchOp op = builder.getOp(); ClientTableState appendTo = table.state(); assert appendTo.getBinding(table).isActive() - : "Table state " + appendTo + " did not have " + table + " actively bound to it"; + : "Table state " + appendTo + " did not have " + table + " actively bound to it"; rollbackTo = appendTo.getActiveBinding(table); - while (!appendTo.isCompatible(op.getSorts(), op.getFilters(), op.getCustomColumns(), - op.isFlat())) { + while 
(!appendTo.isCompatible(op.getSorts(), op.getFilters(), op.getCustomColumns(), op.isFlat())) { final ClientTableState nextTry = appendTo.getPrevious(); assert nextTry != null : "Root state " + appendTo + " is not blank!"; orphans.add(appendTo); @@ -109,8 +108,7 @@ private void doCreateOps() { // but we'll still keep it around to process as an active state builder.doNextOp(op); } else { - // Create new states. If we are adding both filters and sorts, we'll want an - // intermediate table + // Create new states. If we are adding both filters and sorts, we'll want an intermediate table final BatchOp newOp = maybeInsertInterimTable(op, appendTo); // Insert the final operation @@ -120,16 +118,14 @@ private void doCreateOps() { private ClientTableState insertOp(BatchOp op, ClientTableState sourceState) { - assert table.state() == sourceState - : "You must update your table's currentState before calling insertOp"; + assert table.state() == sourceState : "You must update your table's currentState before calling insertOp"; final TableTicket handle = sourceState.getHandle(); final ClientTableState newTail = connection.newState(sourceState, op); table.setState(newTail); table.setRollback(rollbackTo); op.setState(newTail); op.setHandles(handle, newTail.getHandle()); - // add the intermediate state to the request, causes next call to getOp() to return a new - // builder + // add the intermediate state to the request, causes next call to getOp() to return a new builder builder.doNextOp(op); return newTail; @@ -162,10 +158,8 @@ private BatchOp maybeInsertInterimTable(BatchOp op, ClientTableState appendTo) { op.setAppendTo(appendTo); boolean filterChanged = !appendTo.getFilters().equals(op.getFilters()); if (filterChanged) { - // whenever filters have changed, we will create one exported table w/ filters and any - // custom columns. - // if there are _also_ sorts that have changed, then we'll want to add those on - // afterwards. 
+ // whenever filters have changed, we will create one exported table w/ filters and any custom columns. + // if there are _also_ sorts that have changed, then we'll want to add those on afterwards. final List appendToSort = appendTo.getSorts(); final List desiredSorts = op.getSorts(); boolean sortChanged = !appendToSort.equals(desiredSorts); @@ -191,10 +185,8 @@ private BatchOp maybeInsertInterimTable(BatchOp op, ClientTableState appendTo) { public Promise sendRequest() { return new Promise<>((resolve, reject) -> { - // calling buildRequest will change the state of each table, so we first check what the - // state was - // of each table before we do that, in case we are actually not making a call to the - // server + // calling buildRequest will change the state of each table, so we first check what the state was + // of each table before we do that, in case we are actually not making a call to the server ClientTableState prevState = table.lastVisibleState(); final BatchTableRequest request = buildRequest(); @@ -206,23 +198,18 @@ public Promise sendRequest() { onSend.clear(); sent = true; if (request.getOpsList().length == 0) { - // Since this is an empty request, there are no "interested" tables as we normally - // would define them, + // Since this is an empty request, there are no "interested" tables as we normally would define them, // so we can only operate on the root table object - // No server call needed - we need to examine the "before" state and fire events - // based on that. - // This is like tableLoop below, except no failure is possible, since we already - // have the results + // No server call needed - we need to examine the "before" state and fire events based on that. 
+ // This is like tableLoop below, except no failure is possible, since we already have the results if (table.isAlive()) { final ClientTableState active = table.state(); assert active.isRunning() : active; boolean sortChanged = !prevState.getSorts().equals(active.getSorts()); boolean filterChanged = !prevState.getFilters().equals(active.getFilters()); - boolean customColumnChanged = - !prevState.getCustomColumns().equals(active.getCustomColumns()); + boolean customColumnChanged = !prevState.getCustomColumns().equals(active.getCustomColumns()); table.fireEvent(HasEventHandling.EVENT_REQUEST_SUCCEEDED); - // TODO think more about the order of events, and what kinds of things one might - // bind to each + // TODO think more about the order of events, and what kinds of things one might bind to each if (sortChanged) { table.fireEvent(JsTable.EVENT_SORTCHANGED); } @@ -236,8 +223,8 @@ public Promise sendRequest() { orphans.forEach(ClientTableState::cleanup); resolve.onInvoke((Void) null); - // if there's no outgoing operation, user may have removed an operation and wants to - // return to where they left off + // if there's no outgoing operation, user may have removed an operation and wants to return to where + // they left off table.maybeReviveSubscription(); finished = true; return; @@ -251,13 +238,12 @@ public Promise sendRequest() { JsLog.debug("Sending request", LazyString.of(request), request, " based on ", this); - ResponseStreamWrapper batchStream = ResponseStreamWrapper - .of(connection.tableServiceClient().batch(request, connection.metadata())); + ResponseStreamWrapper batchStream = + ResponseStreamWrapper.of(connection.tableServiceClient().batch(request, connection.metadata())); batchStream.onData(response -> { TableReference resultid = response.getResultId(); if (!resultid.hasTicket()) { - // thanks for telling us, but we don't at this time have a nice way to indicate - // this + // thanks for telling us, but we don't at this time have a nice way to indicate 
this return; } Ticket ticket = resultid.getTicket(); @@ -265,24 +251,25 @@ public Promise sendRequest() { String fail = response.getErrorInfo(); // any table which has that state active should fire a failed event - ClientTableState state = allStates().filter(cts -> cts.getHandle().makeTicket() - .getTicket_asB64().equals(ticket.getTicket_asB64())).first(); + ClientTableState state = allStates().filter( + cts -> cts.getHandle().makeTicket().getTicket_asB64().equals(ticket.getTicket_asB64())) + .first(); state.getHandle().setState(TableTicket.State.FAILED); for (JsTable table : allInterestedTables().filter(t -> t.state() == state)) { // fire the failed event failTable(table, fail); } - // mark state as failed (his has to happen after table is marked as failed, it - // will trigger rollback + // mark state as failed (his has to happen after table is marked as failed, it will trigger rollback state.setResolution(ClientTableState.ResolutionState.FAILED, fail); return; } // any table which has that state active should fire a failed event - ClientTableState state = allStates().filter(cts -> cts.getHandle().makeTicket() - .getTicket_asB64().equals(ticket.getTicket_asB64())).first(); + ClientTableState state = allStates() + .filter(cts -> cts.getHandle().makeTicket().getTicket_asB64().equals(ticket.getTicket_asB64())) + .first(); // state.getHandle().setState(TableTicket.State.EXPORTED); for (JsTable table : allInterestedTables().filter(t -> t.state() == state)) { // check what state it was in previously to use for firing an event @@ -295,13 +282,10 @@ public Promise sendRequest() { // fire any events that are necessary boolean sortChanged = !lastVisibleState.getSorts().equals(state.getSorts()); - boolean filterChanged = - !lastVisibleState.getFilters().equals(state.getFilters()); - boolean customColumnChanged = - !lastVisibleState.getCustomColumns().equals(state.getCustomColumns()); + boolean filterChanged = !lastVisibleState.getFilters().equals(state.getFilters()); + 
boolean customColumnChanged = !lastVisibleState.getCustomColumns().equals(state.getCustomColumns()); table.fireEvent(HasEventHandling.EVENT_REQUEST_SUCCEEDED); - // TODO think more about the order of events, and what kinds of things one might - // bind to each + // TODO think more about the order of events, and what kinds of things one might bind to each if (sortChanged) { table.fireEvent(JsTable.EVENT_SORTCHANGED); } @@ -337,14 +321,14 @@ private void failTable(JsTable t, String failureMessage) { } event.setDetail(JsPropertyMap.of( - "errorMessage", failureMessage, - "configuration", best.toJs())); + "errorMessage", failureMessage, + "configuration", best.toJs())); try { t.rollback(); } catch (Exception e) { JsLog.warn( - "An exception occurred trying to rollback the table. This means that there will be no ticking data until the table configuration is applied again in a way that makes sense. See IDS-5199 for more detail.", - e); + "An exception occurred trying to rollback the table. This means that there will be no ticking data until the table configuration is applied again in a way that makes sense. See IDS-5199 for more detail.", + e); } t.fireEvent(HasEventHandling.EVENT_REQUEST_FAILED, event); } @@ -424,8 +408,7 @@ public void setConfig(TableConfig other) { customColumns(other.getCustomColumns()); sort(other.getSorts()); filter(other.getFilters()); - // selectDistinct ignored as it's not really treated as a standard table operation right - // now. + // selectDistinct ignored as it's not really treated as a standard table operation right now. 
setFlat(other.isFlat()); } } diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/batch/TableConfig.java b/web/client-api/src/main/java/io/deephaven/web/client/api/batch/TableConfig.java index cf61169f593..f2c927e48c4 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/batch/TableConfig.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/batch/TableConfig.java @@ -11,11 +11,10 @@ import java.util.stream.Collectors; /** - * This class represents a container for all the various operations you might apply to a given table - * (sort, filter, custom columns, select distinct, soon others). + * This class represents a container for all the various operations you might apply to a given table (sort, filter, + * custom columns, select distinct, soon others). * - * Using this container allows us to add an optional new property without updating many methods - * signatures. + * Using this container allows us to add an optional new property without updating many methods signatures. 
*/ public class TableConfig { @@ -43,12 +42,12 @@ public static class JsConfig { public TableConfig() {} public TableConfig( - List sorts, - List conditions, - List filters, - List customColumns, - List dropColumns, - List viewColumns) { + List sorts, + List conditions, + List filters, + List customColumns, + List dropColumns, + List viewColumns) { this.sorts.addAll(sorts); this.conditions.addAll(conditions); this.filters.addAll(filters); @@ -143,20 +142,19 @@ public int hashCode() { @Override public String toString() { return "TableConfig{" + - "sorts=" + sorts + - ", filters=" + filters + - ", customColumns=" + customColumns + - ", selectDistinct=" + selectDistinct + - ", conditions=" + conditions + - ", dropColumns=" + dropColumns + - ", viewColumns=" + viewColumns + - ", isFlat=" + isFlat + - '}'; + "sorts=" + sorts + + ", filters=" + filters + + ", customColumns=" + customColumns + + ", selectDistinct=" + selectDistinct + + ", conditions=" + conditions + + ", dropColumns=" + dropColumns + + ", viewColumns=" + viewColumns + + ", isFlat=" + isFlat + + '}'; } public boolean isEmpty() { - return sorts.isEmpty() && conditions.isEmpty() && filters.isEmpty() - && customColumns.isEmpty(); + return sorts.isEmpty() && conditions.isEmpty() && filters.isEmpty() && customColumns.isEmpty(); } protected void setSorts(List sorts) { @@ -190,8 +188,7 @@ protected void setCustomColumns(List customColumns) { } public boolean isEmptyConfig() { - return sorts.isEmpty() && filters.isEmpty() && customColumns.isEmpty() - && selectDistinct.isEmpty(); + return sorts.isEmpty() && filters.isEmpty() && customColumns.isEmpty() && selectDistinct.isEmpty(); } public JsConfig toJs() { @@ -202,8 +199,8 @@ public JsConfig toJs() { config.viewColumns = viewColumns.toArray(new String[viewColumns.size()]); config.filters = filters.toArray(new FilterCondition[filters.size()]); config.customColumns = customColumns.stream() - .map(CustomColumnDescriptor::getExpression) - .toArray(String[]::new); + 
.map(CustomColumnDescriptor::getExpression) + .toArray(String[]::new); config.isFlat = isFlat; return config; } @@ -212,44 +209,44 @@ public String toSummaryString() { StringBuilder result = new StringBuilder(); if (!customColumns.isEmpty()) { result.append("customColumns: ") - .append(customColumns.stream() - .map(CustomColumnDescriptor::getExpression) - .collect(Collectors.joining(","))) - .append("\n"); + .append(customColumns.stream() + .map(CustomColumnDescriptor::getExpression) + .collect(Collectors.joining(","))) + .append("\n"); } if (!filters.isEmpty()) { result.append("filters: ") - .append(filters.stream() - .map(FilterCondition::toString) - .collect(Collectors.joining(","))) - .append("\n"); + .append(filters.stream() + .map(FilterCondition::toString) + .collect(Collectors.joining(","))) + .append("\n"); } if (!sorts.isEmpty()) { result.append("sorts: ") - .append(sorts.stream() - .map(s -> s.getColumn().getName() + " " + s.getDirection()) - .collect(Collectors.joining(","))) - .append("\n"); + .append(sorts.stream() + .map(s -> s.getColumn().getName() + " " + s.getDirection()) + .collect(Collectors.joining(","))) + .append("\n"); } if (!dropColumns.isEmpty()) { result.append("dropColumns: ") - .append(dropColumns.stream().collect(Collectors.joining(","))) - .append("\n"); + .append(dropColumns.stream().collect(Collectors.joining(","))) + .append("\n"); } if (!viewColumns.isEmpty()) { result.append("viewColumns: ") - .append(viewColumns.stream().collect(Collectors.joining(","))) - .append("\n"); + .append(viewColumns.stream().collect(Collectors.joining(","))) + .append("\n"); } if (!conditions.isEmpty()) { result.append("conditions: ") - .append(conditions.stream().collect(Collectors.joining(","))) - .append("\n"); + .append(conditions.stream().collect(Collectors.joining(","))) + .append("\n"); } if (isFlat) { diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/console/JsVariableChanges.java 
b/web/client-api/src/main/java/io/deephaven/web/client/api/console/JsVariableChanges.java index 777b2bf9069..55e82ccc94b 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/console/JsVariableChanges.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/console/JsVariableChanges.java @@ -11,20 +11,19 @@ public class JsVariableChanges { @JsProperty(namespace = "dh.VariableType") public static final String TABLE = "Table", - TREETABLE = "TreeTable", - TABLEMAP = "TableMap", - FIGURE = "Figure", - OTHERWIDGET = "OtherWidget", - PANDAS = "Pandas"; + TREETABLE = "TreeTable", + TABLEMAP = "TableMap", + FIGURE = "Figure", + OTHERWIDGET = "OtherWidget", + PANDAS = "Pandas"; private JsVariableDefinition[] created; private JsVariableDefinition[] updated; private JsVariableDefinition[] removed; private static JsVariableDefinition[] convertDefinitions(VariableDefinition[] definitions) { - return Arrays.stream(definitions) - .map(def -> new JsVariableDefinition(def.getName(), def.getType())) - .toArray(JsVariableDefinition[]::new); + return Arrays.stream(definitions).map(def -> new JsVariableDefinition(def.getName(), def.getType())) + .toArray(JsVariableDefinition[]::new); } public JsVariableChanges(VariableChanges changes) { diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/csv/CsvTypeParser.java b/web/client-api/src/main/java/io/deephaven/web/client/api/csv/CsvTypeParser.java index 200b4c80af8..10ee8b3d3ef 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/csv/CsvTypeParser.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/csv/CsvTypeParser.java @@ -33,14 +33,12 @@ public double writeType(Builder builder) { } @Override - public void writeColumn(String[] strings, JsConsumer addNode, - JsConsumer addBuffer) { + public void writeColumn(String[] strings, JsConsumer addNode, JsConsumer addBuffer) { int nullCount = 0; BitSet nulls = new BitSet(strings.length); Int32Array positions = 
ArrowType.makeBuffer(strings.length + 1, 4, Int32Array::new); // work out the total length we'll need for the payload, plus padding - int payloadLength = - Arrays.stream(strings).filter(Objects::nonNull).mapToInt(String::length).sum(); + int payloadLength = Arrays.stream(strings).filter(Objects::nonNull).mapToInt(String::length).sum(); Uint8Array payload = makeBuffer(payloadLength); int lastOffset = 0; @@ -68,8 +66,7 @@ public double writeType(Builder builder) { } @Override - public void writeColumn(String[] strings, JsConsumer addNode, - JsConsumer addBuffer) { + public void writeColumn(String[] strings, JsConsumer addNode, JsConsumer addBuffer) { // TODO #1041 this type is expected to work throw new IllegalArgumentException("Can't serialize DateTime for CSV"); } @@ -81,19 +78,17 @@ public double writeType(Builder builder) { } @Override - public void writeColumn(String[] strings, JsConsumer addNode, - JsConsumer addBuffer) { + public void writeColumn(String[] strings, JsConsumer addNode, JsConsumer addBuffer) { int nullCount = 0; BitSet nulls = new BitSet(strings.length); - Int32Array payload = ArrowType.makeBuffer(strings.length, - Int32Array.BYTES_PER_ELEMENT, Int32Array::new); + Int32Array payload = + ArrowType.makeBuffer(strings.length, Int32Array.BYTES_PER_ELEMENT, Int32Array::new); for (int i = 0; i < strings.length; i++) { if (strings[i] == null || strings[i].trim().isEmpty()) { payload.setAt(i, (double) QueryConstants.NULL_INT); nullCount++; } else { - payload.setAt(i, - (double) Integer.parseInt(strings[i].trim().replaceAll(",", ""))); + payload.setAt(i, (double) Integer.parseInt(strings[i].trim().replaceAll(",", ""))); nulls.set(i); } } @@ -118,14 +113,11 @@ public double writeType(Builder builder) { } @Override - public void writeColumn(String[] strings, JsConsumer addNode, - JsConsumer addBuffer) { + public void writeColumn(String[] strings, JsConsumer addNode, JsConsumer addBuffer) { int nullCount = 0; BitSet nulls = new BitSet(strings.length); - 
Float64Array payload = new Float64Array(strings.length);// using float because we - // can convert longs to - // doubles, though not - // cheaply + Float64Array payload = new Float64Array(strings.length);// using float because we can convert longs to + // doubles, though not cheaply for (int i = 0; i < strings.length; i++) { long value; if (strings[i] == null || strings[i].trim().isEmpty()) { @@ -170,8 +162,7 @@ public double writeType(Builder builder) { } @Override - public void writeColumn(String[] strings, JsConsumer addNode, - JsConsumer addBuffer) { + public void writeColumn(String[] strings, JsConsumer addNode, JsConsumer addBuffer) { int nullCount = 0; BitSet nulls = new BitSet(strings.length); Float64Array payload = new Float64Array(strings.length);// 64 bits, already aligned @@ -226,8 +217,7 @@ public double writeType(Builder builder) { } @Override - public void writeColumn(String[] strings, JsConsumer addNode, - JsConsumer addBuffer) { + public void writeColumn(String[] strings, JsConsumer addNode, JsConsumer addBuffer) { // TODO #1041 this type is expected to work throw new IllegalArgumentException("Can't serialize DateTime for CSV"); } @@ -252,7 +242,7 @@ private static Uint8Array makeValidityBuffer(int nullCount, BitSet nulls) { } private static T makeBuffer(int elementCount, double bytesPerElement, - Function constructor) { + Function constructor) { return constructor.apply(makeBuffer(elementCount * (int) bytesPerElement).buffer); } @@ -291,10 +281,8 @@ public int typeType() { public abstract double writeType(Builder builder); @Override - public void writeColumn(String[] strings, JsConsumer addNode, - JsConsumer addBuffer) { - throw new IllegalArgumentException( - "Type " + this + " not yet supported for CSV upload"); + public void writeColumn(String[] strings, JsConsumer addNode, JsConsumer addBuffer) { + throw new IllegalArgumentException("Type " + this + " not yet supported for CSV upload"); } } @@ -339,8 +327,7 @@ public interface CsvColumn { 
double writeType(Builder builder); - void writeColumn(String[] strings, JsConsumer addNode, - JsConsumer addBuffer); + void writeColumn(String[] strings, JsConsumer addNode, JsConsumer addBuffer); } public static CsvColumn getColumn(String columnType) { @@ -418,10 +405,9 @@ private static long parseDateTime(String str, String userTimeZone) { } final String pattern = getSubsecondPattern(dateTimeString); final String tzString = timeZoneString == null ? userTimeZone : timeZoneString; - final com.google.gwt.i18n.client.TimeZone timeZone = - JsTimeZone.getTimeZone(tzString).unwrap(); + final com.google.gwt.i18n.client.TimeZone timeZone = JsTimeZone.getTimeZone(tzString).unwrap(); return JsDateTimeFormat.getFormat(pattern).parseWithTimezoneAsLong(dateTimeString, timeZone, - JsTimeZone.needsDstAdjustment(timeZoneString)); + JsTimeZone.needsDstAdjustment(timeZoneString)); } // Updates the pattern for the correct number of subsecond digits 'S' @@ -467,10 +453,10 @@ private static LocalTime parseLocalTime(String s) { final String[] tokens = s.split(":"); if (tokens.length == 2) { // hh:mm return new LocalTime(Byte.parseByte(tokens[0]), Byte.parseByte(tokens[1]), (byte) 0, - (int) (dayNanos + subsecondNanos)); + (int) (dayNanos + subsecondNanos)); } else if (tokens.length == 3) { // hh:mm:ss - return new LocalTime(Byte.parseByte(tokens[0]), Byte.parseByte(tokens[1]), - Byte.parseByte(tokens[2]), (int) (dayNanos + subsecondNanos)); + return new LocalTime(Byte.parseByte(tokens[0]), Byte.parseByte(tokens[1]), Byte.parseByte(tokens[2]), + (int) (dayNanos + subsecondNanos)); } return null; @@ -486,8 +472,7 @@ private static long parseNanos(final String input) { } else { digit = Character.digit(input.charAt(i), 10); if (digit < 0) { - throw new NumberFormatException( - "Invalid character for nanoseconds conversion: " + input.charAt(i)); + throw new NumberFormatException("Invalid character for nanoseconds conversion: " + input.charAt(i)); } } result += digit; diff --git 
a/web/client-api/src/main/java/io/deephaven/web/client/api/filter/FilterCondition.java b/web/client-api/src/main/java/io/deephaven/web/client/api/filter/FilterCondition.java index da294eb270e..c2d12da4815 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/filter/FilterCondition.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/filter/FilterCondition.java @@ -31,8 +31,8 @@ public static FilterCondition search(FilterValue value, @JsOptional FilterValue[ SearchCondition search = new SearchCondition(); search.setSearchString(value.descriptor.getLiteral().getStringValue()); if (columns != null) { - search.setOptionalReferencesList(Arrays.stream(columns) - .map(v -> v.descriptor.getReference()).toArray(Reference[]::new)); + search.setOptionalReferencesList( + Arrays.stream(columns).map(v -> v.descriptor.getReference()).toArray(Reference[]::new)); } Condition c = new Condition(); @@ -58,16 +58,14 @@ public FilterCondition not() { @JsIgnore protected static FilterCondition createAndValidate(Condition descriptor) { - // TODO (deephaven-core#723) re-introduce client-side validation so that a client knows - // right away when + // TODO (deephaven-core#723) re-introduce client-side validation so that a client knows right away when // they build something invalid return new FilterCondition(descriptor); } public FilterCondition and(FilterCondition... filters) { AndCondition and = new AndCondition(); - and.setFiltersList( - Stream.concat(Stream.of(descriptor), Arrays.stream(filters).map(v -> v.descriptor)) + and.setFiltersList(Stream.concat(Stream.of(descriptor), Arrays.stream(filters).map(v -> v.descriptor)) .toArray(Condition[]::new)); Condition c = new Condition(); @@ -79,8 +77,7 @@ public FilterCondition and(FilterCondition... filters) { public FilterCondition or(FilterCondition... 
filters) { OrCondition or = new OrCondition(); - or.setFiltersList( - Stream.concat(Stream.of(descriptor), Arrays.stream(filters).map(v -> v.descriptor)) + or.setFiltersList(Stream.concat(Stream.of(descriptor), Arrays.stream(filters).map(v -> v.descriptor)) .toArray(Condition[]::new)); Condition c = new Condition(); @@ -97,8 +94,7 @@ public Condition makeDescriptor() { @JsMethod public String toString() { - // TODO (deephaven-core#723) implement a readable tostring rather than turning the pb object - // into a string + // TODO (deephaven-core#723) implement a readable tostring rather than turning the pb object into a string return descriptor.toString(); } diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/filter/FilterValue.java b/web/client-api/src/main/java/io/deephaven/web/client/api/filter/FilterValue.java index e6c15726fa9..f93d470bb87 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/filter/FilterValue.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/filter/FilterValue.java @@ -138,8 +138,7 @@ public FilterCondition lessThanOrEqualTo(FilterValue term) { } public FilterCondition in(FilterValue[] terms) { - return makeIn(terms, Table_pb.MatchType.getREGULAR(), - Table_pb.CaseSensitivity.getMATCH_CASE()); + return makeIn(terms, Table_pb.MatchType.getREGULAR(), Table_pb.CaseSensitivity.getMATCH_CASE()); } private FilterCondition makeIn(FilterValue[] terms, double matchType, double casesensitivity) { @@ -155,18 +154,15 @@ private FilterCondition makeIn(FilterValue[] terms, double matchType, double cas } public FilterCondition inIgnoreCase(FilterValue[] terms) { - return makeIn(terms, Table_pb.MatchType.getREGULAR(), - Table_pb.CaseSensitivity.getIGNORE_CASE()); + return makeIn(terms, Table_pb.MatchType.getREGULAR(), Table_pb.CaseSensitivity.getIGNORE_CASE()); } public FilterCondition notIn(FilterValue[] terms) { - return makeIn(terms, Table_pb.MatchType.getINVERTED(), - 
Table_pb.CaseSensitivity.getMATCH_CASE()); + return makeIn(terms, Table_pb.MatchType.getINVERTED(), Table_pb.CaseSensitivity.getMATCH_CASE()); } public FilterCondition notInIgnoreCase(FilterValue[] terms) { - return makeIn(terms, Table_pb.MatchType.getINVERTED(), - Table_pb.CaseSensitivity.getIGNORE_CASE()); + return makeIn(terms, Table_pb.MatchType.getINVERTED(), Table_pb.CaseSensitivity.getIGNORE_CASE()); } public FilterCondition contains(FilterValue term) { @@ -237,8 +233,7 @@ public FilterCondition invoke(String method, FilterValue... args) { @Override public String toString() { - // TODO (deephaven-core#723) implement a readable tostring rather than turning the pb object - // into a string + // TODO (deephaven-core#723) implement a readable tostring rather than turning the pb object into a string return descriptor.toString(); } } diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/i18n/JsDateTimeFormat.java b/web/client-api/src/main/java/io/deephaven/web/client/api/i18n/JsDateTimeFormat.java index 663b078da7f..9478a4c9792 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/i18n/JsDateTimeFormat.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/i18n/JsDateTimeFormat.java @@ -15,11 +15,11 @@ import java.util.*; /** - * Largely an exported wrapper for the GWT DateFormat, but also includes support for formatting - * nanoseconds as an additional 6 decimal places after the rest of the number. + * Largely an exported wrapper for the GWT DateFormat, but also includes support for formatting nanoseconds as an + * additional 6 decimal places after the rest of the number. * - * Other concerns that this handles includes accepting a js Date and ignoring the lack of nanos, - * accepting a js Number and assuming it to be a lossy nano value, and parsing into a js Date. 
+ * Other concerns that this handles includes accepting a js Date and ignoring the lack of nanos, accepting a js Number + * and assuming it to be a lossy nano value, and parsing into a js Date. */ @JsType(namespace = "dh.i18n", name = "DateTimeFormat") public class JsDateTimeFormat { @@ -88,8 +88,7 @@ public JsDateTimeFormat(String pattern) { int subsecondOffset = pattern.indexOf('S'); this.wrappedStart = DateTimeFormat.getFormat(pattern.substring(0, subsecondOffset)); if (subsecondOffset + count < pattern.length()) { - this.wrappedEnd = - DateTimeFormat.getFormat(pattern.substring(subsecondOffset + count)); + this.wrappedEnd = DateTimeFormat.getFormat(pattern.substring(subsecondOffset + count)); } else { this.wrappedEnd = null; } @@ -111,8 +110,8 @@ public JsDateTimeFormat(String pattern) { // synthesize a gwt TimeZone with the correct offset data to get nice output in some tz // other than the browser's current or UTC+/-OFFSET public String format(Object date, @JsOptional JsTimeZone timeZone) { - long nanos = longFromDate(date).orElseThrow( - () -> new IllegalStateException("Can't format non-number, non-date value " + date)); + long nanos = longFromDate(date) + .orElseThrow(() -> new IllegalStateException("Can't format non-number, non-date value " + date)); return formatAsLongNanos(nanos, timeZone); } @@ -138,7 +137,7 @@ public static OptionalLong longFromDate(Object date) { // positives (less than a three hour window). 
if (Math.abs(jsNumber) < Math.pow(2, 43)) { JsLog.warn( - "Number passed as date looks suspiciously small, as though it might be millis since Jan 1, 1970, but will be interpreted as if it were nanoseconds since that date."); + "Number passed as date looks suspiciously small, as though it might be millis since Jan 1, 1970, but will be interpreted as if it were nanoseconds since that date."); } return OptionalLong.of((long) jsNumber); } else { @@ -180,7 +179,7 @@ public DateWrapper parse(String text, @JsOptional JsTimeZone tz) { Date curDate = new Date(); @SuppressWarnings("deprecation") Date date = new Date(curDate.getYear(), curDate.getMonth(), - curDate.getDate()); + curDate.getDate()); // pass the date to each formatter and let it parse its own part of the string int endOfStart = wrappedStart.parse(text, 0, date); @@ -226,15 +225,14 @@ public JsDate parseAsDate(String text) { } @JsIgnore - public long parseWithTimezoneAsLong(String dateTimeString, - com.google.gwt.i18n.client.TimeZone timeZone, boolean needsAdjustment) { + public long parseWithTimezoneAsLong(String dateTimeString, com.google.gwt.i18n.client.TimeZone timeZone, + boolean needsAdjustment) { final long nanos = parse(dateTimeString, null).getWrapped(); final int remainder = (int) (nanos % JsDateTimeFormat.NANOS_PER_MILLI); long millis = nanos / JsDateTimeFormat.NANOS_PER_MILLI; final Date date = new Date(millis); - final int diff = - (date.getTimezoneOffset() - timeZone.getStandardOffset()) * NUM_MILLISECONDS_IN_MINUTE; + final int diff = (date.getTimezoneOffset() - timeZone.getStandardOffset()) * NUM_MILLISECONDS_IN_MINUTE; // Adjust time for timezone offset difference millis -= diff; @@ -261,14 +259,12 @@ public long parseWithTimezoneAsLong(String dateTimeString, final int transitionMinutes = transitionPoint * 60; // This is the Spring DST transition Check - if (timeInMinutes > transitionMinutes - && timeInMinutes < transitionMinutes + adjustment) { + if (timeInMinutes > transitionMinutes && 
timeInMinutes < transitionMinutes + adjustment) { // The format call is expensive, so we check the transition plus adjustment first final String formatAfterAdjustment = - format(LongWrapper.of(millis * NANOS_PER_MILLI), new JsTimeZone(timeZone)); + format(LongWrapper.of(millis * NANOS_PER_MILLI), new JsTimeZone(timeZone)); if (!formatAfterAdjustment.equals(dateTimeString)) { - throw new IllegalArgumentException( - dateTimeString + " occurs during a DST transition" + + throw new IllegalArgumentException(dateTimeString + " occurs during a DST transition" + " timeInMinutes = " + timeInMinutes + " transitionMinutes = " + transitionMinutes + " adjustment = " + adjustment); @@ -277,15 +273,12 @@ public long parseWithTimezoneAsLong(String dateTimeString, if (index < transitionPoints.length - 1) { final int nextTransitionMinutes = transitionPoints[index + 1] * 60; final int nextAdjustment = adjustments[index + 1]; - if (timeInMinutes > nextTransitionMinutes - && timeInMinutes < nextTransitionMinutes + nextAdjustment) { - // The format call is expensive, so we check the transition plus adjustment - // first + if (timeInMinutes > nextTransitionMinutes && timeInMinutes < nextTransitionMinutes + nextAdjustment) { + // The format call is expensive, so we check the transition plus adjustment first final String formatAfterAdjustment = - format(LongWrapper.of(millis * NANOS_PER_MILLI), new JsTimeZone(timeZone)); + format(LongWrapper.of(millis * NANOS_PER_MILLI), new JsTimeZone(timeZone)); if (!formatAfterAdjustment.equals(dateTimeString)) { - throw new IllegalArgumentException( - dateTimeString + " occurs during a DST transition" + + throw new IllegalArgumentException(dateTimeString + " occurs during a DST transition" + " timeInMinutes = " + timeInMinutes + " nextTransitionMinutes = " + nextTransitionMinutes + " nextAdjustment = " + nextAdjustment); @@ -296,10 +289,8 @@ public long parseWithTimezoneAsLong(String dateTimeString, // This is the Fall DST transition check if 
(adjustment == 0 && index > 0) { final int prevAdjustment = adjustments[index - 1]; - if (timeInMinutes > transitionMinutes - && timeInMinutes < transitionMinutes + prevAdjustment) { - throw new IllegalArgumentException( - dateTimeString + " occurs during a DST transition" + + if (timeInMinutes > transitionMinutes && timeInMinutes < transitionMinutes + prevAdjustment) { + throw new IllegalArgumentException(dateTimeString + " occurs during a DST transition" + " timeInMinutes = " + timeInMinutes + " transitionMinutes = " + transitionMinutes + " prevAdjustment = " + prevAdjustment); @@ -309,10 +300,8 @@ public long parseWithTimezoneAsLong(String dateTimeString, final int nextAdjustment = adjustments[index + 1]; if (nextAdjustment == 0) { final int nextTransitionMinutes = transitionPoints[index + 1] * 60; - if (timeInMinutes < nextTransitionMinutes - && timeInMinutes > nextTransitionMinutes - adjustment) { - throw new IllegalArgumentException( - dateTimeString + " occurs during a DST transition" + + if (timeInMinutes < nextTransitionMinutes && timeInMinutes > nextTransitionMinutes - adjustment) { + throw new IllegalArgumentException(dateTimeString + " occurs during a DST transition" + " timeInMinutes = " + timeInMinutes + " nextTransitionMinutes = " + nextTransitionMinutes + " adjustment = " + adjustment); @@ -335,10 +324,10 @@ private static native int[] getAdjustments(TimeZone tz) /*-{ @Override public String toString() { return "DateTimeFormat { " + - "pattern='" + pattern + '\'' + - ", wrappedStart=" + wrappedStart + - ", wrappedEnd=" + wrappedEnd + - ", nanoCount=" + nanoCount + - " }"; + "pattern='" + pattern + '\'' + + ", wrappedStart=" + wrappedStart + + ", wrappedEnd=" + wrappedEnd + + ", nanoCount=" + nanoCount + + " }"; } } diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/i18n/JsNumberFormat.java b/web/client-api/src/main/java/io/deephaven/web/client/api/i18n/JsNumberFormat.java index edee8ef5ed9..42d0a865637 100644 --- 
a/web/client-api/src/main/java/io/deephaven/web/client/api/i18n/JsNumberFormat.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/i18n/JsNumberFormat.java @@ -49,8 +49,7 @@ public String format(Object number) { } else if (number instanceof LongWrapper) { return wrapped.format((Long) ((LongWrapper) number).getWrapped()); } - throw new IllegalStateException( - "Can't format non-number object of type " + Js.typeof(number)); + throw new IllegalStateException("Can't format non-number object of type " + Js.typeof(number)); } @Override diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/i18n/JsTimeZone.java b/web/client-api/src/main/java/io/deephaven/web/client/api/i18n/JsTimeZone.java index 8bd36d3b69c..6dd7e39ff36 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/i18n/JsTimeZone.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/i18n/JsTimeZone.java @@ -11,8 +11,7 @@ public class JsTimeZone { public static JsTimeZone getTimeZone(String tzCode) { - if (tzCode.equals("UTC") || tzCode.equals("GMT") || tzCode.equals("Etc/GMT") - || tzCode.equals("Z")) { + if (tzCode.equals("UTC") || tzCode.equals("GMT") || tzCode.equals("Etc/GMT") || tzCode.equals("Z")) { return new JsTimeZone(TimeZone.createTimeZone(0)); } return new JsTimeZone(TimeZone.createTimeZone(getJsonForCode(tzCode))); diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/input/ColumnValueDehydrater.java b/web/client-api/src/main/java/io/deephaven/web/client/api/input/ColumnValueDehydrater.java index 26c4c07bbe9..232fc2d65c5 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/input/ColumnValueDehydrater.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/input/ColumnValueDehydrater.java @@ -12,8 +12,7 @@ import static io.deephaven.web.shared.data.ColumnValue.*; /** - * A tool for turning javascript objects (and all their "glory") into usable {@link ColumnValue} - * objects. 
+ * A tool for turning javascript objects (and all their "glory") into usable {@link ColumnValue} objects. * * This is only usable within client code. */ @@ -34,8 +33,7 @@ private static String serialize(String type, Object value, boolean arrayComponen } if (type.contains("[]")) { if (!JsArray.isArray(value)) { - throw new UnsupportedOperationException( - "Expected array type " + type + " but got " + value); + throw new UnsupportedOperationException("Expected array type " + type + " but got " + value); } final String componentType = type.replace("[]", ""); StringBuilder result = new StringBuilder(); @@ -91,8 +89,7 @@ private static String serialize(String type, Object value, boolean arrayComponen case "java.lang.Integer": // perhaps add (optional) validation that users are actually sending ints. // like `web.typesafe=true` to turn on paranoia checks. - return val == Integer.MIN_VALUE ? nullSentinel() - : Integer.toString((int) val); + return val == Integer.MIN_VALUE ? nullSentinel() : Integer.toString((int) val); case "double": case "java.lang.Double": case "java.math.BigDecimal": @@ -103,8 +100,7 @@ private static String serialize(String type, Object value, boolean arrayComponen case "io.deephaven.db.tables.utils.DBDateTime": // TODO: check if Long.MIN_VALUE actually works as expected from js; // in theory, the cast here will make the rounding, if any, equivalent - return val == (double) Long.MIN_VALUE ? nullSentinel() - : Long.toString((long) val); + return val == (double) Long.MIN_VALUE ? nullSentinel() : Long.toString((long) val); case "byte": case "java.lang.Byte": return val == Byte.MIN_VALUE ? nullSentinel() : Byte.toString((byte) val); @@ -115,16 +111,13 @@ private static String serialize(String type, Object value, boolean arrayComponen return Byte.toString((byte) val); case "char": case "java.lang.Character": - return val == Character.MAX_VALUE - 1 ? nullSentinel() - : String.valueOf(value).substring(0, 1); + return val == Character.MAX_VALUE - 1 ? 
nullSentinel() : String.valueOf(value).substring(0, 1); case "short": case "java.lang.Short": - return val == Short.MIN_VALUE ? nullSentinel() - : Short.toString((short) val); + return val == Short.MIN_VALUE ? nullSentinel() : Short.toString((short) val); case "float": case "java.lang.Float": - return val == Float.MIN_VALUE ? nullSentinel() - : Float.toString((float) val); + return val == Float.MIN_VALUE ? nullSentinel() : Float.toString((float) val); default: throw unsupported(value); } @@ -136,7 +129,7 @@ private static String serialize(String type, Object value, boolean arrayComponen switch (type) { case "java.lang.String": return arrayComponent ? v : // arrays are escaped in the forEach loop above - v.replaceAll(nullSentinel(), ESCAPER + nullSentinel()); + v.replaceAll(nullSentinel(), ESCAPER + nullSentinel()); case "io.deephaven.db.tables.utils.DBDateTime": // TODO: check if datetime string to parse into a long timestamp. // otherwise, we expect long ints for DBDateTime (for now) @@ -150,8 +143,7 @@ private static String serialize(String type, Object value, boolean arrayComponen case "java.lang.Short": case "java.math.BigInteger": // new BigInteger() should validate all of the above - assert !new BigInteger(v).toString().isEmpty(); // something we don't want - // to pay for in prod + assert !new BigInteger(v).toString().isEmpty(); // something we don't want to pay for in prod return v.split("[.]")[0]; // be forgiving to js... 
case "double": case "java.lang.Double": @@ -159,8 +151,7 @@ private static String serialize(String type, Object value, boolean arrayComponen case "java.lang.Float": case "java.math.BigDecimal": // new BigDecimal() should validate all of the above - assert !new BigDecimal(v).toString().isEmpty(); // something we don't want - // to pay for in prod + assert !new BigDecimal(v).toString().isEmpty(); // something we don't want to pay for in prod return v; case "boolean": case "java.lang.Boolean": @@ -180,7 +171,7 @@ private static String serialize(String type, Object value, boolean arrayComponen return v.substring(0, 1); } assert !(arrayComponent && v.contains(escaper())) - : "\\1 is a control character that should not be sent in unhandled array types."; + : "\\1 is a control character that should not be sent in unhandled array types."; return v; // good luck } case "object": { @@ -236,12 +227,11 @@ private static Function findFunction(Object value, String prefix) { } private static UnsupportedOperationException unsupported(Object value) { - return new UnsupportedOperationException( - "Cannot handle " + Js.typeof(value) + " : " + value); + return new UnsupportedOperationException("Cannot handle " + Js.typeof(value) + " : " + value); } private static IllegalArgumentException illegal(String type, Object value) { return new IllegalArgumentException( - "Cannot handle " + Js.typeof(value) + " value " + value + " for " + type + " column"); + "Cannot handle " + Js.typeof(value) + " value " + value + " for " + type + " column"); } } diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/input/JsInputTable.java b/web/client-api/src/main/java/io/deephaven/web/client/api/input/JsInputTable.java index 2c81d6dcbd6..613d1ed097e 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/input/JsInputTable.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/input/JsInputTable.java @@ -73,8 +73,7 @@ public Promise addRows(JsPropertyMap[] rows) { 
// assert that all keys are filled in... for (String key : keys) { if (!row.has(key)) { - throw new IllegalStateException( - "Missing key " + key + " in " + Global.JSON.stringify(row)); + throw new IllegalStateException("Missing key " + key + " in " + Global.JSON.stringify(row)); } } @@ -100,8 +99,8 @@ public Promise addTable(JsTable tableToAdd) { public Promise addTables(JsTable[] tablesToAdd) { return Callbacks.promise(this.table, c -> { - // table.getServer().addTablesToInputTable(table.getHeadHandle(), - // Arrays.stream(tablesToAdd).map(t -> t.getHandle()).toArray(TableHandle[]::new), c); + // table.getServer().addTablesToInputTable(table.getHeadHandle(), Arrays.stream(tablesToAdd).map(t -> + // t.getHandle()).toArray(TableHandle[]::new), c); throw new UnsupportedOperationException("addTablesToInputTable"); }).then(response -> Promise.resolve(this)); } @@ -123,8 +122,7 @@ public Promise deleteTables(JsTable[] tablesToDelete) { final ClientTableState cts = connection.newState(tableToDelete.state(), op); cleanups.add(cts.retain(this)); - // final HandleMapping mapping = new HandleMapping(tableToDelete.getHandle(), - // cts.getHandle()); + // final HandleMapping mapping = new HandleMapping(tableToDelete.getHandle(), cts.getHandle()); // op.fromState(cts); // op.setAppendTo(cts.getPrevious()); // op.setHandles(mapping); @@ -142,12 +140,11 @@ public Promise deleteTables(JsTable[] tablesToDelete) { throw new UnsupportedOperationException("batch"); }).then(response -> { if (response.getFailureMessages().length > 0) { - return (Promise) Promise.reject( - "Unable to delete tables: " + Arrays.toString(response.getFailureMessages())); + return (Promise) Promise + .reject("Unable to delete tables: " + Arrays.toString(response.getFailureMessages())); } return Callbacks.promise(this.table, c -> { - // table.getServer().deleteTablesFromInputTable(table.getHeadHandle(), - // response.getSuccess(), c); + // table.getServer().deleteTablesFromInputTable(table.getHeadHandle(), 
response.getSuccess(), c); throw new UnsupportedOperationException("deleteTablesFromInputTable"); }); }).then(success -> { diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/lifecycle/HasLifecycle.java b/web/client-api/src/main/java/io/deephaven/web/client/api/lifecycle/HasLifecycle.java index a11b01859b6..55a38db1a99 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/lifecycle/HasLifecycle.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/lifecycle/HasLifecycle.java @@ -9,8 +9,8 @@ import static io.deephaven.web.client.api.JsTable.EVENT_RECONNECTFAILED; /** - * An abstraction over the lifecycle methods used to reconnect JsTable, so we can have non-JsTable - * bound states get the same revivification treatment. + * An abstraction over the lifecycle methods used to reconnect JsTable, so we can have non-JsTable bound states get the + * same revivification treatment. */ public interface HasLifecycle { @@ -24,8 +24,8 @@ public interface HasLifecycle { void revive(ClientTableState state); /** - * You should probably just call notifyDeath(this, failure), assuming you implement in an object - * that HasEventHandlers. + * You should probably just call notifyDeath(this, failure), assuming you implement in an object that + * HasEventHandlers. */ void die(Object failure); diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/state/StateCache.java b/web/client-api/src/main/java/io/deephaven/web/client/api/state/StateCache.java index be69610226b..432a9cd49fa 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/state/StateCache.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/state/StateCache.java @@ -13,8 +13,8 @@ /** * A container for all known table states within the application. * - * You should only remove entries from this cache when all JsTable who might reference a given state - * have abandoned said state. 
+ * You should only remove entries from this cache when all JsTable who might reference a given state have abandoned said + * state. * */ public class StateCache { @@ -37,8 +37,7 @@ public ClientTableState getNullable(TableTicket handle) { return allStates.get(handle); } - public ClientTableState create(TableTicket handle, - Function factory) { + public ClientTableState create(TableTicket handle, Function factory) { if (handle.getState() != TableTicket.State.PENDING) { throw new IllegalStateException("Should be pending " + handle); } @@ -51,7 +50,7 @@ public ClientTableState create(TableTicket handle, public void release(ClientTableState state) { final ClientTableState was = allStates.remove(state.getHandle()); assert was == null || was == state - : "Released a state with the same handle but a different instance than expected"; + : "Released a state with the same handle but a different instance than expected"; } public Collection getAllStates() { diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/SubscriptionTableData.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/SubscriptionTableData.java index e40fa4c2f9f..a60e9ae0dc7 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/SubscriptionTableData.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/SubscriptionTableData.java @@ -42,18 +42,15 @@ private interface ArrayCopy { // array of data columns, cast each to a jsarray to read rows private Object[] data; - public SubscriptionTableData(JsArray columns, Integer rowStyleColumn, - HasEventHandling evented) { + public SubscriptionTableData(JsArray columns, Integer rowStyleColumn, HasEventHandling evented) { this.columns = columns; this.rowStyleColumn = rowStyleColumn; this.evented = evented; } - // TODO support this being called multiple times so we can keep viewports going without clearing - // the data + // TODO support this being called multiple 
times so we can keep viewports going without clearing the data public TableData handleSnapshot(TableSnapshot snapshot) { - // when changing snapshots we should actually rewrite the columns, possibly emulate - // ViewportData more? + // when changing snapshots we should actually rewrite the columns, possibly emulate ViewportData more? ColumnData[] dataColumns = snapshot.getDataColumns(); data = new Object[dataColumns.length]; reusableDestinations = RangeSet.empty(); @@ -93,9 +90,9 @@ public TableData handleSnapshot(TableSnapshot snapshot) { } /** - * Helper to avoid appending many times when modifying indexes. The append() method should be - * called for each key _in order_ to ensure that RangeSet.addRange isn't called excessively. - * When no more items will be added, flush() must be called. + * Helper to avoid appending many times when modifying indexes. The append() method should be called for each key + * _in order_ to ensure that RangeSet.addRange isn't called excessively. When no more items will be added, flush() + * must be called. */ private static class RangeSetAppendHelper { private final RangeSet rangeSet; @@ -125,8 +122,7 @@ public void append(long key) { // key appends to our current range currentFirst = key; } else { - // existing range doesn't match the new item, finish the old range and start a new - // one + // existing range doesn't match the new item, finish the old range and start a new one rangeSet.addRange(new Range(currentFirst, currentLast)); currentFirst = key; currentLast = key; @@ -142,14 +138,12 @@ public void flush() { } public TableData handleDelta(DeltaUpdates delta) { - // delete old data, track slots freed up. we do this by row since they might be - // non-contiguous or out of order + // delete old data, track slots freed up. 
we do this by row since they might be non-contiguous or out of order RangeSetAppendHelper reusableHelper = new RangeSetAppendHelper(reusableDestinations); delta.getRemoved().indexIterator().forEachRemaining((long index) -> { long dest = redirectedIndexes.remove(index); reusableHelper.append(dest); - // TODO consider trimming the columns down too, and truncating the reusable slots at the - // end + // TODO consider trimming the columns down too, and truncating the reusable slots at the end }); reusableHelper.flush(); // clean up index by ranges, not by row @@ -168,8 +162,7 @@ public TableData handleDelta(DeltaUpdates delta) { } index.removeRange(shiftedRange.getRange()); final NavigableSet toMove = redirectedIndexes.navigableKeySet() - .subSet(shiftedRange.getRange().getFirst(), true, shiftedRange.getRange().getLast(), - true); + .subSet(shiftedRange.getRange().getFirst(), true, shiftedRange.getRange().getLast(), true); // iterate backward and move them forward for (Long key : toMove.descendingSet()) { long shiftedKey = key + offset; @@ -187,13 +180,11 @@ public TableData handleDelta(DeltaUpdates delta) { } index.removeRange(shiftedRange.getRange()); final NavigableSet toMove = redirectedIndexes.navigableKeySet() - .subSet(shiftedRange.getRange().getFirst(), true, - shiftedRange.getRange().getLast(), true); + .subSet(shiftedRange.getRange().getFirst(), true, shiftedRange.getRange().getLast(), true); // iterate forward and move them backward for (Long key : toMove) { long shiftedKey = key + offset; - Long oldValue = - redirectedIndexes.put(shiftedKey, redirectedIndexes.remove(key)); + Long oldValue = redirectedIndexes.put(shiftedKey, redirectedIndexes.remove(key)); assert oldValue == null : shiftedKey + " already has a value, " + oldValue; shifter.append(shiftedKey); } @@ -201,16 +192,14 @@ public TableData handleDelta(DeltaUpdates delta) { } shifter.flush(); - // Find space for the rows we're about to add. 
We must not adjust the index until this is - // done, it is used + // Find space for the rows we're about to add. We must not adjust the index until this is done, it is used // to see where the end of the data is RangeSet addedDestination = freeRows(delta.getAdded().size()); // Within each column, append additions DeltaUpdates.ColumnAdditions[] additions = delta.getSerializedAdditions(); for (int i = 0; i < additions.length; i++) { DeltaUpdates.ColumnAdditions addedColumn = delta.getSerializedAdditions()[i]; - Column column = - columns.find((c, i1, i2) -> c.getIndex() == addedColumn.getColumnIndex()); + Column column = columns.find((c, i1, i2) -> c.getIndex() == addedColumn.getColumnIndex()); ArrayCopy arrayCopy = arrayCopyFuncForColumn(column); @@ -224,8 +213,7 @@ public TableData handleDelta(DeltaUpdates delta) { long dest = destIter.nextLong(); Long old = redirectedIndexes.put(origIndex, dest); assert old == null || old == dest; - arrayCopy.copyTo(data[addedColumn.getColumnIndex()], dest, - addedColumn.getValues().getData(), j++); + arrayCopy.copyTo(data[addedColumn.getColumnIndex()], dest, addedColumn.getValues().getData(), j++); } } @@ -241,27 +229,23 @@ public TableData handleDelta(DeltaUpdates delta) { continue; } - modifiedColumn.getRowsIncluded().rangeIterator() - .forEachRemaining(allModified::addRange); - Column column = - columns.find((c, i1, i2) -> c.getIndex() == modifiedColumn.getColumnIndex()); + modifiedColumn.getRowsIncluded().rangeIterator().forEachRemaining(allModified::addRange); + Column column = columns.find((c, i1, i2) -> c.getIndex() == modifiedColumn.getColumnIndex()); ArrayCopy arrayCopy = arrayCopyFuncForColumn(column); - PrimitiveIterator.OfLong modifiedIndexes = - modifiedColumn.getRowsIncluded().indexIterator(); + PrimitiveIterator.OfLong modifiedIndexes = modifiedColumn.getRowsIncluded().indexIterator(); int j = 0; while (modifiedIndexes.hasNext()) { long origIndex = modifiedIndexes.nextLong(); - 
arrayCopy.copyTo(data[modifiedColumn.getColumnIndex()], - redirectedIndexes.get(origIndex), modifiedColumn.getValues().getData(), j++); + arrayCopy.copyTo(data[modifiedColumn.getColumnIndex()], redirectedIndexes.get(origIndex), + modifiedColumn.getValues().getData(), j++); } } // Check that the index sizes make sense assert redirectedIndexes.size() == index.size(); - // Note that we can't do this assert, since we don't truncate arrays, we just leave nulls at - // the end + // Note that we can't do this assert, since we don't truncate arrays, we just leave nulls at the end // assert Js.asArrayLike(data[0]).getLength() == redirectedIndexes.size(); return notifyUpdates(delta.getAdded(), delta.getRemoved(), allModified); @@ -304,8 +288,7 @@ private ArrayCopy arrayCopyFuncForColumn(@Nullable Column column) { if (value == null) { Js.asArrayLike(destArray).setAt((int) destPos, null); } else { - Js.asArrayLike(destArray).setAt((int) destPos, - new BigDecimalWrapper(value)); + Js.asArrayLike(destArray).setAt((int) destPos, new BigDecimalWrapper(value)); } }; case "java.math.BigInteger": @@ -314,8 +297,7 @@ private ArrayCopy arrayCopyFuncForColumn(@Nullable Column column) { if (value == null) { Js.asArrayLike(destArray).setAt((int) destPos, null); } else { - Js.asArrayLike(destArray).setAt((int) destPos, - new BigIntegerWrapper(value)); + Js.asArrayLike(destArray).setAt((int) destPos, new BigIntegerWrapper(value)); } }; case "java.time.LocalDate": @@ -420,8 +402,7 @@ private RangeSet freeRows(long required) { RangeSet reused = RangeSet.empty(); long taken = 0; RangeSet stillUnused = RangeSet.empty(); - // TODO this could be more efficient, iterating entire ranges until we only need a - // partial range + // TODO this could be more efficient, iterating entire ranges until we only need a partial range PrimitiveIterator.OfLong iterator = reusableDestinations.indexIterator(); while (taken < required) { assert iterator.hasNext(); @@ -507,8 +488,7 @@ public Format getFormat(Column 
column) { numberFormat = formatStrings.getAnyAt(redirectedIndex).asString(); } if (column.getFormatStringColumnIndex() != null) { - JsArray formatStrings = - Js.uncheckedCast(data[column.getFormatStringColumnIndex()]); + JsArray formatStrings = Js.uncheckedCast(data[column.getFormatStringColumnIndex()]); formatString = formatStrings.getAnyAt(redirectedIndex).asString(); } return new Format(cellColors, rowColors, numberFormat, formatString); @@ -517,8 +497,8 @@ public Format getFormat(Column column) { /** - * Event data, describing the indexes that were added/removed/updated, and providing access to - * Rows (and thus data in columns) either by index, or scanning the complete present index. + * Event data, describing the indexes that were added/removed/updated, and providing access to Rows (and thus data + * in columns) either by index, or scanning the complete present index. */ public class UpdateEventData implements TableData { private JsRangeSet added; @@ -596,8 +576,7 @@ public Format getFormat(long index, Column column) { numberFormat = formatStrings.getAnyAt(redirectedIndex).asString(); } if (column.getFormatStringColumnIndex() != null) { - JsArray formatStrings = - Js.uncheckedCast(data[column.getFormatStringColumnIndex()]); + JsArray formatStrings = Js.uncheckedCast(data[column.getFormatStringColumnIndex()]); formatString = formatStrings.getAnyAt(redirectedIndex).asString(); } return new Format(cellColors, rowColors, numberFormat, formatString); diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableSubscription.java index 0a259ae0fc2..8cddc87bcfe 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableSubscription.java @@ -11,13 +11,12 @@ import jsinterop.annotations.JsProperty; /** - * Represents a 
non-viewport subscription to a table, and all data currently known to be present in - * the subscribed columns. This class handles incoming snapshots and deltas, and fires events to - * consumers to notify of data changes. + * Represents a non-viewport subscription to a table, and all data currently known to be present in the subscribed + * columns. This class handles incoming snapshots and deltas, and fires events to consumers to notify of data changes. * - * Unlike {@link TableViewportSubscription}, the "original" table does not have a reference to this - * instance, only the "private" table instance does, since the original cannot modify the - * subscription, and the private instance must forward data to it. + * Unlike {@link TableViewportSubscription}, the "original" table does not have a reference to this instance, only the + * "private" table instance does, since the original cannot modify the subscription, and the private instance must + * forward data to it. */ public class TableSubscription extends HasEventHandling { @@ -34,8 +33,7 @@ public class TableSubscription extends HasEventHandling { private Promise copy; // copy from the initially given table so we don't need to way - public TableSubscription(JsArray columns, JsTable existingTable, - Double updateIntervalMs) { + public TableSubscription(JsArray columns, JsTable existingTable, Double updateIntervalMs) { copy = existingTable.copy(false).then(table -> new Promise<>((resolve, reject) -> { table.state().onRunning(newState -> { @@ -48,7 +46,7 @@ public TableSubscription(JsArray columns, JsTable existingTable, this.columns = columns; Integer rowStyleColumn = existingTable.state().getRowFormatColumn() == null ? 
null - : existingTable.state().getRowFormatColumn().getIndex(); + : existingTable.state().getRowFormatColumn().getIndex(); this.data = new SubscriptionTableData(columns, rowStyleColumn, this); } diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java index 5f021b103d3..571b4ec0079 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/TableViewportSubscription.java @@ -27,27 +27,23 @@ import static io.deephaven.web.client.api.subscription.ViewportData.NO_ROW_FORMAT_COLUMN; /** - * Encapsulates event handling around table subscriptions by "cheating" and wrapping up a JsTable - * instance to do the real dirty work. This allows a viewport to stay open on the old table if - * desired, while this one remains open. + * Encapsulates event handling around table subscriptions by "cheating" and wrapping up a JsTable instance to do the + * real dirty work. This allows a viewport to stay open on the old table if desired, while this one remains open. *

    - * As this just wraps a JsTable (and thus a CTS), it holds its own flattened, pUT'd handle to get - * deltas from the server. The setViewport method can be used to adjust this table instead of - * creating a new one. + * As this just wraps a JsTable (and thus a CTS), it holds its own flattened, pUT'd handle to get deltas from the + * server. The setViewport method can be used to adjust this table instead of creating a new one. *

    - * Existing methods on JsTable like setViewport and getViewportData are intended to proxy to this, - * which then will talk to the underlying handle and accumulated data. + * Existing methods on JsTable like setViewport and getViewportData are intended to proxy to this, which then will talk + * to the underlying handle and accumulated data. *

    - * As long as we keep the existing methods/events on JsTable, close() is not required if no other - * method is called, with the idea then that the caller did not actually use this type. This means - * that for every exported method (which then will mark the instance of "actually being used, please - * don't automatically close me"), there must be an internal version called by those existing - * JsTable method, which will allow this instance to be cleaned up once the JsTable deems it no - * longer in use. + * As long as we keep the existing methods/events on JsTable, close() is not required if no other method is called, with + * the idea then that the caller did not actually use this type. This means that for every exported method (which then + * will mark the instance of "actually being used, please don't automatically close me"), there must be an internal + * version called by those existing JsTable method, which will allow this instance to be cleaned up once the JsTable + * deems it no longer in use. *

    - * Note that if the caller does close an instance, this shuts down the JsTable's use of this (while - * the converse is not true), providing a way to stop the server from streaming updates to the - * client. + * Note that if the caller does close an instance, this shuts down the JsTable's use of this (while the converse is not + * true), providing a way to stop the server from streaming updates to the client. */ public class TableViewportSubscription extends HasEventHandling { /** @@ -55,14 +51,14 @@ public class TableViewportSubscription extends HasEventHandling { */ public enum Status { /** - * Waiting for some prerequisite before we can begin, usually waiting to make sure the - * original table is ready to be subscribed to. Once the original table is ready, we will - * enter the ACTIVE state, even if the first update hasn't yet arrived. + * Waiting for some prerequisite before we can begin, usually waiting to make sure the original table is ready + * to be subscribed to. Once the original table is ready, we will enter the ACTIVE state, even if the first + * update hasn't yet arrived. */ STARTING, /** - * Successfully created, viewport is at least begun on the server, updates are subscribed - * and if changes happen on the server, we will be notified. + * Successfully created, viewport is at least begun on the server, updates are subscribed and if changes happen + * on the server, we will be notified. 
*/ ACTIVE, /** @@ -78,14 +74,14 @@ public enum Status { private final Promise copy; private JsTable realized; - private boolean retained;// if the sub is set up to not close the underlying table once the - // original table is done with it + private boolean retained;// if the sub is set up to not close the underlying table once the original table is done + // with it private boolean originalActive = true; private Status status = Status.STARTING; - public TableViewportSubscription(double firstRow, double lastRow, Column[] columns, - Double updateIntervalMs, JsTable existingTable) { + public TableViewportSubscription(double firstRow, double lastRow, Column[] columns, Double updateIntervalMs, + JsTable existingTable) { refresh = updateIntervalMs == null ? 1000.0 : updateIntervalMs; // first off, copy the table, and flatten/pUT it, then apply the new viewport to that this.original = existingTable; @@ -116,16 +112,14 @@ public TableViewportSubscription(double firstRow, double lastRow, Column[] colum double originalSize = newState.getSize(); realized = table; status = Status.ACTIVE; - // At this point we're now responsible for notifying of size changes, since we will - // shortly have a viewport, - // a more precise way to track the table size (at least w.r.t. the range of the - // viewport), so if there - // is any difference in size between "realized" and "original", notify now to finish - // the transition. + // At this point we're now responsible for notifying of size changes, since we will shortly have a + // viewport, + // a more precise way to track the table size (at least w.r.t. the range of the viewport), so if there + // is any difference in size between "realized" and "original", notify now to finish the transition. 
if (realized.getSize() != originalSize) { JsLog.debug( - "firing size changed to transition between table managing its own size changes and viewport sub taking over", - realized.getSize()); + "firing size changed to transition between table managing its own size changes and viewport sub taking over", + realized.getSize()); CustomEventInit init = CustomEventInit.create(); init.setDetail(realized.getSize()); refire(new CustomEvent(JsTable.EVENT_SIZECHANGED, init)); @@ -146,15 +140,11 @@ public ClientTableState state() { private void refire(Event e) { this.fireEvent(e.type, e); if (originalActive && state() == original.state()) { - // When these fail to match, it probably means that the original's state was paused, but - // we're still - // holding on to it. Since we haven't been internalClose()d yet, that means we're still - // waiting for - // the new state to resolve or fail, so we can be restored, or stopped. In theory, we - // should put this + // When these fail to match, it probably means that the original's state was paused, but we're still + // holding on to it. Since we haven't been internalClose()d yet, that means we're still waiting for + // the new state to resolve or fail, so we can be restored, or stopped. In theory, we should put this // assert back, and make the pause code also tell us to pause. 
- // assert state() == original.state() : "Table owning this viewport subscription forgot - // to release it"; + // assert state() == original.state() : "Table owning this viewport subscription forgot to release it"; original.fireEvent(e.type, e); } } @@ -165,16 +155,15 @@ private void retainForExternalUse() { @JsMethod public void setViewport(double firstRow, double lastRow, @JsOptional Column[] columns, - @JsOptional Double updateIntervalMs) { + @JsOptional Double updateIntervalMs) { retainForExternalUse(); setInternalViewport(firstRow, lastRow, columns, updateIntervalMs); } - public void setInternalViewport(double firstRow, double lastRow, Column[] columns, - Double updateIntervalMs) { + public void setInternalViewport(double firstRow, double lastRow, Column[] columns, Double updateIntervalMs) { if (updateIntervalMs != null && refresh != updateIntervalMs) { throw new IllegalArgumentException( - "Can't change refreshIntervalMs on a later call to setViewport, it must be consistent or omitted"); + "Can't change refreshIntervalMs on a later call to setViewport, it must be consistent or omitted"); } copy.then(table -> { table.setInternalViewport(firstRow, lastRow, columns); @@ -185,26 +174,22 @@ public void setInternalViewport(double firstRow, double lastRow, Column[] column @JsMethod public void close() { if (status == Status.DONE) { - JsLog.warn( - "TableViewportSubscription.close called on subscription that's already done."); + JsLog.warn("TableViewportSubscription.close called on subscription that's already done."); } retained = false; internalClose(); } /** - * Internal API method to indicate that the Table itself has no further use for this. The - * subscription should stop forwarding events and optionally close the underlying - * table/subscription. + * Internal API method to indicate that the Table itself has no further use for this. The subscription should stop + * forwarding events and optionally close the underlying table/subscription. 
*/ public void internalClose() { - // indicate that the base table shouldn't get events any more, even if it this is still - // retained elsewhere + // indicate that the base table shouldn't get events any more, even if it this is still retained elsewhere originalActive = false; if (retained || status == Status.DONE) { - // the JsTable has indicated it is no longer interested in this viewport, but other - // calling + // the JsTable has indicated it is no longer interested in this viewport, but other calling // code has retained it, keep it open for now. return; } @@ -233,15 +218,13 @@ public Promise getInternalViewportData() { public Status getStatus() { if (realized == null) { assert status != Status.ACTIVE - : "when the realized table is null, status should only be DONE or STARTING, instead is " - + status; + : "when the realized table is null, status should only be DONE or STARTING, instead is " + status; } else { if (realized.isAlive()) { assert status == Status.ACTIVE - : "realized table is alive, expected status ACTIVE, instead is " + status; + : "realized table is alive, expected status ACTIVE, instead is " + status; } else { - assert status == Status.DONE - : "realized table is closed, expected status DONE, instead is " + status; + assert status == Status.DONE : "realized table is closed, expected status DONE, instead is " + status; } } @@ -264,28 +247,26 @@ public Promise snapshot(JsRangeSet rows, Column[] columns) { return copy.then(table -> { final ClientTableState state = table.state(); String[] columnTypes = Arrays.stream(state.getAllColumns()) - .map(Column::getType) - .toArray(String[]::new); + .map(Column::getType) + .toArray(String[]::new); final BitSet columnBitset = table.lastVisibleState().makeBitset(columns); return Callbacks.promise(this, c -> { ResponseStreamWrapper stream = - ResponseStreamWrapper.of(table.getConnection().flightServiceClient().doGet( - Js.uncheckedCast(state.getHandle().makeTicket()), - table.getConnection().metadata())); + 
ResponseStreamWrapper.of(table.getConnection().flightServiceClient().doGet( + Js.uncheckedCast(state.getHandle().makeTicket()), table.getConnection().metadata())); stream.onData(flightData -> { - Message message = - Message.getRootAsMessage(new ByteBuffer(flightData.getDataHeader_asU8())); + Message message = Message.getRootAsMessage(new ByteBuffer(flightData.getDataHeader_asU8())); if (message.headerType() == MessageHeader.Schema) { // ignore for now, we'll handle this later return; } assert message.headerType() == MessageHeader.RecordBatch; RecordBatch header = message.header(new RecordBatch()); - TableSnapshot snapshot = BarrageUtils.createSnapshot(header, BarrageUtils - .typedArrayToLittleEndianByteBuffer(flightData.getDataBody_asU8()), null, - true, columnTypes); + TableSnapshot snapshot = BarrageUtils.createSnapshot(header, + BarrageUtils.typedArrayToLittleEndianByteBuffer(flightData.getDataBody_asU8()), null, true, + columnTypes); c.onSuccess(snapshot); }); @@ -295,10 +276,9 @@ public Promise snapshot(JsRangeSet rows, Column[] columns) { } }); }).then(defer()).then(snapshot -> { - SubscriptionTableData pretendSubscription = - new SubscriptionTableData(Js.uncheckedCast(columns), + SubscriptionTableData pretendSubscription = new SubscriptionTableData(Js.uncheckedCast(columns), state.getRowFormatColumn() == null ? NO_ROW_FORMAT_COLUMN - : state.getRowFormatColumn().getIndex(), + : state.getRowFormatColumn().getIndex(), null); TableData data = pretendSubscription.handleSnapshot(snapshot); return Promise.resolve(data); @@ -307,8 +287,8 @@ public Promise snapshot(JsRangeSet rows, Column[] columns) { } /** - * Instead of a micro-task between chained promises, insert a regular task so that control is - * returned to the browser long enough to prevent the UI hanging. + * Instead of a micro-task between chained promises, insert a regular task so that control is returned to the + * browser long enough to prevent the UI hanging. 
*/ private IThenable.ThenOnFulfilledCallbackFn defer() { return val -> new Promise<>((resolve, reject) -> { diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/ViewportData.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/ViewportData.java index e04d3a67a2b..d21247c2d10 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/ViewportData.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/ViewportData.java @@ -45,8 +45,8 @@ public class MergeResults { private final int rowFormatColumn; - public ViewportData(RangeSet includedRows, Object[] dataColumns, JsArray columns, - int rowFormatColumn, long maxLength) { + public ViewportData(RangeSet includedRows, Object[] dataColumns, JsArray columns, int rowFormatColumn, + long maxLength) { assert maxLength <= Integer.MAX_VALUE; this.maxLength = (int) maxLength; @@ -376,8 +376,7 @@ public MergeResults merge(DeltaUpdates updates) { final long internalOffsetAsLong = (j + shiftDelta); if (internalOffsetAsLong >= 0 && internalOffsetAsLong < maxLength) { - // because internalOffsetAsLong is less than maxLen; we know it must be fit - // in an int + // because internalOffsetAsLong is less than maxLen; we know it must be fit in an int final int internalOffset = (int) internalOffsetAsLong; updated.added.add(internalOffset); Any toMove = existingColumnData.getAt(j); @@ -399,15 +398,13 @@ public MergeResults merge(DeltaUpdates updates) { } final long begin = Math.max(shiftedRange.getRange().getFirst() - offset, 0); - final int end = - (int) Math.min(shiftedRange.getRange().getLast() - offset, length - 1); + final int end = (int) Math.min(shiftedRange.getRange().getLast() - offset, length - 1); if (end < begin) { // this range is out of our viewport continue; } - // iterate forward and move them backward (note: since begin is <= end, we now know - // it fits in an int) + // iterate forward and move them backward (note: 
since begin is <= end, we now know it fits in an int) for (int j = (int) begin; j <= end; ++j) { for (int i = 0; i < data.length; ++i) { final JsArray existingColumnData = Js.uncheckedCast(data[i]); @@ -417,8 +414,7 @@ public MergeResults merge(DeltaUpdates updates) { final long internalOffsetAsLong = j + shiftDelta; if (internalOffsetAsLong >= 0 && internalOffsetAsLong < maxLength) { - // because internalOffsetAsLong is less than maxLen; we know it must be - // fit in an int + // because internalOffsetAsLong is less than maxLen; we know it must be fit in an int final int internalOffset = (int) internalOffsetAsLong; updated.added.add(internalOffset); existingColumnData.setAt(internalOffset, existingColumnData.getAt(j)); @@ -431,26 +427,20 @@ public MergeResults merge(DeltaUpdates updates) { } } - DeltaUpdates.ColumnModifications[] serializedModifications = - updates.getSerializedModifications(); - for (int modifiedColIndex = - 0; modifiedColIndex < serializedModifications.length; modifiedColIndex++) { - final DeltaUpdates.ColumnModifications modifiedColumn = - serializedModifications[modifiedColIndex]; - final OfLong it = - modifiedColumn == null ? null : modifiedColumn.getRowsIncluded().indexIterator(); + DeltaUpdates.ColumnModifications[] serializedModifications = updates.getSerializedModifications(); + for (int modifiedColIndex = 0; modifiedColIndex < serializedModifications.length; modifiedColIndex++) { + final DeltaUpdates.ColumnModifications modifiedColumn = serializedModifications[modifiedColIndex]; + final OfLong it = modifiedColumn == null ? 
null : modifiedColumn.getRowsIncluded().indexIterator(); if (it == null || !it.hasNext()) { continue; } // look for a local Column which matches this index so we know how to clean it - final Column column = - columns.find((c, i1, i2) -> c.getIndex() == modifiedColumn.getColumnIndex()); + final Column column = columns.find((c, i1, i2) -> c.getIndex() == modifiedColumn.getColumnIndex()); final JsArray updatedColumnData = - Js.uncheckedCast(cleanData(modifiedColumn.getValues().getData(), column)); - final JsArray existingColumnData = - Js.uncheckedCast(data[modifiedColumn.getColumnIndex()]); + Js.uncheckedCast(cleanData(modifiedColumn.getValues().getData(), column)); + final JsArray existingColumnData = Js.uncheckedCast(data[modifiedColumn.getColumnIndex()]); if (updatedColumnData.length == 0) { continue; } @@ -462,8 +452,8 @@ public MergeResults merge(DeltaUpdates updates) { long internalOffset = (modifiedOffset - offset); if (internalOffset < 0 || internalOffset >= maxLength) { i++; - continue;// data we don't need to see, either meant for another table, or we - // just sent a viewport update + continue;// data we don't need to see, either meant for another table, or we just sent a viewport + // update } existingColumnData.setAt((int) internalOffset, updatedColumnData.getAnyAt(i)); updated.modified.add((int) internalOffset); @@ -473,16 +463,13 @@ public MergeResults merge(DeltaUpdates updates) { if (!updates.getIncludedAdditions().isEmpty()) { DeltaUpdates.ColumnAdditions[] serializedAdditions = updates.getSerializedAdditions(); - for (int addedColIndex = - 0; addedColIndex < serializedAdditions.length; addedColIndex++) { + for (int addedColIndex = 0; addedColIndex < serializedAdditions.length; addedColIndex++) { DeltaUpdates.ColumnAdditions addedColumn = serializedAdditions[addedColIndex]; - Column column = - columns.find((c, i1, i2) -> c.getIndex() == addedColumn.getColumnIndex()); + Column column = columns.find((c, i1, i2) -> c.getIndex() == 
addedColumn.getColumnIndex()); final JsArray addedColumnData = - Js.uncheckedCast(cleanData(addedColumn.getValues().getData(), column)); - final JsArray existingColumnData = - Js.uncheckedCast(data[addedColumn.getColumnIndex()]); + Js.uncheckedCast(cleanData(addedColumn.getValues().getData(), column)); + final JsArray existingColumnData = Js.uncheckedCast(data[addedColumn.getColumnIndex()]); if (addedColumnData.length == 0) { continue; } @@ -494,8 +481,8 @@ public MergeResults merge(DeltaUpdates updates) { int internalOffset = (int) (addedOffset - offset); if (internalOffset < 0 || internalOffset >= maxLength) { i++; - continue;// data we don't need to see, either meant for another table, or we - // just sent a viewport update + continue;// data we don't need to see, either meant for another table, or we just sent a + // viewport update } assert internalOffset < existingColumnData.length; existingColumnData.setAt(internalOffset, addedColumnData.getAnyAt(i)); @@ -521,8 +508,8 @@ public MergeResults merge(DeltaUpdates updates) { length = length + updated.added.size() - updated.removed.size(); assert 0 <= length && length <= maxLength; - // Viewport footprint should be small enough that we can afford to see if this update - // corrupted our view of the world: + // Viewport footprint should be small enough that we can afford to see if this update corrupted our view of the + // world: assert !dataContainsNullSentinels(); return updated; diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/ViewportRow.java b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/ViewportRow.java index 64da00978b2..3a5be53c5e6 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/ViewportRow.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/subscription/ViewportRow.java @@ -22,8 +22,7 @@ public ViewportRow(int offsetInSnapshot, Object[] dataColumns, Object rowStyleCo @Override public LongWrapper 
getIndex() { - throw new UnsupportedOperationException( - "Viewports don't currently represent their position with an index"); + throw new UnsupportedOperationException("Viewports don't currently represent their position with an index"); } @Override @@ -31,8 +30,8 @@ public LongWrapper getIndex() { public Any get(Column column) { JsArray uncheckedData = Js.uncheckedCast(dataColumns[column.getIndex()]); if (uncheckedData == null) { - throw new java.util.NoSuchElementException("Column " + column.getName() - + " not found in row, was it specified in the viewport?"); + throw new java.util.NoSuchElementException( + "Column " + column.getName() + " not found in row, was it specified in the viewport?"); } return uncheckedData.getAnyAt(offsetInSnapshot); } @@ -52,13 +51,11 @@ public Format getFormat(Column column) { rowColors = rowStyleColumn.getAnyAt(offsetInSnapshot).asLong(); } if (column.getFormatColumnIndex() != null) { - JsArray formatStrings = - Js.uncheckedCast(dataColumns[column.getFormatColumnIndex()]); + JsArray formatStrings = Js.uncheckedCast(dataColumns[column.getFormatColumnIndex()]); numberFormat = formatStrings.getAnyAt(offsetInSnapshot).asString(); } if (column.getFormatStringColumnIndex() != null) { - JsArray formatStrings = - Js.uncheckedCast(dataColumns[column.getFormatStringColumnIndex()]); + JsArray formatStrings = Js.uncheckedCast(dataColumns[column.getFormatStringColumnIndex()]); formatString = formatStrings.getAnyAt(offsetInSnapshot).asString(); } return new Format(cellColors, rowColors, numberFormat, formatString); diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsRollupConfig.java b/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsRollupConfig.java index 790bdc96026..bdfd5f94e09 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsRollupConfig.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsRollupConfig.java @@ -50,8 +50,8 @@ public 
JsRollupConfig(JsPropertyMap source) { public RollupTableRequest buildRequest() { RollupTableRequest rollupRequest = new RollupTableRequest(); ArrayList aggregations = new ArrayList<>(); - this.aggregations.forEach(key -> aggregations.add( - "" + key + "=" + String.join(",", JsArrays.toStringArray(this.aggregations.get(key))))); + this.aggregations.forEach(key -> aggregations + .add("" + key + "=" + String.join(",", JsArrays.toStringArray(this.aggregations.get(key))))); rollupRequest.setAggregations(aggregations.toArray(new String[0])); JsArrays.setArray(groupingColumns, rollupRequest::setGroupingColumns); rollupRequest.setIncludeConstituents(includeConstituents); diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsTreeTable.java b/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsTreeTable.java index 59d0c8b887b..8e437a6c6c4 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsTreeTable.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsTreeTable.java @@ -34,27 +34,24 @@ import static io.deephaven.web.client.api.subscription.ViewportData.NO_ROW_FORMAT_COLUMN; /** - * Behaves like a JsTable externally, but data, state, and viewports are managed by an entirely - * different mechanism, and so reimplemented here. + * Behaves like a JsTable externally, but data, state, and viewports are managed by an entirely different mechanism, and + * so reimplemented here. * - * Any time a change is made, we build a new request and send it to the server, and wait for the - * updated state. + * Any time a change is made, we build a new request and send it to the server, and wait for the updated state. * - * Semantics around getting updates from the server are slightly different - we don't "unset" the - * viewport here after operations are performed, but encourage the client code to re-set them to the - * desired position. 
+ * Semantics around getting updates from the server are slightly different - we don't "unset" the viewport here after + * operations are performed, but encourage the client code to re-set them to the desired position. * - * The "__Hierarchical_Children" column should generally be left out of the UI, but is provided for - * debugging purposes. + * The "__Hierarchical_Children" column should generally be left out of the UI, but is provided for debugging purposes. * * The table size will be -1 until a viewport has been fetched. */ public class JsTreeTable extends HasEventHandling implements HasLifecycle { @JsProperty(namespace = "dh.TreeTable") public static final String EVENT_UPDATED = "updated", - EVENT_DISCONNECT = "disconnect", - EVENT_RECONNECT = "reconnect", - EVENT_RECONNECTFAILED = "reconnectfailed"; + EVENT_DISCONNECT = "disconnect", + EVENT_RECONNECT = "reconnect", + EVENT_RECONNECTFAILED = "reconnectfailed"; private static final String TABLE_AGGREGATION_COLUMN_PREFIX = "Rollup_"; @@ -91,21 +88,17 @@ class TreeViewportData { private final Object[] data; private TreeViewportData(RangeSet includedRows, ColumnData[] dataColumns, Key[] keyColumn, - Key[] parentKeyColumn, BitSet childPresence, double offset, Column[] columns, - int rowFormatColumn, String[] constituentColumnNames, - ColumnData[] constituentColumnData) { + Key[] parentKeyColumn, BitSet childPresence, double offset, Column[] columns, int rowFormatColumn, + String[] constituentColumnNames, ColumnData[] constituentColumnData) { this.keyColumn = keyColumn; this.parentKeyColumn = parentKeyColumn; this.childPresence = childPresence; this.offset = offset; - this.columns = - JsObject.freeze(Js.cast(Js.>uncheckedCast(columns).slice())); + this.columns = JsObject.freeze(Js.cast(Js.>uncheckedCast(columns).slice())); - // Unlike ViewportData, assume that we own this copy of the data and can mutate at will. 
- // As such, + // Unlike ViewportData, assume that we own this copy of the data and can mutate at will. As such, // we'll just clean the data that the requested columns know about for now. - // TODO to improve this, we can have synthetic columns to handle data that wasn't - // requested/expected, + // TODO to improve this, we can have synthetic columns to handle data that wasn't requested/expected, // and then can share code with ViewportData this.data = new Object[dataColumns.length]; @@ -132,8 +125,7 @@ private TreeViewportData(RangeSet includedRows, ColumnData[] dataColumns, Key[] data[c.getStyleColumnIndex()] = dataColumns[c.getStyleColumnIndex()].getData(); } if (c.getFormatStringColumnIndex() != null) { - data[c.getFormatStringColumnIndex()] = - dataColumns[c.getFormatStringColumnIndex()].getData(); + data[c.getFormatStringColumnIndex()] = dataColumns[c.getFormatStringColumnIndex()].getData(); } // if there is a matching constituent column array, clean it and copy from it @@ -141,19 +133,16 @@ private TreeViewportData(RangeSet includedRows, ColumnData[] dataColumns, Key[] if (sourceColumn != null) { ColumnData constituentColumn = constituentColumns.get(sourceColumn.getName()); if (constituentColumn != null) { - JsArray cleanConstituentColumn = Js.uncheckedCast( - ViewportData.cleanData(constituentColumn.getData(), sourceColumn)); + JsArray cleanConstituentColumn = + Js.uncheckedCast(ViewportData.cleanData(constituentColumn.getData(), sourceColumn)); // Overwrite the data with constituent values, if any - // We use cleanConstituentColumn to find max item rather than data[index], - // since we - // are okay stopping at the last constituent value, in case the server sends - // shorter + // We use cleanConstituentColumn to find max item rather than data[index], since we + // are okay stopping at the last constituent value, in case the server sends shorter // arrays. 
- for (int rowIndex = childPresence - .nextClearBit(0); rowIndex < cleanConstituentColumn.length; rowIndex = - childPresence.nextClearBit(rowIndex + 1)) { - Js.asArrayLike(data[index]).setAt(rowIndex, - cleanConstituentColumn.getAt(rowIndex)); + for (int rowIndex = + childPresence.nextClearBit(0); rowIndex < cleanConstituentColumn.length; rowIndex = + childPresence.nextClearBit(rowIndex + 1)) { + Js.asArrayLike(data[index]).setAt(rowIndex, cleanConstituentColumn.getAt(rowIndex)); } } } @@ -185,27 +174,25 @@ public JsArray getRows() { /** - * Checks if two viewport data objects contain the same data, based on comparing four - * fields, none of which can be null. * The columnData array is the actual contents of the - * rows - if these change, clearly we have different data * The constituentColumns array is - * the actual contents of the constituent column values, mapped by their column name - if - * these change, the visible data will be different * The childPresence field is the main - * other change that could happen, where a node changes its status of having children. * The - * keyColumn contents, if they change, might require the UI to change the "expanded" - * property. This is a stretch, but it could happen. * The parentColumn is even more of a - * stretch, but if it were to change without the item itself moving its position in the - * viewport, the depth (and indentation in the UI) would visibly change. We aren't - * interested in the other fields - rows and data are just a different way to see the - * original data in the columnData field, and if either offset or columns change, we would - * automatically force an event to happen anyway, so that we confirm to the user that the - * change happened (even if the visible data didn't change for some reason). + * Checks if two viewport data objects contain the same data, based on comparing four fields, none of which can + * be null. 
* The columnData array is the actual contents of the rows - if these change, clearly we have + * different data * The constituentColumns array is the actual contents of the constituent column values, mapped + * by their column name - if these change, the visible data will be different * The childPresence field is the + * main other change that could happen, where a node changes its status of having children. * The keyColumn + * contents, if they change, might require the UI to change the "expanded" property. This is a stretch, but it + * could happen. * The parentColumn is even more of a stretch, but if it were to change without the item itself + * moving its position in the viewport, the depth (and indentation in the UI) would visibly change. We aren't + * interested in the other fields - rows and data are just a different way to see the original data in the + * columnData field, and if either offset or columns change, we would automatically force an event to happen + * anyway, so that we confirm to the user that the change happened (even if the visible data didn't change for + * some reason). */ public boolean containsSameDataAs(TreeViewportData that) { return Arrays.equals(keyColumn, that.keyColumn) - && Arrays.equals(parentKeyColumn, that.parentKeyColumn) - && childPresence.equals(that.childPresence) - && Arrays.equals(columnData, that.columnData) - && Objects.equals(constituentColumns, that.constituentColumns); + && Arrays.equals(parentKeyColumn, that.parentKeyColumn) + && childPresence.equals(that.childPresence) + && Arrays.equals(columnData, that.columnData) + && Objects.equals(constituentColumns, that.constituentColumns); } /** @@ -242,8 +229,8 @@ public Key myKey() { } /** - * Tracks state of a given table that is part of the tree. Updates from the server in the form - * of a TableDetails object are folded into this as needed + * Tracks state of a given table that is part of the tree. 
Updates from the server in the form of a TableDetails + * object are folded into this as needed */ class TreeNodeState { private Key key; @@ -258,8 +245,8 @@ public TreeNodeState(Key key, int depth) { public void expand(Key child) { if (expandedChildren.add(child)) { TreeNodeState childState = new TreeNodeState( - child, - depth + 1); + child, + depth + 1); expandedMap.put(child, childState); JsLog.debug("user expanded ", child); scheduleSnapshotQuery(false); @@ -299,8 +286,7 @@ public TableDetails toTableDetails() { private List filters = new ArrayList<>(); private List sorts = new ArrayList<>(); - // the "next" set of filters/sorts that we'll use. these either are "==" to the above fields, or - // are scheduled + // the "next" set of filters/sorts that we'll use. these either are "==" to the above fields, or are scheduled // to replace them soon. private List nextFilters = new ArrayList<>(); private List nextSort = new ArrayList<>(); @@ -313,8 +299,7 @@ public TableDetails toTableDetails() { private final Map expandedMap = new HashMap<>(); private JsRunnable queuedOperations = null; - private TreeTableRequest.TreeRequestOperation[] nextRequestOps = - new TreeTableRequest.TreeRequestOperation[0]; + private TreeTableRequest.TreeRequestOperation[] nextRequestOps = new TreeTableRequest.TreeRequestOperation[0]; private Double viewportUpdateTimeoutId; private boolean scheduled; @@ -351,47 +336,44 @@ public JsTreeTable(ClientTableState state, WorkerConnection workerConnection) { throw new UnsupportedOperationException("fetchTableAttributeAsTable"); } else { - final String failure = - "Attempting to connect to a source table on a close()d tree table"; + final String failure = "Attempting to connect to a source table on a close()d tree table"; JsLog.debug(failure, this, LazyString.of(baseTable::toStringMinimal)); callback.apply(failure, null); newState.setResolution(ClientTableState.ResolutionState.FAILED, failure); } }, "treetable source attr") - .refetch(this, 
workerConnection.metadata()) - .then(sourceState -> { - JsTable table = new JsTable(workerConnection, sourceState); - - table.addEventListener(JsTable.INTERNAL_EVENT_SIZELISTENER, ignore -> { - if (nextSnapshotState == NextSnapshotState.TIMER_RUNNING) { - nextSnapshotState = NextSnapshotState.QUERY_WHEN_TIMER_ENDS; - } else if (nextSnapshotState == NextSnapshotState.QUERY_WHEN_UPDATE_SEEN) { - scheduleSnapshotQuery(false); - } - }); + .refetch(this, workerConnection.metadata()) + .then(sourceState -> { + JsTable table = new JsTable(workerConnection, sourceState); + + table.addEventListener(JsTable.INTERNAL_EVENT_SIZELISTENER, ignore -> { + if (nextSnapshotState == NextSnapshotState.TIMER_RUNNING) { + nextSnapshotState = NextSnapshotState.QUERY_WHEN_TIMER_ENDS; + } else if (nextSnapshotState == NextSnapshotState.QUERY_WHEN_UPDATE_SEEN) { + scheduleSnapshotQuery(false); + } + }); - return Promise.resolve(table); - }, fail -> { - // noinspection unchecked - return (Promise) (Promise) Promise.reject( - "Failed to fetch tree's source table - is this table actually a tree? " + fail); - }); + return Promise.resolve(table); + }, fail -> { + // noinspection unchecked + return (Promise) (Promise) Promise + .reject("Failed to fetch tree's source table - is this table actually a tree? " + fail); + }); } /** - * Must be called on any tree table that needs the source table in order to be usable. At this - * time. it is only required to work out constituent column types for rollup columns. + * Must be called on any tree table that needs the source table in order to be usable. At this time. it is only + * required to work out constituent column types for rollup columns. 
*/ public Promise finishFetch() { - RollupDefinition rollupDefinition = - baseTable.getTableDef().getAttributes().getRollupDefinition(); + RollupDefinition rollupDefinition = baseTable.getTableDef().getAttributes().getRollupDefinition(); if (rollupDefinition == null) { return Promise.resolve(this); } return sourceTable.then(sourceTable -> { - // reconcile our table definition against the source table and assign constituent types - // to our columns + // reconcile our table definition against the source table and assign constituent types to our columns final Set presentNames = new HashSet<>(); for (int i = 0; i < rollupDefinition.getRollupColumnNames().length; i++) { String rollupColName = rollupDefinition.getRollupColumnNames()[i]; @@ -407,10 +389,9 @@ public Promise finishFetch() { } } - // for each column _not_ in this list of names, explicitly assign a null constituent - // type - groupedColumns = JsObject.freeze(Js.cast(getColumns() - .filter((column, index, array) -> !presentNames.contains(column.getName())))); + // for each column _not_ in this list of names, explicitly assign a null constituent type + groupedColumns = JsObject.freeze( + Js.cast(getColumns().filter((column, index, array) -> !presentNames.contains(column.getName())))); if (rollupDefinition.getLeafType() == RollupDefinition.LeafType.Constituent) { groupedColumns.forEach((column, index, array) -> { column.setConstituentType(null); @@ -422,20 +403,19 @@ public Promise finishFetch() { } /** - * Requests an update as soon as possible, canceling any future update but scheduling a new one. - * Any change in viewport or configuration should call this. + * Requests an update as soon as possible, canceling any future update but scheduling a new one. Any change in + * viewport or configuration should call this. 
* - * @param alwaysFireEvent force the updated event to fire based on this scheduled snapshot, even - * if the data is the same as before + * @param alwaysFireEvent force the updated event to fire based on this scheduled snapshot, even if the data is the + * same as before */ private void scheduleSnapshotQuery(boolean alwaysFireEvent) { - // track if we should force the event to fire when the data comes back, even if there is no - // change + // track if we should force the event to fire when the data comes back, even if there is no change alwaysFireNextEvent |= alwaysFireEvent; if (running) { - // already in flight, so when the response comes back make sure the queue is non-empty - // so we schedule another + // already in flight, so when the response comes back make sure the queue is non-empty so we schedule + // another if (queuedOperations == null) { queuedOperations = JsRunnable.doNothing(); } @@ -453,8 +433,7 @@ private void scheduleSnapshotQuery(boolean alwaysFireEvent) { } /** - * Requests an update from the server. Should only be called by itself and - * scheduleSnapshotQuery. + * Requests an update from the server. Should only be called by itself and scheduleSnapshotQuery. 
*/ private void snapshotQuery() { if (closed) { @@ -472,8 +451,8 @@ private void snapshotQuery() { // clear any size update, we're getting data either way, future updates should be noted nextSnapshotState = NextSnapshotState.TIMER_RUNNING; viewportUpdateTimeoutId = DomGlobal.setTimeout(p -> { - // timer has elapsed, we'll actually perform our regular check only if a change was seen - // since we were started + // timer has elapsed, we'll actually perform our regular check only if a change was seen since we were + // started if (nextSnapshotState == NextSnapshotState.QUERY_WHEN_TIMER_ENDS) { scheduleSnapshotQuery(false); } else { @@ -489,10 +468,10 @@ private void snapshotQuery() { boolean alwaysFireEvent = this.alwaysFireNextEvent; this.alwaysFireNextEvent = false; - JsLog.debug("Sending tree table request", this, LazyString.of(baseTable::getHandle), - query, alwaysFireEvent); - // connection.getServer().treeSnapshotQuery(baseTable.getHandle(), query, - // Callbacks.of((success, failure) -> { + JsLog.debug("Sending tree table request", this, LazyString.of(baseTable::getHandle), query, + alwaysFireEvent); + // connection.getServer().treeSnapshotQuery(baseTable.getHandle(), query, Callbacks.of((success, failure) -> + // { // try { // if (success != null) { // handleUpdate(queryColumns, nextSort, nextFilters, success, alwaysFireEvent); @@ -503,8 +482,7 @@ private void snapshotQuery() { // running = false; // if (queuedOperations != null) { // // Something changed since our last request, start another one. - // // We allow skipping the event since whatever enqueued the operation should have - // passed true + // // We allow skipping the event since whatever enqueued the operation should have passed true // // if needed, or it could have been a slow reply from the server, etc. 
// scheduleSnapshotQuery(false); // } @@ -516,8 +494,8 @@ private void snapshotQuery() { } } - private void handleUpdate(Column[] columns, List nextSort, - List nextFilters, TreeTableResult result, boolean alwaysFireEvent) { + private void handleUpdate(Column[] columns, List nextSort, List nextFilters, + TreeTableResult result, boolean alwaysFireEvent) { JsLog.debug("tree table response arrived", result); lastResult = result; if (closed) { @@ -544,17 +522,17 @@ private void handleUpdate(Column[] columns, List nextSort, } TreeViewportData vd = new TreeViewportData( - includedRows, - result.getSnapshotData(), - result.getKeyColumn(), - result.getParentKeyColumn(), - result.getChildPresence(), - result.getSnapshotStart(), - columns, - baseTable.getRowFormatColumn() == null ? NO_ROW_FORMAT_COLUMN - : baseTable.getRowFormatColumn().getIndex(), - result.getConstituentColumnNames(), - result.getConstituentColumnData()); + includedRows, + result.getSnapshotData(), + result.getKeyColumn(), + result.getParentKeyColumn(), + result.getChildPresence(), + result.getSnapshotStart(), + columns, + baseTable.getRowFormatColumn() == null ? NO_ROW_FORMAT_COLUMN + : baseTable.getRowFormatColumn().getIndex(), + result.getConstituentColumnNames(), + result.getConstituentColumnData()); // if requested to fire the event, or if the data has changed in some way, fire the event final boolean fireEvent = alwaysFireEvent || !vd.containsSameDataAs(currentViewportData); @@ -591,20 +569,18 @@ private void handleUpdate(Column[] columns, List nextSort, } /** - * Creates a request object based on the current state of request info. We don't presently build - * this ahead of time and maintain it as things change, but instead read from the rest of the - * tree's state to decide what to build. + * Creates a request object based on the current state of request info. 
We don't presently build this ahead of time + * and maintain it as things change, but instead read from the rest of the tree's state to decide what to build. * - * Sort is always assigned, since a node could be expanded, might now have children (and the - * server needs to know how to sort it), etc - the server will only sort individual "children" - * tables lazily, so this must always be provided. + * Sort is always assigned, since a node could be expanded, might now have children (and the server needs to know + * how to sort it), etc - the server will only sort individual "children" tables lazily, so this must always be + * provided. * - * Filters are sent when the filter changes, or when something else changes that will result in - * rebuilding one or more children trees - the two cases that exist today are changing the sort, - * or reconnecting to the server. When filters are changed, the bookkeeping is done - * automatically, but for other purposes the releaseAllNodes helper method should be used to - * both release nodes and indicate that when refetched the filter may need to be applied again - * as well. + * Filters are sent when the filter changes, or when something else changes that will result in rebuilding one or + * more children trees - the two cases that exist today are changing the sort, or reconnecting to the server. When + * filters are changed, the bookkeeping is done automatically, but for other purposes the releaseAllNodes helper + * method should be used to both release nodes and indicate that when refetched the filter may need to be applied + * again as well. 
*/ private TreeTableRequest buildQuery() { TreeTableRequest request = new TreeTableRequest(); @@ -619,8 +595,8 @@ private TreeTableRequest buildQuery() { // if any of those operations asks for a close, just do the close and skip the rest if (Arrays.asList(nextRequestOps).contains(TreeTableRequest.TreeRequestOperation.Close)) { closed = true; - request.setIncludedOps(new TreeTableRequest.TreeRequestOperation[] { - TreeTableRequest.TreeRequestOperation.Close}); + request.setIncludedOps( + new TreeTableRequest.TreeRequestOperation[] {TreeTableRequest.TreeRequestOperation.Close}); request.setExpandedNodes(new TableDetails[0]); request.setSorts(new SortDescriptor[0]); request.setFilters(new FilterDescriptor[0]); @@ -628,34 +604,29 @@ private TreeTableRequest buildQuery() { return request; } - request.setExpandedNodes(expandedMap.values().stream().map(TreeNodeState::toTableDetails) - .toArray(TableDetails[]::new)); + request.setExpandedNodes( + expandedMap.values().stream().map(TreeNodeState::toTableDetails).toArray(TableDetails[]::new)); - final int hierarchicalChildrenColumnIndex = Arrays - .stream(this.baseTable.getTableDef().getColumns()) - .filter(col -> col.getName().equals( - this.baseTable.getTableDef().getAttributes().getTreeHierarchicalColumnName())) - .mapToInt(ColumnDefinition::getColumnIndex) - .findFirst().orElseThrow( - () -> new IllegalStateException("TreeTable definition has no hierarchy column")); + final int hierarchicalChildrenColumnIndex = Arrays.stream(this.baseTable.getTableDef().getColumns()) + .filter(col -> col.getName() + .equals(this.baseTable.getTableDef().getAttributes().getTreeHierarchicalColumnName())) + .mapToInt(ColumnDefinition::getColumnIndex) + .findFirst() + .orElseThrow(() -> new IllegalStateException("TreeTable definition has no hierarchy column")); request.setKeyColumn(hierarchicalChildrenColumnIndex); - // avoid sending filters unless they changed for smaller overhead on requests, there is no - // need to recompute + // avoid 
sending filters unless they changed for smaller overhead on requests, there is no need to recompute // filters on child tables, only on the root table if (!filters.equals(nextFilters)) { request.setFilters( - nextFilters.stream().map(FilterCondition::makeDescriptor) - .toArray(FilterDescriptor[]::new)); + nextFilters.stream().map(FilterCondition::makeDescriptor).toArray(FilterDescriptor[]::new)); } else { request.setFilters(new FilterDescriptor[0]); } - // always include the sort setup, the viewport content could have changed in practically any - // way - request - .setSorts(nextSort.stream().map(Sort::makeDescriptor).toArray(SortDescriptor[]::new)); + // always include the sort setup, the viewport content could have changed in practically any way + request.setSorts(nextSort.stream().map(Sort::makeDescriptor).toArray(SortDescriptor[]::new)); BitSet columnsBitset = baseTable.makeBitset(this.columns); columnsBitset.set(hierarchicalChildrenColumnIndex); @@ -701,8 +672,7 @@ public void setExpanded(Object row, boolean isExpanded) { final TreeRow r; if (row instanceof Double) { - r = currentViewportData.rows - .getAt((int) ((double) row - lastResult.getSnapshotStart())); + r = currentViewportData.rows.getAt((int) ((double) row - lastResult.getSnapshotStart())); } else if (row instanceof TreeRow) { r = (TreeRow) row; } else { @@ -712,31 +682,28 @@ public void setExpanded(Object row, boolean isExpanded) { Key myRowKey = r.myKey(); Key parentRowKey = r.parentKey(); JsLog.debug("setExpanded enqueued"); - // With the keys collected for the currently-visible item to expand/collapse, we can enqueue - // an operation + // With the keys collected for the currently-visible item to expand/collapse, we can enqueue an operation // to modify that node enqueue(isExpanded ? 
TreeTableRequest.TreeRequestOperation.Expand - : TreeTableRequest.TreeRequestOperation.Contract, () -> { - TreeNodeState node = expandedMap.get(parentRowKey); - if (node == null) { - throw new IllegalStateException( - "Parent isn't available, can't manipulate child: setExpanded(" + row + ", " - + isExpanded + ")"); - } - if (isExpanded) { - node.expand(myRowKey); - } else { - node.collapse(myRowKey); - } - }); + : TreeTableRequest.TreeRequestOperation.Contract, () -> { + TreeNodeState node = expandedMap.get(parentRowKey); + if (node == null) { + throw new IllegalStateException("Parent isn't available, can't manipulate child: setExpanded(" + + row + ", " + isExpanded + ")"); + } + if (isExpanded) { + node.expand(myRowKey); + } else { + node.collapse(myRowKey); + } + }); scheduleSnapshotQuery(true); } @JsMethod public boolean isExpanded(Object row) { if (row instanceof Double) { - row = currentViewportData.rows - .getAt((int) ((double) row - lastResult.getSnapshotStart())); + row = currentViewportData.rows.getAt((int) ((double) row - lastResult.getSnapshotStart())); } else if (!(row instanceof TreeRow)) { throw new IllegalArgumentException("row parameter must be an index or a row"); } @@ -748,7 +715,7 @@ public boolean isExpanded(Object row) { // JsTable-like methods @JsMethod public void setViewport(double firstRow, double lastRow, @JsOptional JsArray columns, - @JsOptional Double updateInterval) { + @JsOptional Double updateInterval) { this.firstRow = firstRow; this.lastRow = lastRow; this.columns = columns != null ? Js.uncheckedCast(columns.slice()) : baseTable.getColumns(); @@ -764,8 +731,8 @@ public Promise getViewportData() { if (currentViewportData == null) { // only one of these two will fire, and when they do, they'll remove both handlers. 
addEventListenerOneShot( - EventPair.of(EVENT_UPDATED, e -> promise.succeed(currentViewportData)), - EventPair.of(EVENT_REQUEST_FAILED, promise::fail)); + EventPair.of(EVENT_UPDATED, e -> promise.succeed(currentViewportData)), + EventPair.of(EVENT_REQUEST_FAILED, promise::fail)); } else { promise.succeed(currentViewportData); } @@ -799,10 +766,9 @@ public JsArray applySort(Sort[] sort) { } /** - * Helper to indicate that table handles in nodes are no longer valid and need to be rebuilt. - * Parameter "invalidateFilters" should be true in all cases except when setting a new filter, - * and will result in the current filters being sent to the server to correctly filter any - * re-created handles. + * Helper to indicate that table handles in nodes are no longer valid and need to be rebuilt. Parameter + * "invalidateFilters" should be true in all cases except when setting a new filter, and will result in the current + * filters being sent to the server to correctly filter any re-created handles. */ private void releaseAllNodes(boolean invalidateFilters) { if (invalidateFilters) { @@ -870,8 +836,7 @@ public Column findColumn(String key) { @JsProperty public boolean isIncludeConstituents() { RollupDefinition rollupDef = baseTable.getTableDef().getAttributes().getRollupDefinition(); - return rollupDef != null - && rollupDef.getLeafType() == RollupDefinition.LeafType.Constituent; + return rollupDef != null && rollupDef.getLeafType() == RollupDefinition.LeafType.Constituent; } @JsProperty @@ -889,21 +854,21 @@ public Column[] findColumns(String[] keys) { } /** - * Provides Table-like selectDistinct functionality, but with a few quirks, since it is only - * fetching the distinct values for the given columns in the source table: + * Provides Table-like selectDistinct functionality, but with a few quirks, since it is only fetching the distinct + * values for the given columns in the source table: *
      *
    • Rollups may make no sense, since values are aggregated.
    • - *
    • Values found on orphaned (and remvoed) nodes will show up in the resulting table, even - * though they are not in the tree.
    • - *
    • Values found on parent nodes which are only present in the tree since a child is visible - * will not be present in the resulting table.
    • + *
    • Values found on orphaned (and remvoed) nodes will show up in the resulting table, even though they are not in + * the tree.
    • + *
    • Values found on parent nodes which are only present in the tree since a child is visible will not be present + * in the resulting table.
    • *
    */ @JsMethod public Promise selectDistinct(Column[] columns) { return sourceTable.then(t -> { - // if this is the first time it is used, it might not be filtered correctly, so check - // that the filters match up. + // if this is the first time it is used, it might not be filtered correctly, so check that the filters match + // up. if (!t.getFilter().asList().equals(getFilter().asList())) { t.applyFilter(getFilter().asList().toArray(new FilterCondition[0])); } @@ -923,8 +888,8 @@ public Promise getTotalsTableConfig() { @JsMethod public Promise getTotalsTable(@JsOptional Object config) { return sourceTable.then(t -> { - // if this is the first time it is used, it might not be filtered correctly, so check - // that the filters match up. + // if this is the first time it is used, it might not be filtered correctly, so check that the filters match + // up. if (!t.getFilter().asList().equals(getFilter().asList())) { t.applyFilter(getFilter().asList().toArray(new FilterCondition[0])); } @@ -938,13 +903,12 @@ public Promise getGrandTotalsTable(@JsOptional Object config) { } /** - * Indicates that the connect to the server has been reestablished, and the treetable should - * resume with the given state. + * Indicates that the connect to the server has been reestablished, and the treetable should resume with the given + * state. */ public void revive(ClientTableState state) { assert !scheduled && !running; - JsLog.debug("Reviving tree table", this, " for state ", - LazyString.of(state::toStringMinimal)); + JsLog.debug("Reviving tree table", this, " for state ", LazyString.of(state::toStringMinimal)); assert state == baseTable; unsuppressEvents(); LazyPromise.runLater(() -> { @@ -961,8 +925,7 @@ public void revive(ClientTableState state) { } /** - * Indicates that while the server connection has been reestablished, this table cannot be used - * for some reason. 
+ * Indicates that while the server connection has been reestablished, this table cannot be used for some reason. */ public void die(Object error) { notifyDeath(this, error); @@ -990,8 +953,7 @@ public void disconnected() { // } // KeySerializer serializer = new KeySerializer_Impl(); // TypeSerializer typeSerializer = serializer.createSerializer(); - // final StringSerializationStreamWriter writer = new - // StringSerializationStreamWriter(typeSerializer); + // final StringSerializationStreamWriter writer = new StringSerializationStreamWriter(typeSerializer); // writer.prepareToWrite(); // writer.writeInt(SERIALIZED_VERSION); // writer.writeString(typeSerializer.getChecksum()); @@ -1008,8 +970,8 @@ public void disconnected() { // return writer.toString(); // } // - // private void writeTreeNode(KeySerializer serializer, SerializationStreamWriter writer, Key - // key) throws SerializationException { + // private void writeTreeNode(KeySerializer serializer, SerializationStreamWriter writer, Key key) throws + // SerializationException { // TreeNodeState node = expandedMap.get(key); // if (node == null) { // writer.writeInt(0); @@ -1026,8 +988,7 @@ public void disconnected() { // public void restoreExpandedState(String nodesToRestore) throws SerializationException { // // sanity check that nothing has been expanded yet so we can safely do this // if (!expandedMap.get(Key.root()).expandedChildren.isEmpty()) { - // throw new IllegalArgumentException("Tree already has expanded children, ignoring - // restoreExpandedState call"); + // throw new IllegalArgumentException("Tree already has expanded children, ignoring restoreExpandedState call"); // } // if (nodesToRestore.isEmpty()) { // // no work to do, empty set of items expanded @@ -1035,26 +996,24 @@ public void disconnected() { // } // KeySerializer serializer = new KeySerializer_Impl(); // TypeSerializer typeSerializer = serializer.createSerializer(); - // StringSerializationStreamReader reader = new 
StringSerializationStreamReader(typeSerializer, - // nodesToRestore); + // StringSerializationStreamReader reader = new StringSerializationStreamReader(typeSerializer, nodesToRestore); // int vers = reader.readInt(); // if (vers != SERIALIZED_VERSION) { - // throw new IllegalArgumentException("Failed to deserialize, current version doesn't match the - // serialized data. Expected version " + SERIALIZED_VERSION + ", actual version " + vers); + // throw new IllegalArgumentException("Failed to deserialize, current version doesn't match the serialized data. + // Expected version " + SERIALIZED_VERSION + ", actual version " + vers); // } // String checksum = reader.readString(); // if (!checksum.equals(typeSerializer.getChecksum())) { - // throw new IllegalArgumentException("Failed to deserialize, current type definition doesn't - // match the serialized data. Expected: " + typeSerializer.getChecksum() + ", actual: " + - // checksum); + // throw new IllegalArgumentException("Failed to deserialize, current type definition doesn't match the serialized + // data. Expected: " + typeSerializer.getChecksum() + ", actual: " + checksum); // } // // // read each key, assuming root as the first key // readTreeNode(serializer, reader, Key.root()); // } // - // private void readTreeNode(KeySerializer serializer, SerializationStreamReader reader, Key - // key) throws SerializationException { + // private void readTreeNode(KeySerializer serializer, SerializationStreamReader reader, Key key) throws + // SerializationException { // TreeNodeState node = expandedMap.get(key); // int count = reader.readInt(); // for (int i = 0; i < count; i++) { @@ -1068,10 +1027,10 @@ public void disconnected() { public Promise copy() { return connection.newState((c, state, metadata) -> { // connection.getServer().reexport(this.baseTable.getHandle(), state.getHandle(), c); - throw new UnsupportedOperationException("reexport");// probably not needed at all with - // new session mechanism? 
+ throw new UnsupportedOperationException("reexport");// probably not needed at all with new session + // mechanism? }, "reexport for tree.copy()") - .refetch(this, connection.metadata()) - .then(state -> Promise.resolve(new JsTreeTable(state, connection))); + .refetch(this, connection.metadata()) + .then(state -> Promise.resolve(new JsTreeTable(state, connection))); } } diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsTreeTableConfig.java b/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsTreeTableConfig.java index e789ccf9d70..54f65dc1c31 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsTreeTableConfig.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/tree/JsTreeTableConfig.java @@ -6,8 +6,7 @@ import jsinterop.base.JsPropertyMap; /** - * Configuration object for running Table.treeTable to produce a hierarchical view of a given "flat" - * table. + * Configuration object for running Table.treeTable to produce a hierarchical view of a given "flat" table. 
*/ @JsType(name = "TreeTableConfig", namespace = "dh") public class JsTreeTableConfig { diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/tree/enums/JsAggregationOperation.java b/web/client-api/src/main/java/io/deephaven/web/client/api/tree/enums/JsAggregationOperation.java index 46da81ce803..3bace098c9e 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/tree/enums/JsAggregationOperation.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/tree/enums/JsAggregationOperation.java @@ -5,19 +5,19 @@ @JsType(name = "AggregationOperation", namespace = "dh") public class JsAggregationOperation { public static final String COUNT = "Count", - COUNT_DISTINCT = "CountDistinct", - DISTINCT = "Distinct", - MIN = "Min", - MAX = "Max", - SUM = "Sum", - ABS_SUM = "AbsSum", - VAR = "Var", - AVG = "Avg", - STD = "Std", - FIRST = "First", - LAST = "Last", - UNIQUE = "Unique", - // Array operation isn't legal in all contexts, just omit it for now - // ARRAY = "Array", - SKIP = "Skip"; + COUNT_DISTINCT = "CountDistinct", + DISTINCT = "Distinct", + MIN = "Min", + MAX = "Max", + SUM = "Sum", + ABS_SUM = "AbsSum", + VAR = "Var", + AVG = "Avg", + STD = "Std", + FIRST = "First", + LAST = "Last", + UNIQUE = "Unique", + // Array operation isn't legal in all contexts, just omit it for now + // ARRAY = "Array", + SKIP = "Skip"; } diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/calendar/JsBusinessCalendar.java b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/calendar/JsBusinessCalendar.java index d25612cd276..3ce0c94e38a 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/calendar/JsBusinessCalendar.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/calendar/JsBusinessCalendar.java @@ -16,11 +16,10 @@ public JsBusinessCalendar(BusinessCalendarDescriptor businessCalendarDescriptor) this.businessCalendarDescriptor = businessCalendarDescriptor; 
JsObject.freeze(this.businessCalendarDescriptor); timeZone = JsTimeZone.getTimeZone(businessCalendarDescriptor.getTimeZone()); - businessPeriods = businessCalendarDescriptor.getBusinessPeriodsList() - .map((p0, p1, p2) -> new JsBusinessPeriod(p0)); + businessPeriods = + businessCalendarDescriptor.getBusinessPeriodsList().map((p0, p1, p2) -> new JsBusinessPeriod(p0)); JsObject.freeze(businessPeriods); - holidays = - businessCalendarDescriptor.getHolidaysList().map((p0, p1, p2) -> new JsHoliday(p0)); + holidays = businessCalendarDescriptor.getHolidaysList().map((p0, p1, p2) -> new JsHoliday(p0)); JsObject.freeze(holidays); } @@ -37,7 +36,7 @@ public JsTimeZone getTimeZone() { @JsProperty public String[] getBusinessDays() { return businessCalendarDescriptor.getBusinessDaysList() - .map((p0, p1, p2) -> JsDayOfWeek.values()[(int) (double) p0]); + .map((p0, p1, p2) -> JsDayOfWeek.values()[(int) (double) p0]); } @JsProperty diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/calendar/JsHoliday.java b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/calendar/JsHoliday.java index 649b1195e3c..0a8039d2e6f 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/calendar/JsHoliday.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/calendar/JsHoliday.java @@ -11,9 +11,8 @@ public class JsHoliday { public JsHoliday(Holiday holiday) { date = new LocalDateWrapper(holiday.getDate().getYear(), holiday.getDate().getMonth(), - holiday.getDate().getDay()); - businessPeriods = - holiday.getBusinessPeriodsList().map((p0, p1, p2) -> new JsBusinessPeriod(p0)); + holiday.getDate().getDay()); + businessPeriods = holiday.getBusinessPeriodsList().map((p0, p1, p2) -> new JsBusinessPeriod(p0)); JsObject.freeze(businessPeriods); } diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/ChartData.java b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/ChartData.java 
index f2e1a5f83fb..0d504784957 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/ChartData.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/ChartData.java @@ -17,9 +17,9 @@ import java.util.Map.Entry; /** - * Helper class to manage snapshots and deltas and keep not only a contiguous JS array of data per - * column in the underlying table, but also support a mapping function to let client code translate - * data in some way for display and keep that cached as well. + * Helper class to manage snapshots and deltas and keep not only a contiguous JS array of data per column in the + * underlying table, but also support a mapping function to let client code translate data in some way for display and + * keep that cached as well. */ @JsType(namespace = "dh.plot") public class ChartData { @@ -33,8 +33,7 @@ public ChartData(JsTable table) { } public void update(Object eventDetail) { - assert eventDetail instanceof UpdateEventData - : "update() can only take table subscription event instances"; + assert eventDetail instanceof UpdateEventData : "update() can only take table subscription event instances"; UpdateEventData tableData = (UpdateEventData) eventDetail; Iterator addedIterator = tableData.getAdded().getRange().rangeIterator(); @@ -50,28 +49,24 @@ public void update(Object eventDetail) { JsArray[] allColumns; if (nextRemoved != null) { // noinspection unchecked - allColumns = cachedData.values().stream().flatMap(m -> m.values().stream()) - .toArray(JsArray[]::new); + allColumns = cachedData.values().stream().flatMap(m -> m.values().stream()).toArray(JsArray[]::new); } else { allColumns = null; } while (nextAdded != null || nextRemoved != null || nextModified != null) { if (i >= indexes.length) { - // We're past the end, nothing possibly left to remove, just append all the new - // items + // We're past the end, nothing possibly left to remove, just append all the new items // Note that this is the first 
case we'll hit on initial snapshot assert nextRemoved == null; assert nextModified == null; while (nextAdded != null) { insertDataRange(tableData, nextAdded, i); - // increment i past these new items so our offset is correct if there is a next - // block + // increment i past these new items so our offset is correct if there is a next block i += nextAdded.size(); - // not bothering with i or lastIndexSeen since we will break after this while - // loop + // not bothering with i or lastIndexSeen since we will break after this while loop nextAdded = addedIterator.hasNext() ? addedIterator.next() : null; } break; @@ -92,11 +87,10 @@ public void update(Object eventDetail) { nextAdded = addedIterator.hasNext() ? addedIterator.next() : null; } else if (nextModified != null && nextModified.getFirst() == nextIndex) { - assert nextModified.getLast() >= nextIndex; // somehow being asked to update an item - // which wasn't present + assert nextModified.getLast() >= nextIndex; // somehow being asked to update an item which wasn't + // present - // the updated block is contiguous, make sure there are at least that many items to - // tweak + // the updated block is contiguous, make sure there are at least that many items to tweak assert indexes.length - i >= nextModified.size(); replaceDataRange(tableData, nextModified, i); @@ -106,11 +100,9 @@ public void update(Object eventDetail) { nextModified = modifiedIterator.hasNext() ? 
modifiedIterator.next() : null; } else if (nextRemoved != null && nextRemoved.getFirst() == nextIndex) { - assert nextRemoved.getLast() >= nextIndex; // somehow being asked to remove an item - // which wasn't present + assert nextRemoved.getLast() >= nextIndex; // somehow being asked to remove an item which wasn't present - // the block being removed is contiguous, so make sure there are at least that many - // and splice them out + // the block being removed is contiguous, so make sure there are at least that many and splice them out assert indexes.length - i >= nextRemoved.size(); // splice out the indexes @@ -134,10 +126,9 @@ public void update(Object eventDetail) { if (JsSettings.isDevMode()) { assert ((UpdateEventData) eventDetail).getRows().length == indexes.length; assert cachedData.values().stream().flatMap(m -> m.values().stream()) - .allMatch(arr -> arr.length == indexes.length); - assert cachedData.values().stream().flatMap(m -> m.values().stream()).allMatch( - arr -> arr.reduce((Object val, Any p1, int p2, Any[] p3) -> ((Integer) val) + 1, - 0) == indexes.length); + .allMatch(arr -> arr.length == indexes.length); + assert cachedData.values().stream().flatMap(m -> m.values().stream()).allMatch(arr -> arr + .reduce((Object val, Any p1, int p2, Any[] p3) -> ((Integer) val) + 1, 0) == indexes.length); JsRangeSet fullIndex = tableData.getFullIndex(); PrimitiveIterator.OfLong iter = fullIndex.getRange().indexIterator(); @@ -148,13 +139,10 @@ public void update(Object eventDetail) { } private void replaceDataRange(UpdateEventData tableData, Range range, int offset) { - // we don't touch the indexes at all, only need to walk each column and replace values in - // this range - for (Entry, JsArray>> columnMap : cachedData - .entrySet()) { + // we don't touch the indexes at all, only need to walk each column and replace values in this range + for (Entry, JsArray>> columnMap : cachedData.entrySet()) { Column col = table.findColumn(columnMap.getKey()); - for (Entry, 
JsArray> mapFuncAndArray : columnMap.getValue() - .entrySet()) { + for (Entry, JsArray> mapFuncAndArray : columnMap.getValue().entrySet()) { JsFunction func = mapFuncAndArray.getKey(); JsArray arr = mapFuncAndArray.getValue(); @@ -165,8 +153,7 @@ private void replaceDataRange(UpdateEventData tableData, Range range, int offset } } else { for (int i = 0; i < range.size(); i++) { - arr.setAt(offset + i, - func.apply(tableData.getData(range.getFirst() + i, col))); + arr.setAt(offset + i, func.apply(tableData.getData(range.getFirst() + i, col))); } } } @@ -178,17 +165,14 @@ private void insertDataRange(UpdateEventData tableData, Range range, int offset) batchSplice(offset, asArray(indexes), longs(range)); // splice in data to each column - for (Entry, JsArray>> columnMap : cachedData - .entrySet()) { + for (Entry, JsArray>> columnMap : cachedData.entrySet()) { String columnName = columnMap.getKey(); Column col = table.findColumn(columnName); - for (Entry, JsArray> mapFuncAndArray : columnMap.getValue() - .entrySet()) { + for (Entry, JsArray> mapFuncAndArray : columnMap.getValue().entrySet()) { JsFunction func = mapFuncAndArray.getKey(); JsArray arr = mapFuncAndArray.getValue(); - // here we create a new array and splice it in, to avoid slowly growing the data - // array in the case + // here we create a new array and splice it in, to avoid slowly growing the data array in the case // of many rows being added Any[] values = values(tableData, func, col, range); batchSplice(offset, arr, values); @@ -204,15 +188,13 @@ private Any[] batchSplice(int offset, JsArray existingData, Any[] dataToIns int batchSize = 1 << 15; for (int i = 0; i < lengthToInsert; i += batchSize) { - existingData.splice(offset + i, 0, - jsArrayToInsert.slice(i, Math.min(i + batchSize, lengthToInsert))); + existingData.splice(offset + i, 0, jsArrayToInsert.slice(i, Math.min(i + batchSize, lengthToInsert))); } return Js.uncheckedCast(existingData); } - private Any[] values(UpdateEventData tableData, 
JsFunction mapFunc, Column col, - Range insertedRange) { + private Any[] values(UpdateEventData tableData, JsFunction mapFunc, Column col, Range insertedRange) { JsArray result = new JsArray<>(); if (mapFunc == null) { @@ -243,34 +225,29 @@ private static Any[] longs(Range range) { return Js.uncheckedCast(longs); } - public JsArray getColumn(String columnName, JsFunction mappingFunc, - TableData currentUpdate) { + public JsArray getColumn(String columnName, JsFunction mappingFunc, TableData currentUpdate) { // returns the existing column, if any, otherwise iterates over existing data and builds it return cachedData - .computeIfAbsent(columnName, ignore -> new IdentityHashMap<>()) - .computeIfAbsent(mappingFunc, - ignore -> collectAllData(columnName, mappingFunc, currentUpdate)); + .computeIfAbsent(columnName, ignore -> new IdentityHashMap<>()) + .computeIfAbsent(mappingFunc, ignore -> collectAllData(columnName, mappingFunc, currentUpdate)); } /** * Helper to build the full array since it hasn't been requested before. */ - private JsArray collectAllData(String columnName, JsFunction mappingFunc, - TableData currentUpdate) { + private JsArray collectAllData(String columnName, JsFunction mappingFunc, TableData currentUpdate) { Column column = table.findColumn(columnName); if (mappingFunc == null) { return Js.uncheckedCast(currentUpdate.getRows().map((p0, p1, p2) -> p0.get(column))); } - return Js.uncheckedCast( - currentUpdate.getRows().map((p0, p1, p2) -> mappingFunc.apply(p0.get(column)))); + return Js.uncheckedCast(currentUpdate.getRows().map((p0, p1, p2) -> mappingFunc.apply(p0.get(column)))); } /** - * Removes some column from the cache, avoiding extra computation on incoming events, and - * possibly freeing some memory. If this pair of column name and map function are requested - * again, it will be recomputed from scratch. + * Removes some column from the cache, avoiding extra computation on incoming events, and possibly freeing some + * memory. 
If this pair of column name and map function are requested again, it will be recomputed from scratch. */ public void removeColumn(String columnName, JsFunction mappingFunc) { Map, JsArray> map = cachedData.get(columnName); diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/DataUpdateEvent.java b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/DataUpdateEvent.java index 6a6d4535768..c01d7b92d3c 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/DataUpdateEvent.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/DataUpdateEvent.java @@ -17,7 +17,7 @@ public static final DataUpdateEvent empty(JsSeries... series) { return new DataUpdateEvent(series, null, null) { @Override public JsArray getArray(JsSeries series, int sourceType, - @JsOptional JsFunction mappingFunc) { + @JsOptional JsFunction mappingFunc) { return new JsArray<>(); } }; @@ -43,8 +43,7 @@ public JsArray getArray(JsSeries series, int sourceName) { } @JsMethod - public JsArray getArray(JsSeries series, int sourceType, - @JsOptional JsFunction mappingFunc) { + public JsArray getArray(JsSeries series, int sourceType, @JsOptional JsFunction mappingFunc) { String columnName = getColumnName(series, sourceType); return data.getColumn(columnName, mappingFunc, currentUpdate); @@ -52,9 +51,8 @@ public JsArray getArray(JsSeries series, int sourceType, private String getColumnName(JsSeries series, int sourceType) { return series.getDescriptor().getDataSourcesList().asList().stream() - .filter(sd -> sd.getType() == sourceType) - .findFirst().map(SourceDescriptor::getColumnName) - .orElseThrow(() -> new IllegalArgumentException( - "No sourceType " + sourceType + " in provided series")); + .filter(sd -> sd.getType() == sourceType) + .findFirst().map(SourceDescriptor::getColumnName) + .orElseThrow(() -> new IllegalArgumentException("No sourceType " + sourceType + " in provided series")); } } diff --git 
a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/DownsampleOptions.java b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/DownsampleOptions.java index 3b8d8213f5e..30ff79f8ba7 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/DownsampleOptions.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/DownsampleOptions.java @@ -5,26 +5,26 @@ @JsType(namespace = "dh.plot") public final class DownsampleOptions { /** - * Max number of items in the series before DEFAULT will not attempt to load the series without - * downsampling. Above this size if downsample fails or is not applicable, the series won't be - * loaded unless DISABLE is passed to series.subscribe(). + * Max number of items in the series before DEFAULT will not attempt to load the series without downsampling. Above + * this size if downsample fails or is not applicable, the series won't be loaded unless DISABLE is passed to + * series.subscribe(). */ public static int MAX_SERIES_SIZE = 30_000; /** - * Max number of items in the series where the subscription will be allowed at all. Above this - * limit, even with downsampling disabled, the series will not load data. + * Max number of items in the series where the subscription will be allowed at all. Above this limit, even with + * downsampling disabled, the series will not load data. */ public static int MAX_SUBSCRIPTION_SIZE = 200_000; /** - * Flag to let the API decide what data will be available, based on the nature of the data, the - * series, and how the axes are configured. + * Flag to let the API decide what data will be available, based on the nature of the data, the series, and how the + * axes are configured. */ public static final DownsampleOptions DEFAULT = new DownsampleOptions(); /** - * Flat to entirely disable downsampling, and force all data to load, no matter how many items - * that would be, up to the limit of MAX_SUBSCRIPTION_SIZE. 
+ * Flat to entirely disable downsampling, and force all data to load, no matter how many items that would be, up to + * the limit of MAX_SUBSCRIPTION_SIZE. */ public static final DownsampleOptions DISABLE = new DownsampleOptions(); diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/FigureSubscription.java b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/FigureSubscription.java index 226270a62f3..1a08f524275 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/FigureSubscription.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/FigureSubscription.java @@ -14,17 +14,15 @@ import java.util.stream.Collectors; /** - * Represents the subscriptions that are needed to get data for the series in a figure. The equals - * and hashcode method are designed to only match the features which can change while still - * maintaining the current subscription - the actual "subscribed" series can change as long as they - * don't affect these other parameters. + * Represents the subscriptions that are needed to get data for the series in a figure. The equals and hashcode method + * are designed to only match the features which can change while still maintaining the current subscription - the + * actual "subscribed" series can change as long as they don't affect these other parameters. */ public final class FigureSubscription { /** - * Minimum multiple that the table size needs to be decreased by in order to trigger - * downsampling. For example, if there were 10k rows in the table, and downsampling might only - * reduce this to 6k, if the factor is 2 we would not downsample. If there had been 15k rows, - * then with a factor of 2 we would downsample. + * Minimum multiple that the table size needs to be decreased by in order to trigger downsampling. 
For example, if + * there were 10k rows in the table, and downsampling might only reduce this to 6k, if the factor is 2 we would not + * downsample. If there had been 15k rows, then with a factor of 2 we would downsample. */ private static final int MIN_DOWNSAMPLE_FACTOR = 2; @@ -33,10 +31,8 @@ public final class FigureSubscription { // The table that we are subscribing to private final JsTable originalTable; - // The series that are currently watching this table - note that this can change, so is not - // included - // in the hashcode/equals. What cannot change is the set of columns that are required by these - // series, + // The series that are currently watching this table - note that this can change, so is not included + // in the hashcode/equals. What cannot change is the set of columns that are required by these series, // this must be checked by the method that mutates this set. private final Set includedSeries = new HashSet<>(); private final Set requiredColumns; @@ -52,8 +48,8 @@ public final class FigureSubscription { private boolean firstEventFired = false; public FigureSubscription(JsFigure figure, final JsTable originalTable, - final JsFigure.AxisRange downsampleAxisRange, - final JsFigure.DownsampleParams downsampleParams, final Set series) { + final JsFigure.AxisRange downsampleAxisRange, final JsFigure.DownsampleParams downsampleParams, + final Set series) { this.figure = figure; this.originalTable = originalTable; if (downsampleAxisRange == null) { @@ -63,27 +59,26 @@ public FigureSubscription(JsFigure figure, final JsTable originalTable, this.downsampleParams = downsampleParams; this.includedSeries.addAll(series); - this.requiredColumns = Collections - .unmodifiableSet(includedSeries.stream().flatMap(s -> Arrays.stream(s.getSources())) - .map(source -> source.getDescriptor().getColumnName()).collect(Collectors.toSet())); + this.requiredColumns = + Collections.unmodifiableSet(includedSeries.stream().flatMap(s -> Arrays.stream(s.getSources())) + 
.map(source -> source.getDescriptor().getColumnName()).collect(Collectors.toSet())); } /** - * Ensures that all of these series are present in this subscription, and returns the ones which - * were added, if any, so they can get a notification of their current data. + * Ensures that all of these series are present in this subscription, and returns the ones which were added, if any, + * so they can get a notification of their current data. * - * Replacement series must match this subscription already - the only facet that will be checked - * is that the columns match. + * Replacement series must match this subscription already - the only facet that will be checked is that the columns + * match. */ public Set replaceSeries(final Set replacements) { final Set copy = new HashSet<>(replacements); copy.removeAll(includedSeries); assert requiredColumns.containsAll(copy.stream().flatMap(s -> Arrays.stream(s.getSources())) - .map(s -> s.getDescriptor().getColumnName()).collect(Collectors.toSet())); + .map(s -> s.getDescriptor().getColumnName()).collect(Collectors.toSet())); includedSeries.addAll(copy); - // For each of the series in copy, if this subscription is downsampled we need to notify of - // this fact. + // For each of the series in copy, if this subscription is downsampled we need to notify of this fact. 
if (downsampleAxisRange != null) { CustomEventInit init = CustomEventInit.create(); init.setDetail(replacements.toArray()); @@ -91,15 +86,14 @@ public Set replaceSeries(final Set replacements) { } if (firstEventFired) { - // Next, if any data has loaded, regardless of downsample state, we need to fire an - // update event for those series + // Next, if any data has loaded, regardless of downsample state, we need to fire an update event for those + // series CustomEventInit event = CustomEventInit.create(); - event.setDetail( - new DataUpdateEvent(replacements.toArray(new JsSeries[0]), currentData, null)); + event.setDetail(new DataUpdateEvent(replacements.toArray(new JsSeries[0]), currentData, null)); figure.fireEvent(JsFigure.EVENT_UPDATED, event); - // Finally, if data was loaded and also the subscription is downsampled, we need to - // notify that the downsample + // Finally, if data was loaded and also the subscription is downsampled, we need to notify that the + // downsample // is complete if (downsampleAxisRange != null) { CustomEventInit successInit = CustomEventInit.create(); @@ -113,22 +107,20 @@ public Set replaceSeries(final Set replacements) { /* - * Equality of two FigureSubscriptions is based on: o identity of the original table o equality - * of the oneclick filter o equality of the downsampleAxisRange o equality of the - * downsampleParams EXCEPT the series array (since we permit that to change) o equality of the - * required columns found in all includedSeries As such, the hashCode should also be based on - * these. + * Equality of two FigureSubscriptions is based on: o identity of the original table o equality of the oneclick + * filter o equality of the downsampleAxisRange o equality of the downsampleParams EXCEPT the series array (since we + * permit that to change) o equality of the required columns found in all includedSeries As such, the hashCode + * should also be based on these. 
*/ @Override public int hashCode() { int result = figure.hashCode(); - result = 31 * result + originalTable.hashCode();// JsTable has no hashCode, so each instance - // should have its own value (until - // collisions happen) + result = 31 * result + originalTable.hashCode();// JsTable has no hashCode, so each instance should have its own + // value (until collisions happen) result = 31 * result + (downsampleAxisRange == null ? 0 : downsampleAxisRange.hashCode()); result = 31 * result + (downsampleParams == null ? 0 - : downsampleParams.getPixelCount() * 31 + Arrays.hashCode(downsampleParams.getyCols())); + : downsampleParams.getPixelCount() * 31 + Arrays.hashCode(downsampleParams.getyCols())); result = 31 * result + requiredColumns.hashCode(); return result; } @@ -151,13 +143,11 @@ public boolean equals(Object o) { } if ((other.downsampleParams == null && downsampleParams != null - || other.downsampleParams != null && downsampleParams == null) // one is null and the - // other isnt - || (other.downsampleParams != null && // now we know both are non-null, check if their - // two fields that we care about match - (other.downsampleParams.getPixelCount() != downsampleParams.getPixelCount() - || Arrays.equals(other.downsampleParams.getyCols(), - downsampleParams.getyCols())))) { + || other.downsampleParams != null && downsampleParams == null) // one is null and the other isnt + || (other.downsampleParams != null && // now we know both are non-null, check if their two fields that + // we care about match + (other.downsampleParams.getPixelCount() != downsampleParams.getPixelCount() + || Arrays.equals(other.downsampleParams.getyCols(), downsampleParams.getyCols())))) { return false; } @@ -183,31 +173,30 @@ public void subscribe() { // check if there are too many items, and downsampling isn't outright disabled if (table.getSize() > DownsampleOptions.MAX_SERIES_SIZE) { if (includedSeries.stream().map(JsSeries::getDownsampleOptions) - .noneMatch(options -> options == 
DownsampleOptions.DISABLE)) { - // stop, we can't downsample, we haven't been told to disable, and there are - // too many items to fetch them outright - figure.downsampleNeeded("Disable downsampling to retrieve all items", - includedSeries, table.getSize()); + .noneMatch(options -> options == DownsampleOptions.DISABLE)) { + // stop, we can't downsample, we haven't been told to disable, and there are too many items to + // fetch them outright + figure.downsampleNeeded("Disable downsampling to retrieve all items", includedSeries, + table.getSize()); // noinspection unchecked return (Promise) (Promise) Promise.reject( - "Too many items to display, disable downsampling to display this series or size the axes"); + "Too many items to display, disable downsampling to display this series or size the axes"); } else if (table.getSize() > DownsampleOptions.MAX_SUBSCRIPTION_SIZE) { - figure.downsampleNeeded("Too many items to disable downsampling", - includedSeries, table.getSize()); + figure.downsampleNeeded("Too many items to disable downsampling", includedSeries, + table.getSize()); // noinspection unchecked return (Promise) (Promise) Promise - .reject("Too many items to disable downsampling"); + .reject("Too many items to disable downsampling"); } } // we actually sub to a copy, so that we can close it no matter what when we're done return subscribe(tablePromise); - } else if (table.getSize() < 2 * (1 + downsampleParams.getyCols().length) - * downsampleParams.getPixelCount() * MIN_DOWNSAMPLE_FACTOR) { - // Each "pixel" requires at most 2 rows per column in the table (max and min), so we - // check if the pixel count times that is sufficiently - // greater than the pixel size. 
The MIN_DOWNSAMPLE_FACTOR field is used to define - // "sufficiently greater" + } else if (table.getSize() < 2 * (1 + downsampleParams.getyCols().length) * downsampleParams.getPixelCount() + * MIN_DOWNSAMPLE_FACTOR) { + // Each "pixel" requires at most 2 rows per column in the table (max and min), so we check if the pixel + // count times that is sufficiently + // greater than the pixel size. The MIN_DOWNSAMPLE_FACTOR field is used to define "sufficiently greater" return subscribe(tablePromise); // TODO revisit this so we can watch the row count and downsample later if needed @@ -223,26 +212,27 @@ public void subscribe() { CustomEventInit init = CustomEventInit.create(); init.setDetail(includedSeries.toArray()); figure.fireEvent(JsFigure.EVENT_DOWNSAMPLESTARTED, init); - Promise downsampled = tablePromise.then(t -> t - .downsample(zoomRange, downsampleParams.getPixelCount(), - downsampleAxisRange.getxCol(), downsampleParams.getyCols()) - .then(resultTable -> Promise.resolve(resultTable), err -> { - figure.downsampleFailed(err.toString(), includedSeries, table.getSize()); - - if (table.getSize() > DownsampleOptions.MAX_SERIES_SIZE) { - if (includedSeries.stream().map(JsSeries::getDownsampleOptions) - .noneMatch(options -> options == DownsampleOptions.DISABLE)) { - // stop, we can't downsample, we haven't been told to disable, and - // there are too many items to fetch them outright - // noinspection unchecked - return (Promise) (Promise) Promise.reject(""); - } - } else if (table.getSize() > DownsampleOptions.MAX_SUBSCRIPTION_SIZE) { - // noinspection unchecked - return (Promise) (Promise) Promise.reject(""); - } - return Promise.resolve(table); - })); + Promise downsampled = + tablePromise.then(t -> t + .downsample(zoomRange, downsampleParams.getPixelCount(), downsampleAxisRange.getxCol(), + downsampleParams.getyCols()) + .then(resultTable -> Promise.resolve(resultTable), err -> { + figure.downsampleFailed(err.toString(), includedSeries, table.getSize()); + + if 
(table.getSize() > DownsampleOptions.MAX_SERIES_SIZE) { + if (includedSeries.stream().map(JsSeries::getDownsampleOptions) + .noneMatch(options -> options == DownsampleOptions.DISABLE)) { + // stop, we can't downsample, we haven't been told to disable, and there are + // too many items to fetch them outright + // noinspection unchecked + return (Promise) (Promise) Promise.reject(""); + } + } else if (table.getSize() > DownsampleOptions.MAX_SUBSCRIPTION_SIZE) { + // noinspection unchecked + return (Promise) (Promise) Promise.reject(""); + } + return Promise.resolve(table); + })); return subscribe(downsampled); } }); @@ -259,20 +249,19 @@ private Promise subscribe(final Promise tablePromise } TableSubscription sub = table.subscribe(table.getColumns()); - // TODO, technically we can probably unsubscribe to the table at this point, since we're - // listening to the subscription itself + // TODO, technically we can probably unsubscribe to the table at this point, since we're listening to the + // subscription itself this.currentData = new ChartData(table); sub.addEventListener(TableSubscription.EVENT_UPDATED, e -> { - // refire with specifics for the columns that we're watching here, after updating - // data arrays + // refire with specifics for the columns that we're watching here, after updating data arrays SubscriptionTableData.UpdateEventData subscriptionUpdateData = - (SubscriptionTableData.UpdateEventData) ((CustomEvent) e).detail; + (SubscriptionTableData.UpdateEventData) ((CustomEvent) e).detail; currentData.update(subscriptionUpdateData); CustomEventInit event = CustomEventInit.create(); - event.setDetail(new DataUpdateEvent(includedSeries.toArray(new JsSeries[0]), - currentData, subscriptionUpdateData)); + event.setDetail(new DataUpdateEvent(includedSeries.toArray(new JsSeries[0]), currentData, + subscriptionUpdateData)); figure.fireEvent(JsFigure.EVENT_UPDATED, event); if (!firstEventFired) { diff --git 
a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/JsAxis.java b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/JsAxis.java index cb0130d9e6c..59b13bd164f 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/JsAxis.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/JsAxis.java @@ -23,8 +23,7 @@ public JsAxis(AxisDescriptor descriptor, JsFigure jsFigure) { this.axis = descriptor; this.jsFigure = jsFigure; - final BusinessCalendarDescriptor businessCalendarDescriptor = - descriptor.getBusinessCalendarDescriptor(); + final BusinessCalendarDescriptor businessCalendarDescriptor = descriptor.getBusinessCalendarDescriptor(); if (businessCalendarDescriptor != null) { businessCalendar = new JsBusinessCalendar(businessCalendarDescriptor); } else { @@ -156,12 +155,10 @@ public boolean isTimeAxis() { } @JsMethod - public void range(@JsOptional Double pixelCount, @JsOptional Object min, - @JsOptional Object max) { + public void range(@JsOptional Double pixelCount, @JsOptional Object min, @JsOptional Object max) { if (pixelCount == null || !Js.typeof(Js.asAny(pixelCount)).equals("number")) { if (this.pixels != null) { - JsLog.warn( - "Turning off downsampling on a chart where it is running is not currently supported"); + JsLog.warn("Turning off downsampling on a chart where it is running is not currently supported"); return; } JsLog.warn("Ignoring Axis.range() call with non-numeric pixel count"); @@ -175,17 +172,15 @@ public void range(@JsOptional Double pixelCount, @JsOptional Object min, if (min != null || max != null) { if (min == null || max == null) { - throw new IllegalArgumentException( - "If min or max are provided, both must be provided"); + throw new IllegalArgumentException("If min or max are provided, both must be provided"); } - if (min instanceof Number && (double) min < 10 - || max instanceof Number && (double) max < 10) { + if (min instanceof Number && (double) min 
< 10 || max instanceof Number && (double) max < 10) { JsLog.warn("Ignoring max/min, at least one doesn't make sense", max, min); } else { - this.min = JsDateTimeFormat.longFromDate(min).orElseThrow( - () -> new IllegalArgumentException("Cannot interpret min as a date: " + min)); - this.max = JsDateTimeFormat.longFromDate(max).orElseThrow( - () -> new IllegalArgumentException("Cannot interpret max as a date: " + max)); + this.min = JsDateTimeFormat.longFromDate(min) + .orElseThrow(() -> new IllegalArgumentException("Cannot interpret min as a date: " + min)); + this.max = JsDateTimeFormat.longFromDate(max) + .orElseThrow(() -> new IllegalArgumentException("Cannot interpret max as a date: " + max)); } } else { this.min = null; diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/JsChart.java b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/JsChart.java index 1c67f18c6f9..fdb35a36ecc 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/JsChart.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/JsChart.java @@ -25,20 +25,18 @@ public class JsChart extends HasEventHandling { public JsChart(ChartDescriptor descriptor, JsFigure jsFigure) { this.descriptor = descriptor; // build axes first, key them in a map for easy reuse when constructing series instances - axes = descriptor.getAxesList().asList().stream() - .map((axisDescriptor) -> new JsAxis(axisDescriptor, jsFigure)).toArray(JsAxis[]::new); + axes = descriptor.getAxesList().asList().stream().map((axisDescriptor) -> new JsAxis(axisDescriptor, jsFigure)) + .toArray(JsAxis[]::new); JsObject.freeze(axes); Map indexed = new HashMap<>(); for (int i = 0; i < axes.length; i++) { indexed.put(axes[i].getId(), axes[i]); } series = descriptor.getSeriesList().asList().stream() - .map((seriesDescriptor) -> new JsSeries(seriesDescriptor, jsFigure, indexed)) - .toArray(JsSeries[]::new); + .map((seriesDescriptor) -> new 
JsSeries(seriesDescriptor, jsFigure, indexed)).toArray(JsSeries[]::new); multiSeries = descriptor.getMultiSeriesList().asList().stream() - .map((multiSeriesDescriptor) -> new JsMultiSeries(multiSeriesDescriptor, jsFigure, - indexed, this)) - .toArray(JsMultiSeries[]::new); + .map((multiSeriesDescriptor) -> new JsMultiSeries(multiSeriesDescriptor, jsFigure, indexed, this)) + .toArray(JsMultiSeries[]::new); JsObject.freeze(multiSeries); } diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/JsFigure.java b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/JsFigure.java index 413e6c83db3..6def07736e0 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/JsFigure.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/JsFigure.java @@ -40,14 +40,14 @@ private static native Throwable ofObject(Object obj) /*-{ @JsProperty(namespace = "dh.plot.Figure") public static final String EVENT_UPDATED = "updated", - EVENT_SERIES_ADDED = "seriesadded", - EVENT_DISCONNECT = JsTable.EVENT_DISCONNECT, - EVENT_RECONNECT = JsTable.EVENT_RECONNECT, - EVENT_RECONNECTFAILED = JsTable.EVENT_RECONNECTFAILED, - EVENT_DOWNSAMPLESTARTED = "downsamplestarted", - EVENT_DOWNSAMPLEFINISHED = "downsamplefinished", - EVENT_DOWNSAMPLEFAILED = "downsamplefailed", - EVENT_DOWNSAMPLENEEDED = "downsampleneeded"; + EVENT_SERIES_ADDED = "seriesadded", + EVENT_DISCONNECT = JsTable.EVENT_DISCONNECT, + EVENT_RECONNECT = JsTable.EVENT_RECONNECT, + EVENT_RECONNECTFAILED = JsTable.EVENT_RECONNECTFAILED, + EVENT_DOWNSAMPLESTARTED = "downsamplestarted", + EVENT_DOWNSAMPLEFINISHED = "downsamplefinished", + EVENT_DOWNSAMPLEFAILED = "downsamplefailed", + EVENT_DOWNSAMPLENEEDED = "downsampleneeded"; public interface FigureFetch { void fetch(JsBiConsumer callback); @@ -109,8 +109,7 @@ public String toString() { private final Map downsampled = new HashMap<>(); - private final Map activeFigureSubscriptions = - new 
HashMap<>(); + private final Map activeFigureSubscriptions = new HashMap<>(); private boolean subCheckEnqueued = false; @@ -133,7 +132,7 @@ public Promise refetch() { this.descriptor = response.getFigureDescriptor(); charts = descriptor.getChartsList().asList().stream() - .map(chartDescriptor -> new JsChart(chartDescriptor, this)).toArray(JsChart[]::new); + .map(chartDescriptor -> new JsChart(chartDescriptor, this)).toArray(JsChart[]::new); JsObject.freeze(charts); return this.tableFetch.fetch(this, descriptor); @@ -149,11 +148,11 @@ public Promise refetch() { registerTableWithId(table, Js.cast(JsArray.of((double) i))); } Arrays.stream(charts) - .flatMap(c -> Arrays.stream(c.getSeries())) - .forEach(s -> s.initSources(plotHandlesToTables, plotHandlesToTableMaps)); + .flatMap(c -> Arrays.stream(c.getSeries())) + .forEach(s -> s.initSources(plotHandlesToTables, plotHandlesToTableMaps)); Arrays.stream(charts) - .flatMap(c -> Arrays.stream(c.getMultiSeries())) - .forEach(s -> s.initSources(plotHandlesToTableMaps)); + .flatMap(c -> Arrays.stream(c.getMultiSeries())) + .forEach(s -> s.initSources(plotHandlesToTableMaps)); return null; }).then(ignore -> { @@ -162,7 +161,7 @@ public Promise refetch() { return Promise.resolve(this); }, err -> { final FigureFetchError fetchError = new FigureFetchError(ofObject(err), - this.descriptor != null ? this.descriptor.getErrorsList() : new JsArray<>()); + this.descriptor != null ? 
this.descriptor.getErrorsList() : new JsArray<>()); final CustomEventInit init = CustomEventInit.create(); init.setDetail(fetchError); unsuppressEvents(); @@ -224,13 +223,13 @@ public void subscribe() { public void subscribe(@JsOptional DownsampleOptions forceDisableDownsample) { // iterate all series, mark all as subscribed, will enqueue a check automatically Arrays.stream(charts).flatMap(c -> Arrays.stream(c.getSeries())) - .forEach(s -> s.subscribe(forceDisableDownsample)); + .forEach(s -> s.subscribe(forceDisableDownsample)); } public void unsubscribe() { // iterate all series, mark all as unsubscribed Arrays.stream(charts).flatMap(c -> Arrays.stream(c.getSeries())) - .forEach(JsSeries::markUnsubscribed); + .forEach(JsSeries::markUnsubscribed); // clear all subscriptions, no need to do a real check activeFigureSubscriptions.keySet().forEach(FigureSubscription::unsubscribe); @@ -240,74 +239,60 @@ public void unsubscribe() { @JsIgnore public void downsampleNeeded(String message, Set series, double tableSize) { CustomEventInit failInit = CustomEventInit.create(); - failInit - .setDetail(JsPropertyMap.of("series", series, "message", message, "size", tableSize)); + failInit.setDetail(JsPropertyMap.of("series", series, "message", message, "size", tableSize)); fireEvent(EVENT_DOWNSAMPLENEEDED, failInit); } @JsIgnore public void downsampleFailed(String message, Set series, double tableSize) { CustomEventInit failInit = CustomEventInit.create(); - failInit - .setDetail(JsPropertyMap.of("series", series, "message", message, "size", tableSize)); + failInit.setDetail(JsPropertyMap.of("series", series, "message", message, "size", tableSize)); fireEvent(EVENT_DOWNSAMPLEFAILED, failInit); } private void updateSubscriptions() { - // mark that we're performing the subscription check, any future changes will need to - // re-enqueue this step + // mark that we're performing the subscription check, any future changes will need to re-enqueue this step subCheckEnqueued = false; - // 
Collect the subscriptions that we will need for the current series and their - // configurations - final Map> downsampleMappings = - Arrays.stream(charts) + // Collect the subscriptions that we will need for the current series and their configurations + final Map> downsampleMappings = Arrays.stream(charts) .flatMap(c -> Arrays.stream(c.getSeries())) .filter(JsSeries::isSubscribed) .filter(series -> series.getOneClick() == null - || (series.getOneClick().allRequiredValuesSet() - && series.getOneClick().getTable() != null)) + || (series.getOneClick().allRequiredValuesSet() && series.getOneClick().getTable() != null)) .collect( - Collectors.groupingBy( - this::tableForSeries, Collectors.groupingBy( - this::groupByAxisRange, - Collectors.reducing(DownsampleParams.EMPTY, this::makeParamsForSeries, - DownsampleParams::merge)))); - - final Set newSubscriptions = - downsampleMappings.entrySet().stream().flatMap(outerEntry -> { - JsTable table = outerEntry.getKey(); - Map mapping = outerEntry.getValue(); - return mapping.entrySet().stream().map(innerEntry -> { - AxisRange range = innerEntry.getKey(); - DownsampleParams params = innerEntry.getValue(); - return new FigureSubscription(this, table, range, range == null ? null : params, + this::tableForSeries, + Collectors.groupingBy( + this::groupByAxisRange, + Collectors.reducing(DownsampleParams.EMPTY, this::makeParamsForSeries, + DownsampleParams::merge)))); + + final Set newSubscriptions = downsampleMappings.entrySet().stream().flatMap(outerEntry -> { + JsTable table = outerEntry.getKey(); + Map mapping = outerEntry.getValue(); + return mapping.entrySet().stream().map(innerEntry -> { + AxisRange range = innerEntry.getKey(); + DownsampleParams params = innerEntry.getValue(); + return new FigureSubscription(this, table, range, range == null ? 
null : params, new HashSet<>(Arrays.asList(params.series))); - }); - }).collect(Collectors.toSet()); + }); + }).collect(Collectors.toSet()); - // Given those subscriptions, check our existing subscriptions to determine which new - // subscriptions + // Given those subscriptions, check our existing subscriptions to determine which new subscriptions // need to be created, and which existing ones are no longer needed. - // Note that when we compare these, we only check the original table and the mutations - // applied to that table - // (filters, downsample), we don't include the series instances themselves, as there is no - // need to re-subscribe - // just because a series is now being drawn which shares the same data as other visible - // series. - - // Both unsubscribing and creating a subscription will delegate to the FigureSubscription - // class to let it + // Note that when we compare these, we only check the original table and the mutations applied to that table + // (filters, downsample), we don't include the series instances themselves, as there is no need to re-subscribe + // just because a series is now being drawn which shares the same data as other visible series. + + // Both unsubscribing and creating a subscription will delegate to the FigureSubscription class to let it // get things started. 
final Set unseen = new HashSet<>(activeFigureSubscriptions.values()); for (final FigureSubscription newSubscription : newSubscriptions) { if (activeFigureSubscriptions.containsKey(newSubscription)) { // already present, update series (if needed), and let it fire events - activeFigureSubscriptions.get(newSubscription) - .replaceSeries(newSubscription.getSeries()); - JsLog.info("Saw same subscription again", - activeFigureSubscriptions.get(newSubscription)); + activeFigureSubscriptions.get(newSubscription).replaceSeries(newSubscription.getSeries()); + JsLog.info("Saw same subscription again", activeFigureSubscriptions.get(newSubscription)); // mark as seen unseen.remove(newSubscription); @@ -334,8 +319,7 @@ private JsTable tableForSeries(JsSeries s) { // otherwise grab the first table we can find // TODO loop, assert all match - return plotHandlesToTables - .get(s.getDescriptor().getDataSourcesList().getAt(0).getTableId()); + return plotHandlesToTables.get(s.getDescriptor().getDataSourcesList().getAt(0).getTableId()); } // First, break down the ranges so we can tell when they are entirely incompatible. 
They @@ -400,13 +384,12 @@ private AxisRange groupByAxisRange(JsSeries s) { if (!source.getColumnType().equals("io.deephaven.db.tables.utils.DBDateTime")) { continue; } - DownsampledAxisDetails downsampledAxisDetails = - downsampled.get(source.getAxis().getDescriptor()); + DownsampledAxisDetails downsampledAxisDetails = downsampled.get(source.getAxis().getDescriptor()); if (downsampledAxisDetails == null) { continue; } return new AxisRange(source.getDescriptor().getColumnName(), downsampledAxisDetails.min, - downsampledAxisDetails.max); + downsampledAxisDetails.max); } return null; } @@ -415,24 +398,24 @@ private boolean canDownsampleSeries(JsSeries series) { if (series.getShapesVisible() == Boolean.TRUE) { return false; } - // this was formerly a switch/case, but since we're referencing JS we need to use - // expressions that look like non-constants to java + // this was formerly a switch/case, but since we're referencing JS we need to use expressions that look like + // non-constants to java int plotStyle = series.getPlotStyle(); if (plotStyle == FigureDescriptor.SeriesPlotStyle.getBAR() - || plotStyle == FigureDescriptor.SeriesPlotStyle.getSTACKED_BAR() - || plotStyle == FigureDescriptor.SeriesPlotStyle.getPIE()) { + || plotStyle == FigureDescriptor.SeriesPlotStyle.getSTACKED_BAR() + || plotStyle == FigureDescriptor.SeriesPlotStyle.getPIE()) { // category charts, can't remove categories return false; } else if (plotStyle == FigureDescriptor.SeriesPlotStyle.getSCATTER()) { // pointless without shapes visible, this ensures we aren't somehow trying to draw it return false; } else if (plotStyle == FigureDescriptor.SeriesPlotStyle.getLINE() - || plotStyle == FigureDescriptor.SeriesPlotStyle.getAREA() - || plotStyle == FigureDescriptor.SeriesPlotStyle.getSTACKED_AREA() - || plotStyle == FigureDescriptor.SeriesPlotStyle.getHISTOGRAM() - || plotStyle == FigureDescriptor.SeriesPlotStyle.getOHLC() - || plotStyle == FigureDescriptor.SeriesPlotStyle.getSTEP() - || plotStyle 
== FigureDescriptor.SeriesPlotStyle.getERROR_BAR()) { + || plotStyle == FigureDescriptor.SeriesPlotStyle.getAREA() + || plotStyle == FigureDescriptor.SeriesPlotStyle.getSTACKED_AREA() + || plotStyle == FigureDescriptor.SeriesPlotStyle.getHISTOGRAM() + || plotStyle == FigureDescriptor.SeriesPlotStyle.getOHLC() + || plotStyle == FigureDescriptor.SeriesPlotStyle.getSTEP() + || plotStyle == FigureDescriptor.SeriesPlotStyle.getERROR_BAR()) { // allowed, fall through (listed so we can default to not downsample) return true; } @@ -446,8 +429,7 @@ private DownsampleParams makeParamsForSeries(JsSeries s) { // ... again, loop and find x axis, this time also y cols for (int i = 0; i < s.getSources().length; i++) { SeriesDataSource source = s.getSources()[i]; - DownsampledAxisDetails downsampledAxisDetails = - downsampled.get(source.getAxis().getDescriptor()); + DownsampledAxisDetails downsampledAxisDetails = downsampled.get(source.getAxis().getDescriptor()); if (downsampledAxisDetails == null) { yCols[yCols.length] = source.getDescriptor().getColumnName(); } else { @@ -457,8 +439,7 @@ private DownsampleParams makeParamsForSeries(JsSeries s) { return new DownsampleParams(new JsSeries[] {s}, yCols, pixels); } - // Then, aggregate the series instances and find the max pixel count, all the value columns to - // use + // Then, aggregate the series instances and find the max pixel count, all the value columns to use public static class DownsampleParams { static DownsampleParams EMPTY = new DownsampleParams(new JsSeries[0], new String[0], 0); @@ -474,15 +455,15 @@ public static class DownsampleParams { public DownsampleParams merge(DownsampleParams other) { return new DownsampleParams( - Stream.of(series, other.series) - .flatMap(Arrays::stream) - .distinct() - .toArray(JsSeries[]::new), - Stream.of(yCols, other.yCols) - .flatMap(Arrays::stream) - .distinct() - .toArray(String[]::new), - Math.max(pixelCount, other.pixelCount)); + Stream.of(series, other.series) + 
.flatMap(Arrays::stream) + .distinct() + .toArray(JsSeries[]::new), + Stream.of(yCols, other.yCols) + .flatMap(Arrays::stream) + .distinct() + .toArray(String[]::new), + Math.max(pixelCount, other.pixelCount)); } public JsSeries[] getSeries() { @@ -503,8 +484,7 @@ public void enqueueSubscriptionCheck() { if (!subCheckEnqueued) { for (JsTable table : tables) { if (table.isClosed()) { - throw new IllegalStateException( - "Cannot subscribe, at least one table is disconnected"); + throw new IllegalStateException("Cannot subscribe, at least one table is disconnected"); } } subCheckEnqueued = true; @@ -513,24 +493,24 @@ public void enqueueSubscriptionCheck() { } /** - * Verifies that the underlying tables have the columns the series are expected. Throws an - * FigureSourceException if not found + * Verifies that the underlying tables have the columns the series are expected. Throws an FigureSourceException if + * not found */ @JsIgnore public void verifyTables() { Arrays.stream(charts) - .flatMap(c -> Arrays.stream(c.getSeries())) - .forEach(s -> { - JsTable table = tableForSeries(s); - Arrays.stream(s.getSources()) - .forEach(source -> { - try { - table.findColumn(source.getDescriptor().getColumnName()); - } catch (NoSuchElementException e) { - throw new FigureSourceException(table, source, e.toString()); - } - }); - }); + .flatMap(c -> Arrays.stream(c.getSeries())) + .forEach(s -> { + JsTable table = tableForSeries(s); + Arrays.stream(s.getSources()) + .forEach(source -> { + try { + table.findColumn(source.getDescriptor().getColumnName()); + } catch (NoSuchElementException e) { + throw new FigureSourceException(table, source, e.toString()); + } + }); + }); } public void close() { @@ -567,8 +547,7 @@ public void updateDownsampleRange(AxisDescriptor axis, Integer pixels, Long min, if (pixels == null) { downsampled.remove(axis); } else { - if (axis.getLog() || axis.getType() != AxisDescriptor.AxisType.getX() - || axis.getInvert()) { + if (axis.getLog() || axis.getType() 
!= AxisDescriptor.AxisType.getX() || axis.getInvert()) { return; } downsampled.put(axis, new DownsampledAxisDetails(pixels, min, max)); @@ -623,17 +602,17 @@ public static class FigureTableFetchData { private FigureClose onClose; public FigureTableFetchData( - JsTable[] tables, - TableMap[] tableMaps, - Map plotHandlesToTableMaps) { + JsTable[] tables, + TableMap[] tableMaps, + Map plotHandlesToTableMaps) { this(tables, tableMaps, plotHandlesToTableMaps, null); } public FigureTableFetchData( - JsTable[] tables, - TableMap[] tableMaps, - Map plotHandlesToTableMaps, - FigureClose onClose) { + JsTable[] tables, + TableMap[] tableMaps, + Map plotHandlesToTableMaps, + FigureClose onClose) { this.tables = tables; this.tableMaps = tableMaps; this.plotHandlesToTableMaps = plotHandlesToTableMaps; @@ -656,10 +635,8 @@ public Promise fetch(JsFigure figure, FigureDescriptor descriptor) { JsTable[] tables; // TODO (deephaven-core#62) implement fetch for tablemaps - // // iterate through the tablemaps we're supposed to have, fetch keys for them, and - // construct them - TableMap[] tableMaps = new TableMap[0];// new - // TableMap[descriptor.getTableMapIdsList().length]; + // // iterate through the tablemaps we're supposed to have, fetch keys for them, and construct them + TableMap[] tableMaps = new TableMap[0];// new TableMap[descriptor.getTableMapIdsList().length]; // Promise[] tableMapPromises = new Promise[descriptor.getTablemapsList().length]; Map plotHandlesToTableMaps = new HashMap<>(); // for (int i = 0; i < descriptor.getTablemapsList().length; i++) { @@ -675,8 +652,7 @@ public Promise fetch(JsFigure figure, FigureDescriptor descriptor) { // decl.setHandle(descriptor.getTablemapsList().getAt(index)); // TableMap tableMap = new TableMap(connection, c -> c.onSuccess(decl)); // - // // never attempt a reconnect, we'll get a new tablemap with the figure when it - // reconnects + // // never attempt a reconnect, we'll get a new tablemap with the figure when it reconnects // 
tableMap.addEventListener(TableMap.EVENT_DISCONNECT, ignore -> tableMap.close()); // // JsArray plotIds = descriptor.getTablemapidsList().getAt(index).getIdsList(); @@ -688,13 +664,12 @@ public Promise fetch(JsFigure figure, FigureDescriptor descriptor) { // }); // } - // iterate through the table handles we're supposed to have and prep TableHandles for - // them + // iterate through the table handles we're supposed to have and prep TableHandles for them tables = new JsTable[descriptor.getTablesList().length]; for (int i = 0; i < descriptor.getTablesList().length; i++) { - ClientTableState clientTableState = connection.newStateFromUnsolicitedTable( - descriptor.getTablesList().getAt(i), "table " + i + " for plot"); + ClientTableState clientTableState = connection + .newStateFromUnsolicitedTable(descriptor.getTablesList().getAt(i), "table " + i + " for plot"); JsTable table = new JsTable(connection, clientTableState); // never attempt a reconnect, since we might have a different figure schema entirely table.addEventListener(JsTable.EVENT_DISCONNECT, ignore -> table.close()); @@ -704,8 +679,8 @@ public Promise fetch(JsFigure figure, FigureDescriptor descriptor) { connection.registerFigure(figure); return Promise.resolve( - new FigureTableFetchData(tables, tableMaps, plotHandlesToTableMaps, - f -> this.connection.releaseFigure(f))); + new FigureTableFetchData(tables, tableMaps, plotHandlesToTableMaps, + f -> this.connection.releaseFigure(f))); } } } diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/JsFigureFactory.java b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/JsFigureFactory.java index 5ba64442f6c..b1fce7f7300 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/JsFigureFactory.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/JsFigureFactory.java @@ -34,74 +34,69 @@ private static Promise create(JsFigureDescriptor descriptor) { JsArray tables = 
descriptor.getTables(); if (tables == null || tables.length == 0) { - return (Promise) (Promise) Promise - .reject("No tables provided for Figure creation"); + return (Promise) (Promise) Promise.reject("No tables provided for Figure creation"); } FigureDescriptor figureDescriptor = convertJsFigureDescriptor(descriptor); FetchFigureResponse response = new FetchFigureResponse(); response.setFigureDescriptor(figureDescriptor); return JsPromise.all(tables.map((table, index, all) -> table.copy(false))) - .then(tableCopies -> new JsFigure( - c -> c.apply(null, response), - (figure, descriptor1) -> { - // We need to listen for disconnects and reconnects - boolean[] isTableDisconnected = new boolean[tableCopies.length]; - ArrayList removerFns = new ArrayList<>(tableCopies.length * 3); - - for (int i = 0; i < tableCopies.length; i++) { - final int tableIndex = i; - // Tables are closed when the figure is closed, no need to remove listeners - // later - removerFns.add( - tableCopies[i].addEventListener(JsTable.EVENT_DISCONNECT, ignore -> { - isTableDisconnected[tableIndex] = true; - for (int j = 0; j < isTableDisconnected.length; j++) { - if (isTableDisconnected[j] && j != tableIndex) { - return; + .then(tableCopies -> new JsFigure( + c -> c.apply(null, response), + (figure, descriptor1) -> { + // We need to listen for disconnects and reconnects + boolean[] isTableDisconnected = new boolean[tableCopies.length]; + ArrayList removerFns = new ArrayList<>(tableCopies.length * 3); + + for (int i = 0; i < tableCopies.length; i++) { + final int tableIndex = i; + // Tables are closed when the figure is closed, no need to remove listeners later + removerFns.add(tableCopies[i].addEventListener(JsTable.EVENT_DISCONNECT, ignore -> { + isTableDisconnected[tableIndex] = true; + for (int j = 0; j < isTableDisconnected.length; j++) { + if (isTableDisconnected[j] && j != tableIndex) { + return; + } } - } - - figure.fireEvent(JsFigure.EVENT_DISCONNECT); - figure.unsubscribe(); - })); - 
removerFns.add( - tableCopies[i].addEventListener(JsTable.EVENT_RECONNECT, ignore -> { - isTableDisconnected[tableIndex] = false; - for (int j = 0; j < isTableDisconnected.length; j++) { - if (isTableDisconnected[j]) { - return; + + figure.fireEvent(JsFigure.EVENT_DISCONNECT); + figure.unsubscribe(); + })); + removerFns.add(tableCopies[i].addEventListener(JsTable.EVENT_RECONNECT, ignore -> { + isTableDisconnected[tableIndex] = false; + for (int j = 0; j < isTableDisconnected.length; j++) { + if (isTableDisconnected[j]) { + return; + } + } + + try { + figure.verifyTables(); + figure.fireEvent(JsFigure.EVENT_RECONNECT); + figure.enqueueSubscriptionCheck(); + } catch (JsFigure.FigureSourceException e) { + final CustomEventInit init = CustomEventInit.create(); + init.setDetail(e); + figure.fireEvent(JsFigure.EVENT_RECONNECTFAILED, init); + } + })); + removerFns.add(tableCopies[i].addEventListener(JsTable.EVENT_RECONNECTFAILED, err -> { + for (RemoverFn removerFn : removerFns) { + removerFn.remove(); } - } + figure.unsubscribe(); - try { - figure.verifyTables(); - figure.fireEvent(JsFigure.EVENT_RECONNECT); - figure.enqueueSubscriptionCheck(); - } catch (JsFigure.FigureSourceException e) { final CustomEventInit init = CustomEventInit.create(); - init.setDetail(e); + init.setDetail(err); figure.fireEvent(JsFigure.EVENT_RECONNECTFAILED, init); - } - })); - removerFns.add( - tableCopies[i].addEventListener(JsTable.EVENT_RECONNECTFAILED, err -> { - for (RemoverFn removerFn : removerFns) { - removerFn.remove(); - } - figure.unsubscribe(); - - final CustomEventInit init = CustomEventInit.create(); - init.setDetail(err); - figure.fireEvent(JsFigure.EVENT_RECONNECTFAILED, init); - })); - } - - return Promise.resolve(new JsFigure.FigureTableFetchData( - tableCopies, - new TableMap[0], - Collections.emptyMap())); - }).refetch()); + })); + } + + return Promise.resolve(new JsFigure.FigureTableFetchData( + tableCopies, + new TableMap[0], + Collections.emptyMap())); + }).refetch()); 
} private static FigureDescriptor convertJsFigureDescriptor(JsFigureDescriptor jsDescriptor) { @@ -114,8 +109,7 @@ private static FigureDescriptor convertJsFigureDescriptor(JsFigureDescriptor jsD descriptor.setRows(jsDescriptor.rows); JsArray tables = jsDescriptor.getTables(); - // The only thing used by the Figure with the tableIds (outside of the default fetchTables - // function) is the + // The only thing used by the Figure with the tableIds (outside of the default fetchTables function) is the // length of these tableIds. descriptor.setTablesList(new JsArray<>()); descriptor.getTablesList().length = tables.length; @@ -130,8 +124,7 @@ private static FigureDescriptor convertJsFigureDescriptor(JsFigureDescriptor jsD return descriptor; } - private static ChartDescriptor convertJsChartDescriptor(JsChartDescriptor jsDescriptor, - JsArray tables) { + private static ChartDescriptor convertJsChartDescriptor(JsChartDescriptor jsDescriptor, JsArray tables) { ChartDescriptor descriptor = new ChartDescriptor(); descriptor.setColspan(jsDescriptor.colspan); @@ -193,8 +186,7 @@ private static AxisDescriptor convertJsAxisDescriptor(JsAxisDescriptor jsDescrip descriptor.setMajorTicksVisible(jsDescriptor.majorTicksVisible); descriptor.setMinorTickCount(jsDescriptor.minorTickCount); descriptor.setGapBetweenMajorTicks(jsDescriptor.gapBetweenMajorTicks); - descriptor.setMajorTickLocationsList( - Js.>uncheckedCast(jsDescriptor.majorTickLocations)); + descriptor.setMajorTickLocationsList(Js.>uncheckedCast(jsDescriptor.majorTickLocations)); descriptor.setTickLabelAngle(jsDescriptor.tickLabelAngle); descriptor.setInvert(jsDescriptor.invert); descriptor.setIsTimeAxis(jsDescriptor.isTimeAxis); @@ -202,8 +194,8 @@ private static AxisDescriptor convertJsAxisDescriptor(JsAxisDescriptor jsDescrip return descriptor; } - private static SeriesDescriptor convertJsSeriesDescriptor(JsSeriesDescriptor jsDescriptor, - JsArray tables, Map axisMap) { + private static SeriesDescriptor 
convertJsSeriesDescriptor(JsSeriesDescriptor jsDescriptor, JsArray tables, + Map axisMap) { SeriesDescriptor descriptor = new SeriesDescriptor(); descriptor.setPlotStyle(Js.coerceToInt(jsDescriptor.plotStyle)); @@ -214,8 +206,7 @@ private static SeriesDescriptor convertJsSeriesDescriptor(JsSeriesDescriptor jsD if (jsDescriptor.shapesVisible != null) { descriptor.setShapesVisible(jsDescriptor.shapesVisible); } - descriptor.setGradientVisible( - jsDescriptor.gradientVisible != null ? jsDescriptor.gradientVisible : false); + descriptor.setGradientVisible(jsDescriptor.gradientVisible != null ? jsDescriptor.gradientVisible : false); descriptor.setLineColor(jsDescriptor.lineColor); descriptor.setPointLabelFormat(jsDescriptor.pointLabelFormat); descriptor.setXToolTipPattern(jsDescriptor.xToolTipPattern); @@ -238,8 +229,8 @@ private static SeriesDescriptor convertJsSeriesDescriptor(JsSeriesDescriptor jsD return descriptor; } - private static SourceDescriptor convertJsSourceDescriptor(JsSourceDescriptor jsDescriptor, - JsArray tables, Map axisMap) { + private static SourceDescriptor convertJsSourceDescriptor(JsSourceDescriptor jsDescriptor, JsArray tables, + Map axisMap) { SourceDescriptor descriptor = new SourceDescriptor(); descriptor.setAxisId(axisMap.get(jsDescriptor.axis).getId()); diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/JsMultiSeries.java b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/JsMultiSeries.java index b6a636c657f..8e2b49a9899 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/JsMultiSeries.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/JsMultiSeries.java @@ -16,8 +16,7 @@ public class JsMultiSeries { private final Map axes; private final JsChart parent; - public JsMultiSeries(MultiSeriesDescriptor descriptor, JsFigure figure, - Map axes, JsChart parent) { + public JsMultiSeries(MultiSeriesDescriptor descriptor, JsFigure figure, Map 
axes, JsChart parent) { this.descriptor = descriptor; this.figure = figure; @@ -27,20 +26,20 @@ public JsMultiSeries(MultiSeriesDescriptor descriptor, JsFigure figure, @JsIgnore public void initSources(Map plotHandlesToTableMaps) { - descriptor.getDataSourcesList().asList().stream() - .mapToInt(MultiSeriesSourceDescriptor::getTableMapId).distinct() - // TODO assert only one at this stage - .forEach(plotHandle -> { - TableMap tableMap = plotHandlesToTableMaps.get(plotHandle); - tableMap.getKeys().forEach((p0, p1, p2) -> { - requestTable(tableMap, p0); - return null; - }); - tableMap.addEventListener(TableMap.EVENT_KEYADDED, event -> { - requestTable(tableMap, ((CustomEvent) event).detail); - }); + descriptor.getDataSourcesList().asList().stream().mapToInt(MultiSeriesSourceDescriptor::getTableMapId) + .distinct() + // TODO assert only one at this stage + .forEach(plotHandle -> { + TableMap tableMap = plotHandlesToTableMaps.get(plotHandle); + tableMap.getKeys().forEach((p0, p1, p2) -> { + requestTable(tableMap, p0); + return null; + }); + tableMap.addEventListener(TableMap.EVENT_KEYADDED, event -> { + requestTable(tableMap, ((CustomEvent) event).detail); + }); - }); + }); } private void requestTable(TableMap tableMap, Object key) { @@ -55,37 +54,33 @@ private void requestTable(TableMap tableMap, Object key) { seriesInstance.setLineColor(getOrDefault(seriesName, descriptor.getLineColor())); seriesInstance.setShapeColor(getOrDefault(seriesName, descriptor.getPointColor())); seriesInstance.setLinesVisible(getOrDefault(seriesName, descriptor.getLinesVisible())); - seriesInstance - .setShapesVisible(getOrDefault(seriesName, descriptor.getPointsVisible())); + seriesInstance.setShapesVisible(getOrDefault(seriesName, descriptor.getPointsVisible())); Boolean gradientVisible = getOrDefault(seriesName, descriptor.getGradientVisible()); if (gradientVisible != null) { seriesInstance.setGradientVisible(gradientVisible); } - seriesInstance - 
.setYToolTipPattern(getOrDefault(seriesName, descriptor.getYToolTipPattern())); - seriesInstance - .setXToolTipPattern(getOrDefault(seriesName, descriptor.getXToolTipPattern())); + seriesInstance.setYToolTipPattern(getOrDefault(seriesName, descriptor.getYToolTipPattern())); + seriesInstance.setXToolTipPattern(getOrDefault(seriesName, descriptor.getXToolTipPattern())); seriesInstance.setShapeLabel(getOrDefault(seriesName, descriptor.getPointLabel())); seriesInstance.setShapeSize(getOrDefault(seriesName, descriptor.getPointSize())); seriesInstance.setShape(getOrDefault(seriesName, descriptor.getPointShape())); - seriesInstance - .setPointLabelFormat(getOrDefault(seriesName, descriptor.getPointLabelFormat())); + seriesInstance.setPointLabelFormat(getOrDefault(seriesName, descriptor.getPointLabelFormat())); int tableId = figure.registerTable(table); seriesInstance.setDataSourcesList( - descriptor.getDataSourcesList() - .map((multiSeriesSource, p1, p2) -> { - SourceDescriptor sourceDescriptor = new SourceDescriptor(); - sourceDescriptor.setColumnName(multiSeriesSource.getColumnName()); - sourceDescriptor.setAxisId(multiSeriesSource.getAxisId()); - sourceDescriptor.setTableId(tableId); - sourceDescriptor.setType(multiSeriesSource.getType()); - return sourceDescriptor; - }) + descriptor.getDataSourcesList() + .map((multiSeriesSource, p1, p2) -> { + SourceDescriptor sourceDescriptor = new SourceDescriptor(); + sourceDescriptor.setColumnName(multiSeriesSource.getColumnName()); + sourceDescriptor.setAxisId(multiSeriesSource.getAxisId()); + sourceDescriptor.setTableId(tableId); + sourceDescriptor.setType(multiSeriesSource.getType()); + return sourceDescriptor; + }) ); diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/JsSeries.java b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/JsSeries.java index 91efdf32711..19a9c75bb9c 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/JsSeries.java +++ 
b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/JsSeries.java @@ -37,8 +37,7 @@ public JsSeries(SeriesDescriptor series, JsFigure jsFigure, Map for (int i = 0; i < series.getDataSourcesList().length; i++) { SourceDescriptor dataSource = series.getDataSourcesList().getAt(i); - sources[sources.length] = - new SeriesDataSource(axes.get(dataSource.getAxisId()), dataSource); + sources[sources.length] = new SeriesDataSource(axes.get(dataSource.getAxisId()), dataSource); // set up oneclick if needed, make sure series make sense if (oneClick == null) { @@ -55,9 +54,9 @@ public JsSeries(SeriesDescriptor series, JsFigure jsFigure, Map } /** - * Post-construct initialization, once we have tables loaded, allowing js to get the type of the - * data that it will be consuming. This is safe to do post-construction, since we don't actually - * return the JsFigure etc until tables are loaded. + * Post-construct initialization, once we have tables loaded, allowing js to get the type of the data that it will + * be consuming. This is safe to do post-construction, since we don't actually return the JsFigure etc until tables + * are loaded. */ @JsIgnore public void initSources(Map tables, Map tableMaps) { @@ -68,8 +67,7 @@ public void initSources(Map tables, Map tab } /** - * JS doesn't support method overloads, so we just ignore this one and mark the arg as optional - * in the JS version. + * JS doesn't support method overloads, so we just ignore this one and mark the arg as optional in the JS version. */ @JsIgnore public void subscribe() { @@ -77,8 +75,7 @@ public void subscribe() { } public void subscribe(@JsOptional DownsampleOptions forceDisableDownsample) { - this.downsample = - forceDisableDownsample == null ? DownsampleOptions.DEFAULT : forceDisableDownsample; + this.downsample = forceDisableDownsample == null ? 
DownsampleOptions.DEFAULT : forceDisableDownsample; subscribed = true; jsFigure.enqueueSubscriptionCheck(); } diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/JsSeriesDescriptor.java b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/JsSeriesDescriptor.java index 4d48ad04acd..1203970c410 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/JsSeriesDescriptor.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/JsSeriesDescriptor.java @@ -42,8 +42,7 @@ public JsSeriesDescriptor(JsPropertyMap source, Map dataSources = JsData.getRequiredProperty(source, "dataSources").cast(); this.dataSources = Js.uncheckedCast(dataSources.map( - (sourceSource, index, - all) -> new JsSourceDescriptor((JsPropertyMap) sourceSource, axisMap))); + (sourceSource, index, all) -> new JsSourceDescriptor((JsPropertyMap) sourceSource, axisMap))); linesVisible = JsData.getNullableBooleanProperty(source, "linesVisible"); shapesVisible = JsData.getNullableBooleanProperty(source, "shapesVisible"); gradientVisible = JsData.getNullableBooleanProperty(source, "gradientVisible"); diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/OneClick.java b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/OneClick.java index 870198956cc..520e9431a5d 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/OneClick.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/OneClick.java @@ -57,7 +57,7 @@ public Object getColumns() { JsPropertyMap[] fakeColumns = new JsPropertyMap[oneClick.getColumnsList().length]; for (int i = 0; i < fakeColumns.length; i++) { fakeColumns[i] = JsPropertyMap.of("name", oneClick.getColumnsList().getAt(i), "type", - oneClick.getColumnsList().getAt(i)); + oneClick.getColumnsList().getAt(i)); } return fakeColumns; } @@ -119,8 +119,7 @@ private Object[] getCurrentKeys() { return new 
Object[] {key}; } - // Some of the values aren't set, need to iterate through all the table map keys and select - // the ones that match + // Some of the values aren't set, need to iterate through all the table map keys and select the ones that match return Arrays.stream(JsArray.from(tableMap.getKeys())).filter(tableKey -> { if (!(tableKey instanceof String[])) { return false; @@ -147,20 +146,20 @@ private Promise doFetchTable(Object[] keys) { return tableMap.getTable(keys[0]); } else { Promise[] promises = - Arrays.stream(keys).map(key -> tableMap.getTable(key)).toArray(Promise[]::new); + Arrays.stream(keys).map(key -> tableMap.getTable(key)).toArray(Promise[]::new); return JsPromise.all(promises) - .then(resolved -> { - JsTable[] tables = Arrays.stream(resolved).filter(table -> table != null) - .toArray(JsTable[]::new); - if (tables.length > 1) { - return tables[0].getConnection().mergeTables(tables, tableMap); - } else if (tables.length == 1) { - return Promise.resolve(tables[0]); - } else { - // No keys matched, just hand back a null table - return Promise.resolve((JsTable) null); - } - }); + .then(resolved -> { + JsTable[] tables = + Arrays.stream(resolved).filter(table -> table != null).toArray(JsTable[]::new); + if (tables.length > 1) { + return tables[0].getConnection().mergeTables(tables, tableMap); + } else if (tables.length == 1) { + return Promise.resolve(tables[0]); + } else { + // No keys matched, just hand back a null table + return Promise.resolve((JsTable) null); + } + }); } } diff --git a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/SeriesDataSource.java b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/SeriesDataSource.java index eeed755c31b..6c6273d93cb 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/SeriesDataSource.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/api/widget/plot/SeriesDataSource.java @@ -19,8 +19,8 @@ public SeriesDataSource(JsAxis 
axis, SourceDescriptor type) { public void initColumnType(Map tables) { if (sourceDescriptor.getTableId() != -1) { - columnType = tables.get(sourceDescriptor.getTableId()) - .findColumn(sourceDescriptor.getColumnName()).getType(); + columnType = + tables.get(sourceDescriptor.getTableId()).findColumn(sourceDescriptor.getColumnName()).getType(); } else if (sourceDescriptor.getTableMapId() != -1) { columnType = sourceDescriptor.getColumnType(); } else { diff --git a/web/client-api/src/main/java/io/deephaven/web/client/fu/CancellablePromise.java b/web/client-api/src/main/java/io/deephaven/web/client/fu/CancellablePromise.java index d7711926aa5..3a6874c4ad5 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/fu/CancellablePromise.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/fu/CancellablePromise.java @@ -12,8 +12,7 @@ * * We do not extend Promise as js interop is not yet good enough to extend native types. * - * So, instead, we just hack on a "cancel" property pointing to a function that can be invoked only - * from javascript. + * So, instead, we just hack on a "cancel" property pointing to a function that can be invoked only from javascript. 
*/ @JsType(namespace = JsPackage.GLOBAL, name = "Promise", isNative = true) public class CancellablePromise extends Promise { @@ -26,8 +25,7 @@ public CancellablePromise(PromiseExecutorCallbackFn executor) { } @JsOverlay - public static CancellablePromise from(PromiseExecutorCallbackFn exe, - JsRunnable cancel) { + public static CancellablePromise from(PromiseExecutorCallbackFn exe, JsRunnable cancel) { CancellablePromise promise = new CancellablePromise<>(exe); promise.cancel = cancel; return promise; diff --git a/web/client-api/src/main/java/io/deephaven/web/client/fu/JsData.java b/web/client-api/src/main/java/io/deephaven/web/client/fu/JsData.java index 46c481d1190..290b85b894a 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/fu/JsData.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/fu/JsData.java @@ -6,8 +6,7 @@ import jsinterop.base.JsPropertyMap; /** - * A place to collect utility methods for managing data in/out of javascript / handling column type - * information. + * A place to collect utility methods for managing data in/out of javascript / handling column type information. 
*/ public class JsData { /** @@ -45,8 +44,7 @@ public static JsArray newArray(String type) { */ public static Any getRequiredProperty(JsPropertyMap source, String propertyName) { if (!source.has(propertyName)) { - throw new IllegalArgumentException( - "Property '" + propertyName + "' must be specified."); + throw new IllegalArgumentException("Property '" + propertyName + "' must be specified."); } Any value = source.getAny(propertyName); if (value == null) { @@ -59,8 +57,7 @@ public static Any getProperty(JsPropertyMap source, String propertyName) return getProperty(source, propertyName, null); } - public static Any getProperty(JsPropertyMap source, String propertyName, - Any defaultValue) { + public static Any getProperty(JsPropertyMap source, String propertyName, Any defaultValue) { if (source.has(propertyName)) { return source.getAny(propertyName); } @@ -68,8 +65,7 @@ public static Any getProperty(JsPropertyMap source, String propertyName, return defaultValue; } - public static String getRequiredStringProperty(JsPropertyMap source, - String propertyName) { + public static String getRequiredStringProperty(JsPropertyMap source, String propertyName) { return getRequiredProperty(source, propertyName).asString(); } @@ -77,8 +73,7 @@ public static String getStringProperty(JsPropertyMap source, String prop return getStringProperty(source, propertyName, null); } - public static String getStringProperty(JsPropertyMap source, String propertyName, - String defaultValue) { + public static String getStringProperty(JsPropertyMap source, String propertyName, String defaultValue) { Any value = getProperty(source, propertyName, Js.asAny(defaultValue)); return value == null ? 
null : value.asString(); } @@ -91,14 +86,12 @@ public static int getIntProperty(JsPropertyMap source, String propertyNa return getIntProperty(source, propertyName, 0); } - public static int getIntProperty(JsPropertyMap source, String propertyName, - int defaultValue) { + public static int getIntProperty(JsPropertyMap source, String propertyName, int defaultValue) { Any value = getProperty(source, propertyName, Js.asAny(defaultValue)); return value == null ? 0 : value.asInt(); } - public static double getRequiredDoubleProperty(JsPropertyMap source, - String propertyName) { + public static double getRequiredDoubleProperty(JsPropertyMap source, String propertyName) { return getRequiredProperty(source, propertyName).asDouble(); } @@ -106,25 +99,22 @@ public static double getDoubleProperty(JsPropertyMap source, String prop return getDoubleProperty(source, propertyName, 0); } - public static double getDoubleProperty(JsPropertyMap source, String propertyName, - double defaultValue) { + public static double getDoubleProperty(JsPropertyMap source, String propertyName, double defaultValue) { Any value = getProperty(source, propertyName, Js.asAny(defaultValue)); return value == null ? 0 : value.asDouble(); } - public static Double getNullableDoubleProperty(JsPropertyMap source, - String propertyName) { + public static Double getNullableDoubleProperty(JsPropertyMap source, String propertyName) { return getNullableDoubleProperty(source, propertyName, null); } - public static Double getNullableDoubleProperty(JsPropertyMap source, - String propertyName, Double defaultValue) { + public static Double getNullableDoubleProperty(JsPropertyMap source, String propertyName, + Double defaultValue) { Any value = getProperty(source, propertyName, Js.asAny(defaultValue)); return value == null ? 
null : value.asDouble(); } - public static boolean getRequiredBooleanProperty(JsPropertyMap source, - String propertyName) { + public static boolean getRequiredBooleanProperty(JsPropertyMap source, String propertyName) { return getRequiredProperty(source, propertyName).asBoolean(); } @@ -132,19 +122,17 @@ public static boolean getBooleanProperty(JsPropertyMap source, String pr return getBooleanProperty(source, propertyName, false); } - public static boolean getBooleanProperty(JsPropertyMap source, String propertyName, - boolean defaultValue) { + public static boolean getBooleanProperty(JsPropertyMap source, String propertyName, boolean defaultValue) { Any value = getProperty(source, propertyName, Js.asAny(defaultValue)); return value == null ? false : value.asBoolean(); } - public static Boolean getNullableBooleanProperty(JsPropertyMap source, - String propertyName) { + public static Boolean getNullableBooleanProperty(JsPropertyMap source, String propertyName) { return getNullableBooleanProperty(source, propertyName, null); } - public static Boolean getNullableBooleanProperty(JsPropertyMap source, - String propertyName, Boolean defaultValue) { + public static Boolean getNullableBooleanProperty(JsPropertyMap source, String propertyName, + Boolean defaultValue) { Any value = getProperty(source, propertyName, Js.asAny(defaultValue)); return value == null ? null : value.asBoolean(); } diff --git a/web/client-api/src/main/java/io/deephaven/web/client/fu/JsLog.java b/web/client-api/src/main/java/io/deephaven/web/client/fu/JsLog.java index 0c35af784e6..914b9e8bbb0 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/fu/JsLog.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/fu/JsLog.java @@ -5,8 +5,7 @@ import jsinterop.annotations.JsProperty; /** - * A place where we can shuffle off things we want to log for development, but not necessarily in - * production. 
+ * A place where we can shuffle off things we want to log for development, but not necessarily in production. * */ public class JsLog { diff --git a/web/client-api/src/main/java/io/deephaven/web/client/fu/LazyPromise.java b/web/client-api/src/main/java/io/deephaven/web/client/fu/LazyPromise.java index 1157baff2ae..cb1c5dddf77 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/fu/LazyPromise.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/fu/LazyPromise.java @@ -15,8 +15,8 @@ /** * Tired of needing to create annoying promise lambdas? * - * If you want to create a promise early in a method's scope, and then configure it's handling of - * resolve/reject callbacks later, or in a conditional nature, than LazyPromise is for you! + * If you want to create a promise early in a method's scope, and then configure it's handling of resolve/reject + * callbacks later, or in a conditional nature, than LazyPromise is for you! * */ public class LazyPromise implements PromiseLike { @@ -42,9 +42,8 @@ public static void runLater(JsRunnable task) { * @param timeout How many millis to wait until failing the promise with a timeout. * @return a real promise that we will resolve when we are resolved. * - * This method overload is not strictly necessary to call when explicitly wiring up - * failure handling for this LazyPromise which you can guarantee will be eventually - * called. + * This method overload is not strictly necessary to call when explicitly wiring up failure handling for + * this LazyPromise which you can guarantee will be eventually called. * * To create a promise without a timeout, see {@link #asPromise()}. */ @@ -56,9 +55,9 @@ public final Promise asPromise(int timeout) { /** * @return a real promise that we will resolve when we are resolved. * - * Use this method if you are safely wiring up failure handling for this LazyPromise. 
If - * you aren't explicitly wiring up calls to {@link #fail(Object)} this LazyPromise, then - * you should setup a timeout (see {@link #asPromise(int)} and {@link #timeout(int)} + * Use this method if you are safely wiring up failure handling for this LazyPromise. If you aren't + * explicitly wiring up calls to {@link #fail(Object)} this LazyPromise, then you should setup a timeout + * (see {@link #asPromise(int)} and {@link #timeout(int)} */ public final Promise asPromise() { return new Promise<>(((resolve, reject) -> { @@ -93,19 +92,19 @@ private void cancel() { public final CancellablePromise asPromise(JsFunction mapper, JsRunnable cancel) { return CancellablePromise.from( - (resolve, reject) -> { - onSuccess(result -> { - final V mapped; - try { - mapped = mapper.apply(result); - } catch (Exception e) { - reject.onInvoke(e.getMessage()); - return; - } - resolve.onInvoke(mapped); - }); - onFailure(reject::onInvoke); - }, cancel.andThen(this::cancel)); + (resolve, reject) -> { + onSuccess(result -> { + final V mapped; + try { + mapped = mapper.apply(result); + } catch (Exception e) { + reject.onInvoke(e.getMessage()); + return; + } + resolve.onInvoke(mapped); + }); + onFailure(reject::onInvoke); + }, cancel.andThen(this::cancel)); } protected boolean spyReject(RejectCallbackFn reject) { @@ -226,13 +225,13 @@ public void onFailure(JsConsumer failure) { /** * Create a deferred promise from a known value. * - * Rather than resolve immediately, this forces asynchronicity, to give the calling code time to - * unwind its stack before running. + * Rather than resolve immediately, this forces asynchronicity, to give the calling code time to unwind its stack + * before running. 
*/ public static Promise promiseLater(T table) { // runs on next microtask return Promise.resolve((Object) null) - .then(ignored -> Promise.resolve(table)); + .then(ignored -> Promise.resolve(table)); } public static IThenable reject(Object failure) { @@ -253,9 +252,8 @@ public LazyPromise timeout(int wait) { /** * Eats exceptions in exchange for messages logged to the console. * - * Only use this when it is preferable to print a nice error message instead of leaving uncaught - * promises to bubble (confusing) stack traces into console, especially if it is reasonable for - * a given promise to fail. + * Only use this when it is preferable to print a nice error message instead of leaving uncaught promises to bubble + * (confusing) stack traces into console, especially if it is reasonable for a given promise to fail. * * This prevents "uncaught exceptions" when we have tests that expect failure. */ diff --git a/web/client-api/src/main/java/io/deephaven/web/client/state/ActiveTableBinding.java b/web/client-api/src/main/java/io/deephaven/web/client/state/ActiveTableBinding.java index b5abf09bd62..3638d98578f 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/state/ActiveTableBinding.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/state/ActiveTableBinding.java @@ -7,21 +7,20 @@ import io.deephaven.web.shared.data.Viewport; /** - * An active binding describes the link between a {@link JsTable} and the {@link ClientTableState} - * it is currently using. + * An active binding describes the link between a {@link JsTable} and the {@link ClientTableState} it is currently + * using. * - * Each JsTable can only have one ActiveTableBinding; in order to get a new one, you must "lose" - * your current one (note the private constructor). + * Each JsTable can only have one ActiveTableBinding; in order to get a new one, you must "lose" your current one (note + * the private constructor). * * This allows us to control transitions to/from an active state. 
* - * Currently, the new state is created and a new binding built off of it, then the old binding is - * paused. + * Currently, the new state is created and a new binding built off of it, then the old binding is paused. * * Equality semantics of this object are based solely on the JsTable object identity. * - * Instances of these objects start life in the constructor of {@link JsTable}. From there, the only - * way to get a new one is to tell the old one to change state. + * Instances of these objects start life in the constructor of {@link JsTable}. From there, the only way to get a new + * one is to tell the old one to change state. * */ public class ActiveTableBinding implements HasTableState { @@ -41,8 +40,8 @@ public class ActiveTableBinding implements HasTableState { private final ClientTableState state; /** - * This instance's one and only "paused form". We tightly control instances of these, so you can - * put them into a JsMap / IdentityHashMap. + * This instance's one and only "paused form". We tightly control instances of these, so you can put them into a + * JsMap / IdentityHashMap. */ private final PausedTableBinding paused; @@ -51,11 +50,11 @@ public class ActiveTableBinding implements HasTableState { /** * Our source state in a "paused form". * - * Note that this is "paused with respect to a given table"; a given {@link ClientTableState} - * can have many concurrent Paused|ActiveTableBindings. + * Note that this is "paused with respect to a given table"; a given {@link ClientTableState} can have many + * concurrent Paused|ActiveTableBindings. * - * Note that all paused|active connected bindings should share the same table. When a new table - * copy is made, the new table must copy all bindings to the new table. + * Note that all paused|active connected bindings should share the same table. When a new table copy is made, the + * new table must copy all bindings to the new table. 
*/ private PausedTableBinding rollback; private Viewport viewport; @@ -64,8 +63,8 @@ public class ActiveTableBinding implements HasTableState { private boolean subscriptionPending; private ActiveTableBinding( - JsTable table, - ClientTableState state) { + JsTable table, + ClientTableState state) { this.table = table; this.state = state; paused = new PausedTableBinding(this); @@ -75,11 +74,9 @@ public ActiveTableBinding changeState(ClientTableState newState) { HasTableState existing = newState.getBinding(table); if (newState == this.state) { if (existing.isActive()) { - assert this == existing - : "Multiple bindings found for " + newState + " and " + table; + assert this == existing : "Multiple bindings found for " + newState + " and " + table; } else { - assert paused == existing - : "Multiple bindings found for " + newState + " and " + table; + assert paused == existing : "Multiple bindings found for " + newState + " and " + table; newState.unpause(table); } return this; @@ -141,7 +138,7 @@ public boolean equals(Object o) { // so we wind up using the object itself in map keys, // ensure we pin lots of memory even if client code would have GC'ed. 
:'( return getClass() == o.getClass() && - (table == ((ActiveTableBinding) o).table); + (table == ((ActiveTableBinding) o).table); } @Override @@ -150,16 +147,13 @@ public int hashCode() { } public void setRollback(PausedTableBinding rollback) { - assert rollback == null - || rollback.getState().getResolution() == ClientTableState.ResolutionState.RUNNING - : "Can't use binding as rollback if it is in state " - + rollback.getState().getResolution(); + assert rollback == null || rollback.getState().getResolution() == ClientTableState.ResolutionState.RUNNING + : "Can't use binding as rollback if it is in state " + rollback.getState().getResolution(); this.rollback = rollback; } public static ActiveTableBinding create(JsTable table, ClientTableState state) { - assert state.getActiveBinding(table) == null - : "Cannot create binding for table more than once"; + assert state.getActiveBinding(table) == null : "Cannot create binding for table more than once"; final ActiveTableBinding sub = new ActiveTableBinding(table, state); if (!state.isRunning()) { state.onFailed(e -> { @@ -188,8 +182,7 @@ public void rollback() { return; } assert rollback != null; - assert table.getBinding() == this - : "You should only perform a rollback from the current active state"; + assert table.getBinding() == this : "You should only perform a rollback from the current active state"; switch (rollback.getState().getResolution()) { case RUNNING: table.setState(rollback.getState()); @@ -205,8 +198,7 @@ public void rollback() { table.setState(rollback.getState()); break; default: - throw new UnsupportedOperationException( - "Cannot rollback to a " + rollback.getState() + " state"); + throw new UnsupportedOperationException("Cannot rollback to a " + rollback.getState() + " state"); } } diff --git a/web/client-api/src/main/java/io/deephaven/web/client/state/ClientTableState.java b/web/client-api/src/main/java/io/deephaven/web/client/state/ClientTableState.java index 826e6544a89..4cde7b76381 100644 --- 
a/web/client-api/src/main/java/io/deephaven/web/client/state/ClientTableState.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/state/ClientTableState.java @@ -35,56 +35,51 @@ /** * Container for state information pertaining to a given {@link TableHandle}. * - * Where JsTable is a mutable object which can point to any given ClientTableState, each - * ClientTableState represents an immutable table configuration / handle which can have zero or more - * JsTable objects bound to it. + * Where JsTable is a mutable object which can point to any given ClientTableState, each ClientTableState represents an + * immutable table configuration / handle which can have zero or more JsTable objects bound to it. * - * This type is used to replace Table#StackEntry, and works with TableList to form an arbitrary - * "chain of table mutations leading to table state at handle N". + * This type is used to replace Table#StackEntry, and works with TableList to form an arbitrary "chain of table + * mutations leading to table state at handle N". * - * Each JsTable maintains their own TableList / linking structure of ClientTableState instances, and - * each CTS holds maps of "the state each bound JsTable holds -for this particular handle-". + * Each JsTable maintains their own TableList / linking structure of ClientTableState instances, and each CTS holds maps + * of "the state each bound JsTable holds -for this particular handle-". * - * Being mutable, a JsTable can change its binding to any given CTS instance, and will need to - * temporarily abandon it's current state when transitioning to a new one. We need to be able to - * reinstate the last good active state whenever a request fails, otherwise we will releaseTable - * from the given state. + * Being mutable, a JsTable can change its binding to any given CTS instance, and will need to temporarily abandon it's + * current state when transitioning to a new one. 
We need to be able to reinstate the last good active state whenever a + * request fails, otherwise we will releaseTable from the given state. * - * Once no JsTable is making any use of any ClientTableState, that state should be released on the - * server and discarded by the client (an interim state which an active state is based on must - * always be retained). + * Once no JsTable is making any use of any ClientTableState, that state should be released on the server and discarded + * by the client (an interim state which an active state is based on must always be retained). * - * By making the JsTable read it's state from a ClientTableState, switching to another state should - * be relatively seamless; JsTable is mutable and as stateless as possible, with as much state as - * possible shoved into CTS (so it can go away and be restored sanely). + * By making the JsTable read it's state from a ClientTableState, switching to another state should be relatively + * seamless; JsTable is mutable and as stateless as possible, with as much state as possible shoved into CTS (so it can + * go away and be restored sanely). * * Consider making this a js type with restricted, read-only property access. */ public final class ClientTableState extends TableConfig { public enum ResolutionState { /** - * Table has been created on the client, but client does not yet have a handle ID referring - * to the table on the server + * Table has been created on the client, but client does not yet have a handle ID referring to the table on the + * server */ UNRESOLVED, /** - * Table exists on both client and server, but isn't yet ready to be used - no definition - * has been received. + * Table exists on both client and server, but isn't yet ready to be used - no definition has been received. */ RESOLVED, /** - * Table exists on both the client and the server, and is able to be used. This is - * sort-of-terminal, as it may change during a reconnect. 
+ * Table exists on both the client and the server, and is able to be used. This is sort-of-terminal, as it may + * change during a reconnect. */ RUNNING, /** - * The table no longer exists on the server, and will not be recreated. This is a terminal - * state. + * The table no longer exists on the server, and will not be recreated. This is a terminal state. */ RELEASED, /** - * The table failed to be created on the server and so cannot be used. This is - * sort-of-terminal, as it may change during a reconnect. + * The table failed to be created on the server and so cannot be used. This is sort-of-terminal, as it may + * change during a reconnect. */ FAILED } @@ -143,9 +138,9 @@ public enum ResolutionState { * * Any state can have N other states after it, but only one state before it. * - * We maintain this linked list so we can reconstitute a valid TableList from any given table - * state (TableList maintains it's own linking structure on top of states, as each Table will - * use a private TableList to track it's own state stack). + * We maintain this linked list so we can reconstitute a valid TableList from any given table state (TableList + * maintains it's own linking structure on top of states, as each Table will use a private TableList to track it's + * own state stack). 
* */ private final ClientTableState source; @@ -154,27 +149,26 @@ public enum ResolutionState { private boolean stayDead; public ClientTableState( - WorkerConnection connection, - TableTicket handle, - JsTableFetch fetcher, - String fetchSummary) { - this(connection, handle, Collections.emptyList(), Collections.emptyList(), - Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), - Collections.emptyList(), null, fetcher, fetchSummary); + WorkerConnection connection, + TableTicket handle, + JsTableFetch fetcher, + String fetchSummary) { + this(connection, handle, Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), + Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), null, fetcher, fetchSummary); } private ClientTableState( - WorkerConnection connection, - TableTicket handle, - List sort, - List conditions, - List filter, - List customColumns, - List dropColumns, - List viewColumns, - ClientTableState source, - JsTableFetch fetch, - String fetchSummary) { + WorkerConnection connection, + TableTicket handle, + List sort, + List conditions, + List filter, + List customColumns, + List dropColumns, + List viewColumns, + ClientTableState source, + JsTableFetch fetch, + String fetchSummary) { super(sort, conditions, filter, customColumns, dropColumns, viewColumns); this.touch = System.currentTimeMillis(); this.fetch = fetch; @@ -182,8 +176,7 @@ private ClientTableState( this.handle = handle; this.size = SIZE_UNINITIALIZED; // make sure a "change" to 0 fires an event. columnLookup = resetLookup(); - this.resolution = - handle.isResolved() ? ResolutionState.RESOLVED : ResolutionState.UNRESOLVED; + this.resolution = handle.isResolved() ? 
ResolutionState.RESOLVED : ResolutionState.UNRESOLVED; this.source = source; this.fetchSummary = fetchSummary; @@ -250,21 +243,17 @@ public ClientTableState newState(TableTicket newHandle, TableConfig config) { final List viewColumns = config.getViewColumns(); final ClientTableState newState = new ClientTableState( - connection, newHandle, sorts, conditions, filters, customColumns, dropColumns, - viewColumns, this, - (c, s, metadata) -> { - // This fetcher will not be used for the initial fetch, only for refetches. - // Importantly, any CTS with a source (what we are creating here; source=this, - // above) - // is revived, it does not use the refetcher; we directly rebuild batch operations - // instead. - // It may make sense to actually have batches route through reviver instead. - connection.getReviver().revive(metadata, s); - }, config.toSummaryString()); + connection, newHandle, sorts, conditions, filters, customColumns, dropColumns, viewColumns, this, + (c, s, metadata) -> { + // This fetcher will not be used for the initial fetch, only for refetches. + // Importantly, any CTS with a source (what we are creating here; source=this, above) + // is revived, it does not use the refetcher; we directly rebuild batch operations instead. + // It may make sense to actually have batches route through reviver instead. 
+ connection.getReviver().revive(metadata, s); + }, config.toSummaryString()); newState.setFlat(config.isFlat()); if (!isRunning()) { - onFailed(reason -> newState.setResolution(ResolutionState.FAILED, reason), - JsRunnable.doNothing()); + onFailed(reason -> newState.setResolution(ResolutionState.FAILED, reason), JsRunnable.doNothing()); } touch(); return newState; @@ -285,8 +274,7 @@ public void setResolution(ResolutionState resolution) { public void setResolution(ResolutionState resolution, String failMsg) { if (this.resolution == ResolutionState.RELEASED) { - assert resolution == ResolutionState.RELEASED - : "Trying to unrelease CTS " + this + " to " + resolution; + assert resolution == ResolutionState.RELEASED : "Trying to unrelease CTS " + this + " to " + resolution; return; } this.resolution = resolution; @@ -319,7 +307,7 @@ public void setResolution(ResolutionState resolution, String failMsg) { public List getCustomColumnsString() { return getCustomColumns().stream().map(CustomColumnDescriptor::getExpression) - .collect(Collectors.toList()); + .collect(Collectors.toList()); } public Column[] getColumns() { @@ -378,7 +366,7 @@ private void createColumns() { // iterate through the columns, combine format columns into the normal model Map byNameMap = Arrays.stream(columnDefinitions) - .collect(columnCollector(false)); + .collect(columnCollector(false)); Column[] columns = new Column[0]; allColumns = new Column[0]; Map columnDescriptions = new HashMap<>(); @@ -404,12 +392,12 @@ private void createColumns() { // note the use of columns.length as jsIndex is accurate for visible columns allColumns[allColumns.length] = makeColumn(columns.length, - definition, - format == null || !format.isNumberFormatColumn() ? null : format.getColumnIndex(), - style == null ? null : style.getColumnIndex(), - isPartitionColumn, - format == null || format.isNumberFormatColumn() ? 
null : format.getColumnIndex(), - columnDescriptions.get(name)); + definition, + format == null || !format.isNumberFormatColumn() ? null : format.getColumnIndex(), + style == null ? null : style.getColumnIndex(), + isPartitionColumn, + format == null || format.isNumberFormatColumn() ? null : format.getColumnIndex(), + columnDescriptions.get(name)); if (definition.isVisible()) { columns[columns.length] = allColumns[allColumns.length - 1]; @@ -420,18 +408,16 @@ private void createColumns() { this.columnLookup = resetLookup(); } - private static Column makeColumn(int jsIndex, ColumnDefinition definition, - Integer numberFormatIndex, Integer styleIndex, boolean isPartitionColumn, - Integer formatStringIndex, String description) { - return new Column(jsIndex, definition.getColumnIndex(), numberFormatIndex, styleIndex, - definition.getType(), definition.getName(), isPartitionColumn, formatStringIndex, - description); + private static Column makeColumn(int jsIndex, ColumnDefinition definition, Integer numberFormatIndex, + Integer styleIndex, boolean isPartitionColumn, Integer formatStringIndex, String description) { + return new Column(jsIndex, definition.getColumnIndex(), numberFormatIndex, styleIndex, definition.getType(), + definition.getName(), isPartitionColumn, formatStringIndex, description); } private static Collector> columnCollector( - boolean ordered) { + boolean ordered) { return Collectors.toMap(ColumnDefinition::getName, Function.identity(), assertNoDupes(), - ordered ? LinkedHashMap::new : HashMap::new); + ordered ? 
LinkedHashMap::new : HashMap::new); } private static BinaryOperator assertNoDupes() { @@ -465,8 +451,7 @@ public void onRunning(JsConsumer callback, JsRunnable other) { onRunning(callback, other.asConsumer(), other); } - public void onRunning(JsConsumer callback, JsConsumer failed, - JsRunnable release) { + public void onRunning(JsConsumer callback, JsConsumer failed, JsRunnable release) { if (resolution == ResolutionState.RELEASED) { release.run(); return; @@ -475,14 +460,12 @@ public void onRunning(JsConsumer callback, JsConsumer return; } if (resolution == ResolutionState.RUNNING) { - // wait a moment and execute the callback, after verifying that we remained in that - // state + // wait a moment and execute the callback, after verifying that we remained in that state LazyPromise.runLater(() -> { if (resolution == ResolutionState.RUNNING) { callback.apply(this); } else { - // Resubmit the callback? Probably not, this means we momentarily were in the - // correct state, but + // Resubmit the callback? Probably not, this means we momentarily were in the correct state, but // if we return to it, things could get weird // onRunning(callback); } @@ -515,8 +498,7 @@ public void onFailed(JsConsumer callback, JsRunnable other) { if (resolution == ResolutionState.FAILED) { callback.apply(failMsg); } else { - // Resubmit the callback? Probably not, this means we momentarily were in the - // correct state, but + // Resubmit the callback? Probably not, this means we momentarily were in the correct state, but // if we return to it, things could get weird // onFailed(callback); } @@ -540,17 +522,15 @@ public void onFailed(JsConsumer callback, JsRunnable other) { } /** - * Checks if the current state can be used as a root to turn into the state described by the - * supplied arguments. + * Checks if the current state can be used as a root to turn into the state described by the supplied arguments. 
* - * While it is technically valid to interleave filters and sorts, for maximal performance, we - * want to ensure filters are always applied before sorts; so, if you have added a filter, but - * the current state has any sorts at all, we will return false, even though this state would be - * correct when used, it won't be maximally efficient. + * While it is technically valid to interleave filters and sorts, for maximal performance, we want to ensure filters + * are always applied before sorts; so, if you have added a filter, but the current state has any sorts at all, we + * will return false, even though this state would be correct when used, it won't be maximally efficient. * - * Additionally, custom columns might be used in filters and sorts, but a filter or sort on a - * custom column cannot be created until that custom column has been exported and provides a - * table definition so that we can supply Column instances to operate upon. + * Additionally, custom columns might be used in filters and sorts, but a filter or sort on a custom column cannot + * be created until that custom column has been exported and provides a table definition so that we can supply + * Column instances to operate upon. * * @param sorts The full set of sorts we want in our resulting table state * @param filters The full set of filters we want in our resulting table state @@ -559,7 +539,7 @@ public void onFailed(JsConsumer callback, JsRunnable other) { * @return true if this state can be used to add */ public boolean isCompatible(List sorts, List filters, - List customColumns, boolean flat) { + List customColumns, boolean flat) { // start with the easy wins... if (isEmptyConfig()) { // an empty root state is compatible to everyone. 
@@ -597,8 +577,7 @@ public boolean isCompatible(List sorts, List filters, if (!filters.containsAll(getFilters())) { return false; } - // custom columns must be an exact prefix of the requested columns (we already checked size - // above) + // custom columns must be an exact prefix of the requested columns (we already checked size above) if (!startsWith(customColumns, getCustomColumns())) { return false; } @@ -608,8 +587,7 @@ public boolean isCompatible(List sorts, List filters, // this is a performance optimization; we are better off to throw away intermediate // tables than to perform filters after any sorts. if (getSorts().size() > 0 && getFilters().size() != filters.size()) { - // note, if we have sorts applied and filter lengths are the same, but with different - // contents, + // note, if we have sorts applied and filter lengths are the same, but with different contents, // we will have already failed the isSuperSet check above return false; } @@ -638,8 +616,7 @@ private boolean startsWith(List candidate, List prefix) { public Column findColumn(String key) { Column c = columnLookup.get().get(key); if (c == null) { - // we could consider making this forgiving, but it's better if clients don't send us - // garbage requests. + // we could consider making this forgiving, but it's better if clients don't send us garbage requests. throw new NoSuchElementException(key); } return c; @@ -648,21 +625,19 @@ public Column findColumn(String key) { /** * @return true if there are no tables bound to this state. * - * If a table that had a subscription for this state was orphaned by a pending request, - * we want to clear the subscription immediately so it becomes inert (immediately remove - * the subscription), but we may need to rollback the request, and we don't want to - * release the handle until the pending request is finished (whereupon we will remove - * the binding). 
+ * If a table that had a subscription for this state was orphaned by a pending request, we want to clear the + * subscription immediately so it becomes inert (immediately remove the subscription), but we may need to + * rollback the request, and we don't want to release the handle until the pending request is finished + * (whereupon we will remove the binding). */ public boolean isEmpty() { return active.size == 0 && paused.size == 0 && retainers.size == 0; } /** - * Call this with a marker object that you want to use to cause the state to be retained. You - * either need to hold onto the returned function to clear your binding, or call - * {@link #unretain(Object)} when finished to avoid keeping this handle alive beyond its - * usefulness. + * Call this with a marker object that you want to use to cause the state to be retained. You either need to hold + * onto the returned function to clear your binding, or call {@link #unretain(Object)} when finished to avoid + * keeping this handle alive beyond its usefulness. 
*/ public JsRunnable retain(Object retainer) { retainers.add(retainer); @@ -681,8 +656,7 @@ public boolean isActiveEmpty() { } public boolean hasNoSubscriptions() { - return JsItr.iterate(active.values()) - .allMatch(binding -> binding.getSubscription() == null); + return JsItr.iterate(active.values()).allMatch(binding -> binding.getSubscription() == null); } public boolean hasSort(Sort candidate) { @@ -699,7 +673,7 @@ public boolean hasFilter(FilterCondition candidate) { public boolean isFinished() { return resolution == ResolutionState.RUNNING || resolution == ResolutionState.FAILED - || resolution == ResolutionState.RELEASED; + || resolution == ResolutionState.RELEASED; } public boolean isDisconnected() { @@ -737,16 +711,14 @@ public boolean releaseTable(JsTable table) { public void setDesiredViewport(JsTable table, long firstRow, long lastRow, Column[] columns) { touch(); final ActiveTableBinding sub = active.get(table); - assert sub != null - : "You cannot set the desired viewport on a non-active state + table combination"; + assert sub != null : "You cannot set the desired viewport on a non-active state + table combination"; final RangeSet rows = sub.setDesiredViewport(firstRow, lastRow, columns); - // let event loop eat multiple viewport sets and only apply the last one (winner of who gets - // spot in map) + // let event loop eat multiple viewport sets and only apply the last one (winner of who gets spot in map) LazyPromise.runLater(() -> { if (sub.getRows() == rows) { // winner! now, on to the next hurdle... ensuring we have columns. - // TODO: have an onColumnsReady callback, for cases when we know we're only waiting - // on non-column-modifying operations + // TODO: have an onColumnsReady callback, for cases when we know we're only waiting on + // non-column-modifying operations onRunning(self -> { if (sub.getRows() == rows) { // winner again! 
@@ -800,8 +772,7 @@ public BitSet makeBitset(Column[] columns) { public MappedIterable getBoundTables() { - assert iterate(active.keys()).noneMatch(paused::has) - : "State cannot be active and paused at the same time; " + assert iterate(active.keys()).noneMatch(paused::has) : "State cannot be active and paused at the same time; " + "active: " + active + " paused: " + paused; return iterate(active.keys()).plus(iterate(paused.keys())); } @@ -810,7 +781,7 @@ public void forActiveSubscriptions(JsBiConsumer callback) { JsItr.forEach(active, (table, binding) -> { if (binding.getSubscription() != null) { assert binding.getTable() == table - : "Corrupt binding between " + table + " and " + binding + " in " + active; + : "Corrupt binding between " + table + " and " + binding + " in " + active; callback.apply((JsTable) table, binding.getSubscription()); } }); @@ -861,32 +832,32 @@ public boolean isSubscribed() { @Override public String toString() { return "ClientTableState{" + - "handle=" + handle + - ", resolution=" + resolution + - ", active=" + active + - ", paused=" + paused + - ", size=" + size + - ", tableDef=" + tableDef + - ", rowFormatColumn=" + rowFormatColumn + - ", failMsg='" + failMsg + '\'' + - ", sorts=" + getSorts() + - ", filters=" + getFilters() + - ", customColumns=" + getCustomColumns() + - ", selectDistinct=" + getSelectDistinct() + - "} "; + "handle=" + handle + + ", resolution=" + resolution + + ", active=" + active + + ", paused=" + paused + + ", size=" + size + + ", tableDef=" + tableDef + + ", rowFormatColumn=" + rowFormatColumn + + ", failMsg='" + failMsg + '\'' + + ", sorts=" + getSorts() + + ", filters=" + getFilters() + + ", customColumns=" + getCustomColumns() + + ", selectDistinct=" + getSelectDistinct() + + "} "; } public String toStringMinimal() { return "ClientTableState{" + - "handle=" + handle + - ", resolution=" + resolution + - ", active=" + active.size + - ", paused=" + paused.size + - ", retainers=" + retainers.size + - ", size=" + 
size + - ", tableDef=" + tableDef + - ", rowFormatColumn=" + rowFormatColumn + - "} "; + "handle=" + handle + + ", resolution=" + resolution + + ", active=" + active.size + + ", paused=" + paused.size + + ", retainers=" + retainers.size + + ", size=" + size + + ", tableDef=" + tableDef + + ", rowFormatColumn=" + rowFormatColumn + + "} "; } public ClientTableState getPrevious() { @@ -904,8 +875,8 @@ public HasTableState getBinding(JsTable table) { public ActiveTableBinding getActiveBinding(JsTable table) { final HasTableState existing = getBinding(table); return existing == null ? null - : existing.isActive() ? (ActiveTableBinding) existing - : ((PausedTableBinding) existing).getActiveBinding(); + : existing.isActive() ? (ActiveTableBinding) existing + : ((PausedTableBinding) existing).getActiveBinding(); } public Iterable getActiveBindings() { @@ -951,7 +922,7 @@ public MappedIterable reversed() { */ public void cleanup() { assert JsItr.iterate(active.keys()).allMatch(t -> !t.isAlive() || t.state() == this) - : "active map not up-to-date with tables"; + : "active map not up-to-date with tables"; if (getResolution() == ResolutionState.RELEASED) { for (PausedTableBinding sub : JsItr.iterate(paused.values())) { releaseTable(sub.getActiveBinding().getTable()); @@ -960,8 +931,7 @@ public void cleanup() { // notify any retainers who have events that we've been released. 
for (Object retainer : JsItr.iterate(retainers.values())) { if (retainer instanceof HasEventHandling) { - ((HasEventHandling) retainer) - .fireEventWithDetail(HasEventHandling.INTERNAL_EVENT_RELEASED, this); + ((HasEventHandling) retainer).fireEventWithDetail(HasEventHandling.INTERNAL_EVENT_RELEASED, this); } } @@ -974,8 +944,8 @@ public void cleanup() { if (table.isClosed()) { releaseTable(table); } else if (!table.hasHandle(getHandle()) && !table.hasRollbackHandle(getHandle())) { - // release the table if it no longer has a reference to the handle with any of it's - // bindings, including paused bindings + // release the table if it no longer has a reference to the handle with any of it's bindings, including + // paused bindings // don't releaseTable for tables that are in flux if (table.state().isRunning()) { releaseTable(table); @@ -999,21 +969,19 @@ public static Comparator newestFirst() { } public Promise fetchTable(HasEventHandling failHandler, BrowserHeaders metadata) { - return refetch(failHandler, metadata) - .then(cts -> Promise.resolve(new JsTable(connection, cts))); + return refetch(failHandler, metadata).then(cts -> Promise.resolve(new JsTable(connection, cts))); } - public Promise refetch(HasEventHandling failHandler, - BrowserHeaders metadata) { + public Promise refetch(HasEventHandling failHandler, BrowserHeaders metadata) { final Promise promise = - Callbacks.grpcUnaryPromise(c -> fetch.fetch(c, this, metadata)); + Callbacks.grpcUnaryPromise(c -> fetch.fetch(c, this, metadata)); // noinspection unchecked return promise.then(def -> { if (resolution == ResolutionState.RELEASED) { // was released before we managed to finish the fetch, ignore // noinspection rawtypes,unchecked return (Promise) Promise.reject( - "Table already released, cannot process incoming table definition, this can be safely ignored."); + "Table already released, cannot process incoming table definition, this can be safely ignored."); } applyTableCreationResponse(def); return 
Promise.resolve(this); @@ -1050,7 +1018,7 @@ public void applyTableCreationResponse(ExportedTableCreationResponse def) { cols[i].setType(fieldMetadata.get("deephaven:type")); cols[i].setStyleColumn("true".equals(fieldMetadata.get("deephaven:isStyle"))); cols[i].setFormatColumn("true".equals(fieldMetadata.get("deephaven:isDateFormat")) - || "true".equals(fieldMetadata.get("deephaven:isNumberFormat"))); + || "true".equals(fieldMetadata.get("deephaven:isNumberFormat"))); cols[i].setForRow("true".equals(fieldMetadata.get("deephaven:isRowStyle"))); String formatColumnName = fieldMetadata.get("deephaven:dateFormatColumn"); @@ -1067,11 +1035,11 @@ public void applyTableCreationResponse(ExportedTableCreationResponse def) { attributes.setKeys(new String[0]); attributes.setRemainingKeys(new String[0]); setTableDef(new InitialTableDefinition() - .setAttributes(attributes) - .setColumns(cols) - .setFlat(false) - .setId(null) - .setSize(Long.parseLong(def.getSize()))); + .setAttributes(attributes) + .setColumns(cols) + .setFlat(false) + .setId(null) + .setSize(Long.parseLong(def.getSize()))); setResolution(ResolutionState.RUNNING); setSize(Long.parseLong(def.getSize())); diff --git a/web/client-api/src/main/java/io/deephaven/web/client/state/HasTableBinding.java b/web/client-api/src/main/java/io/deephaven/web/client/state/HasTableBinding.java index 0a64023acfd..ad1483b3411 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/state/HasTableBinding.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/state/HasTableBinding.java @@ -4,9 +4,8 @@ import io.deephaven.web.client.api.TableTicket; /** - * In order to not-require a JsTable to be bound to a {@link ClientTableState}, we will use this - * interface, which exposes the parts of JsTable that we require in managing handles and their - * lifecycles. 
+ * In order to not-require a JsTable to be bound to a {@link ClientTableState}, we will use this interface, which + * exposes the parts of JsTable that we require in managing handles and their lifecycles. * */ public interface HasTableBinding { diff --git a/web/client-api/src/main/java/io/deephaven/web/client/state/PausedTableBinding.java b/web/client-api/src/main/java/io/deephaven/web/client/state/PausedTableBinding.java index 486ef19a6de..dc42f4eea56 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/state/PausedTableBinding.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/state/PausedTableBinding.java @@ -5,10 +5,9 @@ /** * Represents a previously active binding that we're still keeping around for a bit. * - * We will mark/sweep all paused bindings at once to decide when to release an item. Since we give - * requests a crazy ten minute timeout (seriously, if we actually need that long, we should have a - * long-running-request option that sends back a "work token" where we will notify you of success - * later. + * We will mark/sweep all paused bindings at once to decide when to release an item. Since we give requests a crazy ten + * minute timeout (seriously, if we actually need that long, we should have a long-running-request option that sends + * back a "work token" where we will notify you of success later. */ public class PausedTableBinding implements HasTableState { diff --git a/web/client-api/src/main/java/io/deephaven/web/client/state/TableReviver.java b/web/client-api/src/main/java/io/deephaven/web/client/state/TableReviver.java index 67992285910..582796b68e1 100644 --- a/web/client-api/src/main/java/io/deephaven/web/client/state/TableReviver.java +++ b/web/client-api/src/main/java/io/deephaven/web/client/state/TableReviver.java @@ -23,9 +23,8 @@ /** * Instances of this class are responsible for bringing CTS back to life. 
* - * The {@link RequestBatcher} class has been refactored to take an interface, - * {@link HasTableBinding}, which the TableReviver implements, so that it can assemble "rebuild this - * state" requests. + * The {@link RequestBatcher} class has been refactored to take an interface, {@link HasTableBinding}, which the + * TableReviver implements, so that it can assemble "rebuild this state" requests. * */ public class TableReviver implements HasTableBinding { @@ -45,8 +44,8 @@ public void revive(BrowserHeaders metadata, ClientTableState... states) { enqueued = new IdentityHashSet<>(); LazyPromise.runLater(() -> { final ClientTableState[] toRevive = enqueued.stream() - .filter(ClientTableState::shouldResuscitate) - .toArray(ClientTableState[]::new); + .filter(ClientTableState::shouldResuscitate) + .toArray(ClientTableState[]::new); enqueued = null; doRevive(toRevive, metadata); }); @@ -74,18 +73,17 @@ private void doRevive(ClientTableState[] states, BrowserHeaders metadata) { for (ClientTableState state : reviveFirst) { JsLog.debug("Attempting revive on ", state); state.maybeRevive(metadata).then( - success -> { - state.forActiveLifecycles(t -> t.revive(state)); - return null; - }, failure -> { - state.forActiveLifecycles(t -> t.die(failure)); - return null; - }); + success -> { + state.forActiveLifecycles(t -> t.revive(state)); + return null; + }, failure -> { + state.forActiveLifecycles(t -> t.die(failure)); + return null; + }); } if (!reviveLast.isEmpty()) { - // Instead of using RequestBatcher, we should just be rebuilding the SerializedTableOps - // directly. + // Instead of using RequestBatcher, we should just be rebuilding the SerializedTableOps directly. 
int cnt = 0, page = 6; BatchBuilder builder = new BatchBuilder(); Map all = new LinkedHashMap<>(); @@ -118,8 +116,8 @@ private void sendRequest(BatchBuilder requester, Map stream = ResponseStreamWrapper - .of(connection.tableServiceClient().batch(req, connection.metadata())); + ResponseStreamWrapper stream = + ResponseStreamWrapper.of(connection.tableServiceClient().batch(req, connection.metadata())); stream.onData(response -> { TableReference resultid = response.getResultId(); if (!resultid.hasTicket()) { @@ -193,19 +191,19 @@ public void setState(ClientTableState appendTo) { @Override public void rollback() { assert false : "Revivification requests should not be sent through the RequestBatcher " + - "(who is, currently, the only caller of rollback())"; + "(who is, currently, the only caller of rollback())"; } @Override public void setRollback(ActiveTableBinding rollbackTo) { assert false : "Revivification requests should not be sent through the RequestBatcher " + - "(who is, currently, the only caller of setRollback())"; + "(who is, currently, the only caller of setRollback())"; } @Override public void maybeReviveSubscription() { // should never be called assert false : "Revivification requests should not be sent through the RequestBatcher " + - "(who is, currently, the only caller of maybeReviveSubscription())"; + "(who is, currently, the only caller of maybeReviveSubscription())"; } } diff --git a/web/client-api/src/test/java/io/deephaven/web/client/api/barrage/CompressedRangeSetReaderTest.java b/web/client-api/src/test/java/io/deephaven/web/client/api/barrage/CompressedRangeSetReaderTest.java index 2d6d87b6ee2..df72c8631a1 100644 --- a/web/client-api/src/test/java/io/deephaven/web/client/api/barrage/CompressedRangeSetReaderTest.java +++ b/web/client-api/src/test/java/io/deephaven/web/client/api/barrage/CompressedRangeSetReaderTest.java @@ -52,8 +52,7 @@ public void testSimpleRange() { assertRoundTrip(RangeSet.ofRange(10, 19)); - assertRoundTrip( - 
RangeSet.ofRange((long) Integer.MAX_VALUE + 10, (long) Integer.MAX_VALUE + 19)); + assertRoundTrip(RangeSet.ofRange((long) Integer.MAX_VALUE + 10, (long) Integer.MAX_VALUE + 19)); } @Test diff --git a/web/client-api/src/test/java/io/deephaven/web/client/api/filter/FilterConditionTestGwt.java b/web/client-api/src/test/java/io/deephaven/web/client/api/filter/FilterConditionTestGwt.java index 18e65eea0b7..d128dd10aef 100644 --- a/web/client-api/src/test/java/io/deephaven/web/client/api/filter/FilterConditionTestGwt.java +++ b/web/client-api/src/test/java/io/deephaven/web/client/api/filter/FilterConditionTestGwt.java @@ -4,8 +4,8 @@ import io.deephaven.web.client.api.Column; /** - * Tests basic construction of filter condition instances from simple tables. This does not fully - * end-to-end test the filter, just the API around the simple AST we use, especially validation. + * Tests basic construction of filter condition instances from simple tables. This does not fully end-to-end test the + * filter, just the API around the simple AST we use, especially validation. 
*/ public class FilterConditionTestGwt extends GWTTestCase { @@ -25,44 +25,38 @@ private FilterValue[] arr(FilterValue filterValue) { public void testCreateSimpleFilters() { Column c = getColumn(); - assertEquals("ColumnName == (ignore case) 1", - c.filter().eqIgnoreCase(FilterValue.ofNumber(1)).toString()); - assertEquals("ColumnName != (ignore case) 1", - c.filter().notEqIgnoreCase(FilterValue.ofNumber(1)).toString()); + assertEquals("ColumnName == (ignore case) 1", c.filter().eqIgnoreCase(FilterValue.ofNumber(1)).toString()); + assertEquals("ColumnName != (ignore case) 1", c.filter().notEqIgnoreCase(FilterValue.ofNumber(1)).toString()); assertEquals("ColumnName == 1", c.filter().eq(FilterValue.ofNumber(1)).toString()); assertEquals("ColumnName != 1", c.filter().notEq(FilterValue.ofNumber(1)).toString()); assertEquals("ColumnName > 1", c.filter().greaterThan(FilterValue.ofNumber(1)).toString()); assertEquals("ColumnName < 1", c.filter().lessThan(FilterValue.ofNumber(1)).toString()); - assertEquals("ColumnName >= 1", - c.filter().greaterThanOrEqualTo(FilterValue.ofNumber(1)).toString()); - assertEquals("ColumnName <= 1", - c.filter().lessThanOrEqualTo(FilterValue.ofNumber(1)).toString()); + assertEquals("ColumnName >= 1", c.filter().greaterThanOrEqualTo(FilterValue.ofNumber(1)).toString()); + assertEquals("ColumnName <= 1", c.filter().lessThanOrEqualTo(FilterValue.ofNumber(1)).toString()); assertEquals("ColumnName in 1", c.filter().in(arr(FilterValue.ofNumber(1))).toString()); - assertEquals("ColumnName not in 1", - c.filter().notIn(arr(FilterValue.ofNumber(1))).toString()); - assertEquals("ColumnName icase in 1", - c.filter().inIgnoreCase(arr(FilterValue.ofNumber(1))).toString()); - assertEquals("ColumnName icase not in 1", - c.filter().notInIgnoreCase(arr(FilterValue.ofNumber(1))).toString()); + assertEquals("ColumnName not in 1", c.filter().notIn(arr(FilterValue.ofNumber(1))).toString()); + assertEquals("ColumnName icase in 1", 
c.filter().inIgnoreCase(arr(FilterValue.ofNumber(1))).toString()); + assertEquals("ColumnName icase not in 1", c.filter().notInIgnoreCase(arr(FilterValue.ofNumber(1))).toString()); assertEquals("ColumnName == true", c.filter().isTrue().toString()); assertEquals("ColumnName == false", c.filter().isFalse().toString()); assertEquals("isNull(ColumnName)", c.filter().isNull().toString()); assertEquals("ColumnName.foo1()", c.filter().invoke("foo1").toString()); - assertEquals("ColumnName.foo2(1)", - c.filter().invoke("foo2", FilterValue.ofNumber(1)).toString()); + assertEquals("ColumnName.foo2(1)", c.filter().invoke("foo2", FilterValue.ofNumber(1)).toString()); assertEquals("ColumnName.foo3(1, 2, \"three\")", - c.filter().invoke("foo3", FilterValue.ofNumber(1), FilterValue.ofNumber(2), - FilterValue.ofString("three")).toString()); + c.filter() + .invoke("foo3", FilterValue.ofNumber(1), FilterValue.ofNumber(2), FilterValue.ofString("three")) + .toString()); assertEquals("foo4()", FilterCondition.invoke("foo4").toString()); assertEquals("foo5(1)", FilterCondition.invoke("foo5", FilterValue.ofNumber(1)).toString()); assertEquals("foo6(1, 2, \"three\")", - FilterCondition.invoke("foo6", FilterValue.ofNumber(1), FilterValue.ofNumber(2), - FilterValue.ofString("three")).toString()); + FilterCondition + .invoke("foo6", FilterValue.ofNumber(1), FilterValue.ofNumber(2), FilterValue.ofString("three")) + .toString()); } public void testCreateCombinedFilters() { @@ -70,27 +64,24 @@ public void testCreateCombinedFilters() { // individual AND assertEquals("(ColumnName == 1 && ColumnName != 2)", - c.filter().eq(FilterValue.ofNumber(1)).and(c.filter().notEq(FilterValue.ofNumber(2))) - .toString()); + c.filter().eq(FilterValue.ofNumber(1)).and(c.filter().notEq(FilterValue.ofNumber(2))).toString()); // individual OR assertEquals("(ColumnName == 1 || ColumnName != 2)", - c.filter().eq(FilterValue.ofNumber(1)).or(c.filter().notEq(FilterValue.ofNumber(2))) - .toString()); + 
c.filter().eq(FilterValue.ofNumber(1)).or(c.filter().notEq(FilterValue.ofNumber(2))).toString()); // individual NOT assertEquals("!(ColumnName == 1)", c.filter().eq(FilterValue.ofNumber(1)).not().toString()); // nested/combined - assertEquals( - "(ColumnName == 1 && !((ColumnName == 2 || ColumnName == 3 || ColumnName == 4)))", - c.filter().eq(FilterValue.ofNumber(1)).and( - c.filter().eq(FilterValue.ofNumber(2)) - .or( - c.filter().eq(FilterValue.ofNumber(3)), - c.filter().eq(FilterValue.ofNumber(4))) - .not()) - .toString() + assertEquals("(ColumnName == 1 && !((ColumnName == 2 || ColumnName == 3 || ColumnName == 4)))", + c.filter().eq(FilterValue.ofNumber(1)).and( + c.filter().eq(FilterValue.ofNumber(2)) + .or( + c.filter().eq(FilterValue.ofNumber(3)), + c.filter().eq(FilterValue.ofNumber(4))) + .not()) + .toString() ); } diff --git a/web/client-api/src/test/java/io/deephaven/web/client/api/i18n/JsDateTimeFormatTestGwt.java b/web/client-api/src/test/java/io/deephaven/web/client/api/i18n/JsDateTimeFormatTestGwt.java index 3c05db84539..4818af99285 100644 --- a/web/client-api/src/test/java/io/deephaven/web/client/api/i18n/JsDateTimeFormatTestGwt.java +++ b/web/client-api/src/test/java/io/deephaven/web/client/api/i18n/JsDateTimeFormatTestGwt.java @@ -44,12 +44,10 @@ public void testGetFormat() { nanos = assertRoundTrip("yyyy-MM-dd'T'HH:mm:ss.SSS zzzz", "2018-04-26T12:34:56.123 " + tz); assertEquals(123_000_000, nanos % 1_000_000_000); // SSSSSS - nanos = assertRoundTrip("yyyy-MM-dd'T'HH:mm:ss.SSSSSS zzzz", - "2018-04-26T12:34:56.123456 " + tz); + nanos = assertRoundTrip("yyyy-MM-dd'T'HH:mm:ss.SSSSSS zzzz", "2018-04-26T12:34:56.123456 " + tz); assertEquals(123_456_000, nanos % 1_000_000_000); // SSSSSSSSS - nanos = assertRoundTrip("yyyy-MM-dd'T'HH:mm:ss.SSSSSSSSS zzzz", - "2018-04-26T12:34:56.123456789 " + tz); + nanos = assertRoundTrip("yyyy-MM-dd'T'HH:mm:ss.SSSSSSSSS zzzz", "2018-04-26T12:34:56.123456789 " + tz); assertEquals(123_456_789, nanos % 1_000_000_000); // 
test with leading zeros @@ -59,34 +57,26 @@ public void testGetFormat() { nanos = assertRoundTrip("yyyy-MM-dd'T'HH:mm:ss.SSS zzzz", "2018-04-26T12:34:56.001 " + tz); assertEquals(1_000_000, nanos % 1_000_000_000); - nanos = assertRoundTrip("yyyy-MM-dd'T'HH:mm:ss.SSSSSS zzzz", - "2018-04-26T12:34:56.001000 " + tz); + nanos = assertRoundTrip("yyyy-MM-dd'T'HH:mm:ss.SSSSSS zzzz", "2018-04-26T12:34:56.001000 " + tz); assertEquals(1_000_000, nanos % 1_000_000_000); - nanos = assertRoundTrip("yyyy-MM-dd'T'HH:mm:ss.SSSSSS zzzz", - "2018-04-26T12:34:56.001001 " + tz); + nanos = assertRoundTrip("yyyy-MM-dd'T'HH:mm:ss.SSSSSS zzzz", "2018-04-26T12:34:56.001001 " + tz); assertEquals(1_001_000, nanos % 1_000_000_000); - nanos = assertRoundTrip("yyyy-MM-dd'T'HH:mm:ss.SSSSSS zzzz", - "2018-04-26T12:34:56.100000 " + tz); + nanos = assertRoundTrip("yyyy-MM-dd'T'HH:mm:ss.SSSSSS zzzz", "2018-04-26T12:34:56.100000 " + tz); assertEquals(100_000_000, nanos % 1_000_000_000); - nanos = assertRoundTrip("yyyy-MM-dd'T'HH:mm:ss.SSSSSSSSS zzzz", - "2018-04-26T12:34:56.000000000 " + tz); + nanos = assertRoundTrip("yyyy-MM-dd'T'HH:mm:ss.SSSSSSSSS zzzz", "2018-04-26T12:34:56.000000000 " + tz); assertEquals(0, nanos % 1_000_000_000); - nanos = assertRoundTrip("yyyy-MM-dd'T'HH:mm:ss.SSSSSSSSS zzzz", - "2018-04-26T12:34:56.000000001 " + tz); + nanos = assertRoundTrip("yyyy-MM-dd'T'HH:mm:ss.SSSSSSSSS zzzz", "2018-04-26T12:34:56.000000001 " + tz); assertEquals(1, nanos % 1_000_000_000); - nanos = assertRoundTrip("yyyy-MM-dd'T'HH:mm:ss.SSSSSSSSS zzzz", - "2018-04-26T12:34:56.000000010 " + tz); + nanos = assertRoundTrip("yyyy-MM-dd'T'HH:mm:ss.SSSSSSSSS zzzz", "2018-04-26T12:34:56.000000010 " + tz); assertEquals(10, nanos % 1_000_000_000); - nanos = assertRoundTrip("yyyy-MM-dd'T'HH:mm:ss.SSSSSSSSS zzzz", - "2018-04-26T12:34:56.000001234 " + tz); + nanos = assertRoundTrip("yyyy-MM-dd'T'HH:mm:ss.SSSSSSSSS zzzz", "2018-04-26T12:34:56.000001234 " + tz); assertEquals(1234, nanos % 1_000_000_000); } /** - * 
Helper which takes a string, parses the string, and then formats the long again. The input is - * checked to match the output of the final format call, and the intermediate value is returned - * for any subsequent checks. + * Helper which takes a string, parses the string, and then formats the long again. The input is checked to match + * the output of the final format call, and the intermediate value is returned for any subsequent checks. */ private long assertRoundTrip(String formatString, String input) { JsDateTimeFormat format = JsDateTimeFormat.getFormat(formatString); diff --git a/web/client-api/src/test/java/io/deephaven/web/client/api/i18n/JsNumberFormatTestGwt.java b/web/client-api/src/test/java/io/deephaven/web/client/api/i18n/JsNumberFormatTestGwt.java index 2132502a82e..ba0af7bcaee 100644 --- a/web/client-api/src/test/java/io/deephaven/web/client/api/i18n/JsNumberFormatTestGwt.java +++ b/web/client-api/src/test/java/io/deephaven/web/client/api/i18n/JsNumberFormatTestGwt.java @@ -4,8 +4,8 @@ import io.deephaven.web.client.api.LongWrapper; /** - * Since the JsNumberFormat test class is so thin, this mostly serves to confirm that a few - * behaviors are the same in the JS client as they are in the swing console. + * Since the JsNumberFormat test class is so thin, this mostly serves to confirm that a few behaviors are the same in + * the JS client as they are in the swing console. 
*/ public class JsNumberFormatTestGwt extends GWTTestCase { @@ -53,23 +53,15 @@ public void testLongFormat() { assertEquals("-1", format.format(LongWrapper.of(-1))); assertEquals("1", format.format(LongWrapper.of(1))); - assertEquals("9,999,000,000,000,000", - format.format(LongWrapper.of(9_999_000_000_000_000L))); - assertEquals("9,999,000,000,000,001", - format.format(LongWrapper.of(9_999_000_000_000_001L))); - assertEquals("9,999,000,000,000,002", - format.format(LongWrapper.of(9_999_000_000_000_002L))); - assertEquals("9,999,000,000,000,003", - format.format(LongWrapper.of(9_999_000_000_000_003L))); + assertEquals("9,999,000,000,000,000", format.format(LongWrapper.of(9_999_000_000_000_000L))); + assertEquals("9,999,000,000,000,001", format.format(LongWrapper.of(9_999_000_000_000_001L))); + assertEquals("9,999,000,000,000,002", format.format(LongWrapper.of(9_999_000_000_000_002L))); + assertEquals("9,999,000,000,000,003", format.format(LongWrapper.of(9_999_000_000_000_003L))); - assertEquals("-9,999,000,000,000,000", - format.format(LongWrapper.of(-9_999_000_000_000_000L))); - assertEquals("-9,999,000,000,000,001", - format.format(LongWrapper.of(-9_999_000_000_000_001L))); - assertEquals("-9,999,000,000,000,002", - format.format(LongWrapper.of(-9_999_000_000_000_002L))); - assertEquals("-9,999,000,000,000,003", - format.format(LongWrapper.of(-9_999_000_000_000_003L))); + assertEquals("-9,999,000,000,000,000", format.format(LongWrapper.of(-9_999_000_000_000_000L))); + assertEquals("-9,999,000,000,000,001", format.format(LongWrapper.of(-9_999_000_000_000_001L))); + assertEquals("-9,999,000,000,000,002", format.format(LongWrapper.of(-9_999_000_000_000_002L))); + assertEquals("-9,999,000,000,000,003", format.format(LongWrapper.of(-9_999_000_000_000_003L))); } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/Flatbuf.java 
b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/Flatbuf.java index 5540a8331d1..29434039033 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/Flatbuf.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/Flatbuf.java @@ -10,9 +10,9 @@ import jsinterop.base.Js; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Message_generated.org.apache.arrow.flatbuf", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.flatbuf.Message_generated.org.apache.arrow.flatbuf", + namespace = JsPackage.GLOBAL) public class Flatbuf { @JsFunction public interface UnionListToMessageHeaderAccessorFn { @@ -94,26 +94,25 @@ default boolean isSchema() { @JsOverlay default Flatbuf.UnionListToMessageHeaderAccessorFn.UnionType onInvoke( - double p0, DictionaryBatch p1) { + double p0, DictionaryBatch p1) { return onInvoke( - p0, Js.uncheckedCast(p1)); + p0, Js.uncheckedCast(p1)); } Flatbuf.UnionListToMessageHeaderAccessorFn.UnionType onInvoke( - double p0, Flatbuf.UnionListToMessageHeaderAccessorFn.P1UnionType p1); + double p0, Flatbuf.UnionListToMessageHeaderAccessorFn.P1UnionType p1); @JsOverlay default Flatbuf.UnionListToMessageHeaderAccessorFn.UnionType onInvoke( - double p0, RecordBatch p1) { + double p0, RecordBatch p1) { return onInvoke( - p0, Js.uncheckedCast(p1)); + p0, Js.uncheckedCast(p1)); } @JsOverlay - default Flatbuf.UnionListToMessageHeaderAccessorFn.UnionType onInvoke(double p0, - Schema p1) { + default Flatbuf.UnionListToMessageHeaderAccessorFn.UnionType onInvoke(double p0, Schema p1) { return onInvoke( - p0, Js.uncheckedCast(p1)); + p0, Js.uncheckedCast(p1)); } } @@ -235,23 +234,20 @@ default boolean isSchema() { @JsOverlay default 
Flatbuf.UnionToMessageHeaderAccessorFn.UnionType onInvoke(DictionaryBatch p0) { - return onInvoke( - Js.uncheckedCast(p0)); + return onInvoke(Js.uncheckedCast(p0)); } Flatbuf.UnionToMessageHeaderAccessorFn.UnionType onInvoke( - Flatbuf.UnionToMessageHeaderAccessorFn.P0UnionType p0); + Flatbuf.UnionToMessageHeaderAccessorFn.P0UnionType p0); @JsOverlay default Flatbuf.UnionToMessageHeaderAccessorFn.UnionType onInvoke(RecordBatch p0) { - return onInvoke( - Js.uncheckedCast(p0)); + return onInvoke(Js.uncheckedCast(p0)); } @JsOverlay default Flatbuf.UnionToMessageHeaderAccessorFn.UnionType onInvoke(Schema p0) { - return onInvoke( - Js.uncheckedCast(p0)); + return onInvoke(Js.uncheckedCast(p0)); } } @@ -294,8 +290,8 @@ default boolean isSchema() { } public static native Flatbuf.UnionListToMessageHeaderUnionType unionListToMessageHeader( - int type, Flatbuf.UnionListToMessageHeaderAccessorFn accessor, double index); + int type, Flatbuf.UnionListToMessageHeaderAccessorFn accessor, double index); public static native Flatbuf.UnionToMessageHeaderUnionType unionToMessageHeader( - int type, Flatbuf.UnionToMessageHeaderAccessorFn accessor); + int type, Flatbuf.UnionToMessageHeaderAccessorFn accessor); } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/BodyCompression.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/BodyCompression.java index d82137bfa1f..1589e2f9435 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/BodyCompression.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/BodyCompression.java @@ -6,26 +6,25 @@ import jsinterop.annotations.JsType; @JsType( - isNative = 
true, - name = "dhinternal.arrow.flight.flatbuf.Message_generated.org.apache.arrow.flatbuf.BodyCompression", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.flatbuf.Message_generated.org.apache.arrow.flatbuf.BodyCompression", + namespace = JsPackage.GLOBAL) public class BodyCompression { public static native void addCodec(Builder builder, int codec); public static native void addMethod(Builder builder, int method); public static native double createBodyCompression( - Builder builder, int codec, int method); + Builder builder, int codec, int method); public static native double endBodyCompression(Builder builder); - public static native BodyCompression getRootAsBodyCompression(ByteBuffer bb, - BodyCompression obj); + public static native BodyCompression getRootAsBodyCompression(ByteBuffer bb, BodyCompression obj); public static native BodyCompression getRootAsBodyCompression(ByteBuffer bb); public static native BodyCompression getSizePrefixedRootAsBodyCompression( - ByteBuffer bb, BodyCompression obj); + ByteBuffer bb, BodyCompression obj); public static native BodyCompression getSizePrefixedRootAsBodyCompression(ByteBuffer bb); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/BodyCompressionMethod.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/BodyCompressionMethod.java index 5bb914c8e8d..e5b1fa98c14 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/BodyCompressionMethod.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/BodyCompressionMethod.java @@ -4,9 +4,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - 
name = "dhinternal.arrow.flight.flatbuf.Message_generated.org.apache.arrow.flatbuf.BodyCompressionMethod", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.flatbuf.Message_generated.org.apache.arrow.flatbuf.BodyCompressionMethod", + namespace = JsPackage.GLOBAL) public class BodyCompressionMethod { public static int BUFFER; } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/CompressionType.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/CompressionType.java index 930b185a999..9b5bc864e12 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/CompressionType.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/CompressionType.java @@ -4,10 +4,10 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Message_generated.org.apache.arrow.flatbuf.CompressionType", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.flatbuf.Message_generated.org.apache.arrow.flatbuf.CompressionType", + namespace = JsPackage.GLOBAL) public class CompressionType { public static int LZ4_FRAME, - ZSTD; + ZSTD; } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/DictionaryBatch.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/DictionaryBatch.java index 597bad2acff..418ecdc559a 100644 --- 
a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/DictionaryBatch.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/DictionaryBatch.java @@ -7,9 +7,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Message_generated.org.apache.arrow.flatbuf.DictionaryBatch", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.flatbuf.Message_generated.org.apache.arrow.flatbuf.DictionaryBatch", + namespace = JsPackage.GLOBAL) public class DictionaryBatch { public static native void addData(Builder builder, double dataOffset); @@ -19,13 +19,12 @@ public class DictionaryBatch { public static native double endDictionaryBatch(Builder builder); - public static native DictionaryBatch getRootAsDictionaryBatch(ByteBuffer bb, - DictionaryBatch obj); + public static native DictionaryBatch getRootAsDictionaryBatch(ByteBuffer bb, DictionaryBatch obj); public static native DictionaryBatch getRootAsDictionaryBatch(ByteBuffer bb); public static native DictionaryBatch getSizePrefixedRootAsDictionaryBatch( - ByteBuffer bb, DictionaryBatch obj); + ByteBuffer bb, DictionaryBatch obj); public static native DictionaryBatch getSizePrefixedRootAsDictionaryBatch(ByteBuffer bb); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/FieldNode.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/FieldNode.java index 6b89f950b6f..05642159ef8 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/FieldNode.java +++ 
b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/FieldNode.java @@ -7,9 +7,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Message_generated.org.apache.arrow.flatbuf.FieldNode", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.flatbuf.Message_generated.org.apache.arrow.flatbuf.FieldNode", + namespace = JsPackage.GLOBAL) public class FieldNode { public static native double createFieldNode(Builder builder, Long length, Long null_count); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/Message.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/Message.java index eff03ad7c52..a8879f9001c 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/Message.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/Message.java @@ -11,9 +11,9 @@ import jsinterop.base.Js; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Message_generated.org.apache.arrow.flatbuf.Message", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.flatbuf.Message_generated.org.apache.arrow.flatbuf.Message", + namespace = JsPackage.GLOBAL) public class Message { public static native void addBodyLength(Builder builder, Long bodyLength); @@ -33,12 +33,12 @@ public static final double createCustomMetadataVector(Builder builder, double[] } public static native double createMessage( - Builder builder, - int version, - int headerType, - double headerOffset, - Long bodyLength, - double 
customMetadataOffset); + Builder builder, + int version, + int headerType, + double headerOffset, + Long bodyLength, + double customMetadataOffset); public static native double endMessage(Builder builder); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/MessageHeader.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/MessageHeader.java index 514c67bd2d9..ced1ac28405 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/MessageHeader.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/MessageHeader.java @@ -4,12 +4,12 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Message_generated.org.apache.arrow.flatbuf.MessageHeader", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.flatbuf.Message_generated.org.apache.arrow.flatbuf.MessageHeader", + namespace = JsPackage.GLOBAL) public class MessageHeader { public static int DictionaryBatch, - NONE, - RecordBatch, - Schema; + NONE, + RecordBatch, + Schema; } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/RecordBatch.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/RecordBatch.java index 642f3cd7880..40647dfcc8d 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/RecordBatch.java +++ 
b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/message_generated/org/apache/arrow/flatbuf/RecordBatch.java @@ -8,9 +8,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Message_generated.org.apache.arrow.flatbuf.RecordBatch", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.flatbuf.Message_generated.org.apache.arrow.flatbuf.RecordBatch", + namespace = JsPackage.GLOBAL) public class RecordBatch { public static native void addBuffers(Builder builder, double buffersOffset); @@ -26,8 +26,7 @@ public class RecordBatch { public static native RecordBatch getRootAsRecordBatch(ByteBuffer bb); - public static native RecordBatch getSizePrefixedRootAsRecordBatch(ByteBuffer bb, - RecordBatch obj); + public static native RecordBatch getSizePrefixedRootAsRecordBatch(ByteBuffer bb, RecordBatch obj); public static native RecordBatch getSizePrefixedRootAsRecordBatch(ByteBuffer bb); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/Flatbuf.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/Flatbuf.java index 2de04180f6b..6664b77cd7e 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/Flatbuf.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/Flatbuf.java @@ -28,9 +28,9 @@ import jsinterop.base.Js; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf", + namespace = JsPackage.GLOBAL) 
public class Flatbuf { @JsFunction public interface UnionListToTypeAccessorFn { @@ -472,132 +472,110 @@ default boolean isUtf8() { @JsOverlay default Flatbuf.UnionListToTypeAccessorFn.UnionType onInvoke(double p0, Binary p1) { - return onInvoke(p0, - Js.uncheckedCast(p1)); + return onInvoke(p0, Js.uncheckedCast(p1)); } @JsOverlay default Flatbuf.UnionListToTypeAccessorFn.UnionType onInvoke(double p0, Bool p1) { - return onInvoke(p0, - Js.uncheckedCast(p1)); + return onInvoke(p0, Js.uncheckedCast(p1)); } @JsOverlay default Flatbuf.UnionListToTypeAccessorFn.UnionType onInvoke(double p0, Date p1) { - return onInvoke(p0, - Js.uncheckedCast(p1)); + return onInvoke(p0, Js.uncheckedCast(p1)); } @JsOverlay default Flatbuf.UnionListToTypeAccessorFn.UnionType onInvoke(double p0, Decimal p1) { - return onInvoke(p0, - Js.uncheckedCast(p1)); + return onInvoke(p0, Js.uncheckedCast(p1)); } @JsOverlay default Flatbuf.UnionListToTypeAccessorFn.UnionType onInvoke(double p0, Duration p1) { - return onInvoke(p0, - Js.uncheckedCast(p1)); + return onInvoke(p0, Js.uncheckedCast(p1)); } @JsOverlay - default Flatbuf.UnionListToTypeAccessorFn.UnionType onInvoke(double p0, - FixedSizeBinary p1) { - return onInvoke(p0, - Js.uncheckedCast(p1)); + default Flatbuf.UnionListToTypeAccessorFn.UnionType onInvoke(double p0, FixedSizeBinary p1) { + return onInvoke(p0, Js.uncheckedCast(p1)); } @JsOverlay default Flatbuf.UnionListToTypeAccessorFn.UnionType onInvoke(double p0, FixedSizeList p1) { - return onInvoke(p0, - Js.uncheckedCast(p1)); + return onInvoke(p0, Js.uncheckedCast(p1)); } @JsOverlay default Flatbuf.UnionListToTypeAccessorFn.UnionType onInvoke(double p0, FloatingPoint p1) { - return onInvoke(p0, - Js.uncheckedCast(p1)); + return onInvoke(p0, Js.uncheckedCast(p1)); } @JsOverlay default Flatbuf.UnionListToTypeAccessorFn.UnionType onInvoke(double p0, Int p1) { - return onInvoke(p0, - Js.uncheckedCast(p1)); + return onInvoke(p0, Js.uncheckedCast(p1)); } @JsOverlay default 
Flatbuf.UnionListToTypeAccessorFn.UnionType onInvoke(double p0, Interval p1) { - return onInvoke(p0, - Js.uncheckedCast(p1)); + return onInvoke(p0, Js.uncheckedCast(p1)); } @JsOverlay default Flatbuf.UnionListToTypeAccessorFn.UnionType onInvoke(double p0, LargeBinary p1) { - return onInvoke(p0, - Js.uncheckedCast(p1)); + return onInvoke(p0, Js.uncheckedCast(p1)); } @JsOverlay default Flatbuf.UnionListToTypeAccessorFn.UnionType onInvoke(double p0, LargeList p1) { - return onInvoke(p0, - Js.uncheckedCast(p1)); + return onInvoke(p0, Js.uncheckedCast(p1)); } @JsOverlay default Flatbuf.UnionListToTypeAccessorFn.UnionType onInvoke(double p0, LargeUtf8 p1) { - return onInvoke(p0, - Js.uncheckedCast(p1)); + return onInvoke(p0, Js.uncheckedCast(p1)); } @JsOverlay default Flatbuf.UnionListToTypeAccessorFn.UnionType onInvoke(double p0, List p1) { - return onInvoke(p0, - Js.uncheckedCast(p1)); + return onInvoke(p0, Js.uncheckedCast(p1)); } @JsOverlay default Flatbuf.UnionListToTypeAccessorFn.UnionType onInvoke(double p0, Map p1) { - return onInvoke(p0, - Js.uncheckedCast(p1)); + return onInvoke(p0, Js.uncheckedCast(p1)); } @JsOverlay default Flatbuf.UnionListToTypeAccessorFn.UnionType onInvoke(double p0, Null p1) { - return onInvoke(p0, - Js.uncheckedCast(p1)); + return onInvoke(p0, Js.uncheckedCast(p1)); } Flatbuf.UnionListToTypeAccessorFn.UnionType onInvoke( - double p0, Flatbuf.UnionListToTypeAccessorFn.P1UnionType p1); + double p0, Flatbuf.UnionListToTypeAccessorFn.P1UnionType p1); @JsOverlay default Flatbuf.UnionListToTypeAccessorFn.UnionType onInvoke(double p0, Struct_ p1) { - return onInvoke(p0, - Js.uncheckedCast(p1)); + return onInvoke(p0, Js.uncheckedCast(p1)); } @JsOverlay default Flatbuf.UnionListToTypeAccessorFn.UnionType onInvoke(double p0, Time p1) { - return onInvoke(p0, - Js.uncheckedCast(p1)); + return onInvoke(p0, Js.uncheckedCast(p1)); } @JsOverlay default Flatbuf.UnionListToTypeAccessorFn.UnionType onInvoke(double p0, Timestamp p1) { - return onInvoke(p0, 
- Js.uncheckedCast(p1)); + return onInvoke(p0, Js.uncheckedCast(p1)); } @JsOverlay default Flatbuf.UnionListToTypeAccessorFn.UnionType onInvoke(double p0, Union p1) { - return onInvoke(p0, - Js.uncheckedCast(p1)); + return onInvoke(p0, Js.uncheckedCast(p1)); } @JsOverlay default Flatbuf.UnionListToTypeAccessorFn.UnionType onInvoke(double p0, Utf8 p1) { - return onInvoke(p0, - Js.uncheckedCast(p1)); + return onInvoke(p0, Js.uncheckedCast(p1)); } } @@ -1337,8 +1315,7 @@ default Flatbuf.UnionToTypeAccessorFn.UnionType onInvoke(Null p0) { return onInvoke(Js.uncheckedCast(p0)); } - Flatbuf.UnionToTypeAccessorFn.UnionType onInvoke( - Flatbuf.UnionToTypeAccessorFn.P0UnionType p0); + Flatbuf.UnionToTypeAccessorFn.UnionType onInvoke(Flatbuf.UnionToTypeAccessorFn.P0UnionType p0); @JsOverlay default Flatbuf.UnionToTypeAccessorFn.UnionType onInvoke(Struct_ p0) { @@ -1585,8 +1562,8 @@ default boolean isUtf8() { } public static native Flatbuf.UnionListToTypeUnionType unionListToType( - int type, Flatbuf.UnionListToTypeAccessorFn accessor, double index); + int type, Flatbuf.UnionListToTypeAccessorFn accessor, double index); public static native Flatbuf.UnionToTypeUnionType unionToType( - int type, Flatbuf.UnionToTypeAccessorFn accessor); + int type, Flatbuf.UnionToTypeAccessorFn accessor); } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Binary.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Binary.java index 72fa077eee0..516077d1e7b 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Binary.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Binary.java @@ -6,9 +6,9 @@ import 
jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Binary", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Binary", + namespace = JsPackage.GLOBAL) public class Binary { public static native double createBinary(Builder builder); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Bool.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Bool.java index 35efc84946d..34227896fe9 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Bool.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Bool.java @@ -6,9 +6,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Bool", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Bool", + namespace = JsPackage.GLOBAL) public class Bool { public static native double createBool(Builder builder); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Buffer.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Buffer.java index d3eb5e00b99..7ad8d7192bc 100644 --- 
a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Buffer.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Buffer.java @@ -7,9 +7,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Buffer", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Buffer", + namespace = JsPackage.GLOBAL) public class Buffer { public static native double createBuffer(Builder builder, Long offset, Long length); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Date.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Date.java index f1539485fde..54f0893f4d7 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Date.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Date.java @@ -6,9 +6,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Date", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Date", + namespace = JsPackage.GLOBAL) public class Date { public static native void addUnit(Builder builder, int unit); diff --git 
a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/DateUnit.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/DateUnit.java index 0ff945b3e00..8cbf62db0c6 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/DateUnit.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/DateUnit.java @@ -4,10 +4,10 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.DateUnit", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.DateUnit", + namespace = JsPackage.GLOBAL) public class DateUnit { public static int DAY, - MILLISECOND; + MILLISECOND; } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Decimal.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Decimal.java index 6b9d5551cbe..c0f5685011a 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Decimal.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Decimal.java @@ -6,9 +6,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Decimal", - namespace = JsPackage.GLOBAL) + isNative = true, 
+ name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Decimal", + namespace = JsPackage.GLOBAL) public class Decimal { public static native void addBitWidth(Builder builder, double bitWidth); @@ -17,7 +17,7 @@ public class Decimal { public static native void addScale(Builder builder, double scale); public static native double createDecimal( - Builder builder, double precision, double scale, double bitWidth); + Builder builder, double precision, double scale, double bitWidth); public static native double endDecimal(Builder builder); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/DictionaryEncoding.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/DictionaryEncoding.java index 724233cd6b2..b67d921f721 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/DictionaryEncoding.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/DictionaryEncoding.java @@ -7,9 +7,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.DictionaryEncoding", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.DictionaryEncoding", + namespace = JsPackage.GLOBAL) public class DictionaryEncoding { public static native void addDictionaryKind(Builder builder, int dictionaryKind); @@ -22,12 +22,12 @@ public class DictionaryEncoding { public static native double endDictionaryEncoding(Builder builder); public static native DictionaryEncoding getRootAsDictionaryEncoding( - ByteBuffer bb, 
DictionaryEncoding obj); + ByteBuffer bb, DictionaryEncoding obj); public static native DictionaryEncoding getRootAsDictionaryEncoding(ByteBuffer bb); public static native DictionaryEncoding getSizePrefixedRootAsDictionaryEncoding( - ByteBuffer bb, DictionaryEncoding obj); + ByteBuffer bb, DictionaryEncoding obj); public static native DictionaryEncoding getSizePrefixedRootAsDictionaryEncoding(ByteBuffer bb); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/DictionaryKind.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/DictionaryKind.java index dc554241b57..a19d98c9545 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/DictionaryKind.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/DictionaryKind.java @@ -4,9 +4,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.DictionaryKind", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.DictionaryKind", + namespace = JsPackage.GLOBAL) public class DictionaryKind { public static int DenseArray; } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Duration.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Duration.java index c826164f077..651ec5dcf53 100644 --- 
a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Duration.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Duration.java @@ -6,9 +6,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Duration", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Duration", + namespace = JsPackage.GLOBAL) public class Duration { public static native void addUnit(Builder builder, int unit); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Endianness.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Endianness.java index 73388ac6b7f..ccf35bffa95 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Endianness.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Endianness.java @@ -4,10 +4,10 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Endianness", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Endianness", + namespace = JsPackage.GLOBAL) public class Endianness { public static int Big, - Little; + Little; } diff --git 
a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Feature.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Feature.java index f5d7a8d3d53..43067bfe48c 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Feature.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Feature.java @@ -4,11 +4,11 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Feature", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Feature", + namespace = JsPackage.GLOBAL) public class Feature { public static int COMPRESSED_BODY, - DICTIONARY_REPLACEMENT, - UNUSED; + DICTIONARY_REPLACEMENT, + UNUSED; } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Field.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Field.java index 11a928078b1..7e3873f0b40 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Field.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Field.java @@ -11,9 +11,9 @@ import jsinterop.base.Js; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Field", - namespace = 
JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Field", + namespace = JsPackage.GLOBAL) public class Field { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface NameUnionType { diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/FixedSizeBinary.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/FixedSizeBinary.java index 2186c39ad82..a995bba5138 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/FixedSizeBinary.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/FixedSizeBinary.java @@ -6,9 +6,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.FixedSizeBinary", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.FixedSizeBinary", + namespace = JsPackage.GLOBAL) public class FixedSizeBinary { public static native void addByteWidth(Builder builder, double byteWidth); @@ -16,13 +16,12 @@ public class FixedSizeBinary { public static native double endFixedSizeBinary(Builder builder); - public static native FixedSizeBinary getRootAsFixedSizeBinary(ByteBuffer bb, - FixedSizeBinary obj); + public static native FixedSizeBinary getRootAsFixedSizeBinary(ByteBuffer bb, FixedSizeBinary obj); public static native FixedSizeBinary getRootAsFixedSizeBinary(ByteBuffer bb); public static native FixedSizeBinary getSizePrefixedRootAsFixedSizeBinary( - ByteBuffer bb, FixedSizeBinary obj); + ByteBuffer bb, 
FixedSizeBinary obj); public static native FixedSizeBinary getSizePrefixedRootAsFixedSizeBinary(ByteBuffer bb); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/FixedSizeList.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/FixedSizeList.java index 67f16cc45a0..d220c3867c1 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/FixedSizeList.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/FixedSizeList.java @@ -6,9 +6,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.FixedSizeList", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.FixedSizeList", + namespace = JsPackage.GLOBAL) public class FixedSizeList { public static native void addListSize(Builder builder, double listSize); @@ -21,7 +21,7 @@ public class FixedSizeList { public static native FixedSizeList getRootAsFixedSizeList(ByteBuffer bb); public static native FixedSizeList getSizePrefixedRootAsFixedSizeList( - ByteBuffer bb, FixedSizeList obj); + ByteBuffer bb, FixedSizeList obj); public static native FixedSizeList getSizePrefixedRootAsFixedSizeList(ByteBuffer bb); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/FloatingPoint.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/FloatingPoint.java index ad132475030..067bd41cabf 100644 --- 
a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/FloatingPoint.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/FloatingPoint.java @@ -6,9 +6,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.FloatingPoint", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.FloatingPoint", + namespace = JsPackage.GLOBAL) public class FloatingPoint { public static native void addPrecision(Builder builder, int precision); @@ -21,7 +21,7 @@ public class FloatingPoint { public static native FloatingPoint getRootAsFloatingPoint(ByteBuffer bb); public static native FloatingPoint getSizePrefixedRootAsFloatingPoint( - ByteBuffer bb, FloatingPoint obj); + ByteBuffer bb, FloatingPoint obj); public static native FloatingPoint getSizePrefixedRootAsFloatingPoint(ByteBuffer bb); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Int.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Int.java index c7787776c4e..e2899ae06d9 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Int.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Int.java @@ -6,9 +6,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Int", - namespace = 
JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Int", + namespace = JsPackage.GLOBAL) public class Int { public static native void addBitWidth(Builder builder, double bitWidth); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Interval.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Interval.java index 3b27e242651..46d8fa57345 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Interval.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Interval.java @@ -6,9 +6,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Interval", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Interval", + namespace = JsPackage.GLOBAL) public class Interval { public static native void addUnit(Builder builder, int unit); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/IntervalUnit.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/IntervalUnit.java index 418b9470c00..3c38846693a 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/IntervalUnit.java +++ 
b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/IntervalUnit.java @@ -4,10 +4,10 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.IntervalUnit", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.IntervalUnit", + namespace = JsPackage.GLOBAL) public class IntervalUnit { public static int DAY_TIME, - YEAR_MONTH; + YEAR_MONTH; } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/KeyValue.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/KeyValue.java index 6cc3e180657..81c93e8c240 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/KeyValue.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/KeyValue.java @@ -10,9 +10,9 @@ import jsinterop.base.Js; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.KeyValue", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.KeyValue", + namespace = JsPackage.GLOBAL) public class KeyValue { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface KeyUnionType { @@ -74,8 +74,7 @@ default boolean isUint8Array() { public static native void addValue(Builder builder, double valueOffset); - public static native double createKeyValue(Builder builder, double keyOffset, - double valueOffset); + public static 
native double createKeyValue(Builder builder, double keyOffset, double valueOffset); public static native double endKeyValue(Builder builder); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/LargeBinary.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/LargeBinary.java index 9572f717259..a04e70ea884 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/LargeBinary.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/LargeBinary.java @@ -6,9 +6,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.LargeBinary", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.LargeBinary", + namespace = JsPackage.GLOBAL) public class LargeBinary { public static native double createLargeBinary(Builder builder); @@ -18,8 +18,7 @@ public class LargeBinary { public static native LargeBinary getRootAsLargeBinary(ByteBuffer bb); - public static native LargeBinary getSizePrefixedRootAsLargeBinary(ByteBuffer bb, - LargeBinary obj); + public static native LargeBinary getSizePrefixedRootAsLargeBinary(ByteBuffer bb, LargeBinary obj); public static native LargeBinary getSizePrefixedRootAsLargeBinary(ByteBuffer bb); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/LargeList.java 
b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/LargeList.java index 65a376244fb..3b25ae16799 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/LargeList.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/LargeList.java @@ -6,9 +6,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.LargeList", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.LargeList", + namespace = JsPackage.GLOBAL) public class LargeList { public static native double createLargeList(Builder builder); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/LargeUtf8.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/LargeUtf8.java index 46aa07c93c5..b2bbe827d73 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/LargeUtf8.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/LargeUtf8.java @@ -6,9 +6,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.LargeUtf8", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.LargeUtf8", + namespace = JsPackage.GLOBAL) 
public class LargeUtf8 { public static native double createLargeUtf8(Builder builder); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/List.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/List.java index e9b1bc4f8b0..23e67702bb8 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/List.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/List.java @@ -6,9 +6,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.List", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.List", + namespace = JsPackage.GLOBAL) public class List { public static native double createList(Builder builder); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Map.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Map.java index b39d387a159..e2308b8d66b 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Map.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Map.java @@ -6,9 +6,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Map", 
- namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Map", + namespace = JsPackage.GLOBAL) public class Map { public static native void addKeysSorted(Builder builder, boolean keysSorted); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/MetadataVersion.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/MetadataVersion.java index 44162d04109..fcb2ac9a349 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/MetadataVersion.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/MetadataVersion.java @@ -4,13 +4,13 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.MetadataVersion", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.MetadataVersion", + namespace = JsPackage.GLOBAL) public class MetadataVersion { public static int V1, - V2, - V3, - V4, - V5; + V2, + V3, + V4, + V5; } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Null.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Null.java index e46412eeadb..57436cb6ed4 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Null.java +++ 
b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Null.java @@ -6,9 +6,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Null", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Null", + namespace = JsPackage.GLOBAL) public class Null { public static native double createNull(Builder builder); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Precision.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Precision.java index 993d8ec0b49..a9f348804e4 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Precision.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Precision.java @@ -4,11 +4,11 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Precision", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Precision", + namespace = JsPackage.GLOBAL) public class Precision { public static int DOUBLE, - HALF, - SINGLE; + HALF, + SINGLE; } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Schema.java 
b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Schema.java index feaf33b1a0d..cb7e7b72fca 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Schema.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Schema.java @@ -10,9 +10,9 @@ import jsinterop.base.Js; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Schema", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Schema", + namespace = JsPackage.GLOBAL) public class Schema { public static native void addCustomMetadata(Builder builder, double customMetadataOffset); @@ -44,11 +44,11 @@ public static final double createFieldsVector(Builder builder, double[] data) { } public static native double createSchema( - Builder builder, - int endianness, - double fieldsOffset, - double customMetadataOffset, - double featuresOffset); + Builder builder, + int endianness, + double fieldsOffset, + double customMetadataOffset, + double featuresOffset); public static native double endSchema(Builder builder); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Struct_.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Struct_.java index 260ade0d5e7..a14c1dd704a 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Struct_.java +++ 
b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Struct_.java @@ -6,9 +6,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Struct_", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Struct_", + namespace = JsPackage.GLOBAL) public class Struct_ { public static native double createStruct_(Builder builder); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Time.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Time.java index 3af74277170..3334ffa294c 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Time.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Time.java @@ -6,9 +6,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Time", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Time", + namespace = JsPackage.GLOBAL) public class Time { public static native void addBitWidth(Builder builder, double bitWidth); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/TimeUnit.java 
b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/TimeUnit.java index 8ea25d8a278..ff3c4d2b854 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/TimeUnit.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/TimeUnit.java @@ -4,12 +4,12 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.TimeUnit", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.TimeUnit", + namespace = JsPackage.GLOBAL) public class TimeUnit { public static int MICROSECOND, - MILLISECOND, - NANOSECOND, - SECOND; + MILLISECOND, + NANOSECOND, + SECOND; } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Timestamp.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Timestamp.java index 2f3634e6b29..ecf1e951624 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Timestamp.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Timestamp.java @@ -10,9 +10,9 @@ import jsinterop.base.Js; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Timestamp", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Timestamp", + 
namespace = JsPackage.GLOBAL) public class Timestamp { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface TimezoneUnionType { @@ -47,7 +47,7 @@ default boolean isUint8Array() { public static native void addUnit(Builder builder, int unit); public static native double createTimestamp( - Builder builder, int unit, double timezoneOffset); + Builder builder, int unit, double timezoneOffset); public static native double endTimestamp(Builder builder); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Type.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Type.java index cb289350ddb..d19c6edcd3e 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Type.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Type.java @@ -4,30 +4,30 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Type", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Type", + namespace = JsPackage.GLOBAL) public class Type { public static int Binary, - Bool, - Date, - Decimal, - Duration, - FixedSizeBinary, - FixedSizeList, - FloatingPoint, - Int, - Interval, - LargeBinary, - LargeList, - LargeUtf8, - List, - Map, - NONE, - Null, - Struct_, - Time, - Timestamp, - Union, - Utf8; + Bool, + Date, + Decimal, + Duration, + FixedSizeBinary, + FixedSizeList, + FloatingPoint, + Int, + Interval, + LargeBinary, + LargeList, + LargeUtf8, + List, + Map, + NONE, + Null, + Struct_, + Time, + Timestamp, + Union, + 
Utf8; } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Union.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Union.java index 836b7152144..0d34d856fcd 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Union.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Union.java @@ -11,9 +11,9 @@ import jsinterop.base.Js; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Union", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Union", + namespace = JsPackage.GLOBAL) public class Union { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface CreateTypeIdsVectorDataUnionType { @@ -59,27 +59,27 @@ default boolean isUint8Array() { @Deprecated public static native double createTypeIdsVector( - Builder builder, Union.CreateTypeIdsVectorDataUnionType data); + Builder builder, Union.CreateTypeIdsVectorDataUnionType data); @JsOverlay @Deprecated public static final double createTypeIdsVector(Builder builder, Int32Array data) { return createTypeIdsVector( - builder, Js.uncheckedCast(data)); + builder, Js.uncheckedCast(data)); } @JsOverlay @Deprecated public static final double createTypeIdsVector(Builder builder, JsArray data) { return createTypeIdsVector( - builder, Js.uncheckedCast(data)); + builder, Js.uncheckedCast(data)); } @JsOverlay @Deprecated public static final double createTypeIdsVector(Builder builder, Uint8Array data) { return createTypeIdsVector( - builder, Js.uncheckedCast(data)); + builder, 
Js.uncheckedCast(data)); } @JsOverlay diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/UnionMode.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/UnionMode.java index bd5c2ab9a99..55027107281 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/UnionMode.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/UnionMode.java @@ -4,10 +4,10 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.UnionMode", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.UnionMode", + namespace = JsPackage.GLOBAL) public class UnionMode { public static int Dense, - Sparse; + Sparse; } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Utf8.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Utf8.java index 608267396c9..ee590697382 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Utf8.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/flatbuf/schema_generated/org/apache/arrow/flatbuf/Utf8.java @@ -6,9 +6,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Utf8", - namespace = 
JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.flatbuf.Schema_generated.org.apache.arrow.flatbuf.Utf8", + namespace = JsPackage.GLOBAL) public class Utf8 { public static native double createUtf8(Builder builder); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/browserflight_pb/BrowserNextResponse.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/browserflight_pb/BrowserNextResponse.java index 3c359b7f928..1c69cfc9d74 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/browserflight_pb/BrowserNextResponse.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/browserflight_pb/BrowserNextResponse.java @@ -9,9 +9,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.protocol.BrowserFlight_pb.BrowserNextResponse", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.protocol.BrowserFlight_pb.BrowserNextResponse", + namespace = JsPackage.GLOBAL) public class BrowserNextResponse { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface GetAppMetadataUnionType { @@ -108,21 +108,20 @@ static BrowserNextResponse.ToObjectReturnType create() { BrowserNextResponse.ToObjectReturnType.GetAppMetadataUnionType getAppMetadata(); @JsProperty - void setAppMetadata( - BrowserNextResponse.ToObjectReturnType.GetAppMetadataUnionType appMetadata); + void setAppMetadata(BrowserNextResponse.ToObjectReturnType.GetAppMetadataUnionType appMetadata); @JsOverlay default void setAppMetadata(String appMetadata) { setAppMetadata( - Js.uncheckedCast( - appMetadata)); + Js.uncheckedCast( + appMetadata)); } @JsOverlay default void setAppMetadata(Uint8Array appMetadata) { setAppMetadata( - Js.uncheckedCast( - appMetadata)); + Js.uncheckedCast( 
+ appMetadata)); } } @@ -166,32 +165,32 @@ static BrowserNextResponse.ToObjectReturnType0 create() { @JsProperty void setAppMetadata( - BrowserNextResponse.ToObjectReturnType0.GetAppMetadataUnionType appMetadata); + BrowserNextResponse.ToObjectReturnType0.GetAppMetadataUnionType appMetadata); @JsOverlay default void setAppMetadata(String appMetadata) { setAppMetadata( - Js.uncheckedCast( - appMetadata)); + Js.uncheckedCast( + appMetadata)); } @JsOverlay default void setAppMetadata(Uint8Array appMetadata) { setAppMetadata( - Js.uncheckedCast( - appMetadata)); + Js.uncheckedCast( + appMetadata)); } } public static native BrowserNextResponse deserializeBinary(Uint8Array bytes); public static native BrowserNextResponse deserializeBinaryFromReader( - BrowserNextResponse message, Object reader); + BrowserNextResponse message, Object reader); public static native void serializeBinaryToWriter(BrowserNextResponse message, Object writer); public static native BrowserNextResponse.ToObjectReturnType toObject( - boolean includeInstance, BrowserNextResponse msg); + boolean includeInstance, BrowserNextResponse msg); public native BrowserNextResponse.GetAppMetadataUnionType getAppMetadata(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/browserflight_pb_service/BidirectionalStream.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/browserflight_pb_service/BidirectionalStream.java index 6ca81c28a4d..9e9064e966d 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/browserflight_pb_service/BidirectionalStream.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/browserflight_pb_service/BidirectionalStream.java @@ -5,9 +5,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = 
"dhinternal.arrow.flight.protocol.BrowserFlight_pb_service.BidirectionalStream", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.protocol.BrowserFlight_pb_service.BidirectionalStream", + namespace = JsPackage.GLOBAL) public interface BidirectionalStream { void cancel(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/browserflight_pb_service/BrowserFlightService.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/browserflight_pb_service/BrowserFlightService.java index 4e0fcb02519..40a44e619cb 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/browserflight_pb_service/BrowserFlightService.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/browserflight_pb_service/BrowserFlightService.java @@ -8,9 +8,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.protocol.BrowserFlight_pb_service.BrowserFlightService", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.protocol.BrowserFlight_pb_service.BrowserFlightService", + namespace = JsPackage.GLOBAL) public class BrowserFlightService { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface NextDoExchangeType { diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/browserflight_pb_service/BrowserFlightServiceClient.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/browserflight_pb_service/BrowserFlightServiceClient.java index 636ce9a3eac..4fc10a75fa8 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/browserflight_pb_service/BrowserFlightServiceClient.java +++ 
b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/browserflight_pb_service/BrowserFlightServiceClient.java @@ -15,9 +15,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.protocol.BrowserFlight_pb_service.BrowserFlightServiceClient", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.protocol.BrowserFlight_pb_service.BrowserFlightServiceClient", + namespace = JsPackage.GLOBAL) public class BrowserFlightServiceClient { @JsFunction public interface NextDoExchangeCallbackFn { @@ -48,7 +48,7 @@ static BrowserFlightServiceClient.NextDoExchangeCallbackFn.P0Type create() { } void onInvoke( - BrowserFlightServiceClient.NextDoExchangeCallbackFn.P0Type p0, BrowserNextResponse p1); + BrowserFlightServiceClient.NextDoExchangeCallbackFn.P0Type p0, BrowserNextResponse p1); } @JsFunction @@ -80,8 +80,8 @@ static BrowserFlightServiceClient.NextDoExchangeMetadata_or_callbackFn.P0Type cr } void onInvoke( - BrowserFlightServiceClient.NextDoExchangeMetadata_or_callbackFn.P0Type p0, - BrowserNextResponse p1); + BrowserFlightServiceClient.NextDoExchangeMetadata_or_callbackFn.P0Type p0, + BrowserNextResponse p1); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -140,8 +140,7 @@ static BrowserFlightServiceClient.NextDoPutCallbackFn.P0Type create() { void setMetadata(BrowserHeaders metadata); } - void onInvoke(BrowserFlightServiceClient.NextDoPutCallbackFn.P0Type p0, - BrowserNextResponse p1); + void onInvoke(BrowserFlightServiceClient.NextDoPutCallbackFn.P0Type p0, BrowserNextResponse p1); } @JsFunction @@ -173,8 +172,8 @@ static BrowserFlightServiceClient.NextDoPutMetadata_or_callbackFn.P0Type create( } void onInvoke( - BrowserFlightServiceClient.NextDoPutMetadata_or_callbackFn.P0Type p0, - BrowserNextResponse p1); + BrowserFlightServiceClient.NextDoPutMetadata_or_callbackFn.P0Type p0, + BrowserNextResponse p1); } @JsType(isNative 
= true, name = "?", namespace = JsPackage.GLOBAL) @@ -234,7 +233,7 @@ static BrowserFlightServiceClient.NextHandshakeCallbackFn.P0Type create() { } void onInvoke( - BrowserFlightServiceClient.NextHandshakeCallbackFn.P0Type p0, BrowserNextResponse p1); + BrowserFlightServiceClient.NextHandshakeCallbackFn.P0Type p0, BrowserNextResponse p1); } @JsFunction @@ -266,8 +265,8 @@ static BrowserFlightServiceClient.NextHandshakeMetadata_or_callbackFn.P0Type cre } void onInvoke( - BrowserFlightServiceClient.NextHandshakeMetadata_or_callbackFn.P0Type p0, - BrowserNextResponse p1); + BrowserFlightServiceClient.NextHandshakeMetadata_or_callbackFn.P0Type p0, + BrowserNextResponse p1); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -306,172 +305,172 @@ public BrowserFlightServiceClient(String serviceHost) {} @JsOverlay public final UnaryResponse nextDoExchange( - FlightData requestMessage, - BrowserHeaders metadata_or_callback, - BrowserFlightServiceClient.NextDoExchangeCallbackFn callback) { + FlightData requestMessage, + BrowserHeaders metadata_or_callback, + BrowserFlightServiceClient.NextDoExchangeCallbackFn callback) { return nextDoExchange( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse nextDoExchange( - FlightData requestMessage, BrowserHeaders metadata_or_callback) { + FlightData requestMessage, BrowserHeaders metadata_or_callback) { return nextDoExchange( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } @JsOverlay public final UnaryResponse nextDoExchange( - FlightData requestMessage, - BrowserFlightServiceClient.NextDoExchangeMetadata_or_callbackFn metadata_or_callback, - BrowserFlightServiceClient.NextDoExchangeCallbackFn callback) { + FlightData requestMessage, + 
BrowserFlightServiceClient.NextDoExchangeMetadata_or_callbackFn metadata_or_callback, + BrowserFlightServiceClient.NextDoExchangeCallbackFn callback) { return nextDoExchange( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse nextDoExchange( - FlightData requestMessage, - BrowserFlightServiceClient.NextDoExchangeMetadata_or_callbackFn metadata_or_callback) { + FlightData requestMessage, + BrowserFlightServiceClient.NextDoExchangeMetadata_or_callbackFn metadata_or_callback) { return nextDoExchange( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } public native UnaryResponse nextDoExchange( - FlightData requestMessage, - BrowserFlightServiceClient.NextDoExchangeMetadata_or_callbackUnionType metadata_or_callback, - BrowserFlightServiceClient.NextDoExchangeCallbackFn callback); + FlightData requestMessage, + BrowserFlightServiceClient.NextDoExchangeMetadata_or_callbackUnionType metadata_or_callback, + BrowserFlightServiceClient.NextDoExchangeCallbackFn callback); public native UnaryResponse nextDoExchange( - FlightData requestMessage, - BrowserFlightServiceClient.NextDoExchangeMetadata_or_callbackUnionType metadata_or_callback); + FlightData requestMessage, + BrowserFlightServiceClient.NextDoExchangeMetadata_or_callbackUnionType metadata_or_callback); @JsOverlay public final UnaryResponse nextDoPut( - FlightData requestMessage, - BrowserHeaders metadata_or_callback, - BrowserFlightServiceClient.NextDoPutCallbackFn callback) { + FlightData requestMessage, + BrowserHeaders metadata_or_callback, + BrowserFlightServiceClient.NextDoPutCallbackFn callback) { return nextDoPut( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final 
UnaryResponse nextDoPut( - FlightData requestMessage, BrowserHeaders metadata_or_callback) { + FlightData requestMessage, BrowserHeaders metadata_or_callback) { return nextDoPut( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } @JsOverlay public final UnaryResponse nextDoPut( - FlightData requestMessage, - BrowserFlightServiceClient.NextDoPutMetadata_or_callbackFn metadata_or_callback, - BrowserFlightServiceClient.NextDoPutCallbackFn callback) { + FlightData requestMessage, + BrowserFlightServiceClient.NextDoPutMetadata_or_callbackFn metadata_or_callback, + BrowserFlightServiceClient.NextDoPutCallbackFn callback) { return nextDoPut( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse nextDoPut( - FlightData requestMessage, - BrowserFlightServiceClient.NextDoPutMetadata_or_callbackFn metadata_or_callback) { + FlightData requestMessage, + BrowserFlightServiceClient.NextDoPutMetadata_or_callbackFn metadata_or_callback) { return nextDoPut( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } public native UnaryResponse nextDoPut( - FlightData requestMessage, - BrowserFlightServiceClient.NextDoPutMetadata_or_callbackUnionType metadata_or_callback, - BrowserFlightServiceClient.NextDoPutCallbackFn callback); + FlightData requestMessage, + BrowserFlightServiceClient.NextDoPutMetadata_or_callbackUnionType metadata_or_callback, + BrowserFlightServiceClient.NextDoPutCallbackFn callback); public native UnaryResponse nextDoPut( - FlightData requestMessage, - BrowserFlightServiceClient.NextDoPutMetadata_or_callbackUnionType metadata_or_callback); + FlightData requestMessage, + BrowserFlightServiceClient.NextDoPutMetadata_or_callbackUnionType metadata_or_callback); @JsOverlay public 
final UnaryResponse nextHandshake( - HandshakeRequest requestMessage, - BrowserHeaders metadata_or_callback, - BrowserFlightServiceClient.NextHandshakeCallbackFn callback) { + HandshakeRequest requestMessage, + BrowserHeaders metadata_or_callback, + BrowserFlightServiceClient.NextHandshakeCallbackFn callback) { return nextHandshake( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse nextHandshake( - HandshakeRequest requestMessage, BrowserHeaders metadata_or_callback) { + HandshakeRequest requestMessage, BrowserHeaders metadata_or_callback) { return nextHandshake( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } @JsOverlay public final UnaryResponse nextHandshake( - HandshakeRequest requestMessage, - BrowserFlightServiceClient.NextHandshakeMetadata_or_callbackFn metadata_or_callback, - BrowserFlightServiceClient.NextHandshakeCallbackFn callback) { + HandshakeRequest requestMessage, + BrowserFlightServiceClient.NextHandshakeMetadata_or_callbackFn metadata_or_callback, + BrowserFlightServiceClient.NextHandshakeCallbackFn callback) { return nextHandshake( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse nextHandshake( - HandshakeRequest requestMessage, - BrowserFlightServiceClient.NextHandshakeMetadata_or_callbackFn metadata_or_callback) { + HandshakeRequest requestMessage, + BrowserFlightServiceClient.NextHandshakeMetadata_or_callbackFn metadata_or_callback) { return nextHandshake( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } public native UnaryResponse nextHandshake( - HandshakeRequest requestMessage, - 
BrowserFlightServiceClient.NextHandshakeMetadata_or_callbackUnionType metadata_or_callback, - BrowserFlightServiceClient.NextHandshakeCallbackFn callback); + HandshakeRequest requestMessage, + BrowserFlightServiceClient.NextHandshakeMetadata_or_callbackUnionType metadata_or_callback, + BrowserFlightServiceClient.NextHandshakeCallbackFn callback); public native UnaryResponse nextHandshake( - HandshakeRequest requestMessage, - BrowserFlightServiceClient.NextHandshakeMetadata_or_callbackUnionType metadata_or_callback); + HandshakeRequest requestMessage, + BrowserFlightServiceClient.NextHandshakeMetadata_or_callbackUnionType metadata_or_callback); public native ResponseStream openDoExchange( - FlightData requestMessage, BrowserHeaders metadata); + FlightData requestMessage, BrowserHeaders metadata); public native ResponseStream openDoExchange(FlightData requestMessage); public native ResponseStream openDoPut( - FlightData requestMessage, BrowserHeaders metadata); + FlightData requestMessage, BrowserHeaders metadata); public native ResponseStream openDoPut(FlightData requestMessage); public native ResponseStream openHandshake( - HandshakeRequest requestMessage, BrowserHeaders metadata); + HandshakeRequest requestMessage, BrowserHeaders metadata); public native ResponseStream openHandshake(HandshakeRequest requestMessage); } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/browserflight_pb_service/RequestStream.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/browserflight_pb_service/RequestStream.java index ca395fd50f6..dd7ed03446e 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/browserflight_pb_service/RequestStream.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/browserflight_pb_service/RequestStream.java @@ -10,9 +10,9 @@ import 
jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.protocol.BrowserFlight_pb_service.RequestStream", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.protocol.BrowserFlight_pb_service.RequestStream", + namespace = JsPackage.GLOBAL) public interface RequestStream { @JsFunction public interface OnHandlerFn { diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/browserflight_pb_service/ResponseStream.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/browserflight_pb_service/ResponseStream.java index 8d23c079c62..eb7bd510eef 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/browserflight_pb_service/ResponseStream.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/browserflight_pb_service/ResponseStream.java @@ -5,9 +5,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.protocol.BrowserFlight_pb_service.ResponseStream", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.protocol.BrowserFlight_pb_service.ResponseStream", + namespace = JsPackage.GLOBAL) public interface ResponseStream { void cancel(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/browserflight_pb_service/UnaryResponse.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/browserflight_pb_service/UnaryResponse.java index a910ecd4b92..1386bbb945b 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/browserflight_pb_service/UnaryResponse.java +++ 
b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/browserflight_pb_service/UnaryResponse.java @@ -4,9 +4,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.protocol.BrowserFlight_pb_service.UnaryResponse", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.protocol.BrowserFlight_pb_service.UnaryResponse", + namespace = JsPackage.GLOBAL) public interface UnaryResponse { void cancel(); } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/Action.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/Action.java index 6da163f5680..86d47c10c1a 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/Action.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/Action.java @@ -9,9 +9,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.protocol.Flight_pb.Action", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.protocol.Flight_pb.Action", + namespace = JsPackage.GLOBAL) public class Action { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface GetBodyUnionType { diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/ActionType.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/ActionType.java index a7b13f44a13..80135735a8e 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/ActionType.java +++ 
b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/ActionType.java @@ -9,9 +9,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.protocol.Flight_pb.ActionType", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.protocol.Flight_pb.ActionType", + namespace = JsPackage.GLOBAL) public class ActionType { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -60,7 +60,7 @@ static ActionType.ToObjectReturnType0 create() { public static native void serializeBinaryToWriter(ActionType message, Object writer); public static native ActionType.ToObjectReturnType toObject( - boolean includeInstance, ActionType msg); + boolean includeInstance, ActionType msg); public native String getDescription(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/BasicAuth.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/BasicAuth.java index a9c020ff169..ae099053186 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/BasicAuth.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/BasicAuth.java @@ -9,9 +9,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.protocol.Flight_pb.BasicAuth", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.protocol.Flight_pb.BasicAuth", + namespace = JsPackage.GLOBAL) public class BasicAuth { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -60,7 +60,7 @@ static BasicAuth.ToObjectReturnType0 create() { public static native void 
serializeBinaryToWriter(BasicAuth message, Object writer); public static native BasicAuth.ToObjectReturnType toObject( - boolean includeInstance, BasicAuth msg); + boolean includeInstance, BasicAuth msg); public native String getPassword(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/Criteria.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/Criteria.java index 594d7403af3..5d847c084ef 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/Criteria.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/Criteria.java @@ -9,9 +9,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.protocol.Flight_pb.Criteria", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.protocol.Flight_pb.Criteria", + namespace = JsPackage.GLOBAL) public class Criteria { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface GetExpressionUnionType { @@ -113,13 +113,13 @@ static Criteria.ToObjectReturnType create() { @JsOverlay default void setExpression(String expression) { setExpression( - Js.uncheckedCast(expression)); + Js.uncheckedCast(expression)); } @JsOverlay default void setExpression(Uint8Array expression) { setExpression( - Js.uncheckedCast(expression)); + Js.uncheckedCast(expression)); } } @@ -167,13 +167,13 @@ static Criteria.ToObjectReturnType0 create() { @JsOverlay default void setExpression(String expression) { setExpression( - Js.uncheckedCast(expression)); + Js.uncheckedCast(expression)); } @JsOverlay default void setExpression(Uint8Array expression) { setExpression( - Js.uncheckedCast(expression)); + Js.uncheckedCast(expression)); } } @@ -183,8 +183,7 @@ default void setExpression(Uint8Array 
expression) { public static native void serializeBinaryToWriter(Criteria message, Object writer); - public static native Criteria.ToObjectReturnType toObject(boolean includeInstance, - Criteria msg); + public static native Criteria.ToObjectReturnType toObject(boolean includeInstance, Criteria msg); public native Criteria.GetExpressionUnionType getExpression(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/Empty.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/Empty.java index b6df2b7771f..46debc711c6 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/Empty.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/Empty.java @@ -5,9 +5,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.protocol.Flight_pb.Empty", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.protocol.Flight_pb.Empty", + namespace = JsPackage.GLOBAL) public class Empty { public static native Empty deserializeBinary(Uint8Array bytes); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/FlightData.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/FlightData.java index b215d2e2ebd..b2403f2628d 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/FlightData.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/FlightData.java @@ -10,9 +10,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.protocol.Flight_pb.FlightData", - namespace = 
JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.protocol.Flight_pb.FlightData", + namespace = JsPackage.GLOBAL) public class FlightData { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface GetAppMetadataUnionType { @@ -190,7 +190,7 @@ public interface FlightDescriptorFieldType { public interface GetCmdUnionType { @JsOverlay static FlightData.ToObjectReturnType.FlightDescriptorFieldType.GetCmdUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -230,21 +230,18 @@ static FlightData.ToObjectReturnType.FlightDescriptorFieldType create() { double getType(); @JsProperty - void setCmd( - FlightData.ToObjectReturnType.FlightDescriptorFieldType.GetCmdUnionType cmd); + void setCmd(FlightData.ToObjectReturnType.FlightDescriptorFieldType.GetCmdUnionType cmd); @JsOverlay default void setCmd(String cmd) { setCmd( - Js.uncheckedCast( - cmd)); + Js.uncheckedCast(cmd)); } @JsOverlay default void setCmd(Uint8Array cmd) { setCmd( - Js.uncheckedCast( - cmd)); + Js.uncheckedCast(cmd)); } @JsProperty @@ -366,15 +363,13 @@ static FlightData.ToObjectReturnType create() { @JsOverlay default void setAppMetadata(String appMetadata) { setAppMetadata( - Js.uncheckedCast( - appMetadata)); + Js.uncheckedCast(appMetadata)); } @JsOverlay default void setAppMetadata(Uint8Array appMetadata) { setAppMetadata( - Js.uncheckedCast( - appMetadata)); + Js.uncheckedCast(appMetadata)); } @JsProperty @@ -382,14 +377,12 @@ default void setAppMetadata(Uint8Array appMetadata) { @JsOverlay default void setDataBody(String dataBody) { - setDataBody( - Js.uncheckedCast(dataBody)); + setDataBody(Js.uncheckedCast(dataBody)); } @JsOverlay default void setDataBody(Uint8Array dataBody) { - setDataBody( - Js.uncheckedCast(dataBody)); + setDataBody(Js.uncheckedCast(dataBody)); } @JsProperty @@ -398,18 +391,18 @@ default void setDataBody(Uint8Array dataBody) { @JsOverlay default void setDataHeader(String dataHeader) { setDataHeader( - 
Js.uncheckedCast(dataHeader)); + Js.uncheckedCast(dataHeader)); } @JsOverlay default void setDataHeader(Uint8Array dataHeader) { setDataHeader( - Js.uncheckedCast(dataHeader)); + Js.uncheckedCast(dataHeader)); } @JsProperty void setFlightDescriptor( - FlightData.ToObjectReturnType.FlightDescriptorFieldType flightDescriptor); + FlightData.ToObjectReturnType.FlightDescriptorFieldType flightDescriptor); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -420,7 +413,7 @@ public interface FlightDescriptorFieldType { public interface GetCmdUnionType { @JsOverlay static FlightData.ToObjectReturnType0.FlightDescriptorFieldType.GetCmdUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -460,21 +453,20 @@ static FlightData.ToObjectReturnType0.FlightDescriptorFieldType create() { double getType(); @JsProperty - void setCmd( - FlightData.ToObjectReturnType0.FlightDescriptorFieldType.GetCmdUnionType cmd); + void setCmd(FlightData.ToObjectReturnType0.FlightDescriptorFieldType.GetCmdUnionType cmd); @JsOverlay default void setCmd(String cmd) { setCmd( - Js.uncheckedCast( - cmd)); + Js.uncheckedCast( + cmd)); } @JsOverlay default void setCmd(Uint8Array cmd) { setCmd( - Js.uncheckedCast( - cmd)); + Js.uncheckedCast( + cmd)); } @JsProperty @@ -596,15 +588,13 @@ static FlightData.ToObjectReturnType0 create() { @JsOverlay default void setAppMetadata(String appMetadata) { setAppMetadata( - Js.uncheckedCast( - appMetadata)); + Js.uncheckedCast(appMetadata)); } @JsOverlay default void setAppMetadata(Uint8Array appMetadata) { setAppMetadata( - Js.uncheckedCast( - appMetadata)); + Js.uncheckedCast(appMetadata)); } @JsProperty @@ -612,14 +602,12 @@ default void setAppMetadata(Uint8Array appMetadata) { @JsOverlay default void setDataBody(String dataBody) { - setDataBody( - Js.uncheckedCast(dataBody)); + setDataBody(Js.uncheckedCast(dataBody)); } @JsOverlay default void setDataBody(Uint8Array dataBody) { - setDataBody( - Js.uncheckedCast(dataBody)); + 
setDataBody(Js.uncheckedCast(dataBody)); } @JsProperty @@ -628,20 +616,18 @@ default void setDataBody(Uint8Array dataBody) { @JsOverlay default void setDataHeader(String dataHeader) { setDataHeader( - Js.uncheckedCast( - dataHeader)); + Js.uncheckedCast(dataHeader)); } @JsOverlay default void setDataHeader(Uint8Array dataHeader) { setDataHeader( - Js.uncheckedCast( - dataHeader)); + Js.uncheckedCast(dataHeader)); } @JsProperty void setFlightDescriptor( - FlightData.ToObjectReturnType0.FlightDescriptorFieldType flightDescriptor); + FlightData.ToObjectReturnType0.FlightDescriptorFieldType flightDescriptor); } public static native FlightData deserializeBinary(Uint8Array bytes); @@ -651,7 +637,7 @@ void setFlightDescriptor( public static native void serializeBinaryToWriter(FlightData message, Object writer); public static native FlightData.ToObjectReturnType toObject( - boolean includeInstance, FlightData msg); + boolean includeInstance, FlightData msg); public native void clearFlightDescriptor(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/FlightDescriptor.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/FlightDescriptor.java index d38a4db038b..b74ad3966de 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/FlightDescriptor.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/FlightDescriptor.java @@ -11,9 +11,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.protocol.Flight_pb.FlightDescriptor", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.protocol.Flight_pb.FlightDescriptor", + namespace = JsPackage.GLOBAL) public class FlightDescriptor { @JsType(isNative = true, name = "?", namespace = 
JsPackage.GLOBAL) public interface GetCmdUnionType { @@ -214,12 +214,12 @@ default void setPathList(String[] pathList) { public static native FlightDescriptor deserializeBinary(Uint8Array bytes); public static native FlightDescriptor deserializeBinaryFromReader( - FlightDescriptor message, Object reader); + FlightDescriptor message, Object reader); public static native void serializeBinaryToWriter(FlightDescriptor message, Object writer); public static native FlightDescriptor.ToObjectReturnType toObject( - boolean includeInstance, FlightDescriptor msg); + boolean includeInstance, FlightDescriptor msg); public native String addPath(String value, double index); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/FlightEndpoint.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/FlightEndpoint.java index a2f1ec81a9f..1202afed547 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/FlightEndpoint.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/FlightEndpoint.java @@ -10,9 +10,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.protocol.Flight_pb.FlightEndpoint", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.protocol.Flight_pb.FlightEndpoint", + namespace = JsPackage.GLOBAL) public class FlightEndpoint { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -35,8 +35,7 @@ public interface TicketFieldType { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface GetTicketUnionType { @JsOverlay - static FlightEndpoint.ToObjectReturnType.TicketFieldType.GetTicketUnionType of( - Object o) { + static 
FlightEndpoint.ToObjectReturnType.TicketFieldType.GetTicketUnionType of(Object o) { return Js.cast(o); } @@ -70,21 +69,20 @@ static FlightEndpoint.ToObjectReturnType.TicketFieldType create() { FlightEndpoint.ToObjectReturnType.TicketFieldType.GetTicketUnionType getTicket(); @JsProperty - void setTicket( - FlightEndpoint.ToObjectReturnType.TicketFieldType.GetTicketUnionType ticket); + void setTicket(FlightEndpoint.ToObjectReturnType.TicketFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -101,14 +99,14 @@ static FlightEndpoint.ToObjectReturnType create() { @JsProperty void setLocationList( - JsArray locationList); + JsArray locationList); @JsOverlay default void setLocationList( - FlightEndpoint.ToObjectReturnType.LocationListFieldType[] locationList) { + FlightEndpoint.ToObjectReturnType.LocationListFieldType[] locationList) { setLocationList( - Js.>uncheckedCast( - locationList)); + Js.>uncheckedCast( + locationList)); } @JsProperty @@ -136,8 +134,7 @@ public interface TicketFieldType { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface GetTicketUnionType { @JsOverlay - static FlightEndpoint.ToObjectReturnType0.TicketFieldType.GetTicketUnionType of( - Object o) { + static FlightEndpoint.ToObjectReturnType0.TicketFieldType.GetTicketUnionType of(Object o) { return Js.cast(o); } @@ -171,21 +168,20 @@ static FlightEndpoint.ToObjectReturnType0.TicketFieldType create() { FlightEndpoint.ToObjectReturnType0.TicketFieldType.GetTicketUnionType getTicket(); @JsProperty - void setTicket( - FlightEndpoint.ToObjectReturnType0.TicketFieldType.GetTicketUnionType ticket); + void setTicket(FlightEndpoint.ToObjectReturnType0.TicketFieldType.GetTicketUnionType ticket); @JsOverlay default void 
setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -202,14 +198,14 @@ static FlightEndpoint.ToObjectReturnType0 create() { @JsProperty void setLocationList( - JsArray locationList); + JsArray locationList); @JsOverlay default void setLocationList( - FlightEndpoint.ToObjectReturnType0.LocationListFieldType[] locationList) { + FlightEndpoint.ToObjectReturnType0.LocationListFieldType[] locationList) { setLocationList( - Js.>uncheckedCast( - locationList)); + Js.>uncheckedCast( + locationList)); } @JsProperty @@ -219,12 +215,12 @@ default void setLocationList( public static native FlightEndpoint deserializeBinary(Uint8Array bytes); public static native FlightEndpoint deserializeBinaryFromReader( - FlightEndpoint message, Object reader); + FlightEndpoint message, Object reader); public static native void serializeBinaryToWriter(FlightEndpoint message, Object writer); public static native FlightEndpoint.ToObjectReturnType toObject( - boolean includeInstance, FlightEndpoint msg); + boolean includeInstance, FlightEndpoint msg); public native Location addLocation(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/FlightInfo.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/FlightInfo.java index f10972faadd..bd06a606a63 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/FlightInfo.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/FlightInfo.java @@ -10,9 +10,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.protocol.Flight_pb.FlightInfo", - namespace = 
JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.protocol.Flight_pb.FlightInfo", + namespace = JsPackage.GLOBAL) public class FlightInfo { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface GetSchemaUnionType { @@ -94,7 +94,7 @@ public interface TicketFieldType { public interface GetTicketUnionType { @JsOverlay static FlightInfo.ToObjectReturnType.EndpointListFieldType.TicketFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -129,20 +129,20 @@ static FlightInfo.ToObjectReturnType.EndpointListFieldType.TicketFieldType creat @JsProperty void setTicket( - FlightInfo.ToObjectReturnType.EndpointListFieldType.TicketFieldType.GetTicketUnionType ticket); + FlightInfo.ToObjectReturnType.EndpointListFieldType.TicketFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -159,19 +159,18 @@ static FlightInfo.ToObjectReturnType.EndpointListFieldType create() { @JsProperty void setLocationList( - JsArray locationList); + JsArray locationList); @JsOverlay default void setLocationList( - FlightInfo.ToObjectReturnType.EndpointListFieldType.LocationListFieldType[] locationList) { + FlightInfo.ToObjectReturnType.EndpointListFieldType.LocationListFieldType[] locationList) { setLocationList( - Js.>uncheckedCast( - locationList)); + Js.>uncheckedCast( + locationList)); } @JsProperty - void setTicket( - FlightInfo.ToObjectReturnType.EndpointListFieldType.TicketFieldType ticket); + void setTicket(FlightInfo.ToObjectReturnType.EndpointListFieldType.TicketFieldType ticket); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -180,7 +179,7 @@ public interface FlightDescriptorFieldType { public interface GetCmdUnionType { @JsOverlay static 
FlightInfo.ToObjectReturnType.FlightDescriptorFieldType.GetCmdUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -220,21 +219,18 @@ static FlightInfo.ToObjectReturnType.FlightDescriptorFieldType create() { double getType(); @JsProperty - void setCmd( - FlightInfo.ToObjectReturnType.FlightDescriptorFieldType.GetCmdUnionType cmd); + void setCmd(FlightInfo.ToObjectReturnType.FlightDescriptorFieldType.GetCmdUnionType cmd); @JsOverlay default void setCmd(String cmd) { setCmd( - Js.uncheckedCast( - cmd)); + Js.uncheckedCast(cmd)); } @JsOverlay default void setCmd(Uint8Array cmd) { setCmd( - Js.uncheckedCast( - cmd)); + Js.uncheckedCast(cmd)); } @JsProperty @@ -299,19 +295,18 @@ static FlightInfo.ToObjectReturnType create() { @JsOverlay default void setEndpointList( - FlightInfo.ToObjectReturnType.EndpointListFieldType[] endpointList) { + FlightInfo.ToObjectReturnType.EndpointListFieldType[] endpointList) { setEndpointList( - Js.>uncheckedCast( - endpointList)); + Js.>uncheckedCast( + endpointList)); } @JsProperty - void setEndpointList( - JsArray endpointList); + void setEndpointList(JsArray endpointList); @JsProperty void setFlightDescriptor( - FlightInfo.ToObjectReturnType.FlightDescriptorFieldType flightDescriptor); + FlightInfo.ToObjectReturnType.FlightDescriptorFieldType flightDescriptor); @JsProperty void setSchema(FlightInfo.ToObjectReturnType.GetSchemaUnionType schema); @@ -357,7 +352,7 @@ public interface TicketFieldType { public interface GetTicketUnionType { @JsOverlay static FlightInfo.ToObjectReturnType0.EndpointListFieldType.TicketFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -392,20 +387,20 @@ static FlightInfo.ToObjectReturnType0.EndpointListFieldType.TicketFieldType crea @JsProperty void setTicket( - FlightInfo.ToObjectReturnType0.EndpointListFieldType.TicketFieldType.GetTicketUnionType ticket); + FlightInfo.ToObjectReturnType0.EndpointListFieldType.TicketFieldType.GetTicketUnionType ticket); 
@JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -422,19 +417,18 @@ static FlightInfo.ToObjectReturnType0.EndpointListFieldType create() { @JsProperty void setLocationList( - JsArray locationList); + JsArray locationList); @JsOverlay default void setLocationList( - FlightInfo.ToObjectReturnType0.EndpointListFieldType.LocationListFieldType[] locationList) { + FlightInfo.ToObjectReturnType0.EndpointListFieldType.LocationListFieldType[] locationList) { setLocationList( - Js.>uncheckedCast( - locationList)); + Js.>uncheckedCast( + locationList)); } @JsProperty - void setTicket( - FlightInfo.ToObjectReturnType0.EndpointListFieldType.TicketFieldType ticket); + void setTicket(FlightInfo.ToObjectReturnType0.EndpointListFieldType.TicketFieldType ticket); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -443,7 +437,7 @@ public interface FlightDescriptorFieldType { public interface GetCmdUnionType { @JsOverlay static FlightInfo.ToObjectReturnType0.FlightDescriptorFieldType.GetCmdUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -483,21 +477,20 @@ static FlightInfo.ToObjectReturnType0.FlightDescriptorFieldType create() { double getType(); @JsProperty - void setCmd( - FlightInfo.ToObjectReturnType0.FlightDescriptorFieldType.GetCmdUnionType cmd); + void setCmd(FlightInfo.ToObjectReturnType0.FlightDescriptorFieldType.GetCmdUnionType cmd); @JsOverlay default void setCmd(String cmd) { setCmd( - Js.uncheckedCast( - cmd)); + Js.uncheckedCast( + cmd)); } @JsOverlay default void setCmd(Uint8Array cmd) { setCmd( - Js.uncheckedCast( - cmd)); + Js.uncheckedCast( + cmd)); } @JsProperty @@ -562,19 +555,19 @@ static FlightInfo.ToObjectReturnType0 create() { @JsOverlay default void setEndpointList( - 
FlightInfo.ToObjectReturnType0.EndpointListFieldType[] endpointList) { + FlightInfo.ToObjectReturnType0.EndpointListFieldType[] endpointList) { setEndpointList( - Js.>uncheckedCast( - endpointList)); + Js.>uncheckedCast( + endpointList)); } @JsProperty void setEndpointList( - JsArray endpointList); + JsArray endpointList); @JsProperty void setFlightDescriptor( - FlightInfo.ToObjectReturnType0.FlightDescriptorFieldType flightDescriptor); + FlightInfo.ToObjectReturnType0.FlightDescriptorFieldType flightDescriptor); @JsProperty void setSchema(FlightInfo.ToObjectReturnType0.GetSchemaUnionType schema); @@ -603,7 +596,7 @@ default void setSchema(Uint8Array schema) { public static native void serializeBinaryToWriter(FlightInfo message, Object writer); public static native FlightInfo.ToObjectReturnType toObject( - boolean includeInstance, FlightInfo msg); + boolean includeInstance, FlightInfo msg); public native FlightEndpoint addEndpoint(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/HandshakeRequest.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/HandshakeRequest.java index e94768cc543..4451525c5fa 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/HandshakeRequest.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/HandshakeRequest.java @@ -9,9 +9,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.protocol.Flight_pb.HandshakeRequest", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.protocol.Flight_pb.HandshakeRequest", + namespace = JsPackage.GLOBAL) public class HandshakeRequest { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface GetPayloadUnionType { @@ -116,13 +116,13 
@@ static HandshakeRequest.ToObjectReturnType create() { @JsOverlay default void setPayload(String payload) { setPayload( - Js.uncheckedCast(payload)); + Js.uncheckedCast(payload)); } @JsOverlay default void setPayload(Uint8Array payload) { setPayload( - Js.uncheckedCast(payload)); + Js.uncheckedCast(payload)); } @JsProperty @@ -176,15 +176,13 @@ static HandshakeRequest.ToObjectReturnType0 create() { @JsOverlay default void setPayload(String payload) { setPayload( - Js.uncheckedCast( - payload)); + Js.uncheckedCast(payload)); } @JsOverlay default void setPayload(Uint8Array payload) { setPayload( - Js.uncheckedCast( - payload)); + Js.uncheckedCast(payload)); } @JsProperty @@ -194,12 +192,12 @@ default void setPayload(Uint8Array payload) { public static native HandshakeRequest deserializeBinary(Uint8Array bytes); public static native HandshakeRequest deserializeBinaryFromReader( - HandshakeRequest message, Object reader); + HandshakeRequest message, Object reader); public static native void serializeBinaryToWriter(HandshakeRequest message, Object writer); public static native HandshakeRequest.ToObjectReturnType toObject( - boolean includeInstance, HandshakeRequest msg); + boolean includeInstance, HandshakeRequest msg); public native HandshakeRequest.GetPayloadUnionType getPayload(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/HandshakeResponse.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/HandshakeResponse.java index 81ceb83658e..39b6f893f1d 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/HandshakeResponse.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/HandshakeResponse.java @@ -9,9 +9,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = 
"dhinternal.arrow.flight.protocol.Flight_pb.HandshakeResponse", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.protocol.Flight_pb.HandshakeResponse", + namespace = JsPackage.GLOBAL) public class HandshakeResponse { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface GetPayloadUnionType { @@ -116,15 +116,13 @@ static HandshakeResponse.ToObjectReturnType create() { @JsOverlay default void setPayload(String payload) { setPayload( - Js.uncheckedCast( - payload)); + Js.uncheckedCast(payload)); } @JsOverlay default void setPayload(Uint8Array payload) { setPayload( - Js.uncheckedCast( - payload)); + Js.uncheckedCast(payload)); } @JsProperty @@ -178,15 +176,13 @@ static HandshakeResponse.ToObjectReturnType0 create() { @JsOverlay default void setPayload(String payload) { setPayload( - Js.uncheckedCast( - payload)); + Js.uncheckedCast(payload)); } @JsOverlay default void setPayload(Uint8Array payload) { setPayload( - Js.uncheckedCast( - payload)); + Js.uncheckedCast(payload)); } @JsProperty @@ -196,12 +192,12 @@ default void setPayload(Uint8Array payload) { public static native HandshakeResponse deserializeBinary(Uint8Array bytes); public static native HandshakeResponse deserializeBinaryFromReader( - HandshakeResponse message, Object reader); + HandshakeResponse message, Object reader); public static native void serializeBinaryToWriter(HandshakeResponse message, Object writer); public static native HandshakeResponse.ToObjectReturnType toObject( - boolean includeInstance, HandshakeResponse msg); + boolean includeInstance, HandshakeResponse msg); public native HandshakeResponse.GetPayloadUnionType getPayload(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/Location.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/Location.java index 9c5c9d12b02..b1f7002b2c5 100644 --- 
a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/Location.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/Location.java @@ -9,9 +9,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.protocol.Flight_pb.Location", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.protocol.Flight_pb.Location", + namespace = JsPackage.GLOBAL) public class Location { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -47,8 +47,7 @@ static Location.ToObjectReturnType0 create() { public static native void serializeBinaryToWriter(Location message, Object writer); - public static native Location.ToObjectReturnType toObject(boolean includeInstance, - Location msg); + public static native Location.ToObjectReturnType toObject(boolean includeInstance, Location msg); public native String getUri(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/PutResult.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/PutResult.java index 0f4a2b0ce98..64ceb772d5d 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/PutResult.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/PutResult.java @@ -9,9 +9,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.protocol.Flight_pb.PutResult", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.protocol.Flight_pb.PutResult", + namespace = JsPackage.GLOBAL) public class PutResult { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) 
public interface GetAppMetadataUnionType { @@ -113,15 +113,13 @@ static PutResult.ToObjectReturnType create() { @JsOverlay default void setAppMetadata(String appMetadata) { setAppMetadata( - Js.uncheckedCast( - appMetadata)); + Js.uncheckedCast(appMetadata)); } @JsOverlay default void setAppMetadata(Uint8Array appMetadata) { setAppMetadata( - Js.uncheckedCast( - appMetadata)); + Js.uncheckedCast(appMetadata)); } } @@ -169,15 +167,13 @@ static PutResult.ToObjectReturnType0 create() { @JsOverlay default void setAppMetadata(String appMetadata) { setAppMetadata( - Js.uncheckedCast( - appMetadata)); + Js.uncheckedCast(appMetadata)); } @JsOverlay default void setAppMetadata(Uint8Array appMetadata) { setAppMetadata( - Js.uncheckedCast( - appMetadata)); + Js.uncheckedCast(appMetadata)); } } @@ -188,7 +184,7 @@ default void setAppMetadata(Uint8Array appMetadata) { public static native void serializeBinaryToWriter(PutResult message, Object writer); public static native PutResult.ToObjectReturnType toObject( - boolean includeInstance, PutResult msg); + boolean includeInstance, PutResult msg); public native PutResult.GetAppMetadataUnionType getAppMetadata(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/Result.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/Result.java index c5f3f6ac3fa..e71e5c22aca 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/Result.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/Result.java @@ -9,9 +9,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.protocol.Flight_pb.Result", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.protocol.Flight_pb.Result", + namespace = JsPackage.GLOBAL) 
public class Result { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface GetBodyUnionType { diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/SchemaResult.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/SchemaResult.java index c3cacb0c598..cf423f86ded 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/SchemaResult.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/SchemaResult.java @@ -9,9 +9,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.protocol.Flight_pb.SchemaResult", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.protocol.Flight_pb.SchemaResult", + namespace = JsPackage.GLOBAL) public class SchemaResult { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface GetSchemaUnionType { @@ -164,26 +164,24 @@ static SchemaResult.ToObjectReturnType0 create() { @JsOverlay default void setSchema(String schema) { - setSchema( - Js.uncheckedCast(schema)); + setSchema(Js.uncheckedCast(schema)); } @JsOverlay default void setSchema(Uint8Array schema) { - setSchema( - Js.uncheckedCast(schema)); + setSchema(Js.uncheckedCast(schema)); } } public static native SchemaResult deserializeBinary(Uint8Array bytes); public static native SchemaResult deserializeBinaryFromReader( - SchemaResult message, Object reader); + SchemaResult message, Object reader); public static native void serializeBinaryToWriter(SchemaResult message, Object writer); public static native SchemaResult.ToObjectReturnType toObject( - boolean includeInstance, SchemaResult msg); + boolean includeInstance, SchemaResult msg); public native SchemaResult.GetSchemaUnionType getSchema(); diff 
--git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/Ticket.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/Ticket.java index 14324a944e6..fb58eea8b03 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/Ticket.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/Ticket.java @@ -9,9 +9,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.protocol.Flight_pb.Ticket", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.protocol.Flight_pb.Ticket", + namespace = JsPackage.GLOBAL) public class Ticket { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface GetTicketUnionType { diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/flightdescriptor/DescriptorTypeMap.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/flightdescriptor/DescriptorTypeMap.java index 6b7a3b73d96..d08d1ba7efa 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/flightdescriptor/DescriptorTypeMap.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb/flightdescriptor/DescriptorTypeMap.java @@ -8,9 +8,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.protocol.Flight_pb.FlightDescriptor.DescriptorTypeMap", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.protocol.Flight_pb.FlightDescriptor.DescriptorTypeMap", + namespace = JsPackage.GLOBAL) public interface DescriptorTypeMap { 
@JsOverlay static DescriptorTypeMap create() { diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb_service/BidirectionalStream.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb_service/BidirectionalStream.java index 851f9bca234..32cc4659e29 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb_service/BidirectionalStream.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb_service/BidirectionalStream.java @@ -5,9 +5,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.protocol.Flight_pb_service.BidirectionalStream", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.protocol.Flight_pb_service.BidirectionalStream", + namespace = JsPackage.GLOBAL) public interface BidirectionalStream { void cancel(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb_service/FlightService.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb_service/FlightService.java index 7ded438947d..dbebd016f9a 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb_service/FlightService.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb_service/FlightService.java @@ -8,9 +8,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.protocol.Flight_pb_service.FlightService", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.protocol.Flight_pb_service.FlightService", + namespace = JsPackage.GLOBAL) public class 
FlightService { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface DoActionType { diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb_service/FlightServiceClient.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb_service/FlightServiceClient.java index 20d1cc75e28..559700124df 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb_service/FlightServiceClient.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb_service/FlightServiceClient.java @@ -23,9 +23,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.protocol.Flight_pb_service.FlightServiceClient", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.protocol.Flight_pb_service.FlightServiceClient", + namespace = JsPackage.GLOBAL) public class FlightServiceClient { @JsFunction public interface GetFlightInfoCallbackFn { @@ -86,8 +86,7 @@ static FlightServiceClient.GetFlightInfoMetadata_or_callbackFn.P0Type create() { void setMetadata(BrowserHeaders metadata); } - void onInvoke(FlightServiceClient.GetFlightInfoMetadata_or_callbackFn.P0Type p0, - FlightInfo p1); + void onInvoke(FlightServiceClient.GetFlightInfoMetadata_or_callbackFn.P0Type p0, FlightInfo p1); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -177,8 +176,7 @@ static FlightServiceClient.GetSchemaMetadata_or_callbackFn.P0Type create() { void setMetadata(BrowserHeaders metadata); } - void onInvoke(FlightServiceClient.GetSchemaMetadata_or_callbackFn.P0Type p0, - SchemaResult p1); + void onInvoke(FlightServiceClient.GetSchemaMetadata_or_callbackFn.P0Type p0, SchemaResult p1); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -233,120 
+231,120 @@ public FlightServiceClient(String serviceHost) {} @JsOverlay public final UnaryResponse getFlightInfo( - FlightDescriptor requestMessage, - BrowserHeaders metadata_or_callback, - FlightServiceClient.GetFlightInfoCallbackFn callback) { + FlightDescriptor requestMessage, + BrowserHeaders metadata_or_callback, + FlightServiceClient.GetFlightInfoCallbackFn callback) { return getFlightInfo( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse getFlightInfo( - FlightDescriptor requestMessage, BrowserHeaders metadata_or_callback) { + FlightDescriptor requestMessage, BrowserHeaders metadata_or_callback) { return getFlightInfo( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } @JsOverlay public final UnaryResponse getFlightInfo( - FlightDescriptor requestMessage, - FlightServiceClient.GetFlightInfoMetadata_or_callbackFn metadata_or_callback, - FlightServiceClient.GetFlightInfoCallbackFn callback) { + FlightDescriptor requestMessage, + FlightServiceClient.GetFlightInfoMetadata_or_callbackFn metadata_or_callback, + FlightServiceClient.GetFlightInfoCallbackFn callback) { return getFlightInfo( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse getFlightInfo( - FlightDescriptor requestMessage, - FlightServiceClient.GetFlightInfoMetadata_or_callbackFn metadata_or_callback) { + FlightDescriptor requestMessage, + FlightServiceClient.GetFlightInfoMetadata_or_callbackFn metadata_or_callback) { return getFlightInfo( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } public native UnaryResponse getFlightInfo( - FlightDescriptor requestMessage, 
- FlightServiceClient.GetFlightInfoMetadata_or_callbackUnionType metadata_or_callback, - FlightServiceClient.GetFlightInfoCallbackFn callback); + FlightDescriptor requestMessage, + FlightServiceClient.GetFlightInfoMetadata_or_callbackUnionType metadata_or_callback, + FlightServiceClient.GetFlightInfoCallbackFn callback); public native UnaryResponse getFlightInfo( - FlightDescriptor requestMessage, - FlightServiceClient.GetFlightInfoMetadata_or_callbackUnionType metadata_or_callback); + FlightDescriptor requestMessage, + FlightServiceClient.GetFlightInfoMetadata_or_callbackUnionType metadata_or_callback); @JsOverlay public final UnaryResponse getSchema( - FlightDescriptor requestMessage, - BrowserHeaders metadata_or_callback, - FlightServiceClient.GetSchemaCallbackFn callback) { + FlightDescriptor requestMessage, + BrowserHeaders metadata_or_callback, + FlightServiceClient.GetSchemaCallbackFn callback) { return getSchema( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse getSchema( - FlightDescriptor requestMessage, BrowserHeaders metadata_or_callback) { + FlightDescriptor requestMessage, BrowserHeaders metadata_or_callback) { return getSchema( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } @JsOverlay public final UnaryResponse getSchema( - FlightDescriptor requestMessage, - FlightServiceClient.GetSchemaMetadata_or_callbackFn metadata_or_callback, - FlightServiceClient.GetSchemaCallbackFn callback) { + FlightDescriptor requestMessage, + FlightServiceClient.GetSchemaMetadata_or_callbackFn metadata_or_callback, + FlightServiceClient.GetSchemaCallbackFn callback) { return getSchema( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay 
public final UnaryResponse getSchema( - FlightDescriptor requestMessage, - FlightServiceClient.GetSchemaMetadata_or_callbackFn metadata_or_callback) { + FlightDescriptor requestMessage, + FlightServiceClient.GetSchemaMetadata_or_callbackFn metadata_or_callback) { return getSchema( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } public native UnaryResponse getSchema( - FlightDescriptor requestMessage, - FlightServiceClient.GetSchemaMetadata_or_callbackUnionType metadata_or_callback, - FlightServiceClient.GetSchemaCallbackFn callback); + FlightDescriptor requestMessage, + FlightServiceClient.GetSchemaMetadata_or_callbackUnionType metadata_or_callback, + FlightServiceClient.GetSchemaCallbackFn callback); public native UnaryResponse getSchema( - FlightDescriptor requestMessage, - FlightServiceClient.GetSchemaMetadata_or_callbackUnionType metadata_or_callback); + FlightDescriptor requestMessage, + FlightServiceClient.GetSchemaMetadata_or_callbackUnionType metadata_or_callback); public native BidirectionalStream handshake(); public native BidirectionalStream handshake( - BrowserHeaders metadata); + BrowserHeaders metadata); public native ResponseStream listActions( - Empty requestMessage, BrowserHeaders metadata); + Empty requestMessage, BrowserHeaders metadata); public native ResponseStream listActions(Empty requestMessage); public native ResponseStream listFlights( - Criteria requestMessage, BrowserHeaders metadata); + Criteria requestMessage, BrowserHeaders metadata); public native ResponseStream listFlights(Criteria requestMessage); } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb_service/RequestStream.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb_service/RequestStream.java index bbeb903c2e0..ab85ed8e315 100644 --- 
a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb_service/RequestStream.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb_service/RequestStream.java @@ -10,9 +10,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.protocol.Flight_pb_service.RequestStream", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.protocol.Flight_pb_service.RequestStream", + namespace = JsPackage.GLOBAL) public interface RequestStream { @JsFunction public interface OnHandlerFn { diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb_service/ResponseStream.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb_service/ResponseStream.java index 5d701e941fb..59491d24516 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb_service/ResponseStream.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb_service/ResponseStream.java @@ -5,9 +5,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.protocol.Flight_pb_service.ResponseStream", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.protocol.Flight_pb_service.ResponseStream", + namespace = JsPackage.GLOBAL) public interface ResponseStream { void cancel(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb_service/UnaryResponse.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb_service/UnaryResponse.java index 44e992e9d89..cf5c6447e08 100644 --- 
a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb_service/UnaryResponse.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/arrow/flight/protocol/flight_pb_service/UnaryResponse.java @@ -4,9 +4,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.arrow.flight.protocol.Flight_pb_service.UnaryResponse", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.arrow.flight.protocol.Flight_pb_service.UnaryResponse", + namespace = JsPackage.GLOBAL) public interface UnaryResponse { void cancel(); } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/browserheaders/BrowserHeaders.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/browserheaders/BrowserHeaders.java index 04b391988e2..5aa1a3ffed8 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/browserheaders/BrowserHeaders.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/browserheaders/BrowserHeaders.java @@ -10,9 +10,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.browserHeaders.BrowserHeaders", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.browserHeaders.BrowserHeaders", + namespace = JsPackage.GLOBAL) public class BrowserHeaders { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface AppendValueUnionType { @@ -179,24 +179,24 @@ public BrowserHeaders(BrowserHeaders init, BrowserHeaders.BrowserHeadersOptionsT public BrowserHeaders(BrowserHeaders init) {} public BrowserHeaders( - BrowserHeaders.ConstructorInitUnionType init, - BrowserHeaders.BrowserHeadersOptionsType options) {} + BrowserHeaders.ConstructorInitUnionType init, + BrowserHeaders.BrowserHeadersOptionsType options) {} public 
BrowserHeaders(BrowserHeaders.ConstructorInitUnionType init) {} public BrowserHeaders( - JsPropertyMap init, - BrowserHeaders.BrowserHeadersOptionsType options) {} + JsPropertyMap init, + BrowserHeaders.BrowserHeadersOptionsType options) {} public BrowserHeaders( - JsPropertyMap init) {} + JsPropertyMap init) {} public BrowserHeaders( - Map init, - BrowserHeaders.BrowserHeadersOptionsType options) {} + Map init, + BrowserHeaders.BrowserHeadersOptionsType options) {} public BrowserHeaders( - Map init) {} + Map init) {} public BrowserHeaders(Object init, BrowserHeaders.BrowserHeadersOptionsType options) {} diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/browserheaders/IterateHeaders.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/browserheaders/IterateHeaders.java index 99f45020bdc..ff6d3d9e0a2 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/browserheaders/IterateHeaders.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/browserheaders/IterateHeaders.java @@ -9,9 +9,9 @@ import jsinterop.base.Js; @JsType( - isNative = true, - name = "dhinternal.browserHeaders.iterateHeaders", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.browserHeaders.iterateHeaders", + namespace = JsPackage.GLOBAL) public class IterateHeaders { @JsFunction public interface IterateHeadersCallbackFn { @@ -29,8 +29,8 @@ public interface IterateHeadersKeysCallbackFn { } public static native void iterateHeaders( - WindowHeaders headers, IterateHeaders.IterateHeadersCallbackFn callback); + WindowHeaders headers, IterateHeaders.IterateHeadersCallbackFn callback); public static native void iterateHeadersKeys( - WindowHeaders headers, IterateHeaders.IterateHeadersKeysCallbackFn callback); + WindowHeaders headers, IterateHeaders.IterateHeadersKeysCallbackFn callback); } diff --git 
a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/browserheaders/windowheaders/WindowHeaders.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/browserheaders/windowheaders/WindowHeaders.java index 17fa71bc06e..2c697ded3a9 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/browserheaders/windowheaders/WindowHeaders.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/browserheaders/windowheaders/WindowHeaders.java @@ -6,9 +6,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.browserHeaders.WindowHeaders.WindowHeaders", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.browserHeaders.WindowHeaders.WindowHeaders", + namespace = JsPackage.GLOBAL) public interface WindowHeaders { @JsFunction public interface ForEachCallbackFn { diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/flatbuffers/Encoding.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/flatbuffers/Encoding.java index 2f1bbe8963d..5ce01561ed8 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/flatbuffers/Encoding.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/flatbuffers/Encoding.java @@ -6,5 +6,5 @@ @JsType(isNative = true, name = "dhinternal.flatbuffers.Encoding", namespace = JsPackage.GLOBAL) public class Encoding { public static int UTF16_STRING, - UTF8_BYTES; + UTF8_BYTES; } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/Client.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/Client.java index 72efc88a731..b9833c1c5bc 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/Client.java +++ 
b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/Client.java @@ -7,5 +7,5 @@ @JsType(isNative = true, name = "dhinternal.grpcWeb.client", namespace = JsPackage.GLOBAL) public class Client { public static native io.deephaven.javascript.proto.dhinternal.grpcweb.client.Client client( - M methodDescriptor, ClientRpcOptions props); + M methodDescriptor, ClientRpcOptions props); } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/Grpc.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/Grpc.java index bc5ea086e4c..3e16dcf9728 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/Grpc.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/Grpc.java @@ -58,7 +58,7 @@ public interface XhrTransportFn { public static Grpc.UnaryFn unary; public static native Client client( - M methodDescriptor, ClientRpcOptions props); + M methodDescriptor, ClientRpcOptions props); public static native int httpStatusToCode(double httpStatus); } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/Invoke.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/Invoke.java index c1b66c07340..507cf4f00cf 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/Invoke.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/Invoke.java @@ -8,5 +8,5 @@ @JsType(isNative = true, name = "dhinternal.grpcWeb.invoke", namespace = JsPackage.GLOBAL) public class Invoke { public static native Request invoke( - M methodDescriptor, InvokeRpcOptions props); + M methodDescriptor, InvokeRpcOptions props); } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/Unary.java 
b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/Unary.java index 53334324ab0..4275778b42a 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/Unary.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/Unary.java @@ -8,5 +8,5 @@ @JsType(isNative = true, name = "dhinternal.grpcWeb.unary", namespace = JsPackage.GLOBAL) public class Unary { public static native Request unary( - M methodDescriptor, UnaryRpcOptions props); + M methodDescriptor, UnaryRpcOptions props); } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/chunkparser/ChunkParser.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/chunkparser/ChunkParser.java index 1bbb2fe9bb0..f82795381b0 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/chunkparser/ChunkParser.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/chunkparser/ChunkParser.java @@ -11,9 +11,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.grpcWeb.ChunkParser.ChunkParser", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.grpcWeb.ChunkParser.ChunkParser", + namespace = JsPackage.GLOBAL) public class ChunkParser { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ParseReturnType { diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/chunkparser/ChunkType.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/chunkparser/ChunkType.java index 63fd237d522..94139e78010 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/chunkparser/ChunkType.java +++ 
b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/chunkparser/ChunkType.java @@ -4,10 +4,10 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.grpcWeb.ChunkParser.ChunkType", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.grpcWeb.ChunkParser.ChunkType", + namespace = JsPackage.GLOBAL) public class ChunkType { public static int MESSAGE, - TRAILERS; + TRAILERS; } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/client/Client.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/client/Client.java index 66fcafbac30..3c62357a9d1 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/client/Client.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/client/Client.java @@ -118,7 +118,7 @@ default void start(BrowserHeaders metadata) { @JsOverlay default void start( - JsPropertyMap metadata) { + JsPropertyMap metadata) { start(Js.uncheckedCast(metadata)); } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/client/ClientRpcOptions.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/client/ClientRpcOptions.java index de34dee5140..0f133b8c8bb 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/client/ClientRpcOptions.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/client/ClientRpcOptions.java @@ -8,9 +8,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.grpcWeb.client.ClientRpcOptions", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.grpcWeb.client.ClientRpcOptions", + namespace = JsPackage.GLOBAL) public interface ClientRpcOptions extends RpcOptions { @JsOverlay static 
ClientRpcOptions create() { diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/client/RpcOptions.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/client/RpcOptions.java index 5b06b2535b6..c5efdfa0a8f 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/client/RpcOptions.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/client/RpcOptions.java @@ -9,9 +9,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.grpcWeb.client.RpcOptions", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.grpcWeb.client.RpcOptions", + namespace = JsPackage.GLOBAL) public interface RpcOptions { @JsOverlay static RpcOptions create() { diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/Client.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/Client.java index 692698dce27..78cd556d6fa 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/Client.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/Client.java @@ -5,5 +5,5 @@ @JsType(isNative = true, name = "dhinternal.grpcWeb.grpc.Client", namespace = JsPackage.GLOBAL) public interface Client - extends io.deephaven.javascript.proto.dhinternal.grpcweb.client.Client { + extends io.deephaven.javascript.proto.dhinternal.grpcweb.client.Client { } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/ClientRpcOptions.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/ClientRpcOptions.java index 01a9a4f10b7..2161571f5bc 100644 --- 
a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/ClientRpcOptions.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/ClientRpcOptions.java @@ -4,9 +4,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.grpcWeb.grpc.ClientRpcOptions", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.grpcWeb.grpc.ClientRpcOptions", + namespace = JsPackage.GLOBAL) public interface ClientRpcOptions - extends io.deephaven.javascript.proto.dhinternal.grpcweb.client.ClientRpcOptions { + extends io.deephaven.javascript.proto.dhinternal.grpcweb.client.ClientRpcOptions { } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/Code.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/Code.java index 61dafa9ce98..f810aa215ae 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/Code.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/Code.java @@ -6,20 +6,20 @@ @JsType(isNative = true, name = "dhinternal.grpcWeb.grpc.Code", namespace = JsPackage.GLOBAL) public class Code { public static int Aborted, - AlreadyExists, - Canceled, - DataLoss, - DeadlineExceeded, - FailedPrecondition, - Internal, - InvalidArgument, - NotFound, - OK, - OutOfRange, - PermissionDenied, - ResourceExhausted, - Unauthenticated, - Unavailable, - Unimplemented, - Unknown; + AlreadyExists, + Canceled, + DataLoss, + DeadlineExceeded, + FailedPrecondition, + Internal, + InvalidArgument, + NotFound, + OK, + OutOfRange, + PermissionDenied, + ResourceExhausted, + Unauthenticated, + Unavailable, + Unimplemented, + Unknown; } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/CrossBrowserHttpTransportInit.java 
b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/CrossBrowserHttpTransportInit.java index 33e5c75f3d5..38cccce16ca 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/CrossBrowserHttpTransportInit.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/CrossBrowserHttpTransportInit.java @@ -4,10 +4,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.grpcWeb.grpc.CrossBrowserHttpTransportInit", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.grpcWeb.grpc.CrossBrowserHttpTransportInit", + namespace = JsPackage.GLOBAL) public interface CrossBrowserHttpTransportInit - extends - io.deephaven.javascript.proto.dhinternal.grpcweb.transports.http.http.CrossBrowserHttpTransportInit { + extends io.deephaven.javascript.proto.dhinternal.grpcweb.transports.http.http.CrossBrowserHttpTransportInit { } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/FetchReadableStreamInit.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/FetchReadableStreamInit.java index 16aaadebf86..aa3c03f3d57 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/FetchReadableStreamInit.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/FetchReadableStreamInit.java @@ -4,8 +4,8 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.grpcWeb.grpc.FetchReadableStreamInit", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.grpcWeb.grpc.FetchReadableStreamInit", + namespace = JsPackage.GLOBAL) public interface FetchReadableStreamInit { } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/InvokeRpcOptions.java 
b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/InvokeRpcOptions.java index 112f1d96fd5..97f64fd2b1c 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/InvokeRpcOptions.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/InvokeRpcOptions.java @@ -4,9 +4,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.grpcWeb.grpc.InvokeRpcOptions", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.grpcWeb.grpc.InvokeRpcOptions", + namespace = JsPackage.GLOBAL) public interface InvokeRpcOptions - extends io.deephaven.javascript.proto.dhinternal.grpcweb.invoke.InvokeRpcOptions { + extends io.deephaven.javascript.proto.dhinternal.grpcweb.invoke.InvokeRpcOptions { } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/MethodDefinition.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/MethodDefinition.java index 595efc1b2fa..7c57ad96955 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/MethodDefinition.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/MethodDefinition.java @@ -4,9 +4,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.grpcWeb.grpc.MethodDefinition", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.grpcWeb.grpc.MethodDefinition", + namespace = JsPackage.GLOBAL) public interface MethodDefinition - extends io.deephaven.javascript.proto.dhinternal.grpcweb.service.MethodDefinition { + extends io.deephaven.javascript.proto.dhinternal.grpcweb.service.MethodDefinition { } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/ProtobufMessage.java 
b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/ProtobufMessage.java index 0f795a37cd9..4e0e9c0702e 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/ProtobufMessage.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/ProtobufMessage.java @@ -4,9 +4,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.grpcWeb.grpc.ProtobufMessage", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.grpcWeb.grpc.ProtobufMessage", + namespace = JsPackage.GLOBAL) public interface ProtobufMessage - extends io.deephaven.javascript.proto.dhinternal.grpcweb.message.ProtobufMessage { + extends io.deephaven.javascript.proto.dhinternal.grpcweb.message.ProtobufMessage { } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/ProtobufMessageClass.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/ProtobufMessageClass.java index db1a3c8e0c7..32ce641fded 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/ProtobufMessageClass.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/ProtobufMessageClass.java @@ -4,9 +4,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.grpcWeb.grpc.ProtobufMessageClass", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.grpcWeb.grpc.ProtobufMessageClass", + namespace = JsPackage.GLOBAL) public interface ProtobufMessageClass - extends io.deephaven.javascript.proto.dhinternal.grpcweb.message.ProtobufMessageClass { + extends io.deephaven.javascript.proto.dhinternal.grpcweb.message.ProtobufMessageClass { } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/RpcOptions.java 
b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/RpcOptions.java index d77d082545a..39075c11957 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/RpcOptions.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/RpcOptions.java @@ -5,5 +5,5 @@ @JsType(isNative = true, name = "dhinternal.grpcWeb.grpc.RpcOptions", namespace = JsPackage.GLOBAL) public interface RpcOptions - extends io.deephaven.javascript.proto.dhinternal.grpcweb.client.RpcOptions { + extends io.deephaven.javascript.proto.dhinternal.grpcweb.client.RpcOptions { } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/ServiceDefinition.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/ServiceDefinition.java index 16ea5111705..cffd1fa9d13 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/ServiceDefinition.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/ServiceDefinition.java @@ -4,9 +4,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.grpcWeb.grpc.ServiceDefinition", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.grpcWeb.grpc.ServiceDefinition", + namespace = JsPackage.GLOBAL) public interface ServiceDefinition - extends io.deephaven.javascript.proto.dhinternal.grpcweb.service.ServiceDefinition { + extends io.deephaven.javascript.proto.dhinternal.grpcweb.service.ServiceDefinition { } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/Transport.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/Transport.java index 28b88d3d75c..3170333cd90 100644 --- 
a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/Transport.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/Transport.java @@ -5,5 +5,5 @@ @JsType(isNative = true, name = "dhinternal.grpcWeb.grpc.Transport", namespace = JsPackage.GLOBAL) public interface Transport - extends io.deephaven.javascript.proto.dhinternal.grpcweb.transports.transport.Transport { + extends io.deephaven.javascript.proto.dhinternal.grpcweb.transports.transport.Transport { } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/TransportFactory.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/TransportFactory.java index e79c13131c1..fd7f05a42cc 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/TransportFactory.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/TransportFactory.java @@ -4,9 +4,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.grpcWeb.grpc.TransportFactory", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.grpcWeb.grpc.TransportFactory", + namespace = JsPackage.GLOBAL) public interface TransportFactory - extends io.deephaven.javascript.proto.dhinternal.grpcweb.transports.transport.TransportFactory { + extends io.deephaven.javascript.proto.dhinternal.grpcweb.transports.transport.TransportFactory { } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/TransportOptions.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/TransportOptions.java index ccbcc416839..b8315ca6444 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/TransportOptions.java +++ 
b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/TransportOptions.java @@ -4,9 +4,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.grpcWeb.grpc.TransportOptions", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.grpcWeb.grpc.TransportOptions", + namespace = JsPackage.GLOBAL) public interface TransportOptions - extends io.deephaven.javascript.proto.dhinternal.grpcweb.transports.transport.TransportOptions { + extends io.deephaven.javascript.proto.dhinternal.grpcweb.transports.transport.TransportOptions { } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/UnaryMethodDefinition.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/UnaryMethodDefinition.java index 371784e0a17..9b741f0f15c 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/UnaryMethodDefinition.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/UnaryMethodDefinition.java @@ -4,9 +4,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.grpcWeb.grpc.UnaryMethodDefinition", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.grpcWeb.grpc.UnaryMethodDefinition", + namespace = JsPackage.GLOBAL) public interface UnaryMethodDefinition - extends io.deephaven.javascript.proto.dhinternal.grpcweb.service.UnaryMethodDefinition { + extends io.deephaven.javascript.proto.dhinternal.grpcweb.service.UnaryMethodDefinition { } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/UnaryOutput.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/UnaryOutput.java index bdd4de3a2e1..371528e75e7 100644 --- 
a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/UnaryOutput.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/UnaryOutput.java @@ -5,5 +5,5 @@ @JsType(isNative = true, name = "dhinternal.grpcWeb.grpc.UnaryOutput", namespace = JsPackage.GLOBAL) public interface UnaryOutput - extends io.deephaven.javascript.proto.dhinternal.grpcweb.unary.UnaryOutput { + extends io.deephaven.javascript.proto.dhinternal.grpcweb.unary.UnaryOutput { } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/UnaryRpcOptions.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/UnaryRpcOptions.java index 0f2df6b92a8..ad89fc92c6f 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/UnaryRpcOptions.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/UnaryRpcOptions.java @@ -4,9 +4,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.grpcWeb.grpc.UnaryRpcOptions", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.grpcWeb.grpc.UnaryRpcOptions", + namespace = JsPackage.GLOBAL) public interface UnaryRpcOptions - extends io.deephaven.javascript.proto.dhinternal.grpcweb.unary.UnaryRpcOptions { + extends io.deephaven.javascript.proto.dhinternal.grpcweb.unary.UnaryRpcOptions { } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/XhrTransportInit.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/XhrTransportInit.java index 596f676448b..67bf2ee69a4 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/XhrTransportInit.java +++ 
b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/grpc/XhrTransportInit.java @@ -4,9 +4,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.grpcWeb.grpc.XhrTransportInit", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.grpcWeb.grpc.XhrTransportInit", + namespace = JsPackage.GLOBAL) public interface XhrTransportInit - extends io.deephaven.javascript.proto.dhinternal.grpcweb.transports.http.xhr.XhrTransportInit { + extends io.deephaven.javascript.proto.dhinternal.grpcweb.transports.http.xhr.XhrTransportInit { } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/invoke/InvokeRpcOptions.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/invoke/InvokeRpcOptions.java index 7e6e50018ae..ec3f869a7a3 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/invoke/InvokeRpcOptions.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/invoke/InvokeRpcOptions.java @@ -12,9 +12,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.grpcWeb.invoke.InvokeRpcOptions", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.grpcWeb.invoke.InvokeRpcOptions", + namespace = JsPackage.GLOBAL) public interface InvokeRpcOptions extends RpcOptions { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface GetMetadataJsPropertyMapTypeParameterUnionType { @@ -138,7 +138,7 @@ default void setMetadata(BrowserHeaders metadata) { @JsOverlay default void setMetadata( - JsPropertyMap metadata) { + JsPropertyMap metadata) { setMetadata(Js.uncheckedCast(metadata)); } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/message/ProtobufMessage.java 
b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/message/ProtobufMessage.java index 7e99f95ecf2..521d8120fd3 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/message/ProtobufMessage.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/message/ProtobufMessage.java @@ -5,9 +5,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.grpcWeb.message.ProtobufMessage", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.grpcWeb.message.ProtobufMessage", + namespace = JsPackage.GLOBAL) public interface ProtobufMessage { Uint8Array serializeBinary(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/message/ProtobufMessageClass.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/message/ProtobufMessageClass.java index 8039eb214d7..9f2ecb269b8 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/message/ProtobufMessageClass.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/message/ProtobufMessageClass.java @@ -5,9 +5,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.grpcWeb.message.ProtobufMessageClass", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.grpcWeb.message.ProtobufMessageClass", + namespace = JsPackage.GLOBAL) public interface ProtobufMessageClass { T deserializeBinary(Uint8Array bytes); } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/service/MethodDefinition.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/service/MethodDefinition.java index 26ad42d98b4..53768bbfe8e 100644 --- 
a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/service/MethodDefinition.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/service/MethodDefinition.java @@ -9,9 +9,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.grpcWeb.service.MethodDefinition", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.grpcWeb.service.MethodDefinition", + namespace = JsPackage.GLOBAL) public interface MethodDefinition { @JsOverlay static MethodDefinition create() { diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/service/ServiceDefinition.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/service/ServiceDefinition.java index aeab2fff482..cf83ae826e9 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/service/ServiceDefinition.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/service/ServiceDefinition.java @@ -8,9 +8,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.grpcWeb.service.ServiceDefinition", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.grpcWeb.service.ServiceDefinition", + namespace = JsPackage.GLOBAL) public interface ServiceDefinition { @JsOverlay static ServiceDefinition create() { diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/service/UnaryMethodDefinition.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/service/UnaryMethodDefinition.java index 09a74e5a7d5..04a7b45e0af 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/service/UnaryMethodDefinition.java +++ 
b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/service/UnaryMethodDefinition.java @@ -8,9 +8,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.grpcWeb.service.UnaryMethodDefinition", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.grpcWeb.service.UnaryMethodDefinition", + namespace = JsPackage.GLOBAL) public interface UnaryMethodDefinition extends MethodDefinition { @JsOverlay static UnaryMethodDefinition create() { diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/transports/Transport.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/transports/Transport.java index 1b4d9799524..453b8216481 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/transports/Transport.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/transports/Transport.java @@ -6,12 +6,12 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.grpcWeb.transports.Transport", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.grpcWeb.transports.Transport", + namespace = JsPackage.GLOBAL) public class Transport { public static native io.deephaven.javascript.proto.dhinternal.grpcweb.transports.transport.Transport makeDefaultTransport( - TransportOptions options); + TransportOptions options); public static native void setDefaultTransportFactory(TransportFactory t); } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/transports/http/Fetch.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/transports/http/Fetch.java index da42168c5d6..97c270028d5 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/transports/http/Fetch.java +++ 
b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/transports/http/Fetch.java @@ -5,9 +5,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.grpcWeb.transports.http.fetch", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.grpcWeb.transports.http.fetch", + namespace = JsPackage.GLOBAL) public class Fetch { public static native TransportFactory FetchReadableStreamTransport(Object init); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/transports/http/Http.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/transports/http/Http.java index 6694d1bab9f..9cb453b553a 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/transports/http/Http.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/transports/http/Http.java @@ -6,10 +6,10 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.grpcWeb.transports.http.http", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.grpcWeb.transports.http.http", + namespace = JsPackage.GLOBAL) public class Http { public static native TransportFactory CrossBrowserHttpTransport( - CrossBrowserHttpTransportInit init); + CrossBrowserHttpTransportInit init); } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/transports/http/Xhr.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/transports/http/Xhr.java index 5b582429e24..62f72476b25 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/transports/http/Xhr.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/transports/http/Xhr.java @@ -7,9 +7,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative 
= true, - name = "dhinternal.grpcWeb.transports.http.xhr", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.grpcWeb.transports.http.xhr", + namespace = JsPackage.GLOBAL) public class Xhr { public static native TransportFactory XhrTransport(XhrTransportInit init); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/transports/http/XhrUtil.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/transports/http/XhrUtil.java index 5ad008e2a18..745b8bee2ff 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/transports/http/XhrUtil.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/transports/http/XhrUtil.java @@ -4,9 +4,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.grpcWeb.transports.http.xhrUtil", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.grpcWeb.transports.http.xhrUtil", + namespace = JsPackage.GLOBAL) public class XhrUtil { public static native boolean detectMozXHRSupport(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/transports/http/http/CrossBrowserHttpTransportInit.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/transports/http/http/CrossBrowserHttpTransportInit.java index ab851ca9365..c3db6cae3ff 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/transports/http/http/CrossBrowserHttpTransportInit.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/transports/http/http/CrossBrowserHttpTransportInit.java @@ -8,9 +8,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.grpcWeb.transports.http.http.CrossBrowserHttpTransportInit", - namespace = JsPackage.GLOBAL) + isNative = true, + name 
= "dhinternal.grpcWeb.transports.http.http.CrossBrowserHttpTransportInit", + namespace = JsPackage.GLOBAL) public interface CrossBrowserHttpTransportInit { @JsOverlay static CrossBrowserHttpTransportInit create() { diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/transports/http/xhr/MozChunkedArrayBufferXHR.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/transports/http/xhr/MozChunkedArrayBufferXHR.java index d7ba0dd131f..63d7198a0ed 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/transports/http/xhr/MozChunkedArrayBufferXHR.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/transports/http/xhr/MozChunkedArrayBufferXHR.java @@ -5,9 +5,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.grpcWeb.transports.http.xhr.MozChunkedArrayBufferXHR", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.grpcWeb.transports.http.xhr.MozChunkedArrayBufferXHR", + namespace = JsPackage.GLOBAL) public class MozChunkedArrayBufferXHR extends XHR { public MozChunkedArrayBufferXHR() { // This super call is here only for the code to compile; it is never executed. 
diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/transports/http/xhr/XHR.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/transports/http/xhr/XHR.java index f9fe683f108..7f20c8012fb 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/transports/http/xhr/XHR.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/transports/http/xhr/XHR.java @@ -8,9 +8,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.grpcWeb.transports.http.xhr.XHR", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.grpcWeb.transports.http.xhr.XHR", + namespace = JsPackage.GLOBAL) public class XHR implements Transport { public double index; public XhrTransportInit init; diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/transports/http/xhr/XhrTransportInit.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/transports/http/xhr/XhrTransportInit.java index 98fb31b492b..a1f7c309d46 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/transports/http/xhr/XhrTransportInit.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/transports/http/xhr/XhrTransportInit.java @@ -8,9 +8,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.grpcWeb.transports.http.xhr.XhrTransportInit", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.grpcWeb.transports.http.xhr.XhrTransportInit", + namespace = JsPackage.GLOBAL) public interface XhrTransportInit { @JsOverlay static XhrTransportInit create() { diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/transports/transport/Transport.java 
b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/transports/transport/Transport.java index c8e6f1da653..84e949bd27a 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/transports/transport/Transport.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/transports/transport/Transport.java @@ -6,9 +6,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.grpcWeb.transports.Transport.Transport", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.grpcWeb.transports.Transport.Transport", + namespace = JsPackage.GLOBAL) public interface Transport { void cancel(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/transports/transport/TransportFactory.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/transports/transport/TransportFactory.java index 8cef5077d28..4aa24c375c3 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/transports/transport/TransportFactory.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/transports/transport/TransportFactory.java @@ -4,8 +4,8 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.grpcWeb.transports.Transport.TransportFactory", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.grpcWeb.transports.Transport.TransportFactory", + namespace = JsPackage.GLOBAL) public interface TransportFactory { } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/transports/transport/TransportOptions.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/transports/transport/TransportOptions.java index 263964d389a..7cd6a5608a5 100644 --- 
a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/transports/transport/TransportOptions.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/transports/transport/TransportOptions.java @@ -14,9 +14,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.grpcWeb.transports.Transport.TransportOptions", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.grpcWeb.transports.Transport.TransportOptions", + namespace = JsPackage.GLOBAL) public interface TransportOptions { @JsFunction public interface OnChunkFn { diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/unary/UnaryOutput.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/unary/UnaryOutput.java index e722ffadb28..7fbc11d631c 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/unary/UnaryOutput.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/unary/UnaryOutput.java @@ -9,9 +9,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.grpcWeb.unary.UnaryOutput", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.grpcWeb.unary.UnaryOutput", + namespace = JsPackage.GLOBAL) public interface UnaryOutput { @JsOverlay static UnaryOutput create() { diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/unary/UnaryRpcOptions.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/unary/UnaryRpcOptions.java index 0464327dceb..be52a4ccbc5 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/unary/UnaryRpcOptions.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/grpcweb/unary/UnaryRpcOptions.java @@ -12,9 +12,9 @@ 
import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.grpcWeb.unary.UnaryRpcOptions", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.grpcWeb.unary.UnaryRpcOptions", + namespace = JsPackage.GLOBAL) public interface UnaryRpcOptions extends RpcOptions { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface GetMetadataJsPropertyMapTypeParameterUnionType { @@ -122,7 +122,7 @@ default void setMetadata(BrowserHeaders metadata) { @JsOverlay default void setMetadata( - JsPropertyMap metadata) { + JsPropertyMap metadata) { setMetadata(Js.uncheckedCast(metadata)); } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarrageMessageType.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarrageMessageType.java index 6f5bfa1f599..a1c435577c1 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarrageMessageType.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarrageMessageType.java @@ -4,15 +4,15 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.barrage.flatbuf.Barrage_generated.io.deephaven.barrage.flatbuf.BarrageMessageType", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.barrage.flatbuf.Barrage_generated.io.deephaven.barrage.flatbuf.BarrageMessageType", + namespace = JsPackage.GLOBAL) public class BarrageMessageType { public static int BarrageSerializationOptions, - BarrageSubscriptionRequest, - BarrageUpdateMetadata, - NewSessionRequest, - None, - 
RefreshSessionRequest, - SessionInfoResponse; + BarrageSubscriptionRequest, + BarrageUpdateMetadata, + NewSessionRequest, + None, + RefreshSessionRequest, + SessionInfoResponse; } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarrageMessageWrapper.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarrageMessageWrapper.java index 0713835b4fd..1cc25bc9333 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarrageMessageWrapper.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarrageMessageWrapper.java @@ -12,9 +12,9 @@ import jsinterop.base.Js; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.barrage.flatbuf.Barrage_generated.io.deephaven.barrage.flatbuf.BarrageMessageWrapper", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.barrage.flatbuf.Barrage_generated.io.deephaven.barrage.flatbuf.BarrageMessageWrapper", + namespace = JsPackage.GLOBAL) public class BarrageMessageWrapper { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface CreateMsgPayloadVectorDataUnionType { @@ -93,7 +93,7 @@ default boolean isUint8Array() { } public static native void addHalfCloseAfterMessage( - Builder builder, boolean halfCloseAfterMessage); + Builder builder, boolean halfCloseAfterMessage); public static native void addMagic(Builder builder, double magic); @@ -106,40 +106,37 @@ public static native void addHalfCloseAfterMessage( public static native void addSequence(Builder builder, Long sequence); public static native double createBarrageMessageWrapper( - 
Builder builder, - double magic, - int msgType, - double msgPayloadOffset, - double rpcTicketOffset, - Long sequence, - boolean halfCloseAfterMessage); + Builder builder, + double magic, + int msgType, + double msgPayloadOffset, + double rpcTicketOffset, + Long sequence, + boolean halfCloseAfterMessage); @Deprecated public static native double createMsgPayloadVector( - Builder builder, BarrageMessageWrapper.CreateMsgPayloadVectorDataUnionType data); + Builder builder, BarrageMessageWrapper.CreateMsgPayloadVectorDataUnionType data); @JsOverlay @Deprecated public static final double createMsgPayloadVector(Builder builder, Int8Array data) { return createMsgPayloadVector( - builder, - Js.uncheckedCast(data)); + builder, Js.uncheckedCast(data)); } @JsOverlay @Deprecated public static final double createMsgPayloadVector(Builder builder, JsArray data) { return createMsgPayloadVector( - builder, - Js.uncheckedCast(data)); + builder, Js.uncheckedCast(data)); } @JsOverlay @Deprecated public static final double createMsgPayloadVector(Builder builder, Uint8Array data) { return createMsgPayloadVector( - builder, - Js.uncheckedCast(data)); + builder, Js.uncheckedCast(data)); } @JsOverlay @@ -150,30 +147,27 @@ public static final double createMsgPayloadVector(Builder builder, double[] data @Deprecated public static native double createRpcTicketVector( - Builder builder, BarrageMessageWrapper.CreateRpcTicketVectorDataUnionType data); + Builder builder, BarrageMessageWrapper.CreateRpcTicketVectorDataUnionType data); @JsOverlay @Deprecated public static final double createRpcTicketVector(Builder builder, Int8Array data) { return createRpcTicketVector( - builder, - Js.uncheckedCast(data)); + builder, Js.uncheckedCast(data)); } @JsOverlay @Deprecated public static final double createRpcTicketVector(Builder builder, JsArray data) { return createRpcTicketVector( - builder, - Js.uncheckedCast(data)); + builder, Js.uncheckedCast(data)); } @JsOverlay @Deprecated public static final double 
createRpcTicketVector(Builder builder, Uint8Array data) { return createRpcTicketVector( - builder, - Js.uncheckedCast(data)); + builder, Js.uncheckedCast(data)); } @JsOverlay @@ -185,15 +179,15 @@ public static final double createRpcTicketVector(Builder builder, double[] data) public static native double endBarrageMessageWrapper(Builder builder); public static native BarrageMessageWrapper getRootAsBarrageMessageWrapper( - ByteBuffer bb, BarrageMessageWrapper obj); + ByteBuffer bb, BarrageMessageWrapper obj); public static native BarrageMessageWrapper getRootAsBarrageMessageWrapper(ByteBuffer bb); public static native BarrageMessageWrapper getSizePrefixedRootAsBarrageMessageWrapper( - ByteBuffer bb, BarrageMessageWrapper obj); + ByteBuffer bb, BarrageMessageWrapper obj); public static native BarrageMessageWrapper getSizePrefixedRootAsBarrageMessageWrapper( - ByteBuffer bb); + ByteBuffer bb); public static native void startBarrageMessageWrapper(Builder builder); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarrageModColumnMetadata.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarrageModColumnMetadata.java index 5acb20e3456..cb29c331679 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarrageModColumnMetadata.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarrageModColumnMetadata.java @@ -11,9 +11,9 @@ import jsinterop.base.Js; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.barrage.flatbuf.Barrage_generated.io.deephaven.barrage.flatbuf.BarrageModColumnMetadata", - namespace = JsPackage.GLOBAL) + 
isNative = true, + name = "dhinternal.io.deephaven.barrage.flatbuf.Barrage_generated.io.deephaven.barrage.flatbuf.BarrageModColumnMetadata", + namespace = JsPackage.GLOBAL) public class BarrageModColumnMetadata { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface CreateModifiedRowsVectorDataUnionType { @@ -56,34 +56,34 @@ default boolean isUint8Array() { public static native void addModifiedRows(Builder builder, double modifiedRowsOffset); public static native double createBarrageModColumnMetadata( - Builder builder, double modifiedRowsOffset); + Builder builder, double modifiedRowsOffset); @Deprecated public static native double createModifiedRowsVector( - Builder builder, BarrageModColumnMetadata.CreateModifiedRowsVectorDataUnionType data); + Builder builder, BarrageModColumnMetadata.CreateModifiedRowsVectorDataUnionType data); @JsOverlay @Deprecated public static final double createModifiedRowsVector(Builder builder, Int8Array data) { return createModifiedRowsVector( - builder, - Js.uncheckedCast(data)); + builder, + Js.uncheckedCast(data)); } @JsOverlay @Deprecated public static final double createModifiedRowsVector(Builder builder, JsArray data) { return createModifiedRowsVector( - builder, - Js.uncheckedCast(data)); + builder, + Js.uncheckedCast(data)); } @JsOverlay @Deprecated public static final double createModifiedRowsVector(Builder builder, Uint8Array data) { return createModifiedRowsVector( - builder, - Js.uncheckedCast(data)); + builder, + Js.uncheckedCast(data)); } @JsOverlay @@ -95,15 +95,15 @@ public static final double createModifiedRowsVector(Builder builder, double[] da public static native double endBarrageModColumnMetadata(Builder builder); public static native BarrageModColumnMetadata getRootAsBarrageModColumnMetadata( - ByteBuffer bb, BarrageModColumnMetadata obj); + ByteBuffer bb, BarrageModColumnMetadata obj); public static native BarrageModColumnMetadata getRootAsBarrageModColumnMetadata(ByteBuffer bb); 
public static native BarrageModColumnMetadata getSizePrefixedRootAsBarrageModColumnMetadata( - ByteBuffer bb, BarrageModColumnMetadata obj); + ByteBuffer bb, BarrageModColumnMetadata obj); public static native BarrageModColumnMetadata getSizePrefixedRootAsBarrageModColumnMetadata( - ByteBuffer bb); + ByteBuffer bb); public static native void startBarrageModColumnMetadata(Builder builder); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarrageSerializationOptions.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarrageSerializationOptions.java index bf2cc77b6f1..f33ec58901f 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarrageSerializationOptions.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarrageSerializationOptions.java @@ -6,31 +6,31 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.barrage.flatbuf.Barrage_generated.io.deephaven.barrage.flatbuf.BarrageSerializationOptions", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.barrage.flatbuf.Barrage_generated.io.deephaven.barrage.flatbuf.BarrageSerializationOptions", + namespace = JsPackage.GLOBAL) public class BarrageSerializationOptions { public static native void addColumnConversionMode( - Builder builder, int columnConversionMode); + Builder builder, int columnConversionMode); public static native void addUseDeephavenNulls(Builder builder, boolean useDeephavenNulls); public static native double createBarrageSerializationOptions( - Builder builder, int 
columnConversionMode, boolean useDeephavenNulls); + Builder builder, int columnConversionMode, boolean useDeephavenNulls); public static native double endBarrageSerializationOptions(Builder builder); public static native BarrageSerializationOptions getRootAsBarrageSerializationOptions( - ByteBuffer bb, BarrageSerializationOptions obj); + ByteBuffer bb, BarrageSerializationOptions obj); public static native BarrageSerializationOptions getRootAsBarrageSerializationOptions( - ByteBuffer bb); + ByteBuffer bb); public static native BarrageSerializationOptions getSizePrefixedRootAsBarrageSerializationOptions( - ByteBuffer bb, BarrageSerializationOptions obj); + ByteBuffer bb, BarrageSerializationOptions obj); public static native BarrageSerializationOptions getSizePrefixedRootAsBarrageSerializationOptions( - ByteBuffer bb); + ByteBuffer bb); public static native void startBarrageSerializationOptions(Builder builder); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarrageSubscriptionRequest.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarrageSubscriptionRequest.java index ee1a6d8ff70..9fa819ec0cd 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarrageSubscriptionRequest.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarrageSubscriptionRequest.java @@ -12,9 +12,9 @@ import jsinterop.base.Js; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.barrage.flatbuf.Barrage_generated.io.deephaven.barrage.flatbuf.BarrageSubscriptionRequest", - namespace = JsPackage.GLOBAL) + isNative = true, + name = 
"dhinternal.io.deephaven.barrage.flatbuf.Barrage_generated.io.deephaven.barrage.flatbuf.BarrageSubscriptionRequest", + namespace = JsPackage.GLOBAL) public class BarrageSubscriptionRequest { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface CreateColumnsVectorDataUnionType { @@ -133,7 +133,7 @@ default boolean isUint8Array() { public static native void addColumns(Builder builder, double columnsOffset); public static native void addSerializationOptions( - Builder builder, double serializationOptionsOffset); + Builder builder, double serializationOptionsOffset); public static native void addTicket(Builder builder, double ticketOffset); @@ -143,30 +143,30 @@ public static native void addSerializationOptions( @Deprecated public static native double createColumnsVector( - Builder builder, BarrageSubscriptionRequest.CreateColumnsVectorDataUnionType data); + Builder builder, BarrageSubscriptionRequest.CreateColumnsVectorDataUnionType data); @JsOverlay @Deprecated public static final double createColumnsVector(Builder builder, Int8Array data) { return createColumnsVector( - builder, - Js.uncheckedCast(data)); + builder, + Js.uncheckedCast(data)); } @JsOverlay @Deprecated public static final double createColumnsVector(Builder builder, JsArray data) { return createColumnsVector( - builder, - Js.uncheckedCast(data)); + builder, + Js.uncheckedCast(data)); } @JsOverlay @Deprecated public static final double createColumnsVector(Builder builder, Uint8Array data) { return createColumnsVector( - builder, - Js.uncheckedCast(data)); + builder, + Js.uncheckedCast(data)); } @JsOverlay @@ -177,30 +177,30 @@ public static final double createColumnsVector(Builder builder, double[] data) { @Deprecated public static native double createTicketVector( - Builder builder, BarrageSubscriptionRequest.CreateTicketVectorDataUnionType data); + Builder builder, BarrageSubscriptionRequest.CreateTicketVectorDataUnionType data); @JsOverlay @Deprecated public static 
final double createTicketVector(Builder builder, Int8Array data) { return createTicketVector( - builder, - Js.uncheckedCast(data)); + builder, + Js.uncheckedCast(data)); } @JsOverlay @Deprecated public static final double createTicketVector(Builder builder, JsArray data) { return createTicketVector( - builder, - Js.uncheckedCast(data)); + builder, + Js.uncheckedCast(data)); } @JsOverlay @Deprecated public static final double createTicketVector(Builder builder, Uint8Array data) { return createTicketVector( - builder, - Js.uncheckedCast(data)); + builder, + Js.uncheckedCast(data)); } @JsOverlay @@ -211,30 +211,30 @@ public static final double createTicketVector(Builder builder, double[] data) { @Deprecated public static native double createViewportVector( - Builder builder, BarrageSubscriptionRequest.CreateViewportVectorDataUnionType data); + Builder builder, BarrageSubscriptionRequest.CreateViewportVectorDataUnionType data); @JsOverlay @Deprecated public static final double createViewportVector(Builder builder, Int8Array data) { return createViewportVector( - builder, - Js.uncheckedCast(data)); + builder, + Js.uncheckedCast(data)); } @JsOverlay @Deprecated public static final double createViewportVector(Builder builder, JsArray data) { return createViewportVector( - builder, - Js.uncheckedCast(data)); + builder, + Js.uncheckedCast(data)); } @JsOverlay @Deprecated public static final double createViewportVector(Builder builder, Uint8Array data) { return createViewportVector( - builder, - Js.uncheckedCast(data)); + builder, + Js.uncheckedCast(data)); } @JsOverlay @@ -246,16 +246,16 @@ public static final double createViewportVector(Builder builder, double[] data) public static native double endBarrageSubscriptionRequest(Builder builder); public static native BarrageSubscriptionRequest getRootAsBarrageSubscriptionRequest( - ByteBuffer bb, BarrageSubscriptionRequest obj); + ByteBuffer bb, BarrageSubscriptionRequest obj); public static native BarrageSubscriptionRequest 
getRootAsBarrageSubscriptionRequest( - ByteBuffer bb); + ByteBuffer bb); public static native BarrageSubscriptionRequest getSizePrefixedRootAsBarrageSubscriptionRequest( - ByteBuffer bb, BarrageSubscriptionRequest obj); + ByteBuffer bb, BarrageSubscriptionRequest obj); public static native BarrageSubscriptionRequest getSizePrefixedRootAsBarrageSubscriptionRequest( - ByteBuffer bb); + ByteBuffer bb); public static native void startBarrageSubscriptionRequest(Builder builder); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarrageUpdateMetadata.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarrageUpdateMetadata.java index bf6e483c1f7..ecc3234bfe3 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarrageUpdateMetadata.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/BarrageUpdateMetadata.java @@ -12,9 +12,9 @@ import jsinterop.base.Js; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.barrage.flatbuf.Barrage_generated.io.deephaven.barrage.flatbuf.BarrageUpdateMetadata", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.barrage.flatbuf.Barrage_generated.io.deephaven.barrage.flatbuf.BarrageUpdateMetadata", + namespace = JsPackage.GLOBAL) public class BarrageUpdateMetadata { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface CreateAddedRowsIncludedVectorDataUnionType { @@ -248,8 +248,7 @@ default boolean isUint8Array() { public static native void addAddedRowsIncluded(Builder builder, double addedRowsIncludedOffset); - public static native 
void addEffectiveColumnSet(Builder builder, - double effectiveColumnSetOffset); + public static native void addEffectiveColumnSet(Builder builder, double effectiveColumnSetOffset); public static native void addEffectiveViewport(Builder builder, double effectiveViewportOffset); @@ -271,34 +270,30 @@ public static native void addEffectiveColumnSet(Builder builder, @Deprecated public static native double createAddedRowsIncludedVector( - Builder builder, BarrageUpdateMetadata.CreateAddedRowsIncludedVectorDataUnionType data); + Builder builder, BarrageUpdateMetadata.CreateAddedRowsIncludedVectorDataUnionType data); @JsOverlay @Deprecated public static final double createAddedRowsIncludedVector(Builder builder, Int8Array data) { return createAddedRowsIncludedVector( - builder, - Js.uncheckedCast( - data)); + builder, + Js.uncheckedCast(data)); } @JsOverlay @Deprecated - public static final double createAddedRowsIncludedVector(Builder builder, - JsArray data) { + public static final double createAddedRowsIncludedVector(Builder builder, JsArray data) { return createAddedRowsIncludedVector( - builder, - Js.uncheckedCast( - data)); + builder, + Js.uncheckedCast(data)); } @JsOverlay @Deprecated public static final double createAddedRowsIncludedVector(Builder builder, Uint8Array data) { return createAddedRowsIncludedVector( - builder, - Js.uncheckedCast( - data)); + builder, + Js.uncheckedCast(data)); } @JsOverlay @@ -309,30 +304,27 @@ public static final double createAddedRowsIncludedVector(Builder builder, double @Deprecated public static native double createAddedRowsVector( - Builder builder, BarrageUpdateMetadata.CreateAddedRowsVectorDataUnionType data); + Builder builder, BarrageUpdateMetadata.CreateAddedRowsVectorDataUnionType data); @JsOverlay @Deprecated public static final double createAddedRowsVector(Builder builder, Int8Array data) { return createAddedRowsVector( - builder, - Js.uncheckedCast(data)); + builder, Js.uncheckedCast(data)); } @JsOverlay @Deprecated 
public static final double createAddedRowsVector(Builder builder, JsArray data) { return createAddedRowsVector( - builder, - Js.uncheckedCast(data)); + builder, Js.uncheckedCast(data)); } @JsOverlay @Deprecated public static final double createAddedRowsVector(Builder builder, Uint8Array data) { return createAddedRowsVector( - builder, - Js.uncheckedCast(data)); + builder, Js.uncheckedCast(data)); } @JsOverlay @@ -342,50 +334,46 @@ public static final double createAddedRowsVector(Builder builder, double[] data) } public static native double createBarrageUpdateMetadata( - Builder builder, - double numAddBatches, - double numModBatches, - Long firstSeq, - Long lastSeq, - boolean isSnapshot, - double effectiveViewportOffset, - double effectiveColumnSetOffset, - double addedRowsOffset, - double removedRowsOffset, - double shiftDataOffset, - double addedRowsIncludedOffset, - double nodesOffset); + Builder builder, + double numAddBatches, + double numModBatches, + Long firstSeq, + Long lastSeq, + boolean isSnapshot, + double effectiveViewportOffset, + double effectiveColumnSetOffset, + double addedRowsOffset, + double removedRowsOffset, + double shiftDataOffset, + double addedRowsIncludedOffset, + double nodesOffset); @Deprecated public static native double createEffectiveColumnSetVector( - Builder builder, BarrageUpdateMetadata.CreateEffectiveColumnSetVectorDataUnionType data); + Builder builder, BarrageUpdateMetadata.CreateEffectiveColumnSetVectorDataUnionType data); @JsOverlay @Deprecated public static final double createEffectiveColumnSetVector(Builder builder, Int8Array data) { return createEffectiveColumnSetVector( - builder, - Js.uncheckedCast( - data)); + builder, + Js.uncheckedCast(data)); } @JsOverlay @Deprecated - public static final double createEffectiveColumnSetVector(Builder builder, - JsArray data) { + public static final double createEffectiveColumnSetVector(Builder builder, JsArray data) { return createEffectiveColumnSetVector( - builder, - 
Js.uncheckedCast( - data)); + builder, + Js.uncheckedCast(data)); } @JsOverlay @Deprecated public static final double createEffectiveColumnSetVector(Builder builder, Uint8Array data) { return createEffectiveColumnSetVector( - builder, - Js.uncheckedCast( - data)); + builder, + Js.uncheckedCast(data)); } @JsOverlay @@ -396,34 +384,30 @@ public static final double createEffectiveColumnSetVector(Builder builder, doubl @Deprecated public static native double createEffectiveViewportVector( - Builder builder, BarrageUpdateMetadata.CreateEffectiveViewportVectorDataUnionType data); + Builder builder, BarrageUpdateMetadata.CreateEffectiveViewportVectorDataUnionType data); @JsOverlay @Deprecated public static final double createEffectiveViewportVector(Builder builder, Int8Array data) { return createEffectiveViewportVector( - builder, - Js.uncheckedCast( - data)); + builder, + Js.uncheckedCast(data)); } @JsOverlay @Deprecated - public static final double createEffectiveViewportVector(Builder builder, - JsArray data) { + public static final double createEffectiveViewportVector(Builder builder, JsArray data) { return createEffectiveViewportVector( - builder, - Js.uncheckedCast( - data)); + builder, + Js.uncheckedCast(data)); } @JsOverlay @Deprecated public static final double createEffectiveViewportVector(Builder builder, Uint8Array data) { return createEffectiveViewportVector( - builder, - Js.uncheckedCast( - data)); + builder, + Js.uncheckedCast(data)); } @JsOverlay @@ -441,30 +425,30 @@ public static final double createNodesVector(Builder builder, double[] data) { @Deprecated public static native double createRemovedRowsVector( - Builder builder, BarrageUpdateMetadata.CreateRemovedRowsVectorDataUnionType data); + Builder builder, BarrageUpdateMetadata.CreateRemovedRowsVectorDataUnionType data); @JsOverlay @Deprecated public static final double createRemovedRowsVector(Builder builder, Int8Array data) { return createRemovedRowsVector( - builder, - Js.uncheckedCast(data)); + 
builder, + Js.uncheckedCast(data)); } @JsOverlay @Deprecated public static final double createRemovedRowsVector(Builder builder, JsArray data) { return createRemovedRowsVector( - builder, - Js.uncheckedCast(data)); + builder, + Js.uncheckedCast(data)); } @JsOverlay @Deprecated public static final double createRemovedRowsVector(Builder builder, Uint8Array data) { return createRemovedRowsVector( - builder, - Js.uncheckedCast(data)); + builder, + Js.uncheckedCast(data)); } @JsOverlay @@ -475,30 +459,27 @@ public static final double createRemovedRowsVector(Builder builder, double[] dat @Deprecated public static native double createShiftDataVector( - Builder builder, BarrageUpdateMetadata.CreateShiftDataVectorDataUnionType data); + Builder builder, BarrageUpdateMetadata.CreateShiftDataVectorDataUnionType data); @JsOverlay @Deprecated public static final double createShiftDataVector(Builder builder, Int8Array data) { return createShiftDataVector( - builder, - Js.uncheckedCast(data)); + builder, Js.uncheckedCast(data)); } @JsOverlay @Deprecated public static final double createShiftDataVector(Builder builder, JsArray data) { return createShiftDataVector( - builder, - Js.uncheckedCast(data)); + builder, Js.uncheckedCast(data)); } @JsOverlay @Deprecated public static final double createShiftDataVector(Builder builder, Uint8Array data) { return createShiftDataVector( - builder, - Js.uncheckedCast(data)); + builder, Js.uncheckedCast(data)); } @JsOverlay @@ -510,15 +491,15 @@ public static final double createShiftDataVector(Builder builder, double[] data) public static native double endBarrageUpdateMetadata(Builder builder); public static native BarrageUpdateMetadata getRootAsBarrageUpdateMetadata( - ByteBuffer bb, BarrageUpdateMetadata obj); + ByteBuffer bb, BarrageUpdateMetadata obj); public static native BarrageUpdateMetadata getRootAsBarrageUpdateMetadata(ByteBuffer bb); public static native BarrageUpdateMetadata getSizePrefixedRootAsBarrageUpdateMetadata( - ByteBuffer bb, 
BarrageUpdateMetadata obj); + ByteBuffer bb, BarrageUpdateMetadata obj); public static native BarrageUpdateMetadata getSizePrefixedRootAsBarrageUpdateMetadata( - ByteBuffer bb); + ByteBuffer bb); public static native void startAddedRowsIncludedVector(Builder builder, double numElems); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/ColumnConversionMode.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/ColumnConversionMode.java index 67ff724b8eb..cc491e27292 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/ColumnConversionMode.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/ColumnConversionMode.java @@ -4,11 +4,11 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.barrage.flatbuf.Barrage_generated.io.deephaven.barrage.flatbuf.ColumnConversionMode", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.barrage.flatbuf.Barrage_generated.io.deephaven.barrage.flatbuf.ColumnConversionMode", + namespace = JsPackage.GLOBAL) public class ColumnConversionMode { public static int JavaSerialization, - Stringify, - ThrowError; + Stringify, + ThrowError; } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/NewSessionRequest.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/NewSessionRequest.java index c40939f8b67..e04187a05f6 
100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/NewSessionRequest.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/NewSessionRequest.java @@ -11,9 +11,9 @@ import jsinterop.base.Js; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.barrage.flatbuf.Barrage_generated.io.deephaven.barrage.flatbuf.NewSessionRequest", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.barrage.flatbuf.Barrage_generated.io.deephaven.barrage.flatbuf.NewSessionRequest", + namespace = JsPackage.GLOBAL) public class NewSessionRequest { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface CreatePayloadVectorDataUnionType { @@ -58,31 +58,31 @@ default boolean isUint8Array() { public static native void addProtocolVersion(Builder builder, double protocolVersion); public static native double createNewSessionRequest( - Builder builder, double protocolVersion, double payloadOffset); + Builder builder, double protocolVersion, double payloadOffset); @Deprecated public static native double createPayloadVector( - Builder builder, NewSessionRequest.CreatePayloadVectorDataUnionType data); + Builder builder, NewSessionRequest.CreatePayloadVectorDataUnionType data); @JsOverlay @Deprecated public static final double createPayloadVector(Builder builder, Int8Array data) { return createPayloadVector( - builder, Js.uncheckedCast(data)); + builder, Js.uncheckedCast(data)); } @JsOverlay @Deprecated public static final double createPayloadVector(Builder builder, JsArray data) { return createPayloadVector( - builder, Js.uncheckedCast(data)); + builder, Js.uncheckedCast(data)); } @JsOverlay @Deprecated public static final double createPayloadVector(Builder builder, Uint8Array data) { return createPayloadVector( - 
builder, Js.uncheckedCast(data)); + builder, Js.uncheckedCast(data)); } @JsOverlay @@ -94,12 +94,12 @@ public static final double createPayloadVector(Builder builder, double[] data) { public static native double endNewSessionRequest(Builder builder); public static native NewSessionRequest getRootAsNewSessionRequest( - ByteBuffer bb, NewSessionRequest obj); + ByteBuffer bb, NewSessionRequest obj); public static native NewSessionRequest getRootAsNewSessionRequest(ByteBuffer bb); public static native NewSessionRequest getSizePrefixedRootAsNewSessionRequest( - ByteBuffer bb, NewSessionRequest obj); + ByteBuffer bb, NewSessionRequest obj); public static native NewSessionRequest getSizePrefixedRootAsNewSessionRequest(ByteBuffer bb); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/RefreshSessionRequest.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/RefreshSessionRequest.java index 3e37b1fc299..56ae8724a6e 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/RefreshSessionRequest.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/RefreshSessionRequest.java @@ -11,9 +11,9 @@ import jsinterop.base.Js; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.barrage.flatbuf.Barrage_generated.io.deephaven.barrage.flatbuf.RefreshSessionRequest", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.barrage.flatbuf.Barrage_generated.io.deephaven.barrage.flatbuf.RefreshSessionRequest", + namespace = JsPackage.GLOBAL) public class RefreshSessionRequest { @JsType(isNative = true, name = "?", namespace 
= JsPackage.GLOBAL) public interface CreateSessionVectorDataUnionType { @@ -59,30 +59,27 @@ default boolean isUint8Array() { @Deprecated public static native double createSessionVector( - Builder builder, RefreshSessionRequest.CreateSessionVectorDataUnionType data); + Builder builder, RefreshSessionRequest.CreateSessionVectorDataUnionType data); @JsOverlay @Deprecated public static final double createSessionVector(Builder builder, Int8Array data) { return createSessionVector( - builder, - Js.uncheckedCast(data)); + builder, Js.uncheckedCast(data)); } @JsOverlay @Deprecated public static final double createSessionVector(Builder builder, JsArray data) { return createSessionVector( - builder, - Js.uncheckedCast(data)); + builder, Js.uncheckedCast(data)); } @JsOverlay @Deprecated public static final double createSessionVector(Builder builder, Uint8Array data) { return createSessionVector( - builder, - Js.uncheckedCast(data)); + builder, Js.uncheckedCast(data)); } @JsOverlay @@ -94,15 +91,15 @@ public static final double createSessionVector(Builder builder, double[] data) { public static native double endRefreshSessionRequest(Builder builder); public static native RefreshSessionRequest getRootAsRefreshSessionRequest( - ByteBuffer bb, RefreshSessionRequest obj); + ByteBuffer bb, RefreshSessionRequest obj); public static native RefreshSessionRequest getRootAsRefreshSessionRequest(ByteBuffer bb); public static native RefreshSessionRequest getSizePrefixedRootAsRefreshSessionRequest( - ByteBuffer bb, RefreshSessionRequest obj); + ByteBuffer bb, RefreshSessionRequest obj); public static native RefreshSessionRequest getSizePrefixedRootAsRefreshSessionRequest( - ByteBuffer bb); + ByteBuffer bb); public static native void startRefreshSessionRequest(Builder builder); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/SessionInfoResponse.java 
b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/SessionInfoResponse.java index 86c6104e5df..ad703fdf187 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/SessionInfoResponse.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/barrage/flatbuf/barrage_generated/io/deephaven/barrage/flatbuf/SessionInfoResponse.java @@ -12,9 +12,9 @@ import jsinterop.base.Js; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.barrage.flatbuf.Barrage_generated.io.deephaven.barrage.flatbuf.SessionInfoResponse", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.barrage.flatbuf.Barrage_generated.io.deephaven.barrage.flatbuf.SessionInfoResponse", + namespace = JsPackage.GLOBAL) public class SessionInfoResponse { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface CreateMetadataHeaderVectorDataUnionType { @@ -96,35 +96,34 @@ default boolean isUint8Array() { public static native void addSessionToken(Builder builder, double sessionTokenOffset); - public static native void addTokenRefreshDeadlineMs(Builder builder, - Long tokenRefreshDeadlineMs); + public static native void addTokenRefreshDeadlineMs(Builder builder, Long tokenRefreshDeadlineMs); @Deprecated public static native double createMetadataHeaderVector( - Builder builder, SessionInfoResponse.CreateMetadataHeaderVectorDataUnionType data); + Builder builder, SessionInfoResponse.CreateMetadataHeaderVectorDataUnionType data); @JsOverlay @Deprecated public static final double createMetadataHeaderVector(Builder builder, Int8Array data) { return createMetadataHeaderVector( - builder, - Js.uncheckedCast(data)); + builder, + Js.uncheckedCast(data)); } @JsOverlay @Deprecated public static final double 
createMetadataHeaderVector(Builder builder, JsArray data) { return createMetadataHeaderVector( - builder, - Js.uncheckedCast(data)); + builder, + Js.uncheckedCast(data)); } @JsOverlay @Deprecated public static final double createMetadataHeaderVector(Builder builder, Uint8Array data) { return createMetadataHeaderVector( - builder, - Js.uncheckedCast(data)); + builder, + Js.uncheckedCast(data)); } @JsOverlay @@ -134,37 +133,34 @@ public static final double createMetadataHeaderVector(Builder builder, double[] } public static native double createSessionInfoResponse( - Builder builder, - double metadataHeaderOffset, - double sessionTokenOffset, - Long tokenRefreshDeadlineMs); + Builder builder, + double metadataHeaderOffset, + double sessionTokenOffset, + Long tokenRefreshDeadlineMs); @Deprecated public static native double createSessionTokenVector( - Builder builder, SessionInfoResponse.CreateSessionTokenVectorDataUnionType data); + Builder builder, SessionInfoResponse.CreateSessionTokenVectorDataUnionType data); @JsOverlay @Deprecated public static final double createSessionTokenVector(Builder builder, Int8Array data) { return createSessionTokenVector( - builder, - Js.uncheckedCast(data)); + builder, Js.uncheckedCast(data)); } @JsOverlay @Deprecated public static final double createSessionTokenVector(Builder builder, JsArray data) { return createSessionTokenVector( - builder, - Js.uncheckedCast(data)); + builder, Js.uncheckedCast(data)); } @JsOverlay @Deprecated public static final double createSessionTokenVector(Builder builder, Uint8Array data) { return createSessionTokenVector( - builder, - Js.uncheckedCast(data)); + builder, Js.uncheckedCast(data)); } @JsOverlay @@ -176,15 +172,14 @@ public static final double createSessionTokenVector(Builder builder, double[] da public static native double endSessionInfoResponse(Builder builder); public static native SessionInfoResponse getRootAsSessionInfoResponse( - ByteBuffer bb, SessionInfoResponse obj); + ByteBuffer bb, 
SessionInfoResponse obj); public static native SessionInfoResponse getRootAsSessionInfoResponse(ByteBuffer bb); public static native SessionInfoResponse getSizePrefixedRootAsSessionInfoResponse( - ByteBuffer bb, SessionInfoResponse obj); + ByteBuffer bb, SessionInfoResponse obj); - public static native SessionInfoResponse getSizePrefixedRootAsSessionInfoResponse( - ByteBuffer bb); + public static native SessionInfoResponse getSizePrefixedRootAsSessionInfoResponse(ByteBuffer bb); public static native void startMetadataHeaderVector(Builder builder, double numElems); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/Table_pb.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/Table_pb.java index 92600d05e18..8e66d27d8f7 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/Table_pb.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/Table_pb.java @@ -6,9 +6,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb", + namespace = JsPackage.GLOBAL) public class Table_pb { public static CaseSensitivityMap CaseSensitivity; public static MatchTypeMap MatchType; diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/BindTableToVariableRequest.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/BindTableToVariableRequest.java index bd347dc0cf5..e813a15da2b 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/BindTableToVariableRequest.java +++ 
b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/BindTableToVariableRequest.java @@ -10,9 +10,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb.BindTableToVariableRequest", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.BindTableToVariableRequest", + namespace = JsPackage.GLOBAL) public class BindTableToVariableRequest { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -22,7 +22,7 @@ public interface ConsoleIdFieldType { public interface GetTicketUnionType { @JsOverlay static BindTableToVariableRequest.ToObjectReturnType.ConsoleIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -57,20 +57,20 @@ static BindTableToVariableRequest.ToObjectReturnType.ConsoleIdFieldType create() @JsProperty void setTicket( - BindTableToVariableRequest.ToObjectReturnType.ConsoleIdFieldType.GetTicketUnionType ticket); + BindTableToVariableRequest.ToObjectReturnType.ConsoleIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -89,8 +89,7 @@ static BindTableToVariableRequest.ToObjectReturnType create() { String getVariableName(); @JsProperty - void setConsoleId( - BindTableToVariableRequest.ToObjectReturnType.ConsoleIdFieldType consoleId); + void setConsoleId(BindTableToVariableRequest.ToObjectReturnType.ConsoleIdFieldType consoleId); @JsProperty void setTableId(Object tableId); @@ -107,7 +106,7 @@ public interface ConsoleIdFieldType { public interface GetTicketUnionType { @JsOverlay static 
BindTableToVariableRequest.ToObjectReturnType0.ConsoleIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -142,20 +141,20 @@ static BindTableToVariableRequest.ToObjectReturnType0.ConsoleIdFieldType create( @JsProperty void setTicket( - BindTableToVariableRequest.ToObjectReturnType0.ConsoleIdFieldType.GetTicketUnionType ticket); + BindTableToVariableRequest.ToObjectReturnType0.ConsoleIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -174,8 +173,7 @@ static BindTableToVariableRequest.ToObjectReturnType0 create() { String getVariableName(); @JsProperty - void setConsoleId( - BindTableToVariableRequest.ToObjectReturnType0.ConsoleIdFieldType consoleId); + void setConsoleId(BindTableToVariableRequest.ToObjectReturnType0.ConsoleIdFieldType consoleId); @JsProperty void setTableId(Object tableId); @@ -187,13 +185,13 @@ void setConsoleId( public static native BindTableToVariableRequest deserializeBinary(Uint8Array bytes); public static native BindTableToVariableRequest deserializeBinaryFromReader( - BindTableToVariableRequest message, Object reader); + BindTableToVariableRequest message, Object reader); public static native void serializeBinaryToWriter( - BindTableToVariableRequest message, Object writer); + BindTableToVariableRequest message, Object writer); public static native BindTableToVariableRequest.ToObjectReturnType toObject( - boolean includeInstance, BindTableToVariableRequest msg); + boolean includeInstance, BindTableToVariableRequest msg); public native void clearConsoleId(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/BindTableToVariableResponse.java 
b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/BindTableToVariableResponse.java index 8e64bc6f200..7130978d456 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/BindTableToVariableResponse.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/BindTableToVariableResponse.java @@ -5,17 +5,17 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb.BindTableToVariableResponse", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.BindTableToVariableResponse", + namespace = JsPackage.GLOBAL) public class BindTableToVariableResponse { public static native BindTableToVariableResponse deserializeBinary(Uint8Array bytes); public static native BindTableToVariableResponse deserializeBinaryFromReader( - BindTableToVariableResponse message, Object reader); + BindTableToVariableResponse message, Object reader); public static native void serializeBinaryToWriter( - BindTableToVariableResponse message, Object writer); + BindTableToVariableResponse message, Object writer); public static native Object toObject(boolean includeInstance, BindTableToVariableResponse msg); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/CancelCommandRequest.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/CancelCommandRequest.java index 3aa83695124..da5b8b09b6c 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/CancelCommandRequest.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/CancelCommandRequest.java @@ -10,9 +10,9 @@ import 
jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb.CancelCommandRequest", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.CancelCommandRequest", + namespace = JsPackage.GLOBAL) public class CancelCommandRequest { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -22,7 +22,7 @@ public interface ConsoleIdFieldType { public interface GetTicketUnionType { @JsOverlay static CancelCommandRequest.ToObjectReturnType.ConsoleIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -57,20 +57,20 @@ static CancelCommandRequest.ToObjectReturnType.ConsoleIdFieldType create() { @JsProperty void setTicket( - CancelCommandRequest.ToObjectReturnType.ConsoleIdFieldType.GetTicketUnionType ticket); + CancelCommandRequest.ToObjectReturnType.ConsoleIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -100,7 +100,7 @@ public interface ConsoleIdFieldType { public interface GetTicketUnionType { @JsOverlay static CancelCommandRequest.ToObjectReturnType0.ConsoleIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -135,20 +135,20 @@ static CancelCommandRequest.ToObjectReturnType0.ConsoleIdFieldType create() { @JsProperty void setTicket( - CancelCommandRequest.ToObjectReturnType0.ConsoleIdFieldType.GetTicketUnionType ticket); + CancelCommandRequest.ToObjectReturnType0.ConsoleIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - 
Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -173,12 +173,12 @@ static CancelCommandRequest.ToObjectReturnType0 create() { public static native CancelCommandRequest deserializeBinary(Uint8Array bytes); public static native CancelCommandRequest deserializeBinaryFromReader( - CancelCommandRequest message, Object reader); + CancelCommandRequest message, Object reader); public static native void serializeBinaryToWriter(CancelCommandRequest message, Object writer); public static native CancelCommandRequest.ToObjectReturnType toObject( - boolean includeInstance, CancelCommandRequest msg); + boolean includeInstance, CancelCommandRequest msg); public native void clearCommandId(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/CancelCommandResponse.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/CancelCommandResponse.java index fe19ee8d058..59010ba53d9 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/CancelCommandResponse.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/CancelCommandResponse.java @@ -5,14 +5,14 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb.CancelCommandResponse", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.CancelCommandResponse", + namespace = JsPackage.GLOBAL) public class CancelCommandResponse { public static native CancelCommandResponse deserializeBinary(Uint8Array bytes); public static native CancelCommandResponse deserializeBinaryFromReader( - CancelCommandResponse message, Object reader); + CancelCommandResponse message, Object reader); public static native void serializeBinaryToWriter(CancelCommandResponse message, Object 
writer); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/ChangeDocumentRequest.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/ChangeDocumentRequest.java index a3a65f36658..8e89928a961 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/ChangeDocumentRequest.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/ChangeDocumentRequest.java @@ -12,9 +12,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb.ChangeDocumentRequest", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.ChangeDocumentRequest", + namespace = JsPackage.GLOBAL) public class ChangeDocumentRequest { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -24,7 +24,7 @@ public interface ConsoleIdFieldType { public interface GetTicketUnionType { @JsOverlay static ChangeDocumentRequest.ToObjectReturnType.ConsoleIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -59,20 +59,20 @@ static ChangeDocumentRequest.ToObjectReturnType.ConsoleIdFieldType create() { @JsProperty void setTicket( - ChangeDocumentRequest.ToObjectReturnType.ConsoleIdFieldType.GetTicketUnionType ticket); + ChangeDocumentRequest.ToObjectReturnType.ConsoleIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -116,7 +116,7 @@ static ChangeDocumentRequest.ToObjectReturnType.ContentChangesListFieldType.Rang @JsProperty void 
setStart( - ChangeDocumentRequest.ToObjectReturnType.ContentChangesListFieldType.RangeFieldType.StartFieldType start); + ChangeDocumentRequest.ToObjectReturnType.ContentChangesListFieldType.RangeFieldType.StartFieldType start); } @JsOverlay @@ -135,7 +135,7 @@ static ChangeDocumentRequest.ToObjectReturnType.ContentChangesListFieldType crea @JsProperty void setRange( - ChangeDocumentRequest.ToObjectReturnType.ContentChangesListFieldType.RangeFieldType range); + ChangeDocumentRequest.ToObjectReturnType.ContentChangesListFieldType.RangeFieldType range); @JsProperty void setRangeLength(double rangeLength); @@ -183,19 +183,19 @@ static ChangeDocumentRequest.ToObjectReturnType create() { @JsOverlay default void setContentChangesList( - ChangeDocumentRequest.ToObjectReturnType.ContentChangesListFieldType[] contentChangesList) { + ChangeDocumentRequest.ToObjectReturnType.ContentChangesListFieldType[] contentChangesList) { setContentChangesList( - Js.>uncheckedCast( - contentChangesList)); + Js.>uncheckedCast( + contentChangesList)); } @JsProperty void setContentChangesList( - JsArray contentChangesList); + JsArray contentChangesList); @JsProperty void setTextDocument( - ChangeDocumentRequest.ToObjectReturnType.TextDocumentFieldType textDocument); + ChangeDocumentRequest.ToObjectReturnType.TextDocumentFieldType textDocument); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -206,7 +206,7 @@ public interface ConsoleIdFieldType { public interface GetTicketUnionType { @JsOverlay static ChangeDocumentRequest.ToObjectReturnType0.ConsoleIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -241,20 +241,20 @@ static ChangeDocumentRequest.ToObjectReturnType0.ConsoleIdFieldType create() { @JsProperty void setTicket( - ChangeDocumentRequest.ToObjectReturnType0.ConsoleIdFieldType.GetTicketUnionType ticket); + ChangeDocumentRequest.ToObjectReturnType0.ConsoleIdFieldType.GetTicketUnionType ticket); @JsOverlay default void 
setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -298,7 +298,7 @@ static ChangeDocumentRequest.ToObjectReturnType0.ContentChangesListFieldType.Ran @JsProperty void setStart( - ChangeDocumentRequest.ToObjectReturnType0.ContentChangesListFieldType.RangeFieldType.StartFieldType start); + ChangeDocumentRequest.ToObjectReturnType0.ContentChangesListFieldType.RangeFieldType.StartFieldType start); } @JsOverlay @@ -317,7 +317,7 @@ static ChangeDocumentRequest.ToObjectReturnType0.ContentChangesListFieldType cre @JsProperty void setRange( - ChangeDocumentRequest.ToObjectReturnType0.ContentChangesListFieldType.RangeFieldType range); + ChangeDocumentRequest.ToObjectReturnType0.ContentChangesListFieldType.RangeFieldType range); @JsProperty void setRangeLength(double rangeLength); @@ -365,38 +365,38 @@ static ChangeDocumentRequest.ToObjectReturnType0 create() { @JsOverlay default void setContentChangesList( - ChangeDocumentRequest.ToObjectReturnType0.ContentChangesListFieldType[] contentChangesList) { + ChangeDocumentRequest.ToObjectReturnType0.ContentChangesListFieldType[] contentChangesList) { setContentChangesList( - Js.>uncheckedCast( - contentChangesList)); + Js.>uncheckedCast( + contentChangesList)); } @JsProperty void setContentChangesList( - JsArray contentChangesList); + JsArray contentChangesList); @JsProperty void setTextDocument( - ChangeDocumentRequest.ToObjectReturnType0.TextDocumentFieldType textDocument); + ChangeDocumentRequest.ToObjectReturnType0.TextDocumentFieldType textDocument); } public static native ChangeDocumentRequest deserializeBinary(Uint8Array bytes); public static native ChangeDocumentRequest deserializeBinaryFromReader( - ChangeDocumentRequest message, Object reader); + ChangeDocumentRequest message, Object reader); public static native void 
serializeBinaryToWriter(ChangeDocumentRequest message, Object writer); public static native ChangeDocumentRequest.ToObjectReturnType toObject( - boolean includeInstance, ChangeDocumentRequest msg); + boolean includeInstance, ChangeDocumentRequest msg); public native TextDocumentContentChangeEvent addContentChanges(); public native TextDocumentContentChangeEvent addContentChanges( - TextDocumentContentChangeEvent value, double index); + TextDocumentContentChangeEvent value, double index); public native TextDocumentContentChangeEvent addContentChanges( - TextDocumentContentChangeEvent value); + TextDocumentContentChangeEvent value); public native void clearConsoleId(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/ChangeDocumentResponse.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/ChangeDocumentResponse.java index a5ae8b08460..3e361015f58 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/ChangeDocumentResponse.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/ChangeDocumentResponse.java @@ -5,17 +5,16 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb.ChangeDocumentResponse", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.ChangeDocumentResponse", + namespace = JsPackage.GLOBAL) public class ChangeDocumentResponse { public static native ChangeDocumentResponse deserializeBinary(Uint8Array bytes); public static native ChangeDocumentResponse deserializeBinaryFromReader( - ChangeDocumentResponse message, Object reader); + ChangeDocumentResponse message, Object reader); - public static native void serializeBinaryToWriter(ChangeDocumentResponse message, - Object writer); + public 
static native void serializeBinaryToWriter(ChangeDocumentResponse message, Object writer); public static native Object toObject(boolean includeInstance, ChangeDocumentResponse msg); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/CloseDocumentRequest.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/CloseDocumentRequest.java index 3c5ebd04b83..d662c4265e6 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/CloseDocumentRequest.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/CloseDocumentRequest.java @@ -10,9 +10,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb.CloseDocumentRequest", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.CloseDocumentRequest", + namespace = JsPackage.GLOBAL) public class CloseDocumentRequest { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -22,7 +22,7 @@ public interface ConsoleIdFieldType { public interface GetTicketUnionType { @JsOverlay static CloseDocumentRequest.ToObjectReturnType.ConsoleIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -57,20 +57,20 @@ static CloseDocumentRequest.ToObjectReturnType.ConsoleIdFieldType create() { @JsProperty void setTicket( - CloseDocumentRequest.ToObjectReturnType.ConsoleIdFieldType.GetTicketUnionType ticket); + CloseDocumentRequest.ToObjectReturnType.ConsoleIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - 
ticket)); + Js.uncheckedCast( + ticket)); } } @@ -110,7 +110,7 @@ static CloseDocumentRequest.ToObjectReturnType create() { @JsProperty void setTextDocument( - CloseDocumentRequest.ToObjectReturnType.TextDocumentFieldType textDocument); + CloseDocumentRequest.ToObjectReturnType.TextDocumentFieldType textDocument); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -121,7 +121,7 @@ public interface ConsoleIdFieldType { public interface GetTicketUnionType { @JsOverlay static CloseDocumentRequest.ToObjectReturnType0.ConsoleIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -156,20 +156,20 @@ static CloseDocumentRequest.ToObjectReturnType0.ConsoleIdFieldType create() { @JsProperty void setTicket( - CloseDocumentRequest.ToObjectReturnType0.ConsoleIdFieldType.GetTicketUnionType ticket); + CloseDocumentRequest.ToObjectReturnType0.ConsoleIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -209,18 +209,18 @@ static CloseDocumentRequest.ToObjectReturnType0 create() { @JsProperty void setTextDocument( - CloseDocumentRequest.ToObjectReturnType0.TextDocumentFieldType textDocument); + CloseDocumentRequest.ToObjectReturnType0.TextDocumentFieldType textDocument); } public static native CloseDocumentRequest deserializeBinary(Uint8Array bytes); public static native CloseDocumentRequest deserializeBinaryFromReader( - CloseDocumentRequest message, Object reader); + CloseDocumentRequest message, Object reader); public static native void serializeBinaryToWriter(CloseDocumentRequest message, Object writer); public static native CloseDocumentRequest.ToObjectReturnType toObject( - boolean includeInstance, CloseDocumentRequest msg); + boolean includeInstance, CloseDocumentRequest msg); 
public native void clearConsoleId(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/CloseDocumentResponse.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/CloseDocumentResponse.java index da9cf7b442b..3ae951c6090 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/CloseDocumentResponse.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/CloseDocumentResponse.java @@ -5,14 +5,14 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb.CloseDocumentResponse", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.CloseDocumentResponse", + namespace = JsPackage.GLOBAL) public class CloseDocumentResponse { public static native CloseDocumentResponse deserializeBinary(Uint8Array bytes); public static native CloseDocumentResponse deserializeBinaryFromReader( - CloseDocumentResponse message, Object reader); + CloseDocumentResponse message, Object reader); public static native void serializeBinaryToWriter(CloseDocumentResponse message, Object writer); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/CompletionContext.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/CompletionContext.java index 60192c9fb9e..cbca5abd9fa 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/CompletionContext.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/CompletionContext.java @@ -9,9 +9,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - 
name = "dhinternal.io.deephaven.proto.console_pb.CompletionContext", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.CompletionContext", + namespace = JsPackage.GLOBAL) public class CompletionContext { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -56,12 +56,12 @@ static CompletionContext.ToObjectReturnType0 create() { public static native CompletionContext deserializeBinary(Uint8Array bytes); public static native CompletionContext deserializeBinaryFromReader( - CompletionContext message, Object reader); + CompletionContext message, Object reader); public static native void serializeBinaryToWriter(CompletionContext message, Object writer); public static native CompletionContext.ToObjectReturnType toObject( - boolean includeInstance, CompletionContext msg); + boolean includeInstance, CompletionContext msg); public native String getTriggerCharacter(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/CompletionItem.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/CompletionItem.java index c425faf1ef7..2463ac27a04 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/CompletionItem.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/CompletionItem.java @@ -10,9 +10,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb.CompletionItem", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.CompletionItem", + namespace = JsPackage.GLOBAL) public class CompletionItem { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -56,7 +56,7 
@@ static CompletionItem.ToObjectReturnType.TextEditFieldType.RangeFieldType create @JsProperty void setStart( - CompletionItem.ToObjectReturnType.TextEditFieldType.RangeFieldType.StartFieldType start); + CompletionItem.ToObjectReturnType.TextEditFieldType.RangeFieldType.StartFieldType start); } @JsOverlay @@ -219,7 +219,7 @@ static CompletionItem.ToObjectReturnType0.TextEditFieldType.RangeFieldType creat @JsProperty void setStart( - CompletionItem.ToObjectReturnType0.TextEditFieldType.RangeFieldType.StartFieldType start); + CompletionItem.ToObjectReturnType0.TextEditFieldType.RangeFieldType.StartFieldType start); } @JsOverlay @@ -234,8 +234,7 @@ static CompletionItem.ToObjectReturnType0.TextEditFieldType create() { String getText(); @JsProperty - void setRange( - CompletionItem.ToObjectReturnType0.TextEditFieldType.RangeFieldType range); + void setRange(CompletionItem.ToObjectReturnType0.TextEditFieldType.RangeFieldType range); @JsProperty void setText(String text); @@ -344,12 +343,12 @@ default void setCommitCharactersList(String[] commitCharactersList) { public static native CompletionItem deserializeBinary(Uint8Array bytes); public static native CompletionItem deserializeBinaryFromReader( - CompletionItem message, Object reader); + CompletionItem message, Object reader); public static native void serializeBinaryToWriter(CompletionItem message, Object writer); public static native CompletionItem.ToObjectReturnType toObject( - boolean includeInstance, CompletionItem msg); + boolean includeInstance, CompletionItem msg); public native TextEdit addAdditionalTextEdits(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/DocumentRange.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/DocumentRange.java index 2e4ceaf7ab9..eced482201d 100644 --- 
a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/DocumentRange.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/DocumentRange.java @@ -9,9 +9,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb.DocumentRange", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.DocumentRange", + namespace = JsPackage.GLOBAL) public class DocumentRange { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -96,12 +96,12 @@ static DocumentRange.ToObjectReturnType0 create() { public static native DocumentRange deserializeBinary(Uint8Array bytes); public static native DocumentRange deserializeBinaryFromReader( - DocumentRange message, Object reader); + DocumentRange message, Object reader); public static native void serializeBinaryToWriter(DocumentRange message, Object writer); public static native DocumentRange.ToObjectReturnType toObject( - boolean includeInstance, DocumentRange msg); + boolean includeInstance, DocumentRange msg); public native void clearEnd(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/ExecuteCommandRequest.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/ExecuteCommandRequest.java index c128a88ffcb..71051842018 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/ExecuteCommandRequest.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/ExecuteCommandRequest.java @@ -10,9 +10,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = 
"dhinternal.io.deephaven.proto.console_pb.ExecuteCommandRequest", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.ExecuteCommandRequest", + namespace = JsPackage.GLOBAL) public class ExecuteCommandRequest { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -22,7 +22,7 @@ public interface ConsoleIdFieldType { public interface GetTicketUnionType { @JsOverlay static ExecuteCommandRequest.ToObjectReturnType.ConsoleIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -57,20 +57,20 @@ static ExecuteCommandRequest.ToObjectReturnType.ConsoleIdFieldType create() { @JsProperty void setTicket( - ExecuteCommandRequest.ToObjectReturnType.ConsoleIdFieldType.GetTicketUnionType ticket); + ExecuteCommandRequest.ToObjectReturnType.ConsoleIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -100,7 +100,7 @@ public interface ConsoleIdFieldType { public interface GetTicketUnionType { @JsOverlay static ExecuteCommandRequest.ToObjectReturnType0.ConsoleIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -135,20 +135,20 @@ static ExecuteCommandRequest.ToObjectReturnType0.ConsoleIdFieldType create() { @JsProperty void setTicket( - ExecuteCommandRequest.ToObjectReturnType0.ConsoleIdFieldType.GetTicketUnionType ticket); + ExecuteCommandRequest.ToObjectReturnType0.ConsoleIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } 
@@ -173,12 +173,12 @@ static ExecuteCommandRequest.ToObjectReturnType0 create() { public static native ExecuteCommandRequest deserializeBinary(Uint8Array bytes); public static native ExecuteCommandRequest deserializeBinaryFromReader( - ExecuteCommandRequest message, Object reader); + ExecuteCommandRequest message, Object reader); public static native void serializeBinaryToWriter(ExecuteCommandRequest message, Object writer); public static native ExecuteCommandRequest.ToObjectReturnType toObject( - boolean includeInstance, ExecuteCommandRequest msg); + boolean includeInstance, ExecuteCommandRequest msg); public native void clearConsoleId(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/ExecuteCommandResponse.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/ExecuteCommandResponse.java index 0f59dfe0ea2..adc864c5fd9 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/ExecuteCommandResponse.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/ExecuteCommandResponse.java @@ -10,9 +10,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb.ExecuteCommandResponse", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.ExecuteCommandResponse", + namespace = JsPackage.GLOBAL) public class ExecuteCommandResponse { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -55,15 +55,15 @@ static ExecuteCommandResponse.ToObjectReturnType create() { @JsOverlay default void setCreatedList( - ExecuteCommandResponse.ToObjectReturnType.CreatedListFieldType[] createdList) { + ExecuteCommandResponse.ToObjectReturnType.CreatedListFieldType[] createdList) { 
setCreatedList( - Js.>uncheckedCast( - createdList)); + Js.>uncheckedCast( + createdList)); } @JsProperty void setCreatedList( - JsArray createdList); + JsArray createdList); @JsProperty void setErrorMessage(String errorMessage); @@ -126,15 +126,15 @@ static ExecuteCommandResponse.ToObjectReturnType0 create() { @JsOverlay default void setCreatedList( - ExecuteCommandResponse.ToObjectReturnType0.CreatedListFieldType[] createdList) { + ExecuteCommandResponse.ToObjectReturnType0.CreatedListFieldType[] createdList) { setCreatedList( - Js.>uncheckedCast( - createdList)); + Js.>uncheckedCast( + createdList)); } @JsProperty void setCreatedList( - JsArray createdList); + JsArray createdList); @JsProperty void setErrorMessage(String errorMessage); @@ -159,13 +159,12 @@ default void setUpdatedList(Object[] updatedList) { public static native ExecuteCommandResponse deserializeBinary(Uint8Array bytes); public static native ExecuteCommandResponse deserializeBinaryFromReader( - ExecuteCommandResponse message, Object reader); + ExecuteCommandResponse message, Object reader); - public static native void serializeBinaryToWriter(ExecuteCommandResponse message, - Object writer); + public static native void serializeBinaryToWriter(ExecuteCommandResponse message, Object writer); public static native ExecuteCommandResponse.ToObjectReturnType toObject( - boolean includeInstance, ExecuteCommandResponse msg); + boolean includeInstance, ExecuteCommandResponse msg); public native VariableDefinition addCreated(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/FetchFigureRequest.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/FetchFigureRequest.java index 8969f0df5ee..4b9d8cecdbd 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/FetchFigureRequest.java +++ 
b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/FetchFigureRequest.java @@ -10,9 +10,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb.FetchFigureRequest", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.FetchFigureRequest", + namespace = JsPackage.GLOBAL) public class FetchFigureRequest { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -22,7 +22,7 @@ public interface ConsoleIdFieldType { public interface GetTicketUnionType { @JsOverlay static FetchFigureRequest.ToObjectReturnType.ConsoleIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -57,20 +57,20 @@ static FetchFigureRequest.ToObjectReturnType.ConsoleIdFieldType create() { @JsProperty void setTicket( - FetchFigureRequest.ToObjectReturnType.ConsoleIdFieldType.GetTicketUnionType ticket); + FetchFigureRequest.ToObjectReturnType.ConsoleIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -100,7 +100,7 @@ public interface ConsoleIdFieldType { public interface GetTicketUnionType { @JsOverlay static FetchFigureRequest.ToObjectReturnType0.ConsoleIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -135,20 +135,20 @@ static FetchFigureRequest.ToObjectReturnType0.ConsoleIdFieldType create() { @JsProperty void setTicket( - FetchFigureRequest.ToObjectReturnType0.ConsoleIdFieldType.GetTicketUnionType ticket); + FetchFigureRequest.ToObjectReturnType0.ConsoleIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - 
Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -173,12 +173,12 @@ static FetchFigureRequest.ToObjectReturnType0 create() { public static native FetchFigureRequest deserializeBinary(Uint8Array bytes); public static native FetchFigureRequest deserializeBinaryFromReader( - FetchFigureRequest message, Object reader); + FetchFigureRequest message, Object reader); public static native void serializeBinaryToWriter(FetchFigureRequest message, Object writer); public static native FetchFigureRequest.ToObjectReturnType toObject( - boolean includeInstance, FetchFigureRequest msg); + boolean includeInstance, FetchFigureRequest msg); public native void clearConsoleId(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/FetchFigureResponse.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/FetchFigureResponse.java index 2a30b515e06..765da46c220 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/FetchFigureResponse.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/FetchFigureResponse.java @@ -10,9 +10,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb.FetchFigureResponse", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.FetchFigureResponse", + namespace = JsPackage.GLOBAL) public class FetchFigureResponse { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -88,13 +88,12 @@ static FetchFigureResponse.ToObjectReturnType.FigureDescriptorFieldType.ChartsLi @JsOverlay default void 
setBusinessPeriodsList(Object[] businessPeriodsList) { - setBusinessPeriodsList( - Js.>uncheckedCast(businessPeriodsList)); + setBusinessPeriodsList(Js.>uncheckedCast(businessPeriodsList)); } @JsProperty void setDate( - FetchFigureResponse.ToObjectReturnType.FigureDescriptorFieldType.ChartsListFieldType.AxesListFieldType.BusinessCalendarDescriptorFieldType.HolidaysListFieldType.DateFieldType date); + FetchFigureResponse.ToObjectReturnType.FigureDescriptorFieldType.ChartsListFieldType.AxesListFieldType.BusinessCalendarDescriptorFieldType.HolidaysListFieldType.DateFieldType date); } @JsOverlay @@ -122,33 +121,32 @@ static FetchFigureResponse.ToObjectReturnType.FigureDescriptorFieldType.ChartsLi @JsOverlay default void setBusinessDaysList(double[] businessDaysList) { - setBusinessDaysList( - Js.>uncheckedCast(businessDaysList)); + setBusinessDaysList(Js.>uncheckedCast(businessDaysList)); } @JsOverlay default void setBusinessPeriodsList( - FetchFigureResponse.ToObjectReturnType.FigureDescriptorFieldType.ChartsListFieldType.AxesListFieldType.BusinessCalendarDescriptorFieldType.BusinessPeriodsListFieldType[] businessPeriodsList) { + FetchFigureResponse.ToObjectReturnType.FigureDescriptorFieldType.ChartsListFieldType.AxesListFieldType.BusinessCalendarDescriptorFieldType.BusinessPeriodsListFieldType[] businessPeriodsList) { setBusinessPeriodsList( - Js.>uncheckedCast( - businessPeriodsList)); + Js.>uncheckedCast( + businessPeriodsList)); } @JsProperty void setBusinessPeriodsList( - JsArray businessPeriodsList); + JsArray businessPeriodsList); @JsOverlay default void setHolidaysList( - FetchFigureResponse.ToObjectReturnType.FigureDescriptorFieldType.ChartsListFieldType.AxesListFieldType.BusinessCalendarDescriptorFieldType.HolidaysListFieldType[] holidaysList) { + FetchFigureResponse.ToObjectReturnType.FigureDescriptorFieldType.ChartsListFieldType.AxesListFieldType.BusinessCalendarDescriptorFieldType.HolidaysListFieldType[] holidaysList) { setHolidaysList( - 
Js.>uncheckedCast( - holidaysList)); + Js.>uncheckedCast( + holidaysList)); } @JsProperty void setHolidaysList( - JsArray holidaysList); + JsArray holidaysList); @JsProperty void setName(String name); @@ -227,7 +225,7 @@ static FetchFigureResponse.ToObjectReturnType.FigureDescriptorFieldType.ChartsLi @JsProperty void setBusinessCalendarDescriptor( - FetchFigureResponse.ToObjectReturnType.FigureDescriptorFieldType.ChartsListFieldType.AxesListFieldType.BusinessCalendarDescriptorFieldType businessCalendarDescriptor); + FetchFigureResponse.ToObjectReturnType.FigureDescriptorFieldType.ChartsListFieldType.AxesListFieldType.BusinessCalendarDescriptorFieldType businessCalendarDescriptor); @JsProperty void setColor(String color); @@ -264,8 +262,7 @@ void setBusinessCalendarDescriptor( @JsOverlay default void setMajorTickLocationsList(double[] majorTickLocationsList) { - setMajorTickLocationsList( - Js.>uncheckedCast(majorTickLocationsList)); + setMajorTickLocationsList(Js.>uncheckedCast(majorTickLocationsList)); } @JsProperty @@ -487,26 +484,26 @@ static FetchFigureResponse.ToObjectReturnType.FigureDescriptorFieldType.ChartsLi @JsOverlay default void setDataSourcesList( - FetchFigureResponse.ToObjectReturnType.FigureDescriptorFieldType.ChartsListFieldType.MultiSeriesListFieldType.DataSourcesListFieldType[] dataSourcesList) { + FetchFigureResponse.ToObjectReturnType.FigureDescriptorFieldType.ChartsListFieldType.MultiSeriesListFieldType.DataSourcesListFieldType[] dataSourcesList) { setDataSourcesList( - Js.>uncheckedCast( - dataSourcesList)); + Js.>uncheckedCast( + dataSourcesList)); } @JsProperty void setDataSourcesList( - JsArray dataSourcesList); + JsArray dataSourcesList); @JsProperty void setGradientVisible(Object gradientVisible); @JsProperty void setLineColor( - FetchFigureResponse.ToObjectReturnType.FigureDescriptorFieldType.ChartsListFieldType.MultiSeriesListFieldType.LineColorFieldType lineColor); + 
FetchFigureResponse.ToObjectReturnType.FigureDescriptorFieldType.ChartsListFieldType.MultiSeriesListFieldType.LineColorFieldType lineColor); @JsProperty void setLinesVisible( - FetchFigureResponse.ToObjectReturnType.FigureDescriptorFieldType.ChartsListFieldType.MultiSeriesListFieldType.LinesVisibleFieldType linesVisible); + FetchFigureResponse.ToObjectReturnType.FigureDescriptorFieldType.ChartsListFieldType.MultiSeriesListFieldType.LinesVisibleFieldType linesVisible); @JsProperty void setName(String name); @@ -528,7 +525,7 @@ void setLinesVisible( @JsProperty void setPointSize( - FetchFigureResponse.ToObjectReturnType.FigureDescriptorFieldType.ChartsListFieldType.MultiSeriesListFieldType.PointSizeFieldType pointSize); + FetchFigureResponse.ToObjectReturnType.FigureDescriptorFieldType.ChartsListFieldType.MultiSeriesListFieldType.PointSizeFieldType pointSize); @JsProperty void setPointsVisible(Object pointsVisible); @@ -565,8 +562,7 @@ static FetchFigureResponse.ToObjectReturnType.FigureDescriptorFieldType.ChartsLi @JsOverlay default void setColumnTypesList(String[] columnTypesList) { - setColumnTypesList( - Js.>uncheckedCast(columnTypesList)); + setColumnTypesList(Js.>uncheckedCast(columnTypesList)); } @JsProperty @@ -618,7 +614,7 @@ static FetchFigureResponse.ToObjectReturnType.FigureDescriptorFieldType.ChartsLi @JsProperty void setOneClick( - FetchFigureResponse.ToObjectReturnType.FigureDescriptorFieldType.ChartsListFieldType.SeriesListFieldType.DataSourcesListFieldType.OneClickFieldType oneClick); + FetchFigureResponse.ToObjectReturnType.FigureDescriptorFieldType.ChartsListFieldType.SeriesListFieldType.DataSourcesListFieldType.OneClickFieldType oneClick); @JsProperty void setTableId(double tableId); @@ -679,15 +675,15 @@ static FetchFigureResponse.ToObjectReturnType.FigureDescriptorFieldType.ChartsLi @JsOverlay default void setDataSourcesList( - 
FetchFigureResponse.ToObjectReturnType.FigureDescriptorFieldType.ChartsListFieldType.SeriesListFieldType.DataSourcesListFieldType[] dataSourcesList) { + FetchFigureResponse.ToObjectReturnType.FigureDescriptorFieldType.ChartsListFieldType.SeriesListFieldType.DataSourcesListFieldType[] dataSourcesList) { setDataSourcesList( - Js.>uncheckedCast( - dataSourcesList)); + Js.>uncheckedCast( + dataSourcesList)); } @JsProperty void setDataSourcesList( - JsArray dataSourcesList); + JsArray dataSourcesList); @JsProperty void setGradientVisible(boolean gradientVisible); @@ -775,15 +771,15 @@ static FetchFigureResponse.ToObjectReturnType.FigureDescriptorFieldType.ChartsLi @JsOverlay default void setAxesList( - FetchFigureResponse.ToObjectReturnType.FigureDescriptorFieldType.ChartsListFieldType.AxesListFieldType[] axesList) { + FetchFigureResponse.ToObjectReturnType.FigureDescriptorFieldType.ChartsListFieldType.AxesListFieldType[] axesList) { setAxesList( - Js.>uncheckedCast( - axesList)); + Js.>uncheckedCast( + axesList)); } @JsProperty void setAxesList( - JsArray axesList); + JsArray axesList); @JsProperty void setChartType(double chartType); @@ -802,14 +798,14 @@ void setAxesList( @JsProperty void setMultiSeriesList( - JsArray multiSeriesList); + JsArray multiSeriesList); @JsOverlay default void setMultiSeriesList( - FetchFigureResponse.ToObjectReturnType.FigureDescriptorFieldType.ChartsListFieldType.MultiSeriesListFieldType[] multiSeriesList) { + FetchFigureResponse.ToObjectReturnType.FigureDescriptorFieldType.ChartsListFieldType.MultiSeriesListFieldType[] multiSeriesList) { setMultiSeriesList( - Js.>uncheckedCast( - multiSeriesList)); + Js.>uncheckedCast( + multiSeriesList)); } @JsProperty @@ -817,14 +813,14 @@ default void setMultiSeriesList( @JsProperty void setSeriesList( - JsArray seriesList); + JsArray seriesList); @JsOverlay default void setSeriesList( - FetchFigureResponse.ToObjectReturnType.FigureDescriptorFieldType.ChartsListFieldType.SeriesListFieldType[] 
seriesList) { + FetchFigureResponse.ToObjectReturnType.FigureDescriptorFieldType.ChartsListFieldType.SeriesListFieldType[] seriesList) { setSeriesList( - Js.>uncheckedCast( - seriesList)); + Js.>uncheckedCast( + seriesList)); } @JsProperty @@ -846,7 +842,7 @@ public interface TablesListFieldType { public interface GetSchemaHeaderUnionType { @JsOverlay static FetchFigureResponse.ToObjectReturnType.FigureDescriptorFieldType.TablesListFieldType.GetSchemaHeaderUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -879,7 +875,7 @@ public interface TicketFieldType { public interface GetTicketUnionType { @JsOverlay static FetchFigureResponse.ToObjectReturnType.FigureDescriptorFieldType.TablesListFieldType.ResultIdFieldType.TicketFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -914,20 +910,20 @@ static FetchFigureResponse.ToObjectReturnType.FigureDescriptorFieldType.TablesLi @JsProperty void setTicket( - FetchFigureResponse.ToObjectReturnType.FigureDescriptorFieldType.TablesListFieldType.ResultIdFieldType.TicketFieldType.GetTicketUnionType ticket); + FetchFigureResponse.ToObjectReturnType.FigureDescriptorFieldType.TablesListFieldType.ResultIdFieldType.TicketFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -947,7 +943,7 @@ static FetchFigureResponse.ToObjectReturnType.FigureDescriptorFieldType.TablesLi @JsProperty void setTicket( - FetchFigureResponse.ToObjectReturnType.FigureDescriptorFieldType.TablesListFieldType.ResultIdFieldType.TicketFieldType ticket); + FetchFigureResponse.ToObjectReturnType.FigureDescriptorFieldType.TablesListFieldType.ResultIdFieldType.TicketFieldType ticket); } @JsOverlay @@ -981,24 +977,24 @@ static 
FetchFigureResponse.ToObjectReturnType.FigureDescriptorFieldType.TablesLi @JsProperty void setResultId( - FetchFigureResponse.ToObjectReturnType.FigureDescriptorFieldType.TablesListFieldType.ResultIdFieldType resultId); + FetchFigureResponse.ToObjectReturnType.FigureDescriptorFieldType.TablesListFieldType.ResultIdFieldType resultId); @JsProperty void setSchemaHeader( - FetchFigureResponse.ToObjectReturnType.FigureDescriptorFieldType.TablesListFieldType.GetSchemaHeaderUnionType schemaHeader); + FetchFigureResponse.ToObjectReturnType.FigureDescriptorFieldType.TablesListFieldType.GetSchemaHeaderUnionType schemaHeader); @JsOverlay default void setSchemaHeader(String schemaHeader) { setSchemaHeader( - Js.uncheckedCast( - schemaHeader)); + Js.uncheckedCast( + schemaHeader)); } @JsOverlay default void setSchemaHeader(Uint8Array schemaHeader) { setSchemaHeader( - Js.uncheckedCast( - schemaHeader)); + Js.uncheckedCast( + schemaHeader)); } @JsProperty @@ -1042,15 +1038,15 @@ static FetchFigureResponse.ToObjectReturnType.FigureDescriptorFieldType create() @JsOverlay default void setChartsList( - FetchFigureResponse.ToObjectReturnType.FigureDescriptorFieldType.ChartsListFieldType[] chartsList) { + FetchFigureResponse.ToObjectReturnType.FigureDescriptorFieldType.ChartsListFieldType[] chartsList) { setChartsList( - Js.>uncheckedCast( - chartsList)); + Js.>uncheckedCast( + chartsList)); } @JsProperty void setChartsList( - JsArray chartsList); + JsArray chartsList); @JsProperty void setCols(double cols); @@ -1068,14 +1064,14 @@ default void setErrorsList(String[] errorsList) { @JsProperty void setTablesList( - JsArray tablesList); + JsArray tablesList); @JsOverlay default void setTablesList( - FetchFigureResponse.ToObjectReturnType.FigureDescriptorFieldType.TablesListFieldType[] tablesList) { + FetchFigureResponse.ToObjectReturnType.FigureDescriptorFieldType.TablesListFieldType[] tablesList) { setTablesList( - Js.>uncheckedCast( - tablesList)); + Js.>uncheckedCast( + tablesList)); 
} @JsProperty @@ -1101,7 +1097,7 @@ static FetchFigureResponse.ToObjectReturnType create() { @JsProperty void setFigureDescriptor( - FetchFigureResponse.ToObjectReturnType.FigureDescriptorFieldType figureDescriptor); + FetchFigureResponse.ToObjectReturnType.FigureDescriptorFieldType figureDescriptor); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -1178,13 +1174,12 @@ static FetchFigureResponse.ToObjectReturnType0.FigureDescriptorFieldType.ChartsL @JsOverlay default void setBusinessPeriodsList(Object[] businessPeriodsList) { - setBusinessPeriodsList( - Js.>uncheckedCast(businessPeriodsList)); + setBusinessPeriodsList(Js.>uncheckedCast(businessPeriodsList)); } @JsProperty void setDate( - FetchFigureResponse.ToObjectReturnType0.FigureDescriptorFieldType.ChartsListFieldType.AxesListFieldType.BusinessCalendarDescriptorFieldType.HolidaysListFieldType.DateFieldType date); + FetchFigureResponse.ToObjectReturnType0.FigureDescriptorFieldType.ChartsListFieldType.AxesListFieldType.BusinessCalendarDescriptorFieldType.HolidaysListFieldType.DateFieldType date); } @JsOverlay @@ -1212,33 +1207,32 @@ static FetchFigureResponse.ToObjectReturnType0.FigureDescriptorFieldType.ChartsL @JsOverlay default void setBusinessDaysList(double[] businessDaysList) { - setBusinessDaysList( - Js.>uncheckedCast(businessDaysList)); + setBusinessDaysList(Js.>uncheckedCast(businessDaysList)); } @JsOverlay default void setBusinessPeriodsList( - FetchFigureResponse.ToObjectReturnType0.FigureDescriptorFieldType.ChartsListFieldType.AxesListFieldType.BusinessCalendarDescriptorFieldType.BusinessPeriodsListFieldType[] businessPeriodsList) { + FetchFigureResponse.ToObjectReturnType0.FigureDescriptorFieldType.ChartsListFieldType.AxesListFieldType.BusinessCalendarDescriptorFieldType.BusinessPeriodsListFieldType[] businessPeriodsList) { setBusinessPeriodsList( - Js.>uncheckedCast( - businessPeriodsList)); + Js.>uncheckedCast( + businessPeriodsList)); } @JsProperty void 
setBusinessPeriodsList( - JsArray businessPeriodsList); + JsArray businessPeriodsList); @JsOverlay default void setHolidaysList( - FetchFigureResponse.ToObjectReturnType0.FigureDescriptorFieldType.ChartsListFieldType.AxesListFieldType.BusinessCalendarDescriptorFieldType.HolidaysListFieldType[] holidaysList) { + FetchFigureResponse.ToObjectReturnType0.FigureDescriptorFieldType.ChartsListFieldType.AxesListFieldType.BusinessCalendarDescriptorFieldType.HolidaysListFieldType[] holidaysList) { setHolidaysList( - Js.>uncheckedCast( - holidaysList)); + Js.>uncheckedCast( + holidaysList)); } @JsProperty void setHolidaysList( - JsArray holidaysList); + JsArray holidaysList); @JsProperty void setName(String name); @@ -1317,7 +1311,7 @@ static FetchFigureResponse.ToObjectReturnType0.FigureDescriptorFieldType.ChartsL @JsProperty void setBusinessCalendarDescriptor( - FetchFigureResponse.ToObjectReturnType0.FigureDescriptorFieldType.ChartsListFieldType.AxesListFieldType.BusinessCalendarDescriptorFieldType businessCalendarDescriptor); + FetchFigureResponse.ToObjectReturnType0.FigureDescriptorFieldType.ChartsListFieldType.AxesListFieldType.BusinessCalendarDescriptorFieldType businessCalendarDescriptor); @JsProperty void setColor(String color); @@ -1354,8 +1348,7 @@ void setBusinessCalendarDescriptor( @JsOverlay default void setMajorTickLocationsList(double[] majorTickLocationsList) { - setMajorTickLocationsList( - Js.>uncheckedCast(majorTickLocationsList)); + setMajorTickLocationsList(Js.>uncheckedCast(majorTickLocationsList)); } @JsProperty @@ -1577,26 +1570,26 @@ static FetchFigureResponse.ToObjectReturnType0.FigureDescriptorFieldType.ChartsL @JsOverlay default void setDataSourcesList( - FetchFigureResponse.ToObjectReturnType0.FigureDescriptorFieldType.ChartsListFieldType.MultiSeriesListFieldType.DataSourcesListFieldType[] dataSourcesList) { + FetchFigureResponse.ToObjectReturnType0.FigureDescriptorFieldType.ChartsListFieldType.MultiSeriesListFieldType.DataSourcesListFieldType[] 
dataSourcesList) { setDataSourcesList( - Js.>uncheckedCast( - dataSourcesList)); + Js.>uncheckedCast( + dataSourcesList)); } @JsProperty void setDataSourcesList( - JsArray dataSourcesList); + JsArray dataSourcesList); @JsProperty void setGradientVisible(Object gradientVisible); @JsProperty void setLineColor( - FetchFigureResponse.ToObjectReturnType0.FigureDescriptorFieldType.ChartsListFieldType.MultiSeriesListFieldType.LineColorFieldType lineColor); + FetchFigureResponse.ToObjectReturnType0.FigureDescriptorFieldType.ChartsListFieldType.MultiSeriesListFieldType.LineColorFieldType lineColor); @JsProperty void setLinesVisible( - FetchFigureResponse.ToObjectReturnType0.FigureDescriptorFieldType.ChartsListFieldType.MultiSeriesListFieldType.LinesVisibleFieldType linesVisible); + FetchFigureResponse.ToObjectReturnType0.FigureDescriptorFieldType.ChartsListFieldType.MultiSeriesListFieldType.LinesVisibleFieldType linesVisible); @JsProperty void setName(String name); @@ -1618,7 +1611,7 @@ void setLinesVisible( @JsProperty void setPointSize( - FetchFigureResponse.ToObjectReturnType0.FigureDescriptorFieldType.ChartsListFieldType.MultiSeriesListFieldType.PointSizeFieldType pointSize); + FetchFigureResponse.ToObjectReturnType0.FigureDescriptorFieldType.ChartsListFieldType.MultiSeriesListFieldType.PointSizeFieldType pointSize); @JsProperty void setPointsVisible(Object pointsVisible); @@ -1655,8 +1648,7 @@ static FetchFigureResponse.ToObjectReturnType0.FigureDescriptorFieldType.ChartsL @JsOverlay default void setColumnTypesList(String[] columnTypesList) { - setColumnTypesList( - Js.>uncheckedCast(columnTypesList)); + setColumnTypesList(Js.>uncheckedCast(columnTypesList)); } @JsProperty @@ -1708,7 +1700,7 @@ static FetchFigureResponse.ToObjectReturnType0.FigureDescriptorFieldType.ChartsL @JsProperty void setOneClick( - FetchFigureResponse.ToObjectReturnType0.FigureDescriptorFieldType.ChartsListFieldType.SeriesListFieldType.DataSourcesListFieldType.OneClickFieldType oneClick); + 
FetchFigureResponse.ToObjectReturnType0.FigureDescriptorFieldType.ChartsListFieldType.SeriesListFieldType.DataSourcesListFieldType.OneClickFieldType oneClick); @JsProperty void setTableId(double tableId); @@ -1769,15 +1761,15 @@ static FetchFigureResponse.ToObjectReturnType0.FigureDescriptorFieldType.ChartsL @JsOverlay default void setDataSourcesList( - FetchFigureResponse.ToObjectReturnType0.FigureDescriptorFieldType.ChartsListFieldType.SeriesListFieldType.DataSourcesListFieldType[] dataSourcesList) { + FetchFigureResponse.ToObjectReturnType0.FigureDescriptorFieldType.ChartsListFieldType.SeriesListFieldType.DataSourcesListFieldType[] dataSourcesList) { setDataSourcesList( - Js.>uncheckedCast( - dataSourcesList)); + Js.>uncheckedCast( + dataSourcesList)); } @JsProperty void setDataSourcesList( - JsArray dataSourcesList); + JsArray dataSourcesList); @JsProperty void setGradientVisible(boolean gradientVisible); @@ -1865,15 +1857,15 @@ static FetchFigureResponse.ToObjectReturnType0.FigureDescriptorFieldType.ChartsL @JsOverlay default void setAxesList( - FetchFigureResponse.ToObjectReturnType0.FigureDescriptorFieldType.ChartsListFieldType.AxesListFieldType[] axesList) { + FetchFigureResponse.ToObjectReturnType0.FigureDescriptorFieldType.ChartsListFieldType.AxesListFieldType[] axesList) { setAxesList( - Js.>uncheckedCast( - axesList)); + Js.>uncheckedCast( + axesList)); } @JsProperty void setAxesList( - JsArray axesList); + JsArray axesList); @JsProperty void setChartType(double chartType); @@ -1892,14 +1884,14 @@ void setAxesList( @JsProperty void setMultiSeriesList( - JsArray multiSeriesList); + JsArray multiSeriesList); @JsOverlay default void setMultiSeriesList( - FetchFigureResponse.ToObjectReturnType0.FigureDescriptorFieldType.ChartsListFieldType.MultiSeriesListFieldType[] multiSeriesList) { + FetchFigureResponse.ToObjectReturnType0.FigureDescriptorFieldType.ChartsListFieldType.MultiSeriesListFieldType[] multiSeriesList) { setMultiSeriesList( - Js.>uncheckedCast( 
- multiSeriesList)); + Js.>uncheckedCast( + multiSeriesList)); } @JsProperty @@ -1907,14 +1899,14 @@ default void setMultiSeriesList( @JsProperty void setSeriesList( - JsArray seriesList); + JsArray seriesList); @JsOverlay default void setSeriesList( - FetchFigureResponse.ToObjectReturnType0.FigureDescriptorFieldType.ChartsListFieldType.SeriesListFieldType[] seriesList) { + FetchFigureResponse.ToObjectReturnType0.FigureDescriptorFieldType.ChartsListFieldType.SeriesListFieldType[] seriesList) { setSeriesList( - Js.>uncheckedCast( - seriesList)); + Js.>uncheckedCast( + seriesList)); } @JsProperty @@ -1936,7 +1928,7 @@ public interface TablesListFieldType { public interface GetSchemaHeaderUnionType { @JsOverlay static FetchFigureResponse.ToObjectReturnType0.FigureDescriptorFieldType.TablesListFieldType.GetSchemaHeaderUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -1969,7 +1961,7 @@ public interface TicketFieldType { public interface GetTicketUnionType { @JsOverlay static FetchFigureResponse.ToObjectReturnType0.FigureDescriptorFieldType.TablesListFieldType.ResultIdFieldType.TicketFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -2004,20 +1996,20 @@ static FetchFigureResponse.ToObjectReturnType0.FigureDescriptorFieldType.TablesL @JsProperty void setTicket( - FetchFigureResponse.ToObjectReturnType0.FigureDescriptorFieldType.TablesListFieldType.ResultIdFieldType.TicketFieldType.GetTicketUnionType ticket); + FetchFigureResponse.ToObjectReturnType0.FigureDescriptorFieldType.TablesListFieldType.ResultIdFieldType.TicketFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -2037,7 +2029,7 @@ static FetchFigureResponse.ToObjectReturnType0.FigureDescriptorFieldType.TablesL 
@JsProperty void setTicket( - FetchFigureResponse.ToObjectReturnType0.FigureDescriptorFieldType.TablesListFieldType.ResultIdFieldType.TicketFieldType ticket); + FetchFigureResponse.ToObjectReturnType0.FigureDescriptorFieldType.TablesListFieldType.ResultIdFieldType.TicketFieldType ticket); } @JsOverlay @@ -2071,24 +2063,24 @@ static FetchFigureResponse.ToObjectReturnType0.FigureDescriptorFieldType.TablesL @JsProperty void setResultId( - FetchFigureResponse.ToObjectReturnType0.FigureDescriptorFieldType.TablesListFieldType.ResultIdFieldType resultId); + FetchFigureResponse.ToObjectReturnType0.FigureDescriptorFieldType.TablesListFieldType.ResultIdFieldType resultId); @JsProperty void setSchemaHeader( - FetchFigureResponse.ToObjectReturnType0.FigureDescriptorFieldType.TablesListFieldType.GetSchemaHeaderUnionType schemaHeader); + FetchFigureResponse.ToObjectReturnType0.FigureDescriptorFieldType.TablesListFieldType.GetSchemaHeaderUnionType schemaHeader); @JsOverlay default void setSchemaHeader(String schemaHeader) { setSchemaHeader( - Js.uncheckedCast( - schemaHeader)); + Js.uncheckedCast( + schemaHeader)); } @JsOverlay default void setSchemaHeader(Uint8Array schemaHeader) { setSchemaHeader( - Js.uncheckedCast( - schemaHeader)); + Js.uncheckedCast( + schemaHeader)); } @JsProperty @@ -2132,15 +2124,15 @@ static FetchFigureResponse.ToObjectReturnType0.FigureDescriptorFieldType create( @JsOverlay default void setChartsList( - FetchFigureResponse.ToObjectReturnType0.FigureDescriptorFieldType.ChartsListFieldType[] chartsList) { + FetchFigureResponse.ToObjectReturnType0.FigureDescriptorFieldType.ChartsListFieldType[] chartsList) { setChartsList( - Js.>uncheckedCast( - chartsList)); + Js.>uncheckedCast( + chartsList)); } @JsProperty void setChartsList( - JsArray chartsList); + JsArray chartsList); @JsProperty void setCols(double cols); @@ -2158,14 +2150,14 @@ default void setErrorsList(String[] errorsList) { @JsProperty void setTablesList( - JsArray tablesList); + JsArray 
tablesList); @JsOverlay default void setTablesList( - FetchFigureResponse.ToObjectReturnType0.FigureDescriptorFieldType.TablesListFieldType[] tablesList) { + FetchFigureResponse.ToObjectReturnType0.FigureDescriptorFieldType.TablesListFieldType[] tablesList) { setTablesList( - Js.>uncheckedCast( - tablesList)); + Js.>uncheckedCast( + tablesList)); } @JsProperty @@ -2191,18 +2183,18 @@ static FetchFigureResponse.ToObjectReturnType0 create() { @JsProperty void setFigureDescriptor( - FetchFigureResponse.ToObjectReturnType0.FigureDescriptorFieldType figureDescriptor); + FetchFigureResponse.ToObjectReturnType0.FigureDescriptorFieldType figureDescriptor); } public static native FetchFigureResponse deserializeBinary(Uint8Array bytes); public static native FetchFigureResponse deserializeBinaryFromReader( - FetchFigureResponse message, Object reader); + FetchFigureResponse message, Object reader); public static native void serializeBinaryToWriter(FetchFigureResponse message, Object writer); public static native FetchFigureResponse.ToObjectReturnType toObject( - boolean includeInstance, FetchFigureResponse msg); + boolean includeInstance, FetchFigureResponse msg); public native void clearFigureDescriptor(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/FetchPandasTableRequest.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/FetchPandasTableRequest.java index 62e688301d9..c4122628af5 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/FetchPandasTableRequest.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/FetchPandasTableRequest.java @@ -10,9 +10,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb.FetchPandasTableRequest", - namespace = 
JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.FetchPandasTableRequest", + namespace = JsPackage.GLOBAL) public class FetchPandasTableRequest { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -22,7 +22,7 @@ public interface ConsoleIdFieldType { public interface GetTicketUnionType { @JsOverlay static FetchPandasTableRequest.ToObjectReturnType.ConsoleIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -57,20 +57,20 @@ static FetchPandasTableRequest.ToObjectReturnType.ConsoleIdFieldType create() { @JsProperty void setTicket( - FetchPandasTableRequest.ToObjectReturnType.ConsoleIdFieldType.GetTicketUnionType ticket); + FetchPandasTableRequest.ToObjectReturnType.ConsoleIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -100,7 +100,7 @@ public interface ConsoleIdFieldType { public interface GetTicketUnionType { @JsOverlay static FetchPandasTableRequest.ToObjectReturnType0.ConsoleIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -135,20 +135,20 @@ static FetchPandasTableRequest.ToObjectReturnType0.ConsoleIdFieldType create() { @JsProperty void setTicket( - FetchPandasTableRequest.ToObjectReturnType0.ConsoleIdFieldType.GetTicketUnionType ticket); + FetchPandasTableRequest.ToObjectReturnType0.ConsoleIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -173,13 +173,12 @@ static 
FetchPandasTableRequest.ToObjectReturnType0 create() { public static native FetchPandasTableRequest deserializeBinary(Uint8Array bytes); public static native FetchPandasTableRequest deserializeBinaryFromReader( - FetchPandasTableRequest message, Object reader); + FetchPandasTableRequest message, Object reader); - public static native void serializeBinaryToWriter(FetchPandasTableRequest message, - Object writer); + public static native void serializeBinaryToWriter(FetchPandasTableRequest message, Object writer); public static native FetchPandasTableRequest.ToObjectReturnType toObject( - boolean includeInstance, FetchPandasTableRequest msg); + boolean includeInstance, FetchPandasTableRequest msg); public native void clearConsoleId(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/FetchTableMapRequest.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/FetchTableMapRequest.java index 66d2703b540..8439e3b831b 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/FetchTableMapRequest.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/FetchTableMapRequest.java @@ -10,9 +10,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb.FetchTableMapRequest", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.FetchTableMapRequest", + namespace = JsPackage.GLOBAL) public class FetchTableMapRequest { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -22,7 +22,7 @@ public interface ConsoleIdFieldType { public interface GetTicketUnionType { @JsOverlay static FetchTableMapRequest.ToObjectReturnType.ConsoleIdFieldType.GetTicketUnionType of( - Object o) { + 
Object o) { return Js.cast(o); } @@ -57,20 +57,20 @@ static FetchTableMapRequest.ToObjectReturnType.ConsoleIdFieldType create() { @JsProperty void setTicket( - FetchTableMapRequest.ToObjectReturnType.ConsoleIdFieldType.GetTicketUnionType ticket); + FetchTableMapRequest.ToObjectReturnType.ConsoleIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -94,7 +94,7 @@ public interface ConsoleIdFieldType { public interface GetTicketUnionType { @JsOverlay static FetchTableMapRequest.ToObjectReturnType0.ConsoleIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -129,20 +129,20 @@ static FetchTableMapRequest.ToObjectReturnType0.ConsoleIdFieldType create() { @JsProperty void setTicket( - FetchTableMapRequest.ToObjectReturnType0.ConsoleIdFieldType.GetTicketUnionType ticket); + FetchTableMapRequest.ToObjectReturnType0.ConsoleIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -161,12 +161,12 @@ static FetchTableMapRequest.ToObjectReturnType0 create() { public static native FetchTableMapRequest deserializeBinary(Uint8Array bytes); public static native FetchTableMapRequest deserializeBinaryFromReader( - FetchTableMapRequest message, Object reader); + FetchTableMapRequest message, Object reader); public static native void serializeBinaryToWriter(FetchTableMapRequest message, Object writer); public static native FetchTableMapRequest.ToObjectReturnType toObject( - boolean includeInstance, FetchTableMapRequest msg); + boolean includeInstance, 
FetchTableMapRequest msg); public native void clearConsoleId(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/FetchTableMapResponse.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/FetchTableMapResponse.java index 7b5cf378494..ddd53133514 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/FetchTableMapResponse.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/FetchTableMapResponse.java @@ -5,14 +5,14 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb.FetchTableMapResponse", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.FetchTableMapResponse", + namespace = JsPackage.GLOBAL) public class FetchTableMapResponse { public static native FetchTableMapResponse deserializeBinary(Uint8Array bytes); public static native FetchTableMapResponse deserializeBinaryFromReader( - FetchTableMapResponse message, Object reader); + FetchTableMapResponse message, Object reader); public static native void serializeBinaryToWriter(FetchTableMapResponse message, Object writer); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/FetchTableRequest.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/FetchTableRequest.java index ec54a5e2e11..ef5f1e6a5f8 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/FetchTableRequest.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/FetchTableRequest.java @@ -10,9 +10,9 @@ import jsinterop.base.JsPropertyMap; 
@JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb.FetchTableRequest", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.FetchTableRequest", + namespace = JsPackage.GLOBAL) public class FetchTableRequest { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -22,7 +22,7 @@ public interface ConsoleIdFieldType { public interface GetTicketUnionType { @JsOverlay static FetchTableRequest.ToObjectReturnType.ConsoleIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -57,20 +57,20 @@ static FetchTableRequest.ToObjectReturnType.ConsoleIdFieldType create() { @JsProperty void setTicket( - FetchTableRequest.ToObjectReturnType.ConsoleIdFieldType.GetTicketUnionType ticket); + FetchTableRequest.ToObjectReturnType.ConsoleIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -106,7 +106,7 @@ public interface ConsoleIdFieldType { public interface GetTicketUnionType { @JsOverlay static FetchTableRequest.ToObjectReturnType0.ConsoleIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -141,20 +141,20 @@ static FetchTableRequest.ToObjectReturnType0.ConsoleIdFieldType create() { @JsProperty void setTicket( - FetchTableRequest.ToObjectReturnType0.ConsoleIdFieldType.GetTicketUnionType ticket); + FetchTableRequest.ToObjectReturnType0.ConsoleIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ 
-185,12 +185,12 @@ static FetchTableRequest.ToObjectReturnType0 create() { public static native FetchTableRequest deserializeBinary(Uint8Array bytes); public static native FetchTableRequest deserializeBinaryFromReader( - FetchTableRequest message, Object reader); + FetchTableRequest message, Object reader); public static native void serializeBinaryToWriter(FetchTableRequest message, Object writer); public static native FetchTableRequest.ToObjectReturnType toObject( - boolean includeInstance, FetchTableRequest msg); + boolean includeInstance, FetchTableRequest msg); public native void clearConsoleId(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/FigureDescriptor.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/FigureDescriptor.java index b6fd96faaab..4319a6db1cc 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/FigureDescriptor.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/FigureDescriptor.java @@ -14,9 +14,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb.FigureDescriptor", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.FigureDescriptor", + namespace = JsPackage.GLOBAL) public class FigureDescriptor { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -90,13 +90,12 @@ static FigureDescriptor.ToObjectReturnType.ChartsListFieldType.AxesListFieldType @JsOverlay default void setBusinessPeriodsList(Object[] businessPeriodsList) { - setBusinessPeriodsList( - Js.>uncheckedCast(businessPeriodsList)); + setBusinessPeriodsList(Js.>uncheckedCast(businessPeriodsList)); } @JsProperty void setDate( - 
FigureDescriptor.ToObjectReturnType.ChartsListFieldType.AxesListFieldType.BusinessCalendarDescriptorFieldType.HolidaysListFieldType.DateFieldType date); + FigureDescriptor.ToObjectReturnType.ChartsListFieldType.AxesListFieldType.BusinessCalendarDescriptorFieldType.HolidaysListFieldType.DateFieldType date); } @JsOverlay @@ -129,27 +128,27 @@ default void setBusinessDaysList(double[] businessDaysList) { @JsOverlay default void setBusinessPeriodsList( - FigureDescriptor.ToObjectReturnType.ChartsListFieldType.AxesListFieldType.BusinessCalendarDescriptorFieldType.BusinessPeriodsListFieldType[] businessPeriodsList) { + FigureDescriptor.ToObjectReturnType.ChartsListFieldType.AxesListFieldType.BusinessCalendarDescriptorFieldType.BusinessPeriodsListFieldType[] businessPeriodsList) { setBusinessPeriodsList( - Js.>uncheckedCast( - businessPeriodsList)); + Js.>uncheckedCast( + businessPeriodsList)); } @JsProperty void setBusinessPeriodsList( - JsArray businessPeriodsList); + JsArray businessPeriodsList); @JsOverlay default void setHolidaysList( - FigureDescriptor.ToObjectReturnType.ChartsListFieldType.AxesListFieldType.BusinessCalendarDescriptorFieldType.HolidaysListFieldType[] holidaysList) { + FigureDescriptor.ToObjectReturnType.ChartsListFieldType.AxesListFieldType.BusinessCalendarDescriptorFieldType.HolidaysListFieldType[] holidaysList) { setHolidaysList( - Js.>uncheckedCast( - holidaysList)); + Js.>uncheckedCast( + holidaysList)); } @JsProperty void setHolidaysList( - JsArray holidaysList); + JsArray holidaysList); @JsProperty void setName(String name); @@ -228,7 +227,7 @@ static FigureDescriptor.ToObjectReturnType.ChartsListFieldType.AxesListFieldType @JsProperty void setBusinessCalendarDescriptor( - FigureDescriptor.ToObjectReturnType.ChartsListFieldType.AxesListFieldType.BusinessCalendarDescriptorFieldType businessCalendarDescriptor); + FigureDescriptor.ToObjectReturnType.ChartsListFieldType.AxesListFieldType.BusinessCalendarDescriptorFieldType 
businessCalendarDescriptor); @JsProperty void setColor(String color); @@ -265,8 +264,7 @@ void setBusinessCalendarDescriptor( @JsOverlay default void setMajorTickLocationsList(double[] majorTickLocationsList) { - setMajorTickLocationsList( - Js.>uncheckedCast(majorTickLocationsList)); + setMajorTickLocationsList(Js.>uncheckedCast(majorTickLocationsList)); } @JsProperty @@ -488,26 +486,26 @@ static FigureDescriptor.ToObjectReturnType.ChartsListFieldType.MultiSeriesListFi @JsOverlay default void setDataSourcesList( - FigureDescriptor.ToObjectReturnType.ChartsListFieldType.MultiSeriesListFieldType.DataSourcesListFieldType[] dataSourcesList) { + FigureDescriptor.ToObjectReturnType.ChartsListFieldType.MultiSeriesListFieldType.DataSourcesListFieldType[] dataSourcesList) { setDataSourcesList( - Js.>uncheckedCast( - dataSourcesList)); + Js.>uncheckedCast( + dataSourcesList)); } @JsProperty void setDataSourcesList( - JsArray dataSourcesList); + JsArray dataSourcesList); @JsProperty void setGradientVisible(Object gradientVisible); @JsProperty void setLineColor( - FigureDescriptor.ToObjectReturnType.ChartsListFieldType.MultiSeriesListFieldType.LineColorFieldType lineColor); + FigureDescriptor.ToObjectReturnType.ChartsListFieldType.MultiSeriesListFieldType.LineColorFieldType lineColor); @JsProperty void setLinesVisible( - FigureDescriptor.ToObjectReturnType.ChartsListFieldType.MultiSeriesListFieldType.LinesVisibleFieldType linesVisible); + FigureDescriptor.ToObjectReturnType.ChartsListFieldType.MultiSeriesListFieldType.LinesVisibleFieldType linesVisible); @JsProperty void setName(String name); @@ -529,7 +527,7 @@ void setLinesVisible( @JsProperty void setPointSize( - FigureDescriptor.ToObjectReturnType.ChartsListFieldType.MultiSeriesListFieldType.PointSizeFieldType pointSize); + FigureDescriptor.ToObjectReturnType.ChartsListFieldType.MultiSeriesListFieldType.PointSizeFieldType pointSize); @JsProperty void setPointsVisible(Object pointsVisible); @@ -618,7 +616,7 @@ static 
FigureDescriptor.ToObjectReturnType.ChartsListFieldType.SeriesListFieldTy @JsProperty void setOneClick( - FigureDescriptor.ToObjectReturnType.ChartsListFieldType.SeriesListFieldType.DataSourcesListFieldType.OneClickFieldType oneClick); + FigureDescriptor.ToObjectReturnType.ChartsListFieldType.SeriesListFieldType.DataSourcesListFieldType.OneClickFieldType oneClick); @JsProperty void setTableId(double tableId); @@ -679,15 +677,15 @@ static FigureDescriptor.ToObjectReturnType.ChartsListFieldType.SeriesListFieldTy @JsOverlay default void setDataSourcesList( - FigureDescriptor.ToObjectReturnType.ChartsListFieldType.SeriesListFieldType.DataSourcesListFieldType[] dataSourcesList) { + FigureDescriptor.ToObjectReturnType.ChartsListFieldType.SeriesListFieldType.DataSourcesListFieldType[] dataSourcesList) { setDataSourcesList( - Js.>uncheckedCast( - dataSourcesList)); + Js.>uncheckedCast( + dataSourcesList)); } @JsProperty void setDataSourcesList( - JsArray dataSourcesList); + JsArray dataSourcesList); @JsProperty void setGradientVisible(boolean gradientVisible); @@ -775,15 +773,15 @@ static FigureDescriptor.ToObjectReturnType.ChartsListFieldType create() { @JsOverlay default void setAxesList( - FigureDescriptor.ToObjectReturnType.ChartsListFieldType.AxesListFieldType[] axesList) { + FigureDescriptor.ToObjectReturnType.ChartsListFieldType.AxesListFieldType[] axesList) { setAxesList( - Js.>uncheckedCast( - axesList)); + Js.>uncheckedCast( + axesList)); } @JsProperty void setAxesList( - JsArray axesList); + JsArray axesList); @JsProperty void setChartType(double chartType); @@ -802,14 +800,14 @@ void setAxesList( @JsProperty void setMultiSeriesList( - JsArray multiSeriesList); + JsArray multiSeriesList); @JsOverlay default void setMultiSeriesList( - FigureDescriptor.ToObjectReturnType.ChartsListFieldType.MultiSeriesListFieldType[] multiSeriesList) { + FigureDescriptor.ToObjectReturnType.ChartsListFieldType.MultiSeriesListFieldType[] multiSeriesList) { setMultiSeriesList( - 
Js.>uncheckedCast( - multiSeriesList)); + Js.>uncheckedCast( + multiSeriesList)); } @JsProperty @@ -817,14 +815,14 @@ default void setMultiSeriesList( @JsProperty void setSeriesList( - JsArray seriesList); + JsArray seriesList); @JsOverlay default void setSeriesList( - FigureDescriptor.ToObjectReturnType.ChartsListFieldType.SeriesListFieldType[] seriesList) { + FigureDescriptor.ToObjectReturnType.ChartsListFieldType.SeriesListFieldType[] seriesList) { setSeriesList( - Js.>uncheckedCast( - seriesList)); + Js.>uncheckedCast( + seriesList)); } @JsProperty @@ -846,7 +844,7 @@ public interface TablesListFieldType { public interface GetSchemaHeaderUnionType { @JsOverlay static FigureDescriptor.ToObjectReturnType.TablesListFieldType.GetSchemaHeaderUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -879,7 +877,7 @@ public interface TicketFieldType { public interface GetTicketUnionType { @JsOverlay static FigureDescriptor.ToObjectReturnType.TablesListFieldType.ResultIdFieldType.TicketFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -914,20 +912,20 @@ static FigureDescriptor.ToObjectReturnType.TablesListFieldType.ResultIdFieldType @JsProperty void setTicket( - FigureDescriptor.ToObjectReturnType.TablesListFieldType.ResultIdFieldType.TicketFieldType.GetTicketUnionType ticket); + FigureDescriptor.ToObjectReturnType.TablesListFieldType.ResultIdFieldType.TicketFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -947,7 +945,7 @@ static FigureDescriptor.ToObjectReturnType.TablesListFieldType.ResultIdFieldType @JsProperty void setTicket( - FigureDescriptor.ToObjectReturnType.TablesListFieldType.ResultIdFieldType.TicketFieldType ticket); + 
FigureDescriptor.ToObjectReturnType.TablesListFieldType.ResultIdFieldType.TicketFieldType ticket); } @JsOverlay @@ -981,24 +979,24 @@ static FigureDescriptor.ToObjectReturnType.TablesListFieldType create() { @JsProperty void setResultId( - FigureDescriptor.ToObjectReturnType.TablesListFieldType.ResultIdFieldType resultId); + FigureDescriptor.ToObjectReturnType.TablesListFieldType.ResultIdFieldType resultId); @JsProperty void setSchemaHeader( - FigureDescriptor.ToObjectReturnType.TablesListFieldType.GetSchemaHeaderUnionType schemaHeader); + FigureDescriptor.ToObjectReturnType.TablesListFieldType.GetSchemaHeaderUnionType schemaHeader); @JsOverlay default void setSchemaHeader(String schemaHeader) { setSchemaHeader( - Js.uncheckedCast( - schemaHeader)); + Js.uncheckedCast( + schemaHeader)); } @JsOverlay default void setSchemaHeader(Uint8Array schemaHeader) { setSchemaHeader( - Js.uncheckedCast( - schemaHeader)); + Js.uncheckedCast( + schemaHeader)); } @JsProperty @@ -1042,15 +1040,14 @@ static FigureDescriptor.ToObjectReturnType create() { @JsOverlay default void setChartsList( - FigureDescriptor.ToObjectReturnType.ChartsListFieldType[] chartsList) { + FigureDescriptor.ToObjectReturnType.ChartsListFieldType[] chartsList) { setChartsList( - Js.>uncheckedCast( - chartsList)); + Js.>uncheckedCast( + chartsList)); } @JsProperty - void setChartsList( - JsArray chartsList); + void setChartsList(JsArray chartsList); @JsProperty void setCols(double cols); @@ -1067,15 +1064,14 @@ default void setErrorsList(String[] errorsList) { void setRows(double rows); @JsProperty - void setTablesList( - JsArray tablesList); + void setTablesList(JsArray tablesList); @JsOverlay default void setTablesList( - FigureDescriptor.ToObjectReturnType.TablesListFieldType[] tablesList) { + FigureDescriptor.ToObjectReturnType.TablesListFieldType[] tablesList) { setTablesList( - Js.>uncheckedCast( - tablesList)); + Js.>uncheckedCast( + tablesList)); } @JsProperty @@ -1163,13 +1159,12 @@ static 
FigureDescriptor.ToObjectReturnType0.ChartsListFieldType.AxesListFieldTyp @JsOverlay default void setBusinessPeriodsList(Object[] businessPeriodsList) { - setBusinessPeriodsList( - Js.>uncheckedCast(businessPeriodsList)); + setBusinessPeriodsList(Js.>uncheckedCast(businessPeriodsList)); } @JsProperty void setDate( - FigureDescriptor.ToObjectReturnType0.ChartsListFieldType.AxesListFieldType.BusinessCalendarDescriptorFieldType.HolidaysListFieldType.DateFieldType date); + FigureDescriptor.ToObjectReturnType0.ChartsListFieldType.AxesListFieldType.BusinessCalendarDescriptorFieldType.HolidaysListFieldType.DateFieldType date); } @JsOverlay @@ -1202,27 +1197,27 @@ default void setBusinessDaysList(double[] businessDaysList) { @JsOverlay default void setBusinessPeriodsList( - FigureDescriptor.ToObjectReturnType0.ChartsListFieldType.AxesListFieldType.BusinessCalendarDescriptorFieldType.BusinessPeriodsListFieldType[] businessPeriodsList) { + FigureDescriptor.ToObjectReturnType0.ChartsListFieldType.AxesListFieldType.BusinessCalendarDescriptorFieldType.BusinessPeriodsListFieldType[] businessPeriodsList) { setBusinessPeriodsList( - Js.>uncheckedCast( - businessPeriodsList)); + Js.>uncheckedCast( + businessPeriodsList)); } @JsProperty void setBusinessPeriodsList( - JsArray businessPeriodsList); + JsArray businessPeriodsList); @JsOverlay default void setHolidaysList( - FigureDescriptor.ToObjectReturnType0.ChartsListFieldType.AxesListFieldType.BusinessCalendarDescriptorFieldType.HolidaysListFieldType[] holidaysList) { + FigureDescriptor.ToObjectReturnType0.ChartsListFieldType.AxesListFieldType.BusinessCalendarDescriptorFieldType.HolidaysListFieldType[] holidaysList) { setHolidaysList( - Js.>uncheckedCast( - holidaysList)); + Js.>uncheckedCast( + holidaysList)); } @JsProperty void setHolidaysList( - JsArray holidaysList); + JsArray holidaysList); @JsProperty void setName(String name); @@ -1301,7 +1296,7 @@ static 
FigureDescriptor.ToObjectReturnType0.ChartsListFieldType.AxesListFieldTyp @JsProperty void setBusinessCalendarDescriptor( - FigureDescriptor.ToObjectReturnType0.ChartsListFieldType.AxesListFieldType.BusinessCalendarDescriptorFieldType businessCalendarDescriptor); + FigureDescriptor.ToObjectReturnType0.ChartsListFieldType.AxesListFieldType.BusinessCalendarDescriptorFieldType businessCalendarDescriptor); @JsProperty void setColor(String color); @@ -1338,8 +1333,7 @@ void setBusinessCalendarDescriptor( @JsOverlay default void setMajorTickLocationsList(double[] majorTickLocationsList) { - setMajorTickLocationsList( - Js.>uncheckedCast(majorTickLocationsList)); + setMajorTickLocationsList(Js.>uncheckedCast(majorTickLocationsList)); } @JsProperty @@ -1561,26 +1555,26 @@ static FigureDescriptor.ToObjectReturnType0.ChartsListFieldType.MultiSeriesListF @JsOverlay default void setDataSourcesList( - FigureDescriptor.ToObjectReturnType0.ChartsListFieldType.MultiSeriesListFieldType.DataSourcesListFieldType[] dataSourcesList) { + FigureDescriptor.ToObjectReturnType0.ChartsListFieldType.MultiSeriesListFieldType.DataSourcesListFieldType[] dataSourcesList) { setDataSourcesList( - Js.>uncheckedCast( - dataSourcesList)); + Js.>uncheckedCast( + dataSourcesList)); } @JsProperty void setDataSourcesList( - JsArray dataSourcesList); + JsArray dataSourcesList); @JsProperty void setGradientVisible(Object gradientVisible); @JsProperty void setLineColor( - FigureDescriptor.ToObjectReturnType0.ChartsListFieldType.MultiSeriesListFieldType.LineColorFieldType lineColor); + FigureDescriptor.ToObjectReturnType0.ChartsListFieldType.MultiSeriesListFieldType.LineColorFieldType lineColor); @JsProperty void setLinesVisible( - FigureDescriptor.ToObjectReturnType0.ChartsListFieldType.MultiSeriesListFieldType.LinesVisibleFieldType linesVisible); + FigureDescriptor.ToObjectReturnType0.ChartsListFieldType.MultiSeriesListFieldType.LinesVisibleFieldType linesVisible); @JsProperty void setName(String name); @@ 
-1602,7 +1596,7 @@ void setLinesVisible( @JsProperty void setPointSize( - FigureDescriptor.ToObjectReturnType0.ChartsListFieldType.MultiSeriesListFieldType.PointSizeFieldType pointSize); + FigureDescriptor.ToObjectReturnType0.ChartsListFieldType.MultiSeriesListFieldType.PointSizeFieldType pointSize); @JsProperty void setPointsVisible(Object pointsVisible); @@ -1691,7 +1685,7 @@ static FigureDescriptor.ToObjectReturnType0.ChartsListFieldType.SeriesListFieldT @JsProperty void setOneClick( - FigureDescriptor.ToObjectReturnType0.ChartsListFieldType.SeriesListFieldType.DataSourcesListFieldType.OneClickFieldType oneClick); + FigureDescriptor.ToObjectReturnType0.ChartsListFieldType.SeriesListFieldType.DataSourcesListFieldType.OneClickFieldType oneClick); @JsProperty void setTableId(double tableId); @@ -1752,15 +1746,15 @@ static FigureDescriptor.ToObjectReturnType0.ChartsListFieldType.SeriesListFieldT @JsOverlay default void setDataSourcesList( - FigureDescriptor.ToObjectReturnType0.ChartsListFieldType.SeriesListFieldType.DataSourcesListFieldType[] dataSourcesList) { + FigureDescriptor.ToObjectReturnType0.ChartsListFieldType.SeriesListFieldType.DataSourcesListFieldType[] dataSourcesList) { setDataSourcesList( - Js.>uncheckedCast( - dataSourcesList)); + Js.>uncheckedCast( + dataSourcesList)); } @JsProperty void setDataSourcesList( - JsArray dataSourcesList); + JsArray dataSourcesList); @JsProperty void setGradientVisible(boolean gradientVisible); @@ -1848,15 +1842,15 @@ static FigureDescriptor.ToObjectReturnType0.ChartsListFieldType create() { @JsOverlay default void setAxesList( - FigureDescriptor.ToObjectReturnType0.ChartsListFieldType.AxesListFieldType[] axesList) { + FigureDescriptor.ToObjectReturnType0.ChartsListFieldType.AxesListFieldType[] axesList) { setAxesList( - Js.>uncheckedCast( - axesList)); + Js.>uncheckedCast( + axesList)); } @JsProperty void setAxesList( - JsArray axesList); + JsArray axesList); @JsProperty void setChartType(double chartType); @@ -1875,14 
+1869,14 @@ void setAxesList( @JsProperty void setMultiSeriesList( - JsArray multiSeriesList); + JsArray multiSeriesList); @JsOverlay default void setMultiSeriesList( - FigureDescriptor.ToObjectReturnType0.ChartsListFieldType.MultiSeriesListFieldType[] multiSeriesList) { + FigureDescriptor.ToObjectReturnType0.ChartsListFieldType.MultiSeriesListFieldType[] multiSeriesList) { setMultiSeriesList( - Js.>uncheckedCast( - multiSeriesList)); + Js.>uncheckedCast( + multiSeriesList)); } @JsProperty @@ -1890,14 +1884,14 @@ default void setMultiSeriesList( @JsProperty void setSeriesList( - JsArray seriesList); + JsArray seriesList); @JsOverlay default void setSeriesList( - FigureDescriptor.ToObjectReturnType0.ChartsListFieldType.SeriesListFieldType[] seriesList) { + FigureDescriptor.ToObjectReturnType0.ChartsListFieldType.SeriesListFieldType[] seriesList) { setSeriesList( - Js.>uncheckedCast( - seriesList)); + Js.>uncheckedCast( + seriesList)); } @JsProperty @@ -1919,7 +1913,7 @@ public interface TablesListFieldType { public interface GetSchemaHeaderUnionType { @JsOverlay static FigureDescriptor.ToObjectReturnType0.TablesListFieldType.GetSchemaHeaderUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -1952,7 +1946,7 @@ public interface TicketFieldType { public interface GetTicketUnionType { @JsOverlay static FigureDescriptor.ToObjectReturnType0.TablesListFieldType.ResultIdFieldType.TicketFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -1987,20 +1981,20 @@ static FigureDescriptor.ToObjectReturnType0.TablesListFieldType.ResultIdFieldTyp @JsProperty void setTicket( - FigureDescriptor.ToObjectReturnType0.TablesListFieldType.ResultIdFieldType.TicketFieldType.GetTicketUnionType ticket); + FigureDescriptor.ToObjectReturnType0.TablesListFieldType.ResultIdFieldType.TicketFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } 
@JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -2020,7 +2014,7 @@ static FigureDescriptor.ToObjectReturnType0.TablesListFieldType.ResultIdFieldTyp @JsProperty void setTicket( - FigureDescriptor.ToObjectReturnType0.TablesListFieldType.ResultIdFieldType.TicketFieldType ticket); + FigureDescriptor.ToObjectReturnType0.TablesListFieldType.ResultIdFieldType.TicketFieldType ticket); } @JsOverlay @@ -2054,24 +2048,24 @@ static FigureDescriptor.ToObjectReturnType0.TablesListFieldType create() { @JsProperty void setResultId( - FigureDescriptor.ToObjectReturnType0.TablesListFieldType.ResultIdFieldType resultId); + FigureDescriptor.ToObjectReturnType0.TablesListFieldType.ResultIdFieldType resultId); @JsProperty void setSchemaHeader( - FigureDescriptor.ToObjectReturnType0.TablesListFieldType.GetSchemaHeaderUnionType schemaHeader); + FigureDescriptor.ToObjectReturnType0.TablesListFieldType.GetSchemaHeaderUnionType schemaHeader); @JsOverlay default void setSchemaHeader(String schemaHeader) { setSchemaHeader( - Js.uncheckedCast( - schemaHeader)); + Js.uncheckedCast( + schemaHeader)); } @JsOverlay default void setSchemaHeader(Uint8Array schemaHeader) { setSchemaHeader( - Js.uncheckedCast( - schemaHeader)); + Js.uncheckedCast( + schemaHeader)); } @JsProperty @@ -2115,15 +2109,15 @@ static FigureDescriptor.ToObjectReturnType0 create() { @JsOverlay default void setChartsList( - FigureDescriptor.ToObjectReturnType0.ChartsListFieldType[] chartsList) { + FigureDescriptor.ToObjectReturnType0.ChartsListFieldType[] chartsList) { setChartsList( - Js.>uncheckedCast( - chartsList)); + Js.>uncheckedCast( + chartsList)); } @JsProperty void setChartsList( - JsArray chartsList); + JsArray chartsList); @JsProperty void setCols(double cols); @@ -2141,14 +2135,14 @@ default void setErrorsList(String[] errorsList) { @JsProperty void setTablesList( - JsArray tablesList); + JsArray tablesList); @JsOverlay default 
void setTablesList( - FigureDescriptor.ToObjectReturnType0.TablesListFieldType[] tablesList) { + FigureDescriptor.ToObjectReturnType0.TablesListFieldType[] tablesList) { setTablesList( - Js.>uncheckedCast( - tablesList)); + Js.>uncheckedCast( + tablesList)); } @JsProperty @@ -2170,12 +2164,12 @@ default void setTablesList( public static native FigureDescriptor deserializeBinary(Uint8Array bytes); public static native FigureDescriptor deserializeBinaryFromReader( - FigureDescriptor message, Object reader); + FigureDescriptor message, Object reader); public static native void serializeBinaryToWriter(FigureDescriptor message, Object writer); public static native FigureDescriptor.ToObjectReturnType toObject( - boolean includeInstance, FigureDescriptor msg); + boolean includeInstance, FigureDescriptor msg); public native ChartDescriptor addCharts(); @@ -2190,7 +2184,7 @@ public static native FigureDescriptor.ToObjectReturnType toObject( public native ExportedTableCreationResponse addTables(); public native ExportedTableCreationResponse addTables( - ExportedTableCreationResponse value, double index); + ExportedTableCreationResponse value, double index); public native ExportedTableCreationResponse addTables(ExportedTableCreationResponse value); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/GetCompletionItemsRequest.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/GetCompletionItemsRequest.java index c255f3a1a1d..9e41559899f 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/GetCompletionItemsRequest.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/GetCompletionItemsRequest.java @@ -10,9 +10,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = 
"dhinternal.io.deephaven.proto.console_pb.GetCompletionItemsRequest", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.GetCompletionItemsRequest", + namespace = JsPackage.GLOBAL) public class GetCompletionItemsRequest { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -21,8 +21,7 @@ public interface ConsoleIdFieldType { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface GetTicketUnionType { @JsOverlay - static GetCompletionItemsRequest.ToObjectReturnType.ConsoleIdFieldType.GetTicketUnionType of( - Object o) { + static GetCompletionItemsRequest.ToObjectReturnType.ConsoleIdFieldType.GetTicketUnionType of(Object o) { return Js.cast(o); } @@ -57,20 +56,20 @@ static GetCompletionItemsRequest.ToObjectReturnType.ConsoleIdFieldType create() @JsProperty void setTicket( - GetCompletionItemsRequest.ToObjectReturnType.ConsoleIdFieldType.GetTicketUnionType ticket); + GetCompletionItemsRequest.ToObjectReturnType.ConsoleIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -152,8 +151,7 @@ static GetCompletionItemsRequest.ToObjectReturnType create() { GetCompletionItemsRequest.ToObjectReturnType.TextDocumentFieldType getTextDocument(); @JsProperty - void setConsoleId( - GetCompletionItemsRequest.ToObjectReturnType.ConsoleIdFieldType consoleId); + void setConsoleId(GetCompletionItemsRequest.ToObjectReturnType.ConsoleIdFieldType consoleId); @JsProperty void setContext(GetCompletionItemsRequest.ToObjectReturnType.ContextFieldType context); @@ -163,7 +161,7 @@ void setConsoleId( @JsProperty void setTextDocument( - GetCompletionItemsRequest.ToObjectReturnType.TextDocumentFieldType textDocument); 
+ GetCompletionItemsRequest.ToObjectReturnType.TextDocumentFieldType textDocument); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -174,7 +172,7 @@ public interface ConsoleIdFieldType { public interface GetTicketUnionType { @JsOverlay static GetCompletionItemsRequest.ToObjectReturnType0.ConsoleIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -209,20 +207,20 @@ static GetCompletionItemsRequest.ToObjectReturnType0.ConsoleIdFieldType create() @JsProperty void setTicket( - GetCompletionItemsRequest.ToObjectReturnType0.ConsoleIdFieldType.GetTicketUnionType ticket); + GetCompletionItemsRequest.ToObjectReturnType0.ConsoleIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -304,8 +302,7 @@ static GetCompletionItemsRequest.ToObjectReturnType0 create() { GetCompletionItemsRequest.ToObjectReturnType0.TextDocumentFieldType getTextDocument(); @JsProperty - void setConsoleId( - GetCompletionItemsRequest.ToObjectReturnType0.ConsoleIdFieldType consoleId); + void setConsoleId(GetCompletionItemsRequest.ToObjectReturnType0.ConsoleIdFieldType consoleId); @JsProperty void setContext(GetCompletionItemsRequest.ToObjectReturnType0.ContextFieldType context); @@ -315,19 +312,19 @@ void setConsoleId( @JsProperty void setTextDocument( - GetCompletionItemsRequest.ToObjectReturnType0.TextDocumentFieldType textDocument); + GetCompletionItemsRequest.ToObjectReturnType0.TextDocumentFieldType textDocument); } public static native GetCompletionItemsRequest deserializeBinary(Uint8Array bytes); public static native GetCompletionItemsRequest deserializeBinaryFromReader( - GetCompletionItemsRequest message, Object reader); + GetCompletionItemsRequest message, Object reader); public static 
native void serializeBinaryToWriter( - GetCompletionItemsRequest message, Object writer); + GetCompletionItemsRequest message, Object writer); public static native GetCompletionItemsRequest.ToObjectReturnType toObject( - boolean includeInstance, GetCompletionItemsRequest msg); + boolean includeInstance, GetCompletionItemsRequest msg); public native void clearConsoleId(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/GetCompletionItemsResponse.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/GetCompletionItemsResponse.java index ae7c9cb93da..36f81b9e928 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/GetCompletionItemsResponse.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/GetCompletionItemsResponse.java @@ -10,9 +10,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb.GetCompletionItemsResponse", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.GetCompletionItemsResponse", + namespace = JsPackage.GLOBAL) public class GetCompletionItemsResponse { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -58,7 +58,7 @@ static GetCompletionItemsResponse.ToObjectReturnType.ItemsListFieldType.TextEdit @JsProperty void setStart( - GetCompletionItemsResponse.ToObjectReturnType.ItemsListFieldType.TextEditFieldType.RangeFieldType.StartFieldType start); + GetCompletionItemsResponse.ToObjectReturnType.ItemsListFieldType.TextEditFieldType.RangeFieldType.StartFieldType start); } @JsOverlay @@ -74,7 +74,7 @@ static GetCompletionItemsResponse.ToObjectReturnType.ItemsListFieldType.TextEdit @JsProperty void setRange( - 
GetCompletionItemsResponse.ToObjectReturnType.ItemsListFieldType.TextEditFieldType.RangeFieldType range); + GetCompletionItemsResponse.ToObjectReturnType.ItemsListFieldType.TextEditFieldType.RangeFieldType range); @JsProperty void setText(String text); @@ -132,8 +132,7 @@ static GetCompletionItemsResponse.ToObjectReturnType.ItemsListFieldType create() @JsOverlay default void setAdditionalTextEditsList(Object[] additionalTextEditsList) { - setAdditionalTextEditsList( - Js.>uncheckedCast(additionalTextEditsList)); + setAdditionalTextEditsList(Js.>uncheckedCast(additionalTextEditsList)); } @JsProperty @@ -179,7 +178,7 @@ default void setCommitCharactersList(String[] commitCharactersList) { @JsProperty void setTextEdit( - GetCompletionItemsResponse.ToObjectReturnType.ItemsListFieldType.TextEditFieldType textEdit); + GetCompletionItemsResponse.ToObjectReturnType.ItemsListFieldType.TextEditFieldType textEdit); } @JsOverlay @@ -192,15 +191,15 @@ static GetCompletionItemsResponse.ToObjectReturnType create() { @JsOverlay default void setItemsList( - GetCompletionItemsResponse.ToObjectReturnType.ItemsListFieldType[] itemsList) { + GetCompletionItemsResponse.ToObjectReturnType.ItemsListFieldType[] itemsList) { setItemsList( - Js.>uncheckedCast( - itemsList)); + Js.>uncheckedCast( + itemsList)); } @JsProperty void setItemsList( - JsArray itemsList); + JsArray itemsList); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -247,7 +246,7 @@ static GetCompletionItemsResponse.ToObjectReturnType0.ItemsListFieldType.TextEdi @JsProperty void setStart( - GetCompletionItemsResponse.ToObjectReturnType0.ItemsListFieldType.TextEditFieldType.RangeFieldType.StartFieldType start); + GetCompletionItemsResponse.ToObjectReturnType0.ItemsListFieldType.TextEditFieldType.RangeFieldType.StartFieldType start); } @JsOverlay @@ -263,7 +262,7 @@ static GetCompletionItemsResponse.ToObjectReturnType0.ItemsListFieldType.TextEdi @JsProperty void setRange( - 
GetCompletionItemsResponse.ToObjectReturnType0.ItemsListFieldType.TextEditFieldType.RangeFieldType range); + GetCompletionItemsResponse.ToObjectReturnType0.ItemsListFieldType.TextEditFieldType.RangeFieldType range); @JsProperty void setText(String text); @@ -321,8 +320,7 @@ static GetCompletionItemsResponse.ToObjectReturnType0.ItemsListFieldType create( @JsOverlay default void setAdditionalTextEditsList(Object[] additionalTextEditsList) { - setAdditionalTextEditsList( - Js.>uncheckedCast(additionalTextEditsList)); + setAdditionalTextEditsList(Js.>uncheckedCast(additionalTextEditsList)); } @JsProperty @@ -368,7 +366,7 @@ default void setCommitCharactersList(String[] commitCharactersList) { @JsProperty void setTextEdit( - GetCompletionItemsResponse.ToObjectReturnType0.ItemsListFieldType.TextEditFieldType textEdit); + GetCompletionItemsResponse.ToObjectReturnType0.ItemsListFieldType.TextEditFieldType textEdit); } @JsOverlay @@ -381,27 +379,27 @@ static GetCompletionItemsResponse.ToObjectReturnType0 create() { @JsOverlay default void setItemsList( - GetCompletionItemsResponse.ToObjectReturnType0.ItemsListFieldType[] itemsList) { + GetCompletionItemsResponse.ToObjectReturnType0.ItemsListFieldType[] itemsList) { setItemsList( - Js.>uncheckedCast( - itemsList)); + Js.>uncheckedCast( + itemsList)); } @JsProperty void setItemsList( - JsArray itemsList); + JsArray itemsList); } public static native GetCompletionItemsResponse deserializeBinary(Uint8Array bytes); public static native GetCompletionItemsResponse deserializeBinaryFromReader( - GetCompletionItemsResponse message, Object reader); + GetCompletionItemsResponse message, Object reader); public static native void serializeBinaryToWriter( - GetCompletionItemsResponse message, Object writer); + GetCompletionItemsResponse message, Object writer); public static native GetCompletionItemsResponse.ToObjectReturnType toObject( - boolean includeInstance, GetCompletionItemsResponse msg); + boolean includeInstance, 
GetCompletionItemsResponse msg); public native CompletionItem addItems(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/GetConsoleTypesRequest.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/GetConsoleTypesRequest.java index ae0818d19d5..8bf57f330ed 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/GetConsoleTypesRequest.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/GetConsoleTypesRequest.java @@ -5,17 +5,16 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb.GetConsoleTypesRequest", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.GetConsoleTypesRequest", + namespace = JsPackage.GLOBAL) public class GetConsoleTypesRequest { public static native GetConsoleTypesRequest deserializeBinary(Uint8Array bytes); public static native GetConsoleTypesRequest deserializeBinaryFromReader( - GetConsoleTypesRequest message, Object reader); + GetConsoleTypesRequest message, Object reader); - public static native void serializeBinaryToWriter(GetConsoleTypesRequest message, - Object writer); + public static native void serializeBinaryToWriter(GetConsoleTypesRequest message, Object writer); public static native Object toObject(boolean includeInstance, GetConsoleTypesRequest msg); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/GetConsoleTypesResponse.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/GetConsoleTypesResponse.java index dd6e8fe0f23..f4d44da53c0 100644 --- 
a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/GetConsoleTypesResponse.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/GetConsoleTypesResponse.java @@ -10,9 +10,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb.GetConsoleTypesResponse", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.GetConsoleTypesResponse", + namespace = JsPackage.GLOBAL) public class GetConsoleTypesResponse { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -55,13 +55,12 @@ default void setConsoleTypesList(String[] consoleTypesList) { public static native GetConsoleTypesResponse deserializeBinary(Uint8Array bytes); public static native GetConsoleTypesResponse deserializeBinaryFromReader( - GetConsoleTypesResponse message, Object reader); + GetConsoleTypesResponse message, Object reader); - public static native void serializeBinaryToWriter(GetConsoleTypesResponse message, - Object writer); + public static native void serializeBinaryToWriter(GetConsoleTypesResponse message, Object writer); public static native GetConsoleTypesResponse.ToObjectReturnType toObject( - boolean includeInstance, GetConsoleTypesResponse msg); + boolean includeInstance, GetConsoleTypesResponse msg); public native String addConsoleTypes(String value, double index); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/LogSubscriptionData.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/LogSubscriptionData.java index ae802b4ec53..172d7c7a0a8 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/LogSubscriptionData.java +++ 
b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/LogSubscriptionData.java @@ -9,9 +9,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb.LogSubscriptionData", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.LogSubscriptionData", + namespace = JsPackage.GLOBAL) public class LogSubscriptionData { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -68,12 +68,12 @@ static LogSubscriptionData.ToObjectReturnType0 create() { public static native LogSubscriptionData deserializeBinary(Uint8Array bytes); public static native LogSubscriptionData deserializeBinaryFromReader( - LogSubscriptionData message, Object reader); + LogSubscriptionData message, Object reader); public static native void serializeBinaryToWriter(LogSubscriptionData message, Object writer); public static native LogSubscriptionData.ToObjectReturnType toObject( - boolean includeInstance, LogSubscriptionData msg); + boolean includeInstance, LogSubscriptionData msg); public native String getLogLevel(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/LogSubscriptionRequest.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/LogSubscriptionRequest.java index 056f5a09d7c..e8b9f17510d 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/LogSubscriptionRequest.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/LogSubscriptionRequest.java @@ -10,9 +10,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb.LogSubscriptionRequest", - namespace = 
JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.LogSubscriptionRequest", + namespace = JsPackage.GLOBAL) public class LogSubscriptionRequest { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -67,13 +67,12 @@ default void setLevelsList(String[] levelsList) { public static native LogSubscriptionRequest deserializeBinary(Uint8Array bytes); public static native LogSubscriptionRequest deserializeBinaryFromReader( - LogSubscriptionRequest message, Object reader); + LogSubscriptionRequest message, Object reader); - public static native void serializeBinaryToWriter(LogSubscriptionRequest message, - Object writer); + public static native void serializeBinaryToWriter(LogSubscriptionRequest message, Object writer); public static native LogSubscriptionRequest.ToObjectReturnType toObject( - boolean includeInstance, LogSubscriptionRequest msg); + boolean includeInstance, LogSubscriptionRequest msg); public native String addLevels(String value, double index); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/OpenDocumentRequest.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/OpenDocumentRequest.java index a66bc579cd1..9325c40586c 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/OpenDocumentRequest.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/OpenDocumentRequest.java @@ -10,9 +10,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb.OpenDocumentRequest", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.OpenDocumentRequest", + namespace = JsPackage.GLOBAL) public class OpenDocumentRequest { 
@JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -22,7 +22,7 @@ public interface ConsoleIdFieldType { public interface GetTicketUnionType { @JsOverlay static OpenDocumentRequest.ToObjectReturnType.ConsoleIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -57,20 +57,20 @@ static OpenDocumentRequest.ToObjectReturnType.ConsoleIdFieldType create() { @JsProperty void setTicket( - OpenDocumentRequest.ToObjectReturnType.ConsoleIdFieldType.GetTicketUnionType ticket); + OpenDocumentRequest.ToObjectReturnType.ConsoleIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -121,8 +121,7 @@ static OpenDocumentRequest.ToObjectReturnType create() { void setConsoleId(OpenDocumentRequest.ToObjectReturnType.ConsoleIdFieldType consoleId); @JsProperty - void setTextDocument( - OpenDocumentRequest.ToObjectReturnType.TextDocumentFieldType textDocument); + void setTextDocument(OpenDocumentRequest.ToObjectReturnType.TextDocumentFieldType textDocument); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -133,7 +132,7 @@ public interface ConsoleIdFieldType { public interface GetTicketUnionType { @JsOverlay static OpenDocumentRequest.ToObjectReturnType0.ConsoleIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -168,20 +167,20 @@ static OpenDocumentRequest.ToObjectReturnType0.ConsoleIdFieldType create() { @JsProperty void setTicket( - OpenDocumentRequest.ToObjectReturnType0.ConsoleIdFieldType.GetTicketUnionType ticket); + OpenDocumentRequest.ToObjectReturnType0.ConsoleIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); 
+ Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -233,18 +232,18 @@ static OpenDocumentRequest.ToObjectReturnType0 create() { @JsProperty void setTextDocument( - OpenDocumentRequest.ToObjectReturnType0.TextDocumentFieldType textDocument); + OpenDocumentRequest.ToObjectReturnType0.TextDocumentFieldType textDocument); } public static native OpenDocumentRequest deserializeBinary(Uint8Array bytes); public static native OpenDocumentRequest deserializeBinaryFromReader( - OpenDocumentRequest message, Object reader); + OpenDocumentRequest message, Object reader); public static native void serializeBinaryToWriter(OpenDocumentRequest message, Object writer); public static native OpenDocumentRequest.ToObjectReturnType toObject( - boolean includeInstance, OpenDocumentRequest msg); + boolean includeInstance, OpenDocumentRequest msg); public native void clearConsoleId(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/OpenDocumentResponse.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/OpenDocumentResponse.java index 4a0cea512d3..4c2958ef0fd 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/OpenDocumentResponse.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/OpenDocumentResponse.java @@ -5,14 +5,14 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb.OpenDocumentResponse", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.OpenDocumentResponse", + namespace = JsPackage.GLOBAL) public class OpenDocumentResponse { public static native OpenDocumentResponse 
deserializeBinary(Uint8Array bytes); public static native OpenDocumentResponse deserializeBinaryFromReader( - OpenDocumentResponse message, Object reader); + OpenDocumentResponse message, Object reader); public static native void serializeBinaryToWriter(OpenDocumentResponse message, Object writer); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/Position.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/Position.java index 2380d6ff80f..b6f2f009749 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/Position.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/Position.java @@ -9,9 +9,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb.Position", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.Position", + namespace = JsPackage.GLOBAL) public class Position { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -59,8 +59,7 @@ static Position.ToObjectReturnType0 create() { public static native void serializeBinaryToWriter(Position message, Object writer); - public static native Position.ToObjectReturnType toObject(boolean includeInstance, - Position msg); + public static native Position.ToObjectReturnType toObject(boolean includeInstance, Position msg); public native double getCharacter(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/StartConsoleRequest.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/StartConsoleRequest.java index ecc87ceaddd..4ecb95f06a7 100644 --- 
a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/StartConsoleRequest.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/StartConsoleRequest.java @@ -10,9 +10,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb.StartConsoleRequest", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.StartConsoleRequest", + namespace = JsPackage.GLOBAL) public class StartConsoleRequest { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -22,7 +22,7 @@ public interface ResultIdFieldType { public interface GetTicketUnionType { @JsOverlay static StartConsoleRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -57,20 +57,20 @@ static StartConsoleRequest.ToObjectReturnType.ResultIdFieldType create() { @JsProperty void setTicket( - StartConsoleRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType ticket); + StartConsoleRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -100,7 +100,7 @@ public interface ResultIdFieldType { public interface GetTicketUnionType { @JsOverlay static StartConsoleRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -135,20 +135,20 @@ static StartConsoleRequest.ToObjectReturnType0.ResultIdFieldType create() { @JsProperty void setTicket( - StartConsoleRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType ticket); + 
StartConsoleRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -173,12 +173,12 @@ static StartConsoleRequest.ToObjectReturnType0 create() { public static native StartConsoleRequest deserializeBinary(Uint8Array bytes); public static native StartConsoleRequest deserializeBinaryFromReader( - StartConsoleRequest message, Object reader); + StartConsoleRequest message, Object reader); public static native void serializeBinaryToWriter(StartConsoleRequest message, Object writer); public static native StartConsoleRequest.ToObjectReturnType toObject( - boolean includeInstance, StartConsoleRequest msg); + boolean includeInstance, StartConsoleRequest msg); public native void clearResultId(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/StartConsoleResponse.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/StartConsoleResponse.java index ff8b6d7865e..af4740cc29e 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/StartConsoleResponse.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/StartConsoleResponse.java @@ -11,9 +11,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb.StartConsoleResponse", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.StartConsoleResponse", + namespace = JsPackage.GLOBAL) public class StartConsoleResponse { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public 
interface ToObjectReturnType { @@ -23,7 +23,7 @@ public interface ResultIdFieldType { public interface GetTicketUnionType { @JsOverlay static StartConsoleResponse.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -58,20 +58,20 @@ static StartConsoleResponse.ToObjectReturnType.ResultIdFieldType create() { @JsProperty void setTicket( - StartConsoleResponse.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType ticket); + StartConsoleResponse.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -117,7 +117,7 @@ public interface ResultIdFieldType { public interface GetTicketUnionType { @JsOverlay static StartConsoleResponse.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -152,20 +152,20 @@ static StartConsoleResponse.ToObjectReturnType0.ResultIdFieldType create() { @JsProperty void setTicket( - StartConsoleResponse.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType ticket); + StartConsoleResponse.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -206,12 +206,12 @@ default void setWidgetNamesList(String[] widgetNamesList) { public static native StartConsoleResponse deserializeBinary(Uint8Array bytes); public static native StartConsoleResponse deserializeBinaryFromReader( - StartConsoleResponse message, Object reader); + StartConsoleResponse message, Object reader); public static native void 
serializeBinaryToWriter(StartConsoleResponse message, Object writer); public static native StartConsoleResponse.ToObjectReturnType toObject( - boolean includeInstance, StartConsoleResponse msg); + boolean includeInstance, StartConsoleResponse msg); public native String addTableNames(String value, double index); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/TextDocumentItem.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/TextDocumentItem.java index 197fd6b2942..2dc8f6eb9c5 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/TextDocumentItem.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/TextDocumentItem.java @@ -9,9 +9,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb.TextDocumentItem", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.TextDocumentItem", + namespace = JsPackage.GLOBAL) public class TextDocumentItem { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -80,12 +80,12 @@ static TextDocumentItem.ToObjectReturnType0 create() { public static native TextDocumentItem deserializeBinary(Uint8Array bytes); public static native TextDocumentItem deserializeBinaryFromReader( - TextDocumentItem message, Object reader); + TextDocumentItem message, Object reader); public static native void serializeBinaryToWriter(TextDocumentItem message, Object writer); public static native TextDocumentItem.ToObjectReturnType toObject( - boolean includeInstance, TextDocumentItem msg); + boolean includeInstance, TextDocumentItem msg); public native String getLanguageId(); diff --git 
a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/TextEdit.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/TextEdit.java index a3f783a8b51..9ff1426236f 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/TextEdit.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/TextEdit.java @@ -9,9 +9,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb.TextEdit", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.TextEdit", + namespace = JsPackage.GLOBAL) public class TextEdit { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -139,8 +139,7 @@ static TextEdit.ToObjectReturnType0 create() { public static native void serializeBinaryToWriter(TextEdit message, Object writer); - public static native TextEdit.ToObjectReturnType toObject(boolean includeInstance, - TextEdit msg); + public static native TextEdit.ToObjectReturnType toObject(boolean includeInstance, TextEdit msg); public native void clearRange(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/VariableDefinition.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/VariableDefinition.java index b53d4d57fb0..e1ab255921a 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/VariableDefinition.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/VariableDefinition.java @@ -9,9 +9,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = 
true, - name = "dhinternal.io.deephaven.proto.console_pb.VariableDefinition", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.VariableDefinition", + namespace = JsPackage.GLOBAL) public class VariableDefinition { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -56,12 +56,12 @@ static VariableDefinition.ToObjectReturnType0 create() { public static native VariableDefinition deserializeBinary(Uint8Array bytes); public static native VariableDefinition deserializeBinaryFromReader( - VariableDefinition message, Object reader); + VariableDefinition message, Object reader); public static native void serializeBinaryToWriter(VariableDefinition message, Object writer); public static native VariableDefinition.ToObjectReturnType toObject( - boolean includeInstance, VariableDefinition msg); + boolean includeInstance, VariableDefinition msg); public native String getName(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/VersionedTextDocumentIdentifier.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/VersionedTextDocumentIdentifier.java index b8ba88f1069..e05cc51c92e 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/VersionedTextDocumentIdentifier.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/VersionedTextDocumentIdentifier.java @@ -9,9 +9,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb.VersionedTextDocumentIdentifier", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.VersionedTextDocumentIdentifier", + namespace = JsPackage.GLOBAL) public class 
VersionedTextDocumentIdentifier { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -56,13 +56,13 @@ static VersionedTextDocumentIdentifier.ToObjectReturnType0 create() { public static native VersionedTextDocumentIdentifier deserializeBinary(Uint8Array bytes); public static native VersionedTextDocumentIdentifier deserializeBinaryFromReader( - VersionedTextDocumentIdentifier message, Object reader); + VersionedTextDocumentIdentifier message, Object reader); public static native void serializeBinaryToWriter( - VersionedTextDocumentIdentifier message, Object writer); + VersionedTextDocumentIdentifier message, Object writer); public static native VersionedTextDocumentIdentifier.ToObjectReturnType toObject( - boolean includeInstance, VersionedTextDocumentIdentifier msg); + boolean includeInstance, VersionedTextDocumentIdentifier msg); public native String getUri(); @@ -77,5 +77,5 @@ public static native VersionedTextDocumentIdentifier.ToObjectReturnType toObject public native VersionedTextDocumentIdentifier.ToObjectReturnType0 toObject(); public native VersionedTextDocumentIdentifier.ToObjectReturnType0 toObject( - boolean includeInstance); + boolean includeInstance); } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/changedocumentrequest/TextDocumentContentChangeEvent.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/changedocumentrequest/TextDocumentContentChangeEvent.java index b515be6e6a0..6fa817b2165 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/changedocumentrequest/TextDocumentContentChangeEvent.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/changedocumentrequest/TextDocumentContentChangeEvent.java @@ -10,9 +10,9 @@ import 
jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb.ChangeDocumentRequest.TextDocumentContentChangeEvent", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.ChangeDocumentRequest.TextDocumentContentChangeEvent", + namespace = JsPackage.GLOBAL) public class TextDocumentContentChangeEvent { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -54,7 +54,7 @@ static TextDocumentContentChangeEvent.ToObjectReturnType.RangeFieldType create() @JsProperty void setStart( - TextDocumentContentChangeEvent.ToObjectReturnType.RangeFieldType.StartFieldType start); + TextDocumentContentChangeEvent.ToObjectReturnType.RangeFieldType.StartFieldType start); } @JsOverlay @@ -121,7 +121,7 @@ static TextDocumentContentChangeEvent.ToObjectReturnType0.RangeFieldType create( @JsProperty void setStart( - TextDocumentContentChangeEvent.ToObjectReturnType0.RangeFieldType.StartFieldType start); + TextDocumentContentChangeEvent.ToObjectReturnType0.RangeFieldType.StartFieldType start); } @JsOverlay @@ -151,13 +151,13 @@ static TextDocumentContentChangeEvent.ToObjectReturnType0 create() { public static native TextDocumentContentChangeEvent deserializeBinary(Uint8Array bytes); public static native TextDocumentContentChangeEvent deserializeBinaryFromReader( - TextDocumentContentChangeEvent message, Object reader); + TextDocumentContentChangeEvent message, Object reader); public static native void serializeBinaryToWriter( - TextDocumentContentChangeEvent message, Object writer); + TextDocumentContentChangeEvent message, Object writer); public static native TextDocumentContentChangeEvent.ToObjectReturnType toObject( - boolean includeInstance, TextDocumentContentChangeEvent msg); + boolean includeInstance, TextDocumentContentChangeEvent msg); public native void clearRange(); @@ -182,5 +182,5 @@ public static native 
TextDocumentContentChangeEvent.ToObjectReturnType toObject( public native TextDocumentContentChangeEvent.ToObjectReturnType0 toObject(); public native TextDocumentContentChangeEvent.ToObjectReturnType0 toObject( - boolean includeInstance); + boolean includeInstance); } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/AxisDescriptor.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/AxisDescriptor.java index fda13937747..d19b217b1a9 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/AxisDescriptor.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/AxisDescriptor.java @@ -13,9 +13,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb.FigureDescriptor.AxisDescriptor", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.FigureDescriptor.AxisDescriptor", + namespace = JsPackage.GLOBAL) public class AxisDescriptor { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -90,7 +90,7 @@ default void setBusinessPeriodsList(Object[] businessPeriodsList) { @JsProperty void setDate( - AxisDescriptor.ToObjectReturnType.BusinessCalendarDescriptorFieldType.HolidaysListFieldType.DateFieldType date); + AxisDescriptor.ToObjectReturnType.BusinessCalendarDescriptorFieldType.HolidaysListFieldType.DateFieldType date); } @JsOverlay @@ -123,27 +123,27 @@ default void setBusinessDaysList(double[] businessDaysList) { @JsOverlay default void setBusinessPeriodsList( - AxisDescriptor.ToObjectReturnType.BusinessCalendarDescriptorFieldType.BusinessPeriodsListFieldType[] businessPeriodsList) { + 
AxisDescriptor.ToObjectReturnType.BusinessCalendarDescriptorFieldType.BusinessPeriodsListFieldType[] businessPeriodsList) { setBusinessPeriodsList( - Js.>uncheckedCast( - businessPeriodsList)); + Js.>uncheckedCast( + businessPeriodsList)); } @JsProperty void setBusinessPeriodsList( - JsArray businessPeriodsList); + JsArray businessPeriodsList); @JsOverlay default void setHolidaysList( - AxisDescriptor.ToObjectReturnType.BusinessCalendarDescriptorFieldType.HolidaysListFieldType[] holidaysList) { + AxisDescriptor.ToObjectReturnType.BusinessCalendarDescriptorFieldType.HolidaysListFieldType[] holidaysList) { setHolidaysList( - Js.>uncheckedCast( - holidaysList)); + Js.>uncheckedCast( + holidaysList)); } @JsProperty void setHolidaysList( - JsArray holidaysList); + JsArray holidaysList); @JsProperty void setName(String name); @@ -222,7 +222,7 @@ static AxisDescriptor.ToObjectReturnType create() { @JsProperty void setBusinessCalendarDescriptor( - AxisDescriptor.ToObjectReturnType.BusinessCalendarDescriptorFieldType businessCalendarDescriptor); + AxisDescriptor.ToObjectReturnType.BusinessCalendarDescriptorFieldType businessCalendarDescriptor); @JsProperty void setColor(String color); @@ -363,7 +363,7 @@ default void setBusinessPeriodsList(Object[] businessPeriodsList) { @JsProperty void setDate( - AxisDescriptor.ToObjectReturnType0.BusinessCalendarDescriptorFieldType.HolidaysListFieldType.DateFieldType date); + AxisDescriptor.ToObjectReturnType0.BusinessCalendarDescriptorFieldType.HolidaysListFieldType.DateFieldType date); } @JsOverlay @@ -396,27 +396,27 @@ default void setBusinessDaysList(double[] businessDaysList) { @JsOverlay default void setBusinessPeriodsList( - AxisDescriptor.ToObjectReturnType0.BusinessCalendarDescriptorFieldType.BusinessPeriodsListFieldType[] businessPeriodsList) { + AxisDescriptor.ToObjectReturnType0.BusinessCalendarDescriptorFieldType.BusinessPeriodsListFieldType[] businessPeriodsList) { setBusinessPeriodsList( - Js.>uncheckedCast( - 
businessPeriodsList)); + Js.>uncheckedCast( + businessPeriodsList)); } @JsProperty void setBusinessPeriodsList( - JsArray businessPeriodsList); + JsArray businessPeriodsList); @JsOverlay default void setHolidaysList( - AxisDescriptor.ToObjectReturnType0.BusinessCalendarDescriptorFieldType.HolidaysListFieldType[] holidaysList) { + AxisDescriptor.ToObjectReturnType0.BusinessCalendarDescriptorFieldType.HolidaysListFieldType[] holidaysList) { setHolidaysList( - Js.>uncheckedCast( - holidaysList)); + Js.>uncheckedCast( + holidaysList)); } @JsProperty void setHolidaysList( - JsArray holidaysList); + JsArray holidaysList); @JsProperty void setName(String name); @@ -495,7 +495,7 @@ static AxisDescriptor.ToObjectReturnType0 create() { @JsProperty void setBusinessCalendarDescriptor( - AxisDescriptor.ToObjectReturnType0.BusinessCalendarDescriptorFieldType businessCalendarDescriptor); + AxisDescriptor.ToObjectReturnType0.BusinessCalendarDescriptorFieldType businessCalendarDescriptor); @JsProperty void setColor(String color); @@ -570,12 +570,12 @@ default void setMajorTickLocationsList(double[] majorTickLocationsList) { public static native AxisDescriptor deserializeBinary(Uint8Array bytes); public static native AxisDescriptor deserializeBinaryFromReader( - AxisDescriptor message, Object reader); + AxisDescriptor message, Object reader); public static native void serializeBinaryToWriter(AxisDescriptor message, Object writer); public static native AxisDescriptor.ToObjectReturnType toObject( - boolean includeInstance, AxisDescriptor msg); + boolean includeInstance, AxisDescriptor msg); public native double addMajorTickLocations(double value, double index); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/BoolMapWithDefault.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/BoolMapWithDefault.java index 
caaccd652b8..728dfef9a0e 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/BoolMapWithDefault.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/BoolMapWithDefault.java @@ -10,9 +10,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb.FigureDescriptor.BoolMapWithDefault", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.FigureDescriptor.BoolMapWithDefault", + namespace = JsPackage.GLOBAL) public class BoolMapWithDefault { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -89,12 +89,12 @@ default void setValuesList(boolean[] valuesList) { public static native BoolMapWithDefault deserializeBinary(Uint8Array bytes); public static native BoolMapWithDefault deserializeBinaryFromReader( - BoolMapWithDefault message, Object reader); + BoolMapWithDefault message, Object reader); public static native void serializeBinaryToWriter(BoolMapWithDefault message, Object writer); public static native BoolMapWithDefault.ToObjectReturnType toObject( - boolean includeInstance, BoolMapWithDefault msg); + boolean includeInstance, BoolMapWithDefault msg); public native String addKeys(String value, double index); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/BusinessCalendarDescriptor.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/BusinessCalendarDescriptor.java index b09bf3aba79..7285ddabe10 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/BusinessCalendarDescriptor.java +++ 
b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/BusinessCalendarDescriptor.java @@ -13,9 +13,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb.FigureDescriptor.BusinessCalendarDescriptor", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.FigureDescriptor.BusinessCalendarDescriptor", + namespace = JsPackage.GLOBAL) public class BusinessCalendarDescriptor { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -88,7 +88,7 @@ default void setBusinessPeriodsList(Object[] businessPeriodsList) { @JsProperty void setDate( - BusinessCalendarDescriptor.ToObjectReturnType.HolidaysListFieldType.DateFieldType date); + BusinessCalendarDescriptor.ToObjectReturnType.HolidaysListFieldType.DateFieldType date); } @JsOverlay @@ -121,27 +121,27 @@ default void setBusinessDaysList(double[] businessDaysList) { @JsOverlay default void setBusinessPeriodsList( - BusinessCalendarDescriptor.ToObjectReturnType.BusinessPeriodsListFieldType[] businessPeriodsList) { + BusinessCalendarDescriptor.ToObjectReturnType.BusinessPeriodsListFieldType[] businessPeriodsList) { setBusinessPeriodsList( - Js.>uncheckedCast( - businessPeriodsList)); + Js.>uncheckedCast( + businessPeriodsList)); } @JsProperty void setBusinessPeriodsList( - JsArray businessPeriodsList); + JsArray businessPeriodsList); @JsOverlay default void setHolidaysList( - BusinessCalendarDescriptor.ToObjectReturnType.HolidaysListFieldType[] holidaysList) { + BusinessCalendarDescriptor.ToObjectReturnType.HolidaysListFieldType[] holidaysList) { setHolidaysList( - Js.>uncheckedCast( - holidaysList)); + Js.>uncheckedCast( + holidaysList)); } @JsProperty void setHolidaysList( - JsArray holidaysList); + JsArray holidaysList); @JsProperty void setName(String name); @@ -221,7 +221,7 @@ default void 
setBusinessPeriodsList(Object[] businessPeriodsList) { @JsProperty void setDate( - BusinessCalendarDescriptor.ToObjectReturnType0.HolidaysListFieldType.DateFieldType date); + BusinessCalendarDescriptor.ToObjectReturnType0.HolidaysListFieldType.DateFieldType date); } @JsOverlay @@ -254,27 +254,27 @@ default void setBusinessDaysList(double[] businessDaysList) { @JsOverlay default void setBusinessPeriodsList( - BusinessCalendarDescriptor.ToObjectReturnType0.BusinessPeriodsListFieldType[] businessPeriodsList) { + BusinessCalendarDescriptor.ToObjectReturnType0.BusinessPeriodsListFieldType[] businessPeriodsList) { setBusinessPeriodsList( - Js.>uncheckedCast( - businessPeriodsList)); + Js.>uncheckedCast( + businessPeriodsList)); } @JsProperty void setBusinessPeriodsList( - JsArray businessPeriodsList); + JsArray businessPeriodsList); @JsOverlay default void setHolidaysList( - BusinessCalendarDescriptor.ToObjectReturnType0.HolidaysListFieldType[] holidaysList) { + BusinessCalendarDescriptor.ToObjectReturnType0.HolidaysListFieldType[] holidaysList) { setHolidaysList( - Js.>uncheckedCast( - holidaysList)); + Js.>uncheckedCast( + holidaysList)); } @JsProperty void setHolidaysList( - JsArray holidaysList); + JsArray holidaysList); @JsProperty void setName(String name); @@ -288,13 +288,13 @@ void setHolidaysList( public static native BusinessCalendarDescriptor deserializeBinary(Uint8Array bytes); public static native BusinessCalendarDescriptor deserializeBinaryFromReader( - BusinessCalendarDescriptor message, Object reader); + BusinessCalendarDescriptor message, Object reader); public static native void serializeBinaryToWriter( - BusinessCalendarDescriptor message, Object writer); + BusinessCalendarDescriptor message, Object writer); public static native BusinessCalendarDescriptor.ToObjectReturnType toObject( - boolean includeInstance, BusinessCalendarDescriptor msg); + boolean includeInstance, BusinessCalendarDescriptor msg); public native double addBusinessDays(double value, 
double index); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/ChartDescriptor.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/ChartDescriptor.java index a07aa2a05d0..7af5e64eb03 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/ChartDescriptor.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/ChartDescriptor.java @@ -11,9 +11,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb.FigureDescriptor.ChartDescriptor", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.FigureDescriptor.ChartDescriptor", + namespace = JsPackage.GLOBAL) public class ChartDescriptor { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -85,13 +85,12 @@ static ChartDescriptor.ToObjectReturnType.AxesListFieldType.BusinessCalendarDesc @JsOverlay default void setBusinessPeriodsList(Object[] businessPeriodsList) { - setBusinessPeriodsList( - Js.>uncheckedCast(businessPeriodsList)); + setBusinessPeriodsList(Js.>uncheckedCast(businessPeriodsList)); } @JsProperty void setDate( - ChartDescriptor.ToObjectReturnType.AxesListFieldType.BusinessCalendarDescriptorFieldType.HolidaysListFieldType.DateFieldType date); + ChartDescriptor.ToObjectReturnType.AxesListFieldType.BusinessCalendarDescriptorFieldType.HolidaysListFieldType.DateFieldType date); } @JsOverlay @@ -124,27 +123,27 @@ default void setBusinessDaysList(double[] businessDaysList) { @JsOverlay default void setBusinessPeriodsList( - 
ChartDescriptor.ToObjectReturnType.AxesListFieldType.BusinessCalendarDescriptorFieldType.BusinessPeriodsListFieldType[] businessPeriodsList) { + ChartDescriptor.ToObjectReturnType.AxesListFieldType.BusinessCalendarDescriptorFieldType.BusinessPeriodsListFieldType[] businessPeriodsList) { setBusinessPeriodsList( - Js.>uncheckedCast( - businessPeriodsList)); + Js.>uncheckedCast( + businessPeriodsList)); } @JsProperty void setBusinessPeriodsList( - JsArray businessPeriodsList); + JsArray businessPeriodsList); @JsOverlay default void setHolidaysList( - ChartDescriptor.ToObjectReturnType.AxesListFieldType.BusinessCalendarDescriptorFieldType.HolidaysListFieldType[] holidaysList) { + ChartDescriptor.ToObjectReturnType.AxesListFieldType.BusinessCalendarDescriptorFieldType.HolidaysListFieldType[] holidaysList) { setHolidaysList( - Js.>uncheckedCast( - holidaysList)); + Js.>uncheckedCast( + holidaysList)); } @JsProperty void setHolidaysList( - JsArray holidaysList); + JsArray holidaysList); @JsProperty void setName(String name); @@ -223,7 +222,7 @@ static ChartDescriptor.ToObjectReturnType.AxesListFieldType create() { @JsProperty void setBusinessCalendarDescriptor( - ChartDescriptor.ToObjectReturnType.AxesListFieldType.BusinessCalendarDescriptorFieldType businessCalendarDescriptor); + ChartDescriptor.ToObjectReturnType.AxesListFieldType.BusinessCalendarDescriptorFieldType businessCalendarDescriptor); @JsProperty void setColor(String color); @@ -260,8 +259,7 @@ void setBusinessCalendarDescriptor( @JsOverlay default void setMajorTickLocationsList(double[] majorTickLocationsList) { - setMajorTickLocationsList( - Js.>uncheckedCast(majorTickLocationsList)); + setMajorTickLocationsList(Js.>uncheckedCast(majorTickLocationsList)); } @JsProperty @@ -483,26 +481,26 @@ static ChartDescriptor.ToObjectReturnType.MultiSeriesListFieldType create() { @JsOverlay default void setDataSourcesList( - ChartDescriptor.ToObjectReturnType.MultiSeriesListFieldType.DataSourcesListFieldType[] 
dataSourcesList) { + ChartDescriptor.ToObjectReturnType.MultiSeriesListFieldType.DataSourcesListFieldType[] dataSourcesList) { setDataSourcesList( - Js.>uncheckedCast( - dataSourcesList)); + Js.>uncheckedCast( + dataSourcesList)); } @JsProperty void setDataSourcesList( - JsArray dataSourcesList); + JsArray dataSourcesList); @JsProperty void setGradientVisible(Object gradientVisible); @JsProperty void setLineColor( - ChartDescriptor.ToObjectReturnType.MultiSeriesListFieldType.LineColorFieldType lineColor); + ChartDescriptor.ToObjectReturnType.MultiSeriesListFieldType.LineColorFieldType lineColor); @JsProperty void setLinesVisible( - ChartDescriptor.ToObjectReturnType.MultiSeriesListFieldType.LinesVisibleFieldType linesVisible); + ChartDescriptor.ToObjectReturnType.MultiSeriesListFieldType.LinesVisibleFieldType linesVisible); @JsProperty void setName(String name); @@ -524,7 +522,7 @@ void setLinesVisible( @JsProperty void setPointSize( - ChartDescriptor.ToObjectReturnType.MultiSeriesListFieldType.PointSizeFieldType pointSize); + ChartDescriptor.ToObjectReturnType.MultiSeriesListFieldType.PointSizeFieldType pointSize); @JsProperty void setPointsVisible(Object pointsVisible); @@ -613,7 +611,7 @@ static ChartDescriptor.ToObjectReturnType.SeriesListFieldType.DataSourcesListFie @JsProperty void setOneClick( - ChartDescriptor.ToObjectReturnType.SeriesListFieldType.DataSourcesListFieldType.OneClickFieldType oneClick); + ChartDescriptor.ToObjectReturnType.SeriesListFieldType.DataSourcesListFieldType.OneClickFieldType oneClick); @JsProperty void setTableId(double tableId); @@ -674,15 +672,15 @@ static ChartDescriptor.ToObjectReturnType.SeriesListFieldType create() { @JsOverlay default void setDataSourcesList( - ChartDescriptor.ToObjectReturnType.SeriesListFieldType.DataSourcesListFieldType[] dataSourcesList) { + ChartDescriptor.ToObjectReturnType.SeriesListFieldType.DataSourcesListFieldType[] dataSourcesList) { setDataSourcesList( - Js.>uncheckedCast( - dataSourcesList)); + 
Js.>uncheckedCast( + dataSourcesList)); } @JsProperty void setDataSourcesList( - JsArray dataSourcesList); + JsArray dataSourcesList); @JsProperty void setGradientVisible(boolean gradientVisible); @@ -771,8 +769,8 @@ static ChartDescriptor.ToObjectReturnType create() { @JsOverlay default void setAxesList(ChartDescriptor.ToObjectReturnType.AxesListFieldType[] axesList) { setAxesList( - Js.>uncheckedCast( - axesList)); + Js.>uncheckedCast( + axesList)); } @JsProperty @@ -795,29 +793,28 @@ default void setAxesList(ChartDescriptor.ToObjectReturnType.AxesListFieldType[] @JsProperty void setMultiSeriesList( - JsArray multiSeriesList); + JsArray multiSeriesList); @JsOverlay default void setMultiSeriesList( - ChartDescriptor.ToObjectReturnType.MultiSeriesListFieldType[] multiSeriesList) { + ChartDescriptor.ToObjectReturnType.MultiSeriesListFieldType[] multiSeriesList) { setMultiSeriesList( - Js.>uncheckedCast( - multiSeriesList)); + Js.>uncheckedCast( + multiSeriesList)); } @JsProperty void setRowspan(double rowspan); @JsProperty - void setSeriesList( - JsArray seriesList); + void setSeriesList(JsArray seriesList); @JsOverlay default void setSeriesList( - ChartDescriptor.ToObjectReturnType.SeriesListFieldType[] seriesList) { + ChartDescriptor.ToObjectReturnType.SeriesListFieldType[] seriesList) { setSeriesList( - Js.>uncheckedCast( - seriesList)); + Js.>uncheckedCast( + seriesList)); } @JsProperty @@ -903,13 +900,12 @@ static ChartDescriptor.ToObjectReturnType0.AxesListFieldType.BusinessCalendarDes @JsOverlay default void setBusinessPeriodsList(Object[] businessPeriodsList) { - setBusinessPeriodsList( - Js.>uncheckedCast(businessPeriodsList)); + setBusinessPeriodsList(Js.>uncheckedCast(businessPeriodsList)); } @JsProperty void setDate( - ChartDescriptor.ToObjectReturnType0.AxesListFieldType.BusinessCalendarDescriptorFieldType.HolidaysListFieldType.DateFieldType date); + 
ChartDescriptor.ToObjectReturnType0.AxesListFieldType.BusinessCalendarDescriptorFieldType.HolidaysListFieldType.DateFieldType date); } @JsOverlay @@ -942,27 +938,27 @@ default void setBusinessDaysList(double[] businessDaysList) { @JsOverlay default void setBusinessPeriodsList( - ChartDescriptor.ToObjectReturnType0.AxesListFieldType.BusinessCalendarDescriptorFieldType.BusinessPeriodsListFieldType[] businessPeriodsList) { + ChartDescriptor.ToObjectReturnType0.AxesListFieldType.BusinessCalendarDescriptorFieldType.BusinessPeriodsListFieldType[] businessPeriodsList) { setBusinessPeriodsList( - Js.>uncheckedCast( - businessPeriodsList)); + Js.>uncheckedCast( + businessPeriodsList)); } @JsProperty void setBusinessPeriodsList( - JsArray businessPeriodsList); + JsArray businessPeriodsList); @JsOverlay default void setHolidaysList( - ChartDescriptor.ToObjectReturnType0.AxesListFieldType.BusinessCalendarDescriptorFieldType.HolidaysListFieldType[] holidaysList) { + ChartDescriptor.ToObjectReturnType0.AxesListFieldType.BusinessCalendarDescriptorFieldType.HolidaysListFieldType[] holidaysList) { setHolidaysList( - Js.>uncheckedCast( - holidaysList)); + Js.>uncheckedCast( + holidaysList)); } @JsProperty void setHolidaysList( - JsArray holidaysList); + JsArray holidaysList); @JsProperty void setName(String name); @@ -1041,7 +1037,7 @@ static ChartDescriptor.ToObjectReturnType0.AxesListFieldType create() { @JsProperty void setBusinessCalendarDescriptor( - ChartDescriptor.ToObjectReturnType0.AxesListFieldType.BusinessCalendarDescriptorFieldType businessCalendarDescriptor); + ChartDescriptor.ToObjectReturnType0.AxesListFieldType.BusinessCalendarDescriptorFieldType businessCalendarDescriptor); @JsProperty void setColor(String color); @@ -1078,8 +1074,7 @@ void setBusinessCalendarDescriptor( @JsOverlay default void setMajorTickLocationsList(double[] majorTickLocationsList) { - setMajorTickLocationsList( - Js.>uncheckedCast(majorTickLocationsList)); + 
setMajorTickLocationsList(Js.>uncheckedCast(majorTickLocationsList)); } @JsProperty @@ -1301,26 +1296,26 @@ static ChartDescriptor.ToObjectReturnType0.MultiSeriesListFieldType create() { @JsOverlay default void setDataSourcesList( - ChartDescriptor.ToObjectReturnType0.MultiSeriesListFieldType.DataSourcesListFieldType[] dataSourcesList) { + ChartDescriptor.ToObjectReturnType0.MultiSeriesListFieldType.DataSourcesListFieldType[] dataSourcesList) { setDataSourcesList( - Js.>uncheckedCast( - dataSourcesList)); + Js.>uncheckedCast( + dataSourcesList)); } @JsProperty void setDataSourcesList( - JsArray dataSourcesList); + JsArray dataSourcesList); @JsProperty void setGradientVisible(Object gradientVisible); @JsProperty void setLineColor( - ChartDescriptor.ToObjectReturnType0.MultiSeriesListFieldType.LineColorFieldType lineColor); + ChartDescriptor.ToObjectReturnType0.MultiSeriesListFieldType.LineColorFieldType lineColor); @JsProperty void setLinesVisible( - ChartDescriptor.ToObjectReturnType0.MultiSeriesListFieldType.LinesVisibleFieldType linesVisible); + ChartDescriptor.ToObjectReturnType0.MultiSeriesListFieldType.LinesVisibleFieldType linesVisible); @JsProperty void setName(String name); @@ -1342,7 +1337,7 @@ void setLinesVisible( @JsProperty void setPointSize( - ChartDescriptor.ToObjectReturnType0.MultiSeriesListFieldType.PointSizeFieldType pointSize); + ChartDescriptor.ToObjectReturnType0.MultiSeriesListFieldType.PointSizeFieldType pointSize); @JsProperty void setPointsVisible(Object pointsVisible); @@ -1431,7 +1426,7 @@ static ChartDescriptor.ToObjectReturnType0.SeriesListFieldType.DataSourcesListFi @JsProperty void setOneClick( - ChartDescriptor.ToObjectReturnType0.SeriesListFieldType.DataSourcesListFieldType.OneClickFieldType oneClick); + ChartDescriptor.ToObjectReturnType0.SeriesListFieldType.DataSourcesListFieldType.OneClickFieldType oneClick); @JsProperty void setTableId(double tableId); @@ -1492,15 +1487,15 @@ static 
ChartDescriptor.ToObjectReturnType0.SeriesListFieldType create() { @JsOverlay default void setDataSourcesList( - ChartDescriptor.ToObjectReturnType0.SeriesListFieldType.DataSourcesListFieldType[] dataSourcesList) { + ChartDescriptor.ToObjectReturnType0.SeriesListFieldType.DataSourcesListFieldType[] dataSourcesList) { setDataSourcesList( - Js.>uncheckedCast( - dataSourcesList)); + Js.>uncheckedCast( + dataSourcesList)); } @JsProperty void setDataSourcesList( - JsArray dataSourcesList); + JsArray dataSourcesList); @JsProperty void setGradientVisible(boolean gradientVisible); @@ -1589,8 +1584,8 @@ static ChartDescriptor.ToObjectReturnType0 create() { @JsOverlay default void setAxesList(ChartDescriptor.ToObjectReturnType0.AxesListFieldType[] axesList) { setAxesList( - Js.>uncheckedCast( - axesList)); + Js.>uncheckedCast( + axesList)); } @JsProperty @@ -1613,29 +1608,28 @@ default void setAxesList(ChartDescriptor.ToObjectReturnType0.AxesListFieldType[] @JsProperty void setMultiSeriesList( - JsArray multiSeriesList); + JsArray multiSeriesList); @JsOverlay default void setMultiSeriesList( - ChartDescriptor.ToObjectReturnType0.MultiSeriesListFieldType[] multiSeriesList) { + ChartDescriptor.ToObjectReturnType0.MultiSeriesListFieldType[] multiSeriesList) { setMultiSeriesList( - Js.>uncheckedCast( - multiSeriesList)); + Js.>uncheckedCast( + multiSeriesList)); } @JsProperty void setRowspan(double rowspan); @JsProperty - void setSeriesList( - JsArray seriesList); + void setSeriesList(JsArray seriesList); @JsOverlay default void setSeriesList( - ChartDescriptor.ToObjectReturnType0.SeriesListFieldType[] seriesList) { + ChartDescriptor.ToObjectReturnType0.SeriesListFieldType[] seriesList) { setSeriesList( - Js.>uncheckedCast( - seriesList)); + Js.>uncheckedCast( + seriesList)); } @JsProperty @@ -1656,12 +1650,12 @@ default void setSeriesList( public static native ChartDescriptor deserializeBinary(Uint8Array bytes); public static native ChartDescriptor deserializeBinaryFromReader( 
- ChartDescriptor message, Object reader); + ChartDescriptor message, Object reader); public static native void serializeBinaryToWriter(ChartDescriptor message, Object writer); public static native ChartDescriptor.ToObjectReturnType toObject( - boolean includeInstance, ChartDescriptor msg); + boolean includeInstance, ChartDescriptor msg); public native AxisDescriptor addAxes(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/DoubleMapWithDefault.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/DoubleMapWithDefault.java index 857de85e952..52e851329a3 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/DoubleMapWithDefault.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/DoubleMapWithDefault.java @@ -10,9 +10,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb.FigureDescriptor.DoubleMapWithDefault", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.FigureDescriptor.DoubleMapWithDefault", + namespace = JsPackage.GLOBAL) public class DoubleMapWithDefault { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -89,12 +89,12 @@ default void setValuesList(double[] valuesList) { public static native DoubleMapWithDefault deserializeBinary(Uint8Array bytes); public static native DoubleMapWithDefault deserializeBinaryFromReader( - DoubleMapWithDefault message, Object reader); + DoubleMapWithDefault message, Object reader); public static native void serializeBinaryToWriter(DoubleMapWithDefault message, Object writer); public static native 
DoubleMapWithDefault.ToObjectReturnType toObject( - boolean includeInstance, DoubleMapWithDefault msg); + boolean includeInstance, DoubleMapWithDefault msg); public native String addKeys(String value, double index); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/MultiSeriesDescriptor.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/MultiSeriesDescriptor.java index a737d47282e..c05c7bf6494 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/MultiSeriesDescriptor.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/MultiSeriesDescriptor.java @@ -10,9 +10,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb.FigureDescriptor.MultiSeriesDescriptor", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.FigureDescriptor.MultiSeriesDescriptor", + namespace = JsPackage.GLOBAL) public class MultiSeriesDescriptor { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -205,15 +205,15 @@ static MultiSeriesDescriptor.ToObjectReturnType create() { @JsOverlay default void setDataSourcesList( - MultiSeriesDescriptor.ToObjectReturnType.DataSourcesListFieldType[] dataSourcesList) { + MultiSeriesDescriptor.ToObjectReturnType.DataSourcesListFieldType[] dataSourcesList) { setDataSourcesList( - Js.>uncheckedCast( - dataSourcesList)); + Js.>uncheckedCast( + dataSourcesList)); } @JsProperty void setDataSourcesList( - JsArray dataSourcesList); + JsArray dataSourcesList); @JsProperty void setGradientVisible(Object gradientVisible); @@ -223,7 +223,7 @@ void setDataSourcesList( @JsProperty void 
setLinesVisible( - MultiSeriesDescriptor.ToObjectReturnType.LinesVisibleFieldType linesVisible); + MultiSeriesDescriptor.ToObjectReturnType.LinesVisibleFieldType linesVisible); @JsProperty void setName(String name); @@ -447,15 +447,15 @@ static MultiSeriesDescriptor.ToObjectReturnType0 create() { @JsOverlay default void setDataSourcesList( - MultiSeriesDescriptor.ToObjectReturnType0.DataSourcesListFieldType[] dataSourcesList) { + MultiSeriesDescriptor.ToObjectReturnType0.DataSourcesListFieldType[] dataSourcesList) { setDataSourcesList( - Js.>uncheckedCast( - dataSourcesList)); + Js.>uncheckedCast( + dataSourcesList)); } @JsProperty void setDataSourcesList( - JsArray dataSourcesList); + JsArray dataSourcesList); @JsProperty void setGradientVisible(Object gradientVisible); @@ -465,7 +465,7 @@ void setDataSourcesList( @JsProperty void setLinesVisible( - MultiSeriesDescriptor.ToObjectReturnType0.LinesVisibleFieldType linesVisible); + MultiSeriesDescriptor.ToObjectReturnType0.LinesVisibleFieldType linesVisible); @JsProperty void setName(String name); @@ -501,17 +501,17 @@ void setLinesVisible( public static native MultiSeriesDescriptor deserializeBinary(Uint8Array bytes); public static native MultiSeriesDescriptor deserializeBinaryFromReader( - MultiSeriesDescriptor message, Object reader); + MultiSeriesDescriptor message, Object reader); public static native void serializeBinaryToWriter(MultiSeriesDescriptor message, Object writer); public static native MultiSeriesDescriptor.ToObjectReturnType toObject( - boolean includeInstance, MultiSeriesDescriptor msg); + boolean includeInstance, MultiSeriesDescriptor msg); public native MultiSeriesSourceDescriptor addDataSources(); public native MultiSeriesSourceDescriptor addDataSources( - MultiSeriesSourceDescriptor value, double index); + MultiSeriesSourceDescriptor value, double index); public native MultiSeriesSourceDescriptor addDataSources(MultiSeriesSourceDescriptor value); diff --git 
a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/MultiSeriesSourceDescriptor.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/MultiSeriesSourceDescriptor.java index 6beefba4141..bbe4af56d8c 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/MultiSeriesSourceDescriptor.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/MultiSeriesSourceDescriptor.java @@ -9,9 +9,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb.FigureDescriptor.MultiSeriesSourceDescriptor", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.FigureDescriptor.MultiSeriesSourceDescriptor", + namespace = JsPackage.GLOBAL) public class MultiSeriesSourceDescriptor { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -80,13 +80,13 @@ static MultiSeriesSourceDescriptor.ToObjectReturnType0 create() { public static native MultiSeriesSourceDescriptor deserializeBinary(Uint8Array bytes); public static native MultiSeriesSourceDescriptor deserializeBinaryFromReader( - MultiSeriesSourceDescriptor message, Object reader); + MultiSeriesSourceDescriptor message, Object reader); public static native void serializeBinaryToWriter( - MultiSeriesSourceDescriptor message, Object writer); + MultiSeriesSourceDescriptor message, Object writer); public static native MultiSeriesSourceDescriptor.ToObjectReturnType toObject( - boolean includeInstance, MultiSeriesSourceDescriptor msg); + boolean includeInstance, MultiSeriesSourceDescriptor msg); public native String getAxisId(); diff --git 
a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/OneClickDescriptor.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/OneClickDescriptor.java index e31cc92a9ab..92e8e4795b5 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/OneClickDescriptor.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/OneClickDescriptor.java @@ -10,9 +10,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb.FigureDescriptor.OneClickDescriptor", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.FigureDescriptor.OneClickDescriptor", + namespace = JsPackage.GLOBAL) public class OneClickDescriptor { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -89,12 +89,12 @@ default void setColumnsList(String[] columnsList) { public static native OneClickDescriptor deserializeBinary(Uint8Array bytes); public static native OneClickDescriptor deserializeBinaryFromReader( - OneClickDescriptor message, Object reader); + OneClickDescriptor message, Object reader); public static native void serializeBinaryToWriter(OneClickDescriptor message, Object writer); public static native OneClickDescriptor.ToObjectReturnType toObject( - boolean includeInstance, OneClickDescriptor msg); + boolean includeInstance, OneClickDescriptor msg); public native String addColumnTypes(String value, double index); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/SeriesDescriptor.java 
b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/SeriesDescriptor.java index 26d244cced1..fc2df78f0ea 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/SeriesDescriptor.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/SeriesDescriptor.java @@ -10,9 +10,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb.FigureDescriptor.SeriesDescriptor", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.FigureDescriptor.SeriesDescriptor", + namespace = JsPackage.GLOBAL) public class SeriesDescriptor { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -91,7 +91,7 @@ static SeriesDescriptor.ToObjectReturnType.DataSourcesListFieldType create() { @JsProperty void setOneClick( - SeriesDescriptor.ToObjectReturnType.DataSourcesListFieldType.OneClickFieldType oneClick); + SeriesDescriptor.ToObjectReturnType.DataSourcesListFieldType.OneClickFieldType oneClick); @JsProperty void setTableId(double tableId); @@ -152,15 +152,15 @@ static SeriesDescriptor.ToObjectReturnType create() { @JsOverlay default void setDataSourcesList( - SeriesDescriptor.ToObjectReturnType.DataSourcesListFieldType[] dataSourcesList) { + SeriesDescriptor.ToObjectReturnType.DataSourcesListFieldType[] dataSourcesList) { setDataSourcesList( - Js.>uncheckedCast( - dataSourcesList)); + Js.>uncheckedCast( + dataSourcesList)); } @JsProperty void setDataSourcesList( - JsArray dataSourcesList); + JsArray dataSourcesList); @JsProperty void setGradientVisible(boolean gradientVisible); @@ -279,7 +279,7 @@ static SeriesDescriptor.ToObjectReturnType0.DataSourcesListFieldType create() { @JsProperty void setOneClick( 
- SeriesDescriptor.ToObjectReturnType0.DataSourcesListFieldType.OneClickFieldType oneClick); + SeriesDescriptor.ToObjectReturnType0.DataSourcesListFieldType.OneClickFieldType oneClick); @JsProperty void setTableId(double tableId); @@ -340,15 +340,15 @@ static SeriesDescriptor.ToObjectReturnType0 create() { @JsOverlay default void setDataSourcesList( - SeriesDescriptor.ToObjectReturnType0.DataSourcesListFieldType[] dataSourcesList) { + SeriesDescriptor.ToObjectReturnType0.DataSourcesListFieldType[] dataSourcesList) { setDataSourcesList( - Js.>uncheckedCast( - dataSourcesList)); + Js.>uncheckedCast( + dataSourcesList)); } @JsProperty void setDataSourcesList( - JsArray dataSourcesList); + JsArray dataSourcesList); @JsProperty void setGradientVisible(boolean gradientVisible); @@ -393,12 +393,12 @@ void setDataSourcesList( public static native SeriesDescriptor deserializeBinary(Uint8Array bytes); public static native SeriesDescriptor deserializeBinaryFromReader( - SeriesDescriptor message, Object reader); + SeriesDescriptor message, Object reader); public static native void serializeBinaryToWriter(SeriesDescriptor message, Object writer); public static native SeriesDescriptor.ToObjectReturnType toObject( - boolean includeInstance, SeriesDescriptor msg); + boolean includeInstance, SeriesDescriptor msg); public native SourceDescriptor addDataSources(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/SeriesPlotStyleMap.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/SeriesPlotStyleMap.java index c47ff874997..e9e2dfc73ec 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/SeriesPlotStyleMap.java +++ 
b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/SeriesPlotStyleMap.java @@ -8,9 +8,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb.FigureDescriptor.SeriesPlotStyleMap", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.FigureDescriptor.SeriesPlotStyleMap", + namespace = JsPackage.GLOBAL) public interface SeriesPlotStyleMap { @JsOverlay static SeriesPlotStyleMap create() { diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/SourceDescriptor.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/SourceDescriptor.java index 056e536efd8..f282276964e 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/SourceDescriptor.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/SourceDescriptor.java @@ -10,9 +10,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb.FigureDescriptor.SourceDescriptor", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.FigureDescriptor.SourceDescriptor", + namespace = JsPackage.GLOBAL) public class SourceDescriptor { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -189,12 +189,12 @@ static SourceDescriptor.ToObjectReturnType0 create() { public static native SourceDescriptor deserializeBinary(Uint8Array bytes); public static native SourceDescriptor deserializeBinaryFromReader( - SourceDescriptor message, Object reader); + SourceDescriptor message, Object 
reader); public static native void serializeBinaryToWriter(SourceDescriptor message, Object writer); public static native SourceDescriptor.ToObjectReturnType toObject( - boolean includeInstance, SourceDescriptor msg); + boolean includeInstance, SourceDescriptor msg); public native void clearOneClick(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/SourceTypeMap.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/SourceTypeMap.java index 6cba5b4e29b..bf732d404a3 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/SourceTypeMap.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/SourceTypeMap.java @@ -8,9 +8,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb.FigureDescriptor.SourceTypeMap", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.FigureDescriptor.SourceTypeMap", + namespace = JsPackage.GLOBAL) public interface SourceTypeMap { @JsOverlay static SourceTypeMap create() { diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/StringMapWithDefault.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/StringMapWithDefault.java index b06291a4600..928c38a1e5d 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/StringMapWithDefault.java +++ 
b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/StringMapWithDefault.java @@ -10,9 +10,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb.FigureDescriptor.StringMapWithDefault", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.FigureDescriptor.StringMapWithDefault", + namespace = JsPackage.GLOBAL) public class StringMapWithDefault { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -89,12 +89,12 @@ default void setValuesList(String[] valuesList) { public static native StringMapWithDefault deserializeBinary(Uint8Array bytes); public static native StringMapWithDefault deserializeBinaryFromReader( - StringMapWithDefault message, Object reader); + StringMapWithDefault message, Object reader); public static native void serializeBinaryToWriter(StringMapWithDefault message, Object writer); public static native StringMapWithDefault.ToObjectReturnType toObject( - boolean includeInstance, StringMapWithDefault msg); + boolean includeInstance, StringMapWithDefault msg); public native String addKeys(String value, double index); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/axisdescriptor/AxisFormatTypeMap.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/axisdescriptor/AxisFormatTypeMap.java index 74f06d8936d..1210e72f640 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/axisdescriptor/AxisFormatTypeMap.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/axisdescriptor/AxisFormatTypeMap.java 
@@ -8,9 +8,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb.FigureDescriptor.AxisDescriptor.AxisFormatTypeMap", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.FigureDescriptor.AxisDescriptor.AxisFormatTypeMap", + namespace = JsPackage.GLOBAL) public interface AxisFormatTypeMap { @JsOverlay static AxisFormatTypeMap create() { diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/axisdescriptor/AxisPositionMap.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/axisdescriptor/AxisPositionMap.java index 147ebb298b7..88a12b22702 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/axisdescriptor/AxisPositionMap.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/axisdescriptor/AxisPositionMap.java @@ -8,9 +8,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb.FigureDescriptor.AxisDescriptor.AxisPositionMap", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.FigureDescriptor.AxisDescriptor.AxisPositionMap", + namespace = JsPackage.GLOBAL) public interface AxisPositionMap { @JsOverlay static AxisPositionMap create() { diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/axisdescriptor/AxisTypeMap.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/axisdescriptor/AxisTypeMap.java index 326c75cf15c..9e41d296d46 100644 --- 
a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/axisdescriptor/AxisTypeMap.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/axisdescriptor/AxisTypeMap.java @@ -8,9 +8,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb.FigureDescriptor.AxisDescriptor.AxisTypeMap", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.FigureDescriptor.AxisDescriptor.AxisTypeMap", + namespace = JsPackage.GLOBAL) public interface AxisTypeMap { @JsOverlay static AxisTypeMap create() { diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/businesscalendardescriptor/BusinessPeriod.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/businesscalendardescriptor/BusinessPeriod.java index 0c906bda393..b513a850a66 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/businesscalendardescriptor/BusinessPeriod.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/businesscalendardescriptor/BusinessPeriod.java @@ -9,9 +9,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb.FigureDescriptor.BusinessCalendarDescriptor.BusinessPeriod", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.FigureDescriptor.BusinessCalendarDescriptor.BusinessPeriod", + namespace = JsPackage.GLOBAL) public class BusinessPeriod { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface 
ToObjectReturnType { @@ -56,12 +56,12 @@ static BusinessPeriod.ToObjectReturnType0 create() { public static native BusinessPeriod deserializeBinary(Uint8Array bytes); public static native BusinessPeriod deserializeBinaryFromReader( - BusinessPeriod message, Object reader); + BusinessPeriod message, Object reader); public static native void serializeBinaryToWriter(BusinessPeriod message, Object writer); public static native BusinessPeriod.ToObjectReturnType toObject( - boolean includeInstance, BusinessPeriod msg); + boolean includeInstance, BusinessPeriod msg); public native String getClose(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/businesscalendardescriptor/DayOfWeekMap.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/businesscalendardescriptor/DayOfWeekMap.java index ee993194b2e..be9e0e35a6d 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/businesscalendardescriptor/DayOfWeekMap.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/businesscalendardescriptor/DayOfWeekMap.java @@ -8,9 +8,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb.FigureDescriptor.BusinessCalendarDescriptor.DayOfWeekMap", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.FigureDescriptor.BusinessCalendarDescriptor.DayOfWeekMap", + namespace = JsPackage.GLOBAL) public interface DayOfWeekMap { @JsOverlay static DayOfWeekMap create() { diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/businesscalendardescriptor/Holiday.java 
b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/businesscalendardescriptor/Holiday.java index 476928aee1f..fb56bbc09da 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/businesscalendardescriptor/Holiday.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/businesscalendardescriptor/Holiday.java @@ -10,9 +10,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb.FigureDescriptor.BusinessCalendarDescriptor.Holiday", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.FigureDescriptor.BusinessCalendarDescriptor.Holiday", + namespace = JsPackage.GLOBAL) public class Holiday { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -75,15 +75,15 @@ static Holiday.ToObjectReturnType create() { @JsOverlay default void setBusinessPeriodsList( - Holiday.ToObjectReturnType.BusinessPeriodsListFieldType[] businessPeriodsList) { + Holiday.ToObjectReturnType.BusinessPeriodsListFieldType[] businessPeriodsList) { setBusinessPeriodsList( - Js.>uncheckedCast( - businessPeriodsList)); + Js.>uncheckedCast( + businessPeriodsList)); } @JsProperty void setBusinessPeriodsList( - JsArray businessPeriodsList); + JsArray businessPeriodsList); @JsProperty void setDate(Holiday.ToObjectReturnType.DateFieldType date); @@ -150,15 +150,15 @@ static Holiday.ToObjectReturnType0 create() { @JsOverlay default void setBusinessPeriodsList( - Holiday.ToObjectReturnType0.BusinessPeriodsListFieldType[] businessPeriodsList) { + Holiday.ToObjectReturnType0.BusinessPeriodsListFieldType[] businessPeriodsList) { setBusinessPeriodsList( - Js.>uncheckedCast( - businessPeriodsList)); + Js.>uncheckedCast( + 
businessPeriodsList)); } @JsProperty void setBusinessPeriodsList( - JsArray businessPeriodsList); + JsArray businessPeriodsList); @JsProperty void setDate(Holiday.ToObjectReturnType0.DateFieldType date); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/businesscalendardescriptor/LocalDate.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/businesscalendardescriptor/LocalDate.java index b2c86e7b712..db14be0581a 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/businesscalendardescriptor/LocalDate.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/businesscalendardescriptor/LocalDate.java @@ -9,9 +9,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb.FigureDescriptor.BusinessCalendarDescriptor.LocalDate", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.FigureDescriptor.BusinessCalendarDescriptor.LocalDate", + namespace = JsPackage.GLOBAL) public class LocalDate { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -72,7 +72,7 @@ static LocalDate.ToObjectReturnType0 create() { public static native void serializeBinaryToWriter(LocalDate message, Object writer); public static native LocalDate.ToObjectReturnType toObject( - boolean includeInstance, LocalDate msg); + boolean includeInstance, LocalDate msg); public native int getDay(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/chartdescriptor/ChartTypeMap.java 
b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/chartdescriptor/ChartTypeMap.java index 5182de5d2da..687301e1f29 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/chartdescriptor/ChartTypeMap.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb/figuredescriptor/chartdescriptor/ChartTypeMap.java @@ -8,9 +8,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb.FigureDescriptor.ChartDescriptor.ChartTypeMap", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb.FigureDescriptor.ChartDescriptor.ChartTypeMap", + namespace = JsPackage.GLOBAL) public interface ChartTypeMap { @JsOverlay static ChartTypeMap create() { diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb_service/BidirectionalStream.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb_service/BidirectionalStream.java index c6f49a62d5e..e340ca145e0 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb_service/BidirectionalStream.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb_service/BidirectionalStream.java @@ -5,9 +5,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb_service.BidirectionalStream", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb_service.BidirectionalStream", + namespace = JsPackage.GLOBAL) public interface BidirectionalStream { void cancel(); diff --git 
a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb_service/ConsoleService.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb_service/ConsoleService.java index f51914f6319..5402f72c8d6 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb_service/ConsoleService.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb_service/ConsoleService.java @@ -8,9 +8,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb_service.ConsoleService", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb_service.ConsoleService", + namespace = JsPackage.GLOBAL) public class ConsoleService { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface BindTableToVariableType { diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb_service/ConsoleServiceClient.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb_service/ConsoleServiceClient.java index a9b731e96cb..18a2b7813f7 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb_service/ConsoleServiceClient.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb_service/ConsoleServiceClient.java @@ -37,9 +37,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb_service.ConsoleServiceClient", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb_service.ConsoleServiceClient", + namespace = 
JsPackage.GLOBAL) public class ConsoleServiceClient { @JsFunction public interface BindTableToVariableCallbackFn { @@ -70,8 +70,8 @@ static ConsoleServiceClient.BindTableToVariableCallbackFn.P0Type create() { } void onInvoke( - ConsoleServiceClient.BindTableToVariableCallbackFn.P0Type p0, - BindTableToVariableResponse p1); + ConsoleServiceClient.BindTableToVariableCallbackFn.P0Type p0, + BindTableToVariableResponse p1); } @JsFunction @@ -103,8 +103,8 @@ static ConsoleServiceClient.BindTableToVariableMetadata_or_callbackFn.P0Type cre } void onInvoke( - ConsoleServiceClient.BindTableToVariableMetadata_or_callbackFn.P0Type p0, - BindTableToVariableResponse p1); + ConsoleServiceClient.BindTableToVariableMetadata_or_callbackFn.P0Type p0, + BindTableToVariableResponse p1); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -163,8 +163,7 @@ static ConsoleServiceClient.CancelCommandCallbackFn.P0Type create() { void setMetadata(BrowserHeaders metadata); } - void onInvoke(ConsoleServiceClient.CancelCommandCallbackFn.P0Type p0, - CancelCommandResponse p1); + void onInvoke(ConsoleServiceClient.CancelCommandCallbackFn.P0Type p0, CancelCommandResponse p1); } @JsFunction @@ -196,8 +195,8 @@ static ConsoleServiceClient.CancelCommandMetadata_or_callbackFn.P0Type create() } void onInvoke( - ConsoleServiceClient.CancelCommandMetadata_or_callbackFn.P0Type p0, - CancelCommandResponse p1); + ConsoleServiceClient.CancelCommandMetadata_or_callbackFn.P0Type p0, + CancelCommandResponse p1); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -257,7 +256,7 @@ static ConsoleServiceClient.ChangeDocumentCallbackFn.P0Type create() { } void onInvoke( - ConsoleServiceClient.ChangeDocumentCallbackFn.P0Type p0, ChangeDocumentResponse p1); + ConsoleServiceClient.ChangeDocumentCallbackFn.P0Type p0, ChangeDocumentResponse p1); } @JsFunction @@ -289,8 +288,8 @@ static ConsoleServiceClient.ChangeDocumentMetadata_or_callbackFn.P0Type create() } void onInvoke( - 
ConsoleServiceClient.ChangeDocumentMetadata_or_callbackFn.P0Type p0, - ChangeDocumentResponse p1); + ConsoleServiceClient.ChangeDocumentMetadata_or_callbackFn.P0Type p0, + ChangeDocumentResponse p1); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -349,8 +348,7 @@ static ConsoleServiceClient.CloseDocumentCallbackFn.P0Type create() { void setMetadata(BrowserHeaders metadata); } - void onInvoke(ConsoleServiceClient.CloseDocumentCallbackFn.P0Type p0, - CloseDocumentResponse p1); + void onInvoke(ConsoleServiceClient.CloseDocumentCallbackFn.P0Type p0, CloseDocumentResponse p1); } @JsFunction @@ -382,8 +380,8 @@ static ConsoleServiceClient.CloseDocumentMetadata_or_callbackFn.P0Type create() } void onInvoke( - ConsoleServiceClient.CloseDocumentMetadata_or_callbackFn.P0Type p0, - CloseDocumentResponse p1); + ConsoleServiceClient.CloseDocumentMetadata_or_callbackFn.P0Type p0, + CloseDocumentResponse p1); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -443,7 +441,7 @@ static ConsoleServiceClient.ExecuteCommandCallbackFn.P0Type create() { } void onInvoke( - ConsoleServiceClient.ExecuteCommandCallbackFn.P0Type p0, ExecuteCommandResponse p1); + ConsoleServiceClient.ExecuteCommandCallbackFn.P0Type p0, ExecuteCommandResponse p1); } @JsFunction @@ -475,8 +473,8 @@ static ConsoleServiceClient.ExecuteCommandMetadata_or_callbackFn.P0Type create() } void onInvoke( - ConsoleServiceClient.ExecuteCommandMetadata_or_callbackFn.P0Type p0, - ExecuteCommandResponse p1); + ConsoleServiceClient.ExecuteCommandMetadata_or_callbackFn.P0Type p0, + ExecuteCommandResponse p1); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -567,8 +565,7 @@ static ConsoleServiceClient.FetchFigureMetadata_or_callbackFn.P0Type create() { } void onInvoke( - ConsoleServiceClient.FetchFigureMetadata_or_callbackFn.P0Type p0, - FetchFigureResponse p1); + ConsoleServiceClient.FetchFigureMetadata_or_callbackFn.P0Type p0, FetchFigureResponse p1); } 
@JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -628,8 +625,8 @@ static ConsoleServiceClient.FetchPandasTableCallbackFn.P0Type create() { } void onInvoke( - ConsoleServiceClient.FetchPandasTableCallbackFn.P0Type p0, - ExportedTableCreationResponse p1); + ConsoleServiceClient.FetchPandasTableCallbackFn.P0Type p0, + ExportedTableCreationResponse p1); } @JsFunction @@ -661,8 +658,8 @@ static ConsoleServiceClient.FetchPandasTableMetadata_or_callbackFn.P0Type create } void onInvoke( - ConsoleServiceClient.FetchPandasTableMetadata_or_callbackFn.P0Type p0, - ExportedTableCreationResponse p1); + ConsoleServiceClient.FetchPandasTableMetadata_or_callbackFn.P0Type p0, + ExportedTableCreationResponse p1); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -722,7 +719,7 @@ static ConsoleServiceClient.FetchTableCallbackFn.P0Type create() { } void onInvoke( - ConsoleServiceClient.FetchTableCallbackFn.P0Type p0, ExportedTableCreationResponse p1); + ConsoleServiceClient.FetchTableCallbackFn.P0Type p0, ExportedTableCreationResponse p1); } @JsFunction @@ -753,8 +750,7 @@ static ConsoleServiceClient.FetchTableMapCallbackFn.P0Type create() { void setMetadata(BrowserHeaders metadata); } - void onInvoke(ConsoleServiceClient.FetchTableMapCallbackFn.P0Type p0, - FetchTableMapResponse p1); + void onInvoke(ConsoleServiceClient.FetchTableMapCallbackFn.P0Type p0, FetchTableMapResponse p1); } @JsFunction @@ -786,8 +782,8 @@ static ConsoleServiceClient.FetchTableMapMetadata_or_callbackFn.P0Type create() } void onInvoke( - ConsoleServiceClient.FetchTableMapMetadata_or_callbackFn.P0Type p0, - FetchTableMapResponse p1); + ConsoleServiceClient.FetchTableMapMetadata_or_callbackFn.P0Type p0, + FetchTableMapResponse p1); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -847,8 +843,8 @@ static ConsoleServiceClient.FetchTableMetadata_or_callbackFn.P0Type create() { } void onInvoke( - 
ConsoleServiceClient.FetchTableMetadata_or_callbackFn.P0Type p0, - ExportedTableCreationResponse p1); + ConsoleServiceClient.FetchTableMetadata_or_callbackFn.P0Type p0, + ExportedTableCreationResponse p1); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -908,8 +904,7 @@ static ConsoleServiceClient.GetCompletionItemsCallbackFn.P0Type create() { } void onInvoke( - ConsoleServiceClient.GetCompletionItemsCallbackFn.P0Type p0, - GetCompletionItemsResponse p1); + ConsoleServiceClient.GetCompletionItemsCallbackFn.P0Type p0, GetCompletionItemsResponse p1); } @JsFunction @@ -941,8 +936,8 @@ static ConsoleServiceClient.GetCompletionItemsMetadata_or_callbackFn.P0Type crea } void onInvoke( - ConsoleServiceClient.GetCompletionItemsMetadata_or_callbackFn.P0Type p0, - GetCompletionItemsResponse p1); + ConsoleServiceClient.GetCompletionItemsMetadata_or_callbackFn.P0Type p0, + GetCompletionItemsResponse p1); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -1002,7 +997,7 @@ static ConsoleServiceClient.GetConsoleTypesCallbackFn.P0Type create() { } void onInvoke( - ConsoleServiceClient.GetConsoleTypesCallbackFn.P0Type p0, GetConsoleTypesResponse p1); + ConsoleServiceClient.GetConsoleTypesCallbackFn.P0Type p0, GetConsoleTypesResponse p1); } @JsFunction @@ -1034,8 +1029,8 @@ static ConsoleServiceClient.GetConsoleTypesMetadata_or_callbackFn.P0Type create( } void onInvoke( - ConsoleServiceClient.GetConsoleTypesMetadata_or_callbackFn.P0Type p0, - GetConsoleTypesResponse p1); + ConsoleServiceClient.GetConsoleTypesMetadata_or_callbackFn.P0Type p0, + GetConsoleTypesResponse p1); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -1094,8 +1089,7 @@ static ConsoleServiceClient.OpenDocumentCallbackFn.P0Type create() { void setMetadata(BrowserHeaders metadata); } - void onInvoke(ConsoleServiceClient.OpenDocumentCallbackFn.P0Type p0, - OpenDocumentResponse p1); + void onInvoke(ConsoleServiceClient.OpenDocumentCallbackFn.P0Type 
p0, OpenDocumentResponse p1); } @JsFunction @@ -1127,8 +1121,7 @@ static ConsoleServiceClient.OpenDocumentMetadata_or_callbackFn.P0Type create() { } void onInvoke( - ConsoleServiceClient.OpenDocumentMetadata_or_callbackFn.P0Type p0, - OpenDocumentResponse p1); + ConsoleServiceClient.OpenDocumentMetadata_or_callbackFn.P0Type p0, OpenDocumentResponse p1); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -1187,8 +1180,7 @@ static ConsoleServiceClient.StartConsoleCallbackFn.P0Type create() { void setMetadata(BrowserHeaders metadata); } - void onInvoke(ConsoleServiceClient.StartConsoleCallbackFn.P0Type p0, - StartConsoleResponse p1); + void onInvoke(ConsoleServiceClient.StartConsoleCallbackFn.P0Type p0, StartConsoleResponse p1); } @JsFunction @@ -1220,8 +1212,7 @@ static ConsoleServiceClient.StartConsoleMetadata_or_callbackFn.P0Type create() { } void onInvoke( - ConsoleServiceClient.StartConsoleMetadata_or_callbackFn.P0Type p0, - StartConsoleResponse p1); + ConsoleServiceClient.StartConsoleMetadata_or_callbackFn.P0Type p0, StartConsoleResponse p1); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -1260,683 +1251,683 @@ public ConsoleServiceClient(String serviceHost) {} @JsOverlay public final UnaryResponse bindTableToVariable( - BindTableToVariableRequest requestMessage, - ConsoleServiceClient.BindTableToVariableMetadata_or_callbackFn metadata_or_callback, - ConsoleServiceClient.BindTableToVariableCallbackFn callback) { + BindTableToVariableRequest requestMessage, + ConsoleServiceClient.BindTableToVariableMetadata_or_callbackFn metadata_or_callback, + ConsoleServiceClient.BindTableToVariableCallbackFn callback) { return bindTableToVariable( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse bindTableToVariable( - BindTableToVariableRequest requestMessage, - 
ConsoleServiceClient.BindTableToVariableMetadata_or_callbackFn metadata_or_callback) { + BindTableToVariableRequest requestMessage, + ConsoleServiceClient.BindTableToVariableMetadata_or_callbackFn metadata_or_callback) { return bindTableToVariable( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } public native UnaryResponse bindTableToVariable( - BindTableToVariableRequest requestMessage, - ConsoleServiceClient.BindTableToVariableMetadata_or_callbackUnionType metadata_or_callback, - ConsoleServiceClient.BindTableToVariableCallbackFn callback); + BindTableToVariableRequest requestMessage, + ConsoleServiceClient.BindTableToVariableMetadata_or_callbackUnionType metadata_or_callback, + ConsoleServiceClient.BindTableToVariableCallbackFn callback); public native UnaryResponse bindTableToVariable( - BindTableToVariableRequest requestMessage, - ConsoleServiceClient.BindTableToVariableMetadata_or_callbackUnionType metadata_or_callback); + BindTableToVariableRequest requestMessage, + ConsoleServiceClient.BindTableToVariableMetadata_or_callbackUnionType metadata_or_callback); @JsOverlay public final UnaryResponse bindTableToVariable( - BindTableToVariableRequest requestMessage, - BrowserHeaders metadata_or_callback, - ConsoleServiceClient.BindTableToVariableCallbackFn callback) { + BindTableToVariableRequest requestMessage, + BrowserHeaders metadata_or_callback, + ConsoleServiceClient.BindTableToVariableCallbackFn callback) { return bindTableToVariable( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse bindTableToVariable( - BindTableToVariableRequest requestMessage, BrowserHeaders metadata_or_callback) { + BindTableToVariableRequest requestMessage, BrowserHeaders metadata_or_callback) { return bindTableToVariable( - requestMessage, - Js.uncheckedCast( - 
metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } @JsOverlay public final UnaryResponse cancelCommand( - CancelCommandRequest requestMessage, - BrowserHeaders metadata_or_callback, - ConsoleServiceClient.CancelCommandCallbackFn callback) { + CancelCommandRequest requestMessage, + BrowserHeaders metadata_or_callback, + ConsoleServiceClient.CancelCommandCallbackFn callback) { return cancelCommand( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse cancelCommand( - CancelCommandRequest requestMessage, BrowserHeaders metadata_or_callback) { + CancelCommandRequest requestMessage, BrowserHeaders metadata_or_callback) { return cancelCommand( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } @JsOverlay public final UnaryResponse cancelCommand( - CancelCommandRequest requestMessage, - ConsoleServiceClient.CancelCommandMetadata_or_callbackFn metadata_or_callback, - ConsoleServiceClient.CancelCommandCallbackFn callback) { + CancelCommandRequest requestMessage, + ConsoleServiceClient.CancelCommandMetadata_or_callbackFn metadata_or_callback, + ConsoleServiceClient.CancelCommandCallbackFn callback) { return cancelCommand( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse cancelCommand( - CancelCommandRequest requestMessage, - ConsoleServiceClient.CancelCommandMetadata_or_callbackFn metadata_or_callback) { + CancelCommandRequest requestMessage, + ConsoleServiceClient.CancelCommandMetadata_or_callbackFn metadata_or_callback) { return cancelCommand( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } public 
native UnaryResponse cancelCommand( - CancelCommandRequest requestMessage, - ConsoleServiceClient.CancelCommandMetadata_or_callbackUnionType metadata_or_callback, - ConsoleServiceClient.CancelCommandCallbackFn callback); + CancelCommandRequest requestMessage, + ConsoleServiceClient.CancelCommandMetadata_or_callbackUnionType metadata_or_callback, + ConsoleServiceClient.CancelCommandCallbackFn callback); public native UnaryResponse cancelCommand( - CancelCommandRequest requestMessage, - ConsoleServiceClient.CancelCommandMetadata_or_callbackUnionType metadata_or_callback); + CancelCommandRequest requestMessage, + ConsoleServiceClient.CancelCommandMetadata_or_callbackUnionType metadata_or_callback); @JsOverlay public final UnaryResponse changeDocument( - ChangeDocumentRequest requestMessage, - BrowserHeaders metadata_or_callback, - ConsoleServiceClient.ChangeDocumentCallbackFn callback) { + ChangeDocumentRequest requestMessage, + BrowserHeaders metadata_or_callback, + ConsoleServiceClient.ChangeDocumentCallbackFn callback) { return changeDocument( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse changeDocument( - ChangeDocumentRequest requestMessage, BrowserHeaders metadata_or_callback) { + ChangeDocumentRequest requestMessage, BrowserHeaders metadata_or_callback) { return changeDocument( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } @JsOverlay public final UnaryResponse changeDocument( - ChangeDocumentRequest requestMessage, - ConsoleServiceClient.ChangeDocumentMetadata_or_callbackFn metadata_or_callback, - ConsoleServiceClient.ChangeDocumentCallbackFn callback) { + ChangeDocumentRequest requestMessage, + ConsoleServiceClient.ChangeDocumentMetadata_or_callbackFn metadata_or_callback, + ConsoleServiceClient.ChangeDocumentCallbackFn callback) { 
return changeDocument( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse changeDocument( - ChangeDocumentRequest requestMessage, - ConsoleServiceClient.ChangeDocumentMetadata_or_callbackFn metadata_or_callback) { + ChangeDocumentRequest requestMessage, + ConsoleServiceClient.ChangeDocumentMetadata_or_callbackFn metadata_or_callback) { return changeDocument( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } public native UnaryResponse changeDocument( - ChangeDocumentRequest requestMessage, - ConsoleServiceClient.ChangeDocumentMetadata_or_callbackUnionType metadata_or_callback, - ConsoleServiceClient.ChangeDocumentCallbackFn callback); + ChangeDocumentRequest requestMessage, + ConsoleServiceClient.ChangeDocumentMetadata_or_callbackUnionType metadata_or_callback, + ConsoleServiceClient.ChangeDocumentCallbackFn callback); public native UnaryResponse changeDocument( - ChangeDocumentRequest requestMessage, - ConsoleServiceClient.ChangeDocumentMetadata_or_callbackUnionType metadata_or_callback); + ChangeDocumentRequest requestMessage, + ConsoleServiceClient.ChangeDocumentMetadata_or_callbackUnionType metadata_or_callback); @JsOverlay public final UnaryResponse closeDocument( - CloseDocumentRequest requestMessage, - BrowserHeaders metadata_or_callback, - ConsoleServiceClient.CloseDocumentCallbackFn callback) { + CloseDocumentRequest requestMessage, + BrowserHeaders metadata_or_callback, + ConsoleServiceClient.CloseDocumentCallbackFn callback) { return closeDocument( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse closeDocument( - CloseDocumentRequest requestMessage, BrowserHeaders metadata_or_callback) { + 
CloseDocumentRequest requestMessage, BrowserHeaders metadata_or_callback) { return closeDocument( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } @JsOverlay public final UnaryResponse closeDocument( - CloseDocumentRequest requestMessage, - ConsoleServiceClient.CloseDocumentMetadata_or_callbackFn metadata_or_callback, - ConsoleServiceClient.CloseDocumentCallbackFn callback) { + CloseDocumentRequest requestMessage, + ConsoleServiceClient.CloseDocumentMetadata_or_callbackFn metadata_or_callback, + ConsoleServiceClient.CloseDocumentCallbackFn callback) { return closeDocument( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse closeDocument( - CloseDocumentRequest requestMessage, - ConsoleServiceClient.CloseDocumentMetadata_or_callbackFn metadata_or_callback) { + CloseDocumentRequest requestMessage, + ConsoleServiceClient.CloseDocumentMetadata_or_callbackFn metadata_or_callback) { return closeDocument( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } public native UnaryResponse closeDocument( - CloseDocumentRequest requestMessage, - ConsoleServiceClient.CloseDocumentMetadata_or_callbackUnionType metadata_or_callback, - ConsoleServiceClient.CloseDocumentCallbackFn callback); + CloseDocumentRequest requestMessage, + ConsoleServiceClient.CloseDocumentMetadata_or_callbackUnionType metadata_or_callback, + ConsoleServiceClient.CloseDocumentCallbackFn callback); public native UnaryResponse closeDocument( - CloseDocumentRequest requestMessage, - ConsoleServiceClient.CloseDocumentMetadata_or_callbackUnionType metadata_or_callback); + CloseDocumentRequest requestMessage, + ConsoleServiceClient.CloseDocumentMetadata_or_callbackUnionType metadata_or_callback); @JsOverlay public final 
UnaryResponse executeCommand( - ExecuteCommandRequest requestMessage, - BrowserHeaders metadata_or_callback, - ConsoleServiceClient.ExecuteCommandCallbackFn callback) { + ExecuteCommandRequest requestMessage, + BrowserHeaders metadata_or_callback, + ConsoleServiceClient.ExecuteCommandCallbackFn callback) { return executeCommand( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse executeCommand( - ExecuteCommandRequest requestMessage, BrowserHeaders metadata_or_callback) { + ExecuteCommandRequest requestMessage, BrowserHeaders metadata_or_callback) { return executeCommand( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } @JsOverlay public final UnaryResponse executeCommand( - ExecuteCommandRequest requestMessage, - ConsoleServiceClient.ExecuteCommandMetadata_or_callbackFn metadata_or_callback, - ConsoleServiceClient.ExecuteCommandCallbackFn callback) { + ExecuteCommandRequest requestMessage, + ConsoleServiceClient.ExecuteCommandMetadata_or_callbackFn metadata_or_callback, + ConsoleServiceClient.ExecuteCommandCallbackFn callback) { return executeCommand( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse executeCommand( - ExecuteCommandRequest requestMessage, - ConsoleServiceClient.ExecuteCommandMetadata_or_callbackFn metadata_or_callback) { + ExecuteCommandRequest requestMessage, + ConsoleServiceClient.ExecuteCommandMetadata_or_callbackFn metadata_or_callback) { return executeCommand( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } public native UnaryResponse executeCommand( - ExecuteCommandRequest requestMessage, - 
ConsoleServiceClient.ExecuteCommandMetadata_or_callbackUnionType metadata_or_callback, - ConsoleServiceClient.ExecuteCommandCallbackFn callback); + ExecuteCommandRequest requestMessage, + ConsoleServiceClient.ExecuteCommandMetadata_or_callbackUnionType metadata_or_callback, + ConsoleServiceClient.ExecuteCommandCallbackFn callback); public native UnaryResponse executeCommand( - ExecuteCommandRequest requestMessage, - ConsoleServiceClient.ExecuteCommandMetadata_or_callbackUnionType metadata_or_callback); + ExecuteCommandRequest requestMessage, + ConsoleServiceClient.ExecuteCommandMetadata_or_callbackUnionType metadata_or_callback); @JsOverlay public final UnaryResponse fetchFigure( - FetchFigureRequest requestMessage, - BrowserHeaders metadata_or_callback, - ConsoleServiceClient.FetchFigureCallbackFn callback) { + FetchFigureRequest requestMessage, + BrowserHeaders metadata_or_callback, + ConsoleServiceClient.FetchFigureCallbackFn callback) { return fetchFigure( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse fetchFigure( - FetchFigureRequest requestMessage, BrowserHeaders metadata_or_callback) { + FetchFigureRequest requestMessage, BrowserHeaders metadata_or_callback) { return fetchFigure( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } @JsOverlay public final UnaryResponse fetchFigure( - FetchFigureRequest requestMessage, - ConsoleServiceClient.FetchFigureMetadata_or_callbackFn metadata_or_callback, - ConsoleServiceClient.FetchFigureCallbackFn callback) { + FetchFigureRequest requestMessage, + ConsoleServiceClient.FetchFigureMetadata_or_callbackFn metadata_or_callback, + ConsoleServiceClient.FetchFigureCallbackFn callback) { return fetchFigure( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + 
Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse fetchFigure( - FetchFigureRequest requestMessage, - ConsoleServiceClient.FetchFigureMetadata_or_callbackFn metadata_or_callback) { + FetchFigureRequest requestMessage, + ConsoleServiceClient.FetchFigureMetadata_or_callbackFn metadata_or_callback) { return fetchFigure( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } public native UnaryResponse fetchFigure( - FetchFigureRequest requestMessage, - ConsoleServiceClient.FetchFigureMetadata_or_callbackUnionType metadata_or_callback, - ConsoleServiceClient.FetchFigureCallbackFn callback); + FetchFigureRequest requestMessage, + ConsoleServiceClient.FetchFigureMetadata_or_callbackUnionType metadata_or_callback, + ConsoleServiceClient.FetchFigureCallbackFn callback); public native UnaryResponse fetchFigure( - FetchFigureRequest requestMessage, - ConsoleServiceClient.FetchFigureMetadata_or_callbackUnionType metadata_or_callback); + FetchFigureRequest requestMessage, + ConsoleServiceClient.FetchFigureMetadata_or_callbackUnionType metadata_or_callback); @JsOverlay public final UnaryResponse fetchPandasTable( - FetchPandasTableRequest requestMessage, - BrowserHeaders metadata_or_callback, - ConsoleServiceClient.FetchPandasTableCallbackFn callback) { + FetchPandasTableRequest requestMessage, + BrowserHeaders metadata_or_callback, + ConsoleServiceClient.FetchPandasTableCallbackFn callback) { return fetchPandasTable( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse fetchPandasTable( - FetchPandasTableRequest requestMessage, BrowserHeaders metadata_or_callback) { + FetchPandasTableRequest requestMessage, BrowserHeaders metadata_or_callback) { return fetchPandasTable( - requestMessage, - Js.uncheckedCast( - 
metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } @JsOverlay public final UnaryResponse fetchPandasTable( - FetchPandasTableRequest requestMessage, - ConsoleServiceClient.FetchPandasTableMetadata_or_callbackFn metadata_or_callback, - ConsoleServiceClient.FetchPandasTableCallbackFn callback) { + FetchPandasTableRequest requestMessage, + ConsoleServiceClient.FetchPandasTableMetadata_or_callbackFn metadata_or_callback, + ConsoleServiceClient.FetchPandasTableCallbackFn callback) { return fetchPandasTable( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse fetchPandasTable( - FetchPandasTableRequest requestMessage, - ConsoleServiceClient.FetchPandasTableMetadata_or_callbackFn metadata_or_callback) { + FetchPandasTableRequest requestMessage, + ConsoleServiceClient.FetchPandasTableMetadata_or_callbackFn metadata_or_callback) { return fetchPandasTable( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } public native UnaryResponse fetchPandasTable( - FetchPandasTableRequest requestMessage, - ConsoleServiceClient.FetchPandasTableMetadata_or_callbackUnionType metadata_or_callback, - ConsoleServiceClient.FetchPandasTableCallbackFn callback); + FetchPandasTableRequest requestMessage, + ConsoleServiceClient.FetchPandasTableMetadata_or_callbackUnionType metadata_or_callback, + ConsoleServiceClient.FetchPandasTableCallbackFn callback); public native UnaryResponse fetchPandasTable( - FetchPandasTableRequest requestMessage, - ConsoleServiceClient.FetchPandasTableMetadata_or_callbackUnionType metadata_or_callback); + FetchPandasTableRequest requestMessage, + ConsoleServiceClient.FetchPandasTableMetadata_or_callbackUnionType metadata_or_callback); @JsOverlay public final UnaryResponse fetchTable( - FetchTableRequest 
requestMessage, - BrowserHeaders metadata_or_callback, - ConsoleServiceClient.FetchTableCallbackFn callback) { + FetchTableRequest requestMessage, + BrowserHeaders metadata_or_callback, + ConsoleServiceClient.FetchTableCallbackFn callback) { return fetchTable( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse fetchTable( - FetchTableRequest requestMessage, BrowserHeaders metadata_or_callback) { + FetchTableRequest requestMessage, BrowserHeaders metadata_or_callback) { return fetchTable( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } @JsOverlay public final UnaryResponse fetchTable( - FetchTableRequest requestMessage, - ConsoleServiceClient.FetchTableMetadata_or_callbackFn metadata_or_callback, - ConsoleServiceClient.FetchTableCallbackFn callback) { + FetchTableRequest requestMessage, + ConsoleServiceClient.FetchTableMetadata_or_callbackFn metadata_or_callback, + ConsoleServiceClient.FetchTableCallbackFn callback) { return fetchTable( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse fetchTable( - FetchTableRequest requestMessage, - ConsoleServiceClient.FetchTableMetadata_or_callbackFn metadata_or_callback) { + FetchTableRequest requestMessage, + ConsoleServiceClient.FetchTableMetadata_or_callbackFn metadata_or_callback) { return fetchTable( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } public native UnaryResponse fetchTable( - FetchTableRequest requestMessage, - ConsoleServiceClient.FetchTableMetadata_or_callbackUnionType metadata_or_callback, - ConsoleServiceClient.FetchTableCallbackFn callback); + FetchTableRequest 
requestMessage, + ConsoleServiceClient.FetchTableMetadata_or_callbackUnionType metadata_or_callback, + ConsoleServiceClient.FetchTableCallbackFn callback); public native UnaryResponse fetchTable( - FetchTableRequest requestMessage, - ConsoleServiceClient.FetchTableMetadata_or_callbackUnionType metadata_or_callback); + FetchTableRequest requestMessage, + ConsoleServiceClient.FetchTableMetadata_or_callbackUnionType metadata_or_callback); @JsOverlay public final UnaryResponse fetchTableMap( - FetchTableMapRequest requestMessage, - BrowserHeaders metadata_or_callback, - ConsoleServiceClient.FetchTableMapCallbackFn callback) { + FetchTableMapRequest requestMessage, + BrowserHeaders metadata_or_callback, + ConsoleServiceClient.FetchTableMapCallbackFn callback) { return fetchTableMap( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse fetchTableMap( - FetchTableMapRequest requestMessage, BrowserHeaders metadata_or_callback) { + FetchTableMapRequest requestMessage, BrowserHeaders metadata_or_callback) { return fetchTableMap( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } @JsOverlay public final UnaryResponse fetchTableMap( - FetchTableMapRequest requestMessage, - ConsoleServiceClient.FetchTableMapMetadata_or_callbackFn metadata_or_callback, - ConsoleServiceClient.FetchTableMapCallbackFn callback) { + FetchTableMapRequest requestMessage, + ConsoleServiceClient.FetchTableMapMetadata_or_callbackFn metadata_or_callback, + ConsoleServiceClient.FetchTableMapCallbackFn callback) { return fetchTableMap( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse fetchTableMap( - FetchTableMapRequest requestMessage, - 
ConsoleServiceClient.FetchTableMapMetadata_or_callbackFn metadata_or_callback) { + FetchTableMapRequest requestMessage, + ConsoleServiceClient.FetchTableMapMetadata_or_callbackFn metadata_or_callback) { return fetchTableMap( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } public native UnaryResponse fetchTableMap( - FetchTableMapRequest requestMessage, - ConsoleServiceClient.FetchTableMapMetadata_or_callbackUnionType metadata_or_callback, - ConsoleServiceClient.FetchTableMapCallbackFn callback); + FetchTableMapRequest requestMessage, + ConsoleServiceClient.FetchTableMapMetadata_or_callbackUnionType metadata_or_callback, + ConsoleServiceClient.FetchTableMapCallbackFn callback); public native UnaryResponse fetchTableMap( - FetchTableMapRequest requestMessage, - ConsoleServiceClient.FetchTableMapMetadata_or_callbackUnionType metadata_or_callback); + FetchTableMapRequest requestMessage, + ConsoleServiceClient.FetchTableMapMetadata_or_callbackUnionType metadata_or_callback); @JsOverlay public final UnaryResponse getCompletionItems( - GetCompletionItemsRequest requestMessage, - BrowserHeaders metadata_or_callback, - ConsoleServiceClient.GetCompletionItemsCallbackFn callback) { + GetCompletionItemsRequest requestMessage, + BrowserHeaders metadata_or_callback, + ConsoleServiceClient.GetCompletionItemsCallbackFn callback) { return getCompletionItems( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse getCompletionItems( - GetCompletionItemsRequest requestMessage, BrowserHeaders metadata_or_callback) { + GetCompletionItemsRequest requestMessage, BrowserHeaders metadata_or_callback) { return getCompletionItems( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } @JsOverlay public 
final UnaryResponse getCompletionItems( - GetCompletionItemsRequest requestMessage, - ConsoleServiceClient.GetCompletionItemsMetadata_or_callbackFn metadata_or_callback, - ConsoleServiceClient.GetCompletionItemsCallbackFn callback) { + GetCompletionItemsRequest requestMessage, + ConsoleServiceClient.GetCompletionItemsMetadata_or_callbackFn metadata_or_callback, + ConsoleServiceClient.GetCompletionItemsCallbackFn callback) { return getCompletionItems( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse getCompletionItems( - GetCompletionItemsRequest requestMessage, - ConsoleServiceClient.GetCompletionItemsMetadata_or_callbackFn metadata_or_callback) { + GetCompletionItemsRequest requestMessage, + ConsoleServiceClient.GetCompletionItemsMetadata_or_callbackFn metadata_or_callback) { return getCompletionItems( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } public native UnaryResponse getCompletionItems( - GetCompletionItemsRequest requestMessage, - ConsoleServiceClient.GetCompletionItemsMetadata_or_callbackUnionType metadata_or_callback, - ConsoleServiceClient.GetCompletionItemsCallbackFn callback); + GetCompletionItemsRequest requestMessage, + ConsoleServiceClient.GetCompletionItemsMetadata_or_callbackUnionType metadata_or_callback, + ConsoleServiceClient.GetCompletionItemsCallbackFn callback); public native UnaryResponse getCompletionItems( - GetCompletionItemsRequest requestMessage, - ConsoleServiceClient.GetCompletionItemsMetadata_or_callbackUnionType metadata_or_callback); + GetCompletionItemsRequest requestMessage, + ConsoleServiceClient.GetCompletionItemsMetadata_or_callbackUnionType metadata_or_callback); @JsOverlay public final UnaryResponse getConsoleTypes( - GetConsoleTypesRequest requestMessage, - BrowserHeaders metadata_or_callback, - 
ConsoleServiceClient.GetConsoleTypesCallbackFn callback) { + GetConsoleTypesRequest requestMessage, + BrowserHeaders metadata_or_callback, + ConsoleServiceClient.GetConsoleTypesCallbackFn callback) { return getConsoleTypes( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse getConsoleTypes( - GetConsoleTypesRequest requestMessage, BrowserHeaders metadata_or_callback) { + GetConsoleTypesRequest requestMessage, BrowserHeaders metadata_or_callback) { return getConsoleTypes( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } @JsOverlay public final UnaryResponse getConsoleTypes( - GetConsoleTypesRequest requestMessage, - ConsoleServiceClient.GetConsoleTypesMetadata_or_callbackFn metadata_or_callback, - ConsoleServiceClient.GetConsoleTypesCallbackFn callback) { + GetConsoleTypesRequest requestMessage, + ConsoleServiceClient.GetConsoleTypesMetadata_or_callbackFn metadata_or_callback, + ConsoleServiceClient.GetConsoleTypesCallbackFn callback) { return getConsoleTypes( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse getConsoleTypes( - GetConsoleTypesRequest requestMessage, - ConsoleServiceClient.GetConsoleTypesMetadata_or_callbackFn metadata_or_callback) { + GetConsoleTypesRequest requestMessage, + ConsoleServiceClient.GetConsoleTypesMetadata_or_callbackFn metadata_or_callback) { return getConsoleTypes( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } public native UnaryResponse getConsoleTypes( - GetConsoleTypesRequest requestMessage, - ConsoleServiceClient.GetConsoleTypesMetadata_or_callbackUnionType metadata_or_callback, - 
ConsoleServiceClient.GetConsoleTypesCallbackFn callback); + GetConsoleTypesRequest requestMessage, + ConsoleServiceClient.GetConsoleTypesMetadata_or_callbackUnionType metadata_or_callback, + ConsoleServiceClient.GetConsoleTypesCallbackFn callback); public native UnaryResponse getConsoleTypes( - GetConsoleTypesRequest requestMessage, - ConsoleServiceClient.GetConsoleTypesMetadata_or_callbackUnionType metadata_or_callback); + GetConsoleTypesRequest requestMessage, + ConsoleServiceClient.GetConsoleTypesMetadata_or_callbackUnionType metadata_or_callback); @JsOverlay public final UnaryResponse openDocument( - OpenDocumentRequest requestMessage, - BrowserHeaders metadata_or_callback, - ConsoleServiceClient.OpenDocumentCallbackFn callback) { + OpenDocumentRequest requestMessage, + BrowserHeaders metadata_or_callback, + ConsoleServiceClient.OpenDocumentCallbackFn callback) { return openDocument( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse openDocument( - OpenDocumentRequest requestMessage, BrowserHeaders metadata_or_callback) { + OpenDocumentRequest requestMessage, BrowserHeaders metadata_or_callback) { return openDocument( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } @JsOverlay public final UnaryResponse openDocument( - OpenDocumentRequest requestMessage, - ConsoleServiceClient.OpenDocumentMetadata_or_callbackFn metadata_or_callback, - ConsoleServiceClient.OpenDocumentCallbackFn callback) { + OpenDocumentRequest requestMessage, + ConsoleServiceClient.OpenDocumentMetadata_or_callbackFn metadata_or_callback, + ConsoleServiceClient.OpenDocumentCallbackFn callback) { return openDocument( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } 
@JsOverlay public final UnaryResponse openDocument( - OpenDocumentRequest requestMessage, - ConsoleServiceClient.OpenDocumentMetadata_or_callbackFn metadata_or_callback) { + OpenDocumentRequest requestMessage, + ConsoleServiceClient.OpenDocumentMetadata_or_callbackFn metadata_or_callback) { return openDocument( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } public native UnaryResponse openDocument( - OpenDocumentRequest requestMessage, - ConsoleServiceClient.OpenDocumentMetadata_or_callbackUnionType metadata_or_callback, - ConsoleServiceClient.OpenDocumentCallbackFn callback); + OpenDocumentRequest requestMessage, + ConsoleServiceClient.OpenDocumentMetadata_or_callbackUnionType metadata_or_callback, + ConsoleServiceClient.OpenDocumentCallbackFn callback); public native UnaryResponse openDocument( - OpenDocumentRequest requestMessage, - ConsoleServiceClient.OpenDocumentMetadata_or_callbackUnionType metadata_or_callback); + OpenDocumentRequest requestMessage, + ConsoleServiceClient.OpenDocumentMetadata_or_callbackUnionType metadata_or_callback); @JsOverlay public final UnaryResponse startConsole( - StartConsoleRequest requestMessage, - BrowserHeaders metadata_or_callback, - ConsoleServiceClient.StartConsoleCallbackFn callback) { + StartConsoleRequest requestMessage, + BrowserHeaders metadata_or_callback, + ConsoleServiceClient.StartConsoleCallbackFn callback) { return startConsole( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse startConsole( - StartConsoleRequest requestMessage, BrowserHeaders metadata_or_callback) { + StartConsoleRequest requestMessage, BrowserHeaders metadata_or_callback) { return startConsole( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } 
@JsOverlay public final UnaryResponse startConsole( - StartConsoleRequest requestMessage, - ConsoleServiceClient.StartConsoleMetadata_or_callbackFn metadata_or_callback, - ConsoleServiceClient.StartConsoleCallbackFn callback) { + StartConsoleRequest requestMessage, + ConsoleServiceClient.StartConsoleMetadata_or_callbackFn metadata_or_callback, + ConsoleServiceClient.StartConsoleCallbackFn callback) { return startConsole( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse startConsole( - StartConsoleRequest requestMessage, - ConsoleServiceClient.StartConsoleMetadata_or_callbackFn metadata_or_callback) { + StartConsoleRequest requestMessage, + ConsoleServiceClient.StartConsoleMetadata_or_callbackFn metadata_or_callback) { return startConsole( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } public native UnaryResponse startConsole( - StartConsoleRequest requestMessage, - ConsoleServiceClient.StartConsoleMetadata_or_callbackUnionType metadata_or_callback, - ConsoleServiceClient.StartConsoleCallbackFn callback); + StartConsoleRequest requestMessage, + ConsoleServiceClient.StartConsoleMetadata_or_callbackUnionType metadata_or_callback, + ConsoleServiceClient.StartConsoleCallbackFn callback); public native UnaryResponse startConsole( - StartConsoleRequest requestMessage, - ConsoleServiceClient.StartConsoleMetadata_or_callbackUnionType metadata_or_callback); + StartConsoleRequest requestMessage, + ConsoleServiceClient.StartConsoleMetadata_or_callbackUnionType metadata_or_callback); public native ResponseStream subscribeToLogs( - LogSubscriptionRequest requestMessage, BrowserHeaders metadata); + LogSubscriptionRequest requestMessage, BrowserHeaders metadata); public native ResponseStream subscribeToLogs( - LogSubscriptionRequest requestMessage); + 
LogSubscriptionRequest requestMessage); } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb_service/RequestStream.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb_service/RequestStream.java index 4707419aee0..92ed10311bf 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb_service/RequestStream.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb_service/RequestStream.java @@ -10,9 +10,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb_service.RequestStream", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb_service.RequestStream", + namespace = JsPackage.GLOBAL) public interface RequestStream { @JsFunction public interface OnHandlerFn { diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb_service/ResponseStream.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb_service/ResponseStream.java index 95b5182d832..e5ce311e5a4 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb_service/ResponseStream.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb_service/ResponseStream.java @@ -5,9 +5,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb_service.ResponseStream", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb_service.ResponseStream", + namespace = JsPackage.GLOBAL) public interface ResponseStream { void cancel(); 
diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb_service/UnaryResponse.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb_service/UnaryResponse.java index 7a2c51c9e71..c600879b495 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb_service/UnaryResponse.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/console_pb_service/UnaryResponse.java @@ -4,9 +4,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.console_pb_service.UnaryResponse", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.console_pb_service.UnaryResponse", + namespace = JsPackage.GLOBAL) public interface UnaryResponse { void cancel(); } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/session_pb/BasicAuth.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/session_pb/BasicAuth.java index 8f47bdbb107..6d0ede494f1 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/session_pb/BasicAuth.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/session_pb/BasicAuth.java @@ -9,9 +9,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.session_pb.BasicAuth", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.session_pb.BasicAuth", + namespace = JsPackage.GLOBAL) public class BasicAuth { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -60,7 +60,7 @@ static BasicAuth.ToObjectReturnType0 create() { 
public static native void serializeBinaryToWriter(BasicAuth message, Object writer); public static native BasicAuth.ToObjectReturnType toObject( - boolean includeInstance, BasicAuth msg); + boolean includeInstance, BasicAuth msg); public native String getPassword(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/session_pb/ExportNotification.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/session_pb/ExportNotification.java index 34ad529480f..8ecb6da69c4 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/session_pb/ExportNotification.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/session_pb/ExportNotification.java @@ -11,9 +11,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.session_pb.ExportNotification", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.session_pb.ExportNotification", + namespace = JsPackage.GLOBAL) public class ExportNotification { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -23,7 +23,7 @@ public interface TicketFieldType { public interface GetTicketUnionType { @JsOverlay static ExportNotification.ToObjectReturnType.TicketFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -58,20 +58,20 @@ static ExportNotification.ToObjectReturnType.TicketFieldType create() { @JsProperty void setTicket( - ExportNotification.ToObjectReturnType.TicketFieldType.GetTicketUnionType ticket); + ExportNotification.ToObjectReturnType.TicketFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array 
ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -113,7 +113,7 @@ public interface TicketFieldType { public interface GetTicketUnionType { @JsOverlay static ExportNotification.ToObjectReturnType0.TicketFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -148,20 +148,20 @@ static ExportNotification.ToObjectReturnType0.TicketFieldType create() { @JsProperty void setTicket( - ExportNotification.ToObjectReturnType0.TicketFieldType.GetTicketUnionType ticket); + ExportNotification.ToObjectReturnType0.TicketFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -200,12 +200,12 @@ static ExportNotification.ToObjectReturnType0 create() { public static native ExportNotification deserializeBinary(Uint8Array bytes); public static native ExportNotification deserializeBinaryFromReader( - ExportNotification message, Object reader); + ExportNotification message, Object reader); public static native void serializeBinaryToWriter(ExportNotification message, Object writer); public static native ExportNotification.ToObjectReturnType toObject( - boolean includeInstance, ExportNotification msg); + boolean includeInstance, ExportNotification msg); public native void clearTicket(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/session_pb/ExportNotificationRequest.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/session_pb/ExportNotificationRequest.java index 63a14bc6413..c339943142e 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/session_pb/ExportNotificationRequest.java +++ 
b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/session_pb/ExportNotificationRequest.java @@ -5,17 +5,17 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.session_pb.ExportNotificationRequest", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.session_pb.ExportNotificationRequest", + namespace = JsPackage.GLOBAL) public class ExportNotificationRequest { public static native ExportNotificationRequest deserializeBinary(Uint8Array bytes); public static native ExportNotificationRequest deserializeBinaryFromReader( - ExportNotificationRequest message, Object reader); + ExportNotificationRequest message, Object reader); public static native void serializeBinaryToWriter( - ExportNotificationRequest message, Object writer); + ExportNotificationRequest message, Object writer); public static native Object toObject(boolean includeInstance, ExportNotificationRequest msg); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/session_pb/HandshakeRequest.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/session_pb/HandshakeRequest.java index 8401ce123ae..5e1769a8df0 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/session_pb/HandshakeRequest.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/session_pb/HandshakeRequest.java @@ -9,9 +9,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.session_pb.HandshakeRequest", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.session_pb.HandshakeRequest", + namespace = JsPackage.GLOBAL) public class HandshakeRequest { @JsType(isNative = true, name = "?", namespace = 
JsPackage.GLOBAL) public interface GetPayloadUnionType { @@ -119,13 +119,13 @@ static HandshakeRequest.ToObjectReturnType create() { @JsOverlay default void setPayload(String payload) { setPayload( - Js.uncheckedCast(payload)); + Js.uncheckedCast(payload)); } @JsOverlay default void setPayload(Uint8Array payload) { setPayload( - Js.uncheckedCast(payload)); + Js.uncheckedCast(payload)); } } @@ -179,27 +179,25 @@ static HandshakeRequest.ToObjectReturnType0 create() { @JsOverlay default void setPayload(String payload) { setPayload( - Js.uncheckedCast( - payload)); + Js.uncheckedCast(payload)); } @JsOverlay default void setPayload(Uint8Array payload) { setPayload( - Js.uncheckedCast( - payload)); + Js.uncheckedCast(payload)); } } public static native HandshakeRequest deserializeBinary(Uint8Array bytes); public static native HandshakeRequest deserializeBinaryFromReader( - HandshakeRequest message, Object reader); + HandshakeRequest message, Object reader); public static native void serializeBinaryToWriter(HandshakeRequest message, Object writer); public static native HandshakeRequest.ToObjectReturnType toObject( - boolean includeInstance, HandshakeRequest msg); + boolean includeInstance, HandshakeRequest msg); public native double getAuthProtocol(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/session_pb/HandshakeResponse.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/session_pb/HandshakeResponse.java index 4fd9ea56135..b2dec7dddd2 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/session_pb/HandshakeResponse.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/session_pb/HandshakeResponse.java @@ -9,9 +9,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.session_pb.HandshakeResponse", - 
namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.session_pb.HandshakeResponse", + namespace = JsPackage.GLOBAL) public class HandshakeResponse { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface GetMetadataHeaderUnionType { @@ -202,38 +202,38 @@ static HandshakeResponse.ToObjectReturnType create() { @JsProperty void setMetadataHeader( - HandshakeResponse.ToObjectReturnType.GetMetadataHeaderUnionType metadataHeader); + HandshakeResponse.ToObjectReturnType.GetMetadataHeaderUnionType metadataHeader); @JsOverlay default void setMetadataHeader(String metadataHeader) { setMetadataHeader( - Js.uncheckedCast( - metadataHeader)); + Js.uncheckedCast( + metadataHeader)); } @JsOverlay default void setMetadataHeader(Uint8Array metadataHeader) { setMetadataHeader( - Js.uncheckedCast( - metadataHeader)); + Js.uncheckedCast( + metadataHeader)); } @JsProperty void setSessionToken( - HandshakeResponse.ToObjectReturnType.GetSessionTokenUnionType sessionToken); + HandshakeResponse.ToObjectReturnType.GetSessionTokenUnionType sessionToken); @JsOverlay default void setSessionToken(String sessionToken) { setSessionToken( - Js.uncheckedCast( - sessionToken)); + Js.uncheckedCast( + sessionToken)); } @JsOverlay default void setSessionToken(Uint8Array sessionToken) { setSessionToken( - Js.uncheckedCast( - sessionToken)); + Js.uncheckedCast( + sessionToken)); } @JsProperty @@ -320,38 +320,38 @@ static HandshakeResponse.ToObjectReturnType0 create() { @JsProperty void setMetadataHeader( - HandshakeResponse.ToObjectReturnType0.GetMetadataHeaderUnionType metadataHeader); + HandshakeResponse.ToObjectReturnType0.GetMetadataHeaderUnionType metadataHeader); @JsOverlay default void setMetadataHeader(String metadataHeader) { setMetadataHeader( - Js.uncheckedCast( - metadataHeader)); + Js.uncheckedCast( + metadataHeader)); } @JsOverlay default void setMetadataHeader(Uint8Array metadataHeader) { setMetadataHeader( - 
Js.uncheckedCast( - metadataHeader)); + Js.uncheckedCast( + metadataHeader)); } @JsProperty void setSessionToken( - HandshakeResponse.ToObjectReturnType0.GetSessionTokenUnionType sessionToken); + HandshakeResponse.ToObjectReturnType0.GetSessionTokenUnionType sessionToken); @JsOverlay default void setSessionToken(String sessionToken) { setSessionToken( - Js.uncheckedCast( - sessionToken)); + Js.uncheckedCast( + sessionToken)); } @JsOverlay default void setSessionToken(Uint8Array sessionToken) { setSessionToken( - Js.uncheckedCast( - sessionToken)); + Js.uncheckedCast( + sessionToken)); } @JsProperty @@ -364,12 +364,12 @@ default void setSessionToken(Uint8Array sessionToken) { public static native HandshakeResponse deserializeBinary(Uint8Array bytes); public static native HandshakeResponse deserializeBinaryFromReader( - HandshakeResponse message, Object reader); + HandshakeResponse message, Object reader); public static native void serializeBinaryToWriter(HandshakeResponse message, Object writer); public static native HandshakeResponse.ToObjectReturnType toObject( - boolean includeInstance, HandshakeResponse msg); + boolean includeInstance, HandshakeResponse msg); public native HandshakeResponse.GetMetadataHeaderUnionType getMetadataHeader(); @@ -393,14 +393,12 @@ public static native HandshakeResponse.ToObjectReturnType toObject( @JsOverlay public final void setMetadataHeader(String value) { - setMetadataHeader( - Js.uncheckedCast(value)); + setMetadataHeader(Js.uncheckedCast(value)); } @JsOverlay public final void setMetadataHeader(Uint8Array value) { - setMetadataHeader( - Js.uncheckedCast(value)); + setMetadataHeader(Js.uncheckedCast(value)); } public native void setSessionToken(HandshakeResponse.SetSessionTokenValueUnionType value); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/session_pb/ReleaseResponse.java 
b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/session_pb/ReleaseResponse.java index 556bbcf6e9e..9b15a70addc 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/session_pb/ReleaseResponse.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/session_pb/ReleaseResponse.java @@ -9,9 +9,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.session_pb.ReleaseResponse", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.session_pb.ReleaseResponse", + namespace = JsPackage.GLOBAL) public class ReleaseResponse { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -44,12 +44,12 @@ static ReleaseResponse.ToObjectReturnType0 create() { public static native ReleaseResponse deserializeBinary(Uint8Array bytes); public static native ReleaseResponse deserializeBinaryFromReader( - ReleaseResponse message, Object reader); + ReleaseResponse message, Object reader); public static native void serializeBinaryToWriter(ReleaseResponse message, Object writer); public static native ReleaseResponse.ToObjectReturnType toObject( - boolean includeInstance, ReleaseResponse msg); + boolean includeInstance, ReleaseResponse msg); public native boolean getSuccess(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/session_pb/exportnotification/StateMap.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/session_pb/exportnotification/StateMap.java index 565efc6aa58..fd0728b282f 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/session_pb/exportnotification/StateMap.java +++ 
b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/session_pb/exportnotification/StateMap.java @@ -8,9 +8,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.session_pb.ExportNotification.StateMap", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.session_pb.ExportNotification.StateMap", + namespace = JsPackage.GLOBAL) public interface StateMap { @JsOverlay static StateMap create() { diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/session_pb_service/BidirectionalStream.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/session_pb_service/BidirectionalStream.java index b5b57a9fe33..dbe2db8b33f 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/session_pb_service/BidirectionalStream.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/session_pb_service/BidirectionalStream.java @@ -5,9 +5,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.session_pb_service.BidirectionalStream", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.session_pb_service.BidirectionalStream", + namespace = JsPackage.GLOBAL) public interface BidirectionalStream { void cancel(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/session_pb_service/RequestStream.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/session_pb_service/RequestStream.java index b915a613ee1..1ed6ab9cfff 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/session_pb_service/RequestStream.java +++ 
b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/session_pb_service/RequestStream.java @@ -10,9 +10,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.session_pb_service.RequestStream", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.session_pb_service.RequestStream", + namespace = JsPackage.GLOBAL) public interface RequestStream { @JsFunction public interface OnHandlerFn { diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/session_pb_service/ResponseStream.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/session_pb_service/ResponseStream.java index 8dc6b34d6f2..c1bcb364bd5 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/session_pb_service/ResponseStream.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/session_pb_service/ResponseStream.java @@ -5,9 +5,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.session_pb_service.ResponseStream", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.session_pb_service.ResponseStream", + namespace = JsPackage.GLOBAL) public interface ResponseStream { void cancel(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/session_pb_service/SessionService.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/session_pb_service/SessionService.java index 018a0576516..03cc230eff6 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/session_pb_service/SessionService.java +++ 
b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/session_pb_service/SessionService.java @@ -8,9 +8,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.session_pb_service.SessionService", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.session_pb_service.SessionService", + namespace = JsPackage.GLOBAL) public class SessionService { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface CloseSessionType { diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/session_pb_service/SessionServiceClient.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/session_pb_service/SessionServiceClient.java index 2d52e7fd01a..846cb8a1673 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/session_pb_service/SessionServiceClient.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/session_pb_service/SessionServiceClient.java @@ -16,9 +16,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.session_pb_service.SessionServiceClient", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.session_pb_service.SessionServiceClient", + namespace = JsPackage.GLOBAL) public class SessionServiceClient { @JsFunction public interface CloseSessionCallbackFn { @@ -80,7 +80,7 @@ static SessionServiceClient.CloseSessionMetadata_or_callbackFn.P0Type create() { } void onInvoke( - SessionServiceClient.CloseSessionMetadata_or_callbackFn.P0Type p0, ReleaseResponse p1); + SessionServiceClient.CloseSessionMetadata_or_callbackFn.P0Type p0, ReleaseResponse p1); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ 
-171,7 +171,7 @@ static SessionServiceClient.NewSessionMetadata_or_callbackFn.P0Type create() { } void onInvoke( - SessionServiceClient.NewSessionMetadata_or_callbackFn.P0Type p0, HandshakeResponse p1); + SessionServiceClient.NewSessionMetadata_or_callbackFn.P0Type p0, HandshakeResponse p1); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -231,7 +231,7 @@ static SessionServiceClient.RefreshSessionTokenCallbackFn.P0Type create() { } void onInvoke( - SessionServiceClient.RefreshSessionTokenCallbackFn.P0Type p0, HandshakeResponse p1); + SessionServiceClient.RefreshSessionTokenCallbackFn.P0Type p0, HandshakeResponse p1); } @JsFunction @@ -263,8 +263,8 @@ static SessionServiceClient.RefreshSessionTokenMetadata_or_callbackFn.P0Type cre } void onInvoke( - SessionServiceClient.RefreshSessionTokenMetadata_or_callbackFn.P0Type p0, - HandshakeResponse p1); + SessionServiceClient.RefreshSessionTokenMetadata_or_callbackFn.P0Type p0, + HandshakeResponse p1); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -354,8 +354,7 @@ static SessionServiceClient.ReleaseMetadata_or_callbackFn.P0Type create() { void setMetadata(BrowserHeaders metadata); } - void onInvoke(SessionServiceClient.ReleaseMetadata_or_callbackFn.P0Type p0, - ReleaseResponse p1); + void onInvoke(SessionServiceClient.ReleaseMetadata_or_callbackFn.P0Type p0, ReleaseResponse p1); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -394,214 +393,214 @@ public SessionServiceClient(String serviceHost) {} @JsOverlay public final UnaryResponse closeSession( - HandshakeRequest requestMessage, - BrowserHeaders metadata_or_callback, - SessionServiceClient.CloseSessionCallbackFn callback) { + HandshakeRequest requestMessage, + BrowserHeaders metadata_or_callback, + SessionServiceClient.CloseSessionCallbackFn callback) { return closeSession( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + 
metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse closeSession( - HandshakeRequest requestMessage, BrowserHeaders metadata_or_callback) { + HandshakeRequest requestMessage, BrowserHeaders metadata_or_callback) { return closeSession( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } @JsOverlay public final UnaryResponse closeSession( - HandshakeRequest requestMessage, - SessionServiceClient.CloseSessionMetadata_or_callbackFn metadata_or_callback, - SessionServiceClient.CloseSessionCallbackFn callback) { + HandshakeRequest requestMessage, + SessionServiceClient.CloseSessionMetadata_or_callbackFn metadata_or_callback, + SessionServiceClient.CloseSessionCallbackFn callback) { return closeSession( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse closeSession( - HandshakeRequest requestMessage, - SessionServiceClient.CloseSessionMetadata_or_callbackFn metadata_or_callback) { + HandshakeRequest requestMessage, + SessionServiceClient.CloseSessionMetadata_or_callbackFn metadata_or_callback) { return closeSession( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } public native UnaryResponse closeSession( - HandshakeRequest requestMessage, - SessionServiceClient.CloseSessionMetadata_or_callbackUnionType metadata_or_callback, - SessionServiceClient.CloseSessionCallbackFn callback); + HandshakeRequest requestMessage, + SessionServiceClient.CloseSessionMetadata_or_callbackUnionType metadata_or_callback, + SessionServiceClient.CloseSessionCallbackFn callback); public native UnaryResponse closeSession( - HandshakeRequest requestMessage, - SessionServiceClient.CloseSessionMetadata_or_callbackUnionType metadata_or_callback); + HandshakeRequest requestMessage, 
+ SessionServiceClient.CloseSessionMetadata_or_callbackUnionType metadata_or_callback); public native ResponseStream exportNotifications( - ExportNotificationRequest requestMessage, BrowserHeaders metadata); + ExportNotificationRequest requestMessage, BrowserHeaders metadata); public native ResponseStream exportNotifications( - ExportNotificationRequest requestMessage); + ExportNotificationRequest requestMessage); @JsOverlay public final UnaryResponse newSession( - HandshakeRequest requestMessage, - BrowserHeaders metadata_or_callback, - SessionServiceClient.NewSessionCallbackFn callback) { + HandshakeRequest requestMessage, + BrowserHeaders metadata_or_callback, + SessionServiceClient.NewSessionCallbackFn callback) { return newSession( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse newSession( - HandshakeRequest requestMessage, BrowserHeaders metadata_or_callback) { + HandshakeRequest requestMessage, BrowserHeaders metadata_or_callback) { return newSession( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } @JsOverlay public final UnaryResponse newSession( - HandshakeRequest requestMessage, - SessionServiceClient.NewSessionMetadata_or_callbackFn metadata_or_callback, - SessionServiceClient.NewSessionCallbackFn callback) { + HandshakeRequest requestMessage, + SessionServiceClient.NewSessionMetadata_or_callbackFn metadata_or_callback, + SessionServiceClient.NewSessionCallbackFn callback) { return newSession( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse newSession( - HandshakeRequest requestMessage, - SessionServiceClient.NewSessionMetadata_or_callbackFn metadata_or_callback) { + HandshakeRequest 
requestMessage, + SessionServiceClient.NewSessionMetadata_or_callbackFn metadata_or_callback) { return newSession( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } public native UnaryResponse newSession( - HandshakeRequest requestMessage, - SessionServiceClient.NewSessionMetadata_or_callbackUnionType metadata_or_callback, - SessionServiceClient.NewSessionCallbackFn callback); + HandshakeRequest requestMessage, + SessionServiceClient.NewSessionMetadata_or_callbackUnionType metadata_or_callback, + SessionServiceClient.NewSessionCallbackFn callback); public native UnaryResponse newSession( - HandshakeRequest requestMessage, - SessionServiceClient.NewSessionMetadata_or_callbackUnionType metadata_or_callback); + HandshakeRequest requestMessage, + SessionServiceClient.NewSessionMetadata_or_callbackUnionType metadata_or_callback); @JsOverlay public final UnaryResponse refreshSessionToken( - HandshakeRequest requestMessage, - BrowserHeaders metadata_or_callback, - SessionServiceClient.RefreshSessionTokenCallbackFn callback) { + HandshakeRequest requestMessage, + BrowserHeaders metadata_or_callback, + SessionServiceClient.RefreshSessionTokenCallbackFn callback) { return refreshSessionToken( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse refreshSessionToken( - HandshakeRequest requestMessage, BrowserHeaders metadata_or_callback) { + HandshakeRequest requestMessage, BrowserHeaders metadata_or_callback) { return refreshSessionToken( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } @JsOverlay public final UnaryResponse refreshSessionToken( - HandshakeRequest requestMessage, - SessionServiceClient.RefreshSessionTokenMetadata_or_callbackFn metadata_or_callback, - 
SessionServiceClient.RefreshSessionTokenCallbackFn callback) { + HandshakeRequest requestMessage, + SessionServiceClient.RefreshSessionTokenMetadata_or_callbackFn metadata_or_callback, + SessionServiceClient.RefreshSessionTokenCallbackFn callback) { return refreshSessionToken( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse refreshSessionToken( - HandshakeRequest requestMessage, - SessionServiceClient.RefreshSessionTokenMetadata_or_callbackFn metadata_or_callback) { + HandshakeRequest requestMessage, + SessionServiceClient.RefreshSessionTokenMetadata_or_callbackFn metadata_or_callback) { return refreshSessionToken( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } public native UnaryResponse refreshSessionToken( - HandshakeRequest requestMessage, - SessionServiceClient.RefreshSessionTokenMetadata_or_callbackUnionType metadata_or_callback, - SessionServiceClient.RefreshSessionTokenCallbackFn callback); + HandshakeRequest requestMessage, + SessionServiceClient.RefreshSessionTokenMetadata_or_callbackUnionType metadata_or_callback, + SessionServiceClient.RefreshSessionTokenCallbackFn callback); public native UnaryResponse refreshSessionToken( - HandshakeRequest requestMessage, - SessionServiceClient.RefreshSessionTokenMetadata_or_callbackUnionType metadata_or_callback); + HandshakeRequest requestMessage, + SessionServiceClient.RefreshSessionTokenMetadata_or_callbackUnionType metadata_or_callback); @JsOverlay public final UnaryResponse release( - Ticket requestMessage, - BrowserHeaders metadata_or_callback, - SessionServiceClient.ReleaseCallbackFn callback) { + Ticket requestMessage, + BrowserHeaders metadata_or_callback, + SessionServiceClient.ReleaseCallbackFn callback) { return release( - requestMessage, - Js.uncheckedCast( - 
metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse release(Ticket requestMessage, BrowserHeaders metadata_or_callback) { return release( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } @JsOverlay public final UnaryResponse release( - Ticket requestMessage, - SessionServiceClient.ReleaseMetadata_or_callbackFn metadata_or_callback, - SessionServiceClient.ReleaseCallbackFn callback) { + Ticket requestMessage, + SessionServiceClient.ReleaseMetadata_or_callbackFn metadata_or_callback, + SessionServiceClient.ReleaseCallbackFn callback) { return release( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse release( - Ticket requestMessage, - SessionServiceClient.ReleaseMetadata_or_callbackFn metadata_or_callback) { + Ticket requestMessage, + SessionServiceClient.ReleaseMetadata_or_callbackFn metadata_or_callback) { return release( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } public native UnaryResponse release( - Ticket requestMessage, - SessionServiceClient.ReleaseMetadata_or_callbackUnionType metadata_or_callback, - SessionServiceClient.ReleaseCallbackFn callback); + Ticket requestMessage, + SessionServiceClient.ReleaseMetadata_or_callbackUnionType metadata_or_callback, + SessionServiceClient.ReleaseCallbackFn callback); public native UnaryResponse release( - Ticket requestMessage, - SessionServiceClient.ReleaseMetadata_or_callbackUnionType metadata_or_callback); + Ticket requestMessage, + SessionServiceClient.ReleaseMetadata_or_callbackUnionType metadata_or_callback); } diff --git 
a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/session_pb_service/UnaryResponse.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/session_pb_service/UnaryResponse.java index e3599d63bfb..3aa65e41801 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/session_pb_service/UnaryResponse.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/session_pb_service/UnaryResponse.java @@ -4,9 +4,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.session_pb_service.UnaryResponse", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.session_pb_service.UnaryResponse", + namespace = JsPackage.GLOBAL) public interface UnaryResponse { void cancel(); } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/AndCondition.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/AndCondition.java index d4ed2d4306b..4141d78274d 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/AndCondition.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/AndCondition.java @@ -10,9 +10,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb.AndCondition", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb.AndCondition", + namespace = JsPackage.GLOBAL) public class AndCondition { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -87,11 +87,11 @@ static 
AndCondition.ToObjectReturnType.FiltersListFieldType.CompareFieldType.Lhs @JsProperty void setLiteral( - AndCondition.ToObjectReturnType.FiltersListFieldType.CompareFieldType.LhsFieldType.LiteralFieldType literal); + AndCondition.ToObjectReturnType.FiltersListFieldType.CompareFieldType.LhsFieldType.LiteralFieldType literal); @JsProperty void setReference( - AndCondition.ToObjectReturnType.FiltersListFieldType.CompareFieldType.LhsFieldType.ReferenceFieldType reference); + AndCondition.ToObjectReturnType.FiltersListFieldType.CompareFieldType.LhsFieldType.ReferenceFieldType reference); } @JsOverlay @@ -116,7 +116,7 @@ static AndCondition.ToObjectReturnType.FiltersListFieldType.CompareFieldType cre @JsProperty void setLhs( - AndCondition.ToObjectReturnType.FiltersListFieldType.CompareFieldType.LhsFieldType lhs); + AndCondition.ToObjectReturnType.FiltersListFieldType.CompareFieldType.LhsFieldType lhs); @JsProperty void setOperation(double operation); @@ -322,8 +322,7 @@ static AndCondition.ToObjectReturnType.FiltersListFieldType.SearchFieldType crea @JsOverlay default void setOptionalReferencesList(Object[] optionalReferencesList) { - setOptionalReferencesList( - Js.>uncheckedCast(optionalReferencesList)); + setOptionalReferencesList(Js.>uncheckedCast(optionalReferencesList)); } @JsProperty @@ -370,23 +369,21 @@ static AndCondition.ToObjectReturnType.FiltersListFieldType create() { @JsProperty void setCompare( - AndCondition.ToObjectReturnType.FiltersListFieldType.CompareFieldType compare); + AndCondition.ToObjectReturnType.FiltersListFieldType.CompareFieldType compare); @JsProperty void setContains( - AndCondition.ToObjectReturnType.FiltersListFieldType.ContainsFieldType contains); + AndCondition.ToObjectReturnType.FiltersListFieldType.ContainsFieldType contains); @JsProperty - void setInvoke( - AndCondition.ToObjectReturnType.FiltersListFieldType.InvokeFieldType invoke); + void setInvoke(AndCondition.ToObjectReturnType.FiltersListFieldType.InvokeFieldType invoke); 
@JsProperty - void setIsNull( - AndCondition.ToObjectReturnType.FiltersListFieldType.IsNullFieldType isNull); + void setIsNull(AndCondition.ToObjectReturnType.FiltersListFieldType.IsNullFieldType isNull); @JsProperty void setMatches( - AndCondition.ToObjectReturnType.FiltersListFieldType.MatchesFieldType matches); + AndCondition.ToObjectReturnType.FiltersListFieldType.MatchesFieldType matches); @JsProperty void setNot(AndCondition.ToObjectReturnType.FiltersListFieldType.NotFieldType not); @@ -395,12 +392,10 @@ void setMatches( void setOr(AndCondition.ToObjectReturnType.FiltersListFieldType.OrFieldType or); @JsProperty - void setPb_in( - AndCondition.ToObjectReturnType.FiltersListFieldType.Pb_inFieldType pb_in); + void setPb_in(AndCondition.ToObjectReturnType.FiltersListFieldType.Pb_inFieldType pb_in); @JsProperty - void setSearch( - AndCondition.ToObjectReturnType.FiltersListFieldType.SearchFieldType search); + void setSearch(AndCondition.ToObjectReturnType.FiltersListFieldType.SearchFieldType search); } @JsOverlay @@ -413,15 +408,14 @@ static AndCondition.ToObjectReturnType create() { @JsOverlay default void setFiltersList( - AndCondition.ToObjectReturnType.FiltersListFieldType[] filtersList) { + AndCondition.ToObjectReturnType.FiltersListFieldType[] filtersList) { setFiltersList( - Js.>uncheckedCast( - filtersList)); + Js.>uncheckedCast( + filtersList)); } @JsProperty - void setFiltersList( - JsArray filtersList); + void setFiltersList(JsArray filtersList); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -497,11 +491,11 @@ static AndCondition.ToObjectReturnType0.FiltersListFieldType.CompareFieldType.Lh @JsProperty void setLiteral( - AndCondition.ToObjectReturnType0.FiltersListFieldType.CompareFieldType.LhsFieldType.LiteralFieldType literal); + AndCondition.ToObjectReturnType0.FiltersListFieldType.CompareFieldType.LhsFieldType.LiteralFieldType literal); @JsProperty void setReference( - 
AndCondition.ToObjectReturnType0.FiltersListFieldType.CompareFieldType.LhsFieldType.ReferenceFieldType reference); + AndCondition.ToObjectReturnType0.FiltersListFieldType.CompareFieldType.LhsFieldType.ReferenceFieldType reference); } @JsOverlay @@ -526,7 +520,7 @@ static AndCondition.ToObjectReturnType0.FiltersListFieldType.CompareFieldType cr @JsProperty void setLhs( - AndCondition.ToObjectReturnType0.FiltersListFieldType.CompareFieldType.LhsFieldType lhs); + AndCondition.ToObjectReturnType0.FiltersListFieldType.CompareFieldType.LhsFieldType lhs); @JsProperty void setOperation(double operation); @@ -732,8 +726,7 @@ static AndCondition.ToObjectReturnType0.FiltersListFieldType.SearchFieldType cre @JsOverlay default void setOptionalReferencesList(Object[] optionalReferencesList) { - setOptionalReferencesList( - Js.>uncheckedCast(optionalReferencesList)); + setOptionalReferencesList(Js.>uncheckedCast(optionalReferencesList)); } @JsProperty @@ -780,23 +773,21 @@ static AndCondition.ToObjectReturnType0.FiltersListFieldType create() { @JsProperty void setCompare( - AndCondition.ToObjectReturnType0.FiltersListFieldType.CompareFieldType compare); + AndCondition.ToObjectReturnType0.FiltersListFieldType.CompareFieldType compare); @JsProperty void setContains( - AndCondition.ToObjectReturnType0.FiltersListFieldType.ContainsFieldType contains); + AndCondition.ToObjectReturnType0.FiltersListFieldType.ContainsFieldType contains); @JsProperty - void setInvoke( - AndCondition.ToObjectReturnType0.FiltersListFieldType.InvokeFieldType invoke); + void setInvoke(AndCondition.ToObjectReturnType0.FiltersListFieldType.InvokeFieldType invoke); @JsProperty - void setIsNull( - AndCondition.ToObjectReturnType0.FiltersListFieldType.IsNullFieldType isNull); + void setIsNull(AndCondition.ToObjectReturnType0.FiltersListFieldType.IsNullFieldType isNull); @JsProperty void setMatches( - AndCondition.ToObjectReturnType0.FiltersListFieldType.MatchesFieldType matches); + 
AndCondition.ToObjectReturnType0.FiltersListFieldType.MatchesFieldType matches); @JsProperty void setNot(AndCondition.ToObjectReturnType0.FiltersListFieldType.NotFieldType not); @@ -805,12 +796,10 @@ void setMatches( void setOr(AndCondition.ToObjectReturnType0.FiltersListFieldType.OrFieldType or); @JsProperty - void setPb_in( - AndCondition.ToObjectReturnType0.FiltersListFieldType.Pb_inFieldType pb_in); + void setPb_in(AndCondition.ToObjectReturnType0.FiltersListFieldType.Pb_inFieldType pb_in); @JsProperty - void setSearch( - AndCondition.ToObjectReturnType0.FiltersListFieldType.SearchFieldType search); + void setSearch(AndCondition.ToObjectReturnType0.FiltersListFieldType.SearchFieldType search); } @JsOverlay @@ -823,26 +812,25 @@ static AndCondition.ToObjectReturnType0 create() { @JsOverlay default void setFiltersList( - AndCondition.ToObjectReturnType0.FiltersListFieldType[] filtersList) { + AndCondition.ToObjectReturnType0.FiltersListFieldType[] filtersList) { setFiltersList( - Js.>uncheckedCast( - filtersList)); + Js.>uncheckedCast( + filtersList)); } @JsProperty - void setFiltersList( - JsArray filtersList); + void setFiltersList(JsArray filtersList); } public static native AndCondition deserializeBinary(Uint8Array bytes); public static native AndCondition deserializeBinaryFromReader( - AndCondition message, Object reader); + AndCondition message, Object reader); public static native void serializeBinaryToWriter(AndCondition message, Object writer); public static native AndCondition.ToObjectReturnType toObject( - boolean includeInstance, AndCondition msg); + boolean includeInstance, AndCondition msg); public native Condition addFilters(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/AsOfJoinTablesRequest.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/AsOfJoinTablesRequest.java index 8bda0aadf29..246a455aaaf 100644 --- 
a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/AsOfJoinTablesRequest.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/AsOfJoinTablesRequest.java @@ -12,9 +12,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb.AsOfJoinTablesRequest", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb.AsOfJoinTablesRequest", + namespace = JsPackage.GLOBAL) public class AsOfJoinTablesRequest { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -44,7 +44,7 @@ public interface ResultIdFieldType { public interface GetTicketUnionType { @JsOverlay static AsOfJoinTablesRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -79,20 +79,20 @@ static AsOfJoinTablesRequest.ToObjectReturnType.ResultIdFieldType create() { @JsProperty void setTicket( - AsOfJoinTablesRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType ticket); + AsOfJoinTablesRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -176,7 +176,7 @@ public interface ResultIdFieldType { public interface GetTicketUnionType { @JsOverlay static AsOfJoinTablesRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -211,20 +211,20 @@ static AsOfJoinTablesRequest.ToObjectReturnType0.ResultIdFieldType create() { @JsProperty void setTicket( - AsOfJoinTablesRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType ticket); + 
AsOfJoinTablesRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -285,12 +285,12 @@ default void setColumnsToMatchList(String[] columnsToMatchList) { public static native AsOfJoinTablesRequest deserializeBinary(Uint8Array bytes); public static native AsOfJoinTablesRequest deserializeBinaryFromReader( - AsOfJoinTablesRequest message, Object reader); + AsOfJoinTablesRequest message, Object reader); public static native void serializeBinaryToWriter(AsOfJoinTablesRequest message, Object writer); public static native AsOfJoinTablesRequest.ToObjectReturnType toObject( - boolean includeInstance, AsOfJoinTablesRequest msg); + boolean includeInstance, AsOfJoinTablesRequest msg); public native String addColumnsToAdd(String value, double index); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/BatchTableRequest.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/BatchTableRequest.java index 7baa6f20a24..2471dab2533 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/BatchTableRequest.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/BatchTableRequest.java @@ -11,9 +11,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb.BatchTableRequest", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb.BatchTableRequest", + namespace = JsPackage.GLOBAL) public class BatchTableRequest { @JsType(isNative = true, name = "?", namespace = 
JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -140,15 +140,15 @@ static BatchTableRequest.ToObjectReturnType.OpsListFieldType.ComboAggregateField @JsOverlay default void setAggregatesList( - BatchTableRequest.ToObjectReturnType.OpsListFieldType.ComboAggregateFieldType.AggregatesListFieldType[] aggregatesList) { + BatchTableRequest.ToObjectReturnType.OpsListFieldType.ComboAggregateFieldType.AggregatesListFieldType[] aggregatesList) { setAggregatesList( - Js.>uncheckedCast( - aggregatesList)); + Js.>uncheckedCast( + aggregatesList)); } @JsProperty void setAggregatesList( - JsArray aggregatesList); + JsArray aggregatesList); @JsProperty void setForceCombo(boolean forceCombo); @@ -271,7 +271,7 @@ default void setColumnNamesList(String[] columnNamesList) { @JsProperty void setSourceId( - BatchTableRequest.ToObjectReturnType.OpsListFieldType.DropColumnsFieldType.SourceIdFieldType sourceId); + BatchTableRequest.ToObjectReturnType.OpsListFieldType.DropColumnsFieldType.SourceIdFieldType sourceId); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -282,7 +282,7 @@ public interface ResultIdFieldType { public interface GetTicketUnionType { @JsOverlay static BatchTableRequest.ToObjectReturnType.OpsListFieldType.EmptyTableFieldType.ResultIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -317,20 +317,20 @@ static BatchTableRequest.ToObjectReturnType.OpsListFieldType.EmptyTableFieldType @JsProperty void setTicket( - BatchTableRequest.ToObjectReturnType.OpsListFieldType.EmptyTableFieldType.ResultIdFieldType.GetTicketUnionType ticket); + BatchTableRequest.ToObjectReturnType.OpsListFieldType.EmptyTableFieldType.ResultIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } 
@@ -347,7 +347,7 @@ static BatchTableRequest.ToObjectReturnType.OpsListFieldType.EmptyTableFieldType @JsProperty void setResultId( - BatchTableRequest.ToObjectReturnType.OpsListFieldType.EmptyTableFieldType.ResultIdFieldType resultId); + BatchTableRequest.ToObjectReturnType.OpsListFieldType.EmptyTableFieldType.ResultIdFieldType resultId); @JsProperty void setSize(String size); @@ -493,11 +493,11 @@ static BatchTableRequest.ToObjectReturnType.OpsListFieldType.FilterFieldType.Fil @JsProperty void setLiteral( - BatchTableRequest.ToObjectReturnType.OpsListFieldType.FilterFieldType.FiltersListFieldType.CompareFieldType.LhsFieldType.LiteralFieldType literal); + BatchTableRequest.ToObjectReturnType.OpsListFieldType.FilterFieldType.FiltersListFieldType.CompareFieldType.LhsFieldType.LiteralFieldType literal); @JsProperty void setReference( - BatchTableRequest.ToObjectReturnType.OpsListFieldType.FilterFieldType.FiltersListFieldType.CompareFieldType.LhsFieldType.ReferenceFieldType reference); + BatchTableRequest.ToObjectReturnType.OpsListFieldType.FilterFieldType.FiltersListFieldType.CompareFieldType.LhsFieldType.ReferenceFieldType reference); } @JsOverlay @@ -522,7 +522,7 @@ static BatchTableRequest.ToObjectReturnType.OpsListFieldType.FilterFieldType.Fil @JsProperty void setLhs( - BatchTableRequest.ToObjectReturnType.OpsListFieldType.FilterFieldType.FiltersListFieldType.CompareFieldType.LhsFieldType lhs); + BatchTableRequest.ToObjectReturnType.OpsListFieldType.FilterFieldType.FiltersListFieldType.CompareFieldType.LhsFieldType lhs); @JsProperty void setOperation(double operation); @@ -728,8 +728,7 @@ static BatchTableRequest.ToObjectReturnType.OpsListFieldType.FilterFieldType.Fil @JsOverlay default void setOptionalReferencesList(Object[] optionalReferencesList) { - setOptionalReferencesList( - Js.>uncheckedCast(optionalReferencesList)); + setOptionalReferencesList(Js.>uncheckedCast(optionalReferencesList)); } @JsProperty @@ -773,43 +772,43 @@ static 
BatchTableRequest.ToObjectReturnType.OpsListFieldType.FilterFieldType.Fil @JsProperty void setAnd( - BatchTableRequest.ToObjectReturnType.OpsListFieldType.FilterFieldType.FiltersListFieldType.AndFieldType and); + BatchTableRequest.ToObjectReturnType.OpsListFieldType.FilterFieldType.FiltersListFieldType.AndFieldType and); @JsProperty void setCompare( - BatchTableRequest.ToObjectReturnType.OpsListFieldType.FilterFieldType.FiltersListFieldType.CompareFieldType compare); + BatchTableRequest.ToObjectReturnType.OpsListFieldType.FilterFieldType.FiltersListFieldType.CompareFieldType compare); @JsProperty void setContains( - BatchTableRequest.ToObjectReturnType.OpsListFieldType.FilterFieldType.FiltersListFieldType.ContainsFieldType contains); + BatchTableRequest.ToObjectReturnType.OpsListFieldType.FilterFieldType.FiltersListFieldType.ContainsFieldType contains); @JsProperty void setInvoke( - BatchTableRequest.ToObjectReturnType.OpsListFieldType.FilterFieldType.FiltersListFieldType.InvokeFieldType invoke); + BatchTableRequest.ToObjectReturnType.OpsListFieldType.FilterFieldType.FiltersListFieldType.InvokeFieldType invoke); @JsProperty void setIsNull( - BatchTableRequest.ToObjectReturnType.OpsListFieldType.FilterFieldType.FiltersListFieldType.IsNullFieldType isNull); + BatchTableRequest.ToObjectReturnType.OpsListFieldType.FilterFieldType.FiltersListFieldType.IsNullFieldType isNull); @JsProperty void setMatches( - BatchTableRequest.ToObjectReturnType.OpsListFieldType.FilterFieldType.FiltersListFieldType.MatchesFieldType matches); + BatchTableRequest.ToObjectReturnType.OpsListFieldType.FilterFieldType.FiltersListFieldType.MatchesFieldType matches); @JsProperty void setNot( - BatchTableRequest.ToObjectReturnType.OpsListFieldType.FilterFieldType.FiltersListFieldType.NotFieldType not); + BatchTableRequest.ToObjectReturnType.OpsListFieldType.FilterFieldType.FiltersListFieldType.NotFieldType not); @JsProperty void setOr( - 
BatchTableRequest.ToObjectReturnType.OpsListFieldType.FilterFieldType.FiltersListFieldType.OrFieldType or); + BatchTableRequest.ToObjectReturnType.OpsListFieldType.FilterFieldType.FiltersListFieldType.OrFieldType or); @JsProperty void setPb_in( - BatchTableRequest.ToObjectReturnType.OpsListFieldType.FilterFieldType.FiltersListFieldType.Pb_inFieldType pb_in); + BatchTableRequest.ToObjectReturnType.OpsListFieldType.FilterFieldType.FiltersListFieldType.Pb_inFieldType pb_in); @JsProperty void setSearch( - BatchTableRequest.ToObjectReturnType.OpsListFieldType.FilterFieldType.FiltersListFieldType.SearchFieldType search); + BatchTableRequest.ToObjectReturnType.OpsListFieldType.FilterFieldType.FiltersListFieldType.SearchFieldType search); } @JsOverlay @@ -828,15 +827,15 @@ static BatchTableRequest.ToObjectReturnType.OpsListFieldType.FilterFieldType cre @JsOverlay default void setFiltersList( - BatchTableRequest.ToObjectReturnType.OpsListFieldType.FilterFieldType.FiltersListFieldType[] filtersList) { + BatchTableRequest.ToObjectReturnType.OpsListFieldType.FilterFieldType.FiltersListFieldType[] filtersList) { setFiltersList( - Js.>uncheckedCast( - filtersList)); + Js.>uncheckedCast( + filtersList)); } @JsProperty void setFiltersList( - JsArray filtersList); + JsArray filtersList); @JsProperty void setResultId(Object resultId); @@ -889,8 +888,7 @@ static BatchTableRequest.ToObjectReturnType.OpsListFieldType.HeadByFieldType cre @JsOverlay default void setGroupByColumnSpecsList(String[] groupByColumnSpecsList) { - setGroupByColumnSpecsList( - Js.>uncheckedCast(groupByColumnSpecsList)); + setGroupByColumnSpecsList(Js.>uncheckedCast(groupByColumnSpecsList)); } @JsProperty @@ -1123,7 +1121,7 @@ default void setYColumnNamesList(String[] yColumnNamesList) { @JsProperty void setZoomRange( - BatchTableRequest.ToObjectReturnType.OpsListFieldType.RunChartDownsampleFieldType.ZoomRangeFieldType zoomRange); + 
BatchTableRequest.ToObjectReturnType.OpsListFieldType.RunChartDownsampleFieldType.ZoomRangeFieldType zoomRange); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -1247,14 +1245,14 @@ static BatchTableRequest.ToObjectReturnType.OpsListFieldType.SortFieldType creat @JsProperty void setSortsList( - JsArray sortsList); + JsArray sortsList); @JsOverlay default void setSortsList( - BatchTableRequest.ToObjectReturnType.OpsListFieldType.SortFieldType.SortsListFieldType[] sortsList) { + BatchTableRequest.ToObjectReturnType.OpsListFieldType.SortFieldType.SortsListFieldType[] sortsList) { setSortsList( - Js.>uncheckedCast( - sortsList)); + Js.>uncheckedCast( + sortsList)); } @JsProperty @@ -1311,8 +1309,7 @@ static BatchTableRequest.ToObjectReturnType.OpsListFieldType.UngroupFieldType cr @JsOverlay default void setColumnsToUngroupList(String[] columnsToUngroupList) { - setColumnsToUngroupList( - Js.>uncheckedCast(columnsToUngroupList)); + setColumnsToUngroupList(Js.>uncheckedCast(columnsToUngroupList)); } @JsProperty @@ -1475,72 +1472,69 @@ static BatchTableRequest.ToObjectReturnType.OpsListFieldType create() { @JsProperty void setAsOfJoin( - BatchTableRequest.ToObjectReturnType.OpsListFieldType.AsOfJoinFieldType asOfJoin); + BatchTableRequest.ToObjectReturnType.OpsListFieldType.AsOfJoinFieldType asOfJoin); @JsProperty void setComboAggregate( - BatchTableRequest.ToObjectReturnType.OpsListFieldType.ComboAggregateFieldType comboAggregate); + BatchTableRequest.ToObjectReturnType.OpsListFieldType.ComboAggregateFieldType comboAggregate); @JsProperty void setCrossJoin( - BatchTableRequest.ToObjectReturnType.OpsListFieldType.CrossJoinFieldType crossJoin); + BatchTableRequest.ToObjectReturnType.OpsListFieldType.CrossJoinFieldType crossJoin); @JsProperty void setDropColumns( - BatchTableRequest.ToObjectReturnType.OpsListFieldType.DropColumnsFieldType dropColumns); + BatchTableRequest.ToObjectReturnType.OpsListFieldType.DropColumnsFieldType dropColumns); 
@JsProperty void setEmptyTable( - BatchTableRequest.ToObjectReturnType.OpsListFieldType.EmptyTableFieldType emptyTable); + BatchTableRequest.ToObjectReturnType.OpsListFieldType.EmptyTableFieldType emptyTable); @JsProperty void setExactJoin( - BatchTableRequest.ToObjectReturnType.OpsListFieldType.ExactJoinFieldType exactJoin); + BatchTableRequest.ToObjectReturnType.OpsListFieldType.ExactJoinFieldType exactJoin); @JsProperty - void setFilter( - BatchTableRequest.ToObjectReturnType.OpsListFieldType.FilterFieldType filter); + void setFilter(BatchTableRequest.ToObjectReturnType.OpsListFieldType.FilterFieldType filter); @JsProperty void setFlatten( - BatchTableRequest.ToObjectReturnType.OpsListFieldType.FlattenFieldType flatten); + BatchTableRequest.ToObjectReturnType.OpsListFieldType.FlattenFieldType flatten); @JsProperty void setHead(BatchTableRequest.ToObjectReturnType.OpsListFieldType.HeadFieldType head); @JsProperty - void setHeadBy( - BatchTableRequest.ToObjectReturnType.OpsListFieldType.HeadByFieldType headBy); + void setHeadBy(BatchTableRequest.ToObjectReturnType.OpsListFieldType.HeadByFieldType headBy); @JsProperty void setLazyUpdate(Object lazyUpdate); @JsProperty void setLeftJoin( - BatchTableRequest.ToObjectReturnType.OpsListFieldType.LeftJoinFieldType leftJoin); + BatchTableRequest.ToObjectReturnType.OpsListFieldType.LeftJoinFieldType leftJoin); @JsProperty - void setMerge( - BatchTableRequest.ToObjectReturnType.OpsListFieldType.MergeFieldType merge); + void setMerge(BatchTableRequest.ToObjectReturnType.OpsListFieldType.MergeFieldType merge); @JsProperty void setNaturalJoin( - BatchTableRequest.ToObjectReturnType.OpsListFieldType.NaturalJoinFieldType naturalJoin); + BatchTableRequest.ToObjectReturnType.OpsListFieldType.NaturalJoinFieldType naturalJoin); @JsProperty void setRunChartDownsample( - BatchTableRequest.ToObjectReturnType.OpsListFieldType.RunChartDownsampleFieldType runChartDownsample); + 
BatchTableRequest.ToObjectReturnType.OpsListFieldType.RunChartDownsampleFieldType runChartDownsample); @JsProperty void setSelect(Object select); @JsProperty void setSelectDistinct( - BatchTableRequest.ToObjectReturnType.OpsListFieldType.SelectDistinctFieldType selectDistinct); + BatchTableRequest.ToObjectReturnType.OpsListFieldType.SelectDistinctFieldType selectDistinct); @JsProperty void setSnapshot( - BatchTableRequest.ToObjectReturnType.OpsListFieldType.SnapshotFieldType snapshot); + BatchTableRequest.ToObjectReturnType.OpsListFieldType.SnapshotFieldType snapshot); @JsProperty void setSort(BatchTableRequest.ToObjectReturnType.OpsListFieldType.SortFieldType sort); @@ -1553,19 +1547,18 @@ void setSnapshot( @JsProperty void setTimeTable( - BatchTableRequest.ToObjectReturnType.OpsListFieldType.TimeTableFieldType timeTable); + BatchTableRequest.ToObjectReturnType.OpsListFieldType.TimeTableFieldType timeTable); @JsProperty void setUngroup( - BatchTableRequest.ToObjectReturnType.OpsListFieldType.UngroupFieldType ungroup); + BatchTableRequest.ToObjectReturnType.OpsListFieldType.UngroupFieldType ungroup); @JsProperty void setUnstructuredFilter( - BatchTableRequest.ToObjectReturnType.OpsListFieldType.UnstructuredFilterFieldType unstructuredFilter); + BatchTableRequest.ToObjectReturnType.OpsListFieldType.UnstructuredFilterFieldType unstructuredFilter); @JsProperty - void setUpdate( - BatchTableRequest.ToObjectReturnType.OpsListFieldType.UpdateFieldType update); + void setUpdate(BatchTableRequest.ToObjectReturnType.OpsListFieldType.UpdateFieldType update); @JsProperty void setUpdateView(Object updateView); @@ -1588,8 +1581,8 @@ static BatchTableRequest.ToObjectReturnType create() { @JsOverlay default void setOpsList(BatchTableRequest.ToObjectReturnType.OpsListFieldType[] opsList) { setOpsList( - Js.>uncheckedCast( - opsList)); + Js.>uncheckedCast( + opsList)); } } @@ -1718,15 +1711,15 @@ static BatchTableRequest.ToObjectReturnType0.OpsListFieldType.ComboAggregateFiel 
@JsOverlay default void setAggregatesList( - BatchTableRequest.ToObjectReturnType0.OpsListFieldType.ComboAggregateFieldType.AggregatesListFieldType[] aggregatesList) { + BatchTableRequest.ToObjectReturnType0.OpsListFieldType.ComboAggregateFieldType.AggregatesListFieldType[] aggregatesList) { setAggregatesList( - Js.>uncheckedCast( - aggregatesList)); + Js.>uncheckedCast( + aggregatesList)); } @JsProperty void setAggregatesList( - JsArray aggregatesList); + JsArray aggregatesList); @JsProperty void setForceCombo(boolean forceCombo); @@ -1849,7 +1842,7 @@ default void setColumnNamesList(String[] columnNamesList) { @JsProperty void setSourceId( - BatchTableRequest.ToObjectReturnType0.OpsListFieldType.DropColumnsFieldType.SourceIdFieldType sourceId); + BatchTableRequest.ToObjectReturnType0.OpsListFieldType.DropColumnsFieldType.SourceIdFieldType sourceId); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -1860,7 +1853,7 @@ public interface ResultIdFieldType { public interface GetTicketUnionType { @JsOverlay static BatchTableRequest.ToObjectReturnType0.OpsListFieldType.EmptyTableFieldType.ResultIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -1895,20 +1888,20 @@ static BatchTableRequest.ToObjectReturnType0.OpsListFieldType.EmptyTableFieldTyp @JsProperty void setTicket( - BatchTableRequest.ToObjectReturnType0.OpsListFieldType.EmptyTableFieldType.ResultIdFieldType.GetTicketUnionType ticket); + BatchTableRequest.ToObjectReturnType0.OpsListFieldType.EmptyTableFieldType.ResultIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -1925,7 +1918,7 @@ static BatchTableRequest.ToObjectReturnType0.OpsListFieldType.EmptyTableFieldTyp @JsProperty void setResultId( - 
BatchTableRequest.ToObjectReturnType0.OpsListFieldType.EmptyTableFieldType.ResultIdFieldType resultId); + BatchTableRequest.ToObjectReturnType0.OpsListFieldType.EmptyTableFieldType.ResultIdFieldType resultId); @JsProperty void setSize(String size); @@ -2071,11 +2064,11 @@ static BatchTableRequest.ToObjectReturnType0.OpsListFieldType.FilterFieldType.Fi @JsProperty void setLiteral( - BatchTableRequest.ToObjectReturnType0.OpsListFieldType.FilterFieldType.FiltersListFieldType.CompareFieldType.LhsFieldType.LiteralFieldType literal); + BatchTableRequest.ToObjectReturnType0.OpsListFieldType.FilterFieldType.FiltersListFieldType.CompareFieldType.LhsFieldType.LiteralFieldType literal); @JsProperty void setReference( - BatchTableRequest.ToObjectReturnType0.OpsListFieldType.FilterFieldType.FiltersListFieldType.CompareFieldType.LhsFieldType.ReferenceFieldType reference); + BatchTableRequest.ToObjectReturnType0.OpsListFieldType.FilterFieldType.FiltersListFieldType.CompareFieldType.LhsFieldType.ReferenceFieldType reference); } @JsOverlay @@ -2100,7 +2093,7 @@ static BatchTableRequest.ToObjectReturnType0.OpsListFieldType.FilterFieldType.Fi @JsProperty void setLhs( - BatchTableRequest.ToObjectReturnType0.OpsListFieldType.FilterFieldType.FiltersListFieldType.CompareFieldType.LhsFieldType lhs); + BatchTableRequest.ToObjectReturnType0.OpsListFieldType.FilterFieldType.FiltersListFieldType.CompareFieldType.LhsFieldType lhs); @JsProperty void setOperation(double operation); @@ -2306,8 +2299,7 @@ static BatchTableRequest.ToObjectReturnType0.OpsListFieldType.FilterFieldType.Fi @JsOverlay default void setOptionalReferencesList(Object[] optionalReferencesList) { - setOptionalReferencesList( - Js.>uncheckedCast(optionalReferencesList)); + setOptionalReferencesList(Js.>uncheckedCast(optionalReferencesList)); } @JsProperty @@ -2351,43 +2343,43 @@ static BatchTableRequest.ToObjectReturnType0.OpsListFieldType.FilterFieldType.Fi @JsProperty void setAnd( - 
BatchTableRequest.ToObjectReturnType0.OpsListFieldType.FilterFieldType.FiltersListFieldType.AndFieldType and); + BatchTableRequest.ToObjectReturnType0.OpsListFieldType.FilterFieldType.FiltersListFieldType.AndFieldType and); @JsProperty void setCompare( - BatchTableRequest.ToObjectReturnType0.OpsListFieldType.FilterFieldType.FiltersListFieldType.CompareFieldType compare); + BatchTableRequest.ToObjectReturnType0.OpsListFieldType.FilterFieldType.FiltersListFieldType.CompareFieldType compare); @JsProperty void setContains( - BatchTableRequest.ToObjectReturnType0.OpsListFieldType.FilterFieldType.FiltersListFieldType.ContainsFieldType contains); + BatchTableRequest.ToObjectReturnType0.OpsListFieldType.FilterFieldType.FiltersListFieldType.ContainsFieldType contains); @JsProperty void setInvoke( - BatchTableRequest.ToObjectReturnType0.OpsListFieldType.FilterFieldType.FiltersListFieldType.InvokeFieldType invoke); + BatchTableRequest.ToObjectReturnType0.OpsListFieldType.FilterFieldType.FiltersListFieldType.InvokeFieldType invoke); @JsProperty void setIsNull( - BatchTableRequest.ToObjectReturnType0.OpsListFieldType.FilterFieldType.FiltersListFieldType.IsNullFieldType isNull); + BatchTableRequest.ToObjectReturnType0.OpsListFieldType.FilterFieldType.FiltersListFieldType.IsNullFieldType isNull); @JsProperty void setMatches( - BatchTableRequest.ToObjectReturnType0.OpsListFieldType.FilterFieldType.FiltersListFieldType.MatchesFieldType matches); + BatchTableRequest.ToObjectReturnType0.OpsListFieldType.FilterFieldType.FiltersListFieldType.MatchesFieldType matches); @JsProperty void setNot( - BatchTableRequest.ToObjectReturnType0.OpsListFieldType.FilterFieldType.FiltersListFieldType.NotFieldType not); + BatchTableRequest.ToObjectReturnType0.OpsListFieldType.FilterFieldType.FiltersListFieldType.NotFieldType not); @JsProperty void setOr( - BatchTableRequest.ToObjectReturnType0.OpsListFieldType.FilterFieldType.FiltersListFieldType.OrFieldType or); + 
BatchTableRequest.ToObjectReturnType0.OpsListFieldType.FilterFieldType.FiltersListFieldType.OrFieldType or); @JsProperty void setPb_in( - BatchTableRequest.ToObjectReturnType0.OpsListFieldType.FilterFieldType.FiltersListFieldType.Pb_inFieldType pb_in); + BatchTableRequest.ToObjectReturnType0.OpsListFieldType.FilterFieldType.FiltersListFieldType.Pb_inFieldType pb_in); @JsProperty void setSearch( - BatchTableRequest.ToObjectReturnType0.OpsListFieldType.FilterFieldType.FiltersListFieldType.SearchFieldType search); + BatchTableRequest.ToObjectReturnType0.OpsListFieldType.FilterFieldType.FiltersListFieldType.SearchFieldType search); } @JsOverlay @@ -2406,15 +2398,15 @@ static BatchTableRequest.ToObjectReturnType0.OpsListFieldType.FilterFieldType cr @JsOverlay default void setFiltersList( - BatchTableRequest.ToObjectReturnType0.OpsListFieldType.FilterFieldType.FiltersListFieldType[] filtersList) { + BatchTableRequest.ToObjectReturnType0.OpsListFieldType.FilterFieldType.FiltersListFieldType[] filtersList) { setFiltersList( - Js.>uncheckedCast( - filtersList)); + Js.>uncheckedCast( + filtersList)); } @JsProperty void setFiltersList( - JsArray filtersList); + JsArray filtersList); @JsProperty void setResultId(Object resultId); @@ -2467,8 +2459,7 @@ static BatchTableRequest.ToObjectReturnType0.OpsListFieldType.HeadByFieldType cr @JsOverlay default void setGroupByColumnSpecsList(String[] groupByColumnSpecsList) { - setGroupByColumnSpecsList( - Js.>uncheckedCast(groupByColumnSpecsList)); + setGroupByColumnSpecsList(Js.>uncheckedCast(groupByColumnSpecsList)); } @JsProperty @@ -2701,7 +2692,7 @@ default void setYColumnNamesList(String[] yColumnNamesList) { @JsProperty void setZoomRange( - BatchTableRequest.ToObjectReturnType0.OpsListFieldType.RunChartDownsampleFieldType.ZoomRangeFieldType zoomRange); + BatchTableRequest.ToObjectReturnType0.OpsListFieldType.RunChartDownsampleFieldType.ZoomRangeFieldType zoomRange); } @JsType(isNative = true, name = "?", namespace = 
JsPackage.GLOBAL) @@ -2825,14 +2816,14 @@ static BatchTableRequest.ToObjectReturnType0.OpsListFieldType.SortFieldType crea @JsProperty void setSortsList( - JsArray sortsList); + JsArray sortsList); @JsOverlay default void setSortsList( - BatchTableRequest.ToObjectReturnType0.OpsListFieldType.SortFieldType.SortsListFieldType[] sortsList) { + BatchTableRequest.ToObjectReturnType0.OpsListFieldType.SortFieldType.SortsListFieldType[] sortsList) { setSortsList( - Js.>uncheckedCast( - sortsList)); + Js.>uncheckedCast( + sortsList)); } @JsProperty @@ -2889,8 +2880,7 @@ static BatchTableRequest.ToObjectReturnType0.OpsListFieldType.UngroupFieldType c @JsOverlay default void setColumnsToUngroupList(String[] columnsToUngroupList) { - setColumnsToUngroupList( - Js.>uncheckedCast(columnsToUngroupList)); + setColumnsToUngroupList(Js.>uncheckedCast(columnsToUngroupList)); } @JsProperty @@ -3053,72 +3043,69 @@ static BatchTableRequest.ToObjectReturnType0.OpsListFieldType create() { @JsProperty void setAsOfJoin( - BatchTableRequest.ToObjectReturnType0.OpsListFieldType.AsOfJoinFieldType asOfJoin); + BatchTableRequest.ToObjectReturnType0.OpsListFieldType.AsOfJoinFieldType asOfJoin); @JsProperty void setComboAggregate( - BatchTableRequest.ToObjectReturnType0.OpsListFieldType.ComboAggregateFieldType comboAggregate); + BatchTableRequest.ToObjectReturnType0.OpsListFieldType.ComboAggregateFieldType comboAggregate); @JsProperty void setCrossJoin( - BatchTableRequest.ToObjectReturnType0.OpsListFieldType.CrossJoinFieldType crossJoin); + BatchTableRequest.ToObjectReturnType0.OpsListFieldType.CrossJoinFieldType crossJoin); @JsProperty void setDropColumns( - BatchTableRequest.ToObjectReturnType0.OpsListFieldType.DropColumnsFieldType dropColumns); + BatchTableRequest.ToObjectReturnType0.OpsListFieldType.DropColumnsFieldType dropColumns); @JsProperty void setEmptyTable( - BatchTableRequest.ToObjectReturnType0.OpsListFieldType.EmptyTableFieldType emptyTable); + 
BatchTableRequest.ToObjectReturnType0.OpsListFieldType.EmptyTableFieldType emptyTable); @JsProperty void setExactJoin( - BatchTableRequest.ToObjectReturnType0.OpsListFieldType.ExactJoinFieldType exactJoin); + BatchTableRequest.ToObjectReturnType0.OpsListFieldType.ExactJoinFieldType exactJoin); @JsProperty - void setFilter( - BatchTableRequest.ToObjectReturnType0.OpsListFieldType.FilterFieldType filter); + void setFilter(BatchTableRequest.ToObjectReturnType0.OpsListFieldType.FilterFieldType filter); @JsProperty void setFlatten( - BatchTableRequest.ToObjectReturnType0.OpsListFieldType.FlattenFieldType flatten); + BatchTableRequest.ToObjectReturnType0.OpsListFieldType.FlattenFieldType flatten); @JsProperty void setHead(BatchTableRequest.ToObjectReturnType0.OpsListFieldType.HeadFieldType head); @JsProperty - void setHeadBy( - BatchTableRequest.ToObjectReturnType0.OpsListFieldType.HeadByFieldType headBy); + void setHeadBy(BatchTableRequest.ToObjectReturnType0.OpsListFieldType.HeadByFieldType headBy); @JsProperty void setLazyUpdate(Object lazyUpdate); @JsProperty void setLeftJoin( - BatchTableRequest.ToObjectReturnType0.OpsListFieldType.LeftJoinFieldType leftJoin); + BatchTableRequest.ToObjectReturnType0.OpsListFieldType.LeftJoinFieldType leftJoin); @JsProperty - void setMerge( - BatchTableRequest.ToObjectReturnType0.OpsListFieldType.MergeFieldType merge); + void setMerge(BatchTableRequest.ToObjectReturnType0.OpsListFieldType.MergeFieldType merge); @JsProperty void setNaturalJoin( - BatchTableRequest.ToObjectReturnType0.OpsListFieldType.NaturalJoinFieldType naturalJoin); + BatchTableRequest.ToObjectReturnType0.OpsListFieldType.NaturalJoinFieldType naturalJoin); @JsProperty void setRunChartDownsample( - BatchTableRequest.ToObjectReturnType0.OpsListFieldType.RunChartDownsampleFieldType runChartDownsample); + BatchTableRequest.ToObjectReturnType0.OpsListFieldType.RunChartDownsampleFieldType runChartDownsample); @JsProperty void setSelect(Object select); @JsProperty void 
setSelectDistinct( - BatchTableRequest.ToObjectReturnType0.OpsListFieldType.SelectDistinctFieldType selectDistinct); + BatchTableRequest.ToObjectReturnType0.OpsListFieldType.SelectDistinctFieldType selectDistinct); @JsProperty void setSnapshot( - BatchTableRequest.ToObjectReturnType0.OpsListFieldType.SnapshotFieldType snapshot); + BatchTableRequest.ToObjectReturnType0.OpsListFieldType.SnapshotFieldType snapshot); @JsProperty void setSort(BatchTableRequest.ToObjectReturnType0.OpsListFieldType.SortFieldType sort); @@ -3131,19 +3118,18 @@ void setSnapshot( @JsProperty void setTimeTable( - BatchTableRequest.ToObjectReturnType0.OpsListFieldType.TimeTableFieldType timeTable); + BatchTableRequest.ToObjectReturnType0.OpsListFieldType.TimeTableFieldType timeTable); @JsProperty void setUngroup( - BatchTableRequest.ToObjectReturnType0.OpsListFieldType.UngroupFieldType ungroup); + BatchTableRequest.ToObjectReturnType0.OpsListFieldType.UngroupFieldType ungroup); @JsProperty void setUnstructuredFilter( - BatchTableRequest.ToObjectReturnType0.OpsListFieldType.UnstructuredFilterFieldType unstructuredFilter); + BatchTableRequest.ToObjectReturnType0.OpsListFieldType.UnstructuredFilterFieldType unstructuredFilter); @JsProperty - void setUpdate( - BatchTableRequest.ToObjectReturnType0.OpsListFieldType.UpdateFieldType update); + void setUpdate(BatchTableRequest.ToObjectReturnType0.OpsListFieldType.UpdateFieldType update); @JsProperty void setUpdateView(Object updateView); @@ -3166,20 +3152,20 @@ static BatchTableRequest.ToObjectReturnType0 create() { @JsOverlay default void setOpsList(BatchTableRequest.ToObjectReturnType0.OpsListFieldType[] opsList) { setOpsList( - Js.>uncheckedCast( - opsList)); + Js.>uncheckedCast( + opsList)); } } public static native BatchTableRequest deserializeBinary(Uint8Array bytes); public static native BatchTableRequest deserializeBinaryFromReader( - BatchTableRequest message, Object reader); + BatchTableRequest message, Object reader); public static native 
void serializeBinaryToWriter(BatchTableRequest message, Object writer); public static native BatchTableRequest.ToObjectReturnType toObject( - boolean includeInstance, BatchTableRequest msg); + boolean includeInstance, BatchTableRequest msg); public native Operation addOps(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/CaseSensitivityMap.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/CaseSensitivityMap.java index 48704582ba8..e901b00b672 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/CaseSensitivityMap.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/CaseSensitivityMap.java @@ -8,9 +8,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb.CaseSensitivityMap", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb.CaseSensitivityMap", + namespace = JsPackage.GLOBAL) public interface CaseSensitivityMap { @JsOverlay static CaseSensitivityMap create() { diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/ComboAggregateRequest.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/ComboAggregateRequest.java index d7572f6dc8a..db0405c1a2b 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/ComboAggregateRequest.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/ComboAggregateRequest.java @@ -13,9 +13,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb.ComboAggregateRequest", - namespace 
= JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb.ComboAggregateRequest", + namespace = JsPackage.GLOBAL) public class ComboAggregateRequest { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -68,7 +68,7 @@ public interface ResultIdFieldType { public interface GetTicketUnionType { @JsOverlay static ComboAggregateRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -103,20 +103,20 @@ static ComboAggregateRequest.ToObjectReturnType.ResultIdFieldType create() { @JsProperty void setTicket( - ComboAggregateRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType ticket); + ComboAggregateRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -162,15 +162,15 @@ static ComboAggregateRequest.ToObjectReturnType create() { @JsOverlay default void setAggregatesList( - ComboAggregateRequest.ToObjectReturnType.AggregatesListFieldType[] aggregatesList) { + ComboAggregateRequest.ToObjectReturnType.AggregatesListFieldType[] aggregatesList) { setAggregatesList( - Js.>uncheckedCast( - aggregatesList)); + Js.>uncheckedCast( + aggregatesList)); } @JsProperty void setAggregatesList( - JsArray aggregatesList); + JsArray aggregatesList); @JsProperty void setForceCombo(boolean forceCombo); @@ -241,7 +241,7 @@ public interface ResultIdFieldType { public interface GetTicketUnionType { @JsOverlay static ComboAggregateRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -276,20 +276,20 @@ static ComboAggregateRequest.ToObjectReturnType0.ResultIdFieldType create() { @JsProperty void 
setTicket( - ComboAggregateRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType ticket); + ComboAggregateRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -335,15 +335,15 @@ static ComboAggregateRequest.ToObjectReturnType0 create() { @JsOverlay default void setAggregatesList( - ComboAggregateRequest.ToObjectReturnType0.AggregatesListFieldType[] aggregatesList) { + ComboAggregateRequest.ToObjectReturnType0.AggregatesListFieldType[] aggregatesList) { setAggregatesList( - Js.>uncheckedCast( - aggregatesList)); + Js.>uncheckedCast( + aggregatesList)); } @JsProperty void setAggregatesList( - JsArray aggregatesList); + JsArray aggregatesList); @JsProperty void setForceCombo(boolean forceCombo); @@ -368,12 +368,12 @@ default void setGroupByColumnsList(String[] groupByColumnsList) { public static native ComboAggregateRequest deserializeBinary(Uint8Array bytes); public static native ComboAggregateRequest deserializeBinaryFromReader( - ComboAggregateRequest message, Object reader); + ComboAggregateRequest message, Object reader); public static native void serializeBinaryToWriter(ComboAggregateRequest message, Object writer); public static native ComboAggregateRequest.ToObjectReturnType toObject( - boolean includeInstance, ComboAggregateRequest msg); + boolean includeInstance, ComboAggregateRequest msg); public native Aggregate addAggregates(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/CompareCondition.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/CompareCondition.java index 261dffba7d0..a5e515bec81 100644 --- 
a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/CompareCondition.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/CompareCondition.java @@ -10,9 +10,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb.CompareCondition", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb.CompareCondition", + namespace = JsPackage.GLOBAL) public class CompareCondition { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -82,12 +82,11 @@ static CompareCondition.ToObjectReturnType.LhsFieldType create() { CompareCondition.ToObjectReturnType.LhsFieldType.ReferenceFieldType getReference(); @JsProperty - void setLiteral( - CompareCondition.ToObjectReturnType.LhsFieldType.LiteralFieldType literal); + void setLiteral(CompareCondition.ToObjectReturnType.LhsFieldType.LiteralFieldType literal); @JsProperty void setReference( - CompareCondition.ToObjectReturnType.LhsFieldType.ReferenceFieldType reference); + CompareCondition.ToObjectReturnType.LhsFieldType.ReferenceFieldType reference); } @JsOverlay @@ -188,12 +187,11 @@ static CompareCondition.ToObjectReturnType0.LhsFieldType create() { CompareCondition.ToObjectReturnType0.LhsFieldType.ReferenceFieldType getReference(); @JsProperty - void setLiteral( - CompareCondition.ToObjectReturnType0.LhsFieldType.LiteralFieldType literal); + void setLiteral(CompareCondition.ToObjectReturnType0.LhsFieldType.LiteralFieldType literal); @JsProperty void setReference( - CompareCondition.ToObjectReturnType0.LhsFieldType.ReferenceFieldType reference); + CompareCondition.ToObjectReturnType0.LhsFieldType.ReferenceFieldType reference); } @JsOverlay @@ -231,12 +229,12 @@ static CompareCondition.ToObjectReturnType0 create() { public static native CompareCondition 
deserializeBinary(Uint8Array bytes); public static native CompareCondition deserializeBinaryFromReader( - CompareCondition message, Object reader); + CompareCondition message, Object reader); public static native void serializeBinaryToWriter(CompareCondition message, Object writer); public static native CompareCondition.ToObjectReturnType toObject( - boolean includeInstance, CompareCondition msg); + boolean includeInstance, CompareCondition msg); public native void clearLhs(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/Condition.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/Condition.java index 38498a7bd39..d9326f518f9 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/Condition.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/Condition.java @@ -11,9 +11,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb.Condition", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb.Condition", + namespace = JsPackage.GLOBAL) public class Condition { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -105,11 +105,11 @@ static Condition.ToObjectReturnType.CompareFieldType.LhsFieldType create() { @JsProperty void setLiteral( - Condition.ToObjectReturnType.CompareFieldType.LhsFieldType.LiteralFieldType literal); + Condition.ToObjectReturnType.CompareFieldType.LhsFieldType.LiteralFieldType literal); @JsProperty void setReference( - Condition.ToObjectReturnType.CompareFieldType.LhsFieldType.ReferenceFieldType reference); + Condition.ToObjectReturnType.CompareFieldType.LhsFieldType.ReferenceFieldType reference); } @JsOverlay @@ -339,8 +339,7 @@ static 
Condition.ToObjectReturnType.SearchFieldType create() { @JsOverlay default void setOptionalReferencesList(Object[] optionalReferencesList) { - setOptionalReferencesList( - Js.>uncheckedCast(optionalReferencesList)); + setOptionalReferencesList(Js.>uncheckedCast(optionalReferencesList)); } @JsProperty @@ -503,11 +502,11 @@ static Condition.ToObjectReturnType0.CompareFieldType.LhsFieldType create() { @JsProperty void setLiteral( - Condition.ToObjectReturnType0.CompareFieldType.LhsFieldType.LiteralFieldType literal); + Condition.ToObjectReturnType0.CompareFieldType.LhsFieldType.LiteralFieldType literal); @JsProperty void setReference( - Condition.ToObjectReturnType0.CompareFieldType.LhsFieldType.ReferenceFieldType reference); + Condition.ToObjectReturnType0.CompareFieldType.LhsFieldType.ReferenceFieldType reference); } @JsOverlay @@ -737,8 +736,7 @@ static Condition.ToObjectReturnType0.SearchFieldType create() { @JsOverlay default void setOptionalReferencesList(Object[] optionalReferencesList) { - setOptionalReferencesList( - Js.>uncheckedCast(optionalReferencesList)); + setOptionalReferencesList(Js.>uncheckedCast(optionalReferencesList)); } @JsProperty @@ -818,7 +816,7 @@ static Condition.ToObjectReturnType0 create() { public static native void serializeBinaryToWriter(Condition message, Object writer); public static native Condition.ToObjectReturnType toObject( - boolean includeInstance, Condition msg); + boolean includeInstance, Condition msg); public native void clearAnd(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/ContainsCondition.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/ContainsCondition.java index 122c2b9823e..4b96681208e 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/ContainsCondition.java +++ 
b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/ContainsCondition.java @@ -9,9 +9,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb.ContainsCondition", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb.ContainsCondition", + namespace = JsPackage.GLOBAL) public class ContainsCondition { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -108,12 +108,12 @@ static ContainsCondition.ToObjectReturnType0 create() { public static native ContainsCondition deserializeBinary(Uint8Array bytes); public static native ContainsCondition deserializeBinaryFromReader( - ContainsCondition message, Object reader); + ContainsCondition message, Object reader); public static native void serializeBinaryToWriter(ContainsCondition message, Object writer); public static native ContainsCondition.ToObjectReturnType toObject( - boolean includeInstance, ContainsCondition msg); + boolean includeInstance, ContainsCondition msg); public native void clearReference(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/CrossJoinTablesRequest.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/CrossJoinTablesRequest.java index c025b59fa3b..9f4cbcbd01b 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/CrossJoinTablesRequest.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/CrossJoinTablesRequest.java @@ -11,9 +11,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb.CrossJoinTablesRequest", - namespace = JsPackage.GLOBAL) + isNative = true, + name = 
"dhinternal.io.deephaven.proto.table_pb.CrossJoinTablesRequest", + namespace = JsPackage.GLOBAL) public class CrossJoinTablesRequest { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -43,7 +43,7 @@ public interface ResultIdFieldType { public interface GetTicketUnionType { @JsOverlay static CrossJoinTablesRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -78,20 +78,20 @@ static CrossJoinTablesRequest.ToObjectReturnType.ResultIdFieldType create() { @JsProperty void setTicket( - CrossJoinTablesRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType ticket); + CrossJoinTablesRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -175,7 +175,7 @@ public interface ResultIdFieldType { public interface GetTicketUnionType { @JsOverlay static CrossJoinTablesRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -210,20 +210,20 @@ static CrossJoinTablesRequest.ToObjectReturnType0.ResultIdFieldType create() { @JsProperty void setTicket( - CrossJoinTablesRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType ticket); + CrossJoinTablesRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -282,13 +282,12 @@ default void setColumnsToMatchList(String[] columnsToMatchList) { public static native 
CrossJoinTablesRequest deserializeBinary(Uint8Array bytes); public static native CrossJoinTablesRequest deserializeBinaryFromReader( - CrossJoinTablesRequest message, Object reader); + CrossJoinTablesRequest message, Object reader); - public static native void serializeBinaryToWriter(CrossJoinTablesRequest message, - Object writer); + public static native void serializeBinaryToWriter(CrossJoinTablesRequest message, Object writer); public static native CrossJoinTablesRequest.ToObjectReturnType toObject( - boolean includeInstance, CrossJoinTablesRequest msg); + boolean includeInstance, CrossJoinTablesRequest msg); public native String addColumnsToAdd(String value, double index); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/DropColumnsRequest.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/DropColumnsRequest.java index 8df2f10e27a..2f3c3fe1a36 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/DropColumnsRequest.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/DropColumnsRequest.java @@ -11,9 +11,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb.DropColumnsRequest", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb.DropColumnsRequest", + namespace = JsPackage.GLOBAL) public class DropColumnsRequest { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -23,7 +23,7 @@ public interface ResultIdFieldType { public interface GetTicketUnionType { @JsOverlay static DropColumnsRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -58,20 +58,20 @@ static 
DropColumnsRequest.ToObjectReturnType.ResultIdFieldType create() { @JsProperty void setTicket( - DropColumnsRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType ticket); + DropColumnsRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -132,7 +132,7 @@ public interface ResultIdFieldType { public interface GetTicketUnionType { @JsOverlay static DropColumnsRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -167,20 +167,20 @@ static DropColumnsRequest.ToObjectReturnType0.ResultIdFieldType create() { @JsProperty void setTicket( - DropColumnsRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType ticket); + DropColumnsRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -236,12 +236,12 @@ default void setColumnNamesList(String[] columnNamesList) { public static native DropColumnsRequest deserializeBinary(Uint8Array bytes); public static native DropColumnsRequest deserializeBinaryFromReader( - DropColumnsRequest message, Object reader); + DropColumnsRequest message, Object reader); public static native void serializeBinaryToWriter(DropColumnsRequest message, Object writer); public static native DropColumnsRequest.ToObjectReturnType toObject( - boolean includeInstance, DropColumnsRequest msg); + boolean includeInstance, DropColumnsRequest msg); public native String addColumnNames(String value, double index); diff --git 
a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/EmptyTableRequest.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/EmptyTableRequest.java index 7958dfe3ed7..5f7a06498ce 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/EmptyTableRequest.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/EmptyTableRequest.java @@ -10,9 +10,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb.EmptyTableRequest", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb.EmptyTableRequest", + namespace = JsPackage.GLOBAL) public class EmptyTableRequest { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -22,7 +22,7 @@ public interface ResultIdFieldType { public interface GetTicketUnionType { @JsOverlay static EmptyTableRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -57,20 +57,20 @@ static EmptyTableRequest.ToObjectReturnType.ResultIdFieldType create() { @JsProperty void setTicket( - EmptyTableRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType ticket); + EmptyTableRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -100,7 +100,7 @@ public interface ResultIdFieldType { public interface GetTicketUnionType { @JsOverlay static EmptyTableRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType of( - Object o) 
{ + Object o) { return Js.cast(o); } @@ -135,20 +135,20 @@ static EmptyTableRequest.ToObjectReturnType0.ResultIdFieldType create() { @JsProperty void setTicket( - EmptyTableRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType ticket); + EmptyTableRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -173,12 +173,12 @@ static EmptyTableRequest.ToObjectReturnType0 create() { public static native EmptyTableRequest deserializeBinary(Uint8Array bytes); public static native EmptyTableRequest deserializeBinaryFromReader( - EmptyTableRequest message, Object reader); + EmptyTableRequest message, Object reader); public static native void serializeBinaryToWriter(EmptyTableRequest message, Object writer); public static native EmptyTableRequest.ToObjectReturnType toObject( - boolean includeInstance, EmptyTableRequest msg); + boolean includeInstance, EmptyTableRequest msg); public native void clearResultId(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/ExactJoinTablesRequest.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/ExactJoinTablesRequest.java index b1539a4235e..9dd630c30ae 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/ExactJoinTablesRequest.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/ExactJoinTablesRequest.java @@ -11,9 +11,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb.ExactJoinTablesRequest", - namespace = JsPackage.GLOBAL) + isNative 
= true, + name = "dhinternal.io.deephaven.proto.table_pb.ExactJoinTablesRequest", + namespace = JsPackage.GLOBAL) public class ExactJoinTablesRequest { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -43,7 +43,7 @@ public interface ResultIdFieldType { public interface GetTicketUnionType { @JsOverlay static ExactJoinTablesRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -78,20 +78,20 @@ static ExactJoinTablesRequest.ToObjectReturnType.ResultIdFieldType create() { @JsProperty void setTicket( - ExactJoinTablesRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType ticket); + ExactJoinTablesRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -169,7 +169,7 @@ public interface ResultIdFieldType { public interface GetTicketUnionType { @JsOverlay static ExactJoinTablesRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -204,20 +204,20 @@ static ExactJoinTablesRequest.ToObjectReturnType0.ResultIdFieldType create() { @JsProperty void setTicket( - ExactJoinTablesRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType ticket); + ExactJoinTablesRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -270,13 +270,12 @@ default void setColumnsToMatchList(String[] columnsToMatchList) { public static native 
ExactJoinTablesRequest deserializeBinary(Uint8Array bytes); public static native ExactJoinTablesRequest deserializeBinaryFromReader( - ExactJoinTablesRequest message, Object reader); + ExactJoinTablesRequest message, Object reader); - public static native void serializeBinaryToWriter(ExactJoinTablesRequest message, - Object writer); + public static native void serializeBinaryToWriter(ExactJoinTablesRequest message, Object writer); public static native ExactJoinTablesRequest.ToObjectReturnType toObject( - boolean includeInstance, ExactJoinTablesRequest msg); + boolean includeInstance, ExactJoinTablesRequest msg); public native String addColumnsToAdd(String value, double index); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/ExportedTableCreationResponse.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/ExportedTableCreationResponse.java index 50073b5231e..3e309faee2b 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/ExportedTableCreationResponse.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/ExportedTableCreationResponse.java @@ -9,9 +9,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb.ExportedTableCreationResponse", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb.ExportedTableCreationResponse", + namespace = JsPackage.GLOBAL) public class ExportedTableCreationResponse { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface GetSchemaHeaderUnionType { @@ -75,7 +75,7 @@ public interface ToObjectReturnType { public interface GetSchemaHeaderUnionType { @JsOverlay static ExportedTableCreationResponse.ToObjectReturnType.GetSchemaHeaderUnionType of( - Object o) { 
+ Object o) { return Js.cast(o); } @@ -108,7 +108,7 @@ public interface TicketFieldType { public interface GetTicketUnionType { @JsOverlay static ExportedTableCreationResponse.ToObjectReturnType.ResultIdFieldType.TicketFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -143,20 +143,20 @@ static ExportedTableCreationResponse.ToObjectReturnType.ResultIdFieldType.Ticket @JsProperty void setTicket( - ExportedTableCreationResponse.ToObjectReturnType.ResultIdFieldType.TicketFieldType.GetTicketUnionType ticket); + ExportedTableCreationResponse.ToObjectReturnType.ResultIdFieldType.TicketFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -176,7 +176,7 @@ static ExportedTableCreationResponse.ToObjectReturnType.ResultIdFieldType create @JsProperty void setTicket( - ExportedTableCreationResponse.ToObjectReturnType.ResultIdFieldType.TicketFieldType ticket); + ExportedTableCreationResponse.ToObjectReturnType.ResultIdFieldType.TicketFieldType ticket); } @JsOverlay @@ -209,25 +209,24 @@ static ExportedTableCreationResponse.ToObjectReturnType create() { void setIsStatic(boolean isStatic); @JsProperty - void setResultId( - ExportedTableCreationResponse.ToObjectReturnType.ResultIdFieldType resultId); + void setResultId(ExportedTableCreationResponse.ToObjectReturnType.ResultIdFieldType resultId); @JsProperty void setSchemaHeader( - ExportedTableCreationResponse.ToObjectReturnType.GetSchemaHeaderUnionType schemaHeader); + ExportedTableCreationResponse.ToObjectReturnType.GetSchemaHeaderUnionType schemaHeader); @JsOverlay default void setSchemaHeader(String schemaHeader) { setSchemaHeader( - Js.uncheckedCast( - schemaHeader)); + Js.uncheckedCast( + schemaHeader)); } @JsOverlay default void 
setSchemaHeader(Uint8Array schemaHeader) { setSchemaHeader( - Js.uncheckedCast( - schemaHeader)); + Js.uncheckedCast( + schemaHeader)); } @JsProperty @@ -243,7 +242,7 @@ public interface ToObjectReturnType0 { public interface GetSchemaHeaderUnionType { @JsOverlay static ExportedTableCreationResponse.ToObjectReturnType0.GetSchemaHeaderUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -276,7 +275,7 @@ public interface TicketFieldType { public interface GetTicketUnionType { @JsOverlay static ExportedTableCreationResponse.ToObjectReturnType0.ResultIdFieldType.TicketFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -311,20 +310,20 @@ static ExportedTableCreationResponse.ToObjectReturnType0.ResultIdFieldType.Ticke @JsProperty void setTicket( - ExportedTableCreationResponse.ToObjectReturnType0.ResultIdFieldType.TicketFieldType.GetTicketUnionType ticket); + ExportedTableCreationResponse.ToObjectReturnType0.ResultIdFieldType.TicketFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -344,7 +343,7 @@ static ExportedTableCreationResponse.ToObjectReturnType0.ResultIdFieldType creat @JsProperty void setTicket( - ExportedTableCreationResponse.ToObjectReturnType0.ResultIdFieldType.TicketFieldType ticket); + ExportedTableCreationResponse.ToObjectReturnType0.ResultIdFieldType.TicketFieldType ticket); } @JsOverlay @@ -377,25 +376,24 @@ static ExportedTableCreationResponse.ToObjectReturnType0 create() { void setIsStatic(boolean isStatic); @JsProperty - void setResultId( - ExportedTableCreationResponse.ToObjectReturnType0.ResultIdFieldType resultId); + void setResultId(ExportedTableCreationResponse.ToObjectReturnType0.ResultIdFieldType resultId); @JsProperty void setSchemaHeader( - 
ExportedTableCreationResponse.ToObjectReturnType0.GetSchemaHeaderUnionType schemaHeader); + ExportedTableCreationResponse.ToObjectReturnType0.GetSchemaHeaderUnionType schemaHeader); @JsOverlay default void setSchemaHeader(String schemaHeader) { setSchemaHeader( - Js.uncheckedCast( - schemaHeader)); + Js.uncheckedCast( + schemaHeader)); } @JsOverlay default void setSchemaHeader(Uint8Array schemaHeader) { setSchemaHeader( - Js.uncheckedCast( - schemaHeader)); + Js.uncheckedCast( + schemaHeader)); } @JsProperty @@ -408,13 +406,13 @@ default void setSchemaHeader(Uint8Array schemaHeader) { public static native ExportedTableCreationResponse deserializeBinary(Uint8Array bytes); public static native ExportedTableCreationResponse deserializeBinaryFromReader( - ExportedTableCreationResponse message, Object reader); + ExportedTableCreationResponse message, Object reader); public static native void serializeBinaryToWriter( - ExportedTableCreationResponse message, Object writer); + ExportedTableCreationResponse message, Object writer); public static native ExportedTableCreationResponse.ToObjectReturnType toObject( - boolean includeInstance, ExportedTableCreationResponse msg); + boolean includeInstance, ExportedTableCreationResponse msg); public native void clearResultId(); @@ -447,18 +445,18 @@ public static native ExportedTableCreationResponse.ToObjectReturnType toObject( public native void setResultId(TableReference value); public native void setSchemaHeader( - ExportedTableCreationResponse.SetSchemaHeaderValueUnionType value); + ExportedTableCreationResponse.SetSchemaHeaderValueUnionType value); @JsOverlay public final void setSchemaHeader(String value) { setSchemaHeader( - Js.uncheckedCast(value)); + Js.uncheckedCast(value)); } @JsOverlay public final void setSchemaHeader(Uint8Array value) { setSchemaHeader( - Js.uncheckedCast(value)); + Js.uncheckedCast(value)); } public native void setSize(String value); @@ -467,6 +465,5 @@ public final void setSchemaHeader(Uint8Array 
value) { public native ExportedTableCreationResponse.ToObjectReturnType0 toObject(); - public native ExportedTableCreationResponse.ToObjectReturnType0 toObject( - boolean includeInstance); + public native ExportedTableCreationResponse.ToObjectReturnType0 toObject(boolean includeInstance); } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/ExportedTableUpdateMessage.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/ExportedTableUpdateMessage.java index b82b1cb92af..3f2eaa73679 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/ExportedTableUpdateMessage.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/ExportedTableUpdateMessage.java @@ -10,9 +10,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb.ExportedTableUpdateMessage", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb.ExportedTableUpdateMessage", + namespace = JsPackage.GLOBAL) public class ExportedTableUpdateMessage { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -21,8 +21,7 @@ public interface ExportIdFieldType { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface GetTicketUnionType { @JsOverlay - static ExportedTableUpdateMessage.ToObjectReturnType.ExportIdFieldType.GetTicketUnionType of( - Object o) { + static ExportedTableUpdateMessage.ToObjectReturnType.ExportIdFieldType.GetTicketUnionType of(Object o) { return Js.cast(o); } @@ -57,20 +56,20 @@ static ExportedTableUpdateMessage.ToObjectReturnType.ExportIdFieldType create() @JsProperty void setTicket( - ExportedTableUpdateMessage.ToObjectReturnType.ExportIdFieldType.GetTicketUnionType 
ticket); + ExportedTableUpdateMessage.ToObjectReturnType.ExportIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -106,7 +105,7 @@ public interface ExportIdFieldType { public interface GetTicketUnionType { @JsOverlay static ExportedTableUpdateMessage.ToObjectReturnType0.ExportIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -141,20 +140,20 @@ static ExportedTableUpdateMessage.ToObjectReturnType0.ExportIdFieldType create() @JsProperty void setTicket( - ExportedTableUpdateMessage.ToObjectReturnType0.ExportIdFieldType.GetTicketUnionType ticket); + ExportedTableUpdateMessage.ToObjectReturnType0.ExportIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -185,13 +184,13 @@ static ExportedTableUpdateMessage.ToObjectReturnType0 create() { public static native ExportedTableUpdateMessage deserializeBinary(Uint8Array bytes); public static native ExportedTableUpdateMessage deserializeBinaryFromReader( - ExportedTableUpdateMessage message, Object reader); + ExportedTableUpdateMessage message, Object reader); public static native void serializeBinaryToWriter( - ExportedTableUpdateMessage message, Object writer); + ExportedTableUpdateMessage message, Object writer); public static native ExportedTableUpdateMessage.ToObjectReturnType toObject( - boolean includeInstance, ExportedTableUpdateMessage msg); + boolean includeInstance, ExportedTableUpdateMessage msg); public native void clearExportId(); diff --git 
a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/ExportedTableUpdatesRequest.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/ExportedTableUpdatesRequest.java index 285c4c87672..0f5c921267f 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/ExportedTableUpdatesRequest.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/ExportedTableUpdatesRequest.java @@ -5,17 +5,17 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb.ExportedTableUpdatesRequest", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb.ExportedTableUpdatesRequest", + namespace = JsPackage.GLOBAL) public class ExportedTableUpdatesRequest { public static native ExportedTableUpdatesRequest deserializeBinary(Uint8Array bytes); public static native ExportedTableUpdatesRequest deserializeBinaryFromReader( - ExportedTableUpdatesRequest message, Object reader); + ExportedTableUpdatesRequest message, Object reader); public static native void serializeBinaryToWriter( - ExportedTableUpdatesRequest message, Object writer); + ExportedTableUpdatesRequest message, Object writer); public static native Object toObject(boolean includeInstance, ExportedTableUpdatesRequest msg); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/FilterTableRequest.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/FilterTableRequest.java index dce4fb427e2..fbb9cfa76c6 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/FilterTableRequest.java +++ 
b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/FilterTableRequest.java @@ -11,9 +11,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb.FilterTableRequest", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb.FilterTableRequest", + namespace = JsPackage.GLOBAL) public class FilterTableRequest { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -107,11 +107,11 @@ static FilterTableRequest.ToObjectReturnType.FiltersListFieldType.CompareFieldTy @JsProperty void setLiteral( - FilterTableRequest.ToObjectReturnType.FiltersListFieldType.CompareFieldType.LhsFieldType.LiteralFieldType literal); + FilterTableRequest.ToObjectReturnType.FiltersListFieldType.CompareFieldType.LhsFieldType.LiteralFieldType literal); @JsProperty void setReference( - FilterTableRequest.ToObjectReturnType.FiltersListFieldType.CompareFieldType.LhsFieldType.ReferenceFieldType reference); + FilterTableRequest.ToObjectReturnType.FiltersListFieldType.CompareFieldType.LhsFieldType.ReferenceFieldType reference); } @JsOverlay @@ -136,7 +136,7 @@ static FilterTableRequest.ToObjectReturnType.FiltersListFieldType.CompareFieldTy @JsProperty void setLhs( - FilterTableRequest.ToObjectReturnType.FiltersListFieldType.CompareFieldType.LhsFieldType lhs); + FilterTableRequest.ToObjectReturnType.FiltersListFieldType.CompareFieldType.LhsFieldType lhs); @JsProperty void setOperation(double operation); @@ -342,8 +342,7 @@ static FilterTableRequest.ToObjectReturnType.FiltersListFieldType.SearchFieldTyp @JsOverlay default void setOptionalReferencesList(Object[] optionalReferencesList) { - setOptionalReferencesList( - Js.>uncheckedCast(optionalReferencesList)); + setOptionalReferencesList(Js.>uncheckedCast(optionalReferencesList)); } @JsProperty @@ -386,43 +385,41 @@ static 
FilterTableRequest.ToObjectReturnType.FiltersListFieldType create() { FilterTableRequest.ToObjectReturnType.FiltersListFieldType.SearchFieldType getSearch(); @JsProperty - void setAnd( - FilterTableRequest.ToObjectReturnType.FiltersListFieldType.AndFieldType and); + void setAnd(FilterTableRequest.ToObjectReturnType.FiltersListFieldType.AndFieldType and); @JsProperty void setCompare( - FilterTableRequest.ToObjectReturnType.FiltersListFieldType.CompareFieldType compare); + FilterTableRequest.ToObjectReturnType.FiltersListFieldType.CompareFieldType compare); @JsProperty void setContains( - FilterTableRequest.ToObjectReturnType.FiltersListFieldType.ContainsFieldType contains); + FilterTableRequest.ToObjectReturnType.FiltersListFieldType.ContainsFieldType contains); @JsProperty void setInvoke( - FilterTableRequest.ToObjectReturnType.FiltersListFieldType.InvokeFieldType invoke); + FilterTableRequest.ToObjectReturnType.FiltersListFieldType.InvokeFieldType invoke); @JsProperty void setIsNull( - FilterTableRequest.ToObjectReturnType.FiltersListFieldType.IsNullFieldType isNull); + FilterTableRequest.ToObjectReturnType.FiltersListFieldType.IsNullFieldType isNull); @JsProperty void setMatches( - FilterTableRequest.ToObjectReturnType.FiltersListFieldType.MatchesFieldType matches); + FilterTableRequest.ToObjectReturnType.FiltersListFieldType.MatchesFieldType matches); @JsProperty - void setNot( - FilterTableRequest.ToObjectReturnType.FiltersListFieldType.NotFieldType not); + void setNot(FilterTableRequest.ToObjectReturnType.FiltersListFieldType.NotFieldType not); @JsProperty void setOr(FilterTableRequest.ToObjectReturnType.FiltersListFieldType.OrFieldType or); @JsProperty void setPb_in( - FilterTableRequest.ToObjectReturnType.FiltersListFieldType.Pb_inFieldType pb_in); + FilterTableRequest.ToObjectReturnType.FiltersListFieldType.Pb_inFieldType pb_in); @JsProperty void setSearch( - FilterTableRequest.ToObjectReturnType.FiltersListFieldType.SearchFieldType search); + 
FilterTableRequest.ToObjectReturnType.FiltersListFieldType.SearchFieldType search); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -431,7 +428,7 @@ public interface ResultIdFieldType { public interface GetTicketUnionType { @JsOverlay static FilterTableRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -466,20 +463,20 @@ static FilterTableRequest.ToObjectReturnType.ResultIdFieldType create() { @JsProperty void setTicket( - FilterTableRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType ticket); + FilterTableRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -519,15 +516,15 @@ static FilterTableRequest.ToObjectReturnType create() { @JsOverlay default void setFiltersList( - FilterTableRequest.ToObjectReturnType.FiltersListFieldType[] filtersList) { + FilterTableRequest.ToObjectReturnType.FiltersListFieldType[] filtersList) { setFiltersList( - Js.>uncheckedCast( - filtersList)); + Js.>uncheckedCast( + filtersList)); } @JsProperty void setFiltersList( - JsArray filtersList); + JsArray filtersList); @JsProperty void setResultId(FilterTableRequest.ToObjectReturnType.ResultIdFieldType resultId); @@ -628,11 +625,11 @@ static FilterTableRequest.ToObjectReturnType0.FiltersListFieldType.CompareFieldT @JsProperty void setLiteral( - FilterTableRequest.ToObjectReturnType0.FiltersListFieldType.CompareFieldType.LhsFieldType.LiteralFieldType literal); + FilterTableRequest.ToObjectReturnType0.FiltersListFieldType.CompareFieldType.LhsFieldType.LiteralFieldType literal); @JsProperty void setReference( - 
FilterTableRequest.ToObjectReturnType0.FiltersListFieldType.CompareFieldType.LhsFieldType.ReferenceFieldType reference); + FilterTableRequest.ToObjectReturnType0.FiltersListFieldType.CompareFieldType.LhsFieldType.ReferenceFieldType reference); } @JsOverlay @@ -657,7 +654,7 @@ static FilterTableRequest.ToObjectReturnType0.FiltersListFieldType.CompareFieldT @JsProperty void setLhs( - FilterTableRequest.ToObjectReturnType0.FiltersListFieldType.CompareFieldType.LhsFieldType lhs); + FilterTableRequest.ToObjectReturnType0.FiltersListFieldType.CompareFieldType.LhsFieldType lhs); @JsProperty void setOperation(double operation); @@ -863,8 +860,7 @@ static FilterTableRequest.ToObjectReturnType0.FiltersListFieldType.SearchFieldTy @JsOverlay default void setOptionalReferencesList(Object[] optionalReferencesList) { - setOptionalReferencesList( - Js.>uncheckedCast(optionalReferencesList)); + setOptionalReferencesList(Js.>uncheckedCast(optionalReferencesList)); } @JsProperty @@ -907,43 +903,41 @@ static FilterTableRequest.ToObjectReturnType0.FiltersListFieldType create() { FilterTableRequest.ToObjectReturnType0.FiltersListFieldType.SearchFieldType getSearch(); @JsProperty - void setAnd( - FilterTableRequest.ToObjectReturnType0.FiltersListFieldType.AndFieldType and); + void setAnd(FilterTableRequest.ToObjectReturnType0.FiltersListFieldType.AndFieldType and); @JsProperty void setCompare( - FilterTableRequest.ToObjectReturnType0.FiltersListFieldType.CompareFieldType compare); + FilterTableRequest.ToObjectReturnType0.FiltersListFieldType.CompareFieldType compare); @JsProperty void setContains( - FilterTableRequest.ToObjectReturnType0.FiltersListFieldType.ContainsFieldType contains); + FilterTableRequest.ToObjectReturnType0.FiltersListFieldType.ContainsFieldType contains); @JsProperty void setInvoke( - FilterTableRequest.ToObjectReturnType0.FiltersListFieldType.InvokeFieldType invoke); + FilterTableRequest.ToObjectReturnType0.FiltersListFieldType.InvokeFieldType invoke); @JsProperty 
void setIsNull( - FilterTableRequest.ToObjectReturnType0.FiltersListFieldType.IsNullFieldType isNull); + FilterTableRequest.ToObjectReturnType0.FiltersListFieldType.IsNullFieldType isNull); @JsProperty void setMatches( - FilterTableRequest.ToObjectReturnType0.FiltersListFieldType.MatchesFieldType matches); + FilterTableRequest.ToObjectReturnType0.FiltersListFieldType.MatchesFieldType matches); @JsProperty - void setNot( - FilterTableRequest.ToObjectReturnType0.FiltersListFieldType.NotFieldType not); + void setNot(FilterTableRequest.ToObjectReturnType0.FiltersListFieldType.NotFieldType not); @JsProperty void setOr(FilterTableRequest.ToObjectReturnType0.FiltersListFieldType.OrFieldType or); @JsProperty void setPb_in( - FilterTableRequest.ToObjectReturnType0.FiltersListFieldType.Pb_inFieldType pb_in); + FilterTableRequest.ToObjectReturnType0.FiltersListFieldType.Pb_inFieldType pb_in); @JsProperty void setSearch( - FilterTableRequest.ToObjectReturnType0.FiltersListFieldType.SearchFieldType search); + FilterTableRequest.ToObjectReturnType0.FiltersListFieldType.SearchFieldType search); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -952,7 +946,7 @@ public interface ResultIdFieldType { public interface GetTicketUnionType { @JsOverlay static FilterTableRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -987,20 +981,20 @@ static FilterTableRequest.ToObjectReturnType0.ResultIdFieldType create() { @JsProperty void setTicket( - FilterTableRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType ticket); + FilterTableRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -1040,15 +1034,15 @@ static 
FilterTableRequest.ToObjectReturnType0 create() { @JsOverlay default void setFiltersList( - FilterTableRequest.ToObjectReturnType0.FiltersListFieldType[] filtersList) { + FilterTableRequest.ToObjectReturnType0.FiltersListFieldType[] filtersList) { setFiltersList( - Js.>uncheckedCast( - filtersList)); + Js.>uncheckedCast( + filtersList)); } @JsProperty void setFiltersList( - JsArray filtersList); + JsArray filtersList); @JsProperty void setResultId(FilterTableRequest.ToObjectReturnType0.ResultIdFieldType resultId); @@ -1060,12 +1054,12 @@ void setFiltersList( public static native FilterTableRequest deserializeBinary(Uint8Array bytes); public static native FilterTableRequest deserializeBinaryFromReader( - FilterTableRequest message, Object reader); + FilterTableRequest message, Object reader); public static native void serializeBinaryToWriter(FilterTableRequest message, Object writer); public static native FilterTableRequest.ToObjectReturnType toObject( - boolean includeInstance, FilterTableRequest msg); + boolean includeInstance, FilterTableRequest msg); public native Condition addFilters(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/FlattenRequest.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/FlattenRequest.java index ee6f3169f99..1b97ebf7f9c 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/FlattenRequest.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/FlattenRequest.java @@ -10,9 +10,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb.FlattenRequest", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb.FlattenRequest", + namespace = JsPackage.GLOBAL) public class FlattenRequest { 
@JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -21,8 +21,7 @@ public interface ResultIdFieldType { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface GetTicketUnionType { @JsOverlay - static FlattenRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType of( - Object o) { + static FlattenRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType of(Object o) { return Js.cast(o); } @@ -56,21 +55,20 @@ static FlattenRequest.ToObjectReturnType.ResultIdFieldType create() { FlattenRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType getTicket(); @JsProperty - void setTicket( - FlattenRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType ticket); + void setTicket(FlattenRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -120,7 +118,7 @@ public interface ResultIdFieldType { public interface GetTicketUnionType { @JsOverlay static FlattenRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -155,20 +153,20 @@ static FlattenRequest.ToObjectReturnType0.ResultIdFieldType create() { @JsProperty void setTicket( - FlattenRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType ticket); + FlattenRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -213,12 +211,12 @@ static FlattenRequest.ToObjectReturnType0 create() { 
public static native FlattenRequest deserializeBinary(Uint8Array bytes); public static native FlattenRequest deserializeBinaryFromReader( - FlattenRequest message, Object reader); + FlattenRequest message, Object reader); public static native void serializeBinaryToWriter(FlattenRequest message, Object writer); public static native FlattenRequest.ToObjectReturnType toObject( - boolean includeInstance, FlattenRequest msg); + boolean includeInstance, FlattenRequest msg); public native void clearResultId(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/HeadOrTailByRequest.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/HeadOrTailByRequest.java index 65cb91cdc95..86bc2474049 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/HeadOrTailByRequest.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/HeadOrTailByRequest.java @@ -11,9 +11,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb.HeadOrTailByRequest", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb.HeadOrTailByRequest", + namespace = JsPackage.GLOBAL) public class HeadOrTailByRequest { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -23,7 +23,7 @@ public interface ResultIdFieldType { public interface GetTicketUnionType { @JsOverlay static HeadOrTailByRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -58,20 +58,20 @@ static HeadOrTailByRequest.ToObjectReturnType.ResultIdFieldType create() { @JsProperty void setTicket( - HeadOrTailByRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType ticket); + 
HeadOrTailByRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -138,7 +138,7 @@ public interface ResultIdFieldType { public interface GetTicketUnionType { @JsOverlay static HeadOrTailByRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -173,20 +173,20 @@ static HeadOrTailByRequest.ToObjectReturnType0.ResultIdFieldType create() { @JsProperty void setTicket( - HeadOrTailByRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType ticket); + HeadOrTailByRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -248,12 +248,12 @@ default void setGroupByColumnSpecsList(String[] groupByColumnSpecsList) { public static native HeadOrTailByRequest deserializeBinary(Uint8Array bytes); public static native HeadOrTailByRequest deserializeBinaryFromReader( - HeadOrTailByRequest message, Object reader); + HeadOrTailByRequest message, Object reader); public static native void serializeBinaryToWriter(HeadOrTailByRequest message, Object writer); public static native HeadOrTailByRequest.ToObjectReturnType toObject( - boolean includeInstance, HeadOrTailByRequest msg); + boolean includeInstance, HeadOrTailByRequest msg); public native String addGroupByColumnSpecs(String value, double index); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/HeadOrTailRequest.java 
b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/HeadOrTailRequest.java index b1ac152a29d..8b5b3f91efd 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/HeadOrTailRequest.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/HeadOrTailRequest.java @@ -10,9 +10,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb.HeadOrTailRequest", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb.HeadOrTailRequest", + namespace = JsPackage.GLOBAL) public class HeadOrTailRequest { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -22,7 +22,7 @@ public interface ResultIdFieldType { public interface GetTicketUnionType { @JsOverlay static HeadOrTailRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -57,20 +57,20 @@ static HeadOrTailRequest.ToObjectReturnType.ResultIdFieldType create() { @JsProperty void setTicket( - HeadOrTailRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType ticket); + HeadOrTailRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -126,7 +126,7 @@ public interface ResultIdFieldType { public interface GetTicketUnionType { @JsOverlay static HeadOrTailRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -161,20 +161,20 @@ static HeadOrTailRequest.ToObjectReturnType0.ResultIdFieldType 
create() { @JsProperty void setTicket( - HeadOrTailRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType ticket); + HeadOrTailRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -225,12 +225,12 @@ static HeadOrTailRequest.ToObjectReturnType0 create() { public static native HeadOrTailRequest deserializeBinary(Uint8Array bytes); public static native HeadOrTailRequest deserializeBinaryFromReader( - HeadOrTailRequest message, Object reader); + HeadOrTailRequest message, Object reader); public static native void serializeBinaryToWriter(HeadOrTailRequest message, Object writer); public static native HeadOrTailRequest.ToObjectReturnType toObject( - boolean includeInstance, HeadOrTailRequest msg); + boolean includeInstance, HeadOrTailRequest msg); public native void clearResultId(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/InCondition.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/InCondition.java index 3b584d6a28e..b34f183ce5c 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/InCondition.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/InCondition.java @@ -10,9 +10,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb.InCondition", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb.InCondition", + namespace = JsPackage.GLOBAL) public class InCondition { @JsType(isNative = true, name = "?", namespace 
= JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -82,12 +82,11 @@ static InCondition.ToObjectReturnType.TargetFieldType create() { InCondition.ToObjectReturnType.TargetFieldType.ReferenceFieldType getReference(); @JsProperty - void setLiteral( - InCondition.ToObjectReturnType.TargetFieldType.LiteralFieldType literal); + void setLiteral(InCondition.ToObjectReturnType.TargetFieldType.LiteralFieldType literal); @JsProperty void setReference( - InCondition.ToObjectReturnType.TargetFieldType.ReferenceFieldType reference); + InCondition.ToObjectReturnType.TargetFieldType.ReferenceFieldType reference); } @JsOverlay @@ -193,12 +192,11 @@ static InCondition.ToObjectReturnType0.TargetFieldType create() { InCondition.ToObjectReturnType0.TargetFieldType.ReferenceFieldType getReference(); @JsProperty - void setLiteral( - InCondition.ToObjectReturnType0.TargetFieldType.LiteralFieldType literal); + void setLiteral(InCondition.ToObjectReturnType0.TargetFieldType.LiteralFieldType literal); @JsProperty void setReference( - InCondition.ToObjectReturnType0.TargetFieldType.ReferenceFieldType reference); + InCondition.ToObjectReturnType0.TargetFieldType.ReferenceFieldType reference); } @JsOverlay @@ -238,13 +236,12 @@ default void setCandidatesList(Object[] candidatesList) { public static native InCondition deserializeBinary(Uint8Array bytes); - public static native InCondition deserializeBinaryFromReader(InCondition message, - Object reader); + public static native InCondition deserializeBinaryFromReader(InCondition message, Object reader); public static native void serializeBinaryToWriter(InCondition message, Object writer); public static native InCondition.ToObjectReturnType toObject( - boolean includeInstance, InCondition msg); + boolean includeInstance, InCondition msg); public native Value addCandidates(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/InvokeCondition.java 
b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/InvokeCondition.java index f283e487ba8..9053fdfb2d2 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/InvokeCondition.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/InvokeCondition.java @@ -10,9 +10,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb.InvokeCondition", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb.InvokeCondition", + namespace = JsPackage.GLOBAL) public class InvokeCondition { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -82,12 +82,11 @@ static InvokeCondition.ToObjectReturnType.TargetFieldType create() { InvokeCondition.ToObjectReturnType.TargetFieldType.ReferenceFieldType getReference(); @JsProperty - void setLiteral( - InvokeCondition.ToObjectReturnType.TargetFieldType.LiteralFieldType literal); + void setLiteral(InvokeCondition.ToObjectReturnType.TargetFieldType.LiteralFieldType literal); @JsProperty void setReference( - InvokeCondition.ToObjectReturnType.TargetFieldType.ReferenceFieldType reference); + InvokeCondition.ToObjectReturnType.TargetFieldType.ReferenceFieldType reference); } @JsOverlay @@ -187,12 +186,11 @@ static InvokeCondition.ToObjectReturnType0.TargetFieldType create() { InvokeCondition.ToObjectReturnType0.TargetFieldType.ReferenceFieldType getReference(); @JsProperty - void setLiteral( - InvokeCondition.ToObjectReturnType0.TargetFieldType.LiteralFieldType literal); + void setLiteral(InvokeCondition.ToObjectReturnType0.TargetFieldType.LiteralFieldType literal); @JsProperty void setReference( - InvokeCondition.ToObjectReturnType0.TargetFieldType.ReferenceFieldType reference); + 
InvokeCondition.ToObjectReturnType0.TargetFieldType.ReferenceFieldType reference); } @JsOverlay @@ -227,12 +225,12 @@ default void setArgumentsList(Object[] argumentsList) { public static native InvokeCondition deserializeBinary(Uint8Array bytes); public static native InvokeCondition deserializeBinaryFromReader( - InvokeCondition message, Object reader); + InvokeCondition message, Object reader); public static native void serializeBinaryToWriter(InvokeCondition message, Object writer); public static native InvokeCondition.ToObjectReturnType toObject( - boolean includeInstance, InvokeCondition msg); + boolean includeInstance, InvokeCondition msg); public native Value addArguments(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/IsNullCondition.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/IsNullCondition.java index 90c756778e3..a4ef6819c15 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/IsNullCondition.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/IsNullCondition.java @@ -9,9 +9,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb.IsNullCondition", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb.IsNullCondition", + namespace = JsPackage.GLOBAL) public class IsNullCondition { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -72,12 +72,12 @@ static IsNullCondition.ToObjectReturnType0 create() { public static native IsNullCondition deserializeBinary(Uint8Array bytes); public static native IsNullCondition deserializeBinaryFromReader( - IsNullCondition message, Object reader); + IsNullCondition message, Object reader); public 
static native void serializeBinaryToWriter(IsNullCondition message, Object writer); public static native IsNullCondition.ToObjectReturnType toObject( - boolean includeInstance, IsNullCondition msg); + boolean includeInstance, IsNullCondition msg); public native void clearReference(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/LeftJoinTablesRequest.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/LeftJoinTablesRequest.java index 2cd28ad2a29..8c283080ab8 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/LeftJoinTablesRequest.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/LeftJoinTablesRequest.java @@ -11,9 +11,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb.LeftJoinTablesRequest", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb.LeftJoinTablesRequest", + namespace = JsPackage.GLOBAL) public class LeftJoinTablesRequest { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -43,7 +43,7 @@ public interface ResultIdFieldType { public interface GetTicketUnionType { @JsOverlay static LeftJoinTablesRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -78,20 +78,20 @@ static LeftJoinTablesRequest.ToObjectReturnType.ResultIdFieldType create() { @JsProperty void setTicket( - LeftJoinTablesRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType ticket); + LeftJoinTablesRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); 
} @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -169,7 +169,7 @@ public interface ResultIdFieldType { public interface GetTicketUnionType { @JsOverlay static LeftJoinTablesRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -204,20 +204,20 @@ static LeftJoinTablesRequest.ToObjectReturnType0.ResultIdFieldType create() { @JsProperty void setTicket( - LeftJoinTablesRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType ticket); + LeftJoinTablesRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -270,12 +270,12 @@ default void setColumnsToMatchList(String[] columnsToMatchList) { public static native LeftJoinTablesRequest deserializeBinary(Uint8Array bytes); public static native LeftJoinTablesRequest deserializeBinaryFromReader( - LeftJoinTablesRequest message, Object reader); + LeftJoinTablesRequest message, Object reader); public static native void serializeBinaryToWriter(LeftJoinTablesRequest message, Object writer); public static native LeftJoinTablesRequest.ToObjectReturnType toObject( - boolean includeInstance, LeftJoinTablesRequest msg); + boolean includeInstance, LeftJoinTablesRequest msg); public native String addColumnsToAdd(String value, double index); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/Literal.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/Literal.java index 4cb45808651..3f2be608c7c 100644 --- 
a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/Literal.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/Literal.java @@ -10,9 +10,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb.Literal", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb.Literal", + namespace = JsPackage.GLOBAL) public class Literal { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/MatchTypeMap.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/MatchTypeMap.java index 1ce3a2b40ef..4476b87de02 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/MatchTypeMap.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/MatchTypeMap.java @@ -8,9 +8,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb.MatchTypeMap", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb.MatchTypeMap", + namespace = JsPackage.GLOBAL) public interface MatchTypeMap { @JsOverlay static MatchTypeMap create() { diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/MatchesCondition.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/MatchesCondition.java index 20489ee944b..5339679c98f 100644 --- 
a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/MatchesCondition.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/MatchesCondition.java @@ -9,9 +9,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb.MatchesCondition", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb.MatchesCondition", + namespace = JsPackage.GLOBAL) public class MatchesCondition { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -108,12 +108,12 @@ static MatchesCondition.ToObjectReturnType0 create() { public static native MatchesCondition deserializeBinary(Uint8Array bytes); public static native MatchesCondition deserializeBinaryFromReader( - MatchesCondition message, Object reader); + MatchesCondition message, Object reader); public static native void serializeBinaryToWriter(MatchesCondition message, Object writer); public static native MatchesCondition.ToObjectReturnType toObject( - boolean includeInstance, MatchesCondition msg); + boolean includeInstance, MatchesCondition msg); public native void clearReference(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/MergeTablesRequest.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/MergeTablesRequest.java index dcd8910517c..bd0eb00c420 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/MergeTablesRequest.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/MergeTablesRequest.java @@ -11,9 +11,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = 
"dhinternal.io.deephaven.proto.table_pb.MergeTablesRequest", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb.MergeTablesRequest", + namespace = JsPackage.GLOBAL) public class MergeTablesRequest { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -23,7 +23,7 @@ public interface ResultIdFieldType { public interface GetTicketUnionType { @JsOverlay static MergeTablesRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -58,20 +58,20 @@ static MergeTablesRequest.ToObjectReturnType.ResultIdFieldType create() { @JsProperty void setTicket( - MergeTablesRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType ticket); + MergeTablesRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -117,14 +117,14 @@ static MergeTablesRequest.ToObjectReturnType create() { @JsProperty void setSourceIdsList( - JsArray sourceIdsList); + JsArray sourceIdsList); @JsOverlay default void setSourceIdsList( - MergeTablesRequest.ToObjectReturnType.SourceIdsListFieldType[] sourceIdsList) { + MergeTablesRequest.ToObjectReturnType.SourceIdsListFieldType[] sourceIdsList) { setSourceIdsList( - Js.>uncheckedCast( - sourceIdsList)); + Js.>uncheckedCast( + sourceIdsList)); } } @@ -136,7 +136,7 @@ public interface ResultIdFieldType { public interface GetTicketUnionType { @JsOverlay static MergeTablesRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -171,20 +171,20 @@ static MergeTablesRequest.ToObjectReturnType0.ResultIdFieldType create() { @JsProperty void setTicket( - 
MergeTablesRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType ticket); + MergeTablesRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -230,26 +230,26 @@ static MergeTablesRequest.ToObjectReturnType0 create() { @JsProperty void setSourceIdsList( - JsArray sourceIdsList); + JsArray sourceIdsList); @JsOverlay default void setSourceIdsList( - MergeTablesRequest.ToObjectReturnType0.SourceIdsListFieldType[] sourceIdsList) { + MergeTablesRequest.ToObjectReturnType0.SourceIdsListFieldType[] sourceIdsList) { setSourceIdsList( - Js.>uncheckedCast( - sourceIdsList)); + Js.>uncheckedCast( + sourceIdsList)); } } public static native MergeTablesRequest deserializeBinary(Uint8Array bytes); public static native MergeTablesRequest deserializeBinaryFromReader( - MergeTablesRequest message, Object reader); + MergeTablesRequest message, Object reader); public static native void serializeBinaryToWriter(MergeTablesRequest message, Object writer); public static native MergeTablesRequest.ToObjectReturnType toObject( - boolean includeInstance, MergeTablesRequest msg); + boolean includeInstance, MergeTablesRequest msg); public native TableReference addSourceIds(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/NaturalJoinTablesRequest.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/NaturalJoinTablesRequest.java index caabc64de7f..b170caf4b7c 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/NaturalJoinTablesRequest.java +++ 
b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/NaturalJoinTablesRequest.java @@ -11,9 +11,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb.NaturalJoinTablesRequest", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb.NaturalJoinTablesRequest", + namespace = JsPackage.GLOBAL) public class NaturalJoinTablesRequest { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -43,7 +43,7 @@ public interface ResultIdFieldType { public interface GetTicketUnionType { @JsOverlay static NaturalJoinTablesRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -78,20 +78,20 @@ static NaturalJoinTablesRequest.ToObjectReturnType.ResultIdFieldType create() { @JsProperty void setTicket( - NaturalJoinTablesRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType ticket); + NaturalJoinTablesRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -169,7 +169,7 @@ public interface ResultIdFieldType { public interface GetTicketUnionType { @JsOverlay static NaturalJoinTablesRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -204,20 +204,20 @@ static NaturalJoinTablesRequest.ToObjectReturnType0.ResultIdFieldType create() { @JsProperty void setTicket( - NaturalJoinTablesRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType ticket); + NaturalJoinTablesRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType ticket); @JsOverlay 
default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -270,13 +270,13 @@ default void setColumnsToMatchList(String[] columnsToMatchList) { public static native NaturalJoinTablesRequest deserializeBinary(Uint8Array bytes); public static native NaturalJoinTablesRequest deserializeBinaryFromReader( - NaturalJoinTablesRequest message, Object reader); + NaturalJoinTablesRequest message, Object reader); public static native void serializeBinaryToWriter( - NaturalJoinTablesRequest message, Object writer); + NaturalJoinTablesRequest message, Object writer); public static native NaturalJoinTablesRequest.ToObjectReturnType toObject( - boolean includeInstance, NaturalJoinTablesRequest msg); + boolean includeInstance, NaturalJoinTablesRequest msg); public native String addColumnsToAdd(String value, double index); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/NotCondition.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/NotCondition.java index 9bfb587faf9..321c5cf9c92 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/NotCondition.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/NotCondition.java @@ -10,9 +10,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb.NotCondition", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb.NotCondition", + namespace = JsPackage.GLOBAL) public class NotCondition { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ 
-106,11 +106,11 @@ static NotCondition.ToObjectReturnType.FilterFieldType.CompareFieldType.LhsField @JsProperty void setLiteral( - NotCondition.ToObjectReturnType.FilterFieldType.CompareFieldType.LhsFieldType.LiteralFieldType literal); + NotCondition.ToObjectReturnType.FilterFieldType.CompareFieldType.LhsFieldType.LiteralFieldType literal); @JsProperty void setReference( - NotCondition.ToObjectReturnType.FilterFieldType.CompareFieldType.LhsFieldType.ReferenceFieldType reference); + NotCondition.ToObjectReturnType.FilterFieldType.CompareFieldType.LhsFieldType.ReferenceFieldType reference); } @JsOverlay @@ -135,7 +135,7 @@ static NotCondition.ToObjectReturnType.FilterFieldType.CompareFieldType create() @JsProperty void setLhs( - NotCondition.ToObjectReturnType.FilterFieldType.CompareFieldType.LhsFieldType lhs); + NotCondition.ToObjectReturnType.FilterFieldType.CompareFieldType.LhsFieldType lhs); @JsProperty void setOperation(double operation); @@ -327,8 +327,7 @@ static NotCondition.ToObjectReturnType.FilterFieldType.SearchFieldType create() @JsOverlay default void setOptionalReferencesList(Object[] optionalReferencesList) { - setOptionalReferencesList( - Js.>uncheckedCast(optionalReferencesList)); + setOptionalReferencesList(Js.>uncheckedCast(optionalReferencesList)); } @JsProperty @@ -374,12 +373,10 @@ static NotCondition.ToObjectReturnType.FilterFieldType create() { void setAnd(NotCondition.ToObjectReturnType.FilterFieldType.AndFieldType and); @JsProperty - void setCompare( - NotCondition.ToObjectReturnType.FilterFieldType.CompareFieldType compare); + void setCompare(NotCondition.ToObjectReturnType.FilterFieldType.CompareFieldType compare); @JsProperty - void setContains( - NotCondition.ToObjectReturnType.FilterFieldType.ContainsFieldType contains); + void setContains(NotCondition.ToObjectReturnType.FilterFieldType.ContainsFieldType contains); @JsProperty void setInvoke(NotCondition.ToObjectReturnType.FilterFieldType.InvokeFieldType invoke); @@ -388,8 +385,7 @@ 
void setContains( void setIsNull(NotCondition.ToObjectReturnType.FilterFieldType.IsNullFieldType isNull); @JsProperty - void setMatches( - NotCondition.ToObjectReturnType.FilterFieldType.MatchesFieldType matches); + void setMatches(NotCondition.ToObjectReturnType.FilterFieldType.MatchesFieldType matches); @JsProperty void setNot(Object not); @@ -508,11 +504,11 @@ static NotCondition.ToObjectReturnType0.FilterFieldType.CompareFieldType.LhsFiel @JsProperty void setLiteral( - NotCondition.ToObjectReturnType0.FilterFieldType.CompareFieldType.LhsFieldType.LiteralFieldType literal); + NotCondition.ToObjectReturnType0.FilterFieldType.CompareFieldType.LhsFieldType.LiteralFieldType literal); @JsProperty void setReference( - NotCondition.ToObjectReturnType0.FilterFieldType.CompareFieldType.LhsFieldType.ReferenceFieldType reference); + NotCondition.ToObjectReturnType0.FilterFieldType.CompareFieldType.LhsFieldType.ReferenceFieldType reference); } @JsOverlay @@ -537,7 +533,7 @@ static NotCondition.ToObjectReturnType0.FilterFieldType.CompareFieldType create( @JsProperty void setLhs( - NotCondition.ToObjectReturnType0.FilterFieldType.CompareFieldType.LhsFieldType lhs); + NotCondition.ToObjectReturnType0.FilterFieldType.CompareFieldType.LhsFieldType lhs); @JsProperty void setOperation(double operation); @@ -729,8 +725,7 @@ static NotCondition.ToObjectReturnType0.FilterFieldType.SearchFieldType create() @JsOverlay default void setOptionalReferencesList(Object[] optionalReferencesList) { - setOptionalReferencesList( - Js.>uncheckedCast(optionalReferencesList)); + setOptionalReferencesList(Js.>uncheckedCast(optionalReferencesList)); } @JsProperty @@ -776,12 +771,10 @@ static NotCondition.ToObjectReturnType0.FilterFieldType create() { void setAnd(NotCondition.ToObjectReturnType0.FilterFieldType.AndFieldType and); @JsProperty - void setCompare( - NotCondition.ToObjectReturnType0.FilterFieldType.CompareFieldType compare); + void 
setCompare(NotCondition.ToObjectReturnType0.FilterFieldType.CompareFieldType compare); @JsProperty - void setContains( - NotCondition.ToObjectReturnType0.FilterFieldType.ContainsFieldType contains); + void setContains(NotCondition.ToObjectReturnType0.FilterFieldType.ContainsFieldType contains); @JsProperty void setInvoke(NotCondition.ToObjectReturnType0.FilterFieldType.InvokeFieldType invoke); @@ -790,8 +783,7 @@ void setContains( void setIsNull(NotCondition.ToObjectReturnType0.FilterFieldType.IsNullFieldType isNull); @JsProperty - void setMatches( - NotCondition.ToObjectReturnType0.FilterFieldType.MatchesFieldType matches); + void setMatches(NotCondition.ToObjectReturnType0.FilterFieldType.MatchesFieldType matches); @JsProperty void setNot(Object not); @@ -821,12 +813,12 @@ static NotCondition.ToObjectReturnType0 create() { public static native NotCondition deserializeBinary(Uint8Array bytes); public static native NotCondition deserializeBinaryFromReader( - NotCondition message, Object reader); + NotCondition message, Object reader); public static native void serializeBinaryToWriter(NotCondition message, Object writer); public static native NotCondition.ToObjectReturnType toObject( - boolean includeInstance, NotCondition msg); + boolean includeInstance, NotCondition msg); public native void clearFilter(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/OrCondition.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/OrCondition.java index 703f402829e..fb26843a5fd 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/OrCondition.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/OrCondition.java @@ -10,9 +10,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = 
"dhinternal.io.deephaven.proto.table_pb.OrCondition", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb.OrCondition", + namespace = JsPackage.GLOBAL) public class OrCondition { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -106,11 +106,11 @@ static OrCondition.ToObjectReturnType.FiltersListFieldType.CompareFieldType.LhsF @JsProperty void setLiteral( - OrCondition.ToObjectReturnType.FiltersListFieldType.CompareFieldType.LhsFieldType.LiteralFieldType literal); + OrCondition.ToObjectReturnType.FiltersListFieldType.CompareFieldType.LhsFieldType.LiteralFieldType literal); @JsProperty void setReference( - OrCondition.ToObjectReturnType.FiltersListFieldType.CompareFieldType.LhsFieldType.ReferenceFieldType reference); + OrCondition.ToObjectReturnType.FiltersListFieldType.CompareFieldType.LhsFieldType.ReferenceFieldType reference); } @JsOverlay @@ -135,7 +135,7 @@ static OrCondition.ToObjectReturnType.FiltersListFieldType.CompareFieldType crea @JsProperty void setLhs( - OrCondition.ToObjectReturnType.FiltersListFieldType.CompareFieldType.LhsFieldType lhs); + OrCondition.ToObjectReturnType.FiltersListFieldType.CompareFieldType.LhsFieldType lhs); @JsProperty void setOperation(double operation); @@ -322,8 +322,7 @@ static OrCondition.ToObjectReturnType.FiltersListFieldType.SearchFieldType creat @JsOverlay default void setOptionalReferencesList(Object[] optionalReferencesList) { - setOptionalReferencesList( - Js.>uncheckedCast(optionalReferencesList)); + setOptionalReferencesList(Js.>uncheckedCast(optionalReferencesList)); } @JsProperty @@ -369,24 +368,20 @@ static OrCondition.ToObjectReturnType.FiltersListFieldType create() { void setAnd(OrCondition.ToObjectReturnType.FiltersListFieldType.AndFieldType and); @JsProperty - void setCompare( - OrCondition.ToObjectReturnType.FiltersListFieldType.CompareFieldType compare); + void 
setCompare(OrCondition.ToObjectReturnType.FiltersListFieldType.CompareFieldType compare); @JsProperty void setContains( - OrCondition.ToObjectReturnType.FiltersListFieldType.ContainsFieldType contains); + OrCondition.ToObjectReturnType.FiltersListFieldType.ContainsFieldType contains); @JsProperty - void setInvoke( - OrCondition.ToObjectReturnType.FiltersListFieldType.InvokeFieldType invoke); + void setInvoke(OrCondition.ToObjectReturnType.FiltersListFieldType.InvokeFieldType invoke); @JsProperty - void setIsNull( - OrCondition.ToObjectReturnType.FiltersListFieldType.IsNullFieldType isNull); + void setIsNull(OrCondition.ToObjectReturnType.FiltersListFieldType.IsNullFieldType isNull); @JsProperty - void setMatches( - OrCondition.ToObjectReturnType.FiltersListFieldType.MatchesFieldType matches); + void setMatches(OrCondition.ToObjectReturnType.FiltersListFieldType.MatchesFieldType matches); @JsProperty void setNot(OrCondition.ToObjectReturnType.FiltersListFieldType.NotFieldType not); @@ -398,8 +393,7 @@ void setMatches( void setPb_in(OrCondition.ToObjectReturnType.FiltersListFieldType.Pb_inFieldType pb_in); @JsProperty - void setSearch( - OrCondition.ToObjectReturnType.FiltersListFieldType.SearchFieldType search); + void setSearch(OrCondition.ToObjectReturnType.FiltersListFieldType.SearchFieldType search); } @JsOverlay @@ -411,16 +405,14 @@ static OrCondition.ToObjectReturnType create() { JsArray getFiltersList(); @JsOverlay - default void setFiltersList( - OrCondition.ToObjectReturnType.FiltersListFieldType[] filtersList) { + default void setFiltersList(OrCondition.ToObjectReturnType.FiltersListFieldType[] filtersList) { setFiltersList( - Js.>uncheckedCast( - filtersList)); + Js.>uncheckedCast( + filtersList)); } @JsProperty - void setFiltersList( - JsArray filtersList); + void setFiltersList(JsArray filtersList); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -515,11 +507,11 @@ static 
OrCondition.ToObjectReturnType0.FiltersListFieldType.CompareFieldType.Lhs @JsProperty void setLiteral( - OrCondition.ToObjectReturnType0.FiltersListFieldType.CompareFieldType.LhsFieldType.LiteralFieldType literal); + OrCondition.ToObjectReturnType0.FiltersListFieldType.CompareFieldType.LhsFieldType.LiteralFieldType literal); @JsProperty void setReference( - OrCondition.ToObjectReturnType0.FiltersListFieldType.CompareFieldType.LhsFieldType.ReferenceFieldType reference); + OrCondition.ToObjectReturnType0.FiltersListFieldType.CompareFieldType.LhsFieldType.ReferenceFieldType reference); } @JsOverlay @@ -544,7 +536,7 @@ static OrCondition.ToObjectReturnType0.FiltersListFieldType.CompareFieldType cre @JsProperty void setLhs( - OrCondition.ToObjectReturnType0.FiltersListFieldType.CompareFieldType.LhsFieldType lhs); + OrCondition.ToObjectReturnType0.FiltersListFieldType.CompareFieldType.LhsFieldType lhs); @JsProperty void setOperation(double operation); @@ -731,8 +723,7 @@ static OrCondition.ToObjectReturnType0.FiltersListFieldType.SearchFieldType crea @JsOverlay default void setOptionalReferencesList(Object[] optionalReferencesList) { - setOptionalReferencesList( - Js.>uncheckedCast(optionalReferencesList)); + setOptionalReferencesList(Js.>uncheckedCast(optionalReferencesList)); } @JsProperty @@ -779,23 +770,21 @@ static OrCondition.ToObjectReturnType0.FiltersListFieldType create() { @JsProperty void setCompare( - OrCondition.ToObjectReturnType0.FiltersListFieldType.CompareFieldType compare); + OrCondition.ToObjectReturnType0.FiltersListFieldType.CompareFieldType compare); @JsProperty void setContains( - OrCondition.ToObjectReturnType0.FiltersListFieldType.ContainsFieldType contains); + OrCondition.ToObjectReturnType0.FiltersListFieldType.ContainsFieldType contains); @JsProperty - void setInvoke( - OrCondition.ToObjectReturnType0.FiltersListFieldType.InvokeFieldType invoke); + void setInvoke(OrCondition.ToObjectReturnType0.FiltersListFieldType.InvokeFieldType invoke); 
@JsProperty - void setIsNull( - OrCondition.ToObjectReturnType0.FiltersListFieldType.IsNullFieldType isNull); + void setIsNull(OrCondition.ToObjectReturnType0.FiltersListFieldType.IsNullFieldType isNull); @JsProperty void setMatches( - OrCondition.ToObjectReturnType0.FiltersListFieldType.MatchesFieldType matches); + OrCondition.ToObjectReturnType0.FiltersListFieldType.MatchesFieldType matches); @JsProperty void setNot(OrCondition.ToObjectReturnType0.FiltersListFieldType.NotFieldType not); @@ -804,12 +793,10 @@ void setMatches( void setOr(Object or); @JsProperty - void setPb_in( - OrCondition.ToObjectReturnType0.FiltersListFieldType.Pb_inFieldType pb_in); + void setPb_in(OrCondition.ToObjectReturnType0.FiltersListFieldType.Pb_inFieldType pb_in); @JsProperty - void setSearch( - OrCondition.ToObjectReturnType0.FiltersListFieldType.SearchFieldType search); + void setSearch(OrCondition.ToObjectReturnType0.FiltersListFieldType.SearchFieldType search); } @JsOverlay @@ -822,26 +809,24 @@ static OrCondition.ToObjectReturnType0 create() { @JsOverlay default void setFiltersList( - OrCondition.ToObjectReturnType0.FiltersListFieldType[] filtersList) { + OrCondition.ToObjectReturnType0.FiltersListFieldType[] filtersList) { setFiltersList( - Js.>uncheckedCast( - filtersList)); + Js.>uncheckedCast( + filtersList)); } @JsProperty - void setFiltersList( - JsArray filtersList); + void setFiltersList(JsArray filtersList); } public static native OrCondition deserializeBinary(Uint8Array bytes); - public static native OrCondition deserializeBinaryFromReader(OrCondition message, - Object reader); + public static native OrCondition deserializeBinaryFromReader(OrCondition message, Object reader); public static native void serializeBinaryToWriter(OrCondition message, Object writer); public static native OrCondition.ToObjectReturnType toObject( - boolean includeInstance, OrCondition msg); + boolean includeInstance, OrCondition msg); public native Condition addFilters(); diff --git 
a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/Reference.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/Reference.java index 31b5f125fee..bb03a4fec53 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/Reference.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/Reference.java @@ -9,9 +9,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb.Reference", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb.Reference", + namespace = JsPackage.GLOBAL) public class Reference { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -48,7 +48,7 @@ static Reference.ToObjectReturnType0 create() { public static native void serializeBinaryToWriter(Reference message, Object writer); public static native Reference.ToObjectReturnType toObject( - boolean includeInstance, Reference msg); + boolean includeInstance, Reference msg); public native String getColumnName(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/RunChartDownsampleRequest.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/RunChartDownsampleRequest.java index c9cd1ad3df4..891eef16c48 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/RunChartDownsampleRequest.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/RunChartDownsampleRequest.java @@ -12,9 +12,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = 
"dhinternal.io.deephaven.proto.table_pb.RunChartDownsampleRequest", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb.RunChartDownsampleRequest", + namespace = JsPackage.GLOBAL) public class RunChartDownsampleRequest { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -24,7 +24,7 @@ public interface ResultIdFieldType { public interface GetTicketUnionType { @JsOverlay static RunChartDownsampleRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -59,20 +59,20 @@ static RunChartDownsampleRequest.ToObjectReturnType.ResultIdFieldType create() { @JsProperty void setTicket( - RunChartDownsampleRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType ticket); + RunChartDownsampleRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -160,8 +160,7 @@ default void setYColumnNamesList(String[] yColumnNamesList) { } @JsProperty - void setZoomRange( - RunChartDownsampleRequest.ToObjectReturnType.ZoomRangeFieldType zoomRange); + void setZoomRange(RunChartDownsampleRequest.ToObjectReturnType.ZoomRangeFieldType zoomRange); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -171,8 +170,7 @@ public interface ResultIdFieldType { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface GetTicketUnionType { @JsOverlay - static RunChartDownsampleRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType of( - Object o) { + static RunChartDownsampleRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType of(Object o) { return Js.cast(o); } @@ -207,20 +205,20 @@ static 
RunChartDownsampleRequest.ToObjectReturnType0.ResultIdFieldType create() @JsProperty void setTicket( - RunChartDownsampleRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType ticket); + RunChartDownsampleRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -308,20 +306,19 @@ default void setYColumnNamesList(String[] yColumnNamesList) { } @JsProperty - void setZoomRange( - RunChartDownsampleRequest.ToObjectReturnType0.ZoomRangeFieldType zoomRange); + void setZoomRange(RunChartDownsampleRequest.ToObjectReturnType0.ZoomRangeFieldType zoomRange); } public static native RunChartDownsampleRequest deserializeBinary(Uint8Array bytes); public static native RunChartDownsampleRequest deserializeBinaryFromReader( - RunChartDownsampleRequest message, Object reader); + RunChartDownsampleRequest message, Object reader); public static native void serializeBinaryToWriter( - RunChartDownsampleRequest message, Object writer); + RunChartDownsampleRequest message, Object writer); public static native RunChartDownsampleRequest.ToObjectReturnType toObject( - boolean includeInstance, RunChartDownsampleRequest msg); + boolean includeInstance, RunChartDownsampleRequest msg); public native String addYColumnNames(String value, double index); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/SearchCondition.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/SearchCondition.java index a5ad05ddf91..5df12b8628e 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/SearchCondition.java +++ 
b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/SearchCondition.java @@ -10,9 +10,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb.SearchCondition", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb.SearchCondition", + namespace = JsPackage.GLOBAL) public class SearchCondition { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -43,14 +43,14 @@ static SearchCondition.ToObjectReturnType create() { @JsProperty void setOptionalReferencesList( - JsArray optionalReferencesList); + JsArray optionalReferencesList); @JsOverlay default void setOptionalReferencesList( - SearchCondition.ToObjectReturnType.OptionalReferencesListFieldType[] optionalReferencesList) { + SearchCondition.ToObjectReturnType.OptionalReferencesListFieldType[] optionalReferencesList) { setOptionalReferencesList( - Js.>uncheckedCast( - optionalReferencesList)); + Js.>uncheckedCast( + optionalReferencesList)); } @JsProperty @@ -86,14 +86,14 @@ static SearchCondition.ToObjectReturnType0 create() { @JsProperty void setOptionalReferencesList( - JsArray optionalReferencesList); + JsArray optionalReferencesList); @JsOverlay default void setOptionalReferencesList( - SearchCondition.ToObjectReturnType0.OptionalReferencesListFieldType[] optionalReferencesList) { + SearchCondition.ToObjectReturnType0.OptionalReferencesListFieldType[] optionalReferencesList) { setOptionalReferencesList( - Js.>uncheckedCast( - optionalReferencesList)); + Js.>uncheckedCast( + optionalReferencesList)); } @JsProperty @@ -103,12 +103,12 @@ default void setOptionalReferencesList( public static native SearchCondition deserializeBinary(Uint8Array bytes); public static native SearchCondition deserializeBinaryFromReader( - SearchCondition message, Object reader); + SearchCondition message, Object reader); 
public static native void serializeBinaryToWriter(SearchCondition message, Object writer); public static native SearchCondition.ToObjectReturnType toObject( - boolean includeInstance, SearchCondition msg); + boolean includeInstance, SearchCondition msg); public native Reference addOptionalReferences(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/SelectDistinctRequest.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/SelectDistinctRequest.java index 78b6c2e2a3c..dff88a5105f 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/SelectDistinctRequest.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/SelectDistinctRequest.java @@ -11,9 +11,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb.SelectDistinctRequest", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb.SelectDistinctRequest", + namespace = JsPackage.GLOBAL) public class SelectDistinctRequest { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -23,7 +23,7 @@ public interface ResultIdFieldType { public interface GetTicketUnionType { @JsOverlay static SelectDistinctRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -58,20 +58,20 @@ static SelectDistinctRequest.ToObjectReturnType.ResultIdFieldType create() { @JsProperty void setTicket( - SelectDistinctRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType ticket); + SelectDistinctRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + 
Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -132,7 +132,7 @@ public interface ResultIdFieldType { public interface GetTicketUnionType { @JsOverlay static SelectDistinctRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -167,20 +167,20 @@ static SelectDistinctRequest.ToObjectReturnType0.ResultIdFieldType create() { @JsProperty void setTicket( - SelectDistinctRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType ticket); + SelectDistinctRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -236,12 +236,12 @@ default void setColumnNamesList(String[] columnNamesList) { public static native SelectDistinctRequest deserializeBinary(Uint8Array bytes); public static native SelectDistinctRequest deserializeBinaryFromReader( - SelectDistinctRequest message, Object reader); + SelectDistinctRequest message, Object reader); public static native void serializeBinaryToWriter(SelectDistinctRequest message, Object writer); public static native SelectDistinctRequest.ToObjectReturnType toObject( - boolean includeInstance, SelectDistinctRequest msg); + boolean includeInstance, SelectDistinctRequest msg); public native String addColumnNames(String value, double index); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/SelectOrUpdateRequest.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/SelectOrUpdateRequest.java index 6b09abf2aa7..4df678045e2 100644 --- 
a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/SelectOrUpdateRequest.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/SelectOrUpdateRequest.java @@ -11,9 +11,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb.SelectOrUpdateRequest", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb.SelectOrUpdateRequest", + namespace = JsPackage.GLOBAL) public class SelectOrUpdateRequest { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -23,7 +23,7 @@ public interface ResultIdFieldType { public interface GetTicketUnionType { @JsOverlay static SelectOrUpdateRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -58,20 +58,20 @@ static SelectOrUpdateRequest.ToObjectReturnType.ResultIdFieldType create() { @JsProperty void setTicket( - SelectOrUpdateRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType ticket); + SelectOrUpdateRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -132,7 +132,7 @@ public interface ResultIdFieldType { public interface GetTicketUnionType { @JsOverlay static SelectOrUpdateRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -167,20 +167,20 @@ static SelectOrUpdateRequest.ToObjectReturnType0.ResultIdFieldType create() { @JsProperty void setTicket( - SelectOrUpdateRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType ticket); + 
SelectOrUpdateRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -236,12 +236,12 @@ default void setColumnSpecsList(String[] columnSpecsList) { public static native SelectOrUpdateRequest deserializeBinary(Uint8Array bytes); public static native SelectOrUpdateRequest deserializeBinaryFromReader( - SelectOrUpdateRequest message, Object reader); + SelectOrUpdateRequest message, Object reader); public static native void serializeBinaryToWriter(SelectOrUpdateRequest message, Object writer); public static native SelectOrUpdateRequest.ToObjectReturnType toObject( - boolean includeInstance, SelectOrUpdateRequest msg); + boolean includeInstance, SelectOrUpdateRequest msg); public native String addColumnSpecs(String value, double index); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/SnapshotTableRequest.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/SnapshotTableRequest.java index db4ed2717c1..55f1e41d308 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/SnapshotTableRequest.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/SnapshotTableRequest.java @@ -11,9 +11,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb.SnapshotTableRequest", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb.SnapshotTableRequest", + namespace = JsPackage.GLOBAL) public class SnapshotTableRequest { @JsType(isNative = true, name = "?", 
namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -43,7 +43,7 @@ public interface ResultIdFieldType { public interface GetTicketUnionType { @JsOverlay static SnapshotTableRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -78,20 +78,20 @@ static SnapshotTableRequest.ToObjectReturnType.ResultIdFieldType create() { @JsProperty void setTicket( - SnapshotTableRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType ticket); + SnapshotTableRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -164,7 +164,7 @@ public interface ResultIdFieldType { public interface GetTicketUnionType { @JsOverlay static SnapshotTableRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -199,20 +199,20 @@ static SnapshotTableRequest.ToObjectReturnType0.ResultIdFieldType create() { @JsProperty void setTicket( - SnapshotTableRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType ticket); + SnapshotTableRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -260,12 +260,12 @@ default void setStampColumnsList(String[] stampColumnsList) { public static native SnapshotTableRequest deserializeBinary(Uint8Array bytes); public static native SnapshotTableRequest deserializeBinaryFromReader( - SnapshotTableRequest message, Object reader); + SnapshotTableRequest message, 
Object reader); public static native void serializeBinaryToWriter(SnapshotTableRequest message, Object writer); public static native SnapshotTableRequest.ToObjectReturnType toObject( - boolean includeInstance, SnapshotTableRequest msg); + boolean includeInstance, SnapshotTableRequest msg); public native String addStampColumns(String value, double index); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/SortDescriptor.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/SortDescriptor.java index 355450d89be..95cca357243 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/SortDescriptor.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/SortDescriptor.java @@ -10,9 +10,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb.SortDescriptor", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb.SortDescriptor", + namespace = JsPackage.GLOBAL) public class SortDescriptor { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -71,12 +71,12 @@ static SortDescriptor.ToObjectReturnType0 create() { public static native SortDescriptor deserializeBinary(Uint8Array bytes); public static native SortDescriptor deserializeBinaryFromReader( - SortDescriptor message, Object reader); + SortDescriptor message, Object reader); public static native void serializeBinaryToWriter(SortDescriptor message, Object writer); public static native SortDescriptor.ToObjectReturnType toObject( - boolean includeInstance, SortDescriptor msg); + boolean includeInstance, SortDescriptor msg); public native String getColumnName(); diff --git 
a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/SortTableRequest.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/SortTableRequest.java index 031187c2c8b..b40a8c1c0ae 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/SortTableRequest.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/SortTableRequest.java @@ -11,9 +11,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb.SortTableRequest", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb.SortTableRequest", + namespace = JsPackage.GLOBAL) public class SortTableRequest { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -23,7 +23,7 @@ public interface ResultIdFieldType { public interface GetTicketUnionType { @JsOverlay static SortTableRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -58,20 +58,20 @@ static SortTableRequest.ToObjectReturnType.ResultIdFieldType create() { @JsProperty void setTicket( - SortTableRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType ticket); + SortTableRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -139,15 +139,13 @@ static SortTableRequest.ToObjectReturnType create() { void setResultId(SortTableRequest.ToObjectReturnType.ResultIdFieldType resultId); @JsProperty - void setSortsList( - JsArray sortsList); + 
void setSortsList(JsArray sortsList); @JsOverlay - default void setSortsList( - SortTableRequest.ToObjectReturnType.SortsListFieldType[] sortsList) { + default void setSortsList(SortTableRequest.ToObjectReturnType.SortsListFieldType[] sortsList) { setSortsList( - Js.>uncheckedCast( - sortsList)); + Js.>uncheckedCast( + sortsList)); } @JsProperty @@ -162,7 +160,7 @@ public interface ResultIdFieldType { public interface GetTicketUnionType { @JsOverlay static SortTableRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -197,20 +195,20 @@ static SortTableRequest.ToObjectReturnType0.ResultIdFieldType create() { @JsProperty void setTicket( - SortTableRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType ticket); + SortTableRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -278,15 +276,13 @@ static SortTableRequest.ToObjectReturnType0 create() { void setResultId(SortTableRequest.ToObjectReturnType0.ResultIdFieldType resultId); @JsProperty - void setSortsList( - JsArray sortsList); + void setSortsList(JsArray sortsList); @JsOverlay - default void setSortsList( - SortTableRequest.ToObjectReturnType0.SortsListFieldType[] sortsList) { + default void setSortsList(SortTableRequest.ToObjectReturnType0.SortsListFieldType[] sortsList) { setSortsList( - Js.>uncheckedCast( - sortsList)); + Js.>uncheckedCast( + sortsList)); } @JsProperty @@ -296,12 +292,12 @@ default void setSortsList( public static native SortTableRequest deserializeBinary(Uint8Array bytes); public static native SortTableRequest deserializeBinaryFromReader( - SortTableRequest message, Object reader); + SortTableRequest message, Object reader); public 
static native void serializeBinaryToWriter(SortTableRequest message, Object writer); public static native SortTableRequest.ToObjectReturnType toObject( - boolean includeInstance, SortTableRequest msg); + boolean includeInstance, SortTableRequest msg); public native SortDescriptor addSorts(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/TableReference.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/TableReference.java index 7a2e1989e6c..cdcb841b600 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/TableReference.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/TableReference.java @@ -10,9 +10,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb.TableReference", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb.TableReference", + namespace = JsPackage.GLOBAL) public class TableReference { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -21,8 +21,7 @@ public interface TicketFieldType { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface GetTicketUnionType { @JsOverlay - static TableReference.ToObjectReturnType.TicketFieldType.GetTicketUnionType of( - Object o) { + static TableReference.ToObjectReturnType.TicketFieldType.GetTicketUnionType of(Object o) { return Js.cast(o); } @@ -56,21 +55,20 @@ static TableReference.ToObjectReturnType.TicketFieldType create() { TableReference.ToObjectReturnType.TicketFieldType.GetTicketUnionType getTicket(); @JsProperty - void setTicket( - TableReference.ToObjectReturnType.TicketFieldType.GetTicketUnionType ticket); + void 
setTicket(TableReference.ToObjectReturnType.TicketFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -99,8 +97,7 @@ public interface TicketFieldType { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface GetTicketUnionType { @JsOverlay - static TableReference.ToObjectReturnType0.TicketFieldType.GetTicketUnionType of( - Object o) { + static TableReference.ToObjectReturnType0.TicketFieldType.GetTicketUnionType of(Object o) { return Js.cast(o); } @@ -134,21 +131,20 @@ static TableReference.ToObjectReturnType0.TicketFieldType create() { TableReference.ToObjectReturnType0.TicketFieldType.GetTicketUnionType getTicket(); @JsProperty - void setTicket( - TableReference.ToObjectReturnType0.TicketFieldType.GetTicketUnionType ticket); + void setTicket(TableReference.ToObjectReturnType0.TicketFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -173,12 +169,12 @@ static TableReference.ToObjectReturnType0 create() { public static native TableReference deserializeBinary(Uint8Array bytes); public static native TableReference deserializeBinaryFromReader( - TableReference message, Object reader); + TableReference message, Object reader); public static native void serializeBinaryToWriter(TableReference message, Object writer); public static native TableReference.ToObjectReturnType toObject( - boolean includeInstance, TableReference msg); + boolean includeInstance, TableReference msg); public native void clearBatchOffset(); diff --git 
a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/TimeTableRequest.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/TimeTableRequest.java index 106ca0d706f..51c7032ce83 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/TimeTableRequest.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/TimeTableRequest.java @@ -10,9 +10,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb.TimeTableRequest", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb.TimeTableRequest", + namespace = JsPackage.GLOBAL) public class TimeTableRequest { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -22,7 +22,7 @@ public interface ResultIdFieldType { public interface GetTicketUnionType { @JsOverlay static TimeTableRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -57,20 +57,20 @@ static TimeTableRequest.ToObjectReturnType.ResultIdFieldType create() { @JsProperty void setTicket( - TimeTableRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType ticket); + TimeTableRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -106,7 +106,7 @@ public interface ResultIdFieldType { public interface GetTicketUnionType { @JsOverlay static TimeTableRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType of( - Object o) { + Object 
o) { return Js.cast(o); } @@ -141,20 +141,20 @@ static TimeTableRequest.ToObjectReturnType0.ResultIdFieldType create() { @JsProperty void setTicket( - TimeTableRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType ticket); + TimeTableRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -185,12 +185,12 @@ static TimeTableRequest.ToObjectReturnType0 create() { public static native TimeTableRequest deserializeBinary(Uint8Array bytes); public static native TimeTableRequest deserializeBinaryFromReader( - TimeTableRequest message, Object reader); + TimeTableRequest message, Object reader); public static native void serializeBinaryToWriter(TimeTableRequest message, Object writer); public static native TimeTableRequest.ToObjectReturnType toObject( - boolean includeInstance, TimeTableRequest msg); + boolean includeInstance, TimeTableRequest msg); public native void clearResultId(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/UngroupRequest.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/UngroupRequest.java index 9da8122ba28..219f38436b0 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/UngroupRequest.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/UngroupRequest.java @@ -11,9 +11,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb.UngroupRequest", - namespace = JsPackage.GLOBAL) + isNative = true, + name = 
"dhinternal.io.deephaven.proto.table_pb.UngroupRequest", + namespace = JsPackage.GLOBAL) public class UngroupRequest { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -22,8 +22,7 @@ public interface ResultIdFieldType { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface GetTicketUnionType { @JsOverlay - static UngroupRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType of( - Object o) { + static UngroupRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType of(Object o) { return Js.cast(o); } @@ -57,21 +56,20 @@ static UngroupRequest.ToObjectReturnType.ResultIdFieldType create() { UngroupRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType getTicket(); @JsProperty - void setTicket( - UngroupRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType ticket); + void setTicket(UngroupRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -138,7 +136,7 @@ public interface ResultIdFieldType { public interface GetTicketUnionType { @JsOverlay static UngroupRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -173,20 +171,20 @@ static UngroupRequest.ToObjectReturnType0.ResultIdFieldType create() { @JsProperty void setTicket( - UngroupRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType ticket); + UngroupRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - 
ticket)); + Js.uncheckedCast( + ticket)); } } @@ -248,12 +246,12 @@ default void setColumnsToUngroupList(String[] columnsToUngroupList) { public static native UngroupRequest deserializeBinary(Uint8Array bytes); public static native UngroupRequest deserializeBinaryFromReader( - UngroupRequest message, Object reader); + UngroupRequest message, Object reader); public static native void serializeBinaryToWriter(UngroupRequest message, Object writer); public static native UngroupRequest.ToObjectReturnType toObject( - boolean includeInstance, UngroupRequest msg); + boolean includeInstance, UngroupRequest msg); public native String addColumnsToUngroup(String value, double index); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/UnstructuredFilterTableRequest.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/UnstructuredFilterTableRequest.java index 82dc8fa33d5..cdaf7dc4460 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/UnstructuredFilterTableRequest.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/UnstructuredFilterTableRequest.java @@ -11,9 +11,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb.UnstructuredFilterTableRequest", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb.UnstructuredFilterTableRequest", + namespace = JsPackage.GLOBAL) public class UnstructuredFilterTableRequest { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -23,7 +23,7 @@ public interface ResultIdFieldType { public interface GetTicketUnionType { @JsOverlay static UnstructuredFilterTableRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType of( - Object 
o) { + Object o) { return Js.cast(o); } @@ -58,20 +58,20 @@ static UnstructuredFilterTableRequest.ToObjectReturnType.ResultIdFieldType creat @JsProperty void setTicket( - UnstructuredFilterTableRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType ticket); + UnstructuredFilterTableRequest.ToObjectReturnType.ResultIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -118,12 +118,10 @@ default void setFiltersList(String[] filtersList) { } @JsProperty - void setResultId( - UnstructuredFilterTableRequest.ToObjectReturnType.ResultIdFieldType resultId); + void setResultId(UnstructuredFilterTableRequest.ToObjectReturnType.ResultIdFieldType resultId); @JsProperty - void setSourceId( - UnstructuredFilterTableRequest.ToObjectReturnType.SourceIdFieldType sourceId); + void setSourceId(UnstructuredFilterTableRequest.ToObjectReturnType.SourceIdFieldType sourceId); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -134,7 +132,7 @@ public interface ResultIdFieldType { public interface GetTicketUnionType { @JsOverlay static UnstructuredFilterTableRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -169,20 +167,20 @@ static UnstructuredFilterTableRequest.ToObjectReturnType0.ResultIdFieldType crea @JsProperty void setTicket( - UnstructuredFilterTableRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType ticket); + UnstructuredFilterTableRequest.ToObjectReturnType0.ResultIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - 
ticket)); + Js.uncheckedCast( + ticket)); } } @@ -229,24 +227,22 @@ default void setFiltersList(String[] filtersList) { } @JsProperty - void setResultId( - UnstructuredFilterTableRequest.ToObjectReturnType0.ResultIdFieldType resultId); + void setResultId(UnstructuredFilterTableRequest.ToObjectReturnType0.ResultIdFieldType resultId); @JsProperty - void setSourceId( - UnstructuredFilterTableRequest.ToObjectReturnType0.SourceIdFieldType sourceId); + void setSourceId(UnstructuredFilterTableRequest.ToObjectReturnType0.SourceIdFieldType sourceId); } public static native UnstructuredFilterTableRequest deserializeBinary(Uint8Array bytes); public static native UnstructuredFilterTableRequest deserializeBinaryFromReader( - UnstructuredFilterTableRequest message, Object reader); + UnstructuredFilterTableRequest message, Object reader); public static native void serializeBinaryToWriter( - UnstructuredFilterTableRequest message, Object writer); + UnstructuredFilterTableRequest message, Object writer); public static native UnstructuredFilterTableRequest.ToObjectReturnType toObject( - boolean includeInstance, UnstructuredFilterTableRequest msg); + boolean includeInstance, UnstructuredFilterTableRequest msg); public native String addFilters(String value, double index); @@ -288,5 +284,5 @@ public final void setFiltersList(String[] value) { public native UnstructuredFilterTableRequest.ToObjectReturnType0 toObject(); public native UnstructuredFilterTableRequest.ToObjectReturnType0 toObject( - boolean includeInstance); + boolean includeInstance); } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/Value.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/Value.java index 985b0247b87..85e83f58661 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/Value.java +++ 
b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/Value.java @@ -9,9 +9,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb.Value", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb.Value", + namespace = JsPackage.GLOBAL) public class Value { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/asofjointablesrequest/MatchRuleMap.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/asofjointablesrequest/MatchRuleMap.java index 86959283f0f..9a6c01e1130 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/asofjointablesrequest/MatchRuleMap.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/asofjointablesrequest/MatchRuleMap.java @@ -8,9 +8,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb.AsOfJoinTablesRequest.MatchRuleMap", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb.AsOfJoinTablesRequest.MatchRuleMap", + namespace = JsPackage.GLOBAL) public interface MatchRuleMap { @JsOverlay static MatchRuleMap create() { diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/batchtablerequest/Operation.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/batchtablerequest/Operation.java index d315f70d798..730f8612de7 100644 --- 
a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/batchtablerequest/Operation.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/batchtablerequest/Operation.java @@ -31,9 +31,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb.BatchTableRequest.Operation", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb.BatchTableRequest.Operation", + namespace = JsPackage.GLOBAL) public class Operation { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -158,15 +158,15 @@ static Operation.ToObjectReturnType.ComboAggregateFieldType create() { @JsOverlay default void setAggregatesList( - Operation.ToObjectReturnType.ComboAggregateFieldType.AggregatesListFieldType[] aggregatesList) { + Operation.ToObjectReturnType.ComboAggregateFieldType.AggregatesListFieldType[] aggregatesList) { setAggregatesList( - Js.>uncheckedCast( - aggregatesList)); + Js.>uncheckedCast( + aggregatesList)); } @JsProperty void setAggregatesList( - JsArray aggregatesList); + JsArray aggregatesList); @JsProperty void setForceCombo(boolean forceCombo); @@ -289,7 +289,7 @@ default void setColumnNamesList(String[] columnNamesList) { @JsProperty void setSourceId( - Operation.ToObjectReturnType.DropColumnsFieldType.SourceIdFieldType sourceId); + Operation.ToObjectReturnType.DropColumnsFieldType.SourceIdFieldType sourceId); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -300,7 +300,7 @@ public interface ResultIdFieldType { public interface GetTicketUnionType { @JsOverlay static Operation.ToObjectReturnType.EmptyTableFieldType.ResultIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -335,20 +335,20 @@ static 
Operation.ToObjectReturnType.EmptyTableFieldType.ResultIdFieldType create @JsProperty void setTicket( - Operation.ToObjectReturnType.EmptyTableFieldType.ResultIdFieldType.GetTicketUnionType ticket); + Operation.ToObjectReturnType.EmptyTableFieldType.ResultIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -364,8 +364,7 @@ static Operation.ToObjectReturnType.EmptyTableFieldType create() { String getSize(); @JsProperty - void setResultId( - Operation.ToObjectReturnType.EmptyTableFieldType.ResultIdFieldType resultId); + void setResultId(Operation.ToObjectReturnType.EmptyTableFieldType.ResultIdFieldType resultId); @JsProperty void setSize(String size); @@ -511,11 +510,11 @@ static Operation.ToObjectReturnType.FilterFieldType.FiltersListFieldType.Compare @JsProperty void setLiteral( - Operation.ToObjectReturnType.FilterFieldType.FiltersListFieldType.CompareFieldType.LhsFieldType.LiteralFieldType literal); + Operation.ToObjectReturnType.FilterFieldType.FiltersListFieldType.CompareFieldType.LhsFieldType.LiteralFieldType literal); @JsProperty void setReference( - Operation.ToObjectReturnType.FilterFieldType.FiltersListFieldType.CompareFieldType.LhsFieldType.ReferenceFieldType reference); + Operation.ToObjectReturnType.FilterFieldType.FiltersListFieldType.CompareFieldType.LhsFieldType.ReferenceFieldType reference); } @JsOverlay @@ -540,7 +539,7 @@ static Operation.ToObjectReturnType.FilterFieldType.FiltersListFieldType.Compare @JsProperty void setLhs( - Operation.ToObjectReturnType.FilterFieldType.FiltersListFieldType.CompareFieldType.LhsFieldType lhs); + Operation.ToObjectReturnType.FilterFieldType.FiltersListFieldType.CompareFieldType.LhsFieldType lhs); @JsProperty void setOperation(double operation); @@ -746,8 +745,7 @@ 
static Operation.ToObjectReturnType.FilterFieldType.FiltersListFieldType.SearchF @JsOverlay default void setOptionalReferencesList(Object[] optionalReferencesList) { - setOptionalReferencesList( - Js.>uncheckedCast(optionalReferencesList)); + setOptionalReferencesList(Js.>uncheckedCast(optionalReferencesList)); } @JsProperty @@ -791,43 +789,43 @@ static Operation.ToObjectReturnType.FilterFieldType.FiltersListFieldType create( @JsProperty void setAnd( - Operation.ToObjectReturnType.FilterFieldType.FiltersListFieldType.AndFieldType and); + Operation.ToObjectReturnType.FilterFieldType.FiltersListFieldType.AndFieldType and); @JsProperty void setCompare( - Operation.ToObjectReturnType.FilterFieldType.FiltersListFieldType.CompareFieldType compare); + Operation.ToObjectReturnType.FilterFieldType.FiltersListFieldType.CompareFieldType compare); @JsProperty void setContains( - Operation.ToObjectReturnType.FilterFieldType.FiltersListFieldType.ContainsFieldType contains); + Operation.ToObjectReturnType.FilterFieldType.FiltersListFieldType.ContainsFieldType contains); @JsProperty void setInvoke( - Operation.ToObjectReturnType.FilterFieldType.FiltersListFieldType.InvokeFieldType invoke); + Operation.ToObjectReturnType.FilterFieldType.FiltersListFieldType.InvokeFieldType invoke); @JsProperty void setIsNull( - Operation.ToObjectReturnType.FilterFieldType.FiltersListFieldType.IsNullFieldType isNull); + Operation.ToObjectReturnType.FilterFieldType.FiltersListFieldType.IsNullFieldType isNull); @JsProperty void setMatches( - Operation.ToObjectReturnType.FilterFieldType.FiltersListFieldType.MatchesFieldType matches); + Operation.ToObjectReturnType.FilterFieldType.FiltersListFieldType.MatchesFieldType matches); @JsProperty void setNot( - Operation.ToObjectReturnType.FilterFieldType.FiltersListFieldType.NotFieldType not); + Operation.ToObjectReturnType.FilterFieldType.FiltersListFieldType.NotFieldType not); @JsProperty void setOr( - 
Operation.ToObjectReturnType.FilterFieldType.FiltersListFieldType.OrFieldType or); + Operation.ToObjectReturnType.FilterFieldType.FiltersListFieldType.OrFieldType or); @JsProperty void setPb_in( - Operation.ToObjectReturnType.FilterFieldType.FiltersListFieldType.Pb_inFieldType pb_in); + Operation.ToObjectReturnType.FilterFieldType.FiltersListFieldType.Pb_inFieldType pb_in); @JsProperty void setSearch( - Operation.ToObjectReturnType.FilterFieldType.FiltersListFieldType.SearchFieldType search); + Operation.ToObjectReturnType.FilterFieldType.FiltersListFieldType.SearchFieldType search); } @JsOverlay @@ -846,15 +844,15 @@ static Operation.ToObjectReturnType.FilterFieldType create() { @JsOverlay default void setFiltersList( - Operation.ToObjectReturnType.FilterFieldType.FiltersListFieldType[] filtersList) { + Operation.ToObjectReturnType.FilterFieldType.FiltersListFieldType[] filtersList) { setFiltersList( - Js.>uncheckedCast( - filtersList)); + Js.>uncheckedCast( + filtersList)); } @JsProperty void setFiltersList( - JsArray filtersList); + JsArray filtersList); @JsProperty void setResultId(Object resultId); @@ -907,8 +905,7 @@ static Operation.ToObjectReturnType.HeadByFieldType create() { @JsOverlay default void setGroupByColumnSpecsList(String[] groupByColumnSpecsList) { - setGroupByColumnSpecsList( - Js.>uncheckedCast(groupByColumnSpecsList)); + setGroupByColumnSpecsList(Js.>uncheckedCast(groupByColumnSpecsList)); } @JsProperty @@ -1141,7 +1138,7 @@ default void setYColumnNamesList(String[] yColumnNamesList) { @JsProperty void setZoomRange( - Operation.ToObjectReturnType.RunChartDownsampleFieldType.ZoomRangeFieldType zoomRange); + Operation.ToObjectReturnType.RunChartDownsampleFieldType.ZoomRangeFieldType zoomRange); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -1265,14 +1262,14 @@ static Operation.ToObjectReturnType.SortFieldType create() { @JsProperty void setSortsList( - JsArray sortsList); + JsArray sortsList); @JsOverlay default void 
setSortsList( - Operation.ToObjectReturnType.SortFieldType.SortsListFieldType[] sortsList) { + Operation.ToObjectReturnType.SortFieldType.SortsListFieldType[] sortsList) { setSortsList( - Js.>uncheckedCast( - sortsList)); + Js.>uncheckedCast( + sortsList)); } @JsProperty @@ -1534,7 +1531,7 @@ static Operation.ToObjectReturnType create() { @JsProperty void setRunChartDownsample( - Operation.ToObjectReturnType.RunChartDownsampleFieldType runChartDownsample); + Operation.ToObjectReturnType.RunChartDownsampleFieldType runChartDownsample); @JsProperty void setSelect(Object select); @@ -1562,7 +1559,7 @@ void setRunChartDownsample( @JsProperty void setUnstructuredFilter( - Operation.ToObjectReturnType.UnstructuredFilterFieldType unstructuredFilter); + Operation.ToObjectReturnType.UnstructuredFilterFieldType unstructuredFilter); @JsProperty void setUpdate(Operation.ToObjectReturnType.UpdateFieldType update); @@ -1697,15 +1694,15 @@ static Operation.ToObjectReturnType0.ComboAggregateFieldType create() { @JsOverlay default void setAggregatesList( - Operation.ToObjectReturnType0.ComboAggregateFieldType.AggregatesListFieldType[] aggregatesList) { + Operation.ToObjectReturnType0.ComboAggregateFieldType.AggregatesListFieldType[] aggregatesList) { setAggregatesList( - Js.>uncheckedCast( - aggregatesList)); + Js.>uncheckedCast( + aggregatesList)); } @JsProperty void setAggregatesList( - JsArray aggregatesList); + JsArray aggregatesList); @JsProperty void setForceCombo(boolean forceCombo); @@ -1828,7 +1825,7 @@ default void setColumnNamesList(String[] columnNamesList) { @JsProperty void setSourceId( - Operation.ToObjectReturnType0.DropColumnsFieldType.SourceIdFieldType sourceId); + Operation.ToObjectReturnType0.DropColumnsFieldType.SourceIdFieldType sourceId); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -1839,7 +1836,7 @@ public interface ResultIdFieldType { public interface GetTicketUnionType { @JsOverlay static 
Operation.ToObjectReturnType0.EmptyTableFieldType.ResultIdFieldType.GetTicketUnionType of( - Object o) { + Object o) { return Js.cast(o); } @@ -1874,20 +1871,20 @@ static Operation.ToObjectReturnType0.EmptyTableFieldType.ResultIdFieldType creat @JsProperty void setTicket( - Operation.ToObjectReturnType0.EmptyTableFieldType.ResultIdFieldType.GetTicketUnionType ticket); + Operation.ToObjectReturnType0.EmptyTableFieldType.ResultIdFieldType.GetTicketUnionType ticket); @JsOverlay default void setTicket(String ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } @JsOverlay default void setTicket(Uint8Array ticket) { setTicket( - Js.uncheckedCast( - ticket)); + Js.uncheckedCast( + ticket)); } } @@ -1904,7 +1901,7 @@ static Operation.ToObjectReturnType0.EmptyTableFieldType create() { @JsProperty void setResultId( - Operation.ToObjectReturnType0.EmptyTableFieldType.ResultIdFieldType resultId); + Operation.ToObjectReturnType0.EmptyTableFieldType.ResultIdFieldType resultId); @JsProperty void setSize(String size); @@ -2050,11 +2047,11 @@ static Operation.ToObjectReturnType0.FilterFieldType.FiltersListFieldType.Compar @JsProperty void setLiteral( - Operation.ToObjectReturnType0.FilterFieldType.FiltersListFieldType.CompareFieldType.LhsFieldType.LiteralFieldType literal); + Operation.ToObjectReturnType0.FilterFieldType.FiltersListFieldType.CompareFieldType.LhsFieldType.LiteralFieldType literal); @JsProperty void setReference( - Operation.ToObjectReturnType0.FilterFieldType.FiltersListFieldType.CompareFieldType.LhsFieldType.ReferenceFieldType reference); + Operation.ToObjectReturnType0.FilterFieldType.FiltersListFieldType.CompareFieldType.LhsFieldType.ReferenceFieldType reference); } @JsOverlay @@ -2079,7 +2076,7 @@ static Operation.ToObjectReturnType0.FilterFieldType.FiltersListFieldType.Compar @JsProperty void setLhs( - Operation.ToObjectReturnType0.FilterFieldType.FiltersListFieldType.CompareFieldType.LhsFieldType lhs); + 
Operation.ToObjectReturnType0.FilterFieldType.FiltersListFieldType.CompareFieldType.LhsFieldType lhs); @JsProperty void setOperation(double operation); @@ -2285,8 +2282,7 @@ static Operation.ToObjectReturnType0.FilterFieldType.FiltersListFieldType.Search @JsOverlay default void setOptionalReferencesList(Object[] optionalReferencesList) { - setOptionalReferencesList( - Js.>uncheckedCast(optionalReferencesList)); + setOptionalReferencesList(Js.>uncheckedCast(optionalReferencesList)); } @JsProperty @@ -2330,43 +2326,43 @@ static Operation.ToObjectReturnType0.FilterFieldType.FiltersListFieldType create @JsProperty void setAnd( - Operation.ToObjectReturnType0.FilterFieldType.FiltersListFieldType.AndFieldType and); + Operation.ToObjectReturnType0.FilterFieldType.FiltersListFieldType.AndFieldType and); @JsProperty void setCompare( - Operation.ToObjectReturnType0.FilterFieldType.FiltersListFieldType.CompareFieldType compare); + Operation.ToObjectReturnType0.FilterFieldType.FiltersListFieldType.CompareFieldType compare); @JsProperty void setContains( - Operation.ToObjectReturnType0.FilterFieldType.FiltersListFieldType.ContainsFieldType contains); + Operation.ToObjectReturnType0.FilterFieldType.FiltersListFieldType.ContainsFieldType contains); @JsProperty void setInvoke( - Operation.ToObjectReturnType0.FilterFieldType.FiltersListFieldType.InvokeFieldType invoke); + Operation.ToObjectReturnType0.FilterFieldType.FiltersListFieldType.InvokeFieldType invoke); @JsProperty void setIsNull( - Operation.ToObjectReturnType0.FilterFieldType.FiltersListFieldType.IsNullFieldType isNull); + Operation.ToObjectReturnType0.FilterFieldType.FiltersListFieldType.IsNullFieldType isNull); @JsProperty void setMatches( - Operation.ToObjectReturnType0.FilterFieldType.FiltersListFieldType.MatchesFieldType matches); + Operation.ToObjectReturnType0.FilterFieldType.FiltersListFieldType.MatchesFieldType matches); @JsProperty void setNot( - 
Operation.ToObjectReturnType0.FilterFieldType.FiltersListFieldType.NotFieldType not); + Operation.ToObjectReturnType0.FilterFieldType.FiltersListFieldType.NotFieldType not); @JsProperty void setOr( - Operation.ToObjectReturnType0.FilterFieldType.FiltersListFieldType.OrFieldType or); + Operation.ToObjectReturnType0.FilterFieldType.FiltersListFieldType.OrFieldType or); @JsProperty void setPb_in( - Operation.ToObjectReturnType0.FilterFieldType.FiltersListFieldType.Pb_inFieldType pb_in); + Operation.ToObjectReturnType0.FilterFieldType.FiltersListFieldType.Pb_inFieldType pb_in); @JsProperty void setSearch( - Operation.ToObjectReturnType0.FilterFieldType.FiltersListFieldType.SearchFieldType search); + Operation.ToObjectReturnType0.FilterFieldType.FiltersListFieldType.SearchFieldType search); } @JsOverlay @@ -2385,15 +2381,15 @@ static Operation.ToObjectReturnType0.FilterFieldType create() { @JsOverlay default void setFiltersList( - Operation.ToObjectReturnType0.FilterFieldType.FiltersListFieldType[] filtersList) { + Operation.ToObjectReturnType0.FilterFieldType.FiltersListFieldType[] filtersList) { setFiltersList( - Js.>uncheckedCast( - filtersList)); + Js.>uncheckedCast( + filtersList)); } @JsProperty void setFiltersList( - JsArray filtersList); + JsArray filtersList); @JsProperty void setResultId(Object resultId); @@ -2446,8 +2442,7 @@ static Operation.ToObjectReturnType0.HeadByFieldType create() { @JsOverlay default void setGroupByColumnSpecsList(String[] groupByColumnSpecsList) { - setGroupByColumnSpecsList( - Js.>uncheckedCast(groupByColumnSpecsList)); + setGroupByColumnSpecsList(Js.>uncheckedCast(groupByColumnSpecsList)); } @JsProperty @@ -2680,7 +2675,7 @@ default void setYColumnNamesList(String[] yColumnNamesList) { @JsProperty void setZoomRange( - Operation.ToObjectReturnType0.RunChartDownsampleFieldType.ZoomRangeFieldType zoomRange); + Operation.ToObjectReturnType0.RunChartDownsampleFieldType.ZoomRangeFieldType zoomRange); } @JsType(isNative = true, name = "?", 
namespace = JsPackage.GLOBAL) @@ -2804,14 +2799,14 @@ static Operation.ToObjectReturnType0.SortFieldType create() { @JsProperty void setSortsList( - JsArray sortsList); + JsArray sortsList); @JsOverlay default void setSortsList( - Operation.ToObjectReturnType0.SortFieldType.SortsListFieldType[] sortsList) { + Operation.ToObjectReturnType0.SortFieldType.SortsListFieldType[] sortsList) { setSortsList( - Js.>uncheckedCast( - sortsList)); + Js.>uncheckedCast( + sortsList)); } @JsProperty @@ -3033,8 +3028,7 @@ static Operation.ToObjectReturnType0 create() { void setAsOfJoin(Operation.ToObjectReturnType0.AsOfJoinFieldType asOfJoin); @JsProperty - void setComboAggregate( - Operation.ToObjectReturnType0.ComboAggregateFieldType comboAggregate); + void setComboAggregate(Operation.ToObjectReturnType0.ComboAggregateFieldType comboAggregate); @JsProperty void setCrossJoin(Operation.ToObjectReturnType0.CrossJoinFieldType crossJoin); @@ -3074,14 +3068,13 @@ void setComboAggregate( @JsProperty void setRunChartDownsample( - Operation.ToObjectReturnType0.RunChartDownsampleFieldType runChartDownsample); + Operation.ToObjectReturnType0.RunChartDownsampleFieldType runChartDownsample); @JsProperty void setSelect(Object select); @JsProperty - void setSelectDistinct( - Operation.ToObjectReturnType0.SelectDistinctFieldType selectDistinct); + void setSelectDistinct(Operation.ToObjectReturnType0.SelectDistinctFieldType selectDistinct); @JsProperty void setSnapshot(Operation.ToObjectReturnType0.SnapshotFieldType snapshot); @@ -3103,7 +3096,7 @@ void setSelectDistinct( @JsProperty void setUnstructuredFilter( - Operation.ToObjectReturnType0.UnstructuredFilterFieldType unstructuredFilter); + Operation.ToObjectReturnType0.UnstructuredFilterFieldType unstructuredFilter); @JsProperty void setUpdate(Operation.ToObjectReturnType0.UpdateFieldType update); @@ -3122,7 +3115,7 @@ void setUnstructuredFilter( public static native void serializeBinaryToWriter(Operation message, Object writer); public static 
native Operation.ToObjectReturnType toObject( - boolean includeInstance, Operation msg); + boolean includeInstance, Operation msg); public native void clearAsOfJoin(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/batchtablerequest/operation/OpCase.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/batchtablerequest/operation/OpCase.java index 7bcdb69d2ef..9e0b132e2df 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/batchtablerequest/operation/OpCase.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/batchtablerequest/operation/OpCase.java @@ -4,36 +4,36 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb.BatchTableRequest.Operation.OpCase", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb.BatchTableRequest.Operation.OpCase", + namespace = JsPackage.GLOBAL) public class OpCase { public static int AS_OF_JOIN, - COMBO_AGGREGATE, - CROSS_JOIN, - DROP_COLUMNS, - EMPTY_TABLE, - EXACT_JOIN, - FILTER, - FLATTEN, - HEAD, - HEAD_BY, - LAZY_UPDATE, - LEFT_JOIN, - MERGE, - NATURAL_JOIN, - OP_NOT_SET, - RUN_CHART_DOWNSAMPLE, - SELECT, - SELECT_DISTINCT, - SNAPSHOT, - SORT, - TAIL, - TAIL_BY, - TIME_TABLE, - UNGROUP, - UNSTRUCTURED_FILTER, - UPDATE, - UPDATE_VIEW, - VIEW; + COMBO_AGGREGATE, + CROSS_JOIN, + DROP_COLUMNS, + EMPTY_TABLE, + EXACT_JOIN, + FILTER, + FLATTEN, + HEAD, + HEAD_BY, + LAZY_UPDATE, + LEFT_JOIN, + MERGE, + NATURAL_JOIN, + OP_NOT_SET, + RUN_CHART_DOWNSAMPLE, + SELECT, + SELECT_DISTINCT, + SNAPSHOT, + SORT, + TAIL, + TAIL_BY, + TIME_TABLE, + UNGROUP, + UNSTRUCTURED_FILTER, + UPDATE, + UPDATE_VIEW, + VIEW; } diff --git 
a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/comboaggregaterequest/AggTypeMap.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/comboaggregaterequest/AggTypeMap.java index 0c5d073d152..8b2ad8c7e72 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/comboaggregaterequest/AggTypeMap.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/comboaggregaterequest/AggTypeMap.java @@ -8,9 +8,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb.ComboAggregateRequest.AggTypeMap", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb.ComboAggregateRequest.AggTypeMap", + namespace = JsPackage.GLOBAL) public interface AggTypeMap { @JsOverlay static AggTypeMap create() { diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/comboaggregaterequest/Aggregate.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/comboaggregaterequest/Aggregate.java index 9c17a7a8370..85b1f525ad7 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/comboaggregaterequest/Aggregate.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/comboaggregaterequest/Aggregate.java @@ -10,9 +10,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb.ComboAggregateRequest.Aggregate", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb.ComboAggregateRequest.Aggregate", + namespace = JsPackage.GLOBAL) public class 
Aggregate { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -107,7 +107,7 @@ default void setMatchPairsList(String[] matchPairsList) { public static native void serializeBinaryToWriter(Aggregate message, Object writer); public static native Aggregate.ToObjectReturnType toObject( - boolean includeInstance, Aggregate msg); + boolean includeInstance, Aggregate msg); public native String addMatchPairs(String value, double index); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/comparecondition/CompareOperationMap.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/comparecondition/CompareOperationMap.java index d9e842b364d..c61c95db422 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/comparecondition/CompareOperationMap.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/comparecondition/CompareOperationMap.java @@ -8,9 +8,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb.CompareCondition.CompareOperationMap", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb.CompareCondition.CompareOperationMap", + namespace = JsPackage.GLOBAL) public interface CompareOperationMap { @JsOverlay static CompareOperationMap create() { diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/condition/DataCase.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/condition/DataCase.java index 8f357986404..45c642da769 100644 --- 
a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/condition/DataCase.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/condition/DataCase.java @@ -4,19 +4,19 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb.Condition.DataCase", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb.Condition.DataCase", + namespace = JsPackage.GLOBAL) public class DataCase { public static int AND, - COMPARE, - CONTAINS, - DATA_NOT_SET, - IN, - INVOKE, - IS_NULL, - MATCHES, - NOT, - OR, - SEARCH; + COMPARE, + CONTAINS, + DATA_NOT_SET, + IN, + INVOKE, + IS_NULL, + MATCHES, + NOT, + OR, + SEARCH; } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/literal/ValueCase.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/literal/ValueCase.java index e1412dc7bec..aa7f6849a7c 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/literal/ValueCase.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/literal/ValueCase.java @@ -4,14 +4,14 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb.Literal.ValueCase", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb.Literal.ValueCase", + namespace = JsPackage.GLOBAL) public class ValueCase { public static int BOOL_VALUE, - DOUBLE_VALUE, - LONG_VALUE, - NANO_TIME_VALUE, - STRING_VALUE, - VALUE_NOT_SET; + DOUBLE_VALUE, + LONG_VALUE, + NANO_TIME_VALUE, + STRING_VALUE, + VALUE_NOT_SET; } diff --git 
a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/runchartdownsamplerequest/ZoomRange.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/runchartdownsamplerequest/ZoomRange.java index ddec4ef7e62..ba0ab462235 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/runchartdownsamplerequest/ZoomRange.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/runchartdownsamplerequest/ZoomRange.java @@ -9,9 +9,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb.RunChartDownsampleRequest.ZoomRange", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb.RunChartDownsampleRequest.ZoomRange", + namespace = JsPackage.GLOBAL) public class ZoomRange { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface ToObjectReturnType { @@ -60,7 +60,7 @@ static ZoomRange.ToObjectReturnType0 create() { public static native void serializeBinaryToWriter(ZoomRange message, Object writer); public static native ZoomRange.ToObjectReturnType toObject( - boolean includeInstance, ZoomRange msg); + boolean includeInstance, ZoomRange msg); public native void clearMaxDateNanos(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/sortdescriptor/SortDirectionMap.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/sortdescriptor/SortDirectionMap.java index f5d90dd53d9..0c096d15bc2 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/sortdescriptor/SortDirectionMap.java +++ 
b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/sortdescriptor/SortDirectionMap.java @@ -8,9 +8,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb.SortDescriptor.SortDirectionMap", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb.SortDescriptor.SortDirectionMap", + namespace = JsPackage.GLOBAL) public interface SortDirectionMap { @JsOverlay static SortDirectionMap create() { diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/tablereference/RefCase.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/tablereference/RefCase.java index 8bf83d717bd..31dc77ffb0a 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/tablereference/RefCase.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/tablereference/RefCase.java @@ -4,11 +4,11 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb.TableReference.RefCase", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb.TableReference.RefCase", + namespace = JsPackage.GLOBAL) public class RefCase { public static int BATCH_OFFSET, - REF_NOT_SET, - TICKET; + REF_NOT_SET, + TICKET; } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/value/DataCase.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/value/DataCase.java index 1b0c6e0d8e6..7e77ba7bc3b 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/value/DataCase.java 
+++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb/value/DataCase.java @@ -4,11 +4,11 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb.Value.DataCase", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb.Value.DataCase", + namespace = JsPackage.GLOBAL) public class DataCase { public static int DATA_NOT_SET, - LITERAL, - REFERENCE; + LITERAL, + REFERENCE; } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb_service/BidirectionalStream.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb_service/BidirectionalStream.java index 5876eabc840..89ecdc31a83 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb_service/BidirectionalStream.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb_service/BidirectionalStream.java @@ -5,9 +5,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb_service.BidirectionalStream", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb_service.BidirectionalStream", + namespace = JsPackage.GLOBAL) public interface BidirectionalStream { void cancel(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb_service/RequestStream.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb_service/RequestStream.java index 9936e0da7fb..531b64400b4 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb_service/RequestStream.java +++ 
b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb_service/RequestStream.java @@ -10,9 +10,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb_service.RequestStream", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb_service.RequestStream", + namespace = JsPackage.GLOBAL) public interface RequestStream { @JsFunction public interface OnHandlerFn { diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb_service/ResponseStream.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb_service/ResponseStream.java index 02f630327e4..40850803b3e 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb_service/ResponseStream.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb_service/ResponseStream.java @@ -5,9 +5,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb_service.ResponseStream", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb_service.ResponseStream", + namespace = JsPackage.GLOBAL) public interface ResponseStream { void cancel(); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb_service/TableService.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb_service/TableService.java index a4917a05177..38263811907 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb_service/TableService.java +++ 
b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb_service/TableService.java @@ -8,9 +8,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb_service.TableService", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb_service.TableService", + namespace = JsPackage.GLOBAL) public class TableService { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface AsOfJoinTablesType { diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb_service/TableServiceClient.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb_service/TableServiceClient.java index 31e00d1a176..b41d2204a3b 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb_service/TableServiceClient.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb_service/TableServiceClient.java @@ -35,9 +35,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb_service.TableServiceClient", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb_service.TableServiceClient", + namespace = JsPackage.GLOBAL) public class TableServiceClient { @JsFunction public interface AsOfJoinTablesCallbackFn { @@ -68,8 +68,7 @@ static TableServiceClient.AsOfJoinTablesCallbackFn.P0Type create() { } void onInvoke( - TableServiceClient.AsOfJoinTablesCallbackFn.P0Type p0, - ExportedTableCreationResponse p1); + TableServiceClient.AsOfJoinTablesCallbackFn.P0Type p0, ExportedTableCreationResponse p1); } @JsFunction @@ -101,8 +100,8 @@ static TableServiceClient.AsOfJoinTablesMetadata_or_callbackFn.P0Type create() { 
} void onInvoke( - TableServiceClient.AsOfJoinTablesMetadata_or_callbackFn.P0Type p0, - ExportedTableCreationResponse p1); + TableServiceClient.AsOfJoinTablesMetadata_or_callbackFn.P0Type p0, + ExportedTableCreationResponse p1); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -162,8 +161,7 @@ static TableServiceClient.ComboAggregateCallbackFn.P0Type create() { } void onInvoke( - TableServiceClient.ComboAggregateCallbackFn.P0Type p0, - ExportedTableCreationResponse p1); + TableServiceClient.ComboAggregateCallbackFn.P0Type p0, ExportedTableCreationResponse p1); } @JsFunction @@ -195,8 +193,8 @@ static TableServiceClient.ComboAggregateMetadata_or_callbackFn.P0Type create() { } void onInvoke( - TableServiceClient.ComboAggregateMetadata_or_callbackFn.P0Type p0, - ExportedTableCreationResponse p1); + TableServiceClient.ComboAggregateMetadata_or_callbackFn.P0Type p0, + ExportedTableCreationResponse p1); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -256,8 +254,7 @@ static TableServiceClient.CrossJoinTablesCallbackFn.P0Type create() { } void onInvoke( - TableServiceClient.CrossJoinTablesCallbackFn.P0Type p0, - ExportedTableCreationResponse p1); + TableServiceClient.CrossJoinTablesCallbackFn.P0Type p0, ExportedTableCreationResponse p1); } @JsFunction @@ -289,8 +286,8 @@ static TableServiceClient.CrossJoinTablesMetadata_or_callbackFn.P0Type create() } void onInvoke( - TableServiceClient.CrossJoinTablesMetadata_or_callbackFn.P0Type p0, - ExportedTableCreationResponse p1); + TableServiceClient.CrossJoinTablesMetadata_or_callbackFn.P0Type p0, + ExportedTableCreationResponse p1); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -350,7 +347,7 @@ static TableServiceClient.DropColumnsCallbackFn.P0Type create() { } void onInvoke( - TableServiceClient.DropColumnsCallbackFn.P0Type p0, ExportedTableCreationResponse p1); + TableServiceClient.DropColumnsCallbackFn.P0Type p0, ExportedTableCreationResponse p1); } 
@JsFunction @@ -382,8 +379,8 @@ static TableServiceClient.DropColumnsMetadata_or_callbackFn.P0Type create() { } void onInvoke( - TableServiceClient.DropColumnsMetadata_or_callbackFn.P0Type p0, - ExportedTableCreationResponse p1); + TableServiceClient.DropColumnsMetadata_or_callbackFn.P0Type p0, + ExportedTableCreationResponse p1); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -443,7 +440,7 @@ static TableServiceClient.EmptyTableCallbackFn.P0Type create() { } void onInvoke( - TableServiceClient.EmptyTableCallbackFn.P0Type p0, ExportedTableCreationResponse p1); + TableServiceClient.EmptyTableCallbackFn.P0Type p0, ExportedTableCreationResponse p1); } @JsFunction @@ -475,8 +472,8 @@ static TableServiceClient.EmptyTableMetadata_or_callbackFn.P0Type create() { } void onInvoke( - TableServiceClient.EmptyTableMetadata_or_callbackFn.P0Type p0, - ExportedTableCreationResponse p1); + TableServiceClient.EmptyTableMetadata_or_callbackFn.P0Type p0, + ExportedTableCreationResponse p1); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -536,8 +533,7 @@ static TableServiceClient.ExactJoinTablesCallbackFn.P0Type create() { } void onInvoke( - TableServiceClient.ExactJoinTablesCallbackFn.P0Type p0, - ExportedTableCreationResponse p1); + TableServiceClient.ExactJoinTablesCallbackFn.P0Type p0, ExportedTableCreationResponse p1); } @JsFunction @@ -569,8 +565,8 @@ static TableServiceClient.ExactJoinTablesMetadata_or_callbackFn.P0Type create() } void onInvoke( - TableServiceClient.ExactJoinTablesMetadata_or_callbackFn.P0Type p0, - ExportedTableCreationResponse p1); + TableServiceClient.ExactJoinTablesMetadata_or_callbackFn.P0Type p0, + ExportedTableCreationResponse p1); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -629,8 +625,7 @@ static TableServiceClient.FilterCallbackFn.P0Type create() { void setMetadata(BrowserHeaders metadata); } - void onInvoke(TableServiceClient.FilterCallbackFn.P0Type p0, - 
ExportedTableCreationResponse p1); + void onInvoke(TableServiceClient.FilterCallbackFn.P0Type p0, ExportedTableCreationResponse p1); } @JsFunction @@ -662,8 +657,8 @@ static TableServiceClient.FilterMetadata_or_callbackFn.P0Type create() { } void onInvoke( - TableServiceClient.FilterMetadata_or_callbackFn.P0Type p0, - ExportedTableCreationResponse p1); + TableServiceClient.FilterMetadata_or_callbackFn.P0Type p0, + ExportedTableCreationResponse p1); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -722,8 +717,7 @@ static TableServiceClient.FlattenCallbackFn.P0Type create() { void setMetadata(BrowserHeaders metadata); } - void onInvoke(TableServiceClient.FlattenCallbackFn.P0Type p0, - ExportedTableCreationResponse p1); + void onInvoke(TableServiceClient.FlattenCallbackFn.P0Type p0, ExportedTableCreationResponse p1); } @JsFunction @@ -755,8 +749,8 @@ static TableServiceClient.FlattenMetadata_or_callbackFn.P0Type create() { } void onInvoke( - TableServiceClient.FlattenMetadata_or_callbackFn.P0Type p0, - ExportedTableCreationResponse p1); + TableServiceClient.FlattenMetadata_or_callbackFn.P0Type p0, + ExportedTableCreationResponse p1); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -815,8 +809,7 @@ static TableServiceClient.HeadByCallbackFn.P0Type create() { void setMetadata(BrowserHeaders metadata); } - void onInvoke(TableServiceClient.HeadByCallbackFn.P0Type p0, - ExportedTableCreationResponse p1); + void onInvoke(TableServiceClient.HeadByCallbackFn.P0Type p0, ExportedTableCreationResponse p1); } @JsFunction @@ -848,8 +841,8 @@ static TableServiceClient.HeadByMetadata_or_callbackFn.P0Type create() { } void onInvoke( - TableServiceClient.HeadByMetadata_or_callbackFn.P0Type p0, - ExportedTableCreationResponse p1); + TableServiceClient.HeadByMetadata_or_callbackFn.P0Type p0, + ExportedTableCreationResponse p1); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -908,8 +901,7 @@ static 
TableServiceClient.HeadCallbackFn.P0Type create() { void setMetadata(BrowserHeaders metadata); } - void onInvoke(TableServiceClient.HeadCallbackFn.P0Type p0, - ExportedTableCreationResponse p1); + void onInvoke(TableServiceClient.HeadCallbackFn.P0Type p0, ExportedTableCreationResponse p1); } @JsFunction @@ -941,8 +933,7 @@ static TableServiceClient.HeadMetadata_or_callbackFn.P0Type create() { } void onInvoke( - TableServiceClient.HeadMetadata_or_callbackFn.P0Type p0, - ExportedTableCreationResponse p1); + TableServiceClient.HeadMetadata_or_callbackFn.P0Type p0, ExportedTableCreationResponse p1); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -1002,7 +993,7 @@ static TableServiceClient.LazyUpdateCallbackFn.P0Type create() { } void onInvoke( - TableServiceClient.LazyUpdateCallbackFn.P0Type p0, ExportedTableCreationResponse p1); + TableServiceClient.LazyUpdateCallbackFn.P0Type p0, ExportedTableCreationResponse p1); } @JsFunction @@ -1034,8 +1025,8 @@ static TableServiceClient.LazyUpdateMetadata_or_callbackFn.P0Type create() { } void onInvoke( - TableServiceClient.LazyUpdateMetadata_or_callbackFn.P0Type p0, - ExportedTableCreationResponse p1); + TableServiceClient.LazyUpdateMetadata_or_callbackFn.P0Type p0, + ExportedTableCreationResponse p1); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -1095,8 +1086,7 @@ static TableServiceClient.LeftJoinTablesCallbackFn.P0Type create() { } void onInvoke( - TableServiceClient.LeftJoinTablesCallbackFn.P0Type p0, - ExportedTableCreationResponse p1); + TableServiceClient.LeftJoinTablesCallbackFn.P0Type p0, ExportedTableCreationResponse p1); } @JsFunction @@ -1128,8 +1118,8 @@ static TableServiceClient.LeftJoinTablesMetadata_or_callbackFn.P0Type create() { } void onInvoke( - TableServiceClient.LeftJoinTablesMetadata_or_callbackFn.P0Type p0, - ExportedTableCreationResponse p1); + TableServiceClient.LeftJoinTablesMetadata_or_callbackFn.P0Type p0, + ExportedTableCreationResponse p1); } 
@JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -1189,7 +1179,7 @@ static TableServiceClient.MergeTablesCallbackFn.P0Type create() { } void onInvoke( - TableServiceClient.MergeTablesCallbackFn.P0Type p0, ExportedTableCreationResponse p1); + TableServiceClient.MergeTablesCallbackFn.P0Type p0, ExportedTableCreationResponse p1); } @JsFunction @@ -1221,8 +1211,8 @@ static TableServiceClient.MergeTablesMetadata_or_callbackFn.P0Type create() { } void onInvoke( - TableServiceClient.MergeTablesMetadata_or_callbackFn.P0Type p0, - ExportedTableCreationResponse p1); + TableServiceClient.MergeTablesMetadata_or_callbackFn.P0Type p0, + ExportedTableCreationResponse p1); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -1282,8 +1272,7 @@ static TableServiceClient.NaturalJoinTablesCallbackFn.P0Type create() { } void onInvoke( - TableServiceClient.NaturalJoinTablesCallbackFn.P0Type p0, - ExportedTableCreationResponse p1); + TableServiceClient.NaturalJoinTablesCallbackFn.P0Type p0, ExportedTableCreationResponse p1); } @JsFunction @@ -1315,8 +1304,8 @@ static TableServiceClient.NaturalJoinTablesMetadata_or_callbackFn.P0Type create( } void onInvoke( - TableServiceClient.NaturalJoinTablesMetadata_or_callbackFn.P0Type p0, - ExportedTableCreationResponse p1); + TableServiceClient.NaturalJoinTablesMetadata_or_callbackFn.P0Type p0, + ExportedTableCreationResponse p1); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -1376,8 +1365,8 @@ static TableServiceClient.RunChartDownsampleCallbackFn.P0Type create() { } void onInvoke( - TableServiceClient.RunChartDownsampleCallbackFn.P0Type p0, - ExportedTableCreationResponse p1); + TableServiceClient.RunChartDownsampleCallbackFn.P0Type p0, + ExportedTableCreationResponse p1); } @JsFunction @@ -1409,8 +1398,8 @@ static TableServiceClient.RunChartDownsampleMetadata_or_callbackFn.P0Type create } void onInvoke( - TableServiceClient.RunChartDownsampleMetadata_or_callbackFn.P0Type p0, - 
ExportedTableCreationResponse p1); + TableServiceClient.RunChartDownsampleMetadata_or_callbackFn.P0Type p0, + ExportedTableCreationResponse p1); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -1469,8 +1458,7 @@ static TableServiceClient.SelectCallbackFn.P0Type create() { void setMetadata(BrowserHeaders metadata); } - void onInvoke(TableServiceClient.SelectCallbackFn.P0Type p0, - ExportedTableCreationResponse p1); + void onInvoke(TableServiceClient.SelectCallbackFn.P0Type p0, ExportedTableCreationResponse p1); } @JsFunction @@ -1502,8 +1490,7 @@ static TableServiceClient.SelectDistinctCallbackFn.P0Type create() { } void onInvoke( - TableServiceClient.SelectDistinctCallbackFn.P0Type p0, - ExportedTableCreationResponse p1); + TableServiceClient.SelectDistinctCallbackFn.P0Type p0, ExportedTableCreationResponse p1); } @JsFunction @@ -1535,8 +1522,8 @@ static TableServiceClient.SelectDistinctMetadata_or_callbackFn.P0Type create() { } void onInvoke( - TableServiceClient.SelectDistinctMetadata_or_callbackFn.P0Type p0, - ExportedTableCreationResponse p1); + TableServiceClient.SelectDistinctMetadata_or_callbackFn.P0Type p0, + ExportedTableCreationResponse p1); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -1596,8 +1583,8 @@ static TableServiceClient.SelectMetadata_or_callbackFn.P0Type create() { } void onInvoke( - TableServiceClient.SelectMetadata_or_callbackFn.P0Type p0, - ExportedTableCreationResponse p1); + TableServiceClient.SelectMetadata_or_callbackFn.P0Type p0, + ExportedTableCreationResponse p1); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -1657,7 +1644,7 @@ static TableServiceClient.SnapshotCallbackFn.P0Type create() { } void onInvoke( - TableServiceClient.SnapshotCallbackFn.P0Type p0, ExportedTableCreationResponse p1); + TableServiceClient.SnapshotCallbackFn.P0Type p0, ExportedTableCreationResponse p1); } @JsFunction @@ -1689,8 +1676,8 @@ static 
TableServiceClient.SnapshotMetadata_or_callbackFn.P0Type create() { } void onInvoke( - TableServiceClient.SnapshotMetadata_or_callbackFn.P0Type p0, - ExportedTableCreationResponse p1); + TableServiceClient.SnapshotMetadata_or_callbackFn.P0Type p0, + ExportedTableCreationResponse p1); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -1749,8 +1736,7 @@ static TableServiceClient.SortCallbackFn.P0Type create() { void setMetadata(BrowserHeaders metadata); } - void onInvoke(TableServiceClient.SortCallbackFn.P0Type p0, - ExportedTableCreationResponse p1); + void onInvoke(TableServiceClient.SortCallbackFn.P0Type p0, ExportedTableCreationResponse p1); } @JsFunction @@ -1782,8 +1768,7 @@ static TableServiceClient.SortMetadata_or_callbackFn.P0Type create() { } void onInvoke( - TableServiceClient.SortMetadata_or_callbackFn.P0Type p0, - ExportedTableCreationResponse p1); + TableServiceClient.SortMetadata_or_callbackFn.P0Type p0, ExportedTableCreationResponse p1); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -1842,8 +1827,7 @@ static TableServiceClient.TailByCallbackFn.P0Type create() { void setMetadata(BrowserHeaders metadata); } - void onInvoke(TableServiceClient.TailByCallbackFn.P0Type p0, - ExportedTableCreationResponse p1); + void onInvoke(TableServiceClient.TailByCallbackFn.P0Type p0, ExportedTableCreationResponse p1); } @JsFunction @@ -1875,8 +1859,8 @@ static TableServiceClient.TailByMetadata_or_callbackFn.P0Type create() { } void onInvoke( - TableServiceClient.TailByMetadata_or_callbackFn.P0Type p0, - ExportedTableCreationResponse p1); + TableServiceClient.TailByMetadata_or_callbackFn.P0Type p0, + ExportedTableCreationResponse p1); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -1935,8 +1919,7 @@ static TableServiceClient.TailCallbackFn.P0Type create() { void setMetadata(BrowserHeaders metadata); } - void onInvoke(TableServiceClient.TailCallbackFn.P0Type p0, - ExportedTableCreationResponse p1); + 
void onInvoke(TableServiceClient.TailCallbackFn.P0Type p0, ExportedTableCreationResponse p1); } @JsFunction @@ -1968,8 +1951,7 @@ static TableServiceClient.TailMetadata_or_callbackFn.P0Type create() { } void onInvoke( - TableServiceClient.TailMetadata_or_callbackFn.P0Type p0, - ExportedTableCreationResponse p1); + TableServiceClient.TailMetadata_or_callbackFn.P0Type p0, ExportedTableCreationResponse p1); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -2029,7 +2011,7 @@ static TableServiceClient.TimeTableCallbackFn.P0Type create() { } void onInvoke( - TableServiceClient.TimeTableCallbackFn.P0Type p0, ExportedTableCreationResponse p1); + TableServiceClient.TimeTableCallbackFn.P0Type p0, ExportedTableCreationResponse p1); } @JsFunction @@ -2061,8 +2043,8 @@ static TableServiceClient.TimeTableMetadata_or_callbackFn.P0Type create() { } void onInvoke( - TableServiceClient.TimeTableMetadata_or_callbackFn.P0Type p0, - ExportedTableCreationResponse p1); + TableServiceClient.TimeTableMetadata_or_callbackFn.P0Type p0, + ExportedTableCreationResponse p1); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -2121,8 +2103,7 @@ static TableServiceClient.UngroupCallbackFn.P0Type create() { void setMetadata(BrowserHeaders metadata); } - void onInvoke(TableServiceClient.UngroupCallbackFn.P0Type p0, - ExportedTableCreationResponse p1); + void onInvoke(TableServiceClient.UngroupCallbackFn.P0Type p0, ExportedTableCreationResponse p1); } @JsFunction @@ -2154,8 +2135,8 @@ static TableServiceClient.UngroupMetadata_or_callbackFn.P0Type create() { } void onInvoke( - TableServiceClient.UngroupMetadata_or_callbackFn.P0Type p0, - ExportedTableCreationResponse p1); + TableServiceClient.UngroupMetadata_or_callbackFn.P0Type p0, + ExportedTableCreationResponse p1); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -2215,8 +2196,8 @@ static TableServiceClient.UnstructuredFilterCallbackFn.P0Type create() { } void onInvoke( - 
TableServiceClient.UnstructuredFilterCallbackFn.P0Type p0, - ExportedTableCreationResponse p1); + TableServiceClient.UnstructuredFilterCallbackFn.P0Type p0, + ExportedTableCreationResponse p1); } @JsFunction @@ -2248,8 +2229,8 @@ static TableServiceClient.UnstructuredFilterMetadata_or_callbackFn.P0Type create } void onInvoke( - TableServiceClient.UnstructuredFilterMetadata_or_callbackFn.P0Type p0, - ExportedTableCreationResponse p1); + TableServiceClient.UnstructuredFilterMetadata_or_callbackFn.P0Type p0, + ExportedTableCreationResponse p1); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -2308,8 +2289,7 @@ static TableServiceClient.UpdateCallbackFn.P0Type create() { void setMetadata(BrowserHeaders metadata); } - void onInvoke(TableServiceClient.UpdateCallbackFn.P0Type p0, - ExportedTableCreationResponse p1); + void onInvoke(TableServiceClient.UpdateCallbackFn.P0Type p0, ExportedTableCreationResponse p1); } @JsFunction @@ -2341,8 +2321,8 @@ static TableServiceClient.UpdateMetadata_or_callbackFn.P0Type create() { } void onInvoke( - TableServiceClient.UpdateMetadata_or_callbackFn.P0Type p0, - ExportedTableCreationResponse p1); + TableServiceClient.UpdateMetadata_or_callbackFn.P0Type p0, + ExportedTableCreationResponse p1); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -2402,7 +2382,7 @@ static TableServiceClient.UpdateViewCallbackFn.P0Type create() { } void onInvoke( - TableServiceClient.UpdateViewCallbackFn.P0Type p0, ExportedTableCreationResponse p1); + TableServiceClient.UpdateViewCallbackFn.P0Type p0, ExportedTableCreationResponse p1); } @JsFunction @@ -2434,8 +2414,8 @@ static TableServiceClient.UpdateViewMetadata_or_callbackFn.P0Type create() { } void onInvoke( - TableServiceClient.UpdateViewMetadata_or_callbackFn.P0Type p0, - ExportedTableCreationResponse p1); + TableServiceClient.UpdateViewMetadata_or_callbackFn.P0Type p0, + ExportedTableCreationResponse p1); } @JsType(isNative = true, name = "?", namespace = 
JsPackage.GLOBAL) @@ -2494,8 +2474,7 @@ static TableServiceClient.ViewCallbackFn.P0Type create() { void setMetadata(BrowserHeaders metadata); } - void onInvoke(TableServiceClient.ViewCallbackFn.P0Type p0, - ExportedTableCreationResponse p1); + void onInvoke(TableServiceClient.ViewCallbackFn.P0Type p0, ExportedTableCreationResponse p1); } @JsFunction @@ -2527,8 +2506,7 @@ static TableServiceClient.ViewMetadata_or_callbackFn.P0Type create() { } void onInvoke( - TableServiceClient.ViewMetadata_or_callbackFn.P0Type p0, - ExportedTableCreationResponse p1); + TableServiceClient.ViewMetadata_or_callbackFn.P0Type p0, ExportedTableCreationResponse p1); } @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) @@ -2567,1417 +2545,1417 @@ public TableServiceClient(String serviceHost) {} @JsOverlay public final UnaryResponse asOfJoinTables( - AsOfJoinTablesRequest requestMessage, - TableServiceClient.AsOfJoinTablesMetadata_or_callbackFn metadata_or_callback, - TableServiceClient.AsOfJoinTablesCallbackFn callback) { + AsOfJoinTablesRequest requestMessage, + TableServiceClient.AsOfJoinTablesMetadata_or_callbackFn metadata_or_callback, + TableServiceClient.AsOfJoinTablesCallbackFn callback) { return asOfJoinTables( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse asOfJoinTables( - AsOfJoinTablesRequest requestMessage, - TableServiceClient.AsOfJoinTablesMetadata_or_callbackFn metadata_or_callback) { + AsOfJoinTablesRequest requestMessage, + TableServiceClient.AsOfJoinTablesMetadata_or_callbackFn metadata_or_callback) { return asOfJoinTables( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } public native UnaryResponse asOfJoinTables( - AsOfJoinTablesRequest requestMessage, - TableServiceClient.AsOfJoinTablesMetadata_or_callbackUnionType 
metadata_or_callback, - TableServiceClient.AsOfJoinTablesCallbackFn callback); + AsOfJoinTablesRequest requestMessage, + TableServiceClient.AsOfJoinTablesMetadata_or_callbackUnionType metadata_or_callback, + TableServiceClient.AsOfJoinTablesCallbackFn callback); public native UnaryResponse asOfJoinTables( - AsOfJoinTablesRequest requestMessage, - TableServiceClient.AsOfJoinTablesMetadata_or_callbackUnionType metadata_or_callback); + AsOfJoinTablesRequest requestMessage, + TableServiceClient.AsOfJoinTablesMetadata_or_callbackUnionType metadata_or_callback); @JsOverlay public final UnaryResponse asOfJoinTables( - AsOfJoinTablesRequest requestMessage, - BrowserHeaders metadata_or_callback, - TableServiceClient.AsOfJoinTablesCallbackFn callback) { + AsOfJoinTablesRequest requestMessage, + BrowserHeaders metadata_or_callback, + TableServiceClient.AsOfJoinTablesCallbackFn callback) { return asOfJoinTables( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse asOfJoinTables( - AsOfJoinTablesRequest requestMessage, BrowserHeaders metadata_or_callback) { + AsOfJoinTablesRequest requestMessage, BrowserHeaders metadata_or_callback) { return asOfJoinTables( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } public native ResponseStream batch( - BatchTableRequest requestMessage, BrowserHeaders metadata); + BatchTableRequest requestMessage, BrowserHeaders metadata); public native ResponseStream batch( - BatchTableRequest requestMessage); + BatchTableRequest requestMessage); @JsOverlay public final UnaryResponse comboAggregate( - ComboAggregateRequest requestMessage, - BrowserHeaders metadata_or_callback, - TableServiceClient.ComboAggregateCallbackFn callback) { + ComboAggregateRequest requestMessage, + BrowserHeaders metadata_or_callback, + 
TableServiceClient.ComboAggregateCallbackFn callback) { return comboAggregate( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse comboAggregate( - ComboAggregateRequest requestMessage, BrowserHeaders metadata_or_callback) { + ComboAggregateRequest requestMessage, BrowserHeaders metadata_or_callback) { return comboAggregate( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } @JsOverlay public final UnaryResponse comboAggregate( - ComboAggregateRequest requestMessage, - TableServiceClient.ComboAggregateMetadata_or_callbackFn metadata_or_callback, - TableServiceClient.ComboAggregateCallbackFn callback) { + ComboAggregateRequest requestMessage, + TableServiceClient.ComboAggregateMetadata_or_callbackFn metadata_or_callback, + TableServiceClient.ComboAggregateCallbackFn callback) { return comboAggregate( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse comboAggregate( - ComboAggregateRequest requestMessage, - TableServiceClient.ComboAggregateMetadata_or_callbackFn metadata_or_callback) { + ComboAggregateRequest requestMessage, + TableServiceClient.ComboAggregateMetadata_or_callbackFn metadata_or_callback) { return comboAggregate( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } public native UnaryResponse comboAggregate( - ComboAggregateRequest requestMessage, - TableServiceClient.ComboAggregateMetadata_or_callbackUnionType metadata_or_callback, - TableServiceClient.ComboAggregateCallbackFn callback); + ComboAggregateRequest requestMessage, + TableServiceClient.ComboAggregateMetadata_or_callbackUnionType metadata_or_callback, + 
TableServiceClient.ComboAggregateCallbackFn callback); public native UnaryResponse comboAggregate( - ComboAggregateRequest requestMessage, - TableServiceClient.ComboAggregateMetadata_or_callbackUnionType metadata_or_callback); + ComboAggregateRequest requestMessage, + TableServiceClient.ComboAggregateMetadata_or_callbackUnionType metadata_or_callback); @JsOverlay public final UnaryResponse crossJoinTables( - CrossJoinTablesRequest requestMessage, - BrowserHeaders metadata_or_callback, - TableServiceClient.CrossJoinTablesCallbackFn callback) { + CrossJoinTablesRequest requestMessage, + BrowserHeaders metadata_or_callback, + TableServiceClient.CrossJoinTablesCallbackFn callback) { return crossJoinTables( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse crossJoinTables( - CrossJoinTablesRequest requestMessage, BrowserHeaders metadata_or_callback) { + CrossJoinTablesRequest requestMessage, BrowserHeaders metadata_or_callback) { return crossJoinTables( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } @JsOverlay public final UnaryResponse crossJoinTables( - CrossJoinTablesRequest requestMessage, - TableServiceClient.CrossJoinTablesMetadata_or_callbackFn metadata_or_callback, - TableServiceClient.CrossJoinTablesCallbackFn callback) { + CrossJoinTablesRequest requestMessage, + TableServiceClient.CrossJoinTablesMetadata_or_callbackFn metadata_or_callback, + TableServiceClient.CrossJoinTablesCallbackFn callback) { return crossJoinTables( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse crossJoinTables( - CrossJoinTablesRequest requestMessage, - TableServiceClient.CrossJoinTablesMetadata_or_callbackFn 
metadata_or_callback) { + CrossJoinTablesRequest requestMessage, + TableServiceClient.CrossJoinTablesMetadata_or_callbackFn metadata_or_callback) { return crossJoinTables( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } public native UnaryResponse crossJoinTables( - CrossJoinTablesRequest requestMessage, - TableServiceClient.CrossJoinTablesMetadata_or_callbackUnionType metadata_or_callback, - TableServiceClient.CrossJoinTablesCallbackFn callback); + CrossJoinTablesRequest requestMessage, + TableServiceClient.CrossJoinTablesMetadata_or_callbackUnionType metadata_or_callback, + TableServiceClient.CrossJoinTablesCallbackFn callback); public native UnaryResponse crossJoinTables( - CrossJoinTablesRequest requestMessage, - TableServiceClient.CrossJoinTablesMetadata_or_callbackUnionType metadata_or_callback); + CrossJoinTablesRequest requestMessage, + TableServiceClient.CrossJoinTablesMetadata_or_callbackUnionType metadata_or_callback); @JsOverlay public final UnaryResponse dropColumns( - DropColumnsRequest requestMessage, - BrowserHeaders metadata_or_callback, - TableServiceClient.DropColumnsCallbackFn callback) { + DropColumnsRequest requestMessage, + BrowserHeaders metadata_or_callback, + TableServiceClient.DropColumnsCallbackFn callback) { return dropColumns( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse dropColumns( - DropColumnsRequest requestMessage, BrowserHeaders metadata_or_callback) { + DropColumnsRequest requestMessage, BrowserHeaders metadata_or_callback) { return dropColumns( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } @JsOverlay public final UnaryResponse dropColumns( - DropColumnsRequest requestMessage, - 
TableServiceClient.DropColumnsMetadata_or_callbackFn metadata_or_callback, - TableServiceClient.DropColumnsCallbackFn callback) { + DropColumnsRequest requestMessage, + TableServiceClient.DropColumnsMetadata_or_callbackFn metadata_or_callback, + TableServiceClient.DropColumnsCallbackFn callback) { return dropColumns( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse dropColumns( - DropColumnsRequest requestMessage, - TableServiceClient.DropColumnsMetadata_or_callbackFn metadata_or_callback) { + DropColumnsRequest requestMessage, + TableServiceClient.DropColumnsMetadata_or_callbackFn metadata_or_callback) { return dropColumns( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } public native UnaryResponse dropColumns( - DropColumnsRequest requestMessage, - TableServiceClient.DropColumnsMetadata_or_callbackUnionType metadata_or_callback, - TableServiceClient.DropColumnsCallbackFn callback); + DropColumnsRequest requestMessage, + TableServiceClient.DropColumnsMetadata_or_callbackUnionType metadata_or_callback, + TableServiceClient.DropColumnsCallbackFn callback); public native UnaryResponse dropColumns( - DropColumnsRequest requestMessage, - TableServiceClient.DropColumnsMetadata_or_callbackUnionType metadata_or_callback); + DropColumnsRequest requestMessage, + TableServiceClient.DropColumnsMetadata_or_callbackUnionType metadata_or_callback); @JsOverlay public final UnaryResponse emptyTable( - EmptyTableRequest requestMessage, - BrowserHeaders metadata_or_callback, - TableServiceClient.EmptyTableCallbackFn callback) { + EmptyTableRequest requestMessage, + BrowserHeaders metadata_or_callback, + TableServiceClient.EmptyTableCallbackFn callback) { return emptyTable( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + 
requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse emptyTable( - EmptyTableRequest requestMessage, BrowserHeaders metadata_or_callback) { + EmptyTableRequest requestMessage, BrowserHeaders metadata_or_callback) { return emptyTable( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } @JsOverlay public final UnaryResponse emptyTable( - EmptyTableRequest requestMessage, - TableServiceClient.EmptyTableMetadata_or_callbackFn metadata_or_callback, - TableServiceClient.EmptyTableCallbackFn callback) { + EmptyTableRequest requestMessage, + TableServiceClient.EmptyTableMetadata_or_callbackFn metadata_or_callback, + TableServiceClient.EmptyTableCallbackFn callback) { return emptyTable( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse emptyTable( - EmptyTableRequest requestMessage, - TableServiceClient.EmptyTableMetadata_or_callbackFn metadata_or_callback) { + EmptyTableRequest requestMessage, + TableServiceClient.EmptyTableMetadata_or_callbackFn metadata_or_callback) { return emptyTable( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } public native UnaryResponse emptyTable( - EmptyTableRequest requestMessage, - TableServiceClient.EmptyTableMetadata_or_callbackUnionType metadata_or_callback, - TableServiceClient.EmptyTableCallbackFn callback); + EmptyTableRequest requestMessage, + TableServiceClient.EmptyTableMetadata_or_callbackUnionType metadata_or_callback, + TableServiceClient.EmptyTableCallbackFn callback); public native UnaryResponse emptyTable( - EmptyTableRequest requestMessage, - TableServiceClient.EmptyTableMetadata_or_callbackUnionType metadata_or_callback); + EmptyTableRequest requestMessage, + 
TableServiceClient.EmptyTableMetadata_or_callbackUnionType metadata_or_callback); @JsOverlay public final UnaryResponse exactJoinTables( - ExactJoinTablesRequest requestMessage, - BrowserHeaders metadata_or_callback, - TableServiceClient.ExactJoinTablesCallbackFn callback) { + ExactJoinTablesRequest requestMessage, + BrowserHeaders metadata_or_callback, + TableServiceClient.ExactJoinTablesCallbackFn callback) { return exactJoinTables( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse exactJoinTables( - ExactJoinTablesRequest requestMessage, BrowserHeaders metadata_or_callback) { + ExactJoinTablesRequest requestMessage, BrowserHeaders metadata_or_callback) { return exactJoinTables( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } @JsOverlay public final UnaryResponse exactJoinTables( - ExactJoinTablesRequest requestMessage, - TableServiceClient.ExactJoinTablesMetadata_or_callbackFn metadata_or_callback, - TableServiceClient.ExactJoinTablesCallbackFn callback) { + ExactJoinTablesRequest requestMessage, + TableServiceClient.ExactJoinTablesMetadata_or_callbackFn metadata_or_callback, + TableServiceClient.ExactJoinTablesCallbackFn callback) { return exactJoinTables( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse exactJoinTables( - ExactJoinTablesRequest requestMessage, - TableServiceClient.ExactJoinTablesMetadata_or_callbackFn metadata_or_callback) { + ExactJoinTablesRequest requestMessage, + TableServiceClient.ExactJoinTablesMetadata_or_callbackFn metadata_or_callback) { return exactJoinTables( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + 
metadata_or_callback)); } public native UnaryResponse exactJoinTables( - ExactJoinTablesRequest requestMessage, - TableServiceClient.ExactJoinTablesMetadata_or_callbackUnionType metadata_or_callback, - TableServiceClient.ExactJoinTablesCallbackFn callback); + ExactJoinTablesRequest requestMessage, + TableServiceClient.ExactJoinTablesMetadata_or_callbackUnionType metadata_or_callback, + TableServiceClient.ExactJoinTablesCallbackFn callback); public native UnaryResponse exactJoinTables( - ExactJoinTablesRequest requestMessage, - TableServiceClient.ExactJoinTablesMetadata_or_callbackUnionType metadata_or_callback); + ExactJoinTablesRequest requestMessage, + TableServiceClient.ExactJoinTablesMetadata_or_callbackUnionType metadata_or_callback); public native ResponseStream exportedTableUpdates( - ExportedTableUpdatesRequest requestMessage, BrowserHeaders metadata); + ExportedTableUpdatesRequest requestMessage, BrowserHeaders metadata); public native ResponseStream exportedTableUpdates( - ExportedTableUpdatesRequest requestMessage); + ExportedTableUpdatesRequest requestMessage); @JsOverlay public final UnaryResponse filter( - FilterTableRequest requestMessage, - BrowserHeaders metadata_or_callback, - TableServiceClient.FilterCallbackFn callback) { + FilterTableRequest requestMessage, + BrowserHeaders metadata_or_callback, + TableServiceClient.FilterCallbackFn callback) { return filter( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse filter( - FilterTableRequest requestMessage, BrowserHeaders metadata_or_callback) { + FilterTableRequest requestMessage, BrowserHeaders metadata_or_callback) { return filter( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } @JsOverlay public final UnaryResponse filter( - FilterTableRequest requestMessage, - 
TableServiceClient.FilterMetadata_or_callbackFn metadata_or_callback, - TableServiceClient.FilterCallbackFn callback) { + FilterTableRequest requestMessage, + TableServiceClient.FilterMetadata_or_callbackFn metadata_or_callback, + TableServiceClient.FilterCallbackFn callback) { return filter( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse filter( - FilterTableRequest requestMessage, - TableServiceClient.FilterMetadata_or_callbackFn metadata_or_callback) { + FilterTableRequest requestMessage, + TableServiceClient.FilterMetadata_or_callbackFn metadata_or_callback) { return filter( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } public native UnaryResponse filter( - FilterTableRequest requestMessage, - TableServiceClient.FilterMetadata_or_callbackUnionType metadata_or_callback, - TableServiceClient.FilterCallbackFn callback); + FilterTableRequest requestMessage, + TableServiceClient.FilterMetadata_or_callbackUnionType metadata_or_callback, + TableServiceClient.FilterCallbackFn callback); public native UnaryResponse filter( - FilterTableRequest requestMessage, - TableServiceClient.FilterMetadata_or_callbackUnionType metadata_or_callback); + FilterTableRequest requestMessage, + TableServiceClient.FilterMetadata_or_callbackUnionType metadata_or_callback); @JsOverlay public final UnaryResponse flatten( - FlattenRequest requestMessage, - BrowserHeaders metadata_or_callback, - TableServiceClient.FlattenCallbackFn callback) { + FlattenRequest requestMessage, + BrowserHeaders metadata_or_callback, + TableServiceClient.FlattenCallbackFn callback) { return flatten( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse 
flatten( - FlattenRequest requestMessage, BrowserHeaders metadata_or_callback) { + FlattenRequest requestMessage, BrowserHeaders metadata_or_callback) { return flatten( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } @JsOverlay public final UnaryResponse flatten( - FlattenRequest requestMessage, - TableServiceClient.FlattenMetadata_or_callbackFn metadata_or_callback, - TableServiceClient.FlattenCallbackFn callback) { + FlattenRequest requestMessage, + TableServiceClient.FlattenMetadata_or_callbackFn metadata_or_callback, + TableServiceClient.FlattenCallbackFn callback) { return flatten( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse flatten( - FlattenRequest requestMessage, - TableServiceClient.FlattenMetadata_or_callbackFn metadata_or_callback) { + FlattenRequest requestMessage, + TableServiceClient.FlattenMetadata_or_callbackFn metadata_or_callback) { return flatten( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } public native UnaryResponse flatten( - FlattenRequest requestMessage, - TableServiceClient.FlattenMetadata_or_callbackUnionType metadata_or_callback, - TableServiceClient.FlattenCallbackFn callback); + FlattenRequest requestMessage, + TableServiceClient.FlattenMetadata_or_callbackUnionType metadata_or_callback, + TableServiceClient.FlattenCallbackFn callback); public native UnaryResponse flatten( - FlattenRequest requestMessage, - TableServiceClient.FlattenMetadata_or_callbackUnionType metadata_or_callback); + FlattenRequest requestMessage, + TableServiceClient.FlattenMetadata_or_callbackUnionType metadata_or_callback); @JsOverlay public final UnaryResponse head( - HeadOrTailRequest requestMessage, - BrowserHeaders metadata_or_callback, - 
TableServiceClient.HeadCallbackFn callback) { + HeadOrTailRequest requestMessage, + BrowserHeaders metadata_or_callback, + TableServiceClient.HeadCallbackFn callback) { return head( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse head( - HeadOrTailRequest requestMessage, BrowserHeaders metadata_or_callback) { + HeadOrTailRequest requestMessage, BrowserHeaders metadata_or_callback) { return head( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } @JsOverlay public final UnaryResponse head( - HeadOrTailRequest requestMessage, - TableServiceClient.HeadMetadata_or_callbackFn metadata_or_callback, - TableServiceClient.HeadCallbackFn callback) { + HeadOrTailRequest requestMessage, + TableServiceClient.HeadMetadata_or_callbackFn metadata_or_callback, + TableServiceClient.HeadCallbackFn callback) { return head( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse head( - HeadOrTailRequest requestMessage, - TableServiceClient.HeadMetadata_or_callbackFn metadata_or_callback) { + HeadOrTailRequest requestMessage, + TableServiceClient.HeadMetadata_or_callbackFn metadata_or_callback) { return head( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } public native UnaryResponse head( - HeadOrTailRequest requestMessage, - TableServiceClient.HeadMetadata_or_callbackUnionType metadata_or_callback, - TableServiceClient.HeadCallbackFn callback); + HeadOrTailRequest requestMessage, + TableServiceClient.HeadMetadata_or_callbackUnionType metadata_or_callback, + TableServiceClient.HeadCallbackFn callback); public native UnaryResponse head( - 
HeadOrTailRequest requestMessage, - TableServiceClient.HeadMetadata_or_callbackUnionType metadata_or_callback); + HeadOrTailRequest requestMessage, + TableServiceClient.HeadMetadata_or_callbackUnionType metadata_or_callback); @JsOverlay public final UnaryResponse headBy( - HeadOrTailByRequest requestMessage, - BrowserHeaders metadata_or_callback, - TableServiceClient.HeadByCallbackFn callback) { + HeadOrTailByRequest requestMessage, + BrowserHeaders metadata_or_callback, + TableServiceClient.HeadByCallbackFn callback) { return headBy( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse headBy( - HeadOrTailByRequest requestMessage, BrowserHeaders metadata_or_callback) { + HeadOrTailByRequest requestMessage, BrowserHeaders metadata_or_callback) { return headBy( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } @JsOverlay public final UnaryResponse headBy( - HeadOrTailByRequest requestMessage, - TableServiceClient.HeadByMetadata_or_callbackFn metadata_or_callback, - TableServiceClient.HeadByCallbackFn callback) { + HeadOrTailByRequest requestMessage, + TableServiceClient.HeadByMetadata_or_callbackFn metadata_or_callback, + TableServiceClient.HeadByCallbackFn callback) { return headBy( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse headBy( - HeadOrTailByRequest requestMessage, - TableServiceClient.HeadByMetadata_or_callbackFn metadata_or_callback) { + HeadOrTailByRequest requestMessage, + TableServiceClient.HeadByMetadata_or_callbackFn metadata_or_callback) { return headBy( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } public 
native UnaryResponse headBy( - HeadOrTailByRequest requestMessage, - TableServiceClient.HeadByMetadata_or_callbackUnionType metadata_or_callback, - TableServiceClient.HeadByCallbackFn callback); + HeadOrTailByRequest requestMessage, + TableServiceClient.HeadByMetadata_or_callbackUnionType metadata_or_callback, + TableServiceClient.HeadByCallbackFn callback); public native UnaryResponse headBy( - HeadOrTailByRequest requestMessage, - TableServiceClient.HeadByMetadata_or_callbackUnionType metadata_or_callback); + HeadOrTailByRequest requestMessage, + TableServiceClient.HeadByMetadata_or_callbackUnionType metadata_or_callback); @JsOverlay public final UnaryResponse lazyUpdate( - SelectOrUpdateRequest requestMessage, - BrowserHeaders metadata_or_callback, - TableServiceClient.LazyUpdateCallbackFn callback) { + SelectOrUpdateRequest requestMessage, + BrowserHeaders metadata_or_callback, + TableServiceClient.LazyUpdateCallbackFn callback) { return lazyUpdate( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse lazyUpdate( - SelectOrUpdateRequest requestMessage, BrowserHeaders metadata_or_callback) { + SelectOrUpdateRequest requestMessage, BrowserHeaders metadata_or_callback) { return lazyUpdate( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } @JsOverlay public final UnaryResponse lazyUpdate( - SelectOrUpdateRequest requestMessage, - TableServiceClient.LazyUpdateMetadata_or_callbackFn metadata_or_callback, - TableServiceClient.LazyUpdateCallbackFn callback) { + SelectOrUpdateRequest requestMessage, + TableServiceClient.LazyUpdateMetadata_or_callbackFn metadata_or_callback, + TableServiceClient.LazyUpdateCallbackFn callback) { return lazyUpdate( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + 
Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse lazyUpdate( - SelectOrUpdateRequest requestMessage, - TableServiceClient.LazyUpdateMetadata_or_callbackFn metadata_or_callback) { + SelectOrUpdateRequest requestMessage, + TableServiceClient.LazyUpdateMetadata_or_callbackFn metadata_or_callback) { return lazyUpdate( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } public native UnaryResponse lazyUpdate( - SelectOrUpdateRequest requestMessage, - TableServiceClient.LazyUpdateMetadata_or_callbackUnionType metadata_or_callback, - TableServiceClient.LazyUpdateCallbackFn callback); + SelectOrUpdateRequest requestMessage, + TableServiceClient.LazyUpdateMetadata_or_callbackUnionType metadata_or_callback, + TableServiceClient.LazyUpdateCallbackFn callback); public native UnaryResponse lazyUpdate( - SelectOrUpdateRequest requestMessage, - TableServiceClient.LazyUpdateMetadata_or_callbackUnionType metadata_or_callback); + SelectOrUpdateRequest requestMessage, + TableServiceClient.LazyUpdateMetadata_or_callbackUnionType metadata_or_callback); @JsOverlay public final UnaryResponse leftJoinTables( - LeftJoinTablesRequest requestMessage, - BrowserHeaders metadata_or_callback, - TableServiceClient.LeftJoinTablesCallbackFn callback) { + LeftJoinTablesRequest requestMessage, + BrowserHeaders metadata_or_callback, + TableServiceClient.LeftJoinTablesCallbackFn callback) { return leftJoinTables( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse leftJoinTables( - LeftJoinTablesRequest requestMessage, BrowserHeaders metadata_or_callback) { + LeftJoinTablesRequest requestMessage, BrowserHeaders metadata_or_callback) { return leftJoinTables( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + 
Js.uncheckedCast( + metadata_or_callback)); } @JsOverlay public final UnaryResponse leftJoinTables( - LeftJoinTablesRequest requestMessage, - TableServiceClient.LeftJoinTablesMetadata_or_callbackFn metadata_or_callback, - TableServiceClient.LeftJoinTablesCallbackFn callback) { + LeftJoinTablesRequest requestMessage, + TableServiceClient.LeftJoinTablesMetadata_or_callbackFn metadata_or_callback, + TableServiceClient.LeftJoinTablesCallbackFn callback) { return leftJoinTables( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse leftJoinTables( - LeftJoinTablesRequest requestMessage, - TableServiceClient.LeftJoinTablesMetadata_or_callbackFn metadata_or_callback) { + LeftJoinTablesRequest requestMessage, + TableServiceClient.LeftJoinTablesMetadata_or_callbackFn metadata_or_callback) { return leftJoinTables( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } public native UnaryResponse leftJoinTables( - LeftJoinTablesRequest requestMessage, - TableServiceClient.LeftJoinTablesMetadata_or_callbackUnionType metadata_or_callback, - TableServiceClient.LeftJoinTablesCallbackFn callback); + LeftJoinTablesRequest requestMessage, + TableServiceClient.LeftJoinTablesMetadata_or_callbackUnionType metadata_or_callback, + TableServiceClient.LeftJoinTablesCallbackFn callback); public native UnaryResponse leftJoinTables( - LeftJoinTablesRequest requestMessage, - TableServiceClient.LeftJoinTablesMetadata_or_callbackUnionType metadata_or_callback); + LeftJoinTablesRequest requestMessage, + TableServiceClient.LeftJoinTablesMetadata_or_callbackUnionType metadata_or_callback); @JsOverlay public final UnaryResponse mergeTables( - MergeTablesRequest requestMessage, - BrowserHeaders metadata_or_callback, - TableServiceClient.MergeTablesCallbackFn callback) { + MergeTablesRequest 
requestMessage, + BrowserHeaders metadata_or_callback, + TableServiceClient.MergeTablesCallbackFn callback) { return mergeTables( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse mergeTables( - MergeTablesRequest requestMessage, BrowserHeaders metadata_or_callback) { + MergeTablesRequest requestMessage, BrowserHeaders metadata_or_callback) { return mergeTables( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } @JsOverlay public final UnaryResponse mergeTables( - MergeTablesRequest requestMessage, - TableServiceClient.MergeTablesMetadata_or_callbackFn metadata_or_callback, - TableServiceClient.MergeTablesCallbackFn callback) { + MergeTablesRequest requestMessage, + TableServiceClient.MergeTablesMetadata_or_callbackFn metadata_or_callback, + TableServiceClient.MergeTablesCallbackFn callback) { return mergeTables( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse mergeTables( - MergeTablesRequest requestMessage, - TableServiceClient.MergeTablesMetadata_or_callbackFn metadata_or_callback) { + MergeTablesRequest requestMessage, + TableServiceClient.MergeTablesMetadata_or_callbackFn metadata_or_callback) { return mergeTables( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } public native UnaryResponse mergeTables( - MergeTablesRequest requestMessage, - TableServiceClient.MergeTablesMetadata_or_callbackUnionType metadata_or_callback, - TableServiceClient.MergeTablesCallbackFn callback); + MergeTablesRequest requestMessage, + TableServiceClient.MergeTablesMetadata_or_callbackUnionType metadata_or_callback, + 
TableServiceClient.MergeTablesCallbackFn callback); public native UnaryResponse mergeTables( - MergeTablesRequest requestMessage, - TableServiceClient.MergeTablesMetadata_or_callbackUnionType metadata_or_callback); + MergeTablesRequest requestMessage, + TableServiceClient.MergeTablesMetadata_or_callbackUnionType metadata_or_callback); @JsOverlay public final UnaryResponse naturalJoinTables( - NaturalJoinTablesRequest requestMessage, - BrowserHeaders metadata_or_callback, - TableServiceClient.NaturalJoinTablesCallbackFn callback) { + NaturalJoinTablesRequest requestMessage, + BrowserHeaders metadata_or_callback, + TableServiceClient.NaturalJoinTablesCallbackFn callback) { return naturalJoinTables( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse naturalJoinTables( - NaturalJoinTablesRequest requestMessage, BrowserHeaders metadata_or_callback) { + NaturalJoinTablesRequest requestMessage, BrowserHeaders metadata_or_callback) { return naturalJoinTables( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } @JsOverlay public final UnaryResponse naturalJoinTables( - NaturalJoinTablesRequest requestMessage, - TableServiceClient.NaturalJoinTablesMetadata_or_callbackFn metadata_or_callback, - TableServiceClient.NaturalJoinTablesCallbackFn callback) { + NaturalJoinTablesRequest requestMessage, + TableServiceClient.NaturalJoinTablesMetadata_or_callbackFn metadata_or_callback, + TableServiceClient.NaturalJoinTablesCallbackFn callback) { return naturalJoinTables( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse naturalJoinTables( - NaturalJoinTablesRequest requestMessage, - 
TableServiceClient.NaturalJoinTablesMetadata_or_callbackFn metadata_or_callback) { + NaturalJoinTablesRequest requestMessage, + TableServiceClient.NaturalJoinTablesMetadata_or_callbackFn metadata_or_callback) { return naturalJoinTables( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } public native UnaryResponse naturalJoinTables( - NaturalJoinTablesRequest requestMessage, - TableServiceClient.NaturalJoinTablesMetadata_or_callbackUnionType metadata_or_callback, - TableServiceClient.NaturalJoinTablesCallbackFn callback); + NaturalJoinTablesRequest requestMessage, + TableServiceClient.NaturalJoinTablesMetadata_or_callbackUnionType metadata_or_callback, + TableServiceClient.NaturalJoinTablesCallbackFn callback); public native UnaryResponse naturalJoinTables( - NaturalJoinTablesRequest requestMessage, - TableServiceClient.NaturalJoinTablesMetadata_or_callbackUnionType metadata_or_callback); + NaturalJoinTablesRequest requestMessage, + TableServiceClient.NaturalJoinTablesMetadata_or_callbackUnionType metadata_or_callback); @JsOverlay public final UnaryResponse runChartDownsample( - RunChartDownsampleRequest requestMessage, - BrowserHeaders metadata_or_callback, - TableServiceClient.RunChartDownsampleCallbackFn callback) { + RunChartDownsampleRequest requestMessage, + BrowserHeaders metadata_or_callback, + TableServiceClient.RunChartDownsampleCallbackFn callback) { return runChartDownsample( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse runChartDownsample( - RunChartDownsampleRequest requestMessage, BrowserHeaders metadata_or_callback) { + RunChartDownsampleRequest requestMessage, BrowserHeaders metadata_or_callback) { return runChartDownsample( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + 
metadata_or_callback)); } @JsOverlay public final UnaryResponse runChartDownsample( - RunChartDownsampleRequest requestMessage, - TableServiceClient.RunChartDownsampleMetadata_or_callbackFn metadata_or_callback, - TableServiceClient.RunChartDownsampleCallbackFn callback) { + RunChartDownsampleRequest requestMessage, + TableServiceClient.RunChartDownsampleMetadata_or_callbackFn metadata_or_callback, + TableServiceClient.RunChartDownsampleCallbackFn callback) { return runChartDownsample( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse runChartDownsample( - RunChartDownsampleRequest requestMessage, - TableServiceClient.RunChartDownsampleMetadata_or_callbackFn metadata_or_callback) { + RunChartDownsampleRequest requestMessage, + TableServiceClient.RunChartDownsampleMetadata_or_callbackFn metadata_or_callback) { return runChartDownsample( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } public native UnaryResponse runChartDownsample( - RunChartDownsampleRequest requestMessage, - TableServiceClient.RunChartDownsampleMetadata_or_callbackUnionType metadata_or_callback, - TableServiceClient.RunChartDownsampleCallbackFn callback); + RunChartDownsampleRequest requestMessage, + TableServiceClient.RunChartDownsampleMetadata_or_callbackUnionType metadata_or_callback, + TableServiceClient.RunChartDownsampleCallbackFn callback); public native UnaryResponse runChartDownsample( - RunChartDownsampleRequest requestMessage, - TableServiceClient.RunChartDownsampleMetadata_or_callbackUnionType metadata_or_callback); + RunChartDownsampleRequest requestMessage, + TableServiceClient.RunChartDownsampleMetadata_or_callbackUnionType metadata_or_callback); @JsOverlay public final UnaryResponse select( - SelectOrUpdateRequest requestMessage, - BrowserHeaders 
metadata_or_callback, - TableServiceClient.SelectCallbackFn callback) { + SelectOrUpdateRequest requestMessage, + BrowserHeaders metadata_or_callback, + TableServiceClient.SelectCallbackFn callback) { return select( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse select( - SelectOrUpdateRequest requestMessage, BrowserHeaders metadata_or_callback) { + SelectOrUpdateRequest requestMessage, BrowserHeaders metadata_or_callback) { return select( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } @JsOverlay public final UnaryResponse select( - SelectOrUpdateRequest requestMessage, - TableServiceClient.SelectMetadata_or_callbackFn metadata_or_callback, - TableServiceClient.SelectCallbackFn callback) { + SelectOrUpdateRequest requestMessage, + TableServiceClient.SelectMetadata_or_callbackFn metadata_or_callback, + TableServiceClient.SelectCallbackFn callback) { return select( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse select( - SelectOrUpdateRequest requestMessage, - TableServiceClient.SelectMetadata_or_callbackFn metadata_or_callback) { + SelectOrUpdateRequest requestMessage, + TableServiceClient.SelectMetadata_or_callbackFn metadata_or_callback) { return select( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } public native UnaryResponse select( - SelectOrUpdateRequest requestMessage, - TableServiceClient.SelectMetadata_or_callbackUnionType metadata_or_callback, - TableServiceClient.SelectCallbackFn callback); + SelectOrUpdateRequest requestMessage, + TableServiceClient.SelectMetadata_or_callbackUnionType metadata_or_callback, 
+ TableServiceClient.SelectCallbackFn callback); public native UnaryResponse select( - SelectOrUpdateRequest requestMessage, - TableServiceClient.SelectMetadata_or_callbackUnionType metadata_or_callback); + SelectOrUpdateRequest requestMessage, + TableServiceClient.SelectMetadata_or_callbackUnionType metadata_or_callback); @JsOverlay public final UnaryResponse selectDistinct( - SelectDistinctRequest requestMessage, - BrowserHeaders metadata_or_callback, - TableServiceClient.SelectDistinctCallbackFn callback) { + SelectDistinctRequest requestMessage, + BrowserHeaders metadata_or_callback, + TableServiceClient.SelectDistinctCallbackFn callback) { return selectDistinct( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse selectDistinct( - SelectDistinctRequest requestMessage, BrowserHeaders metadata_or_callback) { + SelectDistinctRequest requestMessage, BrowserHeaders metadata_or_callback) { return selectDistinct( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } @JsOverlay public final UnaryResponse selectDistinct( - SelectDistinctRequest requestMessage, - TableServiceClient.SelectDistinctMetadata_or_callbackFn metadata_or_callback, - TableServiceClient.SelectDistinctCallbackFn callback) { + SelectDistinctRequest requestMessage, + TableServiceClient.SelectDistinctMetadata_or_callbackFn metadata_or_callback, + TableServiceClient.SelectDistinctCallbackFn callback) { return selectDistinct( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse selectDistinct( - SelectDistinctRequest requestMessage, - TableServiceClient.SelectDistinctMetadata_or_callbackFn metadata_or_callback) { + SelectDistinctRequest requestMessage, + 
TableServiceClient.SelectDistinctMetadata_or_callbackFn metadata_or_callback) { return selectDistinct( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } public native UnaryResponse selectDistinct( - SelectDistinctRequest requestMessage, - TableServiceClient.SelectDistinctMetadata_or_callbackUnionType metadata_or_callback, - TableServiceClient.SelectDistinctCallbackFn callback); + SelectDistinctRequest requestMessage, + TableServiceClient.SelectDistinctMetadata_or_callbackUnionType metadata_or_callback, + TableServiceClient.SelectDistinctCallbackFn callback); public native UnaryResponse selectDistinct( - SelectDistinctRequest requestMessage, - TableServiceClient.SelectDistinctMetadata_or_callbackUnionType metadata_or_callback); + SelectDistinctRequest requestMessage, + TableServiceClient.SelectDistinctMetadata_or_callbackUnionType metadata_or_callback); @JsOverlay public final UnaryResponse snapshot( - SnapshotTableRequest requestMessage, - BrowserHeaders metadata_or_callback, - TableServiceClient.SnapshotCallbackFn callback) { + SnapshotTableRequest requestMessage, + BrowserHeaders metadata_or_callback, + TableServiceClient.SnapshotCallbackFn callback) { return snapshot( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse snapshot( - SnapshotTableRequest requestMessage, BrowserHeaders metadata_or_callback) { + SnapshotTableRequest requestMessage, BrowserHeaders metadata_or_callback) { return snapshot( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } @JsOverlay public final UnaryResponse snapshot( - SnapshotTableRequest requestMessage, - TableServiceClient.SnapshotMetadata_or_callbackFn metadata_or_callback, - TableServiceClient.SnapshotCallbackFn callback) { + 
SnapshotTableRequest requestMessage, + TableServiceClient.SnapshotMetadata_or_callbackFn metadata_or_callback, + TableServiceClient.SnapshotCallbackFn callback) { return snapshot( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse snapshot( - SnapshotTableRequest requestMessage, - TableServiceClient.SnapshotMetadata_or_callbackFn metadata_or_callback) { + SnapshotTableRequest requestMessage, + TableServiceClient.SnapshotMetadata_or_callbackFn metadata_or_callback) { return snapshot( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } public native UnaryResponse snapshot( - SnapshotTableRequest requestMessage, - TableServiceClient.SnapshotMetadata_or_callbackUnionType metadata_or_callback, - TableServiceClient.SnapshotCallbackFn callback); + SnapshotTableRequest requestMessage, + TableServiceClient.SnapshotMetadata_or_callbackUnionType metadata_or_callback, + TableServiceClient.SnapshotCallbackFn callback); public native UnaryResponse snapshot( - SnapshotTableRequest requestMessage, - TableServiceClient.SnapshotMetadata_or_callbackUnionType metadata_or_callback); + SnapshotTableRequest requestMessage, + TableServiceClient.SnapshotMetadata_or_callbackUnionType metadata_or_callback); @JsOverlay public final UnaryResponse sort( - SortTableRequest requestMessage, - BrowserHeaders metadata_or_callback, - TableServiceClient.SortCallbackFn callback) { + SortTableRequest requestMessage, + BrowserHeaders metadata_or_callback, + TableServiceClient.SortCallbackFn callback) { return sort( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse sort( - SortTableRequest requestMessage, BrowserHeaders metadata_or_callback) { + 
SortTableRequest requestMessage, BrowserHeaders metadata_or_callback) { return sort( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } @JsOverlay public final UnaryResponse sort( - SortTableRequest requestMessage, - TableServiceClient.SortMetadata_or_callbackFn metadata_or_callback, - TableServiceClient.SortCallbackFn callback) { + SortTableRequest requestMessage, + TableServiceClient.SortMetadata_or_callbackFn metadata_or_callback, + TableServiceClient.SortCallbackFn callback) { return sort( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse sort( - SortTableRequest requestMessage, - TableServiceClient.SortMetadata_or_callbackFn metadata_or_callback) { + SortTableRequest requestMessage, + TableServiceClient.SortMetadata_or_callbackFn metadata_or_callback) { return sort( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } public native UnaryResponse sort( - SortTableRequest requestMessage, - TableServiceClient.SortMetadata_or_callbackUnionType metadata_or_callback, - TableServiceClient.SortCallbackFn callback); + SortTableRequest requestMessage, + TableServiceClient.SortMetadata_or_callbackUnionType metadata_or_callback, + TableServiceClient.SortCallbackFn callback); public native UnaryResponse sort( - SortTableRequest requestMessage, - TableServiceClient.SortMetadata_or_callbackUnionType metadata_or_callback); + SortTableRequest requestMessage, + TableServiceClient.SortMetadata_or_callbackUnionType metadata_or_callback); @JsOverlay public final UnaryResponse tail( - HeadOrTailRequest requestMessage, - BrowserHeaders metadata_or_callback, - TableServiceClient.TailCallbackFn callback) { + HeadOrTailRequest requestMessage, + BrowserHeaders metadata_or_callback, + 
TableServiceClient.TailCallbackFn callback) { return tail( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse tail( - HeadOrTailRequest requestMessage, BrowserHeaders metadata_or_callback) { + HeadOrTailRequest requestMessage, BrowserHeaders metadata_or_callback) { return tail( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } @JsOverlay public final UnaryResponse tail( - HeadOrTailRequest requestMessage, - TableServiceClient.TailMetadata_or_callbackFn metadata_or_callback, - TableServiceClient.TailCallbackFn callback) { + HeadOrTailRequest requestMessage, + TableServiceClient.TailMetadata_or_callbackFn metadata_or_callback, + TableServiceClient.TailCallbackFn callback) { return tail( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse tail( - HeadOrTailRequest requestMessage, - TableServiceClient.TailMetadata_or_callbackFn metadata_or_callback) { + HeadOrTailRequest requestMessage, + TableServiceClient.TailMetadata_or_callbackFn metadata_or_callback) { return tail( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } public native UnaryResponse tail( - HeadOrTailRequest requestMessage, - TableServiceClient.TailMetadata_or_callbackUnionType metadata_or_callback, - TableServiceClient.TailCallbackFn callback); + HeadOrTailRequest requestMessage, + TableServiceClient.TailMetadata_or_callbackUnionType metadata_or_callback, + TableServiceClient.TailCallbackFn callback); public native UnaryResponse tail( - HeadOrTailRequest requestMessage, - TableServiceClient.TailMetadata_or_callbackUnionType metadata_or_callback); + HeadOrTailRequest 
requestMessage, + TableServiceClient.TailMetadata_or_callbackUnionType metadata_or_callback); @JsOverlay public final UnaryResponse tailBy( - HeadOrTailByRequest requestMessage, - BrowserHeaders metadata_or_callback, - TableServiceClient.TailByCallbackFn callback) { + HeadOrTailByRequest requestMessage, + BrowserHeaders metadata_or_callback, + TableServiceClient.TailByCallbackFn callback) { return tailBy( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse tailBy( - HeadOrTailByRequest requestMessage, BrowserHeaders metadata_or_callback) { + HeadOrTailByRequest requestMessage, BrowserHeaders metadata_or_callback) { return tailBy( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } @JsOverlay public final UnaryResponse tailBy( - HeadOrTailByRequest requestMessage, - TableServiceClient.TailByMetadata_or_callbackFn metadata_or_callback, - TableServiceClient.TailByCallbackFn callback) { + HeadOrTailByRequest requestMessage, + TableServiceClient.TailByMetadata_or_callbackFn metadata_or_callback, + TableServiceClient.TailByCallbackFn callback) { return tailBy( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse tailBy( - HeadOrTailByRequest requestMessage, - TableServiceClient.TailByMetadata_or_callbackFn metadata_or_callback) { + HeadOrTailByRequest requestMessage, + TableServiceClient.TailByMetadata_or_callbackFn metadata_or_callback) { return tailBy( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } public native UnaryResponse tailBy( - HeadOrTailByRequest requestMessage, - TableServiceClient.TailByMetadata_or_callbackUnionType 
metadata_or_callback, - TableServiceClient.TailByCallbackFn callback); + HeadOrTailByRequest requestMessage, + TableServiceClient.TailByMetadata_or_callbackUnionType metadata_or_callback, + TableServiceClient.TailByCallbackFn callback); public native UnaryResponse tailBy( - HeadOrTailByRequest requestMessage, - TableServiceClient.TailByMetadata_or_callbackUnionType metadata_or_callback); + HeadOrTailByRequest requestMessage, + TableServiceClient.TailByMetadata_or_callbackUnionType metadata_or_callback); @JsOverlay public final UnaryResponse timeTable( - TimeTableRequest requestMessage, - BrowserHeaders metadata_or_callback, - TableServiceClient.TimeTableCallbackFn callback) { + TimeTableRequest requestMessage, + BrowserHeaders metadata_or_callback, + TableServiceClient.TimeTableCallbackFn callback) { return timeTable( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse timeTable( - TimeTableRequest requestMessage, BrowserHeaders metadata_or_callback) { + TimeTableRequest requestMessage, BrowserHeaders metadata_or_callback) { return timeTable( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } @JsOverlay public final UnaryResponse timeTable( - TimeTableRequest requestMessage, - TableServiceClient.TimeTableMetadata_or_callbackFn metadata_or_callback, - TableServiceClient.TimeTableCallbackFn callback) { + TimeTableRequest requestMessage, + TableServiceClient.TimeTableMetadata_or_callbackFn metadata_or_callback, + TableServiceClient.TimeTableCallbackFn callback) { return timeTable( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse timeTable( - TimeTableRequest requestMessage, - 
TableServiceClient.TimeTableMetadata_or_callbackFn metadata_or_callback) { + TimeTableRequest requestMessage, + TableServiceClient.TimeTableMetadata_or_callbackFn metadata_or_callback) { return timeTable( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } public native UnaryResponse timeTable( - TimeTableRequest requestMessage, - TableServiceClient.TimeTableMetadata_or_callbackUnionType metadata_or_callback, - TableServiceClient.TimeTableCallbackFn callback); + TimeTableRequest requestMessage, + TableServiceClient.TimeTableMetadata_or_callbackUnionType metadata_or_callback, + TableServiceClient.TimeTableCallbackFn callback); public native UnaryResponse timeTable( - TimeTableRequest requestMessage, - TableServiceClient.TimeTableMetadata_or_callbackUnionType metadata_or_callback); + TimeTableRequest requestMessage, + TableServiceClient.TimeTableMetadata_or_callbackUnionType metadata_or_callback); @JsOverlay public final UnaryResponse ungroup( - UngroupRequest requestMessage, - BrowserHeaders metadata_or_callback, - TableServiceClient.UngroupCallbackFn callback) { + UngroupRequest requestMessage, + BrowserHeaders metadata_or_callback, + TableServiceClient.UngroupCallbackFn callback) { return ungroup( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse ungroup( - UngroupRequest requestMessage, BrowserHeaders metadata_or_callback) { + UngroupRequest requestMessage, BrowserHeaders metadata_or_callback) { return ungroup( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } @JsOverlay public final UnaryResponse ungroup( - UngroupRequest requestMessage, - TableServiceClient.UngroupMetadata_or_callbackFn metadata_or_callback, - TableServiceClient.UngroupCallbackFn callback) { + 
UngroupRequest requestMessage, + TableServiceClient.UngroupMetadata_or_callbackFn metadata_or_callback, + TableServiceClient.UngroupCallbackFn callback) { return ungroup( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse ungroup( - UngroupRequest requestMessage, - TableServiceClient.UngroupMetadata_or_callbackFn metadata_or_callback) { + UngroupRequest requestMessage, + TableServiceClient.UngroupMetadata_or_callbackFn metadata_or_callback) { return ungroup( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } public native UnaryResponse ungroup( - UngroupRequest requestMessage, - TableServiceClient.UngroupMetadata_or_callbackUnionType metadata_or_callback, - TableServiceClient.UngroupCallbackFn callback); + UngroupRequest requestMessage, + TableServiceClient.UngroupMetadata_or_callbackUnionType metadata_or_callback, + TableServiceClient.UngroupCallbackFn callback); public native UnaryResponse ungroup( - UngroupRequest requestMessage, - TableServiceClient.UngroupMetadata_or_callbackUnionType metadata_or_callback); + UngroupRequest requestMessage, + TableServiceClient.UngroupMetadata_or_callbackUnionType metadata_or_callback); @JsOverlay public final UnaryResponse unstructuredFilter( - UnstructuredFilterTableRequest requestMessage, - BrowserHeaders metadata_or_callback, - TableServiceClient.UnstructuredFilterCallbackFn callback) { + UnstructuredFilterTableRequest requestMessage, + BrowserHeaders metadata_or_callback, + TableServiceClient.UnstructuredFilterCallbackFn callback) { return unstructuredFilter( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse unstructuredFilter( - UnstructuredFilterTableRequest 
requestMessage, BrowserHeaders metadata_or_callback) { + UnstructuredFilterTableRequest requestMessage, BrowserHeaders metadata_or_callback) { return unstructuredFilter( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } @JsOverlay public final UnaryResponse unstructuredFilter( - UnstructuredFilterTableRequest requestMessage, - TableServiceClient.UnstructuredFilterMetadata_or_callbackFn metadata_or_callback, - TableServiceClient.UnstructuredFilterCallbackFn callback) { + UnstructuredFilterTableRequest requestMessage, + TableServiceClient.UnstructuredFilterMetadata_or_callbackFn metadata_or_callback, + TableServiceClient.UnstructuredFilterCallbackFn callback) { return unstructuredFilter( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse unstructuredFilter( - UnstructuredFilterTableRequest requestMessage, - TableServiceClient.UnstructuredFilterMetadata_or_callbackFn metadata_or_callback) { + UnstructuredFilterTableRequest requestMessage, + TableServiceClient.UnstructuredFilterMetadata_or_callbackFn metadata_or_callback) { return unstructuredFilter( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } public native UnaryResponse unstructuredFilter( - UnstructuredFilterTableRequest requestMessage, - TableServiceClient.UnstructuredFilterMetadata_or_callbackUnionType metadata_or_callback, - TableServiceClient.UnstructuredFilterCallbackFn callback); + UnstructuredFilterTableRequest requestMessage, + TableServiceClient.UnstructuredFilterMetadata_or_callbackUnionType metadata_or_callback, + TableServiceClient.UnstructuredFilterCallbackFn callback); public native UnaryResponse unstructuredFilter( - UnstructuredFilterTableRequest requestMessage, - 
TableServiceClient.UnstructuredFilterMetadata_or_callbackUnionType metadata_or_callback); + UnstructuredFilterTableRequest requestMessage, + TableServiceClient.UnstructuredFilterMetadata_or_callbackUnionType metadata_or_callback); @JsOverlay public final UnaryResponse update( - SelectOrUpdateRequest requestMessage, - BrowserHeaders metadata_or_callback, - TableServiceClient.UpdateCallbackFn callback) { + SelectOrUpdateRequest requestMessage, + BrowserHeaders metadata_or_callback, + TableServiceClient.UpdateCallbackFn callback) { return update( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse update( - SelectOrUpdateRequest requestMessage, BrowserHeaders metadata_or_callback) { + SelectOrUpdateRequest requestMessage, BrowserHeaders metadata_or_callback) { return update( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } @JsOverlay public final UnaryResponse update( - SelectOrUpdateRequest requestMessage, - TableServiceClient.UpdateMetadata_or_callbackFn metadata_or_callback, - TableServiceClient.UpdateCallbackFn callback) { + SelectOrUpdateRequest requestMessage, + TableServiceClient.UpdateMetadata_or_callbackFn metadata_or_callback, + TableServiceClient.UpdateCallbackFn callback) { return update( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse update( - SelectOrUpdateRequest requestMessage, - TableServiceClient.UpdateMetadata_or_callbackFn metadata_or_callback) { + SelectOrUpdateRequest requestMessage, + TableServiceClient.UpdateMetadata_or_callbackFn metadata_or_callback) { return update( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + 
metadata_or_callback)); } public native UnaryResponse update( - SelectOrUpdateRequest requestMessage, - TableServiceClient.UpdateMetadata_or_callbackUnionType metadata_or_callback, - TableServiceClient.UpdateCallbackFn callback); + SelectOrUpdateRequest requestMessage, + TableServiceClient.UpdateMetadata_or_callbackUnionType metadata_or_callback, + TableServiceClient.UpdateCallbackFn callback); public native UnaryResponse update( - SelectOrUpdateRequest requestMessage, - TableServiceClient.UpdateMetadata_or_callbackUnionType metadata_or_callback); + SelectOrUpdateRequest requestMessage, + TableServiceClient.UpdateMetadata_or_callbackUnionType metadata_or_callback); @JsOverlay public final UnaryResponse updateView( - SelectOrUpdateRequest requestMessage, - BrowserHeaders metadata_or_callback, - TableServiceClient.UpdateViewCallbackFn callback) { + SelectOrUpdateRequest requestMessage, + BrowserHeaders metadata_or_callback, + TableServiceClient.UpdateViewCallbackFn callback) { return updateView( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse updateView( - SelectOrUpdateRequest requestMessage, BrowserHeaders metadata_or_callback) { + SelectOrUpdateRequest requestMessage, BrowserHeaders metadata_or_callback) { return updateView( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } @JsOverlay public final UnaryResponse updateView( - SelectOrUpdateRequest requestMessage, - TableServiceClient.UpdateViewMetadata_or_callbackFn metadata_or_callback, - TableServiceClient.UpdateViewCallbackFn callback) { + SelectOrUpdateRequest requestMessage, + TableServiceClient.UpdateViewMetadata_or_callbackFn metadata_or_callback, + TableServiceClient.UpdateViewCallbackFn callback) { return updateView( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - 
callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse updateView( - SelectOrUpdateRequest requestMessage, - TableServiceClient.UpdateViewMetadata_or_callbackFn metadata_or_callback) { + SelectOrUpdateRequest requestMessage, + TableServiceClient.UpdateViewMetadata_or_callbackFn metadata_or_callback) { return updateView( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } public native UnaryResponse updateView( - SelectOrUpdateRequest requestMessage, - TableServiceClient.UpdateViewMetadata_or_callbackUnionType metadata_or_callback, - TableServiceClient.UpdateViewCallbackFn callback); + SelectOrUpdateRequest requestMessage, + TableServiceClient.UpdateViewMetadata_or_callbackUnionType metadata_or_callback, + TableServiceClient.UpdateViewCallbackFn callback); public native UnaryResponse updateView( - SelectOrUpdateRequest requestMessage, - TableServiceClient.UpdateViewMetadata_or_callbackUnionType metadata_or_callback); + SelectOrUpdateRequest requestMessage, + TableServiceClient.UpdateViewMetadata_or_callbackUnionType metadata_or_callback); @JsOverlay public final UnaryResponse view( - SelectOrUpdateRequest requestMessage, - BrowserHeaders metadata_or_callback, - TableServiceClient.ViewCallbackFn callback) { + SelectOrUpdateRequest requestMessage, + BrowserHeaders metadata_or_callback, + TableServiceClient.ViewCallbackFn callback) { return view( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse view( - SelectOrUpdateRequest requestMessage, BrowserHeaders metadata_or_callback) { + SelectOrUpdateRequest requestMessage, BrowserHeaders metadata_or_callback) { return view( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + 
metadata_or_callback)); } @JsOverlay public final UnaryResponse view( - SelectOrUpdateRequest requestMessage, - TableServiceClient.ViewMetadata_or_callbackFn metadata_or_callback, - TableServiceClient.ViewCallbackFn callback) { + SelectOrUpdateRequest requestMessage, + TableServiceClient.ViewMetadata_or_callbackFn metadata_or_callback, + TableServiceClient.ViewCallbackFn callback) { return view( - requestMessage, - Js.uncheckedCast( - metadata_or_callback), - callback); + requestMessage, + Js.uncheckedCast( + metadata_or_callback), + callback); } @JsOverlay public final UnaryResponse view( - SelectOrUpdateRequest requestMessage, - TableServiceClient.ViewMetadata_or_callbackFn metadata_or_callback) { + SelectOrUpdateRequest requestMessage, + TableServiceClient.ViewMetadata_or_callbackFn metadata_or_callback) { return view( - requestMessage, - Js.uncheckedCast( - metadata_or_callback)); + requestMessage, + Js.uncheckedCast( + metadata_or_callback)); } public native UnaryResponse view( - SelectOrUpdateRequest requestMessage, - TableServiceClient.ViewMetadata_or_callbackUnionType metadata_or_callback, - TableServiceClient.ViewCallbackFn callback); + SelectOrUpdateRequest requestMessage, + TableServiceClient.ViewMetadata_or_callbackUnionType metadata_or_callback, + TableServiceClient.ViewCallbackFn callback); public native UnaryResponse view( - SelectOrUpdateRequest requestMessage, - TableServiceClient.ViewMetadata_or_callbackUnionType metadata_or_callback); + SelectOrUpdateRequest requestMessage, + TableServiceClient.ViewMetadata_or_callbackUnionType metadata_or_callback); } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb_service/UnaryResponse.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb_service/UnaryResponse.java index 9e37fa2c3f2..6ed9b1df996 100644 --- 
a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb_service/UnaryResponse.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/table_pb_service/UnaryResponse.java @@ -4,9 +4,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.table_pb_service.UnaryResponse", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.table_pb_service.UnaryResponse", + namespace = JsPackage.GLOBAL) public interface UnaryResponse { void cancel(); } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/ticket_pb/Ticket.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/ticket_pb/Ticket.java index 7efd706117d..29ac4fc450c 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/ticket_pb/Ticket.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/io/deephaven/proto/ticket_pb/Ticket.java @@ -9,9 +9,9 @@ import jsinterop.base.JsPropertyMap; @JsType( - isNative = true, - name = "dhinternal.io.deephaven.proto.ticket_pb.Ticket", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.io.deephaven.proto.ticket_pb.Ticket", + namespace = JsPackage.GLOBAL) public class Ticket { @JsType(isNative = true, name = "?", namespace = JsPackage.GLOBAL) public interface GetTicketUnionType { diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/jspb/BinaryDecoder.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/jspb/BinaryDecoder.java index 952393d5bd9..d1fb77eb2be 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/jspb/BinaryDecoder.java +++ 
b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/jspb/BinaryDecoder.java @@ -158,7 +158,7 @@ default boolean isUint8Array() { public static native BinaryDecoder alloc(); public static native BinaryDecoder alloc( - BinaryDecoder.AllocBytesUnionType bytes, double start, double length); + BinaryDecoder.AllocBytesUnionType bytes, double start, double length); public static native BinaryDecoder alloc(BinaryDecoder.AllocBytesUnionType bytes, double start); @@ -248,7 +248,7 @@ public BinaryDecoder(ArrayBuffer bytes, double start) {} public BinaryDecoder(ArrayBuffer bytes) {} public BinaryDecoder( - BinaryDecoder.ConstructorBytesUnionType bytes, double start, double length) {} + BinaryDecoder.ConstructorBytesUnionType bytes, double start, double length) {} public BinaryDecoder(BinaryDecoder.ConstructorBytesUnionType bytes, double start) {} @@ -392,7 +392,7 @@ public final void setBlock(JsArray data) { } public native void setBlock( - BinaryDecoder.SetBlockDataUnionType data, double start, double length); + BinaryDecoder.SetBlockDataUnionType data, double start, double length); public native void setBlock(BinaryDecoder.SetBlockDataUnionType data, double start); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/jspb/BinaryIterator.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/jspb/BinaryIterator.java index 5cf32a949cd..2412f27a141 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/jspb/BinaryIterator.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/jspb/BinaryIterator.java @@ -251,36 +251,35 @@ default boolean isString() { @JsOverlay public static final BinaryIterator alloc( - BinaryDecoder decoder, - BinaryIterator.AllocNextFn next, - BinaryIterator.AllocElementsArrayUnionType[] elements) { + BinaryDecoder decoder, + BinaryIterator.AllocNextFn next, + BinaryIterator.AllocElementsArrayUnionType[] 
elements) { return alloc( - decoder, - next, - Js.>uncheckedCast(elements)); + decoder, + next, + Js.>uncheckedCast(elements)); } public static native BinaryIterator alloc( - BinaryDecoder decoder, - BinaryIterator.AllocNextFn next, - JsArray elements); + BinaryDecoder decoder, + BinaryIterator.AllocNextFn next, + JsArray elements); - public static native BinaryIterator alloc(BinaryDecoder decoder, - BinaryIterator.AllocNextFn next); + public static native BinaryIterator alloc(BinaryDecoder decoder, BinaryIterator.AllocNextFn next); public static native BinaryIterator alloc(BinaryDecoder decoder); public BinaryIterator() {} public BinaryIterator( - BinaryDecoder decoder, - BinaryIterator.BinaryIteratorNextFn next, - BinaryIterator.ConstructorElementsArrayUnionType[] elements) {} + BinaryDecoder decoder, + BinaryIterator.BinaryIteratorNextFn next, + BinaryIterator.ConstructorElementsArrayUnionType[] elements) {} public BinaryIterator( - BinaryDecoder decoder, - BinaryIterator.BinaryIteratorNextFn next, - JsArray elements) {} + BinaryDecoder decoder, + BinaryIterator.BinaryIteratorNextFn next, + JsArray elements) {} public BinaryIterator(BinaryDecoder decoder, BinaryIterator.BinaryIteratorNextFn next) {} diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/jspb/BinaryReader.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/jspb/BinaryReader.java index 6fe5d64fe71..260f4b703d1 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/jspb/BinaryReader.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/jspb/BinaryReader.java @@ -239,11 +239,11 @@ default boolean isUint8Array() { @JsMethod(name = "alloc") public static native BinaryReader alloc_STATIC( - BinaryReader.AllocBytesUnionType bytes, double start, double length); + BinaryReader.AllocBytesUnionType bytes, double start, double length); @JsMethod(name = "alloc") public static 
native BinaryReader alloc_STATIC( - BinaryReader.AllocBytesUnionType bytes, double start); + BinaryReader.AllocBytesUnionType bytes, double start); @JsMethod(name = "alloc") public static native BinaryReader alloc_STATIC(BinaryReader.AllocBytesUnionType bytes); @@ -251,8 +251,7 @@ public static native BinaryReader alloc_STATIC( @JsOverlay @JsMethod(name = "alloc") public static final BinaryReader alloc_STATIC(ArrayBuffer bytes, double start, double length) { - return alloc_STATIC(Js.uncheckedCast(bytes), start, - length); + return alloc_STATIC(Js.uncheckedCast(bytes), start, length); } @JsOverlay @@ -270,9 +269,8 @@ public static final BinaryReader alloc_STATIC(ArrayBuffer bytes) { @JsOverlay @JsMethod(name = "alloc") public static final BinaryReader alloc_STATIC( - JsArray bytes, double start, double length) { - return alloc_STATIC(Js.uncheckedCast(bytes), start, - length); + JsArray bytes, double start, double length) { + return alloc_STATIC(Js.uncheckedCast(bytes), start, length); } @JsOverlay @@ -290,8 +288,7 @@ public static final BinaryReader alloc_STATIC(JsArray bytes) { @JsOverlay @JsMethod(name = "alloc") public static final BinaryReader alloc_STATIC(String bytes, double start, double length) { - return alloc_STATIC(Js.uncheckedCast(bytes), start, - length); + return alloc_STATIC(Js.uncheckedCast(bytes), start, length); } @JsOverlay @@ -309,8 +306,7 @@ public static final BinaryReader alloc_STATIC(String bytes) { @JsOverlay @JsMethod(name = "alloc") public static final BinaryReader alloc_STATIC(Uint8Array bytes, double start, double length) { - return alloc_STATIC(Js.uncheckedCast(bytes), start, - length); + return alloc_STATIC(Js.uncheckedCast(bytes), start, length); } @JsOverlay @@ -350,8 +346,7 @@ public BinaryReader(ArrayBuffer bytes, double start) {} public BinaryReader(ArrayBuffer bytes) {} - public BinaryReader(BinaryReader.ConstructorBytesUnionType bytes, double start, - double length) {} + public BinaryReader(BinaryReader.ConstructorBytesUnionType 
bytes, double start, double length) {} public BinaryReader(BinaryReader.ConstructorBytesUnionType bytes, double start) {} @@ -386,7 +381,7 @@ public BinaryReader(double[] bytes) {} public native BinaryReader alloc(); public native BinaryReader alloc( - BinaryReader.AllocBytesUnionType bytes, double start, double length); + BinaryReader.AllocBytesUnionType bytes, double start, double length); public native BinaryReader alloc(BinaryReader.AllocBytesUnionType bytes, double start); @@ -508,7 +503,7 @@ public final BinaryReader alloc(double[] bytes) { public native double readFloat(); public native void readGroup( - double field, Message message, BinaryReader.ReadGroupReaderFn reader); + double field, Message message, BinaryReader.ReadGroupReaderFn reader); public native double readInt32(); @@ -591,7 +586,7 @@ public native void readGroup( public native String readVarintHash64(); public native void registerReadCallback( - String callbackName, BinaryReader.RegisterReadCallbackCallbackFn callback); + String callbackName, BinaryReader.RegisterReadCallbackCallbackFn callback); public native void reset(); @@ -630,7 +625,7 @@ public final void setBlock(JsArray bytes) { } public native void setBlock( - BinaryReader.SetBlockBytesUnionType bytes, double start, double length); + BinaryReader.SetBlockBytesUnionType bytes, double start, double length); public native void setBlock(BinaryReader.SetBlockBytesUnionType bytes, double start); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/jspb/BinaryWriter.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/jspb/BinaryWriter.java index 035f7ee9b7c..4ae6db2480b 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/jspb/BinaryWriter.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/jspb/BinaryWriter.java @@ -231,7 +231,7 @@ public final void writeAny(int fieldType, double field, Uint8Array value) 
{ } public native void writeAny( - int fieldType, double field, BinaryWriter.WriteAnyValueUnionType value); + int fieldType, double field, BinaryWriter.WriteAnyValueUnionType value); @JsOverlay public final void writeAny(int fieldType, double field, boolean value) { @@ -305,7 +305,7 @@ public final void writeBytes(double field, double[] value) { public native void writeFloat(double field); public native void writeGroup( - double field, Object value, BinaryWriter.WriteGroupWriteCallbackFn writeCallback); + double field, Object value, BinaryWriter.WriteGroupWriteCallbackFn writeCallback); public native void writeInt32(double field, double value); @@ -540,14 +540,14 @@ public final void writeRepeatedBool(double field, boolean[] value) { public native void writeRepeatedBool(double field); public native void writeRepeatedBytes( - double field, JsArray value); + double field, JsArray value); @JsOverlay public final void writeRepeatedBytes( - double field, BinaryWriter.WriteRepeatedBytesValueArrayUnionType[] value) { + double field, BinaryWriter.WriteRepeatedBytesValueArrayUnionType[] value) { writeRepeatedBytes( - field, - Js.>uncheckedCast(value)); + field, + Js.>uncheckedCast(value)); } public native void writeRepeatedBytes(double field); @@ -616,15 +616,15 @@ public final void writeRepeatedFloat(double field, double[] value) { public native void writeRepeatedFloat(double field); public native void writeRepeatedGroup( - double field, - JsArray value, - BinaryWriter.WriteRepeatedGroupWriterCallbackFn writerCallback); + double field, + JsArray value, + BinaryWriter.WriteRepeatedGroupWriterCallbackFn writerCallback); @JsOverlay public final void writeRepeatedGroup( - double field, - Message[] value, - BinaryWriter.WriteRepeatedGroupWriterCallbackFn writerCallback) { + double field, + Message[] value, + BinaryWriter.WriteRepeatedGroupWriterCallbackFn writerCallback) { writeRepeatedGroup(field, Js.>uncheckedCast(value), writerCallback); } @@ -665,15 +665,15 @@ public final 
void writeRepeatedInt64String(double field, String[] value) { public native void writeRepeatedInt64String(double field); public native void writeRepeatedMessage( - double field, - JsArray value, - BinaryWriter.WriteRepeatedMessageWriterCallbackFn writerCallback); + double field, + JsArray value, + BinaryWriter.WriteRepeatedMessageWriterCallbackFn writerCallback); @JsOverlay public final void writeRepeatedMessage( - double field, - Message[] value, - BinaryWriter.WriteRepeatedMessageWriterCallbackFn writerCallback) { + double field, + Message[] value, + BinaryWriter.WriteRepeatedMessageWriterCallbackFn writerCallback) { writeRepeatedMessage(field, Js.>uncheckedCast(value), writerCallback); } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/jspb/ExtensionFieldBinaryInfo.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/jspb/ExtensionFieldBinaryInfo.java index 0ed2316f714..3b64431f8f4 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/jspb/ExtensionFieldBinaryInfo.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/jspb/ExtensionFieldBinaryInfo.java @@ -5,9 +5,9 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.jspb.ExtensionFieldBinaryInfo", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.jspb.ExtensionFieldBinaryInfo", + namespace = JsPackage.GLOBAL) public class ExtensionFieldBinaryInfo { @JsFunction public interface BinaryReaderFn { @@ -47,7 +47,7 @@ public interface P1Fn { } void onInvoke( - Object p0, ExtensionFieldBinaryInfo.ExtensionFieldBinaryInfoBinaryReaderFn.P1Fn p1); + Object p0, ExtensionFieldBinaryInfo.ExtensionFieldBinaryInfoBinaryReaderFn.P1Fn p1); } @JsFunction @@ -58,9 +58,9 @@ public interface P2Fn { } void onInvoke( - double p0, - Object p1, - ExtensionFieldBinaryInfo.ExtensionFieldBinaryInfoBinaryWriterFn.P2Fn p2); + double p0, + Object p1, 
+ ExtensionFieldBinaryInfo.ExtensionFieldBinaryInfoBinaryWriterFn.P2Fn p2); } @JsFunction @@ -81,10 +81,10 @@ public interface Opt_binaryMessageSerializeFn { public boolean opt_isPacked; public ExtensionFieldBinaryInfo( - ExtensionFieldInfo fieldInfo, - ExtensionFieldBinaryInfo.ExtensionFieldBinaryInfoBinaryReaderFn binaryReaderFn, - ExtensionFieldBinaryInfo.ExtensionFieldBinaryInfoBinaryWriterFn binaryWriterFn, - ExtensionFieldBinaryInfo.ExtensionFieldBinaryInfoBinaryMessageSerializeFn binaryMessageSerializeFn, - ExtensionFieldBinaryInfo.ExtensionFieldBinaryInfoBinaryMessageDeserializeFn binaryMessageDeserializeFn, - boolean isPacked) {} + ExtensionFieldInfo fieldInfo, + ExtensionFieldBinaryInfo.ExtensionFieldBinaryInfoBinaryReaderFn binaryReaderFn, + ExtensionFieldBinaryInfo.ExtensionFieldBinaryInfoBinaryWriterFn binaryWriterFn, + ExtensionFieldBinaryInfo.ExtensionFieldBinaryInfoBinaryMessageSerializeFn binaryMessageSerializeFn, + ExtensionFieldBinaryInfo.ExtensionFieldBinaryInfoBinaryMessageDeserializeFn binaryMessageDeserializeFn, + boolean isPacked) {} } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/jspb/ExtensionFieldInfo.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/jspb/ExtensionFieldInfo.java index e9d9e6feb77..42260d4e89f 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/jspb/ExtensionFieldInfo.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/jspb/ExtensionFieldInfo.java @@ -24,11 +24,11 @@ public interface ToObjectFn { public ExtensionFieldInfo.ToObjectFn toObjectFn; public ExtensionFieldInfo( - double fieldIndex, - JsPropertyMap fieldName, - Object ctor, - ExtensionFieldInfo.ExtensionFieldInfoToObjectFn toObjectFn, - double isRepeated) {} + double fieldIndex, + JsPropertyMap fieldName, + Object ctor, + ExtensionFieldInfo.ExtensionFieldInfoToObjectFn toObjectFn, + double isRepeated) {} public native 
boolean isMessageType(); } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/jspb/Map.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/jspb/Map.java index 2f26ee29285..fd9edcf3ce4 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/jspb/Map.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/jspb/Map.java @@ -22,13 +22,13 @@ public interface ToObjectValueToObjectFn { } public static native Map fromObject( - JsArray> entries, Object valueCtor, Object valueFromObject); + JsArray> entries, Object valueCtor, Object valueFromObject); @JsOverlay public static final Map fromObject( - Object[][] entries, Object valueCtor, Object valueFromObject) { + Object[][] entries, Object valueCtor, Object valueFromObject) { return fromObject( - Js.>>uncheckedCast(entries), valueCtor, valueFromObject); + Js.>>uncheckedCast(entries), valueCtor, valueFromObject); } public Map(JsArray> arr, JsConstructorFn valueCtor) {} @@ -45,8 +45,7 @@ public Map(Object[][] arr) {} public native Iterator> entries(); - public native void forEach(Map.ForEachCallbackFn callback, - Object thisArg); + public native void forEach(Map.ForEachCallbackFn callback, Object thisArg); public native void forEach(Map.ForEachCallbackFn callback); @@ -67,8 +66,7 @@ public native void forEach(Map.ForEachCallbackFn callback, public native JsArray> toObject(); public native JsArray> toObject( - boolean includeInstance, - Map.ToObjectValueToObjectFn valueToObject); + boolean includeInstance, Map.ToObjectValueToObjectFn valueToObject); public native JsArray> toObject(boolean includeInstance); } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/jspb/Message.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/jspb/Message.java index 2733c905621..a25ccddc90b 100644 --- 
a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/jspb/Message.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/jspb/Message.java @@ -238,27 +238,27 @@ public interface ToObjectListToObjectFn { } public static native void addToRepeatedField( - Message msg, double fieldNumber, Object value, double index); + Message msg, double fieldNumber, Object value, double index); public static native void addToRepeatedField(Message msg, double fieldNumber, Object value); @JsOverlay public static final T addToRepeatedWrapperField( - Message msg, double fieldNumber, T value, Class ctor, double index) { + Message msg, double fieldNumber, T value, Class ctor, double index) { return addToRepeatedWrapperField(msg, fieldNumber, value, Js.asConstructorFn(ctor), index); } @JsOverlay public static final T addToRepeatedWrapperField( - Message msg, double fieldNumber, T value, Class ctor) { + Message msg, double fieldNumber, T value, Class ctor) { return addToRepeatedWrapperField(msg, fieldNumber, value, Js.asConstructorFn(ctor)); } public static native T addToRepeatedWrapperField( - Message msg, double fieldNumber, T value, JsConstructorFn ctor, double index); + Message msg, double fieldNumber, T value, JsConstructorFn ctor, double index); public static native T addToRepeatedWrapperField( - Message msg, double fieldNumber, T value, JsConstructorFn ctor); + Message msg, double fieldNumber, T value, JsConstructorFn ctor); public static native String bytesAsB64(Uint8Array bytes); @@ -308,259 +308,259 @@ public static final double computeOneofCase(Message msg, double[] oneof) { public static native T getFieldWithDefault(Message msg, double fieldNumber, T defaultValue); public static native Map getMapField( - Message msg, double fieldNumber, boolean noLazyCreate, Object valueCtor); + Message msg, double fieldNumber, boolean noLazyCreate, Object valueCtor); public static native double getOptionalFloatingPointField(Message msg, 
double fieldNumber); public static native JsArray getRepeatedFloatingPointField( - Message msg, double fieldNumber); + Message msg, double fieldNumber); @JsOverlay public static final JsArray getRepeatedWrapperField( - Message msg, Class ctor, double fieldNumber) { + Message msg, Class ctor, double fieldNumber) { return getRepeatedWrapperField(msg, Js.asConstructorFn(ctor), fieldNumber); } public static native JsArray getRepeatedWrapperField( - Message msg, JsConstructorFn ctor, double fieldNumber); + Message msg, JsConstructorFn ctor, double fieldNumber); @JsOverlay public static final T getWrapperField( - Message msg, Class ctor, double fieldNumber, double required) { + Message msg, Class ctor, double fieldNumber, double required) { return getWrapperField(msg, Js.asConstructorFn(ctor), fieldNumber, required); } @JsOverlay public static final T getWrapperField( - Message msg, Class ctor, double fieldNumber) { + Message msg, Class ctor, double fieldNumber) { return getWrapperField(msg, Js.asConstructorFn(ctor), fieldNumber); } public static native T getWrapperField( - Message msg, JsConstructorFn ctor, double fieldNumber, double required); + Message msg, JsConstructorFn ctor, double fieldNumber, double required); public static native T getWrapperField( - Message msg, JsConstructorFn ctor, double fieldNumber); + Message msg, JsConstructorFn ctor, double fieldNumber); public static native void initialize( - Message msg, - JsArray data, - Message.InitializeMessageIdUnionType messageId, - double suggestedPivot, - JsArray repeatedFields, - JsArray> oneofFields); + Message msg, + JsArray data, + Message.InitializeMessageIdUnionType messageId, + double suggestedPivot, + JsArray repeatedFields, + JsArray> oneofFields); public static native void initialize( - Message msg, - JsArray data, - Message.InitializeMessageIdUnionType messageId, - double suggestedPivot, - JsArray repeatedFields); + Message msg, + JsArray data, + Message.InitializeMessageIdUnionType messageId, + 
double suggestedPivot, + JsArray repeatedFields); public static native void initialize( - Message msg, - JsArray data, - Message.InitializeMessageIdUnionType messageId, - double suggestedPivot); + Message msg, + JsArray data, + Message.InitializeMessageIdUnionType messageId, + double suggestedPivot); @JsOverlay public static final void initialize( - Message msg, - JsArray data, - String messageId, - double suggestedPivot, - JsArray repeatedFields, - JsArray> oneofFields) { + Message msg, + JsArray data, + String messageId, + double suggestedPivot, + JsArray repeatedFields, + JsArray> oneofFields) { initialize( - msg, - data, - Js.uncheckedCast(messageId), - suggestedPivot, - repeatedFields, - oneofFields); + msg, + data, + Js.uncheckedCast(messageId), + suggestedPivot, + repeatedFields, + oneofFields); } @JsOverlay public static final void initialize( - Message msg, - JsArray data, - String messageId, - double suggestedPivot, - JsArray repeatedFields) { + Message msg, + JsArray data, + String messageId, + double suggestedPivot, + JsArray repeatedFields) { initialize( - msg, - data, - Js.uncheckedCast(messageId), - suggestedPivot, - repeatedFields); + msg, + data, + Js.uncheckedCast(messageId), + suggestedPivot, + repeatedFields); } @JsOverlay public static final void initialize( - Message msg, JsArray data, String messageId, double suggestedPivot) { + Message msg, JsArray data, String messageId, double suggestedPivot) { initialize( - msg, - data, - Js.uncheckedCast(messageId), - suggestedPivot); + msg, + data, + Js.uncheckedCast(messageId), + suggestedPivot); } @JsOverlay public static final void initialize( - Message msg, - JsArray data, - double messageId, - double suggestedPivot, - JsArray repeatedFields, - JsArray> oneofFields) { + Message msg, + JsArray data, + double messageId, + double suggestedPivot, + JsArray repeatedFields, + JsArray> oneofFields) { initialize( - msg, - data, - Js.uncheckedCast(messageId), - suggestedPivot, - repeatedFields, - 
oneofFields); + msg, + data, + Js.uncheckedCast(messageId), + suggestedPivot, + repeatedFields, + oneofFields); } @JsOverlay public static final void initialize( - Message msg, - JsArray data, - double messageId, - double suggestedPivot, - JsArray repeatedFields) { + Message msg, + JsArray data, + double messageId, + double suggestedPivot, + JsArray repeatedFields) { initialize( - msg, - data, - Js.uncheckedCast(messageId), - suggestedPivot, - repeatedFields); + msg, + data, + Js.uncheckedCast(messageId), + suggestedPivot, + repeatedFields); } @JsOverlay public static final void initialize( - Message msg, JsArray data, double messageId, double suggestedPivot) { + Message msg, JsArray data, double messageId, double suggestedPivot) { initialize( - msg, - data, - Js.uncheckedCast(messageId), - suggestedPivot); + msg, + data, + Js.uncheckedCast(messageId), + suggestedPivot); } @JsOverlay public static final void initialize( - Message msg, - Object[] data, - Message.InitializeMessageIdUnionType messageId, - double suggestedPivot, - double[] repeatedFields, - double[][] oneofFields) { + Message msg, + Object[] data, + Message.InitializeMessageIdUnionType messageId, + double suggestedPivot, + double[] repeatedFields, + double[][] oneofFields) { initialize( - msg, - Js.>uncheckedCast(data), - messageId, - suggestedPivot, - Js.>uncheckedCast(repeatedFields), - Js.>>uncheckedCast(oneofFields)); + msg, + Js.>uncheckedCast(data), + messageId, + suggestedPivot, + Js.>uncheckedCast(repeatedFields), + Js.>>uncheckedCast(oneofFields)); } @JsOverlay public static final void initialize( - Message msg, - Object[] data, - Message.InitializeMessageIdUnionType messageId, - double suggestedPivot, - double[] repeatedFields) { + Message msg, + Object[] data, + Message.InitializeMessageIdUnionType messageId, + double suggestedPivot, + double[] repeatedFields) { initialize( - msg, - Js.>uncheckedCast(data), - messageId, - suggestedPivot, - Js.>uncheckedCast(repeatedFields)); + msg, + 
Js.>uncheckedCast(data), + messageId, + suggestedPivot, + Js.>uncheckedCast(repeatedFields)); } @JsOverlay public static final void initialize( - Message msg, - Object[] data, - Message.InitializeMessageIdUnionType messageId, - double suggestedPivot) { + Message msg, + Object[] data, + Message.InitializeMessageIdUnionType messageId, + double suggestedPivot) { initialize(msg, Js.>uncheckedCast(data), messageId, suggestedPivot); } @JsOverlay public static final void initialize( - Message msg, - Object[] data, - String messageId, - double suggestedPivot, - double[] repeatedFields, - double[][] oneofFields) { + Message msg, + Object[] data, + String messageId, + double suggestedPivot, + double[] repeatedFields, + double[][] oneofFields) { initialize( - msg, - Js.>uncheckedCast(data), - messageId, - suggestedPivot, - Js.>uncheckedCast(repeatedFields), - Js.>>uncheckedCast(oneofFields)); + msg, + Js.>uncheckedCast(data), + messageId, + suggestedPivot, + Js.>uncheckedCast(repeatedFields), + Js.>>uncheckedCast(oneofFields)); } @JsOverlay public static final void initialize( - Message msg, - Object[] data, - String messageId, - double suggestedPivot, - double[] repeatedFields) { + Message msg, + Object[] data, + String messageId, + double suggestedPivot, + double[] repeatedFields) { initialize( - msg, - Js.>uncheckedCast(data), - messageId, - suggestedPivot, - Js.>uncheckedCast(repeatedFields)); + msg, + Js.>uncheckedCast(data), + messageId, + suggestedPivot, + Js.>uncheckedCast(repeatedFields)); } @JsOverlay public static final void initialize( - Message msg, Object[] data, String messageId, double suggestedPivot) { + Message msg, Object[] data, String messageId, double suggestedPivot) { initialize(msg, Js.>uncheckedCast(data), messageId, suggestedPivot); } @JsOverlay public static final void initialize( - Message msg, - Object[] data, - double messageId, - double suggestedPivot, - double[] repeatedFields, - double[][] oneofFields) { + Message msg, + Object[] data, + 
double messageId, + double suggestedPivot, + double[] repeatedFields, + double[][] oneofFields) { initialize( - msg, - Js.>uncheckedCast(data), - messageId, - suggestedPivot, - Js.>uncheckedCast(repeatedFields), - Js.>>uncheckedCast(oneofFields)); + msg, + Js.>uncheckedCast(data), + messageId, + suggestedPivot, + Js.>uncheckedCast(repeatedFields), + Js.>>uncheckedCast(oneofFields)); } @JsOverlay public static final void initialize( - Message msg, - Object[] data, - double messageId, - double suggestedPivot, - double[] repeatedFields) { + Message msg, + Object[] data, + double messageId, + double suggestedPivot, + double[] repeatedFields) { initialize( - msg, - Js.>uncheckedCast(data), - messageId, - suggestedPivot, - Js.>uncheckedCast(repeatedFields)); + msg, + Js.>uncheckedCast(data), + messageId, + suggestedPivot, + Js.>uncheckedCast(repeatedFields)); } @JsOverlay public static final void initialize( - Message msg, Object[] data, double messageId, double suggestedPivot) { + Message msg, Object[] data, double messageId, double suggestedPivot) { initialize(msg, Js.>uncheckedCast(data), messageId, suggestedPivot); } @@ -574,7 +574,7 @@ public static final void setField(Message msg, double fieldNumber, FieldValueArr } public static native void setField( - Message msg, double fieldNumber, Message.SetFieldValueUnionType value); + Message msg, double fieldNumber, Message.SetFieldValueUnionType value); @JsOverlay public static final void setField(Message msg, double fieldNumber, String value) { @@ -598,97 +598,95 @@ public static final void setField(Message msg, double fieldNumber, double value) @JsOverlay public static final void setOneofField( - Message msg, double fieldNumber, JsArray oneof, FieldValueArray value) { + Message msg, double fieldNumber, JsArray oneof, FieldValueArray value) { setOneofField( - msg, fieldNumber, oneof, Js.uncheckedCast(value)); + msg, fieldNumber, oneof, Js.uncheckedCast(value)); } public static native void setOneofField( - Message msg, - 
double fieldNumber, - JsArray oneof, - Message.SetOneofFieldValueUnionType value); + Message msg, + double fieldNumber, + JsArray oneof, + Message.SetOneofFieldValueUnionType value); @JsOverlay public static final void setOneofField( - Message msg, double fieldNumber, JsArray oneof, String value) { + Message msg, double fieldNumber, JsArray oneof, String value) { setOneofField( - msg, fieldNumber, oneof, Js.uncheckedCast(value)); + msg, fieldNumber, oneof, Js.uncheckedCast(value)); } @JsOverlay public static final void setOneofField( - Message msg, double fieldNumber, JsArray oneof, Uint8Array value) { + Message msg, double fieldNumber, JsArray oneof, Uint8Array value) { setOneofField( - msg, fieldNumber, oneof, Js.uncheckedCast(value)); + msg, fieldNumber, oneof, Js.uncheckedCast(value)); } @JsOverlay public static final void setOneofField( - Message msg, double fieldNumber, JsArray oneof, boolean value) { + Message msg, double fieldNumber, JsArray oneof, boolean value) { setOneofField( - msg, fieldNumber, oneof, Js.uncheckedCast(value)); + msg, fieldNumber, oneof, Js.uncheckedCast(value)); } @JsOverlay public static final void setOneofField( - Message msg, double fieldNumber, JsArray oneof, double value) { + Message msg, double fieldNumber, JsArray oneof, double value) { setOneofField( - msg, fieldNumber, oneof, Js.uncheckedCast(value)); + msg, fieldNumber, oneof, Js.uncheckedCast(value)); } @JsOverlay public static final void setOneofField( - Message msg, double fieldNumber, double[] oneof, FieldValueArray value) { + Message msg, double fieldNumber, double[] oneof, FieldValueArray value) { setOneofField(msg, fieldNumber, Js.>uncheckedCast(oneof), value); } @JsOverlay public static final void setOneofField( - Message msg, double fieldNumber, double[] oneof, - Message.SetOneofFieldValueUnionType value) { + Message msg, double fieldNumber, double[] oneof, Message.SetOneofFieldValueUnionType value) { setOneofField(msg, fieldNumber, Js.>uncheckedCast(oneof), value); 
} @JsOverlay public static final void setOneofField( - Message msg, double fieldNumber, double[] oneof, String value) { + Message msg, double fieldNumber, double[] oneof, String value) { setOneofField(msg, fieldNumber, Js.>uncheckedCast(oneof), value); } @JsOverlay public static final void setOneofField( - Message msg, double fieldNumber, double[] oneof, Uint8Array value) { + Message msg, double fieldNumber, double[] oneof, Uint8Array value) { setOneofField(msg, fieldNumber, Js.>uncheckedCast(oneof), value); } @JsOverlay public static final void setOneofField( - Message msg, double fieldNumber, double[] oneof, boolean value) { + Message msg, double fieldNumber, double[] oneof, boolean value) { setOneofField(msg, fieldNumber, Js.>uncheckedCast(oneof), value); } @JsOverlay public static final void setOneofField( - Message msg, double fieldNumber, double[] oneof, double value) { + Message msg, double fieldNumber, double[] oneof, double value) { setOneofField(msg, fieldNumber, Js.>uncheckedCast(oneof), value); } public static native void setOneofWrapperField( - Message msg, double fieldNumber, JsArray oneof, Object value); + Message msg, double fieldNumber, JsArray oneof, Object value); @JsOverlay public static final void setOneofWrapperField( - Message msg, double fieldNumber, double[] oneof, Object value) { + Message msg, double fieldNumber, double[] oneof, Object value) { setOneofWrapperField(msg, fieldNumber, Js.>uncheckedCast(oneof), value); } public static native void setRepeatedWrapperField( - Message msg, double fieldNumber, JsArray value); + Message msg, double fieldNumber, JsArray value); @JsOverlay - public static final void setRepeatedWrapperField(Message msg, double fieldNumber, - T[] value) { + public static final void setRepeatedWrapperField(Message msg, double fieldNumber, T[] value) { setRepeatedWrapperField(msg, fieldNumber, Js.>uncheckedCast(value)); } @@ -696,51 +694,50 @@ public static final void setRepeatedWrapperField(Message msg, double fieldNu 
@JsOverlay public static final void setWrapperField( - Message msg, double fieldNumber, Map value) { + Message msg, double fieldNumber, Map value) { setWrapperField( - msg, fieldNumber, Js.>uncheckedCast(value)); + msg, fieldNumber, Js.>uncheckedCast(value)); } public static native void setWrapperField( - Message msg, double fieldNumber, Message.SetWrapperFieldValueUnionType value); + Message msg, double fieldNumber, Message.SetWrapperFieldValueUnionType value); @JsOverlay public static final void setWrapperField(Message msg, double fieldNumber, T value) { setWrapperField( - msg, fieldNumber, Js.>uncheckedCast(value)); + msg, fieldNumber, Js.>uncheckedCast(value)); } public static native void setWrapperField(Message msg, double fieldNumber); public static native void toMap( - JsArray field, - Message.ToMapMapKeyGetterFn mapKeyGetterFn, - Message.ToMapToObjectFn toObjectFn, - boolean includeInstance); + JsArray field, + Message.ToMapMapKeyGetterFn mapKeyGetterFn, + Message.ToMapToObjectFn toObjectFn, + boolean includeInstance); public static native void toMap( - JsArray field, - Message.ToMapMapKeyGetterFn mapKeyGetterFn, - Message.ToMapToObjectFn toObjectFn); + JsArray field, + Message.ToMapMapKeyGetterFn mapKeyGetterFn, + Message.ToMapToObjectFn toObjectFn); public static native void toMap( - JsArray field, Message.ToMapMapKeyGetterFn mapKeyGetterFn); + JsArray field, Message.ToMapMapKeyGetterFn mapKeyGetterFn); @JsOverlay public static final void toMap( - Object[] field, - Message.ToMapMapKeyGetterFn mapKeyGetterFn, - Message.ToMapToObjectFn toObjectFn, - boolean includeInstance) { - toMap(Js.>uncheckedCast(field), mapKeyGetterFn, toObjectFn, - includeInstance); + Object[] field, + Message.ToMapMapKeyGetterFn mapKeyGetterFn, + Message.ToMapToObjectFn toObjectFn, + boolean includeInstance) { + toMap(Js.>uncheckedCast(field), mapKeyGetterFn, toObjectFn, includeInstance); } @JsOverlay public static final void toMap( - Object[] field, - Message.ToMapMapKeyGetterFn 
mapKeyGetterFn, - Message.ToMapToObjectFn toObjectFn) { + Object[] field, + Message.ToMapMapKeyGetterFn mapKeyGetterFn, + Message.ToMapToObjectFn toObjectFn) { toMap(Js.>uncheckedCast(field), mapKeyGetterFn, toObjectFn); } @@ -752,28 +749,28 @@ public static final void toMap(Object[] field, Message.ToMapMapKeyGetterFn mapKe public static native Object toObject(boolean includeInstance, Message msg); public static native void toObjectExtension( - Message msg, Object obj, Object extensions, Object getExtensionFn, boolean includeInstance); + Message msg, Object obj, Object extensions, Object getExtensionFn, boolean includeInstance); public static native void toObjectExtension( - Message msg, Object obj, Object extensions, Object getExtensionFn); + Message msg, Object obj, Object extensions, Object getExtensionFn); public static native JsArray toObjectList( - JsArray field, - Message.ToObjectListToObjectFn toObjectFn, - boolean includeInstance); + JsArray field, + Message.ToObjectListToObjectFn toObjectFn, + boolean includeInstance); public static native JsArray toObjectList( - JsArray field, Message.ToObjectListToObjectFn toObjectFn); + JsArray field, Message.ToObjectListToObjectFn toObjectFn); @JsOverlay public static final JsArray toObjectList( - T[] field, Message.ToObjectListToObjectFn toObjectFn, boolean includeInstance) { + T[] field, Message.ToObjectListToObjectFn toObjectFn, boolean includeInstance) { return toObjectList(Js.>uncheckedCast(field), toObjectFn, includeInstance); } @JsOverlay public static final JsArray toObjectList( - T[] field, Message.ToObjectListToObjectFn toObjectFn) { + T[] field, Message.ToObjectListToObjectFn toObjectFn) { return toObjectList(Js.>uncheckedCast(field), toObjectFn); } @@ -787,12 +784,12 @@ public static final JsArray toObjectList( public native String getJsPbMessageId(); public native void readBinaryExtension( - Message proto, BinaryReader reader, Object extensions, Object setExtensionFn); + Message proto, BinaryReader 
reader, Object extensions, Object setExtensionFn); public native Uint8Array serializeBinary(); public native void serializeBinaryExtensions( - Message proto, BinaryWriter writer, Object extensions, Object getExtensionFn); + Message proto, BinaryWriter writer, Object extensions, Object getExtensionFn); public native void setExtension(ExtensionFieldInfo fieldInfo, T value); diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/jspb/binaryconstants/FieldType.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/jspb/binaryconstants/FieldType.java index dc24e303368..a848104ac44 100644 --- a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/jspb/binaryconstants/FieldType.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/jspb/binaryconstants/FieldType.java @@ -4,29 +4,29 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.jspb.BinaryConstants.FieldType", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.jspb.BinaryConstants.FieldType", + namespace = JsPackage.GLOBAL) public class FieldType { public static int BOOL, - BYTES, - DOUBLE, - ENUM, - FHASH64, - FIXED32, - FIXED64, - FLOAT, - GROUP, - INT32, - INT64, - INVALID, - MESSAGE, - SFIXED32, - SFIXED64, - SINT32, - SINT64, - STRING, - UINT32, - UINT64, - VHASH64; + BYTES, + DOUBLE, + ENUM, + FHASH64, + FIXED32, + FIXED64, + FLOAT, + GROUP, + INT32, + INT64, + INVALID, + MESSAGE, + SFIXED32, + SFIXED64, + SINT32, + SINT64, + STRING, + UINT32, + UINT64, + VHASH64; } diff --git a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/jspb/binaryconstants/WireType.java b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/jspb/binaryconstants/WireType.java index 1926b1c665d..67c5a1e289b 100644 --- 
a/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/jspb/binaryconstants/WireType.java +++ b/web/client-backplane/src/main/java/io/deephaven/javascript/proto/dhinternal/jspb/binaryconstants/WireType.java @@ -4,15 +4,15 @@ import jsinterop.annotations.JsType; @JsType( - isNative = true, - name = "dhinternal.jspb.BinaryConstants.WireType", - namespace = JsPackage.GLOBAL) + isNative = true, + name = "dhinternal.jspb.BinaryConstants.WireType", + namespace = JsPackage.GLOBAL) public class WireType { public static int DELIMITED, - END_GROUP, - FIXED32, - FIXED64, - INVALID, - START_GROUP, - VARINT; + END_GROUP, + FIXED32, + FIXED64, + INVALID, + START_GROUP, + VARINT; } diff --git a/web/client-ide/src/main/java/io/deephaven/ide/client/IdeClient.java b/web/client-ide/src/main/java/io/deephaven/ide/client/IdeClient.java index 2b0d3d26c88..4cc9875594a 100644 --- a/web/client-ide/src/main/java/io/deephaven/ide/client/IdeClient.java +++ b/web/client-ide/src/main/java/io/deephaven/ide/client/IdeClient.java @@ -12,13 +12,13 @@ public class IdeClient { @Deprecated @JsMethod(name = "getExistingSession") - public Promise getExistingSession_old(String websocketUrl, String authToken, - String serviceId, String language) { + public Promise getExistingSession_old(String websocketUrl, String authToken, String serviceId, + String language) { return IdeClient.getExistingSession(websocketUrl, authToken, serviceId, language); } - public static CancellablePromise getExistingSession(String websocketUrl, - String authToken, String serviceId, String language) { + public static CancellablePromise getExistingSession(String websocketUrl, String authToken, + String serviceId, String language) { IdeConnectionOptions options = new IdeConnectionOptions(); options.authToken = authToken; options.serviceId = serviceId; diff --git a/web/client-ide/src/main/java/io/deephaven/ide/client/IdeConnection.java b/web/client-ide/src/main/java/io/deephaven/ide/client/IdeConnection.java 
index 2c2464619ab..67b0245cf94 100644 --- a/web/client-ide/src/main/java/io/deephaven/ide/client/IdeConnection.java +++ b/web/client-ide/src/main/java/io/deephaven/ide/client/IdeConnection.java @@ -15,8 +15,7 @@ public class IdeConnection extends QueryConnectable { @JsMethod(namespace = JsPackage.GLOBAL) private static native String atob(String encodedData); - private static AuthTokenPromiseSupplier getAuthTokenPromiseSupplier( - IdeConnectionOptions options) { + private static AuthTokenPromiseSupplier getAuthTokenPromiseSupplier(IdeConnectionOptions options) { ConnectToken token = null; if (options != null && options.authToken != null) { token = new ConnectToken(); @@ -35,8 +34,7 @@ protected String logPrefix() { } /** - * Direct connection to an already-running worker instance, without first authenticating to a - * client. + * Direct connection to an already-running worker instance, without first authenticating to a client. */ @JsConstructor public IdeConnection(String serverUrl, @JsOptional IdeConnectionOptions options) { @@ -59,8 +57,7 @@ public String getServerUrl() { @Override public Promise running() { - // This assumes that once the connection has been initialized and left a usable state, it - // cannot be used again + // This assumes that once the connection has been initialized and left a usable state, it cannot be used again if (!connection.isAvailable() || connection.get().isUsable()) { return Promise.resolve(this); } else { @@ -73,8 +70,7 @@ public void disconnected() { super.disconnected(); if (connection.isAvailable()) { - // Currently no way for an IdeConnect to recover, so make sure it doesn't try and - // reconnect + // Currently no way for an IdeConnect to recover, so make sure it doesn't try and reconnect connection.get().forceClose(); } } diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/ast/ConvertEqToIn.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/ast/ConvertEqToIn.java index fc764cd0e24..ffdaa95c625 100644 
--- a/web/shared-beans/src/main/java/io/deephaven/web/shared/ast/ConvertEqToIn.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/ast/ConvertEqToIn.java @@ -3,12 +3,12 @@ import io.deephaven.web.shared.data.FilterDescriptor; /** - * IN-type expressions are more efficient at runtime than EQUALS, and also compile more quickly. - * However, they only work when comparing a column reference to a literal value, so this - * transformation looks for those and replaces the EQ-type node with its corresponding IN-type node. + * IN-type expressions are more efficient at runtime than EQUALS, and also compile more quickly. However, they only work + * when comparing a column reference to a literal value, so this transformation looks for those and replaces the EQ-type + * node with its corresponding IN-type node. * - * This is sort of the opposite of {@link ConvertInvalidInExpressions}, which converts EQs to INs - * when the types of the children are not appropriate for use in a MatchFilter. + * This is sort of the opposite of {@link ConvertInvalidInExpressions}, which converts EQs to INs when the types of the + * children are not appropriate for use in a MatchFilter. * * Has no apparent pre-requisites. 
*/ @@ -37,17 +37,15 @@ public FilterDescriptor onNotEqualIgnoreCase(final FilterDescriptor descriptor) return handle(descriptor, FilterDescriptor.FilterOperation.NOT_IN_ICASE); } - private FilterDescriptor handle(final FilterDescriptor descriptor, - final FilterDescriptor.FilterOperation in) { - // if one is a reference and one is a literal, we can process it and ensure the reference is - // first + private FilterDescriptor handle(final FilterDescriptor descriptor, final FilterDescriptor.FilterOperation in) { + // if one is a reference and one is a literal, we can process it and ensure the reference is first final FilterDescriptor first = descriptor.getChildren()[0]; final FilterDescriptor second = descriptor.getChildren()[1]; if (first.getOperation() == FilterDescriptor.FilterOperation.REFERENCE - && second.getOperation() == FilterDescriptor.FilterOperation.LITERAL) { + && second.getOperation() == FilterDescriptor.FilterOperation.LITERAL) { return replaceWithIn(in, first, second); } else if (first.getOperation() == FilterDescriptor.FilterOperation.LITERAL - && second.getOperation() == FilterDescriptor.FilterOperation.REFERENCE) { + && second.getOperation() == FilterDescriptor.FilterOperation.REFERENCE) { return replaceWithIn(in, second, first); } else { return descriptor; @@ -55,8 +53,7 @@ private FilterDescriptor handle(final FilterDescriptor descriptor, } private FilterDescriptor replaceWithIn(final FilterDescriptor.FilterOperation operation, - final FilterDescriptor reference, final FilterDescriptor literal) { - return new FilterDescriptor(operation, null, null, - new FilterDescriptor[] {reference, literal}); + final FilterDescriptor reference, final FilterDescriptor literal) { + return new FilterDescriptor(operation, null, null, new FilterDescriptor[] {reference, literal}); } } diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/ast/ConvertInvalidInExpressions.java 
b/web/shared-beans/src/main/java/io/deephaven/web/shared/ast/ConvertInvalidInExpressions.java index 1777758351b..e33447dc495 100644 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/ast/ConvertInvalidInExpressions.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/ast/ConvertInvalidInExpressions.java @@ -3,18 +3,17 @@ import io.deephaven.web.shared.data.FilterDescriptor; /** - * Rewrite any IN-type expression into its corresponding EQUALS if the left side is not a reference - * or if the right side does have a reference. Assumes that FlipNonReferenceMatchExpression has - * already been run, making this the second attempt to deal with these, and letting us be confident - * that these expressions cannot be expressed as more efficient "in"s. + * Rewrite any IN-type expression into its corresponding EQUALS if the left side is not a reference or if the right side + * does have a reference. Assumes that FlipNonReferenceMatchExpression has already been run, making this the second + * attempt to deal with these, and letting us be confident that these expressions cannot be expressed as more efficient + * "in"s. * - * Examples: o ColumnA in 1 - left as is o ColumnA in 1, 2 - left as is o 1 in 2 - rewritten to 1 == - * 2. o ColumnA in ColumnB - rewritten to ColumnA == ColumnB + * Examples: o ColumnA in 1 - left as is o ColumnA in 1, 2 - left as is o 1 in 2 - rewritten to 1 == 2. 
o ColumnA in + * ColumnB - rewritten to ColumnA == ColumnB * - * Signs that visitors were mis-ordered: o 1 in ColumnA - literal on LHS should already be handled o - * 1 in 2, 3 - literal on LHS with multiple RHS values should already be handled, should have been - * flipped and split into individual exprs o ColumnA in ColumnB, 2 - column ref on RHS should - * already be handled + * Signs that visitors were mis-ordered: o 1 in ColumnA - literal on LHS should already be handled o 1 in 2, 3 - literal + * on LHS with multiple RHS values should already be handled, should have been flipped and split into individual exprs o + * ColumnA in ColumnB, 2 - column ref on RHS should already be handled */ public class ConvertInvalidInExpressions extends ReplacingVisitor { public static FilterDescriptor execute(FilterDescriptor filter) { @@ -27,9 +26,9 @@ public FilterDescriptor onIn(FilterDescriptor descriptor) { } private FilterDescriptor handle(FilterDescriptor descriptor, - FilterDescriptor.FilterOperation replacementOperation) { - if (descriptor.getChildren().length != 2 || descriptor.getChildren()[0] - .getOperation() != descriptor.getChildren()[1].getOperation()) { + FilterDescriptor.FilterOperation replacementOperation) { + if (descriptor.getChildren().length != 2 + || descriptor.getChildren()[0].getOperation() != descriptor.getChildren()[1].getOperation()) { return descriptor; } diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/ast/FilterPrinter.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/ast/FilterPrinter.java index dabe2e2b1e5..805db2a2a3b 100644 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/ast/FilterPrinter.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/ast/FilterPrinter.java @@ -7,10 +7,10 @@ import java.util.function.UnaryOperator; /** - * Prints a readable string representing this filter. 
Note that this is presently only suitable for - * debugging purposes, and doesn't generate something that the SelectFilterFactory can handle at - * this time. However, this can be used to produce individual java snippets of simple conditions, - * and is used on the JVM by the visitor that builds SelectFilters recursively from the filter AST. + * Prints a readable string representing this filter. Note that this is presently only suitable for debugging purposes, + * and doesn't generate something that the SelectFilterFactory can handle at this time. However, this can be used to + * produce individual java snippets of simple conditions, and is used on the JVM by the visitor that builds + * SelectFilters recursively from the filter AST. */ public class FilterPrinter implements FilterDescriptor.Visitor { private final StringBuilder sb = new StringBuilder(); @@ -183,8 +183,7 @@ public void onIsNull(FilterDescriptor descriptor) { @Override public void onInvoke(FilterDescriptor descriptor) { - assert descriptor.getChildren().length >= 1 - : "expecting at least one child, even if it is a null value"; + assert descriptor.getChildren().length >= 1 : "expecting at least one child, even if it is a null value"; if (descriptor.getChildren()[0] != null) { assert descriptor.getChildren()[0].getOperation().expressionKind == Kind.Value; descriptor.getChildren()[0].accept(this); diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/ast/FilterValidator.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/ast/FilterValidator.java index 55a57fe8264..1ebafd1e010 100644 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/ast/FilterValidator.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/ast/FilterValidator.java @@ -12,16 +12,15 @@ import java.util.function.Predicate; /** - * Sanity checks a filter and its children, to confirm that each object makes sense with regard to - * its fields, child count, etc. 
+ * Sanity checks a filter and its children, to confirm that each object makes sense with regard to its fields, child + * count, etc. */ public class FilterValidator implements FilterDescriptor.Visitor { private Stack stack = new Stack<>(); private final BiPredicate invokeCheck; private final Predicate columnCheck; - public FilterValidator(BiPredicate invokeCheck, - Predicate columnCheck) { + public FilterValidator(BiPredicate invokeCheck, Predicate columnCheck) { this.invokeCheck = invokeCheck; this.columnCheck = columnCheck; } @@ -44,7 +43,7 @@ public void onAnd(FilterDescriptor descriptor) { for (int i = 0; i < descriptor.getChildren().length; i++) { FilterDescriptor child = descriptor.getChildren()[i]; check(child.getOperation().expressionKind == Kind.Condition, - child.getOperation().expressionKind + " == Condition"); + child.getOperation().expressionKind + " == Condition"); child.accept(this); } stack.pop(); @@ -57,7 +56,7 @@ public void onOr(FilterDescriptor descriptor) { for (int i = 0; i < descriptor.getChildren().length; i++) { FilterDescriptor child = descriptor.getChildren()[i]; check(child.getOperation().expressionKind == Kind.Condition, - child.getOperation().expressionKind + " == Condition"); + child.getOperation().expressionKind + " == Condition"); child.accept(this); } stack.pop(); @@ -70,7 +69,7 @@ public void onNot(FilterDescriptor descriptor) { for (int i = 0; i < descriptor.getChildren().length; i++) { FilterDescriptor child = descriptor.getChildren()[i]; check(child.getOperation().expressionKind == Kind.Condition, - child.getOperation().expressionKind + " == Condition"); + child.getOperation().expressionKind + " == Condition"); child.accept(this); } stack.pop(); @@ -82,8 +81,7 @@ public void onLessThan(FilterDescriptor descriptor) { check(descriptor.getChildren().length == 2, descriptor.getChildren().length + " == 2"); for (int i = 0; i < descriptor.getChildren().length; i++) { FilterDescriptor child = descriptor.getChildren()[i]; - 
check(child.getOperation().expressionKind == Kind.Value, - child.getOperation().expressionKind + " == Value"); + check(child.getOperation().expressionKind == Kind.Value, child.getOperation().expressionKind + " == Value"); child.accept(this); } stack.pop(); @@ -95,8 +93,7 @@ public void onGreaterThan(FilterDescriptor descriptor) { check(descriptor.getChildren().length == 2, descriptor.getChildren().length + " == 2"); for (int i = 0; i < descriptor.getChildren().length; i++) { FilterDescriptor child = descriptor.getChildren()[i]; - check(child.getOperation().expressionKind == Kind.Value, - child.getOperation().expressionKind + " == Value"); + check(child.getOperation().expressionKind == Kind.Value, child.getOperation().expressionKind + " == Value"); child.accept(this); } stack.pop(); @@ -108,8 +105,7 @@ public void onLessThanOrEqualTo(FilterDescriptor descriptor) { check(descriptor.getChildren().length == 2, descriptor.getChildren().length + " == 2"); for (int i = 0; i < descriptor.getChildren().length; i++) { FilterDescriptor child = descriptor.getChildren()[i]; - check(child.getOperation().expressionKind == Kind.Value, - child.getOperation().expressionKind + " == Value"); + check(child.getOperation().expressionKind == Kind.Value, child.getOperation().expressionKind + " == Value"); child.accept(this); } stack.pop(); @@ -121,8 +117,7 @@ public void onGreaterThanOrEqualTo(FilterDescriptor descriptor) { check(descriptor.getChildren().length == 2, descriptor.getChildren().length + " == 2"); for (int i = 0; i < descriptor.getChildren().length; i++) { FilterDescriptor child = descriptor.getChildren()[i]; - check(child.getOperation().expressionKind == Kind.Value, - child.getOperation().expressionKind + " == Value"); + check(child.getOperation().expressionKind == Kind.Value, child.getOperation().expressionKind + " == Value"); child.accept(this); } stack.pop(); @@ -134,8 +129,7 @@ public void onEqual(FilterDescriptor descriptor) { check(descriptor.getChildren().length == 2, 
descriptor.getChildren().length + " == 2"); for (int i = 0; i < descriptor.getChildren().length; i++) { FilterDescriptor child = descriptor.getChildren()[i]; - check(child.getOperation().expressionKind == Kind.Value, - child.getOperation().expressionKind + " == Value"); + check(child.getOperation().expressionKind == Kind.Value, child.getOperation().expressionKind + " == Value"); child.accept(this); } stack.pop(); @@ -147,8 +141,7 @@ public void onEqualIgnoreCase(FilterDescriptor descriptor) { check(descriptor.getChildren().length == 2, descriptor.getChildren().length + " == 2"); for (int i = 0; i < descriptor.getChildren().length; i++) { FilterDescriptor child = descriptor.getChildren()[i]; - check(child.getOperation().expressionKind == Kind.Value, - child.getOperation().expressionKind + " == Value"); + check(child.getOperation().expressionKind == Kind.Value, child.getOperation().expressionKind + " == Value"); child.accept(this); } stack.pop(); @@ -160,8 +153,7 @@ public void onNotEqual(FilterDescriptor descriptor) { check(descriptor.getChildren().length == 2, descriptor.getChildren().length + " == 2"); for (int i = 0; i < descriptor.getChildren().length; i++) { FilterDescriptor child = descriptor.getChildren()[i]; - check(child.getOperation().expressionKind == Kind.Value, - child.getOperation().expressionKind + " == Value"); + check(child.getOperation().expressionKind == Kind.Value, child.getOperation().expressionKind + " == Value"); child.accept(this); } stack.pop(); @@ -173,8 +165,7 @@ public void onNotEqualIgnoreCase(FilterDescriptor descriptor) { check(descriptor.getChildren().length == 2, descriptor.getChildren().length + " == 2"); for (int i = 0; i < descriptor.getChildren().length; i++) { FilterDescriptor child = descriptor.getChildren()[i]; - check(child.getOperation().expressionKind == Kind.Value, - child.getOperation().expressionKind + " == Value"); + check(child.getOperation().expressionKind == Kind.Value, child.getOperation().expressionKind + " == 
Value"); child.accept(this); } stack.pop(); @@ -229,10 +220,10 @@ public void onInvoke(FilterDescriptor descriptor) { // check name+args against known whitelist - may be impl'd different on client than server check(invokeCheck.test(descriptor.getValue(), descriptor.getChildren()), - "User filters are not permitted to use method " + descriptor.getValue()); + "User filters are not permitted to use method " + descriptor.getValue()); check(descriptor.getChildren().length > 0, - "Invocation is poorly formed, must have at least one child representing the instance"); + "Invocation is poorly formed, must have at least one child representing the instance"); boolean isStatic = descriptor.getChildren()[0] == null; @@ -309,21 +300,18 @@ public void onPatternIgnoreCase(FilterDescriptor descriptor) { } private void validatePatternFilter(FilterDescriptor descriptor, String name) { - check(descriptor.getChildren().length == 2, - name + " must have one column reference and one string parameter"); + check(descriptor.getChildren().length == 2, name + " must have one column reference and one string parameter"); final FilterDescriptor col = descriptor.getChildren()[0]; final FilterDescriptor param = descriptor.getChildren()[1]; // note that the REFERENCE/LITERAL restrictions could be relaxed check(col != null, name + " must not be called on a null value"); - check(col.getOperation() == FilterOperation.REFERENCE, - name + " can only be called on a column reference"); + check(col.getOperation() == FilterOperation.REFERENCE, name + " can only be called on a column reference"); onReference(col); check(param != null, name + " must not be passed a null parameter"); - check( - param.getType() == ValueType.String && param.getOperation() == FilterOperation.LITERAL, - name + " must be given a string literal parameter"); + check(param.getType() == ValueType.String && param.getOperation() == FilterOperation.LITERAL, + name + " must be given a string literal parameter"); onLiteral(param); } @@ 
-331,23 +319,20 @@ private void validatePatternFilter(FilterDescriptor descriptor, String name) { public void onSearch(FilterDescriptor descriptor) { // verify we aren't nested in a NOT if (stack.size() > 0) { - check(stack.peek().getOperation() != FilterOperation.NOT, - "Not(Search) is not supported"); + check(stack.peek().getOperation() != FilterOperation.NOT, "Not(Search) is not supported"); } check(descriptor.getChildren().length >= 1, "Search must have at least one param"); FilterDescriptor param = descriptor.getChildren()[0]; check(param != null, "Search must not be passed a null value"); - check( - param.getType() == ValueType.String && param.getOperation() == FilterOperation.LITERAL, - "Search must be given a string literal parameter"); + check(param.getType() == ValueType.String && param.getOperation() == FilterOperation.LITERAL, + "Search must be given a string literal parameter"); onLiteral(param); Arrays.stream(descriptor.getChildren()).skip(1).forEach(col -> { check(col != null, "Search column must not be null"); - check(col.getOperation() == FilterOperation.REFERENCE, - "Search column must be a column reference"); + check(col.getOperation() == FilterOperation.REFERENCE, "Search column must be a column reference"); onReference(col); }); } diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/ast/FlipNonReferenceMatchExpression.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/ast/FlipNonReferenceMatchExpression.java index 887db920468..e5acf6c4839 100644 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/ast/FlipNonReferenceMatchExpression.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/ast/FlipNonReferenceMatchExpression.java @@ -3,22 +3,20 @@ import io.deephaven.web.shared.data.FilterDescriptor; /** - * Enforces that all IN-type expressions have a reference on the left side and only literals on the - * right side. 
Any non-reference on the left or reference on the right will trigger the entire - * IN-type expression being replaced with an OR or AND, with a sub-IN for each expression on the - * right side. + * Enforces that all IN-type expressions have a reference on the left side and only literals on the right side. Any + * non-reference on the left or reference on the right will trigger the entire IN-type expression being replaced with an + * OR or AND, with a sub-IN for each expression on the right side. * * - * Examples: o ColumnA in 1, 2, 3 - left as-is o ColumnA in 1, 2, ColumnB - rewritten as (ColumnA in - * 1 OR ColumnA in 2 OR ColumnA in ColumnB) o 1 in 3, 4, 5 - will be rewritten as (3 in 1 OR 4 in 1 - * OR 5 in 1). This is a silly case, but we're not judging. At this step. o 1 in ColumnA, 4, 5 - - * will be rewritten as (ColumnA in 1 OR 4 in 1 OR 5 in 1) o 1 in ColumnA - will be rewritten as - * ColumnA in 1 o ColumnA in ColumnB - will be rewritten as ColumnB in ColumnA. Note that like the - * second example, this isn't productive on its own, but as a pair with a reference on the right, it - * will be noticed by {@link ConvertInvalidInExpressions}. + * Examples: o ColumnA in 1, 2, 3 - left as-is o ColumnA in 1, 2, ColumnB - rewritten as (ColumnA in 1 OR ColumnA in 2 + * OR ColumnA in ColumnB) o 1 in 3, 4, 5 - will be rewritten as (3 in 1 OR 4 in 1 OR 5 in 1). This is a silly case, but + * we're not judging. At this step. o 1 in ColumnA, 4, 5 - will be rewritten as (ColumnA in 1 OR 4 in 1 OR 5 in 1) o 1 + * in ColumnA - will be rewritten as ColumnA in 1 o ColumnA in ColumnB - will be rewritten as ColumnB in ColumnA. Note + * that like the second example, this isn't productive on its own, but as a pair with a reference on the right, it will + * be noticed by {@link ConvertInvalidInExpressions}. * - * It is assumed that some time after this step, related "in" expressions will be merged together, - * and that these one-off expressions will get checked later. 
+ * It is assumed that some time after this step, related "in" expressions will be merged together, and that these + * one-off expressions will get checked later. */ public class FlipNonReferenceMatchExpression extends ReplacingVisitor { public static FilterDescriptor execute(final FilterDescriptor filter) { @@ -45,14 +43,11 @@ public FilterDescriptor onNotInIgnoreCase(final FilterDescriptor descriptor) { return handleIn(descriptor, FilterDescriptor.FilterOperation.AND); } - private FilterDescriptor handleIn(final FilterDescriptor descriptor, - final FilterDescriptor.FilterOperation op) { + private FilterDescriptor handleIn(final FilterDescriptor descriptor, final FilterDescriptor.FilterOperation op) { // check each child - if we pass all checks we will give up - boolean rewrite = descriptor.getChildren()[0] - .getOperation() != FilterDescriptor.FilterOperation.REFERENCE; + boolean rewrite = descriptor.getChildren()[0].getOperation() != FilterDescriptor.FilterOperation.REFERENCE; for (int i = 1; !rewrite && i < descriptor.getChildren().length; i++) { - if (descriptor.getChildren()[i] - .getOperation() == FilterDescriptor.FilterOperation.REFERENCE) { + if (descriptor.getChildren()[i].getOperation() == FilterDescriptor.FilterOperation.REFERENCE) { rewrite = true; } } @@ -64,7 +59,7 @@ private FilterDescriptor handleIn(final FilterDescriptor descriptor, if (count == 1) { // make a single node to replace with, just swap the order of the two children return new FilterDescriptor(descriptor.getOperation(), null, null, - new FilterDescriptor[] {descriptor.getChildren()[1], descriptor.getChildren()[0]}); + new FilterDescriptor[] {descriptor.getChildren()[1], descriptor.getChildren()[0]}); } // make a AND/OR to join each of the new children with @@ -72,9 +67,8 @@ private FilterDescriptor handleIn(final FilterDescriptor descriptor, replacement.setOperation(op); replacement.setChildren(new FilterDescriptor[count]); for (int i = 0; i < count; i++) { - 
replacement.getChildren()[i] = - new FilterDescriptor(descriptor.getOperation(), null, null, new FilterDescriptor[] { - descriptor.getChildren()[i + 1], descriptor.getChildren()[0]}); + replacement.getChildren()[i] = new FilterDescriptor(descriptor.getOperation(), null, null, + new FilterDescriptor[] {descriptor.getChildren()[i + 1], descriptor.getChildren()[0]}); } return replacement; diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/ast/GetTopLevelFilters.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/ast/GetTopLevelFilters.java index 0dd672dabc5..23e580229e4 100644 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/ast/GetTopLevelFilters.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/ast/GetTopLevelFilters.java @@ -8,12 +8,12 @@ import java.util.List; /** - * Recursively removes top-level AND operations, if any, resulting in an array of filters to run, - * potentially only containing a single filter if the top-level filter is not an AND. + * Recursively removes top-level AND operations, if any, resulting in an array of filters to run, potentially only + * containing a single filter if the top-level filter is not an AND. * - * This can be run before the FilterPrinter to provide multiple strings to execute individually. It - * should be run after other optimizations which might merge operations, and should be run before - * nested Match operations are rewritten to invocations. + * This can be run before the FilterPrinter to provide multiple strings to execute individually. It should be run after + * other optimizations which might merge operations, and should be run before nested Match operations are rewritten to + * invocations. 
*/ public class GetTopLevelFilters { diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/ast/MakeExpressionsNullSafe.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/ast/MakeExpressionsNullSafe.java index 1d95beca54a..3e1ab6266c8 100644 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/ast/MakeExpressionsNullSafe.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/ast/MakeExpressionsNullSafe.java @@ -4,8 +4,8 @@ import io.deephaven.web.shared.data.FilterDescriptor.FilterOperation; /** - * Rewrites logical expressions into an actual version that does what would be expected. Right now - * this is just equalsIgnoreCase and its negation, to support null values. + * Rewrites logical expressions into an actual version that does what would be expected. Right now this is just + * equalsIgnoreCase and its negation, to support null values. */ public class MakeExpressionsNullSafe extends ReplacingVisitor { public static FilterDescriptor execute(FilterDescriptor descriptor) { @@ -31,18 +31,17 @@ private FilterDescriptor rewriteEqualIgnoreCaseExpression(FilterDescriptor descr FilterDescriptor rhs = descriptor.getChildren()[1]; return node( - FilterOperation.OR, - node(FilterOperation.AND, - node(FilterOperation.IS_NULL, lhs), - node(FilterOperation.IS_NULL, rhs)), - node(FilterOperation.AND, - node(FilterOperation.NOT, - node(FilterOperation.IS_NULL, lhs)), - nodeEqIgnoreCase(FilterOperation.INVOKE, lhs, rhs))); + FilterOperation.OR, + node(FilterOperation.AND, + node(FilterOperation.IS_NULL, lhs), + node(FilterOperation.IS_NULL, rhs)), + node(FilterOperation.AND, + node(FilterOperation.NOT, + node(FilterOperation.IS_NULL, lhs)), + nodeEqIgnoreCase(FilterOperation.INVOKE, lhs, rhs))); } - private FilterDescriptor nodeEqIgnoreCase(FilterOperation invoke, FilterDescriptor lhs, - FilterDescriptor rhs) { + private FilterDescriptor nodeEqIgnoreCase(FilterOperation invoke, FilterDescriptor lhs, FilterDescriptor rhs) { 
FilterDescriptor node = node(invoke, lhs, rhs); node.setValue("equalsIgnoreCase");// note that this would fail validation return node; diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/ast/MergeNestedBinaryOperations.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/ast/MergeNestedBinaryOperations.java index a794058eb94..5abf26d4563 100644 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/ast/MergeNestedBinaryOperations.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/ast/MergeNestedBinaryOperations.java @@ -6,11 +6,10 @@ import java.util.List; /** - * Any AND nested within another AND or OR nested within another OR should be flattened into just a - * single level. + * Any AND nested within another AND or OR nested within another OR should be flattened into just a single level. * - * This should be run after NOTs are distributed (so that (A AND B AND !(C OR D)) is first - * normalized to (A AND B AND (!C AND !D))). + * This should be run after NOTs are distributed (so that (A AND B AND !(C OR D)) is first normalized to (A AND B AND + * (!C AND !D))). 
*/ public class MergeNestedBinaryOperations extends ReplacingVisitor { public static FilterDescriptor execute(FilterDescriptor descriptor) { @@ -27,8 +26,8 @@ public FilterDescriptor onAnd(FilterDescriptor descriptor) { return super.onAnd(descriptor); } - FilterDescriptor replacement = new FilterDescriptor(descriptor.getOperation(), null, null, - topLevel.toArray(new FilterDescriptor[0])); + FilterDescriptor replacement = + new FilterDescriptor(descriptor.getOperation(), null, null, topLevel.toArray(new FilterDescriptor[0])); return super.onAnd(replacement); } @@ -43,14 +42,14 @@ public FilterDescriptor onOr(FilterDescriptor descriptor) { return super.onOr(descriptor); } - FilterDescriptor replacement = new FilterDescriptor(descriptor.getOperation(), null, null, - topLevel.toArray(new FilterDescriptor[0])); + FilterDescriptor replacement = + new FilterDescriptor(descriptor.getOperation(), null, null, topLevel.toArray(new FilterDescriptor[0])); return super.onOr(replacement); } private void handleItem(List topLevel, FilterDescriptor descriptor, - FilterDescriptor.FilterOperation operation) { + FilterDescriptor.FilterOperation operation) { if (descriptor.getOperation() == operation) { for (FilterDescriptor child : descriptor.getChildren()) { handleItem(topLevel, child, operation); diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/ast/MergeRelatedSiblingExpressions.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/ast/MergeRelatedSiblingExpressions.java index 24fd8ab2015..cc9ab49638f 100644 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/ast/MergeRelatedSiblingExpressions.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/ast/MergeRelatedSiblingExpressions.java @@ -7,16 +7,15 @@ import java.util.stream.Stream; /** - * If two or more IN-like expressions are in the same OR (or NOT-INs in the same AND), join them - * together into a single expression. 
This may change the order of expressions, but should not have - * side effects - execution order of null checks vs invoke for example, should not be affected by - * this. All IN expressions are moved to the front of the list. + * If two or more IN-like expressions are in the same OR (or NOT-INs in the same AND), join them together into a single + * expression. This may change the order of expressions, but should not have side effects - execution order of null + * checks vs invoke for example, should not be affected by this. All IN expressions are moved to the front of the list. * - * Examples: o A in 1 AND B in 2 - left as is o A not in 1 AND A not in 2 - rewritten as A not in 1, - * 2 o A == B OR A in 1 OR A == C OR A in 2 - rewritten as A in 1, 2 OR A == B OR A == C + * Examples: o A in 1 AND B in 2 - left as is o A not in 1 AND A not in 2 - rewritten as A not in 1, 2 o A == B OR A in + * 1 OR A == C OR A in 2 - rewritten as A in 1, 2 OR A == B OR A == C * - * This assumes that all nested ORs and ANDs have been flattened already, NOTs normalized, and that - * we're happy with EQs vs INs and their children. + * This assumes that all nested ORs and ANDs have been flattened already, NOTs normalized, and that we're happy with EQs + * vs INs and their children. 
*/ public class MergeRelatedSiblingExpressions extends ReplacingVisitor { public static FilterDescriptor execute(final FilterDescriptor filter) { @@ -25,8 +24,7 @@ public static FilterDescriptor execute(final FilterDescriptor filter) { @Override public FilterDescriptor onAnd(final FilterDescriptor descriptor) { - FilterDescriptor result = - mergeChildren(descriptor, FilterDescriptor.FilterOperation.NOT_IN); + FilterDescriptor result = mergeChildren(descriptor, FilterDescriptor.FilterOperation.NOT_IN); return mergeChildren(result, FilterDescriptor.FilterOperation.NOT_IN_ICASE); } @@ -36,19 +34,17 @@ public FilterDescriptor onOr(final FilterDescriptor descriptor) { return mergeChildren(result, FilterDescriptor.FilterOperation.IN_ICASE); } - private FilterDescriptor mergeChildren(FilterDescriptor descriptor, - FilterDescriptor.FilterOperation op) { + private FilterDescriptor mergeChildren(FilterDescriptor descriptor, FilterDescriptor.FilterOperation op) { // Examine each child and group by reference name and operation, // and combine those into a single operation final Map> collected = new LinkedHashMap<>(); final List leftover = new ArrayList<>(); for (final FilterDescriptor child : descriptor.getChildren()) { if (child.getOperation() == op) { - assert child.getChildren()[0] - .getOperation() == FilterDescriptor.FilterOperation.REFERENCE; + assert child.getChildren()[0].getOperation() == FilterDescriptor.FilterOperation.REFERENCE; collected.computeIfAbsent(child.getChildren()[0], ignore -> new LinkedHashSet<>()) - .addAll(Arrays.stream(child.getChildren()).skip(1) - .collect(Collectors.toCollection(LinkedHashSet::new))); + .addAll(Arrays.stream(child.getChildren()).skip(1) + .collect(Collectors.toCollection(LinkedHashSet::new))); } else { leftover.add(child); } @@ -63,11 +59,10 @@ private FilterDescriptor mergeChildren(FilterDescriptor descriptor, if (newChildCount == 1) { assert leftover.isEmpty() - : "Must be empty since collected is non-empty since the new and old 
child counts differ"; + : "Must be empty since collected is non-empty since the new and old child counts differ"; // only one expression remains from the collection work - Map.Entry> nameAndValues = - collected.entrySet().iterator().next(); + Map.Entry> nameAndValues = collected.entrySet().iterator().next(); return mergedFilterDescriptor(op, nameAndValues.getKey(), nameAndValues.getValue()); } @@ -75,10 +70,8 @@ private FilterDescriptor mergeChildren(FilterDescriptor descriptor, // produce new IN-type expressions int i = 0; - for (Map.Entry> nameAndValues : collected - .entrySet()) { - replacementChildren[i++] = - mergedFilterDescriptor(op, nameAndValues.getKey(), nameAndValues.getValue()); + for (Map.Entry> nameAndValues : collected.entrySet()) { + replacementChildren[i++] = mergedFilterDescriptor(op, nameAndValues.getKey(), nameAndValues.getValue()); } // copy in the remaining leftovers @@ -90,10 +83,10 @@ private FilterDescriptor mergeChildren(FilterDescriptor descriptor, return new FilterDescriptor(descriptor.getOperation(), null, null, replacementChildren); } - private FilterDescriptor mergedFilterDescriptor(FilterDescriptor.FilterOperation op, - FilterDescriptor column, Set values) { + private FilterDescriptor mergedFilterDescriptor(FilterDescriptor.FilterOperation op, FilterDescriptor column, + Set values) { return new FilterDescriptor(op, null, null, Stream.concat( - Stream.of(column), - values.stream()).toArray(FilterDescriptor[]::new)); + Stream.of(column), + values.stream()).toArray(FilterDescriptor[]::new)); } } diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/ast/NormalizeNots.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/ast/NormalizeNots.java index 8a832fdd57c..d07805e9462 100644 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/ast/NormalizeNots.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/ast/NormalizeNots.java @@ -3,17 +3,15 @@ import io.deephaven.web.shared.data.FilterDescriptor; 
/** - * Rewrites NOT expressions, with the goal of removing as many as possible and simplifying the - * expression tree. This visitor delegates its work to {@link FilterDescriptor#not()}, which uses - * the following rules: + * Rewrites NOT expressions, with the goal of removing as many as possible and simplifying the expression tree. This + * visitor delegates its work to {@link FilterDescriptor#not()}, which uses the following rules: *
      *
    • Distribute NOTs to children of AND/OR expressions, via DeMorgan's law.
    • *
    • {@code NOT(NOT(A))} is replaced with A.
    • - *
    • Replace any operation with its opposite, if any. For example, {@code NOT(A >= B)} is replaced - * with {@code A < B}, and likewise for all the other inequality operators, {@code EQ}, and - * {@code IN}.
    • - *
    • Other operations {@code IS_NULL}, {@code INVOKE}, {@code SEARCH}, {@code CONTAINS} are left - * as-is, wrapped wrapped with a {@code NOT}.
    • + *
    • Replace any operation with its opposite, if any. For example, {@code NOT(A >= B)} is replaced with {@code A < B}, + * and likewise for all the other inequality operators, {@code EQ}, and {@code IN}.
    • + *
    • Other operations {@code IS_NULL}, {@code INVOKE}, {@code SEARCH}, {@code CONTAINS} are left as-is, wrapped + * wrapped with a {@code NOT}.
    • *
    */ public class NormalizeNots extends ReplacingVisitor { @@ -23,8 +21,7 @@ public static FilterDescriptor execute(FilterDescriptor descriptor) { @Override public FilterDescriptor onNot(FilterDescriptor descriptor) { - // First deal with this not, then visit its children. This way when we remove the current - // node, we have + // First deal with this not, then visit its children. This way when we remove the current node, we have // the change to remove children too, instead of rewriting other nodes twice. return visitChildren(descriptor.getChildren()[0].not()); } diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/ast/ReplacingVisitor.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/ast/ReplacingVisitor.java index 5b9c4af68cd..c645bede2a5 100644 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/ast/ReplacingVisitor.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/ast/ReplacingVisitor.java @@ -174,8 +174,7 @@ protected FilterDescriptor visitChildren(FilterDescriptor descriptor) { if (!changed) { return descriptor; } - return new FilterDescriptor(descriptor.getOperation(), descriptor.getValue(), - descriptor.getType(), children); + return new FilterDescriptor(descriptor.getOperation(), descriptor.getValue(), descriptor.getType(), children); } } diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/batch/BatchTableRequest.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/batch/BatchTableRequest.java index 6a908fec741..f7a23c07fcb 100644 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/batch/BatchTableRequest.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/batch/BatchTableRequest.java @@ -6,14 +6,12 @@ import java.util.Arrays; /** - * A place to collect up table operations that can be batched. There are two ways to group - * operations, a single step can perform more than one operation, and multiple steps can be - * specified. 
Within a given step, the operations will be run in the default order and will only be - * exported to the client once, but by providing multiple steps any preferred order can be achieve, - * or intermediate tables can be exported to the client. + * A place to collect up table operations that can be batched. There are two ways to group operations, a single step can + * perform more than one operation, and multiple steps can be specified. Within a given step, the operations will be run + * in the default order and will only be exported to the client once, but by providing multiple steps any preferred + * order can be achieve, or intermediate tables can be exported to the client. * - * This object is only meant for serialization; all brains to construct it are in the RequestBatcher - * class. + * This object is only meant for serialization; all brains to construct it are in the RequestBatcher class. */ public class BatchTableRequest implements Serializable { @@ -114,16 +112,16 @@ public void setUpdateIntervalMs(int updateIntervalMs) { @Override public String toString() { return "SerializedTableOps{" + - "handles=" + handles + - ", viewColumns=" + Arrays.toString(viewColumns) + - ", dropColumns=" + Arrays.toString(dropColumns) + - ", headOrTail=" + headOrTail + - ", sorts=" + Arrays.toString(sorts) + - ", filters=" + Arrays.toString(filters) + - ", customColumns=" + Arrays.toString(customColumns) + - ", isFlat=" + isFlat + - ", updateIntervalMs=" + updateIntervalMs + - '}'; + "handles=" + handles + + ", viewColumns=" + Arrays.toString(viewColumns) + + ", dropColumns=" + Arrays.toString(dropColumns) + + ", headOrTail=" + headOrTail + + ", sorts=" + Arrays.toString(sorts) + + ", filters=" + Arrays.toString(filters) + + ", customColumns=" + Arrays.toString(customColumns) + + ", isFlat=" + isFlat + + ", updateIntervalMs=" + updateIntervalMs + + '}'; } public boolean hasDropColumns() { @@ -143,8 +141,8 @@ public boolean hasHeadOrTail() { } /** - * @return true any time we 
expect to need to send a full table definition back to client - * (columns added or removed, etc). + * @return true any time we expect to need to send a full table definition back to client (columns added or + * removed, etc). */ public boolean hasStructuralModification() { return hasCustomColumns(); @@ -177,8 +175,7 @@ public boolean hasUpdateIntervalMs() { public boolean isEmpty() { return !(hasDropColumns() || hasViewColumns() || hasHeadOrTail() - || hasSorts() || hasFilters() || hasCustomColumns() || isFlat() - || hasUpdateIntervalMs()); + || hasSorts() || hasFilters() || hasCustomColumns() || isFlat() || hasUpdateIntervalMs()); } } @@ -193,8 +190,8 @@ public void setOps(SerializedTableOps[] ops) { @Override public String toString() { return "BatchTableRequest{" + - "ops=" + Arrays.toString(ops) + - '}'; + "ops=" + Arrays.toString(ops) + + '}'; } public boolean isEmpty() { diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/batch/BatchTableResponse.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/batch/BatchTableResponse.java index a3c5f64e4f7..efcfdf69ed5 100644 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/batch/BatchTableResponse.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/batch/BatchTableResponse.java @@ -7,9 +7,8 @@ import java.util.Arrays; /** - * A response object for batch requests; only contains failure messages, since successful results - * will be pushed directly the client as they come in (rather than waiting until entire batch is - * complete). + * A response object for batch requests; only contains failure messages, since successful results will be pushed + * directly the client as they come in (rather than waiting until entire batch is complete). * * Seems like we could do {@code Callback} instead... 
*/ @@ -47,10 +46,10 @@ public void setFailureMessages(String[] failureMessages) { @Override public String toString() { return "BatchTableResponse{" + - "success=" + Arrays.toString(success) + - ", failedTableHandles=" + Arrays.toString(failedTableHandles) + - ", failureMessages=" + Arrays.toString(failureMessages) + - '}'; + "success=" + Arrays.toString(success) + + ", failedTableHandles=" + Arrays.toString(failedTableHandles) + + ", failureMessages=" + Arrays.toString(failureMessages) + + '}'; } public boolean hasFailures(TableHandle handle) { diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/ColumnHolder.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/ColumnHolder.java index e3d4f95fe00..608da8be710 100644 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/ColumnHolder.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/ColumnHolder.java @@ -44,9 +44,9 @@ public void setColumnData(ColumnData columnData) { @Override public String toString() { return "ColumnHolder{" + - "name='" + name + '\'' + - ", type='" + type + '\'' + - ", columnData=" + columnData + - '}'; + "name='" + name + '\'' + + ", type='" + type + '\'' + + ", columnData=" + columnData + + '}'; } } diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/ColumnStatistics.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/ColumnStatistics.java index a9b68f6e089..2b9b9c54f89 100644 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/ColumnStatistics.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/ColumnStatistics.java @@ -4,8 +4,7 @@ import java.util.Arrays; /** - * A DTO containing the result data from a call to GenerateComparableStatsFunction or - * GenerateNumericalStatsFunction + * A DTO containing the result data from a call to GenerateComparableStatsFunction or GenerateNumericalStatsFunction */ public class ColumnStatistics implements Serializable { public 
enum ColumnType { @@ -148,20 +147,20 @@ public void setMaxDateTime(final long maxDateTime) { @Override public String toString() { return "ColumnStatistics{" + - "type=" + type + - ", size=" + size + - ", count=" + count + - ", numUnique=" + numUnique + - ", uniqueKeys=" + Arrays.toString(uniqueKeys) + - ", uniqueValues=" + Arrays.toString(uniqueValues) + - ", sum=" + sum + - ", absSum=" + absSum + - ", min=" + min + - ", max=" + max + - ", absMin=" + absMin + - ", absMax=" + absMax + - ", minDateTime=" + minDateTime + - ", maxDateTime=" + maxDateTime + - '}'; + "type=" + type + + ", size=" + size + + ", count=" + count + + ", numUnique=" + numUnique + + ", uniqueKeys=" + Arrays.toString(uniqueKeys) + + ", uniqueValues=" + Arrays.toString(uniqueValues) + + ", sum=" + sum + + ", absSum=" + absSum + + ", min=" + min + + ", max=" + max + + ", absMin=" + absMin + + ", absMax=" + absMax + + ", minDateTime=" + minDateTime + + ", maxDateTime=" + maxDateTime + + '}'; } } diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/ColumnValue.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/ColumnValue.java index 8018f0999a4..4abfd547e4c 100644 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/ColumnValue.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/ColumnValue.java @@ -5,22 +5,22 @@ /** * An opaque representation of a column's value to be used by clients. * - * This allows us to send a column id and a string value, and then translate those into correctly - * typed objects after serialization. + * This allows us to send a column id and a string value, and then translate those into correctly typed objects after + * serialization. * - * These will be useless without association with a valid table identifier, so the appropriate - * definition and types can be loaded on the receiving end. 
+ * These will be useless without association with a valid table identifier, so the appropriate definition and types can + * be loaded on the receiving end. * */ public class ColumnValue implements Serializable { public static final char ARRAY_DELIMITER = ','; /** - * Tech debt: we replace all | with \1 in array values when escaping them. actual \1's in values - * should also be escaped, but will currently fail. + * Tech debt: we replace all | with \1 in array values when escaping them. actual \1's in values should also be + * escaped, but will currently fail. * - * Anything transporting arrays of binary data (or weird strings with control characters in - * them) will need to fixup the ColumnValueRe(/De)hydrater classes which reference this field. + * Anything transporting arrays of binary data (or weird strings with control characters in them) will need to fixup + * the ColumnValueRe(/De)hydrater classes which reference this field. */ public static final char ESCAPER = '\1'; public static final char ESCAPER_ESCAPED = '\2'; @@ -60,8 +60,8 @@ public void setValue(String value) { @Override public String toString() { return "ColumnValue{" + - "columnId=" + columnId + - ", value='" + value + '\'' + - '}'; + "columnId=" + columnId + + ", value='" + value + '\'' + + '}'; } } diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/CustomColumnDescriptor.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/CustomColumnDescriptor.java index dca95712ceb..3a2a36e2fc9 100644 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/CustomColumnDescriptor.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/CustomColumnDescriptor.java @@ -9,11 +9,10 @@ /** * We still send plain strings to the server and receive plain strings from the client. * - * This is here mostly to have a sane place to handle client introspection of custom column - * definitions. 
+ * This is here mostly to have a sane place to handle client introspection of custom column definitions. * - * We should probably wire this into place for our internal guts, and convert to this form - * immediately upon receiving input from user. + * We should probably wire this into place for our internal guts, and convert to this form immediately upon receiving + * input from user. * */ public class CustomColumnDescriptor implements Serializable { @@ -25,8 +24,7 @@ public class CustomColumnDescriptor implements Serializable { /** * Extracts the column name from a given column expression. * - * Based on the logic in io.deephaven.db.tables.select.SelectColumnFactory, the valid - * expressions take the form: + * Based on the logic in io.deephaven.db.tables.select.SelectColumnFactory, the valid expressions take the form: * *
          *     
    @@ -35,20 +33,18 @@ public class CustomColumnDescriptor implements Serializable {
          *     last()
          * 
    * - * So, we can safely extract a column name for this to have some semblance of identity semantics - * for custom column definitions. + * So, we can safely extract a column name for this to have some semblance of identity semantics for custom column + * definitions. * * Also, we are explicitly *NOT* supporting deprecated last() syntax, so it will be ignored. * - * @param expression A valid column expression. We perform no validation beyond an assertion on - * the resulting name. + * @param expression A valid column expression. We perform no validation beyond an assertion on the resulting name. * @return A valid column name if the input column expression is itself valid. */ private static String extractColumnName(String expression) { expression = expression.trim(); String result = expression.split("=")[0].trim(); - assert result.matches(VALID_ID_REGEX) - : "Invalid column name " + result + " extracted from " + expression; + assert result.matches(VALID_ID_REGEX) : "Invalid column name " + result + " extracted from " + expression; return result; } @@ -79,7 +75,7 @@ public boolean equals(Object o) { final CustomColumnDescriptor that = (CustomColumnDescriptor) o; return Objects.equals(expression, that.expression) && - Objects.equals(name, that.name); + Objects.equals(name, that.name); } @Override @@ -87,8 +83,7 @@ public int hashCode() { return Objects.hash(expression, name); } - public static boolean isCompatible(List was, - List is) { + public static boolean isCompatible(List was, List is) { HashSet existing = new HashSet<>(); for (CustomColumnDescriptor col : was) { existing.add(col.getName()); diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/DeltaUpdates.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/DeltaUpdates.java index 8de6e6aa37f..b217f9a27a3 100644 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/DeltaUpdates.java +++ 
b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/DeltaUpdates.java @@ -39,8 +39,7 @@ public static class ColumnModifications implements Serializable { public ColumnModifications() {} - public ColumnModifications(int columnIndex, RangeSet includedModifications, - ColumnData columnData) { + public ColumnModifications(int columnIndex, RangeSet includedModifications, ColumnData columnData) { setColumnIndex(columnIndex); setRowsIncluded(includedModifications); setValues(columnData); @@ -87,9 +86,8 @@ public void setValues(final ColumnData values) { public DeltaUpdates() {} - public DeltaUpdates(RangeSet added, RangeSet removed, ShiftedRange[] shifted, - RangeSet includedAdditions, ColumnAdditions[] addedColumnData, - ColumnModifications[] modifiedColumnData) { + public DeltaUpdates(RangeSet added, RangeSet removed, ShiftedRange[] shifted, RangeSet includedAdditions, + ColumnAdditions[] addedColumnData, ColumnModifications[] modifiedColumnData) { setAdded(added); setRemoved(removed); setShiftedRanges(shifted); diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/FilterDescriptor.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/FilterDescriptor.java index 1cef5784937..dfbab9e6b83 100644 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/FilterDescriptor.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/FilterDescriptor.java @@ -17,13 +17,11 @@ public enum FilterOperation { // 1 child, condition. 
this is sugar over almost any other Condition operation NOT(Kind.Condition), // 2 children are values - LT(Kind.Condition), GT(Kind.Condition), LTE(Kind.Condition), GTE(Kind.Condition), EQ( - Kind.Condition), EQ_ICASE( + LT(Kind.Condition), GT(Kind.Condition), LTE(Kind.Condition), GTE(Kind.Condition), EQ(Kind.Condition), EQ_ICASE( Kind.Condition), NEQ(Kind.Condition), NEQ_ICASE(Kind.Condition), // 2+ children are values - IN(Kind.Condition), IN_ICASE(Kind.Condition), NOT_IN(Kind.Condition), NOT_IN_ICASE( - Kind.Condition), + IN(Kind.Condition), IN_ICASE(Kind.Condition), NOT_IN(Kind.Condition), NOT_IN_ICASE(Kind.Condition), // 1 child is anything (probably just value) IS_NULL(Kind.Condition), // 0+ children are anything @@ -31,8 +29,8 @@ public enum FilterOperation { // 0 children LITERAL(Kind.Value), REFERENCE(Kind.Value), - CONTAINS(Kind.Condition), CONTAINS_ICASE(Kind.Condition), MATCHES( - Kind.Condition), MATCHES_ICASE(Kind.Condition), + CONTAINS(Kind.Condition), CONTAINS_ICASE(Kind.Condition), MATCHES(Kind.Condition), MATCHES_ICASE( + Kind.Condition), SEARCH(Kind.Condition), ; @@ -45,8 +43,8 @@ public enum FilterOperation { } /** - * Describes types of value literals. This is much rougher than we'll eventually want, but as an - * internal-only detail it does fit our purposes + * Describes types of value literals. 
This is much rougher than we'll eventually want, but as an internal-only + * detail it does fit our purposes */ public enum ValueType { // js/java String @@ -77,8 +75,8 @@ public FilterDescriptor() { children = EMPTY; } - public FilterDescriptor(FilterOperation operation, @Nullable String value, - @Nullable ValueType type, FilterDescriptor[] children) { + public FilterDescriptor(FilterOperation operation, @Nullable String value, @Nullable ValueType type, + FilterDescriptor[] children) { setOperation(operation); setValue(value); setType(type); @@ -365,10 +363,10 @@ public int hashCode() { @Override public String toString() { return "FilterDescriptor{" + - "operation=" + operation + - ", value='" + value + '\'' + - ", type=" + type + - ", children=" + Arrays.toString(children) + - '}'; + "operation=" + operation + + ", value='" + value + '\'' + + ", type=" + type + + ", children=" + Arrays.toString(children) + + '}'; } } diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/HandleMapping.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/HandleMapping.java index 6c225dcc9c2..d0108bb8098 100644 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/HandleMapping.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/HandleMapping.java @@ -57,8 +57,8 @@ public int hashCode() { @Override public String toString() { return "HandleMapping{" + - "source=" + source + - ", newId=" + newId + - '}'; + "source=" + source + + ", newId=" + newId + + '}'; } } diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/HeadOrTailDescriptor.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/HeadOrTailDescriptor.java index edf22dc7e0d..a62f272ad40 100644 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/HeadOrTailDescriptor.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/HeadOrTailDescriptor.java @@ -33,8 +33,8 @@ public void setRows(long rows) { @Override 
public String toString() { return "HeadOrTailDescriptor{" + - "head=" + head + - ", rows=" + rows + - '}'; + "head=" + head + + ", rows=" + rows + + '}'; } } diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/InitialTableDefinition.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/InitialTableDefinition.java index 27dd888b7e5..a2fa23a5879 100644 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/InitialTableDefinition.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/InitialTableDefinition.java @@ -3,8 +3,8 @@ import java.io.Serializable; /** - * A table definition constructed when using the fetch command; also includes the table id and size, - * which are not normally part of a table definition (as they will change when the table evolves) + * A table definition constructed when using the fetch command; also includes the table id and size, which are not + * normally part of a table definition (as they will change when the table evolves) */ public class InitialTableDefinition implements Serializable { diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/LocalDate.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/LocalDate.java index 558ce15110f..d78c8581658 100644 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/LocalDate.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/LocalDate.java @@ -3,9 +3,8 @@ import java.io.Serializable; /** - * A simple container for serializing LocalDate values. This should be better for serialization than - * java.time.LocalDate since we use bytes for month and day, and is compatible with GWT (java.time - * is not available in GWT). + * A simple container for serializing LocalDate values. This should be better for serialization than java.time.LocalDate + * since we use bytes for month and day, and is compatible with GWT (java.time is not available in GWT). 
*/ public class LocalDate implements Serializable { private int year; diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/LocalTime.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/LocalTime.java index de3e4839937..12aa98ec22a 100644 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/LocalTime.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/LocalTime.java @@ -3,9 +3,9 @@ import java.io.Serializable; /** - * A simple container for serializing local time values. This should be better for serialization - * than java.time.LocalTime since we use bytes for hour, minute and second, and is compatible with - * GWT (java.time is not available in GWT). + * A simple container for serializing local time values. This should be better for serialization than + * java.time.LocalTime since we use bytes for hour, minute and second, and is compatible with GWT (java.time is not + * available in GWT). */ public class LocalTime implements Serializable { private byte hour, minute, second; diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/LogItem.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/LogItem.java index 4a812600629..901023c9082 100644 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/LogItem.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/LogItem.java @@ -11,8 +11,7 @@ public class LogItem implements Serializable { private double micros; // not using long, as js numbers are all floating point anyway - private String logLevel; // not an enum because fishlib LogLevel is a class that allows you to - // create your own + private String logLevel; // not an enum because fishlib LogLevel is a class that allows you to create your own private String message; diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/Range.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/Range.java index 
a7fe2bc1de0..4a6786062fa 100644 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/Range.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/Range.java @@ -4,9 +4,9 @@ import java.io.Serializable; /** - * Describes a contiguous range of at least one item. Equals/hashcode compare both start and end, - * but comparing Range instances will compare only by start - the overlap(Range) method should be - * used to see if two ranges share at least one item. + * Describes a contiguous range of at least one item. Equals/hashcode compare both start and end, but comparing Range + * instances will compare only by start - the overlap(Range) method should be used to see if two ranges share at least + * one item. */ public class Range implements Serializable, Comparable { private long first; @@ -63,8 +63,7 @@ public Range[] minus(Range range) { return null; } if (range.first <= first && range.last >= last) { - // entirely encompasses the current range, return nothing at all indicating that the - // range is just removed + // entirely encompasses the current range, return nothing at all indicating that the range is just removed return new Range[0]; } @@ -75,8 +74,7 @@ public Range[] minus(Range range) { new Range(range.last + 1, last) }; } - // otherwise either the subtracted section's start is within our range _or_ its end is - // within our range, + // otherwise either the subtracted section's start is within our range _or_ its end is within our range, // and we can use that to only produce the one range we need to return if (range.first <= first) { assert range.last >= first : "removed range expected to not end before existing range"; @@ -84,8 +82,7 @@ public Range[] minus(Range range) { new Range(range.last + 1, last) }; } else { - assert range.last >= last - : "removed range expected to end by the end of the existing range"; + assert range.last >= last : "removed range expected to end by the end of the existing range"; assert range.first <= 
last : "removed range expected to start before existing range"; return new Range[] { new Range(first, range.first - 1) @@ -121,8 +118,8 @@ public long size() { @Override public String toString() { return "Range{" + - "first=" + first + - ", last=" + last + - '}'; + "first=" + first + + ", last=" + last + + '}'; } } diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java index 53b6ededfb1..1301f246a81 100644 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RangeSet.java @@ -70,22 +70,18 @@ public void addRange(Range range) { return; } - // if more than one other entry, binarySearch to find before and after entry, and test both - // for overlapping + // if more than one other entry, binarySearch to find before and after entry, and test both for overlapping int index = Arrays.binarySearch(sortedRanges, range); if (index >= 0) { - // starting with that item, check to see if each following item is part of the existing - // range - // we know that no range before it will need to be considered, since the set should - // previously + // starting with that item, check to see if each following item is part of the existing range + // we know that no range before it will need to be considered, since the set should previously // have been broken into non-contiguous ranges Range merged = range; int end = sortedRanges.length - 1; for (int i = index; i < sortedRanges.length; i++) { Range existing = sortedRanges[i]; - // there is an item with the same start, either new item falls within it, or should - // replace it + // there is an item with the same start, either new item falls within it, or should replace it Range overlap = existing.overlap(merged); if (overlap == null) { @@ -96,8 +92,7 @@ public void addRange(Range range) { // grow the region used for replacing merged = 
overlap; } - // splice out [index, end] items, replacing with the newly grown overlap object (may be - // the same + // splice out [index, end] items, replacing with the newly grown overlap object (may be the same // size, and only replacing one item) int newLength = sortedRanges.length - (end - index); Range[] newArray = new Range[newLength]; @@ -120,17 +115,14 @@ public void addRange(Range range) { // replace the range that we are merging, and start the slice here instead merged = overlap; proposedIndex--; - // TODO this will make the loop start here, considering this item twice. not - // ideal, but not a big deal either + // TODO this will make the loop start here, considering this item twice. not ideal, but not a big + // deal either } } - // "end" represents the last item that needs to be merged in to the newly added item. if - // no items are to be - // merged in, then end will be proposedIndex-1, meaning nothing gets merged in, and the - // array will grow + // "end" represents the last item that needs to be merged in to the newly added item. if no items are to be + // merged in, then end will be proposedIndex-1, meaning nothing gets merged in, and the array will grow // instead of shrinking. 
- // if we never find an item we cannot merge with, the end of the replaced range is the - // last item of the old + // if we never find an item we cannot merge with, the end of the replaced range is the last item of the old // array, which could result in the new array having as little as only 1 item int end = sortedRanges.length - 1; // until we quit finding matches, test subsequent items @@ -152,8 +144,7 @@ public void addRange(Range range) { } newArray[proposedIndex] = merged; if (end < sortedRanges.length - 1) { - System.arraycopy(sortedRanges, end + 1, newArray, proposedIndex + 1, - sortedRanges.length - (end + 1)); + System.arraycopy(sortedRanges, end + 1, newArray, proposedIndex + 1, sortedRanges.length - (end + 1)); } sortedRanges = newArray; } @@ -165,21 +156,15 @@ public void removeRange(Range range) { return; } - // search the sorted list of ranges and find where the current range starts. two case here - // when using - // binarySearch, either the removed range starts in the same place as an existing range - // starts, or + // search the sorted list of ranges and find where the current range starts. two case here when using + // binarySearch, either the removed range starts in the same place as an existing range starts, or // it starts before an item (and so we check the item before and the item after) int index = Arrays.binarySearch(sortedRanges, range); if (index < 0) { - // adjusted index notes where the item would be if it were added, minus _one more_ to - // see if - // it overlaps the item before it. To compute "the position where the new item belongs", - // we - // would do (-index - 1), so to examine one item prior to that we'll subtract one more. - // Then, - // to confirm that we are inserting in a valid position, take the max of that value and - // zero. + // adjusted index notes where the item would be if it were added, minus _one more_ to see if + // it overlaps the item before it. 
To compute "the position where the new item belongs", we + // would do (-index - 1), so to examine one item prior to that we'll subtract one more. Then, + // to confirm that we are inserting in a valid position, take the max of that value and zero. index = Math.max(0, -index - 2); } @@ -201,13 +186,11 @@ public void removeRange(Range range) { // Splice in the one extra item and we're done - this entry // both started before and ended after the removed section, // so we don't even "break", we just return - assert toCheck.getFirst() < range.getFirst() - : "Expected " + range + " to start after " + toCheck; - assert toCheck.getLast() > range.getLast() - : "Expected " + range + " to end after " + toCheck; + assert toCheck.getFirst() < range.getFirst() : "Expected " + range + " to start after " + toCheck; + assert toCheck.getLast() > range.getLast() : "Expected " + range + " to end after " + toCheck; assert toRemove == 0 && beforeCount == -1 - : "Expected that no previous items in the RangeSet had been removed toRemove=" - + toRemove + ", beforeCount=" + beforeCount; + : "Expected that no previous items in the RangeSet had been removed toRemove=" + toRemove + + ", beforeCount=" + beforeCount; Range[] replacement = new Range[sortedRanges.length + 1]; if (index > 0) { @@ -215,8 +198,7 @@ public void removeRange(Range range) { } replacement[index] = remaining[0]; replacement[index + 1] = remaining[1]; - System.arraycopy(sortedRanges, index + 1, replacement, index + 2, - sortedRanges.length - (index + 1)); + System.arraycopy(sortedRanges, index + 1, replacement, index + 2, sortedRanges.length - (index + 1)); sortedRanges = replacement; @@ -226,8 +208,7 @@ public void removeRange(Range range) { // swap shortened item and move on sortedRanges[index] = remaining[0]; } else { - assert remaining.length == 0 - : "Array contains a surprising number of items: " + remaining.length; + assert remaining.length == 0 : "Array contains a surprising number of items: " + remaining.length; 
// splice out this item as nothing exists here any more and move on if (toRemove == 0) { @@ -241,7 +222,7 @@ public void removeRange(Range range) { Range[] replacement = new Range[sortedRanges.length - toRemove]; System.arraycopy(sortedRanges, 0, replacement, 0, beforeCount); System.arraycopy(sortedRanges, beforeCount + toRemove, replacement, beforeCount, - sortedRanges.length - beforeCount - toRemove); + sortedRanges.length - beforeCount - toRemove); sortedRanges = replacement; } else { @@ -255,8 +236,8 @@ public Iterator rangeIterator() { public PrimitiveIterator.OfLong indexIterator() { return Arrays.stream(sortedRanges) - .flatMapToLong(range -> LongStream.rangeClosed(range.getFirst(), range.getLast())) - .iterator(); + .flatMapToLong(range -> LongStream.rangeClosed(range.getFirst(), range.getLast())) + .iterator(); } public int rangeCount() { @@ -302,8 +283,8 @@ public boolean includesAllOf(RangeSet other) { continue; } if (match.getLast() > current.getLast()) { - // since the match starts within current, if it ends afterward, we know at least - // one item is missing: current.getLast() + 1 + // since the match starts within current, if it ends afterward, we know at least one item is + // missing: current.getLast() + 1 return false; } // else, the match is fully contained in current, so move on to the next item @@ -316,8 +297,8 @@ public boolean includesAllOf(RangeSet other) { @Override public String toString() { return "RangeSet{" + - "sortedRanges=" + Arrays.toString(sortedRanges) + - '}'; + "sortedRanges=" + Arrays.toString(sortedRanges) + + '}'; } public long getFirstRow() { diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RowValues.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RowValues.java index 2d29930935f..20393c1dd58 100644 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RowValues.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/RowValues.java @@ -28,7 +28,7 @@ 
public void setColumns(ColumnValue[] columns) { @Override public String toString() { return "RowValues{" + - "columns=" + Arrays.toString(columns) + '}'; + "columns=" + Arrays.toString(columns) + '}'; } public boolean isEmpty() { diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/SortDescriptor.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/SortDescriptor.java index 6e1ae26a0fa..c9cf98d0cf4 100644 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/SortDescriptor.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/SortDescriptor.java @@ -43,9 +43,9 @@ public void setAbs(boolean abs) { @Override public String toString() { return "SortDescriptor{" + - "dir='" + dir + '\'' + - ", columnName='" + columnName + '\'' + - ", abs=" + abs + - '}'; + "dir='" + dir + '\'' + + ", columnName='" + columnName + '\'' + + ", abs=" + abs + + '}'; } } diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/TableAttributesDefinition.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/TableAttributesDefinition.java index f7363116f55..b823b3c84cf 100644 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/TableAttributesDefinition.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/TableAttributesDefinition.java @@ -7,17 +7,16 @@ public class TableAttributesDefinition implements Serializable { public static final String INPUT_TABLE_ATTRIBUTE = "InputTable", - TOTALS_TABLE_ATTRIBUTE = "TotalsTable", - TABLE_DESCRIPTION_ATTRIBUTE = "TableDescription", - COLUMN_DESCRIPTIONS_ATTRIBUTE = "ColumnDescriptions", - HIERARCHICAL_SOURCE_TABLE_ATTRIBUTE = "HierarchicalSourceTable", - HIERARCHICAL_SOURCE_INFO_ATTRIBUTE = "HierarchicalSourceTableInfo", - PLUGIN_NAME = "PluginName"; + TOTALS_TABLE_ATTRIBUTE = "TotalsTable", + TABLE_DESCRIPTION_ATTRIBUTE = "TableDescription", + COLUMN_DESCRIPTIONS_ATTRIBUTE = "ColumnDescriptions", + 
HIERARCHICAL_SOURCE_TABLE_ATTRIBUTE = "HierarchicalSourceTable", + HIERARCHICAL_SOURCE_INFO_ATTRIBUTE = "HierarchicalSourceTableInfo", + PLUGIN_NAME = "PluginName"; // special cased attributes that have a complex type yet are always sent private RollupDefinition rollupDefinition;// rollup subtype of "HierarchicalSourceTableInfo" - private String treeHierarchicalColumnName;// technically a part of - // "HierarchicalSourceTableInfo", won't be copied + private String treeHierarchicalColumnName;// technically a part of "HierarchicalSourceTableInfo", won't be copied // separately private String[][] columnDescriptions;// "ColumnDescriptions" diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/TableHandle.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/TableHandle.java index 6898082f19f..236b51e1118 100644 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/data/TableHandle.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/data/TableHandle.java @@ -5,15 +5,15 @@ import javax.annotation.Nonnull; /** - * In order to track tables sanely, we will force the client to choose an id to be used for tables - * _before_ they are created, and then map server table handles back to those ids. + * In order to track tables sanely, we will force the client to choose an id to be used for tables _before_ they are + * created, and then map server table handles back to those ids. * - * While this forces the server to do a little bending over to figure out where a table is expected - * by the client before sending messages, this allows the client to cancel an in-flight request, - * before it finishes and the server id is known. + * While this forces the server to do a little bending over to figure out where a table is expected by the client before + * sending messages, this allows the client to cancel an in-flight request, before it finishes and the server id is + * known. 
* - * Note that this object uses object identity semantics in its equals method; we only consider the - * clientId in hashCode/equals, so we can safely use these as map keys before the serverId is known. + * Note that this object uses object identity semantics in its equals method; we only consider the clientId in + * hashCode/equals, so we can safely use these as map keys before the serverId is known. */ public class TableHandle extends ServerReplyHandle implements Comparable { @@ -25,15 +25,14 @@ public class TableHandle extends ServerReplyHandle implements Comparable { diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/fu/RemoverFn.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/fu/RemoverFn.java index a6cf4a01d57..c4deffaf4f4 100644 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/fu/RemoverFn.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/fu/RemoverFn.java @@ -3,8 +3,7 @@ import jsinterop.annotations.JsFunction; /** - * Like Gwt's HandlerRegistration, but future-friendly (JsFunction), dependency-free, and easier to - * type! + * Like Gwt's HandlerRegistration, but future-friendly (JsFunction), dependency-free, and easier to type! 
*/ @JsFunction @FunctionalInterface diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/requests/RollupTableRequest.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/requests/RollupTableRequest.java index fd97a2bd331..f830771a4fc 100644 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/requests/RollupTableRequest.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/requests/RollupTableRequest.java @@ -77,12 +77,12 @@ public void setAggregations(String[] aggregations) { @Override public String toString() { return "RollupTableRequest{" + - "table=" + table + - ", resultHandle=" + resultHandle + - ", groupingColumns=" + Arrays.toString(groupingColumns) + - ", aggregations=" + Arrays.toString(aggregations) + - ", includeConstituents=" + includeConstituents + - ", includeOriginalColumns=" + includeOriginalColumns + - '}'; + "table=" + table + + ", resultHandle=" + resultHandle + + ", groupingColumns=" + Arrays.toString(groupingColumns) + + ", aggregations=" + Arrays.toString(aggregations) + + ", includeConstituents=" + includeConstituents + + ", includeOriginalColumns=" + includeOriginalColumns + + '}'; } } diff --git a/web/shared-beans/src/main/java/io/deephaven/web/shared/util/ParseUtils.java b/web/shared-beans/src/main/java/io/deephaven/web/shared/util/ParseUtils.java index 00382e828d2..16e79e079d7 100644 --- a/web/shared-beans/src/main/java/io/deephaven/web/shared/util/ParseUtils.java +++ b/web/shared-beans/src/main/java/io/deephaven/web/shared/util/ParseUtils.java @@ -2,8 +2,8 @@ public class ParseUtils { /** - * This method exists because Java's Boolean.parseBoolean is too permissive (that method maps - * "true" to true, any other string to false). + * This method exists because Java's Boolean.parseBoolean is too permissive (that method maps "true" to true, any + * other string to false). * * @return *
      diff --git a/web/shared-beans/src/test/java/io/deephaven/web/shared/ast/AbstractReplacingVisitorTest.java b/web/shared-beans/src/test/java/io/deephaven/web/shared/ast/AbstractReplacingVisitorTest.java index fb02d8dab3c..79524800c97 100644 --- a/web/shared-beans/src/test/java/io/deephaven/web/shared/ast/AbstractReplacingVisitorTest.java +++ b/web/shared-beans/src/test/java/io/deephaven/web/shared/ast/AbstractReplacingVisitorTest.java @@ -10,8 +10,7 @@ protected void assertUnchanged(String message, FilterDescriptor input) { assertEquals(message, input, execute(input)); } - protected void assertFilterEquals(String message, FilterDescriptor input, - FilterDescriptor output) { + protected void assertFilterEquals(String message, FilterDescriptor input, FilterDescriptor output) { FilterDescriptor actual = execute(input); if (!output.equals(actual)) { @@ -21,8 +20,8 @@ protected void assertFilterEquals(String message, FilterDescriptor input, } private static String print(FilterDescriptor f) { - FilterPrinter p = new FilterPrinter(str -> "\"" + str + "\"");// not correct, but good - // enough for logging failures + FilterPrinter p = new FilterPrinter(str -> "\"" + str + "\"");// not correct, but good enough for logging + // failures return p.print(f); } diff --git a/web/shared-beans/src/test/java/io/deephaven/web/shared/ast/ConvertEqToInTest.java b/web/shared-beans/src/test/java/io/deephaven/web/shared/ast/ConvertEqToInTest.java index 3675383c138..e3dce17fe82 100644 --- a/web/shared-beans/src/test/java/io/deephaven/web/shared/ast/ConvertEqToInTest.java +++ b/web/shared-beans/src/test/java/io/deephaven/web/shared/ast/ConvertEqToInTest.java @@ -10,18 +10,18 @@ public class ConvertEqToInTest extends AbstractReplacingVisitorTest { @Test public void testConvertEqToIn() { assertFilterEquals("simple EQ", - eq("ColumnA", 1), - in("ColumnA", 1)); + eq("ColumnA", 1), + in("ColumnA", 1)); assertFilterEquals("reverse EQ", - node(FilterDescriptor.FilterOperation.EQ, 
literal((double) 1), reference("ColumnA")), - in("ColumnA", 1)); + node(FilterDescriptor.FilterOperation.EQ, literal((double) 1), reference("ColumnA")), + in("ColumnA", 1)); assertUnchanged("two literals", - node(FilterDescriptor.FilterOperation.EQ, literals(1, 2))); + node(FilterDescriptor.FilterOperation.EQ, literals(1, 2))); assertUnchanged("two references", - node(FilterDescriptor.FilterOperation.EQ, reference("ColumnA"), reference("ColumnB"))); + node(FilterDescriptor.FilterOperation.EQ, reference("ColumnA"), reference("ColumnB"))); } @Override diff --git a/web/shared-beans/src/test/java/io/deephaven/web/shared/ast/ConvertInvalidInExpressionsTest.java b/web/shared-beans/src/test/java/io/deephaven/web/shared/ast/ConvertInvalidInExpressionsTest.java index cd66396dc57..8ff77c7f288 100644 --- a/web/shared-beans/src/test/java/io/deephaven/web/shared/ast/ConvertInvalidInExpressionsTest.java +++ b/web/shared-beans/src/test/java/io/deephaven/web/shared/ast/ConvertInvalidInExpressionsTest.java @@ -10,16 +10,16 @@ public class ConvertInvalidInExpressionsTest extends AbstractReplacingVisitorTes @Test public void testConvertInvalidInExpressions() { assertUnchanged("already correct", - in("ColumnA", 1)); + in("ColumnA", 1)); assertUnchanged("already correct", - in("ColumnA", 1, 2)); + in("ColumnA", 1, 2)); assertFilterEquals("literals on both sides", - node(FilterDescriptor.FilterOperation.IN, literals(1, 2)), - node(FilterDescriptor.FilterOperation.EQ, literals(1, 2))); + node(FilterDescriptor.FilterOperation.IN, literals(1, 2)), + node(FilterDescriptor.FilterOperation.EQ, literals(1, 2))); assertFilterEquals("references on both sides", - node(FilterDescriptor.FilterOperation.IN, reference("ColumnA"), reference("ColumnB")), - node(FilterDescriptor.FilterOperation.EQ, reference("ColumnA"), reference("ColumnB"))); + node(FilterDescriptor.FilterOperation.IN, reference("ColumnA"), reference("ColumnB")), + node(FilterDescriptor.FilterOperation.EQ, reference("ColumnA"), 
reference("ColumnB"))); } @Override diff --git a/web/shared-beans/src/test/java/io/deephaven/web/shared/ast/FilterTestUtils.java b/web/shared-beans/src/test/java/io/deephaven/web/shared/ast/FilterTestUtils.java index 32fad90903b..2bee52fcaf6 100644 --- a/web/shared-beans/src/test/java/io/deephaven/web/shared/ast/FilterTestUtils.java +++ b/web/shared-beans/src/test/java/io/deephaven/web/shared/ast/FilterTestUtils.java @@ -27,12 +27,10 @@ public static FilterDescriptor not(FilterDescriptor filterDescriptor) { // return node(FilterDescriptor.FilterOperation.NEQ, reference(columnName), literal(value)); // } // public static FilterDescriptor in(String columnName, String... values) { - // return node(FilterDescriptor.FilterOperation.NOT_IN, concat(reference(columnName), - // literals(values))); + // return node(FilterDescriptor.FilterOperation.NOT_IN, concat(reference(columnName), literals(values))); // } // public static FilterDescriptor notIn(String columnName, String... values) { - // return node(FilterDescriptor.FilterOperation.IN, concat(reference(columnName), - // literals(values))); + // return node(FilterDescriptor.FilterOperation.IN, concat(reference(columnName), literals(values))); // } public static FilterDescriptor reference(String columnName) { @@ -52,29 +50,24 @@ public static FilterDescriptor literal(String stringValue) { } public static FilterDescriptor[] literals(String... 
stringValues) { - return Stream.of(stringValues).map(FilterTestUtils::literal) - .toArray(FilterDescriptor[]::new); + return Stream.of(stringValues).map(FilterTestUtils::literal).toArray(FilterDescriptor[]::new); } public static FilterDescriptor eq(String columnName, int value) { - return node(FilterDescriptor.FilterOperation.EQ, reference(columnName), - literal((double) value)); + return node(FilterDescriptor.FilterOperation.EQ, reference(columnName), literal((double) value)); } public static FilterDescriptor notEq(String columnName, int value) { - return node(FilterDescriptor.FilterOperation.NEQ, reference(columnName), - literal((double) value)); + return node(FilterDescriptor.FilterOperation.NEQ, reference(columnName), literal((double) value)); } public static FilterDescriptor in(String columnName, int... values) { - return node(FilterDescriptor.FilterOperation.IN, - concat(reference(columnName), literals(values))); + return node(FilterDescriptor.FilterOperation.IN, concat(reference(columnName), literals(values))); } public static FilterDescriptor notIn(String columnName, int... values) { - return node(FilterDescriptor.FilterOperation.NOT_IN, - concat(reference(columnName), literals(values))); + return node(FilterDescriptor.FilterOperation.NOT_IN, concat(reference(columnName), literals(values))); } public static FilterDescriptor literal(long longValue) { @@ -102,32 +95,28 @@ private static FilterDescriptor numericLiteral(String numericAsString) { } public static FilterDescriptor[] literals(int... intValues) { - return IntStream.of(intValues).mapToObj(i -> literal((double) i)) - .toArray(FilterDescriptor[]::new); + return IntStream.of(intValues).mapToObj(i -> literal((double) i)).toArray(FilterDescriptor[]::new); } public static FilterDescriptor[] literals(double... 
doubleValues) { - return DoubleStream.of(doubleValues).mapToObj(FilterTestUtils::literal) - .toArray(FilterDescriptor[]::new); + return DoubleStream.of(doubleValues).mapToObj(FilterTestUtils::literal).toArray(FilterDescriptor[]::new); } private static FilterDescriptor[] concat(FilterDescriptor first, FilterDescriptor... arr) { return Stream.concat( - Stream.of(first), - Arrays.stream(arr)).toArray(FilterDescriptor[]::new); + Stream.of(first), + Arrays.stream(arr)).toArray(FilterDescriptor[]::new); } public static FilterDescriptor invoke(String method, FilterDescriptor... filterDescriptors) { - FilterDescriptor descriptor = - node(FilterDescriptor.FilterOperation.INVOKE, filterDescriptors); + FilterDescriptor descriptor = node(FilterDescriptor.FilterOperation.INVOKE, filterDescriptors); descriptor.setValue(method); return descriptor; } - public static FilterDescriptor node(FilterDescriptor.FilterOperation op, - FilterDescriptor... filterDescriptors) { + public static FilterDescriptor node(FilterDescriptor.FilterOperation op, FilterDescriptor... 
filterDescriptors) { FilterDescriptor filterDescriptor = new FilterDescriptor(); filterDescriptor.setOperation(op); filterDescriptor.setChildren(filterDescriptors); diff --git a/web/shared-beans/src/test/java/io/deephaven/web/shared/ast/FlipNonReferenceMatchExpressionTest.java b/web/shared-beans/src/test/java/io/deephaven/web/shared/ast/FlipNonReferenceMatchExpressionTest.java index 6470b5ead84..a1fef46442d 100644 --- a/web/shared-beans/src/test/java/io/deephaven/web/shared/ast/FlipNonReferenceMatchExpressionTest.java +++ b/web/shared-beans/src/test/java/io/deephaven/web/shared/ast/FlipNonReferenceMatchExpressionTest.java @@ -10,27 +10,26 @@ public class FlipNonReferenceMatchExpressionTest extends AbstractReplacingVisito @Test public void testFlipNonReferenceMatchExpression() { assertUnchanged("correct form", - in("ColumnA", 1, 2)); + in("ColumnA", 1, 2)); assertUnchanged("correct form", - in("ColumnA", 1)); + in("ColumnA", 1)); assertFilterEquals("all literals", - node(FilterDescriptor.FilterOperation.IN, literals(1, 2, 3)), - or( - node(FilterDescriptor.FilterOperation.IN, literals(2, 1)), - node(FilterDescriptor.FilterOperation.IN, literals(3, 1)))); + node(FilterDescriptor.FilterOperation.IN, literals(1, 2, 3)), + or( + node(FilterDescriptor.FilterOperation.IN, literals(2, 1)), + node(FilterDescriptor.FilterOperation.IN, literals(3, 1)))); assertFilterEquals("reference on right", - node(FilterDescriptor.FilterOperation.IN, literal(1), reference("ColumnA"), literal(4), - literal(5)), - or( - in("ColumnA", 1), - node(FilterDescriptor.FilterOperation.IN, literal(4), literal(1)), - node(FilterDescriptor.FilterOperation.IN, literal(5), literal(1)))); + node(FilterDescriptor.FilterOperation.IN, literal(1), reference("ColumnA"), literal(4), literal(5)), + or( + in("ColumnA", 1), + node(FilterDescriptor.FilterOperation.IN, literal(4), literal(1)), + node(FilterDescriptor.FilterOperation.IN, literal(5), literal(1)))); assertFilterEquals("reference on right, no OR 
required", - node(FilterDescriptor.FilterOperation.IN, literal(1), reference("ColumnA")), - in("ColumnA", 1)); + node(FilterDescriptor.FilterOperation.IN, literal(1), reference("ColumnA")), + in("ColumnA", 1)); } @Override diff --git a/web/shared-beans/src/test/java/io/deephaven/web/shared/ast/MakeExpressionsNullSafeTest.java b/web/shared-beans/src/test/java/io/deephaven/web/shared/ast/MakeExpressionsNullSafeTest.java index 533627cf247..b8ec94ff078 100644 --- a/web/shared-beans/src/test/java/io/deephaven/web/shared/ast/MakeExpressionsNullSafeTest.java +++ b/web/shared-beans/src/test/java/io/deephaven/web/shared/ast/MakeExpressionsNullSafeTest.java @@ -10,19 +10,18 @@ public class MakeExpressionsNullSafeTest extends AbstractReplacingVisitorTest { @Test public void testMakeExpressionsNullSafe() { assertUnchanged("doesnt affect EQ", - node(FilterDescriptor.FilterOperation.EQ, reference("ColumnA"), literal("A"))); + node(FilterDescriptor.FilterOperation.EQ, reference("ColumnA"), literal("A"))); assertFilterEquals("add null checks when implicit invoke is needed", - node(FilterDescriptor.FilterOperation.EQ_ICASE, reference("ColumnA"), - reference("ColumnB")), - or( - and( - node(FilterDescriptor.FilterOperation.IS_NULL, reference("ColumnA")), - node(FilterDescriptor.FilterOperation.IS_NULL, reference("ColumnB"))), - and( - not( - node(FilterDescriptor.FilterOperation.IS_NULL, reference("ColumnA"))), - invoke("equalsIgnoreCase", reference("ColumnA"), reference("ColumnB"))))); + node(FilterDescriptor.FilterOperation.EQ_ICASE, reference("ColumnA"), reference("ColumnB")), + or( + and( + node(FilterDescriptor.FilterOperation.IS_NULL, reference("ColumnA")), + node(FilterDescriptor.FilterOperation.IS_NULL, reference("ColumnB"))), + and( + not( + node(FilterDescriptor.FilterOperation.IS_NULL, reference("ColumnA"))), + invoke("equalsIgnoreCase", reference("ColumnA"), reference("ColumnB"))))); } @Override diff --git 
a/web/shared-beans/src/test/java/io/deephaven/web/shared/ast/MergeNestedBinaryOperationsTest.java b/web/shared-beans/src/test/java/io/deephaven/web/shared/ast/MergeNestedBinaryOperationsTest.java index 666d47e95cc..6fef7bccf97 100644 --- a/web/shared-beans/src/test/java/io/deephaven/web/shared/ast/MergeNestedBinaryOperationsTest.java +++ b/web/shared-beans/src/test/java/io/deephaven/web/shared/ast/MergeNestedBinaryOperationsTest.java @@ -10,58 +10,58 @@ public class MergeNestedBinaryOperationsTest extends AbstractReplacingVisitorTes @Test public void testMergeNestedBinaryOperations() { assertUnchanged("don't merge AND and OR", - and( - or( - eq("ColumnB", 3), - eq("ColumnA", 4)), - eq("ColumnA", 1))); - assertUnchanged("don't merge AND and OR", - or( and( - eq("ColumnB", 3), - eq("ColumnA", 4)), - eq("ColumnA", 1))); + or( + eq("ColumnB", 3), + eq("ColumnA", 4)), + eq("ColumnA", 1))); + assertUnchanged("don't merge AND and OR", + or( + and( + eq("ColumnB", 3), + eq("ColumnA", 4)), + eq("ColumnA", 1))); assertFilterEquals("merge ANDs", - and( - eq("ColumnA", 3), and( - eq("ColumnB", 3), - eq("ColumnC", 3))), - and( - eq("ColumnA", 3), - eq("ColumnB", 3), - eq("ColumnC", 3))); + eq("ColumnA", 3), + and( + eq("ColumnB", 3), + eq("ColumnC", 3))), + and( + eq("ColumnA", 3), + eq("ColumnB", 3), + eq("ColumnC", 3))); assertFilterEquals("merge ANDs", - and( and( - eq("ColumnA", 3), - eq("ColumnB", 3)), - eq("ColumnC", 3)), - and( - eq("ColumnA", 3), - eq("ColumnB", 3), - eq("ColumnC", 3))); + and( + eq("ColumnA", 3), + eq("ColumnB", 3)), + eq("ColumnC", 3)), + and( + eq("ColumnA", 3), + eq("ColumnB", 3), + eq("ColumnC", 3))); assertFilterEquals("merge ORs", - or( - eq("ColumnA", 3), or( - eq("ColumnB", 3), - eq("ColumnC", 3))), - or( - eq("ColumnA", 3), - eq("ColumnB", 3), - eq("ColumnC", 3))); + eq("ColumnA", 3), + or( + eq("ColumnB", 3), + eq("ColumnC", 3))), + or( + eq("ColumnA", 3), + eq("ColumnB", 3), + eq("ColumnC", 3))); assertFilterEquals("merge ANDs", - or( or( - 
eq("ColumnA", 3), - eq("ColumnB", 3)), - eq("ColumnC", 3)), - or( - eq("ColumnA", 3), - eq("ColumnB", 3), - eq("ColumnC", 3))); + or( + eq("ColumnA", 3), + eq("ColumnB", 3)), + eq("ColumnC", 3)), + or( + eq("ColumnA", 3), + eq("ColumnB", 3), + eq("ColumnC", 3))); } @Override diff --git a/web/shared-beans/src/test/java/io/deephaven/web/shared/ast/MergeRelatedSiblingExpressionsTest.java b/web/shared-beans/src/test/java/io/deephaven/web/shared/ast/MergeRelatedSiblingExpressionsTest.java index 7ab4ffa06ba..a3aa5cc1f4e 100644 --- a/web/shared-beans/src/test/java/io/deephaven/web/shared/ast/MergeRelatedSiblingExpressionsTest.java +++ b/web/shared-beans/src/test/java/io/deephaven/web/shared/ast/MergeRelatedSiblingExpressionsTest.java @@ -11,41 +11,37 @@ public class MergeRelatedSiblingExpressionsTest extends AbstractReplacingVisitor @Test public void testMergeRelatedSiblings() { assertUnchanged("plain IN", - in("ColumnA", 1, 2)); + in("ColumnA", 1, 2)); assertUnchanged("IN within AND", - and( - in("ColumnA", 1), - in("ColumnA", 2))); + and( + in("ColumnA", 1), + in("ColumnA", 2))); assertUnchanged("can't merge these siblings", - or( - in("ColumnA", 1), - eq("ColumnB", 2))); + or( + in("ColumnA", 1), + eq("ColumnB", 2))); assertUnchanged("unrelated INs", - or( - in("ColumnA", 1), - in("ColumnB", 2, 3, 4))); + or( + in("ColumnA", 1), + in("ColumnB", 2, 3, 4))); assertFilterEquals("merge NOT IN within AND, remove parent", - and( - notIn("ColumnA", 1), - notIn("ColumnA", 2)), - notIn("ColumnA", 1, 2)); + and( + notIn("ColumnA", 1), + notIn("ColumnA", 2)), + notIn("ColumnA", 1, 2)); assertFilterEquals("merge only INs in OR", - or( - node(FilterDescriptor.FilterOperation.EQ, reference("ColumnA"), - reference("ColumnB")), - in("ColumnA", 1), - node(FilterDescriptor.FilterOperation.EQ, reference("ColumnA"), - reference("ColumnC")), - in("ColumnA", 2)), - or( - in("ColumnA", 1, 2), - node(FilterDescriptor.FilterOperation.EQ, reference("ColumnA"), - reference("ColumnB")), - 
node(FilterDescriptor.FilterOperation.EQ, reference("ColumnA"), - reference("ColumnC")))); + or( + node(FilterDescriptor.FilterOperation.EQ, reference("ColumnA"), reference("ColumnB")), + in("ColumnA", 1), + node(FilterDescriptor.FilterOperation.EQ, reference("ColumnA"), reference("ColumnC")), + in("ColumnA", 2)), + or( + in("ColumnA", 1, 2), + node(FilterDescriptor.FilterOperation.EQ, reference("ColumnA"), reference("ColumnB")), + node(FilterDescriptor.FilterOperation.EQ, reference("ColumnA"), reference("ColumnC")))); } @Override diff --git a/web/shared-beans/src/test/java/io/deephaven/web/shared/ast/NormalizeNotsTest.java b/web/shared-beans/src/test/java/io/deephaven/web/shared/ast/NormalizeNotsTest.java index a408536c329..2238b84e435 100644 --- a/web/shared-beans/src/test/java/io/deephaven/web/shared/ast/NormalizeNotsTest.java +++ b/web/shared-beans/src/test/java/io/deephaven/web/shared/ast/NormalizeNotsTest.java @@ -13,94 +13,70 @@ public void testNormalizeNots() { // this test effectively tests FilterDescriptor.not(), but also that the visitor // correctly calls it on the tree assertFilterEquals("two nots around a simple expression", - not(not(invoke("foo", reference("ColumnA")))), - invoke("foo", reference("ColumnA"))); + not(not(invoke("foo", reference("ColumnA")))), + invoke("foo", reference("ColumnA"))); assertFilterEquals("two nots around a simple expression", - not(not(eq("foo", 1))), - eq("foo", 1)); + not(not(eq("foo", 1))), + eq("foo", 1)); assertFilterEquals("two nots within a tree", - not(and( - not(or( - invoke("methodA", reference("ColumnA")), // invoke used since it can't be - // rewritten to handle a NOT - invoke("methodB", reference("ColumnA")))), - or( - invoke("methodC", reference("ColumnA")), - invoke("methodD", reference("ColumnA"))))), - or( + not(and( + not(or( + invoke("methodA", reference("ColumnA")), // invoke used since it can't be rewritten to + // handle a NOT + invoke("methodB", reference("ColumnA")))), + or( + invoke("methodC", 
reference("ColumnA")), + invoke("methodD", reference("ColumnA"))))), or( - invoke("methodA", reference("ColumnA")), - invoke("methodB", reference("ColumnA"))), - and( - not(invoke("methodC", reference("ColumnA"))), - not(invoke("methodD", reference("ColumnA")))))); + or( + invoke("methodA", reference("ColumnA")), + invoke("methodB", reference("ColumnA"))), + and( + not(invoke("methodC", reference("ColumnA"))), + not(invoke("methodD", reference("ColumnA")))))); assertUnchanged("other non-flippble expression", - or( - not(node(FilterDescriptor.FilterOperation.CONTAINS, reference("ColumnA"), - literal("asdf"))), - not(node(FilterDescriptor.FilterOperation.IS_NULL, reference("ColumnA"))))); + or( + not(node(FilterDescriptor.FilterOperation.CONTAINS, reference("ColumnA"), literal("asdf"))), + not(node(FilterDescriptor.FilterOperation.IS_NULL, reference("ColumnA"))))); try { execute( - not(node(FilterDescriptor.FilterOperation.SEARCH, literal("asdf")))); + not(node(FilterDescriptor.FilterOperation.SEARCH, literal("asdf")))); Assert.fail("Expected exception"); } catch (IllegalStateException expected) { Assert.assertEquals("Cannot not() a search", expected.getMessage()); } assertFilterEquals("flip various leaf expressions", - not(or( - node(FilterDescriptor.FilterOperation.LT, reference("ColumnA"), - reference("ColumnB")), - node(FilterDescriptor.FilterOperation.GT, reference("ColumnA"), - reference("ColumnB")), - node(FilterDescriptor.FilterOperation.LTE, reference("ColumnA"), - reference("ColumnB")), - node(FilterDescriptor.FilterOperation.GTE, reference("ColumnA"), - reference("ColumnB")), - node(FilterDescriptor.FilterOperation.EQ, reference("ColumnA"), - reference("ColumnB")), - node(FilterDescriptor.FilterOperation.EQ_ICASE, reference("ColumnA"), - reference("ColumnB")), - node(FilterDescriptor.FilterOperation.NEQ, reference("ColumnA"), - reference("ColumnB")), - node(FilterDescriptor.FilterOperation.NEQ_ICASE, reference("ColumnA"), - reference("ColumnB")), - 
node(FilterDescriptor.FilterOperation.IN, reference("ColumnA"), - reference("ColumnB")), - node(FilterDescriptor.FilterOperation.IN_ICASE, reference("ColumnA"), - reference("ColumnB")), - node(FilterDescriptor.FilterOperation.NOT_IN, reference("ColumnA"), - reference("ColumnB")), - node(FilterDescriptor.FilterOperation.NOT_IN_ICASE, reference("ColumnA"), - reference("ColumnB")))), - and( - node(FilterDescriptor.FilterOperation.GTE, reference("ColumnA"), - reference("ColumnB")), - node(FilterDescriptor.FilterOperation.LTE, reference("ColumnA"), - reference("ColumnB")), - node(FilterDescriptor.FilterOperation.GT, reference("ColumnA"), - reference("ColumnB")), - node(FilterDescriptor.FilterOperation.LT, reference("ColumnA"), - reference("ColumnB")), - node(FilterDescriptor.FilterOperation.NEQ, reference("ColumnA"), - reference("ColumnB")), - node(FilterDescriptor.FilterOperation.NEQ_ICASE, reference("ColumnA"), - reference("ColumnB")), - node(FilterDescriptor.FilterOperation.EQ, reference("ColumnA"), - reference("ColumnB")), - node(FilterDescriptor.FilterOperation.EQ_ICASE, reference("ColumnA"), - reference("ColumnB")), - node(FilterDescriptor.FilterOperation.NOT_IN, reference("ColumnA"), - reference("ColumnB")), - node(FilterDescriptor.FilterOperation.NOT_IN_ICASE, reference("ColumnA"), - reference("ColumnB")), - node(FilterDescriptor.FilterOperation.IN, reference("ColumnA"), - reference("ColumnB")), - node(FilterDescriptor.FilterOperation.IN_ICASE, reference("ColumnA"), - reference("ColumnB")))); + not(or( + node(FilterDescriptor.FilterOperation.LT, reference("ColumnA"), reference("ColumnB")), + node(FilterDescriptor.FilterOperation.GT, reference("ColumnA"), reference("ColumnB")), + node(FilterDescriptor.FilterOperation.LTE, reference("ColumnA"), reference("ColumnB")), + node(FilterDescriptor.FilterOperation.GTE, reference("ColumnA"), reference("ColumnB")), + node(FilterDescriptor.FilterOperation.EQ, reference("ColumnA"), reference("ColumnB")), + 
node(FilterDescriptor.FilterOperation.EQ_ICASE, reference("ColumnA"), reference("ColumnB")), + node(FilterDescriptor.FilterOperation.NEQ, reference("ColumnA"), reference("ColumnB")), + node(FilterDescriptor.FilterOperation.NEQ_ICASE, reference("ColumnA"), reference("ColumnB")), + node(FilterDescriptor.FilterOperation.IN, reference("ColumnA"), reference("ColumnB")), + node(FilterDescriptor.FilterOperation.IN_ICASE, reference("ColumnA"), reference("ColumnB")), + node(FilterDescriptor.FilterOperation.NOT_IN, reference("ColumnA"), reference("ColumnB")), + node(FilterDescriptor.FilterOperation.NOT_IN_ICASE, reference("ColumnA"), + reference("ColumnB")))), + and( + node(FilterDescriptor.FilterOperation.GTE, reference("ColumnA"), reference("ColumnB")), + node(FilterDescriptor.FilterOperation.LTE, reference("ColumnA"), reference("ColumnB")), + node(FilterDescriptor.FilterOperation.GT, reference("ColumnA"), reference("ColumnB")), + node(FilterDescriptor.FilterOperation.LT, reference("ColumnA"), reference("ColumnB")), + node(FilterDescriptor.FilterOperation.NEQ, reference("ColumnA"), reference("ColumnB")), + node(FilterDescriptor.FilterOperation.NEQ_ICASE, reference("ColumnA"), reference("ColumnB")), + node(FilterDescriptor.FilterOperation.EQ, reference("ColumnA"), reference("ColumnB")), + node(FilterDescriptor.FilterOperation.EQ_ICASE, reference("ColumnA"), reference("ColumnB")), + node(FilterDescriptor.FilterOperation.NOT_IN, reference("ColumnA"), reference("ColumnB")), + node(FilterDescriptor.FilterOperation.NOT_IN_ICASE, reference("ColumnA"), reference("ColumnB")), + node(FilterDescriptor.FilterOperation.IN, reference("ColumnA"), reference("ColumnB")), + node(FilterDescriptor.FilterOperation.IN_ICASE, reference("ColumnA"), reference("ColumnB")))); } @Override diff --git a/web/shared-beans/src/test/java/io/deephaven/web/shared/data/RangeSetTest.java b/web/shared-beans/src/test/java/io/deephaven/web/shared/data/RangeSetTest.java index 77b1cedeb47..cd92082b906 100644 --- 
a/web/shared-beans/src/test/java/io/deephaven/web/shared/data/RangeSetTest.java +++ b/web/shared-beans/src/test/java/io/deephaven/web/shared/data/RangeSetTest.java @@ -92,8 +92,7 @@ public void testOverlappingRangesInDifferentOrder() { list.forEach(rangeSet::addRange); assertEquals(16, rangeSet.size()); - assertEquals(list.toString(), Collections.singletonList(new Range(100, 115)), - asList(rangeSet)); + assertEquals(list.toString(), Collections.singletonList(new Range(100, 115)), asList(rangeSet)); }); // same three items, but with another before that will not overlap with them @@ -103,8 +102,7 @@ public void testOverlappingRangesInDifferentOrder() { list.forEach(rangeSet::addRange); assertEquals(21, rangeSet.size()); - assertEquals(list.toString(), Arrays.asList(new Range(0, 4), new Range(100, 115)), - asList(rangeSet)); + assertEquals(list.toString(), Arrays.asList(new Range(0, 4), new Range(100, 115)), asList(rangeSet)); }); // same three items, but with another following that will not overlap with them @@ -114,8 +112,7 @@ public void testOverlappingRangesInDifferentOrder() { list.forEach(rangeSet::addRange); assertEquals(21, rangeSet.size()); - assertEquals(list.toString(), Arrays.asList(new Range(100, 115), new Range(200, 204)), - asList(rangeSet)); + assertEquals(list.toString(), Arrays.asList(new Range(100, 115), new Range(200, 204)), asList(rangeSet)); }); } @@ -206,109 +203,109 @@ public void testRemove() { // Remove section overlapping-before/after first/last/middle Supplier create = () -> of( - new Range(5, 10), - new Range(15, 20), - new Range(25, 30)); + new Range(5, 10), + new Range(15, 20), + new Range(25, 30)); rangeSet = create.get(); rangeSet.removeRange(new Range(3, 6)); assertEquals(of( - new Range(7, 10), - new Range(15, 20), - new Range(25, 30)), rangeSet); + new Range(7, 10), + new Range(15, 20), + new Range(25, 30)), rangeSet); rangeSet = create.get(); rangeSet.removeRange(new Range(8, 12)); assertEquals(of( - new Range(5, 7), - new 
Range(15, 20), - new Range(25, 30)), rangeSet); + new Range(5, 7), + new Range(15, 20), + new Range(25, 30)), rangeSet); rangeSet = create.get(); rangeSet.removeRange(new Range(12, 16)); assertEquals(of( - new Range(5, 10), - new Range(17, 20), - new Range(25, 30)), rangeSet); + new Range(5, 10), + new Range(17, 20), + new Range(25, 30)), rangeSet); rangeSet = create.get(); rangeSet.removeRange(new Range(18, 22)); assertEquals(of( - new Range(5, 10), - new Range(15, 17), - new Range(25, 30)), rangeSet); + new Range(5, 10), + new Range(15, 17), + new Range(25, 30)), rangeSet); rangeSet = create.get(); rangeSet.removeRange(new Range(22, 27)); assertEquals(of( - new Range(5, 10), - new Range(15, 20), - new Range(28, 30)), rangeSet); + new Range(5, 10), + new Range(15, 20), + new Range(28, 30)), rangeSet); rangeSet = create.get(); rangeSet.removeRange(new Range(26, 31)); assertEquals(of( - new Range(5, 10), - new Range(15, 20), - new Range(25, 25)), rangeSet); + new Range(5, 10), + new Range(15, 20), + new Range(25, 25)), rangeSet); // Remove section entirely within another range, touching start or end or none rangeSet = create.get(); rangeSet.removeRange(new Range(5, 7)); assertEquals(of( - new Range(8, 10), - new Range(15, 20), - new Range(25, 30)), rangeSet); + new Range(8, 10), + new Range(15, 20), + new Range(25, 30)), rangeSet); rangeSet = create.get(); rangeSet.removeRange(new Range(7, 10)); assertEquals(of( - new Range(5, 6), - new Range(15, 20), - new Range(25, 30)), rangeSet); + new Range(5, 6), + new Range(15, 20), + new Range(25, 30)), rangeSet); rangeSet = create.get(); rangeSet.removeRange(new Range(6, 8)); assertEquals(of( - new Range(5, 5), - new Range(9, 10), - new Range(15, 20), - new Range(25, 30)), rangeSet); + new Range(5, 5), + new Range(9, 10), + new Range(15, 20), + new Range(25, 30)), rangeSet); rangeSet = create.get(); rangeSet.removeRange(new Range(15, 17)); assertEquals(of( - new Range(5, 10), - new Range(18, 20), - new Range(25, 30)), 
rangeSet); + new Range(5, 10), + new Range(18, 20), + new Range(25, 30)), rangeSet); rangeSet = create.get(); rangeSet.removeRange(new Range(17, 20)); assertEquals(of( - new Range(5, 10), - new Range(15, 16), - new Range(25, 30)), rangeSet); + new Range(5, 10), + new Range(15, 16), + new Range(25, 30)), rangeSet); rangeSet = create.get(); rangeSet.removeRange(new Range(16, 18)); assertEquals(of( - new Range(5, 10), - new Range(15, 15), - new Range(19, 20), - new Range(25, 30)), rangeSet); + new Range(5, 10), + new Range(15, 15), + new Range(19, 20), + new Range(25, 30)), rangeSet); rangeSet = create.get(); rangeSet.removeRange(new Range(25, 27)); assertEquals(of( - new Range(5, 10), - new Range(15, 20), - new Range(28, 30)), rangeSet); + new Range(5, 10), + new Range(15, 20), + new Range(28, 30)), rangeSet); rangeSet = create.get(); rangeSet.removeRange(new Range(27, 30)); assertEquals(of( - new Range(5, 10), - new Range(15, 20), - new Range(25, 26)), rangeSet); + new Range(5, 10), + new Range(15, 20), + new Range(25, 26)), rangeSet); rangeSet = create.get(); rangeSet.removeRange(new Range(26, 28)); assertEquals(of( - new Range(5, 10), - new Range(15, 20), - new Range(25, 25), - new Range(29, 30)), rangeSet); + new Range(5, 10), + new Range(15, 20), + new Range(25, 25), + new Range(29, 30)), rangeSet); // Remove section overlapping 2+ sections @@ -327,25 +324,25 @@ public void testRemove() { rangeSet = create.get(); rangeSet.removeRange(new Range(4, 16)); assertEquals(of( - new Range(17, 20), - new Range(25, 30)), rangeSet); + new Range(17, 20), + new Range(25, 30)), rangeSet); rangeSet = create.get(); rangeSet.removeRange(new Range(6, 21)); assertEquals(of( - new Range(5, 5), - new Range(25, 30)), rangeSet); + new Range(5, 5), + new Range(25, 30)), rangeSet); rangeSet = create.get(); rangeSet.removeRange(new Range(9, 26)); assertEquals(of( - new Range(5, 8), - new Range(27, 30)), rangeSet); + new Range(5, 8), + new Range(27, 30)), rangeSet); rangeSet = 
create.get(); rangeSet.removeRange(new Range(11, 31)); assertEquals(of( - new Range(5, 10)), rangeSet); + new Range(5, 10)), rangeSet); rangeSet = create.get(); rangeSet.removeRange(new Range(4, 31)); @@ -357,18 +354,18 @@ public void testRemove() { rangeSet = create.get(); rangeSet.removeRange(new Range(5, 10)); assertEquals(of( - new Range(15, 20), - new Range(25, 30)), rangeSet); + new Range(15, 20), + new Range(25, 30)), rangeSet); rangeSet = create.get(); rangeSet.removeRange(new Range(15, 20)); assertEquals(of( - new Range(5, 10), - new Range(25, 30)), rangeSet); + new Range(5, 10), + new Range(25, 30)), rangeSet); rangeSet = create.get(); rangeSet.removeRange(new Range(25, 30)); assertEquals(of( - new Range(5, 10), - new Range(15, 20)), rangeSet); + new Range(5, 10), + new Range(15, 20)), rangeSet); }